diff --git a/README.md b/README.md index c1ecca34cbf0fef8462a1331a217b3e102c18a2a..6a2e1f5bf2c36dd1e3c3f92514abfafeefe98c2c 100644 --- a/README.md +++ b/README.md @@ -6,4 +6,21 @@ language: - en metrics: - wer ---- \ No newline at end of file +--- +# LibriSpeech pruned_transducer_stateless7_streaming + +This model is based on the icefall `pruned_transducer_stateless7_streaming` recipe, +but the model parameters are modified to make the model smaller. It can be +considered a streaming version of [this model](https://huggingface.co/Zengwei/icefall-asr-librispeech-pruned-transducer-stateless7-20M-2023-01-28) and follows +the same parameter configuration. + +The main difference from that model is that +this one additionally uses simulated RIRs for training, which effectively doubles the training data. + +## Performance Record + +| Decoding method | test-clean (WER %) | test-other (WER %) | +|---------------------------|------------|------------| +| greedy search | 3.58 | 9.29 | +| fast beam search | 3.57 | 9.05 | +| modified beam search | 3.41 | 8.94 | diff --git a/data/lang_bpe_500/bpe.model b/data/lang_bpe_500/bpe.model new file mode 100644 index 0000000000000000000000000000000000000000..0a7fdb4e15f063e06d9936c71e13525b31c588e3 --- /dev/null +++ b/data/lang_bpe_500/bpe.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c53433de083c4a6ad12d034550ef22de68cec62c4f58932a7b6b8b2f1e743fa5 +size 244865 diff --git a/data/lang_bpe_500/tokens.txt b/data/lang_bpe_500/tokens.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4d1bf82e68de1354f2a3f5fd51094d61d65c77c --- /dev/null +++ b/data/lang_bpe_500/tokens.txt @@ -0,0 +1,502 @@ +<blk> 0 +<sos/eos> 1 +<unk> 2 +S 3 +▁THE 4 +▁A 5 +T 6 +▁AND 7 +ED 8 +▁OF 9 +▁TO 10 +E 11 +D 12 +N 13 +ING 14 +▁IN 15 +Y 16 +M 17 +C 18 +▁I 19 +A 20 +P 21 +▁HE 22 +R 23 +O 24 +L 25 +RE 26 +I 27 +U 28 +ER 29 +▁IT 30 +LY 31 +▁THAT 32 +▁WAS 33 +▁ 34 +▁S 35 +AR 36 +▁BE 37 +F 38 +▁C 39 +IN 40 +B 41 +▁FOR 42 +OR 43 +LE 44 +' 45 +▁HIS 46 +▁YOU 47 +AL 48 +▁RE 49 +V 50 +▁B 51 +G 52 +RI 53 +▁E 54 +▁WITH 55 +▁T 56 +▁AS 57 +LL 58 +▁P 59 +▁HER 60 +ST 61 +▁HAD 62 +▁SO 63 +▁F 64 +W 65 +CE 66 +▁IS 67 +ND 68 +▁NOT 69 +TH 70 +▁BUT 71 +EN 72 +▁SHE 73 +▁ON 74 +VE 75 +ON 76 +SE 77 +▁DE 78 +UR 79 +▁G 80 +CH 81 +K 82 +TER 83 +▁AT 84 +IT 85 +▁ME 86 +RO 87 +NE 88 +RA 89 +ES 90 +IL 91 +NG 92 +IC 93 +▁NO 94 +▁HIM 95 +ENT 96 +IR 97 +▁WE 98 +H 99 +▁DO 100 +▁ALL 101 +▁HAVE 102 +LO 103 +▁BY 104 +▁MY 105 +▁MO 106 +▁THIS 107 +LA 108 +▁ST 109 +▁WHICH 110 +▁CON 111 +▁THEY 112 +CK 113 +TE 114 +▁SAID 115 +▁FROM 116 +▁GO 117 +▁WHO 118 +▁TH 119 +▁OR 120 +▁D 121 +▁W 122 +VER 123 +LI 124 +▁SE 125 +▁ONE 126 +▁CA 127 +▁AN 128 +▁LA 129 +▁WERE 130 +EL 131 +▁HA 132 +▁MAN 133 +▁FA 134 +▁EX 135 +AD 136 +▁SU 137 +RY 138 +▁MI 139 +AT 140 +▁BO 141 +▁WHEN 142 +AN 143 +THER 144 +PP 145 +ATION 146 +▁FI 147 +▁WOULD 148 +▁PRO 149 +OW 150 +ET 151 +▁O 152 +▁THERE 153 +▁HO 154 +ION 155 +▁WHAT 156 +▁FE 157 +▁PA 158 +US 159 +MENT 160 +▁MA 161 +UT 162 +▁OUT 163 +▁THEIR 164 +▁IF 165 +▁LI 166 +▁K 167 +▁WILL 168 +▁ARE 169 +ID 170 +▁RO 171 +DE 172 +TION 173 +▁WA 174 +PE 175 +▁UP 176 +▁SP 177 +▁PO 178 +IGHT 179 +▁UN 180 +RU 181 +▁LO 182 +AS 183 +OL 184 +▁LE 185 +▁BEEN 186 +▁SH 187 +▁RA 188 +▁SEE 189 +KE 190 +UL 191 +TED 192 +▁SA 193 +UN 194 +UND 195 +ANT 196 +▁NE 197 +IS 198 +▁THEM 199 +CI 200 +GE 201 +▁COULD 202 +▁DIS 203 +OM 204 +ISH 205 +HE 206 +EST 207 +▁SOME 208 +ENCE 209 +ITY 210 +IVE 211 +▁US 212 +▁MORE 213 +▁EN 214 +ARD 215 +ATE 216 +▁YOUR 217 +▁INTO 218 +▁KNOW 219 +▁CO 220 +ANCE 221 +▁TIME 222 +▁WI 223 +▁YE 224 +AGE 225 +▁NOW 226 
+TI 227 +FF 228 +ABLE 229 +▁VERY 230 +▁LIKE 231 +AM 232 +HI 233 +Z 234 +▁OTHER 235 +▁THAN 236 +▁LITTLE 237 +▁DID 238 +▁LOOK 239 +TY 240 +ERS 241 +▁CAN 242 +▁CHA 243 +▁AR 244 +X 245 +FUL 246 +UGH 247 +▁BA 248 +▁DAY 249 +▁ABOUT 250 +TEN 251 +IM 252 +▁ANY 253 +▁PRE 254 +▁OVER 255 +IES 256 +NESS 257 +ME 258 +BLE 259 +▁M 260 +ROW 261 +▁HAS 262 +▁GREAT 263 +▁VI 264 +TA 265 +▁AFTER 266 +PER 267 +▁AGAIN 268 +HO 269 +SH 270 +▁UPON 271 +▁DI 272 +▁HAND 273 +▁COM 274 +IST 275 +TURE 276 +▁STA 277 +▁THEN 278 +▁SHOULD 279 +▁GA 280 +OUS 281 +OUR 282 +▁WELL 283 +▁ONLY 284 +MAN 285 +▁GOOD 286 +▁TWO 287 +▁MAR 288 +▁SAY 289 +▁HU 290 +TING 291 +▁OUR 292 +RESS 293 +▁DOWN 294 +IOUS 295 +▁BEFORE 296 +▁DA 297 +▁NA 298 +QUI 299 +▁MADE 300 +▁EVERY 301 +▁OLD 302 +▁EVEN 303 +IG 304 +▁COME 305 +▁GRA 306 +▁RI 307 +▁LONG 308 +OT 309 +SIDE 310 +WARD 311 +▁FO 312 +▁WHERE 313 +MO 314 +LESS 315 +▁SC 316 +▁MUST 317 +▁NEVER 318 +▁HOW 319 +▁CAME 320 +▁SUCH 321 +▁RU 322 +▁TAKE 323 +▁WO 324 +▁CAR 325 +UM 326 +AK 327 +▁THINK 328 +▁MUCH 329 +▁MISTER 330 +▁MAY 331 +▁JO 332 +▁WAY 333 +▁COMP 334 +▁THOUGHT 335 +▁STO 336 +▁MEN 337 +▁BACK 338 +▁DON 339 +J 340 +▁LET 341 +▁TRA 342 +▁FIRST 343 +▁JUST 344 +▁VA 345 +▁OWN 346 +▁PLA 347 +▁MAKE 348 +ATED 349 +▁HIMSELF 350 +▁WENT 351 +▁PI 352 +GG 353 +RING 354 +▁DU 355 +▁MIGHT 356 +▁PART 357 +▁GIVE 358 +▁IMP 359 +▁BU 360 +▁PER 361 +▁PLACE 362 +▁HOUSE 363 +▁THROUGH 364 +IAN 365 +▁SW 366 +▁UNDER 367 +QUE 368 +▁AWAY 369 +▁LOVE 370 +QUA 371 +▁LIFE 372 +▁GET 373 +▁WITHOUT 374 +▁PASS 375 +▁TURN 376 +IGN 377 +▁HEAD 378 +▁MOST 379 +▁THOSE 380 +▁SHALL 381 +▁EYES 382 +▁COL 383 +▁STILL 384 +▁NIGHT 385 +▁NOTHING 386 +ITION 387 +HA 388 +▁TELL 389 +▁WORK 390 +▁LAST 391 +▁NEW 392 +▁FACE 393 +▁HI 394 +▁WORD 395 +▁FOUND 396 +▁COUNT 397 +▁OB 398 +▁WHILE 399 +▁SHA 400 +▁MEAN 401 +▁SAW 402 +▁PEOPLE 403 +▁FRIEND 404 +▁THREE 405 +▁ROOM 406 +▁SAME 407 +▁THOUGH 408 +▁RIGHT 409 +▁CHILD 410 +▁FATHER 411 +▁ANOTHER 412 +▁HEART 413 +▁WANT 414 +▁TOOK 415 +OOK 416 +▁LIGHT 417 +▁MISSUS 418 +▁OPEN 419 +▁JU 420 +▁ASKED 421 +PORT 422 +▁LEFT 423 +▁JA 424 +▁WORLD 425 +▁HOME 426 +▁WHY 427 +▁ALWAYS 428 +▁ANSWER 429 +▁SEEMED 430 +▁SOMETHING 431 +▁GIRL 432 +▁BECAUSE 433 +▁NAME 434 +▁TOLD 435 +▁NI 436 +▁HIGH 437 +IZE 438 +▁WOMAN 439 +▁FOLLOW 440 +▁RETURN 441 +▁KNEW 442 +▁EACH 443 +▁KIND 444 +▁JE 445 +▁ACT 446 +▁LU 447 +▁CERTAIN 448 +▁YEARS 449 +▁QUITE 450 +▁APPEAR 451 +▁BETTER 452 +▁HALF 453 +▁PRESENT 454 +▁PRINCE 455 +SHIP 456 +▁ALSO 457 +▁BEGAN 458 +▁HAVING 459 +▁ENOUGH 460 +▁PERSON 461 +▁LADY 462 +▁WHITE 463 +▁COURSE 464 +▁VOICE 465 +▁SPEAK 466 +▁POWER 467 +▁MORNING 468 +▁BETWEEN 469 +▁AMONG 470 +▁KEEP 471 +▁WALK 472 +▁MATTER 473 +▁TEA 474 +▁BELIEVE 475 +▁SMALL 476 +▁TALK 477 +▁FELT 478 +▁HORSE 479 +▁MYSELF 480 +▁SIX 481 +▁HOWEVER 482 +▁FULL 483 +▁HERSELF 484 +▁POINT 485 +▁STOOD 486 +▁HUNDRED 487 +▁ALMOST 488 +▁SINCE 489 +▁LARGE 490 +▁LEAVE 491 +▁PERHAPS 492 +▁DARK 493 +▁SUDDEN 494 +▁REPLIED 495 +▁ANYTHING 496 +▁WONDER 497 +▁UNTIL 498 +Q 499 +#0 500 +#1 501 diff --git a/exp/cpu_jit.pt b/exp/cpu_jit.pt new file mode 100644 index 0000000000000000000000000000000000000000..4610aba3d67c184629d18ef77fadf0b82de4f99a --- /dev/null +++ b/exp/cpu_jit.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:590f80b5b73e8b207121113e18ea6cb0f4254e415a9af6c19ab43b7611e4cd24 +size 134187228 diff --git a/exp/decoder_jit_trace.pt b/exp/decoder_jit_trace.pt new file mode 100644 index 0000000000000000000000000000000000000000..4f995dbb90fc2b25340f4dde59745100b4ba9862 --- /dev/null +++ b/exp/decoder_jit_trace.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:23520c0ed0d6738d9dd52584296394b9b9ec577c930b6150404e4508f3ac8381 +size 1047141 diff --git a/exp/encoder_jit_trace.pt b/exp/encoder_jit_trace.pt new file mode 100644 index 0000000000000000000000000000000000000000..f664cef11422084e63293e740d2f84a1468b9c7a --- /dev/null +++ b/exp/encoder_jit_trace.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c67b2fef64df0fdefebd8a45cd45f16c5f43d6b2a156ef70cb7cd40654324cd5 +size 129943703 diff --git a/exp/export.sh b/exp/export.sh new file mode 100644 index 0000000000000000000000000000000000000000..d3c5aa0b6f0e3ad164b18702209dc2380642369e --- /dev/null +++ b/exp/export.sh @@ -0,0 +1,18 @@ +./pruned_transducer_stateless7_streaming/export.py \ + --bpe-model data/lang_bpe_500/bpe.model \ + --use-averaged-model=True \ + --epoch 30 \ + --avg 9 \ + --decode-chunk-len 32 \ + --jit 0 \ + --exp-dir ./pruned_transducer_stateless7_streaming/exp/ \ + --num-encoder-layers 2,2,2,2,2 \ + --feedforward-dims 768,768,768,768,768 \ + --nhead 8,8,8,8,8 \ + --encoder-dims 256,256,256,256,256 \ + --attention-dims 192,192,192,192,192 \ + --encoder-unmasked-dims 192,192,192,192,192 \ + --zipformer-downsampling-factors 1,2,4,8,2 \ + --cnn-module-kernels 31,31,31,31,31 \ + --decoder-dim 512 \ + --joiner-dim 512 \ No newline at end of file diff --git a/exp/jit_trace_export.sh b/exp/jit_trace_export.sh new file mode 100644 index 0000000000000000000000000000000000000000..6737cd84d857c4d4a68d6f676f23caab7ff66b98 --- /dev/null +++ b/exp/jit_trace_export.sh @@ -0,0 +1,17 @@ +./pruned_transducer_stateless7_streaming/jit_trace_export.py \ + --bpe-model data/lang_bpe_500/bpe.model \ + --use-averaged-model=True \ + --epoch 30 \ + --avg 9 \ + --decode-chunk-len 32 \ + --exp-dir ./pruned_transducer_stateless7_streaming/exp \ + --num-encoder-layers 2,2,2,2,2 \ + --feedforward-dims 768,768,768,768,768 \ + --nhead 8,8,8,8,8 \ + --encoder-dims 256,256,256,256,256 \ + --attention-dims 192,192,192,192,192 \ + --encoder-unmasked-dims 192,192,192,192,192 \ + --zipformer-downsampling-factors 1,2,4,8,2 \ + --cnn-module-kernels 31,31,31,31,31 \ + --decoder-dim 512 \ + --joiner-dim 512 \ No newline at end of file diff --git a/exp/joiner_jit_trace.pt b/exp/joiner_jit_trace.pt new file mode 100644 index 0000000000000000000000000000000000000000..67da1bd2ddca0e45e28c52a6d1609274358cb4c8 --- /dev/null +++ b/exp/joiner_jit_trace.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ecb6e215a43492b243ad4af3adf2cf5b761987fdb5a92656453b0e7b16c6de6 +size 2611547 diff --git a/exp/pretrained.pt b/exp/pretrained.pt new file mode 100644 index 0000000000000000000000000000000000000000..07ca04c9f938117f0384325c6d649d54b25cfd95 --- /dev/null +++ b/exp/pretrained.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dfffe9f495d6a0599cc77c6b415ae04a18baa0446b3d73b7a431867296fd0de1 +size 82988698 diff --git a/exp/tensorboard/events.out.tfevents.1680303114.r2n03.443763.0 b/exp/tensorboard/events.out.tfevents.1680303114.r2n03.443763.0 new file mode 100644 index 0000000000000000000000000000000000000000..60abf49be3cd37e965642de21fb2624b51158bff --- /dev/null +++ b/exp/tensorboard/events.out.tfevents.1680303114.r2n03.443763.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abfff0db01df52f1470a881db15a291e852a7c84aa0c26a25c98acc7542b5773 +size 1984002 diff --git 
a/log/fast_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..a93a7524d71ece0d0575cfe0b557b6f8639b239c --- /dev/null +++ b/log/fast_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,12851 @@ +%WER = 3.57 +Errors: 218 insertions, 142 deletions, 1519 substitutions, over 52576 reference words (50915 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1089-134686-0000-1733: HE HOPED THERE WOULD BE STEW FOR DINNER TURNIPS AND CARROTS AND BRUISED POTATOES AND FAT MUTTON PIECES TO BE LADLED OUT IN THICK PEPPERED (FLOUR->FLOWER) FATTENED SAUCE +1089-134686-0001-1734: STUFF IT INTO YOU HIS BELLY COUNSELLED HIM +1089-134686-0002-1735: AFTER EARLY (NIGHTFALL->NIGHT FALL) THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS +1089-134686-0003-1736: HELLO BERTIE ANY GOOD IN YOUR MIND +1089-134686-0004-1737: NUMBER TEN FRESH (NELLY->NELLIE) IS WAITING ON YOU GOOD NIGHT HUSBAND +1089-134686-0005-1738: THE MUSIC CAME NEARER AND HE RECALLED THE WORDS THE WORDS OF SHELLEY'S FRAGMENT UPON THE MOON WANDERING COMPANIONLESS PALE FOR WEARINESS +1089-134686-0006-1739: THE DULL LIGHT FELL MORE FAINTLY UPON THE PAGE WHEREON ANOTHER EQUATION BEGAN TO UNFOLD ITSELF SLOWLY AND TO SPREAD ABROAD ITS WIDENING TAIL +1089-134686-0007-1740: A COLD LUCID INDIFFERENCE REIGNED IN HIS SOUL +1089-134686-0008-1741: THE CHAOS IN WHICH HIS (ARDOUR->ARDOR) EXTINGUISHED ITSELF WAS A COLD INDIFFERENT KNOWLEDGE OF HIMSELF +1089-134686-0009-1742: AT MOST BY AN ALMS GIVEN TO A BEGGAR WHOSE BLESSING HE FLED FROM HE MIGHT HOPE WEARILY TO WIN FOR HIMSELF SOME MEASURE OF ACTUAL GRACE +1089-134686-0010-1743: WELL NOW ENNIS I DECLARE YOU HAVE A HEAD AND SO HAS MY STICK +1089-134686-0011-1744: ON SATURDAY MORNINGS WHEN THE (SODALITY->SODELITY) MET IN THE CHAPEL TO RECITE THE LITTLE OFFICE HIS PLACE WAS A CUSHIONED KNEELING DESK AT THE RIGHT OF THE ALTAR FROM WHICH HE LED HIS WING OF BOYS THROUGH THE RESPONSES +1089-134686-0012-1745: HER EYES SEEMED TO REGARD HIM WITH MILD PITY HER HOLINESS A STRANGE LIGHT GLOWING FAINTLY UPON HER FRAIL FLESH DID NOT HUMILIATE THE SINNER WHO APPROACHED HER +1089-134686-0013-1746: IF EVER HE WAS IMPELLED TO CAST SIN FROM HIM AND TO REPENT THE IMPULSE THAT MOVED HIM WAS THE WISH TO BE HER KNIGHT +1089-134686-0014-1747: HE TRIED TO THINK HOW IT COULD BE +1089-134686-0015-1748: BUT THE DUSK DEEPENING IN THE (SCHOOLROOM->SCHOOL ROOM) COVERED OVER HIS THOUGHTS THE BELL RANG +1089-134686-0016-1749: THEN YOU CAN ASK HIM QUESTIONS ON THE CATECHISM (DEDALUS->DAEDALUS) +1089-134686-0017-1750: STEPHEN LEANING BACK AND DRAWING IDLY ON HIS SCRIBBLER LISTENED TO THE TALK ABOUT HIM WHICH HERON CHECKED FROM TIME TO TIME BY SAYING +1089-134686-0018-1751: IT WAS STRANGE TOO THAT HE FOUND AN ARID PLEASURE IN FOLLOWING UP TO THE END THE RIGID LINES OF THE DOCTRINES OF THE CHURCH AND PENETRATING INTO OBSCURE SILENCES ONLY TO HEAR AND FEEL THE MORE DEEPLY HIS OWN CONDEMNATION +1089-134686-0019-1752: THE SENTENCE OF SAINT JAMES WHICH SAYS THAT HE WHO OFFENDS AGAINST ONE COMMANDMENT BECOMES GUILTY OF ALL HAD SEEMED TO HIM FIRST A 
SWOLLEN PHRASE UNTIL HE HAD BEGUN TO GROPE IN THE DARKNESS OF HIS OWN STATE +1089-134686-0020-1753: IF A MAN HAD STOLEN A POUND IN HIS YOUTH AND HAD USED THAT POUND TO (AMASS->A MASS) A HUGE FORTUNE HOW MUCH WAS HE OBLIGED TO GIVE BACK THE POUND HE HAD STOLEN ONLY OR THE POUND TOGETHER WITH THE COMPOUND INTEREST ACCRUING UPON IT OR ALL HIS HUGE FORTUNE +1089-134686-0021-1754: IF A LAYMAN IN GIVING BAPTISM POUR THE WATER BEFORE SAYING THE WORDS IS THE CHILD BAPTIZED +1089-134686-0022-1755: HOW COMES IT THAT WHILE THE FIRST BEATITUDE PROMISES THE KINGDOM OF HEAVEN TO THE POOR OF HEART THE SECOND BEATITUDE PROMISES ALSO TO THE MEEK THAT THEY SHALL POSSESS THE LAND +1089-134686-0023-1756: WHY WAS THE SACRAMENT OF THE EUCHARIST INSTITUTED UNDER THE TWO SPECIES OF BREAD AND WINE IF JESUS CHRIST BE PRESENT BODY AND BLOOD SOUL AND DIVINITY IN THE BREAD ALONE AND IN THE WINE ALONE +1089-134686-0024-1757: IF THE WINE CHANGE INTO VINEGAR AND THE HOST CRUMBLE INTO CORRUPTION AFTER THEY HAVE BEEN CONSECRATED IS JESUS CHRIST STILL PRESENT UNDER THEIR SPECIES AS GOD AND AS MAN +1089-134686-0025-1758: A GENTLE KICK FROM THE TALL BOY IN THE BENCH BEHIND URGED STEPHEN TO ASK A DIFFICULT QUESTION +1089-134686-0026-1759: THE RECTOR DID NOT ASK FOR A CATECHISM TO HEAR THE LESSON FROM +1089-134686-0027-1760: HE CLASPED HIS HANDS ON THE DESK AND SAID +1089-134686-0028-1761: THE RETREAT WILL BEGIN ON WEDNESDAY AFTERNOON IN (HONOUR->HONOR) OF SAINT FRANCIS (XAVIER->ZEVIOUR) WHOSE FEAST DAY IS SATURDAY +1089-134686-0029-1762: ON FRIDAY CONFESSION WILL BE HEARD ALL THE AFTERNOON AFTER BEADS +1089-134686-0030-1763: BEWARE OF MAKING THAT MISTAKE +1089-134686-0031-1764: STEPHEN'S HEART BEGAN SLOWLY TO FOLD AND FADE WITH FEAR LIKE A WITHERING FLOWER +1089-134686-0032-1765: HE (IS->HAS) CALLED AS YOU KNOW THE APOSTLE OF THE INDIES +1089-134686-0033-1766: A GREAT SAINT SAINT FRANCIS (XAVIER->ZAVIER) +1089-134686-0034-1767: THE RECTOR PAUSED AND THEN SHAKING HIS CLASPED HANDS BEFORE HIM WENT ON +1089-134686-0035-1768: HE HAD THE FAITH IN HIM THAT MOVES MOUNTAINS +1089-134686-0036-1769: A GREAT SAINT SAINT FRANCIS (XAVIER->ZEVIER) +1089-134686-0037-1770: IN THE SILENCE THEIR DARK FIRE KINDLED THE DUSK INTO A TAWNY GLOW +1089-134691-0000-1707: HE COULD WAIT NO LONGER +1089-134691-0001-1708: FOR A FULL HOUR HE HAD PACED UP AND DOWN WAITING BUT HE COULD WAIT NO LONGER +1089-134691-0002-1709: HE SET OFF ABRUPTLY FOR THE BULL WALKING RAPIDLY LEST HIS FATHER'S SHRILL WHISTLE MIGHT CALL HIM BACK AND IN A FEW MOMENTS HE HAD ROUNDED THE CURVE AT THE POLICE BARRACK AND WAS SAFE +1089-134691-0003-1710: THE UNIVERSITY +1089-134691-0004-1711: PRIDE AFTER SATISFACTION UPLIFTED HIM LIKE LONG SLOW WAVES +1089-134691-0005-1712: WHOSE FEET ARE AS THE FEET OF (HARTS->HEARTS) AND UNDERNEATH THE EVERLASTING ARMS +1089-134691-0006-1713: THE PRIDE OF THAT DIM IMAGE BROUGHT BACK TO HIS MIND THE DIGNITY OF THE OFFICE HE HAD REFUSED +1089-134691-0007-1714: SOON THE WHOLE BRIDGE WAS TREMBLING AND RESOUNDING +1089-134691-0008-1715: THE UNCOUTH FACES PASSED HIM TWO BY TWO STAINED YELLOW OR RED OR LIVID BY THE SEA AND AS HE STROVE TO LOOK AT THEM WITH EASE AND INDIFFERENCE A FAINT STAIN OF PERSONAL SHAME AND COMMISERATION ROSE TO HIS OWN FACE +1089-134691-0009-1716: ANGRY WITH HIMSELF HE TRIED TO HIDE HIS FACE FROM THEIR EYES BY GAZING DOWN SIDEWAYS INTO THE SHALLOW SWIRLING WATER UNDER THE BRIDGE BUT HE STILL SAW A REFLECTION THEREIN OF THEIR TOP HEAVY SILK HATS AND HUMBLE TAPE LIKE COLLARS AND LOOSELY HANGING CLERICAL CLOTHES BROTHER HICKEY 
+1089-134691-0010-1717: BROTHER (MAC ARDLE->MICARTLE) BROTHER (KEOGH->KIEV) +1089-134691-0011-1718: THEIR PIETY WOULD BE LIKE THEIR NAMES LIKE THEIR FACES LIKE THEIR CLOTHES AND (IT->*) WAS IDLE FOR HIM TO TELL HIMSELF THAT THEIR HUMBLE AND CONTRITE HEARTS IT MIGHT BE PAID A FAR RICHER TRIBUTE OF DEVOTION THAN HIS HAD EVER BEEN A GIFT TENFOLD MORE ACCEPTABLE THAN HIS ELABORATE ADORATION +1089-134691-0012-1719: IT WAS IDLE FOR HIM TO MOVE HIMSELF TO BE GENEROUS TOWARDS THEM TO TELL HIMSELF THAT IF HE EVER CAME TO THEIR GATES STRIPPED OF HIS PRIDE BEATEN AND IN BEGGAR'S WEEDS THAT THEY WOULD BE GENEROUS TOWARDS HIM LOVING HIM AS THEMSELVES +1089-134691-0013-1720: IDLE AND EMBITTERING (FINALLY->FINELY) TO ARGUE AGAINST HIS OWN DISPASSIONATE CERTITUDE THAT THE COMMANDMENT OF LOVE BADE US NOT TO LOVE OUR (NEIGHBOUR->NEIGHBOR) AS OURSELVES WITH THE SAME AMOUNT AND INTENSITY OF LOVE BUT TO LOVE HIM AS OURSELVES WITH THE SAME KIND OF LOVE +1089-134691-0014-1721: THE PHRASE AND THE DAY AND THE SCENE HARMONIZED IN (A CHORD->ACCORD) +1089-134691-0015-1722: WORDS WAS IT THEIR (COLOURS->COLORS) +1089-134691-0016-1723: THEY WERE VOYAGING ACROSS THE DESERTS OF THE SKY A HOST OF NOMADS ON THE MARCH VOYAGING HIGH OVER IRELAND WESTWARD BOUND +1089-134691-0017-1724: THE EUROPE THEY HAD COME FROM LAY OUT THERE BEYOND THE IRISH SEA EUROPE OF STRANGE TONGUES AND VALLEYED AND (WOODBEGIRT->WOOD BEGIRT) AND (CITADELLED->CITADELED) AND OF (ENTRENCHED->INTRENCHED) AND (MARSHALLED->MARSHALED) RACES +1089-134691-0018-1725: AGAIN AGAIN +1089-134691-0019-1726: A VOICE FROM BEYOND THE WORLD WAS CALLING +1089-134691-0020-1727: (HELLO->HALLO) STEPHANOS HERE COMES THE (DEDALUS->DAEDALUS) +1089-134691-0021-1728: THEIR DIVING STONE POISED ON ITS RUDE SUPPORTS AND ROCKING UNDER THEIR PLUNGES AND THE ROUGH HEWN STONES OF THE SLOPING BREAKWATER OVER WHICH THEY SCRAMBLED IN THEIR (HORSEPLAY->HORSE PLAY) GLEAMED WITH COLD WET LUSTRE +1089-134691-0022-1729: HE STOOD STILL IN DEFERENCE TO THEIR CALLS AND PARRIED THEIR BANTER WITH EASY WORDS +1089-134691-0023-1730: IT WAS A PAIN TO SEE THEM AND A SWORD LIKE PAIN TO SEE THE SIGNS OF ADOLESCENCE THAT MADE REPELLENT THEIR PITIABLE NAKEDNESS +1089-134691-0024-1731: STEPHANOS (DEDALOS->DELOS) +1089-134691-0025-1732: A MOMENT BEFORE THE GHOST OF THE ANCIENT KINGDOM OF THE DANES HAD LOOKED FORTH THROUGH THE VESTURE OF THE (HAZEWRAPPED->HAYES WRAPPED) CITY +1188-133604-0000-1771: YOU WILL FIND ME CONTINUALLY SPEAKING OF FOUR MEN TITIAN (HOLBEIN->HOLBINE) TURNER AND (TINTORET->TINTARETTE) IN ALMOST THE SAME TERMS +1188-133604-0001-1772: THEY UNITE EVERY QUALITY AND SOMETIMES YOU WILL FIND ME REFERRING TO THEM AS COLORISTS SOMETIMES AS (CHIAROSCURISTS->KIERRASCURISTS) +1188-133604-0002-1773: BY BEING STUDIOUS OF (COLOR->COLOUR) THEY ARE STUDIOUS OF DIVISION AND WHILE THE (CHIAROSCURIST->CUIRASCURISTS) DEVOTES HIMSELF TO THE REPRESENTATION OF DEGREES OF FORCE IN ONE THING UNSEPARATED LIGHT THE COLORISTS HAVE FOR THEIR FUNCTION THE ATTAINMENT OF BEAUTY BY ARRANGEMENT OF THE DIVISIONS OF LIGHT +1188-133604-0003-1774: MY FIRST AND PRINCIPAL REASON WAS THAT THEY ENFORCED BEYOND ALL RESISTANCE ON ANY STUDENT WHO MIGHT ATTEMPT TO COPY THEM THIS METHOD OF LAYING PORTIONS OF DISTINCT HUE SIDE BY SIDE +1188-133604-0004-1775: SOME OF THE TOUCHES INDEED WHEN THE TINT HAS BEEN MIXED WITH MUCH WATER HAVE BEEN LAID IN LITTLE DROPS OR PONDS SO THAT THE PIGMENT MIGHT CRYSTALLIZE HARD AT THE EDGE +1188-133604-0005-1776: IT IS THE HEAD OF A PARROT WITH A LITTLE FLOWER IN HIS BEAK FROM A PICTURE OF 
(CARPACCIO'S->CARPATIUS) ONE OF HIS SERIES OF THE LIFE OF SAINT GEORGE +1188-133604-0006-1777: THEN HE COMES TO THE BEAK OF IT +1188-133604-0007-1778: THE BROWN GROUND BENEATH IS LEFT FOR THE MOST PART ONE TOUCH OF BLACK IS PUT FOR THE HOLLOW (TWO->TOO) DELICATE LINES OF DARK (GRAY->GREY) DEFINE THE OUTER CURVE AND ONE LITTLE QUIVERING TOUCH OF WHITE DRAWS THE INNER EDGE OF THE MANDIBLE +1188-133604-0008-1779: FOR BELIEVE ME THE FINAL PHILOSOPHY OF ART CAN ONLY RATIFY THEIR OPINION THAT THE BEAUTY OF A COCK ROBIN IS TO BE (RED->READ) AND OF A GRASS PLOT TO BE GREEN AND THE BEST SKILL OF ART IS (IN->AN) INSTANTLY SEIZING ON THE MANIFOLD DELICIOUSNESS OF LIGHT WHICH YOU CAN ONLY SEIZE BY PRECISION OF INSTANTANEOUS TOUCH +1188-133604-0009-1780: NOW YOU WILL SEE IN THESE STUDIES (THAT->AT) THE MOMENT THE WHITE IS (INCLOSED->ENCLOSED) PROPERLY AND (HARMONIZED->HARMONIZE) WITH THE OTHER HUES IT BECOMES SOMEHOW MORE PRECIOUS AND PEARLY THAN THE WHITE PAPER AND THAT I AM NOT AFRAID TO LEAVE A WHOLE FIELD OF UNTREATED WHITE PAPER ALL ROUND IT BEING SURE THAT EVEN THE LITTLE DIAMONDS IN THE ROUND WINDOW WILL TELL AS JEWELS IF THEY ARE GRADATED JUSTLY +1188-133604-0010-1781: BUT IN THIS (VIGNETTE->VINEYARD) COPIED FROM TURNER YOU HAVE THE TWO PRINCIPLES BROUGHT OUT PERFECTLY +1188-133604-0011-1782: THEY ARE BEYOND ALL OTHER WORKS (THAT->THAN) I KNOW EXISTING DEPENDENT FOR THEIR EFFECT ON LOW SUBDUED TONES THEIR FAVORITE CHOICE IN TIME OF DAY BEING EITHER DAWN OR TWILIGHT AND EVEN THEIR BRIGHTEST SUNSETS PRODUCED CHIEFLY OUT OF GRAY PAPER +1188-133604-0012-1783: IT MAY BE THAT A GREAT (COLORIST->COLORLESS) WILL USE HIS UTMOST FORCE OF COLOR AS A SINGER HIS FULL POWER OF VOICE BUT LOUD OR LOW THE VIRTUE IS IN BOTH CASES ALWAYS IN REFINEMENT NEVER IN LOUDNESS +1188-133604-0013-1784: IT MUST REMEMBER BE ONE OR THE OTHER +1188-133604-0014-1785: DO NOT THEREFORE THINK THAT THE GOTHIC (SCHOOL IS->SCHOOLS) AN EASY ONE +1188-133604-0015-1786: THE LAW OF THAT SCHOOL IS THAT EVERYTHING SHALL BE SEEN CLEARLY OR AT LEAST ONLY IN SUCH MIST OR FAINTNESS AS SHALL BE DELIGHTFUL AND I HAVE NO DOUBT THAT THE BEST INTRODUCTION TO IT WOULD BE THE ELEMENTARY PRACTICE OF PAINTING EVERY STUDY ON A GOLDEN GROUND +1188-133604-0016-1787: THIS AT ONCE COMPELS YOU TO UNDERSTAND THAT THE WORK IS TO BE IMAGINATIVE AND DECORATIVE THAT IT REPRESENTS BEAUTIFUL THINGS IN THE CLEAREST WAY BUT NOT UNDER EXISTING CONDITIONS AND THAT IN FACT YOU ARE PRODUCING (JEWELER'S->JEWELERS) WORK RATHER THAN PICTURES +1188-133604-0017-1788: THAT A STYLE (IS->WAS) RESTRAINED OR SEVERE DOES NOT MEAN THAT IT IS ALSO ERRONEOUS +1188-133604-0018-1789: IN ALL EARLY GOTHIC ART INDEED YOU WILL FIND FAILURE OF THIS KIND ESPECIALLY DISTORTION AND RIGIDITY WHICH ARE IN MANY RESPECTS PAINFULLY TO BE COMPARED WITH THE SPLENDID REPOSE OF CLASSIC ART +1188-133604-0019-1790: THE LARGE LETTER CONTAINS INDEED ENTIRELY FEEBLE AND ILL DRAWN FIGURES THAT IS MERELY CHILDISH AND FAILING WORK OF AN INFERIOR HAND IT IS NOT CHARACTERISTIC OF GOTHIC OR ANY OTHER SCHOOL +1188-133604-0020-1791: BUT OBSERVE YOU CAN ONLY DO THIS ON ONE CONDITION THAT OF STRIVING ALSO TO CREATE IN REALITY THE BEAUTY WHICH YOU SEEK IN IMAGINATION +1188-133604-0021-1792: IT WILL BE WHOLLY IMPOSSIBLE FOR YOU TO RETAIN THE TRANQUILLITY OF TEMPER AND FELICITY OF FAITH NECESSARY FOR NOBLE (PURIST->PUREST) PAINTING UNLESS YOU ARE ACTIVELY ENGAGED IN PROMOTING THE FELICITY AND PEACE OF PRACTICAL LIFE +1188-133604-0022-1793: YOU MUST LOOK AT HIM IN THE FACE FIGHT HIM CONQUER HIM WITH WHAT (SCATHE->SCATH) 
YOU MAY YOU NEED NOT THINK TO KEEP OUT OF THE WAY OF HIM +1188-133604-0023-1794: THE (COLORIST->CHOLERIST) SAYS FIRST OF ALL AS MY DELICIOUS (PAROQUET->PARAQUET) WAS RUBY SO THIS NASTY VIPER SHALL BE BLACK AND THEN (IS->AS) THE QUESTION CAN I ROUND HIM OFF EVEN THOUGH HE IS BLACK AND MAKE HIM SLIMY AND YET SPRINGY AND CLOSE DOWN CLOTTED LIKE A POOL OF BLACK BLOOD ON THE EARTH ALL THE SAME +1188-133604-0024-1795: NOTHING WILL BE MORE PRECIOUS TO YOU I THINK IN THE PRACTICAL STUDY OF ART THAN THE CONVICTION WHICH WILL FORCE ITSELF ON YOU MORE AND MORE EVERY HOUR OF THE WAY ALL THINGS ARE BOUND TOGETHER LITTLE AND GREAT IN SPIRIT AND IN MATTER +1188-133604-0025-1796: YOU KNOW I HAVE JUST BEEN TELLING YOU HOW THIS SCHOOL OF MATERIALISM AND CLAY INVOLVED ITSELF AT LAST IN CLOUD AND FIRE +1188-133604-0026-1797: HERE IS AN EQUALLY TYPICAL GREEK SCHOOL LANDSCAPE BY WILSON LOST WHOLLY IN GOLDEN MIST THE TREES SO SLIGHTLY DRAWN THAT YOU DON'T KNOW IF THEY ARE TREES OR TOWERS AND NO CARE FOR COLOR (WHATEVER->WHATSOEVER) PERFECTLY DECEPTIVE AND (MARVELOUS->MARVELLOUS) EFFECT OF SUNSHINE THROUGH THE MIST APOLLO (AND->IN) THE PYTHON +1188-133604-0027-1798: NOW HERE IS RAPHAEL EXACTLY BETWEEN THE TWO TREES STILL DRAWN LEAF BY LEAF WHOLLY FORMAL BUT BEAUTIFUL MIST COMING GRADUALLY INTO THE DISTANCE +1188-133604-0028-1799: WELL THEN LAST HERE IS TURNER'S GREEK SCHOOL OF THE HIGHEST CLASS AND YOU DEFINE HIS ART ABSOLUTELY AS FIRST THE DISPLAYING INTENSELY AND WITH THE STERNEST INTELLECT OF NATURAL FORM AS IT IS AND THEN THE ENVELOPMENT OF IT WITH CLOUD AND FIRE +1188-133604-0029-1800: ONLY THERE ARE TWO SORTS OF CLOUD (AND->IN) FIRE +1188-133604-0030-1801: HE KNOWS THEM BOTH +1188-133604-0031-1802: THERE'S ONE AND THERE'S ANOTHER THE DUDLEY AND THE FLINT +1188-133604-0032-1803: IT IS ONLY A PENCIL OUTLINE BY EDWARD BURNE JONES IN ILLUSTRATION OF THE STORY OF PSYCHE IT IS THE INTRODUCTION OF PSYCHE AFTER ALL HER TROUBLES INTO HEAVEN +1188-133604-0033-1804: EVERY PLANT IN THE GRASS IS SET FORMALLY GROWS PERFECTLY AND MAY BE REALIZED COMPLETELY +1188-133604-0034-1805: EXQUISITE ORDER AND UNIVERSAL WITH ETERNAL LIFE AND LIGHT THIS IS THE FAITH AND EFFORT OF THE SCHOOLS OF (CRYSTAL->CRISTEL) AND YOU MAY DESCRIBE AND COMPLETE THEIR WORK QUITE LITERALLY BY TAKING ANY VERSES OF CHAUCER IN HIS TENDER MOOD AND OBSERVING HOW HE INSISTS ON THE CLEARNESS AND BRIGHTNESS FIRST AND THEN ON THE ORDER +1188-133604-0035-1806: THUS IN CHAUCER'S DREAM +1188-133604-0036-1807: IN BOTH THESE HIGH MYTHICAL SUBJECTS THE SURROUNDING NATURE THOUGH SUFFERING IS STILL DIGNIFIED AND BEAUTIFUL +1188-133604-0037-1808: EVERY LINE IN WHICH THE MASTER TRACES IT EVEN WHERE SEEMINGLY NEGLIGENT IS LOVELY AND SET DOWN WITH A MEDITATIVE CALMNESS WHICH MAKES THESE TWO ETCHINGS CAPABLE OF BEING PLACED BESIDE THE MOST TRANQUIL WORK OF (HOLBEIN->HOLBINE) OR (DUERER->DURE) +1188-133604-0038-1809: BUT NOW HERE IS A SUBJECT OF WHICH YOU WILL WONDER AT FIRST WHY TURNER DREW IT AT ALL +1188-133604-0039-1810: IT HAS NO BEAUTY WHATSOEVER NO SPECIALTY OF PICTURESQUENESS (AND->IN) ALL ITS LINES ARE CRAMPED AND POOR +1188-133604-0040-1811: THE CRAMPNESS (AND->IN) THE POVERTY ARE ALL INTENDED +1188-133604-0041-1812: IT IS A GLEANER BRINGING DOWN HER ONE SHEAF OF CORN TO AN OLD (WATERMILL->WATER MILL) ITSELF MOSSY AND RENT SCARCELY ABLE TO GET ITS STONES TO TURN +1188-133604-0042-1813: THE SCENE IS ABSOLUTELY ARCADIAN +1188-133604-0043-1814: SEE THAT YOUR LIVES BE IN NOTHING WORSE THAN A BOY'S CLIMBING FOR HIS ENTANGLED KITE +1188-133604-0044-1815: IT WILL BE 
WELL FOR YOU IF YOU JOIN NOT WITH THOSE WHO INSTEAD OF KITES FLY FALCONS WHO INSTEAD OF OBEYING THE LAST WORDS OF THE GREAT CLOUD SHEPHERD TO FEED HIS SHEEP LIVE THE LIVES HOW MUCH LESS THAN VANITY OF THE WAR WOLF (AND->IN) THE (GIER->GEAR) EAGLE +121-121726-0000-2558: ALSO A POPULAR CONTRIVANCE WHEREBY LOVE MAKING MAY BE SUSPENDED BUT NOT STOPPED DURING THE PICNIC SEASON +121-121726-0001-2559: (HARANGUE->HURRANG) THE TIRESOME PRODUCT OF A TIRELESS TONGUE +121-121726-0002-2560: ANGOR PAIN PAINFUL TO HEAR +121-121726-0003-2561: (HAY->HEY) FEVER A (HEART->HARD) TROUBLE CAUSED BY FALLING IN LOVE WITH A GRASS WIDOW +121-121726-0004-2562: HEAVEN A GOOD PLACE TO BE RAISED TO +121-121726-0005-2563: HEDGE A FENCE +121-121726-0006-2564: HEREDITY THE CAUSE OF ALL OUR FAULTS +121-121726-0007-2565: HORSE SENSE A DEGREE OF WISDOM THAT KEEPS ONE FROM BETTING ON THE RACES +121-121726-0008-2566: HOSE MAN'S EXCUSE FOR WETTING THE WALK +121-121726-0009-2567: HOTEL A PLACE WHERE A GUEST OFTEN GIVES UP GOOD DOLLARS FOR POOR QUARTERS +121-121726-0010-2568: (HOUSECLEANING->HOUSE CLEANING) A DOMESTIC UPHEAVAL THAT MAKES IT EASY FOR THE GOVERNMENT TO ENLIST ALL THE SOLDIERS IT NEEDS +121-121726-0011-2569: HUSBAND THE NEXT THING TO A WIFE +121-121726-0012-2570: HUSSY WOMAN AND BOND TIE +121-121726-0013-2571: TIED TO A WOMAN +121-121726-0014-2572: HYPOCRITE A HORSE DEALER +121-123852-0000-2615: THOSE PRETTY WRONGS THAT LIBERTY COMMITS WHEN I AM (SOMETIME->SOME TIME) ABSENT FROM THY HEART THY BEAUTY AND THY YEARS FULL WELL BEFITS FOR STILL TEMPTATION FOLLOWS WHERE THOU ART +121-123852-0001-2616: (AY->I) ME +121-123852-0002-2617: NO MATTER THEN ALTHOUGH MY FOOT DID STAND UPON THE FARTHEST EARTH (REMOV'D->REMOVED) FROM THEE FOR NIMBLE THOUGHT CAN JUMP BOTH SEA AND LAND AS SOON AS THINK THE PLACE WHERE HE WOULD BE BUT AH +121-123852-0003-2618: THOUGHT KILLS ME THAT I AM NOT (THOUGHT->BOUGHT) TO LEAP LARGE LENGTHS OF MILES WHEN THOU ART GONE BUT THAT SO MUCH OF EARTH AND WATER WROUGHT I MUST ATTEND TIME'S LEISURE WITH MY MOAN RECEIVING (NOUGHT->NOT) BY ELEMENTS SO SLOW BUT HEAVY TEARS (BADGES->BADGERS) OF EITHER'S WOE +121-123852-0004-2619: MY HEART DOTH PLEAD THAT THOU IN HIM DOST LIE A CLOSET NEVER (PIERC'D->PIERCED) WITH CRYSTAL EYES BUT THE DEFENDANT DOTH THAT (PLEA->PLEAD) DENY AND SAYS IN HIM THY FAIR APPEARANCE LIES +121-123859-0000-2573: YOU ARE MY ALL THE WORLD AND I MUST STRIVE TO KNOW MY SHAMES AND PRAISES FROM YOUR TONGUE NONE ELSE TO ME NOR I TO NONE ALIVE THAT MY (STEEL'D SENSE->STEELED SCENTS) OR CHANGES RIGHT OR WRONG +121-123859-0001-2574: (O->OH) TIS THE FIRST TIS FLATTERY IN MY SEEING AND MY GREAT MIND MOST KINGLY DRINKS IT UP MINE EYE WELL KNOWS WHAT WITH HIS GUST IS (GREEING->GREEN) AND TO HIS (PALATE->PALLET) DOTH PREPARE THE CUP IF IT BE (POISON'D->POISONED) TIS THE LESSER SIN THAT MINE EYE LOVES IT AND DOTH FIRST BEGIN +121-123859-0002-2575: BUT RECKONING TIME WHOSE (MILLION'D->MILLIONED) ACCIDENTS CREEP IN TWIXT VOWS AND CHANGE DECREES OF KINGS TAN SACRED BEAUTY BLUNT THE (SHARP'ST INTENTS->SHARPEST INTENSE) DIVERT STRONG MINDS TO THE COURSE OF ALTERING THINGS ALAS WHY FEARING OF TIME'S TYRANNY MIGHT I NOT THEN SAY NOW I LOVE YOU BEST WHEN I WAS CERTAIN (O'ER INCERTAINTY->OR IN CERTAINTY) CROWNING THE PRESENT DOUBTING OF THE REST +121-123859-0003-2576: LOVE IS A BABE THEN MIGHT I NOT SAY SO TO GIVE FULL GROWTH TO THAT WHICH STILL DOTH GROW +121-123859-0004-2577: SO I (RETURN REBUK'D->RETURNED REBUKED) TO MY CONTENT AND GAIN BY ILL THRICE MORE THAN I HAVE SPENT +121-127105-0000-2578: IT WAS THIS 
OBSERVATION THAT DREW FROM DOUGLAS NOT IMMEDIATELY BUT LATER IN THE EVENING A REPLY THAT HAD THE INTERESTING CONSEQUENCE TO WHICH I CALL ATTENTION +121-127105-0001-2579: (SOMEONE->SOME ONE) ELSE TOLD A STORY NOT PARTICULARLY EFFECTIVE WHICH I SAW HE WAS NOT FOLLOWING +121-127105-0002-2580: CRIED ONE OF THE WOMEN HE TOOK NO NOTICE OF HER HE LOOKED AT ME BUT AS IF INSTEAD OF ME HE SAW WHAT HE SPOKE OF +121-127105-0003-2581: THERE WAS A UNANIMOUS GROAN AT THIS AND MUCH REPROACH AFTER WHICH IN HIS PREOCCUPIED WAY HE EXPLAINED +121-127105-0004-2582: THE (STORY'S->STORIES) WRITTEN +121-127105-0005-2583: I COULD WRITE TO MY MAN AND ENCLOSE THE KEY HE COULD SEND DOWN THE PACKET AS HE FINDS IT +121-127105-0006-2584: THE OTHERS RESENTED POSTPONEMENT BUT IT WAS JUST HIS SCRUPLES THAT CHARMED ME +121-127105-0007-2585: TO THIS HIS ANSWER WAS PROMPT OH THANK GOD NO AND IS THE RECORD YOURS +121-127105-0008-2586: HE HUNG FIRE AGAIN A (WOMAN'S->WOMAN') +121-127105-0009-2587: SHE HAS BEEN DEAD THESE TWENTY YEARS +121-127105-0010-2588: SHE SENT ME THE PAGES IN QUESTION BEFORE SHE DIED +121-127105-0011-2589: SHE WAS THE MOST AGREEABLE WOMAN I'VE EVER KNOWN IN HER POSITION SHE WOULD HAVE BEEN WORTHY OF ANY WHATEVER +121-127105-0012-2590: IT WASN'T SIMPLY THAT SHE SAID SO BUT THAT I KNEW SHE HADN'T I WAS SURE I COULD SEE +121-127105-0013-2591: YOU'LL EASILY JUDGE WHY WHEN YOU HEAR BECAUSE THE THING HAD BEEN SUCH A SCARE HE CONTINUED TO FIX ME +121-127105-0014-2592: YOU ARE ACUTE +121-127105-0015-2593: HE QUITTED THE FIRE AND DROPPED BACK INTO HIS CHAIR +121-127105-0016-2594: PROBABLY NOT TILL THE SECOND POST +121-127105-0017-2595: IT WAS ALMOST THE TONE OF HOPE EVERYBODY WILL STAY +121-127105-0018-2596: CRIED THE LADIES WHOSE DEPARTURE HAD BEEN FIXED +121-127105-0019-2597: MISSUS GRIFFIN HOWEVER EXPRESSED THE NEED FOR A LITTLE MORE LIGHT +121-127105-0020-2598: WHO WAS IT SHE WAS IN LOVE WITH THE STORY WILL TELL I TOOK UPON MYSELF TO REPLY OH I CAN'T WAIT FOR THE STORY THE STORY WON'T TELL SAID DOUGLAS NOT IN ANY LITERAL VULGAR WAY MORE'S THE PITY THEN +121-127105-0021-2599: WON'T YOU TELL DOUGLAS +121-127105-0022-2600: (WELL->FOR) IF I DON'T KNOW WHO SHE WAS IN LOVE WITH I KNOW WHO HE WAS +121-127105-0023-2601: LET ME SAY HERE DISTINCTLY TO HAVE DONE WITH IT THAT THIS NARRATIVE FROM AN EXACT TRANSCRIPT OF MY OWN MADE MUCH LATER IS WHAT I SHALL PRESENTLY GIVE +121-127105-0024-2602: POOR DOUGLAS BEFORE HIS DEATH WHEN IT WAS IN SIGHT COMMITTED TO ME THE MANUSCRIPT THAT REACHED HIM ON THE THIRD OF THESE DAYS AND THAT ON THE SAME SPOT WITH IMMENSE EFFECT HE BEGAN TO READ TO OUR HUSHED LITTLE CIRCLE ON THE NIGHT OF THE FOURTH +121-127105-0025-2603: THE DEPARTING LADIES WHO HAD SAID THEY WOULD STAY DIDN'T OF COURSE THANK HEAVEN STAY THEY DEPARTED IN CONSEQUENCE OF ARRANGEMENTS MADE IN A RAGE OF CURIOSITY AS THEY PROFESSED PRODUCED BY THE TOUCHES WITH WHICH HE HAD ALREADY WORKED US UP +121-127105-0026-2604: THE FIRST OF THESE TOUCHES CONVEYED THAT THE WRITTEN STATEMENT TOOK UP THE TALE AT A POINT AFTER IT HAD IN A MANNER BEGUN +121-127105-0027-2605: HE HAD FOR HIS OWN TOWN RESIDENCE A BIG HOUSE FILLED WITH THE SPOILS OF TRAVEL AND THE TROPHIES OF THE CHASE BUT IT WAS TO HIS COUNTRY HOME AN OLD FAMILY PLACE IN ESSEX THAT HE WISHED HER IMMEDIATELY TO PROCEED +121-127105-0028-2606: THE AWKWARD THING WAS THAT THEY HAD PRACTICALLY NO OTHER RELATIONS AND THAT HIS OWN AFFAIRS TOOK UP ALL HIS TIME +121-127105-0029-2607: THERE WERE PLENTY OF PEOPLE TO HELP BUT OF COURSE THE YOUNG LADY WHO SHOULD GO DOWN AS GOVERNESS WOULD BE IN 
SUPREME AUTHORITY +121-127105-0030-2608: I DON'T ANTICIPATE +121-127105-0031-2609: SHE WAS YOUNG UNTRIED NERVOUS IT WAS A VISION OF SERIOUS DUTIES AND LITTLE COMPANY OF REALLY GREAT LONELINESS +121-127105-0032-2610: YES BUT THAT'S JUST THE BEAUTY OF HER PASSION +121-127105-0033-2611: IT WAS THE BEAUTY OF IT +121-127105-0034-2612: IT SOUNDED DULL IT SOUNDED STRANGE AND ALL THE MORE SO BECAUSE OF HIS MAIN CONDITION WHICH WAS +121-127105-0035-2613: SHE PROMISED TO DO THIS AND SHE MENTIONED TO ME THAT WHEN FOR A MOMENT DISBURDENED DELIGHTED HE HELD HER HAND THANKING HER FOR THE SACRIFICE SHE ALREADY FELT REWARDED +121-127105-0036-2614: BUT WAS THAT ALL HER REWARD ONE OF THE LADIES ASKED +1221-135766-0000-1305: HOW STRANGE IT SEEMED TO THE SAD WOMAN AS SHE WATCHED THE GROWTH AND THE BEAUTY THAT BECAME EVERY DAY MORE BRILLIANT AND THE INTELLIGENCE THAT THREW ITS QUIVERING SUNSHINE OVER THE TINY FEATURES OF THIS CHILD +1221-135766-0001-1306: GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME (DISHONOURED->DISHONORED) BOSOM TO CONNECT HER PARENT (FOR EVER->FOREVER) WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN +1221-135766-0002-1307: YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION +1221-135766-0003-1308: THE CHILD HAD A NATIVE GRACE WHICH DOES NOT INVARIABLY (CO EXIST->COEXIST) WITH FAULTLESS BEAUTY ITS ATTIRE HOWEVER SIMPLE ALWAYS IMPRESSED THE BEHOLDER AS IF IT WERE THE VERY GARB THAT PRECISELY BECAME IT BEST +1221-135766-0004-1309: THIS OUTWARD MUTABILITY INDICATED AND DID NOT MORE THAN FAIRLY EXPRESS THE VARIOUS PROPERTIES OF HER INNER LIFE +1221-135766-0005-1310: HESTER COULD ONLY ACCOUNT FOR THE CHILD'S CHARACTER AND EVEN THEN MOST VAGUELY AND IMPERFECTLY BY RECALLING WHAT SHE HERSELF HAD BEEN DURING THAT MOMENTOUS PERIOD WHILE PEARL WAS IMBIBING HER SOUL FROM THE SPIRITUAL WORLD AND HER BODILY FRAME FROM ITS MATERIAL OF EARTH +1221-135766-0006-1311: THEY WERE NOW ILLUMINATED BY THE MORNING RADIANCE OF A YOUNG CHILD'S DISPOSITION BUT LATER IN THE DAY OF EARTHLY EXISTENCE MIGHT BE PROLIFIC OF THE STORM AND WHIRLWIND +1221-135766-0007-1312: HESTER PRYNNE NEVERTHELESS THE LOVING MOTHER OF THIS ONE CHILD RAN LITTLE RISK OF ERRING ON THE SIDE OF UNDUE SEVERITY +1221-135766-0008-1313: MINDFUL HOWEVER OF HER OWN ERRORS AND MISFORTUNES SHE EARLY SOUGHT TO IMPOSE A TENDER BUT STRICT CONTROL OVER THE INFANT IMMORTALITY THAT WAS COMMITTED TO HER CHARGE +1221-135766-0009-1314: AS TO ANY OTHER KIND OF DISCIPLINE WHETHER ADDRESSED TO HER MIND OR HEART LITTLE PEARL MIGHT OR MIGHT NOT BE WITHIN ITS REACH IN ACCORDANCE WITH THE CAPRICE THAT (RULED->ROLLED) THE MOMENT +1221-135766-0010-1315: IT WAS A LOOK SO INTELLIGENT YET INEXPLICABLE PERVERSE SOMETIMES SO MALICIOUS BUT GENERALLY ACCOMPANIED BY A WILD FLOW OF SPIRITS THAT HESTER COULD NOT HELP QUESTIONING AT SUCH MOMENTS WHETHER PEARL WAS A HUMAN CHILD +1221-135766-0011-1316: BEHOLDING IT HESTER WAS CONSTRAINED TO RUSH TOWARDS THE CHILD TO PURSUE THE LITTLE ELF IN THE FLIGHT WHICH SHE INVARIABLY BEGAN TO SNATCH HER TO HER BOSOM WITH A CLOSE PRESSURE AND EARNEST KISSES NOT SO MUCH FROM OVERFLOWING LOVE AS TO ASSURE HERSELF THAT PEARL WAS FLESH AND BLOOD AND NOT UTTERLY DELUSIVE +1221-135766-0012-1317: BROODING OVER ALL THESE MATTERS THE MOTHER FELT LIKE ONE WHO HAS EVOKED A SPIRIT BUT BY SOME IRREGULARITY IN THE PROCESS OF CONJURATION HAS FAILED TO WIN THE MASTER WORD THAT SHOULD CONTROL THIS NEW AND INCOMPREHENSIBLE INTELLIGENCE 
+1221-135766-0013-1318: PEARL WAS A BORN OUTCAST OF THE (INFANTILE->INVENTILE) WORLD +1221-135766-0014-1319: PEARL SAW AND GAZED INTENTLY BUT NEVER SOUGHT TO MAKE ACQUAINTANCE +1221-135766-0015-1320: IF SPOKEN TO SHE WOULD NOT SPEAK AGAIN +1221-135767-0000-1280: HESTER PRYNNE WENT ONE DAY TO THE MANSION OF GOVERNOR BELLINGHAM WITH A PAIR OF GLOVES WHICH SHE HAD FRINGED AND EMBROIDERED TO HIS ORDER AND WHICH WERE TO BE WORN ON SOME GREAT OCCASION OF STATE FOR THOUGH THE CHANCES OF A POPULAR ELECTION HAD CAUSED THIS FORMER RULER TO DESCEND A STEP OR TWO FROM THE HIGHEST RANK HE STILL HELD AN (HONOURABLE->HONORABLE) AND INFLUENTIAL PLACE AMONG THE COLONIAL MAGISTRACY +1221-135767-0001-1281: ANOTHER AND FAR MORE IMPORTANT REASON THAN THE DELIVERY OF A PAIR OF EMBROIDERED GLOVES IMPELLED HESTER AT THIS TIME TO SEEK AN INTERVIEW WITH A PERSONAGE OF SO MUCH POWER AND ACTIVITY IN THE AFFAIRS OF THE SETTLEMENT +1221-135767-0002-1282: AT THAT EPOCH OF PRISTINE SIMPLICITY HOWEVER MATTERS OF EVEN SLIGHTER PUBLIC INTEREST AND OF FAR LESS INTRINSIC WEIGHT THAN THE WELFARE OF HESTER AND HER CHILD WERE STRANGELY MIXED UP WITH THE DELIBERATIONS OF LEGISLATORS AND ACTS OF STATE +1221-135767-0003-1283: THE PERIOD WAS HARDLY IF AT ALL EARLIER THAN THAT OF OUR STORY WHEN A DISPUTE CONCERNING THE RIGHT OF PROPERTY IN A PIG NOT ONLY CAUSED A FIERCE AND BITTER CONTEST IN THE LEGISLATIVE BODY OF THE COLONY BUT RESULTED IN AN IMPORTANT MODIFICATION OF THE FRAMEWORK ITSELF OF THE LEGISLATURE +1221-135767-0004-1284: WE HAVE SPOKEN OF PEARL'S RICH AND LUXURIANT BEAUTY A BEAUTY THAT SHONE WITH DEEP AND VIVID TINTS A BRIGHT COMPLEXION EYES POSSESSING INTENSITY BOTH OF DEPTH AND GLOW AND HAIR ALREADY OF A DEEP GLOSSY BROWN AND WHICH IN AFTER YEARS WOULD BE NEARLY AKIN TO BLACK +1221-135767-0005-1285: IT WAS THE SCARLET LETTER IN ANOTHER FORM THE SCARLET LETTER ENDOWED WITH LIFE +1221-135767-0006-1286: THE MOTHER HERSELF AS IF THE RED IGNOMINY WERE SO DEEPLY SCORCHED INTO HER BRAIN THAT ALL HER CONCEPTIONS ASSUMED ITS FORM HAD CAREFULLY WROUGHT OUT THE SIMILITUDE LAVISHING MANY HOURS OF MORBID INGENUITY TO CREATE AN ANALOGY BETWEEN THE OBJECT OF HER AFFECTION AND THE EMBLEM OF HER GUILT AND TORTURE +1221-135767-0007-1287: BUT IN TRUTH PEARL WAS THE ONE AS WELL AS THE OTHER AND ONLY IN CONSEQUENCE OF THAT IDENTITY HAD HESTER CONTRIVED SO PERFECTLY TO REPRESENT THE SCARLET LETTER IN HER APPEARANCE +1221-135767-0008-1288: COME THEREFORE AND LET US FLING MUD AT THEM +1221-135767-0009-1289: BUT PEARL WHO WAS A DAUNTLESS CHILD AFTER FROWNING STAMPING HER FOOT AND SHAKING HER LITTLE HAND WITH A VARIETY OF THREATENING GESTURES SUDDENLY MADE A RUSH AT THE KNOT OF HER ENEMIES AND PUT THEM ALL TO FLIGHT +1221-135767-0010-1290: SHE SCREAMED AND SHOUTED TOO WITH A TERRIFIC VOLUME OF SOUND WHICH DOUBTLESS CAUSED THE HEARTS OF THE FUGITIVES TO QUAKE WITHIN THEM +1221-135767-0011-1291: IT WAS FURTHER DECORATED WITH STRANGE AND SEEMINGLY CABALISTIC FIGURES AND DIAGRAMS SUITABLE TO THE QUAINT TASTE OF THE AGE WHICH HAD BEEN DRAWN IN THE STUCCO WHEN NEWLY LAID ON AND HAD NOW GROWN HARD AND DURABLE FOR THE ADMIRATION OF AFTER TIMES +1221-135767-0012-1292: THEY APPROACHED THE DOOR WHICH WAS OF AN ARCHED FORM AND FLANKED ON EACH SIDE BY A NARROW TOWER OR PROJECTION OF THE EDIFICE IN BOTH OF WHICH WERE LATTICE WINDOWS THE WOODEN SHUTTERS TO CLOSE OVER THEM AT NEED +1221-135767-0013-1293: LIFTING THE IRON HAMMER THAT HUNG AT THE PORTAL HESTER PRYNNE GAVE A SUMMONS WHICH WAS ANSWERED BY ONE OF THE GOVERNOR'S BOND (SERVANT->SERVANTS) A FREE BORN 
ENGLISHMAN BUT NOW A SEVEN YEARS SLAVE +1221-135767-0014-1294: YEA HIS (HONOURABLE->HONORABLE) WORSHIP IS WITHIN BUT HE HATH A GODLY MINISTER OR TWO WITH HIM AND LIKEWISE A LEECH +1221-135767-0015-1295: YE MAY NOT SEE HIS WORSHIP NOW +1221-135767-0016-1296: WITH MANY VARIATIONS SUGGESTED BY THE NATURE OF HIS BUILDING MATERIALS DIVERSITY OF CLIMATE AND A DIFFERENT MODE OF SOCIAL LIFE GOVERNOR BELLINGHAM HAD PLANNED HIS NEW HABITATION AFTER THE RESIDENCES OF GENTLEMEN OF (FAIR ESTATE->FAIREST STATE) IN HIS NATIVE LAND +1221-135767-0017-1297: ON THE TABLE IN TOKEN THAT THE SENTIMENT OF OLD ENGLISH HOSPITALITY HAD NOT BEEN LEFT BEHIND STOOD A LARGE PEWTER TANKARD AT THE BOTTOM OF WHICH HAD HESTER OR PEARL PEEPED INTO IT THEY MIGHT HAVE SEEN THE FROTHY REMNANT OF A RECENT DRAUGHT OF ALE +1221-135767-0018-1298: LITTLE PEARL WHO WAS AS GREATLY PLEASED WITH THE GLEAMING (ARMOUR->ARMOR) AS SHE HAD BEEN WITH THE GLITTERING FRONTISPIECE OF THE HOUSE SPENT SOME TIME LOOKING INTO THE POLISHED MIRROR OF THE BREASTPLATE +1221-135767-0019-1299: MOTHER CRIED SHE I SEE YOU HERE LOOK (LOOK->*) +1221-135767-0020-1300: IN TRUTH SHE SEEMED ABSOLUTELY HIDDEN BEHIND IT +1221-135767-0021-1301: PEARL ACCORDINGLY RAN TO THE BOW WINDOW AT THE FURTHER END OF THE HALL AND LOOKED ALONG THE VISTA OF A GARDEN WALK CARPETED WITH CLOSELY SHAVEN GRASS AND BORDERED WITH SOME RUDE AND IMMATURE ATTEMPT AT SHRUBBERY +1221-135767-0022-1302: BUT THE PROPRIETOR APPEARED (ALREADY->ALL READY) TO HAVE RELINQUISHED AS HOPELESS THE EFFORT TO PERPETUATE ON THIS SIDE OF THE ATLANTIC IN A HARD SOIL AND AMID THE CLOSE STRUGGLE FOR SUBSISTENCE THE NATIVE ENGLISH TASTE FOR ORNAMENTAL GARDENING +1221-135767-0023-1303: THERE WERE A FEW ROSE BUSHES HOWEVER AND A NUMBER OF APPLE TREES PROBABLY THE DESCENDANTS OF THOSE PLANTED BY THE REVEREND MISTER BLACKSTONE THE FIRST SETTLER OF THE PENINSULA THAT HALF MYTHOLOGICAL PERSONAGE WHO RIDES THROUGH OUR EARLY ANNALS SEATED ON THE BACK OF A BULL +1221-135767-0024-1304: PEARL SEEING THE ROSE BUSHES BEGAN TO CRY FOR A RED ROSE AND WOULD NOT BE PACIFIED +1284-1180-0000-829: HE WORE BLUE SILK STOCKINGS BLUE (KNEE PANTS->KNEEP HANDS) WITH GOLD BUCKLES A BLUE RUFFLED WAIST AND A JACKET OF BRIGHT BLUE BRAIDED WITH GOLD +1284-1180-0001-830: HIS HAT HAD A PEAKED CROWN AND A FLAT BRIM AND AROUND THE BRIM WAS A ROW OF TINY GOLDEN BELLS THAT TINKLED WHEN HE MOVED +1284-1180-0002-831: INSTEAD OF SHOES THE OLD MAN WORE BOOTS WITH (TURNOVER->TURN OVER) TOPS AND HIS BLUE COAT HAD WIDE CUFFS OF GOLD BRAID +1284-1180-0003-832: FOR A LONG TIME HE HAD WISHED TO EXPLORE THE BEAUTIFUL LAND OF OZ IN WHICH THEY LIVED +1284-1180-0004-833: WHEN THEY WERE OUTSIDE UNC SIMPLY LATCHED THE DOOR AND STARTED UP THE PATH +1284-1180-0005-834: NO ONE WOULD DISTURB THEIR LITTLE HOUSE EVEN IF (ANYONE->ANY ONE) CAME SO FAR INTO THE THICK FOREST WHILE THEY WERE GONE +1284-1180-0006-835: AT THE FOOT OF THE MOUNTAIN THAT SEPARATED THE COUNTRY OF THE MUNCHKINS FROM THE COUNTRY OF THE GILLIKINS THE PATH DIVIDED +1284-1180-0007-836: HE KNEW IT WOULD TAKE THEM TO THE HOUSE OF THE CROOKED MAGICIAN WHOM HE HAD NEVER SEEN BUT WHO WAS (THEIR->THERE) NEAREST (NEIGHBOR->NEIGHBOUR) +1284-1180-0008-837: ALL THE MORNING THEY TRUDGED UP THE MOUNTAIN PATH AND AT (NOON UNC->NOONK) AND OJO SAT ON A FALLEN TREE TRUNK AND ATE THE LAST OF THE BREAD WHICH THE OLD MUNCHKIN HAD PLACED IN HIS POCKET +1284-1180-0009-838: THEN THEY STARTED ON AGAIN AND TWO HOURS LATER CAME IN SIGHT OF THE HOUSE OF DOCTOR PIPT +1284-1180-0010-839: UNC KNOCKED AT THE DOOR OF THE HOUSE 
(AND A->INTO) CHUBBY PLEASANT FACED WOMAN DRESSED ALL IN BLUE OPENED IT AND GREETED THE VISITORS WITH A SMILE +1284-1180-0011-840: I AM MY DEAR AND ALL STRANGERS ARE WELCOME TO MY HOME +1284-1180-0012-841: WE HAVE COME FROM A FAR LONELIER PLACE THAN THIS A LONELIER PLACE +1284-1180-0013-842: AND YOU MUST BE OJO THE UNLUCKY SHE ADDED +1284-1180-0014-843: OJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HIS LIFE +1284-1180-0015-844: WE ARE (TRAVELING->TRAVELLING) REPLIED OJO AND WE STOPPED AT YOUR HOUSE JUST TO REST AND REFRESH OURSELVES +1284-1180-0016-845: THE WOMAN SEEMED THOUGHTFUL +1284-1180-0017-846: AT ONE END STOOD A GREAT FIREPLACE IN WHICH A BLUE LOG WAS BLAZING WITH A BLUE FLAME AND OVER THE FIRE HUNG FOUR KETTLES IN A ROW ALL BUBBLING AND STEAMING AT A GREAT RATE +1284-1180-0018-847: IT TAKES ME SEVERAL YEARS TO MAKE THIS MAGIC POWDER BUT AT THIS MOMENT I AM PLEASED TO SAY IT IS NEARLY DONE YOU SEE I AM MAKING IT FOR MY GOOD WIFE MARGOLOTTE WHO WANTS TO USE SOME OF IT FOR A PURPOSE OF HER OWN +1284-1180-0019-848: YOU MUST KNOW SAID MARGOLOTTE WHEN THEY WERE ALL SEATED TOGETHER ON THE BROAD WINDOW SEAT THAT MY HUSBAND FOOLISHLY GAVE AWAY ALL THE POWDER OF LIFE HE FIRST MADE TO OLD (MOMBI->MUMBIE) THE WITCH WHO USED TO LIVE IN THE COUNTRY OF THE GILLIKINS TO THE NORTH OF HERE +1284-1180-0020-849: THE FIRST LOT WE TESTED ON OUR GLASS (CAT->HAT) WHICH NOT ONLY BEGAN TO LIVE BUT HAS LIVED EVER SINCE +1284-1180-0021-850: I THINK THE NEXT GLASS CAT THE MAGICIAN MAKES WILL HAVE NEITHER BRAINS NOR HEART FOR THEN IT WILL NOT OBJECT TO CATCHING MICE AND (MAY->THEY) PROVE OF SOME USE TO US +1284-1180-0022-851: I'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ +1284-1180-0023-852: YOU SEE (I'VE->I HAVE) LIVED ALL MY LIFE WITH UNC NUNKIE THE SILENT ONE AND THERE WAS NO ONE TO TELL ME ANYTHING +1284-1180-0024-853: THAT IS ONE REASON YOU ARE OJO THE UNLUCKY SAID THE WOMAN IN (A->*) SYMPATHETIC TONE +1284-1180-0025-854: I THINK I MUST SHOW YOU MY PATCHWORK GIRL SAID MARGOLOTTE LAUGHING AT THE BOY'S ASTONISHMENT FOR SHE IS RATHER DIFFICULT TO EXPLAIN +1284-1180-0026-855: BUT FIRST I WILL TELL YOU THAT FOR MANY YEARS I HAVE LONGED FOR A SERVANT TO HELP ME WITH THE HOUSEWORK AND TO (COOK->COPE) THE MEALS AND WASH THE DISHES +1284-1180-0027-856: YET THAT TASK WAS NOT SO EASY AS YOU MAY SUPPOSE +1284-1180-0028-857: A BED QUILT MADE OF PATCHES OF DIFFERENT KINDS AND (COLORS->COLLARS) OF CLOTH ALL NEATLY SEWED TOGETHER +1284-1180-0029-858: SOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE THE PATCHES AND COLORS ARE SO MIXED UP +1284-1180-0030-859: WHEN I FOUND IT I SAID TO MYSELF THAT IT WOULD DO NICELY FOR MY SERVANT GIRL FOR WHEN SHE WAS BROUGHT TO LIFE SHE WOULD NOT BE PROUD NOR HAUGHTY AS THE GLASS CAT IS FOR SUCH A DREADFUL MIXTURE OF (COLORS->COLOURS) WOULD DISCOURAGE HER FROM TRYING TO BE AS DIGNIFIED AS THE BLUE MUNCHKINS ARE +1284-1180-0031-860: AT THE EMERALD CITY WHERE OUR PRINCESS OZMA LIVES GREEN IS THE POPULAR COLOR +1284-1180-0032-861: I WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE WENT TO A TALL CUPBOARD AND THREW OPEN THE DOORS +1284-1181-0000-807: OJO EXAMINED THIS CURIOUS CONTRIVANCE WITH WONDER +1284-1181-0001-808: MARGOLOTTE HAD FIRST MADE THE GIRL'S FORM FROM THE PATCHWORK QUILT AND THEN SHE HAD DRESSED IT WITH A PATCHWORK SKIRT AND AN APRON WITH POCKETS IN IT USING THE SAME GAY MATERIAL THROUGHOUT +1284-1181-0002-809: THE HEAD OF THE PATCHWORK GIRL WAS THE MOST CURIOUS PART OF HER +1284-1181-0003-810: THE HAIR WAS OF BROWN YARN AND HUNG DOWN ON HER NECK IN SEVERAL NEAT BRAIDS 
+1284-1181-0004-811: GOLD IS THE MOST COMMON METAL IN THE LAND OF OZ AND IS USED FOR MANY PURPOSES BECAUSE IT IS SOFT AND PLIABLE +1284-1181-0005-812: NO I FORGOT ALL ABOUT THE BRAINS EXCLAIMED THE WOMAN +1284-1181-0006-813: WELL THAT MAY BE TRUE AGREED MARGOLOTTE BUT ON THE CONTRARY A SERVANT WITH TOO MUCH BRAINS IS SURE TO BECOME INDEPENDENT AND HIGH AND MIGHTY AND FEEL ABOVE HER WORK +1284-1181-0007-814: SHE POURED INTO THE DISH A QUANTITY FROM EACH OF THESE BOTTLES +1284-1181-0008-815: I THINK THAT WILL DO SHE CONTINUED FOR THE OTHER QUALITIES ARE NOT NEEDED IN A SERVANT +1284-1181-0009-816: SHE RAN TO HER HUSBAND'S SIDE AT ONCE AND HELPED HIM LIFT THE FOUR KETTLES FROM THE FIRE +1284-1181-0010-817: THEIR CONTENTS HAD ALL BOILED AWAY LEAVING IN THE BOTTOM OF EACH KETTLE A FEW GRAINS OF FINE WHITE POWDER +1284-1181-0011-818: VERY CAREFULLY THE MAGICIAN REMOVED THIS POWDER PLACING IT (ALL TOGETHER->ALTOGETHER) IN A GOLDEN DISH WHERE HE MIXED IT WITH A GOLDEN SPOON +1284-1181-0012-819: NO ONE SAW HIM DO THIS FOR ALL WERE LOOKING AT THE POWDER OF LIFE BUT SOON THE WOMAN REMEMBERED WHAT SHE HAD BEEN DOING AND CAME BACK TO THE CUPBOARD +1284-1181-0013-820: OJO BECAME A BIT UNEASY AT THIS FOR HE HAD ALREADY PUT QUITE A LOT OF THE CLEVERNESS POWDER IN THE DISH BUT HE DARED NOT INTERFERE AND SO HE COMFORTED HIMSELF WITH THE THOUGHT THAT ONE CANNOT HAVE TOO MUCH CLEVERNESS +1284-1181-0014-821: HE SELECTED A SMALL GOLD BOTTLE WITH A PEPPER BOX TOP SO THAT THE POWDER MIGHT BE SPRINKLED ON ANY OBJECT THROUGH THE SMALL HOLES +1284-1181-0015-822: MOST PEOPLE TALK TOO MUCH SO IT IS A RELIEF TO FIND ONE WHO TALKS TOO LITTLE +1284-1181-0016-823: I AM NOT ALLOWED TO PERFORM MAGIC EXCEPT FOR MY OWN AMUSEMENT HE TOLD HIS VISITORS AS HE LIGHTED A PIPE WITH A CROOKED STEM AND BEGAN TO SMOKE +1284-1181-0017-824: THE WIZARD OF OZ WHO USED TO BE A HUMBUG AND KNEW NO MAGIC AT ALL HAS BEEN TAKING LESSONS OF GLINDA AND I'M TOLD HE IS GETTING TO BE A PRETTY GOOD WIZARD BUT HE IS MERELY THE ASSISTANT OF THE GREAT SORCERESS +1284-1181-0018-825: IT TRULY IS ASSERTED THE MAGICIAN +1284-1181-0019-826: I NOW USE THEM AS ORNAMENTAL STATUARY IN MY GARDEN +1284-1181-0020-827: DEAR ME WHAT A (CHATTERBOX->CHATTER BOX) YOU'RE GETTING TO BE (UNC->YOUNG) REMARKED THE MAGICIAN WHO WAS PLEASED WITH THE COMPLIMENT +1284-1181-0021-828: ASKED THE VOICE IN SCORNFUL (ACCENTS->ACCENT) +1284-134647-0000-862: THE GRATEFUL APPLAUSE OF THE CLERGY HAS CONSECRATED THE MEMORY OF A PRINCE WHO INDULGED THEIR PASSIONS AND PROMOTED THEIR INTEREST +1284-134647-0001-863: THE EDICT OF MILAN THE GREAT CHARTER OF TOLERATION HAD CONFIRMED TO EACH INDIVIDUAL OF THE ROMAN WORLD THE PRIVILEGE OF CHOOSING AND PROFESSING HIS OWN RELIGION +1284-134647-0002-864: BUT THIS INESTIMABLE PRIVILEGE WAS SOON VIOLATED WITH THE KNOWLEDGE OF TRUTH THE EMPEROR IMBIBED THE MAXIMS OF PERSECUTION AND THE (SECTS->SEX) WHICH DISSENTED FROM THE CATHOLIC CHURCH WERE AFFLICTED AND OPPRESSED BY THE TRIUMPH OF CHRISTIANITY +1284-134647-0003-865: (CONSTANTINE->KONSTANTINE) EASILY BELIEVED THAT THE HERETICS WHO PRESUMED TO DISPUTE HIS OPINIONS OR TO OPPOSE HIS COMMANDS WERE GUILTY OF THE MOST ABSURD AND CRIMINAL OBSTINACY AND THAT A SEASONABLE APPLICATION OF MODERATE SEVERITIES MIGHT SAVE THOSE UNHAPPY MEN FROM THE DANGER OF AN EVERLASTING CONDEMNATION +1284-134647-0004-866: SOME OF THE PENAL REGULATIONS WERE COPIED FROM THE EDICTS OF DIOCLETIAN AND THIS METHOD OF CONVERSION WAS APPLAUDED BY THE SAME BISHOPS WHO HAD (FELT->FELLED) THE HAND OF OPPRESSION AND PLEADED FOR THE RIGHTS OF 
HUMANITY +1284-134647-0005-867: THEY ASSERTED WITH CONFIDENCE AND ALMOST WITH EXULTATION THAT THE APOSTOLICAL SUCCESSION WAS INTERRUPTED THAT ALL THE BISHOPS OF EUROPE AND ASIA WERE INFECTED BY THE CONTAGION OF GUILT AND SCHISM AND THAT THE PREROGATIVES OF THE CATHOLIC CHURCH WERE CONFINED TO THE CHOSEN PORTION OF THE AFRICAN BELIEVERS WHO ALONE HAD PRESERVED INVIOLATE THE INTEGRITY OF THEIR FAITH AND DISCIPLINE +1284-134647-0006-868: BISHOPS VIRGINS AND EVEN SPOTLESS INFANTS WERE SUBJECTED TO THE DISGRACE OF A PUBLIC PENANCE BEFORE THEY COULD BE ADMITTED TO THE COMMUNION OF THE DONATISTS +1284-134647-0007-869: PROSCRIBED BY THE CIVIL AND ECCLESIASTICAL POWERS OF THE EMPIRE THE (DONATISTS->DONATIST) STILL MAINTAINED IN SOME PROVINCES PARTICULARLY IN NUMIDIA THEIR SUPERIOR NUMBERS AND FOUR HUNDRED BISHOPS ACKNOWLEDGED THE JURISDICTION OF THEIR PRIMATE +1320-122612-0000-120: SINCE THE PERIOD OF OUR TALE THE ACTIVE SPIRIT OF THE COUNTRY HAS SURROUNDED IT WITH A BELT OF RICH (AND THRIVING->ENTHRIBING) SETTLEMENTS THOUGH NONE BUT THE HUNTER OR THE SAVAGE IS EVER KNOWN EVEN NOW TO PENETRATE ITS WILD RECESSES +1320-122612-0001-121: THE DEWS WERE SUFFERED TO EXHALE AND THE SUN HAD DISPERSED THE MISTS AND WAS SHEDDING A STRONG AND CLEAR LIGHT IN THE FOREST WHEN THE (TRAVELERS->TRAVELLERS) RESUMED THEIR JOURNEY +1320-122612-0002-122: AFTER PROCEEDING A FEW MILES THE PROGRESS OF HAWKEYE WHO LED THE ADVANCE BECAME MORE DELIBERATE AND WATCHFUL +1320-122612-0003-123: HE OFTEN STOPPED TO EXAMINE THE TREES NOR DID HE CROSS A RIVULET WITHOUT ATTENTIVELY CONSIDERING THE QUANTITY THE VELOCITY AND THE COLOR OF ITS WATERS +1320-122612-0004-124: DISTRUSTING HIS OWN JUDGMENT HIS APPEALS TO THE OPINION OF CHINGACHGOOK WERE FREQUENT AND EARNEST +1320-122612-0005-125: YET HERE ARE WE WITHIN A SHORT RANGE OF THE (SCAROONS->SCARONS) AND NOT A SIGN OF A TRAIL HAVE WE CROSSED +1320-122612-0006-126: LET US RETRACE OUR STEPS AND EXAMINE AS WE GO WITH KEENER EYES +1320-122612-0007-127: (CHINGACHGOOK->CHINGACHOOK) HAD CAUGHT THE LOOK AND MOTIONING WITH HIS HAND HE BADE HIM SPEAK +1320-122612-0008-128: THE EYES OF THE WHOLE PARTY FOLLOWED THE UNEXPECTED MOVEMENT AND READ THEIR SUCCESS IN THE AIR OF TRIUMPH THAT THE YOUTH ASSUMED +1320-122612-0009-129: IT WOULD HAVE BEEN MORE WONDERFUL HAD HE SPOKEN WITHOUT A BIDDING +1320-122612-0010-130: SEE SAID UNCAS POINTING NORTH AND SOUTH AT THE EVIDENT MARKS OF THE BROAD TRAIL ON EITHER SIDE OF HIM THE DARK HAIR HAS GONE TOWARD THE FOREST +1320-122612-0011-131: IF A ROCK OR A RIVULET OR A BIT OF EARTH HARDER THAN COMMON SEVERED THE LINKS OF THE (CLEW->CLUE) THEY FOLLOWED THE TRUE EYE OF THE SCOUT RECOVERED THEM AT A DISTANCE AND SELDOM RENDERED THE DELAY OF A SINGLE MOMENT NECESSARY +1320-122612-0012-132: EXTINGUISHED BRANDS WERE LYING AROUND A SPRING THE OFFALS OF A DEER WERE SCATTERED ABOUT THE PLACE AND THE TREES BORE EVIDENT MARKS OF HAVING BEEN BROWSED BY THE HORSES +1320-122612-0013-133: A CIRCLE OF A FEW HUNDRED FEET IN CIRCUMFERENCE WAS DRAWN AND EACH OF THE PARTY TOOK A SEGMENT FOR HIS PORTION +1320-122612-0014-134: THE EXAMINATION HOWEVER RESULTED IN NO DISCOVERY +1320-122612-0015-135: THE WHOLE PARTY CROWDED TO THE SPOT WHERE UNCAS POINTED OUT THE IMPRESSION OF A MOCCASIN IN THE MOIST (ALLUVION->ALLUVIAN) +1320-122612-0016-136: RUN BACK UNCAS AND BRING ME THE SIZE OF THE SINGER'S FOOT +1320-122617-0000-78: NOTWITHSTANDING THE HIGH RESOLUTION OF HAWKEYE HE FULLY COMPREHENDED ALL THE DIFFICULTIES AND DANGER HE WAS ABOUT TO INCUR +1320-122617-0001-79: IN HIS RETURN TO THE 
CAMP HIS ACUTE AND PRACTISED INTELLECTS WERE INTENTLY ENGAGED IN DEVISING MEANS TO COUNTERACT A WATCHFULNESS AND SUSPICION ON THE PART OF HIS ENEMIES THAT HE KNEW WERE IN NO DEGREE INFERIOR TO HIS OWN +1320-122617-0002-80: IN OTHER WORDS WHILE HE HAD IMPLICIT FAITH IN THE ABILITY OF (BALAAM'S->BAYLIM'S) ASS TO SPEAK HE WAS SOMEWHAT (SKEPTICAL->SCEPTICAL) ON THE SUBJECT OF A BEAR'S SINGING AND YET HE HAD BEEN ASSURED OF THE LATTER ON THE TESTIMONY OF HIS OWN EXQUISITE ORGANS +1320-122617-0003-81: THERE WAS SOMETHING IN HIS AIR AND MANNER THAT BETRAYED TO THE SCOUT THE UTTER CONFUSION OF THE STATE OF HIS MIND +1320-122617-0004-82: THE INGENIOUS HAWKEYE WHO RECALLED THE HASTY MANNER IN WHICH THE OTHER HAD ABANDONED HIS POST AT THE BEDSIDE OF THE SICK WOMAN WAS NOT WITHOUT HIS SUSPICIONS CONCERNING THE SUBJECT OF SO MUCH SOLEMN DELIBERATION +1320-122617-0005-83: THE BEAR SHOOK HIS SHAGGY SIDES AND THEN A WELL KNOWN VOICE REPLIED +1320-122617-0006-84: CAN THESE THINGS BE RETURNED DAVID BREATHING MORE FREELY AS THE TRUTH BEGAN TO DAWN UPON HIM +1320-122617-0007-85: COME COME RETURNED HAWKEYE UNCASING HIS HONEST COUNTENANCE THE BETTER TO ASSURE THE WAVERING CONFIDENCE OF HIS COMPANION YOU MAY SEE A SKIN WHICH IF IT BE NOT AS WHITE AS ONE OF THE GENTLE ONES HAS NO TINGE OF RED TO IT THAT THE WINDS OF THE HEAVEN AND THE SUN HAVE NOT BESTOWED NOW LET US TO BUSINESS +1320-122617-0008-86: THE YOUNG MAN IS IN BONDAGE AND MUCH I FEAR HIS DEATH IS DECREED +1320-122617-0009-87: I GREATLY MOURN THAT ONE SO WELL DISPOSED SHOULD DIE IN HIS IGNORANCE AND I HAVE SOUGHT A GOODLY HYMN CAN YOU LEAD ME TO HIM +1320-122617-0010-88: THE TASK WILL NOT BE DIFFICULT RETURNED DAVID HESITATING THOUGH I GREATLY FEAR YOUR PRESENCE WOULD RATHER INCREASE THAN MITIGATE HIS UNHAPPY FORTUNES +1320-122617-0011-89: THE LODGE IN WHICH UNCAS WAS CONFINED WAS IN THE VERY CENTER OF THE VILLAGE AND IN A SITUATION PERHAPS MORE DIFFICULT THAN ANY OTHER TO APPROACH OR LEAVE WITHOUT OBSERVATION +1320-122617-0012-90: FOUR OR FIVE OF THE LATTER ONLY LINGERED ABOUT THE DOOR OF THE PRISON OF UNCAS WARY BUT CLOSE OBSERVERS OF THE MANNER OF THEIR CAPTIVE +1320-122617-0013-91: DELIVERED IN A STRONG TONE OF ASSENT ANNOUNCED THE GRATIFICATION THE SAVAGE WOULD RECEIVE (IN->AND) WITNESSING SUCH AN EXHIBITION OF WEAKNESS IN AN ENEMY SO LONG HATED AND SO MUCH FEARED +1320-122617-0014-92: THEY DREW BACK A LITTLE FROM THE ENTRANCE AND MOTIONED TO THE SUPPOSED (CONJURER->CONJUROR) TO ENTER +1320-122617-0015-93: BUT THE BEAR INSTEAD OF OBEYING MAINTAINED THE (SEAT->SEED) IT HAD TAKEN AND GROWLED +1320-122617-0016-94: THE CUNNING MAN IS AFRAID THAT HIS BREATH WILL BLOW UPON HIS BROTHERS AND TAKE AWAY THEIR COURAGE TOO CONTINUED DAVID IMPROVING THE HINT HE RECEIVED THEY MUST STAND FURTHER OFF +1320-122617-0017-95: THEN AS IF SATISFIED OF THEIR SAFETY THE SCOUT LEFT HIS POSITION AND SLOWLY ENTERED THE PLACE +1320-122617-0018-96: IT WAS SILENT AND GLOOMY BEING TENANTED SOLELY BY THE CAPTIVE AND LIGHTED BY THE DYING EMBERS OF A FIRE WHICH HAD BEEN USED FOR THE (PURPOSED->PURPOSE) OF COOKERY +1320-122617-0019-97: UNCAS OCCUPIED A DISTANT CORNER IN A RECLINING ATTITUDE BEING RIGIDLY BOUND BOTH HANDS AND FEET BY STRONG AND PAINFUL (WITHES->WIDTHS) +1320-122617-0020-98: THE SCOUT WHO HAD LEFT DAVID AT THE DOOR TO ASCERTAIN THEY WERE NOT OBSERVED THOUGHT IT PRUDENT TO PRESERVE HIS DISGUISE UNTIL ASSURED OF THEIR PRIVACY +1320-122617-0021-99: WHAT SHALL WE DO WITH THE MINGOES AT THE DOOR THEY COUNT SIX AND (THIS->THE) SINGER IS AS GOOD AS NOTHING 
+1320-122617-0022-100: THE DELAWARES ARE CHILDREN OF THE TORTOISE AND (THEY->THE) OUTSTRIP THE DEER +1320-122617-0023-101: UNCAS WHO HAD ALREADY APPROACHED THE DOOR IN READINESS TO LEAD THE WAY NOW RECOILED AND PLACED HIMSELF ONCE MORE IN THE BOTTOM OF THE LODGE +1320-122617-0024-102: BUT HAWKEYE WHO WAS TOO MUCH OCCUPIED WITH HIS OWN THOUGHTS TO NOTE THE MOVEMENT CONTINUED SPEAKING MORE TO HIMSELF THAN TO HIS COMPANION +1320-122617-0025-103: SO UNCAS YOU HAD BETTER TAKE THE LEAD WHILE I WILL PUT ON THE SKIN AGAIN AND TRUST TO CUNNING FOR WANT OF SPEED +1320-122617-0026-104: WELL WHAT CAN'T BE DONE BY MAIN COURAGE (IN->AND) WAR MUST BE DONE BY CIRCUMVENTION +1320-122617-0027-105: AS SOON AS THESE DISPOSITIONS WERE MADE THE SCOUT TURNED TO DAVID AND GAVE HIM HIS PARTING INSTRUCTIONS +1320-122617-0028-106: MY PURSUITS ARE PEACEFUL AND MY TEMPER I HUMBLY TRUST IS GREATLY GIVEN TO MERCY AND LOVE RETURNED DAVID A LITTLE NETTLED AT SO DIRECT AN ATTACK ON HIS MANHOOD BUT THERE ARE NONE WHO CAN SAY THAT I HAVE EVER FORGOTTEN MY FAITH IN THE LORD EVEN IN THE GREATEST STRAITS +1320-122617-0029-107: IF YOU ARE NOT THEN KNOCKED ON THE HEAD YOUR BEING A NON (COMPOSSER WILL->COMPOSSIBLE) PROTECT YOU AND YOU'LL THEN HAVE A GOOD REASON TO EXPECT TO DIE IN YOUR BED +1320-122617-0030-108: SO CHOOSE FOR YOURSELF TO MAKE A RUSH OR TARRY HERE +1320-122617-0031-109: BRAVELY AND GENEROUSLY HAS HE BATTLED IN MY BEHALF AND THIS AND MORE WILL I DARE IN HIS SERVICE +1320-122617-0032-110: KEEP SILENT AS LONG AS MAY BE AND IT WOULD BE WISE WHEN YOU DO SPEAK TO BREAK OUT SUDDENLY IN ONE OF YOUR SHOUTINGS WHICH WILL SERVE TO REMIND THE INDIANS THAT YOU ARE NOT ALTOGETHER AS RESPONSIBLE AS MEN SHOULD BE +1320-122617-0033-111: IF HOWEVER THEY TAKE YOUR SCALP AS I TRUST AND BELIEVE THEY WILL NOT DEPEND ON IT UNCAS AND I WILL NOT FORGET THE DEED BUT REVENGE IT AS BECOMES TRUE WARRIORS AND TRUSTY FRIENDS +1320-122617-0034-112: HOLD SAID DAVID PERCEIVING THAT WITH THIS ASSURANCE THEY WERE ABOUT TO LEAVE HIM I AM AN UNWORTHY AND HUMBLE FOLLOWER OF ONE WHO TAUGHT NOT THE DAMNABLE PRINCIPLE OF REVENGE +1320-122617-0035-113: THEN HEAVING A HEAVY SIGH PROBABLY AMONG THE LAST HE EVER DREW IN PINING FOR A CONDITION HE HAD SO LONG ABANDONED HE ADDED IT IS WHAT I WOULD WISH TO PRACTISE MYSELF AS ONE WITHOUT A CROSS OF BLOOD THOUGH IT IS NOT ALWAYS EASY TO DEAL WITH AN INDIAN AS YOU WOULD WITH A FELLOW CHRISTIAN +1320-122617-0036-114: GOD BLESS YOU FRIEND I DO BELIEVE YOUR SCENT (IS->HAS) NOT GREATLY WRONG WHEN THE MATTER IS DULY CONSIDERED AND KEEPING ETERNITY BEFORE THE EYES THOUGH MUCH DEPENDS ON THE NATURAL GIFTS AND THE FORCE OF TEMPTATION +1320-122617-0037-115: THE DELAWARE DOG HE SAID LEANING FORWARD AND PEERING THROUGH THE DIM LIGHT TO CATCH THE EXPRESSION OF THE OTHER'S FEATURES IS HE AFRAID +1320-122617-0038-116: WILL THE HURONS HEAR HIS GROANS +1320-122617-0039-117: THE (MOHICAN->MOHICANS) STARTED ON HIS FEET AND SHOOK HIS SHAGGY COVERING AS THOUGH THE ANIMAL HE COUNTERFEITED WAS ABOUT TO MAKE SOME DESPERATE EFFORT +1320-122617-0040-118: HE HAD NO OCCASION TO DELAY FOR AT THE NEXT INSTANT A BURST OF CRIES FILLED THE OUTER AIR AND RAN ALONG THE WHOLE EXTENT OF THE VILLAGE +1320-122617-0041-119: UNCAS CAST HIS SKIN AND STEPPED FORTH IN HIS OWN BEAUTIFUL PROPORTIONS +1580-141083-0000-1949: I WILL ENDEAVOUR IN MY STATEMENT TO AVOID SUCH TERMS AS WOULD SERVE TO LIMIT THE EVENTS TO ANY PARTICULAR PLACE OR GIVE A CLUE AS TO THE PEOPLE CONCERNED +1580-141083-0001-1950: I HAD ALWAYS KNOWN HIM TO BE RESTLESS IN HIS MANNER BUT ON THIS 
PARTICULAR OCCASION HE WAS IN SUCH A STATE OF UNCONTROLLABLE AGITATION THAT IT WAS CLEAR SOMETHING VERY UNUSUAL HAD OCCURRED +1580-141083-0002-1951: MY FRIEND'S TEMPER HAD NOT IMPROVED SINCE HE HAD BEEN DEPRIVED OF THE CONGENIAL SURROUNDINGS OF BAKER STREET +1580-141083-0003-1952: WITHOUT HIS (SCRAPBOOKS->SCRAP BOOKS) HIS CHEMICALS AND HIS HOMELY UNTIDINESS HE WAS AN UNCOMFORTABLE MAN +1580-141083-0004-1953: I HAD TO READ IT OVER CAREFULLY AS THE TEXT MUST BE ABSOLUTELY CORRECT +1580-141083-0005-1954: I WAS ABSENT RATHER MORE THAN AN HOUR +1580-141083-0006-1955: THE ONLY DUPLICATE WHICH EXISTED SO FAR AS I KNEW WAS THAT WHICH BELONGED TO MY SERVANT (BANNISTER->BANISTER) A MAN WHO HAS LOOKED AFTER MY ROOM FOR TEN YEARS AND WHOSE HONESTY IS ABSOLUTELY ABOVE SUSPICION +1580-141083-0007-1956: THE MOMENT I LOOKED AT MY TABLE I WAS AWARE THAT SOMEONE HAD RUMMAGED AMONG MY (PAPERS->PAPER) +1580-141083-0008-1957: THE PROOF WAS IN THREE LONG SLIPS I HAD LEFT THEM ALL TOGETHER +1580-141083-0009-1958: (THE ALTERNATIVE->THEY ALL TURNED OF) WAS THAT (SOMEONE->SOME ONE) PASSING HAD OBSERVED THE KEY IN THE DOOR HAD KNOWN THAT I WAS OUT AND HAD ENTERED TO LOOK AT THE PAPERS +1580-141083-0010-1959: I GAVE HIM A LITTLE BRANDY AND LEFT HIM COLLAPSED IN A CHAIR WHILE I MADE A MOST CAREFUL EXAMINATION OF THE ROOM +1580-141083-0011-1960: A BROKEN TIP OF LEAD WAS LYING THERE ALSO +1580-141083-0012-1961: NOT ONLY THIS BUT ON THE TABLE I FOUND A SMALL BALL OF BLACK DOUGH OR CLAY WITH SPECKS OF SOMETHING WHICH LOOKS LIKE SAWDUST IN IT +1580-141083-0013-1962: ABOVE ALL THINGS I DESIRE TO SETTLE THE MATTER QUIETLY AND DISCREETLY +1580-141083-0014-1963: TO THE BEST OF MY BELIEF THEY WERE ROLLED UP +1580-141083-0015-1964: DID (ANYONE->ANY ONE) KNOW THAT THESE PROOFS WOULD BE THERE NO ONE SAVE THE PRINTER +1580-141083-0016-1965: I WAS IN SUCH A HURRY TO COME TO YOU YOU LEFT YOUR DOOR OPEN +1580-141083-0017-1966: SO IT SEEMS TO ME +1580-141083-0018-1967: NOW MISTER (SOAMES->SOLMES) AT YOUR DISPOSAL +1580-141083-0019-1968: ABOVE WERE THREE STUDENTS ONE ON EACH STORY +1580-141083-0020-1969: THEN HE APPROACHED IT AND STANDING ON TIPTOE WITH HIS (NECK->NET) CRANED HE LOOKED INTO THE ROOM +1580-141083-0021-1970: THERE IS NO OPENING EXCEPT THE ONE PANE SAID OUR LEARNED GUIDE +1580-141083-0022-1971: I AM AFRAID THERE ARE NO SIGNS HERE SAID HE +1580-141083-0023-1972: ONE COULD HARDLY HOPE FOR ANY UPON SO DRY A DAY +1580-141083-0024-1973: YOU LEFT HIM IN A CHAIR YOU SAY WHICH CHAIR BY THE WINDOW THERE +1580-141083-0025-1974: THE (MAN->MEN) ENTERED AND TOOK THE PAPERS SHEET BY SHEET FROM THE CENTRAL TABLE +1580-141083-0026-1975: AS A MATTER OF FACT HE COULD NOT SAID (SOAMES->SOLMES) FOR I ENTERED BY THE SIDE DOOR +1580-141083-0027-1976: HOW LONG WOULD IT TAKE HIM TO DO THAT USING EVERY POSSIBLE CONTRACTION A QUARTER OF AN HOUR NOT LESS +1580-141083-0028-1977: THEN HE TOSSED IT DOWN AND SEIZED THE NEXT +1580-141083-0029-1978: HE WAS IN THE MIDST OF THAT WHEN YOUR RETURN CAUSED HIM TO MAKE A VERY HURRIED RETREAT VERY HURRIED SINCE HE HAD NOT TIME TO REPLACE THE PAPERS WHICH WOULD TELL YOU THAT HE HAD BEEN THERE +1580-141083-0030-1979: MISTER (SOAMES->SOLMES) WAS SOMEWHAT OVERWHELMED BY THIS FLOOD OF INFORMATION +1580-141083-0031-1980: HOLMES HELD OUT A SMALL CHIP WITH THE LETTERS N (N->*) AND A SPACE OF CLEAR WOOD AFTER THEM YOU SEE +1580-141083-0032-1981: WATSON I HAVE ALWAYS DONE YOU AN INJUSTICE THERE ARE OTHERS +1580-141083-0033-1982: I WAS HOPING THAT IF THE PAPER ON WHICH HE WROTE WAS THIN SOME TRACE OF IT MIGHT COME THROUGH 
UPON THIS POLISHED SURFACE NO I SEE NOTHING +1580-141083-0034-1983: AS HOLMES DREW THE CURTAIN I WAS AWARE FROM SOME LITTLE RIGIDITY AND (*->AN) ALERTNESS OF HIS ATTITUDE THAT HE WAS PREPARED FOR AN EMERGENCY +1580-141083-0035-1984: HOLMES TURNED AWAY AND STOOPED SUDDENLY TO THE FLOOR (HALLOA WHAT'S->HULLO WHAT IS) THIS +1580-141083-0036-1985: HOLMES (HELD IT->HUTTED) OUT ON HIS OPEN PALM IN THE GLARE OF THE ELECTRIC LIGHT +1580-141083-0037-1986: WHAT COULD HE DO HE CAUGHT UP EVERYTHING WHICH WOULD BETRAY HIM AND HE RUSHED INTO YOUR BEDROOM TO CONCEAL HIMSELF +1580-141083-0038-1987: I UNDERSTAND YOU TO SAY THAT THERE ARE THREE STUDENTS WHO USE THIS STAIR AND ARE IN THE HABIT OF PASSING YOUR DOOR YES THERE ARE +1580-141083-0039-1988: AND THEY ARE ALL IN FOR THIS EXAMINATION YES +1580-141083-0040-1989: ONE HARDLY LIKES TO THROW SUSPICION WHERE THERE ARE NO PROOFS +1580-141083-0041-1990: LET US (HEAR->SEE) THE SUSPICIONS I WILL LOOK AFTER THE PROOFS +1580-141083-0042-1991: MY SCHOLAR HAS BEEN LEFT (*->A) VERY POOR BUT HE IS HARD WORKING AND INDUSTRIOUS HE WILL DO WELL +1580-141083-0043-1992: THE TOP FLOOR BELONGS TO (MILES->MYLES) MC LAREN +1580-141083-0044-1993: I DARE NOT GO SO FAR AS THAT BUT OF THE THREE HE IS PERHAPS THE LEAST UNLIKELY +1580-141083-0045-1994: HE WAS STILL SUFFERING FROM THIS SUDDEN DISTURBANCE OF THE QUIET ROUTINE OF HIS LIFE +1580-141083-0046-1995: BUT I HAVE OCCASIONALLY DONE THE SAME THING AT OTHER TIMES +1580-141083-0047-1996: DID YOU LOOK AT THESE PAPERS ON THE TABLE +1580-141083-0048-1997: HOW CAME YOU TO LEAVE THE KEY IN THE DOOR +1580-141083-0049-1998: (ANYONE->ANY ONE) IN THE ROOM COULD GET OUT YES SIR +1580-141083-0050-1999: I (*->HAVE) REALLY DON'T THINK HE KNEW MUCH ABOUT IT MISTER HOLMES +1580-141083-0051-2000: ONLY FOR A MINUTE OR SO +1580-141083-0052-2001: OH I WOULD NOT VENTURE TO SAY SIR +1580-141083-0053-2002: YOU HAVEN'T SEEN ANY OF THEM NO SIR +1580-141084-0000-2003: IT WAS THE INDIAN WHOSE DARK SILHOUETTE APPEARED SUDDENLY UPON HIS BLIND +1580-141084-0001-2004: HE WAS PACING SWIFTLY UP AND DOWN HIS ROOM +1580-141084-0002-2005: (THIS->THE) SET OF ROOMS IS QUITE THE OLDEST IN THE COLLEGE AND IT IS NOT UNUSUAL FOR VISITORS TO GO OVER THEM +1580-141084-0003-2006: NO NAMES PLEASE SAID HOLMES AS WE KNOCKED AT (GILCHRIST'S->GILCHER'S) DOOR +1580-141084-0004-2007: OF COURSE HE DID NOT REALIZE THAT IT WAS I WHO WAS KNOCKING BUT NONE THE LESS HIS CONDUCT WAS VERY UNCOURTEOUS AND INDEED UNDER THE CIRCUMSTANCES RATHER SUSPICIOUS +1580-141084-0005-2008: THAT IS VERY IMPORTANT SAID HOLMES +1580-141084-0006-2009: YOU DON'T SEEM TO REALIZE THE POSITION +1580-141084-0007-2010: TO MORROW (IS->WAS) THE EXAMINATION +1580-141084-0008-2011: I CANNOT ALLOW THE EXAMINATION TO BE HELD IF ONE OF THE PAPERS HAS BEEN TAMPERED WITH THE SITUATION MUST BE FACED +1580-141084-0009-2012: IT IS POSSIBLE THAT I MAY BE IN A POSITION THEN TO INDICATE SOME COURSE OF ACTION +1580-141084-0010-2013: I WILL TAKE THE BLACK CLAY WITH ME ALSO THE PENCIL CUTTINGS GOOD BYE +1580-141084-0011-2014: WHEN WE WERE OUT IN THE DARKNESS OF THE QUADRANGLE WE AGAIN LOOKED UP AT THE WINDOWS +1580-141084-0012-2015: THE FOUL MOUTHED FELLOW AT THE TOP +1580-141084-0013-2016: HE IS THE ONE WITH THE WORST RECORD +1580-141084-0014-2017: WHY (BANNISTER->BANISTER) THE SERVANT WHAT'S HIS GAME IN THE MATTER +1580-141084-0015-2018: HE IMPRESSED ME AS BEING A PERFECTLY HONEST MAN +1580-141084-0016-2019: MY FRIEND DID NOT APPEAR TO BE DEPRESSED BY HIS FAILURE BUT SHRUGGED HIS SHOULDERS (IN->AND) HALF HUMOROUS RESIGNATION 
+1580-141084-0017-2020: NO GOOD MY DEAR WATSON +1580-141084-0018-2021: I THINK SO YOU HAVE FORMED A CONCLUSION +1580-141084-0019-2022: YES MY DEAR WATSON I HAVE SOLVED THE MYSTERY +1580-141084-0020-2023: LOOK AT THAT HE HELD OUT HIS HAND +1580-141084-0021-2024: ON THE PALM WERE THREE LITTLE PYRAMIDS OF BLACK DOUGHY CLAY +1580-141084-0022-2025: AND ONE MORE THIS MORNING +1580-141084-0023-2026: IN A FEW HOURS THE EXAMINATION WOULD COMMENCE AND HE WAS STILL IN THE DILEMMA BETWEEN MAKING THE FACTS PUBLIC AND ALLOWING THE CULPRIT TO COMPETE FOR THE VALUABLE SCHOLARSHIP +1580-141084-0024-2027: HE COULD HARDLY STAND STILL SO GREAT WAS HIS MENTAL AGITATION AND HE RAN TOWARDS HOLMES WITH (TWO->TOO) EAGER HANDS OUTSTRETCHED THANK HEAVEN THAT YOU HAVE COME +1580-141084-0025-2028: YOU KNOW HIM I THINK SO +1580-141084-0026-2029: IF THIS MATTER IS NOT TO BECOME PUBLIC WE MUST GIVE OURSELVES CERTAIN POWERS AND RESOLVE OURSELVES INTO A SMALL PRIVATE COURT MARTIAL +1580-141084-0027-2030: NO SIR CERTAINLY NOT +1580-141084-0028-2031: THERE WAS NO MAN SIR +1580-141084-0029-2032: HIS TROUBLED BLUE EYES GLANCED AT EACH OF US AND FINALLY RESTED WITH AN EXPRESSION OF BLANK DISMAY UPON (BANNISTER->BANISTER) IN THE FARTHER CORNER +1580-141084-0030-2033: JUST CLOSE THE DOOR SAID HOLMES +1580-141084-0031-2034: WE WANT TO KNOW MISTER (GILCHRIST->GILGRIST) HOW YOU AN HONOURABLE MAN EVER CAME TO COMMIT SUCH AN ACTION AS THAT OF YESTERDAY +1580-141084-0032-2035: FOR A MOMENT (GILCHRIST->GO CHRIST) WITH UPRAISED HAND TRIED TO CONTROL HIS WRITHING FEATURES +1580-141084-0033-2036: COME COME SAID HOLMES KINDLY IT IS HUMAN TO ERR AND AT LEAST NO ONE CAN ACCUSE YOU OF BEING A CALLOUS CRIMINAL +1580-141084-0034-2037: WELL WELL DON'T TROUBLE TO ANSWER LISTEN AND SEE THAT I DO YOU (NO->KNOW) INJUSTICE +1580-141084-0035-2038: HE COULD EXAMINE THE PAPERS IN HIS OWN OFFICE +1580-141084-0036-2039: THE INDIAN I ALSO THOUGHT NOTHING OF +1580-141084-0037-2040: WHEN I APPROACHED YOUR ROOM I EXAMINED THE WINDOW +1580-141084-0038-2041: NO ONE LESS THAN THAT WOULD HAVE A CHANCE +1580-141084-0039-2042: I ENTERED AND I TOOK YOU INTO MY CONFIDENCE AS TO THE SUGGESTIONS OF THE SIDE TABLE +1580-141084-0040-2043: HE RETURNED CARRYING HIS JUMPING SHOES WHICH ARE PROVIDED AS YOU ARE (AWARE->WHERE) WITH SEVERAL SHARP SPIKES +1580-141084-0041-2044: NO HARM WOULD HAVE BEEN DONE HAD IT NOT BEEN THAT AS HE PASSED YOUR DOOR HE PERCEIVED THE KEY WHICH HAD BEEN LEFT BY THE CARELESSNESS OF YOUR SERVANT +1580-141084-0042-2045: A SUDDEN IMPULSE CAME OVER HIM TO ENTER AND SEE IF THEY WERE INDEED THE PROOFS +1580-141084-0043-2046: HE PUT HIS SHOES ON THE TABLE +1580-141084-0044-2047: GLOVES SAID THE YOUNG MAN +1580-141084-0045-2048: SUDDENLY HE HEARD HIM AT THE VERY DOOR THERE WAS NO POSSIBLE ESCAPE +1580-141084-0046-2049: HAVE I TOLD THE TRUTH MISTER (GILCHRIST->GILGRIST) +1580-141084-0047-2050: I HAVE A LETTER HERE MISTER (SOAMES->SOLMES) WHICH I WROTE TO YOU EARLY THIS MORNING IN THE MIDDLE OF A RESTLESS NIGHT +1580-141084-0048-2051: IT (WILL->WOULD) BE CLEAR TO YOU FROM WHAT I HAVE SAID THAT ONLY YOU COULD HAVE LET THIS YOUNG MAN OUT SINCE YOU WERE LEFT IN THE ROOM AND MUST HAVE LOCKED THE DOOR WHEN YOU WENT OUT +1580-141084-0049-2052: IT WAS SIMPLE ENOUGH SIR IF YOU ONLY HAD KNOWN BUT WITH ALL YOUR CLEVERNESS IT WAS IMPOSSIBLE THAT YOU COULD KNOW +1580-141084-0050-2053: IF MISTER (SOAMES->SOLMES) SAW THEM THE GAME WAS UP +1995-1826-0000-750: IN THE DEBATE BETWEEN THE SENIOR SOCIETIES HER DEFENCE OF THE FIFTEENTH AMENDMENT HAD BEEN NOT ONLY A NOTABLE BIT OF 
REASONING BUT DELIVERED WITH REAL ENTHUSIASM +1995-1826-0001-751: THE SOUTH SHE HAD NOT THOUGHT OF SERIOUSLY AND YET KNOWING OF ITS DELIGHTFUL HOSPITALITY AND MILD CLIMATE SHE WAS NOT AVERSE TO CHARLESTON OR NEW ORLEANS +1995-1826-0002-752: JOHN TAYLOR WHO HAD SUPPORTED HER THROUGH COLLEGE WAS INTERESTED IN COTTON +1995-1826-0003-753: BETTER GO HE HAD (COUNSELLED->COUNSEL) SENTENTIOUSLY +1995-1826-0004-754: MIGHT LEARN SOMETHING USEFUL DOWN THERE +1995-1826-0005-755: BUT JOHN THERE'S NO SOCIETY JUST ELEMENTARY WORK +1995-1826-0006-756: BEEN LOOKING UP (TOOMS->TOMBS) COUNTY +1995-1826-0007-757: (FIND->FIVE) SOME (CRESSWELLS->CRUSTWELLS) THERE BIG PLANTATIONS RATED AT TWO HUNDRED AND FIFTY THOUSAND DOLLARS +1995-1826-0008-758: SOME OTHERS TOO BIG COTTON COUNTY +1995-1826-0009-759: YOU OUGHT TO KNOW JOHN IF I TEACH NEGROES I'LL SCARCELY SEE MUCH OF PEOPLE IN MY OWN CLASS +1995-1826-0010-760: AT ANY RATE I SAY GO +1995-1826-0011-761: HERE SHE WAS TEACHING DIRTY CHILDREN AND THE SMELL OF CONFUSED ODORS AND BODILY PERSPIRATION WAS TO HER AT TIMES UNBEARABLE +1995-1826-0012-762: SHE WANTED A GLANCE OF THE NEW BOOKS AND PERIODICALS AND TALK OF (GREAT PHILANTHROPIES->GRATEFUL ANTHROPIES) AND REFORMS +1995-1826-0013-763: SO FOR THE HUNDREDTH TIME SHE WAS THINKING (TODAY->TO DAY) AS SHE WALKED ALONE UP THE LANE BACK OF THE BARN AND THEN SLOWLY DOWN THROUGH THE BOTTOMS +1995-1826-0014-764: COTTON SHE PAUSED +1995-1826-0015-765: SHE HAD ALMOST FORGOTTEN THAT IT WAS HERE WITHIN TOUCH (AND->IN) SIGHT +1995-1826-0016-766: THE GLIMMERING SEA OF DELICATE LEAVES WHISPERED AND MURMURED BEFORE HER STRETCHING AWAY TO THE NORTHWARD +1995-1826-0017-767: THERE MIGHT BE A BIT OF POETRY HERE AND THERE BUT MOST OF THIS PLACE WAS SUCH DESPERATE PROSE +1995-1826-0018-768: HER REGARD SHIFTED TO THE GREEN STALKS AND LEAVES AGAIN AND SHE STARTED TO MOVE AWAY +1995-1826-0019-769: COTTON IS A WONDERFUL THING IS IT NOT BOYS SHE SAID RATHER PRIMLY +1995-1826-0020-770: MISS TAYLOR DID NOT KNOW MUCH ABOUT COTTON BUT AT LEAST ONE MORE (REMARK->REMARKED) SEEMED CALLED FOR +1995-1826-0021-771: DON'T KNOW WELL OF ALL THINGS INWARDLY COMMENTED MISS TAYLOR LITERALLY BORN IN COTTON AND OH WELL AS MUCH AS TO ASK WHAT'S THE USE SHE TURNED AGAIN TO GO +1995-1826-0022-772: I SUPPOSE THOUGH IT'S TOO EARLY FOR THEM THEN CAME THE EXPLOSION +1995-1826-0023-773: (GOOBERS->GOULD WAS) DON'T GROW ON THE (TOPS OF VINES->TOPSY BANDS) BUT (UNDERGROUND->ON THE GROUND) ON THE ROOTS LIKE YAMS IS THAT SO +1995-1826-0024-774: THE GOLDEN FLEECE IT'S THE SILVER FLEECE HE (HARKENED->HEARKENED) +1995-1826-0025-775: (SOME TIME YOU'LL->SOMETIME YOU) TELL ME PLEASE WON'T YOU +1995-1826-0026-776: (NOW->THOU) FOR ONE LITTLE HALF HOUR SHE HAD BEEN A WOMAN TALKING TO A BOY NO NOT EVEN THAT SHE HAD BEEN TALKING JUST TALKING THERE WERE NO PERSONS IN THE CONVERSATION JUST THINGS ONE THING COTTON +1995-1836-0000-735: THE HON (CHARLES->*) SMITH MISS SARAH'S BROTHER WAS WALKING SWIFTLY UPTOWN FROM MISTER EASTERLY'S WALL STREET OFFICE AND HIS FACE WAS PALE +1995-1836-0001-736: AT LAST THE COTTON COMBINE WAS TO ALL APPEARANCES AN ASSURED FACT AND HE WAS SLATED FOR THE SENATE +1995-1836-0002-737: WHY SHOULD HE NOT BE AS OTHER MEN +1995-1836-0003-738: SHE WAS NOT HERSELF A NOTABLY INTELLIGENT WOMAN SHE GREATLY ADMIRED INTELLIGENCE OR WHATEVER LOOKED TO HER LIKE INTELLIGENCE IN OTHERS +1995-1836-0004-739: AS SHE AWAITED HER (GUESTS->GUESS) SHE SURVEYED THE TABLE WITH BOTH SATISFACTION AND DISQUIETUDE FOR HER SOCIAL FUNCTIONS WERE FEW (TONIGHT->TO NIGHT) THERE WERE SHE CHECKED 
THEM OFF ON HER FINGERS SIR JAMES (CREIGHTON->CRITON) THE RICH ENGLISH MANUFACTURER AND LADY (CREIGHTON->CRIGHTON) MISTER AND MISSUS (VANDERPOOL->VAN DERPOOL) MISTER HARRY (CRESSWELL->CRESWELL) AND HIS SISTER JOHN TAYLOR AND HIS SISTER AND MISTER CHARLES SMITH WHOM THE EVENING PAPERS MENTIONED AS LIKELY TO BE UNITED STATES SENATOR FROM NEW JERSEY A SELECTION OF GUESTS THAT HAD BEEN DETERMINED UNKNOWN TO THE HOSTESS BY THE MEETING OF COTTON INTERESTS EARLIER IN THE DAY +1995-1836-0005-740: MISSUS (GREY->GRAY) HAD MET SOUTHERNERS BEFORE BUT NOT INTIMATELY AND SHE ALWAYS HAD IN MIND VIVIDLY THEIR CRUELTY TO POOR NEGROES A SUBJECT SHE MADE A POINT OF INTRODUCING FORTHWITH +1995-1836-0006-741: SHE WAS THEREFORE MOST AGREEABLY SURPRISED TO HEAR MISTER (CRESSWELL->CRESWELL) EXPRESS HIMSELF SO CORDIALLY AS APPROVING OF NEGRO EDUCATION +1995-1836-0007-742: (BUT YOU->DO) BELIEVE IN SOME EDUCATION ASKED MARY TAYLOR +1995-1836-0008-743: I BELIEVE IN THE TRAINING OF PEOPLE TO THEIR (HIGHEST->HAS) CAPACITY THE ENGLISHMAN HERE HEARTILY SECONDED HIM +1995-1836-0009-744: BUT (CRESSWELL->CRASWELL) ADDED SIGNIFICANTLY CAPACITY DIFFERS ENORMOUSLY BETWEEN RACES +1995-1836-0010-745: THE VANDERPOOLS WERE SURE (OF->*) THIS AND THE ENGLISHMAN INSTANCING INDIA BECAME QUITE ELOQUENT MISSUS (GREY->GRAY) WAS MYSTIFIED BUT HARDLY DARED ADMIT IT THE GENERAL TREND OF THE CONVERSATION SEEMED TO BE THAT MOST INDIVIDUALS NEEDED TO BE SUBMITTED TO THE SHARPEST SCRUTINY BEFORE BEING ALLOWED MUCH EDUCATION AND AS FOR THE LOWER RACES IT WAS SIMPLY CRIMINAL TO OPEN SUCH USELESS OPPORTUNITIES TO THEM +1995-1836-0011-746: POSITIVELY HEROIC ADDED (CRESSWELL->CRASWELL) AVOIDING HIS SISTER'S EYES +1995-1836-0012-747: BUT (WE'RE->WE ARE) NOT (ER->A) EXACTLY (WELCOMED->WELCOME) +1995-1836-0013-748: MARY (TAYLOR->TAILOR) HOWEVER RELATED THE TALE OF ZORA TO MISSUS (GREY'S->GRAY'S) PRIVATE EAR LATER +1995-1836-0014-749: FORTUNATELY SAID MISTER (VANDERPOOL NORTHERNERS->VAN DERPOOL NOR THE NOSE) AND SOUTHERNERS (ARE ARRIVING->ALL RIVING) AT A BETTER MUTUAL UNDERSTANDING ON MOST OF THESE MATTERS +1995-1837-0000-777: HE KNEW THE SILVER FLEECE HIS AND (ZORA'S->ZORAS) MUST BE RUINED +1995-1837-0001-778: IT WAS THE FIRST GREAT SORROW OF HIS LIFE IT WAS NOT SO MUCH THE LOSS OF THE COTTON ITSELF BUT THE FANTASY THE HOPES THE DREAMS BUILT AROUND IT +1995-1837-0002-779: AH THE SWAMP THE CRUEL SWAMP +1995-1837-0003-780: (THE->WHO) REVELATION OF HIS LOVE LIGHTED AND BRIGHTENED SLOWLY TILL IT FLAMED LIKE A SUNRISE OVER HIM AND LEFT HIM IN BURNING WONDER +1995-1837-0004-781: HE PANTED TO KNOW IF SHE TOO KNEW OR KNEW AND CARED NOT OR CARED AND KNEW NOT +1995-1837-0005-782: SHE WAS SO STRANGE (AND->IN) HUMAN A CREATURE +1995-1837-0006-783: THE WORLD WAS WATER VEILED IN MISTS +1995-1837-0007-784: THEN OF A SUDDEN AT MIDDAY THE SUN SHOT OUT HOT AND STILL NO BREATH OF AIR STIRRED THE SKY WAS LIKE BLUE STEEL THE EARTH STEAMED +1995-1837-0008-785: WHERE WAS THE USE OF IMAGINING +1995-1837-0009-786: THE LAGOON HAD BEEN LEVEL WITH THE (DYKES->DIKES) A WEEK AGO AND NOW +1995-1837-0010-787: PERHAPS SHE TOO MIGHT BE THERE WAITING WEEPING +1995-1837-0011-788: HE STARTED AT THE THOUGHT HE HURRIED FORTH SADLY +1995-1837-0012-789: HE SPLASHED AND STAMPED ALONG FARTHER AND FARTHER ONWARD UNTIL HE NEARED THE RAMPART OF THE CLEARING AND PUT FOOT UPON THE TREE BRIDGE +1995-1837-0013-790: THEN HE LOOKED DOWN THE LAGOON WAS DRY +1995-1837-0014-791: HE STOOD A MOMENT BEWILDERED THEN TURNED AND RUSHED UPON THE ISLAND A GREAT SHEET OF DAZZLING SUNLIGHT SWEPT THE PLACE AND 
BENEATH LAY A MIGHTY MASS OF OLIVE GREEN THICK TALL WET AND WILLOWY +1995-1837-0015-792: THE SQUARES OF COTTON SHARP EDGED HEAVY WERE JUST ABOUT TO BURST TO (BOLLS->BOWLS) +1995-1837-0016-793: FOR ONE LONG MOMENT HE PAUSED STUPID AGAPE WITH UTTER AMAZEMENT THEN LEANED DIZZILY AGAINST (A->THE) TREE +1995-1837-0017-794: HE GAZED ABOUT PERPLEXED ASTONISHED +1995-1837-0018-795: HERE LAY THE READING OF THE RIDDLE WITH INFINITE WORK AND PAIN SOME ONE HAD DUG A CANAL FROM THE LAGOON TO THE CREEK INTO WHICH THE FORMER HAD DRAINED BY A LONG AND CROOKED WAY THUS ALLOWING IT TO EMPTY DIRECTLY +1995-1837-0019-796: HE SAT DOWN WEAK BEWILDERED AND ONE THOUGHT WAS UPPERMOST (ZORA->SORA) +1995-1837-0020-797: THE YEARS OF THE DAYS OF HER DYING WERE TEN +1995-1837-0021-798: THE HOPE AND DREAM OF HARVEST WAS UPON THE LAND +1995-1837-0022-799: UP IN THE SICK ROOM ZORA LAY ON THE LITTLE WHITE BED +1995-1837-0023-800: THE (NET->NED) AND WEB OF ENDLESS THINGS HAD BEEN CRAWLING AND CREEPING AROUND HER SHE HAD STRUGGLED IN DUMB SPEECHLESS TERROR AGAINST SOME MIGHTY GRASPING THAT STROVE FOR HER LIFE WITH GNARLED AND CREEPING FINGERS BUT NOW AT LAST (WEAKLY->WEEKLY) SHE OPENED HER EYES AND QUESTIONED +1995-1837-0024-801: FOR A WHILE SHE LAY IN HER CHAIR IN HAPPY DREAMY PLEASURE AT SUN AND BIRD AND TREE +1995-1837-0025-802: SHE ROSE WITH A FLEETING GLANCE GATHERED THE SHAWL (ROUND->AROUND) HER THEN GLIDING FORWARD WAVERING TREMULOUS SLIPPED ACROSS THE ROAD AND INTO THE SWAMP +1995-1837-0026-803: SHE HAD BEEN BORN WITHIN ITS BORDERS WITHIN ITS BORDERS SHE HAD LIVED AND GROWN AND WITHIN ITS (BORDERS->BORDER) SHE HAD MET HER LOVE +1995-1837-0027-804: ON SHE HURRIED UNTIL SWEEPING DOWN TO THE LAGOON AND THE ISLAND LO THE COTTON LAY BEFORE HER +1995-1837-0028-805: THE CHAIR WAS EMPTY BUT HE KNEW +1995-1837-0029-806: HE DARTED THROUGH THE TREES AND PAUSED A TALL MAN STRONGLY BUT SLIMLY MADE +2094-142345-0000-308: IT IS A VERY FINE OLD PLACE OF RED BRICK SOFTENED BY A PALE POWDERY LICHEN WHICH HAS DISPERSED ITSELF WITH HAPPY IRREGULARITY SO AS TO BRING THE RED BRICK INTO TERMS OF FRIENDLY COMPANIONSHIP WITH (THE->A) LIMESTONE ORNAMENTS SURROUNDING THE THREE GABLES THE WINDOWS AND THE DOOR PLACE +2094-142345-0001-309: BUT THE WINDOWS ARE PATCHED WITH WOODEN PANES AND THE DOOR I THINK IS LIKE THE GATE IT IS NEVER OPENED +2094-142345-0002-310: FOR IT IS A SOLID HEAVY HANDSOME DOOR AND MUST ONCE HAVE BEEN IN THE HABIT OF SHUTTING WITH A SONOROUS BANG BEHIND (A->THE) LIVERIED LACKEY WHO HAD JUST SEEN HIS MASTER AND MISTRESS OFF THE GROUNDS IN A CARRIAGE AND PAIR +2094-142345-0003-311: A LARGE OPEN FIREPLACE WITH RUSTY DOGS IN IT AND A BARE BOARDED FLOOR AT THE FAR END FLEECES OF WOOL STACKED UP IN THE MIDDLE OF THE FLOOR SOME EMPTY CORN BAGS +2094-142345-0004-312: AND WHAT THROUGH THE LEFT HAND WINDOW +2094-142345-0005-313: SEVERAL CLOTHES HORSES A PILLION A SPINNING WHEEL AND AN OLD BOX WIDE OPEN AND STUFFED FULL OF COLOURED RAGS +2094-142345-0006-314: AT THE EDGE OF THIS BOX THERE LIES A GREAT WOODEN DOLL WHICH SO FAR AS MUTILATION IS CONCERNED BEARS A STRONG RESEMBLANCE TO THE FINEST GREEK SCULPTURE AND ESPECIALLY IN THE TOTAL LOSS OF ITS NOSE +2094-142345-0007-315: THE HISTORY OF THE HOUSE IS PLAIN NOW +2094-142345-0008-316: BUT THERE IS ALWAYS (A->AS) STRONGER SENSE OF LIFE WHEN THE SUN IS BRILLIANT AFTER RAIN AND NOW HE IS POURING DOWN HIS BEAMS AND MAKING SPARKLES AMONG THE WET STRAW AND LIGHTING UP EVERY PATCH OF VIVID GREEN MOSS ON THE RED TILES OF THE COW SHED AND TURNING EVEN THE MUDDY WATER THAT IS HURRYING ALONG THE 
CHANNEL TO THE DRAIN INTO A MIRROR FOR THE YELLOW (BILLED->BUILD) DUCKS WHO ARE SEIZING THE OPPORTUNITY OF GETTING A DRINK WITH AS MUCH BODY IN IT AS POSSIBLE +2094-142345-0009-317: FOR THE GREAT BARN DOORS ARE THROWN WIDE OPEN AND MEN ARE BUSY THERE MENDING THE HARNESS UNDER THE SUPERINTENDENCE OF MISTER GOBY THE (WHITTAW->WIDOW) OTHERWISE SADDLER WHO ENTERTAINS THEM WITH THE LATEST (TREDDLESTON->TREDDLESTONE) GOSSIP +2094-142345-0010-318: (HETTY->HETTY'S) SORREL OFTEN TOOK THE OPPORTUNITY WHEN HER AUNT'S BACK WAS TURNED OF LOOKING AT THE PLEASING REFLECTION OF HERSELF IN THOSE POLISHED (SURFACES->SERVICES) FOR THE OAK TABLE WAS USUALLY TURNED UP LIKE A SCREEN AND WAS MORE FOR ORNAMENT THAN FOR USE AND SHE COULD SEE HERSELF SOMETIMES IN THE GREAT ROUND PEWTER DISHES THAT WERE RANGED ON THE SHELVES ABOVE THE LONG DEAL DINNER TABLE OR IN THE HOBS OF THE GRATE WHICH ALWAYS SHONE LIKE JASPER +2094-142345-0011-319: DO NOT SUPPOSE HOWEVER THAT MISSUS POYSER WAS ELDERLY OR SHREWISH IN HER APPEARANCE SHE WAS A GOOD LOOKING WOMAN NOT MORE THAN EIGHT AND THIRTY OF FAIR COMPLEXION AND SANDY HAIR WELL SHAPEN LIGHT FOOTED +2094-142345-0012-320: THE FAMILY LIKENESS BETWEEN HER AND HER NIECE (DINAH->DINA) MORRIS WITH THE CONTRAST BETWEEN HER KEENNESS AND (DINAH'S->DYNAS) SERAPHIC GENTLENESS OF EXPRESSION MIGHT HAVE SERVED A PAINTER AS AN EXCELLENT SUGGESTION FOR A MARTHA AND MARY +2094-142345-0013-321: HER TONGUE WAS NOT LESS KEEN THAN HER EYE AND WHENEVER A DAMSEL CAME WITHIN (EARSHOT->EAR SHOT) SEEMED TO TAKE UP AN UNFINISHED LECTURE AS A BARREL ORGAN TAKES UP A TUNE PRECISELY AT THE POINT WHERE IT HAD LEFT OFF +2094-142345-0014-322: THE FACT THAT IT WAS CHURNING DAY WAS ANOTHER REASON WHY IT WAS INCONVENIENT TO HAVE THE (WHITTAWS->WIDOWS) AND WHY CONSEQUENTLY MISSUS POYSER SHOULD SCOLD MOLLY THE HOUSEMAID WITH UNUSUAL SEVERITY +2094-142345-0015-323: TO ALL APPEARANCE MOLLY HAD GOT THROUGH HER AFTER DINNER WORK IN AN EXEMPLARY MANNER HAD CLEANED HERSELF WITH GREAT DISPATCH AND NOW CAME TO ASK SUBMISSIVELY IF SHE SHOULD SIT DOWN TO HER SPINNING TILL MILKING TIME +2094-142345-0016-324: SPINNING INDEED +2094-142345-0017-325: I NEVER KNEW YOUR EQUALS FOR GALLOWSNESS +2094-142345-0018-326: WHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE TO KNOW +2094-142345-0019-327: COMB THE WOOL FOR THE (WHITTAWS INDEED->WIDOWS INDE) +2094-142345-0020-328: THAT'S WHAT YOU'D LIKE TO BE DOING IS IT +2094-142345-0021-329: THAT'S THE WAY WITH YOU THAT'S THE ROAD YOU'D ALL LIKE TO GO HEADLONGS TO RUIN +2094-142345-0022-330: MISTER (OTTLEY'S->OAKLEIGHS) INDEED +2094-142345-0023-331: (YOU'RE->YOU ARE) A RARE (UN->AND) FOR SITTING DOWN TO YOUR WORK A LITTLE WHILE AFTER (IT'S->ITS) TIME TO PUT BY +2094-142345-0024-332: (MUNNY->MONEY) MY (IRON'S TWITE->IRONS QUITE) TOLD PEASE PUT IT DOWN TO WARM +2094-142345-0025-333: COLD IS IT MY DARLING BLESS YOUR SWEET FACE +2094-142345-0026-334: SHE'S GOING TO PUT THE IRONING THINGS AWAY +2094-142345-0027-335: (MUNNY->MONEY) I (TOULD IKE->DID LIKE) TO DO INTO (DE->THE) BARN TO TOMMY TO SEE (DE WHITTAWD->THE WIDOWED) +2094-142345-0028-336: NO NO (NO TOTTY UD->TODDY HAD) GET HER FEET WET SAID MISSUS POYSER CARRYING AWAY HER IRON +2094-142345-0029-337: DID EVER ANYBODY SEE THE LIKE SCREAMED MISSUS POYSER RUNNING TOWARDS THE TABLE WHEN HER EYE HAD FALLEN ON THE BLUE STREAM +2094-142345-0030-338: TOTTY HOWEVER HAD DESCENDED FROM HER CHAIR WITH GREAT SWIFTNESS AND WAS ALREADY IN RETREAT TOWARDS THE DAIRY WITH A SORT OF WADDLING RUN AND AN AMOUNT OF FAT ON THE NAPE OF HER NECK WHICH MADE HER LOOK LIKE 
THE METAMORPHOSIS OF A WHITE SUCKLING PIG +2094-142345-0031-339: AND SHE WAS VERY FOND OF YOU TOO AUNT RACHEL +2094-142345-0032-340: I OFTEN HEARD HER TALK OF YOU IN THE SAME SORT OF WAY +2094-142345-0033-341: WHEN SHE HAD THAT BAD ILLNESS AND I WAS ONLY ELEVEN YEARS OLD SHE USED TO SAY YOU'LL HAVE A FRIEND ON EARTH IN YOUR AUNT RACHEL IF I'M TAKEN FROM YOU FOR SHE HAS A KIND HEART AND I'M SURE I'VE FOUND IT SO +2094-142345-0034-342: AND THERE'S LINEN IN THE HOUSE AS I COULD WELL SPARE YOU FOR (I'VE->I) GOT LOTS (O->OF) SHEETING AND TABLE CLOTHING AND (TOWELLING AS->TOWELINGS) ISN'T MADE UP +2094-142345-0035-343: BUT NOT MORE THAN WHAT'S IN THE BIBLE (AUNT->AND) SAID DINAH +2094-142345-0036-344: NAY DEAR AUNT YOU NEVER HEARD ME SAY THAT ALL PEOPLE ARE CALLED TO FORSAKE THEIR WORK AND THEIR FAMILIES +2094-142345-0037-345: WE CAN ALL BE SERVANTS OF GOD WHEREVER OUR LOT IS CAST BUT HE GIVES US DIFFERENT SORTS OF WORK ACCORDING AS HE FITS US FOR IT AND CALLS US TO IT +2094-142345-0038-346: I CAN NO MORE HELP SPENDING MY LIFE IN TRYING TO DO WHAT I CAN FOR THE SOULS OF OTHERS THAN YOU COULD HELP RUNNING IF YOU HEARD LITTLE TOTTY CRYING AT THE OTHER END OF THE HOUSE THE VOICE WOULD GO TO YOUR HEART YOU WOULD THINK THE DEAR CHILD WAS IN TROUBLE OR IN DANGER AND YOU COULDN'T REST WITHOUT RUNNING TO HELP HER AND COMFORT HER +2094-142345-0039-347: I'VE STRONG ASSURANCE THAT NO EVIL WILL HAPPEN TO YOU AND MY UNCLE AND THE CHILDREN FROM ANYTHING (I'VE->I HAVE) DONE +2094-142345-0040-348: I DIDN'T PREACH WITHOUT DIRECTION +2094-142345-0041-349: DIRECTION +2094-142345-0042-350: I (HANNA->HAD A) COMMON PATIENCE WITH YOU +2094-142345-0043-351: BY THIS TIME THE TWO GENTLEMEN HAD REACHED THE PALINGS AND HAD GOT DOWN FROM THEIR HORSES IT WAS PLAIN THEY MEANT TO COME IN +2094-142345-0044-352: SAID MISTER IRWINE WITH HIS STATELY CORDIALITY +2094-142345-0045-353: OH SIR DON'T MENTION IT SAID MISSUS POYSER +2094-142345-0046-354: I DELIGHT IN YOUR KITCHEN +2094-142345-0047-355: POYSER IS NOT AT HOME IS HE +2094-142345-0048-356: SAID CAPTAIN DONNITHORNE (SEATING->SITTING) HIMSELF WHERE HE COULD SEE ALONG THE SHORT PASSAGE TO THE OPEN DAIRY DOOR +2094-142345-0049-357: NO SIR HE ISN'T HE'S GONE TO (ROSSETER->ROSSITUR) TO SEE MISTER WEST THE FACTOR ABOUT THE WOOL +2094-142345-0050-358: BUT THERE'S FATHER (THE->IN) BARN SIR IF HE'D BE OF ANY USE +2094-142345-0051-359: NO THANK YOU I'LL JUST LOOK AT THE WHELPS AND LEAVE A MESSAGE ABOUT THEM WITH YOUR SHEPHERD +2094-142345-0052-360: I MUST COME ANOTHER DAY AND SEE YOUR HUSBAND I WANT TO HAVE A CONSULTATION WITH HIM ABOUT HORSES +2094-142345-0053-361: FOR IF (HE'S->IS) ANYWHERE ON THE FARM WE CAN SEND FOR HIM IN A MINUTE +2094-142345-0054-362: OH SIR SAID MISSUS POYSER RATHER ALARMED YOU WOULDN'T LIKE IT AT ALL +2094-142345-0055-363: BUT YOU KNOW MORE ABOUT THAT THAN I DO SIR +2094-142345-0056-364: I THINK I SHOULD BE DOING YOU A SERVICE TO TURN YOU OUT OF SUCH A PLACE +2094-142345-0057-365: I (KNOW HIS->KNOWS) FARM IS IN BETTER ORDER THAN ANY OTHER WITHIN TEN MILES OF US AND AS FOR THE KITCHEN HE ADDED SMILING I DON'T BELIEVE THERE'S ONE IN THE KINGDOM TO BEAT IT +2094-142345-0058-366: BY THE BY (I'VE->I HAVE) NEVER SEEN YOUR DAIRY I MUST SEE YOUR (DAIRY->DEARIE) MISSUS POYSER +2094-142345-0059-367: THIS MISSUS POYSER SAID BLUSHING AND BELIEVING THAT THE CAPTAIN WAS REALLY INTERESTED IN HER MILK PANS AND WOULD ADJUST HIS OPINION OF HER TO THE APPEARANCE OF HER DAIRY +2094-142345-0060-368: OH I'VE NO DOUBT IT'S IN CAPITAL ORDER +2300-131720-0000-1816: THE PARIS PLANT 
LIKE THAT AT THE CRYSTAL PALACE WAS A TEMPORARY EXHIBIT +2300-131720-0001-1817: THE LONDON PLANT WAS LESS TEMPORARY BUT NOT PERMANENT SUPPLYING BEFORE IT WAS TORN OUT NO FEWER THAN THREE THOUSAND LAMPS IN HOTELS CHURCHES STORES AND DWELLINGS IN THE VICINITY OF HOLBORN (VIADUCT->VIEDUC) +2300-131720-0002-1818: THERE (MESSRS->MESSIERS) JOHNSON AND HAMMER PUT INTO PRACTICE MANY OF THE IDEAS NOW STANDARD IN THE ART AND SECURED MUCH USEFUL DATA FOR THE WORK IN NEW YORK OF WHICH THE STORY HAS JUST BEEN TOLD +2300-131720-0003-1819: THE DYNAMO ELECTRIC MACHINE THOUGH SMALL WAS ROBUST FOR UNDER ALL THE VARYING SPEEDS OF WATER POWER AND THE VICISSITUDES OF THE PLANT TO WHICH IT BELONGED IT CONTINUED IN ACTIVE USE UNTIL EIGHTEEN NINETY NINE SEVENTEEN YEARS +2300-131720-0004-1820: OWING TO HIS INSISTENCE ON LOW PRESSURE DIRECT CURRENT FOR USE IN DENSELY POPULATED DISTRICTS AS THE ONLY SAFE AND TRULY UNIVERSAL PROFITABLE WAY OF DELIVERING ELECTRICAL ENERGY TO THE CONSUMERS EDISON HAS BEEN FREQUENTLY SPOKEN OF AS AN OPPONENT OF THE ALTERNATING CURRENT +2300-131720-0005-1821: WHY IF WE ERECT A STATION AT THE FALLS IT IS A GREAT ECONOMY TO GET IT UP TO THE CITY +2300-131720-0006-1822: THERE SEEMS NO GOOD REASON FOR BELIEVING THAT IT WILL CHANGE +2300-131720-0007-1823: BROAD AS THE PRAIRIES AND FREE IN THOUGHT AS THE WINDS THAT (SWEEP->SWEPT) THEM HE IS (IDIOSYNCRATICALLY->IDIOS AND CRATICALLY) OPPOSED TO LOOSE AND WASTEFUL METHODS TO PLANS OF EMPIRE THAT NEGLECT THE POOR AT THE GATE +2300-131720-0008-1824: EVERYTHING HE HAS DONE HAS BEEN AIMED AT THE CONSERVATION OF ENERGY THE CONTRACTION OF SPACE THE INTENSIFICATION OF CULTURE +2300-131720-0009-1825: FOR SOME YEARS IT WAS NOT FOUND FEASIBLE TO OPERATE MOTORS ON ALTERNATING CURRENT CIRCUITS AND THAT REASON WAS OFTEN URGED AGAINST (IT->ITS) SERIOUSLY +2300-131720-0010-1826: IT COULD NOT BE USED FOR ELECTROPLATING OR DEPOSITION NOR COULD IT CHARGE STORAGE BATTERIES ALL OF WHICH ARE EASILY WITHIN THE ABILITY OF THE DIRECT CURRENT +2300-131720-0011-1827: BUT WHEN IT CAME TO BE A QUESTION OF LIGHTING A SCATTERED SUBURB A GROUP OF DWELLINGS ON THE OUTSKIRTS A REMOTE COUNTRY RESIDENCE OR A FARM HOUSE THE ALTERNATING CURRENT IN ALL ELEMENTS SAVE ITS DANGER WAS AND IS IDEAL +2300-131720-0012-1828: EDISON WAS INTOLERANT OF SHAM AND (SHODDY->SHODY) AND NOTHING WOULD SATISFY HIM THAT COULD NOT STAND CROSS EXAMINATION BY MICROSCOPE TEST TUBE AND GALVANOMETER +2300-131720-0013-1829: UNLESS HE COULD SECURE AN ENGINE OF SMOOTHER RUNNING AND MORE EXACTLY (GOVERNED->GOVERN) AND REGULATED THAN THOSE (AVAILABLE->AVALUABLE) FOR HIS DYNAMO AND LAMP EDISON REALIZED THAT HE WOULD FIND IT ALMOST IMPOSSIBLE TO GIVE A STEADY LIGHT +2300-131720-0014-1830: MISTER EDISON WAS A LEADER FAR AHEAD OF THE TIME +2300-131720-0015-1831: HE OBTAINED THE DESIRED SPEED AND (LOAD->LOWED) WITH A FRICTION (BRAKE->BREAK) ALSO REGULATOR OF SPEED BUT WAITED FOR AN INDICATOR TO VERIFY IT +2300-131720-0016-1832: THEN AGAIN THERE WAS NO KNOWN WAY TO (LUBRICATE->LUBRICADE) AN ENGINE FOR CONTINUOUS RUNNING AND MISTER EDISON INFORMED ME THAT AS A MARINE ENGINE STARTED BEFORE THE SHIP LEFT NEW YORK AND CONTINUED RUNNING UNTIL IT REACHED ITS HOME PORT SO AN ENGINE FOR HIS PURPOSES MUST PRODUCE LIGHT AT ALL TIMES +2300-131720-0017-1833: EDISON HAD INSTALLED HIS HISTORIC FIRST GREAT CENTRAL STATION SYSTEM IN NEW YORK ON THE MULTIPLE ARC SYSTEM COVERED BY HIS FEEDER AND MAIN INVENTION WHICH RESULTED IN A NOTABLE SAVING IN THE COST OF CONDUCTORS AS AGAINST A STRAIGHT TWO WIRE SYSTEM THROUGHOUT OF THE TREE KIND 
+2300-131720-0018-1834: HE SOON FORESAW THAT STILL GREATER ECONOMY WOULD BE NECESSARY FOR COMMERCIAL SUCCESS NOT ALONE FOR THE LARGER TERRITORY OPENING BUT FOR THE COMPACT (DISTRICTS->DISTRICT) OF LARGE CITIES +2300-131720-0019-1835: THE STRONG POSITION HELD BY THE EDISON SYSTEM UNDER THE STRENUOUS COMPETITION THAT WAS ALREADY SPRINGING UP WAS ENORMOUSLY IMPROVED BY THE INTRODUCTION OF THE THREE WIRE SYSTEM AND IT GAVE AN IMMEDIATE IMPETUS TO INCANDESCENT LIGHTING +2300-131720-0020-1836: IT WAS SPECIALLY SUITED FOR A TRIAL PLANT ALSO IN THE EARLY DAYS WHEN A YIELD OF SIX OR EIGHT LAMPS TO THE HORSE POWER WAS CONSIDERED SUBJECT FOR CONGRATULATION +2300-131720-0021-1837: THE STREET CONDUCTORS WERE OF THE OVERHEAD POLE LINE CONSTRUCTION AND WERE INSTALLED BY THE CONSTRUCTION COMPANY THAT HAD BEEN ORGANIZED BY EDISON TO BUILD (AND EQUIP->AN EQUIPPED) CENTRAL STATIONS +2300-131720-0022-1838: MEANWHILE HE HAD CALLED UPON ME TO MAKE A REPORT OF THE THREE WIRE SYSTEM KNOWN IN ENGLAND AS THE HOPKINSON BOTH DOCTOR JOHN HOPKINSON AND MISTER EDISON BEING INDEPENDENT (INVENTORS->IN VENORS) AT PRACTICALLY THE SAME TIME +2300-131720-0023-1839: I THINK HE WAS PERHAPS MORE APPRECIATIVE THAN I WAS OF THE DISCIPLINE OF THE EDISON CONSTRUCTION DEPARTMENT AND THOUGHT IT WOULD BE WELL FOR US TO WAIT UNTIL THE MORNING OF THE FOURTH BEFORE WE STARTED UP +2300-131720-0024-1840: BUT THE PLANT RAN AND IT WAS THE FIRST THREE WIRE STATION IN THIS COUNTRY +2300-131720-0025-1841: THEY WERE LATER USED AS RESERVE MACHINES AND FINALLY WITH THE ENGINE RETIRED FROM SERVICE AS PART OF THE COLLECTION OF EDISONIA BUT THEY REMAIN IN PRACTICALLY AS GOOD CONDITION AS (WHEN->ONE) INSTALLED IN EIGHTEEN EIGHTY THREE +2300-131720-0026-1842: THE (ARC->ARK) LAMP INSTALLED OUTSIDE A CUSTOMER'S PREMISES OR IN A CIRCUIT FOR PUBLIC STREET LIGHTING BURNED SO MANY HOURS NIGHTLY SO MANY NIGHTS IN THE MONTH AND WAS PAID FOR AT THAT RATE SUBJECT TO REBATE FOR HOURS WHEN THE LAMP MIGHT BE OUT THROUGH ACCIDENT +2300-131720-0027-1843: EDISON HELD THAT THE ELECTRICITY SOLD MUST BE MEASURED JUST LIKE GAS OR WATER AND HE PROCEEDED TO DEVELOP A METER +2300-131720-0028-1844: THERE WAS INFINITE SCEPTICISM AROUND HIM ON THE SUBJECT AND WHILE OTHER INVENTORS WERE ALSO GIVING THE SUBJECT THEIR THOUGHT THE PUBLIC TOOK IT FOR GRANTED THAT ANYTHING SO UTTERLY INTANGIBLE AS ELECTRICITY THAT COULD NOT BE SEEN OR WEIGHED AND ONLY GAVE SECONDARY EVIDENCE OF ITSELF AT THE EXACT POINT OF USE COULD NOT BE BROUGHT TO ACCURATE REGISTRATION +2300-131720-0029-1845: HENCE THE EDISON ELECTROLYTIC METER IS NO LONGER USED DESPITE ITS EXCELLENT QUALITIES +2300-131720-0030-1846: THE (PRINCIPLE->PRINCIPAL) EMPLOYED IN THE EDISON ELECTROLYTIC METER IS THAT WHICH EXEMPLIFIES THE POWER OF ELECTRICITY TO DECOMPOSE A CHEMICAL SUBSTANCE +2300-131720-0031-1847: ASSOCIATED WITH THIS SIMPLE FORM OF APPARATUS WERE VARIOUS INGENIOUS DETAILS AND REFINEMENTS TO SECURE REGULARITY OF OPERATION FREEDOM FROM INACCURACY AND IMMUNITY FROM SUCH TAMPERING AS WOULD PERMIT THEFT OF CURRENT OR DAMAGE +2300-131720-0032-1848: THE STANDARD EDISON METER PRACTICE WAS TO REMOVE THE CELLS ONCE A MONTH TO THE METER ROOM OF THE CENTRAL STATION COMPANY FOR EXAMINATION ANOTHER SET BEING SUBSTITUTED +2300-131720-0033-1849: IN DECEMBER EIGHTEEN EIGHTY EIGHT MISTER W J JENKS READ AN INTERESTING PAPER BEFORE THE AMERICAN INSTITUTE OF ELECTRICAL ENGINEERS ON THE SIX YEARS OF PRACTICAL EXPERIENCE HAD UP TO THAT TIME WITH THE (METER->METRE) THEN MORE GENERALLY IN USE THAN ANY OTHER +2300-131720-0034-1850: THE OTHERS 
HAVING BEEN IN OPERATION TOO SHORT A TIME TO SHOW DEFINITE RESULTS ALTHOUGH THEY ALSO WENT QUICKLY TO A DIVIDEND BASIS +2300-131720-0035-1851: IN THIS CONNECTION IT SHOULD BE MENTIONED THAT THE ASSOCIATION OF EDISON ILLUMINATING COMPANIES IN THE SAME YEAR ADOPTED RESOLUTIONS UNANIMOUSLY TO THE EFFECT THAT THE EDISON METER WAS ACCURATE AND THAT ITS USE WAS NOT EXPENSIVE FOR STATIONS ABOVE ONE THOUSAND LIGHTS AND THAT THE BEST FINANCIAL RESULTS WERE INVARIABLY SECURED IN A STATION SELLING CURRENT BY (METER->METRE) +2300-131720-0036-1852: THE (METER->METRE) CONTINUED IN GENERAL SERVICE DURING EIGHTEEN NINETY NINE AND PROBABLY UP TO THE CLOSE OF THE CENTURY +2300-131720-0037-1853: HE WEIGHED AND (REWEIGHED->REWAIED) THE (METER->METRE) PLATES AND PURSUED EVERY LINE OF INVESTIGATION IMAGINABLE BUT ALL IN VAIN +2300-131720-0038-1854: HE FELT HE WAS UP AGAINST IT AND THAT PERHAPS ANOTHER KIND OF A JOB WOULD SUIT HIM BETTER +2300-131720-0039-1855: THE PROBLEM WAS (SOLVED->SOBBED) +2300-131720-0040-1856: WE WERE MORE INTERESTED IN THE TECHNICAL CONDITION OF THE STATION THAN IN THE COMMERCIAL PART +2300-131720-0041-1857: WE HAD (METERS->METRES) IN WHICH THERE WERE TWO BOTTLES OF LIQUID +237-126133-0000-2407: HERE SHE WOULD STAY COMFORTED AND (SOOTHED->SOOTHE) AMONG THE LOVELY PLANTS AND RICH EXOTICS REJOICING THE HEART OF OLD TURNER THE GARDENER WHO SINCE POLLY'S FIRST RAPTUROUS ENTRANCE HAD TAKEN HER INTO HIS GOOD GRACES FOR ALL TIME +237-126133-0001-2408: EVERY CHANCE SHE COULD STEAL AFTER PRACTICE HOURS WERE OVER AND AFTER THE CLAMOROUS DEMANDS OF THE BOYS UPON HER TIME WERE FULLY SATISFIED WAS SEIZED TO FLY ON THE WINGS OF THE WIND TO THE FLOWERS +237-126133-0002-2409: THEN DEAR SAID MISSUS WHITNEY YOU MUST BE KINDER TO HER THAN EVER THINK WHAT IT WOULD BE FOR ONE OF YOU TO BE AWAY FROM HOME EVEN AMONG FRIENDS +237-126133-0003-2410: SOMEHOW OF ALL THE DAYS WHEN THE HOME FEELING WAS THE STRONGEST THIS DAY IT SEEMED AS IF SHE COULD BEAR IT NO LONGER +237-126133-0004-2411: IF SHE COULD ONLY SEE PHRONSIE FOR JUST ONE MOMENT +237-126133-0005-2412: OH SHE'S ALWAYS AT THE PIANO SAID VAN SHE MUST BE THERE NOW SOMEWHERE AND THEN SOMEBODY LAUGHED +237-126133-0006-2413: AT THIS THE BUNDLE OPENED SUDDENLY AND OUT POPPED PHRONSIE +237-126133-0007-2414: BUT POLLY COULDN'T SPEAK AND IF JASPER HADN'T CAUGHT HER JUST IN TIME SHE WOULD HAVE TUMBLED OVER BACKWARD FROM THE STOOL PHRONSIE AND ALL +237-126133-0008-2415: ASKED PHRONSIE WITH HER LITTLE FACE CLOSE TO POLLY'S OWN +237-126133-0009-2416: NOW YOU'LL STAY CRIED VAN SAY POLLY WON'T YOU +237-126133-0010-2417: OH YOU ARE THE DEAREST AND BEST MISTER KING I EVER SAW BUT HOW DID YOU MAKE MAMMY LET HER COME +237-126133-0011-2418: ISN'T HE SPLENDID CRIED JASPER (IN->AN) INTENSE PRIDE SWELLING UP FATHER KNEW HOW TO DO IT +237-126133-0012-2419: THERE THERE HE SAID SOOTHINGLY PATTING HER BROWN FUZZY HEAD +237-126133-0013-2420: I KNOW GASPED POLLY CONTROLLING HER SOBS I WON'T ONLY I CAN'T THANK YOU +237-126133-0014-2421: ASKED PHRONSIE IN INTENSE INTEREST SLIPPING DOWN OUT OF POLLY'S ARMS AND CROWDING UP CLOSE TO JASPER'S SIDE +237-126133-0015-2422: YES ALL ALONE BY HIMSELF ASSERTED JASPER VEHEMENTLY AND WINKING FURIOUSLY TO THE OTHERS TO STOP THEIR LAUGHING HE DID NOW TRULY PHRONSIE +237-126133-0016-2423: OH NO (JASPER->JAPSER) I MUST GO BY MY VERY OWN SELF +237-126133-0017-2424: THERE JAP YOU'VE CAUGHT IT LAUGHED PERCY WHILE THE OTHERS SCREAMED AT THE SIGHT OF JASPER'S FACE +237-126133-0018-2425: DON'T MIND IT POLLY WHISPERED JASPER TWASN'T HER FAULT 
+237-126133-0019-2426: DEAR ME EJACULATED THE OLD GENTLEMAN IN THE UTMOST AMAZEMENT AND SUCH A TIME AS I'VE HAD TO GET HER HERE TOO +237-126133-0020-2427: HOW DID HER MOTHER EVER LET HER GO +237-126133-0021-2428: SHE ASKED IMPULSIVELY I DIDN'T BELIEVE YOU COULD PERSUADE HER FATHER +237-126133-0022-2429: I DIDN'T HAVE ANY FEARS IF I WORKED IT RIGHTLY SAID THE OLD GENTLEMAN COMPLACENTLY +237-126133-0023-2430: HE CRIED IN HIGH DUDGEON JUST AS IF HE OWNED THE WHOLE OF THE PEPPERS AND COULD DISPOSE OF THEM ALL TO SUIT HIS FANCY +237-126133-0024-2431: AND THE OLD GENTLEMAN WAS SO DELIGHTED WITH HIS SUCCESS THAT HE HAD TO BURST OUT INTO A SERIES OF SHORT HAPPY BITS OF LAUGHTER THAT OCCUPIED QUITE A SPACE OF TIME +237-126133-0025-2432: AT LAST HE CAME OUT OF THEM AND WIPED HIS FACE VIGOROUSLY +237-134493-0000-2388: IT IS SIXTEEN YEARS SINCE JOHN (BERGSON->BERKSON) DIED +237-134493-0001-2389: HIS WIFE NOW LIES BESIDE HIM AND THE WHITE SHAFT THAT MARKS THEIR GRAVES GLEAMS ACROSS THE WHEAT FIELDS +237-134493-0002-2390: FROM THE NORWEGIAN GRAVEYARD ONE LOOKS OUT OVER A VAST (CHECKER->CHEQUER) BOARD MARKED OFF IN SQUARES OF WHEAT AND CORN LIGHT AND DARK (DARK->*) AND LIGHT +237-134493-0003-2391: FROM THE GRAVEYARD GATE ONE CAN COUNT A DOZEN (GAYLY->GAILY) PAINTED (FARMHOUSES->FARM HOUSES) THE GILDED WEATHER (VANES->VEINS) ON THE BIG RED BARNS WINK AT EACH OTHER ACROSS THE GREEN AND BROWN AND YELLOW FIELDS +237-134493-0004-2392: THE AIR AND THE EARTH ARE CURIOUSLY MATED AND INTERMINGLED AS IF THE ONE WERE THE BREATH OF THE OTHER +237-134493-0005-2393: HE WAS A SPLENDID FIGURE OF A BOY TALL AND STRAIGHT AS A YOUNG PINE TREE WITH A HANDSOME HEAD AND STORMY (GRAY->GREY) EYES DEEPLY SET UNDER A SERIOUS BROW +237-134493-0006-2394: THAT'S NOT MUCH OF A JOB FOR AN ATHLETE HERE I'VE BEEN TO TOWN AND BACK +237-134493-0007-2395: (ALEXANDRA LETS->ALEXANDER THAT'S) YOU SLEEP LATE +237-134493-0008-2396: SHE GATHERED UP HER REINS +237-134493-0009-2397: PLEASE WAIT FOR ME MARIE (EMIL->AMYL) COAXED +237-134493-0010-2398: I NEVER SEE (LOU'S->LOOSE) SCYTHE OVER HERE +237-134493-0011-2399: HOW BROWN YOU'VE GOT SINCE YOU CAME HOME I WISH I HAD AN (ATHLETE->ADETE) TO MOW MY ORCHARD +237-134493-0012-2400: I GET WET TO MY KNEES WHEN I GO DOWN TO (PICK->PIC) CHERRIES +237-134493-0013-2401: INDEED HE HAD LOOKED AWAY WITH THE PURPOSE OF NOT SEEING IT +237-134493-0014-2402: THEY THINK (YOU'RE->YOU ARE) PROUD BECAUSE YOU'VE BEEN AWAY TO SCHOOL OR SOMETHING +237-134493-0015-2403: THERE WAS SOMETHING INDIVIDUAL ABOUT THE GREAT FARM A MOST UNUSUAL TRIMNESS AND CARE FOR DETAIL +237-134493-0016-2404: ON EITHER SIDE OF THE ROAD FOR A MILE BEFORE YOU REACHED THE FOOT OF THE HILL STOOD TALL (OSAGE ORANGE HEDGES->O SAGE ORANGES) THEIR GLOSSY GREEN MARKING OFF THE YELLOW FIELDS +237-134493-0017-2405: ANY ONE THEREABOUTS WOULD HAVE TOLD YOU THAT THIS WAS ONE OF THE RICHEST FARMS ON THE DIVIDE AND THAT THE FARMER WAS A WOMAN ALEXANDRA (BERGSON->BERGIN) +237-134493-0018-2406: THERE IS EVEN A WHITE ROW OF (BEEHIVES->BEE HIVES) IN THE ORCHARD UNDER THE WALNUT TREES +237-134500-0000-2345: FRANK READ ENGLISH SLOWLY AND THE MORE HE READ ABOUT THIS DIVORCE CASE THE ANGRIER HE GREW +237-134500-0001-2346: MARIE SIGHED +237-134500-0002-2347: A (BRISK->BRACE) WIND HAD COME UP AND WAS DRIVING PUFFY WHITE CLOUDS ACROSS THE SKY +237-134500-0003-2348: THE (ORCHARD->ARCHWOOD) WAS SPARKLING AND RIPPLING IN THE SUN +237-134500-0004-2349: THAT INVITATION DECIDED HER +237-134500-0005-2350: OH BUT (I'M->I AM) GLAD TO GET THIS PLACE MOWED +237-134500-0006-2351: 
JUST SMELL THE WILD ROSES THEY ARE ALWAYS SO SPICY AFTER A RAIN +237-134500-0007-2352: WE NEVER HAD SO MANY OF THEM IN HERE BEFORE +237-134500-0008-2353: I SUPPOSE IT'S THE WET SEASON WILL YOU HAVE TO CUT THEM TOO +237-134500-0009-2354: I SUPPOSE THAT'S THE WET SEASON TOO THEN +237-134500-0010-2355: IT'S EXCITING TO SEE EVERYTHING GROWING SO FAST AND TO GET THE GRASS CUT +237-134500-0011-2356: AREN'T YOU SPLASHED LOOK AT THE SPIDER WEBS ALL OVER THE GRASS +237-134500-0012-2357: IN A FEW MOMENTS HE HEARD THE CHERRIES DROPPING SMARTLY INTO THE PAIL AND HE BEGAN TO SWING HIS SCYTHE WITH THAT LONG EVEN STROKE THAT FEW AMERICAN BOYS EVER LEARN +237-134500-0013-2358: MARIE PICKED CHERRIES AND SANG SOFTLY TO HERSELF STRIPPING ONE GLITTERING (BRANCH->RANCH) AFTER ANOTHER SHIVERING WHEN SHE (CAUGHT->THOUGHT) A SHOWER OF RAINDROPS ON HER NECK AND HAIR +237-134500-0014-2359: AND (EMIL->AMYL) MOWED HIS WAY SLOWLY DOWN TOWARD THE CHERRY TREES +237-134500-0015-2360: THAT SUMMER THE RAINS HAD BEEN SO MANY AND OPPORTUNE THAT IT WAS ALMOST MORE THAN SHABATA AND HIS MAN COULD DO TO KEEP UP WITH THE CORN THE ORCHARD WAS A NEGLECTED WILDERNESS +237-134500-0016-2361: I DON'T KNOW ALL OF THEM BUT I KNOW LINDENS ARE +237-134500-0017-2362: IF I FEEL THAT WAY I FEEL THAT WAY +237-134500-0018-2363: HE REACHED UP AMONG THE BRANCHES AND BEGAN TO PICK THE SWEET INSIPID FRUIT LONG IVORY COLORED BERRIES TIPPED WITH FAINT PINK LIKE WHITE CORAL THAT FALL TO THE GROUND UNHEEDED ALL SUMMER THROUGH +237-134500-0019-2364: HE DROPPED A HANDFUL INTO HER LAP +237-134500-0020-2365: YES DON'T YOU +237-134500-0021-2366: OH EVER SO MUCH ONLY HE SEEMS KIND OF (STAID AND->STAY AT IN) SCHOOL TEACHERY +237-134500-0022-2367: WHEN SHE USED TO TELL ME ABOUT HIM I ALWAYS WONDERED WHETHER SHE WASN'T A LITTLE IN LOVE WITH HIM +237-134500-0023-2368: IT WOULD SERVE YOU ALL RIGHT IF SHE WALKED OFF WITH (CARL->KARL) +237-134500-0024-2369: I LIKE TO TALK TO (CARL->KARL) ABOUT NEW YORK AND WHAT A FELLOW CAN DO THERE +237-134500-0025-2370: OH (EMIL->AMY ILL) +237-134500-0026-2371: SURELY YOU ARE NOT THINKING OF GOING OFF THERE +237-134500-0027-2372: (MARIE'S->MARI'S) FACE FELL UNDER HIS BROODING GAZE +237-134500-0028-2373: (I'M->I AM) SURE (ALEXANDRA HOPES->ALEXANDER HELPS) YOU WILL STAY ON HERE SHE MURMURED +237-134500-0029-2374: I DON'T WANT TO STAND AROUND AND LOOK ON +237-134500-0030-2375: I WANT TO BE DOING SOMETHING ON MY OWN ACCOUNT +237-134500-0031-2376: SOMETIMES I DON'T WANT TO DO ANYTHING AT ALL AND SOMETIMES I WANT TO PULL THE FOUR CORNERS OF THE DIVIDE TOGETHER HE THREW OUT HIS ARM AND BROUGHT IT BACK WITH A JERK SO LIKE A (TABLE CLOTH->TABLECLOTH) +237-134500-0032-2377: I GET TIRED OF SEEING (MEN->MAN) AND HORSES GOING UP AND DOWN UP AND DOWN +237-134500-0033-2378: I WISH YOU WEREN'T SO RESTLESS AND DIDN'T GET SO WORKED UP OVER THINGS SHE SAID SADLY +237-134500-0034-2379: THANK YOU HE RETURNED SHORTLY +237-134500-0035-2380: AND YOU NEVER USED TO BE CROSS TO ME +237-134500-0036-2381: I CAN'T PLAY WITH YOU LIKE A LITTLE BOY ANY MORE HE SAID SLOWLY THAT'S WHAT YOU MISS (MARIE->MARI) +237-134500-0037-2382: BUT (EMIL->AM ILL) IF I UNDERSTAND (THEN->IN) ALL OUR GOOD TIMES ARE OVER WE CAN NEVER DO NICE THINGS TOGETHER ANY MORE +237-134500-0038-2383: AND ANYHOW THERE'S NOTHING TO UNDERSTAND +237-134500-0039-2384: THAT WON'T LAST IT WILL GO AWAY AND THINGS WILL BE JUST AS THEY USED TO +237-134500-0040-2385: I PRAY FOR YOU BUT THAT'S NOT THE SAME AS IF YOU PRAYED YOURSELF +237-134500-0041-2386: I CAN'T PRAY TO HAVE THE THINGS I WANT HE SAID 
SLOWLY AND I WON'T PRAY NOT TO HAVE THEM NOT IF I'M DAMNED FOR IT +237-134500-0042-2387: THEN ALL OUR GOOD TIMES ARE OVER +260-123286-0000-200: SATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL ROUND NO LAND IN SIGHT +260-123286-0001-201: THE HORIZON SEEMS EXTREMELY DISTANT +260-123286-0002-202: ALL MY DANGER AND SUFFERINGS WERE NEEDED TO STRIKE A SPARK OF HUMAN FEELING OUT OF HIM BUT NOW THAT I AM WELL HIS NATURE HAS RESUMED ITS SWAY +260-123286-0003-203: YOU SEEM ANXIOUS MY UNCLE I SAID SEEING HIM CONTINUALLY WITH HIS GLASS TO HIS EYE ANXIOUS +260-123286-0004-204: ONE MIGHT BE WITH LESS REASON THAN NOW +260-123286-0005-205: I AM NOT COMPLAINING THAT THE RATE IS SLOW BUT THAT THE SEA IS SO WIDE +260-123286-0006-206: WE ARE LOSING TIME AND THE FACT IS I HAVE NOT COME ALL THIS WAY TO TAKE A LITTLE SAIL UPON A POND ON A RAFT +260-123286-0007-207: HE CALLED THIS SEA (A POND->UPON) AND OUR LONG VOYAGE TAKING A LITTLE SAIL +260-123286-0008-208: THEREFORE DON'T TALK TO ME ABOUT VIEWS AND PROSPECTS +260-123286-0009-209: I TAKE THIS AS MY ANSWER AND I LEAVE THE PROFESSOR TO BITE HIS LIPS WITH IMPATIENCE +260-123286-0010-210: SUNDAY AUGUST SIXTEENTH +260-123286-0011-211: NOTHING NEW (WEATHER->WHETHER) UNCHANGED THE WIND FRESHENS +260-123286-0012-212: BUT THERE SEEMED NO REASON (TO->OF) FEAR +260-123286-0013-213: THE SHADOW OF THE RAFT WAS CLEARLY OUTLINED UPON THE SURFACE OF THE WAVES +260-123286-0014-214: TRULY (THIS->THE) SEA IS OF INFINITE (WIDTH->WID) +260-123286-0015-215: IT MUST BE AS WIDE AS THE MEDITERRANEAN OR THE ATLANTIC AND WHY NOT +260-123286-0016-216: THESE THOUGHTS AGITATED ME ALL DAY AND MY IMAGINATION SCARCELY CALMED DOWN AFTER SEVERAL HOURS SLEEP +260-123286-0017-217: I SHUDDER AS I RECALL THESE MONSTERS TO MY REMEMBRANCE +260-123286-0018-218: I SAW AT THE HAMBURG MUSEUM THE SKELETON OF ONE OF THESE CREATURES THIRTY FEET IN LENGTH +260-123286-0019-219: I SUPPOSE PROFESSOR LIEDENBROCK WAS OF MY OPINION TOO AND EVEN SHARED MY FEARS FOR AFTER HAVING EXAMINED THE (PICK->PIG) HIS EYES TRAVERSED THE OCEAN FROM SIDE TO SIDE +260-123286-0020-220: TUESDAY AUGUST EIGHTEENTH +260-123286-0021-221: DURING HIS WATCH I SLEPT +260-123286-0022-222: TWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE ME +260-123286-0023-223: THE RAFT WAS HEAVED UP ON A WATERY MOUNTAIN AND PITCHED DOWN AGAIN AT A DISTANCE OF TWENTY FATHOMS +260-123286-0024-224: THERE'S A (WHALE->WAIL) A (WHALE->WELL) CRIED THE PROFESSOR +260-123286-0025-225: (FLIGHT->FIGHT) WAS OUT OF THE QUESTION NOW THE REPTILES ROSE THEY WHEELED AROUND OUR LITTLE RAFT WITH A RAPIDITY GREATER THAN THAT OF EXPRESS TRAINS +260-123286-0026-226: TWO MONSTERS (ONLY->OMER) WERE CREATING ALL THIS COMMOTION AND BEFORE MY EYES (ARE->OUR) TWO REPTILES OF THE PRIMITIVE WORLD +260-123286-0027-227: I CAN DISTINGUISH THE EYE OF THE (ICHTHYOSAURUS->ITHUSORIS) GLOWING LIKE A RED HOT (COAL->CO) AND AS LARGE AS A MAN'S HEAD +260-123286-0028-228: ITS JAW IS ENORMOUS AND ACCORDING TO NATURALISTS IT IS ARMED WITH NO LESS THAN ONE HUNDRED AND EIGHTY TWO TEETH +260-123286-0029-229: THOSE HUGE CREATURES ATTACKED EACH OTHER WITH THE GREATEST ANIMOSITY +260-123286-0030-230: SUDDENLY THE (ICHTHYOSAURUS->IDEAS) AND THE (PLESIOSAURUS->PLEAS) DISAPPEAR BELOW LEAVING A (WHIRLPOOL->WAR POOL) EDDYING IN THE WATER +260-123286-0031-231: AS FOR THE (ICHTHYOSAURUS->ETHIOSORIS) HAS HE RETURNED TO HIS SUBMARINE CAVERN +260-123288-0000-232: THE ROARINGS BECOME LOST IN THE DISTANCE +260-123288-0001-233: THE WEATHER IF WE MAY USE (THAT->THE) TERM WILL CHANGE BEFORE LONG +260-123288-0002-234: THE 
ATMOSPHERE IS CHARGED WITH (VAPOURS->VAPORS) PERVADED WITH THE ELECTRICITY GENERATED BY THE EVAPORATION OF (SALINE->SAILING) WATERS +260-123288-0003-235: THE ELECTRIC LIGHT CAN SCARCELY PENETRATE (THROUGH->TO) THE DENSE CURTAIN WHICH (HAS->IS) DROPPED OVER THE THEATRE ON WHICH THE BATTLE OF THE ELEMENTS IS ABOUT TO BE WAGED +260-123288-0004-236: THE AIR IS HEAVY THE SEA IS CALM +260-123288-0005-237: FROM TIME TO TIME A FLEECY TUFT OF (MIST->MISTS) WITH YET SOME GLEAMING LIGHT LEFT UPON IT DROPS DOWN UPON THE DENSE FLOOR OF GREY AND LOSES ITSELF IN THE OPAQUE AND IMPENETRABLE MASS +260-123288-0006-238: THE ATMOSPHERE (IS->AS) EVIDENTLY CHARGED (AND->IN) SURCHARGED WITH ELECTRICITY +260-123288-0007-239: THE WIND NEVER LULLS BUT TO ACQUIRE INCREASED STRENGTH THE VAST BANK OF HEAVY CLOUDS IS A HUGE RESERVOIR OF FEARFUL WINDY GUSTS AND RUSHING STORMS +260-123288-0008-240: THERE'S A HEAVY STORM COMING ON I CRIED POINTING TOWARDS THE HORIZON +260-123288-0009-241: THOSE CLOUDS SEEM AS IF THEY WERE GOING TO CRUSH THE SEA +260-123288-0010-242: ON THE MAST ALREADY I SEE THE LIGHT PLAY OF A (LAMBENT->LAMENT) SAINT (ELMO'S->ABLE'S) FIRE THE OUTSTRETCHED SAIL CATCHES NOT A BREATH OF WIND AND HANGS LIKE A SHEET OF LEAD +260-123288-0011-243: BUT IF WE HAVE NOW CEASED TO ADVANCE WHY DO WE YET LEAVE THAT SAIL LOOSE WHICH AT THE FIRST SHOCK OF (THE->A) TEMPEST MAY CAPSIZE US IN A MOMENT +260-123288-0012-244: THAT WILL BE (*->THE) SAFEST NO NO NEVER +260-123288-0013-245: (THE->THAT) PILED UP (VAPOURS CONDENSE->VAPORS CONTENSED) INTO WATER AND THE AIR PUT INTO VIOLENT ACTION TO SUPPLY THE VACUUM LEFT BY THE CONDENSATION OF THE (MISTS->MIST) ROUSES ITSELF INTO A WHIRLWIND +260-123288-0014-246: HANS STIRS NOT +260-123288-0015-247: FROM THE UNDER SURFACE OF THE CLOUDS THERE ARE CONTINUAL (EMISSIONS->ADMISSIONS) OF LURID LIGHT ELECTRIC MATTER IS IN CONTINUAL EVOLUTION FROM THEIR COMPONENT MOLECULES THE GASEOUS ELEMENTS OF THE AIR NEED TO BE SLAKED WITH MOISTURE FOR INNUMERABLE COLUMNS OF WATER RUSH UPWARDS INTO THE AIR AND FALL BACK AGAIN IN WHITE FOAM +260-123288-0016-248: I REFER TO THE THERMOMETER IT INDICATES THE FIGURE IS OBLITERATED +260-123288-0017-249: IS THE (ATMOSPHERIC->ATMOSPHERE) CONDITION HAVING ONCE REACHED (THIS DENSITY->OSTENSITY) TO BECOME FINAL +260-123288-0018-250: THE RAFT BEARS ON STILL TO THE SOUTH EAST +260-123288-0019-251: AT NOON THE VIOLENCE OF THE STORM REDOUBLES +260-123288-0020-252: EACH OF US IS LASHED TO SOME PART OF THE RAFT +260-123288-0021-253: THE WAVES RISE ABOVE OUR HEADS +260-123288-0022-254: THEY (SEEM->SEEMED) TO BE WE ARE LOST BUT I AM NOT SURE +260-123288-0023-255: HE NODS HIS CONSENT +260-123288-0024-256: THE (FIREBALL->FIRE BALL) HALF OF IT WHITE HALF AZURE BLUE AND THE SIZE OF A TEN INCH SHELL MOVED SLOWLY ABOUT THE RAFT BUT REVOLVING ON ITS OWN AXIS WITH ASTONISHING VELOCITY AS IF (WHIPPED->WHIP) ROUND BY THE FORCE OF THE WHIRLWIND +260-123288-0025-257: HERE IT COMES THERE IT GLIDES NOW IT IS UP THE RAGGED STUMP OF THE MAST THENCE IT LIGHTLY LEAPS ON THE PROVISION BAG DESCENDS WITH A LIGHT BOUND AND JUST SKIMS THE POWDER MAGAZINE HORRIBLE +260-123288-0026-258: WE SHALL BE BLOWN UP BUT NO THE DAZZLING DISK OF MYSTERIOUS LIGHT NIMBLY LEAPS ASIDE IT APPROACHES HANS WHO FIXES HIS BLUE EYE UPON IT STEADILY IT THREATENS THE HEAD OF MY UNCLE WHO FALLS UPON HIS KNEES WITH HIS HEAD DOWN TO AVOID IT +260-123288-0027-259: A SUFFOCATING SMELL OF NITROGEN FILLS THE AIR IT ENTERS THE THROAT IT FILLS THE LUNGS +260-123288-0028-260: WE SUFFER STIFLING PAINS +260-123440-0000-179: AND 
HOW ODD THE DIRECTIONS WILL LOOK +260-123440-0001-180: POOR ALICE +260-123440-0002-181: IT WAS THE WHITE RABBIT RETURNING SPLENDIDLY DRESSED WITH A PAIR OF WHITE KID GLOVES IN ONE HAND AND A LARGE FAN IN THE OTHER HE CAME TROTTING ALONG IN A GREAT HURRY MUTTERING TO HIMSELF AS HE CAME OH THE DUCHESS THE DUCHESS +260-123440-0003-182: OH WON'T SHE BE SAVAGE IF I'VE KEPT HER WAITING +260-123440-0004-183: ALICE TOOK UP THE FAN AND GLOVES AND AS THE HALL WAS VERY HOT SHE KEPT FANNING HERSELF ALL THE TIME SHE WENT ON TALKING DEAR DEAR HOW QUEER EVERYTHING IS TO DAY +260-123440-0005-184: AND YESTERDAY THINGS WENT ON JUST AS USUAL +260-123440-0006-185: I WONDER IF I'VE BEEN CHANGED IN THE NIGHT +260-123440-0007-186: I ALMOST THINK I CAN REMEMBER FEELING (A->*) LITTLE DIFFERENT +260-123440-0008-187: I'LL TRY IF I KNOW ALL THE THINGS I USED TO KNOW +260-123440-0009-188: I SHALL NEVER GET TO TWENTY AT THAT RATE +260-123440-0010-189: HOW CHEERFULLY HE SEEMS TO GRIN HOW NEATLY SPREAD HIS CLAWS AND WELCOME LITTLE FISHES IN WITH GENTLY SMILING JAWS +260-123440-0011-190: NO I'VE MADE UP MY MIND ABOUT IT IF I'M MABEL I'LL STAY DOWN HERE +260-123440-0012-191: IT'LL BE NO USE THEIR PUTTING THEIR HEADS DOWN AND SAYING COME UP AGAIN DEAR +260-123440-0013-192: I AM SO VERY TIRED OF BEING ALL ALONE HERE +260-123440-0014-193: AND I DECLARE IT'S TOO BAD THAT IT IS +260-123440-0015-194: I WISH I HADN'T CRIED SO MUCH SAID ALICE AS SHE SWAM ABOUT TRYING TO FIND HER WAY OUT +260-123440-0016-195: I SHALL BE PUNISHED FOR IT NOW I SUPPOSE BY BEING DROWNED IN MY OWN TEARS +260-123440-0017-196: THAT WILL BE A QUEER THING TO BE SURE +260-123440-0018-197: I AM VERY TIRED OF SWIMMING ABOUT HERE (O->OH) MOUSE +260-123440-0019-198: CRIED ALICE AGAIN FOR THIS TIME THE MOUSE WAS BRISTLING ALL OVER AND SHE FELT CERTAIN IT MUST BE REALLY OFFENDED +260-123440-0020-199: WE WON'T TALK ABOUT HER ANY MORE IF YOU'D RATHER NOT WE INDEED +2830-3979-0000-1120: WE WANT YOU TO HELP US PUBLISH SOME LEADING WORK OF LUTHER'S FOR THE GENERAL AMERICAN MARKET WILL YOU DO IT +2830-3979-0001-1121: THE CONDITION IS THAT I WILL BE PERMITTED TO MAKE LUTHER TALK AMERICAN (STREAMLINE HIM->STREAM LINE HYMN) SO TO SPEAK BECAUSE YOU WILL NEVER GET PEOPLE WHETHER IN OR OUTSIDE THE LUTHERAN CHURCH ACTUALLY TO READ LUTHER UNLESS WE MAKE HIM TALK AS HE WOULD TALK (TODAY->TO DAY) TO AMERICANS +2830-3979-0002-1122: LET US BEGIN WITH THAT HIS COMMENTARY ON (GALATIANS->GALLATIONS) +2830-3979-0003-1123: THE UNDERTAKING WHICH (SEEMED->SEEMS) SO ATTRACTIVE WHEN VIEWED AS A LITERARY TASK PROVED A MOST DIFFICULT ONE AND AT TIMES BECAME OPPRESSIVE +2830-3979-0004-1124: IT WAS WRITTEN IN LATIN +2830-3979-0005-1125: THE WORK HAD TO BE CONDENSED +2830-3979-0006-1126: A WORD SHOULD NOW BE SAID ABOUT THE ORIGIN OF LUTHER'S COMMENTARY (ON GALATIANS->ANGULATIONS) +2830-3979-0007-1127: MUCH LATER WHEN A FRIEND OF HIS WAS PREPARING AN (EDITION->ADDITION) OF ALL HIS LATIN WORKS HE REMARKED TO HIS HOME CIRCLE IF I HAD MY WAY ABOUT IT THEY WOULD REPUBLISH ONLY THOSE OF MY BOOKS WHICH HAVE DOCTRINE MY (GALATIANS->GALLATIONS) FOR INSTANCE +2830-3979-0008-1128: IN OTHER WORDS THESE THREE MEN TOOK DOWN THE LECTURES WHICH LUTHER ADDRESSED TO HIS STUDENTS IN THE COURSE OF (GALATIANS->GALLATIONS) AND (ROERER->ROAR) PREPARED THE MANUSCRIPT FOR THE PRINTER +2830-3979-0009-1129: IT PRESENTS LIKE NO OTHER OF LUTHER'S WRITINGS THE CENTRAL THOUGHT OF CHRISTIANITY THE JUSTIFICATION OF THE SINNER FOR THE SAKE OF CHRIST'S MERITS ALONE +2830-3979-0010-1130: BUT THE ESSENCE OF LUTHER'S LECTURES IS 
THERE +2830-3979-0011-1131: THE LORD WHO HAS GIVEN US POWER TO TEACH AND TO HEAR LET HIM ALSO GIVE US THE POWER TO SERVE AND TO DO LUKE TWO +2830-3979-0012-1132: THE WORD OF OUR GOD SHALL STAND (FOREVER->FOR EVER) +2830-3980-0000-1043: IN EVERY WAY THEY SOUGHT TO UNDERMINE THE AUTHORITY OF SAINT PAUL +2830-3980-0001-1044: THEY SAID TO THE GALATIANS YOU HAVE NO RIGHT TO THINK HIGHLY OF PAUL +2830-3980-0002-1045: HE WAS THE LAST TO TURN TO CHRIST +2830-3980-0003-1046: (PAUL->PAW) CAME LATER (AND IS->IN HIS) BENEATH US +2830-3980-0004-1047: INDEED HE PERSECUTED THE CHURCH OF CHRIST FOR A LONG TIME +2830-3980-0005-1048: DO YOU SUPPOSE THAT GOD FOR THE SAKE OF A FEW LUTHERAN HERETICS WOULD DISOWN HIS ENTIRE CHURCH +2830-3980-0006-1049: AGAINST THESE BOASTING FALSE APOSTLES PAUL BOLDLY DEFENDS HIS APOSTOLIC AUTHORITY (AND->IN) MINISTRY +2830-3980-0007-1050: AS THE AMBASSADOR OF A GOVERNMENT IS HONORED FOR HIS OFFICE AND NOT FOR HIS PRIVATE PERSON SO THE MINISTER OF CHRIST SHOULD EXALT HIS OFFICE IN ORDER TO GAIN AUTHORITY AMONG MEN +2830-3980-0008-1051: (PAUL TAKES->POLITICS) PRIDE IN HIS MINISTRY NOT TO HIS OWN PRAISE BUT TO THE PRAISE OF GOD +2830-3980-0009-1052: PAUL (AN->AND) APOSTLE NOT OF MEN ET CETERA +2830-3980-0010-1053: EITHER HE CALLS MINISTERS THROUGH THE AGENCY OF MEN OR HE CALLS THEM DIRECTLY AS HE CALLED THE PROPHETS AND APOSTLES +2830-3980-0011-1054: PAUL DECLARES THAT THE FALSE APOSTLES WERE CALLED OR SENT NEITHER BY MEN NOR BY MAN +2830-3980-0012-1055: THE MOST THEY COULD CLAIM IS THAT THEY WERE SENT BY OTHERS +2830-3980-0013-1056: HE MENTIONS THE APOSTLES FIRST BECAUSE THEY WERE APPOINTED DIRECTLY BY GOD +2830-3980-0014-1057: THE CALL IS NOT TO BE TAKEN LIGHTLY +2830-3980-0015-1058: FOR A PERSON TO (POSSESS->POSSESSED) KNOWLEDGE IS NOT ENOUGH +2830-3980-0016-1059: IT SPOILS ONE'S BEST WORK +2830-3980-0017-1060: WHEN I WAS A YOUNG MAN I THOUGHT PAUL WAS MAKING TOO MUCH OF HIS CALL +2830-3980-0018-1061: I DID NOT THEN REALIZE THE IMPORTANCE OF THE MINISTRY +2830-3980-0019-1062: I KNEW NOTHING OF THE DOCTRINE OF FAITH BECAUSE WE WERE TAUGHT SOPHISTRY INSTEAD OF CERTAINTY AND NOBODY UNDERSTOOD SPIRITUAL BOASTING +2830-3980-0020-1063: THIS IS NO SINFUL PRIDE IT IS HOLY PRIDE +2830-3980-0021-1064: AND GOD THE FATHER WHO RAISED HIM FROM THE DEAD +2830-3980-0022-1065: THE CLAUSE (SEEMS->SEEMED) SUPERFLUOUS ON FIRST SIGHT +2830-3980-0023-1066: THESE (PERVERTERS->PERVERTIVES) OF THE RIGHTEOUSNESS OF CHRIST RESIST THE FATHER AND THE SON AND THE WORKS OF THEM BOTH +2830-3980-0024-1067: IN THIS WHOLE EPISTLE PAUL TREATS OF THE RESURRECTION OF CHRIST +2830-3980-0025-1068: BY HIS RESURRECTION CHRIST WON THE VICTORY OVER LAW SIN FLESH WORLD DEVIL DEATH HELL AND EVERY EVIL +2830-3980-0026-1069: (VERSE TWO->FIRST TOO) +2830-3980-0027-1070: AND ALL THE BRETHREN WHICH ARE WITH ME +2830-3980-0028-1071: THIS SHOULD GO FAR IN SHUTTING THE MOUTHS OF THE FALSE APOSTLES +2830-3980-0029-1072: ALTHOUGH THE BRETHREN WITH ME ARE NOT APOSTLES LIKE MYSELF YET THEY ARE ALL OF ONE MIND WITH ME THINK WRITE AND TEACH AS I DO +2830-3980-0030-1073: THEY DO NOT GO WHERE THE ENEMIES OF THE GOSPEL PREDOMINATE THEY GO WHERE THE CHRISTIANS ARE +2830-3980-0031-1074: WHY DO THEY NOT INVADE THE CATHOLIC PROVINCES AND PREACH THEIR DOCTRINE TO GODLESS PRINCES BISHOPS AND DOCTORS AS WE HAVE DONE BY THE HELP OF GOD +2830-3980-0032-1075: WE LOOK FOR THAT REWARD WHICH (EYE->I) HATH NOT SEEN NOR EAR HEARD NEITHER HATH ENTERED INTO THE HEART OF MAN +2830-3980-0033-1076: NOT ALL THE (GALATIANS->GALLATIONS) HAD BECOME PERVERTED 
+2830-3980-0034-1077: THESE MEANS CANNOT BE CONTAMINATED +2830-3980-0035-1078: THEY (REMAIN->REMAINED) DIVINE REGARDLESS OF MEN'S OPINION +2830-3980-0036-1079: WHEREVER THE MEANS OF GRACE ARE FOUND THERE IS THE HOLY CHURCH EVEN THOUGH ANTICHRIST REIGNS THERE +2830-3980-0037-1080: SO MUCH FOR THE TITLE OF THE EPISTLE NOW FOLLOWS THE GREETING OF THE APOSTLE VERSE THREE +2830-3980-0038-1081: GRACE BE TO YOU (AND->IN) PEACE FROM GOD THE FATHER AND FROM OUR LORD JESUS CHRIST +2830-3980-0039-1082: THE TERMS OF GRACE AND PEACE ARE COMMON TERMS WITH PAUL AND ARE NOW PRETTY WELL UNDERSTOOD +2830-3980-0040-1083: THE GREETING OF THE APOSTLE IS REFRESHING +2830-3980-0041-1084: GRACE INVOLVES THE REMISSION OF SINS PEACE AND A HAPPY CONSCIENCE +2830-3980-0042-1085: THE WORLD (BRANDS->BRINGS) THIS A PERNICIOUS DOCTRINE +2830-3980-0043-1086: EXPERIENCE PROVES THIS +2830-3980-0044-1087: HOWEVER THE GRACE AND PEACE OF GOD WILL +2830-3980-0045-1088: MEN SHOULD NOT SPECULATE ABOUT THE NATURE OF GOD +2830-3980-0046-1089: WAS IT NOT ENOUGH TO SAY FROM GOD THE FATHER +2830-3980-0047-1090: TO DO SO IS TO LOSE GOD ALTOGETHER BECAUSE GOD BECOMES INTOLERABLE WHEN WE SEEK TO MEASURE (AND TO->INTO) COMPREHEND HIS INFINITE MAJESTY +2830-3980-0048-1091: HE CAME DOWN TO EARTH LIVED AMONG MEN SUFFERED WAS CRUCIFIED AND THEN HE DIED STANDING CLEARLY BEFORE US SO THAT OUR HEARTS AND EYES MAY FASTEN UPON HIM +2830-3980-0049-1092: EMBRACE HIM AND FORGET ABOUT THE NATURE OF GOD +2830-3980-0050-1093: DID NOT CHRIST HIMSELF SAY I AM THE WAY AND THE TRUTH AND THE LIFE NO MAN COMETH UNTO THE FATHER BUT BY ME +2830-3980-0051-1094: WHEN YOU ARGUE ABOUT THE NATURE OF GOD APART FROM THE QUESTION OF JUSTIFICATION YOU MAY BE AS PROFOUND AS YOU LIKE +2830-3980-0052-1095: WE ARE TO HEAR CHRIST WHO HAS BEEN APPOINTED BY THE FATHER AS OUR DIVINE TEACHER +2830-3980-0053-1096: AT THE SAME TIME PAUL CONFIRMS OUR CREED THAT CHRIST IS VERY GOD +2830-3980-0054-1097: THAT CHRIST IS VERY GOD IS APPARENT IN THAT PAUL ASCRIBES TO HIM DIVINE POWERS EQUALLY WITH THE FATHER AS FOR INSTANCE THE POWER (TO->DOES) DISPENSE GRACE AND PEACE +2830-3980-0055-1098: TO BESTOW PEACE AND GRACE LIES IN THE PROVINCE OF GOD WHO ALONE CAN CREATE THESE BLESSINGS THE ANGELS CANNOT +2830-3980-0056-1099: OTHERWISE PAUL SHOULD HAVE WRITTEN GRACE FROM GOD THE FATHER AND PEACE FROM OUR LORD JESUS CHRIST +2830-3980-0057-1100: THE ARIANS TOOK CHRIST FOR A NOBLE AND PERFECT CREATURE SUPERIOR EVEN TO THE ANGELS BECAUSE BY HIM GOD CREATED HEAVEN AND EARTH +2830-3980-0058-1101: MOHAMMED ALSO SPEAKS HIGHLY OF CHRIST +2830-3980-0059-1102: PAUL STICKS TO HIS THEME +2830-3980-0060-1103: HE NEVER LOSES SIGHT OF THE PURPOSE OF HIS EPISTLE +2830-3980-0061-1104: NOT GOLD OR SILVER OR (PASCHAL->PASSION) LAMBS OR AN ANGEL BUT HIMSELF WHAT FOR +2830-3980-0062-1105: NOT FOR A CROWN OR A KINGDOM OR (OUR->A) GOODNESS BUT FOR OUR SINS +2830-3980-0063-1106: UNDERSCORE THESE WORDS FOR THEY ARE FULL OF COMFORT FOR SORE CONSCIENCES +2830-3980-0064-1107: HOW MAY WE OBTAIN REMISSION OF OUR SINS +2830-3980-0065-1108: PAUL ANSWERS THE MAN WHO IS NAMED JESUS CHRIST AND THE SON OF GOD GAVE HIMSELF FOR OUR SINS +2830-3980-0066-1109: SINCE CHRIST WAS GIVEN FOR OUR SINS IT STANDS TO REASON THAT THEY CANNOT BE PUT AWAY BY OUR OWN EFFORTS +2830-3980-0067-1110: THIS SENTENCE ALSO DEFINES OUR SINS AS GREAT SO GREAT IN FACT THAT THE WHOLE WORLD COULD NOT MAKE AMENDS FOR A SINGLE SIN +2830-3980-0068-1111: THE GREATNESS OF THE RANSOM CHRIST THE SON OF GOD INDICATES THIS +2830-3980-0069-1112: THE VICIOUS CHARACTER OF 
SIN IS BROUGHT OUT BY THE WORDS WHO GAVE HIMSELF FOR OUR SINS +2830-3980-0070-1113: BUT WE ARE CARELESS WE MAKE LIGHT OF SIN +2830-3980-0071-1114: WE THINK THAT BY SOME LITTLE WORK OR MERIT WE CAN DISMISS (SIN->IN) +2830-3980-0072-1115: THIS PASSAGE THEN BEARS OUT THE FACT THAT ALL MEN ARE SOLD UNDER SIN +2830-3980-0073-1116: THIS ATTITUDE SPRINGS FROM A FALSE CONCEPTION OF SIN THE CONCEPTION THAT SIN IS A SMALL MATTER EASILY (TAKEN->TAKING) CARE OF BY GOOD WORKS THAT WE MUST PRESENT OURSELVES (UNTO->INTO) GOD WITH (A->*) GOOD CONSCIENCE THAT WE MUST FEEL NO SIN BEFORE WE MAY FEEL THAT CHRIST WAS GIVEN FOR OUR SINS +2830-3980-0074-1117: (THIS ATTITUDE->THE SATITUDE) IS UNIVERSAL (AND->IN) PARTICULARLY DEVELOPED IN THOSE WHO CONSIDER THEMSELVES BETTER THAN OTHERS +2830-3980-0075-1118: BUT THE REAL SIGNIFICANCE AND COMFORT OF THE WORDS FOR OUR SINS IS LOST UPON THEM +2830-3980-0076-1119: ON THE OTHER HAND WE ARE NOT TO REGARD THEM AS SO TERRIBLE THAT WE MUST DESPAIR +2961-960-0000-497: HE PASSES ABRUPTLY FROM PERSONS TO IDEAS AND NUMBERS AND FROM IDEAS AND NUMBERS TO PERSONS FROM THE HEAVENS TO MAN FROM ASTRONOMY TO PHYSIOLOGY HE CONFUSES OR RATHER DOES NOT DISTINGUISH SUBJECT AND OBJECT FIRST AND FINAL CAUSES AND IS DREAMING OF GEOMETRICAL FIGURES LOST IN A FLUX OF SENSE +2961-960-0001-498: THE INFLUENCE (WITH->WHICH) THE TIMAEUS HAS EXERCISED UPON POSTERITY IS DUE PARTLY TO A MISUNDERSTANDING +2961-960-0002-499: IN THE SUPPOSED DEPTHS OF THIS DIALOGUE THE NEO (PLATONISTS->PLATINISTS) FOUND HIDDEN MEANINGS (AND->IN) CONNECTIONS WITH THE JEWISH AND CHRISTIAN SCRIPTURES AND OUT OF THEM THEY ELICITED DOCTRINES QUITE AT VARIANCE WITH THE SPIRIT OF PLATO +2961-960-0003-500: THEY WERE ABSORBED IN HIS THEOLOGY AND WERE UNDER THE DOMINION OF HIS NAME WHILE THAT WHICH WAS TRULY GREAT AND TRULY (CHARACTERISTIC->CORRECTORISTIC) IN HIM HIS EFFORT TO REALIZE AND CONNECT ABSTRACTIONS WAS NOT UNDERSTOOD BY THEM AT ALL +2961-960-0004-501: THERE IS NO DANGER OF THE MODERN (COMMENTATORS->COMMON TEACHERS) ON THE (TIMAEUS->TIMIRAS) FALLING INTO THE ABSURDITIES OF THE (NEO PLATONISTS->NEW PLATANISTS) +2961-960-0005-502: IN THE PRESENT DAY WE ARE WELL AWARE THAT AN ANCIENT PHILOSOPHER IS TO BE INTERPRETED FROM HIMSELF AND BY THE CONTEMPORARY HISTORY OF THOUGHT +2961-960-0006-503: THE FANCIES OF THE (NEO PLATONISTS->NEW PLATANISTS) ARE ONLY INTERESTING TO US BECAUSE THEY EXHIBIT A PHASE OF THE HUMAN MIND WHICH PREVAILED WIDELY IN THE FIRST CENTURIES OF THE CHRISTIAN ERA AND IS NOT WHOLLY EXTINCT IN OUR OWN DAY +2961-960-0007-504: BUT THEY HAVE NOTHING TO DO WITH THE INTERPRETATION OF PLATO AND IN SPIRIT THEY ARE OPPOSED TO HIM +2961-960-0008-505: WE DO NOT KNOW HOW PLATO WOULD HAVE ARRANGED HIS OWN DIALOGUES OR WHETHER THE THOUGHT OF ARRANGING ANY OF THEM BESIDES THE (TWO TRILOGIES->TUTRILOGIES) WHICH HE HAS EXPRESSLY CONNECTED WAS EVER PRESENT TO HIS MIND +2961-960-0009-506: THE DIALOGUE IS PRIMARILY CONCERNED WITH THE ANIMAL CREATION INCLUDING UNDER THIS TERM THE HEAVENLY BODIES AND WITH MAN ONLY AS ONE AMONG THE ANIMALS +2961-960-0010-507: BUT HE HAS NOT AS YET (DEFINED->THE FIND) THIS INTERMEDIATE TERRITORY WHICH LIES SOMEWHERE BETWEEN MEDICINE AND MATHEMATICS AND HE WOULD HAVE FELT THAT THERE WAS AS GREAT AN IMPIETY IN RANKING THEORIES OF PHYSICS FIRST IN THE ORDER OF KNOWLEDGE AS IN PLACING THE BODY BEFORE THE SOUL +2961-960-0011-508: WITH (HERACLEITUS->HERACLITUS) HE ACKNOWLEDGES THE PERPETUAL FLUX LIKE (ANAXAGORAS->AN EXAGGERUS) HE ASSERTS THE PREDOMINANCE OF MIND ALTHOUGH ADMITTING AN ELEMENT OF 
NECESSITY WHICH REASON IS INCAPABLE OF SUBDUING LIKE THE (PYTHAGOREANS->PYTHAGORIANS) HE SUPPOSES THE MYSTERY OF THE WORLD TO BE CONTAINED IN NUMBER +2961-960-0012-509: MANY IF NOT ALL THE ELEMENTS OF THE (PRE SOCRATIC->PRIESTHOO CRADIC) PHILOSOPHY ARE INCLUDED IN THE (TIMAEUS->TIMIUS) +2961-960-0013-510: IT IS PROBABLE THAT THE RELATION OF THE IDEAS TO GOD OR OF GOD TO THE WORLD WAS DIFFERENTLY CONCEIVED BY HIM AT DIFFERENT TIMES OF HIS LIFE +2961-960-0014-511: THE IDEAS ALSO REMAIN BUT THEY HAVE BECOME TYPES IN NATURE FORMS OF MEN ANIMALS BIRDS FISHES +2961-960-0015-512: THE STYLE AND PLAN OF THE (TIMAEUS->TENEAS) DIFFER GREATLY FROM THAT OF ANY OTHER OF THE PLATONIC DIALOGUES +2961-960-0016-513: BUT PLATO HAS NOT THE SAME (MASTERY->MYSTERY) OVER HIS INSTRUMENT WHICH HE EXHIBITS IN THE (PHAEDRUS->FEATURES) OR (SYMPOSIUM->SIMPOSE HIM) +2961-960-0017-514: NOTHING CAN EXCEED THE BEAUTY OR ART OF (THE->*) INTRODUCTION IN WHICH (HE IS->HIS) USING WORDS AFTER HIS ACCUSTOMED MANNER +2961-960-0018-515: BUT IN THE REST OF THE WORK THE POWER OF LANGUAGE SEEMS TO FAIL HIM AND THE DRAMATIC FORM IS WHOLLY GIVEN UP +2961-960-0019-516: HE COULD WRITE IN (ONE->ONE'S) STYLE BUT NOT IN ANOTHER (AND->*) THE GREEK LANGUAGE HAD NOT AS YET BEEN FASHIONED BY ANY POET OR PHILOSOPHER TO DESCRIBE PHYSICAL PHENOMENA +2961-960-0020-517: AND HENCE WE FIND THE SAME SORT OF CLUMSINESS IN THE (TIMAEUS->TIMAIRS) OF PLATO WHICH CHARACTERIZES THE PHILOSOPHICAL POEM OF LUCRETIUS +2961-960-0021-518: THERE IS A WANT OF FLOW AND OFTEN A DEFECT OF RHYTHM THE MEANING IS SOMETIMES OBSCURE AND THERE IS A GREATER USE OF APPOSITION (AND->IN) MORE OF REPETITION THAN OCCURS IN PLATO'S EARLIER WRITINGS +2961-960-0022-519: PLATO HAD NOT THE COMMAND OF HIS MATERIALS WHICH WOULD HAVE ENABLED HIM TO PRODUCE A PERFECT WORK OF ART +2961-961-0000-520: SOCRATES BEGINS (THE TIMAEUS->TO TEARS) WITH A SUMMARY OF THE REPUBLIC +2961-961-0001-521: AND NOW HE DESIRES TO SEE THE IDEAL STATE SET IN MOTION HE WOULD LIKE TO KNOW HOW SHE BEHAVED IN SOME GREAT STRUGGLE +2961-961-0002-522: AND THEREFORE TO YOU I TURN (TIMAEUS->TO ME AS) CITIZEN OF (LOCRIS->LOCHRIS) WHO ARE AT ONCE A PHILOSOPHER (AND->IN) A STATESMAN AND TO YOU (CRITIAS->CRITUS) WHOM ALL ATHENIANS KNOW TO BE SIMILARLY ACCOMPLISHED AND TO HERMOCRATES (WHO IS->WHOSE) ALSO FITTED BY NATURE AND EDUCATION TO SHARE IN OUR DISCOURSE +2961-961-0003-523: I WILL IF (TIMAEUS APPROVES->TO ME AS IT PROVES) I APPROVE +2961-961-0004-524: LISTEN THEN SOCRATES TO A TALE OF (SOLON'S->SILENCE) WHO BEING THE FRIEND OF (DROPIDAS MY->TROPIDAS BY) GREAT GRANDFATHER TOLD IT TO MY GRANDFATHER (CRITIAS->CRITIUS) AND HE TOLD ME +2961-961-0005-525: SOME POEMS OF (SOLON->SOLEMN) WERE RECITED BY THE BOYS +2961-961-0006-526: AND WHAT WAS THE SUBJECT OF THE POEM SAID THE PERSON WHO MADE THE REMARK +2961-961-0007-527: THE SUBJECT WAS A VERY NOBLE ONE HE DESCRIBED THE MOST FAMOUS ACTION IN WHICH THE ATHENIAN PEOPLE WERE EVER ENGAGED +2961-961-0008-528: BUT THE MEMORY OF THEIR EXPLOITS (HAS->HAD) PASSED AWAY OWING TO THE LAPSE OF TIME AND THE EXTINCTION OF THE ACTORS +2961-961-0009-529: TELL US SAID THE OTHER THE WHOLE STORY AND WHERE SOLON HEARD THE STORY +2961-961-0010-530: BUT IN EGYPT THE TRADITIONS OF OUR OWN AND OTHER LANDS ARE BY US REGISTERED FOR EVER IN OUR TEMPLES +2961-961-0011-531: THE GENEALOGIES WHICH YOU HAVE RECITED TO US OUT OF YOUR OWN (ANNALS SOLON->ANNAL SOLEMN) ARE A MERE CHILDREN'S STORY +2961-961-0012-532: FOR IN THE TIMES BEFORE THE GREAT FLOOD ATHENS WAS THE GREATEST AND BEST OF CITIES AND DID THE 
NOBLEST DEEDS AND HAD THE BEST CONSTITUTION OF ANY UNDER THE FACE OF HEAVEN +2961-961-0013-533: (SOLON->SOLEMN) MARVELLED AND DESIRED TO BE INFORMED OF THE PARTICULARS +2961-961-0014-534: NINE THOUSAND YEARS HAVE ELAPSED SINCE SHE (FOUNDED->FOUND IT) YOURS AND EIGHT THOUSAND SINCE (SHE FOUNDED->YOU FOUND IT) OURS AS OUR ANNALS RECORD +2961-961-0015-535: MANY LAWS EXIST AMONG US WHICH ARE THE COUNTERPART OF YOURS AS THEY WERE IN THE OLDEN TIME +2961-961-0016-536: I WILL BRIEFLY DESCRIBE (THEM->HIM) TO YOU AND YOU SHALL READ THE ACCOUNT OF THEM AT YOUR LEISURE IN THE SACRED REGISTERS +2961-961-0017-537: OBSERVE AGAIN WHAT CARE THE LAW TOOK IN THE PURSUIT OF WISDOM SEARCHING OUT THE DEEP THINGS OF THE WORLD AND APPLYING THEM TO THE USE OF (MAN->MEN) +2961-961-0018-538: THE MOST FAMOUS OF THEM ALL WAS THE OVERTHROW OF THE ISLAND OF ATLANTIS +2961-961-0019-539: FOR AT THE PERIL OF HER OWN EXISTENCE AND WHEN THE (OTHER->OTTER) HELLENES HAD DESERTED HER SHE REPELLED THE INVADER AND OF HER OWN ACCORD GAVE LIBERTY TO ALL THE NATIONS WITHIN THE PILLARS +2961-961-0020-540: THIS IS THE EXPLANATION OF THE SHALLOWS WHICH ARE FOUND IN THAT PART OF THE ATLANTIC OCEAN +2961-961-0021-541: BUT I WOULD NOT SPEAK AT THE TIME BECAUSE I WANTED TO REFRESH MY MEMORY +2961-961-0022-542: THEN (NOW->THOU) LET ME EXPLAIN TO YOU THE ORDER OF OUR ENTERTAINMENT FIRST TIMAEUS WHO IS A NATURAL PHILOSOPHER WILL SPEAK OF THE ORIGIN OF THE WORLD GOING DOWN TO THE CREATION OF (MAN->MEN) AND THEN I SHALL RECEIVE THE MEN WHOM HE HAS CREATED AND SOME OF WHOM WILL HAVE BEEN EDUCATED BY YOU AND INTRODUCE THEM TO YOU AS THE LOST ATHENIAN CITIZENS OF WHOM THE EGYPTIAN (RECORD->RECORDS) SPOKE +3570-5694-0000-2433: BUT ALREADY AT A POINT IN ECONOMIC EVOLUTION FAR (ANTEDATING->ANTETING) THE EMERGENCE OF THE LADY (SPECIALISED->SPECIALIZED) CONSUMPTION OF GOODS AS AN EVIDENCE OF PECUNIARY STRENGTH HAD BEGUN TO WORK OUT IN A MORE OR LESS (ELABORATE->CELEBRATE) SYSTEM +3570-5694-0001-2434: THE UTILITY OF CONSUMPTION AS AN EVIDENCE OF WEALTH IS TO BE CLASSED AS A DERIVATIVE GROWTH +3570-5694-0002-2435: SUCH CONSUMPTION AS FALLS (TO->THROUGH) THE WOMEN IS MERELY INCIDENTAL TO THEIR WORK IT IS A MEANS TO THEIR CONTINUED (LABOUR->LABOR) AND NOT (A->TO) CONSUMPTION DIRECTED TO THEIR OWN COMFORT AND (FULNESS->FULLNESS) OF LIFE +3570-5694-0003-2436: WITH A FURTHER ADVANCE (IN->AND) CULTURE THIS (TABU->TABOU) MAY (CHANGE->CHANGED) INTO SIMPLE CUSTOM OF A MORE OR LESS RIGOROUS CHARACTER BUT WHATEVER BE THE THEORETICAL BASIS OF THE DISTINCTION WHICH IS MAINTAINED WHETHER IT BE (*->AT) A (TABU->BOOT) OR A LARGER CONVENTIONALITY THE FEATURES OF THE CONVENTIONAL SCHEME OF CONSUMPTION DO NOT CHANGE EASILY +3570-5694-0004-2437: IN THE NATURE OF THINGS LUXURIES AND THE COMFORTS OF LIFE BELONG TO THE LEISURE CLASS +3570-5694-0005-2438: UNDER THE (TABU->TABOO) CERTAIN VICTUALS AND MORE PARTICULARLY CERTAIN BEVERAGES ARE STRICTLY RESERVED FOR THE USE OF THE SUPERIOR CLASS +3570-5694-0006-2439: DRUNKENNESS AND THE OTHER PATHOLOGICAL CONSEQUENCES OF THE FREE USE OF STIMULANTS THEREFORE TEND IN THEIR TURN TO BECOME HONORIFIC AS BEING A MARK AT THE SECOND REMOVE OF THE SUPERIOR STATUS OF THOSE WHO ARE ABLE TO AFFORD THE INDULGENCE +3570-5694-0007-2440: IT HAS EVEN HAPPENED THAT THE NAME FOR CERTAIN DISEASED CONDITIONS OF THE BODY ARISING FROM SUCH AN ORIGIN HAS PASSED INTO EVERYDAY SPEECH AS A SYNONYM FOR NOBLE OR GENTLE +3570-5694-0008-2441: THE CONSUMPTION OF LUXURIES IN THE TRUE SENSE IS A CONSUMPTION DIRECTED TO THE COMFORT OF THE CONSUMER HIMSELF AND IS 
THEREFORE A MARK OF THE MASTER +3570-5694-0009-2442: WITH MANY QUALIFICATIONS WITH MORE QUALIFICATIONS AS THE PATRIARCHAL TRADITION HAS GRADUALLY WEAKENED THE GENERAL RULE IS FELT TO BE RIGHT AND BINDING THAT WOMEN SHOULD CONSUME ONLY FOR THE BENEFIT OF THEIR MASTERS +3570-5694-0010-2443: THE OBJECTION OF COURSE PRESENTS ITSELF THAT EXPENDITURE ON WOMEN'S DRESS AND HOUSEHOLD PARAPHERNALIA IS AN OBVIOUS EXCEPTION TO THIS RULE BUT IT WILL APPEAR IN THE SEQUEL THAT THIS EXCEPTION IS MUCH MORE OBVIOUS THAN SUBSTANTIAL +3570-5694-0011-2444: THE CUSTOM OF FESTIVE GATHERINGS PROBABLY ORIGINATED IN MOTIVES OF CONVIVIALITY AND RELIGION THESE MOTIVES ARE ALSO PRESENT IN THE LATER DEVELOPMENT (BUT->THAT) THEY DO NOT CONTINUE TO BE THE SOLE MOTIVES +3570-5694-0012-2445: THERE IS A MORE OR LESS ELABORATE SYSTEM OF RANK AND (GRADES->GRATES) +3570-5694-0013-2446: THIS DIFFERENTIATION IS FURTHERED BY THE INHERITANCE OF WEALTH AND THE CONSEQUENT INHERITANCE OF GENTILITY +3570-5694-0014-2447: MANY OF THESE (AFFILIATED->ARE FILIATED) GENTLEMEN OF LEISURE ARE AT THE SAME TIME (LESSER MEN->LESS AMEN) OF SUBSTANCE IN THEIR OWN RIGHT SO THAT SOME OF THEM ARE SCARCELY AT ALL OTHERS ONLY PARTIALLY TO BE RATED AS VICARIOUS CONSUMERS +3570-5694-0015-2448: SO MANY OF THEM HOWEVER AS MAKE UP THE RETAINER AND HANGERS ON OF THE PATRON MAY BE CLASSED AS VICARIOUS CONSUMER WITHOUT QUALIFICATION +3570-5694-0016-2449: MANY OF THESE AGAIN AND ALSO MANY OF THE OTHER ARISTOCRACY OF LESS DEGREE HAVE IN TURN ATTACHED TO THEIR PERSONS A MORE OR LESS COMPREHENSIVE GROUP OF VICARIOUS CONSUMER IN THE PERSONS OF THEIR WIVES AND CHILDREN THEIR SERVANTS RETAINERS ET CETERA +3570-5694-0017-2450: THE WEARING OF UNIFORMS (OR->ARE) LIVERIES IMPLIES A CONSIDERABLE DEGREE OF DEPENDENCE AND MAY EVEN BE SAID TO BE A MARK OF SERVITUDE REAL OR OSTENSIBLE +3570-5694-0018-2451: THE WEARERS OF UNIFORMS AND LIVERIES MAY BE ROUGHLY DIVIDED INTO TWO CLASSES THE FREE AND THE SERVILE OR THE NOBLE AND THE IGNOBLE +3570-5694-0019-2452: BUT THE GENERAL DISTINCTION IS NOT ON THAT ACCOUNT TO BE OVERLOOKED +3570-5694-0020-2453: SO THOSE (OFFICES->OFFICERS) WHICH ARE BY RIGHT THE PROPER EMPLOYMENT OF THE LEISURE CLASS ARE NOBLE SUCH AS GOVERNMENT FIGHTING HUNTING THE CARE OF ARMS AND (ACCOUTREMENTS->ACCUTMENTS) AND THE LIKE IN SHORT THOSE WHICH MAY BE CLASSED AS OSTENSIBLY PREDATORY EMPLOYMENTS +3570-5694-0021-2454: WHENEVER AS IN THESE CASES THE MENIAL SERVICE IN QUESTION HAS TO DO DIRECTLY WITH (THE->A) PRIMARY LEISURE EMPLOYMENTS OF FIGHTING AND HUNTING IT EASILY ACQUIRES A REFLECTED HONORIFIC CHARACTER +3570-5694-0022-2455: THE LIVERY BECOMES OBNOXIOUS TO NEARLY ALL WHO ARE REQUIRED TO WEAR IT +3570-5695-0000-2456: IN A GENERAL WAY THOUGH NOT WHOLLY NOR CONSISTENTLY THESE TWO GROUPS COINCIDE +3570-5695-0001-2457: THE DEPENDENT WHO WAS FIRST DELEGATED FOR THESE DUTIES WAS THE WIFE OR THE CHIEF WIFE AND AS WOULD BE EXPECTED IN (THE->A) LATER DEVELOPMENT OF THE INSTITUTION WHEN THE NUMBER OF PERSONS BY WHOM THESE DUTIES ARE CUSTOMARILY PERFORMED GRADUALLY NARROWS THE WIFE REMAINS THE LAST +3570-5695-0002-2458: BUT AS WE DESCEND THE SOCIAL SCALE THE POINT IS PRESENTLY REACHED WHERE THE DUTIES OF (VICARIOUS->VIPEROUS) LEISURE AND CONSUMPTION DEVOLVE UPON THE WIFE ALONE +3570-5695-0003-2459: IN THE COMMUNITIES OF THE WESTERN CULTURE THIS POINT IS AT PRESENT FOUND AMONG THE LOWER MIDDLE CLASS +3570-5695-0004-2460: IF BEAUTY OR COMFORT IS ACHIEVED AND IT IS A MORE OR LESS FORTUITOUS CIRCUMSTANCE IF THEY ARE THEY MUST BE ACHIEVED BY MEANS AND METHODS THAT COMMEND 
THEMSELVES TO THE GREAT ECONOMIC LAW OF WASTED EFFORT +3570-5695-0005-2461: THE MAN OF THE HOUSEHOLD ALSO CAN DO SOMETHING IN THIS DIRECTION AND INDEED HE COMMONLY DOES BUT WITH A STILL LOWER DESCENT INTO THE LEVELS OF INDIGENCE ALONG THE MARGIN OF THE SLUMS THE MAN AND PRESENTLY ALSO THE CHILDREN VIRTUALLY CEASE TO CONSUME VALUABLE GOODS FOR APPEARANCES AND THE WOMAN REMAINS VIRTUALLY THE SOLE EXPONENT OF THE HOUSEHOLD'S PECUNIARY DECENCY +3570-5695-0006-2462: VERY MUCH OF SQUALOR AND DISCOMFORT WILL BE ENDURED BEFORE THE LAST TRINKET OR THE LAST (PRETENSE->PRETENCE) OF PECUNIARY (DECENCY IS->DECENCIES) PUT AWAY +3570-5695-0007-2463: THERE IS NO CLASS (AND->IN) NO COUNTRY THAT HAS YIELDED SO ABJECTLY BEFORE THE PRESSURE OF PHYSICAL WANT AS TO DENY THEMSELVES ALL GRATIFICATION OF THIS HIGHER OR SPIRITUAL NEED +3570-5695-0008-2464: THE QUESTION IS WHICH OF THE TWO METHODS WILL MOST EFFECTIVELY REACH THE PERSONS WHOSE CONVICTIONS IT IS DESIRED TO (AFFECT->EFFECT) +3570-5695-0009-2465: EACH WILL THEREFORE SERVE ABOUT EQUALLY WELL DURING THE EARLIER STAGES OF SOCIAL GROWTH +3570-5695-0010-2466: THE MODERN ORGANIZATION OF INDUSTRY WORKS IN THE SAME DIRECTION ALSO BY ANOTHER LINE +3570-5695-0011-2467: IT IS EVIDENT THEREFORE THAT THE PRESENT TREND OF THE DEVELOPMENT IS IN THE DIRECTION OF HEIGHTENING THE UTILITY OF CONSPICUOUS CONSUMPTION AS COMPARED WITH LEISURE +3570-5695-0012-2468: IT IS ALSO NOTICEABLE THAT THE SERVICEABILITY OF CONSUMPTION AS A MEANS OF REPUTE AS WELL AS THE INSISTENCE ON IT AS AN ELEMENT OF DECENCY IS AT ITS BEST IN THOSE PORTIONS OF THE COMMUNITY WHERE THE HUMAN (CONTACT->CONDUCT) OF THE INDIVIDUAL IS WIDEST AND THE MOBILITY OF THE POPULATION IS GREATEST +3570-5695-0013-2469: CONSUMPTION BECOMES A LARGER ELEMENT IN THE STANDARD OF LIVING IN THE CITY THAN IN THE COUNTRY +3570-5695-0014-2470: AMONG THE COUNTRY POPULATION ITS (PLACE IS->PLACES) TO SOME EXTENT TAKEN BY SAVINGS AND HOME COMFORTS KNOWN THROUGH THE MEDIUM OF (NEIGHBORHOOD->NEIGHBOURHOOD) GOSSIP SUFFICIENTLY TO SERVE THE LIKE GENERAL PURPOSE OF PECUNIARY REPUTE +3570-5695-0015-2471: THE RESULT IS A GREAT MOBILITY OF THE LABOR EMPLOYED IN PRINTING PERHAPS GREATER THAN IN ANY OTHER EQUALLY WELL DEFINED AND CONSIDERABLE BODY OF WORKMEN +3570-5696-0000-2472: UNDER THE SIMPLE TEST OF EFFECTIVENESS FOR ADVERTISING WE SHOULD EXPECT TO FIND LEISURE AND THE CONSPICUOUS CONSUMPTION OF GOODS DIVIDING THE FIELD OF PECUNIARY EMULATION PRETTY EVENLY BETWEEN THEM AT THE OUTSET +3570-5696-0001-2473: BUT THE ACTUAL COURSE OF DEVELOPMENT HAS BEEN SOMEWHAT DIFFERENT FROM THIS IDEAL SCHEME LEISURE HELD THE FIRST PLACE AT THE START AND CAME TO (HOLD A->ALL THE) RANK (VERY MUCH->VERIMENT) ABOVE WASTEFUL CONSUMPTION OF GOODS BOTH AS A DIRECT EXPONENT OF WEALTH AND AS AN ELEMENT IN THE STANDARD OF DECENCY DURING THE (QUASI->COURSE I) PEACEABLE CULTURE +3570-5696-0002-2474: OTHER CIRCUMSTANCES PERMITTING THAT INSTINCT DISPOSES MEN TO LOOK WITH (FAVOR->FAVOUR) UPON PRODUCTIVE EFFICIENCY AND ON WHATEVER IS OF HUMAN USE +3570-5696-0003-2475: A RECONCILIATION BETWEEN THE TWO CONFLICTING REQUIREMENTS IS (EFFECTED->AFFECTED) BY (A->*) RESORT TO MAKE BELIEVE (MANY AND->MEN IN) INTRICATE POLITE OBSERVANCES AND SOCIAL DUTIES OF A CEREMONIAL NATURE ARE DEVELOPED MANY ORGANIZATIONS ARE FOUNDED WITH SOME SPECIOUS OBJECT OF AMELIORATION EMBODIED IN THEIR OFFICIAL STYLE AND TITLE THERE IS MUCH COMING AND GOING AND A DEAL OF TALK TO THE END THAT THE (TALKERS MAY->TALK IS) NOT HAVE OCCASION TO REFLECT ON WHAT IS THE EFFECTUAL ECONOMIC VALUE OF THEIR 
TRAFFIC +3570-5696-0004-2476: THE (SALIENT->SAILORED) FEATURES OF THIS DEVELOPMENT OF DOMESTIC SERVICE HAVE ALREADY BEEN INDICATED +3570-5696-0005-2477: THROUGHOUT THE ENTIRE (EVOLUTION->REVOLUTION) OF CONSPICUOUS EXPENDITURE WHETHER OF GOODS OR OF SERVICES OR HUMAN LIFE RUNS THE OBVIOUS IMPLICATION THAT IN ORDER TO EFFECTUALLY MEND THE CONSUMER'S GOOD FAME IT MUST BE AN EXPENDITURE OF SUPERFLUITIES +3570-5696-0006-2478: AS USED IN THE SPEECH OF (EVERYDAY->EVERY DAY) LIFE THE WORD CARRIES AN UNDERTONE OF DEPRECATION +3570-5696-0007-2479: THE USE OF THE WORD WASTE AS A TECHNICAL TERM THEREFORE IMPLIES NO DEPRECATION OF THE MOTIVES OR OF THE ENDS SOUGHT BY THE CONSUMER UNDER THIS CANON OF CONSPICUOUS WASTE +3570-5696-0008-2480: BUT IT IS (ON OTHER->ANOTHER) GROUNDS WORTH NOTING THAT THE TERM (WASTE->WASTES) IN THE LANGUAGE OF EVERYDAY LIFE IMPLIES DEPRECATION OF WHAT IS CHARACTERIZED AS WASTEFUL +3570-5696-0009-2481: IN STRICT ACCURACY NOTHING SHOULD BE INCLUDED UNDER THE HEAD OF CONSPICUOUS WASTE BUT SUCH EXPENDITURE AS IS INCURRED ON THE GROUND OF AN INVIDIOUS PECUNIARY COMPARISON +3570-5696-0010-2482: AN ARTICLE MAY BE USEFUL AND WASTEFUL BOTH AND ITS UTILITY TO THE CONSUMER MAY BE MADE UP OF USE AND WASTE IN THE MOST VARYING PROPORTIONS +3575-170457-0000-369: AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP I LAID MY HEAD SHE FEARED FOR TIME I WAS NOT MADE BUT FOR ETERNITY +3575-170457-0001-370: WHY ARE WE TO BE DENIED EACH OTHER'S SOCIETY +3575-170457-0002-371: WHY ARE WE TO BE DIVIDED +3575-170457-0003-372: SURELY IT MUST BE BECAUSE WE ARE IN DANGER OF LOVING EACH OTHER TOO WELL OF LOSING SIGHT OF THE CREATOR (IN->AND) IDOLATRY OF THE CREATURE +3575-170457-0004-373: WE USED TO DISPUTE ABOUT POLITICS AND RELIGION +3575-170457-0005-374: SHE (A TORY AND->ATTORIAN) CLERGYMAN'S DAUGHTER WAS ALWAYS IN A MINORITY OF ONE IN OUR HOUSE OF VIOLENT (DISSENT->DESCENT) AND RADICALISM +3575-170457-0006-375: HER FEEBLE HEALTH GAVE HER HER YIELDING MANNER FOR SHE COULD NEVER OPPOSE ANY ONE WITHOUT GATHERING UP ALL HER STRENGTH FOR THE STRUGGLE +3575-170457-0007-376: HE SPOKE FRENCH PERFECTLY I HAVE BEEN TOLD WHEN NEED WAS BUT DELIGHTED USUALLY IN TALKING THE BROADEST YORKSHIRE +3575-170457-0008-377: AND SO LIFE AND DEATH HAVE DISPERSED THE CIRCLE OF VIOLENT RADICALS AND DISSENTERS INTO WHICH TWENTY YEARS AGO THE LITTLE QUIET RESOLUTE CLERGYMAN'S DAUGHTER WAS RECEIVED AND BY WHOM SHE WAS TRULY LOVED AND HONOURED +3575-170457-0009-378: JANUARY AND FEBRUARY OF EIGHTEEN THIRTY SEVEN HAD PASSED AWAY AND STILL THERE WAS NO REPLY FROM (SOUTHEY->SALVI) +3575-170457-0010-379: I AM NOT DEPRECIATING IT WHEN I SAY THAT IN THESE TIMES IT IS NOT RARE +3575-170457-0011-380: BUT IT IS NOT WITH A VIEW TO DISTINCTION THAT YOU SHOULD CULTIVATE THIS TALENT IF YOU CONSULT YOUR OWN HAPPINESS +3575-170457-0012-381: YOU WILL SAY THAT A WOMAN HAS NO NEED OF SUCH A CAUTION THERE CAN BE NO PERIL IN IT FOR HER +3575-170457-0013-382: THE MORE SHE IS ENGAGED IN HER PROPER DUTIES THE LESS LEISURE WILL SHE HAVE FOR IT EVEN AS AN ACCOMPLISHMENT AND A RECREATION +3575-170457-0014-383: TO THOSE DUTIES YOU HAVE NOT YET BEEN CALLED AND WHEN YOU ARE YOU WILL BE LESS EAGER FOR CELEBRITY +3575-170457-0015-384: BUT DO NOT SUPPOSE THAT I DISPARAGE THE GIFT WHICH YOU POSSESS NOR THAT I WOULD DISCOURAGE YOU FROM EXERCISING IT I ONLY EXHORT YOU SO TO THINK OF IT AND SO TO USE IT AS TO RENDER IT CONDUCIVE TO YOUR OWN PERMANENT GOOD +3575-170457-0016-385: FAREWELL MADAM +3575-170457-0017-386: THOUGH I MAY BE BUT AN UNGRACIOUS ADVISER YOU WILL ALLOW ME 
THEREFORE TO SUBSCRIBE MYSELF WITH THE BEST WISHES FOR YOUR HAPPINESS HERE AND HEREAFTER YOUR TRUE FRIEND ROBERT (SOUTHEY->SELVEY) +3575-170457-0018-387: SIR MARCH SIXTEENTH +3575-170457-0019-388: I (HAD->HAVE) NOT VENTURED TO HOPE FOR SUCH A REPLY SO (CONSIDERATE->CONSIDER IT) IN ITS TONE SO NOBLE IN ITS SPIRIT +3575-170457-0020-389: I KNOW THE FIRST LETTER I WROTE TO YOU WAS ALL SENSELESS TRASH FROM BEGINNING TO END BUT I AM NOT ALTOGETHER THE IDLE DREAMING BEING IT WOULD SEEM TO DENOTE +3575-170457-0021-390: I THOUGHT IT THEREFORE MY DUTY WHEN I LEFT SCHOOL TO BECOME A GOVERNESS +3575-170457-0022-391: IN THE EVENINGS I CONFESS I DO THINK BUT I NEVER TROUBLE ANY ONE ELSE WITH MY THOUGHTS +3575-170457-0023-392: I CAREFULLY AVOID ANY APPEARANCE OF PREOCCUPATION AND ECCENTRICITY WHICH MIGHT LEAD THOSE I LIVE AMONGST TO SUSPECT THE NATURE OF MY PURSUITS +3575-170457-0024-393: I DON'T ALWAYS SUCCEED FOR SOMETIMES WHEN I'M TEACHING OR SEWING I WOULD RATHER BE READING (OR->A) WRITING BUT I TRY TO DENY MYSELF AND MY FATHER'S APPROBATION AMPLY REWARDED ME FOR THE PRIVATION +3575-170457-0025-394: AGAIN I THANK YOU THIS INCIDENT I SUPPOSE WILL BE RENEWED NO MORE IF I LIVE TO BE AN OLD WOMAN I SHALL REMEMBER IT THIRTY YEARS HENCE AS A BRIGHT DREAM +3575-170457-0026-395: P S PRAY SIR EXCUSE ME FOR WRITING TO YOU A SECOND TIME I COULD NOT HELP WRITING PARTLY TO TELL YOU HOW THANKFUL I AM FOR YOUR KINDNESS AND PARTLY TO LET YOU KNOW THAT YOUR ADVICE SHALL NOT BE WASTED HOWEVER SORROWFULLY AND RELUCTANTLY IT MAY BE AT FIRST FOLLOWED (C B->*) +3575-170457-0027-396: I CANNOT DENY MYSELF THE GRATIFICATION OF INSERTING (SOUTHEY'S->SO THESE) REPLY +3575-170457-0028-397: (KESWICK->KEZWICK) MARCH TWENTY SECOND EIGHTEEN THIRTY SEVEN DEAR (MADAM->MADAME) +3575-170457-0029-398: YOUR LETTER HAS GIVEN ME GREAT PLEASURE AND I SHOULD NOT FORGIVE MYSELF IF I DID NOT TELL YOU SO +3575-170457-0030-399: OF THIS SECOND LETTER ALSO SHE SPOKE AND TOLD ME THAT IT CONTAINED AN INVITATION FOR HER TO GO AND SEE THE POET IF EVER SHE VISITED THE LAKES +3575-170457-0031-400: ON AUGUST TWENTY SEVENTH EIGHTEEN THIRTY SEVEN SHE WRITES +3575-170457-0032-401: COME COME (I AM->I'M) GETTING REALLY TIRED OF YOUR ABSENCE +3575-170457-0033-402: SATURDAY AFTER SATURDAY COMES ROUND AND I CAN HAVE NO HOPE OF HEARING YOUR KNOCK AT THE DOOR AND THEN BEING TOLD THAT (MISS E->MISSY) IS COME OH DEAR +3575-170457-0034-403: IN THIS MONOTONOUS LIFE OF (MINE->MIND) THAT WAS A PLEASANT EVENT +3575-170457-0035-404: I WISH (IT WOULD->YOU WERE) RECUR AGAIN BUT IT WILL TAKE TWO OR THREE INTERVIEWS BEFORE THE STIFFNESS THE ESTRANGEMENT OF THIS LONG SEPARATION WILL WEAR AWAY +3575-170457-0036-405: MY EYES (FILL WITH->FILLED) TEARS WHEN I CONTRAST THE BLISS OF SUCH A STATE BRIGHTENED BY HOPES OF THE FUTURE WITH THE MELANCHOLY STATE I NOW LIVE IN UNCERTAIN THAT I EVER FELT TRUE CONTRITION WANDERING IN THOUGHT (AND DEED->INDEED) LONGING FOR HOLINESS WHICH I SHALL NEVER NEVER OBTAIN SMITTEN (AT->THAT) TIMES TO THE HEART WITH THE CONVICTION THAT GHASTLY CALVINISTIC DOCTRINES ARE TRUE DARKENED (IN->AND) SHORT BY THE VERY SHADOWS OF SPIRITUAL DEATH +3575-170457-0037-406: IF CHRISTIAN PERFECTION BE NECESSARY TO SALVATION I SHALL NEVER BE SAVED MY HEART IS A VERY (HOTBED->HOT BED) FOR SINFUL THOUGHTS AND WHEN I DECIDE ON AN ACTION I SCARCELY REMEMBER TO LOOK TO MY REDEEMER FOR (*->A) DIRECTION +3575-170457-0038-407: AND MEANTIME I KNOW THE GREATNESS OF JEHOVAH I ACKNOWLEDGE THE PERFECTION OF HIS WORD I ADORE THE PURITY OF THE CHRISTIAN FAITH MY THEORY IS RIGHT MY 
PRACTICE HORRIBLY WRONG +3575-170457-0039-408: THE CHRISTMAS HOLIDAYS CAME AND SHE AND ANNE RETURNED TO THE PARSONAGE AND TO THAT HAPPY HOME CIRCLE IN WHICH ALONE THEIR NATURES EXPANDED AMONGST ALL OTHER PEOPLE THEY SHRIVELLED UP MORE OR LESS +3575-170457-0040-409: INDEED THERE WERE ONLY ONE OR TWO STRANGERS WHO COULD BE ADMITTED AMONG THE SISTERS WITHOUT PRODUCING THE SAME RESULT +3575-170457-0041-410: SHE WAS GONE OUT INTO THE VILLAGE ON SOME ERRAND WHEN AS SHE WAS DESCENDING THE STEEP STREET HER FOOT SLIPPED ON THE ICE AND SHE FELL (IT->HE) WAS DARK AND NO ONE SAW HER MISCHANCE TILL AFTER A TIME HER GROANS ATTRACTED THE ATTENTION OF A PASSER BY +3575-170457-0042-411: UNFORTUNATELY THE FRACTURE COULD NOT BE SET TILL SIX O'CLOCK THE NEXT MORNING AS NO SURGEON WAS TO BE HAD BEFORE THAT TIME AND SHE NOW LIES AT (OUR->HER) HOUSE IN A VERY DOUBTFUL AND DANGEROUS STATE +3575-170457-0043-412: HOWEVER REMEMBERING WHAT YOU TOLD ME NAMELY THAT YOU HAD COMMENDED THE MATTER TO A HIGHER DECISION THAN OURS AND THAT YOU WERE RESOLVED TO SUBMIT WITH RESIGNATION TO THAT DECISION WHATEVER IT MIGHT BE I HOLD IT MY DUTY TO YIELD ALSO AND TO BE SILENT (IT->AND) MAY BE ALL FOR THE BEST +3575-170457-0044-413: AFTER THIS DISAPPOINTMENT I NEVER DARE RECKON WITH CERTAINTY ON THE ENJOYMENT OF A PLEASURE AGAIN IT SEEMS AS IF SOME FATALITY STOOD BETWEEN YOU AND ME +3575-170457-0045-414: I AM NOT GOOD ENOUGH FOR YOU AND YOU MUST BE KEPT FROM THE CONTAMINATION OF (TOO->TWO) INTIMATE SOCIETY +3575-170457-0046-415: A GOOD (NEIGHBOUR->NEIGHBOR) OF THE (BRONTES->BRONTEES) A CLEVER INTELLIGENT YORKSHIRE WOMAN WHO KEEPS A (DRUGGIST'S->DRUGGIST) SHOP IN HAWORTH (AND->*) FROM HER OCCUPATION HER EXPERIENCE AND EXCELLENT SENSE HOLDS THE POSITION OF VILLAGE (DOCTRESS->DOCTRIS) AND NURSE AND AS SUCH HAS BEEN A FRIEND IN MANY A TIME OF TRIAL AND SICKNESS AND DEATH IN THE HOUSEHOLDS ROUND TOLD ME A CHARACTERISTIC LITTLE INCIDENT CONNECTED WITH TABBY'S FRACTURED LEG +3575-170457-0047-416: TABBY HAD LIVED WITH THEM FOR TEN OR TWELVE YEARS AND WAS AS CHARLOTTE EXPRESSED IT ONE OF THE FAMILY +3575-170457-0048-417: HE REFUSED AT FIRST TO LISTEN TO THE CAREFUL ADVICE IT WAS REPUGNANT TO HIS LIBERAL NATURE +3575-170457-0049-418: THIS DECISION WAS COMMUNICATED TO THE GIRLS +3575-170457-0050-419: TABBY HAD TENDED THEM IN THEIR CHILDHOOD THEY AND NONE OTHER SHOULD TEND HER IN HER INFIRMITY (AND->IN) AGE +3575-170457-0051-420: AT TEA TIME THEY WERE SAD AND SILENT AND THE MEAL WENT AWAY UNTOUCHED BY ANY OF THE THREE +3575-170457-0052-421: SHE HAD ANOTHER WEIGHT ON HER MIND THIS CHRISTMAS +3575-170457-0053-422: BUT ANNE HAD BEGUN TO SUFFER JUST BEFORE THE HOLIDAYS AND CHARLOTTE WATCHED OVER HER YOUNGER SISTERS WITH (THE->A) JEALOUS VIGILANCE OF SOME WILD CREATURE THAT CHANGES HER VERY NATURE IF DANGER THREATENS HER YOUNG +3575-170457-0054-423: STUNG BY ANXIETY FOR THIS LITTLE SISTER SHE UPBRAIDED MISS W FOR HER FANCIED INDIFFERENCE TO ANNE'S STATE OF HEALTH +3575-170457-0055-424: STILL HER HEART HAD RECEIVED A SHOCK IN THE PERCEPTION OF ANNE'S DELICACY AND ALL THESE HOLIDAYS SHE WATCHED OVER HER WITH THE LONGING FOND ANXIETY WHICH IS SO FULL OF SUDDEN PANGS OF FEAR +3575-170457-0056-425: I DOUBT WHETHER BRANWELL WAS MAINTAINING HIMSELF AT THIS TIME +3729-6852-0000-1660: TO CELEBRATE THE ARRIVAL OF HER SON (SILVIA->SYLVIA) GAVE A SPLENDID SUPPER TO WHICH SHE HAD INVITED ALL HER RELATIVES AND IT WAS A GOOD OPPORTUNITY FOR ME TO MAKE THEIR ACQUAINTANCE +3729-6852-0001-1661: WITHOUT SAYING IT POSITIVELY SHE MADE ME UNDERSTAND THAT BEING HERSELF AN 
ILLUSTRIOUS MEMBER OF THE REPUBLIC OF LETTERS SHE WAS WELL AWARE THAT SHE WAS SPEAKING TO AN INSECT +3729-6852-0002-1662: IN ORDER TO PLEASE HER I SPOKE TO HER OF THE (ABBE CONTI->ABBEY KANTI) AND I HAD OCCASION TO QUOTE TWO LINES OF THAT PROFOUND WRITER +3729-6852-0003-1663: (MADAM->MADAME) CORRECTED ME WITH A PATRONIZING AIR FOR MY PRONUNCIATION OF THE WORD (SCEVRA->SCAFFRA) WHICH MEANS DIVIDED SAYING THAT IT OUGHT TO BE PRONOUNCED (SCEURA->SKURA) AND SHE ADDED THAT I OUGHT TO BE VERY GLAD TO HAVE LEARNED SO MUCH ON THE FIRST DAY OF MY ARRIVAL IN PARIS TELLING ME THAT IT WOULD BE AN IMPORTANT DAY IN MY LIFE +3729-6852-0004-1664: HER FACE WAS AN ENIGMA FOR IT INSPIRED (EVERYONE->EVERY ONE) WITH THE WARMEST SYMPATHY AND YET IF YOU EXAMINED IT ATTENTIVELY THERE WAS NOT ONE BEAUTIFUL FEATURE SHE COULD NOT BE CALLED HANDSOME BUT NO ONE COULD HAVE THOUGHT HER UGLY +3729-6852-0005-1665: (SILVIA->SYLVIA) WAS THE ADORATION OF FRANCE AND HER TALENT WAS THE REAL SUPPORT OF ALL THE COMEDIES WHICH THE GREATEST AUTHORS WROTE FOR HER ESPECIALLY OF THE PLAYS OF (MARIVAUX->MARIVAL) FOR WITHOUT HER HIS COMEDIES WOULD NEVER HAVE GONE TO POSTERITY +3729-6852-0006-1666: (SILVIA->SYLVIA) DID NOT THINK THAT HER GOOD CONDUCT WAS A MERIT FOR SHE KNEW THAT SHE WAS VIRTUOUS ONLY BECAUSE HER SELF LOVE COMPELLED HER TO BE SO AND SHE NEVER EXHIBITED ANY PRIDE OR ASSUMED ANY SUPERIORITY TOWARDS HER THEATRICAL SISTERS ALTHOUGH SATISFIED TO SHINE BY THEIR TALENT OR THEIR BEAUTY THEY CARED LITTLE ABOUT RENDERING THEMSELVES CONSPICUOUS BY THEIR VIRTUE +3729-6852-0007-1667: TWO YEARS BEFORE HER DEATH I SAW HER PERFORM THE CHARACTER OF MARIANNE IN THE COMEDY OF (MARIVAUX->MARAVAUX) AND IN SPITE OF HER AGE AND DECLINING HEALTH THE ILLUSION WAS COMPLETE +3729-6852-0008-1668: SHE WAS (HONOURABLY->HONORABLY) BURIED IN THE CHURCH OF SAINT (SAUVEUR->SEVER) WITHOUT THE SLIGHTEST OPPOSITION FROM THE VENERABLE PRIEST WHO FAR FROM SHARING THE ANTI (CHRISTAIN->CHRISTIAN) INTOLERANCY OF THE CLERGY IN GENERAL SAID THAT HER PROFESSION AS AN ACTRESS HAD NOT HINDERED HER FROM BEING A GOOD CHRISTIAN AND THAT THE EARTH WAS (THE->A) COMMON MOTHER OF ALL HUMAN BEINGS AS JESUS CHRIST HAD BEEN THE SAVIOUR OF ALL MANKIND +3729-6852-0009-1669: YOU WILL FORGIVE ME DEAR READER IF I HAVE MADE YOU ATTEND THE FUNERAL OF (SILVIA->SYLVIA) TEN YEARS BEFORE HER DEATH BELIEVE ME I HAVE NO INTENTION OF PERFORMING A MIRACLE YOU MAY CONSOLE YOURSELF WITH THE IDEA THAT I SHALL SPARE YOU THAT UNPLEASANT TASK WHEN POOR (SILVIA->SYLVIA) DIES +3729-6852-0010-1670: I NEVER HAD ANY FAMILY +3729-6852-0011-1671: I HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I HAVE FORGOTTEN IT SINCE I HAVE BEEN IN SERVICE +3729-6852-0012-1672: I SHALL CALL YOU (ESPRIT->A SPREE) +3729-6852-0013-1673: YOU DO ME A GREAT HONOUR +3729-6852-0014-1674: HERE GO AND GET ME CHANGE FOR A LOUIS I HAVE IT SIR +3729-6852-0015-1675: AT YOUR SERVICE SIR +3729-6852-0016-1676: MADAME QUINSON BESIDES CAN ANSWER YOUR (ENQUIRIES->INQUIRIES) +3729-6852-0017-1677: I SEE A QUANTITY OF CHAIRS FOR HIRE AT THE RATE OF ONE (SOU->SOUS) MEN READING THE NEWSPAPER UNDER THE SHADE OF THE TREES GIRLS AND MEN BREAKFASTING EITHER ALONE OR IN COMPANY WAITERS WHO WERE RAPIDLY GOING UP AND DOWN A NARROW STAIRCASE HIDDEN UNDER THE FOLIAGE +3729-6852-0018-1678: I SIT DOWN AT A SMALL TABLE A WAITER COMES IMMEDIATELY TO (ENQUIRE->INQUIRE) MY WISHES +3729-6852-0019-1679: I TELL HIM TO GIVE ME SOME COFFEE IF IT IS GOOD +3729-6852-0020-1680: THEN TURNING TOWARDS ME HE SAYS THAT I LOOK LIKE A FOREIGNER AND WHEN I SAY THAT I AM AN 
ITALIAN HE BEGINS TO SPEAK TO ME OF THE COURT (OF->*) THE CITY OF THE THEATRES AND AT LAST HE OFFERS TO ACCOMPANY ME EVERYWHERE +3729-6852-0021-1681: I THANK HIM AND TAKE MY LEAVE +3729-6852-0022-1682: I ADDRESS HIM IN ITALIAN AND HE ANSWERS VERY WITTILY BUT HIS WAY OF SPEAKING MAKES ME SMILE AND I TELL HIM WHY +3729-6852-0023-1683: MY REMARK PLEASES HIM BUT I SOON PROVE TO HIM THAT IT IS NOT THE RIGHT WAY TO SPEAK HOWEVER PERFECT MAY HAVE BEEN THE LANGUAGE OF THAT ANCIENT WRITER +3729-6852-0024-1684: I SEE A CROWD IN ONE CORNER OF THE GARDEN EVERYBODY STANDING STILL AND LOOKING UP +3729-6852-0025-1685: IS THERE NOT A MERIDIAN EVERYWHERE +3729-6852-0026-1686: YES BUT THE MERIDIAN OF THE PALAIS ROYAL IS THE MOST EXACT +3729-6852-0027-1687: THAT IS TRUE (BADAUDERIE->BADR'D GREE) +3729-6852-0028-1688: ALL THESE HONEST PERSONS ARE WAITING THEIR TURN TO GET THEIR SNUFF BOXES FILLED +3729-6852-0029-1689: IT IS SOLD EVERYWHERE BUT FOR THE LAST THREE WEEKS NOBODY WILL USE ANY SNUFF BUT THAT SOLD AT THE (CIVET->SEVETTE) CAT +3729-6852-0030-1690: IS IT BETTER THAN ANYWHERE ELSE +3729-6852-0031-1691: BUT HOW DID SHE MANAGE TO RENDER IT SO FASHIONABLE +3729-6852-0032-1692: SIMPLY BY STOPPING HER CARRIAGE TWO OR THREE TIMES BEFORE THE SHOP TO HAVE HER SNUFF BOX FILLED AND BY SAYING ALOUD TO THE YOUNG GIRL WHO HANDED BACK THE BOX THAT HER SNUFF WAS THE VERY BEST IN PARIS +3729-6852-0033-1693: YOU ARE NOW IN THE ONLY COUNTRY IN THE WORLD WHERE WIT CAN MAKE A FORTUNE BY SELLING EITHER A GENUINE OR A FALSE ARTICLE IN THE FIRST CASE IT RECEIVES THE WELCOME OF INTELLIGENT AND TALENTED PEOPLE AND IN THE SECOND FOOLS ARE ALWAYS READY TO REWARD IT FOR SILLINESS IS TRULY A CHARACTERISTIC OF THE PEOPLE HERE AND HOWEVER WONDERFUL IT MAY APPEAR SILLINESS IS THE DAUGHTER OF WIT +3729-6852-0034-1694: LET A MAN RUN AND EVERYBODY WILL RUN AFTER HIM THE CROWD WILL NOT STOP UNLESS THE MAN IS PROVED TO BE MAD BUT TO PROVE IT IS INDEED A DIFFICULT TASK BECAUSE WE HAVE A CROWD OF MEN WHO MAD FROM THEIR BIRTH ARE STILL CONSIDERED WISE +3729-6852-0035-1695: IT SEEMS TO ME I REPLIED THAT SUCH APPROVAL SUCH RATIFICATION OF THE OPINION EXPRESSED BY THE KING THE PRINCES OF THE BLOOD ET CETERA IS RATHER A PROOF OF THE AFFECTION FELT FOR THEM BY THE NATION FOR THE FRENCH CARRY THAT AFFECTION TO SUCH AN EXTENT THAT THEY BELIEVE THEM INFALLIBLE +3729-6852-0036-1696: WHEN THE KING COMES TO PARIS EVERYBODY CALLS OUT VIVE (LE ROI->LAUROI) +3729-6852-0037-1697: SHE INTRODUCED ME TO ALL HER GUESTS AND GAVE ME SOME PARTICULARS RESPECTING EVERY ONE OF THEM +3729-6852-0038-1698: WHAT SIR I SAID TO HIM AM I FORTUNATE ENOUGH TO SEE YOU +3729-6852-0039-1699: HE HIMSELF RECITED THE SAME PASSAGE IN FRENCH AND POLITELY POINTED OUT THE PARTS IN WHICH HE THOUGHT THAT I HAD IMPROVED ON THE ORIGINAL +3729-6852-0040-1700: FOR THE FIRST DAY SIR I THINK THAT WHAT YOU HAVE DONE GIVES GREAT HOPES OF YOU AND WITHOUT ANY DOUBT YOU WILL MAKE RAPID PROGRESS +3729-6852-0041-1701: I BELIEVE IT SIR AND THAT IS WHAT I FEAR THEREFORE THE PRINCIPAL OBJECT OF MY VISIT HERE IS TO DEVOTE MYSELF ENTIRELY TO THE STUDY OF THE FRENCH LANGUAGE +3729-6852-0042-1702: I AM A VERY UNPLEASANT PUPIL ALWAYS ASKING QUESTIONS CURIOUS TROUBLESOME INSATIABLE AND EVEN SUPPOSING THAT I COULD MEET WITH THE TEACHER I REQUIRE I AM AFRAID I AM NOT RICH ENOUGH TO PAY HIM +3729-6852-0043-1703: I RESIDE IN THE (MARAIS RUE->MARAY GRUE) DE (DOUZE PORTES->DUSPORT) +3729-6852-0044-1704: I WILL MAKE YOU TRANSLATE THEM INTO FRENCH AND YOU NEED NOT BE AFRAID OF MY FINDING YOU INSATIABLE 
+3729-6852-0045-1705: HE HAD A GOOD APPETITE COULD TELL A GOOD STORY WITHOUT LAUGHING (WAS->WITH) CELEBRATED FOR HIS WITTY REPARTEES AND HIS SOCIABLE MANNERS BUT HE SPENT HIS LIFE AT HOME SELDOM GOING OUT AND SEEING HARDLY (ANYONE->ANY ONE) BECAUSE HE ALWAYS HAD A PIPE IN HIS MOUTH AND WAS SURROUNDED BY AT LEAST TWENTY CATS WITH WHICH HE WOULD AMUSE HIMSELF ALL DAY +3729-6852-0046-1706: HIS HOUSEKEEPER HAD THE MANAGEMENT OF EVERYTHING SHE NEVER ALLOWED HIM TO BE IN NEED OF ANYTHING AND SHE GAVE NO ACCOUNT OF HIS MONEY WHICH SHE KEPT ALTOGETHER BECAUSE HE NEVER ASKED HER TO RENDER ANY ACCOUNTS +4077-13751-0000-1258: ON THE SIXTH OF APRIL EIGHTEEN THIRTY THE CHURCH OF JESUS CHRIST OF LATTER DAY SAINTS WAS (FORMALLY->FORMERLY) ORGANIZED AND THUS TOOK ON A LEGAL EXISTENCE +4077-13751-0001-1259: ITS ORIGIN WAS SMALL A GERM AN INSIGNIFICANT SEED HARDLY TO BE THOUGHT OF AS LIKELY TO AROUSE OPPOSITION +4077-13751-0002-1260: INSTEAD OF BUT SIX REGULARLY AFFILIATED MEMBERS AND AT MOST TWO SCORE OF ADHERENTS THE ORGANIZATION NUMBERS (TODAY->TO DAY) MANY HUNDRED THOUSAND SOULS +4077-13751-0003-1261: IN PLACE OF A SINGLE HAMLET IN THE SMALLEST CORNER OF WHICH THE MEMBERS COULD HAVE CONGREGATED THERE NOW ARE ABOUT SEVENTY STAKES OF ZION AND ABOUT SEVEN HUNDRED ORGANIZED WARDS EACH WARD AND STAKE WITH ITS FULL COMPLEMENT OF OFFICERS AND PRIESTHOOD ORGANIZATIONS +4077-13751-0004-1262: THE (PRACTISE->PRACTICE) OF GATHERING ITS PROSELYTES INTO ONE PLACE PREVENTS THE BUILDING UP AND STRENGTHENING OF FOREIGN BRANCHES AND INASMUCH AS EXTENSIVE AND STRONG ORGANIZATIONS ARE SELDOM MET WITH ABROAD VERY ERRONEOUS IDEAS EXIST CONCERNING THE STRENGTH OF THE CHURCH +4077-13751-0005-1263: NEVERTHELESS THE MUSTARD SEED AMONG THE SMALLEST OF ALL (SEEDS->SEATS) HAS ATTAINED (THE->THAT) PROPORTIONS OF A TREE AND THE BIRDS OF THE AIR ARE NESTING IN ITS BRANCHES THE ACORN IS NOW (AN->IN) OAK OFFERING PROTECTION AND THE SWEETS OF SATISFACTION TO EVERY EARNEST PILGRIM JOURNEYING ITS WAY (FOR->FIR) TRUTH +4077-13751-0006-1264: THEIR EYES WERE FROM THE FIRST TURNED IN ANTICIPATION TOWARD THE EVENING SUN NOT MERELY THAT THE WORK OF (PROSELYTING->PROSELY) SHOULD BE CARRIED ON IN THE WEST BUT THAT THE HEADQUARTERS OF THE CHURCH SHOULD BE (THERE->THEIR) ESTABLISHED +4077-13751-0007-1265: THE BOOK (OF->O) MORMON HAD TAUGHT THE PEOPLE THE TRUE ORIGIN AND DESTINY OF THE AMERICAN INDIANS AND TOWARD THIS DARK SKINNED REMNANT OF A ONCE MIGHTY PEOPLE THE MISSIONARIES OF MORMONISM EARLY TURNED THEIR EYES AND WITH THEIR EYES WENT THEIR HEARTS AND THEIR HOPES +4077-13751-0008-1266: IT IS NOTABLE THAT THE INDIAN TRIBES HAVE (GENERALLY->GERALLY) REGARDED (THE->THEIR) RELIGION OF THE LATTER DAY SAINTS WITH FAVOR SEEING IN THE BOOK OF MORMON STRIKING AGREEMENT WITH THEIR OWN TRADITIONS +4077-13751-0009-1267: THE FIRST WELL ESTABLISHED SEAT OF THE CHURCH WAS IN THE PRETTY LITTLE TOWN OF (KIRTLAND->CURTALIND) OHIO ALMOST WITHIN SIGHT OF LAKE ERIE AND HERE SOON ROSE THE FIRST TEMPLE OF MODERN TIMES +4077-13751-0010-1268: TO THE FERVENT LATTER DAY SAINT A TEMPLE IS NOT SIMPLY A CHURCH BUILDING A HOUSE FOR RELIGIOUS ASSEMBLY +4077-13751-0011-1269: SOON THOUSANDS OF CONVERTS HAD RENTED OR PURCHASED HOMES IN MISSOURI INDEPENDENCE JACKSON COUNTY BEING THEIR (CENTER->CENTRE) BUT FROM THE FIRST THEY WERE UNPOPULAR AMONG THE (MISSOURIANS->MISSOURIENS) +4077-13751-0012-1270: THE LIEUTENANT GOVERNOR (LILBURN->LITTLE BURN) W BOGGS AFTERWARD GOVERNOR WAS A PRONOUNCED MORMON HATER AND THROUGHOUT THE PERIOD OF THE TROUBLES HE (MANIFESTED->MANIFEST HIS) SYMPATHY 
WITH THE PERSECUTORS +4077-13751-0013-1271: THEIR SUFFERINGS HAVE NEVER YET BEEN FITLY CHRONICLED BY HUMAN SCRIBE +4077-13751-0014-1272: MAKING THEIR WAY ACROSS THE RIVER MOST OF THE REFUGEES FOUND SHELTER AMONG THE MORE HOSPITABLE PEOPLE OF CLAY COUNTY AND AFTERWARD ESTABLISHED THEMSELVES IN (CALDWELL->CAULDWELL) COUNTY THEREIN FOUNDING THE CITY OF FAR WEST +4077-13751-0015-1273: A SMALL SETTLEMENT HAD BEEN FOUNDED BY MORMON FAMILIES ON SHOAL CREEK AND HERE ON THE THIRTIETH OF OCTOBER EIGHTEEN THIRTY EIGHT A COMPANY OF TWO HUNDRED AND FORTY FELL UPON THE HAPLESS SETTLERS AND BUTCHERED A SCORE +4077-13751-0016-1274: BE IT SAID TO THE HONOR OF SOME OF THE OFFICERS ENTRUSTED WITH THE TERRIBLE COMMISSION THAT WHEN THEY LEARNED ITS TRUE SIGNIFICANCE THEY (RESIGNED->RESIGN) THEIR AUTHORITY RATHER THAN HAVE ANYTHING TO DO WITH WHAT THEY DESIGNATED A COLD BLOODED BUTCHERY +4077-13751-0017-1275: OH WHAT A RECORD TO READ WHAT A PICTURE TO GAZE UPON HOW AWFUL THE FACT +4077-13751-0018-1276: AMERICAN (SCHOOL BOYS->SCHOOLBOYS) READ WITH EMOTIONS OF HORROR OF THE ALBIGENSES DRIVEN BEATEN AND KILLED WITH A (PAPAL->PEPPEL) LEGATE DIRECTING THE BUTCHERY AND OF THE (VAUDOIS->FAUDOIS) HUNTED AND HOUNDED LIKE BEASTS AS THE EFFECT OF A ROYAL DECREE AND THEY YET SHALL READ IN THE HISTORY OF THEIR OWN COUNTRY OF SCENES AS TERRIBLE AS THESE IN THE EXHIBITION OF INJUSTICE AND INHUMAN HATE +4077-13751-0019-1277: WHO BEGAN THE QUARREL WAS IT THE MORMONS +4077-13751-0020-1278: AS (A SAMPLE->THE SABLE) OF THE PRESS COMMENTS AGAINST THE BRUTALITY OF THE MISSOURIANS I QUOTE A PARAGRAPH FROM THE (QUINCY->QUINCEY) ARGUS MARCH SIXTEENTH EIGHTEEN THIRTY NINE +4077-13751-0021-1279: IT WILL BE OBSERVED THAT AN ORGANIZED MOB AIDED BY MANY OF THE CIVIL AND MILITARY OFFICERS OF MISSOURI WITH GOVERNOR BOGGS AT THEIR HEAD HAVE BEEN THE PROMINENT ACTORS IN THIS BUSINESS INCITED TOO IT APPEARS AGAINST THE MORMONS BY POLITICAL HATRED AND BY THE ADDITIONAL MOTIVES OF PLUNDER AND REVENGE +4077-13754-0000-1241: THE ARMY FOUND THE PEOPLE IN POVERTY AND LEFT THEM IN COMPARATIVE WEALTH +4077-13754-0001-1242: BUT A WORD FURTHER CONCERNING THE EXPEDITION IN GENERAL +4077-13754-0002-1243: IT WAS THROUGH FLOYD'S ADVICE THAT (BUCHANAN ORDERED->YOU CANNOT ORDER) THE MILITARY EXPEDITION TO UTAH OSTENSIBLY TO INSTALL CERTAIN FEDERAL OFFICIALS AND TO REPRESS AN ALLEGED INFANTILE REBELLION WHICH IN FACT HAD NEVER COME INTO EXISTENCE BUT IN REALITY TO FURTHER THE (INTERESTS->ENTRANCE) OF THE SECESSIONISTS +4077-13754-0003-1244: MOREOVER HAD THE PEOPLE BEEN INCLINED TO REBELLION WHAT (GREATER->GREAT) OPPORTUNITY COULD THEY HAVE WISHED +4077-13754-0004-1245: ALREADY A NORTH AND (A->THE) SOUTH WERE TALKED OF WHY NOT SET UP ALSO (A->*) WEST +4077-13754-0005-1246: THEY KNEW NO NORTH NO SOUTH NO EAST NO WEST THEY STOOD POSITIVELY BY THE CONSTITUTION AND WOULD HAVE NOTHING TO DO IN THE BLOODY STRIFE BETWEEN BROTHERS UNLESS INDEED THEY WERE SUMMONED BY THE AUTHORITY TO WHICH THEY HAD ALREADY ONCE LOYALLY RESPONDED TO FURNISH MEN (AND->IN) ARMS FOR (THEIR->THE) COUNTRY'S NEED +4077-13754-0006-1247: WHAT THE LATTER DAY (SAINTS->SAYS) CALL CELESTIAL MARRIAGE IS CHARACTERISTIC OF THE CHURCH AND IS IN VERY GENERAL (PRACTISE->PRACTICE) BUT OF CELESTIAL MARRIAGE PLURALITY OF WIVES WAS AN INCIDENT NEVER AN ESSENTIAL +4077-13754-0007-1248: WE BELIEVE IN A LITERAL RESURRECTION AND AN ACTUAL HEREAFTER IN WHICH FUTURE (STATE->STATES) SHALL BE RECOGNIZED EVERY SANCTIFIED AND AUTHORIZED RELATIONSHIP EXISTING HERE ON EARTH OF PARENT AND CHILD (BROTHER AND->BRETHREN) 
SISTER HUSBAND AND WIFE +4077-13754-0008-1249: IT HAS BEEN MY PRIVILEGE TO TREAD THE SOIL OF MANY LANDS TO OBSERVE THE CUSTOMS AND STUDY THE HABITS OF MORE NATIONS THAN ONE AND I HAVE YET (TO FIND->DEFINED) THE PLACE AND MEET THE PEOPLE WHERE AND WITH WHOM THE PURITY OF MAN AND WOMAN IS HELD MORE PRECIOUS THAN AMONG THE MALIGNED MORMONS IN THE MOUNTAIN VALLEYS OF THE WEST +4077-13754-0009-1250: AT THE INCEPTION OF (PLURAL->PEARL) MARRIAGE AMONG THE LATTER DAY SAINTS THERE WAS NO LAW NATIONAL OR STATE AGAINST ITS (PRACTISE->PRACTICE) +4077-13754-0010-1251: IN EIGHTEEN SIXTY TWO A LAW WAS ENACTED WITH (THE->A) PURPOSE OF SUPPRESSING (PLURAL->PORAL) MARRIAGE AND AS HAD BEEN PREDICTED IN THE NATIONAL SENATE PRIOR TO ITS PASSAGE IT LAY FOR MANY YEARS A DEAD LETTER +4077-13754-0011-1252: FEDERAL JUDGES AND UNITED STATES ATTORNEYS (IN UTAH->AND NEW TOP) WHO WERE NOT (MORMONS->MORE MEN'S) NOR LOVERS OF (MORMONISM->WOMANISM) REFUSED TO ENTERTAIN COMPLAINTS OR PROSECUTE CASES UNDER THE LAW BECAUSE OF ITS MANIFEST INJUSTICE AND INADEQUACY +4077-13754-0012-1253: THIS MEANT THAT FOR AN ALLEGED (MISDEMEANOR->MISDEMEANOUR) FOR WHICH CONGRESS PRESCRIBED A MAXIMUM PENALTY OF SIX MONTHS IMPRISONMENT AND A FINE OF THREE HUNDRED DOLLARS A MAN MIGHT BE IMPRISONED FOR LIFE (AYE->I) FOR MANY TERMS OF A MAN'S NATURAL LIFE DID THE COURT'S POWER TO ENFORCE ITS SENTENCES EXTEND SO FAR AND MIGHT BE FINED MILLIONS OF DOLLARS +4077-13754-0013-1254: BEFORE THIS (TRAVESTY->TRAVASTY) ON THE ADMINISTRATION OF LAW COULD BE (BROUGHT->WROUGHT) BEFORE THE COURT OF LAST RESORT AND THERE (MEET->MET) WITH THE REVERSAL AND REBUKE IT DESERVED MEN WERE IMPRISONED UNDER (SENTENCES->SENTENCE) OF MANY YEARS DURATION +4077-13754-0014-1255: THE PEOPLE CONTESTED THESE MEASURES ONE BY ONE IN THE COURTS PRESENTING IN CASE AFTER CASE THE DIFFERENT PHASES OF THE SUBJECT AND URGING THE UNCONSTITUTIONALITY OF THE MEASURE +4077-13754-0015-1256: THEN THE CHURCH WAS DISINCORPORATED AND ITS PROPERTY BOTH REAL AND PERSONAL CONFISCATED AND (ESCHEATED->ISTIATED) TO THE GOVERNMENT OF THE UNITED STATES AND ALTHOUGH THE PERSONAL PROPERTY WAS SOON RESTORED REAL ESTATE OF GREAT VALUE LONG LAY IN THE HANDS OF THE (COURT'S->COURTS) RECEIVER AND THE MORMON CHURCH HAD TO PAY THE NATIONAL GOVERNMENT HIGH RENTAL ON ITS OWN PROPERTY +4077-13754-0016-1257: AND SO THE STORY OF MORMONISM RUNS ON ITS FINALE HAS NOT YET BEEN WRITTEN THE CURRENT PRESS PRESENTS CONTINUOUSLY NEW STAGES OF ITS PROGRESS NEW DEVELOPMENTS OF ITS PLAN +4446-2271-0000-1133: (MAINHALL->MAIN HALL) LIKED ALEXANDER BECAUSE HE WAS AN ENGINEER +4446-2271-0001-1134: (HE HAD->WE NOT) PRECONCEIVED IDEAS ABOUT EVERYTHING AND HIS IDEA ABOUT AMERICANS WAS THAT THEY SHOULD BE ENGINEERS OR MECHANICS +4446-2271-0002-1135: (IT'S->ITS) TREMENDOUSLY WELL PUT ON TOO +4446-2271-0003-1136: IT'S BEEN ON ONLY TWO WEEKS AND I'VE BEEN HALF A DOZEN TIMES ALREADY +4446-2271-0004-1137: DO YOU KNOW ALEXANDER (MAINHALL->MAIN HALL) LOOKED WITH PERPLEXITY UP INTO THE TOP OF THE HANSOM AND RUBBED HIS PINK CHEEK WITH HIS GLOVED FINGER DO YOU KNOW I SOMETIMES THINK OF TAKING TO CRITICISM SERIOUSLY MYSELF +4446-2271-0005-1138: SHE SAVES HER HAND TOO (SHE'S AT->SHE SAID) HER BEST IN THE SECOND ACT +4446-2271-0006-1139: HE'S BEEN WANTING TO MARRY HILDA THESE THREE YEARS AND MORE +4446-2271-0007-1140: SHE DOESN'T TAKE UP WITH ANYBODY YOU KNOW +4446-2271-0008-1141: IRENE (BURGOYNE->WERE GOING) ONE OF HER FAMILY TOLD ME IN CONFIDENCE THAT THERE WAS A ROMANCE SOMEWHERE BACK IN THE BEGINNING +4446-2271-0009-1142: (MAINHALL 
VOUCHED->MEANHAW VOUCH) FOR HER CONSTANCY WITH A LOFTINESS THAT MADE ALEXANDER SMILE EVEN WHILE A KIND OF RAPID EXCITEMENT WAS TINGLING THROUGH HIM +4446-2271-0010-1143: HE'S ANOTHER WHO'S AWFULLY KEEN ABOUT HER LET ME INTRODUCE YOU +4446-2271-0011-1144: SIR HARRY (TOWNE->TOWN) MISTER BARTLEY ALEXANDER THE AMERICAN ENGINEER +4446-2271-0012-1145: I SAY SIR HARRY THE LITTLE GIRL'S GOING FAMOUSLY TO NIGHT ISN'T SHE +4446-2271-0013-1146: (DO->*) YOU KNOW I THOUGHT THE DANCE (A BIT CONSCIOUS->OF GOOD CONSCIENCE) TO NIGHT FOR THE FIRST TIME +4446-2271-0014-1147: (WESTMERE->WESTMIR) AND I WERE BACK AFTER THE FIRST ACT AND WE THOUGHT SHE SEEMED QUITE UNCERTAIN OF HERSELF +4446-2271-0015-1148: A LITTLE ATTACK OF NERVES POSSIBLY +4446-2271-0016-1149: HE WAS BEGINNING TO FEEL (A->THE) KEEN INTEREST IN THE SLENDER BAREFOOT DONKEY GIRL WHO SLIPPED IN AND OUT OF THE PLAY SINGING LIKE SOME ONE WINDING THROUGH A HILLY FIELD +4446-2271-0017-1150: ONE NIGHT WHEN HE AND WINIFRED WERE SITTING TOGETHER ON THE BRIDGE HE TOLD HER (THAT->THE) THINGS HAD HAPPENED WHILE HE WAS STUDYING ABROAD THAT HE WAS SORRY FOR ONE THING IN PARTICULAR AND HE ASKED HER WHETHER SHE THOUGHT SHE OUGHT TO KNOW ABOUT THEM +4446-2271-0018-1151: SHE CONSIDERED (*->FOR) A MOMENT AND THEN SAID NO I THINK NOT (THOUGH->THE WAY) I AM GLAD YOU ASK ME +4446-2271-0019-1152: AFTER THAT IT WAS EASY TO FORGET ACTUALLY TO FORGET +4446-2271-0020-1153: OF COURSE HE REFLECTED SHE ALWAYS HAD THAT COMBINATION OF SOMETHING HOMELY AND SENSIBLE AND SOMETHING UTTERLY WILD AND DAFT +4446-2271-0021-1154: SHE MUST CARE ABOUT THE THEATRE A GREAT DEAL MORE THAN SHE USED TO +4446-2271-0022-1155: I'M GLAD SHE'S HELD HER OWN (SINCE->SE) +4446-2271-0023-1156: AFTER ALL WE WERE AWFULLY YOUNG +4446-2271-0024-1157: I SHOULDN'T WONDER IF SHE COULD LAUGH ABOUT IT WITH ME NOW +4446-2273-0000-1158: HILDA WAS VERY NICE TO HIM AND HE SAT ON THE EDGE OF HIS CHAIR FLUSHED WITH HIS CONVERSATIONAL EFFORTS AND MOVING HIS CHIN ABOUT NERVOUSLY OVER HIS HIGH COLLAR +4446-2273-0001-1159: THEY ASKED HIM TO COME TO SEE THEM IN CHELSEA AND THEY SPOKE VERY TENDERLY OF HILDA +4446-2273-0002-1160: LAMB WOULDN'T CARE A GREAT DEAL ABOUT MANY OF THEM I FANCY +4446-2273-0003-1161: WHEN BARTLEY ARRIVED AT BEDFORD SQUARE ON SUNDAY EVENING MARIE THE PRETTY LITTLE FRENCH GIRL MET HIM AT THE DOOR AND CONDUCTED HIM UPSTAIRS +4446-2273-0004-1162: I SHOULD NEVER HAVE ASKED YOU IF MOLLY HAD BEEN HERE FOR I REMEMBER YOU DON'T LIKE ENGLISH COOKERY +4446-2273-0005-1163: I HAVEN'T HAD A CHANCE YET TO TELL YOU WHAT A JOLLY LITTLE PLACE I THINK THIS IS +4446-2273-0006-1164: THEY ARE ALL SKETCHES MADE ABOUT THE (VILLA D'ESTE->VALIDESTE) YOU SEE +4446-2273-0007-1165: THOSE FELLOWS ARE ALL VERY LOYAL EVEN (MAINHALL->MAIN HALL) +4446-2273-0008-1166: I'VE MANAGED TO SAVE SOMETHING EVERY YEAR AND THAT WITH HELPING MY THREE SISTERS NOW AND THEN AND TIDING POOR COUSIN MIKE OVER BAD SEASONS +4446-2273-0009-1167: IT'S NOT PARTICULARLY RARE SHE SAID BUT SOME OF IT WAS MY MOTHER'S +4446-2273-0010-1168: THERE WAS WATERCRESS SOUP AND SOLE AND A DELIGHTFUL OMELETTE STUFFED WITH MUSHROOMS AND TRUFFLES AND TWO SMALL RARE DUCKLINGS AND ARTICHOKES AND A DRY YELLOW RHONE WINE OF WHICH BARTLEY HAD ALWAYS BEEN VERY FOND +4446-2273-0011-1169: THERE IS NOTHING ELSE THAT LOOKS SO JOLLY +4446-2273-0012-1170: THANK YOU BUT I DON'T LIKE IT SO WELL AS THIS +4446-2273-0013-1171: HAVE YOU BEEN IN PARIS MUCH THESE LATE YEARS +4446-2273-0014-1172: THERE ARE (*->A) FEW CHANGES IN THE OLD QUARTER +4446-2273-0015-1173: DON'T I THOUGH I'M SO 
SORRY TO HEAR IT HOW DID HER SON TURN OUT +4446-2273-0016-1174: HER HAIR IS STILL LIKE FLAX AND HER BLUE EYES ARE JUST LIKE A BABY'S AND SHE HAS THE SAME THREE FRECKLES ON HER LITTLE NOSE AND TALKS ABOUT GOING BACK TO HER (BAINS DE MER->BANDUME) +4446-2273-0017-1175: HOW JOLLY IT WAS BEING YOUNG HILDA +4446-2273-0018-1176: DO YOU REMEMBER THAT FIRST WALK WE TOOK TOGETHER IN PARIS +4446-2273-0019-1177: COME WE'LL HAVE OUR COFFEE IN THE OTHER ROOM AND YOU CAN SMOKE +4446-2273-0020-1178: I THINK WE DID SHE ANSWERED DEMURELY +4446-2273-0021-1179: WHAT SHE WANTED FROM US WAS NEITHER OUR FLOWERS NOR OUR (FRANCS->FRANKS) BUT JUST OUR YOUTH +4446-2273-0022-1180: THEY WERE BOTH REMEMBERING WHAT THE WOMAN HAD SAID WHEN SHE TOOK THE MONEY GOD GIVE YOU A HAPPY LOVE +4446-2273-0023-1181: THE STRANGE WOMAN AND HER PASSIONATE SENTENCE THAT RANG OUT SO SHARPLY HAD FRIGHTENED THEM BOTH +4446-2273-0024-1182: BARTLEY STARTED WHEN HILDA RANG THE LITTLE BELL BESIDE HER DEAR ME WHY DID YOU DO THAT +4446-2273-0025-1183: IT WAS VERY JOLLY HE MURMURED LAZILY AS MARIE CAME IN TO TAKE AWAY THE COFFEE +4446-2273-0026-1184: HAVE I TOLD YOU ABOUT MY NEW PLAY +4446-2273-0027-1185: WHEN SHE FINISHED ALEXANDER SHOOK HIMSELF OUT OF A REVERIE +4446-2273-0028-1186: NONSENSE OF COURSE I CAN'T REALLY SING EXCEPT THE WAY MY MOTHER AND GRANDMOTHER DID BEFORE ME +4446-2273-0029-1187: IT'S REALLY TOO WARM IN THIS ROOM TO SING DON'T YOU FEEL IT +4446-2273-0030-1188: ALEXANDER WENT OVER AND OPENED THE WINDOW FOR HER +4446-2273-0031-1189: THERE JUST IN (FRONT->FRON) +4446-2273-0032-1190: HE STOOD A LITTLE BEHIND HER AND TRIED TO STEADY HIMSELF AS HE SAID IT'S SOFT AND MISTY SEE HOW WHITE THE STARS ARE +4446-2273-0033-1191: FOR A LONG TIME NEITHER HILDA NOR BARTLEY (SPOKE->SPO) +4446-2273-0034-1192: HE FELT A TREMOR RUN THROUGH THE SLENDER YELLOW FIGURE IN FRONT OF HIM +4446-2273-0035-1193: BARTLEY LEANED OVER HER SHOULDER WITHOUT TOUCHING HER AND WHISPERED IN HER EAR YOU ARE GIVING ME A CHANCE YES +4446-2273-0036-1194: ALEXANDER (UNCLENCHED->CLENCHED) THE TWO HANDS AT HIS SIDES +4446-2275-0000-1195: THE STOP AT QUEENSTOWN THE TEDIOUS PASSAGE (UP->OF) THE (MERSEY->MERCY) WERE THINGS THAT HE NOTED DIMLY THROUGH HIS GROWING IMPATIENCE +4446-2275-0001-1196: SHE BLUSHED AND SMILED AND FUMBLED HIS CARD IN HER CONFUSION BEFORE SHE RAN UPSTAIRS +4446-2275-0002-1197: ALEXANDER PACED UP AND DOWN THE HALLWAY BUTTONING AND UNBUTTONING HIS OVERCOAT UNTIL SHE RETURNED AND TOOK HIM UP TO HILDA'S LIVING ROOM +4446-2275-0003-1198: THE ROOM WAS EMPTY WHEN HE (ENTERED->ENTER) +4446-2275-0004-1199: ALEXANDER DID NOT SIT DOWN +4446-2275-0005-1200: I FELT IT IN MY BONES WHEN I WOKE THIS MORNING THAT SOMETHING SPLENDID WAS GOING TO TURN UP +4446-2275-0006-1201: I THOUGHT IT MIGHT BE SISTER KATE OR COUSIN MIKE WOULD BE HAPPENING ALONG +4446-2275-0007-1202: SHE PUSHED HIM TOWARD THE BIG CHAIR BY THE FIRE AND SAT DOWN ON A STOOL AT THE OPPOSITE SIDE OF THE HEARTH HER KNEES DRAWN UP TO HER CHIN LAUGHING LIKE A HAPPY LITTLE GIRL +4446-2275-0008-1203: WHEN DID YOU COME BARTLEY AND HOW DID IT HAPPEN YOU HAVEN'T SPOKEN A WORD +4446-2275-0009-1204: I GOT IN ABOUT TEN MINUTES AGO +4446-2275-0010-1205: ALEXANDER LEANED FORWARD AND WARMED HIS HANDS BEFORE THE BLAZE +4446-2275-0011-1206: BARTLEY BENT (LOWER->LOWERED) OVER THE FIRE +4446-2275-0012-1207: SHE LOOKED AT HIS HEAVY SHOULDERS AND BIG DETERMINED HEAD THRUST FORWARD LIKE A CATAPULT IN LEASH +4446-2275-0013-1208: I'LL DO ANYTHING YOU WISH ME TO BARTLEY SHE SAID TREMULOUSLY +4446-2275-0014-1209: I CAN'T STAND 
SEEING YOU MISERABLE +4446-2275-0015-1210: HE PULLED UP A WINDOW AS IF THE AIR WERE HEAVY +4446-2275-0016-1211: HILDA WATCHED HIM FROM (HER->THE) CORNER TREMBLING AND SCARCELY BREATHING DARK SHADOWS GROWING ABOUT HER EYES IT +4446-2275-0017-1212: BUT IT'S WORSE NOW IT'S UNBEARABLE +4446-2275-0018-1213: I GET NOTHING BUT MISERY OUT OF EITHER +4446-2275-0019-1214: THE WORLD IS ALL THERE JUST AS IT USED TO BE BUT I CAN'T GET AT IT ANY MORE +4446-2275-0020-1215: IT WAS MYSELF I WAS DEFYING HILDA +4446-2275-0021-1216: (HILDA'S->HELDA'S) FACE QUIVERED BUT SHE WHISPERED YES I THINK IT MUST HAVE BEEN +4446-2275-0022-1217: BUT WHY DIDN'T YOU TELL ME WHEN YOU WERE HERE IN THE SUMMER +4446-2275-0023-1218: ALEXANDER GROANED I MEANT TO BUT SOMEHOW I COULDN'T +4446-2275-0024-1219: SHE PRESSED HIS HAND GENTLY IN GRATITUDE +4446-2275-0025-1220: WEREN'T YOU HAPPY THEN AT ALL +4446-2275-0026-1221: SHE CLOSED HER EYES AND TOOK A DEEP BREATH AS IF TO DRAW IN AGAIN THE FRAGRANCE OF THOSE DAYS +4446-2275-0027-1222: HE MOVED UNEASILY AND HIS CHAIR CREAKED +4446-2275-0028-1223: YES YES SHE HURRIED PULLING HER HAND GENTLY AWAY FROM HIM +4446-2275-0029-1224: PLEASE TELL ME ONE THING BARTLEY AT LEAST TELL ME THAT YOU BELIEVE I THOUGHT I WAS MAKING YOU HAPPY +4446-2275-0030-1225: YES (HILDA->HELDA) I KNOW THAT HE SAID SIMPLY +4446-2275-0031-1226: I UNDERSTAND BARTLEY I WAS WRONG +4446-2275-0032-1227: BUT I DIDN'T KNOW YOU'VE ONLY TO TELL ME NOW +4446-2275-0033-1228: WHAT I MEAN IS THAT I WANT YOU TO PROMISE NEVER TO SEE ME AGAIN NO MATTER HOW OFTEN I COME NO MATTER HOW HARD I BEG +4446-2275-0034-1229: KEEP AWAY IF YOU WISH WHEN HAVE I EVER FOLLOWED YOU +4446-2275-0035-1230: ALEXANDER ROSE AND SHOOK HIMSELF ANGRILY YES I KNOW I'M COWARDLY +4446-2275-0036-1231: HE TOOK (HER->A) ROUGHLY IN HIS ARMS DO YOU KNOW WHAT I MEAN +4446-2275-0037-1232: OH BARTLEY WHAT AM I TO DO +4446-2275-0038-1233: I WILL ASK THE LEAST IMAGINABLE BUT I MUST HAVE SOMETHING +4446-2275-0039-1234: I MUST KNOW ABOUT YOU +4446-2275-0040-1235: THE SIGHT OF YOU BARTLEY TO SEE YOU LIVING AND HAPPY AND SUCCESSFUL CAN I NEVER MAKE YOU UNDERSTAND WHAT THAT MEANS TO ME +4446-2275-0041-1236: YOU SEE LOVING SOME ONE AS I LOVE YOU MAKES THE WHOLE WORLD DIFFERENT +4446-2275-0042-1237: AND THEN YOU CAME BACK NOT CARING VERY MUCH BUT IT MADE NO DIFFERENCE +4446-2275-0043-1238: BARTLEY BENT OVER AND TOOK HER IN HIS ARMS KISSING HER MOUTH AND HER WET TIRED EYES +4446-2275-0044-1239: (*->I) DON'T CRY DON'T CRY HE WHISPERED +4446-2275-0045-1240: WE'VE TORTURED EACH OTHER ENOUGH FOR (TONIGHT->TO NIGHT) +4507-16021-0000-1469: CHAPTER ONE ORIGIN +4507-16021-0001-1470: IT ENGENDERS A WHOLE WORLD LA (PEGRE->PEG) FOR WHICH (READ->RED) THEFT AND A HELL LA (PEGRENNE->PEGRIN) FOR WHICH (READ->RED) HUNGER +4507-16021-0002-1471: THUS IDLENESS IS THE MOTHER +4507-16021-0003-1472: SHE HAS A SON THEFT AND A DAUGHTER HUNGER +4507-16021-0004-1473: WHAT IS SLANG +4507-16021-0005-1474: WE HAVE NEVER UNDERSTOOD THIS SORT OF OBJECTIONS +4507-16021-0006-1475: SLANG IS ODIOUS +4507-16021-0007-1476: SLANG MAKES ONE SHUDDER +4507-16021-0008-1477: WHO DENIES THAT OF COURSE IT DOES +4507-16021-0009-1478: WHEN IT IS A QUESTION OF PROBING A WOUND A GULF A SOCIETY SINCE WHEN HAS IT BEEN CONSIDERED WRONG TO GO TOO FAR TO GO TO THE BOTTOM +4507-16021-0010-1479: WE HAVE ALWAYS THOUGHT THAT IT WAS SOMETIMES A COURAGEOUS ACT AND AT LEAST A SIMPLE AND USEFUL DEED WORTHY OF THE SYMPATHETIC ATTENTION WHICH DUTY ACCEPTED (AND->IN) FULFILLED MERITS +4507-16021-0011-1480: WHY SHOULD ONE NOT EXPLORE 
EVERYTHING AND STUDY EVERYTHING +4507-16021-0012-1481: WHY SHOULD ONE HALT ON THE WAY +4507-16021-0013-1482: NOTHING IS MORE LUGUBRIOUS THAN THE CONTEMPLATION THUS IN ITS NUDITY IN THE BROAD LIGHT OF THOUGHT OF THE HORRIBLE SWARMING OF SLANG +4507-16021-0014-1483: (NOW->NO) WHEN HAS HORROR EVER EXCLUDED STUDY +4507-16021-0015-1484: SINCE WHEN HAS MALADY BANISHED MEDICINE +4507-16021-0016-1485: CAN ONE IMAGINE A NATURALIST REFUSING TO STUDY THE VIPER THE BAT THE SCORPION THE CENTIPEDE THE (TARANTULA->TERENTIAL) AND ONE WHO WOULD CAST THEM BACK INTO THEIR DARKNESS SAYING (OH->O) HOW UGLY THAT IS +4507-16021-0017-1486: HE WOULD BE LIKE A PHILOLOGIST REFUSING TO EXAMINE A FACT IN LANGUAGE A PHILOSOPHER HESITATING TO SCRUTINIZE A FACT IN HUMANITY +4507-16021-0018-1487: WHAT IS SLANG PROPERLY SPEAKING +4507-16021-0019-1488: IT IS THE LANGUAGE OF WRETCHEDNESS +4507-16021-0020-1489: WE MAY BE STOPPED THE FACT MAY BE PUT TO US IN GENERAL TERMS WHICH IS ONE WAY OF ATTENUATING IT WE MAY BE TOLD THAT ALL TRADES PROFESSIONS IT MAY BE ADDED ALL THE ACCIDENTS OF THE SOCIAL HIERARCHY AND ALL FORMS OF INTELLIGENCE HAVE THEIR OWN SLANG +4507-16021-0021-1490: THE PAINTER WHO SAYS MY GRINDER THE NOTARY WHO SAYS MY SKIP THE GUTTER THE (HAIRDRESSER->HAIR DRESSER) WHO SAYS MY (MEALYBACK->MEALLY BACK) THE COBBLER WHO SAYS MY CUB TALKS SLANG +4507-16021-0022-1491: THERE IS THE SLANG OF THE AFFECTED LADY AS WELL AS OF THE (PRECIEUSES->PURSUS) +4507-16021-0023-1492: THE SUGAR MANUFACTURER WHO SAYS LOAF CLARIFIED LUMPS BASTARD COMMON BURNT THIS HONEST MANUFACTURER TALKS SLANG +4507-16021-0024-1493: ALGEBRA MEDICINE (BOTANY->BARTANY) HAVE EACH THEIR SLANG +4507-16021-0025-1494: TO MEET THE NEEDS OF THIS CONFLICT WRETCHEDNESS HAS INVENTED A LANGUAGE OF COMBAT WHICH IS SLANG +4507-16021-0026-1495: TO KEEP AFLOAT AND TO RESCUE FROM OBLIVION TO HOLD ABOVE THE GULF WERE IT BUT A FRAGMENT OF SOME LANGUAGE WHICH MAN HAS SPOKEN AND WHICH WOULD OTHERWISE BE LOST THAT IS TO SAY ONE OF THE ELEMENTS GOOD OR BAD OF WHICH CIVILIZATION IS COMPOSED OR BY WHICH IT IS COMPLICATED TO EXTEND THE RECORDS OF SOCIAL OBSERVATION IS TO SERVE CIVILIZATION ITSELF +4507-16021-0027-1496: PHOENICIAN VERY GOOD +4507-16021-0028-1497: EVEN DIALECT LET THAT PASS +4507-16021-0029-1498: TO THIS WE REPLY IN ONE WORD ONLY +4507-16021-0030-1499: ASSUREDLY IF THE TONGUE WHICH A NATION OR A PROVINCE HAS SPOKEN IS WORTHY OF INTEREST THE LANGUAGE WHICH HAS BEEN SPOKEN BY A MISERY IS STILL MORE WORTHY OF ATTENTION AND STUDY +4507-16021-0031-1500: AND THEN WE INSIST UPON IT THE STUDY OF SOCIAL DEFORMITIES AND INFIRMITIES AND THE TASK OF POINTING THEM OUT WITH A VIEW TO REMEDY IS NOT A BUSINESS IN WHICH (CHOICE IS->CHOICES) PERMITTED +4507-16021-0032-1501: HE MUST DESCEND WITH HIS HEART FULL OF CHARITY AND SEVERITY AT THE SAME TIME AS A BROTHER AND AS (A->HE) JUDGE TO THOSE IMPENETRABLE CASEMATES (WHERE->WERE) CRAWL PELL MELL THOSE WHO BLEED AND THOSE WHO DEAL THE BLOW THOSE WHO WEEP AND THOSE WHO CURSE THOSE WHO FAST (AND->IN) THOSE WHO DEVOUR THOSE WHO ENDURE EVIL AND THOSE WHO INFLICT IT +4507-16021-0033-1502: DO WE REALLY KNOW THE MOUNTAIN WELL WHEN WE ARE NOT ACQUAINTED WITH THE CAVERN +4507-16021-0034-1503: THEY CONSTITUTE TWO DIFFERENT ORDERS OF FACTS WHICH CORRESPOND TO EACH OTHER WHICH ARE ALWAYS INTERLACED AND WHICH OFTEN BRING FORTH RESULTS +4507-16021-0035-1504: TRUE HISTORY BEING A MIXTURE OF ALL THINGS THE TRUE HISTORIAN MINGLES IN EVERYTHING +4507-16021-0036-1505: FACTS FORM ONE OF THESE AND IDEAS THE OTHER +4507-16021-0037-1506: THERE IT CLOTHES 
ITSELF IN WORD MASKS IN METAPHOR RAGS +4507-16021-0038-1507: IN (THIS GUISE->THE SKIES) IT BECOMES HORRIBLE +4507-16021-0039-1508: ONE PERCEIVES WITHOUT UNDERSTANDING IT A HIDEOUS MURMUR SOUNDING ALMOST LIKE HUMAN ACCENTS BUT MORE NEARLY RESEMBLING A HOWL THAN AN ARTICULATE WORD +4507-16021-0040-1509: ONE THINKS ONE HEARS (HYDRAS->HYDRAST) TALKING +4507-16021-0041-1510: IT IS UNINTELLIGIBLE IN THE DARK +4507-16021-0042-1511: IT IS BLACK IN MISFORTUNE IT IS BLACKER STILL (IN->AND) CRIME THESE TWO BLACKNESSES AMALGAMATED (COMPOSE SLANG->COMPOSED SLING) +4507-16021-0043-1512: THE EARTH IS NOT DEVOID OF RESEMBLANCE TO A JAIL +4507-16021-0044-1513: LOOK CLOSELY AT LIFE +4507-16021-0045-1514: IT IS SO MADE THAT EVERYWHERE WE FEEL THE SENSE OF PUNISHMENT +4507-16021-0046-1515: EACH DAY HAS ITS OWN GREAT GRIEF (OR->FOR) ITS LITTLE CARE +4507-16021-0047-1516: YESTERDAY (YOU->*) WERE TREMBLING FOR A HEALTH THAT IS DEAR TO YOU TO DAY YOU FEAR FOR YOUR OWN TO MORROW IT WILL BE ANXIETY ABOUT MONEY THE DAY AFTER TO MORROW THE (DIATRIBE->DIETRIBE) OF A SLANDERER THE DAY AFTER THAT THE MISFORTUNE OF SOME FRIEND THEN THE PREVAILING WEATHER THEN SOMETHING THAT HAS BEEN BROKEN OR LOST THEN A PLEASURE WITH WHICH YOUR CONSCIENCE AND YOUR VERTEBRAL COLUMN REPROACH YOU AGAIN THE COURSE OF PUBLIC AFFAIRS +4507-16021-0048-1517: THIS WITHOUT RECKONING IN THE PAINS OF THE HEART AND SO (IT->TO) GOES ON +4507-16021-0049-1518: THERE IS HARDLY ONE DAY OUT OF A HUNDRED WHICH IS WHOLLY JOYOUS AND SUNNY +4507-16021-0050-1519: AND YOU BELONG TO THAT SMALL CLASS WHO ARE (*->A) HAPPY +4507-16021-0051-1520: IN THIS WORLD EVIDENTLY THE VESTIBULE OF ANOTHER THERE ARE NO FORTUNATE +4507-16021-0052-1521: THE REAL HUMAN DIVISION IS THIS THE LUMINOUS AND THE SHADY +4507-16021-0053-1522: TO DIMINISH THE NUMBER OF THE SHADY TO AUGMENT THE NUMBER OF THE LUMINOUS THAT IS THE OBJECT +4507-16021-0054-1523: THAT IS WHY WE CRY EDUCATION SCIENCE +4507-16021-0055-1524: TO TEACH READING MEANS TO (LIGHT->WRITE) THE FIRE EVERY SYLLABLE SPELLED OUT SPARKLES +4507-16021-0056-1525: HOWEVER HE WHO SAYS LIGHT DOES NOT NECESSARILY SAY JOY +4507-16021-0057-1526: PEOPLE SUFFER IN THE LIGHT EXCESS BURNS +4507-16021-0058-1527: THE FLAME IS THE ENEMY OF THE WING +4507-16021-0059-1528: TO BURN WITHOUT CEASING TO FLY THEREIN LIES THE MARVEL OF GENIUS +4970-29093-0000-2093: YOU'LL NEVER DIG IT OUT OF THE (ASTOR->ASTRO) LIBRARY +4970-29093-0001-2094: TO THE YOUNG AMERICAN HERE OR ELSEWHERE THE PATHS TO FORTUNE ARE INNUMERABLE AND ALL OPEN THERE IS INVITATION IN THE AIR AND SUCCESS IN ALL HIS WIDE HORIZON +4970-29093-0002-2095: HE HAS NO TRADITIONS TO BIND HIM OR GUIDE HIM AND HIS IMPULSE IS TO BREAK AWAY FROM THE OCCUPATION HIS FATHER HAS FOLLOWED AND MAKE A NEW WAY FOR HIMSELF +4970-29093-0003-2096: THE MODEST FELLOW WOULD HAVE LIKED FAME THRUST UPON HIM FOR SOME WORTHY ACHIEVEMENT IT MIGHT BE FOR A BOOK OR FOR THE (SKILLFUL->SKILFUL) MANAGEMENT OF SOME GREAT NEWSPAPER OR FOR SOME DARING EXPEDITION LIKE THAT OF LIEUTENANT STRAIN OR DOCTOR KANE +4970-29093-0004-2097: HE WAS UNABLE TO DECIDE EXACTLY WHAT IT SHOULD BE +4970-29093-0005-2098: SOMETIMES HE THOUGHT HE WOULD LIKE TO STAND IN A CONSPICUOUS PULPIT AND HUMBLY PREACH THE GOSPEL OF REPENTANCE AND IT EVEN CROSSED HIS MIND THAT IT WOULD BE NOBLE TO GIVE HIMSELF TO A MISSIONARY LIFE TO SOME BENIGHTED REGION WHERE THE DATE PALM (GROWS->GROVES) AND THE NIGHTINGALE'S VOICE IS IN TUNE AND THE (BUL BUL->BULBUL) SINGS ON THE OFF NIGHTS +4970-29093-0006-2099: LAW SEEMED TO HIM WELL ENOUGH AS A SCIENCE BUT HE NEVER 
COULD DISCOVER A PRACTICAL CASE WHERE IT APPEARED TO HIM WORTH WHILE TO GO TO LAW AND ALL THE CLIENTS WHO STOPPED WITH THIS NEW CLERK (IN->AND) THE ANTE ROOM OF THE LAW OFFICE WHERE HE WAS WRITING PHILIP INVARIABLY ADVISED TO SETTLE NO MATTER HOW BUT (SETTLE->SETTLED) GREATLY TO THE DISGUST OF HIS EMPLOYER WHO KNEW THAT JUSTICE BETWEEN MAN AND MAN COULD ONLY BE ATTAINED BY THE RECOGNIZED PROCESSES WITH THE ATTENDANT (FEES->BEES) +4970-29093-0007-2100: IT IS SUCH A NOBLE AMBITION THAT IT IS A PITY IT HAS USUALLY SUCH A SHALLOW FOUNDATION +4970-29093-0008-2101: HE WANTED TO BEGIN AT THE TOP OF THE LADDER +4970-29093-0009-2102: PHILIP THEREFORE READ DILIGENTLY IN THE ASTOR LIBRARY PLANNED LITERARY WORKS THAT SHOULD COMPEL ATTENTION AND (NURSED->NURSE) HIS GENIUS +4970-29093-0010-2103: HE HAD NO FRIEND WISE ENOUGH TO TELL HIM TO STEP INTO THE DORKING CONVENTION (THEN->THAN) IN SESSION MAKE A SKETCH OF THE MEN AND WOMEN ON THE PLATFORM AND TAKE IT TO THE EDITOR OF THE DAILY (GRAPEVINE->GRAPE VINE) AND SEE WHAT HE COULD GET A LINE FOR IT +4970-29093-0011-2104: (O->OH) VERY WELL SAID (GRINGO->GREENOW) TURNING AWAY WITH A SHADE OF CONTEMPT YOU'LL FIND IF YOU ARE GOING INTO LITERATURE AND NEWSPAPER WORK THAT YOU CAN'T AFFORD A CONSCIENCE LIKE THAT +4970-29093-0012-2105: BUT PHILIP DID AFFORD IT AND HE WROTE (THANKING->THINKING) HIS FRIENDS AND DECLINING BECAUSE HE SAID THE POLITICAL SCHEME WOULD FAIL AND OUGHT TO FAIL +4970-29093-0013-2106: AND HE WENT BACK TO HIS BOOKS AND TO HIS WAITING FOR AN OPENING LARGE ENOUGH FOR HIS DIGNIFIED ENTRANCE INTO THE LITERARY WORLD +4970-29093-0014-2107: WELL I'M GOING AS AN ENGINEER YOU (CAN->COULD) GO AS ONE +4970-29093-0015-2108: YOU CAN BEGIN BY CARRYING A ROD AND PUTTING DOWN THE FIGURES +4970-29093-0016-2109: NO (ITS->IT'S) NOT TOO SOON +4970-29093-0017-2110: I'VE BEEN READY TO GO ANYWHERE FOR SIX MONTHS +4970-29093-0018-2111: THE TWO YOUNG MEN WHO WERE BY THIS TIME FULL OF THE (ADVENTURE->ADVENTURER) WENT DOWN TO THE WALL STREET OFFICE OF HENRY'S UNCLE AND HAD A TALK WITH THAT WILY OPERATOR +4970-29093-0019-2112: THE NIGHT WAS SPENT IN PACKING UP AND WRITING LETTERS FOR PHILIP WOULD NOT TAKE SUCH AN IMPORTANT STEP WITHOUT INFORMING HIS FRIENDS +4970-29093-0020-2113: WHY IT'S (IN->A) MISSOURI SOMEWHERE ON THE FRONTIER I THINK WE'LL GET A MAP +4970-29093-0021-2114: I WAS AFRAID IT WAS NEARER HOME +4970-29093-0022-2115: HE KNEW HIS UNCLE WOULD BE GLAD TO HEAR THAT HE HAD AT LAST TURNED HIS THOUGHTS TO A PRACTICAL MATTER +4970-29093-0023-2116: HE WELL KNEW THE PERILS OF THE FRONTIER THE SAVAGE STATE OF SOCIETY THE LURKING INDIANS AND THE DANGERS OF FEVER +4970-29095-0000-2054: SHE WAS TIRED OF OTHER THINGS +4970-29095-0001-2055: SHE TRIED THIS MORNING AN AIR OR TWO UPON THE PIANO (SANG->SAYING) A SIMPLE SONG IN A SWEET BUT SLIGHTLY METALLIC VOICE AND THEN SEATING HERSELF BY THE OPEN WINDOW READ PHILIP'S LETTER +4970-29095-0002-2056: WELL MOTHER SAID THE YOUNG STUDENT LOOKING UP WITH A SHADE OF IMPATIENCE +4970-29095-0003-2057: I HOPE THEE TOLD THE ELDERS THAT FATHER AND I ARE RESPONSIBLE FOR THE PIANO AND THAT MUCH AS THEE LOVES MUSIC THEE IS NEVER IN THE ROOM WHEN IT IS PLAYED +4970-29095-0004-2058: I HEARD FATHER TELL COUSIN ABNER THAT HE WAS WHIPPED SO OFTEN FOR WHISTLING WHEN HE WAS A BOY THAT HE WAS DETERMINED TO HAVE WHAT COMPENSATION HE COULD GET NOW +4970-29095-0005-2059: THY WAYS GREATLY TRY ME RUTH AND ALL THY RELATIONS +4970-29095-0006-2060: IS THY FATHER WILLING THEE SHOULD GO AWAY TO A SCHOOL OF THE WORLD'S PEOPLE +4970-29095-0007-2061: I HAVE NOT 
ASKED HIM RUTH REPLIED WITH A LOOK THAT MIGHT IMPLY THAT SHE WAS ONE OF THOSE DETERMINED LITTLE BODIES WHO FIRST MADE UP HER OWN MIND AND THEN COMPELLED OTHERS TO MAKE UP THEIRS IN ACCORDANCE WITH HERS +4970-29095-0008-2062: MOTHER (I'M->I AM) GOING TO STUDY MEDICINE +4970-29095-0009-2063: MARGARET BOLTON ALMOST LOST FOR A MOMENT HER HABITUAL PLACIDITY +4970-29095-0010-2064: (THEE->THE) STUDY MEDICINE +4970-29095-0011-2065: DOES THEE THINK THEE COULD STAND IT SIX MONTHS +4970-29095-0012-2066: AND BESIDES SUPPOSE THEE DOES LEARN MEDICINE +4970-29095-0013-2067: I WILL (PRACTICE->PRACTISE) IT +4970-29095-0014-2068: (WHERE->WHERE'S) THEE AND THY FAMILY ARE KNOWN +4970-29095-0015-2069: IF I CAN GET (PATIENTS->PATIENCE) +4970-29095-0016-2070: RUTH SAT QUITE STILL FOR A TIME WITH FACE (INTENT->AND TENT) AND FLUSHED IT WAS OUT NOW +4970-29095-0017-2071: THE (SIGHT SEERS->SIGHTSEERS) RETURNED (IN->AND) HIGH SPIRITS FROM THE CITY +4970-29095-0018-2072: RUTH ASKED THE ENTHUSIASTS IF THEY WOULD LIKE TO LIVE IN SUCH A SOUNDING (MAUSOLEUM->MUSOLEUM) WITH ITS GREAT HALLS AND ECHOING ROOMS AND NO COMFORTABLE PLACE IN IT FOR THE ACCOMMODATION OF ANY BODY +4970-29095-0019-2073: AND THEN THERE WAS BROAD STREET +4970-29095-0020-2074: THERE (*->IS) CERTAINLY WAS NO END TO IT AND EVEN RUTH WAS (PHILADELPHIAN->PHILADELPHIA) ENOUGH TO BELIEVE THAT A STREET OUGHT NOT TO HAVE ANY END OR ARCHITECTURAL (POINT->BLINT) UPON WHICH THE WEARY EYE COULD REST +4970-29095-0021-2075: BUT NEITHER SAINT (GIRARD->GERARD) NOR BROAD STREET NEITHER WONDERS OF THE (MINT->MENT) NOR THE GLORIES OF THE HALL WHERE THE GHOSTS OF OUR FATHERS SIT ALWAYS SIGNING THE DECLARATION (IMPRESSED->IMPRESS) THE (VISITORS->VISITOR) SO MUCH AS THE SPLENDORS OF THE CHESTNUT STREET WINDOWS AND THE BARGAINS ON EIGHTH STREET +4970-29095-0022-2076: IS THEE GOING TO THE YEARLY MEETING RUTH ASKED ONE OF THE GIRLS +4970-29095-0023-2077: I HAVE NOTHING TO WEAR REPLIED (THAT->THE) DEMURE PERSON +4970-29095-0024-2078: IT HAS OCCUPIED MOTHER A LONG TIME TO FIND (AT->*) THE SHOPS THE EXACT SHADE FOR HER NEW BONNET +4970-29095-0025-2079: AND THEE WON'T GO WHY SHOULD I +4970-29095-0026-2080: IF I GO TO MEETING AT ALL I LIKE BEST TO SIT IN THE QUIET OLD HOUSE IN GERMANTOWN WHERE THE WINDOWS ARE ALL OPEN AND I CAN SEE THE TREES AND (HEAR->HERE) THE STIR OF THE LEAVES +4970-29095-0027-2081: IT'S SUCH A CRUSH AT THE YEARLY MEETING AT ARCH STREET AND THEN THERE'S THE ROW OF SLEEK LOOKING YOUNG MEN WHO (LINE->LIE IN) THE CURBSTONE AND STARE AT US AS WE COME OUT +4970-29095-0028-2082: HE DOESN'T SAY BUT IT'S ON THE FRONTIER AND ON THE MAP EVERYTHING BEYOND IT IS MARKED INDIANS AND DESERT AND LOOKS AS DESOLATE AS A (WEDNESDAY->WIND ZAY) MEETING (HUMPH->*) IT WAS TIME FOR HIM TO DO SOMETHING +4970-29095-0029-2083: IS HE GOING TO START A DAILY NEWSPAPER AMONG THE (KICK A POOS->KICKAPOOS) +4970-29095-0030-2084: FATHER (THEE'S UNJUST TO->THESE UNJUSTI) PHILIP HE'S GOING INTO BUSINESS +4970-29095-0031-2085: HE DOESN'T SAY EXACTLY WHAT IT IS SAID RUTH A LITTLE DUBIOUSLY BUT IT'S SOMETHING ABOUT LAND AND RAILROADS AND (THEE->HE) KNOWS FATHER THAT FORTUNES ARE MADE NOBODY KNOWS EXACTLY HOW IN A NEW COUNTRY +4970-29095-0032-2086: (BUT->THAT) PHILIP IS HONEST AND HE HAS TALENT ENOUGH IF HE WILL STOP SCRIBBLING TO MAKE HIS WAY +4970-29095-0033-2087: WHAT A (BOX WOMEN->BOXWOMEN) ARE PUT INTO MEASURED FOR IT AND (PUT IN->PUTTING) YOUNG IF WE GO ANYWHERE IT'S IN A BOX VEILED AND PINIONED AND SHUT IN BY DISABILITIES +4970-29095-0034-2088: WHY SHOULD I (RUST->REST) AND BE STUPID AND SIT IN 
(INACTION->AN ACTION) BECAUSE I AM A GIRL +4970-29095-0035-2089: AND IF I HAD A FORTUNE WOULD THEE WANT ME TO LEAD A USELESS LIFE +4970-29095-0036-2090: HAS (THEE->THE) CONSULTED THY MOTHER ABOUT A CAREER I SUPPOSE IT IS A CAREER (*->OF) THEE WANTS +4970-29095-0037-2091: BUT THAT WISE AND PLACID WOMAN UNDERSTOOD THE SWEET REBEL A GREAT DEAL BETTER THAN RUTH UNDERSTOOD HERSELF +4970-29095-0038-2092: RUTH WAS GLAD TO HEAR THAT PHILIP HAD MADE A PUSH INTO THE WORLD AND SHE WAS SURE THAT HIS TALENT AND COURAGE WOULD MAKE (A WAY->AWAY) FOR HIM +4992-23283-0000-2140: BUT THE MORE FORGETFULNESS HAD THEN PREVAILED THE MORE POWERFUL WAS THE FORCE OF REMEMBRANCE WHEN SHE AWOKE +4992-23283-0001-2141: MISS MILNER'S HEALTH IS NOT GOOD +4992-23283-0002-2142: SAID MISSUS (HORTON->WHARTON) A FEW MINUTES AFTER +4992-23283-0003-2143: SO THERE IS TO ME ADDED SANDFORD WITH A SARCASTIC SNEER +4992-23283-0004-2144: AND YET YOU MUST OWN HER (BEHAVIOUR->BEHAVIOR) HAS WARRANTED THEM HAS IT NOT BEEN IN THIS PARTICULAR INCOHERENT AND UNACCOUNTABLE +4992-23283-0005-2145: NOT THAT I KNOW OF NOT ONE MORE THAT I KNOW OF HE REPLIED WITH ASTONISHMENT AT WHAT SHE HAD INSINUATED AND YET WITH A PERFECT ASSURANCE THAT SHE WAS IN THE WRONG +4992-23283-0006-2146: PERHAPS I AM MISTAKEN ANSWERED SHE +4992-23283-0007-2147: TO ASK ANY MORE QUESTIONS OF YOU I BELIEVE WOULD BE UNFAIR +4992-23283-0008-2148: HE SEEMED TO WAIT FOR HER REPLY BUT AS SHE MADE NONE HE PROCEEDED +4992-23283-0009-2149: (OH->O) MY LORD CRIED MISS WOODLEY WITH A MOST FORCIBLE ACCENT YOU ARE THE LAST (PERSON->PERSONAL) ON EARTH SHE WOULD PARDON ME FOR (ENTRUSTING->INTRUSTING) +4992-23283-0010-2150: BUT IN SUCH A CASE MISS MILNER'S ELECTION OF A HUSBAND SHALL NOT DIRECT MINE +4992-23283-0011-2151: IF SHE DOES NOT KNOW HOW TO ESTIMATE HER OWN VALUE I DO +4992-23283-0012-2152: INDEPENDENT OF HER FORTUNE SHE HAS BEAUTY TO CAPTIVATE THE HEART OF ANY MAN AND WITH ALL HER FOLLIES SHE HAS A FRANKNESS IN HER MANNER AN UNAFFECTED WISDOM IN HER THOUGHTS (A->OF) VIVACITY IN HER CONVERSATION AND WITHAL A SOFTNESS IN HER DEMEANOUR THAT MIGHT ALONE ENGAGE THE AFFECTIONS OF A MAN OF THE NICEST SENTIMENTS AND THE STRONGEST UNDERSTANDING +4992-23283-0013-2153: MY LORD MISS MILNER'S TASTE IS NOT A DEPRAVED ONE IT IS BUT TOO REFINED +4992-23283-0014-2154: WHAT CAN YOU MEAN BY THAT MISS WOODLEY YOU TALK MYSTERIOUSLY +4992-23283-0015-2155: IS SHE NOT AFRAID THAT I WILL THWART HER INCLINATIONS +4992-23283-0016-2156: AGAIN HE SEARCHED HIS OWN THOUGHTS NOR INEFFECTUALLY AS BEFORE +4992-23283-0017-2157: MISS WOODLEY WAS TOO LITTLE VERSED IN THE SUBJECT TO KNOW THIS WOULD HAVE BEEN NOT TO LOVE AT ALL AT LEAST NOT TO THE EXTENT OF BREAKING THROUGH ENGAGEMENTS AND ALL THE VARIOUS OBSTACLES THAT STILL (MILITATED->MITIGATED) AGAINST THEIR UNION +4992-23283-0018-2158: TO RELIEVE HER FROM BOTH HE LAID HIS HAND WITH FORCE UPON HIS HEART AND SAID DO YOU BELIEVE ME +4992-23283-0019-2159: I WILL MAKE NO UNJUST USE OF WHAT I KNOW HE REPLIED WITH FIRMNESS I BELIEVE YOU MY LORD +4992-23283-0020-2160: I HAVE NEVER YET HOWEVER BEEN VANQUISHED BY THEM AND EVEN UPON THIS OCCASION MY REASON SHALL COMBAT THEM TO THE LAST AND MY REASON SHALL FAIL ME BEFORE I DO WRONG +4992-41797-0000-2117: YES DEAD THESE FOUR YEARS (AN->AND) A GOOD JOB FOR HER TOO +4992-41797-0001-2118: WELL AS I SAY IT'S AN AWFUL QUEER WORLD THEY CLAP ALL THE BURGLARS (INTO->AND) JAIL (AND->*) THE MURDERERS (AND->IN) THE (WIFE->WHITE) BEATERS (I'VE->I) ALLERS THOUGHT A GENTLE REPROOF WOULD BE ENOUGH PUNISHMENT FOR A WIFE (BEATER->PETER) 
CAUSE HE PROBABLY HAS A LOT (O->OF) PROVOCATION THAT NOBODY KNOWS AND THE (FIREBUGS->FIRE BUGS) CAN'T THINK (O->OF) THE RIGHT NAME SOMETHING LIKE (CENDENARIES AN->SENDIARIES AND) THE BREAKERS (O->OF) THE (PEACE AN->PIECE AND) WHAT NOT (AN->AND) YET THE LAW HAS (NOTHIN->NOTHING) TO SAY TO A MAN LIKE (HEN LORD->HANDLED) +4992-41797-0002-2119: GRANDFATHER WAS ALEXANDER CAREY L (L->*) D DOCTOR OF LAWS THAT IS +4992-41797-0003-2120: MISTER POPHAM LAID DOWN HIS BRUSH +4992-41797-0004-2121: I (SWAN TO MAN->SWAY INTO MEN) HE EJACULATED IF YOU DON'T WORK HARD YOU CAN'T KEEP UP WITH THE (TIMES->TUBS) DOCTOR OF LAWS +4992-41797-0005-2122: DONE HE AIN'T DONE A THING (HE'D OUGHTER SENCE->HE ORDERS SINCE) HE WAS BORN +4992-41797-0006-2123: HE KEEPS THE THOU SHALT NOT (COMMANDMENTS->COMMANDS) FIRST RATE HEN LORD DOES +4992-41797-0007-2124: HE (GIVE->GAVE) UP HIS POSITION AND SHUT THE FAMILY UP IN THAT TOMB OF A HOUSE (SO T->SEWED) HE (COULD->COULDN'T) STUDY HIS BOOKS +4992-41797-0008-2125: MISTER POPHAM EXAGGERATED NOTHING BUT ON THE CONTRARY LEFT MUCH UNSAID IN HIS NARRATIVE OF THE FAMILY AT THE HOUSE OF LORDS +4992-41797-0009-2126: HENRY LORD WITH THE DEGREE OF (PH->P) D (*->D) TO HIS CREDIT HAD BEEN PROFESSOR OF ZOOLOGY AT A NEW ENGLAND COLLEGE BUT HAD RESIGNED HIS POST IN ORDER TO WRITE A SERIES OF SCIENTIFIC TEXT BOOKS +4992-41797-0010-2127: ALWAYS IRRITABLE COLD INDIFFERENT HE HAD GROWN RAPIDLY MORE SO AS YEARS WENT ON +4992-41797-0011-2128: WHATEVER (APPEALED->APPEAL) TO HER SENSE OF BEAUTY WAS STRAIGHTWAY TRANSFERRED TO PAPER OR CANVAS +4992-41797-0012-2129: SHE IS WILD TO KNOW HOW TO DO (THINGS->THING) +4992-41797-0013-2130: SHE MAKES EFFORT AFTER EFFORT TREMBLING WITH EAGERNESS AND WHEN SHE FAILS TO REPRODUCE WHAT SHE SEES SHE WORKS HERSELF INTO A FRENZY OF GRIEF AND DISAPPOINTMENT +4992-41797-0014-2131: WHEN SHE COULD NOT MAKE A RABBIT OR A BIRD LOOK REAL ON PAPER SHE SEARCHED IN HER FATHER'S BOOKS FOR PICTURES OF ITS BONES +4992-41797-0015-2132: CYRIL THERE MUST BE SOME BETTER WAY OF DOING I JUST DRAW THE OUTLINE OF AN ANIMAL AND THEN I PUT HAIRS OR FEATHERS ON IT THEY HAVE NO BODIES +4992-41797-0016-2133: THEY COULDN'T RUN (NOR->OR) MOVE THEY'RE JUST PASTEBOARD +4992-41797-0017-2134: HE WOULDN'T SEARCH SO DON'T WORRY REPLIED CYRIL QUIETLY AND THE TWO LOOKED AT EACH OTHER AND KNEW THAT IT WAS SO +4992-41797-0018-2135: THERE IN THE CEDAR HOLLOW THEN LIVED OLIVE LORD AN ANGRY RESENTFUL LITTLE CREATURE WEIGHED DOWN BY A FIERCE SENSE OF INJURY +4992-41797-0019-2136: (OLIVE'S->ALL OF HIS) MOURNFUL BLACK EYES MET NANCY'S SPARKLING BROWN ONES +4992-41797-0020-2137: NANCY'S CURLY CHESTNUT CROP SHONE IN THE SUN AND OLIVE'S THICK BLACK (PLAITS->PLATES) LOOKED BLACKER BY CONTRAST +4992-41797-0021-2138: (SHE'S->SHE IS) WONDERFUL MORE WONDERFUL (THAN->IN) ANYBODY WE'VE EVER SEEN ANYWHERE AND SHE DRAWS BETTER THAN THE TEACHER IN CHARLESTOWN +4992-41797-0022-2139: SHE'S OLDER THAN I AM BUT SO TINY AND SAD AND SHY THAT SHE SEEMS LIKE A CHILD +4992-41806-0000-2161: NATTY HARMON TRIED THE KITCHEN PUMP SECRETLY SEVERAL TIMES DURING THE EVENING FOR THE WATER HAD TO RUN UP HILL ALL THE WAY FROM THE WELL TO THE KITCHEN SINK AND HE BELIEVED THIS TO BE A CONTINUAL MIRACLE THAT MIGHT GIVE OUT AT ANY MOMENT +4992-41806-0001-2162: TO NIGHT THERE WAS NO NEED OF EXTRA HEAT AND THERE WERE GREAT CEREMONIES TO BE OBSERVED IN LIGHTING THE FIRES ON THE HEARTHSTONES +4992-41806-0002-2163: THEY BEGAN WITH THE ONE IN THE FAMILY SITTING ROOM COLONEL WHEELER RALPH THURSTON MISTER AND MISSUS BILL HARMON WITH (NATTY->NANNIE) AND 
(RUFUS->RUFFUS) MISTER AND MISSUS POPHAM WITH DIGBY AND (LALLIE->LILY) JOY ALL STANDING IN ADMIRING GROUPS AND THRILLING WITH DELIGHT AT THE ORDER OF EVENTS +4992-41806-0003-2164: KATHLEEN WAVED THE TORCH TO AND FRO AS SHE RECITED SOME BEAUTIFUL LINES WRITTEN FOR SOME SUCH PURPOSE AS THAT WHICH CALLED THEM TOGETHER TO NIGHT +4992-41806-0004-2165: (BURN->BURNE) FIRE BURN FLICKER FLICKER FLAME +4992-41806-0005-2166: NEXT CAME OLIVE'S TURN TO HELP IN THE CEREMONIES +4992-41806-0006-2167: RALPH THURSTON HAD FOUND A LINE OF LATIN FOR THEM IN HIS BELOVED (HORACE TIBI SPLENDET->HORNS TIBBY SPLENDID) FOCUS FOR YOU THE HEARTH FIRE SHINES +4992-41806-0007-2168: OLIVE HAD PAINTED THE MOTTO ON A LONG NARROW PANEL OF CANVAS AND GIVING IT TO MISTER POPHAM STOOD BY THE FIRESIDE WHILE HE DEFTLY FITTED IT INTO THE PLACE PREPARED FOR IT +4992-41806-0008-2169: (OLIVE->AH) HAS ANOTHER LOVELY GIFT FOR THE YELLOW HOUSE SAID MOTHER CAREY RISING AND TO CARRY OUT THE NEXT PART OF THE PROGRAMME WE SHALL HAVE TO GO IN PROCESSION UPSTAIRS TO MY BEDROOM +4992-41806-0009-2170: EXCLAIMED BILL HARMON TO HIS WIFE AS THEY WENT THROUGH THE LIGHTED HALL +4992-41806-0010-2171: AIN'T THEY THE GREATEST +4992-41806-0011-2172: MOTHER CAREY POURED COFFEE NANCY CHOCOLATE AND THE (OTHERS HELPED SERVE->OTHER SELF SERVED) THE SANDWICHES AND CAKE DOUGHNUTS AND TARTS +4992-41806-0012-2173: AT THAT MOMENT THE GENTLEMAN ENTERED BEARING A HUGE OBJECT CONCEALED BY A PIECE OF GREEN FELT +4992-41806-0013-2174: APPROACHING THE DINING TABLE HE CAREFULLY PLACED THE ARTICLE IN THE CENTRE AND REMOVED THE CLOTH +4992-41806-0014-2175: THINKS (I TO->OUT OF) MYSELF I NEVER SEEN ANYTHING (OSH POPHAM->I) COULDN'T (MEND->MEN) IF HE TOOK TIME ENOUGH AND GLUE ENOUGH SO I CARRIED THIS LITTLE FELLER HOME IN A BUSHEL BASKET ONE NIGHT LAST MONTH (AN->AND) I'VE SPENT ELEVEN (EVENIN'S PUTTIN->EVENINGS PUTTING) HIM TOGETHER +4992-41806-0015-2176: MISSUS HARMON THOUGHT HE SANG TOO MUCH AND TOLD HER (HUSBAND PRIVATELY->HUSBABLY) THAT IF HE WAS A CANARY BIRD SHE SHOULD WANT TO KEEP A TABLE COVER (OVER->OF) HIS HEAD MOST OF THE TIME BUT HE WAS IMMENSELY POPULAR WITH THE REST OF HIS AUDIENCE +4992-41806-0016-2177: THE FACE OF THE MAHOGANY SHONE WITH DELIGHT AND WHY NOT WHEN IT WAS DOING EVERYTHING ALMOST EVERYTHING WITHIN THE SCOPE OF A PIANO AND YET THE FAMILY HAD ENJOYED WEEKS OF GOOD NOURISHING MEALS ON WHAT HAD BEEN SAVED BY ITS EXERTIONS +4992-41806-0017-2178: WE SHUT OUR EYES THE FLOWERS BLOOM ON WE MURMUR BUT THE (CORN EARS->CORNIERS) FILL WE CHOOSE THE SHADOW BUT THE SUN THAT CASTS IT SHINES BEHIND US STILL +5105-28233-0000-1649: LENGTH OF SERVICE FOURTEEN YEARS THREE MONTHS AND FIVE DAYS +5105-28233-0001-1650: HE SEEMED BORN TO PLEASE WITHOUT BEING CONSCIOUS OF THE POWER HE POSSESSED +5105-28233-0002-1651: IT MUST BE OWNED AND NO ONE WAS MORE READY TO CONFESS IT THAN HIMSELF THAT HIS LITERARY ATTAINMENTS WERE BY NO MEANS OF A HIGH ORDER +5105-28233-0003-1652: WE DON'T (SPIN->SPEND) TOPS (IS->AS) A FAVORITE SAYING AMONGST ARTILLERY OFFICERS INDICATING THAT THEY DO NOT SHIRK THEIR DUTY BY FRIVOLOUS PURSUITS BUT IT MUST BE CONFESSED THAT SERVADAC BEING NATURALLY IDLE WAS VERY MUCH GIVEN TO SPINNING TOPS +5105-28233-0004-1653: ONCE (IN->AN) ACTION HE WAS LEADING A DETACHMENT OF INFANTRY THROUGH AN (INTRENCHMENT->ENTRENCHMENT) +5105-28233-0005-1654: SOMETIMES HE WOULD WANDER ON FOOT UPON THE SANDY SHORE AND SOMETIMES HE WOULD ENJOY A RIDE ALONG THE SUMMIT OF THE CLIFF ALTOGETHER BEING IN NO HURRY AT ALL TO BRING HIS TASK TO AN END +5105-28233-0006-1655: NO 
CATHEDRAL NOT EVEN BURGOS ITSELF COULD VIE WITH THE CHURCH AT (MONTMARTRE->MOUNT MARSHRA) +5105-28233-0007-1656: BEN ZOOF'S MOST AMBITIOUS DESIRE WAS TO INDUCE THE CAPTAIN TO GO WITH HIM AND END HIS DAYS IN HIS MUCH LOVED HOME AND SO INCESSANTLY WERE SERVADAC'S EARS BESIEGED WITH DESCRIPTIONS OF THE UNPARALLELED BEAUTIES AND ADVANTAGES OF THIS EIGHTEENTH (ARRONDISSEMENT->ARE UNDISSIMA) OF PARIS THAT HE COULD SCARCELY HEAR THE NAME OF (MONTMARTRE->MONTMARTRA) WITHOUT A CONSCIOUS THRILL OF AVERSION +5105-28233-0008-1657: WHEN A PRIVATE IN THE EIGHTH CAVALRY HE HAD BEEN ON THE POINT OF QUITTING THE ARMY AT TWENTY EIGHT YEARS OF AGE BUT UNEXPECTEDLY HE HAD BEEN APPOINTED ORDERLY TO CAPTAIN SERVADAC +5105-28233-0009-1658: THE BOND OF UNION THUS EFFECTED COULD NEVER BE SEVERED AND ALTHOUGH BEN (ZOOF'S->ZEF'S) ACHIEVEMENTS HAD FAIRLY EARNED HIM THE RIGHT OF RETIREMENT HE FIRMLY DECLINED ALL (HONORS->HONOURS) OR ANY PENSION THAT MIGHT PART HIM FROM HIS SUPERIOR OFFICER +5105-28233-0010-1659: (UNLIKE->I MAKE) HIS MASTER HE MADE NO PRETENSION TO ANY GIFT OF POETIC POWER BUT HIS INEXHAUSTIBLE MEMORY MADE HIM A LIVING ENCYCLOPAEDIA AND FOR HIS STOCK OF ANECDOTES AND TROOPER'S TALES HE WAS MATCHLESS +5105-28240-0000-1624: FAST AS HIS LEGS COULD CARRY HIM SERVADAC HAD MADE HIS WAY TO THE TOP OF THE CLIFF +5105-28240-0001-1625: IT WAS QUITE TRUE THAT A VESSEL WAS IN SIGHT HARDLY MORE THAN SIX MILES FROM THE SHORE BUT OWING TO THE INCREASE IN THE EARTH'S CONVEXITY AND THE CONSEQUENT LIMITATION OF THE RANGE OF VISION THE RIGGING OF THE TOPMASTS ALONE WAS VISIBLE ABOVE THE WATER +5105-28240-0002-1626: EXCLAIMED SERVADAC KEEPING HIS EYE UNMOVED AT HIS TELESCOPE +5105-28240-0003-1627: SHE IS UNDER (SAIL->SALE) BUT SHE IS COUNT TIMASCHEFF'S YACHT HE WAS RIGHT +5105-28240-0004-1628: IF THE COUNT WERE ON BOARD A STRANGE FATALITY WAS BRINGING HIM TO THE PRESENCE OF HIS RIVAL +5105-28240-0005-1629: HE RECKONED THEREFORE NOT ONLY UPON ASCERTAINING THE EXTENT OF THE LATE CATASTROPHE BUT UPON LEARNING ITS CAUSE +5105-28240-0006-1630: THE WIND BEING ADVERSE THE DOBRYNA DID NOT MAKE VERY RAPID PROGRESS BUT AS THE WEATHER IN SPITE OF A FEW CLOUDS REMAINED CALM AND THE SEA WAS QUITE SMOOTH SHE WAS ENABLED TO HOLD A STEADY COURSE +5105-28240-0007-1631: SERVADAC TOOK IT FOR GRANTED THAT THE DOBRYNA WAS ENDEAVORING TO PUT IN +5105-28240-0008-1632: A NARROW CHANNEL FORMED A PASSAGE THROUGH THE RIDGE OF ROCKS THAT PROTECTED IT FROM THE OPEN SEA AND WHICH EVEN IN THE ROUGHEST WEATHER WOULD (ENSURE->INSURE) THE CALMNESS OF ITS WATERS +5105-28240-0009-1633: SLIGHTLY CHANGING HER COURSE SHE FIRST STRUCK HER MAINSAIL AND IN ORDER TO FACILITATE THE MOVEMENTS OF HER HELMSMAN SOON CARRIED NOTHING BUT HER TWO TOPSAILS BRIGANTINE AND JIB +5105-28240-0010-1634: CAPTAIN SERVADAC HASTENED (TOWARDS->TOWARD) HIM +5105-28240-0011-1635: I LEFT YOU ON A CONTINENT AND HERE I HAVE THE HONOR OF FINDING YOU ON AN ISLAND +5105-28240-0012-1636: NEVER MIND NOW INTERPOSED THE CAPTAIN WE WILL TALK OF THAT BY AND BY +5105-28240-0013-1637: NOTHING MORE THAN YOU KNOW YOURSELF +5105-28240-0014-1638: ARE YOU CERTAIN THAT THIS IS THE MEDITERRANEAN +5105-28240-0015-1639: FOR SOME MOMENTS HE SEEMED PERFECTLY STUPEFIED (*->AND) THEN RECOVERING HIMSELF HE BEGAN TO OVERWHELM THE COUNT WITH A TORRENT OF QUESTIONS +5105-28240-0016-1640: TO ALL THESE INQUIRIES THE COUNT RESPONDED IN THE AFFIRMATIVE +5105-28240-0017-1641: SOME MYSTERIOUS FORCE SEEMED TO HAVE BROUGHT ABOUT A CONVULSION OF THE ELEMENTS +5105-28240-0018-1642: YOU WILL TAKE ME ON BOARD COUNT WILL YOU NOT 
+5105-28240-0019-1643: MY YACHT IS AT YOUR SERVICE SIR EVEN SHOULD YOU REQUIRE TO MAKE A TOUR (ROUND->AROUND) THE WORLD +5105-28240-0020-1644: THE COUNT SHOOK HIS HEAD +5105-28240-0021-1645: BEFORE STARTING IT WAS INDISPENSABLE THAT THE ENGINE OF THE DOBRYNA SHOULD BE REPAIRED TO SAIL UNDER CANVAS ONLY WOULD IN CONTRARY WINDS AND ROUGH SEAS BE BOTH TEDIOUS AND DIFFICULT +5105-28240-0022-1646: IT WAS ON THE LAST DAY OF JANUARY THAT THE REPAIRS OF THE SCHOONER WERE COMPLETED +5105-28240-0023-1647: A SLIGHT DIMINUTION IN THE EXCESSIVELY HIGH TEMPERATURE WHICH HAD PREVAILED FOR THE LAST FEW WEEKS WAS THE ONLY APPARENT CHANGE IN THE GENERAL ORDER OF THINGS BUT WHETHER THIS WAS TO BE ATTRIBUTED TO ANY ALTERATION IN THE EARTH'S ORBIT WAS A QUESTION WHICH WOULD STILL REQUIRE SEVERAL DAYS TO DECIDE +5105-28240-0024-1648: DOUBTS NOW AROSE AND SOME DISCUSSION FOLLOWED WHETHER OR NOT IT WAS DESIRABLE FOR BEN ZOOF TO ACCOMPANY HIS MASTER +5105-28241-0000-1604: HER SEA GOING QUALITIES WERE EXCELLENT AND WOULD HAVE AMPLY SUFFICED FOR A CIRCUMNAVIGATION OF THE GLOBE +5105-28241-0001-1605: AFTER AN APPRENTICESHIP ON A MERCHANT SHIP HE HAD ENTERED THE IMPERIAL NAVY AND HAD ALREADY REACHED THE RANK OF LIEUTENANT WHEN THE COUNT APPOINTED HIM TO THE CHARGE OF HIS OWN PRIVATE YACHT IN WHICH HE WAS ACCUSTOMED TO SPEND BY (FAR THE->FARTHER) GREATER PART OF HIS TIME THROUGHOUT THE WINTER GENERALLY CRUISING IN THE MEDITERRANEAN WHILST IN THE SUMMER HE VISITED MORE NORTHERN WATERS +5105-28241-0002-1606: THE LATE ASTOUNDING EVENTS HOWEVER HAD RENDERED PROCOPE MANIFESTLY UNEASY AND NOT THE LESS SO FROM HIS CONSCIOUSNESS THAT THE COUNT SECRETLY PARTOOK OF HIS OWN ANXIETY +5105-28241-0003-1607: STEAM UP AND CANVAS SPREAD THE SCHOONER STARTED (EASTWARDS->EASTWARD) +5105-28241-0004-1608: ALTHOUGH ONLY A MODERATE BREEZE WAS BLOWING THE SEA WAS ROUGH A CIRCUMSTANCE TO BE ACCOUNTED FOR ONLY BY THE DIMINUTION IN THE FORCE OF THE EARTH'S ATTRACTION RENDERING THE LIQUID (PARTICLES->PARTICLE) SO BUOYANT THAT BY THE MERE EFFECT OF OSCILLATION THEY WERE CARRIED TO A HEIGHT THAT WAS QUITE UNPRECEDENTED +5105-28241-0005-1609: FOR A FEW MILES SHE FOLLOWED THE LINE HITHERTO PRESUMABLY OCCUPIED BY THE COAST OF ALGERIA BUT NO LAND APPEARED TO THE SOUTH +5105-28241-0006-1610: THE LOG AND THE COMPASS THEREFORE WERE ABLE TO BE CALLED UPON TO DO THE WORK OF THE SEXTANT WHICH HAD BECOME UTTERLY USELESS +5105-28241-0007-1611: (THERE IS->THERE'S) NO FEAR OF THAT SIR +5105-28241-0008-1612: (*->THAT) THE EARTH HAS UNDOUBTEDLY ENTERED UPON A NEW ORBIT BUT SHE IS NOT INCURRING ANY PROBABLE RISK OF BEING PRECIPITATED (ONTO->ON TO) THE SUN +5105-28241-0009-1613: AND WHAT DEMONSTRATION DO YOU OFFER ASKED SERVADAC EAGERLY THAT IT WILL NOT HAPPEN +5105-28241-0010-1614: OCEAN (REIGNED->RAINED) SUPREME +5105-28241-0011-1615: ALL THE IMAGES OF HIS PAST LIFE FLOATED UPON HIS MEMORY HIS THOUGHTS SPED AWAY TO HIS NATIVE FRANCE ONLY TO RETURN AGAIN TO WONDER WHETHER THE DEPTHS OF OCEAN WOULD REVEAL ANY TRACES OF THE ALGERIAN METROPOLIS +5105-28241-0012-1616: IS IT NOT IMPOSSIBLE HE MURMURED ALOUD THAT ANY CITY SHOULD DISAPPEAR SO COMPLETELY +5105-28241-0013-1617: WOULD NOT THE LOFTIEST EMINENCES OF THE CITY AT LEAST BE VISIBLE +5105-28241-0014-1618: ANOTHER CIRCUMSTANCE WAS MOST REMARKABLE +5105-28241-0015-1619: TO THE SURPRISE OF ALL AND ESPECIALLY OF LIEUTENANT PROCOPE THE LINE INDICATED A BOTTOM AT A NEARLY UNIFORM DEPTH OF FROM FOUR TO FIVE FATHOMS AND ALTHOUGH THE SOUNDING WAS PERSEVERED WITH CONTINUOUSLY FOR MORE THAN TWO HOURS OVER A CONSIDERABLE AREA 
THE DIFFERENCES OF LEVEL WERE INSIGNIFICANT NOT CORRESPONDING IN ANY DEGREE TO WHAT WOULD BE EXPECTED OVER THE SITE OF A CITY THAT HAD BEEN TERRACED LIKE THE SEATS OF AN (AMPHITHEATER->AMPHITHEATRE) +5105-28241-0016-1620: YOU MUST SEE LIEUTENANT I SHOULD THINK THAT WE ARE NOT SO NEAR THE COAST OF ALGERIA AS YOU IMAGINED +5105-28241-0017-1621: AFTER PONDERING (AWHILE->A WHILE) HE SAID IF WE WERE FARTHER AWAY I SHOULD EXPECT TO FIND A DEPTH OF TWO OR THREE HUNDRED FATHOMS INSTEAD OF FIVE FATHOMS FIVE FATHOMS +5105-28241-0018-1622: ITS DEPTH REMAINED INVARIABLE STILL FOUR OR AT MOST FIVE FATHOMS AND ALTHOUGH ITS BOTTOM WAS ASSIDUOUSLY DREDGED IT WAS ONLY TO PROVE IT BARREN OF MARINE PRODUCTION OF ANY TYPE +5105-28241-0019-1623: NOTHING WAS TO BE DONE BUT TO PUT ABOUT AND RETURN (IN->AND) DISAPPOINTMENT (TOWARDS->TOWARD) THE NORTH +5142-33396-0000-898: AT ANOTHER TIME (HARALD->HAROLD) ASKED +5142-33396-0001-899: WHAT IS YOUR COUNTRY OLAF HAVE YOU ALWAYS BEEN A THRALL THE THRALL'S EYES FLASHED +5142-33396-0002-900: TWO HUNDRED WARRIORS FEASTED IN HIS HALL AND FOLLOWED HIM TO BATTLE +5142-33396-0003-901: THE REST OF YOU OFF A VIKING HE HAD THREE SHIPS +5142-33396-0004-902: THESE HE GAVE TO THREE OF MY BROTHERS +5142-33396-0005-903: BUT I STAYED THAT SPRING AND BUILT ME A BOAT +5142-33396-0006-904: I MADE HER (FOR->FALL) ONLY TWENTY OARS BECAUSE I THOUGHT FEW MEN WOULD FOLLOW ME FOR I WAS YOUNG FIFTEEN YEARS OLD +5142-33396-0007-905: AT THE PROW I CARVED THE HEAD WITH OPEN MOUTH AND FORKED TONGUE THRUST OUT +5142-33396-0008-906: I PAINTED THE EYES RED FOR ANGER +5142-33396-0009-907: THERE STAND SO I SAID AND GLARE AND HISS AT MY FOES +5142-33396-0010-908: IN THE STERN I (CURVED->CARVED) THE TAIL UP ALMOST AS HIGH AS THE HEAD +5142-33396-0011-909: THERE SHE SAT ON THE ROLLERS AS FAIR A SHIP AS I EVER SAW +5142-33396-0012-910: THEN I WILL GET ME A FARM AND (WILL WINTER->WE'LL WINNER) IN THAT LAND NOW WHO WILL FOLLOW ME +5142-33396-0013-911: HE IS BUT A BOY THE (MEN->MAN) SAID +5142-33396-0014-912: THIRTY MEN ONE AFTER ANOTHER RAISED THEIR HORNS AND SAID +5142-33396-0015-913: AS OUR BOAT FLASHED DOWN THE ROLLERS INTO THE WATER I MADE THIS SONG AND SANG IT +5142-33396-0016-914: SO WE (HARRIED->HURRIED) THE COAST OF NORWAY +5142-33396-0017-915: WE ATE (AT->IT) MANY MEN'S TABLES UNINVITED +5142-33396-0018-916: (MY->I) DRAGON'S BELLY IS NEVER FULL AND ON BOARD WENT THE (GOLD->GOL) +5142-33396-0019-917: OH IT IS BETTER TO LIVE ON THE SEA AND LET OTHER MEN RAISE YOUR CROPS AND COOK YOUR MEALS +5142-33396-0020-918: A HOUSE SMELLS OF SMOKE A (SHIP SMELLS->SHIP'S MILLS) OF FROLIC +5142-33396-0021-919: UP AND DOWN THE WATER WE WENT TO GET MUCH WEALTH AND MUCH FROLIC +5142-33396-0022-920: WHAT (OF->IS) THE FARM (OLAF->OLOFF) NOT YET I ANSWERED VIKING IS BETTER FOR SUMMER +5142-33396-0023-921: IT WAS SO DARK THAT I COULD SEE NOTHING BUT A FEW SPARKS ON THE HEARTH +5142-33396-0024-922: I STOOD WITH MY BACK TO THE WALL FOR I WANTED NO SWORD REACHING OUT OF THE DARK FOR ME +5142-33396-0025-923: COME COME I CALLED WHEN NO ONE OBEYED A FIRE +5142-33396-0026-924: MY MEN LAUGHED YES A STINGY (HOST->HOSE) +5142-33396-0027-925: HE ACTS AS THOUGH HE (HAD->IS) NOT EXPECTED US +5142-33396-0028-926: ON A BENCH IN A FAR CORNER WERE A DOZEN PEOPLE HUDDLED TOGETHER +5142-33396-0029-927: BRING IN THE TABLE WE ARE HUNGRY +5142-33396-0030-928: THE THRALLS WERE (BRINGING->RINGING) IN A GREAT POT OF MEAT +5142-33396-0031-929: THEY SET UP A CRANE OVER THE FIRE AND HUNG THE POT UPON IT AND WE SAT AND WATCHED IT BOIL WHILE WE JOKED AT 
LAST THE SUPPER BEGAN +5142-33396-0032-930: THE FARMER SAT GLOOMILY ON THE BENCH AND WOULD NOT EAT AND YOU CANNOT WONDER FOR HE SAW US PUTTING POTFULS OF HIS GOOD BEEF AND (BASKET LOADS->BASCULADES) OF BREAD (INTO->AND) OUR BIG MOUTHS +5142-33396-0033-931: YOU WOULD NOT EAT WITH US YOU CANNOT SAY NO TO HALF OF MY ALE I DRINK THIS TO YOUR HEALTH +5142-33396-0034-932: THEN I DRANK HALF OF THE HORNFUL AND (SENT->SET) THE REST ACROSS THE FIRE TO THE FARMER HE TOOK IT AND SMILED SAYING +5142-33396-0035-933: DID YOU EVER HAVE SUCH A LORDLY GUEST BEFORE I WENT ON +5142-33396-0036-934: SO I WILL GIVE OUT THIS LAW THAT MY MEN SHALL NEVER LEAVE YOU (ALONE->ALO) +5142-33396-0037-935: (HAKON->HAWKIN) THERE SHALL BE YOUR CONSTANT COMPANION FRIEND FARMER +5142-33396-0038-936: HE SHALL NOT LEAVE YOU DAY OR NIGHT WHETHER YOU ARE WORKING OR PLAYING OR SLEEPING +5142-33396-0039-937: I (NAMED->NAME) NINE OTHERS AND SAID +5142-33396-0040-938: AND THESE SHALL FOLLOW YOUR THRALLS IN THE SAME WAY +5142-33396-0041-939: SO I SET GUARDS OVER EVERY ONE IN THAT HOUSE +5142-33396-0042-940: SO NO TALES GOT OUT TO THE NEIGHBORS BESIDES IT WAS A LONELY PLACE AND BY GOOD LUCK NO ONE CAME THAT WAY +5142-33396-0043-941: THEIR EYES DANCED BIG (THORLEIF->TORE LEAFS) STOOD UP AND STRETCHED HIMSELF +5142-33396-0044-942: (I AM->I'M) STIFF WITH LONG SITTING HE SAID I ITCH FOR A FIGHT I TURNED TO THE FARMER +5142-33396-0045-943: THIS IS OUR LAST FEAST WITH YOU I SAID +5142-33396-0046-944: BY THE BEARD OF ODIN I CRIED YOU HAVE TAKEN OUR JOKE LIKE A MAN +5142-33396-0047-945: MY MEN POUNDED THE TABLE WITH THEIR FISTS +5142-33396-0048-946: BY THE HAMMER (OF->A) THOR SHOUTED GRIM (HERE->THERE) IS NO STINGY COWARD +5142-33396-0049-947: HERE FRIEND TAKE IT AND HE THRUST IT INTO THE FARMER'S HAND +5142-33396-0050-948: MAY YOU DRINK (HEART'S EASE->HEARTSEASE) FROM IT FOR MANY YEARS +5142-33396-0051-949: AND WITH IT I LEAVE YOU A NAME (SIF->SIFT) THE FRIENDLY I SHALL HOPE TO DRINK WITH YOU (SOMETIME->SOME TIME) IN VALHALLA +5142-33396-0052-950: HERE IS A RING FOR SIF THE FRIENDLY AND HERE IS A BRACELET (*->AND) A SWORD WOULD NOT BE ASHAMED TO HANG AT YOUR SIDE +5142-33396-0053-951: I TOOK FIVE GREAT BRACELETS OF GOLD FROM OUR TREASURE CHEST AND GAVE THEM TO (HIM->*) +5142-33396-0054-952: THAT IS THE BEST WAY TO DECIDE FOR THE SPEAR WILL ALWAYS POINT SOMEWHERE AND ONE THING IS AS GOOD AS ANOTHER +5142-33396-0055-953: THAT TIME IT POINTED US INTO YOUR FATHER'S SHIPS +5142-33396-0056-954: HERE THEY SAID IS A RASCAL WHO HAS BEEN HARRYING OUR COASTS +5142-33396-0057-955: WE SUNK HIS SHIP AND MEN BUT HIM WE BROUGHT TO YOU +5142-33396-0058-956: A ROBBER VIKING SAID THE KING AND (*->HE) SCOWLED AT ME +5142-33396-0059-957: YES AND WITH ALL YOUR FINGERS IT TOOK YOU A YEAR TO CATCH ME THE KING FROWNED MORE ANGRILY +5142-33396-0060-958: TAKE HIM OUT (THORKEL->TORQUAL) AND LET HIM TASTE YOUR SWORD +5142-33396-0061-959: YOUR MOTHER THE QUEEN WAS STANDING BY +5142-33396-0062-960: NOW SHE PUT HER HAND ON HIS ARM AND SMILED AND SAID +5142-33396-0063-961: AND WOULD HE NOT BE A GOOD GIFT FOR OUR BABY +5142-33396-0064-962: YOUR FATHER THOUGHT A MOMENT (THEN->AND) LOOKED AT YOUR MOTHER AND (SMILED->SMIL) +5142-33396-0065-963: SOFT HEART HE SAID GENTLY TO HER THEN TO (THORKEL->TORQUAL) WELL LET HIM GO (THORKEL->TORKLE) +5142-33396-0066-964: THEN HE TURNED TO ME AGAIN FROWNING +5142-33396-0067-965: BUT YOUNG SHARP TONGUE NOW THAT (WE HAVE->WE'VE) CAUGHT YOU (WE->*) WILL PUT YOU INTO A TRAP THAT YOU CANNOT GET OUT OF +5142-33396-0068-966: SO I LIVED AND NOW AM 
YOUR TOOTH THRALL WELL IT IS THE LUCK OF WAR +5142-36377-0000-870: IT WAS ONE OF THE MASTERLY AND CHARMING STORIES OF (DUMAS->DE MAU) THE ELDER +5142-36377-0001-871: IN FIVE MINUTES I WAS IN A NEW WORLD AND MY MELANCHOLY ROOM WAS FULL OF THE LIVELIEST FRENCH COMPANY +5142-36377-0002-872: THE SOUND OF AN IMPERATIVE AND UNCOMPROMISING BELL RECALLED ME IN DUE TIME TO THE REGIONS OF REALITY +5142-36377-0003-873: AMBROSE MET ME AT THE BOTTOM OF THE STAIRS AND SHOWED ME THE WAY TO THE SUPPER ROOM +5142-36377-0004-874: SHE SIGNED TO ME WITH A GHOSTLY SOLEMNITY TO TAKE THE VACANT PLACE ON THE LEFT OF HER FATHER +5142-36377-0005-875: THE DOOR OPENED AGAIN WHILE I WAS STILL STUDYING THE TWO BROTHERS WITHOUT I HONESTLY CONFESS BEING VERY FAVORABLY IMPRESSED BY EITHER OF THEM +5142-36377-0006-876: A NEW MEMBER OF THE FAMILY CIRCLE WHO INSTANTLY ATTRACTED MY ATTENTION ENTERED THE ROOM +5142-36377-0007-877: A LITTLE CRACKED THAT IN THE POPULAR PHRASE WAS MY IMPRESSION OF THE STRANGER WHO NOW MADE HIS APPEARANCE IN THE SUPPER ROOM +5142-36377-0008-878: MISTER (MEADOWCROFT->MEADOWCROF) THE ELDER HAVING NOT SPOKEN ONE WORD THUS FAR HIMSELF INTRODUCED THE (NEWCOMER->NEW COMER) TO ME WITH A SIDE GLANCE AT HIS SONS WHICH HAD SOMETHING LIKE DEFIANCE IN IT A GLANCE WHICH AS I WAS SORRY TO NOTICE WAS RETURNED WITH THE DEFIANCE ON THEIR SIDE BY THE TWO YOUNG MEN +5142-36377-0009-879: PHILIP (LEFRANK->LE FRANK) THIS IS MY OVERLOOKER MISTER (JAGO->YAGO) SAID THE OLD MAN FORMALLY PRESENTING US +5142-36377-0010-880: HE IS NOT WELL HE HAS COME OVER THE OCEAN FOR REST AND (CHANGE OF->CHANGES) SCENE +5142-36377-0011-881: (MISTER JAGO->THIS GEOGO) IS AN AMERICAN PHILIP +5142-36377-0012-882: MAKE ACQUAINTANCE WITH (MISTER JAGO->MISS GIAGO) SIT TOGETHER +5142-36377-0013-883: THEY POINTEDLY DREW BACK FROM JOHN (JAGO->YAGO) AS HE APPROACHED THE EMPTY CHAIR NEXT TO ME AND MOVED ROUND TO THE OPPOSITE SIDE OF THE TABLE +5142-36377-0014-884: A PRETTY GIRL AND SO FAR AS I COULD JUDGE BY APPEARANCES A GOOD GIRL TOO DESCRIBING HER GENERALLY I MAY SAY THAT SHE HAD A SMALL HEAD WELL CARRIED AND WELL SET ON HER SHOULDERS BRIGHT GRAY EYES THAT LOOKED AT YOU HONESTLY AND MEANT WHAT THEY LOOKED A TRIM SLIGHT LITTLE FIGURE TOO SLIGHT FOR OUR ENGLISH NOTIONS OF BEAUTY A STRONG AMERICAN ACCENT AND A RARE THING IN AMERICA A PLEASANTLY TONED VOICE WHICH MADE THE ACCENT AGREEABLE TO ENGLISH EARS +5142-36377-0015-885: OUR FIRST IMPRESSIONS OF PEOPLE ARE IN NINE CASES OUT OF TEN THE RIGHT IMPRESSIONS +5142-36377-0016-886: FOR ONCE IN A WAY I PROVED A TRUE PROPHET +5142-36377-0017-887: THE ONLY CHEERFUL CONVERSATION WAS THE CONVERSATION ACROSS THE TABLE BETWEEN NAOMI AND ME +5142-36377-0018-888: HE LOOKED UP (AT NAOMI->AND NOW ON ME) DOUBTINGLY FROM HIS PLATE AND LOOKED DOWN AGAIN SLOWLY WITH A FROWN +5142-36377-0019-889: WHEN I ADDRESSED HIM HE ANSWERED CONSTRAINEDLY +5142-36377-0020-890: A MORE DREARY AND MORE DISUNITED FAMILY PARTY I NEVER SAT AT THE TABLE WITH +5142-36377-0021-891: ENVY HATRED MALICE AND UNCHARITABLENESS ARE NEVER SO ESSENTIALLY DETESTABLE TO MY MIND AS WHEN THEY ARE ANIMATED BY (A->THE) SENSE OF PROPRIETY AND WORK UNDER THE SURFACE BUT FOR MY INTEREST IN (NAOMI->NAY OWE ME) AND MY OTHER INTEREST IN THE LITTLE LOVE LOOKS WHICH I NOW AND THEN SURPRISED PASSING BETWEEN HER AND AMBROSE I SHOULD NEVER HAVE SAT THROUGH THAT SUPPER +5142-36377-0022-892: I WISH YOU GOOD NIGHT SHE LAID HER BONY HANDS ON THE BACK OF MISTER MEADOWCROFT'S INVALID CHAIR CUT HIM SHORT IN HIS FAREWELL SALUTATION TO ME AND WHEELED HIM OUT TO HIS BED AS IF 
SHE WERE WHEELING HIM OUT TO HIS GRAVE +5142-36377-0023-893: YOU WERE QUITE RIGHT TO SAY NO AMBROSE BEGAN NEVER SMOKE WITH (JOHN JAGO->JOHNNIAGO) HIS CIGARS WILL POISON YOU +5142-36377-0024-894: (NAOMI->THEY ONLY) SHOOK HER FOREFINGER REPROACHFULLY AT THEM AS IF THE TWO STURDY YOUNG FARMERS HAD BEEN TWO CHILDREN +5142-36377-0025-895: SILAS SLUNK AWAY WITHOUT A WORD OF PROTEST AMBROSE STOOD HIS GROUND EVIDENTLY BENT ON MAKING HIS PEACE (WITH->WHEN) NAOMI BEFORE HE LEFT HER SEEING THAT I WAS IN THE WAY I WALKED ASIDE TOWARD A GLASS DOOR AT THE LOWER END OF THE ROOM +5142-36586-0000-967: IT IS MANIFEST THAT MAN IS NOW SUBJECT TO MUCH VARIABILITY +5142-36586-0001-968: SO IT IS WITH THE LOWER ANIMALS +5142-36586-0002-969: THE (VARIABILITY->VERY ABILITY) OF MULTIPLE (PARTS->PART) +5142-36586-0003-970: BUT THIS SUBJECT WILL BE MORE PROPERLY DISCUSSED WHEN WE TREAT OF THE DIFFERENT RACES OF MANKIND +5142-36586-0004-971: EFFECTS OF THE INCREASED USE AND DISUSE OF PARTS +5142-36600-0000-896: CHAPTER SEVEN ON THE RACES OF MAN +5142-36600-0001-897: IN DETERMINING WHETHER TWO OR MORE ALLIED FORMS OUGHT TO BE RANKED (AS->TO) SPECIES OR VARIETIES NATURALISTS ARE PRACTICALLY GUIDED BY THE FOLLOWING CONSIDERATIONS NAMELY THE AMOUNT OF DIFFERENCE BETWEEN THEM AND WHETHER SUCH (DIFFERENCES->DIFFERENCE IS) RELATE TO FEW OR MANY POINTS OF STRUCTURE AND WHETHER THEY ARE OF PHYSIOLOGICAL IMPORTANCE BUT MORE ESPECIALLY WHETHER THEY ARE CONSTANT +5639-40744-0000-137: ELEVEN O'CLOCK HAD STRUCK IT WAS A FINE CLEAR NIGHT (THEY->THERE) WERE THE ONLY PERSONS ON THE ROAD AND THEY SAUNTERED LEISURELY ALONG TO AVOID PAYING THE PRICE OF FATIGUE FOR THE RECREATION PROVIDED FOR THE TOLEDANS IN (THEIR->THE) VALLEY OR ON THE BANKS OF THEIR RIVER +5639-40744-0001-138: SECURE AS HE THOUGHT IN THE CAREFUL ADMINISTRATION OF JUSTICE IN THAT CITY AND THE CHARACTER OF ITS WELL DISPOSED INHABITANTS THE GOOD (HIDALGO->HAD ALGO) WAS FAR FROM THINKING THAT ANY DISASTER COULD (BEFAL->BEFALL) HIS FAMILY +5639-40744-0002-139: (RODOLFO->RUDOLPHO) AND HIS COMPANIONS WITH THEIR FACES MUFFLED IN THEIR CLOAKS STARED RUDELY AND INSOLENTLY AT THE MOTHER THE DAUGHTER AND THE SERVANT MAID +5639-40744-0003-140: IN A MOMENT HE COMMUNICATED HIS THOUGHTS TO HIS COMPANIONS AND IN THE NEXT MOMENT THEY RESOLVED TO TURN BACK AND CARRY HER OFF TO PLEASE (RODOLFO->RUDOLPHO) FOR THE RICH WHO ARE OPEN HANDED ALWAYS FIND (PARASITES->PARRICIDES) READY TO ENCOURAGE THEIR BAD PROPENSITIES AND THUS TO CONCEIVE THIS WICKED DESIGN TO COMMUNICATE IT APPROVE IT RESOLVE ON RAVISHING LEOCADIA AND TO CARRY THAT DESIGN INTO EFFECT WAS THE WORK OF A MOMENT +5639-40744-0004-141: THEY DREW THEIR SWORDS HID THEIR FACES IN THE FLAPS OF THEIR CLOAKS TURNED BACK AND SOON CAME IN FRONT OF THE LITTLE PARTY WHO HAD NOT YET DONE GIVING THANKS TO GOD FOR THEIR ESCAPE FROM THOSE AUDACIOUS MEN +5639-40744-0005-142: FINALLY THE ONE PARTY WENT OFF EXULTING AND THE OTHER WAS LEFT IN DESOLATION AND WOE +5639-40744-0006-143: (RODOLFO->RODOLPHO) ARRIVED AT HIS OWN HOUSE WITHOUT ANY IMPEDIMENT (AND LEOCADIA'S->A UCADIUS) PARENTS REACHED THEIRS HEART BROKEN AND DESPAIRING +5639-40744-0007-144: MEANWHILE (RODOLFO->RUDOLPHO) HAD (LEOCADIA->LOCALIA) SAFE IN HIS CUSTODY AND IN HIS OWN APARTMENT +5639-40744-0008-145: WHO TOUCHES ME AM I IN BED +5639-40744-0009-146: MOTHER DEAR FATHER DO YOU HEAR ME +5639-40744-0010-147: IT IS THE ONLY AMENDS I ASK OF YOU FOR THE WRONG YOU HAVE DONE ME +5639-40744-0011-148: SHE FOUND THE DOOR BUT IT WAS LOCKED OUTSIDE +5639-40744-0012-149: SHE SUCCEEDED IN OPENING THE 
WINDOW AND THE MOONLIGHT SHONE IN SO BRIGHTLY THAT SHE COULD DISTINGUISH THE (COLOUR->COLOR) OF SOME DAMASK (HANGINGS->HANGING) IN THE ROOM +5639-40744-0013-150: SHE SAW THAT THE BED WAS GILDED AND SO RICH THAT IT SEEMED THAT OF A PRINCE (*->THE) RATHER (THAN->THAT) OF A PRIVATE GENTLEMAN +5639-40744-0014-151: AMONG OTHER THINGS ON WHICH (SHE->HE) CAST HER EYES WAS A SMALL CRUCIFIX OF SOLID SILVER STANDING ON A CABINET NEAR THE WINDOW +5639-40744-0015-152: THIS PERSON WAS (RODOLFO->RIDOLPHO) WHO THOUGH HE HAD GONE TO LOOK FOR HIS FRIENDS HAD CHANGED HIS MIND IN THAT RESPECT (NOT THINKING->NOTHING) IT ADVISABLE TO ACQUAINT THEM WITH WHAT HAD PASSED BETWEEN HIM AND THE GIRL +5639-40744-0016-153: ON THE CONTRARY HE RESOLVED TO TELL THEM THAT REPENTING OF HIS VIOLENCE AND MOVED BY (HER->A) TEARS HE HAD ONLY CARRIED HER HALF WAY TOWARDS HIS HOUSE AND THEN LET HER GO +5639-40744-0017-154: CHOKING WITH EMOTION (LEOCADI->LUCADIA) MADE A SIGN TO HER PARENTS THAT SHE WISHED TO BE ALONE WITH THEM +5639-40744-0018-155: THAT WOULD BE VERY WELL MY CHILD REPLIED HER FATHER IF YOUR PLAN WERE NOT LIABLE TO BE FRUSTRATED BY ORDINARY CUNNING BUT NO DOUBT THIS IMAGE (HAS->HAD) BEEN ALREADY MISSED BY ITS OWNER AND HE WILL HAVE SET IT DOWN FOR CERTAIN THAT IT WAS TAKEN OUT OF THE ROOM BY THE PERSON HE LOCKED UP THERE +5639-40744-0019-156: WHAT YOU HAD BEST DO MY CHILD IS TO KEEP IT AND PRAY TO IT THAT SINCE IT WAS A WITNESS TO YOUR UNDOING IT WILL DEIGN TO VINDICATE YOUR CAUSE BY ITS RIGHTEOUS JUDGMENT +5639-40744-0020-157: THUS DID (THIS->THE) HUMANE AND RIGHT MINDED FATHER COMFORT HIS UNHAPPY DAUGHTER AND HER MOTHER EMBRACING HER AGAIN DID ALL SHE COULD TO SOOTHE (HER->A) FEELINGS +5639-40744-0021-158: SHE MEANWHILE PASSED HER LIFE WITH HER PARENTS IN THE STRICTEST RETIREMENT NEVER LETTING HERSELF BE SEEN BUT SHUNNING EVERY EYE LEST IT SHOULD READ HER MISFORTUNE IN HER FACE +5639-40744-0022-159: TIME ROLLED ON THE HOUR OF HER DELIVERY ARRIVED IT TOOK PLACE IN THE UTMOST SECRECY HER MOTHER TAKING UPON HER THE OFFICE OF MIDWIFE (AND->AS) SHE GAVE BIRTH TO A SON ONE OF THE MOST BEAUTIFUL EVER SEEN +5639-40744-0023-160: WHEN THE BOY WALKED THROUGH THE STREETS BLESSINGS WERE SHOWERED UPON HIM BY ALL WHO SAW HIM (BLESSINGS->BLESSING) UPON HIS BEAUTY UPON THE MOTHER THAT BORE HIM UPON THE FATHER THAT BEGOT HIM UPON THOSE WHO BROUGHT HIM UP SO WELL +5639-40744-0024-161: ONE DAY WHEN THE BOY WAS SENT BY HIS GRANDFATHER WITH A MESSAGE TO A RELATION HE PASSED ALONG A STREET IN WHICH THERE WAS A GREAT CONCOURSE OF HORSEMEN +5639-40744-0025-162: THE BED SHE TOO WELL REMEMBERED WAS THERE AND ABOVE ALL THE CABINET ON WHICH HAD STOOD THE IMAGE SHE HAD TAKEN AWAY WAS STILL ON THE SAME SPOT +5639-40744-0026-163: (LUIS->LOUIS) WAS OUT OF DANGER IN A FORTNIGHT IN A MONTH HE ROSE FROM HIS BED AND (DURING->DREWING) ALL THAT TIME HE WAS VISITED DAILY BY HIS MOTHER AND GRANDMOTHER AND TREATED BY THE MASTER AND MISTRESS OF THE HOUSE AS IF HE WAS THEIR OWN CHILD +5639-40744-0027-164: THUS SAYING AND PRESSING THE CRUCIFIX TO HER BREAST SHE FELL FAINTING INTO THE ARMS OF DONA (ESTAFANIA->ESTAFFANIA) WHO AS A GENTLEWOMAN TO WHOSE SEX PITY IS (AS->A) NATURAL AS CRUELTY (IS->AS) TO MAN INSTANTLY PRESSED HER LIPS TO THOSE OF THE FAINTING GIRL SHEDDING OVER HER SO MANY TEARS THAT THERE NEEDED NO OTHER SPRINKLING OF WATER TO RECOVER (LEOCADIA->LOCATIA) FROM HER SWOON +5639-40744-0028-165: I HAVE GREAT THINGS TO TELL YOU SENOR SAID (DONA ESTAFANIA->DORNEST DA FANIA) TO HER HUSBAND THE CREAM AND SUBSTANCE OF WHICH IS THIS THE FAINTING GIRL 
BEFORE YOU IS YOUR DAUGHTER AND (THAT->THE) BOY IS YOUR GRANDSON +5639-40744-0029-166: THIS TRUTH WHICH I HAVE LEARNED FROM HER LIPS IS CONFIRMED BY HIS FACE IN WHICH WE HAVE BOTH BEHELD THAT OF OUR SON +5639-40744-0030-167: JUST THEN (LEOCADIA->LEOKADIA) CAME TO HERSELF AND EMBRACING THE CROSS SEEMED CHANGED INTO A SEA OF TEARS AND THE GENTLEMAN (REMAINED->REMAINING) IN UTTER BEWILDERMENT UNTIL HIS WIFE HAD REPEATED TO HIM FROM BEGINNING TO END (LEOCADIA'S->LEUCEDES) WHOLE STORY AND HE BELIEVED IT THROUGH THE BLESSED DISPENSATION OF HEAVEN WHICH HAD CONFIRMED IT BY SO MANY CONVINCING TESTIMONIES +5639-40744-0031-168: SO PERSUASIVE WERE HER ENTREATIES AND SO STRONG HER ASSURANCES THAT NO HARM WHATEVER COULD RESULT TO THEM FROM THE INFORMATION SHE SOUGHT THEY WERE INDUCED TO CONFESS THAT ONE SUMMER'S NIGHT THE SAME SHE HAD MENTIONED THEMSELVES (AND->IN) ANOTHER FRIEND BEING OUT ON A (STROLL->STRAW) WITH (RODOLFO->RADOLPHO) THEY HAD BEEN CONCERNED IN THE (ABDUCTION->ADOCTION) OF A GIRL WHOM (RODOLFO->UDOLPHO) CARRIED OFF WHILST THE REST OF THEM DETAINED HER FAMILY WHO MADE A GREAT OUTCRY AND WOULD HAVE DEFENDED HER IF THEY COULD +5639-40744-0032-169: FOR GOD'S SAKE MY LADY MOTHER GIVE ME A WIFE WHO WOULD BE AN AGREEABLE COMPANION NOT ONE WHO WILL DISGUST ME SO THAT WE MAY BOTH BEAR EVENLY AND WITH MUTUAL GOOD WILL THE YOKE IMPOSED ON US BY HEAVEN INSTEAD OF PULLING THIS WAY AND THAT WAY AND FRETTING EACH OTHER TO DEATH +5639-40744-0033-170: HER BEARING WAS GRACEFUL (AND->*) ANIMATED SHE LED HER SON BY THE HAND AND BEFORE HER WALKED TWO MAIDS WITH WAX LIGHTS AND SILVER CANDLESTICKS +5639-40744-0034-171: ALL ROSE TO DO HER REVERENCE AS IF SOMETHING FROM HEAVEN HAD MIRACULOUSLY APPEARED BEFORE THEM BUT GAZING ON HER ENTRANCED WITH ADMIRATION NOT ONE OF THEM WAS ABLE TO ADDRESS A SINGLE WORD TO HER +5639-40744-0035-172: SHE REFLECTED HOW NEAR SHE STOOD TO THE CRISIS WHICH WAS TO DETERMINE WHETHER SHE WAS TO BE BLESSED OR UNHAPPY FOR EVER AND RACKED BY THE INTENSITY OF HER EMOTIONS SHE SUDDENLY CHANGED (COLOUR->COLOR) HER HEAD DROPPED AND SHE FELL FORWARD IN A SWOON INTO THE ARMS OF THE (DISMAYED ESTAFANIA->DISMAYEDESTAFHANIA) +5639-40744-0036-173: HIS MOTHER HAD LEFT HER TO HIM AS BEING HER DESTINED PROTECTOR BUT WHEN SHE SAW THAT HE TOO WAS INSENSIBLE SHE WAS NEAR MAKING A THIRD AND WOULD HAVE DONE SO HAD HE NOT COME TO HIMSELF +5639-40744-0037-174: KNOW THEN SON OF MY HEART THAT THIS FAINTING LADY IS YOUR REAL BRIDE I SAY REAL BECAUSE SHE IS THE ONE WHOM YOUR FATHER AND I HAVE CHOSEN FOR YOU AND (THE->A) PORTRAIT WAS A PRETENCE +5639-40744-0038-175: JUST AT (THE->A) MOMENT WHEN THE TEARS OF THE PITYING BEHOLDERS FLOWED FASTEST AND THEIR EJACULATIONS WERE MOST EXPRESSIVE OF DESPAIR (LEOCADIA->THE OCCAS) GAVE SIGNS OF RECOVERY AND BROUGHT BACK GLADNESS TO THE HEARTS OF ALL +5639-40744-0039-176: WHEN SHE CAME TO HER SENSES AND BLUSHING TO FIND HERSELF IN (RODOLFO'S->GODOLPH'S) ARMS WOULD HAVE DISENGAGED HERSELF NO SENORA HE SAID THAT MUST NOT BE STRIVE NOT TO WITHDRAW FROM THE ARMS OF HIM WHO HOLDS YOU IN HIS SOUL +5639-40744-0040-177: THIS WAS DONE FOR THE EVENT TOOK PLACE AT A TIME (WHEN->BY) THE CONSENT OF THE PARTIES WAS SUFFICIENT FOR THE CELEBRATION OF A MARRIAGE WITHOUT ANY OF THE PRELIMINARY FORMALITIES WHICH ARE NOW SO PROPERLY REQUIRED +5639-40744-0041-178: NOR WAS (RODOLFO->RIDOLPHAL) LESS SURPRISED THAN THEY AND THE BETTER TO ASSURE HIMSELF OF SO WONDERFUL A FACT HE BEGGED (LEOCADIA->LOU KATYA) TO GIVE HIM SOME TOKEN WHICH SHOULD MAKE PERFECTLY CLEAR TO HIM THAT WHICH INDEED HE DID 
NOT DOUBT SINCE IT WAS AUTHENTICATED BY HIS PARENTS +5683-32865-0000-2483: YOU KNOW CAPTAIN LAKE +5683-32865-0001-2484: SAID LORD CHELFORD ADDRESSING ME +5683-32865-0002-2485: HE HAD HIS HAND UPON LAKE'S SHOULDER +5683-32865-0003-2486: THEY ARE COUSINS YOU KNOW WE ARE ALL COUSINS +5683-32865-0004-2487: WHATEVER LORD CHELFORD SAID MISS BRANDON RECEIVED IT VERY GRACIOUSLY AND EVEN WITH A MOMENTARY SMILE +5683-32865-0005-2488: BUT HER GREETING TO CAPTAIN (LAKE->LEEK) WAS MORE THAN USUALLY HAUGHTY AND FROZEN AND HER FEATURES I FANCIED PARTICULARLY PROUD AND PALE +5683-32865-0006-2489: AT DINNER LAKE WAS EASY AND AMUSING +5683-32865-0007-2490: (I'M->I AM) GLAD YOU LIKE IT SAYS (WYLDER->WILDER) CHUCKLING BENIGNANTLY ON IT OVER HIS SHOULDER +5683-32865-0008-2491: I BELIEVE I HAVE A LITTLE TASTE THAT WAY THOSE ARE ALL REAL YOU KNOW THOSE JEWELS +5683-32865-0009-2492: AND HE PLACED IT IN THAT GENTLEMAN'S FINGERS WHO NOW TOOK HIS TURN AT THE LAMP AND CONTEMPLATED THE LITTLE (PARALLELOGRAM->PARALLELLOGRAM) WITH A GLEAM OF SLY AMUSEMENT +5683-32865-0010-2493: I WAS THINKING IT'S VERY LIKE THE ACE OF HEARTS ANSWERED THE CAPTAIN SOFTLY SMILING ON +5683-32865-0011-2494: WHEREUPON LAKE LAUGHED QUIETLY STILL LOOKING ON THE ACE OF HEARTS WITH HIS SLY EYES +5683-32865-0012-2495: AND WYLDER LAUGHED TOO MORE SUDDENLY AND NOISILY THAN THE HUMOUR OF THE JOKE SEEMED QUITE TO CALL FOR AND GLANCED A GRIM LOOK FROM THE CORNERS OF HIS EYES ON LAKE BUT THE GALLANT CAPTAIN DID NOT SEEM TO PERCEIVE IT AND AFTER A FEW SECONDS MORE HE HANDED IT VERY INNOCENTLY BACK TO MISSUS DOROTHY ONLY REMARKING +5683-32865-0013-2496: DO YOU KNOW LAKE OH I REALLY CAN'T TELL BUT HE'LL SOON TIRE OF COUNTRY LIFE +5683-32865-0014-2497: HE'S NOT A MAN FOR COUNTRY QUARTERS +5683-32865-0015-2498: I HAD A HORRID DREAM ABOUT HIM LAST NIGHT THAT +5683-32865-0016-2499: OH I KNOW THAT'S (LORNE->LORN) BRANDON +5683-32865-0017-2500: ALL THE TIME HE WAS TALKING TO ME HIS ANGRY LITTLE EYES WERE FOLLOWING LAKE +5683-32866-0000-2527: MISS LAKE DECLINED THE CARRIAGE TO NIGHT +5683-32866-0001-2528: AND HE ADDED SOMETHING STILL LESS COMPLIMENTARY +5683-32866-0002-2529: BUT DON'T THESE VERY WISE THINGS SOMETIMES TURN OUT VERY FOOLISHLY +5683-32866-0003-2530: IN THE MEANTIME I HAD FORMED A NEW IDEA OF HER +5683-32866-0004-2531: BY THIS TIME LORD CHELFORD AND WYLDER RETURNED AND DISGUSTED RATHER WITH MYSELF I RUMINATED ON MY WANT OF (GENERAL SHIP->GENERALSHIP) +5683-32866-0005-2532: AND HE MADE A LITTLE DIP OF HIS CANE TOWARDS BRANDON HALL OVER HIS SHOULDER +5683-32866-0006-2533: YES SO THEY SAID BUT THAT WOULD I THINK HAVE BEEN WORSE +5683-32866-0007-2534: IF A FELLOW'S BEEN A LITTLE BIT WILD (HE'S BEELZEBUB->HE IS BIELDS ABOVE) AT ONCE +5683-32866-0008-2535: (BRACTON'S->BROCKTON'S) A VERY GOOD FELLOW I CAN ASSURE YOU +5683-32866-0009-2536: I DON'T KNOW (AND->ONE) CAN'T SAY HOW YOU (FINE->FIND) GENTLEMEN (DEFINE->TO FIND) WICKEDNESS ONLY AS AN OBSCURE FEMALE I SPEAK ACCORDING TO MY LIGHTS AND HE IS GENERALLY THOUGHT THE WICKEDEST MAN IN THIS COUNTY +5683-32866-0010-2537: WELL YOU KNOW RADIE WOMEN LIKE WICKED FELLOWS IT IS CONTRAST I SUPPOSE BUT THEY DO AND I'M SURE FROM WHAT BRACTON HAS SAID TO ME I KNOW HIM INTIMATELY THAT DORCAS LIKES HIM AND I CAN'T CONCEIVE WHY THEY ARE NOT MARRIED +5683-32866-0011-2538: THEIR WALK CONTINUED SILENT FOR THE GREATER PART NEITHER WAS QUITE SATISFIED WITH THE OTHER BUT RACHEL AT LAST SAID +5683-32866-0012-2539: NOW THAT'S IMPOSSIBLE RADIE FOR I REALLY DON'T THINK I ONCE THOUGHT OF HIM ALL THIS EVENING EXCEPT JUST WHILE WE 
WERE TALKING +5683-32866-0013-2540: THERE WAS A BRIGHT MOONLIGHT BROKEN BY THE SHADOWS OF OVERHANGING BOUGHS AND WITHERED LEAVES AND THE MOTTLED LIGHTS AND SHADOWS GLIDED ODDLY ACROSS HIS PALE FEATURES +5683-32866-0014-2541: DON'T INSULT ME STANLEY BY TALKING AGAIN AS YOU DID THIS MORNING +5683-32866-0015-2542: WHAT I SAY IS ALTOGETHER ON YOUR OWN (ACCOUNT->ACCOUN) +5683-32866-0016-2543: MARK MY WORDS YOU'LL FIND HIM TOO STRONG FOR YOU (AYE->I) AND TOO DEEP +5683-32866-0017-2544: I AM VERY UNEASY ABOUT IT WHATEVER IT IS I CAN'T HELP IT +5683-32866-0018-2545: TO MY MIND THERE HAS ALWAYS BEEN SOMETHING INEXPRESSIBLY AWFUL IN FAMILY FEUDS +5683-32866-0019-2546: THE MYSTERY OF THEIR ORIGIN THEIR CAPACITY FOR EVOLVING LATENT FACULTIES OF CRIME AND THE (STEADY->STUDY) VITALITY WITH WHICH THEY (SURVIVE->SURVIVED) THE HEARSE AND SPEAK THEIR DEEP MOUTHED MALIGNITIES IN EVERY NEW BORN GENERATION HAVE ASSOCIATED THEM SOMEHOW IN MY MIND WITH (A->THE) SPELL OF LIFE EXCEEDING AND DISTINCT FROM HUMAN AND (A SPECIAL->ESPECIAL) SATANIC ACTION +5683-32866-0020-2547: THE FLOOR MORE THAN ANYTHING ELSE SHOWED THE GREAT AGE OF THE ROOM +5683-32866-0021-2548: MY BED WAS UNEXCEPTIONABLY COMFORTABLE BUT IN MY THEN MOOD I COULD HAVE WISHED IT A GREAT DEAL MORE MODERN +5683-32866-0022-2549: ITS CURTAINS WERE OF THICK AND FADED TAPESTRY +5683-32866-0023-2550: ALL THE FURNITURE BELONGED TO OTHER TIMES +5683-32866-0024-2551: I (SHAN'T->SHA'N'T) TROUBLE YOU ABOUT MY TRAIN OF THOUGHTS OR FANCIES BUT I BEGAN TO FEEL VERY LIKE A GENTLEMAN IN A GHOST STORY WATCHING EXPERIMENTALLY IN A HAUNTED CHAMBER +5683-32866-0025-2552: I DID NOT EVEN TAKE THE PRECAUTION OF SMOKING UP THE CHIMNEY +5683-32866-0026-2553: I BOLDLY LIGHTED MY (CHEROOT->TROUT) +5683-32866-0027-2554: A COLD BRIGHT MOON WAS SHINING WITH CLEAR SHARP LIGHTS AND SHADOWS +5683-32866-0028-2555: THE SOMBRE OLD TREES LIKE GIGANTIC HEARSE PLUMES BLACK AND AWFUL +5683-32866-0029-2556: SOMEHOW I HAD GROWN NERVOUS +5683-32866-0030-2557: A LITTLE BIT OF PLASTER (TUMBLED->TUMBLE) DOWN THE CHIMNEY AND STARTLED ME CONFOUNDEDLY +5683-32879-0000-2501: IT WAS NOT VERY MUCH PAST ELEVEN THAT MORNING WHEN THE PONY CARRIAGE FROM BRANDON DREW UP BEFORE THE LITTLE GARDEN WICKET OF REDMAN'S FARM +5683-32879-0001-2502: (WELL->WHILE) SHE WAS BETTER THOUGH SHE HAD HAD A BAD NIGHT +5683-32879-0002-2503: SO THERE CAME A STEP AND A LITTLE RUSTLING OF FEMININE DRAPERIES THE SMALL DOOR OPENED AND RACHEL ENTERED WITH HER HAND EXTENDED AND A PALE SMILE OF WELCOME +5683-32879-0003-2504: WOMEN CAN HIDE THEIR PAIN BETTER THAN WE MEN AND BEAR IT BETTER TOO EXCEPT WHEN SHAME DROPS FIRE INTO THE DREADFUL CHALICE +5683-32879-0004-2505: BUT POOR RACHEL LAKE HAD MORE THAN THAT STOICAL HYPOCRISY WHICH ENABLES THE TORTURED SPIRITS OF HER SEX TO LIFT A PALE FACE THROUGH THE FLAMES AND SMILE +5683-32879-0005-2506: THIS TRANSIENT SPRING AND LIGHTING UP ARE BEAUTIFUL A GLAMOUR BEGUILING OUR SENSES +5683-32879-0006-2507: THERE WAS SOMETHING OF SWEETNESS AND FONDNESS IN HER TONES AND MANNER WHICH WAS NEW TO RACHEL AND COMFORTING AND SHE RETURNED THE GREETING AS KINDLY AND FELT MORE LIKE HER FORMER SELF +5683-32879-0007-2508: RACHEL'S PALE AND SHARPENED FEATURES AND DILATED EYE STRUCK HER WITH A PAINFUL SURPRISE +5683-32879-0008-2509: YOU HAVE BEEN SO ILL MY POOR RACHEL +5683-32879-0009-2510: ILL AND TROUBLED DEAR TROUBLED IN MIND AND MISERABLY NERVOUS +5683-32879-0010-2511: POOR RACHEL HER NATURE RECOILED FROM DECEIT AND SHE TOLD AT ALL EVENTS AS MUCH OF THE TRUTH AS SHE DARED +5683-32879-0011-2512: SHE SPOKE 
WITH A SUDDEN ENERGY WHICH PARTOOK (OF->A) FEAR AND PASSION AND FLUSHED HER THIN CHEEK AND MADE HER LANGUID EYES FLASH +5683-32879-0012-2513: THANK YOU RACHEL MY COUSIN RACHEL MY ONLY FRIEND +5683-32879-0013-2514: CHELFORD HAD A NOTE FROM MISTER (WYLDER->WILDER) THIS MORNING ANOTHER NOTE HIS COMING DELAYED AND SOMETHING OF HIS HAVING TO SEE SOME PERSON WHO (IS->WAS) ABROAD CONTINUED DORCAS AFTER A LITTLE PAUSE +5683-32879-0014-2515: YES SOMETHING EVERYTHING SAID RACHEL HURRIEDLY LOOKING FROWNINGLY AT A FLOWER WHICH SHE WAS TWIRLING IN HER FINGERS +5683-32879-0015-2516: YES SAID RACHEL +5683-32879-0016-2517: AND THE WAN ORACLE HAVING SPOKEN SHE (SATE->SAT) DOWN IN THE SAME SORT OF ABSTRACTION AGAIN BESIDE DORCAS AND SHE LOOKED FULL IN HER COUSIN'S EYES +5683-32879-0017-2518: OF MARK WYLDER I SAY THIS HIS NAME HAS BEEN FOR YEARS HATEFUL TO ME AND RECENTLY IT HAS BECOME FRIGHTFUL AND YOU WILL PROMISE ME SIMPLY THIS THAT YOU WILL NEVER ASK ME TO SPEAK AGAIN ABOUT HIM +5683-32879-0018-2519: IT IS AN ANTIPATHY AN ANTIPATHY I CANNOT GET OVER DEAR DORCAS YOU MAY THINK IT A MADNESS BUT DON'T BLAME ME +5683-32879-0019-2520: I HAVE VERY FEW TO LOVE ME NOW AND I THOUGHT YOU MIGHT LOVE ME AS I HAVE BEGUN TO LOVE YOU +5683-32879-0020-2521: AND SHE THREW HER ARMS ROUND HER COUSIN'S NECK AND BRAVE RACHEL AT LAST BURST INTO TEARS +5683-32879-0021-2522: DORCAS IN HER STRANGE WAY WAS MOVED +5683-32879-0022-2523: I LIKE YOU STILL RACHEL I'M SURE I'LL ALWAYS LIKE YOU +5683-32879-0023-2524: YOU RESEMBLE ME RACHEL YOU ARE FEARLESS AND INFLEXIBLE AND GENEROUS +5683-32879-0024-2525: YES RACHEL I DO LOVE YOU +5683-32879-0025-2526: THANK YOU DORCAS DEAR +61-70968-0000-2179: HE BEGAN A CONFUSED COMPLAINT AGAINST THE WIZARD WHO HAD VANISHED BEHIND THE CURTAIN ON THE LEFT +61-70968-0001-2180: (GIVE->CUVE) NOT SO EARNEST A MIND TO THESE (MUMMERIES->MEMORIES) CHILD +61-70968-0002-2181: A GOLDEN FORTUNE AND A HAPPY LIFE +61-70968-0003-2182: HE WAS LIKE UNTO MY FATHER IN A WAY AND YET WAS NOT MY FATHER +61-70968-0004-2183: ALSO THERE WAS A STRIPLING PAGE WHO TURNED INTO A MAID +61-70968-0005-2184: THIS WAS SO SWEET A LADY SIR AND IN SOME MANNER I DO THINK SHE DIED +61-70968-0006-2185: BUT THEN THE PICTURE WAS GONE AS QUICKLY AS IT CAME +61-70968-0007-2186: SISTER NELL DO YOU HEAR THESE MARVELS +61-70968-0008-2187: TAKE YOUR PLACE AND LET US SEE WHAT THE CRYSTAL CAN SHOW TO YOU +61-70968-0009-2188: LIKE AS NOT YOUNG MASTER THOUGH I AM AN OLD MAN +61-70968-0010-2189: FORTHWITH ALL RAN TO THE OPENING OF THE TENT TO SEE WHAT MIGHT BE AMISS BUT MASTER WILL WHO PEEPED OUT FIRST NEEDED NO MORE THAN ONE GLANCE +61-70968-0011-2190: HE GAVE WAY TO THE OTHERS VERY READILY AND RETREATED UNPERCEIVED BY THE SQUIRE AND MISTRESS FITZOOTH TO THE REAR OF THE TENT +61-70968-0012-2191: CRIES OF (A NOTTINGHAM A->UNNOTTINGHAM ARE) NOTTINGHAM +61-70968-0013-2192: BEFORE THEM FLED THE STROLLER AND HIS THREE SONS (CAPLESS->CAPLICE) AND TERRIFIED +61-70968-0014-2193: WHAT IS THE TUMULT AND RIOTING CRIED OUT THE SQUIRE AUTHORITATIVELY AND HE BLEW TWICE ON (A->THE) SILVER WHISTLE WHICH HUNG AT HIS BELT +61-70968-0015-2194: NAY WE (REFUSED->WERE FREEZED) THEIR REQUEST MOST POLITELY MOST NOBLE SAID THE LITTLE STROLLER +61-70968-0016-2195: AND THEN THEY BECAME VEXED AND WOULD HAVE SNATCHED YOUR PURSE FROM US +61-70968-0017-2196: I COULD NOT SEE MY BOY INJURED EXCELLENCE FOR BUT DOING HIS DUTY AS ONE OF CUMBERLAND'S SONS +61-70968-0018-2197: SO I DID PUSH THIS FELLOW +61-70968-0019-2198: IT IS ENOUGH SAID GEORGE GAMEWELL SHARPLY (AND->AS) HE TURNED UPON 
THE CROWD +61-70968-0020-2199: SHAME ON YOU CITIZENS CRIED HE I BLUSH FOR MY FELLOWS OF NOTTINGHAM +61-70968-0021-2200: SURELY WE CAN SUBMIT WITH GOOD GRACE +61-70968-0022-2201: TIS FINE FOR YOU TO TALK OLD MAN ANSWERED THE LEAN SULLEN APPRENTICE +61-70968-0023-2202: BUT I WRESTLED WITH THIS FELLOW AND DO KNOW THAT HE PLAYED UNFAIRLY IN THE SECOND BOUT +61-70968-0024-2203: SPOKE THE SQUIRE LOSING ALL (PATIENCE->PATIENT) AND IT WAS TO YOU THAT I GAVE ANOTHER (PURSE IN->PERSON) CONSOLATION +61-70968-0025-2204: COME TO ME MEN HERE HERE HE RAISED HIS VOICE STILL LOUDER +61-70968-0026-2205: THE STROLLERS TOOK THEIR PART IN IT WITH (HEARTY->HARDY) ZEST NOW THAT THEY HAD SOME CHANCE OF BEATING OFF THEIR FOES +61-70968-0027-2206: ROBIN AND THE LITTLE TUMBLER BETWEEN THEM TRIED TO FORCE THE SQUIRE TO STAND BACK AND VERY VALIANTLY (DID->DO) THESE TWO COMPORT THEMSELVES +61-70968-0028-2207: THE HEAD AND CHIEF OF THE RIOT THE NOTTINGHAM (APPRENTICE->APPRENTICED) WITH CLENCHED FISTS THREATENED MONTFICHET +61-70968-0029-2208: THE SQUIRE HELPED TO THRUST THEM ALL IN AND ENTERED SWIFTLY HIMSELF +61-70968-0030-2209: NOW BE SILENT ON YOUR LIVES HE BEGAN BUT THE CAPTURED APPRENTICE SET UP AN INSTANT SHOUT +61-70968-0031-2210: SILENCE YOU (KNAVE->NAVE) CRIED MONTFICHET +61-70968-0032-2211: HE FELT FOR AND FOUND THE WIZARD'S BLACK CLOTH THE SQUIRE WAS QUITE OUT OF BREATH +61-70968-0033-2212: THRUSTING OPEN THE PROPER ENTRANCE OF THE TENT ROBIN SUDDENLY RUSHED FORTH WITH HIS BURDEN WITH A GREAT SHOUT +61-70968-0034-2213: A MONTFICHET A MONTFICHET GAMEWELL TO THE RESCUE +61-70968-0035-2214: TAKING ADVANTAGE OF THIS THE SQUIRE'S FEW MEN REDOUBLED THEIR EFFORTS AND ENCOURAGED BY (ROBIN'S->ROBINS) AND THE LITTLE STROLLER'S CRIES FOUGHT THEIR WAY TO HIM +61-70968-0036-2215: GEORGE MONTFICHET WILL NEVER FORGET THIS DAY +61-70968-0037-2216: WHAT IS YOUR NAME LORDING ASKED THE LITTLE STROLLER PRESENTLY +61-70968-0038-2217: ROBIN FITZOOTH +61-70968-0039-2218: AND MINE IS WILL STUTELEY SHALL WE BE COMRADES +61-70968-0040-2219: RIGHT WILLINGLY FOR BETWEEN US WE HAVE WON THE BATTLE ANSWERED ROBIN +61-70968-0041-2220: I LIKE YOU WILL YOU ARE THE SECOND WILL THAT I HAVE MET AND LIKED WITHIN TWO DAYS IS THERE A SIGN IN THAT +61-70968-0042-2221: (MONTFICHET->MARTFICHERE) CALLED OUT FOR ROBIN TO GIVE HIM AN ARM +61-70968-0043-2222: FRIENDS SAID (MONTFICHET->MONTFICHE) FAINTLY TO THE WRESTLERS BEAR US ESCORT SO FAR AS THE SHERIFF'S HOUSE +61-70968-0044-2223: IT WILL NOT BE SAFE FOR YOU TO STAY HERE NOW +61-70968-0045-2224: PRAY FOLLOW US WITH MINE (AND->IN) MY LORD SHERIFF'S MEN +61-70968-0046-2225: NOTTINGHAM CASTLE WAS REACHED AND ADMITTANCE WAS DEMANDED +61-70968-0047-2226: MASTER MONCEUX THE SHERIFF OF NOTTINGHAM WAS MIGHTILY PUT ABOUT WHEN TOLD OF THE RIOTING +61-70968-0048-2227: AND HENRY MIGHT RETURN TO ENGLAND AT ANY MOMENT +61-70968-0049-2228: HAVE YOUR WILL CHILD IF THE BOY ALSO WILLS IT MONTFICHET ANSWERED FEELING TOO ILL TO OPPOSE ANYTHING VERY STRONGLY JUST THEN +61-70968-0050-2229: HE MADE AN EFFORT TO HIDE HIS CONDITION FROM THEM ALL AND ROBIN FELT HIS FINGERS TIGHTEN UPON HIS ARM +61-70968-0051-2230: (BEG->BEGGED) ME A ROOM OF THE SHERIFF CHILD QUICKLY +61-70968-0052-2231: BUT WHO IS THIS FELLOW PLUCKING AT YOUR (SLEEVE->STEVE) +61-70968-0053-2232: HE IS MY ESQUIRE EXCELLENCY RETURNED ROBIN WITH DIGNITY +61-70968-0054-2233: MISTRESS FITZOOTH HAD BEEN CARRIED OFF BY THE SHERIFF'S DAUGHTER AND HER MAIDS AS SOON AS THEY HAD ENTERED THE HOUSE SO THAT ROBIN ALONE HAD THE CARE OF MONTFICHET +61-70968-0055-2234: ROBIN 
WAS GLAD WHEN AT LENGTH THEY WERE LEFT TO THEIR OWN DEVICES +61-70968-0056-2235: THE WINE DID CERTAINLY BRING BACK THE COLOR TO THE SQUIRE'S CHEEKS +61-70968-0057-2236: THESE ESCAPADES ARE NOT FOR OLD (GAMEWELL LAD->GAME WELL LED) HIS DAY HAS COME TO TWILIGHT +61-70968-0058-2237: WILL YOU FORGIVE ME NOW +61-70968-0059-2238: (IT WILL->IT'LL) BE NO DISAPPOINTMENT TO ME +61-70968-0060-2239: NO THANKS I AM GLAD TO GIVE YOU SUCH EASY HAPPINESS +61-70968-0061-2240: YOU ARE A WORTHY LEECH WILL PRESENTLY WHISPERED ROBIN THE WINE HAS WORKED A MARVEL +61-70968-0062-2241: (AY->I) AND SHOW YOU SOME PRETTY TRICKS +61-70970-0000-2242: YOUNG FITZOOTH HAD BEEN COMMANDED TO HIS MOTHER'S CHAMBER SO SOON AS HE HAD COME OUT FROM HIS CONVERSE WITH THE SQUIRE +61-70970-0001-2243: THERE BEFELL AN ANXIOUS INTERVIEW MISTRESS FITZOOTH ARGUING FOR AND AGAINST THE SQUIRE'S PROJECT IN A BREATH +61-70970-0002-2244: MOST OF ALL ROBIN THOUGHT OF HIS FATHER WHAT WOULD HE COUNSEL +61-70970-0003-2245: IF FOR A WHIM YOU BEGGAR YOURSELF I CANNOT STAY YOU +61-70970-0004-2246: BUT TAKE IT WHILST I LIVE AND (WEAR->WHERE) MONTFICHET'S SHIELD IN THE DAYS WHEN MY EYES CAN BE REJOICED BY SO BRAVE A SIGHT FOR YOU WILL (NE'ER->NEVER) DISGRACE OUR (SCUTCHEON->STATUNE) I WARRANT ME +61-70970-0005-2247: THE LAD HAD CHECKED HIM THEN +61-70970-0006-2248: NEVER THAT SIR HE HAD SAID +61-70970-0007-2249: HE WAS IN DEEP CONVERSE WITH THE CLERK AND ENTERED THE HALL HOLDING HIM BY THE ARM +61-70970-0008-2250: NOW TO BED BOY +61-70970-0009-2251: TIS LATE AND I GO MYSELF WITHIN A SHORT SPACE +61-70970-0010-2252: DISMISS YOUR SQUIRE ROBIN AND BID ME GOOD (E E N->EVEN) +61-70970-0011-2253: AS ANY IN ENGLAND I WOULD SAY SAID GAMEWELL PROUDLY THAT IS IN HIS DAY +61-70970-0012-2254: YET HE WILL TEACH YOU A FEW TRICKS WHEN MORNING IS COME +61-70970-0013-2255: THERE WAS NO CHANCE TO ALTER HIS SLEEPING ROOM TO ONE NEARER TO GAMEWELL'S CHAMBER +61-70970-0014-2256: PRESENTLY HE CROSSED THE FLOOR OF HIS ROOM WITH DECIDED STEP +61-70970-0015-2257: WILL CRIED HE SOFTLY AND STUTELEY WHO HAD CHOSEN HIS COUCH ACROSS THE DOOR OF HIS YOUNG MASTER'S CHAMBER SPRANG UP AT ONCE IN ANSWER +61-70970-0016-2258: WE WILL GO OUT TOGETHER TO THE BOWER THERE IS A WAY DOWN TO THE COURT FROM MY WINDOW +61-70970-0017-2259: REST AND BE STILL UNTIL I WARN YOU +61-70970-0018-2260: THE HOURS PASSED WEARILY BY AND MOVEMENT COULD YET BE HEARD ABOUT THE HALL +61-70970-0019-2261: AT LAST ALL WAS QUIET AND BLACK IN THE COURTYARD OF GAMEWELL +61-70970-0020-2262: WILL WHISPERED ROBIN OPENING HIS DOOR AS HE SPOKE ARE YOU READY +61-70970-0021-2263: THEY THEN RENEWED THEIR JOURNEY AND UNDER THE BETTER LIGHT MADE A SAFE CROSSING OF THE STABLE ROOFS +61-70970-0022-2264: ROBIN ENTERED THE HUT DRAGGING THE UNWILLING ESQUIRE AFTER HIM +61-70970-0023-2265: BE NOT SO FOOLISH FRIEND SAID FITZOOTH CROSSLY +61-70970-0024-2266: THEY MOVED THEREAFTER CAUTIOUSLY ABOUT THE HUT GROPING BEFORE AND ABOUT THEM TO FIND SOMETHING TO SHOW THAT WARRENTON HAD FULFILLED HIS MISSION +61-70970-0025-2267: THEY WERE UPON THE VERGE OF AN OPEN TRAP IN THE FAR CORNER OF THE HUT AND STUTELEY HAD TRIPPED OVER THE EDGE OF THE REVERSED FLAP MOUTH OF THIS PIT +61-70970-0026-2268: (FITZOOTH'S->FITTOOTH'S) HAND RESTED AT LAST UPON THE TOP RUNG OF A LADDER AND SLOWLY THE TRUTH CAME TO HIM +61-70970-0027-2269: ROBIN CAREFULLY DESCENDED THE LADDER AND FOUND HIMSELF SOON UPON FIRM ROCKY GROUND +61-70970-0028-2270: STUTELEY WAS BY HIS SIDE IN A FLASH AND THEN THEY BOTH BEGAN FEELING ABOUT THEM TO ASCERTAIN THE SHAPE AND CHARACTER OF 
THIS VAULT +61-70970-0029-2271: FROM THE BLACKNESS BEHIND THE LIGHT THEY HEARD A VOICE WARRENTON'S +61-70970-0030-2272: SAVE ME MASTERS BUT YOU STARTLED ME RARELY +61-70970-0031-2273: CRIED HE WAVING THE (LANTHORN->LANTERN) BEFORE HIM TO MAKE SURE THAT THESE WERE NO GHOSTS IN FRONT OF HIM +61-70970-0032-2274: (ENQUIRED->INQUIRED) ROBIN WITH HIS (SUSPICIONS->SUSPICION) STILL UPON HIM +61-70970-0033-2275: TRULY SUCH A HORSE (SHOULD->WOULD) BE WORTH MUCH IN NOTTINGHAM FAIR +61-70970-0034-2276: NAY NAY LORDING ANSWERED WARRENTON WITH A HALF LAUGH +61-70970-0035-2277: WARRENTON SPOKE THUS WITH SIGNIFICANCE TO SHOW ROBIN THAT HE WAS NOT TO THINK (GEOFFREY'S->JEFFREY'S) CLAIMS TO THE ESTATE WOULD BE PASSED BY +61-70970-0036-2278: ROBIN FITZOOTH SAW THAT HIS DOUBTS OF WARRENTON HAD BEEN UNFAIR AND HE BECAME ASHAMED OF HIMSELF FOR (HARBORING->HARBOURING) THEM +61-70970-0037-2279: HIS TONES RANG PLEASANTLY ON WARRENTON'S EARS AND FORTHWITH (A->THE) GOOD FELLOWSHIP WAS HERALDED BETWEEN THEM +61-70970-0038-2280: THE OLD SERVANT TOLD HIM QUIETLY AS THEY CREPT BACK TO GAMEWELL THAT THIS (PASSAGE WAY->PASSAGEWAY) LED FROM THE HUT IN THE (PLEASANCE->PLEASANTS) TO SHERWOOD AND THAT (GEOFFREY->JEFFREY) FOR THE TIME WAS HIDING WITH THE OUTLAWS IN THE FOREST +61-70970-0039-2281: HE IMPLORES US TO BE DISCREET AS THE GRAVE IN THIS MATTER FOR IN SOOTH HIS LIFE IS IN THE HOLLOW OF OUR HANDS +61-70970-0040-2282: THEY (REGAINED->REGAIN) THEIR APARTMENT APPARENTLY WITHOUT DISTURBING THE HOUSEHOLD OF GAMEWELL +672-122797-0000-1529: OUT IN THE WOODS STOOD A NICE LITTLE FIR TREE +672-122797-0001-1530: THE PLACE HE HAD WAS A VERY GOOD ONE THE SUN SHONE ON HIM AS TO FRESH AIR THERE WAS ENOUGH OF THAT AND ROUND HIM GREW MANY LARGE SIZED COMRADES PINES AS WELL AS (FIRS->FURS) +672-122797-0002-1531: HE DID NOT THINK OF THE WARM SUN AND OF THE FRESH AIR HE DID NOT CARE FOR THE LITTLE COTTAGE CHILDREN THAT RAN ABOUT (AND->IN) PRATTLED WHEN THEY WERE IN THE WOODS LOOKING FOR WILD STRAWBERRIES +672-122797-0003-1532: BUT THIS WAS WHAT THE TREE COULD NOT BEAR TO HEAR +672-122797-0004-1533: IN WINTER WHEN THE SNOW LAY GLITTERING ON THE GROUND A HARE WOULD OFTEN COME LEAPING ALONG AND JUMP RIGHT OVER THE LITTLE TREE +672-122797-0005-1534: OH THAT MADE HIM SO ANGRY +672-122797-0006-1535: TO GROW AND GROW TO GET OLDER AND BE TALL THOUGHT THE TREE THAT AFTER ALL IS THE MOST DELIGHTFUL THING IN THE WORLD +672-122797-0007-1536: IN AUTUMN THE (WOOD CUTTERS->WOODCUTTERS) ALWAYS CAME AND FELLED SOME OF THE LARGEST TREES +672-122797-0008-1537: THIS HAPPENED EVERY YEAR AND THE YOUNG FIR TREE THAT HAD NOW GROWN TO A VERY COMELY (SIZE->SIZED) TREMBLED AT THE SIGHT FOR THE MAGNIFICENT GREAT TREES FELL TO THE EARTH WITH NOISE AND CRACKING THE BRANCHES WERE LOPPED OFF AND THE TREES LOOKED LONG AND BARE THEY WERE HARDLY TO BE (RECOGNISED->RECOGNIZED) AND THEN THEY WERE LAID IN CARTS AND THE HORSES DRAGGED THEM OUT OF THE WOOD +672-122797-0009-1538: HAVE YOU NOT MET (THEM ANYWHERE->THE MANY WHERE) +672-122797-0010-1539: REJOICE IN THY GROWTH SAID THE SUNBEAMS +672-122797-0011-1540: AND THEN WHAT HAPPENS THEN +672-122797-0012-1541: I WOULD FAIN KNOW IF I AM DESTINED FOR SO GLORIOUS A CAREER CRIED THE TREE REJOICING +672-122797-0013-1542: I AM NOW TALL AND MY BRANCHES SPREAD LIKE THE OTHERS THAT WERE CARRIED OFF LAST YEAR OH +672-122797-0014-1543: WERE I BUT ALREADY ON THE CART +672-122797-0015-1544: (WERE->WHERE) I IN THE WARM ROOM WITH ALL THE (SPLENDOR->SPLENDOUR) AND MAGNIFICENCE +672-122797-0016-1545: YES THEN SOMETHING BETTER SOMETHING STILL 
GRANDER WILL SURELY FOLLOW OR WHEREFORE SHOULD THEY THUS ORNAMENT ME +672-122797-0017-1546: SOMETHING BETTER SOMETHING STILL GRANDER MUST FOLLOW BUT WHAT +672-122797-0018-1547: REJOICE IN OUR PRESENCE SAID THE (AIR->HEIR) AND THE SUNLIGHT +672-122797-0019-1548: REJOICE IN THY OWN FRESH YOUTH +672-122797-0020-1549: BUT THE TREE DID NOT REJOICE AT ALL HE GREW AND GREW AND WAS GREEN BOTH WINTER AND SUMMER +672-122797-0021-1550: AND TOWARDS CHRISTMAS HE WAS ONE OF THE FIRST THAT WAS CUT DOWN +672-122797-0022-1551: THE AXE STRUCK DEEP INTO THE VERY PITH THE TREE FELL TO THE EARTH WITH A SIGH HE FELT A PANG IT WAS LIKE A SWOON HE COULD NOT THINK OF HAPPINESS FOR HE WAS SORROWFUL AT BEING SEPARATED FROM HIS HOME FROM THE PLACE WHERE HE HAD SPRUNG UP +672-122797-0023-1552: HE WELL KNEW THAT HE SHOULD NEVER SEE HIS DEAR OLD COMRADES THE LITTLE BUSHES AND FLOWERS AROUND HIM (ANYMORE->ANY MORE) PERHAPS NOT EVEN THE BIRDS +672-122797-0024-1553: THE DEPARTURE WAS NOT AT ALL AGREEABLE +672-122797-0025-1554: THE TREE ONLY CAME TO HIMSELF WHEN HE WAS UNLOADED IN A (COURT YARD->COURTYARD) WITH THE OTHER TREES AND HEARD A MAN SAY THAT ONE IS SPLENDID WE DON'T WANT THE OTHERS +672-122797-0026-1555: THERE TOO WERE LARGE EASY CHAIRS SILKEN SOFAS LARGE TABLES FULL OF PICTURE BOOKS AND FULL OF TOYS WORTH HUNDREDS AND HUNDREDS OF CROWNS AT LEAST THE CHILDREN SAID SO +672-122797-0027-1556: THE SERVANTS AS WELL AS THE YOUNG LADIES DECORATED IT +672-122797-0028-1557: THIS EVENING THEY ALL SAID +672-122797-0029-1558: HOW IT WILL SHINE THIS EVENING +672-122797-0030-1559: PERHAPS THE OTHER TREES FROM THE FOREST WILL COME TO LOOK AT ME +672-122797-0031-1560: IT BLAZED UP FAMOUSLY HELP HELP +672-122797-0032-1561: CRIED THE YOUNG LADIES AND THEY QUICKLY PUT OUT THE FIRE +672-122797-0033-1562: A STORY +672-122797-0034-1563: A STORY CRIED THE CHILDREN DRAWING A LITTLE FAT MAN TOWARDS THE TREE +672-122797-0035-1564: BUT I SHALL TELL ONLY ONE STORY +672-122797-0036-1565: HUMPY (DUMPY->DON'T BE) FELL DOWNSTAIRS AND YET HE MARRIED THE PRINCESS +672-122797-0037-1566: THAT'S THE WAY OF THE WORLD +672-122797-0038-1567: THOUGHT THE FIR TREE AND BELIEVED IT ALL BECAUSE THE MAN WHO TOLD THE STORY WAS SO GOOD LOOKING WELL WELL +672-122797-0039-1568: I WON'T TREMBLE TO MORROW THOUGHT THE FIR TREE +672-122797-0040-1569: AND THE WHOLE NIGHT THE TREE STOOD STILL AND IN DEEP THOUGHT +672-122797-0041-1570: IN THE MORNING THE SERVANT AND THE HOUSEMAID CAME IN +672-122797-0042-1571: BUT THEY DRAGGED HIM OUT OF THE ROOM AND UP THE STAIRS INTO THE LOFT AND HERE IN A DARK CORNER WHERE NO DAYLIGHT COULD ENTER THEY LEFT HIM +672-122797-0043-1572: WHAT'S THE MEANING OF THIS THOUGHT THE TREE +672-122797-0044-1573: AND HE LEANED AGAINST THE WALL LOST IN REVERIE +672-122797-0045-1574: TIME ENOUGH HAD HE TOO FOR HIS REFLECTIONS FOR DAYS AND NIGHTS PASSED ON AND NOBODY CAME UP AND WHEN AT LAST SOMEBODY DID COME IT WAS ONLY TO PUT SOME GREAT TRUNKS IN A CORNER OUT OF THE WAY +672-122797-0046-1575: TIS NOW WINTER OUT OF DOORS THOUGHT THE TREE +672-122797-0047-1576: HOW KIND MAN IS AFTER ALL +672-122797-0048-1577: IF IT ONLY WERE NOT SO DARK HERE AND SO TERRIBLY LONELY +672-122797-0049-1578: SQUEAK (SQUEAK->SQUI) +672-122797-0050-1579: THEY SNUFFED ABOUT THE FIR TREE AND RUSTLED AMONG THE BRANCHES +672-122797-0051-1580: I AM BY NO MEANS OLD SAID THE FIR TREE +672-122797-0052-1581: THERE'S MANY A ONE CONSIDERABLY OLDER THAN I AM +672-122797-0053-1582: THEY WERE SO EXTREMELY CURIOUS +672-122797-0054-1583: I KNOW NO SUCH PLACE SAID THE TREE 
+672-122797-0055-1584: AND THEN HE TOLD ALL ABOUT HIS YOUTH AND THE LITTLE MICE HAD NEVER HEARD THE LIKE BEFORE AND THEY LISTENED AND SAID +672-122797-0056-1585: SAID THE FIR TREE THINKING OVER WHAT HE HAD HIMSELF RELATED +672-122797-0057-1586: YES IN REALITY THOSE WERE HAPPY TIMES +672-122797-0058-1587: WHO (IS->IT'S) HUMPY DUMPY ASKED THE MICE +672-122797-0059-1588: ONLY THAT ONE ANSWERED THE TREE +672-122797-0060-1589: IT IS A VERY STUPID STORY +672-122797-0061-1590: DON'T YOU KNOW ONE ABOUT BACON AND TALLOW CANDLES CAN'T YOU TELL ANY LARDER STORIES +672-122797-0062-1591: NO SAID THE TREE +672-122797-0063-1592: THEN GOOD (BYE->BY) SAID THE RATS AND THEY WENT HOME +672-122797-0064-1593: AT LAST THE LITTLE MICE STAYED AWAY ALSO AND THE TREE SIGHED AFTER ALL IT WAS VERY PLEASANT WHEN THE SLEEK LITTLE MICE SAT ROUND ME AND LISTENED TO WHAT I TOLD THEM +672-122797-0065-1594: NOW THAT TOO IS OVER +672-122797-0066-1595: WHY ONE MORNING THERE CAME A QUANTITY OF PEOPLE AND SET TO WORK IN THE LOFT +672-122797-0067-1596: THE TRUNKS WERE MOVED THE TREE WAS PULLED OUT AND THROWN RATHER HARD IT IS TRUE DOWN ON THE FLOOR BUT A MAN DREW HIM TOWARDS THE STAIRS WHERE THE DAYLIGHT SHONE +672-122797-0068-1597: BUT IT WAS NOT THE FIR TREE THAT THEY MEANT +672-122797-0069-1598: IT WAS IN A CORNER THAT HE LAY AMONG WEEDS AND NETTLES +672-122797-0070-1599: THE GOLDEN STAR OF TINSEL WAS STILL ON THE TOP OF THE TREE AND GLITTERED IN THE SUNSHINE +672-122797-0071-1600: IN THE (COURT YARD->COURTYARD) SOME OF THE (MERRY->MARRIED) CHILDREN WERE PLAYING WHO HAD DANCED AT CHRISTMAS ROUND THE FIR TREE AND WERE SO GLAD AT THE SIGHT OF HIM +672-122797-0072-1601: AND THE GARDENER'S BOY CHOPPED THE TREE INTO SMALL PIECES THERE WAS A WHOLE HEAP LYING THERE +672-122797-0073-1602: THE WOOD FLAMED UP SPLENDIDLY UNDER THE LARGE BREWING COPPER AND (IT SIGHED->ITS SIDE) SO DEEPLY +672-122797-0074-1603: HOWEVER THAT WAS OVER NOW THE TREE GONE THE STORY AT AN END +6829-68769-0000-1858: KENNETH AND BETH REFRAINED FROM TELLING THE OTHER GIRLS OR UNCLE JOHN OF OLD WILL ROGERS'S VISIT BUT THEY GOT MISTER WATSON IN THE LIBRARY AND QUESTIONED HIM CLOSELY ABOUT THE PENALTY FOR FORGING A (CHECK->CHEQUE) +6829-68769-0001-1859: IT WAS A SERIOUS CRIME INDEED MISTER WATSON TOLD THEM AND TOM GATES BADE FAIR TO SERVE A LENGTHY TERM IN (*->THE) STATE'S PRISON AS A CONSEQUENCE OF HIS RASH ACT +6829-68769-0002-1860: I CAN'T SEE IT IN THAT LIGHT SAID THE OLD LAWYER +6829-68769-0003-1861: IT WAS A DELIBERATE THEFT FROM HIS EMPLOYERS TO PROTECT A GIRL HE LOVED +6829-68769-0004-1862: BUT THEY COULD NOT HAVE PROVEN A (CASE->GASE) AGAINST LUCY IF SHE WAS INNOCENT AND ALL THEIR THREATS OF ARRESTING HER WERE PROBABLY (*->A) MERE BLUFF +6829-68769-0005-1863: HE WAS (*->A) SOFT HEARTED AND IMPETUOUS SAID BETH AND BEING IN LOVE HE DIDN'T STOP TO COUNT THE COST +6829-68769-0006-1864: IF THE PROSECUTION WERE WITHDRAWN AND THE CASE SETTLED WITH THE VICTIM OF THE FORGED CHECK THEN THE YOUNG MAN WOULD BE ALLOWED HIS FREEDOM +6829-68769-0007-1865: BUT UNDER THE CIRCUMSTANCES I DOUBT (IF->OF) SUCH AN ARRANGEMENT COULD BE MADE +6829-68769-0008-1866: (FAIRVIEW WAS->FAIR VIEW'S) TWELVE MILES AWAY BUT BY TEN O'CLOCK THEY DREW UP AT THE COUNTY (JAIL->DRALE) +6829-68769-0009-1867: THEY WERE RECEIVED IN THE LITTLE OFFICE BY A MAN NAMED MARKHAM WHO WAS THE JAILER +6829-68769-0010-1868: WE WISH TO TALK WITH HIM ANSWERED KENNETH TALK +6829-68769-0011-1869: I'M RUNNING FOR REPRESENTATIVE ON THE REPUBLICAN TICKET SAID KENNETH QUIETLY +6829-68769-0012-1870: (OH->I'LL) SAY THAT'S 
DIFFERENT OBSERVED MARKHAM ALTERING HIS (DEMEANOR->DEMEANOUR) +6829-68769-0013-1871: MAY WE (SEE->SEA) GATES AT ONCE ASKED KENNETH +6829-68769-0014-1872: THEY FOLLOWED THE JAILER ALONG (A->THE) SUCCESSION OF PASSAGES +6829-68769-0015-1873: SOMETIMES (I'M->ON) THAT (YEARNING->YEARNIN) FOR A SMOKE I'M NEARLY CRAZY (AN->AND) I (DUNNO->DON'T KNOW) WHICH IS (WORST->WORSE) DYIN ONE WAY OR (ANOTHER->THE OTHER) +6829-68769-0016-1874: HE UNLOCKED THE DOOR AND CALLED HERE'S VISITORS TOM +6829-68769-0017-1875: (WORSE->HORSE) TOM WORSE (N EVER->THAN ARROW) REPLIED THE JAILER GLOOMILY +6829-68769-0018-1876: (MISS DE->MISTER) GRAF SAID KENNETH NOTICING THE BOY'S FACE CRITICALLY AS HE STOOD WHERE THE LIGHT FROM THE PASSAGE FELL UPON IT +6829-68769-0019-1877: SORRY WE HAVEN'T ANY RECEPTION ROOM IN THE JAIL +6829-68769-0020-1878: SIT DOWN PLEASE SAID GATES IN A CHEERFUL AND PLEASANT VOICE THERE'S A (BENCH->PINCH) HERE +6829-68769-0021-1879: A FRESH WHOLESOME LOOKING BOY WAS TOM GATES WITH STEADY GRAY EYES AN INTELLIGENT FOREHEAD BUT A SENSITIVE RATHER WEAK MOUTH +6829-68769-0022-1880: WE HAVE HEARD SOMETHING OF YOUR STORY SAID KENNETH AND (ARE->OUR) INTERESTED IN IT +6829-68769-0023-1881: I DIDN'T STOP TO THINK WHETHER IT WAS FOOLISH OR NOT I DID IT AND I'M GLAD I DID (*->IT) +6829-68769-0024-1882: OLD WILL IS A FINE FELLOW BUT POOR AND HELPLESS SINCE MISSUS ROGERS HAD HER ACCIDENT +6829-68769-0025-1883: THEN ROGERS WOULDN'T DO ANYTHING BUT LEAD HER AROUND AND WAIT UPON HER AND THE PLACE WENT TO RACK AND RUIN +6829-68769-0026-1884: HE SPOKE SIMPLY BUT PACED UP AND DOWN THE NARROW CELL IN FRONT OF THEM +6829-68769-0027-1885: WHOSE NAME DID YOU SIGN TO THE CHECK ASKED KENNETH +6829-68769-0028-1886: HE IS SUPPOSED TO SIGN ALL THE CHECKS OF THE (CONCERN->CONCER) +6829-68769-0029-1887: IT'S A STOCK COMPANY (AND->IN) RICH +6829-68769-0030-1888: I WAS (BOOKKEEPER->BITKEEPER) SO IT WAS EASY TO GET A BLANK CHECK AND FORGE THE SIGNATURE +6829-68769-0031-1889: AS REGARDS MY ROBBING THE COMPANY I'LL SAY THAT I SAVED (THEM->HIM) A HEAVY LOSS ONE DAY +6829-68769-0032-1890: I DISCOVERED AND PUT OUT A FIRE THAT WOULD HAVE DESTROYED THE WHOLE PLANT BUT MARSHALL NEVER EVEN THANKED ME +6829-68769-0033-1891: IT WAS BETTER FOR HIM TO THINK THE GIRL UNFEELING THAN TO KNOW THE TRUTH +6829-68769-0034-1892: I'M GOING TO SEE MISTER (MARSHALL->MARSHAL) SAID KENNETH AND DISCOVER WHAT I CAN DO TO ASSIST YOU THANK YOU SIR +6829-68769-0035-1893: IT WON'T BE MUCH BUT I'M GRATEFUL TO FIND A FRIEND +6829-68769-0036-1894: THEY LEFT HIM THEN FOR THE JAILER ARRIVED TO UNLOCK THE DOOR AND ESCORT THEM TO THE OFFICE +6829-68769-0037-1895: I'VE SEEN LOTS OF THAT KIND IN MY DAY +6829-68769-0038-1896: AND IT RUINS A MAN'S DISPOSITION +6829-68769-0039-1897: HE LOOKED UP RATHER UNGRACIOUSLY BUT MOTIONED THEM TO BE SEATED +6829-68769-0040-1898: SOME GIRL HAS BEEN (*->IN) HERE TWICE TO INTERVIEW MY MEN AND I HAVE REFUSED TO ADMIT HER +6829-68769-0041-1899: I'M NOT ELECTIONEERING JUST NOW +6829-68769-0042-1900: OH WELL SIR WHAT ABOUT HIM +6829-68769-0043-1901: AND HE DESERVES A TERM (IN->AND) STATE'S PRISON +6829-68769-0044-1902: IT HAS COST ME TWICE SIXTY DOLLARS (IN->AN) ANNOYANCE +6829-68769-0045-1903: I'LL PAY ALL THE (COSTS->COST) BESIDES +6829-68769-0046-1904: YOU'RE FOOLISH WHY SHOULD YOU DO ALL THIS +6829-68769-0047-1905: I HAVE MY OWN REASONS MISTER (MARSHALL->MARSHAL) +6829-68769-0048-1906: GIVE ME A (CHECK->CHEQUE) FOR A HUNDRED AND FIFTY AND I'LL TURN OVER TO YOU THE FORGED CHECK AND QUASH FURTHER PROCEEDINGS +6829-68769-0049-1907: HE 
DETESTED THE GRASPING DISPOSITION THAT WOULD (ENDEAVOR->ENDEAVOUR) TO TAKE ADVANTAGE OF HIS EVIDENT DESIRE TO HELP YOUNG GATES +6829-68769-0050-1908: BETH UNEASY AT HIS SILENCE NUDGED HIM +6829-68769-0051-1909: THERE WAS A GRIM SMILE OF AMUSEMENT ON HIS SHREWD FACE +6829-68769-0052-1910: HE MIGHT HAVE HAD THAT FORGED CHECK FOR THE FACE OF IT IF HE'D BEEN SHARP +6829-68769-0053-1911: AND TO THINK WE CAN SAVE ALL THAT MISERY AND DESPAIR BY THE PAYMENT OF A HUNDRED AND FIFTY DOLLARS +6829-68771-0000-1912: SO TO THE SURPRISE OF THE DEMOCRATIC COMMITTEE AND ALL HIS FRIENDS MISTER HOPKINS ANNOUNCED THAT HE WOULD OPPOSE (FORBES'S->FORTS) AGGRESSIVE CAMPAIGN WITH AN EQUAL AGGRESSIVENESS AND SPEND AS MANY DOLLARS IN DOING SO AS MIGHT BE NECESSARY +6829-68771-0001-1913: ONE OF MISTER HOPKINS'S FIRST TASKS AFTER CALLING HIS FAITHFUL (HENCHMEN->HENCHMAN) AROUND HIM WAS TO MAKE A CAREFUL (CANVASS->CANVAS) OF THE VOTERS OF HIS DISTRICT TO SEE WHAT WAS STILL TO BE ACCOMPLISHED +6829-68771-0002-1914: THE WEAK (KNEED->NEED) CONTINGENCY MUST BE STRENGTHENED AND FORTIFIED AND A COUPLE OF HUNDRED VOTES IN ONE WAY OR (ANOTHER->THE OTHER) SECURED FROM THE OPPOSITION +6829-68771-0003-1915: THE DEMOCRATIC COMMITTEE FIGURED OUT A WAY TO DO THIS +6829-68771-0004-1916: UNDER ORDINARY CONDITIONS REYNOLDS WAS SURE TO BE ELECTED BUT THE COMMITTEE PROPOSED TO SACRIFICE HIM IN ORDER TO (ELECT->ELEC) HOPKINS +6829-68771-0005-1917: THE ONLY THING NECESSARY WAS TO FIX SETH REYNOLDS AND THIS HOPKINS ARRANGED PERSONALLY +6829-68771-0006-1918: AND THIS WAS WHY KENNETH AND BETH DISCOVERED HIM CONVERSING WITH THE YOUNG WOMAN IN THE BUGGY +6829-68771-0007-1919: THE DESCRIPTION SHE GAVE OF THE COMING RECEPTION TO THE (WOMAN'S->WOMEN'S) POLITICAL LEAGUE WAS SO HUMOROUS AND DIVERTING THAT THEY WERE BOTH LAUGHING HEARTILY OVER THE THING WHEN THE YOUNG PEOPLE PASSED THEM AND THUS MISTER HOPKINS FAILED TO NOTICE WHO THE OCCUPANTS OF THE OTHER VEHICLE WERE +6829-68771-0008-1920: THESE WOMEN WERE FLATTERED BY THE ATTENTION OF THE YOUNG LADY AND HAD PROMISED TO ASSIST IN ELECTING MISTER FORBES +6829-68771-0009-1921: LOUISE HOPED FOR EXCELLENT RESULTS FROM THIS ORGANIZATION AND WISHED THE ENTERTAINMENT TO BE SO EFFECTIVE IN WINNING THEIR GOOD WILL THAT THEY WOULD WORK EARNESTLY FOR THE CAUSE IN WHICH THEY WERE ENLISTED +6829-68771-0010-1922: THE (FAIRVIEW->FAIR VIEW) BAND WAS ENGAGED TO DISCOURSE AS MUCH HARMONY AS IT COULD PRODUCE AND THE RESOURCES OF THE GREAT HOUSE WERE TAXED TO ENTERTAIN THE GUESTS +6829-68771-0011-1923: TABLES WERE SPREAD ON THE LAWN AND A DAINTY BUT SUBSTANTIAL REPAST WAS TO BE SERVED +6829-68771-0012-1924: THIS WAS THE FIRST OCCASION WITHIN A GENERATION WHEN SUCH AN ENTERTAINMENT HAD BEEN GIVEN AT ELMHURST AND THE ONLY ONE WITHIN THE MEMORY OF MAN (WHERE->WERE) THE NEIGHBORS AND COUNTRY PEOPLE HAD BEEN (*->THE) INVITED (GUESTS->GUEST) +6829-68771-0013-1925: THE (ATTENDANCE->ATTENDANTS) WAS UNEXPECTEDLY LARGE AND THE GIRLS WERE DELIGHTED FORESEEING GREAT SUCCESS FOR THEIR (FETE->FIGHT) +6829-68771-0014-1926: WE OUGHT TO HAVE MORE (ATTENDANTS->ATTENDANCE) BETH SAID LOUISE APPROACHING HER COUSIN +6829-68771-0015-1927: WON'T YOU RUN INTO THE HOUSE AND SEE IF MARTHA CAN'T SPARE ONE OR TWO MORE MAIDS +6829-68771-0016-1928: SHE WAS VERY FOND OF THE YOUNG LADIES WHOM SHE HAD KNOWN WHEN AUNT JANE WAS (THE->THEIR) MISTRESS HERE AND BETH WAS HER (ESPECIAL FAVORITE->SPECIAL FAVOURITE) +6829-68771-0017-1929: THE HOUSEKEEPER LED THE WAY (AND->IN) BETH FOLLOWED +6829-68771-0018-1930: FOR A MOMENT BETH STOOD STARING WHILE THE NEW 
MAID REGARDED HER WITH COMPOSURE AND (A->OF) SLIGHT SMILE UPON HER BEAUTIFUL FACE +6829-68771-0019-1931: SHE WAS DRESSED IN THE REGULATION COSTUME OF THE MAIDS AT ELMHURST A PLAIN BLACK GOWN WITH (*->A) WHITE APRON AND CAP +6829-68771-0020-1932: THEN SHE GAVE A LITTLE LAUGH AND REPLIED NO MISS BETH I'M ELIZABETH PARSONS +6829-68771-0021-1933: BUT IT CAN'T BE PROTESTED THE GIRL +6829-68771-0022-1934: I ATTEND TO THE HOUSEHOLD MENDING YOU KNOW AND CARE FOR THE LINEN +6829-68771-0023-1935: YOU SPEAK LIKE AN EDUCATED PERSON SAID BETH WONDERINGLY WHERE IS YOUR HOME +6829-68771-0024-1936: FOR THE FIRST TIME THE MAID SEEMED A LITTLE CONFUSED AND HER GAZE WANDERED FROM THE FACE OF HER VISITOR +6829-68771-0025-1937: SHE SAT DOWN IN A ROCKING CHAIR AND CLASPING HER HANDS IN HER LAP (ROCKED->ROCK) SLOWLY BACK AND FORTH I'M SORRY SAID BETH +6829-68771-0026-1938: ELIZA (PARSONS->PARSON) SHOOK HER HEAD +6829-68771-0027-1939: THEY THEY EXCITE ME IN SOME WAY AND I I CAN'T BEAR THEM YOU MUST EXCUSE ME +6829-68771-0028-1940: SHE EVEN SEEMED MILDLY AMUSED AT THE ATTENTION SHE ATTRACTED +6829-68771-0029-1941: BETH WAS A BEAUTIFUL GIRL THE HANDSOMEST OF THE THREE COUSINS BY FAR YET ELIZA SURPASSED HER (IN->A) NATURAL CHARM AND SEEMED WELL AWARE OF THE FACT +6829-68771-0030-1942: HER MANNER WAS NEITHER INDEPENDENT NOR ASSERTIVE BUT RATHER ONE OF WELL BRED COMPOSURE AND CALM RELIANCE +6829-68771-0031-1943: HER EYES WANDERED TO THE MAID'S HANDS +6829-68771-0032-1944: HOWEVER HER FEATURES AND FORM MIGHT REPRESS ANY EVIDENCE OF NERVOUSNESS THESE HANDS TOLD A DIFFERENT STORY +6829-68771-0033-1945: SHE ROSE QUICKLY TO HER FEET WITH AN IMPETUOUS GESTURE THAT MADE HER VISITOR CATCH HER BREATH +6829-68771-0034-1946: I WISH I KNEW MYSELF SHE CRIED FIERCELY +6829-68771-0035-1947: WILL YOU LEAVE ME ALONE IN MY OWN ROOM OR MUST I GO AWAY TO ESCAPE YOU +6829-68771-0036-1948: ELIZA CLOSED THE DOOR BEHIND HER WITH A DECIDED SLAM AND A KEY CLICKED IN THE LOCK +6930-75918-0000-0: CONCORD RETURNED TO ITS PLACE AMIDST THE TENTS +6930-75918-0001-1: THE ENGLISH (FORWARDED->FOOTED) TO THE FRENCH BASKETS OF FLOWERS OF WHICH THEY HAD MADE A PLENTIFUL PROVISION TO GREET THE ARRIVAL OF THE YOUNG PRINCESS THE FRENCH IN RETURN INVITED THE ENGLISH TO A SUPPER WHICH WAS TO BE GIVEN THE NEXT DAY +6930-75918-0002-2: CONGRATULATIONS WERE POURED IN UPON THE PRINCESS EVERYWHERE DURING HER JOURNEY +6930-75918-0003-3: FROM THE RESPECT PAID HER ON ALL SIDES SHE SEEMED LIKE A QUEEN AND FROM THE ADORATION WITH WHICH SHE WAS TREATED BY TWO OR THREE SHE APPEARED AN OBJECT OF WORSHIP THE QUEEN MOTHER GAVE THE FRENCH THE MOST AFFECTIONATE RECEPTION FRANCE WAS HER NATIVE COUNTRY AND SHE HAD SUFFERED TOO MUCH UNHAPPINESS IN ENGLAND FOR ENGLAND TO HAVE MADE HER FORGET FRANCE +6930-75918-0004-4: SHE TAUGHT HER DAUGHTER THEN BY HER OWN AFFECTION FOR IT THAT LOVE FOR A COUNTRY WHERE THEY HAD BOTH BEEN HOSPITABLY RECEIVED AND (WHERE->WERE) A BRILLIANT FUTURE OPENED (BEFORE->FOR) THEM +6930-75918-0005-5: THE COUNT HAD THROWN HIMSELF BACK ON HIS SEAT LEANING HIS SHOULDERS AGAINST THE PARTITION OF THE TENT AND REMAINED THUS HIS FACE BURIED IN HIS HANDS WITH HEAVING CHEST AND RESTLESS LIMBS +6930-75918-0006-6: THIS HAS INDEED BEEN (A->AN) HARASSING DAY CONTINUED THE YOUNG MAN HIS EYES FIXED UPON HIS FRIEND +6930-75918-0007-7: YOU WILL BE FRANK WITH ME I ALWAYS AM +6930-75918-0008-8: CAN YOU IMAGINE WHY BUCKINGHAM HAS BEEN SO VIOLENT I SUSPECT +6930-75918-0009-9: IT IS YOU WHO ARE MISTAKEN RAOUL I HAVE READ HIS DISTRESS IN HIS EYES IN HIS EVERY GESTURE AND ACTION THE 
WHOLE DAY +6930-75918-0010-10: I CAN PERCEIVE LOVE CLEARLY ENOUGH +6930-75918-0011-11: I AM CONVINCED OF WHAT I SAY SAID THE COUNT +6930-75918-0012-12: IT IS ANNOYANCE THEN +6930-75918-0013-13: IN THOSE VERY TERMS I EVEN ADDED MORE +6930-75918-0014-14: BUT CONTINUED RAOUL NOT INTERRUPTED BY THIS MOVEMENT OF HIS FRIEND HEAVEN BE PRAISED THE FRENCH WHO ARE PRONOUNCED TO BE THOUGHTLESS AND INDISCREET RECKLESS EVEN ARE CAPABLE OF BRINGING A CALM AND SOUND JUDGMENT TO BEAR ON MATTERS OF SUCH HIGH IMPORTANCE +6930-75918-0015-15: THUS IT IS THAT THE HONOR OF THREE IS SAVED OUR (COUNTRY'S->COUNTRY) OUR (MASTER'S->MASTERS) AND OUR OWN +6930-75918-0016-16: YES I NEED REPOSE MANY THINGS HAVE AGITATED ME TO DAY BOTH IN MIND AND BODY WHEN YOU RETURN TO MORROW I SHALL NO LONGER BE THE SAME MAN +6930-75918-0017-17: (BUT->BY) IN THIS FRIENDLY PRESSURE RAOUL COULD DETECT THE NERVOUS AGITATION OF A GREAT INTERNAL CONFLICT +6930-75918-0018-18: THE NIGHT WAS CLEAR STARLIT AND SPLENDID THE TEMPEST HAD PASSED AWAY AND THE SWEET INFLUENCES OF THE EVENING HAD RESTORED LIFE PEACE AND SECURITY EVERYWHERE +6930-75918-0019-19: UPON THE LARGE SQUARE IN FRONT OF THE HOTEL THE SHADOWS OF THE TENTS INTERSECTED BY THE GOLDEN MOONBEAMS FORMED AS IT WERE A HUGE MOSAIC OF JET AND YELLOW FLAGSTONES +6930-75918-0020-20: (BRAGELONNE->BRIGALON) WATCHED FOR SOME TIME THE CONDUCT OF THE TWO LOVERS LISTENED TO THE LOUD AND UNCIVIL SLUMBERS OF MANICAMP WHO SNORED AS IMPERIOUSLY AS THOUGH HE WAS WEARING HIS BLUE AND GOLD INSTEAD OF HIS VIOLET SUIT +6930-76324-0000-21: GOLIATH MAKES ANOTHER DISCOVERY +6930-76324-0001-22: (THEY->THERE) WERE CERTAINLY NO (NEARER->NEAR) THE SOLUTION OF THEIR PROBLEM +6930-76324-0002-23: THE POOR LITTLE THINGS CRIED CYNTHIA THINK OF THEM HAVING BEEN TURNED TO THE WALL ALL THESE YEARS +6930-76324-0003-24: NOW WHAT (WAS->IS) THE SENSE OF IT (TWO->TOO) INNOCENT BABIES LIKE THAT +6930-76324-0004-25: BUT JOYCE HAD NOT BEEN LISTENING ALL AT ONCE SHE PUT DOWN HER CANDLE ON THE TABLE AND FACED HER COMPANION +6930-76324-0005-26: THE TWIN BROTHER DID SOMETHING SHE DIDN'T LIKE AND SHE TURNED HIS PICTURE TO THE WALL +6930-76324-0006-27: HERS HAPPENED TO BE (IN->ON) THE SAME FRAME TOO BUT SHE EVIDENTLY DIDN'T CARE ABOUT (THAT->IT) +6930-76324-0007-28: NOW WHAT HAVE YOU TO SAY CYNTHIA (SPRAGUE->SP) +6930-76324-0008-29: I THOUGHT WE WERE STUMPED AGAIN WHEN I FIRST SAW THAT PICTURE BUT IT'S BEEN OF SOME USE AFTER ALL +6930-76324-0009-30: DO YOU SUPPOSE THE MINIATURE WAS A COPY OF THE SAME THING +6930-76324-0010-31: (WHAT->WHEN) IN THE WORLD IS (THAT->IT) QUERIED JOYCE +6930-76324-0011-32: (THEY->MAY) WORRY ME TERRIBLY AND BESIDES I'D LIKE TO SEE WHAT THIS LOVELY FURNITURE LOOKS LIKE WITHOUT SUCH QUANTITIES OF DUST ALL OVER IT GOOD SCHEME (CYN->SIN) +6930-76324-0012-33: (WE'LL->WILL) COME IN HERE THIS AFTERNOON WITH OLD CLOTHES ON AND (HAVE->HALF) A REGULAR HOUSE CLEANING +6930-76324-0013-34: IT CAN'T HURT ANYTHING I'M SURE FOR WE WON'T DISTURB THINGS AT ALL +6930-76324-0014-35: THIS THOUGHT HOWEVER DID NOT ENTER THE HEADS OF THE ENTHUSIASTIC PAIR +6930-76324-0015-36: SMUGGLING THE HOUSE CLEANING PARAPHERNALIA INTO THE CELLAR WINDOW UNOBSERVED THAT AFTERNOON PROVED NO EASY TASK FOR CYNTHIA HAD ADDED A (WHISK->WHISKED) BROOM AND DUST PAN TO THE OUTFIT +6930-76324-0016-37: THE LURE PROVED TOO MUCH FOR HIM AND HE CAME SPORTING AFTER IT AS (FRISKILY->FRISKLY) AS A YOUNG KITTEN MUCH TO CYNTHIA'S DELIGHT WHEN SHE CAUGHT SIGHT OF HIM +6930-76324-0017-38: OH LET HIM COME ALONG SHE URGED I DO LOVE TO SEE HIM ABOUT THAT OLD HOUSE 
+6930-76324-0018-39: HE MAKES IT SORT OF COZIER +6930-76324-0019-40: NOW LET'S DUST THE FURNITURE AND PICTURES +6930-76324-0020-41: YET LITTLE AS IT WAS IT HAD ALREADY MADE A VAST DIFFERENCE IN THE ASPECT OF THE ROOM +6930-76324-0021-42: SURFACE DUST AT LEAST HAD BEEN REMOVED AND THE FINE OLD FURNITURE GAVE A HINT OF ITS REAL ELEGANCE AND POLISH +6930-76324-0022-43: THEN SHE SUDDENLY REMARKED +6930-76324-0023-44: AND MY POCKET MONEY IS GETTING LOW AGAIN AND YOU HAVEN'T ANY LEFT AS USUAL +6930-76324-0024-45: THEY SAY ILLUMINATION BY (CANDLE LIGHT->CANDLELIGHT) IS THE PRETTIEST IN THE WORLD +6930-76324-0025-46: WHY IT'S GOLIATH AS USUAL THEY BOTH CRIED PEERING IN +6930-76324-0026-47: ISN'T HE THE GREATEST FOR GETTING INTO ODD CORNERS +6930-76324-0027-48: FORGETTING ALL THEIR WEARINESS THEY SEIZED THEIR CANDLES AND SCURRIED THROUGH THE HOUSE FINDING (AN->ON) OCCASIONAL PAPER TUCKED AWAY IN SOME ODD CORNER +6930-76324-0028-49: WELL I'M CONVINCED THAT THE BOARDED UP HOUSE MYSTERY HAPPENED NOT EARLIER THAN APRIL SIXTEENTH EIGHTEEN SIXTY ONE AND PROBABLY NOT MUCH LATER +6930-81414-0000-50: NO WORDS WERE SPOKEN NO LANGUAGE WAS UTTERED SAVE THAT OF WAILING AND HISSING AND THAT SOMEHOW WAS INDISTINCT AS IF IT EXISTED IN FANCY AND NOT IN REALITY +6930-81414-0001-51: I HEARD A NOISE BEHIND I TURNED AND SAW (KAFFAR->KAFFIR) HIS BLACK EYES SHINING WHILE IN HIS HAND HE HELD A GLEAMING KNIFE HE LIFTED IT ABOVE HIS HEAD AS IF TO STRIKE BUT I HAD THE STRENGTH OF TEN MEN AND I HURLED HIM FROM ME +6930-81414-0002-52: ONWARD SAID A DISTANT VOICE +6930-81414-0003-53: NO SOUND BROKE THE STILLNESS OF THE NIGHT +6930-81414-0004-54: THE STORY OF ITS EVIL INFLUENCE CAME BACK TO ME AND IN MY BEWILDERED CONDITION I WONDERED WHETHER THERE WAS NOT SOME TRUTH IN WHAT HAD BEEN SAID +6930-81414-0005-55: WHAT WAS THAT +6930-81414-0006-56: WHAT THEN A HUMAN HAND LARGE AND SHAPELY APPEARED DISTINCTLY ON THE SURFACE OF THE POND +6930-81414-0007-57: NOTHING MORE NOT EVEN THE WRIST TO WHICH IT MIGHT BE ATTACHED +6930-81414-0008-58: IT DID NOT BECKON OR INDEED MOVE AT ALL IT WAS AS STILL AS THE HAND OF DEATH +6930-81414-0009-59: I AWOKE TO CONSCIOUSNESS FIGHTING AT FIRST IT SEEMED AS IF I WAS FIGHTING WITH (A->THE) PHANTOM BUT GRADUALLY MY OPPONENT BECAME MORE REAL TO ME IT WAS (KAFFAR->KAFFIR) +6930-81414-0010-60: A SOUND OF VOICES A FLASH OF LIGHT +6930-81414-0011-61: A FEELING OF FREEDOM AND I WAS AWAKE WHERE +6930-81414-0012-62: SAID ANOTHER VOICE WHICH I RECOGNIZED AS VOLTAIRE'S (KAFFAR->KAFFIR) +6930-81414-0013-63: I HAD SCARCELY KNOWN WHAT I HAD BEEN SAYING OR DOING UP TO THIS TIME BUT AS HE SPOKE I LOOKED AT MY HAND +6930-81414-0014-64: IN THE LIGHT OF THE MOON I SAW A KNIFE RED WITH BLOOD AND MY HAND TOO WAS ALSO (DISCOLOURED->DISCOLORED) +6930-81414-0015-65: I DO NOT KNOW I AM DAZED BEWILDERED +6930-81414-0016-66: BUT THAT IS (KAFFAR'S KNIFE->KAFFIR'S KNIF) +6930-81414-0017-67: I KNOW HE HAD IT THIS VERY (EVENING->EVEN) +6930-81414-0018-68: I (REMEMBER->REMEMBERED) SAYING HAVE WE BEEN TOGETHER +6930-81414-0019-69: VOLTAIRE PICKED UP SOMETHING FROM THE GROUND AND LOOKED AT IT +6930-81414-0020-70: I SAY YOU DO KNOW WHAT THIS MEANS AND YOU MUST TELL US +6930-81414-0021-71: A TERRIBLE THOUGHT FLASHED INTO MY MIND +6930-81414-0022-72: I HAD AGAIN BEEN ACTING UNDER THE INFLUENCE OF THIS MAN'S POWER +6930-81414-0023-73: PERCHANCE TOO (KAFFAR'S->KAFFIRS) DEATH MIGHT SERVE HIM IN GOOD STEAD +6930-81414-0024-74: MY TONGUE REFUSED TO ARTICULATE MY POWER OF SPEECH LEFT ME +6930-81414-0025-75: MY POSITION WAS TOO TERRIBLE 
+6930-81414-0026-76: MY OVERWROUGHT NERVES YIELDED AT LAST +6930-81414-0027-77: FOR SOME TIME AFTER THAT I REMEMBERED NOTHING DISTINCTLY +7021-79730-0000-1399: THE THREE MODES OF MANAGEMENT +7021-79730-0001-1400: TO SUPPOSE THAT THE OBJECT OF THIS WORK IS TO AID IN EFFECTING SUCH A SUBSTITUTION AS THAT IS ENTIRELY TO MISTAKE ITS NATURE AND DESIGN +7021-79730-0002-1401: BY REASON AND AFFECTION +7021-79730-0003-1402: AS THE (CHAISE->CHASE) DRIVES AWAY MARY STANDS BEWILDERED AND PERPLEXED ON THE (DOOR STEP->DOORSTEP) HER MIND IN A TUMULT OF EXCITEMENT IN WHICH HATRED OF THE DOCTOR DISTRUST AND SUSPICION OF HER MOTHER DISAPPOINTMENT VEXATION AND ILL HUMOR SURGE AND SWELL AMONG THOSE (DELICATE->DELEGATE) ORGANIZATIONS ON WHICH THE STRUCTURE AND DEVELOPMENT OF THE SOUL SO CLOSELY DEPEND DOING PERHAPS AN IRREPARABLE INJURY +7021-79730-0004-1403: THE MOTHER AS SOON AS THE (CHAISE->CHASE) IS SO FAR TURNED THAT MARY CAN NO LONGER WATCH THE EXPRESSION OF HER COUNTENANCE GOES AWAY FROM THE DOOR WITH A SMILE OF COMPLACENCY AND SATISFACTION (UPON->ON) HER FACE AT THE INGENUITY AND SUCCESS OF HER LITTLE ARTIFICE +7021-79730-0005-1404: SO YOU WILL BE A GOOD GIRL I KNOW AND NOT MAKE ANY TROUBLE BUT WILL STAY AT HOME CONTENTEDLY WON'T YOU +7021-79730-0006-1405: THE MOTHER IN MANAGING THE CASE IN THIS WAY (RELIES->REALIZE) PARTLY ON CONVINCING THE REASON OF THE CHILD AND PARTLY ON AN APPEAL TO HER AFFECTION +7021-79730-0007-1406: IF YOU SHOULD NOT BE A GOOD GIRL BUT SHOULD SHOW SIGNS OF MAKING US ANY TROUBLE I SHALL HAVE TO SEND YOU OUT SOMEWHERE TO THE BACK PART OF THE HOUSE UNTIL WE ARE GONE +7021-79730-0008-1407: BUT THIS LAST (SUPPOSITION->OPPOSITION) IS ALMOST ALWAYS UNNECESSARY FOR IF MARY HAS BEEN HABITUALLY MANAGED ON THIS PRINCIPLE SHE WILL NOT MAKE ANY TROUBLE +7021-79730-0009-1408: IT IS INDEED TRUE THAT THE IMPORTANCE OF TACT AND SKILL IN THE TRAINING OF THE YOUNG AND OF CULTIVATING THEIR REASON AND SECURING THEIR AFFECTION (CAN NOT->CANNOT) BE OVERRATED +7021-79740-0000-1384: TO SUCH PERSONS THESE INDIRECT MODES OF TRAINING CHILDREN IN HABITS OF SUBORDINATION TO THEIR WILL OR RATHER OF YIELDING TO THEIR INFLUENCE ARE SPECIALLY USEFUL +7021-79740-0001-1385: DELLA HAD A YOUNG SISTER NAMED MARIA AND A COUSIN WHOSE NAME WAS JANE +7021-79740-0002-1386: NOW (DELIA->GALLIA) CONTRIVED TO OBTAIN A GREAT INFLUENCE AND (ASCENDENCY->A SCENE) OVER THE MINDS OF THE CHILDREN BY MEANS OF THESE DOLLS +7021-79740-0003-1387: TO GIVE AN IDEA OF THESE CONVERSATIONS I WILL REPORT ONE OF THEM IN FULL +7021-79740-0004-1388: YOU HAVE COME (ANDELLA ANDELLA->AMDELLA AND DELLA) WAS THE NAME OF JANE'S DOLL TO MAKE ROSALIE A VISIT +7021-79740-0005-1389: I AM VERY GLAD +7021-79740-0006-1390: I EXPECT YOU HAVE BEEN A VERY GOOD GIRL (ANDELLA->ANNE DELA) SINCE YOU WERE HERE LAST +7021-79740-0007-1391: THEN TURNING TO JANE SHE ASKED IN A SOMEWHAT ALTERED TONE HAS SHE BEEN A GOOD GIRL JANE +7021-79740-0008-1392: FOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLAYING UPON THE PIAZZA WITH BLOCKS AND OTHER PLAYTHINGS AND FINALLY HAD GONE INTO THE HOUSE LEAVING ALL THE THINGS ON THE FLOOR OF THE PIAZZA INSTEAD OF PUTTING THEM AWAY IN THEIR PLACES AS THEY OUGHT TO HAVE DONE +7021-79740-0009-1393: THEY WERE NOW PLAYING WITH THEIR DOLLS IN THE (PARLOR->PARLOUR) +7021-79740-0010-1394: (DELIA->DAHLIA) CAME TO THE (PARLOR->PARLOUR) AND WITH AN AIR OF GREAT MYSTERY BECKONED THE CHILDREN ASIDE AND SAID TO THEM IN A WHISPER LEAVE (ANDELLA->ANDDELA) AND ROSALIE HERE AND DON'T SAY A WORD TO THEM +7021-79740-0011-1395: SO SAYING SHE LED THE WAY ON 
TIPTOE FOLLOWED BY THE CHILDREN OUT OF THE ROOM AND ROUND BY A CIRCUITOUS ROUTE TO THE PIAZZA THERE +7021-79740-0012-1396: SAID SHE POINTING TO THE PLAYTHINGS SEE +7021-79740-0013-1397: PUT THESE PLAYTHINGS ALL AWAY QUICK AND CAREFULLY AND WE WILL NOT LET THEM KNOW (ANY THING->ANYTHING) ABOUT YOUR LEAVING THEM OUT +7021-79740-0014-1398: AND THIS METHOD OF TREATING THE CASE WAS MUCH MORE EFFECTUAL IN MAKING THEM DISPOSED TO AVOID COMMITTING A SIMILAR FAULT ANOTHER TIME THAN ANY DIRECT REBUKES OR EXPRESSIONS OF DISPLEASURE ADDRESSED PERSONALLY TO THEM WOULD HAVE BEEN +7021-79759-0000-1378: NATURE OF THE EFFECT PRODUCED BY EARLY IMPRESSIONS +7021-79759-0001-1379: THAT IS COMPARATIVELY NOTHING +7021-79759-0002-1380: THEY ARE CHIEFLY FORMED FROM COMBINATIONS OF THE IMPRESSIONS MADE IN CHILDHOOD +7021-79759-0003-1381: VAST IMPORTANCE AND INFLUENCE OF THIS MENTAL FURNISHING +7021-79759-0004-1382: WITHOUT GOING TO ANY SUCH EXTREME AS THIS WE CAN EASILY SEE ON REFLECTION HOW VAST (AN->AND) INFLUENCE ON THE IDEAS AND CONCEPTIONS AS WELL AS ON THE PRINCIPLES OF ACTION (IN->AND) MATURE YEARS MUST BE EXERTED BY THE NATURE AND CHARACTER OF THE IMAGES WHICH THE PERIOD OF INFANCY AND CHILDHOOD (IMPRESSES->IMPRESS) UPON THE MIND +7021-79759-0005-1383: THE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY VIOLENCE TO WHICH A FATHER SUBJECTS HIS SON MAY SOON PASS AWAY BUT THE MEMORY OF IT DOES NOT PASS AWAY WITH THE PAIN +7021-85628-0000-1409: BUT (ANDERS->ANDREWS) CARED NOTHING ABOUT THAT +7021-85628-0001-1410: HE MADE A BOW SO DEEP THAT HIS BACK CAME NEAR BREAKING AND HE WAS DUMBFOUNDED I CAN TELL YOU WHEN HE SAW IT WAS NOBODY BUT ANDERS +7021-85628-0002-1411: HE WAS SUCH A BIG BOY THAT HE WORE HIGH BOOTS AND CARRIED A JACK KNIFE +7021-85628-0003-1412: NOW THIS KNIFE WAS A SPLENDID ONE THOUGH HALF THE BLADE WAS GONE AND THE HANDLE WAS A LITTLE CRACKED AND ANDERS KNEW THAT ONE IS ALMOST A MAN AS SOON AS ONE HAS A (JACK KNIFE->JACKKNIFE) +7021-85628-0004-1413: YES WHY NOT THOUGHT (ANDERS->ANDREWS) +7021-85628-0005-1414: SEEING THAT I AM SO FINE I MAY AS WELL GO AND VISIT THE KING +7021-85628-0006-1415: I AM GOING TO THE COURT BALL ANSWERED (ANDERS->ANDRES) +7021-85628-0007-1416: AND SHE TOOK (ANDERS->ANDREW'S) HAND AND WALKED WITH HIM UP THE BROAD MARBLE STAIRS WHERE SOLDIERS WERE POSTED AT EVERY THIRD STEP AND THROUGH THE MAGNIFICENT HALLS WHERE COURTIERS IN SILK AND VELVET STOOD BOWING WHEREVER HE WENT +7021-85628-0008-1417: FOR LIKE AS NOT THEY MUST HAVE THOUGHT HIM A PRINCE WHEN THEY SAW HIS FINE CAP +7021-85628-0009-1418: AT THE FARTHER END OF THE LARGEST HALL A TABLE WAS SET WITH GOLDEN CUPS AND GOLDEN PLATES IN LONG ROWS +7021-85628-0010-1419: ON HUGE SILVER PLATTERS WERE PYRAMIDS OF TARTS AND CAKES AND RED WINE SPARKLED IN GLITTERING DECANTERS +7021-85628-0011-1420: THE PRINCESS SAT DOWN UNDER A BLUE CANOPY WITH BOUQUETS OF ROSES AND SHE LET (ANDERS->ANDRE) SIT IN A GOLDEN CHAIR BY HER SIDE +7021-85628-0012-1421: BUT YOU MUST NOT EAT WITH YOUR CAP ON YOUR HEAD SHE SAID AND WAS GOING TO TAKE IT OFF +7021-85628-0013-1422: THE PRINCESS CERTAINLY WAS BEAUTIFUL AND HE WOULD HAVE DEARLY LIKED TO BE KISSED BY HER BUT THE CAP WHICH HIS MOTHER HAD MADE HE WOULD NOT GIVE UP ON ANY CONDITION +7021-85628-0014-1423: HE ONLY SHOOK HIS HEAD +7021-85628-0015-1424: WELL BUT NOW SAID THE PRINCESS AND SHE FILLED HIS POCKETS WITH CAKES AND PUT HER OWN HEAVY GOLD CHAIN (AROUND->ROUND) HIS NECK AND BENT DOWN AND KISSED HIM +7021-85628-0016-1425: THAT IS A VERY FINE CAP YOU HAVE HE SAID +7021-85628-0017-1426: SO IT IS SAID 
(ANDERS->ANDREWS) +7021-85628-0018-1427: AND IT IS MADE OF MOTHER'S BEST YARN AND SHE KNITTED IT HERSELF AND EVERYBODY WANTS TO GET IT AWAY FROM ME +7021-85628-0019-1428: WITH ONE JUMP ANDERS GOT OUT OF HIS CHAIR +7021-85628-0020-1429: HE DARTED LIKE AN ARROW THROUGH ALL THE HALLS DOWN ALL THE STAIRS AND ACROSS THE YARD +7021-85628-0021-1430: HE STILL HELD ON TO IT WITH BOTH HANDS AS HE RUSHED INTO HIS MOTHER'S COTTAGE +7021-85628-0022-1431: AND ALL HIS BROTHERS AND SISTERS STOOD ROUND AND LISTENED WITH THEIR MOUTHS OPEN +7021-85628-0023-1432: BUT WHEN HIS BIG BROTHER HEARD THAT HE HAD REFUSED TO GIVE HIS CAP FOR A KING'S GOLDEN CROWN HE SAID THAT ANDERS WAS A STUPID +7021-85628-0024-1433: (ANDERS->ANDREW'S) FACE GREW RED +7021-85628-0025-1434: BUT HIS MOTHER HUGGED HIM CLOSE +7021-85628-0026-1435: NO MY LITTLE (SON->FUN) SHE SAID +7021-85628-0027-1436: IF YOU DRESSED IN SILK AND GOLD FROM TOP TO TOE YOU COULD NOT LOOK ANY NICER THAN IN YOUR LITTLE RED CAP +7127-75946-0000-467: AT THE CONCLUSION OF THE BANQUET WHICH WAS SERVED AT FIVE O'CLOCK THE KING ENTERED HIS CABINET WHERE HIS TAILORS WERE AWAITING HIM FOR THE PURPOSE OF TRYING ON THE CELEBRATED COSTUME REPRESENTING SPRING WHICH WAS THE RESULT OF SO MUCH IMAGINATION AND HAD COST SO MANY EFFORTS OF THOUGHT TO THE DESIGNERS AND ORNAMENT WORKERS OF THE COURT +7127-75946-0001-468: AH VERY WELL +7127-75946-0002-469: LET HIM COME IN THEN SAID THE KING AND AS IF COLBERT HAD BEEN LISTENING AT THE DOOR FOR THE PURPOSE OF KEEPING HIMSELF (AU COURANT->OKARRANT) WITH THE CONVERSATION HE ENTERED AS SOON AS THE KING HAD PRONOUNCED HIS NAME TO THE TWO COURTIERS +7127-75946-0003-470: GENTLEMEN TO YOUR POSTS WHEREUPON SAINT (AIGNAN->DAN) AND (VILLEROY->VILLEROI) TOOK THEIR LEAVE +7127-75946-0004-471: CERTAINLY SIRE BUT I MUST HAVE MONEY TO DO THAT WHAT +7127-75946-0005-472: WHAT DO YOU MEAN INQUIRED (LOUIS->LOUISE) +7127-75946-0006-473: HE HAS GIVEN THEM WITH TOO MUCH GRACE NOT TO HAVE OTHERS STILL TO GIVE IF THEY ARE REQUIRED WHICH IS THE CASE AT THE PRESENT MOMENT +7127-75946-0007-474: IT IS NECESSARY THEREFORE THAT HE SHOULD COMPLY THE KING FROWNED +7127-75946-0008-475: DOES YOUR MAJESTY THEN NO LONGER BELIEVE THE DISLOYAL ATTEMPT +7127-75946-0009-476: NOT AT ALL YOU ARE ON THE CONTRARY MOST AGREEABLE TO ME +7127-75946-0010-477: YOUR MAJESTY'S PLAN THEN IN THIS AFFAIR IS +7127-75946-0011-478: YOU WILL TAKE THEM FROM MY PRIVATE TREASURE +7127-75946-0012-479: THE NEWS CIRCULATED WITH THE RAPIDITY OF LIGHTNING DURING ITS PROGRESS IT KINDLED EVERY VARIETY OF COQUETRY DESIRE AND WILD AMBITION +7127-75946-0013-480: THE KING HAD COMPLETED HIS (TOILETTE->TOILET) BY NINE O'CLOCK HE APPEARED IN AN OPEN CARRIAGE DECORATED WITH BRANCHES OF TREES AND FLOWERS +7127-75946-0014-481: THE QUEENS HAD TAKEN THEIR SEATS UPON A MAGNIFICENT (DIAS->DAIS) OR PLATFORM ERECTED UPON THE BORDERS OF THE LAKE IN A (THEATER->THEATRE) OF WONDERFUL ELEGANCE OF CONSTRUCTION +7127-75946-0015-482: SUDDENLY FOR THE PURPOSE OF RESTORING PEACE AND ORDER (SPRING->SPRANG) ACCOMPANIED BY HIS WHOLE COURT MADE HIS APPEARANCE +7127-75946-0016-483: THE SEASONS ALLIES OF SPRING FOLLOWED HIM CLOSELY TO FORM A QUADRILLE WHICH AFTER MANY WORDS OF MORE OR LESS FLATTERING IMPORT WAS THE COMMENCEMENT OF THE DANCE +7127-75946-0017-484: HIS LEGS THE BEST SHAPED AT COURT WERE DISPLAYED TO GREAT ADVANTAGE IN FLESH (COLORED->COLOURED) SILKEN HOSE (OF->A) SILK SO FINE AND SO TRANSPARENT THAT IT SEEMED ALMOST LIKE FLESH ITSELF +7127-75946-0018-485: THERE WAS SOMETHING IN HIS CARRIAGE WHICH RESEMBLED THE 
BUOYANT MOVEMENTS OF AN IMMORTAL AND HE DID NOT DANCE SO MUCH AS (SEEM->SEEMED) TO SOAR ALONG +7127-75946-0019-486: YES IT IS SUPPRESSED +7127-75946-0020-487: FAR FROM IT SIRE YOUR MAJESTY (HAVING->HEAVEN) GIVEN NO DIRECTIONS ABOUT IT THE MUSICIANS HAVE RETAINED IT +7127-75946-0021-488: YES SIRE AND READY DRESSED FOR THE BALLET +7127-75946-0022-489: SIRE HE SAID YOUR MAJESTY'S MOST DEVOTED SERVANT APPROACHES TO PERFORM A SERVICE ON THIS OCCASION WITH SIMILAR ZEAL THAT HE HAS ALREADY SHOWN ON THE FIELD OF BATTLE +7127-75946-0023-490: THE KING SEEMED ONLY PLEASED WITH EVERY ONE PRESENT +7127-75946-0024-491: MONSIEUR WAS THE ONLY ONE WHO DID NOT UNDERSTAND ANYTHING ABOUT THE MATTER +7127-75946-0025-492: THE BALLET BEGAN THE EFFECT WAS MORE THAN BEAUTIFUL +7127-75946-0026-493: WHEN THE MUSIC BY ITS BURSTS OF MELODY CARRIED AWAY THESE ILLUSTRIOUS DANCERS WHEN (THE->THIS) SIMPLE UNTUTORED PANTOMIME OF THAT PERIOD ONLY THE MORE NATURAL ON ACCOUNT OF THE VERY INDIFFERENT ACTING OF THE AUGUST ACTORS HAD REACHED ITS CULMINATING POINT OF TRIUMPH THE (THEATER->THEATRE) SHOOK WITH TUMULTUOUS APPLAUSE +7127-75946-0027-494: DISDAINFUL OF A SUCCESS OF WHICH MADAME SHOWED NO (ACKNOWLEDGEMENT->ACKNOWLEDGMENT) HE THOUGHT OF NOTHING BUT BOLDLY REGAINING THE MARKED PREFERENCE OF THE PRINCESS +7127-75946-0028-495: BY DEGREES ALL HIS HAPPINESS ALL HIS BRILLIANCY SUBSIDED INTO REGRET AND UNEASINESS SO THAT HIS LIMBS LOST THEIR POWER HIS ARMS HUNG HEAVILY BY HIS SIDES AND HIS HEAD DROOPED AS THOUGH HE WAS STUPEFIED +7127-75946-0029-496: THE KING WHO HAD FROM THIS MOMENT BECOME IN REALITY THE PRINCIPAL DANCER IN THE QUADRILLE CAST A LOOK UPON HIS VANQUISHED RIVAL +7127-75947-0000-426: EVERY ONE COULD OBSERVE HIS AGITATION AND PROSTRATION A PROSTRATION WHICH WAS INDEED THE MORE REMARKABLE SINCE PEOPLE WERE NOT ACCUSTOMED TO SEE HIM WITH HIS ARMS HANGING LISTLESSLY BY HIS SIDE HIS HEAD BEWILDERED AND HIS EYES WITH ALL THEIR BRIGHT INTELLIGENCE (BEDIMMED->BEDEMNED) +7127-75947-0001-427: UPON THIS MADAME DEIGNED TO TURN HER EYES LANGUISHINGLY TOWARDS THE COMTE OBSERVING +7127-75947-0002-428: DO YOU THINK SO SHE REPLIED WITH INDIFFERENCE +7127-75947-0003-429: YES THE CHARACTER WHICH YOUR ROYAL HIGHNESS ASSUMED IS IN PERFECT HARMONY WITH YOUR OWN +7127-75947-0004-430: EXPLAIN YOURSELF +7127-75947-0005-431: I ALLUDE TO THE GODDESS +7127-75947-0006-432: THE PRINCESS INQUIRED NO +7127-75947-0007-433: SHE THEN ROSE HUMMING THE AIR TO WHICH SHE WAS PRESENTLY GOING TO DANCE +7127-75947-0008-434: THE ARROW PIERCED HIS HEART AND WOUNDED HIM MORTALLY +7127-75947-0009-435: A QUARTER OF AN HOUR AFTERWARDS HE RETURNED TO THE (THEATER->THEATRE) BUT IT WILL BE READILY BELIEVED THAT IT WAS ONLY A POWERFUL EFFORT OF REASON OVER HIS GREAT EXCITEMENT THAT ENABLED HIM TO GO BACK OR PERHAPS FOR LOVE IS THUS STRANGELY CONSTITUTED HE FOUND IT IMPOSSIBLE EVEN TO REMAIN MUCH LONGER SEPARATED FROM (THE->THEIR) PRESENCE OF ONE WHO HAD BROKEN HIS HEART +7127-75947-0010-436: WHEN SHE PERCEIVED THE YOUNG MAN SHE ROSE LIKE A WOMAN SURPRISED IN THE MIDST OF IDEAS SHE WAS DESIROUS OF CONCEALING FROM HERSELF +7127-75947-0011-437: REMAIN I IMPLORE YOU THE EVENING IS MOST LOVELY +7127-75947-0012-438: INDEED (AH->A) +7127-75947-0013-439: I REMEMBER NOW AND I CONGRATULATE MYSELF DO YOU LOVE ANY ONE +7127-75947-0014-440: FORGIVE ME I HARDLY KNOW WHAT I AM SAYING A THOUSAND TIMES FORGIVE ME MADAME WAS RIGHT QUITE RIGHT THIS BRUTAL EXILE HAS COMPLETELY TURNED MY BRAIN +7127-75947-0015-441: THERE CANNOT BE A DOUBT HE RECEIVED YOU KINDLY FOR IN FACT YOU RETURNED 
WITHOUT HIS PERMISSION +7127-75947-0016-442: OH MADEMOISELLE WHY HAVE I NOT A DEVOTED SISTER OR A TRUE FRIEND SUCH AS YOURSELF +7127-75947-0017-443: WHAT ALREADY HERE THEY SAID TO HER +7127-75947-0018-444: I HAVE BEEN HERE THIS QUARTER OF AN HOUR REPLIED LA (VALLIERE->VALLIERS) +7127-75947-0019-445: DID NOT THE DANCING AMUSE YOU NO +7127-75947-0020-446: NO MORE THAN THE DANCING +7127-75947-0021-447: LA (VALLIERE IS->VALLIERS) QUITE A POETESS SAID (TONNAY CHARENTE->TONISON) +7127-75947-0022-448: I AM A WOMAN AND THERE ARE FEW LIKE ME WHOEVER LOVES ME FLATTERS ME WHOEVER FLATTERS ME PLEASES ME AND WHOEVER PLEASES WELL SAID MONTALAIS YOU DO NOT FINISH +7127-75947-0023-449: IT IS TOO DIFFICULT REPLIED MADEMOISELLE (DE TONNAY CHARENTE->DETONICHALANT) LAUGHING LOUDLY +7127-75947-0024-450: (LOOK->LUCK) YONDER DO YOU NOT SEE THE MOON SLOWLY RISING SILVERING THE TOPMOST BRANCHES OF THE CHESTNUTS AND THE (OAKS->YOKES) +7127-75947-0025-451: EXQUISITE SOFT TURF OF THE WOODS THE HAPPINESS WHICH YOUR FRIENDSHIP CONFERS UPON ME +7127-75947-0026-452: WELL SAID MADEMOISELLE DE (TONNAY CHARENTE->TONE) I ALSO THINK A GOOD DEAL BUT I TAKE CARE +7127-75947-0027-453: TO SAY NOTHING SAID MONTALAIS SO THAT WHEN MADEMOISELLE DE (TONNAY CHARENTE->TO NECHERANT) THINKS (ATHENAIS->ETHNEE) IS THE ONLY ONE WHO KNOWS IT +7127-75947-0028-454: QUICK QUICK THEN AMONG THE HIGH REED GRASS SAID MONTALAIS STOOP (ATHENAIS->ETHINAY) YOU ARE SO TALL +7127-75947-0029-455: THE YOUNG GIRLS HAD INDEED MADE THEMSELVES SMALL INDEED INVISIBLE +7127-75947-0030-456: SHE WAS HERE JUST NOW SAID THE COUNT +7127-75947-0031-457: YOU ARE POSITIVE THEN +7127-75947-0032-458: YES BUT PERHAPS I FRIGHTENED HER (IN->AND) WHAT WAY +7127-75947-0033-459: HOW IS IT LA (VALLIERE->VALLIERS) SAID MADEMOISELLE DE (TONNAY CHARENTE->TINACHANT) THAT THE VICOMTE DE (BRAGELONNE->BRAGELONE) SPOKE OF YOU AS LOUISE +7127-75947-0034-460: IT SEEMS THE KING WILL NOT CONSENT TO IT +7127-75947-0035-461: GOOD GRACIOUS (HAS->AS) THE KING ANY RIGHT TO INTERFERE IN MATTERS OF THAT KIND +7127-75947-0036-462: I GIVE MY CONSENT +7127-75947-0037-463: OH I AM SPEAKING SERIOUSLY REPLIED MONTALAIS AND MY OPINION IN THIS CASE IS QUITE AS GOOD AS THE KING'S I SUPPOSE IS IT NOT LOUISE +7127-75947-0038-464: LET US RUN THEN SAID ALL THREE AND GRACEFULLY LIFTING UP THE LONG SKIRTS OF THEIR SILK DRESSES THEY LIGHTLY RAN ACROSS THE OPEN SPACE BETWEEN THE LAKE AND THE THICKEST COVERT OF THE PARK +7127-75947-0039-465: IN FACT THE SOUND OF MADAME'S AND THE QUEEN'S CARRIAGES COULD BE HEARD IN THE DISTANCE UPON THE HARD DRY GROUND OF THE ROADS FOLLOWED BY THE (MOUNTED->MOUNTAIN) CAVALIERS +7127-75947-0040-466: IN THIS WAY THE FETE OF THE WHOLE COURT WAS A FETE ALSO FOR THE MYSTERIOUS INHABITANTS OF THE FOREST FOR CERTAINLY THE DEER IN THE BRAKE THE PHEASANT ON THE BRANCH THE FOX IN ITS HOLE WERE ALL LISTENING +7176-88083-0000-707: ALL ABOUT HIM WAS A TUMULT OF BRIGHT AND BROKEN COLOR SCATTERED IN BROAD SPLASHES +7176-88083-0001-708: THE (MERGANSER->MERGANCER) HAD A CRESTED HEAD OF IRIDESCENT GREEN BLACK A BROAD COLLAR OF LUSTROUS WHITE BLACK BACK BLACK AND WHITE WINGS WHITE BELLY SIDES FINELY PENCILLED (IN->AND) BLACK AND WHITE AND (A->HER) BREAST OF RICH CHESTNUT RED STREAKED WITH BLACK +7176-88083-0002-709: HIS FEET WERE RED HIS LONG NARROW BEAK WITH ITS SAW TOOTHED EDGES AND SHARP HOOKED TIP WAS BRIGHT RED +7176-88083-0003-710: BUT HERE HE WAS AT A TERRIBLE DISADVANTAGE AS COMPARED WITH THE OWLS HAWKS AND EAGLES HE HAD NO RENDING CLAWS +7176-88083-0004-711: BUT SUDDENLY STRAIGHT AND SWIFT AS 
A DIVING CORMORANT HE SHOT DOWN INTO THE TORRENT AND DISAPPEARED BENEATH THE SURFACE +7176-88083-0005-712: ONCE FAIRLY A WING HOWEVER HE WHEELED AND MADE BACK HURRIEDLY FOR HIS PERCH +7176-88083-0006-713: IT MIGHT HAVE SEEMED THAT A TROUT OF THIS SIZE WAS A FAIRLY SUBSTANTIAL MEAL +7176-88083-0007-714: BUT SUCH WAS HIS KEENNESS THAT EVEN WHILE THE WIDE FLUKES OF HIS ENGORGED VICTIM WERE STILL STICKING OUT AT THE CORNERS OF HIS BEAK HIS FIERCE RED EYES WERE ONCE MORE PEERING DOWNWARD INTO THE TORRENT IN SEARCH OF FRESH PREY +7176-88083-0008-715: IN DESPAIR HE HURLED HIMSELF DOWNWARD TOO SOON +7176-88083-0009-716: THE GREAT HAWK (FOLLOWED->FOWLED) HURRIEDLY TO RETRIEVE HIS PREY FROM THE GROUND +7176-88083-0010-717: THE CAT GROWLED SOFTLY PICKED UP THE PRIZE IN HER JAWS AND TROTTED INTO THE BUSHES TO DEVOUR IT +7176-88083-0011-718: IN FACT HE HAD JUST FINISHED IT THE LAST OF THE TROUT'S TAIL HAD JUST VANISHED WITH A SPASM DOWN HIS STRAINED GULLET WHEN THE BAFFLED HAWK CAUGHT SIGHT OF HIM AND SWOOPED +7176-88083-0012-719: THE HAWK ALIGHTED ON THE DEAD BRANCH AND SAT UPRIGHT MOTIONLESS AS IF SURPRISED +7176-88083-0013-720: LIKE HIS UNFORTUNATE LITTLE COUSIN THE TEAL HE TOO HAD FELT THE FEAR OF DEATH SMITTEN INTO HIS HEART AND WAS HEADING DESPERATELY FOR THE REFUGE OF SOME DARK OVERHANGING BANK DEEP FRINGED WITH WEEDS WHERE THE DREADFUL EYE OF THE HAWK SHOULD NOT DISCERN HIM +7176-88083-0014-721: THE HAWK SAT UPON THE BRANCH AND WATCHED HIS QUARRY SWIMMING BENEATH THE SURFACE +7176-88083-0015-722: ALMOST INSTANTLY HE WAS FORCED TO THE (TOP->TO) +7176-88083-0016-723: STRAIGHTWAY THE (HAWK->HOT) GLIDED FROM HIS PERCH AND DARTED AFTER HIM +7176-88083-0017-724: BUT AT THIS POINT IN THE RAPIDS IT WAS IMPOSSIBLE FOR HIM TO STAY DOWN +7176-88083-0018-725: BUT THIS FREQUENTER OF THE HEIGHTS OF AIR FOR ALL HIS SAVAGE VALOR WAS TROUBLED AT THE LEAPING WAVES AND THE TOSSING FOAM OF THESE MAD RAPIDS HE DID NOT UNDERSTAND THEM +7176-88083-0019-726: AS HE FLEW HIS DOWN REACHING CLUTCHING TALONS WERE NOT HALF A YARD ABOVE THE FUGITIVE'S HEAD +7176-88083-0020-727: WHERE THE (WAVES->WAY IS) FOR AN INSTANT SANK THEY CAME CLOSER BUT NOT QUITE WITHIN GRASPING REACH +7176-88083-0021-728: BUT AS BEFORE THE LEAPING WAVES OF THE RAPIDS WERE TOO MUCH FOR HIS PURSUER AND HE WAS ABLE TO FLAP HIS WAY ONWARD IN A CLOUD OF FOAM WHILE DOOM HUNG LOW ABOVE HIS HEAD YET HESITATED TO STRIKE +7176-88083-0022-729: THE HAWK EMBITTERED BY THE LOSS OF HIS FIRST QUARRY HAD BECOME AS DOGGED IN PURSUIT AS A WEASEL NOT TO BE SHAKEN OFF OR EVADED OR DECEIVED +7176-88083-0023-730: HE HAD A LOT OF LINE OUT AND THE PLACE WAS NONE TOO FREE FOR A LONG CAST BUT HE WAS IMPATIENT TO DROP HIS FLIES AGAIN ON THE SPOT WHERE THE BIG FISH WAS FEEDING +7176-88083-0024-731: THE LAST DROP FLY AS LUCK WOULD HAVE IT CAUGHT JUST IN THE CORNER OF THE HAWK'S ANGRILY OPEN BEAK HOOKING ITSELF FIRMLY +7176-88083-0025-732: AT THE SUDDEN SHARP STING OF IT THE GREAT BIRD TURNED HIS HEAD AND NOTICED FOR THE FIRST TIME THE FISHERMAN STANDING ON THE BANK +7176-88083-0026-733: THE DRAG UPON HIS BEAK AND THE LIGHT CHECK UPON HIS WINGS WERE INEXPLICABLE TO HIM AND APPALLING +7176-88083-0027-734: (THEN->THAN) THE LEADER PARTED FROM THE LINE +7176-92135-0000-661: HE IS A WELCOME FIGURE AT THE GARDEN PARTIES OF THE ELECT WHO ARE ALWAYS READY TO ENCOURAGE HIM BY ACCEPTING FREE SEATS FOR HIS PLAY ACTOR MANAGERS NOD TO HIM EDITORS ALLOW HIM TO CONTRIBUTE WITHOUT CHARGE TO A (SYMPOSIUM->SIMPOSIUM) ON THE PRICE OF GOLF BALLS +7176-92135-0001-662: IN SHORT HE BECOMES A PROMINENT 
FIGURE IN LONDON SOCIETY AND IF HE IS NOT CAREFUL SOMEBODY WILL SAY SO +7176-92135-0002-663: BUT EVEN THE UNSUCCESSFUL DRAMATIST HAS HIS MOMENTS +7176-92135-0003-664: YOUR PLAY MUST BE NOT MERELY A GOOD PLAY BUT A SUCCESSFUL ONE +7176-92135-0004-665: FRANKLY I CANNOT ALWAYS SAY +7176-92135-0005-666: BUT SUPPOSE YOU SAID I'M FOND OF WRITING MY PEOPLE ALWAYS SAY MY LETTERS HOME ARE GOOD ENOUGH FOR PUNCH +7176-92135-0006-667: I'VE GOT A LITTLE IDEA FOR A PLAY ABOUT A MAN AND A WOMAN AND ANOTHER WOMAN AND BUT PERHAPS (I'D->I) BETTER KEEP THE PLOT A SECRET FOR THE MOMENT +7176-92135-0007-668: ANYHOW IT'S JOLLY EXCITING AND I CAN DO THE DIALOGUE ALL RIGHT +7176-92135-0008-669: LEND ME YOUR EAR FOR TEN MINUTES AND YOU SHALL LEARN JUST WHAT STAGECRAFT IS +7176-92135-0009-670: AND I SHOULD BEGIN WITH A SHORT HOMILY ON SOLILOQUY +7176-92135-0010-671: (HAM->HIM) TO BE OR NOT TO BE +7176-92135-0011-672: NOW THE OBJECT OF THIS (SOLILOQUY->SOLOQUY) IS PLAIN +7176-92135-0012-673: INDEED IRRESOLUTION (BEING->MEAN) THE KEYNOTE OF HAMLET'S SOLILOQUY A CLEVER PLAYER COULD TO SOME EXTENT INDICATE THE WHOLE THIRTY LINES BY A (SILENT->SILENCE) WORKING OF THE (JAW->JOB) BUT AT THE SAME TIME IT WOULD BE IDLE TO DENY THAT HE WOULD MISS THE FINER SHADES OF THE DRAMATIST'S MEANING +7176-92135-0013-674: WE MODERNS HOWEVER SEE THE ABSURDITY OF IT +7176-92135-0014-675: IF IT BE GRANTED FIRST THAT THE THOUGHTS OF A CERTAIN CHARACTER SHOULD BE KNOWN TO THE AUDIENCE AND SECONDLY THAT SOLILOQUY OR THE HABIT OF THINKING ALOUD IS IN OPPOSITION TO MODERN STAGE (TECHNIQUE->TYPE MAKE) HOW SHALL A SOLILOQUY BE AVOIDED WITHOUT DAMAGE TO THE PLAY +7176-92135-0015-676: AND SO ON TILL YOU GET TO THE END (WHEN OPHELIA->ONE OF YOU) MIGHT SAY AH YES OR SOMETHING NON COMMITTAL OF THAT SORT +7176-92135-0016-677: THIS WOULD BE AN EASY WAY OF DOING IT BUT IT WOULD NOT BE THE BEST WAY FOR THE REASON THAT IT IS TOO EASY TO CALL ATTENTION TO ITSELF +7176-92135-0017-678: IN THE OLD BADLY MADE PLAY IT WAS FREQUENTLY NECESSARY FOR ONE OF THE CHARACTERS TO TAKE THE AUDIENCE INTO HIS CONFIDENCE +7176-92135-0018-679: IN THE MODERN WELL CONSTRUCTED PLAY HE SIMPLY RINGS UP AN IMAGINARY CONFEDERATE AND TELLS HIM WHAT HE IS GOING TO DO COULD ANYTHING BE MORE NATURAL +7176-92135-0019-680: I WANT DOUBLE NINE (HAL LO->HELLO) +7176-92135-0020-681: DOUBLE NINE (TWO->TO) THREE (ELSINORE->ELSINOR) DOUBLE (NINE->NOT) YES (HALLO->HELLO) IS THAT YOU HORATIO (HAMLET->PANEL) SPEAKING +7176-92135-0021-682: I SAY I'VE BEEN (WONDERING->WANDERING) ABOUT THIS BUSINESS +7176-92135-0022-683: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER IN THE MIND TO SUFFER THE SLINGS AND ARROWS WHAT NO HAMLET SPEAKING +7176-92135-0023-684: YOU GAVE ME DOUBLE FIVE I WANT DOUBLE NINE (HALLO->HELLO) IS THAT YOU HORATIO HAMLET SPEAKING +7176-92135-0024-685: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER +7176-92135-0025-686: IT IS TO LET HAMLET IF THAT HAPPEN TO BE THE NAME OF YOUR CHARACTER ENTER WITH A SMALL DOG PET FALCON (MONGOOSE->MONGOO'S) TAME BEAR OR WHATEVER ANIMAL IS MOST IN KEEPING WITH THE PART AND CONFIDE IN THIS ANIMAL SUCH SORROWS HOPES OR SECRET HISTORY AS THE AUDIENCE HAS GOT TO KNOW +7176-92135-0026-687: (ENTER->INTER) HAMLET WITH HIS FAVOURITE (BOAR HOUND->BOREHOUND) +7176-92135-0027-688: LADY (LARKSPUR STARTS->LARKSBURG START) SUDDENLY AND TURNS (TOWARDS->TOWARD) HIM +7176-92135-0028-689: (LARKSPUR BIT->LARKSPER BID) ME AGAIN THIS MORNING FOR THE THIRD TIME +7176-92135-0029-690: I WANT TO GET AWAY FROM IT ALL (SWOONS->SWOON) 
+7176-92135-0030-691: (ENTER->ENTERED) LORD ARTHUR (FLUFFINOSE->FLAPHANO'S) +7176-92135-0031-692: AND THERE YOU ARE YOU WILL OF COURSE APPRECIATE THAT THE UNFINISHED SENTENCES NOT ONLY SAVE TIME BUT ALSO MAKE THE MANOEUVRING VERY MUCH MORE NATURAL +7176-92135-0032-693: HOW YOU MAY BE WONDERING ARE YOU TO BEGIN YOUR MASTERPIECE +7176-92135-0033-694: RELAPSES INTO SILENCE FOR THE REST OF THE EVENING +7176-92135-0034-695: THE DUCHESS OF SOUTHBRIDGE TO LORD REGGIE (OH REGGIE->O READY) WHAT DID YOU SAY +7176-92135-0035-696: THEN LORD (TUPPENY WELL->TUPPENNY) WHAT ABOUT AUCTION +7176-92135-0036-697: THE CROWD DRIFTS OFF (LEAVING->LEAPING) THE HERO AND HEROINE ALONE IN THE MIDDLE OF THE STAGE AND THEN YOU CAN BEGIN +7176-92135-0037-698: THEN IS THE TIME TO INTRODUCE A MEAL ON THE STAGE +7176-92135-0038-699: A STAGE MEAL IS POPULAR BECAUSE IT (PROVES->PROVED) TO THE AUDIENCE THAT THE ACTORS EVEN WHEN CALLED CHARLES (HAWTREY->HALTREE) OR OWEN (NARES->NEAR'S) ARE REAL PEOPLE JUST LIKE YOU AND ME +7176-92135-0039-700: (TEA->T) PLEASE MATTHEWS BUTLER IMPASSIVELY +7176-92135-0040-701: (HOSTESS->HOSTES) REPLACES LUMP AND INCLINES EMPTY TEAPOT OVER TRAY FOR (A->*) MOMENT THEN (HANDS HIM->HANDSOME) A CUP PAINTED BROWN INSIDE (THUS DECEIVING->LUSTY SEEING) THE GENTLEMAN WITH THE TELESCOPE IN THE UPPER CIRCLE +7176-92135-0041-702: (RE ENTER->REINTER) BUTLER AND THREE FOOTMEN WHO (REMOVE->MOVED) THE TEA THINGS HOSTESS (TO->TWO) GUEST +7176-92135-0042-703: (IN->AND) NOVELS THE HERO HAS OFTEN PUSHED HIS MEALS AWAY UNTASTED BUT NO (STAGE->STEED) HERO WOULD DO ANYTHING SO UNNATURAL AS THIS +7176-92135-0043-704: TWO BITES ARE MADE AND THE (BREAD->ABREAD) IS CRUMBLED WITH AN AIR OF GREAT EAGERNESS INDEED ONE FEELS THAT IN REAL LIFE THE (GUEST->GUESTS) WOULD CLUTCH HOLD OF THE FOOTMAN AND SAY HALF A (MO OLD->MOLE) CHAP I HAVEN'T NEARLY FINISHED BUT THE (ACTOR IS->ACTOR'S) BETTER SCHOOLED THAN THIS +7176-92135-0044-705: BUT IT IS THE CIGARETTE WHICH CHIEFLY HAS BROUGHT THE MODERN DRAMA TO ITS PRESENT STATE OF PERFECTION +7176-92135-0045-706: LORD JOHN TAKING OUT GOLD (CIGARETTE->SICK RED) CASE FROM HIS LEFT HAND UPPER WAISTCOAT POCKET +7729-102255-0000-261: THE BOGUS LEGISLATURE NUMBERED THIRTY SIX MEMBERS +7729-102255-0001-262: THIS WAS AT THE MARCH ELECTION EIGHTEEN FIFTY FIVE +7729-102255-0002-263: THAT SUMMER'S (EMIGRATION->IMMIGRATION) HOWEVER BEING MAINLY FROM THE FREE STATES GREATLY CHANGED THE RELATIVE STRENGTH OF THE TWO PARTIES +7729-102255-0003-264: FOR GENERAL SERVICE THEREFORE REQUIRING NO SPECIAL EFFORT THE NUMERICAL STRENGTH OF THE FACTIONS WAS ABOUT EQUAL WHILE ON EXTRAORDINARY OCCASIONS THE TWO THOUSAND BORDER RUFFIAN RESERVE LYING A LITTLE FARTHER BACK FROM THE STATE LINE COULD AT ANY TIME EASILY TURN THE SCALE +7729-102255-0004-265: THE FREE STATE MEN HAD ONLY THEIR CONVICTIONS THEIR INTELLIGENCE THEIR COURAGE AND THE MORAL SUPPORT OF THE NORTH THE CONSPIRACY HAD ITS SECRET COMBINATION THE TERRITORIAL OFFICIALS THE LEGISLATURE THE BOGUS LAWS THE COURTS THE MILITIA OFFICERS THE PRESIDENT AND THE ARMY +7729-102255-0005-266: THIS WAS A FORMIDABLE ARRAY OF ADVANTAGES SLAVERY WAS PLAYING WITH LOADED DICE +7729-102255-0006-267: (COMING->COMMON) BY WAY OF THE MISSOURI RIVER TOWNS HE FELL FIRST AMONG BORDER RUFFIAN COMPANIONSHIP AND INFLUENCES AND PERHAPS HAVING HIS INCLINATIONS ALREADY (MOLDED->MOULDED) BY HIS WASHINGTON INSTRUCTIONS HIS EARLY IMPRESSIONS WERE DECIDEDLY ADVERSE TO THE FREE STATE CAUSE +7729-102255-0007-268: HIS RECEPTION SPEECH AT (WESTPORT->WESTWARD) IN WHICH HE MAINTAINED THE LEGALITY 
OF THE LEGISLATURE AND HIS DETERMINATION TO ENFORCE THEIR LAWS DELIGHTED HIS PRO SLAVERY AUDITORS +7729-102255-0008-269: ALL THE TERRITORIAL DIGNITARIES WERE PRESENT GOVERNOR (SHANNON->SHAN AND) PRESIDED JOHN CALHOUN THE SURVEYOR GENERAL MADE THE PRINCIPAL SPEECH A DENUNCIATION OF THE (ABOLITIONISTS->ABOLITIONIST) SUPPORTING THE TOPEKA MOVEMENT CHIEF JUSTICE (LECOMPTE->LE COMTE) DIGNIFIED THE OCCASION WITH APPROVING REMARKS +7729-102255-0009-270: ALL (DISSENT->DESCENT) ALL NON COMPLIANCE ALL HESITATION ALL MERE SILENCE EVEN WERE IN THEIR STRONGHOLD TOWNS LIKE (LEAVENWORTH->LEVIN WORTH) BRANDED AS ABOLITIONISM DECLARED TO BE HOSTILITY TO THE PUBLIC WELFARE AND PUNISHED WITH PROSCRIPTION PERSONAL VIOLENCE EXPULSION AND FREQUENTLY DEATH +7729-102255-0010-271: OF THE LYNCHINGS THE MOBS AND THE MURDERS IT WOULD BE IMPOSSIBLE EXCEPT IN A VERY EXTENDED WORK TO NOTE THE FREQUENT AND ATROCIOUS DETAILS +7729-102255-0011-272: THE PRESENT CHAPTERS CAN ONLY TOUCH UPON THE MORE SALIENT MOVEMENTS OF THE CIVIL WAR IN KANSAS WHICH HAPPILY (WERE->ARE) NOT SANGUINARY IF HOWEVER THE INDIVIDUAL AND MORE ISOLATED CASES OF BLOODSHED COULD BE DESCRIBED THEY WOULD SHOW A STARTLING AGGREGATE OF BARBARITY AND (*->A) LOSS OF LIFE FOR OPINION'S SAKE +7729-102255-0012-273: SEVERAL HUNDRED FREE STATE MEN PROMPTLY RESPONDED TO THE SUMMONS +7729-102255-0013-274: IT WAS IN FACT THE BEST WEAPON OF ITS DAY +7729-102255-0014-275: THE LEADERS OF THE CONSPIRACY BECAME DISTRUSTFUL OF THEIR POWER TO CRUSH THE TOWN +7729-102255-0015-276: ONE OF HIS MILITIA GENERALS SUGGESTED THAT THE GOVERNOR SHOULD REQUIRE THE OUTLAWS AT LAWRENCE AND ELSEWHERE TO SURRENDER THE (SHARPS->SHARP'S) RIFLES ANOTHER WROTE ASKING HIM TO CALL OUT THE GOVERNMENT TROOPS AT FORT (LEAVENWORTH->LEVINWORTH) +7729-102255-0016-277: THE GOVERNOR ON HIS PART BECOMING DOUBTFUL OF THE LEGALITY OF EMPLOYING MISSOURI MILITIA TO ENFORCE KANSAS LAWS WAS ALSO EAGER TO SECURE THE HELP OF FEDERAL TROOPS +7729-102255-0017-278: SHERIFF JONES HAD HIS POCKETS ALWAYS FULL OF WRITS ISSUED IN THE SPIRIT OF PERSECUTION BUT WAS OFTEN BAFFLED BY THE SHARP WITS AND READY RESOURCES OF THE FREE STATE PEOPLE AND SOMETIMES DEFIED OUTRIGHT +7729-102255-0018-279: LITTLE BY LITTLE HOWEVER THE LATTER BECAME HEMMED AND BOUND IN THE MESHES OF THE VARIOUS DEVICES AND PROCEEDINGS WHICH THE TERRITORIAL OFFICIALS EVOLVED FROM THE BOGUS LAWS +7729-102255-0019-280: TO EMBARRASS THIS DAMAGING EXPOSURE JUDGE (LECOMPTE->LECOMTE) ISSUED A WRIT AGAINST THE EX GOVERNOR ON A FRIVOLOUS CHARGE OF CONTEMPT +7729-102255-0020-281: THE INCIDENT WAS NOT VIOLENT NOR EVEN DRAMATIC NO POSSE WAS SUMMONED NO FURTHER EFFORT MADE AND (REEDER->READER) FEARING PERSONAL VIOLENCE SOON FLED IN DISGUISE +7729-102255-0021-282: BUT THE AFFAIR WAS MAGNIFIED AS A CROWNING PROOF THAT THE FREE STATE MEN WERE (INSURRECTIONISTS->INSURRECTIONOUS) AND OUTLAWS +7729-102255-0022-283: FROM THESE AGAIN SPRANG BARRICADED AND FORTIFIED DWELLINGS CAMPS AND (SCOUTING->SCOUT) PARTIES FINALLY CULMINATING IN ROVING GUERRILLA BANDS HALF PARTISAN HALF PREDATORY +7729-102255-0023-284: THEIR DISTINCTIVE CHARACTERS HOWEVER DISPLAY ONE BROAD AND UNFAILING DIFFERENCE +7729-102255-0024-285: THE FREE STATE MEN CLUNG TO THEIR PRAIRIE TOWNS AND PRAIRIE RAVINES WITH ALL THE OBSTINACY AND COURAGE OF TRUE DEFENDERS OF THEIR HOMES AND FIRESIDES +7729-102255-0025-286: (THEIR->THERE) ASSUMED CHARACTER CHANGED WITH THEIR CHANGING OPPORTUNITIES OR NECESSITIES +7729-102255-0026-287: IN THE SHOOTING OF (SHERIFF->SHERIFF'S) JONES (IN->AND) LAWRENCE AND IN THE REFUSAL 
OF EX GOVERNOR (BEEDER->READER) TO ALLOW THE DEPUTY MARSHAL TO ARREST HIM THEY DISCOVERED GRAVE (OFFENSES->OFFENCES) AGAINST THE TERRITORIAL AND (*->THE) UNITED STATES LAWS +7729-102255-0027-288: FOOTNOTE (SUMNER->SUMMER) TO SHANNON MAY TWELFTH EIGHTEEN FIFTY SIX +7729-102255-0028-289: PRIVATE PERSONS WHO (HAD LEASED->AT LEAST) THE FREE STATE HOTEL VAINLY BESOUGHT THE VARIOUS AUTHORITIES TO (PREVENT->PRESENT) THE DESTRUCTION OF THEIR PROPERTY +7729-102255-0029-290: TEN DAYS WERE CONSUMED IN THESE NEGOTIATIONS BUT THE SPIRIT OF VENGEANCE REFUSED TO YIELD +7729-102255-0030-291: HE SUMMONED HALF A DOZEN CITIZENS TO JOIN HIS POSSE WHO FOLLOWED OBEYED AND ASSISTED HIM +7729-102255-0031-292: HE CONTINUED HIS PRETENDED SEARCH AND TO GIVE COLOR TO HIS ERRAND MADE (TWO ARRESTS->TO ARREST) +7729-102255-0032-293: THE FREE STATE HOTEL A STONE BUILDING IN DIMENSIONS FIFTY BY SEVENTY FEET THREE STORIES HIGH AND HANDSOMELY FURNISHED PREVIOUSLY OCCUPIED ONLY FOR LODGING ROOMS ON THAT DAY FOR THE FIRST TIME OPENED ITS TABLE ACCOMMODATIONS TO THE PUBLIC AND PROVIDED A FREE DINNER IN HONOR OF THE OCCASION +7729-102255-0033-294: AS HE HAD PROMISED TO PROTECT THE HOTEL THE REASSURED CITIZENS BEGAN TO LAUGH AT THEIR OWN FEARS +7729-102255-0034-295: TO THEIR SORROW THEY WERE SOON UNDECEIVED +7729-102255-0035-296: THE MILITARY FORCE PARTLY RABBLE PARTLY ORGANIZED HAD MEANWHILE MOVED INTO THE TOWN +7729-102255-0036-297: HE PLANTED A COMPANY BEFORE THE HOTEL AND DEMANDED A SURRENDER OF THE ARMS BELONGING TO THE FREE STATE MILITARY COMPANIES +7729-102255-0037-298: HALF AN HOUR LATER TURNING A DEAF EAR TO ALL REMONSTRANCE HE GAVE THE PROPRIETORS UNTIL FIVE O'CLOCK TO REMOVE THEIR FAMILIES AND PERSONAL PROPERTY FROM THE FREE STATE HOTEL +7729-102255-0038-299: (ATCHISON->ATTITSON) WHO HAD BEEN HARANGUING THE MOB PLANTED HIS TWO GUNS BEFORE THE BUILDING AND TRAINED THEM UPON IT +7729-102255-0039-300: THE INMATES BEING REMOVED AT THE APPOINTED HOUR A FEW CANNON BALLS WERE FIRED THROUGH THE STONE WALLS +7729-102255-0040-301: IN THIS INCIDENT CONTRASTING THE CREATIVE AND THE DESTRUCTIVE SPIRIT OF THE FACTIONS THE (EMIGRANT AID->IMMIGRANT AIDS) SOCIETY OF MASSACHUSETTS FINDS ITS MOST HONORABLE AND TRIUMPHANT VINDICATION +7729-102255-0041-302: THE WHOLE PROCEEDING WAS SO CHILDISH THE MISERABLE PLOT SO TRANSPARENT THE (OUTRAGE->OUTRAGED) SO GROSS AS TO BRING DISGUST TO THE BETTER CLASS OF BORDER RUFFIANS WHO WERE WITNESSES AND ACCESSORIES +7729-102255-0042-303: (RELOCATED->RE LOCATED) FOOTNOTE GOVERNOR ROBINSON BEING ON HIS WAY EAST THE STEAMBOAT ON WHICH HE WAS (TRAVELING->TRAVELLING) STOPPED AT LEXINGTON MISSOURI +7729-102255-0043-304: IN A FEW DAYS AN OFFICER CAME WITH A REQUISITION FROM GOVERNOR SHANNON AND TOOK THE PRISONER BY (LAND TO WESTPORT->LANDA WEST PORT) AND AFTERWARDS FROM THERE TO KANSAS CITY (AND LEAVENWORTH->IN LEVINWORTH) +7729-102255-0044-305: HERE HE WAS PLACED IN THE CUSTODY OF CAPTAIN MARTIN OF THE KICKAPOO RANGERS WHO PROVED A KIND JAILER AND MATERIALLY ASSISTED IN PROTECTING HIM FROM THE DANGEROUS INTENTIONS OF THE MOB WHICH AT THAT TIME HELD (LEAVENWORTH->LEVIN WORTH) UNDER (A->THE) REIGN OF TERROR +7729-102255-0045-306: CAPTAIN MARTIN SAID I SHALL GIVE YOU A PISTOL TO HELP PROTECT YOURSELF IF WORSE COMES TO WORST +7729-102255-0046-307: IN THE EARLY MORNING OF THE NEXT DAY MAY TWENTY NINTH A COMPANY OF DRAGOONS WITH ONE EMPTY SADDLE CAME DOWN FROM THE FORT AND WHILE THE PRO SLAVERY MEN STILL SLEPT THE PRISONER AND HIS ESCORT WERE ON THEIR WAY ACROSS THE PRAIRIES TO LECOMPTON IN THE CHARGE OF OFFICERS 
OF THE UNITED STATES ARMY +8224-274381-0000-1451: THOUGH THROWN INTO PRISON FOR THIS ENTERPRISE AND DETAINED SOME TIME HE WAS NOT DISCOURAGED BUT STILL CONTINUED BY HIS COUNTENANCE AND PROTECTION TO INFUSE SPIRIT INTO THE DISTRESSED ROYALISTS +8224-274381-0001-1452: AMONG OTHER PERSONS OF DISTINCTION WHO UNITED THEMSELVES TO HIM WAS LORD NAPIER OF (MERCHISTON->MURCHISON) SON OF THE FAMOUS INVENTOR OF THE (LOGARITHMS->LOGARTHEMS) THE PERSON TO WHOM THE TITLE OF A GREAT MAN IS MORE JUSTLY DUE THAN TO ANY OTHER WHOM HIS COUNTRY EVER PRODUCED +8224-274381-0002-1453: WHILE THE FORMER FORETOLD THAT THE SCOTTISH COVENANTERS WERE SECRETLY FORMING A UNION WITH THE ENGLISH PARLIAMENT AND INCULCATED THE NECESSITY OF PREVENTING THEM BY SOME VIGOROUS UNDERTAKING THE LATTER STILL INSISTED THAT EVERY SUCH ATTEMPT WOULD PRECIPITATE THEM INTO MEASURES TO WHICH OTHERWISE THEY WERE NOT PERHAPS INCLINED +8224-274381-0003-1454: THE KING'S EARS WERE NOW OPEN TO MONTROSE'S (COUNSELS->COUNCILS) WHO PROPOSED NONE BUT THE BOLDEST AND MOST DARING AGREEABLY TO THE DESPERATE STATE OF THE ROYAL CAUSE IN SCOTLAND +8224-274381-0004-1455: FIVE HUNDRED MEN MORE WHO HAD BEEN LEVIED BY THE COVENANTERS WERE PERSUADED TO EMBRACE THE ROYAL CAUSE AND WITH THIS COMBINED FORCE HE HASTENED TO ATTACK LORD (ELCHO->ELKO) WHO LAY AT PERTH WITH AN ARMY OF SIX THOUSAND MEN ASSEMBLED UPON THE FIRST NEWS OF THE IRISH INVASION +8224-274381-0005-1456: DREADING THE SUPERIOR POWER OF ARGYLE WHO HAVING JOINED HIS VASSALS TO A FORCE LEVIED BY THE PUBLIC WAS APPROACHING WITH A CONSIDERABLE ARMY MONTROSE HASTENED (NORTHWARDS->NORTHWARD) IN ORDER TO ROUSE AGAIN THE MARQUIS OF (HUNTLEY->HUNTLY) AND THE GORDONS WHO HAVING BEFORE HASTILY TAKEN ARMS HAD BEEN INSTANTLY SUPPRESSED BY THE COVENANTERS +8224-274381-0006-1457: THIS NOBLEMAN'S CHARACTER THOUGH CELEBRATED FOR POLITICAL COURAGE AND CONDUCT WAS VERY LOW FOR MILITARY PROWESS AND AFTER SOME SKIRMISHES IN WHICH HE WAS WORSTED HE HERE ALLOWED MONTROSE TO ESCAPE HIM +8224-274381-0007-1458: BY QUICK MARCHES THROUGH THESE INACCESSIBLE MOUNTAINS THAT GENERAL FREED HIMSELF FROM THE SUPERIOR FORCES OF THE COVENANTERS +8224-274381-0008-1459: WITH THESE AND SOME (REENFORCEMENTS->REINFORCEMENTS) OF THE (ATHOLEMEN->ETHEL MEN) AND (MACDONALDS->MC DONALDS) WHOM HE HAD RECALLED MONTROSE FELL SUDDENLY UPON ARGYLE'S COUNTRY AND LET LOOSE UPON IT ALL THE RAGE OF WAR CARRYING OFF THE CATTLE BURNING THE HOUSES AND PUTTING THE INHABITANTS TO THE SWORD +8224-274381-0009-1460: THIS SEVERITY BY WHICH MONTROSE SULLIED HIS VICTORIES WAS THE RESULT OF PRIVATE ANIMOSITY AGAINST THE CHIEFTAIN AS MUCH AS OF ZEAL FOR THE PUBLIC CAUSE (ARGYLE->OUR GUILE) COLLECTING THREE THOUSAND MEN MARCHED IN QUEST OF THE ENEMY WHO HAD RETIRED WITH THEIR PLUNDER AND HE LAY AT (INNERLOCHY->INERLOCHY) SUPPOSING HIMSELF STILL AT A CONSIDERABLE DISTANCE FROM THEM +8224-274381-0010-1461: BY A QUICK AND UNEXPECTED MARCH MONTROSE HASTENED TO (INNERLOCHY->IN A LOCKY) AND PRESENTED HIMSELF IN ORDER OF BATTLE BEFORE THE SURPRISED BUT NOT (AFFRIGHTENED->A FRIGHTENED) COVENANTERS +8224-274381-0011-1462: HIS CONDUCT AND PRESENCE OF MIND IN THIS EMERGENCE APPEARED CONSPICUOUS +8224-274381-0012-1463: MONTROSE WEAK IN CAVALRY HERE LINED HIS TROOPS OF HORSE WITH INFANTRY AND AFTER PUTTING THE ENEMY'S HORSE TO ROUT FELL WITH UNITED FORCE UPON THEIR FOOT WHO WERE ENTIRELY CUT IN PIECES THOUGH WITH THE LOSS OF THE GALLANT LORD GORDON ON THE PART OF THE ROYALISTS +8224-274381-0013-1464: FROM THE SAME MEN NEW REGIMENTS AND NEW COMPANIES WERE FORMED DIFFERENT 
OFFICERS APPOINTED AND THE WHOLE MILITARY FORCE PUT INTO SUCH HANDS AS THE INDEPENDENTS COULD RELY ON +8224-274381-0014-1465: BESIDES MEMBERS OF PARLIAMENT WHO WERE EXCLUDED MANY OFFICERS UNWILLING TO SERVE UNDER THE NEW GENERALS THREW UP THEIR COMMISSIONS AND (UNWARILY->THEN WARILY) FACILITATED THE PROJECT OF PUTTING THE ARMY ENTIRELY INTO THE HANDS OF THAT FACTION +8224-274381-0015-1466: THOUGH THE DISCIPLINE OF THE FORMER PARLIAMENTARY ARMY WAS NOT CONTEMPTIBLE A MORE EXACT PLAN WAS INTRODUCED AND RIGOROUSLY EXECUTED BY THESE NEW COMMANDERS +8224-274381-0016-1467: VALOR INDEED WAS VERY GENERALLY DIFFUSED OVER THE ONE PARTY AS WELL AS THE OTHER DURING THIS PERIOD DISCIPLINE ALSO WAS ATTAINED BY THE FORCES OF THE PARLIAMENT BUT THE PERFECTION OF THE MILITARY ART IN CONCERTING THE GENERAL PLANS OF ACTION AND THE OPERATIONS OF THE FIELD SEEMS STILL ON BOTH SIDES TO HAVE BEEN IN A GREAT MEASURE WANTING +8224-274381-0017-1468: HISTORIANS AT LEAST PERHAPS FROM THEIR OWN IGNORANCE AND INEXPERIENCE HAVE NOT REMARKED ANY THING BUT A HEADLONG IMPETUOUS CONDUCT EACH PARTY HURRYING TO A BATTLE (WHERE->WERE) VALOR AND FORTUNE CHIEFLY DETERMINED THE SUCCESS +8224-274384-0000-1437: HE PASSED THROUGH HENLEY SAINT (ALBANS->ALBAN'S) AND CAME SO NEAR TO LONDON AS HARROW ON THE HILL +8224-274384-0001-1438: THE SCOTTISH GENERALS AND COMMISSIONERS AFFECTED GREAT SURPRISE ON THE APPEARANCE OF THE KING AND THOUGH THEY PAID HIM ALL THE EXTERIOR RESPECT DUE TO HIS DIGNITY THEY INSTANTLY SET A GUARD UPON HIM UNDER COLOR OF PROTECTION AND MADE HIM IN REALITY A PRISONER +8224-274384-0002-1439: THEY INFORMED THE ENGLISH PARLIAMENT OF THIS UNEXPECTED INCIDENT AND ASSURED THEM THAT THEY HAD ENTERED INTO NO PRIVATE TREATY WITH THE KING +8224-274384-0003-1440: OR HATH HE GIVEN US ANY GIFT +8224-274384-0004-1441: AND THE MEN OF ISRAEL ANSWERED THE MEN OF JUDAH AND SAID WE HAVE TEN PARTS IN THE KING AND WE HAVE ALSO MORE RIGHT IN DAVID THAN YE WHY THEN DID YE DESPISE US THAT OUR ADVICE SHOULD NOT BE FIRST HAD IN BRINGING BACK OUR KING +8224-274384-0005-1442: ANOTHER PREACHER AFTER REPROACHING HIM TO HIS FACE WITH HIS MISGOVERNMENT ORDERED THIS (PSALM->SUM) TO BE SUNG +8224-274384-0006-1443: THE KING STOOD UP AND CALLED FOR THAT PSALM WHICH BEGINS WITH THESE WORDS +8224-274384-0007-1444: HAVE MERCY LORD ON ME I PRAY FOR MEN WOULD ME DEVOUR +8224-274384-0008-1445: THE GOOD NATURED AUDIENCE IN PITY TO (FALLEN->FALL IN) MAJESTY SHOWED FOR ONCE GREATER DEFERENCE TO THE KING THAN TO THE MINISTER AND SUNG THE PSALM WHICH THE FORMER HAD CALLED FOR +8224-274384-0009-1446: THE PARLIAMENT AND THE SCOTS LAID THEIR PROPOSALS BEFORE THE KING +8224-274384-0010-1447: BEFORE THE SETTLEMENT OF TERMS THE ADMINISTRATION MUST BE POSSESSED ENTIRELY BY THE PARLIAMENTS OF BOTH KINGDOMS AND HOW INCOMPATIBLE THAT SCHEME WITH THE LIBERTY OF THE KING IS EASILY IMAGINED +8224-274384-0011-1448: THE ENGLISH IT IS EVIDENT HAD THEY NOT BEEN PREVIOUSLY ASSURED OF RECEIVING THE KING WOULD NEVER HAVE PARTED WITH SO CONSIDERABLE A SUM AND WHILE THEY WEAKENED THEMSELVES BY THE SAME MEASURE HAVE STRENGTHENED A PEOPLE WITH WHOM THEY MUST AFTERWARDS HAVE SO MATERIAL AN INTEREST TO DISCUSS +8224-274384-0012-1449: IF ANY STILL RETAINED (RANCOR->RANCOUR) AGAINST HIM IN HIS PRESENT CONDITION THEY PASSED IN SILENCE WHILE HIS WELL WISHERS MORE GENEROUS THAN PRUDENT ACCOMPANIED HIS MARCH WITH TEARS WITH ACCLAMATIONS AND WITH PRAYERS FOR HIS SAFETY +8224-274384-0013-1450: HIS DEATH IN THIS CONJUNCTURE WAS A PUBLIC MISFORTUNE +8230-279154-0000-617: THE ANALYSIS OF 
KNOWLEDGE WILL OCCUPY US UNTIL THE END OF THE THIRTEENTH LECTURE AND IS THE MOST DIFFICULT PART OF OUR WHOLE ENTERPRISE +8230-279154-0001-618: WHAT IS CALLED PERCEPTION DIFFERS FROM SENSATION BY THE FACT THAT THE SENSATIONAL INGREDIENTS BRING UP HABITUAL ASSOCIATES IMAGES AND EXPECTATIONS OF THEIR USUAL (CORRELATES->COROLLETS) ALL OF WHICH ARE SUBJECTIVELY INDISTINGUISHABLE FROM THE SENSATION +8230-279154-0002-619: WHETHER OR NOT THIS PRINCIPLE IS LIABLE TO EXCEPTIONS (EVERYONE->EVERY ONE) WOULD AGREE THAT (IS->IT) HAS A BROAD MEASURE OF TRUTH THOUGH THE WORD EXACTLY MIGHT SEEM AN OVERSTATEMENT AND IT MIGHT SEEM MORE CORRECT TO SAY THAT IDEAS APPROXIMATELY REPRESENT IMPRESSIONS +8230-279154-0003-620: AND WHAT SORT OF EVIDENCE IS LOGICALLY POSSIBLE +8230-279154-0004-621: THERE IS NO LOGICAL IMPOSSIBILITY IN THE HYPOTHESIS THAT THE WORLD SPRANG INTO BEING FIVE MINUTES AGO EXACTLY AS IT THEN WAS WITH (A->THE) POPULATION THAT REMEMBERED A WHOLLY UNREAL PAST +8230-279154-0005-622: ALL THAT I AM DOING IS TO USE ITS LOGICAL TENABILITY AS A HELP IN THE ANALYSIS OF WHAT OCCURS WHEN WE REMEMBER +8230-279154-0006-623: THE BEHAVIOURIST WHO ATTEMPTS TO MAKE PSYCHOLOGY A RECORD OF (BEHAVIOUR->BEHAVIOR) HAS TO TRUST HIS MEMORY IN MAKING THE RECORD +8230-279154-0007-624: HABIT IS A CONCEPT INVOLVING THE OCCURRENCE OF SIMILAR EVENTS AT DIFFERENT TIMES IF THE (BEHAVIOURIST FEELS->BEHAVIORIST FILLS) CONFIDENT THAT THERE IS SUCH A PHENOMENON AS HABIT THAT CAN ONLY BE BECAUSE HE TRUSTS HIS MEMORY WHEN IT ASSURES HIM THAT THERE HAVE BEEN OTHER TIMES +8230-279154-0008-625: BUT I DO NOT THINK SUCH AN INFERENCE IS WARRANTED +8230-279154-0009-626: OUR CONFIDENCE OR LACK OF CONFIDENCE IN THE ACCURACY OF A MEMORY IMAGE MUST IN FUNDAMENTAL CASES BE BASED UPON A CHARACTERISTIC OF THE IMAGE ITSELF SINCE WE CANNOT EVOKE THE PAST BODILY AND COMPARE IT WITH THE PRESENT IMAGE +8230-279154-0010-627: WE SOMETIMES HAVE IMAGES THAT ARE BY NO MEANS PECULIARLY VAGUE WHICH YET WE DO NOT TRUST FOR EXAMPLE UNDER THE INFLUENCE OF FATIGUE WE MAY SEE A FRIEND'S FACE VIVIDLY AND CLEARLY BUT HORRIBLY DISTORTED +8230-279154-0011-628: SOME IMAGES LIKE SOME SENSATIONS FEEL VERY FAMILIAR WHILE OTHERS FEEL STRANGE +8230-279154-0012-629: FAMILIARITY IS A (FEELING->FILLING) CAPABLE OF DEGREES +8230-279154-0013-630: IN AN IMAGE OF A WELL KNOWN FACE FOR EXAMPLE SOME PARTS MAY FEEL MORE FAMILIAR THAN OTHERS WHEN THIS HAPPENS WE HAVE MORE BELIEF IN THE ACCURACY OF THE FAMILIAR PARTS THAN IN THAT OF THE UNFAMILIAR PARTS +8230-279154-0014-631: I COME NOW TO THE OTHER CHARACTERISTIC WHICH MEMORY IMAGES MUST HAVE IN ORDER TO ACCOUNT FOR OUR KNOWLEDGE OF THE PAST +8230-279154-0015-632: THEY MUST HAVE SOME CHARACTERISTIC WHICH MAKES US REGARD THEM AS REFERRING TO MORE OR LESS REMOTE PORTIONS OF THE PAST +8230-279154-0016-633: IN ACTUAL FACT THERE ARE DOUBTLESS VARIOUS FACTORS THAT CONCUR IN GIVING US THE FEELING OF GREATER OR LESS REMOTENESS IN SOME REMEMBERED EVENT +8230-279154-0017-634: THERE MAY BE A SPECIFIC FEELING WHICH COULD BE CALLED THE (FEELING->FILLING) OF PASTNESS ESPECIALLY WHERE IMMEDIATE MEMORY IS CONCERNED +8230-279154-0018-635: THERE IS OF COURSE A DIFFERENCE BETWEEN KNOWING THE TEMPORAL RELATION OF A REMEMBERED EVENT TO THE PRESENT AND KNOWING THE TIME ORDER OF TWO REMEMBERED EVENTS +8230-279154-0019-636: IT WOULD SEEM THAT ONLY RATHER RECENT EVENTS CAN BE PLACED AT ALL ACCURATELY BY MEANS OF FEELINGS GIVING THEIR TEMPORAL RELATION TO THE PRESENT BUT IT IS CLEAR THAT SUCH FEELINGS MUST PLAY AN ESSENTIAL PART IN THE PROCESS OF DATING 
REMEMBERED EVENTS +8230-279154-0020-637: IF WE HAD RETAINED THE SUBJECT OR ACT IN KNOWLEDGE THE WHOLE PROBLEM OF MEMORY WOULD HAVE BEEN COMPARATIVELY SIMPLE +8230-279154-0021-638: REMEMBERING HAS TO BE A PRESENT OCCURRENCE IN SOME WAY RESEMBLING OR RELATED TO WHAT IS REMEMBERED +8230-279154-0022-639: SOME POINTS MAY BE TAKEN AS FIXED AND SUCH AS ANY THEORY OF MEMORY MUST ARRIVE AT +8230-279154-0023-640: IN THIS CASE AS IN MOST OTHERS WHAT MAY BE TAKEN AS CERTAIN IN ADVANCE IS RATHER VAGUE +8230-279154-0024-641: THE FIRST OF OUR VAGUE BUT INDUBITABLE DATA IS THAT THERE IS KNOWLEDGE OF THE PAST +8230-279154-0025-642: WE MIGHT PROVISIONALLY THOUGH PERHAPS NOT QUITE CORRECTLY DEFINE MEMORY AS THAT WAY OF KNOWING ABOUT THE PAST WHICH HAS NO ANALOGUE IN OUR KNOWLEDGE OF THE FUTURE SUCH A DEFINITION WOULD AT LEAST SERVE TO MARK THE PROBLEM WITH WHICH WE ARE CONCERNED THOUGH SOME EXPECTATIONS MAY DESERVE TO RANK WITH MEMORY AS REGARDS IMMEDIACY +8230-279154-0026-643: THIS DISTINCTION IS VITAL TO THE UNDERSTANDING OF MEMORY BUT IT IS NOT SO EASY TO CARRY OUT IN PRACTICE AS IT IS TO DRAW IN THEORY +8230-279154-0027-644: A (GRAMOPHONE->GRAMMAPHONE) BY THE HELP OF SUITABLE RECORDS MIGHT RELATE TO US THE INCIDENTS OF ITS PAST AND PEOPLE ARE NOT SO DIFFERENT FROM GRAMOPHONES AS THEY LIKE TO BELIEVE +8230-279154-0028-645: I CAN SET TO WORK NOW TO REMEMBER THINGS I NEVER REMEMBERED BEFORE SUCH AS WHAT I HAD TO EAT FOR BREAKFAST THIS MORNING AND IT CAN HARDLY BE WHOLLY HABIT THAT ENABLES ME TO DO THIS +8230-279154-0029-646: THE FACT THAT A MAN CAN RECITE A POEM DOES NOT SHOW THAT HE REMEMBERS ANY PREVIOUS OCCASION ON WHICH HE HAS RECITED OR READ IT +8230-279154-0030-647: (SEMON'S->SIMMONS) TWO BOOKS MENTIONED IN AN EARLIER LECTURE DO NOT TOUCH KNOWLEDGE MEMORY AT ALL CLOSELY +8230-279154-0031-648: THEY GIVE LAWS ACCORDING TO WHICH IMAGES OF PAST OCCURRENCES COME INTO OUR MINDS BUT DO NOT DISCUSS OUR BELIEF THAT THESE IMAGES REFER TO PAST OCCURRENCES WHICH IS WHAT CONSTITUTES KNOWLEDGE MEMORY +8230-279154-0032-649: IT IS THIS THAT IS OF INTEREST TO THEORY OF KNOWLEDGE +8230-279154-0033-650: IT IS BY NO MEANS ALWAYS RELIABLE ALMOST EVERYBODY HAS AT SOME TIME EXPERIENCED THE WELL KNOWN ILLUSION THAT ALL THAT IS HAPPENING NOW HAPPENED BEFORE AT SOME TIME +8230-279154-0034-651: WHENEVER THE SENSE OF FAMILIARITY OCCURS WITHOUT A DEFINITE OBJECT IT (LEADS->LEAVES) US TO SEARCH THE ENVIRONMENT UNTIL WE ARE SATISFIED THAT WE HAVE FOUND THE APPROPRIATE OBJECT WHICH LEADS US TO THE JUDGMENT THIS IS FAMILIAR +8230-279154-0035-652: THUS NO KNOWLEDGE AS TO THE PAST IS TO BE DERIVED FROM THE FEELING OF FAMILIARITY ALONE +8230-279154-0036-653: A FURTHER STAGE IS RECOGNITION +8230-279154-0037-654: RECOGNITION IN THIS SENSE DOES NOT NECESSARILY INVOLVE MORE THAN A HABIT OF ASSOCIATION THE KIND OF OBJECT WE ARE SEEING AT THE MOMENT IS ASSOCIATED WITH THE WORD CAT OR WITH AN AUDITORY IMAGE OF PURRING OR WHATEVER OTHER CHARACTERISTIC WE MAY HAPPEN TO RECOGNIZE IN THE CAT OF THE MOMENT +8230-279154-0038-655: WE ARE OF COURSE IN FACT ABLE TO JUDGE WHEN WE RECOGNIZE AN OBJECT THAT WE HAVE SEEN IT BEFORE BUT THIS JUDGMENT IS SOMETHING OVER AND ABOVE RECOGNITION IN THIS FIRST SENSE AND MAY VERY PROBABLY BE IMPOSSIBLE TO ANIMALS THAT NEVERTHELESS HAVE THE EXPERIENCE OF RECOGNITION IN THIS FIRST SENSE OF THE WORD +8230-279154-0039-656: THIS KNOWLEDGE IS MEMORY IN ONE SENSE THOUGH IN ANOTHER IT IS NOT +8230-279154-0040-657: THERE ARE HOWEVER SEVERAL POINTS IN WHICH SUCH AN ACCOUNT OF RECOGNITION IS INADEQUATE TO BEGIN WITH IT MIGHT 
SEEM AT FIRST SIGHT MORE CORRECT TO DEFINE RECOGNITION AS I HAVE SEEN THIS BEFORE THAN AS THIS HAS EXISTED BEFORE +8230-279154-0041-658: THE DEFINITION OF MY EXPERIENCE IS DIFFICULT BROADLY SPEAKING IT IS EVERYTHING THAT IS CONNECTED WITH WHAT I AM EXPERIENCING NOW BY CERTAIN LINKS OF WHICH THE VARIOUS FORMS OF MEMORY ARE AMONG THE MOST IMPORTANT +8230-279154-0042-659: THUS IF I RECOGNIZE A THING THE OCCASION OF ITS PREVIOUS EXISTENCE IN VIRTUE OF WHICH I RECOGNIZE IT FORMS PART OF MY EXPERIENCE BY DEFINITION RECOGNITION WILL BE ONE OF THE MARKS BY WHICH MY EXPERIENCE IS SINGLED OUT FROM THE REST OF THE WORLD +8230-279154-0043-660: OF COURSE THE WORDS THIS HAS EXISTED BEFORE ARE (A->OF) VERY INADEQUATE TRANSLATION OF WHAT ACTUALLY HAPPENS WHEN WE FORM A JUDGMENT OF RECOGNITION BUT THAT IS UNAVOIDABLE WORDS ARE FRAMED TO EXPRESS A LEVEL OF THOUGHT WHICH IS BY NO MEANS PRIMITIVE AND ARE QUITE INCAPABLE OF EXPRESSING SUCH AN ELEMENTARY OCCURRENCE AS RECOGNITION +8455-210777-0000-972: I (REMAINED->REMAIN) THERE ALONE FOR MANY HOURS BUT I MUST ACKNOWLEDGE THAT BEFORE I LEFT THE CHAMBERS I HAD GRADUALLY BROUGHT MYSELF TO LOOK AT THE MATTER IN ANOTHER LIGHT +8455-210777-0001-973: HAD (EVA CRASWELLER->EITHER CRUSWELLER) NOT BEEN GOOD LOOKING HAD JACK BEEN STILL AT COLLEGE HAD SIR KENNINGTON OVAL REMAINED IN ENGLAND HAD MISTER (BUNNIT AND->BUNNITT IN) THE BAR KEEPER NOT SUCCEEDED IN STOPPING MY CARRIAGE ON THE HILL SHOULD I HAVE SUCCEEDED IN ARRANGING FOR THE FINAL DEPARTURE OF MY OLD FRIEND +8455-210777-0002-974: ON ARRIVING AT HOME AT MY OWN RESIDENCE I FOUND THAT OUR SALON WAS FILLED WITH A BRILLIANT COMPANY +8455-210777-0003-975: AS I SPOKE I MADE HIM A GRACIOUS BOW AND I THINK I SHOWED HIM BY MY MODE OF ADDRESS THAT I DID NOT BEAR ANY GRUDGE AS TO MY INDIVIDUAL SELF +8455-210777-0004-976: I HAVE COME TO YOUR SHORES MISTER PRESIDENT WITH THE PURPOSE OF SEEING HOW THINGS ARE PROGRESSING IN THIS DISTANT QUARTER OF THE WORLD +8455-210777-0005-977: WE HAVE OUR LITTLE STRUGGLES HERE AS ELSEWHERE AND ALL THINGS CANNOT BE DONE BY ROSE WATER +8455-210777-0006-978: WE ARE QUITE SATISFIED NOW CAPTAIN (BATTLEAX->BATTLE AXE) SAID MY WIFE +8455-210777-0007-979: QUITE SATISFIED SAID EVA +8455-210777-0008-980: THE LADIES IN COMPLIANCE WITH THAT SOFTNESS OF HEART WHICH IS THEIR CHARACTERISTIC ARE ON ONE SIDE AND THE MEN BY WHOM THE WORLD HAS TO BE MANAGED ARE ON THE OTHER +8455-210777-0009-981: NO DOUBT IN PROCESS OF TIME THE LADIES WILL FOLLOW +8455-210777-0010-982: THEIR MASTERS SAID MISSUS (NEVERBEND->NEVERBAND) +8455-210777-0011-983: I DID NOT MEAN SAID CAPTAIN (BATTLEAX->BATTLE AXE) TO TOUCH UPON PUBLIC SUBJECTS AT SUCH A MOMENT AS THIS +8455-210777-0012-984: MISSUS NEVERBEND YOU MUST INDEED BE PROUD OF YOUR SON +8455-210777-0013-985: JACK HAD BEEN STANDING IN THE FAR CORNER OF THE ROOM TALKING TO EVA AND WAS NOW REDUCED TO SILENCE BY HIS PRAISES +8455-210777-0014-986: SIR KENNINGTON OVAL IS A VERY FINE PLAYER SAID MY WIFE +8455-210777-0015-987: I (AND->AM) MY WIFE AND SON AND THE TWO (CRASWELLERS->CRESTWELLERS) AND THREE OR FOUR OTHERS AGREED TO DINE ON BOARD THE SHIP ON THE NEXT +8455-210777-0016-988: THIS I FELT WAS PAID TO ME AS BEING PRESIDENT OF THE REPUBLIC AND I ENDEAVOURED TO BEHAVE MYSELF WITH SUCH MINGLED HUMILITY AND DIGNITY AS MIGHT (BEFIT->BE FIT) THE OCCASION BUT I COULD NOT BUT FEEL THAT SOMETHING WAS WANTING TO THE SIMPLICITY OF MY ORDINARY LIFE +8455-210777-0017-989: MY WIFE ON THE SPUR OF THE MOMENT MANAGED TO GIVE THE (GENTLEMEN->GENTLEMAN) A VERY GOOD DINNER +8455-210777-0018-990: 
THIS SHE SAID WAS TRUE HOSPITALITY AND I AM NOT SURE THAT I DID NOT AGREE WITH (HER->THAT) +8455-210777-0019-991: THEN THERE WERE THREE OR FOUR LEADING MEN OF THE COMMUNITY WITH THEIR WIVES WHO WERE FOR THE MOST PART THE FATHERS AND MOTHERS OF THE YOUNG LADIES +8455-210777-0020-992: OH YES SAID JACK AND I'M NOWHERE +8455-210777-0021-993: BUT I MEAN TO HAVE MY INNINGS BEFORE LONG +8455-210777-0022-994: OF WHAT MISSUS (NEVERBEND HAD GONE THROUGH IN PROVIDING BIRDS->NEVERS) BEASTS AND FISHES NOT TO TALK OF TARTS AND JELLIES FOR THE DINNER OF THAT DAY NO ONE BUT MYSELF CAN HAVE ANY IDEA BUT IT MUST BE ADMITTED THAT SHE ACCOMPLISHED HER TASK WITH THOROUGH SUCCESS +8455-210777-0023-995: WE SAT WITH THE (OFFICERS->OFFICER) SOME LITTLE TIME AFTER DINNER AND THEN WENT ASHORE +8455-210777-0024-996: HOW MUCH OF EVIL OF REAL ACCOMPLISHED EVIL HAD THERE NOT OCCURRED TO ME DURING THE LAST FEW DAYS +8455-210777-0025-997: WHAT COULD I DO NOW BUT JUST LAY MYSELF DOWN AND DIE +8455-210777-0026-998: AND THE DEATH OF WHICH I DREAMT COULD NOT ALAS +8455-210777-0027-999: WHEN THIS CAPTAIN SHOULD HAVE TAKEN HIMSELF AND HIS VESSEL BACK TO ENGLAND I WOULD RETIRE TO A SMALL FARM WHICH I POSSESSED AT THE (FARTHEST->FURTHEST) SIDE OF THE ISLAND AND THERE IN SECLUSION WOULD I END MY DAYS +8455-210777-0028-1000: JACK WOULD BECOME EVA'S HAPPY HUSBAND AND WOULD REMAIN AMIDST THE HURRIED DUTIES OF THE EAGER WORLD +8455-210777-0029-1001: THINKING OF ALL THIS I WENT TO SLEEP +8455-210777-0030-1002: MISTER NEVERBEND BEGAN THE CAPTAIN AND I (OBSERVED->OBSERVE) THAT UP TO THAT MOMENT HE HAD GENERALLY ADDRESSED ME AS PRESIDENT IT CANNOT BE DENIED THAT WE HAVE COME HERE ON AN UNPLEASANT MISSION +8455-210777-0031-1003: YOU HAVE RECEIVED US WITH ALL THAT COURTESY AND HOSPITALITY FOR WHICH YOUR CHARACTER (*->AND) IN ENGLAND (STANDS->STAND) SO HIGH +8455-210777-0032-1004: IT IS A DUTY SAID I +8455-210777-0033-1005: BUT YOUR POWER IS SO SUPERIOR TO ANY THAT I CAN ADVANCE AS TO MAKE US HERE FEEL THAT THERE IS NO DISGRACE IN YIELDING TO IT +8455-210777-0034-1006: NOT A DOUBT BUT HAD YOUR FORCE BEEN ONLY DOUBLE OR (TREBLE->TROUBLE) OUR OWN I SHOULD HAVE FOUND IT MY DUTY TO STRUGGLE WITH YOU +8455-210777-0035-1007: THAT IS ALL QUITE TRUE MISTER NEVERBEND SAID SIR (FERDINANDO BROWN->FERDINAND OBROWN) +8455-210777-0036-1008: I CAN AFFORD TO SMILE BECAUSE I AM ABSOLUTELY POWERLESS BEFORE YOU BUT I DO NOT THE LESS FEEL THAT IN A MATTER (IN->OF) WHICH THE PROGRESS OF THE WORLD IS CONCERNED I OR RATHER WE HAVE BEEN PUT DOWN BY BRUTE FORCE +8455-210777-0037-1009: YOU HAVE COME TO US THREATENING US WITH ABSOLUTE DESTRUCTION +8455-210777-0038-1010: THEREFORE I FEEL MYSELF QUITE ABLE AS PRESIDENT OF THIS REPUBLIC TO RECEIVE YOU WITH A COURTESY DUE TO THE SERVANTS OF A FRIENDLY ALLY +8455-210777-0039-1011: I CAN ASSURE YOU HE HAS NOT EVEN ALLOWED ME TO SEE THE TRIGGER SINCE I HAVE BEEN ON BOARD +8455-210777-0040-1012: THEN SAID SIR FERDINANDO THERE IS NOTHING FOR IT BUT THAT (HE->WE) MUST TAKE YOU WITH HIM +8455-210777-0041-1013: THERE CAME UPON ME A SUDDEN SHOCK WHEN I HEARD THESE WORDS WHICH EXCEEDED ANYTHING WHICH I HAD YET FELT +8455-210777-0042-1014: YOU HEAR WHAT SIR FERDINANDO BROWN HAS SAID REPLIED CAPTAIN (BATTLEAX->BATTLE AXE) +8455-210777-0043-1015: BUT WHAT IS THE DELICATE MISSION I ASKED +8455-210777-0044-1016: I WAS TO BE TAKEN AWAY AND CARRIED TO ENGLAND OR ELSEWHERE OR DROWNED UPON THE VOYAGE IT MATTERED NOT WHICH +8455-210777-0045-1017: THEN THE REPUBLIC OF (BRITANNULA->BRITAIN NULA) WAS TO BE DECLARED AS NON EXISTENT AND THE 
BRITISH FLAG WAS TO BE EXALTED AND A BRITISH GOVERNOR INSTALLED IN THE EXECUTIVE CHAMBERS +8455-210777-0046-1018: YOU MAY BE QUITE SURE (IT'S->TO) THERE SAID CAPTAIN (BATTLEAX->BATTLE AXE) AND THAT I CAN SO USE IT AS TO HALF OBLITERATE YOUR TOWN WITHIN TWO MINUTES OF MY RETURN ON BOARD +8455-210777-0047-1019: YOU PROPOSE TO KIDNAP ME I SAID +8455-210777-0048-1020: WHAT (WOULD->WILL) BECOME OF YOUR GUN WERE I TO KIDNAP YOU +8455-210777-0049-1021: LIEUTENANT (CROSSTREES->CROSS TREES) IS A VERY GALLANT OFFICER +8455-210777-0050-1022: ONE OF US ALWAYS REMAINS ON BOARD WHILE THE OTHER IS ON SHORE +8455-210777-0051-1023: WHAT WORLD WIDE INIQUITY SUCH A SPEECH AS THAT DISCLOSES SAID I STILL TURNING MYSELF TO THE CAPTAIN FOR THOUGH I WOULD HAVE CRUSHED THEM BOTH BY MY WORDS HAD IT BEEN POSSIBLE MY DISLIKE (CENTRED->SENATE) ITSELF ON SIR FERDINANDO +8455-210777-0052-1024: YOU WILL ALLOW ME TO SUGGEST SAID HE THAT THAT IS A MATTER OF OPINION +8455-210777-0053-1025: WERE I TO COMPLY WITH YOUR ORDERS WITHOUT EXPRESSING MY OWN OPINION I SHOULD SEEM TO HAVE DONE SO WILLINGLY HEREAFTER +8455-210777-0054-1026: THE LETTER RAN AS FOLLOWS +8455-210777-0055-1027: SIR I HAVE IT IN COMMAND TO INFORM YOUR EXCELLENCY THAT YOU HAVE BEEN APPOINTED GOVERNOR OF THE CROWN COLONY WHICH IS CALLED (BRITANNULA->BRITAIN NULA) +8455-210777-0056-1028: THE PECULIAR CIRCUMSTANCES OF THE COLONY ARE WITHIN YOUR EXCELLENCY'S KNOWLEDGE +8455-210777-0057-1029: BUT IN THEIR SELECTION OF A CONSTITUTION THE (BRITANNULISTS->BRITON ULYSTS) HAVE UNFORTUNATELY ALLOWED THEMSELVES BUT ONE (DELIBERATIVE->DELIBERATE) ASSEMBLY AND HENCE (HAVE->HAS) SPRUNG THEIR PRESENT DIFFICULTIES +8455-210777-0058-1030: IT IS FOUNDED ON THE ACKNOWLEDGED WEAKNESS OF THOSE WHO SURVIVE THAT PERIOD OF LIFE AT WHICH MEN CEASE TO WORK +8455-210777-0059-1031: BUT IT IS SURMISED THAT YOU WILL FIND DIFFICULTIES IN THE WAY OF YOUR ENTERING AT ONCE UPON YOUR (GOVERNMENT->GOVERNOR) +8455-210777-0060-1032: THE JOHN BRIGHT (IS ARMED->HIS ARM) WITH A WEAPON OF GREAT POWER AGAINST WHICH IT IS IMPOSSIBLE THAT THE PEOPLE OF (BRITANNULA->BRITAIN EULO) SHOULD PREVAIL +8455-210777-0061-1033: YOU WILL CARRY OUT WITH YOU ONE HUNDRED MEN OF THE NORTH (NORTH WEST->NORTHWEST) BIRMINGHAM REGIMENT WHICH WILL PROBABLY SUFFICE FOR YOUR OWN SECURITY AS IT IS THOUGHT THAT IF MISTER NEVERBEND BE WITHDRAWN THE PEOPLE WILL REVERT EASILY TO THEIR OLD HABITS OF OBEDIENCE +8455-210777-0062-1034: WHEN DO YOU INTEND THAT THE JOHN BRIGHT SHALL START +8455-210777-0063-1035: TO DAY I SHOUTED +8455-210777-0064-1036: AND I HAVE NO ONE READY TO WHOM I CAN GIVE UP THE ARCHIVES OF THE GOVERNMENT +8455-210777-0065-1037: I SHALL BE HAPPY TO TAKE CHARGE OF THEM SAID SIR FERDINANDO +8455-210777-0066-1038: THEY OF COURSE MUST ALL BE ALTERED +8455-210777-0067-1039: OR OF THE HABITS OF OUR PEOPLE IT IS QUITE IMPOSSIBLE +8455-210777-0068-1040: YOUR POWER IS SUFFICIENT I SAID +8455-210777-0069-1041: IF YOU WILL GIVE US YOUR PROMISE TO MEET CAPTAIN (BATTLEAX->ADELAX) HERE AT THIS TIME TO MORROW WE WILL STRETCH A POINT AND DELAY THE DEPARTURE OF THE JOHN BRIGHT FOR TWENTY FOUR HOURS +8455-210777-0070-1042: AND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTRACT FROM ME A PROMISE THAT I WOULD DEPART IN PEACE +8463-287645-0000-543: THIS WAS WHAT DID THE MISCHIEF SO FAR AS THE RUNNING AWAY WAS CONCERNED +8463-287645-0001-544: IT IS HARDLY NECESSARY TO SAY MORE OF THEM HERE +8463-287645-0002-545: FROM THE MANNER IN WHICH (HE->SHE) EXPRESSED HIMSELF WITH REGARD TO ROBERT (HOLLAN->HOLLAND) NO MAN IN THE WHOLE RANGE OF HIS 
RECOLLECTIONS WILL BE LONGER REMEMBERED THAN HE HIS (ENTHRALMENT->ENTHRALIMENT) WHILE UNDER (HOLLAN->HOLLAND) WILL HARDLY EVER BE FORGOTTEN +8463-287645-0003-546: OF THIS PARTY EDWARD A BOY OF SEVENTEEN CALLED FORTH MUCH SYMPATHY HE TOO WAS CLAIMED BY (HOLLAN->HOLLAND) +8463-287645-0004-547: JOHN WESLEY COMBASH JACOB TAYLOR AND THOMAS EDWARD SKINNER +8463-287645-0005-548: A FEW YEARS BACK ONE OF THEIR SLAVES A COACHMAN WAS KEPT ON THE COACH BOX ONE (COLD->CALLED) NIGHT WHEN THEY WERE OUT AT A BALL UNTIL HE BECAME ALMOST FROZEN TO DEATH IN FACT HE DID DIE IN THE INFIRMARY FROM THE EFFECTS OF THE FROST ABOUT ONE WEEK AFTERWARDS +8463-287645-0006-549: THE DOCTOR WHO ATTENDED THE INJURED CREATURE IN THIS CASE WAS SIMPLY TOLD THAT SHE SLIPPED AND FELL DOWN (*->THE) STAIRS AS SHE WAS COMING DOWN +8463-287645-0007-550: ANOTHER CASE SAID JOHN WESLEY WAS A LITTLE GIRL HALF GROWN WHO WAS WASHING WINDOWS (UP STAIRS->UPSTAIRS) ONE DAY AND UNLUCKILY FELL ASLEEP IN THE WINDOW AND IN THIS POSITION WAS FOUND BY HER MISTRESS IN A RAGE THE MISTRESS (HIT->HID) HER A HEAVY SLAP KNOCKED HER OUT OF THE WINDOW AND SHE FELL TO THE PAVEMENT AND DIED IN A FEW HOURS FROM THE EFFECTS THEREOF +8463-287645-0008-551: AS USUAL NOTHING WAS DONE IN THE WAY OF PUNISHMENT +8463-287645-0009-552: I NEVER KNEW OF BUT ONE MAN WHO COULD EVER PLEASE HIM +8463-287645-0010-553: HE WORKED ME VERY HARD HE WANTED TO BE BEATING ME ALL THE TIME +8463-287645-0011-554: SHE WAS A LARGE HOMELY WOMAN THEY WERE COMMON WHITE PEOPLE WITH NO REPUTATION IN THE COMMUNITY +8463-287645-0012-555: SUBSTANTIALLY THIS WAS JACOB'S UNVARNISHED DESCRIPTION OF HIS MASTER AND MISTRESS +8463-287645-0013-556: AS TO HIS AGE AND ALSO THE NAME OF HIS MASTER JACOB'S STATEMENT VARIED SOMEWHAT FROM THE ADVERTISEMENT +8463-287645-0014-557: OF STARTING I DIDN'T KNOW THE WAY TO COME +8463-294825-0000-558: IT'S ALMOST BEYOND CONJECTURE +8463-294825-0001-559: THIS REALITY BEGINS TO EXPLAIN THE DARK POWER AND (OTHERWORLDLY->OTHER WORLDLY) FASCINATION OF TWENTY THOUSAND LEAGUES UNDER THE SEAS +8463-294825-0002-560: FIRST AS A PARIS STOCKBROKER LATER AS A CELEBRATED AUTHOR AND YACHTSMAN HE WENT ON FREQUENT VOYAGES TO BRITAIN AMERICA THE MEDITERRANEAN +8463-294825-0003-561: NEMO BUILDS A FABULOUS (FUTURISTIC->FUTUREISTIC) SUBMARINE THE NAUTILUS THEN CONDUCTS AN UNDERWATER CAMPAIGN OF VENGEANCE AGAINST HIS IMPERIALIST OPPRESSOR +8463-294825-0004-562: IN ALL THE NOVEL (HAD->HEAD) A DIFFICULT GESTATION +8463-294825-0005-563: OTHER SUBTLETIES OCCUR INSIDE EACH EPISODE THE TEXTURES SPARKLING WITH WIT INFORMATION AND INSIGHT +8463-294825-0006-564: HIS SPECIFICATIONS FOR AN OPEN SEA SUBMARINE AND A SELF (CONTAINED->CONTAINING) DIVING SUIT WERE DECADES BEFORE THEIR TIME YET MODERN TECHNOLOGY BEARS THEM OUT TRIUMPHANTLY +8463-294825-0007-565: EVEN THE SUPPORTING CAST IS SHREWDLY DRAWN PROFESSOR ARONNAX THE CAREER SCIENTIST CAUGHT IN AN ETHICAL CONFLICT CONSEIL THE COMPULSIVE CLASSIFIER WHO SUPPLIES HUMOROUS TAG LINES FOR (VERNE'S->VERN'S) FAST FACTS THE HARPOONER NED LAND A CREATURE OF CONSTANT APPETITES MAN AS HEROIC ANIMAL +8463-294825-0008-566: BUT MUCH OF THE (NOVEL'S->NOVELS) BROODING POWER COMES FROM CAPTAIN NEMO +8463-294825-0009-567: THIS COMPULSION LEADS NEMO INTO UGLY CONTRADICTIONS (HE'S->HE IS) A (FIGHTER->FRIGHTER) FOR FREEDOM YET ALL WHO BOARD HIS SHIP (ARE->OR) IMPRISONED THERE FOR GOOD HE WORKS TO SAVE LIVES BOTH HUMAN AND ANIMAL YET HE HIMSELF CREATES A (HOLOCAUST->HOLOCOST) HE DETESTS IMPERIALISM YET HE LAYS PERSONAL CLAIM TO THE SOUTH POLE +8463-294825-0010-568: AND 
IN THIS LAST ACTION HE FALLS INTO THE CLASSIC SIN OF PRIDE +8463-294825-0011-569: (HE'S->HE IS) SWIFTLY (PUNISHED->PUNISH) +8463-294825-0012-570: THE NAUTILUS NEARLY PERISHES IN THE ANTARCTIC AND NEMO SINKS INTO A GROWING DEPRESSION +8463-294825-0013-571: FOR MANY THEN THIS BOOK HAS BEEN A SOURCE OF FASCINATION SURELY ONE OF THE MOST INFLUENTIAL NOVELS EVER WRITTEN AN INSPIRATION FOR SUCH SCIENTISTS AND DISCOVERERS AS ENGINEER SIMON LAKE OCEANOGRAPHER WILLIAM (BEEBE->B) POLAR (TRAVELER SIR ERNEST->TRAVELLERS ARE EARNEST) SHACKLETON +8463-294825-0014-572: FATHOM SIX FEET +8463-294825-0015-573: (GRAM->GRAHAM) ROUGHLY (ONE->WON) TWENTY EIGHTH OF AN OUNCE +8463-294825-0016-574: (MILLIGRAM->MILAGRAM) ROUGHLY ONE TWENTY EIGHT (THOUSAND->THOUSANDTH) OF AN OUNCE +8463-294825-0017-575: (LITER->LATER) ROUGHLY (ONE QUART->WON COURT) +8463-294825-0018-576: METER ROUGHLY ONE YARD THREE INCHES +8463-294825-0019-577: (MILLIMETER->MILLIMETRE) ROUGHLY ONE TWENTY FIFTH OF AN INCH +8463-294828-0000-578: CHAPTER THREE AS MASTER WISHES +8463-294828-0001-579: THREE SECONDS BEFORE THE ARRIVAL OF J B HOBSON'S LETTER I (NO->KNOW) MORE DREAMED OF CHASING THE UNICORN THAN OF TRYING FOR THE (NORTHWEST->NORTH WEST) PASSAGE +8463-294828-0002-580: EVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JOURNEY EXHAUSTED AND BADLY NEEDING (A REST->ARREST) +8463-294828-0003-581: I WANTED NOTHING MORE THAN TO SEE MY COUNTRY AGAIN MY FRIENDS MY MODEST QUARTERS BY THE BOTANICAL GARDENS MY DEARLY BELOVED COLLECTIONS +8463-294828-0004-582: BUT NOW NOTHING COULD HOLD ME BACK +8463-294828-0005-583: CONSEIL WAS MY MANSERVANT +8463-294828-0006-584: FROM RUBBING SHOULDERS WITH SCIENTISTS IN OUR LITTLE UNIVERSE BY THE BOTANICAL GARDENS THE BOY HAD COME TO KNOW A THING OR TWO +8463-294828-0007-585: CLASSIFYING WAS EVERYTHING TO HIM SO HE KNEW NOTHING ELSE WELL VERSED IN (THE->A) THEORY OF CLASSIFICATION HE WAS POORLY VERSED IN ITS PRACTICAL APPLICATION AND I DOUBT THAT HE COULD TELL A SPERM WHALE FROM A BALEEN WHALE +8463-294828-0008-586: AND YET WHAT A FINE GALLANT (LAD->LA) +8463-294828-0009-587: NOT ONCE DID HE COMMENT ON THE LENGTH OR THE HARDSHIPS OF (A->THE) JOURNEY +8463-294828-0010-588: NEVER DID HE OBJECT TO BUCKLING UP HIS (SUITCASE->SUIT CASE) FOR ANY COUNTRY WHATEVER CHINA OR THE CONGO NO MATTER HOW FAR OFF IT WAS +8463-294828-0011-589: HE WENT HERE THERE AND EVERYWHERE IN PERFECT CONTENTMENT +8463-294828-0012-590: PLEASE FORGIVE ME FOR THIS UNDERHANDED WAY OF ADMITTING (*->THAT) I HAD TURNED FORTY +8463-294828-0013-591: HE WAS A FANATIC ON FORMALITY AND HE ONLY ADDRESSED ME IN THE THIRD PERSON TO THE POINT WHERE IT GOT (TIRESOME->TO HIRESUM) +8463-294828-0014-592: THERE WAS GOOD REASON TO STOP AND THINK EVEN FOR THE WORLD'S MOST EMOTIONLESS MAN +8463-294828-0015-593: CONSEIL I CALLED A THIRD (TIME->TON) CONSEIL APPEARED +8463-294828-0016-594: (DID->DEAD) MASTER (SUMMON->SUMMONED) ME HE SAID ENTERING +8463-294828-0017-595: PACK AS MUCH INTO MY TRUNK AS YOU CAN MY (TRAVELING->TRAVELLING) KIT MY SUITS SHIRTS AND SOCKS DON'T BOTHER COUNTING JUST SQUEEZE IT ALL IN AND HURRY +8463-294828-0018-596: WE'LL DEAL WITH THEM LATER WHAT +8463-294828-0019-597: ANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WHOLE MENAGERIE TO FRANCE +8463-294828-0020-598: YES WE ARE CERTAINLY I REPLIED EVASIVELY BUT AFTER WE MAKE A DETOUR +8463-294828-0021-599: A ROUTE SLIGHTLY LESS DIRECT THAT'S ALL +8463-294828-0022-600: WE'RE LEAVING ON THE ABRAHAM LINCOLN +8463-294828-0023-601: YOU SEE MY FRIEND IT'S AN ISSUE OF THE MONSTER THE NOTORIOUS NARWHALE 
+8463-294828-0024-602: WE DON'T KNOW WHERE IT WILL TAKE US +8463-294828-0025-603: BUT (WE'RE->WERE) GOING JUST THE SAME +8463-294828-0026-604: WE HAVE A COMMANDER (WHO'S->WHOSE) GAME FOR ANYTHING +8463-294828-0027-605: I LEFT INSTRUCTIONS FOR SHIPPING MY CONTAINERS OF STUFFED ANIMALS AND DRIED PLANTS TO PARIS FRANCE +8463-294828-0028-606: I OPENED A LINE OF CREDIT SUFFICIENT TO COVER THE (BABIRUSA->BABRUSA) AND CONSEIL AT MY HEELS I JUMPED INTO A CARRIAGE +8463-294828-0029-607: OUR BAGGAGE WAS IMMEDIATELY CARRIED TO THE DECK OF THE FRIGATE I RUSHED ABOARD +8463-294828-0030-608: I ASKED FOR COMMANDER (FARRAGUT->FERRAGUT) +8463-294828-0031-609: ONE OF THE SAILORS LED ME TO THE (AFTERDECK->AFTER DECK) WHERE I STOOD IN THE PRESENCE OF A SMART LOOKING OFFICER WHO EXTENDED HIS HAND TO ME +8463-294828-0032-610: IN PERSON WELCOME ABOARD PROFESSOR YOUR CABIN IS WAITING FOR YOU +8463-294828-0033-611: I WAS WELL SATISFIED WITH MY CABIN WHICH WAS LOCATED IN THE STERN AND OPENED INTO THE (OFFICERS MESS->OFFICER'S MASS) +8463-294828-0034-612: (WE'LL->WILL) BE QUITE COMFORTABLE HERE I TOLD CONSEIL +8463-294828-0035-613: AND SO IF (I'D->I HAD) BEEN DELAYED BY A QUARTER OF AN HOUR OR EVEN LESS THE FRIGATE WOULD HAVE GONE WITHOUT ME AND I WOULD HAVE MISSED OUT ON THIS UNEARTHLY EXTRAORDINARY AND INCONCEIVABLE EXPEDITION WHOSE TRUE STORY MIGHT WELL MEET WITH SOME SKEPTICISM +8463-294828-0036-614: THE WHARVES OF BROOKLYN AND EVERY PART OF NEW YORK BORDERING THE EAST RIVER WERE CROWDED WITH CURIOSITY SEEKERS +8463-294828-0037-615: DEPARTING FROM FIVE HUNDRED THOUSAND THROATS THREE CHEERS BURST FORTH IN SUCCESSION +8463-294828-0038-616: THOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE THESE TIGHTLY PACKED MASSES HAILING THE ABRAHAM LINCOLN UNTIL IT REACHED THE WATERS OF THE HUDSON RIVER AT THE TIP OF THE LONG PENINSULA THAT FORMS NEW YORK CITY +8555-284447-0000-2299: THEN HE RUSHED (DOWN STAIRS->DOWNSTAIRS) INTO THE COURTYARD SHOUTING LOUDLY FOR HIS SOLDIERS AND THREATENING TO PATCH EVERYBODY IN HIS DOMINIONS (IF->AT) THE SAILORMAN WAS NOT RECAPTURED +8555-284447-0001-2300: HOLD HIM FAST MY MEN AND AS SOON AS I'VE HAD MY COFFEE AND OATMEAL (I'LL->I WILL) TAKE HIM TO THE ROOM OF THE GREAT KNIFE AND (PATCH->PAT) HIM +8555-284447-0002-2301: I WOULDN'T MIND A CUP (O->OF) COFFEE MYSELF SAID CAP'N BILL (I'VE->I HAVE) HAD (CONSID'BLE->CONSIDERABLE) EXERCISE THIS (MORNIN->MORNING) AND I'M (ALL READY->ALREADY) FOR (BREAKFAS->BREAKFAST) +8555-284447-0003-2302: BUT CAP'N BILL MADE NO SUCH ATTEMPT KNOWING IT WOULD BE USELESS +8555-284447-0004-2303: AS SOON AS THEY ENTERED THE ROOM OF THE GREAT KNIFE THE BOOLOOROO GAVE A YELL OF DISAPPOINTMENT +8555-284447-0005-2304: THE ROOM OF THE GREAT KNIFE WAS HIGH AND BIG AND AROUND IT RAN ROWS OF BENCHES FOR THE SPECTATORS TO SIT UPON +8555-284447-0006-2305: IN ONE PLACE AT THE HEAD OF THE ROOM WAS A RAISED PLATFORM FOR THE ROYAL FAMILY WITH ELEGANT (THRONE->THROWN) CHAIRS FOR THE KING AND QUEEN AND SIX SMALLER BUT RICHLY UPHOLSTERED CHAIRS (FOR THE SNUBNOSED->WITH A SNUB NOSED) PRINCESSES +8555-284447-0007-2306: (THEREFORE->THEY ARE FOR) HER MAJESTY PAID NO ATTENTION TO (ANYONE->ANY ONE) AND NO ONE PAID ANY ATTENTION TO HER +8555-284447-0008-2307: RICH JEWELS OF (BLUE STONES->BLUESTS) GLITTERED UPON THEIR PERSONS AND THE ROYAL LADIES WERE FULLY AS GORGEOUS AS (THEY->THERE) WERE HAUGHTY AND OVERBEARING +8555-284447-0009-2308: (MORNIN->MORNING) GIRLS (HOPE YE FEEL->OH BE BILL) AS WELL AS (YE->YOU) LOOK +8555-284447-0010-2309: (CONTROL->CONTROLL) YOURSELVES MY DEARS REPLIED THE BOOLOOROO 
THE WORST PUNISHMENT I KNOW HOW TO INFLICT ON (ANYONE->ANY ONE) THIS PRISONER IS ABOUT TO SUFFER (YOU'LL->YOU WILL) SEE A VERY PRETTY PATCHING MY ROYAL DAUGHTERS +8555-284447-0011-2310: SUPPOSE IT'S (A FRIEND->OF BRAND) +8555-284447-0012-2311: THE CAPTAIN SHOOK HIS HEAD +8555-284447-0013-2312: WHY YOU (SAID->SIT) TO FETCH THE FIRST LIVING CREATURE WE MET AND THAT WAS (THIS BILLYGOAT->THE SPILLIGOAT) REPLIED THE CAPTAIN PANTING HARD AS HE HELD FAST TO ONE OF THE GOAT'S HORNS +8555-284447-0014-2313: THE IDEA OF PATCHING CAP'N BILL TO A GOAT WAS VASTLY AMUSING TO HIM AND THE MORE HE THOUGHT OF IT THE MORE HE ROARED WITH LAUGHTER +8555-284447-0015-2314: THEY LOOK SOMETHING ALIKE YOU KNOW SUGGESTED THE CAPTAIN OF THE GUARDS LOOKING FROM ONE TO THE OTHER DOUBTFULLY AND (THEY'RE->THEY) NEARLY THE SAME SIZE IF YOU STAND THE (GOAT->BOAT) ON HIS HIND LEGS THEY'VE BOTH GOT THE SAME STYLE OF WHISKERS AND THEY'RE BOTH OF (EM->THEM) OBSTINATE AND DANGEROUS SO THEY OUGHT TO MAKE A GOOD PATCH SPLENDID +8555-284447-0016-2315: FINE GLORIOUS +8555-284447-0017-2316: WHEN THIS HAD BEEN ACCOMPLISHED THE BOOLOOROO LEANED OVER TO TRY TO DISCOVER WHY THE FRAME ROLLED AWAY SEEMINGLY OF ITS OWN ACCORD AND HE WAS THE MORE PUZZLED BECAUSE IT HAD NEVER DONE SUCH A THING BEFORE +8555-284447-0018-2317: AT ONCE THE GOAT GAVE A LEAP ESCAPED FROM THE SOLDIERS AND WITH BOWED HEAD RUSHED UPON THE BOOLOOROO +8555-284447-0019-2318: BEFORE ANY COULD STOP HIM HE (BUTTED->BUDDED) HIS MAJESTY SO FURIOUSLY THAT THE (KING->KING'S) SOARED FAR INTO THE AIR AND TUMBLED IN A HEAP AMONG THE BENCHES WHERE HE LAY MOANING AND GROANING +8555-284447-0020-2319: THE (GOAT'S WARLIKE->GOATS WORE LIKE) SPIRIT WAS ROUSED BY THIS SUCCESSFUL ATTACK +8555-284447-0021-2320: THEN THEY SPED IN GREAT HASTE FOR THE DOOR AND THE GOAT GAVE A FINAL BUTT THAT SENT THE ROW OF ROYAL LADIES ALL DIVING INTO THE CORRIDOR IN ANOTHER TANGLE WHEREUPON THEY SHRIEKED IN A MANNER THAT TERRIFIED (EVERYONE->EVERY ONE) WITHIN SOUND OF THEIR VOICES +8555-284447-0022-2321: I HAD A NOTION IT WAS YOU (MATE AS SAVED->MADE TO SEE) ME FROM THE KNIFE +8555-284447-0023-2322: I COULDN'T SHIVER MUCH (BEIN->BEING) BOUND SO TIGHT BUT WHEN I'M LOOSE I MEAN TO HAVE (JUS ONE->JUST SWUNG) GOOD SHIVER TO RELIEVE MY (FEELIN'S->FEELINS) +8555-284447-0024-2323: COME AND GET THE BOOLOOROO SHE SAID GOING TOWARD THE BENCHES +8555-284449-0000-2324: SO THEY WERE QUITE WILLING TO OBEY THE ORDERS OF THEIR GIRL QUEEN AND IN A SHORT TIME THE (BLASTS->BLAST) OF TRUMPETS AND ROLL OF DRUMS AND CLASHING OF CYMBALS TOLD TROT AND CAP'N BILL THAT THE BLUE BANDS HAD (ASSEMBLED->A SIMPLED) BEFORE THE PALACE +8555-284449-0001-2325: THEN THEY ALL MARCHED OUT A LITTLE WAY INTO THE FIELDS AND FOUND THAT THE ARMY OF PINKIES HAD ALREADY FORMED AND WAS ADVANCING STEADILY TOWARD THEM +8555-284449-0002-2326: AT THE HEAD OF THE PINKIES WERE GHIP GHISIZZLE AND BUTTON BRIGHT WHO HAD THE PARROT ON HIS SHOULDER AND THEY WERE SUPPORTED BY CAPTAIN (CORALIE->CORALLY) AND CAPTAIN (TINTINT->TINTANT) AND ROSALIE THE WITCH +8555-284449-0003-2327: WHEN THE (BLUESKINS->BLUESKIN) SAW GHIP GHISIZZLE THEY RAISED ANOTHER GREAT SHOUT FOR HE WAS THE (FAVORITE->FAVOURITE) OF THE SOLDIERS AND VERY POPULAR WITH ALL THE PEOPLE +8555-284449-0004-2328: SINCE LAST THURSDAY I (GHIP->GIP) GHISIZZLE HAVE BEEN THE LAWFUL BOOLOOROO OF THE BLUE COUNTRY BUT NOW THAT YOU ARE CONQUERED BY QUEEN TROT I SUPPOSE I AM CONQUERED TOO AND YOU HAVE NO BOOLOOROO AT ALL +8555-284449-0005-2329: WHEN HE FINISHED SHE SAID CHEERFULLY +8555-284449-0006-2330: DON'T WORRY 
SIZZLE DEAR IT'LL ALL COME RIGHT PRETTY SOON +8555-284449-0007-2331: NOW THEN LET'S ENTER THE CITY (AN->AND) ENJOY THE (GRAND->GREAT) FEAST (THAT'S->ITS) BEING COOKED I'M NEARLY (STARVED->STORMED) MYSELF FOR THIS (CONQUERIN KINGDOMS->CONQUERING KINGDOM'S) IS HARD WORK +8555-284449-0008-2332: THEN SHE GAVE ROSALIE BACK HER MAGIC RING THANKING THE KIND WITCH FOR ALL SHE HAD DONE FOR THEM +8555-284449-0009-2333: YOU ARE (*->A) MATE REPLIED THE SAILOR +8555-284449-0010-2334: IT WILL BE SUCH A SATISFACTION +8555-284449-0011-2335: THE GUARDS HAD A TERRIBLE STRUGGLE WITH THE GOAT WHICH WAS LOOSE IN THE ROOM AND STILL WANTED TO FIGHT BUT FINALLY THEY SUBDUED THE ANIMAL AND THEN THEY TOOK THE BOOLOOROO OUT OF THE FRAME HE WAS TIED IN AND BROUGHT BOTH HIM AND THE GOAT BEFORE QUEEN TROT WHO AWAITED THEM IN THE THRONE ROOM OF THE PALACE +8555-284449-0012-2336: I'LL GLADLY DO THAT PROMISED THE NEW BOOLOOROO AND I'LL FEED THE (HONORABLE GOAT->HONED) ALL THE SHAVINGS AND LEATHER AND TIN CANS HE CAN EAT BESIDES THE GRASS +8555-284449-0013-2337: (SCUSE->EXCUSE) ME SAID (TROT->SHOT) I NEGLECTED TO TELL YOU THAT YOU'RE NOT THE BOOLOOROO ANY MORE +8555-284449-0014-2338: THE FORMER BOOLOOROO GROANED +8555-284449-0015-2339: I'LL NOT BE WICKED ANY MORE SIGHED THE OLD BOOLOOROO I'LL REFORM +8555-284449-0016-2340: AS A PRIVATE CITIZEN I SHALL BE A MODEL OF DEPORTMENT BECAUSE IT WOULD BE DANGEROUS TO BE OTHERWISE +8555-284449-0017-2341: WHEN FIRST THEY ENTERED THE THRONE ROOM THEY TRIED TO BE AS HAUGHTY AND SCORNFUL AS EVER BUT THE BLUES WHO WERE ASSEMBLED THERE ALL LAUGHED AT THEM AND JEERED THEM FOR THERE WAS NOT A SINGLE PERSON IN ALL THE BLUE COUNTRY WHO LOVED THE PRINCESSES THE LEAST LITTLE BIT +8555-284449-0018-2342: SO GHIP GHISIZZLE ORDERED THE CAPTAIN TO TAKE A FILE OF SOLDIERS AND ESCORT THE RAVING BEAUTIES TO THEIR NEW HOME +8555-284449-0019-2343: THAT EVENING TROT GAVE A GRAND BALL IN THE PALACE TO WHICH THE MOST IMPORTANT OF THE PINKIES AND THE BLUESKINS WERE INVITED +8555-284449-0020-2344: THE COMBINED BANDS OF BOTH THE COUNTRIES PLAYED THE MUSIC AND A FINE SUPPER WAS SERVED +8555-292519-0000-2283: BRIGHTER THAN EARLY (DAWN'S->DAWNS) MOST BRILLIANT DYE ARE BLOWN CLEAR BANDS OF COLOR THROUGH THE SKY THAT SWIRL AND SWEEP AND MEET TO BREAK AND FOAM LIKE RAINBOW VEILS UPON A BUBBLE'S DOME +8555-292519-0001-2284: GUIDED BY YOU HOW WE MIGHT STROLL TOWARDS DEATH OUR ONLY MUSIC ONE ANOTHER'S BREATH THROUGH (GARDENS->GARDEN'S) INTIMATE WITH HOLLYHOCKS WHERE (*->IS) SILENT POPPIES BURN BETWEEN THE ROCKS BY POOLS WHERE BIRCHES BEND TO CONFIDANTS ABOVE GREEN WATERS SCUMMED WITH (*->THE) LILY PLANTS +8555-292519-0002-2285: VENICE +8555-292519-0003-2286: IN A SUNSET GLOWING OF CRIMSON AND GOLD SHE LIES THE GLORY OF THE WORLD A (BEACHED->BEECHED) KING'S GALLEY (WHOSE->WHO) SAILS ARE FURLED WHO IS HUNG WITH TAPESTRIES RICH AND OLD +8555-292519-0004-2287: THE PITY THAT WE MUST COME AND GO +8555-292519-0005-2288: WHILE THE OLD GOLD AND THE MARBLE STAYS FOREVER GLEAMING ITS SOFT STRONG BLAZE CALM IN THE EARLY EVENING GLOW +8555-292519-0006-2289: THE PLEASANT GRAVEYARD OF MY SOUL WITH SENTIMENTAL CYPRESS TREES AND FLOWERS IS FILLED THAT I MAY STROLL IN MEDITATION AT MY EASE +8555-292519-0007-2290: IT IS MY HEART HUNG IN THE SKY AND NO CLOUDS EVER FLOAT BETWEEN THE GRAVE FLOWERS AND MY HEART ON HIGH +8555-292519-0008-2291: OVER THE TRACK LINED CITY STREET THE YOUNG (MEN->MAN) THE GRINNING (MEN->MAN) PASS +8555-292519-0009-2292: HO YE SAILS THAT SEEM TO (WANDER IN->WONDER AND) DREAM FILLED MEADOWS SAY IS THE SHORE WHERE I 
STAND THE ONLY FIELD OF STRUGGLE OR ARE YE HIT AND BATTERED OUT THERE BY WAVES AND WIND GUSTS AS YE TACK OVER A CLASHING SEA OF WATERY ECHOES +8555-292519-0010-2293: OLD DANCES ARE SIMPLIFIED OF THEIR YEARNING BLEACHED BY TIME +8555-292519-0011-2294: HE HAD GOT INTO HER COURTYARD +8555-292519-0012-2295: THROUGH THE BLACK NIGHT RAIN HE SANG TO HER WINDOW BARS +8555-292519-0013-2296: THAT WAS BUT RUSTLING OF (DRIPPING->TRIPPING) PLANTS IN THE DARK +8555-292519-0014-2297: SHE WAS ALONE THAT NIGHT +8555-292519-0015-2298: HE HAD BROKEN INTO HER COURTYARD +908-157963-0000-1321: TO FADE AWAY LIKE MORNING BEAUTY FROM HER MORTAL DAY DOWN BY THE RIVER OF (ADONA->ADONNA) HER SOFT VOICE IS HEARD AND THUS HER GENTLE LAMENTATION FALLS LIKE MORNING DEW +908-157963-0001-1322: (O->OH) LIFE OF THIS OUR SPRING +908-157963-0002-1323: WHY FADES THE LOTUS OF THE WATER +908-157963-0003-1324: WHY FADE THESE CHILDREN OF THE SPRING +908-157963-0004-1325: (THEL->FELL) IS LIKE A WATRY BOW AND LIKE A PARTING CLOUD LIKE A REFLECTION IN A GLASS LIKE SHADOWS IN THE WATER LIKE DREAMS OF INFANTS LIKE A SMILE UPON AN (INFANTS->INFANT'S) FACE +908-157963-0005-1326: LIKE THE (DOVES VOICE->DOVE'S BOYS) LIKE TRANSIENT DAY LIKE MUSIC IN THE AIR AH +908-157963-0006-1327: AND GENTLE SLEEP THE SLEEP OF DEATH AND GENTLY HEAR THE VOICE OF HIM THAT WALKETH IN THE GARDEN IN THE EVENING TIME +908-157963-0007-1328: THE (LILLY->LILY) OF THE VALLEY BREATHING IN THE HUMBLE GRASS (ANSWERD->ANSWERED) THE LOVELY (MAID AND->MAIDEN) SAID I AM A (WATRY->WATCHERY) WEED AND I AM VERY SMALL AND LOVE TO DWELL IN LOWLY VALES SO WEAK THE GILDED BUTTERFLY SCARCE (PERCHES->PURCHASE) ON MY HEAD YET I AM VISITED FROM HEAVEN AND HE THAT SMILES ON ALL WALKS IN THE VALLEY AND EACH MORN OVER ME SPREADS HIS HAND SAYING REJOICE THOU HUMBLE GRASS THOU (NEW BORN->NEWBORN) LILY FLOWER +908-157963-0008-1329: THOU GENTLE MAID OF SILENT VALLEYS AND OF MODEST BROOKS FOR THOU (SHALL->SHALT) BE CLOTHED IN LIGHT AND FED WITH MORNING MANNA TILL (SUMMERS->SUMMER'S) HEAT MELTS THEE BESIDE THE FOUNTAINS AND THE SPRINGS TO FLOURISH IN ETERNAL VALES THEY WHY (SHOULD THEL->SHOULDST THOU) COMPLAIN +908-157963-0009-1330: WHY SHOULD THE MISTRESS OF THE (VALES->VEILS) OF (HAR->HOAR) UTTER A SIGH +908-157963-0010-1331: SHE (CEASD->CEASED) AND (SMILD->SMILED) IN TEARS THEN SAT DOWN IN HER SILVER SHRINE +908-157963-0011-1332: WHICH THOU DOST SCATTER ON EVERY LITTLE BLADE OF GRASS THAT SPRINGS REVIVES THE MILKED COW AND TAMES THE FIRE BREATHING STEED +908-157963-0012-1333: BUT (THEL->THOUGH) IS LIKE A FAINT CLOUD KINDLED AT THE RISING SUN I VANISH FROM MY PEARLY THRONE AND WHO SHALL FIND MY PLACE +908-157963-0013-1334: AND WHY IT SCATTERS ITS BRIGHT BEAUTY (THRO->THROUGH) THE (HUMID->HUMAN) AIR +908-157963-0014-1335: DESCEND (O->A) LITTLE CLOUD AND HOVER BEFORE THE EYES OF (THEL->FELL) +908-157963-0015-1336: O LITTLE CLOUD THE VIRGIN SAID I CHARGE THEE TO TELL ME WHY THOU COMPLAINEST NOW WHEN IN ONE HOUR THOU FADE AWAY THEN WE SHALL SEEK THEE BUT NOT FIND AH (THEL->FELL) IS LIKE TO THEE +908-157963-0016-1337: I PASS AWAY YET I COMPLAIN AND NO ONE HEARS MY VOICE +908-157963-0017-1338: THE CLOUD THEN (SHEWD->SHOWED) HIS GOLDEN HEAD AND HIS BRIGHT FORM (EMERG'D->EMERGED) +908-157963-0018-1339: AND FEAREST THOU BECAUSE I VANISH AND AM SEEN NO MORE +908-157963-0019-1340: IT IS TO TENFOLD LIFE TO LOVE TO PEACE AND RAPTURES (HOLY->WHOLLY) UNSEEN DESCENDING WEIGH MY LIGHT WINGS UPON BALMY FLOWERS AND COURT THE FAIR EYED (DEW->DO) TO TAKE ME TO HER SHINING TENT THE WEEPING VIRGIN TREMBLING 
KNEELS BEFORE THE RISEN SUN +908-157963-0020-1341: TILL WE ARISE (LINK'D->LINKED) IN A GOLDEN BAND AND NEVER PART BUT WALK UNITED BEARING FOOD TO ALL OUR TENDER FLOWERS +908-157963-0021-1342: LIVES NOT ALONE NOR (OR->OF) ITSELF FEAR NOT AND I WILL CALL THE WEAK WORM FROM ITS LOWLY BED AND THOU SHALT HEAR ITS VOICE +908-157963-0022-1343: COME FORTH WORM AND THE SILENT VALLEY TO THY PENSIVE QUEEN +908-157963-0023-1344: THE HELPLESS WORM AROSE AND SAT UPON THE (LILLYS->LILY'S) LEAF AND THE BRIGHT (CLOUD SAILD->CLOUDS SAILED) ON TO FIND HIS PARTNER IN THE VALE +908-157963-0024-1345: IMAGE OF WEAKNESS ART THOU BUT A WORM +908-157963-0025-1346: I SEE THEY LAY HELPLESS AND NAKED WEEPING AND NONE TO ANSWER NONE TO CHERISH THEE WITH (MOTHERS->MOTHER'S) SMILES +908-157963-0026-1347: AND SAYS THOU MOTHER OF MY CHILDREN I HAVE LOVED THEE AND I HAVE GIVEN THEE A CROWN THAT NONE CAN TAKE AWAY +908-157963-0027-1348: AND LAY ME DOWN IN THY COLD BED AND LEAVE MY SHINING LOT +908-157963-0028-1349: OR AN EYE OF GIFTS AND GRACES (SHOWRING->SHOWERING) FRUITS AND COINED GOLD +908-157963-0029-1350: WHY A TONGUE (IMPRESS'D->IMPRESSED) WITH HONEY FROM EVERY WIND +908-157963-0030-1351: WHY AN EAR A WHIRLPOOL FIERCE TO DRAW CREATIONS IN +908-31957-0000-1352: ALL IS SAID WITHOUT A WORD +908-31957-0001-1353: I SIT BENEATH THY LOOKS AS CHILDREN DO IN THE NOON SUN WITH SOULS THAT TREMBLE THROUGH THEIR HAPPY EYELIDS FROM AN UNAVERRED YET (PRODIGAL->CHRONICAL) INWARD JOY +908-31957-0002-1354: I DID NOT WRONG MYSELF SO BUT I PLACED A WRONG ON THEE +908-31957-0003-1355: WHEN CALLED BEFORE I TOLD HOW HASTILY I DROPPED MY FLOWERS OR (BRAKE->BREAK) OFF FROM A GAME +908-31957-0004-1356: SHALL I NEVER MISS HOME TALK AND BLESSING AND THE COMMON KISS THAT COMES TO EACH IN TURN NOR COUNT IT STRANGE WHEN I LOOK UP TO DROP ON A NEW RANGE OF WALLS AND FLOORS ANOTHER HOME THAN THIS +908-31957-0005-1357: ALAS I HAVE GRIEVED SO I AM HARD TO LOVE +908-31957-0006-1358: OPEN THY HEART WIDE AND FOLD WITHIN THE WET WINGS OF THY DOVE +908-31957-0007-1359: COULD IT MEAN TO LAST A LOVE SET PENDULOUS BETWEEN SORROW AND SORROW +908-31957-0008-1360: NAY I RATHER THRILLED DISTRUSTING EVERY LIGHT THAT SEEMED TO GILD THE ONWARD PATH AND (FEARED->FEAR) TO OVERLEAN A FINGER EVEN +908-31957-0009-1361: AND THOUGH I HAVE GROWN SERENE AND STRONG SINCE THEN I THINK THAT GOD HAS WILLED A STILL RENEWABLE FEAR +908-31957-0010-1362: O LOVE O TROTH +908-31957-0011-1363: AND LOVE BE FALSE +908-31957-0012-1364: IF HE TO KEEP ONE OATH MUST LOSE ONE JOY BY HIS LIFE'S STAR FORETOLD +908-31957-0013-1365: SLOW TO WORLD GREETINGS QUICK WITH ITS O LIST WHEN THE (ANGELS->ANGEL) SPEAK +908-31957-0014-1366: A RING OF AMETHYST I COULD NOT WEAR HERE PLAINER TO MY SIGHT THAN THAT FIRST KISS +908-31957-0015-1367: THAT WAS THE CHRISM OF LOVE WHICH (LOVE'S->LOVES) OWN CROWN WITH SANCTIFYING SWEETNESS DID (PRECEDE->PROCEED) THE THIRD UPON MY LIPS WAS FOLDED DOWN (IN PERFECT->IMPERFECT) PURPLE STATE SINCE WHEN INDEED I HAVE BEEN PROUD AND SAID MY LOVE MY OWN +908-31957-0016-1368: DEAREST TEACH ME SO TO POUR OUT GRATITUDE AS THOU DOST GOOD +908-31957-0017-1369: MUSSULMANS AND (GIAOURS->GUY ORS) THROW KERCHIEFS AT A SMILE AND HAVE NO RUTH FOR ANY WEEPING +908-31957-0018-1370: BUT THOU ART NOT SUCH A LOVER MY BELOVED +908-31957-0019-1371: THOU CANST WAIT THROUGH SORROW AND SICKNESS TO BRING SOULS TO TOUCH AND THINK IT SOON WHEN OTHERS CRY TOO LATE +908-31957-0020-1372: I (THANK->THINK) ALL WHO HAVE LOVED ME IN THEIR HEARTS WITH THANKS AND LOVE FROM MINE +908-31957-0021-1373: OH TO 
SHOOT MY SOUL'S FULL MEANING INTO FUTURE YEARS THAT THEY SHOULD LEND IT UTTERANCE AND SALUTE LOVE THAT ENDURES FROM LIFE THAT DISAPPEARS +908-31957-0022-1374: THEN I LONG TRIED BY NATURAL ILLS RECEIVED THE COMFORT FAST WHILE BUDDING AT THY SIGHT MY PILGRIM'S STAFF GAVE OUT GREEN LEAVES WITH MORNING DEWS (IMPEARLED->IMPELLED) +908-31957-0023-1375: I LOVE THEE FREELY AS MEN STRIVE FOR RIGHT I LOVE THEE PURELY AS THEY TURN FROM PRAISE +908-31957-0024-1376: I LOVE THEE WITH THE PASSION PUT TO USE IN MY OLD (GRIEFS->GREEDS) AND WITH MY CHILDHOOD'S FAITH +908-31957-0025-1377: I LOVE THEE WITH A LOVE I SEEMED TO LOSE WITH MY LOST SAINTS I LOVE THEE WITH THE BREATH SMILES TEARS OF ALL MY LIFE AND IF GOD CHOOSE I SHALL BUT LOVE THEE BETTER AFTER DEATH + +SUBSTITUTIONS: count ref -> hyp +29 AND -> IN +18 IN -> AND +15 A -> THE +11 THE -> A +9 AN -> AND +7 THIS -> THE +6 I'VE -> I +6 ANYONE -> ANY +5 SOAMES -> SOLMES +5 SILVIA -> SYLVIA +5 O -> OF +5 A -> OF +4 WHERE -> WERE +4 TWO -> TOO +4 THAT -> THE +4 O -> OH +4 METER -> METRE +4 MEN -> MAN +4 MAN -> MEN +4 IS -> AS +4 IN -> AN +4 I'M -> I +4 GALATIANS -> GALLATIONS +4 BATTLEAX -> BATTLE +3 VALLIERE -> VALLIERS +3 TRAVELING -> TRAVELLING +3 TOWARDS -> TOWARD +3 TODAY -> TO +3 THEY -> THERE +3 THEL -> FELL +3 THEATER -> THEATRE +3 THE -> THEIR +3 SOLON -> SOLEMN +3 RODOLFO -> RUDOLPHO +3 PRACTISE -> PRACTICE +3 OH -> O +3 OF -> A +3 MAINHALL -> MAIN +3 KAFFAR -> KAFFIR +3 IS -> WAS +3 HOLLAN -> HOLLAND +3 HER -> A +3 HE'S -> HE +3 EVERYONE -> EVERY +3 BRITANNULA -> BRITAIN +3 BANNISTER -> BANISTER +3 ANDERS -> ANDREWS +2 YOU'RE -> YOU +2 WYLDER -> WILDER +2 WHITTAWS -> WIDOWS +2 WHEN -> ONE +2 WE'LL -> WILL +2 VAPOURS -> VAPORS +2 VANDERPOOL -> VAN +2 TWO -> TO +2 TONIGHT -> TO +2 TO -> OF +2 TIMAEUS -> TO +2 THORKEL -> TORQUAL +2 THEN -> THAN +2 THEM -> HIM +2 THEIR -> THERE +2 THEIR -> THE +2 THEE -> THE +2 THE -> THAT +2 THAT -> IT +2 SOMETIME -> SOME +2 SOMEONE -> SOME +2 SHE'S -> SHE +2 SEEM -> SEEMED +2 ROUND -> AROUND +2 READ -> RED +2 PLATONISTS -> PLATANISTS +2 PARLOR -> PARLOUR +2 ONE -> WON +2 NOW -> THOU +2 NO -> KNOW +2 NEO -> NEW +2 NEIGHBOUR -> NEIGHBOR +2 MUNNY -> MONEY +2 MORNIN -> MORNING +2 MARSHALL -> MARSHAL +2 MADAM -> MADAME +2 LEAVENWORTH -> LEVINWORTH +2 LEAVENWORTH -> LEVIN +2 JAGO -> YAGO +2 IT'S -> ITS +2 IT -> ITS +2 IS -> HIS +2 IS -> HAS +2 INTO -> AND +2 IN -> A +2 I'D -> I +2 I -> I'M +2 HONOURABLE -> HONORABLE +2 HOLBEIN -> HOLBINE +2 HE -> WE +2 HAS -> HAD +2 HALLO -> HELLO +2 GREY -> GRAY +2 GRAY -> GREY +2 GILCHRIST -> GILGRIST +2 FOUNDED -> FOUND +2 FEELING -> FILLING +2 FAVORITE -> FAVOURITE +2 FAIRVIEW -> FAIR +2 EMIL -> AMYL +2 DISSENT -> DESCENT +2 DEDALUS -> DAEDALUS +2 DE -> THE +2 CRESSWELL -> CRESWELL +2 CRESSWELL -> CRASWELL +2 COURT -> COURTYARD +2 COLOUR -> COLOR +2 CHECK -> CHEQUE +2 CHAISE -> CHASE +2 CARL -> KARL +2 BUT -> THAT +2 BRAKE -> BREAK +2 BEHAVIOUR -> BEHAVIOR +2 AYE -> I +2 AY -> I +2 AS -> TO +2 ARE -> OUR +2 ANOTHER -> THE +2 ANDERS -> ANDREW'S +2 AND -> INTO +2 AND -> AS +2 ALEXANDRA -> ALEXANDER +1 ZORA'S -> ZORAS +1 ZORA -> SORA +1 ZOOF'S -> ZEF'S +1 YOU'LL -> YOU +1 YEARNING -> YEARNIN +1 YE -> YOU +1 YE -> BE +1 XAVIER -> ZEVIOUR +1 XAVIER -> ZEVIER +1 XAVIER -> ZAVIER +1 WOULD -> WILL +1 WOULD -> WERE +1 WORST -> WORSE +1 WORSE -> HORSE +1 WOODBEGIRT -> WOOD +1 WOOD -> WOODCUTTERS +1 WONDERING -> WANDERING +1 WOMAN'S -> WOMEN'S +1 WOMAN'S -> WOMAN' +1 WITHES -> WIDTHS +1 WITH -> WHICH +1 WITH -> WHEN +1 WINTER -> WINNER +1 WILL -> WOULD +1 WILL -> WE'LL +1 WIFE -> WHITE +1 
WIDTH -> WID +1 WHOSE -> WHO +1 WHO'S -> WHOSE +1 WHO -> WHOSE +1 WHITTAWD -> WIDOWED +1 WHITTAW -> WIDOW +1 WHISK -> WHISKED +1 WHIRLPOOL -> WAR +1 WHIPPED -> WHIP +1 WHERE -> WHERE'S +1 WHEN -> BY +1 WHATEVER -> WHATSOEVER +1 WHAT'S -> WHAT +1 WHAT -> WHEN +1 WHALE -> WELL +1 WHALE -> WAIL +1 WESTPORT -> WESTWARD +1 WESTPORT -> PORT +1 WESTMERE -> WESTMIR +1 WERE -> WHERE +1 WERE -> ARE +1 WELL -> WHILE +1 WELL -> FOR +1 WELCOMED -> WELCOME +1 WEDNESDAY -> WIND +1 WEATHER -> WHETHER +1 WEAR -> WHERE +1 WEAKLY -> WEEKLY +1 WE'RE -> WERE +1 WE'RE -> WE +1 WE -> WE'VE +1 WAVES -> WAY +1 WATRY -> WATCHERY +1 WATERMILL -> WATER +1 WASTE -> WASTES +1 WAS -> WITH +1 WAS -> VIEW'S +1 WAS -> IS +1 WARLIKE -> WORE +1 WANDER -> WONDER +1 VOUCHED -> VOUCH +1 VOICE -> BOYS +1 VISITORS -> VISITOR +1 VILLEROY -> VILLEROI +1 VILLA -> VALIDESTE +1 VIGNETTE -> VINEYARD +1 VICARIOUS -> VIPEROUS +1 VIADUCT -> VIEDUC +1 VERY -> VERIMENT +1 VERSE -> FIRST +1 VERNE'S -> VERN'S +1 VAUDOIS -> FAUDOIS +1 VARIABILITY -> VERY +1 VANES -> VEINS +1 VALES -> VEILS +1 UTAH -> NEW +1 UPON -> ON +1 UP -> UPSTAIRS +1 UP -> OF +1 UNWARILY -> THEN +1 UNTO -> INTO +1 UNLIKE -> I +1 UNJUST -> UNJUSTI +1 UNDERGROUND -> ON +1 UNCLENCHED -> CLENCHED +1 UNC -> YOUNG +1 UN -> AND +1 TWO -> TUTRILOGIES +1 TWITE -> QUITE +1 TURNOVER -> TURN +1 TUPPENY -> TUPPENNY +1 TUMBLED -> TUMBLE +1 TROT -> SHOT +1 TREDDLESTON -> TREDDLESTONE +1 TREBLE -> TROUBLE +1 TRAVESTY -> TRAVASTY +1 TRAVELERS -> TRAVELLERS +1 TRAVELER -> TRAVELLERS +1 TOWNE -> TOWN +1 TOWELLING -> TOWELINGS +1 TOULD -> DID +1 TOTTY -> HAD +1 TOPS -> TOPSY +1 TOP -> TO +1 TOOMS -> TOMBS +1 TOO -> TWO +1 TONNAY -> TONISON +1 TONNAY -> TONE +1 TONNAY -> TO +1 TONNAY -> TINACHANT +1 TOILETTE -> TOILET +1 TO -> WEST +1 TO -> TWO +1 TO -> THROUGH +1 TO -> INTO +1 TO -> DOES +1 TO -> DEFINED +1 TIRESOME -> TO +1 TINTORET -> TINTARETTE +1 TINTINT -> TINTANT +1 TIMES -> TUBS +1 TIME -> YOU +1 TIME -> TON +1 TIMAEUS -> TIMIUS +1 TIMAEUS -> TIMIRAS +1 TIMAEUS -> TIMAIRS +1 TIMAEUS -> TENEAS +1 TIMAEUS -> TEARS +1 TIBI -> TIBBY +1 THUS -> LUSTY +1 THROUGH -> TO +1 THRONE -> THROWN +1 THRO -> THROUGH +1 THOUSAND -> THOUSANDTH +1 THOUGHT -> BOUGHT +1 THOUGH -> THE +1 THORLEIF -> TORE +1 THORKEL -> TORKLE +1 THIS -> OSTENSITY +1 THINGS -> THING +1 THEY'RE -> THEY +1 THEY -> THE +1 THEY -> MAY +1 THEREFORE -> THEY +1 THERE -> THERE'S +1 THERE -> THEIR +1 THEN -> IN +1 THEN -> AND +1 THEM -> THE +1 THEL -> THOUGH +1 THEL -> THOU +1 THEE'S -> THESE +1 THEE -> HE +1 THE -> WHO +1 THE -> TO +1 THE -> THIS +1 THE -> THEY +1 THE -> IN +1 THAT'S -> ITS +1 THAT -> THAN +1 THAT -> AT +1 THANKING -> THINKING +1 THANK -> THINK +1 THAN -> THAT +1 THAN -> IN +1 TECHNIQUE -> TYPE +1 TEA -> T +1 TAYLOR -> TAILOR +1 TARANTULA -> TERENTIAL +1 TALKERS -> TALK +1 TAKEN -> TAKING +1 TABU -> TABOU +1 TABU -> TABOO +1 TABU -> BOOT +1 TABLE -> TABLECLOTH +1 SYMPOSIUM -> SIMPOSIUM +1 SYMPOSIUM -> SIMPOSE +1 SWOONS -> SWOON +1 SWEEP -> SWEPT +1 SWAN -> SWAY +1 SUSPICIONS -> SUSPICION +1 SURVIVE -> SURVIVED +1 SURFACES -> SERVICES +1 SUPPOSITION -> OPPOSITION +1 SUMNER -> SUMMER +1 SUMMON -> SUMMONED +1 SUMMERS -> SUMMER'S +1 SUITCASE -> SUIT +1 STROLL -> STRAW +1 STREAMLINE -> STREAM +1 STORY'S -> STORIES +1 STEEL'D -> STEELED +1 STEADY -> STUDY +1 STATE -> STATES +1 STARVED -> STORMED +1 STARTS -> START +1 STANDS -> STAND +1 STAID -> STAY +1 STAGE -> STEED +1 SQUEAK -> SQUI +1 SPRING -> SPRANG +1 SPRAGUE -> SP +1 SPOKE -> SPO +1 SPLENDOR -> SPLENDOUR +1 SPLENDET -> SPLENDID +1 SPIN -> SPEND +1 SPECIALISED -> 
SPECIALIZED +1 SOUTHEY'S -> SO +1 SOUTHEY -> SELVEY +1 SOUTHEY -> SALVI +1 SOU -> SOUS +1 SOOTHED -> SOOTHE +1 SON -> FUN +1 SOME -> SOMETIME +1 SOLVED -> SOBBED +1 SOLON'S -> SILENCE +1 SOLILOQUY -> SOLOQUY +1 SODALITY -> SODELITY +1 SOCRATIC -> CRADIC +1 SO -> SEWED +1 SNUBNOSED -> SNUB +1 SMILED -> SMIL +1 SMILD -> SMILED +1 SMELLS -> MILLS +1 SLEEVE -> STEVE +1 SLANG -> SLING +1 SKILLFUL -> SKILFUL +1 SKEPTICAL -> SCEPTICAL +1 SIZE -> SIZED +1 SIR -> ARE +1 SINCE -> SE +1 SIN -> IN +1 SILENT -> SILENCE +1 SIGHT -> SIGHTSEERS +1 SIGHED -> SIDE +1 SIF -> SIFT +1 SHOWRING -> SHOWERING +1 SHOULD -> WOULD +1 SHOULD -> SHOULDST +1 SHODDY -> SHODY +1 SHIP -> SHIP'S +1 SHEWD -> SHOWED +1 SHERIFF -> SHERIFF'S +1 SHE -> YOU +1 SHE -> HE +1 SHARPS -> SHARP'S +1 SHARP'ST -> SHARPEST +1 SHANNON -> SHAN +1 SHAN'T -> SHA'N'T +1 SHALL -> SHALT +1 SETTLE -> SETTLED +1 SERVE -> SERVED +1 SERVANT -> SERVANTS +1 SENTENCES -> SENTENCE +1 SENT -> SET +1 SENSE -> SCENTS +1 SENCE -> SINCE +1 SEMON'S -> SIMMONS +1 SEEMS -> SEEMED +1 SEEMED -> SEEMS +1 SEEDS -> SEATS +1 SEE -> SEA +1 SECTS -> SEX +1 SEATING -> SITTING +1 SEAT -> SEED +1 SCUTCHEON -> STATUNE +1 SCUSE -> EXCUSE +1 SCRAPBOOKS -> SCRAP +1 SCOUTING -> SCOUT +1 SCHOOLROOM -> SCHOOL +1 SCHOOL -> SCHOOLS +1 SCHOOL -> SCHOOLBOYS +1 SCEVRA -> SCAFFRA +1 SCEURA -> SKURA +1 SCATHE -> SCATH +1 SCAROONS -> SCARONS +1 SAVED -> SEE +1 SAUVEUR -> SEVER +1 SATE -> SAT +1 SANG -> SAYING +1 SAMPLE -> SABLE +1 SALINE -> SAILING +1 SALIENT -> SAILORED +1 SAINTS -> SAYS +1 SAILD -> SAILED +1 SAIL -> SALE +1 SAID -> SIT +1 RUST -> REST +1 RULED -> ROLLED +1 RUFUS -> RUFFUS +1 RUE -> GRUE +1 ROSSETER -> ROSSITUR +1 ROERER -> ROAR +1 RODOLFO'S -> GODOLPH'S +1 RODOLFO -> UDOLPHO +1 RODOLFO -> RODOLPHO +1 RODOLFO -> RIDOLPHO +1 RODOLFO -> RIDOLPHAL +1 RODOLFO -> RADOLPHO +1 ROCKED -> ROCK +1 ROBIN'S -> ROBINS +1 REWEIGHED -> REWAIED +1 RETURN -> RETURNED +1 RESIGNED -> RESIGN +1 REMOVE -> MOVED +1 REMOV'D -> REMOVED +1 REMEMBER -> REMEMBERED +1 REMARK -> REMARKED +1 REMAINED -> REMAINING +1 REMAINED -> REMAIN +1 REMAIN -> REMAINED +1 RELOCATED -> RE +1 RELIES -> REALIZE +1 REIGNED -> RAINED +1 REGGIE -> READY +1 REGAINED -> REGAIN +1 REFUSED -> WERE +1 REENFORCEMENTS -> REINFORCEMENTS +1 REEDER -> READER +1 RED -> READ +1 RECORD -> RECORDS +1 RECOGNISED -> RECOGNIZED +1 REBUK'D -> REBUKED +1 RE -> REINTER +1 RANCOR -> RANCOUR +1 QUINCY -> QUINCEY +1 QUASI -> COURSE +1 QUART -> COURT +1 PYTHAGOREANS -> PYTHAGORIANS +1 PUTTIN -> PUTTING +1 PUT -> PUTTING +1 PURSE -> PERSON +1 PURPOSED -> PURPOSE +1 PURIST -> PUREST +1 PUNISHED -> PUNISH +1 PSALM -> SUM +1 PROVES -> PROVED +1 PROSELYTING -> PROSELY +1 PRODIGAL -> CHRONICAL +1 PRINCIPLE -> PRINCIPAL +1 PREVENT -> PRESENT +1 PRETENSE -> PRETENCE +1 PRECIEUSES -> PURSUS +1 PRECEDE -> PROCEED +1 PRE -> PRIESTHOO +1 PRACTICE -> PRACTISE +1 POSSESS -> POSSESSED +1 POISON'D -> POISONED +1 POINT -> BLINT +1 PLURAL -> PORAL +1 PLURAL -> PEARL +1 PLESIOSAURUS -> PLEAS +1 PLEASANCE -> PLEASANTS +1 PLEA -> PLEAD +1 PLATONISTS -> PLATINISTS +1 PLAITS -> PLATES +1 PLACE -> PLACES +1 PIERC'D -> PIERCED +1 PICK -> PIG +1 PICK -> PIC +1 PHILANTHROPIES -> ANTHROPIES +1 PHILADELPHIAN -> PHILADELPHIA +1 PHAEDRUS -> FEATURES +1 PH -> P +1 PERVERTERS -> PERVERTIVES +1 PERSON -> PERSONAL +1 PERCHES -> PURCHASE +1 PEGRENNE -> PEGRIN +1 PEGRE -> PEG +1 PEACE -> PIECE +1 PAUL -> POLITICS +1 PAUL -> PAW +1 PATIENTS -> PATIENCE +1 PATIENCE -> PATIENT +1 PATCH -> PAT +1 PASSAGE -> PASSAGEWAY +1 PASCHAL -> PASSION +1 PARTS -> PART +1 PARTICLES -> 
PARTICLE +1 PARSONS -> PARSON +1 PAROQUET -> PARAQUET +1 PARASITES -> PARRICIDES +1 PARALLELOGRAM -> PARALLELLOGRAM +1 PAPERS -> PAPER +1 PAPAL -> PEPPEL +1 PANTS -> HANDS +1 PALATE -> PALLET +1 OVER -> OF +1 OUTRAGE -> OUTRAGED +1 OUR -> HER +1 OUR -> A +1 OUGHTER -> ORDERS +1 OTTLEY'S -> OAKLEIGHS +1 OTHERWORLDLY -> OTHER +1 OTHERS -> OTHER +1 OTHER -> OTTER +1 OSH -> I +1 OSAGE -> O +1 ORDERED -> CANNOT +1 ORCHARD -> ARCHWOOD +1 ORANGE -> SAGE +1 OR -> OF +1 OR -> FOR +1 OR -> ARE +1 OR -> A +1 OPHELIA -> OF +1 ONTO -> ON +1 ONLY -> OMER +1 ONE -> SWUNG +1 ONE -> ONE'S +1 ON -> ANOTHER +1 ON -> ANGULATIONS +1 OLIVE'S -> ALL +1 OLIVE -> AH +1 OLAF -> OLOFF +1 OH -> I'LL +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICER'S +1 OFFICERS -> OFFICER +1 OFFENSES -> OFFENCES +1 OF -> O +1 OF -> IS +1 OF -> BANDS +1 OBSERVED -> OBSERVE +1 OAKS -> YOKES +1 O'ER -> OR +1 O -> A +1 NURSED -> NURSE +1 NOW -> NO +1 NOVEL'S -> NOVELS +1 NOUGHT -> NOT +1 NOTTINGHAM -> ARE +1 NOTHIN -> NOTHING +1 NOT -> NOTHING +1 NORTHWEST -> NORTH +1 NORTHWARDS -> NORTHWARD +1 NORTHERNERS -> DERPOOL +1 NORTH -> NORTHWEST +1 NOR -> OR +1 NOON -> NOONK +1 NO -> TODDY +1 NINE -> NOT +1 NIGHTFALL -> NIGHT +1 NEWCOMER -> NEW +1 NEW -> NEWBORN +1 NEVERBEND -> NEVERS +1 NEVERBEND -> NEVERBAND +1 NET -> NED +1 NELLY -> NELLIE +1 NEIGHBORHOOD -> NEIGHBOURHOOD +1 NEIGHBOR -> NEIGHBOUR +1 NECK -> NET +1 NEARER -> NEAR +1 NE'ER -> NEVER +1 NATTY -> NANNIE +1 NARES -> NEAR'S +1 NAOMI -> THEY +1 NAOMI -> NOW +1 NAOMI -> NAY +1 NAMED -> NAME +1 N -> THAN +1 MY -> I +1 MY -> BY +1 MUMMERIES -> MEMORIES +1 MOUNTED -> MOUNTAIN +1 MOTHERS -> MOTHER'S +1 MORMONS -> MORE +1 MORMONISM -> WOMANISM +1 MONTMARTRE -> MOUNT +1 MONTMARTRE -> MONTMARTRA +1 MONTFICHET -> MONTFICHE +1 MONTFICHET -> MARTFICHERE +1 MONGOOSE -> MONGOO'S +1 MOMBI -> MUMBIE +1 MOLDED -> MOULDED +1 MOHICAN -> MOHICANS +1 MO -> MOLE +1 MISTS -> MIST +1 MISTER -> THIS +1 MISTER -> MISS +1 MIST -> MISTS +1 MISSOURIANS -> MISSOURIENS +1 MISS -> MISTER +1 MISS -> MISSY +1 MISDEMEANOR -> MISDEMEANOUR +1 MINT -> MENT +1 MINE -> MIND +1 MILLION'D -> MILLIONED +1 MILLIMETER -> MILLIMETRE +1 MILLIGRAM -> MILAGRAM +1 MILITATED -> MITIGATED +1 MILES -> MYLES +1 METERS -> METRES +1 MESSRS -> MESSIERS +1 MESS -> MASS +1 MERSEY -> MERCY +1 MERRY -> MARRIED +1 MERGANSER -> MERGANCER +1 MERCHISTON -> MURCHISON +1 MEND -> MEN +1 MEN -> AMEN +1 MEET -> MET +1 MEALYBACK -> MEALLY +1 MEADOWCROFT -> MEADOWCROF +1 MAY -> THEY +1 MAY -> IS +1 MAUSOLEUM -> MUSOLEUM +1 MATE -> MADE +1 MASTERY -> MYSTERY +1 MASTER'S -> MASTERS +1 MARVELOUS -> MARVELLOUS +1 MARSHALLED -> MARSHALED +1 MARIVAUX -> MARIVAL +1 MARIVAUX -> MARAVAUX +1 MARIE'S -> MARI'S +1 MARIE -> MARI +1 MARAIS -> MARAY +1 MANY -> MEN +1 MANIFESTED -> MANIFEST +1 MAINHALL -> MEANHAW +1 MAID -> MAIDEN +1 MACDONALDS -> MC +1 MAC -> MICARTLE +1 LUIS -> LOUIS +1 LUBRICATE -> LUBRICADE +1 LOWER -> LOWERED +1 LOVE'S -> LOVES +1 LOUIS -> LOUISE +1 LOU'S -> LOOSE +1 LORNE -> LORN +1 LOOK -> LUCK +1 LOGARITHMS -> LOGARTHEMS +1 LOCRIS -> LOCHRIS +1 LOAD -> LOWED +1 LITER -> LATER +1 LINK'D -> LINKED +1 LINE -> LIE +1 LILLYS -> LILY'S +1 LILLY -> LILY +1 LILBURN -> LITTLE +1 LIGHT -> WRITE +1 LETS -> THAT'S +1 LESSER -> LESS +1 LEOCADIA'S -> UCADIUS +1 LEOCADIA'S -> LEUCEDES +1 LEOCADIA -> THE +1 LEOCADIA -> LOU +1 LEOCADIA -> LOCATIA +1 LEOCADIA -> LOCALIA +1 LEOCADIA -> LEOKADIA +1 LEOCADI -> LUCADIA +1 LEFRANK -> LE +1 LECOMPTE -> LECOMTE +1 LECOMPTE -> LE +1 LEAVING -> LEAPING +1 LEASED -> LEAST +1 LEADS -> LEAVES +1 LE -> LAUROI +1 LARKSPUR -> 
LARKSPER +1 LARKSPUR -> LARKSBURG +1 LANTHORN -> LANTERN +1 LAND -> LANDA +1 LAMBENT -> LAMENT +1 LALLIE -> LILY +1 LAKE -> LEEK +1 LAD -> WELL +1 LAD -> LA +1 LABOUR -> LABOR +1 KNOW -> KNOWS +1 KNIFE -> KNIF +1 KNEED -> NEED +1 KNEE -> KNEEP +1 KNAVE -> NAVE +1 KIRTLAND -> CURTALIND +1 KINGDOMS -> KINGDOM'S +1 KING -> KING'S +1 KICK -> KICKAPOOS +1 KESWICK -> KEZWICK +1 KEOGH -> KIEV +1 KAFFAR'S -> KAFFIRS +1 KAFFAR'S -> KAFFIR'S +1 JUS -> JUST +1 JOHN -> JOHNNIAGO +1 JEWELER'S -> JEWELERS +1 JAW -> JOB +1 JASPER -> JAPSER +1 JAIL -> DRALE +1 JAGO -> GIAGO +1 JAGO -> GEOGO +1 JACK -> JACKKNIFE +1 ITS -> IT'S +1 IT'S -> TO +1 IT -> YOU +1 IT -> TO +1 IT -> IT'LL +1 IT -> HE +1 IT -> AND +1 IS -> IT'S +1 IS -> IT +1 IRON'S -> IRONS +1 INVENTORS -> IN +1 INTRENCHMENT -> ENTRENCHMENT +1 INTERESTS -> ENTRANCE +1 INTENTS -> INTENSE +1 INTENT -> AND +1 INSURRECTIONISTS -> INSURRECTIONOUS +1 INNERLOCHY -> INERLOCHY +1 INNERLOCHY -> IN +1 INFANTS -> INFANT'S +1 INFANTILE -> INVENTILE +1 INDEED -> INDE +1 INCLOSED -> ENCLOSED +1 INCERTAINTY -> IN +1 INACTION -> AN +1 IN -> ON +1 IN -> OF +1 IN -> IMPERFECT +1 IMPRESSES -> IMPRESS +1 IMPRESSED -> IMPRESS +1 IMPRESS'D -> IMPRESSED +1 IMPEARLED -> IMPELLED +1 IKE -> LIKE +1 IF -> OF +1 IF -> AT +1 IDIOSYNCRATICALLY -> IDIOS +1 ICHTHYOSAURUS -> ITHUSORIS +1 ICHTHYOSAURUS -> IDEAS +1 ICHTHYOSAURUS -> ETHIOSORIS +1 I'M -> ON +1 I'LL -> I +1 I -> OUT +1 HYDRAS -> HYDRAST +1 HUSBAND -> HUSBABLY +1 HUNTLEY -> HUNTLY +1 HUMID -> HUMAN +1 HOUSECLEANING -> HOUSE +1 HOTBED -> HOT +1 HOSTESS -> HOSTES +1 HOST -> HOSE +1 HORTON -> WHARTON +1 HORSEPLAY -> HORSE +1 HORACE -> HORNS +1 HOPES -> HELPS +1 HOPE -> OH +1 HONOURABLY -> HONORABLY +1 HONOUR -> HONOR +1 HONORS -> HONOURS +1 HONORABLE -> HONED +1 HOLY -> WHOLLY +1 HOLOCAUST -> HOLOCOST +1 HOLD -> ALL +1 HIT -> HID +1 HIM -> LINE +1 HILDA'S -> HELDA'S +1 HILDA -> HELDA +1 HIGHEST -> HAS +1 HIDALGO -> HAD +1 HETTY -> HETTY'S +1 HERE -> THERE +1 HERACLEITUS -> HERACLITUS +1 HER -> THE +1 HER -> THAT +1 HENCHMEN -> HENCHMAN +1 HEN -> HANDLED +1 HELPED -> SELF +1 HELLO -> HALLO +1 HELD -> HUTTED +1 HEDGES -> ORANGES +1 HEARTY -> HARDY +1 HEART'S -> HEARTSEASE +1 HEART -> HARD +1 HEAR -> SEE +1 HEAR -> HERE +1 HE'S -> IS +1 HE'D -> HE +1 HE -> SHE +1 HE -> HIS +1 HAZEWRAPPED -> HAYES +1 HAY -> HEY +1 HAWTREY -> HALTREE +1 HAWK -> HOT +1 HAVING -> HEAVEN +1 HAVE -> HAS +1 HAVE -> HALF +1 HAS -> IS +1 HAS -> AS +1 HARTS -> HEARTS +1 HARRIED -> HURRIED +1 HARMONIZED -> HARMONIZE +1 HARKENED -> HEARKENED +1 HARBORING -> HARBOURING +1 HARANGUE -> HURRANG +1 HARALD -> HAROLD +1 HAR -> HOAR +1 HANNA -> HAD +1 HANGINGS -> HANGING +1 HANDS -> HANDSOME +1 HAMLET -> PANEL +1 HAM -> HIM +1 HALLOA -> HULLO +1 HAL -> HELLO +1 HAKON -> HAWKIN +1 HAIRDRESSER -> HAIR +1 HAD -> NOT +1 HAD -> IS +1 HAD -> HEAD +1 HAD -> HAVE +1 HAD -> AT +1 GUISE -> SKIES +1 GUESTS -> GUEST +1 GUESTS -> GUESS +1 GUEST -> GUESTS +1 GROWS -> GROVES +1 GRINGO -> GREENOW +1 GRIEFS -> GREEDS +1 GREY'S -> GRAY'S +1 GREEING -> GREEN +1 GREATER -> GREAT +1 GREAT -> GRATEFUL +1 GRAPEVINE -> GRAPE +1 GRAND -> GREAT +1 GRAMOPHONE -> GRAMMAPHONE +1 GRAM -> GRAHAM +1 GRADES -> GRATES +1 GOVERNMENT -> GOVERNOR +1 GOVERNED -> GOVERN +1 GOOBERS -> GOULD +1 GOLD -> GOL +1 GOAT'S -> GOATS +1 GOAT -> BOAT +1 GIVE -> GAVE +1 GIVE -> CUVE +1 GIRARD -> GERARD +1 GILCHRIST'S -> GILCHER'S +1 GILCHRIST -> GO +1 GIER -> GEAR +1 GIAOURS -> GUY +1 GHIP -> GIP +1 GEOFFREY'S -> JEFFREY'S +1 GEOFFREY -> JEFFREY +1 GENTLEMEN -> GENTLEMAN +1 GENERALLY -> GERALLY +1 GENERAL -> 
GENERALSHIP +1 GAYLY -> GAILY +1 GARDENS -> GARDEN'S +1 GAMEWELL -> GAME +1 FUTURISTIC -> FUTUREISTIC +1 FULNESS -> FULLNESS +1 FRONT -> FRON +1 FRISKILY -> FRISKLY +1 FRIEND -> BRAND +1 FRANCS -> FRANKS +1 FORWARDED -> FOOTED +1 FORMALLY -> FORMERLY +1 FOREVER -> FOR +1 FORBES'S -> FORTS +1 FOR -> WITH +1 FOR -> FOREVER +1 FOR -> FIR +1 FOR -> FALL +1 FOLLOWED -> FOWLED +1 FLUFFINOSE -> FLAPHANO'S +1 FLOUR -> FLOWER +1 FLIGHT -> FIGHT +1 FITZOOTH'S -> FITTOOTH'S +1 FIRS -> FURS +1 FIREBUGS -> FIRE +1 FIREBALL -> FIRE +1 FINE -> FIND +1 FIND -> FIVE +1 FINALLY -> FINELY +1 FILL -> FILLED +1 FIGHTER -> FRIGHTER +1 FETE -> FIGHT +1 FERDINANDO -> FERDINAND +1 FELT -> FELLED +1 FEES -> BEES +1 FEELS -> FILLS +1 FEELIN'S -> FEELINS +1 FEEL -> BILL +1 FEARED -> FEAR +1 FAVOR -> FAVOUR +1 FARTHEST -> FURTHEST +1 FARRAGUT -> FERRAGUT +1 FARMHOUSES -> FARM +1 FAR -> FARTHER +1 FALLEN -> FALL +1 FAIR -> FAIREST +1 EYE -> I +1 EVOLUTION -> REVOLUTION +1 EVERYDAY -> EVERY +1 EVER -> ARROW +1 EVENING -> EVEN +1 EVENIN'S -> EVENINGS +1 EVA -> EITHER +1 ESTATE -> STATE +1 ESTAFANIA -> ESTAFFANIA +1 ESTAFANIA -> DA +1 ESPRIT -> A +1 ESPECIAL -> SPECIAL +1 ESCHEATED -> ISTIATED +1 ERNEST -> EARNEST +1 ER -> A +1 EQUIP -> EQUIPPED +1 ENTRUSTING -> INTRUSTING +1 ENTRENCHED -> INTRENCHED +1 ENTHRALMENT -> ENTHRALIMENT +1 ENTERED -> ENTER +1 ENTER -> INTER +1 ENTER -> ENTERED +1 ENSURE -> INSURE +1 ENQUIRIES -> INQUIRIES +1 ENQUIRED -> INQUIRED +1 ENQUIRE -> INQUIRE +1 ENDEAVOR -> ENDEAVOUR +1 EMISSIONS -> ADMISSIONS +1 EMIL -> AMY +1 EMIL -> AM +1 EMIGRATION -> IMMIGRATION +1 EMIGRANT -> IMMIGRANT +1 EMERG'D -> EMERGED +1 EM -> THEM +1 ELSINORE -> ELSINOR +1 ELMO'S -> ABLE'S +1 ELECT -> ELEC +1 ELCHO -> ELKO +1 ELABORATE -> CELEBRATE +1 EFFECTED -> AFFECTED +1 EDITION -> ADDITION +1 EASTWARDS -> EASTWARD +1 EARSHOT -> EAR +1 E -> EVEN +1 DYKES -> DIKES +1 DURING -> DREWING +1 DUNNO -> DON'T +1 DUMPY -> DON'T +1 DUMAS -> DE +1 DUERER -> DURE +1 DRUGGIST'S -> DRUGGIST +1 DROPIDAS -> TROPIDAS +1 DRIPPING -> TRIPPING +1 DOWN -> DOWNSTAIRS +1 DOVES -> DOVE'S +1 DOUZE -> DUSPORT +1 DOOR -> DOORSTEP +1 DONATISTS -> DONATIST +1 DONA -> DORNEST +1 DOCTRESS -> DOCTRIS +1 DISTRICTS -> DISTRICT +1 DISMAYED -> DISMAYEDESTAFHANIA +1 DISHONOURED -> DISHONORED +1 DISCOLOURED -> DISCOLORED +1 DINAH'S -> DYNAS +1 DINAH -> DINA +1 DIFFERENCES -> DIFFERENCE +1 DID -> DO +1 DID -> DEAD +1 DIATRIBE -> DIETRIBE +1 DIAS -> DAIS +1 DEW -> DO +1 DEMEANOR -> DEMEANOUR +1 DELICATE -> DELEGATE +1 DELIBERATIVE -> DELIBERATE +1 DELIA -> GALLIA +1 DELIA -> DAHLIA +1 DEFINED -> THE +1 DEFINE -> TO +1 DEDALOS -> DELOS +1 DECENCY -> DECENCIES +1 DECEIVING -> SEEING +1 DE -> DETONICHALANT +1 DAWN'S -> DAWNS +1 DAIRY -> DEARIE +1 CYN -> SIN +1 CURVED -> CARVED +1 CRYSTAL -> CRISTEL +1 CROSSTREES -> CROSS +1 CRITIAS -> CRITUS +1 CRITIAS -> CRITIUS +1 CRESSWELLS -> CRUSTWELLS +1 CREIGHTON -> CRITON +1 CREIGHTON -> CRIGHTON +1 CRASWELLERS -> CRESTWELLERS +1 CRASWELLER -> CRUSWELLER +1 COURT'S -> COURTS +1 COUNTRY'S -> COUNTRY +1 COUNSELS -> COUNCILS +1 COUNSELLED -> COUNSEL +1 COULD -> COULDN'T +1 COSTS -> COST +1 CORRELATES -> COROLLETS +1 CORN -> CORNIERS +1 CORALIE -> CORALLY +1 COOK -> COPE +1 CONTROL -> CONTROLL +1 CONTI -> KANTI +1 CONTAINED -> CONTAINING +1 CONTACT -> CONDUCT +1 CONSTANTINE -> KONSTANTINE +1 CONSIDERATE -> CONSIDER +1 CONSID'BLE -> CONSIDERABLE +1 CONSCIOUS -> CONSCIENCE +1 CONQUERIN -> CONQUERING +1 CONJURER -> CONJUROR +1 CONDENSE -> CONTENSED +1 CONCERN -> CONCER +1 COMPOSSER -> COMPOSSIBLE +1 COMPOSE -> COMPOSED +1 
COMMENTATORS -> COMMON +1 COMMANDMENTS -> COMMANDS +1 COMING -> COMMON +1 COLOURS -> COLORS +1 COLORS -> COLOURS +1 COLORS -> COLLARS +1 COLORIST -> COLORLESS +1 COLORIST -> CHOLERIST +1 COLORED -> COLOURED +1 COLOR -> COLOUR +1 COLD -> CALLED +1 COAL -> CO +1 CO -> COEXIST +1 CLOUD -> CLOUDS +1 CLEW -> CLUE +1 CIVET -> SEVETTE +1 CITADELLED -> CITADELED +1 CIGARETTE -> SICK +1 CHRISTAIN -> CHRISTIAN +1 CHOICE -> CHOICES +1 CHINGACHGOOK -> CHINGACHOOK +1 CHIAROSCURISTS -> KIERRASCURISTS +1 CHIAROSCURIST -> CUIRASCURISTS +1 CHEROOT -> TROUT +1 CHECKER -> CHEQUER +1 CHATTERBOX -> CHATTER +1 CHARENTE -> NECHERANT +1 CHARACTERISTIC -> CORRECTORISTIC +1 CHANGE -> CHANGES +1 CHANGE -> CHANGED +1 CENTRED -> SENATE +1 CENTER -> CENTRE +1 CENDENARIES -> SENDIARIES +1 CEASD -> CEASED +1 CAUGHT -> THOUGHT +1 CAT -> HAT +1 CASE -> GASE +1 CARPACCIO'S -> CARPATIUS +1 CAPLESS -> CAPLICE +1 CANVASS -> CANVAS +1 CANDLE -> CANDLELIGHT +1 CAN -> COULD +1 CAN -> CANNOT +1 CALDWELL -> CAULDWELL +1 BYE -> BY +1 BUTTED -> BUDDED +1 BUT -> DO +1 BUT -> BY +1 BURN -> BURNE +1 BURGOYNE -> WERE +1 BUNNIT -> BUNNITT +1 BUL -> BULBUL +1 BUCHANAN -> YOU +1 BROWN -> OBROWN +1 BROUGHT -> WROUGHT +1 BROTHER -> BRETHREN +1 BRONTES -> BRONTEES +1 BRITANNULISTS -> BRITON +1 BRISK -> BRACE +1 BRINGING -> RINGING +1 BREAKFAS -> BREAKFAST +1 BREAD -> ABREAD +1 BRANDS -> BRINGS +1 BRANCH -> RANCH +1 BRAGELONNE -> BRIGALON +1 BRAGELONNE -> BRAGELONE +1 BRACTON'S -> BROCKTON'S +1 BOX -> BOXWOMEN +1 BOTANY -> BARTANY +1 BORDERS -> BORDER +1 BOOKKEEPER -> BITKEEPER +1 BOLLS -> BOWLS +1 BOAR -> BOREHOUND +1 BLUESKINS -> BLUESKIN +1 BLUE -> BLUESTS +1 BLESSINGS -> BLESSING +1 BLASTS -> BLAST +1 BIT -> GOOD +1 BIT -> BID +1 BILLYGOAT -> SPILLIGOAT +1 BILLED -> BUILD +1 BERGSON -> BERKSON +1 BERGSON -> BERGIN +1 BENCH -> PINCH +1 BEING -> MEAN +1 BEIN -> BEING +1 BEHAVIOURIST -> BEHAVIORIST +1 BEG -> BEGGED +1 BEFORE -> FOR +1 BEFIT -> BE +1 BEFAL -> BEFALL +1 BEELZEBUB -> IS +1 BEEHIVES -> BEE +1 BEEDER -> READER +1 BEEBE -> B +1 BEDIMMED -> BEDEMNED +1 BEATER -> PETER +1 BEACHED -> BEECHED +1 BATTLEAX -> ADELAX +1 BASKET -> BASCULADES +1 BALAAM'S -> BAYLIM'S +1 BAINS -> BANDUME +1 BADGES -> BADGERS +1 BADAUDERIE -> BADR'D +1 BABIRUSA -> BABRUSA +1 AWHILE -> A +1 AWARE -> WHERE +1 AVAILABLE -> AVALUABLE +1 AUNT -> AND +1 AU -> OKARRANT +1 ATTITUDE -> SATITUDE +1 ATTENDANTS -> ATTENDANCE +1 ATTENDANCE -> ATTENDANTS +1 ATMOSPHERIC -> ATMOSPHERE +1 ATHOLEMEN -> ETHEL +1 ATHLETE -> ADETE +1 ATHENAIS -> ETHNEE +1 ATHENAIS -> ETHINAY +1 ATCHISON -> ATTITSON +1 AT -> THAT +1 AT -> SAID +1 AT -> IT +1 AT -> AND +1 ASTOR -> ASTRO +1 ASSEMBLED -> A +1 ASCENDENCY -> A +1 AS -> A +1 ARRONDISSEMENT -> ARE +1 ARRIVING -> RIVING +1 ARRESTS -> ARREST +1 AROUND -> ROUND +1 ARMOUR -> ARMOR +1 ARMED -> ARM +1 ARGYLE -> OUR +1 ARE -> OR +1 ARE -> ALL +1 ARDOUR -> ARDOR +1 ARC -> ARK +1 APPROVES -> ME +1 APPRENTICE -> APPRENTICED +1 APPEALED -> APPEAL +1 ANYWHERE -> MANY +1 ANYMORE -> ANY +1 ANY -> ANYTHING +1 ANTEDATING -> ANTETING +1 ANSWERD -> ANSWERED +1 ANNALS -> ANNAL +1 ANGELS -> ANGEL +1 ANDERS -> ANDRES +1 ANDERS -> ANDRE +1 ANDELLA -> ANNE +1 ANDELLA -> ANDDELA +1 ANDELLA -> AND +1 ANDELLA -> AMDELLA +1 AND -> ONE +1 AND -> INDEED +1 AND -> ENTHRIBING +1 AND -> AT +1 AND -> AN +1 AND -> AM +1 AND -> A +1 ANAXAGORAS -> AN +1 AN -> ON +1 AN -> IN +1 AMPHITHEATER -> AMPHITHEATRE +1 AMASS -> A +1 ALTERNATIVE -> ALL +1 ALREADY -> ALL +1 ALONE -> ALO +1 ALLUVION -> ALLUVIAN +1 ALL -> ALTOGETHER +1 ALL -> ALREADY +1 ALBANS -> ALBAN'S +1 AIR -> HEIR +1 
AIGNAN -> DAN +1 AID -> AIDS +1 AH -> A +1 AFTERDECK -> AFTER +1 AFFRIGHTENED -> A +1 AFFILIATED -> ARE +1 AFFECT -> EFFECT +1 ADVENTURE -> ADVENTURER +1 ADONA -> ADONNA +1 ACTOR -> ACTOR'S +1 ACKNOWLEDGEMENT -> ACKNOWLEDGMENT +1 ACCOUTREMENTS -> ACCUTMENTS +1 ACCOUNT -> ACCOUN +1 ACCENTS -> ACCENT +1 ABOLITIONISTS -> ABOLITIONIST +1 ABDUCTION -> ADOCTION +1 ABBE -> ABBEY +1 A -> UPON +1 A -> UNNOTTINGHAM +1 A -> TO +1 A -> HER +1 A -> HE +1 A -> ESPECIAL +1 A -> AWAY +1 A -> ATTORIAN +1 A -> AS +1 A -> ARREST +1 A -> AN +1 A -> ACCORD + +DELETIONS: count ref +9 IS +9 A +7 AND +4 CHARENTE +3 OF +3 IN +2 YOU +2 YARD +2 WILL +2 WAY +2 TO +2 THE +2 STAIRS +2 N +2 IT +2 HIM +2 E +2 DE +2 AM +1 YOU'LL +1 WOMEN +1 WITH +1 WEST +1 WELL +1 WE +1 VINES +1 UNC +1 UD +1 TRILOGIES +1 TORY +1 TONNAY +1 TOGETHER +1 THROUGH +1 THRIVING +1 THINKING +1 THING +1 TAKES +1 T +1 STONES +1 STEP +1 SPECIAL +1 SHIP +1 SEERS +1 ROI +1 REST +1 READY +1 PROVIDING +1 PRIVATELY +1 PORTES +1 POPHAM +1 POOS +1 POND +1 PERFECT +1 OTHER +1 OLD +1 NOT +1 MUCH +1 MER +1 LORD +1 LOOK +1 LOADS +1 LO +1 LIGHT +1 L +1 KNIFE +1 JAGO +1 HUMPH +1 HOUND +1 HIS +1 HAVE +1 HAD +1 GONE +1 GOAT +1 GALATIANS +1 FIND +1 EXIST +1 EVER +1 ESTAFANIA +1 ENTER +1 EASE +1 EARS +1 DO +1 DENSITY +1 DEED +1 DARK +1 D'ESTE +1 CUTTERS +1 COURANT +1 CLOTH +1 CHORD +1 CHARLES +1 C +1 BUL +1 BOYS +1 BORN +1 BIRDS +1 B +1 AT +1 AS +1 ARDLE + +INSERTIONS: count hyp +11 ONE +11 A +9 THE +8 IS +5 IT +5 HAVE +5 AND +4 IN +4 DAY +4 AXE +4 ARE +4 AM +3 OF +3 ME +3 HALL +2 WORTH +2 WILL +2 TIME +2 THAT +2 OTHER +2 NULA +2 NIGHT +2 MAKE +2 ILL +2 I +2 HIS +2 FOR +2 FIND +2 AS +1 ZAY +1 YOU +1 WRAPPED +1 WORLDLY +1 WHILE +1 WHERE +1 WEST +1 WAY +1 WAS +1 WARILY +1 VINE +1 VIEW +1 VENORS +1 UNDISSIMA +1 ULYSTS +1 TURNED +1 TREES +1 TOP +1 TO +1 THESE +1 TENT +1 TEACHERS +1 SPREE +1 SIMPLED +1 SHOT +1 SCENE +1 ROOM +1 RED +1 READY +1 PROVES +1 POOL +1 PLAY +1 OWE +1 OVER +1 ORS +1 ORDER +1 ONLY +1 ON +1 OCCAS +1 NOSED +1 NOSE +1 NOR +1 MORE +1 MILL +1 MEN'S +1 MEN +1 MAU +1 MASS +1 MARSHRA +1 LOCKY +1 LOCATED +1 LIKE +1 LED +1 LEAFS +1 KNOW +1 KATYA +1 HYMN +1 HOUSES +1 HIVES +1 HIRESUM +1 HIM +1 HE +1 HAD +1 GUILE +1 GROUND +1 GREE +1 GOING +1 FRIGHTENED +1 FREEZED +1 FRANK +1 FIT +1 FILIATED +1 FANIA +1 FALL +1 EXAGGERUS +1 EVER +1 EULO +1 DRESSER +1 DONALDS +1 DERPOOL +1 DELLA +1 DELA +1 DECK +1 D +1 CRATICALLY +1 COMTE +1 COMER +1 CLEANING +1 CHRIST +1 CERTAINTY +1 CASE +1 BURN +1 BUGS +1 BOX +1 BOOKS +1 BIELDS +1 BEGIRT +1 BED +1 BE +1 BALL +1 BACK +1 AT +1 AN +1 ALGO +1 ACTION +1 ABOVE +1 ABILITY + +PER-WORD STATS: word corr tot_errs count_in_ref count_in_hyp +AND 1740 88 1787 1781 +A 1125 84 1166 1168 +THE 3438 72 3461 3487 +IN 875 71 905 916 +IS 446 37 468 461 +TO 1330 31 1340 1351 +OF 1790 30 1799 1811 +I 708 26 711 731 +AN 154 20 165 163 +ONE 187 18 191 201 +IT 549 18 558 558 +THAT 602 17 610 611 +O 4 15 14 9 +AS 379 14 383 389 +ARE 178 14 182 188 +HE 522 13 526 531 +MEN 57 12 62 64 +HAD 315 12 321 321 +YOU 416 11 418 425 +WERE 184 10 186 192 +THIS 255 10 263 257 +THEY 204 10 209 209 +FOR 416 10 420 422 +AT 279 10 284 284 +WILL 139 9 143 144 +WHERE 44 9 49 48 +TWO 64 9 71 66 +ON 277 9 279 284 +OH 29 9 33 34 +HAVE 212 9 215 218 +THERE 135 8 137 141 +THEIR 169 8 173 173 +RODOLFO 0 8 8 0 +MAN 63 8 67 67 +HAS 104 8 108 108 +ANY 84 8 85 91 +AM 57 8 59 63 +WAS 576 7 579 580 +TIMAEUS 2 7 9 2 +OR 172 7 176 175 +I'M 28 7 33 30 +HIM 212 7 215 216 +HER 319 7 324 321 +ANDERS 4 7 11 4 +ALL 223 7 225 228 +THAN 86 6 88 90 +OTHER 63 6 65 67 +ITS 81 6 82 86 +INTO 102 6 
104 106 +I'VE 17 6 23 17 +HIS 472 6 473 477 +DE 5 6 10 6 +ANYONE 0 6 6 0 +WITH 421 5 424 423 +WHEN 130 5 133 132 +WELL 72 5 75 74 +WE 150 5 152 153 +TOO 60 5 61 64 +TONNAY 0 5 5 0 +THEN 121 5 125 122 +THEL 0 5 5 0 +SYLVIA 0 5 0 5 +SOME 86 5 87 90 +SOLMES 0 5 0 5 +SOAMES 0 5 5 0 +SILVIA 0 5 5 0 +SHE 279 5 281 282 +OUR 79 5 81 82 +NOT 336 5 338 339 +NEW 34 5 35 38 +LEOCADIA 1 5 6 1 +JAGO 0 5 5 0 +IT'S 26 5 29 28 +GALATIANS 1 5 6 1 +FIND 20 5 22 23 +CHARENTE 0 5 5 0 +BATTLEAX 0 5 5 0 +WOULD 139 4 141 141 +WAY 71 4 73 73 +TIME 85 4 87 87 +THROUGH 40 4 42 42 +THEM 119 4 122 120 +SEEMED 29 4 30 32 +RED 18 4 19 21 +PRACTISE 1 4 4 2 +PRACTICE 6 4 7 9 +NOW 91 4 94 92 +NO 166 4 169 167 +METRE 0 4 0 4 +METER 7 4 11 7 +ME 184 4 184 188 +MAINHALL 0 4 4 0 +LEAVENWORTH 0 4 4 0 +KNOW 75 4 76 78 +HELLO 1 4 2 4 +HE'S 5 4 9 5 +GREY 1 4 3 3 +GRAY 3 4 5 5 +GALLATIONS 0 4 0 4 +EVERY 31 4 31 35 +ENTER 6 4 9 7 +EMIL 0 4 4 0 +DO 93 4 94 96 +DAY 50 4 50 54 +CRESSWELL 0 4 4 0 +BY 248 4 248 252 +BUT 340 4 344 340 +BATTLE 6 4 6 10 +AXE 1 4 1 5 +ANDELLA 0 4 4 0 +XAVIER 0 3 3 0 +WHOSE 13 3 14 15 +WHO 153 3 154 155 +WEST 6 3 7 8 +WE'LL 4 3 6 5 +VALLIERS 0 3 0 3 +VALLIERE 0 3 3 0 +TRAVELLING 0 3 0 3 +TRAVELING 0 3 3 0 +TOWARDS 16 3 19 16 +TOWARD 8 3 8 11 +TODAY 0 3 3 0 +THOU 18 3 18 21 +THORKEL 0 3 3 0 +THEE 27 3 30 27 +THEATRE 2 3 2 5 +THEATER 0 3 3 0 +TABU 0 3 3 0 +SOMETIME 0 3 2 1 +SOLON 1 3 4 1 +SOLEMN 1 3 1 4 +SEE 64 3 65 66 +SCHOOL 9 3 11 10 +RUDOLPHO 0 3 0 3 +ROUND 15 3 17 16 +REMAINED 4 3 6 5 +READY 9 3 10 11 +READ 16 3 18 17 +PLATONISTS 0 3 3 0 +OFFICERS 8 3 10 9 +NIGHT 24 3 24 27 +NEIGHBOUR 0 3 2 1 +NEIGHBOR 0 3 1 2 +NAOMI 2 3 5 2 +N 1 3 4 1 +MISTER 46 3 48 47 +MISS 16 3 18 17 +MAY 54 3 56 55 +MAIN 3 3 3 6 +LE 0 3 1 2 +KAFFIR 0 3 0 3 +KAFFAR 0 3 3 0 +ICHTHYOSAURUS 0 3 3 0 +HONORABLE 1 3 2 3 +HOLLAND 0 3 0 3 +HOLLAN 0 3 3 0 +HALLO 0 3 2 1 +HALL 9 3 9 12 +GUESTS 3 3 5 4 +GREAT 73 3 74 75 +GILCHRIST 0 3 3 0 +FELL 16 3 16 19 +FALL 2 3 2 5 +FAIR 6 3 7 8 +EVERYONE 0 3 3 0 +EVER 33 3 35 34 +ESTAFANIA 0 3 3 0 +E 0 3 3 0 +DID 65 3 67 66 +COURT 10 3 12 11 +COLOUR 0 3 2 1 +COLORS 1 3 3 2 +COLOR 9 3 10 11 +BRITANNULA 0 3 3 0 +BRITAIN 1 3 1 4 +BE 314 3 314 317 +BANNISTER 0 3 3 0 +BANISTER 0 3 0 3 +AROUND 11 3 12 13 +ANOTHER 34 3 36 35 +ANDREWS 0 3 0 3 +YOU'RE 3 2 5 3 +YOU'LL 7 2 9 7 +YE 6 2 8 6 +YARD 3 2 5 3 +YAGO 0 2 0 2 +WYLDER 3 2 5 3 +WORTH 4 2 4 6 +WORSE 5 2 6 6 +WOOD 3 2 4 4 +WON 2 2 2 4 +WOMAN'S 0 2 2 0 +WILDER 0 2 0 2 +WIDOWS 0 2 0 2 +WHITTAWS 0 2 2 0 +WHILE 34 2 34 36 +WHAT 112 2 113 113 +WHALE 2 2 4 2 +WESTPORT 0 2 2 0 +WE'RE 1 2 3 1 +VERY 82 2 83 83 +VAPOURS 0 2 2 0 +VAPORS 0 2 0 2 +VANDERPOOL 0 2 2 0 +VAN 2 2 2 4 +UPON 93 2 94 94 +UP 108 2 110 108 +UNC 3 2 5 3 +TRAVELLERS 0 2 0 2 +TORQUAL 0 2 0 2 +TOP 10 2 11 11 +TONIGHT 0 2 2 0 +THOUGHT 53 2 54 54 +THOUGH 32 2 33 33 +THINKING 7 2 8 8 +THING 21 2 22 22 +THESE 68 2 68 70 +THAT'S 13 2 14 14 +T 0 2 1 1 +SYMPOSIUM 0 2 2 0 +STATE 26 2 27 27 +STAIRS 6 2 8 6 +SPECIAL 1 2 2 2 +SOUTHEY 0 2 2 0 +SOMEONE 1 2 3 1 +SO 196 2 197 197 +SMILED 3 2 4 4 +SINCE 24 2 25 25 +SIN 12 2 13 13 +SILENCE 7 2 7 9 +SHOULD 59 2 61 59 +SHOT 2 2 2 4 +SHIP 7 2 9 7 +SHE'S 4 2 6 4 +SEEMS 11 2 12 12 +SEEM 11 2 13 11 +SAID 160 2 161 161 +REST 13 2 14 14 +REMAIN 5 2 6 6 +READER 1 2 1 3 +RE 0 2 1 1 +PUTTING 7 2 7 9 +PROVES 1 2 2 2 +PLURAL 0 2 2 0 +PLATANISTS 0 2 0 2 +PICK 1 2 3 1 +PERSON 12 2 13 13 +PAUL 13 2 15 13 +PATIENCE 1 2 2 2 +PARLOUR 0 2 0 2 +PARLOR 0 2 2 0 +OVER 58 2 59 59 +ONLY 76 2 77 77 +NULA 0 2 0 2 +NOTHING 33 2 33 35 +NORTHWEST 0 2 1 1 +NORTH 7 2 8 8 +NOR 20 2 21 21 +NEVERBEND 4 2 6 4 
+NET 0 2 1 1 +NEO 1 2 3 1 +MY 223 2 225 223 +MUNNY 0 2 2 0 +MORNING 21 2 21 23 +MORNIN 0 2 2 0 +MORE 119 2 119 121 +MONTMARTRE 0 2 2 0 +MONTFICHET 7 2 9 7 +MONEY 5 2 5 7 +MISTS 2 2 3 3 +MIST 4 2 5 5 +MASS 2 2 2 4 +MARSHALL 1 2 3 1 +MARSHAL 1 2 1 3 +MARIVAUX 0 2 2 0 +MANY 40 2 41 41 +MAKE 40 2 40 42 +MADAME 4 2 4 6 +MADAM 1 2 3 1 +LOUIS 1 2 2 2 +LOOK 30 2 32 30 +LINE 12 2 13 13 +LILY 2 2 2 4 +LIKE 105 2 105 107 +LIGHT 37 2 39 37 +LEVINWORTH 0 2 0 2 +LEVIN 0 2 0 2 +LEOCADIA'S 0 2 2 0 +LECOMPTE 0 2 2 0 +LARKSPUR 0 2 2 0 +LAD 1 2 3 1 +KNIFE 8 2 10 8 +KARL 0 2 0 2 +KAFFAR'S 0 2 2 0 +INNERLOCHY 0 2 2 0 +INDEED 28 2 29 29 +IMPRESSED 3 2 4 4 +IMPRESS 0 2 0 2 +ILL 6 2 6 8 +IF 129 2 131 129 +I'LL 13 2 14 14 +I'D 1 2 3 1 +HOT 3 2 3 5 +HORSE 6 2 6 8 +HONOURABLE 1 2 3 1 +HOLBINE 0 2 0 2 +HOLBEIN 0 2 2 0 +HERE 69 2 70 70 +HEAR 18 2 20 18 +HANDS 16 2 17 17 +GUEST 3 2 4 4 +GOAT 5 2 7 5 +GIVE 28 2 30 28 +GILGRIST 0 2 0 2 +FOUNDED 3 2 5 3 +FOUND 21 2 21 23 +FOREVER 1 2 2 2 +FIRE 22 2 22 24 +FILLING 0 2 0 2 +FIGHT 3 2 3 5 +FEELING 9 2 11 9 +FAVOURITE 1 2 1 3 +FAVORITE 2 2 4 2 +FAIRVIEW 0 2 2 0 +EVEN 51 2 51 53 +ESPECIAL 0 2 1 1 +ENTERED 20 2 21 21 +DON'T 38 2 38 40 +DISSENT 0 2 2 0 +DESCENT 2 2 2 4 +DERPOOL 0 2 0 2 +DELIA 0 2 2 0 +DEFINED 1 2 2 2 +DEDALUS 0 2 2 0 +DAEDALUS 0 2 0 2 +CRITIAS 0 2 2 0 +CRESWELL 0 2 0 2 +CREIGHTON 0 2 2 0 +CRASWELL 0 2 0 2 +COURTYARD 4 2 4 6 +COULD 94 2 95 95 +COMMON 8 2 8 10 +COLOURS 0 2 1 1 +COLORIST 0 2 2 0 +CO 0 2 1 1 +CHEQUE 0 2 0 2 +CHECK 6 2 8 6 +CHASE 1 2 1 3 +CHANGE 7 2 9 7 +CHAISE 0 2 2 0 +CASE 15 2 16 16 +CARL 0 2 2 0 +CANNOT 16 2 16 18 +CAN 64 2 66 64 +BURN 3 2 4 4 +BUL 0 2 2 0 +BREAK 3 2 3 5 +BRAKE 1 2 3 1 +BRAGELONNE 0 2 2 0 +BOYS 5 2 6 6 +BOX 7 2 8 8 +BIT 7 2 9 7 +BERGSON 0 2 2 0 +BEING 39 2 40 40 +BEHAVIOUR 0 2 2 0 +BEHAVIOR 0 2 0 2 +B 1 2 2 2 +AYE 0 2 2 0 +AY 0 2 2 0 +ATTENDANTS 0 2 1 1 +ATTENDANCE 0 2 1 1 +ATHENAIS 0 2 2 0 +ARREST 1 2 1 3 +ANDREW'S 0 2 0 2 +AMYL 0 2 0 2 +ALREADY 21 2 22 22 +ALEXANDRA 1 2 3 1 +ALEXANDER 13 2 13 15 +AH 6 2 7 7 +ZORAS 0 1 0 1 +ZORA'S 0 1 1 0 +ZORA 2 1 3 2 +ZOOF'S 1 1 2 1 +ZEVIOUR 0 1 0 1 +ZEVIER 0 1 0 1 +ZEF'S 0 1 0 1 +ZAY 0 1 0 1 +ZAVIER 0 1 0 1 +YOUNG 43 1 43 44 +YOKES 0 1 0 1 +YEARNING 1 1 2 1 +YEARNIN 0 1 0 1 +WROUGHT 2 1 2 3 +WRITE 4 1 4 5 +WRAPPED 0 1 0 1 +WORST 3 1 4 3 +WORLDLY 0 1 0 1 +WORE 3 1 3 4 +WOODCUTTERS 0 1 0 1 +WOODBEGIRT 0 1 1 0 +WONDERING 1 1 2 1 +WONDER 7 1 7 8 +WOMEN'S 1 1 1 2 +WOMEN 7 1 8 7 +WOMANISM 0 1 0 1 +WOMAN' 0 1 0 1 +WITHES 0 1 1 0 +WINTER 4 1 5 4 +WINNER 0 1 0 1 +WIND 8 1 8 9 +WIFE 16 1 17 16 +WIDTHS 0 1 0 1 +WIDTH 0 1 1 0 +WIDOWED 0 1 0 1 +WIDOW 1 1 1 2 +WID 0 1 0 1 +WHOLLY 9 1 9 10 +WHO'S 1 1 2 1 +WHITTAWD 0 1 1 0 +WHITTAW 0 1 1 0 +WHITE 23 1 23 24 +WHISKED 0 1 0 1 +WHISK 0 1 1 0 +WHIRLPOOL 1 1 2 1 +WHIPPED 1 1 2 1 +WHIP 0 1 0 1 +WHICH 216 1 216 217 +WHETHER 23 1 23 24 +WHERE'S 0 1 0 1 +WHATSOEVER 1 1 1 2 +WHATEVER 12 1 13 12 +WHAT'S 4 1 5 4 +WHARTON 0 1 0 1 +WESTWARD 1 1 1 2 +WESTMIR 0 1 0 1 +WESTMERE 0 1 1 0 +WELCOMED 0 1 1 0 +WELCOME 6 1 6 7 +WEEKLY 0 1 0 1 +WEDNESDAY 1 1 2 1 +WEATHER 5 1 6 5 +WEAR 4 1 5 4 +WEAKLY 0 1 1 0 +WE'VE 2 1 2 3 +WAVES 6 1 7 6 +WATRY 1 1 2 1 +WATERMILL 0 1 1 0 +WATER 19 1 19 20 +WATCHERY 0 1 0 1 +WASTES 0 1 0 1 +WASTE 4 1 5 4 +WARLIKE 0 1 1 0 +WARILY 0 1 0 1 +WAR 5 1 5 6 +WANDERING 2 1 2 3 +WANDER 1 1 2 1 +WAIL 0 1 0 1 +VOUCHED 0 1 1 0 +VOUCH 0 1 0 1 +VOICE 17 1 18 17 +VISITORS 4 1 5 4 +VISITOR 2 1 2 3 +VIPEROUS 0 1 0 1 +VINEYARD 0 1 0 1 +VINES 0 1 1 0 +VINE 0 1 0 1 +VILLEROY 0 1 1 0 +VILLEROI 0 1 0 1 +VILLA 0 1 1 0 +VIGNETTE 0 1 1 0 +VIEW'S 0 1 0 1 +VIEW 2 1 2 3 +VIEDUC 
0 1 0 1 +VICARIOUS 3 1 4 3 +VIADUCT 0 1 1 0 +VERSE 1 1 2 1 +VERNE'S 0 1 1 0 +VERN'S 0 1 0 1 +VERIMENT 0 1 0 1 +VENORS 0 1 0 1 +VEINS 0 1 0 1 +VEILS 1 1 1 2 +VAUDOIS 0 1 1 0 +VARIABILITY 1 1 2 1 +VANES 0 1 1 0 +VALIDESTE 0 1 0 1 +VALES 2 1 3 2 +UTAH 1 1 2 1 +UPSTAIRS 3 1 3 4 +UNWARILY 0 1 1 0 +UNTO 2 1 3 2 +UNNOTTINGHAM 0 1 0 1 +UNLIKE 0 1 1 0 +UNJUSTI 0 1 0 1 +UNJUST 1 1 2 1 +UNDISSIMA 0 1 0 1 +UNDERGROUND 0 1 1 0 +UNCLENCHED 0 1 1 0 +UN 0 1 1 0 +ULYSTS 0 1 0 1 +UDOLPHO 0 1 0 1 +UD 0 1 1 0 +UCADIUS 0 1 0 1 +TYPE 1 1 1 2 +TWITE 0 1 1 0 +TUTRILOGIES 0 1 0 1 +TURNOVER 0 1 1 0 +TURNED 21 1 21 22 +TURN 18 1 18 19 +TUPPENY 0 1 1 0 +TUPPENNY 0 1 0 1 +TUMBLED 2 1 3 2 +TUMBLE 0 1 0 1 +TUBS 0 1 0 1 +TROUT 1 1 1 2 +TROUBLE 8 1 8 9 +TROT 4 1 5 4 +TROPIDAS 0 1 0 1 +TRIPPING 0 1 0 1 +TRILOGIES 0 1 1 0 +TREES 19 1 19 20 +TREDDLESTONE 0 1 0 1 +TREDDLESTON 0 1 1 0 +TREBLE 0 1 1 0 +TRAVESTY 0 1 1 0 +TRAVELERS 0 1 1 0 +TRAVELER 0 1 1 0 +TRAVASTY 0 1 0 1 +TOWNE 0 1 1 0 +TOWN 6 1 6 7 +TOWELLING 0 1 1 0 +TOWELINGS 0 1 0 1 +TOULD 0 1 1 0 +TOTTY 2 1 3 2 +TORY 0 1 1 0 +TORKLE 0 1 0 1 +TORE 0 1 0 1 +TOPSY 0 1 0 1 +TOPS 3 1 4 3 +TOOMS 0 1 1 0 +TONISON 0 1 0 1 +TONE 5 1 5 6 +TON 0 1 0 1 +TOMBS 0 1 0 1 +TOILETTE 0 1 1 0 +TOILET 0 1 0 1 +TOGETHER 15 1 16 15 +TODDY 0 1 0 1 +TIRESOME 1 1 2 1 +TINTORET 0 1 1 0 +TINTINT 0 1 1 0 +TINTARETTE 0 1 0 1 +TINTANT 0 1 0 1 +TINACHANT 0 1 0 1 +TIMIUS 0 1 0 1 +TIMIRAS 0 1 0 1 +TIMES 20 1 21 20 +TIMAIRS 0 1 0 1 +TIBI 0 1 1 0 +TIBBY 0 1 0 1 +THUS 20 1 21 20 +THROWN 4 1 4 5 +THRONE 3 1 4 3 +THRO 0 1 1 0 +THRIVING 0 1 1 0 +THOUSANDTH 0 1 0 1 +THOUSAND 12 1 13 12 +THORLEIF 0 1 1 0 +THINK 52 1 52 53 +THINGS 33 1 34 33 +THEY'RE 2 1 3 2 +THEREFORE 19 1 20 19 +THERE'S 12 1 12 13 +THEE'S 0 1 1 0 +THANKING 2 1 3 2 +THANK 12 1 13 12 +TERENTIAL 0 1 0 1 +TENT 5 1 5 6 +TENEAS 0 1 0 1 +TECHNIQUE 0 1 1 0 +TEARS 11 1 11 12 +TEACHERS 0 1 0 1 +TEA 2 1 3 2 +TAYLOR 6 1 7 6 +TARANTULA 0 1 1 0 +TALKERS 0 1 1 0 +TALK 19 1 19 20 +TAKING 7 1 7 8 +TAKES 2 1 3 2 +TAKEN 14 1 15 14 +TAILOR 0 1 0 1 +TABOU 0 1 0 1 +TABOO 0 1 0 1 +TABLECLOTH 0 1 0 1 +TABLE 23 1 24 23 +SWUNG 0 1 0 1 +SWOONS 0 1 1 0 +SWOON 3 1 3 4 +SWEPT 1 1 1 2 +SWEEP 1 1 2 1 +SWAY 1 1 1 2 +SWAN 0 1 1 0 +SUSPICIONS 2 1 3 2 +SUSPICION 4 1 4 5 +SURVIVED 0 1 0 1 +SURVIVE 1 1 2 1 +SURFACES 0 1 1 0 +SUPPOSITION 0 1 1 0 +SUMNER 0 1 1 0 +SUMMONED 3 1 3 4 +SUMMON 0 1 1 0 +SUMMERS 0 1 1 0 +SUMMER'S 2 1 2 3 +SUMMER 6 1 6 7 +SUM 1 1 1 2 +SUITCASE 0 1 1 0 +SUIT 4 1 4 5 +STUDY 12 1 12 13 +STROLL 2 1 3 2 +STREAMLINE 0 1 1 0 +STREAM 1 1 1 2 +STRAW 1 1 1 2 +STORY'S 0 1 1 0 +STORMED 0 1 0 1 +STORIES 3 1 3 4 +STONES 2 1 3 2 +STEVE 0 1 0 1 +STEP 6 1 7 6 +STEELED 0 1 0 1 +STEEL'D 0 1 1 0 +STEED 1 1 1 2 +STEADY 4 1 5 4 +STAY 11 1 11 12 +STATUNE 0 1 0 1 +STATES 6 1 6 7 +STARVED 0 1 1 0 +STARTS 0 1 1 0 +START 3 1 3 4 +STANDS 2 1 3 2 +STAND 13 1 13 14 +STAID 0 1 1 0 +STAGE 5 1 6 5 +SQUI 0 1 0 1 +SQUEAK 1 1 2 1 +SPRING 7 1 8 7 +SPREE 0 1 0 1 +SPRANG 3 1 3 4 +SPRAGUE 0 1 1 0 +SPOKE 14 1 15 14 +SPO 0 1 0 1 +SPLENDOUR 0 1 0 1 +SPLENDOR 0 1 1 0 +SPLENDID 9 1 9 10 +SPLENDET 0 1 1 0 +SPIN 0 1 1 0 +SPILLIGOAT 0 1 0 1 +SPEND 2 1 2 3 +SPECIALIZED 0 1 0 1 +SPECIALISED 0 1 1 0 +SP 0 1 0 1 +SOUTHEY'S 0 1 1 0 +SOUS 0 1 0 1 +SOU 0 1 1 0 +SORA 0 1 0 1 +SOOTHED 0 1 1 0 +SOOTHE 1 1 1 2 +SON 14 1 15 14 +SOLVED 1 1 2 1 +SOLOQUY 0 1 0 1 +SOLON'S 0 1 1 0 +SOLILOQUY 4 1 5 4 +SODELITY 0 1 0 1 +SODALITY 0 1 1 0 +SOCRATIC 0 1 1 0 +SOBBED 0 1 0 1 +SNUBNOSED 0 1 1 0 +SNUB 0 1 0 1 +SMILD 0 1 1 0 +SMIL 0 1 0 1 +SMELLS 1 1 2 1 +SLING 0 1 0 1 +SLEEVE 0 1 1 0 +SLANG 11 1 12 11 +SKURA 0 1 0 1 +SKILLFUL 0 1 1 
0 +SKILFUL 0 1 0 1 +SKIES 0 1 0 1 +SKEPTICAL 0 1 1 0 +SIZED 1 1 1 2 +SIZE 4 1 5 4 +SITTING 4 1 4 5 +SIT 11 1 11 12 +SIR 35 1 36 35 +SIMPOSIUM 0 1 0 1 +SIMPOSE 0 1 0 1 +SIMPLED 0 1 0 1 +SIMMONS 0 1 0 1 +SILENT 10 1 11 10 +SIGHTSEERS 0 1 0 1 +SIGHT 19 1 20 19 +SIGHED 3 1 4 3 +SIFT 0 1 0 1 +SIF 1 1 2 1 +SIDE 23 1 23 24 +SICK 2 1 2 3 +SHOWRING 0 1 1 0 +SHOWERING 0 1 0 1 +SHOWED 5 1 5 6 +SHOULDST 0 1 0 1 +SHODY 0 1 0 1 +SHODDY 0 1 1 0 +SHIP'S 0 1 0 1 +SHEWD 0 1 1 0 +SHERIFF'S 3 1 3 4 +SHERIFF 3 1 4 3 +SHARPS 0 1 1 0 +SHARPEST 1 1 1 2 +SHARP'ST 0 1 1 0 +SHARP'S 0 1 0 1 +SHANNON 2 1 3 2 +SHAN'T 0 1 1 0 +SHAN 0 1 0 1 +SHALT 2 1 2 3 +SHALL 43 1 44 43 +SHA'N'T 0 1 0 1 +SEX 2 1 2 3 +SEWED 1 1 1 2 +SEVETTE 0 1 0 1 +SEVER 0 1 0 1 +SETTLED 1 1 1 2 +SETTLE 2 1 3 2 +SET 19 1 19 20 +SERVICES 1 1 1 2 +SERVED 4 1 4 5 +SERVE 11 1 12 11 +SERVANTS 4 1 4 5 +SERVANT 11 1 12 11 +SENTENCES 2 1 3 2 +SENTENCE 3 1 3 4 +SENT 5 1 6 5 +SENSE 15 1 16 15 +SENDIARIES 0 1 0 1 +SENCE 0 1 1 0 +SENATE 2 1 2 3 +SEMON'S 0 1 1 0 +SELVEY 0 1 0 1 +SELF 5 1 5 6 +SEERS 0 1 1 0 +SEEING 12 1 12 13 +SEEDS 0 1 1 0 +SEED 2 1 2 3 +SECTS 0 1 1 0 +SEATS 3 1 3 4 +SEATING 1 1 2 1 +SEAT 3 1 4 3 +SEA 18 1 18 19 +SE 0 1 0 1 +SCUTCHEON 0 1 1 0 +SCUSE 0 1 1 0 +SCRAPBOOKS 0 1 1 0 +SCRAP 0 1 0 1 +SCOUTING 0 1 1 0 +SCOUT 5 1 5 6 +SCHOOLS 1 1 1 2 +SCHOOLROOM 0 1 1 0 +SCHOOLBOYS 0 1 0 1 +SCEVRA 0 1 1 0 +SCEURA 0 1 1 0 +SCEPTICAL 0 1 0 1 +SCENTS 0 1 0 1 +SCENE 3 1 3 4 +SCATHE 0 1 1 0 +SCATH 0 1 0 1 +SCAROONS 0 1 1 0 +SCARONS 0 1 0 1 +SCAFFRA 0 1 0 1 +SAYS 12 1 12 13 +SAYING 15 1 15 16 +SAVED 4 1 5 4 +SAUVEUR 0 1 1 0 +SATITUDE 0 1 0 1 +SATE 0 1 1 0 +SAT 18 1 18 19 +SANG 4 1 5 4 +SAMPLE 0 1 1 0 +SALVI 0 1 0 1 +SALINE 0 1 1 0 +SALIENT 1 1 2 1 +SALE 0 1 0 1 +SAINTS 4 1 5 4 +SAILORED 0 1 0 1 +SAILING 0 1 0 1 +SAILED 0 1 0 1 +SAILD 0 1 1 0 +SAIL 5 1 6 5 +SAGE 0 1 0 1 +SABLE 0 1 0 1 +RUST 0 1 1 0 +RULED 0 1 1 0 +RUFUS 0 1 1 0 +RUFFUS 0 1 0 1 +RUE 0 1 1 0 +ROSSITUR 0 1 0 1 +ROSSETER 0 1 1 0 +ROOM 41 1 41 42 +ROLLED 3 1 3 4 +ROI 0 1 1 0 +ROERER 0 1 1 0 +RODOLPHO 0 1 0 1 +RODOLFO'S 0 1 1 0 +ROCKED 0 1 1 0 +ROCK 1 1 1 2 +ROBINS 0 1 0 1 +ROBIN'S 0 1 1 0 +ROAR 0 1 0 1 +RIVING 0 1 0 1 +RINGING 0 1 0 1 +RIDOLPHO 0 1 0 1 +RIDOLPHAL 0 1 0 1 +REWEIGHED 0 1 1 0 +REWAIED 0 1 0 1 +REVOLUTION 0 1 0 1 +RETURNED 18 1 18 19 +RETURN 8 1 9 8 +RESIGNED 1 1 2 1 +RESIGN 0 1 0 1 +REMOVED 4 1 4 5 +REMOVE 3 1 4 3 +REMOV'D 0 1 1 0 +REMEMBERED 11 1 11 12 +REMEMBER 9 1 10 9 +REMARKED 4 1 4 5 +REMARK 2 1 3 2 +REMAINING 0 1 0 1 +RELOCATED 0 1 1 0 +RELIES 0 1 1 0 +REINTER 0 1 0 1 +REINFORCEMENTS 0 1 0 1 +REIGNED 1 1 2 1 +REGGIE 1 1 2 1 +REGAINED 0 1 1 0 +REGAIN 0 1 0 1 +REFUSED 7 1 8 7 +REENFORCEMENTS 0 1 1 0 +REEDER 0 1 1 0 +RECORDS 2 1 2 3 +RECORD 6 1 7 6 +RECOGNIZED 3 1 3 4 +RECOGNISED 0 1 1 0 +REBUKED 0 1 0 1 +REBUK'D 0 1 1 0 +REALIZE 4 1 4 5 +RANCOUR 0 1 0 1 +RANCOR 0 1 1 0 +RANCH 0 1 0 1 +RAINED 0 1 0 1 +RADOLPHO 0 1 0 1 +QUITE 29 1 29 30 +QUINCY 0 1 1 0 +QUINCEY 0 1 0 1 +QUASI 0 1 1 0 +QUART 0 1 1 0 +PYTHAGORIANS 0 1 0 1 +PYTHAGOREANS 0 1 1 0 +PUTTIN 0 1 1 0 +PUT 31 1 32 31 +PURSUS 0 1 0 1 +PURSE 1 1 2 1 +PURPOSED 0 1 1 0 +PURPOSE 10 1 10 11 +PURIST 0 1 1 0 +PUREST 0 1 0 1 +PURCHASE 0 1 0 1 +PUNISHED 3 1 4 3 +PUNISH 0 1 0 1 +PSALM 2 1 3 2 +PROVIDING 0 1 1 0 +PROVED 6 1 6 7 +PROSELYTING 0 1 1 0 +PROSELY 0 1 0 1 +PRODIGAL 0 1 1 0 +PROCEED 1 1 1 2 +PRIVATELY 0 1 1 0 +PRINCIPLE 3 1 4 3 +PRINCIPAL 4 1 4 5 +PRIESTHOO 0 1 0 1 +PREVENT 0 1 1 0 +PRETENSE 0 1 1 0 +PRETENCE 1 1 1 2 +PRESENT 20 1 20 21 +PRECIEUSES 0 1 1 0 +PRECEDE 0 1 1 0 +PRE 0 1 1 0 +POSSESSED 3 1 3 4 +POSSESS 2 1 3 2 +PORTES 0 1 
1 0 +PORT 1 1 1 2 +PORAL 0 1 0 1 +POPHAM 4 1 5 4 +POOS 0 1 1 0 +POOL 1 1 1 2 +POND 2 1 3 2 +POLITICS 1 1 1 2 +POISONED 0 1 0 1 +POISON'D 0 1 1 0 +POINT 13 1 14 13 +PLESIOSAURUS 0 1 1 0 +PLEASANTS 0 1 0 1 +PLEASANCE 0 1 1 0 +PLEAS 0 1 0 1 +PLEAD 1 1 1 2 +PLEA 0 1 1 0 +PLAY 12 1 12 13 +PLATINISTS 0 1 0 1 +PLATES 2 1 2 3 +PLAITS 0 1 1 0 +PLACES 1 1 1 2 +PLACE 38 1 39 38 +PINCH 0 1 0 1 +PIG 2 1 2 3 +PIERCED 1 1 1 2 +PIERC'D 0 1 1 0 +PIECE 1 1 1 2 +PIC 0 1 0 1 +PHILANTHROPIES 0 1 1 0 +PHILADELPHIAN 0 1 1 0 +PHILADELPHIA 0 1 0 1 +PHAEDRUS 0 1 1 0 +PH 0 1 1 0 +PETER 0 1 0 1 +PERVERTIVES 0 1 0 1 +PERVERTERS 0 1 1 0 +PERSONAL 7 1 7 8 +PERFECT 6 1 7 6 +PERCHES 0 1 1 0 +PEPPEL 0 1 0 1 +PEGRIN 0 1 0 1 +PEGRENNE 0 1 1 0 +PEGRE 0 1 1 0 +PEG 0 1 0 1 +PEARL 12 1 12 13 +PEACE 13 1 14 13 +PAW 0 1 0 1 +PATIENTS 0 1 1 0 +PATIENT 0 1 0 1 +PATCH 3 1 4 3 +PAT 0 1 0 1 +PASSION 3 1 3 4 +PASSAGEWAY 0 1 0 1 +PASSAGE 8 1 9 8 +PASCHAL 0 1 1 0 +PARTS 6 1 7 6 +PARTICLES 0 1 1 0 +PARTICLE 0 1 0 1 +PART 22 1 22 23 +PARSONS 1 1 2 1 +PARSON 0 1 0 1 +PARRICIDES 0 1 0 1 +PAROQUET 0 1 1 0 +PARASITES 0 1 1 0 +PARAQUET 0 1 0 1 +PARALLELOGRAM 0 1 1 0 +PARALLELLOGRAM 0 1 0 1 +PAPERS 7 1 8 7 +PAPER 8 1 8 9 +PAPAL 0 1 1 0 +PANTS 0 1 1 0 +PANEL 1 1 1 2 +PALLET 0 1 0 1 +PALATE 0 1 1 0 +P 1 1 1 2 +OWE 0 1 0 1 +OUTRAGED 0 1 0 1 +OUTRAGE 0 1 1 0 +OUT 100 1 100 101 +OUGHTER 0 1 1 0 +OTTLEY'S 0 1 1 0 +OTTER 0 1 0 1 +OTHERWORLDLY 0 1 1 0 +OTHERS 22 1 23 22 +OSTENSITY 0 1 0 1 +OSH 0 1 1 0 +OSAGE 0 1 1 0 +ORS 0 1 0 1 +ORDERS 3 1 3 4 +ORDERED 2 1 3 2 +ORDER 22 1 22 23 +ORCHARD 3 1 4 3 +ORANGES 0 1 0 1 +ORANGE 0 1 1 0 +OPPOSITION 4 1 4 5 +OPHELIA 0 1 1 0 +ONTO 0 1 1 0 +ONE'S 1 1 1 2 +OMER 0 1 0 1 +OLOFF 0 1 0 1 +OLIVE'S 2 1 3 2 +OLIVE 3 1 4 3 +OLD 39 1 40 39 +OLAF 1 1 2 1 +OKARRANT 0 1 0 1 +OFFICES 0 1 1 0 +OFFICER'S 0 1 0 1 +OFFICER 4 1 4 5 +OFFENSES 0 1 1 0 +OFFENCES 0 1 0 1 +OCCAS 0 1 0 1 +OBSERVED 5 1 6 5 +OBSERVE 4 1 4 5 +OBROWN 0 1 0 1 +OAKS 0 1 1 0 +OAKLEIGHS 0 1 0 1 +O'ER 0 1 1 0 +NURSED 0 1 1 0 +NURSE 1 1 1 2 +NOVELS 2 1 2 3 +NOVEL'S 0 1 1 0 +NOUGHT 0 1 1 0 +NOTTINGHAM 6 1 7 6 +NOTHIN 0 1 1 0 +NOSED 0 1 0 1 +NOSE 2 1 2 3 +NORTHWARDS 0 1 1 0 +NORTHWARD 1 1 1 2 +NORTHERNERS 0 1 1 0 +NOONK 0 1 0 1 +NOON 2 1 3 2 +NINE 10 1 11 10 +NIGHTFALL 0 1 1 0 +NEWCOMER 0 1 1 0 +NEWBORN 0 1 0 1 +NEVERS 0 1 0 1 +NEVERBAND 0 1 0 1 +NEVER 63 1 63 64 +NELLY 0 1 1 0 +NELLIE 0 1 0 1 +NEIGHBOURHOOD 0 1 0 1 +NEIGHBORHOOD 0 1 1 0 +NEED 12 1 12 13 +NED 1 1 1 2 +NECK 5 1 6 5 +NECHERANT 0 1 0 1 +NEARER 3 1 4 3 +NEAR'S 0 1 0 1 +NEAR 6 1 6 7 +NE'ER 0 1 1 0 +NAY 5 1 5 6 +NAVE 0 1 0 1 +NATTY 1 1 2 1 +NARES 0 1 1 0 +NANNIE 0 1 0 1 +NAMED 3 1 4 3 +NAME 14 1 14 15 +MYSTERY 5 1 5 6 +MYLES 0 1 0 1 +MUSOLEUM 0 1 0 1 +MURCHISON 0 1 0 1 +MUMMERIES 0 1 1 0 +MUMBIE 0 1 0 1 +MUCH 68 1 69 68 +MOVED 10 1 10 11 +MOUNTED 0 1 1 0 +MOUNTAIN 5 1 5 6 +MOUNT 0 1 0 1 +MOULDED 0 1 0 1 +MOTHERS 1 1 2 1 +MOTHER'S 4 1 4 5 +MORMONS 3 1 4 3 +MORMONISM 2 1 3 2 +MONTMARTRA 0 1 0 1 +MONTFICHE 0 1 0 1 +MONGOOSE 0 1 1 0 +MONGOO'S 0 1 0 1 +MOMBI 0 1 1 0 +MOLE 0 1 0 1 +MOLDED 0 1 1 0 +MOHICANS 0 1 0 1 +MOHICAN 0 1 1 0 +MO 0 1 1 0 +MITIGATED 0 1 0 1 +MISSY 0 1 0 1 +MISSOURIENS 0 1 0 1 +MISSOURIANS 1 1 2 1 +MISDEMEANOUR 0 1 0 1 +MISDEMEANOR 0 1 1 0 +MINT 0 1 1 0 +MINE 6 1 7 6 +MIND 29 1 29 30 +MILLS 0 1 0 1 +MILLIONED 0 1 0 1 +MILLION'D 0 1 1 0 +MILLIMETRE 0 1 0 1 +MILLIMETER 0 1 1 0 +MILLIGRAM 0 1 1 0 +MILL 0 1 0 1 +MILITATED 0 1 1 0 +MILES 6 1 7 6 +MILAGRAM 0 1 0 1 +MICARTLE 0 1 0 1 +METRES 0 1 0 1 +METERS 0 1 1 0 +MET 10 1 10 11 +MESSRS 0 1 1 0 +MESSIERS 0 1 0 1 +MESS 0 1 1 0 +MERSEY 0 1 1 0 +MERRY 0 
1 1 0 +MERGANSER 0 1 1 0 +MERGANCER 0 1 0 1 +MERCY 2 1 2 3 +MERCHISTON 0 1 1 0 +MER 0 1 1 0 +MENT 0 1 0 1 +MEND 1 1 2 1 +MEN'S 2 1 2 3 +MEMORIES 0 1 0 1 +MEET 6 1 7 6 +MEANHAW 0 1 0 1 +MEAN 9 1 9 10 +MEALYBACK 0 1 1 0 +MEALLY 0 1 0 1 +MEADOWCROFT 0 1 1 0 +MEADOWCROF 0 1 0 1 +MC 1 1 1 2 +MAUSOLEUM 0 1 1 0 +MAU 0 1 0 1 +MATE 1 1 2 1 +MASTERY 0 1 1 0 +MASTERS 3 1 3 4 +MASTER'S 1 1 2 1 +MARVELOUS 0 1 1 0 +MARVELLOUS 0 1 0 1 +MARTFICHERE 0 1 0 1 +MARSHRA 0 1 0 1 +MARSHALLED 0 1 1 0 +MARSHALED 0 1 0 1 +MARRIED 2 1 2 3 +MARIVAL 0 1 0 1 +MARIE'S 0 1 1 0 +MARIE 5 1 6 5 +MARI'S 0 1 0 1 +MARI 0 1 0 1 +MARAY 0 1 0 1 +MARAVAUX 0 1 0 1 +MARAIS 0 1 1 0 +MANIFESTED 0 1 1 0 +MANIFEST 2 1 2 3 +MAIDEN 0 1 0 1 +MAID 5 1 6 5 +MADE 61 1 61 62 +MACDONALDS 0 1 1 0 +MAC 0 1 1 0 +LUSTY 0 1 0 1 +LUIS 0 1 1 0 +LUCK 3 1 3 4 +LUCADIA 0 1 0 1 +LUBRICATE 0 1 1 0 +LUBRICADE 0 1 0 1 +LOWERED 0 1 0 1 +LOWER 5 1 6 5 +LOWED 0 1 0 1 +LOVES 3 1 3 4 +LOVE'S 0 1 1 0 +LOUISE 4 1 4 5 +LOU'S 0 1 1 0 +LOU 0 1 0 1 +LORNE 0 1 1 0 +LORN 0 1 0 1 +LORD 22 1 23 22 +LOOSE 5 1 5 6 +LOGARTHEMS 0 1 0 1 +LOGARITHMS 0 1 1 0 +LOCRIS 0 1 1 0 +LOCKY 0 1 0 1 +LOCHRIS 0 1 0 1 +LOCATIA 0 1 0 1 +LOCATED 1 1 1 2 +LOCALIA 0 1 0 1 +LOADS 0 1 1 0 +LOAD 0 1 1 0 +LO 1 1 2 1 +LITTLE 101 1 101 102 +LITER 0 1 1 0 +LINKED 0 1 0 1 +LINK'D 0 1 1 0 +LILY'S 0 1 0 1 +LILLYS 0 1 1 0 +LILLY 0 1 1 0 +LILBURN 0 1 1 0 +LIE 1 1 1 2 +LEUCEDES 0 1 0 1 +LETS 0 1 1 0 +LESSER 1 1 2 1 +LESS 28 1 28 29 +LEOKADIA 0 1 0 1 +LEOCADI 0 1 1 0 +LEFRANK 0 1 1 0 +LEEK 0 1 0 1 +LED 7 1 7 8 +LECOMTE 0 1 0 1 +LEAVING 5 1 6 5 +LEAVES 5 1 5 6 +LEAST 15 1 15 16 +LEASED 0 1 1 0 +LEAPING 3 1 3 4 +LEAFS 0 1 0 1 +LEADS 2 1 3 2 +LAUROI 0 1 0 1 +LATER 14 1 14 15 +LARKSPER 0 1 0 1 +LARKSBURG 0 1 0 1 +LANTHORN 0 1 1 0 +LANTERN 0 1 0 1 +LANDA 0 1 0 1 +LAND 12 1 13 12 +LAMENT 0 1 0 1 +LAMBENT 0 1 1 0 +LALLIE 0 1 1 0 +LAKE 12 1 13 12 +LABOUR 0 1 1 0 +LABOR 1 1 1 2 +LA 5 1 5 6 +L 1 1 2 1 +KONSTANTINE 0 1 0 1 +KNOWS 6 1 6 7 +KNIF 0 1 0 1 +KNEEP 0 1 0 1 +KNEED 0 1 1 0 +KNEE 0 1 1 0 +KNAVE 0 1 1 0 +KIRTLAND 0 1 1 0 +KINGDOMS 1 1 2 1 +KINGDOM'S 0 1 0 1 +KING'S 4 1 4 5 +KING 25 1 26 25 +KIEV 0 1 0 1 +KIERRASCURISTS 0 1 0 1 +KICKAPOOS 0 1 0 1 +KICK 1 1 2 1 +KEZWICK 0 1 0 1 +KESWICK 0 1 1 0 +KEOGH 0 1 1 0 +KATYA 0 1 0 1 +KANTI 0 1 0 1 +KAFFIRS 0 1 0 1 +KAFFIR'S 0 1 0 1 +JUST 42 1 42 43 +JUS 0 1 1 0 +JOHNNIAGO 0 1 0 1 +JOHN 15 1 16 15 +JOB 4 1 4 5 +JEWELERS 0 1 0 1 +JEWELER'S 0 1 1 0 +JEFFREY'S 0 1 0 1 +JEFFREY 0 1 0 1 +JAW 1 1 2 1 +JASPER 5 1 6 5 +JAPSER 0 1 0 1 +JAIL 3 1 4 3 +JACKKNIFE 0 1 0 1 +JACK 5 1 6 5 +ITHUSORIS 0 1 0 1 +IT'LL 2 1 2 3 +ISTIATED 0 1 0 1 +IRONS 0 1 0 1 +IRON'S 0 1 1 0 +INVENTORS 1 1 2 1 +INVENTILE 0 1 0 1 +INTRUSTING 0 1 0 1 +INTRENCHMENT 0 1 1 0 +INTRENCHED 0 1 0 1 +INTERESTS 1 1 2 1 +INTER 0 1 0 1 +INTENTS 0 1 1 0 +INTENT 0 1 1 0 +INTENSE 2 1 2 3 +INSURRECTIONOUS 0 1 0 1 +INSURRECTIONISTS 0 1 1 0 +INSURE 0 1 0 1 +INQUIRIES 1 1 1 2 +INQUIRED 2 1 2 3 +INQUIRE 0 1 0 1 +INFANTS 2 1 3 2 +INFANTILE 1 1 2 1 +INFANT'S 0 1 0 1 +INERLOCHY 0 1 0 1 +INDE 0 1 0 1 +INCLOSED 0 1 1 0 +INCERTAINTY 0 1 1 0 +INACTION 0 1 1 0 +IMPRESSES 0 1 1 0 +IMPRESS'D 0 1 1 0 +IMPERFECT 0 1 0 1 +IMPELLED 2 1 2 3 +IMPEARLED 0 1 1 0 +IMMIGRATION 0 1 0 1 +IMMIGRANT 0 1 0 1 +IKE 0 1 1 0 +IDIOSYNCRATICALLY 0 1 1 0 +IDIOS 0 1 0 1 +IDEAS 11 1 11 12 +HYMN 1 1 1 2 +HYDRAST 0 1 0 1 +HYDRAS 0 1 1 0 +HUTTED 0 1 0 1 +HUSBAND 8 1 9 8 +HUSBABLY 0 1 0 1 +HURRIED 6 1 6 7 +HURRANG 0 1 0 1 +HUNTLY 0 1 0 1 +HUNTLEY 0 1 1 0 +HUMPH 0 1 1 0 +HUMID 0 1 1 0 +HUMAN 15 1 15 16 +HULLO 0 1 0 1 +HOUSES 1 1 1 2 +HOUSECLEANING 0 1 1 0 +HOUSE 34 1 34 35 
(Per-word error statistics from the decoding logs, continued: each entry lists a word followed by four counts, giving how often it was recognized correctly, its total errors, its occurrence count in the reference transcripts, and its occurrence count in the decoding hypotheses. The complete per-word listing is kept in the error-stats log files added to this repository by this commit.)
+JUSTIFICATION 2 0 2 2 +JUSTICE 3 0 3 3 +JURISDICTION 1 0 1 1 +JUMPING 1 0 1 1 +JUMPED 1 0 1 1 +JUMP 3 0 3 3 +JUDGMENT 6 0 6 6 +JUDGES 1 0 1 1 +JUDGE 5 0 5 5 +JUDAH 1 0 1 1 +JOYOUS 1 0 1 1 +JOYCE 2 0 2 2 +JOY 4 0 4 4 +JOURNEYING 1 0 1 1 +JOURNEY 5 0 5 5 +JONES 3 0 3 3 +JOLLY 5 0 5 5 +JOKED 1 0 1 1 +JOKE 2 0 2 2 +JOINED 1 0 1 1 +JOIN 2 0 2 2 +JOHNSON 1 0 1 1 +JIB 1 0 1 1 +JEWISH 1 0 1 1 +JEWELS 3 0 3 3 +JET 1 0 1 1 +JESUS 7 0 7 7 +JERSEY 1 0 1 1 +JERK 1 0 1 1 +JENKS 1 0 1 1 +JELLIES 1 0 1 1 +JEHOVAH 1 0 1 1 +JEERED 1 0 1 1 +JEALOUS 1 0 1 1 +JAWS 2 0 2 2 +JASPER'S 2 0 2 2 +JAP 1 0 1 1 +JANUARY 2 0 2 2 +JANE'S 1 0 1 1 +JANE 4 0 4 4 +JAMES 2 0 2 2 +JAILER 5 0 5 5 +JACOB'S 2 0 2 2 +JACOB 1 0 1 1 +JACKSON 1 0 1 1 +JACKET 1 0 1 1 +J 2 0 2 2 +IVORY 1 0 1 1 +ITSELF 21 0 21 21 +ITCH 1 0 1 1 +ITALIAN 2 0 2 2 +ISSUED 2 0 2 2 +ISSUE 1 0 1 1 +ISRAEL 1 0 1 1 +ISOLATED 1 0 1 1 +ISN'T 5 0 5 5 +ISLAND 5 0 5 5 +IRWINE 1 0 1 1 +IRRITABLE 1 0 1 1 +IRRESOLUTION 1 0 1 1 +IRREPARABLE 1 0 1 1 +IRREGULARITY 2 0 2 2 +IRONING 1 0 1 1 +IRON 2 0 2 2 +IRISH 2 0 2 2 +IRIDESCENT 1 0 1 1 +IRENE 1 0 1 1 +IRELAND 1 0 1 1 +INWARDLY 1 0 1 1 +INWARD 1 0 1 1 +INVOLVING 1 0 1 1 +INVOLVES 1 0 1 1 +INVOLVED 1 0 1 1 +INVOLVE 1 0 1 1 +INVITED 4 0 4 4 +INVITATION 3 0 3 3 +INVISIBLE 1 0 1 1 +INVIOLATE 1 0 1 1 +INVIDIOUS 1 0 1 1 +INVESTIGATION 1 0 1 1 +INVENTOR 1 0 1 1 +INVENTION 1 0 1 1 +INVENTED 1 0 1 1 +INVASION 1 0 1 1 +INVARIABLY 4 0 4 4 +INVARIABLE 1 0 1 1 +INVALID 1 0 1 1 +INVADER 1 0 1 1 +INVADE 1 0 1 1 +INTRODUCTION 4 0 4 4 +INTRODUCING 1 0 1 1 +INTRODUCED 3 0 3 3 +INTRODUCE 3 0 3 3 +INTRINSIC 1 0 1 1 +INTRICATE 1 0 1 1 +INTOLERANT 1 0 1 1 +INTOLERANCY 1 0 1 1 +INTOLERABLE 1 0 1 1 +INTIMATELY 2 0 2 2 +INTIMATE 2 0 2 2 +INTERVIEWS 1 0 1 1 +INTERVIEW 3 0 3 3 +INTERSECTED 1 0 1 1 +INTERRUPTED 2 0 2 2 +INTERPRETED 1 0 1 1 +INTERPRETATION 1 0 1 1 +INTERPOSED 1 0 1 1 +INTERNAL 1 0 1 1 +INTERMINGLED 1 0 1 1 +INTERMEDIATE 1 0 1 1 +INTERLACED 1 0 1 1 +INTERFERE 2 0 2 2 +INTERESTING 3 0 3 3 +INTERESTED 4 0 4 4 +INTEREST 10 0 10 10 +INTENTLY 2 0 2 2 +INTENTIONS 1 0 1 1 +INTENTION 1 0 1 1 +INTENSITY 3 0 3 3 +INTENSIFICATION 1 0 1 1 +INTENSELY 1 0 1 1 +INTENDED 1 0 1 1 +INTEND 1 0 1 1 +INTELLIGENT 5 0 5 5 +INTELLIGENCE 7 0 7 7 +INTELLECTS 1 0 1 1 +INTELLECT 1 0 1 1 +INTEGRITY 1 0 1 1 +INTANGIBLE 1 0 1 1 +INSULT 1 0 1 1 +INSTRUMENT 1 0 1 1 +INSTRUCTIONS 4 0 4 4 +INSTITUTION 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCT 1 0 1 1 +INSTEAD 11 0 11 11 +INSTANTLY 6 0 6 6 +INSTANTANEOUS 1 0 1 1 +INSTANT 3 0 3 3 +INSTANCING 1 0 1 1 +INSTANCE 3 0 3 3 +INSTALLED 5 0 5 5 +INSTALL 1 0 1 1 +INSPIRED 1 0 1 1 +INSPIRATION 1 0 1 1 +INSOLENTLY 1 0 1 1 +INSISTS 1 0 1 1 +INSISTENCE 2 0 2 2 +INSISTED 1 0 1 1 +INSIST 1 0 1 1 +INSIPID 1 0 1 1 +INSINUATED 1 0 1 1 +INSIGNIFICANT 2 0 2 2 +INSIGHT 1 0 1 1 +INSIDE 2 0 2 2 +INSERTING 1 0 1 1 +INSENSIBLE 1 0 1 1 +INSECT 1 0 1 1 +INSATIABLE 2 0 2 2 +INNUMERABLE 2 0 2 2 +INNOCENTLY 1 0 1 1 +INNOCENT 2 0 2 2 +INNINGS 1 0 1 1 +INNER 2 0 2 2 +INMATES 1 0 1 1 +INJUSTICE 4 0 4 4 +INJURY 2 0 2 2 +INJURED 2 0 2 2 +INIQUITY 1 0 1 1 +INHUMAN 1 0 1 1 +INHERITANCE 2 0 2 2 +INHABITANTS 3 0 3 3 +INGREDIENTS 1 0 1 1 +INGENUITY 2 0 2 2 +INGENIOUS 2 0 2 2 +INFUSE 1 0 1 1 +INFORMING 1 0 1 1 +INFORMED 3 0 3 3 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENTIAL 2 0 2 2 +INFLUENCES 2 0 2 2 +INFLUENCE 8 0 8 8 +INFLICT 2 0 2 2 +INFLEXIBLE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRMITIES 1 0 1 1 +INFIRMARY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 2 0 2 2 +INFERENCE 1 0 1 1 +INFECTED 1 0 1 1 +INFANTRY 2 0 2 2 +INFANT 1 0 1 1 +INFANCY 1 0 1 1 
+INFALLIBLE 1 0 1 1 +INEXPRESSIBLY 1 0 1 1 +INEXPLICABLE 2 0 2 2 +INEXPERIENCE 1 0 1 1 +INEXHAUSTIBLE 1 0 1 1 +INESTIMABLE 1 0 1 1 +INEFFECTUALLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDULGENCE 1 0 1 1 +INDULGED 1 0 1 1 +INDUCED 1 0 1 1 +INDUCE 1 0 1 1 +INDUBITABLE 1 0 1 1 +INDIVIDUALS 1 0 1 1 +INDIVIDUAL 5 0 5 5 +INDISTINGUISHABLE 1 0 1 1 +INDISTINCT 1 0 1 1 +INDISPENSABLE 1 0 1 1 +INDISCREET 1 0 1 1 +INDIRECT 1 0 1 1 +INDIGENCE 1 0 1 1 +INDIFFERENT 3 0 3 3 +INDIFFERENCE 4 0 4 4 +INDIES 1 0 1 1 +INDICATOR 1 0 1 1 +INDICATING 1 0 1 1 +INDICATES 2 0 2 2 +INDICATED 3 0 3 3 +INDICATE 2 0 2 2 +INDIANS 4 0 4 4 +INDIAN 4 0 4 4 +INDIA 1 0 1 1 +INDEPENDENTS 1 0 1 1 +INDEPENDENT 4 0 4 4 +INDEPENDENCE 1 0 1 1 +INCURRING 1 0 1 1 +INCURRED 1 0 1 1 +INCUR 1 0 1 1 +INCULCATED 1 0 1 1 +INCREASED 2 0 2 2 +INCREASE 2 0 2 2 +INCONVENIENT 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOMPREHENSIBLE 1 0 1 1 +INCOMPATIBLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 1 0 1 1 +INCLUDED 2 0 2 2 +INCLINES 1 0 1 1 +INCLINED 2 0 2 2 +INCLINATIONS 2 0 2 2 +INCITED 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENTAL 1 0 1 1 +INCIDENT 6 0 6 6 +INCHES 1 0 1 1 +INCH 2 0 2 2 +INCESSANTLY 1 0 1 1 +INCEPTION 1 0 1 1 +INCAPABLE 2 0 2 2 +INCANDESCENT 1 0 1 1 +INASMUCH 1 0 1 1 +INADEQUATE 2 0 2 2 +INADEQUACY 1 0 1 1 +INACCURACY 1 0 1 1 +INACCESSIBLE 1 0 1 1 +IMPULSIVELY 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVING 1 0 1 1 +IMPROVED 3 0 3 3 +IMPRISONMENT 1 0 1 1 +IMPRISONED 3 0 3 3 +IMPRESSIONS 6 0 6 6 +IMPRESSION 2 0 2 2 +IMPOSSIBLE 11 0 11 11 +IMPOSSIBILITY 1 0 1 1 +IMPOSED 1 0 1 1 +IMPOSE 1 0 1 1 +IMPORTANT 7 0 7 7 +IMPORTANCE 5 0 5 5 +IMPORT 1 0 1 1 +IMPLY 1 0 1 1 +IMPLORES 1 0 1 1 +IMPLORE 1 0 1 1 +IMPLIES 3 0 3 3 +IMPLICIT 1 0 1 1 +IMPLICATION 1 0 1 1 +IMPIETY 1 0 1 1 +IMPETUS 1 0 1 1 +IMPETUOUS 3 0 3 3 +IMPERIOUSLY 1 0 1 1 +IMPERIALIST 1 0 1 1 +IMPERIALISM 1 0 1 1 +IMPERIAL 1 0 1 1 +IMPERFECTLY 1 0 1 1 +IMPERATIVE 1 0 1 1 +IMPENETRABLE 2 0 2 2 +IMPEDIMENT 1 0 1 1 +IMPATIENT 1 0 1 1 +IMPATIENCE 3 0 3 3 +IMPASSIVELY 1 0 1 1 +IMMUNITY 1 0 1 1 +IMMORTALITY 1 0 1 1 +IMMORTAL 1 0 1 1 +IMMENSELY 1 0 1 1 +IMMENSE 1 0 1 1 +IMMEDIATELY 4 0 4 4 +IMMEDIATE 2 0 2 2 +IMMEDIACY 1 0 1 1 +IMMATURE 1 0 1 1 +IMBIBING 1 0 1 1 +IMBIBED 1 0 1 1 +IMAGINING 1 0 1 1 +IMAGINED 2 0 2 2 +IMAGINE 2 0 2 2 +IMAGINATIVE 1 0 1 1 +IMAGINATION 3 0 3 3 +IMAGINARY 1 0 1 1 +IMAGINABLE 2 0 2 2 +IMAGES 8 0 8 8 +IMAGE 9 0 9 9 +ILLUSTRIOUS 2 0 2 2 +ILLUSTRATION 1 0 1 1 +ILLUSION 2 0 2 2 +ILLUMINATION 1 0 1 1 +ILLUMINATING 1 0 1 1 +ILLUMINATED 1 0 1 1 +ILLS 1 0 1 1 +ILLNESS 1 0 1 1 +IGNORANCE 2 0 2 2 +IGNOMINY 1 0 1 1 +IGNOBLE 1 0 1 1 +IDOLATRY 1 0 1 1 +IDLY 1 0 1 1 +IDLENESS 1 0 1 1 +IDLE 6 0 6 6 +IDENTITY 1 0 1 1 +IDEAL 3 0 3 3 +IDEA 7 0 7 7 +ICE 1 0 1 1 +HYPOTHESIS 1 0 1 1 +HYPOCRITE 1 0 1 1 +HYPOCRISY 1 0 1 1 +HUT 4 0 4 4 +HUSSY 1 0 1 1 +HUSHED 1 0 1 1 +HUSBAND'S 1 0 1 1 +HURT 1 0 1 1 +HURRYING 2 0 2 2 +HURRY 4 0 4 4 +HURRIEDLY 3 0 3 3 +HURONS 1 0 1 1 +HURLED 2 0 2 2 +HUNTING 2 0 2 2 +HUNTER 1 0 1 1 +HUNTED 1 0 1 1 +HUNGRY 1 0 1 1 +HUNGER 2 0 2 2 +HUNG 10 0 10 10 +HUNDREDTH 1 0 1 1 +HUNDREDS 2 0 2 2 +HUNDRED 18 0 18 18 +HUMPY 2 0 2 2 +HUMOUR 1 0 1 1 +HUMOROUS 3 0 3 3 +HUMOR 1 0 1 1 +HUMMING 1 0 1 1 +HUMILITY 1 0 1 1 +HUMILIATE 1 0 1 1 +HUMBUG 1 0 1 1 +HUMBLY 2 0 2 2 +HUMBLE 5 0 5 5 +HUMANITY 2 0 2 2 +HUMANE 1 0 1 1 +HUGGED 1 0 1 1 +HUGE 7 0 7 7 +HUES 1 0 1 1 +HUE 1 0 1 1 +HUDSON 1 0 1 1 +HUDDLED 1 0 1 1 +HOWL 1 0 1 1 +HOWEVER 29 0 29 29 +HOW 50 0 50 50 +HOVER 1 0 1 1 +HOUSEWORK 1 0 1 1 +HOUSEMAID 2 0 2 2 +HOUSEKEEPER 2 0 2 2 +HOUSEHOLDS 1 0 1 1 +HOUSEHOLD'S 1 0 1 1 +HOUSEHOLD 4 0 4 4 
+HOURS 13 0 13 13 +HOUR 12 0 12 12 +HOUNDED 1 0 1 1 +HOTELS 1 0 1 1 +HOTEL 7 0 7 7 +HOSTILITY 1 0 1 1 +HOSPITALITY 4 0 4 4 +HOSPITABLY 1 0 1 1 +HOSPITABLE 1 0 1 1 +HORSES 6 0 6 6 +HORSEMEN 1 0 1 1 +HORROR 2 0 2 2 +HORRID 1 0 1 1 +HORRIBLY 2 0 2 2 +HORRIBLE 3 0 3 3 +HORNFUL 1 0 1 1 +HORIZON 3 0 3 3 +HORATIO 2 0 2 2 +HOPKINSON 2 0 2 2 +HOPKINS'S 1 0 1 1 +HOPKINS 4 0 4 4 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPED 2 0 2 2 +HOOKING 1 0 1 1 +HOOKED 1 0 1 1 +HONOURED 1 0 1 1 +HONORIFIC 2 0 2 2 +HONORED 1 0 1 1 +HONEY 1 0 1 1 +HONESTY 1 0 1 1 +HONESTLY 2 0 2 2 +HONEST 5 0 5 5 +HON 1 0 1 1 +HOMILY 1 0 1 1 +HOMES 2 0 2 2 +HOMELY 3 0 3 3 +HOME 23 0 23 23 +HOLMES 10 0 10 10 +HOLLYHOCKS 1 0 1 1 +HOLLOW 3 0 3 3 +HOLINESS 2 0 2 2 +HOLIDAYS 3 0 3 3 +HOLES 1 0 1 1 +HOLE 1 0 1 1 +HOLDS 2 0 2 2 +HOLDING 1 0 1 1 +HOLBORN 1 0 1 1 +HOBSON'S 1 0 1 1 +HOBS 1 0 1 1 +HO 1 0 1 1 +HITHERTO 1 0 1 1 +HISTORY 5 0 5 5 +HISTORIC 1 0 1 1 +HISTORIANS 1 0 1 1 +HISTORIAN 1 0 1 1 +HISSING 1 0 1 1 +HISS 1 0 1 1 +HIRE 1 0 1 1 +HINT 2 0 2 2 +HINDERED 1 0 1 1 +HIND 1 0 1 1 +HIMSELF 49 0 49 49 +HILLY 1 0 1 1 +HILL 4 0 4 4 +HIGHNESS 1 0 1 1 +HIGHLY 2 0 2 2 +HIGHER 2 0 2 2 +HIGH 18 0 18 18 +HIERARCHY 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HIDE 3 0 3 3 +HIDDEN 3 0 3 3 +HICKEY 1 0 1 1 +HEWN 1 0 1 1 +HESTER 11 0 11 11 +HESITATION 1 0 1 1 +HESITATING 2 0 2 2 +HESITATED 1 0 1 1 +HERSELF 20 0 20 20 +HERS 2 0 2 2 +HERON 1 0 1 1 +HEROINE 1 0 1 1 +HEROIC 2 0 2 2 +HERO 3 0 3 3 +HERMOCRATES 1 0 1 1 +HERETICS 2 0 2 2 +HEREDITY 1 0 1 1 +HEREAFTER 3 0 3 3 +HERE'S 1 0 1 1 +HERALDED 1 0 1 1 +HENRY'S 1 0 1 1 +HENRY 2 0 2 2 +HENLEY 1 0 1 1 +HENCE 4 0 4 4 +HEMMED 1 0 1 1 +HELPLESS 3 0 3 3 +HELPING 1 0 1 1 +HELP 18 0 18 18 +HELMSMAN 1 0 1 1 +HELLENES 1 0 1 1 +HELL 2 0 2 2 +HEIGHTS 1 0 1 1 +HEIGHTENING 1 0 1 1 +HEIGHT 1 0 1 1 +HEELS 1 0 1 1 +HEDGE 1 0 1 1 +HEAVY 13 0 13 13 +HEAVING 2 0 2 2 +HEAVILY 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVENLY 1 0 1 1 +HEAVED 1 0 1 1 +HEAT 2 0 2 2 +HEARTILY 2 0 2 2 +HEARTHSTONES 1 0 1 1 +HEARTH 3 0 3 3 +HEARTED 1 0 1 1 +HEARSE 2 0 2 2 +HEARS 2 0 2 2 +HEARING 1 0 1 1 +HEARD 19 0 19 19 +HEAP 2 0 2 2 +HEALTH 6 0 6 6 +HEADS 3 0 3 3 +HEADQUARTERS 1 0 1 1 +HEADLONGS 1 0 1 1 +HEADLONG 1 0 1 1 +HEADING 1 0 1 1 +HE'LL 1 0 1 1 +HAWORTH 1 0 1 1 +HAWKS 1 0 1 1 +HAWKEYE 5 0 5 5 +HAWK'S 1 0 1 1 +HAVEN'T 6 0 6 6 +HAUNTED 1 0 1 1 +HAUGHTY 4 0 4 4 +HATS 1 0 1 1 +HATRED 3 0 3 3 +HATH 4 0 4 4 +HATER 1 0 1 1 +HATEFUL 1 0 1 1 +HATED 1 0 1 1 +HATE 1 0 1 1 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 4 0 4 4 +HASTE 1 0 1 1 +HARVEST 1 0 1 1 +HARRYING 1 0 1 1 +HARRY 3 0 3 3 +HARROW 1 0 1 1 +HARPOONER 1 0 1 1 +HARNESS 1 0 1 1 +HARMONY 2 0 2 2 +HARMON 4 0 4 4 +HARM 2 0 2 2 +HARE 1 0 1 1 +HARDSHIPS 1 0 1 1 +HARDLY 14 0 14 14 +HARDER 1 0 1 1 +HARASSING 1 0 1 1 +HARANGUING 1 0 1 1 +HAPPY 16 0 16 16 +HAPPINESS 6 0 6 6 +HAPPILY 1 0 1 1 +HAPPENS 3 0 3 3 +HAPPENING 2 0 2 2 +HAPPENED 6 0 6 6 +HAPPEN 5 0 5 5 +HAPLESS 1 0 1 1 +HANSOM 1 0 1 1 +HANS 2 0 2 2 +HANGS 1 0 1 1 +HANGERS 1 0 1 1 +HANG 1 0 1 1 +HANDSOMEST 1 0 1 1 +HANDSOMELY 1 0 1 1 +HANDLE 1 0 1 1 +HANDKERCHIEFS 1 0 1 1 +HANDFUL 1 0 1 1 +HANDED 3 0 3 3 +HAND 29 0 29 29 +HAMMER 3 0 3 3 +HAMLET'S 1 0 1 1 +HAMBURG 1 0 1 1 +HALT 1 0 1 1 +HALLWAY 1 0 1 1 +HALLS 3 0 3 3 +HAIRS 1 0 1 1 +HAILING 1 0 1 1 +HADN'T 3 0 3 3 +HABITUALLY 1 0 1 1 +HABITUAL 2 0 2 2 +HABITS 4 0 4 4 +HABITATION 1 0 1 1 +HABIT 7 0 7 7 +GUTTER 1 0 1 1 +GUSTS 2 0 2 2 +GUST 1 0 1 1 +GUNS 1 0 1 1 +GUN 1 0 1 1 +GULLET 1 0 1 1 +GULF 2 0 2 2 +GUILTY 2 0 2 2 +GUILT 2 0 2 2 +GUIDED 2 0 2 2 +GUIDE 2 0 2 2 +GUERRILLA 1 0 1 1 +GUARDS 3 0 3 3 +GUARD 1 0 1 1 
+GRUDGE 1 0 1 1 +GROWTH 5 0 5 5 +GROWN 7 0 7 7 +GROWLED 2 0 2 2 +GROWING 4 0 4 4 +GROW 4 0 4 4 +GROUPS 2 0 2 2 +GROUP 2 0 2 2 +GROUNDS 2 0 2 2 +GROSS 1 0 1 1 +GROPING 1 0 1 1 +GROPE 1 0 1 1 +GROANS 2 0 2 2 +GROANING 1 0 1 1 +GROANED 2 0 2 2 +GROAN 1 0 1 1 +GRINNING 1 0 1 1 +GRINDER 1 0 1 1 +GRIN 1 0 1 1 +GRIM 3 0 3 3 +GRIFFIN 1 0 1 1 +GRIEVED 1 0 1 1 +GRIEF 2 0 2 2 +GREW 5 0 5 5 +GREETINGS 1 0 1 1 +GREETING 4 0 4 4 +GREETED 1 0 1 1 +GREET 1 0 1 1 +GREEK 4 0 4 4 +GREATNESS 2 0 2 2 +GREATLY 10 0 10 10 +GREATEST 7 0 7 7 +GRAVEYARD 3 0 3 3 +GRAVES 1 0 1 1 +GRAVE 4 0 4 4 +GRATITUDE 2 0 2 2 +GRATIFICATION 3 0 3 3 +GRATE 1 0 1 1 +GRASS 11 0 11 11 +GRASPING 3 0 3 3 +GRANTED 3 0 3 3 +GRANDSON 1 0 1 1 +GRANDMOTHER 2 0 2 2 +GRANDFATHER 4 0 4 4 +GRANDER 2 0 2 2 +GRAMOPHONES 1 0 1 1 +GRAINS 1 0 1 1 +GRAF 1 0 1 1 +GRADUALLY 5 0 5 5 +GRADATED 1 0 1 1 +GRACIOUSLY 1 0 1 1 +GRACIOUS 2 0 2 2 +GRACES 2 0 2 2 +GRACEFULLY 1 0 1 1 +GRACEFUL 1 0 1 1 +GRACE 12 0 12 12 +GOWN 1 0 1 1 +GOVERNOR'S 1 0 1 1 +GOVERNESS 2 0 2 2 +GOTHIC 3 0 3 3 +GOT 13 0 13 13 +GOSSIP 2 0 2 2 +GOSPEL 2 0 2 2 +GORGEOUS 1 0 1 1 +GORDONS 1 0 1 1 +GORDON 1 0 1 1 +GOODS 5 0 5 5 +GOODNESS 1 0 1 1 +GOODLY 1 0 1 1 +GOLIATH 2 0 2 2 +GOLF 1 0 1 1 +GOLDEN 15 0 15 15 +GOES 2 0 2 2 +GODLY 1 0 1 1 +GODLESS 1 0 1 1 +GODDESS 1 0 1 1 +GOD'S 1 0 1 1 +GOD 33 0 33 33 +GOBY 1 0 1 1 +GNARLED 1 0 1 1 +GLUE 1 0 1 1 +GLOWING 3 0 3 3 +GLOW 3 0 3 3 +GLOVES 5 0 5 5 +GLOVED 1 0 1 1 +GLOSSY 2 0 2 2 +GLORY 1 0 1 1 +GLORIOUS 2 0 2 2 +GLORIES 1 0 1 1 +GLOOMY 1 0 1 1 +GLOOMILY 2 0 2 2 +GLOBE 1 0 1 1 +GLITTERING 4 0 4 4 +GLITTERED 2 0 2 2 +GLINDA 1 0 1 1 +GLIMMERING 1 0 1 1 +GLIDING 1 0 1 1 +GLIDES 1 0 1 1 +GLIDED 2 0 2 2 +GLEANER 1 0 1 1 +GLEAMS 1 0 1 1 +GLEAMING 4 0 4 4 +GLEAMED 1 0 1 1 +GLEAM 1 0 1 1 +GLASS 6 0 6 6 +GLARE 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 5 0 5 5 +GLAMOUR 1 0 1 1 +GLADNESS 1 0 1 1 +GLADLY 1 0 1 1 +GLAD 12 0 12 12 +GIVING 7 0 7 7 +GIVES 3 0 3 3 +GIVEN 15 0 15 15 +GIRLS 7 0 7 7 +GIRL'S 2 0 2 2 +GIRL 25 0 25 25 +GILLIKINS 2 0 2 2 +GILDED 3 0 3 3 +GILD 1 0 1 1 +GIGANTIC 1 0 1 1 +GIFTS 2 0 2 2 +GIFT 6 0 6 6 +GHOSTS 2 0 2 2 +GHOSTLY 1 0 1 1 +GHOST 2 0 2 2 +GHISIZZLE 4 0 4 4 +GHASTLY 1 0 1 1 +GETTING 6 0 6 6 +GET 30 0 30 30 +GESTURES 1 0 1 1 +GESTURE 2 0 2 2 +GESTATION 1 0 1 1 +GERMANTOWN 1 0 1 1 +GERM 1 0 1 1 +GEORGE 3 0 3 3 +GEOMETRICAL 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 5 0 5 5 +GENTLEWOMAN 1 0 1 1 +GENTLENESS 1 0 1 1 +GENTLEMAN'S 1 0 1 1 +GENTLE 7 0 7 7 +GENTILITY 1 0 1 1 +GENIUS 2 0 2 2 +GENEROUSLY 1 0 1 1 +GENEROUS 4 0 4 4 +GENERATION 2 0 2 2 +GENERATED 1 0 1 1 +GENERALS 3 0 3 3 +GENEALOGIES 1 0 1 1 +GAZING 2 0 2 2 +GAZED 2 0 2 2 +GAZE 3 0 3 3 +GAY 1 0 1 1 +GATHERINGS 1 0 1 1 +GATHERING 2 0 2 2 +GATHERED 2 0 2 2 +GATES 6 0 6 6 +GATE 3 0 3 3 +GASPED 1 0 1 1 +GASEOUS 1 0 1 1 +GAS 1 0 1 1 +GARDENING 1 0 1 1 +GARDENER'S 1 0 1 1 +GARDENER 1 0 1 1 +GARDEN 6 0 6 6 +GARB 1 0 1 1 +GAMEWELL'S 1 0 1 1 +GALVANOMETER 1 0 1 1 +GALLOWSNESS 1 0 1 1 +GALLEY 1 0 1 1 +GALLANT 4 0 4 4 +GAIN 2 0 2 2 +GABLES 1 0 1 1 +FUZZY 1 0 1 1 +FUTURE 5 0 5 5 +FURTHERED 1 0 1 1 +FURTHER 9 0 9 9 +FURNITURE 4 0 4 4 +FURNISHING 1 0 1 1 +FURNISHED 1 0 1 1 +FURNISH 1 0 1 1 +FURLED 1 0 1 1 +FURIOUSLY 2 0 2 2 +FUNERAL 1 0 1 1 +FUNDAMENTAL 1 0 1 1 +FUNCTIONS 1 0 1 1 +FUNCTION 1 0 1 1 +FUMBLED 1 0 1 1 +FULLY 3 0 3 3 +FULL 18 0 18 18 +FULFILLED 2 0 2 2 +FUGITIVES 1 0 1 1 +FUGITIVE'S 1 0 1 1 +FRUSTRATED 1 0 1 1 +FRUITS 1 0 1 1 +FRUIT 1 0 1 1 +FROZEN 2 0 2 2 +FROWNINGLY 1 0 1 1 +FROWNING 2 0 2 2 +FROWNED 2 0 2 2 +FROWN 1 0 1 1 +FROTHY 1 0 1 1 +FROST 1 0 1 1 +FRONTISPIECE 1 0 1 1 +FRONTIER 3 0 3 3 +FROM 
187 0 187 187 +FROLIC 2 0 2 2 +FRO 1 0 1 1 +FRIVOLOUS 2 0 2 2 +FRINGED 2 0 2 2 +FRIGHTFUL 1 0 1 1 +FRIGATE 2 0 2 2 +FRIENDSHIP 1 0 1 1 +FRIENDS 8 0 8 8 +FRIENDLY 5 0 5 5 +FRIEND'S 2 0 2 2 +FRIDAY 1 0 1 1 +FRICTION 1 0 1 1 +FRETTING 1 0 1 1 +FRESHENS 1 0 1 1 +FRESH 6 0 6 6 +FREQUENTLY 3 0 3 3 +FREQUENTER 1 0 1 1 +FREQUENT 3 0 3 3 +FRENZY 1 0 1 1 +FRENCH 11 0 11 11 +FREELY 2 0 2 2 +FREEDOM 4 0 4 4 +FREED 1 0 1 1 +FREE 18 0 18 18 +FRECKLES 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCIS 3 0 3 3 +FRANCE 6 0 6 6 +FRAMEWORK 1 0 1 1 +FRAMED 1 0 1 1 +FRAME 4 0 4 4 +FRAIL 1 0 1 1 +FRAGRANCE 1 0 1 1 +FRAGMENT 2 0 2 2 +FRACTURED 1 0 1 1 +FRACTURE 1 0 1 1 +FOX 1 0 1 1 +FOURTH 2 0 2 2 +FOURTEEN 1 0 1 1 +FOUR 12 0 12 12 +FOUNTAINS 1 0 1 1 +FOUNDING 1 0 1 1 +FOUNDATION 1 0 1 1 +FOUL 1 0 1 1 +FOUGHT 1 0 1 1 +FORWARD 5 0 5 5 +FORTY 2 0 2 2 +FORTUNES 2 0 2 2 +FORTUNE 8 0 8 8 +FORTUNATELY 1 0 1 1 +FORTUNATE 2 0 2 2 +FORTUITOUS 1 0 1 1 +FORTNIGHT 1 0 1 1 +FORTIFIED 2 0 2 2 +FORTHWITH 3 0 3 3 +FORTH 9 0 9 9 +FORT 2 0 2 2 +FORSAKE 1 0 1 1 +FORMS 6 0 6 6 +FORMING 1 0 1 1 +FORMIDABLE 1 0 1 1 +FORMER 7 0 7 7 +FORMED 7 0 7 7 +FORMALITY 1 0 1 1 +FORMALITIES 1 0 1 1 +FORMAL 1 0 1 1 +FORM 12 0 12 12 +FORKED 1 0 1 1 +FORGOTTEN 4 0 4 4 +FORGOT 1 0 1 1 +FORGIVE 6 0 6 6 +FORGING 1 0 1 1 +FORGETTING 1 0 1 1 +FORGETFULNESS 1 0 1 1 +FORGET 6 0 6 6 +FORGED 3 0 3 3 +FORGE 1 0 1 1 +FORETOLD 2 0 2 2 +FOREST 6 0 6 6 +FORESEEING 1 0 1 1 +FORESAW 1 0 1 1 +FOREIGNER 1 0 1 1 +FOREIGN 1 0 1 1 +FOREHEAD 1 0 1 1 +FOREFINGER 1 0 1 1 +FORCIBLE 1 0 1 1 +FORCES 2 0 2 2 +FORCED 1 0 1 1 +FORCE 17 0 17 17 +FORBES 1 0 1 1 +FOOTNOTE 2 0 2 2 +FOOTMEN 1 0 1 1 +FOOTMAN 1 0 1 1 +FOOT 9 0 9 9 +FOOLS 1 0 1 1 +FOOLISHLY 2 0 2 2 +FOOLISH 3 0 3 3 +FOOD 1 0 1 1 +FONDNESS 1 0 1 1 +FOND 5 0 5 5 +FOLLOWS 3 0 3 3 +FOLLOWING 4 0 4 4 +FOLLOWER 1 0 1 1 +FOLLOW 7 0 7 7 +FOLLIES 1 0 1 1 +FOLIAGE 1 0 1 1 +FOLDED 1 0 1 1 +FOLD 2 0 2 2 +FOES 2 0 2 2 +FOCUS 1 0 1 1 +FOAM 4 0 4 4 +FLY 4 0 4 4 +FLUX 2 0 2 2 +FLUSHED 3 0 3 3 +FLUKES 1 0 1 1 +FLOYD'S 1 0 1 1 +FLOWERS 11 0 11 11 +FLOWED 1 0 1 1 +FLOW 2 0 2 2 +FLOURISH 1 0 1 1 +FLOORS 1 0 1 1 +FLOOR 10 0 10 10 +FLOOD 2 0 2 2 +FLOATED 1 0 1 1 +FLOAT 1 0 1 1 +FLINT 1 0 1 1 +FLING 1 0 1 1 +FLIES 1 0 1 1 +FLICKER 2 0 2 2 +FLEW 1 0 1 1 +FLESH 5 0 5 5 +FLEETING 1 0 1 1 +FLEECY 1 0 1 1 +FLEECES 1 0 1 1 +FLEECE 3 0 3 3 +FLED 3 0 3 3 +FLAX 1 0 1 1 +FLATTERY 1 0 1 1 +FLATTERS 2 0 2 2 +FLATTERING 1 0 1 1 +FLATTERED 1 0 1 1 +FLAT 1 0 1 1 +FLASHED 3 0 3 3 +FLASH 3 0 3 3 +FLAPS 1 0 1 1 +FLAP 2 0 2 2 +FLANKED 1 0 1 1 +FLAMES 1 0 1 1 +FLAMED 2 0 2 2 +FLAME 3 0 3 3 +FLAGSTONES 1 0 1 1 +FLAG 1 0 1 1 +FIXES 1 0 1 1 +FIXED 3 0 3 3 +FIX 2 0 2 2 +FITZOOTH 7 0 7 7 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FITLY 1 0 1 1 +FISTS 2 0 2 2 +FISHES 3 0 3 3 +FISHERMAN 1 0 1 1 +FISH 1 0 1 1 +FIRMNESS 1 0 1 1 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRESIDES 1 0 1 1 +FIRESIDE 1 0 1 1 +FIRES 1 0 1 1 +FIREPLACE 2 0 2 2 +FIRED 1 0 1 1 +FINISHED 4 0 4 4 +FINISH 1 0 1 1 +FINGERS 6 0 6 6 +FINGER 2 0 2 2 +FINEST 1 0 1 1 +FINER 1 0 1 1 +FINED 1 0 1 1 +FINDS 2 0 2 2 +FINDING 3 0 3 3 +FINANCIAL 1 0 1 1 +FINALE 1 0 1 1 +FINAL 5 0 5 5 +FILE 1 0 1 1 +FIGURES 4 0 4 4 +FIGURED 1 0 1 1 +FIGURE 6 0 6 6 +FIGHTING 4 0 4 4 +FIFTY 6 0 6 6 +FIFTH 1 0 1 1 +FIFTEENTH 2 0 2 2 +FIFTEEN 1 0 1 1 +FIERCELY 1 0 1 1 +FIERCE 4 0 4 4 +FIELDS 4 0 4 4 +FIELD 6 0 6 6 +FEWER 1 0 1 1 +FEW 28 0 28 28 +FEVER 2 0 2 2 +FEUDS 1 0 1 1 +FETCH 1 0 1 1 +FESTIVE 1 0 1 1 +FERVENT 1 0 1 1 +FENCE 1 0 1 1 +FEMININE 1 0 1 1 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 3 0 3 3 +FELLOW'S 1 0 1 1 +FELLOW 9 0 9 9 +FELLER 1 0 1 
1 +FELICITY 2 0 2 2 +FEET 11 0 11 11 +FEELINGS 3 0 3 3 +FEEDING 1 0 1 1 +FEEDER 1 0 1 1 +FEED 2 0 2 2 +FEEBLE 2 0 2 2 +FEDERAL 3 0 3 3 +FED 1 0 1 1 +FEBRUARY 1 0 1 1 +FEATURE 1 0 1 1 +FEATHERS 1 0 1 1 +FEASTED 1 0 1 1 +FEAST 3 0 3 3 +FEASIBLE 1 0 1 1 +FEARS 3 0 3 3 +FEARLESS 1 0 1 1 +FEARING 2 0 2 2 +FEARFUL 1 0 1 1 +FEAREST 1 0 1 1 +FAVORABLY 1 0 1 1 +FAULTS 1 0 1 1 +FAULTLESS 1 0 1 1 +FAULT 2 0 2 2 +FATTENED 1 0 1 1 +FATIGUE 2 0 2 2 +FATHOMS 6 0 6 6 +FATHOM 1 0 1 1 +FATHERS 2 0 2 2 +FATHER'S 4 0 4 4 +FATHER 28 0 28 28 +FATALITY 2 0 2 2 +FAT 3 0 3 3 +FASTEST 1 0 1 1 +FASTEN 1 0 1 1 +FAST 7 0 7 7 +FASHIONED 1 0 1 1 +FASHIONABLE 1 0 1 1 +FASCINATION 2 0 2 2 +FARMS 1 0 1 1 +FARMERS 1 0 1 1 +FARMER'S 1 0 1 1 +FARMER 5 0 5 5 +FAREWELL 2 0 2 2 +FANTASY 1 0 1 1 +FANNING 1 0 1 1 +FANCY 3 0 3 3 +FANCIES 2 0 2 2 +FANCIED 2 0 2 2 +FANATIC 1 0 1 1 +FAN 2 0 2 2 +FAMOUSLY 2 0 2 2 +FAMOUS 3 0 3 3 +FAMILY 16 0 16 16 +FAMILIES 3 0 3 3 +FAMILIARITY 3 0 3 3 +FAMILIAR 4 0 4 4 +FAME 2 0 2 2 +FALSE 6 0 6 6 +FALLS 5 0 5 5 +FALLING 2 0 2 2 +FALCONS 1 0 1 1 +FALCON 1 0 1 1 +FAITHFUL 1 0 1 1 +FAITH 9 0 9 9 +FAIRLY 4 0 4 4 +FAINTNESS 1 0 1 1 +FAINTLY 3 0 3 3 +FAINTING 4 0 4 4 +FAINT 3 0 3 3 +FAIN 1 0 1 1 +FAILURE 2 0 2 2 +FAILS 1 0 1 1 +FAILING 1 0 1 1 +FAILED 2 0 2 2 +FAIL 4 0 4 4 +FADES 1 0 1 1 +FADED 1 0 1 1 +FADE 4 0 4 4 +FACULTIES 1 0 1 1 +FACTS 4 0 4 4 +FACTORS 1 0 1 1 +FACTOR 1 0 1 1 +FACTIONS 2 0 2 2 +FACTION 1 0 1 1 +FACT 23 0 23 23 +FACILITATED 1 0 1 1 +FACILITATE 1 0 1 1 +FACES 4 0 4 4 +FACED 3 0 3 3 +FACE 29 0 29 29 +FABULOUS 1 0 1 1 +EYES 44 0 44 44 +EYELIDS 1 0 1 1 +EYED 1 0 1 1 +EXULTING 1 0 1 1 +EXULTATION 1 0 1 1 +EXTREMELY 2 0 2 2 +EXTREME 1 0 1 1 +EXTRAORDINARY 2 0 2 2 +EXTRACT 1 0 1 1 +EXTRA 1 0 1 1 +EXTINGUISHED 2 0 2 2 +EXTINCTION 1 0 1 1 +EXTINCT 1 0 1 1 +EXTERIOR 1 0 1 1 +EXTENT 6 0 6 6 +EXTENSIVE 1 0 1 1 +EXTENDED 3 0 3 3 +EXTEND 2 0 2 2 +EXQUISITE 3 0 3 3 +EXPULSION 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSIVE 1 0 1 1 +EXPRESSIONS 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSING 2 0 2 2 +EXPRESSED 4 0 4 4 +EXPRESS 4 0 4 4 +EXPOSURE 1 0 1 1 +EXPONENT 2 0 2 2 +EXPLOSION 1 0 1 1 +EXPLORE 2 0 2 2 +EXPLOITS 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINED 1 0 1 1 +EXPLAIN 4 0 4 4 +EXPERIMENTALLY 1 0 1 1 +EXPERIENCING 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPERIENCE 7 0 7 7 +EXPENSIVE 1 0 1 1 +EXPENDITURE 4 0 4 4 +EXPEDITION 4 0 4 4 +EXPECTED 3 0 3 3 +EXPECTATIONS 2 0 2 2 +EXPECT 4 0 4 4 +EXPANDED 1 0 1 1 +EXOTICS 1 0 1 1 +EXISTING 3 0 3 3 +EXISTENT 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 4 0 4 4 +EXILE 1 0 1 1 +EXHORT 1 0 1 1 +EXHIBITS 1 0 1 1 +EXHIBITION 2 0 2 2 +EXHIBITED 1 0 1 1 +EXHIBIT 2 0 2 2 +EXHAUSTED 1 0 1 1 +EXHALE 1 0 1 1 +EXERTIONS 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISING 1 0 1 1 +EXERCISED 1 0 1 1 +EXERCISE 1 0 1 1 +EXEMPLIFIES 1 0 1 1 +EXEMPLARY 1 0 1 1 +EXECUTIVE 1 0 1 1 +EXECUTED 1 0 1 1 +EXCLUDED 2 0 2 2 +EXCLAIMED 3 0 3 3 +EXCITING 2 0 2 2 +EXCITEMENT 3 0 3 3 +EXCITE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONS 1 0 1 1 +EXCEPTION 2 0 2 2 +EXCEPT 6 0 6 6 +EXCELLENT 5 0 5 5 +EXCELLENCY'S 1 0 1 1 +EXCELLENCY 2 0 2 2 +EXCELLENCE 1 0 1 1 +EXCEEDING 1 0 1 1 +EXCEEDED 1 0 1 1 +EXCEED 1 0 1 1 +EXAMPLE 2 0 2 2 +EXAMINED 4 0 4 4 +EXAMINE 4 0 4 4 +EXAMINATION 8 0 8 8 +EXALTED 1 0 1 1 +EXALT 1 0 1 1 +EXAGGERATED 1 0 1 1 +EXACTLY 8 0 8 8 +EXACT 5 0 5 5 +EX 2 0 2 2 +EVOLVING 1 0 1 1 +EVOLVED 1 0 1 1 +EVOKED 1 0 1 1 +EVOKE 1 0 1 1 +EVIL 6 0 6 6 +EVIDENTLY 4 0 4 4 +EVIDENT 5 0 5 5 +EVIDENCE 5 0 5 5 +EVERYWHERE 7 0 7 7 +EVERYTHING 16 0 16 16 +EVERYBODY 7 0 7 7 +EVERLASTING 2 0 2 2 +EVENTS 8 0 8 8 +EVENT 4 0 4 4 +EVENLY 2 
0 2 2 +EVASIVELY 1 0 1 1 +EVAPORATION 1 0 1 1 +EVADED 1 0 1 1 +EVA'S 1 0 1 1 +EUROPE 3 0 3 3 +EUCHARIST 1 0 1 1 +ETHICAL 1 0 1 1 +ETERNITY 2 0 2 2 +ETERNAL 2 0 2 2 +ETCHINGS 1 0 1 1 +ET 3 0 3 3 +ESTRANGEMENT 1 0 1 1 +ESTIMATE 1 0 1 1 +ESTABLISHED 3 0 3 3 +ESSEX 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 2 0 2 2 +ESSENCE 1 0 1 1 +ESQUIRE 2 0 2 2 +ESPECIALLY 6 0 6 6 +ESCORT 4 0 4 4 +ESCAPED 1 0 1 1 +ESCAPE 4 0 4 4 +ESCAPADES 1 0 1 1 +ERRORS 1 0 1 1 +ERRONEOUS 2 0 2 2 +ERRING 1 0 1 1 +ERRAND 2 0 2 2 +ERR 1 0 1 1 +ERIE 1 0 1 1 +ERECTED 1 0 1 1 +ERECT 1 0 1 1 +ERA 1 0 1 1 +EQUATION 1 0 1 1 +EQUALS 1 0 1 1 +EQUALLY 4 0 4 4 +EQUAL 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLE 3 0 3 3 +EPISODE 1 0 1 1 +ENVY 1 0 1 1 +ENVIRONMENT 1 0 1 1 +ENVELOPMENT 1 0 1 1 +ENTRUSTED 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTRANCED 1 0 1 1 +ENTIRELY 6 0 6 6 +ENTIRE 2 0 2 2 +ENTHUSIASTS 1 0 1 1 +ENTHUSIASTIC 1 0 1 1 +ENTHUSIASM 1 0 1 1 +ENTERTAINS 1 0 1 1 +ENTERTAINMENT 3 0 3 3 +ENTERTAIN 2 0 2 2 +ENTERS 1 0 1 1 +ENTERPRISE 2 0 2 2 +ENTERING 2 0 2 2 +ENTANGLED 1 0 1 1 +ENOUGH 20 0 20 20 +ENORMOUSLY 2 0 2 2 +ENORMOUS 1 0 1 1 +ENNIS 1 0 1 1 +ENLISTED 1 0 1 1 +ENLIST 1 0 1 1 +ENJOYMENT 1 0 1 1 +ENJOYED 1 0 1 1 +ENJOY 2 0 2 2 +ENIGMA 1 0 1 1 +ENGORGED 1 0 1 1 +ENGLISHMAN 3 0 3 3 +ENGLISH 12 0 12 12 +ENGLAND 10 0 10 10 +ENGINEERS 2 0 2 2 +ENGINEER 4 0 4 4 +ENGINE 6 0 6 6 +ENGENDERS 1 0 1 1 +ENGAGEMENTS 1 0 1 1 +ENGAGED 5 0 5 5 +ENGAGE 1 0 1 1 +ENFORCED 1 0 1 1 +ENFORCE 3 0 3 3 +ENERGY 3 0 3 3 +ENEMY'S 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 3 0 3 3 +ENDURES 1 0 1 1 +ENDURED 1 0 1 1 +ENDURE 1 0 1 1 +ENDS 1 0 1 1 +ENDOWED 1 0 1 1 +ENDLESS 1 0 1 1 +ENDEAVOURED 1 0 1 1 +ENDEAVORING 1 0 1 1 +END 18 0 18 18 +ENCYCLOPAEDIA 1 0 1 1 +ENCOURAGED 1 0 1 1 +ENCOURAGE 2 0 2 2 +ENCLOSE 1 0 1 1 +ENACTED 1 0 1 1 +ENABLES 2 0 2 2 +ENABLED 3 0 3 3 +EMULATION 1 0 1 1 +EMPTY 7 0 7 7 +EMPLOYMENTS 2 0 2 2 +EMPLOYMENT 1 0 1 1 +EMPLOYING 1 0 1 1 +EMPLOYERS 1 0 1 1 +EMPLOYER 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPIRE 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMOTIONLESS 1 0 1 1 +EMOTION 1 0 1 1 +EMINENCES 1 0 1 1 +EMERGENCY 1 0 1 1 +EMERGENCE 2 0 2 2 +EMERALD 1 0 1 1 +EMBROIDERED 2 0 2 2 +EMBRACING 2 0 2 2 +EMBRACE 2 0 2 2 +EMBODIED 1 0 1 1 +EMBLEM 1 0 1 1 +EMBITTERING 1 0 1 1 +EMBITTERED 1 0 1 1 +EMBERS 1 0 1 1 +EMBARRASS 1 0 1 1 +ELSEWHERE 4 0 4 4 +ELSE 7 0 7 7 +ELOQUENT 1 0 1 1 +ELMHURST 2 0 2 2 +ELIZABETH 1 0 1 1 +ELIZA 3 0 3 3 +ELICITED 1 0 1 1 +ELF 1 0 1 1 +ELEVEN 4 0 4 4 +ELEMENTS 7 0 7 7 +ELEMENTARY 3 0 3 3 +ELEMENT 4 0 4 4 +ELEGANT 1 0 1 1 +ELEGANCE 2 0 2 2 +ELECTROPLATING 1 0 1 1 +ELECTROLYTIC 2 0 2 2 +ELECTRICITY 5 0 5 5 +ELECTRICAL 2 0 2 2 +ELECTRIC 4 0 4 4 +ELECTIONEERING 1 0 1 1 +ELECTION 3 0 3 3 +ELECTING 1 0 1 1 +ELECTED 1 0 1 1 +ELDERS 1 0 1 1 +ELDERLY 1 0 1 1 +ELDER 2 0 2 2 +ELAPSED 1 0 1 1 +EJACULATIONS 1 0 1 1 +EJACULATED 2 0 2 2 +EITHER'S 1 0 1 1 +EIGHTY 3 0 3 3 +EIGHTH 3 0 3 3 +EIGHTEENTH 2 0 2 2 +EIGHTEEN 14 0 14 14 +EIGHT 7 0 7 7 +EGYPTIAN 1 0 1 1 +EGYPT 1 0 1 1 +EFFORTS 4 0 4 4 +EFFORT 11 0 11 11 +EFFICIENCY 1 0 1 1 +EFFECTUALLY 1 0 1 1 +EFFECTUAL 2 0 2 2 +EFFECTS 3 0 3 3 +EFFECTIVENESS 1 0 1 1 +EFFECTIVELY 1 0 1 1 +EFFECTIVE 2 0 2 2 +EFFECTING 1 0 1 1 +EDWARD 3 0 3 3 +EDUCATION 5 0 5 5 +EDUCATED 2 0 2 2 +EDITORS 1 0 1 1 +EDITOR 1 0 1 1 +EDISONIA 1 0 1 1 +EDISON 16 0 16 16 +EDIFICE 1 0 1 1 +EDICTS 1 0 1 1 +EDICT 1 0 1 1 +EDGES 1 0 1 1 +EDGED 1 0 1 1 +EDGE 5 0 5 5 +EDDYING 1 0 1 1 +ECONOMY 2 0 2 2 +ECONOMIC 3 0 3 3 +ECHOING 1 0 1 1 +ECHOES 1 0 1 1 +ECCLESIASTICAL 1 0 1 1 +ECCENTRICITY 1 0 1 1 +EATEN 1 0 1 1 +EAT 5 0 5 5 +EASY 14 0 14 14 +EASTERLY'S 1 0 1 1 +EAST 
4 0 4 4 +EASILY 10 0 10 10 +EARTHLY 1 0 1 1 +EARTH'S 3 0 3 3 +EARTH 17 0 17 17 +EARNESTLY 1 0 1 1 +EARNED 1 0 1 1 +EARLY 13 0 13 13 +EARLIER 6 0 6 6 +EAGLES 1 0 1 1 +EAGLE 1 0 1 1 +EAGERNESS 2 0 2 2 +EAGERLY 1 0 1 1 +EAGER 4 0 4 4 +EACH 24 0 24 24 +DYNAMO 2 0 2 2 +DYING 2 0 2 2 +DYIN 1 0 1 1 +DYE 1 0 1 1 +DWELLINGS 3 0 3 3 +DWELL 1 0 1 1 +DUTY 7 0 7 7 +DUTIES 8 0 8 8 +DUST 4 0 4 4 +DUSK 2 0 2 2 +DURATION 1 0 1 1 +DURABLE 1 0 1 1 +DUPLICATE 1 0 1 1 +DUMBFOUNDED 1 0 1 1 +DUMB 1 0 1 1 +DULY 1 0 1 1 +DULL 2 0 2 2 +DUG 1 0 1 1 +DUE 5 0 5 5 +DUDLEY 1 0 1 1 +DUDGEON 1 0 1 1 +DUCKS 1 0 1 1 +DUCKLINGS 1 0 1 1 +DUCHESS 3 0 3 3 +DUBIOUSLY 1 0 1 1 +DRY 4 0 4 4 +DRUNKENNESS 1 0 1 1 +DRUMS 1 0 1 1 +DROWNED 2 0 2 2 +DROPS 3 0 3 3 +DROPPING 1 0 1 1 +DROPPED 5 0 5 5 +DROP 3 0 3 3 +DROOPED 1 0 1 1 +DRIVING 1 0 1 1 +DRIVES 1 0 1 1 +DRIVEN 1 0 1 1 +DRINKS 1 0 1 1 +DRINK 4 0 4 4 +DRIFTS 1 0 1 1 +DRIED 1 0 1 1 +DREW 10 0 10 10 +DRESSES 1 0 1 1 +DRESSED 6 0 6 6 +DRESS 1 0 1 1 +DREDGED 1 0 1 1 +DREARY 1 0 1 1 +DREAMY 1 0 1 1 +DREAMT 1 0 1 1 +DREAMS 2 0 2 2 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 5 0 5 5 +DREADING 1 0 1 1 +DREADFUL 3 0 3 3 +DRAWS 2 0 2 2 +DRAWN 7 0 7 7 +DRAWING 2 0 2 2 +DRAW 4 0 4 4 +DRAUGHT 1 0 1 1 +DRAPERIES 1 0 1 1 +DRANK 1 0 1 1 +DRAMATIST'S 1 0 1 1 +DRAMATIST 1 0 1 1 +DRAMATIC 2 0 2 2 +DRAMA 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGOONS 1 0 1 1 +DRAGON'S 1 0 1 1 +DRAGGING 1 0 1 1 +DRAGGED 2 0 2 2 +DRAG 1 0 1 1 +DOZEN 4 0 4 4 +DOWNWARD 2 0 2 2 +DOVE 1 0 1 1 +DOUGLAS 4 0 4 4 +DOUGHY 1 0 1 1 +DOUGHNUTS 1 0 1 1 +DOUGH 1 0 1 1 +DOUBTS 2 0 2 2 +DOUBTLESS 2 0 2 2 +DOUBTINGLY 1 0 1 1 +DOUBTING 1 0 1 1 +DOUBTFULLY 1 0 1 1 +DOUBTFUL 2 0 2 2 +DOUBT 11 0 11 11 +DOUBLE 6 0 6 6 +DOTH 5 0 5 5 +DOST 3 0 3 3 +DOROTHY 1 0 1 1 +DORKING 1 0 1 1 +DORCAS 6 0 6 6 +DOORS 3 0 3 3 +DOOM 1 0 1 1 +DONNITHORNE 1 0 1 1 +DONKEY 1 0 1 1 +DONE 24 0 24 24 +DOMINIONS 1 0 1 1 +DOMINION 1 0 1 1 +DOMESTIC 2 0 2 2 +DOME 1 0 1 1 +DOLLS 2 0 2 2 +DOLLARS 7 0 7 7 +DOLL 2 0 2 2 +DOING 12 0 12 12 +DOGS 1 0 1 1 +DOGGED 1 0 1 1 +DOG 2 0 2 2 +DOESN'T 3 0 3 3 +DOCTRINES 3 0 3 3 +DOCTRINE 4 0 4 4 +DOCTORS 1 0 1 1 +DOCTOR 7 0 7 7 +DOBRYNA 3 0 3 3 +DIZZILY 1 0 1 1 +DIVORCE 1 0 1 1 +DIVISIONS 1 0 1 1 +DIVISION 2 0 2 2 +DIVINITY 1 0 1 1 +DIVING 4 0 4 4 +DIVINE 3 0 3 3 +DIVIDING 1 0 1 1 +DIVIDEND 1 0 1 1 +DIVIDED 4 0 4 4 +DIVIDE 2 0 2 2 +DIVERTING 1 0 1 1 +DIVERT 1 0 1 1 +DIVERSITY 1 0 1 1 +DISUSE 1 0 1 1 +DISUNITED 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRUSTING 2 0 2 2 +DISTRUSTFUL 1 0 1 1 +DISTRUST 1 0 1 1 +DISTRESSED 1 0 1 1 +DISTRESS 1 0 1 1 +DISTORTION 1 0 1 1 +DISTORTED 1 0 1 1 +DISTINGUISH 3 0 3 3 +DISTINCTLY 3 0 3 3 +DISTINCTIVE 1 0 1 1 +DISTINCTION 5 0 5 5 +DISTINCT 2 0 2 2 +DISTANT 4 0 4 4 +DISTANCE 6 0 6 6 +DISSENTERS 1 0 1 1 +DISSENTED 1 0 1 1 +DISQUIETUDE 1 0 1 1 +DISPUTE 3 0 3 3 +DISPOSITIONS 1 0 1 1 +DISPOSITION 3 0 3 3 +DISPOSES 1 0 1 1 +DISPOSED 3 0 3 3 +DISPOSE 1 0 1 1 +DISPOSAL 1 0 1 1 +DISPLEASURE 1 0 1 1 +DISPLAYING 1 0 1 1 +DISPLAYED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 3 0 3 3 +DISPENSE 1 0 1 1 +DISPENSATION 1 0 1 1 +DISPATCH 1 0 1 1 +DISPASSIONATE 1 0 1 1 +DISPARAGE 1 0 1 1 +DISOWN 1 0 1 1 +DISMISS 2 0 2 2 +DISMAY 1 0 1 1 +DISLOYAL 1 0 1 1 +DISLIKE 1 0 1 1 +DISK 1 0 1 1 +DISINCORPORATED 1 0 1 1 +DISHES 2 0 2 2 +DISH 3 0 3 3 +DISGUSTED 1 0 1 1 +DISGUST 3 0 3 3 +DISGUISE 2 0 2 2 +DISGRACE 3 0 3 3 +DISENGAGED 1 0 1 1 +DISEASED 1 0 1 1 +DISDAINFUL 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 1 0 1 1 +DISCUSS 2 0 2 2 +DISCREETLY 1 0 1 1 +DISCREET 1 0 1 1 +DISCOVERY 2 0 2 2 +DISCOVERERS 1 0 1 1 
+DISCOVERED 3 0 3 3 +DISCOVER 3 0 3 3 +DISCOURSE 2 0 2 2 +DISCOURAGED 1 0 1 1 +DISCOURAGE 2 0 2 2 +DISCOMFORT 1 0 1 1 +DISCLOSES 1 0 1 1 +DISCIPLINE 5 0 5 5 +DISCERN 1 0 1 1 +DISBURDENED 1 0 1 1 +DISASTER 1 0 1 1 +DISAPPOINTMENT 6 0 6 6 +DISAPPEARS 1 0 1 1 +DISAPPEARED 1 0 1 1 +DISAPPEAR 2 0 2 2 +DISADVANTAGE 1 0 1 1 +DISABILITIES 1 0 1 1 +DIRTY 1 0 1 1 +DIRECTLY 4 0 4 4 +DIRECTIONS 2 0 2 2 +DIRECTION 6 0 6 6 +DIRECTING 1 0 1 1 +DIRECTED 2 0 2 2 +DIRECT 8 0 8 8 +DIP 1 0 1 1 +DIOCLETIAN 1 0 1 1 +DINNER 8 0 8 8 +DINING 1 0 1 1 +DINE 1 0 1 1 +DIMLY 1 0 1 1 +DIMINUTION 2 0 2 2 +DIMINISH 1 0 1 1 +DIMENSIONS 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILEMMA 1 0 1 1 +DILATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGNITARIES 1 0 1 1 +DIGNIFIED 4 0 4 4 +DIGBY 1 0 1 1 +DIG 1 0 1 1 +DIFFUSED 1 0 1 1 +DIFFICULTIES 3 0 3 3 +DIFFICULT 11 0 11 11 +DIFFERS 2 0 2 2 +DIFFERENTLY 1 0 1 1 +DIFFERENTIATION 1 0 1 1 +DIFFERENT 15 0 15 15 +DIFFER 1 0 1 1 +DIES 1 0 1 1 +DIED 5 0 5 5 +DIE 4 0 4 4 +DIDN'T 12 0 12 12 +DICE 1 0 1 1 +DIAMONDS 1 0 1 1 +DIALOGUES 2 0 2 2 +DIALOGUE 3 0 3 3 +DIALECT 1 0 1 1 +DIAGRAMS 1 0 1 1 +DEWS 2 0 2 2 +DEVOUR 3 0 3 3 +DEVOTION 1 0 1 1 +DEVOTES 1 0 1 1 +DEVOTED 2 0 2 2 +DEVOTE 1 0 1 1 +DEVOLVE 1 0 1 1 +DEVOID 1 0 1 1 +DEVISING 1 0 1 1 +DEVIL 1 0 1 1 +DEVICES 2 0 2 2 +DEVELOPMENTS 1 0 1 1 +DEVELOPMENT 6 0 6 6 +DEVELOPED 2 0 2 2 +DEVELOP 1 0 1 1 +DETOUR 1 0 1 1 +DETESTS 1 0 1 1 +DETESTED 1 0 1 1 +DETESTABLE 1 0 1 1 +DETERMINING 1 0 1 1 +DETERMINED 5 0 5 5 +DETERMINE 1 0 1 1 +DETERMINATION 1 0 1 1 +DETECT 1 0 1 1 +DETAINED 2 0 2 2 +DETAILS 2 0 2 2 +DETAIL 1 0 1 1 +DETACHMENT 1 0 1 1 +DESTRUCTIVE 1 0 1 1 +DESTRUCTION 2 0 2 2 +DESTROYED 1 0 1 1 +DESTINY 1 0 1 1 +DESTINED 2 0 2 2 +DESPITE 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 3 0 3 3 +DESPAIRING 1 0 1 1 +DESPAIR 4 0 4 4 +DESOLATION 1 0 1 1 +DESOLATE 1 0 1 1 +DESK 2 0 2 2 +DESIROUS 1 0 1 1 +DESIRES 1 0 1 1 +DESIRED 3 0 3 3 +DESIRE 4 0 4 4 +DESIRABLE 1 0 1 1 +DESIGNERS 1 0 1 1 +DESIGNATED 1 0 1 1 +DESIGN 3 0 3 3 +DESERVES 1 0 1 1 +DESERVED 1 0 1 1 +DESERVE 1 0 1 1 +DESERTS 1 0 1 1 +DESERTED 1 0 1 1 +DESERT 1 0 1 1 +DESCRIPTIONS 1 0 1 1 +DESCRIPTION 2 0 2 2 +DESCRIBING 1 0 1 1 +DESCRIBED 2 0 2 2 +DESCRIBE 3 0 3 3 +DESCENDS 1 0 1 1 +DESCENDING 2 0 2 2 +DESCENDED 2 0 2 2 +DESCENDANTS 1 0 1 1 +DESCEND 4 0 4 4 +DERIVED 1 0 1 1 +DERIVATIVE 1 0 1 1 +DEPUTY 1 0 1 1 +DEPTHS 2 0 2 2 +DEPTH 4 0 4 4 +DEPRIVED 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRESSED 1 0 1 1 +DEPRECIATING 1 0 1 1 +DEPRECATION 3 0 3 3 +DEPRAVED 1 0 1 1 +DEPOSITION 1 0 1 1 +DEPORTMENT 1 0 1 1 +DEPENDS 1 0 1 1 +DEPENDENT 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 2 0 2 2 +DEPARTURE 4 0 4 4 +DEPARTMENT 1 0 1 1 +DEPARTING 2 0 2 2 +DEPARTED 1 0 1 1 +DEPART 1 0 1 1 +DENY 5 0 5 5 +DENUNCIATION 1 0 1 1 +DENSELY 1 0 1 1 +DENSE 2 0 2 2 +DENOTE 1 0 1 1 +DENIES 1 0 1 1 +DENIED 2 0 2 2 +DEMURELY 1 0 1 1 +DEMURE 1 0 1 1 +DEMONSTRATION 1 0 1 1 +DEMOCRATIC 2 0 2 2 +DEMANDS 1 0 1 1 +DEMANDED 2 0 2 2 +DELUSIVE 1 0 1 1 +DELIVERY 2 0 2 2 +DELIVERING 1 0 1 1 +DELIVERED 2 0 2 2 +DELIGHTFUL 4 0 4 4 +DELIGHTED 5 0 5 5 +DELIGHT 4 0 4 4 +DELICIOUSNESS 1 0 1 1 +DELICIOUS 1 0 1 1 +DELICACY 1 0 1 1 +DELIBERATIONS 1 0 1 1 +DELIBERATION 1 0 1 1 +DELEGATED 1 0 1 1 +DELAYED 2 0 2 2 +DELAY 3 0 3 3 +DELAWARES 1 0 1 1 +DELAWARE 1 0 1 1 +DEIGNED 1 0 1 1 +DEIGN 1 0 1 1 +DEGREES 3 0 3 3 +DEGREE 6 0 6 6 +DEFYING 1 0 1 1 +DEFTLY 1 0 1 1 +DEFORMITIES 1 0 1 1 +DEFINITION 3 0 3 3 +DEFINITE 2 0 2 2 +DEFINES 1 0 1 1 +DEFIED 1 0 1 1 +DEFIANCE 2 0 2 2 +DEFERENCE 2 0 2 2 +DEFENDS 1 0 1 1 +DEFENDERS 1 0 1 1 +DEFENDED 1 0 1 1 +DEFENDANT 1 0 1 
1 +DEFENCE 1 0 1 1 +DEFECT 1 0 1 1 +DEER 3 0 3 3 +DEEPLY 4 0 4 4 +DEEPENING 1 0 1 1 +DEEP 11 0 11 11 +DEEDS 1 0 1 1 +DECREES 1 0 1 1 +DECREED 1 0 1 1 +DECREE 1 0 1 1 +DECORATIVE 1 0 1 1 +DECORATED 3 0 3 3 +DECOMPOSE 1 0 1 1 +DECLINING 2 0 2 2 +DECLINED 2 0 2 2 +DECLARES 1 0 1 1 +DECLARED 2 0 2 2 +DECLARE 2 0 2 2 +DECLARATION 1 0 1 1 +DECISION 3 0 3 3 +DECIDEDLY 1 0 1 1 +DECIDED 3 0 3 3 +DECIDE 4 0 4 4 +DECEPTIVE 1 0 1 1 +DECEMBER 1 0 1 1 +DECEIVED 1 0 1 1 +DECEIT 1 0 1 1 +DECANTERS 1 0 1 1 +DECADES 1 0 1 1 +DEBATE 1 0 1 1 +DEATH 19 0 19 19 +DEARS 1 0 1 1 +DEARLY 2 0 2 2 +DEAREST 2 0 2 2 +DEAR 22 0 22 22 +DEALER 1 0 1 1 +DEAL 10 0 10 10 +DEAF 1 0 1 1 +DAZZLING 2 0 2 2 +DAZED 1 0 1 1 +DAYS 16 0 16 16 +DAYLIGHT 2 0 2 2 +DAWN 2 0 2 2 +DAVID 8 0 8 8 +DAUNTLESS 1 0 1 1 +DAUGHTERS 1 0 1 1 +DAUGHTER 9 0 9 9 +DATING 1 0 1 1 +DATE 1 0 1 1 +DATA 2 0 2 2 +DARTED 3 0 3 3 +DARLING 1 0 1 1 +DARKNESS 3 0 3 3 +DARKENED 1 0 1 1 +DARING 2 0 2 2 +DARED 3 0 3 3 +DARE 3 0 3 3 +DANGERS 1 0 1 1 +DANGEROUS 4 0 4 4 +DANGER 9 0 9 9 +DANES 1 0 1 1 +DANCING 2 0 2 2 +DANCES 1 0 1 1 +DANCERS 1 0 1 1 +DANCER 1 0 1 1 +DANCED 2 0 2 2 +DANCE 4 0 4 4 +DAMSEL 1 0 1 1 +DAMNED 1 0 1 1 +DAMNABLE 1 0 1 1 +DAMASK 1 0 1 1 +DAMAGING 1 0 1 1 +DAMAGE 2 0 2 2 +DAINTY 1 0 1 1 +DAILY 3 0 3 3 +DAFT 1 0 1 1 +CYRIL 2 0 2 2 +CYPRESS 1 0 1 1 +CYNTHIA'S 1 0 1 1 +CYNTHIA 3 0 3 3 +CYMBALS 1 0 1 1 +CUTTINGS 1 0 1 1 +CUT 5 0 5 5 +CUSTOMS 1 0 1 1 +CUSTOMER'S 1 0 1 1 +CUSTOMARILY 1 0 1 1 +CUSTOM 2 0 2 2 +CUSTODY 2 0 2 2 +CUSHIONED 1 0 1 1 +CURVE 2 0 2 2 +CURTAINS 1 0 1 1 +CURTAIN 3 0 3 3 +CURSE 1 0 1 1 +CURRENT 8 0 8 8 +CURLY 1 0 1 1 +CURIOUSLY 1 0 1 1 +CURIOUS 4 0 4 4 +CURIOSITY 2 0 2 2 +CURBSTONE 1 0 1 1 +CUPS 1 0 1 1 +CUPBOARD 2 0 2 2 +CUP 3 0 3 3 +CUNNING 3 0 3 3 +CUMBERLAND'S 1 0 1 1 +CULTURE 4 0 4 4 +CULTIVATING 1 0 1 1 +CULTIVATE 1 0 1 1 +CULPRIT 1 0 1 1 +CULMINATING 2 0 2 2 +CUFFS 1 0 1 1 +CUB 1 0 1 1 +CRYSTALLIZE 1 0 1 1 +CRYING 1 0 1 1 +CRY 5 0 5 5 +CRUSHED 1 0 1 1 +CRUSH 3 0 3 3 +CRUMBLED 1 0 1 1 +CRUMBLE 1 0 1 1 +CRUISING 1 0 1 1 +CRUELTY 2 0 2 2 +CRUEL 1 0 1 1 +CRUCIFIX 2 0 2 2 +CRUCIFIED 1 0 1 1 +CROWNS 1 0 1 1 +CROWNING 2 0 2 2 +CROWN 6 0 6 6 +CROWDING 1 0 1 1 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSLY 1 0 1 1 +CROSSING 1 0 1 1 +CROSSED 3 0 3 3 +CROPS 1 0 1 1 +CROP 1 0 1 1 +CROOKED 3 0 3 3 +CRITICISM 1 0 1 1 +CRITICALLY 1 0 1 1 +CRISIS 1 0 1 1 +CRIMSON 1 0 1 1 +CRIMINAL 3 0 3 3 +CRIME 3 0 3 3 +CRIES 3 0 3 3 +CRIED 23 0 23 23 +CRESTED 1 0 1 1 +CREPT 1 0 1 1 +CREEPING 2 0 2 2 +CREEP 1 0 1 1 +CREEK 2 0 2 2 +CREED 1 0 1 1 +CREDIT 2 0 2 2 +CREATURES 2 0 2 2 +CREATURE 8 0 8 8 +CREATOR 1 0 1 1 +CREATIVE 1 0 1 1 +CREATIONS 1 0 1 1 +CREATION 2 0 2 2 +CREATING 1 0 1 1 +CREATES 1 0 1 1 +CREATED 2 0 2 2 +CREATE 3 0 3 3 +CREAM 1 0 1 1 +CREAKED 1 0 1 1 +CRAZY 2 0 2 2 +CRAWLING 1 0 1 1 +CRAWL 1 0 1 1 +CRANED 1 0 1 1 +CRANE 1 0 1 1 +CRAMPNESS 1 0 1 1 +CRAMPED 1 0 1 1 +CRACKING 1 0 1 1 +CRACKED 2 0 2 2 +COZIER 1 0 1 1 +COWARDLY 1 0 1 1 +COWARD 1 0 1 1 +COW 2 0 2 2 +COVERT 1 0 1 1 +COVERING 1 0 1 1 +COVERED 2 0 2 2 +COVER 2 0 2 2 +COVENANTERS 5 0 5 5 +COUSINS 3 0 3 3 +COUSIN'S 2 0 2 2 +COUSIN 7 0 7 7 +COURTIERS 2 0 2 2 +COURTESY 2 0 2 2 +COURAGEOUS 1 0 1 1 +COURAGE 6 0 6 6 +COUPLE 1 0 1 1 +COUNTY 7 0 7 7 +COUNTRIES 1 0 1 1 +COUNTING 1 0 1 1 +COUNTERPART 1 0 1 1 +COUNTERFEITED 1 0 1 1 +COUNTERACT 1 0 1 1 +COUNTENANCE 3 0 3 3 +COUNT 15 0 15 15 +COUCH 1 0 1 1 +COTTON 12 0 12 12 +COTTAGE 2 0 2 2 +COSTUME 2 0 2 2 +CORRUPTION 1 0 1 1 +CORRIDOR 1 0 1 1 +CORRESPONDING 1 0 1 1 +CORRESPOND 1 0 1 1 +CORRECTLY 1 0 1 1 +CORRECTED 1 0 1 1 +CORRECT 3 0 3 3 +CORNERS 4 0 
4 4 +CORNER 13 0 13 13 +CORMORANT 1 0 1 1 +CORDIALLY 1 0 1 1 +CORDIALITY 1 0 1 1 +CORAL 1 0 1 1 +COQUETRY 1 0 1 1 +COPY 2 0 2 2 +COPPER 1 0 1 1 +COPIED 2 0 2 2 +COOKERY 2 0 2 2 +COOKED 1 0 1 1 +CONVULSION 1 0 1 1 +CONVIVIALITY 1 0 1 1 +CONVINCING 2 0 2 2 +CONVINCED 2 0 2 2 +CONVICTIONS 2 0 2 2 +CONVICTION 2 0 2 2 +CONVEYED 1 0 1 1 +CONVEXITY 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSION 1 0 1 1 +CONVERSING 1 0 1 1 +CONVERSE 2 0 2 2 +CONVERSATIONS 1 0 1 1 +CONVERSATIONAL 1 0 1 1 +CONVERSATION 6 0 6 6 +CONVENTIONALITY 1 0 1 1 +CONVENTIONAL 1 0 1 1 +CONVENTION 1 0 1 1 +CONTROLLING 1 0 1 1 +CONTRIVED 2 0 2 2 +CONTRIVANCE 2 0 2 2 +CONTRITION 1 0 1 1 +CONTRITE 1 0 1 1 +CONTRIBUTE 1 0 1 1 +CONTRASTING 1 0 1 1 +CONTRAST 4 0 4 4 +CONTRARY 5 0 5 5 +CONTRADICTIONS 1 0 1 1 +CONTRACTION 2 0 2 2 +CONTINUOUSLY 2 0 2 2 +CONTINUOUS 1 0 1 1 +CONTINUED 14 0 14 14 +CONTINUE 1 0 1 1 +CONTINUALLY 2 0 2 2 +CONTINUAL 3 0 3 3 +CONTINGENCY 1 0 1 1 +CONTINENT 1 0 1 1 +CONTESTED 1 0 1 1 +CONTEST 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTMENT 1 0 1 1 +CONTENTEDLY 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPTIBLE 1 0 1 1 +CONTEMPT 2 0 2 2 +CONTEMPORARY 1 0 1 1 +CONTEMPLATION 1 0 1 1 +CONTEMPLATED 1 0 1 1 +CONTAMINATION 1 0 1 1 +CONTAMINATED 1 0 1 1 +CONTAINS 1 0 1 1 +CONTAINERS 1 0 1 1 +CONTAGION 1 0 1 1 +CONSUMPTION 13 0 13 13 +CONSUMERS 2 0 2 2 +CONSUMER'S 1 0 1 1 +CONSUMER 5 0 5 5 +CONSUMED 1 0 1 1 +CONSUME 2 0 2 2 +CONSULTED 1 0 1 1 +CONSULTATION 1 0 1 1 +CONSULT 1 0 1 1 +CONSTRUCTION 4 0 4 4 +CONSTRUCTED 1 0 1 1 +CONSTRAINEDLY 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTITUTION 3 0 3 3 +CONSTITUTES 1 0 1 1 +CONSTITUTED 1 0 1 1 +CONSTITUTE 1 0 1 1 +CONSTANT 3 0 3 3 +CONSTANCY 1 0 1 1 +CONSPIRACY 2 0 2 2 +CONSPICUOUS 8 0 8 8 +CONSOLE 1 0 1 1 +CONSOLATION 1 0 1 1 +CONSISTENTLY 1 0 1 1 +CONSIDERING 1 0 1 1 +CONSIDERED 5 0 5 5 +CONSIDERATIONS 1 0 1 1 +CONSIDERABLY 1 0 1 1 +CONSERVATION 1 0 1 1 +CONSEQUENTLY 1 0 1 1 +CONSEQUENT 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 5 0 5 5 +CONSENT 4 0 4 4 +CONSEIL 6 0 6 6 +CONSECRATED 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIENCES 1 0 1 1 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 1 0 1 1 +CONNECTED 3 0 3 3 +CONNECT 2 0 2 2 +CONJURATION 1 0 1 1 +CONJUNCTURE 1 0 1 1 +CONJECTURE 1 0 1 1 +CONGRESS 1 0 1 1 +CONGREGATED 1 0 1 1 +CONGRATULATIONS 1 0 1 1 +CONGRATULATION 1 0 1 1 +CONGRATULATE 1 0 1 1 +CONGO 1 0 1 1 +CONGENIAL 1 0 1 1 +CONFUSION 2 0 2 2 +CONFUSES 1 0 1 1 +CONFUSED 3 0 3 3 +CONFOUNDEDLY 1 0 1 1 +CONFLICTING 1 0 1 1 +CONFLICT 3 0 3 3 +CONFISCATED 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 3 0 3 3 +CONFINED 2 0 2 2 +CONFIDENT 1 0 1 1 +CONFIDENCE 7 0 7 7 +CONFIDE 1 0 1 1 +CONFIDANTS 1 0 1 1 +CONFESSION 1 0 1 1 +CONFESSED 1 0 1 1 +CONFESS 4 0 4 4 +CONFERS 1 0 1 1 +CONFEDERATE 1 0 1 1 +CONDUCTS 1 0 1 1 +CONDUCTORS 2 0 2 2 +CONDUCTED 1 0 1 1 +CONDUCIVE 1 0 1 1 +CONDITIONS 3 0 3 3 +CONDITION 11 0 11 11 +CONDENSED 1 0 1 1 +CONDENSATION 1 0 1 1 +CONDEMNATION 2 0 2 2 +CONCUR 1 0 1 1 +CONCOURSE 1 0 1 1 +CONCORD 1 0 1 1 +CONCLUSION 2 0 2 2 +CONCERTING 1 0 1 1 +CONCERNING 4 0 4 4 +CONCERNED 8 0 8 8 +CONCEPTIONS 2 0 2 2 +CONCEPTION 2 0 2 2 +CONCEPT 1 0 1 1 +CONCEIVED 1 0 1 1 +CONCEIVE 2 0 2 2 +CONCEALING 1 0 1 1 +CONCEALED 1 0 1 1 +CONCEAL 1 0 1 1 +COMRADES 3 0 3 3 +COMPULSIVE 1 0 1 1 +COMPULSION 1 0 1 1 +COMPREHENSIVE 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPREHEND 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 2 0 2 2 +COMPORT 1 0 1 1 +COMPONENT 1 0 1 1 +COMPLY 2 0 2 2 +COMPLIMENTARY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICATED 1 0 1 1 +COMPLIANCE 2 0 2 2 +COMPLEXION 2 0 2 2 +COMPLETELY 3 0 3 3 +COMPLETED 2 0 2 2 +COMPLETE 
2 0 2 2 +COMPLEMENT 1 0 1 1 +COMPLAINTS 1 0 1 1 +COMPLAINT 1 0 1 1 +COMPLAINING 1 0 1 1 +COMPLAINEST 1 0 1 1 +COMPLAIN 2 0 2 2 +COMPLACENTLY 1 0 1 1 +COMPLACENCY 1 0 1 1 +COMPETITION 1 0 1 1 +COMPETE 1 0 1 1 +COMPENSATION 1 0 1 1 +COMPELS 1 0 1 1 +COMPELLED 2 0 2 2 +COMPEL 1 0 1 1 +COMPASS 1 0 1 1 +COMPARISON 1 0 1 1 +COMPARED 3 0 3 3 +COMPARE 1 0 1 1 +COMPARATIVELY 2 0 2 2 +COMPARATIVE 1 0 1 1 +COMPANY 11 0 11 11 +COMPANIONSHIP 2 0 2 2 +COMPANIONS 2 0 2 2 +COMPANIONLESS 1 0 1 1 +COMPANION 5 0 5 5 +COMPANIES 3 0 3 3 +COMPACT 1 0 1 1 +COMMUNITY 3 0 3 3 +COMMUNITIES 1 0 1 1 +COMMUNION 1 0 1 1 +COMMUNICATED 2 0 2 2 +COMMUNICATE 1 0 1 1 +COMMOTION 1 0 1 1 +COMMONLY 1 0 1 1 +COMMITTING 1 0 1 1 +COMMITTEE 3 0 3 3 +COMMITTED 2 0 2 2 +COMMITTAL 1 0 1 1 +COMMITS 1 0 1 1 +COMMIT 1 0 1 1 +COMMISSIONS 1 0 1 1 +COMMISSIONERS 1 0 1 1 +COMMISSION 1 0 1 1 +COMMISERATION 1 0 1 1 +COMMERCIAL 2 0 2 2 +COMMENTS 1 0 1 1 +COMMENTED 1 0 1 1 +COMMENTARY 2 0 2 2 +COMMENT 1 0 1 1 +COMMENDED 1 0 1 1 +COMMEND 1 0 1 1 +COMMENCEMENT 1 0 1 1 +COMMENCE 1 0 1 1 +COMMANDMENT 2 0 2 2 +COMMANDERS 1 0 1 1 +COMMANDER 2 0 2 2 +COMMANDED 1 0 1 1 +COMMAND 2 0 2 2 +COMFORTS 2 0 2 2 +COMFORTING 1 0 1 1 +COMFORTED 2 0 2 2 +COMFORTABLE 3 0 3 3 +COMFORT 8 0 8 8 +COMETH 1 0 1 1 +COMES 10 0 10 10 +COMELY 1 0 1 1 +COMEDY 1 0 1 1 +COMEDIES 2 0 2 2 +COME 51 0 51 51 +COMBINED 2 0 2 2 +COMBINE 1 0 1 1 +COMBINATIONS 1 0 1 1 +COMBINATION 2 0 2 2 +COMBAT 2 0 2 2 +COMBASH 1 0 1 1 +COMB 1 0 1 1 +COLUMNS 1 0 1 1 +COLUMN 1 0 1 1 +COLORISTS 2 0 2 2 +COLONY 3 0 3 3 +COLONIAL 1 0 1 1 +COLONEL 1 0 1 1 +COLLEGE 4 0 4 4 +COLLECTIONS 1 0 1 1 +COLLECTION 1 0 1 1 +COLLECTING 1 0 1 1 +COLLAR 2 0 2 2 +COLLAPSED 1 0 1 1 +COLBERT 1 0 1 1 +COINED 1 0 1 1 +COINCIDE 1 0 1 1 +COFFEE 6 0 6 6 +COCK 1 0 1 1 +COBBLER 1 0 1 1 +COAXED 1 0 1 1 +COAT 1 0 1 1 +COASTS 1 0 1 1 +COAST 3 0 3 3 +COACHMAN 1 0 1 1 +COACH 1 0 1 1 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUMSINESS 1 0 1 1 +CLOTTED 1 0 1 1 +CLOTHING 1 0 1 1 +CLOTHES 5 0 5 5 +CLOTHED 1 0 1 1 +CLOSET 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 6 0 6 6 +CLOSED 2 0 2 2 +CLOSE 10 0 10 10 +CLOAKS 2 0 2 2 +CLIMBING 1 0 1 1 +CLIMATE 2 0 2 2 +CLIFF 2 0 2 2 +CLIENTS 1 0 1 1 +CLICKED 1 0 1 1 +CLEVERNESS 3 0 3 3 +CLEVER 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 1 0 1 1 +CLERGYMAN'S 2 0 2 2 +CLERGY 2 0 2 2 +CLEARNESS 1 0 1 1 +CLEARLY 5 0 5 5 +CLEARING 1 0 1 1 +CLEAREST 1 0 1 1 +CLEAR 10 0 10 10 +CLEANED 1 0 1 1 +CLAY 5 0 5 5 +CLAWS 2 0 2 2 +CLAUSE 1 0 1 1 +CLASSIFYING 1 0 1 1 +CLASSIFIER 1 0 1 1 +CLASSIFICATION 1 0 1 1 +CLASSIC 2 0 2 2 +CLASSES 1 0 1 1 +CLASSED 3 0 3 3 +CLASS 9 0 9 9 +CLASPING 1 0 1 1 +CLASPED 2 0 2 2 +CLASHING 2 0 2 2 +CLARIFIED 1 0 1 1 +CLAP 1 0 1 1 +CLAMOROUS 1 0 1 1 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CLAIM 2 0 2 2 +CIVILIZATION 2 0 2 2 +CIVIL 3 0 3 3 +CITY 15 0 15 15 +CITIZENS 4 0 4 4 +CITIZEN 2 0 2 2 +CITIES 2 0 2 2 +CIRCUMVENTION 1 0 1 1 +CIRCUMSTANCES 4 0 4 4 +CIRCUMSTANCE 3 0 3 3 +CIRCUMNAVIGATION 1 0 1 1 +CIRCUMFERENCE 1 0 1 1 +CIRCULATED 1 0 1 1 +CIRCUITS 1 0 1 1 +CIRCUITOUS 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLE 7 0 7 7 +CIGARS 1 0 1 1 +CHURNING 1 0 1 1 +CHURCHES 1 0 1 1 +CHURCH 17 0 17 17 +CHUCKLING 1 0 1 1 +CHUBBY 1 0 1 1 +CHRONICLED 1 0 1 1 +CHRISTMAS 4 0 4 4 +CHRISTIANS 1 0 1 1 +CHRISTIANITY 2 0 2 2 +CHRIST'S 1 0 1 1 +CHRISM 1 0 1 1 +CHOSEN 3 0 3 3 +CHOPPED 1 0 1 1 +CHOOSING 1 0 1 1 +CHOOSE 3 0 3 3 +CHOKING 1 0 1 1 +CHOCOLATE 1 0 1 1 +CHIP 1 0 1 1 +CHINA 1 0 1 1 +CHIN 2 0 2 2 +CHIMNEY 2 0 2 2 +CHILDREN'S 1 0 1 1 +CHILDREN 18 0 18 18 +CHILDISH 2 0 2 2 +CHILDHOOD'S 1 0 1 1 +CHILDHOOD 3 0 3 3 +CHILD'S 2 0 2 2 +CHILD 
19 0 19 19 +CHIEFTAIN 1 0 1 1 +CHIEFLY 4 0 4 4 +CHIEF 3 0 3 3 +CHESTNUTS 1 0 1 1 +CHESTNUT 3 0 3 3 +CHEST 2 0 2 2 +CHERRY 1 0 1 1 +CHERRIES 3 0 3 3 +CHERISH 1 0 1 1 +CHEMICALS 1 0 1 1 +CHEMICAL 1 0 1 1 +CHELSEA 1 0 1 1 +CHELFORD 4 0 4 4 +CHEERS 1 0 1 1 +CHEERFULLY 2 0 2 2 +CHEERFUL 2 0 2 2 +CHEEKS 1 0 1 1 +CHEEK 2 0 2 2 +CHECKS 1 0 1 1 +CHECKED 3 0 3 3 +CHAUCER'S 1 0 1 1 +CHAUCER 1 0 1 1 +CHASING 1 0 1 1 +CHARTER 1 0 1 1 +CHARMING 1 0 1 1 +CHARMED 1 0 1 1 +CHARM 1 0 1 1 +CHARLOTTE 2 0 2 2 +CHARLESTOWN 1 0 1 1 +CHARLESTON 1 0 1 1 +CHARITY 1 0 1 1 +CHARGED 2 0 2 2 +CHARGE 8 0 8 8 +CHARACTERS 2 0 2 2 +CHARACTERIZES 1 0 1 1 +CHARACTERIZED 1 0 1 1 +CHARACTER 14 0 14 14 +CHAPTERS 1 0 1 1 +CHAPTER 3 0 3 3 +CHAPEL 1 0 1 1 +CHAP 1 0 1 1 +CHAOS 1 0 1 1 +CHANNEL 2 0 2 2 +CHANGING 2 0 2 2 +CHANCES 1 0 1 1 +CHANCE 6 0 6 6 +CHAMBERS 2 0 2 2 +CHAMBER 4 0 4 4 +CHALICE 1 0 1 1 +CHAIRS 4 0 4 4 +CHAIR 15 0 15 15 +CHAIN 1 0 1 1 +CETERA 3 0 3 3 +CERTITUDE 1 0 1 1 +CERTAINLY 8 0 8 8 +CERTAIN 12 0 12 12 +CEREMONIES 2 0 2 2 +CEREMONIAL 1 0 1 1 +CENTURY 1 0 1 1 +CENTURIES 1 0 1 1 +CENTRAL 5 0 5 5 +CENTIPEDE 1 0 1 1 +CELLS 1 0 1 1 +CELLAR 1 0 1 1 +CELL 1 0 1 1 +CELESTIAL 2 0 2 2 +CELEBRITY 1 0 1 1 +CELEBRATION 1 0 1 1 +CELEBRATED 4 0 4 4 +CEDAR 1 0 1 1 +CEASING 1 0 1 1 +CEASE 2 0 2 2 +CAVERN 2 0 2 2 +CAVALRY 2 0 2 2 +CAVALIERS 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 +CAUSES 1 0 1 1 +CAUSED 5 0 5 5 +CAUSE 9 0 9 9 +CATTLE 1 0 1 1 +CATS 1 0 1 1 +CATHOLIC 3 0 3 3 +CATHEDRAL 1 0 1 1 +CATECHISM 2 0 2 2 +CATCHING 1 0 1 1 +CATCHES 1 0 1 1 +CATCH 3 0 3 3 +CATASTROPHE 1 0 1 1 +CATAPULT 1 0 1 1 +CASTS 1 0 1 1 +CASTLE 1 0 1 1 +CAST 8 0 8 8 +CASES 6 0 6 6 +CASEMATES 1 0 1 1 +CARTS 1 0 1 1 +CART 1 0 1 1 +CARRYING 4 0 4 4 +CARRY 7 0 7 7 +CARROTS 1 0 1 1 +CARRIES 1 0 1 1 +CARRIED 13 0 13 13 +CARRIAGES 1 0 1 1 +CARRIAGE 8 0 8 8 +CARPETED 1 0 1 1 +CARING 1 0 1 1 +CAREY 3 0 3 3 +CARELESSNESS 1 0 1 1 +CARELESS 1 0 1 1 +CAREFULLY 7 0 7 7 +CAREFUL 5 0 5 5 +CAREER 4 0 4 4 +CARED 4 0 4 4 +CARE 13 0 13 13 +CARD 1 0 1 1 +CAPTURED 1 0 1 1 +CAPTIVE 2 0 2 2 +CAPTIVATE 1 0 1 1 +CAPTAIN 27 0 27 27 +CAPSIZE 1 0 1 1 +CAPRICE 1 0 1 1 +CAPITAL 1 0 1 1 +CAPACITY 3 0 3 3 +CAPABLE 3 0 3 3 +CAP'N 4 0 4 4 +CAP 7 0 7 7 +CANST 1 0 1 1 +CANS 1 0 1 1 +CANOPY 1 0 1 1 +CANON 1 0 1 1 +CANNON 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLES 2 0 2 2 +CANARY 1 0 1 1 +CANAL 1 0 1 1 +CAN'T 21 0 21 21 +CAMPS 1 0 1 1 +CAMPAIGN 2 0 2 2 +CAMP 1 0 1 1 +CAME 44 0 44 44 +CALVINISTIC 1 0 1 1 +CALMNESS 2 0 2 2 +CALMED 1 0 1 1 +CALM 5 0 5 5 +CALLS 5 0 5 5 +CALLOUS 1 0 1 1 +CALLING 2 0 2 2 +CALL 10 0 10 10 +CALHOUN 1 0 1 1 +CAKES 2 0 2 2 +CAKE 1 0 1 1 +CABINET 3 0 3 3 +CABIN 2 0 2 2 +CABALISTIC 1 0 1 1 +BUTTONING 1 0 1 1 +BUTTON 1 0 1 1 +BUTTERFLY 1 0 1 1 +BUTT 1 0 1 1 +BUTLER 2 0 2 2 +BUTCHERY 2 0 2 2 +BUTCHERED 1 0 1 1 +BUSY 1 0 1 1 +BUSINESS 5 0 5 5 +BUSHES 4 0 4 4 +BUSHEL 1 0 1 1 +BURSTS 1 0 1 1 +BURST 5 0 5 5 +BURNT 1 0 1 1 +BURNS 1 0 1 1 +BURNING 2 0 2 2 +BURNED 1 0 1 1 +BURIED 2 0 2 2 +BURGOS 1 0 1 1 +BURGLARS 1 0 1 1 +BURDEN 1 0 1 1 +BUOYANT 2 0 2 2 +BUNDLE 1 0 1 1 +BULL 2 0 2 2 +BUILT 2 0 2 2 +BUILDS 1 0 1 1 +BUILDING 5 0 5 5 +BUGGY 1 0 1 1 +BUDDING 1 0 1 1 +BUCKLING 1 0 1 1 +BUCKLES 1 0 1 1 +BUCKINGHAM 1 0 1 1 +BUBBLING 1 0 1 1 +BUBBLE'S 1 0 1 1 +BRUTE 1 0 1 1 +BRUTALITY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSH 1 0 1 1 +BRUISED 1 0 1 1 +BROWSED 1 0 1 1 +BROW 1 0 1 1 +BROTHERS 5 0 5 5 +BROTHELS 1 0 1 1 +BROOM 1 0 1 1 +BROOKS 1 0 1 1 +BROOKLYN 1 0 1 1 +BROODING 3 0 3 3 +BROKEN 7 0 7 7 +BROKE 1 0 1 1 +BROADLY 1 0 1 1 +BROADEST 1 0 1 1 +BROAD 11 0 11 11 +BRITISH 2 0 2 2 
+BRISTLING 1 0 1 1 +BRING 9 0 9 9 +BRIM 2 0 2 2 +BRILLIANT 5 0 5 5 +BRILLIANCY 1 0 1 1 +BRIGHTNESS 1 0 1 1 +BRIGHTLY 1 0 1 1 +BRIGHTEST 1 0 1 1 +BRIGHTER 1 0 1 1 +BRIGHTENED 2 0 2 2 +BRIGHT 16 0 16 16 +BRIGANTINE 1 0 1 1 +BRIEFLY 1 0 1 1 +BRIDGE 4 0 4 4 +BRIDE 1 0 1 1 +BRICK 2 0 2 2 +BREWING 1 0 1 1 +BREEZE 1 0 1 1 +BRED 1 0 1 1 +BREATHING 4 0 4 4 +BREATH 10 0 10 10 +BREASTPLATE 1 0 1 1 +BREAST 2 0 2 2 +BREAKWATER 1 0 1 1 +BREAKING 2 0 2 2 +BREAKFASTING 1 0 1 1 +BREAKERS 1 0 1 1 +BRAVELY 1 0 1 1 +BRAVE 2 0 2 2 +BRANWELL 1 0 1 1 +BRANDY 1 0 1 1 +BRANDON 4 0 4 4 +BRANDED 1 0 1 1 +BRANCHES 8 0 8 8 +BRAINS 3 0 3 3 +BRAIN 2 0 2 2 +BRAIDS 1 0 1 1 +BRAIDED 1 0 1 1 +BRAID 1 0 1 1 +BRACTON 1 0 1 1 +BRACELETS 1 0 1 1 +BRACELET 1 0 1 1 +BOY'S 3 0 3 3 +BOY 17 0 17 17 +BOXES 1 0 1 1 +BOWING 1 0 1 1 +BOWER 1 0 1 1 +BOWED 1 0 1 1 +BOW 4 0 4 4 +BOUT 1 0 1 1 +BOUQUETS 1 0 1 1 +BOUND 6 0 6 6 +BOUGHS 1 0 1 1 +BOTTOMS 1 0 1 1 +BOTTOM 7 0 7 7 +BOTTLES 2 0 2 2 +BOTTLE 1 0 1 1 +BOTHER 1 0 1 1 +BOTH 34 0 34 34 +BOTANICAL 2 0 2 2 +BOSOM 2 0 2 2 +BORE 2 0 2 2 +BORDERING 1 0 1 1 +BORDERED 1 0 1 1 +BOOTS 2 0 2 2 +BOOLOOROO 12 0 12 12 +BOOK 4 0 4 4 +BONY 1 0 1 1 +BONNET 1 0 1 1 +BONES 2 0 2 2 +BONDAGE 1 0 1 1 +BOND 3 0 3 3 +BOLTON 1 0 1 1 +BOLDLY 3 0 3 3 +BOLDEST 1 0 1 1 +BOILED 1 0 1 1 +BOIL 1 0 1 1 +BOGUS 3 0 3 3 +BOGGS 2 0 2 2 +BODY 8 0 8 8 +BODILY 3 0 3 3 +BODIES 3 0 3 3 +BOASTING 2 0 2 2 +BOARDED 2 0 2 2 +BOARD 9 0 9 9 +BLUSHING 2 0 2 2 +BLUSHED 1 0 1 1 +BLUSH 1 0 1 1 +BLUNT 1 0 1 1 +BLUFF 1 0 1 1 +BLUES 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 1 0 1 1 +BLOW 2 0 2 2 +BLOOM 1 0 1 1 +BLOODY 1 0 1 1 +BLOODSHED 1 0 1 1 +BLOODED 1 0 1 1 +BLOOD 6 0 6 6 +BLOCKS 1 0 1 1 +BLISS 1 0 1 1 +BLIND 1 0 1 1 +BLEW 1 0 1 1 +BLESSED 3 0 3 3 +BLESS 2 0 2 2 +BLEED 1 0 1 1 +BLEACHED 1 0 1 1 +BLAZING 1 0 1 1 +BLAZED 1 0 1 1 +BLAZE 2 0 2 2 +BLANK 2 0 2 2 +BLAME 1 0 1 1 +BLADE 2 0 2 2 +BLACKSTONE 1 0 1 1 +BLACKNESSES 1 0 1 1 +BLACKNESS 1 0 1 1 +BLACKER 2 0 2 2 +BLACK 22 0 22 22 +BITTER 1 0 1 1 +BITS 1 0 1 1 +BITES 1 0 1 1 +BITE 1 0 1 1 +BISHOPS 5 0 5 5 +BIRTH 2 0 2 2 +BIRMINGHAM 1 0 1 1 +BIRD 4 0 4 4 +BIRCHES 1 0 1 1 +BINDING 1 0 1 1 +BIND 1 0 1 1 +BIG 12 0 12 12 +BIDDING 1 0 1 1 +BIBLE 1 0 1 1 +BEYOND 6 0 6 6 +BEWILDERMENT 1 0 1 1 +BEWILDERED 6 0 6 6 +BEWARE 1 0 1 1 +BEVERAGES 1 0 1 1 +BETWEEN 25 0 25 25 +BETTING 1 0 1 1 +BETTER 25 0 25 25 +BETRAYED 1 0 1 1 +BETRAY 1 0 1 1 +BETH 12 0 12 12 +BESTOWED 1 0 1 1 +BESTOW 1 0 1 1 +BEST 22 0 22 22 +BESOUGHT 1 0 1 1 +BESIEGED 1 0 1 1 +BESIDES 8 0 8 8 +BESIDE 5 0 5 5 +BERTIE 1 0 1 1 +BERRIES 1 0 1 1 +BENT 4 0 4 4 +BENIGNANTLY 1 0 1 1 +BENIGHTED 1 0 1 1 +BENEFIT 1 0 1 1 +BENEATH 6 0 6 6 +BEND 1 0 1 1 +BENCHES 3 0 3 3 +BEN 3 0 3 3 +BELT 2 0 2 2 +BELOW 1 0 1 1 +BELOVED 3 0 3 3 +BELONGS 1 0 1 1 +BELONGING 1 0 1 1 +BELONGED 3 0 3 3 +BELONG 2 0 2 2 +BELLY 3 0 3 3 +BELLS 1 0 1 1 +BELLINGHAM 2 0 2 2 +BELL 3 0 3 3 +BELIEVING 2 0 2 2 +BELIEVERS 1 0 1 1 +BELIEVED 5 0 5 5 +BELIEVE 21 0 21 21 +BELIEF 3 0 3 3 +BEINGS 1 0 1 1 +BEHOLDING 1 0 1 1 +BEHOLDERS 1 0 1 1 +BEHOLDER 1 0 1 1 +BEHIND 10 0 10 10 +BEHELD 1 0 1 1 +BEHAVED 1 0 1 1 +BEHAVE 1 0 1 1 +BEHALF 1 0 1 1 +BEGUN 5 0 5 5 +BEGUILING 1 0 1 1 +BEGOT 1 0 1 1 +BEGINS 4 0 4 4 +BEGINNING 4 0 4 4 +BEGIN 9 0 9 9 +BEGGAR'S 1 0 1 1 +BEGGAR 2 0 2 2 +BEGAN 22 0 22 22 +BEFITS 1 0 1 1 +BEFELL 1 0 1 1 +BEEN 137 0 137 137 +BEEF 1 0 1 1 +BEDSIDE 1 0 1 1 +BEDROOM 2 0 2 2 +BEDFORD 1 0 1 1 +BECOMING 1 0 1 1 +BECOMES 8 0 8 8 +BECOME 14 0 14 14 +BECKONED 1 0 1 1 +BECKON 1 0 1 1 +BECAUSE 30 0 30 30 +BECAME 12 0 12 12 +BEAUTY 21 0 21 21 +BEAUTIFUL 13 0 13 13 +BEAUTIES 2 0 2 2 +BEATITUDE 
2 0 2 2 +BEATING 2 0 2 2 +BEATERS 1 0 1 1 +BEATEN 2 0 2 2 +BEAT 1 0 1 1 +BEASTS 2 0 2 2 +BEARS 4 0 4 4 +BEARING 3 0 3 3 +BEARD 1 0 1 1 +BEAR'S 1 0 1 1 +BEAR 11 0 11 11 +BEAMS 1 0 1 1 +BEAK 6 0 6 6 +BEADS 1 0 1 1 +BATTLED 1 0 1 1 +BATTERIES 1 0 1 1 +BATTERED 1 0 1 1 +BAT 1 0 1 1 +BASTARD 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASED 1 0 1 1 +BARTLEY 14 0 14 14 +BARS 1 0 1 1 +BARRICADED 1 0 1 1 +BARREN 1 0 1 1 +BARREL 1 0 1 1 +BARRACK 1 0 1 1 +BARNS 1 0 1 1 +BARN 4 0 4 4 +BARGAINS 1 0 1 1 +BAREFOOT 1 0 1 1 +BARE 2 0 2 2 +BARBARITY 1 0 1 1 +BAR 1 0 1 1 +BAPTIZED 1 0 1 1 +BAPTISM 1 0 1 1 +BANTER 1 0 1 1 +BANQUET 1 0 1 1 +BANKS 1 0 1 1 +BANK 3 0 3 3 +BANISHED 1 0 1 1 +BANG 1 0 1 1 +BAND 2 0 2 2 +BALMY 1 0 1 1 +BALLS 2 0 2 2 +BALLET 2 0 2 2 +BALEEN 1 0 1 1 +BAKER 1 0 1 1 +BAGS 1 0 1 1 +BAGGAGE 1 0 1 1 +BAG 1 0 1 1 +BAFFLED 2 0 2 2 +BADLY 2 0 2 2 +BADE 3 0 3 3 +BAD 6 0 6 6 +BACON 1 0 1 1 +BACKWARD 1 0 1 1 +BABY'S 1 0 1 1 +BABY 1 0 1 1 +BABIES 1 0 1 1 +BABE 1 0 1 1 +AZURE 1 0 1 1 +AXIS 1 0 1 1 +AWOKE 3 0 3 3 +AWKWARD 1 0 1 1 +AWFULLY 2 0 2 2 +AWFUL 4 0 4 4 +AWAKE 1 0 1 1 +AWAITING 1 0 1 1 +AWAITED 2 0 2 2 +AVOIDING 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 5 0 5 5 +AVERSION 1 0 1 1 +AVERSE 1 0 1 1 +AUTUMN 1 0 1 1 +AUTHORS 1 0 1 1 +AUTHORIZED 1 0 1 1 +AUTHORITY 6 0 6 6 +AUTHORITIES 1 0 1 1 +AUTHORITATIVELY 1 0 1 1 +AUTHOR 1 0 1 1 +AUTHENTICATED 1 0 1 1 +AUNT'S 1 0 1 1 +AUGUST 5 0 5 5 +AUGMENT 1 0 1 1 +AUDITORY 1 0 1 1 +AUDITORS 1 0 1 1 +AUDIENCE 6 0 6 6 +AUDACIOUS 1 0 1 1 +AUCTION 1 0 1 1 +ATTRIBUTED 1 0 1 1 +ATTRACTIVE 1 0 1 1 +ATTRACTION 1 0 1 1 +ATTRACTED 3 0 3 3 +ATTORNEYS 1 0 1 1 +ATTIRE 1 0 1 1 +ATTENUATING 1 0 1 1 +ATTENTIVELY 2 0 2 2 +ATTENTION 11 0 11 11 +ATTENDED 1 0 1 1 +ATTENDANT 1 0 1 1 +ATTEND 3 0 3 3 +ATTEMPTS 1 0 1 1 +ATTEMPT 5 0 5 5 +ATTAINMENTS 1 0 1 1 +ATTAINMENT 1 0 1 1 +ATTAINED 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 4 0 4 4 +ATTACHED 2 0 2 2 +ATROCIOUS 1 0 1 1 +ATLANTIS 1 0 1 1 +ATLANTIC 3 0 3 3 +ATHENS 1 0 1 1 +ATHENIANS 1 0 1 1 +ATHENIAN 2 0 2 2 +ATE 2 0 2 2 +ASTRONOMY 1 0 1 1 +ASTOUNDING 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHING 1 0 1 1 +ASTONISHED 1 0 1 1 +ASSURES 1 0 1 1 +ASSUREDLY 1 0 1 1 +ASSURED 5 0 5 5 +ASSURE 5 0 5 5 +ASSURANCES 1 0 1 1 +ASSURANCE 3 0 3 3 +ASSUMED 5 0 5 5 +ASSOCIATION 2 0 2 2 +ASSOCIATES 1 0 1 1 +ASSOCIATED 3 0 3 3 +ASSISTED 2 0 2 2 +ASSISTANT 1 0 1 1 +ASSIST 2 0 2 2 +ASSIDUOUSLY 1 0 1 1 +ASSERTS 1 0 1 1 +ASSERTIVE 1 0 1 1 +ASSERTED 3 0 3 3 +ASSENT 1 0 1 1 +ASSEMBLY 2 0 2 2 +ASS 1 0 1 1 +ASPECT 1 0 1 1 +ASLEEP 1 0 1 1 +ASKING 2 0 2 2 +ASKED 22 0 22 22 +ASK 10 0 10 10 +ASIDE 3 0 3 3 +ASIA 1 0 1 1 +ASHORE 1 0 1 1 +ASHAMED 2 0 2 2 +ASCRIBES 1 0 1 1 +ASCERTAINING 1 0 1 1 +ASCERTAIN 2 0 2 2 +ARTILLERY 1 0 1 1 +ARTIFICE 1 0 1 1 +ARTICULATE 2 0 2 2 +ARTICLE 3 0 3 3 +ARTICHOKES 1 0 1 1 +ARTHUR 1 0 1 1 +ART 14 0 14 14 +ARROWS 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVE 1 0 1 1 +ARRIVAL 4 0 4 4 +ARRESTING 1 0 1 1 +ARRAY 1 0 1 1 +ARRANGING 2 0 2 2 +ARRANGEMENTS 1 0 1 1 +ARRANGEMENT 2 0 2 2 +ARRANGED 2 0 2 2 +AROUSE 1 0 1 1 +AROSE 2 0 2 2 +ARONNAX 1 0 1 1 +ARMY 9 0 9 9 +ARMS 15 0 15 15 +ARISTOCRACY 1 0 1 1 +ARISING 1 0 1 1 +ARISE 1 0 1 1 +ARID 1 0 1 1 +ARIANS 1 0 1 1 +ARGYLE'S 1 0 1 1 +ARGUS 1 0 1 1 +ARGUING 1 0 1 1 +ARGUE 2 0 2 2 +AREN'T 1 0 1 1 +AREA 1 0 1 1 +ARDUOUS 1 0 1 1 +ARCHIVES 1 0 1 1 +ARCHITECTURAL 1 0 1 1 +ARCHED 1 0 1 1 +ARCH 1 0 1 1 +ARCADIAN 1 0 1 1 +APRON 2 0 2 2 +APRIL 2 0 2 2 +APPROXIMATELY 1 0 1 1 +APPROVING 2 0 2 2 +APPROVE 2 0 2 2 +APPROVAL 1 0 1 1 +APPROPRIATE 1 0 1 1 +APPROBATION 1 0 1 1 +APPROACHING 3 0 3 3 +APPROACHES 2 0 2 2 +APPROACHED 6 0 6 6 +APPROACH 1 0 1 
1 +APPRENTICESHIP 1 0 1 1 +APPREHENSION 1 0 1 1 +APPRECIATIVE 1 0 1 1 +APPRECIATE 1 0 1 1 +APPOSITION 1 0 1 1 +APPOINTED 7 0 7 7 +APPLYING 1 0 1 1 +APPLICATION 2 0 2 2 +APPLE 1 0 1 1 +APPLAUSE 2 0 2 2 +APPLAUDED 1 0 1 1 +APPETITES 1 0 1 1 +APPETITE 1 0 1 1 +APPEARS 1 0 1 1 +APPEARED 10 0 10 10 +APPEARANCES 3 0 3 3 +APPEARANCE 9 0 9 9 +APPEAR 3 0 3 3 +APPEALS 1 0 1 1 +APPARENTLY 1 0 1 1 +APPARENT 2 0 2 2 +APPARATUS 1 0 1 1 +APPALLING 1 0 1 1 +APOSTOLICAL 1 0 1 1 +APOSTOLIC 1 0 1 1 +APOSTLES 6 0 6 6 +APOSTLE 4 0 4 4 +APOLLO 1 0 1 1 +APARTMENT 2 0 2 2 +APART 1 0 1 1 +ANYHOW 3 0 3 3 +ANYBODY 3 0 3 3 +ANXIOUS 3 0 3 3 +ANXIETY 4 0 4 4 +ANTIPATHY 2 0 2 2 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANTICHRIST 1 0 1 1 +ANTI 1 0 1 1 +ANTE 1 0 1 1 +ANTARCTIC 1 0 1 1 +ANSWERS 2 0 2 2 +ANSWER 6 0 6 6 +ANOTHER'S 1 0 1 1 +ANNOYANCE 2 0 2 2 +ANNOUNCED 2 0 2 2 +ANNE'S 2 0 2 2 +ANIMOSITY 2 0 2 2 +ANIMATED 2 0 2 2 +ANIMALS 5 0 5 5 +ANIMAL 8 0 8 8 +ANGRY 5 0 5 5 +ANGRILY 3 0 3 3 +ANGRIER 1 0 1 1 +ANGOR 1 0 1 1 +ANGER 1 0 1 1 +ANECDOTES 1 0 1 1 +ANCIENT 3 0 3 3 +ANALYSIS 2 0 2 2 +ANALOGY 1 0 1 1 +ANALOGUE 1 0 1 1 +AMUSING 2 0 2 2 +AMUSEMENT 3 0 3 3 +AMUSED 1 0 1 1 +AMUSE 2 0 2 2 +AMPLY 2 0 2 2 +AMOUNT 3 0 3 3 +AMONGST 3 0 3 3 +AMONG 29 0 29 29 +AMISS 1 0 1 1 +AMIDST 2 0 2 2 +AMID 1 0 1 1 +AMETHYST 1 0 1 1 +AMERICANS 2 0 2 2 +AMERICAN 10 0 10 10 +AMERICA 2 0 2 2 +AMENDS 2 0 2 2 +AMENDMENT 1 0 1 1 +AMELIORATION 1 0 1 1 +AMBROSE 4 0 4 4 +AMBITIOUS 1 0 1 1 +AMBITION 2 0 2 2 +AMBASSADOR 1 0 1 1 +AMAZEMENT 2 0 2 2 +AMALGAMATED 1 0 1 1 +ALWAYS 36 0 36 36 +ALTHOUGH 10 0 10 10 +ALTERNATING 3 0 3 3 +ALTERING 2 0 2 2 +ALTERED 2 0 2 2 +ALTERATION 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 1 0 1 1 +ALSO 36 0 36 36 +ALOUD 3 0 3 3 +ALONG 15 0 15 15 +ALMS 1 0 1 1 +ALMOST 19 0 19 19 +ALLY 1 0 1 1 +ALLUDE 1 0 1 1 +ALLOWING 2 0 2 2 +ALLOWED 7 0 7 7 +ALLOW 5 0 5 5 +ALLIES 1 0 1 1 +ALLIED 1 0 1 1 +ALLERS 1 0 1 1 +ALLEGED 2 0 2 2 +ALIVE 1 0 1 1 +ALIKE 1 0 1 1 +ALIGHTED 1 0 1 1 +ALICE 4 0 4 4 +ALGERIAN 1 0 1 1 +ALGERIA 2 0 2 2 +ALGEBRA 1 0 1 1 +ALERTNESS 1 0 1 1 +ALE 2 0 2 2 +ALBIGENSES 1 0 1 1 +ALAS 3 0 3 3 +ALARMED 1 0 1 1 +AKIN 1 0 1 1 +AIN'T 2 0 2 2 +AIMED 1 0 1 1 +AIDED 1 0 1 1 +AHEAD 1 0 1 1 +AGREEMENT 1 0 1 1 +AGREED 2 0 2 2 +AGREEABLY 2 0 2 2 +AGREEABLE 5 0 5 5 +AGREE 2 0 2 2 +AGO 4 0 4 4 +AGITATION 4 0 4 4 +AGITATED 2 0 2 2 +AGGRESSIVENESS 1 0 1 1 +AGGRESSIVE 1 0 1 1 +AGGREGATE 1 0 1 1 +AGENCY 1 0 1 1 +AGE 6 0 6 6 +AGAPE 1 0 1 1 +AGAINST 23 0 23 23 +AGAIN 39 0 39 39 +AFTERWARDS 5 0 5 5 +AFTERWARD 2 0 2 2 +AFTERNOON 4 0 4 4 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFLOAT 1 0 1 1 +AFFORD 4 0 4 4 +AFFLICTED 1 0 1 1 +AFFIRMATIVE 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATE 1 0 1 1 +AFFECTION 7 0 7 7 +AFFAIRS 3 0 3 3 +AFFAIR 2 0 2 2 +ADVISER 1 0 1 1 +ADVISED 1 0 1 1 +ADVISABLE 1 0 1 1 +ADVICE 4 0 4 4 +ADVERTISING 1 0 1 1 +ADVERTISEMENT 1 0 1 1 +ADVERSE 2 0 2 2 +ADVANTAGES 2 0 2 2 +ADVANTAGE 3 0 3 3 +ADVANCING 1 0 1 1 +ADVANCE 5 0 5 5 +ADORE 1 0 1 1 +ADORATION 3 0 3 3 +ADOPTED 2 0 2 2 +ADOLESCENCE 1 0 1 1 +ADMITTING 2 0 2 2 +ADMITTED 3 0 3 3 +ADMITTANCE 1 0 1 1 +ADMIT 2 0 2 2 +ADMIRING 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 2 0 2 2 +ADMINISTRATION 3 0 3 3 +ADJUST 1 0 1 1 +ADHERENTS 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 6 0 6 6 +ADDRESS 3 0 3 3 +ADDITIONAL 1 0 1 1 +ADDED 11 0 11 11 +ACUTE 2 0 2 2 +ACTUALLY 3 0 3 3 +ACTUAL 4 0 4 4 +ACTS 2 0 2 2 +ACTRESS 1 0 1 1 +ACTORS 4 0 4 4 +ACTIVITY 1 0 1 1 +ACTIVELY 1 0 1 1 +ACTIVE 2 0 2 2 +ACTING 2 0 2 2 +ACT 6 0 6 6 +ACROSS 13 0 13 13 +ACQUIRES 1 0 1 1 +ACQUIRE 1 0 1 1 +ACQUAINTED 1 0 1 1 +ACQUAINTANCE 3 0 3 3 
+ACQUAINT 1 0 1 1 +ACORN 1 0 1 1 +ACKNOWLEDGES 1 0 1 1 +ACKNOWLEDGED 2 0 2 2 +ACKNOWLEDGE 2 0 2 2 +ACHIEVEMENTS 1 0 1 1 +ACHIEVEMENT 1 0 1 1 +ACHIEVED 2 0 2 2 +ACE 2 0 2 2 +ACCUSTOMED 3 0 3 3 +ACCUSE 1 0 1 1 +ACCURATELY 1 0 1 1 +ACCURATE 2 0 2 2 +ACCURACY 3 0 3 3 +ACCRUING 1 0 1 1 +ACCOUNTS 1 0 1 1 +ACCOUNTED 1 0 1 1 +ACCORDINGLY 1 0 1 1 +ACCORDING 4 0 4 4 +ACCORDANCE 2 0 2 2 +ACCOMPLISHMENT 1 0 1 1 +ACCOMPLISHED 5 0 5 5 +ACCOMPANY 2 0 2 2 +ACCOMPANIED 3 0 3 3 +ACCOMMODATIONS 1 0 1 1 +ACCOMMODATION 1 0 1 1 +ACCLAMATIONS 1 0 1 1 +ACCIDENTS 2 0 2 2 +ACCIDENT 2 0 2 2 +ACCESSORIES 1 0 1 1 +ACCEPTING 1 0 1 1 +ACCEPTED 1 0 1 1 +ACCEPTABLE 1 0 1 1 +ABSURDITY 1 0 1 1 +ABSURDITIES 1 0 1 1 +ABSURD 1 0 1 1 +ABSTRACTIONS 1 0 1 1 +ABSTRACTION 1 0 1 1 +ABSORBED 1 0 1 1 +ABSOLUTELY 6 0 6 6 +ABSOLUTE 1 0 1 1 +ABSENT 2 0 2 2 +ABSENCE 1 0 1 1 +ABRUPTLY 2 0 2 2 +ABROAD 4 0 4 4 +ABRAHAM 2 0 2 2 +ABOUT 85 0 85 85 +ABOLITIONISM 1 0 1 1 +ABOARD 2 0 2 2 +ABNER 1 0 1 1 +ABLE 7 0 7 7 +ABJECTLY 1 0 1 1 +ABANDONED 2 0 2 2 diff --git a/log/fast_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..f477b492ffc4a20b4cdd130701c7cdaa58591dca --- /dev/null +++ b/log/fast_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,15371 @@ +%WER = 9.05 +Errors: 515 insertions, 457 deletions, 3766 substitutions, over 52343 reference words (48120 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1688-142285-0000-1948: THERE'S IRON THEY SAY IN ALL OUR BLOOD AND A GRAIN OR TWO PERHAPS IS GOOD BUT HIS HE MAKES ME HARSHLY FEEL HAS GOT A LITTLE TOO MUCH OF STEEL ANON +1688-142285-0001-1949: MARGARET SAID MISTER HALE AS HE RETURNED FROM SHOWING HIS (GUEST->GUESTS) DOWNSTAIRS I COULD NOT HELP WATCHING YOUR FACE WITH SOME ANXIETY WHEN MISTER THORNTON MADE HIS CONFESSION OF HAVING BEEN A SHOP BOY +1688-142285-0002-1950: YOU DON'T MEAN THAT YOU THOUGHT ME SO SILLY +1688-142285-0003-1951: I REALLY (LIKED->LIKE) THAT ACCOUNT OF HIMSELF BETTER THAN ANYTHING ELSE HE SAID +1688-142285-0004-1952: HIS STATEMENT OF HAVING BEEN A SHOP BOY WAS THE THING I (LIKED->LIKE) BEST OF ALL +1688-142285-0005-1953: YOU WHO WERE ALWAYS ACCUSING PEOPLE (OF BEING->HAVE BEEN) SHOPPY AT HELSTONE +1688-142285-0006-1954: I DON'T THINK MISTER HALE YOU HAVE DONE QUITE RIGHT (IN->*) INTRODUCING SUCH A PERSON TO US WITHOUT TELLING US WHAT HE HAD BEEN +1688-142285-0007-1955: I REALLY WAS VERY MUCH AFRAID OF SHOWING HIM HOW MUCH SHOCKED I WAS AT SOME (PARTS->PART) OF WHAT HE SAID +1688-142285-0008-1956: HIS FATHER DYING IN MISERABLE CIRCUMSTANCES +1688-142285-0009-1957: WHY IT MIGHT HAVE BEEN IN THE WORKHOUSE +1688-142285-0010-1958: HIS FATHER SPECULATED WILDLY FAILED AND THEN KILLED HIMSELF BECAUSE HE COULD NOT BEAR THE DISGRACE +1688-142285-0011-1959: ALL HIS FORMER FRIENDS SHRUNK FROM THE DISCLOSURES THAT HAD TO BE MADE OF HIS DISHONEST GAMBLING WILD HOPELESS STRUGGLES MADE WITH OTHER PEOPLE'S MONEY TO REGAIN HIS OWN MODERATE PORTION OF WEALTH +1688-142285-0012-1960: NO ONE CAME FORWARDS TO HELP THE MOTHER AND THIS BOY +1688-142285-0013-1961: AT LEAST NO FRIEND CAME FORWARDS IMMEDIATELY AND 
(MISSUS->MISTER) THORNTON IS NOT ONE I FANCY TO WAIT TILL (TARDY->TIDY) KINDNESS COMES TO FIND HER OUT +1688-142285-0014-1962: SO THEY LEFT MILTON +1688-142285-0015-1963: HOW TAINTED ASKED HER FATHER +1688-142285-0016-1964: (OH->O) PAPA BY THAT TESTING EVERYTHING BY THE STANDARD OF WEALTH +1688-142285-0017-1965: WHEN HE SPOKE OF THE MECHANICAL POWERS HE EVIDENTLY LOOKED UPON THEM ONLY AS NEW WAYS OF EXTENDING TRADE AND MAKING MONEY +1688-142285-0018-1966: AND THE POOR MEN AROUND HIM (THEY->THERE) WERE POOR BECAUSE THEY WERE VICIOUS OUT OF THE PALE OF HIS SYMPATHIES BECAUSE THEY HAD NOT HIS IRON NATURE AND THE CAPABILITIES THAT IT GIVES HIM FOR BEING RICH +1688-142285-0019-1967: NOT VICIOUS HE NEVER SAID THAT +1688-142285-0020-1968: IMPROVIDENT AND SELF INDULGENT WERE HIS WORDS +1688-142285-0021-1969: MARGARET WAS COLLECTING HER MOTHER'S WORKING MATERIALS AND PREPARING TO GO TO BED +1688-142285-0022-1970: JUST AS SHE WAS LEAVING THE ROOM SHE HESITATED SHE WAS INCLINED TO MAKE AN ACKNOWLEDGMENT WHICH SHE THOUGHT WOULD PLEASE HER FATHER BUT WHICH TO BE FULL AND TRUE MUST INCLUDE A LITTLE ANNOYANCE +1688-142285-0023-1971: HOWEVER OUT IT CAME +1688-142285-0024-1972: PAPA I DO THINK MISTER THORNTON A VERY REMARKABLE MAN BUT PERSONALLY I DON'T LIKE HIM AT ALL +1688-142285-0025-1973: AND I DO SAID HER FATHER LAUGHING +1688-142285-0026-1974: PERSONALLY AS YOU CALL IT AND ALL +1688-142285-0027-1975: I DON'T SET HIM UP FOR A HERO OR ANYTHING OF THAT KIND +1688-142285-0028-1976: BUT GOOD NIGHT CHILD +1688-142285-0029-1977: THERE WERE SEVERAL OTHER SIGNS OF SOMETHING WRONG ABOUT MISSUS HALE +1688-142285-0030-1978: SHE AND DIXON HELD MYSTERIOUS CONSULTATIONS IN HER BEDROOM FROM WHICH DIXON WOULD COME OUT CRYING AND CROSS AS WAS (HER->A) CUSTOM WHEN ANY DISTRESS OF HER MISTRESS CALLED UPON HER SYMPATHY +1688-142285-0031-1979: ONCE MARGARET HAD GONE INTO THE CHAMBER SOON AFTER DIXON (LEFT IT->LIFTED) AND FOUND HER MOTHER ON HER KNEES AND AS MARGARET STOLE OUT SHE CAUGHT A FEW WORDS WHICH WERE EVIDENTLY A PRAYER FOR STRENGTH AND PATIENCE TO (ENDURE->INDURE) SEVERE BODILY SUFFERING +1688-142285-0032-1980: BUT THOUGH SHE RECEIVED CARESSES AND FOND WORDS BACK AGAIN IN SUCH PROFUSION AS WOULD HAVE GLADDENED HER FORMERLY YET SHE FELT THAT THERE WAS A SECRET WITHHELD FROM HER AND SHE BELIEVED IT BORE SERIOUS REFERENCE TO HER MOTHER'S HEALTH +1688-142285-0033-1981: SHE LAY AWAKE VERY LONG THIS NIGHT PLANNING HOW TO LESSEN THE EVIL INFLUENCE OF THEIR MILTON LIFE ON HER MOTHER +1688-142285-0034-1982: A SERVANT (TO->*) GIVE DIXON PERMANENT ASSISTANCE SHOULD BE GOT IF SHE GAVE UP (HER->THE) WHOLE TIME TO THE SEARCH AND THEN AT ANY RATE HER MOTHER MIGHT HAVE ALL THE PERSONAL (ATTENTION->ATTENTIONS) SHE REQUIRED AND HAD BEEN ACCUSTOMED TO HER WHOLE LIFE +1688-142285-0035-1983: VISITING REGISTER (OFFICES->OFFICERS) SEEING ALL MANNER OF UNLIKELY PEOPLE AND VERY FEW IN THE LEAST LIKELY ABSORBED MARGARET'S TIME AND THOUGHTS FOR SEVERAL DAYS +1688-142285-0036-1984: ONE AFTERNOON SHE MET BESSY HIGGINS IN THE STREET AND STOPPED TO SPEAK TO HER +1688-142285-0037-1985: WELL (BESSY->BUSY) HOW ARE YOU +1688-142285-0038-1986: BETTER AND NOT BETTER IF (YO->YOU) KNOW WHAT THAT MEANS +1688-142285-0039-1987: NOT EXACTLY REPLIED MARGARET SMILING +1688-142285-0040-1988: I'M BETTER IN NOT BEING TORN TO PIECES (BY->BUT) COUGHING (O'NIGHTS->A KNIGHT'S) BUT I'M WEARY AND TIRED (O->OF) MILTON AND LONGING TO GET AWAY TO THE LAND (O BEULAH->OF BOOLA) AND WHEN I THINK I'M FARTHER AND FARTHER OFF MY HEART SINKS AND I'M NO BETTER I'M WORSE 
+1688-142285-0041-1989: MARGARET TURNED ROUND TO WALK ALONGSIDE OF THE GIRL IN HER FEEBLE PROGRESS HOMEWARD +1688-142285-0042-1990: BUT FOR A MINUTE OR TWO SHE DID NOT SPEAK +1688-142285-0043-1991: AT LAST SHE SAID IN A LOW VOICE +1688-142285-0044-1992: BESSY DO YOU WISH TO DIE +1688-142285-0045-1993: BESSY WAS SILENT IN HER TURN FOR A MINUTE OR TWO THEN SHE REPLIED +1688-142285-0046-1994: (NOUGHT->NOT) WORSE THAN MANY OTHERS I RECKON +1688-142285-0047-1995: BUT WHAT WAS IT +1688-142285-0048-1996: YOU KNOW I'M A STRANGER HERE SO PERHAPS I'M NOT SO QUICK AT UNDERSTANDING WHAT YOU MEAN AS IF I'D LIVED ALL MY LIFE (AT->IN) MILTON +1688-142285-0049-1997: I HAD FORGOTTEN WHAT I SAID FOR THE TIME CONTINUED MARGARET QUIETLY +1688-142285-0050-1998: I SHOULD HAVE THOUGHT OF IT AGAIN WHEN I WAS LESS BUSY (MAY I GO->MARGAR) WITH YOU NOW +1688-142285-0051-1999: THE SHARPNESS IN HER EYE TURNED TO A WISTFUL LONGING AS SHE MET MARGARET'S SOFT AND FRIENDLY GAZE +1688-142285-0052-2000: AS THEY TURNED UP INTO A SMALL COURT OPENING OUT (OF->INTO) A SQUALID STREET BESSY SAID +1688-142285-0053-2001: (YO'LL->YOU'LL) NOT BE DAUNTED IF (FATHER'S AT->FATHER SAID) HOME AND SPEAKS A BIT GRUFFISH AT FIRST +1688-142285-0054-2002: BUT NICHOLAS WAS NOT AT HOME WHEN THEY ENTERED +1688-142285-0055-2003: GASPED (BESSY->BESSIE) AT LAST +1688-142285-0056-2004: BESSY TOOK A LONG AND FEVERISH DRAUGHT AND THEN FELL BACK AND SHUT HER EYES +1688-142285-0057-2005: MARGARET BENT OVER AND SAID BESSY DON'T BE IMPATIENT WITH YOUR LIFE WHATEVER IT IS OR MAY HAVE BEEN +1688-142285-0058-2006: REMEMBER WHO GAVE IT (*->TO) YOU AND MADE IT WHAT IT IS +1688-142285-0059-2007: NOW I'LL NOT HAVE MY WENCH (PREACHED TO->PREACH TOO) +1688-142285-0060-2008: BUT SURELY SAID MARGARET FACING ROUND YOU BELIEVE IN WHAT I SAID THAT GOD GAVE HER LIFE AND ORDERED WHAT KIND OF LIFE IT WAS TO BE +1688-142285-0061-2009: I BELIEVE WHAT I SEE AND NO MORE +1688-142285-0062-2010: THAT'S WHAT I BELIEVE YOUNG WOMAN +1688-142285-0063-2011: I DON'T BELIEVE ALL I HEAR NO NOT BY A BIG DEAL +1688-142285-0064-2012: BUT (HOO'S->WHO'S) COME AT LAST AND (HOO'S->WHO'S) WELCOME AS LONG AS (HOO'LL->HE'LL) KEEP FROM PREACHING ON WHAT (HOO->HE) KNOWS (NOUGHT->NOT) ABOUT +1688-142285-0065-2013: IT'S SIMPLE AND NOT FAR TO FETCH NOR HARD TO WORK +1688-142285-0066-2014: BUT THE GIRL ONLY PLEADED THE MORE WITH MARGARET +1688-142285-0067-2015: DON'T THINK HARDLY ON HIM HE'S A GOOD MAN HE IS +1688-142285-0068-2016: I SOMETIMES THINK I SHALL BE (MOPED WI->MIRKED WITH) SORROW EVEN IN THE CITY OF GOD IF (FATHER->EITHER) IS NOT THERE +1688-142285-0069-2017: THE FEVERISH COLOUR CAME INTO HER (CHEEK->CHEEKS) AND THE FEVERISH FLAME INTO HER EYE +1688-142285-0070-2018: BUT YOU WILL BE THERE FATHER YOU SHALL OH MY HEART +1688-142285-0071-2019: SHE PUT HER HAND TO IT AND BECAME GHASTLY PALE +1688-142285-0072-2020: MARGARET HELD HER IN HER ARMS AND PUT THE WEARY HEAD TO REST UPON HER BOSOM +1688-142285-0073-2021: PRESENTLY THE SPASM THAT FORESHADOWED DEATH HAD PASSED AWAY AND BESSY ROUSED HERSELF AND SAID +1688-142285-0074-2022: I'LL GO TO BED IT'S BEST PLACE BUT CATCHING (AT->THAT) MARGARET'S (GOWN YO'LL->GUN YOU'LL) COME AGAIN I KNOW (YO->YOU) WILL BUT JUST SAY IT +1688-142285-0075-2023: I WILL COME TO MORROW SAID MARGARET +1688-142285-0076-2024: MARGARET WENT AWAY VERY SAD AND THOUGHTFUL +1688-142285-0077-2025: SHE WAS LATE FOR TEA AT HOME +1688-142285-0078-2026: HAVE YOU MET WITH A SERVANT DEAR +1688-142285-0079-2027: NO MAMMA THAT ANNE BUCKLEY WOULD NEVER HAVE DONE +1688-142285-0080-2028: 
(SUPPOSE->S'POSE) I TRY SAID MISTER HALE +1688-142285-0081-2029: EVERYBODY ELSE HAS HAD (THEIR->THEY) TURN AT THIS GREAT DIFFICULTY NOW LET ME TRY +1688-142285-0082-2030: I MAY BE THE (CINDERELLA->CINOLA) TO PUT ON THE SLIPPER AFTER ALL +1688-142285-0083-2031: (WHAT->BUT) WOULD YOU DO PAPA HOW WOULD YOU SET ABOUT IT +1688-142285-0084-2032: WHY I WOULD APPLY (*->IT) TO SOME GOOD HOUSE MOTHER TO RECOMMEND ME ONE KNOWN TO HERSELF OR HER SERVANTS +1688-142285-0085-2033: VERY GOOD BUT WE MUST FIRST CATCH OUR HOUSE MOTHER +1688-142285-0086-2034: THE MOTHER OF WHOM HE SPOKE TO US SAID MARGARET +1688-142285-0087-2035: (MISSUS->MISTER) THORNTON THE ONLY MOTHER HE HAS I BELIEVE SAID MISTER HALE QUIETLY +1688-142285-0088-2036: I SHALL LIKE TO SEE HER SHE MUST BE AN UNCOMMON PERSON HER MOTHER ADDED +1688-142285-0089-2037: PERHAPS SHE MAY HAVE A RELATION WHO MIGHT SUIT US AND BE GLAD OF OUR PLACE +1688-142285-0090-2038: SHE SOUNDED TO BE SUCH A CAREFUL ECONOMICAL PERSON THAT I SHOULD LIKE ANY ONE OUT OF THE SAME FAMILY +1688-142285-0091-2039: MY DEAR SAID MISTER HALE ALARMED PRAY DON'T GO OFF ON THAT IDEA +1688-142285-0092-2040: I AM SURE AT ANY RATE SHE WOULD NOT LIKE STRANGERS TO KNOW ANYTHING ABOUT IT +1688-142285-0093-2041: TAKE NOTICE THAT (*->THIS) IS NOT MY KIND OF HAUGHTINESS PAPA IF I HAVE ANY AT ALL WHICH I DON'T AGREE TO THOUGH (YOU'RE->YOU) ALWAYS ACCUSING ME OF IT +1688-142285-0094-2042: I DON'T KNOW POSITIVELY THAT IT IS HERS EITHER BUT FROM LITTLE THINGS I HAVE GATHERED FROM HIM I FANCY SO +1688-142285-0095-2043: THEY CARED TOO LITTLE TO ASK IN WHAT MANNER HER SON HAD SPOKEN ABOUT HER +1998-15444-0000-2204: IF CALLED TO A CASE SUPPOSED (OR->AS) SUSPECTED TO BE ONE OF POISONING THE MEDICAL MAN HAS TWO DUTIES TO PERFORM TO SAVE THE PATIENT'S LIFE AND TO PLACE HIMSELF IN A POSITION TO GIVE EVIDENCE (IF->OF) CALLED (ON TO DO->UNTO) SO +1998-15444-0001-2205: HE SHOULD MAKE INQUIRIES AS TO SYMPTOMS AND TIME AT WHICH FOOD OR MEDICINE (WAS->MUST) LAST TAKEN +1998-15444-0002-2206: HE SHOULD NOTICE THE POSITION AND TEMPERATURE OF THE BODY THE CONDITION OF RIGOR MORTIS MARKS OF VIOLENCE APPEARANCE OF LIPS AND MOUTH +1998-15444-0003-2207: IN MAKING A POST (MORTEM->MODE OF) EXAMINATION THE ALIMENTARY CANAL SHOULD BE REMOVED AND PRESERVED FOR FURTHER INVESTIGATION +1998-15444-0004-2208: THE GUT AND THE (GULLET->GALLANT) BEING CUT ACROSS BETWEEN THESE LIGATURES THE STOMACH MAY BE REMOVED ENTIRE WITHOUT (SPILLING->SPINNING) ITS CONTENTS +1998-15444-0005-2209: IF THE (MEDICAL PRACTITIONER IS IN DOUBT->MEDICA PETITIONERS ENDOWED) ON ANY POINT HE SHOULD OBTAIN TECHNICAL ASSISTANCE FROM (SOMEONE->SOME ONE) WHO HAS PAID ATTENTION TO THE SUBJECT +1998-15444-0006-2210: IN A CASE OF ATTEMPTED SUICIDE BY POISONING IS IT THE DUTY OF THE DOCTOR TO INFORM THE POLICE +1998-15444-0007-2211: THE BEST (EMETIC->AMATIC) IS THAT WHICH IS AT HAND +1998-15444-0008-2212: (THE DOSE->THAT IS) FOR AN (ADULT->ADULGE) IS TEN MINIMS +1998-15444-0009-2213: (APOMORPHINE->EPIMORPHIN) IS NOT (ALLIED->ALID) IN PHYSIOLOGICAL ACTION TO MORPHINE AND MAY BE GIVEN IN CASES OF NARCOTIC POISONING +1998-15444-0010-2214: (TICKLING->TITLING) THE (FAUCES->FORCES) WITH (A->THE) FEATHER MAY EXCITE (VOMITING->WARMITTING) +1998-15444-0011-2215: IN USING THE (ELASTIC->ELECTIC) STOMACH TUBE SOME FLUID SHOULD BE INTRODUCED INTO THE STOMACH BEFORE ATTEMPTING TO EMPTY IT OR A PORTION OF THE MUCOUS MEMBRANE MAY BE (SUCKED->SACKED) INTO THE APERTURE +1998-15444-0012-2216: THE TUBE SHOULD BE EXAMINED TO SEE THAT IT IS NOT BROKEN OR CRACKED AS ACCIDENTS HAVE 
HAPPENED FROM NEGLECTING THIS PRECAUTION +1998-15444-0013-2217: (ANTIDOTES ARE->AND HE VOTES A) USUALLY GIVEN HYPODERMICALLY OR IF (BY MOUTH->THE MOUSE) IN THE FORM OF TABLETS +1998-15444-0014-2218: IN THE ABSENCE OF (A->THE) HYPODERMIC SYRINGE THE REMEDY MAY BE GIVEN BY THE (RECTUM->RECTIM) +1998-15444-0015-2219: NOTICE THE (SMELL->SMAR) COLOUR AND GENERAL APPEARANCE OF THE MATTER SUBMITTED FOR EXAMINATION +1998-15444-0016-2220: FOR THE SEPARATION OF AN (ALKALOID->AKALOID) THE FOLLOWING IS THE PROCESS OF (STAS OTTO->STARS ARE TWO) +1998-15444-0017-2221: THIS PROCESS IS BASED UPON THE PRINCIPLE THAT THE SALTS OF THE (ALKALOIDS->AKALITES) ARE SOLUBLE IN (ALCOHOL AND->AKELET) WATER AND INSOLUBLE IN ETHER +1998-15444-0018-2222: THE PURE (ALKALOIDS WITH->IKOLOITS WAS) THE EXCEPTION OF MORPHINE IN ITS CRYSTALLINE FORM (ARE->A) SOLUBLE (IN ETHER->BENEATH THEM) +1998-15444-0019-2223: TWO (COOL->U) THE (MIXTURE->MIXED) AND FILTER WASH THE RESIDUE WITH STRONG ALCOHOL AND MIX THE (FILTRATES->FUR TRADES) +1998-15444-0020-2224: THE RESIDUE MAY BE (SET->SAID) ASIDE FOR THE DETECTION OF THE METALLIC POISONS (IF->OF) SUSPECTED (EXPEL->EXPELLED) THE (ALCOHOL BY->ALCOHOLBA) CAREFUL EVAPORATION +1998-15444-0021-2225: ON THE EVAPORATION OF THE ALCOHOL THE (RESINOUS->ZENOUS) AND FATTY (MATTERS->MATTER) SEPARATE +1998-15444-0022-2226: EVAPORATE THE (FILTRATE->FEDERATE) TO A (SYRUP->CYRUP) AND EXTRACT WITH SUCCESSIVE PORTIONS OF ABSOLUTE ALCOHOL +1998-15444-0023-2227: SEPARATE THE ETHEREAL SOLUTION AND (EVAPORATE->THE REPARATE) +1998-15444-0024-2228: FIVE A PART OF THIS (ETHEREAL->ASSYRIAL) SOLUTION IS (POURED->PUT) INTO A WATCH GLASS AND (ALLOWED->ALLOW) TO EVAPORATE +1998-15444-0025-2229: TO PURIFY IT (ADD A SMALL->ADDISMA) QUANTITY OF (DILUTE SULPHURIC->DELUDE SUFFERG) ACID AND AFTER EVAPORATING TO THREE QUARTERS OF ITS BULK ADD (A->*) SATURATED SOLUTION OF CARBONATE OF POTASH OR SODA +1998-15444-0026-2230: (BOIL->BY) THE (FINELY->FINAL) DIVIDED SUBSTANCE WITH ABOUT ONE (EIGHTH->EIGHTHS) ITS (BULK->BAG) OF PURE HYDROCHLORIC ACID ADD FROM TIME TO TIME POTASSIC (CHLORATE->LOW RAGE) UNTIL THE SOLIDS ARE REDUCED TO A STRAW YELLOW FLUID +1998-15444-0027-2231: THE RESIDUE OF THE MATERIAL AFTER DIGESTION (WITH->WAS) HYDROCHLORIC ACID AND (POTASSIUM CHLORATE->POTASSIAN CHLORIDE) MAY HAVE TO BE EXAMINED FOR SILVER LEAD AND (BARIUM->BURIUM) +1998-29454-0000-2157: A THOUSAND BLESSINGS FROM A GRATEFUL HEART +1998-29454-0001-2158: PERUSAL (SAID THE PAWNBROKER->SET UPON BROKER) THAT'S THE WAY TO (PERNOUNCE->PRONOUNCE) IT +1998-29454-0002-2159: HIS BOOKS TOLD HIM THAT (TREASURE IS->TREASURES) BEST HIDDEN UNDER LOOSE BOARDS (UNLESS->AND AS) OF COURSE YOUR HOUSE (HAS->HAD) A SECRET PANEL WHICH HIS HAD NOT +1998-29454-0003-2160: HE GOT IT UP AND PUSHED HIS TREASURES AS FAR IN AS HE COULD ALONG THE ROUGH (CRUMBLY->CRAMBLY) SURFACE OF THE (LATH->LAST) AND PLASTER +1998-29454-0004-2161: WHEN DICKIE CAME DOWN HIS AUNT (SLIGHTLY->SAT HE) SLAPPED HIM AND HE TOOK THE HALFPENNY AND (LIMPED OFF->LIMP OF) OBEDIENTLY +1998-29454-0005-2162: HE HAD NEVER SEEN ONE BEFORE AND IT INTERESTED HIM EXTREMELY +1998-29454-0006-2163: HE LOOKED ABOUT HIM AND KNEW THAT HE DID NOT AT ALL KNOW WHERE HE WAS +1998-29454-0007-2164: WHAT'S UP (MATEY LOST->MATE YOU ASKED) YOUR WAY DICKIE EXPLAINED +1998-29454-0008-2165: WHEN HE SAID (AVE->HAVE) I (BIN->BEEN) ASLEEP +1998-29454-0009-2166: HERE WE ARE SAID THE MAN +1998-29454-0010-2167: NOT (EXACKLY->EXACTLY) SAID THE MAN BUT IT'S ALL RIGHT +1998-29454-0011-2168: WHEN IT WAS OVER THE (MAN->MEN) ASKED DICKIE 
IF HE COULD WALK A LITTLE WAY AND WHEN (DICKIE->DICKY) SAID HE COULD THEY SET OUT IN THE MOST FRIENDLY WAY SIDE BY SIDE +1998-29454-0012-2169: AND THE (TEA->TUNO) AND (ALL AN->*) THE EGG +1998-29454-0013-2170: AND THIS IS THE PRETTIEST PLACE EVER I SEE +1998-29454-0014-2171: I SHALL CATCH IT (A FAIR->IF HER) TREAT AS IT IS +1998-29454-0015-2172: SHE WAS (WAITIN->WAITING) FOR THE WOOD TO (BOIL->BOY) THE KETTLE WHEN (I->TO) COME OUT MOTHER +1998-29454-0016-2173: (AIN'T->AND) BAD WHEN SHE'S IN A GOOD TEMPER +1998-29454-0017-2174: (THAT AIN'T WHAT SHE'LL->THEN BUT HE'LL) BE IN WHEN YOU GETS BACK +1998-29454-0018-2175: I GOT (TO STICK IT->A STICKET) SAID (DICKIE->DICKY) SADLY I'D BEST BE GETTING HOME +1998-29454-0019-2176: I WOULDN'T GO (OME->HOME) NOT IF (I->EVER) WAS YOU SAID THE MAN +1998-29454-0020-2177: NO SAID DICKIE OH NO NO I NEVER +1998-29454-0021-2178: (I AIN'T IT YER->AND A DEAR) HAVE I LIKE WHAT (YER AUNT DO->YOU AREN'T TO) +1998-29454-0022-2179: WELL (THAT'LL->THOU) SHOW YOU THE SORT OF (MAN->MEN) I AM +1998-29454-0023-2180: THE MAN'S MANNER WAS SO KIND AND HEARTY THE WHOLE ADVENTURE WAS SO WONDERFUL AND NEW IS IT COUNTRY WHERE YOU GOING +1998-29454-0024-2181: THE SUN (SHOT->HAD) LONG GOLDEN BEAMS THROUGH THE GAPS (IN->AND) THE HEDGE +1998-29454-0025-2182: A BIRD (PAUSED->PASSED) IN ITS FLIGHT ON A BRANCH QUITE CLOSE AND CLUNG THERE SWAYING +1998-29454-0026-2183: HE TOOK OUT OF HIS POCKET A NEW ENVELOPE (A->AND) NEW SHEET OF PAPER AND A NEW PENCIL READY SHARPENED BY MACHINERY +1998-29454-0027-2184: (AN->AND) I (ASKS->ASK) YOU LET ME COME (ALONGER->ALONG ARE) YOU GOT THAT +1998-29454-0028-2185: GET (IT WROTE->US RODE) DOWN THEN DONE +1998-29454-0029-2186: THEN HE FOLDED IT AND PUT IT IN HIS POCKET +1998-29454-0030-2187: NOW (WE'RE->WE ARE) SQUARE HE SAID +1998-29454-0031-2188: THEY COULD PUT A (MAN->MEN) AWAY FOR (LESS->US) THAN THAT +1998-29454-0032-2189: I SEE THAT (THERE IN->THEN) A BOOK SAID (DICKIE CHARMED->DICK HAD SHUMMED) +1998-29454-0033-2190: HE REWARD THE WAKE THE LAST OF THE ENGLISH AND (I WUNNERED->A ONE AT) WHAT IT STOOD FOR +1998-29454-0034-2191: (WILD->WHITE) ONES (AIN'T ALF THE->AND A HALF) SIZE I LAY +1998-29454-0035-2192: ADVENTURES I SHOULD THINK SO +1998-29454-0036-2193: AH SAID (DICKIE->DICKY) AND A (FULL->FOOT) SILENCE FELL BETWEEN THEM +1998-29454-0037-2194: THAT WAS CHARMING BUT IT WAS PLEASANT TOO TO WASH THE (MUD OFF->MATTER) ON THE WET GRASS +1998-29454-0038-2195: (DICKIE->DICKY) ALWAYS REMEMBERED THAT MOMENT +1998-29454-0039-2196: SO YOU SHALL SAID MISTER BEALE A (REG'LER->REG'LAR) WASH ALL OVER THIS VERY NIGHT I ALWAYS LIKE A WASH MESELF +1998-29454-0040-2197: SOME (BLOKES->LOOKS) THINK IT PAYS TO BE DIRTY BUT IT DON'T +1998-29454-0041-2198: IF (YOU'RE->YO) CLEAN THEY SAY (HONEST->ON DIS) POVERTY AN IF (YOU'RE->YO) DIRTY THEY SAY SERVE YOU RIGHT +1998-29454-0042-2199: YOU ARE GOOD SAID DICKIE I DO LIKE YOU +1998-29454-0043-2200: I KNOW YOU WILL SAID DICKIE WITH ENTHUSIASM I KNOW (OW->HOW) GOOD YOU ARE +1998-29454-0044-2201: BLESS ME SAID MISTER BEALE UNCOMFORTABLY WELL THERE +1998-29454-0045-2202: (STEP OUT SONNY OR WE'LL->SPATANI ALBER) NEVER GET THERE THIS (SIDE->SORT OF) CHRISTMAS +1998-29454-0046-2203: WELL (YOU'LL->YOU) KNOW ALL ABOUT IT PRESENTLY +1998-29455-0000-2232: THE SINGING AND LAUGHING WENT ON LONG AFTER HE HAD FALLEN ASLEEP AND IF LATER IN THE EVENING (THERE->THEY) WERE (LOUD VOICED->ALL OUTWARDS) ARGUMENTS OR (*->A) QUARRELS EVEN DICKIE DID NOT HEAR THEM +1998-29455-0001-2233: WHAT'S (ALL->ON) THAT THERE DICKIE ASKED POINTING TO THE ODD 
(KNOBBLY->NOBLY) BUNDLES OF ALL SORTS AND SHAPES TIED ON TO THE (PERAMBULATOR'S->PRAMULATOR'S) FRONT +1998-29455-0002-2234: TELL (YER->YOU) WHAT (MATE->MADE) LOOKS TO ME AS IF (I'D->I) TOOK A FANCY TO YOU +1998-29455-0003-2235: (SWELP->SWAP) ME HE SAID HELPLESSLY +1998-29455-0004-2236: (OH->O) LOOK SAID (DICKIE->DICKY) THE FLOWERS +1998-29455-0005-2237: (THEY'RE->THERE) ONLY (WEEDS->READS) SAID (BEALE->BEER) +1998-29455-0006-2238: BUT I (SHALL->SHOULD) HAVE THEM (WHILE THEY'RE->WHETHER) ALIVE SAID (DICKIE->DICKY) AS HE HAD SAID TO THE (PAWNBROKER ABOUT->PONDBROKER BUT) THE MOONFLOWERS +1998-29455-0007-2239: (HI->AY) THERE (GOES->WAS) A RABBIT +1998-29455-0008-2240: (SEE IM CROST THE->SEEM QUEST) ROAD THERE (SEE HIM->SEEM) +1998-29455-0009-2241: HOW BEAUTIFUL SAID (DICKIE->DICKY) WRIGGLING WITH DELIGHT +1998-29455-0010-2242: THIS LIFE OF THE RABBIT AS DESCRIBED BY MISTER BEALE WAS THE CHILD'S FIRST GLIMPSE OF FREEDOM I'D LIKE TO BE A RABBIT +1998-29455-0011-2243: (OW'M->AM) I TO (WHEEL->REA) THE (BLOOMIN PRAM->ROOM IN PEM) IF (YOU->YOUR) GOES ON LIKE AS IF YOU WAS A (BAG->PEG) OF EELS +1998-29455-0012-2244: I LIKE YOU (NEXTER->NEXT TO) MY OWN DADDY AND MISTER (BAXTER->BAXT THE) NEXT DOOR +1998-29455-0013-2245: THAT'S ALL RIGHT SAID MISTER BEALE AWKWARDLY +1998-29455-0014-2246: (DICKIE QUICK TO->DICKY QUICKLY) IMITATE TOUCHED HIS +1998-29455-0015-2247: POOR LITTLE MAN SAID THE LADY YOU MISS YOUR MOTHER DON'T YOU +1998-29455-0016-2248: OH WELL DONE LITTLE (UN->ONE) SAID MISTER (BEALE->BEE) TO HIMSELF +1998-29455-0017-2249: THE TWO TRAVELLERS WERE LEFT FACING EACH OTHER THE RICHER BY A PENNY AND (OH->O) WONDERFUL GOOD FORTUNE A WHOLE HALF CROWN +1998-29455-0018-2250: NO I NEVER SAID DICKIE (ERE'S->YES) THE (STEEVER->STEVER) +1998-29455-0019-2251: YOU STICK TO THAT SAID (BEALE->BEARD) RADIANT (WITH->WAS) DELIGHT YOU'RE A FAIR MASTERPIECE YOU ARE YOU EARNED IT HONEST IF EVER (A KID->KIT) DONE +1998-29455-0020-2252: THEY WENT ON UP THE HILL AS HAPPY AS ANY ONE NEED WISH TO BE +1998-29455-0021-2253: PLEASE (DO NOT BE->DON'T REPEAT) TOO SHOCKED +1998-29455-0022-2254: REMEMBER THAT NEITHER OF THEM KNEW ANY BETTER +1998-29455-0023-2255: TO THE (ELDER->OTHER) TRAMP LIES (AND BEGGING WERE->IN PEGGING WHERE) NATURAL MEANS OF LIVELIHOOD +1998-29455-0024-2256: BUT YOU SAID THE BED (WITH->WAS) THE GREEN CURTAINS (URGED DICKIE->ADDED THE KEI) +1998-29455-0025-2257: WHICH THIS (AIN'T->END) NOT BY NO MEANS +1998-29455-0026-2258: THE NIGHT IS FULL OF INTERESTING LITTLE SOUNDS THAT WILL NOT AT FIRST LET YOU SLEEP THE RUSTLE OF LITTLE (WILD->WHITE) THINGS (IN->ON) THE (HEDGES->HATCHES) THE BARKING OF DOGS (IN->AND) DISTANT FARMS THE CHIRP OF CRICKETS AND THE CROAKING OF FROGS +1998-29455-0027-2259: (THE NEW->THEN YOU) GAME OF BEGGING AND INVENTING STORIES TO INTEREST THE PEOPLE FROM WHOM IT WAS (WORTH->WORSE) WHILE TO BEG WENT ON GAILY DAY BY DAY AND WEEK BY WEEK AND DICKIE BY CONSTANT PRACTICE GREW SO CLEVER AT TAKING HIS PART IN THE ACTING THAT MISTER (BEALE->BEER) WAS QUITE DAZED WITH ADMIRATION +1998-29455-0028-2260: (BLESSED->BLEST) IF I EVER SEE SUCH A NIPPER HE SAID OVER AND OVER AGAIN +1998-29455-0029-2261: CLEVER AS A (TRAINDAWG E->TRAIN DOG) IS (AN ALL OUTER IS->IN OUR OUTER'S) OWN (EAD->HEAD) +1998-29455-0030-2262: I (AIN'T->AM) SURE AS I (ADN'T->HADN'T) BETTER STICK TO THE ROAD AND KEEP AWAY FROM OLD (ANDS->ENDS) LIKE (YOU JIM->EUGEN) +1998-29455-0031-2263: (I OPE E'S CLEVER->IOPIUS LOVE) ENOUGH TO DO (WOT E'S->WHAT IS) TOLD (KEEP IS MUG SHUT->HE WAS MUCH AT) THAT'S ALL +1998-29455-0032-2264: IF (E'S 
STRAIGHT E'LL->HE STRAYED YOU) DO FOR ME AND IF HE (AIN'T->AND) I'LL DO FOR (IM->HIM) SEE +1998-29455-0033-2265: SEE THAT (BLOKE JUST->LOCTICE) NOW SAID MISTER BEALE (YUSS->YES) SAID (DICKIE->DICKY) +1998-29455-0034-2266: WELL YOU NEVER SEE (IM->HIM) +1998-29455-0035-2267: IF ANY ONE (ARSTS->ASKED) YOU IF YOU EVER SEE (IM->HIM) YOU NEVER SET EYES ON (IM->HIM) IN ALL YOUR BORN NOT TO REMEMBER (IM->HIM) +1998-29455-0036-2268: DICKIE WAS FULL OF QUESTIONS BUT MISTER (BEALE->BEE) HAD NO ANSWERS FOR THEM +1998-29455-0037-2269: NOR WAS IT SUNDAY ON WHICH THEY TOOK A REST AND WASHED THEIR SHIRTS ACCORDING TO MISTER (BEALE'S->BEAT'S) RULE OF LIFE +1998-29455-0038-2270: THEY DID NOT STAY THERE BUT WALKED OUT ACROSS THE DOWNS (WHERE->WITH) THE (SKYLARKS->SKYLECKS) WERE SINGING AND ON A DIP OF THE DOWNS CAME UPON GREAT STONE WALLS AND TOWERS (VERY->WHERE) STRONG AND GRAY +1998-29455-0039-2271: WHAT'S THAT THERE SAID (DICKIE->DICKY) +2033-164914-0000-661: REPLIED HE OF A TRUTH I HEARD HIM NOT AND I WOT HIM NOT AND FOLKS ARE ALL SLEEPING +2033-164914-0001-662: BUT SHE SAID (WHOMSOEVER->WHOSOEVER) THOU SEEST AWAKE HE IS THE RECITER +2033-164914-0002-663: THEN SAID THE EUNUCH ART THOU HE WHO REPEATED POETRY BUT NOW AND MY LADY HEARD HIM +2033-164914-0003-664: REJOINED THE EUNUCH WHO THEN WAS THE RECITER POINT HIM OUT TO ME +2033-164914-0004-665: BY ALLAH REPLIED THE FIREMAN I TELL THEE THE TRUTH +2033-164914-0005-666: TELL ME WHAT HAPPENED (QUOTH ZAU AL MAKAN->QUOMAN) +2033-164914-0006-667: WHAT AILS THEE THEN THAT THOU MUST NEEDS RECITE VERSES SEEING THAT WE ARE TIRED OUT WITH WALKING AND WATCHING AND ALL THE FOLK ARE ASLEEP FOR THEY REQUIRE SLEEP TO REST THEM OF THEIR FATIGUE +2033-164914-0007-668: AND HE ALSO (IMPROVISED->PROVISED) THE TWO FOLLOWING (DISTICHS->DISTINCTS) +2033-164914-0008-669: WHEN (NUZHAT->NUZHA'S) AL ZAMAN HEARD THE FIRST IMPROVISATION SHE CALLED TO (MIND->MINE) HER FATHER AND HER MOTHER AND HER BROTHER AND THEIR (WHILOME->WILM) HOME THEN SHE WEPT AND CRIED (AT->TO) THE EUNUCH AND SAID TO HIM WOE TO THEE +2033-164914-0009-670: HE WHO RECITED THE FIRST TIME (HATH->HAD) RECITED A SECOND TIME AND (I->*) HEARD HIM (HARD->HEART) BY +2033-164914-0010-671: BY ALLAH AN THOU FETCH HIM NOT TO ME I WILL ASSUREDLY ROUSE THE CHAMBERLAIN ON THEE AND HE SHALL BEAT THEE AND CAST THEE OUT +2033-164914-0011-672: BUT TAKE THESE HUNDRED DINERS AND GIVE THEM TO THE SINGER AND BRING HIM TO ME GENTLY AND DO HIM NO HURT +2033-164914-0012-673: RETURN QUICKLY AND LINGER NOT +2033-164914-0013-674: WHEN IT WAS THE SEVENTY THIRD NIGHT +2033-164914-0014-675: BUT THE EUNUCH SAID I WILL NOT LEAVE THEE TILL THOU SHOW ME WHO IT WAS THAT RECITED THE VERSES FOR I DREAD RETURNING TO MY LADY WITHOUT HIM +2033-164914-0015-676: NOW WHEN THE FIREMAN HEARD THESE WORDS HE FEARED FOR (ZAU->ZA) AL MAKAN AND WEPT WITH EXCEEDING WEEPING AND SAID TO THE EUNUCH BY ALLAH IT WAS NOT I AND (I->THEY) KNOW HIM NOT +2033-164914-0016-677: SO GO THOU TO THY STATION AND IF THOU AGAIN (MEET->*) ANY ONE AFTER THIS HOUR RECITING AUGHT OF POETRY WHETHER HE BE NEAR OR FAR IT WILL BE I OR SOME ONE I KNOW AND THOU SHALT NOT LEARN OF HIM BUT BY ME +2033-164914-0017-678: THEN HE KISSED THE EUNUCH'S HEAD AND SPAKE HIM FAIR TILL HE WENT AWAY BUT THE CASTRATO FETCHED (A ROUND->AROUND) AND RETURNING SECRETLY CAME AND STOOD BEHIND THE FIREMAN FEARING TO GO BACK TO HIS MISTRESS WITHOUT (TIDINGS->HIDINGS) +2033-164914-0018-679: I SAY WHAT MADE MY IGNOMY (WHATE'ER->WHATEVER) THE BITTER (CUP I->CUPIED) DRAIN FAR BE (FRO->FROM) ME (THAT->THE) LAND TO FLEE 
NOR WILL I BOW TO THOSE WHO BLAME AND FOR SUCH LOVE WOULD DEAL ME SHAME +2033-164914-0019-680: THEN SAID THE EUNUCH TO (ZAU->ZA) AL MAKAN PEACE BE WITH THEE O MY LORD +2033-164914-0020-681: O MY LORD CONTINUED THE EUNUCH AND SHAHRAZAD PERCEIVED (*->THAT) THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164914-0021-682: WE WILL DO THEE NO UPRIGHT O MY SON NOR WRONG THEE IN AUGHT BUT OUR OBJECT IS THAT THOU BEND THY (GRACIOUS->GRECIOUS) STEPS WITH ME TO MY MISTRESS TO RECEIVE HER ANSWER AND (RETURN IN WEAL->RETURNING WHEEL) AND SAFETY AND THOU SHALT HAVE A HANDSOME PRESENT AS ONE WHO BRINGETH GOOD NEWS +2033-164914-0022-683: THEN THE EUNUCH WENT OUT TO (ZAU AL->ZAO) MAKAN AND SAID TO HIM RECITE WHAT (VERSES->VERSEST) THOU KNOWEST FOR MY (LADY IS->LADIES) HERE HARD BY LISTENING TO THEE AND AFTER I WILL ASK THEE OF THY NAME AND (THY->THINE) NATIVE COUNTRY AND THY CONDITION +2033-164915-0000-643: AND ALSO THESE +2033-164915-0001-644: THEN SHE THREW HERSELF UPON HIM AND HE GATHERED HER TO HIS BOSOM AND THE TWAIN FELL DOWN IN A FAINTING FIT +2033-164915-0002-645: WHEN THE (EUNUCH->EUNUCHS) SAW (THIS CASE->THESE CAVES) HE WONDERED AT THEM AND THROWING OVER THEM SOMEWHAT TO COVER THEM WAITED TILL THEY SHOULD RECOVER +2033-164915-0003-646: AFTER A WHILE THEY CAME TO THEMSELVES AND (NUZHAT->UZHAT) AL ZAMAN REJOICED WITH EXCEEDING JOY OPPRESSION AND DEPRESSION LEFT HER AND GLADNESS TOOK THE (MASTERY->MYSTERY) OF HER AND SHE REPEATED THESE VERSES +2033-164915-0004-647: ACCORDINGLY SHE TOLD HIM ALL THAT HAD COME TO HER SINCE THEIR SEPARATION AT THE KHAN AND WHAT HAD HAPPENED TO HER WITH THE (BADAWI->BADARI) HOW THE MERCHANT HAD BOUGHT HER OF HIM AND HAD TAKEN HER TO HER BROTHER (SHARRKAN->SHARKAN) AND HAD SOLD HER TO HIM HOW HE HAD FREED HER AT THE TIME OF BUYING HOW HE HAD MADE (A->HER) MARRIAGE CONTRACT WITH HER AND HAD GONE IN TO HER AND HOW THE KING THEIR SIRE HAD SENT AND ASKED FOR HER FROM (SHARRKAN->SHARKAN) +2033-164915-0005-648: BUT NOW GO TO THY MASTER AND BRING HIM QUICKLY TO ME +2033-164915-0006-649: THE CHAMBERLAIN CALLED THE CASTRATO AND CHARGED HIM TO DO ACCORDINGLY SO HE REPLIED I HEAR AND I OBEY AND HE TOOK HIS PAGES WITH HIM AND WENT OUT IN SEARCH OF THE (STOKER->STOCKER) TILL HE FOUND HIM IN THE REAR OF THE CARAVAN (GIRTHING->GIRDING) HIS ASS AND PREPARING FOR FLIGHT +2033-164915-0007-650: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN THE (STOKER GIRTHED->STOCKER GIRDED) HIS ASS FOR FLIGHT AND BESPAKE HIMSELF SAYING (OH->OV) WOULD I KNEW WHAT IS BECOME OF HIM +2033-164915-0008-651: I BELIEVE HE HATH DENOUNCED ME TO THE EUNUCH HENCE THESE PAGES (ET->AT) ABOUT ME AND HE HATH MADE ME AN ACCOMPLICE IN HIS CRIME +2033-164915-0009-652: WHY DIDST THOU SAY I NEVER REPEATED (THESE->THIS) COUPLETS NOR DO I KNOW WHO REPEATED THEM WHEN IT WAS THY COMPANION +2033-164915-0010-653: BUT NOW I WILL NOT LEAVE THEE BETWEEN THIS PLACE AND BAGHDAD AND WHAT BETIDETH THY COMRADE SHALL (BETIDE->BE TIDE) THEE +2033-164915-0011-654: TWAS AS I FEARED THE (COMING ILLS->CARMINALS) DISCERNING BUT UNTO ALLAH WE ARE ALL RETURNING +2033-164915-0012-655: THEN THE EUNUCH CRIED UPON (THE->HIS) PAGES SAYING TAKE HIM OFF THE ASS +2033-164915-0013-656: AND HE ANSWERED I AM THE CHAMBERLAIN OF THE EMIR OF DAMASCUS KING (SHARRKAN SON->SHARKAN SONG) OF OMAR BIN AL (NU'UMAN->NUMAN) LORD OF (BAGHDAD->AGAD) AND OF THE LAND OF KHORASAN AND I BRING TRIBUTE AND PRESENTS FROM HIM TO HIS FATHER IN BAGHDAD +2033-164915-0014-657: (SO FARE YE->SOPHIA HE) FORWARDS NO HARM SHALL (BEFAL->BEFALL) YOU TILL YOU JOIN HIS 
GRAND WAZIR (DANDAN->TAN) +2033-164915-0015-658: THEN HE BADE HIM BE SEATED AND QUESTIONED HIM AND HE REPLIED THAT HE WAS CHAMBERLAIN TO THE EMIR OF DAMASCUS AND WAS BOUND TO KING OMAR WITH PRESENTS AND THE TRIBUTE OF SYRIA +2033-164915-0016-659: SO IT WAS AGREED THAT WE GO TO DAMASCUS AND FETCH THENCE THE KING'S SON (SHARRKAN->SHARKAN) AND (MAKE->MADE) HIM SULTAN OVER HIS FATHER'S REALM +2033-164915-0017-660: AND AMONGST THEM WERE SOME WHO WOULD HAVE CHOSEN THE CADET (ZAU AL MAKAN->THOU A MACAN) FOR QUOTH THEY HIS NAME BE LIGHT OF THE PLACE AND HE HATH A SISTER NUZHAT AL ZAMAN (HIGHS->HIES) THE DELIGHT OF THE TIME BUT THEY SET OUT FIVE YEARS AGO FOR AL (HIJAZ->HI JAS) AND NONE (WOTTETH->WHATETH) WHAT IS BECOME OF THEM +2033-164916-0000-684: SO HE TURNED TO THE WAZIR DANDAN AND SAID TO HIM VERILY YOUR TALE IS A (WONDER->WANDER) OF WONDERS +2033-164916-0001-685: (KNOW->NO) O CHIEF WAZIR THAT HERE WHERE YOU HAVE ENCOUNTERED ME ALLAH HATH GIVEN YOU REST FROM FATIGUE AND BRINGETH YOU YOUR DESIRE AFTER THE EASIEST OF FASHIONS FOR (THAT->LET) HIS ALMIGHTY WILL (RESTORETH->RESTORE IT) TO YOU (ZAU AL MAKAN->THOU ARMANQUIN) AND (HIS->HE) SISTER (NUZHAT->KNOWSAT) AL ZAMAN WHEREBY WE WILL SETTLE THE MATTER AS WE EASILY CAN +2033-164916-0002-686: WHEN THE (MINISTER->MEANS SIR) HEARD THESE WORDS HE REJOICED WITH GREAT JOY AND SAID O CHAMBERLAIN TELL ME THE TALE OF THE TWAIN AND WHAT BEFEL THEM AND THE CAUSE OF THEIR LONG ABSENCE +2033-164916-0003-687: (ZAU AL MAKAN->ZOMAN) BOWED HIS HEAD AWHILE AND THEN SAID I ACCEPT (THIS->THE) POSITION FOR INDEED THERE WAS NO REFUSING AND HE WAS CERTIFIED THAT THE CHAMBERLAIN HAD COUNSELLED HIM WELL AND WISELY AND (SET->SAID TO) HIM ON THE RIGHT WAY +2033-164916-0004-688: THEN HE ADDED O MY UNCLE HOW SHALL I DO WITH MY BROTHER (SHARRKAN->SHARKAN) +2033-164916-0005-689: AFTER (AWHILE->A WHILE) THE DUST DISPERSED AND THERE APPEARED UNDER IT THE ARMY OF BAGHDAD AND KHORASAN A CONQUERING HOST LIKE THE (FULL TIDE->POOL TIED) SEA AND SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164916-0006-690: WHEN IT WAS THE SEVENTY EIGHTH NIGHT +2033-164916-0007-691: (AND IN IT ALL->ANY NEAT OR) REJOICED AT THE ACCESSION OF THE LIGHT OF THE PLACE +2033-164916-0008-692: LASTLY THE MINISTER WENT IN AND KISSED THE GROUND BEFORE (ZAU AL->ZAO) MAKAN WHO ROSE TO MEET HIM SAYING WELCOME O WAZIR AND (SIRE SANS PEER->SIRES SONSPIER) +2033-164916-0009-693: MOREOVER THE SULTAN COMMANDED HIS WAZIR DANDAN CALL (A->AT) TEN DAYS HALT OF THE ARMY THAT HE MIGHT BE PRIVATE WITH HIM AND LEARN FROM HIM HOW AND WHEREFORE HIS FATHER HAD BEEN SLAIN +2033-164916-0010-694: HE THEN REPAIRED TO THE HEART OF THE ENCAMPMENT AND ORDERED (*->THAT) THE HOST TO HALT TEN DAYS +2414-128291-0000-2689: WHAT HATH HAPPENED (UNTO->TO) ME +2414-128291-0001-2690: HE ASKED HIMSELF SOMETHING (WARM->WRONG) AND LIVING QUICKENETH ME IT MUST BE IN THE NEIGHBOURHOOD +2414-128291-0002-2691: (WHEN->WHO READ) HOWEVER (ZARATHUSTRA->THEIR TWO STRAW) WAS QUITE NIGH (UNTO->AND TO) THEM THEN DID HE HEAR PLAINLY (THAT A->WITH) HUMAN VOICE (SPAKE->TAKE) IN THE MIDST OF THE (KINE->KIND) AND (APPARENTLY->A FRIENDLY) ALL OF THEM HAD TURNED THEIR HEADS TOWARDS THE SPEAKER +2414-128291-0003-2692: (WHAT DO->FOR DIEU) I HERE SEEK +2414-128291-0004-2693: ANSWERED HE THE SAME THAT THOU (SEEKEST->SEEK'ST) THOU MISCHIEF MAKER THAT IS TO SAY HAPPINESS UPON EARTH +2414-128291-0005-2694: FOR I TELL THEE THAT I HAVE (ALREADY->ALWAYS) TALKED HALF A MORNING UNTO THEM AND JUST NOW WERE THEY ABOUT TO GIVE ME (THEIR->THE) ANSWER 
+2414-128291-0006-2695: HE WOULD NOT BE RID OF HIS AFFLICTION +2414-128291-0007-2696: WHO (HATH->HAD) NOT AT PRESENT HIS HEART HIS MOUTH AND HIS EYES FULL OF DISGUST +2414-128291-0008-2697: THOU ALSO THOU ALSO +2414-128291-0009-2698: BUT BEHOLD (THESE KINE->HIS KIND) +2414-128291-0010-2699: THE (KINE->KIND) HOWEVER GAZED AT IT ALL AND WONDERED +2414-128291-0011-2700: WANTON (AVIDITY->ALDITY) BILIOUS ENVY CAREWORN REVENGE (POPULACE->POPULOUS) PRIDE ALL (THESE STRUCK MINE->DISTRACT MIGHT) EYE +2414-128291-0012-2701: IT IS NO LONGER TRUE (THAT THE->LITTLE) POOR ARE (BLESSED->BLEST) +2414-128291-0013-2702: THE KINGDOM OF HEAVEN HOWEVER IS WITH THE (KINE->KIND) AND WHY IS IT NOT WITH (THE->A) RICH +2414-128291-0014-2703: WHY (DOST THOU->THOSE DOUB) TEMPT ME +2414-128291-0015-2704: ANSWERED (THE OTHER->HER) +2414-128291-0016-2705: THOU KNOWEST IT THYSELF BETTER EVEN THAN I +2414-128291-0017-2706: (THUS SPAKE->DOES BEG) THE PEACEFUL ONE AND PUFFED HIMSELF AND (PERSPIRED->POISPIRED) WITH HIS WORDS (SO THAT THE KINE->TO INTER KIND) WONDERED ANEW +2414-128291-0018-2707: THOU DOEST (VIOLENCE->WILDEST) TO THYSELF THOU PREACHER ON THE (MOUNT WHEN->MOUND AND) THOU USEST SUCH (SEVERE->SAVOUR) WORDS +2414-128291-0019-2708: THEY ALSO (ABSTAIN->ABSTAINED) FROM ALL HEAVY THOUGHTS WHICH INFLATE THE HEART +2414-128291-0020-2709: WELL +2414-128291-0021-2710: SAID (ZARATHUSTRA->GUESTRA) THOU SHOULDST ALSO SEE MINE ANIMALS (MINE->MY) EAGLE AND MY SERPENT (THEIR->THEY ARE) LIKE DO NOT AT PRESENT EXIST ON EARTH +2414-128291-0022-2711: AND TALK TO (MINE->MY) ANIMALS OF THE HAPPINESS OF ANIMALS +2414-128291-0023-2712: NOW HOWEVER (TAKE->THEY) LEAVE (AT ONCE->IT WAS) OF (THY KINE->THAT KIND) THOU STRANGE (ONE->WORLD) +2414-128291-0024-2713: THOU AMIABLE ONE +2414-128291-0025-2714: FOR THEY ARE (THY WARMEST->DIVERMISH) FRIENDS AND (PRECEPTORS->PERCEPTORS) +2414-128291-0026-2715: THOU (EVIL FLATTERER->EVEN SLACKER) +2414-128292-0000-2618: WHITHER (HATH->HAD) MY (LONESOMENESS GONE->LONESOME DISCOUR) SPAKE HE +2414-128292-0001-2619: MY SHADOW (CALLETH->CAUGHT) ME +2414-128292-0002-2620: WHAT MATTER ABOUT MY SHADOW +2414-128292-0003-2621: (LET IT RUN AFTER->NEKHLUD TRUE ENOUGH TO) ME I (RUN->RAN) AWAY FROM IT +2414-128292-0004-2622: THUS (SPAKE ZARATHUSTRA->BEING THEIR TOO STRIKE) TO HIS HEART AND RAN AWAY +2414-128292-0005-2623: VERILY MY FOLLY HATH GROWN BIG IN THE MOUNTAINS +2414-128292-0006-2624: NOW DO I HEAR SIX OLD (FOOLS->FOOD'S) LEGS RATTLING BEHIND ONE ANOTHER +2414-128292-0007-2625: (BUT DOTH ZARATHUSTRA->BY DIRTS ARE TOUSTRA) NEED TO BE FRIGHTENED BY (HIS->A) SHADOW +2414-128292-0008-2626: ALSO (METHINKETH->METHINK IT) THAT AFTER ALL IT (HATH LONGER LEGS->HAD LONG OR LESS) THAN MINE +2414-128292-0009-2627: FOR WHEN (ZARATHUSTRA SCRUTINISED->THEIR DISTRESS COGNIZED) HIM (WITH HIS->IT IS) GLANCE HE WAS FRIGHTENED (AS BY->ALBERT) A (SUDDEN->CERTAIN) APPARITION SO SLENDER (SWARTHY->SWALLTY) HOLLOW AND WORN OUT DID (THIS->HIS) FOLLOWER APPEAR +2414-128292-0010-2628: (ASKED ZARATHUSTRA VEHEMENTLY->I TAKE TO EXTRAVE IMAGING) WHAT (DOEST->DOST) THOU (HERE->HEAR) +2414-128292-0011-2629: AND WHY (CALLEST->COLLARST) THOU THYSELF MY SHADOW +2414-128292-0012-2630: THOU ART NOT PLEASING (UNTO->INTO) ME +2414-128292-0013-2631: MUST I EVER BE ON THE WAY +2414-128292-0014-2632: O (EARTH->ART) THOU HAST BECOME (TOO->TO) ROUND FOR ME +2414-128292-0015-2633: (WHEN->WITH) THE DEVIL (CASTETH->CAST AT) HIS (SKIN->KIN) DOTH NOT HIS NAME ALSO FALL AWAY IT IS ALSO SKIN +2414-128292-0016-2634: THE DEVIL HIMSELF IS PERHAPS SKIN 
+2414-128292-0017-2635: SOMETIMES I MEANT TO LIE AND BEHOLD +2414-128292-0018-2636: THEN (ONLY->OLD LADY) DID I HIT THE TRUTH +2414-128292-0019-2637: HOW (HAVE->*) I STILL INCLINATION +2414-128292-0020-2638: (HAVE->EH) I STILL A (GOAL->GOLD) +2414-128292-0021-2639: A (HAVEN TOWARDS WHICH->HAIRY DOOR SPEECH) MY (SAIL IS SET->SAILOR SAID) +2414-128292-0022-2640: FOR IT (DO->TOO) I ASK AND SEEK AND HAVE (SOUGHT BUT HAVE->THOUGHT IT HATH) NOT FOUND IT +2414-128292-0023-2641: (O ETERNAL->I TURNED) EVERYWHERE (O ETERNAL->WHO HAD TURNED OUT) NOWHERE (O ETERNAL->WHO HAD TURNED UP) IN VAIN +2414-128292-0024-2642: THOU ART MY SHADOW +2414-128292-0025-2643: SAID HE AT LAST SADLY +2414-128292-0026-2644: THY DANGER (IS NOT SMALL->HIS PURSUAL) THOU FREE SPIRIT AND (WANDERER->WONDER) +2414-128292-0027-2645: THEY SLEEP QUIETLY THEY (ENJOY->ENJOYED) THEIR NEW SECURITY +2414-128292-0028-2646: BEWARE LEST IN THE END A NARROW (FAITH->FATE) CAPTURE THEE A HARD (RIGOROUS->RECKLESS) DELUSION +2414-128292-0029-2647: FOR NOW EVERYTHING THAT IS NARROW AND FIXED (SEDUCETH->SEDUCE IT) AND (TEMPTETH->TEMPTED) THEE +2414-128292-0030-2648: THOU HAST LOST (THY GOAL->THEIR GOULD) +2414-128292-0031-2649: (THOU->THE) POOR ROVER AND RAMBLER (THOU->NOW) TIRED (BUTTERFLY->BUT TO FLY) +2414-128292-0032-2650: WILT THOU HAVE A REST (AND A HOME->IN THE WHOLE) THIS EVENING +2414-159411-0000-2653: ONCE UPON (A->HER) TIME A BRAHMAN WHO WAS WALKING ALONG THE ROAD CAME UPON AN IRON CAGE IN WHICH A GREAT TIGER (HAD BEEN->AT MONS) SHUT UP BY THE (VILLAGERS->VILLAGES) WHO CAUGHT HIM +2414-159411-0001-2654: THE (BRAHMAN->BRAMIAN) ANSWERED NO I WILL NOT FOR IF I LET YOU OUT OF THE CAGE YOU WILL EAT ME +2414-159411-0002-2655: OH FATHER OF MERCY ANSWERED THE TIGER IN TRUTH THAT I WILL NOT +2414-159411-0003-2656: I WILL NEVER BE SO UNGRATEFUL ONLY LET ME OUT THAT I MAY (DRINK->BRING) SOME WATER AND RETURN +2414-159411-0004-2657: (THEN->AND IN) THE (BRAHMAN TOOK->BRAM INTO) PITY ON HIM AND OPENED THE CAGE DOOR BUT NO SOONER HAD HE (DONE->TURNED) SO THAN THE TIGER JUMPING OUT SAID NOW I WILL EAT YOU FIRST AND DRINK THE WATER AFTERWARDS +2414-159411-0005-2658: SO THE (BRAHMAN->BRAMID) AND THE TIGER WALKED ON TILL THEY CAME TO A (BANYAN->BANDON) TREE AND THE (BRAHMAN->BRAMMEN) SAID TO IT (BANYAN->BANION) TREE (BANYAN->BAN AND) TREE (HEAR->HERE) AND GIVE (JUDGMENT->JOINTMENT) +2414-159411-0006-2659: ON WHAT MUST I GIVE JUDGMENT ASKED THE (BANYAN->BEN) TREE +2414-159411-0007-2660: (THIS TIGER->DISTAGGER) SAID (THE BRAHMAN->DEBRAMAN) BEGGED ME TO LET HIM OUT OF HIS CAGE TO DRINK A LITTLE WATER AND HE PROMISED NOT TO (HURT->HIDE) ME IF I DID SO BUT NOW THAT I HAVE (LET->LEFT) HIM OUT HE WISHES TO EAT ME +2414-159411-0008-2661: (IS->*) IT (JUST->IS JEALOUS) THAT HE SHOULD DO SO (OR NO->I KNOW) +2414-159411-0009-2662: (LET->LATE) THE TIGER EAT THE MAN FOR MEN ARE (AN->IN) UNGRATEFUL RACE +2414-159411-0010-2663: (SIR->SO) CAMEL SIR CAMEL CRIED THE (BRAHMAN HEAR->BRAMIN HERE) AND GIVE JUDGMENT +2414-159411-0011-2664: AT A LITTLE DISTANCE THEY FOUND A BULLOCK LYING BY THE ROADSIDE +2414-159411-0012-2665: IS IT FAIR THAT HE SHOULD DO SO OR NOT +2414-159411-0013-2666: (LET THE->LATER) TIGER EAT THE MAN FOR MEN HAVE NO PITY +2414-159411-0014-2667: THREE OUT OF THE SIX (HAD GIVEN->IN GIVING) JUDGMENT AGAINST THE BRAHMAN (BUT->WHICH) STILL HE DID NOT LOSE ALL HOPE AND (DETERMINED->TURN MIND) TO ASK THE OTHER THREE +2414-159411-0015-2668: ON WHAT MUST I GIVE (JUDGMENT->YOU TELL ME) ASKED THE EAGLE +2414-159411-0016-2669: THE BRAHMAN (STATED->SUITED) THE CASE AND 
THE EAGLE ANSWERED WHENEVER MEN SEE ME THEY TRY TO SHOOT ME (THEY CLIMB->DECLINE) THE ROCKS AND STEAL AWAY MY LITTLE ONES +2414-159411-0017-2670: THEN THE TIGER BEGAN TO ROAR AND SAID (THE->*) JUDGMENT OF ALL IS AGAINST YOU O BRAHMAN +2414-159411-0018-2671: AFTER THIS THEY SAW AN (ALLIGATOR->ALLEGATOR) AND THE (BRAHMAN->BRAMA) RELATED THE MATTER TO HIM HOPING FOR A MORE FAVORABLE VERDICT +2414-159411-0019-2672: (BUT->WITH) THE (ALLIGATOR SAID->ADDIER TO THE SUIT) WHENEVER (I->A) PUT MY NOSE OUT OF THE WATER (MEN TORMENT->MEAN TOM AND) ME AND (TRY->TRIED) TO KILL ME +2414-159411-0020-2673: (THE BRAHMAN->NEGROMMAN) GAVE HIMSELF UP (AS->AT) LOST BUT AGAIN HE PRAYED THE TIGER TO HAVE PATIENCE AND LET HIM ASK THE OPINION OF THE SIXTH JUDGE +2414-159411-0021-2674: (NOW->BY) THE SIXTH WAS A JACKAL +2414-159411-0022-2675: THE (BRAHMAN->GRAMMER) TOLD HIS STORY AND SAID TO HIM UNCLE (JACKAL UNCLE JACKAL->JACKO AND WILL JACK HOLE) SAY WHAT IS YOUR JUDGMENT +2414-159411-0023-2676: SHOW ME THE (PLACE->PACE) +2414-159411-0024-2677: (WHEN THEY GOT->AND THE COURT) THERE THE JACKAL SAID (NOW BRAHMAN->NABRAMAN) SHOW ME EXACTLY WHERE YOU STOOD +2414-159411-0025-2678: EXACTLY THERE WAS IT ASKED (THE JACKAL->JACO) +2414-159411-0026-2679: EXACTLY HERE REPLIED THE (BRAHMAN->PROMIN) +2414-159411-0027-2680: (WHERE->THERE) WAS THE TIGER THEN +2414-159411-0028-2681: WHY I STOOD SO SAID THE (TIGER->DRAGGER) JUMPING INTO THE CAGE AND MY HEAD WAS ON THIS SIDE +2414-159411-0029-2682: VERY GOOD SAID THE (JACKAL->JACK HOPE) BUT I CANNOT JUDGE WITHOUT UNDERSTANDING THE WHOLE MATTER EXACTLY +2414-159411-0030-2683: (SHUT->SHED) AND BOLTED SAID (THE BRAHMAN->DEBRAMIN) +2414-159411-0031-2684: (THEN SHUT->VENTURED) AND (BOLT IT->BOLTED) SAID (THE->TO) JACKAL +2414-159411-0032-2685: WHEN THE BRAHMAN HAD DONE THIS THE JACKAL SAID OH YOU WICKED AND UNGRATEFUL (TIGER->TYER) +2414-159411-0033-2686: (WHEN THE->WITH A) GOOD BRAHMAN OPENED (YOUR CAGE->YOU CARED) DOOR IS TO EAT HIM THE ONLY RETURN (YOU->HE) WOULD MAKE +2414-159411-0034-2687: PROCEED ON YOUR JOURNEY FRIEND (BRAHMAN->RAMI) +2414-159411-0035-2688: (YOUR ROAD->HE RULED) LIES THAT WAY (AND MINE->IN MIND) THIS +2414-165385-0000-2651: (THUS->THERE'S) ACCOMPLISHED (HE->*) EXCITED (THE->*) ADMIRATION OF EVERY SILLY (COQUETTE->POCKET) AND THE ENVY OF EVERY (FLUTTERING COXCOMB->REFLECTING ACCOUNT) BUT BY ALL YOUNG GENTLEMEN AND LADIES OF UNDERSTANDING HE WAS HEARTILY DESPISED AS A MERE CIVILIZED MONKEY +2414-165385-0001-2652: THAT HIS SOUL MIGHT AFTERWARDS OCCUPY SUCH A STATION AS WOULD BE MOST SUITABLE TO HIS CHARACTER IT WAS (SENTENCED->INTENSE) TO INHABIT (THE->A) BODY OF THAT (FINICAL->PHYNICAL) GRINNING AND (MISCHIEVOUS->MACHIEVOUS) LITTLE (MIMICK->MIMIC) WITH (FOUR->FULL) LEGS WHICH (YOU->SHE) NOW BEHOLD BEFORE YOU +2609-156975-0000-2367: THEN MOSES WAS AFRAID AND SAID SURELY THE THING IS KNOWN +2609-156975-0001-2368: (HOLD->OR) ON (HOLD->HER) FAST (HOLD OUT PATIENCE IS->HODE PATENTS AS) GENIUS +2609-156975-0002-2369: LET US HAVE FAITH THAT RIGHT (MAKES->MATRON) MIGHT AND IN THAT FAITH LET (US DARE->STARED) TO DO OUR DUTY (AS->IF) WE UNDERSTAND IT LINCOLN +2609-156975-0003-2370: THE EGYPTIAN BACKGROUND OF THE BONDAGE +2609-156975-0004-2371: EVERY ONE (WHO IS TURBULENT->WHOSE TREBRANT) HAS BEEN FOUND BY (KING MERNEPTAH->GIMERNETTE PATH) THE TESTIMONY OF THE OLDEST (BIBLICAL NARRATIVES->PABRICAL NARRATIVE) REGARDING THE SOJOURN OF THE HEBREWS IN EGYPT IS ALSO IN PERFECT ACCORD WITH THE (PICTURE->PITCHER) WHICH (THE->IT) CONTEMPORARY EGYPTIAN INSCRIPTIONS GIVE (OF THE->THIS) 
PERIOD +2609-156975-0005-2372: THE ABSENCE OF DETAILED (REFERENCE TO->REFUCER) THE HEBREWS IS THEREFORE PERFECTLY NATURAL +2609-156975-0006-2373: IT SEEMS PROBABLE THAT NOT ALL BUT ONLY PART (OF->IN) THE TRIBES WHICH (ULTIMATELY COALESCED->ULTIMATE COLLETS) INTO THE HEBREW NATION FOUND THEIR WAY TO EGYPT +2609-156975-0007-2374: THE STORIES REGARDING JOSEPH (THE->THEIR) TRADITIONAL (FATHER OF EPHRAIM AND MANASSEH IMPLY->FOUNDER THAT FROM IN MANETTE SE INCLINE) THAT THESE STRONG CENTRAL TRIBES POSSIBLY TOGETHER WITH THE SOUTHERN (TRIBES->TRINES) OF BENJAMIN AND JUDAH WERE THE CHIEF ACTORS (IN THIS->*) OPENING (SCENE->SEEN) IN ISRAEL'S HISTORY +2609-156975-0008-2375: THE (BIBLICAL->BIBOCO) NARRATIVES APPARENTLY (DISAGREE REGARDING->DISAGREED GUARDING) THE DURATION OF THE SOJOURN IN EGYPT +2609-156975-0009-2376: THE LATER (TRADITIONS TEND TO EXTEND->JUDICINES INTEREST IN) THE PERIOD +2609-156975-0010-2377: (HERE->WHO) WERE FOUND (SEVERAL INSCRIPTIONS BEARING->SEVEREND SCRIPS AND SPARED) THE EGYPTIAN NAME OF THE CITY (P ATUM HOUSE->PATUM OUTS) OF THE GOD (ATUM->ATOM) +2609-156975-0011-2378: A CONTEMPORARY INSCRIPTION (ALSO STATES->ONCE ESTATES) THAT HE (FOUNDED->FOUND A) NEAR (PITHUM->PITTHAM) THE HOUSE OF (RAMSES->RANSES) A CITY WITH (A->THE) ROYAL RESIDENCE (AND TEMPLES->IN SIMPLES) +2609-156975-0012-2379: THAT THE HEBREWS WERE (RESTIVE->WRETS OF) UNDER THIS (TYRANNY->SURNING) WAS (NATURAL->NATURALLY) INEVITABLE +2609-156975-0013-2380: WAS ANY OTHER PROCEDURE TO BE (EXPECTED->INSPECTRE) FROM (A DESPOTIC RULER->IT THAT SPOTIC ROAR) OF THAT LAND AND DAY +2609-156975-0014-2381: THE MAKING OF (A->THE) LOYAL PATRIOT +2609-156975-0015-2382: THE STORY OF MOSES BIRTH (AND->AN) EARLY CHILDHOOD IS ONE OF THE MOST INTERESTING CHAPTERS IN BIBLICAL HISTORY +2609-156975-0016-2383: (WAS MOSES JUSTIFIED IN RESISTING THE EGYPTIAN TASKMASTER->WIS MOVES IT'S JEST FIND AN RESISTIN DE GYPTIAN TAX MASTER) +2609-156975-0017-2384: (IS PEONAGE->HIS PINIONS) ALWAYS (DISASTROUS->DISASTRATES) NOT (ONLY->OWING) TO ITS VICTIMS BUT ALSO TO THE GOVERNMENT IMPOSING IT +2609-156975-0018-2385: NATURALLY HE WENT TO THE LAND (OF MIDIAN->A MILLION) +2609-156975-0019-2386: THE WILDERNESS TO THE EAST OF EGYPT (HAD->AND) FOR CENTURIES BEEN THE (PLACE->PLATES) OF (REFUGE FOR->RED FUTURE) EGYPTIAN FUGITIVES +2609-156975-0020-2387: FROM ABOUT TWO THOUSAND B C +2609-156975-0021-2388: ON THE BORDERS OF THE WILDERNESS HE FOUND CERTAIN BEDOUIN (HERDSMEN->HERDSMAN) WHO RECEIVED HIM (HOSPITABLY->HALF SPITABLY) +2609-156975-0022-2389: THESE (SAND WANDERERS->SANDWARES) SENT HIM ON FROM (TRIBE->TIME) TO (TRIBE->TIME) UNTIL HE REACHED THE LAND OF (KEDEM EAST->KIDDAM EACH) OF THE DEAD SEA WHERE HE REMAINED FOR (A->*) YEAR AND A HALF +2609-156975-0023-2390: LATER HE FOUND HIS WAY TO THE COURT OF ONE OF THE LOCAL KINGS (IN->AND) CENTRAL PALESTINE WHERE HE MARRIED AND (BECAME->MICHANG) IN (*->THE) TIME A PROSPEROUS LOCAL PRINCE +2609-156975-0024-2391: THE SCHOOL OF THE (WILDERNESS->WARINESS) +2609-156975-0025-2392: THE STORY OF MOSES IS IN MANY WAYS CLOSELY PARALLEL (TO THAT OF SINUHIT->DID NOT ASSUME IT) +2609-156975-0026-2393: THE PRIEST (OF->*) THE (SUB TRIBE->SUBTRINE) OF THE KENITES RECEIVED HIM INTO HIS HOME AND GAVE HIM HIS DAUGHTER IN MARRIAGE +2609-156975-0027-2394: NOTE THE (CHARACTERISTIC ORIENTAL IDEA->CARE OF RIVER STICK ORIENTOUINE) OF (MARRIAGE->MARES) +2609-156975-0028-2395: HERE MOSES LEARNED (THE->THAT) LESSONS THAT WERE ESSENTIAL FOR HIS (TRAINING->TRAINED IN) AS (THE->A) LEADER AND DELIVERER OF HIS PEOPLE +2609-156975-0029-2396: 
AFTER THE CAPTURE OF JERICHO CERTAIN OF THEM WENT UP WITH (THE SOUTHERN TRIBES TO->A SUDDEN TRIUMPH SHE) CONQUER SOUTHERN PALESTINE +2609-156975-0030-2397: MANY MODERN SCHOLARS (DRAW->DRAWN) THE CONCLUSION FROM THE BIBLICAL NARRATIVE THAT IT WAS FROM THE (KENITES->KENNITES) THAT MOSES FIRST LEARNED OF (YAHWEH->YONWAY) OR AS THE DISTINCTIVE NAME OF ISRAEL'S (GOD->GONE) WAS TRANSLATED BY LATER JEWISH (SCRIBES->GRIBES) JEHOVAH +2609-156975-0031-2398: DO THE (EARLIEST HEBREW->ARIAD SEA BOU) TRADITIONS IMPLY THAT (THE ANCESTORS->INSECTORS) OF THE (ISRAELITES->ISRAIT) WERE (WORSHIPPERS->WORSHIPPED) OF JEHOVAH +2609-156975-0032-2399: THE (TITLE->TOWN) OF HIS (FATHER IN LAW->FUND THEM AND ALL) IMPLIES (THAT->AT) THIS (PRIEST->PREACH) MINISTERED AT SOME (WILDERNESS->LINEN AT) SANCTUARY +2609-156975-0033-2400: (MOSES->ROSES) IN THE HOME OF THE (MIDIAN PRIEST->MENDIAN PRIESTS) WAS BROUGHT INTO DIRECT AND CONSTANT CONTACT WITH THE JEHOVAH WORSHIP +2609-156975-0034-2401: THE CRUEL FATE OF (HIS->THIS) PEOPLE AND THE PAINFUL EXPERIENCE IN EGYPT THAT HAD DRIVEN HIM INTO THE WILDERNESS PREPARED HIS MIND TO RECEIVE THIS TRAINING +2609-156975-0035-2402: HIS (QUEST->PRESS) WAS FOR (A JUST->JETS) AND STRONG GOD ABLE TO DELIVER THE OPPRESSED +2609-156975-0036-2403: THE (WILDERNESS->WEDDINANCE) WITH ITS LURKING FOES AND THE EVER PRESENT DREAD OF HUNGER AND THIRST (DEEPENED HIS->DEEP INTO) SENSE OF NEED AND OF DEPENDENCE UPON A POWER ABLE TO GUIDE THE (DESTINIES->DEBTS NEEDS) OF MEN +2609-156975-0037-2404: THE PEASANTS OF THE (VAST ANTOLIAN->VATS INTOLLIUM) PLAIN (IN->OF) CENTRAL (ASIA->AS A) MINOR (STILL->SO) CALL EVERY LIFE (GIVING->GIVEN) SPRING GOD HATH GIVEN +2609-156975-0038-2405: (THE CONSTANT->THEY CAN'T SENT THE) NECESSITY (OF->A) MEETING THE DANGERS OF THE WILDERNESS AND (OF->THE) DEFENDING THE FLOCKS (ENTRUSTED TO MOSES->AND TRAITS OF JEMOSIS) CARE DEVELOPED HIS COURAGE AND POWER OF (LEADERSHIP->LEGERSHIP) AND ACTION +2609-157645-0000-2352: EVIDENTLY THE INTENTION (WAS TO MAKE->WHICH MADE) THINGS (PLEASANT->PRESENT) FOR THE ROYAL (FOE OF->FOLK A) TOBACCO DURING HIS VISIT +2609-157645-0001-2353: THE (PROHIBITION IN->PROBES AND) THE (REGULATION->REGULATING) QUOTED (OF->HER) SMOKING (IN->AND) SAINT MARY'S CHURCH REFERRED (IT->*) MAY BE NOTED TO THE ACT WHICH WAS HELD THEREIN +2609-157645-0002-2354: SOMETIMES TOBACCO (WAS->IS) USED IN CHURCH FOR (DISINFECTING OR DEODORIZING->DISINFECT AND NO DEAL ARISING) PURPOSES +2609-157645-0003-2355: BLACKBURN (ARCHBISHOP->ARCHBISH) OF YORK WAS A GREAT SMOKER +2609-157645-0004-2356: ON ONE OCCASION HE WAS AT SAINT MARY'S CHURCH (NOTTINGHAM->NINE IN HAM) FOR A (CONFIRMATION->CONFIRMATESON) +2609-157645-0005-2357: ANOTHER EIGHTEENTH CENTURY CLERICAL WORTHY THE FAMOUS (DOCTOR PARR->DOCTRIPAR) AN INVETERATE SMOKER WAS ACCUSTOMED TO DO (WHAT MISTER DISNEY->AT MIDSER DIDNY) PREVENTED (ARCHBISHOP->ARCHBISH OF) BLACKBURN FROM DOING HE SMOKED IN HIS (VESTRY->VETERY) AT HATTON +2609-157645-0006-2358: (PARR WAS->PAR WITH) SUCH A (CONTINUAL SMOKER->CONTINUOUS MOCHER) THAT (ANYONE->ANY ONE) WHO CAME INTO HIS COMPANY IF HE HAD NEVER SMOKED BEFORE (HAD->AND) TO (LEARN->LEARNED) THE USE OF A PIPE AS A MEANS OF SELF DEFENCE +2609-157645-0007-2359: ONE SUNDAY SAYS MISTER (DITCHFIELD->DITZFIELD) HE (HAD AN EXTRA->END IN THAT SIR) PIPE AND (JOSHUA->JOHNSHAW) THE CLERK TOLD HIM THAT THE PEOPLE WERE GETTING IMPATIENT +2609-157645-0008-2360: (LET->THEM TO) THEM (SING ANOTHER PSALM SAID->SINGING NOW THE PSALMS SAKE) THE CURATE +2609-157645-0009-2361: THEY HAVE SIR REPLIED THE CLERK 
+2609-157645-0010-2362: THEN LET THEM SING THE HUNDRED AND NINETEENTH REPLIED THE CURATE +2609-157645-0011-2363: SIX ARMS THE (NEAREST->NURSE) WITHIN REACH PRESENTED WITH AN OBEDIENT START AS MANY TOBACCO (POUCHES->PIUCHES) TO THE (MAN->MEN) OF OFFICE +2609-157645-0012-2364: DAVID (DEANS HOWEVER->DEAN SAMURED) DID NOT AT (ALL APPROVE->ARM PROVE) THIS IRREVERENCE +2609-157645-0013-2365: (GOING TO->GO INTO) CHURCH (AT HAYES IN THOSE->THAT HAS BEEN THUS) DAYS (MUST->MISTS) HAVE BEEN (QUITE AN EXCITING EXPERIENCE->ACQUAINTED AND THESE SIGNING SPIRITS) +2609-157645-0014-2366: WHEN THESE MEN IN THE COURSE OF MY REMONSTRANCE FOUND (*->OUT) THAT (I->*) WAS NOT GOING TO CONTINUE THE (CUSTOM->COTTOM) THEY NO LONGER CARED TO BE COMMUNICANTS +2609-169640-0000-2406: (PROAS->PERHAPS) IN THAT QUARTER WERE (USUALLY DISTRUSTED->USUAL DISTRUDGED) BY (SHIPS IT->THE STEPS AT) IS TRUE BUT THE (SEA IS FULL OF->SEAS FOR) THEM (AND->*) FAR MORE ARE INNOCENT THAN ARE GUILTY OF ANY ACTS OF VIOLENCE +2609-169640-0001-2407: (AN HOUR AFTER->NOW I OUTREW) THE SUN HAD SET THE WIND FELL (TO A->TURNED) LIGHT AIR (THAT JUST->DAT DIDST) KEPT STEERAGE WAY ON THE SHIP +2609-169640-0002-2408: FORTUNATELY THE JOHN WAS NOT ONLY (FAST->FAT) BUT (SHE->SEA) MINDED HER (HELM->HAIL) AS (A LIGHT FOOTED->THE LIGHTFOOTED) GIRL (TURNS IN A->TURNED TO THE) LIVELY DANCE +2609-169640-0003-2409: I NEVER WAS IN A BETTER (STEERING->STIRRING) SHIP (MOST ESPECIALLY IN MODERATE->PERCY SPENTRY AND MARGARET) WEATHER +2609-169640-0004-2410: MISTER MARBLE HE (I DO->OUGHT TO) BELIEVE WAS FAIRLY (SNOOZING ON->NUSING AND) THE (HEN COOPS->INCOUPS) BEING LIKE THE (SAILS->SAILORS) AS ONE MIGHT SAY (BARELY ASLEEP->VARIOUSLY) +2609-169640-0005-2411: AT THAT MOMENT I (HEARD->IN) A NOISE (ONE->WHEN) FAMILIAR TO (SEAMEN->SEE MEN) THAT OF AN OAR (FALLING->FOLLOWING) IN A BOAT +2609-169640-0006-2412: (I SANG OUT SAIL->AS IN YET SO) HO AND CLOSE (ABOARD->ABROAD) +2609-169640-0007-2413: HE WAS (TOO MUCH->SHIMMERTS) OF (A->THE) SEAMAN TO REQUIRE (A->*) SECOND LOOK IN ORDER TO ASCERTAIN (WHAT->BUT) WAS TO BE DONE +2609-169640-0008-2414: ALTHOUGH THEY WENT THREE FEET TO OUR TWO THIS GAVE (US A->UP SOME) MOMENT OF BREATHING TIME +2609-169640-0009-2415: AS OUR (SHEETS->SEATS) WERE ALL FLYING FORWARD AND REMAINED SO FOR A FEW MINUTES IT GAVE ME (*->A) LEISURE TO (LOOK->WORK) ABOUT +2609-169640-0010-2416: I SOON SAW BOTH (PROAS->PROTS) AND (GLAD ENOUGH->GRINDING UP) WAS I TO PERCEIVE THAT THEY HAD NOT APPROACHED MATERIALLY (NEARER->IN NEW YORK) +2609-169640-0011-2417: (MISTER KITE OBSERVED->BISHOIS DESERVED) THIS ALSO AND (REMARKED->REMARK) THAT OUR MOVEMENTS HAD BEEN SO PROMPT AS TO TAKE THE (RASCALS->RASCAL WAS) ABACK +2609-169640-0012-2418: A (BREATHLESS STILLNESS->BREATH WHICH STILL IN ITS) SUCCEEDED +2609-169640-0013-2419: THE (PROAS->PROITS) DID NOT ALTER (THEIR->THE) COURSE BUT (NEARED US->NEAR TO ITS) FAST +2609-169640-0014-2420: I HEARD THE (RATTLING->RIDING) OF THE BOARDING (PIKES->PIPES) TOO AS THEY WERE CUT ADRIFT FROM THE SPANKER BOOM AND FELL UPON THE DECKS +2609-169640-0015-2421: (KITE->COUNT) WENT (AFT->APT) AND RETURNED WITH THREE OR FOUR (MUSKETS->MASKETS) AND AS MANY PIKES +2609-169640-0016-2422: THE STILLNESS THAT (REIGNED->RAINED) ON BOTH SIDES WAS LIKE THAT OF (DEATH->DEA) +2609-169640-0017-2423: THE JOHN BEHAVED BEAUTIFULLY (AND->HE) CAME ROUND LIKE A TOP +2609-169640-0018-2424: THE QUESTION WAS NOW WHETHER WE COULD PASS (THEM->AND) OR NOT BEFORE THEY GOT NEAR ENOUGH TO (GRAPPLE->GRANTEL) +2609-169640-0019-2425: THE CAPTAIN BEHAVED 
(PERFECTLY->PERFECTUALLY) WELL IN (THIS->ITS) CRITICAL INSTANT COMMANDING A DEAD (SILENCE AND->SCIENCE IN) THE (CLOSEST ATTENTION TO->CLOSET SENTENCE INTO) HIS ORDERS +2609-169640-0020-2426: (NOT A SOUL->NOW SO) ON BOARD THE JOHN WAS HURT +2609-169640-0021-2427: (ON OUR SIDE->WHEN OURSAN) WE GAVE THE (GENTLEMEN->GENTLEMAN) THE FOUR (SIXES TWO AT->SIX TO OUT) THE (NEAREST->NEWS) AND TWO AT THE (STERN MOST PROA->STERNMOST PRO) WHICH WAS STILL NEAR A CABLE'S LENGTH DISTANT +2609-169640-0022-2428: THEY WERE (LIKE THE YELLS->NIGHTLY YEARS) OF FIENDS IN (ANGUISH->ENGLISH) +2609-169640-0023-2429: (I DOUBT->AND OUT) IF WE (TOUCHED A MAN->TOUCH THE REMAIN) IN THE (NEAREST PROA->NURTURE) +2609-169640-0024-2430: IN (THIS->THAT) STATE THE SHIP PASSED AHEAD (ALL HER->ON FOR A) CANVAS (BEING FULL->BEEN FOR) LEAVING THE (PROA MOTIONLESS->PROW MUCH ENRICHED) IN HER WAKE +3005-163389-0000-1108: THEY SWARMED UP IN FRONT OF SHERBURN'S PALINGS AS THICK AS THEY COULD JAM TOGETHER AND YOU COULDN'T HEAR YOURSELF THINK FOR THE NOISE +3005-163389-0001-1109: SOME SUNG OUT TEAR DOWN THE FENCE (TEAR->TEARE) DOWN THE FENCE +3005-163389-0002-1110: THE STILLNESS WAS AWFUL CREEPY AND UNCOMFORTABLE +3005-163389-0003-1111: SHERBURN RUN HIS EYE SLOW ALONG THE CROWD AND WHEREVER IT STRUCK THE PEOPLE TRIED A LITTLE TO (OUT GAZE->OUTGAZE) HIM BUT THEY COULDN'T THEY DROPPED THEIR EYES AND LOOKED SNEAKY +3005-163389-0004-1112: THE AVERAGE MAN'S A COWARD +3005-163389-0005-1113: BECAUSE THEY'RE AFRAID THE MAN'S FRIENDS WILL SHOOT THEM IN THE BACK IN THE (DARKAND->DARK AND) IT'S JUST WHAT THEY WOULD DO +3005-163389-0006-1114: SO THEY ALWAYS ACQUIT AND THEN A MAN GOES IN THE NIGHT WITH A HUNDRED (MASKED->MASSED) COWARDS AT HIS BACK AND LYNCHES THE RASCAL +3005-163389-0007-1115: YOU DIDN'T WANT TO COME +3005-163389-0008-1116: BUT A MOB WITHOUT ANY MAN AT THE HEAD OF IT IS BENEATH PITIFULNESS +3005-163389-0009-1117: NOW (LEAVE->LE) AND TAKE YOUR HALF A MAN WITH YOU TOSSING HIS GUN UP ACROSS HIS LEFT ARM AND COCKING IT WHEN HE SAYS THIS +3005-163389-0010-1118: THE CROWD WASHED BACK SUDDEN AND THEN BROKE ALL APART AND WENT TEARING OFF EVERY WHICH WAY AND BUCK (HARKNESS->HARKINS) HE (HEELED->HEALED) IT AFTER THEM LOOKING TOLERABLE CHEAP +3005-163389-0011-1119: (YOU->HE) CAN'T BE TOO CAREFUL +3005-163389-0012-1120: THEY ARGUED AND TRIED TO KEEP HIM OUT BUT HE WOULDN'T LISTEN AND (THE->A) WHOLE SHOW COME TO A (STANDSTILL->FAN STILL) +3005-163389-0013-1121: AND ONE OR TWO WOMEN (BEGUN->BEGAN) TO SCREAM +3005-163389-0014-1122: SO THEN (THE RINGMASTER->A RING MASTER) HE MADE A LITTLE SPEECH AND SAID HE HOPED THERE WOULDN'T BE NO DISTURBANCE AND IF THE MAN WOULD PROMISE HE WOULDN'T MAKE NO MORE TROUBLE HE WOULD LET HIM RIDE IF HE THOUGHT HE COULD STAY ON THE HORSE +3005-163389-0015-1123: IT WARN'T FUNNY TO ME THOUGH I WAS ALL OF A TREMBLE TO SEE HIS DANGER +3005-163389-0016-1124: AND (THE->A) HORSE A GOING LIKE A HOUSE (AFIRE->AFAR) TOO +3005-163389-0017-1125: HE (SHED->SHARED) THEM SO THICK (THEY->THAT) KIND OF CLOGGED UP THE AIR AND ALTOGETHER HE SHED SEVENTEEN SUITS +3005-163389-0018-1126: WHY IT WAS ONE OF HIS OWN MEN +3005-163390-0000-1185: (ANDBUT->AND BUT) NEVER MIND THE REST OF HIS OUTFIT IT WAS JUST WILD BUT IT WAS AWFUL FUNNY +3005-163390-0001-1186: THE PEOPLE MOST KILLED THEMSELVES LAUGHING AND WHEN THE KING GOT DONE CAPERING AND CAPERED OFF BEHIND THE SCENES THEY ROARED AND CLAPPED AND STORMED AND (HAW HAWED->HAWHAT) TILL HE COME BACK AND DONE IT OVER AGAIN AND AFTER THAT THEY MADE HIM DO IT ANOTHER TIME +3005-163390-0002-1187: 
TWENTY PEOPLE (SINGS->SANGS) OUT +3005-163390-0003-1188: THE DUKE SAYS YES +3005-163390-0004-1189: EVERYBODY SINGS OUT SOLD +3005-163390-0005-1190: BUT A BIG FINE LOOKING MAN JUMPS UP ON A BENCH AND SHOUTS HOLD ON +3005-163390-0006-1191: JUST A WORD GENTLEMEN THEY STOPPED TO LISTEN +3005-163390-0007-1192: WHAT WE WANT IS TO GO OUT OF HERE QUIET AND TALK THIS SHOW UP AND SELL THE REST (OF->O) THE TOWN +3005-163390-0008-1193: YOU (BET->BADE) IT IS THE (JEDGE->JUDGE) IS RIGHT EVERYBODY SINGS OUT +3005-163390-0009-1194: WE STRUCK THE RAFT AT THE SAME TIME AND IN LESS THAN TWO SECONDS WE WAS GLIDING DOWN STREAM ALL DARK AND STILL AND EDGING TOWARDS THE MIDDLE OF THE RIVER NOBODY SAYING A WORD +3005-163390-0010-1195: WE NEVER SHOWED A LIGHT TILL WE WAS ABOUT TEN MILE BELOW THE VILLAGE +3005-163390-0011-1196: GREENHORNS (FLATHEADS->FLAT HEADS) +3005-163390-0012-1197: NO I (SAYS->SAY IS) IT DON'T +3005-163390-0013-1198: WELL IT DON'T BECAUSE IT'S IN (THE->DE) BREED I RECKON THEY'RE ALL ALIKE +3005-163390-0014-1199: WELL THAT'S WHAT (I'M A->I MUST) SAYING ALL KINGS IS MOSTLY (RAPSCALLIONS->RATCALIONS) AS FUR AS I CAN MAKE OUT IS (DAT->DAT'S) SO +3005-163390-0015-1200: AND LOOK AT CHARLES SECOND AND LOUIS FOURTEEN AND LOUIS FIFTEEN AND JAMES SECOND AND EDWARD SECOND AND RICHARD (THIRD->*) AND FORTY MORE BESIDES ALL THEM SAXON (HEPTARCHIES->HEPTARK IS) THAT USED TO RIP AROUND SO (IN->WHEN) OLD TIMES AND (RAISE CAIN->RAISED GAME) +3005-163390-0016-1201: MY YOU OUGHT TO (SEEN->SEE AN) OLD HENRY THE EIGHT WHEN HE WAS IN BLOOM HE WAS A BLOSSOM +3005-163390-0017-1202: RING UP FAIR (ROSAMUN->ROSAMOND) +3005-163390-0018-1203: WELL HENRY HE TAKES A NOTION HE WANTS TO GET UP SOME TROUBLE WITH THIS COUNTRY +3005-163390-0019-1204: S'POSE HE OPENED HIS (MOUTHWHAT->MOUTH WHAT) THEN +3005-163390-0020-1205: ALL I SAY IS KINGS IS KINGS (AND->AN) YOU GOT TO MAKE ALLOWANCES +3005-163390-0021-1206: TAKE THEM ALL AROUND THEY'RE A MIGHTY ORNERY LOT IT'S THE WAY THEY'RE RAISED +3005-163390-0022-1207: WELL THEY ALL DO JIM +3005-163390-0023-1208: NOW (DE DUKE->TO DO) HE'S A (TOLERBLE LIKELY->TOLERABLE LIKE THE) MAN IN SOME WAYS +3005-163390-0024-1209: THIS ONE'S A (MIDDLING->MIDDLIN) HARD LOT FOR A (DUKE->DUPE) +3005-163390-0025-1210: WHEN I WAKED UP (JUST->JEST) AT DAYBREAK HE WAS SITTING THERE WITH HIS HEAD DOWN BETWIXT HIS KNEES MOANING AND MOURNING TO HIMSELF +3005-163390-0026-1211: IT DON'T SEEM NATURAL BUT I RECKON IT'S SO +3005-163390-0027-1212: HE WAS OFTEN MOANING (AND->IN) MOURNING THAT WAY NIGHTS WHEN HE JUDGED I WAS ASLEEP AND SAYING PO LITTLE (LIZABETH->ELIZABETH) +3005-163390-0028-1213: (DOAN->DON'T) YOU HEAR ME (SHET->SHUT) DE DO +3005-163390-0029-1214: I LAY I MAKE YOU MINE +3005-163390-0030-1215: (JIS->GIT) AS LOUD AS I COULD YELL +3005-163391-0000-1127: WHICH WAS SOUND ENOUGH JUDGMENT BUT YOU TAKE THE AVERAGE MAN AND HE WOULDN'T WAIT FOR HIM TO HOWL +3005-163391-0001-1128: THE KING'S (DUDS->DERDS) WAS ALL BLACK AND HE DID LOOK REAL SWELL (AND->AN) STARCHY +3005-163391-0002-1129: WHY BEFORE HE LOOKED LIKE THE ORNERIEST OLD RIP THAT EVER WAS BUT NOW WHEN HE'D TAKE OFF HIS NEW WHITE BEAVER AND MAKE A BOW AND DO A SMILE HE LOOKED THAT GRAND AND GOOD AND PIOUS THAT YOU'D SAY (HE HAD->HE'D) WALKED RIGHT OUT OF THE ARK AND MAYBE WAS OLD (LEVITICUS->LUVIDICUS) HIMSELF +3005-163391-0003-1130: JIM CLEANED UP THE CANOE AND I GOT MY PADDLE READY +3005-163391-0004-1131: (WHER YOU->WERE YE) BOUND FOR YOUNG MAN +3005-163391-0005-1132: (GIT->GET) ABOARD SAYS THE KING +3005-163391-0006-1133: I DONE SO (AND->AN) THEN WE ALL 
THREE STARTED ON AGAIN +3005-163391-0007-1134: THE YOUNG CHAP WAS MIGHTY THANKFUL SAID IT WAS TOUGH WORK TOTING HIS BAGGAGE SUCH WEATHER +3005-163391-0008-1135: HE ASKED THE KING WHERE HE WAS GOING AND THE KING TOLD HIM HE'D COME DOWN (THE->A) RIVER AND LANDED AT THE OTHER VILLAGE THIS MORNING AND NOW HE WAS GOING UP A FEW MILE TO SEE AN OLD FRIEND ON A FARM UP THERE THE YOUNG FELLOW SAYS +3005-163391-0009-1136: BUT THEN I SAYS AGAIN NO I RECKON IT AIN'T HIM OR ELSE HE WOULDN'T BE (PADDLING->PADDLIN) UP THE RIVER YOU AIN'T HIM ARE YOU +3005-163391-0010-1137: NO MY NAME'S (BLODGETT ELEXANDER BLODGETT->OBLIGE IT ALEXANDER BLODGET) REVEREND (ELEXANDER BLODGETT->ALEXANDER BLODGET) I (S'POSE->SUPPOSE) I MUST SAY AS I'M ONE (O->OF) THE (LORD'S->LORDS) POOR SERVANTS +3005-163391-0011-1138: YOU SEE HE WAS PRETTY OLD AND (GEORGE'S G'YIRLS->GEORGE IS GUY EARLS) WAS TOO YOUNG TO BE MUCH COMPANY FOR HIM EXCEPT MARY JANE THE RED HEADED ONE AND SO HE WAS KINDER LONESOME AFTER GEORGE AND HIS WIFE DIED AND DIDN'T SEEM TO CARE MUCH TO LIVE +3005-163391-0012-1139: TOO BAD TOO BAD HE COULDN'T (A->HAVE) LIVED TO SEE HIS (BROTHERS->BROTHER'S) POOR SOUL +3005-163391-0013-1140: I'M (GOING->GOIN) IN A SHIP NEXT WEDNESDAY (FOR RYO JANEERO->FERIO GENERO) WHERE MY UNCLE (LIVES->IS) +3005-163391-0014-1141: BUT IT'LL BE LOVELY (WISHT->WISHED) I WAS A (GOING->GOIN) +3005-163391-0015-1142: MARY JANE'S NINETEEN SUSAN'S FIFTEEN AND JOANNA'S ABOUT (FOURTEENTHAT'S->FOURTEEN THAT'S) THE ONE THAT GIVES HERSELF TO GOOD WORKS AND HAS A (HARE->HAIR) LIP POOR THINGS +3005-163391-0016-1143: WELL THEY COULD BE WORSE OFF +3005-163391-0017-1144: (OLD->O) PETER HAD FRIENDS AND THEY AIN'T GOING TO LET THEM COME TO NO HARM +3005-163391-0018-1145: BLAMED IF HE DIDN'T (INQUIRE->ACQUIRE) ABOUT EVERYBODY AND EVERYTHING (IN->AND) THAT BLESSED TOWN AND ALL ABOUT THE (WILKSES->WILKES) AND ABOUT PETER'S (BUSINESSWHICH->BUSINESS WHICH) WAS A TANNER AND ABOUT (GEORGE'SWHICH->GEORGE'S WHICH) WAS A CARPENTER AND ABOUT (HARVEY'SWHICH->HARVEST WHICH) WAS A DISSENTERING MINISTER AND SO ON AND SO ON THEN HE SAYS +3005-163391-0019-1146: WHEN (THEY'RE->HER) DEEP THEY WON'T STOP FOR A HAIL +3005-163391-0020-1147: WAS PETER (WILKS->WILKES) WELL OFF +3005-163391-0021-1148: WHEN (WE STRUCK->WASTED UP) THE BOAT SHE WAS ABOUT DONE LOADING AND PRETTY SOON SHE GOT OFF +3005-163391-0022-1149: NOW HUSTLE BACK RIGHT OFF AND FETCH THE DUKE UP HERE AND THE NEW CARPET BAGS +3005-163391-0023-1150: SO THEN THEY WAITED FOR A STEAMBOAT +3005-163391-0024-1151: (BUT->THAT) THE KING WAS (CA'M->CALM) HE SAYS +3005-163391-0025-1152: THEY (GIVE->GAVE) A GLANCE AT ONE ANOTHER AND NODDED THEIR HEADS AS MUCH AS TO SAY (WHAT D I->WOULD THEY) TELL YOU +3005-163391-0026-1153: THEN ONE OF THEM SAYS KIND OF SOFT AND GENTLE +3005-163399-0000-1154: PHELPS (WAS->IS) ONE OF THESE LITTLE ONE HORSE COTTON PLANTATIONS AND THEY ALL LOOK ALIKE +3005-163399-0001-1155: I WENT AROUND AND (CLUMB->CLIMB) OVER THE BACK STILE BY THE ASH HOPPER AND STARTED FOR THE KITCHEN +3005-163399-0002-1156: (I->AH) OUT WITH A (YES'M BEFORE->YES AND FORE) I THOUGHT +3005-163399-0003-1157: SO THEN SHE STARTED FOR THE HOUSE LEADING ME BY THE HAND AND THE CHILDREN TAGGING AFTER +3005-163399-0004-1158: WHEN WE GOT THERE SHE SET ME DOWN IN A SPLIT (BOTTOMED->BOTTOM) CHAIR AND SET HERSELF DOWN ON A LITTLE LOW STOOL IN FRONT OF ME HOLDING BOTH OF MY HANDS AND SAYS +3005-163399-0005-1159: WELL IT'S LUCKY BECAUSE SOMETIMES PEOPLE DO GET HURT +3005-163399-0006-1160: AND I THINK HE DIED AFTERWARDS HE WAS A BAPTIST 
+3005-163399-0007-1161: YES IT WAS (MORTIFICATIONTHAT->MORTIFICATION THAT) WAS IT +3005-163399-0008-1162: YOUR UNCLE'S BEEN UP TO THE TOWN EVERY DAY TO FETCH (YOU->*) +3005-163399-0009-1163: YOU MUST (A MET->AMERD) HIM ON THE ROAD DIDN'T YOU OLDISH MAN WITH A +3005-163399-0010-1164: WHY CHILD (IT LL->IT'LL) BE STOLE +3005-163399-0011-1165: IT WAS (KINDER->KIND OR) THIN (ICE->EYES) BUT I SAYS +3005-163399-0012-1166: I HAD MY MIND ON THE CHILDREN ALL THE TIME I WANTED TO GET THEM OUT TO ONE SIDE AND (PUMP->PUMPED) THEM A LITTLE AND FIND OUT WHO I WAS +3005-163399-0013-1167: PRETTY SOON SHE MADE THE COLD (CHILLS->CHILL) STREAK ALL DOWN MY BACK BECAUSE SHE SAYS +3005-163399-0014-1168: I SEE IT WARN'T A BIT OF USE TO TRY TO GO AHEAD I'D GOT TO THROW UP MY HAND +3005-163399-0015-1169: SO I SAYS TO MYSELF (HERE'S->HERE IS) ANOTHER PLACE WHERE I GOT TO (RESK->REST) THE TRUTH +3005-163399-0016-1170: I OPENED MY MOUTH TO BEGIN BUT SHE GRABBED ME AND HUSTLED ME IN BEHIND THE BED AND SAYS HERE HE COMES +3005-163399-0017-1171: CHILDREN DON'T YOU SAY A WORD +3005-163399-0018-1172: I SEE I WAS IN A FIX NOW +3005-163399-0019-1173: MISSUS PHELPS SHE (JUMPS->JUMPED) FOR HIM AND SAYS +3005-163399-0020-1174: HAS HE COME NO SAYS HER HUSBAND +3005-163399-0021-1175: I CAN'T IMAGINE SAYS THE OLD GENTLEMAN AND I MUST SAY IT MAKES ME DREADFUL UNEASY +3005-163399-0022-1176: UNEASY SHE SAYS I'M READY TO GO DISTRACTED +3005-163399-0023-1177: HE MUST (A->HAVE) COME AND YOU'VE MISSED HIM ALONG THE ROAD +3005-163399-0024-1178: OH DON'T DISTRESS ME ANY MORE'N I'M ALREADY DISTRESSED +3005-163399-0025-1179: WHY SILAS LOOK YONDER UP THE ROAD (AIN'T->HAIN'T) THAT SOMEBODY (COMING->COMIN) +3005-163399-0026-1180: THE OLD GENTLEMAN STARED AND SAYS +3005-163399-0027-1181: I HAIN'T NO IDEA WHO IS IT +3005-163399-0028-1182: (IT'S->IS) TOM SAWYER +3005-163399-0029-1183: BEING TOM SAWYER WAS EASY AND COMFORTABLE AND (IT STAYED->ITS STATE) EASY AND COMFORTABLE TILL BY AND BY I HEAR A STEAMBOAT COUGHING ALONG DOWN THE RIVER +3005-163399-0030-1184: THEN I SAYS TO MYSELF S'POSE TOM SAWYER COMES DOWN ON (THAT->MY) BOAT +3080-5032-0000-312: BUT I AM HUGELY PLEASED THAT YOU HAVE SEEN MY LADY +3080-5032-0001-313: I KNEW YOU COULD NOT CHOOSE BUT LIKE HER BUT YET LET ME TELL YOU YOU HAVE SEEN BUT THE WORST OF HER +3080-5032-0002-314: HER CONVERSATION HAS MORE CHARMS THAN CAN BE IN MERE BEAUTY AND (HER->A) HUMOUR AND DISPOSITION WOULD MAKE A DEFORMED PERSON APPEAR LOVELY +3080-5032-0003-315: WHY DID YOU NOT SEND ME THAT NEWS AND A GARLAND +3080-5032-0004-316: (WELL->WHY) THE BEST (ON'T->ON IT) IS (*->THAT) I HAVE A SQUIRE NOW THAT IS AS GOOD AS A KNIGHT +3080-5032-0005-317: IN EARNEST WE HAVE HAD SUCH A SKIRMISH (AND UPON->IN A POINT) SO FOOLISH AN OCCASION AS I CANNOT TELL WHICH IS (STRANGEST->STRANGERS) +3080-5032-0006-318: ALL THE PEOPLE THAT I HAD EVER IN MY LIFE REFUSED WERE BROUGHT AGAIN UPON THE STAGE LIKE RICHARD THE (THREE S->THIRD) GHOSTS TO REPROACH ME WITHAL (AND->IN) ALL THE KINDNESS HIS DISCOVERIES COULD MAKE I HAD FOR YOU WAS (LAID->LATE) TO MY CHARGE +3080-5032-0007-319: MY BEST QUALITIES IF I HAVE ANY THAT ARE GOOD SERVED BUT FOR AGGRAVATIONS OF MY FAULT AND I WAS ALLOWED TO HAVE WIT AND UNDERSTANDING AND DISCRETION IN OTHER THINGS THAT IT MIGHT APPEAR I HAD NONE IN THIS +3080-5032-0008-320: TIS A STRANGE CHANGE AND I AM VERY SORRY FOR IT BUT I'LL SWEAR I KNOW NOT HOW TO HELP IT +3080-5032-0009-321: MISTER FISH IS (THE->A) SQUIRE OF DAMES AND HAS SO MANY MISTRESSES (THAT->THAN) ANYBODY MAY PRETEND (A->TO) SHARE IN HIM AND BE 
BELIEVED BUT THOUGH I HAVE THE (HONOUR->HONOR) TO BE HIS NEAR NEIGHBOUR TO SPEAK FREELY I CANNOT BRAG MUCH THAT HE MAKES ANY COURT TO ME AND I KNOW NO YOUNG WOMAN IN THE COUNTRY THAT HE DOES NOT VISIT OFTEN +3080-5032-0010-322: I THINK MY YOUNGEST BROTHER COMES DOWN WITH HIM +3080-5032-0011-323: I CAN NO SOONER GIVE YOU SOME LITTLE HINTS (WHEREABOUTS->WHEREABOUT) THEY LIVE BUT YOU KNOW THEM PRESENTLY AND I MEANT YOU SHOULD BE BEHOLDING TO ME FOR YOUR ACQUAINTANCE +3080-5032-0012-324: BUT IT SEEMS THIS GENTLEMAN IS NOT SO EASY ACCESS BUT YOU MAY ACKNOWLEDGE SOMETHING DUE TO ME IF I INCLINE HIM TO LOOK GRACIOUSLY UPON YOU AND THEREFORE THERE IS NOT MUCH HARM DONE +3080-5032-0013-325: I HAVE MISSED FOUR FITS AND (*->HAVE) HAD BUT FIVE AND HAVE RECOVERED SO MUCH STRENGTH AS MADE ME VENTURE TO MEET YOUR LETTER ON WEDNESDAY A MILE FROM HOME +3080-5032-0014-326: BUT BESIDES I CAN GIVE YOU OTHERS +3080-5032-0015-327: I AM HERE MUCH MORE OUT OF PEOPLE'S WAY THAN IN TOWN WHERE MY (AUNT AND->AUNTS IN) SUCH (AS->HAS) PRETEND AN INTEREST IN ME (AND->IN) A POWER OVER ME DO SO PERSECUTE ME WITH (THEIR->DEAR) GOOD NATURE (AND->YOU'LL) TAKE IT SO ILL THAT THEY ARE NOT ACCEPTED AS I WOULD LIVE IN A HOLLOW TREE TO AVOID THEM +3080-5032-0016-328: YOU WILL THINK HIM ALTERED AND IF IT BE POSSIBLE MORE MELANCHOLY THAN HE WAS +3080-5032-0017-329: IF MARRIAGE AGREES NO BETTER WITH OTHER PEOPLE THAN IT DOES WITH HIM I SHALL PRAY THAT ALL MY FRIENDS MAY (SCAPE->ESCAPE) IT +3080-5032-0018-330: WELL IN EARNEST IF I WERE A PRINCE THAT LADY SHOULD BE MY MISTRESS BUT I CAN GIVE NO RULE TO ANY ONE ELSE AND PERHAPS THOSE THAT ARE IN NO DANGER OF LOSING THEIR HEARTS TO HER MAY BE INFINITELY TAKEN WITH ONE I SHOULD NOT VALUE (AT->IT) ALL FOR SO SAYS THE JUSTINIAN WISE PROVIDENCE HAS ORDAINED IT THAT BY THEIR DIFFERENT (HUMOURS->HUMANS) EVERYBODY MIGHT FIND SOMETHING TO PLEASE THEMSELVES WITHAL WITHOUT ENVYING THEIR (NEIGHBOURS->NEIGHBORS) +3080-5032-0019-331: THE MATTER IS NOT GREAT FOR I CONFESS I DO NATURALLY HATE THE NOISE AND TALK OF THE WORLD AND SHOULD BE BEST PLEASED NEVER TO BE KNOWN (IN'T->IN) UPON ANY OCCASION WHATSOEVER YET SINCE IT CAN NEVER BE WHOLLY AVOIDED ONE MUST SATISFY ONESELF BY DOING NOTHING THAT ONE NEED CARE WHO KNOWS +3080-5032-0020-332: IF I HAD A PICTURE THAT WERE FIT FOR YOU YOU SHOULD HAVE IT +3080-5032-0021-333: HOW CAN YOU TALK OF DEFYING FORTUNE NOBODY LIVES WITHOUT IT AND THEREFORE WHY SHOULD YOU IMAGINE YOU COULD +3080-5032-0022-334: I KNOW NOT HOW MY BROTHER COMES TO BE SO WELL INFORMED AS YOU SAY BUT I AM CERTAIN HE KNOWS THE UTMOST OF THE INJURIES YOU HAVE RECEIVED FROM HER +3080-5032-0023-335: WE HAVE HAD ANOTHER DEBATE BUT MUCH MORE CALMLY +3080-5032-0024-336: AND BESIDES THERE WAS A TIME WHEN WE OURSELVES WERE INDIFFERENT TO ONE ANOTHER DID I DO SO THEN OR HAVE I LEARNED IT SINCE +3080-5032-0025-337: I HAVE BEEN STUDYING HOW TOM (CHEEKE->CHEEK) MIGHT COME BY HIS INTELLIGENCE AND I (VERILY->VERY) BELIEVE HE HAS IT FROM MY COUSIN PETERS +3080-5032-0026-338: HOW KINDLY DO I TAKE (THESE->THE) CIVILITIES OF YOUR (FATHER'S->FATHERS) IN EARNEST YOU CANNOT IMAGINE HOW HIS LETTER PLEASED ME +3080-5040-0000-278: WOULD IT WOULD LEAVE ME AND THEN I COULD BELIEVE I SHALL NOT ALWAYS HAVE OCCASION FOR IT +3080-5040-0001-279: MY POOR LADY (VAVASOUR->VAVASOR) IS (CARRIED TO THE->CHARACTERED A) TOWER (AND->IN) HER GREAT BELLY COULD NOT EXCUSE HER BECAUSE SHE WAS ACQUAINTED BY SOMEBODY THAT THERE WAS A PLOT AGAINST THE PROTECTOR AND DID NOT DISCOVER IT +3080-5040-0002-280: SHE HAS TOLD NOW ALL THAT WAS 
TOLD HER BUT VOWS SHE WILL NEVER SAY FROM WHENCE SHE HAD IT WE SHALL SEE WHETHER HER RESOLUTIONS ARE AS UNALTERABLE AS THOSE OF MY LADY (TALMASH->THOMMISH) +3080-5040-0003-281: I WONDER HOW SHE BEHAVED HERSELF WHEN SHE WAS MARRIED +3080-5040-0004-282: I NEVER SAW ANY ONE YET THAT DID NOT LOOK SIMPLY AND OUT OF COUNTENANCE NOR EVER KNEW A WEDDING WELL DESIGNED BUT ONE AND THAT WAS OF TWO PERSONS WHO (HAD->AT) TIME ENOUGH I CONFESS TO CONTRIVE IT AND NOBODY TO PLEASE (IN'T->IN) BUT THEMSELVES +3080-5040-0005-283: THE TRUTH IS I COULD NOT ENDURE TO BE MISSUS BRIDE IN A PUBLIC WEDDING TO BE MADE THE HAPPIEST PERSON ON EARTH +3080-5040-0006-284: DO NOT TAKE IT ILL FOR I WOULD ENDURE IT IF I COULD RATHER THAN FAIL BUT IN EARNEST I DO NOT THINK IT WERE POSSIBLE FOR ME +3080-5040-0007-285: YET IN EARNEST YOUR FATHER WILL NOT FIND MY BROTHER PEYTON WANTING IN CIVILITY THOUGH HE IS NOT A MAN OF MUCH COMPLIMENT UNLESS IT BE IN HIS (LETTERS->LETTER) TO ME (NOR->NO) AN UNREASONABLE PERSON IN ANYTHING SO HE WILL ALLOW HIM OUT OF HIS KINDNESS TO HIS WIFE TO SET A HIGHER VALUE UPON HER SISTER THAN SHE DESERVES +3080-5040-0008-286: MY AUNT TOLD ME NO LONGER (AGONE THAN->A GONDON) YESTERDAY THAT I WAS THE MOST WILFUL WOMAN THAT EVER SHE KNEW AND HAD AN OBSTINACY OF SPIRIT NOTHING COULD OVERCOME TAKE HEED +3080-5040-0009-287: YOU SEE I GIVE YOU FAIR WARNING +3080-5040-0010-288: BY THE NEXT I SHALL BE GONE INTO KENT AND MY OTHER JOURNEY IS LAID ASIDE WHICH I AM NOT DISPLEASED AT BECAUSE IT WOULD HAVE BROKEN OUR INTERCOURSE VERY MUCH +3080-5040-0011-289: HERE ARE SOME VERSES OF (COWLEY'S->CARLIS) TELL ME HOW YOU LIKE THEM +3080-5040-0012-290: I TOLD YOU IN MY LAST THAT MY SUFFOLK JOURNEY WAS LAID ASIDE AND THAT INTO KENT HASTENED +3080-5040-0013-291: IF I DROWN BY THE WAY THIS WILL BE MY LAST LETTER AND LIKE A WILL I BEQUEATH ALL MY KINDNESS TO YOU IN IT WITH A CHARGE NEVER TO BESTOW (IT->AT) ALL UPON ANOTHER MISTRESS LEST MY GHOST RISE AGAIN AND HAUNT YOU +3080-5040-0014-292: INDEED I LIKE HIM EXTREMELY AND HE IS COMMENDED TO ME BY PEOPLE THAT KNOW HIM VERY WELL AND ARE ABLE TO JUDGE FOR A MOST EXCELLENT SERVANT AND FAITHFUL AS POSSIBLE +3080-5040-0015-293: BECAUSE YOU FIND FAULT WITH MY OTHER LETTERS THIS IS LIKE TO BE SHORTER THAN THEY I DID NOT INTEND IT SO THOUGH I CAN ASSURE YOU +3080-5040-0016-294: I DO NOT FIND IT THOUGH I AM TOLD I WAS SO EXTREMELY WHEN I BELIEVED YOU LOVED ME +3080-5040-0017-295: BUT I AM CALLED UPON +3080-5040-0018-296: DIRECTED FOR YOUR MASTER +3080-5040-0019-297: I SEE YOU CAN (CHIDE->CHID) WHEN YOU PLEASE AND WITH AUTHORITY BUT I DESERVE IT I CONFESS AND ALL I CAN SAY FOR MYSELF IS THAT MY FAULT PROCEEDED FROM A VERY GOOD PRINCIPLE IN ME +3080-5040-0020-298: WE DARE NOT LET OUR TONGUES LIE MORE (ON->AND) ONE SIDE OF OUR (MOUTHS->MOTHS) THAN (T'OTHER->THE OTHER) FOR FEAR OF OVERTURNING IT +3080-5040-0021-299: YOU ARE SATISFIED I HOPE (ERE->IF) THIS THAT I (SCAPED->ESCAPE) DROWNING +3080-5040-0022-300: BUT I AM TROUBLED MUCH YOU SHOULD MAKE SO ILL A JOURNEY TO SO LITTLE PURPOSE INDEED I (WRIT->WRITE) BY THE FIRST POST AFTER MY ARRIVAL HERE AND CANNOT IMAGINE HOW YOU CAME TO MISS OF MY LETTERS +3080-5040-0023-301: (HOW->OH) WELCOME YOU WILL BE BUT ALAS +3080-5040-0024-302: FOR MY LIFE I CANNOT BEAT INTO THEIR HEADS A PASSION THAT MUST BE SUBJECT TO NO DECAY (AN->AND) EVEN PERFECT KINDNESS THAT MUST LAST PERPETUALLY WITHOUT THE LEAST INTERMISSION +3080-5040-0025-303: THEY LAUGH TO HEAR ME SAY THAT ONE UNKIND WORD WOULD DESTROY ALL THE SATISFACTION OF MY LIFE AND THAT I SHOULD EXPECT 
OUR KINDNESS SHOULD INCREASE EVERY DAY IF IT WERE POSSIBLE BUT NEVER LESSEN +3080-5040-0026-304: WE GO ABROAD ALL DAY AND PLAY ALL NIGHT AND SAY (OUR PRAYERS->I'LL PRAY AS) WHEN WE HAVE TIME +3080-5040-0027-305: (WELL->WHILE) IN SOBER EARNEST NOW I WOULD NOT LIVE THUS (A TWELVEMONTH->AT TWELVE MONTHS) TO GAIN ALL THAT (THE->*) KING HAS LOST UNLESS IT WERE TO GIVE IT HIM AGAIN +3080-5040-0028-306: WILL YOU BE SO GOOD NATURED +3080-5040-0029-307: HE HAS ONE SON AND TIS THE FINEST BOY THAT (E'ER->ERE) YOU SAW AND HAS A NOBLE SPIRIT BUT YET STANDS IN THAT AWE OF HIS FATHER THAT ONE WORD FROM HIM IS AS MUCH AS TWENTY WHIPPINGS +3080-5040-0030-308: YOU MUST GIVE ME LEAVE TO ENTERTAIN (YOU THUS->YOURSELVES) WITH DISCOURSES OF THE FAMILY FOR I CAN TELL YOU NOTHING ELSE FROM HENCE +3080-5040-0031-309: NOT TO KNOW WHEN YOU (WOULD->HAD) COME HOME I CAN ASSURE YOU (NOR->NO) FOR ANY OTHER OCCASION (OF->ON) MY OWN BUT WITH A COUSIN OF MINE THAT HAD LONG DESIGNED TO MAKE HERSELF SPORT WITH HIM AND DID NOT MISS OF HER AIM +3080-5040-0032-310: IN MY LIFE I NEVER HEARD SO RIDICULOUS A DISCOURSE AS HE MADE US AND NO OLD WOMAN WHO (PASSES->PAUSES) FOR A WITCH COULD HAVE BEEN MORE PUZZLED TO SEEK WHAT TO SAY TO REASONABLE PEOPLE THAN HE WAS +3080-5040-0033-311: EVER SINCE THIS ADVENTURE I HAVE HAD SO GREAT A BELIEF IN ALL THINGS (OF THIS->WITH ITS) NATURE THAT I COULD NOT FORBEAR LAYING A (PEAS COD->PEASE COT) WITH NINE PEAS (IN'T->INTO) UNDER MY DOOR YESTERDAY (AND->IT) WAS INFORMED BY IT THAT MY HUSBAND'S NAME SHOULD BE THOMAS HOW DO YOU LIKE THAT +3331-159605-0000-695: SHE PULLED HER HAIR DOWN TURNED (HER SKIRT->HIS GOOD) BACK PUT HER FEET ON THE FENDER AND TOOK (PUTTEL->PATTERN) INTO HER LAP ALL OF WHICH ARRANGEMENTS SIGNIFIED THAT SOMETHING VERY IMPORTANT HAD GOT TO BE THOUGHT OVER AND SETTLED +3331-159605-0001-696: THE MORE PROPOSALS THE MORE CREDIT +3331-159605-0002-697: (I VE->I'VE) TRIED IT AND LIKED IT AND MAYBE THIS IS THE CONSEQUENCE OF THAT NIGHT'S FUN +3331-159605-0003-698: JUST SUPPOSE IT IS TRUE THAT HE DOES ASK ME AND I SAY YES +3331-159605-0004-699: WHAT A SPITEFUL THING I AM +3331-159605-0005-700: I COULD DO SO MUCH FOR ALL AT HOME HOW I SHOULD ENJOY THAT +3331-159605-0006-701: LET ME SEE HOW CAN I BEGIN +3331-159605-0007-702: HE HAS KNOWN HER ALL HER LIFE AND HAS A GOOD INFLUENCE OVER HER +3331-159605-0008-703: NOW AS POLLY WAS BY NO MEANS A PERFECT CREATURE I AM FREE TO CONFESS THAT THE OLD TEMPTATION ASSAILED HER MORE THAN ONCE (THAT->THE) WEEK FOR WHEN THE FIRST EXCITEMENT OF THE DODGING REFORM HAD SUBSIDED SHE MISSED THE PLEASANT LITTLE INTERVIEWS THAT USED TO PUT A CERTAIN (FLAVOR->FLAVOUR) OF ROMANCE INTO HER DULL HARD WORKING DAYS +3331-159605-0009-704: I DON'T THINK IT WAS HIS WEALTH ACCOMPLISHMENTS (OR POSITION->OPPOSITION) THAT MOST ATTRACTED POLLY THOUGH THESE DOUBTLESS POSSESSED A GREATER INFLUENCE THAN SHE SUSPECTED +3331-159605-0010-705: IT WAS THAT INDESCRIBABLE SOMETHING WHICH WOMEN ARE QUICK TO SEE AND FEEL IN MEN WHO HAVE BEEN BLESSED (WITH->THE) WISE AND GOOD MOTHERS +3331-159605-0011-706: THIS HAD AN ESPECIAL CHARM TO POLLY FOR SHE SOON FOUND THAT THIS (SIDE->SIGHT) OF HIS CHARACTER WAS NOT SHOWN TO EVERY ONE +3331-159605-0012-707: LATELY THIS HAD CHANGED ESPECIALLY TOWARDS POLLY AND IT (FLATTERED->FURTHER) HER MORE THAN SHE WOULD CONFESS EVEN TO HERSELF +3331-159605-0013-708: AT FIRST SHE TRIED TO THINK SHE COULD BUT UNFORTUNATELY HEARTS ARE SO CONTRARY THAT THEY WON'T BE OBEDIENT TO REASON WILL OR EVEN (GRATITUDE->CREDITUDE) +3331-159605-0014-709: POLLY FELT A VERY 
CORDIAL FRIENDSHIP FOR MISTER SYDNEY BUT NOT ONE PARTICLE OF THE LOVE (WHICH IS->PICTURED) THE ONLY COIN IN WHICH LOVE CAN BE TRULY PAID +3331-159605-0015-710: THIS FINISHED POLLY'S INDECISION AND AFTER THAT NIGHT SHE NEVER ALLOWED HERSELF TO DWELL UPON THE PLEASANT TEMPTATION WHICH CAME IN A GUISE PARTICULARLY ATTRACTIVE TO A YOUNG GIRL (WITH A SPICE->BUT THE SPIES) OF THE OLD EVE (IN->AND) HER COMPOSITION +3331-159605-0016-711: WHEN (SATURDAY->SAID) CAME POLLY STARTED AS USUAL FOR A VISIT TO BECKY AND BESS BUT (COULD N'T->COULDN'T) RESIST STOPPING AT THE (SHAWS->SHORES) TO LEAVE A LITTLE PARCEL FOR FAN (THOUGH IT->THAT) WAS CALLING TIME +3331-159605-0017-712: A FOOLISH LITTLE SPEECH TO MAKE TO A (DOG->DARK) BUT YOU SEE POLLY WAS ONLY A TENDER HEARTED GIRL TRYING TO (DO->*) HER DUTY +3331-159605-0018-713: TAKE HOLD OF (MASTER CHARLEY'S->MASSR CHARLIE'S) HAND MISS (MAMIE->MAY) AND (WALK->BUCK) PRETTY LIKE (WILLY->BILLY) AND (FLOSSY->FLOSSIE) SAID THE (MAID->MATE) +3331-159605-0019-714: (AT->*) A (STREET->DISTRICT) CORNER A BLACK EYED (SCHOOL BOY->SCHOOLBOY) WAS PARTING FROM A ROSY FACED SCHOOL GIRL WHOSE MUSIC ROLL HE WAS RELUCTANTLY SURRENDERING +3331-159605-0020-715: HOW HE GOT THERE WAS NEVER VERY CLEAR TO POLLY BUT THERE HE WAS FLUSHED AND (A->THE) LITTLE OUT OF BREATH BUT LOOKING SO GLAD TO SEE HER (THAT->TILL) SHE HAD (N'T->NOT) THE HEART TO BE STIFF AND COOL AS SHE HAD FULLY INTENDED TO BE WHEN THEY MET +3331-159605-0021-716: SHE REALLY COULD (N'T->NOT) HELP IT IT WAS SO PLEASANT TO SEE HIM AGAIN JUST WHEN SHE WAS FEELING SO LONELY +3331-159605-0022-717: THAT IS THE WAY I GET TO THE (ROTHS->WORSE) ANSWERED POLLY +3331-159605-0023-718: SHE DID NOT MEAN TO TELL BUT HIS FRANKNESS WAS (SO->TO) AGREEABLE SHE FORGOT HERSELF +3331-159605-0024-719: BUT I KNOW HER BETTER AND I ASSURE YOU THAT SHE (DOES IMPROVE->DOESN'T PROVE) SHE TRIES TO (MEND HER->MEAN TO) FAULTS THOUGH SHE WON'T OWN IT AND WILL SURPRISE YOU SOME DAY BY THE AMOUNT OF HEART AND SENSE AND GOODNESS SHE HAS GOT +3331-159605-0025-720: THANK YOU NO +3331-159605-0026-721: (HOW->HER) LOVELY THE PARK LOOKS SHE SAID IN GREAT CONFUSION +3331-159605-0027-722: ASKED THE ARTFUL YOUNG MAN LAYING A TRAP INTO WHICH POLLY IMMEDIATELY FELL +3331-159605-0028-723: HE WAS QUICKER TO TAKE A HINT THAN SHE HAD EXPECTED AND BEING BOTH PROUD AND GENEROUS (RESOLVED->WE SOFT) TO SETTLE THE MATTER AT ONCE FOR POLLY'S SAKE AS WELL AS HIS OWN +3331-159605-0029-724: SO WHEN SHE MADE HER LAST (BRILLIANT->BUOYANT) REMARK HE SAID QUIETLY WATCHING HER FACE KEENLY ALL THE WHILE I THOUGHT SO WELL (I M->I'M) GOING OUT OF TOWN ON BUSINESS FOR SEVERAL WEEKS SO YOU CAN ENJOY YOUR LITTLE BIT OF COUNTRY WITHOUT BEING ANNOYED BY ME (ANNOYED->ANNOY IT) +3331-159605-0030-725: SHE THOUGHT SHE HAD A GOOD DEAL OF THE COQUETTE IN HER AND (I VE->I'VE) NO DOUBT THAT WITH TIME AND TRAINING SHE WOULD HAVE BECOME A VERY DANGEROUS LITTLE PERSON BUT NOW SHE WAS FAR (TOO->TO) TRANSPARENT AND STRAIGHTFORWARD BY NATURE EVEN TO TELL A (WHITE LIE CLEVERLY->WIDE LIKE LEVELLY) +3331-159605-0031-726: HE WAS GONE BEFORE SHE COULD DO ANYTHING BUT LOOK UP AT HIM WITH A REMORSEFUL FACE AND SHE WALKED ON FEELING THAT THE FIRST AND PERHAPS THE ONLY (LOVER->LOVE) SHE WOULD EVER HAVE HAD READ HIS ANSWER AND ACCEPTED (IT->*) IN SILENCE +3331-159605-0032-727: POLLY DID NOT RETURN TO HER (FAVORITE->FAVOURITE) WALK TILL SHE LEARNED (FROM->FOR) MINNIE THAT UNCLE HAD REALLY LEFT TOWN AND THEN SHE FOUND THAT HIS FRIENDLY COMPANY AND CONVERSATION WAS WHAT HAD MADE THE WAY SO PLEASANT AFTER ALL 
+3331-159605-0033-728: (WAGGING->WORKING) TO AND FRO AS USUAL WHAT'S THE NEWS WITH YOU +3331-159605-0034-729: PERHAPS (SHE LL JILT->SHE'LL CHILLED) HIM +3331-159605-0035-730: UTTERLY DONE WITH AND LAID UPON THE SHELF +3331-159605-0036-731: (MINNIE->MANY) SAID THE OTHER DAY SHE WISHED SHE WAS A PIGEON SO SHE COULD PADDLE IN THE (PUDDLES->POTTLES) AND NOT FUSS ABOUT (RUBBERS->WRAPPERS) +3331-159605-0037-732: NOW DON'T BE AFFECTED POLLY BUT JUST TELL ME LIKE A DEAR HAS (N'T->NOT) HE PROPOSED +3331-159605-0038-733: DON'T YOU THINK HE MEANS TO +3331-159605-0039-734: TRULY (TRULY->JULIE) FAN +3331-159605-0040-735: I DON'T MEAN TO BE PRYING BUT I REALLY THOUGHT HE DID +3331-159605-0041-736: WELL I ALWAYS MEANT TO TRY IT IF I GOT A CHANCE AND I HAVE +3331-159605-0042-737: I JUST GAVE HIM A HINT AND HE TOOK IT +3331-159605-0043-738: HE MEANT TO GO AWAY BEFORE THAT SO DON'T THINK HIS HEART IS BROKEN (OR->OH) MIND WHAT (SILLY TATTLERS->DITTY TEDLER) SAY +3331-159605-0044-739: HE UNDERSTOOD AND BEING A GENTLEMAN MADE NO FUSS +3331-159605-0045-740: BUT POLLY IT WOULD HAVE BEEN A GRAND THING FOR YOU +3331-159605-0046-741: (I M ODD->I'M NOT) YOU KNOW AND PREFER TO BE AN INDEPENDENT SPINSTER AND TEACH MUSIC ALL MY DAYS +3331-159609-0000-742: NEVER MIND WHAT THE BUSINESS WAS IT (SUFFICES->SURFACES) TO SAY THAT IT WAS A GOOD BEGINNING FOR A YOUNG MAN LIKE TOM WHO HAVING BEEN BORN AND BRED IN THE MOST CONSERVATIVE CLASS OF THE MOST CONCEITED CITY IN NEW ENGLAND NEEDED JUST THE HEALTHY HEARTY SOCIAL INFLUENCES OF THE WEST TO WIDEN HIS VIEWS AND MAKE A MAN OF HIM +3331-159609-0001-743: FORTUNATELY EVERY ONE WAS SO BUSY WITH THE NECESSARY PREPARATIONS THAT THERE WAS NO TIME FOR (ROMANCE->ROMANS) OF ANY SORT AND THE FOUR YOUNG PEOPLE WORKED TOGETHER AS SOBERLY AND SENSIBLY AS IF ALL SORTS OF EMOTIONS WERE NOT (BOTTLED->BUTTERED) UP IN THEIR RESPECTIVE HEARTS +3331-159609-0002-744: PITY THAT THE END SHOULD COME SO SOON BUT THE HOUR DID ITS WORK AND (WENT->WHEN) ITS WAY LEAVING A CLEARER ATMOSPHERE BEHIND (THOUGH->THAN) THE YOUNG FOLKS DID NOT SEE IT THEN FOR THEIR EYES WERE DIM BECAUSE OF THE PARTINGS THAT MUST BE +3331-159609-0003-745: IF IT HAD NOT BEEN FOR TWO THINGS I FEAR SHE NEVER WOULD HAVE STOOD A SUMMER IN TOWN BUT SYDNEY OFTEN CALLED (TILL->TO) HIS VACATION CAME AND A VOLUMINOUS CORRESPONDENCE WITH POLLY BEGUILED THE LONG DAYS +3331-159609-0004-746: (TOM WROTE ONCE->TUMBLED ONES) A WEEK TO HIS MOTHER BUT (THE LETTERS->THEY LET US) WERE SHORT AND NOT VERY SATISFACTORY FOR MEN NEVER DO TELL THE INTERESTING LITTLE THINGS THAT WOMEN BEST LIKE TO HEAR +3331-159609-0005-747: (NO->NOW) I (M->AM) ONLY TIRED HAD A GOOD DEAL TO DO LATELY AND THE (DULL->DOLL) WEATHER MAKES ME JUST A (TRIFLE->TRAVEL) BLUE +3331-159609-0006-748: FORGIVE ME POLLY BUT I CAN'T HELP SAYING IT FOR (IT IS->THIS) THERE AND I WANT TO BE AS TRUE TO YOU AS YOU WERE TO ME IF I CAN +3331-159609-0007-749: I (TRY->TRIED) NOT TO DECEIVE MYSELF BUT IT DOES SEEM AS IF THERE WAS A CHANCE OF HAPPINESS FOR ME +3331-159609-0008-750: THANK HEAVEN FOR THAT +3331-159609-0009-751: CRIED POLLY WITH THE HEARTIEST SATISFACTION IN HER VOICE +3331-159609-0010-752: POOR POLLY WAS SO TAKEN BY SURPRISE THAT SHE HAD NOT A WORD TO SAY +3331-159609-0011-753: NONE WERE NEEDED HER (TELLTALE->TELLS HER) FACE ANSWERED FOR HER AS WELL AS THE IMPULSE WHICH MADE HER HIDE HER HEAD IN THE (SOFA->SILVER) CUSHION LIKE A FOOLISH OSTRICH (WHEN->AND) THE (HUNTERS->HANDLES) ARE AFTER IT +3331-159609-0012-754: ONCE OR TWICE (BUT->THAT) SORT OF (JOKINGLY->CHOKINGLY) AND I THOUGHT IT 
WAS ONLY SOME LITTLE FLIRTATION +3331-159609-0013-755: IT WAS SO STUPID OF ME NOT TO GUESS BEFORE +3331-159609-0014-756: IT WAS SO TENDER EARNEST AND DEFIANT THAT FANNY FORGOT THE DEFENCE OF HER OWN LOVER (IN->AND) ADMIRATION OF POLLY'S LOYALTY TO HERS FOR THIS FAITHFUL ALL ABSORBING LOVE WAS A (NEW REVELATION->NEWER RELATION) TO FANNY WHO WAS USED TO HEARING HER FRIENDS BOAST OF TWO OR THREE LOVERS A YEAR AND CALCULATE THEIR RESPECTIVE VALUES WITH ALMOST AS MUCH COOLNESS AS THE YOUNG MEN DISCUSSED THE FORTUNES OF THE GIRLS THEY WISHED FOR BUT COULD NOT AFFORD TO MARRY +3331-159609-0015-757: I HOPE MARIA (BAILEY IS ALL HE->BAILEY'S ONLY) THINKS HER SHE ADDED SOFTLY FOR I COULD (N'T->NOT) BEAR TO HAVE HIM DISAPPOINTED AGAIN +3331-159609-0016-758: SAID FANNY TURNING HOPEFUL ALL AT ONCE +3331-159609-0017-759: SUPPOSE (I->HER) SAY A WORD TO TOM JUST INQUIRE AFTER HIS HEART IN A GENERAL WAY YOU KNOW AND GIVE HIM A CHANCE TO TELL ME IF (THERE IS->THERE'S) ANYTHING TO TELL +3331-159609-0018-760: BEAR IT PEOPLE ALWAYS DO BEAR THINGS SOMEHOW ANSWERED POLLY LOOKING AS IF SENTENCE HAD BEEN PASSED UPON HER +3331-159609-0019-761: IT WAS (A->*) VERY DIFFERENT (WINTER->WINDOW) FROM THE LAST (FOR BOTH->ABOVE) THE (GIRLS->GIRL) +3331-159609-0020-762: IF (FANNY->ANY) WANTED TO SHOW HIM WHAT SHE COULD DO TOWARD MAKING A PLEASANT HOME SHE CERTAINLY SUCCEEDED (BETTER THAN->BY THEN) SHE SUSPECTED FOR IN SPITE OF MANY FAILURES AND DISCOURAGEMENTS BEHIND THE SCENES THE LITTLE HOUSE BECAME A MOST ATTRACTIVE PLACE TO MISTER (SYDNEY->SIDNEY) AT LEAST FOR HE WAS MORE THE HOUSE FRIEND THAN EVER AND SEEMED DETERMINED TO PROVE THAT CHANGE OF FORTUNE MADE NO DIFFERENCE TO HIM +3331-159609-0021-763: SHE KEPT MUCH AT HOME (WHEN->IN) THE DAY'S WORK WAS DONE FINDING IT PLEASANTER TO SIT DREAMING (OVER->OF A) BOOK OR (SEWING->SOON) ALONE THAN TO EXERT HERSELF EVEN TO GO TO THE (SHAWS->SHORES) +3331-159609-0022-764: POLLY WAS NOT AT ALL LIKE HERSELF THAT (WINTER->WINDOW) AND THOSE NEAREST TO HER SAW (AND WONDERED->ENVANTED) AT IT MOST +3331-159609-0023-765: FOR NED WAS SO ABSORBED IN BUSINESS THAT HE IGNORED THE WHOLE (BAILEY->BAILIQUE) QUESTION AND LEFT THEM IN (UTTER->OTHER) DARKNESS +3331-159609-0024-766: (FANNY->WHEN HE) CAME WALKING IN UPON HER ONE DAY LOOKING AS IF SHE (BROUGHT TIDINGS->POURED HIDINGS) OF SUCH GREAT JOY THAT SHE HARDLY KNEW HOW TO TELL THEM +3331-159609-0025-767: BUT IF WORK BASKETS WERE GIFTED WITH POWERS OF SPEECH THEY COULD TELL STORIES MORE TRUE AND TENDER THAN ANY WE READ +3528-168656-0000-864: SHE HAD EVEN BEEN IN SOCIETY BEFORE THE REVOLUTION +3528-168656-0001-865: IT WAS HER PLEASURE AND HER VANITY TO DRAG IN THESE NAMES ON EVERY PRETEXT +3528-168656-0002-866: EVERY YEAR SHE SOLEMNLY RENEWED HER VOWS AND AT THE MOMENT OF TAKING THE OATH SHE SAID TO THE PRIEST MONSEIGNEUR SAINT (FRANCOIS->FROIS) GAVE IT TO MONSEIGNEUR SAINT (JULIEN->JULIAN) MONSEIGNEUR SAINT (JULIEN->JULIAN) GAVE IT TO MONSEIGNEUR SAINT (EUSEBIUS MONSEIGNEUR->EUSIDIUS MONSIEUR) SAINT (EUSEBIUS->USUVIUS) GAVE IT TO MONSEIGNEUR SAINT PROCOPIUS ET CETERA ET CETERA +3528-168656-0003-867: AND THE (SCHOOL GIRLS->SCHOOLGIRLS) WOULD BEGIN TO LAUGH NOT IN THEIR SLEEVES BUT UNDER (THEIR->THE) VEILS CHARMING LITTLE STIFLED LAUGHS WHICH MADE THE VOCAL MOTHERS FROWN +3528-168656-0004-868: IT WAS A CENTURY WHICH SPOKE THROUGH HER BUT IT WAS THE EIGHTEENTH CENTURY +3528-168656-0005-869: THE RULE OF (FONTEVRAULT->FONTREVAL) DID NOT FORBID THIS +3528-168656-0006-870: SHE WOULD NOT SHOW (THIS OBJECT->THE SUBJECT) TO (ANYONE->ANY ONE) 
+3528-168656-0007-871: THUS IT FURNISHED A SUBJECT OF COMMENT FOR ALL THOSE WHO WERE (UNOCCUPIED->ON OCCUPIED) OR BORED IN THE CONVENT +3528-168656-0008-872: SOME (UNIQUE->EUIK) CHAPLET SOME AUTHENTIC RELIC +3528-168656-0009-873: THEY LOST THEMSELVES IN CONJECTURES +3528-168656-0010-874: WHEN THE POOR OLD WOMAN DIED THEY RUSHED TO HER CUPBOARD MORE HASTILY THAN WAS FITTING PERHAPS AND OPENED IT +3528-168656-0011-875: HE IS RESISTING FLUTTERING HIS TINY WINGS AND STILL MAKING AN EFFORT TO FLY BUT THE (DANCER IS->DANCERS) LAUGHING WITH A SATANICAL AIR +3528-168656-0012-876: MORAL LOVE CONQUERED BY THE COLIC +3528-168669-0000-877: THE (PRIORESS->PIRAS) RETURNED AND SEATED HERSELF ONCE MORE ON HER CHAIR +3528-168669-0001-878: WE WILL PRESENT A STENOGRAPHIC REPORT OF THE DIALOGUE WHICH THEN ENSUED TO THE BEST OF OUR ABILITY +3528-168669-0002-879: FATHER (FAUVENT->VUENT) +3528-168669-0003-880: REVEREND MOTHER DO YOU KNOW THE CHAPEL +3528-168669-0004-881: AND YOU HAVE BEEN IN THE CHOIR IN PURSUANCE OF YOUR DUTIES TWO OR THREE TIMES +3528-168669-0005-882: THERE IS A STONE TO BE RAISED HEAVY +3528-168669-0006-883: THE SLAB OF THE PAVEMENT WHICH IS AT THE (SIDE->THOUGHT) OF THE ALTAR +3528-168669-0007-884: THE (SLAB->FLAP) WHICH CLOSES THE VAULT YES +3528-168669-0008-885: IT WOULD BE A GOOD THING TO HAVE TWO MEN FOR IT +3528-168669-0009-886: A WOMAN IS NEVER A MAN +3528-168669-0010-887: BECAUSE (DOM MABILLON->DON MARVALAN) GIVES FOUR HUNDRED AND SEVENTEEN EPISTLES OF SAINT BERNARD WHILE (MERLONUS HORSTIUS->MERLINUS HORSES) ONLY GIVES THREE HUNDRED AND SIXTY SEVEN I DO NOT DESPISE (MERLONUS HORSTIUS->MERLINA'S HORSES) NEITHER DO I +3528-168669-0011-888: (MERIT->MARRIAGE) CONSISTS IN WORKING ACCORDING TO ONE'S STRENGTH A CLOISTER IS NOT A (DOCK YARD->DOCKYARD) +3528-168669-0012-889: AND A WOMAN IS NOT A MAN BUT MY BROTHER IS THE STRONG ONE THOUGH +3528-168669-0013-890: AND CAN YOU GET A (LEVER->LOVER) +3528-168669-0014-891: THERE IS A RING IN THE STONE +3528-168669-0015-892: I WILL PUT THE LEVER THROUGH IT +3528-168669-0016-893: THAT IS GOOD REVEREND MOTHER I WILL OPEN THE VAULT +3528-168669-0017-894: WILL THAT BE ALL NO +3528-168669-0018-895: GIVE ME YOUR ORDERS VERY REVEREND MOTHER +3528-168669-0019-896: (FAUVENT->FOR THAT) WE HAVE CONFIDENCE IN YOU +3528-168669-0020-897: I AM HERE TO DO ANYTHING YOU WISH +3528-168669-0021-898: AND TO HOLD YOUR PEACE ABOUT EVERYTHING YES (REVEREND->ROBIN) MOTHER +3528-168669-0022-899: WHEN THE (VAULT->WALL) IS OPEN I WILL CLOSE IT AGAIN +3528-168669-0023-900: BUT BEFORE THAT WHAT REVEREND MOTHER +3528-168669-0024-901: FATHER (FAUVENT->FERVENT) REVEREND MOTHER +3528-168669-0025-902: YOU KNOW THAT A MOTHER DIED THIS MORNING +3528-168669-0026-903: NO DID YOU NOT HEAR THE BELL +3528-168669-0027-904: NOTHING CAN BE HEARD AT THE BOTTOM OF THE GARDEN REALLY +3528-168669-0028-905: AND THEN THE WIND (IS->DOES) NOT BLOWING IN MY DIRECTION THIS MORNING +3528-168669-0029-906: IT WAS MOTHER CRUCIFIXION +3528-168669-0030-907: THREE YEARS AGO MADAME DE (BETHUNE->BESOON) A (JANSENIST->GENTLEST) TURNED ORTHODOX MERELY FROM HAVING SEEN MOTHER CRUCIFIXION AT PRAYER AH +3528-168669-0031-908: THE MOTHERS HAVE TAKEN HER TO THE DEAD ROOM WHICH OPENS ON THE CHURCH I KNOW +3528-168669-0032-909: A FINE SIGHT IT WOULD BE TO SEE A MAN ENTER THE DEAD ROOM MORE OFTEN +3528-168669-0033-910: HEY MORE OFTEN +3528-168669-0034-911: WHAT DO YOU SAY +3528-168669-0035-912: I SAY MORE OFTEN MORE OFTEN THAN WHAT +3528-168669-0036-913: REVEREND MOTHER I DID NOT SAY MORE OFTEN THAN WHAT I SAID MORE OFTEN 
+3528-168669-0037-914: BUT I DID NOT SAY MORE OFTEN +3528-168669-0038-915: AT THAT MOMENT NINE O'CLOCK STRUCK +3528-168669-0039-916: AT NINE O'CLOCK IN THE MORNING AND AT ALL HOURS PRAISED AND ADORED (*->TO) BE THE MOST HOLY SACRAMENT OF THE ALTAR SAID THE (PRIORESS->PIRATES) +3528-168669-0040-917: IT CUT MORE OFTEN SHORT +3528-168669-0041-918: FAUCHELEVENT MOPPED HIS FOREHEAD +3528-168669-0042-919: IN HER (LIFETIME->LIFE TIME) MOTHER CRUCIFIXION MADE CONVERTS AFTER HER DEATH SHE WILL PERFORM MIRACLES SHE WILL +3528-168669-0043-920: FATHER (FAUVENT->FUVENT) THE COMMUNITY HAS BEEN BLESSED IN MOTHER CRUCIFIXION +3528-168669-0044-921: SHE RETAINED HER CONSCIOUSNESS TO THE VERY LAST MOMENT +3528-168669-0045-922: SHE GAVE US HER LAST COMMANDS +3528-168669-0046-923: IF YOU HAD A LITTLE MORE FAITH AND IF YOU COULD HAVE BEEN IN (HER CELL->HERSELF) SHE WOULD HAVE CURED YOUR LEG MERELY BY TOUCHING IT SHE SMILED +3528-168669-0047-924: THERE WAS SOMETHING OF PARADISE IN THAT DEATH +3528-168669-0048-925: FAUCHELEVENT THOUGHT THAT IT WAS AN ORISON WHICH SHE WAS FINISHING +3528-168669-0049-926: FAUCHELEVENT HELD HIS PEACE SHE WENT ON +3528-168669-0050-927: I HAVE CONSULTED UPON THIS POINT MANY ECCLESIASTICS LABORING IN OUR LORD WHO OCCUPY THEMSELVES IN THE EXERCISES OF THE CLERICAL LIFE AND WHO BEAR WONDERFUL FRUIT +3528-168669-0051-928: FORTUNATELY THE (PRIORESS->PIOUS) COMPLETELY ABSORBED IN HER OWN THOUGHTS DID NOT HEAR IT +3528-168669-0052-929: SHE CONTINUED FATHER (FAUVENT->PROVENCE) +3528-168669-0053-930: YES REVEREND MOTHER +3528-168669-0054-931: (SAINT->SAID) TERENTIUS BISHOP OF PORT WHERE THE MOUTH OF THE TIBER EMPTIES INTO THE SEA REQUESTED THAT ON HIS (TOMB->TWO) MIGHT BE ENGRAVED THE SIGN WHICH WAS PLACED ON THE GRAVES OF (PARRICIDES->PARASITES) IN THE HOPE THAT PASSERS BY WOULD SPIT ON HIS TOMB THIS WAS DONE +3528-168669-0055-932: THE DEAD MUST BE OBEYED SO BE IT +3528-168669-0056-933: FOR THAT MATTER NO REVEREND MOTHER +3528-168669-0057-934: FATHER (FAUVENT->VENT) MOTHER CRUCIFIXION WILL BE INTERRED IN THE COFFIN IN WHICH SHE HAS SLEPT FOR THE LAST TWENTY YEARS THAT IS JUST +3528-168669-0058-935: IT IS A CONTINUATION OF HER SLUMBER +3528-168669-0059-936: SO I SHALL HAVE TO NAIL UP THAT COFFIN YES +3528-168669-0060-937: I AM AT THE ORDERS OF THE VERY REVEREND (COMMUNITY->CUNITY) +3528-168669-0061-938: THE (FOUR MOTHER PRECENTORS->FOREMOTHER PRESENTERS) WILL ASSIST YOU +3528-168669-0062-939: NO (IN LOWERING->INLORING) THE COFFIN +3528-168669-0063-940: WHERE INTO THE VAULT +3528-168669-0064-941: FAUCHELEVENT STARTED THE VAULT UNDER THE ALTAR +3528-168669-0065-942: UNDER THE ALTAR BUT +3528-168669-0066-943: YOU WILL HAVE AN IRON BAR YES BUT +3528-168669-0067-944: YOU WILL RAISE THE STONE WITH THE BAR BY MEANS OF THE RING BUT +3528-168669-0068-945: THE DEAD MUST BE OBEYED TO BE BURIED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL NOT TO GO TO PROFANE EARTH TO REMAIN THERE IN DEATH WHERE SHE PRAYED WHILE LIVING SUCH WAS THE LAST WISH OF MOTHER CRUCIFIXION +3528-168669-0069-946: SHE ASKED IT OF US THAT IS TO SAY COMMANDED US +3528-168669-0070-947: BUT IT IS FORBIDDEN +3528-168669-0071-948: OH I AM A STONE IN YOUR WALLS +3528-168669-0072-949: THINK FATHER (FAUVENT->*) IF SHE WERE TO WORK MIRACLES HERE +3528-168669-0073-950: WHAT A GLORY OF GOD FOR THE COMMUNITY AND MIRACLES ISSUE FROM TOMBS +3528-168669-0074-951: BUT REVEREND MOTHER IF THE AGENT OF THE SANITARY COMMISSION +3528-168669-0075-952: BUT THE COMMISSARY OF POLICE +3528-168669-0076-953: (CHONODEMAIRE->SHADOW DE MER) ONE OF THE SEVEN GERMAN 
KINGS WHO ENTERED AMONG THE (GAULS->GULFS) UNDER THE EMPIRE OF CONSTANTIUS EXPRESSLY RECOGNIZED THE RIGHT OF NUNS TO BE BURIED IN RELIGION THAT IS TO SAY BENEATH THE ALTAR +3528-168669-0077-954: THE WORLD IS NOTHING IN THE PRESENCE OF THE CROSS +3528-168669-0078-955: MARTIN THE ELEVENTH GENERAL OF THE CARTHUSIANS GAVE TO HIS ORDER THIS DEVICE STAT (CRUX DUM VOLVITUR ORBIS->CREW DOOM VOLVETER ORBUS) +3528-168669-0079-956: THE (PRIORESS->PYRIUS) WHO WAS USUALLY SUBJECTED TO THE BARRIER OF SILENCE AND WHOSE RESERVOIR WAS (OVERFULL->OVER FULL) ROSE AND EXCLAIMED WITH THE (LOQUACITY->LEQUESTITY) OF A DAM WHICH HAS BROKEN AWAY +3528-168669-0080-957: I HAVE ON MY RIGHT (BENOIT->BENOIS) AND ON MY LEFT BERNARD WHO WAS BERNARD +3528-168669-0081-958: THE FIRST ABBOT OF (CLAIRVAUX->CLERVAL) +3528-168669-0082-959: HIS ORDER HAS PRODUCED FORTY POPES TWO HUNDRED CARDINALS FIFTY PATRIARCHS SIXTEEN HUNDRED ARCHBISHOPS FOUR THOUSAND SIX HUNDRED BISHOPS FOUR EMPERORS TWELVE EMPRESSES FORTY SIX KINGS FORTY ONE QUEENS THREE THOUSAND SIX HUNDRED (CANONIZED->CANNONIZED) SAINTS AND HAS BEEN IN EXISTENCE FOR FOURTEEN HUNDRED YEARS +3528-168669-0083-960: ON ONE SIDE SAINT BERNARD ON THE OTHER THE AGENT OF THE (SANITARY->SENATORY) DEPARTMENT +3528-168669-0084-961: GOD SUBORDINATED TO THE (COMMISSARY->COMMISSORY) OF POLICE SUCH (IS->WAS) THE AGE SILENCE (FAUVENT->FAVAN) +3528-168669-0085-962: NO ONE DOUBTS THE RIGHT OF THE MONASTERY (TO->CHOOSE) SEPULTURE +3528-168669-0086-963: ONLY FANATICS AND THOSE IN ERROR DENY IT +3528-168669-0087-964: WE LIVE IN TIMES OF TERRIBLE CONFUSION +3528-168669-0088-965: WE ARE IGNORANT AND IMPIOUS +3528-168669-0089-966: AND THEN RELIGION IS ATTACKED WHY +3528-168669-0090-967: BECAUSE THERE HAVE BEEN BAD PRIESTS BECAUSE (SAGITTAIRE->SAGATURE) BISHOP OF GAP WAS THE BROTHER OF (SALONE->SALON) BISHOP OF (EMBRUN->EMBRON) AND BECAUSE BOTH OF THEM FOLLOWED (MOMMOL->MAMMA) +3528-168669-0091-968: THEY PERSECUTE THE SAINTS +3528-168669-0092-969: THEY SHUT THEIR EYES TO THE TRUTH DARKNESS IS THE RULE +3528-168669-0093-970: THE MOST FEROCIOUS BEASTS ARE BEASTS WHICH ARE BLIND +3528-168669-0094-971: OH HOW WICKED PEOPLE ARE +3528-168669-0095-972: BY ORDER OF THE KING SIGNIFIES TO DAY BY ORDER OF THE REVOLUTION +3528-168669-0096-973: ONE NO LONGER KNOWS WHAT IS DUE TO THE LIVING OR TO THE DEAD A HOLY DEATH IS PROHIBITED +3528-168669-0097-974: (GAUTHIER->GATHIER) BISHOP OF (CHALONS->CALON) HELD HIS OWN IN THIS MATTER AGAINST OTHO DUKE OF BURGUNDY +3528-168669-0098-975: THE (PRIORESS->PRIORS) TOOK BREATH THEN TURNED TO FAUCHELEVENT +3528-168669-0099-976: YOU WILL CLOSE THE COFFIN THE SISTERS WILL CARRY IT TO THE CHAPEL +3528-168669-0100-977: THE OFFICE FOR THE DEAD WILL THEN BE SAID +3528-168669-0101-978: BUT SHE WILL HEAR SHE WILL NOT LISTEN +3528-168669-0102-979: BESIDES WHAT THE CLOISTER KNOWS THE WORLD LEARNS NOT +3528-168669-0103-980: A PAUSE (ENSUED->AND SUIT) +3528-168669-0104-981: YOU WILL REMOVE YOUR (BELL->BEL) +3528-168669-0105-982: HAS THE DOCTOR FOR THE DEAD PAID HIS VISIT +3528-168669-0106-983: HE WILL PAY IT AT FOUR O'CLOCK TO DAY +3528-168669-0107-984: THE PEAL WHICH ORDERS THE DOCTOR FOR THE DEAD TO BE SUMMONED HAS ALREADY BEEN RUNG +3528-168669-0108-985: BUT YOU DO NOT UNDERSTAND ANY OF THE PEALS +3528-168669-0109-986: THAT IS WELL FATHER (FAUVENT->VENT) +3528-168669-0110-987: WHERE WILL YOU OBTAIN IT +3528-168669-0111-988: I HAVE MY HEAP OF OLD IRON AT THE BOTTOM OF THE GARDEN +3528-168669-0112-989: (REVEREND->RIVER) MOTHER WHAT +3528-168669-0113-990: IF YOU WERE EVER TO HAVE ANY 
OTHER JOBS OF THIS SORT MY BROTHER IS THE STRONG MAN FOR YOU A PERFECT TURK +3528-168669-0114-991: YOU WILL DO IT AS SPEEDILY AS POSSIBLE +3528-168669-0115-992: I CANNOT WORK VERY FAST I AM INFIRM THAT IS WHY I REQUIRE AN ASSISTANT I LIMP +3528-168669-0116-993: EVERYTHING MUST HAVE BEEN COMPLETED A GOOD QUARTER OF AN HOUR BEFORE THAT +3528-168669-0117-994: I WILL DO ANYTHING TO PROVE MY ZEAL TOWARDS THE COMMUNITY THESE ARE MY ORDERS I AM TO NAIL UP THE COFFIN +3528-168669-0118-995: AT ELEVEN O'CLOCK EXACTLY I AM TO BE IN THE CHAPEL +3528-168669-0119-996: MOTHER ASCENSION WILL BE THERE TWO MEN WOULD BE BETTER +3528-168669-0120-997: HOWEVER NEVER MIND I SHALL HAVE MY (LEVER->LOVE) +3528-168669-0121-998: AFTER WHICH THERE WILL BE NO TRACE OF ANYTHING +3528-168669-0122-999: THE GOVERNMENT WILL HAVE NO SUSPICION +3528-168669-0123-1000: THE EMPTY COFFIN REMAINS THIS PRODUCED A PAUSE +3528-168669-0124-1001: WHAT IS TO BE DONE WITH THAT COFFIN FATHER (FAUVENT->PREVENT) +3528-168669-0125-1002: IT WILL BE GIVEN TO THE EARTH EMPTY +3528-168669-0126-1003: AH (THE DE->LEDA) EXCLAIMED FAUCHELEVENT +3528-168669-0127-1004: THE (VIL->VILLE) STUCK FAST IN HIS THROAT +3528-168669-0128-1005: HE MADE HASTE TO IMPROVISE AN EXPEDIENT TO MAKE HER FORGET THE OATH +3528-168669-0129-1006: I WILL PUT EARTH IN THE COFFIN REVEREND MOTHER THAT WILL PRODUCE THE EFFECT OF A CORPSE +3528-168669-0130-1007: I WILL MAKE THAT MY SPECIAL BUSINESS +3538-142836-0000-1567: GENERAL OBSERVATIONS ON PRESERVES (CONFECTIONARY->CONFECTIONERY) ICES AND DESSERT DISHES +3538-142836-0001-1568: THE EXPENSE OF PRESERVING THEM WITH SUGAR IS A SERIOUS OBJECTION FOR EXCEPT THE SUGAR IS USED IN CONSIDERABLE (QUANTITIES->QUALITIES) THE SUCCESS IS VERY UNCERTAIN +3538-142836-0002-1569: FRUIT GATHERED IN WET OR FOGGY WEATHER WILL SOON BE (MILDEWED->MELTED) AND BE OF NO SERVICE FOR PRESERVES +3538-142836-0003-1570: BUT TO DISTINGUISH THESE PROPERLY REQUIRES VERY GREAT ATTENTION AND CONSIDERABLE EXPERIENCE +3538-142836-0004-1571: IF YOU DIP THE FINGER INTO THE (SYRUP->SERF) AND APPLY IT TO THE THUMB THE TENACITY OF THE (SYRUP->SERF) WILL ON SEPARATING THE FINGER AND THUMB AFFORD A THREAD WHICH SHORTLY BREAKS THIS IS THE LITTLE THREAD +3538-142836-0005-1572: LET IT BOIL UP AGAIN THEN TAKE IT OFF AND REMOVE CAREFULLY THE SCUM THAT HAS RISEN +3538-142836-0006-1573: IT IS CONSIDERED TO BE SUFFICIENTLY BOILED WHEN SOME TAKEN UP IN A SPOON POURS OUT LIKE OIL +3538-142836-0007-1574: BEFORE SUGAR WAS IN USE HONEY WAS EMPLOYED TO PRESERVE MANY VEGETABLE PRODUCTIONS THOUGH THIS SUBSTANCE (HAS->IS) NOW GIVEN WAY TO THE JUICE OF THE SUGAR CANE +3538-142836-0008-1575: FOURTEEN NINETY NINE +3538-142836-0009-1576: BOIL THEM UP THREE DAYS SUCCESSIVELY SKIMMING EACH TIME AND THEY WILL THEN BE FINISHED AND IN A STATE FIT TO BE PUT INTO POTS FOR USE +3538-142836-0010-1577: THE REASON WHY THE FRUIT IS EMPTIED OUT OF THE PRESERVING PAN INTO (AN->OUR) EARTHEN PAN IS THAT THE ACID OF THE FRUIT ACTS UPON THE COPPER OF WHICH THE PRESERVING PANS ARE USUALLY MADE +3538-142836-0011-1578: FROM THIS EXAMPLE THE PROCESS OF PRESERVING FRUITS BY SYRUP (WILL->WOULD) BE EASILY COMPREHENDED +3538-142836-0012-1579: THEY SHOULD BE DRIED IN THE STOVE OR OVEN ON A (SIEVE->SEA) AND TURNED EVERY SIX OR EIGHT HOURS FRESH POWDERED SUGAR BEING SIFTED OVER THEM EVERY TIME THEY (ARE TURNED->RETURNED) +3538-142836-0013-1580: IN THIS WAY IT IS ALSO THAT ORANGE AND (LEMON CHIPS->LINENSHIPS) ARE PRESERVED +3538-142836-0014-1581: MARMALADES JAMS AND FRUIT (PASTES->PACE) ARE OF THE SAME NATURE AND ARE 
NOW IN VERY GENERAL (REQUEST->QUEST) +3538-142836-0015-1582: (MARMALADES->MARMALITIES) AND (JAMS->JAMES) DIFFER LITTLE FROM EACH OTHER (THEY ARE->THEIR) PRESERVES OF (A->*) HALF (LIQUID->LIKELY) CONSISTENCY MADE BY BOILING THE PULP OF FRUITS AND SOMETIMES PART OF THE (RINDS->RHINS) WITH SUGAR +3538-142836-0016-1583: THAT THEY MAY KEEP IT IS NECESSARY NOT TO BE SPARING OF SUGAR FIFTEEN O THREE +3538-142836-0017-1584: IN ALL THE OPERATIONS FOR PRESERVE MAKING WHEN THE PRESERVING PAN IS USED IT SHOULD NOT BE PLACED ON THE FIRE BUT ON A (TRIVET->TRIBUT) UNLESS THE JAM IS MADE ON A HOT PLATE WHEN THIS IS NOT NECESSARY +3538-142836-0018-1585: (CONFECTIONARY->CONFECTIONERY) FIFTEEN O EIGHT +3538-142836-0019-1586: IN SPEAKING OF (CONFECTIONARY IT->CONFECTIONERIES) SHOULD BE REMARKED THAT ALL THE VARIOUS PREPARATIONS ABOVE NAMED COME STRICTLY SPEAKING UNDER THAT HEAD FOR THE VARIOUS FRUITS FLOWERS HERBS (ROOTS->RUTHS) AND JUICES WHICH (WHEN->ONE) BOILED WITH SUGAR WERE FORMERLY EMPLOYED IN PHARMACY AS WELL AS FOR SWEETMEATS WERE CALLED CONFECTIONS FROM THE LATIN WORD (CONFICERE->CONFERS) TO MAKE UP BUT THE TERM (CONFECTIONARY->CONFECTIONERY) EMBRACES A VERY LARGE CLASS INDEED OF SWEET FOOD MANY KINDS OF WHICH SHOULD NOT BE ATTEMPTED IN THE ORDINARY (CUISINE->COISINE) +3538-142836-0020-1587: THE THOUSAND AND ONE ORNAMENTAL DISHES THAT ADORN THE TABLES OF THE WEALTHY SHOULD BE PURCHASED FROM THE CONFECTIONER THEY CANNOT PROFITABLY BE MADE AT HOME +3538-142836-0021-1588: HOWEVER AS LATE AS THE (REIGNS->REIGN) OF OUR TWO LAST GEORGES FABULOUS SUMS WERE OFTEN EXPENDED UPON FANCIFUL (DESSERTS->DESERTS) +3538-142836-0022-1589: THE SHAPE OF THE DISHES VARIES AT DIFFERENT PERIODS THE PREVAILING FASHION AT PRESENT BEING OVAL AND CIRCULAR DISHES ON STEMS +3538-142836-0023-1590: (ICES->ISIS) +3538-142836-0024-1591: (AT->A) DESSERTS OR AT SOME EVENING PARTIES (ICES->IISES) ARE SCARCELY TO BE DISPENSED WITH +3538-142836-0025-1592: THE (SPADDLE->SPADEL) IS GENERALLY MADE OF COPPER KEPT BRIGHT AND CLEAN +3538-142836-0026-1593: THEY SHOULD BE TAKEN IMMEDIATELY AFTER THE REPAST OR SOME HOURS AFTER BECAUSE THE TAKING (*->OF) THESE SUBSTANCES DURING THE PROCESS OF DIGESTION IS APT TO PROVOKE INDISPOSITION +3538-163619-0000-1500: THERE WAS ONCE (ON A->TILL THE) TIME A WIDOWER WHO HAD A SON AND A DAUGHTER BY HIS FIRST (WIFE->WI) +3538-163619-0001-1501: FROM THE VERY DAY THAT THE NEW WIFE CAME INTO THE HOUSE THERE WAS NO PEACE FOR THE MAN'S CHILDREN AND NOT A CORNER TO BE FOUND WHERE THEY COULD GET ANY REST SO THE BOY THOUGHT THAT THE BEST THING HE COULD DO WAS TO GO OUT INTO THE WORLD AND TRY TO EARN HIS OWN BREAD +3538-163619-0002-1502: BUT HIS SISTER WHO WAS STILL AT HOME FARED WORSE AND WORSE +3538-163619-0003-1503: KISS ME (GIRL->GO) SAID THE HEAD +3538-163619-0004-1504: WHEN THE KING ENTERED AND SAW IT HE STOOD STILL AS IF HE WERE IN FETTERS AND COULD NOT STIR FROM THE SPOT FOR THE PICTURE SEEMED TO HIM SO BEAUTIFUL +3538-163619-0005-1505: (THE YOUTH->THESE) PROMISED TO MAKE ALL THE HASTE HE COULD AND SET FORTH FROM THE KING'S PALACE +3538-163619-0006-1506: AT LAST THEY CAME IN SIGHT OF LAND +3538-163619-0007-1507: WELL IF MY BROTHER SAYS SO I MUST DO IT SAID THE MAN'S DAUGHTER AND SHE FLUNG HER CASKET INTO THE SEA +3538-163619-0008-1508: WHAT IS MY BROTHER SAYING ASKED HIS SISTER AGAIN +3538-163619-0009-1509: ON THE FIRST THURSDAY NIGHT AFTER THIS A BEAUTIFUL MAIDEN CAME INTO THE KITCHEN OF THE PALACE AND BEGGED THE KITCHEN MAID WHO SLEPT THERE TO LEND HER A BRUSH +3538-163619-0010-1510: SHE BEGGED VERY PRETTILY 
AND GOT IT AND THEN SHE BRUSHED HER HAIR AND THE GOLD DROPPED FROM IT +3538-163619-0011-1511: OUT ON THEE UGLY BUSHY BRIDE SLEEPING SO SOFT BY THE YOUNG KING'S SIDE ON SAND AND STONES MY BED I MAKE AND MY (BROTHER->BROTHERS) SLEEPS WITH THE COLD SNAKE UNPITIED AND UNWEPT +3538-163619-0012-1512: I SHALL COME TWICE MORE AND THEN NEVER AGAIN SAID SHE +3538-163619-0013-1513: THIS TIME ALSO AS BEFORE SHE BORROWED A BRUSH AND BRUSHED HER HAIR WITH IT AND THE GOLD DROPPED DOWN AS SHE DID IT AND AGAIN SHE SENT THE DOG OUT THREE TIMES AND WHEN (DAY->THEY) DAWNED SHE DEPARTED BUT AS SHE WAS GOING SHE SAID AS SHE HAD SAID BEFORE I SHALL COME ONCE MORE AND THEN NEVER AGAIN +3538-163619-0014-1514: NO ONE CAN TELL HOW DELIGHTED THE KING WAS TO GET RID OF THAT HIDEOUS BUSHY BRIDE AND GET A QUEEN WHO WAS BRIGHT AND BEAUTIFUL AS DAY (ITSELF->ITSEL) +3538-163622-0000-1515: WILT THOU SERVE ME AND WATCH MY SEVEN (FOALS->FOLDS) ASKED THE KING +3538-163622-0001-1516: THE YOUTH THOUGHT THAT IT WAS VERY EASY WORK TO WATCH THE FOALS AND (THAT->*) HE COULD DO IT WELL ENOUGH +3538-163622-0002-1517: HAST THOU (WATCHED->WATCH) FAITHFULLY AND WELL THE WHOLE DAY LONG SAID THE KING WHEN THE LAD CAME INTO HIS PRESENCE IN THE EVENING +3538-163622-0003-1518: YES THAT I HAVE SAID THE YOUTH +3538-163622-0004-1519: HE HAD GONE OUT ONCE TO SEEK A PLACE HE SAID BUT NEVER WOULD HE DO SUCH A THING AGAIN +3538-163622-0005-1520: (THEN->*) THE (KING->MACKING) PROMISED HIM THE SAME PUNISHMENT AND THE SAME REWARD THAT HE HAD PROMISED HIS BROTHER +3538-163622-0006-1521: WHEN HE HAD RUN AFTER THE (FOALS->FOOLS) FOR A LONG LONG TIME AND WAS HOT AND TIRED HE PASSED BY (A CLEFT->CLIFF) IN THE ROCK WHERE AN OLD WOMAN WAS SITTING SPINNING WITH A DISTAFF AND SHE CALLED TO HIM +3538-163622-0007-1522: (COME HITHER->COMMANDER) COME HITHER MY HANDSOME SON AND LET ME COMB YOUR HAIR +3538-163622-0008-1523: THE YOUTH LIKED THE THOUGHT OF THIS LET THE (FOALS RUN->FOLDS WARM) WHERE THEY CHOSE AND SEATED HIMSELF IN THE CLEFT OF THE ROCK BY THE SIDE OF THE OLD HAG +3538-163622-0009-1524: SO THERE HE SAT WITH HIS HEAD ON HER LAP TAKING HIS EASE THE LIVELONG DAY +3538-163622-0010-1525: ON THE THIRD DAY (CINDERLAD->SAID THE LAD) WANTED TO SET OUT +3538-163622-0011-1526: THE TWO BROTHERS LAUGHED AT HIM AND HIS FATHER AND MOTHER BEGGED HIM NOT TO GO BUT ALL TO NO PURPOSE (AND->WHEN) CINDERLAD SET OUT ON HIS WAY +3538-163622-0012-1527: I AM WALKING ABOUT IN SEARCH OF A PLACE SAID (CINDERLAD->SANDAL LAD) +3538-163622-0013-1528: I WOULD MUCH RATHER HAVE THE PRINCESS SAID (CINDERLAD->CINDER LAD) +3538-163622-0014-1529: AND THUS THEY JOURNEYED ONWARDS A LONG LONG WAY +3538-163622-0015-1530: WHEN THEY HAD GONE THUS FOR A LONG LONG WAY THE (FOAL->FULL) AGAIN ASKED DOST THOU SEE ANYTHING NOW +3538-163622-0016-1531: YES NOW I SEE SOMETHING THAT IS WHITE SAID (CINDERLAD->CINDER LAD) +3538-163622-0017-1532: IT LOOKS LIKE THE TRUNK OF A GREAT THICK BIRCH TREE +3538-163622-0018-1533: (CINDERLAD->SOONER LAD) TRIED BUT COULD NOT DO IT SO HE HAD TO TAKE A (DRAUGHT->DROP) FROM THE PITCHER AND THEN ONE MORE AND AFTER THAT STILL ANOTHER AND THEN HE WAS ABLE TO (WIELD->WHEEL) THE SWORD WITH PERFECT EASE +3538-163622-0019-1534: FOR WE ARE BROTHERS OF THE PRINCESS WHOM THOU ART TO HAVE WHEN THOU CANST TELL THE KING WHAT WE EAT AND DRINK BUT THERE IS A MIGHTY TROLL WHO (HAS->IS) CAST A SPELL OVER US +3538-163622-0020-1535: WHEN THEY HAD TRAVELLED (*->ALONG) A LONG (LONG->*) WAY THE FOAL SAID DOST THOU SEE ANYTHING +3538-163622-0021-1536: AND NOW INQUIRED THE (FOAL SEEST THOU->FULL 
CEASE DONE) NOTHING NOW +3538-163622-0022-1537: NOW THEN SAID THE (FOAL->FOUR) DOST THOU NOT SEE ANYTHING NOW +3538-163622-0023-1538: THAT IS A RIVER SAID THE FOAL AND WE HAVE TO CROSS IT +3538-163622-0024-1539: I HAVE DONE MY BEST REPLIED (CINDERLAD->SIR LAD) +3538-163624-0000-1540: ONCE UPON A TIME THERE WAS A KING IN THE NORTH WHO HAD WON MANY WARS BUT NOW HE WAS OLD +3538-163624-0001-1541: THE OLD KING WENT OUT AND (FOUGHT->THOUGHT) BRAVELY BUT AT LAST HIS SWORD BROKE AND HE WAS WOUNDED AND HIS MEN FLED +3538-163624-0002-1542: BUT IN THE NIGHT WHEN THE BATTLE WAS OVER HIS YOUNG WIFE CAME OUT AND SEARCHED FOR HIM AMONG THE SLAIN AND AT LAST SHE FOUND HIM AND ASKED WHETHER HE MIGHT BE HEALED +3538-163624-0003-1543: SO (HE ASKED->YES) THE QUEEN HOW DO YOU KNOW IN THE DARK OF NIGHT WHETHER THE HOURS ARE WEARING TO THE MORNING AND SHE SAID +3538-163624-0004-1544: THEN THE OLD MAN SAID DRIVE ALL THE HORSES INTO THE RIVER AND CHOOSE THE ONE THAT SWIMS ACROSS +3538-163624-0005-1545: HE (IS->HAS) NO BIGGER THAN OTHER DRAGONS SAID THE TUTOR AND IF YOU WERE AS BRAVE AS YOUR FATHER YOU WOULD NOT FEAR HIM +3538-163624-0006-1546: THEN THE PERSON WHO HAD KILLED OTTER WENT DOWN AND CAUGHT THE DWARF WHO OWNED ALL THE TREASURE AND TOOK IT FROM HIM +3538-163624-0007-1547: ONLY ONE RING WAS LEFT WHICH THE DWARF WORE AND EVEN THAT WAS TAKEN FROM HIM +3538-163624-0008-1548: SO (REGIN->RIGAN) MADE A SWORD AND (SIGURD->CIGAR) TRIED IT WITH A BLOW (ON->AND) A LUMP OF IRON AND THE SWORD BROKE +3538-163624-0009-1549: THEN (SIGURD->CIGAR) WENT TO HIS MOTHER AND ASKED FOR THE BROKEN PIECES OF HIS FATHER'S BLADE AND GAVE THEM TO (REGIN->REGAN) +3538-163624-0010-1550: SO (SIGURD->CIGARS) SAID THAT SWORD WOULD DO +3538-163624-0011-1551: THEN HE SAW THE TRACK WHICH THE DRAGON (*->HAD) MADE WHEN HE WENT TO A CLIFF TO DRINK AND THE TRACK WAS AS IF A GREAT RIVER HAD ROLLED ALONG AND LEFT A DEEP VALLEY +3538-163624-0012-1552: BUT (SIGURD->CIGARET) WAITED TILL HALF OF HIM HAD CRAWLED OVER THE PIT AND THEN HE THRUST THE SWORD (GRAM->GRAHAM) RIGHT INTO HIS VERY HEART +3538-163624-0013-1553: (SIGURD->CIGAR) SAID I WOULD TOUCH NONE OF IT IF BY LOSING IT I SHOULD NEVER DIE +3538-163624-0014-1554: BUT ALL MEN DIE AND (NO->KNOW) BRAVE MAN (LETS->LET'S) DEATH FRIGHTEN HIM FROM HIS DESIRE +3538-163624-0015-1555: (DIE->GUY) THOU (FAFNIR->FAFFNER) AND THEN (FAFNIR->STAFF) DIED +3538-163624-0016-1556: THEN (SIGURD->CIGAR) RODE BACK AND MET (REGIN->RIGAN) AND (REGIN->RIGAN) ASKED HIM TO ROAST (FAFNIR'S->FAFNER'S) HEART AND LET HIM TASTE OF IT +3538-163624-0017-1557: SO (SIGURD->SIR GOD) PUT THE HEART OF (FAFNIR->FAFNER) ON A STAKE AND ROASTED IT +3538-163624-0018-1558: (THERE IS SIGURD->THEIR CIGAR) ROASTING (FAFNIR'S->FASTENER'S) HEART FOR ANOTHER WHEN HE SHOULD TASTE OF IT HIMSELF AND LEARN ALL WISDOM +3538-163624-0019-1559: THAT LET HIM DO (AND->*) THEN RIDE OVER (HINDFELL->HINFIELD) TO THE PLACE WHERE (BRYNHILD->BURNHILD) SLEEPS +3538-163624-0020-1560: THERE MUST SHE SLEEP TILL THOU (COMEST->COMES) FOR HER WAKING (*->WHO) RISE UP AND RIDE FOR NOW SURE SHE WILL SWEAR THE VOW FEARLESS OF BREAKING +3538-163624-0021-1561: THEN HE TOOK THE HELMET OFF THE HEAD OF THE SLEEPER AND BEHOLD SHE WAS A MOST BEAUTIFUL LADY +3538-163624-0022-1562: THEN (SIGURD->CIGAR) RODE AWAY AND HE CAME TO THE HOUSE OF A KING WHO HAD A FAIR DAUGHTER +3538-163624-0023-1563: (THEN BRYNHILD'S->WHEN BRUNHOLD'S) FATHER TOLD (GUNNAR->GUNNER) THAT SHE WOULD MARRY NONE BUT HIM WHO COULD RIDE THE FLAME IN FRONT OF HER ENCHANTED TOWER AND THITHER THEY RODE AND 
(GUNNAR->GUNNER) SET HIS HORSE (AT->TO) THE FLAME BUT HE WOULD NOT FACE IT +3538-163624-0024-1564: FOR ONE DAY WHEN (BRYNHILD->BURNEHELD) AND (GUDRUN->GUNDRAIN) WERE BATHING (BRYNHILD WADED->BURNEHELD WAITED) FARTHEST (OUT->SOUTH) INTO THE RIVER AND SAID SHE DID THAT TO SHOW SHE WAS (GUIRUN'S->GUNDERING) SUPERIOR +3538-163624-0025-1565: FOR HER HUSBAND SHE SAID HAD RIDDEN THROUGH THE FLAME WHEN NO OTHER MAN DARED FACE IT +3538-163624-0026-1566: NOT LONG TO WAIT HE SAID TILL THE BITTER SWORD STANDS FAST IN MY HEART AND THOU (WILL->WILT) NOT LIVE LONG WHEN I AM DEAD +367-130732-0000-1466: LOBSTERS AND LOBSTERS +367-130732-0001-1467: WHEN (IS->AS) A LOBSTER NOT A LOBSTER WHEN IT IS A CRAYFISH +367-130732-0002-1468: THIS QUESTION (AND->IN) ANSWER MIGHT WELL GO INTO THE (PRIMER->PRIMARY) OF INFORMATION FOR THOSE WHO COME (TO->THE) SAN FRANCISCO FROM THE EAST FOR WHAT IS CALLED A LOBSTER IN SAN FRANCISCO IS NOT A LOBSTER AT ALL BUT A CRAYFISH +367-130732-0003-1469: THE PACIFIC CRAYFISH (HOWEVER SERVES->HOURSERVES) EVERY PURPOSE AND WHILE MANY (CONTEND THAT->CONTENDED) ITS MEAT IS NOT SO DELICATE (IN FLAVOR->FLAVORITE) AS THAT OF ITS EASTERN COUSIN THE (CALIFORNIAN->CALIFORNIA) WILL AS STRENUOUSLY INSIST THAT IT IS BETTER BUT OF COURSE SOMETHING MUST ALWAYS BE ALLOWED FOR THE PATRIOTISM OF THE (CALIFORNIAN->CALIFORNIA) +367-130732-0004-1470: A BOOK COULD BE WRITTEN ABOUT THIS RESTAURANT AND THEN ALL WOULD NOT BE TOLD FOR ALL ITS SECRETS CAN NEVER BE KNOWN +367-130732-0005-1471: IT WAS HERE THAT MOST MAGNIFICENT DINNERS WERE ARRANGED IT WAS HERE THAT EXTRAORDINARY DISHES WERE CONCOCTED BY CHEFS OF (WORLD WIDE->WOOLWRIGHT) FAME IT WAS HERE THAT LOBSTER (A LA NEWBERG->ALAD NEWBURG) REACHED ITS HIGHEST PERFECTION AND THIS IS THE RECIPE THAT WAS FOLLOWED WHEN (IT->HE) WAS PREPARED IN THE (DELMONICO->DOMONICO) +367-130732-0006-1472: LOBSTER (A LA NEWBERG->OLY NEWBURG) +367-130732-0007-1473: ONE POUND OF LOBSTER MEAT ONE TEASPOONFUL OF BUTTER ONE HALF PINT OF CREAM YOLKS OF FOUR EGGS ONE WINE GLASS OF SHERRY LOBSTER FAT +367-130732-0008-1474: PUT THIS IN A DOUBLE BOILER AND LET COOK UNTIL THICK STIRRING CONSTANTLY +367-130732-0009-1475: SERVE IN A (CHAFING->CHIEFING) DISH WITH (THIN->FLIND) SLICES OF DRY TOAST +367-130732-0010-1476: KING OF (SHELL FISH->SHELLFISH) +367-130732-0011-1477: ONE HAS TO COME TO SAN FRANCISCO TO PARTAKE OF THE KING OF (SHELL FISH->SHELLFISH) THE MAMMOTH PACIFIC CRAB +367-130732-0012-1478: I SAY COME TO SAN FRANCISCO ADVISEDLY FOR WHILE THE CRAB IS FOUND ALL ALONG THE COAST IT IS PREPARED NOWHERE SO DELICIOUSLY AS IN (SAN FRANCISCO->SAMPANCISCO) +367-130732-0013-1479: (GOBEY'S PASSED->GOBYS PASS) WITH THE FIRE AND THE LITTLE RESTAURANT BEARING HIS NAME (AND->*) IN CHARGE OF HIS WIDOW (IN->AND) UNION SQUARE AVENUE HAS NOT ATTAINED THE FAME OF THE OLD PLACE +367-130732-0014-1480: IT IS POSSIBLE THAT SHE KNOWS THE SECRET OF PREPARING CRAB AS IT WAS PREPARED IN THE (GOBEY'S->GOBIES) OF BEFORE THE FIRE BUT HIS (PRESTIGE->PRESAGE) DID NOT DESCEND TO HER +367-130732-0015-1481: (GOBEY'S CRAB STEW->GOBY'S CRABS DO) +367-130732-0016-1482: TAKE THE MEAT OF ONE LARGE CRAB SCRAPING OUT ALL (OF->*) THE (FAT->BAT) FROM THE SHELL +367-130732-0017-1483: SOAK THE CRAB MEAT IN THE SHERRY TWO HOURS BEFORE COOKING +367-130732-0018-1484: CHOP FINE THE ONION SWEET PEPPER AND TOMATO WITH THE ROSEMARY +367-130732-0019-1485: HEAT THIS IN A (STEWPAN->STEWPANT) AND (WHEN->WENT) SIMMERING (ADD->AT) THE SHERRY AND CRAB (MEAT->ME) AND LET ALL COOK TOGETHER WITH A SLOW FIRE FOR EIGHT MINUTES 
+367-130732-0020-1486: SERVE IN A CHAFING DISH WITH TOASTED CRACKERS OR THIN SLICES OF TOASTED BREAD +367-130732-0021-1487: LOBSTER IN MINIATURE +367-130732-0022-1488: SO FAR IT HAS BEEN USED MOSTLY FOR GARNISHMENT OF OTHER DISHES AND IT IS ONLY RECENTLY THAT THE (HOF BRAU->WHOLE BROW) HAS BEEN MAKING A SPECIALTY OF THEM +367-130732-0023-1489: ALL (OF->*) THE BETTER CLASS RESTAURANTS HOWEVER WILL SERVE THEM IF YOU ORDER THEM +367-130732-0024-1490: THIS IS THE RECIPE FOR EIGHT PEOPLE AND IT IS WELL (*->IT) WORTH TRYING IF YOU ARE GIVING A DINNER OF IMPORTANCE +367-130732-0025-1491: (BISQUE->THIS) OF (CRAWFISH->CROFISH) +367-130732-0026-1492: TAKE THIRTY (CRAWFISH->CROPFISH) FROM WHICH REMOVE THE GUT CONTAINING THE GALL IN THE FOLLOWING MANNER TAKE FIRM HOLD OF THE CRAWFISH WITH THE LEFT HAND SO AS TO AVOID BEING PINCHED BY ITS (CLAWS->CLOTH) WITH THE THUMB AND FOREFINGER OF THE RIGHT HAND PINCH THE EXTREME END OF THE CENTRAL FIN OF THE TAIL AND WITH A SUDDEN JERK THE GUT WILL BE WITHDRAWN +367-130732-0027-1493: MINCE (OR->ARE) CUT INTO SMALL DICE A CARROT AN ONION ONE HEAD OF CELERY AND A FEW PARSLEY ROOTS AND TO THESE (ADD->AT) A BAY LEAF (*->OF) A SPRIG OF THYME A LITTLE (MINIONETTE->MEAN ON IT) PEPPER AND TWO (OUNCES->OZ) OF BUTTER +367-130732-0028-1494: PUT THESE INGREDIENTS INTO A STEWPAN AND FRY THEM TEN MINUTES THEN THROW IN THE (CRAWFISH->CROPPISH) AND POUR ON THEM HALF A BOTTLE OF FRENCH WHITE WINE +367-130732-0029-1495: ALLOW (THIS->US) TO BOIL AND THEN ADD A QUART OF STRONG (CONSOMME->CONSUM) AND LET ALL CONTINUE BOILING FOR HALF AN HOUR +367-130732-0030-1496: PICK OUT THE (CRAWFISH->CRAW FISH) AND STRAIN THE BROTH THROUGH A NAPKIN BY PRESSURE INTO A BASIN IN ORDER TO EXTRACT ALL THE ESSENCE FROM THE VEGETABLES +367-130732-0031-1497: PICK THE SHELLS (OFF->OF) TWENTY FIVE OF THE (CRAWFISH->CROFISH) TAILS TRIM THEM NEATLY AND SET THEM ASIDE UNTIL WANTED +367-130732-0032-1498: RESERVE SOME OF THE SPAWN ALSO HALF (OF->*) THE BODY SHELLS WITH WHICH TO MAKE THE CRAWFISH BUTTER TO FINISH THE SOUP +367-130732-0033-1499: THIS BUTTER IS MADE AS FOLLOWS PLACE THE SHELLS (ON->IN) A BAKING SHEET IN THE OVEN TO DRY LET THE SHELLS COOL AND THEN POUND THEM IN A MORTAR WITH A LITTLE LOBSTER (CORAL->COAL) AND FOUR OUNCES OF FRESH BUTTER THOROUGHLY BRUISING THE WHOLE TOGETHER SO AS TO MAKE A FINE PASTE +367-293981-0000-1445: I SWEAR (IT->*) ANSWERED SANCHO +367-293981-0001-1446: I SAY SO CONTINUED DON QUIXOTE BECAUSE I HATE TAKING AWAY (ANYONE'S->ANY ONE'S) GOOD NAME +367-293981-0002-1447: I SAY REPLIED SANCHO THAT I SWEAR TO HOLD MY TONGUE ABOUT IT TILL THE END OF YOUR (WORSHIP'S DAYS->WORSHIP STAYS) AND (GOD->GONE) GRANT I MAY BE ABLE TO LET IT OUT (TOMORROW->TO MORROW) +367-293981-0003-1448: THOUGH YOUR WORSHIP WAS NOT SO BADLY OFF HAVING IN YOUR ARMS (THAT INCOMPARABLE->THE INN COMPARABLE) BEAUTY YOU SPOKE OF BUT I WHAT DID I HAVE EXCEPT THE HEAVIEST (WHACKS->WAX THAT) I THINK I HAD IN ALL MY LIFE +367-293981-0004-1449: UNLUCKY ME (AND->INTO) THE MOTHER THAT BORE ME +367-293981-0005-1450: DIDN'T I SAY SO WORSE LUCK TO MY LINE SAID SANCHO +367-293981-0006-1451: IT CANNOT BE THE (MOOR->MORE) ANSWERED DON QUIXOTE FOR THOSE UNDER ENCHANTMENT DO NOT LET THEMSELVES BE SEEN BY (ANYONE->ANY ONE) +367-293981-0007-1452: IF THEY (DON'T->DO NOT) LET THEMSELVES BE SEEN THEY LET THEMSELVES BE FELT SAID SANCHO IF NOT LET MY SHOULDERS SPEAK TO THE POINT +367-293981-0008-1453: (MINE->MIKE) COULD SPEAK TOO SAID DON QUIXOTE BUT THAT IS NOT A (SUFFICIENT->SUSPICION OF) REASON FOR BELIEVING THAT WHAT WE SEE IS THE 
ENCHANTED MOOR +367-293981-0009-1454: THE (OFFICER->OFFICERS) TURNED TO HIM AND SAID WELL HOW GOES (IT->A) GOOD MAN +367-293981-0010-1455: (SANCHO GOT->SANCHA CUT) UP WITH PAIN ENOUGH IN HIS BONES AND WENT AFTER THE INNKEEPER IN THE DARK AND MEETING THE OFFICER WHO WAS LOOKING TO SEE WHAT HAD BECOME OF HIS ENEMY HE SAID TO HIM (SENOR->SIGNOR) WHOEVER YOU ARE DO US (THE FAVOUR->TO FAVOR) AND KINDNESS TO GIVE US A LITTLE ROSEMARY OIL SALT AND (WINE->WHITE) FOR IT IS (WANTED->WATER) TO CURE ONE OF (THE->OUR) BEST KNIGHTS ERRANT ON EARTH WHO LIES ON YONDER BED WOUNDED BY THE HANDS OF THE ENCHANTED MOOR THAT IS IN THIS INN +367-293981-0011-1456: TO BE BRIEF HE TOOK THE (MATERIALS->MATURES) OF WHICH HE MADE A COMPOUND MIXING THEM ALL (AND->*) BOILING THEM A GOOD WHILE (*->IT) UNTIL IT SEEMED TO HIM THEY HAD COME TO PERFECTION +367-293981-0012-1457: SANCHO PANZA WHO ALSO REGARDED THE AMENDMENT OF HIS MASTER AS MIRACULOUS BEGGED HIM TO GIVE HIM WHAT WAS (LEFT IN THE PIGSKIN->LET AN OPINION) WHICH WAS NO SMALL QUANTITY +367-293981-0013-1458: DON QUIXOTE CONSENTED AND HE TAKING IT WITH BOTH HANDS IN GOOD FAITH AND WITH A BETTER WILL GULPED (*->IT) DOWN AND DRAINED (OFF->UP) VERY LITTLE LESS (THAN->IN) HIS MASTER +367-293981-0014-1459: IF YOUR WORSHIP KNEW THAT RETURNED SANCHO (WOE->WON'T) BETIDE ME AND ALL MY KINDRED WHY DID YOU LET ME TASTE IT +367-293981-0015-1460: SEARCH YOUR MEMORY AND IF YOU FIND ANYTHING OF THIS KIND YOU NEED ONLY TELL ME OF IT AND I PROMISE YOU BY THE ORDER OF KNIGHTHOOD WHICH I HAVE RECEIVED TO PROCURE YOU SATISFACTION (AND->IN) REPARATION TO THE UTMOST OF YOUR DESIRE +367-293981-0016-1461: THEN THIS IS AN (INN->IN) SAID DON QUIXOTE +367-293981-0017-1462: (AND->IN) A VERY RESPECTABLE ONE SAID THE INNKEEPER +367-293981-0018-1463: THE CRIES OF THE POOR (BLANKETED->BLANKET) WRETCH WERE SO LOUD THAT THEY REACHED THE EARS OF HIS MASTER WHO HALTING TO LISTEN ATTENTIVELY WAS PERSUADED THAT SOME NEW ADVENTURE WAS COMING UNTIL HE CLEARLY PERCEIVED THAT IT WAS (HIS->THE) SQUIRE WHO UTTERED THEM +367-293981-0019-1464: HE SAW HIM RISING AND FALLING IN THE AIR WITH SUCH GRACE AND NIMBLENESS THAT HAD HIS RAGE ALLOWED HIM IT IS MY BELIEF HE WOULD HAVE LAUGHED +367-293981-0020-1465: SANCHO TOOK IT AND AS HE WAS RAISING IT TO HIS MOUTH HE WAS STOPPED BY THE CRIES OF HIS MASTER EXCLAIMING SANCHO MY SON DRINK NOT WATER (DRINK IT NOT->DRINKIN UP) MY SON FOR IT WILL KILL THEE SEE HERE I HAVE THE BLESSED BALSAM AND HE HELD UP THE FLASK OF LIQUOR AND WITH DRINKING TWO DROPS (OF IT->WHAT) THOU WILT CERTAINLY BE RESTORED +3764-168670-0000-1666: THE STRIDES OF A LAME MAN ARE LIKE THE OGLING GLANCES OF A ONE EYED MAN THEY DO NOT REACH THEIR GOAL VERY PROMPTLY +3764-168670-0001-1667: COSETTE HAD WAKED UP +3764-168670-0002-1668: JEAN VALJEAN HAD PLACED HER NEAR THE FIRE +3764-168670-0003-1669: YOU WILL WAIT FOR ME AT A LADY'S HOUSE I SHALL COME TO FETCH YOU +3764-168670-0004-1670: EVERYTHING IS (ARRANGED->RANGED) AND NOTHING IS SAID FAUCHELEVENT +3764-168670-0005-1671: I HAVE PERMISSION TO BRING YOU IN BUT BEFORE BRINGING YOU IN YOU MUST BE GOT OUT +3764-168670-0006-1672: THAT'S WHERE THE DIFFICULTY LIES +3764-168670-0007-1673: IT IS EASY ENOUGH WITH THE CHILD YOU WILL CARRY HER OUT +3764-168670-0008-1674: AND SHE WILL HOLD HER TONGUE I ANSWER FOR THAT +3764-168670-0009-1675: FAUCHELEVENT GRUMBLED MORE TO HIMSELF THAN TO JEAN VALJEAN +3764-168670-0010-1676: YOU UNDERSTAND FATHER MADELEINE THE GOVERNMENT WILL NOTICE IT +3764-168670-0011-1677: JEAN VALJEAN STARED HIM STRAIGHT IN THE EYE AND THOUGHT THAT 
HE WAS RAVING +3764-168670-0012-1678: FAUCHELEVENT WENT ON +3764-168670-0013-1679: IT IS TO MORROW THAT I AM TO BRING YOU IN THE (PRIORESS->PRIORS) EXPECTS YOU +3764-168670-0014-1680: THEN HE EXPLAINED TO JEAN VALJEAN THAT THIS WAS HIS RECOMPENSE FOR A SERVICE WHICH HE (FAUCHELEVENT->FOR CHAUVELIN) WAS TO RENDER TO THE COMMUNITY +3764-168670-0015-1681: THAT THE NUN WHO HAD DIED THAT MORNING HAD REQUESTED TO BE BURIED IN THE COFFIN WHICH HAD SERVED HER FOR A BED AND INTERRED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL +3764-168670-0016-1682: THAT THE (PRIORESS->PRIOR REST) AND THE VOCAL MOTHERS INTENDED TO FULFIL THE WISH OF THE DECEASED +3764-168670-0017-1683: THAT HE (FAUCHELEVENT->FOR SCHLEVENT) WAS TO NAIL UP THE COFFIN IN THE CELL RAISE THE STONE IN THE CHAPEL AND (LOWER->BLOW) THE CORPSE INTO THE VAULT +3764-168670-0018-1684: AND THEN THAT THERE WAS ANOTHER THE EMPTY COFFIN +3764-168670-0019-1685: WHAT IS THAT EMPTY COFFIN +3764-168670-0020-1686: ASKED JEAN VALJEAN FAUCHELEVENT REPLIED +3764-168670-0021-1687: WHAT COFFIN WHAT ADMINISTRATION +3764-168670-0022-1688: FAUCHELEVENT WHO WAS SEATED SPRANG UP AS THOUGH A BOMB HAD BURST UNDER HIS CHAIR YOU +3764-168670-0023-1689: YOU KNOW FAUCHELEVENT WHAT YOU HAVE SAID MOTHER CRUCIFIXION IS DEAD +3764-168670-0024-1690: AND I ADD AND FATHER MADELEINE IS BURIED (AH->*) +3764-168670-0025-1691: YOU ARE NOT LIKE OTHER MEN FATHER MADELEINE +3764-168670-0026-1692: THIS OFFERS THE MEANS BUT GIVE ME SOME INFORMATION IN THE FIRST PLACE +3764-168670-0027-1693: HOW LONG IS THE COFFIN SIX FEET +3764-168670-0028-1694: IT IS A CHAMBER ON THE GROUND FLOOR WHICH HAS A GRATED WINDOW OPENING ON THE GARDEN WHICH IS CLOSED ON THE OUTSIDE BY A SHUTTER AND TWO DOORS ONE LEADS INTO THE CONVENT THE OTHER INTO THE CHURCH (WHAT CHURCH->A WATCH) +3764-168670-0029-1695: THE CHURCH IN THE STREET (*->THOUGH) THE CHURCH WHICH ANY ONE CAN ENTER +3764-168670-0030-1696: HAVE YOU THE KEYS TO THOSE TWO DOORS +3764-168670-0031-1697: (*->AND) NO I HAVE THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CONVENT THE PORTER HAS THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CHURCH +3764-168670-0032-1698: ONLY TO ALLOW THE (UNDERTAKER'S->UNDERTAKERS) MEN TO ENTER WHEN THEY COME TO GET THE COFFIN +3764-168670-0033-1699: WHO NAILS UP THE COFFIN I DO +3764-168670-0034-1700: WHO SPREADS THE (PALL->POOL) OVER IT +3764-168670-0035-1701: NOT ANOTHER MAN EXCEPT THE POLICE DOCTOR CAN ENTER THE (DEAD ROOM->BEDROOM) THAT IS EVEN WRITTEN ON THE WALL +3764-168670-0036-1702: COULD YOU HIDE ME IN THAT ROOM TO NIGHT WHEN EVERY ONE IS ASLEEP +3764-168670-0037-1703: ABOUT THREE O'CLOCK IN THE AFTERNOON +3764-168670-0038-1704: I SHALL BE HUNGRY I WILL BRING YOU SOMETHING +3764-168670-0039-1705: YOU CAN COME AND NAIL ME UP IN THE COFFIN AT TWO O'CLOCK +3764-168670-0040-1706: FAUCHELEVENT RECOILED AND CRACKED HIS FINGER JOINTS BUT THAT IS IMPOSSIBLE +3764-168670-0041-1707: BAH IMPOSSIBLE TO TAKE A HAMMER AND DRIVE SOME NAILS IN A PLANK +3764-168670-0042-1708: JEAN VALJEAN HAD BEEN IN WORSE (STRAITS->STRAIT) THAN THIS +3764-168670-0043-1709: ANY MAN WHO HAS BEEN A PRISONER UNDERSTANDS HOW TO CONTRACT HIMSELF TO FIT THE DIAMETER OF THE ESCAPE +3764-168670-0044-1710: WHAT DOES NOT A MAN UNDERGO FOR THE SAKE OF A CURE +3764-168670-0045-1711: TO HAVE HIMSELF NAILED UP IN A CASE AND CARRIED OFF LIKE A BALE OF GOODS TO LIVE FOR A LONG TIME IN A BOX TO FIND AIR WHERE THERE IS NONE TO ECONOMIZE HIS BREATH FOR HOURS TO KNOW HOW TO STIFLE WITHOUT DYING THIS WAS ONE OF JEAN VALJEAN'S GLOOMY TALENTS 
+3764-168670-0046-1712: YOU SURELY MUST HAVE A GIMLET YOU WILL MAKE A FEW HOLES HERE AND THERE AROUND MY MOUTH AND YOU WILL NAIL THE TOP PLANK ON LOOSELY GOOD AND WHAT IF YOU SHOULD HAPPEN TO COUGH OR TO SNEEZE +3764-168670-0047-1713: A MAN WHO IS MAKING HIS ESCAPE DOES NOT COUGH OR SNEEZE +3764-168670-0048-1714: WHO IS THERE WHO HAS NOT SAID TO A CAT DO COME IN +3764-168670-0049-1715: THE (OVER PRUDENT->OVERPRUDENT) CATS AS THEY ARE AND BECAUSE THEY ARE CATS SOMETIMES INCUR MORE DANGER THAN THE AUDACIOUS +3764-168670-0050-1716: BUT JEAN VALJEAN'S COOLNESS PREVAILED OVER HIM IN SPITE OF HIMSELF HE GRUMBLED +3764-168670-0051-1717: IF YOU ARE SURE OF COMING OUT OF THE COFFIN ALL RIGHT I AM SURE OF GETTING (YOU->*) OUT OF THE GRAVE +3764-168670-0052-1718: AN OLD FELLOW OF THE OLD SCHOOL THE GRAVE DIGGER PUTS THE CORPSES IN THE GRAVE AND I PUT THE GRAVE DIGGER IN MY POCKET +3764-168670-0053-1719: I SHALL FOLLOW THAT IS MY BUSINESS +3764-168670-0054-1720: THE (HEARSE HALTS->HOUSEHOLTS) THE (UNDERTAKER'S->UNDERTAKERS) MEN (KNOT->NOT) A ROPE AROUND YOUR COFFIN AND LOWER YOU DOWN +3764-168670-0055-1721: THE (PRIEST SAYS->PRIESTS AS) THE PRAYERS MAKES THE SIGN OF THE CROSS SPRINKLES THE HOLY WATER AND TAKES HIS DEPARTURE +3764-168670-0056-1722: ONE OF TWO THINGS WILL HAPPEN HE WILL EITHER BE SOBER OR HE WILL NOT BE SOBER +3764-168670-0057-1723: THAT IS SETTLED FATHER FAUCHELEVENT ALL WILL GO WELL +3764-168671-0000-1724: ON THE FOLLOWING DAY AS THE SUN WAS DECLINING THE VERY RARE PASSERS BY ON THE BOULEVARD DU (MAINE->MIN) PULLED OFF THEIR HATS TO AN OLD FASHIONED HEARSE ORNAMENTED WITH SKULLS (CROSS BONES->CROSSBONES) AND TEARS +3764-168671-0001-1725: THIS HEARSE CONTAINED A COFFIN COVERED WITH A WHITE CLOTH OVER WHICH SPREAD A LARGE BLACK CROSS LIKE A HUGE CORPSE WITH DROOPING ARMS +3764-168671-0002-1726: (A MOURNING->THE MORNING) COACH IN WHICH COULD BE SEEN A PRIEST IN HIS SURPLICE AND A CHOIR BOY IN HIS RED CAP FOLLOWED +3764-168671-0003-1727: BEHIND IT CAME AN OLD MAN IN THE GARMENTS OF A LABORER WHO LIMPED ALONG +3764-168671-0004-1728: THE GRAVE DIGGERS BEING THUS BOUND TO SERVICE IN THE EVENING IN SUMMER AND AT NIGHT IN WINTER IN THIS CEMETERY THEY WERE SUBJECTED TO A SPECIAL DISCIPLINE +3764-168671-0005-1729: THESE GATES THEREFORE SWUNG INEXORABLY ON THEIR HINGES AT THE INSTANT WHEN THE SUN DISAPPEARED BEHIND THE DOME OF THE INVALIDES +3764-168671-0006-1730: DAMPNESS WAS INVADING IT THE FLOWERS WERE DESERTING IT +3764-168671-0007-1731: THE BOURGEOIS DID NOT CARE MUCH ABOUT BEING BURIED IN THE (VAUGIRARD->ROGER) IT HINTED AT POVERTY (PERE LACHAISE->PALACE) IF YOU PLEASE +3764-168671-0008-1732: TO BE BURIED IN (PERE LACHAISE->PERFELASHES) IS EQUIVALENT TO HAVING FURNITURE OF MAHOGANY IT IS RECOGNIZED AS ELEGANT +3764-168671-0009-1733: THE INTERMENT OF MOTHER CRUCIFIXION IN THE VAULT UNDER THE ALTAR THE EXIT OF COSETTE THE INTRODUCTION OF JEAN VALJEAN (TO->INTO) THE DEAD ROOM ALL HAD BEEN EXECUTED WITHOUT DIFFICULTY AND THERE HAD BEEN NO HITCH LET US REMARK IN PASSING THAT THE BURIAL OF MOTHER CRUCIFIXION UNDER THE ALTAR OF THE CONVENT IS A PERFECTLY VENIAL OFFENCE IN OUR SIGHT +3764-168671-0010-1734: IT IS ONE OF THE FAULTS WHICH RESEMBLE A DUTY +3764-168671-0011-1735: THE NUNS HAD COMMITTED IT NOT ONLY WITHOUT DIFFICULTY BUT EVEN WITH THE APPLAUSE OF THEIR OWN CONSCIENCES +3764-168671-0012-1736: IN THE CLOISTER WHAT IS CALLED THE GOVERNMENT IS ONLY AN INTERMEDDLING WITH AUTHORITY AN INTERFERENCE WHICH (IS->HAS) ALWAYS QUESTIONABLE +3764-168671-0013-1737: MAKE AS MANY (LAWS->NOISE) AS YOU 
PLEASE MEN BUT KEEP THEM FOR YOURSELVES +3764-168671-0014-1738: A PRINCE IS NOTHING IN THE PRESENCE OF A PRINCIPLE +3764-168671-0015-1739: FAUCHELEVENT LIMPED ALONG BEHIND THE HEARSE IN A VERY CONTENTED FRAME OF MIND +3764-168671-0016-1740: JEAN VALJEAN'S COMPOSURE WAS ONE OF THOSE POWERFUL TRANQUILLITIES WHICH ARE CONTAGIOUS +3764-168671-0017-1741: WHAT REMAINED TO BE DONE WAS A MERE NOTHING +3764-168671-0018-1742: HE PLAYED WITH FATHER (MESTIENNE->MESSION) +3764-168671-0019-1743: HE DID WHAT HE LIKED WITH HIM HE MADE HIM DANCE ACCORDING TO HIS WHIM +3764-168671-0020-1744: THE PERMISSION FOR INTERMENT MUST BE EXHIBITED +3764-168671-0021-1745: HE WAS A SORT OF LABORING MAN WHO WORE A WAISTCOAT WITH LARGE POCKETS AND CARRIED A MATTOCK UNDER HIS ARM +3764-168671-0022-1746: THE MAN REPLIED THE GRAVE DIGGER +3764-168671-0023-1747: THE (GRAVE->BRAVE) DIGGER YES +3764-168671-0024-1748: YOU I +3764-168671-0025-1749: FATHER (MESTIENNE->MACHIN) IS THE GRAVE DIGGER HE WAS +3764-168671-0026-1750: FAUCHELEVENT HAD EXPECTED ANYTHING BUT THIS THAT A GRAVE DIGGER COULD DIE +3764-168671-0027-1751: IT IS TRUE NEVERTHELESS THAT GRAVE DIGGERS DO DIE THEMSELVES +3764-168671-0028-1752: HE HAD HARDLY THE STRENGTH TO STAMMER +3764-168671-0029-1753: BUT HE PERSISTED FEEBLY FATHER (MESTIENNE->MESSIAN) IS THE GRAVE DIGGER +3764-168671-0030-1754: DO YOU KNOW WHO LITTLE FATHER (LENOIR->NOIR) IS HE IS A JUG OF RED WINE +3764-168671-0031-1755: BUT YOU ARE A JOLLY FELLOW TOO +3764-168671-0032-1756: ARE YOU NOT COMRADE WE'LL GO AND HAVE A DRINK TOGETHER PRESENTLY +3764-168671-0033-1757: THE MAN REPLIED +3764-168671-0034-1758: HE LIMPED MORE OUT OF ANXIETY THAN FROM INFIRMITY +3764-168671-0035-1759: THE GRAVE DIGGER WALKED ON IN FRONT OF HIM +3764-168671-0036-1760: FAUCHELEVENT PASSED THE UNEXPECTED (GRIBIER->CLAVIER) ONCE MORE IN REVIEW +3764-168671-0037-1761: FAUCHELEVENT WHO WAS ILLITERATE BUT VERY SHARP UNDERSTOOD THAT HE HAD TO DEAL WITH A FORMIDABLE SPECIES OF MAN WITH A FINE TALKER HE MUTTERED +3764-168671-0038-1762: (SO->MISS OH) FATHER (MESTIENNE->MESS TEEN) IS DEAD +3764-168671-0039-1763: THE MAN REPLIED COMPLETELY +3764-168671-0040-1764: THE GOOD GOD CONSULTED HIS NOTE BOOK WHICH SHOWS WHEN THE TIME IS UP IT WAS FATHER MESTIENNE'S TURN (FATHER MESTIENNE->FOR THE MESSIAN) DIED +3764-168671-0041-1765: STAMMERED FAUCHELEVENT IT IS MADE +3764-168671-0042-1766: YOU ARE A PEASANT I AM A PARISIAN +3764-168671-0043-1767: FAUCHELEVENT THOUGHT I AM LOST +3764-168671-0044-1768: THEY WERE ONLY A FEW TURNS OF THE WHEEL DISTANT FROM THE SMALL ALLEY LEADING TO THE (NUNS->NUN'S) CORNER +3764-168671-0045-1769: AND HE ADDED WITH THE SATISFACTION OF A SERIOUS MAN WHO IS TURNING A PHRASE WELL +3764-168671-0046-1770: FORTUNATELY THE SOIL WHICH WAS LIGHT AND WET WITH THE WINTER RAINS CLOGGED THE WHEELS AND RETARDED ITS SPEED +3764-168671-0047-1771: MY FATHER WAS A PORTER AT THE (PRYTANEUM->BRITTANNIUM) TOWN HALL +3764-168671-0048-1772: BUT HE HAD REVERSES HE HAD (LOSSES ON CHANGE->LOSES UNCHANGED) I WAS OBLIGED TO RENOUNCE THE PROFESSION OF AUTHOR BUT I AM STILL A PUBLIC WRITER +3764-168671-0049-1773: (*->BUT) SO YOU ARE NOT A GRAVE DIGGER THEN +3764-168671-0050-1774: RETURNED FAUCHELEVENT CLUTCHING AT THIS BRANCH FEEBLE AS IT WAS +3764-168671-0051-1775: HERE A REMARK BECOMES NECESSARY +3764-168671-0052-1776: (*->A) FAUCHELEVENT WHATEVER HIS ANGUISH OFFERED A DRINK BUT HE DID NOT EXPLAIN HIMSELF ON ONE POINT WHO WAS TO PAY +3764-168671-0053-1777: THE GRAVE DIGGER WENT ON WITH (A->THE) SUPERIOR SMILE +3764-168671-0054-1778: ONE MUST 
EAT +3997-180294-0000-1800: THE DUKE COMES EVERY MORNING THEY WILL TELL HIM WHEN HE COMES THAT I AM ASLEEP AND PERHAPS HE WILL WAIT UNTIL I (WAKE->AWAKE) +3997-180294-0001-1801: YES BUT IF I SHOULD ALREADY ASK FOR SOMETHING WHAT +3997-180294-0002-1802: WELL DO IT FOR ME FOR I SWEAR TO YOU (THAT I->THY) DON'T LOVE YOU AS THE OTHERS HAVE LOVED YOU +3997-180294-0003-1803: THERE ARE BOLTS (ON->IN) THE DOOR WRETCH +3997-180294-0004-1804: I DON'T KNOW HOW IT IS BUT IT SEEMS TO ME AS IF I DO +3997-180294-0005-1805: NOW GO I CAN'T KEEP MY EYES OPEN +3997-180294-0006-1806: IT (SEEMED->SEEMS) TO ME AS IF THIS SLEEPING CITY (BELONGED->BELONGS) TO ME I SEARCHED MY MEMORY FOR THE NAMES OF THOSE WHOSE HAPPINESS I HAD ONCE ENVIED AND I COULD NOT RECALL ONE WITHOUT FINDING MYSELF THE HAPPIER +3997-180294-0007-1807: EDUCATION FAMILY FEELING THE SENSE OF DUTY THE FAMILY ARE STRONG SENTINELS BUT THERE ARE NO SENTINELS SO VIGILANT AS NOT TO BE DECEIVED BY A GIRL OF SIXTEEN TO WHOM NATURE BY THE VOICE OF THE MAN SHE LOVES GIVES THE FIRST (COUNSELS->COUNCIL) OF LOVE ALL THE MORE (ARDENT->ARDENTS) BECAUSE THEY SEEM SO PURE +3997-180294-0008-1808: THE MORE (A->*) GIRL BELIEVES IN GOODNESS THE MORE EASILY WILL SHE GIVE WAY IF NOT TO HER LOVER AT LEAST TO LOVE FOR (BEING->BE) WITHOUT MISTRUST SHE IS WITHOUT FORCE AND TO WIN HER LOVE (IS->AS) A TRIUMPH THAT CAN BE GAINED BY ANY YOUNG (MAN->MEN) OF FIVE AND TWENTY SEE HOW YOUNG GIRLS ARE WATCHED AND GUARDED +3997-180294-0009-1809: THEN HOW SURELY MUST THEY DESIRE THE WORLD WHICH IS HIDDEN FROM THEM HOW (SURELY->TRULY) MUST THEY FIND IT TEMPTING HOW SURELY MUST THEY (LISTEN->LISTENED) TO THE FIRST VOICE WHICH COMES TO TELL ITS SECRETS THROUGH THEIR BARS AND BLESS THE HAND WHICH (*->HE) IS THE FIRST TO RAISE A CORNER OF THE (MYSTERIOUS->MYSTERY) VEIL +3997-180294-0010-1810: WITH THEM THE BODY HAS WORN OUT THE SOUL THE SENSES (HAVE->HALF) BURNED UP THE HEART DISSIPATION HAS BLUNTED THE FEELINGS +3997-180294-0011-1811: THEY LOVE BY PROFESSION AND NOT BY INSTINCT +3997-180294-0012-1812: WHEN A CREATURE WHO HAS ALL HER PAST TO REPROACH HERSELF WITH IS TAKEN ALL AT ONCE BY A PROFOUND SINCERE IRRESISTIBLE LOVE OF WHICH SHE HAD NEVER FELT HERSELF CAPABLE WHEN SHE HAS CONFESSED HER LOVE HOW ABSOLUTELY THE MAN WHOM SHE LOVES DOMINATES HER +3997-180294-0013-1813: THEY KNOW NOT WHAT PROOF TO GIVE +3997-180294-0014-1814: IN ORDER TO DISTURB THE (LABOURERS->LABORERS) IN THE (FIELD->FIELDS) WAS ONE DAY DEVOURED BY A WOLF BECAUSE THOSE WHOM HE HAD SO OFTEN DECEIVED NO LONGER BELIEVED IN HIS CRIES FOR HELP +3997-180294-0015-1815: (IT->THIS) IS THE SAME WITH THESE UNHAPPY WOMEN WHEN (THEY LOVE->HE LOVED) SERIOUSLY +3997-180294-0016-1816: BUT WHEN THE MAN WHO INSPIRES THIS REDEEMING LOVE IS GREAT ENOUGH IN SOUL TO RECEIVE IT WITHOUT REMEMBERING THE PAST WHEN HE GIVES HIMSELF UP TO IT WHEN IN SHORT HE LOVES AS HE IS LOVED THIS MAN DRAINS AT ONE (DRAUGHT->DROUGHT) ALL EARTHLY EMOTIONS AND AFTER SUCH A LOVE HIS HEART WILL BE CLOSED TO EVERY OTHER +3997-180294-0017-1817: BUT TO RETURN TO THE FIRST DAY OF MY (LIAISON->YEAR SONG) +3997-180294-0018-1818: WHEN I REACHED HOME I WAS IN A STATE OF (MAD->MADGE) GAIETY +3997-180294-0019-1819: THE WOMAN BECOMES THE MAN'S MISTRESS AND LOVES HIM +3997-180294-0020-1820: HOW WHY +3997-180294-0021-1821: MY WHOLE BEING WAS EXALTED INTO JOY AT THE MEMORY OF THE WORDS WE HAD EXCHANGED DURING THAT FIRST NIGHT +3997-180294-0022-1822: HERE ARE MY ORDERS TO NIGHT AT THE VAUDEVILLE +3997-180294-0023-1823: COME DURING THE THIRD (ENTR'ACTE->ENTRACT) 
+3997-180294-0024-1824: THE BOXES FILLED ONE AFTER ANOTHER +3997-180294-0025-1825: ONLY ONE (REMAINED->REMAINS) EMPTY THE STAGE BOX +3997-180294-0026-1826: AT THE BEGINNING OF THE THIRD ACT I HEARD THE DOOR OF THE BOX ON WHICH MY EYES HAD BEEN ALMOST CONSTANTLY FIXED OPEN AND MARGUERITE APPEARED +3997-180294-0027-1827: (DID->THAT) SHE (LOVE->LOVED) ME ENOUGH TO BELIEVE THAT THE MORE BEAUTIFUL SHE LOOKED THE HAPPIER I SHOULD BE +3997-180294-0028-1828: WHAT IS THE MATTER WITH YOU TO NIGHT SAID MARGUERITE RISING AND COMING TO THE BACK OF THE BOX AND KISSING ME ON THE FOREHEAD +3997-180294-0029-1829: (YOU->HE) SHOULD GO TO BED SHE REPLIED WITH THAT (IRONICAL->IRONIC) AIR WHICH WENT SO WELL WITH HER DELICATE AND WITTY FACE +3997-180294-0030-1830: WHERE AT HOME +3997-180294-0031-1831: YOU STILL LOVE ME CAN YOU ASK +3997-180294-0032-1832: BECAUSE YOU DON'T LIKE SEEING HIM +3997-180294-0033-1833: (NONETHELESS->NONE THE LESS) I WAS VERY UNHAPPY ALL THE REST OF THE EVENING AND WENT AWAY VERY SADLY AFTER HAVING SEEN PRUDENCE THE COUNT AND MARGUERITE GET INTO THE CARRIAGE WHICH WAS (WAITING->WINNING) FOR THEM AT THE DOOR +3997-180297-0000-1834: I HAVE NOT COME TO HINDER YOU FROM LEAVING PARIS +3997-180297-0001-1835: YOU IN THE WAY MARGUERITE BUT HOW +3997-180297-0002-1836: WELL YOU MIGHT HAVE HAD A WOMAN HERE SAID PRUDENCE AND IT WOULD HARDLY HAVE BEEN AMUSING FOR HER TO SEE TWO MORE ARRIVE +3997-180297-0003-1837: DURING THIS REMARK MARGUERITE LOOKED AT ME ATTENTIVELY +3997-180297-0004-1838: MY DEAR PRUDENCE I ANSWERED YOU DO NOT KNOW WHAT YOU ARE SAYING +3997-180297-0005-1839: YES BUT BESIDES NOT WISHING TO PUT YOU OUT I WAS SURE THAT IF YOU CAME AS FAR AS MY DOOR YOU WOULD WANT TO COME UP AND AS I COULD NOT LET YOU I DID NOT WISH TO LET YOU GO AWAY BLAMING ME FOR SAYING NO +3997-180297-0006-1840: BECAUSE I AM WATCHED AND THE LEAST SUSPICION MIGHT (DO->TO) ME THE GREATEST HARM +3997-180297-0007-1841: IS THAT REALLY THE ONLY REASON +3997-180297-0008-1842: IF THERE WERE ANY OTHER I WOULD TELL YOU FOR WE ARE NOT TO HAVE ANY SECRETS FROM ONE ANOTHER NOW +3997-180297-0009-1843: (HONESTLY DO->ON THE SUIT) YOU CARE FOR ME A LITTLE A GREAT DEAL +3997-180297-0010-1844: I FANCIED FOR A MOMENT THAT I MIGHT GIVE MYSELF THAT HAPPINESS FOR SIX MONTHS YOU WOULD NOT HAVE IT YOU INSISTED ON KNOWING THE MEANS +3997-180297-0011-1845: WELL GOOD HEAVENS THE MEANS WERE EASY ENOUGH TO GUESS +3997-180297-0012-1846: I LISTENED AND I GAZED AT MARGUERITE WITH ADMIRATION +3997-180297-0013-1847: WHEN (I->THEY) THOUGHT THAT THIS MARVELLOUS CREATURE WHOSE FEET I HAD ONCE LONGED TO KISS WAS WILLING TO LET ME TAKE MY PLACE IN HER THOUGHTS MY PART IN HER LIFE AND THAT I WAS NOT YET CONTENT WITH WHAT SHE GAVE ME I ASKED IF MAN'S DESIRE (HAS->HAD) INDEED LIMITS WHEN SATISFIED AS PROMPTLY AS MINE HAD BEEN IT REACHED AFTER SOMETHING FURTHER +3997-180297-0014-1848: TRULY SHE CONTINUED WE POOR CREATURES OF CHANCE HAVE FANTASTIC (DESIRES->DESIRE) AND INCONCEIVABLE LOVES +3997-180297-0015-1849: WE ARE NOT ALLOWED TO HAVE HEARTS UNDER PENALTY OF BEING HOOTED DOWN AND OF RUINING OUR CREDIT +3997-180297-0016-1850: WE NO LONGER BELONG TO OURSELVES +3997-180297-0017-1851: WE STAND FIRST IN THEIR SELF ESTEEM LAST IN THEIR ESTEEM +3997-180297-0018-1852: NEVER (DO THEY->DID HE) GIVE YOU ADVICE WHICH IS NOT LUCRATIVE +3997-180297-0019-1853: IT MEANS LITTLE ENOUGH TO THEM THAT WE SHOULD HAVE TEN LOVERS EXTRA AS LONG AS THEY GET DRESSES OR A BRACELET OUT OF THEM AND THAT THEY CAN DRIVE (IN OUR CARRIAGE->AND ARE PARISH) FROM TIME TO TIME OR COME TO OUR 
BOX AT THE (THEATRE->FUTURE) +3997-180297-0020-1854: SUCH A MAN I FOUND IN THE DUKE BUT THE DUKE IS OLD AND (*->THE) OLD AGE NEITHER PROTECTS NOR CONSOLES +3997-180297-0021-1855: I THOUGHT I COULD ACCEPT THE LIFE WHICH HE OFFERED ME (BUT->OR) WHAT WOULD YOU HAVE +3997-180297-0022-1856: WHAT I LOVED IN YOU WAS NOT THE MAN WHO WAS BUT THE MAN WHO WAS GOING TO BE +3997-180297-0023-1857: MARGUERITE (TIRED->HIRED) OUT WITH (THIS->HIS) LONG CONFESSION THREW HERSELF BACK ON THE SOFA AND TO STIFLE A SLIGHT COUGH (PUT->PULL) UP HER HANDKERCHIEF TO HER LIPS AND FROM THAT TO HER EYES +3997-180297-0024-1858: MARGUERITE DO WITH ME AS YOU WILL I AM YOUR SLAVE YOUR DOG BUT IN THE NAME OF HEAVEN TEAR UP THE LETTER WHICH I WROTE TO YOU AND DO NOT MAKE ME LEAVE YOU TO MORROW IT WOULD KILL ME +3997-180297-0025-1859: MARGUERITE DREW THE LETTER FROM HER BOSOM AND HANDING IT TO ME WITH A SMILE OF INFINITE SWEETNESS SAID +3997-180297-0026-1860: HERE IT IS I HAVE BROUGHT IT BACK +3997-180297-0027-1861: I (TORE THE->TOLD A) LETTER INTO FRAGMENTS AND KISSED WITH TEARS THE HAND THAT GAVE IT TO ME +3997-180297-0028-1862: LOOK HERE PRUDENCE DO YOU KNOW WHAT HE WANTS SAID MARGUERITE +3997-180297-0029-1863: HE WANTS YOU TO FORGIVE HIM +3997-180297-0030-1864: ONE HAS (TO->TWO) BUT HE WANTS MORE THAN THAT WHAT THEN +3997-180297-0031-1865: I EMBRACED MARGUERITE UNTIL SHE WAS ALMOST STIFLED +3997-182399-0000-1779: (OL MISTAH->ALL MISTER) BUZZARD GRINNED +3997-182399-0001-1780: THIS SOUNDED LIKE ANOTHER STORY +3997-182399-0002-1781: HE WAS CURIOUS ABOUT THAT BLACK HEADED COUSIN OF (OL MISTAH->OLD MISTER) BUZZARD VERY CURIOUS INDEED +3997-182399-0003-1782: ANYWAY HE WOULD FIND OUT +3997-182399-0004-1783: PLEASE MISTER (BUZZARD->BOZARD) PLEASE TELL US THE STORY HE BEGGED +3997-182399-0005-1784: NOW (OL MISTAH->ALL MISTER) BUZZARD IS NATURALLY GOOD NATURED AND ACCOMMODATING AND WHEN PETER BEGGED SO HARD HE JUST COULDN'T FIND IT IN HIS HEART TO REFUSE +3997-182399-0006-1785: WAY BACK IN THE DAYS WHEN GRANDPAP BUZZARD HAD HIS (LIL->LITTLE) FALLING (OUT->ON) WITH (OL->OLD) KING EAGLE AND DONE (FLY->FLIES) SO HIGH HE (SCO'TCH->SCORCHED) THE FEATHERS (OFFEN->OFF IN) HIS (HAID->HEAD) HE HAD A COUSIN DID (GRANDPAP->GRANDPA) BUZZARD AND THIS COUSIN WAS (JES->JUST) NATURALLY LAZY AND NO COUNT +3997-182399-0007-1786: LIKE MOST NO COUNT PEOPLE HE USED TO MAKE A REGULAR (NUISANCE->NOTIONS) OF (HISSELF->HIS SELF) POKING HIS NOSE INTO (EV'YBODY'S->EVERYBODY'S) BUSINESS AND NEVER TENDING TO HIS OWN +3997-182399-0008-1787: WASN'T ANYTHING GOING ON THAT THIS TRIFLING MEMBER OF THE BUZZARD (FAM'LY->FAMILY) DIDN'T FIND OUT ABOUT (AND->A) MEDDLE IN HE COULD ASK (MO->MORE) QUESTIONS THAN PETER RABBIT CAN (AN->AND) ANYBODY THAT CAN DO THAT HAS GOT TO ASK A LOT +3997-182399-0009-1788: EVERYBODY LOOKED AT PETER AND LAUGHED +3997-182399-0010-1789: SO WE (UNS SIT->UNSTEAD) ON THE CHIMNEY TOPS WHENEVER (OL->OLD) JACK FROST GETS (TO STRAYING->THE STRAIN) DOWN WHERE HE HAVE NO BUSINESS +3997-182399-0011-1790: ONE DAY THIS NO COUNT TRIFLING COUSIN OF (GRANDPAP->GRANDPA) BUZZARD GET COLD IN HIS FEET +3997-182399-0012-1791: IT WAS ON (A LIL OL->THE LITTLE OLD) HOUSE A (LIL OL->LITTLE OLD) TUMBLE DOWN HOUSE +3997-182399-0013-1792: WHY HE (JES->JUST) STRETCH HIS (FOOL HAID->FULL HEAD) AS FAR DOWN (THAT->THE) CHIMNEY AS HE CAN (AN->AND) LISTEN (AN->AND) LISTEN +3997-182399-0014-1793: BUT HE DON'T MIND THAT +3997-182399-0015-1794: (WILL YO' ALLS->WELL YE ALL) PLEASE SPEAK A (LIL->LITTLE) LOUDER HE (HOLLER->HOLLERED) DOWN THE CHIMNEY (JES->JUST) LIKE THAT 
+3997-182399-0016-1795: YES (SAH->SAD) SHE (SHO'LY->SURELY) WAS (PLUMB->PLUM) SCARED +3997-182399-0017-1796: THEY (LIKE->LIKED) TO CHOKE THAT NO (COUNT BUZZARD->COMPASSER) TO DEATH +3997-182399-0018-1797: WHEN HE GET HOME HE TRY (AN->AND) TRY TO BRUSH THAT (SOOT->SUIT) OFF BUT IT DONE GET INTO THE SKIN (AN->AND) IT STAY THERE +3997-182399-0019-1798: A LITTLE SIGH OF SATISFACTION WENT (AROUND->ROUND) THE CIRCLE OF LISTENERS +3997-182399-0020-1799: IT WAS JUST AS GOOD AS ONE OF GRANDFATHER (FROG'S->FROGS) +4198-12259-0000-203: DRAW REACH FILL MIX GIVE IT ME WITHOUT WATER +4198-12259-0001-204: SO MY FRIEND SO WHIP ME OFF THIS GLASS NEATLY BRING ME HITHER SOME CLARET A FULL WEEPING GLASS TILL IT RUN OVER +4198-12259-0002-205: A CESSATION AND TRUCE WITH THIRST +4198-12259-0003-206: YOU HAVE (CATCHED->CAST) A COLD GAMMER YEA FORSOOTH SIR +4198-12259-0004-207: BY THE (BELLY->VALLEY) OF (SANCT->SAINT) BUFF LET US TALK OF OUR DRINK I NEVER DRINK (BUT AT->WITHOUT) MY HOURS LIKE THE POPE'S MULE +4198-12259-0005-208: WHICH WAS FIRST (THIRST OR->THOSE) DRINKING +4198-12259-0006-209: WHAT IT SEEMS I DO NOT DRINK BUT (BY->BUY) AN ATTORNEY +4198-12259-0007-210: DRINK ALWAYS AND YOU SHALL NEVER DIE +4198-12259-0008-211: IF I DRINK NOT I AM A GROUND DRY GRAVELLED AND SPENT I AM (STARK->START) DEAD WITHOUT DRINK AND MY SOUL READY TO FLY INTO SOME MARSH AMONGST FROGS THE SOUL NEVER DWELLS IN A DRY PLACE DROUTH (KILLS->KILL) IT +4198-12259-0009-212: HE DRINKS (IN VAIN->THEIR VEIN) THAT (FEELS->FILLS) NOT THE PLEASURE OF IT +4198-12259-0010-213: IT IS ENOUGH TO BREAK BOTH (GIRDS->GOOD) AND (PETREL->PETEL) +4198-12259-0011-214: WHAT DIFFERENCE IS THERE BETWEEN A BOTTLE AND A FLAGON +4198-12259-0012-215: BRAVELY AND WELL PLAYED UPON THE WORDS +4198-12259-0013-216: OUR FATHERS DRANK LUSTILY AND EMPTIED THEIR CANS +4198-12259-0014-217: WELL (CACKED->CAGLED) WELL SUNG +4198-12259-0015-218: COME LET US DRINK WILL YOU SEND NOTHING TO THE RIVER +4198-12259-0016-219: I (DRINK->DRANK) NO MORE THAN (A SPONGE->HIS SPINES) +4198-12259-0017-220: I DRINK LIKE A (TEMPLAR KNIGHT->TENT LAW NIGHT) +4198-12259-0018-221: AND I (TANQUAM SPONSUS->TEN QUALMS BONSES) +4198-12259-0019-222: AND I SICUT (TERRA SINE->TERRACE IN) AQUA +4198-12259-0020-223: GIVE ME A (SYNONYMON->SNYM) FOR A (GAMMON->GAMIN) OF BACON +4198-12259-0021-224: IT IS THE COMPULSORY OF DRINKERS IT IS A (PULLEY->POLY) +4198-12259-0022-225: A LITTLE RAIN (ALLAYS->A LAYS) A GREAT DEAL OF WIND LONG TIPPLING BREAKS (THE->THAT) THUNDER +4198-12259-0023-226: BUT IF THERE CAME SUCH LIQUOR (FROM->FOR) MY (BALLOCK WOULD->BALLIC WILL) YOU NOT WILLINGLY THEREAFTER SUCK THE (UDDER->UTTER) WHENCE IT ISSUED +4198-12259-0024-227: HERE PAGE FILL +4198-12259-0025-228: I APPEAL FROM THIRST AND DISCLAIM ITS (JURISDICTION->JURIS DIXON) +4198-12259-0026-229: I WAS WONT (HERETOFORE->HERE) TO (*->FORE TO) DRINK OUT ALL BUT NOW I LEAVE NOTHING +4198-12259-0027-230: (HEYDAY->HEY THEE) HERE (ARE TRIPES->A TRITE) FIT FOR (OUR SPORT->OURSPORT) AND IN EARNEST EXCELLENT (GODEBILLIOS->GO TO BE YOURS) OF THE DUN OX YOU KNOW WITH THE BLACK (STREAK->STREET) +4198-12259-0028-231: (O->OH) FOR GOD'S SAKE LET US (LASH->LAST) THEM SOUNDLY YET (THRIFTILY->DRIFTILY) +4198-12259-0029-232: SPARROWS (WILL->WOULD) NOT EAT UNLESS YOU (BOB->BOBBED) THEM ON THE TAIL NOR CAN I DRINK IF I BE NOT FAIRLY SPOKE TO +4198-12259-0030-233: (HO->OH) THIS (WILL BANG IT SOUNDLY->WAS BENNETT'S ONLY) +4198-12259-0031-234: BUT THIS (SHALL BANISH IT->OUR BANACY) UTTERLY +4198-12259-0032-235: LET US WIND OUR HORNS BY THE SOUND OF 
FLAGONS AND BOTTLES AND CRY ALOUD THAT WHOEVER HATH LOST HIS THIRST COME (NOT->NIGH) HITHER TO SEEK IT +4198-12259-0033-236: THE GREAT GOD MADE THE PLANETS AND WE MAKE THE PLATTERS NEAT +4198-12259-0034-237: APPETITE COMES WITH EATING SAYS (ANGESTON->ANGERSON) BUT (THE THIRST->THAT THOSE) GOES AWAY WITH DRINKING +4198-12259-0035-238: I HAVE A REMEDY AGAINST THIRST QUITE CONTRARY TO THAT WHICH IS GOOD AGAINST (THE BITING->ABIDING) OF A MAD DOG +4198-12259-0036-239: WHITE (WINE->WHY) HERE WINE BOYS +4198-12259-0037-240: (O LACHRYMA CHRISTI->OH LACK REMAR CHRISTIE) IT IS OF THE BEST GRAPE +4198-12259-0038-241: (I'FAITH->I FAITH) PURE GREEK GREEK O THE FINE WHITE WINE +4198-12259-0039-242: THERE IS NO ENCHANTMENT NOR CHARM THERE EVERY ONE OF YOU HATH SEEN IT +4198-12259-0040-243: MY (PRENTICESHIP->PRENTICE IT) IS OUT (I AM->I'M) A FREE MAN AT THIS TRADE +4198-12259-0041-244: (I SHOULD SAY->AS YOU SEE) MASTER (PAST->PASS) +4198-12259-0042-245: (O->OH) THE DRINKERS THOSE THAT ARE A DRY (O->OH) POOR THIRSTY SOULS +4198-12259-0043-246: CLEAR OFF NEAT SUPERNACULUM +4198-12281-0000-187: ALTHOUGH THE PLAGUE WAS THERE IN THE MOST PART OF ALL THE HOUSES THEY NEVERTHELESS ENTERED EVERYWHERE THEN PLUNDERED AND CARRIED AWAY ALL THAT WAS WITHIN AND YET FOR ALL THIS NOT ONE OF THEM TOOK ANY HURT WHICH IS A MOST WONDERFUL CASE +4198-12281-0001-188: I BESEECH YOU THINK UPON IT +4198-12281-0002-189: NEVERTHELESS AT ALL (ADVENTURES->VENTURES) THEY RANG THE BELLS (AD CAPITULUM CAPITULANTES->AT CAPITULAM CAPITULAT DAYS) +4198-12281-0003-190: BY THE VIRTUE OF GOD WHY DO NOT YOU SING (PANNIERS->TEN YEARS) FAREWELL VINTAGE IS DONE +4198-12281-0004-191: BY THE BELLY OF (SANCT->SAINT) JAMES WHAT SHALL WE POOR DEVILS DRINK THE WHILE +4198-12281-0005-192: LORD GOD (DA MIHI POTUM->THOU ME HE POT EM) +4198-12281-0006-193: LET HIM BE CARRIED TO PRISON FOR TROUBLING THE DIVINE SERVICE +4198-12281-0007-194: WHEREFORE IS IT THAT OUR DEVOTIONS WERE INSTITUTED TO BE SHORT IN THE TIME OF HARVEST AND VINTAGE AND LONG IN THE ADVENT (AND->IN) ALL THE WINTER +4198-12281-0008-195: HARK YOU MY MASTERS YOU THAT LOVE THE WINE (COP'S->COPSE) BODY FOLLOW ME FOR (SANCT ANTHONY->SAINT AUNT ANY) BURN ME AS FREELY AS A FAGGOT (IF->*) THEY GET LEAVE TO TASTE ONE DROP OF THE LIQUOR THAT (WILL->WOULD) NOT NOW COME AND FIGHT FOR RELIEF OF THE VINE +4198-12281-0009-196: TO OTHERS AGAIN HE UNJOINTED THE (SPONDYLES->SPAWN MULES) OR KNUCKLES OF THE NECK (DISFIGURED->THIS FIGURED) THEIR CHAPS GASHED THEIR FACES MADE THEIR CHEEKS HANG FLAPPING ON THEIR CHIN AND SO SWINGED AND (BALAMMED->BLAMMED) THEM THAT THEY FELL DOWN BEFORE HIM LIKE HAY BEFORE (A MOWER->HIM OVER) +4198-12281-0010-197: TO SOME (WITH A SMART SOUSE->WOULD THEY SMARE SOUS) ON (THE EPIGASTER->THEIR EBERGASTER) HE (WOULD->WILL) MAKE (THEIR MIDRIFF SWAG->THEM MIDRIFTS WAG) THEN REDOUBLING THE BLOW GAVE THEM SUCH A (HOMEPUSH->HOME PUSH) ON THE NAVEL THAT HE MADE THEIR PUDDINGS TO GUSH OUT +4198-12281-0011-198: BELIEVE THAT IT WAS THE MOST HORRIBLE SPECTACLE THAT EVER (ONE->WON) SAW +4198-12281-0012-199: (O->ALL) THE HOLY LADY (NYTOUCH->KNIGHT) SAID ONE THE GOOD (SANCTESS->SANCTIS) O OUR LADY OF (SUCCOURS->SECURUS) SAID ANOTHER HELP HELP +4198-12281-0013-200: SOME DIED WITHOUT SPEAKING OTHERS SPOKE WITHOUT DYING SOME DIED IN SPEAKING OTHERS SPOKE (IN->AND) DYING +4198-12281-0014-201: CAN YOU TELL WITH WHAT INSTRUMENTS THEY DID IT +4198-12281-0015-202: IN THE (MEANTIME FRIAR->MEAN TIME FRIED) JOHN WITH HIS FORMIDABLE (BATON->BUT TIME) OF THE CROSS GOT TO THE BREACH WHICH THE ENEMIES HAD MADE 
AND THERE STOOD TO SNATCH UP THOSE THAT (ENDEAVOURED->ENDEAVORED) TO ESCAPE +4198-61336-0000-247: IT IS SIGNIFICANT TO NOTE IN THIS CONNECTION THAT THE NEW KING WAS AN UNSWERVING ADHERENT OF THE CULT OF (ASHUR->ASHER) BY THE (ADHERENTS->ADHERENCE) OF WHICH HE WAS PROBABLY STRONGLY SUPPORTED +4198-61336-0001-248: AT THE BEGINNING OF HIS REIGN THERE WAS MUCH SOCIAL DISCONTENT AND SUFFERING +4198-61336-0002-249: WELL MIGHT (SHARDURIS->SHOW DUERS) EXCLAIM IN THE WORDS OF THE PROPHET WHERE IS THE KING OF (ARPAD->ARPE) +4198-61336-0003-250: (TIGLATH PILESER->DICK LAUGHED PLEASURE) HOWEVER CROSSED THE (EUPHRATES->EUPHATEES) AND MOVING NORTHWARD DELIVERED AN UNEXPECTED ATTACK ON THE (URARTIAN->GRACIAN) ARMY (IN QUMMUKH->AND CUMICU) +4198-61336-0004-251: A FIERCE BATTLE ENSUED AND ONE OF (ITS->HIS) DRAMATIC INCIDENTS WAS A SINGLE COMBAT BETWEEN THE RIVAL KINGS +4198-61336-0005-252: AN ATTEMPT WAS MADE TO CAPTURE KING (SHARDURIS->SHADORUS) WHO (LEAPT->LEAPED) FROM HIS CHARIOT AND MADE HASTY ESCAPE ON HORSEBACK HOTLY PURSUED IN THE GATHERING DARKNESS BY AN ASSYRIAN CONTINGENT OF CAVALRY +4198-61336-0006-253: DESPITE THE (BLOW->BLUE) DEALT AGAINST (URARTU->URITU) ASSYRIA DID NOT IMMEDIATELY REGAIN POSSESSION OF NORTH SYRIA +4198-61336-0007-254: THE SHIFTY (MATI ILU->MANTI ILIU) EITHER CHERISHED THE HOPE THAT (SHARDURIS->SHALL DORIS) WOULD RECOVER STRENGTH AND AGAIN (INVADE->IN VAIN) NORTH (SYRIA->ASSYRIA) OR THAT HE MIGHT HIMSELF ESTABLISH AN EMPIRE IN THAT REGION +4198-61336-0008-255: (TIGLATH PILESER->T GLASS BE LEISURE) HAD THEREFORE TO MARCH WESTWARD AGAIN +4198-61336-0009-256: FOR THREE YEARS HE CONDUCTED VIGOROUS CAMPAIGNS IN THE WESTERN LAND WHERE HE MET WITH VIGOROUS RESISTANCE +4198-61336-0010-257: (ARPAD->OUR PAD) WAS CAPTURED AND (MATI ILU->MET TO ILL YOU) DEPOSED AND PROBABLY PUT TO DEATH +4198-61336-0011-258: ONCE AGAIN THE HEBREWS CAME INTO CONTACT WITH (ASSYRIA->THE SYRIA) +4198-61336-0012-259: (ITS FALL MAY->IT'S FOR ME) NOT (HAVE->HAV) BEEN UNCONNECTED WITH THE TREND OF EVENTS IN ASSYRIA DURING THE CLOSING YEARS OF THE MIDDLE EMPIRE +4198-61336-0013-260: (JEHOASH->JO ASH) THE GRANDSON OF (JEHU->JEHOV) HAD ACHIEVED SUCCESSES IN CONFLICT WITH DAMASCUS +4198-61336-0014-261: SIX MONTHS (AFTERWARDS->AFTERWARD) HE WAS ASSASSINATED BY (SHALLUM->CELEM) +4198-61336-0015-262: THIS USURPER HELD SWAY AT SAMARIA FOR ONLY A MONTH +4198-61336-0016-263: NO RESISTANCE WAS POSSIBLE ON THE PART OF (MENAHEM->MANY HIM) THE USURPER (WHO WAS->WHOSE) PROBABLY READY TO WELCOME THE ASSYRIAN CONQUEROR SO THAT BY ARRANGING AN ALLIANCE HE MIGHT SECURE HIS OWN POSITION +4198-61336-0017-264: (TIGLATH PILESER->TAKE LAST PLEASE HER) NEXT OPERATED AGAINST THE (MEDIAN->MEDIUM) AND OTHER HILL TRIBES IN THE (NORTH EAST->NORTHEAST) +4198-61336-0018-265: HE OVERTHREW BUILDINGS DESTROYED ORCHARDS AND TRANSPORTED TO NINEVEH THOSE OF THE INHABITANTS HE HAD NOT PUT TO THE SWORD WITH ALL THE LIVE STOCK HE COULD LAY HANDS ON +4198-61336-0019-266: (THUS->THIS) WAS (URARTU->URITU) CRIPPLED AND HUMILIATED IT NEVER REGAINED ITS (FORMER->FORM OF) PRESTIGE AMONG THE NORTHERN STATES +4198-61336-0020-267: IN THE FOLLOWING YEAR (TIGLATH PILESER->TIC LAUGH BELLEGER) RETURNED TO SYRIA +4198-61336-0021-268: (MENAHEM->MANY HIM) KING OF ISRAEL HAD DIED AND WAS SUCCEEDED BY HIS SON (PEKAHIAH->PECAH) +4198-61336-0022-269: (JUDAH->JULIA) HAD TAKEN ADVANTAGE OF THE DISTURBED CONDITIONS IN ISRAEL TO ASSERT ITS INDEPENDENCE +4198-61336-0023-270: HE CONDEMNED ISRAEL FOR ITS IDOLATRIES AND CRIED +4198-61336-0024-271: FOR (THUS->THIS) SAITH THE LORD 
UNTO THE HOUSE OF ISRAEL SEEK YE ME (AND->A) YE (SHALL->TO) LIVE HAVE YE OFFERED UNTO ME SACRIFICES AND OFFERINGS IN THE WILDERNESS FORTY YEARS (O->OR) HOUSE OF ISRAEL +4198-61336-0025-272: THE REMNANT OF THE PHILISTINES SHALL PERISH +4198-61336-0026-273: ISRAEL WAS ALSO DEALT WITH +4198-61336-0027-274: HE SWEPT THROUGH ISRAEL LIKE A HURRICANE +4198-61336-0028-275: THE (PHILISTINES->FURTHER STEAMS) AND THE ARABIANS OF THE DESERT WERE ALSO SUBDUED +4198-61336-0029-276: HE INVADED BABYLONIA +4198-61336-0030-277: (UKINZER->A KINDRED) TOOK REFUGE IN HIS CAPITAL SHAPIA WHICH HELD OUT SUCCESSFULLY ALTHOUGH THE SURROUNDING COUNTRY WAS RAVAGED AND DESPOILED +4294-14317-0000-1866: AS I THOUGHT THAT THIS WAS DUE TO SOME FAULT IN THE EARTH I WANTED TO MAKE THESE FIRST EXPERIMENTS BEFORE (I->AND) UNDERTOOK MY PERSEUS +4294-14317-0001-1867: WHEN I SAW (THAT->*) THIS BUST CAME OUT SHARP AND CLEAN I (SET->SAID) AT ONCE TO CONSTRUCT A LITTLE FURNACE IN THE WORKSHOP ERECTED FOR ME BY THE DUKE AFTER MY OWN PLANS AND DESIGN IN THE HOUSE WHICH THE DUKE HAD GIVEN ME +4294-14317-0002-1868: IT WAS AN EXTREMELY DIFFICULT TASK AND I WAS ANXIOUS TO OBSERVE ALL THE NICETIES OF ART WHICH I HAD LEARNED SO AS NOT TO LAPSE INTO SOME ERROR +4294-14317-0003-1869: I IN MY TURN FEEL THE SAME DESIRE AND HOPE TO PLAY MY PART LIKE THEM THEREFORE MY LORD GIVE ME THE LEAVE TO GO +4294-14317-0004-1870: BUT BEWARE OF LETTING (BANDINELLO->BEND NELLO) QUIT YOU RATHER BESTOW UPON HIM ALWAYS MORE THAN HE DEMANDS FOR IF HE GOES INTO FOREIGN PARTS HIS IGNORANCE IS SO PRESUMPTUOUS THAT HE IS JUST THE MAN TO DISGRACE OUR MOST ILLUSTRIOUS SCHOOL +4294-14317-0005-1871: I (ASK->ASKED) NO FURTHER REWARD FOR MY (LABOURS->LABORS) UP TO THIS TIME THAN THE GRACIOUS FAVOUR OF YOUR MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0006-1872: THEN I THANKED HIM AND SAID I HAD NO GREATER DESIRE THAN TO SHOW THOSE ENVIOUS FOLK THAT I HAD IT IN ME TO EXECUTE THE PROMISED WORK +4294-14317-0007-1873: I HAD BETTER LOOK TO MY CONDUCT FOR IT HAD COME TO HIS EARS THAT I RELIED UPON HIS FAVOUR TO TAKE IN FIRST ONE MAN AND THEN ANOTHER +4294-14317-0008-1874: I BEGGED HIS MOST (ILLUSTRIOUS->LUSTRIOUS) EXCELLENCY TO NAME A SINGLE PERSON WHOM I HAD EVER TAKEN IN +4294-14317-0009-1875: I SAID MY LORD I THANK YOU AND BEG YOU TO CONDESCEND SO FAR AS TO LISTEN TO FOUR WORDS IT IS TRUE THAT HE LENT ME A PAIR OF OLD SCALES TWO (ANVILS->AMBILS) AND THREE LITTLE HAMMERS WHICH ARTICLES I BEGGED HIS (WORKMAN GIORGIO DA CORTONA->WORKMEN GEORGIO DECORTUNA) FIFTEEN DAYS AGO TO FETCH BACK +4294-14317-0010-1876: (GIORGIO->GEORGIO) CAME FOR THEM HIMSELF +4294-14317-0011-1877: I HOPE TO PROVE ON WHAT ACCOUNT THAT SCOUNDREL TRIES TO BRING ME INTO DISGRACE +4294-14317-0012-1878: WHEN HE HAD HEARD THIS SPEECH THE DUKE ROSE UP IN ANGER AND SENT FOR BERNARDONE WHO WAS FORCED TO TAKE FLIGHT AS FAR AS VENICE HE AND ANTONIO (LANDI->LANDEE) WITH HIM +4294-14317-0013-1879: YOU HAD BETTER PUT THIS TO THE PROOF AND I WILL GO AT ONCE TO THE (BARGELLO->BARGENO) +4294-14317-0014-1880: I AM WILLING TO ENTER INTO COMPETITION WITH THE ANCIENTS AND FEEL ABLE TO SURPASS THEM FOR SINCE THOSE EARLY DAYS IN WHICH I MADE THE MEDALS OF POPE CLEMENT I HAVE LEARNED SO MUCH THAT I CAN NOW PRODUCE FAR BETTER PIECES OF THE KIND I THINK I CAN ALSO OUTDO THE COINS I STRUCK FOR DUKE (ALESSANDRO->ALISANDRO) WHICH (ARE->IS) STILL HELD IN HIGH ESTEEM IN LIKE MANNER I COULD MAKE FOR YOU LARGE PIECES OF GOLD AND SILVER PLATE AS I DID SO OFTEN FOR THAT NOBLE MONARCH KING (FRANCIS->FRANCES) OF FRANCE THANKS TO THE GREAT 
CONVENIENCES HE ALLOWED ME WITHOUT EVER LOSING TIME FOR THE EXECUTION OF COLOSSAL STATUES OR OTHER WORKS OF THE (SCULPTORS->SCULPTOR'S) CRAFT +4294-14317-0015-1881: AFTER SEVERAL MONTHS WERE WASTED AND PIERO WOULD NEITHER WORK NOR PUT MEN TO WORK UPON THE PIECE I MADE HIM GIVE IT BACK +4294-14317-0016-1882: AMONG ARTISTS CERTAIN ENRAGED SCULPTORS LAUGHED AT ME AND CALLED ME THE NEW SCULPTOR +4294-14317-0017-1883: NOW I HOPE TO SHOW THEM THAT I AM AN OLD SCULPTOR IF GOD SHALL GRANT ME THE BOON OF FINISHING MY PERSEUS FOR THAT NOBLE PIAZZA OF HIS MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0018-1884: HAVING THIS EXCELLENT RESOLVE IN HEART I REACHED MY HOME +4294-32859-0000-1942: WYLDER WAS RATHER SURLY AFTER THE LADIES HAD FLOATED AWAY FROM THE SCENE AND HE DRANK HIS LIQUOR DOGGEDLY +4294-32859-0001-1943: IT WAS HIS FANCY I SUPPOSE TO REVIVE CERTAIN SENTIMENTAL RELATIONS WHICH HAD IT MAY BE ONCE EXISTED BETWEEN HIM AND MISS LAKE AND HE WAS A PERSON OF THAT COMBATIVE TEMPERAMENT THAT MAGNIFIES AN OBJECT IN PROPORTION AS ITS PURSUIT IS THWARTED +4294-32859-0002-1944: THE STORY OF FRIDOLIN AND (RETZCH'S->WRETCHES) PRETTY (OUTLINES->OUTLINE) +4294-32859-0003-1945: SIT DOWN BESIDE ME AND I'LL TELL YOU THE STORY +4294-32859-0004-1946: HE ASSISTED AT IT BUT TOOK NO PART AND IN FACT WAS LISTENING TO THAT OTHER CONVERSATION WHICH SOUNDED WITH ITS PLEASANT GABBLE AND LAUGHTER LIKE A LITTLE MUSICAL TINKLE OF BELLS IN THE DISTANCE +4294-32859-0005-1947: BUT HONEST MARK FORGOT THAT YOUNG LADIES DO NOT ALWAYS COME OUT QUITE ALONE AND JUMP UNASSISTED INTO THEIR VEHICLES +4294-35475-0000-1885: BUT THE MIDDLE (SON->SUN) WAS LITTLE AND LORN HE WAS NEITHER DARK NOR FAIR HE WAS NEITHER HANDSOME NOR STRONG +4294-35475-0001-1886: THROWING HIMSELF ON HIS KNEES BEFORE THE KING HE CRIED (OH->O) ROYAL SIRE BESTOW UPON ME ALSO A SWORD AND A STEED THAT I MAY UP AND AWAY TO FOLLOW MY BRETHREN +4294-35475-0002-1887: BUT THE KING LAUGHED HIM TO SCORN THOU A SWORD HE QUOTH +4294-35475-0003-1888: IN SOOTH THOU SHALT HAVE ONE BUT IT SHALL BE ONE BEFITTING THY MAIDEN SIZE AND COURAGE IF SO SMALL A WEAPON CAN BE FOUND IN ALL MY KINGDOM +4294-35475-0004-1889: FORTHWITH THE GRINNING (JESTER->GESTURE) BEGAN SHRIEKING WITH LAUGHTER SO THAT THE BELLS UPON HIS MOTLEY CAP WERE ALL SET A JANGLING +4294-35475-0005-1890: I DID BUT LAUGH TO THINK THE (SWORD->SORT) OF (ETHELRIED->EPLORRIED) HAD BEEN SO QUICKLY FOUND RESPONDED THE JESTER AND HE POINTED TO THE SCISSORS HANGING FROM THE TAILOR'S GIRDLE +4294-35475-0006-1891: ONE NIGHT AS HE LAY IN A DEEP FOREST (TOO->TWO) UNHAPPY TO SLEEP HE HEARD A NOISE NEAR AT HAND IN THE BUSHES +4294-35475-0007-1892: THOU SHALT HAVE THY LIBERTY HE CRIED EVEN THOUGH THOU SHOULDST (REND->RUN) ME IN PIECES THE MOMENT THOU ART FREE +4294-35475-0008-1893: (IT->HE) HAD (*->HID IT) SUDDENLY DISAPPEARED AND IN ITS PLACE STOOD A BEAUTIFUL FAIRY WITH FILMY WINGS WHICH SHONE LIKE RAINBOWS IN THE MOONLIGHT +4294-35475-0009-1894: AT THIS MOMENT THERE WAS A DISTANT RUMBLING AS OF THUNDER TIS THE OGRE CRIED THE FAIRY WE MUST HASTEN +4294-35475-0010-1895: SCISSORS GROW A GIANT'S HEIGHT AND SAVE US FROM THE OGRE'S MIGHT +4294-35475-0011-1896: HE COULD SEE THE OGRE STANDING POWERLESS TO HURT HIM ON THE OTHER SIDE OF THE CHASM AND GNASHING HIS TEETH EACH ONE OF WHICH WAS AS BIG AS A (MILLSTON->MILLSTONE) +4294-35475-0012-1897: THE SIGHT WAS SO TERRIBLE THAT HE TURNED ON HIS HEEL AND FLED AWAY AS FAST AS HIS FEET COULD CARRY HIM +4294-35475-0013-1898: THOU SHALT NOT BE LEFT A PRISONER IN THIS DISMAL SPOT WHILE I HAVE THE POWER 
TO HELP THEE +4294-35475-0014-1899: HE LIFTED THE SCISSORS AND WITH ONE STROKE DESTROYED THE WEB AND GAVE THE FLY (ITS FREEDOM->TO READ THEM) +4294-35475-0015-1900: A FAINT GLIMMER OF LIGHT ON THE OPPOSITE WALL SHOWS ME THE KEYHOLE +4294-35475-0016-1901: THE PRINCE SPENT ALL THE FOLLOWING TIME UNTIL MIDNIGHT TRYING TO THINK OF A SUITABLE VERSE TO SAY TO THE SCISSORS +4294-35475-0017-1902: AS HE UTTERED THE WORDS THE SCISSORS LEAPED OUT OF HIS HAND AND BEGAN TO CUT THROUGH THE WOODEN SHUTTERS AS EASILY AS THROUGH A CHEESE +4294-35475-0018-1903: IN (A->THE) VERY SHORT TIME THE PRINCE (HAD->AND) CRAWLED THROUGH THE OPENING +4294-35475-0019-1904: WHILE HE STOOD LOOKING (AROUND->ROUND) HIM IN BEWILDERMENT A FIREFLY (ALIGHTED ON->LIGHTED DOWN) HIS (ARM->HEART) FLASHING ITS LITTLE LANTERN IN THE PRINCE'S FACE IT CRIED THIS WAY MY FRIEND THE FLY SENT ME TO GUIDE YOU TO A PLACE OF SAFETY +4294-35475-0020-1905: WHAT IS TO BECOME OF ME CRIED THE POOR PEASANT +4294-35475-0021-1906: MY GRAIN MUST FALL (AND->IN) ROT IN THE FIELD FROM (OVERRIPENESS->OVER RIPENESS) BECAUSE I HAVE NOT THE STRENGTH TO RISE AND HARVEST IT THEN INDEED MUST WE ALL STARVE +4294-35475-0022-1907: THE (GRANDAME->GRAND DAME) WHOM HE SUPPLIED WITH FAGOTS THE MERCHANT WHOM HE RESCUED FROM ROBBERS THE KING'S (COUNCILLOR->COUNSELLOR) TO WHOM HE GAVE AID ALL BECAME HIS FRIENDS UP AND DOWN THE LAND TO BEGGAR OR LORD HOMELESS WANDERER OR HIGH BORN DAME HE GLADLY GAVE UNSELFISH SERVICE ALL UNSOUGHT AND SUCH AS HE HELPED STRAIGHTWAY BECAME HIS FRIENDS +4294-35475-0023-1908: TO HIM WHO COULD BRING HER BACK TO HER FATHER'S CASTLE SHOULD BE GIVEN THE THRONE AND KINGDOM AS WELL AS THE PRINCESS HERSELF SO FROM FAR AND NEAR INDEED FROM ALMOST EVERY COUNTRY UNDER THE SUN CAME KNIGHTS AND PRINCES TO FIGHT THE OGRE +4294-35475-0024-1909: AMONG THOSE WHO DREW BACK WERE (ETHELRIED'S->EPILRIED'S) BROTHERS THE THREE THAT WERE DARK AND THE THREE THAT WERE FAIR +4294-35475-0025-1910: BUT (ETHELRIED HEEDED->ETHELRED HE DID) NOT THEIR TAUNTS +4294-35475-0026-1911: SO THEY ALL CRIED OUT LONG AND LOUD LONG LIVE THE PRINCE PRINCE (CISEAUX->ISAU) +4294-9934-0000-1912: HE FELT (WHAT->WITH) THE EARTH MAY POSSIBLY FEEL AT THE MOMENT WHEN IT IS TORN OPEN WITH THE IRON IN ORDER THAT GRAIN MAY BE DEPOSITED WITHIN IT IT FEELS ONLY THE WOUND THE QUIVER OF THE GERM (AND->*) THE JOY OF THE FRUIT ONLY (ARRIVE->ARRIVES) LATER +4294-9934-0001-1913: HE HAD BUT JUST ACQUIRED A FAITH MUST HE THEN (REJECT IT->REJECTED) ALREADY +4294-9934-0002-1914: HE AFFIRMED TO HIMSELF THAT HE WOULD NOT HE DECLARED TO HIMSELF THAT HE WOULD NOT DOUBT AND HE BEGAN TO DOUBT IN SPITE OF HIMSELF +4294-9934-0003-1915: TO STAND BETWEEN TWO RELIGIONS FROM ONE OF WHICH YOU HAVE NOT AS YET EMERGED AND ANOTHER INTO WHICH YOU HAVE NOT YET ENTERED IS INTOLERABLE AND TWILIGHT IS PLEASING ONLY TO BAT LIKE SOULS +4294-9934-0004-1916: MARIUS WAS CLEAR EYED AND HE REQUIRED THE TRUE LIGHT +4294-9934-0005-1917: THE HALF LIGHTS OF DOUBT PAINED HIM +4294-9934-0006-1918: WHATEVER MAY HAVE BEEN HIS DESIRE TO REMAIN WHERE HE WAS HE COULD NOT HALT THERE HE WAS IRRESISTIBLY CONSTRAINED TO CONTINUE TO ADVANCE TO EXAMINE TO THINK TO MARCH FURTHER +4294-9934-0007-1919: HE FEARED AFTER HAVING TAKEN SO MANY STEPS WHICH HAD BROUGHT HIM NEARER TO HIS FATHER TO NOW TAKE A STEP WHICH SHOULD ESTRANGE HIM FROM THAT FATHER +4294-9934-0008-1920: HIS DISCOMFORT WAS AUGMENTED BY ALL THE REFLECTIONS WHICH OCCURRED TO HIM +4294-9934-0009-1921: IN THE TROUBLED STATE OF HIS CONSCIENCE HE NO LONGER THOUGHT OF CERTAIN SERIOUS SIDES OF 
EXISTENCE +4294-9934-0010-1922: THEY SOON ELBOWED HIM ABRUPTLY +4294-9934-0011-1923: REQUEST (COURFEYRAC->COURFEREK) TO COME AND TALK WITH ME SAID MARIUS +4294-9934-0012-1924: WHAT IS TO BECOME OF YOU SAID COURFEYRAC +4294-9934-0013-1925: WHAT ARE YOU GOING TO DO I DO NOT KNOW +4294-9934-0014-1926: SILVER GOLD HERE IT IS +4294-9934-0015-1927: YOU WILL THEN HAVE ONLY A PAIR OF TROUSERS A WAISTCOAT A HAT AND A COAT AND MY BOOTS +4294-9934-0016-1928: THAT WILL BE ENOUGH +4294-9934-0017-1929: NO IT IS NOT GOOD WHAT (WILL YOU->WE) DO AFTER THAT +4294-9934-0018-1930: DO YOU KNOW GERMAN NO +4294-9934-0019-1931: IT IS BADLY PAID WORK BUT ONE CAN LIVE BY IT +4294-9934-0020-1932: THE CLOTHES DEALER WAS SENT FOR +4294-9934-0021-1933: HE PAID TWENTY FRANCS FOR THE CAST OFF GARMENTS THEY WENT TO THE (WATCHMAKER'S->WATCHMAKERS) +4294-9934-0022-1934: HE BOUGHT THE WATCH FOR FORTY FIVE FRANCS +4294-9934-0023-1935: HELLO I HAD FORGOTTEN THAT SAID MARIUS +4294-9934-0024-1936: THE LANDLORD PRESENTED HIS BILL WHICH HAD TO BE PAID ON THE SPOT +4294-9934-0025-1937: I HAVE TEN FRANCS LEFT SAID MARIUS +4294-9934-0026-1938: THAT WILL BE SWALLOWING A TONGUE VERY FAST OR A HUNDRED SOUS VERY SLOWLY +4294-9934-0027-1939: ONE MORNING ON HIS RETURN FROM THE (LAW->LAST) SCHOOL MARIUS FOUND A LETTER FROM HIS AUNT AND THE SIXTY (PISTOLES->PISTOL) THAT IS TO SAY SIX HUNDRED FRANCS IN GOLD (IN->AND) A SEALED BOX +4294-9934-0028-1940: MARIUS SENT BACK (THE->FOR) THIRTY LOUIS TO HIS AUNT WITH (A->THE) RESPECTFUL LETTER IN WHICH HE STATED THAT HE HAD SUFFICIENT MEANS OF SUBSISTENCE AND THAT HE SHOULD BE ABLE THENCEFORTH TO SUPPLY ALL HIS NEEDS +4294-9934-0029-1941: AT THAT MOMENT HE HAD THREE FRANCS LEFT +4350-10919-0000-2716: HE PERCEIVED THAT IT WAS NO GOOD TALKING TO THE OLD MAN AND THAT THE PRINCIPAL PERSON IN THE HOUSE WAS THE MOTHER +4350-10919-0001-2717: BEFORE HER HE DECIDED TO SCATTER HIS PEARLS +4350-10919-0002-2718: THE PRINCESS WAS DISTRACTED AND DID NOT KNOW WHAT TO DO SHE FELT SHE HAD SINNED AGAINST KITTY +4350-10919-0003-2719: WELL DOCTOR DECIDE OUR (FATE->PHAETON) SAID THE PRINCESS TELL ME EVERYTHING +4350-10919-0004-2720: IS (THERE->THEIR) HOPE SHE MEANT TO SAY BUT HER LIPS QUIVERED AND SHE COULD NOT UTTER THE QUESTION WELL DOCTOR +4350-10919-0005-2721: AS YOU PLEASE THE PRINCESS WENT OUT WITH A SIGH +4350-10919-0006-2722: THE FAMILY DOCTOR RESPECTFULLY CEASED IN THE MIDDLE OF HIS OBSERVATIONS +4350-10919-0007-2723: AND THERE ARE INDICATIONS (MALNUTRITION->MALTRICIAN) NERVOUS EXCITABILITY AND SO ON +4350-10919-0008-2724: THE QUESTION (STANDS->SENDS) THUS IN PRESENCE OF INDICATIONS OF (TUBERCULOUS->TUBERK AT THIS) PROCESS WHAT IS TO BE DONE TO MAINTAIN NUTRITION +4350-10919-0009-2725: YES (THAT'S AN->I CAN) UNDERSTOOD THING RESPONDED THE CELEBRATED PHYSICIAN AGAIN GLANCING AT HIS WATCH +4350-10919-0010-2726: BEG PARDON IS THE (YAUSKY BRIDGE->HOUSEKEEPER'S) DONE YET OR SHALL I HAVE TO DRIVE (AROUND->HER ON) +4350-10919-0011-2727: HE ASKED AH IT IS +4350-10919-0012-2728: OH WELL THEN I CAN DO IT IN TWENTY MINUTES +4350-10919-0013-2729: AND (HOW->*) ABOUT (A TOUR->IT TO) ABROAD ASKED THE FAMILY DOCTOR +4350-10919-0014-2730: WHAT IS WANTED IS (*->THE) MEANS OF IMPROVING NUTRITION AND NOT FOR LOWERING IT +4350-10919-0015-2731: THE FAMILY DOCTOR LISTENED ATTENTIVELY AND RESPECTFULLY +4350-10919-0016-2732: BUT IN (FAVOR->FAVOUR) OF FOREIGN TRAVEL I WOULD URGE THE CHANGE OF HABITS THE REMOVAL FROM CONDITIONS CALLING UP REMINISCENCES +4350-10919-0017-2733: AND THEN THE MOTHER WISHES IT HE ADDED +4350-10919-0018-2734: AH 
WELL (IN->*) THAT (CASE->HAS) TO BE SURE LET THEM GO ONLY THOSE GERMAN (QUACKS->CLACKS) ARE MISCHIEVOUS +4350-10919-0019-2735: OH TIME'S UP ALREADY AND HE WENT TO THE DOOR +4350-10919-0020-2736: THE CELEBRATED DOCTOR ANNOUNCED TO THE PRINCESS A FEELING OF WHAT WAS DUE FROM HIM DICTATED HIS DOING SO THAT HE OUGHT TO SEE THE PATIENT ONCE MORE +4350-10919-0021-2737: (OH->O) NO ONLY A FEW DETAILS PRINCESS COME THIS WAY +4350-10919-0022-2738: AND THE MOTHER ACCOMPANIED BY THE DOCTOR WENT INTO THE DRAWING ROOM TO KITTY +4350-10919-0023-2739: WHEN THE DOCTOR CAME IN SHE FLUSHED CRIMSON AND HER EYES FILLED WITH TEARS +4350-10919-0024-2740: SHE ANSWERED HIM AND ALL AT ONCE GOT UP FURIOUS +4350-10919-0025-2741: EXCUSE ME DOCTOR BUT THERE IS REALLY NO OBJECT IN THIS +4350-10919-0026-2742: THIS IS THE THIRD TIME (YOU'VE->YOU HAVE) ASKED ME THE SAME THING +4350-10919-0027-2743: THE (CELEBRATED->CLEARED) DOCTOR DID NOT TAKE (OFFENSE->OFFENCE) +4350-10919-0028-2744: NERVOUS IRRITABILITY HE SAID TO THE PRINCESS WHEN (KITTY->KATY) HAD LEFT THE ROOM HOWEVER I HAD FINISHED +4350-10919-0029-2745: AND THE DOCTOR BEGAN SCIENTIFICALLY EXPLAINING TO THE PRINCESS AS AN EXCEPTIONALLY INTELLIGENT WOMAN THE CONDITION OF THE YOUNG PRINCESS AND CONCLUDED BY INSISTING ON THE DRINKING OF THE WATERS WHICH (WERE->WAS) CERTAINLY HARMLESS +4350-10919-0030-2746: (AT->BUT) THE QUESTION SHOULD THEY GO ABROAD THE DOCTOR PLUNGED INTO DEEP MEDITATION AS THOUGH RESOLVING A WEIGHTY PROBLEM +4350-10919-0031-2747: FINALLY HIS DECISION WAS PRONOUNCED THEY WERE TO GO ABROAD BUT TO PUT NO FAITH IN FOREIGN QUACKS AND TO APPLY TO HIM IN ANY NEED +4350-10919-0032-2748: IT SEEMED AS THOUGH SOME PIECE OF GOOD FORTUNE HAD COME TO PASS AFTER THE DOCTOR HAD GONE +4350-10919-0033-2749: THE MOTHER WAS MUCH MORE CHEERFUL WHEN SHE WENT BACK TO HER DAUGHTER AND KITTY PRETENDED TO BE MORE CHEERFUL +4350-9170-0000-2750: EDUCATED PEOPLE OF THE UPPER CLASSES ARE TRYING TO STIFLE THE (EVER GROWING->EVERGREWING) SENSE OF THE NECESSITY OF TRANSFORMING THE EXISTING SOCIAL ORDER +4350-9170-0001-2751: (THIS IS->MISSUS) ABSOLUTELY INCORRECT +4350-9170-0002-2752: IN THE SOCIAL CONCEPTION OF LIFE IT IS SUPPOSED THAT SINCE THE AIM OF LIFE IS FOUND IN GROUPS OF INDIVIDUALS INDIVIDUALS WILL VOLUNTARILY SACRIFICE THEIR OWN INTERESTS FOR THE (INTERESTS->INTEREST) OF THE GROUP +4350-9170-0003-2753: THE CHAMPIONS OF THE SOCIAL CONCEPTION OF LIFE USUALLY TRY TO CONNECT THE IDEA OF AUTHORITY THAT IS OF VIOLENCE WITH THE IDEA OF MORAL INFLUENCE BUT THIS CONNECTION IS QUITE IMPOSSIBLE +4350-9170-0004-2754: THE MAN WHO (IS->WAS) CONTROLLED BY MORAL INFLUENCE ACTS IN ACCORDANCE WITH HIS OWN DESIRES +4350-9170-0005-2755: THE BASIS OF AUTHORITY IS BODILY VIOLENCE +4350-9170-0006-2756: THE POSSIBILITY OF APPLYING BODILY VIOLENCE TO PEOPLE IS PROVIDED ABOVE ALL BY AN ORGANIZATION OF ARMED MEN TRAINED TO ACT IN UNISON (IN->AND) SUBMISSION TO ONE WILL +4350-9170-0007-2757: THESE BANDS OF ARMED MEN SUBMISSIVE TO A SINGLE WILL ARE WHAT CONSTITUTE THE ARMY +4350-9170-0008-2758: THE ARMY HAS ALWAYS BEEN AND STILL IS THE BASIS OF POWER +4350-9170-0009-2759: POWER IS ALWAYS IN THE HANDS OF THOSE WHO CONTROL THE ARMY AND ALL MEN IN POWER FROM THE ROMAN CAESARS TO THE RUSSIAN AND GERMAN EMPERORS TAKE MORE INTEREST IN THEIR ARMY THAN IN ANYTHING AND COURT POPULARITY IN THE ARMY KNOWING THAT IF THAT IS ON THEIR SIDE THEIR POWER IS SECURE +4350-9170-0010-2760: INDEED IT COULD NOT BE OTHERWISE +4350-9170-0011-2761: ONLY UNDER THOSE CONDITIONS COULD THE SOCIAL ORGANIZATION BE JUSTIFIED 
+4350-9170-0012-2762: BUT SINCE THIS IS NOT THE CASE AND ON THE CONTRARY MEN (IN->AND) POWER ARE ALWAYS FAR FROM BEING SAINTS THROUGH THE VERY FACT OF THEIR POSSESSION OF POWER THE SOCIAL ORGANIZATION BASED ON POWER HAS NO JUSTIFICATION +4350-9170-0013-2763: EVEN IF THERE WAS ONCE A TIME WHEN OWING TO THE LOW (STANDARD->STANDARDS) OF MORALS (AND->WHEN) THE DISPOSITION OF MEN TO VIOLENCE THE EXISTENCE OF AN AUTHORITY TO RESTRAIN SUCH VIOLENCE WAS AN ADVANTAGE BECAUSE THE VIOLENCE OF (*->THE) GOVERNMENT WAS LESS THAN THE VIOLENCE OF INDIVIDUALS ONE CANNOT BUT SEE THAT THIS ADVANTAGE COULD NOT BE LASTING +4350-9170-0014-2764: BETWEEN THE MEMBERS OF ONE STATE (*->A) SUBJECT TO A SINGLE AUTHORITY THE (STRIFE->STRIPE) BETWEEN (*->THE) INDIVIDUALS (SEEMS->SEEMED) STILL LESS AND (THE->A) LIFE OF THE STATE SEEMS EVEN MORE SECURE +4350-9170-0015-2765: IT WAS PRODUCED ON ONE HAND BY THE NATURAL GROWTH OF POPULATION AND ON THE OTHER BY (STRUGGLE AND->STRUGGLING) CONQUEST +4350-9170-0016-2766: AFTER CONQUEST THE POWER OF THE EMPEROR PUTS AN END TO INTERNAL DISSENSIONS AND SO THE STATE CONCEPTION OF LIFE JUSTIFIES ITSELF +4350-9170-0017-2767: BUT THIS JUSTIFICATION IS NEVER MORE THAN TEMPORARY +4350-9170-0018-2768: INTERNAL DISSENSIONS DISAPPEAR ONLY IN PROPORTION TO THE DEGREE OF OPPRESSION EXERTED BY THE AUTHORITY OVER THE (DISSENTIENT->DISINDIAN) INDIVIDUALS +4350-9170-0019-2769: (GOVERNMENT->GOVERN) AUTHORITY EVEN IF IT DOES SUPPRESS PRIVATE VIOLENCE ALWAYS INTRODUCES INTO THE LIFE OF MEN FRESH FORMS OF VIOLENCE WHICH TEND TO BECOME GREATER AND GREATER IN PROPORTION TO THE DURATION AND STRENGTH OF THE GOVERNMENT +4350-9170-0020-2770: AND THEREFORE THE OPPRESSION OF THE OPPRESSED ALWAYS GOES ON GROWING UP TO THE FURTHEST LIMIT BEYOND WHICH IT CANNOT GO WITHOUT KILLING THE GOOSE WITH THE (GOLDEN EGGS->GOLD KNIFE) +4350-9170-0021-2771: THE MOST CONVINCING EXAMPLE OF THIS IS TO BE FOUND IN THE CONDITION OF THE WORKING CLASSES OF OUR EPOCH WHO ARE IN REALITY NO BETTER THAN THE SLAVES OF ANCIENT (TIMES->TIME) SUBDUED BY CONQUEST +4350-9170-0022-2772: SO IT (HAS->IS) ALWAYS (BEEN->THEN) +4350-9170-0023-2773: FOOTNOTE THE FACT THAT IN AMERICA THE ABUSES OF AUTHORITY EXIST IN SPITE OF THE SMALL NUMBER OF THEIR TROOPS NOT ONLY FAILS TO DISPROVE THIS POSITION BUT POSITIVELY CONFIRMS IT +4350-9170-0024-2774: THE UPPER CLASSES KNOW THAT AN ARMY OF FIFTY THOUSAND WILL SOON BE INSUFFICIENT AND NO LONGER RELYING ON (PINKERTON'S->PINKERTIN'S) MEN THEY FEEL THAT THE SECURITY OF THEIR POSITION DEPENDS ON THE INCREASED STRENGTH OF THE ARMY +4350-9170-0025-2775: THE REASON TO WHICH HE GAVE EXPRESSION IS ESSENTIALLY THE SAME AS THAT WHICH MADE THE FRENCH KINGS AND THE POPES ENGAGE SWISS AND SCOTCH GUARDS AND MAKES THE RUSSIAN AUTHORITIES OF TO DAY SO CAREFULLY DISTRIBUTE THE RECRUITS SO THAT THE REGIMENTS FROM THE (FRONTIERS->FRONTIER THEY) ARE STATIONED IN CENTRAL DISTRICTS AND THE REGIMENTS FROM THE (CENTER->CENTRE) ARE STATIONED ON THE FRONTIERS +4350-9170-0026-2776: THE MEANING OF (CAPRIVI'S->CAPRIVY) SPEECH PUT INTO (PLAIN->PLAN) LANGUAGE IS THAT (FUNDS->FONDS) ARE NEEDED NOT TO RESIST FOREIGN FOES BUT TO BUY UNDER OFFICERS TO BE READY TO ACT AGAINST THE ENSLAVED TOILING MASSES +4350-9170-0027-2777: AND THIS ABNORMAL ORDER OF (THINGS->THANKS) IS MAINTAINED BY THE ARMY +4350-9170-0028-2778: BUT THERE IS NOT ONLY ONE GOVERNMENT THERE ARE OTHER GOVERNMENTS (EXPLOITING->EXPLODING) THEIR SUBJECTS BY (VIOLENCE IN->VIOLENT AND) THE SAME WAY AND (*->ARE) ALWAYS READY TO POUNCE DOWN ON ANY OTHER GOVERNMENT AND CARRY OFF THE 
FRUITS OF THE TOIL OF ITS (ENSLAVED->ENSLAVE) SUBJECTS +4350-9170-0029-2779: AND SO EVERY GOVERNMENT NEEDS AN ARMY ALSO TO PROTECT ITS BOOTY FROM ITS (NEIGHBOR->NEIGHBOUR) BRIGANDS +4350-9170-0030-2780: THIS INCREASE IS CONTAGIOUS AS MONTESQUIEU POINTED OUT (ONE->A) HUNDRED FIFTY YEARS AGO +4350-9170-0031-2781: EVERY INCREASE IN THE ARMY OF ONE STATE WITH THE AIM OF SELF DEFENSE AGAINST ITS SUBJECTS BECOMES A (SOURCE->SORT) OF DANGER FOR NEIGHBORING STATES AND CALLS FOR A SIMILAR INCREASE IN THEIR ARMIES +4350-9170-0032-2782: THE DESPOTISM OF (A->THE) GOVERNMENT ALWAYS INCREASES WITH THE STRENGTH OF THE ARMY AND ITS EXTERNAL SUCCESSES AND THE AGGRESSIVENESS OF A GOVERNMENT INCREASES WITH ITS INTERNAL DESPOTISM +4350-9170-0033-2783: THE RIVALRY OF THE EUROPEAN STATES (IN->AND) CONSTANTLY INCREASING THEIR FORCES HAS REDUCED THEM TO THE NECESSITY OF HAVING RECOURSE TO UNIVERSAL MILITARY SERVICE SINCE BY THAT MEANS THE GREATEST POSSIBLE NUMBER OF SOLDIERS IS OBTAINED AT THE LEAST POSSIBLE EXPENSE +4350-9170-0034-2784: AND BY THIS MEANS ALL CITIZENS ARE UNDER ARMS TO SUPPORT THE INIQUITIES PRACTICED UPON THEM (ALL->ALSO) CITIZENS HAVE BECOME THEIR OWN (OPPRESSORS->IMPRESSORS) +4350-9170-0035-2785: THIS INCONSISTENCY HAS BECOME OBVIOUS (IN->AND) UNIVERSAL MILITARY SERVICE +4350-9170-0036-2786: IN FACT THE WHOLE SIGNIFICANCE OF THE SOCIAL CONCEPTION OF LIFE CONSISTS IN MAN'S RECOGNITION OF THE BARBARITY OF STRIFE BETWEEN INDIVIDUALS AND THE TRANSITORINESS OF PERSONAL LIFE ITSELF AND THE TRANSFERENCE OF THE AIM OF LIFE (TO->THE) GROUPS OF PERSONS +4350-9170-0037-2787: BUT WITH UNIVERSAL MILITARY SERVICE IT COMES TO PASS THAT MEN AFTER MAKING EVERY SACRIFICE TO GET RID OF THE CRUELTY OF STRIFE AND THE INSECURITY OF EXISTENCE ARE CALLED UPON TO FACE ALL THE PERILS THEY HAD MEANT TO AVOID +4350-9170-0038-2788: BUT INSTEAD OF DOING THAT THEY (EXPOSE THE->EXPOSED TO) INDIVIDUALS TO THE SAME NECESSITY OF STRIFE SUBSTITUTING (STRIFE->STRIKE) WITH INDIVIDUALS OF OTHER STATES FOR STRIFE WITH NEIGHBORS +4350-9170-0039-2789: THE TAXES RAISED FROM THE PEOPLE FOR WAR PREPARATIONS ABSORB THE GREATER PART OF THE PRODUCE OF LABOR WHICH THE ARMY OUGHT TO DEFEND +4350-9170-0040-2790: THE DANGER OF WAR EVER READY TO BREAK OUT RENDERS ALL REFORMS OF LIFE SOCIAL LIFE VAIN AND FRUITLESS +4350-9170-0041-2791: BUT THE FATAL SIGNIFICANCE OF UNIVERSAL MILITARY SERVICE AS THE MANIFESTATION OF THE CONTRADICTION INHERENT IN THE SOCIAL CONCEPTION OF LIFE IS NOT ONLY APPARENT IN THAT +4350-9170-0042-2792: (GOVERNMENTS->GOVERNMENT) ASSERT THAT ARMIES ARE NEEDED ABOVE ALL FOR EXTERNAL (DEFENSE->DEFENCE) BUT THAT IS NOT TRUE +4350-9170-0043-2793: (THEY ARE->THERE) NEEDED PRINCIPALLY AGAINST THEIR SUBJECTS AND EVERY MAN UNDER UNIVERSAL MILITARY SERVICE BECOMES AN ACCOMPLICE (IN->AND) ALL (THE->THAT) ACTS OF VIOLENCE OF THE GOVERNMENT AGAINST THE CITIZENS WITHOUT ANY CHOICE OF HIS OWN +4350-9170-0044-2794: AND FOR THE SAKE OF WHAT AM I MAKING THEM +4350-9170-0045-2795: I (AM->*) EXPECTED FOR THE SAKE OF (THE->A) STATE TO MAKE THESE SACRIFICES TO RENOUNCE EVERYTHING THAT CAN BE PRECIOUS TO MAN PEACE FAMILY SECURITY AND HUMAN DIGNITY +4350-9170-0046-2796: EXCEPT FOR THE STATE THEY SAY WE SHOULD BE EXPOSED TO THE ATTACKS OF EVIL DISPOSED PERSONS IN OUR OWN COUNTRY +4350-9170-0047-2797: WE (KNOW->*) NOW (*->KNOW) THAT THREATS AND PUNISHMENTS CANNOT DIMINISH THEIR NUMBER THAT THAT CAN ONLY BE DONE BY CHANGE OF ENVIRONMENT AND MORAL INFLUENCE +4350-9170-0048-2798: SO THAT (THE->THIS) JUSTIFICATION OF STATE VIOLENCE ON THE GROUND OF THE 
PROTECTION IT GIVES US FROM EVIL (DISPOSED->DISPOS) PERSONS EVEN IF (IT->I) HAD SOME FOUNDATION THREE OR FOUR CENTURIES AGO HAS NONE WHATEVER NOW +4350-9170-0049-2799: EXCEPT FOR THE STATE THEY TELL US WE SHOULD NOT HAVE ANY RELIGION EDUCATION CULTURE MEANS OF COMMUNICATION AND SO ON +4350-9170-0050-2800: WITHOUT THE STATE MEN WOULD NOT HAVE BEEN ABLE TO FORM THE SOCIAL INSTITUTIONS NEEDED FOR DOING (ANY THING->ANYTHING) +4350-9170-0051-2801: THIS ARGUMENT TOO WAS WELL FOUNDED ONLY SOME CENTURIES AGO +4350-9170-0052-2802: THE GREAT EXTENSION OF MEANS OF COMMUNICATION AND INTERCHANGE OF IDEAS HAS MADE MEN COMPLETELY ABLE TO DISPENSE WITH STATE AID IN FORMING SOCIETIES ASSOCIATIONS CORPORATIONS AND CONGRESSES FOR SCIENTIFIC ECONOMIC AND POLITICAL OBJECTS +4350-9170-0053-2803: WITHOUT GOVERNMENTS NATIONS WOULD BE ENSLAVED BY THEIR NEIGHBORS +4350-9170-0054-2804: THE GOVERNMENT THEY TELL US WITH ITS ARMY IS NECESSARY TO DEFEND US FROM NEIGHBORING STATES WHO MIGHT ENSLAVE US +4350-9170-0055-2805: AND IF (DEFENSE->DEFENCE) AGAINST BARBAROUS NATIONS IS MEANT ONE THOUSANDTH PART OF THE TROOPS NOW UNDER ARMS WOULD BE AMPLY SUFFICIENT FOR THAT PURPOSE +4350-9170-0056-2806: THE POWER OF THE STATE FAR FROM BEING A SECURITY AGAINST THE ATTACKS OF OUR NEIGHBORS EXPOSES US ON THE CONTRARY TO MUCH GREATER DANGER OF SUCH ATTACKS +4350-9170-0057-2807: EVEN LOOKING AT IT PRACTICALLY WEIGHING THAT IS TO SAY ALL THE BURDENS LAID ON HIM BY THE (STATE->STATES) NO MAN CAN FAIL TO SEE THAT FOR HIM PERSONALLY TO COMPLY WITH (*->THE) STATE DEMANDS AND SERVE IN THE ARMY WOULD IN THE MAJORITY OF CASES BE MORE DISADVANTAGEOUS THAN TO REFUSE TO DO SO +4350-9170-0058-2808: TO RESIST WOULD NEED INDEPENDENT THOUGHT AND EFFORT OF WHICH EVERY MAN IS NOT CAPABLE +4350-9170-0059-2809: SO MUCH FOR THE ADVANTAGES (AND->OF) DISADVANTAGES OF BOTH LINES OF CONDUCT FOR A MAN OF THE WEALTHY (CLASSES AN->CLASS AND) OPPRESSOR +4350-9170-0060-2810: FOR A MAN OF THE POOR WORKING CLASS THE ADVANTAGES AND DISADVANTAGES WILL BE THE SAME BUT WITH A GREAT INCREASE OF DISADVANTAGES +4852-28311-0000-2098: SAY YOU KNOW (SUMTHIN->SOMETHING) +4852-28311-0001-2099: CHRIS LOOKED FROM (A NICKEL PLATED FLASHLIGHT->MENDICULATED FLASH LIKE) TO A CAR JACK AND SPARK PLUG +4852-28311-0002-2100: (KNOW WHO->NO ONE) NEEDS A JOB (BAD->BAN) THAT'S (JAKEY->JAKIE) HARRIS +4852-28311-0003-2101: O K HE SAID +4852-28311-0004-2102: ONLY WHY DIDN'T YOU ASK HIM YOURSELF +4852-28311-0005-2103: MIKE BECAME UNEASY AND FISHED (AN ELASTIC->IT MOLASTIC) BAND OUT OF HIS POCKET MADE A FLICK OF PAPER AND SENT IT SOARING OUT (INTO M->AN ENEM) STREET +4852-28311-0006-2104: WELL HE ADMITTED I DID +4852-28311-0007-2105: CHRIS ASKED (AND->HIM) FOR THE FIRST TIME THAT DAY THE HEAVY WEIGHT HE CARRIED WITHIN HIM LIFTED AND LIGHTENED A LITTLE +4852-28311-0008-2106: (THINK HE->THINKING) REALLY NEEDS IT HE PURSUED +4852-28311-0009-2107: HE WOULD HAVE LIKED TO GET THE JOB FOR (JAKEY->JAKIE) WHO NEEDED IT BUT SOMEHOW THE TASK OF FACING MISTER WICKER ESPECIALLY NOW THAT THE LIGHT WAS GOING AND DUSK (EDGING->EDGED) INTO THE STREETS WAS NOT WHAT (CHRIS HAD->CHRISTEN) INTENDED FOR ENDING THE AFTERNOON +4852-28311-0010-2108: MIKE'S EXPRESSION CHANGED AT (ONCE->ONE WANTS) TO ONE OF TRIUMPH BUT (CHRIS->BRUCE) WAS ONLY (PARTLY->PARTIALLY) ENCOURAGED +4852-28311-0011-2109: (BETCHA AREN'T->BETTER AND) GOIN AFTER ALL (CHRIS->THIS) TURNED (ON->TO) HIM +4852-28311-0012-2110: MIKE WAS STANDING ON THE CORNER +4852-28311-0013-2111: (AW SHUCKS->AH SHOCKS) +4852-28311-0014-2112: CHRIS STARTED OFF ONCE MORE 
PASSING (THE->A) BLEAK LITTLE VICTORIAN CHURCH PERCHED ON THE HILL ABOVE MISTER WICKER'S HOUSE +4852-28311-0015-2113: AN EMPTY LOT CUT (*->IN) INTO BY CHURCH LANE GAVE A LOOK OF ISOLATION TO THE (L->ALE) SHAPED BRICK BUILDING THAT SERVED MISTER (WICKER AS->WICKER'S) BOTH HOUSE AND PLACE OF BUSINESS +4852-28311-0016-2114: (THE->NO) LONGER (WING->WINGED) TOWARD THE BACK (HAD->GOT) A BACK DOOR THAT OPENED (ONTO->ON A) WATER STREET THE SPACE BETWEEN THE HOUSE AND WISCONSIN (AVENUE->AVIGUE) HAD BEEN MADE INTO A NEAT OBLONG FLOWER GARDEN FENCED OFF FROM THE SIDEWALK BY BOX (SHRUBS->SHRUGS) AND (A->THE) WHITE PICKET FENCE +4852-28311-0017-2115: A LIVID YELLOW STAINED THE HORIZON BEYOND THE FACTORIES (AND GRAY->IN GLAY) CLOUDS LOWERED AND TUMBLED ABOVE +4852-28311-0018-2116: THE AIR WAS GROWING CHILL AND (CHRIS->CHRIST) DECIDED TO FINISH (HIS->THE) JOB +4852-28311-0019-2117: ALL AT ONCE (HE->YOU) WONDERED HOW HIS MOTHER WAS AND EVERYTHING IN HIM (PINCHED->IMPINGED) AND TIGHTENED ITSELF +4852-28311-0020-2118: AT THE FOOT OF THE HILL HE REACHED THE HOUSE +4852-28311-0021-2119: THERE WERE THREE THINGS THAT ALWAYS CAUGHT HIS EYE AMID THE LITTER OF DUSTY PIECES +4852-28311-0022-2120: ON THE LEFT THE COIL OF ROPE IN THE CENTER THE MODEL OF A SAILING SHIP IN A GREEN GLASS BOTTLE AND ON THE RIGHT THE WOODEN STATUE OF A NEGRO BOY (IN->AND) BAGGY TROUSERS TURKISH JACKET AND WHITE TURBAN +4852-28311-0023-2121: BUT THE NAME STILL SHOWED AT THE PROW AND MANY A TIME CHRIS SAFE AT HOME IN BED HAD SAILED IMAGINARY VOYAGES IN THE MIRABELLE +4852-28311-0024-2122: HE HAD NEVER SEEN (ANYONE->ANY ONE) GO INTO MISTER (WICKER'S->HOOKER'S) SHOP NOW HE THOUGHT OF IT +4852-28311-0025-2123: HOW THEN DID HE (*->TO) LIVE AND WHAT DID HE EVER SELL +4852-28311-0026-2124: A SUDDEN CAR HORN (WOKE HIM->WALKING) FROM (HIS->THIS) DREAM +4852-28312-0000-2125: OF THE MANY TIMES (HE HAD->YOU'D) EXAMINED MISTER WICKER'S WINDOW AND (PORED->POURED) OVER THE ROPE THE SHIP AND THE NUBIAN BOY HE HAD NEVER GONE INTO MISTER (WICKER'S->ROOKER'S) SHOP +4852-28312-0001-2126: SO NOW ALONE UNTIL (SOMEONE->SOME ONE) SHOULD ANSWER THE BELL (HE->THEY) LOOKED EAGERLY IF UNEASILY AROUND HIM +4852-28312-0002-2127: WHAT WITH THE ONE WINDOW AND THE LOWERING DAY OUTSIDE THE LONG NARROW SHOP WAS (SOMBER->SOMBRE) +4852-28312-0003-2128: HEAVY HAND (HEWN->YOU AND) BEAMS CROSSED IT FROM ONE SIDE TO THE OTHER +4852-28312-0004-2129: MISTER (WICKER'S->OAKERS) BACK BEING TOWARD THE SOURCE OF LIGHT CHRIS COULD NOT SEE HIS FACE +4852-28312-0005-2130: THE DOUBLE FANS OF MINUTE WRINKLES BREAKING FROM EYE (CORNER TO->CORNERED A) TEMPLE (AND JOINING->ADJOINING) WITH THOSE OVER THE (CHEEKBONES->SHEEP BONES) WERE DRAWN INTO THE HORIZONTAL LINES ACROSS THE DOMED FOREHEAD +4852-28312-0006-2131: LITTLE TUFTS OF WHITE (FUZZ->FUZ) ABOVE THE EARS WERE ALL THAT REMAINED OF THE ANTIQUARIAN'S HAIR BUT WHAT DREW AND HELD CHRIS'S GAZE WERE THE OLD MAN'S EYES +4852-28312-0007-2132: (CHRIS BLINKED->CRISP LINKED) AND LOOKED AGAIN YES THEY WERE STILL THERE +4852-28312-0008-2133: CHRIS SWALLOWED AND HIS VOICE CAME BACK TO HIM +4852-28312-0009-2134: YES SIR HE SAID +4852-28312-0010-2135: I SAW YOUR SIGN AND I KNOW A BOY WHO NEEDS THE JOB +4852-28312-0011-2136: HE'S A SCHOOLMATE OF MINE +4852-28312-0012-2137: (JAKEY HARRIS HIS->GIGIRIS'S) NAME (IS AND->ISN'T) HE REALLY NEEDS THE JOB +4852-28312-0013-2138: I I JUST WONDERED IF THE PLACE WAS STILL OPEN +4852-28312-0014-2139: WHAT HE SAW WAS A FRESH CHEEKED LAD TALL FOR THIRTEEN STURDY WITH SINCERITY AND GOOD (HUMOR->HUMOUR) IN HIS FACE AND 
SOMETHING SENSITIVE AND APPEALING ABOUT HIS EYES +4852-28312-0015-2140: HE GUESSED THERE (*->IT) MUST BE A LIVELY FIRE IN THAT (ROOM->RUM) BEYOND +4852-28312-0016-2141: WOULD THAT INTERFERE WITH (JAKEY'S->JAKIE GIGS) GETTING THE JOB SIR +4852-28312-0017-2142: BUT EVEN AS HE SLOWLY TURNED THE THOUGHT PIERCED HIS MIND WHY (HAD HE->DO YOU) NOT (SEEN->SEE) THE REFLECTION OF THE (HEADLIGHTS->HEAD LIGHTS) OF THE CARS MOVING UP AROUND THE CORNER OF (WATER STREET AND UP->WALL AT HER STREET NOT) THE HILL TOWARD THE (TRAFFIC->EFFIC) SIGNALS +4852-28312-0018-2143: THE ROOM SEEMED OVERLY STILL +4852-28312-0019-2144: THEN IN THAT SECOND HE TURNED AND FACED ABOUT +4852-28312-0020-2145: THE WIDE BOW WINDOW WAS THERE BEFORE HIM THE THREE OBJECTS HE LIKED BEST SHOWING FROSTY IN THE MOONLIGHT THAT POURED IN FROM ACROSS THE WATER +4852-28312-0021-2146: ACROSS THE WATER WHERE WAS THE (FREEWAY->FREE WAY) +4852-28312-0022-2147: IT WAS NO LONGER THERE NOR WERE THE HIGH WALLS AND (SMOKESTACKS->SMOKE STACKS) OF FACTORIES TO BE SEEN +4852-28312-0023-2148: THE WAREHOUSES WERE STILL THERE +4852-28312-0024-2149: (FLABBERGASTED AND->FLABRA GASTED IN) BREATHLESS CHRIS WAS UNAWARE THAT HE HAD MOVED CLOSER TO PEER OUT THE WINDOW IN EVERY DIRECTION +4852-28312-0025-2150: NO ELECTRIC SIGNS NO LAMPLIT STREETS +4852-28312-0026-2151: WHERE THE PEOPLE'S (DRUGSTORE->DRUG STORE) HAD STOOD BUT (A->*) HALF (*->AN) HOUR BEFORE ROSE THE ROOFS OF WHAT WAS EVIDENTLY AN INN +4852-28312-0027-2152: A COURTYARD WAS (SPARSELY->FIRSTLY) LIT BY A FLARING (TORCH OR->TORTURE) TWO SHOWING (A->THE) SWINGING SIGN HUNG ON (A->THE) POST +4852-28312-0028-2153: THE (POST WAS->POSTS) PLANTED AT THE EDGE OF (WHAT->IT) WAS NOW A BROAD AND MUDDY ROAD +4852-28312-0029-2154: A COACH (WITH ITS TOP->WHICH HAD STOPPED) PILED HIGH WITH (LUGGAGE->LEGGED) STAMPED (TO->*) A HALT BESIDE THE FLAGGED COURTYARD +4852-28312-0030-2155: THEY MOVED INTO THE INN THE COACH RATTLED OFF TO THE STABLE +4852-28312-0031-2156: (MY->BY) WINDOW (HAS->AS) A POWER FOR THOSE FEW WHO ARE TO SEE +4852-28319-0000-2070: THE LEARNING (OF->AND) MAGIC WAS BY NO MEANS EASY +4852-28319-0001-2071: HE HAD TOLD HIS MASTER AT ONCE (ABOUT->HE GOT) SIMON GOSLER HIS (HORDE->HOARD) OF MONEY AND HIS HIDING PLACES FOR IT +4852-28319-0002-2072: CHRIS THEREFORE THREW HIMSELF (INTO->AND) ALL THE PRELIMINARIES OF HIS TASK +4852-28319-0003-2073: ONE AFTERNOON WHEN HE (*->HAD) RETURNED AFTER A REST TO MISTER WICKER'S STUDY HE SAW THAT THERE WAS SOMETHING NEW IN THE ROOM A BOWL WITH A (GOLDFISH->GOLD FISH) IN IT STOOD ON THE TABLE BUT MISTER WICKER WAS NOT TO BE SEEN +4852-28319-0004-2074: WHAT (SHALL->SHOULD ALL) I DO FIRST +4852-28319-0005-2075: HOW (YOU HAVE IMPROVED->OFTEN PROVED) MY BOY (HE->IT) EXCLAIMED (IT->*) IS NOW TIME FOR YOU TO TRY (AND THIS->MISSUS) IS (AS->*) GOOD A CHANGE (AS->IS) ANY +4852-28319-0006-2076: SUPPOSE (I->A) CHANGE AND CAN'T (CHANGE->CHANCE) BACK +4852-28319-0007-2077: MISTER WICKER WAITED PATIENTLY BESIDE HIM FOR A FEW MOMENTS FOR CHRIS TO GET UP HIS COURAGE +4852-28319-0008-2078: (THEN AS->THAT IS) NOTHING HAPPENED WITH A VOICE LIKE A WHIP MISTER WICKER SAID START AT ONCE +4852-28319-0009-2079: THE SENSATION SPREAD FASTER AND FASTER +4852-28319-0010-2080: HIS HEAD SWAM AND HE FELT FAINT (AND->IN) A LITTLE SICK BUT HE PERSISTED THROUGH THE FINAL WORDS +4852-28319-0011-2081: HE THOUGHT NOT WITHOUT A FEELING OF PRIDE AND COMMENCED (*->THE) EXPERIMENTING WITH HIS TAIL AND FINS WITH SUCH ENTHUSIASM AND DELIGHT THAT SOME LITTLE TIME ELAPSED BEFORE MISTER WICKER'S VOICE BOOMED CLOSE BY 
+4852-28319-0012-2082: SEVENTY FOUR BOOK ONE THE RETURN +4852-28319-0013-2083: THE (FIGURE'S->FIGURES) SHOES CARVED IN SOME EASTERN STYLE HAD CURVED UP POINTING TOES +4852-28319-0014-2084: THEN ALL AT ONCE THE IDEA CAME TO CHRIS +4852-28319-0015-2085: IF HE WAS TO BE A MAGICIAN COULD HE MAKE THIS BOY COME TO LIFE +4852-28319-0016-2086: (HE->IT) SQUATTED ON HIS HAUNCHES (EXAMINING->EXAMINED) THE CARVED WOODEN FIGURE ATTENTIVELY AND FELT CONVINCED THAT ONCE ALIVE THE BOY WOULD BE AN IDEAL AND HAPPY COMPANION +4852-28319-0017-2087: BUT HOW DID ONE (*->A) CHANGE INANIMATE TO ANIMATE +4852-28319-0018-2088: (CHRIS->GRIS) GOT UP AND STOLE BACK TO MISTER WICKER'S DOOR +4852-28319-0019-2089: HE HEARD (THE->THAT) MAGICIAN GOING UP THE SPIRAL STAIRCASE TO HIS ROOM ABOVE AND AFTER CHANGING HIMSELF TO A MOUSE TO SLIP UNDER THE DOOR AND SEE THAT THE ROOM WAS REALLY EMPTY (CHRIS RESUMED HIS->MISTER JAMES'S) PROPER SHAPE AND OPENED THE DOORS OF THE CUPBOARD AT THE FAR END OF THE ROOM +4852-28319-0020-2090: THE AFTERNOON (RAINY->RAINING) BEFORE INCREASED IN STORM +4852-28319-0021-2091: (DUSK CAME->THUS GAINED) TWO HOURS BEFORE ITS TIME THUNDER (SNARLED->SNARLS) IN THE SKY +4852-28319-0022-2092: CERTAIN ELEMENTS WERE TO BE MIXED AND POURED AT THE PROPER TIME +4852-28319-0023-2093: MISTER WICKER BEGAN MOVING ABOUT UPSTAIRS THE (FLOORBOARDS->FOREBOARDS) CREAKED AND STILL CHRIS COULD NOT LEAVE UNTIL THE (POTION->FORCIAN) FUMED AND GLOWED +4852-28319-0024-2094: WITH INFINITE CAUTION CHRIS CLOSED THE DOOR SILENTLY BEHIND HIM AND RUNNING LIGHTLY FORWARD REACHED THE FIGURE (OF->AT) THE NEGRO BOY +4852-28319-0025-2095: IT WAS AS IF THE STIFFNESS MELTED +4852-28319-0026-2096: UNDER HIS EYES (THE WOODEN FOLDS->WELLS) OF CLOTH BECAME RICH SILK EMBROIDERY GLEAMED IN ITS REALITY UPON THE COAT AND OH THE FACE +4852-28319-0027-2097: THE WOODEN GRIN LOOSENED THE LARGE EYES TURNED THE HAND HOLDING (THE->A) HARD BOUQUET OF CARVED FLOWERS MOVED (AND LET->*) THE BOUQUET FALL +4852-28330-0000-2044: THEY WENT DOWN TO THEIR QUARTERS FIRST +4852-28330-0001-2045: GUESS MISTER FINNEY WENT TO HIS QUARTERS I DON'T REMEMBER SEEING HIM CROSS THE DECK OR COME OVER THAT WAY AT ALL +4852-28330-0002-2046: NEXT NED CILLEY WAS RELIEVED (AT->TO) THE HELM BY (ELBERT->ELBER) JONES WHO TOOK OVER NED WENT ON DOWN +4852-28330-0003-2047: IT LOOKS TO ME AS IF IT COULD (HAVE BEEN->BIT OF IN) ONE OF SEVERAL PEOPLE AND I'LL BE SWITCHED IF I KNOW WHO I'LL KEEP MY EYES (OPEN->UP AND) +4852-28330-0004-2048: THE MIRABELLE WAS (NEARING TAHITI->NEAR INDEEDY) +4852-28330-0005-2049: (WE'VE WATER->REVOLTA) AND FRESH STORES TO TAKE ON THERE +4852-28330-0006-2050: CHRIS LOST NO TIME AS SOON AS HE COULD DO IT WITHOUT BEING NOTICED (IN->AND) HURRYING DOWN TO HIS CABIN +4852-28330-0007-2051: CERTAINLY MY BOY BOOMED OUT THE CAPTAIN (HIS->AS) BLUE EYES ABRUPTLY KEEN AND PENETRATING +4852-28330-0008-2052: MISTER (FINNEY->FINNELL) WILL BE SOME TIME ON DECK WE CANNOT BE (OVERHEARD IN->OWNED HEARD AND) HERE +4852-28330-0009-2053: HIS FACE (FROZE->ROSE) WITH NERVOUSNESS THAT THIS MIGHT (*->DO) NOT DO AS AN ANSWER AND HE STOOD STIFF AND STILL BEFORE CAPTAIN BLIZZARD +4852-28330-0010-2054: THE CAPTAIN SAT FORWARD IN HIS CHAIR LOOKING AT HIM FOR A LONG MOMENT CONSIDERING +4852-28330-0011-2055: THEN HE SAID WELL I DO NOT CARE FOR IT I CANNOT SAY (I->THAT) DO +4852-28330-0012-2056: (THIS->THE) SHIP IS MORE TO ME THAN (WIFE OR->MY FULL) MOTHER OR FAMILY +4852-28330-0013-2057: (HE->AND) PAUSED FINGERING HIS LOWER LIP AND LOOKING SIDEWAYS IN A REFLECTIVE FASHION AT (CHRIS->CRIS) 
STANDING BEFORE HIM +4852-28330-0014-2058: WE SHALL SAY NO MORE BUT I TRUST YOU UNDERSTAND THE RESPONSIBILITY YOU HAVE +4852-28330-0015-2059: (THIS->THE) SHIP ITS CARGO (AND->IN) ITS MEN WILL BE IN YOUR HANDS +4852-28330-0016-2060: YES SIR I THINK I CAN DO IT SAFELY OR I SHOULD NOT TRY SIR +4852-28330-0017-2061: CAPTAIN BLIZZARD'S ROUND PINK (FACE->FACED) CREASED IN (HIS->ITS) WINNING SMILE +4852-28330-0018-2062: HE THEN WENT ON TO DESCRIBE WHAT ELSE WAS TO FOLLOW THE COVERING OF THE SHIP WITH LEAVES TO MAKE IT BLEND WITH ITS SURROUNDINGS +4852-28330-0019-2063: (*->THE) CAMOUFLAGE WAS NOT A WORD THE CAPTAIN OR (ANYONE->ANY ONE) ELSE OF HIS TIME (YET->HE HAD) UNDERSTOOD +4852-28330-0020-2064: WHAT CAN BE SAID DURING THAT TIME SIR CHRIS THOUGHT TO ASK +4852-28330-0021-2065: I AM SOMEWHAT SKILLED IN (MEDICAMENTS->MEDICMENTS) I HAVE TO BE AS (*->A) CAPTAIN OF (A->*) SHIP AND (THE->*) CREW KNOW IT +4852-28330-0022-2066: I SHALL SAY THAT YOU ARE IN MY OWN CABIN SO THAT I CAN CARE FOR YOU +4852-28330-0023-2067: NOT SINCE HE HAD LEFT MISTER WICKER (HAD->AND) CHRIS FELT SUCH CONFIDENCE AS HE DID IN THE WORDS AND ACTIONS OF CAPTAIN BLIZZARD +4852-28330-0024-2068: HE KNEW NOW THAT HIS ABSENCE FOR AS LONG AS HE HAD (*->HAD) TO BE AWAY WOULD BE COVERED UP (AND->IN) SATISFACTORILY ACCOUNTED FOR +4852-28330-0025-2069: THEIR CONVERSATION HAD TAKEN SOME LITTLE WHILE +533-1066-0000-796: (WHEN->ONE) CHURCHYARDS YAWN +533-1066-0001-797: I KNEW WELL ENOUGH THAT HE MIGHT BE CARRIED (THOUSANDS->THOUSAND) OF MILES (IN THE->INTO) BOX CAR LOCKED IN PERHAPS WITHOUT WATER OR (FOOD->FOOT) +533-1066-0002-798: I AM SURE I KISSED (LIDDY->LADY) AND (I HAVE->I'VE) HAD TERRIBLE MOMENTS SINCE WHEN I (SEEM->SEEMED) TO REMEMBER KISSING MISTER JAMIESON TOO IN THE EXCITEMENT +533-1066-0003-799: FORTUNATELY WARNER (AND->ON) THE (DETECTIVES->DETECTIVE) WERE KEEPING BACHELOR (HALL IN THE->HOLLAND) LODGE +533-1066-0004-800: OUT OF DEFERENCE TO (LIDDY->LIVE) THEY WASHED (THEIR->HER) DISHES ONCE A DAY AND THEY (CONCOCTED->CONCLUDED) QUEER (MESSES->MASSES) ACCORDING TO THEIR SEVERAL ABILITIES +533-1066-0005-801: MISS (INNES->EAMES) HE SAID STOPPING ME AS I WAS ABOUT TO GO TO MY ROOM UP STAIRS HOW ARE YOUR NERVES (TONIGHT->TO NIGHT) +533-1066-0006-802: I HAVE NONE I SAID HAPPILY +533-1066-0007-803: I MEAN HE PERSISTED DO YOU FEEL AS THOUGH YOU COULD GO THROUGH WITH SOMETHING RATHER UNUSUAL +533-1066-0008-804: THE MOST UNUSUAL THING I CAN THINK OF WOULD BE A PEACEFUL NIGHT +533-1066-0009-805: SOMETHING IS GOING TO OCCUR HE SAID +533-1066-0010-806: PUT ON HEAVY SHOES AND SOME (OLD->ALL) DARK CLOTHES AND MAKE UP YOUR MIND NOT TO BE SURPRISED AT ANYTHING +533-1066-0011-807: (LIDDY->LEAVY) WAS SLEEPING (THE->*) SLEEP OF THE JUST WHEN I WENT (UP STAIRS->UPSTAIRS) AND I HUNTED OUT MY THINGS CAUTIOUSLY +533-1066-0012-808: (THEY->YOU) WERE TALKING (CONFIDENTIALLY->TO FILIANTLY) TOGETHER BUT WHEN I CAME DOWN THEY CEASED +533-1066-0013-809: THERE WERE A FEW PREPARATIONS TO BE MADE (THE->*) LOCKS TO BE GONE OVER WINTERS TO BE (INSTRUCTED->INSTRUCTIVE) AS TO RENEWED VIGILANCE AND THEN AFTER (EXTINGUISHING->DISTINGUISHING) THE (HALL->WHOLE) LIGHT WE CREPT IN THE DARKNESS THROUGH THE FRONT DOOR AND INTO THE NIGHT +533-1066-0014-810: I ASKED NO QUESTIONS +533-1066-0015-811: (ONCE->WAS) ONLY SOMEBODY SPOKE AND THEN IT WAS AN EMPHATIC (BIT->FIT) OF PROFANITY FROM DOCTOR STEWART WHEN HE RAN INTO A WIRE FENCE +533-1066-0016-812: I (HARDLY->ARE TO) KNOW WHAT I EXPECTED +533-1066-0017-813: THE DOCTOR WAS PUFFING SOMEWHAT (WHEN->BUT) WE FINALLY CAME TO A HALT 
+533-1066-0018-814: I CONFESS THAT JUST AT THAT MINUTE EVEN SUNNYSIDE SEEMED A CHEERFUL SPOT +533-1066-0019-815: IN SPITE OF MYSELF I DREW MY BREATH IN SHARPLY +533-1066-0020-816: IT WAS ALEX (ARMED WITH->ON THE) TWO LONG HANDLED SPADES +533-1066-0021-817: THE DOCTOR KEPT A (KEEN LOOKOUT->KIN LOOK OUT) BUT NO ONE APPEARED +533-1066-0022-818: THERE'S ONE THING SURE I'LL NOT BE SUSPECTED OF COMPLICITY +533-1066-0023-819: (A->THE) DOCTOR IS GENERALLY SUPPOSED TO BE (*->A) HANDIER AT (BURYING->BERING) FOLKS THAN (AT DIGGING->A TIGGING) THEM UP +533-1066-0024-820: I HELD ON TO HIM FRANTICALLY AND SOMEHOW I GOT (THERE AND->TERRANT) LOOKED DOWN +533-131556-0000-821: BUT HOW AM I TO (GET->IT) OVER THE (TEN->TOWN) OR TWELVE DAYS THAT MUST YET ELAPSE BEFORE THEY GO +533-131556-0001-822: FOR NONE COULD (INJURE->ENDURE) ME AS HE HAS DONE OH +533-131556-0002-823: THE WORD STARES ME IN THE FACE LIKE A GUILTY CONFESSION BUT IT IS TRUE I HATE HIM I HATE HIM +533-131556-0003-824: I SOMETIMES THINK I OUGHT TO GIVE HIM CREDIT FOR THE GOOD FEELING HE (SIMULATES->SIMILATES) SO WELL AND THEN AGAIN I THINK IT IS MY DUTY TO SUSPECT HIM UNDER THE PECULIAR CIRCUMSTANCES IN WHICH I AM PLACED +533-131556-0004-825: I HAVE DONE WELL TO RECORD (THEM SO MINUTELY->HIM SUMINUTELY) +533-131556-0005-826: (THEY->THE YEAR) HAD (BETAKEN->TAKEN) THEMSELVES TO THEIR WORK I (LESS TO->LEST) DIVERT MY MIND THAN TO (DEPRECATE->THE PROCATE) CONVERSATION HAD PROVIDED MYSELF WITH (A->THE) BOOK +533-131556-0006-827: I AM TOO (WELL->*) ACQUAINTED WITH YOUR CHARACTER AND CONDUCT TO FEEL ANY REAL FRIENDSHIP FOR YOU AND AS I AM WITHOUT YOUR TALENT FOR DISSIMULATION I CANNOT ASSUME THE APPEARANCE OF IT +533-131556-0007-828: (UPON->UP AND) PERUSING THIS SHE TURNED SCARLET AND BIT HER LIP +533-131556-0008-829: YOU MAY GO (MILICENT AND SHE'LL FOLLOW IN->MELLICENT UNTIL FOLLOWING) A WHILE (MILICENT->MELLICENT) WENT +533-131556-0009-830: (WILL YOU->OLI') OBLIGE ME (HELEN->ALLAN) CONTINUED SHE +533-131556-0010-831: (AH->HA) YOU ARE SUSPICIOUS +533-131556-0011-832: IF I WERE SUSPICIOUS I REPLIED I SHOULD HAVE DISCOVERED YOUR INFAMY LONG BEFORE +533-131556-0012-833: (I->*) ENJOY (A MOONLIGHT->EVENLENTH) RAMBLE AS WELL AS YOU I ANSWERED STEADILY FIXING MY EYES (UPON HER->UP ON EARTH) AND (THE SHRUBBERY->FREDERI) HAPPENS TO BE ONE OF MY (FAVOURITE->FAVORITE) RESORTS +533-131556-0013-834: SHE COLOURED AGAIN EXCESSIVELY AND REMAINED SILENT PRESSING HER FINGER AGAINST HER TEETH AND GAZING INTO THE FIRE +533-131556-0014-835: I (WATCHED HER->WATCH FOR) A FEW MOMENTS (WITH A->TO THE) FEELING OF MALEVOLENT GRATIFICATION THEN MOVING TOWARDS THE DOOR I CALMLY ASKED IF SHE HAD ANYTHING MORE TO SAY +533-131556-0015-836: YES YES +533-131556-0016-837: SUPPOSE I DO +533-131556-0017-838: SHE PAUSED IN EVIDENT DISCONCERTION AND PERPLEXITY MINGLED WITH ANGER SHE DARED NOT SHOW +533-131556-0018-839: I CANNOT RENOUNCE WHAT IS DEARER THAN LIFE SHE MUTTERED IN A LOW HURRIED TONE +533-131556-0019-840: IF YOU ARE GENEROUS HERE IS A FITTING OPPORTUNITY FOR THE EXERCISE OF YOUR MAGNANIMITY IF YOU ARE PROUD HERE AM I YOUR RIVAL (READY->RATHER) TO (ACKNOWLEDGE->ANNOUNCE) MYSELF YOUR (DEBTOR->DEPTOR) FOR AN ACT OF (THE->*) MOST NOBLE FORBEARANCE +533-131556-0020-841: I SHALL NOT TELL HIM +533-131556-0021-842: GIVE ME NO THANKS IT IS NOT FOR YOUR SAKE THAT I REFRAIN +533-131556-0022-843: AND (MILICENT->MELLICENT) WILL (YOU->IT) TELL HER +533-131556-0023-844: I (WOULD->WILL) NOT FOR MUCH THAT (SHE->YOU) SHOULD (KNOW THE->NOT) INFAMY AND DISGRACE OF HER RELATION 
+533-131556-0024-845: YOU USE (HARD->OUR) WORDS MISSUS HUNTINGDON BUT I CAN PARDON YOU +533-131556-0025-846: HOW DARE YOU MENTION HIS NAME TO ME +533-131562-0000-847: IT SEEMS VERY INTERESTING LOVE SAID HE LIFTING HIS HEAD AND TURNING TO (WHERE I STOOD->HER EYES TOO) WRINGING MY (HANDS->HAND) IN SILENT (RAGE->RATE) AND ANGUISH BUT IT'S RATHER LONG (I'LL->I) LOOK AT IT SOME OTHER TIME AND MEANWHILE I'LL TROUBLE YOU FOR YOUR KEYS MY DEAR WHAT (KEYS->CASE) +533-131562-0001-848: (THE KEYS->IT ACCUSE) OF YOUR CABINET (DESK DRAWERS->DESKED RAOUL) AND WHATEVER ELSE YOU POSSESS SAID HE RISING AND HOLDING OUT HIS HAND +533-131562-0002-849: THE KEY OF MY (DESK->VES) IN FACT WAS AT THAT MOMENT IN (THE LOCK->LOVE) AND THE OTHERS WERE ATTACHED TO IT +533-131562-0003-850: NOW THEN SNEERED HE WE MUST HAVE A CONFISCATION OF PROPERTY +533-131562-0004-851: AND (PUTTING->PUT IN) THE KEYS INTO HIS POCKET HE WALKED INTO THE LIBRARY +533-131562-0005-852: THAT AND ALL REPLIED THE MASTER AND THE THINGS WERE CLEARED AWAY +533-131562-0006-853: MISTER HUNTINGDON THEN WENT (UP STAIRS->UPSTAIRS) +533-131562-0007-854: MUTTERED HE STARTING BACK SHE'S (THE->*) VERY DEVIL FOR (*->A) SPITE +533-131562-0008-855: I DIDN'T SAY (I'D->I'VE) BROKEN IT DID I RETURNED HE +533-131562-0009-856: I SHALL PUT YOU (UPON->UP IN) A SMALL (MONTHLY ALLOWANCE->MOUTHFULLY ALLOW US) IN FUTURE FOR YOUR OWN PRIVATE EXPENSES AND YOU NEEDN'T TROUBLE YOURSELF ANY MORE ABOUT MY CONCERNS I SHALL LOOK OUT FOR A STEWARD MY DEAR I WON'T EXPOSE YOU TO (THE->*) TEMPTATION +533-131562-0010-857: AND AS FOR THE (HOUSEHOLD->HOUSE OR) MATTERS MISSUS (GREAVES->GREEBS) MUST BE VERY PARTICULAR IN KEEPING HER ACCOUNTS WE MUST GO (UPON->UP IN) AN ENTIRELY NEW PLAN +533-131562-0011-858: WHAT GREAT DISCOVERY HAVE YOU MADE NOW MISTER (HUNTINGDON->HONDYNON) +533-131562-0012-859: (HAVE->IF) I (ATTEMPTED->ATTENDED) TO DEFRAUD YOU +533-131562-0013-860: NOT IN MONEY MATTERS EXACTLY IT SEEMS BUT (IT'S BEST->IS FAST) TO KEEP OUT OF THE WAY OF TEMPTATION +533-131562-0014-861: HERE (BENSON->BUILTON) ENTERED (WITH->*) THE CANDLES AND THERE FOLLOWED (A BRIEF->THE ROOF) INTERVAL OF SILENCE I SITTING (STILL IN->STEALING) MY CHAIR AND HE STANDING WITH HIS BACK TO THE FIRE SILENTLY TRIUMPHING IN MY DESPAIR +533-131562-0015-862: I KNOW THAT DAY AFTER DAY SUCH FEELINGS (WILL->TO) RETURN UPON ME +533-131562-0016-863: I (TRY->TRIED) TO LOOK TO HIM AND RAISE MY HEART TO HEAVEN BUT IT WILL (CLEAVE->CLIFF) TO THE DUST +533-131564-0000-768: VAIN HOPE I FEAR +533-131564-0001-769: (MISTER->MISS) AND MISSUS (HATTERSLEY->HALTERSLEY) HAVE BEEN (STAYING AT THE GROVE->SEEING IT TO GROW) A FORTNIGHT AND AS (MISTER->MISSUS) HARGRAVE IS STILL ABSENT AND (THE->*) WEATHER WAS REMARKABLY FINE I NEVER PASSED (A->THE) DAY WITHOUT SEEING MY TWO FRIENDS (MILICENT->MILLSON) AND (ESTHER->ASSER) EITHER THERE OR HERE +533-131564-0002-770: NO UNLESS YOU CAN TELL (ME->YOU) WHEN TO EXPECT HIM HOME +533-131564-0003-771: I CAN'T (YOU DON'T WANT->EVEN WANTS) HIM DO YOU +533-131564-0004-772: IT IS A RESOLUTION YOU (OUGHT TO HAVE FORMED->ARE REFORMED) LONG AGO +533-131564-0005-773: WE ALL HAVE A BIT OF A LIKING FOR HIM AT THE BOTTOM OF OUR (HEARTS->HEART) THOUGH WE CAN'T RESPECT HIM +533-131564-0006-774: NO I'D RATHER BE LIKE MYSELF (BAD->THAT) AS I AM +533-131564-0007-775: NEVER MIND MY PLAIN SPEAKING SAID I IT IS FROM THE BEST OF MOTIVES +533-131564-0008-776: BUT TELL ME SHOULD YOU WISH (YOUR SONS->YOURSELVES) TO BE LIKE MISTER HUNTINGDON OR EVEN LIKE YOURSELF +533-131564-0009-777: OH NO (I COULDN'T->ECHOLYN) STAND 
THAT +533-131564-0010-778: (FIRE->FAR) AND FURY +533-131564-0011-779: NOW DON'T (BURST->FORCE) INTO A TEMPEST AT THAT +533-131564-0012-780: BUT HANG IT THAT'S NOT MY FAULT +533-131564-0013-781: NOT (YEARS->EARS) FOR SHE'S ONLY FIVE AND TWENTY +533-131564-0014-782: WHAT (WOULD->DID) YOU MAKE OF ME AND THE CHILDREN TO BE SURE THAT (WORRY HER TO->WERE HE HURT) DEATH BETWEEN THEM +533-131564-0015-783: I KNOW THEY ARE BLESS THEM +533-131564-0016-784: (HE FOLLOWED->IF ALL OF) ME INTO THE LIBRARY +533-131564-0017-785: I SOUGHT OUT AND PUT INTO HIS HANDS TWO OF (MILICENT'S->MILICSON'S) LETTERS ONE (DATED->DID IT) FROM LONDON AND WRITTEN DURING ONE OF HIS (WILDEST->WALDEST) SEASONS OF RECKLESS DISSIPATION THE OTHER IN THE COUNTRY DURING (A LUCID->ELUSIVE) INTERVAL +533-131564-0018-786: THE FORMER WAS FULL OF TROUBLE AND ANGUISH NOT ACCUSING HIM BUT DEEPLY REGRETTING HIS CONNECTION WITH HIS PROFLIGATE COMPANIONS ABUSING MISTER (GRIMSBY AND->GRIM'S BEING) OTHERS INSINUATING BITTER THINGS AGAINST MISTER HUNTINGDON AND MOST (INGENIOUSLY THROWING->INGENUOUSLY THREW IN) THE BLAME OF HER HUSBAND'S MISCONDUCT ON (TO->THE) OTHER (MEN'S->MAN'S) SHOULDERS +533-131564-0019-787: I'VE BEEN A CURSED RASCAL GOD KNOWS SAID HE AS HE GAVE IT (A HEARTY->EARTHLY) SQUEEZE BUT YOU SEE IF I DON'T MAKE AMENDS FOR IT (D N->THEN) ME IF I DON'T +533-131564-0020-788: IF YOU (INTEND TO REFORM->INSENT WITH FORM) INVOKE GOD'S BLESSING (HIS->IS A) MERCY (AND HIS AID NOT HIS CURSE->IN THIS APE NOR DISCOURSE) +533-131564-0021-789: GOD HELP ME THEN FOR (I'M->I AM) SURE I (NEED IT->NEEDED) +533-131564-0022-790: (WHERE'S->WHERE IS) MILICENT +533-131564-0023-791: NAY NOT I SAID HE TURNING (HER ROUND->AROUND) AND PUSHING (HER->IT) TOWARDS ME +533-131564-0024-792: MILICENT FLEW TO THANK ME (OVERFLOWING WITH->OVERWHELMING ITS) GRATITUDE +533-131564-0025-793: CRIED SHE I COULDN'T HAVE INFLUENCED HIM I'M SURE BY ANYTHING THAT I COULD HAVE SAID +533-131564-0026-794: YOU NEVER TRIED ME (MILLY->MERELY) SAID HE +533-131564-0027-795: AFTER THAT THEY WILL REPAIR TO THEIR COUNTRY HOME +5442-32873-0000-1365: CAPTAIN LAKE DID NOT LOOK AT ALL LIKE A LONDON DANDY NOW +5442-32873-0001-1366: THERE WAS A VERY NATURAL SAVAGERY AND DEJECTION (THERE->THEN) AND A WILD (LEER->YARD) IN HIS YELLOW EYES RACHEL SAT DOWN +5442-32873-0002-1367: (A->AND) SLAVE ONLY THINK A SLAVE +5442-32873-0003-1368: OH FRIGHTFUL FRIGHTFUL IS IT A DREAM +5442-32873-0004-1369: OH FRIGHTFUL (FRIGHTFUL->DREADFUL) +5442-32873-0005-1370: STANLEY STANLEY IT WOULD BE MERCY TO KILL ME SHE BROKE OUT AGAIN +5442-32873-0006-1371: BRIGHT AND NATTY (WERE THE CHINTZ->WITH A CHIN'S) CURTAINS AND THE LITTLE TOILET SET OUT NOT INELEGANTLY AND HER PET PIPING GOLDFINCH ASLEEP ON HIS PERCH WITH HIS BIT OF SUGAR BETWEEN THE (WIRES->WIVES) OF HIS CAGE HER PILLOW SO WHITE AND UNPRESSED WITH ITS LITTLE EDGING OF LACE +5442-32873-0007-1372: WHEN HE CAME BACK TO THE DRAWING ROOM A (TOILET->TALLED) BOTTLE OF (EAU DE COLOGNE->OVERCLONE) IN HIS HAND WITH HER LACE HANDKERCHIEF HE BATHED HER (TEMPLES->TEMPLE) AND FOREHEAD +5442-32873-0008-1373: THERE WAS NOTHING VERY BROTHERLY IN HIS LOOK AS HE PEERED INTO (HER->A) PALE SHARP FEATURES DURING THE PROCESS +5442-32873-0009-1374: THERE DON'T MIND ME SHE SAID SHARPLY AND GETTING UP SHE LOOKED DOWN AT HER DRESS AND THIN SHOES AND SEEMING TO RECOLLECT HERSELF SHE TOOK THE CANDLE HE HAD JUST SET DOWN AND WENT SWIFTLY TO HER ROOM +5442-32873-0010-1375: AND SHE THREW BACK HER VEIL AND GOING HURRIEDLY TO THE TOILET MECHANICALLY SURVEYED HERSELF (IN->FROM) THE GLASS 
+5442-32873-0011-1376: (RACHEL LAKE RACHEL LAKE->RIGIDLY TO MAKE) WHAT ARE YOU NOW +5442-32873-0012-1377: I'LL STAY HERE THAT IS IN THE DRAWING ROOM SHE ANSWERED AND THE FACE WAS WITHDRAWN +5442-32873-0013-1378: (HE SLACKENED HIS PACE->HIS CLACK IN THE SPACE) AND (TAPPED->TOP) SHARPLY AT THE LITTLE WINDOW OF THAT MODEST POST OFFICE AT WHICH THE YOUNG LADIES IN THE PONY CARRIAGE HAD PULLED UP THE DAY BEFORE AND WITHIN WHICH LUKE (WAGGOT->RAGGED) WAS WONT TO SLEEP IN A SORT OF WOODEN BOX THAT FOLDED UP AND APPEARED TO BE A CHEST OF DRAWERS ALL DAY +5442-32873-0014-1379: (LUKE->LOOK) TOOK CARE OF MISTER LARKIN'S (DOGS->DOG) AND GROOMED MISTER WYLDER'S HORSE AND CLEANED UP HIS (DOG->DOOR) CART FOR MARK BEING CLOSE ABOUT MONEY AND FINDING THAT THE THING WAS TO BE DONE MORE CHEAPLY THAT WAY PUT UP HIS HORSE AND (DOG->DOOR) CART IN THE POST OFFICE PREMISES AND SO EVADED THE LIVERY CHARGES OF THE BRANDON ARMS +5442-32873-0015-1380: BUT LUKE WAS (NOT->KNOWN) THERE AND CAPTAIN LAKE RECOLLECTING HIS HABITS AND HIS HAUNT HURRIED ON TO THE SILVER LION WHICH HAS ITS GABLE TOWARDS THE COMMON ONLY ABOUT A HUNDRED STEPS AWAY FOR DISTANCES ARE NOT GREAT IN (GYLINGDEN->GILINGDEN) +5442-32873-0016-1381: HERE WERE THE (FLOW->FLOOR) OF SOUL AND OF STOUT LONG PIPES LONG YARNS AND TOLERABLY LONG CREDITS AND THE HUMBLE (SCAPEGRACES->CAPE BRACES) OF THE TOWN RESORTED THITHER FOR THE PLEASURES OF A CLUB LIFE AND OFTEN REVELLED DEEP INTO THE SMALL HOURS OF THE MORNING +5442-32873-0017-1382: LOSE NO TIME (AND->BUT) I'LL GIVE YOU HALF A CROWN +5442-32873-0018-1383: LUKE STUCK ON HIS GREASY (WIDEAWAKE->WIDE AWAKE) AND IN A FEW MINUTES MORE THE (DOG->DOOR) CART WAS (TRUNDLED->TUMBLED) OUT INTO THE LANE AND THE HORSE HARNESSED WENT BETWEEN THE SHAFTS WITH THAT WONDERFUL CHEERFULNESS WITH WHICH (THEY->THEIR) BEAR TO BE CALLED UP (UNDER->AND THE) STARTLING CIRCUMSTANCES (AT->AND) UNSEASONABLE HOURS +5442-32873-0019-1384: IF I THOUGHT YOU'D FAIL ME NOW (TAMAR->TO MORROW) I SHOULD NEVER COME BACK GOOD NIGHT (TAMAR->TO MOR) +5442-41168-0000-1385: THE ACT SAID THAT IN CASE OF DIFFERENCE OF OPINION THERE MUST BE A BALLOT +5442-41168-0001-1386: HE WENT UP TO THE TABLE AND STRIKING IT WITH HIS FINGER RING HE SHOUTED LOUDLY A BALLOT +5442-41168-0002-1387: HE WAS SHOUTING FOR THE VERY (COURSE SERGEY->COARSE SURGY) IVANOVITCH HAD PROPOSED BUT IT WAS EVIDENT THAT HE HATED HIM AND ALL HIS PARTY AND THIS FEELING OF HATRED SPREAD THROUGH THE WHOLE PARTY AND ROUSED IN OPPOSITION TO IT THE SAME VINDICTIVENESS THOUGH IN A MORE SEEMLY FORM ON THE OTHER SIDE +5442-41168-0003-1388: SHOUTS WERE RAISED AND FOR A MOMENT ALL WAS CONFUSION SO THAT THE MARSHAL OF THE PROVINCE HAD TO CALL FOR (ORDER->ODO) A BALLOT +5442-41168-0004-1389: WE SHED OUR BLOOD FOR OUR COUNTRY +5442-41168-0005-1390: THE CONFIDENCE OF THE MONARCH (*->BUT) NO CHECKING THE ACCOUNTS OF THE (MARSHAL HE'S->MARTIAN IS) NOT A CASHIER BUT THAT'S NOT THE POINT +5442-41168-0006-1391: VOTES PLEASE BEASTLY +5442-41168-0007-1392: THEY EXPRESSED THE MOST IMPLACABLE HATRED +5442-41168-0008-1393: LEVIN DID NOT IN THE LEAST UNDERSTAND WHAT WAS THE MATTER AND HE (MARVELED->MARVELLED) AT THE PASSION WITH WHICH IT WAS DISPUTED WHETHER OR NOT THE DECISION ABOUT (FLEROV->FLARE OFF) SHOULD BE PUT TO THE VOTE +5442-41168-0009-1394: HE FORGOT AS (SERGEY IVANOVITCH->SO GIVANOVITCH) EXPLAINED TO HIM AFTERWARDS THIS (SYLLOGISM->SYLLISM) THAT IT WAS NECESSARY FOR THE PUBLIC GOOD TO GET RID OF THE MARSHAL OF THE PROVINCE THAT TO GET (RID OF->INTO) THE (MARSHAL->MARTIAN) IT WAS NECESSARY TO HAVE A 
MAJORITY OF VOTES THAT TO GET A MAJORITY OF VOTES IT WAS NECESSARY TO SECURE (FLEROV'S->FYOV'S) RIGHT TO VOTE THAT TO (SECURE->SECURED) THE RECOGNITION OF (FLEROV'S->FLORO'S) RIGHT TO VOTE THEY MUST DECIDE ON THE INTERPRETATION TO BE PUT ON THE ACT +5442-41168-0010-1395: BUT LEVIN FORGOT ALL THAT AND IT WAS PAINFUL TO HIM TO SEE ALL THESE EXCELLENT PERSONS FOR WHOM HE HAD A RESPECT IN SUCH AN UNPLEASANT AND VICIOUS STATE OF EXCITEMENT +5442-41168-0011-1396: TO ESCAPE FROM THIS PAINFUL FEELING HE WENT AWAY INTO THE OTHER ROOM WHERE THERE WAS NOBODY EXCEPT THE WAITERS AT THE REFRESHMENT BAR +5442-41168-0012-1397: HE PARTICULARLY LIKED THE WAY ONE (GRAY WHISKERED->GREY WHISKIRT) WAITER WHO SHOWED HIS (SCORN->CORN) FOR THE OTHER YOUNGER ONES AND WAS (JEERED->JEWED) AT BY THEM WAS TEACHING THEM HOW TO FOLD UP NAPKINS PROPERLY +5442-41168-0013-1398: LEVIN ADVANCED BUT UTTERLY FORGETTING WHAT HE WAS TO DO AND MUCH EMBARRASSED HE TURNED TO SERGEY IVANOVITCH WITH THE QUESTION WHERE AM I TO PUT IT +5442-41168-0014-1399: (SERGEY IVANOVITCH->SOJOURNOVITCH) FROWNED +5442-41168-0015-1400: THAT IS A MATTER FOR EACH MAN'S OWN DECISION HE SAID SEVERELY +5442-41168-0016-1401: HAVING PUT IT IN HE RECOLLECTED THAT HE OUGHT TO HAVE THRUST HIS LEFT HAND TOO AND SO HE THRUST IT (IN->*) THOUGH TOO LATE AND STILL MORE OVERCOME WITH CONFUSION HE BEAT A HASTY RETREAT INTO THE BACKGROUND +5442-41168-0017-1402: A HUNDRED AND TWENTY SIX FOR ADMISSION NINETY EIGHT AGAINST +5442-41168-0018-1403: SANG (OUT->ALL) THE VOICE OF THE SECRETARY WHO COULD NOT PRONOUNCE THE LETTER R +5442-41168-0019-1404: THEN THERE WAS A LAUGH (A BUTTON->OF BOTTOM) AND TWO (NUTS->KNOTS) WERE FOUND IN THE BOX +5442-41168-0020-1405: BUT THE OLD PARTY DID NOT CONSIDER THEMSELVES CONQUERED +5442-41168-0021-1406: (IN REPLY SNETKOV->INTERPLIES NEDCOV) SPOKE OF THE TRUST (THE->AND) NOBLEMEN OF THE PROVINCE HAD PLACED (IN->ON) HIM THE (AFFECTION->EFFECT ON) THEY HAD SHOWN HIM WHICH HE DID NOT DESERVE AS HIS ONLY MERIT HAD BEEN HIS ATTACHMENT TO THE NOBILITY TO WHOM HE HAD DEVOTED TWELVE YEARS OF SERVICE +5442-41168-0022-1407: THIS EXPRESSION IN THE MARSHAL'S FACE WAS PARTICULARLY TOUCHING TO LEVIN BECAUSE ONLY THE DAY (BEFORE->FOR) HE HAD BEEN AT HIS HOUSE ABOUT HIS (TRUSTEE->TRUSTY) BUSINESS AND HAD SEEN HIM IN ALL HIS GRANDEUR A KIND HEARTED FATHERLY MAN +5442-41168-0023-1408: IF THERE ARE MEN YOUNGER AND MORE DESERVING THAN I LET THEM SERVE +5442-41168-0024-1409: AND THE MARSHAL DISAPPEARED THROUGH A SIDE DOOR +5442-41168-0025-1410: (THEY->THERE) WERE TO PROCEED IMMEDIATELY TO THE ELECTION +5442-41168-0026-1411: (TWO->DO) NOBLE GENTLEMEN WHO HAD A WEAKNESS (FOR->WAS) STRONG DRINK HAD BEEN MADE DRUNK BY THE PARTISANS OF SNETKOV AND (A->THE) THIRD HAD BEEN ROBBED OF HIS UNIFORM +5442-41168-0027-1412: ON LEARNING THIS THE NEW PARTY HAD MADE HASTE DURING THE DISPUTE ABOUT (FLEROV->FLAREFF) TO SEND SOME OF THEIR MEN IN A SLEDGE TO CLOTHE THE STRIPPED GENTLEMAN AND TO BRING ALONG ONE OF THE INTOXICATED TO THE MEETING +5442-41169-0000-1413: LEVIN DID NOT CARE TO EAT AND HE WAS NOT SMOKING HE DID NOT WANT TO JOIN HIS OWN FRIENDS THAT IS (SERGEY->SO SHE) IVANOVITCH STEPAN ARKADYEVITCH SVIAZHSKY AND THE REST BECAUSE VRONSKY IN (HIS EQUERRY'S->AN EQUERRIES) UNIFORM WAS STANDING WITH THEM IN EAGER CONVERSATION +5442-41169-0001-1414: HE WENT TO THE WINDOW AND SAT DOWN SCANNING THE GROUPS AND LISTENING TO WHAT WAS BEING SAID AROUND HIM +5442-41169-0002-1415: HE'S SUCH A BLACKGUARD +5442-41169-0003-1416: I HAVE TOLD HIM SO BUT IT MAKES NO DIFFERENCE ONLY THINK OF 
IT +5442-41169-0004-1417: THESE PERSONS WERE UNMISTAKABLY SEEKING A PLACE WHERE THEY COULD TALK WITHOUT BEING OVERHEARD +5442-41169-0005-1418: SHALL WE GO ON YOUR EXCELLENCY FINE CHAMPAGNE +5442-41169-0006-1419: (LAST YEAR->MASTER) AT OUR DISTRICT (MARSHAL->MARTIAL) NIKOLAY IVANOVITCH'S +5442-41169-0007-1420: OH STILL JUST THE SAME ALWAYS AT A LOSS THE LANDOWNER ANSWERED WITH A RESIGNED SMILE BUT WITH AN EXPRESSION OF SERENITY AND CONVICTION THAT SO IT MUST BE +5442-41169-0008-1421: WHY WHAT IS (THERE->THAT) TO UNDERSTAND +5442-41169-0009-1422: (THERE'S->THERE IS) NO MEANING IN IT AT ALL +5442-41169-0010-1423: THEN (TOO->DO) ONE MUST KEEP UP CONNECTIONS +5442-41169-0011-1424: IT'S A MORAL OBLIGATION OF A SORT +5442-41169-0012-1425: AND THEN TO TELL THE TRUTH THERE'S ONE'S OWN (INTERESTS->INTEREST) +5442-41169-0013-1426: (THEY'RE->THEIR) PROPRIETORS OF A SORT BUT (WE'RE->WE ARE) THE LANDOWNERS +5442-41169-0014-1427: THAT IT MAY BE BUT STILL IT OUGHT TO BE TREATED A LITTLE MORE RESPECTFULLY +5442-41169-0015-1428: IF (WE'RE->WE ARE) LAYING OUT A GARDEN (PLANNING->CLIMBING) ONE BEFORE THE HOUSE YOU KNOW AND THERE (YOU'VE->YOU HAVE) A TREE (THAT'S->THAT) STOOD (FOR->IN) CENTURIES IN THE VERY SPOT OLD AND GNARLED IT MAY BE AND YET YOU DON'T CUT DOWN THE OLD FELLOW TO MAKE ROOM FOR THE (FLOWERBEDS->FLOWER BEDS) BUT LAY OUT YOUR BEDS SO AS TO TAKE ADVANTAGE OF THE TREE +5442-41169-0016-1429: WELL AND HOW IS YOUR LAND DOING +5442-41169-0017-1430: BUT ONE'S WORK IS THROWN IN FOR NOTHING +5442-41169-0018-1431: OH WELL ONE DOES IT WHAT WOULD YOU HAVE +5442-41169-0019-1432: AND (WHAT'S->ONCE) MORE THE LANDOWNER WENT ON LEANING HIS ELBOWS ON THE WINDOW AND CHATTING ON MY SON I MUST TELL YOU HAS NO TASTE FOR IT +5442-41169-0020-1433: SO THERE'LL BE NO ONE TO KEEP IT UP AND YET ONE DOES IT +5442-41169-0021-1434: WE WALKED ABOUT THE FIELDS AND THE GARDEN NO SAID HE STEPAN (VASSILIEVITCH->WISLOVITCH) EVERYTHING'S WELL LOOKED AFTER BUT YOUR (GARDEN'S->GARDENS) NEGLECTED +5442-41169-0022-1435: TO MY THINKING I'D (CUT->GOT) DOWN (THAT LIME->THE LINE) TREE +5442-41169-0023-1436: HERE (YOU'VE->YOU) THOUSANDS OF LIMES AND EACH WOULD MAKE TWO GOOD BUNDLES OF BARK +5442-41169-0024-1437: YOU'RE MARRIED (I'VE->I) HEARD SAID THE LANDOWNER +5442-41169-0025-1438: YES (IT'S RATHER->AND JOHN IS) STRANGE HE WENT ON +5442-41169-0026-1439: THE LANDOWNER CHUCKLED UNDER HIS WHITE (MUSTACHES->MOUSTACHES) +5442-41169-0027-1440: WHY DON'T WE (CUT->GO) DOWN OUR (PARKS->BOX) FOR TIMBER +5442-41169-0028-1441: SAID LEVIN RETURNING TO A THOUGHT THAT HAD STRUCK HIM +5442-41169-0029-1442: THERE'S (A->THE) CLASS INSTINCT TOO OF WHAT ONE OUGHT AND (OUGHTN'T->OUGHT NOT KNOWN) TO DO +5442-41169-0030-1443: THERE'S THE PEASANTS TOO I WONDER AT THEM SOMETIMES ANY GOOD PEASANT TRIES TO TAKE ALL THE LAND HE CAN +5442-41169-0031-1444: WITHOUT A RETURN TOO (AT->ADD) A SIMPLE (LOSS->LAWS) +5484-24317-0000-571: WHEN HE CAME FROM THE BATH (PROCLUS->PROCLASS) VISITED HIM AGAIN +5484-24317-0001-572: BUT (HERMON->HARMON) WAS NOT IN THE MOOD TO SHARE A JOYOUS REVEL AND HE FRANKLY SAID SO ALTHOUGH IMMEDIATELY AFTER HIS RETURN HE HAD ACCEPTED THE INVITATION TO THE FESTIVAL WHICH THE WHOLE FELLOWSHIP OF ARTISTS WOULD GIVE THE FOLLOWING DAY (IN HONOUR->AN HONOR) OF THE (SEVENTIETH->SEVENTEENTH) BIRTHDAY OF THE OLD SCULPTOR (EUPHRANOR->EUPHRANER) +5484-24317-0002-573: SHE WOULD APPEAR HERSELF AT DESSERT AND THE BANQUET MUST THEREFORE BEGIN AT AN UNUSUALLY EARLY HOUR +5484-24317-0003-574: SO THE ARTIST FOUND HIMSELF OBLIGED TO RELINQUISH HIS OPPOSITION 
+5484-24317-0004-575: THE BANQUET WAS TO BEGIN IN A FEW HOURS YET HE COULD NOT LET THE DAY PASS WITHOUT SEEING DAPHNE AND TELLING HER THE WORDS OF THE ORACLE +5484-24317-0005-576: HE LONGED WITH ARDENT YEARNING FOR THE SOUND OF HER VOICE AND STILL MORE TO UNBURDEN HIS SORELY TROUBLED SOUL TO HER +5484-24317-0006-577: SINCE HIS RETURN FROM THE ORACLE THE FEAR THAT THE (RESCUED->RESCUE) DEMETER MIGHT YET BE THE WORK OF (MYRTILUS->MERTOLUS) HAD AGAIN MASTERED HIM +5484-24317-0007-578: THE APPROVAL AS WELL AS THE DOUBTS WHICH IT (AROUSED->ARISED) IN OTHERS STRENGTHENED HIS OPINION ALTHOUGH EVEN NOW HE COULD NOT SUCCEED IN BRINGING IT INTO HARMONY WITH THE FACTS +5484-24317-0008-579: THEN HE WENT DIRECTLY TO THE (NEIGHBOURING->NEIGHBORING) PALACE THE QUEEN MIGHT HAVE APPEARED ALREADY AND IT WOULD NOT DO TO KEEP HER WAITING +5484-24317-0009-580: HITHERTO THE MERCHANT HAD BEEN INDUCED IT IS TRUE TO ADVANCE LARGE SUMS OF MONEY TO THE QUEEN BUT THE LOYAL DEVOTION WHICH HE SHOWED TO HER ROYAL HUSBAND HAD RENDERED IT IMPOSSIBLE TO GIVE HIM EVEN A HINT OF THE CONSPIRACY +5484-24317-0010-581: WHEN (HERMON ENTERED->HERMAN ANSWERED) THE RESIDENCE OF THE (GRAMMATEUS->GRAMMATIUS) IN THE PALACE THE GUESTS HAD ALREADY ASSEMBLED +5484-24317-0011-582: (THE PLACE->THEY PLACED) BY (HERMON'S->HERMANN'S) SIDE WHICH (ALTHEA->ALTHIE) HAD CHOSEN FOR HERSELF WOULD THEN BE GIVEN UP TO (ARSINOE->ARSENO) +5484-24317-0012-583: TRUE AN INTERESTING CONVERSATION STILL HAD POWER TO CHARM HIM BUT OFTEN DURING ITS CONTINUANCE THE FULL CONSCIOUSNESS OF HIS MISFORTUNE FORCED ITSELF UPON HIS MIND FOR THE MAJORITY OF THE SUBJECTS DISCUSSED BY THE ARTISTS CAME TO THEM THROUGH THE MEDIUM OF SIGHT AND REFERRED TO NEW CREATIONS OF ARCHITECTURE SCULPTURE AND PAINTING FROM WHOSE ENJOYMENT (HIS->IS) BLINDNESS (DEBARRED->DEBARED) HIM +5484-24317-0013-584: A STRANGER OUT OF HIS OWN SPHERE HE (FELT->FELL) CHILLED AMONG THESE CLOSELY UNITED MEN AND WOMEN TO WHOM NO TIE BOUND HIM SAVE THE PRESENCE OF THE SAME HOST +5484-24317-0014-585: (CRATES->CREEDS) HAD REALLY BEEN INVITED IN ORDER TO WIN HIM OVER TO THE QUEEN'S CAUSE BUT CHARMING FAIR HAIRED (NICO->NIGO) HAD BEEN COMMISSIONED BY THE CONSPIRATORS TO PERSUADE HIM TO SING (ARSINOE'S->ARSENO'S) PRAISES AMONG HIS PROFESSIONAL ASSOCIATES +5484-24317-0015-586: HIS SON HAD BEEN (THIS->THE) ROYAL (DAME'S->JAMES'S) FIRST HUSBAND AND SHE HAD DESERTED HIM TO MARRY (LYSIMACHUS->LYSMACHUS) THE AGED KING OF THRACE +5484-24317-0016-587: THE KING'S SISTER THE OBJECT OF HIS LOVE CRIED (HERMON->HARMON) INCREDULOUSLY +5484-24317-0017-588: WE WOMEN ARE ONLY AS OLD AS WE LOOK AND THE LEECHES (*->ENTIRE) AND (TIRING WOMEN->WOMAN) OF THIS BEAUTY OF FORTY PRACTISE ARTS WHICH GIVE HER THE APPEARANCE OF TWENTY FIVE YET PERHAPS THE KING VALUES HER INTELLECT MORE THAN HER PERSON AND THE WISDOM OF A HUNDRED SERPENTS IS CERTAINLY UNITED IN THIS WOMAN'S HEAD +5484-24317-0018-589: THE THREE MOST TRUSTWORTHY ONES (ARE HERE AMYNTAS->I HEAR I MEANTIS) THE LEECH CHRYSIPPUS (AND->IN) THE ADMIRABLE (PROCLUS->PROCLASS) +5484-24317-0019-590: LET US HOPE THAT YOU WILL MAKE THIS THREE LEAVED CLOVER THE LUCK PROMISING (FOUR LEAVED->FOLIEVED) ONE +5484-24317-0020-591: YOUR UNCLE TOO HAS OFTEN WITH (PRAISEWORTHY->PRAISED WORTHY) GENEROSITY HELPED (ARSINOE->OFTEN KNOW) IN MANY (AN->*) EMBARRASSMENT +5484-24317-0021-592: HOW LONG HE KEPT YOU WAITING (FOR->FROM) THE FIRST WORD CONCERNING A WORK WHICH JUSTLY TRANSPORTED THE WHOLE CITY WITH DELIGHT +5484-24317-0022-593: WHEN HE DID FINALLY SUMMON YOU HE SAID THINGS WHICH MUST HAVE WOUNDED 
YOU +5484-24317-0023-594: THAT IS GOING TOO FAR REPLIED (HERMON->HARMON) +5484-24317-0024-595: HE WINKED AT HER AND MADE A SIGNIFICANT GESTURE AS HE SPOKE AND THEN INFORMED THE BLIND ARTIST HOW GRACIOUSLY (ARSINOE->ARSENO) HAD REMEMBERED HIM WHEN SHE HEARD OF THE REMEDY BY WHOSE AID MANY A WONDERFUL CURE OF BLIND (EYES->EYE) HAD BEEN MADE IN (RHODES->ROADS) +5484-24317-0025-596: THE ROYAL LADY HAD INQUIRED ABOUT HIM AND HIS SUFFERINGS WITH ALMOST SISTERLY INTEREST AND (ALTHEA->ALTHIA) EAGERLY CONFIRMED THE STATEMENT +5484-24317-0026-597: (HERMON->HERMAN) LISTENED TO THE (PAIR IN->PARENT) SILENCE +5484-24317-0027-598: THE (RHODIAN->RADIAN) WAS JUST BEGINNING TO PRAISE (ARSINOE->ARSENAL) ALSO AS A SPECIAL FRIEND AND CONNOISSEUR OF THE SCULPTOR'S ART WHEN CRATES (HERMON'S->HERMANN'S) FELLOW STUDENT ASKED THE BLIND ARTIST IN BEHALF OF HIS BEAUTIFUL COMPANION WHY HIS DEMETER WAS PLACED UPON A PEDESTAL (WHICH->WITCH) TO OTHERS AS WELL AS HIMSELF SEEMED TOO HIGH FOR THE SIZE OF THE STATUE +5484-24317-0028-599: YET WHAT MATTERED IT EVEN IF THESE MISERABLE PEOPLE CONSIDERED THEMSELVES DECEIVED AND POINTED THE FINGER OF SCORN AT HIM +5484-24317-0029-600: A WOMAN WHO YEARNS FOR THE REGARD OF ALL MEN AND MAKES LOVE A TOY EASILY LESSENS THE DEMANDS SHE IMPOSES UPON INDIVIDUALS +5484-24317-0030-601: ONLY EVEN THOUGH LOVE HAS WHOLLY DISAPPEARED SHE STILL CLAIMS CONSIDERATION AND (ALTHEA->ALTHIA) DID NOT WISH TO LOSE (HERMON'S->HARMON'S) REGARD +5484-24317-0031-602: HOW INDIFFERENT YOU LOOK BUT I TELL YOU HER DEEP BLUE EYES FLASHED AS SHE SPOKE THAT SO LONG AS YOU (WERE->WAS) STILL A GENUINE CREATING ARTIST THE CASE WAS DIFFERENT +5484-24317-0032-603: THOUGH SO LOUD A DENIAL IS WRITTEN ON YOUR FACE I PERSIST IN MY CONVICTION AND THAT NO IDLE DELUSION (ENSNARES->AND SNATHS) ME I CAN PROVE +5484-24317-0033-604: IT WAS NAY IT COULD HAVE BEEN NOTHING ELSE THAT VERY SPIDER +5484-24318-0000-605: NOT A SOUND IF YOU VALUE YOUR LIVES +5484-24318-0001-606: TO OFFER RESISTANCE WOULD HAVE BEEN MADNESS FOR EVEN (HERMON->HERMANN) PERCEIVED BY THE LOUD CLANKING OF WEAPONS AROUND THEM (THE->THEY) GREATLY SUPERIOR POWER OF THE ENEMY AND THEY WERE ACTING BY THE ORDERS OF THE KING TO THE PRISON NEAR THE PLACE OF EXECUTION +5484-24318-0002-607: WAS HE TO BE LED TO THE EXECUTIONER'S BLOCK +5484-24318-0003-608: WHAT PLEASURE HAD LIFE TO OFFER HIM THE BLIND MAN WHO WAS ALREADY DEAD TO HIS ART +5484-24318-0004-609: OUGHT HE NOT TO GREET (THIS->HIS) SUDDEN END AS A (BOON->BOOM) FROM THE IMMORTALS +5484-24318-0005-610: DID IT NOT SPARE HIM A HUMILIATION AS GREAT AND PAINFUL AS COULD BE IMAGINED +5484-24318-0006-611: WHATEVER MIGHT AWAIT HIM HE DESIRED NO BETTER FATE +5484-24318-0007-612: IF HE HAD PASSED INTO ANNIHILATION HE (HERMON->HERMAN) WISHED TO FOLLOW HIM THITHER AND ANNIHILATION CERTAINLY MEANT REDEMPTION FROM PAIN AND MISERY +5484-24318-0008-613: BUT IF HE WERE DESTINED TO MEET HIS (MYRTILUS->BURTLES) AND HIS MOTHER IN THE WORLD BEYOND THE GRAVE WHAT HAD HE NOT TO TELL THEM HOW SURE HE WAS (OF->A) FINDING A JOYFUL RECEPTION THERE FROM BOTH +5484-24318-0009-614: THE POWER WHICH DELIVERED HIM OVER TO DEATH JUST AT THAT MOMENT WAS NOT NEMESIS NO IT WAS A KINDLY DEITY +5484-24318-0010-615: YET IT WAS NO ILLUSION THAT DECEIVED HIM +5484-24318-0011-616: AGAIN HE HEARD THE BELOVED VOICE AND THIS TIME IT ADDRESSED NOT ONLY HIM BUT WITH THE UTMOST HASTE THE COMMANDER OF THE SOLDIERS +5484-24318-0012-617: SOMETIMES WITH TOUCHING ENTREATY SOMETIMES WITH IMPERIOUS COMMAND SHE PROTESTED AFTER GIVING HIM HER NAME THAT THIS MATTER COULD 
BE NOTHING BUT AN UNFORTUNATE MISTAKE +5484-24318-0013-618: LASTLY WITH EARNEST WARMTH SHE BESOUGHT HIM BEFORE TAKING THE PRISONERS AWAY TO PERMIT HER TO SPEAK TO THE COMMANDING GENERAL PHILIPPUS HER FATHER'S GUEST WHO SHE WAS CERTAIN WAS IN THE PALACE +5484-24318-0014-619: CRIED (HERMON->HERMANN) IN GRATEFUL AGITATION BUT SHE WOULD NOT LISTEN TO HIM AND (FOLLOWED->FOLLOW) THE SOLDIER WHOM THE CAPTAIN DETAILED TO GUIDE HER INTO THE PALACE +5484-24318-0015-620: TO MORROW YOU SHALL CONFESS TO ME WHO TREACHEROUSLY DIRECTED YOU TO THIS DANGEROUS PATH +5484-24318-0016-621: DAPHNE AGAIN PLEADED FOR THE LIBERATION OF THE PRISONERS BUT (PHILIPPUS->PHILIP WAS) SILENCED HER WITH (THE->A) GRAVE EXCLAMATION THE ORDER OF THE KING +5484-24318-0017-622: AS SOON AS THE CAPTIVE ARTIST WAS ALONE WITH (THE->A) WOMAN HE LOVED HE CLASPED HER HAND POURING FORTH INCOHERENT WORDS OF THE MOST ARDENT GRATITUDE AND WHEN HE FELT HER WARMLY (RETURN->RETURNED) THE PRESSURE HE COULD NOT RESTRAIN THE DESIRE TO CLASP HER TO HIS HEART +5484-24318-0018-623: IN SPITE OF HIS DEEP (MENTAL->MANTLE) DISTRESS HE COULD HAVE SHOUTED ALOUD IN HIS DELIGHT AND GRATITUDE +5484-24318-0019-624: HE MIGHT NOW HAVE BEEN PERMITTED TO BIND FOREVER TO HIS LIFE THE WOMAN WHO HAD JUST RESCUED HIM FROM THE GREATEST DANGER BUT THE CONFESSION HE MUST MAKE TO HIS FELLOW ARTISTS IN THE (PALAESTRA->PELLESTRA) THE FOLLOWING MORNING STILL SEALED HIS LIPS YET IN THIS HOUR HE FELT THAT HE WAS UNITED TO HER AND OUGHT NOT TO CONCEAL WHAT AWAITED HIM SO OBEYING A STRONG IMPULSE HE EXCLAIMED YOU KNOW THAT I LOVE YOU +5484-24318-0020-625: I LOVE YOU AND HAVE LOVED YOU ALWAYS +5484-24318-0021-626: (DAPHNE->JAPANE) EXCLAIMED TENDERLY WHAT MORE IS NEEDED +5484-24318-0022-627: BUT (HERMON->HERMAN) WITH DROOPING HEAD MURMURED TO MORROW I SHALL NO LONGER BE WHAT I AM NOW +5484-24318-0023-628: THEN (DAPHNE->JAPANE) RAISED HER FACE TO HIS ASKING SO THE (DEMETER->DEMEANOR) IS THE WORK OF (MYRTILUS->MYRTALIS) +5484-24318-0024-629: WHAT A TERRIBLE ORDEAL AGAIN AWAITS YOU +5484-24318-0025-630: AND I FOOL BLINDED ALSO IN MIND COULD BE VEXED WITH YOU FOR IT +5484-24318-0026-631: BRING THIS BEFORE YOUR MIND AND EVERYTHING ELSE THAT YOU MUST ACCEPT WITH IT IF YOU CONSENT (WHEN->WITH) THE TIME ARRIVES TO BECOME MINE CONCEAL (AND PALLIATE->IMPALION TO) NOTHING +5484-24318-0027-632: (SO ARCHIAS->SARKAIUS) INTENDED TO LEAVE THE CITY ON ONE OF HIS OWN SHIPS THAT VERY DAY +5484-24318-0028-633: (HE->SHE) HIMSELF ON THE WAY TO EXPOSE HIMSELF TO THE MALICE AND MOCKERY OF THE WHOLE CITY +5484-24318-0029-634: HIS HEART CONTRACTED PAINFULLY AND HIS SOLICITUDE ABOUT HIS UNCLE'S FATE INCREASED WHEN (PHILIPPUS->PHILIPUS) INFORMED HIM THAT THE CONSPIRATORS HAD BEEN ARRESTED AT THE BANQUET AND HEADED BY (AMYNTAS->A MEANTESSE) THE (RHODIAN->RODIAN) CHRYSIPPUS AND (PROCLUS->PROCLAUS) HAD PERISHED BY THE EXECUTIONER'S SWORD AT SUNRISE +5484-24318-0030-635: BESIDES HE KNEW THAT THE OBJECT OF HIS LOVE WOULD NOT PART FROM HIM WITHOUT GRANTING HIM ONE LAST WORD +5484-24318-0031-636: ON THE WAY HIS HEART THROBBED ALMOST TO BURSTING +5484-24318-0032-637: EVEN (DAPHNE'S->THESE) IMAGE AND WHAT THREATENED HER FATHER AND HER WITH HIM (RECEDED->WAS SEATED) FAR INTO THE BACKGROUND +5484-24318-0033-638: HE WAS APPEARING BEFORE HIS COMPANIONS ONLY TO GIVE TRUTH ITS JUST DUE +5484-24318-0034-639: THE EGYPTIAN OBEYED AND HIS MASTER CROSSED THE WIDE SPACE STREWN WITH SAND AND APPROACHED THE STAGE WHICH HAD BEEN ERECTED FOR THE (FESTAL->FEAST OF) PERFORMANCES EVEN HAD HIS EYES RETAINED THE POWER OF SIGHT HIS 
BLOOD WAS (COURSING->COARSING) SO (WILDLY->WIDELY) THROUGH HIS VEINS THAT HE MIGHT PERHAPS HAVE BEEN UNABLE TO DISTINGUISH THE STATUES AROUND HIM AND THE THOUSANDS OF SPECTATORS WHO CROWDED CLOSELY TOGETHER RICHLY GARLANDED THEIR CHEEKS GLOWING WITH ENTHUSIASM SURROUNDED THE ARENA (HERMON->HERMANN) +5484-24318-0035-640: SHOUTED HIS FRIEND (SOTELES IN->SARTUOUS AND) JOYFUL SURPRISE IN THE MIDST OF (THIS->HIS) PAINFUL WALK (HERMON->HERE ON) +5484-24318-0036-641: EVEN WHILE HE BELIEVED HIMSELF TO BE THE CREATOR OF THE DEMETER HE HAD BEEN SERIOUSLY TROUBLED BY THE PRAISE OF SO MANY CRITICS BECAUSE IT HAD EXPOSED HIM TO THE SUSPICION OF HAVING BECOME FAITHLESS TO HIS ART AND HIS NATURE +5484-24318-0037-642: HONOUR TO (MYRTILUS->MARTILLUS) AND HIS ART BUT HE TRUSTED THIS NOBLE (FESTAL->FEAST AN) ASSEMBLAGE WOULD PARDON THE UNINTENTIONAL DECEPTION AND AID HIS PRAYER FOR RECOVERY +5764-299665-0000-405: AFTERWARD IT WAS SUPPOSED THAT HE WAS SATISFIED WITH THE BLOOD OF OXEN (LAMBS->LAMPS) AND DOVES AND THAT IN EXCHANGE FOR OR (ON->IN) ACCOUNT OF THESE SACRIFICES (THIS->THESE) GOD GAVE (RAIN->REIN) SUNSHINE AND HARVEST +5764-299665-0001-406: WHETHER HE WAS THE CREATOR OF YOURSELF AND MYSELF +5764-299665-0002-407: WHETHER ANY PRAYER WAS EVER ANSWERED +5764-299665-0003-408: WHY DID HE CREATE THE (INTELLECTUALLY->INTELLECTUAL) INFERIOR +5764-299665-0004-409: WHY DID HE CREATE THE DEFORMED AND HELPLESS WHY DID HE CREATE THE CRIMINAL THE IDIOTIC THE INSANE +5764-299665-0005-410: ARE THE FAILURES UNDER OBLIGATION TO THEIR CREATOR +5764-299665-0006-411: (IS HE RESPONSIBLE->HIS IRRESPONSIBLE) FOR ALL THE WARS THAT HAVE BEEN (WAGED->RAGED) FOR ALL THE INNOCENT BLOOD THAT HAS BEEN SHED +5764-299665-0007-412: (IS->IF) HE RESPONSIBLE FOR THE CENTURIES OF SLAVERY FOR THE BACKS THAT HAVE BEEN SCARRED WITH (THE->A) LASH FOR THE BABES THAT HAVE BEEN SOLD FROM THE BREASTS OF MOTHERS FOR THE FAMILIES THAT HAVE BEEN SEPARATED AND DESTROYED +5764-299665-0008-413: IS (THIS GOD->THE SCOTT) RESPONSIBLE FOR RELIGIOUS PERSECUTION FOR THE INQUISITION FOR THE (THUMB->TIME) SCREW AND RACK AND FOR ALL THE INSTRUMENTS OF TORTURE +5764-299665-0009-414: DID THIS (GOD ALLOW->GOT THE LOW) THE CRUEL AND VILE TO DESTROY THE BRAVE AND VIRTUOUS +5764-299665-0010-415: DID HE (ALLOW->ALONE) TYRANTS TO SHED (THE->A) BLOOD OF PATRIOTS +5764-299665-0011-416: CAN WE CONCEIVE OF A DEVIL BASE ENOUGH TO PREFER HIS ENEMIES TO HIS FRIENDS +5764-299665-0012-417: HOW CAN WE ACCOUNT FOR THE WILD BEASTS THAT (DEVOUR->THE FOUR) HUMAN BEINGS FOR THE (FANGED->FACT) SERPENTS WHOSE BITE IS DEATH +5764-299665-0013-418: HOW CAN WE ACCOUNT FOR A WORLD (WHERE LIFE FEEDS->WERE LIE FEATS) ON LIFE +5764-299665-0014-419: (DID->THE) INFINITE WISDOM INTENTIONALLY (PRODUCE THE->PRODUCED A) MICROSCOPIC BEASTS THAT (FEED->FEAT) UPON THE OPTIC (NERVE->NERVES) THINK OF BLINDING A MAN TO SATISFY THE APPETITE OF A MICROBE +5764-299665-0015-420: FEAR (BUILDS->BIDS) THE ALTAR AND OFFERS THE (SACRIFICE->SACRIFIC) +5764-299665-0016-421: FEAR ERECTS THE (CATHEDRAL->KITRAL) AND BOWS THE HEAD OF MAN IN WORSHIP +5764-299665-0017-422: LIPS RELIGIOUS AND FEARFUL TREMBLINGLY REPEAT THIS PASSAGE THOUGH HE SLAY ME YET WILL I TRUST HIM +5764-299665-0018-423: CAN WE SAY THAT HE CARED FOR THE CHILDREN OF MEN +5764-299665-0019-424: CAN WE SAY THAT HIS MERCY (ENDURETH->AND DURRED) FOREVER +5764-299665-0020-425: DO WE PROVE HIS GOODNESS BY SHOWING THAT HE HAS OPENED THE EARTH AND SWALLOWED (THOUSANDS->THOUSAND) OF HIS HELPLESS CHILDREN (OR->ALL) THAT WITH THE VOLCANOES HE HAS OVERWHELMED THEM 
WITH RIVERS OF FIRE +5764-299665-0021-426: WAS THERE GOODNESS WAS (THERE->THEIR) WISDOM IN THIS +5764-299665-0022-427: (OUGHT->ALL) THE SUPERIOR (RACES->RAYS) TO THANK GOD THAT THEY ARE NOT THE INFERIOR +5764-299665-0023-428: MOST PEOPLE (CLING->CLINK) TO THE SUPERNATURAL +5764-299665-0024-429: IF THEY GIVE UP ONE GOD THEY IMAGINE ANOTHER +5764-299665-0025-430: WHAT IS THIS POWER +5764-299665-0026-431: MAN ADVANCES (AND->A) NECESSARILY ADVANCES THROUGH EXPERIENCE +5764-299665-0027-432: A MAN WISHING TO GO TO A CERTAIN PLACE (COMES->COME) TO WHERE THE ROAD DIVIDES +5764-299665-0028-433: (HE HAS->HIS) TRIED THAT ROAD AND KNOWS THAT IT IS THE WRONG ROAD +5764-299665-0029-434: A CHILD (CHARMED->SHOWN) BY THE BEAUTY OF THE FLAME (GRASPS->GRASPED) IT WITH (ITS->HIS) DIMPLED HAND +5764-299665-0030-435: THE POWER THAT (WORKS->WORK) FOR RIGHTEOUSNESS (HAS->HAD) TAUGHT THE CHILD A LESSON +5764-299665-0031-436: IT IS A RESULT +5764-299665-0032-437: IT IS INSISTED BY THESE THEOLOGIANS AND BY MANY OF THE (SO->SOUL) CALLED PHILOSOPHERS THAT THIS MORAL SENSE THIS SENSE OF DUTY OF OBLIGATION WAS IMPORTED AND THAT CONSCIENCE IS AN EXOTIC +5764-299665-0033-438: WE LIVE TOGETHER IN FAMILIES TRIBES AND NATIONS +5764-299665-0034-439: THEY ARE PRAISED ADMIRED AND RESPECTED +5764-299665-0035-440: THEY ARE REGARDED AS GOOD THAT IS TO SAY (AS->S) MORAL +5764-299665-0036-441: THE MEMBERS WHO ADD TO THE MISERY OF THE FAMILY THE TRIBE (OR->OF) THE NATION ARE CONSIDERED BAD MEMBERS +5764-299665-0037-442: THE GREATEST OF HUMAN BEINGS (HAS->HAD) SAID CONSCIENCE IS BORN OF LOVE +5764-299665-0038-443: AS PEOPLE ADVANCE THE REMOTE CONSEQUENCES ARE PERCEIVED +5764-299665-0039-444: THE IMAGINATION IS CULTIVATED +5764-299665-0040-445: A MAN (PUTS->BUT) HIMSELF IN THE PLACE OF ANOTHER +5764-299665-0041-446: THE SENSE OF DUTY BECOMES STRONGER MORE IMPERATIVE +5764-299665-0042-447: MAN JUDGES HIMSELF +5764-299665-0043-448: IN ALL THIS THERE IS NOTHING SUPERNATURAL +5764-299665-0044-449: MAN HAS DECEIVED HIMSELF +5764-299665-0045-450: (HAS CHRISTIANITY DONE GOOD->HESTERITY DONEGOOD) +5764-299665-0046-451: WHEN THE CHURCH HAD (CONTROL WERE->CONTROLLED WHERE) MEN MADE BETTER AND HAPPIER +5764-299665-0047-452: WHAT HAS RELIGION DONE FOR HUNGARY OR AUSTRIA +5764-299665-0048-453: (COULD->GOOD) THESE COUNTRIES HAVE BEEN WORSE WITHOUT RELIGION +5764-299665-0049-454: COULD THEY HAVE BEEN WORSE HAD THEY HAD ANY OTHER RELIGION THAN CHRISTIANITY +5764-299665-0050-455: WHAT DID CHRISTIANITY DO (FOR->FAULT) THEM +5764-299665-0051-456: THEY HATED PLEASURE +5764-299665-0052-457: THEY MUFFLED ALL THE BELLS OF GLADNESS +5764-299665-0053-458: THE RELIGION OF THE PURITAN WAS AN (UNADULTERATED->ANADULTERATED) CURSE +5764-299665-0054-459: THE PURITAN BELIEVED THE BIBLE TO BE THE (WORD->WORLD) OF GOD AND THIS BELIEF HAS ALWAYS MADE THOSE WHO HELD IT CRUEL AND WRETCHED +5764-299665-0055-460: LET ME REFER TO JUST ONE FACT SHOWING THE INFLUENCE OF A BELIEF IN THE BIBLE ON HUMAN BEINGS +5764-299665-0056-461: THE QUEEN RECEIVED THE BIBLE KISSED IT AND PLEDGED HERSELF TO DILIGENTLY READ THEREIN +5764-299665-0057-462: IN OTHER WORDS IT WAS JUST AS FIENDISH JUST AS (INFAMOUS->IN FAMOUS) AS THE (CATHOLIC SPIRIT->CATTLE EXPERIOR) +5764-299665-0058-463: (HAS THE BIBLE->HESDAY BUT) MADE THE PEOPLE OF GEORGIA KIND AND MERCIFUL +5764-299665-0059-464: (*->WHO) RELIGION HAS BEEN TRIED AND IN ALL COUNTRIES IN ALL TIMES HAS FAILED +5764-299665-0060-465: RELIGION (HAS->HATH) ALWAYS BEEN THE ENEMY OF SCIENCE OF INVESTIGATION AND THOUGHT +5764-299665-0061-466: RELIGION 
(HAS->IS) NEVER MADE (MAN FREE->MEN FRE) +5764-299665-0062-467: (IT HAS->HE JUST) NEVER MADE MAN MORAL TEMPERATE INDUSTRIOUS AND HONEST +5764-299665-0063-468: (ARE CHRISTIANS MORE->AH CHRISTIAN SMALL) TEMPERATE NEARER VIRTUOUS NEARER HONEST THAN SAVAGES +5764-299665-0064-469: CAN WE CURE DISEASE BY SUPPLICATION +5764-299665-0065-470: CAN WE RECEIVE VIRTUE OR (HONOR->HUNGER) AS ALMS +5764-299665-0066-471: RELIGION RESTS ON THE IDEA THAT NATURE HAS A MASTER AND THAT THIS MASTER WILL LISTEN TO PRAYER THAT (THIS->HIS) MASTER PUNISHES AND REWARDS THAT HE LOVES PRAISE AND FLATTERY AND HATES THE BRAVE AND FREE +5764-299665-0067-472: WE MUST HAVE (CORNER->CORN THE) STONES +5764-299665-0068-473: THE STRUCTURE MUST HAVE (A BASEMENT->ABASEMENT) +5764-299665-0069-474: IF WE BUILD WE MUST BEGIN AT THE BOTTOM +5764-299665-0070-475: I HAVE (A->IT) THEORY AND I HAVE FOUR CORNER STONES +5764-299665-0071-476: THE FIRST STONE (IS->EAST) THAT MATTER SUBSTANCE CANNOT BE DESTROYED CANNOT BE ANNIHILATED +5764-299665-0072-477: IF (THESE CORNER->THIS CORN THE) STONES ARE FACTS IT FOLLOWS AS A NECESSITY THAT MATTER AND FORCE ARE FROM (AND->END) TO ETERNITY THAT THEY CAN NEITHER BE INCREASED NOR DIMINISHED +5764-299665-0073-478: IT FOLLOWS THAT NOTHING HAS BEEN OR CAN BE CREATED THAT THERE NEVER HAS BEEN OR CAN BE A CREATOR +5764-299665-0074-479: IT (FOLLOWS->FOLLOWED) THAT THERE COULD NOT HAVE BEEN ANY INTELLIGENCE (ANY DESIGN->AND A DESIGNED) BACK OF MATTER AND FORCE +5764-299665-0075-480: I SAY WHAT I THINK +5764-299665-0076-481: EVERY EVENT HAS PARENTS +5764-299665-0077-482: THAT WHICH (HAS->HATH) NOT HAPPENED COULD NOT +5764-299665-0078-483: IN THE INFINITE (CHAIN THERE IS->CHANGE WREATHS) AND THERE CAN BE NO BROKEN NO MISSING LINK +5764-299665-0079-484: WE NOW KNOW THAT OUR FIRST PARENTS WERE NOT FOREIGNERS +5764-299665-0080-485: WE NOW KNOW IF WE KNOW ANYTHING THAT THE UNIVERSE IS NATURAL AND THAT (MEN->MAN) AND WOMEN HAVE BEEN NATURALLY PRODUCED +5764-299665-0081-486: WE KNOW THE PATHS THAT LIFE HAS (TRAVELED->TRAVELLED) +5764-299665-0082-487: WE KNOW THE FOOTSTEPS OF ADVANCE THEY HAVE BEEN (TRACED->PRAISED) +5764-299665-0083-488: (FOR->FOUR) THOUSANDS OF YEARS MEN AND WOMEN HAVE BEEN (TRYING->CRYING) TO REFORM THE WORLD +5764-299665-0084-489: WHY HAVE THE (REFORMERS FAILED->REFORMED FAITH) +5764-299665-0085-490: THEY DEPEND ON THE (LORD ON LUCK->LOT UNLUCK) AND CHARITY +5764-299665-0086-491: THEY (LIVE->LEAVE) BY FRAUD AND VIOLENCE AND BEQUEATH THEIR VICES TO THEIR CHILDREN +5764-299665-0087-492: FAILURE SEEMS TO BE THE (TRADEMARK->TRADE MARK) OF NATURE WHY +5764-299665-0088-493: NATURE (PRODUCES->PRODUCED) WITHOUT PURPOSE SUSTAINS WITHOUT INTENTION AND DESTROYS WITHOUT THOUGHT +5764-299665-0089-494: (MUST THE WORLD->MISTER BUILD) FOREVER (REMAIN THE->REMAINED A) VICTIM OF IGNORANT PASSION +5764-299665-0090-495: WHY SHOULD MEN AND WOMEN HAVE CHILDREN THAT THEY CANNOT TAKE CARE OF CHILDREN THAT ARE (BURDENS->A BURDEN) AND CURSES WHY +5764-299665-0091-496: PASSION IS AND (ALWAYS->ALL THIS) HAS BEEN DEAF +5764-299665-0092-497: LAW CAN PUNISH (BUT->THAT) IT CAN NEITHER REFORM CRIMINALS NOR PREVENT CRIME +5764-299665-0093-498: (THIS->THESE) CANNOT BE DONE BY TALK OR EXAMPLE +5764-299665-0094-499: THIS IS THE SOLUTION OF THE WHOLE QUESTION +5764-299665-0095-500: THIS (FREES WOMAN->FREEZE WOMEN) +5764-299665-0096-501: POVERTY AND CRIME WILL BE CHILDLESS +5764-299665-0097-502: IT IS FAR BETTER TO BE FREE TO LEAVE THE (FORTS->FAULTS) AND BARRICADES OF FEAR TO STAND ERECT AND (FACE->FAITH) THE FUTURE WITH (A 
SMILE->US MIND) +6070-63485-0000-2599: (THEY'RE->THERE) DONE (FOR->FAR) SAID THE SCHOOLMASTER IN A (LOW KEY->LOKI) TO THE (CHOUETTE->SWEAT) OUT WITH YOUR VITRIOL AND MIND YOUR EYE +6070-63485-0001-2600: THE TWO MONSTERS TOOK OFF THEIR SHOES AND MOVED STEALTHILY ALONG KEEPING IN THE SHADOWS OF THE HOUSES +6070-63485-0002-2601: BY MEANS OF THIS STRATAGEM THEY FOLLOWED SO CLOSELY THAT ALTHOUGH WITHIN A FEW STEPS OF (SARAH AND->SEREN) TOM THEY DID NOT HEAR THEM +6070-63485-0003-2602: SARAH AND HER BROTHER HAVING AGAIN PASSED BY THE (TAPIS FRANC->TAPPY FRANK) ARRIVED CLOSE TO THE DILAPIDATED HOUSE WHICH WAS PARTLY IN RUINS AND ITS (OPENED->OPEN) CELLARS FORMED A KIND OF GULF ALONG WHICH THE STREET RAN IN THAT DIRECTION +6070-63485-0004-2603: IN AN INSTANT THE SCHOOLMASTER WITH A LEAP RESEMBLING IN STRENGTH AND AGILITY THE SPRING OF A TIGER SEIZED SEYTON WITH ONE HAND BY THE THROAT AND EXCLAIMED YOUR MONEY OR I WILL FLING YOU INTO THIS HOLE +6070-63485-0005-2604: NO SAID THE OLD BRUTE (GRUMBLINGLY->TREMBLINGLY) NO NOT ONE RING WHAT A SHAME +6070-63485-0006-2605: TOM SEYTON DID NOT LOSE HIS PRESENCE OF MIND DURING THIS SCENE RAPIDLY AND UNEXPECTEDLY AS IT HAD OCCURRED +6070-63485-0007-2606: (OH AH->UH) TO LAY A TRAP TO CATCH US REPLIED THE THIEF +6070-63485-0008-2607: THEN ADDRESSING THOMAS (SEYTON->SETTON) YOU KNOW THE (PLAIN->PLANE) OF SAINT DENIS +6070-63485-0009-2608: DID YOU SEE IN THE CABARET WE (HAVE->HAD) JUST LEFT FOR I KNOW YOU AGAIN THE MAN WHOM THE CHARCOAL MAN CAME TO SEEK +6070-63485-0010-2609: CRIED THE SCHOOLMASTER A THOUSAND FRANCS AND I'LL KILL HIM +6070-63485-0011-2610: WRETCH I DO NOT (SEEK->SEE) HIS LIFE REPLIED SARAH TO THE SCHOOLMASTER +6070-63485-0012-2611: LET'S GO AND MEET HIM +6070-63485-0013-2612: OLD (BOY->BY) IT WILL PAY FOR LOOKING AFTER +6070-63485-0014-2613: WELL MY WIFE SHALL BE THERE SAID THE SCHOOLMASTER YOU WILL TELL HER WHAT YOU WANT AND I SHALL SEE +6070-63485-0015-2614: IN THE PLAIN OF SAINT DENIS +6070-63485-0016-2615: BETWEEN SAINT (OUEN->WAT) AND THE ROAD OF LA (REVOLTE->REVOLT) AT THE END OF THE ROAD AGREED +6070-63485-0017-2616: HE HAD FORGOTTEN THE ADDRESS OF THE SELF STYLED (FAN->PAN) PAINTER +6070-63485-0018-2617: THE (FIACRE->FIACCHUS) STARTED +6070-86744-0000-2569: (FRANZ->FRANCE) WHO SEEMED ATTRACTED BY SOME INVISIBLE INFLUENCE TOWARDS THE COUNT IN WHICH TERROR WAS STRANGELY MINGLED FELT AN EXTREME RELUCTANCE TO PERMIT HIS FRIEND TO BE EXPOSED ALONE TO THE SINGULAR FASCINATION THAT THIS MYSTERIOUS PERSONAGE SEEMED TO EXERCISE OVER HIM AND THEREFORE MADE NO OBJECTION TO ALBERT'S REQUEST BUT AT ONCE ACCOMPANIED HIM TO THE DESIRED SPOT AND AFTER A SHORT DELAY THE COUNT JOINED THEM IN THE SALON +6070-86744-0001-2570: MY VERY GOOD FRIEND (AND->AN) EXCELLENT (NEIGHBOR->NEIGHBOUR) REPLIED THE COUNT WITH A SMILE YOU REALLY EXAGGERATE MY TRIFLING EXERTIONS +6070-86744-0002-2571: MY FATHER THE COMTE DE MORCERF ALTHOUGH (OF->A) SPANISH ORIGIN POSSESSES CONSIDERABLE INFLUENCE BOTH AT THE COURT OF FRANCE AND MADRID AND I (UNHESITATINGLY->AM HESITATINGLY) PLACE THE BEST SERVICES OF MYSELF AND ALL TO WHOM MY LIFE IS DEAR AT YOUR DISPOSAL +6070-86744-0003-2572: I CAN SCARCELY CREDIT IT +6070-86744-0004-2573: THEN IT IS SETTLED SAID THE COUNT AND I GIVE YOU MY SOLEMN ASSURANCE THAT I ONLY WAITED (*->IN) AN OPPORTUNITY LIKE THE PRESENT TO REALIZE PLANS THAT I HAVE LONG MEDITATED +6070-86744-0005-2574: (SHALL->SHOW) WE MAKE A POSITIVE APPOINTMENT FOR A PARTICULAR DAY AND HOUR INQUIRED THE COUNT ONLY LET ME WARN YOU THAT I AM PROVERBIAL FOR MY PUNCTILIOUS EXACTITUDE 
IN KEEPING MY ENGAGEMENTS DAY FOR DAY HOUR FOR HOUR SAID ALBERT THAT WILL SUIT ME TO A DOT +6070-86744-0006-2575: SO BE IT THEN REPLIED THE COUNT AND EXTENDING HIS HAND TOWARDS (A->THE) CALENDAR SUSPENDED NEAR THE CHIMNEY PIECE HE SAID TO DAY IS THE TWENTY FIRST OF FEBRUARY AND DRAWING OUT HIS WATCH (ADDED->I DID) IT IS EXACTLY HALF PAST TEN O'CLOCK NOW PROMISE ME TO REMEMBER THIS AND EXPECT ME (THE->THAT) TWENTY FIRST OF MAY AT THE SAME HOUR IN THE FORENOON +6070-86744-0007-2576: I RESIDE IN MY FATHER'S HOUSE BUT OCCUPY A PAVILION AT THE FARTHER SIDE OF THE (COURT YARD->COURTYARD) ENTIRELY SEPARATED FROM THE MAIN BUILDING +6070-86744-0008-2577: NOW THEN SAID THE COUNT RETURNING HIS TABLETS TO HIS POCKET MAKE YOURSELF PERFECTLY EASY THE HAND OF YOUR TIME PIECE WILL NOT BE MORE ACCURATE IN MARKING THE TIME THAN MYSELF +6070-86744-0009-2578: THAT DEPENDS WHEN DO YOU LEAVE +6070-86744-0010-2579: FOR FRANCE NO FOR VENICE I SHALL REMAIN IN ITALY FOR ANOTHER YEAR OR TWO +6070-86744-0011-2580: THEN WE SHALL NOT MEET IN PARIS +6070-86744-0012-2581: I FEAR I SHALL NOT HAVE THAT (HONOR->HONOUR) +6070-86744-0013-2582: WELL SINCE WE MUST PART SAID THE COUNT HOLDING OUT A HAND TO EACH OF THE YOUNG MEN ALLOW ME TO WISH YOU BOTH A SAFE AND PLEASANT JOURNEY +6070-86744-0014-2583: WHAT IS THE MATTER ASKED ALBERT OF FRANZ WHEN THEY HAD RETURNED TO THEIR OWN APARTMENTS YOU (SEEM->SEE) MORE THAN COMMONLY THOUGHTFUL +6070-86744-0015-2584: I WILL CONFESS TO YOU ALBERT REPLIED FRANZ THE COUNT IS A VERY SINGULAR PERSON AND THE APPOINTMENT YOU HAVE MADE TO MEET HIM IN PARIS FILLS ME WITH A THOUSAND APPREHENSIONS +6070-86744-0016-2585: DID YOU EVER MEET HIM PREVIOUSLY TO COMING HITHER +6070-86744-0017-2586: UPON MY (HONOR->HONOUR) THEN LISTEN TO ME +6070-86744-0018-2587: HE DWELT WITH CONSIDERABLE FORCE AND ENERGY ON THE ALMOST MAGICAL HOSPITALITY HE HAD RECEIVED FROM THE COUNT AND THE MAGNIFICENCE OF HIS ENTERTAINMENT IN THE GROTTO OF THE THOUSAND AND ONE NIGHTS HE RECOUNTED WITH CIRCUMSTANTIAL EXACTITUDE ALL THE PARTICULARS OF THE SUPPER THE HASHISH THE STATUES THE DREAM AND HOW AT HIS AWAKENING THERE REMAINED NO PROOF OR TRACE OF ALL THESE EVENTS SAVE THE SMALL YACHT SEEN IN THE DISTANT HORIZON DRIVING UNDER FULL SAIL TOWARD (PORTO->PORT OR) VECCHIO +6070-86744-0019-2588: THEN HE DETAILED THE CONVERSATION OVERHEARD BY HIM AT THE (COLOSSEUM->COLISEUM) BETWEEN THE COUNT AND VAMPA IN WHICH THE COUNT HAD PROMISED TO OBTAIN THE RELEASE OF THE BANDIT PEPPINO AN ENGAGEMENT WHICH AS OUR READERS ARE AWARE HE MOST FAITHFULLY FULFILLED +6070-86744-0020-2589: BUT SAID FRANZ THE CORSICAN BANDITS THAT WERE AMONG THE CREW OF HIS VESSEL +6070-86744-0021-2590: WHY REALLY THE THING SEEMS TO ME SIMPLE ENOUGH +6070-86744-0022-2591: TALKING OF COUNTRIES REPLIED FRANZ OF WHAT (COUNTRY IS->COUNTRIES) THE COUNT WHAT IS HIS NATIVE (TONGUE->TONG) WHENCE DOES HE DERIVE HIS IMMENSE FORTUNE AND WHAT WERE THOSE EVENTS OF HIS EARLY LIFE A LIFE AS MARVELLOUS AS UNKNOWN THAT (HAVE->HATH) TINCTURED HIS SUCCEEDING YEARS WITH SO DARK AND GLOOMY A MISANTHROPY +6070-86744-0023-2592: CERTAINLY THESE ARE QUESTIONS THAT IN YOUR PLACE I SHOULD LIKE TO HAVE ANSWERED +6070-86744-0024-2593: MY DEAR (FRANZ->FRIENDS) REPLIED ALBERT WHEN UPON RECEIPT OF MY LETTER YOU FOUND THE NECESSITY OF ASKING THE COUNT'S ASSISTANCE YOU PROMPTLY WENT TO HIM SAYING MY FRIEND ALBERT DE MORCERF IS IN DANGER HELP ME TO DELIVER HIM +6070-86744-0025-2594: WHAT ARE HIS MEANS OF EXISTENCE WHAT IS HIS BIRTHPLACE OF WHAT (COUNTRY IS->COUNTRIES) HE A NATIVE +6070-86744-0026-2595: I 
CONFESS HE ASKED ME NONE NO HE MERELY CAME AND FREED ME FROM THE HANDS OF (SIGNOR->SENOR) VAMPA WHERE I CAN ASSURE YOU IN SPITE OF ALL MY OUTWARD APPEARANCE OF EASE AND UNCONCERN I DID NOT VERY PARTICULARLY CARE TO REMAIN +6070-86744-0027-2596: AND THIS TIME IT MUST BE CONFESSED THAT CONTRARY TO THE USUAL STATE OF AFFAIRS IN DISCUSSIONS BETWEEN THE YOUNG MEN THE EFFECTIVE ARGUMENTS WERE ALL ON ALBERT'S SIDE +6070-86744-0028-2597: WELL SAID FRANZ WITH A SIGH DO AS YOU PLEASE MY DEAR VISCOUNT FOR YOUR ARGUMENTS ARE BEYOND MY POWERS OF REFUTATION +6070-86744-0029-2598: AND NOW MY DEAR FRANZ LET US TALK OF SOMETHING ELSE +6070-86745-0000-2549: THEN SHOULD ANYTHING APPEAR TO (MERIT->MARRIT) A MORE MINUTE EXAMINATION ALBERT DE MORCERF COULD FOLLOW UP HIS RESEARCHES BY MEANS OF A SMALL GATE SIMILAR TO THAT CLOSE TO THE CONCIERGE'S DOOR AND WHICH MERITS A PARTICULAR DESCRIPTION +6070-86745-0001-2550: SHRUBS AND CREEPING PLANTS COVERED THE WINDOWS AND HID FROM THE GARDEN AND COURT THESE TWO APARTMENTS THE ONLY ROOMS INTO WHICH AS THEY WERE ON THE GROUND FLOOR THE PRYING EYES OF THE CURIOUS COULD PENETRATE +6070-86745-0002-2551: AT A QUARTER TO TEN A VALET ENTERED HE COMPOSED WITH A LITTLE GROOM NAMED JOHN AND WHO ONLY SPOKE ENGLISH ALL ALBERT'S ESTABLISHMENT ALTHOUGH THE COOK OF THE HOTEL WAS ALWAYS AT HIS SERVICE AND ON GREAT OCCASIONS THE COUNT'S CHASSEUR ALSO +6070-86745-0003-2552: WAIT THEN DURING THE DAY TELL ROSA THAT WHEN I LEAVE THE OPERA I WILL SUP WITH HER AS SHE WISHES +6070-86745-0004-2553: VERY WELL AT HALF PAST TEN +6070-86745-0005-2554: IS THE COUNTESS UP YET +6070-86745-0006-2555: THE VALET LEFT THE ROOM +6070-86745-0007-2556: GOOD MORNING LUCIEN GOOD MORNING SAID ALBERT YOUR PUNCTUALITY REALLY ALARMS ME +6070-86745-0008-2557: YOU WHOM I EXPECTED LAST YOU ARRIVE AT FIVE MINUTES TO TEN WHEN THE TIME FIXED WAS HALF PAST +6070-86745-0009-2558: NO NO MY DEAR FELLOW DO NOT CONFOUND OUR PLANS +6070-86745-0010-2559: YES HE HAS NOT MUCH TO COMPLAIN OF (BOURGES->BOURGE) IS THE CAPITAL OF CHARLES (SEVEN->THE SEVENTH) +6070-86745-0011-2560: IT IS FOR THAT REASON YOU SEE ME SO EARLY +6070-86745-0012-2561: I RETURNED HOME AT DAYBREAK AND STROVE TO SLEEP BUT MY HEAD ACHED AND I GOT UP TO HAVE A RIDE FOR AN HOUR +6070-86745-0013-2562: (PESTE->PESTS) I WILL DO NOTHING OF THE KIND THE MOMENT THEY COME FROM GOVERNMENT YOU WOULD FIND THEM EXECRABLE +6070-86745-0014-2563: BESIDES THAT DOES NOT CONCERN THE HOME BUT THE FINANCIAL DEPARTMENT +6070-86745-0015-2564: ABOUT WHAT ABOUT THE PAPERS +6070-86745-0016-2565: IN THE ENTIRE POLITICAL WORLD OF WHICH YOU ARE ONE OF THE LEADERS +6070-86745-0017-2566: THEY SAY THAT IT IS QUITE FAIR AND THAT SOWING SO MUCH RED YOU OUGHT TO (REAP->READ) A LITTLE BLUE +6070-86745-0018-2567: COME COME THAT IS NOT BAD SAID (LUCIEN->LUCIAN) +6070-86745-0019-2568: WITH (YOUR TALENTS YOU->THE OR TALONS HE) WOULD MAKE YOUR FORTUNE IN THREE OR FOUR YEARS +6128-63240-0000-503: THE GENTLEMAN HAD NOT EVEN NEEDED TO SIT DOWN TO BECOME INTERESTED APPARENTLY HE HAD TAKEN UP THE VOLUME FROM A TABLE AS SOON AS HE CAME IN AND STANDING THERE AFTER A SINGLE GLANCE ROUND THE APARTMENT HAD LOST HIMSELF IN ITS PAGES +6128-63240-0001-504: THAT HAS AN UNFLATTERING SOUND FOR ME SAID THE YOUNG MAN +6128-63240-0002-505: SHE IS WILLING TO RISK THAT +6128-63240-0003-506: JUST AS I AM THE VISITOR INQUIRED PRESENTING HIMSELF WITH (RATHER->MOTHER) A (WORK A DAY->WORKADAY) ASPECT +6128-63240-0004-507: HE WAS TALL AND LEAN AND DRESSED THROUGHOUT IN BLACK HIS SHIRT COLLAR WAS LOW AND WIDE AND THE TRIANGLE OF 
LINEN A LITTLE (CRUMPLED->CRAMPLED) EXHIBITED BY THE OPENING OF HIS WAISTCOAT WAS ADORNED BY A PIN CONTAINING A SMALL RED STONE +6128-63240-0005-508: IN SPITE OF THIS DECORATION THE YOUNG MAN LOOKED POOR AS (POOR->FAR) AS A YOUNG MAN COULD (LOOK->LIVE) WHO HAD SUCH A FINE HEAD AND SUCH MAGNIFICENT EYES +6128-63240-0006-509: THOSE OF (BASIL->BAZA) RANSOM (WERE->WENT) DARK DEEP AND GLOWING HIS HEAD HAD A CHARACTER OF ELEVATION WHICH FAIRLY ADDED TO HIS (STATURE->STATUE) IT WAS A HEAD TO BE SEEN ABOVE THE LEVEL OF A CROWD ON SOME JUDICIAL BENCH OR POLITICAL PLATFORM OR EVEN ON A BRONZE MEDAL +6128-63240-0007-510: THESE THINGS THE EYES ESPECIALLY WITH THEIR SMOULDERING FIRE MIGHT HAVE INDICATED THAT HE WAS TO BE A GREAT AMERICAN STATESMAN OR ON THE OTHER HAND THEY MIGHT SIMPLY HAVE PROVED THAT HE CAME FROM CAROLINA OR ALABAMA +6128-63240-0008-511: AND YET THE READER WHO LIKES A COMPLETE IMAGE WHO DESIRES TO READ WITH THE SENSES AS WELL AS WITH THE REASON IS ENTREATED NOT TO FORGET THAT HE PROLONGED HIS (CONSONANTS->COUNTENANCE) AND SWALLOWED HIS (VOWELS->VOWALS) THAT HE WAS GUILTY OF (ELISIONS->ELYGIANS) AND INTERPOLATIONS WHICH WERE EQUALLY UNEXPECTED AND THAT HIS DISCOURSE WAS PERVADED BY SOMETHING SULTRY AND VAST SOMETHING ALMOST AFRICAN IN ITS RICH BASKING TONE SOMETHING THAT SUGGESTED THE TEEMING (EXPANSE->EXPOUNDS) OF THE COTTON FIELD +6128-63240-0009-512: AND HE TOOK UP HIS HAT VAGUELY A SOFT BLACK HAT WITH A LOW CROWN AND AN IMMENSE STRAIGHT BRIM +6128-63240-0010-513: WELL SO IT IS THEY ARE ALL WITCHES AND WIZARDS MEDIUMS AND SPIRIT (RAPPERS->WRAPPERS) AND (ROARING->ROWING) RADICALS +6128-63240-0011-514: IF YOU ARE GOING TO DINE WITH HER YOU HAD BETTER KNOW IT OH MURDER +6128-63240-0012-515: HE (LOOKED AT->LIFTED) MISSUS LUNA WITH INTELLIGENT INCREDULITY +6128-63240-0013-516: SHE WAS ATTRACTIVE AND IMPERTINENT ESPECIALLY THE LATTER +6128-63240-0014-517: HAVE YOU BEEN IN EUROPE +6128-63240-0015-518: NO I HAVEN'T BEEN ANYWHERE +6128-63240-0016-519: SHE HATES IT SHE WOULD LIKE TO ABOLISH IT +6128-63240-0017-520: THIS LAST REMARK HE MADE (AT->*) A VENTURE FOR HE HAD NATURALLY NOT DEVOTED ANY SUPPOSITION WHATEVER TO MISSUS (LUNA->LENA) +6128-63240-0018-521: ARE YOU VERY AMBITIOUS YOU LOOK AS IF YOU WERE +6128-63240-0019-522: AND MISSUS (LUNA->LENA) ADDED THAT NOW SHE WAS BACK SHE DIDN'T KNOW WHAT SHE SHOULD DO +6128-63240-0020-523: ONE DIDN'T EVEN KNOW WHAT ONE HAD COME BACK FOR +6128-63240-0021-524: BESIDES OLIVE DIDN'T WANT HER IN (BOSTON->BUSTON) AND DIDN'T GO THROUGH THE FORM OF SAYING SO +6128-63240-0022-525: THAT WAS ONE COMFORT WITH OLIVE SHE NEVER (WENT->WON) THROUGH ANY FORMS +6128-63240-0023-526: SHE STOOD THERE LOOKING CONSCIOUSLY AND RATHER SERIOUSLY (AT->AND) MISTER RANSOM A SMILE OF EXCEEDING FAINTNESS PLAYED ABOUT HER LIPS IT WAS JUST PERCEPTIBLE ENOUGH TO LIGHT UP THE NATIVE GRAVITY OF HER FACE +6128-63240-0024-527: HER VOICE WAS LOW AND AGREEABLE A CULTIVATED VOICE AND SHE EXTENDED A SLENDER WHITE HAND TO HER VISITOR (WHO->HER) REMARKED WITH SOME SOLEMNITY HE FELT A CERTAIN GUILT OF PARTICIPATION IN MISSUS (LUNA'S->LUNAR'S) INDISCRETION THAT HE WAS INTENSELY HAPPY TO MAKE HER ACQUAINTANCE +6128-63240-0025-528: HE OBSERVED THAT MISS CHANCELLOR'S HAND WAS AT ONCE (COLD->CALLED) AND LIMP SHE MERELY PLACED IT IN HIS WITHOUT EXERTING THE SMALLEST PRESSURE +6128-63240-0026-529: I SHALL BE BACK VERY LATE (WE ARE GOING TO A THEATRE->WILL DON'T YOU THEATER) PARTY THAT'S WHY WE DINE SO EARLY +6128-63240-0027-530: MISSUS (LUNA'S->LUNDY'S) FAMILIARITY EXTENDED EVEN TO HER SISTER SHE 
REMARKED TO MISS CHANCELLOR THAT SHE LOOKED AS IF SHE WERE GOT UP FOR A SEA (VOYAGE->VOY EACH) +6128-63241-0000-557: POOR RANSOM ANNOUNCED THIS FACT TO HIMSELF AS IF HE HAD MADE A GREAT DISCOVERY BUT IN REALITY HE HAD NEVER BEEN SO (BOEOTIAN->BE OTIAN) AS AT THAT MOMENT +6128-63241-0001-558: THE WOMEN HE HAD HITHERTO KNOWN HAD BEEN MAINLY OF HIS OWN SOFT (CLIME->CLIMB) AND IT WAS NOT OFTEN THEY EXHIBITED THE TENDENCY HE DETECTED AND (CURSORILY->CURSORY) DEPLORED IN MISSUS LUNA'S SISTER +6128-63241-0002-559: RANSOM WAS PLEASED WITH THE VISION OF THAT REMEDY IT MUST BE REPEATED THAT HE WAS VERY PROVINCIAL +6128-63241-0003-560: HE WAS SORRY FOR HER BUT HE SAW IN A FLASH THAT NO ONE COULD HELP HER THAT WAS WHAT MADE HER TRAGIC +6128-63241-0004-561: SHE COULD NOT DEFEND HERSELF AGAINST A RICH ADMIRATION A KIND OF TENDERNESS OF ENVY OF ANY ONE WHO HAD BEEN SO HAPPY AS TO HAVE THAT OPPORTUNITY +6128-63241-0005-562: HIS FAMILY WAS RUINED THEY HAD LOST THEIR SLAVES THEIR PROPERTY THEIR FRIENDS AND RELATIONS THEIR HOME HAD TASTED OF ALL THE CRUELTY OF DEFEAT +6128-63241-0006-563: THE STATE OF MISSISSIPPI SEEMED TO HIM THE STATE OF DESPAIR SO (HE->HIS) SURRENDERED THE REMNANTS OF HIS PATRIMONY TO HIS MOTHER AND SISTERS AND AT NEARLY THIRTY YEARS OF AGE ALIGHTED FOR THE FIRST TIME IN NEW YORK IN THE COSTUME OF HIS PROVINCE WITH FIFTY DOLLARS IN HIS POCKET AND (A GNAWING->ENNARING) HUNGER IN HIS HEART +6128-63241-0007-564: IT WAS IN THE FEMALE LINE AS (BASIL->BALES AT) RANSOM HAD WRITTEN IN ANSWERING HER LETTER WITH A GOOD DEAL OF FORM AND FLOURISH HE SPOKE AS IF THEY HAD BEEN ROYAL HOUSES +6128-63241-0008-565: IF IT HAD BEEN POSSIBLE TO SEND MISSUS (RANSOM->RANDOM) MONEY OR EVEN CLOTHES SHE WOULD HAVE LIKED THAT BUT SHE HAD NO MEANS OF ASCERTAINING (HOW->HER) SUCH AN OFFERING WOULD BE TAKEN +6128-63241-0009-566: (OLIVE->OLIV) HAD A FEAR OF EVERYTHING BUT HER GREATEST FEAR WAS OF BEING AFRAID +6128-63241-0010-567: SHE HAD ERECTED IT INTO A SORT OF RULE OF CONDUCT THAT WHENEVER SHE SAW A RISK SHE WAS TO TAKE IT AND SHE HAD FREQUENT HUMILIATIONS AT FINDING HERSELF SAFE AFTER ALL +6128-63241-0011-568: SHE WAS PERFECTLY SAFE AFTER WRITING TO (BASIL->BASE OR) RANSOM AND INDEED IT WAS DIFFICULT TO SEE WHAT HE COULD HAVE DONE TO HER EXCEPT THANK HER HE WAS ONLY EXCEPTIONALLY SUPERLATIVE FOR HER LETTER AND ASSURE HER THAT HE WOULD COME AND SEE HER THE FIRST TIME HIS BUSINESS HE WAS BEGINNING TO GET A LITTLE SHOULD TAKE HIM TO BOSTON +6128-63241-0012-569: HE WAS TOO SIMPLE TOO MISSISSIPPIAN FOR THAT SHE WAS ALMOST DISAPPOINTED +6128-63241-0013-570: OF ALL THINGS IN THE WORLD CONTENTION WAS MOST SWEET TO HER THOUGH WHY IT IS HARD TO IMAGINE FOR IT ALWAYS COST HER TEARS HEADACHES A DAY OR TWO IN BED ACUTE EMOTION AND IT WAS VERY POSSIBLE (BASIL->BEESER) RANSOM WOULD NOT CARE TO (CONTEND->COMPEND) +6128-63244-0000-531: MISS CHANCELLOR HERSELF HAD THOUGHT SO MUCH ON THE VITAL SUBJECT WOULD NOT SHE MAKE A FEW REMARKS AND GIVE THEM SOME OF HER EXPERIENCES +6128-63244-0001-532: HOW DID THE LADIES (ON->AND) BEACON STREET FEEL ABOUT THE BALLOT +6128-63244-0002-533: PERHAPS SHE COULD SPEAK FOR THEM MORE THAN FOR SOME OTHERS +6128-63244-0003-534: WITH HER (IMMENSE->MENST) SYMPATHY FOR REFORM SHE FOUND HERSELF SO OFTEN WISHING THAT (REFORMERS->REFUSE) WERE A LITTLE DIFFERENT +6128-63244-0004-535: (OLIVE->I DID) HATED TO HEAR THAT FINE AVENUE (TALKED->TALKS) ABOUT AS IF IT (WERE->WAS) SUCH A REMARKABLE PLACE AND TO LIVE THERE (WERE->WHERE) A PROOF OF WORLDLY GLORY +6128-63244-0005-536: ALL SORTS OF INFERIOR PEOPLE 
(LIVED->IF) THERE AND SO BRILLIANT A WOMAN AS MISSUS (FARRINDER->FALLINGER) WHO LIVED AT (ROXBURY->BRAXBURY) OUGHT NOT TO (MIX THINGS->MAKE SPENCE) UP +6128-63244-0006-537: SHE KNEW HER PLACE IN THE BOSTON (HIERARCHY->HIRAKEE) AND IT WAS NOT WHAT MISSUS (FARRINDER->FAIRRING JUST) SUPPOSED SO THAT THERE WAS A WANT OF PERSPECTIVE IN TALKING TO HER AS IF SHE HAD BEEN (A REPRESENTATIVE->UNREPRESENTATIVE) OF THE ARISTOCRACY +6128-63244-0007-538: SHE WISHED TO WORK IN ANOTHER FIELD SHE HAD LONG BEEN PREOCCUPIED WITH THE ROMANCE OF THE PEOPLE +6128-63244-0008-539: THIS MIGHT SEEM ONE OF THE MOST ACCESSIBLE OF PLEASURES BUT IN POINT OF FACT SHE HAD NOT FOUND IT SO +6128-63244-0009-540: CHARLIE WAS A YOUNG MAN IN A (WHITE->WORLD) OVERCOAT AND A PAPER COLLAR IT WAS FOR HIM IN THE (LAST ANALYSIS->LASTIS) THAT (THEY->THE) CARED MUCH THE MOST +6128-63244-0010-541: (OLIVE->OUT OF) CHANCELLOR (*->I) WONDERED HOW MISSUS (FARRINDER->THINDER) WOULD TREAT (THAT->THEIR) BRANCH OF THE QUESTION +6128-63244-0011-542: IF IT BE NECESSARY WE ARE PREPARED TO TAKE CERTAIN STEPS TO CONCILIATE THE SHRINKING +6128-63244-0012-543: OUR MOVEMENT IS FOR ALL IT APPEALS TO THE MOST DELICATE LADIES +6128-63244-0013-544: (RAISE->PRINCE) THE STANDARD AMONG THEM AND BRING ME A (THOUSAND->SPASM) NAMES +6128-63244-0014-545: (I->AND) LOOK AFTER THE DETAILS AS WELL AS THE BIG (CURRENTS->CURRANTS) MISSUS (FARRINDER->FARRENDER) ADDED IN A TONE AS EXPLANATORY AS COULD BE EXPECTED OF SUCH A WOMAN AND WITH A SMILE OF WHICH (THE->THIS) SWEETNESS WAS THRILLING TO HER LISTENER +6128-63244-0015-546: SAID (OLIVE->OLDEST) CHANCELLOR WITH A FACE WHICH SEEMED TO PLEAD FOR A (REMISSION OF->REMISSIONOUS) RESPONSIBILITY +6128-63244-0016-547: (I WANT->HOW WONT) TO BE NEAR TO THEM TO HELP THEM +6128-63244-0017-548: IT WAS ONE THING TO CHOOSE FOR HERSELF BUT NOW THE GREAT REPRESENTATIVE OF THE (ENFRANCHISEMENT->ENCOMCHISEMENT) OF THEIR SEX FROM EVERY FORM OF BONDAGE HAD CHOSEN FOR HER +6128-63244-0018-549: THE UNHAPPINESS OF WOMEN +6128-63244-0019-550: THEY WERE (HER->HIS) SISTERS THEY WERE HER OWN AND THE DAY OF THEIR DELIVERY HAD DAWNED +6128-63244-0020-551: THIS WAS THE ONLY SACRED CAUSE THIS WAS THE GREAT (THE JUST REVOLUTION->DRESSED REVELATION) IT (MUST->WAS) TRIUMPH IT (MUST->WAS) SWEEP EVERYTHING BEFORE IT IT MUST EXACT FROM THE OTHER THE BRUTAL (BLOOD STAINED->BLOODSTAINED) RAVENING RACE THE LAST PARTICLE OF EXPIATION +6128-63244-0021-552: (THEY->THERE) WOULD BE NAMES OF WOMEN WEAK INSULTED PERSECUTED BUT DEVOTED IN EVERY PULSE OF THEIR BEING TO THE CAUSE AND ASKING NO BETTER FATE THAN TO DIE FOR IT +6128-63244-0022-553: IT WAS NOT CLEAR TO THIS INTERESTING GIRL IN WHAT MANNER SUCH A SACRIFICE (AS->OF) THIS LAST WOULD BE REQUIRED OF HER BUT SHE (SAW THE->SOLDOM) MATTER THROUGH A KIND OF SUNRISE MIST OF (EMOTION->THE NATION) WHICH MADE DANGER AS ROSY (AS->IS) SUCCESS +6128-63244-0023-554: WHEN MISS (BIRDSEYE->BIRD'S EYE) APPROACHED IT TRANSFIGURED HER FAMILIAR (HER COMICAL->HYCOMICAL) SHAPE AND MADE THE POOR LITTLE (HUMANITARY->HUMANITY) HACK SEEM ALREADY A MARTYR +6128-63244-0024-555: (OLIVE->ONLY IF) CHANCELLOR LOOKED AT HER WITH LOVE REMEMBERED THAT SHE HAD NEVER IN HER LONG (UNREWARDED->IN REWARDED) WEARY LIFE HAD A THOUGHT (OR->OF) AN IMPULSE FOR HERSELF +6128-63244-0025-556: (*->IF) SHE HAD BEEN CONSUMED BY THE PASSION OF SYMPATHY IT HAD (CRUMPLED->CRUMBLED) HER INTO AS MANY CREASES AS AN OLD GLAZED DISTENDED GLOVE +6432-63722-0000-2431: (BUT SCUSE->BUTCHUSE) ME (DIDN'T YO FIGGER ON DOIN->THEN YOU'LL FAGONNE DOING) SOME (DETECTIN AN 
GIVE->DETECTIVE AND GIVIN) UP (FISHIN->FISHING) +6432-63722-0001-2432: AND SHAG WITH THE FREEDOM OF AN OLD SERVANT STOOD LOOKING AT HIS MASTER AS IF NOT QUITE UNDERSTANDING THE NEW TWIST THE AFFAIRS HAD TAKEN +6432-63722-0002-2433: I'M (GOING OFF FISHING->GOIN OUR FISHIN) I MAY NOT CATCH ANYTHING (I->AND) MAY NOT WANT TO AFTER I GET THERE +6432-63722-0003-2434: GET READY (SHAG->SHAGG) YES (SAH->A) COLONEL +6432-63722-0004-2435: AND HAVING PUT HIMSELF IN A FAIR WAY AS HE HOPED TO SOLVE SOME OF THE PROBLEMS CONNECTED WITH THE DARCY CASE COLONEL ASHLEY WENT DOWN TO POLICE HEADQUARTERS TO LEARN MORE FACTS IN (*->THE) CONNECTION WITH THE MURDER OF THE EAST INDIAN +6432-63722-0005-2436: (PINKUS->PICK US) AND DONOVAN HAVEN'T THEY CARROLL YEP +6432-63722-0006-2437: (CARROLL->KAL) WAS TOO MUCH ENGAGED IN WATCHING THE BLUE SMOKE (CURL->GIRL) LAZILY UPWARD FROM HIS CIGAR JUST THEN TO SAY MORE +6432-63722-0007-2438: ARE YOU GOING TO WORK ON THAT CASE COLONEL +6432-63722-0008-2439: BUT HE HADN'T ANY MORE TO DO WITH IT COLONEL THAN THAT CAT +6432-63722-0009-2440: PERHAPS NOT ADMITTED COLONEL ASHLEY +6432-63722-0010-2441: WE'VE GOT OUR MAN AND THAT'S ALL WE WANT +6432-63722-0011-2442: (YOU'RE->YOU ARE) ON THE DARCY CASE THEY TELL ME IN A WAY YES +6432-63722-0012-2443: (I'M WORKING->HIGH MARKING) IN THE (INTERESTS->INTEREST) OF THE YOUNG MAN +6432-63722-0013-2444: IT'S JUST ONE OF THEM (COINCIDENCES->COINCIDENCE IS) LIKE +6432-63722-0014-2445: BUSTED HIS HEAD IN WITH A HEAVY CANDLESTICK ONE OF A PAIR +6432-63722-0015-2446: GAD (EXCLAIMED->EXPLAINED) THE COLONEL +6432-63722-0016-2447: THE VERY PAIR I WAS GOING TO BUY +6432-63722-0017-2448: LOOK HERE (COLONEL->CAROL) DO YOU KNOW ANYTHING ABOUT THIS +6432-63722-0018-2449: AND THE DETECTIVE'S PROFESSIONAL INSTINCTS GOT THE UPPER HAND OF HIS FRIENDLINESS NOT THE LEAST IN THE WORLD NOT AS MUCH AS YOU DO WAS THE COOL ANSWER +6432-63722-0019-2450: I HAPPENED TO SEE THOSE CANDLESTICKS IN THE WINDOW OF (SINGA PHUT'S->SINGAFUT'S) SHOP THE OTHER DAY AND I MADE UP MY MIND TO BUY THEM WHEN I HAD A CHANCE +6432-63722-0020-2451: NOW I'M AFRAID I WON'T BUT HOW DID IT HAPPEN +6432-63722-0021-2452: (PHUT->FIVE) I DON'T KNOW WHETHER THAT'S HIS FIRST OR HIS LAST NAME ANYHOW HE HAD A PARTNER NAMED (SHERE->TO SHARE) ALI +6432-63722-0022-2453: ANYHOW HE (AND PHUT DIDN'T->INFECTED) GET ALONG VERY WELL IT SEEMS +6432-63722-0023-2454: (NEIGHBORS->LABORS) OFTEN HEARD (EM SCRAPPIN->HIM SCRAP IN) A LOT AND THIS AFTERNOON THEY WENT AT IT AGAIN HOT AND HEAVY +6432-63722-0024-2455: (TOWARD->TO OUR) DARK A MAN WENT IN TO BUY A LAMP +6432-63722-0025-2456: HE FOUND THE PLACE WITHOUT A LIGHT IN IT STUMBLED OVER SOMETHING ON THE FLOOR AND THERE WAS (ALI'S->ALWAYS) BODY WITH THE HEAD BUSTED IN AND THIS HEAVY CANDLESTICK NEAR IT +6432-63722-0026-2457: SURE HELD SO TIGHT WE COULD HARDLY GET IT OUT +6432-63722-0027-2458: MAYBE THE FIGHT WAS ABOUT WHO OWNED THE WATCH FOR THE (DAGOS->DAG WAS) TALKED IN THEIR FOREIGN LINGO AND NONE OF THE NEIGHBORS COULD TELL WHAT THEY WERE (SAYIN->SAYING) I SEE +6432-63722-0028-2459: AND THE WATCH HAVE YOU IT YES IT'S HERE +6432-63722-0029-2460: THAT'S THE WATCH ANNOUNCED THE (HEADQUARTERS->HEADQUARTER) DETECTIVE REACHING IN FOR IT GOING (YET->AT) SEE +6432-63722-0030-2461: YOU'RE NOT (AS SQUEAMISH->A SCREAMY) AS ALL THAT ARE YOU JUST BECAUSE IT WAS IN A DEAD MAN'S (HAND->HANDS) AND (IN->*) A WOMAN'S +6432-63722-0031-2462: AND DONOVAN'S VOICE WAS PLAINLY (SKEPTICAL->SCEPTICAL) +6432-63722-0032-2463: YES IT MAY HAVE SOME ROUGH EDGES ON IT +6432-63722-0033-2464: AND 
I'VE READ ENOUGH ABOUT GERMS TO KNOW THE DANGER I'D ADVISE YOU TO BE CAREFUL +6432-63722-0034-2465: IF YOU DON'T MIND I SHOULD LIKE TO EXAMINE THIS A BIT +6432-63722-0035-2466: BEFORE THE BIG WIND IN IRELAND SUGGESTED THONG WITH A NOD (AT->OF) HIS IRISH COMPATRIOT SLIGHTLY (LAUGHED->THEY'LL HAVE) THE COLONEL +6432-63722-0036-2467: THAT'S RIGHT AGREED THE COLONEL AS HE CONTINUED TO MOVE HIS MAGNIFYING GLASS OVER THE SURFACE OF THE STILL TICKING WATCH +6432-63722-0037-2468: (AND->IN) A CLOSE OBSERVER MIGHT HAVE OBSERVED THAT HE DID NOT TOUCH HIS BARE FINGERS TO THE TIMEPIECE BUT POKED IT ABOUT AND TOUCHED IT HERE AND THERE WITH THE END OF A (LEADPENCIL->LEAD PENCIL) +6432-63722-0038-2469: AND (DONOVAN->DONALIN) TAKE (A->HER) FRIEND'S ADVICE AND DON'T BE TOO FREE WITH THAT WATCH TOO FREE WITH IT +6432-63722-0039-2470: ASKED THE SURPRISED DETECTIVE YES +6432-63722-0040-2471: DON'T SCRATCH YOURSELF ON IT WHATEVER YOU DO WHY NOT +6432-63722-0041-2472: SIMPLY BECAUSE THIS WATCH +6432-63722-0042-2473: SOME ONE OUT HERE TO SEE YOU +6432-63722-0043-2474: ALL RIGHT BE THERE IN A SECOND +6432-63722-0044-2475: (SINGA PHUT->SHING AFOOT) WAS THE PANTING ANSWER +6432-63722-0045-2476: I WANT TO TALK OVER DARCY'S CASE WITH YOU THE COLONEL HAD SAID AND THE (TWO->JEW) HAD TALKED HAD THOUGHT HAD TALKED AGAIN AND NOW WERE SILENT FOR A TIME +6432-63722-0046-2477: WHAT ARE THE (CHANCES->CHURCHES) OF GETTING HIM OFF LEGALLY IF WE GO AT IT FROM A NEGATIVE STANDPOINT ASKED THE COLONEL +6432-63722-0047-2478: RATHER A HYPOTHETICAL QUESTION COLONEL BUT I SHOULD SAY IT MIGHT BE A FIFTY FIFTY PROPOSITION +6432-63722-0048-2479: AT BEST HE WOULD GET OFF (WITH A->FOR THE) SCOTCH VERDICT OF NOT PROVEN BUT HE DOESN'T WANT THAT NOR DO I +6432-63722-0049-2480: AND YOU I DON'T WANT IT EITHER +6432-63722-0050-2481: BUT I WANT TO KNOW JUST WHERE WE STAND NOW I KNOW +6432-63722-0051-2482: BUT I NEED TO DO A LITTLE MORE SMOKING OUT FIRST NOW I WANT TO THINK +6432-63722-0052-2483: IF YOU'LL EXCUSE ME I'LL PRETEND I'M FISHING AND I MAY CATCH SOMETHING +6432-63722-0053-2484: IN FACT I HAVE A FEELING THAT (I'LL->I) LAND MY FISH +6432-63722-0054-2485: (I'D->I) RECOMMEND HIM TO YOU INSTEAD OF BLACKSTONE THANKS LAUGHED KENNETH +6432-63722-0055-2486: WHAT IS IT PERHAPS I CAN HELP YOU +6432-63722-0056-2487: THE OLD ADAGE OF TWO HEADS YOU KNOW +6432-63722-0057-2488: YES (IT->IT'S) STILL HOLDS GOOD +6432-63722-0058-2489: NO ALIMONY (REPEATED->REPLIED) THE COLONEL PUZZLED YES JUST THAT +6432-63722-0059-2490: AND THERE'S NO REASON YOU SHOULDN'T KNOW +6432-63723-0000-2491: CHUCKLED THE COLONEL AS HE SKILFULLY PLAYED THE LUCKLESS TROUT NOW STRUGGLING TO GET LOOSE FROM THE HOOK +6432-63723-0001-2492: AND WHEN THE FISH WAS LANDED PANTING ON THE GRASS AND SHAG HAD BEEN ROUSED FROM HIS SLUMBER TO SLIP (THE->A) NOW LIMP FISH INTO THE (CREEL->CREOLE) COLONEL ASHLEY GAVE A SIGH OF RELIEF AND REMARKED I THINK I SEE IT NOW +6432-63723-0002-2493: THE REASON SHE ASKED NO ALIMONY INQUIRED KENNETH +6432-63723-0003-2494: NO I WASN'T THINKING OF THAT +6432-63723-0004-2495: HOWEVER DON'T THINK I'M NOT INTERESTED IN YOUR CASE I'VE (FISHED->FINISHED) ENOUGH FOR TO DAY +6432-63723-0005-2496: WELL I DON'T KNOW THAT YOU CAN +6432-63723-0006-2497: IT (ISN'T->IS IN) GENERALLY KNOWN WENT ON THE LAWYER THAT THE HOTEL KEEPER'S WIFE HAS LEFT HIM +6432-63723-0007-2498: IT WAS ONE OF WHAT AT FIRST MIGHT BE CALLED REFINED CRUELTY ON HER HUSBAND'S PART DEGENERATING GRADUALLY INTO THAT OF (THE->A) BASER SORT +6432-63723-0008-2499: (YOU DON'T->IT ALL) MEAN THAT (LARCH->LARGE) 
STRUCK HER THAT THERE WAS PHYSICAL ABUSE DO YOU ASKED THE COLONEL THAT'S WHAT HE DID +6432-63723-0009-2500: THE COLONEL DID NOT DISCLOSE THE FACT THAT IT WAS NO NEWS TO HIM +6432-63723-0010-2501: AARON GRAFTON'S STATEMENT WAS BEING (UNEXPECTEDLY->UNEXPECTED GREAT) CONFIRMED +6432-63723-0011-2502: HE REMEMBERED THAT CYNTHIA AND GRAFTON HAD ONCE BEEN IN LOVE WITH EACH OTHER +6432-63723-0012-2503: SHE SAID HE HAD STRUCK HER MORE THAN ONCE AND SHE COULD STAND IT NO LONGER +6432-63723-0013-2504: BECAUSE (LARCH->LARGE) MADE NO (DEFENSE->DEFENCE) +6432-63723-0014-2505: (LARCH->LARGE) BY REFUSING TO APPEAR PRACTICALLY ADMITTED THE CHARGES AGAINST HIM AND DID NOT OPPOSE THE SEPARATION +6432-63723-0015-2506: SO I HAD TO LET HER HAVE HER WAY AND WE DID NOT ASK THE (COURT->CORP) FOR MONEY THOUGH I HAD NO SUCH SQUEAMISH FEELINGS WHEN IT CAME TO MY COUNSEL FEE +6432-63723-0016-2507: NO BUT HE WILL OR (I'LL SUE HIM->ELSE UM) AND GET JUDGMENT OH HE'LL PAY ALL RIGHT +6432-63723-0017-2508: AND IT TAKES ALL SORTS OF PERSONS TO MAKE IT UP +6432-63723-0018-2509: STILL I WOULD LIKE TO KNOW +6432-63723-0019-2510: THE MURDER OF MISSUS DARCY HAD SOME TIME AGO BEEN SHIFTED OFF THE FRONT PAGE THOUGH IT WOULD GET BACK THERE WHEN THE YOUNG JEWELER WAS TRIED +6432-63723-0020-2511: IT HAD A DOUBLE REPUTATION SO TO SPEAK +6432-63723-0021-2512: GRAVE AND EVEN REVEREND (*->THE) CONVENTIONS ASSEMBLED IN ITS (BALLROOM AND->BALL ROOM IN) POLITICIANS OF THE UPPER IF NOT BETTER CLASS WERE FREQUENTLY SEEN IN ITS DINING ROOM OR CAFE +6432-63723-0022-2513: (LARCH->LARGE) HIMSELF WAS A PECULIAR CHARACTER +6432-63723-0023-2514: IN A SMALLER PLACE HE WOULD HAVE BEEN CALLED A SALOON KEEPER +6432-63723-0024-2515: AND IT WAS THIS MAN RICH (IT WAS->EVER) SAID HANDSOME CERTAINLY THAT CYNTHIA RATCHFORD HAD MARRIED +6432-63723-0025-2516: TO THIS WAS THE ANSWER WHISPERED MONEY +6432-63723-0026-2517: AND IN A WAY IT WAS TRUE +6432-63723-0027-2518: SHE ALSO SAW AN OPPORTUNITY OF PAYING OLD DEBTS AND REAPING SOME REVENGES +6432-63723-0028-2519: AFTER THE MARRIAGE WHICH WAS A BRILLIANT AND GAY ONE IF NOT HAPPY THE (LARCH->LARGE) HOTEL IT COULD HARDLY BE CALLED A HOME BECAME THE SCENE OF MANY FESTIVE OCCASIONS +6432-63723-0029-2520: THEN IT WAS SAID OF (LARCH->LARGE) THAT SOON AFTER THE ECHOES OF THE WEDDING CHIMES HAD DIED AWAY HE HAD BEGUN TO TREAT HIS WIFE WITH (*->A) REFINED CRUELTY THAT HIDDEN AWAY FROM THE PUBLIC UNDERNEATH HIS HABITUAL MANNER THERE WAS THE RAWNESS OF THE BRUTE +6432-63723-0030-2521: BUT IT WAS NOTICED THAT THE OLDER AND MORE CONSERVATIVE FAMILIES WERE LESS OFTEN REPRESENTED AND WHEN THEY WERE IT WAS BY SOME OF THE YOUNGER MEMBERS WHOSE REPUTATIONS WERE ALREADY (SMIRCHED->SMARGED) OR WHO HAD NOT YET ACQUIRED ANY AND WERE WILLING TO TAKE A CHANCE +6432-63723-0031-2522: IT WOULDN'T DO YOU KNOW AFTER THAT STORY CAME OUT FOR ME AND THE VICE CHANCELLOR WHO SAT IN (THE->A) CASE AS WELL AS OTHER JUDGES AND MEMBERS OF THE BAR TO BE SEEN THERE KENNETH EXPLAINED TO THE COLONEL +6432-63723-0032-2523: MEANWHILE COLONEL ASHLEY WAS A VERY BUSY MAN AND TO NO ONE DID HE TELL VERY MUCH ABOUT HIS ACTIVITIES HE SAW DARCY FREQUENTLY AT THE JAIL AND TO THAT YOUNG MAN'S PLEADINGS THAT SOMETHING (*->TO) BE DONE ALWAYS RETURNED THE ANSWER +6432-63723-0033-2524: DON'T WORRY IT WILL COME OUT ALL RIGHT +6432-63723-0034-2525: I'M GOING (TO RECTIFY->DIRECTIFY) THEM BUT (IT->I) WILL TAKE TIME +6432-63723-0035-2526: (IT'S->HIS) HARD FOR MISS MASON TOO ALTHOUGH SHE'S BEARING UP LIKE A MAJOR +6432-63723-0036-2527: SO KING (GOT->GOD) BAIL WHO PUT IT (UP->*) 
+6432-63723-0037-2528: IT WAS (HIGH->I) LARCH +6432-63723-0038-2529: THEY TOOK HARRY AWAY A WHILE AGO +6432-63723-0039-2530: BUT HIS ARE PRETTY UNCERTAIN SHOES TO BE IN JUST THE SAME +6432-63723-0040-2531: ONLY THAT I DARCY HESITATED AND GREW RED +6432-63723-0041-2532: GOOD EVENING COLONEL HE CALLED GENIALLY WILL YOU JOIN ME IN A WELSH RABBIT +6432-63723-0042-2533: THANK YOU NO +6432-63723-0043-2534: I'M AFRAID MY (DIGESTION->DIRECTION) ISN'T QUITE UP TO THAT AS I'VE HAD TO CUT OUT MY FISHING OF LATE +6432-63723-0044-2535: NOW AS TO CERTAIN MATTERS IN THE STORE ON THE MORNING OF THE MURDER +6432-63723-0045-2536: (THE->THEY) STOPPED (CLOCKS->CLUXED) FOR INSTANCE HAVE YOU ANY THEORY +6432-63723-0046-2537: THERE WERE THREE OF THEM THE CENTER FIGURE BEING THAT OF HARRY KING AND HE WAS VERY MUCH INTOXICATED +6432-63723-0047-2538: THAT IS NOT ALWAYS BUT SOMETIMES IT HAPPENED TO BE SO NOW +6432-63723-0048-2539: I BEG YOUR PARDON HE SAID IN THE CULTURED TONES HE KNEW SO WELL HOW TO USE YET OF WHICH HE MADE SO LITTLE USE OF LATE +6432-63723-0049-2540: I SAID WHERE HAVE YOU BEEN REMARKED THE OTHER WE'VE MISSED YOU +6432-63723-0050-2541: I SAID I WAS GOLFING HE WENT ON EXCEEDINGLY DISTINCTLY THOUGH WITH AN EFFORT +6432-63723-0051-2542: WHY (POLONIUS->BONIUS) SOME ONE ASKED +6432-63723-0052-2543: BECAUSE DEAR FRIEND REPLIED KING SOFTLY HE SOMEWHAT RESEMBLES A CERTAIN PERSON HERE WHO TALKS TOO MUCH BUT WHO IS NOT SO WISE AS HE THINKS +6432-63723-0053-2544: THERE WAS A RATTLE OF (COINS ON->COIN DOWN) THE MAHOGANY BAR AS KING SOUGHT TO DISENTANGLE A SINGLE BILL FROM THE (WADDED->WATERED) UP CURRENCY IN HIS POCKET +6432-63723-0054-2545: IT'S (IT'S->*) AN ODD COIN AN OLD ROMAN ONE THAT MISSUS DARCY HAD IN HER PRIVATE COLLECTION KEPT IN THE JEWELRY STORE SAFE WAS THE WHISPERED ANSWER +6432-63723-0055-2546: I WENT OVER THEM (*->NEAR) THE (OTHER->*) DAY AND NOTICED SOME WERE MISSING THOUGH I SAW THEM ALL WHEN I PAID A VISIT TO HER JUST A SHORT TIME BEFORE SHE WAS KILLED +6432-63723-0056-2547: THAT WAS HERS WENT ON THE JEWELER +6432-63723-0057-2548: NOW HARRY KING HAS IT EXCLAIMED COLONEL ASHLEY +6938-70848-0000-1216: EVEN THE SUN CAME OUT PALE AND WATERY AT NOON +6938-70848-0001-1217: THE (COLDS->GOLDS) AND RHEUMATISM OF THE RAINY MONTHS VANISHED +6938-70848-0002-1218: (ASKED A->AS TO) WORKER LAST SUNDAY YOU DID IT WHEN THE YUNKERS +6938-70848-0003-1219: WELL DIDN'T THEY SHOOT US ONE MAN EXHIBITED HIS ARM IN A SLING +6938-70848-0004-1220: HAVEN'T I GOT SOMETHING TO REMEMBER THEM BY THE DEVILS +6938-70848-0005-1221: WHO ARE YOU TO DESTROY THE LEGAL GOVERNMENT (WHO IS LENIN->WITH LANY) A GERMAN +6938-70848-0006-1222: WHO ARE YOU A COUNTER (REVOLUTIONIST A PROVOCATOR->REVOLITIONIST APPROPATOR) THEY (BELLOWED->BELOVED) AT HIM +6938-70848-0007-1223: YOU CALL YOURSELVES THE PEOPLE OF (RUSSIA BUT YOU'RE->A SHEPHERD YOU ARE) NOT THE PEOPLE OF RUSSIA +6938-70848-0008-1224: (THE PEASANTS ARE THE->TO PIECE AND OTHER) PEOPLE OF RUSSIA WAIT UNTIL THE PEASANTS +6938-70848-0009-1225: WE KNOW WHAT THE PEASANTS WILL SAY AREN'T THEY (WORKINGMEN->WORKING MEN) LIKE OURSELVES +6938-70848-0010-1226: (THESE MEN ESPECIALLY->THIS MAN HAS SPECIALLY) WELCOMED THE CALL TO A CONGRESS OF PEASANTS +6938-70848-0011-1227: (THESE->THIS) LAST (WERE->WHERE) THE YOUNG GENERATION WHO HAD BEEN SERVING IN THE ARMY +6938-70848-0012-1228: WHEREUPON THE OLD (EXECUTIVE->EXECUTED) COMMITTEE LEFT THE HALL +6938-70848-0013-1229: DOWN WITH HIM THEY SHRIEKED +6938-70848-0014-1230: FEARFUL TUMULT (CRIES DOWN WITH THE BOLSHEVIKI->QUITE CHEVIKI) 
+6938-70848-0015-1231: UPON MY RETURN I VISITED (SMOLNY->MORLEY) NO SUCH ACCUSATION WAS MADE AGAINST ME THERE AFTER A BRIEF CONVERSATION I LEFT AND (THAT'S ALL->THAT SOUL) LET (ANY ONE->ANYONE) PRESENT MAKE SUCH AN ACCUSATION +6938-70848-0016-1232: MEANWHILE THE QUESTION OF THE (STATUS->STRATAS) OF THE (EXECUTIVE->EXECUTED) COMMITTEE WAS AGITATING ALL MINDS +6938-70848-0017-1233: BY (DECLARING THE->DECLINING THEIR) ASSEMBLY EXTRAORDINARY CONFERENCE IT HAD BEEN PLANNED TO (BLOCK->PLUCK) THE (REELECTION->RE ELECTION) OF THE EXECUTIVE COMMITTEE +6938-70848-0018-1234: BUT THIS (WORKED->WORTH) BOTH WAYS THE (LEFT SOCIALIST REVOLUTIONISTS->LAST SOCIALLY REVOLUTION IS) DECIDED THAT IF THE CONGRESS HAD NO POWER OVER THE EXECUTIVE COMMITTEE THEN THE EXECUTIVE COMMITTEE HAD NO POWER OVER THE CONGRESS +6938-70848-0019-1235: ON THE TWENTY SEVENTH OCCURRED THE DEBATE ON THE LAND QUESTION WHICH REVEALED THE DIFFERENCES BETWEEN THE (AGRARIAN->AGRIAN) PROGRAMME OF THE BOLSHEVIKI AND THE LEFT SOCIALIST REVOLUTIONARIES +6938-70848-0020-1236: THE (CONSTITUENT->CONSTITUTE) ASSEMBLY WILL NOT DARE TO BREAK WITH THE WILL OF THE PEOPLE +6938-70848-0021-1237: FOLLOWED HIM LENIN LISTENED TO NOW WITH ABSORBING INTENSITY +6938-70848-0022-1238: THE FIRST STAGE WAS (THE->A) CRUSHING OF AUTOCRACY AND THE (CRUSHING->CRASHING) OF THE POWER OF THE INDUSTRIAL (CAPITALISTS->CAPITALIST) AND (LAND OWNERS->THE LANDOWNERS) WHOSE INTERESTS ARE CLOSELY RELATED +6938-70848-0023-1239: (THE DUMAS AND ZEMSTVOS->DID YOU ME SEND THEMSELVES) WERE DROPPED +6938-70848-0024-1240: HE KNEW THAT AN AGREEMENT WITH THE BOLSHEVIKI WAS BEING DISCUSSED BUT HE DID NOT KNOW THAT IT HAD BEEN CONCLUDED +6938-70848-0025-1241: HE SPOKE TO THE (RUMP->WRONG) CONVENTION +6938-70848-0026-1242: THE (VILLAGES->RELIGIOUS) WILL SAVE US IN THE END +6938-70848-0027-1243: BUT THE PRESENT (MOVEMENT->MOMENT) IS INTERNATIONAL AND THAT IS WHY IT IS INVINCIBLE +6938-70848-0028-1244: THE (WILL->WIDOW) OF MILLIONS OF WORKERS IS (NOW->SO) CONCENTRATED IN (THIS->THE) HALL +6938-70848-0029-1245: A NEW HUMANITY WILL BE BORN OF THIS WAR +6938-70848-0030-1246: I GREET YOU WITH THE (CHRISTENING->CHRISTIANNING) OF A NEW RUSSIAN LIFE AND FREEDOM +7018-75788-0000-135: THEN I TOOK UP A GREAT STONE FROM AMONG THE TREES AND COMING UP TO HIM SMOTE HIM THEREWITH ON THE HEAD WITH ALL MY MIGHT AND CRUSHED IN HIS SKULL AS HE LAY DEAD DRUNK +7018-75788-0001-136: BEHOLD A SHIP WAS MAKING FOR THE ISLAND THROUGH THE DASHING SEA AND CLASHING WAVES +7018-75788-0002-137: HEARING THIS I WAS SORE TROUBLED REMEMBERING WHAT I HAD BEFORE SUFFERED FROM THE APE KIND +7018-75788-0003-138: UPON THIS HE BROUGHT ME A (COTTON->COT AND) BAG AND (GIVING->GIVEN) IT TO ME SAID TAKE THIS BAG AND FILL IT WITH PEBBLES FROM THE BEACH AND GO FORTH WITH A COMPANY OF THE TOWNSFOLK TO WHOM I WILL GIVE A CHARGE RESPECTING THEE +7018-75788-0004-139: DO AS THEY DO AND (BELIKE->BE LIKE) THOU SHALT GAIN WHAT MAY FURTHER THY RETURN VOYAGE TO THY NATIVE LAND +7018-75788-0005-140: THEN HE CARRIED ME TO THE BEACH WHERE I FILLED MY BAG WITH PEBBLES LARGE AND SMALL AND PRESENTLY WE SAW A COMPANY OF FOLK ISSUE FROM THE TOWN EACH BEARING A BAG LIKE MINE FILLED WITH PEBBLES +7018-75788-0006-141: TO THESE HE COMMITTED ME COMMENDING ME TO THEIR CARE AND SAYING THIS MAN IS A STRANGER SO TAKE HIM WITH YOU AND TEACH HIM HOW TO GATHER THAT HE MAY GET HIS DAILY BREAD AND YOU WILL EARN YOUR REWARD AND RECOMPENSE IN HEAVEN +7018-75788-0007-142: NOW SLEEPING UNDER THESE TREES WERE MANY (APES->IPES) WHICH WHEN THEY SAW US ROSE AND FLED FROM US 
AND SWARMED UP AMONG THE BRANCHES WHEREUPON MY COMPANIONS BEGAN TO PELT THEM WITH WHAT THEY HAD IN THEIR BAGS AND THE APES FELL TO PLUCKING OF THE FRUIT OF THE TREES AND CASTING THEM AT THE FOLK +7018-75788-0008-143: WE (WEIGHED->WADE) ANCHOR (AND->AN) SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED SAYING HER PERMITTED SAY +7018-75788-0009-144: WHEN IT WAS THE FIVE HUNDRED AND FIFTY NINTH NIGHT +7018-75788-0010-145: AND CEASED NOT SAILING TILL WE ARRIVED SAFELY AT (BASSORAH->PUSSARA) +7018-75788-0011-146: THERE I ABODE A LITTLE AND THEN WENT ON TO (BAGHDAD->BAGDAD) WHERE I ENTERED MY QUARTER AND FOUND MY HOUSE AND (FOREGATHERED->FOR GATHERED) WITH MY FAMILY AND SALUTED MY FRIENDS WHO GAVE ME JOY OF MY SAFE RETURN AND I LAID UP ALL MY GOODS AND VALUABLES IN MY STOREHOUSES +7018-75788-0012-147: AFTER WHICH I RETURNED TO MY OLD MERRY WAY OF LIFE AND FORGOT ALL I HAD SUFFERED IN THE GREAT PROFIT AND GAIN I HAD MADE +7018-75788-0013-148: NEXT MORNING AS SOON AS IT WAS LIGHT HE PRAYED THE DAWN PRAYER AND AFTER BLESSING MOHAMMED THE CREAM OF ALL CREATURES BETOOK HIMSELF TO THE HOUSE OF (SINDBAD->SINBAD) THE SEAMAN AND WISHED HIM A GOOD DAY +7018-75788-0014-149: HERE I FOUND A GREAT SHIP READY FOR SEA AND FULL OF MERCHANTS AND NOTABLES WHO HAD WITH THEM GOODS OF PRICE SO I EMBARKED MY BALES THEREIN +7018-75788-0015-150: (HAPLY->HAPPILY) AMONGST YOU IS ONE RIGHTEOUS WHOSE PRAYERS THE LORD WILL ACCEPT +7018-75788-0016-151: PRESENTLY THE SHIP STRUCK THE MOUNTAIN AND BROKE UP AND ALL AND EVERYTHING ON BOARD OF HER WERE PLUNGED INTO THE SEA +7018-75788-0017-152: BUT (IT BURNETH->AT BERNETH) IN THEIR BELLIES SO THEY CAST IT UP AGAIN AND IT CONGEALETH ON THE SURFACE OF THE WATER WHEREBY ITS COLOR AND QUANTITIES ARE CHANGED AND AT LAST THE WAVES CAST IT ASHORE AND THE TRAVELLERS AND MERCHANTS WHO KNOW IT (COLLECT IT->COLLECTED) AND SELL IT +7018-75788-0018-153: EACH THAT DIED WE WASHED AND SHROUDED IN SOME OF THE CLOTHES AND LINEN CAST ASHORE BY THE TIDES AND AFTER A LITTLE THE REST OF MY FELLOWS PERISHED ONE BY ONE TILL I HAD BURIED THE LAST OF THE PARTY AND (ABODE->A BOAT) ALONE ON THE ISLAND WITH BUT A LITTLE PROVISION LEFT I WHO WAS WONT TO HAVE SO MUCH +7018-75788-0019-154: BUT THERE IS MAJESTY AND THERE IS NO MIGHT SAVE IN ALLAH THE GLORIOUS THE GREAT +7018-75789-0000-155: WHEN IT WAS THE FIVE HUNDRED AND SIXTY FIRST NIGHT +7018-75789-0001-156: THEN (SIGHING->SIGNED) FOR MYSELF I SET TO WORK COLLECTING A NUMBER OF PIECES OF CHINESE AND (COMORIN ALOES->CORMOR AND ALLIES) WOOD AND I BOUND THEM TOGETHER WITH ROPES FROM THE WRECKAGE THEN I CHOSE OUT FROM THE BROKEN UP (SHIPS->SHIP) STRAIGHT PLANKS OF EVEN SIZE AND FIXED THEM FIRMLY UPON THE (ALOES->ALLIES) WOOD MAKING ME A BOAT RAFT A LITTLE NARROWER THAN THE CHANNEL OF THE STREAM AND I TIED IT TIGHTLY AND FIRMLY AS THOUGH IT WERE NAILED +7018-75789-0002-157: LAND AFTER LAND SHALT THOU (SEEK AND FIND->SEE CONFINED) BUT NO OTHER LIFE ON THY WISH SHALL WAIT FRET NOT THY SOUL IN THY THOUGHTS (O->A) NIGHT (ALL->OR) WOES SHALL END OR SOONER OR LATE +7018-75789-0003-158: I (ROWED->RIDE) MY CONVEYANCE INTO THE PLACE WHICH WAS INTENSELY DARK AND THE CURRENT CARRIED (*->ME) THE RAFT WITH IT DOWN THE UNDERGROUND CHANNEL +7018-75789-0004-159: AND I THREW MYSELF DOWN UPON MY FACE ON THE RAFT BY REASON OF THE NARROWNESS OF THE CHANNEL WHILST THE STREAM CEASED NOT TO CARRY ME ALONG KNOWING NOT NIGHT FROM DAY FOR THE EXCESS OF THE GLOOM WHICH ENCOMPASSED ME ABOUT (AND->IN) MY TERROR AND CONCERN FOR MYSELF LEST I SHOULD PERISH +7018-75789-0005-160: WHEN I AWOKE AT LAST 
I FOUND MYSELF IN THE LIGHT OF HEAVEN AND OPENING MY EYES I SAW MYSELF IN A BROAD STREAM AND THE RAFT MOORED TO AN ISLAND IN THE MIDST OF A NUMBER OF INDIANS AND ABYSSINIANS +7018-75789-0006-161: BUT I WAS DELIGHTED AT MY ESCAPE FROM THE RIVER +7018-75789-0007-162: WHEN THEY SAW I UNDERSTOOD THEM NOT AND MADE THEM NO ANSWER ONE OF THEM CAME FORWARD AND SAID TO ME IN ARABIC PEACE BE WITH THEE O MY BROTHER +7018-75789-0008-163: O MY BROTHER ANSWERED HE WE ARE HUSBANDMEN AND (TILLERS->TELLERS) OF THE SOIL WHO CAME OUT TO WATER OUR FIELDS AND PLANTATIONS AND FINDING THEE ASLEEP ON THIS RAFT LAID HOLD OF IT AND MADE IT FAST BY US AGAINST THOU (SHOULDST->SHOULDEST) AWAKE AT THY LEISURE +7018-75789-0009-164: I ANSWERED FOR ALLAH'S SAKE (O->AM) MY LORD ERE I SPEAK GIVE ME SOMEWHAT TO EAT FOR I AM STARVING AND AFTER ASK ME WHAT THOU WILT +7018-75789-0010-165: WHEN IT WAS THE FIVE HUNDRED AND SIXTY SECOND NIGHT +7018-75789-0011-166: SHE SAID IT HATH (REACHED->RAGED) ME O AUSPICIOUS KING THAT (SINDBAD->SINBAD) THE SEAMAN CONTINUED WHEN I LANDED AND FOUND MYSELF AMONGST THE INDIANS AND ABYSSINIANS AND HAD TAKEN SOME REST THEY CONSULTED AMONG THEMSELVES AND SAID TO ONE ANOTHER THERE IS NO HELP FOR IT BUT WE CARRY HIM WITH US AND PRESENT HIM TO OUR KING THAT HE MAY ACQUAINT HIM WITH HIS ADVENTURES +7018-75789-0012-167: SO I CONSORTED WITH THE CHIEF OF THE ISLANDERS AND THEY PAID ME THE UTMOST RESPECT +7018-75789-0013-168: SO I ROSE WITHOUT STAY OR DELAY AND KISSED THE KING'S HAND AND ACQUAINTED HIM WITH MY LONGING TO SET OUT WITH THE MERCHANTS FOR THAT I PINED AFTER MY PEOPLE AND MINE OWN LAND +7018-75789-0014-169: QUOTH HE THOU ART THINE OWN MASTER YET IF IT BE THY WILL TO ABIDE WITH US ON OUR HEAD (AND EYES->NIGHS) BE IT FOR THOU GLADDENEST US WITH THY COMPANY +7018-75789-0015-170: BY ALLAH O MY LORD ANSWERED I THOU HAST INDEED OVERWHELMED ME WITH THY FAVOURS AND WELL DOINGS BUT I WEARY FOR A SIGHT OF MY FRIENDS AND FAMILY AND NATIVE COUNTRY +7018-75789-0016-171: THEN I TOOK LEAVE OF HIM AND OF ALL MY INTIMATES AND ACQUAINTANCES IN THE ISLAND AND EMBARKED WITH THE MERCHANTS AFORESAID +7018-75789-0017-172: HE ASKED ME WHENCE THEY CAME AND I SAID TO HIM BY ALLAH (O->A) COMMANDER OF THE FAITHFUL I KNOW NOT THE NAME OF THE CITY NOR THE WAY THITHER +7018-75789-0018-173: FOR STATE PROCESSIONS A THRONE IS SET FOR HIM UPON A HUGE ELEPHANT ELEVEN CUBITS HIGH AND UPON THIS HE SITTETH HAVING HIS GREAT LORDS AND OFFICERS AND GUESTS STANDING IN TWO RANKS ON HIS RIGHT HAND AND ON HIS LEFT +7018-75789-0019-174: HIS LETTER HATH SHOWN ME THIS AND AS FOR THE MIGHTINESS OF HIS DOMINION THOU HAST TOLD US WHAT THOU HAST (EYE->I) WITNESSED +7018-75789-0020-175: PRESENTLY MY FRIENDS CAME TO ME AND I DISTRIBUTED PRESENTS AMONG MY FAMILY AND GAVE (ALMS->ARMS) AND LARGESSE AFTER WHICH I YIELDED MYSELF TO JOYANCE AND ENJOYMENT MIRTH AND (MERRY MAKING->MERRYMAKING) AND FORGOT ALL THAT I HAD SUFFERED +7018-75789-0021-176: SUCH THEN O MY BROTHERS IS THE HISTORY OF WHAT (BEFEL->BEFELL) ME IN MY SIXTH VOYAGE AND TO MORROW INSHALLAH +7018-75789-0022-177: I WILL TELL YOU THE STORY OF MY SEVENTH AND LAST VOYAGE WHICH IS STILL MORE WONDROUS AND MARVELLOUS THAN THAT OF THE FIRST SIX +7018-75789-0023-178: WHEN IT WAS THE FIVE HUNDRED AND SIXTY THIRD NIGHT +7018-75789-0024-179: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN (SINDBAD->SINBAD) THE SEAMAN HAD (RELATED->RELIGHTED) THE HISTORY OF WHAT (BEFEL->BEFELL) HIM IN HIS SIXTH VOYAGE AND ALL THE COMPANY HAD DISPERSED (SINDBAD->SINBAD) THE LANDSMAN WENT HOME AND SLEPT AS OF WONT 
+7018-75789-0025-180: THE SEVENTH VOYAGE OF (SINDBAD->SINBAD) THE (SEAMAN->SALMON) +7018-75789-0026-181: (KNOW->NO) O COMPANY THAT AFTER MY RETURN FROM MY SIXTH VOYAGE WHICH BROUGHT ME ABUNDANT PROFIT I RESUMED MY FORMER LIFE (IN->AND) ALL POSSIBLE JOYANCE AND ENJOYMENT AND MIRTH AND MAKING MERRY DAY AND NIGHT AND I TARRIED SOME TIME IN THIS SOLACE AND SATISFACTION TILL MY SOUL BEGAN ONCE MORE TO LONG TO SAIL THE SEAS AND SEE FOREIGN COUNTRIES AND COMPANY WITH MERCHANTS AND (HEAR->HERE) NEW THINGS +7018-75789-0027-182: SO HAVING MADE UP MY MIND I PACKED UP IN BALES A QUANTITY OF PRECIOUS STUFFS SUITED FOR SEA TRADE AND REPAIRED WITH THEM FROM BAGHDAD CITY TO (BASSORAH->BASSERA) TOWN WHERE I FOUND A SHIP READY FOR SEA AND IN HER A COMPANY OF CONSIDERABLE MERCHANTS +7018-75789-0028-183: BUT THE CAPTAIN AROSE AND (TIGHTENING->TIGHTENED IN) HIS GIRDLE TUCKED UP HIS SKIRTS AND AFTER TAKING REFUGE WITH ALLAH FROM SATAN THE (STONED CLOMB->STONE CLIMBED) TO THE MAST HEAD WHENCE HE LOOKED OUT RIGHT AND LEFT AND GAZING AT THE PASSENGERS AND CREW FELL TO (BUFFETING->BUFFET IN) HIS FACE AND PLUCKING OUT HIS BEARD +7018-75789-0029-184: THIS HE (SET->SAID) IN A SAUCER WETTED WITH A LITTLE WATER AND AFTER WAITING A SHORT TIME SMELT AND TASTED IT AND THEN HE TOOK OUT OF THE CHEST A BOOKLET WHEREIN HE READ (AWHILE->A WHILE) AND SAID WEEPING KNOW O YE PASSENGERS THAT IN THIS BOOK IS A MARVELLOUS MATTER DENOTING THAT WHOSO (COMETH HITHER->COME THITHER) SHALL SURELY DIE WITHOUT HOPE OF ESCAPE FOR THAT THIS OCEAN IS CALLED THE SEA OF THE CLIME OF THE KING WHEREIN IS (THE->A) SEPULCHRE OF OUR LORD SOLOMON SON OF DAVID ON BOTH BE PEACE +7018-75789-0030-185: A SECOND FISH (MADE->READ) ITS APPEARANCE (THAN->AND) WHICH WE HAD SEEN (NAUGHT->NOUGHT) MORE MONSTROUS +7018-75789-0031-186: WHEN SUDDENLY A VIOLENT SQUALL OF WIND AROSE AND SMOTE THE SHIP WHICH ROSE OUT OF THE WATER AND SETTLED UPON A GREAT REEF THE HAUNT OF SEA MONSTERS WHERE IT BROKE UP AND FELL ASUNDER INTO PLANKS AND ALL AND EVERYTHING ON BOARD WERE PLUNGED INTO THE SEA +7105-2330-0000-2310: UNFORTUNATELY THERE COULD BE NO DOUBT (OR->OUR) MISCONCEPTION AS (TO PLATTERBAFF'S->THE PLATTERBUFF'S) GUILT +7105-2330-0001-2311: HE HAD NOT ONLY (PLEADED->PLAYED IT) GUILTY BUT HAD EXPRESSED HIS INTENTION OF REPEATING HIS ESCAPADE IN OTHER DIRECTIONS AS SOON AS CIRCUMSTANCES PERMITTED THROUGHOUT THE TRIAL HE WAS BUSY EXAMINING A SMALL MODEL OF THE FREE TRADE HALL IN MANCHESTER +7105-2330-0002-2312: (THE JURY->VERY CHEERY) COULD NOT POSSIBLY FIND THAT THE PRISONER HAD NOT DELIBERATELY AND INTENTIONALLY BLOWN UP THE ALBERT HALL THE QUESTION WAS COULD THEY FIND ANY (EXTENUATING->EXTINUATING) CIRCUMSTANCES WHICH WOULD PERMIT OF AN ACQUITTAL +7105-2330-0003-2313: OF COURSE ANY SENTENCE (WHICH->REACHED) THE LAW MIGHT FEEL COMPELLED TO INFLICT WOULD BE FOLLOWED BY AN IMMEDIATE PARDON BUT IT WAS HIGHLY DESIRABLE FROM THE (GOVERNMENT'S POINT OF->GOVERNMENTS BY A) VIEW THAT THE NECESSITY FOR SUCH AN EXERCISE OF CLEMENCY SHOULD NOT ARISE +7105-2330-0004-2314: (A HEADLONG->I HAD LONG) PARDON (ON->AND) THE EVE OF A (BYE ELECTION->BIOLECTION) WITH THREATS OF A HEAVY VOTING (DEFECTION->AFFECTION) IF IT WERE WITHHELD OR EVEN DELAYED WOULD NOT NECESSARILY BE A SURRENDER BUT IT WOULD LOOK LIKE ONE +7105-2330-0005-2315: HENCE (THE->THEIR) ANXIETY IN THE CROWDED COURT AND IN THE LITTLE GROUPS GATHERED ROUND THE TAPE MACHINES IN (WHITEHALL->WHITE HALL) AND (DOWNING->DAWNING) STREET (AND OTHER->ANOTHER) AFFECTED CENTRES +7105-2330-0006-2316: (THE JURY RETURNED->THEIR CHEERY RETURN) 
FROM CONSIDERING THEIR VERDICT THERE WAS A FLUTTER AN EXCITED MURMUR A (DEATHLIKE->DEATH LIKE) HUSH +7105-2330-0007-2317: THE (FOREMAN->FOUR MEN) DELIVERED HIS MESSAGE +7105-2330-0008-2318: THE (JURY->CHERRY) FIND THE PRISONER GUILTY OF BLOWING UP THE ALBERT HALL +7105-2330-0009-2319: (THE JURY->THEY JERRY) WISH TO ADD A (RIDER->WRITER) DRAWING ATTENTION TO THE FACT THAT A (BY ELECTION->BILL) IS (PENDING->SPENDING) IN THE PARLIAMENTARY DIVISION OF NEMESIS ON HAND +7105-2330-0010-2320: AND (MAY->MADE) THE (LORD->LARD) HAVE MERCY ON THE (POLL->POLE) A (JUNIOR COUNSEL->GENIOR CONSUL) EXCLAIMED IRREVERENTLY +7105-2330-0011-2321: FIFTEEN HUNDRED SAID THE PRIME MINISTER WITH A SHUDDER IT'S TOO HORRIBLE TO THINK OF +7105-2330-0012-2322: OUR MAJORITY LAST TIME WAS ONLY A THOUSAND AND SEVEN +7105-2330-0013-2323: SEVEN THIRTY AMENDED THE PRIME MINISTER WE MUST AVOID ANY APPEARANCE OF PRECIPITANCY +7105-2330-0014-2324: NOT LATER THAN SEVEN THIRTY THEN SAID THE CHIEF (ORGANISER->ORGANIZER) I HAVE PROMISED THE AGENT DOWN THERE THAT HE SHALL BE ABLE TO DISPLAY POSTERS ANNOUNCING PLATTERBAFF IS OUT BEFORE THE (POLL->POLE) OPENS +7105-2330-0015-2325: HE SAID IT WAS (OUR->HER) ONLY CHANCE OF GETTING A TELEGRAM (RADPROP->REDRUP) IS (IN->INN) TO NIGHT +7105-2330-0016-2326: (DESPITE->THIS SPITE) THE EARLINESS OF THE HOUR A SMALL CROWD HAD GATHERED IN THE STREET OUTSIDE AND THE HORRIBLE MENACING (TRELAWNEY->TREEONER) REFRAIN OF THE FIFTEEN HUNDRED VOTING MEN CAME IN A STEADY MONOTONOUS CHANT +7105-2330-0017-2327: HE EXCLAIMED WON'T GO +7105-2330-0018-2328: HE SAYS HE NEVER HAS LEFT PRISON WITHOUT A (BRASS BAND->BREASTPAND) TO PLAY HIM OUT AND HE'S NOT GOING TO GO WITHOUT ONE NOW +7105-2330-0019-2329: SAID THE PRIME MINISTER WE CAN HARDLY BE SUPPOSED TO SUPPLY A (RELEASED->LESS) PRISONER WITH A BRASS BAND HOW ON EARTH COULD WE (DEFEND IT->DEFENDED) ON THE ESTIMATES +7105-2330-0020-2330: (ANYWAY HE->AND AWAY YOU) WON'T GO UNLESS HE HAS A BAND +7105-2330-0021-2331: (POLL->PAUL) OPENS IN FIVE MINUTES +7105-2330-0022-2332: (IS PLATTERBAFF->HIS FURTHER BATH) OUT YET +7105-2330-0023-2333: IN HEAVEN'S NAME WHY +7105-2330-0024-2334: THE CHIEF (ORGANISER->ORGANIZER) RANG OFF +7105-2330-0025-2335: THIS IS NOT A MOMENT FOR STANDING ON DIGNITY HE OBSERVED BLUNTLY (MUSICIANS->MEASIANS) MUST BE SUPPLIED AT ONCE +7105-2330-0026-2336: CAN'T YOU GET (A->US) STRIKE PERMIT ASKED THE (ORGANISER->ORGANIZER) +7105-2330-0027-2337: I'LL TRY SAID THE HOME SECRETARY AND WENT TO THE TELEPHONE +7105-2330-0028-2338: EIGHT O'CLOCK STRUCK THE CROWD OUTSIDE CHANTED WITH AN INCREASING VOLUME OF SOUND (WILL VOTE->REVOTE) THE OTHER WAY +7105-2330-0029-2339: (A TELEGRAM WAS->I TELEGRAMAS) BROUGHT IN +7105-2330-0030-2340: IT WAS FROM THE CENTRAL (COMMITTEE->COMEDY) ROOMS AT NEMESIS +7105-2330-0031-2341: WITHOUT A BAND HE WOULD NOT GO AND THEY HAD NO BAND +7105-2330-0032-2342: A QUARTER PAST TEN HALF PAST +7105-2330-0033-2343: HAVE YOU ANY BAND INSTRUMENTS OF AN EASY NATURE TO PLAY +7105-2330-0034-2344: DEMANDED THE CHIEF (ORGANISER->ORGANIZER) OF THE PRISON GOVERNOR DRUMS (CYMBALS->SYMBOLS) THOSE SORT OF THINGS +7105-2330-0035-2345: THE (WARDERS->ORDERS) HAVE A PRIVATE BAND OF THEIR OWN SAID THE GOVERNOR BUT OF COURSE I COULDN'T ALLOW THE MEN THEMSELVES +7105-2330-0036-2346: (LEND US->BLENDEST) THE INSTRUMENTS SAID THE CHIEF ORGANISER +7105-2330-0037-2347: (THE->THOUGH) POPULAR SONG OF THE MOMENT REPLIED THE AGITATOR AFTER A MOMENT'S REFLECTION +7105-2330-0038-2348: IT WAS A TUNE THEY HAD ALL HEARD HUNDREDS OF TIMES SO THERE (WAS->IS) NO 
DIFFICULTY IN TURNING OUT A PASSABLE IMITATION OF IT TO THE IMPROVISED (STRAINS->TRAINS) OF I (DIDN'T->DON'T) WANT TO DO IT THE PRISONER STRODE FORTH TO FREEDOM +7105-2330-0039-2349: THE WORD OF THE (SONG->SUN) HAD REFERENCE IT WAS UNDERSTOOD (TO THE->THAT) INCARCERATING GOVERNMENT AND NOT TO THE DESTROYER OF THE ALBERT HALL +7105-2330-0040-2350: (THE SEAT->THIS HEAT) WAS LOST AFTER ALL BY A NARROW (MAJORITY->MATURITY) +7105-2330-0041-2351: THE LOCAL TRADE UNIONISTS TOOK OFFENCE AT THE FACT OF (CABINET MINISTERS->CABINETS) HAVING PERSONALLY ACTED AS STRIKE BREAKERS AND EVEN THE RELEASE OF (PLATTERBAFF->PLATTERBUFF) FAILED TO PACIFY THEM +7105-2340-0000-2272: WITH THAT NOTORIOUS FAILING OF HIS HE WAS NOT THE SORT OF PERSON ONE WANTED IN ONE'S HOUSE +7105-2340-0001-2273: WELL THE FAILING STILL EXISTS DOESN'T IT SAID (HER->THE) HUSBAND OR (*->A) DO YOU SUPPOSE A REFORM OF CHARACTER IS ENTAILED ALONG WITH THE ESTATE +7105-2340-0002-2274: BESIDES (CYNICISM APART HIS->CYS IN A PART IS) BEING RICH (WILL->WE'LL) MAKE A DIFFERENCE IN THE WAY PEOPLE WILL LOOK AT HIS (FAILING->FEELING) +7105-2340-0003-2275: WHEN A MAN IS ABSOLUTELY WEALTHY NOT MERELY WELL TO DO ALL SUSPICION OF (SORDID->SARDID) MOTIVE (NATURALLY->NATURAL) DISAPPEARS THE THING BECOMES MERELY A (TIRESOME->PARASAN) MALADY +7105-2340-0004-2276: (WILFRID PIGEONCOTE->WILFRED DIGESON COLT) HAD SUDDENLY BECOME HEIR TO HIS UNCLE SIR WILFRID (PIGEONCOTE->PIGEON COAT) ON THE DEATH OF HIS COUSIN MAJOR (WILFRID PIGEONCOTE->WILFRED PIGEONOTE) WHO HAD SUCCUMBED (TO->*) THE (*->DAY) AFTER EFFECTS OF (A POLO->APOLLO) ACCIDENT +7105-2340-0005-2277: (A WILFRID PIGEONCOTE->OF WILFRED BEECH AND COURT) HAD COVERED HIMSELF WITH HONOURS IN THE COURSE OF MARLBOROUGH'S CAMPAIGNS AND THE NAME (WILFRID->LOYAL FRED) HAD BEEN (A BAPTISMAL->ABOVE THE SMALL) WEAKNESS IN THE FAMILY EVER SINCE THE NEW HEIR TO THE FAMILY DIGNITY AND ESTATES WAS A YOUNG MAN OF ABOUT FIVE AND TWENTY WHO WAS KNOWN MORE BY (REPUTATION->REPETITION) THAN BY PERSON TO (A WIDE->AVIDE) CIRCLE OF COUSINS AND KINSFOLK +7105-2340-0006-2278: AND THE REPUTATION WAS AN UNPLEASANT ONE +7105-2340-0007-2279: FROM HIS LATE (SCHOOLDAYS->SCHOOL DAYS) ONWARD HE HAD BEEN POSSESSED BY AN ACUTE AND OBSTINATE FORM OF (KLEPTOMANIA->CLEFTOMANIA) HE HAD THE ACQUISITIVE INSTINCT OF THE COLLECTOR WITHOUT ANY OF THE COLLECTOR'S DISCRIMINATION +7105-2340-0008-2280: (THE->THIS) SEARCH USUALLY PRODUCED A LARGE AND VARIED YIELD THIS IS FUNNY SAID PETER (PIGEONCOTE->PIGEON BOAT) TO HIS WIFE (SOME->THEM) HALF HOUR AFTER THEIR CONVERSATION (HERE'S->HERE IS) A TELEGRAM FROM (WILFRID->MILFRED) SAYING HE'S PASSING THROUGH HERE IN HIS MOTOR AND WOULD LIKE TO STOP AND PAY US HIS RESPECTS +7105-2340-0009-2281: (SIGNED WILFRID PIGEONCOTE->SIGN WILFRED PEACH AND CO) +7105-2340-0010-2282: I SUPPOSE (HE'S->THIS) BRINGING US A PRESENT FOR THE SILVER WEDDING GOOD GRACIOUS +7105-2340-0011-2283: THE TALK FLITTED NERVOUSLY AND HURRIEDLY FROM ONE IMPERSONAL TOPIC TO ANOTHER +7105-2340-0012-2284: IN THE DRAWING ROOM AFTER DINNER THEIR NERVOUSNESS AND AWKWARDNESS INCREASED +7105-2340-0013-2285: OH WE HAVEN'T SHOWN YOU THE (SILVER->SILVERY) WEDDING PRESENTS SAID MISSUS PETER SUDDENLY AS THOUGH STRUCK BY A BRILLIANT IDEA FOR ENTERTAINING THE GUEST HERE THEY ALL ARE +7105-2340-0014-2286: SUCH NICE (USEFUL GIFTS->FORGIFTS) A FEW (DUPLICATES->DEPLICATES) OF COURSE +7105-2340-0015-2287: SEVEN (CREAM->QUEEN) JUGS PUT IN PETER +7105-2340-0016-2288: WE FEEL THAT WE MUST LIVE (ON CREAM->UNCREAM) FOR THE REST OF OUR LIVES +7105-2340-0017-2289: OF 
COURSE SOME OF THEM CAN BE CHANGED +7105-2340-0018-2290: I PUT IT DOWN BY THE (CLARET JUG->CLARGA) SAID (WILFRID->WILFRIED) BUSY WITH ANOTHER OBJECT +7105-2340-0019-2291: (VIGILANCE->EACH A LENS) WAS NOT COMPLETELY CROWNED WITH A SENSE OF VICTORY +7105-2340-0020-2292: AFTER THEY HAD SAID GOOD NIGHT TO THEIR VISITOR MISSUS PETER EXPRESSED HER CONVICTION THAT HE HAD TAKEN SOMETHING +7105-2340-0021-2293: HOW ON EARTH ARE WE TO KNOW SAID PETER THE MEAN PIG HASN'T BROUGHT US A PRESENT AND I'M HANGED IF HE SHALL CARRY ONE (OFF->OF) +7105-2340-0022-2294: (IT'S->IS) THE ONLY THING TO DO +7105-2340-0023-2295: (WILFRID->WILFRED) WAS LATE IN COMING DOWN TO BREAKFAST AND HIS MANNER SHOWED PLAINLY THAT SOMETHING WAS AMISS +7105-2340-0024-2296: (IT'S->IS) AN UNPLEASANT THING TO HAVE TO SAY HE BLURTED OUT PRESENTLY BUT I'M AFRAID YOU MUST HAVE A THIEF AMONG YOUR SERVANTS SOMETHING'S BEEN TAKEN OUT OF MY PORTMANTEAU +7105-2340-0025-2297: IT WAS A LITTLE PRESENT FROM MY MOTHER AND MYSELF FOR YOUR SILVER WEDDING +7105-2340-0026-2298: I SHOULD HAVE GIVEN IT TO YOU LAST NIGHT AFTER DINNER ONLY IT HAPPENED TO BE A (CREAM->QUEEN) JUG AND YOU SEEMED ANNOYED AT HAVING SO MANY DUPLICATES SO I FELT RATHER AWKWARD (ABOUT->OF A) GIVING YOU ANOTHER +7105-2340-0027-2299: (THE->THIS) SNATCHER HAD BEEN AN ORPHAN (THESE->THIS) MANY YEARS +7105-2340-0028-2300: LADY (ERNESTINE PIGEONCOTE->ERNESTON BEECH AND COLD) HIS MOTHER MOVED IN CIRCLES WHICH WERE ENTIRELY BEYOND THEIR COMPASS OR AMBITIONS AND THE (SON->SUN) WOULD PROBABLY ONE DAY BE AN AMBASSADOR +7105-2340-0029-2301: HUSBAND AND WIFE LOOKED BLANKLY AND DESPERATELY AT ONE ANOTHER +7105-2340-0030-2302: IT WAS MISSUS PETER WHO ARRIVED FIRST AT AN INSPIRATION HOW DREADFUL TO THINK THERE ARE THIEVES IN THE HOUSE WE KEEP THE DRAWING ROOM LOCKED UP AT NIGHT OF COURSE BUT ANYTHING MIGHT BE CARRIED OFF WHILE WE ARE AT BREAKFAST +7105-2340-0031-2303: SHE ROSE AND WENT OUT HURRIEDLY AS THOUGH TO ASSURE HERSELF THAT THE DRAWING ROOM WAS NOT BEING STRIPPED OF ITS (SILVERWARE->SILVER WARE) AND RETURNED A MOMENT LATER BEARING A CREAM JUG IN HER HANDS +7105-2340-0032-2304: THE (PIGEONCOTES->PIGEON CORDS) HAD TURNED PALER THAN EVER MISSUS PETER HAD A FINAL INSPIRATION +7105-2340-0033-2305: PETER DASHED OUT OF THE ROOM WITH GLAD RELIEF HE HAD LIVED SO LONG DURING THE LAST FEW MINUTES THAT A GOLDEN WEDDING SEEMED WITHIN MEASURABLE DISTANCE +7105-2340-0034-2306: MISSUS (PETER->BEATER) TURNED TO HER GUEST WITH CONFIDENTIAL (COYNESS->KINDNESS) +7105-2340-0035-2307: (PETER'S->PETER IS) LITTLE WEAKNESS (IT RUNS->EACH ONE'S) IN THE FAMILY GOOD LORD +7105-2340-0036-2308: DO YOU MEAN TO SAY (HE'S->HE IS) A (KLEPTOMANIAC->CLAPTOMANIA) LIKE COUSIN SNATCHER +7105-2340-0037-2309: (BRAVE->PRETTY) LITTLE WOMAN SAID PETER WITH A GASP OF RELIEF I COULD NEVER HAVE DONE IT +7902-96591-0000-0: (I AM->AND) FROM THE CUTTER LYING OFF THE COAST +7902-96591-0001-1: DON'T CRY HE SAID I WAS OBLIGED TO COME +7902-96591-0002-2: AND AND YOU HAVE NOT FOUND OUT ANYTHING CAME IN QUICK FRIGHTENED TONES +7902-96591-0003-3: I WISH YOU WOULD BELIEVE ME THAT I AM IN AS GREAT TROUBLE ABOUT IT AS YOU ARE +7902-96591-0004-4: THAT MY FATHER SIR RISDON (GRAEME HAS->GRAHAME) SMUGGLED GOODS HERE +7902-96591-0005-5: HE COULD NOT HELP IT HE HATES THE SMUGGLERS YOU SHALL NOT TELL +7902-96591-0006-6: PRAY PRAY SAY YOU WILL NOT (ARCHY->ARCHIE) WAS SILENT +7902-96591-0007-7: THEN AS (ARCHY->ARCHIE) STOOD IN THE DARK LITERALLY AGHAST WITH ASTONISHMENT HE HEARD THE FAINT RUSTLING ONCE MORE AND AGAIN ALL WAS SILENT +7902-96591-0008-8: HE 
LAUGHED BUT IT WAS A CURIOUS KIND OF LAUGH FULL OF VEXATION INJURED (AMOUR PROPRE->AMORE A PROPER) AS THE FRENCH CALL OUR LOVE OF OUR OWN DIGNITY OF WHICH (ARCHIBALD RAYSTOKE->ARQUEBALD RAY STROKE) IN THE FULL FLUSH OF HIS YOUNG BELIEF IN HIS IMPORTANCE AS A BRITISH OFFICER HAD A PRETTY GOOD STOCK +7902-96591-0009-9: (IT->AND) ALL COMES OF DRESSING UP IN THIS STUPID WAY LIKE A ROUGH FISHER LAD +7902-96591-0010-10: COLD WATER CAME ON THIS IDEA DIRECTLY AS HE RECALLED THE FACT THAT THE DARKNESS WAS INTENSE AND CELIA COULD NOT HAVE SEEN HIM +7902-96591-0011-11: I'LL SOON SHOW THEM THAT I AM NOT GOING TO BE PLAYED WITH +7902-96591-0012-12: FOR IT SUDDENLY OCCURRED TO HIM THAT HE WAS NOT ONLY A PRISONER BUT A PRISONER IN THE POWER OF A VERY RECKLESS SET OF PEOPLE WHO WOULD STOP AT NOTHING +7902-96591-0013-13: NO HE THOUGHT TO HIMSELF I DON'T BELIEVE THEY WOULD KILL ME BUT THEY WOULD KNOCK ME ABOUT +7902-96591-0014-14: THE (KICK HE->KICKIE) HAD RECEIVED WAS A FORETASTE OF WHAT HE MIGHT EXPECT AND AFTER A LITTLE CONSIDERATION HE CAME TO THE CONCLUSION THAT HIS DUTY WAS TO ESCAPE AND GET BACK TO THE CUTTER AS QUICKLY AS HE COULD +7902-96591-0015-15: TO DO THIS HE MUST SCHEME LIE HID TILL MORNING (THEN->THAN) MAKE FOR THE NEAREST POINT AND SIGNAL FOR HELP UNLESS A BOAT'S CREW WERE ALREADY SEARCHING FOR HIM HOW TO ESCAPE +7902-96591-0016-16: THE WINDOW WAS BARRED BUT HE WENT TO IT AND TRIED THE BARS ONE BY ONE TO FIND THEM ALL SOLIDLY FITTED INTO THE STONE SILL +7902-96591-0017-17: NEXT MOMENT AS HE FELT HIS WAY ABOUT HIS HAND TOUCHED AN OLD FASHIONED MARBLE MANTELPIECE FIREPLACE CHIMNEY +7902-96591-0018-18: YES IF OTHER WAYS FAILED HE COULD ESCAPE UP THE CHIMNEY +7902-96591-0019-19: NO THAT WAS TOO BAD HE (COULD NOT->CANNOT) DO THAT +7902-96591-0020-20: SYMPATHY AND PITY FOR THE DWELLERS IN THE (HOZE->HOSE) WERE COMPLETELY GONE NOW AND HE SET HIS TEETH FAST AND MENTALLY CALLED HIMSELF A WEAK IDIOT FOR EVER THINKING ABOUT SUCH PEOPLE +7902-96591-0021-21: A NARROW TABLE AGAINST THE WALL IN TWO PLACES +7902-96591-0022-22: HE WENT AND TRIED TO FORCE HIS HEAD THROUGH RECALLING AS HE DID THAT WHERE A PERSON'S HEAD WOULD GO THE REST OF THE BODY WOULD PASS +7902-96591-0023-23: BUT THERE WAS NO CHANCE FOR HIS BODY THERE THE HEAD WOULD NOT GO FIRST +7902-96591-0024-24: A FELLOW WHO WAS SHUT UP IN PRISON FOR LIFE MIGHT DO IT HE SAID BUT NOT IN A CASE LIKE THIS +7902-96592-0000-25: SURE (YOU'VE LOOKED->YOU LOOK) ROUND EVERYWHERE BOY YES FATHER QUITE +7902-96592-0001-26: I'M GOING HOME TO BREAKFAST +7902-96592-0002-27: SHALL I COME (TOO->TO) FATHER NO +7902-96592-0003-28: STOP HERE TILL SIR RISDON COMES DOWN AND TELL HIM I'M VERY SORRY THAT WE SHOULD HAVE CLEARED OUT LAST NIGHT ONLY A BORN FOOL SAW JERRY (NANDY'S->ANDY'S) LOBSTER BOAT COMING INTO THE COVE AND CAME RUNNING TO SAY IT WAS A PARTY FROM THE CUTTER YES FATHER +7902-96592-0004-29: TELL HIM NOT TO BE UNEASY TIS ALL RIGHT AND I'LL HAVE EVERYTHING CLEAR AWAY TO NIGHT +7902-96592-0005-30: THE DULL SOUND OF DEPARTING STEPS AND A LOW WHISTLING SOUND COMING DOWN THROUGH THE SKYLIGHT WINDOW INTO THE CABIN WHERE (ARCHY RAYSTOKE->ARCHIE RAY STROKE) LAY WITH HIS HEAVY EYELIDS PRESSED DOWN BY SLEEP +7902-96592-0006-31: WHAT A QUEER DREAM HE THOUGHT TO HIMSELF +7902-96592-0007-32: BUT HOW QUEER FOR MISTER (GURR->GIRT) TO BE TALKING LIKE THAT TO ANDREW (TEAL->TEALE) THE BOY WHO (HELPED->HELPS) THE COOK +7902-96592-0008-33: AND WHY DID ANDY CALL MISTER (GURR FATHER->GERFATHER) +7902-96592-0009-34: THERE WAS AN INTERVAL OF THINKING OVER THIS (KNOTTY->NAUGHTY) 
QUESTION DURING WHICH THE LOW WHISTLING WENT ON +7902-96592-0010-35: AND (I'M HUNGRY->UNHUNGRY) TOO (TIME I->TELL IT) WAS UP I SUPPOSE +7902-96592-0011-36: NO HE WAS NOT DREAMING FOR HE WAS LOOKING OUT ON THE SEA OVER WHICH A FAINT MIST HUNG LIKE WREATHS OF SMOKE +7902-96592-0012-37: WHAT DID THEY SAY FALSE ALARM TELL SIR (RISDON->RISDEN) THEY WOULD CLEAR ALL AWAY TO NIGHT SEE IF ANYTHING HAD BEEN LEFT ABOUT LOBSTER BOAT +7902-96592-0013-38: ONCE OUT OF THAT ROOM HE COULD (RAN->RUN) AND BY DAYLIGHT THE SMUGGLERS (DARE->DARED) NOT HUNT HIM DOWN +7902-96592-0014-39: OH THOSE BARS HE MENTALLY EXCLAIMED AND HE WAS ADVANCING (TOWARD->TOWARDS) THEM WHEN JUST AS HE DREW NEAR THERE WAS A RUSTLING NOISE UNDER THE WINDOW A COUPLE OF HANDS SEIZED THE BARS THERE WAS A SCRATCHING OF BOOT TOES AGAINST STONE WORK AND RAM'S FACE APPEARED TO GAZE INTO THE ROOM BY INTENTION BUT INTO THE ASTONISHED COUNTENANCE OF THE YOUNG MIDSHIPMAN INSTEAD +7902-96592-0015-40: (RAM->ROOM) WAS THE FIRST TO RECOVER FROM HIS SURPRISE +7902-96592-0016-41: HULLO HE SAID WHO ARE YOU +7902-96592-0017-42: GO ROUND AND OPEN THE DOOR I WAS SHUT IN LAST NIGHT BY MISTAKE +7902-96592-0018-43: I SAW YOU LAST NIGHT AND WONDERED WHOSE BOY (YOU->HE) WAS +7902-96592-0019-44: IT WAS (YOU->YOUR) FATHER KICKED FOR SHIRKING AND MY WELL I HARDLY KNOWED YOU +7902-96592-0020-45: NONSENSE +7902-96592-0021-46: WON'T DO SAID RAM GRINNING +7902-96592-0022-47: THINK I DON'T KNOW YOU MISTER (ORFICER->ORFASTER) +7902-96592-0023-48: (WON'T->WELL) DO SAID RAM QUICKLY I KNOW YOU +7902-96592-0024-49: (BEEN PLAYING->COMPLYING) THE SPY THAT'S WHAT YOU'VE BEEN DOING WHO LOCKED YOU IN +7902-96592-0025-50: (ARCHY->ARCHIE) STEPPED BACK TO THE DOOR LISTENING BUT THERE WAS NOT A SOUND +7902-96592-0026-51: HE HAS GONE TO GIVE THE ALARM THOUGHT THE PRISONER AND HE LOOKED EXCITEDLY ROUND FOR A WAY OF ESCAPE +7902-96592-0027-52: NOTHING BUT THE CHIMNEY PRESENTED ITSELF +7902-96592-0028-53: A HAPPY INSPIRATION HAD COME AND PLACING ONE HAND UPON HIS (BREAST->CHEST) HE THRUST IN THE OTHER GAVE A TUG AND DREW OUT HIS LITTLE CURVED DIRK GLANCED AT THE EDGE RAN TO THE WINDOW AND BEGAN TO CUT AT ONE OF THE BARS (LABOUR->LABOR) IN VAIN +7902-96592-0029-54: HE DIVIDED THE PAINT AND PRODUCED A FEW SQUEAKS AND GRATING SOUNDS AS HE (REALISED->REALIZED) THAT THE ATTEMPT WAS MADNESS +7902-96592-0030-55: THE RESULT WAS NOT VERY SATISFACTORY BUT SUFFICIENTLY SO TO MAKE HIM ESSAY THE BAR OF THE WINDOW ONCE MORE PRODUCING A GRATING (EAR ASSAILING->IRISH SELLING) SOUND AS HE FOUND THAT NOW HE DID MAKE A LITTLE IMPRESSION SO LITTLE THOUGH THAT THE PROBABILITY WAS IF HE KEPT ON WORKING WELL FOR TWENTY FOUR HOURS HE WOULD NOT GET THROUGH +7902-96592-0031-56: BUT AT THE END OF FIVE MINUTES HE STOPPED AND THRUST BACK THE DIRK INTO ITS SHEATH +7902-96592-0032-57: NO I CAN'T PART WITH THAT HA HA (HA->*) LAUGHED THE BOY JEERINGLY +7902-96592-0033-58: BUT (I'LL->ALL) YES I'LL GIVE YOU A GUINEA IF YOU WILL LET ME OUT +7902-96592-0034-59: (GUINEA SAID->GUINEAS OF) THE BOY THINK (I'D->I'LL) DO IT FOR A GUINEA WELL THEN (TWO->TOO) +7902-96592-0035-60: BE QUICK THERE'S A GOOD FELLOW I WANT TO GET AWAY AT ONCE +7902-96592-0036-61: NOT YOU ONLY A SHAM +7902-96592-0037-62: WHY YOUR CLOTHES DON'T FIT YOU AND YOUR CAP'S PUT ON ALL (SKEW REW->SKIRO) +7902-96592-0038-63: NEVER MIND ABOUT THAT LET ME OUT OF THIS PLACE +7902-96592-0039-64: I TOLD YOU A FISHER BOY CRIED (ARCHY->ARCHIE) IMPATIENTLY BUT TRYING NOT TO OFFEND HIS VISITOR WHO POSSESSED THE POWER OF CONFERRING FREEDOM BY SPEAKING SHARPLY 
+7902-96592-0040-65: NOT YOU LOOK LIKE A WILD BEAST IN A CAGE LIKE A MONKEY YOU INSOLENT +7902-96592-0041-66: (ARCHY->ARCHIE) CHECKED HIMSELF AND THE BOY LAUGHED +7902-96592-0042-67: IT WAS YOUR TURN YESTERDAY IT'S MINE TO DAY WHAT A GAME +7902-96592-0043-68: YOU LAUGHED AND (FLEERED->FLARED) AT ME WHEN I WAS ON THE CUTTER'S DECK +7902-96592-0044-69: I SAY YOU DO LOOK (*->LIKE) A (RUM UN->ROMAN) JUST LIKE A BIG MONKEY IN A SHOW +7902-96592-0045-70: RAM SHOWED HIS WHITE TEETH AS HE BURST OUT WITH A LONG LOW FIT OF LAUGHTER +7902-96592-0046-71: YOU (ROPE'S END->HOPES AND) ME HE SAID +7902-96592-0047-72: WHY I COULD (TIE->TELL) YOU UP IN A KNOT AND HEAVE YOU OFF THE CLIFF ANY DAY WHAT A GAME +7902-96592-0048-73: BIT OF (A MIDDY->AMITY) FED ON (*->A) SALT TACK AND (WEEVILLY->WEEVILY) BISCUIT TALK OF GIVING ME (ROPE'S END->ROPES AND) +7902-96592-0049-74: ONCE MORE WILL YOU COME AND LET ME OUT NO +7902-96592-0050-75: TO HIS ASTONISHMENT THE BOY DID NOT FLINCH BUT THRUST HIS OWN ARMS THROUGH PLACING (THEM->HIM) ABOUT THE MIDDY'S WAIST CLENCHING HIS (HANDS->HAND) BEHIND AND UTTERING A SHARP WHISTLE +7902-96594-0000-76: (SEEMED IN GOOD SPIRITS->SEEMING AT SPEAR'S) LAST NIGHT MISTER (GURR->GARR) EH +7902-96594-0001-77: YES SIR BUT HE MAY TURN UP ON THE CLIFF AT ANY MOMENT +7902-96594-0002-78: YES MEN QUITE READY YES SIR +7902-96594-0003-79: (THAT'S RIGHT->THE THREAD) OF COURSE (WELL ARMED->WILL ALARMED) +7902-96594-0004-80: SOON AS THE SIGNAL COMES WE SHALL PUSH OFF +7902-96594-0005-81: AWKWARD (BIT O->BITTER) COUNTRY SIR SIX MILES ROW BEFORE YOU CAN FIND A PLACE TO LAND +7902-96594-0006-82: SO SHALL WE YET SIR +7902-96594-0007-83: YOU DON'T THINK MISTER (GURR->GORE) THAT THEY WOULD DARE TO INJURE HIM IF HE WAS SO UNLUCKY AS TO BE CAUGHT +7902-96594-0008-84: WELL SIR SAID THE MASTER HESITATING SMUGGLERS ARE SMUGGLERS +7902-96594-0009-85: CERTAINLY SIR SMUGGLERS ARE SMUGGLERS (INDEED->INDE) +7902-96594-0010-86: (BEG->THEY) PARDON SIR DIDN'T MEAN ANY HARM +7902-96594-0011-87: I'M GETTING VERY ANXIOUS ABOUT MISTER (RAYSTOKE->RAYSTROKE) START AT ONCE SIR +7902-96594-0012-88: NO WAIT ANOTHER (*->AND) HALF HOUR +7902-96594-0013-89: VERY ILL ADVISED THING TO DO +7902-96594-0014-90: (THEN->THAT) I MUST REQUEST THAT YOU WILL NOT MAKE IT AGAIN VERY TRUE +7902-96594-0015-91: (AWK WARD->AWKWARD) MISTER (GURR->GARR) AWKWARD +7902-96594-0016-92: YES SIR OF COURSE +7902-96594-0017-93: SAY (AWK WARD->AWKWARD) IN (*->THE) FUTURE NOT (AWK'ARD->UPWARD) +7902-96594-0018-94: I MEAN (ALL ALONE->OUR OWN) BY MYSELF SIR +7902-96594-0019-95: WHAT FOR THERE (AREN'T A->ARE TO) PUBLIC HOUSE FOR TEN MILES DIDN'T MEAN THAT +7902-96594-0020-96: THEN WHAT DID YOU MEAN SPEAK OUT AND DON'T DO THE DOUBLE SHUFFLE ALL OVER MY CLEAN DECK NO SIR +7902-96594-0021-97: (HOPPING->HAVING) ABOUT (LIKE A->THE GOOD) CAT ON HOT BRICKS +7902-96594-0022-98: NOW THEN WHY DO YOU WANT TO GO ASHORE +7902-96594-0023-99: (BEG->THEY) PARDON DIDN'T MEAN (NOWT->OUT) SIR SAID THE SAILOR TOUCHING HIS FORELOCK +7902-96594-0024-100: YES SIR SAID THE MAN HUMBLY SHALL I GO AT ONCE SIR +7902-96594-0025-101: NO WAIT +7902-96594-0026-102: (KEEP A->HE WAS) SHARP LOOK OUT ON THE CLIFF TO SEE IF MISTER (RAYSTOKE->RAYSTROKE) IS MAKING SIGNALS FOR A (BOAT->BO) +7902-96594-0027-103: HE SWUNG ROUND WALKED (AFT->OFF) AND BEGAN SWEEPING (THE SHORE->ASHORE) AGAIN WITH HIS GLASS WHILE THE MASTER AND DICK EXCHANGED GLANCES WHICH MEANT A GREAT DEAL +7902-96594-0028-104: AT LAST THE LITTLE LIEUTENANT COULD BEAR THE ANXIETY NO LONGER +7902-96594-0029-105: (PIPE->PEG) AWAY (THE MEN 
TO->THEM INTO) THAT BOAT THERE HE SAID AND AS THE CREW SPRANG IN +7902-96594-0030-106: (NOW->NO) MISTER GURR HE SAID I'M ONLY GOING TO SAY ONE THING TO YOU IN THE WAY OF INSTRUCTIONS YES SIR +7902-96594-0031-107: BEG PARDON SIR SAID THE MASTER DEPRECATINGLY +7902-96594-0032-108: STEADY MY (LADS->LAD) STEADY CRIED THE MASTER KEEP STROKE AND THEN HE BEGAN TO MAKE PLANS AS TO HIS FIRST PROCEEDINGS ON GETTING ASHORE +7902-96595-0000-109: SAY (MESTER GURR->MISTER GIRK) SAID DICK AFTER ONE OF THESE SEARCHES HE WOULDN'T RUN AWAY WHAT +7902-96595-0001-110: MISTER RAYSTOKE SIR DON'T BE A FOOL +7902-96595-0002-111: WHAT (CHUCKED HIM OFF->SAID DE MORVE) YONDER +7902-96595-0003-112: (GURR->GIRK) GLANCED ROUND TO SEE IF THE MEN WERE LOOKING AND THEN SAID RATHER HUSKILY (BUT->BE) KINDLY +7902-96595-0004-113: AH EJACULATED DICK SADLY +7902-96595-0005-114: SAY (MESTER GURR SIR->MISTER GER) WHICH THANKFUL I AM (TO->FOR) YOU FOR SPEAKING SO BUT YOU DON'T REALLY THINK AS HE HAS COME TO HARM +7902-96595-0006-115: I HOPE NOT DICK I (HOPE NOT->OPEN IT) BUT SMUGGLERS DON'T STAND AT ANYTHING SOMETIMES +7902-96595-0007-116: I DO ASSURE YOU THERE'S NOTHING HERE BUT WHAT YOU MAY SEE +7902-96595-0008-117: IF (YOU'D->YOU) LET ME FINISH YOU'D KNOW SAID (GURR GRUFFLY->GRIGGLY) ONE OF OUR BOYS IS MISSING SEEN (HIM->EM) UP HERE +7902-96595-0009-118: BOY (BOUT->ABOUT) SEVENTEEN WITH A RED CAP NO SIR INDEED I'VE NOT +7902-96595-0010-119: DON'T KNOW AS HE HAS BEEN SEEN ABOUT HERE DO YOU SAID (GURR->GIRL) LOOKING AT HER SEARCHINGLY NO SIR +7902-96595-0011-120: IF SHE KNEW EVIL HAD COME TO THE POOR LAD HER FACE WOULD TELL TALES LIKE PRINT +7902-96595-0012-121: I (SAID A LAD BOUT->STOOD ALOUD ABOUT) SEVENTEEN IN A RED (CAP LIKE->CAPLICH) YOURS SAID (GURR->GREW) VERY SHORTLY +7902-96595-0013-122: THE MAN SHOOK HIS HEAD AND STARED AS IF HE DIDN'T HALF UNDERSTAND THE DRIFT OF WHAT WAS SAID +7902-96595-0014-123: HERE MY LAD WHERE'S YOUR MASTER +7902-96595-0015-124: EH I SAY (WHERE'S->WAS) YOUR MASTER +7902-96595-0016-125: (GURR->GERT) TURNED AWAY IMPATIENTLY AGAIN AND (SIGNING->SUNNING) TO HIS MEN TO FOLLOW THEY ALL BEGAN TO TRAMP UP (THE->A) STEEP (TRACK->CHECK) LEADING TOWARD THE (HOZE->HOSE) WITH THE (RABBITS->RABBIT'S) SCUTTLING AWAY AMONG THE (FURZE->FIRS) AND SHOWING THEIR WHITE COTTONY TAILS FOR A MOMENT AS THEY DARTED DOWN INTO THEIR HOLES +7902-96595-0017-126: I (DUNNO->DON'T KNOW) MUTTERED DICK AND A (MAN->MEN) CAN'T BE SURE +7902-96595-0018-127: (GURR->GER) SALUTED AND STATED HIS BUSINESS WHILE THE BARONET WHO HAD TURNED (SALLOWER->SALARY) AND MORE CAREWORN THAN HIS LOT DREW A BREATH (*->OF) FULL OF RELIEF ONE OF YOUR SHIP BOYS HE SAID +7902-96595-0019-128: A LAD LOOKING LIKE A COMMON SAILOR AND WEARING A RED CAP NO SAID SIR RISDON +7902-96595-0020-129: I HAVE SEEN NO ONE ANSWERING TO THE DESCRIPTION HERE +7902-96595-0021-130: (BEG PARDON SIR BUT CAN YOU->BIG PARTISER BECAME) AS (A->*) GENTLEMAN ASSURE ME THAT HE IS NOT HERE CERTAINLY SAID SIR RISDON +7902-96595-0022-131: SURELY CRIED SIR RISDON EXCITEDLY +7902-96595-0023-132: SIR (RISDON->RICHARD) WAS SILENT +7902-96595-0024-133: LADY (GRAEME->GRAHAM) LOOKED GHASTLY +7902-96595-0025-134: YOU DO NOT KNOW NO +7975-280057-0000-1008: THESE HATREDS WERE SOON TO MAKE TROUBLE FOR ME OF WHICH I HAD NEVER DREAMED +7975-280057-0001-1009: HENRY WASHINGTON YOUNGER MY FATHER REPRESENTED JACKSON COUNTY THREE TIMES IN THE LEGISLATURE AND WAS ALSO (*->A) JUDGE OF THE COUNTY COURT +7975-280057-0002-1010: MY MOTHER WHO WAS (BURSHEBA FRISTOE->PERCEIVER FOR STOVE) OF INDEPENDENCE WAS 
(THE->A) DAUGHTER OF RICHARD (FRISTOE->FRISTOW) WHO FOUGHT UNDER GENERAL ANDREW JACKSON AT NEW ORLEANS JACKSON COUNTY HAVING BEEN SO NAMED (AT->AND) MY GRANDFATHER (FRISTOE'S INSISTENCE->FIRST THOSE INSISTANTS) +7975-280057-0003-1011: I CANNOT REMEMBER WHEN I DID NOT KNOW HOW TO SHOOT +7975-280057-0004-1012: MY BROTHER JAMES WAS BORN JANUARY (FIFTEENTH->FIFTEEN) EIGHTEEN FORTY EIGHT JOHN (IN->AND) EIGHTEEN FIFTY ONE AND ROBERT IN DECEMBER EIGHTEEN FIFTY THREE +7975-280057-0005-1013: MY ELDEST BROTHER RICHARD DIED IN EIGHTEEN SIXTY +7975-280057-0006-1014: MY FATHER WAS IN THE EMPLOY OF THE UNITED STATES GOVERNMENT AND HAD THE (MAIL->MALE) CONTRACT FOR FIVE HUNDRED MILES +7975-280057-0007-1015: HE HAD STARTED BACK TO HARRISONVILLE IN A BUGGY BUT WAS WAYLAID ONE MILE SOUTH OF (WESTPORT->WESTWARD) A SUBURB OF KANSAS CITY AND BRUTALLY MURDERED FALLING OUT OF HIS BUGGY INTO THE ROAD WITH THREE MORTAL BULLET WOUNDS +7975-280057-0008-1016: (MISSUS->MISS) WASHINGTON (WELLS->WALES) AND HER SON SAMUEL ON THE ROAD HOME FROM KANSAS CITY TO (LEE'S->LEE) SUMMIT RECOGNIZED THE BODY AS THAT OF MY FATHER +7975-280057-0009-1017: (MISSUS WELLS STAYED->MUST WELL STAY) TO GUARD THE REMAINS (WHILE HER->WHETHER HIS) SON CARRIED THE NEWS OF THE MURDER TO COLONEL PEABODY OF THE FEDERAL COMMAND WHO WAS THEN IN CAMP AT KANSAS CITY +7975-280057-0010-1018: (MISSUS MC CORKLE->MISS MICROCLE) JUMPED FROM THE WINDOW OF THE HOUSE AND ESCAPED +7975-280057-0011-1019: AS THE RAIDERS (LEFT->LIVED) ONE OF THEM SHOUTED +7975-280057-0012-1020: NOW (OLD->*) LADY CALL ON YOUR PROTECTORS WHY DON'T YOU CALL (ON COLE->AND CO) YOUNGER NOW +7975-280057-0013-1021: EVERY KNOT REPRESENTED A HUMAN LIFE +7975-280057-0014-1022: BUT SHE FAILED TO (FIND THE->FANCY) COMFORT SHE (SOUGHT->SAW) FOR ANNOYANCES CONTINUED IN A MORE AGGRAVATED (FORM->FOR) +7975-280057-0015-1023: TWO MONTHS AFTER (THIS->THE) INCIDENT THE SAME PERSECUTORS AGAIN ENTERED OUR HOME IN THE (DEAD->DAY) OF THE NIGHT AND AT THE POINT OF A PISTOL TRIED TO FORCE MY MOTHER TO SET FIRE TO HER OWN HOME +7975-280057-0016-1024: I HAVE ALWAYS FELT THAT THE EXPOSURE TO WHICH SHE WAS SUBJECTED ON THIS CRUEL JOURNEY TOO HARD EVEN FOR A MAN TO TAKE WAS (THE->A) DIRECT CAUSE OF HER DEATH +7975-280057-0017-1025: FROM HARRISONVILLE SHE WENT TO (WAVERLY->WAVERLEY) WHERE SHE WAS (HOUNDED CONTINUALLY->HANDY CONTINUAL) +7975-280057-0018-1026: ONE OF THE CONDITIONS UPON WHICH HER LIFE WAS SPARED WAS THAT SHE WOULD REPORT (AT LEXINGTON->IT LESSINGTON) WEEKLY +7975-280057-0019-1027: ONE OF MY OLD SCHOOL TEACHERS WHOM I HAVE NEVER SEEN SINCE THE SPRING (OR->OF) SUMMER OF EIGHTEEN SIXTY TWO IS STEPHEN B ELKINS SENATOR FROM WEST VIRGINIA +7975-280057-0020-1028: WHEN I WAS (TAKEN->TAKING) PRISONER I EXPECTED TO BE SHOT WITHOUT CEREMONY +7975-280063-0000-1058: WE TOOK THE OATH PERHAPS THREE HUNDRED OF US DOWN ON LUTHER MASON'S FARM A FEW MILES FROM WHERE I NOW WRITE WHERE COLONEL (HAYS->HAYES) HAD ENCAMPED AFTER INDEPENDENCE +7975-280063-0001-1059: (BOONE MUIR->BOOM YOU) AND MYSELF (MET->MAKE) COFFEE (AND->IN) THE REST BELOW ROSE HILL ON GRAND RIVER +7975-280063-0002-1060: ACCORDINGLY I WAS SHORTLY AWAKENED TO ACCOMPANY HIM (TO LONE->THE LONG) JACK WHERE HE WOULD PERSONALLY MAKE KNOWN THE SITUATION TO THE OTHER COLONELS +7975-280063-0003-1061: FOSTER HAD NEARLY ONE THOUSAND (CAVALRYMEN->CAVERNMENT) AND TWO PIECES OF (RABB'S->RABBS) INDIANA BATTERY THAT HAD ALREADY MADE FOR ITSELF A NAME FOR HARD FIGHTING +7975-280063-0004-1062: (COME IN->COMMONED) COLONEL (HAYS->HAYES) EXCLAIMED COLONEL (COCKRELL->COCKLE) 
+7975-280063-0005-1063: I THINK HE'LL BE (RATHER TOUGH MEAT->READY TO HAVE MEET) FOR BREAKFAST I REPLIED HE MIGHT BE ALL RIGHT FOR DINNER +7975-280063-0006-1064: (JACKMAN->JACK WENT) WITH A PARTY OF THIRTY SEASONED MEN CHARGED THE INDIANA GUNS AND CAPTURED THEM BUT MAJOR FOSTER LED A GALLANT CHARGE AGAINST THE INVADERS AND (RECAPTURED->RE CAPTURED) THE PIECES +7975-280063-0007-1065: WE WERE OUT OF AMMUNITION AND WERE HELPLESS HAD THE FIGHT BEEN PRESSED +7975-280063-0008-1066: THEY DID MARK MY CLOTHES IN ONE OR TWO PLACES HOWEVER +7975-280063-0009-1067: MAJOR FOSTER IN A LETTER TO (JUDGE->JOE) GEORGE (M BENNETT->I INVITED) OF (MINNEAPOLIS->MANY APOLIS) SAID +7975-280063-0010-1068: I WAS TOLD BY SOME OF OUR MEN FROM THE WESTERN BORDER OF THE STATE THAT THEY RECOGNIZED (THE->A) DARING YOUNG (RIDER AS COLE->RATURIST COAL) YOUNGER +7975-280063-0011-1069: ABOUT NINE THIRTY A M I WAS SHOT DOWN +7975-280063-0012-1070: THE WOUNDED OF BOTH FORCES WERE GATHERED UP AND WERE PLACED IN HOUSES +7975-280076-0000-1029: ALTHOUGH EVERY BOOK (PURPORTING->REPORTING) TO NARRATE THE LIVES OF THE YOUNGER BROTHERS (HAS->IS) TOLD OF THE LIBERTY ROBBERY AND IMPLIED THAT WE HAD A PART IN IT THE YOUNGERS WERE NOT SUSPECTED AT THAT TIME NOR FOR A LONG TIME AFTERWARD +7975-280076-0001-1030: IT WAS CLAIMED BY PEOPLE OF LIBERTY THAT THEY POSITIVELY RECOGNIZED AMONG THE ROBBERS (OLL SHEPHERD RED MONKERS->ALL SHEPARD REDMOCKERS) AND BUD PENCE WHO HAD SEEN SERVICE WITH (QUANTRELL->QUANTREAL) +7975-280076-0002-1031: THIS (RAID->RAY) WAS ACCOMPANIED BY (BLOODSHED JUDGE->BLOTCHET JOSE) MC (LAIN->LANE) THE BANKER BEING SHOT THOUGH NOT FATALLY +7975-280076-0003-1032: (NO->THOUGH) WARRANT WAS ISSUED FOR THE YOUNGERS BUT SUBSEQUENT HISTORIANS HAVE INFERENTIALLY AT LEAST ACCUSED US OF TAKING PART BUT AS I SAID BEFORE THERE IS NO TRUTH IN THE ACCUSATION +7975-280076-0004-1033: JUNE THIRD EIGHTEEN SEVENTY ONE (OBOCOCK BROTHERS->OBEY BROTHER'S) BANK AT (CORYDON IOWA->CROYDEN HOUR) WAS ROBBED OF FORTY THOUSAND DOLLARS BY SEVEN MEN IN BROAD DAYLIGHT +7975-280076-0005-1034: IT WAS (CHARGED->CHARGE) THAT (ARTHUR MC COY->OFTEN MA KOY) OR A (C MC->SEA MAC) COY AND MYSELF HAD BEEN PARTICIPANTS IN THE GAD'S HILL AFFAIR AND THE TWO STAGE ROBBERIES +7975-280076-0006-1035: THE PARTS OF THIS LETTER NOW RELEVANT ARE AS FOLLOWS +7975-280076-0007-1036: YOU MAY USE THIS LETTER IN YOUR OWN WAY +7975-280076-0008-1037: I WILL GIVE YOU THIS OUTLINE AND SKETCH OF MY WHEREABOUTS AND ACTIONS AT THE TIME OF CERTAIN ROBBERIES WITH WHICH I AM CHARGED +7975-280076-0009-1038: (AT->IT'S) THE TIME OF THE (GALLATIN->GELATIN) BANK ROBBERY I WAS GATHERING CATTLE (IN ELLIS->AND ILLIS) COUNTY TEXAS (CATTLE THAT I BOUGHT->CATTLETTA BROUGHT) FROM (PLEAS->PLACE) TAYLOR AND RECTOR +7975-280076-0010-1039: THIS CAN BE PROVED BY BOTH OF THEM ALSO BY (SHERIFF BARKLEY->SIR PARKLEY) AND FIFTY OTHER RESPECTABLE MEN OF THAT COUNTY +7975-280076-0011-1040: I BROUGHT THE CATTLE (TO KANSAS THAT->THE KANSASTE) FALL AND REMAINED IN SAINT CLAIR COUNTY UNTIL FEBRUARY +7975-280076-0012-1041: I THEN WENT TO (ARKANSAS->OUR CONSOL) AND (RETURNED->RETURN) TO SAINT CLAIR COUNTY ABOUT THE FIRST OF MAY +7975-280076-0013-1042: (I->AND) WENT TO KANSAS WHERE (OUR CATTLE WERE IN->A CATTLERON) WOODSON COUNTY AT COLONEL (RIDGE'S->RICHES) +7975-280076-0014-1043: DURING (THE->*) SUMMER I WAS EITHER IN SAINT CLAIR (*->OR) JACKSON OR KANSAS BUT AS THERE WAS NO ROBBERY COMMITTED THAT SUMMER IT MAKES NO DIFFERENCE WHERE I WAS +7975-280076-0015-1044: (I->AND) WENT THROUGH INDEPENDENCE AND FROM THERE TO ACE 
(WEBB'S->WEBBS) +7975-280076-0016-1045: THERE I TOOK DINNER AND THEN WENT TO DOCTOR (L->OLD) W (TWYMAN'S->TWIMMAN) +7975-280076-0017-1046: OUR BUSINESS THERE WAS TO SEE E P WEST HE WAS NOT AT HOME BUT THE FAMILY WILL REMEMBER THAT WE WERE THERE +7975-280076-0018-1047: WE CROSSED ON THE BRIDGE (STAYED->STATE) IN THE CITY ALL NIGHT AND THE NEXT MORNING WE RODE UP (THROUGH->TO) THE CITY +7975-280076-0019-1048: (I MET->AMID) SEVERAL OF MY FRIENDS AMONG THEM WAS BOB (HUDSPETH->HUSBUTH) +7975-280076-0020-1049: WE WERE NOT (ON GOOD->ONLY) TERMS AT THE TIME NOR HAVE WE BEEN FOR SEVERAL YEARS +7975-280076-0021-1050: POOR JOHN HE HAS BEEN HUNTED DOWN AND SHOT LIKE A WILD BEAST AND NEVER WAS A BOY MORE INNOCENT +7975-280076-0022-1051: DOCTOR (L LEWIS->ELUS) WAS HIS PHYSICIAN +7975-280076-0023-1052: THERE WERE FIFTY OR (A->*) HUNDRED PERSONS THERE WHO WILL TESTIFY IN ANY COURT THAT JOHN AND I WERE THERE +7975-280076-0024-1053: (HELVIN->HELVAN) FICKLE AND WIFE OF (GREENTON->GREENSON) VALLEY WERE ATTENDING THE SPRINGS AT THAT TIME AND EITHER OF THEM WILL TESTIFY TO THE ABOVE FOR JOHN AND I (SAT->SET) IN FRONT OF MISTER SMITH WHILE HE WAS PREACHING AND WAS IN HIS COMPANY FOR A FEW MOMENTS TOGETHER WITH HIS WIFE AND MISTER AND (MISSUS->MISS) FICKLE AFTER (*->THE) SERVICE +7975-280076-0025-1054: ABOUT THE LAST OF DECEMBER EIGHTEEN SEVENTY THREE I ARRIVED IN (CARROLL->CAROL) PARISH LOUISIANA +7975-280076-0026-1055: I STAYED THERE UNTIL THE EIGHTH OF FEBRUARY EIGHTEEN SEVENTY FOUR +7975-280076-0027-1056: I HAD NOT HEARD OF THAT WHEN I WROTE THE LETTER OF EIGHTEEN SEVENTY FOUR AND TO CORRECT ANY MISAPPREHENSION THAT MIGHT BE CREATED BY OMITTING IT I WILL SAY THAT AT (THAT->THE) TIME I WAS AT (NEOSHO->NIOKILL) KANSAS WITH A DROVE OF CATTLE WHICH I SOLD TO MAJOR (RAY->WRAYE) +7975-280076-0028-1057: IT WAS IMMEDIATELY FOLLOWING THE ROCK ISLAND ROBBERY AT (ADAIR->EIGHT AIR) IOWA THAT (THERE->THEIR) FIRST APPEARED A (DELIBERATE->DELIVERED) ENLISTMENT OF SOME LOCAL PAPERS (IN->AND) MISSOURI TO CONNECT US WITH THIS ROBBERY +7975-280084-0000-1090: I URGED ON THE BOYS (THAT->AT) WHATEVER HAPPENED WE SHOULD NOT SHOOT ANY ONE +7975-280084-0001-1091: WHEN MILLER AND I CROSSED THE BRIDGE THE THREE WERE ON SOME (DRY GOODS->DRAGOOD) BOXES AT THE CORNER NEAR THE BANK AND AS SOON AS (THEY->I) SAW US WENT RIGHT INTO THE BANK INSTEAD OF WAITING FOR US TO GET THERE +7975-280084-0002-1092: WHEN WE CAME UP I TOLD MILLER TO SHUT THE BANK DOOR WHICH THEY HAD LEFT OPEN IN THEIR HURRY +7975-280084-0003-1093: J (S ALLEN->HELEN) WHOSE (HARDWARE STORE WAS->HARD WORKED ALWAYS) NEAR TRIED TO GO INTO THE BANK BUT MILLER ORDERED HIM AWAY AND HE RAN (AROUND->ROUND) THE CORNER SHOUTING +7975-280084-0004-1094: GET YOUR GUNS BOYS THEY'RE ROBBING THE BANK +7975-280084-0005-1095: AND I (CALLED->CALL) TO HIM TO GET INSIDE AT THE SAME TIME FIRING A PISTOL SHOT IN THE AIR AS (A->THE) SIGNAL TO THE THREE BOYS AT THE BRIDGE THAT WE HAD BEEN DISCOVERED +7975-280084-0006-1096: ALMOST AT THIS INSTANT I HEARD A PISTOL SHOT IN THE BANK +7975-280084-0007-1097: (CHADWELL->TEDWELL) WOODS AND JIM RODE UP AND (JOINED US->JARNDYCE) SHOUTING TO (*->THE) PEOPLE IN THE STREET TO GET INSIDE AND FIRING THEIR PISTOLS TO EMPHASIZE THEIR COMMANDS +7975-280084-0008-1098: IF ANY OF OUR PARTY SHOT HIM IT MUST HAVE BEEN WOODS +7975-280084-0009-1099: MEANTIME THE STREET WAS GETTING UNCOMFORTABLY HOT +7975-280084-0010-1100: EVERY TIME I SAW ANY ONE WITH A BEAD ON ME I WOULD DROP OFF MY HORSE AND (TRY->TROT) TO DRIVE THE SHOOTER INSIDE BUT I COULD NOT SEE IN EVERY DIRECTION 
+7975-280084-0011-1101: DOCTOR (WHEELER->WHALER) WHO HAD GONE UPSTAIRS IN THE HOTEL SHOT MILLER AND HE LAY DYING IN THE STREET +7975-280084-0012-1102: CHANGING HIS PISTOL TO HIS LEFT HAND BOB RAN OUT AND MOUNTED MILLER'S MARE +7975-280084-0013-1103: WHAT KEPT YOU SO LONG I ASKED PITTS +7975-280084-0014-1104: AS TO THE REST OF THE AFFAIR INSIDE THE BANK I TAKE THE ACCOUNT OF A (NORTHFIELD->NORTH FIELD) NARRATOR +7975-280084-0015-1105: WHERE'S THE MONEY OUTSIDE THE SAFE BOB ASKED +7975-280084-0016-1106: THE SHUTTERS WERE CLOSED AND THIS CAUSED BUNKER AN INSTANT'S DELAY THAT WAS ALMOST FATAL (PITTS->FITZ) CHASED HIM WITH A BULLET +7975-280084-0017-1107: THE FIRST ONE (MISSED HIM->MISTING) BUT THE SECOND WENT THROUGH HIS RIGHT SHOULDER +7975-280085-0000-1071: THAT NIGHT IT STARTED TO RAIN AND WE WORE OUT OUR HORSES +7975-280085-0001-1072: FRIDAY WE MOVED TOWARD WATERVILLE AND FRIDAY NIGHT WE (CAMPED->CAME) BETWEEN (ELYSIAN->ALYCIAN) AND GERMAN LAKE +7975-280085-0002-1073: (BOB'S SHATTERED ELBOW WAS->BOB SATURDAIL BOWS) REQUIRING FREQUENT ATTENTION AND THAT NIGHT WE MADE ONLY NINE MILES AND MONDAY MONDAY NIGHT AND TUESDAY WE SPENT IN A DESERTED FARM HOUSE CLOSE TO (MANKATO->MANKADO) +7975-280085-0003-1074: THAT (DAY->THEY) A MAN NAMED (DUNNING->DINNING) DISCOVERED US AND WE TOOK HIM PRISONER +7975-280085-0004-1075: FINALLY WE ADMINISTERED TO HIM AN OATH NOT TO BETRAY OUR WHEREABOUTS UNTIL WE HAD TIME TO MAKE OUR ESCAPE AND HE AGREED NOT TO +7975-280085-0005-1076: NO SOONER HOWEVER WAS HE RELEASED THAN HE MADE (POSTHASTE->POST HASTE) INTO (MANKATO->MANKE) TO ANNOUNCE OUR PRESENCE AND IN A FEW MINUTES ANOTHER POSSE WAS LOOKING FOR US +7975-280085-0006-1077: THE WHISTLE ON THE (OIL->ORE) MILL BLEW AND WE FEARED THAT IT WAS A SIGNAL THAT HAD BEEN AGREED UPON TO ALARM THE TOWN IN CASE WE WERE OBSERVED BUT WE WERE NOT MOLESTED +7975-280085-0007-1078: HE HAD TO SLEEP WITH (IT PILLOWED->A PILL IT) ON MY BREAST JIM BEING ALSO (*->A) CRIPPLED WITH A WOUND IN HIS SHOULDER AND WE COULD NOT GET MUCH SLEEP +7975-280085-0008-1079: BUT THEY SOON AFTER GOT CLOSE ENOUGH SO THAT ONE OF THEM BROKE MY WALKING STICK WITH A SHOT +7975-280085-0009-1080: WE WERE (IN SIGHT->INSIDE) OF OUR LONG (SOUGHT->SOWED) HORSES WHEN THEY CUT US OFF FROM THE ANIMALS AND OUR LAST HOPE WAS GONE +7975-280085-0010-1081: SIX (STEPPED->STEPS) TO THE FRONT SHERIFF (GLISPIN->CLISPIN) COLONEL T L (VOUGHT->WALT) B (M->AND) RICE G A BRADFORD C A (POMEROY->POMROY) AND S J SEVERSON +7975-280085-0011-1082: FORMING (IN->A) LINE FOUR PACES APART HE ORDERED THEM TO ADVANCE RAPIDLY AND CONCENTRATE THE FIRE OF THE WHOLE LINE THE INSTANT THE ROBBERS WERE DISCOVERED +7975-280085-0012-1083: MAKE FOR THE HORSES I SAID EVERY MAN FOR HIMSELF +7975-280085-0013-1084: THERE IS NO USE STOPPING TO PICK UP A COMRADE HERE (FOR->TILL) WE CAN'T GET HIM THROUGH THE LINE JUST (CHARGE->SHARS) THEM AND MAKE IT IF WE CAN +7975-280085-0014-1085: I GOT UP AS (THE->A) SIGNAL FOR THE CHARGE AND WE FIRED ONE VOLLEY +7975-280085-0015-1086: ONE OF THE FELLOWS IN THE OUTER (LINE->LAND) NOT BRAVE ENOUGH HIMSELF TO JOIN THE VOLUNTEERS WHO HAD COME IN TO BEAT US OUT WAS NOT DISPOSED TO BELIEVE IN THE SURRENDER AND HAD HIS GUN LEVELLED ON BOB IN SPITE OF THE HANDKERCHIEF WHICH WAS WAVING AS A FLAG OF TRUCE +7975-280085-0016-1087: (SHERIFF->SURE OF) GLISPIN OF (WATONWAN->WATERWAM) COUNTY WHO WAS TAKING BOB'S PISTOL FROM HIM WAS ALSO SHOUTING TO THE FELLOW +7975-280085-0017-1088: INCLUDING THOSE RECEIVED IN AND ON THE WAY FROM (NORTHFIELD->NORTH FIELD) I HAD ELEVEN (WOUNDS->ONES) 
+7975-280085-0018-1089: (AND SHERIFF->IN CHEER OF) GLISPIN'S ORDER NOT TO SHOOT WAS THE BEGINNING OF THE (PROTECTORATE->PROTECTOR) THAT MINNESOTA PEOPLE ESTABLISHED OVER US +8131-117016-0000-1303: CAPTAIN (MURDOCH->MURDOCK) +8131-117016-0001-1304: BUT MARSPORT HAD FLOURISHED ENOUGH TO KILL IT OFF +8131-117016-0002-1305: SOME OF MARS LAWS DATED FROM THE TIME WHEN (LAW ENFORCEMENT->LAWN FORCEMENT) HAD BEEN HAMPERED BY LACK OF MEN RATHER THAN BY THE TYPE OF MEN +8131-117016-0003-1306: THE (STONEWALL->STONE WALL) GANG NUMBERED PERHAPS FIVE HUNDRED +8131-117016-0004-1307: EVEN (DERELICTS AND->DEAR ALEXAM) FAILURES HAD TO EAT THERE WERE (STORES->STORIES) AND SHOPS THROUGHOUT THE DISTRICT WHICH EKED OUT SOME KIND OF A MARGINAL LIVING +8131-117016-0005-1308: THEY WERE SAFE FROM PROTECTION (RACKETEERS->RAGATIRS) THERE NONE BOTHERED TO COME SO FAR OUT +8131-117016-0006-1309: THE SHOPKEEPERS AND SOME OF THE LESS UNFORTUNATE PEOPLE THERE HAD PROTESTED LOUD ENOUGH TO REACH CLEAR BACK TO EARTH +8131-117016-0007-1310: CAPTAIN (MURDOCH->MURDOCK) WAS AN UNKNOWN FACTOR AND NOW WAS ASKING FOR MORE MEN +8131-117016-0008-1311: THE PRESSURE WAS ENOUGH TO GET THEM FOR HIM +8131-117016-0009-1312: GORDON REPORTED FOR WORK WITH A SENSE OF THE BOTTOM FALLING OUT MIXED WITH A VAGUE RELIEF +8131-117016-0010-1313: I'VE GOT A FREE HAND AND WE'RE GOING TO RUN THIS THE WAY WE WOULD ON EARTH +8131-117016-0011-1314: YOUR JOB IS TO PROTECT THE CITIZENS HERE AND THAT MEANS (EVERYONE->EVERY ONE) NOT BREAKING THE LAWS WHETHER YOU FEEL LIKE IT OR NOT NO GRAFT +8131-117016-0012-1315: THE FIRST MAN MAKING A (SHAKEDOWN->SHAKE DOWN) WILL GET THE SAME TREATMENT WE'RE GOING TO USE ON THE (STONEWALL->STONE WALL) BOYS YOU'LL GET DOUBLE PAY HERE AND YOU CAN LIVE ON IT +8131-117016-0013-1316: HE PICKED OUT FIVE OF THE MEN INCLUDING GORDON YOU FIVE WILL COME WITH ME +8131-117016-0014-1317: THE REST OF YOU CAN (TEAM->TEEM) UP ANY WAY YOU WANT (TONIGHT->TO NIGHT) PICK ANY (ROUTE->ROUGH) THAT'S OPEN (OKAY MEN->OH CAME AND) LET'S GO +8131-117016-0015-1318: (BRUCE->BRUSH) GORDON GRINNED SLOWLY AS HE SWUNG THE STICK AND (MURDOCH'S->MARDOC'S) EYES FELL ON HIM EARTH COP +8131-117016-0016-1319: TWO YEARS GORDON ADMITTED +8131-117016-0017-1320: FOR A SECOND GORDON CURSED HIMSELF +8131-117016-0018-1321: HE BEGAN WONDERING ABOUT SECURITY THEN +8131-117016-0019-1322: NOBODY HAD TRIED TO GET IN TOUCH WITH HIM +8131-117016-0020-1323: THERE WAS A CRUDE LIGHTING SYSTEM HERE PUT UP BY THE CITIZENS AT THE FRONT OF EACH BUILDING A DIM (PHOSPHOR->PHOSPHER) BULB GLOWED WHEN DARKNESS FELL THEY WOULD HAVE NOTHING ELSE TO SEE BY +8131-117016-0021-1324: MOVING IN TWO GROUPS OF THREES (AT->IT) OPPOSITE SIDES OF THE STREET THEY BEGAN THEIR BEAT +8131-117016-0022-1325: THERE WAS NO CHANCE TO SAVE THE CITIZEN WHO WAS DYING FROM LACK OF AIR +8131-117016-0023-1326: GORDON FELT THE SOLID PLEASURE OF THE FINELY TURNED CLUB IN HIS HANDS +8131-117016-0024-1327: GORDON'S EYES POPPED AT THAT +8131-117016-0025-1328: HE SWALLOWED THE SENTIMENT HIS OWN CLUB WAS MOVING NOW +8131-117016-0026-1329: THE OTHER (FOUR COPS->FUPS) HAD COME IN RELUCTANTLY +8131-117016-0027-1330: HE BROUGHT HIM TO THE GROUND WITH A SINGLE BLOW ACROSS THE KIDNEYS +8131-117016-0028-1331: THEY (ROUNDED->ROUTED) UP THE MEN OF THE GANG AND ONE OF THE (COPS->CUPS) STARTED OFF +8131-117016-0029-1332: TO FIND A PHONE AND CALL THE WAGON +8131-117016-0030-1333: (WE'RE->WERE) NOT USING WAGONS (MURDOCH->MURDOCK) TOLD HIM (LINE->LYING) THEM UP +8131-117016-0031-1334: IF THEY TRIED TO RUN THEY WERE HIT FROM BEHIND 
(IF->THAT) THEY STOOD STILL THEY WERE CLUBBED CAREFULLY +8131-117016-0032-1335: (MURDOCH->MURDOCK) INDICATED ONE WHO STOOD WITH HIS (SHOULDERS->SHOULDER) SHAKING AND TEARS RUNNING DOWN HIS CHEEKS +8131-117016-0033-1336: THE CAPTAIN'S FACE WAS AS SICK AS (GORDON->GORDON'S) FELT +8131-117016-0034-1337: I WANT THE NAME OF EVERY MAN IN THE GANG YOU CAN REMEMBER HE TOLD THE MAN +8131-117016-0035-1338: COLONEL THEY'D KILL ME I DON'T KNOW +8131-117016-0036-1339: (MURDOCH->MURDOCK) TOOK HIS NOD AS EVIDENCE ENOUGH AND TURNED TO THE WRETCHED (TOUGHS->TUFTS) +8131-117016-0037-1340: IF HE SHOULD TURN UP DEAD I'LL KNOW YOU BOYS ARE RESPONSIBLE AND I'LL FIND YOU +8131-117016-0038-1341: TROUBLE BEGAN BREWING SHORTLY AFTER THOUGH +8131-117016-0039-1342: (MURDOCH SENT->MARDOX SAT) ONE OF THE MEN TO PICK UP A SECOND SQUAD OF SIX AND THEN A THIRD +8131-117016-0040-1343: (IN->AND) THE THIRD ONE BRUCE GORDON SPOTTED ONE OF THE MEN (WHO'D->WHO HAD) BEEN BEATEN BEFORE +8131-117016-0041-1344: GET A STRETCHER AND TAKE HIM WHEREVER HE BELONGS HE ORDERED +8131-117016-0042-1345: BUT THE CAPTAIN STIRRED FINALLY SIGHING +8131-117016-0043-1346: (NO->NOW) THE (COPS THEY'RE->CAPS ARE) GIVING ME (WE'RE->WERE) COVERED GORDON +8131-117016-0044-1347: BUT THE (STONEWALL->STERN WALL) GANG IS (BACKING WAYNE->BACK IN WAIN) +8131-117016-0045-1348: BUT IT'S GOING TO BE TOUGH ON THEM +8131-117016-0046-1349: BRUCE (GORDON->GORD AND) GRIMACED I'VE GOT A YELLOW TICKET FROM SECURITY +8131-117016-0047-1350: (MURDOCH->MARDOCK) BLINKED HE DROPPED HIS EYES SLOWLY +8131-117016-0048-1351: WHAT MAKES YOU THINK (WAYNE->WAIN) WILL BE RE ELECTED +8131-117016-0049-1352: NOBODY WANTS HIM EXCEPT A GANG OF (CROOKS->COOKS) AND THOSE IN POWER +8131-117016-0050-1353: EVER SEE A MARTIAN ELECTION +8131-117016-0051-1354: NO (YOU'RE->YOU ARE) A (FIRSTER->FORSTER) HE CAN'T LOSE +8131-117016-0052-1355: AND THEN HELL IS GOING TO POP AND THIS WHOLE PLANET MAY BE BLOWN WIDE OPEN +8131-117016-0053-1356: (IT->YET) FITTED WITH THE (DIRE->DIA) PREDICTIONS OF SECURITY AND WITH (THE->A) SPYING GORDON WAS GOING TO DO ACCORDING TO THEM +8131-117016-0054-1357: HE WAS GETTING EVEN FATTER NOW THAT HE WAS EATING BETTER FOOD FROM THE FAIR RESTAURANT AROUND THE CORNER +8131-117016-0055-1358: (COST EM->COSTUM) MORE BUT THEY'D BE RESPECTABLE +8131-117016-0056-1359: BECAUSE (IZZY->IZZIE) IS ALWAYS HONEST ACCORDING TO HOW HE SEES IT +8131-117016-0057-1360: BUT YOU GOT EARTH IDEAS OF THE STUFF LIKE I HAD ONCE +8131-117016-0058-1361: THE GROUPS GREW MORE EXPERIENCED AND (MURDOCH->MURDOCK) WAS TRAINING A NEW SQUAD EVERY NIGHT +8131-117016-0059-1362: IT WASN'T EXACTLY LEGAL BUT NOTHING WAS HERE +8131-117016-0060-1363: THIS COULD LEAD TO ABUSES AS HE'D SEEN ON EARTH +8131-117016-0061-1364: BUT (THERE->THEIR) PROBABLY WOULDN'T BE TIME FOR IT IF MAYOR (WAYNE->WAIN) WAS RE ELECTED +8131-117017-0000-1270: IT WAS NIGHT OUTSIDE AND THE (PHOSPHOR BULBS->PHOSPHOBS) AT THE CORNERS GLOWED DIMLY GIVING HIM BARELY ENOUGH LIGHT BY WHICH TO LOCATE THE WAY TO THE EXTEMPORIZED PRECINCT HOUSE +8131-117017-0001-1271: IT HAD PROBABLY BEEN YEARS SINCE ANY HAD DARED RISK IT AFTER THE SUN WENT DOWN +8131-117017-0002-1272: AND THE SLOW DOUBTFUL RESPECT ON THE FACES OF THE CITIZENS AS THEY NODDED TO HIM WAS EVEN MORE PROOF THAT (HALEY'S->HALELY) SYSTEM WAS WORKING +8131-117017-0003-1273: GORDON HIT THE SIGNAL SWITCH AND THE (MARSPEAKER->MARKEER) LET OUT A SHRILL WHISTLE +8131-117017-0004-1274: (GUNS->GUN) SUDDENLY SEEMED TO BE FLOURISHING EVERYWHERE +8131-117017-0005-1275: YOU CAN'T DO IT TO ME 
+8131-117017-0006-1276: (I'M->I AM) REFORMED I'M GOING STRAIGHT +8131-117017-0007-1277: YOU DAMNED (COPS->COPSE) CAN'T (O'NEILL->O'NEIA) WAS BLUBBERING +8131-117017-0008-1278: ONE LOOK WAS ENOUGH THE WORK PAPERS HAD THE (TELLTALE->TELL TALE) OVER THICKENING OF THE SIGNATURE (THAT->THEY) HAD SHOWED UP ON OTHER PAPERS OBVIOUSLY FORGERIES +8131-117017-0009-1279: SOME TURNED AWAY AS GORDON AND THE OTHER (COP->COPP) WENT TO WORK BUT MOST OF THEM WEREN'T SQUEAMISH +8131-117017-0010-1280: WHEN IT WAS OVER THE TWO PICKED UP THEIR WHIMPERING CAPTIVE +8131-117017-0011-1281: JENKINS THE OTHER COP HAD BEEN HOLDING THE WALLET +8131-117017-0012-1282: MUST (OF->HAVE) BEEN MAKING A BIG CONTACT IN SOMETHING FIFTY FIFTY +8131-117017-0013-1283: THERE MUST HAVE BEEN OVER TWO THOUSAND CREDITS IN THE WALLET +8131-117017-0014-1284: WHEN GORDON AND JENKINS CAME BACK (MURDOCH->MURDOCK) TOSSED THE MONEY TO THEM SPLIT IT +8131-117017-0015-1285: WHATEVER COMES TO HAND (GOV'NOR->GOVERNOR) +8131-117017-0016-1286: LIKE THIS SOCIAL CALL GORDON ASKED HIM +8131-117017-0017-1287: THE LITTLE MAN SHOOK HIS HEAD HIS ANCIENT EIGHTEEN YEAR OLD FACE TURNING SOBER (NOPE->NOTE) +8131-117017-0018-1288: YOU (OWE->ARE) ME SOME BILLS (GOV'NOR->GUV'NER) +8131-117017-0019-1289: ELEVEN HUNDRED FIFTY CREDITS +8131-117017-0020-1290: YOU DIDN'T PAY UP YOUR PLEDGE TO THE (CAMPAIGN->CAPTAIN) FUND SO I (HADDA->HAD A) FILL IN +8131-117017-0021-1291: A THOUSAND (INTEREST->INTERESTS) AT TEN PER CENT A WEEK STANDARD RIGHT +8131-117017-0022-1292: GORDON HAD HEARD OF THE FRIENDLY INTEREST CHARGED ON THE SIDE HERE BUT HE SHOOK HIS HEAD WRONG (IZZY->IS HE) +8131-117017-0023-1293: (HUH IZZY->HOW AS HE) TURNED IT OVER AND SHOOK HIS HEAD +8131-117017-0024-1294: NOW SHOW ME WHERE I SIGNED ANY AGREEMENT SAYING I'D PAY YOU BACK +8131-117017-0025-1295: FOR A SECOND (IZZY'S->IZZIE'S) FACE WENT BLANK THEN HE CHUCKLED +8131-117017-0026-1296: HE (PULLED->POURED) OUT THE BILLS AND HANDED THEM OVER +8131-117017-0027-1297: THANKS (IZZY->IS HE) THANKS YOURSELF +8131-117017-0028-1298: THE KID POCKETED THE MONEY CHEERFULLY NODDING +8131-117017-0029-1299: THE LITTLE GUY KNEW MARS AS FEW OTHERS DID APPARENTLY FROM ALL SIDES +8131-117017-0030-1300: AND IF ANY OF THE OTHER (COPS->CUPS) HAD PRIVATE RACKETS OF THEIR OWN (IZZY->IZZIE) WAS UNDOUBTEDLY THE MAN TO FIND IT OUT AND (USE->USED) THE INFORMATION WITH A BEAT SUCH AS THAT EVEN GOING HALVES AND WITH ALL THE GRAFT (TO->AT) THE UPPER BRACKETS HE'D STILL BE ABLE TO MAKE HIS PILE IN A MATTER OF MONTHS +8131-117017-0031-1301: THE CAPTAIN LOOKED COMPLETELY BEATEN AS HE CAME INTO THE ROOM AND DROPPED (ONTO->INTO) THE BENCH +8131-117017-0032-1302: GO ON (ACCEPT DAMN IT->EXCEPT DEAR MIN) +8131-117029-0000-1247: THERE WAS A MAN COMING FROM EARTH ON A SECOND SHIP WHO WOULD SEE HIM +8131-117029-0001-1248: THE LITTLE PUBLISHER WAS BACK AT THE CRUSADER AGAIN +8131-117029-0002-1249: ONLY GORDON AND SHEILA WERE LEFT +8131-117029-0003-1250: CREDIT HAD BEEN ESTABLISHED AGAIN AND THE BUSINESSES WERE OPEN +8131-117029-0004-1251: GORDON CAME TO A ROW OF TEMPORARY BUBBLES INDIVIDUAL DWELLINGS BUILT LIKE THE DOME BUT OPAQUE FOR PRIVACY +8131-117029-0005-1252: THEY HAD BEEN LUCKY +8131-117029-0006-1253: (SCHULBERG'S->SHOALBURG'S) VOLUNTEERS WERE OFFICIAL NOW +8131-117029-0007-1254: (FATS->FAT'S) PLACE WAS STILL OPEN THOUGH THE CROOKED TABLES HAD BEEN REMOVED GORDON DROPPED TO A STOOL SLIPPING OFF HIS HELMET +8131-117029-0008-1255: HE REACHED AUTOMATICALLY FOR THE GLASS OF ETHER (NEEDLED->NEEDLE) BEER +8131-117029-0009-1256: THOUGHT 
(YOU'D->YE'D) BE IN THE CHIPS +8131-117029-0010-1257: THAT'S MARS GORDON ECHOED (THE OTHER'S COMMENT->OTHERS COMMENTS) WHY DON'T YOU PULL OFF THE PLANET FATS YOU COULD GO BACK TO EARTH I'D GUESS THE OTHER NODDED +8131-117029-0011-1258: (GUESS->GES) A MAN GETS USED TO ANYTHING HELL MAYBE I CAN HIRE SOME BUMS TO SIT AROUND AND WHOOP IT UP WHEN THE SHIPS COME IN AND (BILL->BUILD) THIS (AS->IS) A REAL OLD MARTIAN DEN OF SIN +8131-117029-0012-1259: THERE WAS A GRIN ON THE OTHER'S FACE +8131-117029-0013-1260: FINALLY GOT OUR ORDERS FOR YOU IT'S MERCURY +8131-117029-0014-1261: WE SENT TWENTY OTHERS THE SAME WAY AND THEY FAILED +8131-117029-0015-1262: (LET'S SAY YOU'VE->LET SAVE) SHIFTED SOME OF THE MISERY AROUND A BIT AND GIVEN THEM A CHANCE TO DO BETTER +8131-117029-0016-1263: YOU CAN'T STAY HERE +8131-117029-0017-1264: THERE'S A ROCKET WAITING TO (TRANSSHIP->TRANSHIP) YOU TO THE MOON ON THE WAY TO MERCURY RIGHT NOW GORDON SIGHED +8131-117029-0018-1265: AND (I'VE->I) PAID HER THE PAY WE OWE YOU FROM THE TIME YOU (BEGAN->BEGIN) USING YOUR BADGE SHE'S OUT SHOPPING +8131-117029-0019-1266: BUT HIS OLD EYES WERE GLINTING +8131-117029-0020-1267: DID YOU THINK WE'D LET YOU GO WITHOUT SEEING YOU OFF (COBBER->COPPER) HE ASKED +8131-117029-0021-1268: I I OH DRAT IT I'M GETTING OLD (IZZY->IS HE) YOU TELL HIM +8131-117029-0022-1269: HE GRABBED GORDON'S HAND AND WADDLED DOWN THE LANDING PLANK (IZZY->IZZIE) SHOOK HIS HEAD +8188-269288-0000-2881: (ANNIE->ANY) COLCHESTER HAD BEGUN TO MAKE FRIENDS WITH (LESLIE->LISLEY) +8188-269288-0001-2882: LESLIE DETERMINED TO (TRY FOR->TRIFLE) HONORS IN ENGLISH LANGUAGE AND LITERATURE +8188-269288-0002-2883: HER TASTES ALL LAY IN THIS DIRECTION HER IDEA BEING BY AND BY TO FOLLOW HER MOTHER'S PROFESSION OF JOURNALISM FOR WHICH SHE ALREADY SHOWED CONSIDERABLE APTITUDE +8188-269288-0003-2884: SHE HAD NO IDEA OF ALLOWING HERSELF TO BREAK DOWN +8188-269288-0004-2885: WHAT DO YOU MEAN REPLIED LESLIE +8188-269288-0005-2886: WHY YOU WILL BE PARTING FROM ME YOU KNOW +8188-269288-0006-2887: I (WON'T->WOULD) BE THE CONSTANT WORRY (AND->IN) PLAGUE OF YOUR LIFE +8188-269288-0007-2888: IT IS THIS IF BY ANY CHANCE YOU DON'T LEAVE SAINT (WODE'S->WORDS) ANNIE I HOPE YOU WILL ALLOW ME TO BE YOUR (ROOMFELLOW->ROOM FELLOW) AGAIN NEXT TERM +8188-269288-0008-2889: SAID ANNIE A FLASH OF LIGHT COMING INTO HER EYES AND THEN LEAVING THEM +8188-269288-0009-2890: BUT SHE ADDED ABRUPTLY YOU SPEAK OF SOMETHING WHICH MUST NOT TAKE PLACE +8188-269288-0010-2891: I MUST PASS (IN HONORS->AN HONOURS) IF I DON'T I SHALL DIE +8188-269288-0011-2892: A FEW MOMENTS LATER THERE CAME A TAP AT THE DOOR +8188-269288-0012-2893: LESLIE OPENED THE DOOR +8188-269288-0013-2894: JANE (HERIOT->HERRIOT) STOOD WITHOUT +8188-269288-0014-2895: THESE LETTERS HAVE JUST COME FOR YOU AND (ANNIE->ANY) COLCHESTER SHE SAID AND AS I WAS COMING (UPSTAIRS->UP STAIRS) I THOUGHT I WOULD LEAVE THEM WITH YOU +8188-269288-0015-2896: (LESLIE->LIZLY) THANKED HER AND EAGERLY GRASPED THE LITTLE PARCEL +8188-269288-0016-2897: HER EYES SHONE WITH PLEASURE AT THE ANTICIPATION OF THE DELIGHTFUL TIME SHE WOULD HAVE (REVELING->REVELLING) IN THE HOME NEWS THE OTHER LETTER WAS DIRECTED TO (ANNIE->ANY) COLCHESTER +8188-269288-0017-2898: HERE IS A LETTER FOR YOU ANNIE CRIED LESLIE +8188-269288-0018-2899: HER FACE GREW SUDDENLY WHITE AS DEATH WHAT IS IT DEAR +8188-269288-0019-2900: I HAVE BEEN STARVING OR RATHER I HAVE BEEN THIRSTING +8188-269288-0020-2901: WELL READ IT IN PEACE SAID (LESLIE->LINSLEY) I WON'T DISTURB YOU +8188-269288-0021-2902: I AM TRULY 
GLAD IT HAS COME +8188-269288-0022-2903: (LESLIE->LISALLY) SEATED HERSELF WITH HER BACK TO HER COMPANION AND OPENED HER (OWN->ON) LETTERS +8188-269288-0023-2904: DON'T NOTICE ME REPLIED ANNIE +8188-269288-0024-2905: I MUST GO INTO THE GROUNDS THE AIR IS STIFLING +8188-269288-0025-2906: BUT THEY ARE JUST SHUTTING UP +8188-269288-0026-2907: I SHALL GO I KNOW A WAY +8188-269288-0027-2908: JUST AFTER MIDNIGHT SHE ROSE WITH A SIGH TO PREPARE FOR BED +8188-269288-0028-2909: SHE LOOKED ROUND THE ROOM +8188-269288-0029-2910: NOW I REMEMBER SHE GOT A LETTER WHICH UPSET HER VERY MUCH AND WENT OUT +8188-269288-0030-2911: (LESLIE->LIZLY) WENT TO THE WINDOW AND FLUNG IT OPEN SHE PUT HER HEAD OUT AND TRIED TO PEER INTO THE DARKNESS BUT THE MOON HAD ALREADY SET AND SHE COULD NOT SEE MORE THAN A COUPLE OF YARDS IN FRONT OF HER +8188-269288-0031-2912: SHE IS A VERY QUEER ERRATIC CREATURE AND THAT LETTER THERE (WAS->IS) BAD NEWS IN THAT LETTER +8188-269288-0032-2913: WHAT (CAN SHE->CAN'T YOU) BE DOING OUT BY HERSELF +8188-269288-0033-2914: (LESLIE LEFT->THIS LILY LIT) THE ROOM BUT SHE HAD SCARCELY GONE A DOZEN (PACES->PLACES) DOWN THE CORRIDOR BEFORE SHE MET (ANNIE->ANY) RETURNING +8188-269288-0034-2915: (ANNIE'S->AND HIS) EYES WERE VERY BRIGHT HER CHEEKS WERE NO LONGER PALE AND THERE WAS A BRILLIANT COLOR IN THEM +8188-269288-0035-2916: SHE DID NOT TAKE THE LEAST NOTICE OF (LESLIE->LIZZLING) BUT GOING INTO THE ROOM SHUT THE DOOR +8188-269288-0036-2917: DON'T BEGIN SAID ANNIE +8188-269288-0037-2918: DON'T BEGIN WHAT DO YOU MEAN +8188-269288-0038-2919: I MEAN THAT I DON'T WANT YOU TO BEGIN TO ASK QUESTIONS +8188-269288-0039-2920: I WALKED UP AND DOWN AS FAST AS EVER I COULD OUTSIDE IN ORDER TO MAKE MYSELF SLEEPY +8188-269288-0040-2921: DON'T TALK TO ME LESLIE DON'T SAY A SINGLE WORD +8188-269288-0041-2922: I SHALL GO OFF TO SLEEP THAT IS ALL I CARE FOR +8188-269288-0042-2923: DON'T SAID ANNIE +8188-269288-0043-2924: NOW DRINK THIS AT ONCE SHE SAID IN A VOICE OF AUTHORITY IF YOU REALLY WISH TO SLEEP +8188-269288-0044-2925: (ANNIE STARED->ANY STEERED) VACANTLY AT THE (COCOA THEN SHE UTTERED->COOKER DIDN'T) A LAUGH +8188-269288-0045-2926: DRINK THAT SHE SAID +8188-269288-0046-2927: DO YOU WANT TO KILL ME DON'T TALK ANY MORE +8188-269288-0047-2928: I (AM SLEEPY->AMY) I SHALL SLEEP +8188-269288-0048-2929: SHE GOT INTO BED AS SHE SPOKE AND WRAPPED THE CLOTHES TIGHTLY ROUND HER +8188-269288-0049-2930: (CAN'T->COULD) YOU MANAGE WITH A CANDLE JUST FOR ONCE +8188-269288-0050-2931: CERTAINLY SAID (LESLIE->IT EASILY) +8188-269288-0051-2932: SHE TURNED OFF THE LIGHT AND LIT A CANDLE WHICH (SHE->HE) PUT BEHIND HER SCREEN THEN PREPARED TO GET INTO BED +8188-269288-0052-2933: (ANNIE'S->ANY) MANNER WAS VERY MYSTERIOUS +8188-269288-0053-2934: (ANNIE->AND HE) DID NOT MEAN TO (CONFIDE->CONFINE) IN (ANYONE->ANY ONE) THAT NIGHT AND THE KINDEST THING WAS TO LEAVE HER ALONE +8188-269288-0054-2935: (TIRED->TIE IT) OUT (LESLIE->LIZZLY) HERSELF DROPPED ASLEEP +8188-269288-0055-2936: ANNIE IS THAT YOU SHE CALLED OUT +8188-269288-0056-2937: THERE WAS NO REPLY BUT THE SOUND OF HURRYING STEPS CAME QUICKER AND QUICKER NOW AND THEN (THEY WERE->THEIR) INTERRUPTED BY A GROAN +8188-269288-0057-2938: OH THIS WILL KILL ME MY HEART WILL BREAK THIS WILL KILL ME +8188-269290-0000-2823: THE (GUILD->GULD) OF SAINT ELIZABETH +8188-269290-0001-2824: IMMEDIATELY AFTER DINNER THAT EVENING LESLIE RAN UP TO HER ROOM TO MAKE PREPARATIONS FOR HER VISIT TO EAST HALL +8188-269290-0002-2825: I'M NOT COMING SAID ANNIE +8188-269290-0003-2826: EVERY STUDENT IS TO 
BE (IN->AN) EAST HALL AT HALF PAST EIGHT +8188-269290-0004-2827: IT DOESN'T MATTER REPLIED ANNIE WHETHER IT IS AN ORDER OR NOT I'M NOT COMING SAY NOTHING ABOUT ME PLEASE +8188-269290-0005-2828: IT BURNED AS IF WITH FEVER +8188-269290-0006-2829: YOU DON'T KNOW WHAT A TRIAL IT IS FOR ME TO HAVE YOU HERE +8188-269290-0007-2830: I WANT TO BE ALONE GO +8188-269290-0008-2831: I KNOW YOU DON'T QUITE MEAN WHAT YOU SAY SAID LESLIE BUT OF COURSE IF YOU REALLY WISH ME +8188-269290-0009-2832: YOU (FRET->FRITTEN) ME BEYOND ENDURANCE +8188-269290-0010-2833: WRAPPING A PRETTY BLUE SHAWL (ROUND HER HEAD AND->AROUND A HIDDEN) SHOULDERS SHE TURNED TO ANNIE +8188-269290-0011-2834: LESLIE WAS JUST CLOSING THE DOOR BEHIND HER WHEN (ANNIE->ANY) CALLED AFTER HER +8188-269290-0012-2835: I TOOK IT OUT SAID (LESLIE->LIZLY) TOOK IT OUT +8188-269290-0013-2836: HAVE THE GOODNESS TO FIND IT AND PUT IT BACK +8188-269290-0014-2837: BUT DON'T LOCK ME OUT PLEASE (ANNIE->ANY) +8188-269290-0015-2838: OH I WON'T (LOCK->LOOK) YOU OUT SHE SAID BUT I MUST HAVE THE KEY +8188-269290-0016-2839: JANE (HERIOT'S->HERETT'S) VOICE WAS HEARD IN THE PASSAGE +8188-269290-0017-2840: AS SHE WALKED (DOWN->ROUND) THE CORRIDOR SHE HEARD IT BEING TURNED (IN->TO) THE LOCK +8188-269290-0018-2841: WHAT CAN THIS MEAN SHE SAID TO HERSELF +8188-269290-0019-2842: OH I (WON'T->WOULD) PRESS YOU REPLIED JANE +8188-269290-0020-2843: OH I SHALL NEVER DO THAT REPLIED LESLIE +8188-269290-0021-2844: YOU SEE ALL THE GIRLS EXCEPT (EILEEN->AILEEN) AND MARJORIE LAUGH AT HER AND THAT SEEMS TO ME TO MAKE HER WORSE +8188-269290-0022-2845: SOME DAY JANE YOU MUST SEE HER +8188-269290-0023-2846: IF YOU (ARE->*) IN LONDON DURING THE SUMMER YOU MUST COME AND (PAY US->PASS) A VISIT WILL YOU +8188-269290-0024-2847: THAT IS IF YOU CARE TO CONFIDE IN ME +8188-269290-0025-2848: I BELIEVE POOR ANNIE IS DREADFULLY UNHAPPY +8188-269290-0026-2849: THAT'S JUST (IT JANE->A CHAIN) THAT IS WHAT (FRIGHTENS->BRIGHTENS) ME SHE REFUSES TO COME +8188-269290-0027-2850: REFUSES TO COME SHE (CRIED->CRI) +8188-269290-0028-2851: (SHE WILL->SHE'LL) GET (INTO->IN) AN AWFUL SCRAPE +8188-269290-0029-2852: I AM SURE SHE IS ILL SHE WORKS TOO HARD AND SHE BUT THERE I DON'T KNOW THAT I OUGHT TO SAY ANY MORE +8188-269290-0030-2853: I'LL WAIT FOR YOU HERE SAID LESLIE +8188-269290-0031-2854: DO COME (ANNIE->ANY) DO +8188-269290-0032-2855: SCARCELY LIKELY REPLIED LESLIE SHE TOLD ME SHE WAS DETERMINED NOT TO COME TO THE MEETING +8188-269290-0033-2856: BUT (MARJORIE->MARGERY) AND (EILEEN->AILEEN) HAD ALREADY DEPARTED AND LESLIE AND JANE FOUND THEMSELVES AMONG THE LAST STUDENTS TO ARRIVE AT THE GREAT EAST HALL +8188-269290-0034-2857: MISS (LAUDERDALE->LAUDIDAL) WAS STANDING WITH THE OTHER TUTORS AND (PRINCIPALS->PRINCIPLES) OF THE DIFFERENT HALLS ON A RAISED PLATFORM +8188-269290-0035-2858: THEN A (ROLL CALL->ROCCALL) WAS GONE THROUGH BY ONE OF THE TUTORS THE ONLY (ABSENTEE->ABSENTE) WAS (ANNIE->ANY) COLCHESTER +8188-269290-0036-2859: THE PHYSICAL PART OF (YOUR->THE OLD) TRAINING AND ALSO THE MENTAL PART ARE ABUNDANTLY SUPPLIED IN THIS GREAT HOUSE OF LEARNING SHE CONTINUED BUT THE SPIRITUAL PART IT SEEMS TO ME OUGHT NOW TO BE STRENGTHENED +8188-269290-0037-2860: (HEAR HEAR->HAIR HAIR) AND ONCE AGAIN (HEAR->HAIR) +8188-269290-0038-2861: SHE UTTERED (HER STRANGE->A STRAIN) REMARK STANDING UP +8188-269290-0039-2862: (MARJORIE->MARGERY) AND (EILEEN->AILEEN) WERE CLOSE TO HER +8188-269290-0040-2863: I WILL TALK WITH YOU (BELLE ACHESON->BELL ARCHISON) PRESENTLY SHE SAID +8188-269290-0041-2864: THE NAMES OF (*->THE) 
PROPOSED MEMBERS ARE TO BE SUBMITTED TO ME BEFORE THIS DAY WEEK +8188-269290-0042-2865: AM I MY BROTHER'S KEEPER +8188-269290-0043-2866: YOU ASK SHE CONTINUED +8188-269290-0044-2867: GOD (ANSWERS TO->ADDEST) EACH OF YOU YOU ARE +8188-269290-0045-2868: THE WORLD (SAYS->TEETH) NO I AM NOT BUT GOD SAYS YES YOU ARE +8188-269290-0046-2869: ALL MEN ARE YOUR BROTHERS +8188-269290-0047-2870: FOR ALL WHO SIN ALL WHO SUFFER YOU ARE TO (A CERTAIN->EXERT AN) EXTENT RESPONSIBLE +8188-269290-0048-2871: AFTER THE ADDRESS THE GIRLS THEMSELVES WERE ENCOURAGED TO SPEAK AND A VERY ANIMATED DISCUSSION FOLLOWED +8188-269290-0049-2872: IT WAS PAST TEN O'CLOCK WHEN SHE LEFT THE HALL +8188-269290-0050-2873: JUST AS SHE WAS DOING SO MISS FRERE CAME UP +8188-269290-0051-2874: (ANNIE->ANY) COLCHESTER (IS->AS) YOUR (ROOMFELLOW->ROOM FELLOW) IS SHE NOT SHE SAID +8188-269290-0052-2875: I SEE BY YOUR FACE MISS GILROY THAT YOU ARE DISTRESSED ABOUT SOMETHING ARE YOU KEEPING ANYTHING BACK +8188-269290-0053-2876: (I AM->I'M) AFRAID I AM REPLIED (LESLIE DISTRESS->LIZZIE DISTRESSED) NOW IN HER TONE +8188-269290-0054-2877: I MUST SEE HER MYSELF EARLY IN THE MORNING AND I AM QUITE SURE THAT NOTHING WILL SATISFY MISS (LAUDERDALE->LAURAIL) EXCEPT A VERY AMPLE APOLOGY AND A FULL EXPLANATION OF THE REASON WHY SHE ABSENTED HERSELF +8188-269290-0055-2878: EXCUSES MAKE NO DIFFERENCE +8188-269290-0056-2879: THE GIRL WHO BREAKS THE RULES (HAS->HAVE) TO BE PUNISHED +8188-269290-0057-2880: I WILL TELL HER +8188-274364-0000-2811: THE COMMONS ALSO VOTED THAT THE NEW CREATED PEERS OUGHT TO HAVE NO VOICE IN THIS TRIAL BECAUSE THE ACCUSATION BEING AGREED TO WHILE THEY WERE COMMONERS (THEIR->THEY) CONSENT TO IT WAS IMPLIED WITH THAT OF ALL THE COMMONS OF ENGLAND +8188-274364-0001-2812: IN THE GOVERNMENT OF IRELAND HIS ADMINISTRATION HAD BEEN EQUALLY (PROMOTIVE->PROMOTED) OF HIS MASTER'S INTEREST AND THAT OF THE SUBJECTS COMMITTED TO HIS CARE +8188-274364-0002-2813: THE CASE OF LORD (MOUNTNORRIS->MONTORAS) OF ALL THOSE WHICH WERE COLLECTED WITH SO MUCH INDUSTRY IS THE MOST FLAGRANT AND THE LEAST EXCUSABLE +8188-274364-0003-2814: THE COURT WHICH CONSISTED OF THE (CHIEF OFFICERS->CHIE OFFICIALS) OF THE ARMY FOUND THE CRIME TO BE (CAPITAL->CAPT ON) AND CONDEMNED THAT NOBLEMAN TO LOSE HIS HEAD +8188-274364-0004-2815: WHERE THE TOKEN BY WHICH I (SHOULD->SHALL) DISCOVER IT +8188-274364-0005-2816: IT IS NOW (*->A) FULL TWO HUNDRED AND FORTY YEARS SINCE TREASONS WERE DEFINED AND SO LONG HAS IT BEEN SINCE ANY MAN WAS TOUCHED TO THIS EXTENT UPON THIS CRIME BEFORE MYSELF +8188-274364-0006-2817: LET US NOT TO (OUR OWN DESTRUCTION->UNDERSTRUCTION) AWAKE THOSE SLEEPING LIONS BY RATTLING UP A COMPANY OF OLD RECORDS WHICH HAVE LAIN FOR SO MANY AGES BY THE (WALL->WAR) FORGOTTEN AND NEGLECTED +8188-274364-0007-2818: (HOWEVER->HERBERT) THESE (GENTLEMEN->GENTLEMAN) AT THE BAR (SAY->SO) THEY SPEAK FOR THE (COMMONWEALTH->CORNWEALTH) AND THEY BELIEVE SO YET UNDER (FAVOR->FAVOUR) IT IS I WHO IN THIS PARTICULAR SPEAK FOR THE (COMMONWEALTH->CORNWEALTH) +8188-274364-0008-2819: MY LORDS I HAVE NOW TROUBLED YOUR LORDSHIPS A GREAT DEAL LONGER THAN I SHOULD HAVE DONE +8188-274364-0009-2820: YOUNG (VANE->VAIN) FALLING UPON THIS PAPER OF NOTES DEEMED THE MATTER OF THE UTMOST IMPORTANCE AND IMMEDIATELY COMMUNICATED IT TO (PYM->POEM) WHO NOW PRODUCED THE PAPER BEFORE THE HOUSE OF COMMONS +8188-274364-0010-2821: THE KING PROPOSES THIS DIFFICULTY BUT HOW CAN I UNDERTAKE (OFFENSIVE->OFFENCE OF) WAR IF I HAVE NO MORE MONEY +8188-274364-0011-2822: YOUR MAJESTY HAVING TRIED THE 
AFFECTIONS OF YOUR PEOPLE YOU ARE (ABSOLVED->ABSORBED) AND LOOSE FROM ALL RULES OF GOVERNMENT AND MAY DO WHAT POWER WILL ADMIT +8280-266249-0000-339: OLD MISTER DINSMORE HAD ACCEPTED A PRESSING INVITATION FROM HIS GRANDDAUGHTER AND HER HUSBAND TO JOIN THE PARTY AND WITH THE ADDITION OF SERVANTS IT WAS A LARGE ONE +8280-266249-0001-340: AS THEY WERE IN NO HASTE AND THE CONFINEMENT OF A RAILROAD CAR WOULD BE VERY IRKSOME TO THE YOUNGER CHILDREN IT HAD BEEN DECIDED TO MAKE THE JOURNEY BY WATER +8280-266249-0002-341: THERE WERE NO SAD LEAVE TAKINGS TO MAR THEIR PLEASURE THE CHILDREN WERE IN WILD SPIRITS AND ALL SEEMED CHEERFUL AND HAPPY AS THEY SAT OR STOOD UPON THE DECK WATCHING THE RECEDING SHORE AS THE VESSEL STEAMED OUT OF THE HARBOR +8280-266249-0003-342: AT LENGTH THE LAND HAD QUITE DISAPPEARED NOTHING COULD BE SEEN BUT THE SKY OVERHEAD AND A VAST EXPANSE OF WATER ALL (AROUND->ROUND) AND THE PASSENGERS FOUND LEISURE TO TURN THEIR ATTENTION UPON EACH OTHER +8280-266249-0004-343: THERE ARE SOME NICE LOOKING PEOPLE ON BOARD REMARKED MISTER TRAVILLA IN AN UNDERTONE TO HIS WIFE +8280-266249-0005-344: (BESIDE->BESIDES) OURSELVES ADDED COUSIN (RONALD->RANALD) LAUGHING +8280-266249-0006-345: YES SHE ANSWERED THAT LITTLE GROUP YONDER A YOUNG MINISTER AND HIS WIFE AND CHILD I SUPPOSE +8280-266249-0007-346: AND WHAT A DEAR LITTLE FELLOW HE IS JUST ABOUT THE AGE OF OUR (HAROLD->HERALD) I SHOULD JUDGE +8280-266249-0008-347: DO YOU SON WAS THE SMILING REJOINDER +8280-266249-0009-348: HE CERTAINLY LOOKS LIKE A VERY NICE LITTLE BOY +8280-266249-0010-349: SUPPOSE YOU AND HE SHAKE HANDS FRANK +8280-266249-0011-350: I DO INDEED (THOUGH->THE) PROBABLY COMPARATIVELY FEW ARE AWARE THAT TOBACCO IS THE CAUSE OF THEIR AILMENTS +8280-266249-0012-351: DOUBTLESS THAT IS THE CASE REMARKED MISTER DINSMORE +8280-266249-0013-352: WITH ALL MY HEART IF YOU WILL STEP INTO THE (GENTLEMEN'S->GENTLEMAN'S) CABIN WHERE THERE'S A LIGHT +8280-266249-0014-353: HE LED THE WAY THE OTHERS ALL FOLLOWING AND TAKING OUT A SLIP OF PAPER READ FROM IT IN A DISTINCT TONE LOUD ENOUGH TO BE HEARD BY THOSE (*->ALL) ABOUT HIM WITHOUT DISTURBING THE OTHER PASSENGERS +8280-266249-0015-354: ONE DROP OF NICOTINE (EXTRACT OF->EXTRACTED) TOBACCO PLACED ON THE TONGUE OF (A->THE) DOG WILL KILL HIM IN A MINUTE THE HUNDREDTH PART OF (A->THE) GRAIN (PICKED->PRICKED) UNDER THE SKIN OF A MAN'S ARM WILL PRODUCE NAUSEA AND FAINTING +8280-266249-0016-355: THE HALF DOZEN CIGARS WHICH MOST SMOKERS (USE->YEARS) A DAY CONTAIN SIX OR SEVEN GRAINS ENOUGH IF CONCENTRATED AND ABSORBED TO KILL THREE MEN AND A POUND OF TOBACCO ACCORDING TO ITS QUALITY CONTAINS FROM ONE QUARTER TO ONE AND A QUARTER OUNCES +8280-266249-0017-356: IS IT STRANGE THEN THAT SMOKERS AND CHEWERS HAVE A THOUSAND AILMENTS +8280-266249-0018-357: THAT THE FRENCH (POLYTECHNIC->POLY TECHNIC AT) INSTITUTE HAD TO PROHIBIT ITS (*->THE) USE ON ACCOUNT OF ITS EFFECTS (ON->UPON) THE MIND +8280-266249-0019-358: (NOTICE->NOTICED) THE MULTITUDE OF SUDDEN DEATHS AND SEE HOW MANY ARE SMOKERS AND CHEWERS +8280-266249-0020-359: (IN->AND) A SMALL COUNTRY TOWN SEVEN OF THESE MYSTERIOUS PROVIDENCES OCCURRED WITHIN THE CIRCUIT OF A MILE ALL DIRECTLY TRACEABLE TO TOBACCO AND ANY PHYSICIAN ON A FEW MOMENTS REFLECTION CAN MATCH THIS FACT BY HIS OWN OBSERVATION +8280-266249-0021-360: AND THEN SUCH POWERFUL ACIDS PRODUCE INTENSE IRRITATION AND THIRST THIRST WHICH WATER DOES NOT QUENCH +8280-266249-0022-361: HENCE A RESORT TO CIDER AND BEER +8280-266249-0023-362: NO SIR WHAT (KNOW->NO) YE NOT THAT YOUR BODY IS THE TEMPLE 
OF THE HOLY GHOST WHICH IS IN YOU WHICH YE HAVE OF GOD AND YE ARE NOT YOUR OWN +8280-266249-0024-363: FOR (YE->YOU) ARE (BOUGHT->BROUGHT) WITH A PRICE THEREFORE GLORIFY GOD IN YOUR BODY AND IN YOUR SPIRIT WHICH ARE GOD'S +8280-266249-0025-364: WE CERTAINLY HAVE NO RIGHT TO INJURE OUR BODIES EITHER BY NEGLECT OR SELF INDULGENCE +8280-266249-0026-365: AND AGAIN I BESEECH YOU THEREFORE BRETHREN BY THE MERCIES OF GOD THAT YE PRESENT YOUR BODIES A LIVING SACRIFICE (HOLY->WHOLLY) ACCEPTABLE UNTO GOD WHICH IS YOUR REASONABLE SERVICE +8280-266249-0027-366: IT MUST REQUIRE A GOOD DEAL OF RESOLUTION FOR ONE WHO HAS BECOME FOND OF THE INDULGENCE TO GIVE IT UP REMARKED MISTER (DALY->DALEY) +8280-266249-0028-367: NO DOUBT NO DOUBT RETURNED MISTER (LILBURN->LOWBOURNE) BUT IF THY RIGHT (EYE->I) OFFEND THEE PLUCK IT (OUT->UP) AND CAST IT FROM (THEE->ME) FOR IT IS PROFITABLE FOR THEE THAT ONE OF THY MEMBERS SHOULD PERISH AND NOT THAT THY WHOLE BODY SHOULD BE CAST INTO HELL +8280-266249-0029-368: THERE WAS A PAUSE BROKEN BY YOUNG HORACE WHO HAD BEEN WATCHING A GROUP OF MEN GATHERED ABOUT A TABLE AT THE FURTHER END OF THE ROOM +8280-266249-0030-369: THEY ARE GAMBLING YONDER AND I'M AFRAID THAT YOUNG FELLOW IS BEING BADLY FLEECED BY (THAT->THE) MIDDLE AGED MAN OPPOSITE +8280-266249-0031-370: THE EYES OF THE WHOLE PARTY WERE AT ONCE TURNED IN THAT DIRECTION +8280-266249-0032-371: NO SIR HE IS NOT HERE +8280-266249-0033-372: (AND->AS) THE DOOR WAS SLAMMED VIOLENTLY (TO->TOO) +8280-266249-0034-373: NOW THE VOICE CAME FROM THE SKYLIGHT OVERHEAD APPARENTLY AND WITH A FIERCE IMPRECATION THE IRATE GAMESTER RUSHED UPON DECK AND RAN HITHER AND THITHER IN SEARCH OF HIS TORMENTOR +8280-266249-0035-374: HIS VICTIM WHO HAD BEEN LOOKING ON DURING THE LITTLE SCENE AND LISTENING TO THE MYSTERIOUS VOICE IN SILENT WIDE EYED WONDER AND FEAR NOW (ROSE->AROSE) HASTILY HIS FACE (DEATHLY->DEFTLY) PALE WITH TREMBLING HANDS GATHERED UP THE MONEY HE HAD STAKED AND HURRYING (INTO->TO) HIS (STATE ROOM->STATEROOM) LOCKED HIMSELF IN +8280-266249-0036-375: WHAT DOES IT MEAN CRIED ONE +8280-266249-0037-376: A (VENTRILOQUIST ABOARD->VENTILLA QUESTED BORN) OF COURSE RETURNED ANOTHER LET'S FOLLOW AND SEE THE FUN +8280-266249-0038-377: I WONDER WHICH OF US IT IS REMARKED THE FIRST LOOKING HARD AT OUR PARTY I DON'T KNOW BUT COME ON +8280-266249-0039-378: THAT FELLOW NICK WARD IS A NOTED (BLACKLEG AND->BLACK LAG IN) RUFFIAN HAD HIS NOSE BROKEN IN A FIGHT AND IS SENSITIVE ON THE SUBJECT WAS CHEATING OF COURSE +8280-266249-0040-379: WHO ASKED THE MATE I'VE SEEN (NONE UP->NO NOT) HERE THOUGH THERE ARE SOME IN THE STEERAGE +8280-266249-0041-380: THEY HEARD HIM IN SILENCE WITH A COOL PHLEGMATIC INDIFFERENCE MOST EXASPERATING TO ONE IN HIS PRESENT MOOD +8280-266249-0042-381: A MAN OF GIANT SIZE AND HERCULEAN STRENGTH HAD LAID ASIDE HIS PIPE AND SLOWLY RISING TO HIS FEET SEIZED THE SCOUNDREL IN HIS POWERFUL GRASP +8280-266249-0043-382: LET ME GO YELLED WARD MAKING A DESPERATE EFFORT TO FREE HIS ARMS +8280-266249-0044-383: I (DINKS->DENZ) NO I (DINKS->THINK) I (DEACH->DID) YOU (VON->FUN) LESSON RETURNED HIS CAPTOR NOT RELAXING HIS GRASP IN THE LEAST +8280-266249-0045-384: THE GERMAN RELEASED HIS PRISONER AND THE LATTER (SLUNK->SUNK) AWAY WITH MUTTERED THREATS AND IMPRECATIONS UPON THE HEAD OF HIS TORMENTOR +8280-266249-0046-385: MISTER LILBURN AND MISTER (DALY->DALEY) EACH (AT->HAD) A DIFFERENT TIME SOUGHT OUT THE YOUNG MAN (WARD'S->WORDS) INTENDED VICTIM AND TRIED TO INFLUENCE HIM FOR GOOD +8280-266249-0047-386: YET THERE WAS GAMBLING AGAIN THE SECOND 
NIGHT BETWEEN WARD AND SEVERAL OTHERS OF HIS PROFESSION +8280-266249-0048-387: THEY KEPT IT UP TILL AFTER MIDNIGHT +8280-266249-0049-388: THEN MISTER (LILBURN->LOWBORNE) WAKING FROM HIS FIRST SLEEP IN A STATEROOM NEAR BY THOUGHT HE WOULD BREAK IT UP ONCE MORE +8280-266249-0050-389: AN INTENSE VOICELESS EXCITEMENT POSSESSED THE PLAYERS FOR THE GAME WAS A CLOSE ONE AND (THE STAKES->MISTAKES) WERE VERY HEAVY +8280-266249-0051-390: THEY BENT EAGERLY OVER THE BOARD EACH WATCHING WITH FEVERISH ANXIETY HIS COMPANION'S MOVEMENTS EACH CASTING NOW AND AGAIN A GLOATING EYE UPON THE HEAP OF GOLD AND (GREENBACKS->GREEN BACKS) THAT LAY BETWEEN THEM AND AT TIMES HALF STRETCHING OUT HIS HAND TO CLUTCH IT +8280-266249-0052-391: A DEEP (GROAN->GROUND) STARTLED THEM AND THEY SPRANG TO THEIR FEET PALE AND TREMBLING WITH SUDDEN TERROR EACH HOLDING HIS BREATH AND STRAINING HIS EAR TO CATCH A REPETITION OF THE DREAD SOUND +8280-266249-0053-392: BUT (ALL WAS->ALWAYS) SILENT AND AFTER A MOMENT OF ANXIOUS WAITING THEY SAT DOWN TO THEIR GAME AGAIN TRYING TO CONCEAL AND SHAKE OFF THEIR FEARS (WITH A->FOR THE) FORCED UNNATURAL LAUGH +8280-266249-0054-393: IT CAME FROM UNDER THE TABLE GASPED (WARD->TOWARD) LOOK WHAT'S THERE (LOOK->LOOKED) YOURSELF +8280-266249-0055-394: WHAT CAN IT HAVE BEEN THEY ASKED EACH OTHER +8280-266249-0056-395: OH NONSENSE WHAT FOOLS WE ARE +8280-266249-0057-396: IT WAS THE LAST GAME OF CARDS FOR THAT TRIP +8280-266249-0058-397: THE CAPTAIN COMING IN SHORTLY AFTER THE SUDDEN FLIGHT OF THE GAMBLERS TOOK CHARGE OF THE MONEY AND THE NEXT DAY RESTORED IT TO THE OWNERS +8280-266249-0059-398: TO ELSIE'S OBSERVANT EYES IT PRESENTLY BECAME EVIDENT THAT THE (DALYS WERE IN->DAILIES RAN) VERY (STRAITENED->STRAIGHT AND) CIRCUMSTANCES +8280-266249-0060-399: OH HOW KIND HOW VERY KIND MISSUS (DALY->DALEY) SAID WITH TEARS OF JOY AND GRATITUDE WE HAVE HARDLY KNOWN HOW WE SHOULD MEET THE MOST NECESSARY EXPENSES OF THIS TRIP BUT HAVE BEEN TRYING TO CAST OUR CARE UPON THE LORD ASKING HIM TO PROVIDE +8280-266249-0061-400: AND HOW WONDERFULLY HE HAS ANSWERED OUR PETITIONS +8280-266249-0062-401: ELSIE ANSWERED PRESSING HER HAND AFFECTIONATELY (ART->ARE) WE NOT SISTERS IN CHRIST +8280-266249-0063-402: YE ARE ALL THE CHILDREN OF GOD BY FAITH IN CHRIST JESUS +8280-266249-0064-403: YE ARE ALL ONE (IN->AND) CHRIST JESUS +8280-266249-0065-404: WE (FEEL->SEE ON) MY HUSBAND AND I THAT WE ARE ONLY THE STEWARDS OF HIS BOUNTY AND (THAT->*) BECAUSE HE HAS SAID INASMUCH AS YE HAVE DONE IT UNTO ONE OF THE LEAST OF THESE MY BRETHREN (YE->YOU) HAVE DONE IT UNTO ME IT IS THE GREATEST PRIVILEGE AND DELIGHT TO DO ANYTHING FOR HIS PEOPLE +8461-258277-0000-1649: WHEN IT WAS THE SEVEN HUNDRED AND EIGHTEENTH NIGHT +8461-258277-0001-1650: BUT HE ANSWERED NEEDS (MUST I->MY STY) HAVE (ZAYNAB ALSO->THY NABBS SO) NOW SUDDENLY THERE CAME A RAP AT THE DOOR AND THE MAID SAID WHO IS AT THE DOOR +8461-258277-0002-1651: THE KNOCKER REPLIED (KAMAR->COME ON) DAUGHTER (OF AZARIAH->VAZARRE) THE JEW SAY ME IS ALI OF CAIRO WITH YOU +8461-258277-0003-1652: REPLIED THE BROKER'S DAUGHTER O THOU DAUGHTER OF A DOG +8461-258277-0004-1653: AND HAVING THUS (ISLAMISED->ISLAMMISED) SHE ASKED HIM (DO->TWO) MEN IN THE FAITH OF (AL ISLAM GIVE->ALICELA GAVE) MARRIAGE PORTIONS TO WOMEN OR (DO->TWO) WOMEN DOWER MEN +8461-258277-0005-1654: AND SHE THREW DOWN THE JEW'S HEAD BEFORE HIM +8461-258277-0006-1655: NOW THE (CAUSE->COURSE) OF HER SLAYING HER SIRE WAS AS FOLLOWS +8461-258277-0007-1656: THEN HE (SET OUT->SAT DOWN) REJOICING TO RETURN TO THE BARRACK OF THE FORTY 
+8461-258277-0008-1657: SO HE ATE AND FELL DOWN SENSELESS FOR THE SWEETMEATS WERE DRUGGED WITH (BHANG->BANG) WHEREUPON THE KAZI BUNDLED HIM INTO THE SACK AND MADE OFF WITH (HIM->THEM) CHARGER AND CHEST AND ALL TO THE BARRACK OF THE (FORTY->FORTE) +8461-258277-0009-1658: PRESENTLY (HASAN SHUMAN->HER SON SCHUMANN) CAME OUT OF A (CLOSET->CLOSE) AND SAID TO HIM HAST THOU GOTTEN THE GEAR O ALI +8461-258277-0010-1659: SO HE TOLD HIM WHAT HAD BEFALLEN HIM AND ADDED IF I KNOW (WHITHER->WHETHER) THE RASCAL IS GONE AND WHERE TO FIND THE KNAVE I WOULD PAY HIM OUT +8461-258277-0011-1660: KNOWEST THOU WHITHER HE WENT +8461-258277-0012-1661: ANSWERED HASAN I KNOW WHERE HE IS AND OPENING THE DOOR OF THE CLOSET SHOWED HIM THE SWEETMEAT (SELLER->CELLAR) WITHIN DRUGGED AND SENSELESS +8461-258277-0013-1662: SO I WENT ROUND ABOUT THE HIGHWAYS OF THE CITY TILL I MET A SWEETMEAT (SELLER->CELLAR) AND BUYING HIS CLOTHES AND (STOCK IN->STOCKING) TRADE AND GEAR FOR TEN (DINARS->HOURS) DID WHAT WAS DONE +8461-258277-0014-1663: QUOTH (AL->A) RASHID WHOSE HEAD IS THIS +8461-258277-0015-1664: SO ALI RELATED TO HIM ALL THAT (HAD->THAT) PASSED FROM FIRST (TO->*) LAST AND THE CALIPH SAID I HAD NOT THOUGHT THOU WOULDST KILL HIM FOR THAT HE WAS A SORCERER +8461-258277-0016-1665: HE REPLIED I HAVE FORTY LADS BUT THEY ARE IN CAIRO +8461-278226-0000-1633: AND LAURA HAD HER OWN PET PLANS +8461-278226-0001-1634: SHE MEANT TO BE SCRUPULOUSLY CONSCIENTIOUS IN THE ADMINISTRATION OF (HER TALENTS->ITALIANS) AND SOMETIMES AT CHURCH ON A SUNDAY WHEN THE (SERMON->SAME) WAS PARTICULARLY AWAKENING SHE MENTALLY DEBATED (THE->A) SERIOUS QUESTION AS TO WHETHER NEW BONNETS AND A PAIR OF (JOUVIN'S->JUBANCE) GLOVES DAILY WERE NOT (SINFUL->SENT FOR) BUT I THINK SHE DECIDED THAT THE NEW BONNETS AND GLOVES WERE ON THE WHOLE A PARDONABLE WEAKNESS AS BEING GOOD FOR TRADE +8461-278226-0002-1635: ONE MORNING LAURA TOLD HER HUSBAND WITH A GAY LAUGH THAT SHE WAS GOING TO (VICTIMIZE->VICTIMISE) HIM BUT HE WAS TO PROMISE TO BE PATIENT AND BEAR WITH HER FOR ONCE IN A WAY +8461-278226-0003-1636: I WANT TO SEE ALL THE PICTURES THE MODERN PICTURES ESPECIALLY +8461-278226-0004-1637: I REMEMBER ALL THE (RUBENSES AT->REUBEN SAYS THAT) THE LOUVRE FOR I SAW THEM THREE YEARS AGO WHEN I WAS STAYING IN PARIS WITH GRANDPAPA +8461-278226-0005-1638: SHE RETURNED IN A LITTLE MORE THAN TEN MINUTES IN THE FRESHEST TOILETTE ALL PALE SHIMMERING BLUE LIKE THE SPRING SKY WITH (PEARL GREY->PER GRAY) GLOVES AND BOOTS AND PARASOL AND A BONNET THAT SEEMED MADE OF AZURE BUTTERFLIES +8461-278226-0006-1639: (IT->HE) WAS DRAWING TOWARDS THE CLOSE OF THIS DELIGHTFUL HONEYMOON TOUR AND IT WAS A BRIGHT SUNSHINY MORNING EARLY IN FEBRUARY BUT FEBRUARY IN PARIS IS SOMETIMES BETTER THAN APRIL IN LONDON +8461-278226-0007-1640: BUT SHE FIXED UPON A PICTURE WHICH SHE SAID SHE PREFERRED TO ANYTHING SHE HAD SEEN IN THE GALLERY +8461-278226-0008-1641: PHILIP JOCELYN WAS EXAMINING SOME PICTURES ON THE OTHER SIDE OF THE ROOM WHEN HIS WIFE MADE (THIS->THE) DISCOVERY +8461-278226-0009-1642: HOW I WISH YOU COULD GET ME A COPY OF THAT PICTURE PHILIP LAURA SAID ENTREATINGLY +8461-278226-0010-1643: I SHOULD SO LIKE ONE TO HANG IN MY MORNING ROOM (AT->A) JOCELYN'S ROCK +8461-278226-0011-1644: SHE TURNED TO THE (FRENCH ARTIST->FRENCHARD THIS) PRESENTLY AND ASKED (HIM->THEM) WHERE THE ELDER MISTER (KERSTALL->COASTON) LIVED AND IF THERE WAS ANY POSSIBILITY OF SEEING HIM +8461-278226-0012-1645: THEY HAVE SAID THAT HE IS EVEN A LITTLE IMBECILE THAT HE DOES NOT REMEMBER HIMSELF OF THE MOST COMMON EVENTS OF HIS 
LIFE +8461-278226-0013-1646: BUT THERE ARE SOME OTHERS WHO SAY THAT HIS MEMORY HAS NOT ALTOGETHER FAILED AND THAT HE (IS->*) STILL ENOUGH HARSHLY CRITICAL TOWARDS THE WORKS OF OTHERS +8461-278226-0014-1647: I DON'T THINK YOU WILL HAVE ANY DIFFICULTY IN FINDING THE HOUSE +8461-278226-0015-1648: YOU (WILL BE DOING->WERE BETWEEN) ME SUCH A FAVOUR (PHILIP->FELLOW) IF YOU'LL SAY YES +8461-281231-0000-1594: HIS FOLLOWERS (RUSHED->RUSH) FORWARD TO WHERE HE LAY AND THEIR UNITED FORCE COMPELLING THE BLACK (KNIGHT->NIGHT) TO PAUSE THEY DRAGGED (THEIR->THE) WOUNDED LEADER WITHIN THE WALLS +8461-281231-0001-1595: IT WAS ON THEIR JOURNEY TO THAT TOWN THAT THEY WERE OVERTAKEN ON THE ROAD BY (CEDRIC->SEDRRICK) AND HIS PARTY IN WHOSE COMPANY THEY WERE AFTERWARDS CARRIED CAPTIVE TO THE (CASTLE->COUNCIL) OF (TORQUILSTONE->TORCHLESTONE) +8461-281231-0002-1596: (AS HE->I SEE) LAY UPON HIS BED (RACKED->WRAPPED) WITH PAIN AND MENTAL AGONY AND FILLED WITH (THE->*) FEAR OF RAPIDLY APPROACHING DEATH HE HEARD A VOICE ADDRESS HIM +8461-281231-0003-1597: WHAT ART THOU HE EXCLAIMED IN TERROR +8461-281231-0004-1598: LEAVE ME AND SEEK THE SAXON (WITCH ULRICA->WHICH OIKA) WHO WAS MY TEMPTRESS LET HER AS WELL AS I (TASTE->CASE) THE TORTURES WHICH ANTICIPATE HELL +8461-281231-0005-1599: EXCLAIMED THE NORMAN (HO->OH) +8461-281231-0006-1600: (REMEMBEREST->REMEMBER AS) THOU THE MAGAZINE OF FUEL THAT (IS STORED->HIS STORE) BENEATH THESE APARTMENTS WOMAN +8461-281231-0007-1601: THEY ARE FAST RISING AT LEAST SAID (ULRICA->EUREKA) AND A SIGNAL SHALL SOON WAVE (TO WARN->TOWARD) THE BESIEGERS TO PRESS HARD UPON THOSE WHO WOULD EXTINGUISH THEM +8461-281231-0008-1602: MEANWHILE THE BLACK KNIGHT HAD LED HIS FORCES AGAIN TO THE ATTACK AND SO VIGOROUS WAS THEIR ASSAULT THAT BEFORE LONG THE GATE OF THE CASTLE ALONE SEPARATED THEM FROM THOSE WITHIN +8461-281231-0009-1603: THE DEFENDERS (FINDING->FIND IN) THE CASTLE TO BE ON FIRE NOW DETERMINED TO SELL THEIR LIVES AS (DEARLY->DAILY) AS THEY COULD AND HEADED BY (DE BRACY->THE BRACES) THEY THREW OPEN THE GATE AND WERE AT ONCE INVOLVED IN A TERRIFIC CONFLICT WITH THOSE OUTSIDE +8461-281231-0010-1604: THE BLACK (KNIGHT->NIGHT) WITH (PORTENTOUS STRENGTH FORCED->POTENTI FORCE) HIS WAY INWARD IN DESPITE OF (DE BRACY->THE BRAZY) AND HIS FOLLOWERS +8461-281231-0011-1605: TWO OF THE FOREMOST (INSTANTLY FELL->THING) AND THE REST GAVE WAY NOTWITHSTANDING ALL (THEIR LEADERS->THE LEADER'S) EFFORTS TO STOP THEM +8461-281231-0012-1606: THE BLACK (KNIGHT->NIGHT) WAS SOON ENGAGED IN DESPERATE COMBAT WITH THE NORMAN CHIEF AND (THE VAULTED->DEVOTED) ROOF OF THE HALL (RUNG->RANG) WITH (THEIR->THE) FURIOUS BLOWS +8461-281231-0013-1607: AT LENGTH (DE BRACY->THE BRACEY) FELL +8461-281231-0014-1608: TELL ME THY NAME (OR->A) WORK THY PLEASURE ON ME +8461-281231-0015-1609: YET FIRST LET ME SAY SAID (DE BRACY->DEBRACY) WHAT IT IMPORTS THEE TO KNOW +8461-281231-0016-1610: EXCLAIMED THE BLACK KNIGHT PRISONER AND PERISH +8461-281231-0017-1611: THE LIFE OF EVERY MAN IN THE CASTLE SHALL ANSWER IT IF A HAIR OF HIS HEAD BE SINGED SHOW ME HIS CHAMBER +8461-281231-0018-1612: RAISING THE WOUNDED MAN WITH EASE THE BLACK KNIGHT RUSHED WITH (HIM->THEM) TO THE (POSTERN->PASSING) GATE AND HAVING THERE DELIVERED HIS BURDEN TO THE CARE OF TWO (YEOMEN->YOUNG MEN) HE AGAIN ENTERED THE CASTLE TO ASSIST IN THE RESCUE OF (THE OTHER->THEIR) PRISONERS +8461-281231-0019-1613: BUT IN OTHER PARTS THE BESIEGERS PURSUED THE DEFENDERS OF THE CASTLE FROM CHAMBER TO CHAMBER AND SATIATED IN (THEIR->THE) BLOOD THE VENGEANCE WHICH HAD LONG ANIMATED 
THEM AGAINST THE SOLDIERS OF THE TYRANT (FRONT->FROM) DE BOEUF +8461-281231-0020-1614: AS THE FIRE (COMMENCED->COMMANDS) TO SPREAD RAPIDLY THROUGH ALL PARTS OF THE CASTLE (ULRICA->OR RICHA) APPEARED ON ONE OF THE TURRETS +8461-281231-0021-1615: BEFORE LONG THE TOWERING FLAMES (HAD->THAT) SURMOUNTED EVERY OBSTRUCTION AND ROSE TO THE EVENING SKIES (ONE->WHEN) HUGE AND BURNING BEACON (SEEN->SEEMED) FAR AND WIDE THROUGH THE ADJACENT COUNTRY (TOWER->TOWERED) AFTER TOWER CRASHED DOWN WITH BLAZING ROOF AND RAFTER +8461-281231-0022-1616: AT LENGTH WITH A TERRIFIC CRASH THE WHOLE (TURRET->TOWER) GAVE WAY AND SHE PERISHED IN (THE->*) FLAMES WHICH HAD CONSUMED HER TYRANT +8461-281231-0023-1617: WHEN THE OUTLAWS (HAD->ARE) DIVIDED THE SPOILS WHICH THEY HAD TAKEN FROM THE CASTLE OF (TORQUILSTONE->TORCHLESTONE) CEDRIC PREPARED TO TAKE HIS DEPARTURE +8461-281231-0024-1618: HE LEFT THE GALLANT BAND OF FORESTERS SORROWING DEEPLY FOR HIS LOST FRIEND THE LORD OF (CONINGSBURGH->CONIGSBURG) AND HE AND HIS FOLLOWERS HAD SCARCE DEPARTED WHEN A PROCESSION MOVED SLOWLY FROM UNDER THE GREENWOOD BRANCHES IN THE DIRECTION WHICH HE HAD TAKEN IN THE CENTRE OF WHICH WAS THE CAR IN WHICH THE BODY OF (ATHELSTANE->OTHERSTEIN) WAS LAID +8461-281231-0025-1619: (DE BRACY->DEBRACY) BOWED LOW AND IN SILENCE THREW HIMSELF UPON A HORSE AND GALLOPED OFF THROUGH THE (WOOD->WOODS) +8461-281231-0026-1620: HERE IS A BUGLE WHICH AN ENGLISH YEOMAN HAS ONCE WORN I PRAY YOU TO KEEP IT AS A MEMORIAL OF YOUR GALLANT BEARING +8461-281231-0027-1621: SO SAYING HE MOUNTED HIS STRONG WAR HORSE AND RODE OFF THROUGH THE FOREST +8461-281231-0028-1622: DURING ALL THIS TIME (ISAAC->MISERC) OF YORK SAT MOURNFULLY APART GRIEVING FOR THE LOSS OF HIS (DEARLY->STAIRLY) LOVED DAUGHTER REBECCA +8461-281231-0029-1623: AND WITH THIS EPISTLE (THE UNHAPPY->THEN HAPPY) OLD MAN SET OUT TO PROCURE HIS DAUGHTER'S LIBERATION +8461-281231-0030-1624: THE TEMPLAR IS FLED SAID (DE BRACY->THE BRAZY) IN ANSWER TO THE PRINCE'S EAGER QUESTIONS (FRONT->FROM) DE BOEUF YOU WILL NEVER SEE MORE AND HE ADDED IN A LOW AND EMPHATIC TONE (RICHARD->WRETCHED) IS (IN->AN) ENGLAND I HAVE SEEN HIM AND SPOKEN WITH HIM +8461-281231-0031-1625: HE (APPEALED->APPEARED) TO (DE BRACY->THE BRACELET) TO ASSIST HIM IN THIS PROJECT AND BECAME AT ONCE DEEPLY SUSPICIOUS OF THE (KNIGHT'S->NIGHT'S) LOYALTY TOWARDS HIM WHEN HE DECLINED TO LIFT HAND AGAINST THE MAN WHO HAD SPARED HIS OWN LIFE +8461-281231-0032-1626: BEFORE REACHING HIS DESTINATION HE WAS TOLD THAT LUCAS (DE BEAUMANOIR->THE BURMANOIS) THE GRAND MASTER OF THE ORDER OF THE TEMPLARS WAS THEN ON VISIT TO THE PRECEPTORY +8461-281231-0033-1627: HE HAD NOT UNTIL THEN BEEN INFORMED (OF->TO) THE PRESENCE OF THE JEWISH MAIDEN IN THE ABODE OF THE TEMPLARS AND GREAT WAS HIS FURY AND INDIGNATION ON LEARNING THAT SHE WAS AMONGST THEM +8461-281231-0034-1628: POOR ISAAC WAS HURRIED OFF ACCORDINGLY AND EXPELLED FROM THE PRECEPTORY ALL HIS ENTREATIES AND EVEN HIS OFFERS UNHEARD AND DISREGARDED +8461-281231-0035-1629: THE ASSURANCE THAT SHE POSSESSED SOME FRIEND IN THIS AWFUL ASSEMBLY GAVE HER COURAGE TO LOOK AROUND AND TO MARK INTO WHOSE PRESENCE SHE HAD BEEN CONDUCTED +8461-281231-0036-1630: SHE GAZED ACCORDINGLY UPON A SCENE WHICH MIGHT WELL HAVE STRUCK TERROR INTO A BOLDER HEART THAN HERS +8461-281231-0037-1631: AT HIS FEET WAS PLACED (A->THE) TABLE OCCUPIED BY TWO SCRIBES WHOSE DUTY (IT->*) WAS TO RECORD THE PROCEEDINGS OF THE DAY +8461-281231-0038-1632: THE PRECEPTORS OF WHOM THERE WERE FOUR PRESENT OCCUPIED SEATS BEHIND (THEIR->THE) SUPERIORS AND 
BEHIND THEM STOOD THE ESQUIRES OF THE ORDER ROBED IN WHITE + +SUBSTITUTIONS: count ref -> hyp +35 A -> THE +34 THE -> A +30 IN -> AND +28 AND -> IN +10 DICKIE -> DICKY +10 ANNIE -> ANY +10 AN -> AND +9 THIS -> THE +8 THEIR -> THE +8 I -> AND +7 TO -> THE +7 THE -> THAT +7 THAT -> THE +7 MURDOCH -> MURDOCK +7 MAN -> MEN +6 THE -> THEY +6 THE -> THEIR +6 ON -> AND +6 OF -> A +6 LARCH -> LARGE +6 DE -> THE +6 ARCHY -> ARCHIE +6 ANYONE -> ANY +6 AND -> AN +5 WILFRID -> WILFRED +5 THIS -> HIS +5 THESE -> THIS +5 THERE -> THEIR +5 THE -> TO +5 THE -> THIS +5 SINDBAD -> SINBAD +5 SIGURD -> CIGAR +5 SHARRKAN -> SHARKAN +5 OL -> OLD +5 OH -> O +5 KINE -> KIND +5 IS -> HIS +5 IN -> AN +5 IM -> HIM +5 HER -> A +5 HAS -> IS +5 AT -> IT +4 YOU'VE -> YOU +4 YOU'RE -> YOU +4 YOU -> HE +4 WITH -> WAS +4 WERE -> WHERE +4 WAS -> IS +4 UPON -> UP +4 THEY -> THERE +4 SET -> SAID +4 ORGANISER -> ORGANIZER +4 O -> OH +4 N'T -> NOT +4 LIL -> LITTLE +4 KNOW -> NO +4 IT'S -> IS +4 IT -> HE +4 I -> I'M +4 HERMON -> HERMAN +4 HATH -> HAD +4 HAS -> HAD +4 HAD -> AND +4 AS -> IS +4 AROUND -> ROUND +3 ZARATHUSTRA -> THEIR +3 WOULD -> WILL +3 WILL -> WOULD +3 WHEN -> WITH +3 WHEN -> AND +3 WERE -> WAS +3 WE'RE -> WE +3 TRY -> TRIED +3 TOO -> TO +3 TO -> INTO +3 TO -> A +3 THIS -> THESE +3 THEY -> THEIR +3 THEY -> THE +3 THEIR -> THEY +3 SANCT -> SAINT +3 REGIN -> RIGAN +3 OR -> OF +3 ON -> IN +3 OFF -> OF +3 O -> OF +3 MISTAH -> MISTER +3 MISSUS -> MISS +3 MILICENT -> MELLICENT +3 LESLIE -> LIZLY +3 KNIGHT -> NIGHT +3 JES -> JUST +3 IZZY -> IZZIE +3 IZZY -> IS +3 IT -> A +3 IS -> WAS +3 IS -> AS +3 INTERESTS -> INTEREST +3 I'M -> I +3 I -> I'VE +3 I -> A +3 HIS -> THIS +3 HIS -> IS +3 HIM -> THEM +3 HERMON -> HERMANN +3 HERMON -> HARMON +3 HER -> HIS +3 HEAR -> HERE +3 HEAR -> HAIR +3 HE -> HIS +3 GOING -> GOIN +3 EILEEN -> AILEEN +3 DOG -> DOOR +3 DO -> TO +3 DEFENSE -> DEFENCE +3 DALY -> DALEY +3 CONFECTIONARY -> CONFECTIONERY +3 BUT -> THAT +3 AT -> TO +3 AT -> AND +3 AT -> A +3 AND -> A +3 AN -> IN +3 AIN'T -> AND +3 A -> TO +3 A -> IT +3 A -> HER +2 ZAU -> ZAO +2 ZAU -> ZA +2 ZAU -> THOU +2 YOUR -> THE +2 YOU'RE -> YO +2 YOU -> YOUR +2 YOU -> IT +2 YO'LL -> YOU'LL +2 YO -> YOU +2 YER -> YOU +2 YE -> YOU +2 WON'T -> WOULD +2 WITH -> THE +2 WITH -> FOR +2 WINTER -> WINDOW +2 WILD -> WHITE +2 WHO -> WHOSE +2 WHILE -> WHETHER +2 WHEN -> ONE +2 WHAT -> BUT +2 WE'RE -> WERE +2 WAYNE -> WAIN +2 WATCHED -> WATCH +2 URARTU -> URITU +2 UP -> UPSTAIRS +2 UNDERTAKER'S -> UNDERTAKERS +2 TRIBE -> TIME +2 TORQUILSTONE -> TORCHLESTONE +2 TONIGHT -> TO +2 TO -> TOO +2 TIDINGS -> HIDINGS +2 THUS -> THIS +2 THOUSANDS -> THOUSAND +2 THIS -> ITS +2 THIRST -> THOSE +2 THEY'RE -> THERE +2 THEY -> HE +2 THERE -> THEN +2 THEN -> THAT +2 THEM -> HIM +2 THE -> THEN +2 THE -> IT +2 THAT'S -> THAT +2 THAT -> AT +2 TAMAR -> TO +2 SYRUP -> SERF +2 STONEWALL -> STONE +2 STOKER -> STOCKER +2 STAYED -> STATE +2 SON -> SUN +2 SOMEONE -> SOME +2 SO -> TO +2 SHELL -> SHELLFISH +2 SHE -> YOU +2 SHE -> SHE'LL +2 SHAWS -> SHORES +2 SHALL -> SHOULD +2 SERGEY -> SO +2 SELLER -> CELLAR +2 SEEN -> SEE +2 SEEK -> SEE +2 SEE -> SEEM +2 ROOMFELLOW -> ROOM +2 RETURNED -> RETURN +2 RAYSTOKE -> RAYSTROKE +2 RAYSTOKE -> RAY +2 PROCLUS -> PROCLASS +2 PRIORESS -> PRIORS +2 PRIEST -> PRIESTS +2 POLL -> POLE +2 PIGEONCOTE -> PIGEON +2 PIGEONCOTE -> BEECH +2 ONE -> WHEN +2 ONCE -> WAS +2 ON -> DOWN +2 OL -> ALL +2 OF -> HAVE +2 O -> WHO +2 O -> A +2 NOUGHT -> NOT +2 NORTHFIELD -> NORTH +2 NOR -> NO +2 NO -> NOW +2 NO -> KNOW +2 NEIGHBOR -> NEIGHBOUR +2 MUST -> WAS +2 
MISSUS -> MISTER +2 MINE -> MY +2 MESTER -> MISTER +2 MENAHEM -> MANY +2 MEN -> MAN +2 MARSHAL -> MARTIAN +2 MARJORIE -> MARGERY +2 LUNA -> LENA +2 LOVE -> LOVED +2 LIKED -> LIKE +2 LA -> NEWBURG +2 KEEP -> HE +2 JURY -> CHEERY +2 JULIEN -> JULIAN +2 JAKEY -> JAKIE +2 ITS -> HIS +2 IT -> THIS +2 IT -> I +2 IT -> AT +2 IS -> HAS +2 IN'T -> IN +2 IN -> TO +2 IN -> ON +2 IF -> OF +2 I'VE -> I +2 I'LL -> I +2 I'D -> I +2 I -> THEY +2 I -> AS +2 HOZE -> HOSE +2 HOW -> HER +2 HORSTIUS -> HORSES +2 HOO'S -> WHO'S +2 HONOUR -> HONOR +2 HONOR -> HONOUR +2 HO -> OH +2 HIS -> THE +2 HIS -> A +2 HERMON'S -> HERMANN'S +2 HERE'S -> HERE +2 HERE -> HEAR +2 HER -> THE +2 HER -> FOR +2 HE -> YOU +2 HE -> IT +2 HAYS -> HAYES +2 HAVE -> HATH +2 HAS -> HATH +2 HANDS -> HAND +2 HAID -> HEAD +2 HAD -> THAT +2 HAD -> AT +2 GURR -> GIRK +2 GURR -> GER +2 GURR -> GARR +2 GUNNAR -> GUNNER +2 GRANDPAP -> GRANDPA +2 GOD -> GONE +2 GIVING -> GIVEN +2 GIORGIO -> GEORGIO +2 GENTLEMEN -> GENTLEMAN +2 FRONT -> FROM +2 FROM -> FOR +2 FOALS -> FOLDS +2 FOAL -> FULL +2 FESTAL -> FEAST +2 FAVOR -> FAVOUR +2 FAUVENT -> VENT +2 FAUCHELEVENT -> FOR +2 EYE -> I +2 EXECUTIVE -> EXECUTED +2 ETERNAL -> HAD +2 END -> AND +2 DO -> TWO +2 DE -> DEBRACY +2 DAY -> THEY +2 DAPHNE -> JAPANE +2 CREAM -> QUEEN +2 CRAWFISH -> CROFISH +2 COUNTRY -> COUNTRIES +2 CORNER -> CORN +2 COPS -> CUPS +2 COMMONWEALTH -> CORNWEALTH +2 CINDERLAD -> CINDER +2 CALIFORNIAN -> CALIFORNIA +2 BRYNHILD -> BURNEHELD +2 BROTHERS -> BROTHER'S +2 BRACY -> BRAZY +2 BLESSED -> BLEST +2 BEING -> BEEN +2 BEG -> THEY +2 BEFEL -> BEFELL +2 BEALE -> BEER +2 BEALE -> BEE +2 AWK -> AWKWARD +2 AWHILE -> A +2 AT -> THAT +2 ARSINOE -> ARSENO +2 ARE -> A +2 ARCHBISHOP -> ARCHBISH +2 AND -> WHEN +2 ALTHEA -> ALTHIA +2 ALL -> OUR +2 ALL -> ON +2 AL -> A +2 ADD -> AT +2 A -> US +2 A -> OF +2 A -> I +2 A -> HAVE +2 A -> AT +2 A -> AND +1 ZEMSTVOS -> SEND +1 ZAYNAB -> THY +1 ZAU -> ZOMAN +1 ZARATHUSTRA -> TAKE +1 ZARATHUSTRA -> GUESTRA +1 ZARATHUSTRA -> ARE +1 YUSS -> YES +1 YOUR -> YOURSELVES +1 YOUR -> YOU +1 YOUR -> HE +1 YOU'LL -> YOU +1 YOU'D -> YOU +1 YOU'D -> YE'D +1 YOU -> YOURSELVES +1 YOU -> YE +1 YOU -> TALONS +1 YOU -> SHE +1 YOU -> OFTEN +1 YOU -> EVEN +1 YOU -> EUGEN +1 YO' -> YE +1 YO -> YOU'LL +1 YET -> HE +1 YET -> AT +1 YES'M -> YES +1 YEOMEN -> YOUNG +1 YEARS -> EARS +1 YAUSKY -> HOUSEKEEPER'S +1 YAHWEH -> YONWAY +1 WUNNERED -> ONE +1 WROTE -> RODE +1 WROTE -> ONES +1 WRIT -> WRITE +1 WOUNDS -> ONES +1 WOULD -> HAD +1 WOULD -> DID +1 WOTTETH -> WHATETH +1 WOT -> WHAT +1 WORTH -> WORSE +1 WORSHIPPERS -> WORSHIPPED +1 WORSHIP'S -> WORSHIP +1 WORRY -> WERE +1 WORLD -> WOOLWRIGHT +1 WORKS -> WORK +1 WORKMAN -> WORKMEN +1 WORKINGMEN -> WORKING +1 WORKING -> MARKING +1 WORKED -> WORTH +1 WORK -> WORKADAY +1 WORD -> WORLD +1 WOOD -> WOODS +1 WONDER -> WANDER +1 WON'T -> WELL +1 WOMAN -> WOMEN +1 WOKE -> WALKING +1 WOE -> WON'T +1 WODE'S -> WORDS +1 WITH -> WOULD +1 WITH -> WHICH +1 WITH -> TO +1 WITH -> ITS +1 WITH -> IT +1 WITH -> BUT +1 WITCH -> WHICH +1 WISHT -> WISHED +1 WIRES -> WIVES +1 WING -> WINGED +1 WINE -> WHY +1 WINE -> WHITE +1 WILLY -> BILLY +1 WILL -> WILT +1 WILL -> WIDOW +1 WILL -> WERE +1 WILL -> WELL +1 WILL -> WE'LL +1 WILL -> WE +1 WILL -> WAS +1 WILL -> TO +1 WILL -> REVOTE +1 WILL -> OLI' +1 WILKSES -> WILKES +1 WILKS -> WILKES +1 WILFRID -> WILFRIED +1 WILFRID -> MILFRED +1 WILFRID -> LOYAL +1 WILDLY -> WIDELY +1 WILDEST -> WALDEST +1 WILDERNESS -> WEDDINANCE +1 WILDERNESS -> WARINESS +1 WILDERNESS -> LINEN +1 WIFE -> WI +1 WIFE -> MY +1 WIELD -> 
WHEEL +1 WIDEAWAKE -> WIDE +1 WICKER'S -> ROOKER'S +1 WICKER'S -> OAKERS +1 WICKER'S -> HOOKER'S +1 WICKER -> WICKER'S +1 WI -> WITH +1 WHOMSOEVER -> WHOSOEVER +1 WHO'D -> WHO +1 WHO -> WITH +1 WHO -> ONE +1 WHO -> HER +1 WHITHER -> WHETHER +1 WHITEHALL -> WHITE +1 WHITE -> WORLD +1 WHITE -> WIDE +1 WHISKERED -> WHISKIRT +1 WHILOME -> WILM +1 WHICH -> WITCH +1 WHICH -> SPEECH +1 WHICH -> REACHED +1 WHICH -> PICTURED +1 WHEREABOUTS -> WHEREABOUT +1 WHERE'S -> WHERE +1 WHERE'S -> WAS +1 WHERE -> WITH +1 WHERE -> WERE +1 WHERE -> THERE +1 WHERE -> HER +1 WHER -> WERE +1 WHEN -> WHO +1 WHEN -> WENT +1 WHEN -> IN +1 WHEN -> BUT +1 WHEELER -> WHALER +1 WHEEL -> REA +1 WHATE'ER -> WHATEVER +1 WHAT'S -> ONCE +1 WHAT -> WOULD +1 WHAT -> WITH +1 WHAT -> IT +1 WHAT -> HE'LL +1 WHAT -> FOR +1 WHAT -> AT +1 WHAT -> A +1 WHACKS -> WAX +1 WESTPORT -> WESTWARD +1 WERE -> WITH +1 WERE -> WENT +1 WERE -> RAN +1 WENT -> WON +1 WENT -> WHEN +1 WELLS -> WELL +1 WELLS -> WALES +1 WELL -> WILL +1 WELL -> WHY +1 WELL -> WHILE +1 WEIGHED -> WADE +1 WEEVILLY -> WEEVILY +1 WEEDS -> READS +1 WEBB'S -> WEBBS +1 WE'VE -> REVOLTA +1 WE -> WILL +1 WE -> WASTED +1 WAYNE -> IN +1 WAVERLY -> WAVERLEY +1 WATONWAN -> WATERWAM +1 WATER -> WALL +1 WATCHMAKER'S -> WATCHMAKERS +1 WAS -> WITH +1 WAS -> WIS +1 WAS -> WHICH +1 WAS -> MUST +1 WAS -> ALWAYS +1 WARM -> WRONG +1 WARDERS -> ORDERS +1 WARD'S -> WORDS +1 WARD -> TOWARD +1 WANTED -> WATER +1 WANT -> WONT +1 WANDERER -> WONDER +1 WALL -> WAR +1 WALK -> BUCK +1 WAKE -> AWAKE +1 WAITING -> WINNING +1 WAITIN -> WAITING +1 WAGGOT -> RAGGED +1 WAGGING -> WORKING +1 WAGED -> RAGED +1 WADED -> WAITED +1 WADDED -> WATERED +1 VOYAGE -> VOY +1 VOWELS -> VOWALS +1 VOUGHT -> WALT +1 VON -> FUN +1 VOMITING -> WARMITTING +1 VOLVITUR -> VOLVETER +1 VOICED -> OUTWARDS +1 VIOLENCE -> WILDEST +1 VIOLENCE -> VIOLENT +1 VILLAGES -> RELIGIOUS +1 VILLAGERS -> VILLAGES +1 VIL -> VILLE +1 VIGILANCE -> EACH +1 VICTIMIZE -> VICTIMISE +1 VESTRY -> VETERY +1 VERY -> WHERE +1 VERSES -> VERSEST +1 VERILY -> VERY +1 VENTRILOQUIST -> VENTILLA +1 VEHEMENTLY -> TO +1 VAVASOUR -> VAVASOR +1 VAULT -> WALL +1 VAUGIRARD -> ROGER +1 VAST -> VATS +1 VASSILIEVITCH -> WISLOVITCH +1 VANE -> VAIN +1 VAIN -> VEIN +1 UTTER -> OTHER +1 USUALLY -> USUAL +1 USEFUL -> FORGIFTS +1 USE -> YEARS +1 USE -> USED +1 US -> UP +1 US -> TO +1 US -> STARED +1 URGED -> ADDED +1 URARTIAN -> GRACIAN +1 UPSTAIRS -> UP +1 UPON -> A +1 UP -> STREET +1 UP -> NOT +1 UNTO -> TO +1 UNTO -> INTO +1 UNTO -> AND +1 UNS -> UNSTEAD +1 UNREWARDED -> IN +1 UNOCCUPIED -> ON +1 UNLESS -> AND +1 UNIQUE -> EUIK +1 UNHESITATINGLY -> AM +1 UNHAPPY -> HAPPY +1 UNEXPECTEDLY -> UNEXPECTED +1 UNDER -> AND +1 UNCLE -> AND +1 UNADULTERATED -> ANADULTERATED +1 UN -> ONE +1 ULTIMATELY -> ULTIMATE +1 ULRICA -> OR +1 ULRICA -> OIKA +1 ULRICA -> EUREKA +1 UKINZER -> A +1 UDDER -> UTTER +1 TYRANNY -> SURNING +1 TWYMAN'S -> TWIMMAN +1 TWO -> TOO +1 TWO -> TO +1 TWO -> JEW +1 TWO -> DO +1 TWELVEMONTH -> TWELVE +1 TURRET -> TOWER +1 TURNS -> TURNED +1 TUBERCULOUS -> TUBERK +1 TRYING -> CRYING +1 TRY -> TROT +1 TRY -> TRIFLE +1 TRUSTEE -> TRUSTY +1 TRUNDLED -> TUMBLED +1 TRULY -> JULIE +1 TRIVET -> TRIBUT +1 TRIPES -> TRITE +1 TRIFLE -> TRAVEL +1 TRIBES -> TRIUMPH +1 TRIBES -> TRINES +1 TRELAWNEY -> TREEONER +1 TREASURE -> TREASURES +1 TRAVELED -> TRAVELLED +1 TRANSSHIP -> TRANSHIP +1 TRAINING -> TRAINED +1 TRAINDAWG -> TRAIN +1 TRAFFIC -> EFFIC +1 TRADITIONS -> JUDICINES +1 TRADEMARK -> TRADE +1 TRACK -> CHECK +1 TRACED -> PRAISED +1 TOWER -> TOWERED +1 TOWARDS -> DOOR 
+1 TOWARD -> TOWARDS +1 TOWARD -> TO +1 TOUR -> TO +1 TOUGHS -> TUFTS +1 TOUGH -> TO +1 TOUCHED -> TOUCH +1 TORMENT -> TOM +1 TORE -> TOLD +1 TORCH -> TORTURE +1 TOP -> STOPPED +1 TOOK -> INTO +1 TOO -> TWO +1 TOO -> SHIMMERTS +1 TOO -> DO +1 TONGUE -> TONG +1 TOMORROW -> TO +1 TOMB -> TWO +1 TOM -> TUMBLED +1 TOLERBLE -> TOLERABLE +1 TOILET -> TALLED +1 TO -> WITH +1 TO -> TWO +1 TO -> TURNED +1 TO -> TRAITS +1 TO -> TOWARD +1 TO -> THEATER +1 TO -> THAT +1 TO -> SHE +1 TO -> REFORMED +1 TO -> MADE +1 TO -> IN +1 TO -> HURT +1 TO -> FOR +1 TO -> DIRECTIFY +1 TO -> DID +1 TO -> CHOOSE +1 TO -> AT +1 TITLE -> TOWN +1 TIRING -> WOMAN +1 TIRESOME -> PARASAN +1 TIRED -> TIE +1 TIRED -> HIRED +1 TIMES -> TIME +1 TIME -> TELL +1 TILLERS -> TELLERS +1 TILL -> TO +1 TIGLATH -> TIC +1 TIGLATH -> TAKE +1 TIGLATH -> T +1 TIGLATH -> DICK +1 TIGHTENING -> TIGHTENED +1 TIGER -> TYER +1 TIGER -> DRAGGER +1 TIE -> TELL +1 TIDE -> TIED +1 TICKLING -> TITLING +1 THY -> THINE +1 THY -> THEIR +1 THY -> THAT +1 THY -> DIVERMISH +1 THUS -> THERE'S +1 THUS -> DOES +1 THUMB -> TIME +1 THROWING -> THREW +1 THROUGH -> TO +1 THRIFTILY -> DRIFTILY +1 THREE -> THIRD +1 THOUSAND -> SPASM +1 THOUGH -> THE +1 THOUGH -> THAT +1 THOUGH -> THAN +1 THOU -> THE +1 THOU -> NOW +1 THOU -> DOUB +1 THOU -> DONE +1 THOSE -> THUS +1 THIS -> US +1 THIS -> THAT +1 THIS -> MISSUS +1 THIS -> DISTAGGER +1 THINK -> THINKING +1 THINGS -> THANKS +1 THINGS -> SPENCE +1 THIN -> FLIND +1 THEY'RE -> THEIR +1 THEY'RE -> HER +1 THEY'RE -> ARE +1 THEY -> YOU +1 THEY -> THAT +1 THEY -> I +1 THEY -> DECLINE +1 THESE -> THE +1 THESE -> HIS +1 THESE -> DISTRACT +1 THERE'S -> THERE +1 THERE -> WREATHS +1 THERE -> THEY +1 THERE -> THERE'S +1 THERE -> THAT +1 THERE -> TERRANT +1 THEN -> WHEN +1 THEN -> VENTURED +1 THEN -> THAN +1 THEN -> DIDN'T +1 THEN -> AND +1 THEM -> AND +1 THEIR -> THEM +1 THEIR -> HER +1 THEIR -> DEAR +1 THEE -> ME +1 THEATRE -> FUTURE +1 THE -> YEARS +1 THE -> WELLS +1 THE -> VERY +1 THE -> UPON +1 THE -> THOUGH +1 THE -> THESE +1 THE -> THEM +1 THE -> OUR +1 THE -> OTHERS +1 THE -> OTHER +1 THE -> OPINION +1 THE -> NO +1 THE -> NEGROMMAN +1 THE -> MISTAKES +1 THE -> LOVE +1 THE -> LEDA +1 THE -> KIND +1 THE -> JACO +1 THE -> INSECTORS +1 THE -> HIS +1 THE -> HER +1 THE -> HALF +1 THE -> FREDERI +1 THE -> FOR +1 THE -> DRESSED +1 THE -> DID +1 THE -> DEVOTED +1 THE -> DEBRAMIN +1 THE -> DEBRAMAN +1 THE -> DE +1 THE -> BUT +1 THE -> BUILD +1 THE -> ASHORE +1 THE -> AND +1 THE -> AN +1 THE -> ABIDING +1 THAT'S -> THE +1 THAT'S -> I +1 THAT'LL -> THOU +1 THAT -> WITH +1 THAT -> TILL +1 THAT -> THY +1 THAT -> THEY +1 THAT -> THEN +1 THAT -> THEIR +1 THAT -> THAN +1 THAT -> NOT +1 THAT -> MY +1 THAT -> LITTLE +1 THAT -> LET +1 THAT -> INTER +1 THAT -> DAT +1 THAT -> BROUGHT +1 THAN -> THEN +1 THAN -> IN +1 THAN -> GONDON +1 THAN -> AND +1 TERRA -> TERRACE +1 TEND -> INTEREST +1 TEN -> TOWN +1 TEMPTETH -> TEMPTED +1 TEMPLES -> TEMPLE +1 TEMPLES -> SIMPLES +1 TEMPLAR -> TENT +1 TELLTALE -> TELLS +1 TELLTALE -> TELL +1 TELEGRAM -> TELEGRAMAS +1 TEAR -> TEARE +1 TEAM -> TEEM +1 TEAL -> TEALE +1 TEA -> TUNO +1 TATTLERS -> TEDLER +1 TASTE -> CASE +1 TASKMASTER -> DE +1 TARDY -> TIDY +1 TAPPED -> TOP +1 TAPIS -> TAPPY +1 TANQUAM -> TEN +1 TALMASH -> THOMMISH +1 TALKED -> TALKS +1 TALENTS -> OR +1 TAKEN -> TAKING +1 TAKE -> THEY +1 TAHITI -> INDEEDY +1 T'OTHER -> THE +1 SYRUP -> CYRUP +1 SYRIA -> ASSYRIA +1 SYNONYMON -> SNYM +1 SYLLOGISM -> SYLLISM +1 SYDNEY -> SIDNEY +1 SWORD -> SORT +1 SWELP -> SWAP +1 SWARTHY -> SWALLTY +1 SWAG -> WAG +1 
SURELY -> TRULY +1 SUPPOSE -> S'POSE +1 SUMTHIN -> SOMETHING +1 SULPHURIC -> SUFFERG +1 SUFFICIENT -> SUSPICION +1 SUFFICES -> SURFACES +1 SUE -> UM +1 SUDDEN -> CERTAIN +1 SUCKED -> SACKED +1 SUCCOURS -> SECURUS +1 SUB -> SUBTRINE +1 STRUGGLE -> STRUGGLING +1 STRUCK -> UP +1 STRUCK -> MIGHT +1 STRIFE -> STRIPE +1 STRIFE -> STRIKE +1 STRENGTH -> FORCE +1 STREET -> DISTRICT +1 STREET -> AT +1 STREAK -> STREET +1 STRAYING -> STRAIN +1 STRANGEST -> STRANGERS +1 STRANGE -> STRAIN +1 STRAITS -> STRAIT +1 STRAITENED -> STRAIGHT +1 STRAINS -> TRAINS +1 STRAIGHT -> STRAYED +1 STORES -> STORIES +1 STORED -> STORE +1 STORE -> WORKED +1 STOOD -> TOO +1 STONEWALL -> STERN +1 STONED -> STONE +1 STOCK -> STOCKING +1 STILLNESS -> WHICH +1 STILL -> STEALING +1 STILL -> SO +1 STICK -> STICKET +1 STEWPAN -> STEWPANT +1 STEW -> DO +1 STERN -> STERNMOST +1 STEPPED -> STEPS +1 STEP -> SPATANI +1 STEEVER -> STEVER +1 STEERING -> STIRRING +1 STAYING -> SEEING +1 STAYED -> STAY +1 STATUS -> STRATAS +1 STATURE -> STATUE +1 STATES -> ESTATES +1 STATED -> SUITED +1 STATE -> STATES +1 STATE -> STATEROOM +1 STAS -> STARS +1 STARK -> START +1 STARED -> STEERED +1 STANDSTILL -> FAN +1 STANDS -> SENDS +1 STANDARD -> STANDARDS +1 SQUEAMISH -> SCREAMY +1 SPONSUS -> QUALMS +1 SPONGE -> SPINES +1 SPONDYLES -> SPAWN +1 SPIRIT -> EXPERIOR +1 SPILLING -> SPINNING +1 SPICE -> SPIES +1 SPARSELY -> FIRSTLY +1 SPAKE -> TAKE +1 SPAKE -> BEING +1 SPAKE -> BEG +1 SPADDLE -> SPADEL +1 SOUTHERN -> SUDDEN +1 SOUSE -> SOUS +1 SOURCE -> SORT +1 SOUGHT -> THOUGHT +1 SOUGHT -> SOWED +1 SOUGHT -> SAW +1 SOTELES -> SARTUOUS +1 SORDID -> SARDID +1 SOOT -> SUIT +1 SONG -> SUN +1 SON -> SONG +1 SOME -> THEM +1 SOMBER -> SOMBRE +1 SOFA -> SILVER +1 SOCIALIST -> SOCIALLY +1 SO -> SUMINUTELY +1 SO -> SOUL +1 SO -> SOPHIA +1 SO -> SARKAIUS +1 SO -> MISS +1 SNOOZING -> NUSING +1 SNARLED -> SNARLS +1 SMOLNY -> MORLEY +1 SMOKESTACKS -> SMOKE +1 SMOKER -> MOCHER +1 SMIRCHED -> SMARGED +1 SMILE -> MIND +1 SMELL -> SMAR +1 SMART -> SMARE +1 SLUNK -> SUNK +1 SLIGHTLY -> SAT +1 SLACKENED -> CLACK +1 SLAB -> FLAP +1 SKYLARKS -> SKYLECKS +1 SKIRT -> GOOD +1 SKIN -> KIN +1 SKEW -> SKIRO +1 SKEPTICAL -> SCEPTICAL +1 SIXES -> SIX +1 SIRE -> SIRES +1 SIR -> SO +1 SIR -> BECAME +1 SINUHIT -> IT +1 SINGS -> SANGS +1 SINGA -> SINGAFUT'S +1 SINGA -> SHING +1 SING -> SINGING +1 SINFUL -> SENT +1 SINE -> IN +1 SIMULATES -> SIMILATES +1 SILVERWARE -> SILVER +1 SILVER -> SILVERY +1 SILLY -> DITTY +1 SILENCE -> SCIENCE +1 SIGURD -> SIR +1 SIGURD -> CIGARS +1 SIGURD -> CIGARET +1 SIGNOR -> SENOR +1 SIGNING -> SUNNING +1 SIGNED -> SIGN +1 SIGHING -> SIGNED +1 SIEVE -> SEA +1 SIDE -> THOUGHT +1 SIDE -> SORT +1 SIDE -> SIGHT +1 SHUT -> SHED +1 SHUT -> AT +1 SHUMAN -> SON +1 SHUCKS -> SHOCKS +1 SHRUBS -> SHRUGS +1 SHOULDST -> SHOULDEST +1 SHOULDERS -> SHOULDER +1 SHOULD -> YOU +1 SHOULD -> SHALL +1 SHOT -> HAD +1 SHO'LY -> SURELY +1 SHIPS -> THE +1 SHIPS -> SHIP +1 SHET -> SHUT +1 SHERIFF -> SURE +1 SHERIFF -> SIR +1 SHERIFF -> CHEER +1 SHERE -> TO +1 SHEPHERD -> SHEPARD +1 SHEETS -> SEATS +1 SHED -> SHARED +1 SHE'LL -> FOLLOWING +1 SHE -> SEA +1 SHE -> HE +1 SHATTERED -> SATURDAIL +1 SHARDURIS -> SHOW +1 SHARDURIS -> SHALL +1 SHARDURIS -> SHADORUS +1 SHALLUM -> CELEM +1 SHALL -> TO +1 SHALL -> SHOW +1 SHALL -> OUR +1 SHAKEDOWN -> SHAKE +1 SHAG -> SHAGG +1 SEYTON -> SETTON +1 SEWING -> SOON +1 SEVERE -> SAVOUR +1 SEVERAL -> SEVEREND +1 SEVENTIETH -> SEVENTEENTH +1 SEVEN -> THE +1 SET -> SAT +1 SERMON -> SAME +1 SERGEY -> SURGY +1 SERGEY -> SOJOURNOVITCH +1 SENTENCED -> INTENSE 
+1 SENT -> SAT +1 SENOR -> SIGNOR +1 SEEST -> CEASE +1 SEEN -> SEEMED +1 SEEMS -> SEEMED +1 SEEMED -> SEEMS +1 SEEMED -> SEEMING +1 SEEM -> SEEMED +1 SEEM -> SEE +1 SEEKEST -> SEEK'ST +1 SEDUCETH -> SEDUCE +1 SECURE -> SECURED +1 SEAT -> HEAT +1 SEAMEN -> SEE +1 SEAMAN -> SALMON +1 SEA -> SEAS +1 SCULPTORS -> SCULPTOR'S +1 SCRUTINISED -> DISTRESS +1 SCRIBES -> GRIBES +1 SCRAPPIN -> SCRAP +1 SCORN -> CORN +1 SCO'TCH -> SCORCHED +1 SCHULBERG'S -> SHOALBURG'S +1 SCHOOLDAYS -> SCHOOL +1 SCHOOL -> SCHOOLGIRLS +1 SCHOOL -> SCHOOLBOY +1 SCENE -> SEEN +1 SCAPEGRACES -> CAPE +1 SCAPED -> ESCAPE +1 SCAPE -> ESCAPE +1 SAYS -> TEETH +1 SAYS -> SAY +1 SAYS -> AS +1 SAYIN -> SAYING +1 SAY -> SO +1 SAY -> SEE +1 SAY -> SAVE +1 SAW -> SOLDOM +1 SATURDAY -> SAID +1 SAT -> SET +1 SARAH -> SEREN +1 SANS -> SONSPIER +1 SANITARY -> SENATORY +1 SANG -> IN +1 SAND -> SANDWARES +1 SANCTESS -> SANCTIS +1 SANCHO -> SANCHA +1 SAN -> SAMPANCISCO +1 SALONE -> SALON +1 SALLOWER -> SALARY +1 SAINT -> SAID +1 SAILS -> SAILORS +1 SAIL -> SO +1 SAIL -> SAILOR +1 SAID -> TO +1 SAID -> STOOD +1 SAID -> SET +1 SAID -> PSALMS +1 SAID -> OF +1 SAH -> SAD +1 SAH -> A +1 SAGITTAIRE -> SAGATURE +1 SACRIFICE -> SACRIFIC +1 S'POSE -> SUPPOSE +1 S -> HELEN +1 RYO -> GENERO +1 RUSSIA -> A +1 RUSHED -> RUSH +1 RUNS -> ONE'S +1 RUNG -> RANG +1 RUN -> WARM +1 RUN -> RAN +1 RUN -> ENOUGH +1 RUMP -> WRONG +1 RUM -> ROMAN +1 RULER -> SPOTIC +1 RUBENSES -> REUBEN +1 RUBBERS -> WRAPPERS +1 ROXBURY -> BRAXBURY +1 ROWED -> RIDE +1 ROUTE -> ROUGH +1 ROUNDED -> ROUTED +1 ROUND -> AROUND +1 ROTHS -> WORSE +1 ROSE -> AROSE +1 ROSAMUN -> ROSAMOND +1 ROPE'S -> ROPES +1 ROPE'S -> HOPES +1 ROOTS -> RUTHS +1 ROOM -> RUM +1 RONALD -> RANALD +1 ROMANCE -> ROMANS +1 ROLL -> ROCCALL +1 ROARING -> ROWING +1 ROAD -> RULED +1 RISDON -> RISDEN +1 RISDON -> RICHARD +1 RINGMASTER -> RING +1 RINDS -> RHINS +1 RIGOROUS -> RECKLESS +1 RIGHT -> THREAD +1 RIDGE'S -> RICHES +1 RIDER -> WRITER +1 RIDER -> RATURIST +1 RID -> INTO +1 RICHARD -> WRETCHED +1 RHODIAN -> RODIAN +1 RHODIAN -> RADIAN +1 RHODES -> ROADS +1 REVOLUTIONISTS -> REVOLUTION +1 REVOLUTIONIST -> REVOLITIONIST +1 REVOLTE -> REVOLT +1 REVEREND -> ROBIN +1 REVEREND -> RIVER +1 REVELING -> REVELLING +1 REVELATION -> RELATION +1 RETZCH'S -> WRETCHES +1 RETURN -> RETURNING +1 RETURN -> RETURNED +1 RESUMED -> JAMES'S +1 RESTORETH -> RESTORE +1 RESTIVE -> WRETS +1 RESOLVED -> WE +1 RESK -> REST +1 RESISTING -> FIND +1 RESINOUS -> ZENOUS +1 RESCUED -> RESCUE +1 REQUEST -> QUEST +1 REPUTATION -> REPETITION +1 REPLY -> NEDCOV +1 REPEATED -> REPLIED +1 REND -> RUN +1 REMISSION -> REMISSIONOUS +1 REMEMBEREST -> REMEMBER +1 REMARKED -> REMARK +1 REMAINED -> REMAINS +1 REMAIN -> REMAINED +1 RELEASED -> LESS +1 RELATED -> RELIGHTED +1 REJECT -> REJECTED +1 REIGNS -> REIGN +1 REIGNED -> RAINED +1 REGULATION -> REGULATING +1 REGIN -> REGAN +1 REGARDING -> GUARDING +1 REG'LER -> REG'LAR +1 REFUGE -> RED +1 REFORMERS -> REFUSE +1 REFORMERS -> REFORMED +1 REFORM -> FORM +1 REFERENCE -> REFUCER +1 REELECTION -> RE +1 RED -> REDMOCKERS +1 RECTUM -> RECTIM +1 RECEDED -> WAS +1 RECAPTURED -> RE +1 REAP -> READ +1 REALISED -> REALIZED +1 READY -> RATHER +1 REACHED -> RAGED +1 RAY -> WRAYE +1 RATTLING -> RIDING +1 RATHER -> READY +1 RATHER -> MOTHER +1 RATHER -> JOHN +1 RASCALS -> RASCAL +1 RAPSCALLIONS -> RATCALIONS +1 RAPPERS -> WRAPPERS +1 RANSOM -> RANDOM +1 RAN -> RUN +1 RAMSES -> RANSES +1 RAM -> ROOM +1 RAISE -> RAISED +1 RAISE -> PRINCE +1 RAINY -> RAINING +1 RAIN -> REIN +1 RAID -> RAY +1 RAGE -> RATE +1 RADPROP -> 
REDRUP +1 RACKETEERS -> RAGATIRS +1 RACKED -> WRAPPED +1 RACHEL -> RIGIDLY +1 RACHEL -> MAKE +1 RACES -> RAYS +1 RABBITS -> RABBIT'S +1 RABB'S -> RABBS +1 QUOTH -> QUOMAN +1 QUMMUKH -> CUMICU +1 QUITE -> ACQUAINTED +1 QUICK -> QUICKLY +1 QUEST -> PRESS +1 QUANTRELL -> QUANTREAL +1 QUANTITIES -> QUALITIES +1 QUACKS -> CLACKS +1 PYM -> POEM +1 PUTTING -> PUT +1 PUTTEL -> PATTERN +1 PUTS -> BUT +1 PUT -> PULL +1 PURPORTING -> REPORTING +1 PUMP -> PUMPED +1 PULLEY -> POLY +1 PULLED -> POURED +1 PUDDLES -> POTTLES +1 PSALM -> THE +1 PRYTANEUM -> BRITTANNIUM +1 PROTECTORATE -> PROTECTOR +1 PROPRE -> A +1 PROMOTIVE -> PROMOTED +1 PROHIBITION -> PROBES +1 PRODUCES -> PRODUCED +1 PRODUCE -> PRODUCED +1 PROCLUS -> PROCLAUS +1 PROAS -> PROTS +1 PROAS -> PROITS +1 PROAS -> PERHAPS +1 PROA -> PROW +1 PRIORESS -> PYRIUS +1 PRIORESS -> PRIOR +1 PRIORESS -> PIRATES +1 PRIORESS -> PIRAS +1 PRIORESS -> PIOUS +1 PRINCIPALS -> PRINCIPLES +1 PRIMER -> PRIMARY +1 PRIEST -> PREACH +1 PRESTIGE -> PRESAGE +1 PRENTICESHIP -> PRENTICE +1 PRECEPTORS -> PERCEPTORS +1 PREACHED -> PREACH +1 PRAYERS -> PRAY +1 PRAM -> IN +1 PRAISEWORTHY -> PRAISED +1 PRACTITIONER -> PETITIONERS +1 POURED -> PUT +1 POUCHES -> PIUCHES +1 POTUM -> HE +1 POTION -> FORCIAN +1 POTASSIUM -> POTASSIAN +1 POSTHASTE -> POST +1 POSTERN -> PASSING +1 POST -> POSTS +1 PORTO -> PORT +1 PORTENTOUS -> POTENTI +1 PORED -> POURED +1 POPULACE -> POPULOUS +1 POOR -> FAR +1 POMEROY -> POMROY +1 POLYTECHNIC -> POLY +1 POLONIUS -> BONIUS +1 POLL -> PAUL +1 POINT -> BY +1 PLUMB -> PLUM +1 PLEASANT -> PRESENT +1 PLEAS -> PLACE +1 PLEADED -> PLAYED +1 PLATTERBAFF'S -> PLATTERBUFF'S +1 PLATTERBAFF -> PLATTERBUFF +1 PLATTERBAFF -> FURTHER +1 PLATED -> LIKE +1 PLANNING -> CLIMBING +1 PLAIN -> PLANE +1 PLAIN -> PLAN +1 PLACE -> PLATES +1 PLACE -> PLACED +1 PLACE -> PACE +1 PITTS -> FITZ +1 PITHUM -> PITTHAM +1 PISTOLES -> PISTOL +1 PIPE -> PEG +1 PINKUS -> PICK +1 PINKERTON'S -> PINKERTIN'S +1 PINCHED -> IMPINGED +1 PILLOWED -> PILL +1 PILESER -> LAUGHED +1 PILESER -> LAUGH +1 PILESER -> LAST +1 PILESER -> GLASS +1 PIKES -> PIPES +1 PIGEONCOTES -> PIGEON +1 PIGEONCOTE -> PIGEONOTE +1 PIGEONCOTE -> PEACH +1 PIGEONCOTE -> DIGESON +1 PICTURE -> PITCHER +1 PICKED -> PRICKED +1 PHUT -> FIVE +1 PHUT -> AFOOT +1 PHOSPHOR -> PHOSPHOBS +1 PHOSPHOR -> PHOSPHER +1 PHILISTINES -> FURTHER +1 PHILIPPUS -> PHILIPUS +1 PHILIPPUS -> PHILIP +1 PHILIP -> FELLOW +1 PETREL -> PETEL +1 PETER'S -> PETER +1 PETER -> BEATER +1 PESTE -> PESTS +1 PERSPIRED -> POISPIRED +1 PERNOUNCE -> PRONOUNCE +1 PERFECTLY -> PERFECTUALLY +1 PERE -> PERFELASHES +1 PERE -> PALACE +1 PERAMBULATOR'S -> PRAMULATOR'S +1 PEONAGE -> PINIONS +1 PENDING -> SPENDING +1 PEKAHIAH -> PECAH +1 PEASANTS -> PIECE +1 PEAS -> PEASE +1 PEARL -> PER +1 PAY -> PASS +1 PAWNBROKER -> PONDBROKER +1 PAWNBROKER -> BROKER +1 PAUSED -> PASSED +1 PATIENCE -> AS +1 PASTES -> PACE +1 PAST -> PASS +1 PASSES -> PAUSES +1 PASSED -> PASS +1 PARTS -> PART +1 PARTLY -> PARTIALLY +1 PARRICIDES -> PARASITES +1 PARR -> PAR +1 PARKS -> BOX +1 PARDON -> PARTISER +1 PANNIERS -> TEN +1 PALLIATE -> TO +1 PALL -> POOL +1 PALAESTRA -> PELLESTRA +1 PAIR -> PARENT +1 PADDLING -> PADDLIN +1 PACES -> PLACES +1 PACE -> THE +1 P -> PATUM +1 OWNERS -> LANDOWNERS +1 OWN -> ON +1 OWE -> ARE +1 OW'M -> AM +1 OW -> HOW +1 OVERRIPENESS -> OVER +1 OVERHEARD -> OWNED +1 OVERFULL -> OVER +1 OVERFLOWING -> OVERWHELMING +1 OVER -> OVERPRUDENT +1 OVER -> OF +1 OUTLINES -> OUTLINE +1 OUTER -> OUTER'S +1 OUT -> YET +1 OUT -> UP +1 OUT -> SOUTH +1 OUT -> PATENTS +1 OUT -> 
OUTGAZE +1 OUT -> ON +1 OUT -> DOWN +1 OUT -> ALL +1 OUT -> ALBER +1 OUR -> UNDERSTRUCTION +1 OUR -> OURSPORT +1 OUR -> OURSAN +1 OUR -> I'LL +1 OUR -> HER +1 OUR -> ARE +1 OUR -> A +1 OUNCES -> OZ +1 OUGHTN'T -> OUGHT +1 OUGHT -> ARE +1 OUGHT -> ALL +1 OUEN -> WAT +1 OTTO -> ARE +1 OTHER'S -> COMMENTS +1 ORIENTAL -> OF +1 ORFICER -> ORFASTER +1 ORDER -> ODO +1 ORBIS -> ORBUS +1 OR -> OUR +1 OR -> OPPOSITION +1 OR -> OH +1 OR -> I +1 OR -> FULL +1 OR -> AS +1 OR -> ARE +1 OR -> AND +1 OR -> ALL +1 OR -> A +1 OPPRESSORS -> IMPRESSORS +1 OPENED -> OPEN +1 OPEN -> UP +1 OPE -> LOVE +1 ONTO -> ON +1 ONTO -> INTO +1 ONLY -> OWING +1 ONLY -> OLD +1 ONE -> WORLD +1 ONE -> WON +1 ONE -> A +1 ONCE -> ONE +1 ON'T -> ON +1 ON -> WHEN +1 ON -> UPON +1 ON -> UNTO +1 ON -> UNLUCK +1 ON -> UNCREAM +1 ON -> UNCHANGED +1 ON -> TO +1 ON -> TILL +1 ON -> ONLY +1 ON -> DOING +1 OME -> HOME +1 OLL -> ALL +1 OLIVE -> OUT +1 OLIVE -> ONLY +1 OLIVE -> OLIV +1 OLIVE -> OLDEST +1 OLIVE -> I +1 OLD -> O +1 OLD -> ALL +1 OKAY -> OH +1 OIL -> ORE +1 OH -> UH +1 OH -> OV +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICIALS +1 OFFICER -> OFFICERS +1 OFFENSIVE -> OFFENCE +1 OFFENSE -> OFFENCE +1 OFFEN -> OFF +1 OFF -> UP +1 OFF -> OUR +1 OFF -> MORVE +1 OF -> WITH +1 OF -> WHAT +1 OF -> VAZARRE +1 OF -> TO +1 OF -> THIS +1 OF -> THE +1 OF -> THAT +1 OF -> ON +1 OF -> O +1 OF -> INTO +1 OF -> IN +1 OF -> HER +1 OF -> AT +1 OF -> ASSUME +1 OF -> AND +1 OBOCOCK -> OBEY +1 OBJECT -> SUBJECT +1 O'NIGHTS -> A +1 O'NEILL -> O'NEIA +1 O -> OR +1 O -> I +1 O -> AM +1 O -> ALL +1 NYTOUCH -> KNIGHT +1 NUZHAT -> UZHAT +1 NUZHAT -> NUZHA'S +1 NUZHAT -> KNOWSAT +1 NUTS -> KNOTS +1 NUNS -> NUN'S +1 NUISANCE -> NOTIONS +1 NU'UMAN -> NUMAN +1 NOWT -> OUT +1 NOW -> SO +1 NOW -> NO +1 NOW -> NABRAMAN +1 NOW -> BY +1 NOTTINGHAM -> NINE +1 NOTICE -> NOTICED +1 NOT -> REPEAT +1 NOT -> PURSUAL +1 NOT -> NOW +1 NOT -> NOR +1 NOT -> NIGH +1 NOT -> KNOWN +1 NOT -> IT +1 NORTH -> NORTHEAST +1 NOPE -> NOTE +1 NONETHELESS -> NONE +1 NONE -> NO +1 NO -> THOUGH +1 NICO -> NIGO +1 NICKEL -> FLASH +1 NEXTER -> NEXT +1 NEW -> YOU +1 NEW -> NEWER +1 NERVE -> NERVES +1 NEOSHO -> NIOKILL +1 NEIGHBOURS -> NEIGHBORS +1 NEIGHBOURING -> NEIGHBORING +1 NEIGHBORS -> LABORS +1 NEEDLED -> NEEDLE +1 NEED -> NEEDED +1 NEARING -> NEAR +1 NEAREST -> NURTURE +1 NEAREST -> NURSE +1 NEAREST -> NEWS +1 NEARER -> IN +1 NEARED -> NEAR +1 NAUGHT -> NOUGHT +1 NATURALLY -> NATURAL +1 NATURAL -> NATURALLY +1 NARRATIVES -> NARRATIVE +1 NANDY'S -> ANDY'S +1 MYSTERIOUS -> MYSTERY +1 MYRTILUS -> MYRTALIS +1 MYRTILUS -> MERTOLUS +1 MYRTILUS -> MARTILLUS +1 MYRTILUS -> BURTLES +1 MY -> BY +1 MUSTACHES -> MOUSTACHES +1 MUST -> MY +1 MUST -> MISTS +1 MUST -> MISTER +1 MUSKETS -> MASKETS +1 MUSICIANS -> MEASIANS +1 MURDOCH'S -> MARDOC'S +1 MURDOCH -> MARDOX +1 MURDOCH -> MARDOCK +1 MUIR -> YOU +1 MUG -> MUCH +1 MUD -> MATTER +1 MOWER -> OVER +1 MOVEMENT -> MOMENT +1 MOUTHWHAT -> MOUTH +1 MOUTHS -> MOTHS +1 MOUTH -> MOUSE +1 MOURNING -> MORNING +1 MOUNTNORRIS -> MONTORAS +1 MOUNT -> MOUND +1 MOTIONLESS -> MUCH +1 MOTHER -> PRESENTERS +1 MOST -> PRO +1 MOST -> PERCY +1 MOSES -> ROSES +1 MOSES -> OF +1 MOSES -> MOVES +1 MORTIFICATIONTHAT -> MORTIFICATION +1 MORTEM -> MODE +1 MORE -> SMALL +1 MOPED -> MIRKED +1 MOOR -> MORE +1 MONTHLY -> MOUTHFULLY +1 MONSEIGNEUR -> MONSIEUR +1 MOMMOL -> MAMMA +1 MODERATE -> MARGARET +1 MO -> MORE +1 MIXTURE -> MIXED +1 MIX -> MAKE +1 MISTER -> MISSUS +1 MISTER -> MISS +1 MISTER -> MIDSER +1 MISTER -> BISHOIS +1 MISSUS -> MUST +1 MISSED -> MISTING +1 MISCHIEVOUS 
-> MACHIEVOUS +1 MINNIE -> MANY +1 MINNEAPOLIS -> MANY +1 MINISTER -> MEANS +1 MINIONETTE -> MEAN +1 MINE -> MIND +1 MINE -> MIKE +1 MIND -> MINE +1 MIMICK -> MIMIC +1 MILLY -> MERELY +1 MILLSTON -> MILLSTONE +1 MILICENT'S -> MILICSON'S +1 MILICENT -> MILLSON +1 MILDEWED -> MELTED +1 MIHI -> ME +1 MIDRIFF -> MIDRIFTS +1 MIDIAN -> MILLION +1 MIDIAN -> MENDIAN +1 MIDDLING -> MIDDLIN +1 METHINKETH -> METHINK +1 MET -> MAKE +1 MESTIENNE -> THE +1 MESTIENNE -> MESSION +1 MESTIENNE -> MESSIAN +1 MESTIENNE -> MESS +1 MESTIENNE -> MACHIN +1 MESSES -> MASSES +1 MERRY -> MERRYMAKING +1 MERNEPTAH -> PATH +1 MERLONUS -> MERLINUS +1 MERLONUS -> MERLINA'S +1 MERIT -> MARRIT +1 MERIT -> MARRIAGE +1 MENTAL -> MANTLE +1 MEND -> MEAN +1 MEN'S -> MAN'S +1 MEN -> MEAN +1 MEN -> INTO +1 MEN -> CAME +1 MEDICAMENTS -> MEDICMENTS +1 MEDICAL -> MEDICA +1 MEDIAN -> MEDIUM +1 MEAT -> ME +1 MEAT -> HAVE +1 MEANTIME -> MEAN +1 ME -> YOU +1 MC -> MICROCLE +1 MC -> MAC +1 MC -> MA +1 MAY -> ME +1 MAY -> MARGAR +1 MAY -> MADE +1 MATTERS -> MATTER +1 MATI -> MET +1 MATI -> MANTI +1 MATEY -> MATE +1 MATERIALS -> MATURES +1 MATE -> MADE +1 MASTERY -> MYSTERY +1 MASTER -> MASSR +1 MASKED -> MASSED +1 MARVELED -> MARVELLED +1 MARSPEAKER -> MARKEER +1 MARSHAL -> MARTIAL +1 MARRIAGE -> MARES +1 MARMALADES -> MARMALITIES +1 MANKATO -> MANKE +1 MANKATO -> MANKADO +1 MANASSEH -> MANETTE +1 MAN -> REMAIN +1 MAMIE -> MAY +1 MALNUTRITION -> MALTRICIAN +1 MAKES -> MATRON +1 MAKE -> MADE +1 MAKAN -> MACAN +1 MAJORITY -> MATURITY +1 MAINE -> MIN +1 MAIL -> MALE +1 MAID -> MATE +1 MADE -> READ +1 MAD -> MADGE +1 MABILLON -> MARVALAN +1 M -> NOT +1 M -> I +1 M -> ENEM +1 M -> AND +1 M -> AM +1 LYSIMACHUS -> LYSMACHUS +1 LUNA'S -> LUNDY'S +1 LUNA'S -> LUNAR'S +1 LUKE -> LOOK +1 LUGGAGE -> LEGGED +1 LUCIEN -> LUCIAN +1 LOWER -> BLOW +1 LOW -> LOKI +1 LOVER -> LOVE +1 LOUD -> ALL +1 LOST -> YOU +1 LOSSES -> LOSES +1 LOSS -> LAWS +1 LORD'S -> LORDS +1 LORD -> LOT +1 LORD -> LARD +1 LOQUACITY -> LEQUESTITY +1 LOOKOUT -> LOOK +1 LOOKED -> LOOK +1 LOOKED -> LIFTED +1 LOOK -> WORK +1 LOOK -> LOOKED +1 LOOK -> LIVE +1 LONGER -> LONG +1 LONESOMENESS -> LONESOME +1 LONE -> LONG +1 LOCK -> LOOK +1 LL -> CHILLED +1 LIZABETH -> ELIZABETH +1 LIVES -> IS +1 LIVED -> IF +1 LIVE -> LEAVE +1 LISTEN -> LISTENED +1 LIQUID -> LIKELY +1 LINE -> LYING +1 LINE -> LAND +1 LIMPED -> LIMP +1 LIME -> LINE +1 LILBURN -> LOWBOURNE +1 LILBURN -> LOWBORNE +1 LIKELY -> LIKE +1 LIKE -> THE +1 LIKE -> NIGHTLY +1 LIKE -> LIKED +1 LIGHT -> LIGHTFOOTED +1 LIFETIME -> LIFE +1 LIFE -> LIE +1 LIE -> LIKE +1 LIDDY -> LIVE +1 LIDDY -> LEAVY +1 LIDDY -> LADY +1 LIAISON -> YEAR +1 LEXINGTON -> LESSINGTON +1 LEVITICUS -> LUVIDICUS +1 LEVER -> LOVER +1 LEVER -> LOVE +1 LETTERS -> LETTER +1 LETTERS -> LET +1 LETS -> LET'S +1 LET'S -> LET +1 LET -> THEM +1 LET -> NEKHLUD +1 LET -> LEFT +1 LET -> LATER +1 LET -> LATE +1 LESS -> US +1 LESS -> LEST +1 LESLIE -> THIS +1 LESLIE -> LIZZLY +1 LESLIE -> LIZZLING +1 LESLIE -> LIZZIE +1 LESLIE -> LISLEY +1 LESLIE -> LISALLY +1 LESLIE -> LINSLEY +1 LESLIE -> IT +1 LENOIR -> NOIR +1 LEND -> BLENDEST +1 LEMON -> LINENSHIPS +1 LEGS -> OR +1 LEFT -> LIVED +1 LEFT -> LILY +1 LEFT -> LIFTED +1 LEFT -> LET +1 LEFT -> LAST +1 LEER -> YARD +1 LEE'S -> LEE +1 LEAVE -> LE +1 LEARN -> LEARNED +1 LEAPT -> LEAPED +1 LEADPENCIL -> LEAD +1 LEADERSHIP -> LEGERSHIP +1 LEADERS -> LEADER'S +1 LAWS -> NOISE +1 LAW -> LAWN +1 LAW -> LAST +1 LAW -> AND +1 LAUGHED -> THEY'LL +1 LAUDERDALE -> LAURAIL +1 LAUDERDALE -> LAUDIDAL +1 LATH -> LAST +1 LAST -> MASTER +1 LAST -> 
Decoding error analysis (alignment of the reference transcripts against the hypotheses), listed in four parts:

SUBSTITUTIONS: count  ref -> hyp
  One entry per substituted word pair, e.g. 1 LABOUR -> LABOR, 1 KNOT -> NOT, 1 GREY -> GRAY.

DELETIONS: count  ref
  Reference words missing from the hypothesis; the most frequent are short function words (25 THE, 16 IT, 15 IN, 15 A, 14 IS, 12 AND, 10 TO).

INSERTIONS: count  hyp
  Hypothesis words with no counterpart in the reference; likewise dominated by function words (24 THE, 24 A, 20 IN, 17 IT, 16 AND, 15 OF, 12 TO).

PER-WORD STATS: word  corr  tot_errs  count_in_ref  count_in_hyp
  Per-word totals over the whole test set, e.g. THE 3006 259 3134 3137 and A 1044 216 1145 1159.
1 0 1 +HESDAY 0 1 0 1 +HERSELF 35 1 35 36 +HERRIOT 0 1 0 1 +HERIOT'S 0 1 1 0 +HERIOT 0 1 1 0 +HERETT'S 0 1 0 1 +HERETOFORE 0 1 1 0 +HERDSMEN 0 1 1 0 +HERDSMAN 0 1 0 1 +HERBERT 0 1 0 1 +HERALD 0 1 0 1 +HEPTARK 0 1 0 1 +HEPTARCHIES 0 1 1 0 +HEN 0 1 1 0 +HELVIN 0 1 1 0 +HELVAN 0 1 0 1 +HELPS 0 1 0 1 +HELPED 2 1 3 2 +HELM 1 1 2 1 +HEELED 0 1 1 0 +HEEDED 0 1 1 0 +HEDGES 0 1 1 0 +HEBREW 1 1 2 1 +HEAT 1 1 1 2 +HEARTY 2 1 3 2 +HEARTS 4 1 5 4 +HEARSE 3 1 4 3 +HEALED 1 1 1 2 +HEADS 4 1 4 5 +HEADQUARTERS 1 1 2 1 +HEADQUARTER 0 1 0 1 +HEADLONG 0 1 1 0 +HEADLIGHTS 0 1 1 0 +HE'D 4 1 4 5 +HAWHAT 0 1 0 1 +HAWED 0 1 1 0 +HAW 0 1 1 0 +HAVING 22 1 22 23 +HAVEN 0 1 1 0 +HAV 0 1 0 1 +HATTERSLEY 0 1 1 0 +HATCHES 0 1 0 1 +HASTE 5 1 5 6 +HASAN 1 1 2 1 +HARVEY'SWHICH 0 1 1 0 +HARVEST 3 1 3 4 +HARRIS 1 1 2 1 +HAROLD 0 1 1 0 +HARMON'S 0 1 0 1 +HARKNESS 0 1 1 0 +HARKINS 0 1 0 1 +HARE 0 1 1 0 +HARDWARE 0 1 1 0 +HARDLY 9 1 10 9 +HAPPY 7 1 7 8 +HAPPILY 1 1 1 2 +HAPLY 0 1 1 0 +HANDY 0 1 0 1 +HANDLES 0 1 0 1 +HAM 0 1 0 1 +HALTS 0 1 1 0 +HALTERSLEY 0 1 0 1 +HALEY'S 0 1 1 0 +HALELY 0 1 0 1 +HAIRY 0 1 0 1 +HAIN'T 1 1 1 2 +HAIL 1 1 1 2 +HADN'T 1 1 1 2 +HADDA 0 1 1 0 +GYPTIAN 0 1 0 1 +GYLINGDEN 0 1 1 0 +GUV'NER 0 1 0 1 +GUNS 2 1 3 2 +GUNDRAIN 0 1 0 1 +GUNDERING 0 1 0 1 +GULLET 0 1 1 0 +GULFS 0 1 0 1 +GULD 0 1 0 1 +GUIRUN'S 0 1 1 0 +GUINEAS 0 1 0 1 +GUINEA 2 1 3 2 +GUILD 0 1 1 0 +GUESTS 2 1 2 3 +GUESTRA 0 1 0 1 +GUEST 3 1 4 3 +GUESS 4 1 5 4 +GUDRUN 0 1 1 0 +GUARDING 0 1 0 1 +GRUMBLINGLY 0 1 1 0 +GRUFFLY 0 1 1 0 +GROWING 2 1 3 2 +GROW 1 1 1 2 +GROVE 0 1 1 0 +GROUND 6 1 6 7 +GROAN 1 1 2 1 +GRIS 0 1 0 1 +GRINDING 0 1 0 1 +GRIMSBY 0 1 1 0 +GRIM'S 0 1 0 1 +GRIGGLY 0 1 0 1 +GRIBIER 0 1 1 0 +GRIBES 0 1 0 1 +GREW 4 1 4 5 +GREENTON 0 1 1 0 +GREENSON 0 1 0 1 +GREENBACKS 0 1 1 0 +GREEN 2 1 2 3 +GREEBS 0 1 0 1 +GRECIOUS 0 1 0 1 +GREAVES 0 1 1 0 +GREAT 40 1 40 41 +GRAVE 16 1 17 16 +GRATITUDE 4 1 5 4 +GRASPS 0 1 1 0 +GRASPED 1 1 1 2 +GRAPPLE 0 1 1 0 +GRANTEL 0 1 0 1 +GRANDAME 0 1 1 0 +GRAND 5 1 5 6 +GRAMMER 0 1 0 1 +GRAMMATIUS 0 1 0 1 +GRAMMATEUS 0 1 1 0 +GRAM 0 1 1 0 +GRAHAME 0 1 0 1 +GRACIOUS 2 1 3 2 +GRACIAN 0 1 0 1 +GOWN 0 1 1 0 +GOVERNOR 2 1 2 3 +GOVERNMENT'S 0 1 1 0 +GOVERN 0 1 0 1 +GOULD 0 1 0 1 +GORE 0 1 0 1 +GORDON'S 2 1 2 3 +GORD 0 1 0 1 +GOODS 4 1 5 4 +GONDON 0 1 0 1 +GOLDS 0 1 0 1 +GOLDFISH 0 1 1 0 +GOLDEN 2 1 3 2 +GOES 6 1 7 6 +GODEBILLIOS 0 1 1 0 +GOBYS 0 1 0 1 +GOBY'S 0 1 0 1 +GOBIES 0 1 0 1 +GNAWING 0 1 1 0 +GLISPIN 1 1 2 1 +GLAY 0 1 0 1 +GLASS 9 1 9 10 +GLAD 4 1 5 4 +GIVIN 0 1 0 1 +GIVANOVITCH 0 1 0 1 +GIRTHING 0 1 1 0 +GIRTHED 0 1 1 0 +GIRT 0 1 0 1 +GIRDS 0 1 1 0 +GIRDING 0 1 0 1 +GIRDED 0 1 0 1 +GIMERNETTE 0 1 0 1 +GILINGDEN 0 1 0 1 +GIGS 0 1 0 1 +GIGIRIS'S 0 1 0 1 +GIFTS 0 1 1 0 +GESTURE 1 1 1 2 +GES 0 1 0 1 +GERT 0 1 0 1 +GERFATHER 0 1 0 1 +GEORGE'SWHICH 0 1 1 0 +GEORGE 2 1 2 3 +GENTLEST 0 1 0 1 +GENTLEMEN'S 0 1 1 0 +GENTLEMAN'S 0 1 0 1 +GENIOR 0 1 0 1 +GENERO 0 1 0 1 +GELATIN 0 1 0 1 +GAZE 3 1 4 3 +GAUTHIER 0 1 1 0 +GAULS 0 1 1 0 +GATHIER 0 1 0 1 +GATHERED 8 1 8 9 +GASTED 0 1 0 1 +GARDENS 0 1 0 1 +GARDEN'S 0 1 1 0 +GAMMON 0 1 1 0 +GAMIN 0 1 0 1 +GAME 6 1 6 7 +GALLATIN 0 1 1 0 +GALLANT 3 1 3 4 +GAINED 1 1 1 2 +G'YIRLS 0 1 1 0 +FYOV'S 0 1 0 1 +FUZZ 0 1 1 0 +FUZ 0 1 0 1 +FUVENT 0 1 0 1 +FURZE 0 1 1 0 +FUR 1 1 1 2 +FUPS 0 1 0 1 +FUNDS 0 1 1 0 +FUND 1 1 1 2 +FUN 2 1 2 3 +FROZE 0 1 1 0 +FRONTIERS 1 1 2 1 +FRONTIER 0 1 0 1 +FROIS 0 1 0 1 +FROGS 2 1 2 3 +FROG'S 0 1 1 0 +FRO 1 1 2 1 +FRITTEN 0 1 0 1 +FRISTOW 0 1 0 1 +FRISTOE'S 0 1 1 0 +FRIGHTFUL 3 1 4 3 +FRIGHTENS 0 1 1 0 +FRIENDS 17 1 17 18 +FRIENDLY 4 1 4 5 +FRIED 0 1 0 1 
+FRIAR 0 1 1 0 +FRET 1 1 2 1 +FRENCHARD 0 1 0 1 +FRENCH 4 1 5 4 +FREEZE 0 1 0 1 +FREEWAY 0 1 1 0 +FREES 0 1 1 0 +FREEDOM 5 1 6 5 +FREDERI 0 1 0 1 +FRED 0 1 0 1 +FRE 0 1 0 1 +FRANK 1 1 1 2 +FRANCOIS 0 1 1 0 +FRANCISCO 4 1 5 4 +FRANCIS 0 1 1 0 +FRANCES 0 1 0 1 +FRANCE 3 1 3 4 +FRANC 0 1 1 0 +FOURTEENTHAT'S 0 1 1 0 +FOURTEEN 3 1 3 4 +FOUNDER 0 1 0 1 +FOUNDED 1 1 2 1 +FOUND 37 1 37 38 +FOUGHT 1 1 2 1 +FORTY 12 1 13 12 +FORTS 0 1 1 0 +FORTE 0 1 0 1 +FORSTER 0 1 0 1 +FORMER 3 1 4 3 +FORMED 1 1 2 1 +FORGIFTS 0 1 0 1 +FOREMOTHER 0 1 0 1 +FOREMAN 0 1 1 0 +FOREGATHERED 0 1 1 0 +FOREBOARDS 0 1 0 1 +FORCIAN 0 1 0 1 +FORCES 3 1 3 4 +FORCEMENT 0 1 0 1 +FORCED 3 1 4 3 +FOOTED 0 1 1 0 +FOOL 3 1 4 3 +FOOD'S 0 1 0 1 +FOOD 3 1 4 3 +FONTREVAL 0 1 0 1 +FONTEVRAULT 0 1 1 0 +FONDS 0 1 0 1 +FOLLOWS 5 1 6 5 +FOLK 4 1 4 5 +FOLIEVED 0 1 0 1 +FOE 0 1 1 0 +FLUTTERING 1 1 2 1 +FLOWERBEDS 0 1 1 0 +FLOWER 1 1 1 2 +FLOW 0 1 1 0 +FLOSSY 0 1 1 0 +FLOSSIE 0 1 0 1 +FLORO'S 0 1 0 1 +FLOORBOARDS 0 1 1 0 +FLOOR 3 1 3 4 +FLIND 0 1 0 1 +FLIES 0 1 0 1 +FLEERED 0 1 1 0 +FLAVOUR 0 1 0 1 +FLAVORITE 0 1 0 1 +FLATTERER 0 1 1 0 +FLATTERED 0 1 1 0 +FLATHEADS 0 1 1 0 +FLAT 0 1 0 1 +FLASHLIGHT 0 1 1 0 +FLASH 2 1 2 3 +FLAREFF 0 1 0 1 +FLARED 0 1 0 1 +FLARE 0 1 0 1 +FLAP 0 1 0 1 +FLABRA 0 1 0 1 +FLABBERGASTED 0 1 1 0 +FIVE 20 1 20 21 +FITZ 0 1 0 1 +FIT 7 1 7 8 +FISHED 1 1 2 1 +FIRSTLY 0 1 0 1 +FIRSTER 0 1 1 0 +FIRST 54 1 54 55 +FIRS 0 1 0 1 +FIRE 14 1 15 14 +FINNEY 1 1 2 1 +FINNELL 0 1 0 1 +FINISHED 3 1 3 4 +FINICAL 0 1 1 0 +FINELY 1 1 2 1 +FINDING 7 1 8 7 +FINAL 2 1 2 3 +FILTRATES 0 1 1 0 +FILTRATE 0 1 1 0 +FILLS 1 1 1 2 +FILIANTLY 0 1 0 1 +FIGURES 0 1 0 1 +FIGURED 0 1 0 1 +FIGURE'S 0 1 1 0 +FIGGER 0 1 1 0 +FIFTEENTH 0 1 1 0 +FIFTEEN 7 1 7 8 +FIELDS 2 1 2 3 +FIACRE 0 1 1 0 +FIACCHUS 0 1 0 1 +FERVENT 0 1 0 1 +FERIO 0 1 0 1 +FELT 18 1 19 18 +FEELS 1 1 2 1 +FEELING 10 1 10 11 +FEEL 12 1 13 12 +FEEDS 0 1 1 0 +FEED 0 1 1 0 +FEDERATE 0 1 0 1 +FEATS 0 1 0 1 +FEAT 0 1 0 1 +FAVAN 0 1 0 1 +FAULTS 2 1 2 3 +FAULT 5 1 5 6 +FAUCES 0 1 1 0 +FATS 1 1 2 1 +FATHERS 1 1 1 2 +FAT'S 0 1 0 1 +FASTENER'S 0 1 0 1 +FARRENDER 0 1 0 1 +FARE 0 1 1 0 +FANGED 0 1 1 0 +FANCY 4 1 4 5 +FAMOUS 1 1 1 2 +FAMILY 18 1 18 19 +FAM'LY 0 1 1 0 +FALLINGER 0 1 0 1 +FALLING 5 1 6 5 +FALL 4 1 5 4 +FAIRRING 0 1 0 1 +FAIR 12 1 13 12 +FAILING 2 1 3 2 +FAILED 7 1 8 7 +FAGONNE 0 1 0 1 +FAFNER'S 0 1 0 1 +FAFNER 0 1 0 1 +FAFFNER 0 1 0 1 +FACT 14 1 14 15 +FACED 2 1 2 3 +EXTRAVE 0 1 0 1 +EXTRACTED 0 1 0 1 +EXTRACT 2 1 3 2 +EXTRA 1 1 2 1 +EXTINUATING 0 1 0 1 +EXTINGUISHING 0 1 1 0 +EXTENUATING 0 1 1 0 +EXTEND 0 1 1 0 +EXPOUNDS 0 1 0 1 +EXPOSED 3 1 3 4 +EXPOSE 2 1 3 2 +EXPLOITING 0 1 1 0 +EXPLODING 0 1 0 1 +EXPLAINED 4 1 4 5 +EXPERIOR 0 1 0 1 +EXPERIENCE 3 1 4 3 +EXPELLED 1 1 1 2 +EXPEL 0 1 1 0 +EXPECTED 7 1 8 7 +EXPANSE 1 1 2 1 +EXERT 1 1 1 2 +EXCLAIMED 14 1 15 14 +EXCITING 0 1 1 0 +EXCEPT 11 1 11 12 +EXAMINING 2 1 3 2 +EXAMINED 3 1 3 4 +EXACTLY 9 1 9 10 +EXACKLY 0 1 1 0 +EVIL 4 1 5 4 +EVERYONE 0 1 1 0 +EVERYBODY'S 0 1 0 1 +EVERY 38 1 38 39 +EVERGREWING 0 1 0 1 +EVENLENTH 0 1 0 1 +EVAPORATE 2 1 3 2 +EV'YBODY'S 0 1 1 0 +EUSIDIUS 0 1 0 1 +EUREKA 0 1 0 1 +EUPHRATES 0 1 1 0 +EUPHRANOR 0 1 1 0 +EUPHRANER 0 1 0 1 +EUPHATEES 0 1 0 1 +EUNUCHS 0 1 0 1 +EUNUCH 10 1 11 10 +EUIK 0 1 0 1 +EUGEN 0 1 0 1 +ETHEREAL 1 1 2 1 +ETHER 2 1 3 2 +ETHELRIED'S 0 1 1 0 +ETHELRED 0 1 0 1 +ET 2 1 3 2 +ESTHER 0 1 1 0 +ESTATES 1 1 1 2 +ERNESTON 0 1 0 1 +ERNESTINE 0 1 1 0 +ERE'S 0 1 1 0 +EQUERRY'S 0 1 1 0 +EQUERRIES 0 1 0 1 +EPLORRIED 0 1 0 1 +EPIMORPHIN 0 1 0 1 +EPILRIED'S 0 1 0 1 +EPIGASTER 0 1 1 0 +EPHRAIM 0 1 1 0 +ENVANTED 
0 1 0 1 +ENTRUSTED 0 1 1 0 +ENTRACT 0 1 0 1 +ENTR'ACTE 0 1 1 0 +ENTIRE 2 1 2 3 +ENTERED 10 1 11 10 +ENSUED 2 1 3 2 +ENSNARES 0 1 1 0 +ENSLAVED 2 1 3 2 +ENSLAVE 1 1 1 2 +ENRICHED 0 1 0 1 +ENNARING 0 1 0 1 +ENJOYED 0 1 0 1 +ENJOY 3 1 4 3 +ENGLISH 4 1 4 5 +ENFRANCHISEMENT 0 1 1 0 +ENFORCEMENT 0 1 1 0 +ENEM 0 1 0 1 +ENDURETH 0 1 1 0 +ENDS 0 1 0 1 +ENDOWED 0 1 0 1 +ENDEAVOURED 0 1 1 0 +ENDEAVORED 0 1 0 1 +ENCOMCHISEMENT 0 1 0 1 +EMOTION 1 1 2 1 +EMETIC 0 1 1 0 +EMBRUN 0 1 1 0 +EMBRON 0 1 0 1 +ELYSIAN 0 1 1 0 +ELYGIANS 0 1 0 1 +ELUSIVE 0 1 0 1 +ELUS 0 1 0 1 +ELSE 12 1 12 13 +ELLIS 0 1 1 0 +ELIZABETH 1 1 1 2 +ELISIONS 0 1 1 0 +ELECTIC 0 1 0 1 +ELDER 1 1 2 1 +ELBOW 0 1 1 0 +ELBERT 0 1 1 0 +ELBER 0 1 0 1 +EITHER 8 1 8 9 +EIGHTHS 0 1 0 1 +EIGHTH 2 1 3 2 +EIGHT 9 1 9 10 +EH 2 1 2 3 +EGYPTIAN 5 1 6 5 +EGGS 1 1 2 1 +EFFIC 0 1 0 1 +EFFECT 1 1 1 2 +EDGING 2 1 3 2 +EDGED 0 1 0 1 +ECHOLYN 0 1 0 1 +EBERGASTER 0 1 0 1 +EAU 0 1 1 0 +EASILY 5 1 5 6 +EARTHLY 1 1 1 2 +EARS 3 1 3 4 +EARLS 0 1 0 1 +EARLIEST 0 1 1 0 +EAR 1 1 2 1 +EAMES 0 1 0 1 +EAD 0 1 1 0 +E'LL 0 1 1 0 +E'ER 0 1 1 0 +E 1 1 2 1 +DUSK 1 1 2 1 +DURRED 0 1 0 1 +DUPLICATES 1 1 2 1 +DUPE 0 1 0 1 +DUNNO 0 1 1 0 +DUNNING 0 1 1 0 +DUMAS 0 1 1 0 +DUM 0 1 1 0 +DULL 2 1 3 2 +DUERS 0 1 0 1 +DUDS 0 1 1 0 +DRY 5 1 6 5 +DRUGSTORE 0 1 1 0 +DRUG 0 1 0 1 +DROUGHT 0 1 0 1 +DROP 3 1 3 4 +DRINKIN 0 1 0 1 +DRIFTILY 0 1 0 1 +DRESSED 1 1 1 2 +DREADFUL 2 1 2 3 +DRAWN 1 1 1 2 +DRAWERS 1 1 2 1 +DRAW 1 1 2 1 +DRANK 2 1 2 3 +DRAGOOD 0 1 0 1 +DRAGGER 0 1 0 1 +DOWNING 0 1 1 0 +DOUB 0 1 0 1 +DOTH 1 1 2 1 +DOSE 0 1 1 0 +DORIS 0 1 0 1 +DOOM 0 1 0 1 +DONOVAN 1 1 2 1 +DONEGOOD 0 1 0 1 +DONALIN 0 1 0 1 +DON 5 1 5 6 +DOMONICO 0 1 0 1 +DOM 0 1 1 0 +DOLL 0 1 0 1 +DOIN 0 1 1 0 +DOGS 1 1 2 1 +DOEST 1 1 2 1 +DOESN'T 3 1 3 4 +DOCTRIPAR 0 1 0 1 +DOCTOR 24 1 25 24 +DOCKYARD 0 1 0 1 +DOCK 0 1 1 0 +DOAN 0 1 1 0 +DIXON 4 1 4 5 +DIVERMISH 0 1 0 1 +DITZFIELD 0 1 0 1 +DITTY 0 1 0 1 +DITCHFIELD 0 1 1 0 +DISTRUSTED 0 1 1 0 +DISTRUDGED 0 1 0 1 +DISTRICT 2 1 2 3 +DISTRESSED 2 1 2 3 +DISTRACT 0 1 0 1 +DISTINGUISHING 0 1 0 1 +DISTINCTS 0 1 0 1 +DISTICHS 0 1 1 0 +DISTAGGER 0 1 0 1 +DISSENTIENT 0 1 1 0 +DISPOSED 2 1 3 2 +DISPOS 0 1 0 1 +DISNEY 0 1 1 0 +DISINFECTING 0 1 1 0 +DISINFECT 0 1 0 1 +DISINDIAN 0 1 0 1 +DISFIGURED 0 1 1 0 +DISCOURSE 2 1 2 3 +DISCOUR 0 1 0 1 +DISASTROUS 0 1 1 0 +DISASTRATES 0 1 0 1 +DISAGREED 0 1 0 1 +DISAGREE 0 1 1 0 +DIS 0 1 0 1 +DIRTS 0 1 0 1 +DIRECTION 7 1 7 8 +DIRECTIFY 0 1 0 1 +DIRE 0 1 1 0 +DINNING 0 1 0 1 +DINARS 0 1 1 0 +DILUTE 0 1 1 0 +DIGGING 0 1 1 0 +DIGESTION 2 1 3 2 +DIGESON 0 1 0 1 +DIEU 0 1 0 1 +DIE 9 1 10 9 +DIDST 1 1 1 2 +DIDNY 0 1 0 1 +DIA 0 1 0 1 +DEVOUR 0 1 1 0 +DEVOTED 3 1 3 4 +DETERMINED 4 1 5 4 +DETECTIVES 0 1 1 0 +DETECTIN 0 1 1 0 +DESTRUCTION 0 1 1 0 +DESTINIES 0 1 1 0 +DESSERTS 1 1 2 1 +DESPOTIC 0 1 1 0 +DESPITE 2 1 3 2 +DESKED 0 1 0 1 +DESIRES 2 1 3 2 +DESIRE 9 1 9 10 +DESIGNED 2 1 2 3 +DESIGN 1 1 2 1 +DESERVED 0 1 0 1 +DESERTS 0 1 0 1 +DERELICTS 0 1 1 0 +DERDS 0 1 0 1 +DEPTOR 0 1 0 1 +DEPRECATE 0 1 1 0 +DEPLICATES 0 1 0 1 +DEODORIZING 0 1 1 0 +DENZ 0 1 0 1 +DEMETER 3 1 4 3 +DEMEANOR 0 1 0 1 +DELUDE 0 1 0 1 +DELMONICO 0 1 1 0 +DELIVERED 4 1 4 5 +DELIBERATE 0 1 1 0 +DEFTLY 0 1 0 1 +DEFENDED 0 1 0 1 +DEFEND 3 1 4 3 +DEFECTION 0 1 1 0 +DEEPENED 0 1 1 0 +DEEP 9 1 9 10 +DECORTUNA 0 1 0 1 +DECLINING 1 1 1 2 +DECLINE 0 1 0 1 +DECLARING 0 1 1 0 +DEBTS 1 1 1 2 +DEBTOR 0 1 1 0 +DEBRAMIN 0 1 0 1 +DEBRAMAN 0 1 0 1 +DEBARRED 0 1 1 0 +DEBARED 0 1 0 1 +DEATHLY 0 1 1 0 +DEATHLIKE 0 1 1 0 +DEANS 0 1 1 0 +DEAN 0 1 0 1 +DEAL 11 1 11 12 +DEACH 0 1 1 0 +DEA 0 1 0 1 +DAWNING 0 
1 0 1 +DATED 1 1 2 1 +DAT'S 0 1 0 1 +DARKAND 0 1 1 0 +DARED 3 1 3 4 +DAPHNE'S 0 1 1 0 +DANDAN 2 1 3 2 +DANCERS 0 1 0 1 +DANCER 0 1 1 0 +DAMN 0 1 1 0 +DAME'S 0 1 1 0 +DAME 1 1 1 2 +DALYS 0 1 1 0 +DAILY 2 1 2 3 +DAILIES 0 1 0 1 +DAGOS 0 1 1 0 +DAG 0 1 0 1 +CYS 0 1 0 1 +CYRUP 0 1 0 1 +CYNICISM 0 1 1 0 +CYMBALS 0 1 1 0 +CUSTOM 1 1 2 1 +CURSORY 0 1 0 1 +CURSORILY 0 1 1 0 +CURSE 1 1 2 1 +CURRENTS 0 1 1 0 +CURRANTS 0 1 0 1 +CURL 0 1 1 0 +CUPIED 0 1 0 1 +CUP 0 1 1 0 +CUNITY 0 1 0 1 +CUMICU 0 1 0 1 +CUISINE 0 1 1 0 +CRYING 1 1 1 2 +CRUX 0 1 1 0 +CRUSHING 1 1 2 1 +CRUMBLY 0 1 1 0 +CRUMBLED 0 1 0 1 +CROYDEN 0 1 0 1 +CROST 0 1 1 0 +CROSSBONES 0 1 0 1 +CROSS 7 1 8 7 +CROPPISH 0 1 0 1 +CROPFISH 0 1 0 1 +CROOKS 0 1 1 0 +CRISP 0 1 0 1 +CRIS 0 1 0 1 +CRIES 3 1 4 3 +CRIED 20 1 21 20 +CRI 0 1 0 1 +CREW 5 1 5 6 +CREOLE 0 1 0 1 +CREEL 0 1 1 0 +CREEDS 0 1 0 1 +CREDITUDE 0 1 0 1 +CRAW 0 1 0 1 +CRATES 1 1 2 1 +CRASHING 0 1 0 1 +CRAMPLED 0 1 0 1 +CRAMBLY 0 1 0 1 +CRABS 0 1 0 1 +CRAB 6 1 7 6 +COYNESS 0 1 1 0 +COY 1 1 2 1 +COXCOMB 0 1 1 0 +COWLEY'S 0 1 1 0 +COURTYARD 2 1 2 3 +COURSING 0 1 1 0 +COURFEYRAC 1 1 2 1 +COURFEREK 0 1 0 1 +COUNTENANCE 2 1 2 3 +COUNSELS 0 1 1 0 +COUNSELLOR 0 1 0 1 +COUNSEL 1 1 2 1 +COUNCILLOR 0 1 1 0 +COTTON 2 1 3 2 +COTTOM 0 1 0 1 +COSTUM 0 1 0 1 +COST 1 1 2 1 +CORYDON 0 1 1 0 +CORTONA 0 1 1 0 +CORP 0 1 0 1 +CORNERED 0 1 0 1 +CORMOR 0 1 0 1 +CORKLE 0 1 1 0 +CORDS 0 1 0 1 +CORAL 0 1 1 0 +COQUETTE 1 1 2 1 +COPPER 2 1 2 3 +COPP 0 1 0 1 +COP'S 0 1 1 0 +COP 2 1 3 2 +COOPS 0 1 1 0 +COOL 4 1 5 4 +COOKS 0 1 0 1 +COOKER 0 1 0 1 +CONTROLLED 1 1 1 2 +CONTROL 1 1 2 1 +CONTINUOUS 0 1 0 1 +CONTINUALLY 0 1 1 0 +CONTENDED 0 1 0 1 +CONSUM 0 1 0 1 +CONSUL 0 1 0 1 +CONSTITUTE 1 1 1 2 +CONSTITUENT 0 1 1 0 +CONSTANT 3 1 4 3 +CONSONANTS 0 1 1 0 +CONSOMME 0 1 1 0 +CONSOL 0 1 0 1 +CONINGSBURGH 0 1 1 0 +CONIGSBURG 0 1 0 1 +CONFIRMATION 0 1 1 0 +CONFIRMATESON 0 1 0 1 +CONFINED 0 1 0 1 +CONFINE 0 1 0 1 +CONFIDENTIALLY 0 1 1 0 +CONFIDE 1 1 2 1 +CONFICERE 0 1 1 0 +CONFERS 0 1 0 1 +CONFECTIONERIES 0 1 0 1 +CONCOCTED 1 1 2 1 +CONCLUDED 2 1 2 3 +COMPLYING 0 1 0 1 +COMPEND 0 1 0 1 +COMPASSER 0 1 0 1 +COMPARABLE 0 1 0 1 +COMORIN 0 1 1 0 +COMMUNITY 4 1 5 4 +COMMONED 0 1 0 1 +COMMITTEE 5 1 6 5 +COMMISSORY 0 1 0 1 +COMMISSARY 1 1 2 1 +COMMENTS 0 1 0 1 +COMMENT 1 1 2 1 +COMMENCED 1 1 2 1 +COMMANDS 2 1 2 3 +COMMANDER 2 1 2 3 +COMIN 0 1 0 1 +COMICAL 0 1 1 0 +COMETH 0 1 1 0 +COMEST 0 1 1 0 +COMEDY 0 1 0 1 +COLT 0 1 0 1 +COLOSSEUM 0 1 1 0 +COLONEL 27 1 28 27 +COLOGNE 0 1 1 0 +COLLETS 0 1 0 1 +COLLECTED 1 1 1 2 +COLLECT 0 1 1 0 +COLLARST 0 1 0 1 +COLISEUM 0 1 0 1 +COLDS 0 1 1 0 +COISINE 0 1 0 1 +COINS 1 1 2 1 +COINCIDENCES 0 1 1 0 +COINCIDENCE 0 1 0 1 +COIN 2 1 2 3 +COGNIZED 0 1 0 1 +COD 0 1 1 0 +COCOA 0 1 1 0 +COCKRELL 0 1 1 0 +COCKLE 0 1 0 1 +COBBER 0 1 1 0 +COAT 2 1 2 3 +COASTON 0 1 0 1 +COARSING 0 1 0 1 +COARSE 0 1 0 1 +COALESCED 0 1 1 0 +CLUXED 0 1 0 1 +CLUMB 0 1 1 0 +CLOTH 2 1 2 3 +CLOSEST 0 1 1 0 +CLOSE 14 1 14 15 +CLOMB 0 1 1 0 +CLOCKS 0 1 1 0 +CLISPIN 0 1 0 1 +CLINK 0 1 0 1 +CLING 0 1 1 0 +CLIME 1 1 2 1 +CLIMBING 0 1 0 1 +CLIMBED 0 1 0 1 +CLEVERLY 0 1 1 0 +CLEVER 2 1 3 2 +CLERVAL 0 1 0 1 +CLEFTOMANIA 0 1 0 1 +CLEFT 1 1 2 1 +CLEAVE 0 1 1 0 +CLEARED 2 1 2 3 +CLAWS 0 1 1 0 +CLAVIER 0 1 0 1 +CLASSES 3 1 4 3 +CLASS 6 1 6 7 +CLARGA 0 1 0 1 +CLARET 1 1 2 1 +CLAPTOMANIA 0 1 0 1 +CLAIRVAUX 0 1 1 0 +CLACKS 0 1 0 1 +CLACK 0 1 0 1 +CISEAUX 0 1 1 0 +CINOLA 0 1 0 1 +CINDERELLA 0 1 1 0 +CIGARS 1 1 1 2 +CIGARET 0 1 0 1 +CHURCHES 0 1 0 1 +CHURCH 13 1 14 13 +CHUCKED 0 1 1 0 +CHRISTIE 0 1 0 1 +CHRISTIANS 0 1 1 0 +CHRISTIANNING 0 1 0 1 
+CHRISTIANITY 2 1 3 2 +CHRISTIAN 0 1 0 1 +CHRISTI 0 1 1 0 +CHRISTENING 0 1 1 0 +CHRISTEN 0 1 0 1 +CHRIST 3 1 3 4 +CHOUETTE 0 1 1 0 +CHOOSE 3 1 3 4 +CHONODEMAIRE 0 1 1 0 +CHOKINGLY 0 1 0 1 +CHLORIDE 0 1 0 1 +CHIPS 1 1 2 1 +CHINTZ 0 1 1 0 +CHIN'S 0 1 0 1 +CHILLS 0 1 1 0 +CHILLED 1 1 1 2 +CHILL 1 1 1 2 +CHIEFING 0 1 0 1 +CHIEF 8 1 9 8 +CHIE 0 1 0 1 +CHIDE 0 1 1 0 +CHID 0 1 0 1 +CHEVIKI 0 1 0 1 +CHEST 3 1 3 4 +CHERRY 0 1 0 1 +CHEER 0 1 0 1 +CHEEKS 4 1 4 5 +CHEEKE 0 1 1 0 +CHEEKBONES 0 1 1 0 +CHECK 0 1 0 1 +CHAUVELIN 0 1 0 1 +CHARLIE'S 0 1 0 1 +CHARLEY'S 0 1 1 0 +CHARGED 4 1 5 4 +CHARACTERISTIC 0 1 1 0 +CHARACTERED 0 1 0 1 +CHANCES 0 1 1 0 +CHANCE 11 1 11 12 +CHALONS 0 1 1 0 +CHAFING 1 1 2 1 +CHADWELL 0 1 1 0 +CENTRE 1 1 1 2 +CENTER 2 1 3 2 +CELL 1 1 2 1 +CELEM 0 1 0 1 +CELEBRATED 2 1 3 2 +CEDRIC 1 1 2 1 +CEASE 0 1 0 1 +CAVES 0 1 0 1 +CAVERNMENT 0 1 0 1 +CAVALRYMEN 0 1 1 0 +CAUSE 6 1 7 6 +CAUGHT 5 1 5 6 +CATTLETTA 0 1 0 1 +CATTLERON 0 1 0 1 +CATHOLIC 0 1 1 0 +CATHEDRAL 0 1 1 0 +CATCHED 0 1 1 0 +CASTLE 8 1 9 8 +CASTETH 0 1 1 0 +CARRIED 11 1 12 11 +CARRIAGE 2 1 3 2 +CARMINALS 0 1 0 1 +CARLIS 0 1 0 1 +CARED 4 1 4 5 +CARE 18 1 18 19 +CAPTURED 2 1 2 3 +CAPTAIN 17 1 17 18 +CAPT 0 1 0 1 +CAPS 0 1 0 1 +CAPRIVY 0 1 0 1 +CAPRIVI'S 0 1 1 0 +CAPLICH 0 1 0 1 +CAPITULUM 0 1 1 0 +CAPITULAT 0 1 0 1 +CAPITULANTES 0 1 1 0 +CAPITULAM 0 1 0 1 +CAPITALISTS 0 1 1 0 +CAPITALIST 0 1 0 1 +CAPITAL 2 1 3 2 +CAPE 0 1 0 1 +CAP 4 1 5 4 +CANONIZED 0 1 1 0 +CANNOT 21 1 21 22 +CANNONIZED 0 1 0 1 +CAMPED 0 1 1 0 +CAMPAIGN 0 1 1 0 +CALON 0 1 0 1 +CALM 0 1 0 1 +CALLETH 0 1 1 0 +CALLEST 0 1 1 0 +CAIN 0 1 1 0 +CAGLED 0 1 0 1 +CAGE 7 1 8 7 +CACKED 0 1 1 0 +CABINETS 0 1 0 1 +CABINET 1 1 2 1 +CA'M 0 1 1 0 +C 2 1 3 2 +BYE 0 1 1 0 +BUY 4 1 4 5 +BUTTON 0 1 1 0 +BUTTERFLY 0 1 1 0 +BUTTERED 0 1 0 1 +BUTCHUSE 0 1 0 1 +BUSY 5 1 5 6 +BUSTON 0 1 0 1 +BUSINESSWHICH 0 1 1 0 +BUSINESS 12 1 12 13 +BURYING 0 1 1 0 +BURTLES 0 1 0 1 +BURST 2 1 3 2 +BURSHEBA 0 1 1 0 +BURNHILD 0 1 0 1 +BURNETH 0 1 1 0 +BURMANOIS 0 1 0 1 +BURIUM 0 1 0 1 +BURDENS 1 1 2 1 +BURDEN 1 1 1 2 +BUOYANT 0 1 0 1 +BULK 1 1 2 1 +BULBS 0 1 1 0 +BUILTON 0 1 0 1 +BUILDS 0 1 1 0 +BUFFETING 0 1 1 0 +BUFFET 0 1 0 1 +BUCK 1 1 1 2 +BRYNHILD'S 0 1 1 0 +BRUSH 3 1 3 4 +BRUNHOLD'S 0 1 0 1 +BROW 0 1 0 1 +BROTHER 17 1 18 17 +BROKER 0 1 0 1 +BRITTANNIUM 0 1 0 1 +BRING 12 1 12 13 +BRILLIANT 4 1 5 4 +BRIGHTENS 0 1 0 1 +BRIEF 2 1 3 2 +BRIDGE 3 1 4 3 +BREATHLESS 1 1 2 1 +BREATH 6 1 6 7 +BREASTPAND 0 1 0 1 +BREAST 1 1 2 1 +BRAXBURY 0 1 0 1 +BRAU 0 1 1 0 +BRASS 1 1 2 1 +BRAMMEN 0 1 0 1 +BRAMIN 0 1 0 1 +BRAMID 0 1 0 1 +BRAMIAN 0 1 0 1 +BRAMA 0 1 0 1 +BRAM 0 1 0 1 +BRACEY 0 1 0 1 +BRACELET 1 1 1 2 +BOZARD 0 1 0 1 +BOX 10 1 10 11 +BOWS 1 1 1 2 +BOURGES 0 1 1 0 +BOURGE 0 1 0 1 +BOU 0 1 0 1 +BOTTOMED 0 1 1 0 +BOTTLED 0 1 1 0 +BOTH 16 1 17 16 +BOSTON 2 1 3 2 +BORN 7 1 7 8 +BOONE 0 1 1 0 +BOON 1 1 2 1 +BOOLA 0 1 0 1 +BONSES 0 1 0 1 +BONIUS 0 1 0 1 +BOLTED 1 1 1 2 +BOLT 0 1 1 0 +BOLSHEVIKI 2 1 3 2 +BOEOTIAN 0 1 1 0 +BOBBED 0 1 0 1 +BOB'S 1 1 2 1 +BO 0 1 0 1 +BLUE 7 1 7 8 +BLOTCHET 0 1 0 1 +BLOOMIN 0 1 1 0 +BLOODSTAINED 0 1 0 1 +BLOODSHED 0 1 1 0 +BLOOD 7 1 8 7 +BLOKES 0 1 1 0 +BLOKE 0 1 1 0 +BLOCK 1 1 2 1 +BLINKED 1 1 2 1 +BLENDEST 0 1 0 1 +BLANKETED 0 1 1 0 +BLANKET 0 1 0 1 +BLAMMED 0 1 0 1 +BLACKLEG 0 1 1 0 +BLACK 13 1 13 14 +BITTER 3 1 3 4 +BITING 0 1 1 0 +BISQUE 0 1 1 0 +BISHOIS 0 1 0 1 +BIRDSEYE 0 1 1 0 +BIRD'S 0 1 0 1 +BIOLECTION 0 1 0 1 +BIN 1 1 2 1 +BILLY 0 1 0 1 +BIG 8 1 8 9 +BIDS 0 1 0 1 +BIBOCO 0 1 0 1 +BIBLE 3 1 4 3 +BHANG 0 1 1 0 +BEULAH 0 1 1 0 +BETWEEN 21 1 21 22 +BETIDE 1 1 2 1 +BETHUNE 0 1 1 0 
+BETCHA 0 1 1 0 +BETAKEN 0 1 1 0 +BET 0 1 1 0 +BEST 18 1 19 18 +BESSIE 0 1 0 1 +BESOON 0 1 0 1 +BESIDES 9 1 9 10 +BESIDE 3 1 4 3 +BERNETH 0 1 0 1 +BERING 0 1 0 1 +BENSON 0 1 1 0 +BENOIT 0 1 1 0 +BENOIS 0 1 0 1 +BENNETT'S 0 1 0 1 +BENNETT 0 1 1 0 +BENEATH 3 1 3 4 +BEND 1 1 1 2 +BEN 0 1 0 1 +BELOVED 1 1 1 2 +BELONGS 1 1 1 2 +BELONGED 0 1 1 0 +BELLY 2 1 3 2 +BELLOWED 0 1 1 0 +BELLEGER 0 1 0 1 +BELLE 0 1 1 0 +BELIKE 0 1 1 0 +BEL 0 1 0 1 +BEGUN 2 1 3 2 +BEGIN 9 1 9 10 +BEGGING 1 1 2 1 +BEFALL 0 1 0 1 +BEFAL 0 1 1 0 +BEESER 0 1 0 1 +BEDS 1 1 1 2 +BEDROOM 1 1 1 2 +BEAUMANOIR 0 1 1 0 +BEATER 0 1 0 1 +BEAT'S 0 1 0 1 +BEARING 5 1 6 5 +BEARD 1 1 1 2 +BEALE'S 0 1 1 0 +BAZA 0 1 0 1 +BAXTER 0 1 1 0 +BAXT 0 1 0 1 +BATON 0 1 1 0 +BATH 1 1 1 2 +BAT 1 1 1 2 +BASSERA 0 1 0 1 +BASEMENT 0 1 1 0 +BASE 1 1 1 2 +BARKLEY 0 1 1 0 +BARIUM 0 1 1 0 +BARGENO 0 1 0 1 +BARGELLO 0 1 1 0 +BARELY 1 1 2 1 +BAPTISMAL 0 1 1 0 +BANISH 0 1 1 0 +BANION 0 1 0 1 +BANDON 0 1 0 1 +BANDINELLO 0 1 1 0 +BAND 8 1 9 8 +BANACY 0 1 0 1 +BALLROOM 0 1 1 0 +BALLOCK 0 1 1 0 +BALLIC 0 1 0 1 +BALL 0 1 0 1 +BALES 2 1 2 3 +BALAMMED 0 1 1 0 +BAILIQUE 0 1 0 1 +BAILEY'S 0 1 0 1 +BAGDAD 0 1 0 1 +BADE 1 1 1 2 +BADAWI 0 1 1 0 +BADARI 0 1 0 1 +BACKS 1 1 1 2 +BACKING 0 1 1 0 +BACK 51 1 51 52 +AZARIAH 0 1 1 0 +AY 0 1 0 1 +AWK'ARD 0 1 1 0 +AWAY 39 1 39 40 +AW 0 1 1 0 +AVIGUE 0 1 0 1 +AVIDITY 0 1 1 0 +AVIDE 0 1 0 1 +AVENUE 2 1 3 2 +AVE 0 1 1 0 +AUNTS 0 1 0 1 +ATTENTIONS 0 1 0 1 +ATTENDED 0 1 0 1 +ATTEMPTED 2 1 3 2 +ATOM 0 1 0 1 +ATHELSTANE 0 1 1 0 +ASSYRIAL 0 1 0 1 +ASSUME 1 1 1 2 +ASSER 0 1 0 1 +ASSAILING 0 1 1 0 +ASLEEP 9 1 10 9 +ASKS 0 1 1 0 +ASIA 0 1 1 0 +ASHUR 0 1 1 0 +ASHORE 4 1 4 5 +ASHER 0 1 0 1 +ASH 1 1 1 2 +ARTIST 5 1 6 5 +ARTHUR 0 1 1 0 +ARSTS 0 1 1 0 +ARSINOE'S 0 1 1 0 +ARSENO'S 0 1 0 1 +ARSENAL 0 1 0 1 +ARRIVES 1 1 1 2 +ARRIVE 3 1 4 3 +ARRANGED 1 1 2 1 +ARQUEBALD 0 1 0 1 +ARPE 0 1 0 1 +AROUSED 0 1 1 0 +AROSE 2 1 2 3 +ARMS 9 1 9 10 +ARMANQUIN 0 1 0 1 +ARKANSAS 0 1 1 0 +ARISING 0 1 0 1 +ARISED 0 1 0 1 +ARIAD 0 1 0 1 +ARDENTS 0 1 0 1 +ARDENT 2 1 3 2 +ARCHISON 0 1 0 1 +ARCHIBALD 0 1 1 0 +ARCHIAS 0 1 1 0 +APT 1 1 1 2 +APPROVE 0 1 1 0 +APPROPATOR 0 1 0 1 +APPEARED 8 1 8 9 +APPEALED 0 1 1 0 +APPARENTLY 4 1 5 4 +APOMORPHINE 0 1 1 0 +APOLLO 0 1 0 1 +APOLIS 0 1 0 1 +APES 1 1 2 1 +APE 1 1 1 2 +APART 3 1 4 3 +ANYWAY 1 1 2 1 +ANYTHING 31 1 31 32 +ANYONE'S 0 1 1 0 +ANVILS 0 1 1 0 +ANTOLIAN 0 1 1 0 +ANTIDOTES 0 1 1 0 +ANTHONY 0 1 1 0 +ANSWERS 1 1 2 1 +ANSWERED 26 1 26 27 +ANNOYED 2 1 3 2 +ANNOY 0 1 0 1 +ANNOUNCE 1 1 1 2 +ANGUISH 3 1 4 3 +ANGESTON 0 1 1 0 +ANGERSON 0 1 0 1 +ANDY'S 0 1 0 1 +ANDS 0 1 1 0 +ANDBUT 0 1 1 0 +ANCESTORS 0 1 1 0 +ANALYSIS 0 1 1 0 +ANADULTERATED 0 1 0 1 +AMY 0 1 0 1 +AMOUR 0 1 1 0 +AMORE 0 1 0 1 +AMITY 0 1 0 1 +AMID 1 1 1 2 +AMERD 0 1 0 1 +AMBILS 0 1 0 1 +AMATIC 0 1 0 1 +ALYCIAN 0 1 0 1 +ALTHIE 0 1 0 1 +ALREADY 15 1 16 15 +ALOUD 2 1 2 3 +ALONGER 0 1 1 0 +ALMS 1 1 2 1 +ALLS 0 1 1 0 +ALLOWED 6 1 7 6 +ALLOWANCE 0 1 1 0 +ALLIED 0 1 1 0 +ALLEN 0 1 1 0 +ALLEGATOR 0 1 0 1 +ALLAYS 0 1 1 0 +ALLAN 0 1 0 1 +ALKALOID 0 1 1 0 +ALISANDRO 0 1 0 1 +ALIGHTED 1 1 2 1 +ALID 0 1 0 1 +ALICELA 0 1 0 1 +ALI'S 0 1 1 0 +ALF 0 1 1 0 +ALEXAM 0 1 0 1 +ALESSANDRO 0 1 1 0 +ALE 0 1 0 1 +ALDITY 0 1 0 1 +ALCOHOLBA 0 1 0 1 +ALBERT 10 1 10 11 +ALBER 0 1 0 1 +ALARMED 1 1 1 2 +ALAD 0 1 0 1 +AKELET 0 1 0 1 +AKALOID 0 1 0 1 +AKALITES 0 1 0 1 +AIR 10 1 10 11 +AID 4 1 5 4 +AGRIAN 0 1 0 1 +AGRARIAN 0 1 1 0 +AGONE 0 1 1 0 +AGAD 0 1 0 1 +AFTERWARDS 5 1 6 5 +AFTERWARD 2 1 2 3 +AFOOT 0 1 0 1 +AFIRE 0 1 1 0 +AFAR 0 1 0 1 +ADVENTURES 2 1 3 2 +ADULT 0 1 1 0 +ADULGE 0 1 0 1 +ADN'T 0 1 1 
0 +ADJOINING 0 1 0 1 +ADHERENTS 0 1 1 0 +ADHERENCE 0 1 0 1 +ADDISMA 0 1 0 1 +ADDIER 0 1 0 1 +ADDEST 0 1 0 1 +ADAIR 0 1 1 0 +AD 0 1 1 0 +ACQUIRE 0 1 0 1 +ACQUAINTED 3 1 3 4 +ACKNOWLEDGE 1 1 2 1 +ACHESON 0 1 1 0 +ACCUSE 0 1 0 1 +ACCOUNT 7 1 7 8 +ACCEPT 4 1 5 4 +ABSTAINED 0 1 0 1 +ABSTAIN 0 1 1 0 +ABSORBED 4 1 4 5 +ABSOLVED 0 1 1 0 +ABSENTEE 0 1 1 0 +ABSENTE 0 1 0 1 +ABROAD 4 1 4 5 +ABODE 2 1 3 2 +ABIDING 0 1 0 1 +ABASEMENT 0 1 0 1 +ZEAL 1 0 1 1 +ZAMAN 4 0 4 4 +YUNKERS 1 0 1 1 +YOURSELF 9 0 9 9 +YOUNGEST 1 0 1 1 +YOUNGERS 2 0 2 2 +YOUNGER 8 0 8 8 +YONDER 5 0 5 5 +YOLKS 1 0 1 1 +YIELDED 1 0 1 1 +YIELD 1 0 1 1 +YESTERDAY 3 0 3 3 +YEP 1 0 1 1 +YEOMAN 1 0 1 1 +YELLOW 4 0 4 4 +YELLED 1 0 1 1 +YELL 1 0 1 1 +YEARNS 1 0 1 1 +YEARNING 1 0 1 1 +YEA 1 0 1 1 +YAWN 1 0 1 1 +YARNS 1 0 1 1 +YARDS 1 0 1 1 +YACHT 1 0 1 1 +WYLDER'S 1 0 1 1 +WYLDER 1 0 1 1 +WRITTEN 5 0 5 5 +WRITING 1 0 1 1 +WRINKLES 1 0 1 1 +WRINGING 1 0 1 1 +WRIGGLING 1 0 1 1 +WRETCH 3 0 3 3 +WRECKAGE 1 0 1 1 +WRAPPING 1 0 1 1 +WOUNDED 6 0 6 6 +WOUND 2 0 2 2 +WOULDST 1 0 1 1 +WOULDN'T 9 0 9 9 +WORST 1 0 1 1 +WORN 3 0 3 3 +WORLDLY 1 0 1 1 +WORKSHOP 1 0 1 1 +WORKHOUSE 1 0 1 1 +WORKERS 1 0 1 1 +WORKER 1 0 1 1 +WORE 3 0 3 3 +WOODSON 1 0 1 1 +WONDROUS 1 0 1 1 +WONDERS 1 0 1 1 +WONDERING 1 0 1 1 +WONDERFULLY 1 0 1 1 +WONDERFUL 6 0 6 6 +WOMAN'S 2 0 2 2 +WOLF 1 0 1 1 +WOES 1 0 1 1 +WIZARDS 1 0 1 1 +WITTY 1 0 1 1 +WITNESSED 1 0 1 1 +WITHIN 11 0 11 11 +WITHHELD 2 0 2 2 +WITHDRAWN 2 0 2 2 +WITHAL 2 0 2 2 +WITCHES 1 0 1 1 +WIT 1 0 1 1 +WISTFUL 1 0 1 1 +WISHING 3 0 3 3 +WISHES 3 0 3 3 +WISH 15 0 15 15 +WISELY 1 0 1 1 +WISE 3 0 3 3 +WISDOM 4 0 4 4 +WISCONSIN 1 0 1 1 +WIRE 1 0 1 1 +WINTERS 1 0 1 1 +WINKED 1 0 1 1 +WINGS 2 0 2 2 +WINDOWS 1 0 1 1 +WIND 6 0 6 6 +WIN 2 0 2 2 +WILLINGLY 1 0 1 1 +WILLING 4 0 4 4 +WILFUL 1 0 1 1 +WIDOWER 1 0 1 1 +WIDEN 1 0 1 1 +WICKED 2 0 2 2 +WHOSO 1 0 1 1 +WHOOP 1 0 1 1 +WHOM 20 0 20 20 +WHOEVER 2 0 2 2 +WHISTLING 2 0 2 2 +WHISTLE 3 0 3 3 +WHISPERED 2 0 2 2 +WHIPPINGS 1 0 1 1 +WHIP 2 0 2 2 +WHIMPERING 1 0 1 1 +WHIM 1 0 1 1 +WHILST 1 0 1 1 +WHEREVER 2 0 2 2 +WHEREUPON 3 0 3 3 +WHEREIN 2 0 2 2 +WHEREFORE 2 0 2 2 +WHEREBY 2 0 2 2 +WHENEVER 4 0 4 4 +WHENCE 5 0 5 5 +WHEELS 1 0 1 1 +WHATSOEVER 1 0 1 1 +WETTED 1 0 1 1 +WET 3 0 3 3 +WESTERN 2 0 2 2 +WEST 3 0 3 3 +WEREN'T 1 0 1 1 +WEPT 2 0 2 2 +WENCH 1 0 1 1 +WELSH 1 0 1 1 +WELCOMED 1 0 1 1 +WELCOME 4 0 4 4 +WEIGHTY 1 0 1 1 +WEIGHT 1 0 1 1 +WEIGHING 1 0 1 1 +WEEPING 3 0 3 3 +WEEKS 1 0 1 1 +WEEKLY 1 0 1 1 +WEEK 6 0 6 6 +WEDNESDAY 2 0 2 2 +WEDDING 7 0 7 7 +WEB 1 0 1 1 +WEATHER 5 0 5 5 +WEARY 4 0 4 4 +WEARING 2 0 2 2 +WEAPONS 1 0 1 1 +WEAPON 1 0 1 1 +WEALTHY 3 0 3 3 +WEALTH 3 0 3 3 +WEAKNESS 4 0 4 4 +WEAK 2 0 2 2 +WE'D 1 0 1 1 +WAZIR 5 0 5 5 +WAYS 5 0 5 5 +WAYLAID 1 0 1 1 +WAVING 1 0 1 1 +WAVES 2 0 2 2 +WAVE 1 0 1 1 +WATERY 1 0 1 1 +WATERVILLE 1 0 1 1 +WATERS 1 0 1 1 +WATCHING 7 0 7 7 +WASN'T 3 0 3 3 +WASHINGTON 2 0 2 2 +WASHED 4 0 4 4 +WASH 4 0 4 4 +WARS 2 0 2 2 +WARRANT 1 0 1 1 +WARNING 1 0 1 1 +WARNER 1 0 1 1 +WARN'T 2 0 2 2 +WARMTH 1 0 1 1 +WARMLY 1 0 1 1 +WAREHOUSES 1 0 1 1 +WANTON 1 0 1 1 +WANTING 1 0 1 1 +WALLS 4 0 4 4 +WALLET 2 0 2 2 +WALKED 10 0 10 10 +WAKING 2 0 2 2 +WAKED 2 0 2 2 +WAITERS 1 0 1 1 +WAITER 1 0 1 1 +WAIT 11 0 11 11 +WAISTCOAT 3 0 3 3 +WAIST 1 0 1 1 +WAGONS 1 0 1 1 +WAGON 1 0 1 1 +WADDLED 1 0 1 1 +W 1 0 1 1 +VRONSKY 1 0 1 1 +VOYAGES 1 0 1 1 +VOWS 2 0 2 2 +VOW 1 0 1 1 +VOTING 2 0 2 2 +VOTED 1 0 1 1 +VOLUNTEERS 2 0 2 2 +VOLUNTARILY 1 0 1 1 +VOLUMINOUS 1 0 1 1 +VOLUME 2 0 2 2 +VOLLEY 1 0 1 1 +VOLCANOES 1 0 1 1 +VOICELESS 1 0 1 1 +VOICE 20 0 20 20 +VOCAL 2 0 2 2 +VITRIOL 
1 0 1 1 +VITAL 1 0 1 1 +VISITOR 4 0 4 4 +VISITING 1 0 1 1 +VISITED 2 0 2 2 +VISIT 8 0 8 8 +VISION 1 0 1 1 +VISCOUNT 1 0 1 1 +VIRTUOUS 2 0 2 2 +VIRTUE 2 0 2 2 +VIRGINIA 1 0 1 1 +VIOLENTLY 1 0 1 1 +VINTAGE 2 0 2 2 +VINE 1 0 1 1 +VINDICTIVENESS 1 0 1 1 +VILLAGE 2 0 2 2 +VILE 1 0 1 1 +VIGOROUS 3 0 3 3 +VIGILANT 1 0 1 1 +VIEWS 1 0 1 1 +VIEW 1 0 1 1 +VICTORY 1 0 1 1 +VICTORIAN 1 0 1 1 +VICTIMS 1 0 1 1 +VICTIM 3 0 3 3 +VICIOUS 3 0 3 3 +VICES 1 0 1 1 +VICE 1 0 1 1 +VEXED 1 0 1 1 +VEXATION 1 0 1 1 +VESSEL 2 0 2 2 +VERSE 1 0 1 1 +VERDICT 3 0 3 3 +VENTURE 2 0 2 2 +VENICE 2 0 2 2 +VENIAL 1 0 1 1 +VENGEANCE 1 0 1 1 +VEINS 1 0 1 1 +VEILS 1 0 1 1 +VEIL 2 0 2 2 +VEHICLES 1 0 1 1 +VEGETABLES 1 0 1 1 +VEGETABLE 1 0 1 1 +VECCHIO 1 0 1 1 +VAUDEVILLE 1 0 1 1 +VARIOUS 2 0 2 2 +VARIES 1 0 1 1 +VARIED 1 0 1 1 +VANITY 1 0 1 1 +VANISHED 1 0 1 1 +VAMPA 2 0 2 2 +VALUES 2 0 2 2 +VALUE 3 0 3 3 +VALUABLES 1 0 1 1 +VALJEAN'S 3 0 3 3 +VALJEAN 7 0 7 7 +VALET 2 0 2 2 +VAGUELY 1 0 1 1 +VAGUE 1 0 1 1 +VACATION 1 0 1 1 +VACANTLY 1 0 1 1 +UTTERLY 3 0 3 3 +UTTERING 1 0 1 1 +UTMOST 5 0 5 5 +USURPER 2 0 2 2 +USING 3 0 3 3 +USEST 1 0 1 1 +URGE 1 0 1 1 +UPSET 1 0 1 1 +UPRIGHT 1 0 1 1 +UPPER 5 0 5 5 +UNWEPT 1 0 1 1 +UNUSUALLY 1 0 1 1 +UNUSUAL 2 0 2 2 +UNSWERVING 1 0 1 1 +UNSOUGHT 1 0 1 1 +UNSELFISH 1 0 1 1 +UNSEASONABLE 1 0 1 1 +UNREASONABLE 1 0 1 1 +UNPRESSED 1 0 1 1 +UNPLEASANT 3 0 3 3 +UNPITIED 1 0 1 1 +UNNATURAL 1 0 1 1 +UNMISTAKABLY 1 0 1 1 +UNLUCKY 2 0 2 2 +UNLIKELY 1 0 1 1 +UNKNOWN 2 0 2 2 +UNKIND 1 0 1 1 +UNJOINTED 1 0 1 1 +UNIVERSE 1 0 1 1 +UNIVERSAL 5 0 5 5 +UNITED 5 0 5 5 +UNISON 1 0 1 1 +UNIONISTS 1 0 1 1 +UNION 1 0 1 1 +UNINTENTIONAL 1 0 1 1 +UNIFORM 2 0 2 2 +UNHEARD 1 0 1 1 +UNHAPPINESS 1 0 1 1 +UNGRATEFUL 3 0 3 3 +UNFORTUNATELY 2 0 2 2 +UNFORTUNATE 2 0 2 2 +UNFLATTERING 1 0 1 1 +UNEASY 4 0 4 4 +UNEASILY 1 0 1 1 +UNDOUBTEDLY 1 0 1 1 +UNDERTOOK 1 0 1 1 +UNDERTONE 1 0 1 1 +UNDERTAKE 1 0 1 1 +UNDERSTOOD 6 0 6 6 +UNDERSTANDS 1 0 1 1 +UNDERSTANDING 5 0 5 5 +UNDERSTAND 7 0 7 7 +UNDERNEATH 1 0 1 1 +UNDERGROUND 1 0 1 1 +UNDERGO 1 0 1 1 +UNCONNECTED 1 0 1 1 +UNCONCERN 1 0 1 1 +UNCOMMON 1 0 1 1 +UNCOMFORTABLY 2 0 2 2 +UNCOMFORTABLE 1 0 1 1 +UNCLE'S 2 0 2 2 +UNCERTAIN 2 0 2 2 +UNBURDEN 1 0 1 1 +UNAWARE 1 0 1 1 +UNASSISTED 1 0 1 1 +UNALTERABLE 1 0 1 1 +UNABLE 1 0 1 1 +UGLY 1 0 1 1 +TYRANTS 1 0 1 1 +TYRANT 2 0 2 2 +TYPE 1 0 1 1 +TWIST 1 0 1 1 +TWILIGHT 1 0 1 1 +TWICE 2 0 2 2 +TWENTY 16 0 16 16 +TWAS 1 0 1 1 +TWAIN 2 0 2 2 +TUTORS 2 0 2 2 +TUTOR 1 0 1 1 +TURRETS 1 0 1 1 +TURNING 6 0 6 6 +TURKISH 1 0 1 1 +TURK 1 0 1 1 +TURBAN 1 0 1 1 +TUNE 1 0 1 1 +TUMULT 1 0 1 1 +TUMBLE 1 0 1 1 +TUG 1 0 1 1 +TUESDAY 1 0 1 1 +TUCKED 1 0 1 1 +TUBE 2 0 2 2 +TRUTH 10 0 10 10 +TRUSTWORTHY 1 0 1 1 +TRUSTED 1 0 1 1 +TRUST 3 0 3 3 +TRUNK 1 0 1 1 +TRUCE 2 0 2 2 +TROUT 1 0 1 1 +TROUSERS 2 0 2 2 +TROUBLING 1 0 1 1 +TROUBLED 6 0 6 6 +TROUBLE 8 0 8 8 +TROOPS 2 0 2 2 +TROLL 1 0 1 1 +TRIUMPHING 1 0 1 1 +TRIP 2 0 2 2 +TRIM 1 0 1 1 +TRIFLING 3 0 3 3 +TRIES 3 0 3 3 +TRIBUTE 2 0 2 2 +TRIANGLE 1 0 1 1 +TRIAL 3 0 3 3 +TREND 1 0 1 1 +TREMBLING 2 0 2 2 +TREMBLE 1 0 1 1 +TREES 3 0 3 3 +TREE 9 0 9 9 +TREATMENT 1 0 1 1 +TREATED 1 0 1 1 +TREAT 3 0 3 3 +TREASONS 1 0 1 1 +TREACHEROUSLY 1 0 1 1 +TRAVILLA 1 0 1 1 +TRAVELLERS 2 0 2 2 +TRAP 2 0 2 2 +TRANSPORTED 2 0 2 2 +TRANSPARENT 1 0 1 1 +TRANSLATED 1 0 1 1 +TRANSITORINESS 1 0 1 1 +TRANSFORMING 1 0 1 1 +TRANSFIGURED 1 0 1 1 +TRANSFERENCE 1 0 1 1 +TRANQUILLITIES 1 0 1 1 +TRAMP 2 0 2 2 +TRAGIC 1 0 1 1 +TRADITIONAL 1 0 1 1 +TRACEABLE 1 0 1 1 +TRACE 2 0 2 2 +TOY 1 0 1 1 +TOWNSFOLK 1 0 1 1 +TOWERS 1 0 1 1 +TOWERING 1 0 1 1 +TOUCHING 4 
0 4 4 +TOTING 1 0 1 1 +TOSSING 1 0 1 1 +TOSSED 1 0 1 1 +TORTURES 1 0 1 1 +TORN 2 0 2 2 +TORMENTOR 2 0 2 2 +TOPS 1 0 1 1 +TOPIC 1 0 1 1 +TONGUES 1 0 1 1 +TONES 2 0 2 2 +TONE 6 0 6 6 +TOMBS 1 0 1 1 +TOMATO 1 0 1 1 +TOLERABLY 1 0 1 1 +TOKEN 1 0 1 1 +TOILING 1 0 1 1 +TOILETTE 1 0 1 1 +TOIL 1 0 1 1 +TOGETHER 11 0 11 11 +TOES 2 0 2 2 +TOBACCO 7 0 7 7 +TOASTED 2 0 2 2 +TOAST 1 0 1 1 +TIS 4 0 4 4 +TIPPLING 1 0 1 1 +TINY 1 0 1 1 +TINKLE 1 0 1 1 +TINCTURED 1 0 1 1 +TIMEPIECE 1 0 1 1 +TIME'S 1 0 1 1 +TIMBER 1 0 1 1 +TIGHTLY 2 0 2 2 +TIGHT 1 0 1 1 +TIDES 1 0 1 1 +TICKING 1 0 1 1 +TICKET 1 0 1 1 +TIBER 1 0 1 1 +THYSELF 3 0 3 3 +THYME 1 0 1 1 +THWARTED 1 0 1 1 +THURSDAY 1 0 1 1 +THUNDER 3 0 3 3 +THRUST 6 0 6 6 +THROWN 1 0 1 1 +THROW 2 0 2 2 +THROUGHOUT 3 0 3 3 +THRONE 2 0 2 2 +THROBBED 1 0 1 1 +THROAT 2 0 2 2 +THRILLING 1 0 1 1 +THREES 1 0 1 1 +THREATS 3 0 3 3 +THREATENED 1 0 1 1 +THRACE 1 0 1 1 +THOUSANDTH 1 0 1 1 +THOUGHTS 5 0 5 5 +THOUGHTFUL 2 0 2 2 +THOROUGHLY 1 0 1 1 +THORNTON 4 0 4 4 +THONG 1 0 1 1 +THOMAS 2 0 2 2 +THIRTY 7 0 7 7 +THIRTEEN 1 0 1 1 +THIRSTY 1 0 1 1 +THIRSTING 1 0 1 1 +THINKS 2 0 2 2 +THIEVES 1 0 1 1 +THIEF 2 0 2 2 +THICKENING 1 0 1 1 +THICK 4 0 4 4 +THEY'D 2 0 2 2 +THEREWITH 1 0 1 1 +THEREIN 3 0 3 3 +THEREFORE 12 0 12 12 +THEREAFTER 1 0 1 1 +THERE'LL 1 0 1 1 +THEORY 2 0 2 2 +THEOLOGIANS 1 0 1 1 +THENCEFORTH 1 0 1 1 +THENCE 1 0 1 1 +THANKFUL 2 0 2 2 +THANKED 2 0 2 2 +THANK 7 0 7 7 +TEXAS 1 0 1 1 +TESTING 1 0 1 1 +TESTIMONY 1 0 1 1 +TESTIFY 2 0 2 2 +TERROR 5 0 5 5 +TERRIFIC 2 0 2 2 +TERRIBLE 4 0 4 4 +TERMS 1 0 1 1 +TERM 2 0 2 2 +TERENTIUS 1 0 1 1 +TENDING 1 0 1 1 +TENDERNESS 1 0 1 1 +TENDERLY 1 0 1 1 +TENDER 3 0 3 3 +TENDENCY 1 0 1 1 +TENACITY 1 0 1 1 +TEMPTRESS 1 0 1 1 +TEMPTING 1 0 1 1 +TEMPTATION 4 0 4 4 +TEMPT 1 0 1 1 +TEMPORARY 2 0 2 2 +TEMPLARS 2 0 2 2 +TEMPEST 1 0 1 1 +TEMPERATURE 1 0 1 1 +TEMPERATE 2 0 2 2 +TEMPERAMENT 1 0 1 1 +TEMPER 1 0 1 1 +TELLING 2 0 2 2 +TELEPHONE 1 0 1 1 +TEEMING 1 0 1 1 +TECHNICAL 1 0 1 1 +TEASPOONFUL 1 0 1 1 +TEARS 6 0 6 6 +TEARING 1 0 1 1 +TEACHING 1 0 1 1 +TEACHERS 1 0 1 1 +TEACH 2 0 2 2 +TAYLOR 1 0 1 1 +TAXES 1 0 1 1 +TAUNTS 1 0 1 1 +TAUGHT 1 0 1 1 +TASTES 1 0 1 1 +TASTED 2 0 2 2 +TASK 3 0 3 3 +TARRIED 1 0 1 1 +TAPE 1 0 1 1 +TAP 1 0 1 1 +TANNER 1 0 1 1 +TALL 2 0 2 2 +TALKING 5 0 5 5 +TALKER 1 0 1 1 +TALK 15 0 15 15 +TALES 1 0 1 1 +TALENT 1 0 1 1 +TAKINGS 1 0 1 1 +TAKES 3 0 3 3 +TAINTED 1 0 1 1 +TAILS 2 0 2 2 +TAILOR'S 1 0 1 1 +TAIL 3 0 3 3 +TAGGING 1 0 1 1 +TACK 1 0 1 1 +TABLETS 2 0 2 2 +TABLES 2 0 2 2 +TABLE 7 0 7 7 +SYSTEM 2 0 2 2 +SYRINGE 1 0 1 1 +SYMPTOMS 1 0 1 1 +SYMPATHY 4 0 4 4 +SYMPATHIES 1 0 1 1 +SWUNG 3 0 3 3 +SWITCHED 1 0 1 1 +SWITCH 1 0 1 1 +SWISS 1 0 1 1 +SWINGING 1 0 1 1 +SWINGED 1 0 1 1 +SWIMS 1 0 1 1 +SWIFTLY 1 0 1 1 +SWEPT 1 0 1 1 +SWELL 1 0 1 1 +SWEETNESS 2 0 2 2 +SWEETMEATS 2 0 2 2 +SWEETMEAT 2 0 2 2 +SWEET 3 0 3 3 +SWEEPING 1 0 1 1 +SWEEP 1 0 1 1 +SWEAR 5 0 5 5 +SWAYING 1 0 1 1 +SWAY 1 0 1 1 +SWARMED 2 0 2 2 +SWAM 1 0 1 1 +SWALLOWING 1 0 1 1 +SWALLOWED 4 0 4 4 +SVIAZHSKY 1 0 1 1 +SUSTAINS 1 0 1 1 +SUSPICIOUS 3 0 3 3 +SUSPENDED 1 0 1 1 +SUSPECTED 6 0 6 6 +SUSPECT 1 0 1 1 +SUSAN'S 1 0 1 1 +SURVEYED 1 0 1 1 +SURROUNDINGS 1 0 1 1 +SURROUNDING 1 0 1 1 +SURROUNDED 1 0 1 1 +SURRENDERING 1 0 1 1 +SURRENDERED 1 0 1 1 +SURRENDER 2 0 2 2 +SURPRISED 2 0 2 2 +SURPRISE 4 0 4 4 +SURPLICE 1 0 1 1 +SURPASS 1 0 1 1 +SURMOUNTED 1 0 1 1 +SURLY 1 0 1 1 +SURFACE 3 0 3 3 +SUPPRESS 1 0 1 1 +SUPPOSITION 1 0 1 1 +SUPPOSED 6 0 6 6 +SUPPORTED 1 0 1 1 +SUPPORT 1 0 1 1 +SUPPLY 2 0 2 2 +SUPPLIED 3 0 3 3 +SUPPLICATION 1 0 1 1 +SUPPER 1 0 1 1 +SUPERNATURAL 2 0 
2 2 +SUPERNACULUM 1 0 1 1 +SUPERLATIVE 1 0 1 1 +SUPERIORS 1 0 1 1 +SUPERIOR 4 0 4 4 +SUP 1 0 1 1 +SUNSHINY 1 0 1 1 +SUNSHINE 1 0 1 1 +SUNRISE 2 0 2 2 +SUNNYSIDE 1 0 1 1 +SUNG 2 0 2 2 +SUNDAY 4 0 4 4 +SUMS 2 0 2 2 +SUMMONED 1 0 1 1 +SUMMON 1 0 1 1 +SUMMIT 1 0 1 1 +SUMMER 6 0 6 6 +SULTRY 1 0 1 1 +SULTAN 2 0 2 2 +SUITS 1 0 1 1 +SUITABLE 2 0 2 2 +SUICIDE 1 0 1 1 +SUGGESTED 2 0 2 2 +SUGAR 9 0 9 9 +SUFFOLK 1 0 1 1 +SUFFICIENTLY 2 0 2 2 +SUFFERINGS 1 0 1 1 +SUFFERING 2 0 2 2 +SUFFERED 3 0 3 3 +SUFFER 1 0 1 1 +SUDDENLY 8 0 8 8 +SUCK 1 0 1 1 +SUCH 44 0 44 44 +SUCCUMBED 1 0 1 1 +SUCCESSIVELY 1 0 1 1 +SUCCESSIVE 1 0 1 1 +SUCCESSFULLY 1 0 1 1 +SUCCESSES 2 0 2 2 +SUCCESS 2 0 2 2 +SUCCEEDING 1 0 1 1 +SUCCEEDED 3 0 3 3 +SUCCEED 1 0 1 1 +SUBURB 1 0 1 1 +SUBSTITUTING 1 0 1 1 +SUBSTANCES 1 0 1 1 +SUBSTANCE 3 0 3 3 +SUBSISTENCE 1 0 1 1 +SUBSIDED 1 0 1 1 +SUBSEQUENT 1 0 1 1 +SUBORDINATED 1 0 1 1 +SUBMITTED 2 0 2 2 +SUBMISSIVE 1 0 1 1 +SUBMISSION 1 0 1 1 +SUBJECTS 6 0 6 6 +SUBJECTED 3 0 3 3 +SUBDUED 2 0 2 2 +STYLED 1 0 1 1 +STYLE 1 0 1 1 +STURDY 1 0 1 1 +STUPID 2 0 2 2 +STUMBLED 1 0 1 1 +STUFFS 1 0 1 1 +STUFF 1 0 1 1 +STUDYING 1 0 1 1 +STUDY 1 0 1 1 +STUDENTS 1 0 1 1 +STUDENT 2 0 2 2 +STUCK 2 0 2 2 +STRUGGLES 1 0 1 1 +STRUCTURE 1 0 1 1 +STROVE 1 0 1 1 +STRONGLY 1 0 1 1 +STRONGER 1 0 1 1 +STRONG 12 0 12 12 +STRODE 1 0 1 1 +STRIPPED 2 0 2 2 +STRIKING 1 0 1 1 +STRIDES 1 0 1 1 +STRICTLY 1 0 1 1 +STREWN 1 0 1 1 +STRETCHING 1 0 1 1 +STRETCHER 1 0 1 1 +STRETCH 1 0 1 1 +STRENUOUSLY 1 0 1 1 +STRENGTHENED 2 0 2 2 +STREETS 2 0 2 2 +STREAM 4 0 4 4 +STRATAGEM 1 0 1 1 +STRANGER 3 0 3 3 +STRANGELY 1 0 1 1 +STRAINING 1 0 1 1 +STRAIGHTWAY 1 0 1 1 +STRAIGHTFORWARD 1 0 1 1 +STOUT 1 0 1 1 +STORY 9 0 9 9 +STORMED 1 0 1 1 +STORM 1 0 1 1 +STOREHOUSES 1 0 1 1 +STOPPING 3 0 3 3 +STOP 5 0 5 5 +STOOL 2 0 2 2 +STONES 4 0 4 4 +STOMACH 3 0 3 3 +STOLE 3 0 3 3 +STIRRED 1 0 1 1 +STIR 1 0 1 1 +STILE 1 0 1 1 +STIFLING 1 0 1 1 +STIFLED 2 0 2 2 +STIFLE 3 0 3 3 +STIFFNESS 1 0 1 1 +STIFF 2 0 2 2 +STEWART 1 0 1 1 +STEWARDS 1 0 1 1 +STEWARD 1 0 1 1 +STEPHEN 1 0 1 1 +STEPAN 2 0 2 2 +STENOGRAPHIC 1 0 1 1 +STEMS 1 0 1 1 +STEERAGE 2 0 2 2 +STEEP 1 0 1 1 +STEEL 1 0 1 1 +STEED 1 0 1 1 +STEAMED 1 0 1 1 +STEAMBOAT 2 0 2 2 +STEALTHILY 1 0 1 1 +STEAL 1 0 1 1 +STEADY 3 0 3 3 +STEADILY 1 0 1 1 +STATUES 3 0 3 3 +STATIONED 2 0 2 2 +STATION 2 0 2 2 +STATESMAN 1 0 1 1 +STATEMENT 3 0 3 3 +STAT 1 0 1 1 +STARVING 2 0 2 2 +STARVE 1 0 1 1 +STARTLING 1 0 1 1 +STARTLED 1 0 1 1 +STARTING 1 0 1 1 +STARTED 10 0 10 10 +STARES 1 0 1 1 +STARCHY 1 0 1 1 +STANLEY 2 0 2 2 +STANDPOINT 1 0 1 1 +STANDING 10 0 10 10 +STAND 7 0 7 7 +STAMPED 1 0 1 1 +STAMMERED 1 0 1 1 +STAMMER 1 0 1 1 +STAKED 1 0 1 1 +STAKE 1 0 1 1 +STAIRCASE 1 0 1 1 +STAGE 5 0 5 5 +STABLE 1 0 1 1 +SQUIRE 3 0 3 3 +SQUEEZE 1 0 1 1 +SQUEAKS 1 0 1 1 +SQUATTED 1 0 1 1 +SQUARE 2 0 2 2 +SQUALL 1 0 1 1 +SQUALID 1 0 1 1 +SQUAD 2 0 2 2 +SPYING 1 0 1 1 +SPY 1 0 1 1 +SPRINKLES 1 0 1 1 +SPRINGS 1 0 1 1 +SPRING 4 0 4 4 +SPRIG 1 0 1 1 +SPREADS 1 0 1 1 +SPREAD 4 0 4 4 +SPRANG 3 0 3 3 +SPOTTED 1 0 1 1 +SPOT 6 0 6 6 +SPOON 1 0 1 1 +SPOKEN 2 0 2 2 +SPOKE 15 0 15 15 +SPOILS 1 0 1 1 +SPLIT 2 0 2 2 +SPITEFUL 1 0 1 1 +SPIT 1 0 1 1 +SPIRITUAL 1 0 1 1 +SPIRAL 1 0 1 1 +SPINSTER 1 0 1 1 +SPIDER 1 0 1 1 +SPHERE 1 0 1 1 +SPENT 3 0 3 3 +SPELL 1 0 1 1 +SPEEDILY 1 0 1 1 +SPEED 1 0 1 1 +SPECULATED 1 0 1 1 +SPECTATORS 1 0 1 1 +SPECTACLE 1 0 1 1 +SPECIES 1 0 1 1 +SPECIALTY 1 0 1 1 +SPECIAL 3 0 3 3 +SPEAKS 1 0 1 1 +SPEAKING 7 0 7 7 +SPEAKER 1 0 1 1 +SPEAK 15 0 15 15 +SPARROWS 1 0 1 1 +SPARK 1 0 1 1 +SPARING 1 0 1 1 +SPARE 1 0 1 1 +SPANKER 1 0 1 1 +SPANISH 
1 0 1 1 +SPADES 1 0 1 1 +SOWING 1 0 1 1 +SOUP 1 0 1 1 +SOUNDS 2 0 2 2 +SOUNDED 3 0 3 3 +SOUND 12 0 12 12 +SOULS 2 0 2 2 +SORTS 4 0 4 4 +SORRY 3 0 3 3 +SORROWING 1 0 1 1 +SORROW 1 0 1 1 +SORELY 1 0 1 1 +SORE 1 0 1 1 +SORCERER 1 0 1 1 +SOOTH 1 0 1 1 +SOMEWHAT 5 0 5 5 +SOMETIMES 14 0 14 14 +SOMETHING'S 1 0 1 1 +SOMEHOW 3 0 3 3 +SOMEBODY 3 0 3 3 +SOLVE 1 0 1 1 +SOLUTION 4 0 4 4 +SOLUBLE 2 0 2 2 +SOLOMON 1 0 1 1 +SOLIDS 1 0 1 1 +SOLIDLY 1 0 1 1 +SOLID 1 0 1 1 +SOLICITUDE 1 0 1 1 +SOLEMNLY 1 0 1 1 +SOLEMNITY 1 0 1 1 +SOLEMN 1 0 1 1 +SOLDIERS 3 0 3 3 +SOLDIER 1 0 1 1 +SOLD 4 0 4 4 +SOLACE 1 0 1 1 +SOJOURN 2 0 2 2 +SOIL 2 0 2 2 +SOFTLY 2 0 2 2 +SODA 1 0 1 1 +SOCIETY 1 0 1 1 +SOCIETIES 1 0 1 1 +SOCIAL 12 0 12 12 +SOBERLY 1 0 1 1 +SOBER 4 0 4 4 +SOARING 1 0 1 1 +SOAK 1 0 1 1 +SNEEZE 2 0 2 2 +SNEERED 1 0 1 1 +SNEAKY 1 0 1 1 +SNATCHER 2 0 2 2 +SNATCH 1 0 1 1 +SNAKE 1 0 1 1 +SMUGGLERS 7 0 7 7 +SMUGGLED 1 0 1 1 +SMOULDERING 1 0 1 1 +SMOTE 2 0 2 2 +SMOKING 3 0 3 3 +SMOKERS 3 0 3 3 +SMOKED 2 0 2 2 +SMITH 1 0 1 1 +SMILING 2 0 2 2 +SMILED 1 0 1 1 +SMELT 1 0 1 1 +SMALLEST 1 0 1 1 +SMALLER 1 0 1 1 +SLUMBER 2 0 2 2 +SLOWLY 6 0 6 6 +SLOW 3 0 3 3 +SLIPPING 1 0 1 1 +SLIPPER 1 0 1 1 +SLIP 3 0 3 3 +SLING 1 0 1 1 +SLIGHT 1 0 1 1 +SLICES 2 0 2 2 +SLEPT 3 0 3 3 +SLENDER 2 0 2 2 +SLEEVES 1 0 1 1 +SLEEPS 2 0 2 2 +SLEEPING 6 0 6 6 +SLEEPER 1 0 1 1 +SLEEP 15 0 15 15 +SLEDGE 1 0 1 1 +SLAYING 1 0 1 1 +SLAY 1 0 1 1 +SLAVES 2 0 2 2 +SLAVERY 1 0 1 1 +SLAVE 3 0 3 3 +SLAPPED 1 0 1 1 +SLAMMED 1 0 1 1 +SLAIN 2 0 2 2 +SKYLIGHT 2 0 2 2 +SKY 3 0 3 3 +SKULLS 1 0 1 1 +SKULL 1 0 1 1 +SKIRTS 1 0 1 1 +SKIRMISH 1 0 1 1 +SKIMMING 1 0 1 1 +SKILLED 1 0 1 1 +SKILFULLY 1 0 1 1 +SKIES 1 0 1 1 +SKETCH 1 0 1 1 +SIZE 5 0 5 5 +SIXTY 7 0 7 7 +SIXTH 5 0 5 5 +SIXTEEN 2 0 2 2 +SITUATION 1 0 1 1 +SITTING 3 0 3 3 +SITTETH 1 0 1 1 +SISTERS 4 0 4 4 +SISTERLY 1 0 1 1 +SISTER 8 0 8 8 +SINNED 1 0 1 1 +SINKS 1 0 1 1 +SINGULAR 2 0 2 2 +SINGLE 8 0 8 8 +SINGER 1 0 1 1 +SINGED 1 0 1 1 +SINCERITY 1 0 1 1 +SINCERE 1 0 1 1 +SINCE 17 0 17 17 +SIN 2 0 2 2 +SIMPLY 3 0 3 3 +SIMPLE 4 0 4 4 +SIMON 1 0 1 1 +SIMMERING 1 0 1 1 +SIMILAR 2 0 2 2 +SILL 1 0 1 1 +SILK 1 0 1 1 +SILENTLY 2 0 2 2 +SILENT 9 0 9 9 +SILENCED 1 0 1 1 +SILAS 1 0 1 1 +SIGNS 2 0 2 2 +SIGNIFIES 1 0 1 1 +SIGNIFIED 1 0 1 1 +SIGNIFICANT 2 0 2 2 +SIGNIFICANCE 2 0 2 2 +SIGNATURE 1 0 1 1 +SIGNALS 2 0 2 2 +SIGNAL 7 0 7 7 +SIGHED 1 0 1 1 +SIGH 5 0 5 5 +SIFTED 1 0 1 1 +SIDEWAYS 1 0 1 1 +SIDEWALK 1 0 1 1 +SIDES 4 0 4 4 +SICUT 1 0 1 1 +SICK 2 0 2 2 +SHUTTING 1 0 1 1 +SHUTTERS 2 0 2 2 +SHUTTER 1 0 1 1 +SHUFFLE 1 0 1 1 +SHUDDER 1 0 1 1 +SHRUNK 1 0 1 1 +SHROUDED 1 0 1 1 +SHRINKING 1 0 1 1 +SHRILL 1 0 1 1 +SHRIEKING 1 0 1 1 +SHRIEKED 1 0 1 1 +SHOWS 2 0 2 2 +SHOWING 7 0 7 7 +SHOWED 9 0 9 9 +SHOUTS 2 0 2 2 +SHOUTING 4 0 4 4 +SHOUTED 4 0 4 4 +SHOULDN'T 1 0 1 1 +SHORTLY 5 0 5 5 +SHORTER 1 0 1 1 +SHORT 8 0 8 8 +SHOPS 1 0 1 1 +SHOPPY 1 0 1 1 +SHOPPING 1 0 1 1 +SHOPKEEPERS 1 0 1 1 +SHOP 6 0 6 6 +SHOOTER 1 0 1 1 +SHOOT 6 0 6 6 +SHOOK 5 0 5 5 +SHONE 2 0 2 2 +SHOES 5 0 5 5 +SHOCKED 2 0 2 2 +SHIRTS 1 0 1 1 +SHIRT 1 0 1 1 +SHIRKING 1 0 1 1 +SHIMMERING 1 0 1 1 +SHIFTY 1 0 1 1 +SHIFTED 2 0 2 2 +SHERRY 3 0 3 3 +SHERBURN'S 1 0 1 1 +SHERBURN 1 0 1 1 +SHELLS 4 0 4 4 +SHELF 1 0 1 1 +SHEILA 1 0 1 1 +SHEET 2 0 2 2 +SHEATH 1 0 1 1 +SHE'S 5 0 5 5 +SHAWL 1 0 1 1 +SHARPNESS 1 0 1 1 +SHARPLY 4 0 4 4 +SHARPENED 1 0 1 1 +SHARP 5 0 5 5 +SHAPIA 1 0 1 1 +SHAPES 1 0 1 1 +SHAPED 1 0 1 1 +SHAPE 3 0 3 3 +SHAME 2 0 2 2 +SHAM 1 0 1 1 +SHALT 7 0 7 7 +SHAKING 1 0 1 1 +SHAHRAZAD 3 0 3 3 +SHAFTS 1 0 1 1 +SHADOWS 1 0 1 1 +SEX 1 0 1 1 +SEVERSON 1 0 1 1 +SEVERELY 1 0 
1 1 +SEVENTY 7 0 7 7 +SEVENTEEN 4 0 4 4 +SETTLED 4 0 4 4 +SETTLE 2 0 2 2 +SERVING 1 0 1 1 +SERVICES 1 0 1 1 +SERVICE 15 0 15 15 +SERVED 3 0 3 3 +SERVE 7 0 7 7 +SERVANTS 4 0 4 4 +SERVANT 4 0 4 4 +SERPENTS 2 0 2 2 +SERPENT 1 0 1 1 +SERIOUSLY 3 0 3 3 +SERIOUS 5 0 5 5 +SERENITY 1 0 1 1 +SEPULTURE 1 0 1 1 +SEPULCHRE 1 0 1 1 +SEPARATION 3 0 3 3 +SEPARATING 1 0 1 1 +SEPARATED 3 0 3 3 +SEPARATE 2 0 2 2 +SENTINELS 2 0 2 2 +SENTIMENTAL 1 0 1 1 +SENTIMENT 1 0 1 1 +SENSITIVE 2 0 2 2 +SENSIBLY 1 0 1 1 +SENSES 2 0 2 2 +SENSELESS 2 0 2 2 +SENSE 9 0 9 9 +SENSATION 1 0 1 1 +SENATOR 1 0 1 1 +SELL 4 0 4 4 +SEIZED 3 0 3 3 +SEES 1 0 1 1 +SEEMLY 1 0 1 1 +SEEKING 1 0 1 1 +SECURITY 7 0 7 7 +SECRETS 3 0 3 3 +SECRETLY 1 0 1 1 +SECRETARY 2 0 2 2 +SECRET 3 0 3 3 +SECONDS 1 0 1 1 +SECOND 15 0 15 15 +SEASONS 1 0 1 1 +SEASONED 1 0 1 1 +SEARCHINGLY 1 0 1 1 +SEARCHING 1 0 1 1 +SEARCHES 1 0 1 1 +SEARCHED 2 0 2 2 +SEARCH 6 0 6 6 +SEALED 2 0 2 2 +SCUTTLING 1 0 1 1 +SCUM 1 0 1 1 +SCULPTURE 1 0 1 1 +SCULPTOR 3 0 3 3 +SCRUPULOUSLY 1 0 1 1 +SCREW 1 0 1 1 +SCREEN 1 0 1 1 +SCREAM 1 0 1 1 +SCRATCHING 1 0 1 1 +SCRATCH 1 0 1 1 +SCRAPING 1 0 1 1 +SCRAPE 1 0 1 1 +SCOUNDREL 2 0 2 2 +SCOTCH 2 0 2 2 +SCISSORS 5 0 5 5 +SCIENTIFICALLY 1 0 1 1 +SCIENTIFIC 1 0 1 1 +SCHOOLMATE 1 0 1 1 +SCHOOLMASTER 5 0 5 5 +SCHOLARS 1 0 1 1 +SCHEME 1 0 1 1 +SCENES 2 0 2 2 +SCATTER 1 0 1 1 +SCARRED 1 0 1 1 +SCARLET 1 0 1 1 +SCARED 1 0 1 1 +SCARCELY 4 0 4 4 +SCARCE 1 0 1 1 +SCANNING 1 0 1 1 +SCALES 1 0 1 1 +SAXON 2 0 2 2 +SAWYER 3 0 3 3 +SAVAGES 1 0 1 1 +SAVAGERY 1 0 1 1 +SAUCER 1 0 1 1 +SATURATED 1 0 1 1 +SATISFY 3 0 3 3 +SATISFIED 3 0 3 3 +SATISFACTORY 2 0 2 2 +SATISFACTORILY 1 0 1 1 +SATISFACTION 6 0 6 6 +SATIATED 1 0 1 1 +SATANICAL 1 0 1 1 +SATAN 1 0 1 1 +SANCTUARY 1 0 1 1 +SAMUEL 1 0 1 1 +SAMARIA 1 0 1 1 +SALUTED 2 0 2 2 +SALTS 1 0 1 1 +SALT 2 0 2 2 +SALOON 1 0 1 1 +SAITH 1 0 1 1 +SAINTS 3 0 3 3 +SAILING 2 0 2 2 +SAILED 1 0 1 1 +SAFETY 2 0 2 2 +SAFELY 2 0 2 2 +SAFE 8 0 8 8 +SADLY 4 0 4 4 +SACRIFICES 3 0 3 3 +SACRED 1 0 1 1 +SACRAMENT 1 0 1 1 +SACK 1 0 1 1 +RUSTLING 2 0 2 2 +RUSTLE 1 0 1 1 +RUSSIAN 3 0 3 3 +RUNNING 3 0 3 3 +RUMBLING 1 0 1 1 +RULES 2 0 2 2 +RULE 5 0 5 5 +RUINS 1 0 1 1 +RUINING 1 0 1 1 +RUINED 1 0 1 1 +RUFFIAN 1 0 1 1 +ROYAL 7 0 7 7 +ROW 2 0 2 2 +ROVER 1 0 1 1 +ROUSED 3 0 3 3 +ROUSE 1 0 1 1 +ROT 1 0 1 1 +ROSY 2 0 2 2 +ROSEMARY 2 0 2 2 +ROSA 1 0 1 1 +ROPE 3 0 3 3 +ROOMS 2 0 2 2 +ROOFS 1 0 1 1 +ROLLED 1 0 1 1 +ROCKS 1 0 1 1 +ROCKET 1 0 1 1 +ROCK 4 0 4 4 +ROBERT 1 0 1 1 +ROBED 1 0 1 1 +ROBBING 1 0 1 1 +ROBBERY 5 0 5 5 +ROBBERS 3 0 3 3 +ROBBERIES 2 0 2 2 +ROBBED 2 0 2 2 +ROASTING 1 0 1 1 +ROASTED 1 0 1 1 +ROAST 1 0 1 1 +ROARED 1 0 1 1 +ROADSIDE 1 0 1 1 +RIVERS 1 0 1 1 +RIVALRY 1 0 1 1 +RIVAL 2 0 2 2 +RISK 3 0 3 3 +RISING 5 0 5 5 +RISEN 1 0 1 1 +RISE 3 0 3 3 +RIP 2 0 2 2 +RIGOR 1 0 1 1 +RIGHTEOUSNESS 1 0 1 1 +RIGHTEOUS 1 0 1 1 +RIDICULOUS 1 0 1 1 +RIDDEN 1 0 1 1 +RICHLY 1 0 1 1 +RICHER 1 0 1 1 +RICH 7 0 7 7 +RICE 1 0 1 1 +RHEUMATISM 1 0 1 1 +REWARDS 1 0 1 1 +REWARD 4 0 4 4 +REVOLUTIONARIES 1 0 1 1 +REVIVE 1 0 1 1 +REVIEW 1 0 1 1 +REVERSES 1 0 1 1 +REVENGES 1 0 1 1 +REVENGE 1 0 1 1 +REVELLED 1 0 1 1 +REVEL 1 0 1 1 +REVEALED 1 0 1 1 +RETREAT 1 0 1 1 +RETARDED 1 0 1 1 +RETAINED 2 0 2 2 +RESULT 2 0 2 2 +RESTS 1 0 1 1 +RESTRAIN 2 0 2 2 +RESTORED 2 0 2 2 +RESTAURANTS 1 0 1 1 +RESTAURANT 3 0 3 3 +RESPONSIBILITY 2 0 2 2 +RESPONDED 2 0 2 2 +RESPECTS 1 0 1 1 +RESPECTIVE 2 0 2 2 +RESPECTING 1 0 1 1 +RESPECTFULLY 3 0 3 3 +RESPECTFUL 1 0 1 1 +RESPECTED 1 0 1 1 +RESPECTABLE 3 0 3 3 +RESPECT 4 0 4 4 +RESORTS 1 0 1 1 +RESORTED 1 0 1 1 +RESORT 1 0 1 1 +RESOLVING 1 
0 1 1 +RESOLVE 1 0 1 1 +RESOLUTIONS 1 0 1 1 +RESOLUTION 2 0 2 2 +RESISTANCE 3 0 3 3 +RESIST 3 0 3 3 +RESIGNED 1 0 1 1 +RESIDUE 3 0 3 3 +RESIDENCE 2 0 2 2 +RESIDE 1 0 1 1 +RESERVOIR 1 0 1 1 +RESERVE 1 0 1 1 +RESEMBLING 1 0 1 1 +RESEMBLES 1 0 1 1 +RESEMBLE 1 0 1 1 +RESEARCHES 1 0 1 1 +REQUIRING 1 0 1 1 +REQUIRES 1 0 1 1 +REQUIRED 3 0 3 3 +REQUIRE 4 0 4 4 +REQUESTED 2 0 2 2 +REPUTATIONS 1 0 1 1 +REPROACH 2 0 2 2 +REPRESENTED 3 0 3 3 +REPORTED 1 0 1 1 +REPORT 2 0 2 2 +REPEATING 1 0 1 1 +REPAST 1 0 1 1 +REPARATION 1 0 1 1 +REPAIRED 2 0 2 2 +REPAIR 1 0 1 1 +RENOUNCE 3 0 3 3 +RENEWED 2 0 2 2 +RENDERS 1 0 1 1 +RENDERED 1 0 1 1 +RENDER 1 0 1 1 +REMOVED 3 0 3 3 +REMOVE 3 0 3 3 +REMOVAL 1 0 1 1 +REMOTE 1 0 1 1 +REMORSEFUL 1 0 1 1 +REMONSTRANCE 1 0 1 1 +REMNANTS 1 0 1 1 +REMNANT 1 0 1 1 +REMINISCENCES 1 0 1 1 +REMEMBERING 2 0 2 2 +REMEMBERED 4 0 4 4 +REMEDY 4 0 4 4 +REMARKS 1 0 1 1 +REMARKABLY 1 0 1 1 +REMARKABLE 2 0 2 2 +RELYING 1 0 1 1 +RELUCTANTLY 2 0 2 2 +RELUCTANCE 1 0 1 1 +RELINQUISH 1 0 1 1 +RELIGIONS 1 0 1 1 +RELIGION 11 0 11 11 +RELIEVED 1 0 1 1 +RELIEF 6 0 6 6 +RELIED 1 0 1 1 +RELIC 1 0 1 1 +RELEVANT 1 0 1 1 +RELEASE 2 0 2 2 +RELAXING 1 0 1 1 +RELATIONS 2 0 2 2 +REJOINED 1 0 1 1 +REJOINDER 1 0 1 1 +REJOICING 1 0 1 1 +REJOICED 3 0 3 3 +REGULAR 1 0 1 1 +REGRETTING 1 0 1 1 +REGISTER 1 0 1 1 +REGION 1 0 1 1 +REGIMENTS 2 0 2 2 +REGARDED 2 0 2 2 +REGARD 2 0 2 2 +REGAINED 1 0 1 1 +REGAIN 2 0 2 2 +REFUTATION 1 0 1 1 +REFUSING 2 0 2 2 +REFUSES 2 0 2 2 +REFUSED 1 0 1 1 +REFRESHMENT 1 0 1 1 +REFRAIN 2 0 2 2 +REFORMS 1 0 1 1 +REFLECTIVE 1 0 1 1 +REFLECTIONS 1 0 1 1 +REFLECTION 3 0 3 3 +REFINED 2 0 2 2 +REFERRED 2 0 2 2 +REFER 1 0 1 1 +REEF 1 0 1 1 +REDUCED 2 0 2 2 +REDOUBLING 1 0 1 1 +REDEMPTION 1 0 1 1 +REDEEMING 1 0 1 1 +RECTOR 1 0 1 1 +RECRUITS 1 0 1 1 +RECOVERY 1 0 1 1 +RECOVERED 1 0 1 1 +RECOVER 3 0 3 3 +RECOURSE 1 0 1 1 +RECOUNTED 1 0 1 1 +RECORDS 1 0 1 1 +RECORD 2 0 2 2 +RECOMPENSE 2 0 2 2 +RECOMMEND 2 0 2 2 +RECOLLECTING 1 0 1 1 +RECOLLECTED 1 0 1 1 +RECOLLECT 1 0 1 1 +RECOILED 1 0 1 1 +RECOGNIZED 5 0 5 5 +RECOGNITION 2 0 2 2 +RECKON 4 0 4 4 +RECITING 1 0 1 1 +RECITER 2 0 2 2 +RECITED 3 0 3 3 +RECITE 2 0 2 2 +RECIPE 2 0 2 2 +RECEPTION 1 0 1 1 +RECENTLY 1 0 1 1 +RECEIVED 9 0 9 9 +RECEIVE 4 0 4 4 +RECEIPT 1 0 1 1 +RECEDING 1 0 1 1 +RECALLING 1 0 1 1 +RECALLED 1 0 1 1 +RECALL 1 0 1 1 +REBECCA 1 0 1 1 +REASONABLE 2 0 2 2 +REASON 11 0 11 11 +REAR 1 0 1 1 +REAPING 1 0 1 1 +REALM 1 0 1 1 +REALLY 18 0 18 18 +REALIZE 1 0 1 1 +REALITY 3 0 3 3 +REAL 3 0 3 3 +READERS 1 0 1 1 +READER 1 0 1 1 +REACHING 2 0 2 2 +REACH 4 0 4 4 +RAWNESS 1 0 1 1 +RAVING 1 0 1 1 +RAVENING 1 0 1 1 +RAVAGED 1 0 1 1 +RATTLED 1 0 1 1 +RATTLE 1 0 1 1 +RATCHFORD 1 0 1 1 +RASHID 1 0 1 1 +RARE 1 0 1 1 +RAPIDLY 4 0 4 4 +RAP 1 0 1 1 +RANKS 1 0 1 1 +RAMBLER 1 0 1 1 +RAMBLE 1 0 1 1 +RAM'S 1 0 1 1 +RAISING 2 0 2 2 +RAINS 1 0 1 1 +RAINBOWS 1 0 1 1 +RAILROAD 1 0 1 1 +RAIDERS 1 0 1 1 +RAFTER 1 0 1 1 +RAFT 6 0 6 6 +RADICALS 1 0 1 1 +RADIANT 1 0 1 1 +RACKETS 1 0 1 1 +RACK 1 0 1 1 +RACE 2 0 2 2 +RABBIT 5 0 5 5 +R 1 0 1 1 +QUOTED 1 0 1 1 +QUIXOTE 5 0 5 5 +QUIVERED 1 0 1 1 +QUIVER 1 0 1 1 +QUIT 1 0 1 1 +QUIETLY 4 0 4 4 +QUIET 1 0 1 1 +QUICKER 3 0 3 3 +QUICKENETH 1 0 1 1 +QUESTIONS 6 0 6 6 +QUESTIONED 1 0 1 1 +QUESTIONABLE 1 0 1 1 +QUESTION 15 0 15 15 +QUENCH 1 0 1 1 +QUEER 4 0 4 4 +QUEENS 1 0 1 1 +QUEEN'S 1 0 1 1 +QUARTERS 3 0 3 3 +QUARTER 7 0 7 7 +QUART 1 0 1 1 +QUARRELS 1 0 1 1 +QUANTITY 3 0 3 3 +QUALITY 1 0 1 1 +PUZZLED 2 0 2 2 +PUSHING 1 0 1 1 +PUSHED 1 0 1 1 +PURSUIT 1 0 1 1 +PURSUED 3 0 3 3 +PURSUANCE 1 0 1 1 +PURPOSES 1 0 1 1 +PURPOSE 5 0 5 5 
+PURITAN 2 0 2 2 +PURIFY 1 0 1 1 +PURE 4 0 4 4 +PURCHASED 1 0 1 1 +PUNISHMENTS 1 0 1 1 +PUNISHMENT 1 0 1 1 +PUNISHES 1 0 1 1 +PUNISHED 1 0 1 1 +PUNISH 1 0 1 1 +PUNCTUALITY 1 0 1 1 +PUNCTILIOUS 1 0 1 1 +PULSE 1 0 1 1 +PULP 1 0 1 1 +PUFFING 1 0 1 1 +PUFFED 1 0 1 1 +PUDDINGS 1 0 1 1 +PUBLISHER 1 0 1 1 +PUBLIC 5 0 5 5 +PRYING 2 0 2 2 +PRUDENCE 4 0 4 4 +PROVOKE 1 0 1 1 +PROVISION 1 0 1 1 +PROVINCIAL 1 0 1 1 +PROVINCE 4 0 4 4 +PROVIDENCES 1 0 1 1 +PROVIDENCE 1 0 1 1 +PROVIDED 2 0 2 2 +PROVIDE 1 0 1 1 +PROVERBIAL 1 0 1 1 +PROVEN 1 0 1 1 +PROUD 2 0 2 2 +PROTESTED 2 0 2 2 +PROTECTS 1 0 1 1 +PROTECTORS 1 0 1 1 +PROTECTION 2 0 2 2 +PROTECT 2 0 2 2 +PROSPEROUS 1 0 1 1 +PROPRIETORS 1 0 1 1 +PROPOSITION 1 0 1 1 +PROPOSES 1 0 1 1 +PROPOSED 3 0 3 3 +PROPOSALS 1 0 1 1 +PROPORTION 3 0 3 3 +PROPHET 1 0 1 1 +PROPERTY 2 0 2 2 +PROPERLY 2 0 2 2 +PROOF 5 0 5 5 +PRONOUNCED 1 0 1 1 +PROMPTLY 3 0 3 3 +PROMPT 1 0 1 1 +PROMISING 1 0 1 1 +PROMISED 7 0 7 7 +PROMISE 4 0 4 4 +PROLONGED 1 0 1 1 +PROJECT 1 0 1 1 +PROHIBITED 1 0 1 1 +PROHIBIT 1 0 1 1 +PROGRESS 1 0 1 1 +PROGRAMME 1 0 1 1 +PROFUSION 1 0 1 1 +PROFOUND 1 0 1 1 +PROFLIGATE 1 0 1 1 +PROFITABLY 1 0 1 1 +PROFITABLE 1 0 1 1 +PROFIT 2 0 2 2 +PROFESSIONAL 2 0 2 2 +PROFESSION 4 0 4 4 +PROFANITY 1 0 1 1 +PROFANE 1 0 1 1 +PRODUCTIONS 1 0 1 1 +PRODUCING 1 0 1 1 +PROCURE 2 0 2 2 +PROCOPIUS 1 0 1 1 +PROCESSIONS 1 0 1 1 +PROCESSION 1 0 1 1 +PROCESS 6 0 6 6 +PROCEEDINGS 2 0 2 2 +PROCEEDED 1 0 1 1 +PROCEED 2 0 2 2 +PROCEDURE 1 0 1 1 +PROBLEMS 1 0 1 1 +PROBLEM 1 0 1 1 +PROBABLY 7 0 7 7 +PROBABLE 1 0 1 1 +PROBABILITY 1 0 1 1 +PRIVILEGE 1 0 1 1 +PRIVATE 6 0 6 6 +PRIVACY 1 0 1 1 +PRISONERS 3 0 3 3 +PRISONER 13 0 13 13 +PRISON 5 0 5 5 +PRINT 1 0 1 1 +PRINCIPLE 3 0 3 3 +PRINCIPALLY 1 0 1 1 +PRINCIPAL 1 0 1 1 +PRINCESS 11 0 11 11 +PRINCES 1 0 1 1 +PRINCE'S 2 0 2 2 +PRIME 3 0 3 3 +PRIDE 2 0 2 2 +PRICE 2 0 2 2 +PREVIOUSLY 1 0 1 1 +PREVENTED 1 0 1 1 +PREVAILING 1 0 1 1 +PREVAILED 1 0 1 1 +PRETTILY 1 0 1 1 +PRETTIEST 1 0 1 1 +PRETEXT 1 0 1 1 +PRETENDED 1 0 1 1 +PRETEND 3 0 3 3 +PRESUMPTUOUS 1 0 1 1 +PRESSURE 4 0 4 4 +PRESSING 3 0 3 3 +PRESSED 2 0 2 2 +PRESERVING 5 0 5 5 +PRESERVES 3 0 3 3 +PRESERVED 2 0 2 2 +PRESERVE 2 0 2 2 +PRESENTS 4 0 4 4 +PRESENTLY 12 0 12 12 +PRESENTING 1 0 1 1 +PRESENTED 3 0 3 3 +PRESENCE 9 0 9 9 +PREPARING 3 0 3 3 +PREPARED 7 0 7 7 +PREPARE 1 0 1 1 +PREPARATIONS 5 0 5 5 +PREOCCUPIED 1 0 1 1 +PREMISES 1 0 1 1 +PRELIMINARIES 1 0 1 1 +PREFERRED 1 0 1 1 +PREFER 2 0 2 2 +PREDICTIONS 1 0 1 1 +PRECIPITANCY 1 0 1 1 +PRECIOUS 2 0 2 2 +PRECINCT 1 0 1 1 +PRECEPTORY 2 0 2 2 +PRECAUTION 1 0 1 1 +PREACHING 2 0 2 2 +PREACHER 1 0 1 1 +PRAYER 6 0 6 6 +PRAYED 3 0 3 3 +PRAISES 1 0 1 1 +PRAISE 3 0 3 3 +PRACTISE 1 0 1 1 +PRACTICED 1 0 1 1 +PRACTICE 1 0 1 1 +PRACTICALLY 2 0 2 2 +POWERS 3 0 3 3 +POWERLESS 1 0 1 1 +POWERFUL 3 0 3 3 +POWER 27 0 27 27 +POWDERED 1 0 1 1 +POVERTY 3 0 3 3 +POURS 1 0 1 1 +POURING 1 0 1 1 +POUR 1 0 1 1 +POUND 3 0 3 3 +POUNCE 1 0 1 1 +POTS 1 0 1 1 +POTASSIC 1 0 1 1 +POTASH 1 0 1 1 +POSTERS 1 0 1 1 +POSSIBLY 3 0 3 3 +POSSIBLE 12 0 12 12 +POSSIBILITY 2 0 2 2 +POSSESSION 2 0 2 2 +POSSESSES 1 0 1 1 +POSSESSED 5 0 5 5 +POSSESS 1 0 1 1 +POSSE 1 0 1 1 +POSITIVELY 3 0 3 3 +POSITIVE 1 0 1 1 +PORTMANTEAU 1 0 1 1 +PORTIONS 2 0 2 2 +PORTION 2 0 2 2 +PORTER 2 0 2 2 +POPULATION 1 0 1 1 +POPULARITY 1 0 1 1 +POPULAR 1 0 1 1 +POPPED 1 0 1 1 +POPES 2 0 2 2 +POPE'S 1 0 1 1 +POPE 1 0 1 1 +POP 1 0 1 1 +PONY 1 0 1 1 +POLLY'S 3 0 3 3 +POLLY 19 0 19 19 +POLITICIANS 1 0 1 1 +POLITICAL 3 0 3 3 +POLICE 5 0 5 5 +POKING 1 0 1 1 +POKED 1 0 1 1 +POISONS 1 0 1 1 +POISONING 3 0 3 3 +POINTING 2 
0 2 2 +POINTED 3 0 3 3 +POETRY 2 0 2 2 +POCKETS 1 0 1 1 +POCKETED 1 0 1 1 +PO 1 0 1 1 +PLUNGED 3 0 3 3 +PLUNDERED 1 0 1 1 +PLUG 1 0 1 1 +PLUCKING 2 0 2 2 +PLOT 1 0 1 1 +PLEDGED 1 0 1 1 +PLEDGE 1 0 1 1 +PLEASURES 2 0 2 2 +PLEASING 2 0 2 2 +PLEASED 4 0 4 4 +PLEASANTER 1 0 1 1 +PLEADINGS 1 0 1 1 +PLEAD 1 0 1 1 +PLAYERS 1 0 1 1 +PLAY 4 0 4 4 +PLATTERS 1 0 1 1 +PLATFORM 2 0 2 2 +PLATE 2 0 2 2 +PLASTER 1 0 1 1 +PLANTS 1 0 1 1 +PLANTED 1 0 1 1 +PLANTATIONS 2 0 2 2 +PLANS 5 0 5 5 +PLANNED 1 0 1 1 +PLANKS 2 0 2 2 +PLANK 3 0 3 3 +PLANETS 1 0 1 1 +PLANET 2 0 2 2 +PLAINLY 3 0 3 3 +PLAGUE 2 0 2 2 +PLACING 2 0 2 2 +PITY 4 0 4 4 +PITIFULNESS 1 0 1 1 +PIT 1 0 1 1 +PISTOLS 1 0 1 1 +PIPING 1 0 1 1 +PINT 1 0 1 1 +PINK 1 0 1 1 +PINED 1 0 1 1 +PINCH 1 0 1 1 +PIN 1 0 1 1 +PILLOW 1 0 1 1 +PILED 1 0 1 1 +PILE 1 0 1 1 +PIG 1 0 1 1 +PIERO 1 0 1 1 +PIERCED 1 0 1 1 +PIECES 9 0 9 9 +PICTURES 3 0 3 3 +PICKET 1 0 1 1 +PIAZZA 1 0 1 1 +PHYSIOLOGICAL 1 0 1 1 +PHYSICIAN 3 0 3 3 +PHYSICAL 2 0 2 2 +PHRASE 1 0 1 1 +PHONE 1 0 1 1 +PHLEGMATIC 1 0 1 1 +PHILOSOPHERS 1 0 1 1 +PHELPS 2 0 2 2 +PHARMACY 1 0 1 1 +PEYTON 1 0 1 1 +PETITIONS 1 0 1 1 +PETERS 1 0 1 1 +PET 2 0 2 2 +PERVADED 1 0 1 1 +PERUSING 1 0 1 1 +PERUSAL 1 0 1 1 +PERSUADED 1 0 1 1 +PERSUADE 1 0 1 1 +PERSPECTIVE 1 0 1 1 +PERSONS 8 0 8 8 +PERSONALLY 5 0 5 5 +PERSONAL 2 0 2 2 +PERSONAGE 1 0 1 1 +PERSON'S 1 0 1 1 +PERSON 16 0 16 16 +PERSISTED 3 0 3 3 +PERSIST 1 0 1 1 +PERSEUS 2 0 2 2 +PERSECUTORS 1 0 1 1 +PERSECUTION 1 0 1 1 +PERSECUTED 1 0 1 1 +PERSECUTE 2 0 2 2 +PERPLEXITY 1 0 1 1 +PERPETUALLY 1 0 1 1 +PERMITTED 5 0 5 5 +PERMIT 4 0 4 4 +PERMISSION 2 0 2 2 +PERMANENT 1 0 1 1 +PERISHED 3 0 3 3 +PERISH 4 0 4 4 +PERIODS 1 0 1 1 +PERIOD 2 0 2 2 +PERILS 1 0 1 1 +PERFORMANCES 1 0 1 1 +PERFORM 2 0 2 2 +PERFECTION 2 0 2 2 +PERFECT 5 0 5 5 +PERCHED 1 0 1 1 +PERCH 1 0 1 1 +PERCEPTIBLE 1 0 1 1 +PERCEIVED 7 0 7 7 +PERCEIVE 1 0 1 1 +PEPPINO 1 0 1 1 +PEPPER 2 0 2 2 +PEOPLE'S 3 0 3 3 +PEOPLE 44 0 44 44 +PENNY 1 0 1 1 +PENETRATING 1 0 1 1 +PENETRATE 1 0 1 1 +PENCE 1 0 1 1 +PENALTY 1 0 1 1 +PELT 1 0 1 1 +PEERS 1 0 1 1 +PEERED 1 0 1 1 +PEDESTAL 1 0 1 1 +PECULIAR 2 0 2 2 +PEBBLES 3 0 3 3 +PEASANT 3 0 3 3 +PEARLS 1 0 1 1 +PEALS 1 0 1 1 +PEAL 1 0 1 1 +PEACEFUL 2 0 2 2 +PEACE 8 0 8 8 +PEABODY 1 0 1 1 +PAYS 1 0 1 1 +PAYING 1 0 1 1 +PAVILION 1 0 1 1 +PAVEMENT 1 0 1 1 +PAUSE 4 0 4 4 +PATRIOTS 1 0 1 1 +PATRIOTISM 1 0 1 1 +PATRIOT 1 0 1 1 +PATRIMONY 1 0 1 1 +PATRIARCHS 1 0 1 1 +PATIENTLY 1 0 1 1 +PATIENT'S 1 0 1 1 +PATIENT 2 0 2 2 +PATHS 1 0 1 1 +PASTE 1 0 1 1 +PASSION 5 0 5 5 +PASSERS 2 0 2 2 +PASSENGERS 4 0 4 4 +PASSAGE 2 0 2 2 +PASSABLE 1 0 1 1 +PARTY 13 0 13 13 +PARTNER 1 0 1 1 +PARTISANS 1 0 1 1 +PARTINGS 1 0 1 1 +PARTING 2 0 2 2 +PARTIES 1 0 1 1 +PARTICULARS 1 0 1 1 +PARTICULARLY 5 0 5 5 +PARTICULAR 4 0 4 4 +PARTICLE 2 0 2 2 +PARTICIPATION 1 0 1 1 +PARTICIPANTS 1 0 1 1 +PARTAKE 1 0 1 1 +PARSLEY 1 0 1 1 +PARLIAMENTARY 1 0 1 1 +PARK 1 0 1 1 +PARISIAN 1 0 1 1 +PARIS 5 0 5 5 +PARENTS 2 0 2 2 +PARDONABLE 1 0 1 1 +PARCEL 2 0 2 2 +PARASOL 1 0 1 1 +PARALLEL 1 0 1 1 +PARADISE 1 0 1 1 +PAPERS 4 0 4 4 +PAPER 6 0 6 6 +PAPA 4 0 4 4 +PANZA 1 0 1 1 +PANTING 2 0 2 2 +PANS 1 0 1 1 +PANEL 1 0 1 1 +PALINGS 1 0 1 1 +PALESTINE 2 0 2 2 +PALER 1 0 1 1 +PALE 8 0 8 8 +PAINTING 1 0 1 1 +PAINTER 1 0 1 1 +PAINT 1 0 1 1 +PAINFULLY 1 0 1 1 +PAINFUL 5 0 5 5 +PAINED 1 0 1 1 +PAIN 3 0 3 3 +PAID 9 0 9 9 +PAGES 4 0 4 4 +PAGE 2 0 2 2 +PADDLE 2 0 2 2 +PACKED 1 0 1 1 +PACIFY 1 0 1 1 +PACIFIC 2 0 2 2 +OXEN 1 0 1 1 +OX 1 0 1 1 +OVERWHELMED 2 0 2 2 +OVERTURNING 1 0 1 1 +OVERTHREW 1 0 1 1 +OVERTAKEN 1 0 1 1 +OVERLY 1 0 1 1 +OVERHEAD 2 
0 2 2 +OVERCOME 2 0 2 2 +OVERCOAT 1 0 1 1 +OVEN 2 0 2 2 +OVAL 1 0 1 1 +OUTWARD 1 0 1 1 +OUTSIDE 8 0 8 8 +OUTLAWS 1 0 1 1 +OUTFIT 1 0 1 1 +OUTDO 1 0 1 1 +OURSELVES 4 0 4 4 +OTTER 1 0 1 1 +OTHO 1 0 1 1 +OTHERWISE 1 0 1 1 +OSTRICH 1 0 1 1 +ORTHODOX 1 0 1 1 +ORPHAN 1 0 1 1 +ORNERY 1 0 1 1 +ORNERIEST 1 0 1 1 +ORNAMENTED 1 0 1 1 +ORNAMENTAL 1 0 1 1 +ORLEANS 1 0 1 1 +ORISON 1 0 1 1 +ORIGIN 1 0 1 1 +ORGANIZATION 3 0 3 3 +ORDINARY 1 0 1 1 +ORDERED 5 0 5 5 +ORDEAL 1 0 1 1 +ORDAINED 1 0 1 1 +ORCHARDS 1 0 1 1 +ORANGE 1 0 1 1 +ORACLE 2 0 2 2 +OPTIC 1 0 1 1 +OPPRESSOR 1 0 1 1 +OPPRESSION 3 0 3 3 +OPPRESSED 2 0 2 2 +OPPOSITE 3 0 3 3 +OPPOSE 1 0 1 1 +OPPORTUNITY 4 0 4 4 +OPERATIONS 1 0 1 1 +OPERATED 1 0 1 1 +OPERA 1 0 1 1 +OPENS 3 0 3 3 +OPENING 7 0 7 7 +OPAQUE 1 0 1 1 +ONWARDS 1 0 1 1 +ONWARD 1 0 1 1 +ONION 2 0 2 2 +ONESELF 1 0 1 1 +OMITTING 1 0 1 1 +OMAR 2 0 2 2 +OLDISH 1 0 1 1 +OLDER 1 0 1 1 +OGRE'S 1 0 1 1 +OGRE 3 0 3 3 +OGLING 1 0 1 1 +OFFICIAL 1 0 1 1 +OFFICE 4 0 4 4 +OFFERS 3 0 3 3 +OFFERINGS 1 0 1 1 +OFFERING 1 0 1 1 +OFFERED 3 0 3 3 +OFFER 2 0 2 2 +OFFEND 2 0 2 2 +OCEAN 1 0 1 1 +OCCURRED 5 0 5 5 +OCCUR 1 0 1 1 +OCCUPY 3 0 3 3 +OCCASIONS 2 0 2 2 +OCCASION 5 0 5 5 +OBVIOUSLY 1 0 1 1 +OBVIOUS 1 0 1 1 +OBTAINED 1 0 1 1 +OBTAIN 3 0 3 3 +OBSTRUCTION 1 0 1 1 +OBSTINATE 1 0 1 1 +OBSTINACY 1 0 1 1 +OBSERVER 1 0 1 1 +OBSERVE 1 0 1 1 +OBSERVATIONS 2 0 2 2 +OBSERVATION 1 0 1 1 +OBSERVANT 1 0 1 1 +OBLONG 1 0 1 1 +OBLIGED 3 0 3 3 +OBLIGATION 3 0 3 3 +OBJECTS 2 0 2 2 +OBJECTION 2 0 2 2 +OBEYING 1 0 1 1 +OBEYED 3 0 3 3 +OBEDIENTLY 1 0 1 1 +OBEDIENT 2 0 2 2 +OATH 4 0 4 4 +OAR 1 0 1 1 +O'CLOCK 9 0 9 9 +NUTRITION 2 0 2 2 +NUN 1 0 1 1 +NUMBERED 1 0 1 1 +NUMBER 5 0 5 5 +NUBIAN 1 0 1 1 +NOWHERE 2 0 2 2 +NOTWITHSTANDING 1 0 1 1 +NOTORIOUS 1 0 1 1 +NOTION 1 0 1 1 +NOTHING 28 0 28 28 +NOTES 1 0 1 1 +NOTED 2 0 2 2 +NOTABLES 1 0 1 1 +NOSE 3 0 3 3 +NORTHWARD 1 0 1 1 +NORTHERN 1 0 1 1 +NORMAN 2 0 2 2 +NOON 1 0 1 1 +NONSENSE 2 0 2 2 +NODDING 1 0 1 1 +NODDED 3 0 3 3 +NOD 2 0 2 2 +NOBODY 6 0 6 6 +NOBLEMEN 1 0 1 1 +NOBLEMAN 1 0 1 1 +NOBLE 6 0 6 6 +NOBILITY 1 0 1 1 +NIPPER 1 0 1 1 +NINTH 1 0 1 1 +NINEVEH 1 0 1 1 +NINETY 2 0 2 2 +NINETEENTH 1 0 1 1 +NINETEEN 1 0 1 1 +NIMBLENESS 1 0 1 1 +NIKOLAY 1 0 1 1 +NIGHTS 2 0 2 2 +NICOTINE 1 0 1 1 +NICK 1 0 1 1 +NICHOLAS 1 0 1 1 +NICETIES 1 0 1 1 +NICE 3 0 3 3 +NEVERTHELESS 3 0 3 3 +NEVER 61 0 61 61 +NERVOUSNESS 2 0 2 2 +NERVOUSLY 1 0 1 1 +NERVOUS 2 0 2 2 +NEMESIS 3 0 3 3 +NEITHER 8 0 8 8 +NEIGHBOURHOOD 1 0 1 1 +NEGRO 2 0 2 2 +NEGLECTING 1 0 1 1 +NEGLECTED 2 0 2 2 +NEGLECT 1 0 1 1 +NEGATIVE 1 0 1 1 +NEEDN'T 1 0 1 1 +NED 3 0 3 3 +NECK 1 0 1 1 +NECESSITY 7 0 7 7 +NECESSARY 10 0 10 10 +NECESSARILY 2 0 2 2 +NEATLY 2 0 2 2 +NEARLY 2 0 2 2 +NAY 2 0 2 2 +NAVEL 1 0 1 1 +NAUSEA 1 0 1 1 +NATURED 2 0 2 2 +NATURE 11 0 11 11 +NATTY 1 0 1 1 +NATIVE 6 0 6 6 +NATIONS 3 0 3 3 +NARROWNESS 1 0 1 1 +NARROWER 1 0 1 1 +NARROW 5 0 5 5 +NARRATOR 1 0 1 1 +NARRATE 1 0 1 1 +NARCOTIC 1 0 1 1 +NAPKINS 1 0 1 1 +NAPKIN 1 0 1 1 +NAMES 5 0 5 5 +NAMED 5 0 5 5 +NAME'S 1 0 1 1 +NAME 21 0 21 21 +NAILS 2 0 2 2 +NAILED 2 0 2 2 +NAIL 5 0 5 5 +MYSELF 27 0 27 27 +MUTTERED 5 0 5 5 +MUSICAL 1 0 1 1 +MUSIC 2 0 2 2 +MURMURED 1 0 1 1 +MURMUR 1 0 1 1 +MURDERED 1 0 1 1 +MURDER 5 0 5 5 +MULTITUDE 1 0 1 1 +MULE 1 0 1 1 +MUFFLED 1 0 1 1 +MUDDY 1 0 1 1 +MUCOUS 1 0 1 1 +MOVING 6 0 6 6 +MOVEMENTS 2 0 2 2 +MOVED 7 0 7 7 +MOVE 1 0 1 1 +MOURNFULLY 1 0 1 1 +MOUNTED 2 0 2 2 +MOUNTAINS 1 0 1 1 +MOUNTAIN 1 0 1 1 +MOTOR 1 0 1 1 +MOTLEY 1 0 1 1 +MOTIVES 1 0 1 1 +MOTIVE 1 0 1 1 +MOTHERS 5 0 5 5 +MOTHER'S 3 0 3 3 +MOSTLY 2 0 2 2 +MORTIS 1 0 1 1 +MORTAR 1 0 
1 1 +MORTAL 1 0 1 1 +MORPHINE 2 0 2 2 +MOREOVER 1 0 1 1 +MORE'N 1 0 1 1 +MORCERF 3 0 3 3 +MORALS 1 0 1 1 +MORAL 8 0 8 8 +MOPPED 1 0 1 1 +MOORED 1 0 1 1 +MOONFLOWERS 1 0 1 1 +MOON 2 0 2 2 +MOOD 2 0 2 2 +MONTH 1 0 1 1 +MONTESQUIEU 1 0 1 1 +MONSTROUS 1 0 1 1 +MONSTERS 2 0 2 2 +MONOTONOUS 1 0 1 1 +MONKEY 3 0 3 3 +MONEY 16 0 16 16 +MONDAY 2 0 2 2 +MONASTERY 1 0 1 1 +MONARCH 2 0 2 2 +MOMENTS 6 0 6 6 +MOMENT'S 1 0 1 1 +MOLESTED 1 0 1 1 +MOHAMMED 1 0 1 1 +MODEST 1 0 1 1 +MODERN 2 0 2 2 +MODEL 2 0 2 2 +MOCKERY 1 0 1 1 +MOB 1 0 1 1 +MOANING 2 0 2 2 +MIXING 1 0 1 1 +MISTRUST 1 0 1 1 +MISTRESSES 1 0 1 1 +MISTRESS 6 0 6 6 +MISTAKE 2 0 2 2 +MIST 2 0 2 2 +MISSOURI 1 0 1 1 +MISSISSIPPIAN 1 0 1 1 +MISSISSIPPI 1 0 1 1 +MISSING 3 0 3 3 +MISFORTUNE 1 0 1 1 +MISERY 3 0 3 3 +MISERABLE 2 0 2 2 +MISCONDUCT 1 0 1 1 +MISCONCEPTION 1 0 1 1 +MISCHIEF 1 0 1 1 +MISAPPREHENSION 1 0 1 1 +MISANTHROPY 1 0 1 1 +MIRTH 2 0 2 2 +MIRACULOUS 1 0 1 1 +MIRACLES 3 0 3 3 +MIRABELLE 2 0 2 2 +MINUTES 11 0 11 11 +MINUTE 6 0 6 6 +MINOR 1 0 1 1 +MINNESOTA 1 0 1 1 +MINISTERED 1 0 1 1 +MINIMS 1 0 1 1 +MINIATURE 1 0 1 1 +MINGLED 2 0 2 2 +MINDS 1 0 1 1 +MINDED 1 0 1 1 +MINCE 1 0 1 1 +MILTON 4 0 4 4 +MILLIONS 1 0 1 1 +MILLER'S 1 0 1 1 +MILLER 4 0 4 4 +MILL 1 0 1 1 +MILITARY 5 0 5 5 +MILES 6 0 6 6 +MILE 5 0 5 5 +MIKE'S 1 0 1 1 +MIGHTY 3 0 3 3 +MIGHTINESS 1 0 1 1 +MIDST 3 0 3 3 +MIDSHIPMAN 1 0 1 1 +MIDNIGHT 3 0 3 3 +MIDDY'S 1 0 1 1 +MIDDLE 5 0 5 5 +MICROSCOPIC 1 0 1 1 +MICROBE 1 0 1 1 +METALLIC 1 0 1 1 +MESTIENNE'S 1 0 1 1 +MESSAGE 1 0 1 1 +MESELF 1 0 1 1 +MERITS 1 0 1 1 +MERE 3 0 3 3 +MERCY 5 0 5 5 +MERCURY 2 0 2 2 +MERCIFUL 1 0 1 1 +MERCIES 1 0 1 1 +MERCHANTS 6 0 6 6 +MERCHANT 3 0 3 3 +MENTION 1 0 1 1 +MENTALLY 3 0 3 3 +MENACING 1 0 1 1 +MEMORY 4 0 4 4 +MEMORIAL 1 0 1 1 +MEMBRANE 1 0 1 1 +MEMBERS 7 0 7 7 +MEMBER 1 0 1 1 +MELANCHOLY 1 0 1 1 +MEETING 4 0 4 4 +MEDIUMS 1 0 1 1 +MEDITATION 1 0 1 1 +MEDITATED 1 0 1 1 +MEDICINE 1 0 1 1 +MEDDLE 1 0 1 1 +MEDALS 1 0 1 1 +MEDAL 1 0 1 1 +MECHANICALLY 1 0 1 1 +MECHANICAL 1 0 1 1 +MEASURABLE 1 0 1 1 +MEANWHILE 4 0 4 4 +MEANT 10 0 10 10 +MEANING 2 0 2 2 +MAYOR 1 0 1 1 +MAYBE 4 0 4 4 +MATTOCK 1 0 1 1 +MATTERED 1 0 1 1 +MATERIALLY 1 0 1 1 +MATERIAL 1 0 1 1 +MATCH 1 0 1 1 +MASTERS 1 0 1 1 +MASTERPIECE 1 0 1 1 +MASTERED 1 0 1 1 +MASTER'S 1 0 1 1 +MAST 1 0 1 1 +MASON'S 1 0 1 1 +MASON 1 0 1 1 +MARY'S 2 0 2 2 +MARY 2 0 2 2 +MARVELLOUS 4 0 4 4 +MARTYR 1 0 1 1 +MARTIN 1 0 1 1 +MARSPORT 1 0 1 1 +MARSHAL'S 1 0 1 1 +MARSH 1 0 1 1 +MARS 3 0 3 3 +MARRY 3 0 3 3 +MARRIED 4 0 4 4 +MARLBOROUGH'S 1 0 1 1 +MARKS 1 0 1 1 +MARIUS 6 0 6 6 +MARIA 1 0 1 1 +MARGUERITE 11 0 11 11 +MARGINAL 1 0 1 1 +MARGARET'S 3 0 3 3 +MARE 1 0 1 1 +MARCH 2 0 2 2 +MARBLE 2 0 2 2 +MAR 1 0 1 1 +MANTELPIECE 1 0 1 1 +MANNER 9 0 9 9 +MANIFESTATION 1 0 1 1 +MANCHESTER 1 0 1 1 +MANAGE 1 0 1 1 +MAMMOTH 1 0 1 1 +MALICE 1 0 1 1 +MALEVOLENT 1 0 1 1 +MALADY 1 0 1 1 +MAKER 1 0 1 1 +MAJOR 5 0 5 5 +MAJESTY 2 0 2 2 +MAINTAINED 1 0 1 1 +MAINTAIN 1 0 1 1 +MAINLY 1 0 1 1 +MAIN 1 0 1 1 +MAIDEN 3 0 3 3 +MAHOGANY 2 0 2 2 +MAGNIFYING 1 0 1 1 +MAGNIFIES 1 0 1 1 +MAGNIFICENT 2 0 2 2 +MAGNIFICENCE 1 0 1 1 +MAGNANIMITY 1 0 1 1 +MAGICIAN 2 0 2 2 +MAGICAL 1 0 1 1 +MAGIC 1 0 1 1 +MAGAZINE 1 0 1 1 +MADRID 1 0 1 1 +MADNESS 2 0 2 2 +MADELEINE 3 0 3 3 +MADAME 1 0 1 1 +MACHINES 1 0 1 1 +MACHINERY 1 0 1 1 +LYNCHES 1 0 1 1 +LUTHER 1 0 1 1 +LUSTILY 1 0 1 1 +LURKING 1 0 1 1 +LUMP 1 0 1 1 +LUCRATIVE 1 0 1 1 +LUCKY 2 0 2 2 +LUCKLESS 1 0 1 1 +LUCAS 1 0 1 1 +LOYALTY 2 0 2 2 +LOWERED 1 0 1 1 +LOVES 6 0 6 6 +LOVERS 2 0 2 2 +LOVELY 3 0 3 3 +LOUVRE 1 0 1 1 +LOUISIANA 1 0 1 1 +LOUIS 3 0 3 3 +LOUDLY 
1 0 1 1 +LOUDER 1 0 1 1 +LOSING 3 0 3 3 +LOSE 6 0 6 6 +LORN 1 0 1 1 +LORDSHIPS 1 0 1 1 +LOOSENED 1 0 1 1 +LOOSELY 1 0 1 1 +LOOSE 3 0 3 3 +LOOKING 21 0 21 21 +LONGING 3 0 3 3 +LONGED 2 0 2 2 +LONELY 1 0 1 1 +LONDON 4 0 4 4 +LODGE 1 0 1 1 +LOCKS 1 0 1 1 +LOCKED 4 0 4 4 +LOCATE 1 0 1 1 +LOCAL 4 0 4 4 +LOBSTERS 2 0 2 2 +LOBSTER 12 0 12 12 +LOADING 1 0 1 1 +LIVING 5 0 5 5 +LIVID 1 0 1 1 +LIVERY 1 0 1 1 +LIVELY 2 0 2 2 +LIVELONG 1 0 1 1 +LIVELIHOOD 1 0 1 1 +LITTER 1 0 1 1 +LITERATURE 1 0 1 1 +LITERALLY 1 0 1 1 +LISTENING 5 0 5 5 +LISTENERS 1 0 1 1 +LISTENER 1 0 1 1 +LIQUOR 4 0 4 4 +LIPS 6 0 6 6 +LIP 3 0 3 3 +LIONS 1 0 1 1 +LION 1 0 1 1 +LINK 1 0 1 1 +LINGO 1 0 1 1 +LINGER 1 0 1 1 +LINES 2 0 2 2 +LINCOLN 1 0 1 1 +LIMITS 1 0 1 1 +LIMIT 1 0 1 1 +LIMES 1 0 1 1 +LIKING 1 0 1 1 +LIKES 1 0 1 1 +LIGHTLY 1 0 1 1 +LIGHTING 1 0 1 1 +LIGHTENED 1 0 1 1 +LIGATURES 1 0 1 1 +LIFTING 1 0 1 1 +LIFT 1 0 1 1 +LIEUTENANT 1 0 1 1 +LIES 4 0 4 4 +LIBRARY 2 0 2 2 +LIBERTY 3 0 3 3 +LIBERATION 2 0 2 2 +LEVIN 6 0 6 6 +LEVELLED 1 0 1 1 +LEVEL 1 0 1 1 +LETTING 1 0 1 1 +LESSONS 1 0 1 1 +LESSON 2 0 2 2 +LESSENS 1 0 1 1 +LESSEN 2 0 2 2 +LENT 1 0 1 1 +LENGTH 4 0 4 4 +LEGISLATURE 1 0 1 1 +LEGALLY 1 0 1 1 +LEGAL 2 0 2 2 +LEG 1 0 1 1 +LEECHES 1 0 1 1 +LEECH 1 0 1 1 +LED 4 0 4 4 +LEAVING 5 0 5 5 +LEAVES 1 0 1 1 +LEAST 15 0 15 15 +LEARNS 1 0 1 1 +LEARNING 4 0 4 4 +LEAP 1 0 1 1 +LEANING 1 0 1 1 +LEAN 1 0 1 1 +LEAF 1 0 1 1 +LEADS 1 0 1 1 +LEADING 3 0 3 3 +LEADER 2 0 2 2 +LAZY 1 0 1 1 +LAZILY 1 0 1 1 +LAYING 3 0 3 3 +LAY 14 0 14 14 +LAWYER 1 0 1 1 +LAURA 3 0 3 3 +LAUGHTER 3 0 3 3 +LAUGHS 1 0 1 1 +LAUGHING 5 0 5 5 +LATTER 2 0 2 2 +LATIN 1 0 1 1 +LATELY 2 0 2 2 +LASTLY 2 0 2 2 +LASTING 1 0 1 1 +LARKIN'S 1 0 1 1 +LARGESSE 1 0 1 1 +LAPSE 1 0 1 1 +LAP 2 0 2 2 +LANTERN 1 0 1 1 +LANGUAGE 2 0 2 2 +LANDSMAN 1 0 1 1 +LANDOWNER 4 0 4 4 +LANDLORD 1 0 1 1 +LANDING 1 0 1 1 +LANDED 3 0 3 3 +LAMPLIT 1 0 1 1 +LAMP 1 0 1 1 +LAME 1 0 1 1 +LADY'S 1 0 1 1 +LACE 2 0 2 2 +LABORING 2 0 2 2 +LABORER 1 0 1 1 +KNUCKLES 1 0 1 1 +KNOWS 8 0 8 8 +KNOWING 3 0 3 3 +KNOWEST 3 0 3 3 +KNOWED 1 0 1 1 +KNOCKER 1 0 1 1 +KNOCK 1 0 1 1 +KNIGHTS 2 0 2 2 +KNIGHTHOOD 1 0 1 1 +KNEW 16 0 16 16 +KNEES 3 0 3 3 +KNAVE 1 0 1 1 +KITCHEN 3 0 3 3 +KISSING 2 0 2 2 +KISSED 6 0 6 6 +KISS 2 0 2 2 +KINSFOLK 1 0 1 1 +KINGS 8 0 8 8 +KINGDOM 3 0 3 3 +KING'S 7 0 7 7 +KINDS 1 0 1 1 +KINDLY 3 0 3 3 +KINDEST 1 0 1 1 +KILLING 1 0 1 1 +KILLED 4 0 4 4 +KIDNEYS 1 0 1 1 +KICKED 1 0 1 1 +KHORASAN 2 0 2 2 +KHAN 1 0 1 1 +KEYHOLE 1 0 1 1 +KETTLE 1 0 1 1 +KEPT 9 0 9 9 +KENT 2 0 2 2 +KENNETH 3 0 3 3 +KEEPING 5 0 5 5 +KEEPER'S 1 0 1 1 +KEEPER 2 0 2 2 +KEENLY 1 0 1 1 +KAZI 1 0 1 1 +K 1 0 1 1 +JUSTLY 1 0 1 1 +JUSTINIAN 1 0 1 1 +JUSTIFIES 1 0 1 1 +JUSTIFICATION 3 0 3 3 +JUNE 1 0 1 1 +JUMPING 2 0 2 2 +JUMP 1 0 1 1 +JUICES 1 0 1 1 +JUICE 1 0 1 1 +JUGS 1 0 1 1 +JUDICIAL 1 0 1 1 +JUDGES 2 0 2 2 +JUDGED 1 0 1 1 +JOYOUS 1 0 1 1 +JOYFUL 2 0 2 2 +JOYANCE 2 0 2 2 +JOY 7 0 7 7 +JOURNEYED 1 0 1 1 +JOURNEY 8 0 8 8 +JOURNALISM 1 0 1 1 +JOSEPH 1 0 1 1 +JONES 1 0 1 1 +JOLLY 1 0 1 1 +JOINTS 1 0 1 1 +JOIN 5 0 5 5 +JOCELYN'S 1 0 1 1 +JOCELYN 1 0 1 1 +JOBS 1 0 1 1 +JOB 7 0 7 7 +JOANNA'S 1 0 1 1 +JEWISH 2 0 2 2 +JEWELRY 1 0 1 1 +JEWELER 2 0 2 2 +JEW'S 1 0 1 1 +JESUS 2 0 2 2 +JERK 1 0 1 1 +JERICHO 1 0 1 1 +JENKINS 2 0 2 2 +JEHOVAH 3 0 3 3 +JEERINGLY 1 0 1 1 +JEAN 10 0 10 10 +JANUARY 1 0 1 1 +JANGLING 1 0 1 1 +JANE'S 1 0 1 1 +JAMIESON 1 0 1 1 +JAM 2 0 2 2 +JAIL 1 0 1 1 +JACKSON 4 0 4 4 +JACKET 1 0 1 1 +J 2 0 2 2 +IVANOVITCH'S 1 0 1 1 +ITALY 1 0 1 1 +ISSUED 2 0 2 2 +ISSUE 2 0 2 2 +ISRAEL'S 2 0 2 2 +ISRAEL 7 0 7 7 +ISOLATION 1 0 1 1 
+ISLANDERS 1 0 1 1 +ISLAND 5 0 5 5 +IRRITATION 1 0 1 1 +IRRITABILITY 1 0 1 1 +IRREVERENTLY 1 0 1 1 +IRREVERENCE 1 0 1 1 +IRRESISTIBLY 1 0 1 1 +IRRESISTIBLE 1 0 1 1 +IRON 7 0 7 7 +IRKSOME 1 0 1 1 +IRELAND 2 0 2 2 +IRATE 1 0 1 1 +INWARD 1 0 1 1 +INVOLVED 1 0 1 1 +INVOKE 1 0 1 1 +INVITATION 2 0 2 2 +INVISIBLE 1 0 1 1 +INVINCIBLE 1 0 1 1 +INVETERATE 1 0 1 1 +INVESTIGATION 2 0 2 2 +INVENTING 1 0 1 1 +INVALIDES 1 0 1 1 +INVADING 1 0 1 1 +INVADERS 1 0 1 1 +INVADED 1 0 1 1 +INTRODUCTION 1 0 1 1 +INTRODUCING 1 0 1 1 +INTRODUCES 1 0 1 1 +INTRODUCED 1 0 1 1 +INTOXICATED 2 0 2 2 +INTOLERABLE 1 0 1 1 +INTIMATES 1 0 1 1 +INTERVIEWS 1 0 1 1 +INTERVAL 3 0 3 3 +INTERRUPTED 1 0 1 1 +INTERRED 2 0 2 2 +INTERPRETATION 1 0 1 1 +INTERPOLATIONS 1 0 1 1 +INTERNATIONAL 1 0 1 1 +INTERNAL 3 0 3 3 +INTERMISSION 1 0 1 1 +INTERMENT 2 0 2 2 +INTERMEDDLING 1 0 1 1 +INTERFERENCE 1 0 1 1 +INTERFERE 1 0 1 1 +INTERESTING 6 0 6 6 +INTERESTED 3 0 3 3 +INTERCOURSE 1 0 1 1 +INTERCHANGE 1 0 1 1 +INTENTIONALLY 2 0 2 2 +INTENTION 4 0 4 4 +INTENSITY 1 0 1 1 +INTENSELY 2 0 2 2 +INTENDED 5 0 5 5 +INTELLIGENT 2 0 2 2 +INTELLIGENCE 2 0 2 2 +INTELLECT 1 0 1 1 +INSULTED 1 0 1 1 +INSUFFICIENT 1 0 1 1 +INSTRUMENTS 4 0 4 4 +INSTRUCTIONS 1 0 1 1 +INSTITUTIONS 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCTS 1 0 1 1 +INSTINCT 3 0 3 3 +INSTEAD 4 0 4 4 +INSTANT'S 1 0 1 1 +INSTANT 5 0 5 5 +INSTANCE 1 0 1 1 +INSPIRES 1 0 1 1 +INSPIRATION 3 0 3 3 +INSOLUBLE 1 0 1 1 +INSOLENT 1 0 1 1 +INSISTING 1 0 1 1 +INSISTED 2 0 2 2 +INSIST 1 0 1 1 +INSINUATING 1 0 1 1 +INSHALLAH 1 0 1 1 +INSECURITY 1 0 1 1 +INSCRIPTION 1 0 1 1 +INSANE 1 0 1 1 +INQUISITION 1 0 1 1 +INQUIRIES 1 0 1 1 +INQUIRED 5 0 5 5 +INNOCENT 3 0 3 3 +INNKEEPER 2 0 2 2 +INJURIES 1 0 1 1 +INJURED 1 0 1 1 +INIQUITIES 1 0 1 1 +INHERENT 1 0 1 1 +INHABITANTS 1 0 1 1 +INHABIT 1 0 1 1 +INGREDIENTS 1 0 1 1 +INFORMED 5 0 5 5 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENCES 1 0 1 1 +INFLUENCED 1 0 1 1 +INFLUENCE 10 0 10 10 +INFLICT 1 0 1 1 +INFLATE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRM 1 0 1 1 +INFINITELY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 3 0 3 3 +INFERENTIALLY 1 0 1 1 +INFAMY 2 0 2 2 +INEXORABLY 1 0 1 1 +INEVITABLE 1 0 1 1 +INELEGANTLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDUSTRIAL 1 0 1 1 +INDULGENT 1 0 1 1 +INDULGENCE 2 0 2 2 +INDUCED 1 0 1 1 +INDIVIDUALS 9 0 9 9 +INDIVIDUAL 1 0 1 1 +INDISPOSITION 1 0 1 1 +INDISCRETION 1 0 1 1 +INDIGNATION 1 0 1 1 +INDIFFERENT 2 0 2 2 +INDIFFERENCE 1 0 1 1 +INDICATIONS 2 0 2 2 +INDICATED 2 0 2 2 +INDIANS 2 0 2 2 +INDIANA 2 0 2 2 +INDIAN 1 0 1 1 +INDESCRIBABLE 1 0 1 1 +INDEPENDENT 2 0 2 2 +INDEPENDENCE 4 0 4 4 +INDECISION 1 0 1 1 +INCUR 1 0 1 1 +INCREDULOUSLY 1 0 1 1 +INCREDULITY 1 0 1 1 +INCREASING 2 0 2 2 +INCREASES 2 0 2 2 +INCREASED 5 0 5 5 +INCREASE 5 0 5 5 +INCORRECT 1 0 1 1 +INCONSISTENCY 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 2 0 2 2 +INCLUDE 1 0 1 1 +INCLINED 1 0 1 1 +INCLINATION 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENT 1 0 1 1 +INCARCERATING 1 0 1 1 +INASMUCH 1 0 1 1 +INANIMATE 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVISE 1 0 1 1 +IMPROVISATION 1 0 1 1 +IMPROVING 1 0 1 1 +IMPROVIDENT 1 0 1 1 +IMPRESSION 1 0 1 1 +IMPRECATIONS 1 0 1 1 +IMPRECATION 1 0 1 1 +IMPOSSIBLE 4 0 4 4 +IMPOSING 1 0 1 1 +IMPOSES 1 0 1 1 +IMPORTS 1 0 1 1 +IMPORTED 1 0 1 1 +IMPORTANT 1 0 1 1 +IMPORTANCE 3 0 3 3 +IMPLIES 1 0 1 1 +IMPLIED 2 0 2 2 +IMPLACABLE 1 0 1 1 +IMPIOUS 1 0 1 1 +IMPERTINENT 1 0 1 1 +IMPERSONAL 1 0 1 1 +IMPERIOUS 1 0 1 1 +IMPERATIVE 1 0 1 1 +IMPATIENTLY 2 0 2 2 +IMPATIENT 2 0 2 2 +IMMORTALS 1 0 1 1 +IMMEDIATELY 9 0 9 9 +IMMEDIATE 1 0 1 1 
+IMITATION 1 0 1 1 +IMITATE 1 0 1 1 +IMBECILE 1 0 1 1 +IMAGINED 1 0 1 1 +IMAGINE 6 0 6 6 +IMAGINATION 1 0 1 1 +IMAGINARY 1 0 1 1 +IMAGE 2 0 2 2 +ILLUSION 1 0 1 1 +ILLITERATE 1 0 1 1 +IGNORED 1 0 1 1 +IGNORANT 2 0 2 2 +IGNORANCE 1 0 1 1 +IGNOMY 1 0 1 1 +IDOLATRIES 1 0 1 1 +IDLE 1 0 1 1 +IDIOTIC 1 0 1 1 +IDIOT 1 0 1 1 +IDEAS 2 0 2 2 +IDEAL 1 0 1 1 +HYPOTHETICAL 1 0 1 1 +HYPODERMICALLY 1 0 1 1 +HYPODERMIC 1 0 1 1 +HYDROCHLORIC 2 0 2 2 +HUSTLED 1 0 1 1 +HUSTLE 1 0 1 1 +HUSKILY 1 0 1 1 +HUSH 1 0 1 1 +HUSBANDMEN 1 0 1 1 +HUSBAND'S 3 0 3 3 +HUSBAND 9 0 9 9 +HURRYING 3 0 3 3 +HURRY 1 0 1 1 +HURRIEDLY 3 0 3 3 +HURRIED 3 0 3 3 +HURRICANE 1 0 1 1 +HUNTED 2 0 2 2 +HUNT 1 0 1 1 +HUNGARY 1 0 1 1 +HUNG 2 0 2 2 +HUNDREDTH 1 0 1 1 +HUNDREDS 1 0 1 1 +HUNDRED 29 0 29 29 +HUMILIATIONS 1 0 1 1 +HUMILIATION 1 0 1 1 +HUMILIATED 1 0 1 1 +HUMBLY 1 0 1 1 +HUMBLE 1 0 1 1 +HUMAN 6 0 6 6 +HULLO 1 0 1 1 +HUGELY 1 0 1 1 +HUGE 3 0 3 3 +HOWL 1 0 1 1 +HOUSES 4 0 4 4 +HOTLY 1 0 1 1 +HOTEL 4 0 4 4 +HOT 5 0 5 5 +HOST 3 0 3 3 +HOSPITALITY 1 0 1 1 +HORSEBACK 1 0 1 1 +HORSE 10 0 10 10 +HORRIBLE 3 0 3 3 +HORNS 1 0 1 1 +HORN 1 0 1 1 +HORIZONTAL 1 0 1 1 +HORIZON 2 0 2 2 +HORACE 1 0 1 1 +HOPPER 1 0 1 1 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPEFUL 1 0 1 1 +HOPED 2 0 2 2 +HOOTED 1 0 1 1 +HOOK 1 0 1 1 +HONEYMOON 1 0 1 1 +HONEY 1 0 1 1 +HOMEWARD 1 0 1 1 +HOMELESS 1 0 1 1 +HOLLOW 2 0 2 2 +HOLES 2 0 2 2 +HOLDS 1 0 1 1 +HOLDING 6 0 6 6 +HITHERTO 2 0 2 2 +HITCH 1 0 1 1 +HIT 3 0 3 3 +HISTORY 4 0 4 4 +HISTORIANS 1 0 1 1 +HIRE 1 0 1 1 +HINTS 1 0 1 1 +HINTED 1 0 1 1 +HINT 3 0 3 3 +HINGES 1 0 1 1 +HINDER 1 0 1 1 +HIMSELF 52 0 52 52 +HILL 7 0 7 7 +HIGHWAYS 1 0 1 1 +HIGHLY 1 0 1 1 +HIGHEST 1 0 1 1 +HIGHER 1 0 1 1 +HIGGINS 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HESITATING 1 0 1 1 +HESITATED 2 0 2 2 +HERS 4 0 4 4 +HERO 1 0 1 1 +HERCULEAN 1 0 1 1 +HERBS 1 0 1 1 +HENRY 3 0 3 3 +HENCE 4 0 4 4 +HELSTONE 1 0 1 1 +HELPLESSLY 1 0 1 1 +HELPLESS 3 0 3 3 +HELP 17 0 17 17 +HELMET 2 0 2 2 +HELLO 1 0 1 1 +HELL 4 0 4 4 +HELD 13 0 13 13 +HEIR 2 0 2 2 +HEIGHT 1 0 1 1 +HEEL 1 0 1 1 +HEED 1 0 1 1 +HEDGE 1 0 1 1 +HEBREWS 4 0 4 4 +HEAVY 11 0 11 11 +HEAVIEST 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVEN'S 1 0 1 1 +HEAVEN 6 0 6 6 +HEAVE 1 0 1 1 +HEARTILY 1 0 1 1 +HEARTIEST 1 0 1 1 +HEARTED 2 0 2 2 +HEARING 2 0 2 2 +HEAP 2 0 2 2 +HEALTHY 1 0 1 1 +HEALTH 1 0 1 1 +HEADED 4 0 4 4 +HEADACHES 1 0 1 1 +HAY 1 0 1 1 +HAVEN'T 4 0 4 4 +HAUNT 3 0 3 3 +HAUNCHES 1 0 1 1 +HAUGHTINESS 1 0 1 1 +HATTON 1 0 1 1 +HATS 1 0 1 1 +HATREDS 1 0 1 1 +HATRED 2 0 2 2 +HATES 3 0 3 3 +HATED 3 0 3 3 +HATE 4 0 4 4 +HAT 3 0 3 3 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 1 0 1 1 +HASTEN 1 0 1 1 +HAST 7 0 7 7 +HASN'T 1 0 1 1 +HASHISH 1 0 1 1 +HARSHLY 2 0 2 2 +HARRY 3 0 3 3 +HARRISONVILLE 2 0 2 2 +HARNESSED 1 0 1 1 +HARMONY 1 0 1 1 +HARMLESS 1 0 1 1 +HARM 6 0 6 6 +HARK 1 0 1 1 +HARGRAVE 1 0 1 1 +HARBOR 1 0 1 1 +HAPPINESS 5 0 5 5 +HAPPIEST 1 0 1 1 +HAPPIER 3 0 3 3 +HAPPENS 1 0 1 1 +HAPPENED 10 0 10 10 +HAPPEN 3 0 3 3 +HANGING 1 0 1 1 +HANGED 1 0 1 1 +HANG 3 0 3 3 +HANDSOME 4 0 4 4 +HANDLED 1 0 1 1 +HANDKERCHIEF 3 0 3 3 +HANDING 1 0 1 1 +HANDIER 1 0 1 1 +HANDED 1 0 1 1 +HAMPERED 1 0 1 1 +HAMMERS 1 0 1 1 +HAMMER 1 0 1 1 +HALVES 1 0 1 1 +HALTING 1 0 1 1 +HALT 5 0 5 5 +HALLS 1 0 1 1 +HALFPENNY 1 0 1 1 +HALE 6 0 6 6 +HAIRED 1 0 1 1 +HAG 1 0 1 1 +HACK 1 0 1 1 +HABITUAL 1 0 1 1 +HABITS 2 0 2 2 +GUT 3 0 3 3 +GUSH 1 0 1 1 +GULPED 1 0 1 1 +GULF 1 0 1 1 +GUISE 1 0 1 1 +GUILTY 5 0 5 5 +GUILT 2 0 2 2 +GUIDE 3 0 3 3 +GUESSED 1 0 1 1 +GUARDS 1 0 1 1 +GUARDED 1 0 1 1 +GUARD 1 0 1 1 +GRUMBLED 2 0 2 2 +GRUFFISH 1 0 1 1 +GROWTH 1 0 1 1 
+GROWN 1 0 1 1 +GROUPS 6 0 6 6 +GROUP 3 0 3 3 +GROUNDS 1 0 1 1 +GROTTO 1 0 1 1 +GROOMED 1 0 1 1 +GROOM 1 0 1 1 +GRINNING 3 0 3 3 +GRINNED 2 0 2 2 +GRIN 2 0 2 2 +GRIMACED 1 0 1 1 +GRIEVING 1 0 1 1 +GREET 2 0 2 2 +GREENWOOD 1 0 1 1 +GREENHORNS 1 0 1 1 +GREEK 2 0 2 2 +GREATLY 1 0 1 1 +GREATEST 6 0 6 6 +GREATER 6 0 6 6 +GREASY 1 0 1 1 +GRAVITY 1 0 1 1 +GRAVES 1 0 1 1 +GRAVELLED 1 0 1 1 +GRATING 2 0 2 2 +GRATIFICATION 1 0 1 1 +GRATEFUL 2 0 2 2 +GRATED 1 0 1 1 +GRASS 2 0 2 2 +GRASP 2 0 2 2 +GRAPE 1 0 1 1 +GRANTING 1 0 1 1 +GRANT 2 0 2 2 +GRANDSON 1 0 1 1 +GRANDPAPA 1 0 1 1 +GRANDFATHER 2 0 2 2 +GRANDEUR 1 0 1 1 +GRANDDAUGHTER 1 0 1 1 +GRAINS 1 0 1 1 +GRAIN 4 0 4 4 +GRAFTON'S 1 0 1 1 +GRAFTON 1 0 1 1 +GRAFT 2 0 2 2 +GRADUALLY 1 0 1 1 +GRACIOUSLY 2 0 2 2 +GRACE 1 0 1 1 +GRABBED 2 0 2 2 +GOTTEN 1 0 1 1 +GOSLER 1 0 1 1 +GOOSE 1 0 1 1 +GOODNESS 5 0 5 5 +GOLFING 1 0 1 1 +GOLDFINCH 1 0 1 1 +GOD'S 3 0 3 3 +GNASHING 1 0 1 1 +GNARLED 1 0 1 1 +GLOWING 2 0 2 2 +GLOWED 3 0 3 3 +GLOVES 3 0 3 3 +GLOVE 1 0 1 1 +GLORY 2 0 2 2 +GLORIOUS 1 0 1 1 +GLORIFY 1 0 1 1 +GLOOMY 2 0 2 2 +GLOOM 1 0 1 1 +GLOATING 1 0 1 1 +GLISPIN'S 1 0 1 1 +GLINTING 1 0 1 1 +GLIMPSE 1 0 1 1 +GLIMMER 1 0 1 1 +GLIDING 1 0 1 1 +GLEAMED 1 0 1 1 +GLAZED 1 0 1 1 +GLANCING 1 0 1 1 +GLANCES 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 3 0 3 3 +GLADNESS 2 0 2 2 +GLADLY 1 0 1 1 +GLADDENEST 1 0 1 1 +GLADDENED 1 0 1 1 +GIVES 7 0 7 7 +GIRDLE 2 0 2 2 +GIMLET 1 0 1 1 +GILROY 1 0 1 1 +GIFTED 1 0 1 1 +GIANT'S 1 0 1 1 +GIANT 1 0 1 1 +GHOSTS 1 0 1 1 +GHOST 2 0 2 2 +GHASTLY 2 0 2 2 +GETTING 12 0 12 12 +GETS 3 0 3 3 +GERMS 1 0 1 1 +GERMAN 7 0 7 7 +GERM 1 0 1 1 +GEORGIA 1 0 1 1 +GEORGES 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 1 0 1 1 +GENTLE 1 0 1 1 +GENIUS 1 0 1 1 +GENIALLY 1 0 1 1 +GENEROUS 2 0 2 2 +GENEROSITY 1 0 1 1 +GENERATION 1 0 1 1 +GENERALLY 3 0 3 3 +GENERAL 7 0 7 7 +GEAR 2 0 2 2 +GAZING 2 0 2 2 +GAZED 3 0 3 3 +GAY 2 0 2 2 +GATHERING 2 0 2 2 +GATHER 1 0 1 1 +GATES 1 0 1 1 +GATE 4 0 4 4 +GASPED 2 0 2 2 +GASP 1 0 1 1 +GASHED 1 0 1 1 +GARNISHMENT 1 0 1 1 +GARMENTS 2 0 2 2 +GARLANDED 1 0 1 1 +GARLAND 1 0 1 1 +GARDEN 7 0 7 7 +GAPS 1 0 1 1 +GAP 1 0 1 1 +GANG 5 0 5 5 +GAMMER 1 0 1 1 +GAMESTER 1 0 1 1 +GAMBLING 3 0 3 3 +GAMBLERS 1 0 1 1 +GALLOPED 1 0 1 1 +GALLERY 1 0 1 1 +GALL 1 0 1 1 +GAIN 3 0 3 3 +GAILY 1 0 1 1 +GAIETY 1 0 1 1 +GAD'S 1 0 1 1 +GAD 1 0 1 1 +GABLE 1 0 1 1 +GABBLE 1 0 1 1 +G 1 0 1 1 +FUSS 2 0 2 2 +FURY 2 0 2 2 +FURTHEST 1 0 1 1 +FURNITURE 1 0 1 1 +FURNISHED 1 0 1 1 +FURNACE 1 0 1 1 +FURIOUS 2 0 2 2 +FUNNY 3 0 3 3 +FUMED 1 0 1 1 +FULLY 1 0 1 1 +FULFILLED 1 0 1 1 +FULFIL 1 0 1 1 +FUGITIVES 1 0 1 1 +FUEL 1 0 1 1 +FRY 1 0 1 1 +FRUITS 4 0 4 4 +FRUITLESS 1 0 1 1 +FRUIT 7 0 7 7 +FROWNED 1 0 1 1 +FROWN 1 0 1 1 +FROSTY 1 0 1 1 +FROST 1 0 1 1 +FRIGHTENED 3 0 3 3 +FRIGHTEN 1 0 1 1 +FRIENDSHIP 2 0 2 2 +FRIENDLINESS 1 0 1 1 +FRIEND'S 1 0 1 1 +FRIEND 14 0 14 14 +FRIDOLIN 1 0 1 1 +FRIDAY 2 0 2 2 +FRESHEST 1 0 1 1 +FRESH 5 0 5 5 +FRERE 1 0 1 1 +FREQUENTLY 2 0 2 2 +FREQUENT 2 0 2 2 +FREELY 2 0 2 2 +FREED 2 0 2 2 +FRAUD 1 0 1 1 +FRANTICALLY 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCS 6 0 6 6 +FRAME 1 0 1 1 +FRAGMENTS 1 0 1 1 +FOUNDATION 1 0 1 1 +FOSTER 3 0 3 3 +FORWARDS 3 0 3 3 +FORWARD 5 0 5 5 +FORTUNES 1 0 1 1 +FORTUNE 6 0 6 6 +FORTUNATELY 5 0 5 5 +FORTNIGHT 1 0 1 1 +FORTHWITH 1 0 1 1 +FORTH 4 0 4 4 +FORSOOTH 1 0 1 1 +FORMS 2 0 2 2 +FORMING 2 0 2 2 +FORMIDABLE 2 0 2 2 +FORMERLY 2 0 2 2 +FORGOTTEN 4 0 4 4 +FORGOT 7 0 7 7 +FORGIVE 2 0 2 2 +FORGETTING 1 0 1 1 +FORGET 2 0 2 2 +FORGERIES 1 0 1 1 +FOREVER 3 0 3 3 +FORETASTE 1 0 1 1 +FORESTERS 1 0 1 1 +FOREST 2 0 2 2 +FORESHADOWED 
1 0 1 1 +FORENOON 1 0 1 1 +FOREMOST 1 0 1 1 +FORELOCK 1 0 1 1 +FOREIGNERS 1 0 1 1 +FOREIGN 6 0 6 6 +FOREHEAD 4 0 4 4 +FOREFINGER 1 0 1 1 +FORBIDDEN 1 0 1 1 +FORBID 1 0 1 1 +FORBEARANCE 1 0 1 1 +FORBEAR 1 0 1 1 +FOOTSTEPS 1 0 1 1 +FOOTNOTE 1 0 1 1 +FOOLISH 3 0 3 3 +FOND 2 0 2 2 +FOLLY 1 0 1 1 +FOLLOWERS 3 0 3 3 +FOLLOWER 1 0 1 1 +FOLKS 3 0 3 3 +FOLDED 2 0 2 2 +FOLD 1 0 1 1 +FOGGY 1 0 1 1 +FOES 2 0 2 2 +FLYING 1 0 1 1 +FLUTTER 1 0 1 1 +FLUSHED 2 0 2 2 +FLUSH 1 0 1 1 +FLUNG 2 0 2 2 +FLUID 2 0 2 2 +FLOWERS 4 0 4 4 +FLOURISHING 1 0 1 1 +FLOURISHED 1 0 1 1 +FLOURISH 1 0 1 1 +FLOCKS 1 0 1 1 +FLOATED 1 0 1 1 +FLITTED 1 0 1 1 +FLIRTATION 1 0 1 1 +FLING 1 0 1 1 +FLINCH 1 0 1 1 +FLIGHT 5 0 5 5 +FLICK 1 0 1 1 +FLEW 1 0 1 1 +FLEECED 1 0 1 1 +FLEE 1 0 1 1 +FLED 4 0 4 4 +FLATTERY 1 0 1 1 +FLASK 1 0 1 1 +FLASHING 1 0 1 1 +FLASHED 1 0 1 1 +FLARING 1 0 1 1 +FLAPPING 1 0 1 1 +FLAMES 2 0 2 2 +FLAME 5 0 5 5 +FLAGRANT 1 0 1 1 +FLAGONS 1 0 1 1 +FLAGON 1 0 1 1 +FLAGGED 1 0 1 1 +FLAG 1 0 1 1 +FIXING 1 0 1 1 +FIXED 5 0 5 5 +FIX 1 0 1 1 +FITTING 2 0 2 2 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FISHER 2 0 2 2 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRING 2 0 2 2 +FIREPLACE 1 0 1 1 +FIREMAN 3 0 3 3 +FIREFLY 1 0 1 1 +FIRED 1 0 1 1 +FINS 1 0 1 1 +FINISHING 2 0 2 2 +FINISH 3 0 3 3 +FINGERS 1 0 1 1 +FINGERING 1 0 1 1 +FINGER 6 0 6 6 +FINEST 1 0 1 1 +FINE 10 0 10 10 +FINANCIAL 1 0 1 1 +FINALLY 6 0 6 6 +FIN 1 0 1 1 +FILTER 1 0 1 1 +FILMY 1 0 1 1 +FILLED 5 0 5 5 +FILL 4 0 4 4 +FIGURE 3 0 3 3 +FIGHTING 1 0 1 1 +FIGHT 5 0 5 5 +FIFTY 14 0 14 14 +FIERCE 2 0 2 2 +FIENDS 1 0 1 1 +FIENDISH 1 0 1 1 +FICKLE 2 0 2 2 +FEW 26 0 26 26 +FEVERISH 4 0 4 4 +FEVER 1 0 1 1 +FETTERS 1 0 1 1 +FETCHED 1 0 1 1 +FETCH 7 0 7 7 +FESTIVE 1 0 1 1 +FESTIVAL 1 0 1 1 +FEROCIOUS 1 0 1 1 +FENDER 1 0 1 1 +FENCED 1 0 1 1 +FENCE 4 0 4 4 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 2 0 2 2 +FEET 9 0 9 9 +FEELINGS 3 0 3 3 +FEEBLY 1 0 1 1 +FEEBLE 2 0 2 2 +FEE 1 0 1 1 +FEDERAL 1 0 1 1 +FED 1 0 1 1 +FEBRUARY 5 0 5 5 +FEATURES 1 0 1 1 +FEATHERS 1 0 1 1 +FEATHER 1 0 1 1 +FEARS 1 0 1 1 +FEARLESS 1 0 1 1 +FEARING 1 0 1 1 +FEARFUL 2 0 2 2 +FEARED 4 0 4 4 +FEAR 13 0 13 13 +FAVOURS 1 0 1 1 +FAVORABLE 1 0 1 1 +FATTY 1 0 1 1 +FATTER 1 0 1 1 +FATIGUE 2 0 2 2 +FATHERLY 1 0 1 1 +FATALLY 1 0 1 1 +FATAL 2 0 2 2 +FASTER 2 0 2 2 +FASHIONS 1 0 1 1 +FASHIONED 2 0 2 2 +FASHION 2 0 2 2 +FASCINATION 1 0 1 1 +FARTHEST 1 0 1 1 +FARTHER 3 0 3 3 +FARMS 1 0 1 1 +FARM 3 0 3 3 +FAREWELL 1 0 1 1 +FARED 1 0 1 1 +FANTASTIC 1 0 1 1 +FANS 1 0 1 1 +FANCIFUL 1 0 1 1 +FANCIED 1 0 1 1 +FANATICS 1 0 1 1 +FAMILIES 3 0 3 3 +FAMILIARITY 1 0 1 1 +FAMILIAR 2 0 2 2 +FAME 2 0 2 2 +FALSE 1 0 1 1 +FALLEN 1 0 1 1 +FAITHLESS 1 0 1 1 +FAITHFULLY 2 0 2 2 +FAITHFUL 3 0 3 3 +FAIRY 2 0 2 2 +FAIRLY 3 0 3 3 +FAINTNESS 1 0 1 1 +FAINTING 2 0 2 2 +FAINT 4 0 4 4 +FAILURES 3 0 3 3 +FAILURE 1 0 1 1 +FAILS 1 0 1 1 +FAIL 3 0 3 3 +FAGOTS 1 0 1 1 +FAGGOT 1 0 1 1 +FACTS 3 0 3 3 +FACTORIES 2 0 2 2 +FACTOR 1 0 1 1 +FACING 3 0 3 3 +FACES 2 0 2 2 +FABULOUS 1 0 1 1 +EYELIDS 1 0 1 1 +EYED 4 0 4 4 +EXTREMELY 4 0 4 4 +EXTREME 2 0 2 2 +EXTRAORDINARY 2 0 2 2 +EXTINGUISH 1 0 1 1 +EXTERNAL 2 0 2 2 +EXTENT 2 0 2 2 +EXTENSION 1 0 1 1 +EXTENDING 2 0 2 2 +EXTENDED 2 0 2 2 +EXTEMPORIZED 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSED 3 0 3 3 +EXPOSURE 1 0 1 1 +EXPOSES 1 0 1 1 +EXPLANATORY 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINING 1 0 1 1 +EXPLAIN 1 0 1 1 +EXPIATION 1 0 1 1 +EXPERIMENTS 1 0 1 1 +EXPERIMENTING 1 0 1 1 +EXPERIENCES 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPENSES 2 0 2 2 +EXPENSE 2 0 2 2 +EXPENDED 1 0 1 1 +EXPEDIENT 1 0 1 1 +EXPECTS 1 0 1 1 +EXPECT 4 0 
4 4 +EXOTIC 1 0 1 1 +EXIT 1 0 1 1 +EXISTS 1 0 1 1 +EXISTING 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 1 0 1 1 +EXIST 2 0 2 2 +EXHIBITED 4 0 4 4 +EXERTIONS 1 0 1 1 +EXERTING 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISES 1 0 1 1 +EXERCISE 3 0 3 3 +EXECUTIONER'S 2 0 2 2 +EXECUTION 2 0 2 2 +EXECUTE 1 0 1 1 +EXECRABLE 1 0 1 1 +EXCUSES 1 0 1 1 +EXCUSE 3 0 3 3 +EXCUSABLE 1 0 1 1 +EXCLAMATION 1 0 1 1 +EXCLAIMING 1 0 1 1 +EXCLAIM 1 0 1 1 +EXCITEMENT 4 0 4 4 +EXCITEDLY 2 0 2 2 +EXCITED 2 0 2 2 +EXCITE 1 0 1 1 +EXCITABILITY 1 0 1 1 +EXCHANGED 2 0 2 2 +EXCHANGE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONALLY 2 0 2 2 +EXCEPTION 1 0 1 1 +EXCELLENT 5 0 5 5 +EXCELLENCY 4 0 4 4 +EXCEEDINGLY 1 0 1 1 +EXCEEDING 3 0 3 3 +EXASPERATING 1 0 1 1 +EXAMPLE 3 0 3 3 +EXAMINE 2 0 2 2 +EXAMINATION 3 0 3 3 +EXALTED 1 0 1 1 +EXAGGERATE 1 0 1 1 +EXACTITUDE 2 0 2 2 +EXACT 1 0 1 1 +EVIDENTLY 4 0 4 4 +EVIDENT 3 0 3 3 +EVIDENCE 2 0 2 2 +EVERYWHERE 4 0 4 4 +EVERYTHING'S 1 0 1 1 +EVERYTHING 15 0 15 15 +EVERYBODY 6 0 6 6 +EVENTS 4 0 4 4 +EVENT 1 0 1 1 +EVENING 9 0 9 9 +EVE 2 0 2 2 +EVAPORATION 2 0 2 2 +EVAPORATING 1 0 1 1 +EVADED 1 0 1 1 +EUROPEAN 1 0 1 1 +EUROPE 1 0 1 1 +EUNUCH'S 1 0 1 1 +ETERNITY 1 0 1 1 +ESTRANGE 1 0 1 1 +ESTIMATES 1 0 1 1 +ESTEEM 3 0 3 3 +ESTATE 1 0 1 1 +ESTABLISHMENT 1 0 1 1 +ESTABLISHED 2 0 2 2 +ESTABLISH 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 1 0 1 1 +ESSENCE 1 0 1 1 +ESSAY 1 0 1 1 +ESQUIRES 1 0 1 1 +ESPECIAL 1 0 1 1 +ESCAPED 1 0 1 1 +ESCAPADE 1 0 1 1 +ERROR 2 0 2 2 +ERRATIC 1 0 1 1 +ERRANT 1 0 1 1 +ERECTS 1 0 1 1 +ERECTED 3 0 3 3 +ERECT 1 0 1 1 +EQUIVALENT 1 0 1 1 +EQUALLY 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLES 1 0 1 1 +EPISTLE 1 0 1 1 +ENVYING 1 0 1 1 +ENVY 3 0 3 3 +ENVIRONMENT 1 0 1 1 +ENVIOUS 1 0 1 1 +ENVIED 1 0 1 1 +ENVELOPE 1 0 1 1 +ENTREATY 1 0 1 1 +ENTREATINGLY 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTREATED 1 0 1 1 +ENTIRELY 3 0 3 3 +ENTHUSIASM 3 0 3 3 +ENTERTAINMENT 1 0 1 1 +ENTERTAINING 1 0 1 1 +ENTERTAIN 1 0 1 1 +ENTER 5 0 5 5 +ENTAILED 1 0 1 1 +ENRAGED 1 0 1 1 +ENLISTMENT 1 0 1 1 +ENJOYMENT 3 0 3 3 +ENGRAVED 1 0 1 1 +ENGLAND 3 0 3 3 +ENGAGEMENTS 1 0 1 1 +ENGAGEMENT 1 0 1 1 +ENGAGED 2 0 2 2 +ENGAGE 1 0 1 1 +ENERGY 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 2 0 2 2 +ENDURANCE 1 0 1 1 +ENDING 1 0 1 1 +ENCOURAGED 2 0 2 2 +ENCOUNTERED 1 0 1 1 +ENCOMPASSED 1 0 1 1 +ENCHANTMENT 2 0 2 2 +ENCHANTED 3 0 3 3 +ENCAMPMENT 1 0 1 1 +ENCAMPED 1 0 1 1 +EMPTY 8 0 8 8 +EMPTIES 1 0 1 1 +EMPTIED 2 0 2 2 +EMPRESSES 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPLOY 1 0 1 1 +EMPIRE 3 0 3 3 +EMPHATIC 2 0 2 2 +EMPHASIZE 1 0 1 1 +EMPERORS 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMIR 2 0 2 2 +EMERGED 1 0 1 1 +EMBROIDERY 1 0 1 1 +EMBRACES 1 0 1 1 +EMBRACED 1 0 1 1 +EMBARRASSMENT 1 0 1 1 +EMBARRASSED 1 0 1 1 +EMBARKED 2 0 2 2 +ELSIE'S 1 0 1 1 +ELSIE 1 0 1 1 +ELKINS 1 0 1 1 +ELEVENTH 1 0 1 1 +ELEVEN 4 0 4 4 +ELEVATION 1 0 1 1 +ELEPHANT 1 0 1 1 +ELEMENTS 1 0 1 1 +ELEGANT 1 0 1 1 +ELECTRIC 1 0 1 1 +ELECTED 2 0 2 2 +ELDEST 1 0 1 1 +ELBOWS 1 0 1 1 +ELBOWED 1 0 1 1 +ELAPSED 1 0 1 1 +ELAPSE 1 0 1 1 +EKED 1 0 1 1 +EJACULATED 1 0 1 1 +EIGHTEENTH 3 0 3 3 +EIGHTEEN 10 0 10 10 +EGYPT 5 0 5 5 +EGG 1 0 1 1 +EFFORTS 1 0 1 1 +EFFORT 4 0 4 4 +EFFECTS 2 0 2 2 +EFFECTIVE 1 0 1 1 +EELS 1 0 1 1 +EDWARD 1 0 1 1 +EDUCATION 2 0 2 2 +EDUCATED 1 0 1 1 +EDGES 1 0 1 1 +EDGE 2 0 2 2 +ECONOMIZE 1 0 1 1 +ECONOMICAL 1 0 1 1 +ECONOMIC 1 0 1 1 +ECHOES 1 0 1 1 +ECHOED 1 0 1 1 +ECCLESIASTICS 1 0 1 1 +EATING 2 0 2 2 +EAT 12 0 12 12 +EASY 9 0 9 9 +EASTERN 2 0 2 2 +EASIEST 1 0 1 1 +EASE 4 0 4 4 +EARTHEN 1 0 1 1 +EARNEST 9 0 9 9 +EARNED 1 0 1 1 +EARN 2 0 2 2 +EARLY 8 0 8 8 +EARLINESS 1 0 1 1 +EAGLE 4 
0 4 4 +EAGERLY 4 0 4 4 +EAGER 2 0 2 2 +DYING 6 0 6 6 +DWELT 1 0 1 1 +DWELLS 1 0 1 1 +DWELLINGS 1 0 1 1 +DWELLERS 1 0 1 1 +DWELL 1 0 1 1 +DWARF 2 0 2 2 +DUTY 10 0 10 10 +DUTIES 2 0 2 2 +DUSTY 1 0 1 1 +DUST 2 0 2 2 +DURING 20 0 20 20 +DURATION 2 0 2 2 +DUN 1 0 1 1 +DUE 5 0 5 5 +DU 1 0 1 1 +DRUNK 2 0 2 2 +DRUMS 1 0 1 1 +DRUGGED 2 0 2 2 +DROWNING 1 0 1 1 +DROWN 1 0 1 1 +DROVE 1 0 1 1 +DROUTH 1 0 1 1 +DROPS 1 0 1 1 +DROPPED 8 0 8 8 +DROOPING 2 0 2 2 +DRIVING 1 0 1 1 +DRIVEN 1 0 1 1 +DRIVE 5 0 5 5 +DRINKS 1 0 1 1 +DRINKING 4 0 4 4 +DRINKERS 2 0 2 2 +DRIFT 1 0 1 1 +DRIED 1 0 1 1 +DREW 7 0 7 7 +DRESSING 1 0 1 1 +DRESSES 1 0 1 1 +DRESS 1 0 1 1 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 4 0 4 4 +DREADFULLY 1 0 1 1 +DREAD 3 0 3 3 +DRAWING 9 0 9 9 +DRAT 1 0 1 1 +DRAMATIC 1 0 1 1 +DRAINS 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGONS 1 0 1 1 +DRAGON 1 0 1 1 +DRAGGED 1 0 1 1 +DRAG 1 0 1 1 +DOZEN 2 0 2 2 +DOWNSTAIRS 1 0 1 1 +DOWNS 2 0 2 2 +DOWER 1 0 1 1 +DOVES 1 0 1 1 +DOUBTS 2 0 2 2 +DOUBTLESS 2 0 2 2 +DOUBTFUL 1 0 1 1 +DOUBLE 5 0 5 5 +DOT 1 0 1 1 +DOORS 3 0 3 3 +DONOVAN'S 1 0 1 1 +DOMINION 1 0 1 1 +DOMINATES 1 0 1 1 +DOMED 1 0 1 1 +DOME 2 0 2 2 +DOLLARS 2 0 2 2 +DOINGS 1 0 1 1 +DOGGEDLY 1 0 1 1 +DODGING 1 0 1 1 +DIVISION 1 0 1 1 +DIVINE 1 0 1 1 +DIVIDES 1 0 1 1 +DIVIDED 3 0 3 3 +DIVERT 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBED 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRICTS 1 0 1 1 +DISTRIBUTED 1 0 1 1 +DISTRIBUTE 1 0 1 1 +DISTRACTED 2 0 2 2 +DISTINGUISH 2 0 2 2 +DISTINCTLY 1 0 1 1 +DISTINCTIVE 1 0 1 1 +DISTINCT 1 0 1 1 +DISTENDED 1 0 1 1 +DISTANT 5 0 5 5 +DISTANCES 1 0 1 1 +DISTANCE 3 0 3 3 +DISTAFF 1 0 1 1 +DISSIPATION 2 0 2 2 +DISSIMULATION 1 0 1 1 +DISSENTERING 1 0 1 1 +DISSENSIONS 2 0 2 2 +DISREGARDED 1 0 1 1 +DISPUTED 1 0 1 1 +DISPUTE 1 0 1 1 +DISPROVE 1 0 1 1 +DISPOSITION 2 0 2 2 +DISPOSAL 1 0 1 1 +DISPLEASED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 2 0 2 2 +DISPENSED 1 0 1 1 +DISPENSE 1 0 1 1 +DISMAL 1 0 1 1 +DISHONEST 1 0 1 1 +DISHES 7 0 7 7 +DISH 2 0 2 2 +DISGUST 1 0 1 1 +DISGRACE 4 0 4 4 +DISENTANGLE 1 0 1 1 +DISEASE 1 0 1 1 +DISCUSSIONS 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 3 0 3 3 +DISCRIMINATION 1 0 1 1 +DISCRETION 1 0 1 1 +DISCOVERY 3 0 3 3 +DISCOVERIES 1 0 1 1 +DISCOVERED 4 0 4 4 +DISCOVER 2 0 2 2 +DISCOURSES 1 0 1 1 +DISCOURAGEMENTS 1 0 1 1 +DISCONTENT 1 0 1 1 +DISCONCERTION 1 0 1 1 +DISCOMFORT 1 0 1 1 +DISCLOSURES 1 0 1 1 +DISCLOSE 1 0 1 1 +DISCLAIM 1 0 1 1 +DISCIPLINE 1 0 1 1 +DISCERNING 1 0 1 1 +DISAPPOINTED 2 0 2 2 +DISAPPEARS 1 0 1 1 +DISAPPEARED 5 0 5 5 +DISAPPEAR 1 0 1 1 +DISADVANTAGES 3 0 3 3 +DISADVANTAGEOUS 1 0 1 1 +DIRTY 2 0 2 2 +DIRK 2 0 2 2 +DIRECTLY 3 0 3 3 +DIRECTIONS 1 0 1 1 +DIRECTED 3 0 3 3 +DIRECT 2 0 2 2 +DIP 2 0 2 2 +DINSMORE 2 0 2 2 +DINNERS 1 0 1 1 +DINNER 6 0 6 6 +DINING 1 0 1 1 +DINERS 1 0 1 1 +DINE 2 0 2 2 +DIMPLED 1 0 1 1 +DIMLY 1 0 1 1 +DIMINISHED 1 0 1 1 +DIMINISH 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILAPIDATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGGERS 2 0 2 2 +DIGGER 10 0 10 10 +DIFFICULTY 7 0 7 7 +DIFFICULT 2 0 2 2 +DIFFERENT 7 0 7 7 +DIFFERENCES 1 0 1 1 +DIFFERENCE 7 0 7 7 +DIFFER 1 0 1 1 +DIED 13 0 13 13 +DICTATED 1 0 1 1 +DICE 1 0 1 1 +DIAMETER 1 0 1 1 +DIALOGUE 1 0 1 1 +DEVOURED 1 0 1 1 +DEVOTIONS 1 0 1 1 +DEVOTION 1 0 1 1 +DEVILS 2 0 2 2 +DEVIL 4 0 4 4 +DEVICE 1 0 1 1 +DEVELOPED 1 0 1 1 +DETECTIVE'S 1 0 1 1 +DETECTION 1 0 1 1 +DETECTED 1 0 1 1 +DETAILS 2 0 2 2 +DETAILED 3 0 3 3 +DESTROYS 1 0 1 1 +DESTROYER 1 0 1 1 +DESTROYED 4 0 4 4 +DESTROY 3 0 3 3 +DESTINED 1 0 1 1 +DESTINATION 1 0 1 1 +DESSERT 2 0 2 2 +DESPOTISM 2 0 2 2 +DESPOILED 1 0 1 1 
+DESPISED 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 2 0 2 2 +DESPAIR 2 0 2 2 +DESIRED 2 0 2 2 +DESIRABLE 1 0 1 1 +DESERVING 1 0 1 1 +DESERVES 1 0 1 1 +DESERVE 2 0 2 2 +DESERTING 1 0 1 1 +DESERTED 2 0 2 2 +DESERT 1 0 1 1 +DESCRIPTION 2 0 2 2 +DESCRIBED 1 0 1 1 +DESCRIBE 1 0 1 1 +DESCEND 1 0 1 1 +DERIVE 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRECATINGLY 1 0 1 1 +DEPOSITED 1 0 1 1 +DEPOSED 1 0 1 1 +DEPLORED 1 0 1 1 +DEPENDS 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 1 0 1 1 +DEPARTURE 2 0 2 2 +DEPARTMENT 2 0 2 2 +DEPARTING 1 0 1 1 +DEPARTED 3 0 3 3 +DENY 1 0 1 1 +DENOUNCED 1 0 1 1 +DENOTING 1 0 1 1 +DENIS 2 0 2 2 +DENIAL 1 0 1 1 +DEN 1 0 1 1 +DEMANDS 3 0 3 3 +DEMANDED 1 0 1 1 +DELUSION 2 0 2 2 +DELIVERY 1 0 1 1 +DELIVERER 1 0 1 1 +DELIVER 2 0 2 2 +DELIGHTFUL 2 0 2 2 +DELIGHTED 2 0 2 2 +DELIGHT 7 0 7 7 +DELICIOUSLY 1 0 1 1 +DELICATE 3 0 3 3 +DELIBERATELY 1 0 1 1 +DELAYED 1 0 1 1 +DELAY 3 0 3 3 +DEJECTION 1 0 1 1 +DEITY 1 0 1 1 +DEGREE 1 0 1 1 +DEGENERATING 1 0 1 1 +DEFYING 1 0 1 1 +DEFRAUD 1 0 1 1 +DEFORMED 2 0 2 2 +DEFINED 1 0 1 1 +DEFIANT 1 0 1 1 +DEFERENCE 1 0 1 1 +DEFENDING 1 0 1 1 +DEFENDERS 2 0 2 2 +DEFEAT 1 0 1 1 +DEEPLY 3 0 3 3 +DEEMED 1 0 1 1 +DECORATION 1 0 1 1 +DECLINED 1 0 1 1 +DECLARED 1 0 1 1 +DECKS 1 0 1 1 +DECK 6 0 6 6 +DECISION 3 0 3 3 +DECIDED 5 0 5 5 +DECIDE 2 0 2 2 +DECEPTION 1 0 1 1 +DECEMBER 2 0 2 2 +DECEIVED 5 0 5 5 +DECEIVE 1 0 1 1 +DECEASED 1 0 1 1 +DECAY 1 0 1 1 +DEBATED 1 0 1 1 +DEBATE 2 0 2 2 +DEATHS 1 0 1 1 +DEARER 1 0 1 1 +DEALT 2 0 2 2 +DEALER 1 0 1 1 +DEAF 1 0 1 1 +DAZED 1 0 1 1 +DAYLIGHT 2 0 2 2 +DAYBREAK 2 0 2 2 +DAY'S 1 0 1 1 +DAWNED 2 0 2 2 +DAWN 4 0 4 4 +DAVID 2 0 2 2 +DAUNTED 1 0 1 1 +DAUGHTER'S 1 0 1 1 +DAUGHTER 10 0 10 10 +DASHING 1 0 1 1 +DASHED 1 0 1 1 +DARTED 1 0 1 1 +DARKNESS 7 0 7 7 +DARING 1 0 1 1 +DARCY'S 1 0 1 1 +DARCY 6 0 6 6 +DANGERS 1 0 1 1 +DANGEROUS 2 0 2 2 +DANGER 11 0 11 11 +DANDY 1 0 1 1 +DANCE 2 0 2 2 +DAMPNESS 1 0 1 1 +DAMNED 1 0 1 1 +DAMES 1 0 1 1 +DAMASCUS 4 0 4 4 +DAM 1 0 1 1 +DADDY 1 0 1 1 +CYNTHIA 2 0 2 2 +CUTTER'S 1 0 1 1 +CUTTER 3 0 3 3 +CUSHION 1 0 1 1 +CURVED 2 0 2 2 +CURTAINS 2 0 2 2 +CURSES 1 0 1 1 +CURSED 2 0 2 2 +CURRENT 1 0 1 1 +CURRENCY 1 0 1 1 +CURIOUS 4 0 4 4 +CURED 1 0 1 1 +CURE 4 0 4 4 +CURATE 2 0 2 2 +CUPBOARD 2 0 2 2 +CULTURED 1 0 1 1 +CULTURE 1 0 1 1 +CULTIVATED 2 0 2 2 +CULT 1 0 1 1 +CUBITS 1 0 1 1 +CRYSTALLINE 1 0 1 1 +CRY 2 0 2 2 +CRUSHED 1 0 1 1 +CRUSADER 1 0 1 1 +CRUELTY 4 0 4 4 +CRUEL 4 0 4 4 +CRUDE 1 0 1 1 +CRUCIFIXION 9 0 9 9 +CROWNED 1 0 1 1 +CROWN 3 0 3 3 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSED 5 0 5 5 +CROOKED 1 0 1 1 +CROAKING 1 0 1 1 +CRITICS 1 0 1 1 +CRITICAL 2 0 2 2 +CRIPPLED 2 0 2 2 +CRIMSON 1 0 1 1 +CRIMINALS 1 0 1 1 +CRIMINAL 1 0 1 1 +CRIME 5 0 5 5 +CRICKETS 1 0 1 1 +CREPT 1 0 1 1 +CREEPY 1 0 1 1 +CREEPING 1 0 1 1 +CREDITS 3 0 3 3 +CREDIT 5 0 5 5 +CREATURES 2 0 2 2 +CREATURE 4 0 4 4 +CREATOR 4 0 4 4 +CREATIONS 1 0 1 1 +CREATING 1 0 1 1 +CREATED 3 0 3 3 +CREATE 3 0 3 3 +CREASES 1 0 1 1 +CREASED 1 0 1 1 +CREAKED 1 0 1 1 +CRAYFISH 3 0 3 3 +CRAWLED 2 0 2 2 +CRASHED 1 0 1 1 +CRASH 1 0 1 1 +CRAFT 1 0 1 1 +CRACKERS 1 0 1 1 +CRACKED 2 0 2 2 +COWARDS 1 0 1 1 +COWARD 1 0 1 1 +COVERING 1 0 1 1 +COVERED 5 0 5 5 +COVER 1 0 1 1 +COVE 1 0 1 1 +COUSINS 1 0 1 1 +COUSIN 10 0 10 10 +COURAGE 4 0 4 4 +COUPLETS 1 0 1 1 +COUPLE 2 0 2 2 +COUNTY 9 0 9 9 +COUNTESS 1 0 1 1 +COUNTER 1 0 1 1 +COUNT'S 2 0 2 2 +COUNSELLED 1 0 1 1 +COUGHING 2 0 2 2 +COUGH 3 0 3 3 +COTTONY 1 0 1 1 +COSTUME 1 0 1 1 +COSETTE 2 0 2 2 +CORSICAN 1 0 1 1 +CORRIDOR 2 0 2 2 +CORRESPONDENCE 1 0 1 1 +CORRECT 1 0 1 1 +CORPSES 1 0 1 1 +CORPSE 3 0 3 3 
+CORPORATIONS 1 0 1 1 +CORNERS 1 0 1 1 +CORDIAL 1 0 1 1 +COPY 1 0 1 1 +COOLNESS 2 0 2 2 +COOKING 1 0 1 1 +COOK 4 0 4 4 +CONVINCING 1 0 1 1 +CONVINCED 1 0 1 1 +CONVICTION 3 0 3 3 +CONVEYANCE 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSATION 10 0 10 10 +CONVENTIONS 1 0 1 1 +CONVENTION 1 0 1 1 +CONVENT 4 0 4 4 +CONVENIENCES 1 0 1 1 +CONTRIVE 1 0 1 1 +CONTRARY 5 0 5 5 +CONTRADICTION 1 0 1 1 +CONTRACTED 1 0 1 1 +CONTRACT 3 0 3 3 +CONTINUED 11 0 11 11 +CONTINUE 3 0 3 3 +CONTINUATION 1 0 1 1 +CONTINUANCE 1 0 1 1 +CONTINGENT 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTION 1 0 1 1 +CONTENTED 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPORARY 2 0 2 2 +CONTAINS 1 0 1 1 +CONTAINING 2 0 2 2 +CONTAINED 1 0 1 1 +CONTAIN 1 0 1 1 +CONTAGIOUS 2 0 2 2 +CONTACT 3 0 3 3 +CONSUMED 2 0 2 2 +CONSULTED 3 0 3 3 +CONSULTATIONS 1 0 1 1 +CONSTRUCT 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTANTLY 3 0 3 3 +CONSTANTIUS 1 0 1 1 +CONSPIRATORS 2 0 2 2 +CONSPIRACY 1 0 1 1 +CONSORTED 1 0 1 1 +CONSOLES 1 0 1 1 +CONSISTS 2 0 2 2 +CONSISTENCY 1 0 1 1 +CONSISTED 1 0 1 1 +CONSIDERING 2 0 2 2 +CONSIDERED 3 0 3 3 +CONSIDERATION 2 0 2 2 +CONSIDERABLE 6 0 6 6 +CONSIDER 1 0 1 1 +CONSERVATIVE 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 1 0 1 1 +CONSENTED 1 0 1 1 +CONSENT 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIOUSLY 1 0 1 1 +CONSCIENTIOUS 1 0 1 1 +CONSCIENCES 1 0 1 1 +CONSCIENCE 3 0 3 3 +CONQUEST 3 0 3 3 +CONQUEROR 1 0 1 1 +CONQUERING 1 0 1 1 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNOISSEUR 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 4 0 4 4 +CONNECTED 1 0 1 1 +CONNECT 2 0 2 2 +CONJECTURES 1 0 1 1 +CONGRESSES 1 0 1 1 +CONGRESS 3 0 3 3 +CONGEALETH 1 0 1 1 +CONFUSION 4 0 4 4 +CONFOUND 1 0 1 1 +CONFLICT 2 0 2 2 +CONFISCATION 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 2 0 2 2 +CONFINEMENT 1 0 1 1 +CONFIDENTIAL 1 0 1 1 +CONFIDENCE 3 0 3 3 +CONFESSION 4 0 4 4 +CONFESSED 2 0 2 2 +CONFESS 9 0 9 9 +CONFERRING 1 0 1 1 +CONFERENCE 1 0 1 1 +CONFECTIONS 1 0 1 1 +CONFECTIONER 1 0 1 1 +CONDUCTED 2 0 2 2 +CONDUCT 4 0 4 4 +CONDITIONS 4 0 4 4 +CONDITION 4 0 4 4 +CONDESCEND 1 0 1 1 +CONDEMNED 2 0 2 2 +CONCLUSION 2 0 2 2 +CONCILIATE 1 0 1 1 +CONCIERGE'S 1 0 1 1 +CONCERNS 1 0 1 1 +CONCERNING 1 0 1 1 +CONCERN 2 0 2 2 +CONCEPTION 5 0 5 5 +CONCENTRATED 2 0 2 2 +CONCENTRATE 1 0 1 1 +CONCEIVE 1 0 1 1 +CONCEITED 1 0 1 1 +CONCEAL 3 0 3 3 +COMTE 1 0 1 1 +COMRADE 3 0 3 3 +COMPULSORY 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 1 0 1 1 +COMPOSITION 1 0 1 1 +COMPOSED 1 0 1 1 +COMPLY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICITY 1 0 1 1 +COMPLETELY 6 0 6 6 +COMPLETED 1 0 1 1 +COMPLETE 1 0 1 1 +COMPLAIN 1 0 1 1 +COMPETITION 1 0 1 1 +COMPELLING 1 0 1 1 +COMPELLED 1 0 1 1 +COMPATRIOT 1 0 1 1 +COMPASS 1 0 1 1 +COMPARATIVELY 1 0 1 1 +COMPANY 13 0 13 13 +COMPANIONS 3 0 3 3 +COMPANION'S 1 0 1 1 +COMPANION 4 0 4 4 +COMMUNICATION 2 0 2 2 +COMMUNICATES 2 0 2 2 +COMMUNICATED 1 0 1 1 +COMMUNICANTS 1 0 1 1 +COMMONS 3 0 3 3 +COMMONLY 1 0 1 1 +COMMONERS 1 0 1 1 +COMMON 3 0 3 3 +COMMITTED 4 0 4 4 +COMMISSIONED 1 0 1 1 +COMMISSION 1 0 1 1 +COMMENDING 1 0 1 1 +COMMENDED 1 0 1 1 +COMMANDING 2 0 2 2 +COMMANDED 2 0 2 2 +COMMAND 2 0 2 2 +COMFORTABLE 2 0 2 2 +COMFORT 2 0 2 2 +COMBATIVE 1 0 1 1 +COMBAT 2 0 2 2 +COMB 1 0 1 1 +COLOURED 1 0 1 1 +COLOUR 2 0 2 2 +COLOSSAL 1 0 1 1 +COLOR 2 0 2 2 +COLONELS 1 0 1 1 +COLLECTOR'S 1 0 1 1 +COLLECTOR 1 0 1 1 +COLLECTION 1 0 1 1 +COLLECTING 2 0 2 2 +COLLAR 2 0 2 2 +COLIC 1 0 1 1 +COLCHESTER 5 0 5 5 +COIL 1 0 1 1 +COFFIN 20 0 20 20 +COFFEE 1 0 1 1 +COCKING 1 0 1 1 +COAST 2 0 2 2 +COACH 3 0 3 3 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUBBED 1 0 1 1 +CLUB 3 0 3 3 +CLOVER 1 0 1 1 +CLOUDS 1 
0 1 1 +CLOTHES 8 0 8 8 +CLOTHE 1 0 1 1 +CLOSING 2 0 2 2 +CLOSES 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 5 0 5 5 +CLOSED 4 0 4 4 +CLOISTER 3 0 3 3 +CLOGGED 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 2 0 2 2 +CLENCHING 1 0 1 1 +CLEMENT 1 0 1 1 +CLEMENCY 1 0 1 1 +CLEARLY 1 0 1 1 +CLEARER 1 0 1 1 +CLEAR 7 0 7 7 +CLEANED 2 0 2 2 +CLEAN 4 0 4 4 +CLASPED 1 0 1 1 +CLASP 1 0 1 1 +CLASHING 1 0 1 1 +CLAPPED 1 0 1 1 +CLANKING 1 0 1 1 +CLAIR 3 0 3 3 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CIVILIZED 1 0 1 1 +CIVILITY 1 0 1 1 +CIVILITIES 1 0 1 1 +CITY 16 0 16 16 +CITIZENS 6 0 6 6 +CITIZEN 1 0 1 1 +CIRCUMSTANTIAL 1 0 1 1 +CIRCUMSTANCES 6 0 6 6 +CIRCULAR 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLES 1 0 1 1 +CIRCLE 2 0 2 2 +CILLEY 1 0 1 1 +CIDER 1 0 1 1 +CHURCHYARDS 1 0 1 1 +CHUCKLED 3 0 3 3 +CHRYSIPPUS 2 0 2 2 +CHRISTMAS 1 0 1 1 +CHRIS'S 1 0 1 1 +CHOSEN 3 0 3 3 +CHOSE 2 0 2 2 +CHOP 1 0 1 1 +CHOKE 1 0 1 1 +CHOIR 2 0 2 2 +CHOICE 1 0 1 1 +CHIRP 1 0 1 1 +CHINESE 1 0 1 1 +CHIN 1 0 1 1 +CHIMNEY 7 0 7 7 +CHIMES 1 0 1 1 +CHILDREN 13 0 13 13 +CHILDLESS 1 0 1 1 +CHILDHOOD 1 0 1 1 +CHILD'S 1 0 1 1 +CHILD 6 0 6 6 +CHEWERS 2 0 2 2 +CHERISHED 1 0 1 1 +CHEFS 1 0 1 1 +CHEESE 1 0 1 1 +CHEERFULNESS 1 0 1 1 +CHEERFULLY 1 0 1 1 +CHEERFUL 4 0 4 4 +CHEEKED 1 0 1 1 +CHECKING 1 0 1 1 +CHECKED 1 0 1 1 +CHEATING 1 0 1 1 +CHEAPLY 1 0 1 1 +CHEAP 1 0 1 1 +CHATTING 1 0 1 1 +CHASSEUR 1 0 1 1 +CHASM 1 0 1 1 +CHASED 1 0 1 1 +CHARMS 1 0 1 1 +CHARMING 3 0 3 3 +CHARM 3 0 3 3 +CHARLIE 1 0 1 1 +CHARLES 2 0 2 2 +CHARITY 1 0 1 1 +CHARIOT 1 0 1 1 +CHARGES 2 0 2 2 +CHARGER 1 0 1 1 +CHARCOAL 1 0 1 1 +CHARACTER 6 0 6 6 +CHAPTERS 1 0 1 1 +CHAPS 1 0 1 1 +CHAPLET 1 0 1 1 +CHAPEL 6 0 6 6 +CHAP 1 0 1 1 +CHANTED 1 0 1 1 +CHANT 1 0 1 1 +CHANNEL 3 0 3 3 +CHANGING 2 0 2 2 +CHANGED 4 0 4 4 +CHANCELLOR'S 1 0 1 1 +CHANCELLOR 6 0 6 6 +CHAMPIONS 1 0 1 1 +CHAMPAGNE 1 0 1 1 +CHAMBERLAIN 6 0 6 6 +CHAMBER 5 0 5 5 +CHAIR 5 0 5 5 +CETERA 2 0 2 2 +CESSATION 1 0 1 1 +CERTIFIED 1 0 1 1 +CERTAINLY 13 0 13 13 +CEREMONY 1 0 1 1 +CENTURY 3 0 3 3 +CENTURIES 5 0 5 5 +CENTRES 1 0 1 1 +CENTRAL 6 0 6 6 +CENT 1 0 1 1 +CEMETERY 1 0 1 1 +CELLARS 1 0 1 1 +CELIA 1 0 1 1 +CELERY 1 0 1 1 +CEASED 7 0 7 7 +CAVALRY 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 +CAUSED 1 0 1 1 +CATS 2 0 2 2 +CATCHING 1 0 1 1 +CATCH 6 0 6 6 +CAT 3 0 3 3 +CASTRATO 2 0 2 2 +CASTING 2 0 2 2 +CASKET 1 0 1 1 +CASHIER 1 0 1 1 +CASES 2 0 2 2 +CARVED 3 0 3 3 +CARTHUSIANS 1 0 1 1 +CART 3 0 3 3 +CARS 1 0 1 1 +CARRY 7 0 7 7 +CARROT 1 0 1 1 +CARPET 1 0 1 1 +CARPENTER 1 0 1 1 +CAROLINA 1 0 1 1 +CARGO 1 0 1 1 +CAREWORN 2 0 2 2 +CARESSES 1 0 1 1 +CAREFULLY 3 0 3 3 +CAREFUL 4 0 4 4 +CARDS 1 0 1 1 +CARDINALS 1 0 1 1 +CARBONATE 1 0 1 1 +CARAVAN 1 0 1 1 +CAR 5 0 5 5 +CAPTURE 3 0 3 3 +CAPTOR 1 0 1 1 +CAPTIVE 3 0 3 3 +CAPTAIN'S 1 0 1 1 +CAPERING 1 0 1 1 +CAPERED 1 0 1 1 +CAPABLE 2 0 2 2 +CAPABILITIES 1 0 1 1 +CAP'S 1 0 1 1 +CANVAS 1 0 1 1 +CANST 1 0 1 1 +CANS 1 0 1 1 +CANOE 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLESTICK 2 0 2 2 +CANDLES 1 0 1 1 +CANDLE 3 0 3 3 +CANAL 1 0 1 1 +CAMPAIGNS 2 0 2 2 +CAMP 1 0 1 1 +CAMOUFLAGE 1 0 1 1 +CAMEL 2 0 2 2 +CALMLY 2 0 2 2 +CALLS 1 0 1 1 +CALLING 2 0 2 2 +CALIPH 1 0 1 1 +CALENDAR 1 0 1 1 +CALCULATE 1 0 1 1 +CAIRO 2 0 2 2 +CAFE 1 0 1 1 +CAESARS 1 0 1 1 +CADET 1 0 1 1 +CABLE'S 1 0 1 1 +CABIN 4 0 4 4 +CABARET 1 0 1 1 +BUYING 2 0 2 2 +BUTTERFLIES 1 0 1 1 +BUTTER 5 0 5 5 +BUSTED 2 0 2 2 +BUST 1 0 1 1 +BUSINESSES 1 0 1 1 +BUSHY 2 0 2 2 +BUSHES 1 0 1 1 +BURSTING 1 0 1 1 +BURNING 1 0 1 1 +BURNED 2 0 2 2 +BURN 1 0 1 1 +BURIED 7 0 7 7 +BURIAL 1 0 1 1 +BURGUNDY 1 0 1 1 +BUNKER 1 0 1 1 +BUNDLES 2 0 2 2 +BUNDLED 1 0 1 1 +BUMS 1 0 1 
1 +BULLOCK 1 0 1 1 +BULLET 2 0 2 2 +BULB 1 0 1 1 +BUILT 1 0 1 1 +BUILDINGS 1 0 1 1 +BUILDING 3 0 3 3 +BUGLE 1 0 1 1 +BUGGY 2 0 2 2 +BUFF 1 0 1 1 +BUD 1 0 1 1 +BUCKLEY 1 0 1 1 +BUBBLES 1 0 1 1 +BRUTE 2 0 2 2 +BRUTALLY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSHED 2 0 2 2 +BRUISING 1 0 1 1 +BROTHERLY 1 0 1 1 +BROTH 1 0 1 1 +BRONZE 1 0 1 1 +BROKER'S 1 0 1 1 +BROKEN 10 0 10 10 +BROKE 7 0 7 7 +BROAD 3 0 3 3 +BRITISH 1 0 1 1 +BRINGING 3 0 3 3 +BRINGETH 2 0 2 2 +BRIM 1 0 1 1 +BRIGHT 5 0 5 5 +BRIGANDS 1 0 1 1 +BRIDE 3 0 3 3 +BRICKS 1 0 1 1 +BRICK 1 0 1 1 +BREWING 1 0 1 1 +BRETHREN 3 0 3 3 +BREED 1 0 1 1 +BRED 1 0 1 1 +BREATHING 1 0 1 1 +BREASTS 1 0 1 1 +BREAKS 3 0 3 3 +BREAKING 3 0 3 3 +BREAKFAST 4 0 4 4 +BREAKERS 1 0 1 1 +BREAK 6 0 6 6 +BREAD 3 0 3 3 +BREACH 1 0 1 1 +BRAVELY 2 0 2 2 +BRANDON 1 0 1 1 +BRANCHES 2 0 2 2 +BRANCH 3 0 3 3 +BRAG 1 0 1 1 +BRADFORD 1 0 1 1 +BRACKETS 1 0 1 1 +BOYS 8 0 8 8 +BOXES 2 0 2 2 +BOWL 1 0 1 1 +BOWED 2 0 2 2 +BOW 3 0 3 3 +BOURGEOIS 1 0 1 1 +BOUQUET 2 0 2 2 +BOUNTY 1 0 1 1 +BOUND 5 0 5 5 +BOULEVARD 1 0 1 1 +BOTTLES 1 0 1 1 +BOTTLE 4 0 4 4 +BOTHERED 1 0 1 1 +BOSOM 3 0 3 3 +BORROWED 1 0 1 1 +BORED 1 0 1 1 +BORE 2 0 2 2 +BORDERS 1 0 1 1 +BORDER 1 0 1 1 +BOOTY 1 0 1 1 +BOOTS 2 0 2 2 +BOOT 1 0 1 1 +BOOMED 2 0 2 2 +BOOKS 1 0 1 1 +BOOKLET 1 0 1 1 +BOOK 8 0 8 8 +BONNETS 2 0 2 2 +BONNET 1 0 1 1 +BONDAGE 2 0 2 2 +BOMB 1 0 1 1 +BOLTS 1 0 1 1 +BOLDER 1 0 1 1 +BOILING 3 0 3 3 +BOILER 1 0 1 1 +BOILED 2 0 2 2 +BOEUF 2 0 2 2 +BODY 13 0 13 13 +BODILY 3 0 3 3 +BODIES 2 0 2 2 +BOAT'S 1 0 1 1 +BOAST 1 0 1 1 +BOARDS 1 0 1 1 +BOARDING 1 0 1 1 +BOARD 5 0 5 5 +BLURTED 1 0 1 1 +BLUNTLY 1 0 1 1 +BLUNTED 1 0 1 1 +BLUBBERING 1 0 1 1 +BLOWS 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 2 0 2 2 +BLOSSOM 1 0 1 1 +BLOOM 1 0 1 1 +BLIZZARD'S 1 0 1 1 +BLIZZARD 2 0 2 2 +BLINDNESS 1 0 1 1 +BLINDING 1 0 1 1 +BLINDED 1 0 1 1 +BLIND 5 0 5 5 +BLEW 1 0 1 1 +BLESSINGS 1 0 1 1 +BLESSING 2 0 2 2 +BLESS 3 0 3 3 +BLEND 1 0 1 1 +BLEAK 1 0 1 1 +BLAZING 1 0 1 1 +BLANKLY 1 0 1 1 +BLANK 1 0 1 1 +BLAMING 1 0 1 1 +BLAMED 1 0 1 1 +BLAME 2 0 2 2 +BLADE 1 0 1 1 +BLACKSTONE 1 0 1 1 +BLACKGUARD 1 0 1 1 +BLACKBURN 2 0 2 2 +BITE 1 0 1 1 +BISHOPS 1 0 1 1 +BISHOP 4 0 4 4 +BISCUIT 1 0 1 1 +BIRTHPLACE 1 0 1 1 +BIRTHDAY 1 0 1 1 +BIRTH 1 0 1 1 +BIRD 1 0 1 1 +BIRCH 1 0 1 1 +BIND 1 0 1 1 +BILLS 2 0 2 2 +BILIOUS 1 0 1 1 +BIGGER 1 0 1 1 +BEYOND 7 0 7 7 +BEWILDERMENT 1 0 1 1 +BEWARE 2 0 2 2 +BETWIXT 1 0 1 1 +BETRAY 1 0 1 1 +BETOOK 1 0 1 1 +BETIDETH 1 0 1 1 +BESTOW 3 0 3 3 +BESS 1 0 1 1 +BESPAKE 1 0 1 1 +BESOUGHT 1 0 1 1 +BESIEGERS 2 0 2 2 +BESEECH 2 0 2 2 +BERNARDONE 1 0 1 1 +BERNARD 4 0 4 4 +BEQUEATH 2 0 2 2 +BENT 2 0 2 2 +BENJAMIN 1 0 1 1 +BENCH 3 0 3 3 +BELOW 2 0 2 2 +BELONG 1 0 1 1 +BELLS 4 0 4 4 +BELLIES 1 0 1 1 +BELIEVING 1 0 1 1 +BELIEVES 1 0 1 1 +BELIEVED 6 0 6 6 +BELIEVE 16 0 16 16 +BELIEF 5 0 5 5 +BEINGS 3 0 3 3 +BEHOLDING 1 0 1 1 +BEHOLD 5 0 5 5 +BEHIND 16 0 16 16 +BEHAVED 3 0 3 3 +BEHALF 1 0 1 1 +BEGUILED 1 0 1 1 +BEGINNING 6 0 6 6 +BEGGED 9 0 9 9 +BEGGAR 1 0 1 1 +BEFITTING 1 0 1 1 +BEFALLEN 1 0 1 1 +BEDOUIN 1 0 1 1 +BED 14 0 14 14 +BECOMES 6 0 6 6 +BECOME 15 0 15 15 +BECKY 1 0 1 1 +BECAUSE 34 0 34 34 +BEAVER 1 0 1 1 +BEAUTY 4 0 4 4 +BEAUTIFULLY 1 0 1 1 +BEAUTIFUL 8 0 8 8 +BEATEN 2 0 2 2 +BEAT 6 0 6 6 +BEASTS 4 0 4 4 +BEASTLY 1 0 1 1 +BEAST 2 0 2 2 +BEAR 8 0 8 8 +BEAMS 2 0 2 2 +BEAD 1 0 1 1 +BEACON 2 0 2 2 +BEACH 2 0 2 2 +BAY 1 0 1 1 +BATTLE 2 0 2 2 +BATTERY 1 0 1 1 +BATHING 1 0 1 1 +BATHED 1 0 1 1 +BASKING 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASIN 1 0 1 1 +BASER 1 0 1 1 +BASED 2 0 2 2 +BARS 5 0 5 5 +BARRIER 1 0 1 1 +BARRICADES 1 0 1 1 +BARRED 1 0 
1 1 +BARRACK 2 0 2 2 +BARONET 1 0 1 1 +BARKING 1 0 1 1 +BARK 1 0 1 1 +BARE 1 0 1 1 +BARBAROUS 1 0 1 1 +BARBARITY 1 0 1 1 +BAR 7 0 7 7 +BAPTIST 1 0 1 1 +BANQUET 3 0 3 3 +BANKER 1 0 1 1 +BANK 9 0 9 9 +BANDS 1 0 1 1 +BANDITS 1 0 1 1 +BANDIT 1 0 1 1 +BALSAM 1 0 1 1 +BALLOT 4 0 4 4 +BALE 1 0 1 1 +BAKING 1 0 1 1 +BAIL 1 0 1 1 +BAH 1 0 1 1 +BAGS 2 0 2 2 +BAGGY 1 0 1 1 +BAGGAGE 1 0 1 1 +BADLY 3 0 3 3 +BADGE 1 0 1 1 +BACON 1 0 1 1 +BACKGROUND 3 0 3 3 +BACHELOR 1 0 1 1 +BABYLONIA 1 0 1 1 +BABES 1 0 1 1 +B 3 0 3 3 +AZURE 1 0 1 1 +AWOKE 1 0 1 1 +AWKWARDNESS 1 0 1 1 +AWKWARDLY 1 0 1 1 +AWFUL 4 0 4 4 +AWE 1 0 1 1 +AWARE 2 0 2 2 +AWAKENING 2 0 2 2 +AWAKENED 1 0 1 1 +AWAITS 1 0 1 1 +AWAITED 1 0 1 1 +AWAIT 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 4 0 4 4 +AVERAGE 2 0 2 2 +AUTOMATICALLY 1 0 1 1 +AUTOCRACY 1 0 1 1 +AUTHORITY 10 0 10 10 +AUTHORITIES 1 0 1 1 +AUTHOR 1 0 1 1 +AUTHENTIC 1 0 1 1 +AUSTRIA 1 0 1 1 +AUSPICIOUS 3 0 3 3 +AUGMENTED 1 0 1 1 +AUGHT 2 0 2 2 +AUDACIOUS 1 0 1 1 +ATTRACTIVE 3 0 3 3 +ATTRACTED 2 0 2 2 +ATTORNEY 1 0 1 1 +ATTENTIVELY 4 0 4 4 +ATTENDING 1 0 1 1 +ATTEMPTING 1 0 1 1 +ATTEMPT 2 0 2 2 +ATTAINED 1 0 1 1 +ATTACKS 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 2 0 2 2 +ATTACHMENT 1 0 1 1 +ATTACHED 1 0 1 1 +ATMOSPHERE 1 0 1 1 +ATE 1 0 1 1 +ASUNDER 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHED 1 0 1 1 +ASSYRIAN 2 0 2 2 +ASSUREDLY 1 0 1 1 +ASSURE 8 0 8 8 +ASSURANCE 2 0 2 2 +ASSOCIATIONS 1 0 1 1 +ASSOCIATES 1 0 1 1 +ASSISTED 1 0 1 1 +ASSISTANT 1 0 1 1 +ASSISTANCE 3 0 3 3 +ASSIST 3 0 3 3 +ASSERT 2 0 2 2 +ASSEMBLY 3 0 3 3 +ASSEMBLED 2 0 2 2 +ASSEMBLAGE 1 0 1 1 +ASSAULT 1 0 1 1 +ASSASSINATED 1 0 1 1 +ASSAILED 1 0 1 1 +ASS 3 0 3 3 +ASPECT 1 0 1 1 +ASKING 5 0 5 5 +ASIDE 5 0 5 5 +ASHLEY 5 0 5 5 +ASCERTAINING 1 0 1 1 +ASCERTAIN 1 0 1 1 +ASCENSION 1 0 1 1 +ARTS 1 0 1 1 +ARTISTS 4 0 4 4 +ARTICLES 1 0 1 1 +ARTFUL 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVAL 1 0 1 1 +ARRESTED 1 0 1 1 +ARRANGING 1 0 1 1 +ARRANGEMENTS 1 0 1 1 +ARMY 19 0 19 19 +ARMIES 2 0 2 2 +ARKADYEVITCH 1 0 1 1 +ARK 1 0 1 1 +ARISTOCRACY 1 0 1 1 +ARISE 1 0 1 1 +ARGUMENTS 3 0 3 3 +ARGUMENT 1 0 1 1 +ARGUED 1 0 1 1 +ARENA 1 0 1 1 +ARCHITECTURE 1 0 1 1 +ARCHBISHOPS 1 0 1 1 +ARABIC 1 0 1 1 +ARABIANS 1 0 1 1 +AQUA 1 0 1 1 +APTITUDE 1 0 1 1 +APRIL 1 0 1 1 +APPROVAL 1 0 1 1 +APPROACHING 1 0 1 1 +APPROACHED 3 0 3 3 +APPREHENSIONS 1 0 1 1 +APPOINTMENT 2 0 2 2 +APPLYING 1 0 1 1 +APPLY 3 0 3 3 +APPLAUSE 1 0 1 1 +APPETITE 2 0 2 2 +APPEARING 1 0 1 1 +APPEARANCE 7 0 7 7 +APPEAR 6 0 6 6 +APPEALS 1 0 1 1 +APPEALING 1 0 1 1 +APPEAL 1 0 1 1 +APPARITION 1 0 1 1 +APPARENT 1 0 1 1 +APOLOGY 1 0 1 1 +APERTURE 1 0 1 1 +APARTMENTS 3 0 3 3 +APARTMENT 1 0 1 1 +ANYWHERE 1 0 1 1 +ANYHOW 2 0 2 2 +ANYBODY 2 0 2 2 +ANXIOUS 3 0 3 3 +ANXIETY 5 0 5 5 +ANTONIO 1 0 1 1 +ANTIQUARIAN'S 1 0 1 1 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANSWERING 2 0 2 2 +ANSWER 15 0 15 15 +ANON 1 0 1 1 +ANNOYANCES 1 0 1 1 +ANNOYANCE 1 0 1 1 +ANNOUNCING 1 0 1 1 +ANNOUNCED 3 0 3 3 +ANNIHILATION 2 0 2 2 +ANNIHILATED 1 0 1 1 +ANNE 1 0 1 1 +ANIMATED 2 0 2 2 +ANIMATE 1 0 1 1 +ANIMALS 4 0 4 4 +ANGER 2 0 2 2 +ANEW 1 0 1 1 +ANDY 1 0 1 1 +ANDREW 2 0 2 2 +ANCIENTS 1 0 1 1 +ANCIENT 2 0 2 2 +ANCHOR 1 0 1 1 +AMUSING 1 0 1 1 +AMPLY 1 0 1 1 +AMPLE 1 0 1 1 +AMOUNT 1 0 1 1 +AMONGST 5 0 5 5 +AMONG 18 0 18 18 +AMMUNITION 1 0 1 1 +AMISS 1 0 1 1 +AMIABLE 1 0 1 1 +AMERICAN 1 0 1 1 +AMERICA 1 0 1 1 +AMENDS 1 0 1 1 +AMENDMENT 1 0 1 1 +AMENDED 1 0 1 1 +AMBITIOUS 1 0 1 1 +AMBITIONS 1 0 1 1 +AMBASSADOR 1 0 1 1 +ALTOGETHER 2 0 2 2 +ALTHOUGH 10 0 10 10 +ALTERED 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 10 0 10 10 +ALONGSIDE 1 0 1 1 +ALMOST 11 0 11 11 +ALMIGHTY 1 0 1 1 
+ALLOWING 1 0 1 1 +ALLOWANCES 1 0 1 1 +ALLIANCE 1 0 1 1 +ALLEY 1 0 1 1 +ALLAH'S 1 0 1 1 +ALLAH 9 0 9 9 +ALIVE 2 0 2 2 +ALIMONY 2 0 2 2 +ALIMENTARY 1 0 1 1 +ALIKE 2 0 2 2 +ALI 4 0 4 4 +ALEX 1 0 1 1 +ALBERT'S 3 0 3 3 +ALAS 1 0 1 1 +ALARMS 1 0 1 1 +ALARM 3 0 3 3 +ALABAMA 1 0 1 1 +AIM 4 0 4 4 +AILS 1 0 1 1 +AILMENTS 2 0 2 2 +AHEAD 2 0 2 2 +AGREES 1 0 1 1 +AGREEMENT 2 0 2 2 +AGREED 6 0 6 6 +AGREEABLE 2 0 2 2 +AGREE 1 0 1 1 +AGONY 1 0 1 1 +AGO 10 0 10 10 +AGITATOR 1 0 1 1 +AGITATION 1 0 1 1 +AGITATING 1 0 1 1 +AGILITY 1 0 1 1 +AGHAST 1 0 1 1 +AGGRESSIVENESS 1 0 1 1 +AGGRAVATIONS 1 0 1 1 +AGGRAVATED 1 0 1 1 +AGES 1 0 1 1 +AGENT 3 0 3 3 +AGED 2 0 2 2 +AGE 4 0 4 4 +AGAINST 27 0 27 27 +AGAIN 56 0 56 56 +AFTERNOON 6 0 6 6 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFORESAID 1 0 1 1 +AFFORD 2 0 2 2 +AFFLICTION 1 0 1 1 +AFFIRMED 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATELY 1 0 1 1 +AFFECTED 2 0 2 2 +AFFAIRS 2 0 2 2 +AFFAIR 2 0 2 2 +ADVISEDLY 1 0 1 1 +ADVISED 1 0 1 1 +ADVISE 1 0 1 1 +ADVICE 2 0 2 2 +ADVENTURE 3 0 3 3 +ADVENT 1 0 1 1 +ADVANTAGES 2 0 2 2 +ADVANTAGE 4 0 4 4 +ADVANCING 1 0 1 1 +ADVANCES 2 0 2 2 +ADVANCED 1 0 1 1 +ADVANCE 5 0 5 5 +ADRIFT 1 0 1 1 +ADORNED 1 0 1 1 +ADORN 1 0 1 1 +ADORED 1 0 1 1 +ADMITTED 4 0 4 4 +ADMIT 1 0 1 1 +ADMISSION 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 5 0 5 5 +ADMIRABLE 1 0 1 1 +ADMINISTRATION 3 0 3 3 +ADMINISTERED 1 0 1 1 +ADJACENT 1 0 1 1 +ADHERENT 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 1 0 1 1 +ADDRESS 3 0 3 3 +ADDITION 1 0 1 1 +ADAGE 1 0 1 1 +ACUTE 2 0 2 2 +ACTS 4 0 4 4 +ACTORS 1 0 1 1 +ACTIVITIES 1 0 1 1 +ACTIONS 2 0 2 2 +ACTION 2 0 2 2 +ACTING 2 0 2 2 +ACTED 1 0 1 1 +ACT 7 0 7 7 +ACROSS 8 0 8 8 +ACQUITTAL 1 0 1 1 +ACQUIT 1 0 1 1 +ACQUISITIVE 1 0 1 1 +ACQUIRED 2 0 2 2 +ACQUAINTANCES 1 0 1 1 +ACQUAINTANCE 2 0 2 2 +ACQUAINT 1 0 1 1 +ACKNOWLEDGMENT 1 0 1 1 +ACIDS 1 0 1 1 +ACID 4 0 4 4 +ACHIEVED 1 0 1 1 +ACHED 1 0 1 1 +ACE 1 0 1 1 +ACCUSTOMED 2 0 2 2 +ACCUSING 3 0 3 3 +ACCUSED 1 0 1 1 +ACCUSATION 4 0 4 4 +ACCURATE 1 0 1 1 +ACCOUNTS 2 0 2 2 +ACCOUNTED 1 0 1 1 +ACCORDINGLY 5 0 5 5 +ACCORDING 7 0 7 7 +ACCORDANCE 1 0 1 1 +ACCORD 1 0 1 1 +ACCOMPLISHMENTS 1 0 1 1 +ACCOMPLISHED 1 0 1 1 +ACCOMPLICE 2 0 2 2 +ACCOMPANY 1 0 1 1 +ACCOMPANIED 3 0 3 3 +ACCOMMODATING 1 0 1 1 +ACCIDENTS 1 0 1 1 +ACCIDENT 1 0 1 1 +ACCESSION 1 0 1 1 +ACCESSIBLE 1 0 1 1 +ACCESS 1 0 1 1 +ACCEPTED 4 0 4 4 +ACCEPTABLE 1 0 1 1 +ABYSSINIANS 2 0 2 2 +ABUSING 1 0 1 1 +ABUSES 2 0 2 2 +ABUSE 1 0 1 1 +ABUNDANTLY 1 0 1 1 +ABUNDANT 1 0 1 1 +ABSORBING 2 0 2 2 +ABSORB 1 0 1 1 +ABSOLUTELY 3 0 3 3 +ABSOLUTE 1 0 1 1 +ABSENTED 1 0 1 1 +ABSENT 1 0 1 1 +ABSENCE 4 0 4 4 +ABRUPTLY 3 0 3 3 +ABOLISH 1 0 1 1 +ABNORMAL 1 0 1 1 +ABLE 11 0 11 11 +ABILITY 1 0 1 1 +ABILITIES 1 0 1 1 +ABIDE 1 0 1 1 +ABBOT 1 0 1 1 +ABACK 1 0 1 1 +AARON 1 0 1 1 diff --git a/log/fast_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model-2023-04-04-09-21-03 b/log/fast_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model-2023-04-04-09-21-03 new file mode 100644 index 0000000000000000000000000000000000000000..006f416ccda0634dbeb65431d802ca8466847959 --- /dev/null +++ b/log/fast_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model-2023-04-04-09-21-03 @@ -0,0 +1,45 @@ +2023-04-04 09:21:03,151 INFO [decode.py:649] Decoding started +2023-04-04 09:21:03,151 INFO [decode.py:655] Device: cuda:0 +2023-04-04 09:21:03,214 INFO [decode.py:665] {'best_train_loss': inf, 
'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r7n04', 'IP address': '10.1.7.4'}, 'epoch': 30, 'iter': 0, 'avg': 9, 'use_averaged_model': True, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'lang_dir': PosixPath('data/lang_bpe_500'), 'decoding_method': 'fast_beam_search', 'beam_size': 4, 'beam': 20.0, 'ngram_lm_scale': 0.01, 'max_contexts': 4, 'max_states': 8, 'context_size': 2, 'max_sym_per_frame': 1, 'num_paths': 200, 'nbest_scale': 0.5, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'res_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2/fast_beam_search'), 'suffix': 'epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model', 'blank_id': 0, 'unk_id': 2, 'vocab_size': 500} +2023-04-04 09:21:03,214 INFO [decode.py:667] About to create model +2023-04-04 09:21:03,641 INFO [zipformer.py:405] At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-04-04 09:21:03,649 INFO [decode.py:738] Calculating the averaged model over epoch range from 21 (excluded) to 30 +2023-04-04 09:21:12,177 INFO [decode.py:772] Number of model parameters: 20697573 +2023-04-04 09:21:12,178 INFO [asr_datamodule.py:454] About to get test-clean cuts +2023-04-04 09:21:12,204 INFO [asr_datamodule.py:461] About to get test-other cuts +2023-04-04 09:21:21,894 INFO [decode.py:560] batch 0/?, cuts processed until now is 36 +2023-04-04 09:22:03,674 INFO [zipformer.py:2441] attn_weights_entropy = tensor([1.3765, 1.4193, 1.7228, 1.7642, 1.3093, 1.6687, 1.6459, 1.5270], + device='cuda:0'), covar=tensor([0.3476, 0.3824, 0.1703, 0.2398, 0.3968, 0.2215, 0.4112, 0.3021], + device='cuda:0'), in_proj_covar=tensor([0.0922, 0.0996, 0.0730, 0.0941, 0.0899, 0.0836, 0.0850, 0.0796], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-04 09:22:18,478 INFO [decode.py:560] batch 20/?, cuts processed until now is 1038 +2023-04-04 09:23:05,390 INFO [decode.py:560] batch 40/?, cuts processed until now is 2296 +2023-04-04 09:23:30,148 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/fast_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt +2023-04-04 09:23:30,221 INFO [utils.py:560] [test-clean-beam_20.0_max_contexts_4_max_states_8] %WER 3.57% [1879 / 52576, 218 ins, 142 del, 1519 sub ] +2023-04-04 09:23:30,377 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/fast_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt +2023-04-04 09:23:30,378 INFO [decode.py:599] +For test-clean, WER of different settings are: +beam_20.0_max_contexts_4_max_states_8 3.57 best for test-clean + +2023-04-04 09:23:33,849 INFO [decode.py:560] batch 0/?, cuts processed until now is 43 +2023-04-04 09:24:24,002 INFO [zipformer.py:2441] attn_weights_entropy = tensor([1.2127, 1.4455, 1.7780, 1.1393, 2.3796, 2.9278, 2.6542, 2.9877], + device='cuda:0'), covar=tensor([0.1591, 0.3740, 0.3306, 0.2715, 0.0603, 0.0200, 0.0263, 0.0375], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0327, 0.0358, 0.0267, 0.0247, 0.0189, 0.0215, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-04 09:24:25,372 INFO [decode.py:560] batch 20/?, cuts processed until now is 1198 +2023-04-04 09:24:38,009 INFO [zipformer.py:2441] attn_weights_entropy = tensor([1.5290, 1.4286, 1.4607, 1.8452, 1.4408, 1.7147, 1.6404, 1.6043], + device='cuda:0'), covar=tensor([0.0797, 0.0904, 0.0939, 0.0609, 0.0893, 0.0737, 0.0895, 0.0659], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0224, 0.0236, 0.0223, 0.0210, 0.0185, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-04 09:25:11,694 INFO [decode.py:560] batch 40/?, cuts processed until now is 2642 +2023-04-04 09:25:33,579 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/fast_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt +2023-04-04 09:25:33,661 INFO [utils.py:560] [test-other-beam_20.0_max_contexts_4_max_states_8] %WER 9.05% [4738 / 52343, 515 
ins, 457 del, 3766 sub ] +2023-04-04 09:25:33,838 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/fast_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt +2023-04-04 09:25:33,839 INFO [decode.py:599] +For test-other, WER of different settings are: +beam_20.0_max_contexts_4_max_states_8 9.05 best for test-other + +2023-04-04 09:25:33,839 INFO [decode.py:803] Done! diff --git a/log/fast_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..a2ce0868edb1d0e6011b1d22f1dffb21d3a0fea9 --- /dev/null +++ b/log/fast_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,5240 @@ +1089-134686-0000-1733: ref=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOUR', 'FATTENED', 'SAUCE'] +1089-134686-0000-1733: hyp=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOWER', 'FATTENED', 'SAUCE'] +1089-134686-0001-1734: ref=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0001-1734: hyp=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0002-1735: ref=['AFTER', 'EARLY', 'NIGHTFALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0002-1735: hyp=['AFTER', 'EARLY', 'NIGHT', 'FALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0003-1736: ref=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0003-1736: hyp=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0004-1737: ref=['NUMBER', 'TEN', 'FRESH', 'NELLY', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0004-1737: hyp=['NUMBER', 'TEN', 'FRESH', 'NELLIE', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0005-1738: ref=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0005-1738: hyp=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0006-1739: ref=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] +1089-134686-0006-1739: hyp=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] 
+1089-134686-0007-1740: ref=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0007-1740: hyp=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0008-1741: ref=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOUR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 'INDIFFERENT', 'KNOWLEDGE', 'OF', 'HIMSELF'] +1089-134686-0008-1741: hyp=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 'INDIFFERENT', 'KNOWLEDGE', 'OF', 'HIMSELF'] +1089-134686-0009-1742: ref=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0009-1742: hyp=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0010-1743: ref=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0010-1743: hyp=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0011-1744: ref=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODALITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0011-1744: hyp=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODELITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0012-1745: ref=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0012-1745: hyp=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0013-1746: ref=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0013-1746: hyp=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0014-1747: ref=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0014-1747: hyp=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0015-1748: ref=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOLROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0015-1748: hyp=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOL', 'ROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0016-1749: ref=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 
'CATECHISM', 'DEDALUS'] +1089-134686-0016-1749: hyp=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 'CATECHISM', 'DAEDALUS'] +1089-134686-0017-1750: ref=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0017-1750: hyp=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0018-1751: ref=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0018-1751: hyp=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0019-1752: ref=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0019-1752: hyp=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0020-1753: ref=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'AMASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0020-1753: hyp=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'A', 'MASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0021-1754: ref=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0021-1754: hyp=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0022-1755: ref=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 
'LAND'] +1089-134686-0022-1755: hyp=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 'LAND'] +1089-134686-0023-1756: ref=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0023-1756: hyp=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0024-1757: ref=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0024-1757: hyp=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0025-1758: ref=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0025-1758: hyp=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0026-1759: ref=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0026-1759: hyp=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0027-1760: ref=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0027-1760: hyp=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0028-1761: ref=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOUR', 'OF', 'SAINT', 'FRANCIS', 'XAVIER', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0028-1761: hyp=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOR', 'OF', 'SAINT', 'FRANCIS', 'ZEVIOUR', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0029-1762: ref=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0029-1762: hyp=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0030-1763: ref=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0030-1763: hyp=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0031-1764: ref=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0031-1764: hyp=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0032-1765: ref=['HE', 'IS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] 
+1089-134686-0032-1765: hyp=['HE', 'HAS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] +1089-134686-0033-1766: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0033-1766: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZAVIER'] +1089-134686-0034-1767: ref=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 'SHAKING', 'HIS', 'CLASPED', 'HANDS', 'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0034-1767: hyp=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 'SHAKING', 'HIS', 'CLASPED', 'HANDS', 'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0035-1768: ref=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0035-1768: hyp=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0036-1769: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0036-1769: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZEVIER'] +1089-134686-0037-1770: ref=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134686-0037-1770: hyp=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134691-0000-1707: ref=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0000-1707: hyp=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: ref=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: hyp=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0002-1709: ref=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0002-1709: hyp=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0003-1710: ref=['THE', 'UNIVERSITY'] +1089-134691-0003-1710: hyp=['THE', 'UNIVERSITY'] +1089-134691-0004-1711: ref=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0004-1711: hyp=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0005-1712: ref=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0005-1712: hyp=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HEARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0006-1713: ref=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0006-1713: hyp=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0007-1714: ref=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0007-1714: hyp=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0008-1715: ref=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 
'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 'WITH', 'EASE', 'AND', 'INDIFFERENCE', 'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0008-1715: hyp=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 'WITH', 'EASE', 'AND', 'INDIFFERENCE', 'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0009-1716: ref=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0009-1716: hyp=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0010-1717: ref=['BROTHER', 'MAC', 'ARDLE', 'BROTHER', 'KEOGH'] +1089-134691-0010-1717: hyp=['BROTHER', 'MICARTLE', 'BROTHER', 'KIEV'] +1089-134691-0011-1718: ref=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0011-1718: hyp=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0012-1719: ref=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0012-1719: hyp=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0013-1720: ref=['IDLE', 'AND', 'EMBITTERING', 'FINALLY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 
'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 'TO', 'LOVE', 'OUR', 'NEIGHBOUR', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0013-1720: hyp=['IDLE', 'AND', 'EMBITTERING', 'FINELY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 'TO', 'LOVE', 'OUR', 'NEIGHBOR', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0014-1721: ref=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'A', 'CHORD'] +1089-134691-0014-1721: hyp=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'ACCORD'] +1089-134691-0015-1722: ref=['WORDS', 'WAS', 'IT', 'THEIR', 'COLOURS'] +1089-134691-0015-1722: hyp=['WORDS', 'WAS', 'IT', 'THEIR', 'COLORS'] +1089-134691-0016-1723: ref=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0016-1723: hyp=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0017-1724: ref=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOODBEGIRT', 'AND', 'CITADELLED', 'AND', 'OF', 'ENTRENCHED', 'AND', 'MARSHALLED', 'RACES'] +1089-134691-0017-1724: hyp=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOOD', 'BEGIRT', 'AND', 'CITADELED', 'AND', 'OF', 'INTRENCHED', 'AND', 'MARSHALED', 'RACES'] +1089-134691-0018-1725: ref=['AGAIN', 'AGAIN'] +1089-134691-0018-1725: hyp=['AGAIN', 'AGAIN'] +1089-134691-0019-1726: ref=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0019-1726: hyp=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0020-1727: ref=['HELLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DEDALUS'] +1089-134691-0020-1727: hyp=['HALLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DAEDALUS'] +1089-134691-0021-1728: ref=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSEPLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0021-1728: hyp=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSE', 'PLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0022-1729: ref=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] +1089-134691-0022-1729: hyp=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] 
+1089-134691-0023-1730: ref=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0023-1730: hyp=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0024-1731: ref=['STEPHANOS', 'DEDALOS'] +1089-134691-0024-1731: hyp=['STEPHANOS', 'DELOS'] +1089-134691-0025-1732: ref=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAZEWRAPPED', 'CITY'] +1089-134691-0025-1732: hyp=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAYES', 'WRAPPED', 'CITY'] +1188-133604-0000-1771: ref=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBEIN', 'TURNER', 'AND', 'TINTORET', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0000-1771: hyp=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBINE', 'TURNER', 'AND', 'TINTARETTE', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0001-1772: ref=['THEY', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'CHIAROSCURISTS'] +1188-133604-0001-1772: hyp=['THEY', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'KIERRASCURISTS'] +1188-133604-0002-1773: ref=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CHIAROSCURIST', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0002-1773: hyp=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOUR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CUIRASCURISTS', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0003-1774: ref=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0003-1774: hyp=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0004-1775: ref=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 
'AT', 'THE', 'EDGE'] +1188-133604-0004-1775: hyp=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 'AT', 'THE', 'EDGE'] +1188-133604-0005-1776: ref=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', "CARPACCIO'S", 'ONE', 'OF', 'HIS', 'SERIES', 'OF', 'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0005-1776: hyp=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', 'CARPATIUS', 'ONE', 'OF', 'HIS', 'SERIES', 'OF', 'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0006-1777: ref=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0006-1777: hyp=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0007-1778: ref=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TWO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GRAY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0007-1778: hyp=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TOO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GREY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0008-1779: ref=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'RED', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'IN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0008-1779: hyp=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'READ', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'AN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0009-1780: ref=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'THAT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'INCLOSED', 'PROPERLY', 'AND', 'HARMONIZED', 'WITH', 'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0009-1780: hyp=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'AT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'ENCLOSED', 'PROPERLY', 'AND', 'HARMONIZE', 'WITH', 
'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0010-1781: ref=['BUT', 'IN', 'THIS', 'VIGNETTE', 'COPIED', 'FROM', 'TURNER', 'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0010-1781: hyp=['BUT', 'IN', 'THIS', 'VINEYARD', 'COPIED', 'FROM', 'TURNER', 'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0011-1782: ref=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAT', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVORITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0011-1782: hyp=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAN', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVORITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0012-1783: ref=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORIST', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0012-1783: hyp=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORLESS', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0013-1784: ref=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0013-1784: hyp=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0014-1785: ref=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOL', 'IS', 'AN', 'EASY', 'ONE'] +1188-133604-0014-1785: hyp=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOLS', 'AN', 'EASY', 'ONE'] +1188-133604-0015-1786: ref=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0015-1786: hyp=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0016-1787: ref=['THIS', 'AT', 'ONCE', 'COMPELS', 
'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', "JEWELER'S", 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0016-1787: hyp=['THIS', 'AT', 'ONCE', 'COMPELS', 'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', 'JEWELERS', 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0017-1788: ref=['THAT', 'A', 'STYLE', 'IS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0017-1788: hyp=['THAT', 'A', 'STYLE', 'WAS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0018-1789: ref=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0018-1789: hyp=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0019-1790: ref=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'AND', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0019-1790: hyp=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'AND', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0020-1791: ref=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0020-1791: hyp=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0021-1792: ref=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PURIST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0021-1792: hyp=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PUREST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0022-1793: ref=['YOU', 'MUST', 'LOOK', 
'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATHE', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0022-1793: hyp=['YOU', 'MUST', 'LOOK', 'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATH', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0023-1794: ref=['THE', 'COLORIST', 'SAYS', 'FIRST', 'OF', 'ALL', 'AS', 'MY', 'DELICIOUS', 'PAROQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'IS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0023-1794: hyp=['THE', 'CHOLERIST', 'SAYS', 'FIRST', 'OF', 'ALL', 'AS', 'MY', 'DELICIOUS', 'PARAQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'AS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0024-1795: ref=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0024-1795: hyp=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0025-1796: ref=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'AND', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0025-1796: hyp=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'AND', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0026-1797: ref=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'AND', 'THE', 'PYTHON'] +1188-133604-0026-1797: hyp=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATSOEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELLOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'IN', 'THE', 
'PYTHON'] +1188-133604-0027-1798: ref=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 'THE', 'DISTANCE'] +1188-133604-0027-1798: hyp=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 'THE', 'DISTANCE'] +1188-133604-0028-1799: ref=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0028-1799: hyp=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: ref=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: hyp=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'IN', 'FIRE'] +1188-133604-0030-1801: ref=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0030-1801: hyp=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0031-1802: ref=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0031-1802: hyp=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0032-1803: ref=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0032-1803: hyp=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0033-1804: ref=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0033-1804: hyp=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0034-1805: ref=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRYSTAL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0034-1805: hyp=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRISTEL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 
'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0035-1806: ref=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0035-1806: hyp=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0036-1807: ref=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 'DIGNIFIED', 'AND', 'BEAUTIFUL'] +1188-133604-0036-1807: hyp=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 'DIGNIFIED', 'AND', 'BEAUTIFUL'] +1188-133604-0037-1808: ref=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBEIN', 'OR', 'DUERER'] +1188-133604-0037-1808: hyp=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBINE', 'OR', 'DURE'] +1188-133604-0038-1809: ref=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0038-1809: hyp=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0039-1810: ref=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'AND', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0039-1810: hyp=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'IN', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0040-1811: ref=['THE', 'CRAMPNESS', 'AND', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0040-1811: hyp=['THE', 'CRAMPNESS', 'IN', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0041-1812: ref=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATERMILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0041-1812: hyp=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATER', 'MILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0042-1813: ref=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0042-1813: hyp=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0043-1814: ref=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0043-1814: hyp=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0044-1815: ref=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 
'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 'SHEEP', 'LIVE', 'THE', 'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'AND', 'THE', 'GIER', 'EAGLE'] +1188-133604-0044-1815: hyp=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 'SHEEP', 'LIVE', 'THE', 'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'IN', 'THE', 'GEAR', 'EAGLE'] +121-121726-0000-2558: ref=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0000-2558: hyp=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0001-2559: ref=['HARANGUE', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0001-2559: hyp=['HURRANG', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0002-2560: ref=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0002-2560: hyp=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0003-2561: ref=['HAY', 'FEVER', 'A', 'HEART', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0003-2561: hyp=['HEY', 'FEVER', 'A', 'HARD', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0004-2562: ref=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0004-2562: hyp=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0005-2563: ref=['HEDGE', 'A', 'FENCE'] +121-121726-0005-2563: hyp=['HEDGE', 'A', 'FENCE'] +121-121726-0006-2564: ref=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0006-2564: hyp=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0007-2565: ref=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0007-2565: hyp=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0008-2566: ref=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0008-2566: hyp=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0009-2567: ref=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0009-2567: hyp=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0010-2568: ref=['HOUSECLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0010-2568: hyp=['HOUSE', 'CLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0011-2569: ref=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0011-2569: hyp=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0012-2570: ref=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0012-2570: hyp=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0013-2571: ref=['TIED', 'TO', 'A', 'WOMAN'] 
+121-121726-0013-2571: hyp=['TIED', 'TO', 'A', 'WOMAN'] +121-121726-0014-2572: ref=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-121726-0014-2572: hyp=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-123852-0000-2615: ref=['THOSE', 'PRETTY', 'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOMETIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0000-2615: hyp=['THOSE', 'PRETTY', 'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOME', 'TIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0001-2616: ref=['AY', 'ME'] +121-123852-0001-2616: hyp=['I', 'ME'] +121-123852-0002-2617: ref=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', "REMOV'D", 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0002-2617: hyp=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', 'REMOVED', 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0003-2618: ref=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'THOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NOUGHT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGES', 'OF', "EITHER'S", 'WOE'] +121-123852-0003-2618: hyp=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'BOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NOT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGERS', 'OF', "EITHER'S", 'WOE'] +121-123852-0004-2619: ref=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', "PIERC'D", 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEA', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123852-0004-2619: hyp=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', 'PIERCED', 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEAD', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123859-0000-2573: ref=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', "STEEL'D", 'SENSE', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0000-2573: hyp=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', 'STEELED', 'SCENTS', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0001-2574: ref=['O', 
'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 'MY', 'SEEING', 'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEING', 'AND', 'TO', 'HIS', 'PALATE', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', "POISON'D", 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0001-2574: hyp=['OH', 'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 'MY', 'SEEING', 'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEN', 'AND', 'TO', 'HIS', 'PALLET', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', 'POISONED', 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0002-2575: ref=['BUT', 'RECKONING', 'TIME', 'WHOSE', "MILLION'D", 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', "SHARP'ST", 'INTENTS', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', "O'ER", 'INCERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0002-2575: hyp=['BUT', 'RECKONING', 'TIME', 'WHOSE', 'MILLIONED', 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', 'SHARPEST', 'INTENSE', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', 'OR', 'IN', 'CERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0003-2576: ref=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0003-2576: hyp=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0004-2577: ref=['SO', 'I', 'RETURN', "REBUK'D", 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-123859-0004-2577: hyp=['SO', 'I', 'RETURNED', 'REBUKED', 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-127105-0000-2578: ref=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0000-2578: hyp=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0001-2579: ref=['SOMEONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0001-2579: hyp=['SOME', 'ONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0002-2580: ref=['CRIED', 'ONE', 'OF', 'THE', 
'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0002-2580: hyp=['CRIED', 'ONE', 'OF', 'THE', 'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0003-2581: ref=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 'AFTER', 'WHICH', 'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0003-2581: hyp=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 'AFTER', 'WHICH', 'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0004-2582: ref=['THE', "STORY'S", 'WRITTEN'] +121-127105-0004-2582: hyp=['THE', 'STORIES', 'WRITTEN'] +121-127105-0005-2583: ref=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0005-2583: hyp=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0006-2584: ref=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0006-2584: hyp=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0007-2585: ref=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'OH', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0007-2585: hyp=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'OH', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0008-2586: ref=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'S"] +121-127105-0008-2586: hyp=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'"] +121-127105-0009-2587: ref=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0009-2587: hyp=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0010-2588: ref=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0010-2588: hyp=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0011-2589: ref=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0011-2589: hyp=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0012-2590: ref=['IT', "WASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0012-2590: hyp=['IT', "WASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0013-2591: ref=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0013-2591: hyp=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0014-2592: ref=['YOU', 'ARE', 'ACUTE'] +121-127105-0014-2592: hyp=['YOU', 'ARE', 'ACUTE'] 
+121-127105-0015-2593: ref=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0015-2593: hyp=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0016-2594: ref=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0016-2594: hyp=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0017-2595: ref=['IT', 'WAS', 'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0017-2595: hyp=['IT', 'WAS', 'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0018-2596: ref=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0018-2596: hyp=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0019-2597: ref=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0019-2597: hyp=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0020-2598: ref=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "MORE'S", 'THE', 'PITY', 'THEN'] +121-127105-0020-2598: hyp=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "MORE'S", 'THE', 'PITY', 'THEN'] +121-127105-0021-2599: ref=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0021-2599: hyp=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0022-2600: ref=['WELL', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0022-2600: hyp=['FOR', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0023-2601: ref=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0023-2601: hyp=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0024-2602: ref=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0024-2602: hyp=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0025-2603: ref=['THE', 
'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 'THEY', 'DEPARTED', 'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0025-2603: hyp=['THE', 'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 'THEY', 'DEPARTED', 'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0026-2604: ref=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0026-2604: hyp=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0027-2605: ref=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0027-2605: hyp=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0028-2606: ref=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0028-2606: hyp=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0029-2607: ref=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0029-2607: hyp=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0030-2608: ref=['I', "DON'T", 'ANTICIPATE'] +121-127105-0030-2608: hyp=['I', "DON'T", 'ANTICIPATE'] +121-127105-0031-2609: ref=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0031-2609: hyp=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0032-2610: ref=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0032-2610: hyp=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0033-2611: ref=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 
'IT'] +121-127105-0033-2611: hyp=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 'IT'] +121-127105-0034-2612: ref=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0034-2612: hyp=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0035-2613: ref=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 'MENTIONED', 'TO', 'ME', 'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0035-2613: hyp=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 'MENTIONED', 'TO', 'ME', 'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0036-2614: ref=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +121-127105-0036-2614: hyp=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +1221-135766-0000-1305: ref=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0000-1305: hyp=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0001-1306: ref=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONOURED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOR', 'EVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0001-1306: hyp=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONORED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOREVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0002-1307: ref=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0002-1307: hyp=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0003-1308: ref=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'CO', 'EXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0003-1308: hyp=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'COEXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 
'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0004-1309: ref=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 'OF', 'HER', 'INNER', 'LIFE'] +1221-135766-0004-1309: hyp=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 'OF', 'HER', 'INNER', 'LIFE'] +1221-135766-0005-1310: ref=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0005-1310: hyp=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0006-1311: ref=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0006-1311: hyp=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0007-1312: ref=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0007-1312: hyp=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0008-1313: ref=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0008-1313: hyp=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0009-1314: ref=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 'RULED', 'THE', 'MOMENT'] +1221-135766-0009-1314: hyp=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 
'ROLLED', 'THE', 'MOMENT'] +1221-135766-0010-1315: ref=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 'MALICIOUS', 'BUT', 'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0010-1315: hyp=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 'MALICIOUS', 'BUT', 'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0011-1316: ref=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0011-1316: hyp=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0012-1317: ref=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0012-1317: hyp=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0013-1318: ref=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INFANTILE', 'WORLD'] +1221-135766-0013-1318: hyp=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INVENTILE', 'WORLD'] +1221-135766-0014-1319: ref=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0014-1319: hyp=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0015-1320: ref=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135766-0015-1320: hyp=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135767-0000-1280: ref=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 'WORN', 'ON', 'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 
'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONOURABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0000-1280: hyp=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 'WORN', 'ON', 'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONORABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0001-1281: ref=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0001-1281: hyp=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0002-1282: ref=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0002-1282: hyp=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0003-1283: ref=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0003-1283: hyp=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0004-1284: ref=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 
'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 'ALREADY', 'OF', 'A', 'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0004-1284: hyp=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 'ALREADY', 'OF', 'A', 'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0005-1285: ref=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0005-1285: hyp=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0006-1286: ref=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0006-1286: hyp=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0007-1287: ref=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0007-1287: hyp=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0008-1288: ref=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0008-1288: hyp=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0009-1289: ref=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0009-1289: hyp=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0010-1290: ref=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 
'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0010-1290: hyp=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0011-1291: ref=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 'TO', 'THE', 'QUAINT', 'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0011-1291: hyp=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 'TO', 'THE', 'QUAINT', 'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0012-1292: ref=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0012-1292: hyp=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0013-1293: ref=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANT', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0013-1293: hyp=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANTS', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0014-1294: ref=['YEA', 'HIS', 'HONOURABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0014-1294: hyp=['YEA', 'HIS', 'HONORABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0015-1295: ref=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0015-1295: hyp=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0016-1296: ref=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIR', 'ESTATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0016-1296: hyp=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 
'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIREST', 'STATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0017-1297: ref=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 'TANKARD', 'AT', 'THE', 'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0017-1297: hyp=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 'TANKARD', 'AT', 'THE', 'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0018-1298: ref=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOUR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0018-1298: hyp=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0019-1299: ref=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK', 'LOOK'] +1221-135767-0019-1299: hyp=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK'] +1221-135767-0020-1300: ref=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0020-1300: hyp=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0021-1301: ref=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATURE', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0021-1301: hyp=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATURE', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0022-1302: ref=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALREADY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0022-1302: hyp=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALL', 'READY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 
'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0023-1303: ref=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 'BACK', 'OF', 'A', 'BULL'] +1221-135767-0023-1303: hyp=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 'BACK', 'OF', 'A', 'BULL'] +1221-135767-0024-1304: ref=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1221-135767-0024-1304: hyp=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1284-1180-0000-829: ref=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEE', 'PANTS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0000-829: hyp=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEEP', 'HANDS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0001-830: ref=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0001-830: hyp=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0002-831: ref=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURNOVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0002-831: hyp=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURN', 'OVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0003-832: ref=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0003-832: hyp=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0004-833: ref=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0004-833: hyp=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0005-834: ref=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANYONE', 'CAME', 'SO', 'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0005-834: hyp=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANY', 'ONE', 'CAME', 'SO', 
'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0006-835: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] +1284-1180-0006-835: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] +1284-1180-0007-836: ref=['HE', 'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THEIR', 'NEAREST', 'NEIGHBOR'] +1284-1180-0007-836: hyp=['HE', 'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THERE', 'NEAREST', 'NEIGHBOUR'] +1284-1180-0008-837: ref=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOON', 'UNC', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0008-837: hyp=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOONK', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0009-838: ref=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0009-838: hyp=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0010-839: ref=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'AND', 'A', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0010-839: hyp=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'INTO', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0011-840: ref=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0011-840: hyp=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0012-841: ref=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0012-841: hyp=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0013-842: ref=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0013-842: hyp=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0014-843: ref=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0014-843: hyp=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0015-844: ref=['WE', 'ARE', 'TRAVELING', 'REPLIED', 'OJO', 'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0015-844: hyp=['WE', 'ARE', 'TRAVELLING', 'REPLIED', 'OJO', 
'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0016-845: ref=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0016-845: hyp=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0017-846: ref=['AT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 'STEAMING', 'AT', 'A', 'GREAT', 'RATE'] +1284-1180-0017-846: hyp=['AT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 'STEAMING', 'AT', 'A', 'GREAT', 'RATE'] +1284-1180-0018-847: ref=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0018-847: hyp=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0019-848: ref=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MOMBI', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0019-848: hyp=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MUMBIE', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0020-849: ref=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'CAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0020-849: hyp=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'HAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0021-850: ref=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'MAY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0021-850: hyp=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'THEY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0022-851: ref=["I'M", 'AFRAID', 'I', "DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0022-851: hyp=["I'M", 'AFRAID', 'I', 
"DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0023-852: ref=['YOU', 'SEE', "I'VE", 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0023-852: hyp=['YOU', 'SEE', 'I', 'HAVE', 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0024-853: ref=['THAT', 'IS', 'ONE', 'REASON', 'YOU', 'ARE', 'OJO', 'THE', 'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'A', 'SYMPATHETIC', 'TONE'] +1284-1180-0024-853: hyp=['THAT', 'IS', 'ONE', 'REASON', 'YOU', 'ARE', 'OJO', 'THE', 'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'SYMPATHETIC', 'TONE'] +1284-1180-0025-854: ref=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOTTE', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0025-854: hyp=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOTTE', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0026-855: ref=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COOK', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0026-855: hyp=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COPE', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0027-856: ref=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0027-856: hyp=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0028-857: ref=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLORS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0028-857: hyp=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLLARS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0029-858: ref=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0029-858: hyp=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0030-859: ref=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLORS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 'MUNCHKINS', 'ARE'] +1284-1180-0030-859: hyp=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLOURS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 
'MUNCHKINS', 'ARE'] +1284-1180-0031-860: ref=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0031-860: hyp=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0032-861: ref=['I', 'WILL', 'SHOW', 'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] +1284-1180-0032-861: hyp=['I', 'WILL', 'SHOW', 'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] +1284-1181-0000-807: ref=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0000-807: hyp=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0001-808: ref=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0001-808: hyp=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0002-809: ref=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0002-809: hyp=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0003-810: ref=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0003-810: hyp=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0004-811: ref=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0004-811: hyp=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0005-812: ref=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0005-812: hyp=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0006-813: ref=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0006-813: hyp=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0007-814: ref=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0007-814: hyp=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0008-815: ref=['I', 'THINK', 
'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0008-815: hyp=['I', 'THINK', 'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0009-816: ref=['SHE', 'RAN', 'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0009-816: hyp=['SHE', 'RAN', 'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0010-817: ref=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0010-817: hyp=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0011-818: ref=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALL', 'TOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0011-818: hyp=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALTOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0012-819: ref=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0012-819: hyp=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0013-820: ref=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0013-820: hyp=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0014-821: ref=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0014-821: hyp=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0015-822: ref=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0015-822: hyp=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0016-823: ref=['I', 'AM', 'NOT', 'ALLOWED', 
'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0016-823: hyp=['I', 'AM', 'NOT', 'ALLOWED', 'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0017-824: ref=['THE', 'WIZARD', 'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0017-824: hyp=['THE', 'WIZARD', 'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0018-825: ref=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0018-825: hyp=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0019-826: ref=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0019-826: hyp=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0020-827: ref=['DEAR', 'ME', 'WHAT', 'A', 'CHATTERBOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'UNC', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0020-827: hyp=['DEAR', 'ME', 'WHAT', 'A', 'CHATTER', 'BOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'YOUNG', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0021-828: ref=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENTS'] +1284-1181-0021-828: hyp=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENT'] +1284-134647-0000-862: ref=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0000-862: hyp=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0001-863: ref=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0001-863: hyp=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0002-864: ref=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SECTS', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0002-864: hyp=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 
'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SEX', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0003-865: ref=['CONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0003-865: hyp=['KONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0004-866: ref=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELT', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0004-866: hyp=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELLED', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0005-867: ref=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'INFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0005-867: hyp=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'INFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0006-868: ref=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0006-868: hyp=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 
'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0007-869: ref=['PROSCRIBED', 'BY', 'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 'THE', 'EMPIRE', 'THE', 'DONATISTS', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'IN', 'NUMIDIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1284-134647-0007-869: hyp=['PROSCRIBED', 'BY', 'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 'THE', 'EMPIRE', 'THE', 'DONATIST', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'IN', 'NUMIDIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1320-122612-0000-120: ref=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'AND', 'THRIVING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0000-120: hyp=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'ENTHRIBING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0001-121: ref=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0001-121: hyp=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELLERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0002-122: ref=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0002-122: hyp=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0003-123: ref=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0003-123: hyp=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0004-124: ref=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHGOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0004-124: hyp=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHGOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0005-125: ref=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCAROONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0005-125: 
hyp=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCARONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0006-126: ref=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0006-126: hyp=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0007-127: ref=['CHINGACHGOOK', 'HAD', 'CAUGHT', 'THE', 'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 'BADE', 'HIM', 'SPEAK'] +1320-122612-0007-127: hyp=['CHINGACHOOK', 'HAD', 'CAUGHT', 'THE', 'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 'BADE', 'HIM', 'SPEAK'] +1320-122612-0008-128: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0008-128: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0009-129: ref=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0009-129: hyp=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0010-130: ref=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'HAIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0010-130: hyp=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'HAIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0011-131: ref=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLEW', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0011-131: hyp=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLUE', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0012-132: ref=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0012-132: hyp=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0013-133: ref=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0013-133: hyp=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 
'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0014-134: ref=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0014-134: hyp=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0015-135: ref=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 'THE', 'MOIST', 'ALLUVION'] +1320-122612-0015-135: hyp=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 'THE', 'MOIST', 'ALLUVIAN'] +1320-122612-0016-136: ref=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122612-0016-136: hyp=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122617-0000-78: ref=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0000-78: hyp=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0001-79: ref=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0001-79: hyp=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0002-80: ref=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BALAAM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SKEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0002-80: hyp=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BAYLIM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SCEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0003-81: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0003-81: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0004-82: ref=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 
'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0004-82: hyp=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0005-83: ref=['THE', 'BEAR', 'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0005-83: hyp=['THE', 'BEAR', 'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0006-84: ref=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0006-84: hyp=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0007-85: ref=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0007-85: hyp=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0008-86: ref=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0008-86: hyp=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0009-87: ref=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0009-87: hyp=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0010-88: ref=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0010-88: hyp=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0011-89: ref=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 
'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0011-89: hyp=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0012-90: ref=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0012-90: hyp=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0013-91: ref=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'IN', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0013-91: hyp=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'AND', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0014-92: ref=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJURER', 'TO', 'ENTER'] +1320-122617-0014-92: hyp=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJUROR', 'TO', 'ENTER'] +1320-122617-0015-93: ref=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEAT', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0015-93: hyp=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEED', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0016-94: ref=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0016-94: hyp=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0017-95: ref=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0017-95: hyp=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0018-96: ref=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSED', 'OF', 'COOKERY'] +1320-122617-0018-96: hyp=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSE', 'OF', 'COOKERY'] +1320-122617-0019-97: ref=['UNCAS', 
'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WITHES'] +1320-122617-0019-97: hyp=['UNCAS', 'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WIDTHS'] +1320-122617-0020-98: ref=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0020-98: hyp=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0021-99: ref=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THIS', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0021-99: hyp=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THE', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0022-100: ref=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THEY', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0022-100: hyp=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THE', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0023-101: ref=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0023-101: hyp=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0024-102: ref=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0024-102: hyp=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0025-103: ref=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0025-103: hyp=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0026-104: ref=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'IN', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0026-104: hyp=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'AND', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0027-105: ref=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0027-105: hyp=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 
'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0028-106: ref=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 'THE', 'GREATEST', 'STRAITS'] +1320-122617-0028-106: hyp=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 'THE', 'GREATEST', 'STRAITS'] +1320-122617-0029-107: ref=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSER', 'WILL', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0029-107: hyp=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSIBLE', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0030-108: ref=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0030-108: hyp=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0031-109: ref=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0031-109: hyp=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0032-110: ref=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0032-110: hyp=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0033-111: ref=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'AS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0033-111: hyp=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'AS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0034-112: ref=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 
'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 'REVENGE'] +1320-122617-0034-112: hyp=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 'REVENGE'] +1320-122617-0035-113: ref=['THEN', 'HEAVING', 'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0035-113: hyp=['THEN', 'HEAVING', 'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0036-114: ref=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'IS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0036-114: hyp=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'HAS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0037-115: ref=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0037-115: hyp=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0038-116: ref=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0038-116: hyp=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0039-117: ref=['THE', 'MOHICAN', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0039-117: hyp=['THE', 'MOHICANS', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0040-118: ref=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0040-118: 
hyp=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0041-119: ref=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 'PROPORTIONS'] +1320-122617-0041-119: hyp=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 'PROPORTIONS'] +1580-141083-0000-1949: ref=['I', 'WILL', 'ENDEAVOUR', 'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0000-1949: hyp=['I', 'WILL', 'ENDEAVOUR', 'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0001-1950: ref=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0001-1950: hyp=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0002-1951: ref=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0002-1951: hyp=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0003-1952: ref=['WITHOUT', 'HIS', 'SCRAPBOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0003-1952: hyp=['WITHOUT', 'HIS', 'SCRAP', 'BOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0004-1953: ref=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0004-1953: hyp=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0005-1954: ref=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0005-1954: hyp=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0006-1955: ref=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANNISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0006-1955: hyp=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0007-1956: 
ref=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPERS'] +1580-141083-0007-1956: hyp=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPER'] +1580-141083-0008-1957: ref=['THE', 'PROOF', 'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 'ALL', 'TOGETHER'] +1580-141083-0008-1957: hyp=['THE', 'PROOF', 'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 'ALL', 'TOGETHER'] +1580-141083-0009-1958: ref=['THE', 'ALTERNATIVE', 'WAS', 'THAT', 'SOMEONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0009-1958: hyp=['THEY', 'ALL', 'TURNED', 'OF', 'WAS', 'THAT', 'SOME', 'ONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0010-1959: ref=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0010-1959: hyp=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0011-1960: ref=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0011-1960: hyp=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0012-1961: ref=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0012-1961: hyp=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0013-1962: ref=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0013-1962: hyp=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0014-1963: ref=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0014-1963: hyp=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0015-1964: ref=['DID', 'ANYONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0015-1964: hyp=['DID', 'ANY', 'ONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0016-1965: ref=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0016-1965: hyp=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0017-1966: ref=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0017-1966: hyp=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0018-1967: ref=['NOW', 'MISTER', 'SOAMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0018-1967: hyp=['NOW', 'MISTER', 'SOLMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0019-1968: ref=['ABOVE', 'WERE', 
'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0019-1968: hyp=['ABOVE', 'WERE', 'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0020-1969: ref=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NECK', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] +1580-141083-0020-1969: hyp=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NET', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] +1580-141083-0021-1970: ref=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 'THE', 'ONE', 'PANE', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0021-1970: hyp=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 'THE', 'ONE', 'PANE', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0022-1971: ref=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0022-1971: hyp=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0023-1972: ref=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0023-1972: hyp=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0024-1973: ref=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0024-1973: hyp=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0025-1974: ref=['THE', 'MAN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0025-1974: hyp=['THE', 'MEN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0026-1975: ref=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOAMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0026-1975: hyp=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOLMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0027-1976: ref=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0027-1976: hyp=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0028-1977: ref=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0028-1977: hyp=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0029-1978: ref=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0029-1978: hyp=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0030-1979: ref=['MISTER', 'SOAMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] +1580-141083-0030-1979: hyp=['MISTER', 'SOLMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] 
+1580-141083-0031-1980: ref=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0031-1980: hyp=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0032-1981: ref=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] +1580-141083-0032-1981: hyp=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] +1580-141083-0033-1982: ref=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0033-1982: hyp=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0034-1983: ref=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0034-1983: hyp=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'AN', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0035-1984: ref=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HALLOA', "WHAT'S", 'THIS'] +1580-141083-0035-1984: hyp=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HULLO', 'WHAT', 'IS', 'THIS'] +1580-141083-0036-1985: ref=['HOLMES', 'HELD', 'IT', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0036-1985: hyp=['HOLMES', 'HUTTED', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0037-1986: ref=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0037-1986: hyp=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0038-1987: ref=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STAIR', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0038-1987: hyp=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STAIR', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0039-1988: ref=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0039-1988: hyp=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0040-1989: ref=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0040-1989: hyp=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0041-1990: ref=['LET', 'US', 'HEAR', 'THE', 
'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0041-1990: hyp=['LET', 'US', 'SEE', 'THE', 'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0042-1991: ref=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0042-1991: hyp=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'A', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0043-1992: ref=['THE', 'TOP', 'FLOOR', 'BELONGS', 'TO', 'MILES', 'MC', 'LAREN'] +1580-141083-0043-1992: hyp=['THE', 'TOP', 'FLOOR', 'BELONGS', 'TO', 'MYLES', 'MC', 'LAREN'] +1580-141083-0044-1993: ref=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0044-1993: hyp=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0045-1994: ref=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0045-1994: hyp=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0046-1995: ref=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0046-1995: hyp=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0047-1996: ref=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0047-1996: hyp=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0048-1997: ref=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0048-1997: hyp=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0049-1998: ref=['ANYONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0049-1998: hyp=['ANY', 'ONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0050-1999: ref=['I', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0050-1999: hyp=['I', 'HAVE', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0051-2000: ref=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0051-2000: hyp=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0052-2001: ref=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0052-2001: hyp=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0053-2002: ref=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141083-0053-2002: hyp=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141084-0000-2003: ref=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0000-2003: hyp=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0001-2004: ref=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0001-2004: hyp=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0002-2005: ref=['THIS', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 
'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0002-2005: hyp=['THE', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0003-2006: ref=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHRIST'S", 'DOOR'] +1580-141084-0003-2006: hyp=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHER'S", 'DOOR'] +1580-141084-0004-2007: ref=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0004-2007: hyp=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0005-2008: ref=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0005-2008: hyp=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0006-2009: ref=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0006-2009: hyp=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0007-2010: ref=['TO', 'MORROW', 'IS', 'THE', 'EXAMINATION'] +1580-141084-0007-2010: hyp=['TO', 'MORROW', 'WAS', 'THE', 'EXAMINATION'] +1580-141084-0008-2011: ref=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0008-2011: hyp=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0009-2012: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0009-2012: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0010-2013: ref=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0010-2013: hyp=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0011-2014: ref=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0011-2014: hyp=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0012-2015: ref=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0012-2015: hyp=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0013-2016: ref=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0013-2016: hyp=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0014-2017: ref=['WHY', 'BANNISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0014-2017: hyp=['WHY', 'BANISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0015-2018: ref=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 
'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0015-2018: hyp=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0016-2019: ref=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'IN', 'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0016-2019: hyp=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'AND', 'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0017-2020: ref=['NO', 'GOOD', 'MY', 'DEAR', 'WATSON'] +1580-141084-0017-2020: hyp=['NO', 'GOOD', 'MY', 'DEAR', 'WATSON'] +1580-141084-0018-2021: ref=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0018-2021: hyp=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0019-2022: ref=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0019-2022: hyp=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0020-2023: ref=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0020-2023: hyp=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0021-2024: ref=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0021-2024: hyp=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0022-2025: ref=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0022-2025: hyp=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0023-2026: ref=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0023-2026: hyp=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0024-2027: ref=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TWO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0024-2027: hyp=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TOO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0025-2028: ref=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0025-2028: hyp=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0026-2029: ref=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0026-2029: hyp=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0027-2030: ref=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0027-2030: hyp=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0028-2031: ref=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0028-2031: 
hyp=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0029-2032: ref=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANNISTER', 'IN', 'THE', 'FARTHER', 'CORNER'] +1580-141084-0029-2032: hyp=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANISTER', 'IN', 'THE', 'FARTHER', 'CORNER'] +1580-141084-0030-2033: ref=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 'HOLMES'] +1580-141084-0030-2033: hyp=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 'HOLMES'] +1580-141084-0031-2034: ref=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILCHRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0031-2034: hyp=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILGRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0032-2035: ref=['FOR', 'A', 'MOMENT', 'GILCHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0032-2035: hyp=['FOR', 'A', 'MOMENT', 'GO', 'CHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0033-2036: ref=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0033-2036: hyp=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0034-2037: ref=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'NO', 'INJUSTICE'] +1580-141084-0034-2037: hyp=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'KNOW', 'INJUSTICE'] +1580-141084-0035-2038: ref=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0035-2038: hyp=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0036-2039: ref=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0036-2039: hyp=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0037-2040: ref=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0037-2040: hyp=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0038-2041: ref=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0038-2041: hyp=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0039-2042: ref=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0039-2042: hyp=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0040-2043: ref=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'AWARE', 'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0040-2043: hyp=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'WHERE', 
'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0041-2044: ref=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] +1580-141084-0041-2044: hyp=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] +1580-141084-0042-2045: ref=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0042-2045: hyp=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0043-2046: ref=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0043-2046: hyp=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0044-2047: ref=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0044-2047: hyp=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0045-2048: ref=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0045-2048: hyp=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0046-2049: ref=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILCHRIST'] +1580-141084-0046-2049: hyp=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILGRIST'] +1580-141084-0047-2050: ref=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOAMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0047-2050: hyp=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOLMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0048-2051: ref=['IT', 'WILL', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0048-2051: hyp=['IT', 'WOULD', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0049-2052: ref=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0049-2052: hyp=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0050-2053: ref=['IF', 'MISTER', 'SOAMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1580-141084-0050-2053: hyp=['IF', 'MISTER', 'SOLMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1995-1826-0000-750: ref=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 
'ENTHUSIASM'] +1995-1826-0000-750: hyp=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 'ENTHUSIASM'] +1995-1826-0001-751: ref=['THE', 'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0001-751: hyp=['THE', 'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0002-752: ref=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0002-752: hyp=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0003-753: ref=['BETTER', 'GO', 'HE', 'HAD', 'COUNSELLED', 'SENTENTIOUSLY'] +1995-1826-0003-753: hyp=['BETTER', 'GO', 'HE', 'HAD', 'COUNSEL', 'SENTENTIOUSLY'] +1995-1826-0004-754: ref=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0004-754: hyp=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0005-755: ref=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0005-755: hyp=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0006-756: ref=['BEEN', 'LOOKING', 'UP', 'TOOMS', 'COUNTY'] +1995-1826-0006-756: hyp=['BEEN', 'LOOKING', 'UP', 'TOMBS', 'COUNTY'] +1995-1826-0007-757: ref=['FIND', 'SOME', 'CRESSWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0007-757: hyp=['FIVE', 'SOME', 'CRUSTWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0008-758: ref=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0008-758: hyp=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0009-759: ref=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0009-759: hyp=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0010-760: ref=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0010-760: hyp=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0011-761: ref=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0011-761: hyp=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0012-762: ref=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GREAT', 'PHILANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0012-762: hyp=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GRATEFUL', 'ANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0013-763: ref=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 
'WAS', 'THINKING', 'TODAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0013-763: hyp=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 'WAS', 'THINKING', 'TO', 'DAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0014-764: ref=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0014-764: hyp=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0015-765: ref=['SHE', 'HAD', 'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'AND', 'SIGHT'] +1995-1826-0015-765: hyp=['SHE', 'HAD', 'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'IN', 'SIGHT'] +1995-1826-0016-766: ref=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0016-766: hyp=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0017-767: ref=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0017-767: hyp=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0018-768: ref=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0018-768: hyp=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0019-769: ref=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0019-769: hyp=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0020-770: ref=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARK', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0020-770: hyp=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARKED', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0021-771: ref=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0021-771: hyp=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0022-772: ref=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0022-772: hyp=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0023-773: ref=['GOOBERS', "DON'T", 'GROW', 'ON', 'THE', 'TOPS', 'OF', 'VINES', 'BUT', 'UNDERGROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] +1995-1826-0023-773: hyp=['GOULD', 'WAS', "DON'T", 'GROW', 'ON', 'THE', 'TOPSY', 'BANDS', 'BUT', 'ON', 'THE', 'GROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] 
+1995-1826-0024-774: ref=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HARKENED'] +1995-1826-0024-774: hyp=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HEARKENED'] +1995-1826-0025-775: ref=['SOME', 'TIME', "YOU'LL", 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0025-775: hyp=['SOMETIME', 'YOU', 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0026-776: ref=['NOW', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1826-0026-776: hyp=['THOU', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1836-0000-735: ref=['THE', 'HON', 'CHARLES', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0000-735: hyp=['THE', 'HON', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0001-736: ref=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0001-736: hyp=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0002-737: ref=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0002-737: hyp=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0003-738: ref=['SHE', 'WAS', 'NOT', 'HERSELF', 'A', 'NOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0003-738: hyp=['SHE', 'WAS', 'NOT', 'HERSELF', 'A', 'NOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0004-739: ref=['AS', 'SHE', 'AWAITED', 'HER', 'GUESTS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 'FEW', 'TONIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CREIGHTON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CREIGHTON', 'MISTER', 'AND', 'MISSUS', 'VANDERPOOL', 'MISTER', 'HARRY', 'CRESSWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0004-739: hyp=['AS', 'SHE', 'AWAITED', 'HER', 'GUESS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 
'FEW', 'TO', 'NIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CRITON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CRIGHTON', 'MISTER', 'AND', 'MISSUS', 'VAN', 'DERPOOL', 'MISTER', 'HARRY', 'CRESWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0005-740: ref=['MISSUS', 'GREY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0005-740: hyp=['MISSUS', 'GRAY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0006-741: ref=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESSWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0006-741: hyp=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0007-742: ref=['BUT', 'YOU', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0007-742: hyp=['DO', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0008-743: ref=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HIGHEST', 'CAPACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0008-743: hyp=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HAS', 'CAPACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0009-744: ref=['BUT', 'CRESSWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0009-744: hyp=['BUT', 'CRASWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0010-745: ref=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'OF', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GREY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0010-745: hyp=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GRAY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 
'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0011-746: ref=['POSITIVELY', 'HEROIC', 'ADDED', 'CRESSWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0011-746: hyp=['POSITIVELY', 'HEROIC', 'ADDED', 'CRASWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0012-747: ref=['BUT', "WE'RE", 'NOT', 'ER', 'EXACTLY', 'WELCOMED'] +1995-1836-0012-747: hyp=['BUT', 'WE', 'ARE', 'NOT', 'A', 'EXACTLY', 'WELCOME'] +1995-1836-0013-748: ref=['MARY', 'TAYLOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GREY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0013-748: hyp=['MARY', 'TAILOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GRAY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0014-749: ref=['FORTUNATELY', 'SAID', 'MISTER', 'VANDERPOOL', 'NORTHERNERS', 'AND', 'SOUTHERNERS', 'ARE', 'ARRIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1836-0014-749: hyp=['FORTUNATELY', 'SAID', 'MISTER', 'VAN', 'DERPOOL', 'NOR', 'THE', 'NOSE', 'AND', 'SOUTHERNERS', 'ALL', 'RIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1837-0000-777: ref=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', "ZORA'S", 'MUST', 'BE', 'RUINED'] +1995-1837-0000-777: hyp=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', 'ZORAS', 'MUST', 'BE', 'RUINED'] +1995-1837-0001-778: ref=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0001-778: hyp=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0002-779: ref=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0002-779: hyp=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0003-780: ref=['THE', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0003-780: hyp=['WHO', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0004-781: ref=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0004-781: hyp=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0005-782: ref=['SHE', 'WAS', 'SO', 'STRANGE', 'AND', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0005-782: hyp=['SHE', 'WAS', 'SO', 'STRANGE', 'IN', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0006-783: ref=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0006-783: hyp=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0007-784: ref=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] 
+1995-1837-0007-784: hyp=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] +1995-1837-0008-785: ref=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0008-785: hyp=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0009-786: ref=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 'WITH', 'THE', 'DYKES', 'A', 'WEEK', 'AGO', 'AND', 'NOW'] +1995-1837-0009-786: hyp=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 'WITH', 'THE', 'DIKES', 'A', 'WEEK', 'AGO', 'AND', 'NOW'] +1995-1837-0010-787: ref=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0010-787: hyp=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0011-788: ref=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0011-788: hyp=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0012-789: ref=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0012-789: hyp=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0013-790: ref=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0013-790: hyp=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0014-791: ref=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0014-791: hyp=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0015-792: ref=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOLLS'] +1995-1837-0015-792: hyp=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOWLS'] +1995-1837-0016-793: ref=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'A', 'TREE'] +1995-1837-0016-793: hyp=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'THE', 'TREE'] +1995-1837-0017-794: ref=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0017-794: hyp=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0018-795: ref=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0018-795: hyp=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 
'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0019-796: ref=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 'THOUGHT', 'WAS', 'UPPERMOST', 'ZORA'] +1995-1837-0019-796: hyp=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 'THOUGHT', 'WAS', 'UPPERMOST', 'SORA'] +1995-1837-0020-797: ref=['THE', 'YEARS', 'OF', 'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0020-797: hyp=['THE', 'YEARS', 'OF', 'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0021-798: ref=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0021-798: hyp=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0022-799: ref=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0022-799: hyp=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0023-800: ref=['THE', 'NET', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEAKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0023-800: hyp=['THE', 'NED', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEEKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0024-801: ref=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0024-801: hyp=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0025-802: ref=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'ROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0025-802: hyp=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'AROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0026-803: ref=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0026-803: hyp=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDER', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0027-804: ref=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0027-804: hyp=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 
'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0028-805: ref=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0028-805: hyp=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0029-806: ref=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +1995-1837-0029-806: hyp=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +2094-142345-0000-308: ref=['IT', 'IS', 'A', 'VERY', 'FINE', 'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'THE', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0000-308: hyp=['IT', 'IS', 'A', 'VERY', 'FINE', 'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'A', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0001-309: ref=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0001-309: hyp=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0002-310: ref=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'A', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0002-310: hyp=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'THE', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0003-311: ref=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0003-311: hyp=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0004-312: ref=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0004-312: hyp=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0005-313: ref=['SEVERAL', 'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0005-313: hyp=['SEVERAL', 
'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0006-314: ref=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] +2094-142345-0006-314: hyp=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] +2094-142345-0007-315: ref=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0007-315: hyp=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0008-316: ref=['BUT', 'THERE', 'IS', 'ALWAYS', 'A', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BILLED', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0008-316: hyp=['BUT', 'THERE', 'IS', 'ALWAYS', 'AS', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BUILD', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0009-317: ref=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WHITTAW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTON', 'GOSSIP'] +2094-142345-0009-317: hyp=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WIDOW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTONE', 'GOSSIP'] +2094-142345-0010-318: ref=['HETTY', 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SURFACES', 'FOR', 'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 
'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0010-318: hyp=["HETTY'S", 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SERVICES', 'FOR', 'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0011-319: ref=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0011-319: hyp=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0012-320: ref=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINAH', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', "DINAH'S", 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0012-320: hyp=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINA', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', 'DYNAS', 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0013-321: ref=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EARSHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0013-321: hyp=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EAR', 'SHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0014-322: ref=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WHITTAWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0014-322: hyp=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 
'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WIDOWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0015-323: ref=['TO', 'ALL', 'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0015-323: hyp=['TO', 'ALL', 'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0016-324: ref=['SPINNING', 'INDEED'] +2094-142345-0016-324: hyp=['SPINNING', 'INDEED'] +2094-142345-0017-325: ref=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', 'GALLOWSNESS'] +2094-142345-0017-325: hyp=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', 'GALLOWSNESS'] +2094-142345-0018-326: ref=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0018-326: hyp=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0019-327: ref=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WHITTAWS', 'INDEED'] +2094-142345-0019-327: hyp=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WIDOWS', 'INDE'] +2094-142345-0020-328: ref=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0020-328: hyp=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0021-329: ref=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0021-329: hyp=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0022-330: ref=['MISTER', "OTTLEY'S", 'INDEED'] +2094-142345-0022-330: hyp=['MISTER', 'OAKLEIGHS', 'INDEED'] +2094-142345-0023-331: ref=["YOU'RE", 'A', 'RARE', 'UN', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', "IT'S", 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0023-331: hyp=['YOU', 'ARE', 'A', 'RARE', 'AND', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', 'ITS', 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0024-332: ref=['MUNNY', 'MY', "IRON'S", 'TWITE', 'TOLD', 'PEASE', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0024-332: hyp=['MONEY', 'MY', 'IRONS', 'QUITE', 'TOLD', 'PEASE', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0025-333: ref=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0025-333: hyp=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0026-334: ref=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0026-334: hyp=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0027-335: ref=['MUNNY', 'I', 'TOULD', 'IKE', 'TO', 'DO', 'INTO', 'DE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'DE', 'WHITTAWD'] +2094-142345-0027-335: hyp=['MONEY', 'I', 'DID', 'LIKE', 'TO', 'DO', 'INTO', 'THE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'THE', 'WIDOWED'] +2094-142345-0028-336: ref=['NO', 'NO', 'NO', 'TOTTY', 'UD', 'GET', 'HER', 'FEET', 'WET', 
'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0028-336: hyp=['NO', 'NO', 'TODDY', 'HAD', 'GET', 'HER', 'FEET', 'WET', 'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0029-337: ref=['DID', 'EVER', 'ANYBODY', 'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0029-337: hyp=['DID', 'EVER', 'ANYBODY', 'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0030-338: ref=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0030-338: hyp=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0031-339: ref=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0031-339: hyp=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0032-340: ref=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0032-340: hyp=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0033-341: ref=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0033-341: hyp=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0034-342: ref=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', "I'VE", 'GOT', 'LOTS', 'O', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELLING', 'AS', "ISN'T", 'MADE', 'UP'] +2094-142345-0034-342: hyp=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', 'I', 'GOT', 'LOTS', 'OF', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELINGS', "ISN'T", 'MADE', 'UP'] +2094-142345-0035-343: ref=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AUNT', 'SAID', 'DINAH'] +2094-142345-0035-343: hyp=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AND', 'SAID', 'DINAH'] +2094-142345-0036-344: ref=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 
'FAMILIES'] +2094-142345-0036-344: hyp=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 'FAMILIES'] +2094-142345-0037-345: ref=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] +2094-142345-0037-345: hyp=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] +2094-142345-0038-346: ref=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THAN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0038-346: hyp=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THAN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0039-347: ref=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', "I'VE", 'DONE'] +2094-142345-0039-347: hyp=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', 'I', 'HAVE', 'DONE'] +2094-142345-0040-348: ref=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0040-348: hyp=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0041-349: ref=['DIRECTION'] +2094-142345-0041-349: hyp=['DIRECTION'] +2094-142345-0042-350: ref=['I', 'HANNA', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0042-350: hyp=['I', 'HAD', 'A', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0043-351: ref=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0043-351: hyp=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0044-352: ref=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0044-352: hyp=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0045-353: ref=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0045-353: hyp=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0046-354: ref=['I', 
'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0046-354: hyp=['I', 'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0047-355: ref=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0047-355: hyp=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0048-356: ref=['SAID', 'CAPTAIN', 'DONNITHORNE', 'SEATING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0048-356: hyp=['SAID', 'CAPTAIN', 'DONNITHORNE', 'SITTING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0049-357: ref=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSETER', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0049-357: hyp=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSITUR', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0050-358: ref=['BUT', "THERE'S", 'FATHER', 'THE', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0050-358: hyp=['BUT', "THERE'S", 'FATHER', 'IN', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0051-359: ref=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0051-359: hyp=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0052-360: ref=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0052-360: hyp=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0053-361: ref=['FOR', 'IF', "HE'S", 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0053-361: hyp=['FOR', 'IF', 'IS', 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0054-362: ref=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0054-362: hyp=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0055-363: ref=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0055-363: hyp=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0056-364: ref=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0056-364: hyp=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0057-365: ref=['I', 'KNOW', 'HIS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] +2094-142345-0057-365: hyp=['I', 'KNOWS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] 
+2094-142345-0058-366: ref=['BY', 'THE', 'BY', "I'VE", 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DAIRY', 'MISSUS', 'POYSER'] +2094-142345-0058-366: hyp=['BY', 'THE', 'BY', 'I', 'HAVE', 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DEARIE', 'MISSUS', 'POYSER'] +2094-142345-0059-367: ref=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] +2094-142345-0059-367: hyp=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] +2094-142345-0060-368: ref=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2094-142345-0060-368: hyp=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2300-131720-0000-1816: ref=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0000-1816: hyp=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0001-1817: ref=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIADUCT'] +2300-131720-0001-1817: hyp=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIEDUC'] +2300-131720-0002-1818: ref=['THERE', 'MESSRS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0002-1818: hyp=['THERE', 'MESSIERS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0003-1819: ref=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0003-1819: hyp=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0004-1820: ref=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 
'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0004-1820: hyp=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0005-1821: ref=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0005-1821: hyp=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0006-1822: ref=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0006-1822: hyp=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0007-1823: ref=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEEP', 'THEM', 'HE', 'IS', 'IDIOSYNCRATICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0007-1823: hyp=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEPT', 'THEM', 'HE', 'IS', 'IDIOS', 'AND', 'CRATICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0008-1824: ref=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0008-1824: hyp=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0009-1825: ref=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'IT', 'SERIOUSLY'] +2300-131720-0009-1825: hyp=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'ITS', 'SERIOUSLY'] +2300-131720-0010-1826: ref=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0010-1826: hyp=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0011-1827: ref=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 
'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] +2300-131720-0011-1827: hyp=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] +2300-131720-0012-1828: ref=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODDY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0012-1828: hyp=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0013-1829: ref=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERNED', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVAILABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0013-1829: hyp=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERN', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVALUABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0014-1830: ref=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0014-1830: hyp=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0015-1831: ref=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOAD', 'WITH', 'A', 'FRICTION', 'BRAKE', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0015-1831: hyp=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOWED', 'WITH', 'A', 'FRICTION', 'BREAK', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0016-1832: ref=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICATE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0016-1832: hyp=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICADE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0017-1833: ref=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 
'THE', 'MULTIPLE', 'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0017-1833: hyp=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 'THE', 'MULTIPLE', 'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0018-1834: ref=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICTS', 'OF', 'LARGE', 'CITIES'] +2300-131720-0018-1834: hyp=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICT', 'OF', 'LARGE', 'CITIES'] +2300-131720-0019-1835: ref=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0019-1835: hyp=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0020-1836: ref=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'POWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0020-1836: hyp=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'POWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0021-1837: ref=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AND', 'EQUIP', 'CENTRAL', 'STATIONS'] +2300-131720-0021-1837: hyp=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AN', 'EQUIPPED', 'CENTRAL', 'STATIONS'] +2300-131720-0022-1838: ref=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'INVENTORS', 'AT', 'PRACTICALLY', 
'THE', 'SAME', 'TIME'] +2300-131720-0022-1838: hyp=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'IN', 'VENORS', 'AT', 'PRACTICALLY', 'THE', 'SAME', 'TIME'] +2300-131720-0023-1839: ref=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0023-1839: hyp=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0024-1840: ref=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0024-1840: hyp=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0025-1841: ref=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IN', 'PRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'WHEN', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0025-1841: hyp=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IN', 'PRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'ONE', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0026-1842: ref=['THE', 'ARC', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0026-1842: hyp=['THE', 'ARK', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0027-1843: ref=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0027-1843: hyp=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0028-1844: ref=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 
'THOUGHT', 'THE', 'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0028-1844: hyp=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 'THOUGHT', 'THE', 'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0029-1845: ref=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0029-1845: hyp=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0030-1846: ref=['THE', 'PRINCIPLE', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0030-1846: hyp=['THE', 'PRINCIPAL', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0031-1847: ref=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0031-1847: hyp=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0032-1848: ref=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0032-1848: hyp=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0033-1849: ref=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METER', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0033-1849: hyp=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 
'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METRE', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0034-1850: ref=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] +2300-131720-0034-1850: hyp=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] +2300-131720-0035-1851: ref=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METER'] +2300-131720-0035-1851: hyp=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METRE'] +2300-131720-0036-1852: ref=['THE', 'METER', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0036-1852: hyp=['THE', 'METRE', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0037-1853: ref=['HE', 'WEIGHED', 'AND', 'REWEIGHED', 'THE', 'METER', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0037-1853: hyp=['HE', 'WEIGHED', 'AND', 'REWAIED', 'THE', 'METRE', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0038-1854: ref=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0038-1854: hyp=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0039-1855: ref=['THE', 'PROBLEM', 'WAS', 'SOLVED'] +2300-131720-0039-1855: hyp=['THE', 'PROBLEM', 'WAS', 'SOBBED'] +2300-131720-0040-1856: ref=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0040-1856: hyp=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0041-1857: ref=['WE', 'HAD', 'METERS', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 
'LIQUID'] +2300-131720-0041-1857: hyp=['WE', 'HAD', 'METRES', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 'LIQUID'] +237-126133-0000-2407: ref=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHED', 'AMONG', 'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 'ALL', 'TIME'] +237-126133-0000-2407: hyp=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHE', 'AMONG', 'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 'ALL', 'TIME'] +237-126133-0001-2408: ref=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0001-2408: hyp=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0002-2409: ref=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0002-2409: hyp=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0003-2410: ref=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0003-2410: hyp=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0004-2411: ref=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0004-2411: hyp=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0005-2412: ref=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0005-2412: hyp=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0006-2413: ref=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0006-2413: hyp=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0007-2414: ref=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] 
+237-126133-0007-2414: hyp=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] +237-126133-0008-2415: ref=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0008-2415: hyp=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0009-2416: ref=['NOW', "YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0009-2416: hyp=['NOW', "YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0010-2417: ref=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0010-2417: hyp=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0011-2418: ref=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'IN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0011-2418: hyp=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'AN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0012-2419: ref=['THERE', 'THERE', 'HE', 'SAID', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0012-2419: hyp=['THERE', 'THERE', 'HE', 'SAID', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0013-2420: ref=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0013-2420: hyp=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0014-2421: ref=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0014-2421: hyp=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0015-2422: ref=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0015-2422: hyp=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0016-2423: ref=['OH', 'NO', 'JASPER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0016-2423: hyp=['OH', 'NO', 'JAPSER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0017-2424: ref=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0017-2424: hyp=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0018-2425: ref=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0018-2425: hyp=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0019-2426: ref=['DEAR', 
'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0019-2426: hyp=['DEAR', 'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0020-2427: ref=['HOW', 'DID', 'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0020-2427: hyp=['HOW', 'DID', 'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0021-2428: ref=['SHE', 'ASKED', 'IMPULSIVELY', 'I', "DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0021-2428: hyp=['SHE', 'ASKED', 'IMPULSIVELY', 'I', "DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0022-2429: ref=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0022-2429: hyp=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0023-2430: ref=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0023-2430: hyp=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0024-2431: ref=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0024-2431: hyp=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0025-2432: ref=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-126133-0025-2432: hyp=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-134493-0000-2388: ref=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERGSON', 'DIED'] +237-134493-0000-2388: hyp=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERKSON', 'DIED'] +237-134493-0001-2389: ref=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0001-2389: hyp=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0002-2390: ref=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHECKER', 'BOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'DARK', 'AND', 'LIGHT'] +237-134493-0002-2390: hyp=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHEQUER', 'BOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'AND', 'LIGHT'] +237-134493-0003-2391: ref=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAYLY', 'PAINTED', 
'FARMHOUSES', 'THE', 'GILDED', 'WEATHER', 'VANES', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] +237-134493-0003-2391: hyp=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAILY', 'PAINTED', 'FARM', 'HOUSES', 'THE', 'GILDED', 'WEATHER', 'VEINS', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] +237-134493-0004-2392: ref=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0004-2392: hyp=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0005-2393: ref=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GRAY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0005-2393: hyp=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GREY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0006-2394: ref=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0006-2394: hyp=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0007-2395: ref=['ALEXANDRA', 'LETS', 'YOU', 'SLEEP', 'LATE'] +237-134493-0007-2395: hyp=['ALEXANDER', "THAT'S", 'YOU', 'SLEEP', 'LATE'] +237-134493-0008-2396: ref=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0008-2396: hyp=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0009-2397: ref=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'EMIL', 'COAXED'] +237-134493-0009-2397: hyp=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'AMYL', 'COAXED'] +237-134493-0010-2398: ref=['I', 'NEVER', 'SEE', "LOU'S", 'SCYTHE', 'OVER', 'HERE'] +237-134493-0010-2398: hyp=['I', 'NEVER', 'SEE', 'LOOSE', 'SCYTHE', 'OVER', 'HERE'] +237-134493-0011-2399: ref=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ATHLETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0011-2399: hyp=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ADETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0012-2400: ref=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PICK', 'CHERRIES'] +237-134493-0012-2400: hyp=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PIC', 'CHERRIES'] +237-134493-0013-2401: ref=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0013-2401: hyp=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0014-2402: ref=['THEY', 'THINK', "YOU'RE", 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0014-2402: hyp=['THEY', 'THINK', 'YOU', 'ARE', 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0015-2403: ref=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 
'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0015-2403: hyp=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0016-2404: ref=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'OSAGE', 'ORANGE', 'HEDGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] +237-134493-0016-2404: hyp=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'O', 'SAGE', 'ORANGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] +237-134493-0017-2405: ref=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGSON'] +237-134493-0017-2405: hyp=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGIN'] +237-134493-0018-2406: ref=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEEHIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134493-0018-2406: hyp=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEE', 'HIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134500-0000-2345: ref=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0000-2345: hyp=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0001-2346: ref=['MARIE', 'SIGHED'] +237-134500-0001-2346: hyp=['MARIE', 'SIGHED'] +237-134500-0002-2347: ref=['A', 'BRISK', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0002-2347: hyp=['A', 'BRACE', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0003-2348: ref=['THE', 'ORCHARD', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0003-2348: hyp=['THE', 'ARCHWOOD', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0004-2349: ref=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0004-2349: hyp=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0005-2350: ref=['OH', 'BUT', "I'M", 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0005-2350: hyp=['OH', 'BUT', 'I', 'AM', 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0006-2351: ref=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0006-2351: hyp=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0007-2352: ref=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0007-2352: hyp=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0008-2353: ref=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 'TOO'] +237-134500-0008-2353: hyp=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 
'TOO'] +237-134500-0009-2354: ref=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0009-2354: hyp=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0010-2355: ref=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] +237-134500-0010-2355: hyp=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] +237-134500-0011-2356: ref=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0011-2356: hyp=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0012-2357: ref=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0012-2357: hyp=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0013-2358: ref=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'BRANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'CAUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0013-2358: hyp=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'RANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'THOUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0014-2359: ref=['AND', 'EMIL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0014-2359: hyp=['AND', 'AMYL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0015-2360: ref=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHABATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0015-2360: hyp=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHABATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0016-2361: ref=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0016-2361: hyp=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0017-2362: ref=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0017-2362: hyp=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0018-2363: ref=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0018-2363: hyp=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 
'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0019-2364: ref=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0019-2364: hyp=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0020-2365: ref=['YES', "DON'T", 'YOU'] +237-134500-0020-2365: hyp=['YES', "DON'T", 'YOU'] +237-134500-0021-2366: ref=['OH', 'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAID', 'AND', 'SCHOOL', 'TEACHERY'] +237-134500-0021-2366: hyp=['OH', 'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAY', 'AT', 'IN', 'SCHOOL', 'TEACHERY'] +237-134500-0022-2367: ref=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0022-2367: hyp=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0023-2368: ref=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'CARL'] +237-134500-0023-2368: hyp=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'KARL'] +237-134500-0024-2369: ref=['I', 'LIKE', 'TO', 'TALK', 'TO', 'CARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0024-2369: hyp=['I', 'LIKE', 'TO', 'TALK', 'TO', 'KARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0025-2370: ref=['OH', 'EMIL'] +237-134500-0025-2370: hyp=['OH', 'AMY', 'ILL'] +237-134500-0026-2371: ref=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0026-2371: hyp=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0027-2372: ref=["MARIE'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0027-2372: hyp=["MARI'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0028-2373: ref=["I'M", 'SURE', 'ALEXANDRA', 'HOPES', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0028-2373: hyp=['I', 'AM', 'SURE', 'ALEXANDER', 'HELPS', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0029-2374: ref=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0029-2374: hyp=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0030-2375: ref=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0030-2375: hyp=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0031-2376: ref=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLE', 'CLOTH'] +237-134500-0031-2376: hyp=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLECLOTH'] +237-134500-0032-2377: ref=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MEN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 
'AND', 'DOWN'] +237-134500-0032-2377: hyp=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MAN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 'AND', 'DOWN'] +237-134500-0033-2378: ref=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 'SAID', 'SADLY'] +237-134500-0033-2378: hyp=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 'SAID', 'SADLY'] +237-134500-0034-2379: ref=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] +237-134500-0034-2379: hyp=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] +237-134500-0035-2380: ref=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0035-2380: hyp=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0036-2381: ref=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARIE'] +237-134500-0036-2381: hyp=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARI'] +237-134500-0037-2382: ref=['BUT', 'EMIL', 'IF', 'I', 'UNDERSTAND', 'THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0037-2382: hyp=['BUT', 'AM', 'ILL', 'IF', 'I', 'UNDERSTAND', 'IN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0038-2383: ref=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0038-2383: hyp=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0039-2384: ref=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0039-2384: hyp=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0040-2385: ref=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0040-2385: hyp=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0041-2386: ref=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0041-2386: hyp=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0042-2387: ref=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +237-134500-0042-2387: hyp=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +260-123286-0000-200: ref=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0000-200: hyp=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0001-201: ref=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0001-201: hyp=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0002-202: ref=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 
'RESUMED', 'ITS', 'SWAY'] +260-123286-0002-202: hyp=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 'RESUMED', 'ITS', 'SWAY'] +260-123286-0003-203: ref=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] +260-123286-0003-203: hyp=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] +260-123286-0004-204: ref=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0004-204: hyp=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0005-205: ref=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0005-205: hyp=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0006-206: ref=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0006-206: hyp=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0007-207: ref=['HE', 'CALLED', 'THIS', 'SEA', 'A', 'POND', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0007-207: hyp=['HE', 'CALLED', 'THIS', 'SEA', 'UPON', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0008-208: ref=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0008-208: hyp=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0009-209: ref=['I', 'TAKE', 'THIS', 'AS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0009-209: hyp=['I', 'TAKE', 'THIS', 'AS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0010-210: ref=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0010-210: hyp=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0011-211: ref=['NOTHING', 'NEW', 'WEATHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0011-211: hyp=['NOTHING', 'NEW', 'WHETHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0012-212: ref=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'TO', 'FEAR'] +260-123286-0012-212: hyp=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'OF', 'FEAR'] +260-123286-0013-213: ref=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0013-213: hyp=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0014-214: ref=['TRULY', 'THIS', 'SEA', 'IS', 'OF', 'INFINITE', 'WIDTH'] +260-123286-0014-214: hyp=['TRULY', 'THE', 'SEA', 'IS', 'OF', 'INFINITE', 'WID'] +260-123286-0015-215: ref=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0015-215: hyp=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0016-216: 
ref=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0016-216: hyp=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0017-217: ref=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] +260-123286-0017-217: hyp=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] +260-123286-0018-218: ref=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0018-218: hyp=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0019-219: ref=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PICK', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0019-219: hyp=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PIG', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0020-220: ref=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0020-220: hyp=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0021-221: ref=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0021-221: hyp=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0022-222: ref=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0022-222: hyp=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0023-223: ref=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0023-223: hyp=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0024-224: ref=["THERE'S", 'A', 'WHALE', 'A', 'WHALE', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0024-224: hyp=["THERE'S", 'A', 'WAIL', 'A', 'WELL', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0025-225: ref=['FLIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0025-225: hyp=['FIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0026-226: ref=['TWO', 'MONSTERS', 'ONLY', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'ARE', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0026-226: hyp=['TWO', 'MONSTERS', 'OMER', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'OUR', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0027-227: ref=['I', 'CAN', 'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ICHTHYOSAURUS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'COAL', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0027-227: hyp=['I', 'CAN', 
'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ITHUSORIS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'CO', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0028-228: ref=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0028-228: hyp=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0029-229: ref=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0029-229: hyp=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0030-230: ref=['SUDDENLY', 'THE', 'ICHTHYOSAURUS', 'AND', 'THE', 'PLESIOSAURUS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WHIRLPOOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0030-230: hyp=['SUDDENLY', 'THE', 'IDEAS', 'AND', 'THE', 'PLEAS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WAR', 'POOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0031-231: ref=['AS', 'FOR', 'THE', 'ICHTHYOSAURUS', 'HAS', 'HE', 'RETURNED', 'TO', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123286-0031-231: hyp=['AS', 'FOR', 'THE', 'ETHIOSORIS', 'HAS', 'HE', 'RETURNED', 'TO', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123288-0000-232: ref=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0000-232: hyp=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0001-233: ref=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THAT', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0001-233: hyp=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THE', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0002-234: ref=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPOURS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SALINE', 'WATERS'] +260-123288-0002-234: hyp=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPORS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SAILING', 'WATERS'] +260-123288-0003-235: ref=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'THROUGH', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'HAS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0003-235: hyp=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'TO', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'IS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0004-236: ref=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0004-236: hyp=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0005-237: ref=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MIST', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GREY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0005-237: hyp=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MISTS', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GREY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0006-238: ref=['THE', 
'ATMOSPHERE', 'IS', 'EVIDENTLY', 'CHARGED', 'AND', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0006-238: hyp=['THE', 'ATMOSPHERE', 'AS', 'EVIDENTLY', 'CHARGED', 'IN', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0007-239: ref=['THE', 'WIND', 'NEVER', 'LULLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 'RUSHING', 'STORMS'] +260-123288-0007-239: hyp=['THE', 'WIND', 'NEVER', 'LULLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 'RUSHING', 'STORMS'] +260-123288-0008-240: ref=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0008-240: hyp=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0009-241: ref=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0009-241: hyp=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0010-242: ref=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMBENT', 'SAINT', "ELMO'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0010-242: hyp=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMENT', 'SAINT', "ABLE'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0011-243: ref=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'THE', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0011-243: hyp=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'A', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0012-244: ref=['THAT', 'WILL', 'BE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0012-244: hyp=['THAT', 'WILL', 'BE', 'THE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0013-245: ref=['THE', 'PILED', 'UP', 'VAPOURS', 'CONDENSE', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MISTS', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0013-245: hyp=['THAT', 'PILED', 'UP', 'VAPORS', 'CONTENSED', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MIST', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0014-246: ref=['HANS', 'STIRS', 'NOT'] +260-123288-0014-246: hyp=['HANS', 'STIRS', 'NOT'] +260-123288-0015-247: ref=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'EMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 
'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0015-247: hyp=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'ADMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0016-248: ref=['I', 'REFER', 'TO', 'THE', 'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0016-248: hyp=['I', 'REFER', 'TO', 'THE', 'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0017-249: ref=['IS', 'THE', 'ATMOSPHERIC', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'THIS', 'DENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0017-249: hyp=['IS', 'THE', 'ATMOSPHERE', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'OSTENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0018-250: ref=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0018-250: hyp=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0019-251: ref=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0019-251: hyp=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0020-252: ref=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0020-252: hyp=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0021-253: ref=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0021-253: hyp=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0022-254: ref=['THEY', 'SEEM', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0022-254: hyp=['THEY', 'SEEMED', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0023-255: ref=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0023-255: hyp=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0024-256: ref=['THE', 'FIREBALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIPPED', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0024-256: hyp=['THE', 'FIRE', 'BALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIP', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0025-257: ref=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] +260-123288-0025-257: hyp=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] 
+260-123288-0026-258: ref=['WE', 'SHALL', 'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0026-258: hyp=['WE', 'SHALL', 'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0027-259: ref=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0027-259: hyp=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0028-260: ref=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123288-0028-260: hyp=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123440-0000-179: ref=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0000-179: hyp=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0001-180: ref=['POOR', 'ALICE'] +260-123440-0001-180: hyp=['POOR', 'ALICE'] +260-123440-0002-181: ref=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0002-181: hyp=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0003-182: ref=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0003-182: hyp=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0004-183: ref=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0004-183: hyp=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0005-184: ref=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0005-184: hyp=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0006-185: ref=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0006-185: hyp=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0007-186: ref=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'A', 'LITTLE', 'DIFFERENT'] 
+260-123440-0007-186: hyp=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'LITTLE', 'DIFFERENT'] +260-123440-0008-187: ref=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0008-187: hyp=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0009-188: ref=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] +260-123440-0009-188: hyp=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] +260-123440-0010-189: ref=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0010-189: hyp=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0011-190: ref=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0011-190: hyp=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0012-191: ref=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0012-191: hyp=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0013-192: ref=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0013-192: hyp=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0014-193: ref=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0014-193: hyp=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0015-194: ref=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0015-194: hyp=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0016-195: ref=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0016-195: hyp=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0017-196: ref=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0017-196: hyp=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0018-197: ref=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'O', 'MOUSE'] +260-123440-0018-197: hyp=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'OH', 'MOUSE'] +260-123440-0019-198: ref=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0019-198: hyp=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0020-199: ref=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 'NOT', 'WE', 'INDEED'] +260-123440-0020-199: hyp=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 
'NOT', 'WE', 'INDEED'] +2830-3979-0000-1120: ref=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', "LUTHER'S", 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] +2830-3979-0000-1120: hyp=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', "LUTHER'S", 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] +2830-3979-0001-1121: ref=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAMLINE', 'HIM', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TODAY', 'TO', 'AMERICANS'] +2830-3979-0001-1121: hyp=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAM', 'LINE', 'HYMN', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TO', 'DAY', 'TO', 'AMERICANS'] +2830-3979-0002-1122: ref=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0002-1122: hyp=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALLATIONS'] +2830-3979-0003-1123: ref=['THE', 'UNDERTAKING', 'WHICH', 'SEEMED', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0003-1123: hyp=['THE', 'UNDERTAKING', 'WHICH', 'SEEMS', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0004-1124: ref=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0004-1124: hyp=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0005-1125: ref=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0005-1125: hyp=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0006-1126: ref=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0006-1126: hyp=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ANGULATIONS'] +2830-3979-0007-1127: ref=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'EDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALATIANS', 'FOR', 'INSTANCE'] +2830-3979-0007-1127: hyp=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'ADDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALLATIONS', 'FOR', 'INSTANCE'] +2830-3979-0008-1128: ref=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALATIANS', 'AND', 'ROERER', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 
'THE', 'PRINTER'] +2830-3979-0008-1128: hyp=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALLATIONS', 'AND', 'ROAR', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 'THE', 'PRINTER'] +2830-3979-0009-1129: ref=['IT', 'PRESENTS', 'LIKE', 'NO', 'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0009-1129: hyp=['IT', 'PRESENTS', 'LIKE', 'NO', 'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0010-1130: ref=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0010-1130: hyp=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0011-1131: ref=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0011-1131: hyp=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0012-1132: ref=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOREVER'] +2830-3979-0012-1132: hyp=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOR', 'EVER'] +2830-3980-0000-1043: ref=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0000-1043: hyp=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0001-1044: ref=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0001-1044: hyp=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0002-1045: ref=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0002-1045: hyp=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0003-1046: ref=['PAUL', 'CAME', 'LATER', 'AND', 'IS', 'BENEATH', 'US'] +2830-3980-0003-1046: hyp=['PAW', 'CAME', 'LATER', 'IN', 'HIS', 'BENEATH', 'US'] +2830-3980-0004-1047: ref=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0004-1047: hyp=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0005-1048: ref=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0005-1048: hyp=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0006-1049: ref=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'AND', 'MINISTRY'] +2830-3980-0006-1049: hyp=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'IN', 'MINISTRY'] +2830-3980-0007-1050: ref=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 
'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0007-1050: hyp=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0008-1051: ref=['PAUL', 'TAKES', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] +2830-3980-0008-1051: hyp=['POLITICS', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] +2830-3980-0009-1052: ref=['PAUL', 'AN', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0009-1052: hyp=['PAUL', 'AND', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0010-1053: ref=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0010-1053: hyp=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0011-1054: ref=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0011-1054: hyp=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0012-1055: ref=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0012-1055: hyp=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0013-1056: ref=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0013-1056: hyp=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0014-1057: ref=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0014-1057: hyp=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0015-1058: ref=['FOR', 'A', 'PERSON', 'TO', 'POSSESS', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0015-1058: hyp=['FOR', 'A', 'PERSON', 'TO', 'POSSESSED', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0016-1059: ref=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0016-1059: hyp=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0017-1060: ref=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0017-1060: hyp=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0018-1061: ref=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0018-1061: hyp=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0019-1062: ref=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0019-1062: hyp=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 
'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0020-1063: ref=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'HOLY', 'PRIDE'] +2830-3980-0020-1063: hyp=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'HOLY', 'PRIDE'] +2830-3980-0021-1064: ref=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 'DEAD'] +2830-3980-0021-1064: hyp=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 'DEAD'] +2830-3980-0022-1065: ref=['THE', 'CLAUSE', 'SEEMS', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] +2830-3980-0022-1065: hyp=['THE', 'CLAUSE', 'SEEMED', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] +2830-3980-0023-1066: ref=['THESE', 'PERVERTERS', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0023-1066: hyp=['THESE', 'PERVERTIVES', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0024-1067: ref=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0024-1067: hyp=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0025-1068: ref=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0025-1068: hyp=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0026-1069: ref=['VERSE', 'TWO'] +2830-3980-0026-1069: hyp=['FIRST', 'TOO'] +2830-3980-0027-1070: ref=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0027-1070: hyp=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0028-1071: ref=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0028-1071: hyp=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0029-1072: ref=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0029-1072: hyp=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0030-1073: ref=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0030-1073: hyp=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0031-1074: ref=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0031-1074: hyp=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0032-1075: 
ref=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'EYE', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0032-1075: hyp=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'I', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0033-1076: ref=['NOT', 'ALL', 'THE', 'GALATIANS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0033-1076: hyp=['NOT', 'ALL', 'THE', 'GALLATIONS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0034-1077: ref=['THESE', 'MEANS', 'CANNOT', 'BE', 'CONTAMINATED'] +2830-3980-0034-1077: hyp=['THESE', 'MEANS', 'CANNOT', 'BE', 'CONTAMINATED'] +2830-3980-0035-1078: ref=['THEY', 'REMAIN', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0035-1078: hyp=['THEY', 'REMAINED', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0036-1079: ref=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0036-1079: hyp=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0037-1080: ref=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0037-1080: hyp=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0038-1081: ref=['GRACE', 'BE', 'TO', 'YOU', 'AND', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0038-1081: hyp=['GRACE', 'BE', 'TO', 'YOU', 'IN', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0039-1082: ref=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0039-1082: hyp=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0040-1083: ref=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0040-1083: hyp=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0041-1084: ref=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0041-1084: hyp=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0042-1085: ref=['THE', 'WORLD', 'BRANDS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0042-1085: hyp=['THE', 'WORLD', 'BRINGS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0043-1086: ref=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0043-1086: hyp=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0044-1087: ref=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0044-1087: hyp=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0045-1088: ref=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0045-1088: hyp=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0046-1089: ref=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] +2830-3980-0046-1089: hyp=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] 
+2830-3980-0047-1090: ref=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'AND', 'TO', 'COMPREHEND', 'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0047-1090: hyp=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'INTO', 'COMPREHEND', 'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0048-1091: ref=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0048-1091: hyp=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0049-1092: ref=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0049-1092: hyp=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0050-1093: ref=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0050-1093: hyp=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0051-1094: ref=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0051-1094: hyp=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0052-1095: ref=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0052-1095: hyp=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0053-1096: ref=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0053-1096: hyp=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0054-1097: ref=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'TO', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0054-1097: hyp=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'DOES', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0055-1098: ref=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0055-1098: hyp=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0056-1099: 
ref=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0056-1099: hyp=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0057-1100: ref=['THE', 'ARIANS', 'TOOK', 'CHRIST', 'FOR', 'A', 'NOBLE', 'AND', 'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0057-1100: hyp=['THE', 'ARIANS', 'TOOK', 'CHRIST', 'FOR', 'A', 'NOBLE', 'AND', 'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0058-1101: ref=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0058-1101: hyp=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0059-1102: ref=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0059-1102: hyp=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0060-1103: ref=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0060-1103: hyp=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0061-1104: ref=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASCHAL', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0061-1104: hyp=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASSION', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0062-1105: ref=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'OUR', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0062-1105: hyp=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'A', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0063-1106: ref=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0063-1106: hyp=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0064-1107: ref=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0064-1107: hyp=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0065-1108: ref=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0065-1108: hyp=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0066-1109: ref=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0066-1109: hyp=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0067-1110: ref=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0067-1110: hyp=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0068-1111: ref=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 
'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0068-1111: hyp=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0069-1112: ref=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0069-1112: hyp=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0070-1113: ref=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0070-1113: hyp=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0071-1114: ref=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'SIN'] +2830-3980-0071-1114: hyp=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'IN'] +2830-3980-0072-1115: ref=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0072-1115: hyp=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0073-1116: ref=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKEN', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'UNTO', 'GOD', 'WITH', 'A', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0073-1116: hyp=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKING', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'INTO', 'GOD', 'WITH', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0074-1117: ref=['THIS', 'ATTITUDE', 'IS', 'UNIVERSAL', 'AND', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0074-1117: hyp=['THE', 'SATITUDE', 'IS', 'UNIVERSAL', 'IN', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0075-1118: ref=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0075-1118: hyp=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0076-1119: ref=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2830-3980-0076-1119: hyp=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2961-960-0000-497: ref=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 
'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0000-497: hyp=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0001-498: ref=['THE', 'INFLUENCE', 'WITH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0001-498: hyp=['THE', 'INFLUENCE', 'WHICH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0002-499: ref=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATONISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'AND', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0002-499: hyp=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATINISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'IN', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0003-500: ref=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CHARACTERISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0003-500: hyp=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CORRECTORISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0004-501: ref=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMENTATORS', 'ON', 'THE', 'TIMAEUS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEO', 'PLATONISTS'] +2961-960-0004-501: hyp=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMON', 'TEACHERS', 'ON', 'THE', 'TIMIRAS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEW', 'PLATANISTS'] +2961-960-0005-502: ref=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0005-502: hyp=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0006-503: ref=['THE', 'FANCIES', 'OF', 'THE', 'NEO', 'PLATONISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0006-503: hyp=['THE', 'FANCIES', 'OF', 
'THE', 'NEW', 'PLATANISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0007-504: ref=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 'PLATO', 'AND', 'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0007-504: hyp=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 'PLATO', 'AND', 'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0008-505: ref=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALOGUES', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TWO', 'TRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0008-505: hyp=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALOGUES', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TUTRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0009-506: ref=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0009-506: hyp=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0010-507: ref=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'DEFINED', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0010-507: hyp=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'THE', 'FIND', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0011-508: ref=['WITH', 'HERACLEITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'ANAXAGORAS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGOREANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 'NUMBER'] +2961-960-0011-508: hyp=['WITH', 'HERACLITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'AN', 'EXAGGERUS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGORIANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 
'NUMBER'] +2961-960-0012-509: ref=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRE', 'SOCRATIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMAEUS'] +2961-960-0012-509: hyp=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRIESTHOO', 'CRADIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMIUS'] +2961-960-0013-510: ref=['IT', 'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0013-510: hyp=['IT', 'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0014-511: ref=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0014-511: hyp=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0015-512: ref=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TIMAEUS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0015-512: hyp=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TENEAS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0016-513: ref=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MASTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'PHAEDRUS', 'OR', 'SYMPOSIUM'] +2961-960-0016-513: hyp=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MYSTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'FEATURES', 'OR', 'SIMPOSE', 'HIM'] +2961-960-0017-514: ref=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'THE', 'INTRODUCTION', 'IN', 'WHICH', 'HE', 'IS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0017-514: hyp=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'INTRODUCTION', 'IN', 'WHICH', 'HIS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0018-515: ref=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0018-515: hyp=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0019-516: ref=['HE', 'COULD', 'WRITE', 'IN', 'ONE', 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'AND', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0019-516: hyp=['HE', 'COULD', 'WRITE', 'IN', "ONE'S", 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0020-517: ref=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAEUS', 'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0020-517: hyp=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAIRS', 
'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0021-518: ref=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 'OF', 'APPOSITION', 'AND', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] +2961-960-0021-518: hyp=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 'OF', 'APPOSITION', 'IN', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] +2961-960-0022-519: ref=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-960-0022-519: hyp=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-961-0000-520: ref=['SOCRATES', 'BEGINS', 'THE', 'TIMAEUS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0000-520: hyp=['SOCRATES', 'BEGINS', 'TO', 'TEARS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0001-521: ref=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0001-521: hyp=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0002-522: ref=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TIMAEUS', 'CITIZEN', 'OF', 'LOCRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'AND', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITIAS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHO', 'IS', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0002-522: hyp=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TO', 'ME', 'AS', 'CITIZEN', 'OF', 'LOCHRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'IN', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITUS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHOSE', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0003-523: ref=['I', 'WILL', 'IF', 'TIMAEUS', 'APPROVES', 'I', 'APPROVE'] +2961-961-0003-523: hyp=['I', 'WILL', 'IF', 'TO', 'ME', 'AS', 'IT', 'PROVES', 'I', 'APPROVE'] +2961-961-0004-524: ref=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', "SOLON'S", 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'DROPIDAS', 'MY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIAS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0004-524: hyp=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', 'SILENCE', 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'TROPIDAS', 'BY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIUS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0005-525: ref=['SOME', 'POEMS', 'OF', 'SOLON', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0005-525: hyp=['SOME', 'POEMS', 'OF', 'SOLEMN', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0006-526: ref=['AND', 'WHAT', 'WAS', 'THE', 
'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0006-526: hyp=['AND', 'WHAT', 'WAS', 'THE', 'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0007-527: ref=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 'EVER', 'ENGAGED'] +2961-961-0007-527: hyp=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 'EVER', 'ENGAGED'] +2961-961-0008-528: ref=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAS', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0008-528: hyp=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAD', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0009-529: ref=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0009-529: hyp=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0010-530: ref=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0010-530: hyp=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0011-531: ref=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNALS', 'SOLON', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0011-531: hyp=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNAL', 'SOLEMN', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0012-532: ref=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0012-532: hyp=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0013-533: ref=['SOLON', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0013-533: hyp=['SOLEMN', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0014-534: ref=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUNDED', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'SHE', 'FOUNDED', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0014-534: hyp=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUND', 'IT', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'YOU', 'FOUND', 'IT', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0015-535: ref=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0015-535: hyp=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 
'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0016-536: ref=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'THEM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0016-536: hyp=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'HIM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0017-537: ref=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MAN'] +2961-961-0017-537: hyp=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MEN'] +2961-961-0018-538: ref=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0018-538: hyp=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0019-539: ref=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTHER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0019-539: hyp=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTTER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0020-540: ref=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0020-540: hyp=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0021-541: ref=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0021-541: hyp=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0022-542: ref=['THEN', 'NOW', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MAN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORD', 'SPOKE'] +2961-961-0022-542: hyp=['THEN', 'THOU', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MEN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 
'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORDS', 'SPOKE'] +3570-5694-0000-2433: ref=['BUT', 'ALREADY', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 'ANTEDATING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALISED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM'] +3570-5694-0000-2433: hyp=['BUT', 'ALREADY', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 'ANTETING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALIZED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'CELEBRATE', 'SYSTEM'] +3570-5694-0001-2434: ref=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0001-2434: hyp=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0002-2435: ref=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'TO', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOUR', 'AND', 'NOT', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULNESS', 'OF', 'LIFE'] +3570-5694-0002-2435: hyp=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'THROUGH', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOR', 'AND', 'NOT', 'TO', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULLNESS', 'OF', 'LIFE'] +3570-5694-0003-2436: ref=['WITH', 'A', 'FURTHER', 'ADVANCE', 'IN', 'CULTURE', 'THIS', 'TABU', 'MAY', 'CHANGE', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'A', 'TABU', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0003-2436: hyp=['WITH', 'A', 'FURTHER', 'ADVANCE', 'AND', 'CULTURE', 'THIS', 'TABOU', 'MAY', 'CHANGED', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'AT', 'A', 'BOOT', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0004-2437: ref=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0004-2437: hyp=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0005-2438: ref=['UNDER', 'THE', 'TABU', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0005-2438: hyp=['UNDER', 'THE', 'TABOO', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 
'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0006-2439: ref=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0006-2439: hyp=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0007-2440: ref=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0007-2440: hyp=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0008-2441: ref=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0008-2441: hyp=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0009-2442: ref=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0009-2442: hyp=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0010-2443: ref=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0010-2443: hyp=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0011-2444: ref=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 
'IN', 'THE', 'LATER', 'DEVELOPMENT', 'BUT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0011-2444: hyp=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'THAT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0012-2445: ref=['THERE', 'IS', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRADES'] +3570-5694-0012-2445: hyp=['THERE', 'IS', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRATES'] +3570-5694-0013-2446: ref=['THIS', 'DIFFERENTIATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0013-2446: hyp=['THIS', 'DIFFERENTIATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0014-2447: ref=['MANY', 'OF', 'THESE', 'AFFILIATED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESSER', 'MEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0014-2447: hyp=['MANY', 'OF', 'THESE', 'ARE', 'FILIATED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESS', 'AMEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0015-2448: ref=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0015-2448: hyp=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0016-2449: ref=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0016-2449: hyp=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0017-2450: ref=['THE', 'WEARING', 'OF', 'UNIFORMS', 'OR', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0017-2450: hyp=['THE', 'WEARING', 'OF', 'UNIFORMS', 'ARE', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0018-2451: 
ref=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 'AND', 'THE', 'IGNOBLE'] +3570-5694-0018-2451: hyp=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 'AND', 'THE', 'IGNOBLE'] +3570-5694-0019-2452: ref=['BUT', 'THE', 'GENERAL', 'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0019-2452: hyp=['BUT', 'THE', 'GENERAL', 'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0020-2453: ref=['SO', 'THOSE', 'OFFICES', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCOUTREMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0020-2453: hyp=['SO', 'THOSE', 'OFFICERS', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCUTMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0021-2454: ref=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'THE', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0021-2454: hyp=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'A', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0022-2455: ref=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5694-0022-2455: hyp=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5695-0000-2456: ref=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0000-2456: hyp=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0001-2457: ref=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] +3570-5695-0001-2457: hyp=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'A', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] 
+3570-5695-0002-2458: ref=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VICARIOUS', 'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0002-2458: hyp=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VIPEROUS', 'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0003-2459: ref=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0003-2459: hyp=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0004-2460: ref=['IF', 'BEAUTY', 'OR', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0004-2460: hyp=['IF', 'BEAUTY', 'OR', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0005-2461: ref=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0005-2461: hyp=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0006-2462: ref=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENSE', 'OF', 'PECUNIARY', 'DECENCY', 'IS', 'PUT', 'AWAY'] +3570-5695-0006-2462: hyp=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENCE', 'OF', 'PECUNIARY', 'DECENCIES', 'PUT', 'AWAY'] +3570-5695-0007-2463: ref=['THERE', 'IS', 'NO', 'CLASS', 'AND', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0007-2463: hyp=['THERE', 'IS', 'NO', 'CLASS', 'IN', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 
'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0008-2464: ref=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'AFFECT'] +3570-5695-0008-2464: hyp=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'EFFECT'] +3570-5695-0009-2465: ref=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0009-2465: hyp=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0010-2466: ref=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0010-2466: hyp=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0011-2467: ref=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0011-2467: hyp=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0012-2468: ref=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONTACT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0012-2468: hyp=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONDUCT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0013-2469: ref=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0013-2469: hyp=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0014-2470: ref=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACE', 'IS', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBORHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0014-2470: hyp=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACES', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 
'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBOURHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0015-2471: ref=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 'OF', 'WORKMEN'] +3570-5695-0015-2471: hyp=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 'OF', 'WORKMEN'] +3570-5696-0000-2472: ref=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0000-2472: hyp=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0001-2473: ref=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'HOLD', 'A', 'RANK', 'VERY', 'MUCH', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'QUASI', 'PEACEABLE', 'CULTURE'] +3570-5696-0001-2473: hyp=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'ALL', 'THE', 'RANK', 'VERIMENT', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'COURSE', 'I', 'PEACEABLE', 'CULTURE'] +3570-5696-0002-2474: ref=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0002-2474: hyp=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOUR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0003-2475: ref=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'EFFECTED', 'BY', 'A', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MANY', 'AND', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALKERS', 'MAY', 'NOT', 'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 
'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0003-2475: hyp=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'AFFECTED', 'BY', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MEN', 'IN', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALK', 'IS', 'NOT', 'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0004-2476: ref=['THE', 'SALIENT', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0004-2476: hyp=['THE', 'SAILORED', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0005-2477: ref=['THROUGHOUT', 'THE', 'ENTIRE', 'EVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0005-2477: hyp=['THROUGHOUT', 'THE', 'ENTIRE', 'REVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0006-2478: ref=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERYDAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0006-2478: hyp=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERY', 'DAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0007-2479: ref=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0007-2479: hyp=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0008-2480: ref=['BUT', 'IT', 'IS', 'ON', 'OTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTE', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0008-2480: hyp=['BUT', 'IT', 'IS', 'ANOTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTES', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0009-2481: ref=['IN', 'STRICT', 'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0009-2481: hyp=['IN', 'STRICT', 
'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0010-2482: ref=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 'UP', 'OF', 'USE', 'AND', 'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3570-5696-0010-2482: hyp=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 'UP', 'OF', 'USE', 'AND', 'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3575-170457-0000-369: ref=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0000-369: hyp=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0001-370: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0001-370: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0002-371: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0002-371: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0003-372: ref=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'IN', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0003-372: hyp=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'AND', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0004-373: ref=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0004-373: hyp=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0005-374: ref=['SHE', 'A', 'TORY', 'AND', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DISSENT', 'AND', 'RADICALISM'] +3575-170457-0005-374: hyp=['SHE', 'ATTORIAN', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DESCENT', 'AND', 'RADICALISM'] +3575-170457-0006-375: ref=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0006-375: hyp=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0007-376: ref=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0007-376: hyp=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0008-377: ref=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 
'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0008-377: hyp=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0009-378: ref=['JANUARY', 'AND', 'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SOUTHEY'] +3575-170457-0009-378: hyp=['JANUARY', 'AND', 'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SALVI'] +3575-170457-0010-379: ref=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0010-379: hyp=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0011-380: ref=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0011-380: hyp=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0012-381: ref=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0012-381: hyp=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0013-382: ref=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0013-382: hyp=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0014-383: ref=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0014-383: hyp=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0015-384: ref=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0015-384: hyp=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 
'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0016-385: ref=['FAREWELL', 'MADAM'] +3575-170457-0016-385: hyp=['FAREWELL', 'MADAM'] +3575-170457-0017-386: ref=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SOUTHEY'] +3575-170457-0017-386: hyp=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SELVEY'] +3575-170457-0018-387: ref=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0018-387: hyp=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0019-388: ref=['I', 'HAD', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDERATE', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0019-388: hyp=['I', 'HAVE', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDER', 'IT', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0020-389: ref=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0020-389: hyp=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0021-390: ref=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0021-390: hyp=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0022-391: ref=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0022-391: hyp=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0023-392: ref=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0023-392: hyp=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0024-393: ref=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'OR', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 'PRIVATION'] +3575-170457-0024-393: hyp=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'A', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 
'PRIVATION'] +3575-170457-0025-394: ref=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0025-394: hyp=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0026-395: ref=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED', 'C', 'B'] +3575-170457-0026-395: hyp=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED'] +3575-170457-0027-396: ref=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', "SOUTHEY'S", 'REPLY'] +3575-170457-0027-396: hyp=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', 'SO', 'THESE', 'REPLY'] +3575-170457-0028-397: ref=['KESWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAM'] +3575-170457-0028-397: hyp=['KEZWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAME'] +3575-170457-0029-398: ref=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0029-398: hyp=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0030-399: ref=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0030-399: hyp=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0031-400: ref=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0031-400: hyp=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0032-401: ref=['COME', 'COME', 'I', 'AM', 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0032-401: hyp=['COME', 'COME', "I'M", 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0033-402: ref=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'ROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISS', 'E', 'IS', 'COME', 'OH', 'DEAR'] 
+3575-170457-0033-402: hyp=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'ROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISSY', 'IS', 'COME', 'OH', 'DEAR'] +3575-170457-0034-403: ref=['IN', 'THIS', 'MONOTONOUS', 'LIFE', 'OF', 'MINE', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0034-403: hyp=['IN', 'THIS', 'MONOTONOUS', 'LIFE', 'OF', 'MIND', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0035-404: ref=['I', 'WISH', 'IT', 'WOULD', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0035-404: hyp=['I', 'WISH', 'YOU', 'WERE', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0036-405: ref=['MY', 'EYES', 'FILL', 'WITH', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WANDERING', 'IN', 'THOUGHT', 'AND', 'DEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMITTEN', 'AT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'IN', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0036-405: hyp=['MY', 'EYES', 'FILLED', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WANDERING', 'IN', 'THOUGHT', 'INDEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMITTEN', 'THAT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'AND', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0037-406: ref=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOTBED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'DIRECTION'] +3575-170457-0037-406: hyp=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOT', 'BED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'A', 'DIRECTION'] +3575-170457-0038-407: ref=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHRISTIAN', 'FAITH', 'MY', 'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0038-407: hyp=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHRISTIAN', 'FAITH', 'MY', 
'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0039-408: ref=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] +3575-170457-0039-408: hyp=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] +3575-170457-0040-409: ref=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0040-409: hyp=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0041-410: ref=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'IT', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0041-410: hyp=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'HE', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0042-411: ref=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'OUR', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0042-411: hyp=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'HER', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0043-412: ref=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'IT', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 'BEST'] +3575-170457-0043-412: hyp=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'AND', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 
'BEST'] +3575-170457-0044-413: ref=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0044-413: hyp=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0045-414: ref=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 'FROM', 'THE', 'CONTAMINATION', 'OF', 'TOO', 'INTIMATE', 'SOCIETY'] +3575-170457-0045-414: hyp=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 'FROM', 'THE', 'CONTAMINATION', 'OF', 'TWO', 'INTIMATE', 'SOCIETY'] +3575-170457-0046-415: ref=['A', 'GOOD', 'NEIGHBOUR', 'OF', 'THE', 'BRONTES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', "DRUGGIST'S", 'SHOP', 'IN', 'HAWORTH', 'AND', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRESS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLDS', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0046-415: hyp=['A', 'GOOD', 'NEIGHBOR', 'OF', 'THE', 'BRONTEES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', 'DRUGGIST', 'SHOP', 'IN', 'HAWORTH', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRIS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLDS', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0047-416: ref=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0047-416: hyp=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0048-417: ref=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0048-417: hyp=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0049-418: ref=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0049-418: hyp=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0050-419: ref=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'AND', 'AGE'] +3575-170457-0050-419: hyp=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'IN', 'AGE'] +3575-170457-0051-420: ref=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 
'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0051-420: hyp=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0052-421: ref=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0052-421: hyp=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0053-422: ref=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'THE', 'JEALOUS', 'VIGILANCE', 'OF', 'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0053-422: hyp=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'A', 'JEALOUS', 'VIGILANCE', 'OF', 'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0054-423: ref=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0054-423: hyp=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0055-424: ref=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0055-424: hyp=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0056-425: ref=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3575-170457-0056-425: hyp=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3729-6852-0000-1660: ref=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SILVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0000-1660: hyp=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SYLVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0001-1661: ref=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 'AN', 'INSECT'] +3729-6852-0001-1661: hyp=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 
'AN', 'INSECT'] +3729-6852-0002-1662: ref=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBE', 'CONTI', 'AND', 'I', 'HAD', 'OCCASION', 'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0002-1662: hyp=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBEY', 'KANTI', 'AND', 'I', 'HAD', 'OCCASION', 'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0003-1663: ref=['MADAM', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCEVRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SCEURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0003-1663: hyp=['MADAME', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCAFFRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SKURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0004-1664: ref=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERYONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0004-1664: hyp=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERY', 'ONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0005-1665: ref=['SILVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARIVAUX', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0005-1665: hyp=['SYLVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARIVAL', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0006-1666: ref=['SILVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 
'BY', 'THEIR', 'VIRTUE'] +3729-6852-0006-1666: hyp=['SYLVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 'BY', 'THEIR', 'VIRTUE'] +3729-6852-0007-1667: ref=['TWO', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARIVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0007-1667: hyp=['TWO', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARAVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0008-1668: ref=['SHE', 'WAS', 'HONOURABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SAUVEUR', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTAIN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'THE', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0008-1668: hyp=['SHE', 'WAS', 'HONORABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SEVER', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTIAN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'A', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0009-1669: ref=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SILVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SILVIA', 'DIES'] +3729-6852-0009-1669: hyp=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SYLVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SYLVIA', 'DIES'] +3729-6852-0010-1670: ref=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0010-1670: hyp=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0011-1671: ref=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 
'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0011-1671: hyp=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0012-1672: ref=['I', 'SHALL', 'CALL', 'YOU', 'ESPRIT'] +3729-6852-0012-1672: hyp=['I', 'SHALL', 'CALL', 'YOU', 'A', 'SPREE'] +3729-6852-0013-1673: ref=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0013-1673: hyp=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0014-1674: ref=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 'SIR'] +3729-6852-0014-1674: hyp=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 'SIR'] +3729-6852-0015-1675: ref=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0015-1675: hyp=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0016-1676: ref=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'ENQUIRIES'] +3729-6852-0016-1676: hyp=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'INQUIRIES'] +3729-6852-0017-1677: ref=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOU', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0017-1677: hyp=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOUS', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0018-1678: ref=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'ENQUIRE', 'MY', 'WISHES'] +3729-6852-0018-1678: hyp=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'INQUIRE', 'MY', 'WISHES'] +3729-6852-0019-1679: ref=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0019-1679: hyp=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0020-1680: ref=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'OF', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0020-1680: hyp=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0021-1681: ref=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0021-1681: hyp=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0022-1682: ref=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 
'WHY'] +3729-6852-0022-1682: hyp=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 'WHY'] +3729-6852-0023-1683: ref=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0023-1683: hyp=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0024-1684: ref=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0024-1684: hyp=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0025-1685: ref=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0025-1685: hyp=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0026-1686: ref=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0026-1686: hyp=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0027-1687: ref=['THAT', 'IS', 'TRUE', 'BADAUDERIE'] +3729-6852-0027-1687: hyp=['THAT', 'IS', 'TRUE', "BADR'D", 'GREE'] +3729-6852-0028-1688: ref=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0028-1688: hyp=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0029-1689: ref=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'CIVET', 'CAT'] +3729-6852-0029-1689: hyp=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'SEVETTE', 'CAT'] +3729-6852-0030-1690: ref=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0030-1690: hyp=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0031-1691: ref=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0031-1691: hyp=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0032-1692: ref=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0032-1692: hyp=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0033-1693: ref=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 
'OR', 'A', 'FALSE', 'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0033-1693: hyp=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 'OR', 'A', 'FALSE', 'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0034-1694: ref=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0034-1694: hyp=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0035-1695: ref=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0035-1695: hyp=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0036-1696: ref=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LE', 'ROI'] +3729-6852-0036-1696: hyp=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LAUROI'] +3729-6852-0037-1697: ref=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0037-1697: hyp=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0038-1698: ref=['WHAT', 'SIR', 'I', 'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0038-1698: hyp=['WHAT', 'SIR', 'I', 
'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0039-1699: ref=['HE', 'HIMSELF', 'RECITED', 'THE', 'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0039-1699: hyp=['HE', 'HIMSELF', 'RECITED', 'THE', 'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0040-1700: ref=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0040-1700: hyp=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0041-1701: ref=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0041-1701: hyp=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0042-1702: ref=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0042-1702: hyp=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0043-1703: ref=['I', 'RESIDE', 'IN', 'THE', 'MARAIS', 'RUE', 'DE', 'DOUZE', 'PORTES'] +3729-6852-0043-1703: hyp=['I', 'RESIDE', 'IN', 'THE', 'MARAY', 'GRUE', 'DE', 'DUSPORT'] +3729-6852-0044-1704: ref=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0044-1704: hyp=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0045-1705: ref=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WAS', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANYONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0045-1705: hyp=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WITH', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 
'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANY', 'ONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0046-1706: ref=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 'ACCOUNTS'] +3729-6852-0046-1706: hyp=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 'ACCOUNTS'] +4077-13751-0000-1258: ref=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMALLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0000-1258: hyp=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMERLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0001-1259: ref=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0001-1259: hyp=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0002-1260: ref=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TODAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0002-1260: hyp=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TO', 'DAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0003-1261: ref=['IN', 'PLACE', 'OF', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0003-1261: hyp=['IN', 'PLACE', 'OF', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0004-1262: ref=['THE', 'PRACTISE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 
'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0004-1262: hyp=['THE', 'PRACTICE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0005-1263: ref=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEEDS', 'HAS', 'ATTAINED', 'THE', 'PROPORTIONS', 'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'AN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FOR', 'TRUTH'] +4077-13751-0005-1263: hyp=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEATS', 'HAS', 'ATTAINED', 'THAT', 'PROPORTIONS', 'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'IN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FIR', 'TRUTH'] +4077-13751-0006-1264: ref=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELYTING', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THERE', 'ESTABLISHED'] +4077-13751-0006-1264: hyp=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELY', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THEIR', 'ESTABLISHED'] +4077-13751-0007-1265: ref=['THE', 'BOOK', 'OF', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0007-1265: hyp=['THE', 'BOOK', 'O', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0008-1266: ref=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GENERALLY', 'REGARDED', 'THE', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'OF', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 'OWN', 'TRADITIONS'] +4077-13751-0008-1266: hyp=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GERALLY', 'REGARDED', 'THEIR', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'OF', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 
'OWN', 'TRADITIONS'] +4077-13751-0009-1267: ref=['THE', 'FIRST', 'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'KIRTLAND', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] +4077-13751-0009-1267: hyp=['THE', 'FIRST', 'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'CURTALIND', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] +4077-13751-0010-1268: ref=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0010-1268: hyp=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0011-1269: ref=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTER', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIANS'] +4077-13751-0011-1269: hyp=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTRE', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIENS'] +4077-13751-0012-1270: ref=['THE', 'LIEUTENANT', 'GOVERNOR', 'LILBURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFESTED', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0012-1270: hyp=['THE', 'LIEUTENANT', 'GOVERNOR', 'LITTLE', 'BURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFEST', 'HIS', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0013-1271: ref=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0013-1271: hyp=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0014-1272: ref=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CALDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0014-1272: hyp=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CAULDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0015-1273: ref=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0015-1273: hyp=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 
'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0016-1274: ref=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGNED', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0016-1274: hyp=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGN', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0017-1275: ref=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0017-1275: hyp=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0018-1276: ref=['AMERICAN', 'SCHOOL', 'BOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PAPAL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'VAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0018-1276: hyp=['AMERICAN', 'SCHOOLBOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PEPPEL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'FAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0019-1277: ref=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0019-1277: hyp=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0020-1278: ref=['AS', 'A', 'SAMPLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0020-1278: hyp=['AS', 'THE', 'SABLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCEY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0021-1279: ref=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 
'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] +4077-13751-0021-1279: hyp=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] +4077-13754-0000-1241: ref=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0000-1241: hyp=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0001-1242: ref=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0001-1242: hyp=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0002-1243: ref=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'BUCHANAN', 'ORDERED', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'INTERESTS', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0002-1243: hyp=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'YOU', 'CANNOT', 'ORDER', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'ENTRANCE', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0003-1244: ref=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREATER', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0003-1244: hyp=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREAT', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0004-1245: ref=['ALREADY', 'A', 'NORTH', 'AND', 'A', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'A', 'WEST'] +4077-13754-0004-1245: hyp=['ALREADY', 'A', 'NORTH', 'AND', 'THE', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'WEST'] +4077-13754-0005-1246: ref=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 'TO', 'FURNISH', 'MEN', 'AND', 'ARMS', 'FOR', 'THEIR', "COUNTRY'S", 'NEED'] +4077-13754-0005-1246: hyp=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 
'TO', 'FURNISH', 'MEN', 'IN', 'ARMS', 'FOR', 'THE', "COUNTRY'S", 'NEED'] +4077-13754-0006-1247: ref=['WHAT', 'THE', 'LATTER', 'DAY', 'SAINTS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTISE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0006-1247: hyp=['WHAT', 'THE', 'LATTER', 'DAY', 'SAYS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTICE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0007-1248: ref=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATE', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BROTHER', 'AND', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0007-1248: hyp=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATES', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BRETHREN', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0008-1249: ref=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'TO', 'FIND', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0008-1249: hyp=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'DEFINED', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0009-1250: ref=['AT', 'THE', 'INCEPTION', 'OF', 'PLURAL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTISE'] +4077-13754-0009-1250: hyp=['AT', 'THE', 'INCEPTION', 'OF', 'PEARL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTICE'] +4077-13754-0010-1251: ref=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'THE', 'PURPOSE', 'OF', 'SUPPRESSING', 'PLURAL', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 'DEAD', 'LETTER'] +4077-13754-0010-1251: hyp=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'A', 'PURPOSE', 'OF', 'SUPPRESSING', 'PORAL', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 
'DEAD', 'LETTER'] +4077-13754-0011-1252: ref=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'IN', 'UTAH', 'WHO', 'WERE', 'NOT', 'MORMONS', 'NOR', 'LOVERS', 'OF', 'MORMONISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] +4077-13754-0011-1252: hyp=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'AND', 'NEW', 'TOP', 'WHO', 'WERE', 'NOT', 'MORE', "MEN'S", 'NOR', 'LOVERS', 'OF', 'WOMANISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] +4077-13754-0012-1253: ref=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'AYE', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0012-1253: hyp=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOUR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'I', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0013-1254: ref=['BEFORE', 'THIS', 'TRAVESTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'BROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MEET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCES', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0013-1254: hyp=['BEFORE', 'THIS', 'TRAVASTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'WROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCE', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0014-1255: ref=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0014-1255: hyp=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0015-1256: ref=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ESCHEATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', "COURT'S", 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 
'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 'PROPERTY'] +4077-13754-0015-1256: hyp=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ISTIATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', 'COURTS', 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 'PROPERTY'] +4077-13754-0016-1257: ref=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4077-13754-0016-1257: hyp=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4446-2271-0000-1133: ref=['MAINHALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0000-1133: hyp=['MAIN', 'HALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0001-1134: ref=['HE', 'HAD', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0001-1134: hyp=['WE', 'NOT', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0002-1135: ref=["IT'S", 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0002-1135: hyp=['ITS', 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0003-1136: ref=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0003-1136: hyp=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0004-1137: ref=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAINHALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0004-1137: hyp=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAIN', 'HALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0005-1138: ref=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', "SHE'S", 'AT', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0005-1138: hyp=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', 'SHE', 'SAID', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0006-1139: ref=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0006-1139: hyp=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0007-1140: ref=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0007-1140: 
hyp=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0008-1141: ref=['IRENE', 'BURGOYNE', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0008-1141: hyp=['IRENE', 'WERE', 'GOING', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0009-1142: ref=['MAINHALL', 'VOUCHED', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0009-1142: hyp=['MEANHAW', 'VOUCH', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0010-1143: ref=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0010-1143: hyp=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0011-1144: ref=['SIR', 'HARRY', 'TOWNE', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0011-1144: hyp=['SIR', 'HARRY', 'TOWN', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0012-1145: ref=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', "GIRL'S", 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0012-1145: hyp=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', "GIRL'S", 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0013-1146: ref=['DO', 'YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'A', 'BIT', 'CONSCIOUS', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0013-1146: hyp=['YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'OF', 'GOOD', 'CONSCIENCE', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0014-1147: ref=['WESTMERE', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0014-1147: hyp=['WESTMIR', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0015-1148: ref=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0015-1148: hyp=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0016-1149: ref=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'A', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0016-1149: hyp=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'THE', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0017-1150: ref=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THAT', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0017-1150: hyp=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 
'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THE', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0018-1151: ref=['SHE', 'CONSIDERED', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THOUGH', 'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0018-1151: hyp=['SHE', 'CONSIDERED', 'FOR', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THE', 'WAY', 'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0019-1152: ref=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0019-1152: hyp=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0020-1153: ref=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0020-1153: hyp=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0021-1154: ref=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0021-1154: hyp=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0022-1155: ref=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SINCE'] +4446-2271-0022-1155: hyp=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SE'] +4446-2271-0023-1156: ref=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0023-1156: hyp=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0024-1157: ref=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2271-0024-1157: hyp=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2273-0000-1158: ref=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0000-1158: hyp=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0001-1159: ref=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0001-1159: hyp=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0002-1160: ref=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0002-1160: hyp=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0003-1161: ref=['WHEN', 'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0003-1161: hyp=['WHEN', 
'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0004-1162: ref=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 'ENGLISH', 'COOKERY'] +4446-2273-0004-1162: hyp=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 'ENGLISH', 'COOKERY'] +4446-2273-0005-1163: ref=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0005-1163: hyp=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0006-1164: ref=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VILLA', "D'ESTE", 'YOU', 'SEE'] +4446-2273-0006-1164: hyp=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VALIDESTE', 'YOU', 'SEE'] +4446-2273-0007-1165: ref=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAINHALL'] +4446-2273-0007-1165: hyp=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAIN', 'HALL'] +4446-2273-0008-1166: ref=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0008-1166: hyp=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0009-1167: ref=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0009-1167: hyp=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0010-1168: ref=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0010-1168: hyp=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0011-1169: ref=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0011-1169: hyp=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0012-1170: ref=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0012-1170: hyp=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0013-1171: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0013-1171: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0014-1172: ref=['THERE', 'ARE', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] +4446-2273-0014-1172: hyp=['THERE', 'ARE', 'A', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] 
+4446-2273-0015-1173: ref=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0015-1173: hyp=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0016-1174: ref=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 'AND', 'HER', 'BLUE', 'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BAINS', 'DE', 'MER'] +4446-2273-0016-1174: hyp=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 'AND', 'HER', 'BLUE', 'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BANDUME'] +4446-2273-0017-1175: ref=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0017-1175: hyp=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0018-1176: ref=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0018-1176: hyp=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0019-1177: ref=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0019-1177: hyp=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0020-1178: ref=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0020-1178: hyp=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0021-1179: ref=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANCS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0021-1179: hyp=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANKS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0022-1180: ref=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0022-1180: hyp=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0023-1181: ref=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0023-1181: hyp=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0024-1182: ref=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0024-1182: hyp=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0025-1183: ref=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0025-1183: hyp=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0026-1184: ref=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] +4446-2273-0026-1184: hyp=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] 
+4446-2273-0027-1185: ref=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0027-1185: hyp=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0028-1186: ref=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0028-1186: hyp=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0029-1187: ref=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0029-1187: hyp=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0030-1188: ref=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0030-1188: hyp=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0031-1189: ref=['THERE', 'JUST', 'IN', 'FRONT'] +4446-2273-0031-1189: hyp=['THERE', 'JUST', 'IN', 'FRON'] +4446-2273-0032-1190: ref=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0032-1190: hyp=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0033-1191: ref=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPOKE'] +4446-2273-0033-1191: hyp=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPO'] +4446-2273-0034-1192: ref=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0034-1192: hyp=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0035-1193: ref=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0035-1193: hyp=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0036-1194: ref=['ALEXANDER', 'UNCLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2273-0036-1194: hyp=['ALEXANDER', 'CLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2275-0000-1195: ref=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'UP', 'THE', 'MERSEY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0000-1195: hyp=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'OF', 'THE', 'MERCY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0001-1196: ref=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0001-1196: hyp=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0002-1197: ref=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 
'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0002-1197: hyp=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0003-1198: ref=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 'HE', 'ENTERED'] +4446-2275-0003-1198: hyp=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 'HE', 'ENTER'] +4446-2275-0004-1199: ref=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0004-1199: hyp=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0005-1200: ref=['I', 'FELT', 'IT', 'IN', 'MY', 'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0005-1200: hyp=['I', 'FELT', 'IT', 'IN', 'MY', 'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0006-1201: ref=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0006-1201: hyp=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0007-1202: ref=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0007-1202: hyp=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0008-1203: ref=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0008-1203: hyp=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0009-1204: ref=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0009-1204: hyp=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0010-1205: ref=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0010-1205: hyp=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0011-1206: ref=['BARTLEY', 'BENT', 'LOWER', 'OVER', 'THE', 'FIRE'] +4446-2275-0011-1206: hyp=['BARTLEY', 'BENT', 'LOWERED', 'OVER', 'THE', 'FIRE'] +4446-2275-0012-1207: ref=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0012-1207: hyp=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0013-1208: ref=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0013-1208: hyp=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0014-1209: ref=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0014-1209: hyp=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0015-1210: ref=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 
'HEAVY'] +4446-2275-0015-1210: hyp=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 'HEAVY'] +4446-2275-0016-1211: ref=['HILDA', 'WATCHED', 'HIM', 'FROM', 'HER', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0016-1211: hyp=['HILDA', 'WATCHED', 'HIM', 'FROM', 'THE', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0017-1212: ref=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] +4446-2275-0017-1212: hyp=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] +4446-2275-0018-1213: ref=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0018-1213: hyp=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0019-1214: ref=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0019-1214: hyp=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0020-1215: ref=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0020-1215: hyp=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0021-1216: ref=["HILDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0021-1216: hyp=["HELDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0022-1217: ref=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0022-1217: hyp=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0023-1218: ref=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0023-1218: hyp=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0024-1219: ref=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0024-1219: hyp=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0025-1220: ref=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0025-1220: hyp=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0026-1221: ref=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0026-1221: hyp=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0027-1222: ref=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0027-1222: hyp=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0028-1223: ref=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0028-1223: hyp=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0029-1224: ref=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0029-1224: hyp=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0030-1225: ref=['YES', 
'HILDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0030-1225: hyp=['YES', 'HELDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0031-1226: ref=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0031-1226: hyp=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0032-1227: ref=['BUT', 'I', "DIDN'T", 'KNOW', "YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0032-1227: hyp=['BUT', 'I', "DIDN'T", 'KNOW', "YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0033-1228: ref=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0033-1228: hyp=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0034-1229: ref=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0034-1229: hyp=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0035-1230: ref=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0035-1230: hyp=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0036-1231: ref=['HE', 'TOOK', 'HER', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0036-1231: hyp=['HE', 'TOOK', 'A', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0037-1232: ref=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0037-1232: hyp=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0038-1233: ref=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0038-1233: hyp=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0039-1234: ref=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0039-1234: hyp=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0040-1235: ref=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0040-1235: hyp=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0041-1236: ref=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0041-1236: hyp=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0042-1237: ref=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0042-1237: hyp=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0043-1238: ref=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0043-1238: hyp=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0044-1239: ref=["DON'T", 'CRY', "DON'T", 
'CRY', 'HE', 'WHISPERED'] +4446-2275-0044-1239: hyp=['I', "DON'T", 'CRY', "DON'T", 'CRY', 'HE', 'WHISPERED'] +4446-2275-0045-1240: ref=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TONIGHT'] +4446-2275-0045-1240: hyp=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TO', 'NIGHT'] +4507-16021-0000-1469: ref=['CHAPTER', 'ONE', 'ORIGIN'] +4507-16021-0000-1469: hyp=['CHAPTER', 'ONE', 'ORIGIN'] +4507-16021-0001-1470: ref=['IT', 'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEGRE', 'FOR', 'WHICH', 'READ', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRENNE', 'FOR', 'WHICH', 'READ', 'HUNGER'] +4507-16021-0001-1470: hyp=['IT', 'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEG', 'FOR', 'WHICH', 'RED', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRIN', 'FOR', 'WHICH', 'RED', 'HUNGER'] +4507-16021-0002-1471: ref=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0002-1471: hyp=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0003-1472: ref=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0003-1472: hyp=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0004-1473: ref=['WHAT', 'IS', 'SLANG'] +4507-16021-0004-1473: hyp=['WHAT', 'IS', 'SLANG'] +4507-16021-0005-1474: ref=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0005-1474: hyp=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0006-1475: ref=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0006-1475: hyp=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0007-1476: ref=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0007-1476: hyp=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0008-1477: ref=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0008-1477: hyp=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0009-1478: ref=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'WHEN', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0009-1478: hyp=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'WHEN', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0010-1479: ref=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'AND', 'FULFILLED', 'MERITS'] +4507-16021-0010-1479: hyp=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'IN', 'FULFILLED', 'MERITS'] +4507-16021-0011-1480: ref=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0011-1480: hyp=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0012-1481: ref=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0012-1481: hyp=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0013-1482: ref=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0013-1482: 
hyp=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0014-1483: ref=['NOW', 'WHEN', 'HAS', 'HORROR', 'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0014-1483: hyp=['NO', 'WHEN', 'HAS', 'HORROR', 'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0015-1484: ref=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0015-1484: hyp=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0016-1485: ref=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TARANTULA', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'OH', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0016-1485: hyp=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TERENTIAL', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'O', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0017-1486: ref=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0017-1486: hyp=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0018-1487: ref=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0018-1487: hyp=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0019-1488: ref=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0019-1488: hyp=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0020-1489: ref=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0020-1489: hyp=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0021-1490: ref=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIRDRESSER', 'WHO', 'SAYS', 'MY', 'MEALYBACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0021-1490: hyp=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIR', 'DRESSER', 'WHO', 'SAYS', 'MY', 'MEALLY', 'BACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0022-1491: ref=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 'AS', 'WELL', 'AS', 'OF', 'THE', 'PRECIEUSES'] +4507-16021-0022-1491: hyp=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 
'AS', 'WELL', 'AS', 'OF', 'THE', 'PURSUS'] +4507-16021-0023-1492: ref=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0023-1492: hyp=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0024-1493: ref=['ALGEBRA', 'MEDICINE', 'BOTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0024-1493: hyp=['ALGEBRA', 'MEDICINE', 'BARTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0025-1494: ref=['TO', 'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0025-1494: hyp=['TO', 'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0026-1495: ref=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0026-1495: hyp=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0027-1496: ref=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0027-1496: hyp=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0028-1497: ref=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0028-1497: hyp=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0029-1498: ref=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0029-1498: hyp=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0030-1499: ref=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0030-1499: hyp=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0031-1500: ref=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICE', 'IS', 'PERMITTED'] +4507-16021-0031-1500: hyp=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 
'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICES', 'PERMITTED'] +4507-16021-0032-1501: ref=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 'A', 'BROTHER', 'AND', 'AS', 'A', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WHERE', 'CRAWL', 'PELL', 'MELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'AND', 'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0032-1501: hyp=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 'A', 'BROTHER', 'AND', 'AS', 'HE', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WERE', 'CRAWL', 'PELL', 'MELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'IN', 'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0033-1502: ref=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0033-1502: hyp=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0034-1503: ref=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0034-1503: hyp=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0035-1504: ref=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0035-1504: hyp=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0036-1505: ref=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0036-1505: hyp=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0037-1506: ref=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0037-1506: hyp=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0038-1507: ref=['IN', 'THIS', 'GUISE', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0038-1507: hyp=['IN', 'THE', 'SKIES', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0039-1508: ref=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0039-1508: hyp=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0040-1509: ref=['ONE', 'THINKS', 'ONE', 'HEARS', 'HYDRAS', 'TALKING'] +4507-16021-0040-1509: hyp=['ONE', 'THINKS', 
'ONE', 'HEARS', 'HYDRAST', 'TALKING'] +4507-16021-0041-1510: ref=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0041-1510: hyp=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0042-1511: ref=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'IN', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 'AMALGAMATED', 'COMPOSE', 'SLANG'] +4507-16021-0042-1511: hyp=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'AND', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 'AMALGAMATED', 'COMPOSED', 'SLING'] +4507-16021-0043-1512: ref=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0043-1512: hyp=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0044-1513: ref=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0044-1513: hyp=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0045-1514: ref=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0045-1514: hyp=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0046-1515: ref=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'OR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0046-1515: hyp=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'FOR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0047-1516: ref=['YESTERDAY', 'YOU', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIATRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0047-1516: hyp=['YESTERDAY', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIETRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0048-1517: ref=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'IT', 'GOES', 'ON'] +4507-16021-0048-1517: hyp=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'TO', 'GOES', 'ON'] +4507-16021-0049-1518: ref=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0049-1518: hyp=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0050-1519: ref=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'HAPPY'] +4507-16021-0050-1519: hyp=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'A', 'HAPPY'] +4507-16021-0051-1520: ref=['IN', 
'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0051-1520: hyp=['IN', 'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0052-1521: ref=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 'AND', 'THE', 'SHADY'] +4507-16021-0052-1521: hyp=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 'AND', 'THE', 'SHADY'] +4507-16021-0053-1522: ref=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 'THE', 'OBJECT'] +4507-16021-0053-1522: hyp=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 'THE', 'OBJECT'] +4507-16021-0054-1523: ref=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0054-1523: hyp=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0055-1524: ref=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'LIGHT', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0055-1524: hyp=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'WRITE', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0056-1525: ref=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0056-1525: hyp=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0057-1526: ref=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0057-1526: hyp=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0058-1527: ref=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0058-1527: hyp=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0059-1528: ref=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4507-16021-0059-1528: hyp=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4970-29093-0000-2093: ref=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTOR', 'LIBRARY'] +4970-29093-0000-2093: hyp=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTRO', 'LIBRARY'] +4970-29093-0001-2094: ref=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0001-2094: hyp=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0002-2095: ref=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0002-2095: hyp=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0003-2096: ref=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 
'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILLFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 'DOCTOR', 'KANE'] +4970-29093-0003-2096: hyp=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 'DOCTOR', 'KANE'] +4970-29093-0004-2097: ref=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0004-2097: hyp=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0005-2098: ref=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROWS', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BUL', 'BUL', 'SINGS', 'ON', 'THE', 'OFF', 'NIGHTS'] +4970-29093-0005-2098: hyp=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROVES', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BULBUL', 'SINGS', 'ON', 'THE', 'OFF', 'NIGHTS'] +4970-29093-0006-2099: ref=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'IN', 'THE', 'ANTE', 'ROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLE', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'FEES'] +4970-29093-0006-2099: hyp=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'AND', 'THE', 'ANTE', 'ROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLED', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'BEES'] +4970-29093-0007-2100: ref=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 
'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0007-2100: hyp=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0008-2101: ref=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 'THE', 'LADDER'] +4970-29093-0008-2101: hyp=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 'THE', 'LADDER'] +4970-29093-0009-2102: ref=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 'NURSED', 'HIS', 'GENIUS'] +4970-29093-0009-2102: hyp=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 'NURSE', 'HIS', 'GENIUS'] +4970-29093-0010-2103: ref=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THEN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPEVINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0010-2103: hyp=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THAN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPE', 'VINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0011-2104: ref=['O', 'VERY', 'WELL', 'SAID', 'GRINGO', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0011-2104: hyp=['OH', 'VERY', 'WELL', 'SAID', 'GREENOW', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0012-2105: ref=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THANKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0012-2105: hyp=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THINKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0013-2106: ref=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0013-2106: hyp=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0014-2107: ref=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'CAN', 'GO', 'AS', 'ONE'] +4970-29093-0014-2107: hyp=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'COULD', 'GO', 'AS', 'ONE'] +4970-29093-0015-2108: ref=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 
'FIGURES'] +4970-29093-0015-2108: hyp=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 'FIGURES'] +4970-29093-0016-2109: ref=['NO', 'ITS', 'NOT', 'TOO', 'SOON'] +4970-29093-0016-2109: hyp=['NO', "IT'S", 'NOT', 'TOO', 'SOON'] +4970-29093-0017-2110: ref=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0017-2110: hyp=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0018-2111: ref=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURE', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0018-2111: hyp=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURER', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0019-2112: ref=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0019-2112: hyp=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0020-2113: ref=['WHY', "IT'S", 'IN', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0020-2113: hyp=['WHY', "IT'S", 'A', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0021-2114: ref=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0021-2114: hyp=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0022-2115: ref=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0022-2115: hyp=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0023-2116: ref=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29093-0023-2116: hyp=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29095-0000-2054: ref=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0000-2054: hyp=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0001-2055: ref=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SANG', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0001-2055: hyp=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SAYING', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0002-2056: ref=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 
'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0002-2056: hyp=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0003-2057: ref=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 'FOR', 'THE', 'PIANO', 'AND', 'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0003-2057: hyp=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 'FOR', 'THE', 'PIANO', 'AND', 'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0004-2058: ref=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0004-2058: hyp=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0005-2059: ref=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0005-2059: hyp=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0006-2060: ref=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0006-2060: hyp=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0007-2061: ref=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0007-2061: hyp=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0008-2062: ref=['MOTHER', "I'M", 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0008-2062: hyp=['MOTHER', 'I', 'AM', 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0009-2063: ref=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0009-2063: hyp=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0010-2064: ref=['THEE', 'STUDY', 'MEDICINE'] +4970-29095-0010-2064: hyp=['THE', 'STUDY', 'MEDICINE'] +4970-29095-0011-2065: ref=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0011-2065: hyp=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0012-2066: ref=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0012-2066: hyp=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0013-2067: ref=['I', 'WILL', 'PRACTICE', 'IT'] 
+4970-29095-0013-2067: hyp=['I', 'WILL', 'PRACTISE', 'IT'] +4970-29095-0014-2068: ref=['WHERE', 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0014-2068: hyp=["WHERE'S", 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0015-2069: ref=['IF', 'I', 'CAN', 'GET', 'PATIENTS'] +4970-29095-0015-2069: hyp=['IF', 'I', 'CAN', 'GET', 'PATIENCE'] +4970-29095-0016-2070: ref=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'INTENT', 'AND', 'FLUSHED', 'IT', 'WAS', 'OUT', 'NOW'] +4970-29095-0016-2070: hyp=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'AND', 'TENT', 'AND', 'FLUSHED', 'IT', 'WAS', 'OUT', 'NOW'] +4970-29095-0017-2071: ref=['THE', 'SIGHT', 'SEERS', 'RETURNED', 'IN', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0017-2071: hyp=['THE', 'SIGHTSEERS', 'RETURNED', 'AND', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0018-2072: ref=['RUTH', 'ASKED', 'THE', 'ENTHUSIASTS', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MAUSOLEUM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0018-2072: hyp=['RUTH', 'ASKED', 'THE', 'ENTHUSIASTS', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MUSOLEUM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0019-2073: ref=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0019-2073: hyp=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0020-2074: ref=['THERE', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIAN', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'POINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0020-2074: hyp=['THERE', 'IS', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIA', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'BLINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0021-2075: ref=['BUT', 'NEITHER', 'SAINT', 'GIRARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MINT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESSED', 'THE', 'VISITORS', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0021-2075: hyp=['BUT', 'NEITHER', 'SAINT', 'GERARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MENT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESS', 'THE', 'VISITOR', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0022-2076: ref=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0022-2076: hyp=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0023-2077: ref=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THAT', 
'DEMURE', 'PERSON'] +4970-29095-0023-2077: hyp=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THE', 'DEMURE', 'PERSON'] +4970-29095-0024-2078: ref=['IT', 'HAS', 'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'AT', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0024-2078: hyp=['IT', 'HAS', 'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0025-2079: ref=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0025-2079: hyp=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0026-2080: ref=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HEAR', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0026-2080: hyp=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HERE', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0027-2081: ref=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LINE', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0027-2081: hyp=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LIE', 'IN', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0028-2082: ref=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WEDNESDAY', 'MEETING', 'HUMPH', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0028-2082: hyp=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WIND', 'ZAY', 'MEETING', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0029-2083: ref=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICK', 'A', 'POOS'] +4970-29095-0029-2083: hyp=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICKAPOOS'] +4970-29095-0030-2084: ref=['FATHER', "THEE'S", 'UNJUST', 'TO', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0030-2084: hyp=['FATHER', 'THESE', 'UNJUSTI', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0031-2085: ref=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'THEE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0031-2085: hyp=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'HE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 
'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0032-2086: ref=['BUT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0032-2086: hyp=['THAT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0033-2087: ref=['WHAT', 'A', 'BOX', 'WOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUT', 'IN', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 'DISABILITIES'] +4970-29095-0033-2087: hyp=['WHAT', 'A', 'BOXWOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUTTING', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 'DISABILITIES'] +4970-29095-0034-2088: ref=['WHY', 'SHOULD', 'I', 'RUST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'INACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0034-2088: hyp=['WHY', 'SHOULD', 'I', 'REST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'AN', 'ACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0035-2089: ref=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0035-2089: hyp=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0036-2090: ref=['HAS', 'THEE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'THEE', 'WANTS'] +4970-29095-0036-2090: hyp=['HAS', 'THE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'OF', 'THEE', 'WANTS'] +4970-29095-0037-2091: ref=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0037-2091: hyp=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0038-2092: ref=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'A', 'WAY', 'FOR', 'HIM'] +4970-29095-0038-2092: hyp=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'AWAY', 'FOR', 'HIM'] +4992-23283-0000-2140: ref=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0000-2140: hyp=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0001-2141: ref=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0001-2141: hyp=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0002-2142: ref=['SAID', 'MISSUS', 'HORTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0002-2142: hyp=['SAID', 'MISSUS', 'WHARTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0003-2143: ref=['SO', 'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0003-2143: hyp=['SO', 
'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0004-2144: ref=['AND', 'YET', 'YOU', 'MUST', 'OWN', 'HER', 'BEHAVIOUR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0004-2144: hyp=['AND', 'YET', 'YOU', 'MUST', 'OWN', 'HER', 'BEHAVIOR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0005-2145: ref=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0005-2145: hyp=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0006-2146: ref=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0006-2146: hyp=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0007-2147: ref=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0007-2147: hyp=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0008-2148: ref=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0008-2148: hyp=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0009-2149: ref=['OH', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSON', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'ENTRUSTING'] +4992-23283-0009-2149: hyp=['O', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSONAL', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'INTRUSTING'] +4992-23283-0010-2150: ref=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0010-2150: hyp=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0011-2151: ref=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0011-2151: hyp=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0012-2152: ref=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'A', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0012-2152: hyp=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 
'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'OF', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0013-2153: ref=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0013-2153: hyp=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0014-2154: ref=['WHAT', 'CAN', 'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0014-2154: hyp=['WHAT', 'CAN', 'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0015-2155: ref=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0015-2155: hyp=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0016-2156: ref=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0016-2156: hyp=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0017-2157: ref=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MILITATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0017-2157: hyp=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MITIGATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0018-2158: ref=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0018-2158: hyp=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0019-2159: ref=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0019-2159: hyp=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0020-2160: ref=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-23283-0020-2160: hyp=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-41797-0000-2117: ref=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AN', 'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0000-2117: hyp=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AND', 
'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0001-2118: ref=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'INTO', 'JAIL', 'AND', 'THE', 'MURDERERS', 'AND', 'THE', 'WIFE', 'BEATERS', "I'VE", 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 'PUNISHMENT', 'FOR', 'A', 'WIFE', 'BEATER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'O', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIREBUGS', "CAN'T", 'THINK', 'O', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'CENDENARIES', 'AN', 'THE', 'BREAKERS', 'O', 'THE', 'PEACE', 'AN', 'WHAT', 'NOT', 'AN', 'YET', 'THE', 'LAW', 'HAS', 'NOTHIN', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HEN', 'LORD'] +4992-41797-0001-2118: hyp=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'AND', 'JAIL', 'THE', 'MURDERERS', 'IN', 'THE', 'WHITE', 'BEATERS', 'I', 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 'PUNISHMENT', 'FOR', 'A', 'WIFE', 'PETER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'OF', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIRE', 'BUGS', "CAN'T", 'THINK', 'OF', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'SENDIARIES', 'AND', 'THE', 'BREAKERS', 'OF', 'THE', 'PIECE', 'AND', 'WHAT', 'NOT', 'AND', 'YET', 'THE', 'LAW', 'HAS', 'NOTHING', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HANDLED'] +4992-41797-0002-2119: ref=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0002-2119: hyp=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0003-2120: ref=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0003-2120: hyp=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0004-2121: ref=['I', 'SWAN', 'TO', 'MAN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TIMES', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0004-2121: hyp=['I', 'SWAY', 'INTO', 'MEN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TUBS', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0005-2122: ref=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', "HE'D", 'OUGHTER', 'SENCE', 'HE', 'WAS', 'BORN'] +4992-41797-0005-2122: hyp=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', 'HE', 'ORDERS', 'SINCE', 'HE', 'WAS', 'BORN'] +4992-41797-0006-2123: ref=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMANDMENTS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0006-2123: hyp=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMANDS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0007-2124: ref=['HE', 'GIVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SO', 'T', 'HE', 'COULD', 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0007-2124: hyp=['HE', 'GAVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SEWED', 'HE', "COULDN'T", 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0008-2125: ref=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 'LORDS'] +4992-41797-0008-2125: hyp=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 
'LORDS'] +4992-41797-0009-2126: ref=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'PH', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] +4992-41797-0009-2126: hyp=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'P', 'D', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] +4992-41797-0010-2127: ref=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0010-2127: hyp=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0011-2128: ref=['WHATEVER', 'APPEALED', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0011-2128: hyp=['WHATEVER', 'APPEAL', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0012-2129: ref=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THINGS'] +4992-41797-0012-2129: hyp=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THING'] +4992-41797-0013-2130: ref=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0013-2130: hyp=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0014-2131: ref=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0014-2131: hyp=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0015-2132: ref=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0015-2132: hyp=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0016-2133: ref=['THEY', "COULDN'T", 'RUN', 'NOR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0016-2133: hyp=['THEY', "COULDN'T", 'RUN', 'OR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0017-2134: ref=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0017-2134: hyp=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 
'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0018-2135: ref=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0018-2135: hyp=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0019-2136: ref=["OLIVE'S", 'MOURNFUL', 'BLACK', 'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0019-2136: hyp=['ALL', 'OF', 'HIS', 'MOURNFUL', 'BLACK', 'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0020-2137: ref=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLAITS', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0020-2137: hyp=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLATES', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0021-2138: ref=["SHE'S", 'WONDERFUL', 'MORE', 'WONDERFUL', 'THAN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0021-2138: hyp=['SHE', 'IS', 'WONDERFUL', 'MORE', 'WONDERFUL', 'IN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0022-2139: ref=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41797-0022-2139: hyp=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41806-0000-2161: ref=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0000-2161: hyp=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0001-2162: ref=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0001-2162: hyp=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0002-2163: ref=['THEY', 'BEGAN', 'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NATTY', 'AND', 'RUFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LALLIE', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0002-2163: hyp=['THEY', 'BEGAN', 
'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NANNIE', 'AND', 'RUFFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LILY', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0003-2164: ref=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] +4992-41806-0003-2164: hyp=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] +4992-41806-0004-2165: ref=['BURN', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0004-2165: hyp=['BURNE', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0005-2166: ref=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0005-2166: hyp=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0006-2167: ref=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORACE', 'TIBI', 'SPLENDET', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0006-2167: hyp=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORNS', 'TIBBY', 'SPLENDID', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0007-2168: ref=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0007-2168: hyp=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0008-2169: ref=['OLIVE', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0008-2169: hyp=['AH', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0009-2170: ref=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0009-2170: hyp=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0010-2171: ref=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0010-2171: hyp=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0011-2172: ref=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHERS', 'HELPED', 'SERVE', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0011-2172: 
hyp=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHER', 'SELF', 'SERVED', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0012-2173: ref=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 'GREEN', 'FELT'] +4992-41806-0012-2173: hyp=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 'GREEN', 'FELT'] +4992-41806-0013-2174: ref=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0013-2174: hyp=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0014-2175: ref=['THINKS', 'I', 'TO', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'OSH', 'POPHAM', "COULDN'T", 'MEND', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AN', "I'VE", 'SPENT', 'ELEVEN', "EVENIN'S", 'PUTTIN', 'HIM', 'TOGETHER'] +4992-41806-0014-2175: hyp=['THINKS', 'OUT', 'OF', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'I', "COULDN'T", 'MEN', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AND', "I'VE", 'SPENT', 'ELEVEN', 'EVENINGS', 'PUTTING', 'HIM', 'TOGETHER'] +4992-41806-0015-2176: ref=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBAND', 'PRIVATELY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OVER', 'HIS', 'HEAD', 'MOST', 'OF', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0015-2176: hyp=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBABLY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OF', 'HIS', 'HEAD', 'MOST', 'OF', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0016-2177: ref=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0016-2177: hyp=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0017-2178: ref=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 'CORN', 'EARS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CASTS', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +4992-41806-0017-2178: hyp=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 
'CORNIERS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CASTS', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +5105-28233-0000-1649: ref=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0000-1649: hyp=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0001-1650: ref=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] +5105-28233-0001-1650: hyp=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] +5105-28233-0002-1651: ref=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0002-1651: hyp=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0003-1652: ref=['WE', "DON'T", 'SPIN', 'TOPS', 'IS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0003-1652: hyp=['WE', "DON'T", 'SPEND', 'TOPS', 'AS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0004-1653: ref=['ONCE', 'IN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'INTRENCHMENT'] +5105-28233-0004-1653: hyp=['ONCE', 'AN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'ENTRENCHMENT'] +5105-28233-0005-1654: ref=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0005-1654: hyp=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0006-1655: ref=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MONTMARTRE'] +5105-28233-0006-1655: hyp=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MOUNT', 'MARSHRA'] +5105-28233-0007-1656: ref=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARRONDISSEMENT', 'OF', 'PARIS', 
'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRE', 'WITHOUT', 'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0007-1656: hyp=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARE', 'UNDISSIMA', 'OF', 'PARIS', 'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRA', 'WITHOUT', 'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0008-1657: ref=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0008-1657: hyp=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0009-1658: ref=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZOOF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONORS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0009-1658: hyp=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZEF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONOURS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0010-1659: ref=['UNLIKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28233-0010-1659: hyp=['I', 'MAKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28240-0000-1624: ref=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0000-1624: hyp=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0001-1625: ref=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 'THE', 'WATER'] +5105-28240-0001-1625: hyp=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 
'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 'THE', 'WATER'] +5105-28240-0002-1626: ref=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0002-1626: hyp=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0003-1627: ref=['SHE', 'IS', 'UNDER', 'SAIL', 'BUT', 'SHE', 'IS', 'COUNT', "TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0003-1627: hyp=['SHE', 'IS', 'UNDER', 'SALE', 'BUT', 'SHE', 'IS', 'COUNT', "TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0004-1628: ref=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0004-1628: hyp=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0005-1629: ref=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0005-1629: hyp=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0006-1630: ref=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0006-1630: hyp=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0007-1631: ref=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0007-1631: hyp=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0008-1632: ref=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'ENSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0008-1632: hyp=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'INSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0009-1633: ref=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0009-1633: hyp=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 
'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0010-1634: ref=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARDS', 'HIM'] +5105-28240-0010-1634: hyp=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARD', 'HIM'] +5105-28240-0011-1635: ref=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] +5105-28240-0011-1635: hyp=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] +5105-28240-0012-1636: ref=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0012-1636: hyp=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0013-1637: ref=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0013-1637: hyp=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0014-1638: ref=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0014-1638: hyp=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0015-1639: ref=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0015-1639: hyp=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'AND', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0016-1640: ref=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0016-1640: hyp=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0017-1641: ref=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0017-1641: hyp=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0018-1642: ref=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0018-1642: hyp=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0019-1643: ref=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'ROUND', 'THE', 'WORLD'] +5105-28240-0019-1643: hyp=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'AROUND', 'THE', 'WORLD'] +5105-28240-0020-1644: ref=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0020-1644: hyp=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0021-1645: ref=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] +5105-28240-0021-1645: hyp=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] 
+5105-28240-0022-1646: ref=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] +5105-28240-0022-1646: hyp=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] +5105-28240-0023-1647: ref=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0023-1647: hyp=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0024-1648: ref=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28240-0024-1648: hyp=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28241-0000-1604: ref=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0000-1604: hyp=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0001-1605: ref=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FAR', 'THE', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0001-1605: hyp=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FARTHER', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0002-1606: ref=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] 
+5105-28241-0002-1606: hyp=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] +5105-28241-0003-1607: ref=['STEAM', 'UP', 'AND', 'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARDS'] +5105-28241-0003-1607: hyp=['STEAM', 'UP', 'AND', 'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARD'] +5105-28241-0004-1608: ref=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLES', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0004-1608: hyp=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLE', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0005-1609: ref=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0005-1609: hyp=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0006-1610: ref=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0006-1610: hyp=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0007-1611: ref=['THERE', 'IS', 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0007-1611: hyp=["THERE'S", 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0008-1612: ref=['THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ONTO', 'THE', 'SUN'] +5105-28241-0008-1612: hyp=['THAT', 'THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ON', 'TO', 'THE', 'SUN'] +5105-28241-0009-1613: ref=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0009-1613: hyp=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0010-1614: ref=['OCEAN', 'REIGNED', 'SUPREME'] +5105-28241-0010-1614: hyp=['OCEAN', 'RAINED', 'SUPREME'] +5105-28241-0011-1615: ref=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 'THOUGHTS', 'SPED', 'AWAY', 'TO', 
'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0011-1615: hyp=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 'THOUGHTS', 'SPED', 'AWAY', 'TO', 'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0012-1616: ref=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0012-1616: hyp=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0013-1617: ref=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0013-1617: hyp=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0014-1618: ref=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0014-1618: hyp=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0015-1619: ref=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATER'] +5105-28241-0015-1619: hyp=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATRE'] +5105-28241-0016-1620: ref=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0016-1620: hyp=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0017-1621: ref=['AFTER', 'PONDERING', 'AWHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0017-1621: hyp=['AFTER', 'PONDERING', 'A', 'WHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 
'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0018-1622: ref=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 'TYPE'] +5105-28241-0018-1622: hyp=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 'TYPE'] +5105-28241-0019-1623: ref=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'IN', 'DISAPPOINTMENT', 'TOWARDS', 'THE', 'NORTH'] +5105-28241-0019-1623: hyp=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'AND', 'DISAPPOINTMENT', 'TOWARD', 'THE', 'NORTH'] +5142-33396-0000-898: ref=['AT', 'ANOTHER', 'TIME', 'HARALD', 'ASKED'] +5142-33396-0000-898: hyp=['AT', 'ANOTHER', 'TIME', 'HAROLD', 'ASKED'] +5142-33396-0001-899: ref=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0001-899: hyp=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0002-900: ref=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0002-900: hyp=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0003-901: ref=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0003-901: hyp=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0004-902: ref=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0004-902: hyp=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0005-903: ref=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0005-903: hyp=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0006-904: ref=['I', 'MADE', 'HER', 'FOR', 'ONLY', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0006-904: hyp=['I', 'MADE', 'HER', 'FALL', 'ONLY', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0007-905: ref=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0007-905: hyp=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0008-906: ref=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0008-906: hyp=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0009-907: ref=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0009-907: hyp=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0010-908: ref=['IN', 'THE', 'STERN', 'I', 'CURVED', 'THE', 'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0010-908: hyp=['IN', 'THE', 'STERN', 'I', 'CARVED', 'THE', 
'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0011-909: ref=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] +5142-33396-0011-909: hyp=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] +5142-33396-0012-910: ref=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', 'WILL', 'WINTER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0012-910: hyp=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', "WE'LL", 'WINNER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0013-911: ref=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MEN', 'SAID'] +5142-33396-0013-911: hyp=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MAN', 'SAID'] +5142-33396-0014-912: ref=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0014-912: hyp=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0015-913: ref=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0015-913: hyp=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0016-914: ref=['SO', 'WE', 'HARRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0016-914: hyp=['SO', 'WE', 'HURRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0017-915: ref=['WE', 'ATE', 'AT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0017-915: hyp=['WE', 'ATE', 'IT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0018-916: ref=['MY', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOLD'] +5142-33396-0018-916: hyp=['I', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOL'] +5142-33396-0019-917: ref=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0019-917: hyp=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0020-918: ref=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', 'SHIP', 'SMELLS', 'OF', 'FROLIC'] +5142-33396-0020-918: hyp=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', "SHIP'S", 'MILLS', 'OF', 'FROLIC'] +5142-33396-0021-919: ref=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0021-919: hyp=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0022-920: ref=['WHAT', 'OF', 'THE', 'FARM', 'OLAF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0022-920: hyp=['WHAT', 'IS', 'THE', 'FARM', 'OLOFF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0023-921: ref=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0023-921: hyp=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0024-922: ref=['I', 'STOOD', 'WITH', 'MY', 'BACK', 'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0024-922: hyp=['I', 'STOOD', 'WITH', 'MY', 'BACK', 
'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0025-923: ref=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 'FIRE'] +5142-33396-0025-923: hyp=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 'FIRE'] +5142-33396-0026-924: ref=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOST'] +5142-33396-0026-924: hyp=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOSE'] +5142-33396-0027-925: ref=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'HAD', 'NOT', 'EXPECTED', 'US'] +5142-33396-0027-925: hyp=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'IS', 'NOT', 'EXPECTED', 'US'] +5142-33396-0028-926: ref=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0028-926: hyp=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0029-927: ref=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0029-927: hyp=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0030-928: ref=['THE', 'THRALLS', 'WERE', 'BRINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0030-928: hyp=['THE', 'THRALLS', 'WERE', 'RINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0031-929: ref=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0031-929: hyp=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0032-930: ref=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASKET', 'LOADS', 'OF', 'BREAD', 'INTO', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0032-930: hyp=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASCULADES', 'OF', 'BREAD', 'AND', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0033-931: ref=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0033-931: hyp=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0034-932: ref=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SENT', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0034-932: hyp=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SET', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0035-933: ref=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0035-933: hyp=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0036-934: ref=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALONE'] +5142-33396-0036-934: 
hyp=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALO'] +5142-33396-0037-935: ref=['HAKON', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0037-935: hyp=['HAWKIN', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0038-936: ref=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 'SLEEPING'] +5142-33396-0038-936: hyp=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 'SLEEPING'] +5142-33396-0039-937: ref=['I', 'NAMED', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0039-937: hyp=['I', 'NAME', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0040-938: ref=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0040-938: hyp=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0041-939: ref=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0041-939: hyp=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0042-940: ref=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0042-940: hyp=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0043-941: ref=['THEIR', 'EYES', 'DANCED', 'BIG', 'THORLEIF', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0043-941: hyp=['THEIR', 'EYES', 'DANCED', 'BIG', 'TORE', 'LEAFS', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0044-942: ref=['I', 'AM', 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0044-942: hyp=["I'M", 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0045-943: ref=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0045-943: hyp=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0046-944: ref=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0046-944: hyp=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0047-945: ref=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0047-945: hyp=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0048-946: ref=['BY', 'THE', 'HAMMER', 'OF', 'THOR', 'SHOUTED', 'GRIM', 'HERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0048-946: hyp=['BY', 'THE', 'HAMMER', 'A', 'THOR', 'SHOUTED', 'GRIM', 'THERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0049-947: ref=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0049-947: hyp=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0050-948: ref=['MAY', 'YOU', 'DRINK', "HEART'S", 'EASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] +5142-33396-0050-948: hyp=['MAY', 'YOU', 'DRINK', 'HEARTSEASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] 
+5142-33396-0051-949: ref=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIF', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 'DRINK', 'WITH', 'YOU', 'SOMETIME', 'IN', 'VALHALLA'] +5142-33396-0051-949: hyp=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIFT', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 'DRINK', 'WITH', 'YOU', 'SOME', 'TIME', 'IN', 'VALHALLA'] +5142-33396-0052-950: ref=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIF', 'THE', 'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0052-950: hyp=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIF', 'THE', 'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'AND', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0053-951: ref=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO', 'HIM'] +5142-33396-0053-951: hyp=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO'] +5142-33396-0054-952: ref=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0054-952: hyp=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0055-953: ref=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0055-953: hyp=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0056-954: ref=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0056-954: hyp=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0057-955: ref=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0057-955: hyp=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0058-956: ref=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'SCOWLED', 'AT', 'ME'] +5142-33396-0058-956: hyp=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'HE', 'SCOWLED', 'AT', 'ME'] +5142-33396-0059-957: ref=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0059-957: hyp=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0060-958: ref=['TAKE', 'HIM', 'OUT', 'THORKEL', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0060-958: hyp=['TAKE', 'HIM', 'OUT', 'TORQUAL', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0061-959: ref=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0061-959: hyp=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0062-960: ref=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0062-960: hyp=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0063-961: ref=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0063-961: hyp=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 
'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0064-962: ref=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'THEN', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMILED'] +5142-33396-0064-962: hyp=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'AND', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMIL'] +5142-33396-0065-963: ref=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'THORKEL', 'WELL', 'LET', 'HIM', 'GO', 'THORKEL'] +5142-33396-0065-963: hyp=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'TORQUAL', 'WELL', 'LET', 'HIM', 'GO', 'TORKLE'] +5142-33396-0066-964: ref=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 'FROWNING'] +5142-33396-0066-964: hyp=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 'FROWNING'] +5142-33396-0067-965: ref=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', 'WE', 'HAVE', 'CAUGHT', 'YOU', 'WE', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0067-965: hyp=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', "WE'VE", 'CAUGHT', 'YOU', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0068-966: ref=['SO', 'I', 'LIVED', 'AND', 'NOW', 'AM', 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-33396-0068-966: hyp=['SO', 'I', 'LIVED', 'AND', 'NOW', 'AM', 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-36377-0000-870: ref=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DUMAS', 'THE', 'ELDER'] +5142-36377-0000-870: hyp=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DE', 'MAU', 'THE', 'ELDER'] +5142-36377-0001-871: ref=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0001-871: hyp=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0002-872: ref=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0002-872: hyp=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0003-873: ref=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0003-873: hyp=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0004-874: ref=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0004-874: hyp=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0005-875: ref=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0005-875: hyp=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 
'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0006-876: ref=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0006-876: hyp=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0007-877: ref=['A', 'LITTLE', 'CRACKED', 'THAT', 'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0007-877: hyp=['A', 'LITTLE', 'CRACKED', 'THAT', 'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0008-878: ref=['MISTER', 'MEADOWCROFT', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEWCOMER', 'TO', 'ME', 'WITH', 'A', 'SIDE', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0008-878: hyp=['MISTER', 'MEADOWCROF', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEW', 'COMER', 'TO', 'ME', 'WITH', 'A', 'SIDE', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0009-879: ref=['PHILIP', 'LEFRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'JAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0009-879: hyp=['PHILIP', 'LE', 'FRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'YAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0010-880: ref=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGE', 'OF', 'SCENE'] +5142-36377-0010-880: hyp=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGES', 'SCENE'] +5142-36377-0011-881: ref=['MISTER', 'JAGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0011-881: hyp=['THIS', 'GEOGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0012-882: ref=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISTER', 'JAGO', 'SIT', 'TOGETHER'] +5142-36377-0012-882: hyp=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISS', 'GIAGO', 'SIT', 'TOGETHER'] +5142-36377-0013-883: ref=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'JAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'TO', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0013-883: hyp=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'YAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'TO', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0014-884: ref=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 
'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0014-884: hyp=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0015-885: ref=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0015-885: hyp=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0016-886: ref=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0016-886: hyp=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0017-887: ref=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0017-887: hyp=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0018-888: ref=['HE', 'LOOKED', 'UP', 'AT', 'NAOMI', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0018-888: hyp=['HE', 'LOOKED', 'UP', 'AND', 'NOW', 'ON', 'ME', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0019-889: ref=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0019-889: hyp=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0020-890: ref=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0020-890: hyp=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0021-891: ref=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'A', 'SENSE', 'OF', 'PROPRIETY', 'AND', 'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'IN', 'NAOMI', 'AND', 'MY', 'OTHER', 'INTEREST', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 'SUPPER'] +5142-36377-0021-891: hyp=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'THE', 'SENSE', 'OF', 'PROPRIETY', 'AND', 
'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'IN', 'NAY', 'OWE', 'ME', 'AND', 'MY', 'OTHER', 'INTEREST', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 'SUPPER'] +5142-36377-0022-892: ref=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0022-892: hyp=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0023-893: ref=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHN', 'JAGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0023-893: hyp=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHNNIAGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0024-894: ref=['NAOMI', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0024-894: hyp=['THEY', 'ONLY', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0025-895: ref=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WITH', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36377-0025-895: hyp=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WHEN', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36586-0000-967: ref=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0000-967: hyp=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0001-968: ref=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0001-968: hyp=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0002-969: ref=['THE', 'VARIABILITY', 'OF', 'MULTIPLE', 'PARTS'] +5142-36586-0002-969: hyp=['THE', 'VERY', 'ABILITY', 'OF', 'MULTIPLE', 'PART'] +5142-36586-0003-970: ref=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0003-970: hyp=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0004-971: ref=['EFFECTS', 'OF', 
'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36586-0004-971: hyp=['EFFECTS', 'OF', 'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36600-0000-896: ref=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 'MAN'] +5142-36600-0000-896: hyp=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 'MAN'] +5142-36600-0001-897: ref=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'AS', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 'WHETHER', 'SUCH', 'DIFFERENCES', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5142-36600-0001-897: hyp=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'TO', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 'WHETHER', 'SUCH', 'DIFFERENCE', 'IS', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5639-40744-0000-137: ref=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THEY', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THEIR', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0000-137: hyp=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THERE', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THE', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0001-138: ref=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HIDALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFAL', 'HIS', 'FAMILY'] +5639-40744-0001-138: hyp=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HAD', 'ALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFALL', 'HIS', 'FAMILY'] +5639-40744-0002-139: ref=['RODOLFO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0002-139: hyp=['RUDOLPHO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0003-140: ref=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 
'HIS', 'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RODOLFO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARASITES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'LEOCADIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0003-140: hyp=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 'HIS', 'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RUDOLPHO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARRICIDES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'LEOCADIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0004-141: ref=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0004-141: hyp=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0005-142: ref=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0005-142: hyp=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0006-143: ref=['RODOLFO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'AND', "LEOCADIA'S", 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0006-143: hyp=['RODOLPHO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'A', 'UCADIUS', 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0007-144: ref=['MEANWHILE', 'RODOLFO', 'HAD', 'LEOCADIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0007-144: hyp=['MEANWHILE', 'RUDOLPHO', 'HAD', 'LOCALIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0008-145: ref=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0008-145: hyp=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0009-146: ref=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0009-146: hyp=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0010-147: ref=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] +5639-40744-0010-147: hyp=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] 
+5639-40744-0011-148: ref=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0011-148: hyp=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0012-149: ref=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOUR', 'OF', 'SOME', 'DAMASK', 'HANGINGS', 'IN', 'THE', 'ROOM'] +5639-40744-0012-149: hyp=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOR', 'OF', 'SOME', 'DAMASK', 'HANGING', 'IN', 'THE', 'ROOM'] +5639-40744-0013-150: ref=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'RATHER', 'THAN', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0013-150: hyp=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'THE', 'RATHER', 'THAT', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0014-151: ref=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'SHE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0014-151: hyp=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'HE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0015-152: ref=['THIS', 'PERSON', 'WAS', 'RODOLFO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOT', 'THINKING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0015-152: hyp=['THIS', 'PERSON', 'WAS', 'RIDOLPHO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOTHING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0016-153: ref=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'HER', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0016-153: hyp=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'A', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0017-154: ref=['CHOKING', 'WITH', 'EMOTION', 'LEOCADI', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0017-154: hyp=['CHOKING', 'WITH', 'EMOTION', 'LUCADIA', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0018-155: ref=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WERE', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAS', 'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 
'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0018-155: hyp=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WERE', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAD', 'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0019-156: ref=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0019-156: hyp=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0020-157: ref=['THUS', 'DID', 'THIS', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'HER', 'FEELINGS'] +5639-40744-0020-157: hyp=['THUS', 'DID', 'THE', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'A', 'FEELINGS'] +5639-40744-0021-158: ref=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0021-158: hyp=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0022-159: ref=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AND', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0022-159: hyp=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AS', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0023-160: ref=['WHEN', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSINGS', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0023-160: hyp=['WHEN', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSING', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 
'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0024-161: ref=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 'HORSEMEN'] +5639-40744-0024-161: hyp=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 'HORSEMEN'] +5639-40744-0025-162: ref=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0025-162: hyp=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0026-163: ref=['LUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DURING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0026-163: hyp=['LOUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DREWING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0027-164: ref=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'AS', 'NATURAL', 'AS', 'CRUELTY', 'IS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LEOCADIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0027-164: hyp=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'A', 'NATURAL', 'AS', 'CRUELTY', 'AS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LOCATIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0028-165: ref=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DONA', 'ESTAFANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THAT', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0028-165: hyp=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DORNEST', 'DA', 
'FANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THE', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0029-166: ref=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0029-166: hyp=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0030-167: ref=['JUST', 'THEN', 'LEOCADIA', 'CAME', 'TO', 'HERSELF', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINED', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', "LEOCADIA'S", 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0030-167: hyp=['JUST', 'THEN', 'LEOKADIA', 'CAME', 'TO', 'HERSELF', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINING', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', 'LEUCEDES', 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0031-168: ref=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'AND', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STROLL', 'WITH', 'RODOLFO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ABDUCTION', 'OF', 'A', 'GIRL', 'WHOM', 'RODOLFO', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0031-168: hyp=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'IN', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STRAW', 'WITH', 'RADOLPHO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ADOCTION', 'OF', 'A', 'GIRL', 'WHOM', 'UDOLPHO', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0032-169: ref=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 
'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0032-169: hyp=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0033-170: ref=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'AND', 'ANIMATED', 'SHE', 'LED', 'HER', 'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0033-170: hyp=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'ANIMATED', 'SHE', 'LED', 'HER', 'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0034-171: ref=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0034-171: hyp=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0035-172: ref=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOUR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYED', 'ESTAFANIA'] +5639-40744-0035-172: hyp=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYEDESTAFHANIA'] +5639-40744-0036-173: ref=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0036-173: hyp=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0037-174: ref=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 
'FOR', 'YOU', 'AND', 'THE', 'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0037-174: hyp=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 'FOR', 'YOU', 'AND', 'A', 'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0038-175: ref=['JUST', 'AT', 'THE', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THEIR', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'LEOCADIA', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 'HEARTS', 'OF', 'ALL'] +5639-40744-0038-175: hyp=['JUST', 'AT', 'A', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THEIR', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'THE', 'OCCAS', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 'HEARTS', 'OF', 'ALL'] +5639-40744-0039-176: ref=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "RODOLFO'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0039-176: hyp=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "GODOLPH'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0040-177: ref=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'WHEN', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0040-177: hyp=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'BY', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0041-178: ref=['NOR', 'WAS', 'RODOLFO', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LEOCADIA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5639-40744-0041-178: hyp=['NOR', 'WAS', 'RIDOLPHAL', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LOU', 'KATYA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5683-32865-0000-2483: ref=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0000-2483: hyp=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0001-2484: ref=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0001-2484: 
hyp=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0002-2485: ref=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0002-2485: hyp=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0003-2486: ref=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] +5683-32865-0003-2486: hyp=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] +5683-32865-0004-2487: ref=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0004-2487: hyp=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0005-2488: ref=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LAKE', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0005-2488: hyp=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LEEK', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0006-2489: ref=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0006-2489: hyp=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0007-2490: ref=["I'M", 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WYLDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0007-2490: hyp=['I', 'AM', 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WILDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0008-2491: ref=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0008-2491: hyp=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0009-2492: ref=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0009-2492: hyp=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELLOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0010-2493: ref=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0010-2493: hyp=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0011-2494: ref=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0011-2494: hyp=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0012-2495: ref=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 
'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0012-2495: hyp=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0013-2496: ref=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0013-2496: hyp=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0014-2497: ref=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0014-2497: hyp=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0015-2498: ref=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0015-2498: hyp=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0016-2499: ref=['OH', 'I', 'KNOW', "THAT'S", 'LORNE', 'BRANDON'] +5683-32865-0016-2499: hyp=['OH', 'I', 'KNOW', "THAT'S", 'LORN', 'BRANDON'] +5683-32865-0017-2500: ref=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32865-0017-2500: hyp=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32866-0000-2527: ref=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0000-2527: hyp=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0001-2528: ref=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0001-2528: hyp=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0002-2529: ref=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0002-2529: hyp=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0003-2530: ref=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0003-2530: hyp=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0004-2531: ref=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERAL', 'SHIP'] +5683-32866-0004-2531: hyp=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERALSHIP'] +5683-32866-0005-2532: ref=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0005-2532: hyp=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0006-2533: ref=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0006-2533: hyp=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 
'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0007-2534: ref=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', "HE'S", 'BEELZEBUB', 'AT', 'ONCE'] +5683-32866-0007-2534: hyp=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', 'HE', 'IS', 'BIELDS', 'ABOVE', 'AT', 'ONCE'] +5683-32866-0008-2535: ref=["BRACTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0008-2535: hyp=["BROCKTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0009-2536: ref=['I', "DON'T", 'KNOW', 'AND', "CAN'T", 'SAY', 'HOW', 'YOU', 'FINE', 'GENTLEMEN', 'DEFINE', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0009-2536: hyp=['I', "DON'T", 'KNOW', 'ONE', "CAN'T", 'SAY', 'HOW', 'YOU', 'FIND', 'GENTLEMEN', 'TO', 'FIND', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0010-2537: ref=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0010-2537: hyp=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0011-2538: ref=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0011-2538: hyp=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0012-2539: ref=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0012-2539: hyp=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0013-2540: ref=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0013-2540: hyp=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0014-2541: ref=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 'MORNING'] +5683-32866-0014-2541: hyp=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 
'MORNING'] +5683-32866-0015-2542: ref=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUNT'] +5683-32866-0015-2542: hyp=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUN'] +5683-32866-0016-2543: ref=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'AYE', 'AND', 'TOO', 'DEEP'] +5683-32866-0016-2543: hyp=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'I', 'AND', 'TOO', 'DEEP'] +5683-32866-0017-2544: ref=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0017-2544: hyp=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0018-2545: ref=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0018-2545: hyp=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0019-2546: ref=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STEADY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVE', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'A', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'A', 'SPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0019-2546: hyp=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STUDY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVED', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'THE', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'ESPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0020-2547: ref=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0020-2547: hyp=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0021-2548: ref=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0021-2548: hyp=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0022-2549: ref=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0022-2549: hyp=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0023-2550: ref=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0023-2550: hyp=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0024-2551: ref=['I', "SHAN'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0024-2551: hyp=['I', "SHA'N'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 
'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0025-2552: ref=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0025-2552: hyp=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0026-2553: ref=['I', 'BOLDLY', 'LIGHTED', 'MY', 'CHEROOT'] +5683-32866-0026-2553: hyp=['I', 'BOLDLY', 'LIGHTED', 'MY', 'TROUT'] +5683-32866-0027-2554: ref=['A', 'COLD', 'BRIGHT', 'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0027-2554: hyp=['A', 'COLD', 'BRIGHT', 'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0028-2555: ref=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0028-2555: hyp=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0029-2556: ref=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0029-2556: hyp=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0030-2557: ref=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLED', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32866-0030-2557: hyp=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLE', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32879-0000-2501: ref=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0000-2501: hyp=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0001-2502: ref=['WELL', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0001-2502: hyp=['WHILE', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0002-2503: ref=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0002-2503: hyp=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0003-2504: ref=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0003-2504: hyp=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0004-2505: ref=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0004-2505: hyp=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 
'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0005-2506: ref=['THIS', 'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0005-2506: hyp=['THIS', 'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0006-2507: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0006-2507: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0007-2508: ref=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0007-2508: hyp=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0008-2509: ref=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0008-2509: hyp=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0009-2510: ref=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0009-2510: hyp=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0010-2511: ref=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0010-2511: hyp=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0011-2512: ref=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'OF', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0011-2512: hyp=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'A', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0012-2513: ref=['THANK', 'YOU', 'RACHEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0012-2513: hyp=['THANK', 'YOU', 'RACHEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0013-2514: ref=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WYLDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'IS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0013-2514: hyp=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WILDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'WAS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0014-2515: ref=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 
'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0014-2515: hyp=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0015-2516: ref=['YES', 'SAID', 'RACHEL'] +5683-32879-0015-2516: hyp=['YES', 'SAID', 'RACHEL'] +5683-32879-0016-2517: ref=['AND', 'THE', 'WAN', 'ORACLE', 'HAVING', 'SPOKEN', 'SHE', 'SATE', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0016-2517: hyp=['AND', 'THE', 'WAN', 'ORACLE', 'HAVING', 'SPOKEN', 'SHE', 'SAT', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0017-2518: ref=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0017-2518: hyp=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0018-2519: ref=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0018-2519: hyp=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0019-2520: ref=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0019-2520: hyp=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0020-2521: ref=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0020-2521: hyp=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0021-2522: ref=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0021-2522: hyp=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0022-2523: ref=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0022-2523: hyp=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0023-2524: ref=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0023-2524: hyp=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0024-2525: ref=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0024-2525: hyp=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0025-2526: ref=['THANK', 'YOU', 'DORCAS', 'DEAR'] +5683-32879-0025-2526: 
hyp=['THANK', 'YOU', 'DORCAS', 'DEAR'] +61-70968-0000-2179: ref=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0000-2179: hyp=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0001-2180: ref=['GIVE', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MUMMERIES', 'CHILD'] +61-70968-0001-2180: hyp=['CUVE', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MEMORIES', 'CHILD'] +61-70968-0002-2181: ref=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] +61-70968-0002-2181: hyp=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] +61-70968-0003-2182: ref=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0003-2182: hyp=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0004-2183: ref=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0004-2183: hyp=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0005-2184: ref=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0005-2184: hyp=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0006-2185: ref=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0006-2185: hyp=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0007-2186: ref=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0007-2186: hyp=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0008-2187: ref=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0008-2187: hyp=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0009-2188: ref=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0009-2188: hyp=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0010-2189: ref=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0010-2189: hyp=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0011-2190: ref=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0011-2190: hyp=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0012-2191: ref=['CRIES', 'OF', 'A', 'NOTTINGHAM', 'A', 'NOTTINGHAM'] +61-70968-0012-2191: hyp=['CRIES', 'OF', 'UNNOTTINGHAM', 'ARE', 'NOTTINGHAM'] +61-70968-0013-2192: ref=['BEFORE', 'THEM', 
'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLESS', 'AND', 'TERRIFIED'] +61-70968-0013-2192: hyp=['BEFORE', 'THEM', 'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLICE', 'AND', 'TERRIFIED'] +61-70968-0014-2193: ref=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'A', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] +61-70968-0014-2193: hyp=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'THE', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] +61-70968-0015-2194: ref=['NAY', 'WE', 'REFUSED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0015-2194: hyp=['NAY', 'WE', 'WERE', 'FREEZED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0016-2195: ref=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0016-2195: hyp=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0017-2196: ref=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0017-2196: hyp=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0018-2197: ref=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0018-2197: hyp=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0019-2198: ref=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AND', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0019-2198: hyp=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AS', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0020-2199: ref=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0020-2199: hyp=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0021-2200: ref=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0021-2200: hyp=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0022-2201: ref=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0022-2201: hyp=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0023-2202: ref=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0023-2202: hyp=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0024-2203: ref=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENCE', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PURSE', 'IN', 'CONSOLATION'] +61-70968-0024-2203: hyp=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENT', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PERSON', 'CONSOLATION'] +61-70968-0025-2204: ref=['COME', 'TO', 'ME', 'MEN', 'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0025-2204: hyp=['COME', 'TO', 'ME', 'MEN', 
'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0026-2205: ref=['THE', 'STROLLERS', 'TOOK', 'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HEARTY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0026-2205: hyp=['THE', 'STROLLERS', 'TOOK', 'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HARDY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0027-2206: ref=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DID', 'THESE', 'TWO', 'COMPORT', 'THEMSELVES'] +61-70968-0027-2206: hyp=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DO', 'THESE', 'TWO', 'COMPORT', 'THEMSELVES'] +61-70968-0028-2207: ref=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'THE', 'NOTTINGHAM', 'APPRENTICE', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0028-2207: hyp=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'THE', 'NOTTINGHAM', 'APPRENTICED', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0029-2208: ref=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0029-2208: hyp=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0030-2209: ref=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0030-2209: hyp=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0031-2210: ref=['SILENCE', 'YOU', 'KNAVE', 'CRIED', 'MONTFICHET'] +61-70968-0031-2210: hyp=['SILENCE', 'YOU', 'NAVE', 'CRIED', 'MONTFICHET'] +61-70968-0032-2211: ref=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0032-2211: hyp=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0033-2212: ref=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0033-2212: hyp=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0034-2213: ref=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0034-2213: hyp=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0035-2214: ref=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', "ROBIN'S", 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0035-2214: hyp=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', 'ROBINS', 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0036-2215: ref=['GEORGE', 'MONTFICHET', 'WILL', 'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0036-2215: hyp=['GEORGE', 'MONTFICHET', 'WILL', 
'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0037-2216: ref=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0037-2216: hyp=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0038-2217: ref=['ROBIN', 'FITZOOTH'] +61-70968-0038-2217: hyp=['ROBIN', 'FITZOOTH'] +61-70968-0039-2218: ref=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0039-2218: hyp=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0040-2219: ref=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 'ANSWERED', 'ROBIN'] +61-70968-0040-2219: hyp=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 'ANSWERED', 'ROBIN'] +61-70968-0041-2220: ref=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0041-2220: hyp=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0042-2221: ref=['MONTFICHET', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0042-2221: hyp=['MARTFICHERE', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0043-2222: ref=['FRIENDS', 'SAID', 'MONTFICHET', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0043-2222: hyp=['FRIENDS', 'SAID', 'MONTFICHE', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0044-2223: ref=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0044-2223: hyp=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0045-2224: ref=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'AND', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0045-2224: hyp=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'IN', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0046-2225: ref=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0046-2225: hyp=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0047-2226: ref=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0047-2226: hyp=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0048-2227: ref=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0048-2227: hyp=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0049-2228: ref=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0049-2228: hyp=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0050-2229: ref=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 
'HIS', 'ARM'] +61-70968-0050-2229: hyp=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 'HIS', 'ARM'] +61-70968-0051-2230: ref=['BEG', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0051-2230: hyp=['BEGGED', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0052-2231: ref=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'SLEEVE'] +61-70968-0052-2231: hyp=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'STEVE'] +61-70968-0053-2232: ref=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0053-2232: hyp=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0054-2233: ref=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0054-2233: hyp=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0055-2234: ref=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0055-2234: hyp=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0056-2235: ref=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0056-2235: hyp=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0057-2236: ref=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAMEWELL', 'LAD', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0057-2236: hyp=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAME', 'WELL', 'LED', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0058-2237: ref=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0058-2237: hyp=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0059-2238: ref=['IT', 'WILL', 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0059-2238: hyp=["IT'LL", 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0060-2239: ref=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0060-2239: hyp=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0061-2240: ref=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0061-2240: hyp=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0062-2241: ref=['AY', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70968-0062-2241: hyp=['I', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70970-0000-2242: ref=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0000-2242: hyp=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 
'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0001-2243: ref=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0001-2243: hyp=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0002-2244: ref=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0002-2244: hyp=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0003-2245: ref=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0003-2245: hyp=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0004-2246: ref=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WEAR', "MONTFICHET'S", 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', "NE'ER", 'DISGRACE', 'OUR', 'SCUTCHEON', 'I', 'WARRANT', 'ME'] +61-70970-0004-2246: hyp=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WHERE', "MONTFICHET'S", 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', 'NEVER', 'DISGRACE', 'OUR', 'STATUNE', 'I', 'WARRANT', 'ME'] +61-70970-0005-2247: ref=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0005-2247: hyp=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0006-2248: ref=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0006-2248: hyp=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0007-2249: ref=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0007-2249: hyp=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0008-2250: ref=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0008-2250: hyp=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0009-2251: ref=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0009-2251: hyp=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0010-2252: ref=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'E', 'E', 'N'] +61-70970-0010-2252: hyp=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'EVEN'] +61-70970-0011-2253: ref=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0011-2253: hyp=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0012-2254: ref=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0012-2254: hyp=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0013-2255: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0013-2255: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0014-2256: ref=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] 
+61-70970-0014-2256: hyp=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] +61-70970-0015-2257: ref=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0015-2257: hyp=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0016-2258: ref=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0016-2258: hyp=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0017-2259: ref=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0017-2259: hyp=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0018-2260: ref=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0018-2260: hyp=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0019-2261: ref=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0019-2261: hyp=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0020-2262: ref=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0020-2262: hyp=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0021-2263: ref=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0021-2263: hyp=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0022-2264: ref=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0022-2264: hyp=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0023-2265: ref=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0023-2265: hyp=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0024-2266: ref=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0024-2266: hyp=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0025-2267: ref=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0025-2267: hyp=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 
'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0026-2268: ref=["FITZOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] +61-70970-0026-2268: hyp=["FITTOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] +61-70970-0027-2269: ref=['ROBIN', 'CAREFULLY', 'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0027-2269: hyp=['ROBIN', 'CAREFULLY', 'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0028-2270: ref=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0028-2270: hyp=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0029-2271: ref=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0029-2271: hyp=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0030-2272: ref=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0030-2272: hyp=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0031-2273: ref=['CRIED', 'HE', 'WAVING', 'THE', 'LANTHORN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0031-2273: hyp=['CRIED', 'HE', 'WAVING', 'THE', 'LANTERN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0032-2274: ref=['ENQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICIONS', 'STILL', 'UPON', 'HIM'] +61-70970-0032-2274: hyp=['INQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICION', 'STILL', 'UPON', 'HIM'] +61-70970-0033-2275: ref=['TRULY', 'SUCH', 'A', 'HORSE', 'SHOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0033-2275: hyp=['TRULY', 'SUCH', 'A', 'HORSE', 'WOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0034-2276: ref=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0034-2276: hyp=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0035-2277: ref=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "GEOFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0035-2277: hyp=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "JEFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0036-2278: ref=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBORING', 'THEM'] +61-70970-0036-2278: hyp=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBOURING', 'THEM'] 
+61-70970-0037-2279: ref=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'A', 'GOOD', 'FELLOWSHIP', 'WAS', 'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0037-2279: hyp=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'THE', 'GOOD', 'FELLOWSHIP', 'WAS', 'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0038-2280: ref=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGE', 'WAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANCE', 'TO', 'SHERWOOD', 'AND', 'THAT', 'GEOFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 'FOREST'] +61-70970-0038-2280: hyp=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGEWAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANTS', 'TO', 'SHERWOOD', 'AND', 'THAT', 'JEFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 'FOREST'] +61-70970-0039-2281: ref=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0039-2281: hyp=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0040-2282: ref=['THEY', 'REGAINED', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +61-70970-0040-2282: hyp=['THEY', 'REGAIN', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +672-122797-0000-1529: ref=['OUT', 'IN', 'THE', 'WOODS', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0000-1529: hyp=['OUT', 'IN', 'THE', 'WOODS', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0001-1530: ref=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FIRS'] +672-122797-0001-1530: hyp=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FURS'] +672-122797-0002-1531: ref=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'AND', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0002-1531: hyp=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'IN', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0003-1532: ref=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0003-1532: hyp=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0004-1533: ref=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 
'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 'LITTLE', 'TREE'] +672-122797-0004-1533: hyp=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 'LITTLE', 'TREE'] +672-122797-0005-1534: ref=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0005-1534: hyp=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0006-1535: ref=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 'WORLD'] +672-122797-0006-1535: hyp=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 'WORLD'] +672-122797-0007-1536: ref=['IN', 'AUTUMN', 'THE', 'WOOD', 'CUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0007-1536: hyp=['IN', 'AUTUMN', 'THE', 'WOODCUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0008-1537: ref=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZE', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNISED', 'AND', 'THEN', 'THEY', 'WERE', 'LAID', 'IN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0008-1537: hyp=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZED', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNIZED', 'AND', 'THEN', 'THEY', 'WERE', 'LAID', 'IN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0009-1538: ref=['HAVE', 'YOU', 'NOT', 'MET', 'THEM', 'ANYWHERE'] +672-122797-0009-1538: hyp=['HAVE', 'YOU', 'NOT', 'MET', 'THE', 'MANY', 'WHERE'] +672-122797-0010-1539: ref=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0010-1539: hyp=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0011-1540: ref=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0011-1540: hyp=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0012-1541: ref=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0012-1541: hyp=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0013-1542: ref=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] +672-122797-0013-1542: hyp=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] 
+672-122797-0014-1543: ref=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0014-1543: hyp=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0015-1544: ref=['WERE', 'I', 'IN', 'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOR', 'AND', 'MAGNIFICENCE'] +672-122797-0015-1544: hyp=['WHERE', 'I', 'IN', 'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOUR', 'AND', 'MAGNIFICENCE'] +672-122797-0016-1545: ref=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0016-1545: hyp=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0017-1546: ref=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0017-1546: hyp=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0018-1547: ref=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'AIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0018-1547: hyp=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'HEIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0019-1548: ref=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0019-1548: hyp=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0020-1549: ref=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0020-1549: hyp=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0021-1550: ref=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0021-1550: hyp=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0022-1551: ref=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0022-1551: hyp=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0023-1552: ref=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANYMORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0023-1552: hyp=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANY', 'MORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0024-1553: ref=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0024-1553: hyp=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0025-1554: ref=['THE', 'TREE', 
'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 'UNLOADED', 'IN', 'A', 'COURT', 'YARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0025-1554: hyp=['THE', 'TREE', 'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 'UNLOADED', 'IN', 'A', 'COURTYARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0026-1555: ref=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0026-1555: hyp=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0027-1556: ref=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0027-1556: hyp=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0028-1557: ref=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0028-1557: hyp=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0029-1558: ref=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0029-1558: hyp=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0030-1559: ref=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0030-1559: hyp=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0031-1560: ref=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0031-1560: hyp=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0032-1561: ref=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0032-1561: hyp=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0033-1562: ref=['A', 'STORY'] +672-122797-0033-1562: hyp=['A', 'STORY'] +672-122797-0034-1563: ref=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0034-1563: hyp=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0035-1564: ref=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0035-1564: hyp=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0036-1565: ref=['HUMPY', 'DUMPY', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0036-1565: hyp=['HUMPY', "DON'T", 'BE', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0037-1566: ref=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0037-1566: hyp=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0038-1567: ref=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] +672-122797-0038-1567: hyp=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] 
+672-122797-0039-1568: ref=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0039-1568: hyp=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0040-1569: ref=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] +672-122797-0040-1569: hyp=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] +672-122797-0041-1570: ref=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0041-1570: hyp=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0042-1571: ref=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0042-1571: hyp=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0043-1572: ref=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0043-1572: hyp=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0044-1573: ref=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0044-1573: hyp=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0045-1574: ref=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0045-1574: hyp=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0046-1575: ref=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0046-1575: hyp=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0047-1576: ref=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0047-1576: hyp=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0048-1577: ref=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0048-1577: hyp=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0049-1578: ref=['SQUEAK', 'SQUEAK'] +672-122797-0049-1578: hyp=['SQUEAK', 'SQUI'] +672-122797-0050-1579: ref=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0050-1579: hyp=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0051-1580: ref=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0051-1580: hyp=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0052-1581: ref=["THERE'S", 'MANY', 'A', 'ONE', 'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0052-1581: hyp=["THERE'S", 'MANY', 'A', 'ONE', 
'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0053-1582: ref=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0053-1582: hyp=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0054-1583: ref=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 'SAID', 'THE', 'TREE'] +672-122797-0054-1583: hyp=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 'SAID', 'THE', 'TREE'] +672-122797-0055-1584: ref=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0055-1584: hyp=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0056-1585: ref=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0056-1585: hyp=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0057-1586: ref=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0057-1586: hyp=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0058-1587: ref=['WHO', 'IS', 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0058-1587: hyp=['WHO', "IT'S", 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0059-1588: ref=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0059-1588: hyp=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0060-1589: ref=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0060-1589: hyp=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0061-1590: ref=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0061-1590: hyp=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0062-1591: ref=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0062-1591: hyp=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0063-1592: ref=['THEN', 'GOOD', 'BYE', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0063-1592: hyp=['THEN', 'GOOD', 'BY', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0064-1593: ref=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0064-1593: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0065-1594: ref=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0065-1594: hyp=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0066-1595: ref=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0066-1595: hyp=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0067-1596: ref=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 
'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 'SHONE'] +672-122797-0067-1596: hyp=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 'SHONE'] +672-122797-0068-1597: ref=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0068-1597: hyp=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0069-1598: ref=['IT', 'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0069-1598: hyp=['IT', 'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0070-1599: ref=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0070-1599: hyp=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0071-1600: ref=['IN', 'THE', 'COURT', 'YARD', 'SOME', 'OF', 'THE', 'MERRY', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0071-1600: hyp=['IN', 'THE', 'COURTYARD', 'SOME', 'OF', 'THE', 'MARRIED', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0072-1601: ref=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0072-1601: hyp=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0073-1602: ref=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'IT', 'SIGHED', 'SO', 'DEEPLY'] +672-122797-0073-1602: hyp=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'ITS', 'SIDE', 'SO', 'DEEPLY'] +672-122797-0074-1603: ref=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +672-122797-0074-1603: hyp=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +6829-68769-0000-1858: ref=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHECK'] +6829-68769-0000-1858: hyp=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHEQUE'] +6829-68769-0001-1859: ref=['IT', 'WAS', 'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0001-1859: hyp=['IT', 'WAS', 
'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', 'THE', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0002-1860: ref=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 'LAWYER'] +6829-68769-0002-1860: hyp=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 'LAWYER'] +6829-68769-0003-1861: ref=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0003-1861: hyp=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0004-1862: ref=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'CASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'MERE', 'BLUFF'] +6829-68769-0004-1862: hyp=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'GASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'A', 'MERE', 'BLUFF'] +6829-68769-0005-1863: ref=['HE', 'WAS', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0005-1863: hyp=['HE', 'WAS', 'A', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0006-1864: ref=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0006-1864: hyp=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0007-1865: ref=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'IF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0007-1865: hyp=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'OF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0008-1866: ref=['FAIRVIEW', 'WAS', 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'JAIL'] +6829-68769-0008-1866: hyp=['FAIR', "VIEW'S", 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'DRALE'] +6829-68769-0009-1867: ref=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0009-1867: hyp=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0010-1868: ref=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0010-1868: hyp=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0011-1869: ref=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0011-1869: hyp=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0012-1870: ref=['OH', 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 
'ALTERING', 'HIS', 'DEMEANOR'] +6829-68769-0012-1870: hyp=["I'LL", 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 'ALTERING', 'HIS', 'DEMEANOUR'] +6829-68769-0013-1871: ref=['MAY', 'WE', 'SEE', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] +6829-68769-0013-1871: hyp=['MAY', 'WE', 'SEA', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] +6829-68769-0014-1872: ref=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'A', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0014-1872: hyp=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'THE', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0015-1873: ref=['SOMETIMES', "I'M", 'THAT', 'YEARNING', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AN', 'I', 'DUNNO', 'WHICH', 'IS', 'WORST', 'DYIN', 'ONE', 'WAY', 'OR', 'ANOTHER'] +6829-68769-0015-1873: hyp=['SOMETIMES', 'ON', 'THAT', 'YEARNIN', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AND', 'I', "DON'T", 'KNOW', 'WHICH', 'IS', 'WORSE', 'DYIN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER'] +6829-68769-0016-1874: ref=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0016-1874: hyp=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0017-1875: ref=['WORSE', 'TOM', 'WORSE', 'N', 'EVER', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0017-1875: hyp=['HORSE', 'TOM', 'WORSE', 'THAN', 'ARROW', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0018-1876: ref=['MISS', 'DE', 'GRAF', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0018-1876: hyp=['MISTER', 'GRAF', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0019-1877: ref=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0019-1877: hyp=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0020-1878: ref=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'BENCH', 'HERE'] +6829-68769-0020-1878: hyp=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'PINCH', 'HERE'] +6829-68769-0021-1879: ref=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0021-1879: hyp=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0022-1880: ref=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'ARE', 'INTERESTED', 'IN', 'IT'] +6829-68769-0022-1880: hyp=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'OUR', 'INTERESTED', 'IN', 'IT'] +6829-68769-0023-1881: ref=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID'] +6829-68769-0023-1881: hyp=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID', 'IT'] +6829-68769-0024-1882: ref=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] 
+6829-68769-0024-1882: hyp=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] +6829-68769-0025-1883: ref=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0025-1883: hyp=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0026-1884: ref=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] +6829-68769-0026-1884: hyp=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] +6829-68769-0027-1885: ref=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0027-1885: hyp=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0028-1886: ref=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCERN'] +6829-68769-0028-1886: hyp=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCER'] +6829-68769-0029-1887: ref=["IT'S", 'A', 'STOCK', 'COMPANY', 'AND', 'RICH'] +6829-68769-0029-1887: hyp=["IT'S", 'A', 'STOCK', 'COMPANY', 'IN', 'RICH'] +6829-68769-0030-1888: ref=['I', 'WAS', 'BOOKKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0030-1888: hyp=['I', 'WAS', 'BITKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0031-1889: ref=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'THEM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0031-1889: hyp=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'HIM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0032-1890: ref=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0032-1890: hyp=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0033-1891: ref=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0033-1891: hyp=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0034-1892: ref=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHALL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0034-1892: hyp=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHAL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0035-1893: ref=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0035-1893: hyp=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0036-1894: ref=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0036-1894: 
hyp=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0037-1895: ref=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 'DAY'] +6829-68769-0037-1895: hyp=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 'DAY'] +6829-68769-0038-1896: ref=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0038-1896: hyp=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0039-1897: ref=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0039-1897: hyp=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0040-1898: ref=['SOME', 'GIRL', 'HAS', 'BEEN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0040-1898: hyp=['SOME', 'GIRL', 'HAS', 'BEEN', 'IN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0041-1899: ref=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0041-1899: hyp=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0042-1900: ref=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0042-1900: hyp=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0043-1901: ref=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'IN', "STATE'S", 'PRISON'] +6829-68769-0043-1901: hyp=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'AND', "STATE'S", 'PRISON'] +6829-68769-0044-1902: ref=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'IN', 'ANNOYANCE'] +6829-68769-0044-1902: hyp=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'AN', 'ANNOYANCE'] +6829-68769-0045-1903: ref=["I'LL", 'PAY', 'ALL', 'THE', 'COSTS', 'BESIDES'] +6829-68769-0045-1903: hyp=["I'LL", 'PAY', 'ALL', 'THE', 'COST', 'BESIDES'] +6829-68769-0046-1904: ref=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0046-1904: hyp=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0047-1905: ref=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHALL'] +6829-68769-0047-1905: hyp=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHAL'] +6829-68769-0048-1906: ref=['GIVE', 'ME', 'A', 'CHECK', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'QUASH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0048-1906: hyp=['GIVE', 'ME', 'A', 'CHEQUE', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'QUASH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0049-1907: ref=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0049-1907: hyp=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOUR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0050-1908: ref=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0050-1908: hyp=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0051-1909: ref=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0051-1909: hyp=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0052-1910: ref=['HE', 'MIGHT', 'HAVE', 
'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0052-1910: hyp=['HE', 'MIGHT', 'HAVE', 'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0053-1911: ref=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68769-0053-1911: hyp=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68771-0000-1912: ref=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', "FORBES'S", 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0000-1912: hyp=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', 'FORTS', 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0001-1913: ref=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMEN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVASS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0001-1913: hyp=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMAN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVAS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0002-1914: ref=['THE', 'WEAK', 'KNEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'ANOTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0002-1914: hyp=['THE', 'WEAK', 'NEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0003-1915: ref=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0003-1915: hyp=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0004-1916: ref=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELECT', 'HOPKINS'] +6829-68771-0004-1916: hyp=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELEC', 'HOPKINS'] +6829-68771-0005-1917: ref=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0005-1917: hyp=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0006-1918: ref=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 
'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0006-1918: hyp=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0007-1919: ref=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 'COMING', 'RECEPTION', 'TO', 'THE', "WOMAN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANTS', 'OF', 'THE', 'OTHER', 'VEHICLE', 'WERE'] +6829-68771-0007-1919: hyp=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 'COMING', 'RECEPTION', 'TO', 'THE', "WOMEN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANTS', 'OF', 'THE', 'OTHER', 'VEHICLE', 'WERE'] +6829-68771-0008-1920: ref=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0008-1920: hyp=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0009-1921: ref=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0009-1921: hyp=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0010-1922: ref=['THE', 'FAIRVIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0010-1922: hyp=['THE', 'FAIR', 'VIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0011-1923: ref=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0011-1923: hyp=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0012-1924: ref=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WHERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'INVITED', 'GUESTS'] +6829-68771-0012-1924: hyp=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 
'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'THE', 'INVITED', 'GUEST'] +6829-68771-0013-1925: ref=['THE', 'ATTENDANCE', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FETE'] +6829-68771-0013-1925: hyp=['THE', 'ATTENDANTS', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FIGHT'] +6829-68771-0014-1926: ref=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANTS', 'BETH', 'SAID', 'LOUISE', 'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0014-1926: hyp=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANCE', 'BETH', 'SAID', 'LOUISE', 'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0015-1927: ref=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0015-1927: hyp=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0016-1928: ref=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THE', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'ESPECIAL', 'FAVORITE'] +6829-68771-0016-1928: hyp=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THEIR', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'SPECIAL', 'FAVOURITE'] +6829-68771-0017-1929: ref=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'AND', 'BETH', 'FOLLOWED'] +6829-68771-0017-1929: hyp=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'IN', 'BETH', 'FOLLOWED'] +6829-68771-0018-1930: ref=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'A', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0018-1930: hyp=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'OF', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0019-1931: ref=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0019-1931: hyp=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'A', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0020-1932: ref=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0020-1932: hyp=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0021-1933: ref=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0021-1933: hyp=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0022-1934: ref=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0022-1934: hyp=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0023-1935: ref=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 
'HOME'] +6829-68771-0023-1935: hyp=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 'HOME'] +6829-68771-0024-1936: ref=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAID', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 'VISITOR'] +6829-68771-0024-1936: hyp=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAID', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 'VISITOR'] +6829-68771-0025-1937: ref=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCKED', 'SLOWLY', 'BACK', 'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0025-1937: hyp=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCK', 'SLOWLY', 'BACK', 'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0026-1938: ref=['ELIZA', 'PARSONS', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0026-1938: hyp=['ELIZA', 'PARSON', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0027-1939: ref=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0027-1939: hyp=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0028-1940: ref=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0028-1940: hyp=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0029-1941: ref=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'IN', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0029-1941: hyp=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'A', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0030-1942: ref=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0030-1942: hyp=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0031-1943: ref=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0031-1943: hyp=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0032-1944: ref=['HOWEVER', 'HER', 'FEATURES', 'AND', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0032-1944: hyp=['HOWEVER', 'HER', 'FEATURES', 'AND', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0033-1945: ref=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0033-1945: hyp=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0034-1946: ref=['I', 'WISH', 'I', 'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0034-1946: hyp=['I', 'WISH', 'I', 
'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0035-1947: ref=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 'ESCAPE', 'YOU'] +6829-68771-0035-1947: hyp=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 'ESCAPE', 'YOU'] +6829-68771-0036-1948: ref=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6829-68771-0036-1948: hyp=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6930-75918-0000-0: ref=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0000-0: hyp=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0001-1: ref=['THE', 'ENGLISH', 'FORWARDED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0001-1: hyp=['THE', 'ENGLISH', 'FOOTED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0002-2: ref=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0002-2: hyp=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0003-3: ref=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0003-3: hyp=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0004-4: ref=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 'RECEIVED', 'AND', 'WHERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'BEFORE', 'THEM'] +6930-75918-0004-4: hyp=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 
'RECEIVED', 'AND', 'WERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'FOR', 'THEM'] +6930-75918-0005-5: ref=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 'LIMBS'] +6930-75918-0005-5: hyp=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 'LIMBS'] +6930-75918-0006-6: ref=['THIS', 'HAS', 'INDEED', 'BEEN', 'A', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0006-6: hyp=['THIS', 'HAS', 'INDEED', 'BEEN', 'AN', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0007-7: ref=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0007-7: hyp=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0008-8: ref=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0008-8: hyp=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0009-9: ref=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0009-9: hyp=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0010-10: ref=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0010-10: hyp=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0011-11: ref=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0011-11: hyp=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0012-12: ref=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0012-12: hyp=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0013-13: ref=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0013-13: hyp=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0014-14: ref=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BEAR', 'ON', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0014-14: hyp=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BEAR', 'ON', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0015-15: ref=['THUS', 'IT', 'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', "COUNTRY'S", 'OUR', "MASTER'S", 'AND', 'OUR', 'OWN'] +6930-75918-0015-15: hyp=['THUS', 'IT', 
'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', 'COUNTRY', 'OUR', 'MASTERS', 'AND', 'OUR', 'OWN'] +6930-75918-0016-16: ref=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0016-16: hyp=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0017-17: ref=['BUT', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0017-17: hyp=['BY', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0018-18: ref=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0018-18: hyp=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0019-19: ref=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0019-19: hyp=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0020-20: ref=['BRAGELONNE', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-75918-0020-20: hyp=['BRIGALON', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-76324-0000-21: ref=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0000-21: hyp=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0001-22: ref=['THEY', 'WERE', 'CERTAINLY', 'NO', 'NEARER', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0001-22: hyp=['THERE', 'WERE', 'CERTAINLY', 'NO', 'NEAR', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0002-23: ref=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0002-23: hyp=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0003-24: ref=['NOW', 'WHAT', 'WAS', 'THE', 'SENSE', 'OF', 
'IT', 'TWO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0003-24: hyp=['NOW', 'WHAT', 'IS', 'THE', 'SENSE', 'OF', 'IT', 'TOO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0004-25: ref=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0004-25: hyp=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0005-26: ref=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0005-26: hyp=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0006-27: ref=['HERS', 'HAPPENED', 'TO', 'BE', 'IN', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'THAT'] +6930-76324-0006-27: hyp=['HERS', 'HAPPENED', 'TO', 'BE', 'ON', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'IT'] +6930-76324-0007-28: ref=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SPRAGUE'] +6930-76324-0007-28: hyp=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SP'] +6930-76324-0008-29: ref=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0008-29: hyp=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0009-30: ref=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0009-30: hyp=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0010-31: ref=['WHAT', 'IN', 'THE', 'WORLD', 'IS', 'THAT', 'QUERIED', 'JOYCE'] +6930-76324-0010-31: hyp=['WHEN', 'IN', 'THE', 'WORLD', 'IS', 'IT', 'QUERIED', 'JOYCE'] +6930-76324-0011-32: ref=['THEY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'CYN'] +6930-76324-0011-32: hyp=['MAY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'SIN'] +6930-76324-0012-33: ref=["WE'LL", 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HAVE', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0012-33: hyp=['WILL', 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HALF', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0013-34: ref=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0013-34: hyp=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0014-35: ref=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0014-35: hyp=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0015-36: 
ref=['SMUGGLING', 'THE', 'HOUSE', 'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISK', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0015-36: hyp=['SMUGGLING', 'THE', 'HOUSE', 'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISKED', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0016-37: ref=['THE', 'LURE', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKILY', 'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0016-37: hyp=['THE', 'LURE', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKLY', 'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0017-38: ref=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0017-38: hyp=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0018-39: ref=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0018-39: hyp=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0019-40: ref=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0019-40: hyp=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0020-41: ref=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0020-41: hyp=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0021-42: ref=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0021-42: hyp=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0022-43: ref=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0022-43: hyp=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0023-44: ref=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0023-44: hyp=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0024-45: ref=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLE', 'LIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0024-45: hyp=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLELIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0025-46: ref=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0025-46: hyp=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0026-47: ref=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0026-47: hyp=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0027-48: 
ref=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'AN', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0027-48: hyp=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'ON', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0028-49: ref=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] +6930-76324-0028-49: hyp=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] +6930-81414-0000-50: ref=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0000-50: hyp=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0001-51: ref=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFAR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0001-51: hyp=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFIR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0002-52: ref=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0002-52: hyp=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0003-53: ref=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0003-53: hyp=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0004-54: ref=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0004-54: hyp=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0005-55: ref=['WHAT', 'WAS', 'THAT'] +6930-81414-0005-55: hyp=['WHAT', 'WAS', 'THAT'] +6930-81414-0006-56: ref=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0006-56: hyp=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0007-57: ref=['NOTHING', 'MORE', 'NOT', 'EVEN', 
'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0007-57: hyp=['NOTHING', 'MORE', 'NOT', 'EVEN', 'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0008-58: ref=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0008-58: hyp=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0009-59: ref=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'A', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFAR'] +6930-81414-0009-59: hyp=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'THE', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFIR'] +6930-81414-0010-60: ref=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0010-60: hyp=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0011-61: ref=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0011-61: hyp=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0012-62: ref=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFAR'] +6930-81414-0012-62: hyp=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFIR'] +6930-81414-0013-63: ref=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0013-63: hyp=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0014-64: ref=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLOURED'] +6930-81414-0014-64: hyp=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLORED'] +6930-81414-0015-65: ref=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0015-65: hyp=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0016-66: ref=['BUT', 'THAT', 'IS', "KAFFAR'S", 'KNIFE'] +6930-81414-0016-66: hyp=['BUT', 'THAT', 'IS', "KAFFIR'S", 'KNIF'] +6930-81414-0017-67: ref=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVENING'] +6930-81414-0017-67: hyp=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVEN'] +6930-81414-0018-68: ref=['I', 'REMEMBER', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0018-68: hyp=['I', 'REMEMBERED', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0019-69: ref=['VOLTAIRE', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0019-69: hyp=['VOLTAIRE', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0020-70: ref=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0020-70: hyp=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0021-71: ref=['A', 'TERRIBLE', 
'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0021-71: hyp=['A', 'TERRIBLE', 'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0022-72: ref=['I', 'HAD', 'AGAIN', 'BEEN', 'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0022-72: hyp=['I', 'HAD', 'AGAIN', 'BEEN', 'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0023-73: ref=['PERCHANCE', 'TOO', "KAFFAR'S", 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0023-73: hyp=['PERCHANCE', 'TOO', 'KAFFIRS', 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0024-74: ref=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0024-74: hyp=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0025-75: ref=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0025-75: hyp=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0026-76: ref=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0026-76: hyp=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0027-77: ref=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +6930-81414-0027-77: hyp=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +7021-79730-0000-1399: ref=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0000-1399: hyp=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0001-1400: ref=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0001-1400: hyp=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0002-1401: ref=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0002-1401: hyp=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0003-1402: ref=['AS', 'THE', 'CHAISE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOOR', 'STEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELICATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0003-1402: hyp=['AS', 'THE', 'CHASE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOORSTEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELEGATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0004-1403: ref=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHAISE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 
'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'UPON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0004-1403: hyp=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHASE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'ON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0005-1404: ref=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0005-1404: hyp=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0006-1405: ref=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'RELIES', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0006-1405: hyp=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'REALIZE', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0007-1406: ref=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0007-1406: hyp=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0008-1407: ref=['BUT', 'THIS', 'LAST', 'SUPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0008-1407: hyp=['BUT', 'THIS', 'LAST', 'OPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0009-1408: ref=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CAN', 'NOT', 'BE', 'OVERRATED'] +7021-79730-0009-1408: hyp=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CANNOT', 'BE', 'OVERRATED'] +7021-79740-0000-1384: ref=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0000-1384: hyp=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 
'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0001-1385: ref=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0001-1385: hyp=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0002-1386: ref=['NOW', 'DELIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 'INFLUENCE', 'AND', 'ASCENDENCY', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0002-1386: hyp=['NOW', 'GALLIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 'INFLUENCE', 'AND', 'A', 'SCENE', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0003-1387: ref=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0003-1387: hyp=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0004-1388: ref=['YOU', 'HAVE', 'COME', 'ANDELLA', 'ANDELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DOLL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0004-1388: hyp=['YOU', 'HAVE', 'COME', 'AMDELLA', 'AND', 'DELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DOLL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0005-1389: ref=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0005-1389: hyp=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0006-1390: ref=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANDELLA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0006-1390: hyp=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANNE', 'DELA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0007-1391: ref=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0007-1391: hyp=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0008-1392: ref=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0008-1392: hyp=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0009-1393: ref=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOR'] +7021-79740-0009-1393: hyp=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOUR'] +7021-79740-0010-1394: ref=['DELIA', 'CAME', 'TO', 'THE', 'PARLOR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDELLA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] 
+7021-79740-0010-1394: hyp=['DAHLIA', 'CAME', 'TO', 'THE', 'PARLOUR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDDELA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] +7021-79740-0011-1395: ref=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0011-1395: hyp=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0012-1396: ref=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0012-1396: hyp=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0013-1397: ref=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANY', 'THING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0013-1397: hyp=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANYTHING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0014-1398: ref=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79740-0014-1398: hyp=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79759-0000-1378: ref=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0000-1378: hyp=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0001-1379: ref=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0001-1379: hyp=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0002-1380: ref=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0002-1380: hyp=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0003-1381: ref=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0003-1381: hyp=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0004-1382: ref=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AN', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'IN', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESSES', 'UPON', 'THE', 'MIND'] +7021-79759-0004-1382: hyp=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 
'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AND', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'AND', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESS', 'UPON', 'THE', 'MIND'] +7021-79759-0005-1383: ref=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-79759-0005-1383: hyp=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-85628-0000-1409: ref=['BUT', 'ANDERS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0000-1409: hyp=['BUT', 'ANDREWS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0001-1410: ref=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0001-1410: hyp=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0002-1411: ref=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0002-1411: hyp=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: ref=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: hyp=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACKKNIFE'] +7021-85628-0004-1413: ref=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDERS'] +7021-85628-0004-1413: hyp=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDREWS'] +7021-85628-0005-1414: ref=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0005-1414: hyp=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0006-1415: ref=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDERS'] +7021-85628-0006-1415: hyp=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDRES'] +7021-85628-0007-1416: ref=['AND', 'SHE', 'TOOK', 'ANDERS', 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] 
+7021-85628-0007-1416: hyp=['AND', 'SHE', 'TOOK', "ANDREW'S", 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] +7021-85628-0008-1417: ref=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] +7021-85628-0008-1417: hyp=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] +7021-85628-0009-1418: ref=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0009-1418: hyp=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0010-1419: ref=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0010-1419: hyp=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0011-1420: ref=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDERS', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0011-1420: hyp=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDRE', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0012-1421: ref=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0012-1421: hyp=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0013-1422: ref=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0013-1422: hyp=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0014-1423: ref=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0014-1423: hyp=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0015-1424: ref=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'AROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0015-1424: hyp=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'ROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0016-1425: ref=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] 
+7021-85628-0016-1425: hyp=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] +7021-85628-0017-1426: ref=['SO', 'IT', 'IS', 'SAID', 'ANDERS'] +7021-85628-0017-1426: hyp=['SO', 'IT', 'IS', 'SAID', 'ANDREWS'] +7021-85628-0018-1427: ref=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0018-1427: hyp=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0019-1428: ref=['WITH', 'ONE', 'JUMP', 'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0019-1428: hyp=['WITH', 'ONE', 'JUMP', 'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0020-1429: ref=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0020-1429: hyp=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0021-1430: ref=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0021-1430: hyp=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0022-1431: ref=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0022-1431: hyp=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0023-1432: ref=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDERS', 'WAS', 'A', 'STUPID'] +7021-85628-0023-1432: hyp=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDERS', 'WAS', 'A', 'STUPID'] +7021-85628-0024-1433: ref=['ANDERS', 'FACE', 'GREW', 'RED'] +7021-85628-0024-1433: hyp=["ANDREW'S", 'FACE', 'GREW', 'RED'] +7021-85628-0025-1434: ref=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0025-1434: hyp=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0026-1435: ref=['NO', 'MY', 'LITTLE', 'SON', 'SHE', 'SAID'] +7021-85628-0026-1435: hyp=['NO', 'MY', 'LITTLE', 'FUN', 'SHE', 'SAID'] +7021-85628-0027-1436: ref=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7021-85628-0027-1436: hyp=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7127-75946-0000-467: ref=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 
'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0000-467: hyp=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0001-468: ref=['AH', 'VERY', 'WELL'] +7127-75946-0001-468: hyp=['AH', 'VERY', 'WELL'] +7127-75946-0002-469: ref=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'AU', 'COURANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0002-469: hyp=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'OKARRANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0003-470: ref=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'AIGNAN', 'AND', 'VILLEROY', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0003-470: hyp=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'DAN', 'AND', 'VILLEROI', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0004-471: ref=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0004-471: hyp=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0005-472: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUIS'] +7127-75946-0005-472: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUISE'] +7127-75946-0006-473: ref=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0006-473: hyp=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0007-474: ref=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0007-474: hyp=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0008-475: ref=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0008-475: hyp=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0009-476: ref=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0009-476: hyp=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0010-477: ref=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0010-477: hyp=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0011-478: ref=['YOU', 'WILL', 
'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0011-478: hyp=['YOU', 'WILL', 'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0012-479: ref=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0012-479: hyp=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0013-480: ref=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILETTE', 'BY', 'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0013-480: hyp=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILET', 'BY', 'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0014-481: ref=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DIAS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATER', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0014-481: hyp=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DAIS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATRE', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0015-482: ref=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRING', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0015-482: hyp=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRANG', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0016-483: ref=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0016-483: hyp=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0017-484: ref=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLORED', 'SILKEN', 'HOSE', 'OF', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0017-484: hyp=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLOURED', 'SILKEN', 'HOSE', 'A', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0018-485: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEM', 'TO', 'SOAR', 'ALONG'] +7127-75946-0018-485: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEMED', 'TO', 
'SOAR', 'ALONG'] +7127-75946-0019-486: ref=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0019-486: hyp=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0020-487: ref=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HAVING', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] +7127-75946-0020-487: hyp=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HEAVEN', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] +7127-75946-0021-488: ref=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] +7127-75946-0021-488: hyp=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] +7127-75946-0022-489: ref=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0022-489: hyp=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0023-490: ref=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0023-490: hyp=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0024-491: ref=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0024-491: hyp=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0025-492: ref=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0025-492: hyp=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0026-493: ref=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THE', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATER', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0026-493: hyp=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THIS', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATRE', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0027-494: ref=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGEMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0027-494: hyp=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0028-495: ref=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 'SO', 'THAT', 'HIS', 'LIMBS', 
'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0028-495: hyp=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 'SO', 'THAT', 'HIS', 'LIMBS', 'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0029-496: ref=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILLE', 'CAST', 'A', 'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75946-0029-496: hyp=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILLE', 'CAST', 'A', 'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75947-0000-426: ref=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDIMMED'] +7127-75947-0000-426: hyp=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDEMNED'] +7127-75947-0001-427: ref=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0001-427: hyp=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0002-428: ref=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0002-428: hyp=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0003-429: ref=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0003-429: hyp=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0004-430: ref=['EXPLAIN', 'YOURSELF'] +7127-75947-0004-430: hyp=['EXPLAIN', 'YOURSELF'] +7127-75947-0005-431: ref=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0005-431: hyp=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0006-432: ref=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0006-432: hyp=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0007-433: ref=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0007-433: hyp=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0008-434: ref=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0008-434: hyp=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0009-435: ref=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 
'RETURNED', 'TO', 'THE', 'THEATER', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THE', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0009-435: hyp=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 'RETURNED', 'TO', 'THE', 'THEATRE', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THEIR', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0010-436: ref=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0010-436: hyp=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0011-437: ref=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0011-437: hyp=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0012-438: ref=['INDEED', 'AH'] +7127-75947-0012-438: hyp=['INDEED', 'A'] +7127-75947-0013-439: ref=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0013-439: hyp=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0014-440: ref=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0014-440: hyp=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0015-441: ref=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0015-441: hyp=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0016-442: ref=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0016-442: hyp=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0017-443: ref=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0017-443: hyp=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0018-444: ref=['I', 'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERE'] +7127-75947-0018-444: hyp=['I', 
'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERS'] +7127-75947-0019-445: ref=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0019-445: hyp=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0020-446: ref=['NO', 'MORE', 'THAN', 'THE', 'DANCING'] +7127-75947-0020-446: hyp=['NO', 'MORE', 'THAN', 'THE', 'DANCING'] +7127-75947-0021-447: ref=['LA', 'VALLIERE', 'IS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONNAY', 'CHARENTE'] +7127-75947-0021-447: hyp=['LA', 'VALLIERS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONISON'] +7127-75947-0022-448: ref=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0022-448: hyp=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0023-449: ref=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'LAUGHING', 'LOUDLY'] +7127-75947-0023-449: hyp=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DETONICHALANT', 'LAUGHING', 'LOUDLY'] +7127-75947-0024-450: ref=['LOOK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'OAKS'] +7127-75947-0024-450: hyp=['LUCK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'YOKES'] +7127-75947-0025-451: ref=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0025-451: hyp=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0026-452: ref=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0026-452: hyp=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONE', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0027-453: ref=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THINKS', 'ATHENAIS', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0027-453: hyp=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TO', 'NECHERANT', 'THINKS', 'ETHNEE', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0028-454: ref=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ATHENAIS', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0028-454: hyp=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ETHINAY', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0029-455: ref=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0029-455: hyp=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0030-456: ref=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 'COUNT'] +7127-75947-0030-456: hyp=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 
'COUNT'] +7127-75947-0031-457: ref=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0031-457: hyp=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0032-458: ref=['YES', 'BUT', 'PERHAPS', 'I', 'FRIGHTENED', 'HER', 'IN', 'WHAT', 'WAY'] +7127-75947-0032-458: hyp=['YES', 'BUT', 'PERHAPS', 'I', 'FRIGHTENED', 'HER', 'AND', 'WHAT', 'WAY'] +7127-75947-0033-459: ref=['HOW', 'IS', 'IT', 'LA', 'VALLIERE', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THAT', 'THE', 'VICOMTE', 'DE', 'BRAGELONNE', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0033-459: hyp=['HOW', 'IS', 'IT', 'LA', 'VALLIERS', 'SAID', 'MADEMOISELLE', 'DE', 'TINACHANT', 'THAT', 'THE', 'VICOMTE', 'DE', 'BRAGELONE', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0034-460: ref=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0034-460: hyp=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0035-461: ref=['GOOD', 'GRACIOUS', 'HAS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0035-461: hyp=['GOOD', 'GRACIOUS', 'AS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0036-462: ref=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0036-462: hyp=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0037-463: ref=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0037-463: hyp=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0038-464: ref=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0038-464: hyp=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0039-465: ref=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTED', 'CAVALIERS'] +7127-75947-0039-465: hyp=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTAIN', 'CAVALIERS'] +7127-75947-0040-466: ref=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7127-75947-0040-466: hyp=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 
'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7176-88083-0000-707: ref=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0000-707: hyp=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0001-708: ref=['THE', 'MERGANSER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'IN', 'BLACK', 'AND', 'WHITE', 'AND', 'A', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0001-708: hyp=['THE', 'MERGANCER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'AND', 'BLACK', 'AND', 'WHITE', 'AND', 'HER', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0002-709: ref=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SAW', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0002-709: hyp=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SAW', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0003-710: ref=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0003-710: hyp=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0004-711: ref=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0004-711: hyp=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0005-712: ref=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0005-712: hyp=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0006-713: ref=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0006-713: hyp=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0007-714: ref=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0007-714: hyp=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 
'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0008-715: ref=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] +7176-88083-0008-715: hyp=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] +7176-88083-0009-716: ref=['THE', 'GREAT', 'HAWK', 'FOLLOWED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] +7176-88083-0009-716: hyp=['THE', 'GREAT', 'HAWK', 'FOWLED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] +7176-88083-0010-717: ref=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0010-717: hyp=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0011-718: ref=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0011-718: hyp=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0012-719: ref=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0012-719: hyp=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0013-720: ref=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0013-720: hyp=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0014-721: ref=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0014-721: hyp=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0015-722: ref=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TOP'] +7176-88083-0015-722: hyp=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TO'] +7176-88083-0016-723: ref=['STRAIGHTWAY', 'THE', 'HAWK', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0016-723: hyp=['STRAIGHTWAY', 'THE', 'HOT', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0017-724: ref=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 
'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0017-724: hyp=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0018-725: ref=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0018-725: hyp=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0019-726: ref=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0019-726: hyp=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0020-727: ref=['WHERE', 'THE', 'WAVES', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0020-727: hyp=['WHERE', 'THE', 'WAY', 'IS', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0021-728: ref=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0021-728: hyp=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0022-729: ref=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0022-729: hyp=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0023-730: ref=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0023-730: hyp=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0024-731: ref=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0024-731: hyp=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 
'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0025-732: ref=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0025-732: hyp=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0026-733: ref=['THE', 'DRAG', 'UPON', 'HIS', 'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0026-733: hyp=['THE', 'DRAG', 'UPON', 'HIS', 'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0027-734: ref=['THEN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-88083-0027-734: hyp=['THAN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-92135-0000-661: ref=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SYMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0000-661: hyp=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SIMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0001-662: ref=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0001-662: hyp=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0002-663: ref=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0002-663: hyp=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0003-664: ref=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0003-664: hyp=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0004-665: ref=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0004-665: hyp=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0005-666: ref=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0005-666: hyp=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0006-667: ref=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', "I'D", 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] 
+7176-92135-0006-667: hyp=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', 'I', 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] +7176-92135-0007-668: ref=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0007-668: hyp=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0008-669: ref=['LEND', 'ME', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0008-669: hyp=['LEND', 'ME', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0009-670: ref=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0009-670: hyp=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0010-671: ref=['HAM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0010-671: hyp=['HIM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0011-672: ref=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLILOQUY', 'IS', 'PLAIN'] +7176-92135-0011-672: hyp=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLOQUY', 'IS', 'PLAIN'] +7176-92135-0012-673: ref=['INDEED', 'IRRESOLUTION', 'BEING', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENT', 'WORKING', 'OF', 'THE', 'JAW', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0012-673: hyp=['INDEED', 'IRRESOLUTION', 'MEAN', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENCE', 'WORKING', 'OF', 'THE', 'JOB', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0013-674: ref=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0013-674: hyp=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0014-675: ref=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TECHNIQUE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0014-675: hyp=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TYPE', 'MAKE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0015-676: ref=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'TO', 'THE', 'END', 'WHEN', 'OPHELIA', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0015-676: hyp=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'TO', 'THE', 
'END', 'ONE', 'OF', 'YOU', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0016-677: ref=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0016-677: hyp=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0017-678: ref=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0017-678: hyp=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0018-679: ref=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0018-679: hyp=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0019-680: ref=['I', 'WANT', 'DOUBLE', 'NINE', 'HAL', 'LO'] +7176-92135-0019-680: hyp=['I', 'WANT', 'DOUBLE', 'NINE', 'HELLO'] +7176-92135-0020-681: ref=['DOUBLE', 'NINE', 'TWO', 'THREE', 'ELSINORE', 'DOUBLE', 'NINE', 'YES', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0020-681: hyp=['DOUBLE', 'NINE', 'TO', 'THREE', 'ELSINOR', 'DOUBLE', 'NOT', 'YES', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'PANEL', 'SPEAKING'] +7176-92135-0021-682: ref=['I', 'SAY', "I'VE", 'BEEN', 'WONDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0021-682: hyp=['I', 'SAY', "I'VE", 'BEEN', 'WANDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0022-683: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0022-683: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: ref=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: hyp=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0024-685: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0024-685: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0025-686: ref=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPEN', 'TO', 'BE', 'THE', 'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PET', 'FALCON', 'MONGOOSE', 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 
'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0025-686: hyp=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPEN', 'TO', 'BE', 'THE', 'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PET', 'FALCON', "MONGOO'S", 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0026-687: ref=['ENTER', 'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOAR', 'HOUND'] +7176-92135-0026-687: hyp=['INTER', 'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOREHOUND'] +7176-92135-0027-688: ref=['LADY', 'LARKSPUR', 'STARTS', 'SUDDENLY', 'AND', 'TURNS', 'TOWARDS', 'HIM'] +7176-92135-0027-688: hyp=['LADY', 'LARKSBURG', 'START', 'SUDDENLY', 'AND', 'TURNS', 'TOWARD', 'HIM'] +7176-92135-0028-689: ref=['LARKSPUR', 'BIT', 'ME', 'AGAIN', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0028-689: hyp=['LARKSPER', 'BID', 'ME', 'AGAIN', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0029-690: ref=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOONS'] +7176-92135-0029-690: hyp=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOON'] +7176-92135-0030-691: ref=['ENTER', 'LORD', 'ARTHUR', 'FLUFFINOSE'] +7176-92135-0030-691: hyp=['ENTERED', 'LORD', 'ARTHUR', "FLAPHANO'S"] +7176-92135-0031-692: ref=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0031-692: hyp=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0032-693: ref=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0032-693: hyp=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0033-694: ref=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0033-694: hyp=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0034-695: ref=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'OH', 'REGGIE', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0034-695: hyp=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'O', 'READY', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0035-696: ref=['THEN', 'LORD', 'TUPPENY', 'WELL', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0035-696: hyp=['THEN', 'LORD', 'TUPPENNY', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0036-697: ref=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAVING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0036-697: hyp=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAPING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0037-698: ref=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] +7176-92135-0037-698: hyp=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] 
+7176-92135-0038-699: ref=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVES', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HAWTREY', 'OR', 'OWEN', 'NARES', 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 'AND', 'ME'] +7176-92135-0038-699: hyp=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVED', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HALTREE', 'OR', 'OWEN', "NEAR'S", 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 'AND', 'ME'] +7176-92135-0039-700: ref=['TEA', 'PLEASE', 'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0039-700: hyp=['T', 'PLEASE', 'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0040-701: ref=['HOSTESS', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'A', 'MOMENT', 'THEN', 'HANDS', 'HIM', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'THUS', 'DECEIVING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0040-701: hyp=['HOSTES', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'MOMENT', 'THEN', 'HANDSOME', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'LUSTY', 'SEEING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0041-702: ref=['RE', 'ENTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'REMOVE', 'THE', 'TEA', 'THINGS', 'HOSTESS', 'TO', 'GUEST'] +7176-92135-0041-702: hyp=['REINTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'MOVED', 'THE', 'TEA', 'THINGS', 'HOSTESS', 'TWO', 'GUEST'] +7176-92135-0042-703: ref=['IN', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STAGE', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0042-703: hyp=['AND', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STEED', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0043-704: ref=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'BREAD', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUEST', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MO', 'OLD', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', 'ACTOR', 'IS', 'BETTER', 'SCHOOLED', 'THAN', 'THIS'] +7176-92135-0043-704: hyp=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'ABREAD', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUESTS', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MOLE', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', "ACTOR'S", 'BETTER', 'SCHOOLED', 'THAN', 'THIS'] +7176-92135-0044-705: ref=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0044-705: hyp=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0045-706: ref=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'CIGARETTE', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7176-92135-0045-706: hyp=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'SICK', 'RED', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7729-102255-0000-261: 
ref=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0000-261: hyp=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0001-262: ref=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 'FIFTY', 'FIVE'] +7729-102255-0001-262: hyp=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 'FIFTY', 'FIVE'] +7729-102255-0002-263: ref=['THAT', "SUMMER'S", 'EMIGRATION', 'HOWEVER', 'BEING', 'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGED', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0002-263: hyp=['THAT', "SUMMER'S", 'IMMIGRATION', 'HOWEVER', 'BEING', 'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGED', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0003-264: ref=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0003-264: hyp=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0004-265: ref=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0004-265: hyp=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0005-266: ref=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0005-266: hyp=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0006-267: ref=['COMING', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOLDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0006-267: hyp=['COMMON', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOULDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 
'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0007-268: ref=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTPORT', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0007-268: hyp=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTWARD', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0008-269: ref=['ALL', 'THE', 'TERRITORIAL', 'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHANNON', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONISTS', 'SUPPORTING', 'THE', 'TOPEKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LECOMPTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0008-269: hyp=['ALL', 'THE', 'TERRITORIAL', 'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHAN', 'AND', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONIST', 'SUPPORTING', 'THE', 'TOPEKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LE', 'COMTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0009-270: ref=['ALL', 'DISSENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEAVENWORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0009-270: hyp=['ALL', 'DESCENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEVIN', 'WORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0010-271: ref=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0010-271: hyp=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0011-272: ref=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'WERE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0011-272: hyp=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'ARE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 
'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'A', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0012-273: ref=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] +7729-102255-0012-273: hyp=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] +7729-102255-0013-274: ref=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0013-274: hyp=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0014-275: ref=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0014-275: hyp=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0015-276: ref=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', 'SHARPS', 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEAVENWORTH'] +7729-102255-0015-276: hyp=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', "SHARP'S", 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEVINWORTH'] +7729-102255-0016-277: ref=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LEGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0016-277: hyp=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LEGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0017-278: ref=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0017-278: hyp=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0018-279: ref=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0018-279: hyp=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0019-280: ref=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMPTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 
'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] +7729-102255-0019-280: hyp=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] +7729-102255-0020-281: ref=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 'MADE', 'AND', 'REEDER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0020-281: hyp=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 'MADE', 'AND', 'READER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0021-282: ref=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONISTS', 'AND', 'OUTLAWS'] +7729-102255-0021-282: hyp=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONOUS', 'AND', 'OUTLAWS'] +7729-102255-0022-283: ref=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUTING', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0022-283: hyp=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUT', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0023-284: ref=['THEIR', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0023-284: hyp=['THEIR', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0024-285: ref=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0024-285: hyp=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0025-286: ref=['THEIR', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0025-286: hyp=['THERE', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0026-287: ref=['IN', 'THE', 'SHOOTING', 'OF', 'SHERIFF', 'JONES', 'IN', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'BEEDER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENSES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0026-287: hyp=['IN', 'THE', 'SHOOTING', 'OF', "SHERIFF'S", 'JONES', 'AND', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'READER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENCES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'THE', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0027-288: ref=['FOOTNOTE', 'SUMNER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] 
+7729-102255-0027-288: hyp=['FOOTNOTE', 'SUMMER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] +7729-102255-0028-289: ref=['PRIVATE', 'PERSONS', 'WHO', 'HAD', 'LEASED', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 'PREVENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0028-289: hyp=['PRIVATE', 'PERSONS', 'WHO', 'AT', 'LEAST', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 'PRESENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0029-290: ref=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 'REFUSED', 'TO', 'YIELD'] +7729-102255-0029-290: hyp=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 'REFUSED', 'TO', 'YIELD'] +7729-102255-0030-291: ref=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0030-291: hyp=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0031-292: ref=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TWO', 'ARRESTS'] +7729-102255-0031-292: hyp=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TO', 'ARREST'] +7729-102255-0032-293: ref=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0032-293: hyp=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0033-294: ref=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0033-294: hyp=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0034-295: ref=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0034-295: hyp=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0035-296: ref=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0035-296: hyp=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0036-297: ref=['HE', 'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0036-297: hyp=['HE', 
'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0037-298: ref=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0037-298: hyp=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0038-299: ref=['ATCHISON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0038-299: hyp=['ATTITSON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0039-300: ref=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0039-300: hyp=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0040-301: ref=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'EMIGRANT', 'AID', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONORABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0040-301: hyp=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'IMMIGRANT', 'AIDS', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONORABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0041-302: ref=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGE', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0041-302: hyp=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGED', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0042-303: ref=['RELOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0042-303: hyp=['RE', 'LOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELLING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0043-304: ref=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LAND', 'TO', 'WESTPORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'AND', 'LEAVENWORTH'] +7729-102255-0043-304: hyp=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 
'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LANDA', 'WEST', 'PORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'IN', 'LEVINWORTH'] +7729-102255-0044-305: ref=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 'KICKAPOO', 'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEAVENWORTH', 'UNDER', 'A', 'REIGN', 'OF', 'TERROR'] +7729-102255-0044-305: hyp=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 'KICKAPOO', 'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEVIN', 'WORTH', 'UNDER', 'THE', 'REIGN', 'OF', 'TERROR'] +7729-102255-0045-306: ref=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0045-306: hyp=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0046-307: ref=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +7729-102255-0046-307: hyp=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +8224-274381-0000-1451: ref=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0000-1451: hyp=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0001-1452: ref=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MERCHISTON', 'SON', 'OF', 'THE', 'FAMOUS', 'INVENTOR', 'OF', 'THE', 'LOGARITHMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0001-1452: hyp=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MURCHISON', 'SON', 'OF', 'THE', 'FAMOUS', 
'INVENTOR', 'OF', 'THE', 'LOGARTHEMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0002-1453: ref=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCULCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 'PERHAPS', 'INCLINED'] +8224-274381-0002-1453: hyp=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCULCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 'PERHAPS', 'INCLINED'] +8224-274381-0003-1454: ref=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNSELS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0003-1454: hyp=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNCILS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0004-1455: ref=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELCHO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0004-1455: hyp=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELKO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0005-1456: ref=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARDS', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLEY', 'AND', 'THE', 'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0005-1456: hyp=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARD', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLY', 'AND', 'THE', 
'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0006-1457: ref=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0006-1457: hyp=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0007-1458: ref=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0007-1458: hyp=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0008-1459: ref=['WITH', 'THESE', 'AND', 'SOME', 'REENFORCEMENTS', 'OF', 'THE', 'ATHOLEMEN', 'AND', 'MACDONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0008-1459: hyp=['WITH', 'THESE', 'AND', 'SOME', 'REINFORCEMENTS', 'OF', 'THE', 'ETHEL', 'MEN', 'AND', 'MC', 'DONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0009-1460: ref=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'ARGYLE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INNERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0009-1460: hyp=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'OUR', 'GUILE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0010-1461: ref=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'INNERLOCHY', 'AND', 'PRESENTED', 'HIMSELF', 'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'AFFRIGHTENED', 'COVENANTERS'] +8224-274381-0010-1461: hyp=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'IN', 'A', 'LOCKY', 'AND', 'PRESENTED', 'HIMSELF', 
'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'A', 'FRIGHTENED', 'COVENANTERS'] +8224-274381-0011-1462: ref=['HIS', 'CONDUCT', 'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0011-1462: hyp=['HIS', 'CONDUCT', 'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0012-1463: ref=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0012-1463: hyp=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0013-1464: ref=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0013-1464: hyp=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0014-1465: ref=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'UNWARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0014-1465: hyp=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'THEN', 'WARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0015-1466: ref=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0015-1466: hyp=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0016-1467: ref=['VALOR', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 
'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0016-1467: hyp=['VALOR', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0017-1468: ref=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WHERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274381-0017-1468: hyp=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274384-0000-1437: ref=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', 'ALBANS', 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0000-1437: hyp=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', "ALBAN'S", 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0001-1438: ref=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0001-1438: hyp=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0002-1439: ref=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0002-1439: hyp=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0003-1440: ref=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0003-1440: hyp=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0004-1441: ref=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] 
+8224-274384-0004-1441: hyp=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] +8224-274384-0005-1442: ref=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 'ORDERED', 'THIS', 'PSALM', 'TO', 'BE', 'SUNG'] +8224-274384-0005-1442: hyp=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 'ORDERED', 'THIS', 'SUM', 'TO', 'BE', 'SUNG'] +8224-274384-0006-1443: ref=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0006-1443: hyp=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0007-1444: ref=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0007-1444: hyp=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0008-1445: ref=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALLEN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0008-1445: hyp=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALL', 'IN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0009-1446: ref=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0009-1446: hyp=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0010-1447: ref=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0010-1447: hyp=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0011-1448: ref=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0011-1448: hyp=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 
'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0012-1449: ref=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0012-1449: hyp=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOUR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0013-1450: ref=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8224-274384-0013-1450: hyp=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8230-279154-0000-617: ref=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0000-617: hyp=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0001-618: ref=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'CORRELATES', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0001-618: hyp=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'COROLLETS', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0002-619: ref=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERYONE', 'WOULD', 'AGREE', 'THAT', 'IS', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0002-619: hyp=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERY', 'ONE', 'WOULD', 'AGREE', 'THAT', 'IT', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0003-620: ref=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0003-620: hyp=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0004-621: ref=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'A', 'POPULATION', 'THAT', 
'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0004-621: hyp=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'THE', 'POPULATION', 'THAT', 'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0005-622: ref=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0005-622: hyp=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0006-623: ref=['THE', 'BEHAVIOURIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOUR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0006-623: hyp=['THE', 'BEHAVIOURIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0007-624: ref=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIOURIST', 'FEELS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0007-624: hyp=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIORIST', 'FILLS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0008-625: ref=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0008-625: hyp=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0009-626: ref=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0009-626: hyp=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0010-627: ref=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0010-627: hyp=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 
'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0011-628: ref=['SOME', 'IMAGES', 'LIKE', 'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0011-628: hyp=['SOME', 'IMAGES', 'LIKE', 'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0012-629: ref=['FAMILIARITY', 'IS', 'A', 'FEELING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0012-629: hyp=['FAMILIARITY', 'IS', 'A', 'FILLING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0013-630: ref=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0013-630: hyp=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0014-631: ref=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0014-631: hyp=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: ref=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: hyp=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0016-633: ref=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0016-633: hyp=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0017-634: ref=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FEELING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0017-634: hyp=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FILLING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0018-635: ref=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] +8230-279154-0018-635: hyp=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] 
+8230-279154-0019-636: ref=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 'SUCH', 'FEELINGS', 'MUST', 'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0019-636: hyp=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 'SUCH', 'FEELINGS', 'MUST', 'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0020-637: ref=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0020-637: hyp=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0021-638: ref=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0021-638: hyp=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0022-639: ref=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0022-639: hyp=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0023-640: ref=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0023-640: hyp=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0024-641: ref=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0024-641: hyp=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0025-642: ref=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0025-642: hyp=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 
'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0026-643: ref=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 'IN', 'THEORY'] +8230-279154-0026-643: hyp=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 'IN', 'THEORY'] +8230-279154-0027-644: ref=['A', 'GRAMOPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMOPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0027-644: hyp=['A', 'GRAMMAPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMOPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0028-645: ref=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0028-645: hyp=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0029-646: ref=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0029-646: hyp=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0030-647: ref=["SEMON'S", 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0030-647: hyp=['SIMMONS', 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0031-648: ref=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0031-648: hyp=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0032-649: ref=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0032-649: hyp=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0033-650: ref=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 
'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0033-650: hyp=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0034-651: ref=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEADS', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0034-651: hyp=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEAVES', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0035-652: ref=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0035-652: hyp=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0036-653: ref=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0036-653: hyp=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0037-654: ref=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0037-654: hyp=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0038-655: ref=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 'THE', 'WORD'] +8230-279154-0038-655: hyp=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 
'THE', 'WORD'] +8230-279154-0039-656: ref=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 'NOT'] +8230-279154-0039-656: hyp=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 'NOT'] +8230-279154-0040-657: ref=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0040-657: hyp=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0041-658: ref=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0041-658: hyp=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0042-659: ref=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0042-659: hyp=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0043-660: ref=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'A', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 'OCCURRENCE', 'AS', 'RECOGNITION'] +8230-279154-0043-660: hyp=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'OF', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 
'OCCURRENCE', 'AS', 'RECOGNITION'] +8455-210777-0000-972: ref=['I', 'REMAINED', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0000-972: hyp=['I', 'REMAIN', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0001-973: ref=['HAD', 'EVA', 'CRASWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNIT', 'AND', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0001-973: hyp=['HAD', 'EITHER', 'CRUSWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNITT', 'IN', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0002-974: ref=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0002-974: hyp=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0003-975: ref=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0003-975: hyp=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0004-976: ref=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0004-976: hyp=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0005-977: ref=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0005-977: hyp=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0006-978: ref=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLEAX', 'SAID', 'MY', 'WIFE'] +8455-210777-0006-978: hyp=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLE', 'AXE', 'SAID', 'MY', 'WIFE'] +8455-210777-0007-979: ref=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0007-979: 
hyp=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0008-980: ref=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0008-980: hyp=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0009-981: ref=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] +8455-210777-0009-981: hyp=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] +8455-210777-0010-982: ref=['THEIR', 'MASTERS', 'SAID', 'MISSUS', 'NEVERBEND'] +8455-210777-0010-982: hyp=['THEIR', 'MASTERS', 'SAID', 'MISSUS', 'NEVERBAND'] +8455-210777-0011-983: ref=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLEAX', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0011-983: hyp=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0012-984: ref=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0012-984: hyp=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0013-985: ref=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0013-985: hyp=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0014-986: ref=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0014-986: hyp=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0015-987: ref=['I', 'AND', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRASWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0015-987: hyp=['I', 'AM', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRESTWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0016-988: ref=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVOURED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BEFIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0016-988: hyp=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVOURED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BE', 'FIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0017-989: ref=['MY', 'WIFE', 
'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMEN', 'A', 'VERY', 'GOOD', 'DINNER'] +8455-210777-0017-989: hyp=['MY', 'WIFE', 'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMAN', 'A', 'VERY', 'GOOD', 'DINNER'] +8455-210777-0018-990: ref=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'HER'] +8455-210777-0018-990: hyp=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'THAT'] +8455-210777-0019-991: ref=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0019-991: hyp=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0020-992: ref=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0020-992: hyp=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0021-993: ref=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0021-993: hyp=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0022-994: ref=['OF', 'WHAT', 'MISSUS', 'NEVERBEND', 'HAD', 'GONE', 'THROUGH', 'IN', 'PROVIDING', 'BIRDS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0022-994: hyp=['OF', 'WHAT', 'MISSUS', 'NEVERS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0023-995: ref=['WE', 'SAT', 'WITH', 'THE', 'OFFICERS', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0023-995: hyp=['WE', 'SAT', 'WITH', 'THE', 'OFFICER', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0024-996: ref=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0024-996: hyp=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0025-997: ref=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0025-997: hyp=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0026-998: ref=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0026-998: hyp=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0027-999: ref=['WHEN', 'THIS', 'CAPTAIN', 'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 
'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FARTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0027-999: hyp=['WHEN', 'THIS', 'CAPTAIN', 'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FURTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0028-1000: ref=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0028-1000: hyp=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0029-1001: ref=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0029-1001: hyp=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0030-1002: ref=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVED', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0030-1002: hyp=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVE', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0031-1003: ref=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'IN', 'ENGLAND', 'STANDS', 'SO', 'HIGH'] +8455-210777-0031-1003: hyp=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'AND', 'IN', 'ENGLAND', 'STAND', 'SO', 'HIGH'] +8455-210777-0032-1004: ref=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0032-1004: hyp=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0033-1005: ref=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0033-1005: hyp=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0034-1006: ref=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TREBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0034-1006: hyp=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TROUBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0035-1007: ref=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINANDO', 'BROWN'] +8455-210777-0035-1007: hyp=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINAND', 'OBROWN'] +8455-210777-0036-1008: ref=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 
'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'IN', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0036-1008: hyp=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'OF', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0037-1009: ref=['YOU', 'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0037-1009: hyp=['YOU', 'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0038-1010: ref=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0038-1010: hyp=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0039-1011: ref=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0039-1011: hyp=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0040-1012: ref=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'HE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0040-1012: hyp=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'WE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0041-1013: ref=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0041-1013: hyp=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0042-1014: ref=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLEAX'] +8455-210777-0042-1014: hyp=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLE', 'AXE'] +8455-210777-0043-1015: ref=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0043-1015: hyp=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0044-1016: ref=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0044-1016: hyp=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0045-1017: ref=['THEN', 'THE', 'REPUBLIC', 'OF', 'BRITANNULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] +8455-210777-0045-1017: hyp=['THEN', 'THE', 'REPUBLIC', 'OF', 
'BRITAIN', 'NULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] +8455-210777-0046-1018: ref=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', "IT'S", 'THERE', 'SAID', 'CAPTAIN', 'BATTLEAX', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0046-1018: hyp=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', 'TO', 'THERE', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0047-1019: ref=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0047-1019: hyp=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0048-1020: ref=['WHAT', 'WOULD', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0048-1020: hyp=['WHAT', 'WILL', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0049-1021: ref=['LIEUTENANT', 'CROSSTREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0049-1021: hyp=['LIEUTENANT', 'CROSS', 'TREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0050-1022: ref=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0050-1022: hyp=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0051-1023: ref=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'CENTRED', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0051-1023: hyp=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'SENATE', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0052-1024: ref=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0052-1024: hyp=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0053-1025: ref=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0053-1025: hyp=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0054-1026: ref=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0054-1026: hyp=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0055-1027: ref=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITANNULA'] +8455-210777-0055-1027: hyp=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 
'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITAIN', 'NULA'] +8455-210777-0056-1028: ref=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0056-1028: hyp=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0057-1029: ref=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITANNULISTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATIVE', 'ASSEMBLY', 'AND', 'HENCE', 'HAVE', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0057-1029: hyp=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITON', 'ULYSTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATE', 'ASSEMBLY', 'AND', 'HENCE', 'HAS', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0058-1030: ref=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0058-1030: hyp=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0059-1031: ref=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNMENT'] +8455-210777-0059-1031: hyp=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNOR'] +8455-210777-0060-1032: ref=['THE', 'JOHN', 'BRIGHT', 'IS', 'ARMED', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITANNULA', 'SHOULD', 'PREVAIL'] +8455-210777-0060-1032: hyp=['THE', 'JOHN', 'BRIGHT', 'HIS', 'ARM', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITAIN', 'EULO', 'SHOULD', 'PREVAIL'] +8455-210777-0061-1033: ref=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTH', 'WEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBEND', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0061-1033: hyp=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTHWEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBEND', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0062-1034: ref=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0062-1034: hyp=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0063-1035: ref=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0063-1035: hyp=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0064-1036: ref=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 
'THE', 'GOVERNMENT'] +8455-210777-0064-1036: hyp=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 'THE', 'GOVERNMENT'] +8455-210777-0065-1037: ref=['I', 'SHALL', 'BE', 'HAPPY', 'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0065-1037: hyp=['I', 'SHALL', 'BE', 'HAPPY', 'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0066-1038: ref=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0066-1038: hyp=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0067-1039: ref=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] +8455-210777-0067-1039: hyp=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] +8455-210777-0068-1040: ref=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0068-1040: hyp=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0069-1041: ref=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'BATTLEAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0069-1041: hyp=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'ADELAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0070-1042: ref=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8455-210777-0070-1042: hyp=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8463-287645-0000-543: ref=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0000-543: hyp=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0001-544: ref=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0001-544: hyp=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0002-545: ref=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'HE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAN', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALMENT', 'WHILE', 'UNDER', 'HOLLAN', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0002-545: hyp=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'SHE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAND', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALIMENT', 'WHILE', 'UNDER', 'HOLLAND', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0003-546: ref=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAN'] +8463-287645-0003-546: hyp=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAND'] 
+8463-287645-0004-547: ref=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] +8463-287645-0004-547: hyp=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] +8463-287645-0005-548: ref=['A', 'FEW', 'YEARS', 'BACK', 'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'COLD', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0005-548: hyp=['A', 'FEW', 'YEARS', 'BACK', 'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'CALLED', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0006-549: ref=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0006-549: hyp=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'THE', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0007-550: ref=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UP', 'STAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HIT', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0007-550: hyp=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UPSTAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HID', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0008-551: ref=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0008-551: hyp=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0009-552: ref=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0009-552: hyp=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0010-553: ref=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0010-553: hyp=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0011-554: ref=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 
'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] +8463-287645-0011-554: hyp=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] +8463-287645-0012-555: ref=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0012-555: hyp=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0013-556: ref=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0013-556: hyp=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0014-557: ref=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-287645-0014-557: hyp=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-294825-0000-558: ref=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0000-558: hyp=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0001-559: ref=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHERWORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0001-559: hyp=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHER', 'WORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0002-560: ref=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0002-560: hyp=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0003-561: ref=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTURISTIC', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0003-561: hyp=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTUREISTIC', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0004-562: ref=['IN', 'ALL', 'THE', 'NOVEL', 'HAD', 'A', 'DIFFICULT', 'GESTATION'] +8463-294825-0004-562: hyp=['IN', 'ALL', 'THE', 'NOVEL', 'HEAD', 'A', 'DIFFICULT', 'GESTATION'] +8463-294825-0005-563: ref=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0005-563: hyp=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0006-564: ref=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINED', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0006-564: hyp=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINING', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 
'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0007-565: ref=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERNE'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0007-565: hyp=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERN'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0008-566: ref=['BUT', 'MUCH', 'OF', 'THE', "NOVEL'S", 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0008-566: hyp=['BUT', 'MUCH', 'OF', 'THE', 'NOVELS', 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0009-567: ref=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', "HE'S", 'A', 'FIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'ARE', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCAUST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0009-567: hyp=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', 'HE', 'IS', 'A', 'FRIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'OR', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCOST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0010-568: ref=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0010-568: hyp=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0011-569: ref=["HE'S", 'SWIFTLY', 'PUNISHED'] +8463-294825-0011-569: hyp=['HE', 'IS', 'SWIFTLY', 'PUNISH'] +8463-294825-0012-570: ref=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0012-570: hyp=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0013-571: ref=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'BEEBE', 'POLAR', 'TRAVELER', 'SIR', 'ERNEST', 'SHACKLETON'] +8463-294825-0013-571: hyp=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'B', 'POLAR', 'TRAVELLERS', 'ARE', 'EARNEST', 
'SHACKLETON'] +8463-294825-0014-572: ref=['FATHOM', 'SIX', 'FEET'] +8463-294825-0014-572: hyp=['FATHOM', 'SIX', 'FEET'] +8463-294825-0015-573: ref=['GRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0015-573: hyp=['GRAHAM', 'ROUGHLY', 'WON', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: ref=['MILLIGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSAND', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: hyp=['MILAGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSANDTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0017-575: ref=['LITER', 'ROUGHLY', 'ONE', 'QUART'] +8463-294825-0017-575: hyp=['LATER', 'ROUGHLY', 'WON', 'COURT'] +8463-294825-0018-576: ref=['METER', 'ROUGHLY', 'ONE', 'YARD', 'THREE', 'INCHES'] +8463-294825-0018-576: hyp=['METER', 'ROUGHLY', 'ONE', 'YARD', 'THREE', 'INCHES'] +8463-294825-0019-577: ref=['MILLIMETER', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294825-0019-577: hyp=['MILLIMETRE', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294828-0000-578: ref=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0000-578: hyp=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0001-579: ref=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'NO', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTHWEST', 'PASSAGE'] +8463-294828-0001-579: hyp=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'KNOW', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTH', 'WEST', 'PASSAGE'] +8463-294828-0002-580: ref=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'A', 'REST'] +8463-294828-0002-580: hyp=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'ARREST'] +8463-294828-0003-581: ref=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0003-581: hyp=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0004-582: ref=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0004-582: hyp=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0005-583: ref=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0005-583: hyp=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0006-584: ref=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0006-584: hyp=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0007-585: ref=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'THE', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALEEN', 
'WHALE'] +8463-294828-0007-585: hyp=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'A', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALEEN', 'WHALE'] +8463-294828-0008-586: ref=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LAD'] +8463-294828-0008-586: hyp=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LA'] +8463-294828-0009-587: ref=['NOT', 'ONCE', 'DID', 'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'A', 'JOURNEY'] +8463-294828-0009-587: hyp=['NOT', 'ONCE', 'DID', 'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'THE', 'JOURNEY'] +8463-294828-0010-588: ref=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUITCASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0010-588: hyp=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUIT', 'CASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0011-589: ref=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0011-589: hyp=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0012-590: ref=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0012-590: hyp=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'THAT', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0013-591: ref=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TIRESOME'] +8463-294828-0013-591: hyp=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TO', 'HIRESUM'] +8463-294828-0014-592: ref=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0014-592: hyp=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0015-593: ref=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TIME', 'CONSEIL', 'APPEARED'] +8463-294828-0015-593: hyp=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TON', 'CONSEIL', 'APPEARED'] +8463-294828-0016-594: ref=['DID', 'MASTER', 'SUMMON', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0016-594: hyp=['DEAD', 'MASTER', 'SUMMONED', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0017-595: ref=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0017-595: hyp=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELLING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0018-596: ref=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0018-596: hyp=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0019-597: ref=['ANYHOW', 
"WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 'FRANCE'] +8463-294828-0019-597: hyp=['ANYHOW', "WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 'FRANCE'] +8463-294828-0020-598: ref=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0020-598: hyp=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0021-599: ref=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0021-599: hyp=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0022-600: ref=["WE'RE", 'LEAVING', 'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0022-600: hyp=["WE'RE", 'LEAVING', 'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0023-601: ref=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0023-601: hyp=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0024-602: ref=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0024-602: hyp=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0025-603: ref=['BUT', "WE'RE", 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0025-603: hyp=['BUT', 'WERE', 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0026-604: ref=['WE', 'HAVE', 'A', 'COMMANDER', "WHO'S", 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0026-604: hyp=['WE', 'HAVE', 'A', 'COMMANDER', 'WHOSE', 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0027-605: ref=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0027-605: hyp=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0028-606: ref=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABIRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0028-606: hyp=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0029-607: ref=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0029-607: hyp=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0030-608: ref=['I', 'ASKED', 'FOR', 'COMMANDER', 'FARRAGUT'] +8463-294828-0030-608: hyp=['I', 'ASKED', 'FOR', 'COMMANDER', 'FERRAGUT'] +8463-294828-0031-609: ref=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTERDECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0031-609: hyp=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTER', 'DECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0032-610: ref=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] +8463-294828-0032-610: hyp=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] 
+8463-294828-0033-611: ref=['I', 'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', 'OFFICERS', 'MESS'] +8463-294828-0033-611: hyp=['I', 'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', "OFFICER'S", 'MASS'] +8463-294828-0034-612: ref=["WE'LL", 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0034-612: hyp=['WILL', 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0035-613: ref=['AND', 'SO', 'IF', "I'D", 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0035-613: hyp=['AND', 'SO', 'IF', 'I', 'HAD', 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0036-614: ref=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0036-614: hyp=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0037-615: ref=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0037-615: hyp=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0038-616: ref=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8463-294828-0038-616: hyp=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8555-284447-0000-2299: ref=['THEN', 'HE', 'RUSHED', 'DOWN', 'STAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'IF', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0000-2299: hyp=['THEN', 'HE', 'RUSHED', 'DOWNSTAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'AT', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0001-2300: ref=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', "I'VE", 'HAD', 'MY', 'COFFEE', 'AND', 'OATMEAL', "I'LL", 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 'PATCH', 'HIM'] 
+8555-284447-0001-2300: hyp=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', "I'VE", 'HAD', 'MY', 'COFFEE', 'AND', 'OATMEAL', 'I', 'WILL', 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 'PAT', 'HIM'] +8555-284447-0002-2301: ref=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'O', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', "I'VE", 'HAD', "CONSID'BLE", 'EXERCISE', 'THIS', 'MORNIN', 'AND', "I'M", 'ALL', 'READY', 'FOR', 'BREAKFAS'] +8555-284447-0002-2301: hyp=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'OF', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', 'I', 'HAVE', 'HAD', 'CONSIDERABLE', 'EXERCISE', 'THIS', 'MORNING', 'AND', "I'M", 'ALREADY', 'FOR', 'BREAKFAST'] +8555-284447-0003-2302: ref=['BUT', "CAP'N", 'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0003-2302: hyp=['BUT', "CAP'N", 'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0004-2303: ref=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0004-2303: hyp=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0005-2304: ref=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0005-2304: hyp=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0006-2305: ref=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THRONE', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'FOR', 'THE', 'SNUBNOSED', 'PRINCESSES'] +8555-284447-0006-2305: hyp=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THROWN', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'WITH', 'A', 'SNUB', 'NOSED', 'PRINCESSES'] +8555-284447-0007-2306: ref=['THEREFORE', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANYONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0007-2306: hyp=['THEY', 'ARE', 'FOR', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANY', 'ONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0008-2307: ref=['RICH', 'JEWELS', 'OF', 'BLUE', 'STONES', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THEY', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0008-2307: hyp=['RICH', 'JEWELS', 'OF', 'BLUESTS', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THERE', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0009-2308: ref=['MORNIN', 'GIRLS', 'HOPE', 'YE', 'FEEL', 'AS', 'WELL', 'AS', 'YE', 'LOOK'] +8555-284447-0009-2308: hyp=['MORNING', 'GIRLS', 'OH', 'BE', 'BILL', 'AS', 'WELL', 'AS', 'YOU', 'LOOK'] +8555-284447-0010-2309: ref=['CONTROL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 
'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANYONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', "YOU'LL", 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0010-2309: hyp=['CONTROLL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANY', 'ONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', 'YOU', 'WILL', 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0011-2310: ref=['SUPPOSE', "IT'S", 'A', 'FRIEND'] +8555-284447-0011-2310: hyp=['SUPPOSE', "IT'S", 'OF', 'BRAND'] +8555-284447-0012-2311: ref=['THE', 'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0012-2311: hyp=['THE', 'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0013-2312: ref=['WHY', 'YOU', 'SAID', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THIS', 'BILLYGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0013-2312: hyp=['WHY', 'YOU', 'SIT', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THE', 'SPILLIGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0014-2313: ref=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0014-2313: hyp=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0015-2314: ref=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', "THEY'RE", 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'GOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'EM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0015-2314: hyp=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', 'THEY', 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'BOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'THEM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0016-2315: ref=['FINE', 'GLORIOUS'] +8555-284447-0016-2315: hyp=['FINE', 'GLORIOUS'] +8555-284447-0017-2316: ref=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0017-2316: hyp=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 
'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0018-2317: ref=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0018-2317: hyp=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0019-2318: ref=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUTTED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', 'KING', 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0019-2318: hyp=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUDDED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', "KING'S", 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0020-2319: ref=['THE', "GOAT'S", 'WARLIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0020-2319: hyp=['THE', 'GOATS', 'WORE', 'LIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0021-2320: ref=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERYONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0021-2320: hyp=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERY', 'ONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0022-2321: ref=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MATE', 'AS', 'SAVED', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0022-2321: hyp=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MADE', 'TO', 'SEE', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0023-2322: ref=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEIN', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', "I'M", 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUS', 'ONE', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', "FEELIN'S"] +8555-284447-0023-2322: hyp=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEING', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', "I'M", 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUST', 'SWUNG', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', 'FEELINS'] +8555-284447-0024-2323: ref=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284447-0024-2323: hyp=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284449-0000-2324: ref=['SO', 'THEY', 'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLASTS', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'ASSEMBLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0000-2324: hyp=['SO', 'THEY', 
'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLAST', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'A', 'SIMPLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0001-2325: ref=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0001-2325: hyp=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0002-2326: ref=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORALIE', 'AND', 'CAPTAIN', 'TINTINT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0002-2326: hyp=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORALLY', 'AND', 'CAPTAIN', 'TINTANT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0003-2327: ref=['WHEN', 'THE', 'BLUESKINS', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVORITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0003-2327: hyp=['WHEN', 'THE', 'BLUESKIN', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVOURITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0004-2328: ref=['SINCE', 'LAST', 'THURSDAY', 'I', 'GHIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0004-2328: hyp=['SINCE', 'LAST', 'THURSDAY', 'I', 'GIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0005-2329: ref=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0005-2329: hyp=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0006-2330: ref=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0006-2330: hyp=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0007-2331: ref=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AN', 'ENJOY', 'THE', 'GRAND', 'FEAST', "THAT'S", 'BEING', 'COOKED', "I'M", 'NEARLY', 'STARVED', 'MYSELF', 'FOR', 'THIS', 'CONQUERIN', 'KINGDOMS', 'IS', 'HARD', 'WORK'] +8555-284449-0007-2331: hyp=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AND', 'ENJOY', 'THE', 'GREAT', 'FEAST', 'ITS', 'BEING', 'COOKED', "I'M", 'NEARLY', 'STORMED', 'MYSELF', 'FOR', 'THIS', 'CONQUERING', "KINGDOM'S", 'IS', 'HARD', 
'WORK'] +8555-284449-0008-2332: ref=['THEN', 'SHE', 'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0008-2332: hyp=['THEN', 'SHE', 'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0009-2333: ref=['YOU', 'ARE', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0009-2333: hyp=['YOU', 'ARE', 'A', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0010-2334: ref=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0010-2334: hyp=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0011-2335: ref=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0011-2335: hyp=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0012-2336: ref=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONORABLE', 'GOAT', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0012-2336: hyp=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONED', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0013-2337: ref=['SCUSE', 'ME', 'SAID', 'TROT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0013-2337: hyp=['EXCUSE', 'ME', 'SAID', 'SHOT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0014-2338: ref=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0014-2338: hyp=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0015-2339: ref=["I'LL", 'NOT', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0015-2339: hyp=["I'LL", 'NOT', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0016-2340: ref=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0016-2340: hyp=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0017-2341: ref=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 
'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] +8555-284449-0017-2341: hyp=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] +8555-284449-0018-2342: ref=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0018-2342: hyp=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0019-2343: ref=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'AND', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0019-2343: hyp=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'AND', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0020-2344: ref=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-284449-0020-2344: hyp=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-292519-0000-2283: ref=['BRIGHTER', 'THAN', 'EARLY', "DAWN'S", 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0000-2283: hyp=['BRIGHTER', 'THAN', 'EARLY', 'DAWNS', 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0001-2284: ref=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', 'GARDENS', 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'LILY', 'PLANTS'] +8555-292519-0001-2284: hyp=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', "GARDEN'S", 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'IS', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'THE', 'LILY', 'PLANTS'] +8555-292519-0002-2285: ref=['VENICE'] +8555-292519-0002-2285: hyp=['VENICE'] +8555-292519-0003-2286: ref=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEACHED', 
"KING'S", 'GALLEY', 'WHOSE', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0003-2286: hyp=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEECHED', "KING'S", 'GALLEY', 'WHO', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0004-2287: ref=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0004-2287: hyp=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0005-2288: ref=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 'GLOW'] +8555-292519-0005-2288: hyp=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 'GLOW'] +8555-292519-0006-2289: ref=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0006-2289: hyp=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0007-2290: ref=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0007-2290: hyp=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0008-2291: ref=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MEN', 'THE', 'GRINNING', 'MEN', 'PASS'] +8555-292519-0008-2291: hyp=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MAN', 'THE', 'GRINNING', 'MAN', 'PASS'] +8555-292519-0009-2292: ref=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WANDER', 'IN', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'YE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0009-2292: hyp=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WONDER', 'AND', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'YE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0010-2293: ref=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0010-2293: hyp=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0011-2294: ref=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0011-2294: hyp=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0012-2295: ref=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0012-2295: hyp=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0013-2296: ref=['THAT', 'WAS', 
'BUT', 'RUSTLING', 'OF', 'DRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0013-2296: hyp=['THAT', 'WAS', 'BUT', 'RUSTLING', 'OF', 'TRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0014-2297: ref=['SHE', 'WAS', 'ALONE', 'THAT', 'NIGHT'] +8555-292519-0014-2297: hyp=['SHE', 'WAS', 'ALONE', 'THAT', 'NIGHT'] +8555-292519-0015-2298: ref=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0015-2298: hyp=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +908-157963-0000-1321: ref=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] +908-157963-0000-1321: hyp=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONNA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] +908-157963-0001-1322: ref=['O', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0001-1322: hyp=['OH', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0002-1323: ref=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0002-1323: hyp=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0003-1324: ref=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0003-1324: hyp=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0004-1325: ref=['THEL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', 'INFANTS', 'FACE'] +908-157963-0004-1325: hyp=['FELL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', "INFANT'S", 'FACE'] +908-157963-0005-1326: ref=['LIKE', 'THE', 'DOVES', 'VOICE', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0005-1326: hyp=['LIKE', 'THE', "DOVE'S", 'BOYS', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0006-1327: ref=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0006-1327: hyp=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0007-1328: ref=['THE', 'LILLY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERD', 'THE', 'LOVELY', 'MAID', 'AND', 'SAID', 'I', 'AM', 'A', 'WATRY', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PERCHES', 'ON', 'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEW', 'BORN', 'LILY', 'FLOWER'] +908-157963-0007-1328: hyp=['THE', 'LILY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERED', 
'THE', 'LOVELY', 'MAIDEN', 'SAID', 'I', 'AM', 'A', 'WATCHERY', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PURCHASE', 'ON', 'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEWBORN', 'LILY', 'FLOWER'] +908-157963-0008-1329: ref=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALL', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', 'SUMMERS', 'HEAT', 'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULD', 'THEL', 'COMPLAIN'] +908-157963-0008-1329: hyp=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALT', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', "SUMMER'S", 'HEAT', 'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULDST', 'THOU', 'COMPLAIN'] +908-157963-0009-1330: ref=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VALES', 'OF', 'HAR', 'UTTER', 'A', 'SIGH'] +908-157963-0009-1330: hyp=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VEILS', 'OF', 'HOAR', 'UTTER', 'A', 'SIGH'] +908-157963-0010-1331: ref=['SHE', 'CEASD', 'AND', 'SMILD', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0010-1331: hyp=['SHE', 'CEASED', 'AND', 'SMILED', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0011-1332: ref=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0011-1332: hyp=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0012-1333: ref=['BUT', 'THEL', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0012-1333: hyp=['BUT', 'THOUGH', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0013-1334: ref=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THRO', 'THE', 'HUMID', 'AIR'] +908-157963-0013-1334: hyp=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THROUGH', 'THE', 'HUMAN', 'AIR'] +908-157963-0014-1335: ref=['DESCEND', 'O', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'THEL'] +908-157963-0014-1335: hyp=['DESCEND', 'A', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'FELL'] +908-157963-0015-1336: ref=['O', 'LITTLE', 'CLOUD', 'THE', 'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'THEL', 'IS', 'LIKE', 'TO', 'THEE'] +908-157963-0015-1336: hyp=['O', 'LITTLE', 'CLOUD', 'THE', 
'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'FELL', 'IS', 'LIKE', 'TO', 'THEE'] +908-157963-0016-1337: ref=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0016-1337: hyp=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0017-1338: ref=['THE', 'CLOUD', 'THEN', 'SHEWD', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', "EMERG'D"] +908-157963-0017-1338: hyp=['THE', 'CLOUD', 'THEN', 'SHOWED', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', 'EMERGED'] +908-157963-0018-1339: ref=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0018-1339: hyp=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0019-1340: ref=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'HOLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DEW', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0019-1340: hyp=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'WHOLLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DO', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0020-1341: ref=['TILL', 'WE', 'ARISE', "LINK'D", 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0020-1341: hyp=['TILL', 'WE', 'ARISE', 'LINKED', 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0021-1342: ref=['LIVES', 'NOT', 'ALONE', 'NOR', 'OR', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0021-1342: hyp=['LIVES', 'NOT', 'ALONE', 'NOR', 'OF', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0022-1343: ref=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0022-1343: hyp=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0023-1344: ref=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', 'LILLYS', 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUD', 'SAILD', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0023-1344: hyp=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', "LILY'S", 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUDS', 'SAILED', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0024-1345: ref=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0024-1345: hyp=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0025-1346: ref=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 
'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', 'MOTHERS', 'SMILES'] +908-157963-0025-1346: hyp=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', "MOTHER'S", 'SMILES'] +908-157963-0026-1347: ref=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0026-1347: hyp=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0027-1348: ref=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0027-1348: hyp=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0028-1349: ref=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWRING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0028-1349: hyp=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWERING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0029-1350: ref=['WHY', 'A', 'TONGUE', "IMPRESS'D", 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0029-1350: hyp=['WHY', 'A', 'TONGUE', 'IMPRESSED', 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0030-1351: ref=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-157963-0030-1351: hyp=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-31957-0000-1352: ref=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0000-1352: hyp=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0001-1353: ref=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'PRODIGAL', 'INWARD', 'JOY'] +908-31957-0001-1353: hyp=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'CHRONICAL', 'INWARD', 'JOY'] +908-31957-0002-1354: ref=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0002-1354: hyp=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0003-1355: ref=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BRAKE', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0003-1355: hyp=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BREAK', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0004-1356: ref=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0004-1356: hyp=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0005-1357: ref=['ALAS', 'I', 'HAVE', 
'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0005-1357: hyp=['ALAS', 'I', 'HAVE', 'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0006-1358: ref=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0006-1358: hyp=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0007-1359: ref=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0007-1359: hyp=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0008-1360: ref=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEARED', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0008-1360: hyp=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEAR', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0009-1361: ref=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0009-1361: hyp=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0010-1362: ref=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0010-1362: hyp=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0011-1363: ref=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0011-1363: hyp=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0012-1364: ref=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0012-1364: hyp=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0013-1365: ref=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGELS', 'SPEAK'] +908-31957-0013-1365: hyp=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGEL', 'SPEAK'] +908-31957-0014-1366: ref=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0014-1366: hyp=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0015-1367: ref=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', "LOVE'S", 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PRECEDE', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IN', 'PERFECT', 'PURPLE', 'STATE', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0015-1367: hyp=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', 'LOVES', 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PROCEED', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IMPERFECT', 'PURPLE', 'STATE', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0016-1368: ref=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0016-1368: hyp=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0017-1369: 
ref=['MUSSULMANS', 'AND', 'GIAOURS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0017-1369: hyp=['MUSSULMANS', 'AND', 'GUY', 'ORS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0018-1370: ref=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0018-1370: hyp=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0019-1371: ref=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0019-1371: hyp=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0020-1372: ref=['I', 'THANK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0020-1372: hyp=['I', 'THINK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0021-1373: ref=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0021-1373: hyp=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0022-1374: ref=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPEARLED'] +908-31957-0022-1374: hyp=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPELLED'] +908-31957-0023-1375: ref=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0023-1375: hyp=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0024-1376: ref=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GRIEFS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0024-1376: hyp=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GREEDS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0025-1377: ref=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] +908-31957-0025-1377: hyp=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] diff --git 
a/log/fast_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e7bca59bb601a645d5d9b4b7f373e6afdd33a86 --- /dev/null +++ b/log/fast_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,5878 @@ +1688-142285-0000-1948: ref=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 'STEEL', 'ANON'] +1688-142285-0000-1948: hyp=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 'STEEL', 'ANON'] +1688-142285-0001-1949: ref=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUEST', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0001-1949: hyp=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUESTS', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0002-1950: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0002-1950: hyp=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0003-1951: ref=['I', 'REALLY', 'LIKED', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0003-1951: hyp=['I', 'REALLY', 'LIKE', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0004-1952: ref=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKED', 'BEST', 'OF', 'ALL'] +1688-142285-0004-1952: hyp=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKE', 'BEST', 'OF', 'ALL'] +1688-142285-0005-1953: ref=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'OF', 'BEING', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0005-1953: hyp=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'HAVE', 'BEEN', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0006-1954: ref=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'IN', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0006-1954: hyp=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0007-1955: ref=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PARTS', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0007-1955: hyp=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 
'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PART', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0008-1956: ref=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0008-1956: hyp=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0009-1957: ref=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0009-1957: hyp=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0010-1958: ref=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0010-1958: hyp=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0011-1959: ref=['ALL', 'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0011-1959: hyp=['ALL', 'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0012-1960: ref=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0012-1960: hyp=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0013-1961: ref=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISSUS', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TARDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0013-1961: hyp=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISTER', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TIDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0014-1962: ref=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0014-1962: hyp=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0015-1963: ref=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0015-1963: hyp=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0016-1964: ref=['OH', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0016-1964: hyp=['O', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0017-1965: ref=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0017-1965: hyp=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0018-1966: ref=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THEY', 'WERE', 'POOR', 'BECAUSE', 'THEY', 'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0018-1966: hyp=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THERE', 'WERE', 'POOR', 'BECAUSE', 'THEY', 
'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0019-1967: ref=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 'THAT'] +1688-142285-0019-1967: hyp=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 'THAT'] +1688-142285-0020-1968: ref=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0020-1968: hyp=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0021-1969: ref=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0021-1969: hyp=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0022-1970: ref=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0022-1970: hyp=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0023-1971: ref=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0023-1971: hyp=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0024-1972: ref=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0024-1972: hyp=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0025-1973: ref=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0025-1973: hyp=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0026-1974: ref=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0026-1974: hyp=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0027-1975: ref=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0027-1975: hyp=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0028-1976: ref=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0028-1976: hyp=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0029-1977: ref=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0029-1977: hyp=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0030-1978: ref=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'HER', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 'UPON', 'HER', 'SYMPATHY'] +1688-142285-0030-1978: hyp=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'A', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 
'UPON', 'HER', 'SYMPATHY'] +1688-142285-0031-1979: ref=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LEFT', 'IT', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'ENDURE', 'SEVERE', 'BODILY', 'SUFFERING'] +1688-142285-0031-1979: hyp=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LIFTED', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'INDURE', 'SEVERE', 'BODILY', 'SUFFERING'] +1688-142285-0032-1980: ref=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0032-1980: hyp=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0033-1981: ref=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0033-1981: hyp=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0034-1982: ref=['A', 'SERVANT', 'TO', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'HER', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTION', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0034-1982: hyp=['A', 'SERVANT', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'THE', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTIONS', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0035-1983: ref=['VISITING', 'REGISTER', 'OFFICES', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0035-1983: hyp=['VISITING', 'REGISTER', 'OFFICERS', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0036-1984: ref=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 'SPEAK', 'TO', 'HER'] +1688-142285-0036-1984: hyp=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 
'SPEAK', 'TO', 'HER'] +1688-142285-0037-1985: ref=['WELL', 'BESSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0037-1985: hyp=['WELL', 'BUSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0038-1986: ref=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YO', 'KNOW', 'WHAT', 'THAT', 'MEANS'] +1688-142285-0038-1986: hyp=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YOU', 'KNOW', 'WHAT', 'THAT', 'MEANS'] +1688-142285-0039-1987: ref=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0039-1987: hyp=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0040-1988: ref=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BY', 'COUGHING', "O'NIGHTS", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'O', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'O', 'BEULAH', 'AND', 'WHEN', 'I', 'THINK', "I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0040-1988: hyp=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BUT', 'COUGHING', 'A', "KNIGHT'S", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'OF', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'OF', 'BOOLA', 'AND', 'WHEN', 'I', 'THINK', "I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0041-1989: ref=['MARGARET', 'TURNED', 'ROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0041-1989: hyp=['MARGARET', 'TURNED', 'ROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0042-1990: ref=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0042-1990: hyp=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0043-1991: ref=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0043-1991: hyp=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0044-1992: ref=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0044-1992: hyp=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0045-1993: ref=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0045-1993: hyp=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0046-1994: ref=['NOUGHT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0046-1994: hyp=['NOT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0047-1995: ref=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0047-1995: hyp=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0048-1996: ref=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'AT', 'MILTON'] +1688-142285-0048-1996: hyp=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'IN', 'MILTON'] +1688-142285-0049-1997: ref=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0049-1997: hyp=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0050-1998: ref=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 
'LESS', 'BUSY', 'MAY', 'I', 'GO', 'WITH', 'YOU', 'NOW'] +1688-142285-0050-1998: hyp=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 'LESS', 'BUSY', 'MARGAR', 'WITH', 'YOU', 'NOW'] +1688-142285-0051-1999: ref=['THE', 'SHARPNESS', 'IN', 'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0051-1999: hyp=['THE', 'SHARPNESS', 'IN', 'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0052-2000: ref=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 'COURT', 'OPENING', 'OUT', 'OF', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0052-2000: hyp=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 'COURT', 'OPENING', 'OUT', 'INTO', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0053-2001: ref=["YO'LL", 'NOT', 'BE', 'DAUNTED', 'IF', "FATHER'S", 'AT', 'HOME', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0053-2001: hyp=["YOU'LL", 'NOT', 'BE', 'DAUNTED', 'IF', 'FATHER', 'SAID', 'HOME', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0054-2002: ref=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0054-2002: hyp=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0055-2003: ref=['GASPED', 'BESSY', 'AT', 'LAST'] +1688-142285-0055-2003: hyp=['GASPED', 'BESSIE', 'AT', 'LAST'] +1688-142285-0056-2004: ref=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0056-2004: hyp=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0057-2005: ref=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0057-2005: hyp=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0058-2006: ref=['REMEMBER', 'WHO', 'GAVE', 'IT', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0058-2006: hyp=['REMEMBER', 'WHO', 'GAVE', 'IT', 'TO', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0059-2007: ref=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACHED', 'TO'] +1688-142285-0059-2007: hyp=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACH', 'TOO'] +1688-142285-0060-2008: ref=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0060-2008: hyp=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0061-2009: ref=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0061-2009: hyp=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0062-2010: ref=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0062-2010: hyp=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0063-2011: ref=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0063-2011: 
hyp=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0064-2012: ref=['BUT', "HOO'S", 'COME', 'AT', 'LAST', 'AND', "HOO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HOO'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HOO', 'KNOWS', 'NOUGHT', 'ABOUT'] +1688-142285-0064-2012: hyp=['BUT', "WHO'S", 'COME', 'AT', 'LAST', 'AND', "WHO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HE'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HE', 'KNOWS', 'NOT', 'ABOUT'] +1688-142285-0065-2013: ref=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0065-2013: hyp=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0066-2014: ref=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 'MARGARET'] +1688-142285-0066-2014: hyp=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 'MARGARET'] +1688-142285-0067-2015: ref=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0067-2015: hyp=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0068-2016: ref=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MOPED', 'WI', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'FATHER', 'IS', 'NOT', 'THERE'] +1688-142285-0068-2016: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MIRKED', 'WITH', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'EITHER', 'IS', 'NOT', 'THERE'] +1688-142285-0069-2017: ref=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEK', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0069-2017: hyp=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEKS', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0070-2018: ref=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0070-2018: hyp=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0071-2019: ref=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0071-2019: hyp=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0072-2020: ref=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0072-2020: hyp=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0073-2021: ref=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0073-2021: hyp=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0074-2022: ref=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'AT', "MARGARET'S", 'GOWN', "YO'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YO', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0074-2022: hyp=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'THAT', "MARGARET'S", 'GUN', "YOU'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YOU', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0075-2023: ref=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0075-2023: hyp=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0076-2024: ref=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 
'THOUGHTFUL'] +1688-142285-0076-2024: hyp=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 'THOUGHTFUL'] +1688-142285-0077-2025: ref=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 'HOME'] +1688-142285-0077-2025: hyp=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 'HOME'] +1688-142285-0078-2026: ref=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0078-2026: hyp=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0079-2027: ref=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0079-2027: hyp=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0080-2028: ref=['SUPPOSE', 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] +1688-142285-0080-2028: hyp=["S'POSE", 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] +1688-142285-0081-2029: ref=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEIR', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0081-2029: hyp=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEY', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0082-2030: ref=['I', 'MAY', 'BE', 'THE', 'CINDERELLA', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0082-2030: hyp=['I', 'MAY', 'BE', 'THE', 'CINOLA', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0083-2031: ref=['WHAT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0083-2031: hyp=['BUT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0084-2032: ref=['WHY', 'I', 'WOULD', 'APPLY', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0084-2032: hyp=['WHY', 'I', 'WOULD', 'APPLY', 'IT', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0085-2033: ref=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0085-2033: hyp=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0086-2034: ref=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0086-2034: hyp=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0087-2035: ref=['MISSUS', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0087-2035: hyp=['MISTER', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0088-2036: ref=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0088-2036: hyp=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0089-2037: ref=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0089-2037: hyp=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0090-2038: ref=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0090-2038: hyp=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 
'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0091-2039: ref=['MY', 'DEAR', 'SAID', 'MISTER', 'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0091-2039: hyp=['MY', 'DEAR', 'SAID', 'MISTER', 'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0092-2040: ref=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0092-2040: hyp=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0093-2041: ref=['TAKE', 'NOTICE', 'THAT', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', "DON'T", 'AGREE', 'TO', 'THOUGH', "YOU'RE", 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0093-2041: hyp=['TAKE', 'NOTICE', 'THAT', 'THIS', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', "DON'T", 'AGREE', 'TO', 'THOUGH', 'YOU', 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0094-2042: ref=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0094-2042: hyp=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0095-2043: ref=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1688-142285-0095-2043: hyp=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1998-15444-0000-2204: ref=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'OR', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'IF', 'CALLED', 'ON', 'TO', 'DO', 'SO'] +1998-15444-0000-2204: hyp=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'AS', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'OF', 'CALLED', 'UNTO', 'SO'] +1998-15444-0001-2205: ref=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'WAS', 'LAST', 'TAKEN'] +1998-15444-0001-2205: hyp=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'MUST', 'LAST', 'TAKEN'] +1998-15444-0002-2206: ref=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTIS', 'MARKS', 'OF', 'VIOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0002-2206: hyp=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTIS', 'MARKS', 'OF', 'VIOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0003-2207: ref=['IN', 'MAKING', 'A', 'POST', 'MORTEM', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] 
+1998-15444-0003-2207: hyp=['IN', 'MAKING', 'A', 'POST', 'MODE', 'OF', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] +1998-15444-0004-2208: ref=['THE', 'GUT', 'AND', 'THE', 'GULLET', 'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPILLING', 'ITS', 'CONTENTS'] +1998-15444-0004-2208: hyp=['THE', 'GUT', 'AND', 'THE', 'GALLANT', 'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPINNING', 'ITS', 'CONTENTS'] +1998-15444-0005-2209: ref=['IF', 'THE', 'MEDICAL', 'PRACTITIONER', 'IS', 'IN', 'DOUBT', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOMEONE', 'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0005-2209: hyp=['IF', 'THE', 'MEDICA', 'PETITIONERS', 'ENDOWED', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOME', 'ONE', 'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0006-2210: ref=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0006-2210: hyp=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0007-2211: ref=['THE', 'BEST', 'EMETIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0007-2211: hyp=['THE', 'BEST', 'AMATIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0008-2212: ref=['THE', 'DOSE', 'FOR', 'AN', 'ADULT', 'IS', 'TEN', 'MINIMS'] +1998-15444-0008-2212: hyp=['THAT', 'IS', 'FOR', 'AN', 'ADULGE', 'IS', 'TEN', 'MINIMS'] +1998-15444-0009-2213: ref=['APOMORPHINE', 'IS', 'NOT', 'ALLIED', 'IN', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NARCOTIC', 'POISONING'] +1998-15444-0009-2213: hyp=['EPIMORPHIN', 'IS', 'NOT', 'ALID', 'IN', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NARCOTIC', 'POISONING'] +1998-15444-0010-2214: ref=['TICKLING', 'THE', 'FAUCES', 'WITH', 'A', 'FEATHER', 'MAY', 'EXCITE', 'VOMITING'] +1998-15444-0010-2214: hyp=['TITLING', 'THE', 'FORCES', 'WITH', 'THE', 'FEATHER', 'MAY', 'EXCITE', 'WARMITTING'] +1998-15444-0011-2215: ref=['IN', 'USING', 'THE', 'ELASTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SUCKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0011-2215: hyp=['IN', 'USING', 'THE', 'ELECTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SACKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0012-2216: ref=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0012-2216: hyp=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0013-2217: ref=['ANTIDOTES', 'ARE', 'USUALLY', 
'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'BY', 'MOUTH', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] +1998-15444-0013-2217: hyp=['AND', 'HE', 'VOTES', 'A', 'USUALLY', 'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'THE', 'MOUSE', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] +1998-15444-0014-2218: ref=['IN', 'THE', 'ABSENCE', 'OF', 'A', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTUM'] +1998-15444-0014-2218: hyp=['IN', 'THE', 'ABSENCE', 'OF', 'THE', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTIM'] +1998-15444-0015-2219: ref=['NOTICE', 'THE', 'SMELL', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0015-2219: hyp=['NOTICE', 'THE', 'SMAR', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0016-2220: ref=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'ALKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STAS', 'OTTO'] +1998-15444-0016-2220: hyp=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'AKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STARS', 'ARE', 'TWO'] +1998-15444-0017-2221: ref=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'ALKALOIDS', 'ARE', 'SOLUBLE', 'IN', 'ALCOHOL', 'AND', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0017-2221: hyp=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'AKALITES', 'ARE', 'SOLUBLE', 'IN', 'AKELET', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: ref=['THE', 'PURE', 'ALKALOIDS', 'WITH', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CRYSTALLINE', 'FORM', 'ARE', 'SOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: hyp=['THE', 'PURE', 'IKOLOITS', 'WAS', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CRYSTALLINE', 'FORM', 'A', 'SOLUBLE', 'BENEATH', 'THEM'] +1998-15444-0019-2223: ref=['TWO', 'COOL', 'THE', 'MIXTURE', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FILTRATES'] +1998-15444-0019-2223: hyp=['TWO', 'U', 'THE', 'MIXED', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FUR', 'TRADES'] +1998-15444-0020-2224: ref=['THE', 'RESIDUE', 'MAY', 'BE', 'SET', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'IF', 'SUSPECTED', 'EXPEL', 'THE', 'ALCOHOL', 'BY', 'CAREFUL', 'EVAPORATION'] +1998-15444-0020-2224: hyp=['THE', 'RESIDUE', 'MAY', 'BE', 'SAID', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'OF', 'SUSPECTED', 'EXPELLED', 'THE', 'ALCOHOLBA', 'CAREFUL', 'EVAPORATION'] +1998-15444-0021-2225: ref=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'RESINOUS', 'AND', 'FATTY', 'MATTERS', 'SEPARATE'] +1998-15444-0021-2225: hyp=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'ZENOUS', 'AND', 'FATTY', 'MATTER', 'SEPARATE'] +1998-15444-0022-2226: ref=['EVAPORATE', 'THE', 'FILTRATE', 'TO', 'A', 'SYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0022-2226: hyp=['EVAPORATE', 'THE', 'FEDERATE', 'TO', 'A', 'CYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0023-2227: ref=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'EVAPORATE'] +1998-15444-0023-2227: hyp=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'THE', 'REPARATE'] +1998-15444-0024-2228: ref=['FIVE', 'A', 'PART', 'OF', 'THIS', 
'ETHEREAL', 'SOLUTION', 'IS', 'POURED', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOWED', 'TO', 'EVAPORATE'] +1998-15444-0024-2228: hyp=['FIVE', 'A', 'PART', 'OF', 'THIS', 'ASSYRIAL', 'SOLUTION', 'IS', 'PUT', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOW', 'TO', 'EVAPORATE'] +1998-15444-0025-2229: ref=['TO', 'PURIFY', 'IT', 'ADD', 'A', 'SMALL', 'QUANTITY', 'OF', 'DILUTE', 'SULPHURIC', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BULK', 'ADD', 'A', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 'SODA'] +1998-15444-0025-2229: hyp=['TO', 'PURIFY', 'IT', 'ADDISMA', 'QUANTITY', 'OF', 'DELUDE', 'SUFFERG', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BULK', 'ADD', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 'SODA'] +1998-15444-0026-2230: ref=['BOIL', 'THE', 'FINELY', 'DIVIDED', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTH', 'ITS', 'BULK', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'CHLORATE', 'UNTIL', 'THE', 'SOLIDS', 'ARE', 'REDUCED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0026-2230: hyp=['BY', 'THE', 'FINAL', 'DIVIDED', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTHS', 'ITS', 'BAG', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'LOW', 'RAGE', 'UNTIL', 'THE', 'SOLIDS', 'ARE', 'REDUCED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0027-2231: ref=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WITH', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIUM', 'CHLORATE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BARIUM'] +1998-15444-0027-2231: hyp=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WAS', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIAN', 'CHLORIDE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BURIUM'] +1998-29454-0000-2157: ref=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0000-2157: hyp=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0001-2158: ref=['PERUSAL', 'SAID', 'THE', 'PAWNBROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PERNOUNCE', 'IT'] +1998-29454-0001-2158: hyp=['PERUSAL', 'SET', 'UPON', 'BROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PRONOUNCE', 'IT'] +1998-29454-0002-2159: ref=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THAT', 'TREASURE', 'IS', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'UNLESS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAS', 'A', 'SECRET', 'PANEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0002-2159: hyp=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THAT', 'TREASURES', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'AND', 'AS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAD', 'A', 'SECRET', 'PANEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0003-2160: ref=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRUMBLY', 'SURFACE', 'OF', 'THE', 'LATH', 'AND', 'PLASTER'] +1998-29454-0003-2160: hyp=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRAMBLY', 'SURFACE', 'OF', 'THE', 'LAST', 'AND', 'PLASTER'] +1998-29454-0004-2161: ref=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SLIGHTLY', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 'HALFPENNY', 'AND', 'LIMPED', 'OFF', 'OBEDIENTLY'] +1998-29454-0004-2161: hyp=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SAT', 'HE', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 
'HALFPENNY', 'AND', 'LIMP', 'OF', 'OBEDIENTLY'] +1998-29454-0005-2162: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0005-2162: hyp=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0006-2163: ref=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0006-2163: hyp=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0007-2164: ref=["WHAT'S", 'UP', 'MATEY', 'LOST', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0007-2164: hyp=["WHAT'S", 'UP', 'MATE', 'YOU', 'ASKED', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0008-2165: ref=['WHEN', 'HE', 'SAID', 'AVE', 'I', 'BIN', 'ASLEEP'] +1998-29454-0008-2165: hyp=['WHEN', 'HE', 'SAID', 'HAVE', 'I', 'BEEN', 'ASLEEP'] +1998-29454-0009-2166: ref=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0009-2166: hyp=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0010-2167: ref=['NOT', 'EXACKLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0010-2167: hyp=['NOT', 'EXACTLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0011-2168: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MAN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKIE', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0011-2168: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MEN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKY', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0012-2169: ref=['AND', 'THE', 'TEA', 'AND', 'ALL', 'AN', 'THE', 'EGG'] +1998-29454-0012-2169: hyp=['AND', 'THE', 'TUNO', 'AND', 'THE', 'EGG'] +1998-29454-0013-2170: ref=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0013-2170: hyp=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0014-2171: ref=['I', 'SHALL', 'CATCH', 'IT', 'A', 'FAIR', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0014-2171: hyp=['I', 'SHALL', 'CATCH', 'IT', 'IF', 'HER', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0015-2172: ref=['SHE', 'WAS', 'WAITIN', 'FOR', 'THE', 'WOOD', 'TO', 'BOIL', 'THE', 'KETTLE', 'WHEN', 'I', 'COME', 'OUT', 'MOTHER'] +1998-29454-0015-2172: hyp=['SHE', 'WAS', 'WAITING', 'FOR', 'THE', 'WOOD', 'TO', 'BOY', 'THE', 'KETTLE', 'WHEN', 'TO', 'COME', 'OUT', 'MOTHER'] +1998-29454-0016-2173: ref=["AIN'T", 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0016-2173: hyp=['AND', 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0017-2174: ref=['THAT', "AIN'T", 'WHAT', "SHE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0017-2174: hyp=['THEN', 'BUT', "HE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0018-2175: ref=['I', 'GOT', 'TO', 'STICK', 'IT', 'SAID', 'DICKIE', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0018-2175: hyp=['I', 'GOT', 'A', 'STICKET', 'SAID', 'DICKY', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0019-2176: ref=['I', "WOULDN'T", 'GO', 'OME', 'NOT', 'IF', 'I', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0019-2176: hyp=['I', "WOULDN'T", 'GO', 'HOME', 'NOT', 'IF', 'EVER', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0020-2177: 
ref=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0020-2177: hyp=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0021-2178: ref=['I', "AIN'T", 'IT', 'YER', 'HAVE', 'I', 'LIKE', 'WHAT', 'YER', 'AUNT', 'DO'] +1998-29454-0021-2178: hyp=['AND', 'A', 'DEAR', 'HAVE', 'I', 'LIKE', 'WHAT', 'YOU', "AREN'T", 'TO'] +1998-29454-0022-2179: ref=['WELL', "THAT'LL", 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MAN', 'I', 'AM'] +1998-29454-0022-2179: hyp=['WELL', 'THOU', 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MEN', 'I', 'AM'] +1998-29454-0023-2180: ref=['THE', "MAN'S", 'MANNER', 'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0023-2180: hyp=['THE', "MAN'S", 'MANNER', 'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0024-2181: ref=['THE', 'SUN', 'SHOT', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'IN', 'THE', 'HEDGE'] +1998-29454-0024-2181: hyp=['THE', 'SUN', 'HAD', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'AND', 'THE', 'HEDGE'] +1998-29454-0025-2182: ref=['A', 'BIRD', 'PAUSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0025-2182: hyp=['A', 'BIRD', 'PASSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0026-2183: ref=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'A', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0026-2183: hyp=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'AND', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0027-2184: ref=['AN', 'I', 'ASKS', 'YOU', 'LET', 'ME', 'COME', 'ALONGER', 'YOU', 'GOT', 'THAT'] +1998-29454-0027-2184: hyp=['AND', 'I', 'ASK', 'YOU', 'LET', 'ME', 'COME', 'ALONG', 'ARE', 'YOU', 'GOT', 'THAT'] +1998-29454-0028-2185: ref=['GET', 'IT', 'WROTE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0028-2185: hyp=['GET', 'US', 'RODE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0029-2186: ref=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0029-2186: hyp=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0030-2187: ref=['NOW', "WE'RE", 'SQUARE', 'HE', 'SAID'] +1998-29454-0030-2187: hyp=['NOW', 'WE', 'ARE', 'SQUARE', 'HE', 'SAID'] +1998-29454-0031-2188: ref=['THEY', 'COULD', 'PUT', 'A', 'MAN', 'AWAY', 'FOR', 'LESS', 'THAN', 'THAT'] +1998-29454-0031-2188: hyp=['THEY', 'COULD', 'PUT', 'A', 'MEN', 'AWAY', 'FOR', 'US', 'THAN', 'THAT'] +1998-29454-0032-2189: ref=['I', 'SEE', 'THAT', 'THERE', 'IN', 'A', 'BOOK', 'SAID', 'DICKIE', 'CHARMED'] +1998-29454-0032-2189: hyp=['I', 'SEE', 'THAT', 'THEN', 'A', 'BOOK', 'SAID', 'DICK', 'HAD', 'SHUMMED'] +1998-29454-0033-2190: ref=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'I', 'WUNNERED', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0033-2190: hyp=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'A', 'ONE', 'AT', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0034-2191: ref=['WILD', 'ONES', "AIN'T", 'ALF', 'THE', 'SIZE', 'I', 'LAY'] +1998-29454-0034-2191: hyp=['WHITE', 'ONES', 'AND', 'A', 'HALF', 'SIZE', 'I', 'LAY'] +1998-29454-0035-2192: 
ref=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0035-2192: hyp=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0036-2193: ref=['AH', 'SAID', 'DICKIE', 'AND', 'A', 'FULL', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0036-2193: hyp=['AH', 'SAID', 'DICKY', 'AND', 'A', 'FOOT', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0037-2194: ref=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MUD', 'OFF', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0037-2194: hyp=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MATTER', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0038-2195: ref=['DICKIE', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] +1998-29454-0038-2195: hyp=['DICKY', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] +1998-29454-0039-2196: ref=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', "REG'LER", 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0039-2196: hyp=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', "REG'LAR", 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0040-2197: ref=['SOME', 'BLOKES', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0040-2197: hyp=['SOME', 'LOOKS', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0041-2198: ref=['IF', "YOU'RE", 'CLEAN', 'THEY', 'SAY', 'HONEST', 'POVERTY', 'AN', 'IF', "YOU'RE", 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0041-2198: hyp=['IF', 'YO', 'CLEAN', 'THEY', 'SAY', 'ON', 'DIS', 'POVERTY', 'AN', 'IF', 'YO', 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0042-2199: ref=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0042-2199: hyp=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0043-2200: ref=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'OW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0043-2200: hyp=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'HOW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0044-2201: ref=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0044-2201: hyp=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0045-2202: ref=['STEP', 'OUT', 'SONNY', 'OR', "WE'LL", 'NEVER', 'GET', 'THERE', 'THIS', 'SIDE', 'CHRISTMAS'] +1998-29454-0045-2202: hyp=['SPATANI', 'ALBER', 'NEVER', 'GET', 'THERE', 'THIS', 'SORT', 'OF', 'CHRISTMAS'] +1998-29454-0046-2203: ref=['WELL', "YOU'LL", 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29454-0046-2203: hyp=['WELL', 'YOU', 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29455-0000-2232: ref=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THERE', 'WERE', 'LOUD', 'VOICED', 'ARGUMENTS', 'OR', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0000-2232: hyp=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THEY', 'WERE', 'ALL', 'OUTWARDS', 'ARGUMENTS', 'OR', 'A', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0001-2233: ref=["WHAT'S", 'ALL', 'THAT', 'THERE', 'DICKIE', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'KNOBBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 
'TIED', 'ON', 'TO', 'THE', "PERAMBULATOR'S", 'FRONT'] +1998-29455-0001-2233: hyp=["WHAT'S", 'ON', 'THAT', 'THERE', 'DICKIE', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'NOBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 'TIED', 'ON', 'TO', 'THE', "PRAMULATOR'S", 'FRONT'] +1998-29455-0002-2234: ref=['TELL', 'YER', 'WHAT', 'MATE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', "I'D", 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0002-2234: hyp=['TELL', 'YOU', 'WHAT', 'MADE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'I', 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0003-2235: ref=['SWELP', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0003-2235: hyp=['SWAP', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0004-2236: ref=['OH', 'LOOK', 'SAID', 'DICKIE', 'THE', 'FLOWERS'] +1998-29455-0004-2236: hyp=['O', 'LOOK', 'SAID', 'DICKY', 'THE', 'FLOWERS'] +1998-29455-0005-2237: ref=["THEY'RE", 'ONLY', 'WEEDS', 'SAID', 'BEALE'] +1998-29455-0005-2237: hyp=['THERE', 'ONLY', 'READS', 'SAID', 'BEER'] +1998-29455-0006-2238: ref=['BUT', 'I', 'SHALL', 'HAVE', 'THEM', 'WHILE', "THEY'RE", 'ALIVE', 'SAID', 'DICKIE', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PAWNBROKER', 'ABOUT', 'THE', 'MOONFLOWERS'] +1998-29455-0006-2238: hyp=['BUT', 'I', 'SHOULD', 'HAVE', 'THEM', 'WHETHER', 'ALIVE', 'SAID', 'DICKY', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PONDBROKER', 'BUT', 'THE', 'MOONFLOWERS'] +1998-29455-0007-2239: ref=['HI', 'THERE', 'GOES', 'A', 'RABBIT'] +1998-29455-0007-2239: hyp=['AY', 'THERE', 'WAS', 'A', 'RABBIT'] +1998-29455-0008-2240: ref=['SEE', 'IM', 'CROST', 'THE', 'ROAD', 'THERE', 'SEE', 'HIM'] +1998-29455-0008-2240: hyp=['SEEM', 'QUEST', 'ROAD', 'THERE', 'SEEM'] +1998-29455-0009-2241: ref=['HOW', 'BEAUTIFUL', 'SAID', 'DICKIE', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0009-2241: hyp=['HOW', 'BEAUTIFUL', 'SAID', 'DICKY', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0010-2242: ref=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0010-2242: hyp=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0011-2243: ref=["OW'M", 'I', 'TO', 'WHEEL', 'THE', 'BLOOMIN', 'PRAM', 'IF', 'YOU', 'GOES', 'ON', 'LIKE', 'AS', 'IF', 'YOU', 'WAS', 'A', 'BAG', 'OF', 'EELS'] +1998-29455-0011-2243: hyp=['AM', 'I', 'TO', 'REA', 'THE', 'ROOM', 'IN', 'PEM', 'IF', 'YOUR', 'GOES', 'ON', 'LIKE', 'AS', 'IF', 'YOU', 'WAS', 'A', 'PEG', 'OF', 'EELS'] +1998-29455-0012-2244: ref=['I', 'LIKE', 'YOU', 'NEXTER', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXTER', 'NEXT', 'DOOR'] +1998-29455-0012-2244: hyp=['I', 'LIKE', 'YOU', 'NEXT', 'TO', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXT', 'THE', 'NEXT', 'DOOR'] +1998-29455-0013-2245: ref=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0013-2245: hyp=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0014-2246: ref=['DICKIE', 'QUICK', 'TO', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0014-2246: hyp=['DICKY', 'QUICKLY', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0015-2247: ref=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0015-2247: hyp=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0016-2248: ref=['OH', 'WELL', 'DONE', 'LITTLE', 'UN', 'SAID', 'MISTER', 
'BEALE', 'TO', 'HIMSELF'] +1998-29455-0016-2248: hyp=['OH', 'WELL', 'DONE', 'LITTLE', 'ONE', 'SAID', 'MISTER', 'BEE', 'TO', 'HIMSELF'] +1998-29455-0017-2249: ref=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 'BY', 'A', 'PENNY', 'AND', 'OH', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0017-2249: hyp=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 'BY', 'A', 'PENNY', 'AND', 'O', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0018-2250: ref=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', "ERE'S", 'THE', 'STEEVER'] +1998-29455-0018-2250: hyp=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', 'YES', 'THE', 'STEVER'] +1998-29455-0019-2251: ref=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 'BEALE', 'RADIANT', 'WITH', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'A', 'KID', 'DONE'] +1998-29455-0019-2251: hyp=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 'BEARD', 'RADIANT', 'WAS', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'KIT', 'DONE'] +1998-29455-0020-2252: ref=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0020-2252: hyp=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0021-2253: ref=['PLEASE', 'DO', 'NOT', 'BE', 'TOO', 'SHOCKED'] +1998-29455-0021-2253: hyp=['PLEASE', "DON'T", 'REPEAT', 'TOO', 'SHOCKED'] +1998-29455-0022-2254: ref=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0022-2254: hyp=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0023-2255: ref=['TO', 'THE', 'ELDER', 'TRAMP', 'LIES', 'AND', 'BEGGING', 'WERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0023-2255: hyp=['TO', 'THE', 'OTHER', 'TRAMP', 'LIES', 'IN', 'PEGGING', 'WHERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0024-2256: ref=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WITH', 'THE', 'GREEN', 'CURTAINS', 'URGED', 'DICKIE'] +1998-29455-0024-2256: hyp=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WAS', 'THE', 'GREEN', 'CURTAINS', 'ADDED', 'THE', 'KEI'] +1998-29455-0025-2257: ref=['WHICH', 'THIS', "AIN'T", 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0025-2257: hyp=['WHICH', 'THIS', 'END', 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0026-2258: ref=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WILD', 'THINGS', 'IN', 'THE', 'HEDGES', 'THE', 'BARKING', 'OF', 'DOGS', 'IN', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0026-2258: hyp=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WHITE', 'THINGS', 'ON', 'THE', 'HATCHES', 'THE', 'BARKING', 'OF', 'DOGS', 'AND', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0027-2259: ref=['THE', 'NEW', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 'FROM', 'WHOM', 'IT', 'WAS', 'WORTH', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 
'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEALE', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0027-2259: hyp=['THEN', 'YOU', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 'FROM', 'WHOM', 'IT', 'WAS', 'WORSE', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEER', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0028-2260: ref=['BLESSED', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 'AND', 'OVER', 'AGAIN'] +1998-29455-0028-2260: hyp=['BLEST', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 'AND', 'OVER', 'AGAIN'] +1998-29455-0029-2261: ref=['CLEVER', 'AS', 'A', 'TRAINDAWG', 'E', 'IS', 'AN', 'ALL', 'OUTER', 'IS', 'OWN', 'EAD'] +1998-29455-0029-2261: hyp=['CLEVER', 'AS', 'A', 'TRAIN', 'DOG', 'IS', 'IN', 'OUR', "OUTER'S", 'OWN', 'HEAD'] +1998-29455-0030-2262: ref=['I', "AIN'T", 'SURE', 'AS', 'I', "ADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ANDS', 'LIKE', 'YOU', 'JIM'] +1998-29455-0030-2262: hyp=['I', 'AM', 'SURE', 'AS', 'I', "HADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ENDS', 'LIKE', 'EUGEN'] +1998-29455-0031-2263: ref=['I', 'OPE', "E'S", 'CLEVER', 'ENOUGH', 'TO', 'DO', 'WOT', "E'S", 'TOLD', 'KEEP', 'IS', 'MUG', 'SHUT', "THAT'S", 'ALL'] +1998-29455-0031-2263: hyp=['IOPIUS', 'LOVE', 'ENOUGH', 'TO', 'DO', 'WHAT', 'IS', 'TOLD', 'HE', 'WAS', 'MUCH', 'AT', "THAT'S", 'ALL'] +1998-29455-0032-2264: ref=['IF', "E'S", 'STRAIGHT', "E'LL", 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', "AIN'T", "I'LL", 'DO', 'FOR', 'IM', 'SEE'] +1998-29455-0032-2264: hyp=['IF', 'HE', 'STRAYED', 'YOU', 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', 'AND', "I'LL", 'DO', 'FOR', 'HIM', 'SEE'] +1998-29455-0033-2265: ref=['SEE', 'THAT', 'BLOKE', 'JUST', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YUSS', 'SAID', 'DICKIE'] +1998-29455-0033-2265: hyp=['SEE', 'THAT', 'LOCTICE', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YES', 'SAID', 'DICKY'] +1998-29455-0034-2266: ref=['WELL', 'YOU', 'NEVER', 'SEE', 'IM'] +1998-29455-0034-2266: hyp=['WELL', 'YOU', 'NEVER', 'SEE', 'HIM'] +1998-29455-0035-2267: ref=['IF', 'ANY', 'ONE', 'ARSTS', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'IM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'IM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'IM'] +1998-29455-0035-2267: hyp=['IF', 'ANY', 'ONE', 'ASKED', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'HIM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'HIM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'HIM'] +1998-29455-0036-2268: ref=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEALE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0036-2268: hyp=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0037-2269: ref=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEALE'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0037-2269: hyp=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEAT'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0038-2270: ref=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WHERE', 'THE', 
'SKYLARKS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'VERY', 'STRONG', 'AND', 'GRAY'] +1998-29455-0038-2270: hyp=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WITH', 'THE', 'SKYLECKS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'WHERE', 'STRONG', 'AND', 'GRAY'] +1998-29455-0039-2271: ref=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKIE'] +1998-29455-0039-2271: hyp=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKY'] +2033-164914-0000-661: ref=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] +2033-164914-0000-661: hyp=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] +2033-164914-0001-662: ref=['BUT', 'SHE', 'SAID', 'WHOMSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0001-662: hyp=['BUT', 'SHE', 'SAID', 'WHOSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0002-663: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0002-663: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0003-664: ref=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0003-664: hyp=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0004-665: ref=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0004-665: hyp=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0005-666: ref=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOTH', 'ZAU', 'AL', 'MAKAN'] +2033-164914-0005-666: hyp=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOMAN'] +2033-164914-0006-667: ref=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0006-667: hyp=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0007-668: ref=['AND', 'HE', 'ALSO', 'IMPROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTICHS'] +2033-164914-0007-668: hyp=['AND', 'HE', 'ALSO', 'PROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTINCTS'] +2033-164914-0008-669: ref=['WHEN', 'NUZHAT', 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MIND', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 'HER', 'BROTHER', 'AND', 'THEIR', 'WHILOME', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'AT', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0008-669: hyp=['WHEN', "NUZHA'S", 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MINE', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 
'HER', 'BROTHER', 'AND', 'THEIR', 'WILM', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'TO', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0009-670: ref=['HE', 'WHO', 'RECITED', 'THE', 'FIRST', 'TIME', 'HATH', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'I', 'HEARD', 'HIM', 'HARD', 'BY'] +2033-164914-0009-670: hyp=['HE', 'WHO', 'RECITED', 'THE', 'FIRST', 'TIME', 'HAD', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'HEARD', 'HIM', 'HEART', 'BY'] +2033-164914-0010-671: ref=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0010-671: hyp=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0011-672: ref=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0011-672: hyp=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0012-673: ref=['RETURN', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0012-673: hyp=['RETURN', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0013-674: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0013-674: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0014-675: ref=['BUT', 'THE', 'EUNUCH', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0014-675: hyp=['BUT', 'THE', 'EUNUCH', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0015-676: ref=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZAU', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'I', 'KNOW', 'HIM', 'NOT'] +2033-164914-0015-676: hyp=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZA', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'THEY', 'KNOW', 'HIM', 'NOT'] +2033-164914-0016-677: ref=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'MEET', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0016-677: hyp=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0017-678: ref=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 
'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'A', 'ROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 'MISTRESS', 'WITHOUT', 'TIDINGS'] +2033-164914-0017-678: hyp=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'AROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 'MISTRESS', 'WITHOUT', 'HIDINGS'] +2033-164914-0018-679: ref=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'IGNOMY', "WHATE'ER", 'THE', 'BITTER', 'CUP', 'I', 'DRAIN', 'FAR', 'BE', 'FRO', 'ME', 'THAT', 'LAND', 'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0018-679: hyp=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'IGNOMY', 'WHATEVER', 'THE', 'BITTER', 'CUPIED', 'DRAIN', 'FAR', 'BE', 'FROM', 'ME', 'THE', 'LAND', 'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0019-680: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZAU', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0019-680: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZA', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0020-681: ref=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0020-681: hyp=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THAT', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0021-682: ref=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRACIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURN', 'IN', 'WEAL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0021-682: hyp=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRECIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURNING', 'WHEEL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0022-683: ref=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'ZAU', 'AL', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSES', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADY', 'IS', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THY', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164914-0022-683: hyp=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'ZAO', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSEST', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADIES', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THINE', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164915-0000-643: ref=['AND', 'ALSO', 'THESE'] 
+2033-164915-0000-643: hyp=['AND', 'ALSO', 'THESE'] +2033-164915-0001-644: ref=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 'A', 'FAINTING', 'FIT'] +2033-164915-0001-644: hyp=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 'A', 'FAINTING', 'FIT'] +2033-164915-0002-645: ref=['WHEN', 'THE', 'EUNUCH', 'SAW', 'THIS', 'CASE', 'HE', 'WONDERED', 'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0002-645: hyp=['WHEN', 'THE', 'EUNUCHS', 'SAW', 'THESE', 'CAVES', 'HE', 'WONDERED', 'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0003-646: ref=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'NUZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MASTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0003-646: hyp=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'UZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MYSTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0004-647: ref=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADAWI', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARRKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'A', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARRKAN'] +2033-164915-0004-647: hyp=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADARI', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'HER', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARKAN'] +2033-164915-0005-648: ref=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0005-648: hyp=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0006-649: ref=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRTHING', 
'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0006-649: hyp=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOCKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRDING', 'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0007-650: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOKER', 'GIRTHED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'OH', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] +2033-164915-0007-650: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOCKER', 'GIRDED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'OV', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] +2033-164915-0008-651: ref=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'ET', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0008-651: hyp=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'AT', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0009-652: ref=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THESE', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0009-652: hyp=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THIS', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0010-653: ref=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BETIDE', 'THEE'] +2033-164915-0010-653: hyp=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BE', 'TIDE', 'THEE'] +2033-164915-0011-654: ref=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'COMING', 'ILLS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0011-654: hyp=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'CARMINALS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0012-655: ref=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'THE', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0012-655: hyp=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'HIS', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0013-656: ref=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARRKAN', 'SON', 'OF', 'OMAR', 'BIN', 'AL', "NU'UMAN", 'LORD', 'OF', 'BAGHDAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0013-656: hyp=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARKAN', 'SONG', 'OF', 'OMAR', 'BIN', 'AL', 'NUMAN', 'LORD', 'OF', 'AGAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 
'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0014-657: ref=['SO', 'FARE', 'YE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFAL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'DANDAN'] +2033-164915-0014-657: hyp=['SOPHIA', 'HE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFALL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'TAN'] +2033-164915-0015-658: ref=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0015-658: hyp=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0016-659: ref=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARRKAN', 'AND', 'MAKE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0016-659: hyp=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARKAN', 'AND', 'MADE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0017-660: ref=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'ZAU', 'AL', 'MAKAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIGHS', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HIJAZ', 'AND', 'NONE', 'WOTTETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164915-0017-660: hyp=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'THOU', 'A', 'MACAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIES', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HI', 'JAS', 'AND', 'NONE', 'WHATETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164916-0000-684: ref=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WONDER', 'OF', 'WONDERS'] +2033-164916-0000-684: hyp=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WANDER', 'OF', 'WONDERS'] +2033-164916-0001-685: ref=['KNOW', 'O', 'CHIEF', 'WAZIR', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'THAT', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORETH', 'TO', 'YOU', 'ZAU', 'AL', 'MAKAN', 'AND', 'HIS', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0001-685: hyp=['NO', 'O', 'CHIEF', 'WAZIR', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'LET', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORE', 'IT', 'TO', 'YOU', 'THOU', 'ARMANQUIN', 'AND', 'HE', 
'SISTER', 'KNOWSAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0002-686: ref=['WHEN', 'THE', 'MINISTER', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFEL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0002-686: hyp=['WHEN', 'THE', 'MEANS', 'SIR', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFEL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0003-687: ref=['ZAU', 'AL', 'MAKAN', 'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THIS', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SET', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0003-687: hyp=['ZOMAN', 'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THE', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SAID', 'TO', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0004-688: ref=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARRKAN'] +2033-164916-0004-688: hyp=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARKAN'] +2033-164916-0005-689: ref=['AFTER', 'AWHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'FULL', 'TIDE', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0005-689: hyp=['AFTER', 'A', 'WHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'POOL', 'TIED', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0006-690: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0006-690: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0007-691: ref=['AND', 'IN', 'IT', 'ALL', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0007-691: hyp=['ANY', 'NEAT', 'OR', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0008-692: ref=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAU', 'AL', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRE', 'SANS', 'PEER'] +2033-164916-0008-692: hyp=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAO', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRES', 'SONSPIER'] +2033-164916-0009-693: ref=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'A', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 
'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0009-693: hyp=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'AT', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0010-694: ref=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2033-164916-0010-694: hyp=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THAT', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2414-128291-0000-2689: ref=['WHAT', 'HATH', 'HAPPENED', 'UNTO', 'ME'] +2414-128291-0000-2689: hyp=['WHAT', 'HATH', 'HAPPENED', 'TO', 'ME'] +2414-128291-0001-2690: ref=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WARM', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THE', 'NEIGHBOURHOOD'] +2414-128291-0001-2690: hyp=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WRONG', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THE', 'NEIGHBOURHOOD'] +2414-128291-0002-2691: ref=['WHEN', 'HOWEVER', 'ZARATHUSTRA', 'WAS', 'QUITE', 'NIGH', 'UNTO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'THAT', 'A', 'HUMAN', 'VOICE', 'SPAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KINE', 'AND', 'APPARENTLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0002-2691: hyp=['WHO', 'READ', 'HOWEVER', 'THEIR', 'TWO', 'STRAW', 'WAS', 'QUITE', 'NIGH', 'AND', 'TO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'WITH', 'HUMAN', 'VOICE', 'TAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KIND', 'AND', 'A', 'FRIENDLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0003-2692: ref=['WHAT', 'DO', 'I', 'HERE', 'SEEK'] +2414-128291-0003-2692: hyp=['FOR', 'DIEU', 'I', 'HERE', 'SEEK'] +2414-128291-0004-2693: ref=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', 'SEEKEST', 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0004-2693: hyp=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', "SEEK'ST", 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0005-2694: ref=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALREADY', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WERE', 'THEY', 'ABOUT', 'TO', 'GIVE', 'ME', 'THEIR', 'ANSWER'] +2414-128291-0005-2694: hyp=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALWAYS', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WERE', 'THEY', 'ABOUT', 'TO', 'GIVE', 'ME', 'THE', 'ANSWER'] +2414-128291-0006-2695: ref=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICTION'] +2414-128291-0006-2695: hyp=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICTION'] +2414-128291-0007-2696: ref=['WHO', 'HATH', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0007-2696: hyp=['WHO', 'HAD', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0008-2697: ref=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0008-2697: hyp=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0009-2698: ref=['BUT', 'BEHOLD', 'THESE', 'KINE'] +2414-128291-0009-2698: hyp=['BUT', 'BEHOLD', 'HIS', 'KIND'] +2414-128291-0010-2699: ref=['THE', 'KINE', 
'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0010-2699: hyp=['THE', 'KIND', 'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0011-2700: ref=['WANTON', 'AVIDITY', 'BILIOUS', 'ENVY', 'CAREWORN', 'REVENGE', 'POPULACE', 'PRIDE', 'ALL', 'THESE', 'STRUCK', 'MINE', 'EYE'] +2414-128291-0011-2700: hyp=['WANTON', 'ALDITY', 'BILIOUS', 'ENVY', 'CAREWORN', 'REVENGE', 'POPULOUS', 'PRIDE', 'ALL', 'DISTRACT', 'MIGHT', 'EYE'] +2414-128291-0012-2701: ref=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'THAT', 'THE', 'POOR', 'ARE', 'BLESSED'] +2414-128291-0012-2701: hyp=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'LITTLE', 'POOR', 'ARE', 'BLEST'] +2414-128291-0013-2702: ref=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 'THE', 'KINE', 'AND', 'WHY', 'IS', 'IT', 'NOT', 'WITH', 'THE', 'RICH'] +2414-128291-0013-2702: hyp=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 'THE', 'KIND', 'AND', 'WHY', 'IS', 'IT', 'NOT', 'WITH', 'A', 'RICH'] +2414-128291-0014-2703: ref=['WHY', 'DOST', 'THOU', 'TEMPT', 'ME'] +2414-128291-0014-2703: hyp=['WHY', 'THOSE', 'DOUB', 'TEMPT', 'ME'] +2414-128291-0015-2704: ref=['ANSWERED', 'THE', 'OTHER'] +2414-128291-0015-2704: hyp=['ANSWERED', 'HER'] +2414-128291-0016-2705: ref=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0016-2705: hyp=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0017-2706: ref=['THUS', 'SPAKE', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'PERSPIRED', 'WITH', 'HIS', 'WORDS', 'SO', 'THAT', 'THE', 'KINE', 'WONDERED', 'ANEW'] +2414-128291-0017-2706: hyp=['DOES', 'BEG', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'POISPIRED', 'WITH', 'HIS', 'WORDS', 'TO', 'INTER', 'KIND', 'WONDERED', 'ANEW'] +2414-128291-0018-2707: ref=['THOU', 'DOEST', 'VIOLENCE', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUNT', 'WHEN', 'THOU', 'USEST', 'SUCH', 'SEVERE', 'WORDS'] +2414-128291-0018-2707: hyp=['THOU', 'DOEST', 'WILDEST', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUND', 'AND', 'THOU', 'USEST', 'SUCH', 'SAVOUR', 'WORDS'] +2414-128291-0019-2708: ref=['THEY', 'ALSO', 'ABSTAIN', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0019-2708: hyp=['THEY', 'ALSO', 'ABSTAINED', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0020-2709: ref=['WELL'] +2414-128291-0020-2709: hyp=['WELL'] +2414-128291-0021-2710: ref=['SAID', 'ZARATHUSTRA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MINE', 'ANIMALS', 'MINE', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEIR', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0021-2710: hyp=['SAID', 'GUESTRA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MINE', 'ANIMALS', 'MY', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEY', 'ARE', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0022-2711: ref=['AND', 'TALK', 'TO', 'MINE', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0022-2711: hyp=['AND', 'TALK', 'TO', 'MY', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0023-2712: ref=['NOW', 'HOWEVER', 'TAKE', 'LEAVE', 'AT', 'ONCE', 'OF', 'THY', 'KINE', 'THOU', 'STRANGE', 'ONE'] +2414-128291-0023-2712: hyp=['NOW', 'HOWEVER', 'THEY', 'LEAVE', 'IT', 'WAS', 'OF', 'THAT', 'KIND', 'THOU', 'STRANGE', 'WORLD'] +2414-128291-0024-2713: ref=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0024-2713: hyp=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0025-2714: ref=['FOR', 'THEY', 'ARE', 'THY', 'WARMEST', 
'FRIENDS', 'AND', 'PRECEPTORS'] +2414-128291-0025-2714: hyp=['FOR', 'THEY', 'ARE', 'DIVERMISH', 'FRIENDS', 'AND', 'PERCEPTORS'] +2414-128291-0026-2715: ref=['THOU', 'EVIL', 'FLATTERER'] +2414-128291-0026-2715: hyp=['THOU', 'EVEN', 'SLACKER'] +2414-128292-0000-2618: ref=['WHITHER', 'HATH', 'MY', 'LONESOMENESS', 'GONE', 'SPAKE', 'HE'] +2414-128292-0000-2618: hyp=['WHITHER', 'HAD', 'MY', 'LONESOME', 'DISCOUR', 'SPAKE', 'HE'] +2414-128292-0001-2619: ref=['MY', 'SHADOW', 'CALLETH', 'ME'] +2414-128292-0001-2619: hyp=['MY', 'SHADOW', 'CAUGHT', 'ME'] +2414-128292-0002-2620: ref=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0002-2620: hyp=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0003-2621: ref=['LET', 'IT', 'RUN', 'AFTER', 'ME', 'I', 'RUN', 'AWAY', 'FROM', 'IT'] +2414-128292-0003-2621: hyp=['NEKHLUD', 'TRUE', 'ENOUGH', 'TO', 'ME', 'I', 'RAN', 'AWAY', 'FROM', 'IT'] +2414-128292-0004-2622: ref=['THUS', 'SPAKE', 'ZARATHUSTRA', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0004-2622: hyp=['THUS', 'BEING', 'THEIR', 'TOO', 'STRIKE', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0005-2623: ref=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0005-2623: hyp=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0006-2624: ref=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', 'FOOLS', 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0006-2624: hyp=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', "FOOD'S", 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0007-2625: ref=['BUT', 'DOTH', 'ZARATHUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'HIS', 'SHADOW'] +2414-128292-0007-2625: hyp=['BY', 'DIRTS', 'ARE', 'TOUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'A', 'SHADOW'] +2414-128292-0008-2626: ref=['ALSO', 'METHINKETH', 'THAT', 'AFTER', 'ALL', 'IT', 'HATH', 'LONGER', 'LEGS', 'THAN', 'MINE'] +2414-128292-0008-2626: hyp=['ALSO', 'METHINK', 'IT', 'THAT', 'AFTER', 'ALL', 'IT', 'HAD', 'LONG', 'OR', 'LESS', 'THAN', 'MINE'] +2414-128292-0009-2627: ref=['FOR', 'WHEN', 'ZARATHUSTRA', 'SCRUTINISED', 'HIM', 'WITH', 'HIS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'AS', 'BY', 'A', 'SUDDEN', 'APPARITION', 'SO', 'SLENDER', 'SWARTHY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'DID', 'THIS', 'FOLLOWER', 'APPEAR'] +2414-128292-0009-2627: hyp=['FOR', 'WHEN', 'THEIR', 'DISTRESS', 'COGNIZED', 'HIM', 'IT', 'IS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'ALBERT', 'A', 'CERTAIN', 'APPARITION', 'SO', 'SLENDER', 'SWALLTY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'DID', 'HIS', 'FOLLOWER', 'APPEAR'] +2414-128292-0010-2628: ref=['ASKED', 'ZARATHUSTRA', 'VEHEMENTLY', 'WHAT', 'DOEST', 'THOU', 'HERE'] +2414-128292-0010-2628: hyp=['I', 'TAKE', 'TO', 'EXTRAVE', 'IMAGING', 'WHAT', 'DOST', 'THOU', 'HEAR'] +2414-128292-0011-2629: ref=['AND', 'WHY', 'CALLEST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0011-2629: hyp=['AND', 'WHY', 'COLLARST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0012-2630: ref=['THOU', 'ART', 'NOT', 'PLEASING', 'UNTO', 'ME'] +2414-128292-0012-2630: hyp=['THOU', 'ART', 'NOT', 'PLEASING', 'INTO', 'ME'] +2414-128292-0013-2631: ref=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0013-2631: hyp=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0014-2632: ref=['O', 'EARTH', 'THOU', 'HAST', 'BECOME', 'TOO', 'ROUND', 'FOR', 'ME'] +2414-128292-0014-2632: hyp=['O', 'ART', 'THOU', 'HAST', 'BECOME', 'TO', 'ROUND', 'FOR', 'ME'] +2414-128292-0015-2633: ref=['WHEN', 'THE', 'DEVIL', 'CASTETH', 'HIS', 
'SKIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0015-2633: hyp=['WITH', 'THE', 'DEVIL', 'CAST', 'AT', 'HIS', 'KIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0016-2634: ref=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0016-2634: hyp=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0017-2635: ref=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0017-2635: hyp=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0018-2636: ref=['THEN', 'ONLY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] +2414-128292-0018-2636: hyp=['THEN', 'OLD', 'LADY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] +2414-128292-0019-2637: ref=['HOW', 'HAVE', 'I', 'STILL', 'INCLINATION'] +2414-128292-0019-2637: hyp=['HOW', 'I', 'STILL', 'INCLINATION'] +2414-128292-0020-2638: ref=['HAVE', 'I', 'STILL', 'A', 'GOAL'] +2414-128292-0020-2638: hyp=['EH', 'I', 'STILL', 'A', 'GOLD'] +2414-128292-0021-2639: ref=['A', 'HAVEN', 'TOWARDS', 'WHICH', 'MY', 'SAIL', 'IS', 'SET'] +2414-128292-0021-2639: hyp=['A', 'HAIRY', 'DOOR', 'SPEECH', 'MY', 'SAILOR', 'SAID'] +2414-128292-0022-2640: ref=['FOR', 'IT', 'DO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'SOUGHT', 'BUT', 'HAVE', 'NOT', 'FOUND', 'IT'] +2414-128292-0022-2640: hyp=['FOR', 'IT', 'TOO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'THOUGHT', 'IT', 'HATH', 'NOT', 'FOUND', 'IT'] +2414-128292-0023-2641: ref=['O', 'ETERNAL', 'EVERYWHERE', 'O', 'ETERNAL', 'NOWHERE', 'O', 'ETERNAL', 'IN', 'VAIN'] +2414-128292-0023-2641: hyp=['I', 'TURNED', 'EVERYWHERE', 'WHO', 'HAD', 'TURNED', 'OUT', 'NOWHERE', 'WHO', 'HAD', 'TURNED', 'UP', 'IN', 'VAIN'] +2414-128292-0024-2642: ref=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0024-2642: hyp=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0025-2643: ref=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0025-2643: hyp=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0026-2644: ref=['THY', 'DANGER', 'IS', 'NOT', 'SMALL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WANDERER'] +2414-128292-0026-2644: hyp=['THY', 'DANGER', 'HIS', 'PURSUAL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WONDER'] +2414-128292-0027-2645: ref=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOY', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0027-2645: hyp=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOYED', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0028-2646: ref=['BEWARE', 'LEST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FAITH', 'CAPTURE', 'THEE', 'A', 'HARD', 'RIGOROUS', 'DELUSION'] +2414-128292-0028-2646: hyp=['BEWARE', 'LEST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FATE', 'CAPTURE', 'THEE', 'A', 'HARD', 'RECKLESS', 'DELUSION'] +2414-128292-0029-2647: ref=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCETH', 'AND', 'TEMPTETH', 'THEE'] +2414-128292-0029-2647: hyp=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCE', 'IT', 'AND', 'TEMPTED', 'THEE'] +2414-128292-0030-2648: ref=['THOU', 'HAST', 'LOST', 'THY', 'GOAL'] +2414-128292-0030-2648: hyp=['THOU', 'HAST', 'LOST', 'THEIR', 'GOULD'] +2414-128292-0031-2649: ref=['THOU', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'THOU', 'TIRED', 'BUTTERFLY'] +2414-128292-0031-2649: hyp=['THE', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'NOW', 'TIRED', 'BUT', 'TO', 'FLY'] +2414-128292-0032-2650: ref=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'AND', 'A', 'HOME', 'THIS', 'EVENING'] +2414-128292-0032-2650: hyp=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'IN', 'THE', 'WHOLE', 'THIS', 'EVENING'] 
+2414-159411-0000-2653: ref=['ONCE', 'UPON', 'A', 'TIME', 'A', 'BRAHMAN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'HAD', 'BEEN', 'SHUT', 'UP', 'BY', 'THE', 'VILLAGERS', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0000-2653: hyp=['ONCE', 'UPON', 'HER', 'TIME', 'A', 'BRAHMAN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'AT', 'MONS', 'SHUT', 'UP', 'BY', 'THE', 'VILLAGES', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0001-2654: ref=['THE', 'BRAHMAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0001-2654: hyp=['THE', 'BRAMIAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0002-2655: ref=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0002-2655: hyp=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0003-2656: ref=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'DRINK', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0003-2656: hyp=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'BRING', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0004-2657: ref=['THEN', 'THE', 'BRAHMAN', 'TOOK', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'DONE', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0004-2657: hyp=['AND', 'IN', 'THE', 'BRAM', 'INTO', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'TURNED', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0005-2658: ref=['SO', 'THE', 'BRAHMAN', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANYAN', 'TREE', 'AND', 'THE', 'BRAHMAN', 'SAID', 'TO', 'IT', 'BANYAN', 'TREE', 'BANYAN', 'TREE', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0005-2658: hyp=['SO', 'THE', 'BRAMID', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANDON', 'TREE', 'AND', 'THE', 'BRAMMEN', 'SAID', 'TO', 'IT', 'BANION', 'TREE', 'BAN', 'AND', 'TREE', 'HERE', 'AND', 'GIVE', 'JOINTMENT'] +2414-159411-0006-2659: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BANYAN', 'TREE'] +2414-159411-0006-2659: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BEN', 'TREE'] +2414-159411-0007-2660: ref=['THIS', 'TIGER', 'SAID', 'THE', 'BRAHMAN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HURT', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LET', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] +2414-159411-0007-2660: hyp=['DISTAGGER', 'SAID', 'DEBRAMAN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HIDE', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LEFT', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] 
+2414-159411-0008-2661: ref=['IS', 'IT', 'JUST', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NO'] +2414-159411-0008-2661: hyp=['IT', 'IS', 'JEALOUS', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'I', 'KNOW'] +2414-159411-0009-2662: ref=['LET', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'AN', 'UNGRATEFUL', 'RACE'] +2414-159411-0009-2662: hyp=['LATE', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'IN', 'UNGRATEFUL', 'RACE'] +2414-159411-0010-2663: ref=['SIR', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAHMAN', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0010-2663: hyp=['SO', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAMIN', 'HERE', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0011-2664: ref=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0011-2664: hyp=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0012-2665: ref=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0012-2665: hyp=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0013-2666: ref=['LET', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0013-2666: hyp=['LATER', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0014-2667: ref=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'HAD', 'GIVEN', 'JUDGMENT', 'AGAINST', 'THE', 'BRAHMAN', 'BUT', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'DETERMINED', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0014-2667: hyp=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'IN', 'GIVING', 'JUDGMENT', 'AGAINST', 'THE', 'BRAHMAN', 'WHICH', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'TURN', 'MIND', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0015-2668: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0015-2668: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'YOU', 'TELL', 'ME', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0016-2669: ref=['THE', 'BRAHMAN', 'STATED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'THEY', 'CLIMB', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0016-2669: hyp=['THE', 'BRAHMAN', 'SUITED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'DECLINE', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0017-2670: ref=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'THE', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMAN'] +2414-159411-0017-2670: hyp=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMAN'] +2414-159411-0018-2671: ref=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLIGATOR', 'AND', 'THE', 'BRAHMAN', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVORABLE', 'VERDICT'] +2414-159411-0018-2671: hyp=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLEGATOR', 'AND', 'THE', 'BRAMA', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVORABLE', 'VERDICT'] +2414-159411-0019-2672: ref=['BUT', 'THE', 'ALLIGATOR', 'SAID', 'WHENEVER', 'I', 'PUT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', 'MEN', 'TORMENT', 'ME', 'AND', 'TRY', 'TO', 'KILL', 'ME'] +2414-159411-0019-2672: hyp=['WITH', 'THE', 'ADDIER', 'TO', 'THE', 'SUIT', 'WHENEVER', 'A', 
'PUT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', 'MEAN', 'TOM', 'AND', 'ME', 'AND', 'TRIED', 'TO', 'KILL', 'ME'] +2414-159411-0020-2673: ref=['THE', 'BRAHMAN', 'GAVE', 'HIMSELF', 'UP', 'AS', 'LOST', 'BUT', 'AGAIN', 'HE', 'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0020-2673: hyp=['NEGROMMAN', 'GAVE', 'HIMSELF', 'UP', 'AT', 'LOST', 'BUT', 'AGAIN', 'HE', 'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0021-2674: ref=['NOW', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0021-2674: hyp=['BY', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0022-2675: ref=['THE', 'BRAHMAN', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACKAL', 'UNCLE', 'JACKAL', 'SAY', 'WHAT', 'IS', 'YOUR', 'JUDGMENT'] +2414-159411-0022-2675: hyp=['THE', 'GRAMMER', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACKO', 'AND', 'WILL', 'JACK', 'HOLE', 'SAY', 'WHAT', 'IS', 'YOUR', 'JUDGMENT'] +2414-159411-0023-2676: ref=['SHOW', 'ME', 'THE', 'PLACE'] +2414-159411-0023-2676: hyp=['SHOW', 'ME', 'THE', 'PACE'] +2414-159411-0024-2677: ref=['WHEN', 'THEY', 'GOT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NOW', 'BRAHMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0024-2677: hyp=['AND', 'THE', 'COURT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NABRAMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0025-2678: ref=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'THE', 'JACKAL'] +2414-159411-0025-2678: hyp=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'JACO'] +2414-159411-0026-2679: ref=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'BRAHMAN'] +2414-159411-0026-2679: hyp=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'PROMIN'] +2414-159411-0027-2680: ref=['WHERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0027-2680: hyp=['THERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0028-2681: ref=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'TIGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0028-2681: hyp=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'DRAGGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0029-2682: ref=['VERY', 'GOOD', 'SAID', 'THE', 'JACKAL', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0029-2682: hyp=['VERY', 'GOOD', 'SAID', 'THE', 'JACK', 'HOPE', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0030-2683: ref=['SHUT', 'AND', 'BOLTED', 'SAID', 'THE', 'BRAHMAN'] +2414-159411-0030-2683: hyp=['SHED', 'AND', 'BOLTED', 'SAID', 'DEBRAMIN'] +2414-159411-0031-2684: ref=['THEN', 'SHUT', 'AND', 'BOLT', 'IT', 'SAID', 'THE', 'JACKAL'] +2414-159411-0031-2684: hyp=['VENTURED', 'AND', 'BOLTED', 'SAID', 'TO', 'JACKAL'] +2414-159411-0032-2685: ref=['WHEN', 'THE', 'BRAHMAN', 'HAD', 'DONE', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TIGER'] +2414-159411-0032-2685: hyp=['WHEN', 'THE', 'BRAHMAN', 'HAD', 'DONE', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TYER'] +2414-159411-0033-2686: ref=['WHEN', 'THE', 'GOOD', 'BRAHMAN', 'OPENED', 'YOUR', 'CAGE', 'DOOR', 'IS', 'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'YOU', 'WOULD', 'MAKE'] +2414-159411-0033-2686: hyp=['WITH', 'A', 'GOOD', 'BRAHMAN', 'OPENED', 'YOU', 'CARED', 'DOOR', 'IS', 
'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'HE', 'WOULD', 'MAKE'] +2414-159411-0034-2687: ref=['PROCEED', 'ON', 'YOUR', 'JOURNEY', 'FRIEND', 'BRAHMAN'] +2414-159411-0034-2687: hyp=['PROCEED', 'ON', 'YOUR', 'JOURNEY', 'FRIEND', 'RAMI'] +2414-159411-0035-2688: ref=['YOUR', 'ROAD', 'LIES', 'THAT', 'WAY', 'AND', 'MINE', 'THIS'] +2414-159411-0035-2688: hyp=['HE', 'RULED', 'LIES', 'THAT', 'WAY', 'IN', 'MIND', 'THIS'] +2414-165385-0000-2651: ref=['THUS', 'ACCOMPLISHED', 'HE', 'EXCITED', 'THE', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'COQUETTE', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'FLUTTERING', 'COXCOMB', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 'CIVILIZED', 'MONKEY'] +2414-165385-0000-2651: hyp=["THERE'S", 'ACCOMPLISHED', 'EXCITED', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'POCKET', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'REFLECTING', 'ACCOUNT', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 'CIVILIZED', 'MONKEY'] +2414-165385-0001-2652: ref=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'SENTENCED', 'TO', 'INHABIT', 'THE', 'BODY', 'OF', 'THAT', 'FINICAL', 'GRINNING', 'AND', 'MISCHIEVOUS', 'LITTLE', 'MIMICK', 'WITH', 'FOUR', 'LEGS', 'WHICH', 'YOU', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2414-165385-0001-2652: hyp=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'INTENSE', 'TO', 'INHABIT', 'A', 'BODY', 'OF', 'THAT', 'PHYNICAL', 'GRINNING', 'AND', 'MACHIEVOUS', 'LITTLE', 'MIMIC', 'WITH', 'FULL', 'LEGS', 'WHICH', 'SHE', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2609-156975-0000-2367: ref=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0000-2367: hyp=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0001-2368: ref=['HOLD', 'ON', 'HOLD', 'FAST', 'HOLD', 'OUT', 'PATIENCE', 'IS', 'GENIUS'] +2609-156975-0001-2368: hyp=['OR', 'ON', 'HER', 'FAST', 'HODE', 'PATENTS', 'AS', 'GENIUS'] +2609-156975-0002-2369: ref=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MAKES', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'US', 'DARE', 'TO', 'DO', 'OUR', 'DUTY', 'AS', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0002-2369: hyp=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MATRON', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'STARED', 'TO', 'DO', 'OUR', 'DUTY', 'IF', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0003-2370: ref=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0003-2370: hyp=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0004-2371: ref=['EVERY', 'ONE', 'WHO', 'IS', 'TURBULENT', 'HAS', 'BEEN', 'FOUND', 'BY', 'KING', 'MERNEPTAH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'BIBLICAL', 'NARRATIVES', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 'HEBREWS', 'IN', 'EGYPT', 'IS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PICTURE', 'WHICH', 'THE', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'OF', 'THE', 'PERIOD'] +2609-156975-0004-2371: hyp=['EVERY', 'ONE', 'WHOSE', 'TREBRANT', 'HAS', 'BEEN', 'FOUND', 'BY', 'GIMERNETTE', 'PATH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'PABRICAL', 'NARRATIVE', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 
'HEBREWS', 'IN', 'EGYPT', 'IS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PITCHER', 'WHICH', 'IT', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'THIS', 'PERIOD'] +2609-156975-0005-2372: ref=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFERENCE', 'TO', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0005-2372: hyp=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFUCER', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0006-2373: ref=['IT', 'SEEMS', 'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'OF', 'THE', 'TRIBES', 'WHICH', 'ULTIMATELY', 'COALESCED', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0006-2373: hyp=['IT', 'SEEMS', 'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'IN', 'THE', 'TRIBES', 'WHICH', 'ULTIMATE', 'COLLETS', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0007-2374: ref=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THE', 'TRADITIONAL', 'FATHER', 'OF', 'EPHRAIM', 'AND', 'MANASSEH', 'IMPLY', 'THAT', 'THESE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'IN', 'THIS', 'OPENING', 'SCENE', 'IN', "ISRAEL'S", 'HISTORY'] +2609-156975-0007-2374: hyp=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THEIR', 'TRADITIONAL', 'FOUNDER', 'THAT', 'FROM', 'IN', 'MANETTE', 'SE', 'INCLINE', 'THAT', 'THESE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'THE', 'SOUTHERN', 'TRINES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'OPENING', 'SEEN', 'IN', "ISRAEL'S", 'HISTORY'] +2609-156975-0008-2375: ref=['THE', 'BIBLICAL', 'NARRATIVES', 'APPARENTLY', 'DISAGREE', 'REGARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0008-2375: hyp=['THE', 'BIBOCO', 'NARRATIVES', 'APPARENTLY', 'DISAGREED', 'GUARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0009-2376: ref=['THE', 'LATER', 'TRADITIONS', 'TEND', 'TO', 'EXTEND', 'THE', 'PERIOD'] +2609-156975-0009-2376: hyp=['THE', 'LATER', 'JUDICINES', 'INTEREST', 'IN', 'THE', 'PERIOD'] +2609-156975-0010-2377: ref=['HERE', 'WERE', 'FOUND', 'SEVERAL', 'INSCRIPTIONS', 'BEARING', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'P', 'ATUM', 'HOUSE', 'OF', 'THE', 'GOD', 'ATUM'] +2609-156975-0010-2377: hyp=['WHO', 'WERE', 'FOUND', 'SEVEREND', 'SCRIPS', 'AND', 'SPARED', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'PATUM', 'OUTS', 'OF', 'THE', 'GOD', 'ATOM'] +2609-156975-0011-2378: ref=['A', 'CONTEMPORARY', 'INSCRIPTION', 'ALSO', 'STATES', 'THAT', 'HE', 'FOUNDED', 'NEAR', 'PITHUM', 'THE', 'HOUSE', 'OF', 'RAMSES', 'A', 'CITY', 'WITH', 'A', 'ROYAL', 'RESIDENCE', 'AND', 'TEMPLES'] +2609-156975-0011-2378: hyp=['A', 'CONTEMPORARY', 'INSCRIPTION', 'ONCE', 'ESTATES', 'THAT', 'HE', 'FOUND', 'A', 'NEAR', 'PITTHAM', 'THE', 'HOUSE', 'OF', 'RANSES', 'A', 'CITY', 'WITH', 'THE', 'ROYAL', 'RESIDENCE', 'IN', 'SIMPLES'] +2609-156975-0012-2379: ref=['THAT', 'THE', 'HEBREWS', 'WERE', 'RESTIVE', 'UNDER', 'THIS', 'TYRANNY', 'WAS', 'NATURAL', 'INEVITABLE'] +2609-156975-0012-2379: hyp=['THAT', 'THE', 'HEBREWS', 'WERE', 'WRETS', 'OF', 'UNDER', 'THIS', 'SURNING', 'WAS', 'NATURALLY', 'INEVITABLE'] +2609-156975-0013-2380: ref=['WAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'EXPECTED', 'FROM', 'A', 'DESPOTIC', 'RULER', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0013-2380: hyp=['WAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'INSPECTRE', 
'FROM', 'IT', 'THAT', 'SPOTIC', 'ROAR', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0014-2381: ref=['THE', 'MAKING', 'OF', 'A', 'LOYAL', 'PATRIOT'] +2609-156975-0014-2381: hyp=['THE', 'MAKING', 'OF', 'THE', 'LOYAL', 'PATRIOT'] +2609-156975-0015-2382: ref=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AND', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0015-2382: hyp=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AN', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0016-2383: ref=['WAS', 'MOSES', 'JUSTIFIED', 'IN', 'RESISTING', 'THE', 'EGYPTIAN', 'TASKMASTER'] +2609-156975-0016-2383: hyp=['WIS', 'MOVES', "IT'S", 'JEST', 'FIND', 'AN', 'RESISTIN', 'DE', 'GYPTIAN', 'TAX', 'MASTER'] +2609-156975-0017-2384: ref=['IS', 'PEONAGE', 'ALWAYS', 'DISASTROUS', 'NOT', 'ONLY', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0017-2384: hyp=['HIS', 'PINIONS', 'ALWAYS', 'DISASTRATES', 'NOT', 'OWING', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0018-2385: ref=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'OF', 'MIDIAN'] +2609-156975-0018-2385: hyp=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'A', 'MILLION'] +2609-156975-0019-2386: ref=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'HAD', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLACE', 'OF', 'REFUGE', 'FOR', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0019-2386: hyp=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'AND', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLATES', 'OF', 'RED', 'FUTURE', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0020-2387: ref=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'B', 'C'] +2609-156975-0020-2387: hyp=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'B', 'C'] +2609-156975-0021-2388: ref=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMEN', 'WHO', 'RECEIVED', 'HIM', 'HOSPITABLY'] +2609-156975-0021-2388: hyp=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMAN', 'WHO', 'RECEIVED', 'HIM', 'HALF', 'SPITABLY'] +2609-156975-0022-2389: ref=['THESE', 'SAND', 'WANDERERS', 'SENT', 'HIM', 'ON', 'FROM', 'TRIBE', 'TO', 'TRIBE', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KEDEM', 'EAST', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'A', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0022-2389: hyp=['THESE', 'SANDWARES', 'SENT', 'HIM', 'ON', 'FROM', 'TIME', 'TO', 'TIME', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KIDDAM', 'EACH', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0023-2390: ref=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'IN', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'BECAME', 'IN', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0023-2390: hyp=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'AND', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'MICHANG', 'IN', 'THE', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0024-2391: ref=['THE', 'SCHOOL', 'OF', 'THE', 'WILDERNESS'] +2609-156975-0024-2391: hyp=['THE', 'SCHOOL', 'OF', 'THE', 'WARINESS'] +2609-156975-0025-2392: ref=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'TO', 'THAT', 
'OF', 'SINUHIT'] +2609-156975-0025-2392: hyp=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'DID', 'NOT', 'ASSUME', 'IT'] +2609-156975-0026-2393: ref=['THE', 'PRIEST', 'OF', 'THE', 'SUB', 'TRIBE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0026-2393: hyp=['THE', 'PRIEST', 'THE', 'SUBTRINE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0027-2394: ref=['NOTE', 'THE', 'CHARACTERISTIC', 'ORIENTAL', 'IDEA', 'OF', 'MARRIAGE'] +2609-156975-0027-2394: hyp=['NOTE', 'THE', 'CARE', 'OF', 'RIVER', 'STICK', 'ORIENTOUINE', 'OF', 'MARES'] +2609-156975-0028-2395: ref=['HERE', 'MOSES', 'LEARNED', 'THE', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINING', 'AS', 'THE', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0028-2395: hyp=['HERE', 'MOSES', 'LEARNED', 'THAT', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINED', 'IN', 'AS', 'A', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0029-2396: ref=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAIN', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'TO', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0029-2396: hyp=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAIN', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'A', 'SUDDEN', 'TRIUMPH', 'SHE', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0030-2397: ref=['MANY', 'MODERN', 'SCHOLARS', 'DRAW', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YAHWEH', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GOD', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'SCRIBES', 'JEHOVAH'] +2609-156975-0030-2397: hyp=['MANY', 'MODERN', 'SCHOLARS', 'DRAWN', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENNITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YONWAY', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GONE', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'GRIBES', 'JEHOVAH'] +2609-156975-0031-2398: ref=['DO', 'THE', 'EARLIEST', 'HEBREW', 'TRADITIONS', 'IMPLY', 'THAT', 'THE', 'ANCESTORS', 'OF', 'THE', 'ISRAELITES', 'WERE', 'WORSHIPPERS', 'OF', 'JEHOVAH'] +2609-156975-0031-2398: hyp=['DO', 'THE', 'ARIAD', 'SEA', 'BOU', 'TRADITIONS', 'IMPLY', 'THAT', 'INSECTORS', 'OF', 'THE', 'ISRAIT', 'WERE', 'WORSHIPPED', 'OF', 'JEHOVAH'] +2609-156975-0032-2399: ref=['THE', 'TITLE', 'OF', 'HIS', 'FATHER', 'IN', 'LAW', 'IMPLIES', 'THAT', 'THIS', 'PRIEST', 'MINISTERED', 'AT', 'SOME', 'WILDERNESS', 'SANCTUARY'] +2609-156975-0032-2399: hyp=['THE', 'TOWN', 'OF', 'HIS', 'FUND', 'THEM', 'AND', 'ALL', 'IMPLIES', 'AT', 'THIS', 'PREACH', 'MINISTERED', 'AT', 'SOME', 'LINEN', 'AT', 'SANCTUARY'] +2609-156975-0033-2400: ref=['MOSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MIDIAN', 'PRIEST', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'AND', 'CONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0033-2400: hyp=['ROSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MENDIAN', 'PRIESTS', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'AND', 'CONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0034-2401: ref=['THE', 'CRUEL', 'FATE', 'OF', 'HIS', 'PEOPLE', 'AND', 'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 
'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0034-2401: hyp=['THE', 'CRUEL', 'FATE', 'OF', 'THIS', 'PEOPLE', 'AND', 'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0035-2402: ref=['HIS', 'QUEST', 'WAS', 'FOR', 'A', 'JUST', 'AND', 'STRONG', 'GOD', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0035-2402: hyp=['HIS', 'PRESS', 'WAS', 'FOR', 'JETS', 'AND', 'STRONG', 'GOD', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0036-2403: ref=['THE', 'WILDERNESS', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEPENED', 'HIS', 'SENSE', 'OF', 'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'A', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DESTINIES', 'OF', 'MEN'] +2609-156975-0036-2403: hyp=['THE', 'WEDDINANCE', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEP', 'INTO', 'SENSE', 'OF', 'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'A', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DEBTS', 'NEEDS', 'OF', 'MEN'] +2609-156975-0037-2404: ref=['THE', 'PEASANTS', 'OF', 'THE', 'VAST', 'ANTOLIAN', 'PLAIN', 'IN', 'CENTRAL', 'ASIA', 'MINOR', 'STILL', 'CALL', 'EVERY', 'LIFE', 'GIVING', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0037-2404: hyp=['THE', 'PEASANTS', 'OF', 'THE', 'VATS', 'INTOLLIUM', 'PLAIN', 'OF', 'CENTRAL', 'AS', 'A', 'MINOR', 'SO', 'CALL', 'EVERY', 'LIFE', 'GIVEN', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0038-2405: ref=['THE', 'CONSTANT', 'NECESSITY', 'OF', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WILDERNESS', 'AND', 'OF', 'DEFENDING', 'THE', 'FLOCKS', 'ENTRUSTED', 'TO', 'MOSES', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEADERSHIP', 'AND', 'ACTION'] +2609-156975-0038-2405: hyp=['THEY', "CAN'T", 'SENT', 'THE', 'NECESSITY', 'A', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WILDERNESS', 'AND', 'THE', 'DEFENDING', 'THE', 'FLOCKS', 'AND', 'TRAITS', 'OF', 'JEMOSIS', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEGERSHIP', 'AND', 'ACTION'] +2609-157645-0000-2352: ref=['EVIDENTLY', 'THE', 'INTENTION', 'WAS', 'TO', 'MAKE', 'THINGS', 'PLEASANT', 'FOR', 'THE', 'ROYAL', 'FOE', 'OF', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0000-2352: hyp=['EVIDENTLY', 'THE', 'INTENTION', 'WHICH', 'MADE', 'THINGS', 'PRESENT', 'FOR', 'THE', 'ROYAL', 'FOLK', 'A', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0001-2353: ref=['THE', 'PROHIBITION', 'IN', 'THE', 'REGULATION', 'QUOTED', 'OF', 'SMOKING', 'IN', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'IT', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0001-2353: hyp=['THE', 'PROBES', 'AND', 'THE', 'REGULATING', 'QUOTED', 'HER', 'SMOKING', 'AND', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0002-2354: ref=['SOMETIMES', 'TOBACCO', 'WAS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECTING', 'OR', 'DEODORIZING', 'PURPOSES'] +2609-157645-0002-2354: hyp=['SOMETIMES', 'TOBACCO', 'IS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECT', 'AND', 'NO', 'DEAL', 'ARISING', 'PURPOSES'] +2609-157645-0003-2355: ref=['BLACKBURN', 'ARCHBISHOP', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0003-2355: hyp=['BLACKBURN', 'ARCHBISH', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0004-2356: ref=['ON', 'ONE', 
'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NOTTINGHAM', 'FOR', 'A', 'CONFIRMATION'] +2609-157645-0004-2356: hyp=['ON', 'ONE', 'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NINE', 'IN', 'HAM', 'FOR', 'A', 'CONFIRMATESON'] +2609-157645-0005-2357: ref=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTOR', 'PARR', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'WHAT', 'MISTER', 'DISNEY', 'PREVENTED', 'ARCHBISHOP', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VESTRY', 'AT', 'HATTON'] +2609-157645-0005-2357: hyp=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTRIPAR', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'AT', 'MIDSER', 'DIDNY', 'PREVENTED', 'ARCHBISH', 'OF', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VETERY', 'AT', 'HATTON'] +2609-157645-0006-2358: ref=['PARR', 'WAS', 'SUCH', 'A', 'CONTINUAL', 'SMOKER', 'THAT', 'ANYONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'IF', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'HAD', 'TO', 'LEARN', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0006-2358: hyp=['PAR', 'WITH', 'SUCH', 'A', 'CONTINUOUS', 'MOCHER', 'THAT', 'ANY', 'ONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'IF', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'AND', 'TO', 'LEARNED', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0007-2359: ref=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITCHFIELD', 'HE', 'HAD', 'AN', 'EXTRA', 'PIPE', 'AND', 'JOSHUA', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0007-2359: hyp=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITZFIELD', 'HE', 'END', 'IN', 'THAT', 'SIR', 'PIPE', 'AND', 'JOHNSHAW', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0008-2360: ref=['LET', 'THEM', 'SING', 'ANOTHER', 'PSALM', 'SAID', 'THE', 'CURATE'] +2609-157645-0008-2360: hyp=['THEM', 'TO', 'THEM', 'SINGING', 'NOW', 'THE', 'PSALMS', 'SAKE', 'THE', 'CURATE'] +2609-157645-0009-2361: ref=['THEY', 'HAVE', 'SIR', 'REPLIED', 'THE', 'CLERK'] +2609-157645-0009-2361: hyp=['THEY', 'HAVE', 'SIR', 'REPLIED', 'THE', 'CLERK'] +2609-157645-0010-2362: ref=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0010-2362: hyp=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0011-2363: ref=['SIX', 'ARMS', 'THE', 'NEAREST', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AS', 'MANY', 'TOBACCO', 'POUCHES', 'TO', 'THE', 'MAN', 'OF', 'OFFICE'] +2609-157645-0011-2363: hyp=['SIX', 'ARMS', 'THE', 'NURSE', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AS', 'MANY', 'TOBACCO', 'PIUCHES', 'TO', 'THE', 'MEN', 'OF', 'OFFICE'] +2609-157645-0012-2364: ref=['DAVID', 'DEANS', 'HOWEVER', 'DID', 'NOT', 'AT', 'ALL', 'APPROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0012-2364: hyp=['DAVID', 'DEAN', 'SAMURED', 'DID', 'NOT', 'AT', 'ARM', 'PROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0013-2365: ref=['GOING', 'TO', 'CHURCH', 'AT', 'HAYES', 'IN', 'THOSE', 'DAYS', 'MUST', 'HAVE', 'BEEN', 'QUITE', 'AN', 'EXCITING', 'EXPERIENCE'] +2609-157645-0013-2365: hyp=['GO', 'INTO', 'CHURCH', 'THAT', 'HAS', 'BEEN', 'THUS', 'DAYS', 'MISTS', 'HAVE', 'BEEN', 'ACQUAINTED', 'AND', 'THESE', 'SIGNING', 'SPIRITS'] +2609-157645-0014-2366: ref=['WHEN', 'THESE', 
'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'THAT', 'I', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'CUSTOM', 'THEY', 'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-157645-0014-2366: hyp=['WHEN', 'THESE', 'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'OUT', 'THAT', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'COTTOM', 'THEY', 'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-169640-0000-2406: ref=['PROAS', 'IN', 'THAT', 'QUARTER', 'WERE', 'USUALLY', 'DISTRUSTED', 'BY', 'SHIPS', 'IT', 'IS', 'TRUE', 'BUT', 'THE', 'SEA', 'IS', 'FULL', 'OF', 'THEM', 'AND', 'FAR', 'MORE', 'ARE', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0000-2406: hyp=['PERHAPS', 'IN', 'THAT', 'QUARTER', 'WERE', 'USUAL', 'DISTRUDGED', 'BY', 'THE', 'STEPS', 'AT', 'IS', 'TRUE', 'BUT', 'THE', 'SEAS', 'FOR', 'THEM', 'FAR', 'MORE', 'ARE', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0001-2407: ref=['AN', 'HOUR', 'AFTER', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TO', 'A', 'LIGHT', 'AIR', 'THAT', 'JUST', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0001-2407: hyp=['NOW', 'I', 'OUTREW', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TURNED', 'LIGHT', 'AIR', 'DAT', 'DIDST', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0002-2408: ref=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAST', 'BUT', 'SHE', 'MINDED', 'HER', 'HELM', 'AS', 'A', 'LIGHT', 'FOOTED', 'GIRL', 'TURNS', 'IN', 'A', 'LIVELY', 'DANCE'] +2609-169640-0002-2408: hyp=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAT', 'BUT', 'SEA', 'MINDED', 'HER', 'HAIL', 'AS', 'THE', 'LIGHTFOOTED', 'GIRL', 'TURNED', 'TO', 'THE', 'LIVELY', 'DANCE'] +2609-169640-0003-2409: ref=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STEERING', 'SHIP', 'MOST', 'ESPECIALLY', 'IN', 'MODERATE', 'WEATHER'] +2609-169640-0003-2409: hyp=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STIRRING', 'SHIP', 'PERCY', 'SPENTRY', 'AND', 'MARGARET', 'WEATHER'] +2609-169640-0004-2410: ref=['MISTER', 'MARBLE', 'HE', 'I', 'DO', 'BELIEVE', 'WAS', 'FAIRLY', 'SNOOZING', 'ON', 'THE', 'HEN', 'COOPS', 'BEING', 'LIKE', 'THE', 'SAILS', 'AS', 'ONE', 'MIGHT', 'SAY', 'BARELY', 'ASLEEP'] +2609-169640-0004-2410: hyp=['MISTER', 'MARBLE', 'HE', 'OUGHT', 'TO', 'BELIEVE', 'WAS', 'FAIRLY', 'NUSING', 'AND', 'THE', 'INCOUPS', 'BEING', 'LIKE', 'THE', 'SAILORS', 'AS', 'ONE', 'MIGHT', 'SAY', 'VARIOUSLY'] +2609-169640-0005-2411: ref=['AT', 'THAT', 'MOMENT', 'I', 'HEARD', 'A', 'NOISE', 'ONE', 'FAMILIAR', 'TO', 'SEAMEN', 'THAT', 'OF', 'AN', 'OAR', 'FALLING', 'IN', 'A', 'BOAT'] +2609-169640-0005-2411: hyp=['AT', 'THAT', 'MOMENT', 'I', 'IN', 'A', 'NOISE', 'WHEN', 'FAMILIAR', 'TO', 'SEE', 'MEN', 'THAT', 'OF', 'AN', 'OAR', 'FOLLOWING', 'IN', 'A', 'BOAT'] +2609-169640-0006-2412: ref=['I', 'SANG', 'OUT', 'SAIL', 'HO', 'AND', 'CLOSE', 'ABOARD'] +2609-169640-0006-2412: hyp=['AS', 'IN', 'YET', 'SO', 'HO', 'AND', 'CLOSE', 'ABROAD'] +2609-169640-0007-2413: ref=['HE', 'WAS', 'TOO', 'MUCH', 'OF', 'A', 'SEAMAN', 'TO', 'REQUIRE', 'A', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'WHAT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0007-2413: hyp=['HE', 'WAS', 'SHIMMERTS', 'OF', 'THE', 'SEAMAN', 'TO', 'REQUIRE', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'BUT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0008-2414: ref=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'US', 'A', 'MOMENT', 'OF', 'BREATHING', 
'TIME'] +2609-169640-0008-2414: hyp=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'UP', 'SOME', 'MOMENT', 'OF', 'BREATHING', 'TIME'] +2609-169640-0009-2415: ref=['AS', 'OUR', 'SHEETS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'LEISURE', 'TO', 'LOOK', 'ABOUT'] +2609-169640-0009-2415: hyp=['AS', 'OUR', 'SEATS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'A', 'LEISURE', 'TO', 'WORK', 'ABOUT'] +2609-169640-0010-2416: ref=['I', 'SOON', 'SAW', 'BOTH', 'PROAS', 'AND', 'GLAD', 'ENOUGH', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'NEARER'] +2609-169640-0010-2416: hyp=['I', 'SOON', 'SAW', 'BOTH', 'PROTS', 'AND', 'GRINDING', 'UP', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'IN', 'NEW', 'YORK'] +2609-169640-0011-2417: ref=['MISTER', 'KITE', 'OBSERVED', 'THIS', 'ALSO', 'AND', 'REMARKED', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCALS', 'ABACK'] +2609-169640-0011-2417: hyp=['BISHOIS', 'DESERVED', 'THIS', 'ALSO', 'AND', 'REMARK', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCAL', 'WAS', 'ABACK'] +2609-169640-0012-2418: ref=['A', 'BREATHLESS', 'STILLNESS', 'SUCCEEDED'] +2609-169640-0012-2418: hyp=['A', 'BREATH', 'WHICH', 'STILL', 'IN', 'ITS', 'SUCCEEDED'] +2609-169640-0013-2419: ref=['THE', 'PROAS', 'DID', 'NOT', 'ALTER', 'THEIR', 'COURSE', 'BUT', 'NEARED', 'US', 'FAST'] +2609-169640-0013-2419: hyp=['THE', 'PROITS', 'DID', 'NOT', 'ALTER', 'THE', 'COURSE', 'BUT', 'NEAR', 'TO', 'ITS', 'FAST'] +2609-169640-0014-2420: ref=['I', 'HEARD', 'THE', 'RATTLING', 'OF', 'THE', 'BOARDING', 'PIKES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0014-2420: hyp=['I', 'HEARD', 'THE', 'RIDING', 'OF', 'THE', 'BOARDING', 'PIPES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0015-2421: ref=['KITE', 'WENT', 'AFT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MUSKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0015-2421: hyp=['COUNT', 'WENT', 'APT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MASKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0016-2422: ref=['THE', 'STILLNESS', 'THAT', 'REIGNED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEATH'] +2609-169640-0016-2422: hyp=['THE', 'STILLNESS', 'THAT', 'RAINED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEA'] +2609-169640-0017-2423: ref=['THE', 'JOHN', 'BEHAVED', 'BEAUTIFULLY', 'AND', 'CAME', 'ROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0017-2423: hyp=['THE', 'JOHN', 'BEHAVED', 'BEAUTIFULLY', 'HE', 'CAME', 'ROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0018-2424: ref=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'THEM', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRAPPLE'] +2609-169640-0018-2424: hyp=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'AND', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRANTEL'] +2609-169640-0019-2425: ref=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTLY', 'WELL', 'IN', 'THIS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 'SILENCE', 'AND', 'THE', 'CLOSEST', 'ATTENTION', 'TO', 'HIS', 'ORDERS'] +2609-169640-0019-2425: 
hyp=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTUALLY', 'WELL', 'IN', 'ITS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 'SCIENCE', 'IN', 'THE', 'CLOSET', 'SENTENCE', 'INTO', 'HIS', 'ORDERS'] +2609-169640-0020-2426: ref=['NOT', 'A', 'SOUL', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0020-2426: hyp=['NOW', 'SO', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0021-2427: ref=['ON', 'OUR', 'SIDE', 'WE', 'GAVE', 'THE', 'GENTLEMEN', 'THE', 'FOUR', 'SIXES', 'TWO', 'AT', 'THE', 'NEAREST', 'AND', 'TWO', 'AT', 'THE', 'STERN', 'MOST', 'PROA', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'DISTANT'] +2609-169640-0021-2427: hyp=['WHEN', 'OURSAN', 'WE', 'GAVE', 'THE', 'GENTLEMAN', 'THE', 'FOUR', 'SIX', 'TO', 'OUT', 'THE', 'NEWS', 'AND', 'TWO', 'AT', 'THE', 'STERNMOST', 'PRO', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'DISTANT'] +2609-169640-0022-2428: ref=['THEY', 'WERE', 'LIKE', 'THE', 'YELLS', 'OF', 'FIENDS', 'IN', 'ANGUISH'] +2609-169640-0022-2428: hyp=['THEY', 'WERE', 'NIGHTLY', 'YEARS', 'OF', 'FIENDS', 'IN', 'ENGLISH'] +2609-169640-0023-2429: ref=['I', 'DOUBT', 'IF', 'WE', 'TOUCHED', 'A', 'MAN', 'IN', 'THE', 'NEAREST', 'PROA'] +2609-169640-0023-2429: hyp=['AND', 'OUT', 'IF', 'WE', 'TOUCH', 'THE', 'REMAIN', 'IN', 'THE', 'NURTURE'] +2609-169640-0024-2430: ref=['IN', 'THIS', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ALL', 'HER', 'CANVAS', 'BEING', 'FULL', 'LEAVING', 'THE', 'PROA', 'MOTIONLESS', 'IN', 'HER', 'WAKE'] +2609-169640-0024-2430: hyp=['IN', 'THAT', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ON', 'FOR', 'A', 'CANVAS', 'BEEN', 'FOR', 'LEAVING', 'THE', 'PROW', 'MUCH', 'ENRICHED', 'IN', 'HER', 'WAKE'] +3005-163389-0000-1108: ref=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURN'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0000-1108: hyp=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURN'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0001-1109: ref=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEAR', 'DOWN', 'THE', 'FENCE'] +3005-163389-0001-1109: hyp=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEARE', 'DOWN', 'THE', 'FENCE'] +3005-163389-0002-1110: ref=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0002-1110: hyp=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0003-1111: ref=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUT', 'GAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0003-1111: hyp=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUTGAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0004-1112: ref=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0004-1112: hyp=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0005-1113: ref=['BECAUSE', "THEY'RE", 'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARKAND', "IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0005-1113: hyp=['BECAUSE', "THEY'RE", 
'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARK', 'AND', "IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0006-1114: ref=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASKED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0006-1114: hyp=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASSED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0007-1115: ref=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0007-1115: hyp=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0008-1116: ref=['BUT', 'A', 'MOB', 'WITHOUT', 'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0008-1116: hyp=['BUT', 'A', 'MOB', 'WITHOUT', 'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0009-1117: ref=['NOW', 'LEAVE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0009-1117: hyp=['NOW', 'LE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0010-1118: ref=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKNESS', 'HE', 'HEELED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0010-1118: hyp=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKINS', 'HE', 'HEALED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0011-1119: ref=['YOU', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0011-1119: hyp=['HE', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0012-1120: ref=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'THE', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'STANDSTILL'] +3005-163389-0012-1120: hyp=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'A', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'FAN', 'STILL'] +3005-163389-0013-1121: ref=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGUN', 'TO', 'SCREAM'] +3005-163389-0013-1121: hyp=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGAN', 'TO', 'SCREAM'] +3005-163389-0014-1122: ref=['SO', 'THEN', 'THE', 'RINGMASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0014-1122: hyp=['SO', 'THEN', 'A', 'RING', 'MASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0015-1123: ref=['IT', "WARN'T", 'FUNNY', 'TO', 'ME', 'THOUGH', 'I', 'WAS', 'ALL', 
'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0015-1123: hyp=['IT', "WARN'T", 'FUNNY', 'TO', 'ME', 'THOUGH', 'I', 'WAS', 'ALL', 'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0016-1124: ref=['AND', 'THE', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFIRE', 'TOO'] +3005-163389-0016-1124: hyp=['AND', 'A', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFAR', 'TOO'] +3005-163389-0017-1125: ref=['HE', 'SHED', 'THEM', 'SO', 'THICK', 'THEY', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0017-1125: hyp=['HE', 'SHARED', 'THEM', 'SO', 'THICK', 'THAT', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0018-1126: ref=['WHY', 'IT', 'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163389-0018-1126: hyp=['WHY', 'IT', 'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163390-0000-1185: ref=['ANDBUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0000-1185: hyp=['AND', 'BUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0001-1186: ref=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAW', 'HAWED', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0001-1186: hyp=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAWHAT', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0002-1187: ref=['TWENTY', 'PEOPLE', 'SINGS', 'OUT'] +3005-163390-0002-1187: hyp=['TWENTY', 'PEOPLE', 'SANGS', 'OUT'] +3005-163390-0003-1188: ref=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0003-1188: hyp=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0004-1189: ref=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0004-1189: hyp=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0005-1190: ref=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0005-1190: hyp=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0006-1191: ref=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0006-1191: hyp=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0007-1192: ref=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'OF', 'THE', 'TOWN'] +3005-163390-0007-1192: hyp=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'O', 'THE', 'TOWN'] +3005-163390-0008-1193: ref=['YOU', 'BET', 'IT', 'IS', 'THE', 'JEDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] +3005-163390-0008-1193: hyp=['YOU', 'BADE', 'IT', 'IS', 'THE', 'JUDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] 
+3005-163390-0009-1194: ref=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0009-1194: hyp=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0010-1195: ref=['WE', 'NEVER', 'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0010-1195: hyp=['WE', 'NEVER', 'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0011-1196: ref=['GREENHORNS', 'FLATHEADS'] +3005-163390-0011-1196: hyp=['GREENHORNS', 'FLAT', 'HEADS'] +3005-163390-0012-1197: ref=['NO', 'I', 'SAYS', 'IT', "DON'T"] +3005-163390-0012-1197: hyp=['NO', 'I', 'SAY', 'IS', 'IT', "DON'T"] +3005-163390-0013-1198: ref=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'THE', 'BREED', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0013-1198: hyp=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'DE', 'BREED', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0014-1199: ref=['WELL', "THAT'S", 'WHAT', "I'M", 'A', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RAPSCALLIONS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', 'IS', 'DAT', 'SO'] +3005-163390-0014-1199: hyp=['WELL', "THAT'S", 'WHAT', 'I', 'MUST', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RATCALIONS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', 'IS', "DAT'S", 'SO'] +3005-163390-0015-1200: ref=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'THIRD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARCHIES', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'IN', 'OLD', 'TIMES', 'AND', 'RAISE', 'CAIN'] +3005-163390-0015-1200: hyp=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARK', 'IS', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'WHEN', 'OLD', 'TIMES', 'AND', 'RAISED', 'GAME'] +3005-163390-0016-1201: ref=['MY', 'YOU', 'OUGHT', 'TO', 'SEEN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0016-1201: hyp=['MY', 'YOU', 'OUGHT', 'TO', 'SEE', 'AN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0017-1202: ref=['RING', 'UP', 'FAIR', 'ROSAMUN'] +3005-163390-0017-1202: hyp=['RING', 'UP', 'FAIR', 'ROSAMOND'] +3005-163390-0018-1203: ref=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GET', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0018-1203: hyp=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GET', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0019-1204: ref=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTHWHAT', 'THEN'] +3005-163390-0019-1204: hyp=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTH', 'WHAT', 'THEN'] +3005-163390-0020-1205: ref=['ALL', 'I', 'SAY', 'IS', 'KINGS', 'IS', 'KINGS', 'AND', 
'YOU', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0020-1205: hyp=['ALL', 'I', 'SAY', 'IS', 'KINGS', 'IS', 'KINGS', 'AN', 'YOU', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0021-1206: ref=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0021-1206: hyp=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0022-1207: ref=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0022-1207: hyp=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0023-1208: ref=['NOW', 'DE', 'DUKE', "HE'S", 'A', 'TOLERBLE', 'LIKELY', 'MAN', 'IN', 'SOME', 'WAYS'] +3005-163390-0023-1208: hyp=['NOW', 'TO', 'DO', "HE'S", 'A', 'TOLERABLE', 'LIKE', 'THE', 'MAN', 'IN', 'SOME', 'WAYS'] +3005-163390-0024-1209: ref=['THIS', "ONE'S", 'A', 'MIDDLING', 'HARD', 'LOT', 'FOR', 'A', 'DUKE'] +3005-163390-0024-1209: hyp=['THIS', "ONE'S", 'A', 'MIDDLIN', 'HARD', 'LOT', 'FOR', 'A', 'DUPE'] +3005-163390-0025-1210: ref=['WHEN', 'I', 'WAKED', 'UP', 'JUST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0025-1210: hyp=['WHEN', 'I', 'WAKED', 'UP', 'JEST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0026-1211: ref=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0026-1211: hyp=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0027-1212: ref=['HE', 'WAS', 'OFTEN', 'MOANING', 'AND', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'LIZABETH'] +3005-163390-0027-1212: hyp=['HE', 'WAS', 'OFTEN', 'MOANING', 'IN', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'ELIZABETH'] +3005-163390-0028-1213: ref=['DOAN', 'YOU', 'HEAR', 'ME', 'SHET', 'DE', 'DO'] +3005-163390-0028-1213: hyp=["DON'T", 'YOU', 'HEAR', 'ME', 'SHUT', 'DE', 'DO'] +3005-163390-0029-1214: ref=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0029-1214: hyp=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0030-1215: ref=['JIS', 'AS', 'LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163390-0030-1215: hyp=['GIT', 'AS', 'LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163391-0000-1127: ref=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0000-1127: hyp=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0001-1128: ref=['THE', "KING'S", 'DUDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AND', 'STARCHY'] +3005-163391-0001-1128: hyp=['THE', "KING'S", 'DERDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AN', 'STARCHY'] +3005-163391-0002-1129: ref=['WHY', 'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', 'HE', 'HAD', 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 
'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LEVITICUS', 'HIMSELF'] +3005-163391-0002-1129: hyp=['WHY', 'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', "HE'D", 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LUVIDICUS', 'HIMSELF'] +3005-163391-0003-1130: ref=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0003-1130: hyp=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0004-1131: ref=['WHER', 'YOU', 'BOUND', 'FOR', 'YOUNG', 'MAN'] +3005-163391-0004-1131: hyp=['WERE', 'YE', 'BOUND', 'FOR', 'YOUNG', 'MAN'] +3005-163391-0005-1132: ref=['GIT', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0005-1132: hyp=['GET', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0006-1133: ref=['I', 'DONE', 'SO', 'AND', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0006-1133: hyp=['I', 'DONE', 'SO', 'AN', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0007-1134: ref=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0007-1134: hyp=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0008-1135: ref=['HE', 'ASKED', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'THE', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILE', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0008-1135: hyp=['HE', 'ASKED', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'A', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILE', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0009-1136: ref=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLING', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0009-1136: hyp=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLIN', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0010-1137: ref=['NO', 'MY', "NAME'S", 'BLODGETT', 'ELEXANDER', 'BLODGETT', 'REVEREND', 'ELEXANDER', 'BLODGETT', 'I', "S'POSE", 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'O', 'THE', "LORD'S", 'POOR', 'SERVANTS'] +3005-163391-0010-1137: hyp=['NO', 'MY', "NAME'S", 'OBLIGE', 'IT', 'ALEXANDER', 'BLODGET', 'REVEREND', 'ALEXANDER', 'BLODGET', 'I', 'SUPPOSE', 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'OF', 'THE', 'LORDS', 'POOR', 'SERVANTS'] +3005-163391-0011-1138: ref=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', "GEORGE'S", "G'YIRLS", 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 
'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 'LIVE'] +3005-163391-0011-1138: hyp=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', 'GEORGE', 'IS', 'GUY', 'EARLS', 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 'LIVE'] +3005-163391-0012-1139: ref=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'A', 'LIVED', 'TO', 'SEE', 'HIS', 'BROTHERS', 'POOR', 'SOUL'] +3005-163391-0012-1139: hyp=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'HAVE', 'LIVED', 'TO', 'SEE', 'HIS', "BROTHER'S", 'POOR', 'SOUL'] +3005-163391-0013-1140: ref=["I'M", 'GOING', 'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FOR', 'RYO', 'JANEERO', 'WHERE', 'MY', 'UNCLE', 'LIVES'] +3005-163391-0013-1140: hyp=["I'M", 'GOIN', 'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FERIO', 'GENERO', 'WHERE', 'MY', 'UNCLE', 'IS'] +3005-163391-0014-1141: ref=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHT', 'I', 'WAS', 'A', 'GOING'] +3005-163391-0014-1141: hyp=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHED', 'I', 'WAS', 'A', 'GOIN'] +3005-163391-0015-1142: ref=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', "FOURTEENTHAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HARE', 'LIP', 'POOR', 'THINGS'] +3005-163391-0015-1142: hyp=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', 'FOURTEEN', "THAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HAIR', 'LIP', 'POOR', 'THINGS'] +3005-163391-0016-1143: ref=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0016-1143: hyp=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0017-1144: ref=['OLD', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0017-1144: hyp=['O', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0018-1145: ref=['BLAMED', 'IF', 'HE', "DIDN'T", 'INQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'IN', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKSES', 'AND', 'ABOUT', "PETER'S", 'BUSINESSWHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'SWHICH", 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', "HARVEY'SWHICH", 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0018-1145: hyp=['BLAMED', 'IF', 'HE', "DIDN'T", 'ACQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'AND', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKES', 'AND', 'ABOUT', "PETER'S", 'BUSINESS', 'WHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'S", 'WHICH', 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', 'HARVEST', 'WHICH', 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0019-1146: ref=['WHEN', "THEY'RE", 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0019-1146: hyp=['WHEN', 'HER', 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0020-1147: ref=['WAS', 'PETER', 'WILKS', 'WELL', 'OFF'] +3005-163391-0020-1147: hyp=['WAS', 'PETER', 'WILKES', 'WELL', 'OFF'] +3005-163391-0021-1148: ref=['WHEN', 'WE', 'STRUCK', 'THE', 'BOAT', 'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 
'SHE', 'GOT', 'OFF'] +3005-163391-0021-1148: hyp=['WHEN', 'WASTED', 'UP', 'THE', 'BOAT', 'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 'SHE', 'GOT', 'OFF'] +3005-163391-0022-1149: ref=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0022-1149: hyp=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0023-1150: ref=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0023-1150: hyp=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0024-1151: ref=['BUT', 'THE', 'KING', 'WAS', "CA'M", 'HE', 'SAYS'] +3005-163391-0024-1151: hyp=['THAT', 'THE', 'KING', 'WAS', 'CALM', 'HE', 'SAYS'] +3005-163391-0025-1152: ref=['THEY', 'GIVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WHAT', 'D', 'I', 'TELL', 'YOU'] +3005-163391-0025-1152: hyp=['THEY', 'GAVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WOULD', 'THEY', 'TELL', 'YOU'] +3005-163391-0026-1153: ref=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163391-0026-1153: hyp=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163399-0000-1154: ref=['PHELPS', 'WAS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0000-1154: hyp=['PHELPS', 'IS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0001-1155: ref=['I', 'WENT', 'AROUND', 'AND', 'CLUMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0001-1155: hyp=['I', 'WENT', 'AROUND', 'AND', 'CLIMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0002-1156: ref=['I', 'OUT', 'WITH', 'A', "YES'M", 'BEFORE', 'I', 'THOUGHT'] +3005-163399-0002-1156: hyp=['AH', 'OUT', 'WITH', 'A', 'YES', 'AND', 'FORE', 'I', 'THOUGHT'] +3005-163399-0003-1157: ref=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0003-1157: hyp=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0004-1158: ref=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOMED', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0004-1158: hyp=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOM', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0005-1159: ref=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0005-1159: hyp=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0006-1160: ref=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 'BAPTIST'] +3005-163399-0006-1160: hyp=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 
'BAPTIST'] +3005-163399-0007-1161: ref=['YES', 'IT', 'WAS', 'MORTIFICATIONTHAT', 'WAS', 'IT'] +3005-163399-0007-1161: hyp=['YES', 'IT', 'WAS', 'MORTIFICATION', 'THAT', 'WAS', 'IT'] +3005-163399-0008-1162: ref=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH', 'YOU'] +3005-163399-0008-1162: hyp=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH'] +3005-163399-0009-1163: ref=['YOU', 'MUST', 'A', 'MET', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0009-1163: hyp=['YOU', 'MUST', 'AMERD', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0010-1164: ref=['WHY', 'CHILD', 'IT', 'LL', 'BE', 'STOLE'] +3005-163399-0010-1164: hyp=['WHY', 'CHILD', "IT'LL", 'BE', 'STOLE'] +3005-163399-0011-1165: ref=['IT', 'WAS', 'KINDER', 'THIN', 'ICE', 'BUT', 'I', 'SAYS'] +3005-163399-0011-1165: hyp=['IT', 'WAS', 'KIND', 'OR', 'THIN', 'EYES', 'BUT', 'I', 'SAYS'] +3005-163399-0012-1166: ref=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMP', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0012-1166: hyp=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMPED', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0013-1167: ref=['PRETTY', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILLS', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0013-1167: hyp=['PRETTY', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILL', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0014-1168: ref=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0014-1168: hyp=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0015-1169: ref=['SO', 'I', 'SAYS', 'TO', 'MYSELF', "HERE'S", 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'RESK', 'THE', 'TRUTH'] +3005-163399-0015-1169: hyp=['SO', 'I', 'SAYS', 'TO', 'MYSELF', 'HERE', 'IS', 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'REST', 'THE', 'TRUTH'] +3005-163399-0016-1170: ref=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0016-1170: hyp=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0017-1171: ref=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0017-1171: hyp=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0018-1172: ref=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0018-1172: hyp=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0019-1173: ref=['MISSUS', 'PHELPS', 'SHE', 'JUMPS', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0019-1173: hyp=['MISSUS', 'PHELPS', 'SHE', 'JUMPED', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0020-1174: ref=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0020-1174: hyp=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0021-1175: ref=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 
'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] +3005-163399-0021-1175: hyp=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] +3005-163399-0022-1176: ref=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0022-1176: hyp=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0023-1177: ref=['HE', 'MUST', 'A', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0023-1177: hyp=['HE', 'MUST', 'HAVE', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0024-1178: ref=['OH', "DON'T", 'DISTRESS', 'ME', 'ANY', "MORE'N", "I'M", 'ALREADY', 'DISTRESSED'] +3005-163399-0024-1178: hyp=['OH', "DON'T", 'DISTRESS', 'ME', 'ANY', "MORE'N", "I'M", 'ALREADY', 'DISTRESSED'] +3005-163399-0025-1179: ref=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "AIN'T", 'THAT', 'SOMEBODY', 'COMING'] +3005-163399-0025-1179: hyp=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "HAIN'T", 'THAT', 'SOMEBODY', 'COMIN'] +3005-163399-0026-1180: ref=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0026-1180: hyp=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0027-1181: ref=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0027-1181: hyp=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0028-1182: ref=["IT'S", 'TOM', 'SAWYER'] +3005-163399-0028-1182: hyp=['IS', 'TOM', 'SAWYER'] +3005-163399-0029-1183: ref=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'IT', 'STAYED', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0029-1183: hyp=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'ITS', 'STATE', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0030-1184: ref=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'THAT', 'BOAT'] +3005-163399-0030-1184: hyp=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'MY', 'BOAT'] +3080-5032-0000-312: ref=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0000-312: hyp=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0001-313: ref=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0001-313: hyp=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0002-314: ref=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'HER', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0002-314: hyp=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'A', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0003-315: ref=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0003-315: hyp=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 
'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0004-316: ref=['WELL', 'THE', 'BEST', "ON'T", 'IS', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0004-316: hyp=['WHY', 'THE', 'BEST', 'ON', 'IT', 'IS', 'THAT', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0005-317: ref=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'AND', 'UPON', 'SO', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGEST'] +3080-5032-0005-317: hyp=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'IN', 'A', 'POINT', 'SO', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGERS'] +3080-5032-0006-318: ref=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THREE', 'S', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'AND', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LAID', 'TO', 'MY', 'CHARGE'] +3080-5032-0006-318: hyp=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THIRD', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'IN', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LATE', 'TO', 'MY', 'CHARGE'] +3080-5032-0007-319: ref=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0007-319: hyp=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0008-320: ref=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0008-320: hyp=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0009-321: ref=['MISTER', 'FISH', 'IS', 'THE', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAT', 'ANYBODY', 'MAY', 'PRETEND', 'A', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'BUT', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOUR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0009-321: hyp=['MISTER', 'FISH', 'IS', 'A', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAN', 'ANYBODY', 'MAY', 'PRETEND', 'TO', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'BUT', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 
'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0010-322: ref=['I', 'THINK', 'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0010-322: hyp=['I', 'THINK', 'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0011-323: ref=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUTS', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0011-323: hyp=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUT', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0012-324: ref=['BUT', 'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0012-324: hyp=['BUT', 'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0013-325: ref=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0013-325: hyp=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAVE', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0014-326: ref=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0014-326: hyp=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0015-327: ref=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNT', 'AND', 'SUCH', 'AS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'AND', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'WITH', 'THEIR', 'GOOD', 'NATURE', 'AND', 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0015-327: hyp=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNTS', 'IN', 'SUCH', 'HAS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'IN', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'WITH', 'DEAR', 'GOOD', 'NATURE', "YOU'LL", 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0016-328: ref=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0016-328: hyp=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0017-329: ref=['IF', 'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'SCAPE', 'IT'] +3080-5032-0017-329: hyp=['IF', 
'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'ESCAPE', 'IT'] +3080-5032-0018-330: ref=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'AT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMOURS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 'ENVYING', 'THEIR', 'NEIGHBOURS'] +3080-5032-0018-330: hyp=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'IT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMANS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 'ENVYING', 'THEIR', 'NEIGHBORS'] +3080-5032-0019-331: ref=['THE', 'MATTER', 'IS', 'NOT', 'GREAT', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', "IN'T", 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0019-331: hyp=['THE', 'MATTER', 'IS', 'NOT', 'GREAT', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', 'IN', 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0020-332: ref=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0020-332: hyp=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0021-333: ref=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0021-333: hyp=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0022-334: ref=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] +3080-5032-0022-334: hyp=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 
'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] +3080-5032-0023-335: ref=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0023-335: hyp=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0024-336: ref=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0024-336: hyp=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0025-337: ref=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEKE', 'MIGHT', 'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VERILY', 'BELIEVE', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0025-337: hyp=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEK', 'MIGHT', 'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VERY', 'BELIEVE', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0026-338: ref=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THESE', 'CIVILITIES', 'OF', 'YOUR', "FATHER'S", 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5032-0026-338: hyp=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THE', 'CIVILITIES', 'OF', 'YOUR', 'FATHERS', 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5040-0000-278: ref=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0000-278: hyp=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0001-279: ref=['MY', 'POOR', 'LADY', 'VAVASOUR', 'IS', 'CARRIED', 'TO', 'THE', 'TOWER', 'AND', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'AND', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0001-279: hyp=['MY', 'POOR', 'LADY', 'VAVASOR', 'IS', 'CHARACTERED', 'A', 'TOWER', 'IN', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'AND', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0002-280: ref=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'TALMASH'] +3080-5040-0002-280: hyp=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'THOMMISH'] +3080-5040-0003-281: ref=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0003-281: hyp=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0004-282: ref=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 
'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 'WHO', 'HAD', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', "IN'T", 'BUT', 'THEMSELVES'] +3080-5040-0004-282: hyp=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 'WHO', 'AT', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', 'IN', 'BUT', 'THEMSELVES'] +3080-5040-0005-283: ref=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] +3080-5040-0005-283: hyp=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] +3080-5040-0006-284: ref=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0006-284: hyp=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0007-285: ref=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTERS', 'TO', 'ME', 'NOR', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0007-285: hyp=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTER', 'TO', 'ME', 'NO', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0008-286: ref=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'AGONE', 'THAN', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0008-286: hyp=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'A', 'GONDON', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0009-287: ref=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0009-287: hyp=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0010-288: ref=['BY', 'THE', 'NEXT', 'I', 'SHALL', 'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0010-288: hyp=['BY', 'THE', 'NEXT', 'I', 'SHALL', 
'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0011-289: ref=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', "COWLEY'S", 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0011-289: hyp=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', 'CARLIS', 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0012-290: ref=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLK', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] +3080-5040-0012-290: hyp=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLK', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] +3080-5040-0013-291: ref=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'IT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0013-291: hyp=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'AT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0014-292: ref=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0014-292: hyp=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0015-293: ref=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0015-293: hyp=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0016-294: ref=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0016-294: hyp=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0017-295: ref=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0017-295: hyp=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0018-296: ref=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0018-296: hyp=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0019-297: ref=['I', 'SEE', 'YOU', 'CAN', 'CHIDE', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0019-297: hyp=['I', 'SEE', 'YOU', 'CAN', 'CHID', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 
'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0020-298: ref=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'ON', 'ONE', 'SIDE', 'OF', 'OUR', 'MOUTHS', 'THAN', "T'OTHER", 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0020-298: hyp=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'AND', 'ONE', 'SIDE', 'OF', 'OUR', 'MOTHS', 'THAN', 'THE', 'OTHER', 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0021-299: ref=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'ERE', 'THIS', 'THAT', 'I', 'SCAPED', 'DROWNING'] +3080-5040-0021-299: hyp=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'IF', 'THIS', 'THAT', 'I', 'ESCAPE', 'DROWNING'] +3080-5040-0022-300: ref=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRIT', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0022-300: hyp=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRITE', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0023-301: ref=['HOW', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0023-301: hyp=['OH', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0024-302: ref=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AN', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0024-302: hyp=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AND', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0025-303: ref=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0025-303: hyp=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0026-304: ref=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', 'OUR', 'PRAYERS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0026-304: hyp=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', "I'LL", 'PRAY', 'AS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0027-305: ref=['WELL', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'A', 'TWELVEMONTH', 'TO', 'GAIN', 'ALL', 'THAT', 'THE', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0027-305: hyp=['WHILE', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'AT', 'TWELVE', 'MONTHS', 'TO', 
'GAIN', 'ALL', 'THAT', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0028-306: ref=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0028-306: hyp=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0029-307: ref=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', "E'ER", 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0029-307: hyp=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', 'ERE', 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0030-308: ref=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOU', 'THUS', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0030-308: hyp=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOURSELVES', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0031-309: ref=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'WOULD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NOR', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'OF', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0031-309: hyp=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'HAD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NO', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'ON', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0032-310: ref=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PASSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0032-310: hyp=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PAUSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0033-311: ref=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'OF', 'THIS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEAS', 'COD', 'WITH', 'NINE', 'PEAS', "IN'T", 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'AND', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] +3080-5040-0033-311: hyp=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'WITH', 'ITS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEASE', 'COT', 'WITH', 'NINE', 'PEAS', 'INTO', 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'IT', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] 
+3331-159605-0000-695: ref=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HER', 'SKIRT', 'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PUTTEL', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0000-695: hyp=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HIS', 'GOOD', 'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PATTERN', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0001-696: ref=['THE', 'MORE', 'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0001-696: hyp=['THE', 'MORE', 'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0002-697: ref=['I', 'VE', 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0002-697: hyp=["I'VE", 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0003-698: ref=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0003-698: hyp=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0004-699: ref=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0004-699: hyp=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0005-700: ref=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0005-700: hyp=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0006-701: ref=['LET', 'ME', 'SEE', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0006-701: hyp=['LET', 'ME', 'SEE', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0007-702: ref=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0007-702: hyp=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0008-703: ref=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THAT', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0008-703: hyp=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THE', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOUR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0009-704: ref=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'ACCOMPLISHMENTS', 'OR', 'POSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] 
+3331-159605-0009-704: hyp=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'ACCOMPLISHMENTS', 'OPPOSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] +3331-159605-0010-705: ref=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'WITH', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0010-705: hyp=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'THE', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0011-706: ref=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 'FOUND', 'THAT', 'THIS', 'SIDE', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERY', 'ONE'] +3331-159605-0011-706: hyp=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 'FOUND', 'THAT', 'THIS', 'SIGHT', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERY', 'ONE'] +3331-159605-0012-707: ref=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FLATTERED', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0012-707: hyp=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FURTHER', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0013-708: ref=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'GRATITUDE'] +3331-159605-0013-708: hyp=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'CREDITUDE'] +3331-159605-0014-709: ref=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LOVE', 'WHICH', 'IS', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0014-709: hyp=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LOVE', 'PICTURED', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0015-710: ref=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', 'GUISE', 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'WITH', 'A', 'SPICE', 'OF', 'THE', 'OLD', 'EVE', 'IN', 'HER', 'COMPOSITION'] +3331-159605-0015-710: hyp=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', 'GUISE', 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'BUT', 'THE', 'SPIES', 'OF', 'THE', 'OLD', 'EVE', 'AND', 'HER', 'COMPOSITION'] +3331-159605-0016-711: ref=['WHEN', 'SATURDAY', 'CAME', 'POLLY', 'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', 'COULD', "N'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHAWS', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THOUGH', 'IT', 
'WAS', 'CALLING', 'TIME'] +3331-159605-0016-711: hyp=['WHEN', 'SAID', 'CAME', 'POLLY', 'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', "COULDN'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHORES', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THAT', 'WAS', 'CALLING', 'TIME'] +3331-159605-0017-712: ref=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DOG', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'DO', 'HER', 'DUTY'] +3331-159605-0017-712: hyp=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DARK', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'HER', 'DUTY'] +3331-159605-0018-713: ref=['TAKE', 'HOLD', 'OF', 'MASTER', "CHARLEY'S", 'HAND', 'MISS', 'MAMIE', 'AND', 'WALK', 'PRETTY', 'LIKE', 'WILLY', 'AND', 'FLOSSY', 'SAID', 'THE', 'MAID'] +3331-159605-0018-713: hyp=['TAKE', 'HOLD', 'OF', 'MASSR', "CHARLIE'S", 'HAND', 'MISS', 'MAY', 'AND', 'BUCK', 'PRETTY', 'LIKE', 'BILLY', 'AND', 'FLOSSIE', 'SAID', 'THE', 'MATE'] +3331-159605-0019-714: ref=['AT', 'A', 'STREET', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOL', 'BOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0019-714: hyp=['A', 'DISTRICT', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOLBOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0020-715: ref=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'A', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'THAT', 'SHE', 'HAD', "N'T", 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0020-715: hyp=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'THE', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'TILL', 'SHE', 'HAD', 'NOT', 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0021-716: ref=['SHE', 'REALLY', 'COULD', "N'T", 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0021-716: hyp=['SHE', 'REALLY', 'COULD', 'NOT', 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0022-717: ref=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'ROTHS', 'ANSWERED', 'POLLY'] +3331-159605-0022-717: hyp=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'WORSE', 'ANSWERED', 'POLLY'] +3331-159605-0023-718: ref=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'SO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0023-718: hyp=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'TO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0024-719: ref=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', 'DOES', 'IMPROVE', 'SHE', 'TRIES', 'TO', 'MEND', 'HER', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 
'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] +3331-159605-0024-719: hyp=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', "DOESN'T", 'PROVE', 'SHE', 'TRIES', 'TO', 'MEAN', 'TO', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] +3331-159605-0025-720: ref=['THANK', 'YOU', 'NO'] +3331-159605-0025-720: hyp=['THANK', 'YOU', 'NO'] +3331-159605-0026-721: ref=['HOW', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0026-721: hyp=['HER', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0027-722: ref=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0027-722: hyp=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0028-723: ref=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'RESOLVED', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0028-723: hyp=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'WE', 'SOFT', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0029-724: ref=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BRILLIANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', 'I', 'M', 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOYED'] +3331-159605-0029-724: hyp=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BUOYANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', "I'M", 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOY', 'IT'] +3331-159605-0030-725: ref=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUETTE', 'IN', 'HER', 'AND', 'I', 'VE', 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TOO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WHITE', 'LIE', 'CLEVERLY'] +3331-159605-0030-725: hyp=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUETTE', 'IN', 'HER', 'AND', "I'VE", 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WIDE', 'LIKE', 'LEVELLY'] +3331-159605-0031-726: ref=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 
'LOVER', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 'ANSWER', 'AND', 'ACCEPTED', 'IT', 'IN', 'SILENCE'] +3331-159605-0031-726: hyp=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 'LOVE', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 'ANSWER', 'AND', 'ACCEPTED', 'IN', 'SILENCE'] +3331-159605-0032-727: ref=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVORITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FROM', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 'AFTER', 'ALL'] +3331-159605-0032-727: hyp=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVOURITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FOR', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 'AFTER', 'ALL'] +3331-159605-0033-728: ref=['WAGGING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0033-728: hyp=['WORKING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0034-729: ref=['PERHAPS', 'SHE', 'LL', 'JILT', 'HIM'] +3331-159605-0034-729: hyp=['PERHAPS', "SHE'LL", 'CHILLED', 'HIM'] +3331-159605-0035-730: ref=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0035-730: hyp=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0036-731: ref=['MINNIE', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'PUDDLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'RUBBERS'] +3331-159605-0036-731: hyp=['MANY', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'POTTLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'WRAPPERS'] +3331-159605-0037-732: ref=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', "N'T", 'HE', 'PROPOSED'] +3331-159605-0037-732: hyp=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', 'NOT', 'HE', 'PROPOSED'] +3331-159605-0038-733: ref=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0038-733: hyp=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0039-734: ref=['TRULY', 'TRULY', 'FAN'] +3331-159605-0039-734: hyp=['TRULY', 'JULIE', 'FAN'] +3331-159605-0040-735: ref=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0040-735: hyp=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0041-736: ref=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0041-736: hyp=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0042-737: ref=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0042-737: hyp=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0043-738: ref=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 
'BROKEN', 'OR', 'MIND', 'WHAT', 'SILLY', 'TATTLERS', 'SAY'] +3331-159605-0043-738: hyp=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 'BROKEN', 'OH', 'MIND', 'WHAT', 'DITTY', 'TEDLER', 'SAY'] +3331-159605-0044-739: ref=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0044-739: hyp=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0045-740: ref=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0045-740: hyp=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0046-741: ref=['I', 'M', 'ODD', 'YOU', 'KNOW', 'AND', 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 'ALL', 'MY', 'DAYS'] +3331-159605-0046-741: hyp=["I'M", 'NOT', 'YOU', 'KNOW', 'AND', 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 'ALL', 'MY', 'DAYS'] +3331-159609-0000-742: ref=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SUFFICES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0000-742: hyp=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SURFACES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0001-743: ref=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANCE', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BOTTLED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0001-743: hyp=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANS', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BUTTERED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0002-744: ref=['PITY', 'THAT', 'THE', 'END', 'SHOULD', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WENT', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THOUGH', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 'PARTINGS', 'THAT', 'MUST', 'BE'] +3331-159609-0002-744: hyp=['PITY', 'THAT', 'THE', 'END', 'SHOULD', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WHEN', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THAN', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 
'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 'PARTINGS', 'THAT', 'MUST', 'BE'] +3331-159609-0003-745: ref=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TILL', 'HIS', 'VACATION', 'CAME', 'AND', 'A', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0003-745: hyp=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TO', 'HIS', 'VACATION', 'CAME', 'AND', 'A', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0004-746: ref=['TOM', 'WROTE', 'ONCE', 'A', 'WEEK', 'TO', 'HIS', 'MOTHER', 'BUT', 'THE', 'LETTERS', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0004-746: hyp=['TUMBLED', 'ONES', 'A', 'WEEK', 'TO', 'HIS', 'MOTHER', 'BUT', 'THEY', 'LET', 'US', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0005-747: ref=['NO', 'I', 'M', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DULL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRIFLE', 'BLUE'] +3331-159609-0005-747: hyp=['NOW', 'I', 'AM', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DOLL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRAVEL', 'BLUE'] +3331-159609-0006-748: ref=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', 'IT', 'IS', 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0006-748: hyp=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', 'THIS', 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0007-749: ref=['I', 'TRY', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0007-749: hyp=['I', 'TRIED', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0008-750: ref=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0008-750: hyp=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0009-751: ref=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0009-751: hyp=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0010-752: ref=['POOR', 'POLLY', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0010-752: hyp=['POOR', 'POLLY', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0011-753: ref=['NONE', 'WERE', 'NEEDED', 'HER', 'TELLTALE', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SOFA', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'WHEN', 'THE', 'HUNTERS', 'ARE', 'AFTER', 'IT'] +3331-159609-0011-753: hyp=['NONE', 'WERE', 'NEEDED', 
'HER', 'TELLS', 'HER', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SILVER', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'AND', 'THE', 'HANDLES', 'ARE', 'AFTER', 'IT'] +3331-159609-0012-754: ref=['ONCE', 'OR', 'TWICE', 'BUT', 'SORT', 'OF', 'JOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0012-754: hyp=['ONCE', 'OR', 'TWICE', 'THAT', 'SORT', 'OF', 'CHOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0013-755: ref=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0013-755: hyp=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0014-756: ref=['IT', 'WAS', 'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'IN', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEW', 'REVELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0014-756: hyp=['IT', 'WAS', 'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'AND', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEWER', 'RELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0015-757: ref=['I', 'HOPE', 'MARIA', 'BAILEY', 'IS', 'ALL', 'HE', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', "N'T", 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0015-757: hyp=['I', 'HOPE', 'MARIA', "BAILEY'S", 'ONLY', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', 'NOT', 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0016-758: ref=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0016-758: hyp=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0017-759: ref=['SUPPOSE', 'I', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', 'THERE', 'IS', 'ANYTHING', 'TO', 'TELL'] +3331-159609-0017-759: hyp=['SUPPOSE', 'HER', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', "THERE'S", 'ANYTHING', 'TO', 'TELL'] +3331-159609-0018-760: ref=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0018-760: hyp=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 
'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0019-761: ref=['IT', 'WAS', 'A', 'VERY', 'DIFFERENT', 'WINTER', 'FROM', 'THE', 'LAST', 'FOR', 'BOTH', 'THE', 'GIRLS'] +3331-159609-0019-761: hyp=['IT', 'WAS', 'VERY', 'DIFFERENT', 'WINDOW', 'FROM', 'THE', 'LAST', 'ABOVE', 'THE', 'GIRL'] +3331-159609-0020-762: ref=['IF', 'FANNY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BETTER', 'THAN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SYDNEY', 'AT', 'LEAST', 'FOR', 'HE', 'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0020-762: hyp=['IF', 'ANY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BY', 'THEN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SIDNEY', 'AT', 'LEAST', 'FOR', 'HE', 'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0021-763: ref=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'WHEN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OVER', 'BOOK', 'OR', 'SEWING', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHAWS'] +3331-159609-0021-763: hyp=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'IN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OF', 'A', 'BOOK', 'OR', 'SOON', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHORES'] +3331-159609-0022-764: ref=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINTER', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'AND', 'WONDERED', 'AT', 'IT', 'MOST'] +3331-159609-0022-764: hyp=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINDOW', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'ENVANTED', 'AT', 'IT', 'MOST'] +3331-159609-0023-765: ref=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILEY', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'UTTER', 'DARKNESS'] +3331-159609-0023-765: hyp=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILIQUE', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'OTHER', 'DARKNESS'] +3331-159609-0024-766: ref=['FANNY', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'BROUGHT', 'TIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0024-766: hyp=['WHEN', 'HE', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'POURED', 'HIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0025-767: ref=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 
'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 'THAN', 'ANY', 'WE', 'READ'] +3331-159609-0025-767: hyp=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 'THAN', 'ANY', 'WE', 'READ'] +3528-168656-0000-864: ref=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0000-864: hyp=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0001-865: ref=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0001-865: hyp=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0002-866: ref=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FRANCOIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0002-866: hyp=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FROIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIAN', 'MONSEIGNEUR', 'SAINT', 'JULIAN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSIDIUS', 'MONSIEUR', 'SAINT', 'USUVIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0003-867: ref=['AND', 'THE', 'SCHOOL', 'GIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THEIR', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'VOCAL', 'MOTHERS', 'FROWN'] +3528-168656-0003-867: hyp=['AND', 'THE', 'SCHOOLGIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THE', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'VOCAL', 'MOTHERS', 'FROWN'] +3528-168656-0004-868: ref=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0004-868: hyp=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0005-869: ref=['THE', 'RULE', 'OF', 'FONTEVRAULT', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0005-869: hyp=['THE', 'RULE', 'OF', 'FONTREVAL', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0006-870: ref=['SHE', 'WOULD', 'NOT', 'SHOW', 'THIS', 'OBJECT', 'TO', 'ANYONE'] +3528-168656-0006-870: hyp=['SHE', 'WOULD', 'NOT', 'SHOW', 'THE', 'SUBJECT', 'TO', 'ANY', 'ONE'] +3528-168656-0007-871: ref=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'UNOCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0007-871: hyp=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'ON', 'OCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0008-872: ref=['SOME', 'UNIQUE', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0008-872: hyp=['SOME', 'EUIK', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0009-873: ref=['THEY', 'LOST', 'THEMSELVES', 
'IN', 'CONJECTURES'] +3528-168656-0009-873: hyp=['THEY', 'LOST', 'THEMSELVES', 'IN', 'CONJECTURES'] +3528-168656-0010-874: ref=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0010-874: hyp=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0011-875: ref=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCER', 'IS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0011-875: hyp=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCERS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0012-876: ref=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168656-0012-876: hyp=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168669-0000-877: ref=['THE', 'PRIORESS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0000-877: hyp=['THE', 'PIRAS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0001-878: ref=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0001-878: hyp=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0002-879: ref=['FATHER', 'FAUVENT'] +3528-168669-0002-879: hyp=['FATHER', 'VUENT'] +3528-168669-0003-880: ref=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0003-880: hyp=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0004-881: ref=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0004-881: hyp=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0005-882: ref=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0005-882: hyp=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0006-883: ref=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'SIDE', 'OF', 'THE', 'ALTAR'] +3528-168669-0006-883: hyp=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'THOUGHT', 'OF', 'THE', 'ALTAR'] +3528-168669-0007-884: ref=['THE', 'SLAB', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0007-884: hyp=['THE', 'FLAP', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0008-885: ref=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0008-885: hyp=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0009-886: ref=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0009-886: hyp=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0010-887: ref=['BECAUSE', 'DOM', 'MABILLON', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLONUS', 'HORSTIUS', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', 'MERLONUS', 'HORSTIUS', 'NEITHER', 'DO', 
'I'] +3528-168669-0010-887: hyp=['BECAUSE', 'DON', 'MARVALAN', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLINUS', 'HORSES', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', "MERLINA'S", 'HORSES', 'NEITHER', 'DO', 'I'] +3528-168669-0011-888: ref=['MERIT', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCK', 'YARD'] +3528-168669-0011-888: hyp=['MARRIAGE', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCKYARD'] +3528-168669-0012-889: ref=['AND', 'A', 'WOMAN', 'IS', 'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0012-889: hyp=['AND', 'A', 'WOMAN', 'IS', 'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0013-890: ref=['AND', 'CAN', 'YOU', 'GET', 'A', 'LEVER'] +3528-168669-0013-890: hyp=['AND', 'CAN', 'YOU', 'GET', 'A', 'LOVER'] +3528-168669-0014-891: ref=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0014-891: hyp=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0015-892: ref=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0015-892: hyp=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0016-893: ref=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0016-893: hyp=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0017-894: ref=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0017-894: hyp=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0018-895: ref=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0018-895: hyp=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0019-896: ref=['FAUVENT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0019-896: hyp=['FOR', 'THAT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0020-897: ref=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0020-897: hyp=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0021-898: ref=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'REVEREND', 'MOTHER'] +3528-168669-0021-898: hyp=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'ROBIN', 'MOTHER'] +3528-168669-0022-899: ref=['WHEN', 'THE', 'VAULT', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0022-899: hyp=['WHEN', 'THE', 'WALL', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0023-900: ref=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0023-900: hyp=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: ref=['FATHER', 'FAUVENT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: hyp=['FATHER', 'FERVENT', 'REVEREND', 'MOTHER'] +3528-168669-0025-902: ref=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0025-902: hyp=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0026-903: ref=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0026-903: hyp=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0027-904: ref=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] +3528-168669-0027-904: hyp=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] 
+3528-168669-0028-905: ref=['AND', 'THEN', 'THE', 'WIND', 'IS', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0028-905: hyp=['AND', 'THEN', 'THE', 'WIND', 'DOES', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0029-906: ref=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0029-906: hyp=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0030-907: ref=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BETHUNE', 'A', 'JANSENIST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0030-907: hyp=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BESOON', 'A', 'GENTLEST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0031-908: ref=['THE', 'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0031-908: hyp=['THE', 'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0032-909: ref=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'MORE', 'OFTEN'] +3528-168669-0032-909: hyp=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'MORE', 'OFTEN'] +3528-168669-0033-910: ref=['HEY', 'MORE', 'OFTEN'] +3528-168669-0033-910: hyp=['HEY', 'MORE', 'OFTEN'] +3528-168669-0034-911: ref=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0034-911: hyp=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0035-912: ref=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0035-912: hyp=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0036-913: ref=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0036-913: hyp=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0037-914: ref=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0037-914: hyp=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0038-915: ref=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0038-915: hyp=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0039-916: ref=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PRIORESS'] +3528-168669-0039-916: hyp=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'TO', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PIRATES'] +3528-168669-0040-917: ref=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0040-917: hyp=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0041-918: ref=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0041-918: hyp=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0042-919: ref=['IN', 'HER', 'LIFETIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0042-919: hyp=['IN', 'HER', 'LIFE', 'TIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0043-920: ref=['FATHER', 'FAUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 
'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0043-920: hyp=['FATHER', 'FUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0044-921: ref=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0044-921: hyp=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0045-922: ref=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0045-922: hyp=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0046-923: ref=['IF', 'YOU', 'HAD', 'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HER', 'CELL', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0046-923: hyp=['IF', 'YOU', 'HAD', 'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HERSELF', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0047-924: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0047-924: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0048-925: ref=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0048-925: hyp=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0049-926: ref=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0049-926: hyp=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0050-927: ref=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0050-927: hyp=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0051-928: ref=['FORTUNATELY', 'THE', 'PRIORESS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0051-928: hyp=['FORTUNATELY', 'THE', 'PIOUS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0052-929: ref=['SHE', 'CONTINUED', 'FATHER', 'FAUVENT'] +3528-168669-0052-929: hyp=['SHE', 'CONTINUED', 'FATHER', 'PROVENCE'] +3528-168669-0053-930: ref=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0053-930: hyp=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0054-931: ref=['SAINT', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TOMB', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARRICIDES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 'WAS', 'DONE'] +3528-168669-0054-931: hyp=['SAID', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TWO', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARASITES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 
'WAS', 'DONE'] +3528-168669-0055-932: ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0055-932: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0056-933: ref=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0056-933: hyp=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0057-934: ref=['FATHER', 'FAUVENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0057-934: hyp=['FATHER', 'VENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0058-935: ref=['IT', 'IS', 'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0058-935: hyp=['IT', 'IS', 'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0059-936: ref=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0059-936: hyp=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0060-937: ref=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'COMMUNITY'] +3528-168669-0060-937: hyp=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'CUNITY'] +3528-168669-0061-938: ref=['THE', 'FOUR', 'MOTHER', 'PRECENTORS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0061-938: hyp=['THE', 'FOREMOTHER', 'PRESENTERS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0062-939: ref=['NO', 'IN', 'LOWERING', 'THE', 'COFFIN'] +3528-168669-0062-939: hyp=['NO', 'INLORING', 'THE', 'COFFIN'] +3528-168669-0063-940: ref=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0063-940: hyp=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0064-941: ref=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0064-941: hyp=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0065-942: ref=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0065-942: hyp=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0066-943: ref=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0066-943: hyp=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0067-944: ref=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0067-944: hyp=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0068-945: ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0068-945: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0069-946: ref=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0069-946: hyp=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0070-947: ref=['BUT', 'IT', 'IS', 'FORBIDDEN'] 
+3528-168669-0070-947: hyp=['BUT', 'IT', 'IS', 'FORBIDDEN'] +3528-168669-0071-948: ref=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0071-948: hyp=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0072-949: ref=['THINK', 'FATHER', 'FAUVENT', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0072-949: hyp=['THINK', 'FATHER', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0073-950: ref=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0073-950: hyp=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0074-951: ref=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'COMMISSION'] +3528-168669-0074-951: hyp=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'COMMISSION'] +3528-168669-0075-952: ref=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0075-952: hyp=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0076-953: ref=['CHONODEMAIRE', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GAULS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0076-953: hyp=['SHADOW', 'DE', 'MER', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GULFS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0077-954: ref=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0077-954: hyp=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0078-955: ref=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CRUX', 'DUM', 'VOLVITUR', 'ORBIS'] +3528-168669-0078-955: hyp=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CREW', 'DOOM', 'VOLVETER', 'ORBUS'] +3528-168669-0079-956: ref=['THE', 'PRIORESS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVERFULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LOQUACITY', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0079-956: hyp=['THE', 'PYRIUS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVER', 'FULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LEQUESTITY', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0080-957: ref=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIT', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0080-957: hyp=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIS', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0081-958: ref=['THE', 'FIRST', 'ABBOT', 'OF', 'CLAIRVAUX'] +3528-168669-0081-958: hyp=['THE', 'FIRST', 'ABBOT', 'OF', 'CLERVAL'] +3528-168669-0082-959: ref=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 
'FOUR', 'EMPERORS', 'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0082-959: hyp=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 'FOUR', 'EMPERORS', 'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANNONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0083-960: ref=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'DEPARTMENT'] +3528-168669-0083-960: hyp=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 'SENATORY', 'DEPARTMENT'] +3528-168669-0084-961: ref=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSARY', 'OF', 'POLICE', 'SUCH', 'IS', 'THE', 'AGE', 'SILENCE', 'FAUVENT'] +3528-168669-0084-961: hyp=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSORY', 'OF', 'POLICE', 'SUCH', 'WAS', 'THE', 'AGE', 'SILENCE', 'FAVAN'] +3528-168669-0085-962: ref=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'TO', 'SEPULTURE'] +3528-168669-0085-962: hyp=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'CHOOSE', 'SEPULTURE'] +3528-168669-0086-963: ref=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0086-963: hyp=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0087-964: ref=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0087-964: hyp=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0088-965: ref=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0088-965: hyp=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0089-966: ref=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0089-966: hyp=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0090-967: ref=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGITTAIRE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALONE', 'BISHOP', 'OF', 'EMBRUN', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MOMMOL'] +3528-168669-0090-967: hyp=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGATURE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALON', 'BISHOP', 'OF', 'EMBRON', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MAMMA'] +3528-168669-0091-968: ref=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0091-968: hyp=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0092-969: ref=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0092-969: hyp=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0093-970: ref=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0093-970: hyp=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0094-971: ref=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0094-971: hyp=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0095-972: ref=['BY', 'ORDER', 'OF', 'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0095-972: 
hyp=['BY', 'ORDER', 'OF', 'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0096-973: ref=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0096-973: hyp=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0097-974: ref=['GAUTHIER', 'BISHOP', 'OF', 'CHALONS', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0097-974: hyp=['GATHIER', 'BISHOP', 'OF', 'CALON', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0098-975: ref=['THE', 'PRIORESS', 'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0098-975: hyp=['THE', 'PRIORS', 'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0099-976: ref=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0099-976: hyp=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0100-977: ref=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0100-977: hyp=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0101-978: ref=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0101-978: hyp=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0102-979: ref=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0102-979: hyp=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0103-980: ref=['A', 'PAUSE', 'ENSUED'] +3528-168669-0103-980: hyp=['A', 'PAUSE', 'AND', 'SUIT'] +3528-168669-0104-981: ref=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BELL'] +3528-168669-0104-981: hyp=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BEL'] +3528-168669-0105-982: ref=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0105-982: hyp=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0106-983: ref=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0106-983: hyp=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0107-984: ref=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0107-984: hyp=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0108-985: ref=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0108-985: hyp=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0109-986: ref=['THAT', 'IS', 'WELL', 'FATHER', 'FAUVENT'] +3528-168669-0109-986: hyp=['THAT', 'IS', 'WELL', 'FATHER', 'VENT'] +3528-168669-0110-987: ref=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0110-987: hyp=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0111-988: ref=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0111-988: hyp=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0112-989: ref=['REVEREND', 'MOTHER', 'WHAT'] 
+3528-168669-0112-989: hyp=['RIVER', 'MOTHER', 'WHAT'] +3528-168669-0113-990: ref=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0113-990: hyp=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0114-991: ref=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0114-991: hyp=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0115-992: ref=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] +3528-168669-0115-992: hyp=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] +3528-168669-0116-993: ref=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0116-993: hyp=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0117-994: ref=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0117-994: hyp=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0118-995: ref=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0118-995: hyp=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0119-996: ref=['MOTHER', 'ASCENSION', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0119-996: hyp=['MOTHER', 'ASCENSION', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0120-997: ref=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LEVER'] +3528-168669-0120-997: hyp=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LOVE'] +3528-168669-0121-998: ref=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0121-998: hyp=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0122-999: ref=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0122-999: hyp=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0123-1000: ref=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0123-1000: hyp=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0124-1001: ref=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'FAUVENT'] +3528-168669-0124-1001: hyp=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'PREVENT'] +3528-168669-0125-1002: ref=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0125-1002: hyp=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0126-1003: ref=['AH', 'THE', 'DE', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0126-1003: hyp=['AH', 'LEDA', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0127-1004: ref=['THE', 'VIL', 'STUCK', 'FAST', 'IN', 'HIS', 'THROAT'] +3528-168669-0127-1004: hyp=['THE', 'VILLE', 'STUCK', 'FAST', 'IN', 'HIS', 
'THROAT'] +3528-168669-0128-1005: ref=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0128-1005: hyp=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0129-1006: ref=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVEREND', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0129-1006: hyp=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVEREND', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0130-1007: ref=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] +3528-168669-0130-1007: hyp=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] +3538-142836-0000-1567: ref=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONARY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0000-1567: hyp=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONERY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0001-1568: ref=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUANTITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0001-1568: hyp=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUALITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0002-1569: ref=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MILDEWED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0002-1569: hyp=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MELTED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0003-1570: ref=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0003-1570: hyp=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0004-1571: ref=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SYRUP', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SYRUP', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0004-1571: hyp=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SERF', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SERF', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0005-1572: ref=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0005-1572: hyp=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0006-1573: ref=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 'LIKE', 'OIL'] +3538-142836-0006-1573: hyp=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 
'LIKE', 'OIL'] +3538-142836-0007-1574: ref=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESERVE', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'HAS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0007-1574: hyp=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESERVE', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'IS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0008-1575: ref=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0008-1575: hyp=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0009-1576: ref=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0009-1576: hyp=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0010-1577: ref=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'AN', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0010-1577: hyp=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'OUR', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0011-1578: ref=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WILL', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0011-1578: hyp=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WOULD', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0012-1579: ref=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SIEVE', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'ARE', 'TURNED'] +3538-142836-0012-1579: hyp=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SEA', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'RETURNED'] +3538-142836-0013-1580: ref=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LEMON', 'CHIPS', 'ARE', 'PRESERVED'] +3538-142836-0013-1580: hyp=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LINENSHIPS', 'ARE', 'PRESERVED'] +3538-142836-0014-1581: ref=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PASTES', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'REQUEST'] +3538-142836-0014-1581: hyp=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PACE', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'QUEST'] +3538-142836-0015-1582: ref=['MARMALADES', 'AND', 'JAMS', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEY', 'ARE', 'PRESERVES', 'OF', 'A', 'HALF', 'LIQUID', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RINDS', 'WITH', 'SUGAR'] 
+3538-142836-0015-1582: hyp=['MARMALITIES', 'AND', 'JAMES', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEIR', 'PRESERVES', 'OF', 'HALF', 'LIKELY', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RHINS', 'WITH', 'SUGAR'] +3538-142836-0016-1583: ref=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0016-1583: hyp=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0017-1584: ref=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 'FIRE', 'BUT', 'ON', 'A', 'TRIVET', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0017-1584: hyp=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 'FIRE', 'BUT', 'ON', 'A', 'TRIBUT', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0018-1585: ref=['CONFECTIONARY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0018-1585: hyp=['CONFECTIONERY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0019-1586: ref=['IN', 'SPEAKING', 'OF', 'CONFECTIONARY', 'IT', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VARIOUS', 'FRUITS', 'FLOWERS', 'HERBS', 'ROOTS', 'AND', 'JUICES', 'WHICH', 'WHEN', 'BOILED', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFICERE', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONARY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'CUISINE'] +3538-142836-0019-1586: hyp=['IN', 'SPEAKING', 'OF', 'CONFECTIONERIES', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VARIOUS', 'FRUITS', 'FLOWERS', 'HERBS', 'RUTHS', 'AND', 'JUICES', 'WHICH', 'ONE', 'BOILED', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFERS', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONERY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'COISINE'] +3538-142836-0020-1587: ref=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0020-1587: hyp=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0021-1588: ref=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGNS', 'OF', 'OUR', 'TWO', 
'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESSERTS'] +3538-142836-0021-1588: hyp=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGN', 'OF', 'OUR', 'TWO', 'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESERTS'] +3538-142836-0022-1589: ref=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0022-1589: hyp=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0023-1590: ref=['ICES'] +3538-142836-0023-1590: hyp=['ISIS'] +3538-142836-0024-1591: ref=['AT', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'ICES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0024-1591: hyp=['A', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'IISES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0025-1592: ref=['THE', 'SPADDLE', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0025-1592: hyp=['THE', 'SPADEL', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0026-1593: ref=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-142836-0026-1593: hyp=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'OF', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-163619-0000-1500: ref=['THERE', 'WAS', 'ONCE', 'ON', 'A', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WIFE'] +3538-163619-0000-1500: hyp=['THERE', 'WAS', 'ONCE', 'TILL', 'THE', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WI'] +3538-163619-0001-1501: ref=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0001-1501: hyp=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0002-1502: ref=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0002-1502: hyp=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0003-1503: ref=['KISS', 'ME', 'GIRL', 'SAID', 'THE', 'HEAD'] +3538-163619-0003-1503: hyp=['KISS', 
'ME', 'GO', 'SAID', 'THE', 'HEAD'] +3538-163619-0004-1504: ref=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0004-1504: hyp=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0005-1505: ref=['THE', 'YOUTH', 'PROMISED', 'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0005-1505: hyp=['THESE', 'PROMISED', 'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0006-1506: ref=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0006-1506: hyp=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0007-1507: ref=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0007-1507: hyp=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0008-1508: ref=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0008-1508: hyp=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0009-1509: ref=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0009-1509: hyp=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0010-1510: ref=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0010-1510: hyp=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0011-1511: ref=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHER', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0011-1511: hyp=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHERS', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0012-1512: ref=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0012-1512: hyp=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0013-1513: ref=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 'HER', 
'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'DAY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0013-1513: hyp=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 'HER', 'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'THEY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0014-1514: ref=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSELF'] +3538-163619-0014-1514: hyp=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSEL'] +3538-163622-0000-1515: ref=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOALS', 'ASKED', 'THE', 'KING'] +3538-163622-0000-1515: hyp=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOLDS', 'ASKED', 'THE', 'KING'] +3538-163622-0001-1516: ref=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'THAT', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0001-1516: hyp=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0002-1517: ref=['HAST', 'THOU', 'WATCHED', 'FAITHFULLY', 'AND', 'WELL', 'THE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0002-1517: hyp=['HAST', 'THOU', 'WATCH', 'FAITHFULLY', 'AND', 'WELL', 'THE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0003-1518: ref=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0003-1518: hyp=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0004-1519: ref=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0004-1519: hyp=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0005-1520: ref=['THEN', 'THE', 'KING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0005-1520: hyp=['THE', 'MACKING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0006-1521: ref=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOALS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'A', 'CLEFT', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 'OLD', 
'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0006-1521: hyp=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOOLS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'CLIFF', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 'OLD', 'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0007-1522: ref=['COME', 'HITHER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0007-1522: hyp=['COMMANDER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0008-1523: ref=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOALS', 'RUN', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0008-1523: hyp=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOLDS', 'WARM', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0009-1524: ref=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0009-1524: hyp=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0010-1525: ref=['ON', 'THE', 'THIRD', 'DAY', 'CINDERLAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0010-1525: hyp=['ON', 'THE', 'THIRD', 'DAY', 'SAID', 'THE', 'LAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0011-1526: ref=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'AND', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0011-1526: hyp=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'WHEN', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0012-1527: ref=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'CINDERLAD'] +3538-163622-0012-1527: hyp=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'SANDAL', 'LAD'] +3538-163622-0013-1528: ref=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDERLAD'] +3538-163622-0013-1528: hyp=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDER', 'LAD'] +3538-163622-0014-1529: ref=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0014-1529: hyp=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0015-1530: ref=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0015-1530: hyp=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FULL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0016-1531: ref=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDERLAD'] +3538-163622-0016-1531: hyp=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDER', 'LAD'] +3538-163622-0017-1532: ref=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 'A', 
'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0017-1532: hyp=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 'A', 'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0018-1533: ref=['CINDERLAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DRAUGHT', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WIELD', 'THE', 'SWORD', 'WITH', 'PERFECT', 'EASE'] +3538-163622-0018-1533: hyp=['SOONER', 'LAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DROP', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WHEEL', 'THE', 'SWORD', 'WITH', 'PERFECT', 'EASE'] +3538-163622-0019-1534: ref=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'HAS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0019-1534: hyp=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'IS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0020-1535: ref=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0020-1535: hyp=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'ALONG', 'A', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0021-1536: ref=['AND', 'NOW', 'INQUIRED', 'THE', 'FOAL', 'SEEST', 'THOU', 'NOTHING', 'NOW'] +3538-163622-0021-1536: hyp=['AND', 'NOW', 'INQUIRED', 'THE', 'FULL', 'CEASE', 'DONE', 'NOTHING', 'NOW'] +3538-163622-0022-1537: ref=['NOW', 'THEN', 'SAID', 'THE', 'FOAL', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0022-1537: hyp=['NOW', 'THEN', 'SAID', 'THE', 'FOUR', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0023-1538: ref=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0023-1538: hyp=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0024-1539: ref=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'CINDERLAD'] +3538-163622-0024-1539: hyp=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'SIR', 'LAD'] +3538-163624-0000-1540: ref=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0000-1540: hyp=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0001-1541: ref=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'FOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0001-1541: hyp=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'THOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0002-1542: ref=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 
'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0002-1542: hyp=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0003-1543: ref=['SO', 'HE', 'ASKED', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 'SHE', 'SAID'] +3538-163624-0003-1543: hyp=['SO', 'YES', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 'SHE', 'SAID'] +3538-163624-0004-1544: ref=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0004-1544: hyp=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0005-1545: ref=['HE', 'IS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0005-1545: hyp=['HE', 'HAS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0006-1546: ref=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0006-1546: hyp=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0007-1547: ref=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0007-1547: hyp=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0008-1548: ref=['SO', 'REGIN', 'MADE', 'A', 'SWORD', 'AND', 'SIGURD', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'ON', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0008-1548: hyp=['SO', 'RIGAN', 'MADE', 'A', 'SWORD', 'AND', 'CIGAR', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'AND', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0009-1549: ref=['THEN', 'SIGURD', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGIN'] +3538-163624-0009-1549: hyp=['THEN', 'CIGAR', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGAN'] +3538-163624-0010-1550: ref=['SO', 'SIGURD', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0010-1550: hyp=['SO', 'CIGARS', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0011-1551: ref=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 
'DEEP', 'VALLEY'] +3538-163624-0011-1551: hyp=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'HAD', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 'DEEP', 'VALLEY'] +3538-163624-0012-1552: ref=['BUT', 'SIGURD', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0012-1552: hyp=['BUT', 'CIGARET', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAHAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0013-1553: ref=['SIGURD', 'SAID', 'I', 'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0013-1553: hyp=['CIGAR', 'SAID', 'I', 'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0014-1554: ref=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'NO', 'BRAVE', 'MAN', 'LETS', 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0014-1554: hyp=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'KNOW', 'BRAVE', 'MAN', "LET'S", 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0015-1555: ref=['DIE', 'THOU', 'FAFNIR', 'AND', 'THEN', 'FAFNIR', 'DIED'] +3538-163624-0015-1555: hyp=['GUY', 'THOU', 'FAFFNER', 'AND', 'THEN', 'STAFF', 'DIED'] +3538-163624-0016-1556: ref=['THEN', 'SIGURD', 'RODE', 'BACK', 'AND', 'MET', 'REGIN', 'AND', 'REGIN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNIR'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0016-1556: hyp=['THEN', 'CIGAR', 'RODE', 'BACK', 'AND', 'MET', 'RIGAN', 'AND', 'RIGAN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNER'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0017-1557: ref=['SO', 'SIGURD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNIR', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0017-1557: hyp=['SO', 'SIR', 'GOD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNER', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0018-1558: ref=['THERE', 'IS', 'SIGURD', 'ROASTING', "FAFNIR'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0018-1558: hyp=['THEIR', 'CIGAR', 'ROASTING', "FASTENER'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0019-1559: ref=['THAT', 'LET', 'HIM', 'DO', 'AND', 'THEN', 'RIDE', 'OVER', 'HINDFELL', 'TO', 'THE', 'PLACE', 'WHERE', 'BRYNHILD', 'SLEEPS'] +3538-163624-0019-1559: hyp=['THAT', 'LET', 'HIM', 'DO', 'THEN', 'RIDE', 'OVER', 'HINFIELD', 'TO', 'THE', 'PLACE', 'WHERE', 'BURNHILD', 'SLEEPS'] +3538-163624-0020-1560: ref=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMEST', 'FOR', 'HER', 'WAKING', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0020-1560: hyp=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMES', 'FOR', 'HER', 'WAKING', 'WHO', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0021-1561: ref=['THEN', 'HE', 'TOOK', 'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0021-1561: hyp=['THEN', 'HE', 'TOOK', 
'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0022-1562: ref=['THEN', 'SIGURD', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0022-1562: hyp=['THEN', 'CIGAR', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0023-1563: ref=['THEN', "BRYNHILD'S", 'FATHER', 'TOLD', 'GUNNAR', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNAR', 'SET', 'HIS', 'HORSE', 'AT', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 'FACE', 'IT'] +3538-163624-0023-1563: hyp=['WHEN', "BRUNHOLD'S", 'FATHER', 'TOLD', 'GUNNER', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNER', 'SET', 'HIS', 'HORSE', 'TO', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 'FACE', 'IT'] +3538-163624-0024-1564: ref=['FOR', 'ONE', 'DAY', 'WHEN', 'BRYNHILD', 'AND', 'GUDRUN', 'WERE', 'BATHING', 'BRYNHILD', 'WADED', 'FARTHEST', 'OUT', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', "GUIRUN'S", 'SUPERIOR'] +3538-163624-0024-1564: hyp=['FOR', 'ONE', 'DAY', 'WHEN', 'BURNEHELD', 'AND', 'GUNDRAIN', 'WERE', 'BATHING', 'BURNEHELD', 'WAITED', 'FARTHEST', 'SOUTH', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', 'GUNDERING', 'SUPERIOR'] +3538-163624-0025-1565: ref=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0025-1565: hyp=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0026-1566: ref=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILL', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +3538-163624-0026-1566: hyp=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILT', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +367-130732-0000-1466: ref=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0000-1466: hyp=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0001-1467: ref=['WHEN', 'IS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0001-1467: hyp=['WHEN', 'AS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0002-1468: ref=['THIS', 'QUESTION', 'AND', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMER', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSTER', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] +367-130732-0002-1468: hyp=['THIS', 'QUESTION', 'IN', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMARY', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'THE', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSTER', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] 
+367-130732-0003-1469: ref=['THE', 'PACIFIC', 'CRAYFISH', 'HOWEVER', 'SERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTEND', 'THAT', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'IN', 'FLAVOR', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIAN', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIAN'] +367-130732-0003-1469: hyp=['THE', 'PACIFIC', 'CRAYFISH', 'HOURSERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTENDED', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'FLAVORITE', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIA', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIA'] +367-130732-0004-1470: ref=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0004-1470: hyp=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0005-1471: ref=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WORLD', 'WIDE', 'FAME', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'A', 'LA', 'NEWBERG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'DELMONICO'] +367-130732-0005-1471: hyp=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WOOLWRIGHT', 'FAME', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'ALAD', 'NEWBURG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'HE', 'WAS', 'PREPARED', 'IN', 'THE', 'DOMONICO'] +367-130732-0006-1472: ref=['LOBSTER', 'A', 'LA', 'NEWBERG'] +367-130732-0006-1472: hyp=['LOBSTER', 'OLY', 'NEWBURG'] +367-130732-0007-1473: ref=['ONE', 'POUND', 'OF', 'LOBSTER', 'MEAT', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0007-1473: hyp=['ONE', 'POUND', 'OF', 'LOBSTER', 'MEAT', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0008-1474: ref=['PUT', 'THIS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0008-1474: hyp=['PUT', 'THIS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0009-1475: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'THIN', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0009-1475: hyp=['SERVE', 'IN', 'A', 'CHIEFING', 'DISH', 'WITH', 'FLIND', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0010-1476: ref=['KING', 'OF', 'SHELL', 'FISH'] +367-130732-0010-1476: hyp=['KING', 'OF', 'SHELLFISH'] +367-130732-0011-1477: ref=['ONE', 'HAS', 'TO', 'COME', 'TO', 'SAN', 
'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELL', 'FISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0011-1477: hyp=['ONE', 'HAS', 'TO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELLFISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0012-1478: ref=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAN', 'FRANCISCO'] +367-130732-0012-1478: hyp=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAMPANCISCO'] +367-130732-0013-1479: ref=["GOBEY'S", 'PASSED', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'AND', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'IN', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0013-1479: hyp=['GOBYS', 'PASS', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'AND', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0014-1480: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', "GOBEY'S", 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESTIGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0014-1480: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'GOBIES', 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESAGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0015-1481: ref=["GOBEY'S", 'CRAB', 'STEW'] +367-130732-0015-1481: hyp=["GOBY'S", 'CRABS', 'DO'] +367-130732-0016-1482: ref=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'OF', 'THE', 'FAT', 'FROM', 'THE', 'SHELL'] +367-130732-0016-1482: hyp=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'THE', 'BAT', 'FROM', 'THE', 'SHELL'] +367-130732-0017-1483: ref=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0017-1483: hyp=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0018-1484: ref=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0018-1484: hyp=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0019-1485: ref=['HEAT', 'THIS', 'IN', 'A', 'STEWPAN', 'AND', 'WHEN', 'SIMMERING', 'ADD', 'THE', 'SHERRY', 'AND', 'CRAB', 'MEAT', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0019-1485: hyp=['HEAT', 'THIS', 'IN', 'A', 'STEWPANT', 'AND', 'WENT', 'SIMMERING', 'AT', 'THE', 'SHERRY', 'AND', 'CRAB', 'ME', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0020-1486: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] +367-130732-0020-1486: hyp=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] 
+367-130732-0021-1487: ref=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0021-1487: hyp=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0022-1488: ref=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'HOF', 'BRAU', 'HAS', 'BEEN', 'MAKING', 'A', 'SPECIALTY', 'OF', 'THEM'] +367-130732-0022-1488: hyp=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'WHOLE', 'BROW', 'HAS', 'BEEN', 'MAKING', 'A', 'SPECIALTY', 'OF', 'THEM'] +367-130732-0023-1489: ref=['ALL', 'OF', 'THE', 'BETTER', 'CLASS', 'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0023-1489: hyp=['ALL', 'THE', 'BETTER', 'CLASS', 'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0024-1490: ref=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0024-1490: hyp=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'IT', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0025-1491: ref=['BISQUE', 'OF', 'CRAWFISH'] +367-130732-0025-1491: hyp=['THIS', 'OF', 'CROFISH'] +367-130732-0026-1492: ref=['TAKE', 'THIRTY', 'CRAWFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLAWS', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0026-1492: hyp=['TAKE', 'THIRTY', 'CROPFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLOTH', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0027-1493: ref=['MINCE', 'OR', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AN', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'ADD', 'A', 'BAY', 'LEAF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MINIONETTE', 'PEPPER', 'AND', 'TWO', 'OUNCES', 'OF', 'BUTTER'] +367-130732-0027-1493: hyp=['MINCE', 'ARE', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AN', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'AT', 'A', 'BAY', 'LEAF', 'OF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MEAN', 'ON', 'IT', 'PEPPER', 'AND', 'TWO', 'OZ', 'OF', 'BUTTER'] +367-130732-0028-1494: ref=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 'TEN', 'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CRAWFISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0028-1494: hyp=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 
'TEN', 'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CROPPISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0029-1495: ref=['ALLOW', 'THIS', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSOMME', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0029-1495: hyp=['ALLOW', 'US', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSUM', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0030-1496: ref=['PICK', 'OUT', 'THE', 'CRAWFISH', 'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0030-1496: hyp=['PICK', 'OUT', 'THE', 'CRAW', 'FISH', 'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0031-1497: ref=['PICK', 'THE', 'SHELLS', 'OFF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CRAWFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0031-1497: hyp=['PICK', 'THE', 'SHELLS', 'OF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CROFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0032-1498: ref=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HALF', 'OF', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0032-1498: hyp=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HALF', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0033-1499: ref=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'ON', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'CORAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-130732-0033-1499: hyp=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'IN', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'COAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-293981-0000-1445: ref=['I', 'SWEAR', 'IT', 'ANSWERED', 'SANCHO'] +367-293981-0000-1445: hyp=['I', 'SWEAR', 'ANSWERED', 'SANCHO'] +367-293981-0001-1446: ref=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', "ANYONE'S", 'GOOD', 'NAME'] +367-293981-0001-1446: hyp=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', 'ANY', "ONE'S", 'GOOD', 'NAME'] +367-293981-0002-1447: ref=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 'END', 'OF', 'YOUR', "WORSHIP'S", 'DAYS', 'AND', 'GOD', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TOMORROW'] +367-293981-0002-1447: hyp=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 
'END', 'OF', 'YOUR', 'WORSHIP', 'STAYS', 'AND', 'GONE', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TO', 'MORROW'] +367-293981-0003-1448: ref=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THAT', 'INCOMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WHACKS', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0003-1448: hyp=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THE', 'INN', 'COMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WAX', 'THAT', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0004-1449: ref=['UNLUCKY', 'ME', 'AND', 'THE', 'MOTHER', 'THAT', 'BORE', 'ME'] +367-293981-0004-1449: hyp=['UNLUCKY', 'ME', 'INTO', 'THE', 'MOTHER', 'THAT', 'BORE', 'ME'] +367-293981-0005-1450: ref=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0005-1450: hyp=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0006-1451: ref=['IT', 'CANNOT', 'BE', 'THE', 'MOOR', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANYONE'] +367-293981-0006-1451: hyp=['IT', 'CANNOT', 'BE', 'THE', 'MORE', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANY', 'ONE'] +367-293981-0007-1452: ref=['IF', 'THEY', "DON'T", 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0007-1452: hyp=['IF', 'THEY', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0008-1453: ref=['MINE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUFFICIENT', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0008-1453: hyp=['MIKE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUSPICION', 'OF', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0009-1454: ref=['THE', 'OFFICER', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'IT', 'GOOD', 'MAN'] +367-293981-0009-1454: hyp=['THE', 'OFFICERS', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'A', 'GOOD', 'MAN'] +367-293981-0010-1455: ref=['SANCHO', 'GOT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SENOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'THE', 'FAVOUR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WINE', 'FOR', 'IT', 'IS', 'WANTED', 'TO', 'CURE', 'ONE', 'OF', 'THE', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0010-1455: hyp=['SANCHA', 'CUT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 
'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SIGNOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'TO', 'FAVOR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WHITE', 'FOR', 'IT', 'IS', 'WATER', 'TO', 'CURE', 'ONE', 'OF', 'OUR', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0011-1456: ref=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATERIALS', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'AND', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0011-1456: hyp=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATURES', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'IT', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0012-1457: ref=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LEFT', 'IN', 'THE', 'PIGSKIN', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0012-1457: hyp=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LET', 'AN', 'OPINION', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0013-1458: ref=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GULPED', 'DOWN', 'AND', 'DRAINED', 'OFF', 'VERY', 'LITTLE', 'LESS', 'THAN', 'HIS', 'MASTER'] +367-293981-0013-1458: hyp=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GULPED', 'IT', 'DOWN', 'AND', 'DRAINED', 'UP', 'VERY', 'LITTLE', 'LESS', 'IN', 'HIS', 'MASTER'] +367-293981-0014-1459: ref=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', 'WOE', 'BETIDE', 'ME', 'AND', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0014-1459: hyp=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', "WON'T", 'BETIDE', 'ME', 'AND', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0015-1460: ref=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'AND', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0015-1460: hyp=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'IN', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0016-1461: ref=['THEN', 'THIS', 'IS', 'AN', 'INN', 'SAID', 'DON', 'QUIXOTE'] +367-293981-0016-1461: hyp=['THEN', 'THIS', 'IS', 'AN', 'IN', 'SAID', 'DON', 
'QUIXOTE'] +367-293981-0017-1462: ref=['AND', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0017-1462: hyp=['IN', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0018-1463: ref=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKETED', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'ATTENTIVELY', 'WAS', 'PERSUADED', 'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'HIS', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0018-1463: hyp=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKET', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'ATTENTIVELY', 'WAS', 'PERSUADED', 'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'THE', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0019-1464: ref=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0019-1464: hyp=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0020-1465: ref=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINK', 'IT', 'NOT', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'OF', 'IT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +367-293981-0020-1465: hyp=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINKIN', 'UP', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'WHAT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +3764-168670-0000-1666: ref=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0000-1666: hyp=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0001-1667: ref=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0001-1667: hyp=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0002-1668: ref=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0002-1668: hyp=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0003-1669: ref=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 'HOUSE', 'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0003-1669: hyp=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 'HOUSE', 
'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0004-1670: ref=['EVERYTHING', 'IS', 'ARRANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0004-1670: hyp=['EVERYTHING', 'IS', 'RANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0005-1671: ref=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0005-1671: hyp=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0006-1672: ref=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0006-1672: hyp=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0007-1673: ref=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 'OUT'] +3764-168670-0007-1673: hyp=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 'OUT'] +3764-168670-0008-1674: ref=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0008-1674: hyp=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0009-1675: ref=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0009-1675: hyp=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0010-1676: ref=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0010-1676: hyp=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0011-1677: ref=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0011-1677: hyp=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0012-1678: ref=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0012-1678: hyp=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0013-1679: ref=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORESS', 'EXPECTS', 'YOU'] +3764-168670-0013-1679: hyp=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORS', 'EXPECTS', 'YOU'] +3764-168670-0014-1680: ref=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0014-1680: hyp=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FOR', 'CHAUVELIN', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0015-1681: ref=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0015-1681: hyp=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0016-1682: ref=['THAT', 'THE', 'PRIORESS', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 'THE', 
'DECEASED'] +3764-168670-0016-1682: hyp=['THAT', 'THE', 'PRIOR', 'REST', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 'THE', 'DECEASED'] +3764-168670-0017-1683: ref=['THAT', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISE', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'LOWER', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0017-1683: hyp=['THAT', 'HE', 'FOR', 'SCHLEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISE', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'BLOW', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0018-1684: ref=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] +3764-168670-0018-1684: hyp=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] +3764-168670-0019-1685: ref=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0019-1685: hyp=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0020-1686: ref=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0020-1686: hyp=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0021-1687: ref=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0021-1687: hyp=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0022-1688: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BOMB', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0022-1688: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BOMB', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0023-1689: ref=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0023-1689: hyp=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0024-1690: ref=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED', 'AH'] +3764-168670-0024-1690: hyp=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED'] +3764-168670-0025-1691: ref=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0025-1691: hyp=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0026-1692: ref=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0026-1692: hyp=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0027-1693: ref=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0027-1693: hyp=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0028-1694: ref=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'WHAT', 'CHURCH'] +3764-168670-0028-1694: hyp=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'A', 'WATCH'] +3764-168670-0029-1695: ref=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 'CAN', 'ENTER'] 
+3764-168670-0029-1695: hyp=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THOUGH', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 'CAN', 'ENTER'] +3764-168670-0030-1696: ref=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0030-1696: hyp=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0031-1697: ref=['NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0031-1697: hyp=['AND', 'NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0032-1698: ref=['ONLY', 'TO', 'ALLOW', 'THE', "UNDERTAKER'S", 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0032-1698: hyp=['ONLY', 'TO', 'ALLOW', 'THE', 'UNDERTAKERS', 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0033-1699: ref=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0033-1699: hyp=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0034-1700: ref=['WHO', 'SPREADS', 'THE', 'PALL', 'OVER', 'IT'] +3764-168670-0034-1700: hyp=['WHO', 'SPREADS', 'THE', 'POOL', 'OVER', 'IT'] +3764-168670-0035-1701: ref=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0035-1701: hyp=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'BEDROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0036-1702: ref=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0036-1702: hyp=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0037-1703: ref=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0037-1703: hyp=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0038-1704: ref=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0038-1704: hyp=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0039-1705: ref=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0039-1705: hyp=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0040-1706: ref=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0040-1706: hyp=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0041-1707: ref=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0041-1707: hyp=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0042-1708: ref=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAITS', 'THAN', 'THIS'] +3764-168670-0042-1708: hyp=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAIT', 'THAN', 'THIS'] +3764-168670-0043-1709: ref=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 'CONTRACT', 'HIMSELF', 
'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0043-1709: hyp=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 'CONTRACT', 'HIMSELF', 'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0044-1710: ref=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0044-1710: hyp=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0045-1711: ref=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0045-1711: hyp=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0046-1712: ref=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0046-1712: hyp=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0047-1713: ref=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0047-1713: hyp=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0048-1714: ref=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0048-1714: hyp=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0049-1715: ref=['THE', 'OVER', 'PRUDENT', 'CATS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0049-1715: hyp=['THE', 'OVERPRUDENT', 'CATS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0050-1716: ref=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0050-1716: hyp=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0051-1717: ref=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 'YOU', 'OUT', 'OF', 'THE', 'GRAVE'] +3764-168670-0051-1717: hyp=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 'OUT', 'OF', 'THE', 'GRAVE'] 
+3764-168670-0052-1718: ref=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0052-1718: hyp=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0053-1719: ref=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0053-1719: hyp=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0054-1720: ref=['THE', 'HEARSE', 'HALTS', 'THE', "UNDERTAKER'S", 'MEN', 'KNOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0054-1720: hyp=['THE', 'HOUSEHOLTS', 'THE', 'UNDERTAKERS', 'MEN', 'NOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0055-1721: ref=['THE', 'PRIEST', 'SAYS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0055-1721: hyp=['THE', 'PRIESTS', 'AS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0056-1722: ref=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0056-1722: hyp=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0057-1723: ref=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168670-0057-1723: hyp=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168671-0000-1724: ref=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MAINE', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSS', 'BONES', 'AND', 'TEARS'] +3764-168671-0000-1724: hyp=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MIN', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSSBONES', 'AND', 'TEARS'] +3764-168671-0001-1725: ref=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0001-1725: hyp=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0002-1726: ref=['A', 'MOURNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0002-1726: hyp=['THE', 'MORNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0003-1727: ref=['BEHIND', 'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] +3764-168671-0003-1727: hyp=['BEHIND', 
'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] +3764-168671-0004-1728: ref=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'IN', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0004-1728: hyp=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'IN', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0005-1729: ref=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 'DOME', 'OF', 'THE', 'INVALIDES'] +3764-168671-0005-1729: hyp=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 'DOME', 'OF', 'THE', 'INVALIDES'] +3764-168671-0006-1730: ref=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0006-1730: hyp=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0007-1731: ref=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'VAUGIRARD', 'IT', 'HINTED', 'AT', 'POVERTY', 'PERE', 'LACHAISE', 'IF', 'YOU', 'PLEASE'] +3764-168671-0007-1731: hyp=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'ROGER', 'IT', 'HINTED', 'AT', 'POVERTY', 'PALACE', 'IF', 'YOU', 'PLEASE'] +3764-168671-0008-1732: ref=['TO', 'BE', 'BURIED', 'IN', 'PERE', 'LACHAISE', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0008-1732: hyp=['TO', 'BE', 'BURIED', 'IN', 'PERFELASHES', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0009-1733: ref=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'TO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0009-1733: hyp=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'INTO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0010-1734: ref=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0010-1734: hyp=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0011-1735: ref=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 
'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0011-1735: hyp=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0012-1736: ref=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'IS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0012-1736: hyp=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'HAS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0013-1737: ref=['MAKE', 'AS', 'MANY', 'LAWS', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0013-1737: hyp=['MAKE', 'AS', 'MANY', 'NOISE', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0014-1738: ref=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0014-1738: hyp=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0015-1739: ref=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0015-1739: hyp=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0016-1740: ref=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0016-1740: hyp=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0017-1741: ref=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0017-1741: hyp=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0018-1742: ref=['HE', 'PLAYED', 'WITH', 'FATHER', 'MESTIENNE'] +3764-168671-0018-1742: hyp=['HE', 'PLAYED', 'WITH', 'FATHER', 'MESSION'] +3764-168671-0019-1743: ref=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0019-1743: hyp=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0020-1744: ref=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0020-1744: hyp=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0021-1745: ref=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABORING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0021-1745: hyp=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABORING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0022-1746: ref=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0022-1746: hyp=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0023-1747: ref=['THE', 'GRAVE', 'DIGGER', 'YES'] +3764-168671-0023-1747: hyp=['THE', 'BRAVE', 'DIGGER', 'YES'] +3764-168671-0024-1748: ref=['YOU', 'I'] +3764-168671-0024-1748: hyp=['YOU', 'I'] +3764-168671-0025-1749: ref=['FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER', 'HE', 'WAS'] +3764-168671-0025-1749: hyp=['FATHER', 'MACHIN', 'IS', 'THE', 'GRAVE', 'DIGGER', 'HE', 'WAS'] +3764-168671-0026-1750: 
ref=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0026-1750: hyp=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0027-1751: ref=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0027-1751: hyp=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0028-1752: ref=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0028-1752: hyp=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0029-1753: ref=['BUT', 'HE', 'PERSISTED', 'FEEBLY', 'FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0029-1753: hyp=['BUT', 'HE', 'PERSISTED', 'FEEBLY', 'FATHER', 'MESSIAN', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0030-1754: ref=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'LENOIR', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0030-1754: hyp=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'NOIR', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0031-1755: ref=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0031-1755: hyp=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0032-1756: ref=['ARE', 'YOU', 'NOT', 'COMRADE', "WE'LL", 'GO', 'AND', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0032-1756: hyp=['ARE', 'YOU', 'NOT', 'COMRADE', "WE'LL", 'GO', 'AND', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0033-1757: ref=['THE', 'MAN', 'REPLIED'] +3764-168671-0033-1757: hyp=['THE', 'MAN', 'REPLIED'] +3764-168671-0034-1758: ref=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0034-1758: hyp=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0035-1759: ref=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0035-1759: hyp=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0036-1760: ref=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'GRIBIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0036-1760: hyp=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'CLAVIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0037-1761: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0037-1761: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0038-1762: ref=['SO', 'FATHER', 'MESTIENNE', 'IS', 'DEAD'] +3764-168671-0038-1762: hyp=['MISS', 'OH', 'FATHER', 'MESS', 'TEEN', 'IS', 'DEAD'] +3764-168671-0039-1763: ref=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0039-1763: hyp=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0040-1764: ref=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 'WAS', 'FATHER', "MESTIENNE'S", 'TURN', 'FATHER', 'MESTIENNE', 'DIED'] +3764-168671-0040-1764: hyp=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 'WAS', 'FATHER', "MESTIENNE'S", 'TURN', 'FOR', 
'THE', 'MESSIAN', 'DIED'] +3764-168671-0041-1765: ref=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0041-1765: hyp=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0042-1766: ref=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0042-1766: hyp=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0043-1767: ref=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0043-1767: hyp=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0044-1768: ref=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 'LEADING', 'TO', 'THE', 'NUNS', 'CORNER'] +3764-168671-0044-1768: hyp=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 'LEADING', 'TO', 'THE', "NUN'S", 'CORNER'] +3764-168671-0045-1769: ref=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0045-1769: hyp=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0046-1770: ref=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0046-1770: hyp=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0047-1771: ref=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'PRYTANEUM', 'TOWN', 'HALL'] +3764-168671-0047-1771: hyp=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'BRITTANNIUM', 'TOWN', 'HALL'] +3764-168671-0048-1772: ref=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSSES', 'ON', 'CHANGE', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0048-1772: hyp=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSES', 'UNCHANGED', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0049-1773: ref=['SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0049-1773: hyp=['BUT', 'SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0050-1774: ref=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0050-1774: hyp=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0051-1775: ref=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0051-1775: hyp=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0052-1776: ref=['FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0052-1776: hyp=['A', 'FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0053-1777: ref=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'A', 'SUPERIOR', 'SMILE'] +3764-168671-0053-1777: hyp=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'THE', 'SUPERIOR', 'SMILE'] +3764-168671-0054-1778: ref=['ONE', 'MUST', 'EAT'] +3764-168671-0054-1778: hyp=['ONE', 'MUST', 'EAT'] 
+3997-180294-0000-1800: ref=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'WAKE'] +3997-180294-0000-1800: hyp=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'AWAKE'] +3997-180294-0001-1801: ref=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0001-1801: hyp=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0002-1802: ref=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THAT', 'I', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] +3997-180294-0002-1802: hyp=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THY', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] +3997-180294-0003-1803: ref=['THERE', 'ARE', 'BOLTS', 'ON', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0003-1803: hyp=['THERE', 'ARE', 'BOLTS', 'IN', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0004-1804: ref=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0004-1804: hyp=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0005-1805: ref=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0005-1805: hyp=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0006-1806: ref=['IT', 'SEEMED', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGED', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0006-1806: hyp=['IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGS', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0007-1807: ref=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNSELS', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENT', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0007-1807: hyp=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNCIL', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENTS', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0008-1808: ref=['THE', 'MORE', 'A', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BEING', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 
'IS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MAN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0008-1808: hyp=['THE', 'MORE', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BE', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 'AS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MEN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0009-1809: ref=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'SURELY', 'MUST', 'THEY', 'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTEN', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERIOUS', 'VEIL'] +3997-180294-0009-1809: hyp=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'TRULY', 'MUST', 'THEY', 'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTENED', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'HE', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERY', 'VEIL'] +3997-180294-0010-1810: ref=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HAVE', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0010-1810: hyp=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HALF', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0011-1811: ref=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0011-1811: hyp=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0012-1812: ref=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0012-1812: hyp=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0013-1813: ref=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0013-1813: hyp=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0014-1814: ref=['IN', 'ORDER', 'TO', 'DISTURB', 'THE', 'LABOURERS', 'IN', 'THE', 'FIELD', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 'CRIES', 'FOR', 'HELP'] +3997-180294-0014-1814: hyp=['IN', 
'ORDER', 'TO', 'DISTURB', 'THE', 'LABORERS', 'IN', 'THE', 'FIELDS', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 'CRIES', 'FOR', 'HELP'] +3997-180294-0015-1815: ref=['IT', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'THEY', 'LOVE', 'SERIOUSLY'] +3997-180294-0015-1815: hyp=['THIS', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'HE', 'LOVED', 'SERIOUSLY'] +3997-180294-0016-1816: ref=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DRAUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0016-1816: hyp=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DROUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0017-1817: ref=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'LIAISON'] +3997-180294-0017-1817: hyp=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'YEAR', 'SONG'] +3997-180294-0018-1818: ref=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MAD', 'GAIETY'] +3997-180294-0018-1818: hyp=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MADGE', 'GAIETY'] +3997-180294-0019-1819: ref=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0019-1819: hyp=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0020-1820: ref=['HOW', 'WHY'] +3997-180294-0020-1820: hyp=['HOW', 'WHY'] +3997-180294-0021-1821: ref=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0021-1821: hyp=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0022-1822: ref=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0022-1822: hyp=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0023-1823: ref=['COME', 'DURING', 'THE', 'THIRD', "ENTR'ACTE"] +3997-180294-0023-1823: hyp=['COME', 'DURING', 'THE', 'THIRD', 'ENTRACT'] +3997-180294-0024-1824: ref=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0024-1824: hyp=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0025-1825: ref=['ONLY', 'ONE', 'REMAINED', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0025-1825: hyp=['ONLY', 'ONE', 'REMAINS', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0026-1826: ref=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 
'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0026-1826: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0027-1827: ref=['DID', 'SHE', 'LOVE', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0027-1827: hyp=['THAT', 'SHE', 'LOVED', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0028-1828: ref=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0028-1828: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0029-1829: ref=['YOU', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONICAL', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0029-1829: hyp=['HE', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONIC', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0030-1830: ref=['WHERE', 'AT', 'HOME'] +3997-180294-0030-1830: hyp=['WHERE', 'AT', 'HOME'] +3997-180294-0031-1831: ref=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0031-1831: hyp=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0032-1832: ref=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0032-1832: hyp=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0033-1833: ref=['NONETHELESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WAITING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180294-0033-1833: hyp=['NONE', 'THE', 'LESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WINNING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180297-0000-1834: ref=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0000-1834: hyp=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0001-1835: ref=['YOU', 'IN', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0001-1835: hyp=['YOU', 'IN', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0002-1836: ref=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0002-1836: hyp=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0003-1837: ref=['DURING', 'THIS', 'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] 
+3997-180297-0003-1837: hyp=['DURING', 'THIS', 'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] +3997-180297-0004-1838: ref=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0004-1838: hyp=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0005-1839: ref=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0005-1839: hyp=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0006-1840: ref=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'DO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0006-1840: hyp=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'TO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0007-1841: ref=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0007-1841: hyp=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0008-1842: ref=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0008-1842: hyp=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0009-1843: ref=['HONESTLY', 'DO', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0009-1843: hyp=['ON', 'THE', 'SUIT', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0010-1844: ref=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0010-1844: hyp=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0011-1845: ref=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0011-1845: hyp=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0012-1846: ref=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0012-1846: hyp=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0013-1847: ref=['WHEN', 'I', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'MY', 'PART', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', "MAN'S", 'DESIRE', 'HAS', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 
'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0013-1847: hyp=['WHEN', 'THEY', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'MY', 'PART', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', "MAN'S", 'DESIRE', 'HAD', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0014-1848: ref=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRES', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0014-1848: hyp=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRE', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0015-1849: ref=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0015-1849: hyp=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0016-1850: ref=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0016-1850: hyp=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0017-1851: ref=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0017-1851: hyp=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0018-1852: ref=['NEVER', 'DO', 'THEY', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0018-1852: hyp=['NEVER', 'DID', 'HE', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0019-1853: ref=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'IN', 'OUR', 'CARRIAGE', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'THEATRE'] +3997-180297-0019-1853: hyp=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'AND', 'ARE', 'PARISH', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'FUTURE'] +3997-180297-0020-1854: ref=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0020-1854: hyp=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'THE', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0021-1855: ref=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'BUT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0021-1855: hyp=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'OR', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0022-1856: ref=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] 
+3997-180297-0022-1856: hyp=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] +3997-180297-0023-1857: ref=['MARGUERITE', 'TIRED', 'OUT', 'WITH', 'THIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PUT', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0023-1857: hyp=['MARGUERITE', 'HIRED', 'OUT', 'WITH', 'HIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PULL', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0024-1858: ref=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0024-1858: hyp=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0025-1859: ref=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0025-1859: hyp=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0026-1860: ref=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0026-1860: hyp=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0027-1861: ref=['I', 'TORE', 'THE', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0027-1861: hyp=['I', 'TOLD', 'A', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0028-1862: ref=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0028-1862: hyp=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0029-1863: ref=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0029-1863: hyp=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0030-1864: ref=['ONE', 'HAS', 'TO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0030-1864: hyp=['ONE', 'HAS', 'TWO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0031-1865: ref=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-180297-0031-1865: hyp=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-182399-0000-1779: ref=['OL', 'MISTAH', 'BUZZARD', 'GRINNED'] +3997-182399-0000-1779: hyp=['ALL', 'MISTER', 'BUZZARD', 'GRINNED'] +3997-182399-0001-1780: ref=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0001-1780: hyp=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0002-1781: ref=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 'BLACK', 'HEADED', 'COUSIN', 'OF', 'OL', 'MISTAH', 'BUZZARD', 'VERY', 
'CURIOUS', 'INDEED'] +3997-182399-0002-1781: hyp=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 'BLACK', 'HEADED', 'COUSIN', 'OF', 'OLD', 'MISTER', 'BUZZARD', 'VERY', 'CURIOUS', 'INDEED'] +3997-182399-0003-1782: ref=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0003-1782: hyp=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0004-1783: ref=['PLEASE', 'MISTER', 'BUZZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0004-1783: hyp=['PLEASE', 'MISTER', 'BOZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0005-1784: ref=['NOW', 'OL', 'MISTAH', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BEGGED', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0005-1784: hyp=['NOW', 'ALL', 'MISTER', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BEGGED', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0006-1785: ref=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', 'GRANDPAP', 'BUZZARD', 'HAD', 'HIS', 'LIL', 'FALLING', 'OUT', 'WITH', 'OL', 'KING', 'EAGLE', 'AND', 'DONE', 'FLY', 'SO', 'HIGH', 'HE', "SCO'TCH", 'THE', 'FEATHERS', 'OFFEN', 'HIS', 'HAID', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPAP', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JES', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0006-1785: hyp=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', 'GRANDPAP', 'BUZZARD', 'HAD', 'HIS', 'LITTLE', 'FALLING', 'ON', 'WITH', 'OLD', 'KING', 'EAGLE', 'AND', 'DONE', 'FLIES', 'SO', 'HIGH', 'HE', 'SCORCHED', 'THE', 'FEATHERS', 'OFF', 'IN', 'HIS', 'HEAD', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPA', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JUST', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0007-1786: ref=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NUISANCE', 'OF', 'HISSELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EV'YBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0007-1786: hyp=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NOTIONS', 'OF', 'HIS', 'SELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EVERYBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0008-1787: ref=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', "FAM'LY", "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'AND', 'MEDDLE', 'IN', 'HE', 'COULD', 'ASK', 'MO', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AN', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0008-1787: hyp=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', 'FAMILY', "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'A', 'MEDDLE', 'IN', 'HE', 'COULD', 'ASK', 'MORE', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AND', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0009-1788: ref=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0009-1788: hyp=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0010-1789: ref=['SO', 'WE', 'UNS', 'SIT', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OL', 'JACK', 'FROST', 'GETS', 'TO', 'STRAYING', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0010-1789: hyp=['SO', 'WE', 'UNSTEAD', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OLD', 
'JACK', 'FROST', 'GETS', 'THE', 'STRAIN', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0011-1790: ref=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPAP', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0011-1790: hyp=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPA', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0012-1791: ref=['IT', 'WAS', 'ON', 'A', 'LIL', 'OL', 'HOUSE', 'A', 'LIL', 'OL', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0012-1791: hyp=['IT', 'WAS', 'ON', 'THE', 'LITTLE', 'OLD', 'HOUSE', 'A', 'LITTLE', 'OLD', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0013-1792: ref=['WHY', 'HE', 'JES', 'STRETCH', 'HIS', 'FOOL', 'HAID', 'AS', 'FAR', 'DOWN', 'THAT', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AN', 'LISTEN', 'AN', 'LISTEN'] +3997-182399-0013-1792: hyp=['WHY', 'HE', 'JUST', 'STRETCH', 'HIS', 'FULL', 'HEAD', 'AS', 'FAR', 'DOWN', 'THE', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AND', 'LISTEN', 'AND', 'LISTEN'] +3997-182399-0014-1793: ref=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0014-1793: hyp=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0015-1794: ref=['WILL', "YO'", 'ALLS', 'PLEASE', 'SPEAK', 'A', 'LIL', 'LOUDER', 'HE', 'HOLLER', 'DOWN', 'THE', 'CHIMNEY', 'JES', 'LIKE', 'THAT'] +3997-182399-0015-1794: hyp=['WELL', 'YE', 'ALL', 'PLEASE', 'SPEAK', 'A', 'LITTLE', 'LOUDER', 'HE', 'HOLLERED', 'DOWN', 'THE', 'CHIMNEY', 'JUST', 'LIKE', 'THAT'] +3997-182399-0016-1795: ref=['YES', 'SAH', 'SHE', "SHO'LY", 'WAS', 'PLUMB', 'SCARED'] +3997-182399-0016-1795: hyp=['YES', 'SAD', 'SHE', 'SURELY', 'WAS', 'PLUM', 'SCARED'] +3997-182399-0017-1796: ref=['THEY', 'LIKE', 'TO', 'CHOKE', 'THAT', 'NO', 'COUNT', 'BUZZARD', 'TO', 'DEATH'] +3997-182399-0017-1796: hyp=['THEY', 'LIKED', 'TO', 'CHOKE', 'THAT', 'NO', 'COMPASSER', 'TO', 'DEATH'] +3997-182399-0018-1797: ref=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AN', 'TRY', 'TO', 'BRUSH', 'THAT', 'SOOT', 'OFF', 'BUT', 'IT', 'DONE', 'GET', 'INTO', 'THE', 'SKIN', 'AN', 'IT', 'STAY', 'THERE'] +3997-182399-0018-1797: hyp=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AND', 'TRY', 'TO', 'BRUSH', 'THAT', 'SUIT', 'OFF', 'BUT', 'IT', 'DONE', 'GET', 'INTO', 'THE', 'SKIN', 'AND', 'IT', 'STAY', 'THERE'] +3997-182399-0019-1798: ref=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'AROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0019-1798: hyp=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'ROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0020-1799: ref=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', "FROG'S"] +3997-182399-0020-1799: hyp=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', 'FROGS'] +4198-12259-0000-203: ref=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0000-203: hyp=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0001-204: ref=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0001-204: hyp=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0002-205: ref=['A', 'CESSATION', 'AND', 'TRUCE', 'WITH', 'THIRST'] +4198-12259-0002-205: hyp=['A', 'CESSATION', 'AND', 'TRUCE', 'WITH', 'THIRST'] +4198-12259-0003-206: ref=['YOU', 'HAVE', 'CATCHED', 'A', 'COLD', 
'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0003-206: hyp=['YOU', 'HAVE', 'CAST', 'A', 'COLD', 'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0004-207: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'BUT', 'AT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0004-207: hyp=['BY', 'THE', 'VALLEY', 'OF', 'SAINT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'WITHOUT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0005-208: ref=['WHICH', 'WAS', 'FIRST', 'THIRST', 'OR', 'DRINKING'] +4198-12259-0005-208: hyp=['WHICH', 'WAS', 'FIRST', 'THOSE', 'DRINKING'] +4198-12259-0006-209: ref=['WHAT', 'IT', 'SEEMS', 'I', 'DO', 'NOT', 'DRINK', 'BUT', 'BY', 'AN', 'ATTORNEY'] +4198-12259-0006-209: hyp=['WHAT', 'IT', 'SEEMS', 'I', 'DO', 'NOT', 'DRINK', 'BUT', 'BUY', 'AN', 'ATTORNEY'] +4198-12259-0007-210: ref=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0007-210: hyp=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0008-211: ref=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'STARK', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'AMONGST', 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILLS', 'IT'] +4198-12259-0008-211: hyp=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'START', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'AMONGST', 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILL', 'IT'] +4198-12259-0009-212: ref=['HE', 'DRINKS', 'IN', 'VAIN', 'THAT', 'FEELS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0009-212: hyp=['HE', 'DRINKS', 'THEIR', 'VEIN', 'THAT', 'FILLS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0010-213: ref=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GIRDS', 'AND', 'PETREL'] +4198-12259-0010-213: hyp=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GOOD', 'AND', 'PETEL'] +4198-12259-0011-214: ref=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0011-214: hyp=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0012-215: ref=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0012-215: hyp=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0013-216: ref=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THEIR', 'CANS'] +4198-12259-0013-216: hyp=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THEIR', 'CANS'] +4198-12259-0014-217: ref=['WELL', 'CACKED', 'WELL', 'SUNG'] +4198-12259-0014-217: hyp=['WELL', 'CAGLED', 'WELL', 'SUNG'] +4198-12259-0015-218: ref=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0015-218: hyp=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0016-219: ref=['I', 'DRINK', 'NO', 'MORE', 'THAN', 'A', 'SPONGE'] +4198-12259-0016-219: hyp=['I', 'DRANK', 'NO', 'MORE', 'THAN', 'HIS', 'SPINES'] +4198-12259-0017-220: ref=['I', 'DRINK', 'LIKE', 'A', 'TEMPLAR', 'KNIGHT'] +4198-12259-0017-220: hyp=['I', 'DRINK', 'LIKE', 'A', 'TENT', 'LAW', 'NIGHT'] +4198-12259-0018-221: ref=['AND', 'I', 'TANQUAM', 'SPONSUS'] +4198-12259-0018-221: hyp=['AND', 'I', 'TEN', 'QUALMS', 'BONSES'] 
+4198-12259-0019-222: ref=['AND', 'I', 'SICUT', 'TERRA', 'SINE', 'AQUA'] +4198-12259-0019-222: hyp=['AND', 'I', 'SICUT', 'TERRACE', 'IN', 'AQUA'] +4198-12259-0020-223: ref=['GIVE', 'ME', 'A', 'SYNONYMON', 'FOR', 'A', 'GAMMON', 'OF', 'BACON'] +4198-12259-0020-223: hyp=['GIVE', 'ME', 'A', 'SNYM', 'FOR', 'A', 'GAMIN', 'OF', 'BACON'] +4198-12259-0021-224: ref=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'PULLEY'] +4198-12259-0021-224: hyp=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'POLY'] +4198-12259-0022-225: ref=['A', 'LITTLE', 'RAIN', 'ALLAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THE', 'THUNDER'] +4198-12259-0022-225: hyp=['A', 'LITTLE', 'RAIN', 'A', 'LAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THAT', 'THUNDER'] +4198-12259-0023-226: ref=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FROM', 'MY', 'BALLOCK', 'WOULD', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UDDER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0023-226: hyp=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FOR', 'MY', 'BALLIC', 'WILL', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UTTER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0024-227: ref=['HERE', 'PAGE', 'FILL'] +4198-12259-0024-227: hyp=['HERE', 'PAGE', 'FILL'] +4198-12259-0025-228: ref=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURISDICTION'] +4198-12259-0025-228: hyp=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURIS', 'DIXON'] +4198-12259-0026-229: ref=['I', 'WAS', 'WONT', 'HERETOFORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0026-229: hyp=['I', 'WAS', 'WONT', 'HERE', 'TO', 'FORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0027-230: ref=['HEYDAY', 'HERE', 'ARE', 'TRIPES', 'FIT', 'FOR', 'OUR', 'SPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GODEBILLIOS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREAK'] +4198-12259-0027-230: hyp=['HEY', 'THEE', 'HERE', 'A', 'TRITE', 'FIT', 'FOR', 'OURSPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GO', 'TO', 'BE', 'YOURS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREET'] +4198-12259-0028-231: ref=['O', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LASH', 'THEM', 'SOUNDLY', 'YET', 'THRIFTILY'] +4198-12259-0028-231: hyp=['OH', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LAST', 'THEM', 'SOUNDLY', 'YET', 'DRIFTILY'] +4198-12259-0029-232: ref=['SPARROWS', 'WILL', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOB', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0029-232: hyp=['SPARROWS', 'WOULD', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOBBED', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0030-233: ref=['HO', 'THIS', 'WILL', 'BANG', 'IT', 'SOUNDLY'] +4198-12259-0030-233: hyp=['OH', 'THIS', 'WAS', "BENNETT'S", 'ONLY'] +4198-12259-0031-234: ref=['BUT', 'THIS', 'SHALL', 'BANISH', 'IT', 'UTTERLY'] +4198-12259-0031-234: hyp=['BUT', 'THIS', 'OUR', 'BANACY', 'UTTERLY'] +4198-12259-0032-235: ref=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NOT', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0032-235: hyp=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 
'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NIGH', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0033-236: ref=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0033-236: hyp=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0034-237: ref=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGESTON', 'BUT', 'THE', 'THIRST', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0034-237: hyp=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGERSON', 'BUT', 'THAT', 'THOSE', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0035-238: ref=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'THE', 'BITING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0035-238: hyp=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'ABIDING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0036-239: ref=['WHITE', 'WINE', 'HERE', 'WINE', 'BOYS'] +4198-12259-0036-239: hyp=['WHITE', 'WHY', 'HERE', 'WINE', 'BOYS'] +4198-12259-0037-240: ref=['O', 'LACHRYMA', 'CHRISTI', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0037-240: hyp=['OH', 'LACK', 'REMAR', 'CHRISTIE', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0038-241: ref=["I'FAITH", 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0038-241: hyp=['I', 'FAITH', 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0039-242: ref=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0039-242: hyp=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0040-243: ref=['MY', 'PRENTICESHIP', 'IS', 'OUT', 'I', 'AM', 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0040-243: hyp=['MY', 'PRENTICE', 'IT', 'IS', 'OUT', "I'M", 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0041-244: ref=['I', 'SHOULD', 'SAY', 'MASTER', 'PAST'] +4198-12259-0041-244: hyp=['AS', 'YOU', 'SEE', 'MASTER', 'PASS'] +4198-12259-0042-245: ref=['O', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'A', 'DRY', 'O', 'POOR', 'THIRSTY', 'SOULS'] +4198-12259-0042-245: hyp=['OH', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'A', 'DRY', 'OH', 'POOR', 'THIRSTY', 'SOULS'] +4198-12259-0043-246: ref=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12259-0043-246: hyp=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12281-0000-187: ref=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0000-187: hyp=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0001-188: ref=['I', 'BESEECH', 'YOU', 'THINK', 'UPON', 'IT'] +4198-12281-0001-188: hyp=['I', 'BESEECH', 'YOU', 'THINK', 'UPON', 'IT'] +4198-12281-0002-189: ref=['NEVERTHELESS', 'AT', 'ALL', 
'ADVENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'AD', 'CAPITULUM', 'CAPITULANTES'] +4198-12281-0002-189: hyp=['NEVERTHELESS', 'AT', 'ALL', 'VENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'AT', 'CAPITULAM', 'CAPITULAT', 'DAYS'] +4198-12281-0003-190: ref=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'PANNIERS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0003-190: hyp=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'TEN', 'YEARS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0004-191: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0004-191: hyp=['BY', 'THE', 'BELLY', 'OF', 'SAINT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0005-192: ref=['LORD', 'GOD', 'DA', 'MIHI', 'POTUM'] +4198-12281-0005-192: hyp=['LORD', 'GOD', 'THOU', 'ME', 'HE', 'POT', 'EM'] +4198-12281-0006-193: ref=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0006-193: hyp=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0007-194: ref=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'AND', 'ALL', 'THE', 'WINTER'] +4198-12281-0007-194: hyp=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'IN', 'ALL', 'THE', 'WINTER'] +4198-12281-0008-195: ref=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', "COP'S", 'BODY', 'FOLLOW', 'ME', 'FOR', 'SANCT', 'ANTHONY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'IF', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WILL', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0008-195: hyp=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', 'COPSE', 'BODY', 'FOLLOW', 'ME', 'FOR', 'SAINT', 'AUNT', 'ANY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WOULD', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0009-196: ref=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPONDYLES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'DISFIGURED', 'THEIR', 'CHAPS', 'GASHED', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BALAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'A', 'MOWER'] +4198-12281-0009-196: hyp=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPAWN', 'MULES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'THIS', 'FIGURED', 'THEIR', 'CHAPS', 'GASHED', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BLAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'HIM', 'OVER'] +4198-12281-0010-197: ref=['TO', 'SOME', 'WITH', 'A', 'SMART', 'SOUSE', 'ON', 'THE', 'EPIGASTER', 'HE', 'WOULD', 'MAKE', 'THEIR', 'MIDRIFF', 'SWAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOMEPUSH', 'ON', 'THE', 'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 
'OUT'] +4198-12281-0010-197: hyp=['TO', 'SOME', 'WOULD', 'THEY', 'SMARE', 'SOUS', 'ON', 'THEIR', 'EBERGASTER', 'HE', 'WILL', 'MAKE', 'THEM', 'MIDRIFTS', 'WAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOME', 'PUSH', 'ON', 'THE', 'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 'OUT'] +4198-12281-0011-198: ref=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'ONE', 'SAW'] +4198-12281-0011-198: hyp=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'WON', 'SAW'] +4198-12281-0012-199: ref=['O', 'THE', 'HOLY', 'LADY', 'NYTOUCH', 'SAID', 'ONE', 'THE', 'GOOD', 'SANCTESS', 'O', 'OUR', 'LADY', 'OF', 'SUCCOURS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0012-199: hyp=['ALL', 'THE', 'HOLY', 'LADY', 'KNIGHT', 'SAID', 'ONE', 'THE', 'GOOD', 'SANCTIS', 'O', 'OUR', 'LADY', 'OF', 'SECURUS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0013-200: ref=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'IN', 'DYING'] +4198-12281-0013-200: hyp=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'AND', 'DYING'] +4198-12281-0014-201: ref=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0014-201: hyp=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0015-202: ref=['IN', 'THE', 'MEANTIME', 'FRIAR', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BATON', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVOURED', 'TO', 'ESCAPE'] +4198-12281-0015-202: hyp=['IN', 'THE', 'MEAN', 'TIME', 'FRIED', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BUT', 'TIME', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVORED', 'TO', 'ESCAPE'] +4198-61336-0000-247: ref=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'ASHUR', 'BY', 'THE', 'ADHERENTS', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0000-247: hyp=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'ASHER', 'BY', 'THE', 'ADHERENCE', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0001-248: ref=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0001-248: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0002-249: ref=['WELL', 'MIGHT', 'SHARDURIS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARPAD'] +4198-61336-0002-249: hyp=['WELL', 'MIGHT', 'SHOW', 'DUERS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARPE'] +4198-61336-0003-250: ref=['TIGLATH', 'PILESER', 'HOWEVER', 'CROSSED', 'THE', 'EUPHRATES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'URARTIAN', 'ARMY', 'IN', 'QUMMUKH'] +4198-61336-0003-250: hyp=['DICK', 'LAUGHED', 
'PLEASURE', 'HOWEVER', 'CROSSED', 'THE', 'EUPHATEES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'GRACIAN', 'ARMY', 'AND', 'CUMICU'] +4198-61336-0004-251: ref=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'ITS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0004-251: hyp=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'HIS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0005-252: ref=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHARDURIS', 'WHO', 'LEAPT', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTINGENT', 'OF', 'CAVALRY'] +4198-61336-0005-252: hyp=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHADORUS', 'WHO', 'LEAPED', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTINGENT', 'OF', 'CAVALRY'] +4198-61336-0006-253: ref=['DESPITE', 'THE', 'BLOW', 'DEALT', 'AGAINST', 'URARTU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0006-253: hyp=['DESPITE', 'THE', 'BLUE', 'DEALT', 'AGAINST', 'URITU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0007-254: ref=['THE', 'SHIFTY', 'MATI', 'ILU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHARDURIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'INVADE', 'NORTH', 'SYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0007-254: hyp=['THE', 'SHIFTY', 'MANTI', 'ILIU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHALL', 'DORIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'IN', 'VAIN', 'NORTH', 'ASSYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0008-255: ref=['TIGLATH', 'PILESER', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0008-255: hyp=['T', 'GLASS', 'BE', 'LEISURE', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0009-256: ref=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0009-256: hyp=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0010-257: ref=['ARPAD', 'WAS', 'CAPTURED', 'AND', 'MATI', 'ILU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0010-257: hyp=['OUR', 'PAD', 'WAS', 'CAPTURED', 'AND', 'MET', 'TO', 'ILL', 'YOU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0011-258: ref=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'ASSYRIA'] +4198-61336-0011-258: hyp=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'THE', 'SYRIA'] +4198-61336-0012-259: ref=['ITS', 'FALL', 'MAY', 'NOT', 'HAVE', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0012-259: hyp=["IT'S", 'FOR', 'ME', 'NOT', 'HAV', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 
'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0013-260: ref=['JEHOASH', 'THE', 'GRANDSON', 'OF', 'JEHU', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0013-260: hyp=['JO', 'ASH', 'THE', 'GRANDSON', 'OF', 'JEHOV', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0014-261: ref=['SIX', 'MONTHS', 'AFTERWARDS', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'SHALLUM'] +4198-61336-0014-261: hyp=['SIX', 'MONTHS', 'AFTERWARD', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'CELEM'] +4198-61336-0015-262: ref=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0015-262: hyp=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0016-263: ref=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MENAHEM', 'THE', 'USURPER', 'WHO', 'WAS', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0016-263: hyp=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MANY', 'HIM', 'THE', 'USURPER', 'WHOSE', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0017-264: ref=['TIGLATH', 'PILESER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIAN', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTH', 'EAST'] +4198-61336-0017-264: hyp=['TAKE', 'LAST', 'PLEASE', 'HER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIUM', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTHEAST'] +4198-61336-0018-265: ref=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0018-265: hyp=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0019-266: ref=['THUS', 'WAS', 'URARTU', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORMER', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0019-266: hyp=['THIS', 'WAS', 'URITU', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORM', 'OF', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0020-267: ref=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIGLATH', 'PILESER', 'RETURNED', 'TO', 'SYRIA'] +4198-61336-0020-267: hyp=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIC', 'LAUGH', 'BELLEGER', 'RETURNED', 'TO', 'SYRIA'] +4198-61336-0021-268: ref=['MENAHEM', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PEKAHIAH'] +4198-61336-0021-268: hyp=['MANY', 'HIM', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PECAH'] +4198-61336-0022-269: ref=['JUDAH', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0022-269: hyp=['JULIA', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0023-270: ref=['HE', 'CONDEMNED', 'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0023-270: 
hyp=['HE', 'CONDEMNED', 'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0024-271: ref=['FOR', 'THUS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'AND', 'YE', 'SHALL', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'O', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0024-271: hyp=['FOR', 'THIS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'A', 'YE', 'TO', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'OR', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0025-272: ref=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 'PERISH'] +4198-61336-0025-272: hyp=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 'PERISH'] +4198-61336-0026-273: ref=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0026-273: hyp=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0027-274: ref=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0027-274: hyp=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0028-275: ref=['THE', 'PHILISTINES', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0028-275: hyp=['THE', 'FURTHER', 'STEAMS', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0029-276: ref=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0029-276: hyp=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0030-277: ref=['UKINZER', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4198-61336-0030-277: hyp=['A', 'KINDRED', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4294-14317-0000-1866: ref=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'I', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0000-1866: hyp=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'AND', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0001-1867: ref=['WHEN', 'I', 'SAW', 'THAT', 'THIS', 'BUST', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SET', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0001-1867: hyp=['WHEN', 'I', 'SAW', 'THIS', 'BUST', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SAID', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0002-1868: ref=['IT', 'WAS', 'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0002-1868: hyp=['IT', 'WAS', 'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 
'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0003-1869: ref=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0003-1869: hyp=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0004-1870: ref=['BUT', 'BEWARE', 'OF', 'LETTING', 'BANDINELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0004-1870: hyp=['BUT', 'BEWARE', 'OF', 'LETTING', 'BEND', 'NELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0005-1871: ref=['I', 'ASK', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABOURS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0005-1871: hyp=['I', 'ASKED', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABORS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0006-1872: ref=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0006-1872: hyp=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0007-1873: ref=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0007-1873: hyp=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0008-1874: ref=['I', 'BEGGED', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0008-1874: hyp=['I', 'BEGGED', 'HIS', 'MOST', 'LUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0009-1875: ref=['I', 'SAID', 'MY', 'LORD', 'I', 'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'ANVILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMAN', 'GIORGIO', 'DA', 'CORTONA', 'FIFTEEN', 'DAYS', 'AGO', 'TO', 'FETCH', 'BACK'] +4294-14317-0009-1875: hyp=['I', 'SAID', 'MY', 'LORD', 'I', 
'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'AMBILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMEN', 'GEORGIO', 'DECORTUNA', 'FIFTEEN', 'DAYS', 'AGO', 'TO', 'FETCH', 'BACK'] +4294-14317-0010-1876: ref=['GIORGIO', 'CAME', 'FOR', 'THEM', 'HIMSELF'] +4294-14317-0010-1876: hyp=['GEORGIO', 'CAME', 'FOR', 'THEM', 'HIMSELF'] +4294-14317-0011-1877: ref=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] +4294-14317-0011-1877: hyp=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] +4294-14317-0012-1878: ref=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDI', 'WITH', 'HIM'] +4294-14317-0012-1878: hyp=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDEE', 'WITH', 'HIM'] +4294-14317-0013-1879: ref=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGELLO'] +4294-14317-0013-1879: hyp=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGENO'] +4294-14317-0014-1880: ref=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALESSANDRO', 'WHICH', 'ARE', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 'FRANCIS', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', 'SCULPTORS', 'CRAFT'] +4294-14317-0014-1880: hyp=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALISANDRO', 'WHICH', 'IS', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 'FRANCES', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 
'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', "SCULPTOR'S", 'CRAFT'] +4294-14317-0015-1881: ref=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERO', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0015-1881: hyp=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERO', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0016-1882: ref=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGED', 'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0016-1882: hyp=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGED', 'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0017-1883: ref=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PERSEUS', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0017-1883: hyp=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PERSEUS', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0018-1884: ref=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-14317-0018-1884: hyp=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-32859-0000-1942: ref=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0000-1942: hyp=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0001-1943: ref=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0001-1943: hyp=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0002-1944: ref=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', "RETZCH'S", 'PRETTY', 'OUTLINES'] +4294-32859-0002-1944: hyp=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', 'WRETCHES', 'PRETTY', 'OUTLINE'] +4294-32859-0003-1945: ref=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0003-1945: hyp=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0004-1946: ref=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 
'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0004-1946: hyp=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0005-1947: ref=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] +4294-32859-0005-1947: hyp=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] +4294-35475-0000-1885: ref=['BUT', 'THE', 'MIDDLE', 'SON', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0000-1885: hyp=['BUT', 'THE', 'MIDDLE', 'SUN', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0001-1886: ref=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'OH', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0001-1886: hyp=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'O', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0002-1887: ref=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0002-1887: hyp=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0003-1888: ref=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IF', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0003-1888: hyp=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IF', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0004-1889: ref=['FORTHWITH', 'THE', 'GRINNING', 'JESTER', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0004-1889: hyp=['FORTHWITH', 'THE', 'GRINNING', 'GESTURE', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0005-1890: ref=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SWORD', 'OF', 'ETHELRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', "TAILOR'S", 'GIRDLE'] +4294-35475-0005-1890: hyp=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SORT', 'OF', 'EPLORRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', "TAILOR'S", 'GIRDLE'] +4294-35475-0006-1891: ref=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 
'FOREST', 'TOO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0006-1891: hyp=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 'FOREST', 'TWO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0007-1892: ref=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'REND', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0007-1892: hyp=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'RUN', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0008-1893: ref=['IT', 'HAD', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0008-1893: hyp=['HE', 'HAD', 'HID', 'IT', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0009-1894: ref=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0009-1894: hyp=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0010-1895: ref=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0010-1895: hyp=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0011-1896: ref=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTON'] +4294-35475-0011-1896: hyp=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTONE'] +4294-35475-0012-1897: ref=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0012-1897: hyp=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0013-1898: ref=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0013-1898: hyp=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0014-1899: ref=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'ITS', 'FREEDOM'] +4294-35475-0014-1899: hyp=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'TO', 'READ', 'THEM'] +4294-35475-0015-1900: ref=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 
'KEYHOLE'] +4294-35475-0015-1900: hyp=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 'KEYHOLE'] +4294-35475-0016-1901: ref=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0016-1901: hyp=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0017-1902: ref=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] +4294-35475-0017-1902: hyp=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] +4294-35475-0018-1903: ref=['IN', 'A', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'HAD', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0018-1903: hyp=['IN', 'THE', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'AND', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0019-1904: ref=['WHILE', 'HE', 'STOOD', 'LOOKING', 'AROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'ALIGHTED', 'ON', 'HIS', 'ARM', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0019-1904: hyp=['WHILE', 'HE', 'STOOD', 'LOOKING', 'ROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'LIGHTED', 'DOWN', 'HIS', 'HEART', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0020-1905: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0020-1905: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0021-1906: ref=['MY', 'GRAIN', 'MUST', 'FALL', 'AND', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVERRIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0021-1906: hyp=['MY', 'GRAIN', 'MUST', 'FALL', 'IN', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVER', 'RIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0022-1907: ref=['THE', 'GRANDAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 'COUNCILLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 'TO', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0022-1907: hyp=['THE', 'GRAND', 'DAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 'COUNSELLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 
'TO', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0023-1908: ref=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0023-1908: hyp=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0024-1909: ref=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "ETHELRIED'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0024-1909: hyp=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "EPILRIED'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0025-1910: ref=['BUT', 'ETHELRIED', 'HEEDED', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0025-1910: hyp=['BUT', 'ETHELRED', 'HE', 'DID', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0026-1911: ref=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'CISEAUX'] +4294-35475-0026-1911: hyp=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'ISAU'] +4294-9934-0000-1912: ref=['HE', 'FELT', 'WHAT', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'AND', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVE', 'LATER'] +4294-9934-0000-1912: hyp=['HE', 'FELT', 'WITH', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVES', 'LATER'] +4294-9934-0001-1913: ref=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECT', 'IT', 'ALREADY'] +4294-9934-0001-1913: hyp=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECTED', 'ALREADY'] +4294-9934-0002-1914: ref=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0002-1914: hyp=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0003-1915: ref=['TO', 'STAND', 'BETWEEN', 'TWO', 'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 
'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0003-1915: hyp=['TO', 'STAND', 'BETWEEN', 'TWO', 'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0004-1916: ref=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0004-1916: hyp=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0005-1917: ref=['THE', 'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0005-1917: hyp=['THE', 'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0006-1918: ref=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HALT', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0006-1918: hyp=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HALT', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0007-1919: ref=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0007-1919: hyp=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0008-1920: ref=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0008-1920: hyp=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0009-1921: ref=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0009-1921: hyp=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0010-1922: ref=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0010-1922: hyp=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0011-1923: ref=['REQUEST', 'COURFEYRAC', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0011-1923: hyp=['REQUEST', 'COURFEREK', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0012-1924: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'COURFEYRAC'] +4294-9934-0012-1924: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'COURFEYRAC'] +4294-9934-0013-1925: ref=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0013-1925: hyp=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0014-1926: ref=['SILVER', 'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0014-1926: hyp=['SILVER', 'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0015-1927: ref=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 
'TROUSERS', 'A', 'WAISTCOAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0015-1927: hyp=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 'TROUSERS', 'A', 'WAISTCOAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0016-1928: ref=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0016-1928: hyp=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0017-1929: ref=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WILL', 'YOU', 'DO', 'AFTER', 'THAT'] +4294-9934-0017-1929: hyp=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WE', 'DO', 'AFTER', 'THAT'] +4294-9934-0018-1930: ref=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0018-1930: hyp=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0019-1931: ref=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] +4294-9934-0019-1931: hyp=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] +4294-9934-0020-1932: ref=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0020-1932: hyp=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0021-1933: ref=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', "WATCHMAKER'S"] +4294-9934-0021-1933: hyp=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', 'WATCHMAKERS'] +4294-9934-0022-1934: ref=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0022-1934: hyp=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0023-1935: ref=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0023-1935: hyp=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0024-1936: ref=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0024-1936: hyp=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0025-1937: ref=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0025-1937: hyp=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0026-1938: ref=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0026-1938: hyp=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0027-1939: ref=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAW', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOLES', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'IN', 'A', 'SEALED', 'BOX'] +4294-9934-0027-1939: hyp=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAST', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOL', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'AND', 'A', 'SEALED', 'BOX'] +4294-9934-0028-1940: ref=['MARIUS', 'SENT', 'BACK', 'THE', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 'WITH', 'A', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUFFICIENT', 'MEANS', 'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0028-1940: hyp=['MARIUS', 'SENT', 'BACK', 'FOR', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 'WITH', 'THE', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUFFICIENT', 'MEANS', 
'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0029-1941: ref=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4294-9934-0029-1941: hyp=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4350-10919-0000-2716: ref=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0000-2716: hyp=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0001-2717: ref=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 'HIS', 'PEARLS'] +4350-10919-0001-2717: hyp=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 'HIS', 'PEARLS'] +4350-10919-0002-2718: ref=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0002-2718: hyp=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0003-2719: ref=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'FATE', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0003-2719: hyp=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'PHAETON', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0004-2720: ref=['IS', 'THERE', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0004-2720: hyp=['IS', 'THEIR', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0005-2721: ref=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0005-2721: hyp=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0006-2722: ref=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0006-2722: hyp=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0007-2723: ref=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALNUTRITION', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0007-2723: hyp=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALTRICIAN', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0008-2724: ref=['THE', 'QUESTION', 'STANDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERCULOUS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0008-2724: hyp=['THE', 'QUESTION', 'SENDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERK', 'AT', 'THIS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0009-2725: ref=['YES', "THAT'S", 'AN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0009-2725: hyp=['YES', 'I', 'CAN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0010-2726: ref=['BEG', 'PARDON', 'IS', 'THE', 'YAUSKY', 'BRIDGE', 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'AROUND'] +4350-10919-0010-2726: hyp=['BEG', 
'PARDON', 'IS', 'THE', "HOUSEKEEPER'S", 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'HER', 'ON'] +4350-10919-0011-2727: ref=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0011-2727: hyp=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0012-2728: ref=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0012-2728: hyp=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0013-2729: ref=['AND', 'HOW', 'ABOUT', 'A', 'TOUR', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0013-2729: hyp=['AND', 'ABOUT', 'IT', 'TO', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0014-2730: ref=['WHAT', 'IS', 'WANTED', 'IS', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0014-2730: hyp=['WHAT', 'IS', 'WANTED', 'IS', 'THE', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0015-2731: ref=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0015-2731: hyp=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0016-2732: ref=['BUT', 'IN', 'FAVOR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0016-2732: hyp=['BUT', 'IN', 'FAVOUR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0017-2733: ref=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0017-2733: hyp=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0018-2734: ref=['AH', 'WELL', 'IN', 'THAT', 'CASE', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'QUACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0018-2734: hyp=['AH', 'WELL', 'THAT', 'HAS', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'CLACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0019-2735: ref=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0019-2735: hyp=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0020-2736: ref=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0020-2736: hyp=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0021-2737: ref=['OH', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0021-2737: hyp=['O', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0022-2738: ref=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0022-2738: hyp=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0023-2739: ref=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 'FILLED', 'WITH', 'TEARS'] +4350-10919-0023-2739: hyp=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 
'FILLED', 'WITH', 'TEARS'] +4350-10919-0024-2740: ref=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0024-2740: hyp=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0025-2741: ref=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0025-2741: hyp=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0026-2742: ref=['THIS', 'IS', 'THE', 'THIRD', 'TIME', "YOU'VE", 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0026-2742: hyp=['THIS', 'IS', 'THE', 'THIRD', 'TIME', 'YOU', 'HAVE', 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0027-2743: ref=['THE', 'CELEBRATED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENSE'] +4350-10919-0027-2743: hyp=['THE', 'CLEARED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENCE'] +4350-10919-0028-2744: ref=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KITTY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0028-2744: hyp=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KATY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0029-2745: ref=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WERE', 'CERTAINLY', 'HARMLESS'] +4350-10919-0029-2745: hyp=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WAS', 'CERTAINLY', 'HARMLESS'] +4350-10919-0030-2746: ref=['AT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0030-2746: hyp=['BUT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0031-2747: ref=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0031-2747: hyp=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0032-2748: ref=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0032-2748: hyp=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0033-2749: ref=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 'CHEERFUL'] +4350-10919-0033-2749: hyp=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 
'CHEERFUL'] +4350-9170-0000-2750: ref=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVER', 'GROWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0000-2750: hyp=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVERGREWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0001-2751: ref=['THIS', 'IS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0001-2751: hyp=['MISSUS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0002-2752: ref=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTERESTS', 'FOR', 'THE', 'INTERESTS', 'OF', 'THE', 'GROUP'] +4350-9170-0002-2752: hyp=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTERESTS', 'FOR', 'THE', 'INTEREST', 'OF', 'THE', 'GROUP'] +4350-9170-0003-2753: ref=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0003-2753: hyp=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0004-2754: ref=['THE', 'MAN', 'WHO', 'IS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0004-2754: hyp=['THE', 'MAN', 'WHO', 'WAS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0005-2755: ref=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0005-2755: hyp=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0006-2756: ref=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'IN', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0006-2756: hyp=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'AND', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0007-2757: ref=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0007-2757: hyp=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0008-2758: ref=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0008-2758: hyp=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0009-2759: ref=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 
'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0009-2759: hyp=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0010-2760: ref=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0010-2760: hyp=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0011-2761: ref=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0011-2761: hyp=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0012-2762: ref=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'IN', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0012-2762: hyp=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'AND', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0013-2763: ref=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARD', 'OF', 'MORALS', 'AND', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0013-2763: hyp=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARDS', 'OF', 'MORALS', 'WHEN', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0014-2764: ref=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'SEEMS', 'STILL', 'LESS', 'AND', 'THE', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 'MORE', 'SECURE'] +4350-9170-0014-2764: hyp=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'A', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIPE', 'BETWEEN', 'THE', 'INDIVIDUALS', 'SEEMED', 'STILL', 'LESS', 'AND', 'A', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 
'MORE', 'SECURE'] +4350-9170-0015-2765: ref=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLE', 'AND', 'CONQUEST'] +4350-9170-0015-2765: hyp=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLING', 'CONQUEST'] +4350-9170-0016-2766: ref=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0016-2766: hyp=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0017-2767: ref=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0017-2767: hyp=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0018-2768: ref=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISSENTIENT', 'INDIVIDUALS'] +4350-9170-0018-2768: hyp=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISINDIAN', 'INDIVIDUALS'] +4350-9170-0019-2769: ref=['GOVERNMENT', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0019-2769: hyp=['GOVERN', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0020-2770: ref=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLDEN', 'EGGS'] +4350-9170-0020-2770: hyp=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLD', 'KNIFE'] +4350-9170-0021-2771: ref=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIMES', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0021-2771: hyp=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIME', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0022-2772: ref=['SO', 'IT', 'HAS', 'ALWAYS', 'BEEN'] +4350-9170-0022-2772: hyp=['SO', 'IT', 'IS', 'ALWAYS', 'THEN'] 
+4350-9170-0023-2773: ref=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXIST', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0023-2773: hyp=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXIST', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0024-2774: ref=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTON'S", 'MEN', 'THEY', 'FEEL', 'THAT', 'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0024-2774: hyp=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTIN'S", 'MEN', 'THEY', 'FEEL', 'THAT', 'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0025-2775: ref=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIERS', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTER', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0025-2775: hyp=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIER', 'THEY', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTRE', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0026-2776: ref=['THE', 'MEANING', 'OF', "CAPRIVI'S", 'SPEECH', 'PUT', 'INTO', 'PLAIN', 'LANGUAGE', 'IS', 'THAT', 'FUNDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0026-2776: hyp=['THE', 'MEANING', 'OF', 'CAPRIVY', 'SPEECH', 'PUT', 'INTO', 'PLAN', 'LANGUAGE', 'IS', 'THAT', 'FONDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0027-2777: ref=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THINGS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0027-2777: hyp=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THANKS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0028-2778: ref=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 'OTHER', 'GOVERNMENTS', 'EXPLOITING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENCE', 'IN', 'THE', 'SAME', 'WAY', 'AND', 
'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVED', 'SUBJECTS'] +4350-9170-0028-2778: hyp=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 'OTHER', 'GOVERNMENTS', 'EXPLODING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENT', 'AND', 'THE', 'SAME', 'WAY', 'AND', 'ARE', 'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVE', 'SUBJECTS'] +4350-9170-0029-2779: ref=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOR', 'BRIGANDS'] +4350-9170-0029-2779: hyp=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOUR', 'BRIGANDS'] +4350-9170-0030-2780: ref=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'ONE', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0030-2780: hyp=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'A', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0031-2781: ref=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SOURCE', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0031-2781: hyp=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SORT', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0032-2782: ref=['THE', 'DESPOTISM', 'OF', 'A', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0032-2782: hyp=['THE', 'DESPOTISM', 'OF', 'THE', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0033-2783: ref=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'IN', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0033-2783: hyp=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'AND', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0034-2784: ref=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALL', 'CITIZENS', 'HAVE', 'BECOME', 'THEIR', 'OWN', 'OPPRESSORS'] +4350-9170-0034-2784: hyp=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 
'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALSO', 'CITIZENS', 'HAVE', 'BECOME', 'THEIR', 'OWN', 'IMPRESSORS'] +4350-9170-0035-2785: ref=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'IN', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0035-2785: hyp=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'AND', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0036-2786: ref=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'TO', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0036-2786: hyp=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'THE', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0037-2787: ref=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0037-2787: hyp=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0038-2788: ref=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSE', 'THE', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIFE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0038-2788: hyp=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSED', 'TO', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIKE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0039-2789: ref=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0039-2789: hyp=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0040-2790: ref=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0040-2790: hyp=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0041-2791: ref=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 
'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0041-2791: hyp=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0042-2792: ref=['GOVERNMENTS', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENSE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0042-2792: hyp=['GOVERNMENT', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENCE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0043-2793: ref=['THEY', 'ARE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'IN', 'ALL', 'THE', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0043-2793: hyp=['THERE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'AND', 'ALL', 'THAT', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0044-2794: ref=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0044-2794: hyp=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0045-2795: ref=['I', 'AM', 'EXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'THE', 'STATE', 'TO', 'MAKE', 'THESE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0045-2795: hyp=['I', 'EXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'STATE', 'TO', 'MAKE', 'THESE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0046-2796: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0046-2796: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0047-2797: ref=['WE', 'KNOW', 'NOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0047-2797: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0048-2798: ref=['SO', 'THAT', 'THE', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOSED', 'PERSONS', 'EVEN', 'IF', 'IT', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0048-2798: hyp=['SO', 'THAT', 'THIS', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOS', 'PERSONS', 'EVEN', 'IF', 'I', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 
'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0049-2799: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0049-2799: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0050-2800: ref=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 'NEEDED', 'FOR', 'DOING', 'ANY', 'THING'] +4350-9170-0050-2800: hyp=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 'NEEDED', 'FOR', 'DOING', 'ANYTHING'] +4350-9170-0051-2801: ref=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0051-2801: hyp=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0052-2802: ref=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0052-2802: hyp=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0053-2803: ref=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0053-2803: hyp=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0054-2804: ref=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0054-2804: hyp=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0055-2805: ref=['AND', 'IF', 'DEFENSE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0055-2805: hyp=['AND', 'IF', 'DEFENCE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0056-2806: ref=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 'OF', 'SUCH', 'ATTACKS'] +4350-9170-0056-2806: hyp=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 'OF', 'SUCH', 'ATTACKS'] +4350-9170-0057-2807: ref=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 
'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATE', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0057-2807: hyp=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATES', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'THE', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0058-2808: ref=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0058-2808: hyp=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0059-2809: ref=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASSES', 'AN', 'OPPRESSOR'] +4350-9170-0059-2809: hyp=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'OF', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASS', 'AND', 'OPPRESSOR'] +4350-9170-0060-2810: ref=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4350-9170-0060-2810: hyp=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4852-28311-0000-2098: ref=['SAY', 'YOU', 'KNOW', 'SUMTHIN'] +4852-28311-0000-2098: hyp=['SAY', 'YOU', 'KNOW', 'SOMETHING'] +4852-28311-0001-2099: ref=['CHRIS', 'LOOKED', 'FROM', 'A', 'NICKEL', 'PLATED', 'FLASHLIGHT', 'TO', 'A', 'CAR', 'JACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0001-2099: hyp=['CHRIS', 'LOOKED', 'FROM', 'MENDICULATED', 'FLASH', 'LIKE', 'TO', 'A', 'CAR', 'JACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0002-2100: ref=['KNOW', 'WHO', 'NEEDS', 'A', 'JOB', 'BAD', "THAT'S", 'JAKEY', 'HARRIS'] +4852-28311-0002-2100: hyp=['NO', 'ONE', 'NEEDS', 'A', 'JOB', 'BAN', "THAT'S", 'JAKIE', 'HARRIS'] +4852-28311-0003-2101: ref=['O', 'K', 'HE', 'SAID'] +4852-28311-0003-2101: hyp=['O', 'K', 'HE', 'SAID'] +4852-28311-0004-2102: ref=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0004-2102: hyp=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0005-2103: ref=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'AN', 'ELASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'INTO', 'M', 'STREET'] +4852-28311-0005-2103: hyp=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'IT', 'MOLASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'AN', 'ENEM', 'STREET'] +4852-28311-0006-2104: ref=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] +4852-28311-0006-2104: hyp=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] +4852-28311-0007-2105: ref=['CHRIS', 'ASKED', 'AND', 'FOR', 'THE', 
'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0007-2105: hyp=['CHRIS', 'ASKED', 'HIM', 'FOR', 'THE', 'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0008-2106: ref=['THINK', 'HE', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0008-2106: hyp=['THINKING', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0009-2107: ref=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKEY', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGING', 'INTO', 'THE', 'STREETS', 'WAS', 'NOT', 'WHAT', 'CHRIS', 'HAD', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0009-2107: hyp=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKIE', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGED', 'INTO', 'THE', 'STREETS', 'WAS', 'NOT', 'WHAT', 'CHRISTEN', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0010-2108: ref=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', 'ONCE', 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'CHRIS', 'WAS', 'ONLY', 'PARTLY', 'ENCOURAGED'] +4852-28311-0010-2108: hyp=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', 'ONE', 'WANTS', 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'BRUCE', 'WAS', 'ONLY', 'PARTIALLY', 'ENCOURAGED'] +4852-28311-0011-2109: ref=['BETCHA', "AREN'T", 'GOIN', 'AFTER', 'ALL', 'CHRIS', 'TURNED', 'ON', 'HIM'] +4852-28311-0011-2109: hyp=['BETTER', 'AND', 'GOIN', 'AFTER', 'ALL', 'THIS', 'TURNED', 'TO', 'HIM'] +4852-28311-0012-2110: ref=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0012-2110: hyp=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0013-2111: ref=['AW', 'SHUCKS'] +4852-28311-0013-2111: hyp=['AH', 'SHOCKS'] +4852-28311-0014-2112: ref=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'THE', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0014-2112: hyp=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'A', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0015-2113: ref=['AN', 'EMPTY', 'LOT', 'CUT', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'L', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', 'WICKER', 'AS', 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0015-2113: hyp=['AN', 'EMPTY', 'LOT', 'CUT', 'IN', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'ALE', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', "WICKER'S", 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0016-2114: ref=['THE', 'LONGER', 'WING', 'TOWARD', 'THE', 'BACK', 'HAD', 'A', 'BACK', 'DOOR', 'THAT', 'OPENED', 'ONTO', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVENUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUBS', 'AND', 'A', 'WHITE', 'PICKET', 'FENCE'] +4852-28311-0016-2114: hyp=['NO', 'LONGER', 'WINGED', 'TOWARD', 'THE', 'BACK', 'GOT', 'A', 'BACK', 'DOOR', 'THAT', 
'OPENED', 'ON', 'A', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVIGUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUGS', 'AND', 'THE', 'WHITE', 'PICKET', 'FENCE'] +4852-28311-0017-2115: ref=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'AND', 'GRAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0017-2115: hyp=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'IN', 'GLAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0018-2116: ref=['THE', 'AIR', 'WAS', 'GROWING', 'CHILL', 'AND', 'CHRIS', 'DECIDED', 'TO', 'FINISH', 'HIS', 'JOB'] +4852-28311-0018-2116: hyp=['THE', 'AIR', 'WAS', 'GROWING', 'CHILL', 'AND', 'CHRIST', 'DECIDED', 'TO', 'FINISH', 'THE', 'JOB'] +4852-28311-0019-2117: ref=['ALL', 'AT', 'ONCE', 'HE', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'PINCHED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0019-2117: hyp=['ALL', 'AT', 'ONCE', 'YOU', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'IMPINGED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0020-2118: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0020-2118: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0021-2119: ref=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0021-2119: hyp=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0022-2120: ref=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTER', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'IN', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0022-2120: hyp=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTER', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'AND', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0023-2121: ref=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'HAD', 'SAILED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0023-2121: hyp=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'HAD', 'SAILED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0024-2122: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ANYONE', 'GO', 'INTO', 'MISTER', "WICKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0024-2122: hyp=['HE', 'HAD', 'NEVER', 'SEEN', 'ANY', 'ONE', 'GO', 'INTO', 'MISTER', "HOOKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0025-2123: ref=['HOW', 'THEN', 'DID', 'HE', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 'SELL'] +4852-28311-0025-2123: hyp=['HOW', 'THEN', 'DID', 'HE', 'TO', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 'SELL'] +4852-28311-0026-2124: ref=['A', 'SUDDEN', 'CAR', 'HORN', 'WOKE', 'HIM', 'FROM', 'HIS', 'DREAM'] +4852-28311-0026-2124: 
hyp=['A', 'SUDDEN', 'CAR', 'HORN', 'WALKING', 'FROM', 'THIS', 'DREAM'] +4852-28312-0000-2125: ref=['OF', 'THE', 'MANY', 'TIMES', 'HE', 'HAD', 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'PORED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "WICKER'S", 'SHOP'] +4852-28312-0000-2125: hyp=['OF', 'THE', 'MANY', 'TIMES', "YOU'D", 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'POURED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "ROOKER'S", 'SHOP'] +4852-28312-0001-2126: ref=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOMEONE', 'SHOULD', 'ANSWER', 'THE', 'BELL', 'HE', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0001-2126: hyp=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOME', 'ONE', 'SHOULD', 'ANSWER', 'THE', 'BELL', 'THEY', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0002-2127: ref=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBER'] +4852-28312-0002-2127: hyp=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBRE'] +4852-28312-0003-2128: ref=['HEAVY', 'HAND', 'HEWN', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0003-2128: hyp=['HEAVY', 'HAND', 'YOU', 'AND', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0004-2129: ref=['MISTER', "WICKER'S", 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0004-2129: hyp=['MISTER', 'OAKERS', 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0005-2130: ref=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNER', 'TO', 'TEMPLE', 'AND', 'JOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'CHEEKBONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0005-2130: hyp=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNERED', 'A', 'TEMPLE', 'ADJOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'SHEEP', 'BONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0006-2131: ref=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0006-2131: hyp=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0007-2132: ref=['CHRIS', 'BLINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0007-2132: hyp=['CRISP', 'LINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0008-2133: ref=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0008-2133: hyp=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0009-2134: ref=['YES', 'SIR', 'HE', 'SAID'] +4852-28312-0009-2134: hyp=['YES', 'SIR', 'HE', 'SAID'] +4852-28312-0010-2135: ref=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 
'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0010-2135: hyp=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0011-2136: ref=["HE'S", 'A', 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0011-2136: hyp=["HE'S", 'A', 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0012-2137: ref=['JAKEY', 'HARRIS', 'HIS', 'NAME', 'IS', 'AND', 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0012-2137: hyp=["GIGIRIS'S", 'NAME', "ISN'T", 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0013-2138: ref=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0013-2138: hyp=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0014-2139: ref=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SENSITIVE', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0014-2139: hyp=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOUR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SENSITIVE', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0015-2140: ref=['HE', 'GUESSED', 'THERE', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'ROOM', 'BEYOND'] +4852-28312-0015-2140: hyp=['HE', 'GUESSED', 'THERE', 'IT', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'RUM', 'BEYOND'] +4852-28312-0016-2141: ref=['WOULD', 'THAT', 'INTERFERE', 'WITH', "JAKEY'S", 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0016-2141: hyp=['WOULD', 'THAT', 'INTERFERE', 'WITH', 'JAKIE', 'GIGS', 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0017-2142: ref=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'HAD', 'HE', 'NOT', 'SEEN', 'THE', 'REFLECTION', 'OF', 'THE', 'HEADLIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WATER', 'STREET', 'AND', 'UP', 'THE', 'HILL', 'TOWARD', 'THE', 'TRAFFIC', 'SIGNALS'] +4852-28312-0017-2142: hyp=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'REFLECTION', 'OF', 'THE', 'HEAD', 'LIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WALL', 'AT', 'HER', 'STREET', 'NOT', 'THE', 'HILL', 'TOWARD', 'THE', 'EFFIC', 'SIGNALS'] +4852-28312-0018-2143: ref=['THE', 'ROOM', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0018-2143: hyp=['THE', 'ROOM', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0019-2144: ref=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0019-2144: hyp=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0020-2145: ref=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0020-2145: hyp=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0021-2146: ref=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREEWAY'] +4852-28312-0021-2146: hyp=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREE', 'WAY'] +4852-28312-0022-2147: ref=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 
'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKESTACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0022-2147: hyp=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKE', 'STACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0023-2148: ref=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0023-2148: hyp=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0024-2149: ref=['FLABBERGASTED', 'AND', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0024-2149: hyp=['FLABRA', 'GASTED', 'IN', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0025-2150: ref=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0025-2150: hyp=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0026-2151: ref=['WHERE', 'THE', "PEOPLE'S", 'DRUGSTORE', 'HAD', 'STOOD', 'BUT', 'A', 'HALF', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0026-2151: hyp=['WHERE', 'THE', "PEOPLE'S", 'DRUG', 'STORE', 'HAD', 'STOOD', 'BUT', 'HALF', 'AN', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0027-2152: ref=['A', 'COURTYARD', 'WAS', 'SPARSELY', 'LIT', 'BY', 'A', 'FLARING', 'TORCH', 'OR', 'TWO', 'SHOWING', 'A', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'A', 'POST'] +4852-28312-0027-2152: hyp=['A', 'COURTYARD', 'WAS', 'FIRSTLY', 'LIT', 'BY', 'A', 'FLARING', 'TORTURE', 'TWO', 'SHOWING', 'THE', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'THE', 'POST'] +4852-28312-0028-2153: ref=['THE', 'POST', 'WAS', 'PLANTED', 'AT', 'THE', 'EDGE', 'OF', 'WHAT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0028-2153: hyp=['THE', 'POSTS', 'PLANTED', 'AT', 'THE', 'EDGE', 'OF', 'IT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0029-2154: ref=['A', 'COACH', 'WITH', 'ITS', 'TOP', 'PILED', 'HIGH', 'WITH', 'LUGGAGE', 'STAMPED', 'TO', 'A', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0029-2154: hyp=['A', 'COACH', 'WHICH', 'HAD', 'STOPPED', 'PILED', 'HIGH', 'WITH', 'LEGGED', 'STAMPED', 'A', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0030-2155: ref=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0030-2155: hyp=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0031-2156: ref=['MY', 'WINDOW', 'HAS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28312-0031-2156: hyp=['BY', 'WINDOW', 'AS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28319-0000-2070: ref=['THE', 'LEARNING', 'OF', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0000-2070: hyp=['THE', 'LEARNING', 'AND', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0001-2071: ref=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'ABOUT', 'SIMON', 'GOSLER', 'HIS', 'HORDE', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] +4852-28319-0001-2071: hyp=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'HE', 'GOT', 'SIMON', 'GOSLER', 'HIS', 'HOARD', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] +4852-28319-0002-2072: ref=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'INTO', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] 
+4852-28319-0002-2072: hyp=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'AND', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] +4852-28319-0003-2073: ref=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLDFISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] +4852-28319-0003-2073: hyp=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'HAD', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLD', 'FISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] +4852-28319-0004-2074: ref=['WHAT', 'SHALL', 'I', 'DO', 'FIRST'] +4852-28319-0004-2074: hyp=['WHAT', 'SHOULD', 'ALL', 'I', 'DO', 'FIRST'] +4852-28319-0005-2075: ref=['HOW', 'YOU', 'HAVE', 'IMPROVED', 'MY', 'BOY', 'HE', 'EXCLAIMED', 'IT', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'AND', 'THIS', 'IS', 'AS', 'GOOD', 'A', 'CHANGE', 'AS', 'ANY'] +4852-28319-0005-2075: hyp=['HOW', 'OFTEN', 'PROVED', 'MY', 'BOY', 'IT', 'EXCLAIMED', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'MISSUS', 'IS', 'GOOD', 'A', 'CHANGE', 'IS', 'ANY'] +4852-28319-0006-2076: ref=['SUPPOSE', 'I', 'CHANGE', 'AND', "CAN'T", 'CHANGE', 'BACK'] +4852-28319-0006-2076: hyp=['SUPPOSE', 'A', 'CHANGE', 'AND', "CAN'T", 'CHANCE', 'BACK'] +4852-28319-0007-2077: ref=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0007-2077: hyp=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0008-2078: ref=['THEN', 'AS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0008-2078: hyp=['THAT', 'IS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0009-2079: ref=['THE', 'SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0009-2079: hyp=['THE', 'SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0010-2080: ref=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'AND', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0010-2080: hyp=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'IN', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0011-2081: ref=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0011-2081: hyp=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'THE', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0012-2082: ref=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] +4852-28319-0012-2082: hyp=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] 
+4852-28319-0013-2083: ref=['THE', "FIGURE'S", 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0013-2083: hyp=['THE', 'FIGURES', 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0014-2084: ref=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0014-2084: hyp=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0015-2085: ref=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0015-2085: hyp=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0016-2086: ref=['HE', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINING', 'THE', 'CARVED', 'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0016-2086: hyp=['IT', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINED', 'THE', 'CARVED', 'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0017-2087: ref=['BUT', 'HOW', 'DID', 'ONE', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0017-2087: hyp=['BUT', 'HOW', 'DID', 'ONE', 'A', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0018-2088: ref=['CHRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0018-2088: hyp=['GRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0019-2089: ref=['HE', 'HEARD', 'THE', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'CHRIS', 'RESUMED', 'HIS', 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0019-2089: hyp=['HE', 'HEARD', 'THAT', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'MISTER', "JAMES'S", 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0020-2090: ref=['THE', 'AFTERNOON', 'RAINY', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0020-2090: hyp=['THE', 'AFTERNOON', 'RAINING', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0021-2091: ref=['DUSK', 'CAME', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLED', 'IN', 'THE', 'SKY'] +4852-28319-0021-2091: hyp=['THUS', 'GAINED', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLS', 'IN', 'THE', 'SKY'] +4852-28319-0022-2092: ref=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0022-2092: hyp=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0023-2093: ref=['MISTER', 'WICKER', 'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FLOORBOARDS', 'CREAKED', 'AND', 'STILL', 'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'POTION', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0023-2093: hyp=['MISTER', 'WICKER', 
'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FOREBOARDS', 'CREAKED', 'AND', 'STILL', 'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'FORCIAN', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0024-2094: ref=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'OF', 'THE', 'NEGRO', 'BOY'] +4852-28319-0024-2094: hyp=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'AT', 'THE', 'NEGRO', 'BOY'] +4852-28319-0025-2095: ref=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0025-2095: hyp=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0026-2096: ref=['UNDER', 'HIS', 'EYES', 'THE', 'WOODEN', 'FOLDS', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0026-2096: hyp=['UNDER', 'HIS', 'EYES', 'WELLS', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0027-2097: ref=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'THE', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'AND', 'LET', 'THE', 'BOUQUET', 'FALL'] +4852-28319-0027-2097: hyp=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'A', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'THE', 'BOUQUET', 'FALL'] +4852-28330-0000-2044: ref=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0000-2044: hyp=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0001-2045: ref=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0001-2045: hyp=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0002-2046: ref=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'AT', 'THE', 'HELM', 'BY', 'ELBERT', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0002-2046: hyp=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'TO', 'THE', 'HELM', 'BY', 'ELBER', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0003-2047: ref=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'HAVE', 'BEEN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'OPEN'] +4852-28330-0003-2047: hyp=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'BIT', 'OF', 'IN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'UP', 'AND'] +4852-28330-0004-2048: ref=['THE', 'MIRABELLE', 'WAS', 'NEARING', 'TAHITI'] +4852-28330-0004-2048: hyp=['THE', 'MIRABELLE', 'WAS', 'NEAR', 'INDEEDY'] +4852-28330-0005-2049: ref=["WE'VE", 'WATER', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0005-2049: hyp=['REVOLTA', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0006-2050: ref=['CHRIS', 'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'IN', 'HURRYING', 'DOWN', 'TO', 'HIS', 
'CABIN'] +4852-28330-0006-2050: hyp=['CHRIS', 'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'AND', 'HURRYING', 'DOWN', 'TO', 'HIS', 'CABIN'] +4852-28330-0007-2051: ref=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'HIS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0007-2051: hyp=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'AS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0008-2052: ref=['MISTER', 'FINNEY', 'WILL', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OVERHEARD', 'IN', 'HERE'] +4852-28330-0008-2052: hyp=['MISTER', 'FINNELL', 'WILL', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OWNED', 'HEARD', 'AND', 'HERE'] +4852-28330-0009-2053: ref=['HIS', 'FACE', 'FROZE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0009-2053: hyp=['HIS', 'FACE', 'ROSE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'DO', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0010-2054: ref=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0010-2054: hyp=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0011-2055: ref=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'I', 'DO'] +4852-28330-0011-2055: hyp=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'THAT', 'DO'] +4852-28330-0012-2056: ref=['THIS', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'WIFE', 'OR', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0012-2056: hyp=['THE', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'MY', 'FULL', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0013-2057: ref=['HE', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CHRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0013-2057: hyp=['AND', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0014-2058: ref=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0014-2058: hyp=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0015-2059: ref=['THIS', 'SHIP', 'ITS', 'CARGO', 'AND', 'ITS', 'MEN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0015-2059: hyp=['THE', 'SHIP', 'ITS', 'CARGO', 'IN', 'ITS', 'MEN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0016-2060: ref=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0016-2060: hyp=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0017-2061: ref=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACE', 'CREASED', 'IN', 'HIS', 'WINNING', 'SMILE'] +4852-28330-0017-2061: hyp=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACED', 'CREASED', 'IN', 'ITS', 'WINNING', 'SMILE'] +4852-28330-0018-2062: ref=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 
'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0018-2062: hyp=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0019-2063: ref=['CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANYONE', 'ELSE', 'OF', 'HIS', 'TIME', 'YET', 'UNDERSTOOD'] +4852-28330-0019-2063: hyp=['THE', 'CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANY', 'ONE', 'ELSE', 'OF', 'HIS', 'TIME', 'HE', 'HAD', 'UNDERSTOOD'] +4852-28330-0020-2064: ref=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0020-2064: hyp=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0021-2065: ref=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICAMENTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'CAPTAIN', 'OF', 'A', 'SHIP', 'AND', 'THE', 'CREW', 'KNOW', 'IT'] +4852-28330-0021-2065: hyp=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICMENTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'A', 'CAPTAIN', 'OF', 'SHIP', 'AND', 'CREW', 'KNOW', 'IT'] +4852-28330-0022-2066: ref=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0022-2066: hyp=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0023-2067: ref=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'HAD', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0023-2067: hyp=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'AND', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0024-2068: ref=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'AND', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0024-2068: hyp=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'IN', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0025-2069: ref=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +4852-28330-0025-2069: hyp=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +533-1066-0000-796: ref=['WHEN', 'CHURCHYARDS', 'YAWN'] +533-1066-0000-796: hyp=['ONE', 'CHURCHYARDS', 'YAWN'] +533-1066-0001-797: ref=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSANDS', 'OF', 'MILES', 'IN', 'THE', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOD'] +533-1066-0001-797: hyp=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSAND', 'OF', 'MILES', 'INTO', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOT'] +533-1066-0002-798: ref=['I', 'AM', 'SURE', 'I', 'KISSED', 'LIDDY', 'AND', 'I', 'HAVE', 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEM', 'TO', 'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] +533-1066-0002-798: hyp=['I', 'AM', 'SURE', 'I', 'KISSED', 'LADY', 'AND', "I'VE", 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEMED', 'TO', 
'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] +533-1066-0003-799: ref=['FORTUNATELY', 'WARNER', 'AND', 'THE', 'DETECTIVES', 'WERE', 'KEEPING', 'BACHELOR', 'HALL', 'IN', 'THE', 'LODGE'] +533-1066-0003-799: hyp=['FORTUNATELY', 'WARNER', 'ON', 'THE', 'DETECTIVE', 'WERE', 'KEEPING', 'BACHELOR', 'HOLLAND', 'LODGE'] +533-1066-0004-800: ref=['OUT', 'OF', 'DEFERENCE', 'TO', 'LIDDY', 'THEY', 'WASHED', 'THEIR', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCOCTED', 'QUEER', 'MESSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0004-800: hyp=['OUT', 'OF', 'DEFERENCE', 'TO', 'LIVE', 'THEY', 'WASHED', 'HER', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCLUDED', 'QUEER', 'MASSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0005-801: ref=['MISS', 'INNES', 'HE', 'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UP', 'STAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TONIGHT'] +533-1066-0005-801: hyp=['MISS', 'EAMES', 'HE', 'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UP', 'STAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TO', 'NIGHT'] +533-1066-0006-802: ref=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0006-802: hyp=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0007-803: ref=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0007-803: hyp=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0008-804: ref=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0008-804: hyp=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0009-805: ref=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0009-805: hyp=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0010-806: ref=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'OLD', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0010-806: hyp=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'ALL', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0011-807: ref=['LIDDY', 'WAS', 'SLEEPING', 'THE', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UP', 'STAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0011-807: hyp=['LEAVY', 'WAS', 'SLEEPING', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UPSTAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0012-808: ref=['THEY', 'WERE', 'TALKING', 'CONFIDENTIALLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0012-808: hyp=['YOU', 'WERE', 'TALKING', 'TO', 'FILIANTLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0013-809: ref=['THERE', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'THE', 'LOCKS', 'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTED', 'AS', 'TO', 'RENEWED', 'VIGILANCE', 'AND', 'THEN', 'AFTER', 'EXTINGUISHING', 'THE', 'HALL', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0013-809: hyp=['THERE', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'LOCKS', 
'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTIVE', 'AS', 'TO', 'RENEWED', 'VIGILANCE', 'AND', 'THEN', 'AFTER', 'DISTINGUISHING', 'THE', 'WHOLE', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0014-810: ref=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0014-810: hyp=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0015-811: ref=['ONCE', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'BIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0015-811: hyp=['WAS', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'FIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0016-812: ref=['I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0016-812: hyp=['I', 'ARE', 'TO', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0017-813: ref=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'WHEN', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0017-813: hyp=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'BUT', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0018-814: ref=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0018-814: hyp=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0019-815: ref=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0019-815: hyp=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0020-816: ref=['IT', 'WAS', 'ALEX', 'ARMED', 'WITH', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0020-816: hyp=['IT', 'WAS', 'ALEX', 'ON', 'THE', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0021-817: ref=['THE', 'DOCTOR', 'KEPT', 'A', 'KEEN', 'LOOKOUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0021-817: hyp=['THE', 'DOCTOR', 'KEPT', 'A', 'KIN', 'LOOK', 'OUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0022-818: ref=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0022-818: hyp=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0023-819: ref=['A', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'HANDIER', 'AT', 'BURYING', 'FOLKS', 'THAN', 'AT', 'DIGGING', 'THEM', 'UP'] +533-1066-0023-819: hyp=['THE', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'A', 'HANDIER', 'AT', 'BERING', 'FOLKS', 'THAN', 'A', 'TIGGING', 'THEM', 'UP'] +533-1066-0024-820: ref=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'THERE', 'AND', 'LOOKED', 'DOWN'] +533-1066-0024-820: hyp=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'TERRANT', 'LOOKED', 'DOWN'] +533-131556-0000-821: ref=['BUT', 'HOW', 'AM', 'I', 'TO', 'GET', 'OVER', 'THE', 'TEN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0000-821: hyp=['BUT', 'HOW', 'AM', 'I', 'TO', 'IT', 'OVER', 'THE', 'TOWN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0001-822: ref=['FOR', 'NONE', 'COULD', 'INJURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0001-822: hyp=['FOR', 'NONE', 'COULD', 'ENDURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0002-823: ref=['THE', 'WORD', 'STARES', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 
'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0002-823: hyp=['THE', 'WORD', 'STARES', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0003-824: ref=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMULATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0003-824: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMILATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0004-825: ref=['I', 'HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'THEM', 'SO', 'MINUTELY'] +533-131556-0004-825: hyp=['I', 'HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'HIM', 'SUMINUTELY'] +533-131556-0005-826: ref=['THEY', 'HAD', 'BETAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LESS', 'TO', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'DEPRECATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'A', 'BOOK'] +533-131556-0005-826: hyp=['THE', 'YEAR', 'HAD', 'TAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LEST', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'THE', 'PROCATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'THE', 'BOOK'] +533-131556-0006-827: ref=['I', 'AM', 'TOO', 'WELL', 'ACQUAINTED', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'THE', 'APPEARANCE', 'OF', 'IT'] +533-131556-0006-827: hyp=['I', 'AM', 'TOO', 'ACQUAINTED', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'THE', 'APPEARANCE', 'OF', 'IT'] +533-131556-0007-828: ref=['UPON', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0007-828: hyp=['UP', 'AND', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0008-829: ref=['YOU', 'MAY', 'GO', 'MILICENT', 'AND', "SHE'LL", 'FOLLOW', 'IN', 'A', 'WHILE', 'MILICENT', 'WENT'] +533-131556-0008-829: hyp=['YOU', 'MAY', 'GO', 'MELLICENT', 'UNTIL', 'FOLLOWING', 'A', 'WHILE', 'MELLICENT', 'WENT'] +533-131556-0009-830: ref=['WILL', 'YOU', 'OBLIGE', 'ME', 'HELEN', 'CONTINUED', 'SHE'] +533-131556-0009-830: hyp=["OLI'", 'OBLIGE', 'ME', 'ALLAN', 'CONTINUED', 'SHE'] +533-131556-0010-831: ref=['AH', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0010-831: hyp=['HA', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0011-832: ref=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0011-832: hyp=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0012-833: ref=['I', 'ENJOY', 'A', 'MOONLIGHT', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UPON', 'HER', 'AND', 'THE', 'SHRUBBERY', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVOURITE', 'RESORTS'] +533-131556-0012-833: hyp=['ENJOY', 
'EVENLENTH', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UP', 'ON', 'EARTH', 'AND', 'FREDERI', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVORITE', 'RESORTS'] +533-131556-0013-834: ref=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0013-834: hyp=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0014-835: ref=['I', 'WATCHED', 'HER', 'A', 'FEW', 'MOMENTS', 'WITH', 'A', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] +533-131556-0014-835: hyp=['I', 'WATCH', 'FOR', 'A', 'FEW', 'MOMENTS', 'TO', 'THE', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] +533-131556-0015-836: ref=['YES', 'YES'] +533-131556-0015-836: hyp=['YES', 'YES'] +533-131556-0016-837: ref=['SUPPOSE', 'I', 'DO'] +533-131556-0016-837: hyp=['SUPPOSE', 'I', 'DO'] +533-131556-0017-838: ref=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0017-838: hyp=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0018-839: ref=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0018-839: hyp=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0019-840: ref=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'READY', 'TO', 'ACKNOWLEDGE', 'MYSELF', 'YOUR', 'DEBTOR', 'FOR', 'AN', 'ACT', 'OF', 'THE', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0019-840: hyp=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'RATHER', 'TO', 'ANNOUNCE', 'MYSELF', 'YOUR', 'DEPTOR', 'FOR', 'AN', 'ACT', 'OF', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0020-841: ref=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0020-841: hyp=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0021-842: ref=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0021-842: hyp=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0022-843: ref=['AND', 'MILICENT', 'WILL', 'YOU', 'TELL', 'HER'] +533-131556-0022-843: hyp=['AND', 'MELLICENT', 'WILL', 'IT', 'TELL', 'HER'] +533-131556-0023-844: ref=['I', 'WOULD', 'NOT', 'FOR', 'MUCH', 'THAT', 'SHE', 'SHOULD', 'KNOW', 'THE', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] +533-131556-0023-844: hyp=['I', 'WILL', 'NOT', 'FOR', 'MUCH', 'THAT', 'YOU', 'SHOULD', 'NOT', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] +533-131556-0024-845: ref=['YOU', 'USE', 'HARD', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0024-845: 
hyp=['YOU', 'USE', 'OUR', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0025-846: ref=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131556-0025-846: hyp=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131562-0000-847: ref=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'TURNING', 'TO', 'WHERE', 'I', 'STOOD', 'WRINGING', 'MY', 'HANDS', 'IN', 'SILENT', 'RAGE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', "I'LL", 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'KEYS'] +533-131562-0000-847: hyp=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'TURNING', 'TO', 'HER', 'EYES', 'TOO', 'WRINGING', 'MY', 'HAND', 'IN', 'SILENT', 'RATE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', 'I', 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'CASE'] +533-131562-0001-848: ref=['THE', 'KEYS', 'OF', 'YOUR', 'CABINET', 'DESK', 'DRAWERS', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0001-848: hyp=['IT', 'ACCUSE', 'OF', 'YOUR', 'CABINET', 'DESKED', 'RAOUL', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0002-849: ref=['THE', 'KEY', 'OF', 'MY', 'DESK', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'THE', 'LOCK', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0002-849: hyp=['THE', 'KEY', 'OF', 'MY', 'VES', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'LOVE', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0003-850: ref=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0003-850: hyp=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0004-851: ref=['AND', 'PUTTING', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0004-851: hyp=['AND', 'PUT', 'IN', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0005-852: ref=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THE', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0005-852: hyp=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THE', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0006-853: ref=['MISTER', 'HUNTINGDON', 'THEN', 'WENT', 'UP', 'STAIRS'] +533-131562-0006-853: hyp=['MISTER', 'HUNTINGDON', 'THEN', 'WENT', 'UPSTAIRS'] +533-131562-0007-854: ref=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'THE', 'VERY', 'DEVIL', 'FOR', 'SPITE'] +533-131562-0007-854: hyp=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'VERY', 'DEVIL', 'FOR', 'A', 'SPITE'] +533-131562-0008-855: ref=['I', "DIDN'T", 'SAY', "I'D", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0008-855: hyp=['I', "DIDN'T", 'SAY', "I'VE", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0009-856: ref=['I', 'SHALL', 'PUT', 'YOU', 'UPON', 'A', 'SMALL', 'MONTHLY', 'ALLOWANCE', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'THE', 
'TEMPTATION'] +533-131562-0009-856: hyp=['I', 'SHALL', 'PUT', 'YOU', 'UP', 'IN', 'A', 'SMALL', 'MOUTHFULLY', 'ALLOW', 'US', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'TEMPTATION'] +533-131562-0010-857: ref=['AND', 'AS', 'FOR', 'THE', 'HOUSEHOLD', 'MATTERS', 'MISSUS', 'GREAVES', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UPON', 'AN', 'ENTIRELY', 'NEW', 'PLAN'] +533-131562-0010-857: hyp=['AND', 'AS', 'FOR', 'THE', 'HOUSE', 'OR', 'MATTERS', 'MISSUS', 'GREEBS', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UP', 'IN', 'AN', 'ENTIRELY', 'NEW', 'PLAN'] +533-131562-0011-858: ref=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HUNTINGDON'] +533-131562-0011-858: hyp=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HONDYNON'] +533-131562-0012-859: ref=['HAVE', 'I', 'ATTEMPTED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0012-859: hyp=['IF', 'I', 'ATTENDED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0013-860: ref=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', "IT'S", 'BEST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0013-860: hyp=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', 'IS', 'FAST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0014-861: ref=['HERE', 'BENSON', 'ENTERED', 'WITH', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'A', 'BRIEF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STILL', 'IN', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0014-861: hyp=['HERE', 'BUILTON', 'ENTERED', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'THE', 'ROOF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STEALING', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0015-862: ref=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'WILL', 'RETURN', 'UPON', 'ME'] +533-131562-0015-862: hyp=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'TO', 'RETURN', 'UPON', 'ME'] +533-131562-0016-863: ref=['I', 'TRY', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLEAVE', 'TO', 'THE', 'DUST'] +533-131562-0016-863: hyp=['I', 'TRIED', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLIFF', 'TO', 'THE', 'DUST'] +533-131564-0000-768: ref=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0000-768: hyp=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0001-769: ref=['MISTER', 'AND', 'MISSUS', 'HATTERSLEY', 'HAVE', 'BEEN', 'STAYING', 'AT', 'THE', 'GROVE', 'A', 'FORTNIGHT', 'AND', 'AS', 'MISTER', 'HARGRAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'THE', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'A', 'DAY', 'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILICENT', 'AND', 'ESTHER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0001-769: hyp=['MISS', 'AND', 'MISSUS', 'HALTERSLEY', 'HAVE', 'BEEN', 'SEEING', 'IT', 'TO', 'GROW', 'A', 'FORTNIGHT', 'AND', 'AS', 'MISSUS', 'HARGRAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'THE', 'DAY', 
'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILLSON', 'AND', 'ASSER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0002-770: ref=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'ME', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0002-770: hyp=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'YOU', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0003-771: ref=['I', "CAN'T", 'YOU', "DON'T", 'WANT', 'HIM', 'DO', 'YOU'] +533-131564-0003-771: hyp=['I', "CAN'T", 'EVEN', 'WANTS', 'HIM', 'DO', 'YOU'] +533-131564-0004-772: ref=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'OUGHT', 'TO', 'HAVE', 'FORMED', 'LONG', 'AGO'] +533-131564-0004-772: hyp=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'ARE', 'REFORMED', 'LONG', 'AGO'] +533-131564-0005-773: ref=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEARTS', 'THOUGH', 'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0005-773: hyp=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEART', 'THOUGH', 'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0006-774: ref=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'BAD', 'AS', 'I', 'AM'] +533-131564-0006-774: hyp=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'THAT', 'AS', 'I', 'AM'] +533-131564-0007-775: ref=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0007-775: hyp=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0008-776: ref=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOUR', 'SONS', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0008-776: hyp=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOURSELVES', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0009-777: ref=['OH', 'NO', 'I', "COULDN'T", 'STAND', 'THAT'] +533-131564-0009-777: hyp=['OH', 'NO', 'ECHOLYN', 'STAND', 'THAT'] +533-131564-0010-778: ref=['FIRE', 'AND', 'FURY'] +533-131564-0010-778: hyp=['FAR', 'AND', 'FURY'] +533-131564-0011-779: ref=['NOW', "DON'T", 'BURST', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0011-779: hyp=['NOW', "DON'T", 'FORCE', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0012-780: ref=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0012-780: hyp=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0013-781: ref=['NOT', 'YEARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0013-781: hyp=['NOT', 'EARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0014-782: ref=['WHAT', 'WOULD', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WORRY', 'HER', 'TO', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0014-782: hyp=['WHAT', 'DID', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WERE', 'HE', 'HURT', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0015-783: ref=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0015-783: hyp=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0016-784: ref=['HE', 'FOLLOWED', 'ME', 'INTO', 'THE', 'LIBRARY'] +533-131564-0016-784: hyp=['IF', 'ALL', 'OF', 'ME', 'INTO', 'THE', 'LIBRARY'] +533-131564-0017-785: ref=['I', 'SOUGHT', 'OUT', 'AND', 'PUT', 'INTO', 'HIS', 'HANDS', 'TWO', 'OF', "MILICENT'S", 'LETTERS', 'ONE', 'DATED', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', 'WILDEST', 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 
'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'A', 'LUCID', 'INTERVAL'] +533-131564-0017-785: hyp=['I', 'SOUGHT', 'OUT', 'AND', 'PUT', 'INTO', 'HIS', 'HANDS', 'TWO', 'OF', "MILICSON'S", 'LETTERS', 'ONE', 'DID', 'IT', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', 'WALDEST', 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'ELUSIVE', 'INTERVAL'] +533-131564-0018-786: ref=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', 'GRIMSBY', 'AND', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENIOUSLY', 'THROWING', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'TO', 'OTHER', "MEN'S", 'SHOULDERS'] +533-131564-0018-786: hyp=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', "GRIM'S", 'BEING', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENUOUSLY', 'THREW', 'IN', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'THE', 'OTHER', "MAN'S", 'SHOULDERS'] +533-131564-0019-787: ref=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'A', 'HEARTY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'D', 'N', 'ME', 'IF', 'I', "DON'T"] +533-131564-0019-787: hyp=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'EARTHLY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'THEN', 'ME', 'IF', 'I', "DON'T"] +533-131564-0020-788: ref=['IF', 'YOU', 'INTEND', 'TO', 'REFORM', 'INVOKE', "GOD'S", 'BLESSING', 'HIS', 'MERCY', 'AND', 'HIS', 'AID', 'NOT', 'HIS', 'CURSE'] +533-131564-0020-788: hyp=['IF', 'YOU', 'INSENT', 'WITH', 'FORM', 'INVOKE', "GOD'S", 'BLESSING', 'IS', 'A', 'MERCY', 'IN', 'THIS', 'APE', 'NOR', 'DISCOURSE'] +533-131564-0021-789: ref=['GOD', 'HELP', 'ME', 'THEN', 'FOR', "I'M", 'SURE', 'I', 'NEED', 'IT'] +533-131564-0021-789: hyp=['GOD', 'HELP', 'ME', 'THEN', 'FOR', 'I', 'AM', 'SURE', 'I', 'NEEDED'] +533-131564-0022-790: ref=["WHERE'S", 'MILICENT'] +533-131564-0022-790: hyp=['WHERE', 'IS', 'MILICENT'] +533-131564-0023-791: ref=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'HER', 'ROUND', 'AND', 'PUSHING', 'HER', 'TOWARDS', 'ME'] +533-131564-0023-791: hyp=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'AROUND', 'AND', 'PUSHING', 'IT', 'TOWARDS', 'ME'] +533-131564-0024-792: ref=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERFLOWING', 'WITH', 'GRATITUDE'] +533-131564-0024-792: hyp=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERWHELMING', 'ITS', 'GRATITUDE'] +533-131564-0025-793: ref=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0025-793: hyp=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0026-794: ref=['YOU', 'NEVER', 'TRIED', 'ME', 'MILLY', 'SAID', 'HE'] +533-131564-0026-794: hyp=['YOU', 'NEVER', 'TRIED', 'ME', 'MERELY', 'SAID', 'HE'] +533-131564-0027-795: ref=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +533-131564-0027-795: 
hyp=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +5442-32873-0000-1365: ref=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0000-1365: hyp=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0001-1366: ref=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THERE', 'AND', 'A', 'WILD', 'LEER', 'IN', 'HIS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0001-1366: hyp=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THEN', 'AND', 'A', 'WILD', 'YARD', 'IN', 'HIS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0002-1367: ref=['A', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0002-1367: hyp=['AND', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0003-1368: ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0003-1368: hyp=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0004-1369: ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL'] +5442-32873-0004-1369: hyp=['OH', 'FRIGHTFUL', 'DREADFUL'] +5442-32873-0005-1370: ref=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0005-1370: hyp=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0006-1371: ref=['BRIGHT', 'AND', 'NATTY', 'WERE', 'THE', 'CHINTZ', 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIRES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0006-1371: hyp=['BRIGHT', 'AND', 'NATTY', 'WITH', 'A', "CHIN'S", 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIVES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0007-1372: ref=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TOILET', 'BOTTLE', 'OF', 'EAU', 'DE', 'COLOGNE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLES', 'AND', 'FOREHEAD'] +5442-32873-0007-1372: hyp=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TALLED', 'BOTTLE', 'OF', 'OVERCLONE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLE', 'AND', 'FOREHEAD'] +5442-32873-0008-1373: ref=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'HER', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0008-1373: hyp=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'A', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0009-1374: ref=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0009-1374: hyp=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 
'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0010-1375: ref=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'IN', 'THE', 'GLASS'] +5442-32873-0010-1375: hyp=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'FROM', 'THE', 'GLASS'] +5442-32873-0011-1376: ref=['RACHEL', 'LAKE', 'RACHEL', 'LAKE', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0011-1376: hyp=['RIGIDLY', 'TO', 'MAKE', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0012-1377: ref=["I'LL", 'STAY', 'HERE', 'THAT', 'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0012-1377: hyp=["I'LL", 'STAY', 'HERE', 'THAT', 'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0013-1378: ref=['HE', 'SLACKENED', 'HIS', 'PACE', 'AND', 'TAPPED', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THAT', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'WAGGOT', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0013-1378: hyp=['HIS', 'CLACK', 'IN', 'THE', 'SPACE', 'AND', 'TOP', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THAT', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'RAGGED', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0014-1379: ref=['LUKE', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOGS', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOG', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DOG', 'CART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0014-1379: hyp=['LOOK', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOG', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOOR', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DOOR', 'CART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0015-1380: ref=['BUT', 'LUKE', 'WAS', 'NOT', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GYLINGDEN'] 
+5442-32873-0015-1380: hyp=['BUT', 'LUKE', 'WAS', 'KNOWN', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GILINGDEN'] +5442-32873-0016-1381: ref=['HERE', 'WERE', 'THE', 'FLOW', 'OF', 'SOUL', 'AND', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'SCAPEGRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0016-1381: hyp=['HERE', 'WERE', 'THE', 'FLOOR', 'OF', 'SOUL', 'AND', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'CAPE', 'BRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0017-1382: ref=['LOSE', 'NO', 'TIME', 'AND', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0017-1382: hyp=['LOSE', 'NO', 'TIME', 'BUT', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0018-1383: ref=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDEAWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOG', 'CART', 'WAS', 'TRUNDLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEY', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'UNDER', 'STARTLING', 'CIRCUMSTANCES', 'AT', 'UNSEASONABLE', 'HOURS'] +5442-32873-0018-1383: hyp=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDE', 'AWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOOR', 'CART', 'WAS', 'TUMBLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEIR', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'AND', 'THE', 'STARTLING', 'CIRCUMSTANCES', 'AND', 'UNSEASONABLE', 'HOURS'] +5442-32873-0019-1384: ref=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TAMAR', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TAMAR'] +5442-32873-0019-1384: hyp=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TO', 'MORROW', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TO', 'MOR'] +5442-41168-0000-1385: ref=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0000-1385: hyp=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0001-1386: ref=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0001-1386: hyp=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0002-1387: ref=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COURSE', 'SERGEY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 
'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0002-1387: hyp=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COARSE', 'SURGY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0003-1388: ref=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ORDER', 'A', 'BALLOT'] +5442-41168-0003-1388: hyp=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ODO', 'A', 'BALLOT'] +5442-41168-0004-1389: ref=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0004-1389: hyp=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0005-1390: ref=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARSHAL', "HE'S", 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0005-1390: hyp=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'BUT', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARTIAN', 'IS', 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0006-1391: ref=['VOTES', 'PLEASE', 'BEASTLY'] +5442-41168-0006-1391: hyp=['VOTES', 'PLEASE', 'BEASTLY'] +5442-41168-0007-1392: ref=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0007-1392: hyp=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0008-1393: ref=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLEROV', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0008-1393: hyp=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELLED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLARE', 'OFF', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0009-1394: ref=['HE', 'FORGOT', 'AS', 'SERGEY', 'IVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'SYLLOGISM', 'THAT', 'IT', 'WAS', 'NECESSARY', 'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'VOTES', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURE', 'THE', 'RECOGNITION', 'OF', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0009-1394: hyp=['HE', 'FORGOT', 'AS', 'SO', 'GIVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'SYLLISM', 'THAT', 'IT', 'WAS', 'NECESSARY', 'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 
'INTO', 'THE', 'MARTIAN', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'VOTES', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FYOV'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURED', 'THE', 'RECOGNITION', 'OF', "FLORO'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0010-1395: ref=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0010-1395: hyp=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0011-1396: ref=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0011-1396: hyp=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0012-1397: ref=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GRAY', 'WHISKERED', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'SCORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JEERED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0012-1397: hyp=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GREY', 'WHISKIRT', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'CORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JEWED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0013-1398: ref=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0013-1398: hyp=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0014-1399: ref=['SERGEY', 'IVANOVITCH', 'FROWNED'] +5442-41168-0014-1399: hyp=['SOJOURNOVITCH', 'FROWNED'] +5442-41168-0015-1400: ref=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0015-1400: hyp=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0016-1401: ref=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'IN', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0016-1401: hyp=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 
'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0017-1402: ref=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0017-1402: hyp=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0018-1403: ref=['SANG', 'OUT', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0018-1403: hyp=['SANG', 'ALL', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0019-1404: ref=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'A', 'BUTTON', 'AND', 'TWO', 'NUTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] +5442-41168-0019-1404: hyp=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'OF', 'BOTTOM', 'AND', 'TWO', 'KNOTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] +5442-41168-0020-1405: ref=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0020-1405: hyp=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0021-1406: ref=['IN', 'REPLY', 'SNETKOV', 'SPOKE', 'OF', 'THE', 'TRUST', 'THE', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'IN', 'HIM', 'THE', 'AFFECTION', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0021-1406: hyp=['INTERPLIES', 'NEDCOV', 'SPOKE', 'OF', 'THE', 'TRUST', 'AND', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'ON', 'HIM', 'THE', 'EFFECT', 'ON', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0022-1407: ref=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'BEFORE', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTEE', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0022-1407: hyp=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'FOR', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTY', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0023-1408: ref=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0023-1408: hyp=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0024-1409: ref=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0024-1409: hyp=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0025-1410: ref=['THEY', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0025-1410: hyp=['THERE', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0026-1411: ref=['TWO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'FOR', 'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'A', 'THIRD', 'HAD', 
'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0026-1411: hyp=['DO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'WAS', 'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'THE', 'THIRD', 'HAD', 'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0027-1412: ref=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLEROV', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41168-0027-1412: hyp=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLAREFF', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41169-0000-1413: ref=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SERGEY', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'HIS', "EQUERRY'S", 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0000-1413: hyp=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SO', 'SHE', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'AN', 'EQUERRIES', 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0001-1414: ref=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0001-1414: hyp=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0002-1415: ref=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0002-1415: hyp=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0003-1416: ref=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0003-1416: hyp=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0004-1417: ref=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0004-1417: hyp=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0005-1418: ref=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0005-1418: hyp=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0006-1419: ref=['LAST', 'YEAR', 'AT', 'OUR', 'DISTRICT', 'MARSHAL', 'NIKOLAY', "IVANOVITCH'S"] +5442-41169-0006-1419: hyp=['MASTER', 'AT', 'OUR', 'DISTRICT', 'MARTIAL', 'NIKOLAY', "IVANOVITCH'S"] +5442-41169-0007-1420: ref=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 
'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0007-1420: hyp=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0008-1421: ref=['WHY', 'WHAT', 'IS', 'THERE', 'TO', 'UNDERSTAND'] +5442-41169-0008-1421: hyp=['WHY', 'WHAT', 'IS', 'THAT', 'TO', 'UNDERSTAND'] +5442-41169-0009-1422: ref=["THERE'S", 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0009-1422: hyp=['THERE', 'IS', 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0010-1423: ref=['THEN', 'TOO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0010-1423: hyp=['THEN', 'DO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0011-1424: ref=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] +5442-41169-0011-1424: hyp=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] +5442-41169-0012-1425: ref=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTERESTS'] +5442-41169-0012-1425: hyp=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTEREST'] +5442-41169-0013-1426: ref=["THEY'RE", 'PROPRIETORS', 'OF', 'A', 'SORT', 'BUT', "WE'RE", 'THE', 'LANDOWNERS'] +5442-41169-0013-1426: hyp=['THEIR', 'PROPRIETORS', 'OF', 'A', 'SORT', 'BUT', 'WE', 'ARE', 'THE', 'LANDOWNERS'] +5442-41169-0014-1427: ref=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0014-1427: hyp=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0015-1428: ref=['IF', "WE'RE", 'LAYING', 'OUT', 'A', 'GARDEN', 'PLANNING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', "YOU'VE", 'A', 'TREE', "THAT'S", 'STOOD', 'FOR', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWERBEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0015-1428: hyp=['IF', 'WE', 'ARE', 'LAYING', 'OUT', 'A', 'GARDEN', 'CLIMBING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', 'YOU', 'HAVE', 'A', 'TREE', 'THAT', 'STOOD', 'IN', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWER', 'BEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0016-1429: ref=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0016-1429: hyp=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0017-1430: ref=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0017-1430: hyp=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0018-1431: ref=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0018-1431: hyp=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0019-1432: ref=['AND', "WHAT'S", 'MORE', 'THE', 'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0019-1432: hyp=['AND', 'ONCE', 'MORE', 'THE', 
'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0020-1433: ref=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0020-1433: hyp=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0021-1434: ref=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'VASSILIEVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', "GARDEN'S", 'NEGLECTED'] +5442-41169-0021-1434: hyp=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'WISLOVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', 'GARDENS', 'NEGLECTED'] +5442-41169-0022-1435: ref=['TO', 'MY', 'THINKING', "I'D", 'CUT', 'DOWN', 'THAT', 'LIME', 'TREE'] +5442-41169-0022-1435: hyp=['TO', 'MY', 'THINKING', "I'D", 'GOT', 'DOWN', 'THE', 'LINE', 'TREE'] +5442-41169-0023-1436: ref=['HERE', "YOU'VE", 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0023-1436: hyp=['HERE', 'YOU', 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0024-1437: ref=["YOU'RE", 'MARRIED', "I'VE", 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0024-1437: hyp=["YOU'RE", 'MARRIED', 'I', 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0025-1438: ref=['YES', "IT'S", 'RATHER', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0025-1438: hyp=['YES', 'AND', 'JOHN', 'IS', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0026-1439: ref=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MUSTACHES'] +5442-41169-0026-1439: hyp=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MOUSTACHES'] +5442-41169-0027-1440: ref=['WHY', "DON'T", 'WE', 'CUT', 'DOWN', 'OUR', 'PARKS', 'FOR', 'TIMBER'] +5442-41169-0027-1440: hyp=['WHY', "DON'T", 'WE', 'GO', 'DOWN', 'OUR', 'BOX', 'FOR', 'TIMBER'] +5442-41169-0028-1441: ref=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0028-1441: hyp=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0029-1442: ref=["THERE'S", 'A', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', "OUGHTN'T", 'TO', 'DO'] +5442-41169-0029-1442: hyp=["THERE'S", 'THE', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', 'OUGHT', 'NOT', 'KNOWN', 'TO', 'DO'] +5442-41169-0030-1443: ref=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0030-1443: hyp=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0031-1444: ref=['WITHOUT', 'A', 'RETURN', 'TOO', 'AT', 'A', 'SIMPLE', 'LOSS'] +5442-41169-0031-1444: hyp=['WITHOUT', 'A', 'RETURN', 'TOO', 'ADD', 'A', 'SIMPLE', 'LAWS'] +5484-24317-0000-571: ref=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLUS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0000-571: hyp=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLASS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0001-572: ref=['BUT', 'HERMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 
'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'IN', 'HONOUR', 'OF', 'THE', 'SEVENTIETH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANOR'] +5484-24317-0001-572: hyp=['BUT', 'HARMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'AN', 'HONOR', 'OF', 'THE', 'SEVENTEENTH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANER'] +5484-24317-0002-573: ref=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0002-573: hyp=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0003-574: ref=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0003-574: hyp=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0004-575: ref=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0004-575: hyp=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0005-576: ref=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0005-576: hyp=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0006-577: ref=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUED', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MYRTILUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0006-577: hyp=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUE', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MERTOLUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0007-578: ref=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'THE', 'DOUBTS', 'WHICH', 'IT', 'AROUSED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0007-578: hyp=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'THE', 'DOUBTS', 'WHICH', 'IT', 'ARISED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0008-579: ref=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBOURING', 'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] 
+5484-24317-0008-579: hyp=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBORING', 'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] +5484-24317-0009-580: ref=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0009-580: hyp=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0010-581: ref=['WHEN', 'HERMON', 'ENTERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATEUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0010-581: hyp=['WHEN', 'HERMAN', 'ANSWERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATIUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0011-582: ref=['THE', 'PLACE', 'BY', "HERMON'S", 'SIDE', 'WHICH', 'ALTHEA', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSINOE'] +5484-24317-0011-582: hyp=['THEY', 'PLACED', 'BY', "HERMANN'S", 'SIDE', 'WHICH', 'ALTHIE', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSENO'] +5484-24317-0012-583: ref=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'HIS', 'BLINDNESS', 'DEBARRED', 'HIM'] +5484-24317-0012-583: hyp=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'IS', 'BLINDNESS', 'DEBARED', 'HIM'] +5484-24317-0013-584: ref=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELT', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TIE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0013-584: hyp=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELL', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TIE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0014-585: ref=['CRATES', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 
'CHARMING', 'FAIR', 'HAIRED', 'NICO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSINOE'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0014-585: hyp=['CREEDS', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 'CHARMING', 'FAIR', 'HAIRED', 'NIGO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSENO'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0015-586: ref=['HIS', 'SON', 'HAD', 'BEEN', 'THIS', 'ROYAL', "DAME'S", 'FIRST', 'HUSBAND', 'AND', 'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LYSIMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0015-586: hyp=['HIS', 'SON', 'HAD', 'BEEN', 'THE', 'ROYAL', "JAMES'S", 'FIRST', 'HUSBAND', 'AND', 'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LYSMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0016-587: ref=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HERMON', 'INCREDULOUSLY'] +5484-24317-0016-587: hyp=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HARMON', 'INCREDULOUSLY'] +5484-24317-0017-588: ref=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECHES', 'AND', 'TIRING', 'WOMEN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0017-588: hyp=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECHES', 'ENTIRE', 'AND', 'WOMAN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0018-589: ref=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'ARE', 'HERE', 'AMYNTAS', 'THE', 'LEECH', 'CHRYSIPPUS', 'AND', 'THE', 'ADMIRABLE', 'PROCLUS'] +5484-24317-0018-589: hyp=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'I', 'HEAR', 'I', 'MEANTIS', 'THE', 'LEECH', 'CHRYSIPPUS', 'IN', 'THE', 'ADMIRABLE', 'PROCLASS'] +5484-24317-0019-590: ref=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOUR', 'LEAVED', 'ONE'] +5484-24317-0019-590: hyp=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOLIEVED', 'ONE'] +5484-24317-0020-591: ref=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISEWORTHY', 'GENEROSITY', 'HELPED', 'ARSINOE', 'IN', 'MANY', 'AN', 'EMBARRASSMENT'] +5484-24317-0020-591: hyp=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISED', 'WORTHY', 'GENEROSITY', 'HELPED', 'OFTEN', 'KNOW', 'IN', 'MANY', 'EMBARRASSMENT'] +5484-24317-0021-592: ref=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FOR', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 'DELIGHT'] +5484-24317-0021-592: hyp=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FROM', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 
'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 'DELIGHT'] +5484-24317-0022-593: ref=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0022-593: hyp=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0023-594: ref=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HERMON'] +5484-24317-0023-594: hyp=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HARMON'] +5484-24317-0024-595: ref=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSINOE', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYES', 'HAD', 'BEEN', 'MADE', 'IN', 'RHODES'] +5484-24317-0024-595: hyp=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSENO', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYE', 'HAD', 'BEEN', 'MADE', 'IN', 'ROADS'] +5484-24317-0025-596: ref=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHEA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0025-596: hyp=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHIA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0026-597: ref=['HERMON', 'LISTENED', 'TO', 'THE', 'PAIR', 'IN', 'SILENCE'] +5484-24317-0026-597: hyp=['HERMAN', 'LISTENED', 'TO', 'THE', 'PARENT', 'SILENCE'] +5484-24317-0027-598: ref=['THE', 'RHODIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSINOE', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMON'S", 'FELLOW', 'STUDENT', 'ASKED', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WHICH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0027-598: hyp=['THE', 'RADIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSENAL', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMANN'S", 'FELLOW', 'STUDENT', 'ASKED', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WITCH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0028-599: ref=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 'HIM'] +5484-24317-0028-599: hyp=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 'HIM'] +5484-24317-0029-600: ref=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 
'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0029-600: hyp=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0030-601: ref=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHEA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HERMON'S", 'REGARD'] +5484-24317-0030-601: hyp=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHIA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HARMON'S", 'REGARD'] +5484-24317-0031-602: ref=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 'AS', 'YOU', 'WERE', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0031-602: hyp=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 'AS', 'YOU', 'WAS', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0032-603: ref=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'ENSNARES', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0032-603: hyp=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'AND', 'SNATHS', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0033-604: ref=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24317-0033-604: hyp=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24318-0000-605: ref=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0000-605: hyp=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0001-606: ref=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMON', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THE', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0001-606: hyp=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMANN', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THEY', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0002-607: ref=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0002-607: hyp=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0003-608: ref=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0003-608: hyp=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0004-609: ref=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 
'THIS', 'SUDDEN', 'END', 'AS', 'A', 'BOON', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0004-609: hyp=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 'HIS', 'SUDDEN', 'END', 'AS', 'A', 'BOOM', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0005-610: ref=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0005-610: hyp=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0006-611: ref=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0006-611: hyp=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0007-612: ref=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMON', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0007-612: hyp=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMAN', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0008-613: ref=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'MYRTILUS', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'OF', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0008-613: hyp=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'BURTLES', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'A', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0009-614: ref=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0009-614: hyp=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0010-615: ref=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0010-615: hyp=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0011-616: ref=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0011-616: hyp=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0012-617: ref=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0012-617: hyp=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0013-618: ref=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 
'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0013-618: hyp=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0014-619: ref=['CRIED', 'HERMON', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOWED', 'THE', 'SOLDIER', 'WHOM', 'THE', 'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0014-619: hyp=['CRIED', 'HERMANN', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOW', 'THE', 'SOLDIER', 'WHOM', 'THE', 'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0015-620: ref=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0015-620: hyp=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0016-621: ref=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIPPUS', 'SILENCED', 'HER', 'WITH', 'THE', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0016-621: hyp=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIP', 'WAS', 'SILENCED', 'HER', 'WITH', 'A', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0017-622: ref=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'THE', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURN', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0017-622: hyp=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'A', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURNED', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0018-623: ref=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MENTAL', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0018-623: hyp=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MANTLE', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0019-624: ref=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PALAESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 
'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0019-624: hyp=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PELLESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0020-625: ref=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0020-625: hyp=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0021-626: ref=['DAPHNE', 'EXCLAIMED', 'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0021-626: hyp=['JAPANE', 'EXCLAIMED', 'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0022-627: ref=['BUT', 'HERMON', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0022-627: hyp=['BUT', 'HERMAN', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0023-628: ref=['THEN', 'DAPHNE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMETER', 'IS', 'THE', 'WORK', 'OF', 'MYRTILUS'] +5484-24318-0023-628: hyp=['THEN', 'JAPANE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMEANOR', 'IS', 'THE', 'WORK', 'OF', 'MYRTALIS'] +5484-24318-0024-629: ref=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0024-629: hyp=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0025-630: ref=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0025-630: hyp=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0026-631: ref=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WHEN', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'AND', 'PALLIATE', 'NOTHING'] +5484-24318-0026-631: hyp=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WITH', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'IMPALION', 'TO', 'NOTHING'] +5484-24318-0027-632: ref=['SO', 'ARCHIAS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0027-632: hyp=['SARKAIUS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0028-633: ref=['HE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0028-633: hyp=['SHE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0029-634: ref=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', "UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'AND', 
'HEADED', 'BY', 'AMYNTAS', 'THE', 'RHODIAN', 'CHRYSIPPUS', 'AND', 'PROCLUS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0029-634: hyp=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', "UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'AND', 'HEADED', 'BY', 'A', 'MEANTESSE', 'THE', 'RODIAN', 'CHRYSIPPUS', 'AND', 'PROCLAUS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0030-635: ref=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0030-635: hyp=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0031-636: ref=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0031-636: hyp=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0032-637: ref=['EVEN', "DAPHNE'S", 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'RECEDED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0032-637: hyp=['EVEN', 'THESE', 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'WAS', 'SEATED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0033-638: ref=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0033-638: hyp=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0034-639: ref=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FESTAL', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COURSING', 'SO', 'WILDLY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMON'] +5484-24318-0034-639: hyp=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FEAST', 'OF', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COARSING', 'SO', 'WIDELY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMANN'] +5484-24318-0035-640: ref=['SHOUTED', 'HIS', 'FRIEND', 'SOTELES', 'IN', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'THIS', 'PAINFUL', 'WALK', 'HERMON'] +5484-24318-0035-640: hyp=['SHOUTED', 'HIS', 'FRIEND', 'SARTUOUS', 'AND', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'HIS', 
'PAINFUL', 'WALK', 'HERE', 'ON'] +5484-24318-0036-641: ref=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0036-641: hyp=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0037-642: ref=['HONOUR', 'TO', 'MYRTILUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FESTAL', 'ASSEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5484-24318-0037-642: hyp=['HONOUR', 'TO', 'MARTILLUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FEAST', 'AN', 'ASSEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5764-299665-0000-405: ref=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMBS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'ON', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THIS', 'GOD', 'GAVE', 'RAIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0000-405: hyp=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMPS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'IN', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THESE', 'GOD', 'GAVE', 'REIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0001-406: ref=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0001-406: hyp=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0002-407: ref=['WHETHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0002-407: hyp=['WHETHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0003-408: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUALLY', 'INFERIOR'] +5764-299665-0003-408: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUAL', 'INFERIOR'] +5764-299665-0004-409: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0004-409: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0005-410: ref=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0005-410: hyp=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0006-411: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'WAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0006-411: hyp=['HIS', 'IRRESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'RAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0007-412: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 
'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'THE', 'LASH', 'FOR', 'THE', 'BABES', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0007-412: hyp=['IF', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'A', 'LASH', 'FOR', 'THE', 'BABES', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0008-413: ref=['IS', 'THIS', 'GOD', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'THUMB', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0008-413: hyp=['IS', 'THE', 'SCOTT', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'TIME', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0009-414: ref=['DID', 'THIS', 'GOD', 'ALLOW', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0009-414: hyp=['DID', 'THIS', 'GOT', 'THE', 'LOW', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0010-415: ref=['DID', 'HE', 'ALLOW', 'TYRANTS', 'TO', 'SHED', 'THE', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0010-415: hyp=['DID', 'HE', 'ALONE', 'TYRANTS', 'TO', 'SHED', 'A', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0011-416: ref=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0011-416: hyp=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0012-417: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'DEVOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FANGED', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0012-417: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'THE', 'FOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FACT', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0013-418: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WHERE', 'LIFE', 'FEEDS', 'ON', 'LIFE'] +5764-299665-0013-418: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WERE', 'LIE', 'FEATS', 'ON', 'LIFE'] +5764-299665-0014-419: ref=['DID', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCE', 'THE', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEED', 'UPON', 'THE', 'OPTIC', 'NERVE', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0014-419: hyp=['THE', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCED', 'A', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEAT', 'UPON', 'THE', 'OPTIC', 'NERVES', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0015-420: ref=['FEAR', 'BUILDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFICE'] +5764-299665-0015-420: hyp=['FEAR', 'BIDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFIC'] +5764-299665-0016-421: ref=['FEAR', 'ERECTS', 'THE', 'CATHEDRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0016-421: hyp=['FEAR', 'ERECTS', 'THE', 'KITRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0017-422: ref=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 
'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0017-422: hyp=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0018-423: ref=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0018-423: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0019-424: ref=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'ENDURETH', 'FOREVER'] +5764-299665-0019-424: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'AND', 'DURRED', 'FOREVER'] +5764-299665-0020-425: ref=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSANDS', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'OR', 'THAT', 'WITH', 'THE', 'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0020-425: hyp=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSAND', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'ALL', 'THAT', 'WITH', 'THE', 'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0021-426: ref=['WAS', 'THERE', 'GOODNESS', 'WAS', 'THERE', 'WISDOM', 'IN', 'THIS'] +5764-299665-0021-426: hyp=['WAS', 'THERE', 'GOODNESS', 'WAS', 'THEIR', 'WISDOM', 'IN', 'THIS'] +5764-299665-0022-427: ref=['OUGHT', 'THE', 'SUPERIOR', 'RACES', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0022-427: hyp=['ALL', 'THE', 'SUPERIOR', 'RAYS', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0023-428: ref=['MOST', 'PEOPLE', 'CLING', 'TO', 'THE', 'SUPERNATURAL'] +5764-299665-0023-428: hyp=['MOST', 'PEOPLE', 'CLINK', 'TO', 'THE', 'SUPERNATURAL'] +5764-299665-0024-429: ref=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0024-429: hyp=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0025-430: ref=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0025-430: hyp=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0026-431: ref=['MAN', 'ADVANCES', 'AND', 'NECESSARILY', 'ADVANCES', 'THROUGH', 'EXPERIENCE'] +5764-299665-0026-431: hyp=['MAN', 'ADVANCES', 'A', 'NECESSARILY', 'ADVANCES', 'THROUGH', 'EXPERIENCE'] +5764-299665-0027-432: ref=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COMES', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0027-432: hyp=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COME', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0028-433: ref=['HE', 'HAS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0028-433: hyp=['HIS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0029-434: ref=['A', 'CHILD', 'CHARMED', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPS', 'IT', 'WITH', 'ITS', 'DIMPLED', 'HAND'] +5764-299665-0029-434: hyp=['A', 'CHILD', 'SHOWN', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPED', 'IT', 'WITH', 'HIS', 'DIMPLED', 'HAND'] +5764-299665-0030-435: ref=['THE', 'POWER', 'THAT', 'WORKS', 'FOR', 'RIGHTEOUSNESS', 'HAS', 'TAUGHT', 'THE', 'CHILD', 'A', 'LESSON'] +5764-299665-0030-435: hyp=['THE', 'POWER', 'THAT', 'WORK', 'FOR', 'RIGHTEOUSNESS', 'HAD', 'TAUGHT', 'THE', 
'CHILD', 'A', 'LESSON'] +5764-299665-0031-436: ref=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0031-436: hyp=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0032-437: ref=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SO', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0032-437: hyp=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SOUL', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0033-438: ref=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 'NATIONS'] +5764-299665-0033-438: hyp=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 'NATIONS'] +5764-299665-0034-439: ref=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0034-439: hyp=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0035-440: ref=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'AS', 'MORAL'] +5764-299665-0035-440: hyp=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'S', 'MORAL'] +5764-299665-0036-441: ref=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OR', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0036-441: hyp=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OF', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0037-442: ref=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAS', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0037-442: hyp=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAD', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0038-443: ref=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0038-443: hyp=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0039-444: ref=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0039-444: hyp=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0040-445: ref=['A', 'MAN', 'PUTS', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0040-445: hyp=['A', 'MAN', 'BUT', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0041-446: ref=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0041-446: hyp=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0042-447: ref=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0042-447: hyp=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0043-448: ref=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0043-448: hyp=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0044-449: ref=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0044-449: hyp=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0045-450: ref=['HAS', 'CHRISTIANITY', 'DONE', 'GOOD'] +5764-299665-0045-450: hyp=['HESTERITY', 'DONEGOOD'] +5764-299665-0046-451: ref=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROL', 'WERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] +5764-299665-0046-451: hyp=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROLLED', 'WHERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] +5764-299665-0047-452: ref=['WHAT', 'HAS', 
'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0047-452: hyp=['WHAT', 'HAS', 'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0048-453: ref=['COULD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0048-453: hyp=['GOOD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0049-454: ref=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0049-454: hyp=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0050-455: ref=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FOR', 'THEM'] +5764-299665-0050-455: hyp=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FAULT', 'THEM'] +5764-299665-0051-456: ref=['THEY', 'HATED', 'PLEASURE'] +5764-299665-0051-456: hyp=['THEY', 'HATED', 'PLEASURE'] +5764-299665-0052-457: ref=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0052-457: hyp=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0053-458: ref=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'UNADULTERATED', 'CURSE'] +5764-299665-0053-458: hyp=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'ANADULTERATED', 'CURSE'] +5764-299665-0054-459: ref=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORD', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0054-459: hyp=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORLD', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0055-460: ref=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0055-460: hyp=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0056-461: ref=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0056-461: hyp=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0057-462: ref=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'INFAMOUS', 'AS', 'THE', 'CATHOLIC', 'SPIRIT'] +5764-299665-0057-462: hyp=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'IN', 'FAMOUS', 'AS', 'THE', 'CATTLE', 'EXPERIOR'] +5764-299665-0058-463: ref=['HAS', 'THE', 'BIBLE', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0058-463: hyp=['HESDAY', 'BUT', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0059-464: ref=['RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'HAS', 'FAILED'] +5764-299665-0059-464: hyp=['WHO', 'RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'HAS', 'FAILED'] +5764-299665-0060-465: ref=['RELIGION', 'HAS', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 'AND', 'THOUGHT'] +5764-299665-0060-465: hyp=['RELIGION', 'HATH', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 'AND', 'THOUGHT'] 
+5764-299665-0061-466: ref=['RELIGION', 'HAS', 'NEVER', 'MADE', 'MAN', 'FREE'] +5764-299665-0061-466: hyp=['RELIGION', 'IS', 'NEVER', 'MADE', 'MEN', 'FRE'] +5764-299665-0062-467: ref=['IT', 'HAS', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0062-467: hyp=['HE', 'JUST', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0063-468: ref=['ARE', 'CHRISTIANS', 'MORE', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0063-468: hyp=['AH', 'CHRISTIAN', 'SMALL', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0064-469: ref=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0064-469: hyp=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0065-470: ref=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HONOR', 'AS', 'ALMS'] +5764-299665-0065-470: hyp=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HUNGER', 'AS', 'ALMS'] +5764-299665-0066-471: ref=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'THIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0066-471: hyp=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'HIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0067-472: ref=['WE', 'MUST', 'HAVE', 'CORNER', 'STONES'] +5764-299665-0067-472: hyp=['WE', 'MUST', 'HAVE', 'CORN', 'THE', 'STONES'] +5764-299665-0068-473: ref=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'A', 'BASEMENT'] +5764-299665-0068-473: hyp=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'ABASEMENT'] +5764-299665-0069-474: ref=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0069-474: hyp=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0070-475: ref=['I', 'HAVE', 'A', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0070-475: hyp=['I', 'HAVE', 'IT', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0071-476: ref=['THE', 'FIRST', 'STONE', 'IS', 'THAT', 'MATTER', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0071-476: hyp=['THE', 'FIRST', 'STONE', 'EAST', 'THAT', 'MATTER', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0072-477: ref=['IF', 'THESE', 'CORNER', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'AND', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0072-477: hyp=['IF', 'THIS', 'CORN', 'THE', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'END', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0073-478: ref=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'A', 'CREATOR'] +5764-299665-0073-478: hyp=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 
'CAN', 'BE', 'A', 'CREATOR'] +5764-299665-0074-479: ref=['IT', 'FOLLOWS', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'ANY', 'DESIGN', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0074-479: hyp=['IT', 'FOLLOWED', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'AND', 'A', 'DESIGNED', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0075-480: ref=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0075-480: hyp=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0076-481: ref=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0076-481: hyp=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0077-482: ref=['THAT', 'WHICH', 'HAS', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0077-482: hyp=['THAT', 'WHICH', 'HATH', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0078-483: ref=['IN', 'THE', 'INFINITE', 'CHAIN', 'THERE', 'IS', 'AND', 'THERE', 'CAN', 'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0078-483: hyp=['IN', 'THE', 'INFINITE', 'CHANGE', 'WREATHS', 'AND', 'THERE', 'CAN', 'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0079-484: ref=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0079-484: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0080-485: ref=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0080-485: hyp=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MAN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0081-486: ref=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELED'] +5764-299665-0081-486: hyp=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELLED'] +5764-299665-0082-487: ref=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'TRACED'] +5764-299665-0082-487: hyp=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'PRAISED'] +5764-299665-0083-488: ref=['FOR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'TRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0083-488: hyp=['FOUR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'CRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0084-489: ref=['WHY', 'HAVE', 'THE', 'REFORMERS', 'FAILED'] +5764-299665-0084-489: hyp=['WHY', 'HAVE', 'THE', 'REFORMED', 'FAITH'] +5764-299665-0085-490: ref=['THEY', 'DEPEND', 'ON', 'THE', 'LORD', 'ON', 'LUCK', 'AND', 'CHARITY'] +5764-299665-0085-490: hyp=['THEY', 'DEPEND', 'ON', 'THE', 'LOT', 'UNLUCK', 'AND', 'CHARITY'] +5764-299665-0086-491: ref=['THEY', 'LIVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0086-491: hyp=['THEY', 'LEAVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0087-492: ref=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADEMARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0087-492: hyp=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADE', 'MARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0088-493: ref=['NATURE', 'PRODUCES', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 'WITHOUT', 'THOUGHT'] +5764-299665-0088-493: hyp=['NATURE', 'PRODUCED', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 
'WITHOUT', 'THOUGHT'] +5764-299665-0089-494: ref=['MUST', 'THE', 'WORLD', 'FOREVER', 'REMAIN', 'THE', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0089-494: hyp=['MISTER', 'BUILD', 'FOREVER', 'REMAINED', 'A', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0090-495: ref=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'BURDENS', 'AND', 'CURSES', 'WHY'] +5764-299665-0090-495: hyp=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'A', 'BURDEN', 'AND', 'CURSES', 'WHY'] +5764-299665-0091-496: ref=['PASSION', 'IS', 'AND', 'ALWAYS', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0091-496: hyp=['PASSION', 'IS', 'AND', 'ALL', 'THIS', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0092-497: ref=['LAW', 'CAN', 'PUNISH', 'BUT', 'IT', 'CAN', 'NEITHER', 'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0092-497: hyp=['LAW', 'CAN', 'PUNISH', 'THAT', 'IT', 'CAN', 'NEITHER', 'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0093-498: ref=['THIS', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0093-498: hyp=['THESE', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0094-499: ref=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0094-499: hyp=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0095-500: ref=['THIS', 'FREES', 'WOMAN'] +5764-299665-0095-500: hyp=['THIS', 'FREEZE', 'WOMEN'] +5764-299665-0096-501: ref=['POVERTY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0096-501: hyp=['POVERTY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0097-502: ref=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FORTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FACE', 'THE', 'FUTURE', 'WITH', 'A', 'SMILE'] +5764-299665-0097-502: hyp=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FAULTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FAITH', 'THE', 'FUTURE', 'WITH', 'US', 'MIND'] +6070-63485-0000-2599: ref=["THEY'RE", 'DONE', 'FOR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOW', 'KEY', 'TO', 'THE', 'CHOUETTE', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0000-2599: hyp=['THERE', 'DONE', 'FAR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOKI', 'TO', 'THE', 'SWEAT', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0001-2600: ref=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0001-2600: hyp=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0002-2601: ref=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SARAH', 'AND', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] +6070-63485-0002-2601: hyp=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SEREN', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] +6070-63485-0003-2602: ref=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPIS', 'FRANC', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 
'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPENED', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0003-2602: hyp=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPPY', 'FRANK', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPEN', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0004-2603: ref=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SEYTON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0004-2603: hyp=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SEYTON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0005-2604: ref=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'GRUMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0005-2604: hyp=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'TREMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0006-2605: ref=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0006-2605: hyp=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0007-2606: ref=['OH', 'AH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0007-2606: hyp=['UH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0008-2607: ref=['THEN', 'ADDRESSING', 'THOMAS', 'SEYTON', 'YOU', 'KNOW', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0008-2607: hyp=['THEN', 'ADDRESSING', 'THOMAS', 'SETTON', 'YOU', 'KNOW', 'THE', 'PLANE', 'OF', 'SAINT', 'DENIS'] +6070-63485-0009-2608: ref=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAVE', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0009-2608: hyp=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAD', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0010-2609: ref=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0010-2609: hyp=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0011-2610: ref=['WRETCH', 'I', 'DO', 'NOT', 'SEEK', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0011-2610: hyp=['WRETCH', 'I', 'DO', 'NOT', 'SEE', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0012-2611: ref=["LET'S", 'GO', 'AND', 'MEET', 'HIM'] +6070-63485-0012-2611: hyp=["LET'S", 'GO', 'AND', 'MEET', 'HIM'] +6070-63485-0013-2612: ref=['OLD', 'BOY', 'IT', 'WILL', 'PAY', 'FOR', 'LOOKING', 'AFTER'] +6070-63485-0013-2612: hyp=['OLD', 'BY', 'IT', 'WILL', 'PAY', 'FOR', 
'LOOKING', 'AFTER'] +6070-63485-0014-2613: ref=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0014-2613: hyp=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0015-2614: ref=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0015-2614: hyp=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0016-2615: ref=['BETWEEN', 'SAINT', 'OUEN', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLTE', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0016-2615: hyp=['BETWEEN', 'SAINT', 'WAT', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLT', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0017-2616: ref=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 'THE', 'SELF', 'STYLED', 'FAN', 'PAINTER'] +6070-63485-0017-2616: hyp=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 'THE', 'SELF', 'STYLED', 'PAN', 'PAINTER'] +6070-63485-0018-2617: ref=['THE', 'FIACRE', 'STARTED'] +6070-63485-0018-2617: hyp=['THE', 'FIACCHUS', 'STARTED'] +6070-86744-0000-2569: ref=['FRANZ', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0000-2569: hyp=['FRANCE', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0001-2570: ref=['MY', 'VERY', 'GOOD', 'FRIEND', 'AND', 'EXCELLENT', 'NEIGHBOR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0001-2570: hyp=['MY', 'VERY', 'GOOD', 'FRIEND', 'AN', 'EXCELLENT', 'NEIGHBOUR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0002-2571: ref=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 'MORCERF', 'ALTHOUGH', 'OF', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 'UNHESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0002-2571: hyp=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 'MORCERF', 'ALTHOUGH', 'A', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 
'AM', 'HESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0003-2572: ref=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0003-2572: hyp=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0004-2573: ref=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0004-2573: hyp=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'IN', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0005-2574: ref=['SHALL', 'WE', 'MAKE', 'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0005-2574: hyp=['SHOW', 'WE', 'MAKE', 'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0006-2575: ref=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'A', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'ADDED', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THE', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0006-2575: hyp=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'THE', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'I', 'DID', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THAT', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0007-2576: ref=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURT', 'YARD', 'ENTIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0007-2576: hyp=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURTYARD', 'ENTIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0008-2577: ref=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'HIS', 'TABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PIECE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] 
+6070-86744-0008-2577: hyp=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'HIS', 'TABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PIECE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] +6070-86744-0009-2578: ref=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0009-2578: hyp=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0010-2579: ref=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0010-2579: hyp=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0011-2580: ref=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] +6070-86744-0011-2580: hyp=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] +6070-86744-0012-2581: ref=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOR'] +6070-86744-0012-2581: hyp=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOUR'] +6070-86744-0013-2582: ref=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0013-2582: hyp=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0014-2583: ref=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEEM', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0014-2583: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEE', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0015-2584: ref=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0015-2584: hyp=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0016-2585: ref=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0016-2585: hyp=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0017-2586: ref=['UPON', 'MY', 'HONOR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0017-2586: hyp=['UPON', 'MY', 'HONOUR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0018-2587: ref=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'GROTTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 
'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 'SAIL', 'TOWARD', 'PORTO', 'VECCHIO'] +6070-86744-0018-2587: hyp=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'GROTTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 'SAIL', 'TOWARD', 'PORT', 'OR', 'VECCHIO'] +6070-86744-0019-2588: ref=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLOSSEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0019-2588: hyp=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLISEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0020-2589: ref=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0020-2589: hyp=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0021-2590: ref=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0021-2590: hyp=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0022-2591: ref=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRY', 'IS', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONGUE', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HAVE', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0022-2591: hyp=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRIES', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONG', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HATH', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0023-2592: ref=['CERTAINLY', 'THESE', 'ARE', 'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0023-2592: hyp=['CERTAINLY', 'THESE', 'ARE', 'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0024-2593: ref=['MY', 'DEAR', 'FRANZ', 'REPLIED', 'ALBERT', 
'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0024-2593: hyp=['MY', 'DEAR', 'FRIENDS', 'REPLIED', 'ALBERT', 'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0025-2594: ref=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRY', 'IS', 'HE', 'A', 'NATIVE'] +6070-86744-0025-2594: hyp=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRIES', 'HE', 'A', 'NATIVE'] +6070-86744-0026-2595: ref=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SIGNOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0026-2595: hyp=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SENOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0027-2596: ref=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0027-2596: hyp=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0028-2597: ref=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0028-2597: hyp=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0029-2598: ref=['AND', 'NOW', 'MY', 'DEAR', 'FRANZ', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86744-0029-2598: hyp=['AND', 'NOW', 'MY', 'DEAR', 'FRANZ', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86745-0000-2549: ref=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MERIT', 'A', 'MORE', 'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0000-2549: hyp=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MARRIT', 'A', 'MORE', 'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 
'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0001-2550: ref=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0001-2550: hyp=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0002-2551: ref=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALET', 'ENTERED', 'HE', 'COMPOSED', 'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0002-2551: hyp=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALET', 'ENTERED', 'HE', 'COMPOSED', 'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0003-2552: ref=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0003-2552: hyp=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0004-2553: ref=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0004-2553: hyp=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0005-2554: ref=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0005-2554: hyp=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0006-2555: ref=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0006-2555: hyp=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0007-2556: ref=['GOOD', 'MORNING', 'LUCIEN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0007-2556: hyp=['GOOD', 'MORNING', 'LUCIEN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0008-2557: ref=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0008-2557: hyp=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0009-2558: ref=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0009-2558: hyp=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0010-2559: ref=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGES', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'SEVEN'] +6070-86745-0010-2559: hyp=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGE', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'THE', 'SEVENTH'] 
+6070-86745-0011-2560: ref=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0011-2560: hyp=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0012-2561: ref=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0012-2561: hyp=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0013-2562: ref=['PESTE', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0013-2562: hyp=['PESTS', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0014-2563: ref=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0014-2563: hyp=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0015-2564: ref=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0015-2564: hyp=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0016-2565: ref=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0016-2565: hyp=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0017-2566: ref=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'REAP', 'A', 'LITTLE', 'BLUE'] +6070-86745-0017-2566: hyp=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'READ', 'A', 'LITTLE', 'BLUE'] +6070-86745-0018-2567: ref=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIEN'] +6070-86745-0018-2567: hyp=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIAN'] +6070-86745-0019-2568: ref=['WITH', 'YOUR', 'TALENTS', 'YOU', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6070-86745-0019-2568: hyp=['WITH', 'THE', 'OR', 'TALONS', 'HE', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6128-63240-0000-503: ref=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0000-503: hyp=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0001-504: ref=['THAT', 'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0001-504: hyp=['THAT', 'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0002-505: ref=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] 
+6128-63240-0002-505: hyp=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] +6128-63240-0003-506: ref=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'RATHER', 'A', 'WORK', 'A', 'DAY', 'ASPECT'] +6128-63240-0003-506: hyp=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'MOTHER', 'A', 'WORKADAY', 'ASPECT'] +6128-63240-0004-507: ref=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRUMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0004-507: hyp=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRAMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0005-508: ref=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'POOR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LOOK', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0005-508: hyp=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'FAR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LIVE', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0006-509: ref=['THOSE', 'OF', 'BASIL', 'RANSOM', 'WERE', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATURE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0006-509: hyp=['THOSE', 'OF', 'BAZA', 'RANSOM', 'WENT', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATUE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0007-510: ref=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'A', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0007-510: hyp=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'A', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0008-511: ref=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'CONSONANTS', 'AND', 'SWALLOWED', 'HIS', 'VOWELS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 
'ELISIONS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPANSE', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0008-511: hyp=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'COUNTENANCE', 'AND', 'SWALLOWED', 'HIS', 'VOWALS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 'ELYGIANS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPOUNDS', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0009-512: ref=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0009-512: hyp=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0010-513: ref=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'RAPPERS', 'AND', 'ROARING', 'RADICALS'] +6128-63240-0010-513: hyp=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'WRAPPERS', 'AND', 'ROWING', 'RADICALS'] +6128-63240-0011-514: ref=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0011-514: hyp=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0012-515: ref=['HE', 'LOOKED', 'AT', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0012-515: hyp=['HE', 'LIFTED', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0013-516: ref=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0013-516: hyp=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0014-517: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0014-517: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0015-518: ref=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0015-518: hyp=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0016-519: ref=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0016-519: hyp=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0017-520: ref=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'AT', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LUNA'] +6128-63240-0017-520: hyp=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LENA'] +6128-63240-0018-521: ref=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 'IF', 'YOU', 'WERE'] +6128-63240-0018-521: hyp=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 'IF', 'YOU', 'WERE'] +6128-63240-0019-522: 
ref=['AND', 'MISSUS', 'LUNA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0019-522: hyp=['AND', 'MISSUS', 'LENA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0020-523: ref=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0020-523: hyp=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0021-524: ref=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BOSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0021-524: hyp=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BUSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0022-525: ref=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WENT', 'THROUGH', 'ANY', 'FORMS'] +6128-63240-0022-525: hyp=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WON', 'THROUGH', 'ANY', 'FORMS'] +6128-63240-0023-526: ref=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AT', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0023-526: hyp=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AND', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0024-527: ref=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'WHO', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNA'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0024-527: hyp=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'HER', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNAR'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0025-528: ref=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'COLD', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0025-528: hyp=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'CALLED', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0026-529: ref=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WE', 'ARE', 'GOING', 'TO', 'A', 'THEATRE', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINE', 'SO', 'EARLY'] +6128-63240-0026-529: hyp=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WILL', "DON'T", 'YOU', 'THEATER', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINE', 'SO', 'EARLY'] +6128-63240-0027-530: ref=['MISSUS', "LUNA'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 
'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOYAGE'] +6128-63240-0027-530: hyp=['MISSUS', "LUNDY'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOY', 'EACH'] +6128-63241-0000-557: ref=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BOEOTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0000-557: hyp=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BE', 'OTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0001-558: ref=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 'HIS', 'OWN', 'SOFT', 'CLIME', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORILY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0001-558: hyp=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 'HIS', 'OWN', 'SOFT', 'CLIMB', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0002-559: ref=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0002-559: hyp=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0003-560: ref=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0003-560: hyp=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0004-561: ref=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0004-561: hyp=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0005-562: ref=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0005-562: hyp=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0006-563: ref=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HE', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 
'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'A', 'GNAWING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0006-563: hyp=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HIS', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'ENNARING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0007-564: ref=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BASIL', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] +6128-63241-0007-564: hyp=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BALES', 'AT', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] +6128-63241-0008-565: ref=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANSOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HOW', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0008-565: hyp=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANDOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HER', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0009-566: ref=['OLIVE', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0009-566: hyp=['OLIV', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0010-567: ref=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'RULE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0010-567: hyp=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'RULE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0011-568: ref=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASIL', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0011-568: hyp=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASE', 'OR', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 
'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0012-569: ref=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0012-569: hyp=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0013-570: ref=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THOUGH', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BASIL', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'CONTEND'] +6128-63241-0013-570: hyp=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THOUGH', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BEESER', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'COMPEND'] +6128-63244-0000-531: ref=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0000-531: hyp=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0001-532: ref=['HOW', 'DID', 'THE', 'LADIES', 'ON', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOT'] +6128-63244-0001-532: hyp=['HOW', 'DID', 'THE', 'LADIES', 'AND', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOT'] +6128-63244-0002-533: ref=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0002-533: hyp=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0003-534: ref=['WITH', 'HER', 'IMMENSE', 'SYMPATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFORMERS', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0003-534: hyp=['WITH', 'HER', 'MENST', 'SYMPATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFUSE', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0004-535: ref=['OLIVE', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKED', 'ABOUT', 'AS', 'IF', 'IT', 'WERE', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0004-535: hyp=['I', 'DID', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKS', 'ABOUT', 'AS', 'IF', 'IT', 'WAS', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WHERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0005-536: ref=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'LIVED', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 'MISSUS', 'FARRINDER', 'WHO', 'LIVED', 'AT', 'ROXBURY', 'OUGHT', 'NOT', 'TO', 'MIX', 'THINGS', 'UP'] +6128-63244-0005-536: hyp=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'IF', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 'MISSUS', 'FALLINGER', 'WHO', 'LIVED', 'AT', 'BRAXBURY', 'OUGHT', 'NOT', 'TO', 'MAKE', 'SPENCE', 'UP'] +6128-63244-0006-537: ref=['SHE', 'KNEW', 
'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIERARCHY', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'FARRINDER', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'A', 'REPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0006-537: hyp=['SHE', 'KNEW', 'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIRAKEE', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'FAIRRING', 'JUST', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'UNREPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0007-538: ref=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0007-538: hyp=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0008-539: ref=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0008-539: hyp=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0009-540: ref=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WHITE', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LAST', 'ANALYSIS', 'THAT', 'THEY', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0009-540: hyp=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WORLD', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LASTIS', 'THAT', 'THE', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0010-541: ref=['OLIVE', 'CHANCELLOR', 'WONDERED', 'HOW', 'MISSUS', 'FARRINDER', 'WOULD', 'TREAT', 'THAT', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0010-541: hyp=['OUT', 'OF', 'CHANCELLOR', 'I', 'WONDERED', 'HOW', 'MISSUS', 'THINDER', 'WOULD', 'TREAT', 'THEIR', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0011-542: ref=['IF', 'IT', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CERTAIN', 'STEPS', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0011-542: hyp=['IF', 'IT', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CERTAIN', 'STEPS', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0012-543: ref=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0012-543: hyp=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0013-544: ref=['RAISE', 'THE', 'STANDARD', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'A', 'THOUSAND', 'NAMES'] +6128-63244-0013-544: hyp=['PRINCE', 'THE', 'STANDARD', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'A', 'SPASM', 'NAMES'] +6128-63244-0014-545: ref=['I', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 'WELL', 'AS', 'THE', 'BIG', 'CURRENTS', 'MISSUS', 'FARRINDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THE', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0014-545: hyp=['AND', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 'WELL', 'AS', 'THE', 'BIG', 'CURRANTS', 'MISSUS', 'FARRENDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 
'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THIS', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0015-546: ref=['SAID', 'OLIVE', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', 'REMISSION', 'OF', 'RESPONSIBILITY'] +6128-63244-0015-546: hyp=['SAID', 'OLDEST', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', 'REMISSIONOUS', 'RESPONSIBILITY'] +6128-63244-0016-547: ref=['I', 'WANT', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0016-547: hyp=['HOW', 'WONT', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0017-548: ref=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENFRANCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] +6128-63244-0017-548: hyp=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENCOMCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] +6128-63244-0018-549: ref=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0018-549: hyp=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0019-550: ref=['THEY', 'WERE', 'HER', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0019-550: hyp=['THEY', 'WERE', 'HIS', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0020-551: ref=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'THE', 'JUST', 'REVOLUTION', 'IT', 'MUST', 'TRIUMPH', 'IT', 'MUST', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOOD', 'STAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0020-551: hyp=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'DRESSED', 'REVELATION', 'IT', 'WAS', 'TRIUMPH', 'IT', 'WAS', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOODSTAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0021-552: ref=['THEY', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PULSE', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0021-552: hyp=['THERE', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PULSE', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0022-553: ref=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'AS', 'THIS', 'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SAW', 'THE', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MIST', 'OF', 'EMOTION', 'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'AS', 'SUCCESS'] +6128-63244-0022-553: hyp=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'OF', 'THIS', 'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SOLDOM', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MIST', 'OF', 'THE', 'NATION', 
'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'IS', 'SUCCESS'] +6128-63244-0023-554: ref=['WHEN', 'MISS', 'BIRDSEYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'HER', 'FAMILIAR', 'HER', 'COMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITARY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MARTYR'] +6128-63244-0023-554: hyp=['WHEN', 'MISS', "BIRD'S", 'EYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'HER', 'FAMILIAR', 'HYCOMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MARTYR'] +6128-63244-0024-555: ref=['OLIVE', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'UNREWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OR', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0024-555: hyp=['ONLY', 'IF', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'IN', 'REWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OF', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0025-556: ref=['SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMPLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6128-63244-0025-556: hyp=['IF', 'SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMBLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6432-63722-0000-2431: ref=['BUT', 'SCUSE', 'ME', "DIDN'T", 'YO', 'FIGGER', 'ON', 'DOIN', 'SOME', 'DETECTIN', 'AN', 'GIVE', 'UP', 'FISHIN'] +6432-63722-0000-2431: hyp=['BUTCHUSE', 'ME', 'THEN', "YOU'LL", 'FAGONNE', 'DOING', 'SOME', 'DETECTIVE', 'AND', 'GIVIN', 'UP', 'FISHING'] +6432-63722-0001-2432: ref=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0001-2432: hyp=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0002-2433: ref=["I'M", 'GOING', 'OFF', 'FISHING', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'I', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0002-2433: hyp=["I'M", 'GOIN', 'OUR', 'FISHIN', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'AND', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0003-2434: ref=['GET', 'READY', 'SHAG', 'YES', 'SAH', 'COLONEL'] +6432-63722-0003-2434: hyp=['GET', 'READY', 'SHAGG', 'YES', 'A', 'COLONEL'] +6432-63722-0004-2435: ref=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 'FACTS', 'IN', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0004-2435: hyp=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 'FACTS', 'IN', 'THE', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0005-2436: ref=['PINKUS', 'AND', 'DONOVAN', 
"HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0005-2436: hyp=['PICK', 'US', 'AND', 'DONOVAN', "HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0006-2437: ref=['CARROLL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'CURL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0006-2437: hyp=['KAL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'GIRL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0007-2438: ref=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0007-2438: hyp=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0008-2439: ref=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0008-2439: hyp=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0009-2440: ref=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0009-2440: hyp=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0010-2441: ref=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0010-2441: hyp=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0011-2442: ref=["YOU'RE", 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0011-2442: hyp=['YOU', 'ARE', 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0012-2443: ref=["I'M", 'WORKING', 'IN', 'THE', 'INTERESTS', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0012-2443: hyp=['HIGH', 'MARKING', 'IN', 'THE', 'INTEREST', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0013-2444: ref=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCES', 'LIKE'] +6432-63722-0013-2444: hyp=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCE', 'IS', 'LIKE'] +6432-63722-0014-2445: ref=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0014-2445: hyp=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0015-2446: ref=['GAD', 'EXCLAIMED', 'THE', 'COLONEL'] +6432-63722-0015-2446: hyp=['GAD', 'EXPLAINED', 'THE', 'COLONEL'] +6432-63722-0016-2447: ref=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0016-2447: hyp=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0017-2448: ref=['LOOK', 'HERE', 'COLONEL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0017-2448: hyp=['LOOK', 'HERE', 'CAROL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0018-2449: ref=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0018-2449: hyp=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0019-2450: ref=['I', 'HAPPENED', 'TO', 'SEE', 'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', 'SINGA', "PHUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0019-2450: hyp=['I', 'HAPPENED', 'TO', 'SEE', 
'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', "SINGAFUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0020-2451: ref=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0020-2451: hyp=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0021-2452: ref=['PHUT', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'SHERE', 'ALI'] +6432-63722-0021-2452: hyp=['FIVE', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'TO', 'SHARE', 'ALI'] +6432-63722-0022-2453: ref=['ANYHOW', 'HE', 'AND', 'PHUT', "DIDN'T", 'GET', 'ALONG', 'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0022-2453: hyp=['ANYHOW', 'HE', 'INFECTED', 'GET', 'ALONG', 'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0023-2454: ref=['NEIGHBORS', 'OFTEN', 'HEARD', 'EM', 'SCRAPPIN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0023-2454: hyp=['LABORS', 'OFTEN', 'HEARD', 'HIM', 'SCRAP', 'IN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0024-2455: ref=['TOWARD', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0024-2455: hyp=['TO', 'OUR', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0025-2456: ref=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', "ALI'S", 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0025-2456: hyp=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', 'ALWAYS', 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0026-2457: ref=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0026-2457: hyp=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0027-2458: ref=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAGOS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBORS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYIN', 'I', 'SEE'] +6432-63722-0027-2458: hyp=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAG', 'WAS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBORS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYING', 'I', 'SEE'] +6432-63722-0028-2459: ref=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0028-2459: hyp=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0029-2460: ref=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', 'HEADQUARTERS', 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'YET', 'SEE'] +6432-63722-0029-2460: hyp=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', 'HEADQUARTER', 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'AT', 'SEE'] +6432-63722-0030-2461: ref=["YOU'RE", 'NOT', 'AS', 'SQUEAMISH', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HAND', 
'AND', 'IN', 'A', "WOMAN'S"] +6432-63722-0030-2461: hyp=["YOU'RE", 'NOT', 'A', 'SCREAMY', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HANDS', 'AND', 'A', "WOMAN'S"] +6432-63722-0031-2462: ref=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SKEPTICAL'] +6432-63722-0031-2462: hyp=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SCEPTICAL'] +6432-63722-0032-2463: ref=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0032-2463: hyp=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0033-2464: ref=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0033-2464: hyp=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0034-2465: ref=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0034-2465: hyp=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0035-2466: ref=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'AT', 'HIS', 'IRISH', 'COMPATRIOT', 'SLIGHTLY', 'LAUGHED', 'THE', 'COLONEL'] +6432-63722-0035-2466: hyp=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'OF', 'HIS', 'IRISH', 'COMPATRIOT', 'SLIGHTLY', "THEY'LL", 'HAVE', 'THE', 'COLONEL'] +6432-63722-0036-2467: ref=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0036-2467: hyp=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0037-2468: ref=['AND', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEADPENCIL'] +6432-63722-0037-2468: hyp=['IN', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEAD', 'PENCIL'] +6432-63722-0038-2469: ref=['AND', 'DONOVAN', 'TAKE', 'A', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0038-2469: hyp=['AND', 'DONALIN', 'TAKE', 'HER', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0039-2470: ref=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0039-2470: hyp=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0040-2471: ref=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0040-2471: hyp=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0041-2472: ref=['SIMPLY', 'BECAUSE', 'THIS', 'WATCH'] +6432-63722-0041-2472: hyp=['SIMPLY', 'BECAUSE', 'THIS', 'WATCH'] +6432-63722-0042-2473: ref=['SOME', 'ONE', 'OUT', 'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0042-2473: hyp=['SOME', 'ONE', 'OUT', 
'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0043-2474: ref=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0043-2474: hyp=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0044-2475: ref=['SINGA', 'PHUT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0044-2475: hyp=['SHING', 'AFOOT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0045-2476: ref=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'TWO', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0045-2476: hyp=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'JEW', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0046-2477: ref=['WHAT', 'ARE', 'THE', 'CHANCES', 'OF', 'GETTING', 'HIM', 'OFF', 'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0046-2477: hyp=['WHAT', 'ARE', 'THE', 'CHURCHES', 'OF', 'GETTING', 'HIM', 'OFF', 'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0047-2478: ref=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0047-2478: hyp=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0048-2479: ref=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'WITH', 'A', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0048-2479: hyp=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'FOR', 'THE', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0049-2480: ref=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0049-2480: hyp=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0050-2481: ref=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0050-2481: hyp=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0051-2482: ref=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0051-2482: hyp=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0052-2483: ref=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0052-2483: hyp=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0053-2484: ref=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', "I'LL", 'LAND', 'MY', 'FISH'] +6432-63722-0053-2484: hyp=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', 'I', 'LAND', 'MY', 'FISH'] +6432-63722-0054-2485: ref=["I'D", 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 'KENNETH'] +6432-63722-0054-2485: hyp=['I', 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 'KENNETH'] +6432-63722-0055-2486: ref=['WHAT', 'IS', 'IT', 'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0055-2486: hyp=['WHAT', 'IS', 'IT', 
'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0056-2487: ref=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0056-2487: hyp=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0057-2488: ref=['YES', 'IT', 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0057-2488: hyp=['YES', "IT'S", 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0058-2489: ref=['NO', 'ALIMONY', 'REPEATED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0058-2489: hyp=['NO', 'ALIMONY', 'REPLIED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0059-2490: ref=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63722-0059-2490: hyp=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63723-0000-2491: ref=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0000-2491: hyp=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0001-2492: ref=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'THE', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREEL', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0001-2492: hyp=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'A', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREOLE', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0002-2493: ref=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0002-2493: hyp=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0003-2494: ref=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0003-2494: hyp=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0004-2495: ref=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0004-2495: hyp=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FINISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0005-2496: ref=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0005-2496: hyp=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0006-2497: ref=['IT', "ISN'T", 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0006-2497: hyp=['IT', 'IS', 'IN', 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0007-2498: ref=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 'INTO', 'THAT', 'OF', 'THE', 'BASER', 'SORT'] +6432-63723-0007-2498: hyp=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 'INTO', 'THAT', 'OF', 'A', 'BASER', 'SORT'] +6432-63723-0008-2499: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'LARCH', 'STRUCK', 'HER', 'THAT', 'THERE', 
'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0008-2499: hyp=['IT', 'ALL', 'MEAN', 'THAT', 'LARGE', 'STRUCK', 'HER', 'THAT', 'THERE', 'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0009-2500: ref=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0009-2500: hyp=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0010-2501: ref=['AARON', "GRAFTON'S", 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTEDLY', 'CONFIRMED'] +6432-63723-0010-2501: hyp=['AARON', "GRAFTON'S", 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTED', 'GREAT', 'CONFIRMED'] +6432-63723-0011-2502: ref=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0011-2502: hyp=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0012-2503: ref=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0012-2503: hyp=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0013-2504: ref=['BECAUSE', 'LARCH', 'MADE', 'NO', 'DEFENSE'] +6432-63723-0013-2504: hyp=['BECAUSE', 'LARGE', 'MADE', 'NO', 'DEFENCE'] +6432-63723-0014-2505: ref=['LARCH', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0014-2505: hyp=['LARGE', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0015-2506: ref=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'COURT', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0015-2506: hyp=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'CORP', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0016-2507: ref=['NO', 'BUT', 'HE', 'WILL', 'OR', "I'LL", 'SUE', 'HIM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0016-2507: hyp=['NO', 'BUT', 'HE', 'WILL', 'OR', 'ELSE', 'UM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0017-2508: ref=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0017-2508: hyp=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0018-2509: ref=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0018-2509: hyp=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0019-2510: ref=['THE', 'MURDER', 'OF', 'MISSUS', 'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0019-2510: hyp=['THE', 'MURDER', 'OF', 'MISSUS', 'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 
'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0020-2511: ref=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0020-2511: hyp=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0021-2512: ref=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALLROOM', 'AND', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0021-2512: hyp=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'THE', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALL', 'ROOM', 'IN', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0022-2513: ref=['LARCH', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0022-2513: hyp=['LARGE', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0023-2514: ref=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0023-2514: hyp=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0024-2515: ref=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'IT', 'WAS', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'CYNTHIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0024-2515: hyp=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'EVER', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'CYNTHIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0025-2516: ref=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0025-2516: hyp=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0026-2517: ref=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0026-2517: hyp=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0027-2518: ref=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0027-2518: hyp=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0028-2519: ref=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARCH', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0028-2519: hyp=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARGE', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0029-2520: ref=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARCH', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0029-2520: hyp=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARGE', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'A', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0030-2521: ref=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 
'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMIRCHED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0030-2521: hyp=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMARGED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0031-2522: ref=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'THE', 'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0031-2522: hyp=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'A', 'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0032-2523: ref=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0032-2523: hyp=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'TO', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0033-2524: ref=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0033-2524: hyp=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0034-2525: ref=["I'M", 'GOING', 'TO', 'RECTIFY', 'THEM', 'BUT', 'IT', 'WILL', 'TAKE', 'TIME'] +6432-63723-0034-2525: hyp=["I'M", 'GOING', 'DIRECTIFY', 'THEM', 'BUT', 'I', 'WILL', 'TAKE', 'TIME'] +6432-63723-0035-2526: ref=["IT'S", 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0035-2526: hyp=['HIS', 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0036-2527: ref=['SO', 'KING', 'GOT', 'BAIL', 'WHO', 'PUT', 'IT', 'UP'] +6432-63723-0036-2527: hyp=['SO', 'KING', 'GOD', 'BAIL', 'WHO', 'PUT', 'IT'] +6432-63723-0037-2528: ref=['IT', 'WAS', 'HIGH', 'LARCH'] +6432-63723-0037-2528: hyp=['IT', 'WAS', 'I', 'LARCH'] +6432-63723-0038-2529: ref=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0038-2529: hyp=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0039-2530: ref=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 'TO', 'BE', 'IN', 'JUST', 'THE', 'SAME'] +6432-63723-0039-2530: hyp=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 'TO', 'BE', 'IN', 'JUST', 'THE', 
'SAME'] +6432-63723-0040-2531: ref=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0040-2531: hyp=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0041-2532: ref=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0041-2532: hyp=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0042-2533: ref=['THANK', 'YOU', 'NO'] +6432-63723-0042-2533: hyp=['THANK', 'YOU', 'NO'] +6432-63723-0043-2534: ref=["I'M", 'AFRAID', 'MY', 'DIGESTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0043-2534: hyp=["I'M", 'AFRAID', 'MY', 'DIRECTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0044-2535: ref=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0044-2535: hyp=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0045-2536: ref=['THE', 'STOPPED', 'CLOCKS', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0045-2536: hyp=['THEY', 'STOPPED', 'CLUXED', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0046-2537: ref=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0046-2537: hyp=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0047-2538: ref=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0047-2538: hyp=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0048-2539: ref=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0048-2539: hyp=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0049-2540: ref=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0049-2540: hyp=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0050-2541: ref=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0050-2541: hyp=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0051-2542: ref=['WHY', 'POLONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0051-2542: hyp=['WHY', 'BONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0052-2543: ref=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0052-2543: hyp=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 
'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0053-2544: ref=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COINS', 'ON', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WADDED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0053-2544: hyp=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COIN', 'DOWN', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WATERED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0054-2545: ref=["IT'S", "IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0054-2545: hyp=["IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0055-2546: ref=['I', 'WENT', 'OVER', 'THEM', 'THE', 'OTHER', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0055-2546: hyp=['I', 'WENT', 'OVER', 'THEM', 'NEAR', 'THE', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0056-2547: ref=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0056-2547: hyp=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0057-2548: ref=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6432-63723-0057-2548: hyp=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6938-70848-0000-1216: ref=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0000-1216: hyp=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0001-1217: ref=['THE', 'COLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0001-1217: hyp=['THE', 'GOLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0002-1218: ref=['ASKED', 'A', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0002-1218: hyp=['AS', 'TO', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0003-1219: ref=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0003-1219: hyp=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0004-1220: ref=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0004-1220: hyp=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0005-1221: ref=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WHO', 'IS', 'LENIN', 'A', 'GERMAN'] +6938-70848-0005-1221: hyp=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WITH', 'LANY', 'A', 'GERMAN'] +6938-70848-0006-1222: ref=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLUTIONIST', 'A', 'PROVOCATOR', 'THEY', 'BELLOWED', 'AT', 'HIM'] 
+6938-70848-0006-1222: hyp=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLITIONIST', 'APPROPATOR', 'THEY', 'BELOVED', 'AT', 'HIM'] +6938-70848-0007-1223: ref=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'BUT', "YOU'RE", 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSSIA'] +6938-70848-0007-1223: hyp=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'A', 'SHEPHERD', 'YOU', 'ARE', 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSSIA'] +6938-70848-0008-1224: ref=['THE', 'PEASANTS', 'ARE', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'WAIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0008-1224: hyp=['TO', 'PIECE', 'AND', 'OTHER', 'PEOPLE', 'OF', 'RUSSIA', 'WAIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0009-1225: ref=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKINGMEN', 'LIKE', 'OURSELVES'] +6938-70848-0009-1225: hyp=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKING', 'MEN', 'LIKE', 'OURSELVES'] +6938-70848-0010-1226: ref=['THESE', 'MEN', 'ESPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0010-1226: hyp=['THIS', 'MAN', 'HAS', 'SPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0011-1227: ref=['THESE', 'LAST', 'WERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0011-1227: hyp=['THIS', 'LAST', 'WHERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0012-1228: ref=['WHEREUPON', 'THE', 'OLD', 'EXECUTIVE', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0012-1228: hyp=['WHEREUPON', 'THE', 'OLD', 'EXECUTED', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0013-1229: ref=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0013-1229: hyp=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0014-1230: ref=['FEARFUL', 'TUMULT', 'CRIES', 'DOWN', 'WITH', 'THE', 'BOLSHEVIKI'] +6938-70848-0014-1230: hyp=['FEARFUL', 'TUMULT', 'QUITE', 'CHEVIKI'] +6938-70848-0015-1231: ref=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'SMOLNY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', "THAT'S", 'ALL', 'LET', 'ANY', 'ONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0015-1231: hyp=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'MORLEY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', 'THAT', 'SOUL', 'LET', 'ANYONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0016-1232: ref=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STATUS', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0016-1232: hyp=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STRATAS', 'OF', 'THE', 'EXECUTED', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0017-1233: ref=['BY', 'DECLARING', 'THE', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'BLOCK', 'THE', 'REELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0017-1233: hyp=['BY', 'DECLINING', 'THEIR', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'PLUCK', 'THE', 'RE', 'ELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0018-1234: ref=['BUT', 'THIS', 'WORKED', 'BOTH', 'WAYS', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONISTS', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTIVE', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 
'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0018-1234: hyp=['BUT', 'THIS', 'WORTH', 'BOTH', 'WAYS', 'THE', 'LAST', 'SOCIALLY', 'REVOLUTION', 'IS', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTIVE', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0019-1235: ref=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRARIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONARIES'] +6938-70848-0019-1235: hyp=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONARIES'] +6938-70848-0020-1236: ref=['THE', 'CONSTITUENT', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0020-1236: hyp=['THE', 'CONSTITUTE', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0021-1237: ref=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0021-1237: hyp=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0022-1238: ref=['THE', 'FIRST', 'STAGE', 'WAS', 'THE', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'THE', 'CRUSHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALISTS', 'AND', 'LAND', 'OWNERS', 'WHOSE', 'INTERESTS', 'ARE', 'CLOSELY', 'RELATED'] +6938-70848-0022-1238: hyp=['THE', 'FIRST', 'STAGE', 'WAS', 'A', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'THE', 'CRASHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALIST', 'AND', 'THE', 'LANDOWNERS', 'WHOSE', 'INTERESTS', 'ARE', 'CLOSELY', 'RELATED'] +6938-70848-0023-1239: ref=['THE', 'DUMAS', 'AND', 'ZEMSTVOS', 'WERE', 'DROPPED'] +6938-70848-0023-1239: hyp=['DID', 'YOU', 'ME', 'SEND', 'THEMSELVES', 'WERE', 'DROPPED'] +6938-70848-0024-1240: ref=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0024-1240: hyp=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0025-1241: ref=['HE', 'SPOKE', 'TO', 'THE', 'RUMP', 'CONVENTION'] +6938-70848-0025-1241: hyp=['HE', 'SPOKE', 'TO', 'THE', 'WRONG', 'CONVENTION'] +6938-70848-0026-1242: ref=['THE', 'VILLAGES', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0026-1242: hyp=['THE', 'RELIGIOUS', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0027-1243: ref=['BUT', 'THE', 'PRESENT', 'MOVEMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0027-1243: hyp=['BUT', 'THE', 'PRESENT', 'MOMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0028-1244: ref=['THE', 'WILL', 'OF', 'MILLIONS', 'OF', 'WORKERS', 'IS', 'NOW', 'CONCENTRATED', 'IN', 'THIS', 'HALL'] +6938-70848-0028-1244: hyp=['THE', 'WIDOW', 'OF', 'MILLIONS', 'OF', 'WORKERS', 'IS', 'SO', 'CONCENTRATED', 'IN', 'THE', 'HALL'] +6938-70848-0029-1245: ref=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] 
+6938-70848-0029-1245: hyp=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] +6938-70848-0030-1246: ref=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTENING', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +6938-70848-0030-1246: hyp=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTIANNING', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +7018-75788-0000-135: ref=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0000-135: hyp=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0001-136: ref=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0001-136: hyp=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0002-137: ref=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0002-137: hyp=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0003-138: ref=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COTTON', 'BAG', 'AND', 'GIVING', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0003-138: hyp=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COT', 'AND', 'BAG', 'AND', 'GIVEN', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0004-139: ref=['DO', 'AS', 'THEY', 'DO', 'AND', 'BELIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0004-139: hyp=['DO', 'AS', 'THEY', 'DO', 'AND', 'BE', 'LIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0005-140: ref=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0005-140: hyp=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0006-141: ref=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 
'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0006-141: hyp=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0007-142: ref=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'APES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0007-142: hyp=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'IPES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0008-143: ref=['WE', 'WEIGHED', 'ANCHOR', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0008-143: hyp=['WE', 'WADE', 'ANCHOR', 'AN', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0009-144: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0009-144: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0010-145: ref=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'BASSORAH'] +7018-75788-0010-145: hyp=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'PUSSARA'] +7018-75788-0011-146: ref=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGHDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOREGATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0011-146: hyp=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOR', 'GATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0012-147: ref=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 'MADE'] +7018-75788-0012-147: hyp=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 
'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 'MADE'] +7018-75788-0013-148: ref=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINDBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0013-148: hyp=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0014-149: ref=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0014-149: hyp=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0015-150: ref=['HAPLY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0015-150: hyp=['HAPPILY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0016-151: ref=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0016-151: hyp=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0017-152: ref=['BUT', 'IT', 'BURNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECT', 'IT', 'AND', 'SELL', 'IT'] +7018-75788-0017-152: hyp=['BUT', 'AT', 'BERNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECTED', 'AND', 'SELL', 'IT'] +7018-75788-0018-153: ref=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'ABODE', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0018-153: hyp=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 
'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'A', 'BOAT', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0019-154: ref=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75788-0019-154: hyp=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75789-0000-155: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0000-155: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0001-156: ref=['THEN', 'SIGHING', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'COMORIN', 'ALOES', 'WOOD', 'AND', 'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIPS', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALOES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0001-156: hyp=['THEN', 'SIGNED', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'CORMOR', 'AND', 'ALLIES', 'WOOD', 'AND', 'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIP', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALLIES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0002-157: ref=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEEK', 'AND', 'FIND', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'O', 'NIGHT', 'ALL', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0002-157: hyp=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEE', 'CONFINED', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'A', 'NIGHT', 'OR', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0003-158: ref=['I', 'ROWED', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0003-158: hyp=['I', 'RIDE', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'ME', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0004-159: ref=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'AND', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 
'SHOULD', 'PERISH'] +7018-75789-0004-159: hyp=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'IN', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 'SHOULD', 'PERISH'] +7018-75789-0005-160: ref=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0005-160: hyp=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0006-161: ref=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0006-161: hyp=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0007-162: ref=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0007-162: hyp=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0008-163: ref=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TILLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0008-163: hyp=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TELLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDEST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0009-164: ref=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'O', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0009-164: hyp=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'AM', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0010-165: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 'NIGHT'] +7018-75789-0010-165: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 'NIGHT'] +7018-75789-0011-166: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINDBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 
'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0011-166: hyp=['SHE', 'SAID', 'IT', 'HATH', 'RAGED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0012-167: ref=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0012-167: hyp=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0013-168: ref=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0013-168: hyp=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0014-169: ref=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'ON', 'OUR', 'HEAD', 'AND', 'EYES', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0014-169: hyp=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'ON', 'OUR', 'HEAD', 'NIGHS', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0015-170: ref=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0015-170: hyp=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0016-171: ref=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0016-171: hyp=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0017-172: ref=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'O', 
'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0017-172: hyp=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'A', 'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0018-173: ref=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0018-173: hyp=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0019-174: ref=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'EYE', 'WITNESSED'] +7018-75789-0019-174: hyp=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'I', 'WITNESSED'] +7018-75789-0020-175: ref=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ALMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRY', 'MAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0020-175: hyp=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ARMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRYMAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0021-176: ref=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0021-176: hyp=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0022-177: ref=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0022-177: hyp=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0023-178: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0023-178: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0024-179: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINDBAD', 'THE', 'SEAMAN', 'HAD', 'RELATED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 
'COMPANY', 'HAD', 'DISPERSED', 'SINDBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0024-179: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINBAD', 'THE', 'SEAMAN', 'HAD', 'RELIGHTED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 'COMPANY', 'HAD', 'DISPERSED', 'SINBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0025-180: ref=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINDBAD', 'THE', 'SEAMAN'] +7018-75789-0025-180: hyp=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINBAD', 'THE', 'SALMON'] +7018-75789-0026-181: ref=['KNOW', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'IN', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'AND', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HEAR', 'NEW', 'THINGS'] +7018-75789-0026-181: hyp=['NO', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'AND', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'AND', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HERE', 'NEW', 'THINGS'] +7018-75789-0027-182: ref=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSORAH', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0027-182: hyp=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSERA', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0028-183: ref=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENING', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 'STONED', 'CLOMB', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFETING', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0028-183: hyp=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENED', 'IN', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 'STONE', 'CLIMBED', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFET', 
'IN', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0029-184: ref=['THIS', 'HE', 'SET', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'AWHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COMETH', 'HITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'THE', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0029-184: hyp=['THIS', 'HE', 'SAID', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'A', 'WHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COME', 'THITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'A', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0030-185: ref=['A', 'SECOND', 'FISH', 'MADE', 'ITS', 'APPEARANCE', 'THAN', 'WHICH', 'WE', 'HAD', 'SEEN', 'NAUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0030-185: hyp=['A', 'SECOND', 'FISH', 'READ', 'ITS', 'APPEARANCE', 'AND', 'WHICH', 'WE', 'HAD', 'SEEN', 'NOUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0031-186: ref=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75789-0031-186: hyp=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7105-2330-0000-2310: ref=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OR', 'MISCONCEPTION', 'AS', 'TO', "PLATTERBAFF'S", 'GUILT'] +7105-2330-0000-2310: hyp=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OUR', 'MISCONCEPTION', 'AS', 'THE', "PLATTERBUFF'S", 'GUILT'] +7105-2330-0001-2311: ref=['HE', 'HAD', 'NOT', 'ONLY', 'PLEADED', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0001-2311: hyp=['HE', 'HAD', 'NOT', 'ONLY', 'PLAYED', 'IT', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 
'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0002-2312: ref=['THE', 'JURY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTENUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] +7105-2330-0002-2312: hyp=['VERY', 'CHEERY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTINUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] +7105-2330-0003-2313: ref=['OF', 'COURSE', 'ANY', 'SENTENCE', 'WHICH', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', "GOVERNMENT'S", 'POINT', 'OF', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0003-2313: hyp=['OF', 'COURSE', 'ANY', 'SENTENCE', 'REACHED', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', 'GOVERNMENTS', 'BY', 'A', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0004-2314: ref=['A', 'HEADLONG', 'PARDON', 'ON', 'THE', 'EVE', 'OF', 'A', 'BYE', 'ELECTION', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'DEFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0004-2314: hyp=['I', 'HAD', 'LONG', 'PARDON', 'AND', 'THE', 'EVE', 'OF', 'A', 'BIOLECTION', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'AFFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0005-2315: ref=['HENCE', 'THE', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITEHALL', 'AND', 'DOWNING', 'STREET', 'AND', 'OTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0005-2315: hyp=['HENCE', 'THEIR', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITE', 'HALL', 'AND', 'DAWNING', 'STREET', 'ANOTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0006-2316: ref=['THE', 'JURY', 'RETURNED', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATHLIKE', 'HUSH'] +7105-2330-0006-2316: hyp=['THEIR', 'CHEERY', 'RETURN', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATH', 'LIKE', 'HUSH'] +7105-2330-0007-2317: ref=['THE', 'FOREMAN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0007-2317: hyp=['THE', 'FOUR', 'MEN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0008-2318: ref=['THE', 'JURY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] 
+7105-2330-0008-2318: hyp=['THE', 'CHERRY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] +7105-2330-0009-2319: ref=['THE', 'JURY', 'WISH', 'TO', 'ADD', 'A', 'RIDER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BY', 'ELECTION', 'IS', 'PENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0009-2319: hyp=['THEY', 'JERRY', 'WISH', 'TO', 'ADD', 'A', 'WRITER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BILL', 'IS', 'SPENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0010-2320: ref=['AND', 'MAY', 'THE', 'LORD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLL', 'A', 'JUNIOR', 'COUNSEL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0010-2320: hyp=['AND', 'MADE', 'THE', 'LARD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLE', 'A', 'GENIOR', 'CONSUL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0011-2321: ref=['FIFTEEN', 'HUNDRED', 'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0011-2321: hyp=['FIFTEEN', 'HUNDRED', 'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0012-2322: ref=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0012-2322: hyp=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0013-2323: ref=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0013-2323: hyp=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0014-2324: ref=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANISER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLL', 'OPENS'] +7105-2330-0014-2324: hyp=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANIZER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLE', 'OPENS'] +7105-2330-0015-2325: ref=['HE', 'SAID', 'IT', 'WAS', 'OUR', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'RADPROP', 'IS', 'IN', 'TO', 'NIGHT'] +7105-2330-0015-2325: hyp=['HE', 'SAID', 'IT', 'WAS', 'HER', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'REDRUP', 'IS', 'INN', 'TO', 'NIGHT'] +7105-2330-0016-2326: ref=['DESPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TRELAWNEY', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0016-2326: hyp=['THIS', 'SPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TREEONER', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0017-2327: ref=['HE', 'EXCLAIMED', "WON'T", 'GO'] +7105-2330-0017-2327: hyp=['HE', 'EXCLAIMED', "WON'T", 'GO'] +7105-2330-0018-2328: ref=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BRASS', 
'BAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0018-2328: hyp=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BREASTPAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0019-2329: ref=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'RELEASED', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFEND', 'IT', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0019-2329: hyp=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'LESS', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFENDED', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0020-2330: ref=['ANYWAY', 'HE', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0020-2330: hyp=['AND', 'AWAY', 'YOU', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0021-2331: ref=['POLL', 'OPENS', 'IN', 'FIVE', 'MINUTES'] +7105-2330-0021-2331: hyp=['PAUL', 'OPENS', 'IN', 'FIVE', 'MINUTES'] +7105-2330-0022-2332: ref=['IS', 'PLATTERBAFF', 'OUT', 'YET'] +7105-2330-0022-2332: hyp=['HIS', 'FURTHER', 'BATH', 'OUT', 'YET'] +7105-2330-0023-2333: ref=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0023-2333: hyp=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0024-2334: ref=['THE', 'CHIEF', 'ORGANISER', 'RANG', 'OFF'] +7105-2330-0024-2334: hyp=['THE', 'CHIEF', 'ORGANIZER', 'RANG', 'OFF'] +7105-2330-0025-2335: ref=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', 'MUSICIANS', 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0025-2335: hyp=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', 'MEASIANS', 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0026-2336: ref=["CAN'T", 'YOU', 'GET', 'A', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANISER'] +7105-2330-0026-2336: hyp=["CAN'T", 'YOU', 'GET', 'US', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANIZER'] +7105-2330-0027-2337: ref=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0027-2337: hyp=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0028-2338: ref=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'WILL', 'VOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0028-2338: hyp=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'REVOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0029-2339: ref=['A', 'TELEGRAM', 'WAS', 'BROUGHT', 'IN'] +7105-2330-0029-2339: hyp=['I', 'TELEGRAMAS', 'BROUGHT', 'IN'] +7105-2330-0030-2340: ref=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMMITTEE', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0030-2340: hyp=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMEDY', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0031-2341: ref=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BAND'] +7105-2330-0031-2341: hyp=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BAND'] +7105-2330-0032-2342: ref=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] +7105-2330-0032-2342: hyp=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] +7105-2330-0033-2343: ref=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] 
+7105-2330-0033-2343: hyp=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] +7105-2330-0034-2344: ref=['DEMANDED', 'THE', 'CHIEF', 'ORGANISER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'CYMBALS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0034-2344: hyp=['DEMANDED', 'THE', 'CHIEF', 'ORGANIZER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'SYMBOLS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0035-2345: ref=['THE', 'WARDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0035-2345: hyp=['THE', 'ORDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0036-2346: ref=['LEND', 'US', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANISER'] +7105-2330-0036-2346: hyp=['BLENDEST', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANISER'] +7105-2330-0037-2347: ref=['THE', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0037-2347: hyp=['THOUGH', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0038-2348: ref=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'WAS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'STRAINS', 'OF', 'I', "DIDN'T", 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0038-2348: hyp=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'IS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'TRAINS', 'OF', 'I', "DON'T", 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0039-2349: ref=['THE', 'WORD', 'OF', 'THE', 'SONG', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'TO', 'THE', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0039-2349: hyp=['THE', 'WORD', 'OF', 'THE', 'SUN', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'THAT', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0040-2350: ref=['THE', 'SEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MAJORITY'] +7105-2330-0040-2350: hyp=['THIS', 'HEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MATURITY'] +7105-2330-0041-2351: ref=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINET', 'MINISTERS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKE', 'BREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBAFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2330-0041-2351: hyp=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINETS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKE', 'BREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBUFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2340-0000-2272: ref=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'THE', 'SORT', 'OF', 'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0000-2272: hyp=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'THE', 'SORT', 'OF', 
'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0001-2273: ref=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'HER', 'HUSBAND', 'OR', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0001-2273: hyp=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'THE', 'HUSBAND', 'OR', 'A', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0002-2274: ref=['BESIDES', 'CYNICISM', 'APART', 'HIS', 'BEING', 'RICH', 'WILL', 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FAILING'] +7105-2340-0002-2274: hyp=['BESIDES', 'CYS', 'IN', 'A', 'PART', 'IS', 'BEING', 'RICH', "WE'LL", 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FEELING'] +7105-2340-0003-2275: ref=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SORDID', 'MOTIVE', 'NATURALLY', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'TIRESOME', 'MALADY'] +7105-2340-0003-2275: hyp=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SARDID', 'MOTIVE', 'NATURAL', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'PARASAN', 'MALADY'] +7105-2340-0004-2276: ref=['WILFRID', 'PIGEONCOTE', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEONCOTE', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRID', 'PIGEONCOTE', 'WHO', 'HAD', 'SUCCUMBED', 'TO', 'THE', 'AFTER', 'EFFECTS', 'OF', 'A', 'POLO', 'ACCIDENT'] +7105-2340-0004-2276: hyp=['WILFRED', 'DIGESON', 'COLT', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEON', 'COAT', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRED', 'PIGEONOTE', 'WHO', 'HAD', 'SUCCUMBED', 'THE', 'DAY', 'AFTER', 'EFFECTS', 'OF', 'APOLLO', 'ACCIDENT'] +7105-2340-0005-2277: ref=['A', 'WILFRID', 'PIGEONCOTE', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONOURS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'WILFRID', 'HAD', 'BEEN', 'A', 'BAPTISMAL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPUTATION', 'THAN', 'BY', 'PERSON', 'TO', 'A', 'WIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0005-2277: hyp=['OF', 'WILFRED', 'BEECH', 'AND', 'COURT', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONOURS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'LOYAL', 'FRED', 'HAD', 'BEEN', 'ABOVE', 'THE', 'SMALL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPETITION', 'THAN', 'BY', 'PERSON', 'TO', 'AVIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0006-2278: ref=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0006-2278: hyp=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0007-2279: ref=['FROM', 'HIS', 'LATE', 'SCHOOLDAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'KLEPTOMANIA', 'HE', 'HAD', 
'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0007-2279: hyp=['FROM', 'HIS', 'LATE', 'SCHOOL', 'DAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'CLEFTOMANIA', 'HE', 'HAD', 'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0008-2280: ref=['THE', 'SEARCH', 'USUALLY', 'PRODUCED', 'A', 'LARGE', 'AND', 'VARIED', 'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEONCOTE', 'TO', 'HIS', 'WIFE', 'SOME', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', "HERE'S", 'A', 'TELEGRAM', 'FROM', 'WILFRID', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0008-2280: hyp=['THIS', 'SEARCH', 'USUALLY', 'PRODUCED', 'A', 'LARGE', 'AND', 'VARIED', 'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEON', 'BOAT', 'TO', 'HIS', 'WIFE', 'THEM', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', 'HERE', 'IS', 'A', 'TELEGRAM', 'FROM', 'MILFRED', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0009-2281: ref=['SIGNED', 'WILFRID', 'PIGEONCOTE'] +7105-2340-0009-2281: hyp=['SIGN', 'WILFRED', 'PEACH', 'AND', 'CO'] +7105-2340-0010-2282: ref=['I', 'SUPPOSE', "HE'S", 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0010-2282: hyp=['I', 'SUPPOSE', 'THIS', 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0011-2283: ref=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0011-2283: hyp=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0012-2284: ref=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0012-2284: hyp=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0013-2285: ref=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVER', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'FOR', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0013-2285: hyp=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVERY', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'FOR', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0014-2286: ref=['SUCH', 'NICE', 'USEFUL', 'GIFTS', 'A', 'FEW', 'DUPLICATES', 'OF', 'COURSE'] +7105-2340-0014-2286: hyp=['SUCH', 'NICE', 'FORGIFTS', 'A', 'FEW', 'DEPLICATES', 'OF', 'COURSE'] +7105-2340-0015-2287: ref=['SEVEN', 'CREAM', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0015-2287: hyp=['SEVEN', 'QUEEN', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0016-2288: ref=['WE', 'FEEL', 'THAT', 'WE', 'MUST', 'LIVE', 'ON', 'CREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0016-2288: hyp=['WE', 'FEEL', 'THAT', 'WE', 'MUST', 'LIVE', 'UNCREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0017-2289: ref=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0017-2289: 
hyp=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0018-2290: ref=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARET', 'JUG', 'SAID', 'WILFRID', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0018-2290: hyp=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARGA', 'SAID', 'WILFRIED', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0019-2291: ref=['VIGILANCE', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0019-2291: hyp=['EACH', 'A', 'LENS', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0020-2292: ref=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] +7105-2340-0020-2292: hyp=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] +7105-2340-0021-2293: ref=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OFF'] +7105-2340-0021-2293: hyp=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OF'] +7105-2340-0022-2294: ref=["IT'S", 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0022-2294: hyp=['IS', 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0023-2295: ref=['WILFRID', 'WAS', 'LATE', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0023-2295: hyp=['WILFRED', 'WAS', 'LATE', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0024-2296: ref=["IT'S", 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PORTMANTEAU'] +7105-2340-0024-2296: hyp=['IS', 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PORTMANTEAU'] +7105-2340-0025-2297: ref=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0025-2297: hyp=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0026-2298: ref=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'CREAM', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 'FELT', 'RATHER', 'AWKWARD', 'ABOUT', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0026-2298: hyp=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'QUEEN', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 'FELT', 'RATHER', 'AWKWARD', 'OF', 'A', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0027-2299: ref=['THE', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THESE', 'MANY', 'YEARS'] 
+7105-2340-0027-2299: hyp=['THIS', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THIS', 'MANY', 'YEARS'] +7105-2340-0028-2300: ref=['LADY', 'ERNESTINE', 'PIGEONCOTE', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SON', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0028-2300: hyp=['LADY', 'ERNESTON', 'BEECH', 'AND', 'COLD', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SUN', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0029-2301: ref=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0029-2301: hyp=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0030-2302: ref=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0030-2302: hyp=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0031-2303: ref=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVERWARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'JUG', 'IN', 'HER', 'HANDS'] +7105-2340-0031-2303: hyp=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVER', 'WARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'JUG', 'IN', 'HER', 'HANDS'] +7105-2340-0032-2304: ref=['THE', 'PIGEONCOTES', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0032-2304: hyp=['THE', 'PIGEON', 'CORDS', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0033-2305: ref=['PETER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0033-2305: hyp=['PETER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0034-2306: ref=['MISSUS', 'PETER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'COYNESS'] +7105-2340-0034-2306: hyp=['MISSUS', 'BEATER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'KINDNESS'] +7105-2340-0035-2307: ref=["PETER'S", 'LITTLE', 'WEAKNESS', 'IT', 'RUNS', 'IN', 'THE', 'FAMILY', 'GOOD', 'LORD'] +7105-2340-0035-2307: hyp=['PETER', 'IS', 'LITTLE', 'WEAKNESS', 'EACH', "ONE'S", 'IN', 'THE', 'FAMILY', 'GOOD', 'LORD'] 
+7105-2340-0036-2308: ref=['DO', 'YOU', 'MEAN', 'TO', 'SAY', "HE'S", 'A', 'KLEPTOMANIAC', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0036-2308: hyp=['DO', 'YOU', 'MEAN', 'TO', 'SAY', 'HE', 'IS', 'A', 'CLAPTOMANIA', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0037-2309: ref=['BRAVE', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7105-2340-0037-2309: hyp=['PRETTY', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7902-96591-0000-0: ref=['I', 'AM', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0000-0: hyp=['AND', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0001-1: ref=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0001-1: hyp=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0002-2: ref=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0002-2: hyp=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0003-3: ref=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0003-3: hyp=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0004-4: ref=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAEME', 'HAS', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0004-4: hyp=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAHAME', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0005-5: ref=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0005-5: hyp=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0006-6: ref=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHY', 'WAS', 'SILENT'] +7902-96591-0006-6: hyp=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHIE', 'WAS', 'SILENT'] +7902-96591-0007-7: ref=['THEN', 'AS', 'ARCHY', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0007-7: hyp=['THEN', 'AS', 'ARCHIE', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0008-8: ref=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMOUR', 'PROPRE', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARCHIBALD', 'RAYSTOKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0008-8: hyp=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMORE', 'A', 'PROPER', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARQUEBALD', 'RAY', 'STROKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0009-9: ref=['IT', 'ALL', 'COMES', 'OF', 
'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0009-9: hyp=['AND', 'ALL', 'COMES', 'OF', 'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0010-10: ref=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0010-10: hyp=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0011-11: ref=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0011-11: hyp=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0012-12: ref=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0012-12: hyp=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0013-13: ref=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0013-13: hyp=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0014-14: ref=['THE', 'KICK', 'HE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0014-14: hyp=['THE', 'KICKIE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0015-15: ref=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THEN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0015-15: hyp=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THAN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0016-16: ref=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0016-16: hyp=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0017-17: 
ref=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0017-17: hyp=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0018-18: ref=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0018-18: hyp=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0019-19: ref=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'COULD', 'NOT', 'DO', 'THAT'] +7902-96591-0019-19: hyp=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'CANNOT', 'DO', 'THAT'] +7902-96591-0020-20: ref=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOZE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0020-20: hyp=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOSE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0021-21: ref=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0021-21: hyp=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0022-22: ref=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0022-22: hyp=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0023-23: ref=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0023-23: hyp=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0024-24: ref=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISON', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96591-0024-24: hyp=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISON', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96592-0000-25: ref=['SURE', "YOU'VE", 'LOOKED', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0000-25: hyp=['SURE', 'YOU', 'LOOK', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0001-26: ref=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0001-26: hyp=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0002-27: ref=['SHALL', 'I', 'COME', 'TOO', 'FATHER', 'NO'] +7902-96592-0002-27: hyp=['SHALL', 'I', 'COME', 'TO', 'FATHER', 'NO'] +7902-96592-0003-28: ref=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 'SAW', 'JERRY', "NANDY'S", 'LOBSTER', 'BOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 
'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0003-28: hyp=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 'SAW', 'JERRY', "ANDY'S", 'LOBSTER', 'BOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0004-29: ref=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0004-29: hyp=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0005-30: ref=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 'ARCHY', 'RAYSTOKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0005-30: hyp=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 'ARCHIE', 'RAY', 'STROKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0006-31: ref=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0006-31: hyp=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0007-32: ref=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GURR', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEAL', 'THE', 'BOY', 'WHO', 'HELPED', 'THE', 'COOK'] +7902-96592-0007-32: hyp=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GIRT', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEALE', 'THE', 'BOY', 'WHO', 'HELPS', 'THE', 'COOK'] +7902-96592-0008-33: ref=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GURR', 'FATHER'] +7902-96592-0008-33: hyp=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GERFATHER'] +7902-96592-0009-34: ref=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'KNOTTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0009-34: hyp=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'NAUGHTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0010-35: ref=['AND', "I'M", 'HUNGRY', 'TOO', 'TIME', 'I', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0010-35: hyp=['AND', 'UNHUNGRY', 'TOO', 'TELL', 'IT', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0011-36: ref=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0011-36: hyp=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0012-37: ref=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDON', 'THEY', 'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] +7902-96592-0012-37: hyp=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDEN', 'THEY', 'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] 
+7902-96592-0013-38: ref=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RAN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARE', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0013-38: hyp=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RUN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARED', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0014-39: ref=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 'WAS', 'ADVANCING', 'TOWARD', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0014-39: hyp=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 'WAS', 'ADVANCING', 'TOWARDS', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0015-40: ref=['RAM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0015-40: hyp=['ROOM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0016-41: ref=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0016-41: hyp=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0017-42: ref=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0017-42: hyp=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0018-43: ref=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'YOU', 'WAS'] +7902-96592-0018-43: hyp=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'HE', 'WAS'] +7902-96592-0019-44: ref=['IT', 'WAS', 'YOU', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0019-44: hyp=['IT', 'WAS', 'YOUR', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0020-45: ref=['NONSENSE'] +7902-96592-0020-45: hyp=['NONSENSE'] +7902-96592-0021-46: ref=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0021-46: hyp=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0022-47: ref=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFICER'] +7902-96592-0022-47: hyp=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFASTER'] +7902-96592-0023-48: ref=["WON'T", 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0023-48: hyp=['WELL', 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0024-49: ref=['BEEN', 'PLAYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0024-49: hyp=['COMPLYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0025-50: ref=['ARCHY', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] 
+7902-96592-0025-50: hyp=['ARCHIE', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] +7902-96592-0026-51: ref=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0026-51: hyp=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0027-52: ref=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0027-52: hyp=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0028-53: ref=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'BREAST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'AT', 'ONE', 'OF', 'THE', 'BARS', 'LABOUR', 'IN', 'VAIN'] +7902-96592-0028-53: hyp=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'CHEST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'AT', 'ONE', 'OF', 'THE', 'BARS', 'LABOR', 'IN', 'VAIN'] +7902-96592-0029-54: ref=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALISED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0029-54: hyp=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALIZED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0030-55: ref=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'EAR', 'ASSAILING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0030-55: hyp=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'IRISH', 'SELLING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0031-56: ref=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0031-56: hyp=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0032-57: ref=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'HA', 'HA', 'LAUGHED', 'THE', 'BOY', 'JEERINGLY'] +7902-96592-0032-57: hyp=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'HA', 'LAUGHED', 'THE', 'BOY', 'JEERINGLY'] +7902-96592-0033-58: ref=['BUT', "I'LL", 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 
'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0033-58: hyp=['BUT', 'ALL', 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0034-59: ref=['GUINEA', 'SAID', 'THE', 'BOY', 'THINK', "I'D", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TWO'] +7902-96592-0034-59: hyp=['GUINEAS', 'OF', 'THE', 'BOY', 'THINK', "I'LL", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TOO'] +7902-96592-0035-60: ref=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0035-60: hyp=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0036-61: ref=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0036-61: hyp=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0037-62: ref=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 'ON', 'ALL', 'SKEW', 'REW'] +7902-96592-0037-62: hyp=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 'ON', 'ALL', 'SKIRO'] +7902-96592-0038-63: ref=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0038-63: hyp=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0039-64: ref=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHY', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0039-64: hyp=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHIE', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0040-65: ref=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0040-65: hyp=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0041-66: ref=['ARCHY', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0041-66: hyp=['ARCHIE', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0042-67: ref=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0042-67: hyp=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0043-68: ref=['YOU', 'LAUGHED', 'AND', 'FLEERED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0043-68: hyp=['YOU', 'LAUGHED', 'AND', 'FLARED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0044-69: ref=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'A', 'RUM', 'UN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0044-69: hyp=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'LIKE', 'A', 'ROMAN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0045-70: ref=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0045-70: hyp=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0046-71: ref=['YOU', "ROPE'S", 'END', 'ME', 'HE', 'SAID'] +7902-96592-0046-71: hyp=['YOU', 'HOPES', 'AND', 'ME', 'HE', 'SAID'] +7902-96592-0047-72: ref=['WHY', 'I', 'COULD', 'TIE', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 
'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0047-72: hyp=['WHY', 'I', 'COULD', 'TELL', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0048-73: ref=['BIT', 'OF', 'A', 'MIDDY', 'FED', 'ON', 'SALT', 'TACK', 'AND', 'WEEVILLY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', "ROPE'S", 'END'] +7902-96592-0048-73: hyp=['BIT', 'OF', 'AMITY', 'FED', 'ON', 'A', 'SALT', 'TACK', 'AND', 'WEEVILY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', 'ROPES', 'AND'] +7902-96592-0049-74: ref=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0049-74: hyp=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0050-75: ref=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'THEM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HANDS', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96592-0050-75: hyp=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'HIM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HAND', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96594-0000-76: ref=['SEEMED', 'IN', 'GOOD', 'SPIRITS', 'LAST', 'NIGHT', 'MISTER', 'GURR', 'EH'] +7902-96594-0000-76: hyp=['SEEMING', 'AT', "SPEAR'S", 'LAST', 'NIGHT', 'MISTER', 'GARR', 'EH'] +7902-96594-0001-77: ref=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0001-77: hyp=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0002-78: ref=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0002-78: hyp=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0003-79: ref=["THAT'S", 'RIGHT', 'OF', 'COURSE', 'WELL', 'ARMED'] +7902-96594-0003-79: hyp=['THE', 'THREAD', 'OF', 'COURSE', 'WILL', 'ALARMED'] +7902-96594-0004-80: ref=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0004-80: hyp=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0005-81: ref=['AWKWARD', 'BIT', 'O', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0005-81: hyp=['AWKWARD', 'BITTER', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0006-82: ref=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0006-82: hyp=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0007-83: ref=['YOU', "DON'T", 'THINK', 'MISTER', 'GURR', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0007-83: hyp=['YOU', "DON'T", 'THINK', 'MISTER', 'GORE', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0008-84: ref=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0008-84: hyp=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0009-85: ref=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDEED'] +7902-96594-0009-85: hyp=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDE'] +7902-96594-0010-86: ref=['BEG', 'PARDON', 'SIR', "DIDN'T", 'MEAN', 'ANY', 'HARM'] +7902-96594-0010-86: hyp=['THEY', 'PARDON', 'SIR', "DIDN'T", 
'MEAN', 'ANY', 'HARM'] +7902-96594-0011-87: ref=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTOKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0011-87: hyp=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTROKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0012-88: ref=['NO', 'WAIT', 'ANOTHER', 'HALF', 'HOUR'] +7902-96594-0012-88: hyp=['NO', 'WAIT', 'ANOTHER', 'AND', 'HALF', 'HOUR'] +7902-96594-0013-89: ref=['VERY', 'ILL', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0013-89: hyp=['VERY', 'ILL', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0014-90: ref=['THEN', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0014-90: hyp=['THAT', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0015-91: ref=['AWK', 'WARD', 'MISTER', 'GURR', 'AWKWARD'] +7902-96594-0015-91: hyp=['AWKWARD', 'MISTER', 'GARR', 'AWKWARD'] +7902-96594-0016-92: ref=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0016-92: hyp=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0017-93: ref=['SAY', 'AWK', 'WARD', 'IN', 'FUTURE', 'NOT', "AWK'ARD"] +7902-96594-0017-93: hyp=['SAY', 'AWKWARD', 'IN', 'THE', 'FUTURE', 'NOT', 'UPWARD'] +7902-96594-0018-94: ref=['I', 'MEAN', 'ALL', 'ALONE', 'BY', 'MYSELF', 'SIR'] +7902-96594-0018-94: hyp=['I', 'MEAN', 'OUR', 'OWN', 'BY', 'MYSELF', 'SIR'] +7902-96594-0019-95: ref=['WHAT', 'FOR', 'THERE', "AREN'T", 'A', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0019-95: hyp=['WHAT', 'FOR', 'THERE', 'ARE', 'TO', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0020-96: ref=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0020-96: hyp=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0021-97: ref=['HOPPING', 'ABOUT', 'LIKE', 'A', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0021-97: hyp=['HAVING', 'ABOUT', 'THE', 'GOOD', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0022-98: ref=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0022-98: hyp=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0023-99: ref=['BEG', 'PARDON', "DIDN'T", 'MEAN', 'NOWT', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0023-99: hyp=['THEY', 'PARDON', "DIDN'T", 'MEAN', 'OUT', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0024-100: ref=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0024-100: hyp=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0025-101: ref=['NO', 'WAIT'] +7902-96594-0025-101: hyp=['NO', 'WAIT'] +7902-96594-0026-102: ref=['KEEP', 'A', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTOKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BOAT'] +7902-96594-0026-102: hyp=['HE', 'WAS', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTROKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BO'] +7902-96594-0027-103: ref=['HE', 'SWUNG', 'ROUND', 'WALKED', 'AFT', 'AND', 'BEGAN', 'SWEEPING', 'THE', 'SHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] 
+7902-96594-0027-103: hyp=['HE', 'SWUNG', 'ROUND', 'WALKED', 'OFF', 'AND', 'BEGAN', 'SWEEPING', 'ASHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] +7902-96594-0028-104: ref=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0028-104: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0029-105: ref=['PIPE', 'AWAY', 'THE', 'MEN', 'TO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0029-105: hyp=['PEG', 'AWAY', 'THEM', 'INTO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0030-106: ref=['NOW', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0030-106: hyp=['NO', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0031-107: ref=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0031-107: hyp=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0032-108: ref=['STEADY', 'MY', 'LADS', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96594-0032-108: hyp=['STEADY', 'MY', 'LAD', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96595-0000-109: ref=['SAY', 'MESTER', 'GURR', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0000-109: hyp=['SAY', 'MISTER', 'GIRK', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0001-110: ref=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0001-110: hyp=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0002-111: ref=['WHAT', 'CHUCKED', 'HIM', 'OFF', 'YONDER'] +7902-96595-0002-111: hyp=['WHAT', 'SAID', 'DE', 'MORVE', 'YONDER'] +7902-96595-0003-112: ref=['GURR', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BUT', 'KINDLY'] +7902-96595-0003-112: hyp=['GIRK', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BE', 'KINDLY'] +7902-96595-0004-113: ref=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0004-113: hyp=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0005-114: ref=['SAY', 'MESTER', 'GURR', 'SIR', 'WHICH', 'THANKFUL', 'I', 'AM', 'TO', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0005-114: hyp=['SAY', 'MISTER', 'GER', 'WHICH', 'THANKFUL', 'I', 'AM', 'FOR', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0006-115: ref=['I', 'HOPE', 'NOT', 'DICK', 'I', 'HOPE', 'NOT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 'ANYTHING', 'SOMETIMES'] +7902-96595-0006-115: hyp=['I', 'HOPE', 'NOT', 'DICK', 'I', 'OPEN', 'IT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 
'ANYTHING', 'SOMETIMES'] +7902-96595-0007-116: ref=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0007-116: hyp=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0008-117: ref=['IF', "YOU'D", 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GURR', 'GRUFFLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'HIM', 'UP', 'HERE'] +7902-96595-0008-117: hyp=['IF', 'YOU', 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GRIGGLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'EM', 'UP', 'HERE'] +7902-96595-0009-118: ref=['BOY', 'BOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0009-118: hyp=['BOY', 'ABOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0010-119: ref=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GURR', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] +7902-96595-0010-119: hyp=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GIRL', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] +7902-96595-0011-120: ref=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0011-120: hyp=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0012-121: ref=['I', 'SAID', 'A', 'LAD', 'BOUT', 'SEVENTEEN', 'IN', 'A', 'RED', 'CAP', 'LIKE', 'YOURS', 'SAID', 'GURR', 'VERY', 'SHORTLY'] +7902-96595-0012-121: hyp=['I', 'STOOD', 'ALOUD', 'ABOUT', 'SEVENTEEN', 'IN', 'A', 'RED', 'CAPLICH', 'YOURS', 'SAID', 'GREW', 'VERY', 'SHORTLY'] +7902-96595-0013-122: ref=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0013-122: hyp=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0014-123: ref=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0014-123: hyp=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: ref=['EH', 'I', 'SAY', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: hyp=['EH', 'I', 'SAY', 'WAS', 'YOUR', 'MASTER'] +7902-96595-0016-125: ref=['GURR', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SIGNING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'THE', 'STEEP', 'TRACK', 'LEADING', 'TOWARD', 'THE', 'HOZE', 'WITH', 'THE', 'RABBITS', 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FURZE', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0016-125: hyp=['GERT', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SUNNING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'A', 'STEEP', 'CHECK', 'LEADING', 'TOWARD', 'THE', 'HOSE', 'WITH', 'THE', "RABBIT'S", 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FIRS', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0017-126: ref=['I', 'DUNNO', 'MUTTERED', 'DICK', 'AND', 'A', 'MAN', "CAN'T", 'BE', 'SURE'] +7902-96595-0017-126: hyp=['I', "DON'T", 'KNOW', 'MUTTERED', 'DICK', 'AND', 'A', 
'MEN', "CAN'T", 'BE', 'SURE'] +7902-96595-0018-127: ref=['GURR', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALLOWER', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0018-127: hyp=['GER', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALARY', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'OF', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0019-128: ref=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0019-128: hyp=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0020-129: ref=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0020-129: hyp=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0021-130: ref=['BEG', 'PARDON', 'SIR', 'BUT', 'CAN', 'YOU', 'AS', 'A', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0021-130: hyp=['BIG', 'PARTISER', 'BECAME', 'AS', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0022-131: ref=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0022-131: hyp=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0023-132: ref=['SIR', 'RISDON', 'WAS', 'SILENT'] +7902-96595-0023-132: hyp=['SIR', 'RICHARD', 'WAS', 'SILENT'] +7902-96595-0024-133: ref=['LADY', 'GRAEME', 'LOOKED', 'GHASTLY'] +7902-96595-0024-133: hyp=['LADY', 'GRAHAM', 'LOOKED', 'GHASTLY'] +7902-96595-0025-134: ref=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7902-96595-0025-134: hyp=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7975-280057-0000-1008: ref=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0000-1008: hyp=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0001-1009: ref=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0001-1009: hyp=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'A', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0002-1010: ref=['MY', 'MOTHER', 'WHO', 'WAS', 'BURSHEBA', 'FRISTOE', 'OF', 'INDEPENDENCE', 'WAS', 'THE', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOE', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AT', 'MY', 'GRANDFATHER', "FRISTOE'S", 'INSISTENCE'] +7975-280057-0002-1010: hyp=['MY', 'MOTHER', 'WHO', 'WAS', 'PERCEIVER', 'FOR', 'STOVE', 'OF', 'INDEPENDENCE', 'WAS', 'A', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOW', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AND', 'MY', 'GRANDFATHER', 'FIRST', 'THOSE', 'INSISTANTS'] +7975-280057-0003-1011: ref=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 
'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0003-1011: hyp=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0004-1012: ref=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEENTH', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'IN', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0004-1012: hyp=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEEN', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'AND', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0005-1013: ref=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0005-1013: hyp=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0006-1014: ref=['MY', 'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MAIL', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0006-1014: hyp=['MY', 'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MALE', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0007-1015: ref=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTPORT', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0007-1015: hyp=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTWARD', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0008-1016: ref=['MISSUS', 'WASHINGTON', 'WELLS', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', "LEE'S", 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0008-1016: hyp=['MISS', 'WASHINGTON', 'WALES', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', 'LEE', 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0009-1017: ref=['MISSUS', 'WELLS', 'STAYED', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHILE', 'HER', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'IN', 'CAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0009-1017: hyp=['MUST', 'WELL', 'STAY', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHETHER', 'HIS', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'IN', 'CAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0010-1018: ref=['MISSUS', 'MC', 'CORKLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0010-1018: hyp=['MISS', 'MICROCLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0011-1019: ref=['AS', 'THE', 'RAIDERS', 'LEFT', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0011-1019: hyp=['AS', 'THE', 'RAIDERS', 'LIVED', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0012-1020: ref=['NOW', 'OLD', 'LADY', 'CALL', 'ON', 'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'ON', 'COLE', 'YOUNGER', 'NOW'] 
+7975-280057-0012-1020: hyp=['NOW', 'LADY', 'CALL', 'ON', 'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'AND', 'CO', 'YOUNGER', 'NOW'] +7975-280057-0013-1021: ref=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0013-1021: hyp=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0014-1022: ref=['BUT', 'SHE', 'FAILED', 'TO', 'FIND', 'THE', 'COMFORT', 'SHE', 'SOUGHT', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FORM'] +7975-280057-0014-1022: hyp=['BUT', 'SHE', 'FAILED', 'TO', 'FANCY', 'COMFORT', 'SHE', 'SAW', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FOR'] +7975-280057-0015-1023: ref=['TWO', 'MONTHS', 'AFTER', 'THIS', 'INCIDENT', 'THE', 'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DEAD', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0015-1023: hyp=['TWO', 'MONTHS', 'AFTER', 'THE', 'INCIDENT', 'THE', 'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DAY', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0016-1024: ref=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'THE', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0016-1024: hyp=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'A', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0017-1025: ref=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLY', 'WHERE', 'SHE', 'WAS', 'HOUNDED', 'CONTINUALLY'] +7975-280057-0017-1025: hyp=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLEY', 'WHERE', 'SHE', 'WAS', 'HANDY', 'CONTINUAL'] +7975-280057-0018-1026: ref=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'AT', 'LEXINGTON', 'WEEKLY'] +7975-280057-0018-1026: hyp=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'IT', 'LESSINGTON', 'WEEKLY'] +7975-280057-0019-1027: ref=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OR', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0019-1027: hyp=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OF', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0020-1028: ref=['WHEN', 'I', 'WAS', 'TAKEN', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280057-0020-1028: hyp=['WHEN', 'I', 'WAS', 'TAKING', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280063-0000-1058: ref=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'WRITE', 'WHERE', 'COLONEL', 'HAYS', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] 
+7975-280063-0000-1058: hyp=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'WRITE', 'WHERE', 'COLONEL', 'HAYES', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] +7975-280063-0001-1059: ref=['BOONE', 'MUIR', 'AND', 'MYSELF', 'MET', 'COFFEE', 'AND', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0001-1059: hyp=['BOOM', 'YOU', 'AND', 'MYSELF', 'MAKE', 'COFFEE', 'IN', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0002-1060: ref=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'TO', 'LONE', 'JACK', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0002-1060: hyp=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'THE', 'LONG', 'JACK', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0003-1061: ref=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVALRYMEN', 'AND', 'TWO', 'PIECES', 'OF', "RABB'S", 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0003-1061: hyp=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVERNMENT', 'AND', 'TWO', 'PIECES', 'OF', 'RABBS', 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0004-1062: ref=['COME', 'IN', 'COLONEL', 'HAYS', 'EXCLAIMED', 'COLONEL', 'COCKRELL'] +7975-280063-0004-1062: hyp=['COMMONED', 'COLONEL', 'HAYES', 'EXCLAIMED', 'COLONEL', 'COCKLE'] +7975-280063-0005-1063: ref=['I', 'THINK', "HE'LL", 'BE', 'RATHER', 'TOUGH', 'MEAT', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIGHT', 'FOR', 'DINNER'] +7975-280063-0005-1063: hyp=['I', 'THINK', "HE'LL", 'BE', 'READY', 'TO', 'HAVE', 'MEET', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIGHT', 'FOR', 'DINNER'] +7975-280063-0006-1064: ref=['JACKMAN', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RECAPTURED', 'THE', 'PIECES'] +7975-280063-0006-1064: hyp=['JACK', 'WENT', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RE', 'CAPTURED', 'THE', 'PIECES'] +7975-280063-0007-1065: ref=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0007-1065: hyp=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0008-1066: ref=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0008-1066: hyp=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0009-1067: ref=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JUDGE', 'GEORGE', 'M', 'BENNETT', 'OF', 'MINNEAPOLIS', 'SAID'] +7975-280063-0009-1067: hyp=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JOE', 'GEORGE', 'I', 'INVITED', 'OF', 'MANY', 'APOLIS', 'SAID'] +7975-280063-0010-1068: ref=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 
'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'THE', 'DARING', 'YOUNG', 'RIDER', 'AS', 'COLE', 'YOUNGER'] +7975-280063-0010-1068: hyp=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'A', 'DARING', 'YOUNG', 'RATURIST', 'COAL', 'YOUNGER'] +7975-280063-0011-1069: ref=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0011-1069: hyp=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0012-1070: ref=['THE', 'WOUNDED', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280063-0012-1070: hyp=['THE', 'WOUNDED', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280076-0000-1029: ref=['ALTHOUGH', 'EVERY', 'BOOK', 'PURPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'HAS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0000-1029: hyp=['ALTHOUGH', 'EVERY', 'BOOK', 'REPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'IS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0001-1030: ref=['IT', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'OLL', 'SHEPHERD', 'RED', 'MONKERS', 'AND', 'BUD', 'PENCE', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTRELL'] +7975-280076-0001-1030: hyp=['IT', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'ALL', 'SHEPARD', 'REDMOCKERS', 'AND', 'BUD', 'PENCE', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTREAL'] +7975-280076-0002-1031: ref=['THIS', 'RAID', 'WAS', 'ACCOMPANIED', 'BY', 'BLOODSHED', 'JUDGE', 'MC', 'LAIN', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0002-1031: hyp=['THIS', 'RAY', 'WAS', 'ACCOMPANIED', 'BY', 'BLOTCHET', 'JOSE', 'MC', 'LANE', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0003-1032: ref=['NO', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0003-1032: hyp=['THOUGH', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0004-1033: ref=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBOCOCK', 'BROTHERS', 'BANK', 'AT', 'CORYDON', 'IOWA', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0004-1033: hyp=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBEY', "BROTHER'S", 'BANK', 'AT', 'CROYDEN', 'HOUR', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0005-1034: ref=['IT', 'WAS', 
'CHARGED', 'THAT', 'ARTHUR', 'MC', 'COY', 'OR', 'A', 'C', 'MC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0005-1034: hyp=['IT', 'WAS', 'CHARGE', 'THAT', 'OFTEN', 'MA', 'KOY', 'OR', 'A', 'SEA', 'MAC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0006-1035: ref=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0006-1035: hyp=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0007-1036: ref=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0007-1036: hyp=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0008-1037: ref=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 'CHARGED'] +7975-280076-0008-1037: hyp=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 'CHARGED'] +7975-280076-0009-1038: ref=['AT', 'THE', 'TIME', 'OF', 'THE', 'GALLATIN', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'IN', 'ELLIS', 'COUNTY', 'TEXAS', 'CATTLE', 'THAT', 'I', 'BOUGHT', 'FROM', 'PLEAS', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0009-1038: hyp=["IT'S", 'THE', 'TIME', 'OF', 'THE', 'GELATIN', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'AND', 'ILLIS', 'COUNTY', 'TEXAS', 'CATTLETTA', 'BROUGHT', 'FROM', 'PLACE', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0010-1039: ref=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SHERIFF', 'BARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0010-1039: hyp=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SIR', 'PARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0011-1040: ref=['I', 'BROUGHT', 'THE', 'CATTLE', 'TO', 'KANSAS', 'THAT', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0011-1040: hyp=['I', 'BROUGHT', 'THE', 'CATTLE', 'THE', 'KANSASTE', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0012-1041: ref=['I', 'THEN', 'WENT', 'TO', 'ARKANSAS', 'AND', 'RETURNED', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0012-1041: hyp=['I', 'THEN', 'WENT', 'TO', 'OUR', 'CONSOL', 'AND', 'RETURN', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0013-1042: ref=['I', 'WENT', 'TO', 'KANSAS', 'WHERE', 'OUR', 'CATTLE', 'WERE', 'IN', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', "RIDGE'S"] +7975-280076-0013-1042: hyp=['AND', 'WENT', 'TO', 'KANSAS', 'WHERE', 'A', 'CATTLERON', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', 'RICHES'] +7975-280076-0014-1043: ref=['DURING', 'THE', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 'SAINT', 'CLAIR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0014-1043: hyp=['DURING', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 'SAINT', 'CLAIR', 'OR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 
'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0015-1044: ref=['I', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', "WEBB'S"] +7975-280076-0015-1044: hyp=['AND', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', 'WEBBS'] +7975-280076-0016-1045: ref=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'L', 'W', "TWYMAN'S"] +7975-280076-0016-1045: hyp=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'OLD', 'W', 'TWIMMAN'] +7975-280076-0017-1046: ref=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0017-1046: hyp=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0018-1047: ref=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STAYED', 'IN', 'THE', 'CITY', 'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'THROUGH', 'THE', 'CITY'] +7975-280076-0018-1047: hyp=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STATE', 'IN', 'THE', 'CITY', 'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'TO', 'THE', 'CITY'] +7975-280076-0019-1048: ref=['I', 'MET', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUDSPETH'] +7975-280076-0019-1048: hyp=['AMID', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUSBUTH'] +7975-280076-0020-1049: ref=['WE', 'WERE', 'NOT', 'ON', 'GOOD', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0020-1049: hyp=['WE', 'WERE', 'NOT', 'ONLY', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0021-1050: ref=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0021-1050: hyp=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0022-1051: ref=['DOCTOR', 'L', 'LEWIS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0022-1051: hyp=['DOCTOR', 'ELUS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0023-1052: ref=['THERE', 'WERE', 'FIFTY', 'OR', 'A', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0023-1052: hyp=['THERE', 'WERE', 'FIFTY', 'OR', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0024-1053: ref=['HELVIN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENTON', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SAT', 'IN', 'FRONT', 'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISSUS', 'FICKLE', 'AFTER', 'SERVICE'] +7975-280076-0024-1053: hyp=['HELVAN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENSON', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SET', 'IN', 'FRONT', 
'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISS', 'FICKLE', 'AFTER', 'THE', 'SERVICE'] +7975-280076-0025-1054: ref=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CARROLL', 'PARISH', 'LOUISIANA'] +7975-280076-0025-1054: hyp=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CAROL', 'PARISH', 'LOUISIANA'] +7975-280076-0026-1055: ref=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0026-1055: hyp=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0027-1056: ref=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THAT', 'TIME', 'I', 'WAS', 'AT', 'NEOSHO', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'RAY'] +7975-280076-0027-1056: hyp=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THE', 'TIME', 'I', 'WAS', 'AT', 'NIOKILL', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'WRAYE'] +7975-280076-0028-1057: ref=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'ADAIR', 'IOWA', 'THAT', 'THERE', 'FIRST', 'APPEARED', 'A', 'DELIBERATE', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'IN', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280076-0028-1057: hyp=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'EIGHT', 'AIR', 'IOWA', 'THAT', 'THEIR', 'FIRST', 'APPEARED', 'A', 'DELIVERED', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'AND', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280084-0000-1090: ref=['I', 'URGED', 'ON', 'THE', 'BOYS', 'THAT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0000-1090: hyp=['I', 'URGED', 'ON', 'THE', 'BOYS', 'AT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0001-1091: ref=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRY', 'GOODS', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'THEY', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0001-1091: hyp=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRAGOOD', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'I', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0002-1092: ref=['WHEN', 'WE', 'CAME', 'UP', 'I', 'TOLD', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] +7975-280084-0002-1092: hyp=['WHEN', 'WE', 'CAME', 'UP', 'I', 'TOLD', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 
'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] +7975-280084-0003-1093: ref=['J', 'S', 'ALLEN', 'WHOSE', 'HARDWARE', 'STORE', 'WAS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'AROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0003-1093: hyp=['J', 'HELEN', 'WHOSE', 'HARD', 'WORKED', 'ALWAYS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'ROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0004-1094: ref=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0004-1094: hyp=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0005-1095: ref=['AND', 'I', 'CALLED', 'TO', 'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'A', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0005-1095: hyp=['AND', 'I', 'CALL', 'TO', 'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'THE', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0006-1096: ref=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0006-1096: hyp=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0007-1097: ref=['CHADWELL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JOINED', 'US', 'SHOUTING', 'TO', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0007-1097: hyp=['TEDWELL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JARNDYCE', 'SHOUTING', 'TO', 'THE', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0008-1098: ref=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0008-1098: hyp=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0009-1099: ref=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0009-1099: hyp=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0010-1100: ref=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TRY', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0010-1100: hyp=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TROT', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0011-1101: ref=['DOCTOR', 'WHEELER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0011-1101: hyp=['DOCTOR', 'WHALER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0012-1102: ref=['CHANGING', 'HIS', 'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0012-1102: hyp=['CHANGING', 'HIS', 
'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0013-1103: ref=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0013-1103: hyp=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0014-1104: ref=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTHFIELD', 'NARRATOR'] +7975-280084-0014-1104: hyp=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTH', 'FIELD', 'NARRATOR'] +7975-280084-0015-1105: ref=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0015-1105: hyp=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0016-1106: ref=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'PITTS', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0016-1106: hyp=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'FITZ', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0017-1107: ref=['THE', 'FIRST', 'ONE', 'MISSED', 'HIM', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280084-0017-1107: hyp=['THE', 'FIRST', 'ONE', 'MISTING', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280085-0000-1071: ref=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0000-1071: hyp=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0001-1072: ref=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAMPED', 'BETWEEN', 'ELYSIAN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0001-1072: hyp=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAME', 'BETWEEN', 'ALYCIAN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0002-1073: ref=["BOB'S", 'SHATTERED', 'ELBOW', 'WAS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MANKATO'] +7975-280085-0002-1073: hyp=['BOB', 'SATURDAIL', 'BOWS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MANKADO'] +7975-280085-0003-1074: ref=['THAT', 'DAY', 'A', 'MAN', 'NAMED', 'DUNNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0003-1074: hyp=['THAT', 'THEY', 'A', 'MAN', 'NAMED', 'DINNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0004-1075: ref=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 'AGREED', 'NOT', 'TO'] +7975-280085-0004-1075: hyp=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 'AGREED', 'NOT', 'TO'] +7975-280085-0005-1076: ref=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POSTHASTE', 'INTO', 
'MANKATO', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0005-1076: hyp=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POST', 'HASTE', 'INTO', 'MANKE', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0006-1077: ref=['THE', 'WHISTLE', 'ON', 'THE', 'OIL', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0006-1077: hyp=['THE', 'WHISTLE', 'ON', 'THE', 'ORE', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0007-1078: ref=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 'IT', 'PILLOWED', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0007-1078: hyp=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 'A', 'PILL', 'IT', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'A', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0008-1079: ref=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0008-1079: hyp=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0009-1080: ref=['WE', 'WERE', 'IN', 'SIGHT', 'OF', 'OUR', 'LONG', 'SOUGHT', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0009-1080: hyp=['WE', 'WERE', 'INSIDE', 'OF', 'OUR', 'LONG', 'SOWED', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0010-1081: ref=['SIX', 'STEPPED', 'TO', 'THE', 'FRONT', 'SHERIFF', 'GLISPIN', 'COLONEL', 'T', 'L', 'VOUGHT', 'B', 'M', 'RICE', 'G', 'A', 'BRADFORD', 'C', 'A', 'POMEROY', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0010-1081: hyp=['SIX', 'STEPS', 'TO', 'THE', 'FRONT', 'SHERIFF', 'CLISPIN', 'COLONEL', 'T', 'L', 'WALT', 'B', 'AND', 'RICE', 'G', 'A', 'BRADFORD', 'C', 'A', 'POMROY', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0011-1082: ref=['FORMING', 'IN', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0011-1082: hyp=['FORMING', 'A', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0012-1083: ref=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0012-1083: hyp=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0013-1084: ref=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'FOR', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 
'LINE', 'JUST', 'CHARGE', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0013-1084: hyp=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'TILL', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 'LINE', 'JUST', 'SHARS', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0014-1085: ref=['I', 'GOT', 'UP', 'AS', 'THE', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0014-1085: hyp=['I', 'GOT', 'UP', 'AS', 'A', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0015-1086: ref=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LINE', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0015-1086: hyp=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LAND', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0016-1087: ref=['SHERIFF', 'GLISPIN', 'OF', 'WATONWAN', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0016-1087: hyp=['SURE', 'OF', 'GLISPIN', 'OF', 'WATERWAM', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0017-1088: ref=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTHFIELD', 'I', 'HAD', 'ELEVEN', 'WOUNDS'] +7975-280085-0017-1088: hyp=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTH', 'FIELD', 'I', 'HAD', 'ELEVEN', 'ONES'] +7975-280085-0018-1089: ref=['AND', 'SHERIFF', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTORATE', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +7975-280085-0018-1089: hyp=['IN', 'CHEER', 'OF', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTOR', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +8131-117016-0000-1303: ref=['CAPTAIN', 'MURDOCH'] +8131-117016-0000-1303: hyp=['CAPTAIN', 'MURDOCK'] +8131-117016-0001-1304: ref=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0001-1304: hyp=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0002-1305: ref=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAW', 'ENFORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0002-1305: hyp=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAWN', 'FORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0003-1306: ref=['THE', 'STONEWALL', 'GANG', 'NUMBERED', 'PERHAPS', 'FIVE', 'HUNDRED'] +8131-117016-0003-1306: hyp=['THE', 'STONE', 'WALL', 'GANG', 'NUMBERED', 'PERHAPS', 'FIVE', 'HUNDRED'] +8131-117016-0004-1307: 
ref=['EVEN', 'DERELICTS', 'AND', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0004-1307: hyp=['EVEN', 'DEAR', 'ALEXAM', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORIES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0005-1308: ref=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RACKETEERS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0005-1308: hyp=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RAGATIRS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0006-1309: ref=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0006-1309: hyp=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0007-1310: ref=['CAPTAIN', 'MURDOCH', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0007-1310: hyp=['CAPTAIN', 'MURDOCK', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0008-1311: ref=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0008-1311: hyp=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0009-1312: ref=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0009-1312: hyp=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0010-1313: ref=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0010-1313: hyp=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0011-1314: ref=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERYONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0011-1314: hyp=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERY', 'ONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0012-1315: ref=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKEDOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONEWALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0012-1315: hyp=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKE', 'DOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONE', 'WALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0013-1316: ref=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0013-1316: hyp=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 
'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0014-1317: ref=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEAM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TONIGHT', 'PICK', 'ANY', 'ROUTE', "THAT'S", 'OPEN', 'OKAY', 'MEN', "LET'S", 'GO'] +8131-117016-0014-1317: hyp=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEEM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TO', 'NIGHT', 'PICK', 'ANY', 'ROUGH', "THAT'S", 'OPEN', 'OH', 'CAME', 'AND', "LET'S", 'GO'] +8131-117016-0015-1318: ref=['BRUCE', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MURDOCH'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0015-1318: hyp=['BRUSH', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MARDOC'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0016-1319: ref=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0016-1319: hyp=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0017-1320: ref=['FOR', 'A', 'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0017-1320: hyp=['FOR', 'A', 'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0018-1321: ref=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0018-1321: hyp=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0019-1322: ref=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0019-1322: hyp=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0020-1323: ref=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHOR', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0020-1323: hyp=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHER', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0021-1324: ref=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'AT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0021-1324: hyp=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'IT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0022-1325: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0022-1325: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0023-1326: ref=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0023-1326: hyp=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0024-1327: ref=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0024-1327: hyp=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0025-1328: ref=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0025-1328: hyp=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0026-1329: ref=['THE', 'OTHER', 'FOUR', 'COPS', 'HAD', 'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0026-1329: hyp=['THE', 'OTHER', 'FUPS', 'HAD', 'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0027-1330: ref=['HE', 
'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0027-1330: hyp=['HE', 'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0028-1331: ref=['THEY', 'ROUNDED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'COPS', 'STARTED', 'OFF'] +8131-117016-0028-1331: hyp=['THEY', 'ROUTED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'CUPS', 'STARTED', 'OFF'] +8131-117016-0029-1332: ref=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0029-1332: hyp=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0030-1333: ref=["WE'RE", 'NOT', 'USING', 'WAGONS', 'MURDOCH', 'TOLD', 'HIM', 'LINE', 'THEM', 'UP'] +8131-117016-0030-1333: hyp=['WERE', 'NOT', 'USING', 'WAGONS', 'MURDOCK', 'TOLD', 'HIM', 'LYING', 'THEM', 'UP'] +8131-117016-0031-1334: ref=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'IF', 'THEY', 'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0031-1334: hyp=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'THAT', 'THEY', 'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0032-1335: ref=['MURDOCH', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDERS', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0032-1335: hyp=['MURDOCK', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDER', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0033-1336: ref=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', 'GORDON', 'FELT'] +8131-117016-0033-1336: hyp=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', "GORDON'S", 'FELT'] +8131-117016-0034-1337: ref=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0034-1337: hyp=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0035-1338: ref=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0035-1338: hyp=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0036-1339: ref=['MURDOCH', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TOUGHS'] +8131-117016-0036-1339: hyp=['MURDOCK', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TUFTS'] +8131-117016-0037-1340: ref=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0037-1340: hyp=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0038-1341: ref=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0038-1341: hyp=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0039-1342: ref=['MURDOCH', 'SENT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] +8131-117016-0039-1342: hyp=['MARDOX', 'SAT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] +8131-117016-0040-1343: ref=['IN', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', "WHO'D", 'BEEN', 'BEATEN', 'BEFORE'] 
+8131-117016-0040-1343: hyp=['AND', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', 'WHO', 'HAD', 'BEEN', 'BEATEN', 'BEFORE'] +8131-117016-0041-1344: ref=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0041-1344: hyp=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0042-1345: ref=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0042-1345: hyp=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0043-1346: ref=['NO', 'THE', 'COPS', "THEY'RE", 'GIVING', 'ME', "WE'RE", 'COVERED', 'GORDON'] +8131-117016-0043-1346: hyp=['NOW', 'THE', 'CAPS', 'ARE', 'GIVING', 'ME', 'WERE', 'COVERED', 'GORDON'] +8131-117016-0044-1347: ref=['BUT', 'THE', 'STONEWALL', 'GANG', 'IS', 'BACKING', 'WAYNE'] +8131-117016-0044-1347: hyp=['BUT', 'THE', 'STERN', 'WALL', 'GANG', 'IS', 'BACK', 'IN', 'WAIN'] +8131-117016-0045-1348: ref=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 'ON', 'THEM'] +8131-117016-0045-1348: hyp=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 'ON', 'THEM'] +8131-117016-0046-1349: ref=['BRUCE', 'GORDON', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0046-1349: hyp=['BRUCE', 'GORD', 'AND', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0047-1350: ref=['MURDOCH', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0047-1350: hyp=['MARDOCK', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0048-1351: ref=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAYNE', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0048-1351: hyp=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAIN', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0049-1352: ref=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'CROOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0049-1352: hyp=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'COOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0050-1353: ref=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0050-1353: hyp=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0051-1354: ref=['NO', "YOU'RE", 'A', 'FIRSTER', 'HE', "CAN'T", 'LOSE'] +8131-117016-0051-1354: hyp=['NO', 'YOU', 'ARE', 'A', 'FORSTER', 'HE', "CAN'T", 'LOSE'] +8131-117016-0052-1355: ref=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLANET', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0052-1355: hyp=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLANET', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0053-1356: ref=['IT', 'FITTED', 'WITH', 'THE', 'DIRE', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'THE', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0053-1356: hyp=['YET', 'FITTED', 'WITH', 'THE', 'DIA', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'A', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0054-1357: ref=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 'AROUND', 'THE', 'CORNER'] +8131-117016-0054-1357: hyp=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 'AROUND', 'THE', 'CORNER'] +8131-117016-0055-1358: ref=['COST', 'EM', 'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0055-1358: hyp=['COSTUM', 
'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0056-1359: ref=['BECAUSE', 'IZZY', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0056-1359: hyp=['BECAUSE', 'IZZIE', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0057-1360: ref=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0057-1360: hyp=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0058-1361: ref=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCH', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0058-1361: hyp=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCK', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0059-1362: ref=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0059-1362: hyp=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0060-1363: ref=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0060-1363: hyp=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0061-1364: ref=['BUT', 'THERE', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAYNE', 'WAS', 'RE', 'ELECTED'] +8131-117016-0061-1364: hyp=['BUT', 'THEIR', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAIN', 'WAS', 'RE', 'ELECTED'] +8131-117017-0000-1270: ref=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOR', 'BULBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0000-1270: hyp=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0001-1271: ref=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0001-1271: hyp=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0002-1272: ref=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', "HALEY'S", 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0002-1272: hyp=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', 'HALELY', 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0003-1273: ref=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARSPEAKER', 'LET', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0003-1273: hyp=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARKEER', 'LET', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0004-1274: ref=['GUNS', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0004-1274: hyp=['GUN', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0005-1275: ref=['YOU', "CAN'T", 'DO', 'IT', 'TO', 'ME'] +8131-117017-0005-1275: hyp=['YOU', "CAN'T", 'DO', 'IT', 'TO', 'ME'] +8131-117017-0006-1276: ref=["I'M", 'REFORMED', "I'M", 'GOING', 
'STRAIGHT'] +8131-117017-0006-1276: hyp=['I', 'AM', 'REFORMED', "I'M", 'GOING', 'STRAIGHT'] +8131-117017-0007-1277: ref=['YOU', 'DAMNED', 'COPS', "CAN'T", "O'NEILL", 'WAS', 'BLUBBERING'] +8131-117017-0007-1277: hyp=['YOU', 'DAMNED', 'COPSE', "CAN'T", "O'NEIA", 'WAS', 'BLUBBERING'] +8131-117017-0008-1278: ref=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELLTALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THAT', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0008-1278: hyp=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELL', 'TALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THEY', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0009-1279: ref=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COP', 'WENT', 'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0009-1279: hyp=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COPP', 'WENT', 'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0010-1280: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0010-1280: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0011-1281: ref=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0011-1281: hyp=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0012-1282: ref=['MUST', 'OF', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0012-1282: hyp=['MUST', 'HAVE', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0013-1283: ref=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0013-1283: hyp=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0014-1284: ref=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MURDOCH', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0014-1284: hyp=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MURDOCK', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0015-1285: ref=['WHATEVER', 'COMES', 'TO', 'HAND', "GOV'NOR"] +8131-117017-0015-1285: hyp=['WHATEVER', 'COMES', 'TO', 'HAND', 'GOVERNOR'] +8131-117017-0016-1286: ref=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0016-1286: hyp=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0017-1287: ref=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOPE'] +8131-117017-0017-1287: hyp=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOTE'] +8131-117017-0018-1288: ref=['YOU', 'OWE', 'ME', 'SOME', 'BILLS', "GOV'NOR"] +8131-117017-0018-1288: hyp=['YOU', 'ARE', 'ME', 'SOME', 'BILLS', "GUV'NER"] +8131-117017-0019-1289: ref=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0019-1289: hyp=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0020-1290: ref=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 'CAMPAIGN', 'FUND', 'SO', 'I', 'HADDA', 'FILL', 'IN'] +8131-117017-0020-1290: hyp=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 'CAPTAIN', 'FUND', 
'SO', 'I', 'HAD', 'A', 'FILL', 'IN'] +8131-117017-0021-1291: ref=['A', 'THOUSAND', 'INTEREST', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0021-1291: hyp=['A', 'THOUSAND', 'INTERESTS', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0022-1292: ref=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IZZY'] +8131-117017-0022-1292: hyp=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IS', 'HE'] +8131-117017-0023-1293: ref=['HUH', 'IZZY', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0023-1293: hyp=['HOW', 'AS', 'HE', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0024-1294: ref=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0024-1294: hyp=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0025-1295: ref=['FOR', 'A', 'SECOND', "IZZY'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0025-1295: hyp=['FOR', 'A', 'SECOND', "IZZIE'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0026-1296: ref=['HE', 'PULLED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0026-1296: hyp=['HE', 'POURED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0027-1297: ref=['THANKS', 'IZZY', 'THANKS', 'YOURSELF'] +8131-117017-0027-1297: hyp=['THANKS', 'IS', 'HE', 'THANKS', 'YOURSELF'] +8131-117017-0028-1298: ref=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0028-1298: hyp=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0029-1299: ref=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0029-1299: hyp=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0030-1300: ref=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'COPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IZZY', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USE', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'TO', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0030-1300: hyp=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'CUPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IZZIE', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USED', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'AT', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0031-1301: ref=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 'THE', 'ROOM', 'AND', 'DROPPED', 'ONTO', 'THE', 'BENCH'] +8131-117017-0031-1301: hyp=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 'THE', 'ROOM', 'AND', 'DROPPED', 'INTO', 'THE', 'BENCH'] +8131-117017-0032-1302: ref=['GO', 'ON', 'ACCEPT', 'DAMN', 'IT'] +8131-117017-0032-1302: hyp=['GO', 'ON', 
'EXCEPT', 'DEAR', 'MIN'] +8131-117029-0000-1247: ref=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0000-1247: hyp=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0001-1248: ref=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0001-1248: hyp=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0002-1249: ref=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0002-1249: hyp=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0003-1250: ref=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0003-1250: hyp=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0004-1251: ref=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0004-1251: hyp=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0005-1252: ref=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0005-1252: hyp=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0006-1253: ref=["SCHULBERG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0006-1253: hyp=["SHOALBURG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0007-1254: ref=['FATS', 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0007-1254: hyp=["FAT'S", 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0008-1255: ref=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLED', 'BEER'] +8131-117029-0008-1255: hyp=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLE', 'BEER'] +8131-117029-0009-1256: ref=['THOUGHT', "YOU'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0009-1256: hyp=['THOUGHT', "YE'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0010-1257: ref=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'THE', "OTHER'S", 'COMMENT', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0010-1257: hyp=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'OTHERS', 'COMMENTS', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0011-1258: ref=['GUESS', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BILL', 'THIS', 'AS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0011-1258: hyp=['GES', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BUILD', 'THIS', 'IS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0012-1259: ref=['THERE', 
'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0012-1259: hyp=['THERE', 'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0013-1260: ref=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0013-1260: hyp=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0014-1261: ref=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0014-1261: hyp=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0015-1262: ref=["LET'S", 'SAY', "YOU'VE", 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0015-1262: hyp=['LET', 'SAVE', 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0016-1263: ref=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0016-1263: hyp=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0017-1264: ref=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0017-1264: hyp=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0018-1265: ref=['AND', "I'VE", 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGAN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0018-1265: hyp=['AND', 'I', 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGIN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0019-1266: ref=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0019-1266: hyp=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0020-1267: ref=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COBBER', 'HE', 'ASKED'] +8131-117029-0020-1267: hyp=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COPPER', 'HE', 'ASKED'] +8131-117029-0021-1268: ref=['I', 'I', 'OH', 'DRAT', 'IT', "I'M", 'GETTING', 'OLD', 'IZZY', 'YOU', 'TELL', 'HIM'] +8131-117029-0021-1268: hyp=['I', 'I', 'OH', 'DRAT', 'IT', "I'M", 'GETTING', 'OLD', 'IS', 'HE', 'YOU', 'TELL', 'HIM'] +8131-117029-0022-1269: ref=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZY', 'SHOOK', 'HIS', 'HEAD'] +8131-117029-0022-1269: hyp=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZIE', 'SHOOK', 'HIS', 'HEAD'] +8188-269288-0000-2881: ref=['ANNIE', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LESLIE'] +8188-269288-0000-2881: hyp=['ANY', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LISLEY'] +8188-269288-0001-2882: ref=['LESLIE', 'DETERMINED', 'TO', 'TRY', 'FOR', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0001-2882: hyp=['LESLIE', 'DETERMINED', 'TO', 'TRIFLE', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0002-2883: ref=['HER', 'TASTES', 'ALL', 'LAY', 'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', "MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0002-2883: hyp=['HER', 'TASTES', 'ALL', 'LAY', 
'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', "MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0003-2884: ref=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0003-2884: hyp=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0004-2885: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0004-2885: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0005-2886: ref=['WHY', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0005-2886: hyp=['WHY', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0006-2887: ref=['I', "WON'T", 'BE', 'THE', 'CONSTANT', 'WORRY', 'AND', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0006-2887: hyp=['I', 'WOULD', 'BE', 'THE', 'CONSTANT', 'WORRY', 'IN', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0007-2888: ref=['IT', 'IS', 'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', "WODE'S", 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOMFELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0007-2888: hyp=['IT', 'IS', 'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', 'WORDS', 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOM', 'FELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0008-2889: ref=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0008-2889: hyp=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0009-2890: ref=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0009-2890: hyp=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0010-2891: ref=['I', 'MUST', 'PASS', 'IN', 'HONORS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0010-2891: hyp=['I', 'MUST', 'PASS', 'AN', 'HONOURS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0011-2892: ref=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0011-2892: hyp=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0012-2893: ref=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0012-2893: hyp=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0013-2894: ref=['JANE', 'HERIOT', 'STOOD', 'WITHOUT'] +8188-269288-0013-2894: hyp=['JANE', 'HERRIOT', 'STOOD', 'WITHOUT'] +8188-269288-0014-2895: ref=['THESE', 'LETTERS', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANNIE', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UPSTAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0014-2895: hyp=['THESE', 'LETTERS', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANY', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UP', 'STAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0015-2896: ref=['LESLIE', 'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0015-2896: hyp=['LIZLY', 'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0016-2897: ref=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 
'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANNIE', 'COLCHESTER'] +8188-269288-0016-2897: hyp=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELLING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANY', 'COLCHESTER'] +8188-269288-0017-2898: ref=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LESLIE'] +8188-269288-0017-2898: hyp=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LESLIE'] +8188-269288-0018-2899: ref=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0018-2899: hyp=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0019-2900: ref=['I', 'HAVE', 'BEEN', 'STARVING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0019-2900: hyp=['I', 'HAVE', 'BEEN', 'STARVING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0020-2901: ref=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LESLIE', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0020-2901: hyp=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LINSLEY', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0021-2902: ref=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0021-2902: hyp=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0022-2903: ref=['LESLIE', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'OWN', 'LETTERS'] +8188-269288-0022-2903: hyp=['LISALLY', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'ON', 'LETTERS'] +8188-269288-0023-2904: ref=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0023-2904: hyp=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0024-2905: ref=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0024-2905: hyp=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0025-2906: ref=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0025-2906: hyp=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0026-2907: ref=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0026-2907: hyp=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0027-2908: ref=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0027-2908: hyp=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0028-2909: ref=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0028-2909: hyp=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0029-2910: ref=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0029-2910: hyp=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0030-2911: ref=['LESLIE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0030-2911: hyp=['LIZLY', 'WENT', 
'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0031-2912: ref=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'WAS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0031-2912: hyp=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'IS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0032-2913: ref=['WHAT', 'CAN', 'SHE', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0032-2913: hyp=['WHAT', "CAN'T", 'YOU', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0033-2914: ref=['LESLIE', 'LEFT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANNIE', 'RETURNING'] +8188-269288-0033-2914: hyp=['THIS', 'LILY', 'LIT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PLACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANY', 'RETURNING'] +8188-269288-0034-2915: ref=["ANNIE'S", 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0034-2915: hyp=['AND', 'HIS', 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0035-2916: ref=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LESLIE', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0035-2916: hyp=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LIZZLING', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0036-2917: ref=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0036-2917: hyp=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0037-2918: ref=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0037-2918: hyp=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0038-2919: ref=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0038-2919: hyp=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0039-2920: ref=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0039-2920: hyp=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0040-2921: ref=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0040-2921: hyp=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0041-2922: ref=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0041-2922: hyp=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0042-2923: ref=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0042-2923: hyp=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0043-2924: ref=['NOW', 'DRINK', 'THIS', 'AT', 'ONCE', 'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0043-2924: hyp=['NOW', 'DRINK', 'THIS', 'AT', 
'ONCE', 'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0044-2925: ref=['ANNIE', 'STARED', 'VACANTLY', 'AT', 'THE', 'COCOA', 'THEN', 'SHE', 'UTTERED', 'A', 'LAUGH'] +8188-269288-0044-2925: hyp=['ANY', 'STEERED', 'VACANTLY', 'AT', 'THE', 'COOKER', "DIDN'T", 'A', 'LAUGH'] +8188-269288-0045-2926: ref=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0045-2926: hyp=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0046-2927: ref=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0046-2927: hyp=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0047-2928: ref=['I', 'AM', 'SLEEPY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0047-2928: hyp=['I', 'AMY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0048-2929: ref=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0048-2929: hyp=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0049-2930: ref=["CAN'T", 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0049-2930: hyp=['COULD', 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0050-2931: ref=['CERTAINLY', 'SAID', 'LESLIE'] +8188-269288-0050-2931: hyp=['CERTAINLY', 'SAID', 'IT', 'EASILY'] +8188-269288-0051-2932: ref=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'SHE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0051-2932: hyp=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'HE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0052-2933: ref=["ANNIE'S", 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0052-2933: hyp=['ANY', 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0053-2934: ref=['ANNIE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFIDE', 'IN', 'ANYONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0053-2934: hyp=['AND', 'HE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFINE', 'IN', 'ANY', 'ONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0054-2935: ref=['TIRED', 'OUT', 'LESLIE', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0054-2935: hyp=['TIE', 'IT', 'OUT', 'LIZZLY', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0055-2936: ref=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0055-2936: hyp=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0056-2937: ref=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEY', 'WERE', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0056-2937: hyp=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEIR', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0057-2938: ref=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269288-0057-2938: hyp=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269290-0000-2823: ref=['THE', 'GUILD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0000-2823: hyp=['THE', 'GULD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0001-2824: ref=['IMMEDIATELY', 
'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0001-2824: hyp=['IMMEDIATELY', 'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0002-2825: ref=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0002-2825: hyp=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0003-2826: ref=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'IN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0003-2826: hyp=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'AN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0004-2827: ref=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0004-2827: hyp=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0005-2828: ref=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0005-2828: hyp=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0006-2829: ref=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0006-2829: hyp=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0007-2830: ref=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0007-2830: hyp=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0008-2831: ref=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0008-2831: hyp=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0009-2832: ref=['YOU', 'FRET', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0009-2832: hyp=['YOU', 'FRITTEN', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0010-2833: ref=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'ROUND', 'HER', 'HEAD', 'AND', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0010-2833: hyp=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'AROUND', 'A', 'HIDDEN', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0011-2834: ref=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANNIE', 'CALLED', 'AFTER', 'HER'] +8188-269290-0011-2834: hyp=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANY', 'CALLED', 'AFTER', 'HER'] +8188-269290-0012-2835: ref=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LESLIE', 'TOOK', 'IT', 'OUT'] +8188-269290-0012-2835: hyp=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LIZLY', 'TOOK', 'IT', 'OUT'] +8188-269290-0013-2836: ref=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0013-2836: hyp=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0014-2837: ref=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANNIE'] +8188-269290-0014-2837: hyp=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANY'] +8188-269290-0015-2838: ref=['OH', 'I', "WON'T", 'LOCK', 'YOU', 'OUT', 'SHE', 'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 'KEY'] +8188-269290-0015-2838: hyp=['OH', 'I', "WON'T", 'LOOK', 'YOU', 'OUT', 'SHE', 'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 
'KEY'] +8188-269290-0016-2839: ref=['JANE', "HERIOT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0016-2839: hyp=['JANE', "HERETT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0017-2840: ref=['AS', 'SHE', 'WALKED', 'DOWN', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'IN', 'THE', 'LOCK'] +8188-269290-0017-2840: hyp=['AS', 'SHE', 'WALKED', 'ROUND', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'TO', 'THE', 'LOCK'] +8188-269290-0018-2841: ref=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0018-2841: hyp=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0019-2842: ref=['OH', 'I', "WON'T", 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0019-2842: hyp=['OH', 'I', 'WOULD', 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0020-2843: ref=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] +8188-269290-0020-2843: hyp=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] +8188-269290-0021-2844: ref=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'EILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0021-2844: hyp=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'AILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0022-2845: ref=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0022-2845: hyp=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0023-2846: ref=['IF', 'YOU', 'ARE', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'AND', 'PAY', 'US', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0023-2846: hyp=['IF', 'YOU', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'AND', 'PASS', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0024-2847: ref=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0024-2847: hyp=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0025-2848: ref=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0025-2848: hyp=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0026-2849: ref=["THAT'S", 'JUST', 'IT', 'JANE', 'THAT', 'IS', 'WHAT', 'FRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0026-2849: hyp=["THAT'S", 'JUST', 'A', 'CHAIN', 'THAT', 'IS', 'WHAT', 'BRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0027-2850: ref=['REFUSES', 'TO', 'COME', 'SHE', 'CRIED'] +8188-269290-0027-2850: hyp=['REFUSES', 'TO', 'COME', 'SHE', 'CRI'] +8188-269290-0028-2851: ref=['SHE', 'WILL', 'GET', 'INTO', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0028-2851: hyp=["SHE'LL", 'GET', 'IN', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0029-2852: ref=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0029-2852: hyp=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0030-2853: ref=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 'SAID', 'LESLIE'] +8188-269290-0030-2853: hyp=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 'SAID', 'LESLIE'] +8188-269290-0031-2854: ref=['DO', 'COME', 'ANNIE', 'DO'] +8188-269290-0031-2854: hyp=['DO', 'COME', 'ANY', 'DO'] +8188-269290-0032-2855: ref=['SCARCELY', 
'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0032-2855: hyp=['SCARCELY', 'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0033-2856: ref=['BUT', 'MARJORIE', 'AND', 'EILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0033-2856: hyp=['BUT', 'MARGERY', 'AND', 'AILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0034-2857: ref=['MISS', 'LAUDERDALE', 'WAS', 'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPALS', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0034-2857: hyp=['MISS', 'LAUDIDAL', 'WAS', 'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPLES', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0035-2858: ref=['THEN', 'A', 'ROLL', 'CALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTEE', 'WAS', 'ANNIE', 'COLCHESTER'] +8188-269290-0035-2858: hyp=['THEN', 'A', 'ROCCALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTE', 'WAS', 'ANY', 'COLCHESTER'] +8188-269290-0036-2859: ref=['THE', 'PHYSICAL', 'PART', 'OF', 'YOUR', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0036-2859: hyp=['THE', 'PHYSICAL', 'PART', 'OF', 'THE', 'OLD', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0037-2860: ref=['HEAR', 'HEAR', 'AND', 'ONCE', 'AGAIN', 'HEAR'] +8188-269290-0037-2860: hyp=['HAIR', 'HAIR', 'AND', 'ONCE', 'AGAIN', 'HAIR'] +8188-269290-0038-2861: ref=['SHE', 'UTTERED', 'HER', 'STRANGE', 'REMARK', 'STANDING', 'UP'] +8188-269290-0038-2861: hyp=['SHE', 'UTTERED', 'A', 'STRAIN', 'REMARK', 'STANDING', 'UP'] +8188-269290-0039-2862: ref=['MARJORIE', 'AND', 'EILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0039-2862: hyp=['MARGERY', 'AND', 'AILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0040-2863: ref=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELLE', 'ACHESON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0040-2863: hyp=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELL', 'ARCHISON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0041-2864: ref=['THE', 'NAMES', 'OF', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0041-2864: hyp=['THE', 'NAMES', 'OF', 'THE', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0042-2865: ref=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0042-2865: hyp=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0043-2866: ref=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0043-2866: hyp=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0044-2867: ref=['GOD', 'ANSWERS', 'TO', 'EACH', 'OF', 'YOU', 
'YOU', 'ARE'] +8188-269290-0044-2867: hyp=['GOD', 'ADDEST', 'EACH', 'OF', 'YOU', 'YOU', 'ARE'] +8188-269290-0045-2868: ref=['THE', 'WORLD', 'SAYS', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'YES', 'YOU', 'ARE'] +8188-269290-0045-2868: hyp=['THE', 'WORLD', 'TEETH', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'YES', 'YOU', 'ARE'] +8188-269290-0046-2869: ref=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0046-2869: hyp=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0047-2870: ref=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'A', 'CERTAIN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0047-2870: hyp=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'EXERT', 'AN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0048-2871: ref=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0048-2871: hyp=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0049-2872: ref=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0049-2872: hyp=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0050-2873: ref=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0050-2873: hyp=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0051-2874: ref=['ANNIE', 'COLCHESTER', 'IS', 'YOUR', 'ROOMFELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0051-2874: hyp=['ANY', 'COLCHESTER', 'AS', 'YOUR', 'ROOM', 'FELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0052-2875: ref=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0052-2875: hyp=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0053-2876: ref=['I', 'AM', 'AFRAID', 'I', 'AM', 'REPLIED', 'LESLIE', 'DISTRESS', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0053-2876: hyp=["I'M", 'AFRAID', 'I', 'AM', 'REPLIED', 'LIZZIE', 'DISTRESSED', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0054-2877: ref=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAUDERDALE', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0054-2877: hyp=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAURAIL', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0055-2878: ref=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0055-2878: hyp=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0056-2879: ref=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAS', 'TO', 'BE', 'PUNISHED'] +8188-269290-0056-2879: hyp=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAVE', 'TO', 'BE', 'PUNISHED'] +8188-269290-0057-2880: ref=['I', 'WILL', 'TELL', 'HER'] +8188-269290-0057-2880: hyp=['I', 'WILL', 'TELL', 'HER'] +8188-274364-0000-2811: ref=['THE', 
'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEIR', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0000-2811: hyp=['THE', 'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEY', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0001-2812: ref=['IN', 'THE', 'GOVERNMENT', 'OF', 'IRELAND', 'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTIVE', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0001-2812: hyp=['IN', 'THE', 'GOVERNMENT', 'OF', 'IRELAND', 'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTED', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0002-2813: ref=['THE', 'CASE', 'OF', 'LORD', 'MOUNTNORRIS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0002-2813: hyp=['THE', 'CASE', 'OF', 'LORD', 'MONTORAS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0003-2814: ref=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHIEF', 'OFFICERS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPITAL', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0003-2814: hyp=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHIE', 'OFFICIALS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPT', 'ON', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0004-2815: ref=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHOULD', 'DISCOVER', 'IT'] +8188-274364-0004-2815: hyp=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHALL', 'DISCOVER', 'IT'] +8188-274364-0005-2816: ref=['IT', 'IS', 'NOW', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0005-2816: hyp=['IT', 'IS', 'NOW', 'A', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0006-2817: ref=['LET', 'US', 'NOT', 'TO', 'OUR', 'OWN', 'DESTRUCTION', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 'OF', 'OLD', 'RECORDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WALL', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0006-2817: hyp=['LET', 'US', 'NOT', 'TO', 'UNDERSTRUCTION', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 'OF', 'OLD', 'RECORDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WAR', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0007-2818: ref=['HOWEVER', 
'THESE', 'GENTLEMEN', 'AT', 'THE', 'BAR', 'SAY', 'THEY', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH'] +8188-274364-0007-2818: hyp=['HERBERT', 'THESE', 'GENTLEMAN', 'AT', 'THE', 'BAR', 'SO', 'THEY', 'SPEAK', 'FOR', 'THE', 'CORNWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOUR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'CORNWEALTH'] +8188-274364-0008-2819: ref=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0008-2819: hyp=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0009-2820: ref=['YOUNG', 'VANE', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 'COMMUNICATED', 'IT', 'TO', 'PYM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0009-2820: hyp=['YOUNG', 'VAIN', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 'COMMUNICATED', 'IT', 'TO', 'POEM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0010-2821: ref=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENSIVE', 'WAR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0010-2821: hyp=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENCE', 'OF', 'WAR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0011-2822: ref=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSOLVED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8188-274364-0011-2822: hyp=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSORBED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8280-266249-0000-339: ref=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0000-339: hyp=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0001-340: ref=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0001-340: hyp=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0002-341: ref=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 
'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0002-341: hyp=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0003-342: ref=['AT', 'LENGTH', 'THE', 'LAND', 'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'AROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0003-342: hyp=['AT', 'LENGTH', 'THE', 'LAND', 'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'ROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0004-343: ref=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0004-343: hyp=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0005-344: ref=['BESIDE', 'OURSELVES', 'ADDED', 'COUSIN', 'RONALD', 'LAUGHING'] +8280-266249-0005-344: hyp=['BESIDES', 'OURSELVES', 'ADDED', 'COUSIN', 'RANALD', 'LAUGHING'] +8280-266249-0006-345: ref=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0006-345: hyp=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0007-346: ref=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HAROLD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0007-346: hyp=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HERALD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0008-347: ref=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0008-347: hyp=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0009-348: ref=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0009-348: hyp=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0010-349: ref=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0010-349: hyp=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0011-350: ref=['I', 'DO', 'INDEED', 'THOUGH', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0011-350: hyp=['I', 'DO', 'INDEED', 'THE', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0012-351: ref=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] 
+8280-266249-0012-351: hyp=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] +8280-266249-0013-352: ref=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMEN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0013-352: hyp=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMAN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0014-353: ref=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 'PASSENGERS'] +8280-266249-0014-353: hyp=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ALL', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 'PASSENGERS'] +8280-266249-0015-354: ref=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACT', 'OF', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'A', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'A', 'GRAIN', 'PICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0015-354: hyp=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACTED', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'THE', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'THE', 'GRAIN', 'PRICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0016-355: ref=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'USE', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0016-355: hyp=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'YEARS', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0017-356: ref=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0017-356: hyp=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0018-357: ref=['THAT', 'THE', 'FRENCH', 'POLYTECHNIC', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'ON', 'THE', 'MIND'] +8280-266249-0018-357: hyp=['THAT', 'THE', 'FRENCH', 'POLY', 'TECHNIC', 'AT', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'THE', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'UPON', 'THE', 'MIND'] +8280-266249-0019-358: ref=['NOTICE', 'THE', 'MULTITUDE', 'OF', 'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0019-358: hyp=['NOTICED', 'THE', 'MULTITUDE', 'OF', 'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0020-359: ref=['IN', 'A', 'SMALL', 'COUNTRY', 
'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0020-359: hyp=['AND', 'A', 'SMALL', 'COUNTRY', 'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0021-360: ref=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0021-360: hyp=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0022-361: ref=['HENCE', 'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0022-361: hyp=['HENCE', 'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0023-362: ref=['NO', 'SIR', 'WHAT', 'KNOW', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0023-362: hyp=['NO', 'SIR', 'WHAT', 'NO', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0024-363: ref=['FOR', 'YE', 'ARE', 'BOUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0024-363: hyp=['FOR', 'YOU', 'ARE', 'BROUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0025-364: ref=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0025-364: hyp=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0026-365: ref=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'HOLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0026-365: hyp=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'WHOLLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0027-366: ref=['IT', 'MUST', 'REQUIRE', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALY'] +8280-266249-0027-366: hyp=['IT', 'MUST', 'REQUIRE', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALEY'] +8280-266249-0028-367: ref=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LILBURN', 'BUT', 'IF', 'THY', 'RIGHT', 'EYE', 'OFFEND', 
'THEE', 'PLUCK', 'IT', 'OUT', 'AND', 'CAST', 'IT', 'FROM', 'THEE', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0028-367: hyp=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LOWBOURNE', 'BUT', 'IF', 'THY', 'RIGHT', 'I', 'OFFEND', 'THEE', 'PLUCK', 'IT', 'UP', 'AND', 'CAST', 'IT', 'FROM', 'ME', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0029-368: ref=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0029-368: hyp=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0030-369: ref=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THAT', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0030-369: hyp=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THE', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0031-370: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0031-370: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0032-371: ref=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0032-371: hyp=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0033-372: ref=['AND', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TO'] +8280-266249-0033-372: hyp=['AS', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TOO'] +8280-266249-0034-373: ref=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0034-373: hyp=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0035-374: ref=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'IN', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'ROSE', 'HASTILY', 'HIS', 'FACE', 'DEATHLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 'HURRYING', 'INTO', 'HIS', 'STATE', 'ROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0035-374: hyp=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'IN', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'AROSE', 'HASTILY', 'HIS', 'FACE', 'DEFTLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 
'HURRYING', 'TO', 'HIS', 'STATEROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0036-375: ref=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0036-375: hyp=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0037-376: ref=['A', 'VENTRILOQUIST', 'ABOARD', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0037-376: hyp=['A', 'VENTILLA', 'QUESTED', 'BORN', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0038-377: ref=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0038-377: hyp=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0039-378: ref=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACKLEG', 'AND', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0039-378: hyp=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACK', 'LAG', 'IN', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0040-379: ref=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NONE', 'UP', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0040-379: hyp=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NO', 'NOT', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0041-380: ref=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0041-380: hyp=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0042-381: ref=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0042-381: hyp=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0043-382: ref=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0043-382: hyp=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0044-383: ref=['I', 'DINKS', 'NO', 'I', 'DINKS', 'I', 'DEACH', 'YOU', 'VON', 'LESSON', 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0044-383: hyp=['I', 'DENZ', 'NO', 'I', 'THINK', 'I', 'DID', 'YOU', 'FUN', 'LESSON', 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0045-384: ref=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SLUNK', 'AWAY', 'WITH', 'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0045-384: hyp=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SUNK', 'AWAY', 'WITH', 
'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0046-385: ref=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALY', 'EACH', 'AT', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', "WARD'S", 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0046-385: hyp=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALEY', 'EACH', 'HAD', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', 'WORDS', 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0047-386: ref=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSION'] +8280-266249-0047-386: hyp=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSION'] +8280-266249-0048-387: ref=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 'MIDNIGHT'] +8280-266249-0048-387: hyp=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 'MIDNIGHT'] +8280-266249-0049-388: ref=['THEN', 'MISTER', 'LILBURN', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0049-388: hyp=['THEN', 'MISTER', 'LOWBORNE', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0050-389: ref=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'THE', 'STAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0050-389: hyp=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'MISTAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0051-390: ref=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREENBACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0051-390: hyp=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREEN', 'BACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0052-391: ref=['A', 'DEEP', 'GROAN', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0052-391: hyp=['A', 'DEEP', 'GROUND', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0053-392: ref=['BUT', 'ALL', 'WAS', 'SILENT', 'AND', 'AFTER', 'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 
'SHAKE', 'OFF', 'THEIR', 'FEARS', 'WITH', 'A', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0053-392: hyp=['BUT', 'ALWAYS', 'SILENT', 'AND', 'AFTER', 'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 'SHAKE', 'OFF', 'THEIR', 'FEARS', 'FOR', 'THE', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0054-393: ref=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'WARD', 'LOOK', "WHAT'S", 'THERE', 'LOOK', 'YOURSELF'] +8280-266249-0054-393: hyp=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'TOWARD', 'LOOK', "WHAT'S", 'THERE', 'LOOKED', 'YOURSELF'] +8280-266249-0055-394: ref=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0055-394: hyp=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0056-395: ref=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0056-395: hyp=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0057-396: ref=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 'TRIP'] +8280-266249-0057-396: hyp=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 'TRIP'] +8280-266249-0058-397: ref=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0058-397: hyp=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0059-398: ref=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DALYS', 'WERE', 'IN', 'VERY', 'STRAITENED', 'CIRCUMSTANCES'] +8280-266249-0059-398: hyp=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DAILIES', 'RAN', 'VERY', 'STRAIGHT', 'AND', 'CIRCUMSTANCES'] +8280-266249-0060-399: ref=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0060-399: hyp=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALEY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0061-400: ref=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0061-400: hyp=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0062-401: ref=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ART', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0062-401: hyp=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ARE', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0063-402: ref=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0063-402: hyp=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 'IN', 
'CHRIST', 'JESUS'] +8280-266249-0064-403: ref=['YE', 'ARE', 'ALL', 'ONE', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0064-403: hyp=['YE', 'ARE', 'ALL', 'ONE', 'AND', 'CHRIST', 'JESUS'] +8280-266249-0065-404: ref=['WE', 'FEEL', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'THAT', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] +8280-266249-0065-404: hyp=['WE', 'SEE', 'ON', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YOU', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] +8461-258277-0000-1649: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0000-1649: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0001-1650: ref=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MUST', 'I', 'HAVE', 'ZAYNAB', 'ALSO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0001-1650: hyp=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MY', 'STY', 'HAVE', 'THY', 'NABBS', 'SO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0002-1651: ref=['THE', 'KNOCKER', 'REPLIED', 'KAMAR', 'DAUGHTER', 'OF', 'AZARIAH', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALI', 'OF', 'CAIRO', 'WITH', 'YOU'] +8461-258277-0002-1651: hyp=['THE', 'KNOCKER', 'REPLIED', 'COME', 'ON', 'DAUGHTER', 'VAZARRE', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALI', 'OF', 'CAIRO', 'WITH', 'YOU'] +8461-258277-0003-1652: ref=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0003-1652: hyp=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0004-1653: ref=['AND', 'HAVING', 'THUS', 'ISLAMISED', 'SHE', 'ASKED', 'HIM', 'DO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'AL', 'ISLAM', 'GIVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'DO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0004-1653: hyp=['AND', 'HAVING', 'THUS', 'ISLAMMISED', 'SHE', 'ASKED', 'HIM', 'TWO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'ALICELA', 'GAVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'TWO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0005-1654: ref=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0005-1654: hyp=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0006-1655: ref=['NOW', 'THE', 'CAUSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0006-1655: hyp=['NOW', 'THE', 'COURSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0007-1656: ref=['THEN', 'HE', 'SET', 'OUT', 'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0007-1656: hyp=['THEN', 'HE', 'SAT', 'DOWN', 'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: ref=['SO', 'HE', 'ATE', 'AND', 
'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BHANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'HIM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: hyp=['SO', 'HE', 'ATE', 'AND', 'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'THEM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTE'] +8461-258277-0009-1658: ref=['PRESENTLY', 'HASAN', 'SHUMAN', 'CAME', 'OUT', 'OF', 'A', 'CLOSET', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0009-1658: hyp=['PRESENTLY', 'HER', 'SON', 'SCHUMANN', 'CAME', 'OUT', 'OF', 'A', 'CLOSE', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0010-1659: ref=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 'I', 'KNOW', 'WHITHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0010-1659: hyp=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 'I', 'KNOW', 'WHETHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0011-1660: ref=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0011-1660: hyp=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0012-1661: ref=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'SELLER', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0012-1661: hyp=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'CELLAR', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0013-1662: ref=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'SELLER', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCK', 'IN', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'DINARS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0013-1662: hyp=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'CELLAR', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCKING', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'HOURS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0014-1663: ref=['QUOTH', 'AL', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0014-1663: hyp=['QUOTH', 'A', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0015-1664: ref=['SO', 'ALI', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'HAD', 'PASSED', 'FROM', 'FIRST', 'TO', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0015-1664: hyp=['SO', 'ALI', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'THAT', 'PASSED', 'FROM', 'FIRST', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0016-1665: ref=['HE', 'REPLIED', 'I', 'HAVE', 'FORTY', 'LADS', 'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-258277-0016-1665: hyp=['HE', 'REPLIED', 'I', 'HAVE', 'FORTY', 'LADS', 
'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-278226-0000-1633: ref=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0000-1633: hyp=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0001-1634: ref=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'HER', 'TALENTS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SERMON', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 'DEBATED', 'THE', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', "JOUVIN'S", 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SINFUL', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PARDONABLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0001-1634: hyp=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'ITALIANS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SAME', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 'DEBATED', 'A', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', 'JUBANCE', 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SENT', 'FOR', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PARDONABLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0002-1635: ref=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMIZE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0002-1635: hyp=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMISE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0003-1636: ref=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0003-1636: hyp=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0004-1637: ref=['I', 'REMEMBER', 'ALL', 'THE', 'RUBENSES', 'AT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0004-1637: hyp=['I', 'REMEMBER', 'ALL', 'THE', 'REUBEN', 'SAYS', 'THAT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0005-1638: ref=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'PEARL', 'GREY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 'AZURE', 'BUTTERFLIES'] +8461-278226-0005-1638: hyp=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'PER', 'GRAY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 'AZURE', 'BUTTERFLIES'] +8461-278226-0006-1639: ref=['IT', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 
'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0006-1639: hyp=['HE', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0007-1640: ref=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0007-1640: hyp=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0008-1641: ref=['PHILIP', 'JOCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THIS', 'DISCOVERY'] +8461-278226-0008-1641: hyp=['PHILIP', 'JOCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THE', 'DISCOVERY'] +8461-278226-0009-1642: ref=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0009-1642: hyp=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0010-1643: ref=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'AT', "JOCELYN'S", 'ROCK'] +8461-278226-0010-1643: hyp=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'A', "JOCELYN'S", 'ROCK'] +8461-278226-0011-1644: ref=['SHE', 'TURNED', 'TO', 'THE', 'FRENCH', 'ARTIST', 'PRESENTLY', 'AND', 'ASKED', 'HIM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'KERSTALL', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0011-1644: hyp=['SHE', 'TURNED', 'TO', 'THE', 'FRENCHARD', 'THIS', 'PRESENTLY', 'AND', 'ASKED', 'THEM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'COASTON', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0012-1645: ref=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0012-1645: hyp=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0013-1646: ref=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'IS', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0013-1646: hyp=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0014-1647: ref=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 'THE', 'HOUSE'] +8461-278226-0014-1647: hyp=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 'THE', 
'HOUSE'] +8461-278226-0015-1648: ref=['YOU', 'WILL', 'BE', 'DOING', 'ME', 'SUCH', 'A', 'FAVOUR', 'PHILIP', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-278226-0015-1648: hyp=['YOU', 'WERE', 'BETWEEN', 'ME', 'SUCH', 'A', 'FAVOUR', 'FELLOW', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-281231-0000-1594: ref=['HIS', 'FOLLOWERS', 'RUSHED', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THEIR', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'KNIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THEIR', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0000-1594: hyp=['HIS', 'FOLLOWERS', 'RUSH', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THEIR', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'NIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THE', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0001-1595: ref=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'CEDRIC', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE'] +8461-281231-0001-1595: hyp=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'SEDRRICK', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'COUNCIL', 'OF', 'TORCHLESTONE'] +8461-281231-0002-1596: ref=['AS', 'HE', 'LAY', 'UPON', 'HIS', 'BED', 'RACKED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'THE', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0002-1596: hyp=['I', 'SEE', 'LAY', 'UPON', 'HIS', 'BED', 'WRAPPED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0003-1597: ref=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0003-1597: hyp=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0004-1598: ref=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WITCH', 'ULRICA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'TASTE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0004-1598: hyp=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WHICH', 'OIKA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'CASE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0005-1599: ref=['EXCLAIMED', 'THE', 'NORMAN', 'HO'] +8461-281231-0005-1599: hyp=['EXCLAIMED', 'THE', 'NORMAN', 'OH'] +8461-281231-0006-1600: ref=['REMEMBEREST', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'IS', 'STORED', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0006-1600: hyp=['REMEMBER', 'AS', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'HIS', 'STORE', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0007-1601: ref=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'ULRICA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TO', 'WARN', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0007-1601: hyp=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'EUREKA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TOWARD', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0008-1602: ref=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 
'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0008-1602: hyp=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0009-1603: ref=['THE', 'DEFENDERS', 'FINDING', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DEARLY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 'BY', 'DE', 'BRACY', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0009-1603: hyp=['THE', 'DEFENDERS', 'FIND', 'IN', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DAILY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 'BY', 'THE', 'BRACES', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0010-1604: ref=['THE', 'BLACK', 'KNIGHT', 'WITH', 'PORTENTOUS', 'STRENGTH', 'FORCED', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'DE', 'BRACY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0010-1604: hyp=['THE', 'BLACK', 'NIGHT', 'WITH', 'POTENTI', 'FORCE', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'THE', 'BRAZY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0011-1605: ref=['TWO', 'OF', 'THE', 'FOREMOST', 'INSTANTLY', 'FELL', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THEIR', 'LEADERS', 'EFFORTS', 'TO', 'STOP', 'THEM'] +8461-281231-0011-1605: hyp=['TWO', 'OF', 'THE', 'FOREMOST', 'THING', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THE', "LEADER'S", 'EFFORTS', 'TO', 'STOP', 'THEM'] +8461-281231-0012-1606: ref=['THE', 'BLACK', 'KNIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'THE', 'VAULTED', 'ROOF', 'OF', 'THE', 'HALL', 'RUNG', 'WITH', 'THEIR', 'FURIOUS', 'BLOWS'] +8461-281231-0012-1606: hyp=['THE', 'BLACK', 'NIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'DEVOTED', 'ROOF', 'OF', 'THE', 'HALL', 'RANG', 'WITH', 'THE', 'FURIOUS', 'BLOWS'] +8461-281231-0013-1607: ref=['AT', 'LENGTH', 'DE', 'BRACY', 'FELL'] +8461-281231-0013-1607: hyp=['AT', 'LENGTH', 'THE', 'BRACEY', 'FELL'] +8461-281231-0014-1608: ref=['TELL', 'ME', 'THY', 'NAME', 'OR', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0014-1608: hyp=['TELL', 'ME', 'THY', 'NAME', 'A', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0015-1609: ref=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DE', 'BRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0015-1609: hyp=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DEBRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0016-1610: ref=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0016-1610: hyp=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0017-1611: ref=['THE', 'LIFE', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0017-1611: hyp=['THE', 'LIFE', 'OF', 
'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0018-1612: ref=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'HIM', 'TO', 'THE', 'POSTERN', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YEOMEN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THE', 'OTHER', 'PRISONERS'] +8461-281231-0018-1612: hyp=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'THEM', 'TO', 'THE', 'PASSING', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YOUNG', 'MEN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THEIR', 'PRISONERS'] +8461-281231-0019-1613: ref=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 'SATIATED', 'IN', 'THEIR', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FRONT', 'DE', 'BOEUF'] +8461-281231-0019-1613: hyp=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 'SATIATED', 'IN', 'THE', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FROM', 'DE', 'BOEUF'] +8461-281231-0020-1614: ref=['AS', 'THE', 'FIRE', 'COMMENCED', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'ULRICA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0020-1614: hyp=['AS', 'THE', 'FIRE', 'COMMANDS', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'OR', 'RICHA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0021-1615: ref=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'HAD', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'ONE', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEN', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWER', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0021-1615: hyp=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'THAT', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'WHEN', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEMED', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWERED', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0022-1616: ref=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TURRET', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'THE', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0022-1616: hyp=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TOWER', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0023-1617: ref=['WHEN', 'THE', 'OUTLAWS', 'HAD', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE', 'CEDRIC', 'PREPARED', 'TO', 'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0023-1617: hyp=['WHEN', 'THE', 'OUTLAWS', 'ARE', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 
'THE', 'CASTLE', 'OF', 'TORCHLESTONE', 'CEDRIC', 'PREPARED', 'TO', 'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0024-1618: ref=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONINGSBURGH', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 'THE', 'BODY', 'OF', 'ATHELSTANE', 'WAS', 'LAID'] +8461-281231-0024-1618: hyp=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONIGSBURG', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 'THE', 'BODY', 'OF', 'OTHERSTEIN', 'WAS', 'LAID'] +8461-281231-0025-1619: ref=['DE', 'BRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOOD'] +8461-281231-0025-1619: hyp=['DEBRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOODS'] +8461-281231-0026-1620: ref=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0026-1620: hyp=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0027-1621: ref=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0027-1621: hyp=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0028-1622: ref=['DURING', 'ALL', 'THIS', 'TIME', 'ISAAC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'DEARLY', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0028-1622: hyp=['DURING', 'ALL', 'THIS', 'TIME', 'MISERC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'STAIRLY', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0029-1623: ref=['AND', 'WITH', 'THIS', 'EPISTLE', 'THE', 'UNHAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0029-1623: hyp=['AND', 'WITH', 'THIS', 'EPISTLE', 'THEN', 'HAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0030-1624: ref=['THE', 'TEMPLAR', 'IS', 'FLED', 'SAID', 'DE', 'BRACY', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FRONT', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'RICHARD', 'IS', 'IN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0030-1624: hyp=['THE', 'TEMPLAR', 'IS', 'FLED', 'SAID', 'THE', 'BRAZY', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FROM', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 
'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'WRETCHED', 'IS', 'AN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0031-1625: ref=['HE', 'APPEALED', 'TO', 'DE', 'BRACY', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "KNIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0031-1625: hyp=['HE', 'APPEARED', 'TO', 'THE', 'BRACELET', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "NIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0032-1626: ref=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'TOLD', 'THAT', 'LUCAS', 'DE', 'BEAUMANOIR', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] +8461-281231-0032-1626: hyp=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'TOLD', 'THAT', 'LUCAS', 'THE', 'BURMANOIS', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] +8461-281231-0033-1627: ref=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'OF', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0033-1627: hyp=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'TO', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0034-1628: ref=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0034-1628: hyp=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0035-1629: ref=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'HER', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0035-1629: hyp=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'HER', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0036-1630: ref=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0036-1630: hyp=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0037-1631: ref=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 'A', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'IT', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0037-1631: 
hyp=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 'THE', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0038-1632: ref=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THEIR', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] +8461-281231-0038-1632: hyp=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THE', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] diff --git a/log/fast_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..b48ca9b30a56338006fd8f6d198c039894a54f65 --- /dev/null +++ b/log/fast_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +beam_20.0_max_contexts_4_max_states_8 3.57 diff --git a/log/fast_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt b/log/fast_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..4bfffb5a1eca2dce612892df2d591e493ed6ebbb --- /dev/null +++ b/log/fast_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-beam-20.0-max-contexts-4-max-states-8-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +beam_20.0_max_contexts_4_max_states_8 9.05 diff --git a/log/greedy_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..22518f05b089edec01079efa772fb4cab1e59dfd --- /dev/null +++ b/log/greedy_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,12871 @@ +%WER = 3.58 +Errors: 211 insertions, 163 deletions, 1507 substitutions, over 52576 reference words (50906 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1089-134686-0000-1733: HE HOPED THERE WOULD BE STEW FOR DINNER TURNIPS AND CARROTS AND BRUISED POTATOES AND FAT MUTTON PIECES TO BE LADLED OUT IN THICK PEPPERED (FLOUR->FLOWER) FATTENED SAUCE +1089-134686-0001-1734: STUFF IT INTO YOU HIS BELLY COUNSELLED HIM +1089-134686-0002-1735: AFTER EARLY (NIGHTFALL->NIGHT FALL) THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS +1089-134686-0003-1736: HELLO BERTIE ANY GOOD IN YOUR MIND +1089-134686-0004-1737: NUMBER TEN FRESH (NELLY->NELLIE) IS WAITING ON YOU GOOD NIGHT HUSBAND +1089-134686-0005-1738: THE MUSIC CAME NEARER AND HE RECALLED THE WORDS THE WORDS OF SHELLEY'S FRAGMENT UPON THE MOON WANDERING COMPANIONLESS PALE FOR WEARINESS +1089-134686-0006-1739: THE DULL LIGHT 
FELL MORE FAINTLY UPON THE PAGE WHEREON ANOTHER EQUATION BEGAN TO UNFOLD ITSELF SLOWLY AND TO SPREAD ABROAD ITS WIDENING TAIL +1089-134686-0007-1740: A COLD LUCID INDIFFERENCE REIGNED IN HIS SOUL +1089-134686-0008-1741: THE CHAOS IN WHICH HIS (ARDOUR->ARDOR) EXTINGUISHED ITSELF WAS A COLD INDIFFERENT KNOWLEDGE OF HIMSELF +1089-134686-0009-1742: AT MOST BY AN ALMS GIVEN TO A BEGGAR WHOSE BLESSING HE FLED FROM HE MIGHT HOPE WEARILY TO WIN FOR HIMSELF SOME MEASURE OF ACTUAL GRACE +1089-134686-0010-1743: WELL NOW ENNIS I DECLARE YOU HAVE A HEAD AND SO HAS MY STICK +1089-134686-0011-1744: ON SATURDAY MORNINGS WHEN THE (SODALITY->SODELITY) MET IN THE CHAPEL TO RECITE THE LITTLE OFFICE HIS PLACE WAS A CUSHIONED KNEELING DESK AT THE RIGHT OF THE ALTAR FROM WHICH HE LED HIS WING OF BOYS THROUGH THE RESPONSES +1089-134686-0012-1745: HER EYES SEEMED TO REGARD HIM WITH MILD PITY HER HOLINESS A STRANGE LIGHT GLOWING FAINTLY UPON HER FRAIL FLESH DID NOT HUMILIATE THE SINNER WHO APPROACHED HER +1089-134686-0013-1746: IF EVER HE WAS IMPELLED TO CAST SIN FROM HIM AND TO REPENT THE IMPULSE THAT MOVED HIM WAS THE WISH TO BE HER KNIGHT +1089-134686-0014-1747: HE TRIED TO THINK HOW IT COULD BE +1089-134686-0015-1748: BUT THE DUSK DEEPENING IN THE SCHOOLROOM COVERED OVER HIS THOUGHTS THE BELL RANG +1089-134686-0016-1749: THEN YOU CAN ASK HIM QUESTIONS ON THE CATECHISM (DEDALUS->DEEDOLUS) +1089-134686-0017-1750: STEPHEN LEANING BACK AND DRAWING IDLY ON HIS SCRIBBLER LISTENED TO THE TALK ABOUT HIM WHICH HERON CHECKED FROM TIME TO TIME BY SAYING +1089-134686-0018-1751: IT WAS STRANGE TOO THAT HE FOUND AN ARID PLEASURE IN FOLLOWING UP TO THE END THE RIGID LINES OF THE DOCTRINES OF THE CHURCH AND PENETRATING INTO OBSCURE SILENCES ONLY TO HEAR AND FEEL THE MORE DEEPLY HIS OWN CONDEMNATION +1089-134686-0019-1752: THE SENTENCE OF SAINT JAMES WHICH SAYS THAT HE WHO OFFENDS AGAINST ONE COMMANDMENT BECOMES GUILTY OF ALL HAD SEEMED TO HIM FIRST A SWOLLEN PHRASE UNTIL HE HAD BEGUN TO GROPE IN THE DARKNESS OF HIS OWN STATE +1089-134686-0020-1753: IF A MAN HAD STOLEN A POUND IN HIS YOUTH AND HAD USED THAT POUND TO (AMASS->A MASS) A HUGE FORTUNE HOW MUCH WAS HE OBLIGED TO GIVE BACK THE POUND HE HAD STOLEN ONLY OR THE POUND TOGETHER WITH THE COMPOUND INTEREST ACCRUING UPON IT OR ALL HIS HUGE FORTUNE +1089-134686-0021-1754: IF A LAYMAN IN GIVING BAPTISM POUR THE WATER BEFORE SAYING THE WORDS IS THE CHILD BAPTIZED +1089-134686-0022-1755: HOW COMES IT THAT WHILE THE FIRST BEATITUDE PROMISES THE KINGDOM OF HEAVEN TO THE POOR OF HEART THE SECOND BEATITUDE PROMISES ALSO TO THE MEEK THAT THEY SHALL POSSESS THE LAND +1089-134686-0023-1756: WHY WAS THE SACRAMENT OF THE EUCHARIST INSTITUTED UNDER THE TWO SPECIES OF BREAD AND WINE IF JESUS CHRIST BE PRESENT BODY AND BLOOD SOUL AND DIVINITY IN THE BREAD ALONE AND IN THE WINE ALONE +1089-134686-0024-1757: IF THE WINE CHANGE INTO VINEGAR AND THE HOST CRUMBLE INTO CORRUPTION AFTER THEY HAVE BEEN CONSECRATED IS JESUS CHRIST STILL PRESENT UNDER THEIR SPECIES AS GOD AND AS MAN +1089-134686-0025-1758: A GENTLE KICK FROM THE TALL BOY IN THE BENCH BEHIND URGED STEPHEN TO ASK A DIFFICULT QUESTION +1089-134686-0026-1759: THE RECTOR DID NOT ASK FOR A CATECHISM TO HEAR THE LESSON FROM +1089-134686-0027-1760: HE CLASPED HIS HANDS ON THE DESK AND SAID +1089-134686-0028-1761: THE RETREAT WILL BEGIN ON WEDNESDAY AFTERNOON IN (HONOUR->HONOR) OF SAINT FRANCIS (XAVIER->ZAVIOUR) WHOSE FEAST DAY IS SATURDAY +1089-134686-0029-1762: ON FRIDAY CONFESSION WILL BE HEARD ALL THE AFTERNOON AFTER BEADS 
+1089-134686-0030-1763: BEWARE OF MAKING THAT MISTAKE +1089-134686-0031-1764: STEPHEN'S HEART BEGAN SLOWLY TO FOLD AND FADE WITH FEAR LIKE A WITHERING FLOWER +1089-134686-0032-1765: HE (IS->HAS) CALLED AS YOU KNOW THE APOSTLE OF THE INDIES +1089-134686-0033-1766: A GREAT SAINT SAINT FRANCIS (XAVIER->ZAVIER) +1089-134686-0034-1767: THE RECTOR PAUSED AND THEN SHAKING HIS CLASPED HANDS BEFORE HIM WENT ON +1089-134686-0035-1768: HE HAD THE FAITH IN HIM THAT MOVES MOUNTAINS +1089-134686-0036-1769: A GREAT SAINT SAINT FRANCIS (XAVIER->ZAVIER) +1089-134686-0037-1770: IN THE SILENCE THEIR DARK FIRE KINDLED THE DUSK INTO A TAWNY GLOW +1089-134691-0000-1707: HE COULD WAIT NO LONGER +1089-134691-0001-1708: FOR A FULL HOUR HE HAD PACED UP AND DOWN WAITING BUT HE COULD WAIT NO LONGER +1089-134691-0002-1709: HE SET OFF ABRUPTLY FOR THE BULL WALKING RAPIDLY LEST HIS FATHER'S SHRILL WHISTLE MIGHT CALL HIM BACK AND IN A FEW MOMENTS HE HAD ROUNDED THE CURVE AT THE POLICE BARRACK AND WAS SAFE +1089-134691-0003-1710: THE UNIVERSITY +1089-134691-0004-1711: PRIDE AFTER SATISFACTION UPLIFTED HIM LIKE LONG SLOW WAVES +1089-134691-0005-1712: WHOSE FEET ARE AS THE FEET OF (HARTS->HEARTS) AND UNDERNEATH THE EVERLASTING ARMS +1089-134691-0006-1713: THE PRIDE OF THAT DIM IMAGE BROUGHT BACK TO HIS MIND THE DIGNITY OF THE OFFICE HE HAD REFUSED +1089-134691-0007-1714: SOON THE WHOLE BRIDGE WAS TREMBLING AND RESOUNDING +1089-134691-0008-1715: THE UNCOUTH FACES PASSED HIM TWO BY TWO STAINED YELLOW OR RED OR LIVID BY THE SEA AND AS HE STROVE TO LOOK AT THEM WITH EASE AND INDIFFERENCE A FAINT STAIN OF PERSONAL SHAME AND COMMISERATION ROSE TO HIS OWN FACE +1089-134691-0009-1716: ANGRY WITH HIMSELF HE TRIED TO HIDE HIS FACE FROM THEIR EYES BY GAZING DOWN SIDEWAYS INTO THE SHALLOW SWIRLING WATER UNDER THE BRIDGE BUT HE STILL SAW A REFLECTION THEREIN OF THEIR TOP HEAVY SILK HATS AND HUMBLE TAPE LIKE COLLARS AND LOOSELY HANGING CLERICAL CLOTHES BROTHER HICKEY +1089-134691-0010-1717: BROTHER (MAC ARDLE->MICARTLE) BROTHER (KEOGH->KIEV) +1089-134691-0011-1718: THEIR PIETY WOULD BE LIKE THEIR NAMES LIKE THEIR FACES LIKE THEIR CLOTHES AND (IT->*) WAS IDLE FOR HIM TO TELL HIMSELF THAT THEIR HUMBLE AND CONTRITE HEARTS IT MIGHT BE PAID A FAR RICHER TRIBUTE OF DEVOTION THAN HIS HAD EVER BEEN A GIFT TENFOLD MORE ACCEPTABLE THAN HIS ELABORATE ADORATION +1089-134691-0012-1719: IT WAS IDLE FOR HIM TO MOVE HIMSELF TO BE GENEROUS TOWARDS THEM TO TELL HIMSELF THAT IF HE EVER CAME TO THEIR GATES STRIPPED OF HIS PRIDE BEATEN AND IN BEGGAR'S WEEDS THAT THEY WOULD BE GENEROUS TOWARDS HIM LOVING HIM AS THEMSELVES +1089-134691-0013-1720: IDLE AND EMBITTERING FINALLY TO ARGUE AGAINST HIS OWN DISPASSIONATE CERTITUDE THAT THE COMMANDMENT OF LOVE BADE US NOT TO LOVE OUR NEIGHBOUR AS OURSELVES WITH THE SAME AMOUNT AND INTENSITY OF LOVE BUT TO LOVE HIM AS OURSELVES WITH THE SAME KIND OF LOVE +1089-134691-0014-1721: THE PHRASE AND THE DAY AND THE SCENE HARMONIZED IN (A CHORD->ACCORD) +1089-134691-0015-1722: WORDS WAS IT THEIR (COLOURS->COLORS) +1089-134691-0016-1723: THEY WERE VOYAGING ACROSS THE DESERTS OF THE SKY A HOST OF NOMADS ON THE MARCH VOYAGING HIGH OVER IRELAND WESTWARD BOUND +1089-134691-0017-1724: THE EUROPE THEY HAD COME FROM LAY OUT THERE BEYOND THE IRISH SEA EUROPE OF STRANGE TONGUES AND VALLEYED AND (WOODBEGIRT->WOOD BEGIRT) AND (CITADELLED->CITADELED) AND OF ENTRENCHED AND (MARSHALLED->MARSHALED) RACES +1089-134691-0018-1725: AGAIN AGAIN +1089-134691-0019-1726: A VOICE FROM BEYOND THE WORLD WAS CALLING +1089-134691-0020-1727: (HELLO->HALLO) 
STEPHANOS HERE COMES THE (DEDALUS->DAEDALUS) +1089-134691-0021-1728: THEIR DIVING STONE POISED ON ITS RUDE SUPPORTS AND ROCKING UNDER THEIR PLUNGES AND THE ROUGH HEWN STONES OF THE SLOPING BREAKWATER OVER WHICH THEY SCRAMBLED IN THEIR (HORSEPLAY->HORSE PLAY) GLEAMED WITH COLD WET LUSTRE +1089-134691-0022-1729: HE STOOD STILL IN DEFERENCE TO THEIR CALLS AND PARRIED THEIR BANTER WITH EASY WORDS +1089-134691-0023-1730: IT WAS A PAIN TO SEE THEM AND A SWORD LIKE PAIN TO SEE THE SIGNS OF ADOLESCENCE THAT MADE REPELLENT THEIR PITIABLE NAKEDNESS +1089-134691-0024-1731: STEPHANOS (DEDALOS->DELOS) +1089-134691-0025-1732: A MOMENT BEFORE THE GHOST OF THE ANCIENT KINGDOM OF THE DANES HAD LOOKED FORTH THROUGH THE VESTURE OF THE (HAZEWRAPPED->HAYES WRAPPED) CITY +1188-133604-0000-1771: YOU WILL FIND ME CONTINUALLY SPEAKING OF FOUR MEN TITIAN (HOLBEIN->HOLBINE) TURNER AND (TINTORET->TINTARETTE) IN ALMOST THE SAME TERMS +1188-133604-0001-1772: THEY UNITE EVERY QUALITY AND SOMETIMES YOU WILL FIND ME REFERRING TO THEM AS COLORISTS SOMETIMES AS (CHIAROSCURISTS->KIERUSCURISTS) +1188-133604-0002-1773: BY BEING STUDIOUS OF (COLOR->COLOUR) THEY ARE STUDIOUS OF DIVISION AND WHILE THE (CHIAROSCURIST->CUIRASCURISTS) DEVOTES HIMSELF TO THE REPRESENTATION OF DEGREES OF FORCE IN ONE THING UNSEPARATED LIGHT THE COLORISTS HAVE FOR THEIR FUNCTION THE ATTAINMENT OF BEAUTY BY ARRANGEMENT OF THE DIVISIONS OF LIGHT +1188-133604-0003-1774: MY FIRST AND PRINCIPAL REASON WAS THAT THEY ENFORCED BEYOND ALL RESISTANCE ON ANY STUDENT WHO MIGHT ATTEMPT TO COPY THEM THIS METHOD OF LAYING PORTIONS OF DISTINCT HUE SIDE BY SIDE +1188-133604-0004-1775: SOME OF THE TOUCHES INDEED WHEN THE TINT HAS BEEN MIXED WITH MUCH WATER HAVE BEEN LAID IN LITTLE DROPS OR PONDS SO THAT THE PIGMENT MIGHT CRYSTALLIZE HARD AT THE EDGE +1188-133604-0005-1776: IT IS THE HEAD OF A PARROT WITH A LITTLE FLOWER IN HIS BEAK FROM A PICTURE OF (CARPACCIO'S->CARPATIUS) ONE OF HIS SERIES OF THE LIFE OF SAINT GEORGE +1188-133604-0006-1777: THEN HE COMES TO THE BEAK OF IT +1188-133604-0007-1778: THE BROWN GROUND BENEATH IS LEFT FOR THE MOST PART ONE TOUCH OF BLACK IS PUT FOR THE HOLLOW (TWO->TOO) DELICATE LINES OF DARK (GRAY->GREY) DEFINE THE OUTER CURVE AND ONE LITTLE QUIVERING TOUCH OF WHITE DRAWS THE INNER EDGE OF THE MANDIBLE +1188-133604-0008-1779: FOR BELIEVE ME THE FINAL PHILOSOPHY OF ART CAN ONLY RATIFY THEIR OPINION THAT THE BEAUTY OF A COCK ROBIN IS TO BE (RED->READ) AND OF A GRASS PLOT TO BE GREEN AND THE BEST SKILL OF ART IS (IN->AN) INSTANTLY SEIZING ON THE MANIFOLD DELICIOUSNESS OF LIGHT WHICH YOU CAN ONLY SEIZE BY PRECISION OF INSTANTANEOUS TOUCH +1188-133604-0009-1780: NOW YOU WILL SEE IN THESE STUDIES THAT THE MOMENT THE WHITE IS (INCLOSED->ENCLOSED) PROPERLY AND (HARMONIZED->HARMONIZE) WITH THE OTHER HUES IT BECOMES SOMEHOW MORE PRECIOUS AND PEARLY THAN THE WHITE PAPER AND THAT I AM NOT AFRAID TO LEAVE A WHOLE FIELD OF UNTREATED WHITE PAPER ALL ROUND IT BEING SURE THAT EVEN THE LITTLE DIAMONDS IN THE ROUND WINDOW WILL TELL AS JEWELS IF THEY ARE GRADATED JUSTLY +1188-133604-0010-1781: BUT IN THIS (VIGNETTE->VINEYARD) COPIED FROM TURNER YOU HAVE THE TWO PRINCIPLES BROUGHT OUT PERFECTLY +1188-133604-0011-1782: THEY ARE BEYOND ALL OTHER WORKS (THAT->THAN) I KNOW EXISTING DEPENDENT FOR THEIR EFFECT ON LOW SUBDUED TONES THEIR (FAVORITE->FAVOURITE) CHOICE IN TIME OF DAY BEING EITHER DAWN OR TWILIGHT AND EVEN THEIR BRIGHTEST SUNSETS PRODUCED CHIEFLY OUT OF GRAY PAPER +1188-133604-0012-1783: IT MAY BE THAT A GREAT (COLORIST->COLORLESS) WILL USE HIS UTMOST 
FORCE OF COLOR AS A SINGER HIS FULL POWER OF VOICE BUT LOUD OR LOW THE VIRTUE IS IN BOTH CASES ALWAYS IN REFINEMENT NEVER IN LOUDNESS +1188-133604-0013-1784: IT MUST REMEMBER BE ONE OR THE OTHER +1188-133604-0014-1785: DO NOT THEREFORE THINK THAT THE GOTHIC (SCHOOL IS->SCHOOLS) AN EASY ONE +1188-133604-0015-1786: THE LAW OF THAT SCHOOL IS THAT EVERYTHING SHALL BE SEEN CLEARLY OR AT LEAST ONLY IN SUCH MIST OR FAINTNESS AS SHALL BE DELIGHTFUL AND I HAVE NO DOUBT THAT THE BEST INTRODUCTION TO IT WOULD BE THE ELEMENTARY PRACTICE OF PAINTING EVERY STUDY ON A GOLDEN GROUND +1188-133604-0016-1787: THIS AT ONCE COMPELS YOU TO UNDERSTAND THAT THE WORK IS TO BE IMAGINATIVE AND DECORATIVE THAT IT REPRESENTS BEAUTIFUL THINGS IN THE CLEAREST WAY BUT NOT UNDER EXISTING CONDITIONS AND THAT IN FACT YOU ARE PRODUCING (JEWELER'S->JEWELLERS) WORK RATHER THAN PICTURES +1188-133604-0017-1788: THAT A STYLE (IS->WAS) RESTRAINED OR SEVERE DOES NOT MEAN THAT IT IS ALSO ERRONEOUS +1188-133604-0018-1789: IN ALL EARLY GOTHIC ART INDEED YOU WILL FIND FAILURE OF THIS KIND ESPECIALLY DISTORTION AND RIGIDITY WHICH ARE IN MANY RESPECTS PAINFULLY TO BE COMPARED WITH THE SPLENDID REPOSE OF CLASSIC ART +1188-133604-0019-1790: THE LARGE LETTER CONTAINS INDEED ENTIRELY FEEBLE AND ILL DRAWN FIGURES THAT IS MERELY CHILDISH (AND->IN) FAILING WORK OF AN INFERIOR HAND IT IS NOT CHARACTERISTIC OF GOTHIC OR ANY OTHER SCHOOL +1188-133604-0020-1791: BUT OBSERVE YOU CAN ONLY DO THIS ON ONE CONDITION THAT OF STRIVING ALSO TO CREATE IN REALITY THE BEAUTY WHICH YOU SEEK IN IMAGINATION +1188-133604-0021-1792: IT WILL BE WHOLLY IMPOSSIBLE FOR YOU TO RETAIN THE TRANQUILLITY OF TEMPER AND FELICITY OF FAITH NECESSARY FOR NOBLE (PURIST->PUREST) PAINTING UNLESS YOU ARE ACTIVELY ENGAGED IN PROMOTING THE FELICITY AND PEACE OF PRACTICAL LIFE +1188-133604-0022-1793: YOU MUST LOOK AT HIM IN THE FACE FIGHT HIM CONQUER HIM WITH WHAT (SCATHE->SCATH) YOU MAY YOU NEED NOT THINK TO KEEP OUT OF THE WAY OF HIM +1188-133604-0023-1794: THE (COLORIST->CHOLERIST) SAYS FIRST OF ALL AS MY DELICIOUS (PAROQUET->PARRIQUET) WAS RUBY SO THIS NASTY VIPER SHALL BE BLACK AND THEN (IS->AS) THE QUESTION CAN I ROUND HIM OFF EVEN THOUGH HE IS BLACK AND MAKE HIM SLIMY AND YET SPRINGY AND CLOSE DOWN CLOTTED LIKE A POOL OF BLACK BLOOD ON THE EARTH ALL THE SAME +1188-133604-0024-1795: NOTHING WILL BE MORE PRECIOUS TO YOU I THINK IN THE PRACTICAL STUDY OF ART THAN THE CONVICTION WHICH WILL FORCE ITSELF ON YOU MORE AND MORE EVERY HOUR OF THE WAY ALL THINGS ARE BOUND TOGETHER LITTLE AND GREAT IN SPIRIT AND IN MATTER +1188-133604-0025-1796: YOU KNOW I HAVE JUST BEEN TELLING YOU HOW THIS SCHOOL OF MATERIALISM AND CLAY INVOLVED ITSELF AT LAST IN CLOUD AND FIRE +1188-133604-0026-1797: HERE IS AN EQUALLY TYPICAL GREEK SCHOOL LANDSCAPE BY WILSON LOST WHOLLY IN GOLDEN MIST THE TREES SO SLIGHTLY DRAWN THAT YOU DON'T KNOW IF THEY ARE TREES OR TOWERS AND NO CARE FOR COLOR (WHATEVER->WHATSOEVER) PERFECTLY DECEPTIVE AND (MARVELOUS->MARVELLOUS) EFFECT OF SUNSHINE THROUGH THE MIST APOLLO (AND->IN) THE PYTHON +1188-133604-0027-1798: NOW HERE IS RAPHAEL EXACTLY BETWEEN THE TWO TREES STILL DRAWN LEAF BY LEAF WHOLLY FORMAL BUT BEAUTIFUL MIST COMING GRADUALLY INTO THE DISTANCE +1188-133604-0028-1799: WELL THEN LAST HERE IS TURNER'S GREEK SCHOOL OF THE HIGHEST CLASS AND YOU DEFINE HIS ART ABSOLUTELY AS FIRST THE DISPLAYING INTENSELY AND WITH THE STERNEST INTELLECT OF NATURAL FORM AS IT IS AND THEN THE ENVELOPMENT OF IT WITH CLOUD AND FIRE +1188-133604-0029-1800: ONLY THERE ARE TWO SORTS OF CLOUD 
(AND->IN) FIRE +1188-133604-0030-1801: HE KNOWS THEM BOTH +1188-133604-0031-1802: THERE'S ONE AND THERE'S ANOTHER THE DUDLEY AND THE FLINT +1188-133604-0032-1803: IT IS ONLY A PENCIL OUTLINE BY EDWARD BURNE JONES IN ILLUSTRATION OF THE STORY OF PSYCHE IT IS THE INTRODUCTION OF PSYCHE AFTER ALL HER TROUBLES INTO HEAVEN +1188-133604-0033-1804: EVERY PLANT IN THE GRASS IS SET FORMALLY GROWS PERFECTLY AND MAY BE REALIZED COMPLETELY +1188-133604-0034-1805: EXQUISITE ORDER AND UNIVERSAL WITH ETERNAL LIFE AND LIGHT THIS IS THE FAITH AND EFFORT OF THE SCHOOLS OF (CRYSTAL->CRISTEL) AND YOU MAY DESCRIBE AND COMPLETE THEIR WORK QUITE LITERALLY BY TAKING ANY VERSES OF CHAUCER IN HIS TENDER MOOD AND OBSERVING HOW HE INSISTS ON THE CLEARNESS AND BRIGHTNESS FIRST AND THEN ON THE ORDER +1188-133604-0035-1806: THUS IN CHAUCER'S DREAM +1188-133604-0036-1807: IN BOTH THESE HIGH MYTHICAL SUBJECTS THE SURROUNDING NATURE THOUGH SUFFERING IS STILL DIGNIFIED AND BEAUTIFUL +1188-133604-0037-1808: EVERY LINE IN WHICH THE MASTER TRACES IT EVEN WHERE SEEMINGLY NEGLIGENT IS LOVELY AND SET DOWN WITH A MEDITATIVE CALMNESS WHICH MAKES THESE TWO ETCHINGS CAPABLE OF BEING PLACED BESIDE THE MOST TRANQUIL WORK OF (HOLBEIN->HOLBINE) OR (DUERER->DURE) +1188-133604-0038-1809: BUT NOW HERE IS A SUBJECT OF WHICH YOU WILL WONDER AT FIRST WHY TURNER DREW IT AT ALL +1188-133604-0039-1810: IT HAS NO BEAUTY WHATSOEVER NO SPECIALTY OF PICTURESQUENESS (AND->IN) ALL ITS LINES ARE CRAMPED AND POOR +1188-133604-0040-1811: THE CRAMPNESS (AND->IN) THE POVERTY ARE ALL INTENDED +1188-133604-0041-1812: IT IS A GLEANER BRINGING DOWN HER ONE SHEAF OF CORN TO AN OLD (WATERMILL->WATER MILL) ITSELF MOSSY AND RENT SCARCELY ABLE TO GET ITS STONES TO TURN +1188-133604-0042-1813: THE SCENE IS ABSOLUTELY ARCADIAN +1188-133604-0043-1814: SEE THAT YOUR LIVES BE IN NOTHING WORSE THAN A BOY'S CLIMBING FOR HIS ENTANGLED KITE +1188-133604-0044-1815: IT WILL BE WELL FOR YOU IF YOU JOIN NOT WITH THOSE WHO INSTEAD OF KITES FLY FALCONS WHO INSTEAD OF OBEYING THE LAST WORDS OF THE GREAT CLOUD SHEPHERD TO FEED HIS SHEEP LIVE THE LIVES HOW MUCH LESS THAN VANITY OF THE WAR WOLF (AND->IN) THE (GIER->GEAR) EAGLE +121-121726-0000-2558: ALSO A POPULAR CONTRIVANCE WHEREBY LOVE MAKING MAY BE SUSPENDED BUT NOT STOPPED DURING THE PICNIC SEASON +121-121726-0001-2559: (HARANGUE->HURRY) THE TIRESOME PRODUCT OF A TIRELESS TONGUE +121-121726-0002-2560: ANGOR PAIN PAINFUL TO HEAR +121-121726-0003-2561: (HAY->HEY) FEVER A (HEART->HARD) TROUBLE CAUSED BY FALLING IN LOVE WITH A GRASS WIDOW +121-121726-0004-2562: HEAVEN A GOOD PLACE TO BE RAISED TO +121-121726-0005-2563: HEDGE A FENCE +121-121726-0006-2564: HEREDITY THE CAUSE OF ALL OUR FAULTS +121-121726-0007-2565: HORSE SENSE A DEGREE OF WISDOM THAT KEEPS ONE FROM BETTING ON THE RACES +121-121726-0008-2566: HOSE MAN'S EXCUSE FOR WETTING THE WALK +121-121726-0009-2567: HOTEL A PLACE WHERE A GUEST OFTEN GIVES UP GOOD DOLLARS FOR POOR QUARTERS +121-121726-0010-2568: (HOUSECLEANING->HOUSE CLEANING) A DOMESTIC UPHEAVAL THAT MAKES IT EASY FOR THE GOVERNMENT TO ENLIST ALL THE SOLDIERS IT NEEDS +121-121726-0011-2569: HUSBAND THE NEXT THING TO A WIFE +121-121726-0012-2570: HUSSY WOMAN AND BOND TIE +121-121726-0013-2571: TIED TO A WOMAN +121-121726-0014-2572: HYPOCRITE A HORSE DEALER +121-123852-0000-2615: THOSE PRETTY WRONGS THAT LIBERTY COMMITS WHEN I AM (SOMETIME->SOME TIME) ABSENT FROM THY HEART THY BEAUTY AND THY YEARS FULL WELL BEFITS FOR STILL TEMPTATION FOLLOWS WHERE THOU ART +121-123852-0001-2616: (AY->I) ME +121-123852-0002-2617: NO 
MATTER THEN ALTHOUGH MY FOOT DID STAND UPON THE FARTHEST EARTH (REMOV'D->REMOVED) FROM THEE FOR NIMBLE THOUGHT CAN JUMP BOTH SEA AND LAND AS SOON AS THINK THE PLACE WHERE HE WOULD BE BUT AH +121-123852-0003-2618: THOUGHT KILLS ME THAT I AM NOT (THOUGHT->BOUGHT) TO LEAP LARGE LENGTHS OF MILES WHEN THOU ART GONE BUT THAT SO MUCH OF EARTH AND WATER WROUGHT I MUST ATTEND TIME'S LEISURE WITH MY MOAN RECEIVING (NOUGHT->NAUGHT) BY ELEMENTS SO SLOW BUT HEAVY TEARS (BADGES->BADGERS) OF EITHER'S WOE +121-123852-0004-2619: MY HEART DOTH PLEAD THAT THOU IN HIM DOST LIE A CLOSET NEVER (PIERC'D->PIERCED) WITH CRYSTAL EYES BUT THE DEFENDANT DOTH THAT (PLEA->PLEAD) DENY AND SAYS IN HIM THY FAIR APPEARANCE LIES +121-123859-0000-2573: YOU ARE MY ALL THE WORLD AND I MUST STRIVE TO KNOW MY SHAMES AND PRAISES FROM YOUR TONGUE NONE ELSE TO ME NOR I TO NONE ALIVE THAT MY (STEEL'D SENSE->STEELED SCENTS) OR CHANGES RIGHT OR WRONG +121-123859-0001-2574: (O->OH) TIS THE FIRST TIS FLATTERY IN MY SEEING AND MY GREAT MIND MOST KINGLY DRINKS IT UP MINE EYE WELL KNOWS WHAT WITH HIS GUST IS (GREEING->GREEN) AND TO HIS (PALATE->PALLET) DOTH PREPARE THE CUP IF IT BE (POISON'D->POISONED) TIS THE LESSER SIN THAT MINE EYE LOVES IT AND DOTH FIRST BEGIN +121-123859-0002-2575: BUT RECKONING TIME WHOSE (MILLION'D->MILLIONED) ACCIDENTS CREEP IN TWIXT VOWS AND CHANGE DECREES OF KINGS TAN SACRED BEAUTY BLUNT THE (SHARP'ST INTENTS->SHARPEST INTENSE) DIVERT STRONG MINDS TO THE COURSE OF ALTERING THINGS ALAS WHY FEARING OF TIME'S TYRANNY MIGHT I NOT THEN SAY NOW I LOVE YOU BEST WHEN I WAS CERTAIN (O'ER INCERTAINTY->OR IN CERTAINTY) CROWNING THE PRESENT DOUBTING OF THE REST +121-123859-0003-2576: LOVE IS A BABE THEN MIGHT I NOT SAY SO TO GIVE FULL GROWTH TO THAT WHICH STILL DOTH GROW +121-123859-0004-2577: SO I (RETURN REBUK'D->RETURNED REBUKED) TO MY CONTENT AND GAIN BY ILL THRICE MORE THAN I HAVE SPENT +121-127105-0000-2578: IT WAS THIS OBSERVATION THAT DREW FROM DOUGLAS NOT IMMEDIATELY BUT LATER IN THE EVENING A REPLY THAT HAD THE INTERESTING CONSEQUENCE TO WHICH I CALL ATTENTION +121-127105-0001-2579: (SOMEONE->SOME ONE) ELSE TOLD A STORY NOT PARTICULARLY EFFECTIVE WHICH I SAW HE WAS NOT FOLLOWING +121-127105-0002-2580: CRIED ONE OF THE WOMEN HE TOOK NO NOTICE OF HER HE LOOKED AT ME BUT AS IF INSTEAD OF ME HE SAW WHAT HE SPOKE OF +121-127105-0003-2581: THERE WAS A UNANIMOUS GROAN AT THIS AND MUCH REPROACH AFTER WHICH IN HIS PREOCCUPIED WAY HE EXPLAINED +121-127105-0004-2582: THE (STORY'S->STORIES) WRITTEN +121-127105-0005-2583: I COULD WRITE TO MY MAN AND ENCLOSE THE KEY HE COULD SEND DOWN THE PACKET AS HE FINDS IT +121-127105-0006-2584: THE OTHERS RESENTED POSTPONEMENT BUT IT WAS JUST HIS SCRUPLES THAT CHARMED ME +121-127105-0007-2585: TO THIS HIS ANSWER WAS PROMPT (OH->O) THANK GOD NO AND IS THE RECORD YOURS +121-127105-0008-2586: HE HUNG FIRE AGAIN A WOMAN'S +121-127105-0009-2587: SHE HAS BEEN DEAD THESE TWENTY YEARS +121-127105-0010-2588: SHE SENT ME THE PAGES IN QUESTION BEFORE SHE DIED +121-127105-0011-2589: SHE WAS THE MOST AGREEABLE WOMAN I'VE EVER KNOWN IN HER POSITION SHE WOULD HAVE BEEN WORTHY OF ANY WHATEVER +121-127105-0012-2590: (IT->*) WASN'T SIMPLY THAT SHE SAID SO BUT THAT I KNEW SHE HADN'T I WAS SURE I COULD SEE +121-127105-0013-2591: YOU'LL EASILY JUDGE WHY WHEN YOU HEAR BECAUSE THE THING HAD BEEN SUCH A SCARE HE CONTINUED TO FIX ME +121-127105-0014-2592: YOU ARE ACUTE +121-127105-0015-2593: HE QUITTED THE FIRE AND DROPPED BACK INTO HIS CHAIR +121-127105-0016-2594: PROBABLY NOT TILL THE SECOND POST 
+121-127105-0017-2595: IT WAS ALMOST THE TONE OF HOPE EVERYBODY WILL STAY +121-127105-0018-2596: CRIED THE LADIES WHOSE DEPARTURE HAD BEEN FIXED +121-127105-0019-2597: MISSUS GRIFFIN HOWEVER EXPRESSED THE NEED FOR A LITTLE MORE LIGHT +121-127105-0020-2598: WHO WAS IT SHE WAS IN LOVE WITH THE STORY WILL TELL I TOOK UPON MYSELF TO REPLY OH I CAN'T WAIT FOR THE STORY THE STORY WON'T TELL SAID DOUGLAS NOT IN ANY LITERAL VULGAR WAY (MORE'S->NOR'S) THE PITY THEN +121-127105-0021-2599: WON'T YOU TELL DOUGLAS +121-127105-0022-2600: (WELL->FOR) IF I DON'T KNOW WHO SHE WAS IN LOVE WITH I KNOW WHO HE WAS +121-127105-0023-2601: LET ME SAY HERE DISTINCTLY TO HAVE DONE WITH IT THAT THIS NARRATIVE FROM AN EXACT TRANSCRIPT OF MY OWN MADE MUCH LATER IS WHAT I SHALL PRESENTLY GIVE +121-127105-0024-2602: POOR DOUGLAS BEFORE HIS DEATH WHEN IT WAS IN SIGHT COMMITTED TO ME THE MANUSCRIPT THAT REACHED HIM ON THE THIRD OF THESE DAYS AND THAT ON THE SAME SPOT WITH IMMENSE EFFECT HE BEGAN TO READ TO OUR HUSHED LITTLE CIRCLE ON THE NIGHT OF THE FOURTH +121-127105-0025-2603: THE DEPARTING LADIES WHO HAD SAID THEY WOULD STAY DIDN'T OF COURSE THANK HEAVEN STAY THEY DEPARTED IN CONSEQUENCE OF ARRANGEMENTS MADE IN A RAGE OF CURIOSITY AS THEY PROFESSED PRODUCED BY THE TOUCHES WITH WHICH HE HAD ALREADY WORKED US UP +121-127105-0026-2604: THE FIRST OF THESE TOUCHES CONVEYED THAT THE WRITTEN STATEMENT TOOK UP THE TALE AT A POINT AFTER IT HAD IN A MANNER BEGUN +121-127105-0027-2605: HE HAD FOR HIS OWN TOWN RESIDENCE A BIG HOUSE FILLED WITH THE SPOILS OF TRAVEL AND THE TROPHIES OF THE CHASE BUT IT WAS TO HIS COUNTRY HOME AN OLD FAMILY PLACE IN ESSEX THAT HE WISHED HER IMMEDIATELY TO PROCEED +121-127105-0028-2606: THE AWKWARD THING WAS THAT THEY HAD PRACTICALLY NO OTHER RELATIONS AND THAT HIS OWN AFFAIRS TOOK UP ALL HIS TIME +121-127105-0029-2607: THERE WERE PLENTY OF PEOPLE TO HELP BUT OF COURSE THE YOUNG LADY WHO SHOULD GO DOWN AS GOVERNESS WOULD BE IN SUPREME AUTHORITY +121-127105-0030-2608: I DON'T ANTICIPATE +121-127105-0031-2609: SHE WAS YOUNG UNTRIED NERVOUS IT WAS A VISION OF SERIOUS DUTIES AND LITTLE COMPANY OF REALLY GREAT LONELINESS +121-127105-0032-2610: YES BUT THAT'S JUST THE BEAUTY OF HER PASSION +121-127105-0033-2611: IT WAS THE BEAUTY OF IT +121-127105-0034-2612: IT SOUNDED DULL IT SOUNDED STRANGE AND ALL THE MORE SO BECAUSE OF HIS MAIN CONDITION WHICH WAS +121-127105-0035-2613: SHE PROMISED TO DO THIS AND SHE MENTIONED TO ME THAT WHEN FOR A MOMENT DISBURDENED DELIGHTED HE HELD HER HAND THANKING HER FOR THE SACRIFICE SHE ALREADY FELT REWARDED +121-127105-0036-2614: BUT WAS THAT ALL HER REWARD ONE OF THE LADIES ASKED +1221-135766-0000-1305: HOW STRANGE IT SEEMED TO THE SAD WOMAN AS SHE WATCHED THE GROWTH AND THE BEAUTY THAT BECAME EVERY DAY MORE BRILLIANT AND THE INTELLIGENCE THAT THREW ITS QUIVERING SUNSHINE OVER THE TINY FEATURES OF THIS CHILD +1221-135766-0001-1306: GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME (DISHONOURED->DISHONORED) BOSOM TO CONNECT HER PARENT (FOR EVER->FOREVER) WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN +1221-135766-0002-1307: YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION +1221-135766-0003-1308: THE CHILD HAD A NATIVE GRACE WHICH DOES NOT INVARIABLY (CO EXIST->COEXIST) WITH FAULTLESS BEAUTY ITS ATTIRE HOWEVER SIMPLE ALWAYS IMPRESSED THE BEHOLDER AS IF IT WERE THE VERY GARB THAT PRECISELY BECAME IT BEST +1221-135766-0004-1309: THIS OUTWARD 
MUTABILITY INDICATED AND DID NOT MORE THAN FAIRLY EXPRESS THE VARIOUS PROPERTIES OF HER INNER LIFE +1221-135766-0005-1310: HESTER COULD ONLY ACCOUNT FOR THE CHILD'S CHARACTER AND EVEN THEN MOST VAGUELY AND IMPERFECTLY BY RECALLING WHAT SHE HERSELF HAD BEEN DURING THAT MOMENTOUS PERIOD WHILE PEARL WAS IMBIBING HER SOUL FROM THE SPIRITUAL WORLD AND HER BODILY FRAME FROM ITS MATERIAL OF EARTH +1221-135766-0006-1311: THEY WERE NOW ILLUMINATED BY THE MORNING RADIANCE OF A YOUNG CHILD'S DISPOSITION BUT LATER IN THE DAY OF EARTHLY EXISTENCE MIGHT BE PROLIFIC OF THE STORM AND WHIRLWIND +1221-135766-0007-1312: HESTER PRYNNE NEVERTHELESS THE LOVING MOTHER OF THIS ONE CHILD RAN LITTLE RISK OF ERRING ON THE SIDE OF UNDUE SEVERITY +1221-135766-0008-1313: MINDFUL HOWEVER OF HER OWN ERRORS AND MISFORTUNES SHE EARLY SOUGHT TO IMPOSE A TENDER BUT STRICT CONTROL OVER THE INFANT IMMORTALITY THAT WAS COMMITTED TO HER CHARGE +1221-135766-0009-1314: AS TO ANY OTHER KIND OF DISCIPLINE WHETHER ADDRESSED TO HER MIND OR HEART LITTLE PEARL MIGHT OR MIGHT NOT BE WITHIN ITS REACH IN ACCORDANCE WITH THE CAPRICE THAT (RULED->ROLLED) THE MOMENT +1221-135766-0010-1315: IT WAS A LOOK SO INTELLIGENT YET INEXPLICABLE PERVERSE SOMETIMES SO MALICIOUS BUT GENERALLY ACCOMPANIED BY A WILD FLOW OF SPIRITS THAT HESTER COULD NOT HELP QUESTIONING AT SUCH MOMENTS WHETHER PEARL WAS A HUMAN CHILD +1221-135766-0011-1316: BEHOLDING IT HESTER WAS CONSTRAINED TO RUSH TOWARDS THE CHILD TO PURSUE THE LITTLE ELF IN THE FLIGHT WHICH SHE INVARIABLY BEGAN TO SNATCH HER TO HER BOSOM WITH A CLOSE PRESSURE AND EARNEST KISSES NOT SO MUCH FROM OVERFLOWING LOVE AS TO ASSURE HERSELF THAT PEARL WAS FLESH AND BLOOD AND NOT UTTERLY DELUSIVE +1221-135766-0012-1317: BROODING OVER ALL THESE MATTERS THE MOTHER FELT LIKE ONE WHO HAS EVOKED A SPIRIT BUT BY SOME IRREGULARITY IN THE PROCESS OF CONJURATION HAS FAILED TO WIN THE MASTER WORD THAT SHOULD CONTROL THIS NEW AND INCOMPREHENSIBLE INTELLIGENCE +1221-135766-0013-1318: PEARL WAS A BORN OUTCAST OF THE (INFANTILE->INVENTILE) WORLD +1221-135766-0014-1319: PEARL SAW AND GAZED INTENTLY BUT NEVER SOUGHT TO MAKE ACQUAINTANCE +1221-135766-0015-1320: IF SPOKEN TO SHE WOULD NOT SPEAK AGAIN +1221-135767-0000-1280: HESTER PRYNNE WENT ONE DAY TO THE MANSION OF GOVERNOR BELLINGHAM WITH A PAIR OF GLOVES WHICH SHE HAD FRINGED AND EMBROIDERED TO HIS ORDER AND WHICH WERE TO BE WORN ON SOME GREAT OCCASION OF STATE FOR THOUGH THE CHANCES OF A POPULAR ELECTION HAD CAUSED THIS FORMER RULER TO DESCEND A STEP OR TWO FROM THE HIGHEST RANK HE STILL HELD AN (HONOURABLE->HONORABLE) AND INFLUENTIAL PLACE AMONG THE COLONIAL MAGISTRACY +1221-135767-0001-1281: ANOTHER AND FAR MORE IMPORTANT REASON THAN THE DELIVERY OF A PAIR OF EMBROIDERED GLOVES IMPELLED HESTER AT THIS TIME TO SEEK AN INTERVIEW WITH A PERSONAGE OF SO MUCH POWER AND ACTIVITY IN THE AFFAIRS OF THE SETTLEMENT +1221-135767-0002-1282: AT THAT EPOCH OF PRISTINE SIMPLICITY HOWEVER MATTERS OF EVEN SLIGHTER PUBLIC INTEREST AND OF FAR LESS INTRINSIC WEIGHT THAN THE WELFARE OF HESTER AND HER CHILD WERE STRANGELY MIXED UP WITH THE DELIBERATIONS OF LEGISLATORS AND ACTS OF STATE +1221-135767-0003-1283: THE PERIOD WAS HARDLY IF AT ALL EARLIER THAN THAT OF OUR STORY WHEN A DISPUTE CONCERNING THE RIGHT OF PROPERTY IN A PIG NOT ONLY CAUSED A FIERCE AND BITTER CONTEST IN THE LEGISLATIVE BODY OF THE COLONY BUT RESULTED IN AN IMPORTANT MODIFICATION OF THE FRAMEWORK ITSELF OF THE LEGISLATURE +1221-135767-0004-1284: WE HAVE SPOKEN OF PEARL'S RICH AND LUXURIANT BEAUTY A BEAUTY THAT SHONE WITH 
DEEP AND VIVID TINTS A BRIGHT COMPLEXION EYES POSSESSING INTENSITY BOTH OF DEPTH AND GLOW AND HAIR ALREADY OF A DEEP GLOSSY BROWN AND WHICH IN AFTER YEARS WOULD BE NEARLY AKIN TO BLACK +1221-135767-0005-1285: IT WAS THE SCARLET LETTER IN ANOTHER FORM THE SCARLET LETTER ENDOWED WITH LIFE +1221-135767-0006-1286: THE MOTHER HERSELF AS IF THE RED IGNOMINY WERE SO DEEPLY SCORCHED INTO HER BRAIN THAT ALL HER CONCEPTIONS ASSUMED ITS FORM HAD CAREFULLY WROUGHT OUT THE SIMILITUDE LAVISHING MANY HOURS OF MORBID INGENUITY TO CREATE AN ANALOGY BETWEEN THE OBJECT OF HER AFFECTION AND THE EMBLEM OF HER GUILT AND TORTURE +1221-135767-0007-1287: BUT IN TRUTH PEARL WAS THE ONE AS WELL AS THE OTHER AND ONLY IN CONSEQUENCE OF THAT IDENTITY HAD HESTER CONTRIVED SO PERFECTLY TO REPRESENT THE SCARLET LETTER IN HER APPEARANCE +1221-135767-0008-1288: COME THEREFORE AND LET US FLING MUD AT THEM +1221-135767-0009-1289: BUT PEARL WHO WAS A DAUNTLESS CHILD AFTER FROWNING STAMPING HER FOOT AND SHAKING HER LITTLE HAND WITH A VARIETY OF THREATENING GESTURES SUDDENLY MADE A RUSH AT THE KNOT OF HER ENEMIES AND PUT THEM ALL TO FLIGHT +1221-135767-0010-1290: SHE SCREAMED AND SHOUTED TOO WITH A TERRIFIC VOLUME OF SOUND WHICH DOUBTLESS CAUSED THE HEARTS OF THE FUGITIVES TO QUAKE WITHIN THEM +1221-135767-0011-1291: IT WAS FURTHER DECORATED WITH STRANGE AND SEEMINGLY CABALISTIC FIGURES AND DIAGRAMS SUITABLE TO THE QUAINT TASTE OF THE AGE WHICH HAD BEEN DRAWN IN THE STUCCO WHEN NEWLY LAID ON AND HAD NOW GROWN HARD AND DURABLE FOR THE ADMIRATION OF AFTER TIMES +1221-135767-0012-1292: THEY APPROACHED THE DOOR WHICH WAS OF AN ARCHED FORM AND FLANKED ON EACH SIDE BY A NARROW TOWER OR PROJECTION OF THE EDIFICE IN BOTH OF WHICH WERE LATTICE WINDOWS THE WOODEN SHUTTERS TO CLOSE OVER THEM AT NEED +1221-135767-0013-1293: LIFTING THE IRON HAMMER THAT HUNG AT THE PORTAL HESTER PRYNNE GAVE A SUMMONS WHICH WAS ANSWERED BY ONE OF THE GOVERNOR'S BOND (SERVANT->SERVANTS) A FREE BORN ENGLISHMAN BUT NOW A SEVEN YEARS SLAVE +1221-135767-0014-1294: YEA HIS HONOURABLE WORSHIP IS WITHIN BUT HE HATH A GODLY MINISTER OR TWO WITH HIM AND LIKEWISE A LEECH +1221-135767-0015-1295: YE MAY NOT SEE HIS WORSHIP NOW +1221-135767-0016-1296: WITH MANY VARIATIONS SUGGESTED BY THE NATURE OF HIS BUILDING MATERIALS DIVERSITY OF CLIMATE AND A DIFFERENT MODE OF SOCIAL LIFE GOVERNOR BELLINGHAM HAD PLANNED HIS NEW HABITATION AFTER THE RESIDENCES OF GENTLEMEN OF (FAIR ESTATE->FAIREST STATE) IN HIS NATIVE LAND +1221-135767-0017-1297: ON THE TABLE IN TOKEN THAT THE SENTIMENT OF OLD ENGLISH HOSPITALITY HAD NOT BEEN LEFT BEHIND STOOD A LARGE PEWTER TANKARD AT THE BOTTOM OF WHICH HAD HESTER OR PEARL PEEPED INTO IT THEY MIGHT HAVE SEEN THE FROTHY REMNANT OF A RECENT DRAUGHT OF ALE +1221-135767-0018-1298: LITTLE PEARL WHO WAS AS GREATLY PLEASED WITH THE GLEAMING ARMOUR AS SHE HAD BEEN WITH THE GLITTERING FRONTISPIECE OF THE HOUSE SPENT SOME TIME LOOKING INTO THE POLISHED MIRROR OF THE BREASTPLATE +1221-135767-0019-1299: MOTHER CRIED SHE I SEE YOU HERE LOOK (LOOK->*) +1221-135767-0020-1300: IN TRUTH SHE SEEMED ABSOLUTELY HIDDEN BEHIND IT +1221-135767-0021-1301: PEARL ACCORDINGLY RAN TO THE BOW WINDOW AT THE FURTHER END OF THE HALL AND LOOKED ALONG THE VISTA OF A GARDEN WALK CARPETED WITH CLOSELY SHAVEN GRASS AND BORDERED WITH SOME RUDE AND (IMMATURE->IMMATEUR) ATTEMPT AT SHRUBBERY +1221-135767-0022-1302: BUT THE PROPRIETOR APPEARED (ALREADY->ALL READY) TO HAVE RELINQUISHED AS HOPELESS THE EFFORT TO PERPETUATE ON THIS SIDE OF THE ATLANTIC IN A HARD SOIL AND AMID THE CLOSE STRUGGLE 
FOR SUBSISTENCE THE NATIVE ENGLISH TASTE FOR ORNAMENTAL GARDENING +1221-135767-0023-1303: THERE WERE A FEW ROSE BUSHES HOWEVER AND A NUMBER OF APPLE TREES PROBABLY THE DESCENDANTS OF THOSE PLANTED BY THE REVEREND MISTER BLACKSTONE THE FIRST SETTLER OF THE PENINSULA THAT HALF MYTHOLOGICAL PERSONAGE WHO RIDES THROUGH OUR EARLY ANNALS SEATED ON THE BACK OF A BULL +1221-135767-0024-1304: PEARL SEEING THE ROSE BUSHES BEGAN TO CRY FOR A RED ROSE AND WOULD NOT BE PACIFIED +1284-1180-0000-829: HE WORE BLUE SILK STOCKINGS BLUE (KNEE PANTS->KNEEP HANDS) WITH GOLD BUCKLES A BLUE RUFFLED WAIST AND A JACKET OF BRIGHT BLUE BRAIDED WITH GOLD +1284-1180-0001-830: HIS HAT HAD A PEAKED CROWN AND A FLAT BRIM AND AROUND THE BRIM WAS A ROW OF TINY GOLDEN BELLS THAT TINKLED WHEN HE MOVED +1284-1180-0002-831: INSTEAD OF SHOES THE OLD MAN WORE BOOTS WITH (TURNOVER->TURN OVER) TOPS AND HIS BLUE COAT HAD WIDE CUFFS OF GOLD BRAID +1284-1180-0003-832: FOR A LONG TIME HE HAD WISHED TO EXPLORE THE BEAUTIFUL LAND OF OZ IN WHICH THEY LIVED +1284-1180-0004-833: WHEN THEY WERE OUTSIDE UNC SIMPLY LATCHED THE DOOR AND STARTED UP THE PATH +1284-1180-0005-834: NO ONE WOULD DISTURB THEIR LITTLE HOUSE EVEN IF (ANYONE->ANY ONE) CAME SO FAR INTO THE THICK FOREST WHILE THEY WERE GONE +1284-1180-0006-835: AT THE FOOT OF THE MOUNTAIN THAT SEPARATED THE COUNTRY OF THE MUNCHKINS FROM THE COUNTRY OF THE GILLIKINS THE PATH DIVIDED +1284-1180-0007-836: HE KNEW IT WOULD TAKE THEM TO THE HOUSE OF THE CROOKED MAGICIAN WHOM HE HAD NEVER SEEN BUT WHO WAS (THEIR->THERE) NEAREST (NEIGHBOR->NEIGHBOUR) +1284-1180-0008-837: ALL THE MORNING THEY TRUDGED UP THE MOUNTAIN PATH AND AT (NOON UNC->NOONK) AND OJO SAT ON A FALLEN TREE TRUNK AND ATE THE LAST OF THE BREAD WHICH THE OLD MUNCHKIN HAD PLACED IN HIS POCKET +1284-1180-0009-838: THEN THEY STARTED ON AGAIN AND TWO HOURS LATER CAME IN SIGHT OF THE HOUSE OF DOCTOR PIPT +1284-1180-0010-839: UNC KNOCKED AT THE DOOR OF THE HOUSE AND A CHUBBY PLEASANT FACED WOMAN DRESSED ALL IN BLUE OPENED IT AND GREETED THE VISITORS WITH A SMILE +1284-1180-0011-840: I AM MY DEAR AND ALL STRANGERS ARE WELCOME TO MY HOME +1284-1180-0012-841: WE HAVE COME FROM A FAR LONELIER PLACE THAN THIS A LONELIER PLACE +1284-1180-0013-842: AND YOU MUST BE OJO THE UNLUCKY SHE ADDED +1284-1180-0014-843: OJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HIS LIFE +1284-1180-0015-844: WE ARE TRAVELING REPLIED OJO AND WE STOPPED AT YOUR HOUSE JUST TO REST AND REFRESH OURSELVES +1284-1180-0016-845: THE WOMAN SEEMED THOUGHTFUL +1284-1180-0017-846: AT ONE END STOOD A GREAT FIREPLACE IN WHICH A BLUE LOG WAS BLAZING WITH A BLUE FLAME AND OVER THE FIRE HUNG FOUR KETTLES IN A ROW ALL BUBBLING AND STEAMING AT A GREAT RATE +1284-1180-0018-847: IT TAKES ME SEVERAL YEARS TO MAKE THIS MAGIC POWDER BUT AT THIS MOMENT I AM PLEASED TO SAY IT IS NEARLY DONE YOU SEE I AM MAKING IT FOR MY GOOD WIFE MARGOLOTTE WHO WANTS TO USE SOME OF IT FOR A PURPOSE OF HER OWN +1284-1180-0019-848: YOU MUST KNOW SAID MARGOLOTTE WHEN THEY WERE ALL SEATED TOGETHER ON THE BROAD WINDOW SEAT THAT MY HUSBAND FOOLISHLY GAVE AWAY ALL THE POWDER OF LIFE HE FIRST MADE TO OLD (MOMBI->MUMBIE) THE WITCH WHO USED TO LIVE IN THE COUNTRY OF THE GILLIKINS TO THE NORTH OF HERE +1284-1180-0020-849: THE FIRST LOT WE TESTED ON OUR GLASS (CAT->HAT) WHICH NOT ONLY BEGAN TO LIVE BUT HAS LIVED EVER SINCE +1284-1180-0021-850: I THINK THE NEXT GLASS CAT THE MAGICIAN MAKES WILL HAVE NEITHER BRAINS NOR HEART FOR THEN IT WILL NOT OBJECT TO CATCHING MICE AND MAY PROVE OF SOME USE TO US +1284-1180-0022-851: I'M 
AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ +1284-1180-0023-852: YOU SEE I'VE LIVED ALL MY LIFE WITH UNC NUNKIE THE SILENT ONE AND THERE WAS NO ONE TO TELL ME ANYTHING +1284-1180-0024-853: THAT IS ONE REASON YOU ARE OJO THE UNLUCKY SAID THE WOMAN IN (A->*) SYMPATHETIC TONE +1284-1180-0025-854: I THINK I MUST SHOW YOU MY PATCHWORK GIRL SAID (MARGOLOTTE->MARGOLOT) LAUGHING AT THE BOY'S ASTONISHMENT FOR SHE IS RATHER DIFFICULT TO EXPLAIN +1284-1180-0026-855: BUT FIRST I WILL TELL YOU THAT FOR MANY YEARS I HAVE LONGED FOR A SERVANT TO HELP ME WITH THE HOUSEWORK AND TO (COOK->COPE) THE MEALS AND WASH THE DISHES +1284-1180-0027-856: YET THAT TASK WAS NOT SO EASY AS YOU MAY SUPPOSE +1284-1180-0028-857: A BED QUILT MADE OF PATCHES OF DIFFERENT KINDS AND (COLORS->COLLARS) OF CLOTH ALL NEATLY SEWED TOGETHER +1284-1180-0029-858: SOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE THE PATCHES AND COLORS ARE SO MIXED UP +1284-1180-0030-859: WHEN I FOUND IT I SAID TO MYSELF THAT IT WOULD DO NICELY FOR MY SERVANT GIRL FOR WHEN SHE WAS BROUGHT TO LIFE SHE WOULD NOT BE PROUD NOR HAUGHTY AS THE GLASS CAT IS FOR SUCH A DREADFUL MIXTURE OF (COLORS->COLOURS) WOULD DISCOURAGE HER FROM TRYING TO BE AS DIGNIFIED AS THE BLUE MUNCHKINS ARE +1284-1180-0031-860: AT THE EMERALD CITY WHERE OUR PRINCESS OZMA LIVES GREEN IS THE POPULAR COLOR +1284-1180-0032-861: I WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE WENT TO A TALL CUPBOARD AND THREW OPEN THE DOORS +1284-1181-0000-807: OJO EXAMINED THIS CURIOUS CONTRIVANCE WITH WONDER +1284-1181-0001-808: MARGOLOTTE HAD FIRST MADE THE GIRL'S FORM FROM THE PATCHWORK QUILT AND THEN SHE HAD DRESSED IT WITH A PATCHWORK SKIRT AND AN APRON WITH POCKETS IN IT USING THE SAME GAY MATERIAL THROUGHOUT +1284-1181-0002-809: THE HEAD OF THE PATCHWORK GIRL WAS THE MOST CURIOUS PART OF HER +1284-1181-0003-810: THE HAIR WAS OF BROWN YARN AND HUNG DOWN ON HER NECK IN SEVERAL NEAT BRAIDS +1284-1181-0004-811: GOLD IS THE MOST COMMON METAL IN THE LAND OF OZ AND IS USED FOR MANY PURPOSES BECAUSE IT IS SOFT AND PLIABLE +1284-1181-0005-812: NO I FORGOT ALL ABOUT THE BRAINS EXCLAIMED THE WOMAN +1284-1181-0006-813: WELL THAT MAY BE TRUE AGREED MARGOLOTTE BUT ON THE CONTRARY A SERVANT WITH TOO MUCH BRAINS IS SURE TO BECOME INDEPENDENT AND HIGH AND MIGHTY AND FEEL ABOVE HER WORK +1284-1181-0007-814: SHE POURED INTO THE DISH A QUANTITY FROM EACH OF THESE BOTTLES +1284-1181-0008-815: I THINK THAT WILL DO SHE CONTINUED FOR THE OTHER QUALITIES ARE NOT NEEDED IN A SERVANT +1284-1181-0009-816: SHE RAN TO HER HUSBAND'S SIDE AT ONCE AND HELPED HIM LIFT THE FOUR KETTLES FROM THE FIRE +1284-1181-0010-817: THEIR CONTENTS HAD ALL BOILED AWAY LEAVING IN THE BOTTOM OF EACH KETTLE A FEW GRAINS OF FINE WHITE POWDER +1284-1181-0011-818: VERY CAREFULLY THE MAGICIAN REMOVED THIS POWDER PLACING IT (ALL TOGETHER->ALTOGETHER) IN A GOLDEN DISH WHERE HE MIXED IT WITH A GOLDEN SPOON +1284-1181-0012-819: NO ONE SAW HIM DO THIS FOR ALL WERE LOOKING AT THE POWDER OF LIFE BUT SOON THE WOMAN REMEMBERED WHAT SHE HAD BEEN DOING AND CAME BACK TO THE CUPBOARD +1284-1181-0013-820: OJO BECAME A BIT UNEASY AT THIS FOR HE HAD ALREADY PUT QUITE A LOT OF THE CLEVERNESS POWDER IN THE DISH BUT HE DARED NOT INTERFERE AND SO HE COMFORTED HIMSELF WITH THE THOUGHT THAT ONE CANNOT HAVE TOO MUCH CLEVERNESS +1284-1181-0014-821: HE SELECTED A SMALL GOLD BOTTLE WITH A PEPPER BOX TOP SO THAT THE POWDER MIGHT BE SPRINKLED ON ANY OBJECT THROUGH THE SMALL HOLES +1284-1181-0015-822: MOST PEOPLE TALK TOO MUCH SO IT IS A RELIEF TO FIND ONE WHO TALKS TOO LITTLE 
+1284-1181-0016-823: I AM NOT ALLOWED TO PERFORM MAGIC EXCEPT FOR MY OWN AMUSEMENT HE TOLD HIS VISITORS AS HE LIGHTED A PIPE WITH A CROOKED STEM AND BEGAN TO SMOKE +1284-1181-0017-824: THE WIZARD OF OZ WHO USED TO BE A HUMBUG AND KNEW NO MAGIC AT ALL HAS BEEN TAKING LESSONS OF GLINDA AND I'M TOLD HE IS GETTING TO BE A PRETTY GOOD WIZARD BUT HE IS MERELY THE ASSISTANT OF THE GREAT SORCERESS +1284-1181-0018-825: IT TRULY IS ASSERTED THE MAGICIAN +1284-1181-0019-826: I NOW USE THEM AS ORNAMENTAL STATUARY IN MY GARDEN +1284-1181-0020-827: DEAR ME WHAT A (CHATTERBOX->CHATTER BOX) YOU'RE GETTING TO BE (UNC->UG) REMARKED THE MAGICIAN WHO WAS PLEASED WITH THE COMPLIMENT +1284-1181-0021-828: ASKED THE VOICE IN SCORNFUL ACCENTS +1284-134647-0000-862: THE GRATEFUL APPLAUSE OF THE CLERGY HAS CONSECRATED THE MEMORY OF A PRINCE WHO INDULGED THEIR PASSIONS AND PROMOTED THEIR INTEREST +1284-134647-0001-863: THE EDICT OF MILAN THE GREAT CHARTER OF TOLERATION HAD CONFIRMED TO EACH INDIVIDUAL OF THE ROMAN WORLD THE PRIVILEGE OF CHOOSING AND PROFESSING HIS OWN RELIGION +1284-134647-0002-864: BUT THIS INESTIMABLE PRIVILEGE WAS SOON VIOLATED WITH THE KNOWLEDGE OF TRUTH THE EMPEROR IMBIBED THE MAXIMS OF PERSECUTION AND THE (SECTS->SEX) WHICH DISSENTED FROM THE CATHOLIC CHURCH WERE AFFLICTED AND OPPRESSED BY THE TRIUMPH OF CHRISTIANITY +1284-134647-0003-865: (CONSTANTINE->KONSTANTINE) EASILY BELIEVED THAT THE HERETICS WHO PRESUMED TO DISPUTE HIS OPINIONS OR TO OPPOSE HIS COMMANDS WERE GUILTY OF THE MOST ABSURD AND CRIMINAL OBSTINACY AND THAT A SEASONABLE APPLICATION OF MODERATE SEVERITIES MIGHT SAVE THOSE UNHAPPY MEN FROM THE DANGER OF AN EVERLASTING CONDEMNATION +1284-134647-0004-866: SOME OF THE PENAL REGULATIONS WERE COPIED FROM THE EDICTS OF DIOCLETIAN AND THIS METHOD OF CONVERSION WAS APPLAUDED BY THE SAME BISHOPS WHO HAD (FELT->FELLED) THE HAND OF OPPRESSION AND PLEADED FOR THE RIGHTS OF HUMANITY +1284-134647-0005-867: THEY ASSERTED WITH CONFIDENCE AND ALMOST WITH EXULTATION THAT THE APOSTOLICAL SUCCESSION WAS INTERRUPTED THAT ALL THE BISHOPS OF EUROPE AND ASIA WERE INFECTED BY THE CONTAGION OF GUILT AND SCHISM AND THAT THE PREROGATIVES OF THE CATHOLIC CHURCH WERE CONFINED TO THE CHOSEN PORTION OF THE AFRICAN BELIEVERS WHO ALONE HAD PRESERVED INVIOLATE THE INTEGRITY OF THEIR FAITH AND DISCIPLINE +1284-134647-0006-868: BISHOPS VIRGINS AND EVEN SPOTLESS INFANTS WERE SUBJECTED TO THE DISGRACE OF A PUBLIC PENANCE BEFORE THEY COULD BE ADMITTED TO THE COMMUNION OF THE DONATISTS +1284-134647-0007-869: (PROSCRIBED->PRESCRIBED) BY THE CIVIL AND ECCLESIASTICAL POWERS OF THE EMPIRE THE (DONATISTS->DONATIST) STILL MAINTAINED IN SOME PROVINCES PARTICULARLY (IN NUMIDIA->INIMITIA) THEIR SUPERIOR NUMBERS AND FOUR HUNDRED BISHOPS ACKNOWLEDGED THE JURISDICTION OF THEIR PRIMATE +1320-122612-0000-120: SINCE THE PERIOD OF OUR TALE THE ACTIVE SPIRIT OF THE COUNTRY HAS SURROUNDED IT WITH A BELT OF RICH (AND THRIVING->ENTHRIBING) SETTLEMENTS THOUGH NONE BUT THE HUNTER OR THE SAVAGE IS EVER KNOWN EVEN NOW TO PENETRATE ITS WILD RECESSES +1320-122612-0001-121: THE DEWS WERE SUFFERED TO EXHALE AND THE SUN HAD DISPERSED THE MISTS AND WAS SHEDDING A STRONG AND CLEAR LIGHT IN THE FOREST WHEN THE (TRAVELERS->TRAVELLERS) RESUMED THEIR JOURNEY +1320-122612-0002-122: AFTER PROCEEDING A FEW MILES THE PROGRESS OF HAWKEYE WHO LED THE ADVANCE BECAME MORE DELIBERATE AND WATCHFUL +1320-122612-0003-123: HE OFTEN STOPPED TO EXAMINE THE TREES NOR DID HE CROSS A RIVULET WITHOUT ATTENTIVELY CONSIDERING THE QUANTITY THE VELOCITY AND THE COLOR OF ITS 
WATERS +1320-122612-0004-124: DISTRUSTING HIS OWN JUDGMENT HIS APPEALS TO THE OPINION OF CHINGACHGOOK WERE FREQUENT AND EARNEST +1320-122612-0005-125: YET HERE ARE WE WITHIN A SHORT RANGE OF THE (SCAROONS->SCARONS) AND NOT A SIGN OF A TRAIL HAVE WE CROSSED +1320-122612-0006-126: LET US RETRACE OUR STEPS AND EXAMINE AS WE GO WITH KEENER EYES +1320-122612-0007-127: (CHINGACHGOOK->CHINGACHOOK) HAD CAUGHT THE LOOK AND MOTIONING WITH HIS HAND HE BADE HIM SPEAK +1320-122612-0008-128: THE EYES OF THE WHOLE PARTY FOLLOWED THE UNEXPECTED MOVEMENT AND READ THEIR SUCCESS IN THE AIR OF TRIUMPH THAT THE YOUTH ASSUMED +1320-122612-0009-129: IT WOULD HAVE BEEN MORE WONDERFUL HAD HE SPOKEN WITHOUT A BIDDING +1320-122612-0010-130: SEE SAID UNCAS POINTING NORTH AND SOUTH AT THE EVIDENT MARKS OF THE BROAD TRAIL ON EITHER SIDE OF HIM THE DARK (HAIR->AIR) HAS GONE TOWARD THE FOREST +1320-122612-0011-131: IF A ROCK OR A RIVULET OR A BIT OF EARTH HARDER THAN COMMON SEVERED THE LINKS OF THE (CLEW->CLUE) THEY FOLLOWED THE TRUE EYE OF THE SCOUT RECOVERED THEM AT A DISTANCE AND SELDOM RENDERED THE DELAY OF A SINGLE MOMENT NECESSARY +1320-122612-0012-132: EXTINGUISHED BRANDS WERE LYING AROUND A SPRING THE OFFALS OF A DEER WERE SCATTERED ABOUT THE PLACE AND THE TREES BORE EVIDENT MARKS OF HAVING BEEN BROWSED BY THE HORSES +1320-122612-0013-133: A CIRCLE OF A FEW HUNDRED FEET IN CIRCUMFERENCE WAS DRAWN AND EACH OF THE PARTY TOOK A SEGMENT FOR HIS PORTION +1320-122612-0014-134: THE EXAMINATION HOWEVER RESULTED IN NO DISCOVERY +1320-122612-0015-135: THE WHOLE PARTY CROWDED TO THE SPOT WHERE UNCAS POINTED OUT THE IMPRESSION OF A MOCCASIN IN THE MOIST (ALLUVION->ALLUVIAN) +1320-122612-0016-136: RUN BACK UNCAS AND BRING ME THE SIZE OF THE SINGER'S FOOT +1320-122617-0000-78: NOTWITHSTANDING THE HIGH RESOLUTION OF HAWKEYE HE FULLY COMPREHENDED ALL THE DIFFICULTIES AND DANGER HE WAS ABOUT TO INCUR +1320-122617-0001-79: IN HIS RETURN TO THE CAMP HIS ACUTE AND PRACTISED INTELLECTS WERE INTENTLY ENGAGED IN DEVISING MEANS TO COUNTERACT A WATCHFULNESS AND SUSPICION ON THE PART OF HIS ENEMIES THAT HE KNEW WERE IN NO DEGREE INFERIOR TO HIS OWN +1320-122617-0002-80: IN OTHER WORDS WHILE HE HAD IMPLICIT FAITH IN THE ABILITY OF (BALAAM'S->BAYLIM'S) ASS TO SPEAK HE WAS SOMEWHAT (SKEPTICAL->SCEPTICAL) ON THE SUBJECT OF A BEAR'S SINGING AND YET HE HAD BEEN ASSURED OF THE LATTER ON THE TESTIMONY OF HIS OWN EXQUISITE ORGANS +1320-122617-0003-81: THERE WAS SOMETHING IN HIS AIR AND MANNER THAT BETRAYED TO THE SCOUT THE UTTER CONFUSION OF THE STATE OF HIS MIND +1320-122617-0004-82: THE INGENIOUS HAWKEYE WHO RECALLED THE HASTY MANNER IN WHICH THE OTHER HAD ABANDONED HIS POST AT THE BEDSIDE OF THE SICK WOMAN WAS NOT WITHOUT HIS SUSPICIONS CONCERNING THE SUBJECT OF SO MUCH SOLEMN DELIBERATION +1320-122617-0005-83: THE BEAR SHOOK HIS SHAGGY SIDES AND THEN A WELL KNOWN VOICE REPLIED +1320-122617-0006-84: CAN THESE THINGS BE RETURNED DAVID BREATHING MORE FREELY AS THE TRUTH BEGAN TO DAWN UPON HIM +1320-122617-0007-85: COME COME RETURNED HAWKEYE UNCASING HIS HONEST COUNTENANCE THE BETTER TO ASSURE THE WAVERING CONFIDENCE OF HIS COMPANION YOU MAY SEE A SKIN WHICH IF IT BE NOT AS WHITE AS ONE OF THE GENTLE ONES HAS NO TINGE OF RED TO IT THAT THE WINDS OF THE HEAVEN AND THE SUN HAVE NOT BESTOWED NOW LET US TO BUSINESS +1320-122617-0008-86: THE YOUNG MAN IS IN BONDAGE AND MUCH I FEAR HIS DEATH IS DECREED +1320-122617-0009-87: I GREATLY MOURN THAT ONE SO WELL DISPOSED SHOULD DIE IN HIS IGNORANCE AND I HAVE SOUGHT A GOODLY HYMN CAN YOU LEAD ME TO HIM 
+1320-122617-0010-88: THE TASK WILL NOT BE DIFFICULT RETURNED DAVID HESITATING THOUGH I GREATLY FEAR YOUR PRESENCE WOULD RATHER INCREASE THAN MITIGATE HIS UNHAPPY FORTUNES +1320-122617-0011-89: THE LODGE IN WHICH UNCAS WAS CONFINED WAS IN THE VERY CENTER OF THE VILLAGE AND IN A SITUATION PERHAPS MORE DIFFICULT THAN ANY OTHER TO APPROACH OR LEAVE WITHOUT OBSERVATION +1320-122617-0012-90: FOUR OR FIVE OF THE LATTER ONLY LINGERED ABOUT THE DOOR OF THE PRISON OF UNCAS WARY BUT CLOSE OBSERVERS OF THE MANNER OF THEIR CAPTIVE +1320-122617-0013-91: DELIVERED IN A STRONG TONE OF ASSENT ANNOUNCED THE GRATIFICATION THE SAVAGE WOULD RECEIVE (IN->AND) WITNESSING SUCH AN EXHIBITION OF WEAKNESS IN AN ENEMY SO LONG HATED AND SO MUCH FEARED +1320-122617-0014-92: THEY DREW BACK A LITTLE FROM THE ENTRANCE AND MOTIONED TO THE SUPPOSED (CONJURER->CONJUROR) TO ENTER +1320-122617-0015-93: BUT THE BEAR INSTEAD OF OBEYING MAINTAINED THE (SEAT->SEED) IT HAD TAKEN AND GROWLED +1320-122617-0016-94: THE CUNNING MAN IS AFRAID THAT HIS BREATH WILL BLOW UPON HIS BROTHERS AND TAKE AWAY THEIR COURAGE TOO CONTINUED DAVID IMPROVING THE HINT HE RECEIVED THEY MUST STAND FURTHER OFF +1320-122617-0017-95: THEN AS IF SATISFIED OF THEIR SAFETY THE SCOUT LEFT HIS POSITION AND SLOWLY ENTERED THE PLACE +1320-122617-0018-96: IT WAS SILENT AND GLOOMY BEING TENANTED SOLELY BY THE CAPTIVE AND LIGHTED BY THE DYING EMBERS OF A FIRE WHICH HAD BEEN USED FOR THE (PURPOSED->PURPOSE) OF COOKERY +1320-122617-0019-97: UNCAS OCCUPIED A DISTANT CORNER IN A RECLINING ATTITUDE BEING RIGIDLY BOUND BOTH HANDS AND FEET BY STRONG AND PAINFUL (WITHES->WIDTHS) +1320-122617-0020-98: THE SCOUT WHO HAD LEFT DAVID AT THE DOOR TO ASCERTAIN THEY WERE NOT OBSERVED THOUGHT IT PRUDENT TO PRESERVE HIS DISGUISE UNTIL ASSURED OF THEIR PRIVACY +1320-122617-0021-99: WHAT SHALL WE DO WITH THE MINGOES AT THE DOOR THEY COUNT SIX AND (THIS->THE) SINGER IS AS GOOD AS NOTHING +1320-122617-0022-100: THE DELAWARES ARE CHILDREN OF THE TORTOISE AND (THEY->THE) OUTSTRIP THE DEER +1320-122617-0023-101: UNCAS WHO HAD ALREADY APPROACHED THE DOOR IN READINESS TO LEAD THE WAY NOW RECOILED AND PLACED HIMSELF ONCE MORE IN THE BOTTOM OF THE LODGE +1320-122617-0024-102: BUT HAWKEYE WHO WAS TOO MUCH OCCUPIED WITH HIS OWN THOUGHTS TO NOTE THE MOVEMENT CONTINUED SPEAKING MORE TO HIMSELF THAN TO HIS COMPANION +1320-122617-0025-103: SO UNCAS YOU HAD BETTER TAKE THE LEAD WHILE I WILL PUT ON THE SKIN AGAIN AND TRUST TO CUNNING FOR WANT OF SPEED +1320-122617-0026-104: WELL WHAT CAN'T BE DONE BY MAIN COURAGE (IN->AND) WAR MUST BE DONE BY CIRCUMVENTION +1320-122617-0027-105: AS SOON AS THESE DISPOSITIONS WERE MADE THE SCOUT TURNED TO DAVID AND GAVE HIM HIS PARTING INSTRUCTIONS +1320-122617-0028-106: MY PURSUITS ARE PEACEFUL AND MY TEMPER I HUMBLY TRUST IS GREATLY GIVEN TO MERCY AND LOVE RETURNED DAVID A LITTLE NETTLED AT SO DIRECT AN ATTACK ON HIS MANHOOD BUT THERE ARE NONE WHO CAN SAY THAT I HAVE EVER FORGOTTEN MY FAITH IN THE LORD EVEN IN THE GREATEST STRAITS +1320-122617-0029-107: IF YOU ARE NOT THEN KNOCKED ON THE HEAD YOUR BEING A NON (COMPOSSER WILL->COMPOSSIBLE) PROTECT YOU AND YOU'LL THEN HAVE A GOOD REASON TO EXPECT TO DIE IN YOUR BED +1320-122617-0030-108: SO CHOOSE FOR YOURSELF TO MAKE A RUSH OR TARRY HERE +1320-122617-0031-109: BRAVELY AND GENEROUSLY HAS HE BATTLED IN MY BEHALF AND THIS AND MORE WILL I DARE IN HIS SERVICE +1320-122617-0032-110: KEEP SILENT AS LONG AS MAY BE AND IT WOULD BE WISE WHEN YOU DO SPEAK TO BREAK OUT SUDDENLY IN ONE OF YOUR SHOUTINGS WHICH WILL SERVE TO REMIND 
THE INDIANS THAT YOU ARE NOT ALTOGETHER AS RESPONSIBLE AS MEN SHOULD BE +1320-122617-0033-111: IF HOWEVER THEY TAKE YOUR SCALP AS I TRUST AND BELIEVE THEY WILL NOT DEPEND ON IT UNCAS AND I WILL NOT FORGET THE DEED BUT REVENGE IT (AS->IS) BECOMES TRUE WARRIORS AND TRUSTY FRIENDS +1320-122617-0034-112: HOLD SAID DAVID PERCEIVING THAT WITH THIS ASSURANCE THEY WERE ABOUT TO LEAVE HIM I AM AN UNWORTHY AND HUMBLE FOLLOWER OF ONE WHO TAUGHT NOT THE DAMNABLE PRINCIPLE OF REVENGE +1320-122617-0035-113: THEN HEAVING A HEAVY SIGH PROBABLY AMONG THE LAST HE EVER DREW IN PINING FOR A CONDITION HE HAD SO LONG ABANDONED HE ADDED IT IS WHAT I WOULD WISH TO PRACTISE MYSELF AS ONE WITHOUT A CROSS OF BLOOD THOUGH IT IS NOT ALWAYS EASY TO DEAL WITH AN INDIAN AS YOU WOULD WITH A FELLOW CHRISTIAN +1320-122617-0036-114: GOD BLESS YOU FRIEND I DO BELIEVE YOUR SCENT (IS->HAS) NOT GREATLY WRONG WHEN THE MATTER IS DULY CONSIDERED AND KEEPING ETERNITY BEFORE THE EYES THOUGH MUCH DEPENDS ON THE NATURAL GIFTS AND THE FORCE OF TEMPTATION +1320-122617-0037-115: THE DELAWARE DOG HE SAID LEANING FORWARD AND PEERING THROUGH THE DIM LIGHT TO CATCH THE EXPRESSION OF THE OTHER'S FEATURES IS HE AFRAID +1320-122617-0038-116: WILL THE HURONS HEAR HIS GROANS +1320-122617-0039-117: THE (MOHICAN->MOHICANS) STARTED ON HIS FEET AND SHOOK HIS SHAGGY COVERING AS THOUGH THE ANIMAL HE COUNTERFEITED WAS ABOUT TO MAKE SOME DESPERATE EFFORT +1320-122617-0040-118: HE HAD NO OCCASION TO DELAY FOR AT THE NEXT INSTANT A BURST OF CRIES FILLED THE OUTER AIR AND RAN ALONG THE WHOLE EXTENT OF THE VILLAGE +1320-122617-0041-119: UNCAS CAST HIS SKIN AND STEPPED FORTH IN HIS OWN BEAUTIFUL PROPORTIONS +1580-141083-0000-1949: I WILL ENDEAVOUR IN MY STATEMENT TO AVOID SUCH TERMS AS WOULD SERVE TO LIMIT THE EVENTS TO ANY PARTICULAR PLACE OR GIVE A CLUE AS TO THE PEOPLE CONCERNED +1580-141083-0001-1950: I HAD ALWAYS KNOWN HIM TO BE RESTLESS IN HIS MANNER BUT ON THIS PARTICULAR OCCASION HE WAS IN SUCH A STATE OF UNCONTROLLABLE AGITATION THAT IT WAS CLEAR SOMETHING VERY UNUSUAL HAD OCCURRED +1580-141083-0002-1951: MY FRIEND'S TEMPER HAD NOT IMPROVED SINCE HE HAD BEEN DEPRIVED OF THE CONGENIAL SURROUNDINGS OF BAKER STREET +1580-141083-0003-1952: WITHOUT HIS (SCRAPBOOKS->SCRAP BOOKS) HIS CHEMICALS AND HIS HOMELY UNTIDINESS HE WAS AN UNCOMFORTABLE MAN +1580-141083-0004-1953: I HAD TO READ IT OVER CAREFULLY AS THE TEXT MUST BE ABSOLUTELY CORRECT +1580-141083-0005-1954: I WAS ABSENT RATHER MORE THAN AN HOUR +1580-141083-0006-1955: THE ONLY DUPLICATE WHICH EXISTED SO FAR AS I KNEW WAS THAT WHICH BELONGED TO MY SERVANT (BANNISTER->BANISTER) A MAN WHO HAS LOOKED AFTER MY ROOM FOR TEN YEARS AND WHOSE HONESTY IS ABSOLUTELY ABOVE SUSPICION +1580-141083-0007-1956: THE MOMENT I LOOKED AT MY TABLE I WAS AWARE THAT SOMEONE HAD RUMMAGED AMONG MY PAPERS +1580-141083-0008-1957: THE PROOF WAS IN THREE LONG SLIPS I HAD LEFT THEM (ALL TOGETHER->ALTOGETHER) +1580-141083-0009-1958: (THE ALTERNATIVE->THEY ALL TURNED OF) WAS THAT (SOMEONE->SOME ONE) PASSING HAD OBSERVED THE KEY IN THE DOOR HAD KNOWN THAT I WAS OUT AND HAD ENTERED TO LOOK AT THE PAPERS +1580-141083-0010-1959: I GAVE HIM A LITTLE BRANDY AND LEFT HIM COLLAPSED IN A CHAIR WHILE I MADE A MOST CAREFUL EXAMINATION OF THE ROOM +1580-141083-0011-1960: A BROKEN TIP OF LEAD WAS LYING THERE ALSO +1580-141083-0012-1961: NOT ONLY THIS BUT ON THE TABLE I FOUND A SMALL BALL OF BLACK DOUGH OR CLAY WITH SPECKS OF SOMETHING WHICH LOOKS LIKE SAWDUST IN IT +1580-141083-0013-1962: ABOVE ALL THINGS I DESIRE TO SETTLE THE MATTER QUIETLY AND 
DISCREETLY +1580-141083-0014-1963: TO THE BEST OF MY BELIEF THEY WERE ROLLED UP +1580-141083-0015-1964: DID (ANYONE->ANY ONE) KNOW THAT THESE PROOFS WOULD BE THERE NO ONE SAVE THE PRINTER +1580-141083-0016-1965: I WAS IN SUCH A HURRY TO COME TO YOU YOU LEFT YOUR DOOR OPEN +1580-141083-0017-1966: SO IT SEEMS TO ME +1580-141083-0018-1967: NOW MISTER (SOAMES->SOLMES) AT YOUR DISPOSAL +1580-141083-0019-1968: ABOVE WERE THREE STUDENTS ONE ON EACH STORY +1580-141083-0020-1969: THEN HE APPROACHED IT AND STANDING ON TIPTOE WITH HIS (NECK->NET) CRANED HE LOOKED INTO THE ROOM +1580-141083-0021-1970: THERE IS NO OPENING EXCEPT THE ONE PANE SAID OUR LEARNED GUIDE +1580-141083-0022-1971: I AM AFRAID THERE ARE NO SIGNS HERE SAID HE +1580-141083-0023-1972: ONE COULD HARDLY HOPE FOR ANY UPON SO DRY A DAY +1580-141083-0024-1973: YOU LEFT HIM IN A CHAIR YOU SAY WHICH CHAIR BY THE WINDOW THERE +1580-141083-0025-1974: THE (MAN->MEN) ENTERED AND TOOK THE PAPERS SHEET BY SHEET FROM THE CENTRAL TABLE +1580-141083-0026-1975: AS A MATTER OF FACT HE COULD NOT SAID (SOAMES->SOLMES) FOR I ENTERED BY THE SIDE DOOR +1580-141083-0027-1976: HOW LONG WOULD IT TAKE HIM TO DO THAT USING EVERY POSSIBLE CONTRACTION A QUARTER OF AN HOUR NOT LESS +1580-141083-0028-1977: THEN HE TOSSED IT DOWN AND SEIZED THE NEXT +1580-141083-0029-1978: HE WAS IN THE MIDST OF THAT WHEN YOUR RETURN CAUSED HIM TO MAKE A VERY HURRIED RETREAT VERY HURRIED SINCE HE HAD NOT TIME TO REPLACE THE PAPERS WHICH WOULD TELL YOU THAT HE HAD BEEN THERE +1580-141083-0030-1979: MISTER (SOAMES->SOLMES) WAS SOMEWHAT OVERWHELMED BY THIS FLOOD OF INFORMATION +1580-141083-0031-1980: HOLMES HELD OUT A SMALL CHIP WITH THE LETTERS N (N->*) AND A SPACE OF CLEAR WOOD AFTER THEM YOU SEE +1580-141083-0032-1981: WATSON I HAVE ALWAYS DONE YOU AN INJUSTICE THERE ARE OTHERS +1580-141083-0033-1982: I WAS HOPING THAT IF THE PAPER ON WHICH HE WROTE WAS THIN SOME TRACE OF IT MIGHT COME THROUGH UPON THIS POLISHED SURFACE NO I SEE NOTHING +1580-141083-0034-1983: AS HOLMES DREW THE CURTAIN I WAS AWARE FROM SOME LITTLE RIGIDITY AND (*->AN) ALERTNESS OF HIS ATTITUDE THAT HE WAS PREPARED FOR AN EMERGENCY +1580-141083-0035-1984: HOLMES TURNED AWAY AND STOOPED SUDDENLY TO THE FLOOR (HALLOA WHAT'S->HULLO WHAT IS) THIS +1580-141083-0036-1985: HOLMES (HELD IT->HUTTED) OUT ON HIS OPEN PALM IN THE GLARE OF THE ELECTRIC LIGHT +1580-141083-0037-1986: WHAT COULD HE DO HE CAUGHT UP EVERYTHING WHICH WOULD BETRAY HIM AND HE RUSHED INTO YOUR BEDROOM TO CONCEAL HIMSELF +1580-141083-0038-1987: I UNDERSTAND YOU TO SAY THAT THERE ARE THREE STUDENTS WHO USE THIS (STAIR->STARE) AND ARE IN THE HABIT OF PASSING YOUR DOOR YES THERE ARE +1580-141083-0039-1988: AND THEY ARE ALL IN FOR THIS EXAMINATION YES +1580-141083-0040-1989: ONE HARDLY LIKES TO THROW SUSPICION WHERE THERE ARE NO PROOFS +1580-141083-0041-1990: LET US (HEAR->SEE) THE SUSPICIONS I WILL LOOK AFTER THE PROOFS +1580-141083-0042-1991: MY SCHOLAR HAS BEEN LEFT (*->A) VERY POOR BUT HE IS HARD WORKING AND INDUSTRIOUS HE WILL DO WELL +1580-141083-0043-1992: THE TOP FLOOR BELONGS TO (MILES->MYLES) MC LAREN +1580-141083-0044-1993: I DARE NOT GO SO FAR AS THAT BUT OF THE THREE HE IS PERHAPS THE LEAST UNLIKELY +1580-141083-0045-1994: HE WAS STILL SUFFERING FROM THIS SUDDEN DISTURBANCE OF THE QUIET ROUTINE OF HIS LIFE +1580-141083-0046-1995: BUT I HAVE OCCASIONALLY DONE THE SAME THING AT OTHER TIMES +1580-141083-0047-1996: DID YOU LOOK AT THESE PAPERS ON THE TABLE +1580-141083-0048-1997: HOW CAME YOU TO LEAVE THE KEY IN THE DOOR +1580-141083-0049-1998: 
(ANYONE->ANY ONE) IN THE ROOM COULD GET OUT YES SIR +1580-141083-0050-1999: I (*->HAVE) REALLY DON'T THINK HE KNEW MUCH ABOUT IT MISTER HOLMES +1580-141083-0051-2000: ONLY FOR A MINUTE OR SO +1580-141083-0052-2001: OH I WOULD NOT VENTURE TO SAY SIR +1580-141083-0053-2002: YOU HAVEN'T SEEN ANY OF THEM NO SIR +1580-141084-0000-2003: IT WAS THE INDIAN WHOSE DARK SILHOUETTE APPEARED SUDDENLY UPON HIS BLIND +1580-141084-0001-2004: HE WAS PACING SWIFTLY UP AND DOWN HIS ROOM +1580-141084-0002-2005: (THIS->THE) SET OF ROOMS IS QUITE THE OLDEST IN THE COLLEGE AND IT IS NOT UNUSUAL FOR VISITORS TO GO OVER THEM +1580-141084-0003-2006: NO NAMES PLEASE SAID HOLMES AS WE KNOCKED AT (GILCHRIST'S->GILCHER'S) DOOR +1580-141084-0004-2007: OF COURSE HE DID NOT REALIZE THAT IT WAS I WHO WAS KNOCKING BUT NONE THE LESS HIS CONDUCT WAS VERY UNCOURTEOUS AND INDEED UNDER THE CIRCUMSTANCES RATHER SUSPICIOUS +1580-141084-0005-2008: THAT IS VERY IMPORTANT SAID HOLMES +1580-141084-0006-2009: YOU DON'T SEEM TO REALIZE THE POSITION +1580-141084-0007-2010: TO MORROW (IS->WAS) THE EXAMINATION +1580-141084-0008-2011: I CANNOT ALLOW THE EXAMINATION TO BE HELD IF ONE OF THE PAPERS HAS BEEN TAMPERED WITH THE SITUATION MUST BE FACED +1580-141084-0009-2012: IT IS POSSIBLE THAT I MAY BE IN A POSITION THEN TO INDICATE SOME COURSE OF ACTION +1580-141084-0010-2013: I WILL TAKE THE BLACK CLAY WITH ME ALSO THE PENCIL CUTTINGS GOOD BYE +1580-141084-0011-2014: WHEN WE WERE OUT IN THE DARKNESS OF THE QUADRANGLE WE AGAIN LOOKED UP AT THE WINDOWS +1580-141084-0012-2015: THE FOUL MOUTHED FELLOW AT THE TOP +1580-141084-0013-2016: HE IS THE ONE WITH THE WORST RECORD +1580-141084-0014-2017: WHY (BANNISTER->BANISTER) THE SERVANT WHAT'S HIS GAME IN THE MATTER +1580-141084-0015-2018: HE IMPRESSED ME AS BEING A PERFECTLY HONEST MAN +1580-141084-0016-2019: MY FRIEND DID NOT APPEAR TO BE DEPRESSED BY HIS FAILURE BUT SHRUGGED HIS SHOULDERS (IN->AND) HALF HUMOROUS RESIGNATION +1580-141084-0017-2020: NO GOOD MY DEAR WATSON +1580-141084-0018-2021: I THINK SO YOU HAVE FORMED A CONCLUSION +1580-141084-0019-2022: YES MY DEAR WATSON I HAVE SOLVED THE MYSTERY +1580-141084-0020-2023: LOOK AT THAT HE HELD OUT HIS HAND +1580-141084-0021-2024: ON THE PALM WERE THREE LITTLE PYRAMIDS OF BLACK DOUGHY CLAY +1580-141084-0022-2025: AND ONE MORE THIS MORNING +1580-141084-0023-2026: IN A FEW HOURS THE EXAMINATION WOULD COMMENCE AND HE WAS STILL IN THE DILEMMA BETWEEN MAKING THE FACTS PUBLIC AND ALLOWING THE CULPRIT TO COMPETE FOR THE VALUABLE SCHOLARSHIP +1580-141084-0024-2027: HE COULD HARDLY STAND STILL SO GREAT WAS HIS MENTAL AGITATION AND HE RAN TOWARDS HOLMES WITH (TWO->TOO) EAGER HANDS OUTSTRETCHED THANK HEAVEN THAT YOU HAVE COME +1580-141084-0025-2028: YOU KNOW HIM I THINK SO +1580-141084-0026-2029: IF THIS MATTER IS NOT TO BECOME PUBLIC WE MUST GIVE OURSELVES CERTAIN POWERS AND RESOLVE OURSELVES INTO A SMALL PRIVATE COURT MARTIAL +1580-141084-0027-2030: NO SIR CERTAINLY NOT +1580-141084-0028-2031: THERE WAS NO MAN SIR +1580-141084-0029-2032: HIS TROUBLED BLUE EYES GLANCED AT EACH OF US AND FINALLY RESTED WITH AN EXPRESSION OF BLANK DISMAY UPON (BANNISTER->BANISTER) IN THE FARTHER CORNER +1580-141084-0030-2033: JUST CLOSE THE DOOR SAID HOLMES +1580-141084-0031-2034: WE WANT TO KNOW MISTER (GILCHRIST->GILGRIST) HOW YOU AN HONOURABLE MAN EVER CAME TO COMMIT SUCH AN ACTION AS THAT OF YESTERDAY +1580-141084-0032-2035: FOR A MOMENT GILCHRIST WITH UPRAISED HAND TRIED TO CONTROL HIS WRITHING FEATURES +1580-141084-0033-2036: COME COME SAID HOLMES KINDLY IT IS HUMAN TO 
ERR AND AT LEAST NO ONE CAN ACCUSE YOU OF BEING A CALLOUS CRIMINAL +1580-141084-0034-2037: WELL WELL DON'T TROUBLE TO ANSWER LISTEN AND SEE THAT I DO YOU (NO->KNOW) INJUSTICE +1580-141084-0035-2038: HE COULD EXAMINE THE PAPERS IN HIS OWN OFFICE +1580-141084-0036-2039: THE INDIAN I ALSO THOUGHT NOTHING OF +1580-141084-0037-2040: WHEN I APPROACHED YOUR ROOM I EXAMINED THE WINDOW +1580-141084-0038-2041: NO ONE LESS THAN THAT WOULD HAVE A CHANCE +1580-141084-0039-2042: I ENTERED AND I TOOK YOU INTO MY CONFIDENCE AS TO THE SUGGESTIONS OF THE SIDE TABLE +1580-141084-0040-2043: HE RETURNED CARRYING HIS JUMPING SHOES WHICH ARE PROVIDED AS YOU ARE (AWARE->WHERE) WITH SEVERAL SHARP SPIKES +1580-141084-0041-2044: NO HARM WOULD HAVE BEEN DONE HAD IT NOT BEEN THAT AS HE PASSED YOUR DOOR HE PERCEIVED THE KEY WHICH HAD BEEN LEFT BY THE CARELESSNESS OF YOUR SERVANT +1580-141084-0042-2045: A SUDDEN IMPULSE CAME OVER HIM TO ENTER AND SEE IF THEY WERE INDEED THE PROOFS +1580-141084-0043-2046: HE PUT HIS SHOES ON THE TABLE +1580-141084-0044-2047: GLOVES SAID THE YOUNG MAN +1580-141084-0045-2048: SUDDENLY HE HEARD HIM AT THE VERY DOOR THERE WAS NO POSSIBLE ESCAPE +1580-141084-0046-2049: HAVE I TOLD THE TRUTH MISTER (GILCHRIST->GILGRIST) +1580-141084-0047-2050: I HAVE A LETTER HERE MISTER (SOAMES->SOLMES) WHICH I WROTE TO YOU EARLY THIS MORNING IN THE MIDDLE OF A RESTLESS NIGHT +1580-141084-0048-2051: IT (WILL->WOULD) BE CLEAR TO YOU FROM WHAT I HAVE SAID THAT ONLY YOU COULD HAVE LET THIS YOUNG MAN OUT SINCE YOU WERE LEFT IN THE ROOM AND MUST HAVE LOCKED THE DOOR WHEN YOU WENT OUT +1580-141084-0049-2052: IT WAS SIMPLE ENOUGH SIR IF YOU ONLY HAD KNOWN BUT WITH ALL YOUR CLEVERNESS IT WAS IMPOSSIBLE THAT YOU COULD KNOW +1580-141084-0050-2053: IF MISTER (SOAMES->SOLMES) SAW THEM THE GAME WAS UP +1995-1826-0000-750: IN THE DEBATE BETWEEN THE SENIOR SOCIETIES HER DEFENCE OF THE FIFTEENTH AMENDMENT HAD BEEN NOT ONLY A NOTABLE BIT OF REASONING BUT DELIVERED WITH REAL ENTHUSIASM +1995-1826-0001-751: THE SOUTH SHE HAD NOT THOUGHT OF SERIOUSLY AND YET KNOWING OF ITS DELIGHTFUL HOSPITALITY AND MILD CLIMATE SHE WAS NOT AVERSE TO CHARLESTON OR NEW ORLEANS +1995-1826-0002-752: JOHN TAYLOR WHO HAD SUPPORTED HER THROUGH COLLEGE WAS INTERESTED IN COTTON +1995-1826-0003-753: BETTER GO HE HAD (COUNSELLED->COUNSEL) SENTENTIOUSLY +1995-1826-0004-754: MIGHT LEARN SOMETHING USEFUL DOWN THERE +1995-1826-0005-755: BUT JOHN THERE'S NO SOCIETY JUST ELEMENTARY WORK +1995-1826-0006-756: BEEN LOOKING UP (TOOMS->TOMBS) COUNTY +1995-1826-0007-757: (FIND->FIVE) SOME (CRESSWELLS->CRUSTWELLS) THERE BIG PLANTATIONS RATED AT TWO HUNDRED AND FIFTY THOUSAND DOLLARS +1995-1826-0008-758: SOME OTHERS TOO BIG COTTON COUNTY +1995-1826-0009-759: YOU OUGHT TO KNOW JOHN IF I TEACH NEGROES I'LL SCARCELY SEE MUCH OF PEOPLE IN MY OWN CLASS +1995-1826-0010-760: AT ANY RATE I SAY GO +1995-1826-0011-761: HERE SHE WAS TEACHING DIRTY CHILDREN AND THE SMELL OF CONFUSED ODORS AND BODILY PERSPIRATION WAS TO HER AT TIMES UNBEARABLE +1995-1826-0012-762: SHE WANTED A GLANCE OF THE NEW BOOKS AND PERIODICALS AND TALK OF (GREAT PHILANTHROPIES->GRATEFUL ANTHROPIES) AND REFORMS +1995-1826-0013-763: SO FOR THE HUNDREDTH TIME SHE WAS THINKING (TODAY->TO DAY) AS SHE WALKED ALONE UP THE LANE BACK OF THE BARN AND THEN SLOWLY DOWN THROUGH THE BOTTOMS +1995-1826-0014-764: COTTON SHE PAUSED +1995-1826-0015-765: SHE HAD ALMOST FORGOTTEN THAT IT WAS HERE WITHIN TOUCH (AND->IN) SIGHT +1995-1826-0016-766: THE GLIMMERING SEA OF DELICATE LEAVES WHISPERED AND MURMURED BEFORE HER STRETCHING 
AWAY TO THE NORTHWARD +1995-1826-0017-767: THERE MIGHT BE A BIT OF POETRY HERE AND THERE BUT MOST OF THIS PLACE WAS SUCH DESPERATE PROSE +1995-1826-0018-768: HER REGARD SHIFTED TO THE GREEN STALKS AND LEAVES AGAIN AND SHE STARTED TO MOVE AWAY +1995-1826-0019-769: COTTON IS A WONDERFUL THING IS IT NOT BOYS SHE SAID RATHER PRIMLY +1995-1826-0020-770: MISS TAYLOR DID NOT KNOW MUCH ABOUT COTTON BUT AT LEAST ONE MORE (REMARK->REMARKED) SEEMED CALLED FOR +1995-1826-0021-771: DON'T KNOW WELL OF ALL THINGS INWARDLY COMMENTED MISS TAYLOR LITERALLY BORN IN COTTON AND OH WELL AS MUCH AS TO ASK WHAT'S THE USE SHE TURNED AGAIN TO GO +1995-1826-0022-772: I SUPPOSE THOUGH IT'S TOO EARLY FOR THEM THEN CAME THE EXPLOSION +1995-1826-0023-773: (GOOBERS->GOULD WAS) DON'T GROW ON THE (TOPS OF VINES->TOPSY BANDS) BUT (UNDERGROUND->ON THE GROUND) ON THE ROOTS LIKE YAMS IS THAT SO +1995-1826-0024-774: THE GOLDEN FLEECE IT'S THE SILVER FLEECE HE (HARKENED->HEARKENED) +1995-1826-0025-775: (SOME TIME YOU'LL->SOMETIME YOU) TELL ME PLEASE WON'T YOU +1995-1826-0026-776: (NOW->THOU) FOR ONE LITTLE HALF HOUR SHE HAD BEEN A WOMAN TALKING TO A BOY NO NOT EVEN THAT SHE HAD BEEN TALKING JUST TALKING THERE WERE NO PERSONS IN THE CONVERSATION JUST THINGS ONE THING COTTON +1995-1836-0000-735: THE HON (CHARLES->*) SMITH MISS SARAH'S BROTHER WAS WALKING SWIFTLY UPTOWN FROM MISTER EASTERLY'S WALL STREET OFFICE AND HIS FACE WAS PALE +1995-1836-0001-736: AT LAST THE COTTON COMBINE WAS TO ALL APPEARANCES AN ASSURED FACT AND HE WAS SLATED FOR THE SENATE +1995-1836-0002-737: WHY SHOULD HE NOT BE AS OTHER MEN +1995-1836-0003-738: SHE WAS NOT HERSELF (A NOTABLY->UNNOTABLY) INTELLIGENT WOMAN SHE GREATLY ADMIRED INTELLIGENCE OR WHATEVER LOOKED TO HER LIKE INTELLIGENCE IN OTHERS +1995-1836-0004-739: AS SHE AWAITED HER (GUESTS->GUESS) SHE SURVEYED THE TABLE WITH BOTH SATISFACTION AND DISQUIETUDE FOR HER SOCIAL FUNCTIONS WERE FEW (TONIGHT->TO NIGHT) THERE WERE SHE CHECKED THEM OFF ON HER FINGERS SIR JAMES (CREIGHTON->CREDON) THE RICH ENGLISH MANUFACTURER AND LADY (CREIGHTON->CRIGHTON) MISTER AND MISSUS (VANDERPOOL->VAN DERPOOL) MISTER HARRY (CRESSWELL->CRESWELL) AND HIS SISTER JOHN TAYLOR AND HIS SISTER AND MISTER CHARLES SMITH WHOM THE EVENING PAPERS MENTIONED AS LIKELY TO BE UNITED STATES SENATOR FROM NEW JERSEY A SELECTION OF GUESTS THAT HAD BEEN DETERMINED UNKNOWN TO THE HOSTESS BY THE MEETING OF COTTON INTERESTS EARLIER IN THE DAY +1995-1836-0005-740: MISSUS (GREY->GRAY) HAD MET SOUTHERNERS BEFORE BUT NOT INTIMATELY AND SHE ALWAYS HAD IN MIND VIVIDLY THEIR CRUELTY TO POOR NEGROES A SUBJECT SHE MADE A POINT OF INTRODUCING FORTHWITH +1995-1836-0006-741: SHE WAS THEREFORE MOST AGREEABLY SURPRISED TO HEAR MISTER (CRESSWELL->CRESWELL) EXPRESS HIMSELF SO CORDIALLY AS APPROVING OF NEGRO EDUCATION +1995-1836-0007-742: (BUT YOU->DO) BELIEVE IN SOME EDUCATION ASKED MARY TAYLOR +1995-1836-0008-743: I BELIEVE IN THE TRAINING OF PEOPLE TO THEIR (HIGHEST CAPACITY->HACITY) THE ENGLISHMAN HERE HEARTILY SECONDED HIM +1995-1836-0009-744: BUT (CRESSWELL->CRASWELL) ADDED SIGNIFICANTLY CAPACITY DIFFERS ENORMOUSLY BETWEEN RACES +1995-1836-0010-745: THE VANDERPOOLS WERE SURE (OF->*) THIS AND THE ENGLISHMAN INSTANCING INDIA BECAME QUITE ELOQUENT MISSUS (GREY->GRAY) WAS MYSTIFIED BUT HARDLY DARED ADMIT IT THE GENERAL TREND OF THE CONVERSATION SEEMED TO BE THAT MOST INDIVIDUALS NEEDED TO BE SUBMITTED TO THE SHARPEST SCRUTINY BEFORE BEING ALLOWED MUCH EDUCATION AND AS FOR THE LOWER RACES IT WAS SIMPLY CRIMINAL TO OPEN SUCH USELESS OPPORTUNITIES TO THEM 
+1995-1836-0011-746: POSITIVELY HEROIC ADDED (CRESSWELL->CRASWELL) AVOIDING HIS SISTER'S EYES +1995-1836-0012-747: BUT (WE'RE->WE ARE) NOT (ER->A) EXACTLY (WELCOMED->WELCOME) +1995-1836-0013-748: MARY TAYLOR HOWEVER RELATED THE TALE OF ZORA TO MISSUS (GREY'S->GRAY'S) PRIVATE EAR LATER +1995-1836-0014-749: FORTUNATELY SAID MISTER (VANDERPOOL NORTHERNERS->VAN DERPOOL NOR THE NOSE) AND SOUTHERNERS (ARE ARRIVING->ALL RIVING) AT A BETTER MUTUAL UNDERSTANDING ON MOST OF THESE MATTERS +1995-1837-0000-777: HE KNEW THE SILVER FLEECE HIS AND (ZORA'S->ZORAS) MUST BE RUINED +1995-1837-0001-778: IT WAS THE FIRST GREAT SORROW OF HIS LIFE IT WAS NOT SO MUCH THE LOSS OF THE COTTON ITSELF BUT THE FANTASY THE HOPES THE DREAMS BUILT AROUND IT +1995-1837-0002-779: AH THE SWAMP THE CRUEL SWAMP +1995-1837-0003-780: (THE->WHO) REVELATION OF HIS LOVE LIGHTED AND BRIGHTENED SLOWLY TILL IT FLAMED LIKE A SUNRISE OVER HIM AND LEFT HIM IN BURNING WONDER +1995-1837-0004-781: HE PANTED TO KNOW IF SHE TOO KNEW OR KNEW AND CARED NOT OR CARED AND KNEW NOT +1995-1837-0005-782: SHE WAS SO STRANGE (AND->IN) HUMAN A CREATURE +1995-1837-0006-783: THE WORLD WAS WATER VEILED IN MISTS +1995-1837-0007-784: THEN OF A SUDDEN AT MIDDAY THE SUN SHOT OUT HOT AND STILL NO BREATH OF AIR STIRRED THE SKY WAS LIKE BLUE STEEL THE EARTH STEAMED +1995-1837-0008-785: WHERE WAS THE USE OF IMAGINING +1995-1837-0009-786: THE LAGOON HAD BEEN LEVEL WITH THE (DYKES->DIKES) A WEEK AGO AND NOW +1995-1837-0010-787: PERHAPS SHE TOO MIGHT BE THERE WAITING WEEPING +1995-1837-0011-788: HE STARTED AT THE THOUGHT HE HURRIED FORTH SADLY +1995-1837-0012-789: HE SPLASHED AND STAMPED ALONG FARTHER AND FARTHER ONWARD UNTIL HE NEARED THE RAMPART OF THE CLEARING AND PUT FOOT UPON THE TREE BRIDGE +1995-1837-0013-790: THEN HE LOOKED DOWN THE LAGOON WAS DRY +1995-1837-0014-791: HE STOOD A MOMENT BEWILDERED THEN TURNED AND RUSHED UPON THE ISLAND A GREAT SHEET OF DAZZLING SUNLIGHT SWEPT THE PLACE AND BENEATH LAY A MIGHTY MASS OF OLIVE GREEN THICK TALL WET AND WILLOWY +1995-1837-0015-792: THE SQUARES OF COTTON SHARP EDGED HEAVY WERE JUST ABOUT TO BURST TO (BOLLS->BOWLS) +1995-1837-0016-793: FOR ONE LONG MOMENT HE PAUSED STUPID AGAPE WITH UTTER AMAZEMENT THEN LEANED DIZZILY AGAINST A TREE +1995-1837-0017-794: HE GAZED ABOUT PERPLEXED ASTONISHED +1995-1837-0018-795: HERE LAY THE READING OF THE RIDDLE WITH INFINITE WORK AND PAIN SOME ONE HAD DUG A CANAL FROM THE LAGOON TO THE CREEK INTO WHICH THE FORMER HAD DRAINED BY A LONG AND CROOKED WAY THUS ALLOWING IT TO EMPTY DIRECTLY +1995-1837-0019-796: HE SAT DOWN WEAK BEWILDERED AND ONE THOUGHT WAS UPPERMOST (ZORA->SORA) +1995-1837-0020-797: THE YEARS OF THE DAYS OF HER DYING WERE TEN +1995-1837-0021-798: THE HOPE AND DREAM OF HARVEST WAS UPON THE LAND +1995-1837-0022-799: UP IN THE SICK ROOM ZORA LAY ON THE LITTLE WHITE BED +1995-1837-0023-800: THE (NET->NED) AND WEB OF ENDLESS THINGS HAD BEEN CRAWLING AND CREEPING AROUND HER SHE HAD STRUGGLED IN DUMB SPEECHLESS TERROR AGAINST SOME MIGHTY GRASPING THAT STROVE FOR HER LIFE WITH GNARLED AND CREEPING FINGERS BUT NOW AT LAST (WEAKLY->WEEKLY) SHE OPENED HER EYES AND QUESTIONED +1995-1837-0024-801: FOR A WHILE SHE LAY IN HER CHAIR IN HAPPY DREAMY PLEASURE AT SUN AND BIRD AND TREE +1995-1837-0025-802: SHE ROSE WITH A FLEETING GLANCE GATHERED THE SHAWL (ROUND->AROUND) HER THEN GLIDING FORWARD WAVERING TREMULOUS SLIPPED ACROSS THE ROAD AND INTO THE SWAMP +1995-1837-0026-803: SHE HAD BEEN BORN WITHIN ITS BORDERS WITHIN ITS BORDERS SHE HAD LIVED AND GROWN AND WITHIN ITS (BORDERS->BORDER) SHE 
HAD MET HER LOVE +1995-1837-0027-804: ON SHE HURRIED UNTIL SWEEPING DOWN TO THE LAGOON AND THE ISLAND LO THE COTTON LAY BEFORE HER +1995-1837-0028-805: THE CHAIR WAS EMPTY BUT HE KNEW +1995-1837-0029-806: HE DARTED THROUGH THE TREES AND PAUSED A TALL MAN STRONGLY BUT SLIMLY MADE +2094-142345-0000-308: IT IS A VERY FINE OLD PLACE OF RED BRICK SOFTENED BY A PALE POWDERY LICHEN WHICH HAS DISPERSED ITSELF WITH HAPPY IRREGULARITY SO AS TO BRING THE RED BRICK INTO TERMS OF FRIENDLY COMPANIONSHIP WITH (THE->A) LIMESTONE ORNAMENTS SURROUNDING THE THREE GABLES THE WINDOWS AND THE DOOR PLACE +2094-142345-0001-309: BUT THE WINDOWS ARE PATCHED WITH WOODEN PANES AND THE DOOR I THINK IS LIKE THE GATE IT IS NEVER OPENED +2094-142345-0002-310: FOR IT IS A SOLID HEAVY HANDSOME DOOR AND MUST ONCE HAVE BEEN IN THE HABIT OF SHUTTING WITH A SONOROUS BANG BEHIND (A->THE) LIVERIED LACKEY WHO HAD JUST SEEN HIS MASTER AND MISTRESS OFF THE GROUNDS IN A CARRIAGE AND PAIR +2094-142345-0003-311: A LARGE OPEN FIREPLACE WITH RUSTY DOGS IN IT AND A BARE BOARDED FLOOR AT THE FAR END FLEECES OF WOOL STACKED UP IN THE MIDDLE OF THE FLOOR SOME EMPTY CORN BAGS +2094-142345-0004-312: AND WHAT THROUGH THE LEFT HAND WINDOW +2094-142345-0005-313: SEVERAL CLOTHES HORSES A PILLION A SPINNING WHEEL AND AN OLD BOX WIDE OPEN AND STUFFED FULL OF COLOURED RAGS +2094-142345-0006-314: AT THE EDGE OF THIS BOX THERE LIES A GREAT WOODEN DOLL WHICH SO FAR AS MUTILATION IS CONCERNED BEARS A STRONG RESEMBLANCE TO THE FINEST GREEK SCULPTURE AND ESPECIALLY IN THE TOTAL LOSS OF ITS NOSE +2094-142345-0007-315: THE HISTORY OF THE HOUSE IS PLAIN NOW +2094-142345-0008-316: BUT THERE IS ALWAYS (A->AS) STRONGER SENSE OF LIFE WHEN THE SUN IS BRILLIANT AFTER RAIN AND NOW HE IS POURING DOWN HIS BEAMS AND MAKING SPARKLES AMONG THE WET STRAW AND LIGHTING UP EVERY PATCH OF VIVID GREEN MOSS ON THE RED TILES OF THE COW SHED AND TURNING EVEN THE MUDDY WATER THAT IS HURRYING ALONG THE CHANNEL TO THE DRAIN INTO A MIRROR FOR THE YELLOW (BILLED->BUILD) DUCKS WHO ARE SEIZING THE OPPORTUNITY OF GETTING A DRINK WITH AS MUCH BODY IN IT AS POSSIBLE +2094-142345-0009-317: FOR THE GREAT BARN DOORS ARE THROWN WIDE OPEN AND MEN ARE BUSY THERE MENDING THE HARNESS UNDER THE SUPERINTENDENCE OF MISTER GOBY THE (WHITTAW->WIDOW) OTHERWISE SADDLER WHO ENTERTAINS THEM WITH THE LATEST (TREDDLESTON->TREDDLESTONE) GOSSIP +2094-142345-0010-318: (HETTY->HETTY'S) SORREL OFTEN TOOK THE OPPORTUNITY WHEN HER AUNT'S BACK WAS TURNED OF LOOKING AT THE PLEASING REFLECTION OF HERSELF IN THOSE POLISHED (SURFACES->SERVICES) FOR THE OAK TABLE WAS USUALLY TURNED UP LIKE A SCREEN AND WAS MORE FOR ORNAMENT THAN FOR USE AND SHE COULD SEE HERSELF SOMETIMES IN THE GREAT ROUND PEWTER DISHES THAT WERE RANGED ON THE SHELVES ABOVE THE LONG DEAL DINNER TABLE OR IN THE HOBS OF THE GRATE WHICH ALWAYS SHONE LIKE JASPER +2094-142345-0011-319: DO NOT SUPPOSE HOWEVER THAT MISSUS POYSER WAS ELDERLY OR SHREWISH IN HER APPEARANCE SHE WAS A GOOD LOOKING WOMAN NOT MORE THAN EIGHT AND THIRTY OF FAIR COMPLEXION AND SANDY HAIR WELL SHAPEN LIGHT FOOTED +2094-142345-0012-320: THE FAMILY LIKENESS BETWEEN HER AND HER NIECE (DINAH->DINA) MORRIS WITH THE CONTRAST BETWEEN HER KEENNESS AND (DINAH'S->DYNAS) SERAPHIC GENTLENESS OF EXPRESSION MIGHT HAVE SERVED A PAINTER AS AN EXCELLENT SUGGESTION FOR A MARTHA AND MARY +2094-142345-0013-321: HER TONGUE WAS NOT LESS KEEN THAN HER EYE AND WHENEVER A DAMSEL CAME WITHIN (EARSHOT->EAR SHOT) SEEMED TO TAKE UP AN UNFINISHED LECTURE AS A BARREL ORGAN TAKES UP A TUNE PRECISELY AT THE POINT 
WHERE IT HAD LEFT OFF +2094-142345-0014-322: THE FACT THAT IT WAS CHURNING DAY WAS ANOTHER REASON WHY IT WAS INCONVENIENT TO HAVE THE (WHITTAWS->WIDOWS) AND WHY CONSEQUENTLY MISSUS POYSER SHOULD SCOLD MOLLY THE HOUSEMAID WITH UNUSUAL SEVERITY +2094-142345-0015-323: TO ALL APPEARANCE MOLLY HAD GOT THROUGH HER AFTER DINNER WORK IN AN EXEMPLARY MANNER HAD CLEANED HERSELF WITH GREAT DISPATCH AND NOW CAME TO ASK SUBMISSIVELY IF SHE SHOULD SIT DOWN TO HER SPINNING TILL MILKING TIME +2094-142345-0016-324: SPINNING INDEED +2094-142345-0017-325: I NEVER KNEW YOUR EQUALS FOR (GALLOWSNESS->GALLOW'S NICE) +2094-142345-0018-326: WHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE TO KNOW +2094-142345-0019-327: COMB THE WOOL FOR THE (WHITTAWS->WIDOWS) INDEED +2094-142345-0020-328: THAT'S WHAT YOU'D LIKE TO BE DOING IS IT +2094-142345-0021-329: THAT'S THE WAY WITH YOU THAT'S THE ROAD YOU'D ALL LIKE TO GO HEADLONGS TO RUIN +2094-142345-0022-330: MISTER (OTTLEY'S->OAKLEIGHS) INDEED +2094-142345-0023-331: (YOU'RE->YOU ARE) A RARE (UN->AND) FOR SITTING DOWN TO YOUR WORK A LITTLE WHILE AFTER (IT'S->ITS) TIME TO PUT BY +2094-142345-0024-332: (MUNNY->MONEY) MY (IRON'S->IRONS) TWITE (TOLD PEASE->TOLLED PEAS) PUT IT DOWN TO WARM +2094-142345-0025-333: COLD IS IT MY DARLING BLESS YOUR SWEET FACE +2094-142345-0026-334: SHE'S GOING TO PUT THE IRONING THINGS AWAY +2094-142345-0027-335: (MUNNY->MONEY) I (TOULD IKE->DID LIKE) TO DO INTO (DE->THE) BARN TO TOMMY TO SEE (DE WHITTAWD->THE WIDOW) +2094-142345-0028-336: NO NO (NO TOTTY UD->TODDY HAD) GET HER FEET WET SAID MISSUS POYSER CARRYING AWAY HER IRON +2094-142345-0029-337: DID EVER ANYBODY SEE THE LIKE SCREAMED MISSUS POYSER RUNNING TOWARDS THE TABLE WHEN HER EYE HAD FALLEN ON THE BLUE STREAM +2094-142345-0030-338: TOTTY HOWEVER HAD DESCENDED FROM HER CHAIR WITH GREAT SWIFTNESS AND WAS ALREADY IN RETREAT TOWARDS THE DAIRY WITH A SORT OF WADDLING RUN AND AN AMOUNT OF FAT ON THE NAPE OF HER NECK WHICH MADE HER LOOK LIKE THE METAMORPHOSIS OF A WHITE SUCKLING PIG +2094-142345-0031-339: AND SHE WAS VERY FOND OF YOU TOO AUNT RACHEL +2094-142345-0032-340: I OFTEN HEARD HER TALK OF YOU IN THE SAME SORT OF WAY +2094-142345-0033-341: WHEN SHE HAD THAT BAD ILLNESS AND I WAS ONLY ELEVEN YEARS OLD SHE USED TO SAY YOU'LL HAVE A FRIEND ON EARTH IN YOUR AUNT RACHEL IF I'M TAKEN FROM YOU FOR SHE HAS A KIND HEART AND I'M SURE I'VE FOUND IT SO +2094-142345-0034-342: AND THERE'S LINEN IN THE HOUSE AS I COULD WELL SPARE YOU FOR (I'VE->I) GOT LOTS (O->OF) SHEETING AND TABLE CLOTHING AND (TOWELLING AS->TOWELINGS) ISN'T MADE UP +2094-142345-0035-343: BUT NOT MORE THAN WHAT'S IN THE BIBLE (AUNT->AND) SAID DINAH +2094-142345-0036-344: NAY DEAR AUNT YOU NEVER HEARD ME SAY THAT ALL PEOPLE ARE CALLED TO FORSAKE THEIR WORK AND THEIR FAMILIES +2094-142345-0037-345: WE CAN ALL BE SERVANTS OF GOD WHEREVER OUR LOT IS CAST BUT HE GIVES US DIFFERENT SORTS OF WORK ACCORDING AS HE FITS US FOR IT AND CALLS US TO IT +2094-142345-0038-346: I CAN NO MORE HELP SPENDING MY LIFE IN TRYING TO DO WHAT I CAN FOR THE SOULS OF OTHERS (THAN->THEN) YOU COULD HELP RUNNING IF YOU HEARD LITTLE TOTTY CRYING AT THE OTHER END OF THE HOUSE THE VOICE WOULD GO TO YOUR HEART YOU WOULD THINK THE DEAR CHILD WAS IN TROUBLE OR IN DANGER AND YOU COULDN'T REST WITHOUT RUNNING TO HELP HER AND COMFORT HER +2094-142345-0039-347: I'VE STRONG ASSURANCE THAT NO EVIL WILL HAPPEN TO YOU AND MY UNCLE AND THE CHILDREN FROM ANYTHING (I'VE->I HAVE) DONE +2094-142345-0040-348: I DIDN'T PREACH WITHOUT DIRECTION +2094-142345-0041-349: DIRECTION 
+2094-142345-0042-350: I (HANNA->HAD A) COMMON PATIENCE WITH YOU +2094-142345-0043-351: BY THIS TIME THE TWO GENTLEMEN HAD REACHED THE PALINGS AND HAD GOT DOWN FROM THEIR HORSES IT WAS PLAIN THEY MEANT TO COME IN +2094-142345-0044-352: SAID MISTER IRWINE WITH HIS STATELY CORDIALITY +2094-142345-0045-353: OH SIR DON'T MENTION IT SAID MISSUS POYSER +2094-142345-0046-354: I DELIGHT IN YOUR KITCHEN +2094-142345-0047-355: POYSER IS NOT AT HOME IS HE +2094-142345-0048-356: SAID CAPTAIN (DONNITHORNE SEATING->DONNYTHORNE SITTING) HIMSELF WHERE HE COULD SEE ALONG THE SHORT PASSAGE TO THE OPEN DAIRY DOOR +2094-142345-0049-357: NO SIR HE ISN'T HE'S GONE TO (ROSSETER->ROSSITUR) TO SEE MISTER WEST THE FACTOR ABOUT THE WOOL +2094-142345-0050-358: BUT THERE'S FATHER (THE->IN) BARN SIR IF HE'D BE OF ANY USE +2094-142345-0051-359: NO THANK YOU I'LL JUST LOOK AT THE WHELPS AND LEAVE A MESSAGE ABOUT THEM WITH YOUR SHEPHERD +2094-142345-0052-360: I MUST COME ANOTHER DAY AND SEE YOUR HUSBAND I WANT TO HAVE A CONSULTATION WITH HIM ABOUT HORSES +2094-142345-0053-361: FOR IF (HE'S->IS) ANYWHERE ON THE FARM WE CAN SEND FOR HIM IN A MINUTE +2094-142345-0054-362: OH SIR SAID MISSUS POYSER RATHER ALARMED YOU WOULDN'T LIKE IT AT ALL +2094-142345-0055-363: BUT YOU KNOW MORE ABOUT THAT THAN I DO SIR +2094-142345-0056-364: I THINK I SHOULD BE DOING YOU A SERVICE TO TURN YOU OUT OF SUCH A PLACE +2094-142345-0057-365: I (KNOW HIS->KNOWS) FARM IS IN BETTER ORDER THAN ANY OTHER WITHIN TEN MILES OF US AND AS FOR THE KITCHEN HE ADDED SMILING I DON'T BELIEVE THERE'S ONE IN THE KINGDOM TO BEAT IT +2094-142345-0058-366: BY THE BY (I'VE->I HAVE) NEVER SEEN YOUR DAIRY I MUST SEE YOUR (DAIRY->DEARIE) MISSUS POYSER +2094-142345-0059-367: THIS MISSUS POYSER SAID BLUSHING AND BELIEVING THAT THE CAPTAIN WAS REALLY INTERESTED IN HER MILK PANS AND WOULD ADJUST HIS OPINION OF HER TO THE APPEARANCE OF HER DAIRY +2094-142345-0060-368: OH I'VE NO DOUBT IT'S IN CAPITAL ORDER +2300-131720-0000-1816: THE PARIS PLANT LIKE THAT AT THE CRYSTAL PALACE WAS A TEMPORARY EXHIBIT +2300-131720-0001-1817: THE LONDON PLANT WAS LESS TEMPORARY BUT NOT PERMANENT SUPPLYING BEFORE IT WAS TORN OUT NO FEWER THAN THREE THOUSAND LAMPS IN HOTELS CHURCHES STORES AND DWELLINGS IN THE VICINITY OF HOLBORN (VIADUCT->VIA DOC) +2300-131720-0002-1818: THERE (MESSRS->MESSIERS) JOHNSON AND HAMMER PUT INTO PRACTICE MANY OF THE IDEAS NOW STANDARD IN THE ART AND SECURED MUCH USEFUL DATA FOR THE WORK IN NEW YORK OF WHICH THE STORY HAS JUST BEEN TOLD +2300-131720-0003-1819: THE DYNAMO ELECTRIC MACHINE THOUGH SMALL WAS ROBUST FOR UNDER ALL THE VARYING SPEEDS OF WATER POWER AND THE VICISSITUDES OF THE PLANT TO WHICH IT BELONGED IT CONTINUED IN ACTIVE USE UNTIL EIGHTEEN NINETY NINE SEVENTEEN YEARS +2300-131720-0004-1820: OWING TO HIS INSISTENCE ON LOW PRESSURE DIRECT CURRENT FOR USE IN DENSELY POPULATED DISTRICTS AS THE ONLY SAFE AND TRULY UNIVERSAL PROFITABLE WAY OF DELIVERING ELECTRICAL ENERGY TO THE CONSUMERS EDISON HAS BEEN FREQUENTLY SPOKEN OF AS AN OPPONENT OF THE ALTERNATING CURRENT +2300-131720-0005-1821: WHY IF WE ERECT A STATION AT THE FALLS IT IS A GREAT ECONOMY TO GET IT UP TO THE CITY +2300-131720-0006-1822: THERE SEEMS NO GOOD REASON FOR BELIEVING THAT IT WILL CHANGE +2300-131720-0007-1823: BROAD AS THE PRAIRIES AND FREE IN THOUGHT AS THE WINDS THAT (SWEEP->SWEPT) THEM HE IS (IDIOSYNCRATICALLY->IDIOS AND CRADICALLY) OPPOSED TO LOOSE AND WASTEFUL METHODS TO PLANS OF EMPIRE THAT NEGLECT THE POOR AT THE GATE +2300-131720-0008-1824: EVERYTHING HE HAS DONE HAS BEEN AIMED AT 
THE CONSERVATION OF ENERGY THE CONTRACTION OF SPACE THE INTENSIFICATION OF CULTURE +2300-131720-0009-1825: FOR SOME YEARS IT WAS NOT FOUND FEASIBLE TO OPERATE MOTORS ON ALTERNATING CURRENT CIRCUITS AND THAT REASON WAS OFTEN URGED AGAINST (IT->ITS) SERIOUSLY +2300-131720-0010-1826: IT COULD NOT BE USED FOR ELECTROPLATING OR DEPOSITION NOR COULD IT CHARGE STORAGE BATTERIES ALL OF WHICH ARE EASILY WITHIN THE ABILITY OF THE DIRECT CURRENT +2300-131720-0011-1827: BUT WHEN IT CAME TO BE A QUESTION OF LIGHTING A SCATTERED SUBURB A GROUP OF DWELLINGS ON THE OUTSKIRTS A REMOTE COUNTRY RESIDENCE OR A FARM HOUSE THE ALTERNATING CURRENT IN ALL ELEMENTS SAVE ITS DANGER WAS AND IS IDEAL +2300-131720-0012-1828: EDISON WAS INTOLERANT OF SHAM AND (SHODDY->SHODY) AND NOTHING WOULD SATISFY HIM THAT COULD NOT STAND CROSS EXAMINATION BY MICROSCOPE TEST TUBE AND GALVANOMETER +2300-131720-0013-1829: UNLESS HE COULD SECURE AN ENGINE OF SMOOTHER RUNNING AND MORE EXACTLY (GOVERNED->GOVERN) AND REGULATED THAN THOSE AVAILABLE FOR HIS DYNAMO AND LAMP EDISON REALIZED THAT HE WOULD FIND IT ALMOST IMPOSSIBLE TO GIVE A STEADY LIGHT +2300-131720-0014-1830: MISTER EDISON WAS A LEADER FAR AHEAD OF THE TIME +2300-131720-0015-1831: HE OBTAINED THE DESIRED SPEED AND (LOAD->LOWED) WITH A FRICTION (BRAKE->BREAK) ALSO REGULATOR OF SPEED BUT WAITED FOR AN INDICATOR TO VERIFY IT +2300-131720-0016-1832: THEN AGAIN THERE WAS NO KNOWN WAY TO (LUBRICATE->LUBRICADE) AN ENGINE FOR CONTINUOUS RUNNING AND MISTER EDISON INFORMED ME THAT AS A MARINE ENGINE STARTED BEFORE THE SHIP LEFT NEW YORK AND CONTINUED RUNNING UNTIL IT REACHED ITS HOME PORT SO AN ENGINE FOR HIS PURPOSES MUST PRODUCE LIGHT AT ALL TIMES +2300-131720-0017-1833: EDISON HAD INSTALLED HIS HISTORIC FIRST GREAT CENTRAL STATION SYSTEM IN NEW YORK ON THE MULTIPLE ARC SYSTEM COVERED BY HIS FEEDER AND MAIN INVENTION WHICH RESULTED IN A NOTABLE SAVING IN THE COST OF CONDUCTORS AS AGAINST A STRAIGHT TWO WIRE SYSTEM THROUGHOUT OF THE TREE KIND +2300-131720-0018-1834: HE SOON FORESAW THAT STILL GREATER ECONOMY WOULD BE NECESSARY FOR COMMERCIAL SUCCESS NOT ALONE FOR THE LARGER TERRITORY OPENING BUT FOR THE COMPACT (DISTRICTS->DISTRICT) OF LARGE CITIES +2300-131720-0019-1835: THE STRONG POSITION HELD BY THE EDISON SYSTEM UNDER THE STRENUOUS COMPETITION THAT WAS ALREADY SPRINGING UP WAS ENORMOUSLY IMPROVED BY THE INTRODUCTION OF THE THREE WIRE SYSTEM AND IT GAVE AN IMMEDIATE IMPETUS TO INCANDESCENT LIGHTING +2300-131720-0020-1836: IT WAS SPECIALLY SUITED FOR A TRIAL PLANT ALSO IN THE EARLY DAYS WHEN A YIELD OF SIX OR EIGHT LAMPS TO THE HORSE (POWER->BOWER) WAS CONSIDERED SUBJECT FOR CONGRATULATION +2300-131720-0021-1837: THE STREET CONDUCTORS WERE OF THE OVERHEAD POLE LINE CONSTRUCTION AND WERE INSTALLED BY THE CONSTRUCTION COMPANY THAT HAD BEEN ORGANIZED BY EDISON TO BUILD (AND->AN) EQUIP CENTRAL STATIONS +2300-131720-0022-1838: MEANWHILE HE HAD CALLED UPON ME TO MAKE A REPORT OF THE THREE WIRE SYSTEM KNOWN IN ENGLAND AS THE HOPKINSON BOTH DOCTOR JOHN HOPKINSON AND MISTER EDISON BEING INDEPENDENT (INVENTORS->IN VENORS) AT PRACTICALLY THE SAME TIME +2300-131720-0023-1839: I THINK HE WAS PERHAPS MORE APPRECIATIVE THAN I WAS OF THE DISCIPLINE OF THE EDISON CONSTRUCTION DEPARTMENT AND THOUGHT IT WOULD BE WELL FOR US TO WAIT UNTIL THE MORNING OF THE FOURTH BEFORE WE STARTED UP +2300-131720-0024-1840: BUT THE PLANT RAN AND IT WAS THE FIRST THREE WIRE STATION IN THIS COUNTRY +2300-131720-0025-1841: THEY WERE LATER USED AS RESERVE MACHINES AND FINALLY WITH THE ENGINE RETIRED FROM SERVICE AS PART 
OF THE COLLECTION OF EDISONIA BUT THEY REMAIN IN PRACTICALLY AS GOOD CONDITION AS (WHEN->ONE) INSTALLED IN EIGHTEEN EIGHTY THREE +2300-131720-0026-1842: THE (ARC->ARK) LAMP INSTALLED OUTSIDE A CUSTOMER'S PREMISES OR IN A CIRCUIT FOR PUBLIC STREET LIGHTING BURNED SO MANY HOURS NIGHTLY SO MANY NIGHTS IN THE MONTH AND WAS PAID FOR AT THAT RATE SUBJECT TO REBATE FOR HOURS WHEN THE LAMP MIGHT BE OUT THROUGH ACCIDENT +2300-131720-0027-1843: EDISON HELD THAT THE ELECTRICITY SOLD MUST BE MEASURED JUST LIKE GAS OR WATER AND HE PROCEEDED TO DEVELOP A METER +2300-131720-0028-1844: THERE WAS INFINITE SCEPTICISM AROUND HIM ON THE SUBJECT AND WHILE OTHER INVENTORS WERE ALSO GIVING THE SUBJECT THEIR THOUGHT THE PUBLIC TOOK IT FOR GRANTED THAT ANYTHING SO UTTERLY INTANGIBLE AS ELECTRICITY THAT COULD NOT BE SEEN OR WEIGHED AND ONLY GAVE SECONDARY EVIDENCE OF ITSELF AT THE EXACT POINT OF USE COULD NOT BE BROUGHT TO ACCURATE REGISTRATION +2300-131720-0029-1845: HENCE THE EDISON ELECTROLYTIC METER IS NO LONGER USED DESPITE ITS EXCELLENT QUALITIES +2300-131720-0030-1846: THE (PRINCIPLE->PRINCIPAL) EMPLOYED IN THE EDISON ELECTROLYTIC METER IS THAT WHICH EXEMPLIFIES THE POWER OF ELECTRICITY TO DECOMPOSE A CHEMICAL SUBSTANCE +2300-131720-0031-1847: ASSOCIATED WITH THIS SIMPLE FORM OF APPARATUS WERE VARIOUS INGENIOUS DETAILS AND REFINEMENTS TO SECURE REGULARITY OF OPERATION FREEDOM FROM INACCURACY AND IMMUNITY FROM SUCH TAMPERING AS WOULD PERMIT THEFT OF CURRENT OR DAMAGE +2300-131720-0032-1848: THE STANDARD EDISON METER PRACTICE WAS TO REMOVE THE CELLS ONCE A MONTH TO THE METER ROOM OF THE CENTRAL STATION COMPANY FOR EXAMINATION ANOTHER SET BEING SUBSTITUTED +2300-131720-0033-1849: IN DECEMBER EIGHTEEN EIGHTY EIGHT MISTER W J JENKS READ AN INTERESTING PAPER BEFORE THE AMERICAN INSTITUTE OF ELECTRICAL ENGINEERS ON THE SIX YEARS OF PRACTICAL EXPERIENCE HAD UP TO THAT TIME WITH THE (METER->METRE) THEN MORE GENERALLY IN USE THAN ANY OTHER +2300-131720-0034-1850: THE OTHERS HAVING BEEN IN OPERATION TOO SHORT A TIME TO SHOW DEFINITE RESULTS ALTHOUGH THEY ALSO WENT QUICKLY TO A DIVIDEND BASIS +2300-131720-0035-1851: IN THIS CONNECTION IT SHOULD BE MENTIONED THAT THE ASSOCIATION OF EDISON ILLUMINATING COMPANIES IN THE SAME YEAR ADOPTED RESOLUTIONS UNANIMOUSLY TO THE EFFECT THAT THE EDISON METER WAS ACCURATE AND THAT ITS USE WAS NOT EXPENSIVE FOR STATIONS ABOVE ONE THOUSAND LIGHTS AND THAT THE BEST FINANCIAL RESULTS WERE INVARIABLY SECURED IN A STATION SELLING CURRENT BY (METER->METRE) +2300-131720-0036-1852: THE (METER->METRE) CONTINUED IN GENERAL SERVICE DURING EIGHTEEN NINETY NINE AND PROBABLY UP TO THE CLOSE OF THE CENTURY +2300-131720-0037-1853: HE WEIGHED AND (REWEIGHED->REWAIED) THE (METER->METRE) PLATES AND PURSUED EVERY LINE OF INVESTIGATION IMAGINABLE BUT ALL IN VAIN +2300-131720-0038-1854: HE FELT HE WAS UP AGAINST IT AND THAT PERHAPS ANOTHER KIND OF A JOB WOULD SUIT HIM BETTER +2300-131720-0039-1855: THE PROBLEM WAS SOLVED +2300-131720-0040-1856: WE WERE MORE INTERESTED IN THE TECHNICAL CONDITION OF THE STATION THAN IN THE COMMERCIAL PART +2300-131720-0041-1857: WE HAD (METERS->METRES) IN WHICH THERE WERE TWO BOTTLES OF LIQUID +237-126133-0000-2407: HERE SHE WOULD STAY COMFORTED AND (SOOTHED->SOOTHE) AMONG THE LOVELY PLANTS AND RICH EXOTICS REJOICING THE HEART OF OLD TURNER THE GARDENER WHO SINCE POLLY'S FIRST RAPTUROUS ENTRANCE HAD TAKEN HER INTO HIS GOOD GRACES FOR ALL TIME +237-126133-0001-2408: EVERY CHANCE SHE COULD STEAL AFTER PRACTICE HOURS WERE OVER AND AFTER THE CLAMOROUS DEMANDS OF THE BOYS UPON HER 
TIME WERE FULLY SATISFIED WAS SEIZED TO FLY ON THE WINGS OF THE WIND TO THE FLOWERS +237-126133-0002-2409: THEN DEAR SAID MISSUS WHITNEY YOU MUST BE KINDER TO HER THAN EVER THINK WHAT IT WOULD BE FOR ONE OF YOU TO BE AWAY FROM HOME EVEN AMONG FRIENDS +237-126133-0003-2410: SOMEHOW OF ALL THE DAYS WHEN THE HOME FEELING WAS THE STRONGEST THIS DAY IT SEEMED AS IF SHE COULD BEAR IT NO LONGER +237-126133-0004-2411: IF SHE COULD ONLY SEE PHRONSIE FOR JUST ONE MOMENT +237-126133-0005-2412: OH SHE'S ALWAYS AT THE PIANO SAID VAN SHE MUST BE THERE NOW SOMEWHERE AND THEN SOMEBODY LAUGHED +237-126133-0006-2413: AT THIS THE BUNDLE OPENED SUDDENLY AND OUT POPPED PHRONSIE +237-126133-0007-2414: BUT POLLY COULDN'T SPEAK AND IF JASPER HADN'T CAUGHT HER JUST IN TIME SHE WOULD HAVE TUMBLED OVER BACKWARD FROM THE STOOL PHRONSIE AND ALL +237-126133-0008-2415: ASKED PHRONSIE WITH HER LITTLE FACE CLOSE TO POLLY'S OWN +237-126133-0009-2416: NOW YOU'LL STAY CRIED VAN SAY POLLY WON'T YOU +237-126133-0010-2417: OH YOU ARE THE DEAREST AND BEST MISTER KING I EVER SAW BUT HOW DID YOU MAKE MAMMY LET HER COME +237-126133-0011-2418: ISN'T HE SPLENDID CRIED JASPER (IN->AN) INTENSE PRIDE SWELLING UP FATHER KNEW HOW TO DO IT +237-126133-0012-2419: THERE THERE HE SAID SOOTHINGLY PATTING HER BROWN FUZZY HEAD +237-126133-0013-2420: I KNOW GASPED POLLY CONTROLLING HER SOBS I WON'T ONLY I CAN'T THANK YOU +237-126133-0014-2421: ASKED PHRONSIE IN INTENSE INTEREST SLIPPING DOWN OUT OF POLLY'S ARMS AND CROWDING UP CLOSE TO JASPER'S SIDE +237-126133-0015-2422: YES ALL ALONE BY HIMSELF ASSERTED JASPER VEHEMENTLY AND WINKING FURIOUSLY TO THE OTHERS TO STOP THEIR LAUGHING HE DID NOW TRULY PHRONSIE +237-126133-0016-2423: OH NO (JASPER->JAPSER) I MUST GO BY MY VERY OWN SELF +237-126133-0017-2424: THERE JAP YOU'VE CAUGHT IT LAUGHED PERCY WHILE THE OTHERS SCREAMED AT THE SIGHT OF JASPER'S FACE +237-126133-0018-2425: DON'T MIND IT POLLY WHISPERED JASPER TWASN'T HER FAULT +237-126133-0019-2426: DEAR ME EJACULATED THE OLD GENTLEMAN IN THE UTMOST AMAZEMENT AND SUCH A TIME AS I'VE HAD TO GET HER HERE TOO +237-126133-0020-2427: HOW DID HER MOTHER EVER LET HER GO +237-126133-0021-2428: SHE ASKED IMPULSIVELY I DIDN'T BELIEVE YOU COULD PERSUADE HER FATHER +237-126133-0022-2429: I DIDN'T HAVE ANY FEARS IF I WORKED IT RIGHTLY SAID THE OLD GENTLEMAN COMPLACENTLY +237-126133-0023-2430: HE CRIED IN HIGH DUDGEON JUST AS IF HE OWNED THE WHOLE OF THE PEPPERS AND COULD DISPOSE OF THEM ALL TO SUIT HIS FANCY +237-126133-0024-2431: AND THE OLD GENTLEMAN WAS SO DELIGHTED WITH HIS SUCCESS THAT HE HAD TO BURST OUT INTO A SERIES OF SHORT HAPPY BITS OF LAUGHTER THAT OCCUPIED QUITE A SPACE OF TIME +237-126133-0025-2432: AT LAST HE CAME OUT OF THEM AND WIPED HIS FACE VIGOROUSLY +237-134493-0000-2388: IT IS SIXTEEN YEARS SINCE JOHN (BERGSON->BERKSON) DIED +237-134493-0001-2389: HIS WIFE NOW LIES BESIDE HIM AND THE WHITE SHAFT THAT MARKS THEIR GRAVES GLEAMS ACROSS THE WHEAT FIELDS +237-134493-0002-2390: FROM THE NORWEGIAN GRAVEYARD ONE LOOKS OUT OVER A VAST (CHECKER BOARD->CHECKERBOARD) MARKED OFF IN SQUARES OF WHEAT AND CORN LIGHT AND DARK (DARK->*) AND LIGHT +237-134493-0003-2391: FROM THE GRAVEYARD GATE ONE CAN COUNT A DOZEN (GAYLY->GAILY) PAINTED FARMHOUSES THE GILDED WEATHER (VANES->VEINS) ON THE BIG RED BARNS WINK AT EACH OTHER ACROSS THE GREEN AND BROWN AND YELLOW FIELDS +237-134493-0004-2392: THE AIR AND THE EARTH ARE CURIOUSLY MATED AND INTERMINGLED AS IF THE ONE WERE THE BREATH OF THE OTHER +237-134493-0005-2393: HE WAS A SPLENDID FIGURE OF A BOY TALL AND 
STRAIGHT AS A YOUNG PINE TREE WITH A HANDSOME HEAD AND STORMY GRAY EYES DEEPLY SET UNDER A SERIOUS BROW +237-134493-0006-2394: THAT'S NOT MUCH OF A JOB FOR AN ATHLETE HERE I'VE BEEN TO TOWN AND BACK +237-134493-0007-2395: ALEXANDRA (LETS->THAT'S) YOU SLEEP LATE +237-134493-0008-2396: SHE GATHERED UP HER REINS +237-134493-0009-2397: PLEASE WAIT FOR ME MARIE (EMIL->AMYL) COAXED +237-134493-0010-2398: I NEVER SEE (LOU'S->LOOSE) SCYTHE OVER HERE +237-134493-0011-2399: HOW BROWN YOU'VE GOT SINCE YOU CAME HOME I WISH I HAD AN (ATHLETE->ADETE) TO MOW MY ORCHARD +237-134493-0012-2400: I GET WET TO MY KNEES WHEN I GO DOWN TO (PICK->PIC) CHERRIES +237-134493-0013-2401: INDEED HE HAD LOOKED AWAY WITH THE PURPOSE OF NOT SEEING IT +237-134493-0014-2402: THEY THINK (YOU'RE->YOU ARE) PROUD BECAUSE YOU'VE BEEN AWAY TO SCHOOL OR SOMETHING +237-134493-0015-2403: THERE WAS SOMETHING INDIVIDUAL ABOUT THE GREAT FARM A MOST UNUSUAL TRIMNESS AND CARE FOR DETAIL +237-134493-0016-2404: ON EITHER SIDE OF THE ROAD FOR A MILE BEFORE YOU REACHED THE FOOT OF THE HILL STOOD TALL (OSAGE ORANGE HEDGES->O SAGE ORANGES) THEIR GLOSSY GREEN MARKING OFF THE YELLOW FIELDS +237-134493-0017-2405: ANY ONE THEREABOUTS WOULD HAVE TOLD YOU THAT THIS WAS ONE OF THE RICHEST FARMS ON THE DIVIDE AND THAT THE FARMER WAS A WOMAN ALEXANDRA (BERGSON->BERGIN) +237-134493-0018-2406: THERE IS EVEN A WHITE ROW OF BEEHIVES IN THE ORCHARD UNDER THE WALNUT TREES +237-134500-0000-2345: FRANK READ ENGLISH SLOWLY AND THE MORE HE READ ABOUT THIS DIVORCE CASE THE ANGRIER HE GREW +237-134500-0001-2346: MARIE SIGHED +237-134500-0002-2347: A (BRISK->BRACE) WIND HAD COME UP AND WAS DRIVING PUFFY WHITE CLOUDS ACROSS THE SKY +237-134500-0003-2348: THE (ORCHARD->ARCHER) WAS SPARKLING AND RIPPLING IN THE SUN +237-134500-0004-2349: THAT INVITATION DECIDED HER +237-134500-0005-2350: OH BUT (I'M->I AM) GLAD TO GET THIS PLACE MOWED +237-134500-0006-2351: JUST SMELL THE WILD ROSES THEY ARE ALWAYS SO SPICY AFTER A RAIN +237-134500-0007-2352: WE NEVER HAD SO MANY OF THEM IN HERE BEFORE +237-134500-0008-2353: I SUPPOSE IT'S THE WET SEASON WILL YOU HAVE TO CUT THEM TOO +237-134500-0009-2354: I SUPPOSE THAT'S THE WET SEASON TOO THEN +237-134500-0010-2355: IT'S EXCITING TO SEE EVERYTHING GROWING SO FAST AND TO GET THE GRASS CUT +237-134500-0011-2356: AREN'T YOU SPLASHED LOOK AT THE SPIDER WEBS ALL OVER THE GRASS +237-134500-0012-2357: IN A FEW MOMENTS HE HEARD THE CHERRIES DROPPING SMARTLY INTO THE PAIL AND HE BEGAN TO SWING HIS SCYTHE WITH THAT LONG EVEN STROKE THAT FEW AMERICAN BOYS EVER LEARN +237-134500-0013-2358: MARIE PICKED CHERRIES AND SANG SOFTLY TO HERSELF STRIPPING ONE GLITTERING (BRANCH->RANCH) AFTER ANOTHER SHIVERING WHEN SHE (CAUGHT->THOUGHT) A SHOWER OF RAINDROPS ON HER NECK AND HAIR +237-134500-0014-2359: AND (EMIL->AMYL) MOWED HIS WAY SLOWLY DOWN TOWARD THE CHERRY TREES +237-134500-0015-2360: THAT SUMMER THE RAINS HAD BEEN SO MANY AND OPPORTUNE THAT IT WAS ALMOST MORE THAN (SHABATA->SHEBATA) AND HIS MAN COULD DO TO KEEP UP WITH THE CORN THE ORCHARD WAS A NEGLECTED WILDERNESS +237-134500-0016-2361: I DON'T KNOW ALL OF THEM BUT I KNOW LINDENS ARE +237-134500-0017-2362: IF I FEEL THAT WAY I FEEL THAT WAY +237-134500-0018-2363: HE REACHED UP AMONG THE BRANCHES AND BEGAN TO PICK THE SWEET INSIPID FRUIT LONG IVORY COLORED BERRIES TIPPED WITH FAINT PINK LIKE WHITE CORAL THAT FALL TO THE GROUND UNHEEDED ALL SUMMER THROUGH +237-134500-0019-2364: HE DROPPED A HANDFUL INTO HER LAP +237-134500-0020-2365: YES DON'T YOU +237-134500-0021-2366: OH EVER SO MUCH ONLY HE 
SEEMS KIND OF (STAID AND->STAY AT IN) SCHOOL TEACHERY +237-134500-0022-2367: WHEN SHE USED TO TELL ME ABOUT HIM I ALWAYS WONDERED WHETHER SHE WASN'T A LITTLE IN LOVE WITH HIM +237-134500-0023-2368: IT WOULD SERVE YOU ALL RIGHT IF SHE WALKED OFF WITH (CARL->KARL) +237-134500-0024-2369: I LIKE TO TALK TO (CARL->KARL) ABOUT NEW YORK AND WHAT A FELLOW CAN DO THERE +237-134500-0025-2370: OH (EMIL->AMY ILL) +237-134500-0026-2371: SURELY YOU ARE NOT THINKING OF GOING OFF THERE +237-134500-0027-2372: MARIE'S FACE FELL UNDER HIS BROODING GAZE +237-134500-0028-2373: (I'M->I AM) SURE (ALEXANDRA HOPES->ALEXANDER HELPS) YOU WILL STAY ON HERE SHE MURMURED +237-134500-0029-2374: I DON'T WANT TO STAND AROUND AND LOOK ON +237-134500-0030-2375: I WANT TO BE DOING SOMETHING ON MY OWN ACCOUNT +237-134500-0031-2376: SOMETIMES I DON'T WANT TO DO ANYTHING AT ALL AND SOMETIMES I WANT TO PULL THE FOUR CORNERS OF THE DIVIDE TOGETHER HE THREW OUT HIS ARM AND BROUGHT IT BACK WITH A JERK SO LIKE A (TABLE CLOTH->TABLECLOTH) +237-134500-0032-2377: I GET TIRED OF SEEING (MEN->MAN) AND HORSES GOING UP AND DOWN UP AND DOWN +237-134500-0033-2378: I WISH YOU WEREN'T SO RESTLESS AND DIDN'T GET SO WORKED UP OVER THINGS SHE SAID SADLY +237-134500-0034-2379: THANK YOU HE RETURNED SHORTLY +237-134500-0035-2380: AND YOU NEVER USED TO BE CROSS TO ME +237-134500-0036-2381: I CAN'T PLAY WITH YOU LIKE A LITTLE BOY ANY MORE HE SAID SLOWLY THAT'S WHAT YOU MISS MARIE +237-134500-0037-2382: BUT (EMIL->AM ILL) IF I UNDERSTAND (THEN->IN) ALL OUR GOOD TIMES ARE OVER WE CAN NEVER DO NICE THINGS TOGETHER ANY MORE +237-134500-0038-2383: AND ANYHOW THERE'S NOTHING TO UNDERSTAND +237-134500-0039-2384: THAT WON'T LAST IT WILL GO AWAY AND THINGS WILL BE JUST AS THEY USED TO +237-134500-0040-2385: I PRAY FOR YOU BUT THAT'S NOT THE SAME AS IF YOU PRAYED YOURSELF +237-134500-0041-2386: I CAN'T PRAY TO HAVE THE THINGS I WANT HE SAID SLOWLY AND I WON'T PRAY NOT TO HAVE THEM NOT IF I'M DAMNED FOR IT +237-134500-0042-2387: THEN ALL OUR GOOD TIMES ARE OVER +260-123286-0000-200: SATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL ROUND NO LAND IN SIGHT +260-123286-0001-201: THE HORIZON SEEMS EXTREMELY DISTANT +260-123286-0002-202: ALL MY DANGER AND SUFFERINGS WERE NEEDED TO STRIKE A SPARK OF HUMAN FEELING OUT OF HIM BUT NOW THAT I AM WELL HIS NATURE HAS RESUMED ITS SWAY +260-123286-0003-203: YOU SEEM ANXIOUS MY UNCLE I SAID SEEING HIM CONTINUALLY WITH HIS GLASS TO HIS EYE ANXIOUS +260-123286-0004-204: ONE MIGHT BE WITH LESS REASON THAN NOW +260-123286-0005-205: I AM NOT COMPLAINING THAT THE RATE IS SLOW BUT THAT THE SEA IS SO WIDE +260-123286-0006-206: WE ARE LOSING TIME AND THE FACT IS I HAVE NOT COME ALL THIS WAY TO TAKE A LITTLE SAIL UPON A POND ON A RAFT +260-123286-0007-207: HE CALLED THIS SEA (A POND->UPON) AND OUR LONG VOYAGE TAKING A LITTLE SAIL +260-123286-0008-208: THEREFORE DON'T TALK TO ME ABOUT VIEWS AND PROSPECTS +260-123286-0009-209: I TAKE THIS AS MY ANSWER AND I LEAVE THE PROFESSOR TO BITE HIS LIPS WITH IMPATIENCE +260-123286-0010-210: SUNDAY AUGUST SIXTEENTH +260-123286-0011-211: NOTHING NEW (WEATHER->WHETHER) UNCHANGED THE WIND FRESHENS +260-123286-0012-212: BUT THERE SEEMED NO REASON (TO->OF) FEAR +260-123286-0013-213: THE SHADOW OF THE RAFT WAS CLEARLY OUTLINED UPON THE SURFACE OF THE WAVES +260-123286-0014-214: TRULY (THIS->THE) SEA IS OF INFINITE WIDTH +260-123286-0015-215: IT MUST BE AS WIDE AS THE MEDITERRANEAN OR THE ATLANTIC AND WHY NOT +260-123286-0016-216: THESE THOUGHTS AGITATED ME ALL DAY AND MY IMAGINATION SCARCELY CALMED DOWN 
AFTER SEVERAL HOURS SLEEP +260-123286-0017-217: I SHUDDER AS I RECALL THESE MONSTERS TO MY REMEMBRANCE +260-123286-0018-218: I SAW AT THE HAMBURG MUSEUM THE SKELETON OF ONE OF THESE CREATURES THIRTY FEET IN LENGTH +260-123286-0019-219: I SUPPOSE PROFESSOR LIEDENBROCK WAS OF MY OPINION TOO AND EVEN SHARED MY FEARS FOR AFTER HAVING EXAMINED THE (PICK->PIG) HIS EYES TRAVERSED THE OCEAN FROM SIDE TO SIDE +260-123286-0020-220: TUESDAY AUGUST EIGHTEENTH +260-123286-0021-221: DURING HIS WATCH I SLEPT +260-123286-0022-222: TWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE ME +260-123286-0023-223: THE RAFT WAS HEAVED UP ON A WATERY MOUNTAIN AND PITCHED DOWN AGAIN AT A DISTANCE OF TWENTY FATHOMS +260-123286-0024-224: THERE'S A (WHALE->WAIL) A (WHALE->WELL) CRIED THE PROFESSOR +260-123286-0025-225: (FLIGHT->FIGHT) WAS OUT OF THE QUESTION NOW THE REPTILES ROSE THEY WHEELED AROUND OUR LITTLE RAFT WITH A RAPIDITY GREATER THAN THAT OF EXPRESS TRAINS +260-123286-0026-226: TWO MONSTERS (ONLY->OMER) WERE CREATING ALL THIS COMMOTION AND BEFORE MY EYES (ARE->OUR) TWO REPTILES OF THE PRIMITIVE WORLD +260-123286-0027-227: I CAN DISTINGUISH THE EYE OF THE (ICHTHYOSAURUS->ITHUS) GLOWING LIKE A RED HOT (COAL->CO) AND AS LARGE AS A MAN'S HEAD +260-123286-0028-228: ITS JAW IS ENORMOUS AND ACCORDING TO NATURALISTS IT IS ARMED WITH NO LESS THAN ONE HUNDRED AND EIGHTY TWO TEETH +260-123286-0029-229: THOSE HUGE CREATURES ATTACKED EACH OTHER WITH THE GREATEST ANIMOSITY +260-123286-0030-230: SUDDENLY THE (ICHTHYOSAURUS->IKESORIS) AND THE PLESIOSAURUS DISAPPEAR BELOW LEAVING A (WHIRLPOOL->WAR POOL) EDDYING IN THE WATER +260-123286-0031-231: AS FOR THE (ICHTHYOSAURUS->INHEOSORIS) HAS HE RETURNED TO HIS SUBMARINE CAVERN +260-123288-0000-232: THE ROARINGS BECOME LOST IN THE DISTANCE +260-123288-0001-233: THE WEATHER IF WE MAY USE (THAT->THE) TERM WILL CHANGE BEFORE LONG +260-123288-0002-234: THE ATMOSPHERE IS CHARGED WITH (VAPOURS->VAPORS) PERVADED WITH THE ELECTRICITY GENERATED BY THE EVAPORATION OF (SALINE->SAILING) WATERS +260-123288-0003-235: THE ELECTRIC LIGHT CAN SCARCELY PENETRATE (THROUGH->TO) THE DENSE CURTAIN WHICH (HAS->IS) DROPPED OVER THE THEATRE ON WHICH THE BATTLE OF THE ELEMENTS IS ABOUT TO BE WAGED +260-123288-0004-236: THE AIR IS HEAVY THE SEA IS CALM +260-123288-0005-237: FROM TIME TO TIME A FLEECY TUFT OF (MIST->MISTS) WITH YET SOME GLEAMING LIGHT LEFT UPON IT DROPS DOWN UPON THE DENSE FLOOR OF GREY AND LOSES ITSELF IN THE OPAQUE AND IMPENETRABLE MASS +260-123288-0006-238: THE ATMOSPHERE (IS->AS) EVIDENTLY CHARGED (AND->IN) SURCHARGED WITH ELECTRICITY +260-123288-0007-239: THE WIND NEVER (LULLS->LOLLS) BUT TO ACQUIRE INCREASED STRENGTH THE VAST BANK OF HEAVY CLOUDS IS A HUGE RESERVOIR OF FEARFUL WINDY GUSTS AND RUSHING STORMS +260-123288-0008-240: THERE'S A HEAVY STORM COMING ON I CRIED POINTING TOWARDS THE HORIZON +260-123288-0009-241: THOSE CLOUDS SEEM AS IF THEY WERE GOING TO CRUSH THE SEA +260-123288-0010-242: ON THE MAST ALREADY I SEE THE LIGHT PLAY OF A (LAMBENT->LAMMA) SAINT (ELMO'S->ABLE'S) FIRE THE OUTSTRETCHED SAIL CATCHES NOT A BREATH OF WIND AND HANGS LIKE A SHEET OF LEAD +260-123288-0011-243: BUT IF WE HAVE NOW CEASED TO ADVANCE WHY DO WE YET LEAVE THAT SAIL LOOSE WHICH AT THE FIRST SHOCK OF (THE->A) TEMPEST MAY CAPSIZE US IN A MOMENT +260-123288-0012-244: THAT WILL BE (*->THE) SAFEST NO NO NEVER +260-123288-0013-245: (THE->THAT) PILED UP (VAPOURS CONDENSE->VAPORS CONTENSED) INTO WATER AND THE AIR PUT INTO VIOLENT ACTION TO SUPPLY THE VACUUM LEFT BY THE CONDENSATION OF THE (MISTS->MIST) ROUSES 
ITSELF INTO A WHIRLWIND +260-123288-0014-246: HANS STIRS NOT +260-123288-0015-247: FROM THE UNDER SURFACE OF THE CLOUDS THERE ARE CONTINUAL (EMISSIONS->ADMISSIONS) OF LURID LIGHT ELECTRIC MATTER IS IN CONTINUAL EVOLUTION FROM THEIR COMPONENT MOLECULES THE GASEOUS ELEMENTS OF THE AIR NEED TO BE SLAKED WITH MOISTURE FOR INNUMERABLE COLUMNS OF WATER RUSH UPWARDS INTO THE AIR AND FALL BACK AGAIN IN WHITE FOAM +260-123288-0016-248: I REFER TO THE THERMOMETER IT INDICATES THE FIGURE IS OBLITERATED +260-123288-0017-249: IS THE (ATMOSPHERIC->ATMOSPHERE) CONDITION HAVING ONCE REACHED (THIS DENSITY->OSTENSITY) TO BECOME FINAL +260-123288-0018-250: THE RAFT BEARS ON STILL TO THE SOUTH EAST +260-123288-0019-251: AT NOON THE VIOLENCE OF THE STORM REDOUBLES +260-123288-0020-252: EACH OF US IS LASHED TO SOME PART OF THE RAFT +260-123288-0021-253: THE WAVES RISE ABOVE OUR HEADS +260-123288-0022-254: THEY (SEEM->SEEMED) TO BE WE ARE LOST BUT I AM NOT SURE +260-123288-0023-255: HE NODS HIS CONSENT +260-123288-0024-256: THE (FIREBALL->FIRE BALL) HALF OF IT WHITE HALF AZURE BLUE AND THE SIZE OF A TEN INCH SHELL MOVED SLOWLY ABOUT THE RAFT BUT REVOLVING ON ITS OWN AXIS WITH ASTONISHING VELOCITY AS IF (WHIPPED->WHIP) ROUND BY THE FORCE OF THE WHIRLWIND +260-123288-0025-257: HERE IT COMES THERE IT GLIDES NOW IT IS UP THE RAGGED STUMP OF THE MAST THENCE IT LIGHTLY LEAPS ON THE PROVISION BAG DESCENDS WITH A LIGHT BOUND AND JUST SKIMS THE POWDER MAGAZINE HORRIBLE +260-123288-0026-258: WE SHALL BE BLOWN UP BUT NO THE DAZZLING DISK OF MYSTERIOUS LIGHT NIMBLY LEAPS ASIDE IT APPROACHES HANS WHO FIXES HIS BLUE EYE UPON IT STEADILY IT THREATENS THE HEAD OF MY UNCLE WHO FALLS UPON HIS KNEES WITH HIS HEAD DOWN TO AVOID IT +260-123288-0027-259: A SUFFOCATING SMELL OF NITROGEN FILLS THE AIR IT ENTERS THE THROAT IT FILLS THE LUNGS +260-123288-0028-260: WE SUFFER STIFLING PAINS +260-123440-0000-179: AND HOW ODD THE DIRECTIONS WILL LOOK +260-123440-0001-180: POOR ALICE +260-123440-0002-181: IT WAS THE WHITE RABBIT RETURNING SPLENDIDLY DRESSED WITH A PAIR OF WHITE KID GLOVES IN ONE HAND AND A LARGE FAN IN THE OTHER HE CAME TROTTING ALONG IN A GREAT HURRY MUTTERING TO HIMSELF AS HE CAME OH THE DUCHESS THE DUCHESS +260-123440-0003-182: OH WON'T SHE BE SAVAGE IF I'VE KEPT HER WAITING +260-123440-0004-183: ALICE TOOK UP THE FAN AND GLOVES AND AS THE HALL WAS VERY HOT SHE KEPT FANNING HERSELF ALL THE TIME SHE WENT ON TALKING DEAR DEAR HOW QUEER EVERYTHING IS TO DAY +260-123440-0005-184: AND YESTERDAY THINGS WENT ON JUST AS USUAL +260-123440-0006-185: I WONDER IF I'VE BEEN CHANGED IN THE NIGHT +260-123440-0007-186: I ALMOST THINK I CAN REMEMBER FEELING (A->*) LITTLE DIFFERENT +260-123440-0008-187: I'LL TRY IF I KNOW ALL THE THINGS I USED TO KNOW +260-123440-0009-188: I SHALL NEVER GET TO TWENTY AT THAT RATE +260-123440-0010-189: HOW CHEERFULLY HE SEEMS TO GRIN HOW NEATLY SPREAD HIS CLAWS AND WELCOME LITTLE FISHES IN WITH GENTLY SMILING JAWS +260-123440-0011-190: NO I'VE MADE UP MY MIND ABOUT IT IF I'M MABEL I'LL STAY DOWN HERE +260-123440-0012-191: IT'LL BE NO USE THEIR PUTTING THEIR HEADS DOWN AND SAYING COME UP AGAIN DEAR +260-123440-0013-192: I AM SO VERY TIRED OF BEING ALL ALONE HERE +260-123440-0014-193: AND I DECLARE IT'S TOO BAD THAT IT IS +260-123440-0015-194: I WISH I HADN'T CRIED SO MUCH SAID ALICE AS SHE SWAM ABOUT TRYING TO FIND HER WAY OUT +260-123440-0016-195: I SHALL BE PUNISHED FOR IT NOW I SUPPOSE BY BEING DROWNED IN MY OWN TEARS +260-123440-0017-196: THAT WILL BE A QUEER THING TO BE SURE +260-123440-0018-197: I AM 
VERY TIRED OF SWIMMING ABOUT HERE (O->OH) MOUSE +260-123440-0019-198: CRIED ALICE AGAIN FOR THIS TIME THE MOUSE WAS BRISTLING ALL OVER AND SHE FELT CERTAIN IT MUST BE REALLY OFFENDED +260-123440-0020-199: WE WON'T TALK ABOUT HER ANY MORE IF YOU'D RATHER NOT WE INDEED +2830-3979-0000-1120: WE WANT YOU TO HELP US PUBLISH SOME LEADING WORK OF (LUTHER'S->LUTHERS) FOR THE GENERAL AMERICAN MARKET WILL YOU DO IT +2830-3979-0001-1121: THE CONDITION IS THAT I WILL BE PERMITTED TO MAKE LUTHER TALK AMERICAN (STREAMLINE HIM->STREAM LINE HYMN) SO TO SPEAK BECAUSE YOU WILL NEVER GET PEOPLE WHETHER IN OR OUTSIDE THE LUTHERAN CHURCH ACTUALLY TO READ LUTHER UNLESS WE MAKE HIM TALK AS HE WOULD TALK (TODAY->TO DAY) TO AMERICANS +2830-3979-0002-1122: LET US BEGIN WITH THAT HIS COMMENTARY ON (GALATIANS->GALLATIONS) +2830-3979-0003-1123: THE UNDERTAKING WHICH (SEEMED->SEEMS) SO ATTRACTIVE WHEN VIEWED AS A LITERARY TASK PROVED A MOST DIFFICULT ONE AND AT TIMES BECAME OPPRESSIVE +2830-3979-0004-1124: IT WAS WRITTEN IN LATIN +2830-3979-0005-1125: THE WORK HAD TO BE CONDENSED +2830-3979-0006-1126: A WORD SHOULD NOW BE SAID ABOUT THE ORIGIN OF LUTHER'S COMMENTARY (ON GALATIANS->ANGULATIONS) +2830-3979-0007-1127: MUCH LATER WHEN A FRIEND OF HIS WAS PREPARING AN (EDITION->ADDITION) OF ALL HIS LATIN WORKS HE REMARKED TO HIS HOME CIRCLE IF I HAD MY WAY ABOUT IT THEY WOULD REPUBLISH ONLY THOSE OF MY BOOKS WHICH HAVE DOCTRINE MY (GALATIANS->GALLATIONS) FOR INSTANCE +2830-3979-0008-1128: IN OTHER WORDS THESE THREE MEN TOOK DOWN THE LECTURES WHICH LUTHER ADDRESSED TO HIS STUDENTS IN THE COURSE OF (GALATIANS->GALLATIONS) AND (ROERER->ROAR) PREPARED THE MANUSCRIPT FOR THE PRINTER +2830-3979-0009-1129: IT PRESENTS LIKE NO OTHER OF LUTHER'S WRITINGS THE CENTRAL THOUGHT OF CHRISTIANITY THE JUSTIFICATION OF THE SINNER FOR THE SAKE OF CHRIST'S MERITS ALONE +2830-3979-0010-1130: BUT THE ESSENCE OF LUTHER'S LECTURES IS THERE +2830-3979-0011-1131: THE LORD WHO HAS GIVEN US POWER TO TEACH AND TO HEAR LET HIM ALSO GIVE US THE POWER TO SERVE AND TO DO LUKE TWO +2830-3979-0012-1132: THE WORD OF OUR GOD SHALL STAND (FOREVER->FOR EVER) +2830-3980-0000-1043: IN EVERY WAY THEY SOUGHT TO UNDERMINE THE AUTHORITY OF SAINT PAUL +2830-3980-0001-1044: THEY SAID TO THE GALATIANS YOU HAVE NO RIGHT TO THINK HIGHLY OF PAUL +2830-3980-0002-1045: HE WAS THE LAST TO TURN TO CHRIST +2830-3980-0003-1046: (PAUL->PAW) CAME LATER (AND IS->IN HIS) BENEATH US +2830-3980-0004-1047: INDEED HE PERSECUTED THE CHURCH OF CHRIST FOR A LONG TIME +2830-3980-0005-1048: DO YOU SUPPOSE THAT GOD FOR THE SAKE OF A FEW LUTHERAN HERETICS WOULD DISOWN HIS ENTIRE CHURCH +2830-3980-0006-1049: AGAINST THESE BOASTING FALSE APOSTLES PAUL BOLDLY DEFENDS HIS APOSTOLIC AUTHORITY AND MINISTRY +2830-3980-0007-1050: AS THE AMBASSADOR OF A GOVERNMENT IS HONORED FOR HIS OFFICE AND NOT FOR HIS PRIVATE PERSON SO THE MINISTER OF CHRIST SHOULD EXALT HIS OFFICE IN ORDER TO GAIN AUTHORITY AMONG MEN +2830-3980-0008-1051: (PAUL TAKES->POLITICS) PRIDE IN HIS MINISTRY NOT TO HIS OWN PRAISE BUT TO THE PRAISE OF GOD +2830-3980-0009-1052: PAUL (AN->AND) APOSTLE NOT OF MEN ET CETERA +2830-3980-0010-1053: EITHER HE CALLS MINISTERS THROUGH THE AGENCY OF MEN OR HE CALLS THEM DIRECTLY AS HE CALLED THE PROPHETS AND APOSTLES +2830-3980-0011-1054: PAUL DECLARES THAT THE FALSE APOSTLES WERE CALLED OR SENT NEITHER BY MEN NOR BY MAN +2830-3980-0012-1055: THE MOST THEY COULD CLAIM IS THAT THEY WERE SENT BY OTHERS +2830-3980-0013-1056: HE MENTIONS THE APOSTLES FIRST BECAUSE THEY WERE APPOINTED DIRECTLY BY GOD 
+2830-3980-0014-1057: THE CALL IS NOT TO BE TAKEN LIGHTLY +2830-3980-0015-1058: FOR A PERSON TO (POSSESS->POSSESSED) KNOWLEDGE IS NOT ENOUGH +2830-3980-0016-1059: IT SPOILS ONE'S BEST WORK +2830-3980-0017-1060: WHEN I WAS A YOUNG MAN I THOUGHT PAUL WAS MAKING TOO MUCH OF HIS CALL +2830-3980-0018-1061: I DID NOT THEN REALIZE THE IMPORTANCE OF THE MINISTRY +2830-3980-0019-1062: I KNEW NOTHING OF THE DOCTRINE OF FAITH BECAUSE WE WERE TAUGHT SOPHISTRY INSTEAD OF CERTAINTY AND NOBODY UNDERSTOOD SPIRITUAL BOASTING +2830-3980-0020-1063: THIS IS NO SINFUL PRIDE IT IS (HOLY->WHOLLY) PRIDE +2830-3980-0021-1064: AND GOD THE FATHER WHO RAISED HIM FROM THE DEAD +2830-3980-0022-1065: THE (CLAUSE->CLAS) SEEMS SUPERFLUOUS ON FIRST SIGHT +2830-3980-0023-1066: THESE (PERVERTERS->PERVERTIVES) OF THE RIGHTEOUSNESS OF CHRIST RESIST THE FATHER AND THE SON AND THE WORKS OF THEM BOTH +2830-3980-0024-1067: IN THIS WHOLE EPISTLE PAUL TREATS OF THE RESURRECTION OF CHRIST +2830-3980-0025-1068: BY HIS RESURRECTION CHRIST WON THE VICTORY OVER LAW SIN FLESH WORLD DEVIL DEATH HELL AND EVERY EVIL +2830-3980-0026-1069: (VERSE TWO->FIRST TOO) +2830-3980-0027-1070: AND ALL THE BRETHREN WHICH ARE WITH ME +2830-3980-0028-1071: THIS SHOULD GO FAR IN SHUTTING THE MOUTHS OF THE FALSE APOSTLES +2830-3980-0029-1072: ALTHOUGH THE BRETHREN WITH ME ARE NOT APOSTLES LIKE MYSELF YET THEY ARE ALL OF ONE MIND WITH ME THINK WRITE AND TEACH AS I DO +2830-3980-0030-1073: THEY DO NOT GO WHERE THE ENEMIES OF THE GOSPEL PREDOMINATE THEY GO WHERE THE CHRISTIANS ARE +2830-3980-0031-1074: WHY DO THEY NOT INVADE THE CATHOLIC PROVINCES AND PREACH THEIR DOCTRINE TO GODLESS PRINCES BISHOPS AND DOCTORS AS WE HAVE DONE BY THE HELP OF GOD +2830-3980-0032-1075: WE LOOK FOR THAT REWARD WHICH (EYE->I) HATH NOT SEEN NOR EAR HEARD NEITHER HATH ENTERED INTO THE HEART OF MAN +2830-3980-0033-1076: NOT ALL THE (GALATIANS->GALLATIONS) HAD BECOME PERVERTED +2830-3980-0034-1077: THESE MEANS CANNOT BE CONTAMINATED +2830-3980-0035-1078: THEY (REMAIN->REMAINED) DIVINE REGARDLESS OF MEN'S OPINION +2830-3980-0036-1079: WHEREVER THE MEANS OF GRACE ARE FOUND THERE IS THE HOLY CHURCH EVEN THOUGH ANTICHRIST REIGNS THERE +2830-3980-0037-1080: SO MUCH FOR THE TITLE OF THE EPISTLE NOW FOLLOWS THE GREETING OF THE APOSTLE VERSE THREE +2830-3980-0038-1081: GRACE BE TO YOU (AND->IN) PEACE FROM GOD THE FATHER AND FROM OUR LORD JESUS CHRIST +2830-3980-0039-1082: THE TERMS OF GRACE AND PEACE ARE COMMON TERMS WITH PAUL AND ARE NOW PRETTY WELL UNDERSTOOD +2830-3980-0040-1083: THE GREETING OF THE APOSTLE IS REFRESHING +2830-3980-0041-1084: GRACE INVOLVES THE REMISSION OF SINS PEACE AND A HAPPY CONSCIENCE +2830-3980-0042-1085: THE WORLD (BRANDS->BRINGS) THIS A PERNICIOUS DOCTRINE +2830-3980-0043-1086: EXPERIENCE PROVES THIS +2830-3980-0044-1087: HOWEVER THE GRACE AND PEACE OF GOD WILL +2830-3980-0045-1088: MEN SHOULD NOT SPECULATE ABOUT THE NATURE OF GOD +2830-3980-0046-1089: WAS IT NOT ENOUGH TO SAY FROM GOD THE FATHER +2830-3980-0047-1090: TO DO SO IS TO LOSE GOD ALTOGETHER BECAUSE GOD BECOMES INTOLERABLE WHEN WE SEEK TO MEASURE (AND TO->INTO) COMPREHEND HIS INFINITE MAJESTY +2830-3980-0048-1091: HE CAME DOWN TO EARTH LIVED AMONG MEN SUFFERED WAS CRUCIFIED AND THEN HE DIED STANDING CLEARLY BEFORE US SO THAT OUR HEARTS AND EYES MAY FASTEN UPON HIM +2830-3980-0049-1092: EMBRACE HIM AND FORGET ABOUT THE NATURE OF GOD +2830-3980-0050-1093: DID NOT CHRIST HIMSELF SAY I AM THE WAY AND THE TRUTH AND THE LIFE NO MAN COMETH UNTO THE FATHER BUT BY ME +2830-3980-0051-1094: WHEN YOU ARGUE ABOUT THE 
NATURE OF GOD APART FROM THE QUESTION OF JUSTIFICATION YOU MAY BE AS PROFOUND AS YOU LIKE +2830-3980-0052-1095: WE ARE TO HEAR CHRIST WHO HAS BEEN APPOINTED BY THE FATHER AS OUR DIVINE TEACHER +2830-3980-0053-1096: AT THE SAME TIME PAUL CONFIRMS OUR CREED THAT CHRIST IS VERY GOD +2830-3980-0054-1097: THAT CHRIST IS VERY GOD IS APPARENT IN THAT PAUL ASCRIBES TO HIM DIVINE POWERS EQUALLY WITH THE FATHER AS FOR INSTANCE THE POWER (TO->DOES) DISPENSE GRACE AND PEACE +2830-3980-0055-1098: TO BESTOW PEACE AND GRACE LIES IN THE PROVINCE OF GOD WHO ALONE CAN CREATE THESE BLESSINGS THE ANGELS CANNOT +2830-3980-0056-1099: OTHERWISE PAUL SHOULD HAVE WRITTEN GRACE FROM GOD THE FATHER AND PEACE FROM OUR LORD JESUS CHRIST +2830-3980-0057-1100: THE ARIANS TOOK CHRIST FOR A NOBLE AND PERFECT CREATURE SUPERIOR EVEN TO THE ANGELS BECAUSE BY HIM GOD CREATED HEAVEN AND EARTH +2830-3980-0058-1101: MOHAMMED ALSO SPEAKS HIGHLY OF CHRIST +2830-3980-0059-1102: PAUL STICKS TO HIS THEME +2830-3980-0060-1103: HE NEVER LOSES SIGHT OF THE PURPOSE OF HIS EPISTLE +2830-3980-0061-1104: NOT GOLD OR SILVER OR (PASCHAL->PASSION) LAMBS OR AN ANGEL BUT HIMSELF WHAT FOR +2830-3980-0062-1105: NOT FOR A CROWN OR A KINGDOM OR (OUR->A) GOODNESS BUT FOR OUR SINS +2830-3980-0063-1106: UNDERSCORE THESE WORDS FOR THEY ARE FULL OF COMFORT FOR SORE CONSCIENCES +2830-3980-0064-1107: HOW MAY WE OBTAIN REMISSION OF OUR SINS +2830-3980-0065-1108: PAUL ANSWERS THE MAN WHO IS NAMED JESUS CHRIST AND THE SON OF GOD GAVE HIMSELF FOR OUR SINS +2830-3980-0066-1109: SINCE CHRIST WAS GIVEN FOR OUR SINS IT STANDS TO REASON THAT THEY CANNOT BE PUT AWAY BY OUR OWN EFFORTS +2830-3980-0067-1110: THIS SENTENCE ALSO DEFINES OUR SINS AS GREAT SO GREAT IN FACT THAT THE WHOLE WORLD COULD NOT MAKE AMENDS FOR A SINGLE SIN +2830-3980-0068-1111: THE GREATNESS OF THE RANSOM CHRIST THE SON OF GOD INDICATES THIS +2830-3980-0069-1112: THE VICIOUS CHARACTER OF SIN IS BROUGHT OUT BY THE WORDS WHO GAVE HIMSELF FOR OUR SINS +2830-3980-0070-1113: BUT WE ARE CARELESS WE MAKE LIGHT OF SIN +2830-3980-0071-1114: WE THINK THAT BY SOME LITTLE WORK OR MERIT WE CAN DISMISS (SIN->IN) +2830-3980-0072-1115: THIS PASSAGE THEN BEARS OUT THE FACT THAT ALL MEN ARE SOLD UNDER SIN +2830-3980-0073-1116: THIS ATTITUDE SPRINGS FROM A FALSE CONCEPTION OF SIN THE CONCEPTION THAT SIN IS A SMALL MATTER EASILY (TAKEN->TAKING) CARE OF BY GOOD WORKS THAT WE MUST PRESENT OURSELVES (UNTO->INTO) GOD WITH (A->*) GOOD CONSCIENCE THAT WE MUST FEEL NO SIN BEFORE WE MAY FEEL THAT CHRIST WAS GIVEN FOR OUR SINS +2830-3980-0074-1117: (THIS ATTITUDE->THE SATITUDE) IS UNIVERSAL (AND->IN) PARTICULARLY DEVELOPED IN THOSE WHO CONSIDER THEMSELVES BETTER THAN OTHERS +2830-3980-0075-1118: BUT THE REAL SIGNIFICANCE AND COMFORT OF THE WORDS FOR OUR SINS IS LOST UPON THEM +2830-3980-0076-1119: ON THE OTHER HAND WE ARE NOT TO REGARD THEM AS SO TERRIBLE THAT WE MUST DESPAIR +2961-960-0000-497: HE PASSES ABRUPTLY FROM PERSONS TO IDEAS AND NUMBERS AND FROM IDEAS AND NUMBERS TO PERSONS FROM THE HEAVENS TO MAN FROM ASTRONOMY TO PHYSIOLOGY HE CONFUSES OR RATHER DOES NOT DISTINGUISH SUBJECT AND OBJECT FIRST AND FINAL CAUSES AND IS DREAMING OF GEOMETRICAL FIGURES LOST IN A FLUX OF SENSE +2961-960-0001-498: THE INFLUENCE (WITH->WHICH) THE TIMAEUS HAS EXERCISED UPON POSTERITY IS DUE PARTLY TO A MISUNDERSTANDING +2961-960-0002-499: IN THE SUPPOSED DEPTHS OF THIS DIALOGUE THE NEO (PLATONISTS->PLATINISTS) FOUND HIDDEN MEANINGS (AND->IN) CONNECTIONS WITH THE JEWISH AND CHRISTIAN SCRIPTURES AND OUT OF THEM THEY ELICITED DOCTRINES QUITE 
AT VARIANCE WITH THE SPIRIT OF PLATO +2961-960-0003-500: THEY WERE ABSORBED IN HIS THEOLOGY AND WERE UNDER THE DOMINION OF HIS NAME WHILE THAT WHICH WAS TRULY GREAT AND TRULY (CHARACTERISTIC->CORRECTORISTIC) IN HIM HIS EFFORT TO REALIZE AND CONNECT ABSTRACTIONS WAS NOT UNDERSTOOD BY THEM AT ALL +2961-960-0004-501: THERE IS NO DANGER OF THE MODERN (COMMENTATORS->COMMON TEACHERS) ON THE (TIMAEUS->TIMIRAS) FALLING INTO THE ABSURDITIES OF THE (NEO PLATONISTS->NEW PLATANISTS) +2961-960-0005-502: IN THE PRESENT DAY WE ARE WELL AWARE THAT AN ANCIENT PHILOSOPHER IS TO BE INTERPRETED FROM HIMSELF AND BY THE CONTEMPORARY HISTORY OF THOUGHT +2961-960-0006-503: THE FANCIES OF THE (NEO PLATONISTS->NEW PLATANISTS) ARE ONLY INTERESTING TO US BECAUSE THEY EXHIBIT A PHASE OF THE HUMAN MIND WHICH PREVAILED WIDELY IN THE FIRST CENTURIES OF THE CHRISTIAN ERA AND IS NOT WHOLLY EXTINCT IN OUR OWN DAY +2961-960-0007-504: BUT THEY HAVE NOTHING TO DO WITH THE INTERPRETATION OF PLATO AND IN SPIRIT THEY ARE OPPOSED TO HIM +2961-960-0008-505: WE DO NOT KNOW HOW PLATO WOULD HAVE ARRANGED HIS OWN (DIALOGUES->DIALECTS) OR WHETHER THE THOUGHT OF ARRANGING ANY OF THEM BESIDES THE (TWO TRILOGIES->TUTRILOGIES) WHICH HE HAS EXPRESSLY CONNECTED WAS EVER PRESENT TO HIS MIND +2961-960-0009-506: THE DIALOGUE IS PRIMARILY CONCERNED WITH THE ANIMAL CREATION INCLUDING UNDER THIS TERM THE HEAVENLY BODIES AND WITH MAN ONLY AS ONE AMONG THE ANIMALS +2961-960-0010-507: BUT HE HAS NOT AS YET (DEFINED->THE FIND) THIS INTERMEDIATE TERRITORY WHICH LIES SOMEWHERE BETWEEN MEDICINE AND MATHEMATICS AND HE WOULD HAVE FELT THAT THERE WAS AS GREAT AN IMPIETY IN RANKING THEORIES OF PHYSICS FIRST IN THE ORDER OF KNOWLEDGE AS IN PLACING THE BODY BEFORE THE SOUL +2961-960-0011-508: WITH (HERACLEITUS->HERACLITUS) HE ACKNOWLEDGES THE PERPETUAL FLUX LIKE (ANAXAGORAS->AN EXAGGERUS) HE ASSERTS THE PREDOMINANCE OF MIND ALTHOUGH ADMITTING AN ELEMENT OF NECESSITY WHICH REASON IS INCAPABLE OF SUBDUING LIKE THE (PYTHAGOREANS->PYTHAGORIANS) HE SUPPOSES THE MYSTERY OF THE WORLD TO BE CONTAINED IN NUMBER +2961-960-0012-509: MANY IF NOT ALL THE ELEMENTS OF THE (PRE SOCRATIC->PRIESTHOO CRADIC) PHILOSOPHY ARE INCLUDED IN THE (TIMAEUS->TIMIUS) +2961-960-0013-510: IT IS PROBABLE THAT THE RELATION OF THE IDEAS TO GOD OR OF GOD TO THE WORLD WAS DIFFERENTLY CONCEIVED BY HIM AT DIFFERENT TIMES OF HIS LIFE +2961-960-0014-511: THE IDEAS ALSO REMAIN BUT THEY HAVE BECOME TYPES IN NATURE FORMS OF MEN ANIMALS BIRDS FISHES +2961-960-0015-512: THE STYLE AND PLAN OF THE (TIMAEUS->TENEAS) DIFFER GREATLY FROM THAT OF ANY OTHER OF THE PLATONIC DIALOGUES +2961-960-0016-513: BUT PLATO HAS NOT THE SAME MASTERY OVER HIS INSTRUMENT WHICH HE EXHIBITS IN THE (PHAEDRUS->FEATURES) OR (SYMPOSIUM->SIMPOS HIM) +2961-960-0017-514: NOTHING CAN EXCEED THE BEAUTY OR ART OF (THE->*) INTRODUCTION IN WHICH (HE IS->HIS) USING WORDS AFTER HIS ACCUSTOMED MANNER +2961-960-0018-515: BUT IN THE REST OF THE WORK THE POWER OF LANGUAGE SEEMS TO FAIL HIM AND THE DRAMATIC FORM IS WHOLLY GIVEN UP +2961-960-0019-516: HE COULD WRITE IN ONE STYLE BUT NOT IN ANOTHER (AND->*) THE GREEK LANGUAGE HAD NOT AS YET BEEN FASHIONED BY ANY POET OR PHILOSOPHER TO DESCRIBE PHYSICAL PHENOMENA +2961-960-0020-517: AND HENCE WE FIND THE SAME SORT OF CLUMSINESS IN THE (TIMAEUS->TIMAIRS) OF PLATO WHICH CHARACTERIZES THE PHILOSOPHICAL POEM OF LUCRETIUS +2961-960-0021-518: THERE IS A WANT OF FLOW AND OFTEN A DEFECT OF RHYTHM THE MEANING IS SOMETIMES OBSCURE AND THERE IS A GREATER USE OF APPOSITION (AND->IN) MORE OF REPETITION THAN OCCURS 
IN PLATO'S EARLIER WRITINGS +2961-960-0022-519: PLATO HAD NOT THE COMMAND OF HIS MATERIALS WHICH WOULD HAVE ENABLED HIM TO PRODUCE A PERFECT WORK OF ART +2961-961-0000-520: SOCRATES BEGINS (THE TIMAEUS->TO TEARS) WITH A SUMMARY OF THE REPUBLIC +2961-961-0001-521: AND NOW HE DESIRES TO SEE THE IDEAL STATE SET IN MOTION HE WOULD LIKE TO KNOW HOW SHE BEHAVED IN SOME GREAT STRUGGLE +2961-961-0002-522: AND THEREFORE TO YOU I TURN (TIMAEUS->TO ME AS) CITIZEN OF (LOCRIS->LOCHRIS) WHO ARE AT ONCE A PHILOSOPHER (AND->IN) A STATESMAN AND TO YOU (CRITIAS->CRITIUS) WHOM ALL ATHENIANS KNOW TO BE SIMILARLY ACCOMPLISHED AND TO HERMOCRATES (WHO IS->WHOSE) ALSO FITTED BY NATURE AND EDUCATION TO SHARE IN OUR DISCOURSE +2961-961-0003-523: I WILL IF (TIMAEUS APPROVES->TO ME AS A PROOFS) I APPROVE +2961-961-0004-524: LISTEN THEN SOCRATES TO A TALE OF (SOLON'S->SILENCE) WHO BEING THE FRIEND OF (DROPIDAS MY->TROPIDAS BY) GREAT GRANDFATHER TOLD IT TO MY GRANDFATHER (CRITIAS->CRITIUS) AND HE TOLD ME +2961-961-0005-525: SOME POEMS OF (SOLON->SOLEMN) WERE RECITED BY THE BOYS +2961-961-0006-526: AND WHAT WAS THE SUBJECT OF THE POEM SAID THE PERSON WHO MADE THE REMARK +2961-961-0007-527: THE SUBJECT WAS A VERY NOBLE ONE HE DESCRIBED THE MOST FAMOUS ACTION IN WHICH THE ATHENIAN PEOPLE WERE EVER ENGAGED +2961-961-0008-528: BUT THE MEMORY OF THEIR EXPLOITS (HAS->HAD) PASSED AWAY OWING TO THE LAPSE OF TIME AND THE EXTINCTION OF THE ACTORS +2961-961-0009-529: TELL US SAID THE OTHER THE WHOLE STORY AND WHERE SOLON HEARD THE STORY +2961-961-0010-530: BUT IN EGYPT THE TRADITIONS OF OUR OWN AND OTHER LANDS ARE BY US REGISTERED FOR EVER IN OUR TEMPLES +2961-961-0011-531: THE GENEALOGIES WHICH YOU HAVE RECITED TO US OUT OF YOUR OWN (ANNALS SOLON->ANNAL SOLEMN) ARE A MERE CHILDREN'S STORY +2961-961-0012-532: FOR IN THE TIMES BEFORE THE GREAT FLOOD ATHENS WAS THE GREATEST AND BEST OF CITIES AND DID THE NOBLEST DEEDS AND HAD THE BEST CONSTITUTION OF ANY UNDER THE FACE OF HEAVEN +2961-961-0013-533: (SOLON->SOLEMN) MARVELLED AND DESIRED TO BE INFORMED OF THE PARTICULARS +2961-961-0014-534: NINE THOUSAND YEARS HAVE ELAPSED SINCE SHE (FOUNDED->FOUND IT) YOURS AND EIGHT THOUSAND SINCE (SHE FOUNDED->YOU FOUND IT) OURS AS OUR ANNALS RECORD +2961-961-0015-535: MANY LAWS EXIST AMONG US WHICH ARE THE COUNTERPART OF YOURS AS THEY WERE IN THE OLDEN TIME +2961-961-0016-536: I WILL BRIEFLY DESCRIBE (THEM->HIM) TO YOU AND YOU SHALL READ THE ACCOUNT OF THEM AT YOUR LEISURE IN THE SACRED REGISTERS +2961-961-0017-537: OBSERVE AGAIN WHAT CARE THE LAW TOOK IN THE PURSUIT OF WISDOM SEARCHING OUT THE DEEP THINGS OF THE WORLD AND APPLYING THEM TO THE USE OF (MAN->MEN) +2961-961-0018-538: THE MOST FAMOUS OF THEM ALL WAS THE OVERTHROW OF THE ISLAND OF ATLANTIS +2961-961-0019-539: FOR AT THE PERIL OF HER OWN EXISTENCE AND WHEN THE (OTHER->OTTER) HELLENES HAD DESERTED HER SHE REPELLED THE INVADER AND OF HER OWN ACCORD GAVE LIBERTY TO ALL THE NATIONS WITHIN THE PILLARS +2961-961-0020-540: THIS IS THE EXPLANATION OF THE SHALLOWS WHICH ARE FOUND IN THAT PART OF THE ATLANTIC OCEAN +2961-961-0021-541: BUT I WOULD NOT SPEAK AT THE TIME BECAUSE I WANTED TO REFRESH MY MEMORY +2961-961-0022-542: THEN (NOW->THOU) LET ME EXPLAIN TO YOU THE ORDER OF OUR ENTERTAINMENT FIRST TIMAEUS WHO IS A NATURAL PHILOSOPHER WILL SPEAK OF THE ORIGIN OF THE WORLD GOING DOWN TO THE CREATION OF (MAN->MEN) AND THEN I SHALL RECEIVE THE MEN WHOM HE HAS CREATED AND SOME OF WHOM WILL HAVE BEEN EDUCATED BY YOU AND INTRODUCE THEM TO YOU AS THE LOST ATHENIAN CITIZENS OF WHOM THE EGYPTIAN 
(RECORD->RECORDS) SPOKE +3570-5694-0000-2433: (BUT ALREADY->BETTER) AT A POINT IN ECONOMIC EVOLUTION FAR (ANTEDATING->ANTETING) THE EMERGENCE OF THE LADY (SPECIALISED->SPECIALIZED) CONSUMPTION OF GOODS AS AN EVIDENCE OF PECUNIARY STRENGTH HAD BEGUN TO WORK OUT IN A MORE OR LESS (ELABORATE->CELEBRATE) SYSTEM +3570-5694-0001-2434: THE UTILITY OF CONSUMPTION AS AN EVIDENCE OF WEALTH IS TO BE CLASSED AS A DERIVATIVE GROWTH +3570-5694-0002-2435: SUCH CONSUMPTION AS FALLS (TO->THROUGH) THE WOMEN IS MERELY INCIDENTAL TO THEIR WORK IT IS A MEANS TO THEIR CONTINUED (LABOUR->LABOR) AND NOT (A->TO) CONSUMPTION DIRECTED TO THEIR OWN COMFORT AND (FULNESS->FULLNESS) OF LIFE +3570-5694-0003-2436: WITH A FURTHER ADVANCE (IN->AND) CULTURE THIS (TABU->TABOU) MAY (CHANGE->CHANGED) INTO SIMPLE CUSTOM OF A MORE OR LESS RIGOROUS CHARACTER BUT WHATEVER BE THE THEORETICAL BASIS OF THE DISTINCTION WHICH IS MAINTAINED WHETHER IT BE (*->AT) A (TABU->BOOT) OR A LARGER CONVENTIONALITY THE FEATURES OF THE CONVENTIONAL SCHEME OF CONSUMPTION DO NOT CHANGE EASILY +3570-5694-0004-2437: IN THE NATURE OF THINGS LUXURIES AND THE COMFORTS OF LIFE BELONG TO THE LEISURE CLASS +3570-5694-0005-2438: UNDER THE (TABU->TABOO) CERTAIN VICTUALS AND MORE PARTICULARLY CERTAIN BEVERAGES ARE STRICTLY RESERVED FOR THE USE OF THE SUPERIOR CLASS +3570-5694-0006-2439: DRUNKENNESS AND THE OTHER PATHOLOGICAL CONSEQUENCES OF THE FREE USE OF STIMULANTS THEREFORE TEND IN THEIR TURN TO BECOME HONORIFIC AS BEING A MARK AT THE SECOND REMOVE OF THE SUPERIOR STATUS OF THOSE WHO ARE ABLE TO AFFORD THE INDULGENCE +3570-5694-0007-2440: IT HAS EVEN HAPPENED THAT THE NAME FOR CERTAIN DISEASED CONDITIONS OF THE BODY ARISING FROM SUCH AN ORIGIN HAS PASSED INTO EVERYDAY SPEECH AS A SYNONYM FOR NOBLE OR GENTLE +3570-5694-0008-2441: THE CONSUMPTION OF LUXURIES IN THE TRUE SENSE IS A CONSUMPTION DIRECTED TO THE COMFORT OF THE CONSUMER HIMSELF AND IS THEREFORE A MARK OF THE MASTER +3570-5694-0009-2442: WITH MANY QUALIFICATIONS WITH MORE QUALIFICATIONS AS THE PATRIARCHAL TRADITION HAS GRADUALLY WEAKENED THE GENERAL RULE IS FELT TO BE RIGHT AND BINDING THAT WOMEN SHOULD CONSUME ONLY FOR THE BENEFIT OF THEIR MASTERS +3570-5694-0010-2443: THE OBJECTION OF COURSE PRESENTS ITSELF THAT EXPENDITURE ON WOMEN'S DRESS AND HOUSEHOLD PARAPHERNALIA IS AN OBVIOUS EXCEPTION TO THIS RULE BUT IT WILL APPEAR IN THE SEQUEL THAT THIS EXCEPTION IS MUCH MORE OBVIOUS THAN SUBSTANTIAL +3570-5694-0011-2444: THE CUSTOM OF FESTIVE GATHERINGS PROBABLY ORIGINATED IN MOTIVES OF CONVIVIALITY AND RELIGION THESE MOTIVES ARE ALSO PRESENT IN THE LATER DEVELOPMENT (BUT->THAT) THEY DO NOT CONTINUE TO BE THE SOLE MOTIVES +3570-5694-0012-2445: THERE IS A MORE OR LESS ELABORATE SYSTEM OF RANK AND (GRADES->GRATES) +3570-5694-0013-2446: THIS (DIFFERENTIATION->DIFFUREATION) IS FURTHERED BY THE INHERITANCE OF WEALTH AND THE CONSEQUENT INHERITANCE OF GENTILITY +3570-5694-0014-2447: MANY OF THESE (AFFILIATED->ARE FILLIOTTED) GENTLEMEN OF LEISURE ARE AT THE SAME TIME (LESSER MEN->LESS AMEN) OF SUBSTANCE IN THEIR OWN RIGHT SO THAT SOME OF THEM ARE SCARCELY AT ALL OTHERS ONLY PARTIALLY TO BE RATED AS VICARIOUS CONSUMERS +3570-5694-0015-2448: SO MANY OF THEM HOWEVER AS MAKE UP THE RETAINER AND HANGERS ON OF THE PATRON MAY BE CLASSED AS VICARIOUS CONSUMER WITHOUT QUALIFICATION +3570-5694-0016-2449: MANY OF THESE AGAIN AND ALSO MANY OF THE OTHER ARISTOCRACY OF LESS DEGREE HAVE IN TURN ATTACHED TO THEIR PERSONS A MORE OR LESS COMPREHENSIVE GROUP OF VICARIOUS CONSUMER IN THE PERSONS OF THEIR WIVES AND CHILDREN THEIR 
SERVANTS RETAINERS ET CETERA +3570-5694-0017-2450: THE WEARING OF UNIFORMS (OR->ARE) LIVERIES IMPLIES A CONSIDERABLE DEGREE OF DEPENDENCE AND MAY EVEN BE SAID TO BE A MARK OF SERVITUDE REAL OR OSTENSIBLE +3570-5694-0018-2451: THE WEARERS OF UNIFORMS AND LIVERIES MAY BE ROUGHLY DIVIDED INTO TWO CLASSES THE FREE AND THE SERVILE OR THE NOBLE AND THE IGNOBLE +3570-5694-0019-2452: BUT THE GENERAL DISTINCTION IS NOT ON THAT ACCOUNT TO BE OVERLOOKED +3570-5694-0020-2453: SO THOSE (OFFICES->OFFICERS) WHICH ARE BY RIGHT THE PROPER EMPLOYMENT OF THE LEISURE CLASS ARE NOBLE SUCH AS GOVERNMENT FIGHTING HUNTING THE CARE OF ARMS AND (ACCOUTREMENTS->ACCUTMENTS) AND THE LIKE IN SHORT THOSE WHICH MAY BE CLASSED AS OSTENSIBLY PREDATORY EMPLOYMENTS +3570-5694-0021-2454: WHENEVER AS IN THESE CASES THE MENIAL SERVICE IN QUESTION HAS TO DO DIRECTLY WITH (THE->A) PRIMARY LEISURE EMPLOYMENTS OF FIGHTING AND HUNTING IT EASILY ACQUIRES A REFLECTED HONORIFIC CHARACTER +3570-5694-0022-2455: THE LIVERY BECOMES OBNOXIOUS TO NEARLY ALL WHO ARE REQUIRED TO WEAR IT +3570-5695-0000-2456: IN A GENERAL WAY THOUGH NOT WHOLLY NOR CONSISTENTLY THESE TWO GROUPS COINCIDE +3570-5695-0001-2457: THE DEPENDENT WHO WAS FIRST DELEGATED FOR THESE DUTIES WAS THE WIFE OR THE CHIEF WIFE AND AS WOULD BE EXPECTED IN (THE->A) LATER DEVELOPMENT OF THE INSTITUTION WHEN THE NUMBER OF PERSONS BY WHOM THESE DUTIES ARE CUSTOMARILY PERFORMED GRADUALLY NARROWS THE WIFE REMAINS THE LAST +3570-5695-0002-2458: BUT AS WE DESCEND THE SOCIAL SCALE THE POINT IS PRESENTLY REACHED WHERE THE DUTIES OF (VICARIOUS->VICHAIRLESS) LEISURE AND CONSUMPTION DEVOLVE UPON THE WIFE ALONE +3570-5695-0003-2459: IN THE COMMUNITIES OF THE WESTERN CULTURE THIS POINT IS AT PRESENT FOUND AMONG THE LOWER MIDDLE CLASS +3570-5695-0004-2460: IF BEAUTY (OR->*) COMFORT IS ACHIEVED AND IT IS A MORE OR LESS FORTUITOUS CIRCUMSTANCE IF THEY ARE THEY MUST BE ACHIEVED BY MEANS AND METHODS THAT COMMEND THEMSELVES TO THE GREAT ECONOMIC LAW OF WASTED EFFORT +3570-5695-0005-2461: THE MAN OF THE HOUSEHOLD ALSO CAN DO SOMETHING IN THIS DIRECTION AND INDEED HE COMMONLY DOES BUT WITH A STILL LOWER DESCENT INTO THE LEVELS OF INDIGENCE ALONG THE MARGIN OF THE SLUMS THE MAN AND PRESENTLY ALSO THE CHILDREN VIRTUALLY CEASE TO CONSUME VALUABLE GOODS FOR APPEARANCES AND THE WOMAN REMAINS VIRTUALLY THE SOLE EXPONENT OF THE HOUSEHOLD'S PECUNIARY DECENCY +3570-5695-0006-2462: VERY MUCH OF SQUALOR AND DISCOMFORT WILL BE ENDURED BEFORE THE LAST TRINKET OR THE LAST (PRETENSE->PRETENCE) OF PECUNIARY (DECENCY IS->DECENCIES) PUT AWAY +3570-5695-0007-2463: THERE IS NO CLASS (AND->IN) NO COUNTRY THAT HAS YIELDED SO ABJECTLY BEFORE THE PRESSURE OF PHYSICAL WANT AS TO DENY THEMSELVES ALL GRATIFICATION OF THIS HIGHER OR SPIRITUAL NEED +3570-5695-0008-2464: THE QUESTION IS WHICH OF THE TWO METHODS WILL MOST EFFECTIVELY REACH THE PERSONS WHOSE CONVICTIONS IT IS DESIRED TO (AFFECT->EFFECT) +3570-5695-0009-2465: EACH WILL THEREFORE SERVE ABOUT EQUALLY WELL DURING THE EARLIER STAGES OF SOCIAL GROWTH +3570-5695-0010-2466: THE MODERN ORGANIZATION OF INDUSTRY WORKS IN THE SAME DIRECTION ALSO BY ANOTHER LINE +3570-5695-0011-2467: IT IS EVIDENT THEREFORE THAT THE PRESENT TREND OF THE DEVELOPMENT IS IN THE DIRECTION OF HEIGHTENING THE UTILITY OF CONSPICUOUS CONSUMPTION AS COMPARED WITH LEISURE +3570-5695-0012-2468: IT IS ALSO NOTICEABLE THAT THE SERVICEABILITY OF CONSUMPTION AS A MEANS OF REPUTE AS WELL AS THE INSISTENCE ON IT AS AN ELEMENT OF DECENCY IS AT ITS BEST IN THOSE PORTIONS OF THE COMMUNITY WHERE THE HUMAN 
(CONTACT->CONDUCT) OF THE INDIVIDUAL IS WIDEST AND THE MOBILITY OF THE POPULATION IS GREATEST +3570-5695-0013-2469: CONSUMPTION BECOMES A LARGER ELEMENT IN THE STANDARD OF LIVING IN THE CITY THAN IN THE COUNTRY +3570-5695-0014-2470: AMONG THE COUNTRY POPULATION ITS (PLACE IS->PLACES) TO SOME EXTENT TAKEN BY SAVINGS AND HOME COMFORTS KNOWN THROUGH THE MEDIUM OF (NEIGHBORHOOD->NEIGHBOURHOOD) GOSSIP SUFFICIENTLY TO SERVE THE LIKE GENERAL PURPOSE OF PECUNIARY REPUTE +3570-5695-0015-2471: THE RESULT IS A GREAT MOBILITY OF THE LABOR EMPLOYED IN PRINTING PERHAPS GREATER THAN IN ANY OTHER EQUALLY WELL DEFINED AND CONSIDERABLE BODY OF WORKMEN +3570-5696-0000-2472: UNDER THE SIMPLE TEST OF EFFECTIVENESS FOR ADVERTISING WE SHOULD EXPECT TO FIND LEISURE AND THE CONSPICUOUS CONSUMPTION OF GOODS DIVIDING THE FIELD OF PECUNIARY EMULATION PRETTY EVENLY BETWEEN THEM AT THE OUTSET +3570-5696-0001-2473: BUT THE ACTUAL COURSE OF DEVELOPMENT HAS BEEN SOMEWHAT DIFFERENT FROM THIS IDEAL SCHEME LEISURE HELD THE FIRST PLACE AT THE START AND CAME TO (HOLD A->ALL THE) RANK (VERY MUCH->VERIMENT) ABOVE WASTEFUL CONSUMPTION OF GOODS BOTH AS A DIRECT EXPONENT OF WEALTH AND AS AN ELEMENT IN THE STANDARD OF DECENCY DURING THE (QUASI->COURSE I) PEACEABLE CULTURE +3570-5696-0002-2474: OTHER CIRCUMSTANCES PERMITTING THAT INSTINCT DISPOSES MEN TO LOOK WITH FAVOR UPON PRODUCTIVE EFFICIENCY AND ON WHATEVER IS OF HUMAN USE +3570-5696-0003-2475: A RECONCILIATION BETWEEN THE TWO CONFLICTING REQUIREMENTS IS (EFFECTED->AFFECTED) BY (A->*) RESORT TO MAKE BELIEVE (MANY AND->MEN IN) INTRICATE POLITE OBSERVANCES AND SOCIAL DUTIES OF A CEREMONIAL NATURE ARE DEVELOPED MANY ORGANIZATIONS ARE FOUNDED WITH SOME SPECIOUS OBJECT OF AMELIORATION EMBODIED IN THEIR OFFICIAL STYLE AND TITLE THERE IS MUCH COMING AND GOING AND A DEAL OF TALK TO THE END THAT THE (TALKERS MAY->TALK IS) NOT HAVE OCCASION TO REFLECT ON WHAT IS THE EFFECTUAL ECONOMIC VALUE OF THEIR TRAFFIC +3570-5696-0004-2476: THE (SALIENT->SAILORED) FEATURES OF THIS DEVELOPMENT OF DOMESTIC SERVICE HAVE ALREADY BEEN INDICATED +3570-5696-0005-2477: THROUGHOUT THE ENTIRE EVOLUTION OF CONSPICUOUS EXPENDITURE WHETHER OF GOODS OR OF SERVICES OR HUMAN LIFE RUNS THE OBVIOUS IMPLICATION THAT IN ORDER TO EFFECTUALLY MEND THE CONSUMER'S GOOD FAME IT MUST BE AN EXPENDITURE OF SUPERFLUITIES +3570-5696-0006-2478: AS USED IN THE SPEECH OF (EVERYDAY->EVERY DAY) LIFE THE WORD CARRIES AN UNDERTONE OF DEPRECATION +3570-5696-0007-2479: THE USE OF THE WORD WASTE AS A TECHNICAL TERM THEREFORE IMPLIES NO DEPRECATION OF THE MOTIVES OR OF THE ENDS SOUGHT BY THE CONSUMER UNDER THIS CANON OF CONSPICUOUS WASTE +3570-5696-0008-2480: BUT IT IS (ON OTHER->ANOTHER) GROUNDS WORTH NOTING THAT THE TERM WASTE IN THE LANGUAGE OF EVERYDAY LIFE IMPLIES DEPRECATION OF WHAT IS CHARACTERIZED AS WASTEFUL +3570-5696-0009-2481: IN STRICT ACCURACY NOTHING SHOULD BE INCLUDED UNDER THE HEAD OF CONSPICUOUS WASTE BUT SUCH EXPENDITURE AS IS INCURRED ON THE GROUND OF AN INVIDIOUS PECUNIARY COMPARISON +3570-5696-0010-2482: AN ARTICLE MAY BE USEFUL AND WASTEFUL BOTH AND ITS UTILITY TO THE CONSUMER MAY BE MADE UP OF USE AND WASTE IN THE MOST VARYING PROPORTIONS +3575-170457-0000-369: AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP I LAID MY HEAD SHE FEARED FOR TIME I WAS NOT MADE BUT FOR ETERNITY +3575-170457-0001-370: WHY ARE WE TO BE DENIED EACH OTHER'S SOCIETY +3575-170457-0002-371: WHY ARE WE TO BE DIVIDED +3575-170457-0003-372: SURELY IT MUST BE BECAUSE WE ARE IN DANGER OF LOVING EACH OTHER TOO WELL OF LOSING SIGHT OF THE CREATOR 
(IN->AND) IDOLATRY OF THE CREATURE +3575-170457-0004-373: WE USED TO DISPUTE ABOUT POLITICS AND RELIGION +3575-170457-0005-374: SHE (A TORY AND->ATTORIAN) CLERGYMAN'S DAUGHTER WAS ALWAYS IN A MINORITY OF ONE IN OUR HOUSE OF VIOLENT (DISSENT->DESCENT) AND RADICALISM +3575-170457-0006-375: HER FEEBLE HEALTH GAVE HER HER YIELDING MANNER FOR SHE COULD NEVER OPPOSE ANY ONE WITHOUT GATHERING UP ALL HER STRENGTH FOR THE STRUGGLE +3575-170457-0007-376: HE SPOKE FRENCH PERFECTLY I HAVE BEEN TOLD WHEN NEED WAS BUT DELIGHTED USUALLY IN TALKING THE BROADEST YORKSHIRE +3575-170457-0008-377: AND SO LIFE AND DEATH HAVE DISPERSED THE CIRCLE OF VIOLENT RADICALS AND DISSENTERS INTO WHICH TWENTY YEARS AGO THE LITTLE QUIET RESOLUTE CLERGYMAN'S DAUGHTER WAS RECEIVED AND BY WHOM SHE WAS TRULY LOVED AND HONOURED +3575-170457-0009-378: JANUARY AND FEBRUARY OF EIGHTEEN THIRTY SEVEN HAD PASSED AWAY AND STILL THERE WAS NO REPLY FROM (SOUTHEY->SALVI) +3575-170457-0010-379: I AM NOT DEPRECIATING IT WHEN I SAY THAT IN THESE TIMES IT IS NOT RARE +3575-170457-0011-380: BUT IT IS NOT WITH A VIEW TO DISTINCTION THAT YOU SHOULD CULTIVATE THIS TALENT IF YOU CONSULT YOUR OWN HAPPINESS +3575-170457-0012-381: YOU WILL SAY THAT A WOMAN HAS NO NEED OF SUCH A CAUTION THERE CAN BE NO PERIL IN IT FOR HER +3575-170457-0013-382: THE MORE SHE IS ENGAGED IN HER PROPER DUTIES THE LESS LEISURE WILL SHE HAVE FOR IT EVEN AS AN ACCOMPLISHMENT AND A RECREATION +3575-170457-0014-383: TO THOSE DUTIES YOU HAVE NOT YET BEEN CALLED AND WHEN YOU ARE YOU WILL BE LESS EAGER FOR CELEBRITY +3575-170457-0015-384: BUT DO NOT SUPPOSE THAT I DISPARAGE THE GIFT WHICH YOU POSSESS NOR THAT I WOULD DISCOURAGE YOU FROM EXERCISING IT I ONLY EXHORT YOU SO TO THINK OF IT AND SO TO USE IT AS TO RENDER IT CONDUCIVE TO YOUR OWN PERMANENT GOOD +3575-170457-0016-385: FAREWELL MADAM +3575-170457-0017-386: THOUGH I MAY BE BUT AN UNGRACIOUS ADVISER YOU WILL ALLOW ME THEREFORE TO SUBSCRIBE MYSELF WITH THE BEST WISHES FOR YOUR HAPPINESS HERE AND HEREAFTER YOUR TRUE FRIEND ROBERT (SOUTHEY->SELVEY) +3575-170457-0018-387: SIR MARCH SIXTEENTH +3575-170457-0019-388: I (HAD->HAVE) NOT VENTURED TO HOPE FOR SUCH A REPLY SO (CONSIDERATE->CONSIDER IT) IN ITS TONE SO NOBLE IN ITS SPIRIT +3575-170457-0020-389: I KNOW THE FIRST LETTER I WROTE TO YOU WAS ALL SENSELESS TRASH FROM BEGINNING TO END BUT I AM NOT ALTOGETHER THE IDLE DREAMING BEING IT WOULD SEEM TO DENOTE +3575-170457-0021-390: I THOUGHT IT THEREFORE MY DUTY WHEN I LEFT SCHOOL TO BECOME A GOVERNESS +3575-170457-0022-391: IN THE EVENINGS I CONFESS I DO THINK BUT I NEVER TROUBLE ANY ONE ELSE WITH MY THOUGHTS +3575-170457-0023-392: I CAREFULLY AVOID ANY APPEARANCE OF PREOCCUPATION AND ECCENTRICITY WHICH MIGHT LEAD THOSE I LIVE AMONGST TO SUSPECT THE NATURE OF MY PURSUITS +3575-170457-0024-393: I DON'T ALWAYS SUCCEED FOR SOMETIMES WHEN I'M TEACHING OR SEWING I WOULD RATHER BE READING (OR->A) WRITING BUT I TRY TO DENY MYSELF AND MY FATHER'S APPROBATION AMPLY REWARDED ME FOR THE PRIVATION +3575-170457-0025-394: AGAIN I THANK YOU THIS INCIDENT I SUPPOSE WILL BE RENEWED NO MORE IF I LIVE TO BE AN OLD WOMAN I SHALL REMEMBER IT THIRTY YEARS HENCE AS A BRIGHT DREAM +3575-170457-0026-395: P S PRAY SIR EXCUSE ME FOR WRITING TO YOU A SECOND TIME I COULD NOT HELP WRITING PARTLY TO TELL YOU HOW THANKFUL I AM FOR YOUR KINDNESS AND PARTLY TO LET YOU KNOW THAT YOUR ADVICE SHALL NOT BE WASTED HOWEVER SORROWFULLY AND RELUCTANTLY IT MAY BE AT FIRST FOLLOWED (C B->*) +3575-170457-0027-396: I CANNOT DENY MYSELF THE GRATIFICATION OF INSERTING 
(SOUTHEY'S->SO THESE) REPLY +3575-170457-0028-397: (KESWICK->KEZWICK) MARCH TWENTY SECOND EIGHTEEN THIRTY SEVEN DEAR MADAM +3575-170457-0029-398: YOUR LETTER HAS GIVEN ME GREAT PLEASURE AND I SHOULD NOT FORGIVE MYSELF IF I DID NOT TELL YOU SO +3575-170457-0030-399: OF THIS SECOND LETTER ALSO SHE SPOKE AND TOLD ME THAT IT CONTAINED AN INVITATION FOR HER TO GO AND SEE THE POET IF EVER SHE VISITED THE LAKES +3575-170457-0031-400: ON AUGUST TWENTY SEVENTH EIGHTEEN THIRTY SEVEN SHE WRITES +3575-170457-0032-401: COME COME (I AM->I'M) GETTING REALLY TIRED OF YOUR ABSENCE +3575-170457-0033-402: SATURDAY AFTER SATURDAY COMES (ROUND->AROUND) AND I CAN HAVE NO HOPE OF HEARING YOUR KNOCK AT THE DOOR AND THEN BEING TOLD THAT MISS (E->EA) IS COME OH DEAR +3575-170457-0034-403: IN THIS MONOTONOUS LIFE OF (MINE->MIND) THAT WAS A PLEASANT EVENT +3575-170457-0035-404: I WISH (IT WOULD->YOU WERE) RECUR AGAIN BUT IT WILL TAKE TWO OR THREE INTERVIEWS BEFORE THE STIFFNESS THE ESTRANGEMENT OF THIS LONG SEPARATION WILL WEAR AWAY +3575-170457-0036-405: MY EYES (FILL WITH->FILLED) TEARS WHEN I CONTRAST THE BLISS OF SUCH A STATE BRIGHTENED BY HOPES OF THE FUTURE WITH THE MELANCHOLY STATE I NOW LIVE IN UNCERTAIN THAT I EVER FELT TRUE CONTRITION (WANDERING->WONDERING) IN THOUGHT (AND DEED->INDEED) LONGING FOR HOLINESS WHICH I SHALL NEVER NEVER OBTAIN (SMITTEN AT->SMIT IN THAT) TIMES TO THE HEART WITH THE CONVICTION THAT GHASTLY CALVINISTIC DOCTRINES ARE TRUE DARKENED (IN->AND) SHORT BY THE VERY SHADOWS OF SPIRITUAL DEATH +3575-170457-0037-406: IF CHRISTIAN PERFECTION BE NECESSARY TO SALVATION I SHALL NEVER BE SAVED MY HEART IS A VERY (HOTBED->HOT BED) FOR SINFUL THOUGHTS AND WHEN I DECIDE ON AN ACTION I SCARCELY REMEMBER TO LOOK TO MY REDEEMER FOR (*->A) DIRECTION +3575-170457-0038-407: AND MEANTIME I KNOW THE GREATNESS OF JEHOVAH I ACKNOWLEDGE THE PERFECTION OF HIS WORD I ADORE THE PURITY OF THE (CHRISTIAN->CHISH) FAITH MY THEORY IS RIGHT MY PRACTICE HORRIBLY WRONG +3575-170457-0039-408: THE CHRISTMAS HOLIDAYS CAME AND SHE AND ANNE RETURNED TO THE PARSONAGE AND TO THAT HAPPY HOME CIRCLE IN WHICH ALONE THEIR NATURES EXPANDED AMONGST ALL OTHER PEOPLE THEY SHRIVELLED UP MORE OR LESS +3575-170457-0040-409: INDEED THERE WERE ONLY ONE OR TWO STRANGERS WHO COULD BE ADMITTED AMONG THE SISTERS WITHOUT PRODUCING THE SAME RESULT +3575-170457-0041-410: SHE WAS GONE OUT INTO THE VILLAGE ON SOME ERRAND WHEN AS SHE WAS DESCENDING THE STEEP STREET HER FOOT SLIPPED ON THE ICE AND SHE FELL (IT->HE) WAS DARK AND NO ONE SAW HER MISCHANCE TILL AFTER A TIME HER GROANS ATTRACTED THE ATTENTION OF A PASSER BY +3575-170457-0042-411: UNFORTUNATELY THE FRACTURE COULD NOT BE SET TILL SIX O'CLOCK THE NEXT MORNING AS NO SURGEON WAS TO BE HAD BEFORE THAT TIME AND SHE NOW LIES AT (OUR->HER) HOUSE IN A VERY DOUBTFUL AND DANGEROUS STATE +3575-170457-0043-412: HOWEVER REMEMBERING WHAT YOU TOLD ME NAMELY THAT YOU HAD COMMENDED THE MATTER TO A HIGHER DECISION THAN OURS AND THAT YOU WERE RESOLVED TO SUBMIT WITH RESIGNATION TO THAT DECISION WHATEVER IT MIGHT BE I HOLD IT MY DUTY TO YIELD ALSO AND TO BE SILENT (IT->AND) MAY BE ALL FOR THE BEST +3575-170457-0044-413: AFTER THIS DISAPPOINTMENT I NEVER DARE RECKON WITH CERTAINTY ON THE ENJOYMENT OF A PLEASURE AGAIN IT SEEMS AS IF SOME FATALITY STOOD BETWEEN YOU AND ME +3575-170457-0045-414: I AM NOT GOOD ENOUGH FOR YOU AND YOU MUST BE KEPT FROM THE CONTAMINATION OF (TOO->TWO) INTIMATE SOCIETY +3575-170457-0046-415: A GOOD (NEIGHBOUR->NEIGHBOR) OF THE BRONTES A CLEVER INTELLIGENT YORKSHIRE WOMAN WHO KEEPS A 
(DRUGGIST'S->DRUGGIST) SHOP IN HAWORTH (AND->*) FROM HER OCCUPATION HER EXPERIENCE AND EXCELLENT SENSE HOLDS THE POSITION OF VILLAGE (DOCTRESS->DOCTRIS) AND NURSE AND AS SUCH HAS BEEN A FRIEND IN MANY A TIME OF TRIAL AND SICKNESS AND DEATH IN THE HOUSEHOLDS ROUND TOLD ME A CHARACTERISTIC LITTLE INCIDENT CONNECTED WITH TABBY'S FRACTURED LEG +3575-170457-0047-416: TABBY HAD LIVED WITH THEM FOR TEN OR TWELVE YEARS AND WAS AS CHARLOTTE EXPRESSED IT ONE OF THE FAMILY +3575-170457-0048-417: HE REFUSED AT FIRST TO LISTEN TO THE CAREFUL ADVICE IT WAS REPUGNANT TO HIS LIBERAL NATURE +3575-170457-0049-418: THIS DECISION WAS COMMUNICATED TO THE GIRLS +3575-170457-0050-419: TABBY HAD TENDED THEM IN THEIR CHILDHOOD THEY AND NONE OTHER SHOULD TEND HER IN HER INFIRMITY (AND->IN) AGE +3575-170457-0051-420: AT TEA TIME THEY WERE SAD AND SILENT AND THE MEAL WENT AWAY UNTOUCHED BY ANY OF THE THREE +3575-170457-0052-421: SHE HAD ANOTHER WEIGHT ON HER MIND THIS CHRISTMAS +3575-170457-0053-422: BUT ANNE HAD BEGUN TO SUFFER JUST BEFORE THE HOLIDAYS AND CHARLOTTE WATCHED OVER HER YOUNGER SISTERS WITH (THE->A) JEALOUS VIGILANCE OF SOME WILD CREATURE THAT CHANGES HER VERY NATURE IF DANGER THREATENS HER YOUNG +3575-170457-0054-423: STUNG BY ANXIETY FOR THIS LITTLE SISTER SHE UPBRAIDED MISS W FOR HER FANCIED INDIFFERENCE TO ANNE'S STATE OF HEALTH +3575-170457-0055-424: STILL HER HEART HAD RECEIVED A SHOCK IN THE PERCEPTION OF ANNE'S DELICACY AND ALL THESE HOLIDAYS SHE WATCHED OVER HER WITH THE LONGING FOND ANXIETY WHICH IS SO FULL OF SUDDEN PANGS OF FEAR +3575-170457-0056-425: I DOUBT WHETHER BRANWELL WAS MAINTAINING HIMSELF AT THIS TIME +3729-6852-0000-1660: TO CELEBRATE THE ARRIVAL OF HER SON (SILVIA->SYLVIA) GAVE A SPLENDID SUPPER TO WHICH SHE HAD INVITED ALL HER RELATIVES AND IT WAS A GOOD OPPORTUNITY FOR ME TO MAKE THEIR ACQUAINTANCE +3729-6852-0001-1661: WITHOUT SAYING IT POSITIVELY SHE MADE ME UNDERSTAND THAT BEING HERSELF AN ILLUSTRIOUS MEMBER OF THE REPUBLIC OF LETTERS SHE WAS WELL AWARE THAT SHE WAS SPEAKING TO AN INSECT +3729-6852-0002-1662: IN ORDER TO PLEASE HER I SPOKE TO HER OF THE (ABBE CONTI->ABBEY KANTI) AND I HAD OCCASION TO QUOTE TWO LINES OF THAT PROFOUND WRITER +3729-6852-0003-1663: (MADAM->MADAME) CORRECTED ME WITH A PATRONIZING AIR FOR MY PRONUNCIATION OF THE WORD (SCEVRA->SCAVRA) WHICH MEANS DIVIDED SAYING THAT IT OUGHT TO BE PRONOUNCED (SCEURA->SKURA) AND SHE ADDED THAT I OUGHT TO BE VERY GLAD TO HAVE LEARNED SO MUCH ON THE FIRST DAY OF MY ARRIVAL IN PARIS TELLING ME THAT IT WOULD BE AN IMPORTANT DAY IN MY LIFE +3729-6852-0004-1664: HER FACE WAS AN ENIGMA FOR IT INSPIRED (EVERYONE->EVERY ONE) WITH THE WARMEST SYMPATHY AND YET IF YOU EXAMINED IT ATTENTIVELY THERE WAS NOT ONE BEAUTIFUL FEATURE SHE COULD NOT BE CALLED HANDSOME BUT NO ONE COULD HAVE THOUGHT HER UGLY +3729-6852-0005-1665: (SILVIA->SYLVIA) WAS THE ADORATION OF FRANCE AND HER TALENT WAS THE REAL SUPPORT OF ALL THE COMEDIES WHICH THE GREATEST AUTHORS WROTE FOR HER ESPECIALLY OF THE PLAYS OF (MARIVAUX->MARY VO) FOR WITHOUT HER HIS COMEDIES WOULD NEVER HAVE GONE TO POSTERITY +3729-6852-0006-1666: (SILVIA->SYLVIA) DID NOT THINK THAT HER GOOD CONDUCT WAS A MERIT FOR SHE KNEW THAT SHE WAS VIRTUOUS ONLY BECAUSE HER SELF LOVE COMPELLED HER TO BE SO AND SHE NEVER EXHIBITED ANY PRIDE OR ASSUMED ANY SUPERIORITY TOWARDS HER THEATRICAL SISTERS ALTHOUGH SATISFIED TO SHINE BY THEIR TALENT OR THEIR BEAUTY THEY CARED LITTLE ABOUT RENDERING THEMSELVES CONSPICUOUS BY THEIR VIRTUE +3729-6852-0007-1667: TWO YEARS BEFORE HER DEATH I SAW HER PERFORM THE 
CHARACTER OF MARIANNE IN THE COMEDY OF (MARIVAUX->MARAVAUX) AND IN SPITE OF HER AGE AND DECLINING HEALTH THE ILLUSION WAS COMPLETE +3729-6852-0008-1668: SHE WAS (HONOURABLY->HONORABLY) BURIED IN THE CHURCH OF SAINT (SAUVEUR->SEVER) WITHOUT THE SLIGHTEST OPPOSITION FROM THE VENERABLE PRIEST WHO FAR FROM SHARING THE ANTI (CHRISTAIN->CHRISTIAN) INTOLERANCY OF THE CLERGY IN GENERAL SAID THAT HER PROFESSION AS AN ACTRESS HAD NOT HINDERED HER FROM BEING A GOOD CHRISTIAN AND THAT THE EARTH WAS (THE->A) COMMON MOTHER OF ALL HUMAN BEINGS AS JESUS CHRIST HAD BEEN THE SAVIOUR OF ALL MANKIND +3729-6852-0009-1669: YOU WILL FORGIVE ME DEAR READER IF I HAVE MADE YOU ATTEND THE FUNERAL OF (SILVIA->SYLVIA) TEN YEARS BEFORE HER DEATH BELIEVE ME I HAVE NO INTENTION OF PERFORMING A MIRACLE YOU MAY CONSOLE YOURSELF WITH THE IDEA THAT I SHALL SPARE YOU THAT UNPLEASANT TASK WHEN POOR (SILVIA->SYLVIA) DIES +3729-6852-0010-1670: I NEVER HAD ANY FAMILY +3729-6852-0011-1671: I HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I HAVE FORGOTTEN IT SINCE I HAVE BEEN IN SERVICE +3729-6852-0012-1672: I SHALL CALL YOU (ESPRIT->A SPREE) +3729-6852-0013-1673: YOU DO ME A GREAT HONOUR +3729-6852-0014-1674: HERE GO AND GET ME CHANGE FOR A LOUIS I HAVE IT SIR +3729-6852-0015-1675: AT YOUR SERVICE SIR +3729-6852-0016-1676: MADAME QUINSON BESIDES CAN ANSWER YOUR (ENQUIRIES->INQUIRIES) +3729-6852-0017-1677: I SEE A QUANTITY OF CHAIRS FOR HIRE AT THE RATE OF ONE (SOU->SOUS) MEN READING THE NEWSPAPER UNDER THE SHADE OF THE TREES GIRLS AND MEN BREAKFASTING EITHER ALONE OR IN COMPANY WAITERS WHO WERE RAPIDLY GOING UP AND DOWN A NARROW STAIRCASE HIDDEN UNDER THE FOLIAGE +3729-6852-0018-1678: I SIT DOWN AT A SMALL TABLE A WAITER COMES IMMEDIATELY TO (ENQUIRE->INQUIRE) MY WISHES +3729-6852-0019-1679: I TELL HIM TO GIVE ME SOME COFFEE IF IT IS GOOD +3729-6852-0020-1680: THEN TURNING TOWARDS ME HE SAYS THAT I LOOK LIKE A FOREIGNER AND WHEN I SAY THAT I AM AN ITALIAN HE BEGINS TO SPEAK TO ME OF THE COURT (OF->*) THE CITY OF THE THEATRES AND AT LAST HE OFFERS TO ACCOMPANY ME EVERYWHERE +3729-6852-0021-1681: I THANK HIM AND TAKE MY LEAVE +3729-6852-0022-1682: I ADDRESS HIM IN ITALIAN AND HE ANSWERS VERY WITTILY BUT HIS WAY OF SPEAKING MAKES ME SMILE AND I TELL HIM WHY +3729-6852-0023-1683: MY REMARK PLEASES HIM BUT I SOON PROVE TO HIM THAT IT IS NOT THE RIGHT WAY TO SPEAK HOWEVER PERFECT MAY HAVE BEEN THE LANGUAGE OF THAT ANCIENT WRITER +3729-6852-0024-1684: I SEE A CROWD IN ONE CORNER OF THE GARDEN EVERYBODY STANDING STILL AND LOOKING UP +3729-6852-0025-1685: IS THERE NOT A MERIDIAN EVERYWHERE +3729-6852-0026-1686: YES BUT THE MERIDIAN OF THE PALAIS ROYAL IS THE MOST EXACT +3729-6852-0027-1687: THAT IS TRUE (BADAUDERIE->BADR'D GREE) +3729-6852-0028-1688: ALL THESE HONEST PERSONS ARE WAITING THEIR TURN TO GET THEIR SNUFF BOXES FILLED +3729-6852-0029-1689: IT IS SOLD EVERYWHERE BUT FOR THE LAST THREE WEEKS NOBODY WILL USE ANY SNUFF BUT THAT SOLD AT THE (CIVET->SAVEETTE) CAT +3729-6852-0030-1690: IS IT BETTER THAN ANYWHERE ELSE +3729-6852-0031-1691: BUT HOW DID SHE MANAGE TO RENDER IT SO FASHIONABLE +3729-6852-0032-1692: SIMPLY BY STOPPING HER CARRIAGE TWO OR THREE TIMES BEFORE THE SHOP TO HAVE HER SNUFF BOX FILLED AND BY SAYING ALOUD TO THE YOUNG GIRL WHO HANDED BACK THE BOX THAT HER SNUFF WAS THE VERY BEST IN PARIS +3729-6852-0033-1693: YOU ARE NOW IN THE ONLY COUNTRY IN THE WORLD WHERE WIT CAN MAKE A FORTUNE BY SELLING EITHER A GENUINE OR A FALSE ARTICLE IN THE FIRST CASE IT RECEIVES THE WELCOME OF INTELLIGENT AND TALENTED PEOPLE AND IN THE SECOND 
FOOLS ARE ALWAYS READY TO REWARD IT FOR SILLINESS IS TRULY A CHARACTERISTIC OF THE PEOPLE HERE AND HOWEVER WONDERFUL IT MAY APPEAR SILLINESS IS THE DAUGHTER OF WIT +3729-6852-0034-1694: LET A MAN RUN AND EVERYBODY WILL RUN AFTER HIM THE CROWD WILL NOT STOP UNLESS THE MAN IS PROVED TO BE MAD BUT TO PROVE IT IS INDEED A DIFFICULT TASK BECAUSE WE HAVE A CROWD OF MEN WHO MAD FROM THEIR BIRTH ARE STILL CONSIDERED WISE +3729-6852-0035-1695: IT SEEMS TO ME I REPLIED THAT SUCH APPROVAL SUCH RATIFICATION OF THE OPINION EXPRESSED BY THE KING THE PRINCES OF THE BLOOD ET CETERA IS RATHER A PROOF OF THE AFFECTION FELT FOR THEM BY THE NATION FOR THE FRENCH CARRY THAT AFFECTION TO SUCH AN EXTENT THAT THEY BELIEVE THEM INFALLIBLE +3729-6852-0036-1696: WHEN THE KING COMES TO PARIS EVERYBODY CALLS OUT VIVE (LE ROI->LAURY) +3729-6852-0037-1697: SHE INTRODUCED ME TO ALL HER GUESTS AND GAVE ME SOME PARTICULARS RESPECTING EVERY ONE OF THEM +3729-6852-0038-1698: WHAT SIR I SAID TO HIM AM I FORTUNATE ENOUGH TO SEE YOU +3729-6852-0039-1699: HE HIMSELF RECITED THE SAME PASSAGE IN FRENCH AND POLITELY POINTED OUT THE PARTS IN WHICH HE THOUGHT THAT I HAD IMPROVED ON THE ORIGINAL +3729-6852-0040-1700: FOR THE FIRST DAY SIR I THINK THAT WHAT YOU HAVE DONE GIVES GREAT HOPES OF YOU AND WITHOUT ANY DOUBT YOU WILL MAKE RAPID PROGRESS +3729-6852-0041-1701: I BELIEVE IT SIR AND THAT IS WHAT I FEAR THEREFORE THE PRINCIPAL OBJECT OF MY VISIT HERE IS TO DEVOTE MYSELF ENTIRELY TO THE STUDY OF THE FRENCH LANGUAGE +3729-6852-0042-1702: I AM A VERY UNPLEASANT PUPIL ALWAYS ASKING QUESTIONS CURIOUS TROUBLESOME INSATIABLE AND EVEN SUPPOSING THAT I COULD MEET WITH THE TEACHER I REQUIRE I AM AFRAID I AM NOT RICH ENOUGH TO PAY HIM +3729-6852-0043-1703: I RESIDE IN THE (MARAIS RUE->MARAY GRUE) DE (DOUZE PORTES->DUSPORT) +3729-6852-0044-1704: I WILL MAKE YOU TRANSLATE THEM INTO FRENCH AND YOU NEED NOT BE AFRAID OF MY FINDING YOU INSATIABLE +3729-6852-0045-1705: HE HAD A GOOD APPETITE COULD TELL A GOOD STORY WITHOUT LAUGHING (WAS->WITH) CELEBRATED FOR HIS WITTY REPARTEES AND HIS SOCIABLE MANNERS BUT HE SPENT HIS LIFE AT HOME SELDOM GOING OUT AND SEEING HARDLY (ANYONE->ANY ONE) BECAUSE HE ALWAYS HAD A PIPE IN HIS MOUTH AND WAS SURROUNDED BY AT LEAST TWENTY CATS WITH WHICH HE WOULD AMUSE HIMSELF ALL DAY +3729-6852-0046-1706: HIS HOUSEKEEPER HAD THE MANAGEMENT OF EVERYTHING SHE NEVER ALLOWED HIM TO BE IN NEED OF ANYTHING AND SHE GAVE NO ACCOUNT OF HIS MONEY WHICH SHE KEPT ALTOGETHER BECAUSE HE NEVER ASKED HER TO RENDER ANY ACCOUNTS +4077-13751-0000-1258: ON THE SIXTH OF APRIL EIGHTEEN THIRTY THE CHURCH OF JESUS CHRIST OF LATTER DAY SAINTS WAS (FORMALLY->FORMERLY) ORGANIZED AND THUS TOOK ON A LEGAL EXISTENCE +4077-13751-0001-1259: ITS ORIGIN WAS SMALL A GERM AN INSIGNIFICANT SEED HARDLY TO BE THOUGHT OF AS LIKELY TO AROUSE OPPOSITION +4077-13751-0002-1260: INSTEAD OF BUT SIX REGULARLY AFFILIATED MEMBERS AND AT MOST TWO SCORE OF ADHERENTS THE ORGANIZATION NUMBERS (TODAY->TO DAY) MANY HUNDRED THOUSAND SOULS +4077-13751-0003-1261: IN PLACE (OF->HAVE) A SINGLE HAMLET IN THE SMALLEST CORNER OF WHICH THE MEMBERS COULD HAVE CONGREGATED THERE NOW ARE ABOUT SEVENTY STAKES OF ZION AND ABOUT SEVEN HUNDRED ORGANIZED WARDS EACH WARD AND STAKE WITH ITS FULL COMPLEMENT OF OFFICERS AND PRIESTHOOD ORGANIZATIONS +4077-13751-0004-1262: THE (PRACTISE->PRACTICE) OF GATHERING ITS PROSELYTES INTO ONE PLACE PREVENTS THE BUILDING UP AND STRENGTHENING OF FOREIGN BRANCHES AND INASMUCH AS EXTENSIVE AND STRONG ORGANIZATIONS ARE SELDOM MET WITH ABROAD VERY ERRONEOUS IDEAS 
EXIST CONCERNING THE STRENGTH OF THE CHURCH +4077-13751-0005-1263: NEVERTHELESS THE MUSTARD SEED AMONG THE SMALLEST OF ALL (SEEDS->SEATS) HAS ATTAINED (THE->THAT) PROPORTIONS OF A TREE AND THE BIRDS OF THE AIR ARE NESTING IN ITS BRANCHES THE ACORN IS NOW (AN->IN) OAK OFFERING PROTECTION AND THE SWEETS OF SATISFACTION TO EVERY EARNEST PILGRIM JOURNEYING ITS WAY (FOR->FIR) TRUTH +4077-13751-0006-1264: THEIR EYES WERE FROM THE FIRST TURNED IN ANTICIPATION TOWARD THE EVENING SUN NOT MERELY THAT THE WORK OF (PROSELYTING->PROSELY) SHOULD BE CARRIED ON IN THE WEST BUT THAT THE HEADQUARTERS OF THE CHURCH SHOULD BE THERE ESTABLISHED +4077-13751-0007-1265: THE BOOK (OF->O) MORMON HAD TAUGHT THE PEOPLE THE TRUE ORIGIN AND DESTINY OF THE AMERICAN INDIANS AND TOWARD THIS DARK SKINNED REMNANT OF A ONCE MIGHTY PEOPLE THE MISSIONARIES OF MORMONISM EARLY TURNED THEIR EYES AND WITH THEIR EYES WENT THEIR HEARTS AND THEIR HOPES +4077-13751-0008-1266: IT IS NOTABLE THAT THE INDIAN TRIBES HAVE (GENERALLY->GERALLY) REGARDED (THE->THEIR) RELIGION OF THE LATTER DAY SAINTS WITH FAVOR SEEING IN THE BOOK (OF->O) MORMON STRIKING AGREEMENT WITH THEIR OWN TRADITIONS +4077-13751-0009-1267: THE FIRST WELL ESTABLISHED SEAT OF THE CHURCH WAS IN THE PRETTY LITTLE TOWN OF (KIRTLAND->CURTLIN) OHIO ALMOST WITHIN SIGHT OF LAKE ERIE AND HERE SOON ROSE THE FIRST TEMPLE OF MODERN TIMES +4077-13751-0010-1268: TO THE FERVENT LATTER DAY SAINT A TEMPLE IS NOT SIMPLY A CHURCH BUILDING A HOUSE FOR RELIGIOUS ASSEMBLY +4077-13751-0011-1269: SOON THOUSANDS OF CONVERTS HAD RENTED OR PURCHASED HOMES IN MISSOURI INDEPENDENCE JACKSON COUNTY BEING THEIR (CENTER->CENTRE) BUT FROM THE FIRST THEY WERE UNPOPULAR AMONG THE (MISSOURIANS->MISSOURIENS) +4077-13751-0012-1270: THE LIEUTENANT GOVERNOR (LILBURN->LITTLE BURN) W BOGGS AFTERWARD GOVERNOR WAS A PRONOUNCED MORMON HATER AND THROUGHOUT THE PERIOD OF THE TROUBLES HE (MANIFESTED->MANIFEST HIS) SYMPATHY WITH THE PERSECUTORS +4077-13751-0013-1271: THEIR SUFFERINGS HAVE NEVER YET BEEN FITLY CHRONICLED BY HUMAN SCRIBE +4077-13751-0014-1272: MAKING THEIR WAY ACROSS THE RIVER MOST OF THE REFUGEES FOUND SHELTER AMONG THE MORE HOSPITABLE PEOPLE OF CLAY COUNTY AND AFTERWARD ESTABLISHED THEMSELVES IN (CALDWELL->CAULDWELL) COUNTY THEREIN FOUNDING THE CITY OF FAR WEST +4077-13751-0015-1273: A SMALL SETTLEMENT HAD BEEN FOUNDED BY MORMON FAMILIES ON SHOAL CREEK AND HERE ON THE THIRTIETH OF OCTOBER EIGHTEEN THIRTY EIGHT A COMPANY OF TWO HUNDRED AND FORTY FELL UPON THE HAPLESS SETTLERS AND BUTCHERED A SCORE +4077-13751-0016-1274: BE IT SAID TO THE HONOR OF SOME OF THE OFFICERS ENTRUSTED WITH THE TERRIBLE COMMISSION THAT WHEN THEY LEARNED ITS TRUE SIGNIFICANCE THEY (RESIGNED->RESIGN) THEIR AUTHORITY RATHER THAN HAVE ANYTHING TO DO WITH WHAT THEY DESIGNATED A COLD BLOODED BUTCHERY +4077-13751-0017-1275: OH WHAT A RECORD TO READ WHAT A PICTURE TO GAZE UPON HOW AWFUL THE FACT +4077-13751-0018-1276: AMERICAN (SCHOOL BOYS->SCHOOLBOYS) READ WITH EMOTIONS OF HORROR OF THE ALBIGENSES DRIVEN BEATEN AND KILLED WITH A (PAPAL->PEPPEL) LEGATE DIRECTING THE BUTCHERY AND OF THE (VAUDOIS->FAUDOIS) HUNTED AND HOUNDED LIKE BEASTS AS THE EFFECT OF A ROYAL DECREE AND THEY YET SHALL READ IN THE HISTORY OF THEIR OWN COUNTRY OF SCENES AS TERRIBLE AS THESE IN THE EXHIBITION OF INJUSTICE AND INHUMAN HATE +4077-13751-0019-1277: WHO BEGAN THE QUARREL WAS IT THE MORMONS +4077-13751-0020-1278: AS (A SAMPLE->THE SABLE) OF THE PRESS COMMENTS AGAINST THE BRUTALITY OF THE MISSOURIANS I QUOTE A PARAGRAPH FROM THE (QUINCY->QUINCEY) ARGUS MARCH 
SIXTEENTH EIGHTEEN THIRTY NINE +4077-13751-0021-1279: IT WILL BE OBSERVED THAT AN ORGANIZED MOB AIDED BY MANY OF THE CIVIL AND MILITARY OFFICERS OF MISSOURI WITH GOVERNOR BOGGS AT THEIR HEAD HAVE BEEN THE PROMINENT ACTORS IN THIS BUSINESS INCITED TOO IT APPEARS AGAINST THE MORMONS BY POLITICAL HATRED AND BY THE ADDITIONAL MOTIVES OF PLUNDER AND REVENGE +4077-13754-0000-1241: THE ARMY FOUND THE PEOPLE IN POVERTY AND LEFT THEM IN COMPARATIVE WEALTH +4077-13754-0001-1242: BUT A WORD FURTHER CONCERNING THE EXPEDITION IN GENERAL +4077-13754-0002-1243: IT WAS THROUGH FLOYD'S ADVICE THAT (BUCHANAN->YOU CANNOT) ORDERED THE MILITARY EXPEDITION TO UTAH OSTENSIBLY TO INSTALL CERTAIN FEDERAL OFFICIALS AND TO REPRESS AN ALLEGED INFANTILE REBELLION WHICH IN FACT HAD NEVER COME INTO EXISTENCE BUT IN REALITY TO FURTHER THE (INTERESTS->ENTRANCE) OF THE SECESSIONISTS +4077-13754-0003-1244: MOREOVER HAD THE PEOPLE BEEN INCLINED TO REBELLION WHAT GREATER OPPORTUNITY COULD THEY HAVE WISHED +4077-13754-0004-1245: ALREADY A NORTH AND (A->THE) SOUTH WERE TALKED OF WHY NOT SET UP ALSO (A->*) WEST +4077-13754-0005-1246: THEY KNEW NO NORTH NO SOUTH NO EAST NO WEST THEY STOOD POSITIVELY BY THE CONSTITUTION AND WOULD HAVE NOTHING TO DO IN THE BLOODY STRIFE BETWEEN BROTHERS UNLESS INDEED THEY WERE SUMMONED BY THE AUTHORITY TO WHICH THEY HAD ALREADY ONCE LOYALLY RESPONDED TO FURNISH MEN (AND->IN) ARMS FOR (THEIR->THE) COUNTRY'S NEED +4077-13754-0006-1247: WHAT THE LATTER DAY (SAINTS->SAYS) CALL CELESTIAL MARRIAGE IS CHARACTERISTIC OF THE CHURCH AND IS IN VERY GENERAL (PRACTISE->PRACTICE) BUT OF CELESTIAL MARRIAGE PLURALITY OF WIVES WAS AN INCIDENT NEVER AN ESSENTIAL +4077-13754-0007-1248: WE BELIEVE IN A LITERAL RESURRECTION AND AN ACTUAL HEREAFTER IN WHICH FUTURE (STATE->STATES) SHALL BE RECOGNIZED EVERY SANCTIFIED AND AUTHORIZED RELATIONSHIP EXISTING HERE ON EARTH OF PARENT AND CHILD (BROTHER AND->BRETHREN) SISTER HUSBAND AND WIFE +4077-13754-0008-1249: IT HAS BEEN MY PRIVILEGE TO TREAD THE SOIL OF MANY LANDS TO OBSERVE THE CUSTOMS AND STUDY THE HABITS OF MORE NATIONS THAN ONE AND I HAVE YET (TO FIND->DEFINED) THE PLACE AND MEET THE PEOPLE WHERE AND WITH WHOM THE PURITY OF MAN AND WOMAN IS HELD MORE PRECIOUS THAN AMONG THE MALIGNED MORMONS IN THE MOUNTAIN VALLEYS OF THE WEST +4077-13754-0009-1250: AT THE INCEPTION OF (PLURAL->PEARL) MARRIAGE AMONG THE LATTER DAY SAINTS THERE WAS NO LAW NATIONAL OR STATE AGAINST ITS (PRACTISE->PRACTICE) +4077-13754-0010-1251: IN EIGHTEEN SIXTY TWO A LAW WAS ENACTED WITH (THE->A) PURPOSE OF SUPPRESSING (PLURAL->PLORO) MARRIAGE AND AS HAD BEEN PREDICTED IN THE NATIONAL SENATE PRIOR TO ITS PASSAGE IT LAY FOR MANY YEARS A DEAD LETTER +4077-13754-0011-1252: FEDERAL JUDGES AND UNITED STATES ATTORNEYS (IN UTAH->AND NEW TOP) WHO WERE NOT (MORMONS->MORE MEN'S) NOR LOVERS OF (MORMONISM->WARMONISM) REFUSED TO ENTERTAIN COMPLAINTS OR PROSECUTE CASES UNDER THE LAW BECAUSE OF ITS MANIFEST INJUSTICE AND INADEQUACY +4077-13754-0012-1253: THIS MEANT THAT FOR AN ALLEGED (MISDEMEANOR->MISDEMEANOUR) FOR WHICH CONGRESS PRESCRIBED A MAXIMUM PENALTY OF SIX MONTHS IMPRISONMENT AND A FINE OF THREE HUNDRED DOLLARS A MAN MIGHT BE IMPRISONED FOR LIFE (AYE->I) FOR MANY TERMS OF A MAN'S NATURAL LIFE DID THE COURT'S POWER TO ENFORCE ITS SENTENCES EXTEND SO FAR AND MIGHT BE FINED MILLIONS OF DOLLARS +4077-13754-0013-1254: BEFORE THIS TRAVESTY ON THE ADMINISTRATION OF LAW COULD BE BROUGHT BEFORE THE COURT OF LAST RESORT AND THERE (MEET->MET) WITH THE REVERSAL AND REBUKE IT DESERVED MEN WERE IMPRISONED UNDER 
(SENTENCES->SENTENCE) OF MANY YEARS DURATION +4077-13754-0014-1255: THE PEOPLE CONTESTED THESE MEASURES ONE BY ONE IN THE COURTS PRESENTING IN CASE AFTER CASE THE DIFFERENT PHASES OF THE SUBJECT AND URGING THE UNCONSTITUTIONALITY OF THE MEASURE +4077-13754-0015-1256: THEN THE CHURCH WAS DISINCORPORATED AND ITS PROPERTY BOTH REAL AND PERSONAL CONFISCATED AND (ESCHEATED->ISTIATED) TO THE GOVERNMENT OF THE UNITED STATES AND ALTHOUGH THE PERSONAL PROPERTY WAS SOON RESTORED REAL ESTATE OF GREAT VALUE LONG LAY IN THE HANDS OF THE (COURT'S->COURTS) RECEIVER AND THE MORMON CHURCH HAD TO PAY THE NATIONAL GOVERNMENT HIGH RENTAL ON ITS OWN PROPERTY +4077-13754-0016-1257: AND SO THE STORY OF MORMONISM RUNS ON ITS FINALE HAS NOT YET BEEN WRITTEN THE CURRENT PRESS PRESENTS CONTINUOUSLY NEW STAGES OF ITS PROGRESS NEW DEVELOPMENTS OF ITS PLAN +4446-2271-0000-1133: (MAINHALL->MAIN HALL) LIKED ALEXANDER BECAUSE HE WAS AN ENGINEER +4446-2271-0001-1134: (HE HAD->WE NOT) PRECONCEIVED IDEAS ABOUT EVERYTHING AND HIS IDEA ABOUT AMERICANS WAS THAT THEY SHOULD BE ENGINEERS OR MECHANICS +4446-2271-0002-1135: (IT'S->ITS) TREMENDOUSLY WELL PUT ON TOO +4446-2271-0003-1136: IT'S BEEN ON ONLY TWO WEEKS AND I'VE BEEN HALF A DOZEN TIMES ALREADY +4446-2271-0004-1137: DO YOU KNOW ALEXANDER (MAINHALL->MAIN HALL) LOOKED WITH PERPLEXITY UP INTO THE TOP OF THE HANSOM AND RUBBED HIS PINK CHEEK WITH HIS GLOVED FINGER DO YOU KNOW I SOMETIMES THINK OF TAKING TO CRITICISM SERIOUSLY MYSELF +4446-2271-0005-1138: SHE SAVES HER HAND TOO (SHE'S AT->SHE SAID) HER BEST IN THE SECOND ACT +4446-2271-0006-1139: HE'S BEEN WANTING TO MARRY HILDA THESE THREE YEARS AND MORE +4446-2271-0007-1140: SHE DOESN'T TAKE UP WITH ANYBODY YOU KNOW +4446-2271-0008-1141: IRENE (BURGOYNE->WERE GOING) ONE OF HER FAMILY TOLD ME IN CONFIDENCE THAT THERE WAS A ROMANCE SOMEWHERE BACK IN THE BEGINNING +4446-2271-0009-1142: (MAINHALL VOUCHED->MEANHAVED) FOR HER CONSTANCY WITH A LOFTINESS THAT MADE ALEXANDER SMILE EVEN WHILE A KIND OF RAPID EXCITEMENT WAS TINGLING THROUGH HIM +4446-2271-0010-1143: HE'S ANOTHER WHO'S AWFULLY KEEN ABOUT HER LET ME INTRODUCE YOU +4446-2271-0011-1144: SIR HARRY (TOWNE->TOWN) MISTER BARTLEY ALEXANDER THE AMERICAN ENGINEER +4446-2271-0012-1145: I SAY SIR HARRY THE LITTLE GIRL'S GOING FAMOUSLY TO NIGHT ISN'T SHE +4446-2271-0013-1146: (DO->*) YOU KNOW I THOUGHT THE DANCE (A BIT CONSCIOUS->OF GOOD CONSCIENCE) TO NIGHT FOR THE FIRST TIME +4446-2271-0014-1147: (WESTMERE->WESTMARE) AND I WERE BACK AFTER THE FIRST ACT AND WE THOUGHT SHE SEEMED QUITE UNCERTAIN OF HERSELF +4446-2271-0015-1148: A LITTLE ATTACK OF NERVES POSSIBLY +4446-2271-0016-1149: HE WAS BEGINNING TO FEEL (A->THE) KEEN INTEREST IN THE SLENDER BAREFOOT DONKEY GIRL WHO SLIPPED IN AND OUT OF THE PLAY SINGING LIKE SOME ONE WINDING THROUGH A HILLY FIELD +4446-2271-0017-1150: ONE NIGHT WHEN HE AND WINIFRED WERE SITTING TOGETHER ON THE BRIDGE HE TOLD HER (THAT->THE) THINGS HAD HAPPENED WHILE HE WAS STUDYING ABROAD THAT HE WAS SORRY FOR ONE THING IN PARTICULAR AND HE ASKED HER WHETHER SHE THOUGHT SHE OUGHT TO KNOW ABOUT THEM +4446-2271-0018-1151: SHE CONSIDERED (*->FOR) A MOMENT AND THEN SAID NO I THINK NOT (THOUGH->THE WAY) I AM GLAD YOU ASK ME +4446-2271-0019-1152: AFTER THAT IT WAS EASY TO FORGET ACTUALLY TO FORGET +4446-2271-0020-1153: OF COURSE HE REFLECTED SHE ALWAYS HAD THAT COMBINATION OF SOMETHING HOMELY AND SENSIBLE AND SOMETHING UTTERLY WILD AND DAFT +4446-2271-0021-1154: SHE MUST CARE ABOUT THE THEATRE A GREAT DEAL MORE THAN SHE USED TO +4446-2271-0022-1155: I'M GLAD SHE'S HELD 
HER OWN (SINCE->SEN) +4446-2271-0023-1156: AFTER ALL WE WERE AWFULLY YOUNG +4446-2271-0024-1157: I SHOULDN'T WONDER IF SHE COULD LAUGH ABOUT IT WITH ME NOW +4446-2273-0000-1158: HILDA WAS VERY NICE TO HIM AND HE SAT ON THE EDGE OF HIS CHAIR FLUSHED WITH HIS CONVERSATIONAL EFFORTS AND MOVING HIS CHIN ABOUT NERVOUSLY OVER HIS HIGH COLLAR +4446-2273-0001-1159: THEY ASKED HIM TO COME TO SEE THEM IN CHELSEA AND THEY SPOKE VERY TENDERLY OF HILDA +4446-2273-0002-1160: LAMB WOULDN'T CARE A GREAT DEAL ABOUT MANY OF THEM I FANCY +4446-2273-0003-1161: WHEN BARTLEY ARRIVED AT BEDFORD SQUARE ON SUNDAY EVENING MARIE THE PRETTY LITTLE FRENCH GIRL MET HIM AT THE DOOR AND CONDUCTED HIM UPSTAIRS +4446-2273-0004-1162: I SHOULD NEVER HAVE ASKED YOU IF MOLLY HAD BEEN HERE FOR I REMEMBER YOU DON'T LIKE ENGLISH COOKERY +4446-2273-0005-1163: I HAVEN'T HAD A CHANCE YET TO TELL YOU WHAT A JOLLY LITTLE PLACE I THINK THIS IS +4446-2273-0006-1164: THEY ARE ALL SKETCHES MADE ABOUT THE (VILLA D'ESTE->VILLIDESA) YOU SEE +4446-2273-0007-1165: THOSE FELLOWS ARE ALL VERY LOYAL EVEN (MAINHALL->MAIN HALL) +4446-2273-0008-1166: I'VE MANAGED TO SAVE SOMETHING EVERY YEAR AND THAT WITH HELPING MY THREE SISTERS NOW AND THEN AND TIDING POOR COUSIN MIKE OVER BAD SEASONS +4446-2273-0009-1167: IT'S NOT PARTICULARLY RARE SHE SAID BUT SOME OF IT WAS MY MOTHER'S +4446-2273-0010-1168: THERE WAS WATERCRESS SOUP AND SOLE AND A DELIGHTFUL OMELETTE STUFFED WITH MUSHROOMS AND TRUFFLES AND TWO SMALL RARE DUCKLINGS AND ARTICHOKES AND A DRY YELLOW RHONE WINE OF WHICH BARTLEY HAD ALWAYS BEEN VERY FOND +4446-2273-0011-1169: THERE IS NOTHING ELSE THAT LOOKS SO JOLLY +4446-2273-0012-1170: THANK YOU BUT I DON'T LIKE IT SO WELL AS THIS +4446-2273-0013-1171: HAVE YOU BEEN IN PARIS MUCH THESE LATE YEARS +4446-2273-0014-1172: THERE ARE (*->A) FEW CHANGES IN THE OLD QUARTER +4446-2273-0015-1173: DON'T I THOUGH I'M SO SORRY TO HEAR IT HOW DID HER SON TURN OUT +4446-2273-0016-1174: HER HAIR IS STILL LIKE FLAX AND HER BLUE EYES ARE JUST LIKE A BABY'S AND SHE HAS THE SAME THREE FRECKLES ON HER LITTLE NOSE AND TALKS ABOUT GOING BACK TO HER (BAINS DE MER->BANDERE) +4446-2273-0017-1175: HOW JOLLY IT WAS BEING YOUNG HILDA +4446-2273-0018-1176: DO YOU REMEMBER THAT FIRST WALK WE TOOK TOGETHER IN PARIS +4446-2273-0019-1177: COME WE'LL HAVE OUR COFFEE IN THE OTHER ROOM AND YOU CAN SMOKE +4446-2273-0020-1178: I THINK WE DID SHE ANSWERED DEMURELY +4446-2273-0021-1179: WHAT SHE WANTED FROM US WAS NEITHER OUR FLOWERS NOR OUR (FRANCS->FRANKS) BUT JUST OUR YOUTH +4446-2273-0022-1180: THEY WERE BOTH REMEMBERING WHAT THE WOMAN HAD SAID WHEN SHE TOOK THE MONEY GOD GIVE YOU A HAPPY LOVE +4446-2273-0023-1181: THE STRANGE WOMAN AND HER PASSIONATE SENTENCE THAT RANG OUT SO SHARPLY HAD FRIGHTENED THEM BOTH +4446-2273-0024-1182: BARTLEY STARTED WHEN HILDA RANG THE LITTLE BELL BESIDE HER DEAR ME WHY DID YOU DO THAT +4446-2273-0025-1183: IT WAS VERY JOLLY HE MURMURED LAZILY AS MARIE CAME IN TO TAKE AWAY THE COFFEE +4446-2273-0026-1184: HAVE I TOLD YOU ABOUT MY NEW PLAY +4446-2273-0027-1185: WHEN SHE FINISHED ALEXANDER SHOOK HIMSELF OUT OF A REVERIE +4446-2273-0028-1186: NONSENSE OF COURSE I CAN'T REALLY SING EXCEPT THE WAY MY MOTHER AND GRANDMOTHER DID BEFORE ME +4446-2273-0029-1187: IT'S REALLY TOO WARM IN THIS ROOM TO SING DON'T YOU FEEL IT +4446-2273-0030-1188: ALEXANDER WENT OVER AND OPENED THE WINDOW FOR HER +4446-2273-0031-1189: THERE JUST IN FRONT +4446-2273-0032-1190: HE STOOD A LITTLE BEHIND HER AND TRIED TO STEADY HIMSELF AS HE SAID IT'S SOFT AND MISTY SEE HOW WHITE THE 
STARS ARE +4446-2273-0033-1191: FOR A LONG TIME NEITHER HILDA NOR BARTLEY SPOKE +4446-2273-0034-1192: HE FELT A TREMOR RUN THROUGH THE SLENDER YELLOW FIGURE IN FRONT OF HIM +4446-2273-0035-1193: BARTLEY LEANED OVER HER SHOULDER WITHOUT TOUCHING HER AND WHISPERED IN HER EAR YOU ARE GIVING ME A CHANCE YES +4446-2273-0036-1194: ALEXANDER (UNCLENCHED->CLENCHED) THE TWO HANDS AT HIS SIDES +4446-2275-0000-1195: THE STOP AT QUEENSTOWN THE TEDIOUS PASSAGE (UP->OF) THE (MERSEY->MERCY) WERE THINGS THAT HE NOTED DIMLY THROUGH HIS GROWING IMPATIENCE +4446-2275-0001-1196: SHE BLUSHED AND SMILED AND FUMBLED HIS CARD IN HER CONFUSION BEFORE SHE RAN UPSTAIRS +4446-2275-0002-1197: ALEXANDER PACED UP AND DOWN THE HALLWAY BUTTONING AND UNBUTTONING HIS OVERCOAT UNTIL SHE RETURNED AND TOOK HIM UP TO HILDA'S LIVING ROOM +4446-2275-0003-1198: THE ROOM WAS EMPTY WHEN HE ENTERED +4446-2275-0004-1199: ALEXANDER DID NOT SIT DOWN +4446-2275-0005-1200: I FELT IT IN MY BONES WHEN I WOKE THIS MORNING THAT SOMETHING SPLENDID WAS GOING TO TURN UP +4446-2275-0006-1201: I THOUGHT IT MIGHT BE SISTER KATE OR COUSIN MIKE WOULD BE HAPPENING ALONG +4446-2275-0007-1202: SHE PUSHED HIM TOWARD THE BIG CHAIR BY THE FIRE AND SAT DOWN ON A STOOL AT THE OPPOSITE SIDE OF THE HEARTH HER KNEES DRAWN UP TO HER CHIN LAUGHING LIKE A HAPPY LITTLE GIRL +4446-2275-0008-1203: WHEN DID YOU COME BARTLEY AND HOW DID IT HAPPEN YOU HAVEN'T SPOKEN A WORD +4446-2275-0009-1204: I GOT IN ABOUT TEN MINUTES AGO +4446-2275-0010-1205: ALEXANDER LEANED FORWARD AND WARMED HIS HANDS BEFORE THE BLAZE +4446-2275-0011-1206: BARTLEY BENT (LOWER->LOWERED) OVER THE FIRE +4446-2275-0012-1207: SHE LOOKED AT HIS HEAVY SHOULDERS AND BIG DETERMINED HEAD THRUST FORWARD LIKE A CATAPULT IN LEASH +4446-2275-0013-1208: I'LL DO ANYTHING YOU WISH ME TO BARTLEY SHE SAID TREMULOUSLY +4446-2275-0014-1209: I CAN'T STAND SEEING YOU MISERABLE +4446-2275-0015-1210: HE PULLED UP A WINDOW AS IF THE AIR WERE HEAVY +4446-2275-0016-1211: HILDA WATCHED HIM FROM (HER->THE) CORNER TREMBLING AND SCARCELY BREATHING DARK SHADOWS GROWING ABOUT HER EYES IT +4446-2275-0017-1212: BUT IT'S WORSE NOW IT'S UNBEARABLE +4446-2275-0018-1213: I GET NOTHING BUT MISERY OUT OF EITHER +4446-2275-0019-1214: THE WORLD IS ALL THERE JUST AS IT USED TO BE BUT I CAN'T GET AT IT ANY MORE +4446-2275-0020-1215: IT WAS MYSELF I WAS DEFYING HILDA +4446-2275-0021-1216: (HILDA'S->HELDA'S) FACE QUIVERED BUT SHE WHISPERED YES I THINK IT MUST HAVE BEEN +4446-2275-0022-1217: BUT WHY DIDN'T YOU TELL ME WHEN YOU WERE HERE IN THE SUMMER +4446-2275-0023-1218: ALEXANDER GROANED I MEANT TO BUT SOMEHOW I COULDN'T +4446-2275-0024-1219: SHE PRESSED HIS HAND GENTLY IN GRATITUDE +4446-2275-0025-1220: WEREN'T YOU HAPPY THEN AT ALL +4446-2275-0026-1221: SHE CLOSED HER EYES AND TOOK A DEEP BREATH AS IF TO DRAW IN AGAIN THE FRAGRANCE OF THOSE DAYS +4446-2275-0027-1222: HE MOVED UNEASILY AND HIS CHAIR CREAKED +4446-2275-0028-1223: YES YES SHE HURRIED PULLING HER HAND GENTLY AWAY FROM HIM +4446-2275-0029-1224: PLEASE TELL ME ONE THING BARTLEY AT LEAST TELL ME THAT YOU BELIEVE I THOUGHT I WAS MAKING YOU HAPPY +4446-2275-0030-1225: YES (HILDA->HELDA) I KNOW THAT HE SAID SIMPLY +4446-2275-0031-1226: I UNDERSTAND BARTLEY I WAS WRONG +4446-2275-0032-1227: BUT I DIDN'T KNOW YOU'VE ONLY TO TELL ME NOW +4446-2275-0033-1228: WHAT I MEAN IS THAT I WANT YOU TO PROMISE NEVER TO SEE ME AGAIN NO MATTER HOW OFTEN I COME NO MATTER HOW HARD I BEG +4446-2275-0034-1229: KEEP AWAY IF YOU WISH WHEN HAVE I EVER FOLLOWED YOU +4446-2275-0035-1230: ALEXANDER ROSE AND 
SHOOK HIMSELF ANGRILY YES I KNOW I'M COWARDLY +4446-2275-0036-1231: HE TOOK (HER->A) ROUGHLY IN HIS ARMS DO YOU KNOW WHAT I MEAN +4446-2275-0037-1232: OH BARTLEY WHAT AM I TO DO +4446-2275-0038-1233: I WILL ASK THE LEAST IMAGINABLE BUT I MUST HAVE SOMETHING +4446-2275-0039-1234: I MUST KNOW ABOUT YOU +4446-2275-0040-1235: THE SIGHT OF YOU BARTLEY TO SEE YOU LIVING AND HAPPY AND SUCCESSFUL CAN I NEVER MAKE YOU UNDERSTAND WHAT THAT MEANS TO ME +4446-2275-0041-1236: YOU SEE LOVING SOME ONE AS I LOVE YOU MAKES THE WHOLE WORLD DIFFERENT +4446-2275-0042-1237: AND THEN YOU CAME BACK NOT CARING VERY MUCH BUT IT MADE NO DIFFERENCE +4446-2275-0043-1238: BARTLEY BENT OVER AND TOOK HER IN HIS ARMS KISSING HER MOUTH AND HER WET TIRED EYES +4446-2275-0044-1239: (DON'T->A TOLL) CRY DON'T CRY HE WHISPERED +4446-2275-0045-1240: WE'VE TORTURED EACH OTHER ENOUGH FOR (TONIGHT->TO NIGHT) +4507-16021-0000-1469: CHAPTER ONE ORIGIN +4507-16021-0001-1470: IT ENGENDERS A WHOLE WORLD LA (PEGRE->PEG) FOR WHICH (READ->RED) THEFT AND A HELL LA (PEGRENNE->PEGRIN) FOR WHICH (READ->RED) HUNGER +4507-16021-0002-1471: THUS IDLENESS IS THE MOTHER +4507-16021-0003-1472: SHE HAS A SON THEFT AND A DAUGHTER HUNGER +4507-16021-0004-1473: WHAT IS SLANG +4507-16021-0005-1474: WE HAVE NEVER UNDERSTOOD THIS SORT OF OBJECTIONS +4507-16021-0006-1475: SLANG IS ODIOUS +4507-16021-0007-1476: SLANG MAKES ONE SHUDDER +4507-16021-0008-1477: WHO DENIES THAT OF COURSE IT DOES +4507-16021-0009-1478: WHEN IT IS A QUESTION OF PROBING A WOUND A GULF A SOCIETY SINCE (WHEN->ONE) HAS IT BEEN CONSIDERED WRONG TO GO TOO FAR TO GO TO THE BOTTOM +4507-16021-0010-1479: WE HAVE ALWAYS THOUGHT THAT IT WAS SOMETIMES A COURAGEOUS ACT AND AT LEAST A SIMPLE AND USEFUL DEED WORTHY OF THE SYMPATHETIC ATTENTION WHICH DUTY ACCEPTED (AND->IN) FULFILLED MERITS +4507-16021-0011-1480: WHY SHOULD ONE NOT EXPLORE EVERYTHING AND STUDY EVERYTHING +4507-16021-0012-1481: WHY SHOULD ONE HALT ON THE WAY +4507-16021-0013-1482: NOTHING IS MORE LUGUBRIOUS THAN THE CONTEMPLATION THUS IN ITS NUDITY IN THE BROAD LIGHT OF THOUGHT OF THE HORRIBLE SWARMING OF SLANG +4507-16021-0014-1483: (NOW->NO) WHEN HAS HORROR EVER EXCLUDED STUDY +4507-16021-0015-1484: SINCE WHEN HAS MALADY BANISHED MEDICINE +4507-16021-0016-1485: CAN ONE IMAGINE A NATURALIST REFUSING TO STUDY THE VIPER THE BAT THE SCORPION THE CENTIPEDE THE (TARANTULA->TERENTIAL) AND ONE WHO WOULD CAST THEM BACK INTO THEIR DARKNESS SAYING (OH->O) HOW UGLY THAT IS +4507-16021-0017-1486: HE WOULD BE LIKE A PHILOLOGIST REFUSING TO EXAMINE A FACT IN LANGUAGE A PHILOSOPHER HESITATING TO SCRUTINIZE A FACT IN HUMANITY +4507-16021-0018-1487: WHAT IS SLANG PROPERLY SPEAKING +4507-16021-0019-1488: IT IS THE LANGUAGE OF WRETCHEDNESS +4507-16021-0020-1489: WE MAY BE STOPPED THE FACT MAY BE PUT TO US IN GENERAL TERMS WHICH IS ONE WAY OF ATTENUATING IT WE MAY BE TOLD THAT ALL TRADES PROFESSIONS IT MAY BE ADDED ALL THE ACCIDENTS OF THE SOCIAL HIERARCHY AND ALL FORMS OF INTELLIGENCE HAVE THEIR OWN SLANG +4507-16021-0021-1490: THE PAINTER WHO SAYS MY GRINDER THE NOTARY WHO SAYS MY SKIP THE GUTTER THE (HAIRDRESSER->HAIR DRESSER) WHO SAYS MY (MEALYBACK->MEALLY BACK) THE COBBLER WHO SAYS MY CUB TALKS SLANG +4507-16021-0022-1491: THERE IS THE SLANG OF THE AFFECTED LADY AS WELL AS OF THE (PRECIEUSES->PURSUS) +4507-16021-0023-1492: THE SUGAR MANUFACTURER WHO SAYS LOAF CLARIFIED LUMPS BASTARD COMMON BURNT THIS HONEST MANUFACTURER TALKS SLANG +4507-16021-0024-1493: ALGEBRA MEDICINE (BOTANY->BARTANY) HAVE EACH THEIR SLANG +4507-16021-0025-1494: TO MEET THE 
NEEDS OF THIS CONFLICT WRETCHEDNESS HAS INVENTED A LANGUAGE OF COMBAT WHICH IS SLANG +4507-16021-0026-1495: TO KEEP AFLOAT AND TO RESCUE FROM OBLIVION TO HOLD ABOVE THE GULF WERE IT BUT A FRAGMENT OF SOME LANGUAGE WHICH MAN HAS SPOKEN AND WHICH WOULD OTHERWISE BE LOST THAT IS TO SAY ONE OF THE ELEMENTS GOOD OR BAD OF WHICH CIVILIZATION IS COMPOSED OR BY WHICH IT IS COMPLICATED TO EXTEND THE RECORDS OF SOCIAL OBSERVATION IS TO SERVE CIVILIZATION ITSELF +4507-16021-0027-1496: PHOENICIAN VERY GOOD +4507-16021-0028-1497: EVEN DIALECT LET THAT PASS +4507-16021-0029-1498: TO THIS WE REPLY IN ONE WORD ONLY +4507-16021-0030-1499: ASSUREDLY IF THE TONGUE WHICH A NATION OR A PROVINCE HAS SPOKEN IS WORTHY OF INTEREST THE LANGUAGE WHICH HAS BEEN SPOKEN BY A MISERY IS STILL MORE WORTHY OF ATTENTION AND STUDY +4507-16021-0031-1500: AND THEN WE INSIST UPON IT THE STUDY OF SOCIAL DEFORMITIES AND INFIRMITIES AND THE TASK OF POINTING THEM OUT WITH A VIEW TO REMEDY IS NOT A BUSINESS IN WHICH CHOICE IS PERMITTED +4507-16021-0032-1501: HE MUST DESCEND WITH HIS HEART FULL OF CHARITY AND SEVERITY AT THE SAME TIME AS A BROTHER AND AS (A->HE) JUDGE TO THOSE IMPENETRABLE CASEMATES (WHERE->WERE) CRAWL PELL (MELL->*) THOSE WHO BLEED AND THOSE WHO DEAL THE BLOW THOSE WHO WEEP AND THOSE WHO CURSE THOSE WHO FAST (AND->IN) THOSE WHO DEVOUR THOSE WHO ENDURE EVIL AND THOSE WHO INFLICT IT +4507-16021-0033-1502: DO WE REALLY KNOW THE MOUNTAIN WELL WHEN WE ARE NOT ACQUAINTED WITH THE CAVERN +4507-16021-0034-1503: THEY CONSTITUTE TWO DIFFERENT ORDERS OF FACTS WHICH CORRESPOND TO EACH OTHER WHICH ARE ALWAYS INTERLACED AND WHICH OFTEN BRING FORTH RESULTS +4507-16021-0035-1504: TRUE HISTORY BEING A MIXTURE OF ALL THINGS THE TRUE HISTORIAN MINGLES IN EVERYTHING +4507-16021-0036-1505: FACTS FORM ONE OF THESE AND IDEAS THE OTHER +4507-16021-0037-1506: THERE IT CLOTHES ITSELF IN WORD MASKS IN METAPHOR RAGS +4507-16021-0038-1507: IN (THIS GUISE->THE SKIES) IT BECOMES HORRIBLE +4507-16021-0039-1508: ONE PERCEIVES WITHOUT UNDERSTANDING IT A HIDEOUS MURMUR SOUNDING ALMOST LIKE HUMAN ACCENTS BUT MORE NEARLY RESEMBLING A HOWL THAN AN ARTICULATE WORD +4507-16021-0040-1509: ONE THINKS ONE HEARS (HYDRAS->HYDRAST) TALKING +4507-16021-0041-1510: IT IS UNINTELLIGIBLE IN THE DARK +4507-16021-0042-1511: IT IS BLACK IN MISFORTUNE IT IS BLACKER STILL IN CRIME THESE TWO BLACKNESSES AMALGAMATED (COMPOSE->COMPOSED) SLANG +4507-16021-0043-1512: THE EARTH IS NOT DEVOID OF RESEMBLANCE TO A JAIL +4507-16021-0044-1513: LOOK CLOSELY AT LIFE +4507-16021-0045-1514: IT IS SO MADE THAT EVERYWHERE WE FEEL THE SENSE OF PUNISHMENT +4507-16021-0046-1515: EACH DAY HAS ITS OWN GREAT GRIEF OR ITS LITTLE CARE +4507-16021-0047-1516: YESTERDAY (YOU->*) WERE TREMBLING FOR A HEALTH THAT IS DEAR TO YOU TO DAY YOU FEAR FOR YOUR OWN TO MORROW IT WILL BE ANXIETY ABOUT MONEY THE DAY AFTER TO MORROW THE (DIATRIBE->DIETRIBE) OF A SLANDERER THE DAY AFTER THAT THE MISFORTUNE OF SOME FRIEND THEN THE PREVAILING WEATHER THEN SOMETHING THAT HAS BEEN BROKEN OR LOST THEN A PLEASURE WITH WHICH YOUR CONSCIENCE AND YOUR VERTEBRAL COLUMN REPROACH YOU AGAIN THE COURSE OF PUBLIC AFFAIRS +4507-16021-0048-1517: THIS WITHOUT RECKONING IN THE PAINS OF THE HEART AND SO (IT->TO) GOES ON +4507-16021-0049-1518: THERE IS HARDLY ONE DAY OUT OF A HUNDRED WHICH IS WHOLLY JOYOUS AND SUNNY +4507-16021-0050-1519: AND YOU BELONG TO THAT SMALL CLASS WHO ARE (*->A) HAPPY +4507-16021-0051-1520: IN THIS WORLD EVIDENTLY THE VESTIBULE OF ANOTHER THERE ARE NO FORTUNATE +4507-16021-0052-1521: THE REAL HUMAN DIVISION 
IS THIS THE LUMINOUS AND THE SHADY +4507-16021-0053-1522: TO DIMINISH THE NUMBER OF THE SHADY TO AUGMENT THE NUMBER OF THE LUMINOUS THAT IS THE OBJECT +4507-16021-0054-1523: THAT IS WHY WE CRY EDUCATION SCIENCE +4507-16021-0055-1524: TO TEACH READING MEANS TO (LIGHT->WRITE) THE FIRE EVERY SYLLABLE SPELLED OUT SPARKLES +4507-16021-0056-1525: HOWEVER HE WHO SAYS LIGHT DOES NOT NECESSARILY SAY JOY +4507-16021-0057-1526: PEOPLE SUFFER IN THE LIGHT EXCESS BURNS +4507-16021-0058-1527: THE FLAME IS THE ENEMY OF THE WING +4507-16021-0059-1528: TO BURN WITHOUT CEASING TO FLY THEREIN LIES THE MARVEL OF GENIUS +4970-29093-0000-2093: YOU'LL NEVER DIG IT OUT OF THE (ASTOR->ASTRO) LIBRARY +4970-29093-0001-2094: TO THE YOUNG AMERICAN HERE OR ELSEWHERE THE PATHS TO FORTUNE ARE INNUMERABLE AND ALL OPEN THERE IS INVITATION IN THE AIR AND SUCCESS IN ALL HIS WIDE HORIZON +4970-29093-0002-2095: HE HAS NO TRADITIONS TO BIND HIM OR GUIDE HIM AND HIS IMPULSE IS TO BREAK AWAY FROM THE OCCUPATION HIS FATHER HAS FOLLOWED AND MAKE A NEW WAY FOR HIMSELF +4970-29093-0003-2096: THE MODEST FELLOW WOULD HAVE LIKED FAME THRUST UPON HIM FOR SOME WORTHY ACHIEVEMENT IT MIGHT BE FOR A BOOK OR FOR THE (SKILLFUL->SKILFUL) MANAGEMENT OF SOME GREAT NEWSPAPER OR FOR SOME DARING EXPEDITION LIKE THAT OF LIEUTENANT STRAIN OR DOCTOR KANE +4970-29093-0004-2097: HE WAS UNABLE TO DECIDE EXACTLY WHAT IT SHOULD BE +4970-29093-0005-2098: SOMETIMES HE THOUGHT HE WOULD LIKE TO STAND IN A CONSPICUOUS PULPIT AND HUMBLY PREACH THE GOSPEL OF REPENTANCE AND IT EVEN CROSSED HIS MIND THAT IT WOULD BE NOBLE TO GIVE HIMSELF TO A MISSIONARY LIFE TO SOME BENIGHTED REGION WHERE THE DATE PALM (GROWS->GROVES) AND THE NIGHTINGALE'S VOICE IS IN TUNE AND THE (BUL BUL->BOL) SINGS ON THE (OFF->OPT) NIGHTS +4970-29093-0006-2099: LAW SEEMED TO HIM WELL ENOUGH AS A SCIENCE BUT HE NEVER COULD DISCOVER A PRACTICAL CASE WHERE IT APPEARED TO HIM WORTH WHILE TO GO TO LAW AND ALL THE CLIENTS WHO STOPPED WITH THIS NEW CLERK (IN->AND) THE (ANTE ROOM->ANTEROOM) OF THE LAW OFFICE WHERE HE WAS WRITING PHILIP INVARIABLY ADVISED TO SETTLE NO MATTER HOW BUT (SETTLE->SETTLED) GREATLY TO THE DISGUST OF HIS EMPLOYER WHO KNEW THAT JUSTICE BETWEEN MAN AND MAN COULD ONLY BE ATTAINED BY THE RECOGNIZED PROCESSES WITH THE ATTENDANT (FEES->BEES) +4970-29093-0007-2100: IT IS SUCH A NOBLE AMBITION THAT IT IS A PITY IT HAS USUALLY SUCH A SHALLOW FOUNDATION +4970-29093-0008-2101: HE WANTED TO BEGIN AT THE TOP OF THE LADDER +4970-29093-0009-2102: PHILIP THEREFORE READ DILIGENTLY IN THE ASTOR LIBRARY PLANNED LITERARY WORKS THAT SHOULD COMPEL ATTENTION AND NURSED HIS GENIUS +4970-29093-0010-2103: HE HAD NO FRIEND WISE ENOUGH TO TELL HIM TO STEP INTO THE DORKING CONVENTION (THEN->THAN) IN SESSION MAKE A SKETCH OF THE MEN AND WOMEN ON THE PLATFORM AND TAKE IT TO THE EDITOR OF THE DAILY (GRAPEVINE->GRAPE VINE) AND SEE WHAT HE COULD GET A LINE FOR IT +4970-29093-0011-2104: (O->OH) VERY WELL SAID (GRINGO->GRENGO) TURNING AWAY WITH A SHADE OF CONTEMPT YOU'LL FIND IF YOU ARE GOING INTO LITERATURE AND NEWSPAPER WORK THAT YOU CAN'T AFFORD A CONSCIENCE LIKE THAT +4970-29093-0012-2105: BUT PHILIP DID AFFORD IT AND HE WROTE (THANKING->THINKING) HIS FRIENDS AND DECLINING BECAUSE HE SAID THE POLITICAL SCHEME WOULD FAIL AND OUGHT TO FAIL +4970-29093-0013-2106: AND HE WENT BACK TO HIS BOOKS AND TO HIS WAITING FOR AN OPENING LARGE ENOUGH FOR HIS DIGNIFIED ENTRANCE INTO THE LITERARY WORLD +4970-29093-0014-2107: WELL I'M GOING AS AN ENGINEER YOU (CAN->COULD) GO AS ONE +4970-29093-0015-2108: YOU CAN BEGIN BY 
CARRYING A ROD AND PUTTING DOWN THE FIGURES +4970-29093-0016-2109: NO (ITS->IT'S) NOT TOO SOON +4970-29093-0017-2110: I'VE BEEN READY TO GO ANYWHERE FOR SIX MONTHS +4970-29093-0018-2111: THE TWO YOUNG MEN WHO WERE BY THIS TIME FULL OF THE (ADVENTURE->ADVENTURER) WENT DOWN TO THE WALL STREET OFFICE OF HENRY'S UNCLE AND HAD A TALK WITH THAT WILY OPERATOR +4970-29093-0019-2112: THE NIGHT WAS SPENT IN PACKING UP AND WRITING LETTERS FOR PHILIP WOULD NOT TAKE SUCH AN IMPORTANT STEP WITHOUT INFORMING HIS FRIENDS +4970-29093-0020-2113: WHY IT'S (IN->A) MISSOURI SOMEWHERE ON THE FRONTIER I THINK WE'LL GET A MAP +4970-29093-0021-2114: I WAS AFRAID IT WAS NEARER HOME +4970-29093-0022-2115: HE KNEW HIS UNCLE WOULD BE GLAD TO HEAR THAT HE HAD AT LAST TURNED HIS THOUGHTS TO A PRACTICAL MATTER +4970-29093-0023-2116: HE WELL KNEW THE PERILS OF THE FRONTIER THE SAVAGE STATE OF SOCIETY THE LURKING INDIANS AND THE DANGERS OF FEVER +4970-29095-0000-2054: SHE WAS TIRED OF OTHER THINGS +4970-29095-0001-2055: SHE TRIED THIS MORNING AN AIR OR TWO UPON THE PIANO (SANG->SAYING) A SIMPLE SONG IN A SWEET BUT SLIGHTLY METALLIC VOICE AND THEN SEATING HERSELF BY THE OPEN WINDOW READ PHILIP'S LETTER +4970-29095-0002-2056: WELL MOTHER SAID THE YOUNG STUDENT LOOKING UP WITH A SHADE OF IMPATIENCE +4970-29095-0003-2057: I HOPE THEE TOLD THE ELDERS THAT FATHER AND I ARE RESPONSIBLE FOR THE PIANO AND THAT MUCH AS THEE LOVES MUSIC THEE IS NEVER IN THE ROOM WHEN IT IS PLAYED +4970-29095-0004-2058: I HEARD FATHER TELL COUSIN ABNER THAT HE WAS WHIPPED SO OFTEN FOR WHISTLING WHEN HE WAS A BOY THAT HE WAS DETERMINED TO HAVE WHAT COMPENSATION HE COULD GET NOW +4970-29095-0005-2059: THY WAYS GREATLY TRY ME RUTH AND ALL THY RELATIONS +4970-29095-0006-2060: IS THY FATHER WILLING THEE SHOULD GO AWAY TO A SCHOOL OF THE WORLD'S PEOPLE +4970-29095-0007-2061: I HAVE NOT ASKED HIM RUTH REPLIED WITH A LOOK THAT MIGHT IMPLY THAT SHE WAS ONE OF THOSE DETERMINED LITTLE BODIES WHO FIRST MADE UP HER OWN MIND AND THEN COMPELLED OTHERS TO MAKE UP THEIRS IN ACCORDANCE WITH HERS +4970-29095-0008-2062: MOTHER (I'M->I AM) GOING TO STUDY MEDICINE +4970-29095-0009-2063: MARGARET BOLTON ALMOST LOST FOR A MOMENT HER HABITUAL PLACIDITY +4970-29095-0010-2064: (THEE->THE) STUDY MEDICINE +4970-29095-0011-2065: DOES THEE THINK THEE COULD STAND IT SIX MONTHS +4970-29095-0012-2066: AND BESIDES SUPPOSE THEE DOES LEARN MEDICINE +4970-29095-0013-2067: I WILL (PRACTICE->PRACTISE) IT +4970-29095-0014-2068: (WHERE->WHERE'S) THEE AND THY FAMILY ARE KNOWN +4970-29095-0015-2069: IF I CAN GET (PATIENTS->PATIENCE) +4970-29095-0016-2070: RUTH SAT QUITE STILL FOR A TIME WITH FACE (INTENT->AND TENT) AND FLUSHED IT WAS OUT NOW +4970-29095-0017-2071: THE (SIGHT SEERS->SIGHTSEERS) RETURNED IN HIGH SPIRITS FROM THE CITY +4970-29095-0018-2072: RUTH ASKED THE (ENTHUSIASTS->ENTHUSIAST) IF THEY WOULD LIKE TO LIVE IN SUCH A SOUNDING (MAUSOLEUM->MUSOLEUM) WITH ITS GREAT HALLS AND ECHOING ROOMS AND NO COMFORTABLE PLACE IN IT FOR THE ACCOMMODATION OF ANY BODY +4970-29095-0019-2073: AND THEN THERE WAS BROAD STREET +4970-29095-0020-2074: THERE CERTAINLY WAS NO END TO IT AND EVEN RUTH WAS (PHILADELPHIAN->PHILADELPHIA) ENOUGH TO BELIEVE THAT A STREET OUGHT NOT TO HAVE ANY END OR ARCHITECTURAL (POINT->BLINT) UPON WHICH THE WEARY EYE COULD REST +4970-29095-0021-2075: BUT NEITHER SAINT (GIRARD->GERARD) NOR BROAD STREET NEITHER WONDERS OF THE MINT NOR THE GLORIES OF THE HALL WHERE THE GHOSTS OF OUR FATHERS SIT ALWAYS SIGNING THE DECLARATION (IMPRESSED->IMPRESS) THE (VISITORS->VISITOR) SO MUCH AS 
THE SPLENDORS OF THE CHESTNUT STREET WINDOWS AND THE BARGAINS ON EIGHTH STREET +4970-29095-0022-2076: IS THEE GOING TO THE YEARLY MEETING RUTH ASKED ONE OF THE GIRLS +4970-29095-0023-2077: I HAVE NOTHING TO WEAR REPLIED (THAT->THE) DEMURE PERSON +4970-29095-0024-2078: IT HAS OCCUPIED MOTHER A LONG TIME TO FIND (AT->*) THE SHOPS THE EXACT SHADE FOR HER NEW BONNET +4970-29095-0025-2079: AND THEE WON'T GO WHY SHOULD I +4970-29095-0026-2080: IF I GO TO MEETING AT ALL I LIKE BEST TO SIT IN THE QUIET OLD HOUSE IN GERMANTOWN WHERE THE WINDOWS ARE ALL OPEN AND I CAN SEE THE TREES AND (HEAR->HERE) THE STIR OF THE LEAVES +4970-29095-0027-2081: IT'S SUCH A CRUSH AT THE YEARLY MEETING AT ARCH STREET AND THEN THERE'S THE ROW OF SLEEK LOOKING YOUNG MEN WHO (LINE->LIE IN) THE CURBSTONE AND STARE AT US AS WE COME OUT +4970-29095-0028-2082: HE DOESN'T SAY BUT IT'S ON THE FRONTIER AND ON THE MAP EVERYTHING BEYOND IT IS MARKED INDIANS AND DESERT AND LOOKS AS DESOLATE AS A (WEDNESDAY->WIND ZAY) MEETING (HUMPH->*) IT WAS TIME FOR HIM TO DO SOMETHING +4970-29095-0029-2083: IS HE GOING TO START A DAILY NEWSPAPER AMONG THE (KICK A POOS->KICKAPOOS) +4970-29095-0030-2084: FATHER (THEE'S->THESE) UNJUST TO PHILIP HE'S GOING INTO BUSINESS +4970-29095-0031-2085: HE DOESN'T SAY EXACTLY WHAT IT IS SAID RUTH A LITTLE DUBIOUSLY BUT IT'S SOMETHING ABOUT LAND AND RAILROADS AND (THEE->HE) KNOWS FATHER THAT FORTUNES ARE MADE NOBODY KNOWS EXACTLY HOW IN A NEW COUNTRY +4970-29095-0032-2086: (BUT->THAT) PHILIP IS HONEST AND HE HAS TALENT ENOUGH IF HE WILL STOP SCRIBBLING TO MAKE HIS WAY +4970-29095-0033-2087: WHAT A (BOX WOMEN->BOXWOMEN) ARE PUT INTO MEASURED FOR IT AND (PUT IN->PUTTING) YOUNG IF WE GO ANYWHERE IT'S IN A BOX VEILED AND PINIONED AND SHUT IN BY DISABILITIES +4970-29095-0034-2088: WHY SHOULD I (RUST->REST) AND BE STUPID AND SIT IN (INACTION->AN ACTION) BECAUSE I AM A GIRL +4970-29095-0035-2089: AND IF I HAD A FORTUNE WOULD THEE WANT ME TO LEAD A USELESS LIFE +4970-29095-0036-2090: HAS (THEE->THE) CONSULTED THY MOTHER ABOUT A CAREER I SUPPOSE IT IS A CAREER (*->OF) THEE WANTS +4970-29095-0037-2091: BUT THAT WISE AND PLACID WOMAN UNDERSTOOD THE SWEET REBEL A GREAT DEAL BETTER THAN RUTH UNDERSTOOD HERSELF +4970-29095-0038-2092: RUTH WAS GLAD TO HEAR THAT PHILIP HAD MADE A PUSH INTO THE WORLD AND SHE WAS SURE THAT HIS TALENT AND COURAGE WOULD MAKE (A WAY->AWAY) FOR HIM +4992-23283-0000-2140: BUT THE MORE FORGETFULNESS HAD THEN PREVAILED THE MORE POWERFUL WAS THE FORCE OF REMEMBRANCE WHEN SHE AWOKE +4992-23283-0001-2141: MISS MILNER'S HEALTH IS NOT GOOD +4992-23283-0002-2142: SAID MISSUS (HORTON->WHARTON) A FEW MINUTES AFTER +4992-23283-0003-2143: SO THERE IS TO ME ADDED SANDFORD WITH A SARCASTIC SNEER +4992-23283-0004-2144: AND YET YOU MUST OWN HER (BEHAVIOUR->BEHAVIOR) HAS WARRANTED THEM HAS IT NOT BEEN IN THIS PARTICULAR INCOHERENT AND UNACCOUNTABLE +4992-23283-0005-2145: NOT THAT I KNOW OF NOT ONE MORE THAT I KNOW OF HE REPLIED WITH ASTONISHMENT AT WHAT SHE HAD INSINUATED AND YET WITH A PERFECT ASSURANCE THAT SHE WAS IN THE WRONG +4992-23283-0006-2146: PERHAPS I AM MISTAKEN ANSWERED SHE +4992-23283-0007-2147: TO ASK ANY MORE QUESTIONS OF YOU I BELIEVE WOULD BE UNFAIR +4992-23283-0008-2148: HE SEEMED TO WAIT FOR HER REPLY BUT AS SHE MADE NONE HE PROCEEDED +4992-23283-0009-2149: (OH->O) MY LORD CRIED MISS WOODLEY WITH A MOST FORCIBLE ACCENT YOU ARE THE LAST (PERSON->PERSONAL) ON EARTH SHE WOULD PARDON ME FOR (ENTRUSTING->INTRUSTING) +4992-23283-0010-2150: BUT IN SUCH A CASE MISS MILNER'S ELECTION OF A HUSBAND SHALL NOT 
DIRECT MINE +4992-23283-0011-2151: IF SHE DOES NOT KNOW HOW TO ESTIMATE HER OWN VALUE I DO +4992-23283-0012-2152: INDEPENDENT OF HER FORTUNE SHE HAS BEAUTY TO CAPTIVATE THE HEART OF ANY MAN AND WITH ALL HER FOLLIES SHE HAS A FRANKNESS IN HER MANNER AN UNAFFECTED WISDOM IN HER THOUGHTS (A->OF) VIVACITY IN HER CONVERSATION AND WITHAL A SOFTNESS IN HER DEMEANOUR THAT MIGHT ALONE ENGAGE THE AFFECTIONS OF A MAN OF THE NICEST SENTIMENTS AND THE STRONGEST UNDERSTANDING +4992-23283-0013-2153: MY LORD MISS MILNER'S TASTE IS NOT A DEPRAVED ONE IT IS BUT TOO REFINED +4992-23283-0014-2154: WHAT CAN YOU MEAN BY THAT MISS WOODLEY YOU TALK MYSTERIOUSLY +4992-23283-0015-2155: IS SHE NOT AFRAID THAT I WILL THWART HER INCLINATIONS +4992-23283-0016-2156: AGAIN HE SEARCHED HIS OWN THOUGHTS NOR INEFFECTUALLY AS BEFORE +4992-23283-0017-2157: MISS WOODLEY WAS TOO LITTLE VERSED IN THE SUBJECT TO KNOW THIS WOULD HAVE BEEN NOT TO LOVE AT ALL AT LEAST NOT TO THE EXTENT OF BREAKING THROUGH ENGAGEMENTS AND ALL THE VARIOUS OBSTACLES THAT STILL (MILITATED->MITIGATED) AGAINST THEIR UNION +4992-23283-0018-2158: TO RELIEVE HER FROM BOTH HE LAID HIS HAND WITH FORCE UPON HIS HEART AND SAID DO YOU BELIEVE ME +4992-23283-0019-2159: I WILL MAKE NO UNJUST USE OF WHAT I KNOW HE REPLIED WITH FIRMNESS I BELIEVE YOU MY LORD +4992-23283-0020-2160: I HAVE NEVER YET HOWEVER BEEN VANQUISHED BY THEM AND EVEN UPON THIS OCCASION MY REASON SHALL COMBAT THEM TO THE LAST AND MY REASON SHALL FAIL ME BEFORE I DO WRONG +4992-41797-0000-2117: YES DEAD THESE FOUR YEARS (AN->AND) A GOOD JOB FOR HER TOO +4992-41797-0001-2118: WELL AS I SAY IT'S AN AWFUL QUEER WORLD THEY CLAP ALL THE BURGLARS (INTO->AND) JAIL (AND->*) THE MURDERERS (AND->IN) THE (WIFE->WHITE) BEATERS (I'VE->I) ALLERS THOUGHT A GENTLE REPROOF WOULD BE ENOUGH PUNISHMENT FOR A WIFE (BEATER->PETER) CAUSE HE PROBABLY HAS A LOT (O->OF) PROVOCATION THAT NOBODY KNOWS AND THE (FIREBUGS->FIRE BUGS) CAN'T THINK (O->OF) THE RIGHT NAME SOMETHING LIKE (CENDENARIES AN->SENDIARIES AND) THE BREAKERS (O->OF) THE (PEACE AN->PIECE AND) WHAT NOT (AN->AND) YET THE LAW HAS (NOTHIN->NOTHING) TO SAY TO A MAN LIKE (HEN LORD->HANDLED) +4992-41797-0002-2119: GRANDFATHER WAS ALEXANDER CAREY L (L->*) D DOCTOR OF LAWS THAT IS +4992-41797-0003-2120: MISTER POPHAM LAID DOWN HIS BRUSH +4992-41797-0004-2121: I (SWAN->SWAYING) TO MAN HE EJACULATED IF YOU DON'T WORK HARD YOU CAN'T KEEP UP WITH THE (TIMES->TUBS) DOCTOR OF LAWS +4992-41797-0005-2122: DONE HE AIN'T DONE A THING (HE'D OUGHTER SENCE->HE ORDERS SINCE) HE WAS BORN +4992-41797-0006-2123: HE KEEPS THE THOU SHALT NOT (COMMANDMENTS->COMMANDS) FIRST RATE HEN LORD DOES +4992-41797-0007-2124: HE (GIVE->GAVE) UP HIS POSITION AND SHUT THE FAMILY UP IN THAT TOMB OF A HOUSE (SO T->SEWED) HE (COULD->COULDN'T) STUDY HIS BOOKS +4992-41797-0008-2125: MISTER POPHAM EXAGGERATED NOTHING BUT ON THE CONTRARY LEFT MUCH UNSAID IN HIS NARRATIVE OF THE FAMILY AT THE HOUSE OF LORDS +4992-41797-0009-2126: HENRY LORD WITH THE DEGREE OF (PH->P) D TO HIS CREDIT HAD BEEN PROFESSOR OF ZOOLOGY AT A NEW ENGLAND COLLEGE BUT HAD RESIGNED HIS POST IN ORDER TO WRITE A SERIES OF SCIENTIFIC TEXT BOOKS +4992-41797-0010-2127: ALWAYS IRRITABLE COLD INDIFFERENT HE HAD GROWN RAPIDLY MORE SO AS YEARS WENT ON +4992-41797-0011-2128: WHATEVER (APPEALED->APPEAL) TO HER SENSE OF BEAUTY WAS STRAIGHTWAY TRANSFERRED TO PAPER OR CANVAS +4992-41797-0012-2129: SHE IS WILD TO KNOW HOW TO DO THINGS +4992-41797-0013-2130: SHE MAKES EFFORT AFTER EFFORT TREMBLING WITH EAGERNESS AND WHEN SHE FAILS TO REPRODUCE WHAT SHE 
SEES SHE WORKS HERSELF INTO A FRENZY OF GRIEF AND DISAPPOINTMENT +4992-41797-0014-2131: WHEN SHE COULD NOT MAKE A RABBIT OR A BIRD LOOK REAL ON PAPER SHE SEARCHED IN HER FATHER'S BOOKS FOR PICTURES OF ITS BONES +4992-41797-0015-2132: CYRIL THERE MUST BE SOME BETTER WAY OF DOING I JUST DRAW THE OUTLINE OF AN ANIMAL AND THEN I PUT HAIRS OR FEATHERS ON IT THEY HAVE NO BODIES +4992-41797-0016-2133: THEY COULDN'T RUN (NOR->OR) MOVE THEY'RE JUST PASTEBOARD +4992-41797-0017-2134: HE WOULDN'T SEARCH SO DON'T WORRY REPLIED CYRIL QUIETLY AND THE TWO LOOKED AT EACH OTHER AND KNEW THAT IT WAS SO +4992-41797-0018-2135: THERE IN THE CEDAR HOLLOW THEN LIVED OLIVE LORD AN ANGRY RESENTFUL LITTLE CREATURE WEIGHED DOWN BY A FIERCE SENSE OF INJURY +4992-41797-0019-2136: (OLIVE'S->ALL OF HIS) MOURNFUL BLACK EYES MET NANCY'S SPARKLING BROWN ONES +4992-41797-0020-2137: NANCY'S CURLY CHESTNUT CROP SHONE IN THE SUN AND OLIVE'S THICK BLACK (PLAITS->PLATES) LOOKED BLACKER BY CONTRAST +4992-41797-0021-2138: (SHE'S->SHE IS) WONDERFUL MORE WONDERFUL (THAN->IN) ANYBODY WE'VE EVER SEEN ANYWHERE AND SHE (DRAWS->DRAWLS) BETTER THAN THE TEACHER IN CHARLESTOWN +4992-41797-0022-2139: SHE'S OLDER THAN I AM BUT SO TINY AND SAD AND SHY THAT SHE SEEMS LIKE A CHILD +4992-41806-0000-2161: NATTY HARMON TRIED THE KITCHEN PUMP SECRETLY SEVERAL TIMES DURING THE EVENING FOR THE WATER HAD TO RUN UP HILL ALL THE WAY FROM THE WELL TO THE KITCHEN SINK AND HE BELIEVED THIS TO BE A CONTINUAL MIRACLE THAT MIGHT GIVE OUT AT ANY MOMENT +4992-41806-0001-2162: TO NIGHT THERE WAS NO NEED OF EXTRA HEAT AND THERE WERE GREAT CEREMONIES TO BE OBSERVED IN LIGHTING THE FIRES ON THE HEARTHSTONES +4992-41806-0002-2163: THEY BEGAN WITH THE ONE IN THE FAMILY SITTING ROOM COLONEL WHEELER RALPH THURSTON MISTER AND MISSUS BILL HARMON WITH (NATTY->NANNIE) AND (RUFUS->RUFFUS) MISTER AND MISSUS POPHAM WITH DIGBY AND (LALLIE->LILY) JOY ALL STANDING IN ADMIRING GROUPS AND THRILLING WITH DELIGHT AT THE ORDER OF EVENTS +4992-41806-0003-2164: KATHLEEN WAVED THE TORCH TO AND FRO AS SHE RECITED SOME BEAUTIFUL LINES WRITTEN FOR SOME SUCH PURPOSE AS THAT WHICH CALLED THEM TOGETHER TO NIGHT +4992-41806-0004-2165: (BURN->BURNE) FIRE BURN FLICKER FLICKER FLAME +4992-41806-0005-2166: NEXT CAME OLIVE'S TURN TO HELP IN THE CEREMONIES +4992-41806-0006-2167: RALPH THURSTON HAD FOUND A LINE OF LATIN FOR THEM IN HIS BELOVED (HORACE TIBI SPLENDET->HORNS TIBBY SPLENDID) FOCUS FOR YOU THE HEARTH FIRE SHINES +4992-41806-0007-2168: OLIVE HAD PAINTED THE MOTTO ON A LONG NARROW PANEL OF CANVAS AND GIVING IT TO MISTER POPHAM STOOD BY THE FIRESIDE WHILE HE DEFTLY FITTED IT INTO THE PLACE PREPARED FOR IT +4992-41806-0008-2169: (OLIVE->ALAP) HAS ANOTHER LOVELY GIFT FOR THE YELLOW HOUSE SAID MOTHER CAREY RISING AND TO CARRY OUT THE NEXT PART OF THE PROGRAMME WE SHALL HAVE TO GO IN PROCESSION UPSTAIRS TO MY BEDROOM +4992-41806-0009-2170: EXCLAIMED BILL HARMON TO HIS WIFE AS THEY WENT THROUGH THE LIGHTED HALL +4992-41806-0010-2171: AIN'T THEY THE GREATEST +4992-41806-0011-2172: MOTHER CAREY POURED COFFEE NANCY CHOCOLATE AND THE (OTHERS HELPED SERVE->OTHER SELF SERVED) THE SANDWICHES AND CAKE DOUGHNUTS AND TARTS +4992-41806-0012-2173: AT THAT MOMENT THE GENTLEMAN ENTERED BEARING A HUGE OBJECT CONCEALED BY A PIECE OF GREEN (FELT->FIL) +4992-41806-0013-2174: APPROACHING THE DINING TABLE HE CAREFULLY PLACED THE ARTICLE IN THE CENTRE AND REMOVED THE CLOTH +4992-41806-0014-2175: (THINKS I TO->THINK SOUND OF) MYSELF I NEVER SEEN ANYTHING (OSH POPHAM COULDN'T MEND->ID) IF HE TOOK TIME ENOUGH AND GLUE 
ENOUGH SO I CARRIED THIS LITTLE FELLER HOME IN A BUSHEL BASKET ONE NIGHT LAST MONTH (AN->AND) I'VE SPENT ELEVEN (EVENIN'S PUTTIN->EVENINGS PUTTING) HIM TOGETHER +4992-41806-0015-2176: MISSUS HARMON THOUGHT HE SANG TOO MUCH AND TOLD HER (HUSBAND->HUSBA) PRIVATELY THAT IF HE WAS A CANARY BIRD SHE SHOULD WANT TO KEEP A TABLE COVER (OVER->OF) HIS (HEAD MOST->EDMOST) OF THE TIME BUT HE WAS IMMENSELY POPULAR WITH THE REST OF HIS AUDIENCE +4992-41806-0016-2177: THE FACE OF THE MAHOGANY SHONE WITH DELIGHT AND WHY NOT WHEN IT WAS DOING EVERYTHING ALMOST EVERYTHING WITHIN THE SCOPE OF A PIANO AND YET THE FAMILY HAD ENJOYED WEEKS OF GOOD NOURISHING MEALS ON WHAT HAD BEEN SAVED BY ITS EXERTIONS +4992-41806-0017-2178: WE SHUT OUR EYES THE FLOWERS BLOOM ON WE MURMUR BUT THE (CORN EARS->CORNIERS) FILL WE CHOOSE THE SHADOW BUT THE SUN THAT (CASTS->CAST) IT SHINES BEHIND US STILL +5105-28233-0000-1649: LENGTH OF SERVICE FOURTEEN YEARS THREE MONTHS AND FIVE DAYS +5105-28233-0001-1650: HE SEEMED BORN TO PLEASE WITHOUT BEING CONSCIOUS OF THE POWER HE POSSESSED +5105-28233-0002-1651: IT MUST BE OWNED AND NO ONE WAS MORE READY TO CONFESS IT THAN HIMSELF THAT HIS LITERARY ATTAINMENTS WERE BY NO MEANS OF A HIGH ORDER +5105-28233-0003-1652: WE DON'T (SPIN->SPEND) TOPS (IS->AS) A FAVORITE SAYING AMONGST ARTILLERY OFFICERS INDICATING THAT THEY DO NOT SHIRK THEIR DUTY BY FRIVOLOUS PURSUITS BUT IT MUST BE CONFESSED THAT SERVADAC BEING NATURALLY IDLE WAS VERY MUCH GIVEN TO SPINNING TOPS +5105-28233-0004-1653: ONCE (IN->AN) ACTION HE WAS LEADING A DETACHMENT OF INFANTRY THROUGH AN (INTRENCHMENT->ENTRENCHMENT) +5105-28233-0005-1654: SOMETIMES HE WOULD WANDER ON FOOT UPON THE SANDY SHORE AND SOMETIMES HE WOULD ENJOY A RIDE ALONG THE SUMMIT OF THE CLIFF ALTOGETHER BEING IN NO HURRY AT ALL TO BRING HIS TASK TO AN END +5105-28233-0006-1655: NO CATHEDRAL NOT EVEN BURGOS ITSELF COULD VIE WITH THE CHURCH AT (MONTMARTRE->MOUNT MARTRE) +5105-28233-0007-1656: BEN ZOOF'S MOST AMBITIOUS DESIRE WAS TO INDUCE THE CAPTAIN TO GO WITH HIM AND END HIS DAYS IN HIS MUCH LOVED HOME AND SO INCESSANTLY WERE SERVADAC'S EARS BESIEGED WITH DESCRIPTIONS OF THE UNPARALLELED BEAUTIES AND ADVANTAGES OF THIS EIGHTEENTH (ARRONDISSEMENT->ARE ON DESSIMA) OF PARIS THAT HE COULD SCARCELY HEAR THE NAME OF (MONTMARTRE->MONTMARTRA) WITHOUT A CONSCIOUS THRILL OF AVERSION +5105-28233-0008-1657: WHEN A PRIVATE IN THE EIGHTH CAVALRY HE HAD BEEN ON THE POINT OF QUITTING THE ARMY AT TWENTY EIGHT YEARS OF AGE BUT UNEXPECTEDLY HE HAD BEEN APPOINTED ORDERLY TO CAPTAIN SERVADAC +5105-28233-0009-1658: THE BOND OF UNION THUS EFFECTED COULD NEVER BE SEVERED AND ALTHOUGH BEN (ZOOF'S->ZEF'S) ACHIEVEMENTS HAD FAIRLY EARNED HIM THE RIGHT OF RETIREMENT HE FIRMLY DECLINED ALL (HONORS->HONOURS) OR ANY PENSION THAT MIGHT PART HIM FROM HIS SUPERIOR OFFICER +5105-28233-0010-1659: (UNLIKE->I MIKE) HIS MASTER HE MADE NO PRETENSION TO ANY GIFT OF POETIC POWER BUT HIS INEXHAUSTIBLE MEMORY MADE HIM A LIVING ENCYCLOPAEDIA AND FOR HIS STOCK OF ANECDOTES AND TROOPER'S TALES HE WAS MATCHLESS +5105-28240-0000-1624: FAST AS HIS LEGS COULD CARRY HIM SERVADAC HAD MADE HIS WAY TO THE TOP OF THE CLIFF +5105-28240-0001-1625: IT WAS QUITE TRUE THAT A VESSEL WAS IN SIGHT HARDLY MORE THAN SIX MILES FROM THE SHORE BUT OWING TO THE INCREASE IN THE EARTH'S CONVEXITY AND THE CONSEQUENT LIMITATION OF THE RANGE OF VISION THE RIGGING OF THE TOPMASTS ALONE WAS VISIBLE ABOVE THE WATER +5105-28240-0002-1626: EXCLAIMED SERVADAC KEEPING HIS EYE UNMOVED AT HIS TELESCOPE +5105-28240-0003-1627: SHE IS UNDER 
(SAIL->SALE) BUT SHE IS COUNT TIMASCHEFF'S YACHT HE WAS RIGHT +5105-28240-0004-1628: IF THE COUNT WERE ON BOARD A STRANGE FATALITY WAS BRINGING HIM TO THE PRESENCE OF HIS RIVAL +5105-28240-0005-1629: HE RECKONED THEREFORE NOT ONLY UPON ASCERTAINING THE EXTENT OF THE LATE CATASTROPHE BUT UPON LEARNING ITS CAUSE +5105-28240-0006-1630: THE WIND BEING ADVERSE THE DOBRYNA DID NOT MAKE VERY RAPID PROGRESS BUT AS THE WEATHER IN SPITE OF A FEW CLOUDS REMAINED CALM AND THE SEA WAS QUITE SMOOTH SHE WAS ENABLED TO HOLD A STEADY COURSE +5105-28240-0007-1631: SERVADAC TOOK IT FOR GRANTED THAT THE DOBRYNA WAS ENDEAVORING TO PUT IN +5105-28240-0008-1632: A NARROW CHANNEL FORMED A PASSAGE THROUGH THE RIDGE OF ROCKS THAT PROTECTED IT FROM THE OPEN SEA AND WHICH EVEN IN THE ROUGHEST WEATHER WOULD (ENSURE->INSURE) THE CALMNESS OF ITS WATERS +5105-28240-0009-1633: SLIGHTLY CHANGING HER COURSE SHE FIRST STRUCK HER MAINSAIL AND IN ORDER TO FACILITATE THE MOVEMENTS OF HER HELMSMAN SOON CARRIED NOTHING BUT HER TWO TOPSAILS BRIGANTINE AND JIB +5105-28240-0010-1634: CAPTAIN SERVADAC HASTENED (TOWARDS->TOWARD) HIM +5105-28240-0011-1635: I LEFT YOU ON A CONTINENT AND HERE I HAVE THE HONOR OF FINDING YOU ON AN ISLAND +5105-28240-0012-1636: NEVER MIND NOW INTERPOSED THE CAPTAIN WE WILL TALK OF THAT BY AND BY +5105-28240-0013-1637: NOTHING MORE THAN YOU KNOW YOURSELF +5105-28240-0014-1638: ARE YOU CERTAIN THAT THIS IS THE MEDITERRANEAN +5105-28240-0015-1639: FOR SOME MOMENTS HE SEEMED PERFECTLY STUPEFIED (*->AND) THEN RECOVERING HIMSELF HE BEGAN TO OVERWHELM THE COUNT WITH A TORRENT OF QUESTIONS +5105-28240-0016-1640: TO ALL THESE INQUIRIES THE COUNT RESPONDED IN THE AFFIRMATIVE +5105-28240-0017-1641: SOME MYSTERIOUS FORCE SEEMED TO HAVE BROUGHT ABOUT A CONVULSION OF THE ELEMENTS +5105-28240-0018-1642: YOU WILL TAKE ME ON BOARD COUNT WILL YOU NOT +5105-28240-0019-1643: MY YACHT IS AT YOUR SERVICE SIR EVEN SHOULD YOU REQUIRE TO MAKE A TOUR (ROUND->AROUND) THE WORLD +5105-28240-0020-1644: THE COUNT SHOOK HIS HEAD +5105-28240-0021-1645: BEFORE STARTING IT WAS INDISPENSABLE THAT THE ENGINE OF THE DOBRYNA SHOULD BE REPAIRED TO SAIL UNDER CANVAS ONLY WOULD IN CONTRARY WINDS AND ROUGH SEAS BE BOTH TEDIOUS AND DIFFICULT +5105-28240-0022-1646: IT WAS ON THE LAST DAY OF JANUARY THAT THE REPAIRS OF THE SCHOONER WERE COMPLETED +5105-28240-0023-1647: A SLIGHT DIMINUTION IN THE EXCESSIVELY HIGH TEMPERATURE WHICH HAD PREVAILED FOR THE LAST FEW WEEKS WAS THE ONLY APPARENT CHANGE IN THE GENERAL ORDER OF THINGS BUT WHETHER THIS WAS TO BE ATTRIBUTED TO ANY ALTERATION IN THE EARTH'S ORBIT WAS A QUESTION WHICH WOULD STILL REQUIRE SEVERAL DAYS TO DECIDE +5105-28240-0024-1648: DOUBTS NOW AROSE AND SOME DISCUSSION FOLLOWED WHETHER OR NOT IT WAS DESIRABLE FOR BEN ZOOF TO ACCOMPANY HIS MASTER +5105-28241-0000-1604: HER SEA GOING QUALITIES WERE EXCELLENT AND WOULD HAVE AMPLY SUFFICED FOR A CIRCUMNAVIGATION OF THE GLOBE +5105-28241-0001-1605: AFTER AN APPRENTICESHIP ON A MERCHANT SHIP HE HAD ENTERED THE IMPERIAL NAVY AND HAD ALREADY REACHED THE RANK OF LIEUTENANT WHEN THE COUNT APPOINTED HIM TO THE CHARGE OF HIS OWN PRIVATE YACHT IN WHICH HE WAS ACCUSTOMED TO SPEND BY (FAR THE->FARTHER) GREATER PART OF HIS TIME THROUGHOUT THE WINTER GENERALLY CRUISING IN THE MEDITERRANEAN WHILST IN THE SUMMER HE VISITED MORE NORTHERN WATERS +5105-28241-0002-1606: THE LATE ASTOUNDING EVENTS HOWEVER HAD RENDERED PROCOPE MANIFESTLY UNEASY AND NOT THE LESS SO FROM HIS CONSCIOUSNESS THAT THE COUNT SECRETLY PARTOOK OF HIS OWN ANXIETY +5105-28241-0003-1607: STEAM UP AND 
CANVAS SPREAD THE SCHOONER STARTED EASTWARDS +5105-28241-0004-1608: ALTHOUGH ONLY A MODERATE BREEZE WAS BLOWING THE SEA WAS ROUGH A CIRCUMSTANCE TO BE ACCOUNTED FOR ONLY BY THE DIMINUTION IN THE FORCE OF THE EARTH'S ATTRACTION RENDERING THE LIQUID (PARTICLES->PARTICLE) SO BUOYANT THAT BY THE MERE EFFECT OF OSCILLATION THEY WERE CARRIED TO A HEIGHT THAT WAS QUITE UNPRECEDENTED +5105-28241-0005-1609: FOR A FEW MILES SHE FOLLOWED THE LINE HITHERTO PRESUMABLY OCCUPIED BY THE COAST OF ALGERIA BUT NO LAND APPEARED TO THE SOUTH +5105-28241-0006-1610: THE LOG AND THE COMPASS THEREFORE WERE ABLE TO BE CALLED UPON TO DO THE WORK OF THE SEXTANT WHICH HAD BECOME UTTERLY USELESS +5105-28241-0007-1611: (THERE IS->THERE'S) NO FEAR OF THAT SIR +5105-28241-0008-1612: (*->THAT) THE EARTH HAS UNDOUBTEDLY ENTERED UPON A NEW ORBIT BUT SHE IS NOT INCURRING ANY PROBABLE RISK OF BEING PRECIPITATED (ONTO->ON TO) THE SUN +5105-28241-0009-1613: AND WHAT DEMONSTRATION DO YOU OFFER ASKED SERVADAC EAGERLY THAT IT WILL NOT HAPPEN +5105-28241-0010-1614: OCEAN (REIGNED->RAINED) SUPREME +5105-28241-0011-1615: ALL THE IMAGES OF HIS PAST LIFE FLOATED UPON HIS MEMORY HIS THOUGHTS SPED AWAY TO HIS NATIVE FRANCE ONLY TO RETURN AGAIN TO WONDER WHETHER THE DEPTHS OF OCEAN WOULD REVEAL ANY TRACES OF THE ALGERIAN METROPOLIS +5105-28241-0012-1616: IS IT NOT IMPOSSIBLE HE MURMURED ALOUD THAT ANY CITY SHOULD DISAPPEAR SO COMPLETELY +5105-28241-0013-1617: WOULD NOT THE LOFTIEST EMINENCES OF THE CITY AT LEAST BE VISIBLE +5105-28241-0014-1618: ANOTHER CIRCUMSTANCE WAS MOST REMARKABLE +5105-28241-0015-1619: TO THE SURPRISE OF ALL AND ESPECIALLY OF LIEUTENANT PROCOPE THE LINE INDICATED A BOTTOM AT A NEARLY UNIFORM DEPTH OF FROM FOUR TO FIVE FATHOMS AND ALTHOUGH THE SOUNDING WAS PERSEVERED WITH CONTINUOUSLY FOR MORE THAN TWO HOURS OVER A CONSIDERABLE AREA THE DIFFERENCES OF LEVEL WERE INSIGNIFICANT NOT CORRESPONDING IN ANY DEGREE TO WHAT WOULD BE EXPECTED OVER THE SITE OF A CITY THAT HAD BEEN TERRACED LIKE THE SEATS OF AN (AMPHITHEATER->AMPHITHEATRE) +5105-28241-0016-1620: YOU MUST SEE LIEUTENANT I SHOULD THINK THAT WE ARE NOT SO NEAR THE COAST OF ALGERIA AS YOU IMAGINED +5105-28241-0017-1621: AFTER PONDERING (AWHILE->A WHILE) HE SAID IF WE WERE FARTHER AWAY I SHOULD EXPECT TO FIND A DEPTH OF TWO OR THREE HUNDRED FATHOMS INSTEAD OF FIVE FATHOMS FIVE FATHOMS +5105-28241-0018-1622: ITS DEPTH REMAINED INVARIABLE STILL FOUR OR AT MOST FIVE FATHOMS AND ALTHOUGH ITS BOTTOM WAS ASSIDUOUSLY DREDGED IT WAS ONLY TO PROVE IT BARREN OF MARINE PRODUCTION OF ANY TYPE +5105-28241-0019-1623: NOTHING WAS TO BE DONE BUT TO PUT ABOUT AND RETURN (IN->AND) DISAPPOINTMENT (TOWARDS->TOWARD) THE NORTH +5142-33396-0000-898: AT ANOTHER TIME (HARALD->HAROLD) ASKED +5142-33396-0001-899: WHAT IS YOUR COUNTRY OLAF HAVE YOU ALWAYS BEEN A THRALL THE THRALL'S EYES FLASHED +5142-33396-0002-900: TWO HUNDRED WARRIORS FEASTED IN HIS HALL AND FOLLOWED HIM TO BATTLE +5142-33396-0003-901: THE REST OF YOU OFF A VIKING HE HAD THREE SHIPS +5142-33396-0004-902: THESE HE GAVE TO THREE OF MY BROTHERS +5142-33396-0005-903: BUT I STAYED THAT SPRING AND BUILT ME A BOAT +5142-33396-0006-904: I MADE HER (FOR ONLY->FALLING) TWENTY OARS BECAUSE I THOUGHT FEW MEN WOULD FOLLOW ME FOR I WAS YOUNG FIFTEEN YEARS OLD +5142-33396-0007-905: AT THE PROW I CARVED THE HEAD WITH OPEN MOUTH AND FORKED TONGUE THRUST OUT +5142-33396-0008-906: I PAINTED THE EYES RED FOR ANGER +5142-33396-0009-907: THERE STAND SO I SAID AND GLARE AND HISS AT MY FOES +5142-33396-0010-908: IN THE STERN I (CURVED->CARVED) THE 
TAIL UP ALMOST AS HIGH AS THE HEAD +5142-33396-0011-909: THERE SHE SAT ON THE ROLLERS AS FAIR A SHIP AS I EVER SAW +5142-33396-0012-910: THEN I WILL GET ME A FARM AND (WILL WINTER->WE'LL WINNER) IN THAT LAND NOW WHO WILL FOLLOW ME +5142-33396-0013-911: HE IS BUT A BOY THE (MEN->MAN) SAID +5142-33396-0014-912: THIRTY MEN ONE AFTER ANOTHER RAISED THEIR HORNS AND SAID +5142-33396-0015-913: AS OUR BOAT FLASHED DOWN THE ROLLERS INTO THE WATER I MADE THIS SONG AND SANG IT +5142-33396-0016-914: SO WE HARRIED THE COAST OF NORWAY +5142-33396-0017-915: WE ATE (AT->IT) MANY MEN'S TABLES UNINVITED +5142-33396-0018-916: (MY->I) DRAGON'S BELLY IS NEVER FULL AND ON BOARD WENT THE GOLD +5142-33396-0019-917: OH IT IS BETTER TO LIVE ON THE SEA AND LET OTHER MEN RAISE YOUR CROPS AND COOK YOUR MEALS +5142-33396-0020-918: A HOUSE SMELLS OF SMOKE A (SHIP SMELLS->SHIP'S MILLS) OF FROLIC +5142-33396-0021-919: UP AND DOWN THE WATER WE WENT TO GET MUCH WEALTH AND MUCH FROLIC +5142-33396-0022-920: WHAT (OF->IS) THE FARM (OLAF->OLOFF) NOT YET I ANSWERED VIKING IS BETTER FOR SUMMER +5142-33396-0023-921: IT WAS SO DARK THAT I COULD SEE NOTHING BUT A FEW SPARKS ON THE HEARTH +5142-33396-0024-922: I STOOD WITH MY BACK TO THE WALL FOR I WANTED NO SWORD REACHING OUT OF THE DARK FOR ME +5142-33396-0025-923: COME COME I CALLED WHEN NO ONE OBEYED A FIRE +5142-33396-0026-924: MY MEN LAUGHED YES A STINGY (HOST->HOSE) +5142-33396-0027-925: HE ACTS AS THOUGH HE (HAD->IS) NOT EXPECTED US +5142-33396-0028-926: ON A BENCH IN A FAR CORNER WERE A DOZEN PEOPLE HUDDLED TOGETHER +5142-33396-0029-927: BRING IN THE TABLE WE ARE HUNGRY +5142-33396-0030-928: THE THRALLS WERE (BRINGING->RINGING) IN A GREAT POT OF MEAT +5142-33396-0031-929: THEY SET UP A CRANE OVER THE FIRE AND HUNG THE POT UPON IT AND WE SAT AND WATCHED IT BOIL WHILE WE JOKED AT LAST THE SUPPER BEGAN +5142-33396-0032-930: THE FARMER SAT GLOOMILY ON THE BENCH AND WOULD NOT EAT AND YOU CANNOT WONDER FOR HE SAW US PUTTING POTFULS OF HIS GOOD BEEF AND (BASKET LOADS->BASCULADES) OF BREAD (INTO->AND) OUR BIG MOUTHS +5142-33396-0033-931: YOU WOULD NOT EAT WITH US YOU CANNOT SAY NO TO HALF OF MY ALE I DRINK THIS TO YOUR HEALTH +5142-33396-0034-932: THEN I DRANK HALF OF THE HORNFUL AND (SENT->SET) THE REST ACROSS THE FIRE TO THE FARMER HE TOOK IT AND SMILED SAYING +5142-33396-0035-933: DID YOU EVER HAVE SUCH A LORDLY GUEST BEFORE I WENT ON +5142-33396-0036-934: SO I WILL GIVE OUT THIS LAW THAT MY MEN SHALL NEVER LEAVE YOU ALONE +5142-33396-0037-935: (HAKON->HOCKIN) THERE SHALL BE YOUR CONSTANT COMPANION FRIEND FARMER +5142-33396-0038-936: HE SHALL NOT LEAVE YOU DAY OR NIGHT WHETHER YOU ARE WORKING OR PLAYING OR SLEEPING +5142-33396-0039-937: I (NAMED->NAME) NINE OTHERS AND SAID +5142-33396-0040-938: AND THESE SHALL FOLLOW YOUR THRALLS IN THE SAME WAY +5142-33396-0041-939: SO I SET GUARDS OVER EVERY ONE IN THAT HOUSE +5142-33396-0042-940: SO NO TALES GOT OUT TO THE NEIGHBORS BESIDES IT WAS A LONELY PLACE AND BY GOOD LUCK NO ONE CAME THAT WAY +5142-33396-0043-941: THEIR EYES DANCED BIG (THORLEIF->TORE LEAF) STOOD UP AND STRETCHED HIMSELF +5142-33396-0044-942: (I AM->I'M) STIFF WITH LONG SITTING HE SAID I ITCH FOR A FIGHT I TURNED TO THE FARMER +5142-33396-0045-943: THIS IS OUR LAST FEAST WITH YOU I SAID +5142-33396-0046-944: BY THE BEARD OF ODIN I CRIED YOU HAVE TAKEN OUR JOKE LIKE A MAN +5142-33396-0047-945: MY MEN POUNDED THE TABLE WITH THEIR FISTS +5142-33396-0048-946: BY THE HAMMER (OF->A) THOR SHOUTED GRIM (HERE->THERE) IS NO STINGY COWARD +5142-33396-0049-947: HERE FRIEND TAKE IT 
AND HE THRUST IT INTO THE FARMER'S HAND +5142-33396-0050-948: MAY YOU DRINK (HEART'S EASE->HEARTSEASE) FROM IT FOR MANY YEARS +5142-33396-0051-949: AND WITH IT I LEAVE YOU A NAME (SIF->SIFT) THE FRIENDLY I SHALL HOPE TO DRINK WITH YOU (SOMETIME->SOME TIME) IN VALHALLA +5142-33396-0052-950: HERE IS A RING FOR SIF THE FRIENDLY AND HERE IS A BRACELET (*->AND) A SWORD WOULD NOT BE ASHAMED TO HANG AT YOUR SIDE +5142-33396-0053-951: I TOOK FIVE GREAT BRACELETS OF GOLD FROM OUR TREASURE CHEST AND GAVE THEM TO HIM +5142-33396-0054-952: THAT IS THE BEST WAY TO DECIDE FOR THE SPEAR WILL ALWAYS POINT SOMEWHERE AND ONE THING IS AS GOOD AS ANOTHER +5142-33396-0055-953: THAT TIME IT POINTED US INTO YOUR FATHER'S SHIPS +5142-33396-0056-954: HERE THEY SAID IS A RASCAL WHO HAS BEEN HARRYING OUR COASTS +5142-33396-0057-955: WE SUNK HIS SHIP AND MEN BUT HIM WE BROUGHT TO YOU +5142-33396-0058-956: A ROBBER VIKING SAID THE KING AND (*->HE) SCOWLED AT ME +5142-33396-0059-957: YES AND WITH ALL YOUR FINGERS IT TOOK YOU A YEAR TO CATCH ME THE KING FROWNED MORE ANGRILY +5142-33396-0060-958: TAKE HIM OUT (THORKEL->TORCOLE) AND LET HIM TASTE YOUR SWORD +5142-33396-0061-959: YOUR MOTHER THE QUEEN WAS STANDING BY +5142-33396-0062-960: NOW SHE PUT HER HAND ON HIS ARM AND SMILED AND SAID +5142-33396-0063-961: AND WOULD HE NOT BE A GOOD GIFT FOR OUR BABY +5142-33396-0064-962: YOUR FATHER THOUGHT A MOMENT (THEN->AND) LOOKED AT YOUR MOTHER AND SMILED +5142-33396-0065-963: SOFT HEART HE SAID GENTLY TO HER THEN TO (THORKEL->TORCOAL) WELL LET HIM GO (THORKEL->TORCOAL) +5142-33396-0066-964: THEN HE TURNED TO ME AGAIN FROWNING +5142-33396-0067-965: BUT YOUNG SHARP TONGUE NOW THAT (WE HAVE->WE'VE) CAUGHT YOU (WE->*) WILL PUT YOU INTO A TRAP THAT YOU CANNOT GET OUT OF +5142-33396-0068-966: SO I LIVED AND NOW (AM->I'M) YOUR TOOTH THRALL WELL IT IS THE LUCK OF WAR +5142-36377-0000-870: IT WAS ONE OF THE MASTERLY AND CHARMING STORIES OF (DUMAS->DE MAU) THE ELDER +5142-36377-0001-871: IN FIVE MINUTES I WAS IN A NEW WORLD AND MY MELANCHOLY ROOM WAS FULL OF THE LIVELIEST FRENCH COMPANY +5142-36377-0002-872: THE SOUND OF AN IMPERATIVE AND UNCOMPROMISING BELL RECALLED ME IN DUE TIME TO THE REGIONS OF REALITY +5142-36377-0003-873: AMBROSE MET ME AT THE BOTTOM OF THE STAIRS AND SHOWED ME THE WAY TO THE SUPPER ROOM +5142-36377-0004-874: SHE SIGNED TO ME WITH A GHOSTLY SOLEMNITY TO TAKE THE VACANT PLACE ON THE LEFT OF HER FATHER +5142-36377-0005-875: THE DOOR OPENED AGAIN WHILE I WAS STILL STUDYING THE TWO BROTHERS WITHOUT I HONESTLY CONFESS BEING VERY FAVORABLY IMPRESSED BY EITHER OF THEM +5142-36377-0006-876: A NEW MEMBER OF THE FAMILY CIRCLE WHO INSTANTLY ATTRACTED MY ATTENTION ENTERED THE ROOM +5142-36377-0007-877: A LITTLE CRACKED THAT IN THE POPULAR PHRASE WAS MY IMPRESSION OF THE STRANGER WHO NOW MADE HIS APPEARANCE IN THE SUPPER ROOM +5142-36377-0008-878: MISTER (MEADOWCROFT->MEDICROFT) THE ELDER HAVING NOT SPOKEN ONE WORD THUS FAR HIMSELF INTRODUCED THE (NEWCOMER->NEW COMER) TO ME WITH A (SIDE->SIGH) GLANCE AT HIS SONS WHICH HAD SOMETHING LIKE DEFIANCE IN IT A GLANCE WHICH AS I WAS SORRY TO NOTICE WAS RETURNED WITH THE DEFIANCE ON THEIR SIDE BY THE TWO YOUNG MEN +5142-36377-0009-879: PHILIP (LEFRANK->LE FRANK) THIS IS MY OVERLOOKER MISTER (JAGO->YAGO) SAID THE OLD MAN FORMALLY PRESENTING US +5142-36377-0010-880: HE IS NOT WELL HE HAS COME OVER THE OCEAN FOR REST AND (CHANGE OF->CHANGES) SCENE +5142-36377-0011-881: (MISTER JAGO->THIS GIAGO) IS AN AMERICAN PHILIP +5142-36377-0012-882: MAKE ACQUAINTANCE WITH (MISTER JAGO->MISS GIAGO) 
SIT TOGETHER +5142-36377-0013-883: THEY POINTEDLY DREW BACK FROM JOHN (JAGO->YAGO) AS HE APPROACHED THE EMPTY CHAIR NEXT (TO->*) ME AND MOVED ROUND TO THE OPPOSITE SIDE OF THE TABLE +5142-36377-0014-884: A PRETTY GIRL AND SO FAR AS I COULD JUDGE BY APPEARANCES A GOOD GIRL TOO DESCRIBING HER GENERALLY I MAY SAY THAT SHE HAD A SMALL HEAD WELL CARRIED AND WELL SET ON HER SHOULDERS BRIGHT GRAY EYES THAT LOOKED AT YOU HONESTLY AND MEANT WHAT THEY LOOKED A TRIM SLIGHT LITTLE FIGURE TOO SLIGHT FOR OUR ENGLISH NOTIONS OF BEAUTY A STRONG AMERICAN ACCENT AND A RARE THING IN AMERICA A PLEASANTLY TONED VOICE WHICH MADE THE ACCENT AGREEABLE TO ENGLISH EARS +5142-36377-0015-885: OUR FIRST IMPRESSIONS OF PEOPLE ARE IN NINE CASES OUT OF TEN THE RIGHT IMPRESSIONS +5142-36377-0016-886: FOR ONCE IN A WAY I PROVED A TRUE PROPHET +5142-36377-0017-887: THE ONLY CHEERFUL CONVERSATION WAS THE CONVERSATION ACROSS THE TABLE BETWEEN NAOMI AND ME +5142-36377-0018-888: HE LOOKED UP (AT NAOMI->AND NOW ON ME) DOUBTINGLY FROM HIS PLATE AND LOOKED DOWN AGAIN SLOWLY WITH A FROWN +5142-36377-0019-889: WHEN I ADDRESSED HIM HE ANSWERED CONSTRAINEDLY +5142-36377-0020-890: A MORE DREARY AND MORE DISUNITED FAMILY PARTY I NEVER SAT AT THE TABLE WITH +5142-36377-0021-891: ENVY HATRED MALICE AND UNCHARITABLENESS ARE NEVER SO ESSENTIALLY DETESTABLE TO MY MIND AS WHEN THEY ARE ANIMATED BY (A->THE) SENSE OF PROPRIETY AND WORK UNDER THE SURFACE BUT FOR MY INTEREST IN (NAOMI->NAY OWE ME) AND MY OTHER INTEREST IN THE LITTLE LOVE LOOKS WHICH I NOW AND THEN SURPRISED PASSING BETWEEN HER AND AMBROSE I SHOULD NEVER HAVE SAT THROUGH THAT SUPPER +5142-36377-0022-892: I WISH YOU GOOD NIGHT SHE LAID HER BONY HANDS ON THE BACK OF MISTER MEADOWCROFT'S INVALID CHAIR CUT HIM SHORT IN HIS FAREWELL SALUTATION TO ME AND WHEELED HIM OUT TO HIS BED AS IF SHE WERE WHEELING HIM OUT TO HIS GRAVE +5142-36377-0023-893: YOU WERE QUITE RIGHT TO SAY NO AMBROSE BEGAN NEVER SMOKE WITH (JOHN JAGO->JOHNNIEAUGO) HIS CIGARS WILL POISON YOU +5142-36377-0024-894: (NAOMI->THEY ONLY) SHOOK HER FOREFINGER REPROACHFULLY AT THEM AS IF THE TWO STURDY YOUNG FARMERS HAD BEEN TWO CHILDREN +5142-36377-0025-895: SILAS SLUNK AWAY WITHOUT A WORD OF PROTEST AMBROSE STOOD HIS GROUND EVIDENTLY BENT ON MAKING HIS PEACE (WITH->WHEN) NAOMI BEFORE HE LEFT HER SEEING THAT I WAS IN THE WAY I WALKED ASIDE TOWARD A GLASS DOOR AT THE LOWER END OF THE ROOM +5142-36586-0000-967: IT IS MANIFEST THAT MAN IS NOW SUBJECT TO MUCH VARIABILITY +5142-36586-0001-968: SO IT IS WITH THE LOWER ANIMALS +5142-36586-0002-969: THE VARIABILITY OF MULTIPLE PARTS +5142-36586-0003-970: BUT THIS SUBJECT WILL BE MORE PROPERLY DISCUSSED WHEN WE TREAT OF THE DIFFERENT RACES OF MANKIND +5142-36586-0004-971: EFFECTS OF THE INCREASED USE AND DISUSE OF PARTS +5142-36600-0000-896: CHAPTER SEVEN ON THE RACES OF MAN +5142-36600-0001-897: IN DETERMINING WHETHER TWO OR MORE ALLIED FORMS OUGHT TO BE RANKED (AS->A) SPECIES OR VARIETIES NATURALISTS ARE PRACTICALLY GUIDED BY THE FOLLOWING CONSIDERATIONS NAMELY THE AMOUNT OF DIFFERENCE BETWEEN THEM AND WHETHER SUCH (DIFFERENCES->DIFFERENCE IS) RELATE TO FEW OR MANY POINTS OF STRUCTURE AND WHETHER THEY ARE OF PHYSIOLOGICAL IMPORTANCE BUT MORE ESPECIALLY WHETHER THEY ARE CONSTANT +5639-40744-0000-137: ELEVEN O'CLOCK HAD STRUCK IT WAS A FINE CLEAR NIGHT (THEY->THERE) WERE THE ONLY PERSONS ON THE ROAD AND THEY SAUNTERED LEISURELY ALONG TO AVOID PAYING THE PRICE OF FATIGUE FOR THE RECREATION PROVIDED FOR THE TOLEDANS IN (THEIR->THE) VALLEY OR ON THE BANKS OF THEIR RIVER 
+5639-40744-0001-138: SECURE AS HE THOUGHT IN THE CAREFUL ADMINISTRATION OF JUSTICE IN THAT CITY AND THE CHARACTER OF ITS WELL DISPOSED INHABITANTS THE GOOD (HIDALGO->HAD ALGO) WAS FAR FROM THINKING THAT ANY DISASTER COULD (BEFAL->BEFALL) HIS FAMILY +5639-40744-0002-139: (RODOLFO->RUDOLPHO) AND HIS COMPANIONS WITH THEIR FACES MUFFLED IN THEIR CLOAKS STARED RUDELY AND INSOLENTLY AT THE MOTHER THE DAUGHTER AND THE SERVANT MAID +5639-40744-0003-140: IN A MOMENT HE COMMUNICATED HIS THOUGHTS TO HIS COMPANIONS AND IN THE NEXT MOMENT THEY RESOLVED TO TURN BACK AND CARRY HER OFF TO PLEASE (RODOLFO->RUDOLPHO) FOR THE RICH WHO ARE OPEN HANDED ALWAYS FIND (PARASITES->PARRICIDES) READY TO ENCOURAGE THEIR BAD PROPENSITIES AND THUS TO CONCEIVE THIS WICKED DESIGN TO COMMUNICATE IT APPROVE IT RESOLVE ON RAVISHING (LEOCADIA->THE OCCAIA) AND TO CARRY THAT DESIGN INTO EFFECT WAS THE WORK OF A MOMENT +5639-40744-0004-141: THEY DREW THEIR SWORDS HID THEIR FACES IN THE FLAPS OF THEIR CLOAKS TURNED BACK AND SOON CAME IN FRONT OF THE LITTLE PARTY WHO HAD NOT YET DONE GIVING THANKS TO GOD FOR THEIR ESCAPE FROM THOSE AUDACIOUS MEN +5639-40744-0005-142: FINALLY THE ONE PARTY WENT OFF EXULTING AND THE OTHER WAS LEFT IN DESOLATION AND WOE +5639-40744-0006-143: (RODOLFO->RODOLPHO) ARRIVED AT HIS OWN HOUSE WITHOUT ANY IMPEDIMENT (AND LEOCADIA'S->A LOCATEUS) PARENTS REACHED THEIRS HEART BROKEN AND DESPAIRING +5639-40744-0007-144: MEANWHILE (RODOLFO->RUDOLPHO) HAD (LEOCADIA->LOCALIA) SAFE IN HIS CUSTODY AND IN HIS OWN APARTMENT +5639-40744-0008-145: WHO TOUCHES ME AM I IN BED +5639-40744-0009-146: MOTHER DEAR FATHER DO YOU HEAR ME +5639-40744-0010-147: IT IS THE ONLY AMENDS I ASK OF YOU FOR THE WRONG YOU HAVE DONE ME +5639-40744-0011-148: SHE FOUND THE DOOR BUT IT WAS LOCKED OUTSIDE +5639-40744-0012-149: SHE SUCCEEDED IN OPENING THE WINDOW AND THE MOONLIGHT SHONE IN SO BRIGHTLY THAT SHE COULD DISTINGUISH THE (COLOUR->COLOR) OF SOME DAMASK (HANGINGS->HANGING) IN THE ROOM +5639-40744-0013-150: SHE SAW THAT THE BED WAS GILDED AND SO RICH THAT IT SEEMED THAT OF A PRINCE (*->THE) RATHER (THAN->THAT) OF A PRIVATE GENTLEMAN +5639-40744-0014-151: AMONG OTHER THINGS ON WHICH (SHE->HE) CAST HER EYES WAS A SMALL CRUCIFIX OF SOLID SILVER STANDING ON A CABINET NEAR THE WINDOW +5639-40744-0015-152: THIS PERSON WAS (RODOLFO->RIDOLPHO) WHO THOUGH HE HAD GONE TO LOOK FOR HIS FRIENDS HAD CHANGED HIS MIND IN THAT RESPECT (NOT THINKING->NOTHING) IT ADVISABLE TO ACQUAINT THEM WITH WHAT HAD PASSED BETWEEN HIM AND THE GIRL +5639-40744-0016-153: ON THE CONTRARY HE RESOLVED TO TELL THEM THAT REPENTING OF HIS VIOLENCE AND MOVED BY (HER->A) TEARS HE HAD ONLY CARRIED HER HALF WAY TOWARDS HIS HOUSE AND THEN LET HER GO +5639-40744-0017-154: CHOKING WITH EMOTION (LEOCADI->LUCADIA) MADE A SIGN TO HER PARENTS THAT SHE WISHED TO BE ALONE WITH THEM +5639-40744-0018-155: THAT WOULD BE VERY WELL MY CHILD REPLIED HER FATHER IF YOUR PLAN (WERE->WOULD) NOT LIABLE TO BE FRUSTRATED BY ORDINARY CUNNING BUT NO DOUBT THIS IMAGE (HAS->HAD) BEEN ALREADY MISSED BY ITS OWNER AND HE WILL HAVE SET IT DOWN FOR CERTAIN THAT IT WAS TAKEN OUT OF THE ROOM BY THE PERSON HE LOCKED UP THERE +5639-40744-0019-156: WHAT YOU HAD BEST DO MY CHILD IS TO KEEP IT AND PRAY TO IT THAT SINCE IT WAS A WITNESS TO YOUR UNDOING IT WILL DEIGN TO VINDICATE YOUR CAUSE BY ITS RIGHTEOUS JUDGMENT +5639-40744-0020-157: THUS DID (THIS->THE) HUMANE AND RIGHT MINDED FATHER COMFORT HIS UNHAPPY DAUGHTER AND HER MOTHER EMBRACING HER AGAIN DID ALL SHE COULD TO SOOTHE (HER->A) FEELINGS +5639-40744-0021-158: 
SHE MEANWHILE PASSED HER LIFE WITH HER PARENTS IN THE STRICTEST RETIREMENT NEVER LETTING HERSELF BE SEEN BUT SHUNNING EVERY EYE LEST IT SHOULD READ HER MISFORTUNE IN HER FACE +5639-40744-0022-159: TIME ROLLED ON THE HOUR OF HER DELIVERY ARRIVED IT TOOK PLACE IN THE UTMOST SECRECY HER MOTHER TAKING UPON HER THE OFFICE OF MIDWIFE (AND->AS) SHE GAVE BIRTH TO A SON ONE OF THE MOST BEAUTIFUL EVER SEEN +5639-40744-0023-160: (WHEN->AND) THE BOY WALKED THROUGH THE STREETS BLESSINGS WERE SHOWERED UPON HIM BY ALL WHO SAW HIM (BLESSINGS->BLESSING) UPON HIS BEAUTY UPON THE MOTHER THAT BORE HIM UPON THE FATHER THAT BEGOT HIM UPON THOSE WHO BROUGHT HIM UP SO WELL +5639-40744-0024-161: ONE DAY WHEN THE BOY WAS SENT BY HIS GRANDFATHER WITH A MESSAGE TO A RELATION HE PASSED ALONG A STREET IN WHICH THERE WAS A GREAT CONCOURSE OF HORSEMEN +5639-40744-0025-162: THE BED SHE TOO WELL REMEMBERED WAS THERE AND ABOVE ALL THE CABINET ON WHICH HAD STOOD THE IMAGE SHE HAD TAKEN AWAY WAS STILL ON THE SAME SPOT +5639-40744-0026-163: (LUIS->LOUIS) WAS OUT OF DANGER IN A FORTNIGHT IN A MONTH HE ROSE FROM HIS BED AND (DURING->DREWING) ALL THAT TIME HE WAS VISITED DAILY BY HIS MOTHER AND GRANDMOTHER AND TREATED BY THE MASTER AND MISTRESS OF THE HOUSE AS IF HE WAS THEIR OWN CHILD +5639-40744-0027-164: THUS SAYING AND PRESSING THE CRUCIFIX TO HER BREAST SHE FELL FAINTING INTO THE ARMS OF DONA (ESTAFANIA->ESTAFFANIA) WHO AS A GENTLEWOMAN TO WHOSE SEX PITY IS (AS->A) NATURAL AS CRUELTY (IS->AS) TO MAN INSTANTLY PRESSED HER LIPS TO THOSE OF THE FAINTING GIRL SHEDDING OVER HER SO MANY TEARS THAT THERE NEEDED NO OTHER SPRINKLING OF WATER TO RECOVER (LEOCADIA->LOCATIA) FROM HER SWOON +5639-40744-0028-165: I HAVE GREAT THINGS TO TELL YOU SENOR SAID (DONA ESTAFANIA->DORNESTE FANIA) TO HER HUSBAND THE CREAM AND SUBSTANCE OF WHICH IS THIS THE FAINTING GIRL BEFORE YOU IS YOUR DAUGHTER AND (THAT->THE) BOY IS YOUR GRANDSON +5639-40744-0029-166: THIS TRUTH WHICH I HAVE LEARNED FROM HER LIPS IS CONFIRMED BY HIS FACE IN WHICH WE HAVE BOTH BEHELD THAT OF OUR SON +5639-40744-0030-167: JUST THEN (LEOCADIA CAME TO HERSELF->LOQUES) AND EMBRACING THE CROSS SEEMED CHANGED INTO A SEA OF TEARS AND THE GENTLEMAN (REMAINED->REMAINING) IN UTTER BEWILDERMENT UNTIL HIS WIFE HAD REPEATED TO HIM FROM BEGINNING TO END (LEOCADIA'S->LUCADIUS) WHOLE STORY AND HE BELIEVED IT THROUGH THE BLESSED DISPENSATION OF HEAVEN WHICH HAD CONFIRMED IT BY SO MANY CONVINCING TESTIMONIES +5639-40744-0031-168: SO PERSUASIVE WERE HER ENTREATIES AND SO STRONG HER ASSURANCES THAT NO HARM WHATEVER COULD RESULT TO THEM FROM THE INFORMATION SHE SOUGHT THEY WERE INDUCED TO CONFESS THAT ONE SUMMER'S NIGHT THE SAME SHE HAD MENTIONED THEMSELVES (AND->IN) ANOTHER FRIEND BEING OUT ON A (STROLL->STRAW) WITH (RODOLFO->RADOLPHO) THEY HAD BEEN CONCERNED IN THE (ABDUCTION->ADOCTION) OF A GIRL WHOM (RODOLFO->UDOLPH) CARRIED OFF WHILST THE REST OF THEM DETAINED HER FAMILY WHO MADE A GREAT OUTCRY AND WOULD HAVE DEFENDED HER IF THEY COULD +5639-40744-0032-169: FOR GOD'S SAKE MY LADY MOTHER GIVE ME A WIFE WHO WOULD BE AN AGREEABLE COMPANION NOT ONE WHO WILL DISGUST ME SO THAT WE MAY BOTH BEAR EVENLY AND WITH MUTUAL GOOD WILL THE YOKE IMPOSED ON US BY HEAVEN INSTEAD OF PULLING THIS WAY AND THAT WAY AND FRETTING EACH OTHER TO DEATH +5639-40744-0033-170: HER BEARING WAS GRACEFUL (AND->*) ANIMATED SHE LED HER SON BY THE HAND AND BEFORE HER WALKED TWO MAIDS WITH WAX LIGHTS AND SILVER CANDLESTICKS +5639-40744-0034-171: ALL ROSE TO DO HER REVERENCE AS IF SOMETHING FROM HEAVEN HAD MIRACULOUSLY APPEARED 
BEFORE THEM BUT GAZING ON HER ENTRANCED WITH ADMIRATION NOT ONE OF THEM WAS ABLE TO ADDRESS A SINGLE WORD TO HER +5639-40744-0035-172: SHE REFLECTED HOW NEAR SHE STOOD TO THE CRISIS WHICH WAS TO DETERMINE WHETHER SHE WAS TO BE BLESSED OR UNHAPPY FOR EVER AND RACKED BY THE INTENSITY OF HER EMOTIONS SHE SUDDENLY CHANGED (COLOUR->COLOR) HER HEAD DROPPED AND SHE FELL FORWARD IN A SWOON INTO THE ARMS OF THE (DISMAYED ESTAFANIA->DISMAYEDESTAFHANIA) +5639-40744-0036-173: HIS MOTHER HAD LEFT HER TO HIM AS BEING HER DESTINED PROTECTOR BUT WHEN SHE SAW THAT HE TOO WAS INSENSIBLE SHE WAS NEAR MAKING A THIRD AND WOULD HAVE DONE SO HAD HE NOT COME TO HIMSELF +5639-40744-0037-174: KNOW THEN SON OF MY HEART THAT THIS FAINTING LADY IS YOUR REAL BRIDE I SAY REAL BECAUSE SHE IS THE ONE WHOM YOUR FATHER AND I HAVE CHOSEN FOR YOU AND (THE->A) PORTRAIT WAS A PRETENCE +5639-40744-0038-175: JUST AT (THE->A) MOMENT WHEN THE TEARS OF THE PITYING BEHOLDERS FLOWED FASTEST AND (THEIR->THERE) EJACULATIONS WERE MOST EXPRESSIVE OF DESPAIR (LEOCADIA->THE OCCAS) GAVE SIGNS OF RECOVERY AND BROUGHT BACK GLADNESS TO THE HEARTS OF ALL +5639-40744-0039-176: WHEN SHE CAME TO HER SENSES AND BLUSHING TO FIND HERSELF IN (RODOLFO'S->GODOLPH'S) ARMS WOULD HAVE DISENGAGED HERSELF NO SENORA HE SAID THAT MUST NOT BE STRIVE NOT TO WITHDRAW FROM THE ARMS OF HIM WHO HOLDS YOU IN HIS SOUL +5639-40744-0040-177: THIS WAS DONE FOR THE EVENT TOOK PLACE AT A TIME (WHEN->WITH) THE CONSENT OF THE PARTIES WAS SUFFICIENT FOR THE CELEBRATION OF A MARRIAGE WITHOUT ANY OF THE PRELIMINARY FORMALITIES WHICH ARE NOW SO PROPERLY REQUIRED +5639-40744-0041-178: NOR WAS (RODOLFO->RDOLPHAL) LESS SURPRISED THAN THEY AND THE BETTER TO ASSURE HIMSELF OF SO WONDERFUL A FACT HE BEGGED (LEOCADIA->LOU KATYA) TO GIVE HIM SOME TOKEN WHICH SHOULD MAKE PERFECTLY CLEAR TO HIM THAT WHICH INDEED HE DID NOT DOUBT SINCE IT WAS AUTHENTICATED BY HIS PARENTS +5683-32865-0000-2483: YOU KNOW CAPTAIN LAKE +5683-32865-0001-2484: SAID LORD CHELFORD ADDRESSING ME +5683-32865-0002-2485: HE HAD HIS HAND UPON LAKE'S SHOULDER +5683-32865-0003-2486: THEY ARE COUSINS YOU KNOW WE ARE ALL COUSINS +5683-32865-0004-2487: WHATEVER LORD CHELFORD SAID MISS BRANDON RECEIVED IT VERY GRACIOUSLY AND EVEN WITH A MOMENTARY SMILE +5683-32865-0005-2488: BUT HER GREETING TO CAPTAIN (LAKE->LEEK) WAS MORE THAN USUALLY HAUGHTY AND FROZEN AND HER FEATURES I FANCIED PARTICULARLY PROUD AND PALE +5683-32865-0006-2489: AT DINNER LAKE WAS EASY AND AMUSING +5683-32865-0007-2490: (I'M->I AM) GLAD YOU LIKE IT SAYS (WYLDER->WILDER) CHUCKLING BENIGNANTLY ON IT OVER HIS SHOULDER +5683-32865-0008-2491: I BELIEVE I HAVE A LITTLE TASTE THAT WAY THOSE ARE ALL REAL YOU KNOW THOSE JEWELS +5683-32865-0009-2492: AND HE PLACED IT IN THAT GENTLEMAN'S FINGERS WHO NOW TOOK HIS TURN AT THE LAMP AND CONTEMPLATED THE LITTLE (PARALLELOGRAM->PARALLELLOGRAM) WITH A GLEAM OF SLY AMUSEMENT +5683-32865-0010-2493: I WAS THINKING IT'S VERY LIKE THE ACE OF HEARTS ANSWERED THE CAPTAIN SOFTLY SMILING ON +5683-32865-0011-2494: WHEREUPON LAKE LAUGHED QUIETLY STILL LOOKING ON THE ACE OF HEARTS WITH HIS SLY EYES +5683-32865-0012-2495: AND WYLDER LAUGHED TOO MORE SUDDENLY AND NOISILY THAN THE HUMOUR OF THE JOKE SEEMED QUITE TO CALL FOR AND GLANCED A GRIM LOOK FROM THE CORNERS OF HIS EYES ON LAKE BUT THE GALLANT CAPTAIN DID NOT SEEM TO PERCEIVE IT AND AFTER A FEW SECONDS MORE HE HANDED IT VERY INNOCENTLY BACK TO MISSUS DOROTHY ONLY REMARKING +5683-32865-0013-2496: DO YOU KNOW LAKE OH I REALLY CAN'T TELL BUT HE'LL SOON TIRE OF COUNTRY LIFE 
+5683-32865-0014-2497: HE'S NOT A MAN FOR COUNTRY QUARTERS +5683-32865-0015-2498: I HAD A HORRID DREAM ABOUT HIM LAST NIGHT THAT +5683-32865-0016-2499: OH I KNOW THAT'S (LORNE->LORN) BRANDON +5683-32865-0017-2500: ALL THE TIME HE WAS TALKING TO ME HIS ANGRY LITTLE EYES WERE FOLLOWING LAKE +5683-32866-0000-2527: MISS LAKE DECLINED THE CARRIAGE TO NIGHT +5683-32866-0001-2528: AND HE ADDED SOMETHING STILL LESS COMPLIMENTARY +5683-32866-0002-2529: BUT DON'T THESE VERY WISE THINGS SOMETIMES TURN OUT VERY FOOLISHLY +5683-32866-0003-2530: IN THE MEANTIME I HAD FORMED A NEW IDEA OF HER +5683-32866-0004-2531: BY THIS TIME LORD CHELFORD AND WYLDER RETURNED AND DISGUSTED RATHER WITH MYSELF I RUMINATED ON MY WANT OF (GENERAL SHIP->GENERALSHIP) +5683-32866-0005-2532: AND HE MADE A LITTLE DIP OF HIS CANE TOWARDS BRANDON HALL OVER HIS SHOULDER +5683-32866-0006-2533: YES SO THEY SAID BUT THAT WOULD I THINK HAVE BEEN WORSE +5683-32866-0007-2534: IF A FELLOW'S BEEN A LITTLE BIT WILD (HE'S->HE IS) BEELZEBUB AT ONCE +5683-32866-0008-2535: (BRACTON'S->BROCKTON'S) A VERY GOOD FELLOW I CAN ASSURE YOU +5683-32866-0009-2536: I DON'T KNOW (AND->ONE) CAN'T SAY HOW YOU (FINE->FIND) GENTLEMEN (DEFINE->TO FIND) WICKEDNESS ONLY AS AN OBSCURE FEMALE I SPEAK ACCORDING TO MY LIGHTS AND HE IS GENERALLY THOUGHT THE WICKEDEST MAN IN THIS COUNTY +5683-32866-0010-2537: WELL YOU KNOW RADIE WOMEN LIKE WICKED FELLOWS IT IS CONTRAST I SUPPOSE BUT THEY DO AND I'M SURE FROM WHAT BRACTON HAS SAID TO ME I KNOW HIM INTIMATELY THAT DORCAS LIKES HIM AND I CAN'T CONCEIVE WHY THEY ARE NOT MARRIED +5683-32866-0011-2538: THEIR WALK CONTINUED SILENT FOR THE GREATER PART NEITHER WAS QUITE SATISFIED WITH THE OTHER BUT RACHEL AT LAST SAID +5683-32866-0012-2539: NOW THAT'S IMPOSSIBLE RADIE FOR I REALLY DON'T THINK I ONCE THOUGHT OF HIM ALL THIS EVENING EXCEPT JUST WHILE WE WERE TALKING +5683-32866-0013-2540: THERE WAS A BRIGHT MOONLIGHT BROKEN BY THE SHADOWS OF OVERHANGING BOUGHS AND WITHERED LEAVES AND THE MOTTLED LIGHTS AND SHADOWS GLIDED ODDLY ACROSS HIS PALE FEATURES +5683-32866-0014-2541: DON'T INSULT ME STANLEY BY TALKING AGAIN AS YOU DID THIS MORNING +5683-32866-0015-2542: WHAT I SAY IS ALTOGETHER ON YOUR OWN ACCOUNT +5683-32866-0016-2543: MARK MY WORDS YOU'LL FIND HIM TOO STRONG FOR YOU (AYE->I) AND TOO DEEP +5683-32866-0017-2544: I AM VERY UNEASY ABOUT IT WHATEVER IT IS I CAN'T HELP IT +5683-32866-0018-2545: TO MY MIND THERE HAS ALWAYS BEEN SOMETHING INEXPRESSIBLY AWFUL IN FAMILY FEUDS +5683-32866-0019-2546: THE MYSTERY OF THEIR ORIGIN THEIR CAPACITY FOR EVOLVING LATENT FACULTIES OF CRIME AND THE (STEADY->STUDY) VITALITY WITH WHICH THEY (SURVIVE->SURVIVED) THE HEARSE AND SPEAK THEIR DEEP MOUTHED MALIGNITIES IN EVERY NEW BORN GENERATION HAVE ASSOCIATED THEM SOMEHOW IN MY MIND WITH (A->THE) SPELL OF LIFE EXCEEDING AND DISTINCT FROM HUMAN AND (A SPECIAL->ESPECIAL) SATANIC ACTION +5683-32866-0020-2547: THE FLOOR MORE THAN ANYTHING ELSE SHOWED THE GREAT AGE OF THE ROOM +5683-32866-0021-2548: MY BED WAS UNEXCEPTIONABLY COMFORTABLE BUT IN MY THEN MOOD I COULD HAVE WISHED IT A GREAT DEAL MORE MODERN +5683-32866-0022-2549: ITS CURTAINS WERE OF THICK AND FADED TAPESTRY +5683-32866-0023-2550: ALL THE FURNITURE BELONGED TO OTHER TIMES +5683-32866-0024-2551: I (SHAN'T->SHA'N'T) TROUBLE YOU ABOUT MY TRAIN OF THOUGHTS OR FANCIES BUT I BEGAN TO FEEL VERY LIKE A GENTLEMAN IN A GHOST STORY WATCHING EXPERIMENTALLY IN A HAUNTED CHAMBER +5683-32866-0025-2552: I DID NOT EVEN TAKE THE PRECAUTION OF SMOKING UP THE CHIMNEY +5683-32866-0026-2553: I BOLDLY LIGHTED 
MY (CHEROOT->TRUTH) +5683-32866-0027-2554: A COLD BRIGHT MOON WAS SHINING WITH CLEAR SHARP LIGHTS AND SHADOWS +5683-32866-0028-2555: THE SOMBRE OLD TREES LIKE GIGANTIC HEARSE PLUMES BLACK AND AWFUL +5683-32866-0029-2556: SOMEHOW I HAD GROWN NERVOUS +5683-32866-0030-2557: A LITTLE BIT OF PLASTER TUMBLED DOWN THE CHIMNEY AND STARTLED ME CONFOUNDEDLY +5683-32879-0000-2501: IT WAS NOT VERY MUCH PAST ELEVEN THAT MORNING WHEN THE PONY CARRIAGE FROM BRANDON DREW UP BEFORE THE LITTLE GARDEN WICKET OF REDMAN'S FARM +5683-32879-0001-2502: (WELL->WHILE) SHE WAS BETTER THOUGH SHE HAD HAD A BAD NIGHT +5683-32879-0002-2503: SO THERE CAME A STEP AND A LITTLE RUSTLING OF FEMININE DRAPERIES THE SMALL DOOR OPENED AND RACHEL ENTERED WITH HER HAND EXTENDED AND A PALE SMILE OF WELCOME +5683-32879-0003-2504: WOMEN CAN HIDE THEIR PAIN BETTER THAN WE MEN AND BEAR IT BETTER TOO EXCEPT WHEN SHAME DROPS FIRE INTO THE DREADFUL CHALICE +5683-32879-0004-2505: BUT POOR RACHEL LAKE HAD MORE THAN THAT STOICAL HYPOCRISY WHICH ENABLES THE TORTURED SPIRITS OF HER SEX TO LIFT A PALE FACE THROUGH THE FLAMES AND SMILE +5683-32879-0005-2506: THIS TRANSIENT SPRING AND LIGHTING UP ARE BEAUTIFUL A GLAMOUR BEGUILING OUR SENSES +5683-32879-0006-2507: THERE WAS SOMETHING OF SWEETNESS AND FONDNESS IN HER TONES AND MANNER WHICH WAS NEW TO RACHEL AND COMFORTING AND SHE RETURNED THE GREETING AS KINDLY AND FELT MORE LIKE HER FORMER SELF +5683-32879-0007-2508: RACHEL'S PALE AND SHARPENED FEATURES AND DILATED EYE STRUCK HER WITH A PAINFUL SURPRISE +5683-32879-0008-2509: YOU HAVE BEEN SO ILL MY POOR RACHEL +5683-32879-0009-2510: ILL AND TROUBLED DEAR TROUBLED IN MIND AND MISERABLY NERVOUS +5683-32879-0010-2511: POOR RACHEL HER NATURE RECOILED FROM DECEIT AND SHE TOLD AT ALL EVENTS AS MUCH OF THE TRUTH AS SHE DARED +5683-32879-0011-2512: SHE SPOKE WITH A SUDDEN ENERGY WHICH PARTOOK (OF->A) FEAR AND PASSION AND FLUSHED HER THIN CHEEK AND MADE HER LANGUID EYES FLASH +5683-32879-0012-2513: THANK YOU (RACHEL->RACHAEL) MY COUSIN RACHEL MY ONLY FRIEND +5683-32879-0013-2514: CHELFORD HAD A NOTE FROM MISTER (WYLDER->WILDER) THIS MORNING ANOTHER NOTE HIS COMING DELAYED AND SOMETHING OF HIS HAVING TO SEE SOME PERSON WHO (IS->WAS) ABROAD CONTINUED DORCAS AFTER A LITTLE PAUSE +5683-32879-0014-2515: YES SOMETHING EVERYTHING SAID RACHEL HURRIEDLY LOOKING FROWNINGLY AT A FLOWER WHICH SHE WAS TWIRLING IN HER FINGERS +5683-32879-0015-2516: YES SAID RACHEL +5683-32879-0016-2517: AND THE WAN ORACLE HAVING SPOKEN SHE (SATE->SAT) DOWN IN THE SAME SORT OF ABSTRACTION AGAIN BESIDE DORCAS AND SHE LOOKED FULL IN HER COUSIN'S EYES +5683-32879-0017-2518: OF MARK WYLDER I SAY THIS HIS NAME HAS BEEN FOR YEARS HATEFUL TO ME AND RECENTLY IT HAS BECOME FRIGHTFUL AND YOU WILL PROMISE ME SIMPLY THIS THAT YOU WILL NEVER ASK ME TO SPEAK AGAIN ABOUT HIM +5683-32879-0018-2519: IT IS AN ANTIPATHY AN ANTIPATHY I CANNOT GET OVER DEAR DORCAS YOU MAY THINK IT A MADNESS BUT DON'T BLAME ME +5683-32879-0019-2520: I HAVE VERY FEW TO LOVE ME NOW AND I THOUGHT YOU MIGHT LOVE ME AS I HAVE BEGUN TO LOVE YOU +5683-32879-0020-2521: AND SHE THREW HER ARMS ROUND HER COUSIN'S NECK AND BRAVE RACHEL AT LAST BURST INTO TEARS +5683-32879-0021-2522: DORCAS IN HER STRANGE WAY WAS MOVED +5683-32879-0022-2523: I LIKE YOU STILL RACHEL I'M SURE I'LL ALWAYS LIKE YOU +5683-32879-0023-2524: YOU RESEMBLE ME RACHEL YOU ARE FEARLESS AND INFLEXIBLE AND GENEROUS +5683-32879-0024-2525: YES RACHEL I DO LOVE YOU +5683-32879-0025-2526: THANK YOU DORCAS DEAR +61-70968-0000-2179: HE BEGAN A CONFUSED COMPLAINT AGAINST THE 
WIZARD WHO HAD VANISHED BEHIND THE CURTAIN ON THE LEFT +61-70968-0001-2180: (GIVE->KIVED) NOT SO EARNEST A MIND TO THESE (MUMMERIES->MEMORIES) CHILD +61-70968-0002-2181: A GOLDEN FORTUNE AND A HAPPY LIFE +61-70968-0003-2182: HE WAS LIKE UNTO MY FATHER IN A WAY AND YET WAS NOT MY FATHER +61-70968-0004-2183: ALSO THERE WAS A STRIPLING PAGE WHO TURNED INTO A MAID +61-70968-0005-2184: THIS WAS SO SWEET A LADY SIR AND IN SOME MANNER I DO THINK SHE DIED +61-70968-0006-2185: BUT THEN THE PICTURE WAS GONE AS QUICKLY AS IT CAME +61-70968-0007-2186: SISTER NELL DO YOU HEAR THESE MARVELS +61-70968-0008-2187: TAKE YOUR PLACE AND LET US SEE WHAT THE CRYSTAL CAN SHOW TO YOU +61-70968-0009-2188: LIKE AS NOT YOUNG MASTER THOUGH I AM AN OLD MAN +61-70968-0010-2189: FORTHWITH ALL RAN TO THE OPENING OF THE TENT TO SEE WHAT MIGHT BE AMISS BUT MASTER WILL WHO PEEPED OUT FIRST NEEDED NO MORE THAN ONE GLANCE +61-70968-0011-2190: HE GAVE WAY TO THE OTHERS VERY READILY AND RETREATED UNPERCEIVED BY THE SQUIRE AND MISTRESS FITZOOTH TO THE REAR OF THE TENT +61-70968-0012-2191: CRIES OF (A NOTTINGHAM A->UNNOTTINGHAM ARE) NOTTINGHAM +61-70968-0013-2192: BEFORE THEM FLED THE STROLLER AND HIS THREE SONS (CAPLESS->CAPLICE) AND TERRIFIED +61-70968-0014-2193: WHAT IS THE TUMULT AND RIOTING CRIED OUT THE SQUIRE AUTHORITATIVELY AND HE BLEW TWICE ON (A->THE) SILVER WHISTLE WHICH HUNG AT HIS BELT +61-70968-0015-2194: NAY WE (REFUSED->WERE FREEZED) THEIR REQUEST MOST POLITELY MOST NOBLE SAID THE LITTLE STROLLER +61-70968-0016-2195: AND THEN THEY BECAME VEXED AND WOULD HAVE SNATCHED YOUR PURSE FROM US +61-70968-0017-2196: I COULD NOT SEE MY BOY INJURED EXCELLENCE FOR BUT DOING HIS DUTY AS ONE OF CUMBERLAND'S SONS +61-70968-0018-2197: SO I DID PUSH THIS FELLOW +61-70968-0019-2198: IT IS ENOUGH SAID GEORGE GAMEWELL SHARPLY (AND->AS) HE TURNED UPON THE CROWD +61-70968-0020-2199: SHAME ON YOU CITIZENS CRIED HE I BLUSH FOR MY FELLOWS OF NOTTINGHAM +61-70968-0021-2200: SURELY WE CAN SUBMIT WITH GOOD GRACE +61-70968-0022-2201: TIS FINE FOR YOU TO TALK OLD MAN ANSWERED THE LEAN SULLEN APPRENTICE +61-70968-0023-2202: BUT I WRESTLED WITH THIS FELLOW AND DO KNOW THAT HE PLAYED UNFAIRLY IN THE SECOND BOUT +61-70968-0024-2203: SPOKE THE SQUIRE LOSING ALL (PATIENCE->PATIENT) AND IT WAS TO YOU THAT I GAVE ANOTHER (PURSE IN->PERSON) CONSOLATION +61-70968-0025-2204: COME TO ME MEN HERE HERE HE RAISED HIS VOICE STILL LOUDER +61-70968-0026-2205: THE STROLLERS TOOK THEIR PART IN IT WITH HEARTY ZEST NOW THAT THEY HAD SOME CHANCE OF BEATING OFF THEIR FOES +61-70968-0027-2206: ROBIN AND THE LITTLE TUMBLER BETWEEN THEM TRIED TO FORCE THE SQUIRE TO STAND BACK AND VERY VALIANTLY DID THESE TWO COMPORT THEMSELVES +61-70968-0028-2207: THE HEAD AND CHIEF OF THE RIOT (THE NOTTINGHAM APPRENTICE->IN AUTTINGHAM APPRENTICED) WITH CLENCHED FISTS THREATENED MONTFICHET +61-70968-0029-2208: THE SQUIRE HELPED TO THRUST THEM ALL IN AND ENTERED SWIFTLY HIMSELF +61-70968-0030-2209: NOW BE SILENT ON YOUR LIVES HE BEGAN BUT THE CAPTURED APPRENTICE SET UP AN INSTANT SHOUT +61-70968-0031-2210: SILENCE YOU (KNAVE->NAVE) CRIED MONTFICHET +61-70968-0032-2211: HE FELT FOR AND FOUND THE WIZARD'S BLACK CLOTH THE SQUIRE WAS QUITE OUT OF BREATH +61-70968-0033-2212: THRUSTING OPEN THE PROPER ENTRANCE OF THE TENT ROBIN SUDDENLY RUSHED FORTH WITH HIS BURDEN WITH A GREAT SHOUT +61-70968-0034-2213: A MONTFICHET A MONTFICHET GAMEWELL TO THE RESCUE +61-70968-0035-2214: TAKING ADVANTAGE OF THIS THE SQUIRE'S FEW MEN REDOUBLED THEIR EFFORTS AND ENCOURAGED BY (ROBIN'S->ROBINS) AND THE LITTLE 
STROLLER'S CRIES FOUGHT THEIR WAY TO HIM +61-70968-0036-2215: GEORGE MONTFICHET WILL NEVER FORGET THIS DAY +61-70968-0037-2216: WHAT IS YOUR NAME LORDING ASKED THE LITTLE STROLLER PRESENTLY +61-70968-0038-2217: ROBIN FITZOOTH +61-70968-0039-2218: AND MINE IS WILL STUTELEY SHALL WE BE COMRADES +61-70968-0040-2219: RIGHT WILLINGLY FOR BETWEEN US WE HAVE WON THE BATTLE ANSWERED ROBIN +61-70968-0041-2220: I LIKE YOU WILL YOU ARE THE SECOND WILL THAT I HAVE MET AND LIKED WITHIN TWO DAYS IS THERE A SIGN IN THAT +61-70968-0042-2221: (MONTFICHET->MARTFICHE) CALLED OUT FOR ROBIN TO GIVE HIM AN ARM +61-70968-0043-2222: FRIENDS SAID (MONTFICHET->MONTFICHE) FAINTLY TO THE WRESTLERS BEAR US ESCORT SO FAR AS THE SHERIFF'S HOUSE +61-70968-0044-2223: IT WILL NOT BE SAFE FOR YOU TO STAY HERE NOW +61-70968-0045-2224: PRAY FOLLOW US WITH MINE (AND->IN) MY LORD SHERIFF'S MEN +61-70968-0046-2225: NOTTINGHAM CASTLE WAS REACHED AND ADMITTANCE WAS DEMANDED +61-70968-0047-2226: MASTER MONCEUX THE SHERIFF OF NOTTINGHAM WAS MIGHTILY PUT ABOUT WHEN TOLD OF THE RIOTING +61-70968-0048-2227: AND HENRY MIGHT RETURN TO ENGLAND AT ANY MOMENT +61-70968-0049-2228: HAVE YOUR WILL CHILD IF THE BOY ALSO WILLS IT MONTFICHET ANSWERED FEELING TOO ILL TO OPPOSE ANYTHING VERY STRONGLY JUST THEN +61-70968-0050-2229: HE MADE AN EFFORT TO HIDE HIS CONDITION FROM THEM ALL AND ROBIN FELT HIS FINGERS TIGHTEN UPON HIS ARM +61-70968-0051-2230: (BEG->BEGGED) ME A ROOM OF THE SHERIFF CHILD QUICKLY +61-70968-0052-2231: BUT WHO IS THIS FELLOW PLUCKING AT YOUR (SLEEVE->STEVE) +61-70968-0053-2232: HE IS MY ESQUIRE EXCELLENCY RETURNED ROBIN WITH DIGNITY +61-70968-0054-2233: MISTRESS FITZOOTH HAD BEEN CARRIED OFF BY THE SHERIFF'S DAUGHTER AND HER MAIDS AS SOON AS THEY HAD ENTERED THE HOUSE SO THAT ROBIN ALONE HAD THE CARE OF MONTFICHET +61-70968-0055-2234: ROBIN WAS GLAD WHEN AT LENGTH THEY WERE LEFT TO THEIR OWN DEVICES +61-70968-0056-2235: THE WINE DID CERTAINLY BRING BACK THE COLOR TO THE SQUIRE'S CHEEKS +61-70968-0057-2236: THESE ESCAPADES ARE NOT FOR OLD (GAMEWELL LAD->GAME WELL LED) HIS DAY HAS COME TO TWILIGHT +61-70968-0058-2237: WILL YOU FORGIVE ME NOW +61-70968-0059-2238: (IT WILL->IT'LL) BE NO DISAPPOINTMENT TO ME +61-70968-0060-2239: NO THANKS I AM GLAD TO GIVE YOU SUCH EASY HAPPINESS +61-70968-0061-2240: YOU ARE A WORTHY LEECH WILL PRESENTLY WHISPERED ROBIN THE WINE HAS WORKED A MARVEL +61-70968-0062-2241: (AY->I) AND SHOW YOU SOME PRETTY TRICKS +61-70970-0000-2242: YOUNG FITZOOTH HAD BEEN COMMANDED TO HIS MOTHER'S CHAMBER SO SOON AS HE HAD COME OUT FROM HIS CONVERSE WITH THE SQUIRE +61-70970-0001-2243: THERE BEFELL AN ANXIOUS INTERVIEW MISTRESS FITZOOTH ARGUING FOR AND AGAINST THE SQUIRE'S PROJECT IN A BREATH +61-70970-0002-2244: MOST OF ALL ROBIN THOUGHT OF HIS FATHER WHAT WOULD HE COUNSEL +61-70970-0003-2245: IF FOR A WHIM YOU BEGGAR YOURSELF I CANNOT STAY YOU +61-70970-0004-2246: BUT TAKE IT WHILST I LIVE AND (WEAR MONTFICHET'S->WHERE MONTFICHE) SHIELD IN THE DAYS WHEN MY EYES CAN BE REJOICED BY SO BRAVE A SIGHT FOR YOU WILL (NE'ER->NEVER) DISGRACE OUR (SCUTCHEON->STUTTON) I WARRANT ME +61-70970-0005-2247: THE LAD HAD CHECKED HIM THEN +61-70970-0006-2248: NEVER THAT SIR HE HAD SAID +61-70970-0007-2249: HE WAS IN DEEP CONVERSE WITH THE CLERK AND ENTERED THE HALL HOLDING HIM BY THE ARM +61-70970-0008-2250: NOW TO BED BOY +61-70970-0009-2251: TIS LATE AND I GO MYSELF WITHIN A SHORT SPACE +61-70970-0010-2252: DISMISS YOUR SQUIRE ROBIN AND BID ME GOOD (E E N->EEN) +61-70970-0011-2253: AS ANY IN ENGLAND I WOULD SAY SAID GAMEWELL PROUDLY 
THAT IS IN HIS DAY +61-70970-0012-2254: YET HE WILL TEACH YOU A FEW TRICKS WHEN MORNING IS COME +61-70970-0013-2255: THERE WAS NO CHANCE TO ALTER HIS SLEEPING ROOM TO ONE NEARER TO GAMEWELL'S CHAMBER +61-70970-0014-2256: PRESENTLY HE CROSSED THE FLOOR OF HIS ROOM WITH DECIDED STEP +61-70970-0015-2257: WILL CRIED HE SOFTLY AND STUTELEY WHO HAD CHOSEN HIS COUCH ACROSS THE DOOR OF HIS YOUNG MASTER'S CHAMBER SPRANG UP AT ONCE IN ANSWER +61-70970-0016-2258: WE WILL GO OUT TOGETHER TO THE BOWER THERE IS A WAY DOWN TO THE COURT FROM MY WINDOW +61-70970-0017-2259: REST AND BE STILL UNTIL I WARN YOU +61-70970-0018-2260: THE HOURS PASSED WEARILY BY AND MOVEMENT COULD YET BE HEARD ABOUT THE HALL +61-70970-0019-2261: AT LAST ALL WAS QUIET AND BLACK IN THE COURTYARD OF GAMEWELL +61-70970-0020-2262: WILL WHISPERED ROBIN OPENING HIS DOOR AS HE SPOKE ARE YOU READY +61-70970-0021-2263: THEY THEN RENEWED THEIR JOURNEY AND UNDER THE BETTER LIGHT MADE A SAFE CROSSING OF THE STABLE ROOFS +61-70970-0022-2264: ROBIN ENTERED THE HUT DRAGGING THE UNWILLING ESQUIRE AFTER HIM +61-70970-0023-2265: BE NOT SO FOOLISH FRIEND SAID FITZOOTH CROSSLY +61-70970-0024-2266: THEY MOVED THEREAFTER CAUTIOUSLY ABOUT THE HUT GROPING BEFORE AND ABOUT THEM TO FIND SOMETHING TO SHOW THAT WARRENTON HAD FULFILLED HIS MISSION +61-70970-0025-2267: THEY WERE UPON THE VERGE OF AN OPEN TRAP IN THE FAR CORNER OF THE HUT AND STUTELEY HAD TRIPPED OVER THE EDGE OF THE REVERSED FLAP MOUTH OF THIS PIT +61-70970-0026-2268: (FITZOOTH'S->FITTOOTH'S) HAND RESTED AT LAST UPON THE TOP RUNG OF A LADDER AND SLOWLY THE TRUTH CAME TO HIM +61-70970-0027-2269: ROBIN CAREFULLY DESCENDED THE LADDER AND FOUND HIMSELF SOON UPON FIRM ROCKY GROUND +61-70970-0028-2270: STUTELEY WAS BY HIS SIDE IN A FLASH AND THEN THEY BOTH BEGAN FEELING ABOUT THEM TO ASCERTAIN THE SHAPE AND CHARACTER OF THIS VAULT +61-70970-0029-2271: FROM THE BLACKNESS BEHIND THE LIGHT THEY HEARD A VOICE WARRENTON'S +61-70970-0030-2272: SAVE ME MASTERS BUT YOU STARTLED ME RARELY +61-70970-0031-2273: CRIED HE WAVING THE (LANTHORN->LANTERN) BEFORE HIM TO MAKE SURE THAT THESE WERE NO GHOSTS IN FRONT OF HIM +61-70970-0032-2274: (ENQUIRED->INQUIRED) ROBIN WITH HIS (SUSPICIONS->SUSPICION) STILL UPON HIM +61-70970-0033-2275: TRULY SUCH A HORSE (SHOULD->WOULD) BE WORTH MUCH IN NOTTINGHAM FAIR +61-70970-0034-2276: NAY NAY LORDING ANSWERED WARRENTON WITH A HALF LAUGH +61-70970-0035-2277: WARRENTON SPOKE THUS WITH SIGNIFICANCE TO SHOW ROBIN THAT HE WAS NOT TO THINK (GEOFFREY'S->JEFFREY'S) CLAIMS TO THE ESTATE WOULD BE PASSED BY +61-70970-0036-2278: ROBIN FITZOOTH SAW THAT HIS DOUBTS OF WARRENTON HAD BEEN UNFAIR AND HE BECAME ASHAMED OF HIMSELF FOR (HARBORING->HARBOURING) THEM +61-70970-0037-2279: HIS TONES RANG PLEASANTLY ON WARRENTON'S EARS AND FORTHWITH (A->THE) GOOD FELLOWSHIP WAS HERALDED BETWEEN THEM +61-70970-0038-2280: THE OLD SERVANT TOLD HIM QUIETLY AS THEY CREPT BACK TO GAMEWELL THAT THIS (PASSAGE WAY->PASSAGEWAY) LED FROM THE HUT IN THE (PLEASANCE->PLEASANTS) TO SHERWOOD AND THAT (GEOFFREY->JEFFREY) FOR THE TIME WAS HIDING WITH THE OUTLAWS IN THE FOREST +61-70970-0039-2281: HE IMPLORES US TO BE DISCREET AS THE GRAVE IN THIS MATTER FOR IN SOOTH HIS LIFE IS IN THE HOLLOW OF OUR HANDS +61-70970-0040-2282: THEY (REGAINED->REGAIN) THEIR APARTMENT APPARENTLY WITHOUT DISTURBING THE HOUSEHOLD OF GAMEWELL +672-122797-0000-1529: OUT IN THE (WOODS->WOOD) STOOD A NICE LITTLE FIR TREE +672-122797-0001-1530: THE PLACE HE HAD WAS A VERY GOOD ONE THE SUN SHONE ON HIM AS TO FRESH AIR THERE WAS ENOUGH OF THAT AND 
ROUND HIM GREW MANY LARGE SIZED COMRADES PINES AS WELL AS (FIRS->FURS) +672-122797-0002-1531: HE DID NOT THINK OF THE WARM SUN AND OF THE FRESH AIR HE DID NOT CARE FOR THE LITTLE COTTAGE CHILDREN THAT RAN ABOUT (AND->IN) PRATTLED WHEN THEY WERE IN THE WOODS LOOKING FOR WILD STRAWBERRIES +672-122797-0003-1532: BUT THIS WAS WHAT THE TREE COULD NOT BEAR TO HEAR +672-122797-0004-1533: IN WINTER WHEN THE SNOW LAY GLITTERING ON THE GROUND A HARE WOULD OFTEN COME LEAPING ALONG AND JUMP RIGHT OVER THE LITTLE TREE +672-122797-0005-1534: OH THAT MADE HIM SO ANGRY +672-122797-0006-1535: TO GROW AND GROW TO GET OLDER AND BE TALL THOUGHT THE TREE THAT AFTER ALL IS THE MOST DELIGHTFUL THING IN THE WORLD +672-122797-0007-1536: IN AUTUMN THE (WOOD CUTTERS->WOODCUTTERS) ALWAYS CAME AND FELLED SOME OF THE LARGEST TREES +672-122797-0008-1537: THIS HAPPENED EVERY YEAR AND THE YOUNG FIR TREE THAT HAD NOW GROWN TO A VERY COMELY (SIZE->SIZED) TREMBLED AT THE SIGHT FOR THE MAGNIFICENT GREAT TREES FELL TO THE EARTH WITH NOISE AND CRACKING THE BRANCHES WERE LOPPED OFF AND THE TREES LOOKED LONG AND BARE THEY WERE HARDLY TO BE (RECOGNISED->RECOGNIZED) AND THEN THEY WERE (LAID IN->LADEN) CARTS AND THE HORSES DRAGGED THEM OUT OF THE WOOD +672-122797-0009-1538: HAVE YOU NOT MET (THEM ANYWHERE->THE MANY WHERE) +672-122797-0010-1539: REJOICE IN THY GROWTH SAID THE SUNBEAMS +672-122797-0011-1540: AND THEN WHAT HAPPENS THEN +672-122797-0012-1541: I WOULD FAIN KNOW IF I AM DESTINED FOR SO GLORIOUS A CAREER CRIED THE TREE REJOICING +672-122797-0013-1542: I AM NOW TALL AND MY BRANCHES SPREAD LIKE THE OTHERS THAT WERE CARRIED OFF LAST YEAR OH +672-122797-0014-1543: WERE I BUT ALREADY ON THE CART +672-122797-0015-1544: (WERE->WHERE) I IN THE WARM ROOM WITH ALL THE (SPLENDOR->SPLENDOUR) AND MAGNIFICENCE +672-122797-0016-1545: YES THEN SOMETHING BETTER SOMETHING STILL GRANDER WILL SURELY FOLLOW OR WHEREFORE SHOULD THEY THUS ORNAMENT ME +672-122797-0017-1546: SOMETHING BETTER SOMETHING STILL GRANDER MUST FOLLOW BUT WHAT +672-122797-0018-1547: REJOICE IN OUR PRESENCE SAID THE (AIR->HEIR) AND THE SUNLIGHT +672-122797-0019-1548: REJOICE IN THY OWN FRESH YOUTH +672-122797-0020-1549: BUT THE TREE DID NOT REJOICE AT ALL HE GREW AND GREW AND WAS GREEN BOTH WINTER AND SUMMER +672-122797-0021-1550: AND TOWARDS CHRISTMAS HE WAS ONE OF THE FIRST THAT WAS CUT DOWN +672-122797-0022-1551: THE AXE STRUCK DEEP INTO THE VERY PITH THE TREE FELL TO THE EARTH WITH A SIGH HE FELT A PANG IT WAS LIKE A SWOON HE COULD NOT THINK OF HAPPINESS FOR HE WAS SORROWFUL AT BEING SEPARATED FROM HIS HOME FROM THE PLACE WHERE HE HAD SPRUNG UP +672-122797-0023-1552: HE WELL KNEW THAT HE SHOULD NEVER SEE HIS DEAR OLD COMRADES THE LITTLE BUSHES AND FLOWERS AROUND HIM (ANYMORE->ANY MORE) PERHAPS NOT EVEN THE BIRDS +672-122797-0024-1553: THE DEPARTURE WAS NOT AT ALL AGREEABLE +672-122797-0025-1554: THE TREE ONLY CAME TO HIMSELF WHEN HE WAS UNLOADED IN A (COURT YARD->COURTYARD) WITH THE OTHER TREES AND HEARD A MAN SAY THAT ONE IS SPLENDID WE DON'T WANT THE OTHERS +672-122797-0026-1555: THERE TOO WERE LARGE EASY CHAIRS SILKEN SOFAS LARGE TABLES FULL OF PICTURE BOOKS AND FULL OF TOYS WORTH HUNDREDS AND HUNDREDS OF CROWNS AT LEAST THE CHILDREN SAID SO +672-122797-0027-1556: THE SERVANTS AS WELL AS THE YOUNG LADIES DECORATED IT +672-122797-0028-1557: THIS EVENING THEY ALL SAID +672-122797-0029-1558: HOW IT WILL SHINE THIS EVENING +672-122797-0030-1559: PERHAPS THE OTHER TREES FROM THE FOREST WILL COME TO LOOK AT ME +672-122797-0031-1560: IT BLAZED UP FAMOUSLY HELP HELP 
+672-122797-0032-1561: CRIED THE YOUNG LADIES AND THEY QUICKLY PUT OUT THE FIRE +672-122797-0033-1562: A STORY +672-122797-0034-1563: A STORY CRIED THE CHILDREN DRAWING A LITTLE FAT MAN TOWARDS THE TREE +672-122797-0035-1564: BUT I SHALL TELL ONLY ONE STORY +672-122797-0036-1565: HUMPY (DUMPY->DON'T BE) FELL DOWNSTAIRS AND YET HE MARRIED THE PRINCESS +672-122797-0037-1566: THAT'S THE WAY OF THE WORLD +672-122797-0038-1567: THOUGHT THE FIR TREE AND BELIEVED IT ALL BECAUSE THE MAN WHO TOLD THE STORY WAS SO GOOD LOOKING WELL WELL +672-122797-0039-1568: I WON'T TREMBLE TO MORROW THOUGHT THE FIR TREE +672-122797-0040-1569: AND THE WHOLE NIGHT THE TREE STOOD STILL AND IN DEEP THOUGHT +672-122797-0041-1570: IN THE MORNING THE SERVANT AND THE HOUSEMAID CAME IN +672-122797-0042-1571: BUT THEY DRAGGED HIM OUT OF THE ROOM AND UP THE STAIRS INTO THE LOFT AND HERE IN A DARK CORNER WHERE NO DAYLIGHT COULD ENTER THEY LEFT HIM +672-122797-0043-1572: WHAT'S THE MEANING OF THIS THOUGHT THE TREE +672-122797-0044-1573: AND HE LEANED AGAINST THE WALL LOST IN REVERIE +672-122797-0045-1574: TIME ENOUGH HAD HE TOO FOR HIS REFLECTIONS FOR DAYS AND NIGHTS PASSED ON AND NOBODY CAME UP AND WHEN AT LAST SOMEBODY DID COME IT WAS ONLY TO PUT SOME GREAT TRUNKS IN A CORNER OUT OF THE WAY +672-122797-0046-1575: TIS NOW WINTER OUT OF DOORS THOUGHT THE TREE +672-122797-0047-1576: HOW KIND MAN IS AFTER ALL +672-122797-0048-1577: IF IT ONLY WERE NOT SO DARK HERE AND SO TERRIBLY LONELY +672-122797-0049-1578: SQUEAK (SQUEAK->SQUI) +672-122797-0050-1579: THEY SNUFFED ABOUT THE FIR TREE AND RUSTLED AMONG THE BRANCHES +672-122797-0051-1580: I AM BY NO MEANS OLD SAID THE FIR TREE +672-122797-0052-1581: THERE'S MANY A ONE CONSIDERABLY OLDER THAN I AM +672-122797-0053-1582: THEY WERE SO EXTREMELY CURIOUS +672-122797-0054-1583: I KNOW NO SUCH PLACE SAID THE TREE +672-122797-0055-1584: AND THEN HE TOLD ALL ABOUT HIS YOUTH AND THE LITTLE MICE HAD NEVER HEARD THE LIKE BEFORE AND THEY LISTENED AND SAID +672-122797-0056-1585: SAID THE FIR TREE THINKING OVER WHAT HE HAD HIMSELF RELATED +672-122797-0057-1586: YES IN REALITY THOSE WERE HAPPY TIMES +672-122797-0058-1587: WHO (IS->IT'S) HUMPY DUMPY ASKED THE MICE +672-122797-0059-1588: ONLY THAT ONE ANSWERED THE TREE +672-122797-0060-1589: IT IS A VERY STUPID STORY +672-122797-0061-1590: DON'T YOU KNOW ONE ABOUT BACON AND TALLOW CANDLES CAN'T YOU TELL ANY LARDER STORIES +672-122797-0062-1591: NO SAID THE TREE +672-122797-0063-1592: THEN GOOD (BYE->BY) SAID THE RATS AND THEY WENT HOME +672-122797-0064-1593: AT LAST THE LITTLE MICE STAYED AWAY ALSO AND THE TREE SIGHED AFTER ALL IT WAS VERY PLEASANT WHEN THE SLEEK LITTLE MICE SAT ROUND ME AND LISTENED TO WHAT I TOLD THEM +672-122797-0065-1594: NOW THAT TOO IS OVER +672-122797-0066-1595: WHY ONE MORNING THERE CAME A QUANTITY OF PEOPLE AND SET TO WORK IN THE LOFT +672-122797-0067-1596: THE TRUNKS WERE MOVED THE TREE WAS PULLED OUT AND THROWN RATHER HARD IT IS TRUE DOWN ON THE FLOOR BUT A MAN DREW HIM TOWARDS THE STAIRS WHERE THE DAYLIGHT SHONE +672-122797-0068-1597: BUT IT WAS NOT THE FIR TREE THAT THEY MEANT +672-122797-0069-1598: IT WAS IN A CORNER THAT HE LAY AMONG WEEDS AND NETTLES +672-122797-0070-1599: THE GOLDEN STAR OF TINSEL WAS STILL ON THE TOP OF THE TREE AND GLITTERED IN THE SUNSHINE +672-122797-0071-1600: IN THE (COURT YARD->COURTYARD) SOME OF THE (MERRY->MERRIED) CHILDREN WERE PLAYING WHO HAD DANCED AT CHRISTMAS ROUND THE FIR TREE AND WERE SO GLAD AT THE SIGHT OF HIM +672-122797-0072-1601: AND THE GARDENER'S BOY CHOPPED THE TREE INTO 
SMALL PIECES THERE WAS A WHOLE HEAP LYING THERE +672-122797-0073-1602: THE WOOD FLAMED UP SPLENDIDLY UNDER THE LARGE BREWING COPPER AND (IT SIGHED->ITS SIDE) SO DEEPLY +672-122797-0074-1603: HOWEVER THAT WAS OVER NOW THE TREE GONE THE STORY AT AN END +6829-68769-0000-1858: KENNETH AND BETH REFRAINED FROM TELLING THE OTHER GIRLS OR UNCLE JOHN OF OLD WILL ROGERS'S VISIT BUT THEY GOT MISTER WATSON IN THE LIBRARY AND QUESTIONED HIM CLOSELY ABOUT THE PENALTY FOR FORGING A (CHECK->CHEQUE) +6829-68769-0001-1859: IT WAS A SERIOUS CRIME INDEED MISTER WATSON TOLD THEM AND TOM GATES BADE FAIR TO SERVE A LENGTHY TERM IN (*->THE) STATE'S PRISON AS A CONSEQUENCE OF HIS RASH ACT +6829-68769-0002-1860: I CAN'T SEE IT IN THAT LIGHT SAID THE OLD LAWYER +6829-68769-0003-1861: IT WAS A DELIBERATE THEFT FROM HIS EMPLOYERS TO PROTECT A GIRL HE LOVED +6829-68769-0004-1862: BUT THEY COULD NOT HAVE PROVEN A (CASE->GASE) AGAINST LUCY IF SHE WAS INNOCENT AND ALL THEIR THREATS OF ARRESTING HER WERE PROBABLY (*->A) MERE BLUFF +6829-68769-0005-1863: HE WAS (*->A) SOFT HEARTED AND IMPETUOUS SAID BETH AND BEING IN LOVE HE DIDN'T STOP TO COUNT THE COST +6829-68769-0006-1864: IF THE PROSECUTION WERE WITHDRAWN AND THE CASE SETTLED WITH THE VICTIM OF THE FORGED CHECK THEN THE YOUNG MAN WOULD BE ALLOWED HIS FREEDOM +6829-68769-0007-1865: BUT UNDER THE CIRCUMSTANCES I DOUBT (IF->OF) SUCH AN ARRANGEMENT COULD BE MADE +6829-68769-0008-1866: (FAIRVIEW WAS->FAIR VIEW'S) TWELVE MILES AWAY BUT BY TEN O'CLOCK THEY DREW UP AT THE COUNTY (JAIL->DRALE) +6829-68769-0009-1867: THEY WERE RECEIVED IN THE LITTLE OFFICE BY A MAN NAMED MARKHAM WHO WAS THE JAILER +6829-68769-0010-1868: WE WISH TO TALK WITH HIM ANSWERED KENNETH TALK +6829-68769-0011-1869: I'M RUNNING FOR REPRESENTATIVE ON THE REPUBLICAN TICKET SAID KENNETH QUIETLY +6829-68769-0012-1870: (OH->I'LL) SAY THAT'S DIFFERENT OBSERVED MARKHAM ALTERING HIS DEMEANOR +6829-68769-0013-1871: MAY WE (SEE->SEA) GATES AT ONCE ASKED KENNETH +6829-68769-0014-1872: THEY FOLLOWED THE JAILER ALONG (A->THE) SUCCESSION OF PASSAGES +6829-68769-0015-1873: SOMETIMES (I'M->ON) THAT (YEARNING->YEARNIN) FOR A SMOKE I'M NEARLY CRAZY (AN->AND) I (DUNNO->DON'T KNOW) WHICH IS (WORST->WORSE) DYIN ONE WAY OR (ANOTHER->THE OTHER) +6829-68769-0016-1874: HE UNLOCKED THE DOOR AND CALLED HERE'S VISITORS TOM +6829-68769-0017-1875: (WORSE->HORSE) TOM WORSE (N EVER->THAN ARROW) REPLIED THE JAILER GLOOMILY +6829-68769-0018-1876: (MISS DE GRAF->MISTER GRAFT) SAID KENNETH NOTICING THE BOY'S FACE CRITICALLY AS HE STOOD WHERE THE LIGHT FROM THE PASSAGE FELL UPON IT +6829-68769-0019-1877: SORRY WE HAVEN'T ANY RECEPTION ROOM IN THE JAIL +6829-68769-0020-1878: SIT DOWN PLEASE SAID GATES IN A CHEERFUL AND PLEASANT VOICE THERE'S A (BENCH->PENCH) HERE +6829-68769-0021-1879: A FRESH WHOLESOME LOOKING BOY WAS TOM GATES WITH STEADY GRAY EYES AN INTELLIGENT FOREHEAD BUT A SENSITIVE RATHER WEAK MOUTH +6829-68769-0022-1880: WE HAVE HEARD SOMETHING OF YOUR STORY SAID KENNETH AND (ARE->OUR) INTERESTED IN IT +6829-68769-0023-1881: I DIDN'T STOP TO THINK WHETHER IT WAS FOOLISH OR NOT I DID IT AND I'M GLAD I DID (*->IT) +6829-68769-0024-1882: OLD WILL IS A FINE FELLOW BUT POOR AND HELPLESS SINCE MISSUS ROGERS HAD HER ACCIDENT +6829-68769-0025-1883: THEN ROGERS WOULDN'T DO ANYTHING BUT LEAD HER AROUND AND WAIT UPON HER AND THE PLACE WENT TO RACK AND RUIN +6829-68769-0026-1884: HE SPOKE SIMPLY BUT PACED UP AND DOWN THE NARROW CELL IN FRONT OF THEM +6829-68769-0027-1885: WHOSE NAME DID YOU SIGN TO THE CHECK ASKED KENNETH +6829-68769-0028-1886: 
HE IS SUPPOSED TO SIGN ALL THE CHECKS OF THE CONCERN +6829-68769-0029-1887: IT'S A STOCK COMPANY (AND->IN) RICH +6829-68769-0030-1888: I WAS (BOOKKEEPER->BITKEEPER) SO IT WAS EASY TO GET A BLANK CHECK AND FORGE THE SIGNATURE +6829-68769-0031-1889: AS REGARDS MY ROBBING THE COMPANY I'LL SAY THAT I SAVED (THEM->HIM) A HEAVY LOSS ONE DAY +6829-68769-0032-1890: I DISCOVERED AND PUT OUT A FIRE THAT WOULD HAVE DESTROYED THE WHOLE PLANT BUT MARSHALL NEVER EVEN THANKED ME +6829-68769-0033-1891: IT WAS BETTER FOR HIM TO THINK THE GIRL UNFEELING THAN TO KNOW THE TRUTH +6829-68769-0034-1892: I'M GOING TO SEE MISTER (MARSHALL->MARSHAL) SAID KENNETH AND DISCOVER WHAT I CAN DO TO ASSIST YOU THANK YOU SIR +6829-68769-0035-1893: IT WON'T BE MUCH BUT I'M GRATEFUL TO FIND A FRIEND +6829-68769-0036-1894: THEY LEFT HIM THEN FOR THE JAILER ARRIVED TO UNLOCK THE DOOR AND ESCORT THEM TO THE OFFICE +6829-68769-0037-1895: I'VE SEEN LOTS OF THAT KIND IN MY DAY +6829-68769-0038-1896: AND IT RUINS A MAN'S DISPOSITION +6829-68769-0039-1897: HE LOOKED UP RATHER UNGRACIOUSLY BUT MOTIONED THEM TO BE SEATED +6829-68769-0040-1898: SOME GIRL HAS BEEN (*->IN) HERE TWICE TO INTERVIEW MY MEN AND I HAVE REFUSED TO ADMIT HER +6829-68769-0041-1899: I'M NOT ELECTIONEERING JUST NOW +6829-68769-0042-1900: OH WELL SIR WHAT ABOUT HIM +6829-68769-0043-1901: AND HE DESERVES A TERM (IN STATE'S->AND STATES) PRISON +6829-68769-0044-1902: IT HAS COST ME TWICE SIXTY DOLLARS (IN->AN) ANNOYANCE +6829-68769-0045-1903: I'LL PAY ALL THE (COSTS->COST) BESIDES +6829-68769-0046-1904: YOU'RE FOOLISH WHY SHOULD YOU DO ALL THIS +6829-68769-0047-1905: I HAVE MY OWN REASONS MISTER (MARSHALL->MARSHAL) +6829-68769-0048-1906: GIVE ME A (CHECK->CHEQUE) FOR A HUNDRED AND FIFTY AND I'LL TURN OVER TO YOU THE FORGED CHECK AND (QUASH->CAUSH) FURTHER PROCEEDINGS +6829-68769-0049-1907: HE DETESTED THE GRASPING DISPOSITION THAT WOULD (ENDEAVOR->ENDEAVOUR) TO TAKE ADVANTAGE OF HIS EVIDENT DESIRE TO HELP YOUNG GATES +6829-68769-0050-1908: BETH UNEASY AT HIS SILENCE NUDGED HIM +6829-68769-0051-1909: THERE WAS A GRIM SMILE OF AMUSEMENT ON HIS SHREWD FACE +6829-68769-0052-1910: HE MIGHT HAVE HAD THAT FORGED CHECK FOR THE FACE OF IT IF HE'D BEEN SHARP +6829-68769-0053-1911: AND TO THINK WE CAN SAVE ALL THAT MISERY AND DESPAIR BY THE PAYMENT OF A HUNDRED AND FIFTY DOLLARS +6829-68771-0000-1912: SO TO THE SURPRISE OF THE DEMOCRATIC COMMITTEE AND ALL HIS FRIENDS MISTER HOPKINS ANNOUNCED THAT HE WOULD OPPOSE (FORBES'S->FORTS) AGGRESSIVE CAMPAIGN WITH AN EQUAL AGGRESSIVENESS AND SPEND AS MANY DOLLARS IN DOING SO AS MIGHT BE NECESSARY +6829-68771-0001-1913: ONE OF MISTER HOPKINS'S FIRST TASKS AFTER CALLING HIS FAITHFUL (HENCHMEN->HENCHMAN) AROUND HIM WAS TO MAKE A CAREFUL (CANVASS->CANVAS) OF THE VOTERS OF HIS DISTRICT TO SEE WHAT WAS STILL TO BE ACCOMPLISHED +6829-68771-0002-1914: THE WEAK (KNEED->NEED) CONTINGENCY MUST BE STRENGTHENED AND FORTIFIED AND A COUPLE OF HUNDRED VOTES IN ONE WAY OR (ANOTHER->THE OTHER) SECURED FROM THE OPPOSITION +6829-68771-0003-1915: THE DEMOCRATIC COMMITTEE FIGURED OUT A WAY TO DO THIS +6829-68771-0004-1916: UNDER ORDINARY CONDITIONS REYNOLDS WAS SURE TO BE ELECTED BUT THE COMMITTEE PROPOSED TO SACRIFICE HIM IN ORDER TO (ELECT->ELEC) HOPKINS +6829-68771-0005-1917: THE ONLY THING NECESSARY WAS TO FIX SETH REYNOLDS AND THIS HOPKINS ARRANGED PERSONALLY +6829-68771-0006-1918: AND THIS WAS WHY KENNETH AND BETH DISCOVERED HIM CONVERSING WITH THE YOUNG WOMAN IN THE BUGGY +6829-68771-0007-1919: THE DESCRIPTION SHE GAVE OF THE COMING RECEPTION TO THE 
(WOMAN'S->WOMEN'S) POLITICAL LEAGUE WAS SO HUMOROUS AND DIVERTING THAT THEY WERE BOTH LAUGHING HEARTILY OVER THE THING WHEN THE YOUNG PEOPLE PASSED THEM AND THUS MISTER HOPKINS FAILED TO NOTICE WHO THE (OCCUPANTS->OCCUPANT) OF THE OTHER VEHICLE WERE +6829-68771-0008-1920: THESE WOMEN WERE FLATTERED BY THE ATTENTION OF THE YOUNG LADY AND HAD PROMISED TO ASSIST IN ELECTING MISTER FORBES +6829-68771-0009-1921: LOUISE HOPED FOR EXCELLENT RESULTS FROM THIS ORGANIZATION AND WISHED THE ENTERTAINMENT TO BE SO EFFECTIVE IN WINNING THEIR GOOD WILL THAT THEY WOULD WORK EARNESTLY FOR THE CAUSE IN WHICH THEY WERE ENLISTED +6829-68771-0010-1922: THE (FAIRVIEW->FAIR VIEW) BAND WAS ENGAGED TO DISCOURSE AS MUCH HARMONY AS IT COULD PRODUCE AND THE RESOURCES OF THE GREAT HOUSE WERE TAXED TO ENTERTAIN THE GUESTS +6829-68771-0011-1923: TABLES WERE SPREAD ON THE LAWN AND A DAINTY BUT SUBSTANTIAL REPAST WAS TO BE SERVED +6829-68771-0012-1924: THIS WAS THE FIRST OCCASION WITHIN A GENERATION WHEN SUCH AN ENTERTAINMENT HAD BEEN GIVEN AT ELMHURST AND THE ONLY ONE WITHIN THE MEMORY OF MAN (WHERE->WERE) THE NEIGHBORS AND COUNTRY PEOPLE HAD BEEN (*->THE) INVITED (GUESTS->GUEST) +6829-68771-0013-1925: THE (ATTENDANCE->ATTENDANTS) WAS UNEXPECTEDLY LARGE AND THE GIRLS WERE DELIGHTED FORESEEING GREAT SUCCESS FOR THEIR (FETE->FIGHT) +6829-68771-0014-1926: WE OUGHT TO HAVE MORE (ATTENDANTS->ATTENDANCE) BETH SAID LOUISE APPROACHING HER COUSIN +6829-68771-0015-1927: WON'T YOU RUN INTO THE HOUSE AND SEE IF MARTHA CAN'T SPARE ONE OR TWO MORE MAIDS +6829-68771-0016-1928: SHE WAS VERY FOND OF THE YOUNG LADIES WHOM SHE HAD KNOWN WHEN AUNT JANE WAS (THE->THEIR) MISTRESS HERE AND BETH WAS HER (ESPECIAL FAVORITE->SPECIAL FAVOURITE) +6829-68771-0017-1929: THE HOUSEKEEPER LED THE WAY (AND->IN) BETH FOLLOWED +6829-68771-0018-1930: FOR A MOMENT BETH STOOD STARING WHILE THE NEW MAID REGARDED HER WITH COMPOSURE AND (A->OF) SLIGHT SMILE UPON HER BEAUTIFUL FACE +6829-68771-0019-1931: SHE WAS DRESSED IN THE REGULATION COSTUME OF THE MAIDS AT ELMHURST A PLAIN BLACK GOWN WITH (*->A) WHITE APRON AND CAP +6829-68771-0020-1932: THEN SHE GAVE A LITTLE LAUGH AND REPLIED NO MISS BETH I'M ELIZABETH PARSONS +6829-68771-0021-1933: BUT IT CAN'T BE PROTESTED THE GIRL +6829-68771-0022-1934: I ATTEND TO THE HOUSEHOLD MENDING YOU KNOW AND CARE FOR THE LINEN +6829-68771-0023-1935: YOU SPEAK LIKE AN EDUCATED PERSON SAID BETH WONDERINGLY WHERE IS YOUR HOME +6829-68771-0024-1936: FOR THE FIRST TIME THE MAID SEEMED A LITTLE CONFUSED AND HER GAZE WANDERED FROM THE FACE OF HER VISITOR +6829-68771-0025-1937: SHE SAT DOWN IN A ROCKING CHAIR AND CLASPING HER HANDS IN HER LAP (ROCKED->ROCK) SLOWLY BACK AND FORTH I'M SORRY SAID BETH +6829-68771-0026-1938: ELIZA (PARSONS->PARSON) SHOOK HER HEAD +6829-68771-0027-1939: THEY THEY EXCITE ME IN SOME WAY AND I I CAN'T BEAR THEM YOU MUST EXCUSE ME +6829-68771-0028-1940: SHE EVEN SEEMED MILDLY AMUSED AT THE ATTENTION SHE ATTRACTED +6829-68771-0029-1941: BETH WAS A BEAUTIFUL GIRL THE HANDSOMEST OF THE THREE COUSINS BY FAR YET ELIZA SURPASSED HER (IN->A) NATURAL CHARM AND SEEMED WELL AWARE OF THE FACT +6829-68771-0030-1942: HER MANNER WAS NEITHER INDEPENDENT NOR ASSERTIVE BUT RATHER ONE OF WELL BRED COMPOSURE AND CALM RELIANCE +6829-68771-0031-1943: HER EYES WANDERED TO THE MAID'S HANDS +6829-68771-0032-1944: HOWEVER HER FEATURES AND FORM MIGHT REPRESS ANY EVIDENCE OF NERVOUSNESS THESE HANDS TOLD A DIFFERENT STORY +6829-68771-0033-1945: SHE ROSE QUICKLY TO HER FEET WITH AN IMPETUOUS GESTURE THAT MADE HER VISITOR CATCH HER BREATH 
+6829-68771-0034-1946: I WISH I KNEW MYSELF SHE CRIED FIERCELY +6829-68771-0035-1947: WILL YOU LEAVE ME ALONE IN MY OWN ROOM OR MUST I GO AWAY TO ESCAPE YOU +6829-68771-0036-1948: ELIZA CLOSED THE DOOR BEHIND HER WITH A DECIDED SLAM AND A KEY CLICKED IN THE LOCK +6930-75918-0000-0: CONCORD RETURNED TO ITS PLACE AMIDST THE TENTS +6930-75918-0001-1: THE ENGLISH (FORWARDED->FOEEDED) TO THE FRENCH BASKETS OF FLOWERS OF WHICH THEY HAD MADE A PLENTIFUL PROVISION TO GREET THE ARRIVAL OF THE YOUNG PRINCESS THE FRENCH IN RETURN INVITED THE ENGLISH TO A SUPPER WHICH WAS TO BE GIVEN THE NEXT DAY +6930-75918-0002-2: CONGRATULATIONS WERE POURED IN UPON THE PRINCESS EVERYWHERE DURING HER JOURNEY +6930-75918-0003-3: FROM THE RESPECT PAID HER ON ALL SIDES SHE SEEMED LIKE A QUEEN AND FROM THE ADORATION WITH WHICH SHE WAS TREATED BY TWO OR THREE SHE APPEARED AN OBJECT OF WORSHIP THE QUEEN MOTHER GAVE THE FRENCH THE MOST AFFECTIONATE RECEPTION FRANCE WAS HER NATIVE COUNTRY AND SHE HAD SUFFERED TOO MUCH UNHAPPINESS IN ENGLAND FOR ENGLAND TO HAVE MADE HER FORGET FRANCE +6930-75918-0004-4: SHE TAUGHT HER DAUGHTER THEN BY HER OWN AFFECTION FOR IT THAT LOVE FOR A COUNTRY WHERE THEY HAD BOTH BEEN HOSPITABLY RECEIVED AND WHERE A BRILLIANT FUTURE OPENED (BEFORE->FOR) THEM +6930-75918-0005-5: THE COUNT HAD THROWN HIMSELF BACK ON HIS SEAT LEANING HIS SHOULDERS AGAINST THE PARTITION OF THE TENT AND REMAINED THUS HIS FACE BURIED IN HIS HANDS WITH HEAVING CHEST AND RESTLESS LIMBS +6930-75918-0006-6: THIS HAS INDEED BEEN (A->AN) HARASSING DAY CONTINUED THE YOUNG MAN HIS EYES FIXED UPON HIS FRIEND +6930-75918-0007-7: YOU WILL BE FRANK WITH ME I ALWAYS AM +6930-75918-0008-8: CAN YOU IMAGINE WHY BUCKINGHAM HAS BEEN SO VIOLENT I SUSPECT +6930-75918-0009-9: IT IS YOU WHO ARE MISTAKEN RAOUL I HAVE READ HIS DISTRESS IN HIS EYES IN HIS EVERY GESTURE AND ACTION THE WHOLE DAY +6930-75918-0010-10: I CAN PERCEIVE LOVE CLEARLY ENOUGH +6930-75918-0011-11: I AM CONVINCED OF WHAT I SAY SAID THE COUNT +6930-75918-0012-12: IT IS ANNOYANCE THEN +6930-75918-0013-13: IN THOSE VERY TERMS I EVEN ADDED MORE +6930-75918-0014-14: BUT CONTINUED RAOUL NOT INTERRUPTED BY THIS MOVEMENT OF HIS FRIEND HEAVEN BE PRAISED THE FRENCH WHO ARE PRONOUNCED TO BE THOUGHTLESS AND INDISCREET RECKLESS EVEN ARE CAPABLE OF BRINGING A CALM AND SOUND JUDGMENT TO (BEAR ON->BERYL) MATTERS OF SUCH HIGH IMPORTANCE +6930-75918-0015-15: THUS IT IS THAT THE HONOR OF THREE IS SAVED OUR (COUNTRY'S->COUNTRY) OUR (MASTER'S->MASTERS) AND OUR OWN +6930-75918-0016-16: YES I NEED REPOSE MANY THINGS HAVE AGITATED ME TO DAY BOTH IN MIND AND BODY WHEN YOU RETURN TO MORROW I SHALL NO LONGER BE THE SAME MAN +6930-75918-0017-17: (BUT->BY) IN THIS FRIENDLY PRESSURE RAOUL COULD DETECT THE NERVOUS AGITATION OF A GREAT INTERNAL CONFLICT +6930-75918-0018-18: THE NIGHT WAS CLEAR STARLIT AND SPLENDID THE TEMPEST HAD PASSED AWAY AND THE SWEET INFLUENCES OF THE EVENING HAD RESTORED LIFE PEACE AND SECURITY EVERYWHERE +6930-75918-0019-19: UPON THE LARGE SQUARE IN FRONT OF THE HOTEL THE SHADOWS OF THE TENTS INTERSECTED BY THE GOLDEN MOONBEAMS FORMED AS IT WERE A HUGE MOSAIC OF JET AND YELLOW FLAGSTONES +6930-75918-0020-20: (BRAGELONNE->BRAGGELON) WATCHED FOR SOME TIME THE CONDUCT OF THE TWO LOVERS LISTENED TO THE LOUD AND UNCIVIL SLUMBERS OF MANICAMP WHO SNORED AS IMPERIOUSLY AS THOUGH HE WAS WEARING HIS BLUE AND GOLD INSTEAD OF HIS VIOLET SUIT +6930-76324-0000-21: GOLIATH MAKES ANOTHER DISCOVERY +6930-76324-0001-22: (THEY->THERE) WERE CERTAINLY NO (NEARER->NEAR) THE SOLUTION OF THEIR PROBLEM 
+6930-76324-0002-23: THE POOR LITTLE THINGS CRIED CYNTHIA THINK OF THEM HAVING BEEN TURNED TO THE WALL ALL THESE YEARS +6930-76324-0003-24: NOW WHAT (WAS->IS) THE SENSE OF IT (TWO->TOO) INNOCENT BABIES LIKE THAT +6930-76324-0004-25: BUT JOYCE HAD NOT BEEN LISTENING ALL AT ONCE SHE PUT DOWN HER CANDLE ON THE TABLE AND FACED HER COMPANION +6930-76324-0005-26: THE TWIN BROTHER DID SOMETHING SHE DIDN'T LIKE AND SHE TURNED HIS PICTURE TO THE WALL +6930-76324-0006-27: HERS HAPPENED TO BE (IN->ON) THE SAME FRAME TOO BUT SHE EVIDENTLY DIDN'T CARE ABOUT (THAT->IT) +6930-76324-0007-28: NOW WHAT HAVE YOU TO SAY CYNTHIA (SPRAGUE->SP) +6930-76324-0008-29: I THOUGHT WE WERE STUMPED AGAIN WHEN I FIRST SAW THAT PICTURE BUT IT'S BEEN OF SOME USE AFTER ALL +6930-76324-0009-30: DO YOU SUPPOSE THE MINIATURE WAS A COPY OF THE SAME THING +6930-76324-0010-31: (WHAT->WHEN) IN THE WORLD IS (THAT->IT) QUERIED JOYCE +6930-76324-0011-32: (THEY->MAY) WORRY ME TERRIBLY AND BESIDES I'D LIKE TO SEE WHAT THIS LOVELY FURNITURE LOOKS LIKE WITHOUT SUCH QUANTITIES OF DUST ALL OVER IT GOOD SCHEME (CYN->SIN) +6930-76324-0012-33: (WE'LL->WILL) COME IN HERE THIS AFTERNOON WITH OLD CLOTHES ON AND (HAVE->HALF) A REGULAR HOUSE CLEANING +6930-76324-0013-34: IT CAN'T HURT ANYTHING I'M SURE FOR WE WON'T DISTURB THINGS AT ALL +6930-76324-0014-35: THIS THOUGHT HOWEVER DID NOT ENTER THE HEADS OF THE ENTHUSIASTIC PAIR +6930-76324-0015-36: SMUGGLING THE HOUSE CLEANING PARAPHERNALIA INTO THE CELLAR WINDOW UNOBSERVED THAT AFTERNOON PROVED NO EASY TASK FOR CYNTHIA HAD ADDED A WHISK BROOM AND DUST PAN TO THE OUTFIT +6930-76324-0016-37: THE (LURE->LOWER) PROVED TOO MUCH FOR HIM AND HE CAME SPORTING AFTER IT AS (FRISKILY->FRISKLY) AS A YOUNG KITTEN MUCH TO CYNTHIA'S DELIGHT WHEN SHE CAUGHT SIGHT OF HIM +6930-76324-0017-38: OH LET HIM COME ALONG SHE URGED I DO LOVE TO SEE HIM ABOUT THAT OLD HOUSE +6930-76324-0018-39: HE MAKES IT SORT OF COZIER +6930-76324-0019-40: NOW LET'S DUST THE FURNITURE AND PICTURES +6930-76324-0020-41: YET LITTLE AS IT WAS IT HAD ALREADY MADE A VAST DIFFERENCE IN THE ASPECT OF THE ROOM +6930-76324-0021-42: SURFACE DUST AT LEAST HAD BEEN REMOVED AND THE FINE OLD FURNITURE GAVE A HINT OF ITS REAL ELEGANCE AND POLISH +6930-76324-0022-43: THEN SHE SUDDENLY REMARKED +6930-76324-0023-44: AND MY POCKET MONEY IS GETTING LOW AGAIN AND YOU HAVEN'T ANY LEFT AS USUAL +6930-76324-0024-45: THEY SAY ILLUMINATION BY (CANDLE LIGHT->CANDLELIGHT) IS THE PRETTIEST IN THE WORLD +6930-76324-0025-46: WHY IT'S GOLIATH AS USUAL THEY BOTH CRIED PEERING IN +6930-76324-0026-47: ISN'T HE THE GREATEST FOR GETTING INTO ODD CORNERS +6930-76324-0027-48: FORGETTING ALL THEIR WEARINESS THEY SEIZED THEIR CANDLES AND SCURRIED THROUGH THE HOUSE FINDING (AN->ON) OCCASIONAL PAPER TUCKED AWAY IN SOME ODD CORNER +6930-76324-0028-49: WELL I'M CONVINCED THAT THE BOARDED UP HOUSE MYSTERY HAPPENED NOT EARLIER THAN APRIL SIXTEENTH EIGHTEEN SIXTY ONE AND PROBABLY NOT MUCH LATER +6930-81414-0000-50: NO WORDS WERE SPOKEN NO LANGUAGE WAS UTTERED SAVE THAT OF WAILING AND HISSING AND THAT SOMEHOW WAS INDISTINCT AS IF IT EXISTED IN FANCY AND NOT IN REALITY +6930-81414-0001-51: I HEARD A NOISE BEHIND I TURNED AND SAW (KAFFAR->KAFFIR) HIS BLACK EYES SHINING WHILE IN HIS HAND HE HELD A GLEAMING KNIFE HE LIFTED IT ABOVE HIS HEAD AS IF TO STRIKE BUT I HAD THE STRENGTH OF TEN MEN AND I HURLED HIM FROM ME +6930-81414-0002-52: ONWARD SAID A DISTANT VOICE +6930-81414-0003-53: NO SOUND BROKE THE STILLNESS OF THE NIGHT +6930-81414-0004-54: THE STORY OF ITS EVIL INFLUENCE CAME BACK TO ME 
AND IN MY BEWILDERED CONDITION I WONDERED WHETHER THERE WAS NOT SOME TRUTH IN WHAT HAD BEEN SAID +6930-81414-0005-55: WHAT WAS THAT +6930-81414-0006-56: WHAT THEN A HUMAN HAND LARGE AND SHAPELY APPEARED DISTINCTLY ON THE SURFACE OF THE POND +6930-81414-0007-57: NOTHING MORE NOT EVEN THE WRIST TO WHICH IT MIGHT BE ATTACHED +6930-81414-0008-58: IT DID NOT BECKON OR INDEED MOVE AT ALL IT WAS AS STILL AS THE HAND OF DEATH +6930-81414-0009-59: I AWOKE TO CONSCIOUSNESS FIGHTING AT FIRST IT SEEMED AS IF I WAS FIGHTING WITH (A->THE) PHANTOM BUT GRADUALLY MY OPPONENT BECAME MORE REAL TO ME IT WAS (KAFFAR->KAFFIR) +6930-81414-0010-60: A SOUND OF VOICES A FLASH OF LIGHT +6930-81414-0011-61: A FEELING OF FREEDOM AND I WAS AWAKE WHERE +6930-81414-0012-62: SAID ANOTHER VOICE WHICH I RECOGNIZED AS VOLTAIRE'S (KAFFAR->KAFFIR) +6930-81414-0013-63: I HAD SCARCELY KNOWN WHAT I HAD BEEN SAYING OR DOING UP TO THIS TIME BUT AS HE SPOKE I LOOKED AT MY HAND +6930-81414-0014-64: IN THE LIGHT OF THE MOON I SAW A KNIFE RED WITH BLOOD AND MY HAND TOO WAS ALSO (DISCOLOURED->DISCOLORED) +6930-81414-0015-65: I DO NOT KNOW I AM DAZED BEWILDERED +6930-81414-0016-66: BUT THAT IS (KAFFAR'S->KAFFIR'S) KNIFE +6930-81414-0017-67: I KNOW HE HAD IT THIS VERY (EVENING->EVEN) +6930-81414-0018-68: I REMEMBER SAYING HAVE WE BEEN TOGETHER +6930-81414-0019-69: (VOLTAIRE->WHILE CHEER) PICKED UP SOMETHING FROM THE GROUND AND LOOKED AT IT +6930-81414-0020-70: I SAY YOU DO KNOW WHAT THIS MEANS AND YOU MUST TELL US +6930-81414-0021-71: A TERRIBLE THOUGHT FLASHED INTO MY MIND +6930-81414-0022-72: I HAD AGAIN BEEN ACTING UNDER THE INFLUENCE OF THIS MAN'S POWER +6930-81414-0023-73: PERCHANCE TOO (KAFFAR'S->KAFFIRS) DEATH MIGHT SERVE HIM IN GOOD STEAD +6930-81414-0024-74: MY TONGUE REFUSED TO ARTICULATE MY POWER OF SPEECH LEFT ME +6930-81414-0025-75: MY POSITION WAS TOO TERRIBLE +6930-81414-0026-76: MY OVERWROUGHT NERVES YIELDED AT LAST +6930-81414-0027-77: FOR SOME TIME AFTER THAT I REMEMBERED NOTHING DISTINCTLY +7021-79730-0000-1399: THE THREE MODES OF MANAGEMENT +7021-79730-0001-1400: TO SUPPOSE THAT THE OBJECT OF THIS WORK IS TO AID IN EFFECTING SUCH A SUBSTITUTION AS THAT IS ENTIRELY TO MISTAKE ITS NATURE AND DESIGN +7021-79730-0002-1401: BY REASON AND AFFECTION +7021-79730-0003-1402: AS THE (CHAISE->CHASE) DRIVES AWAY MARY STANDS BEWILDERED AND PERPLEXED ON THE (DOOR STEP->DOORSTEP) HER MIND IN A TUMULT OF EXCITEMENT IN WHICH HATRED OF THE DOCTOR DISTRUST AND SUSPICION OF HER MOTHER DISAPPOINTMENT VEXATION AND ILL HUMOR SURGE AND SWELL AMONG THOSE (DELICATE->DELEGATE) ORGANIZATIONS ON WHICH THE STRUCTURE AND DEVELOPMENT OF THE SOUL SO CLOSELY DEPEND DOING PERHAPS AN IRREPARABLE INJURY +7021-79730-0004-1403: THE MOTHER AS SOON AS THE (CHAISE->CHASE) IS SO FAR TURNED THAT MARY CAN NO LONGER WATCH THE EXPRESSION OF HER COUNTENANCE GOES AWAY FROM THE DOOR WITH A SMILE OF COMPLACENCY AND SATISFACTION (UPON->ON) HER FACE AT THE INGENUITY AND SUCCESS OF HER LITTLE ARTIFICE +7021-79730-0005-1404: SO YOU WILL BE A GOOD GIRL I KNOW AND NOT MAKE ANY TROUBLE BUT WILL STAY AT HOME CONTENTEDLY WON'T YOU +7021-79730-0006-1405: THE MOTHER IN MANAGING THE CASE IN THIS WAY (RELIES->REALIZE) PARTLY ON CONVINCING THE REASON OF THE CHILD AND PARTLY ON AN APPEAL TO HER AFFECTION +7021-79730-0007-1406: IF YOU SHOULD NOT BE A GOOD GIRL BUT SHOULD SHOW SIGNS OF MAKING US ANY TROUBLE I SHALL HAVE TO SEND YOU OUT SOMEWHERE TO THE BACK PART OF THE HOUSE UNTIL WE ARE GONE +7021-79730-0008-1407: BUT THIS LAST (SUPPOSITION->OPPOSITION) IS ALMOST ALWAYS UNNECESSARY FOR 
IF MARY HAS BEEN HABITUALLY MANAGED ON THIS PRINCIPLE SHE WILL NOT MAKE ANY TROUBLE +7021-79730-0009-1408: IT IS INDEED TRUE THAT THE IMPORTANCE OF TACT AND SKILL IN THE TRAINING OF THE YOUNG AND OF CULTIVATING THEIR REASON AND SECURING THEIR AFFECTION (CAN NOT->CANNOT) BE OVERRATED +7021-79740-0000-1384: TO SUCH PERSONS THESE INDIRECT MODES OF TRAINING CHILDREN IN HABITS OF SUBORDINATION TO THEIR WILL OR RATHER OF YIELDING TO THEIR INFLUENCE ARE SPECIALLY USEFUL +7021-79740-0001-1385: DELLA HAD A YOUNG SISTER NAMED MARIA AND A COUSIN WHOSE NAME WAS JANE +7021-79740-0002-1386: NOW (DELIA->GILLIA) CONTRIVED TO OBTAIN A GREAT INFLUENCE AND (ASCENDENCY->ASCENDANCY) OVER THE MINDS OF THE CHILDREN BY MEANS OF THESE DOLLS +7021-79740-0003-1387: TO GIVE AN IDEA OF THESE CONVERSATIONS I WILL REPORT ONE OF THEM IN FULL +7021-79740-0004-1388: YOU HAVE COME (ANDELLA ANDELLA->AMDELLA AND DELLA) WAS THE NAME OF JANE'S (DOLL->DAL) TO MAKE ROSALIE A VISIT +7021-79740-0005-1389: I AM VERY GLAD +7021-79740-0006-1390: I EXPECT YOU HAVE BEEN A VERY GOOD GIRL (ANDELLA->ANNE DELA) SINCE YOU WERE HERE LAST +7021-79740-0007-1391: THEN TURNING TO JANE SHE ASKED IN A SOMEWHAT ALTERED TONE HAS SHE BEEN A GOOD GIRL JANE +7021-79740-0008-1392: FOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLAYING UPON THE PIAZZA WITH BLOCKS AND OTHER PLAYTHINGS AND FINALLY HAD GONE INTO THE HOUSE LEAVING ALL THE THINGS ON THE FLOOR OF THE PIAZZA INSTEAD OF PUTTING THEM AWAY IN THEIR PLACES AS THEY OUGHT TO HAVE DONE +7021-79740-0009-1393: THEY WERE NOW PLAYING WITH THEIR DOLLS IN THE PARLOR +7021-79740-0010-1394: (DELIA->DELIGHT) CAME TO THE (PARLOR->PARLOUR) AND WITH AN AIR OF GREAT MYSTERY BECKONED THE CHILDREN ASIDE AND SAID TO THEM IN A WHISPER LEAVE (ANDELLA->ANDDELA) AND ROSALIE HERE AND DON'T SAY A WORD TO THEM +7021-79740-0011-1395: SO SAYING SHE LED THE WAY ON TIPTOE FOLLOWED BY THE CHILDREN OUT OF THE ROOM AND ROUND BY A CIRCUITOUS ROUTE TO THE PIAZZA THERE +7021-79740-0012-1396: SAID SHE POINTING TO THE PLAYTHINGS SEE +7021-79740-0013-1397: PUT THESE PLAYTHINGS ALL AWAY QUICK AND CAREFULLY AND WE WILL NOT LET THEM KNOW (ANY THING->ANYTHING) ABOUT YOUR LEAVING THEM OUT +7021-79740-0014-1398: AND THIS METHOD OF TREATING THE CASE WAS MUCH MORE EFFECTUAL IN MAKING THEM DISPOSED TO AVOID COMMITTING A SIMILAR FAULT ANOTHER TIME THAN ANY DIRECT REBUKES OR EXPRESSIONS OF DISPLEASURE ADDRESSED PERSONALLY TO THEM WOULD HAVE BEEN +7021-79759-0000-1378: NATURE OF THE EFFECT PRODUCED BY EARLY IMPRESSIONS +7021-79759-0001-1379: THAT IS COMPARATIVELY NOTHING +7021-79759-0002-1380: THEY ARE CHIEFLY FORMED FROM COMBINATIONS OF THE IMPRESSIONS MADE IN CHILDHOOD +7021-79759-0003-1381: VAST IMPORTANCE AND INFLUENCE OF THIS MENTAL FURNISHING +7021-79759-0004-1382: WITHOUT GOING TO ANY SUCH EXTREME AS THIS WE CAN EASILY SEE ON REFLECTION HOW VAST AN INFLUENCE ON THE IDEAS AND CONCEPTIONS AS WELL AS ON THE PRINCIPLES OF ACTION (IN->AND) MATURE YEARS MUST BE EXERTED BY THE NATURE AND CHARACTER OF THE IMAGES WHICH THE PERIOD OF INFANCY AND CHILDHOOD (IMPRESSES->IMPRESS) UPON THE MIND +7021-79759-0005-1383: THE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY VIOLENCE TO WHICH A FATHER SUBJECTS HIS SON MAY SOON PASS AWAY BUT THE MEMORY OF IT DOES NOT PASS AWAY WITH THE PAIN +7021-85628-0000-1409: BUT (ANDERS->ANDREWS) CARED NOTHING ABOUT THAT +7021-85628-0001-1410: HE MADE A BOW SO DEEP THAT HIS BACK CAME NEAR BREAKING AND HE WAS DUMBFOUNDED I CAN TELL YOU WHEN HE SAW IT WAS NOBODY BUT ANDERS +7021-85628-0002-1411: HE WAS SUCH A BIG BOY THAT HE WORE HIGH 
BOOTS AND CARRIED A JACK KNIFE +7021-85628-0003-1412: NOW THIS KNIFE WAS A SPLENDID ONE THOUGH HALF THE BLADE WAS GONE AND THE HANDLE WAS A LITTLE CRACKED AND ANDERS KNEW THAT ONE IS ALMOST A MAN AS SOON AS ONE HAS A (JACK KNIFE->JACKKNIFE) +7021-85628-0004-1413: YES WHY NOT THOUGHT (ANDERS->ANDREWS) +7021-85628-0005-1414: SEEING THAT I AM SO FINE I MAY AS WELL GO AND VISIT THE KING +7021-85628-0006-1415: I AM GOING TO THE COURT BALL ANSWERED (ANDERS->ANDRES) +7021-85628-0007-1416: AND SHE TOOK (ANDERS->ANDRE'S) HAND AND WALKED WITH HIM UP THE BROAD MARBLE STAIRS WHERE SOLDIERS WERE POSTED AT EVERY THIRD STEP AND THROUGH THE MAGNIFICENT HALLS WHERE COURTIERS IN SILK AND VELVET STOOD BOWING WHEREVER HE WENT +7021-85628-0008-1417: FOR LIKE AS NOT THEY MUST HAVE THOUGHT HIM A PRINCE WHEN THEY SAW HIS FINE CAP +7021-85628-0009-1418: AT THE FARTHER END OF THE LARGEST HALL A TABLE WAS SET WITH GOLDEN CUPS AND GOLDEN PLATES IN LONG ROWS +7021-85628-0010-1419: ON HUGE SILVER PLATTERS WERE PYRAMIDS OF TARTS AND CAKES AND RED WINE SPARKLED IN GLITTERING DECANTERS +7021-85628-0011-1420: THE PRINCESS SAT DOWN UNDER A BLUE CANOPY WITH BOUQUETS OF ROSES AND SHE LET (ANDERS->ANDRE) SIT IN A GOLDEN CHAIR BY HER SIDE +7021-85628-0012-1421: BUT YOU MUST NOT EAT WITH YOUR CAP ON YOUR HEAD SHE SAID AND WAS GOING TO TAKE IT OFF +7021-85628-0013-1422: THE PRINCESS CERTAINLY WAS BEAUTIFUL AND HE WOULD HAVE DEARLY LIKED TO BE KISSED BY HER BUT THE CAP WHICH HIS MOTHER HAD MADE HE WOULD NOT GIVE UP ON ANY CONDITION +7021-85628-0014-1423: HE ONLY SHOOK HIS HEAD +7021-85628-0015-1424: WELL BUT NOW SAID THE PRINCESS AND SHE FILLED HIS POCKETS WITH CAKES AND PUT HER OWN HEAVY GOLD CHAIN AROUND HIS NECK AND BENT DOWN AND KISSED HIM +7021-85628-0016-1425: THAT IS A VERY FINE CAP YOU HAVE HE SAID +7021-85628-0017-1426: SO IT IS SAID (ANDERS->ANDREWS) +7021-85628-0018-1427: AND IT IS MADE OF MOTHER'S BEST YARN AND SHE KNITTED IT HERSELF AND EVERYBODY WANTS TO GET IT AWAY FROM ME +7021-85628-0019-1428: WITH ONE JUMP ANDERS GOT OUT OF HIS CHAIR +7021-85628-0020-1429: HE DARTED LIKE AN ARROW THROUGH ALL THE HALLS DOWN ALL THE STAIRS AND ACROSS THE YARD +7021-85628-0021-1430: HE STILL HELD ON TO IT WITH BOTH HANDS AS HE RUSHED INTO HIS MOTHER'S COTTAGE +7021-85628-0022-1431: AND ALL HIS BROTHERS AND SISTERS STOOD ROUND AND LISTENED WITH THEIR MOUTHS OPEN +7021-85628-0023-1432: BUT WHEN HIS BIG BROTHER HEARD THAT HE HAD REFUSED TO GIVE HIS CAP FOR A KING'S GOLDEN CROWN HE SAID THAT (ANDERS->ANDREWS) WAS A STUPID +7021-85628-0024-1433: (ANDERS->ANDREW'S) FACE GREW RED +7021-85628-0025-1434: BUT HIS MOTHER HUGGED HIM CLOSE +7021-85628-0026-1435: NO MY LITTLE (SON->FUN) SHE SAID +7021-85628-0027-1436: IF YOU DRESSED IN SILK AND GOLD FROM TOP TO TOE YOU COULD NOT LOOK ANY NICER THAN IN YOUR LITTLE RED CAP +7127-75946-0000-467: AT THE CONCLUSION OF THE BANQUET WHICH WAS SERVED AT FIVE O'CLOCK THE KING ENTERED HIS CABINET WHERE HIS TAILORS WERE AWAITING HIM FOR THE PURPOSE OF TRYING ON THE CELEBRATED COSTUME REPRESENTING SPRING WHICH WAS THE RESULT OF SO MUCH IMAGINATION AND HAD COST SO MANY EFFORTS OF THOUGHT TO THE DESIGNERS AND ORNAMENT WORKERS OF THE COURT +7127-75946-0001-468: AH VERY WELL +7127-75946-0002-469: LET HIM COME IN THEN SAID THE KING AND AS IF COLBERT HAD BEEN LISTENING AT THE DOOR FOR THE PURPOSE OF KEEPING HIMSELF (AU COURANT->OCCOURANT) WITH THE CONVERSATION HE ENTERED AS SOON AS THE KING HAD PRONOUNCED HIS NAME TO THE TWO COURTIERS +7127-75946-0003-470: GENTLEMEN TO YOUR POSTS WHEREUPON SAINT (AIGNAN->DAN) AND 
(VILLEROY->VILLEROI) TOOK THEIR LEAVE +7127-75946-0004-471: CERTAINLY SIRE BUT I MUST HAVE MONEY TO DO THAT WHAT +7127-75946-0005-472: WHAT DO YOU MEAN INQUIRED (LOUIS->LOUISE) +7127-75946-0006-473: HE HAS GIVEN THEM WITH TOO MUCH GRACE NOT TO HAVE OTHERS STILL TO GIVE IF THEY ARE REQUIRED WHICH IS THE CASE AT THE PRESENT MOMENT +7127-75946-0007-474: IT IS NECESSARY THEREFORE THAT HE SHOULD COMPLY THE KING FROWNED +7127-75946-0008-475: DOES YOUR MAJESTY THEN NO LONGER BELIEVE THE DISLOYAL ATTEMPT +7127-75946-0009-476: NOT AT ALL YOU ARE ON THE CONTRARY MOST AGREEABLE TO ME +7127-75946-0010-477: YOUR MAJESTY'S PLAN THEN IN THIS AFFAIR IS +7127-75946-0011-478: YOU WILL TAKE THEM FROM MY PRIVATE TREASURE +7127-75946-0012-479: THE NEWS CIRCULATED WITH THE RAPIDITY OF LIGHTNING DURING ITS PROGRESS IT KINDLED EVERY VARIETY OF COQUETRY DESIRE AND WILD AMBITION +7127-75946-0013-480: THE KING HAD COMPLETED HIS (TOILETTE->TOILET) BY NINE O'CLOCK HE APPEARED IN AN OPEN CARRIAGE DECORATED WITH BRANCHES OF TREES AND FLOWERS +7127-75946-0014-481: THE QUEENS HAD TAKEN THEIR SEATS UPON A MAGNIFICENT (DIAS->DAIS) OR PLATFORM ERECTED UPON THE BORDERS OF THE LAKE IN A (THEATER->THEATRE) OF WONDERFUL ELEGANCE OF CONSTRUCTION +7127-75946-0015-482: SUDDENLY FOR THE PURPOSE OF RESTORING PEACE AND ORDER (SPRING->SPRANG) ACCOMPANIED BY HIS WHOLE COURT MADE HIS APPEARANCE +7127-75946-0016-483: THE SEASONS ALLIES OF SPRING FOLLOWED HIM CLOSELY TO FORM A QUADRILLE WHICH AFTER MANY WORDS OF MORE OR LESS FLATTERING IMPORT WAS THE COMMENCEMENT OF THE DANCE +7127-75946-0017-484: HIS LEGS THE BEST SHAPED AT COURT WERE DISPLAYED TO GREAT ADVANTAGE IN FLESH COLORED SILKEN HOSE (OF->A) SILK SO FINE AND SO TRANSPARENT THAT IT SEEMED ALMOST LIKE FLESH ITSELF +7127-75946-0018-485: THERE WAS SOMETHING IN HIS CARRIAGE WHICH RESEMBLED THE BUOYANT MOVEMENTS OF AN IMMORTAL AND HE DID NOT DANCE SO MUCH AS (SEEM->SEEMED) TO SOAR ALONG +7127-75946-0019-486: YES IT IS SUPPRESSED +7127-75946-0020-487: FAR FROM IT SIRE YOUR MAJESTY (HAVING->HEAVEN) GIVEN NO DIRECTIONS ABOUT IT THE MUSICIANS HAVE RETAINED IT +7127-75946-0021-488: YES SIRE AND READY DRESSED FOR THE BALLET +7127-75946-0022-489: SIRE HE SAID YOUR MAJESTY'S MOST DEVOTED SERVANT APPROACHES TO PERFORM A SERVICE ON THIS OCCASION WITH SIMILAR ZEAL THAT HE HAS ALREADY SHOWN ON THE FIELD OF BATTLE +7127-75946-0023-490: THE KING SEEMED ONLY PLEASED WITH EVERY ONE PRESENT +7127-75946-0024-491: MONSIEUR WAS THE ONLY ONE WHO DID NOT UNDERSTAND ANYTHING ABOUT THE MATTER +7127-75946-0025-492: THE BALLET BEGAN THE EFFECT WAS MORE THAN BEAUTIFUL +7127-75946-0026-493: WHEN THE MUSIC BY ITS BURSTS OF MELODY CARRIED AWAY THESE ILLUSTRIOUS DANCERS WHEN (THE->THIS) SIMPLE UNTUTORED PANTOMIME OF THAT PERIOD ONLY THE MORE NATURAL ON ACCOUNT OF THE VERY INDIFFERENT ACTING OF THE AUGUST ACTORS HAD REACHED ITS CULMINATING POINT OF TRIUMPH THE (THEATER->THEATRE) SHOOK WITH TUMULTUOUS APPLAUSE +7127-75946-0027-494: DISDAINFUL OF A SUCCESS OF WHICH MADAME SHOWED NO (ACKNOWLEDGEMENT->ACKNOWLEDGMENT) HE THOUGHT OF NOTHING BUT BOLDLY REGAINING THE MARKED PREFERENCE OF THE PRINCESS +7127-75946-0028-495: BY DEGREES ALL HIS HAPPINESS ALL HIS BRILLIANCY SUBSIDED INTO REGRET AND UNEASINESS SO THAT HIS LIMBS LOST THEIR POWER HIS ARMS HUNG HEAVILY BY HIS SIDES AND HIS HEAD DROOPED AS THOUGH HE WAS STUPEFIED +7127-75946-0029-496: THE KING WHO HAD FROM THIS MOMENT BECOME IN REALITY THE PRINCIPAL DANCER IN THE (QUADRILLE->QUADRILL) CAST A LOOK UPON HIS VANQUISHED RIVAL +7127-75947-0000-426: EVERY ONE COULD OBSERVE 
HIS AGITATION AND PROSTRATION A PROSTRATION WHICH WAS INDEED THE MORE REMARKABLE SINCE PEOPLE WERE NOT ACCUSTOMED TO SEE HIM WITH HIS ARMS HANGING LISTLESSLY BY HIS SIDE HIS HEAD BEWILDERED AND HIS EYES WITH ALL THEIR BRIGHT INTELLIGENCE (BEDIMMED->BEDEMNED) +7127-75947-0001-427: UPON THIS MADAME DEIGNED TO TURN HER EYES LANGUISHINGLY TOWARDS THE COMTE OBSERVING +7127-75947-0002-428: DO YOU THINK SO SHE REPLIED WITH INDIFFERENCE +7127-75947-0003-429: YES THE CHARACTER WHICH YOUR ROYAL HIGHNESS ASSUMED IS IN PERFECT HARMONY WITH YOUR OWN +7127-75947-0004-430: EXPLAIN YOURSELF +7127-75947-0005-431: I ALLUDE TO THE GODDESS +7127-75947-0006-432: THE PRINCESS INQUIRED NO +7127-75947-0007-433: SHE THEN ROSE HUMMING THE AIR TO WHICH SHE WAS PRESENTLY GOING TO DANCE +7127-75947-0008-434: THE ARROW PIERCED HIS HEART AND WOUNDED HIM MORTALLY +7127-75947-0009-435: A QUARTER OF AN HOUR AFTERWARDS HE RETURNED TO THE (THEATER->THEATRE) BUT IT WILL BE READILY BELIEVED THAT IT WAS ONLY A POWERFUL EFFORT OF REASON OVER HIS GREAT EXCITEMENT THAT ENABLED HIM TO GO BACK OR PERHAPS FOR LOVE IS THUS STRANGELY CONSTITUTED HE FOUND IT IMPOSSIBLE EVEN TO REMAIN MUCH LONGER SEPARATED FROM (THE->THEIR) PRESENCE OF ONE WHO HAD BROKEN HIS HEART +7127-75947-0010-436: WHEN SHE PERCEIVED THE YOUNG MAN SHE ROSE LIKE A WOMAN SURPRISED IN THE MIDST OF IDEAS SHE WAS DESIROUS OF CONCEALING FROM HERSELF +7127-75947-0011-437: REMAIN I IMPLORE YOU THE EVENING IS MOST LOVELY +7127-75947-0012-438: INDEED (AH->A) +7127-75947-0013-439: I REMEMBER NOW AND I CONGRATULATE MYSELF DO YOU LOVE ANY ONE +7127-75947-0014-440: FORGIVE ME I HARDLY KNOW WHAT I AM SAYING A THOUSAND TIMES FORGIVE ME MADAME WAS RIGHT QUITE RIGHT THIS BRUTAL EXILE HAS COMPLETELY TURNED MY BRAIN +7127-75947-0015-441: THERE CANNOT BE A DOUBT HE RECEIVED YOU KINDLY FOR IN FACT YOU RETURNED WITHOUT HIS PERMISSION +7127-75947-0016-442: OH MADEMOISELLE WHY HAVE I NOT A DEVOTED SISTER OR A TRUE FRIEND SUCH AS YOURSELF +7127-75947-0017-443: WHAT ALREADY HERE THEY SAID TO HER +7127-75947-0018-444: I HAVE BEEN HERE THIS QUARTER OF AN HOUR REPLIED LA (VALLIERE->VALLIERS) +7127-75947-0019-445: DID NOT THE DANCING AMUSE YOU NO +7127-75947-0020-446: NO MORE THAN THE DANCING +7127-75947-0021-447: LA (VALLIERE->VALLIERS) IS QUITE A POETESS SAID (TONNAY CHARENTE->TONIET CHART) +7127-75947-0022-448: I AM A WOMAN AND THERE ARE FEW LIKE ME WHOEVER LOVES ME FLATTERS ME WHOEVER FLATTERS ME PLEASES ME AND WHOEVER PLEASES WELL SAID MONTALAIS YOU DO NOT FINISH +7127-75947-0023-449: IT IS TOO DIFFICULT REPLIED MADEMOISELLE (DE TONNAY CHARENTE->DETONICHAUCH) LAUGHING LOUDLY +7127-75947-0024-450: (LOOK->LUCK) YONDER DO YOU NOT SEE THE MOON SLOWLY RISING SILVERING THE TOPMOST BRANCHES OF THE CHESTNUTS AND THE (OAKS->YOKES) +7127-75947-0025-451: EXQUISITE SOFT TURF OF THE WOODS THE HAPPINESS WHICH YOUR FRIENDSHIP CONFERS UPON ME +7127-75947-0026-452: WELL SAID MADEMOISELLE DE (TONNAY CHARENTE->TONECHAU AND) I ALSO THINK A GOOD DEAL BUT I TAKE CARE +7127-75947-0027-453: TO SAY NOTHING SAID MONTALAIS SO THAT WHEN MADEMOISELLE DE (TONNAY CHARENTE->TO NECHERANT) THINKS (ATHENAIS->ETHNEE) IS THE ONLY ONE WHO KNOWS IT +7127-75947-0028-454: QUICK QUICK THEN AMONG THE HIGH REED GRASS SAID MONTALAIS STOOP (ATHENAIS->ETHINAY) YOU ARE SO TALL +7127-75947-0029-455: THE YOUNG GIRLS HAD INDEED MADE THEMSELVES SMALL INDEED INVISIBLE +7127-75947-0030-456: SHE WAS HERE JUST NOW SAID THE COUNT +7127-75947-0031-457: YOU ARE POSITIVE THEN +7127-75947-0032-458: YES BUT PERHAPS I FRIGHTENED HER (IN->AND) WHAT WAY 
+7127-75947-0033-459: HOW IS IT LA (VALLIERE->VALLIERS) SAID MADEMOISELLE DE (TONNAY CHARENTE->TENACHALANT) THAT THE VICOMTE DE (BRAGELONNE->BRAGELON) SPOKE OF YOU AS LOUISE +7127-75947-0034-460: IT SEEMS THE KING WILL NOT CONSENT TO IT +7127-75947-0035-461: GOOD GRACIOUS (HAS->AS) THE KING ANY RIGHT TO INTERFERE IN MATTERS OF THAT KIND +7127-75947-0036-462: I GIVE MY CONSENT +7127-75947-0037-463: OH I AM SPEAKING SERIOUSLY REPLIED MONTALAIS AND MY OPINION IN THIS CASE IS QUITE AS GOOD AS THE KING'S I SUPPOSE IS IT NOT LOUISE +7127-75947-0038-464: LET US RUN THEN SAID ALL THREE AND GRACEFULLY LIFTING UP THE LONG SKIRTS OF THEIR SILK DRESSES THEY LIGHTLY RAN ACROSS THE OPEN SPACE BETWEEN THE LAKE AND THE THICKEST COVERT OF THE PARK +7127-75947-0039-465: IN FACT THE SOUND OF MADAME'S AND THE QUEEN'S CARRIAGES COULD BE HEARD IN THE DISTANCE UPON THE HARD DRY GROUND OF THE ROADS FOLLOWED BY THE (MOUNTED->MOUNTAIN) CAVALIERS +7127-75947-0040-466: IN THIS WAY THE FETE OF THE WHOLE COURT WAS A FETE ALSO FOR THE MYSTERIOUS INHABITANTS OF THE FOREST FOR CERTAINLY THE DEER IN THE BRAKE THE PHEASANT ON THE BRANCH THE FOX IN ITS HOLE WERE ALL LISTENING +7176-88083-0000-707: ALL ABOUT HIM WAS A TUMULT OF BRIGHT AND BROKEN COLOR SCATTERED IN BROAD SPLASHES +7176-88083-0001-708: THE (MERGANSER->MERGANCER) HAD A CRESTED HEAD OF IRIDESCENT GREEN BLACK A BROAD COLLAR OF LUSTROUS WHITE BLACK BACK BLACK AND WHITE WINGS WHITE BELLY SIDES FINELY PENCILLED (IN->AND) BLACK AND WHITE AND (A->HER) BREAST OF RICH CHESTNUT RED STREAKED WITH BLACK +7176-88083-0002-709: HIS FEET WERE RED HIS LONG NARROW BEAK WITH ITS (SAW->SOLID) TOOTHED EDGES AND SHARP HOOKED TIP WAS BRIGHT RED +7176-88083-0003-710: BUT HERE HE WAS AT A TERRIBLE DISADVANTAGE AS COMPARED WITH THE OWLS HAWKS AND EAGLES HE HAD NO RENDING CLAWS +7176-88083-0004-711: BUT SUDDENLY STRAIGHT AND SWIFT AS A DIVING CORMORANT HE SHOT DOWN INTO THE TORRENT AND DISAPPEARED BENEATH THE SURFACE +7176-88083-0005-712: ONCE FAIRLY A WING HOWEVER HE WHEELED AND MADE BACK HURRIEDLY FOR HIS PERCH +7176-88083-0006-713: IT MIGHT HAVE SEEMED THAT A TROUT OF THIS SIZE WAS A FAIRLY SUBSTANTIAL MEAL +7176-88083-0007-714: BUT SUCH WAS HIS KEENNESS THAT EVEN WHILE THE WIDE FLUKES OF HIS ENGORGED VICTIM WERE STILL STICKING OUT AT THE CORNERS OF HIS BEAK HIS FIERCE RED EYES WERE ONCE MORE PEERING DOWNWARD INTO THE TORRENT IN SEARCH OF FRESH PREY +7176-88083-0008-715: IN DESPAIR HE HURLED HIMSELF DOWNWARD TOO SOON +7176-88083-0009-716: THE GREAT HAWK (FOLLOWED->FOWLED) HURRIEDLY TO RETRIEVE HIS PREY FROM THE GROUND +7176-88083-0010-717: THE CAT GROWLED SOFTLY PICKED UP THE PRIZE IN HER JAWS AND TROTTED INTO THE BUSHES TO DEVOUR IT +7176-88083-0011-718: IN FACT HE HAD JUST FINISHED IT THE LAST OF THE TROUT'S TAIL HAD JUST VANISHED WITH A SPASM DOWN HIS STRAINED GULLET WHEN THE BAFFLED HAWK CAUGHT SIGHT OF HIM AND SWOOPED +7176-88083-0012-719: THE HAWK ALIGHTED ON THE DEAD BRANCH AND SAT UPRIGHT MOTIONLESS AS IF SURPRISED +7176-88083-0013-720: LIKE HIS UNFORTUNATE LITTLE COUSIN THE TEAL HE TOO HAD FELT THE FEAR OF DEATH SMITTEN INTO HIS HEART AND WAS HEADING DESPERATELY FOR THE REFUGE OF SOME DARK OVERHANGING BANK DEEP FRINGED WITH WEEDS WHERE THE DREADFUL EYE OF THE HAWK SHOULD NOT DISCERN HIM +7176-88083-0014-721: THE HAWK SAT UPON THE BRANCH AND WATCHED HIS QUARRY SWIMMING BENEATH THE SURFACE +7176-88083-0015-722: ALMOST INSTANTLY HE WAS FORCED TO THE TOP +7176-88083-0016-723: STRAIGHTWAY THE (HAWK->HOT) GLIDED FROM HIS PERCH AND DARTED AFTER HIM +7176-88083-0017-724: BUT AT THIS 
POINT IN THE RAPIDS IT WAS IMPOSSIBLE FOR HIM TO STAY DOWN +7176-88083-0018-725: BUT THIS FREQUENTER OF THE HEIGHTS OF AIR FOR ALL HIS SAVAGE VALOR WAS TROUBLED AT THE LEAPING WAVES AND THE TOSSING FOAM OF THESE MAD RAPIDS HE DID NOT UNDERSTAND THEM +7176-88083-0019-726: AS HE FLEW HIS DOWN REACHING CLUTCHING TALONS WERE NOT HALF A YARD ABOVE THE FUGITIVE'S HEAD +7176-88083-0020-727: WHERE THE (WAVES->WAY IS) FOR AN INSTANT SANK THEY CAME CLOSER BUT NOT QUITE WITHIN GRASPING REACH +7176-88083-0021-728: BUT AS BEFORE THE LEAPING WAVES OF THE RAPIDS WERE TOO MUCH FOR HIS PURSUER AND HE WAS ABLE TO FLAP HIS WAY ONWARD IN A CLOUD OF FOAM WHILE DOOM HUNG LOW ABOVE HIS HEAD YET HESITATED TO STRIKE +7176-88083-0022-729: THE HAWK EMBITTERED BY THE LOSS OF HIS FIRST QUARRY HAD BECOME AS DOGGED IN PURSUIT AS A WEASEL NOT TO BE SHAKEN OFF OR EVADED OR DECEIVED +7176-88083-0023-730: HE HAD A LOT OF LINE OUT AND THE PLACE WAS NONE TOO FREE FOR A LONG CAST BUT HE WAS IMPATIENT TO DROP HIS FLIES AGAIN ON THE SPOT WHERE THE BIG FISH WAS FEEDING +7176-88083-0024-731: THE LAST DROP FLY AS LUCK WOULD HAVE IT CAUGHT JUST IN THE CORNER OF THE HAWK'S ANGRILY OPEN BEAK HOOKING ITSELF FIRMLY +7176-88083-0025-732: AT THE SUDDEN SHARP STING OF IT THE GREAT BIRD TURNED HIS HEAD AND NOTICED FOR THE FIRST TIME THE FISHERMAN STANDING ON THE BANK +7176-88083-0026-733: THE DRAG UPON HIS BEAK AND THE LIGHT CHECK UPON HIS WINGS WERE INEXPLICABLE TO HIM AND APPALLING +7176-88083-0027-734: (THEN->THAN) THE LEADER PARTED FROM THE LINE +7176-92135-0000-661: HE IS A WELCOME FIGURE AT THE GARDEN PARTIES OF THE ELECT WHO ARE ALWAYS READY TO ENCOURAGE HIM BY ACCEPTING FREE SEATS FOR HIS PLAY ACTOR MANAGERS NOD TO HIM EDITORS ALLOW HIM TO CONTRIBUTE WITHOUT CHARGE TO A (SYMPOSIUM->SIMPOSIUM) ON THE PRICE OF GOLF BALLS +7176-92135-0001-662: IN SHORT HE BECOMES A PROMINENT FIGURE IN LONDON SOCIETY AND IF HE IS NOT CAREFUL SOMEBODY WILL SAY SO +7176-92135-0002-663: BUT EVEN THE UNSUCCESSFUL DRAMATIST HAS HIS MOMENTS +7176-92135-0003-664: YOUR PLAY MUST BE NOT MERELY A GOOD PLAY BUT A SUCCESSFUL ONE +7176-92135-0004-665: FRANKLY I CANNOT ALWAYS SAY +7176-92135-0005-666: BUT SUPPOSE YOU SAID I'M FOND OF WRITING MY PEOPLE ALWAYS SAY MY LETTERS HOME ARE GOOD ENOUGH FOR PUNCH +7176-92135-0006-667: I'VE GOT A LITTLE IDEA FOR A PLAY ABOUT A MAN AND A WOMAN AND ANOTHER WOMAN AND BUT PERHAPS (I'D->I) BETTER KEEP THE PLOT A SECRET FOR THE MOMENT +7176-92135-0007-668: ANYHOW IT'S JOLLY EXCITING AND I CAN DO THE DIALOGUE ALL RIGHT +7176-92135-0008-669: (LEND ME->LINEN) YOUR EAR FOR TEN MINUTES AND YOU SHALL LEARN JUST WHAT STAGECRAFT IS +7176-92135-0009-670: AND I SHOULD BEGIN WITH A SHORT HOMILY ON SOLILOQUY +7176-92135-0010-671: (HAM->HIM) TO BE OR NOT TO BE +7176-92135-0011-672: NOW THE OBJECT OF THIS (SOLILOQUY->SOLOQUY) IS PLAIN +7176-92135-0012-673: INDEED IRRESOLUTION (BEING->MEAN) THE KEYNOTE OF HAMLET'S SOLILOQUY A CLEVER PLAYER COULD TO SOME EXTENT INDICATE THE WHOLE THIRTY LINES BY A (SILENT->SILENCE) WORKING OF THE (JAW->JOB) BUT AT THE SAME TIME IT WOULD BE IDLE TO DENY THAT HE WOULD MISS THE FINER SHADES OF THE DRAMATIST'S MEANING +7176-92135-0013-674: WE MODERNS HOWEVER SEE THE ABSURDITY OF IT +7176-92135-0014-675: IF IT BE GRANTED FIRST THAT THE THOUGHTS OF A CERTAIN CHARACTER SHOULD BE KNOWN TO THE AUDIENCE AND SECONDLY THAT SOLILOQUY OR THE HABIT OF THINKING ALOUD IS IN OPPOSITION TO MODERN STAGE (TECHNIQUE->TYPENIQUE) HOW SHALL A SOLILOQUY BE AVOIDED WITHOUT DAMAGE TO THE PLAY +7176-92135-0015-676: AND SO ON TILL YOU GET 
(TO->*) THE END (WHEN OPHELIA->ONE OF WILLIAM) MIGHT SAY AH YES OR SOMETHING NON COMMITTAL OF THAT SORT +7176-92135-0016-677: THIS WOULD BE AN EASY WAY OF DOING IT BUT IT WOULD NOT BE THE BEST WAY FOR THE REASON THAT IT IS TOO EASY TO CALL ATTENTION TO ITSELF +7176-92135-0017-678: IN THE OLD BADLY MADE PLAY IT WAS FREQUENTLY NECESSARY FOR ONE OF THE CHARACTERS TO TAKE THE AUDIENCE INTO HIS CONFIDENCE +7176-92135-0018-679: IN THE MODERN WELL CONSTRUCTED PLAY HE SIMPLY RINGS UP AN IMAGINARY CONFEDERATE AND TELLS HIM WHAT HE IS GOING TO DO COULD ANYTHING BE MORE NATURAL +7176-92135-0019-680: I WANT DOUBLE NINE (HAL LO->HELLO) +7176-92135-0020-681: DOUBLE NINE (TWO->TO) THREE (ELSINORE->ELZINOR) DOUBLE (NINE->NOT) YES (HALLO->HELLO) IS THAT YOU HORATIO (HAMLET->PANLESS) SPEAKING +7176-92135-0021-682: I SAY I'VE BEEN (WONDERING->WANDERING) ABOUT THIS BUSINESS +7176-92135-0022-683: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER IN THE MIND TO SUFFER THE SLINGS AND ARROWS WHAT NO HAMLET SPEAKING +7176-92135-0023-684: YOU GAVE ME DOUBLE FIVE I WANT DOUBLE NINE (HALLO->HELLO) IS THAT YOU HORATIO HAMLET SPEAKING +7176-92135-0024-685: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER +7176-92135-0025-686: IT IS TO LET HAMLET IF THAT (HAPPEN->HAPPENED) TO BE THE NAME OF YOUR CHARACTER ENTER WITH A SMALL DOG (PET FALCON MONGOOSE->PETALKAN MONGOO'S) TAME BEAR OR WHATEVER ANIMAL IS MOST IN KEEPING WITH THE PART AND CONFIDE IN THIS ANIMAL SUCH SORROWS HOPES OR SECRET HISTORY AS THE AUDIENCE HAS GOT TO KNOW +7176-92135-0026-687: (ENTER->INTER) HAMLET WITH HIS FAVOURITE (BOAR HOUND->BOREHOUND) +7176-92135-0027-688: LADY (LARKSPUR STARTS->LARKSBURG START) SUDDENLY AND TURNS (TOWARDS->TOWARD) HIM +7176-92135-0028-689: (LARKSPUR BIT ME AGAIN->LARKSBURGH) THIS MORNING FOR THE THIRD TIME +7176-92135-0029-690: I WANT TO GET AWAY FROM IT ALL (SWOONS->SWOON) +7176-92135-0030-691: (ENTER->ENTERED) LORD ARTHUR (FLUFFINOSE->FLAPHANO'S) +7176-92135-0031-692: AND THERE YOU ARE YOU WILL OF COURSE APPRECIATE THAT THE UNFINISHED SENTENCES NOT ONLY SAVE TIME BUT ALSO MAKE THE MANOEUVRING VERY MUCH MORE NATURAL +7176-92135-0032-693: HOW YOU MAY BE WONDERING ARE YOU TO BEGIN YOUR MASTERPIECE +7176-92135-0033-694: RELAPSES INTO SILENCE FOR THE REST OF THE EVENING +7176-92135-0034-695: THE DUCHESS OF SOUTHBRIDGE TO LORD REGGIE (OH REGGIE->O READY) WHAT DID YOU SAY +7176-92135-0035-696: THEN LORD (TUPPENY WELL->TUPPENNY) WHAT ABOUT AUCTION +7176-92135-0036-697: THE CROWD DRIFTS OFF (LEAVING->LEAPING) THE HERO AND HEROINE ALONE IN THE MIDDLE OF THE STAGE AND THEN YOU CAN BEGIN +7176-92135-0037-698: THEN IS THE TIME TO INTRODUCE A MEAL ON THE STAGE +7176-92135-0038-699: A STAGE MEAL IS POPULAR BECAUSE IT (PROVES->PROVED) TO THE AUDIENCE THAT THE ACTORS EVEN WHEN CALLED CHARLES (HAWTREY->HALTREE) OR OWEN (NARES->NEAR'S) ARE REAL PEOPLE JUST LIKE YOU AND ME +7176-92135-0039-700: (TEA->T) PLEASE MATTHEWS BUTLER IMPASSIVELY +7176-92135-0040-701: (HOSTESS->HOSTES) REPLACES LUMP AND INCLINES EMPTY TEAPOT OVER TRAY FOR (A->*) MOMENT THEN (HANDS HIM->HANDSOME) A CUP PAINTED BROWN INSIDE (THUS DECEIVING->LUSTY SEATING) THE GENTLEMAN WITH THE TELESCOPE IN THE UPPER CIRCLE +7176-92135-0041-702: (RE ENTER->REINTER) BUTLER AND THREE FOOTMEN WHO (REMOVE->MOVED) THE TEA THINGS (HOSTESS TO->HOSTES TWO) GUEST +7176-92135-0042-703: (IN->AND) NOVELS THE HERO HAS OFTEN PUSHED HIS MEALS AWAY UNTASTED BUT NO (STAGE->STEED) HERO WOULD DO ANYTHING SO UNNATURAL AS THIS +7176-92135-0043-704: TWO BITES ARE MADE AND THE (BREAD->ABREAD) 
IS CRUMBLED WITH AN AIR OF GREAT EAGERNESS INDEED ONE FEELS THAT IN REAL LIFE THE (GUEST->GUESTS) WOULD CLUTCH HOLD OF THE FOOTMAN AND SAY HALF A (MO OLD->MOLE) CHAP I HAVEN'T NEARLY FINISHED BUT THE (ACTOR IS->ACTOR'S) BETTER (SCHOOLED->SCHOOL) THAN THIS +7176-92135-0044-705: BUT IT IS THE CIGARETTE WHICH CHIEFLY HAS BROUGHT THE MODERN DRAMA TO ITS PRESENT STATE OF PERFECTION +7176-92135-0045-706: LORD JOHN TAKING OUT GOLD (CIGARETTE->SICK RED) CASE FROM HIS LEFT HAND UPPER WAISTCOAT POCKET +7729-102255-0000-261: THE BOGUS LEGISLATURE NUMBERED THIRTY SIX MEMBERS +7729-102255-0001-262: THIS WAS AT THE MARCH ELECTION EIGHTEEN FIFTY FIVE +7729-102255-0002-263: THAT SUMMER'S (EMIGRATION->IMMIGRATION) HOWEVER BEING MAINLY FROM THE FREE STATES GREATLY (CHANGED->CHANGE) THE RELATIVE STRENGTH OF THE TWO PARTIES +7729-102255-0003-264: FOR GENERAL SERVICE THEREFORE REQUIRING NO SPECIAL EFFORT THE NUMERICAL STRENGTH OF THE FACTIONS WAS ABOUT EQUAL WHILE ON EXTRAORDINARY OCCASIONS THE TWO THOUSAND BORDER RUFFIAN RESERVE LYING A LITTLE FARTHER BACK FROM THE STATE LINE COULD AT ANY TIME EASILY TURN THE SCALE +7729-102255-0004-265: THE FREE STATE MEN HAD ONLY THEIR CONVICTIONS THEIR INTELLIGENCE THEIR COURAGE AND THE MORAL SUPPORT OF THE NORTH THE CONSPIRACY HAD ITS SECRET COMBINATION THE TERRITORIAL OFFICIALS THE LEGISLATURE THE BOGUS LAWS THE COURTS THE MILITIA OFFICERS THE PRESIDENT AND THE ARMY +7729-102255-0005-266: THIS WAS A FORMIDABLE ARRAY OF ADVANTAGES SLAVERY WAS PLAYING WITH LOADED DICE +7729-102255-0006-267: (COMING->COMMON) BY WAY OF THE MISSOURI RIVER TOWNS HE FELL FIRST AMONG BORDER RUFFIAN COMPANIONSHIP AND INFLUENCES AND PERHAPS HAVING HIS INCLINATIONS ALREADY (MOLDED->MOULDED) BY HIS WASHINGTON INSTRUCTIONS HIS EARLY IMPRESSIONS WERE DECIDEDLY ADVERSE TO THE FREE STATE CAUSE +7729-102255-0007-268: HIS RECEPTION SPEECH AT (WESTPORT->WESTWARD) IN WHICH HE MAINTAINED THE LEGALITY OF THE LEGISLATURE AND HIS DETERMINATION TO ENFORCE THEIR LAWS DELIGHTED HIS PRO SLAVERY AUDITORS +7729-102255-0008-269: ALL THE TERRITORIAL DIGNITARIES WERE PRESENT GOVERNOR (SHANNON->SHAN AND) PRESIDED JOHN CALHOUN THE SURVEYOR GENERAL MADE THE PRINCIPAL SPEECH A DENUNCIATION OF THE (ABOLITIONISTS->ABOLITIONIST) SUPPORTING THE (TOPEKA->TEPEAKA) MOVEMENT CHIEF JUSTICE (LECOMPTE->LE COMTE) DIGNIFIED THE OCCASION WITH APPROVING REMARKS +7729-102255-0009-270: ALL (DISSENT->DESCENT) ALL NON COMPLIANCE ALL HESITATION ALL MERE SILENCE EVEN WERE IN THEIR STRONGHOLD TOWNS LIKE (LEAVENWORTH->LEVIN WORTH) BRANDED AS ABOLITIONISM DECLARED TO BE HOSTILITY TO THE PUBLIC WELFARE AND PUNISHED WITH PROSCRIPTION PERSONAL VIOLENCE EXPULSION AND FREQUENTLY DEATH +7729-102255-0010-271: OF THE LYNCHINGS THE MOBS AND THE MURDERS IT WOULD BE IMPOSSIBLE EXCEPT IN A VERY EXTENDED WORK TO NOTE THE FREQUENT AND ATROCIOUS DETAILS +7729-102255-0011-272: THE PRESENT CHAPTERS CAN ONLY TOUCH UPON THE MORE SALIENT MOVEMENTS OF THE CIVIL WAR IN KANSAS WHICH HAPPILY (WERE->ARE) NOT SANGUINARY IF HOWEVER THE INDIVIDUAL AND MORE ISOLATED CASES OF BLOODSHED COULD BE DESCRIBED THEY WOULD SHOW A STARTLING AGGREGATE OF BARBARITY AND (*->A) LOSS OF LIFE FOR OPINION'S SAKE +7729-102255-0012-273: SEVERAL HUNDRED FREE STATE MEN PROMPTLY RESPONDED TO THE SUMMONS +7729-102255-0013-274: IT WAS IN FACT THE BEST WEAPON OF ITS DAY +7729-102255-0014-275: THE LEADERS OF THE CONSPIRACY BECAME DISTRUSTFUL OF THEIR POWER TO CRUSH THE TOWN +7729-102255-0015-276: ONE OF HIS MILITIA GENERALS SUGGESTED THAT THE GOVERNOR SHOULD REQUIRE THE OUTLAWS AT LAWRENCE AND 
ELSEWHERE TO SURRENDER THE (SHARPS->SHARP'S) RIFLES ANOTHER WROTE ASKING HIM TO CALL OUT THE GOVERNMENT TROOPS AT FORT (LEAVENWORTH->LEVINWORTH) +7729-102255-0016-277: THE GOVERNOR ON HIS PART BECOMING DOUBTFUL OF THE (LEGALITY->LOGALITY) OF EMPLOYING MISSOURI MILITIA TO ENFORCE KANSAS LAWS WAS ALSO EAGER TO SECURE THE HELP OF FEDERAL TROOPS +7729-102255-0017-278: SHERIFF JONES HAD HIS POCKETS ALWAYS FULL OF WRITS ISSUED IN THE SPIRIT OF PERSECUTION BUT WAS OFTEN BAFFLED BY THE SHARP WITS AND READY RESOURCES OF THE FREE STATE PEOPLE AND SOMETIMES DEFIED OUTRIGHT +7729-102255-0018-279: LITTLE BY LITTLE HOWEVER THE LATTER BECAME HEMMED AND BOUND IN THE MESHES OF THE VARIOUS DEVICES AND PROCEEDINGS WHICH THE TERRITORIAL OFFICIALS EVOLVED FROM THE BOGUS LAWS +7729-102255-0019-280: TO EMBARRASS THIS DAMAGING EXPOSURE JUDGE (LECOMPTE->LECOMTE) ISSUED A WRIT AGAINST THE EX GOVERNOR ON A FRIVOLOUS CHARGE OF CONTEMPT +7729-102255-0020-281: THE INCIDENT WAS NOT VIOLENT NOR EVEN DRAMATIC NO POSSE WAS SUMMONED NO FURTHER EFFORT MADE AND (REEDER->READER) FEARING PERSONAL VIOLENCE SOON FLED IN DISGUISE +7729-102255-0021-282: BUT THE AFFAIR WAS MAGNIFIED AS A CROWNING PROOF THAT THE FREE STATE MEN WERE (INSURRECTIONISTS->INSURRECTIONOUS) AND OUTLAWS +7729-102255-0022-283: FROM THESE AGAIN SPRANG BARRICADED AND FORTIFIED DWELLINGS CAMPS AND (SCOUTING->SCOUT) PARTIES FINALLY CULMINATING IN ROVING GUERRILLA BANDS HALF PARTISAN HALF PREDATORY +7729-102255-0023-284: (THEIR->THERE) DISTINCTIVE CHARACTERS HOWEVER DISPLAY ONE BROAD AND UNFAILING DIFFERENCE +7729-102255-0024-285: THE FREE STATE MEN CLUNG TO THEIR PRAIRIE TOWNS AND PRAIRIE RAVINES WITH ALL THE OBSTINACY AND COURAGE OF TRUE DEFENDERS OF THEIR HOMES AND FIRESIDES +7729-102255-0025-286: (THEIR->THERE) ASSUMED CHARACTER CHANGED WITH THEIR CHANGING OPPORTUNITIES OR NECESSITIES +7729-102255-0026-287: IN THE SHOOTING OF (SHERIFF->SHERIFF'S) JONES (IN->AND) LAWRENCE AND IN THE REFUSAL OF EX GOVERNOR (BEEDER->READER) TO ALLOW THE DEPUTY MARSHAL TO ARREST HIM THEY DISCOVERED GRAVE (OFFENSES->OFFENCES) AGAINST THE TERRITORIAL AND (*->THE) UNITED STATES LAWS +7729-102255-0027-288: FOOTNOTE (SUMNER->SUMMER) TO SHANNON MAY TWELFTH EIGHTEEN FIFTY SIX +7729-102255-0028-289: PRIVATE PERSONS WHO (HAD LEASED->AT LEAST) THE FREE STATE HOTEL VAINLY BESOUGHT THE VARIOUS AUTHORITIES TO (PREVENT->PRESENT) THE DESTRUCTION OF THEIR PROPERTY +7729-102255-0029-290: TEN DAYS WERE CONSUMED IN THESE NEGOTIATIONS BUT THE SPIRIT OF VENGEANCE REFUSED TO YIELD +7729-102255-0030-291: HE SUMMONED HALF A DOZEN CITIZENS TO JOIN HIS POSSE WHO FOLLOWED OBEYED AND ASSISTED HIM +7729-102255-0031-292: HE CONTINUED HIS PRETENDED SEARCH AND TO GIVE COLOR TO HIS ERRAND MADE (TWO ARRESTS->TO ARREST) +7729-102255-0032-293: THE FREE STATE HOTEL A STONE BUILDING IN DIMENSIONS FIFTY BY SEVENTY FEET THREE STORIES HIGH AND HANDSOMELY FURNISHED PREVIOUSLY OCCUPIED ONLY FOR LODGING ROOMS ON THAT DAY FOR THE FIRST TIME OPENED ITS TABLE ACCOMMODATIONS TO THE PUBLIC AND PROVIDED A FREE DINNER IN HONOR OF THE OCCASION +7729-102255-0033-294: AS HE HAD PROMISED TO PROTECT THE HOTEL THE REASSURED CITIZENS BEGAN TO LAUGH AT THEIR OWN FEARS +7729-102255-0034-295: TO THEIR SORROW THEY WERE SOON UNDECEIVED +7729-102255-0035-296: THE MILITARY FORCE PARTLY RABBLE PARTLY ORGANIZED HAD MEANWHILE MOVED INTO THE TOWN +7729-102255-0036-297: HE PLANTED A COMPANY BEFORE THE HOTEL AND DEMANDED A SURRENDER OF THE ARMS BELONGING TO THE FREE STATE MILITARY COMPANIES +7729-102255-0037-298: HALF AN HOUR LATER TURNING A DEAF EAR 
TO ALL REMONSTRANCE HE GAVE THE PROPRIETORS UNTIL FIVE O'CLOCK TO REMOVE THEIR FAMILIES AND PERSONAL PROPERTY FROM THE FREE STATE HOTEL +7729-102255-0038-299: (ATCHISON->ATTITSON) WHO HAD BEEN HARANGUING THE MOB PLANTED HIS TWO GUNS BEFORE THE BUILDING AND TRAINED THEM UPON IT +7729-102255-0039-300: THE INMATES BEING REMOVED AT THE APPOINTED HOUR A FEW CANNON BALLS WERE FIRED THROUGH THE STONE WALLS +7729-102255-0040-301: IN THIS INCIDENT CONTRASTING THE CREATIVE AND THE DESTRUCTIVE SPIRIT OF THE FACTIONS THE (EMIGRANT AID->IMMIGRANT AIDS) SOCIETY OF MASSACHUSETTS FINDS ITS MOST (HONORABLE->HONOURABLE) AND TRIUMPHANT VINDICATION +7729-102255-0041-302: THE WHOLE PROCEEDING WAS SO CHILDISH THE MISERABLE PLOT SO TRANSPARENT THE (OUTRAGE->OUTRAGED) SO GROSS AS TO BRING DISGUST TO THE BETTER CLASS OF BORDER RUFFIANS WHO WERE WITNESSES AND ACCESSORIES +7729-102255-0042-303: (RELOCATED->RE LOCATED) FOOTNOTE GOVERNOR ROBINSON BEING ON HIS WAY EAST THE STEAMBOAT ON WHICH HE WAS (TRAVELING->TRAVELLING) STOPPED AT LEXINGTON MISSOURI +7729-102255-0043-304: IN A FEW DAYS AN OFFICER CAME WITH A REQUISITION FROM GOVERNOR SHANNON AND TOOK THE PRISONER BY (LAND TO WESTPORT->LANDA WEST PORT) AND AFTERWARDS FROM THERE TO KANSAS CITY (AND LEAVENWORTH->IN LEVINWORTH) +7729-102255-0044-305: HERE HE WAS PLACED IN THE CUSTODY OF CAPTAIN MARTIN OF THE KICKAPOO RANGERS WHO PROVED A KIND JAILER AND MATERIALLY ASSISTED IN PROTECTING HIM FROM THE DANGEROUS INTENTIONS OF THE MOB WHICH AT THAT TIME HELD (LEAVENWORTH->LEVIN WORTH) UNDER (A->THE) REIGN OF TERROR +7729-102255-0045-306: CAPTAIN MARTIN SAID I SHALL GIVE YOU A PISTOL TO HELP PROTECT YOURSELF IF WORSE COMES TO WORST +7729-102255-0046-307: IN THE EARLY MORNING OF THE NEXT DAY MAY TWENTY NINTH A COMPANY OF DRAGOONS WITH ONE EMPTY SADDLE CAME DOWN FROM THE FORT AND WHILE THE PRO SLAVERY MEN STILL SLEPT THE PRISONER AND HIS ESCORT WERE ON THEIR WAY ACROSS THE PRAIRIES TO LECOMPTON IN THE CHARGE OF OFFICERS OF THE UNITED STATES ARMY +8224-274381-0000-1451: THOUGH THROWN INTO PRISON FOR THIS ENTERPRISE AND DETAINED SOME TIME HE WAS NOT DISCOURAGED BUT STILL CONTINUED BY HIS COUNTENANCE AND PROTECTION TO INFUSE SPIRIT INTO THE DISTRESSED ROYALISTS +8224-274381-0001-1452: AMONG OTHER PERSONS OF DISTINCTION WHO UNITED THEMSELVES TO HIM WAS LORD NAPIER OF (MERCHISTON->MURCHESON) SON OF THE FAMOUS INVENTOR OF THE (LOGARITHMS->LOGARTHEMS) THE PERSON TO WHOM THE TITLE OF A GREAT MAN IS MORE JUSTLY DUE THAN TO ANY OTHER WHOM HIS COUNTRY EVER PRODUCED +8224-274381-0002-1453: WHILE THE FORMER FORETOLD THAT THE SCOTTISH COVENANTERS WERE SECRETLY FORMING A UNION WITH THE ENGLISH PARLIAMENT AND (INCULCATED->INCALCATED) THE NECESSITY OF PREVENTING THEM BY SOME VIGOROUS UNDERTAKING THE LATTER STILL INSISTED THAT EVERY SUCH ATTEMPT WOULD PRECIPITATE THEM INTO MEASURES TO WHICH OTHERWISE THEY WERE NOT PERHAPS INCLINED +8224-274381-0003-1454: THE KING'S EARS WERE NOW OPEN TO MONTROSE'S (COUNSELS->COUNCILS) WHO PROPOSED NONE BUT THE BOLDEST AND MOST DARING AGREEABLY TO THE DESPERATE STATE OF THE ROYAL CAUSE IN SCOTLAND +8224-274381-0004-1455: FIVE HUNDRED MEN MORE WHO HAD BEEN LEVIED BY THE COVENANTERS WERE PERSUADED TO EMBRACE THE ROYAL CAUSE AND WITH THIS COMBINED FORCE HE HASTENED TO ATTACK LORD (ELCHO->ELKO) WHO LAY AT PERTH WITH AN ARMY OF SIX THOUSAND MEN ASSEMBLED UPON THE FIRST NEWS OF THE IRISH INVASION +8224-274381-0005-1456: DREADING THE SUPERIOR POWER OF ARGYLE WHO HAVING JOINED HIS VASSALS TO A FORCE LEVIED BY THE PUBLIC WAS APPROACHING WITH A CONSIDERABLE ARMY MONTROSE 
HASTENED (NORTHWARDS->NORTHWARD) IN ORDER TO ROUSE AGAIN THE MARQUIS OF (HUNTLEY->HUNTLY) AND THE GORDONS WHO HAVING BEFORE HASTILY TAKEN ARMS HAD BEEN INSTANTLY SUPPRESSED BY THE COVENANTERS +8224-274381-0006-1457: THIS NOBLEMAN'S CHARACTER THOUGH CELEBRATED FOR POLITICAL COURAGE AND CONDUCT WAS VERY LOW FOR MILITARY PROWESS AND AFTER SOME SKIRMISHES IN WHICH HE WAS WORSTED HE HERE ALLOWED MONTROSE TO ESCAPE HIM +8224-274381-0007-1458: BY QUICK MARCHES THROUGH THESE INACCESSIBLE MOUNTAINS THAT GENERAL FREED HIMSELF FROM THE SUPERIOR FORCES OF THE COVENANTERS +8224-274381-0008-1459: WITH THESE AND SOME (REENFORCEMENTS->REINFORCEMENTS) OF THE (ATHOLEMEN->ETHEL MEN) AND (MACDONALDS->MC DONALDS) WHOM HE HAD RECALLED MONTROSE FELL SUDDENLY UPON ARGYLE'S COUNTRY AND LET LOOSE UPON IT ALL THE RAGE OF WAR CARRYING OFF THE CATTLE BURNING THE HOUSES AND PUTTING THE INHABITANTS TO THE SWORD +8224-274381-0009-1460: THIS SEVERITY BY WHICH MONTROSE SULLIED HIS VICTORIES WAS THE RESULT OF PRIVATE ANIMOSITY AGAINST THE CHIEFTAIN AS MUCH AS OF ZEAL FOR THE PUBLIC CAUSE ARGYLE COLLECTING THREE THOUSAND MEN MARCHED IN QUEST OF THE ENEMY WHO HAD RETIRED WITH THEIR PLUNDER AND HE LAY AT (INNERLOCHY->INERLOCHY) SUPPOSING HIMSELF STILL AT A CONSIDERABLE DISTANCE FROM THEM +8224-274381-0010-1461: BY A QUICK AND UNEXPECTED MARCH MONTROSE HASTENED TO (INNERLOCHY->IN A LOCKY) AND PRESENTED HIMSELF IN ORDER OF BATTLE BEFORE THE SURPRISED BUT NOT (AFFRIGHTENED->A FRIGHTENED) COVENANTERS +8224-274381-0011-1462: HIS CONDUCT AND PRESENCE OF MIND IN THIS EMERGENCE APPEARED CONSPICUOUS +8224-274381-0012-1463: MONTROSE WEAK IN CAVALRY HERE LINED HIS TROOPS OF HORSE WITH INFANTRY AND AFTER PUTTING THE ENEMY'S HORSE TO ROUT FELL WITH UNITED FORCE UPON THEIR FOOT WHO WERE ENTIRELY CUT IN PIECES THOUGH WITH THE LOSS OF THE GALLANT LORD GORDON ON THE PART OF THE ROYALISTS +8224-274381-0013-1464: FROM THE SAME MEN NEW REGIMENTS AND NEW COMPANIES WERE FORMED DIFFERENT OFFICERS APPOINTED AND THE WHOLE MILITARY FORCE PUT INTO SUCH HANDS AS THE INDEPENDENTS COULD RELY ON +8224-274381-0014-1465: BESIDES MEMBERS OF PARLIAMENT WHO WERE EXCLUDED MANY OFFICERS UNWILLING TO SERVE UNDER THE NEW GENERALS THREW UP THEIR COMMISSIONS AND (UNWARILY->THEN WARILY) FACILITATED THE PROJECT OF PUTTING THE ARMY ENTIRELY INTO THE HANDS OF THAT FACTION +8224-274381-0015-1466: THOUGH THE DISCIPLINE OF THE FORMER PARLIAMENTARY ARMY WAS NOT CONTEMPTIBLE A MORE EXACT PLAN WAS INTRODUCED AND RIGOROUSLY EXECUTED BY THESE NEW COMMANDERS +8224-274381-0016-1467: (VALOR->VALO) INDEED WAS VERY GENERALLY DIFFUSED OVER THE ONE PARTY AS WELL AS THE OTHER DURING THIS PERIOD DISCIPLINE ALSO WAS ATTAINED BY THE FORCES OF THE PARLIAMENT BUT THE PERFECTION OF THE MILITARY ART IN CONCERTING THE GENERAL PLANS OF ACTION AND THE OPERATIONS OF THE FIELD SEEMS STILL ON BOTH SIDES TO HAVE BEEN IN A GREAT MEASURE WANTING +8224-274381-0017-1468: HISTORIANS AT LEAST PERHAPS FROM THEIR OWN IGNORANCE AND INEXPERIENCE HAVE NOT REMARKED ANY THING BUT A HEADLONG IMPETUOUS CONDUCT EACH PARTY HURRYING TO A BATTLE (WHERE->WERE) VALOR AND FORTUNE CHIEFLY DETERMINED THE SUCCESS +8224-274384-0000-1437: HE PASSED THROUGH HENLEY SAINT (ALBANS->ALBAN'S) AND CAME SO NEAR TO LONDON AS HARROW ON THE HILL +8224-274384-0001-1438: THE SCOTTISH GENERALS AND COMMISSIONERS AFFECTED GREAT SURPRISE ON THE APPEARANCE OF THE KING AND THOUGH THEY PAID HIM ALL THE EXTERIOR RESPECT DUE TO HIS DIGNITY THEY INSTANTLY SET A GUARD UPON HIM UNDER COLOR OF PROTECTION AND MADE HIM IN REALITY A PRISONER 
+8224-274384-0002-1439: THEY INFORMED THE ENGLISH PARLIAMENT OF THIS UNEXPECTED INCIDENT AND ASSURED THEM THAT THEY HAD ENTERED INTO NO PRIVATE TREATY WITH THE KING +8224-274384-0003-1440: OR HATH HE GIVEN US ANY GIFT +8224-274384-0004-1441: AND THE MEN OF ISRAEL ANSWERED THE MEN OF JUDAH AND SAID WE HAVE TEN PARTS IN THE KING AND WE HAVE ALSO MORE RIGHT IN DAVID THAN YE WHY THEN DID YE DESPISE US THAT OUR ADVICE SHOULD NOT BE FIRST HAD IN BRINGING BACK OUR KING +8224-274384-0005-1442: ANOTHER PREACHER AFTER REPROACHING HIM TO HIS FACE WITH HIS MISGOVERNMENT ORDERED THIS (PSALM->SUM) TO BE SUNG +8224-274384-0006-1443: THE KING STOOD UP AND CALLED FOR THAT PSALM WHICH BEGINS WITH THESE WORDS +8224-274384-0007-1444: HAVE MERCY LORD ON ME I PRAY FOR MEN WOULD ME DEVOUR +8224-274384-0008-1445: THE GOOD NATURED AUDIENCE IN PITY TO (FALLEN->FALL IN) MAJESTY SHOWED FOR ONCE GREATER DEFERENCE TO THE KING THAN TO THE MINISTER AND SUNG THE PSALM WHICH THE FORMER HAD CALLED FOR +8224-274384-0009-1446: THE PARLIAMENT AND THE SCOTS LAID THEIR PROPOSALS BEFORE THE KING +8224-274384-0010-1447: BEFORE THE SETTLEMENT OF TERMS THE ADMINISTRATION MUST BE POSSESSED ENTIRELY BY THE PARLIAMENTS OF BOTH KINGDOMS AND HOW INCOMPATIBLE THAT SCHEME WITH THE LIBERTY OF THE KING IS EASILY IMAGINED +8224-274384-0011-1448: THE ENGLISH IT IS EVIDENT HAD THEY NOT BEEN PREVIOUSLY ASSURED OF RECEIVING THE KING WOULD NEVER HAVE PARTED WITH SO CONSIDERABLE A SUM AND WHILE THEY WEAKENED THEMSELVES BY THE SAME MEASURE HAVE STRENGTHENED A PEOPLE WITH WHOM THEY MUST AFTERWARDS HAVE SO MATERIAL AN INTEREST TO DISCUSS +8224-274384-0012-1449: IF ANY STILL RETAINED RANCOR AGAINST HIM IN HIS PRESENT CONDITION THEY PASSED IN SILENCE WHILE HIS WELL WISHERS MORE GENEROUS THAN PRUDENT ACCOMPANIED HIS MARCH WITH TEARS WITH ACCLAMATIONS AND WITH PRAYERS FOR HIS SAFETY +8224-274384-0013-1450: HIS DEATH IN THIS CONJUNCTURE WAS A PUBLIC MISFORTUNE +8230-279154-0000-617: THE ANALYSIS OF KNOWLEDGE WILL OCCUPY US UNTIL THE END OF THE THIRTEENTH LECTURE AND IS THE MOST DIFFICULT PART OF OUR WHOLE ENTERPRISE +8230-279154-0001-618: WHAT IS CALLED PERCEPTION DIFFERS FROM SENSATION BY THE FACT THAT THE SENSATIONAL INGREDIENTS BRING UP HABITUAL ASSOCIATES IMAGES AND EXPECTATIONS OF THEIR USUAL (CORRELATES->COROTS) ALL OF WHICH ARE SUBJECTIVELY INDISTINGUISHABLE FROM THE SENSATION +8230-279154-0002-619: WHETHER OR NOT THIS PRINCIPLE IS LIABLE TO EXCEPTIONS (EVERYONE->EVERY ONE) WOULD AGREE THAT (IS->IT) HAS A BROAD MEASURE OF TRUTH THOUGH THE WORD EXACTLY MIGHT SEEM AN OVERSTATEMENT AND IT MIGHT SEEM MORE CORRECT TO SAY THAT IDEAS APPROXIMATELY REPRESENT IMPRESSIONS +8230-279154-0003-620: AND WHAT SORT OF EVIDENCE IS LOGICALLY POSSIBLE +8230-279154-0004-621: THERE IS NO LOGICAL IMPOSSIBILITY IN THE HYPOTHESIS THAT THE WORLD SPRANG INTO BEING FIVE MINUTES AGO EXACTLY AS IT THEN WAS WITH (A->THE) POPULATION THAT REMEMBERED A WHOLLY UNREAL PAST +8230-279154-0005-622: ALL THAT I AM DOING IS TO USE ITS LOGICAL TENABILITY AS A HELP IN THE ANALYSIS OF WHAT OCCURS WHEN WE REMEMBER +8230-279154-0006-623: THE (BEHAVIOURIST->BEHAVIORIST) WHO ATTEMPTS TO MAKE PSYCHOLOGY A RECORD OF (BEHAVIOUR->BEHAVIOR) HAS TO TRUST HIS MEMORY IN MAKING THE RECORD +8230-279154-0007-624: HABIT IS A CONCEPT INVOLVING THE OCCURRENCE OF SIMILAR EVENTS AT DIFFERENT TIMES IF THE (BEHAVIOURIST FEELS->BEHAVIOURISTS) CONFIDENT THAT THERE IS SUCH A PHENOMENON AS HABIT THAT CAN ONLY BE BECAUSE HE TRUSTS HIS MEMORY WHEN IT ASSURES HIM THAT THERE HAVE BEEN OTHER TIMES +8230-279154-0008-625: BUT 
I DO NOT THINK SUCH AN INFERENCE IS WARRANTED +8230-279154-0009-626: OUR CONFIDENCE OR LACK OF CONFIDENCE IN THE ACCURACY OF A MEMORY IMAGE MUST IN FUNDAMENTAL CASES BE BASED UPON A CHARACTERISTIC OF THE IMAGE ITSELF SINCE WE CANNOT EVOKE THE PAST BODILY AND COMPARE IT WITH THE PRESENT IMAGE +8230-279154-0010-627: WE SOMETIMES HAVE IMAGES THAT ARE BY NO MEANS PECULIARLY VAGUE WHICH YET WE DO NOT TRUST FOR EXAMPLE UNDER THE INFLUENCE OF FATIGUE WE MAY SEE A FRIEND'S FACE VIVIDLY AND CLEARLY BUT HORRIBLY DISTORTED +8230-279154-0011-628: SOME IMAGES LIKE SOME SENSATIONS FEEL VERY FAMILIAR WHILE OTHERS FEEL STRANGE +8230-279154-0012-629: FAMILIARITY IS A (FEELING->FILLING) CAPABLE OF DEGREES +8230-279154-0013-630: IN AN IMAGE OF A WELL KNOWN FACE FOR EXAMPLE SOME PARTS MAY FEEL MORE FAMILIAR THAN OTHERS WHEN THIS HAPPENS WE HAVE MORE BELIEF IN THE ACCURACY OF THE FAMILIAR PARTS THAN IN THAT OF THE UNFAMILIAR PARTS +8230-279154-0014-631: I COME NOW TO THE OTHER CHARACTERISTIC WHICH MEMORY IMAGES MUST HAVE IN ORDER TO ACCOUNT FOR OUR KNOWLEDGE OF THE PAST +8230-279154-0015-632: THEY MUST HAVE SOME CHARACTERISTIC WHICH MAKES US REGARD THEM AS REFERRING TO MORE OR LESS REMOTE PORTIONS OF THE PAST +8230-279154-0016-633: IN ACTUAL FACT THERE ARE DOUBTLESS VARIOUS FACTORS THAT CONCUR IN GIVING US THE FEELING OF GREATER OR LESS REMOTENESS IN SOME REMEMBERED EVENT +8230-279154-0017-634: THERE MAY BE A SPECIFIC FEELING WHICH COULD BE CALLED THE (FEELING->FILLING) OF PASTNESS ESPECIALLY WHERE IMMEDIATE MEMORY IS CONCERNED +8230-279154-0018-635: THERE IS OF COURSE A DIFFERENCE BETWEEN KNOWING THE TEMPORAL RELATION OF A REMEMBERED EVENT TO THE PRESENT AND KNOWING THE TIME ORDER OF TWO REMEMBERED EVENTS +8230-279154-0019-636: IT WOULD SEEM THAT ONLY RATHER RECENT EVENTS CAN BE PLACED AT ALL ACCURATELY BY MEANS OF FEELINGS GIVING THEIR TEMPORAL RELATION TO THE PRESENT BUT IT IS CLEAR THAT SUCH FEELINGS MUST PLAY AN ESSENTIAL PART IN THE PROCESS OF DATING REMEMBERED EVENTS +8230-279154-0020-637: IF WE HAD RETAINED THE SUBJECT OR ACT IN KNOWLEDGE THE WHOLE PROBLEM OF MEMORY WOULD HAVE BEEN COMPARATIVELY SIMPLE +8230-279154-0021-638: REMEMBERING HAS TO BE A PRESENT OCCURRENCE IN SOME WAY RESEMBLING OR RELATED TO WHAT IS REMEMBERED +8230-279154-0022-639: SOME POINTS MAY BE TAKEN AS FIXED AND SUCH AS ANY THEORY OF MEMORY MUST ARRIVE AT +8230-279154-0023-640: IN THIS CASE AS IN MOST OTHERS WHAT MAY BE TAKEN AS CERTAIN IN ADVANCE IS RATHER VAGUE +8230-279154-0024-641: THE FIRST OF OUR VAGUE BUT INDUBITABLE DATA IS THAT THERE IS KNOWLEDGE OF THE PAST +8230-279154-0025-642: WE MIGHT PROVISIONALLY THOUGH PERHAPS NOT QUITE CORRECTLY DEFINE MEMORY AS THAT WAY OF KNOWING ABOUT THE PAST WHICH HAS NO ANALOGUE IN OUR KNOWLEDGE OF THE FUTURE SUCH A DEFINITION WOULD AT LEAST SERVE TO MARK THE PROBLEM WITH WHICH WE ARE CONCERNED THOUGH SOME EXPECTATIONS MAY DESERVE TO RANK WITH MEMORY AS REGARDS IMMEDIACY +8230-279154-0026-643: THIS DISTINCTION IS VITAL TO THE UNDERSTANDING OF MEMORY BUT IT IS NOT SO EASY TO CARRY OUT IN PRACTICE AS IT IS TO DRAW IN THEORY +8230-279154-0027-644: A (GRAMOPHONE->GRAMMAPHONE) BY THE HELP OF SUITABLE RECORDS MIGHT RELATE TO US THE INCIDENTS OF ITS PAST AND PEOPLE ARE NOT SO DIFFERENT FROM (GRAMOPHONES->GRAMIPHONES) AS THEY LIKE TO BELIEVE +8230-279154-0028-645: I CAN SET TO WORK NOW TO REMEMBER THINGS I NEVER REMEMBERED BEFORE SUCH AS WHAT I HAD TO EAT FOR BREAKFAST THIS MORNING AND IT CAN HARDLY BE WHOLLY HABIT THAT ENABLES ME TO DO THIS +8230-279154-0029-646: THE FACT THAT A MAN CAN RECITE A 
POEM DOES NOT SHOW THAT HE REMEMBERS ANY PREVIOUS OCCASION ON WHICH HE HAS RECITED OR READ IT +8230-279154-0030-647: (SEMON'S->SIMMONS) TWO BOOKS MENTIONED IN AN EARLIER LECTURE DO NOT TOUCH KNOWLEDGE MEMORY AT ALL CLOSELY +8230-279154-0031-648: THEY GIVE LAWS ACCORDING TO WHICH IMAGES OF PAST OCCURRENCES COME INTO OUR MINDS BUT DO NOT DISCUSS OUR BELIEF THAT THESE IMAGES REFER TO PAST OCCURRENCES WHICH IS WHAT CONSTITUTES KNOWLEDGE MEMORY +8230-279154-0032-649: IT IS THIS THAT IS OF INTEREST TO THEORY OF KNOWLEDGE +8230-279154-0033-650: IT IS BY NO MEANS ALWAYS RELIABLE ALMOST EVERYBODY HAS AT SOME TIME EXPERIENCED THE WELL KNOWN ILLUSION THAT ALL THAT IS HAPPENING NOW HAPPENED BEFORE AT SOME TIME +8230-279154-0034-651: WHENEVER THE SENSE OF FAMILIARITY OCCURS WITHOUT A DEFINITE OBJECT IT (LEADS->LEAVES) US TO SEARCH THE ENVIRONMENT UNTIL WE ARE SATISFIED THAT WE HAVE FOUND THE APPROPRIATE OBJECT WHICH LEADS US TO THE JUDGMENT THIS IS FAMILIAR +8230-279154-0035-652: THUS NO KNOWLEDGE AS TO THE PAST IS TO BE DERIVED FROM THE FEELING OF FAMILIARITY ALONE +8230-279154-0036-653: A FURTHER STAGE IS RECOGNITION +8230-279154-0037-654: RECOGNITION IN THIS SENSE DOES NOT NECESSARILY INVOLVE MORE THAN A HABIT OF ASSOCIATION THE KIND OF OBJECT WE ARE SEEING AT THE MOMENT IS ASSOCIATED WITH THE WORD CAT OR WITH AN AUDITORY IMAGE OF PURRING OR WHATEVER OTHER CHARACTERISTIC WE MAY HAPPEN TO RECOGNIZE IN THE CAT OF THE MOMENT +8230-279154-0038-655: WE ARE OF COURSE IN FACT ABLE TO JUDGE WHEN WE RECOGNIZE AN OBJECT THAT WE HAVE SEEN IT BEFORE BUT THIS JUDGMENT IS SOMETHING OVER AND ABOVE RECOGNITION IN THIS FIRST SENSE AND MAY VERY PROBABLY BE IMPOSSIBLE TO ANIMALS THAT NEVERTHELESS HAVE THE EXPERIENCE OF RECOGNITION IN THIS FIRST SENSE OF THE WORD +8230-279154-0039-656: THIS KNOWLEDGE IS MEMORY IN ONE SENSE THOUGH IN ANOTHER IT IS NOT +8230-279154-0040-657: THERE ARE HOWEVER SEVERAL POINTS IN WHICH SUCH AN ACCOUNT OF RECOGNITION IS INADEQUATE TO BEGIN WITH IT MIGHT SEEM AT FIRST SIGHT MORE CORRECT TO DEFINE RECOGNITION AS I HAVE SEEN THIS BEFORE THAN AS THIS HAS EXISTED BEFORE +8230-279154-0041-658: THE DEFINITION OF MY EXPERIENCE IS DIFFICULT BROADLY SPEAKING IT IS EVERYTHING THAT IS CONNECTED WITH WHAT I AM EXPERIENCING NOW BY CERTAIN LINKS OF WHICH THE VARIOUS FORMS OF MEMORY ARE AMONG THE MOST IMPORTANT +8230-279154-0042-659: THUS IF I RECOGNIZE A THING THE OCCASION OF ITS PREVIOUS EXISTENCE IN VIRTUE OF WHICH I RECOGNIZE IT FORMS PART OF MY EXPERIENCE BY DEFINITION RECOGNITION WILL BE ONE OF THE MARKS BY WHICH MY EXPERIENCE IS SINGLED OUT FROM THE REST OF THE WORLD +8230-279154-0043-660: OF COURSE THE WORDS THIS HAS EXISTED BEFORE ARE (A->*) VERY INADEQUATE TRANSLATION OF WHAT ACTUALLY HAPPENS WHEN WE FORM A JUDGMENT OF RECOGNITION BUT THAT IS UNAVOIDABLE WORDS ARE FRAMED TO EXPRESS A LEVEL OF THOUGHT WHICH IS BY NO MEANS PRIMITIVE AND ARE QUITE INCAPABLE OF EXPRESSING SUCH AN ELEMENTARY OCCURRENCE AS RECOGNITION +8455-210777-0000-972: I (REMAINED->REMAIN) THERE ALONE FOR MANY HOURS BUT I MUST ACKNOWLEDGE THAT BEFORE I LEFT THE CHAMBERS I HAD GRADUALLY BROUGHT MYSELF TO LOOK AT THE MATTER IN ANOTHER LIGHT +8455-210777-0001-973: HAD (EVA CRASWELLER->EITHER CRUSWELLER) NOT BEEN GOOD LOOKING HAD JACK BEEN STILL AT COLLEGE HAD SIR KENNINGTON OVAL REMAINED IN ENGLAND HAD MISTER (BUNNIT AND->BUNNITT IN) THE BAR KEEPER NOT SUCCEEDED IN STOPPING MY CARRIAGE ON THE HILL SHOULD I HAVE SUCCEEDED IN ARRANGING FOR THE FINAL DEPARTURE OF MY OLD FRIEND +8455-210777-0002-974: ON ARRIVING AT HOME AT MY OWN 
RESIDENCE I FOUND THAT OUR SALON WAS FILLED WITH A BRILLIANT COMPANY +8455-210777-0003-975: AS I SPOKE I MADE HIM A GRACIOUS BOW AND I THINK I SHOWED HIM BY MY MODE OF ADDRESS THAT I DID NOT BEAR ANY GRUDGE AS TO MY INDIVIDUAL SELF +8455-210777-0004-976: I HAVE COME TO YOUR SHORES MISTER PRESIDENT WITH THE PURPOSE OF SEEING HOW THINGS ARE PROGRESSING IN THIS DISTANT QUARTER OF THE WORLD +8455-210777-0005-977: WE HAVE OUR LITTLE STRUGGLES HERE AS ELSEWHERE AND ALL THINGS CANNOT BE DONE BY ROSE WATER +8455-210777-0006-978: WE ARE QUITE SATISFIED NOW CAPTAIN (BATTLEAX->BATTLE AX) SAID MY WIFE +8455-210777-0007-979: QUITE SATISFIED SAID EVA +8455-210777-0008-980: THE LADIES IN COMPLIANCE WITH THAT SOFTNESS OF HEART WHICH IS THEIR CHARACTERISTIC ARE ON ONE SIDE AND THE MEN BY WHOM THE WORLD HAS TO BE MANAGED ARE ON THE OTHER +8455-210777-0009-981: NO DOUBT IN PROCESS OF TIME THE LADIES WILL FOLLOW +8455-210777-0010-982: THEIR (MASTERS->MASTER) SAID MISSUS (NEVERBEND->NEVERBAND) +8455-210777-0011-983: I DID NOT MEAN SAID CAPTAIN (BATTLEAX->BATTLE AXE) TO TOUCH UPON PUBLIC SUBJECTS AT SUCH A MOMENT AS THIS +8455-210777-0012-984: MISSUS NEVERBEND YOU MUST INDEED BE PROUD OF YOUR SON +8455-210777-0013-985: JACK HAD BEEN STANDING IN THE FAR CORNER OF THE ROOM TALKING TO EVA AND WAS NOW REDUCED TO SILENCE BY HIS PRAISES +8455-210777-0014-986: SIR KENNINGTON OVAL IS A VERY FINE PLAYER SAID MY WIFE +8455-210777-0015-987: I (AND->AM) MY WIFE AND SON AND THE TWO (CRASWELLERS->CRESTWELLERS) AND THREE OR FOUR OTHERS AGREED TO DINE ON BOARD THE SHIP ON THE NEXT +8455-210777-0016-988: THIS I FELT WAS PAID TO ME AS BEING PRESIDENT OF THE REPUBLIC AND I (ENDEAVOURED->ENDEAVORED) TO BEHAVE MYSELF WITH SUCH MINGLED HUMILITY AND DIGNITY AS MIGHT (BEFIT->BE FIT) THE OCCASION BUT I COULD NOT BUT FEEL THAT SOMETHING WAS WANTING TO THE SIMPLICITY OF MY ORDINARY LIFE +8455-210777-0017-989: MY WIFE ON THE SPUR OF THE MOMENT MANAGED TO GIVE THE (GENTLEMEN->GENTLEMAN) A VERY GOOD DINNER +8455-210777-0018-990: THIS SHE SAID WAS TRUE HOSPITALITY AND I AM NOT SURE THAT I DID NOT AGREE WITH (HER->THAT) +8455-210777-0019-991: THEN THERE WERE THREE OR FOUR LEADING MEN OF THE COMMUNITY WITH THEIR WIVES WHO WERE FOR THE MOST PART THE FATHERS AND MOTHERS OF THE YOUNG LADIES +8455-210777-0020-992: OH YES SAID JACK AND I'M NOWHERE +8455-210777-0021-993: BUT I MEAN TO HAVE MY INNINGS BEFORE LONG +8455-210777-0022-994: OF WHAT MISSUS (NEVERBEND HAD GONE THROUGH IN PROVIDING BIRDS->NEVERS) BEASTS AND FISHES NOT TO TALK OF TARTS AND JELLIES FOR THE DINNER OF THAT DAY NO ONE BUT MYSELF CAN HAVE ANY IDEA BUT IT MUST BE ADMITTED THAT SHE ACCOMPLISHED HER TASK WITH THOROUGH SUCCESS +8455-210777-0023-995: WE SAT WITH THE (OFFICERS->OFFICER) SOME LITTLE TIME AFTER DINNER AND THEN WENT ASHORE +8455-210777-0024-996: HOW MUCH OF EVIL OF REAL ACCOMPLISHED EVIL HAD THERE NOT OCCURRED TO ME DURING THE LAST FEW DAYS +8455-210777-0025-997: WHAT COULD I DO NOW BUT JUST LAY MYSELF DOWN AND DIE +8455-210777-0026-998: AND THE DEATH OF WHICH I DREAMT COULD NOT ALAS +8455-210777-0027-999: WHEN THIS CAPTAIN SHOULD HAVE TAKEN HIMSELF AND HIS VESSEL BACK TO ENGLAND I WOULD RETIRE TO A SMALL FARM WHICH I POSSESSED AT THE (FARTHEST->FURTHEST) SIDE OF THE ISLAND AND THERE IN SECLUSION WOULD I END MY DAYS +8455-210777-0028-1000: JACK WOULD BECOME EVA'S HAPPY HUSBAND AND WOULD REMAIN AMIDST THE HURRIED DUTIES OF THE EAGER WORLD +8455-210777-0029-1001: THINKING OF ALL THIS I WENT TO SLEEP +8455-210777-0030-1002: MISTER NEVERBEND BEGAN THE CAPTAIN AND I 
(OBSERVED->OBSERVE) THAT UP TO THAT MOMENT HE HAD GENERALLY ADDRESSED ME AS PRESIDENT IT CANNOT BE DENIED THAT WE HAVE COME HERE ON AN UNPLEASANT MISSION +8455-210777-0031-1003: YOU HAVE RECEIVED US WITH ALL THAT COURTESY AND HOSPITALITY FOR WHICH YOUR CHARACTER (*->AND) IN ENGLAND (STANDS->STAND) SO HIGH +8455-210777-0032-1004: IT IS A DUTY SAID I +8455-210777-0033-1005: BUT YOUR POWER IS SO SUPERIOR TO ANY THAT I CAN ADVANCE AS TO MAKE US HERE FEEL THAT THERE IS NO DISGRACE IN YIELDING TO IT +8455-210777-0034-1006: NOT A DOUBT BUT HAD YOUR FORCE BEEN ONLY DOUBLE OR (TREBLE->TROUBLE) OUR OWN I SHOULD HAVE FOUND IT MY DUTY TO STRUGGLE WITH YOU +8455-210777-0035-1007: THAT IS ALL QUITE TRUE MISTER NEVERBEND SAID SIR (FERDINANDO BROWN->FERDINAND OBROWN) +8455-210777-0036-1008: I CAN AFFORD TO SMILE BECAUSE I AM ABSOLUTELY POWERLESS BEFORE YOU BUT I DO NOT THE LESS FEEL THAT IN A MATTER (IN->OF) WHICH THE PROGRESS OF THE WORLD IS CONCERNED I OR RATHER WE HAVE BEEN PUT DOWN BY BRUTE FORCE +8455-210777-0037-1009: YOU HAVE COME TO US THREATENING US WITH ABSOLUTE DESTRUCTION +8455-210777-0038-1010: THEREFORE I FEEL MYSELF QUITE ABLE AS PRESIDENT OF THIS REPUBLIC TO RECEIVE YOU WITH A COURTESY DUE TO THE SERVANTS OF A FRIENDLY ALLY +8455-210777-0039-1011: I CAN ASSURE YOU HE HAS NOT EVEN ALLOWED ME TO SEE THE TRIGGER SINCE I HAVE BEEN ON BOARD +8455-210777-0040-1012: THEN SAID SIR FERDINANDO THERE IS NOTHING FOR IT BUT THAT (HE->WE) MUST TAKE YOU WITH HIM +8455-210777-0041-1013: THERE CAME UPON ME A SUDDEN SHOCK WHEN I HEARD THESE WORDS WHICH EXCEEDED ANYTHING WHICH I HAD YET FELT +8455-210777-0042-1014: YOU HEAR WHAT SIR FERDINANDO BROWN HAS SAID REPLIED CAPTAIN (BATTLEAX->BATTLE AXE) +8455-210777-0043-1015: BUT WHAT IS THE DELICATE MISSION I ASKED +8455-210777-0044-1016: I WAS TO BE TAKEN AWAY AND CARRIED TO ENGLAND OR ELSEWHERE OR DROWNED UPON THE VOYAGE IT MATTERED NOT WHICH +8455-210777-0045-1017: THEN THE REPUBLIC OF (BRITANNULA->BRITAIN NULA) WAS TO BE DECLARED AS NON EXISTENT AND THE BRITISH FLAG WAS TO BE EXALTED AND A BRITISH GOVERNOR INSTALLED IN THE EXECUTIVE CHAMBERS +8455-210777-0046-1018: YOU MAY BE QUITE SURE (IT'S->TO) THERE SAID CAPTAIN (BATTLEAX->BATTLE AXE) AND THAT I CAN SO USE IT AS TO HALF OBLITERATE YOUR TOWN WITHIN TWO MINUTES OF MY RETURN ON BOARD +8455-210777-0047-1019: YOU PROPOSE TO KIDNAP ME I SAID +8455-210777-0048-1020: WHAT (WOULD->WILL) BECOME OF YOUR GUN WERE I TO KIDNAP YOU +8455-210777-0049-1021: LIEUTENANT (CROSSTREES->CROSS TREES) IS A VERY GALLANT OFFICER +8455-210777-0050-1022: ONE OF US ALWAYS REMAINS ON BOARD WHILE THE OTHER IS ON SHORE +8455-210777-0051-1023: WHAT WORLD WIDE INIQUITY SUCH A SPEECH AS THAT DISCLOSES SAID I STILL TURNING MYSELF TO THE CAPTAIN FOR THOUGH I WOULD HAVE CRUSHED THEM BOTH BY MY WORDS HAD IT BEEN POSSIBLE MY DISLIKE (CENTRED->SENATE) ITSELF ON SIR FERDINANDO +8455-210777-0052-1024: YOU WILL ALLOW ME TO SUGGEST SAID HE THAT THAT IS A MATTER OF OPINION +8455-210777-0053-1025: WERE I TO COMPLY WITH YOUR ORDERS WITHOUT EXPRESSING MY OWN OPINION I SHOULD SEEM TO HAVE DONE SO WILLINGLY HEREAFTER +8455-210777-0054-1026: THE LETTER RAN AS FOLLOWS +8455-210777-0055-1027: SIR I HAVE IT IN COMMAND TO INFORM YOUR EXCELLENCY THAT YOU HAVE BEEN APPOINTED GOVERNOR OF THE CROWN COLONY WHICH IS CALLED (BRITANNULA->BRITAIN NULA) +8455-210777-0056-1028: THE PECULIAR CIRCUMSTANCES OF THE COLONY ARE WITHIN YOUR EXCELLENCY'S KNOWLEDGE +8455-210777-0057-1029: BUT IN THEIR SELECTION OF A CONSTITUTION THE (BRITANNULISTS->BRITAIN ULYSTS) HAVE 
UNFORTUNATELY ALLOWED THEMSELVES BUT ONE (DELIBERATIVE->DELIBERATE) ASSEMBLY AND HENCE (HAVE->HAS) SPRUNG THEIR PRESENT DIFFICULTIES +8455-210777-0058-1030: IT IS FOUNDED ON THE ACKNOWLEDGED WEAKNESS OF THOSE WHO SURVIVE THAT PERIOD OF LIFE AT WHICH MEN CEASE TO WORK +8455-210777-0059-1031: BUT IT IS SURMISED THAT YOU WILL FIND DIFFICULTIES IN THE WAY OF YOUR ENTERING AT ONCE UPON YOUR (GOVERNMENT->GOVERNOR) +8455-210777-0060-1032: THE JOHN BRIGHT IS (ARMED->ARM) WITH A WEAPON OF GREAT POWER AGAINST WHICH IT IS IMPOSSIBLE THAT THE PEOPLE OF (BRITANNULA->BRITAIN EULO) SHOULD PREVAIL +8455-210777-0061-1033: YOU WILL CARRY OUT WITH YOU ONE HUNDRED MEN OF THE NORTH (NORTH WEST->NORTHWEST) BIRMINGHAM REGIMENT WHICH WILL PROBABLY SUFFICE FOR YOUR OWN SECURITY AS IT IS THOUGHT THAT IF MISTER (NEVERBEND->NEVERBEIN) BE WITHDRAWN THE PEOPLE WILL REVERT EASILY TO THEIR OLD HABITS OF OBEDIENCE +8455-210777-0062-1034: WHEN DO YOU INTEND THAT THE JOHN BRIGHT SHALL START +8455-210777-0063-1035: TO DAY I SHOUTED +8455-210777-0064-1036: AND I HAVE NO ONE READY TO WHOM I CAN GIVE UP THE ARCHIVES OF THE GOVERNMENT +8455-210777-0065-1037: I SHALL BE HAPPY TO TAKE CHARGE OF THEM SAID SIR FERDINANDO +8455-210777-0066-1038: THEY OF COURSE MUST ALL BE ALTERED +8455-210777-0067-1039: OR OF THE HABITS OF OUR PEOPLE IT IS QUITE IMPOSSIBLE +8455-210777-0068-1040: YOUR POWER IS SUFFICIENT I SAID +8455-210777-0069-1041: IF YOU WILL GIVE US YOUR PROMISE TO MEET CAPTAIN (BATTLEAX->ADELAX) HERE AT THIS TIME TO MORROW WE WILL STRETCH A POINT AND DELAY THE DEPARTURE OF THE JOHN BRIGHT FOR TWENTY FOUR HOURS +8455-210777-0070-1042: AND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTRACT FROM ME A PROMISE THAT I WOULD DEPART IN PEACE +8463-287645-0000-543: THIS WAS WHAT DID THE MISCHIEF SO FAR AS THE RUNNING AWAY WAS CONCERNED +8463-287645-0001-544: IT IS HARDLY NECESSARY TO SAY MORE OF THEM HERE +8463-287645-0002-545: FROM THE MANNER IN WHICH (HE->SHE) EXPRESSED HIMSELF WITH REGARD TO ROBERT (HOLLAN->HOLLAND) NO MAN IN THE WHOLE RANGE OF HIS RECOLLECTIONS WILL BE LONGER REMEMBERED THAN HE HIS (ENTHRALMENT->ENTHRALIMENT) WHILE UNDER (HOLLAN->HOLLAND) WILL HARDLY EVER BE FORGOTTEN +8463-287645-0003-546: OF THIS PARTY EDWARD A BOY OF SEVENTEEN CALLED FORTH MUCH SYMPATHY HE TOO WAS CLAIMED BY (HOLLAN->HOLLAND) +8463-287645-0004-547: JOHN WESLEY COMBASH JACOB TAYLOR AND THOMAS EDWARD SKINNER +8463-287645-0005-548: A FEW YEARS BACK ONE OF THEIR SLAVES A COACHMAN WAS KEPT ON THE COACH BOX ONE (COLD->CALLED) NIGHT WHEN THEY WERE OUT AT A BALL UNTIL HE BECAME ALMOST FROZEN TO DEATH IN FACT HE DID DIE IN THE INFIRMARY FROM THE EFFECTS OF THE FROST ABOUT ONE WEEK AFTERWARDS +8463-287645-0006-549: THE DOCTOR WHO ATTENDED THE INJURED CREATURE IN THIS CASE WAS SIMPLY TOLD THAT SHE SLIPPED AND FELL DOWN (*->THE) STAIRS AS SHE WAS COMING DOWN +8463-287645-0007-550: ANOTHER CASE SAID JOHN WESLEY WAS A LITTLE GIRL HALF GROWN WHO WAS WASHING WINDOWS (UP STAIRS->UPSTAIRS) ONE DAY AND UNLUCKILY FELL ASLEEP IN THE WINDOW AND IN THIS POSITION WAS FOUND BY HER MISTRESS IN A RAGE THE MISTRESS (HIT->HID) HER A HEAVY SLAP KNOCKED HER OUT OF THE WINDOW AND SHE FELL TO THE PAVEMENT AND DIED IN A FEW HOURS FROM THE EFFECTS THEREOF +8463-287645-0008-551: AS USUAL NOTHING WAS DONE IN THE WAY OF PUNISHMENT +8463-287645-0009-552: I NEVER KNEW OF BUT ONE MAN WHO COULD EVER PLEASE HIM +8463-287645-0010-553: HE WORKED ME VERY HARD HE WANTED TO BE BEATING ME ALL THE TIME +8463-287645-0011-554: SHE WAS A LARGE HOMELY WOMAN THEY WERE COMMON WHITE PEOPLE WITH NO 
REPUTATION IN THE COMMUNITY +8463-287645-0012-555: SUBSTANTIALLY THIS WAS JACOB'S UNVARNISHED DESCRIPTION OF HIS MASTER AND MISTRESS +8463-287645-0013-556: AS TO HIS AGE AND ALSO THE NAME OF HIS MASTER JACOB'S STATEMENT VARIED SOMEWHAT FROM THE ADVERTISEMENT +8463-287645-0014-557: OF STARTING I DIDN'T KNOW THE WAY TO COME +8463-294825-0000-558: IT'S ALMOST BEYOND CONJECTURE +8463-294825-0001-559: THIS REALITY BEGINS TO EXPLAIN THE DARK POWER AND (OTHERWORLDLY->OTHER WORLDLY) FASCINATION OF TWENTY THOUSAND LEAGUES UNDER THE SEAS +8463-294825-0002-560: FIRST AS A PARIS STOCKBROKER LATER AS A CELEBRATED AUTHOR AND YACHTSMAN HE WENT ON FREQUENT VOYAGES TO BRITAIN AMERICA THE MEDITERRANEAN +8463-294825-0003-561: NEMO BUILDS A FABULOUS (FUTURISTIC->FUTURE STICK) SUBMARINE THE NAUTILUS THEN CONDUCTS AN UNDERWATER CAMPAIGN OF VENGEANCE AGAINST HIS IMPERIALIST OPPRESSOR +8463-294825-0004-562: IN ALL THE NOVEL (HAD->HEAD) A DIFFICULT (GESTATION->JUSTATION) +8463-294825-0005-563: OTHER SUBTLETIES OCCUR INSIDE EACH EPISODE THE TEXTURES SPARKLING WITH WIT INFORMATION AND INSIGHT +8463-294825-0006-564: HIS SPECIFICATIONS FOR AN OPEN SEA SUBMARINE AND A SELF (CONTAINED->CONTAINING) DIVING SUIT WERE DECADES BEFORE THEIR TIME YET MODERN TECHNOLOGY BEARS THEM OUT TRIUMPHANTLY +8463-294825-0007-565: EVEN THE SUPPORTING CAST IS SHREWDLY DRAWN PROFESSOR ARONNAX THE CAREER SCIENTIST CAUGHT IN AN ETHICAL CONFLICT CONSEIL THE COMPULSIVE CLASSIFIER WHO SUPPLIES HUMOROUS TAG LINES FOR VERNE'S FAST FACTS THE HARPOONER NED LAND A CREATURE OF CONSTANT APPETITES MAN AS HEROIC ANIMAL +8463-294825-0008-566: BUT MUCH OF THE (NOVEL'S->NOVELS) BROODING POWER COMES FROM CAPTAIN NEMO +8463-294825-0009-567: THIS COMPULSION LEADS NEMO INTO UGLY CONTRADICTIONS (HE'S->HE IS) A (FIGHTER->FRIGHTER) FOR FREEDOM YET ALL WHO BOARD HIS SHIP (ARE->OR) IMPRISONED THERE FOR GOOD HE WORKS TO SAVE LIVES BOTH HUMAN AND ANIMAL YET HE HIMSELF CREATES A (HOLOCAUST->HOLOCOST) HE DETESTS IMPERIALISM YET HE LAYS PERSONAL CLAIM TO THE SOUTH POLE +8463-294825-0010-568: AND IN THIS LAST ACTION HE FALLS INTO THE CLASSIC SIN OF PRIDE +8463-294825-0011-569: (HE'S->HE IS) SWIFTLY PUNISHED +8463-294825-0012-570: THE NAUTILUS NEARLY PERISHES IN THE ANTARCTIC AND NEMO SINKS INTO A GROWING DEPRESSION +8463-294825-0013-571: FOR MANY THEN THIS BOOK HAS BEEN A SOURCE OF FASCINATION SURELY ONE OF THE MOST INFLUENTIAL NOVELS EVER WRITTEN AN INSPIRATION FOR SUCH SCIENTISTS AND DISCOVERERS AS ENGINEER SIMON LAKE OCEANOGRAPHER WILLIAM (BEEBE->B) POLAR (TRAVELER SIR->TRAVELLERS ARE) ERNEST SHACKLETON +8463-294825-0014-572: FATHOM SIX FEET +8463-294825-0015-573: (GRAM->GRAHAM) ROUGHLY (ONE->WON) TWENTY EIGHTH OF AN OUNCE +8463-294825-0016-574: (MILLIGRAM->MILAGRAM) ROUGHLY ONE TWENTY EIGHT THOUSAND OF AN OUNCE +8463-294825-0017-575: (LITER->LATER) ROUGHLY (ONE QUART->WON COURT) +8463-294825-0018-576: METER ROUGHLY ONE YARD THREE INCHES +8463-294825-0019-577: (MILLIMETER->MILLIMETRE) ROUGHLY ONE TWENTY FIFTH OF AN INCH +8463-294828-0000-578: CHAPTER THREE AS MASTER WISHES +8463-294828-0001-579: THREE SECONDS BEFORE THE ARRIVAL OF J B HOBSON'S LETTER I (NO->KNOW) MORE DREAMED OF CHASING THE UNICORN THAN OF TRYING FOR THE NORTHWEST PASSAGE +8463-294828-0002-580: EVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JOURNEY EXHAUSTED AND BADLY NEEDING A REST +8463-294828-0003-581: I WANTED NOTHING MORE THAN TO SEE MY COUNTRY AGAIN MY FRIENDS MY MODEST QUARTERS BY THE (BOTANICAL->BATTANICAL) GARDENS MY DEARLY BELOVED COLLECTIONS +8463-294828-0004-582: BUT NOW NOTHING COULD HOLD ME 
BACK +8463-294828-0005-583: CONSEIL WAS MY MANSERVANT +8463-294828-0006-584: FROM RUBBING SHOULDERS WITH SCIENTISTS IN OUR LITTLE UNIVERSE BY THE BOTANICAL GARDENS THE BOY HAD COME TO KNOW A THING OR TWO +8463-294828-0007-585: CLASSIFYING WAS EVERYTHING TO HIM SO HE KNEW NOTHING ELSE WELL VERSED IN (THE->A) THEORY OF CLASSIFICATION HE WAS POORLY VERSED IN ITS PRACTICAL APPLICATION AND I DOUBT THAT HE COULD TELL A SPERM WHALE FROM A BALEEN WHALE +8463-294828-0008-586: AND YET WHAT A FINE GALLANT LAD +8463-294828-0009-587: NOT ONCE DID HE COMMENT ON THE LENGTH OR THE HARDSHIPS OF (A->THE) JOURNEY +8463-294828-0010-588: NEVER DID HE OBJECT TO BUCKLING UP HIS (SUITCASE->SUIT CASE) FOR ANY COUNTRY WHATEVER CHINA OR THE CONGO NO MATTER HOW FAR OFF IT WAS +8463-294828-0011-589: HE WENT HERE THERE AND EVERYWHERE IN PERFECT CONTENTMENT +8463-294828-0012-590: PLEASE FORGIVE ME FOR THIS UNDERHANDED WAY OF ADMITTING (*->THAT) I HAD TURNED FORTY +8463-294828-0013-591: HE WAS A FANATIC ON FORMALITY AND HE ONLY ADDRESSED ME IN THE THIRD PERSON TO THE POINT WHERE IT GOT (TIRESOME->TO HIRESUME) +8463-294828-0014-592: THERE WAS GOOD REASON TO STOP AND THINK EVEN FOR THE WORLD'S MOST EMOTIONLESS MAN +8463-294828-0015-593: CONSEIL I CALLED A THIRD (TIME->TON) CONSEIL APPEARED +8463-294828-0016-594: (DID->DEAD) MASTER (SUMMON->SUMMONED) ME HE SAID ENTERING +8463-294828-0017-595: PACK AS MUCH INTO MY TRUNK AS YOU CAN MY (TRAVELING->TRAVELLING) KIT MY SUITS SHIRTS AND SOCKS DON'T BOTHER COUNTING JUST SQUEEZE IT ALL IN AND HURRY +8463-294828-0018-596: WE'LL DEAL WITH THEM LATER WHAT +8463-294828-0019-597: ANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WHOLE MENAGERIE TO FRANCE +8463-294828-0020-598: YES WE ARE CERTAINLY I REPLIED EVASIVELY BUT AFTER WE MAKE A DETOUR +8463-294828-0021-599: A ROUTE SLIGHTLY LESS DIRECT THAT'S ALL +8463-294828-0022-600: WE'RE LEAVING ON THE ABRAHAM LINCOLN +8463-294828-0023-601: YOU SEE MY FRIEND IT'S AN ISSUE OF THE MONSTER THE NOTORIOUS NARWHALE +8463-294828-0024-602: WE DON'T KNOW WHERE IT WILL TAKE US +8463-294828-0025-603: BUT (WE'RE->WERE) GOING JUST THE SAME +8463-294828-0026-604: WE HAVE A COMMANDER (WHO'S->WHOSE) GAME FOR ANYTHING +8463-294828-0027-605: I LEFT INSTRUCTIONS FOR SHIPPING MY CONTAINERS OF STUFFED ANIMALS AND DRIED PLANTS TO PARIS FRANCE +8463-294828-0028-606: I OPENED A LINE OF CREDIT SUFFICIENT TO COVER THE (BABIRUSA->BABRUSA) AND CONSEIL AT MY HEELS I JUMPED INTO A CARRIAGE +8463-294828-0029-607: OUR BAGGAGE WAS IMMEDIATELY CARRIED TO THE DECK OF THE FRIGATE I RUSHED ABOARD +8463-294828-0030-608: I ASKED FOR COMMANDER (FARRAGUT->FERRAGUT) +8463-294828-0031-609: ONE OF THE SAILORS LED ME TO THE (AFTERDECK->AFTER DECK) WHERE I STOOD IN THE PRESENCE OF A SMART LOOKING OFFICER WHO EXTENDED HIS HAND TO ME +8463-294828-0032-610: IN PERSON WELCOME ABOARD PROFESSOR YOUR CABIN IS WAITING FOR YOU +8463-294828-0033-611: I WAS WELL SATISFIED WITH MY CABIN WHICH WAS LOCATED IN THE STERN AND OPENED INTO THE (OFFICERS MESS->OFFICER'S MASTS) +8463-294828-0034-612: (WE'LL->WILL) BE QUITE COMFORTABLE HERE I TOLD CONSEIL +8463-294828-0035-613: AND SO IF (I'D->I HAD) BEEN DELAYED BY A QUARTER OF AN HOUR OR EVEN LESS THE FRIGATE WOULD HAVE GONE WITHOUT ME AND I WOULD HAVE MISSED OUT ON THIS UNEARTHLY EXTRAORDINARY AND INCONCEIVABLE EXPEDITION WHOSE TRUE STORY MIGHT WELL MEET WITH SOME SKEPTICISM +8463-294828-0036-614: THE WHARVES OF BROOKLYN AND EVERY PART OF NEW YORK BORDERING THE EAST RIVER WERE CROWDED WITH CURIOSITY SEEKERS +8463-294828-0037-615: DEPARTING FROM FIVE HUNDRED 
THOUSAND THROATS THREE CHEERS BURST FORTH IN SUCCESSION +8463-294828-0038-616: THOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE THESE TIGHTLY PACKED MASSES HAILING THE ABRAHAM LINCOLN UNTIL IT REACHED THE WATERS OF THE HUDSON RIVER AT THE TIP OF THE LONG PENINSULA THAT FORMS NEW YORK CITY +8555-284447-0000-2299: THEN HE RUSHED (DOWN STAIRS->DOWNSTAIRS) INTO THE COURTYARD SHOUTING LOUDLY FOR HIS SOLDIERS AND THREATENING TO PATCH EVERYBODY IN HIS DOMINIONS (IF->AT) THE SAILORMAN WAS NOT RECAPTURED +8555-284447-0001-2300: HOLD HIM FAST MY MEN AND AS SOON AS (I'VE->I) HAD MY COFFEE (AND->AN) OATMEAL (I'LL->I WILL) TAKE HIM TO THE ROOM OF THE GREAT KNIFE AND (PATCH->PAT) HIM +8555-284447-0002-2301: I WOULDN'T MIND A CUP (O->OF) COFFEE MYSELF SAID CAP'N BILL (I'VE->I HAVE) HAD (CONSID'BLE->CONSIDERABLE) EXERCISE THIS MORNIN (AND->AN) I'M (ALL READY->ALREADY) FOR (BREAKFAS->BREAKFAST) +8555-284447-0003-2302: BUT CAP'N BILL MADE NO SUCH ATTEMPT KNOWING IT WOULD BE USELESS +8555-284447-0004-2303: AS SOON AS THEY ENTERED THE ROOM OF THE GREAT KNIFE THE BOOLOOROO GAVE A YELL OF DISAPPOINTMENT +8555-284447-0005-2304: THE ROOM OF THE GREAT KNIFE WAS HIGH AND BIG AND AROUND IT RAN ROWS OF BENCHES FOR THE SPECTATORS TO SIT UPON +8555-284447-0006-2305: IN ONE PLACE AT THE HEAD OF THE ROOM WAS A RAISED PLATFORM FOR THE ROYAL FAMILY WITH ELEGANT (THRONE->THROWN) CHAIRS FOR THE KING AND QUEEN AND SIX SMALLER BUT RICHLY UPHOLSTERED CHAIRS (FOR THE SNUBNOSED->WITH A SNUB NOSED) PRINCESSES +8555-284447-0007-2306: (THEREFORE->THEY ARE FOR) HER MAJESTY PAID NO ATTENTION TO (ANYONE->ANY ONE) AND NO ONE PAID ANY ATTENTION TO HER +8555-284447-0008-2307: RICH JEWELS OF (BLUE STONES->BLUESTS) GLITTERED UPON THEIR PERSONS AND THE ROYAL LADIES WERE FULLY AS GORGEOUS AS THEY WERE HAUGHTY AND OVERBEARING +8555-284447-0009-2308: (MORNIN->MORNING) GIRLS (HOPE YE FEEL->OPIEVIL) AS WELL AS (YE->YOU) LOOK +8555-284447-0010-2309: (CONTROL->CONTROLL) YOURSELVES MY DEARS REPLIED THE BOOLOOROO THE WORST PUNISHMENT I KNOW HOW TO INFLICT ON (ANYONE->ANY ONE) THIS PRISONER IS ABOUT TO SUFFER (YOU'LL->YOU WILL) SEE A VERY PRETTY PATCHING MY ROYAL DAUGHTERS +8555-284447-0011-2310: SUPPOSE IT'S (A FRIEND->OF BRAND) +8555-284447-0012-2311: THE CAPTAIN SHOOK HIS HEAD +8555-284447-0013-2312: WHY YOU (SAID->SIT) TO FETCH THE FIRST LIVING CREATURE WE MET AND THAT WAS (THIS BILLYGOAT->THE SPILLIGOAT) REPLIED THE CAPTAIN PANTING HARD AS HE HELD FAST TO ONE OF THE GOAT'S HORNS +8555-284447-0014-2313: THE IDEA OF PATCHING CAP'N BILL TO A GOAT WAS VASTLY AMUSING TO HIM AND THE MORE HE THOUGHT OF IT THE MORE HE ROARED WITH LAUGHTER +8555-284447-0015-2314: THEY LOOK SOMETHING ALIKE YOU KNOW SUGGESTED THE CAPTAIN OF THE GUARDS LOOKING FROM ONE TO THE OTHER DOUBTFULLY AND (THEY'RE->THEY) NEARLY THE SAME SIZE IF YOU STAND THE (GOAT->BOAT) ON HIS HIND LEGS THEY'VE BOTH GOT THE SAME STYLE OF WHISKERS AND THEY'RE BOTH OF (EM->THEM) OBSTINATE AND DANGEROUS SO THEY OUGHT TO MAKE A GOOD PATCH SPLENDID +8555-284447-0016-2315: FINE GLORIOUS +8555-284447-0017-2316: WHEN THIS HAD BEEN ACCOMPLISHED THE BOOLOOROO LEANED OVER TO TRY TO DISCOVER WHY THE FRAME ROLLED AWAY SEEMINGLY OF ITS OWN ACCORD AND HE WAS THE MORE PUZZLED BECAUSE IT HAD NEVER DONE SUCH A THING BEFORE +8555-284447-0018-2317: AT ONCE THE GOAT GAVE A LEAP ESCAPED FROM THE SOLDIERS AND WITH BOWED HEAD RUSHED UPON THE BOOLOOROO +8555-284447-0019-2318: BEFORE ANY COULD STOP HIM HE (BUTTED->BUDDED) HIS MAJESTY SO FURIOUSLY THAT THE (KING->KING'S) SOARED FAR INTO THE AIR AND TUMBLED IN A HEAP AMONG THE 
BENCHES WHERE HE LAY MOANING AND GROANING +8555-284447-0020-2319: THE (GOAT'S WARLIKE->GOATS WORE LIKE) SPIRIT WAS ROUSED BY THIS SUCCESSFUL ATTACK +8555-284447-0021-2320: THEN THEY SPED IN GREAT HASTE FOR THE DOOR AND THE GOAT GAVE A FINAL BUTT THAT SENT THE ROW OF ROYAL LADIES ALL DIVING INTO THE CORRIDOR IN ANOTHER TANGLE WHEREUPON THEY SHRIEKED IN A MANNER THAT TERRIFIED (EVERYONE->EVERY ONE) WITHIN SOUND OF THEIR VOICES +8555-284447-0022-2321: I HAD A NOTION IT WAS YOU (MATE AS SAVED->MADE TO SEE) ME FROM THE KNIFE +8555-284447-0023-2322: I COULDN'T SHIVER MUCH (BEIN->BEING) BOUND SO TIGHT BUT WHEN (I'M->I) LOOSE I MEAN TO HAVE (JUS ONE->JUST SWUNG) GOOD SHIVER TO RELIEVE MY (FEELIN'S->FEELINS) +8555-284447-0024-2323: COME AND GET THE BOOLOOROO SHE SAID GOING TOWARD THE BENCHES +8555-284449-0000-2324: SO THEY WERE QUITE WILLING TO OBEY THE ORDERS OF THEIR GIRL QUEEN AND IN A SHORT TIME THE (BLASTS->BLAST) OF TRUMPETS AND ROLL OF DRUMS AND CLASHING OF CYMBALS TOLD TROT AND CAP'N BILL THAT THE BLUE BANDS HAD (ASSEMBLED->A SIMPLED) BEFORE THE PALACE +8555-284449-0001-2325: THEN THEY ALL MARCHED OUT A LITTLE WAY INTO THE FIELDS AND FOUND THAT THE ARMY OF PINKIES HAD ALREADY FORMED AND WAS ADVANCING STEADILY TOWARD THEM +8555-284449-0002-2326: AT THE HEAD OF THE PINKIES WERE GHIP GHISIZZLE AND BUTTON BRIGHT WHO HAD THE PARROT ON HIS SHOULDER AND THEY WERE SUPPORTED BY CAPTAIN (CORALIE->CORLIE) AND CAPTAIN (TINTINT->TINTANT) AND ROSALIE THE WITCH +8555-284449-0003-2327: WHEN THE (BLUESKINS->BLUESKIN) SAW GHIP GHISIZZLE THEY RAISED ANOTHER GREAT SHOUT FOR HE WAS THE FAVORITE OF THE SOLDIERS AND VERY POPULAR WITH ALL THE PEOPLE +8555-284449-0004-2328: SINCE LAST THURSDAY I (GHIP->GIP) GHISIZZLE HAVE BEEN THE LAWFUL BOOLOOROO OF THE BLUE COUNTRY BUT NOW THAT YOU ARE CONQUERED BY QUEEN TROT I SUPPOSE I AM CONQUERED TOO AND YOU HAVE NO BOOLOOROO AT ALL +8555-284449-0005-2329: WHEN HE FINISHED SHE SAID CHEERFULLY +8555-284449-0006-2330: DON'T WORRY SIZZLE DEAR IT'LL ALL COME RIGHT PRETTY SOON +8555-284449-0007-2331: NOW THEN LET'S ENTER THE CITY (AN->AND) ENJOY THE (GRAND->GREAT) FEAST (THAT'S->AT) BEING COOKED I'M NEARLY (STARVED->STORM) MYSELF FOR THIS (CONQUERIN KINGDOMS->CONQUERING KINGDOM'S) IS HARD WORK +8555-284449-0008-2332: THEN SHE GAVE ROSALIE BACK HER MAGIC RING THANKING THE KIND WITCH FOR ALL SHE HAD DONE FOR THEM +8555-284449-0009-2333: YOU ARE (*->A) MATE REPLIED THE SAILOR +8555-284449-0010-2334: IT WILL BE SUCH A SATISFACTION +8555-284449-0011-2335: THE GUARDS HAD A TERRIBLE STRUGGLE WITH THE GOAT WHICH WAS LOOSE IN THE ROOM AND STILL WANTED TO FIGHT BUT FINALLY THEY SUBDUED THE ANIMAL AND THEN THEY TOOK THE BOOLOOROO OUT OF THE FRAME HE WAS TIED IN AND BROUGHT BOTH HIM AND THE GOAT BEFORE QUEEN TROT WHO AWAITED THEM IN THE THRONE ROOM OF THE PALACE +8555-284449-0012-2336: I'LL GLADLY DO THAT PROMISED THE NEW BOOLOOROO AND I'LL FEED THE (HONORABLE GOAT->HONED) ALL THE SHAVINGS AND LEATHER AND TIN CANS HE CAN EAT BESIDES THE GRASS +8555-284449-0013-2337: (SCUSE->EXCUSE) ME SAID (TROT->SHOT) I NEGLECTED TO TELL YOU THAT YOU'RE NOT THE BOOLOOROO ANY MORE +8555-284449-0014-2338: THE FORMER BOOLOOROO GROANED +8555-284449-0015-2339: (I'LL NOT->HOW NOW) BE WICKED ANY MORE SIGHED THE OLD BOOLOOROO I'LL REFORM +8555-284449-0016-2340: AS A PRIVATE CITIZEN I SHALL BE A MODEL OF DEPORTMENT BECAUSE IT WOULD BE DANGEROUS TO BE OTHERWISE +8555-284449-0017-2341: WHEN FIRST THEY ENTERED THE THRONE ROOM THEY TRIED TO BE AS HAUGHTY AND SCORNFUL AS EVER BUT THE BLUES WHO WERE ASSEMBLED THERE ALL 
LAUGHED AT THEM AND JEERED THEM FOR THERE WAS NOT A SINGLE PERSON IN ALL THE BLUE COUNTRY WHO LOVED THE PRINCESSES THE LEAST LITTLE BIT +8555-284449-0018-2342: SO GHIP GHISIZZLE ORDERED THE CAPTAIN TO TAKE A FILE OF SOLDIERS AND ESCORT THE RAVING BEAUTIES TO THEIR NEW HOME +8555-284449-0019-2343: THAT EVENING TROT GAVE A GRAND BALL IN THE PALACE TO WHICH THE MOST IMPORTANT OF THE PINKIES AND THE BLUESKINS WERE INVITED +8555-284449-0020-2344: THE COMBINED BANDS OF BOTH THE COUNTRIES PLAYED THE MUSIC AND A FINE SUPPER WAS SERVED +8555-292519-0000-2283: BRIGHTER THAN EARLY (DAWN'S->DAWNS) MOST BRILLIANT DYE ARE BLOWN CLEAR BANDS OF COLOR THROUGH THE SKY THAT SWIRL AND SWEEP AND MEET TO BREAK AND FOAM LIKE RAINBOW VEILS UPON A BUBBLE'S DOME +8555-292519-0001-2284: GUIDED BY YOU HOW WE MIGHT STROLL TOWARDS DEATH OUR ONLY MUSIC ONE ANOTHER'S BREATH THROUGH (GARDENS->GARDEN'S) INTIMATE WITH HOLLYHOCKS WHERE (*->A) SILENT POPPIES BURN BETWEEN THE ROCKS BY POOLS WHERE BIRCHES BEND TO CONFIDANTS ABOVE GREEN WATERS SCUMMED WITH (*->THE) LILY PLANTS +8555-292519-0002-2285: VENICE +8555-292519-0003-2286: IN A SUNSET GLOWING OF CRIMSON AND GOLD SHE LIES THE GLORY OF THE WORLD A (BEACHED->BEECHED) KING'S GALLEY (WHOSE->WHO) SAILS ARE FURLED WHO IS HUNG WITH TAPESTRIES RICH AND OLD +8555-292519-0004-2287: THE PITY THAT WE MUST COME AND GO +8555-292519-0005-2288: WHILE THE OLD GOLD AND THE MARBLE STAYS FOREVER GLEAMING ITS SOFT STRONG BLAZE CALM IN THE EARLY EVENING GLOW +8555-292519-0006-2289: THE PLEASANT GRAVEYARD OF MY SOUL WITH SENTIMENTAL CYPRESS TREES AND FLOWERS IS FILLED THAT I MAY STROLL IN MEDITATION AT MY EASE +8555-292519-0007-2290: IT IS MY HEART HUNG IN THE SKY AND NO CLOUDS EVER FLOAT BETWEEN THE GRAVE FLOWERS AND MY HEART ON HIGH +8555-292519-0008-2291: OVER THE TRACK LINED CITY STREET THE YOUNG (MEN->MAN) THE GRINNING (MEN->MAN) PASS +8555-292519-0009-2292: HO YE SAILS THAT SEEM TO WANDER IN DREAM FILLED MEADOWS SAY IS THE SHORE WHERE I STAND THE ONLY FIELD OF STRUGGLE OR ARE YE HIT AND BATTERED OUT THERE BY WAVES AND WIND GUSTS AS (YE->HE) TACK OVER A CLASHING SEA OF WATERY ECHOES +8555-292519-0010-2293: OLD DANCES ARE SIMPLIFIED OF THEIR YEARNING BLEACHED BY TIME +8555-292519-0011-2294: HE HAD GOT INTO HER COURTYARD +8555-292519-0012-2295: THROUGH THE BLACK NIGHT RAIN HE SANG TO HER WINDOW BARS +8555-292519-0013-2296: THAT WAS BUT RUSTLING OF (DRIPPING->TRIPPING) PLANTS IN THE DARK +8555-292519-0014-2297: SHE WAS ALONE THAT NIGHT +8555-292519-0015-2298: HE HAD BROKEN INTO HER COURTYARD +908-157963-0000-1321: TO FADE AWAY LIKE MORNING BEAUTY FROM HER MORTAL DAY DOWN BY THE RIVER OF (ADONA->ADONNA) HER SOFT VOICE IS HEARD AND THUS HER GENTLE LAMENTATION FALLS LIKE MORNING DEW +908-157963-0001-1322: (O->OH) LIFE OF THIS OUR SPRING +908-157963-0002-1323: WHY FADES THE LOTUS OF THE WATER +908-157963-0003-1324: WHY FADE THESE CHILDREN OF THE SPRING +908-157963-0004-1325: (THEL->FELL) IS LIKE A WATRY BOW AND LIKE A PARTING CLOUD LIKE A REFLECTION IN A GLASS LIKE SHADOWS IN THE WATER LIKE DREAMS OF INFANTS LIKE A SMILE UPON AN (INFANTS->INFANT'S) FACE +908-157963-0005-1326: LIKE THE (DOVES VOICE->DOVE'S BOYS) LIKE TRANSIENT DAY LIKE MUSIC IN THE AIR AH +908-157963-0006-1327: AND GENTLE SLEEP THE SLEEP OF DEATH AND GENTLY HEAR THE VOICE OF HIM THAT WALKETH IN THE GARDEN IN THE EVENING TIME +908-157963-0007-1328: THE (LILLY->LILY) OF THE VALLEY BREATHING IN THE HUMBLE GRASS (ANSWERD->ANSWERED) THE LOVELY (MAID AND->MAIDEN) SAID I AM A (WATRY->WATRIE) WEED AND I AM VERY SMALL AND LOVE TO DWELL IN 
LOWLY VALES SO WEAK THE GILDED BUTTERFLY SCARCE (PERCHES->PURCHASE) ON MY HEAD YET I AM VISITED FROM HEAVEN AND HE THAT SMILES ON ALL WALKS IN THE VALLEY AND EACH MORN OVER ME SPREADS HIS HAND SAYING REJOICE THOU HUMBLE GRASS THOU (NEW BORN->NEWBORN) LILY FLOWER +908-157963-0008-1329: THOU GENTLE MAID OF SILENT VALLEYS AND OF MODEST BROOKS FOR THOU (SHALL->SHALT) BE CLOTHED IN LIGHT AND FED WITH MORNING MANNA TILL (SUMMERS->SUMMER'S) HEAT MELTS THEE BESIDE THE FOUNTAINS AND THE SPRINGS TO FLOURISH IN ETERNAL VALES THEY WHY (SHOULD THEL->SHOULDST THOU) COMPLAIN +908-157963-0009-1330: WHY SHOULD THE MISTRESS OF THE (VALES->VEILS) OF HAR UTTER A SIGH +908-157963-0010-1331: SHE (CEASD->CEASED) AND (SMILD->SMILED) IN TEARS THEN SAT DOWN IN HER SILVER SHRINE +908-157963-0011-1332: WHICH THOU DOST SCATTER ON EVERY LITTLE BLADE OF GRASS THAT SPRINGS REVIVES THE MILKED COW AND TAMES THE FIRE BREATHING STEED +908-157963-0012-1333: BUT (THEL->THOUGH) IS LIKE A FAINT CLOUD KINDLED AT THE RISING SUN I VANISH FROM MY PEARLY THRONE AND WHO SHALL FIND MY PLACE +908-157963-0013-1334: AND WHY IT SCATTERS ITS BRIGHT BEAUTY (THRO->THROUGH) THE (HUMID->HUMAN) AIR +908-157963-0014-1335: DESCEND (O->A) LITTLE CLOUD AND HOVER BEFORE THE EYES OF (THEL->FELL) +908-157963-0015-1336: O LITTLE CLOUD THE VIRGIN SAID I CHARGE THEE TO TELL ME WHY THOU COMPLAINEST NOW WHEN IN ONE HOUR THOU FADE AWAY THEN WE SHALL SEEK THEE BUT NOT FIND AH (THEL->FELL) IS LIKE TO THEE +908-157963-0016-1337: I PASS AWAY YET I COMPLAIN AND NO ONE HEARS MY VOICE +908-157963-0017-1338: THE CLOUD THEN (SHEWD->SHOWED) HIS GOLDEN HEAD AND HIS BRIGHT FORM (EMERG'D->EMERGED) +908-157963-0018-1339: AND FEAREST THOU BECAUSE I VANISH AND AM SEEN NO MORE +908-157963-0019-1340: IT IS TO TENFOLD LIFE TO LOVE TO PEACE AND RAPTURES (HOLY->WHOLLY) UNSEEN DESCENDING WEIGH MY LIGHT WINGS UPON BALMY FLOWERS AND COURT THE FAIR EYED (DEW->DO) TO TAKE ME TO HER SHINING TENT THE WEEPING VIRGIN TREMBLING KNEELS BEFORE THE RISEN SUN +908-157963-0020-1341: TILL WE ARISE (LINK'D->LINKED) IN A GOLDEN BAND AND NEVER PART BUT WALK UNITED BEARING FOOD TO ALL OUR TENDER FLOWERS +908-157963-0021-1342: LIVES NOT ALONE NOR (OR->OF) ITSELF FEAR NOT AND I WILL CALL THE WEAK WORM FROM ITS LOWLY BED AND THOU SHALT HEAR ITS VOICE +908-157963-0022-1343: COME FORTH WORM AND THE SILENT VALLEY TO THY PENSIVE QUEEN +908-157963-0023-1344: THE HELPLESS WORM AROSE AND SAT UPON THE (LILLYS->LILY'S) LEAF AND THE BRIGHT CLOUD (SAILD->SAILED) ON TO FIND HIS PARTNER IN THE VALE +908-157963-0024-1345: IMAGE OF WEAKNESS ART THOU BUT A WORM +908-157963-0025-1346: I SEE THEY LAY HELPLESS AND NAKED WEEPING AND NONE TO ANSWER NONE TO CHERISH THEE WITH MOTHERS SMILES +908-157963-0026-1347: AND SAYS THOU MOTHER OF MY CHILDREN I HAVE LOVED THEE AND I HAVE GIVEN THEE A CROWN THAT NONE CAN TAKE AWAY +908-157963-0027-1348: AND LAY ME DOWN IN THY COLD BED AND LEAVE MY SHINING LOT +908-157963-0028-1349: OR AN EYE OF GIFTS AND GRACES (SHOWRING->SHOWERING) FRUITS AND COINED GOLD +908-157963-0029-1350: WHY A TONGUE (IMPRESS'D->IMPRESSED) WITH HONEY FROM EVERY WIND +908-157963-0030-1351: WHY AN EAR A WHIRLPOOL FIERCE TO DRAW CREATIONS IN +908-31957-0000-1352: ALL IS SAID WITHOUT A WORD +908-31957-0001-1353: I SIT BENEATH THY LOOKS AS CHILDREN DO IN THE NOON SUN WITH SOULS THAT TREMBLE THROUGH THEIR HAPPY EYELIDS FROM AN UNAVERRED YET (PRODIGAL->CHRONICAL) INWARD JOY +908-31957-0002-1354: I DID NOT WRONG MYSELF SO BUT I PLACED A WRONG ON THEE +908-31957-0003-1355: WHEN CALLED BEFORE I TOLD HOW HASTILY I DROPPED 
MY FLOWERS OR (BRAKE->BREAK) OFF FROM A GAME +908-31957-0004-1356: SHALL I NEVER MISS HOME TALK AND BLESSING AND THE COMMON KISS THAT COMES TO EACH IN TURN NOR COUNT IT STRANGE WHEN I LOOK UP TO DROP ON A NEW RANGE OF WALLS AND FLOORS ANOTHER HOME THAN THIS +908-31957-0005-1357: ALAS I HAVE GRIEVED SO I AM HARD TO LOVE +908-31957-0006-1358: OPEN THY HEART WIDE AND FOLD WITHIN THE WET WINGS OF THY DOVE +908-31957-0007-1359: COULD IT MEAN TO LAST A LOVE SET PENDULOUS BETWEEN SORROW AND SORROW +908-31957-0008-1360: NAY I RATHER THRILLED DISTRUSTING EVERY LIGHT THAT SEEMED TO GILD THE ONWARD PATH AND (FEARED->FEAR) TO OVERLEAN A FINGER EVEN +908-31957-0009-1361: AND THOUGH I HAVE GROWN SERENE AND STRONG SINCE THEN I THINK THAT GOD HAS WILLED A STILL RENEWABLE FEAR +908-31957-0010-1362: O LOVE O TROTH +908-31957-0011-1363: AND LOVE BE FALSE +908-31957-0012-1364: IF HE TO KEEP ONE OATH MUST LOSE ONE JOY BY HIS LIFE'S STAR FORETOLD +908-31957-0013-1365: SLOW TO WORLD GREETINGS QUICK WITH ITS O LIST WHEN THE (ANGELS->ANGEL) SPEAK +908-31957-0014-1366: A RING OF AMETHYST I COULD NOT WEAR HERE PLAINER TO MY SIGHT THAN THAT FIRST KISS +908-31957-0015-1367: THAT WAS THE CHRISM OF LOVE WHICH (LOVE'S->LOVES) OWN CROWN WITH SANCTIFYING SWEETNESS DID (PRECEDE->PROCEED) THE THIRD UPON MY LIPS WAS FOLDED DOWN (IN PERFECT->IMPERFECT) PURPLE STATE SINCE WHEN INDEED I HAVE BEEN PROUD AND SAID MY LOVE MY OWN +908-31957-0016-1368: DEAREST TEACH ME SO TO POUR OUT GRATITUDE AS THOU DOST GOOD +908-31957-0017-1369: MUSSULMANS AND (GIAOURS->GUYRS) THROW KERCHIEFS AT A SMILE AND HAVE NO RUTH FOR ANY WEEPING +908-31957-0018-1370: BUT THOU ART NOT SUCH A LOVER MY BELOVED +908-31957-0019-1371: THOU CANST WAIT THROUGH SORROW AND SICKNESS TO BRING SOULS TO TOUCH AND THINK IT SOON WHEN OTHERS CRY TOO LATE +908-31957-0020-1372: I (THANK->THINK) ALL WHO HAVE LOVED ME IN THEIR HEARTS WITH THANKS AND LOVE FROM MINE +908-31957-0021-1373: OH TO SHOOT MY SOUL'S FULL MEANING INTO FUTURE YEARS THAT THEY SHOULD LEND IT UTTERANCE AND SALUTE LOVE THAT ENDURES FROM LIFE THAT DISAPPEARS +908-31957-0022-1374: THEN I LONG TRIED BY NATURAL ILLS RECEIVED THE COMFORT FAST WHILE BUDDING AT THY SIGHT MY PILGRIM'S STAFF GAVE OUT GREEN LEAVES WITH MORNING DEWS (IMPEARLED->IMPELLED) +908-31957-0023-1375: I LOVE THEE FREELY AS MEN STRIVE FOR RIGHT I LOVE THEE PURELY AS THEY TURN FROM PRAISE +908-31957-0024-1376: I LOVE THEE WITH THE PASSION PUT TO USE IN MY OLD (GRIEFS->GREEDS) AND WITH MY CHILDHOOD'S FAITH +908-31957-0025-1377: I LOVE THEE WITH A LOVE I SEEMED TO LOSE WITH MY LOST SAINTS I LOVE THEE WITH THE BREATH SMILES TEARS OF ALL MY LIFE AND IF GOD CHOOSE I SHALL BUT LOVE THEE BETTER AFTER DEATH + +SUBSTITUTIONS: count ref -> hyp +29 AND -> IN +15 IN -> AND +14 A -> THE +11 THE -> A +8 AN -> AND +7 THIS -> THE +6 I'VE -> I +6 ANYONE -> ANY +5 SOAMES -> SOLMES +5 SILVIA -> SYLVIA +5 O -> OF +5 I'M -> I +4 TWO -> TOO +4 THEIR -> THERE +4 THAT -> THE +4 OH -> O +4 O -> OH +4 METER -> METRE +4 MEN -> MAN +4 IS -> AS +4 IN -> AN +4 GALATIANS -> GALLATIONS +4 BATTLEAX -> BATTLE +4 ANDERS -> ANDREWS +4 A -> OF +3 WHERE -> WERE +3 WHEN -> ONE +3 VALLIERE -> VALLIERS +3 TOWARDS -> TOWARD +3 TODAY -> TO +3 THEL -> FELL +3 THEATER -> THEATRE +3 THE -> THEIR +3 SOLON -> SOLEMN +3 ROUND -> AROUND +3 RODOLFO -> RUDOLPHO +3 PRACTISE -> PRACTICE +3 OF -> A +3 MAN -> MEN +3 MAINHALL -> MAIN +3 KAFFAR -> KAFFIR +3 IS -> WAS +3 HOLLAN -> HOLLAND +3 HER -> A +3 HE'S -> HE +3 EVERYONE -> EVERY +3 BRITANNULA -> BRITAIN +3 BANNISTER -> BANISTER +3 AND -> AN +2 
YOU'RE -> YOU +2 XAVIER -> ZAVIER +2 WYLDER -> WILDER +2 WHITTAWS -> WIDOWS +2 WE'LL -> WILL +2 VAPOURS -> VAPORS +2 VANDERPOOL -> VAN +2 TWO -> TO +2 TRAVELING -> TRAVELLING +2 TONIGHT -> TO +2 TO -> OF +2 TIMAEUS -> TO +2 THORKEL -> TORCOAL +2 THEY -> THERE +2 THEN -> THAN +2 THEM -> HIM +2 THEIR -> THE +2 THEE -> THE +2 THE -> THAT +2 THE -> IN +2 THAT -> IT +2 SOMETIME -> SOME +2 SOMEONE -> SOME +2 SHE'S -> SHE +2 SEEM -> SEEMED +2 READ -> RED +2 PLATONISTS -> PLATANISTS +2 ONE -> WON +2 OF -> O +2 NOW -> THOU +2 NO -> KNOW +2 NEO -> NEW +2 MUNNY -> MONEY +2 MARSHALL -> MARSHAL +2 LEOCADIA -> THE +2 LEAVENWORTH -> LEVINWORTH +2 LEAVENWORTH -> LEVIN +2 JAGO -> YAGO +2 JAGO -> GIAGO +2 IT'S -> ITS +2 IT -> ITS +2 IS -> HAS +2 INTO -> AND +2 IN -> A +2 I'D -> I +2 I -> I'M +2 HOSTESS -> HOSTES +2 HOLY -> WHOLLY +2 HOLBEIN -> HOLBINE +2 HE -> WE +2 HAS -> HAD +2 HALLO -> HELLO +2 GREY -> GRAY +2 GILCHRIST -> GILGRIST +2 FOUNDED -> FOUND +2 FEELING -> FILLING +2 FAVORITE -> FAVOURITE +2 FAIRVIEW -> FAIR +2 EMIL -> AMYL +2 DISSENT -> DESCENT +2 DE -> THE +2 CRITIAS -> CRITIUS +2 CRESSWELL -> CRESWELL +2 CRESSWELL -> CRASWELL +2 COURT -> COURTYARD +2 COLOUR -> COLOR +2 CHECK -> CHEQUE +2 CHAISE -> CHASE +2 CARL -> KARL +2 BUT -> THAT +2 BRAKE -> BREAK +2 BEHAVIOUR -> BEHAVIOR +2 AYE -> I +2 AY -> I +2 AS -> A +2 ARE -> OUR +2 ANOTHER -> THE +2 AND -> AS +2 ALL -> ALTOGETHER +1 ZORA'S -> ZORAS +1 ZORA -> SORA +1 ZOOF'S -> ZEF'S +1 YOU'LL -> YOU +1 YEARNING -> YEARNIN +1 YE -> YOU +1 YE -> HE +1 XAVIER -> ZAVIOUR +1 WOULD -> WILL +1 WOULD -> WERE +1 WORST -> WORSE +1 WORSE -> HORSE +1 WOODS -> WOOD +1 WOODBEGIRT -> WOOD +1 WOOD -> WOODCUTTERS +1 WONDERING -> WANDERING +1 WOMAN'S -> WOMEN'S +1 WITHES -> WIDTHS +1 WITH -> WHICH +1 WITH -> WHEN +1 WINTER -> WINNER +1 WILL -> WOULD +1 WILL -> WE'LL +1 WIFE -> WHITE +1 WHOSE -> WHO +1 WHO'S -> WHOSE +1 WHO -> WHOSE +1 WHITTAWD -> WIDOW +1 WHITTAW -> WIDOW +1 WHIRLPOOL -> WAR +1 WHIPPED -> WHIP +1 WHERE -> WHERE'S +1 WHEN -> WITH +1 WHEN -> AND +1 WHATEVER -> WHATSOEVER +1 WHAT'S -> WHAT +1 WHAT -> WHEN +1 WHALE -> WELL +1 WHALE -> WAIL +1 WESTPORT -> WESTWARD +1 WESTPORT -> PORT +1 WESTMERE -> WESTMARE +1 WERE -> WOULD +1 WERE -> WHERE +1 WERE -> ARE +1 WELL -> WHILE +1 WELL -> FOR +1 WELCOMED -> WELCOME +1 WEDNESDAY -> WIND +1 WEATHER -> WHETHER +1 WEAR -> WHERE +1 WEAKLY -> WEEKLY +1 WE'RE -> WERE +1 WE'RE -> WE +1 WE -> WE'VE +1 WAVES -> WAY +1 WATRY -> WATRIE +1 WATERMILL -> WATER +1 WAS -> WITH +1 WAS -> VIEW'S +1 WAS -> IS +1 WARLIKE -> WORE +1 WANDERING -> WONDERING +1 VOLTAIRE -> WHILE +1 VOICE -> BOYS +1 VISITORS -> VISITOR +1 VILLEROY -> VILLEROI +1 VILLA -> VILLIDESA +1 VIGNETTE -> VINEYARD +1 VICARIOUS -> VICHAIRLESS +1 VIADUCT -> VIA +1 VERY -> VERIMENT +1 VERSE -> FIRST +1 VAUDOIS -> FAUDOIS +1 VANES -> VEINS +1 VALOR -> VALO +1 VALES -> VEILS +1 UTAH -> NEW +1 UPON -> ON +1 UP -> UPSTAIRS +1 UP -> OF +1 UNWARILY -> THEN +1 UNTO -> INTO +1 UNLIKE -> I +1 UNDERGROUND -> ON +1 UNCLENCHED -> CLENCHED +1 UNC -> UG +1 UN -> AND +1 TWO -> TUTRILOGIES +1 TURNOVER -> TURN +1 TUPPENY -> TUPPENNY +1 TROT -> SHOT +1 TREDDLESTON -> TREDDLESTONE +1 TREBLE -> TROUBLE +1 TRAVELERS -> TRAVELLERS +1 TRAVELER -> TRAVELLERS +1 TOWNE -> TOWN +1 TOWELLING -> TOWELINGS +1 TOULD -> DID +1 TOTTY -> HAD +1 TOPS -> TOPSY +1 TOPEKA -> TEPEAKA +1 TOOMS -> TOMBS +1 TOO -> TWO +1 TONNAY -> TONIET +1 TONNAY -> TONECHAU +1 TONNAY -> TO +1 TONNAY -> TENACHALANT +1 TOLD -> TOLLED +1 TOILETTE -> TOILET +1 TO -> WEST +1 TO -> TWO +1 TO -> THROUGH +1 TO -> DOES +1 TO -> 
DEFINED +1 TIRESOME -> TO +1 TINTORET -> TINTARETTE +1 TINTINT -> TINTANT +1 TIMES -> TUBS +1 TIME -> YOU +1 TIME -> TON +1 TIMAEUS -> TIMIUS +1 TIMAEUS -> TIMIRAS +1 TIMAEUS -> TIMAIRS +1 TIMAEUS -> TENEAS +1 TIMAEUS -> TEARS +1 TIBI -> TIBBY +1 THUS -> LUSTY +1 THROUGH -> TO +1 THRONE -> THROWN +1 THRO -> THROUGH +1 THOUGHT -> BOUGHT +1 THOUGH -> THE +1 THORLEIF -> TORE +1 THORKEL -> TORCOLE +1 THIS -> OSTENSITY +1 THINKS -> THINK +1 THEY'RE -> THEY +1 THEY -> THE +1 THEY -> MAY +1 THEREFORE -> THEY +1 THERE -> THERE'S +1 THEN -> IN +1 THEN -> AND +1 THEM -> THE +1 THEL -> THOUGH +1 THEL -> THOU +1 THEE'S -> THESE +1 THEE -> HE +1 THE -> WHO +1 THE -> TO +1 THE -> THIS +1 THE -> THEY +1 THAT'S -> AT +1 THAT -> THAN +1 THANKING -> THINKING +1 THANK -> THINK +1 THAN -> THEN +1 THAN -> THAT +1 THAN -> IN +1 TECHNIQUE -> TYPENIQUE +1 TEA -> T +1 TARANTULA -> TERENTIAL +1 TALKERS -> TALK +1 TAKEN -> TAKING +1 TABU -> TABOU +1 TABU -> TABOO +1 TABU -> BOOT +1 TABLE -> TABLECLOTH +1 SYMPOSIUM -> SIMPOSIUM +1 SYMPOSIUM -> SIMPOS +1 SWOONS -> SWOON +1 SWEEP -> SWEPT +1 SWAN -> SWAYING +1 SUSPICIONS -> SUSPICION +1 SURVIVE -> SURVIVED +1 SURFACES -> SERVICES +1 SUPPOSITION -> OPPOSITION +1 SUMNER -> SUMMER +1 SUMMON -> SUMMONED +1 SUMMERS -> SUMMER'S +1 SUITCASE -> SUIT +1 STROLL -> STRAW +1 STREAMLINE -> STREAM +1 STORY'S -> STORIES +1 STEEL'D -> STEELED +1 STEADY -> STUDY +1 STATE'S -> STATES +1 STATE -> STATES +1 STARVED -> STORM +1 STARTS -> START +1 STANDS -> STAND +1 STAIR -> STARE +1 STAID -> STAY +1 STAGE -> STEED +1 SQUEAK -> SQUI +1 SPRING -> SPRANG +1 SPRAGUE -> SP +1 SPLENDOR -> SPLENDOUR +1 SPLENDET -> SPLENDID +1 SPIN -> SPEND +1 SPECIALISED -> SPECIALIZED +1 SOUTHEY'S -> SO +1 SOUTHEY -> SELVEY +1 SOUTHEY -> SALVI +1 SOU -> SOUS +1 SOOTHED -> SOOTHE +1 SON -> FUN +1 SOME -> SOMETIME +1 SOLON'S -> SILENCE +1 SOLILOQUY -> SOLOQUY +1 SODALITY -> SODELITY +1 SOCRATIC -> CRADIC +1 SO -> SEWED +1 SNUBNOSED -> SNUB +1 SMITTEN -> SMIT +1 SMILD -> SMILED +1 SMELLS -> MILLS +1 SLEEVE -> STEVE +1 SKILLFUL -> SKILFUL +1 SKEPTICAL -> SCEPTICAL +1 SIZE -> SIZED +1 SIR -> ARE +1 SINCE -> SEN +1 SIN -> IN +1 SILENT -> SILENCE +1 SIGHT -> SIGHTSEERS +1 SIGHED -> SIDE +1 SIF -> SIFT +1 SIDE -> SIGH +1 SHOWRING -> SHOWERING +1 SHOULD -> WOULD +1 SHOULD -> SHOULDST +1 SHODDY -> SHODY +1 SHIP -> SHIP'S +1 SHEWD -> SHOWED +1 SHERIFF -> SHERIFF'S +1 SHE -> YOU +1 SHE -> HE +1 SHARPS -> SHARP'S +1 SHARP'ST -> SHARPEST +1 SHANNON -> SHAN +1 SHAN'T -> SHA'N'T +1 SHALL -> SHALT +1 SHABATA -> SHEBATA +1 SETTLE -> SETTLED +1 SERVE -> SERVED +1 SERVANT -> SERVANTS +1 SENTENCES -> SENTENCE +1 SENT -> SET +1 SENSE -> SCENTS +1 SENCE -> SINCE +1 SEMON'S -> SIMMONS +1 SEEMED -> SEEMS +1 SEEDS -> SEATS +1 SEE -> SEA +1 SECTS -> SEX +1 SEATING -> SITTING +1 SEAT -> SEED +1 SCUTCHEON -> STUTTON +1 SCUSE -> EXCUSE +1 SCRAPBOOKS -> SCRAP +1 SCOUTING -> SCOUT +1 SCHOOLED -> SCHOOL +1 SCHOOL -> SCHOOLS +1 SCHOOL -> SCHOOLBOYS +1 SCEVRA -> SCAVRA +1 SCEURA -> SKURA +1 SCATHE -> SCATH +1 SCAROONS -> SCARONS +1 SAW -> SOLID +1 SAVED -> SEE +1 SAUVEUR -> SEVER +1 SATE -> SAT +1 SANG -> SAYING +1 SAMPLE -> SABLE +1 SALINE -> SAILING +1 SALIENT -> SAILORED +1 SAINTS -> SAYS +1 SAILD -> SAILED +1 SAIL -> SALE +1 SAID -> SIT +1 RUST -> REST +1 RULED -> ROLLED +1 RUFUS -> RUFFUS +1 RUE -> GRUE +1 ROSSETER -> ROSSITUR +1 ROERER -> ROAR +1 RODOLFO'S -> GODOLPH'S +1 RODOLFO -> UDOLPH +1 RODOLFO -> RODOLPHO +1 RODOLFO -> RIDOLPHO +1 RODOLFO -> RDOLPHAL +1 RODOLFO -> RADOLPHO +1 ROCKED -> ROCK +1 ROBIN'S -> ROBINS +1 REWEIGHED -> 
REWAIED +1 RETURN -> RETURNED +1 RESIGNED -> RESIGN +1 REMOVE -> MOVED +1 REMOV'D -> REMOVED +1 REMARK -> REMARKED +1 REMAINED -> REMAINING +1 REMAINED -> REMAIN +1 REMAIN -> REMAINED +1 RELOCATED -> RE +1 RELIES -> REALIZE +1 REIGNED -> RAINED +1 REGGIE -> READY +1 REGAINED -> REGAIN +1 REFUSED -> WERE +1 REENFORCEMENTS -> REINFORCEMENTS +1 REEDER -> READER +1 RED -> READ +1 RECORD -> RECORDS +1 RECOGNISED -> RECOGNIZED +1 REBUK'D -> REBUKED +1 RE -> REINTER +1 RACHEL -> RACHAEL +1 QUINCY -> QUINCEY +1 QUASI -> COURSE +1 QUASH -> CAUSH +1 QUART -> COURT +1 QUADRILLE -> QUADRILL +1 PYTHAGOREANS -> PYTHAGORIANS +1 PUTTIN -> PUTTING +1 PUT -> PUTTING +1 PURSE -> PERSON +1 PURPOSED -> PURPOSE +1 PURIST -> PUREST +1 PSALM -> SUM +1 PROVES -> PROVED +1 PROSELYTING -> PROSELY +1 PROSCRIBED -> PRESCRIBED +1 PRODIGAL -> CHRONICAL +1 PRINCIPLE -> PRINCIPAL +1 PREVENT -> PRESENT +1 PRETENSE -> PRETENCE +1 PRECIEUSES -> PURSUS +1 PRECEDE -> PROCEED +1 PRE -> PRIESTHOO +1 PRACTICE -> PRACTISE +1 POWER -> BOWER +1 POSSESS -> POSSESSED +1 POISON'D -> POISONED +1 POINT -> BLINT +1 PLURAL -> PLORO +1 PLURAL -> PEARL +1 PLEASANCE -> PLEASANTS +1 PLEA -> PLEAD +1 PLATONISTS -> PLATINISTS +1 PLAITS -> PLATES +1 PLACE -> PLACES +1 PIERC'D -> PIERCED +1 PICK -> PIG +1 PICK -> PIC +1 PHILANTHROPIES -> ANTHROPIES +1 PHILADELPHIAN -> PHILADELPHIA +1 PHAEDRUS -> FEATURES +1 PH -> P +1 PET -> PETALKAN +1 PERVERTERS -> PERVERTIVES +1 PERSON -> PERSONAL +1 PERCHES -> PURCHASE +1 PEGRENNE -> PEGRIN +1 PEGRE -> PEG +1 PEASE -> PEAS +1 PEACE -> PIECE +1 PAUL -> POLITICS +1 PAUL -> PAW +1 PATIENTS -> PATIENCE +1 PATIENCE -> PATIENT +1 PATCH -> PAT +1 PASSAGE -> PASSAGEWAY +1 PASCHAL -> PASSION +1 PARTICLES -> PARTICLE +1 PARSONS -> PARSON +1 PAROQUET -> PARRIQUET +1 PARLOR -> PARLOUR +1 PARASITES -> PARRICIDES +1 PARALLELOGRAM -> PARALLELLOGRAM +1 PAPAL -> PEPPEL +1 PANTS -> HANDS +1 PALATE -> PALLET +1 OVER -> OF +1 OUTRAGE -> OUTRAGED +1 OUR -> HER +1 OUR -> A +1 OUGHTER -> ORDERS +1 OTTLEY'S -> OAKLEIGHS +1 OTHERWORLDLY -> OTHER +1 OTHERS -> OTHER +1 OTHER -> OTTER +1 OSH -> ID +1 OSAGE -> O +1 ORCHARD -> ARCHER +1 ORANGE -> SAGE +1 OR -> OF +1 OR -> ARE +1 OR -> A +1 OPHELIA -> OF +1 ONTO -> ON +1 ONLY -> OMER +1 ONE -> SWUNG +1 ON -> ANOTHER +1 ON -> ANGULATIONS +1 OLIVE'S -> ALL +1 OLIVE -> ALAP +1 OLAF -> OLOFF +1 OH -> I'LL +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICER'S +1 OFFICERS -> OFFICER +1 OFFENSES -> OFFENCES +1 OFF -> OPT +1 OF -> IS +1 OF -> HAVE +1 OF -> BANDS +1 OCCUPANTS -> OCCUPANT +1 OBSERVED -> OBSERVE +1 OAKS -> YOKES +1 O'ER -> OR +1 O -> A +1 NOW -> NO +1 NOVEL'S -> NOVELS +1 NOUGHT -> NAUGHT +1 NOTTINGHAM -> AUTTINGHAM +1 NOTTINGHAM -> ARE +1 NOTHIN -> NOTHING +1 NOT -> NOW +1 NOT -> NOTHING +1 NORTHWARDS -> NORTHWARD +1 NORTHERNERS -> DERPOOL +1 NORTH -> NORTHWEST +1 NOR -> OR +1 NOON -> NOONK +1 NO -> TODDY +1 NINE -> NOT +1 NIGHTFALL -> NIGHT +1 NEWCOMER -> NEW +1 NEW -> NEWBORN +1 NEVERBEND -> NEVERS +1 NEVERBEND -> NEVERBEIN +1 NEVERBEND -> NEVERBAND +1 NET -> NED +1 NELLY -> NELLIE +1 NEIGHBOUR -> NEIGHBOR +1 NEIGHBORHOOD -> NEIGHBOURHOOD +1 NEIGHBOR -> NEIGHBOUR +1 NECK -> NET +1 NEARER -> NEAR +1 NE'ER -> NEVER +1 NATTY -> NANNIE +1 NARES -> NEAR'S +1 NAOMI -> THEY +1 NAOMI -> NOW +1 NAOMI -> NAY +1 NAMED -> NAME +1 N -> THAN +1 MY -> I +1 MY -> BY +1 MUMMERIES -> MEMORIES +1 MOUNTED -> MOUNTAIN +1 MORNIN -> MORNING +1 MORMONS -> MORE +1 MORMONISM -> WARMONISM +1 MORE'S -> NOR'S +1 MONTMARTRE -> MOUNT +1 MONTMARTRE -> MONTMARTRA +1 MONTFICHET'S -> MONTFICHE +1 MONTFICHET -> MONTFICHE 
+1 MONTFICHET -> MARTFICHE +1 MOMBI -> MUMBIE +1 MOLDED -> MOULDED +1 MOHICAN -> MOHICANS +1 MO -> MOLE +1 MISTS -> MIST +1 MISTER -> THIS +1 MISTER -> MISS +1 MIST -> MISTS +1 MISSOURIANS -> MISSOURIENS +1 MISS -> MISTER +1 MISDEMEANOR -> MISDEMEANOUR +1 MINE -> MIND +1 MILLION'D -> MILLIONED +1 MILLIMETER -> MILLIMETRE +1 MILLIGRAM -> MILAGRAM +1 MILITATED -> MITIGATED +1 MILES -> MYLES +1 METERS -> METRES +1 MESSRS -> MESSIERS +1 MESS -> MASTS +1 MERSEY -> MERCY +1 MERRY -> MERRIED +1 MERGANSER -> MERGANCER +1 MERCHISTON -> MURCHESON +1 MEN -> AMEN +1 MEET -> MET +1 MEALYBACK -> MEALLY +1 MEADOWCROFT -> MEDICROFT +1 MAY -> IS +1 MAUSOLEUM -> MUSOLEUM +1 MATE -> MADE +1 MASTERS -> MASTER +1 MASTER'S -> MASTERS +1 MARVELOUS -> MARVELLOUS +1 MARSHALLED -> MARSHALED +1 MARIVAUX -> MARY +1 MARIVAUX -> MARAVAUX +1 MARGOLOTTE -> MARGOLOT +1 MARAIS -> MARAY +1 MANY -> MEN +1 MANIFESTED -> MANIFEST +1 MAINHALL -> MEANHAVED +1 MAID -> MAIDEN +1 MADAM -> MADAME +1 MACDONALDS -> MC +1 MAC -> MICARTLE +1 LUTHER'S -> LUTHERS +1 LURE -> LOWER +1 LULLS -> LOLLS +1 LUIS -> LOUIS +1 LUBRICATE -> LUBRICADE +1 LOWER -> LOWERED +1 LOVE'S -> LOVES +1 LOUIS -> LOUISE +1 LOU'S -> LOOSE +1 LORNE -> LORN +1 LOOK -> LUCK +1 LOGARITHMS -> LOGARTHEMS +1 LOCRIS -> LOCHRIS +1 LOAD -> LOWED +1 LITER -> LATER +1 LINK'D -> LINKED +1 LINE -> LIE +1 LILLYS -> LILY'S +1 LILLY -> LILY +1 LILBURN -> LITTLE +1 LIGHT -> WRITE +1 LETS -> THAT'S +1 LESSER -> LESS +1 LEOCADIA'S -> LUCADIUS +1 LEOCADIA'S -> LOCATEUS +1 LEOCADIA -> LOU +1 LEOCADIA -> LOQUES +1 LEOCADIA -> LOCATIA +1 LEOCADIA -> LOCALIA +1 LEOCADI -> LUCADIA +1 LEND -> LINEN +1 LEGALITY -> LOGALITY +1 LEFRANK -> LE +1 LECOMPTE -> LECOMTE +1 LECOMPTE -> LE +1 LEAVING -> LEAPING +1 LEASED -> LEAST +1 LEADS -> LEAVES +1 LE -> LAURY +1 LARKSPUR -> LARKSBURGH +1 LARKSPUR -> LARKSBURG +1 LANTHORN -> LANTERN +1 LAND -> LANDA +1 LAMBENT -> LAMMA +1 LALLIE -> LILY +1 LAKE -> LEEK +1 LAID -> LADEN +1 LAD -> WELL +1 LABOUR -> LABOR +1 KNOW -> KNOWS +1 KNEED -> NEED +1 KNEE -> KNEEP +1 KNAVE -> NAVE +1 KIRTLAND -> CURTLIN +1 KINGDOMS -> KINGDOM'S +1 KING -> KING'S +1 KICK -> KICKAPOOS +1 KESWICK -> KEZWICK +1 KEOGH -> KIEV +1 KAFFAR'S -> KAFFIRS +1 KAFFAR'S -> KAFFIR'S +1 JUS -> JUST +1 JOHN -> JOHNNIEAUGO +1 JEWELER'S -> JEWELLERS +1 JAW -> JOB +1 JASPER -> JAPSER +1 JAIL -> DRALE +1 JACK -> JACKKNIFE +1 ITS -> IT'S +1 IT'S -> TO +1 IT -> YOU +1 IT -> TO +1 IT -> IT'LL +1 IT -> HE +1 IT -> AND +1 IS -> IT'S +1 IS -> IT +1 IS -> HIS +1 IRON'S -> IRONS +1 INVENTORS -> IN +1 INTRENCHMENT -> ENTRENCHMENT +1 INTERESTS -> ENTRANCE +1 INTENTS -> INTENSE +1 INTENT -> AND +1 INSURRECTIONISTS -> INSURRECTIONOUS +1 INNERLOCHY -> INERLOCHY +1 INNERLOCHY -> IN +1 INFANTS -> INFANT'S +1 INFANTILE -> INVENTILE +1 INCULCATED -> INCALCATED +1 INCLOSED -> ENCLOSED +1 INCERTAINTY -> IN +1 INACTION -> AN +1 IN -> ON +1 IN -> OF +1 IN -> INIMITIA +1 IN -> IMPERFECT +1 IMPRESSES -> IMPRESS +1 IMPRESSED -> IMPRESS +1 IMPRESS'D -> IMPRESSED +1 IMPEARLED -> IMPELLED +1 IMMATURE -> IMMATEUR +1 IKE -> LIKE +1 IF -> OF +1 IF -> AT +1 IDIOSYNCRATICALLY -> IDIOS +1 ICHTHYOSAURUS -> ITHUS +1 ICHTHYOSAURUS -> INHEOSORIS +1 ICHTHYOSAURUS -> IKESORIS +1 I'M -> ON +1 I'LL -> I +1 I'LL -> HOW +1 I -> SOUND +1 HYDRAS -> HYDRAST +1 HUSBAND -> HUSBA +1 HUNTLEY -> HUNTLY +1 HUMID -> HUMAN +1 HOUSECLEANING -> HOUSE +1 HOTBED -> HOT +1 HOST -> HOSE +1 HORTON -> WHARTON +1 HORSEPLAY -> HORSE +1 HORACE -> HORNS +1 HOPES -> HELPS +1 HOPE -> OPIEVIL +1 HONOURABLY -> HONORABLY +1 HONOURABLE -> HONORABLE +1 HONOUR -> HONOR 
+1 HONORS -> HONOURS +1 HONORABLE -> HONOURABLE +1 HONORABLE -> HONED +1 HOLOCAUST -> HOLOCOST +1 HOLD -> ALL +1 HIT -> HID +1 HIM -> LINE +1 HILDA'S -> HELDA'S +1 HILDA -> HELDA +1 HIGHEST -> HACITY +1 HIDALGO -> HAD +1 HETTY -> HETTY'S +1 HERE -> THERE +1 HERACLEITUS -> HERACLITUS +1 HER -> THE +1 HER -> THAT +1 HENCHMEN -> HENCHMAN +1 HEN -> HANDLED +1 HELPED -> SELF +1 HELLO -> HALLO +1 HELD -> HUTTED +1 HEDGES -> ORANGES +1 HEART'S -> HEARTSEASE +1 HEART -> HARD +1 HEAR -> SEE +1 HEAR -> HERE +1 HEAD -> EDMOST +1 HE'S -> IS +1 HE'D -> HE +1 HE -> SHE +1 HE -> HIS +1 HAZEWRAPPED -> HAYES +1 HAY -> HEY +1 HAWTREY -> HALTREE +1 HAWK -> HOT +1 HAVING -> HEAVEN +1 HAVE -> HAS +1 HAVE -> HALF +1 HAS -> IS +1 HAS -> AS +1 HARTS -> HEARTS +1 HARMONIZED -> HARMONIZE +1 HARKENED -> HEARKENED +1 HARBORING -> HARBOURING +1 HARANGUE -> HURRY +1 HARALD -> HAROLD +1 HAPPEN -> HAPPENED +1 HANNA -> HAD +1 HANGINGS -> HANGING +1 HANDS -> HANDSOME +1 HAMLET -> PANLESS +1 HAM -> HIM +1 HALLOA -> HULLO +1 HAL -> HELLO +1 HAKON -> HOCKIN +1 HAIRDRESSER -> HAIR +1 HAIR -> AIR +1 HAD -> NOT +1 HAD -> IS +1 HAD -> HEAD +1 HAD -> HAVE +1 HAD -> AT +1 GUISE -> SKIES +1 GUESTS -> GUEST +1 GUESTS -> GUESS +1 GUEST -> GUESTS +1 GROWS -> GROVES +1 GRINGO -> GRENGO +1 GRIEFS -> GREEDS +1 GREY'S -> GRAY'S +1 GREEING -> GREEN +1 GREAT -> GRATEFUL +1 GRAY -> GREY +1 GRAPEVINE -> GRAPE +1 GRAND -> GREAT +1 GRAMOPHONES -> GRAMIPHONES +1 GRAMOPHONE -> GRAMMAPHONE +1 GRAM -> GRAHAM +1 GRADES -> GRATES +1 GOVERNMENT -> GOVERNOR +1 GOVERNED -> GOVERN +1 GOOBERS -> GOULD +1 GOAT'S -> GOATS +1 GOAT -> BOAT +1 GIVE -> KIVED +1 GIVE -> GAVE +1 GIRARD -> GERARD +1 GILCHRIST'S -> GILCHER'S +1 GIER -> GEAR +1 GIAOURS -> GUYRS +1 GHIP -> GIP +1 GESTATION -> JUSTATION +1 GEOFFREY'S -> JEFFREY'S +1 GEOFFREY -> JEFFREY +1 GENTLEMEN -> GENTLEMAN +1 GENERALLY -> GERALLY +1 GENERAL -> GENERALSHIP +1 GAYLY -> GAILY +1 GARDENS -> GARDEN'S +1 GAMEWELL -> GAME +1 GALLOWSNESS -> GALLOW'S +1 FUTURISTIC -> FUTURE +1 FULNESS -> FULLNESS +1 FRISKILY -> FRISKLY +1 FRIEND -> BRAND +1 FRANCS -> FRANKS +1 FORWARDED -> FOEEDED +1 FORMALLY -> FORMERLY +1 FOREVER -> FOR +1 FORBES'S -> FORTS +1 FOR -> WITH +1 FOR -> FOREVER +1 FOR -> FIR +1 FOR -> FALLING +1 FOLLOWED -> FOWLED +1 FLUFFINOSE -> FLAPHANO'S +1 FLOUR -> FLOWER +1 FLIGHT -> FIGHT +1 FITZOOTH'S -> FITTOOTH'S +1 FIRS -> FURS +1 FIREBUGS -> FIRE +1 FIREBALL -> FIRE +1 FINE -> FIND +1 FIND -> FIVE +1 FILL -> FILLED +1 FIGHTER -> FRIGHTER +1 FETE -> FIGHT +1 FERDINANDO -> FERDINAND +1 FELT -> FIL +1 FELT -> FELLED +1 FEES -> BEES +1 FEELIN'S -> FEELINS +1 FEARED -> FEAR +1 FARTHEST -> FURTHEST +1 FARRAGUT -> FERRAGUT +1 FAR -> FARTHER +1 FALLEN -> FALL +1 FALCON -> MONGOO'S +1 FAIR -> FAIREST +1 EYE -> I +1 EVERYDAY -> EVERY +1 EVER -> ARROW +1 EVENING -> EVEN +1 EVENIN'S -> EVENINGS +1 EVA -> EITHER +1 ESTATE -> STATE +1 ESTAFANIA -> FANIA +1 ESTAFANIA -> ESTAFFANIA +1 ESPRIT -> A +1 ESPECIAL -> SPECIAL +1 ESCHEATED -> ISTIATED +1 ER -> A +1 ENTRUSTING -> INTRUSTING +1 ENTHUSIASTS -> ENTHUSIAST +1 ENTHRALMENT -> ENTHRALIMENT +1 ENTER -> INTER +1 ENTER -> ENTERED +1 ENSURE -> INSURE +1 ENQUIRIES -> INQUIRIES +1 ENQUIRED -> INQUIRED +1 ENQUIRE -> INQUIRE +1 ENDEAVOURED -> ENDEAVORED +1 ENDEAVOR -> ENDEAVOUR +1 EMISSIONS -> ADMISSIONS +1 EMIL -> AMY +1 EMIL -> AM +1 EMIGRATION -> IMMIGRATION +1 EMIGRANT -> IMMIGRANT +1 EMERG'D -> EMERGED +1 EM -> THEM +1 ELSINORE -> ELZINOR +1 ELMO'S -> ABLE'S +1 ELECT -> ELEC +1 ELCHO -> ELKO +1 ELABORATE -> CELEBRATE +1 EFFECTED -> AFFECTED +1 EDITION -> ADDITION 
+1 EARSHOT -> EAR +1 E -> EEN +1 E -> EA +1 DYKES -> DIKES +1 DURING -> DREWING +1 DUNNO -> DON'T +1 DUMPY -> DON'T +1 DUMAS -> DE +1 DUERER -> DURE +1 DRUGGIST'S -> DRUGGIST +1 DROPIDAS -> TROPIDAS +1 DRIPPING -> TRIPPING +1 DRAWS -> DRAWLS +1 DOWN -> DOWNSTAIRS +1 DOVES -> DOVE'S +1 DOUZE -> DUSPORT +1 DOOR -> DOORSTEP +1 DONNITHORNE -> DONNYTHORNE +1 DONATISTS -> DONATIST +1 DONA -> DORNESTE +1 DON'T -> A +1 DOLL -> DAL +1 DOCTRESS -> DOCTRIS +1 DISTRICTS -> DISTRICT +1 DISMAYED -> DISMAYEDESTAFHANIA +1 DISHONOURED -> DISHONORED +1 DISCOLOURED -> DISCOLORED +1 DINAH'S -> DYNAS +1 DINAH -> DINA +1 DIFFERENTIATION -> DIFFUREATION +1 DIFFERENCES -> DIFFERENCE +1 DID -> DEAD +1 DIATRIBE -> DIETRIBE +1 DIAS -> DAIS +1 DIALOGUES -> DIALECTS +1 DEW -> DO +1 DELICATE -> DELEGATE +1 DELIBERATIVE -> DELIBERATE +1 DELIA -> GILLIA +1 DELIA -> DELIGHT +1 DEFINED -> THE +1 DEFINE -> TO +1 DEDALUS -> DEEDOLUS +1 DEDALUS -> DAEDALUS +1 DEDALOS -> DELOS +1 DECENCY -> DECENCIES +1 DECEIVING -> SEATING +1 DE -> GRAFT +1 DE -> DETONICHAUCH +1 DAWN'S -> DAWNS +1 DAIRY -> DEARIE +1 CYN -> SIN +1 CURVED -> CARVED +1 CRYSTAL -> CRISTEL +1 CROSSTREES -> CROSS +1 CRESSWELLS -> CRUSTWELLS +1 CREIGHTON -> CRIGHTON +1 CREIGHTON -> CREDON +1 CRASWELLERS -> CRESTWELLERS +1 CRASWELLER -> CRUSWELLER +1 COURT'S -> COURTS +1 COUNTRY'S -> COUNTRY +1 COUNSELS -> COUNCILS +1 COUNSELLED -> COUNSEL +1 COULD -> COULDN'T +1 COSTS -> COST +1 CORRELATES -> COROTS +1 CORN -> CORNIERS +1 CORALIE -> CORLIE +1 COOK -> COPE +1 CONTROL -> CONTROLL +1 CONTI -> KANTI +1 CONTAINED -> CONTAINING +1 CONTACT -> CONDUCT +1 CONSTANTINE -> KONSTANTINE +1 CONSIDERATE -> CONSIDER +1 CONSID'BLE -> CONSIDERABLE +1 CONSCIOUS -> CONSCIENCE +1 CONQUERIN -> CONQUERING +1 CONJURER -> CONJUROR +1 CONDENSE -> CONTENSED +1 COMPOSSER -> COMPOSSIBLE +1 COMPOSE -> COMPOSED +1 COMMENTATORS -> COMMON +1 COMMANDMENTS -> COMMANDS +1 COMING -> COMMON +1 COLOURS -> COLORS +1 COLORS -> COLOURS +1 COLORS -> COLLARS +1 COLORIST -> COLORLESS +1 COLORIST -> CHOLERIST +1 COLOR -> COLOUR +1 COLD -> CALLED +1 COAL -> CO +1 CO -> COEXIST +1 CLEW -> CLUE +1 CLAUSE -> CLAS +1 CIVET -> SAVEETTE +1 CITADELLED -> CITADELED +1 CIGARETTE -> SICK +1 CHRISTIAN -> CHISH +1 CHRISTAIN -> CHRISTIAN +1 CHINGACHGOOK -> CHINGACHOOK +1 CHIAROSCURISTS -> KIERUSCURISTS +1 CHIAROSCURIST -> CUIRASCURISTS +1 CHEROOT -> TRUTH +1 CHECKER -> CHECKERBOARD +1 CHATTERBOX -> CHATTER +1 CHARENTE -> NECHERANT +1 CHARENTE -> CHART +1 CHARENTE -> AND +1 CHARACTERISTIC -> CORRECTORISTIC +1 CHANGED -> CHANGE +1 CHANGE -> CHANGES +1 CHANGE -> CHANGED +1 CENTRED -> SENATE +1 CENTER -> CENTRE +1 CENDENARIES -> SENDIARIES +1 CEASD -> CEASED +1 CAUGHT -> THOUGHT +1 CAT -> HAT +1 CASTS -> CAST +1 CASE -> GASE +1 CARPACCIO'S -> CARPATIUS +1 CAPLESS -> CAPLICE +1 CANVASS -> CANVAS +1 CANDLE -> CANDLELIGHT +1 CAN -> COULD +1 CAN -> CANNOT +1 CALDWELL -> CAULDWELL +1 BYE -> BY +1 BUTTED -> BUDDED +1 BUT -> DO +1 BUT -> BY +1 BUT -> BETTER +1 BURN -> BURNE +1 BURGOYNE -> WERE +1 BUNNIT -> BUNNITT +1 BUL -> BOL +1 BUCHANAN -> YOU +1 BROWN -> OBROWN +1 BROTHER -> BRETHREN +1 BRITANNULISTS -> BRITAIN +1 BRISK -> BRACE +1 BRINGING -> RINGING +1 BREAKFAS -> BREAKFAST +1 BREAD -> ABREAD +1 BRANDS -> BRINGS +1 BRANCH -> RANCH +1 BRAGELONNE -> BRAGGELON +1 BRAGELONNE -> BRAGELON +1 BRACTON'S -> BROCKTON'S +1 BOX -> BOXWOMEN +1 BOTANY -> BARTANY +1 BOTANICAL -> BATTANICAL +1 BORDERS -> BORDER +1 BOOKKEEPER -> BITKEEPER +1 BOLLS -> BOWLS +1 BOAR -> BOREHOUND +1 BLUESKINS -> BLUESKIN +1 BLUE -> BLUESTS +1 BLESSINGS -> BLESSING +1 
BLASTS -> BLAST +1 BIT -> GOOD +1 BILLYGOAT -> SPILLIGOAT +1 BILLED -> BUILD +1 BERGSON -> BERKSON +1 BERGSON -> BERGIN +1 BENCH -> PENCH +1 BEING -> MEAN +1 BEIN -> BEING +1 BEHAVIOURIST -> BEHAVIOURISTS +1 BEHAVIOURIST -> BEHAVIORIST +1 BEG -> BEGGED +1 BEFORE -> FOR +1 BEFIT -> BE +1 BEFAL -> BEFALL +1 BEEDER -> READER +1 BEEBE -> B +1 BEDIMMED -> BEDEMNED +1 BEATER -> PETER +1 BEAR -> BERYL +1 BEACHED -> BEECHED +1 BATTLEAX -> ADELAX +1 BASKET -> BASCULADES +1 BALAAM'S -> BAYLIM'S +1 BAINS -> BANDERE +1 BADGES -> BADGERS +1 BADAUDERIE -> BADR'D +1 BABIRUSA -> BABRUSA +1 AWHILE -> A +1 AWARE -> WHERE +1 AUNT -> AND +1 AU -> OCCOURANT +1 ATTITUDE -> SATITUDE +1 ATTENDANTS -> ATTENDANCE +1 ATTENDANCE -> ATTENDANTS +1 ATMOSPHERIC -> ATMOSPHERE +1 ATHOLEMEN -> ETHEL +1 ATHLETE -> ADETE +1 ATHENAIS -> ETHNEE +1 ATHENAIS -> ETHINAY +1 ATCHISON -> ATTITSON +1 AT -> SAID +1 AT -> IT +1 AT -> IN +1 AT -> AND +1 ASTOR -> ASTRO +1 ASSEMBLED -> A +1 ASCENDENCY -> ASCENDANCY +1 AS -> TO +1 AS -> IS +1 ARRONDISSEMENT -> ARE +1 ARRIVING -> RIVING +1 ARRESTS -> ARREST +1 ARMED -> ARM +1 ARE -> OR +1 ARE -> ALL +1 ARDOUR -> ARDOR +1 ARC -> ARK +1 APPROVES -> ME +1 APPRENTICE -> APPRENTICED +1 APPEALED -> APPEAL +1 ANYWHERE -> MANY +1 ANYMORE -> ANY +1 ANY -> ANYTHING +1 ANTEDATING -> ANTETING +1 ANTE -> ANTEROOM +1 ANSWERD -> ANSWERED +1 ANNALS -> ANNAL +1 ANGELS -> ANGEL +1 ANDERS -> ANDREW'S +1 ANDERS -> ANDRES +1 ANDERS -> ANDRE'S +1 ANDERS -> ANDRE +1 ANDELLA -> ANNE +1 ANDELLA -> ANDDELA +1 ANDELLA -> AND +1 ANDELLA -> AMDELLA +1 AND -> ONE +1 AND -> INTO +1 AND -> INDEED +1 AND -> ENTHRIBING +1 AND -> AT +1 AND -> AM +1 AND -> A +1 ANAXAGORAS -> AN +1 AN -> ON +1 AN -> IN +1 AMPHITHEATER -> AMPHITHEATRE +1 AMASS -> A +1 AM -> I'M +1 ALTERNATIVE -> ALL +1 ALREADY -> ALL +1 ALLUVION -> ALLUVIAN +1 ALL -> ALREADY +1 ALEXANDRA -> ALEXANDER +1 ALBANS -> ALBAN'S +1 AIR -> HEIR +1 AIGNAN -> DAN +1 AID -> AIDS +1 AH -> A +1 AFTERDECK -> AFTER +1 AFFRIGHTENED -> A +1 AFFILIATED -> ARE +1 AFFECT -> EFFECT +1 ADVENTURE -> ADVENTURER +1 ADONA -> ADONNA +1 ACTOR -> ACTOR'S +1 ACKNOWLEDGEMENT -> ACKNOWLEDGMENT +1 ACCOUTREMENTS -> ACCUTMENTS +1 ABOLITIONISTS -> ABOLITIONIST +1 ABDUCTION -> ADOCTION +1 ABBE -> ABBEY +1 A -> UPON +1 A -> UNNOTTINGHAM +1 A -> UNNOTABLY +1 A -> TO +1 A -> HER +1 A -> HE +1 A -> ESPECIAL +1 A -> AWAY +1 A -> ATTORIAN +1 A -> AS +1 A -> AN +1 A -> ACCORD + +DELETIONS: count ref +9 A +7 IS +7 AND +4 TO +4 IN +3 OF +3 IT +2 YOU +2 YARD +2 WILL +2 WAY +2 TOGETHER +2 THE +2 STAIRS +2 N +2 ME +2 CHARENTE +2 AM +1 YOU'LL +1 YE +1 WOMEN +1 WITH +1 WEST +1 WELL +1 WE +1 VOUCHED +1 VINES +1 UNC +1 UD +1 TRILOGIES +1 TORY +1 TONNAY +1 THROUGH +1 THRIVING +1 THINKING +1 THING +1 TAKES +1 T +1 STONES +1 STEP +1 SPECIAL +1 SHIP +1 SEERS +1 ROOM +1 ROI +1 READY +1 PROVIDING +1 PORTES +1 POPHAM +1 POOS +1 POND +1 PERFECT +1 OTHER +1 OR +1 ONLY +1 ON +1 OLD +1 NUMIDIA +1 NOTABLY +1 NOT +1 MUCH +1 MOST +1 MONGOOSE +1 MER +1 MEND +1 MELL +1 LORD +1 LOOK +1 LOADS +1 LO +1 LIGHT +1 L +1 KNIFE +1 JAGO +1 HUMPH +1 HOUND +1 HIS +1 HIM +1 HERSELF +1 HAVE +1 HAD +1 GRAF +1 GONE +1 GOAT +1 GALATIANS +1 FIND +1 FEELS +1 FEEL +1 EXIST +1 EVER +1 ESTAFANIA +1 ENTER +1 EASE +1 EARS +1 E +1 DO +1 DENSITY +1 DEED +1 DE +1 DARK +1 D'ESTE +1 CUTTERS +1 COURANT +1 COULDN'T +1 CLOTH +1 CHORD +1 CHARLES +1 CAPACITY +1 CAME +1 C +1 BUL +1 BOYS +1 BORN +1 BOARD +1 BIT +1 BIRDS +1 B +1 AT +1 AS +1 ARDLE +1 ALREADY +1 AGAIN + +INSERTIONS: count hyp +13 A +11 ONE +9 THE +7 IS +5 AND +4 IT +4 IN +4 HAVE +4 DAY +4 ARE +4 AM +3 
THAT +3 OF +3 ME +3 HALL +3 AXE +2 WORTH +2 WILL +2 TIME +2 OTHER +2 ON +2 NULA +2 NIGHT +2 ILL +2 HIS +2 FOR +2 FIND +2 AS +1 ZAY +1 WRAPPED +1 WORLDLY +1 WILLIAM +1 WHILE +1 WHERE +1 WAY +1 WAS +1 WARILY +1 VO +1 VINE +1 VIEW +1 VENORS +1 ULYSTS +1 TURNED +1 TREES +1 TOP +1 TOLL +1 TO +1 THESE +1 TENT +1 TEACHERS +1 STICK +1 SPREE +1 SIMPLED +1 SHOT +1 RED +1 READY +1 PROOFS +1 POOL +1 PLAY +1 OWE +1 OVER +1 ONLY +1 OCCAS +1 OCCAIA +1 NOSED +1 NOSE +1 NOR +1 NICE +1 MORE +1 MILL +1 MIKE +1 MEN'S +1 MEN +1 MAU +1 MASS +1 MARTRE +1 LOCKY +1 LOCATED +1 LIKE +1 LED +1 LEAF +1 KNOW +1 KATYA +1 I +1 HYMN +1 HIRESUME +1 HIM +1 HE +1 HAD +1 GROUND +1 GREE +1 GOING +1 FRIGHTENED +1 FREEZED +1 FRANK +1 FIT +1 FILLIOTTED +1 FALL +1 EXAGGERUS +1 EVER +1 EULO +1 DRESSER +1 DONALDS +1 DOC +1 DESSIMA +1 DERPOOL +1 DELLA +1 DELA +1 DECK +1 CRADICALLY +1 COMTE +1 COMER +1 CLEANING +1 CHEER +1 CERTAINTY +1 CASE +1 CANNOT +1 BURN +1 BUGS +1 BOX +1 BOOKS +1 BEGIRT +1 BED +1 BE +1 BALL +1 BACK +1 AX +1 AT +1 AN +1 ALGO +1 ACTION + +PER-WORD STATS: word corr tot_errs count_in_ref count_in_hyp +AND 1739 87 1787 1778 +A 1127 85 1166 1173 +THE 3437 73 3461 3486 +IN 876 72 905 919 +IS 449 33 468 463 +OF 1788 31 1799 1808 +TO 1329 30 1340 1348 +I 708 25 711 730 +AN 155 21 165 166 +ONE 188 18 191 203 +IT 548 18 558 556 +O 4 17 14 11 +THAT 603 16 610 612 +AS 378 15 383 388 +HE 522 14 526 532 +ARE 178 14 182 188 +HAD 315 12 321 321 +ON 276 11 279 284 +YOU 416 10 418 424 +WERE 183 10 186 190 +THIS 255 10 263 257 +MEN 57 10 62 62 +AT 279 10 284 284 +WILL 139 9 143 144 +TWO 64 9 71 66 +THEIR 167 9 173 170 +OH 28 9 33 32 +I'M 27 9 33 30 +HAVE 212 9 215 218 +FOR 416 9 420 421 +AM 56 9 59 62 +WHERE 45 8 49 49 +THEY 205 8 209 209 +THERE 136 8 137 143 +RODOLFO 0 8 8 0 +ANY 84 8 85 91 +ANDERS 3 8 11 3 +ALL 222 8 225 227 +WHEN 128 7 133 130 +WAS 576 7 579 580 +TIMAEUS 2 7 9 2 +THAN 85 7 88 89 +OR 172 7 176 175 +MAN 64 7 67 68 +HER 319 7 324 321 +HAS 104 7 108 107 +WITH 421 6 424 424 +THEN 121 6 125 123 +OTHER 63 6 65 67 +ME 182 6 184 186 +LEOCADIA 0 6 6 0 +I'VE 17 6 23 17 +HIM 213 6 215 217 +DE 5 6 10 6 +ANYONE 0 6 6 0 +WOULD 139 5 141 142 +WELL 72 5 75 74 +WE 150 5 152 153 +TOO 60 5 61 64 +TONNAY 0 5 5 0 +THEL 0 5 5 0 +SYLVIA 0 5 0 5 +SOME 86 5 87 90 +SOLMES 0 5 0 5 +SOAMES 0 5 5 0 +SILVIA 0 5 5 0 +SHE 279 5 281 282 +NOW 91 5 94 93 +NOT 335 5 338 337 +NEW 34 5 35 38 +JAGO 0 5 5 0 +ITS 81 5 82 85 +IT'S 26 5 29 28 +HIS 472 5 473 476 +GALATIANS 1 5 6 1 +FIND 20 5 22 23 +CHARENTE 0 5 5 0 +BUT 339 5 344 339 +BATTLEAX 0 5 5 0 +WAY 71 4 73 73 +TIME 85 4 87 87 +THROUGH 40 4 42 42 +THEM 119 4 122 120 +RED 18 4 19 21 +PRACTISE 1 4 4 2 +PRACTICE 6 4 7 9 +OUR 79 4 81 81 +NO 166 4 169 167 +METRE 0 4 0 4 +METER 7 4 11 7 +MAINHALL 0 4 4 0 +LEAVENWORTH 0 4 4 0 +KNOW 75 4 76 78 +INTO 102 4 104 104 +HELLO 1 4 2 4 +HE'S 5 4 9 5 +GALLATIONS 0 4 0 4 +EVERY 31 4 31 35 +EMIL 0 4 4 0 +DAY 50 4 50 54 +CRESSWELL 0 4 4 0 +BRITAIN 1 4 1 5 +BATTLE 6 4 6 10 +ANDREWS 0 4 0 4 +ANDELLA 0 4 4 0 +YE 5 3 8 5 +XAVIER 0 3 3 0 +WOOD 3 3 4 5 +WHOSE 13 3 14 15 +WHO 153 3 154 155 +WHILE 34 3 34 37 +WE'LL 4 3 6 5 +VALLIERS 0 3 0 3 +VALLIERE 0 3 3 0 +TOWARDS 16 3 19 16 +TOWARD 8 3 8 11 +TODAY 0 3 3 0 +THOU 18 3 18 21 +THORKEL 0 3 3 0 +THEE 27 3 30 27 +THEATRE 2 3 2 5 +THEATER 0 3 3 0 +TABU 0 3 3 0 +SOMETIME 0 3 2 1 +SOLON 1 3 4 1 +SOLEMN 1 3 1 4 +SEEMED 29 3 30 31 +SEE 64 3 65 66 +SCHOOL 9 3 11 10 +RUDOLPHO 0 3 0 3 +ROUND 14 3 17 14 +REMAINED 4 3 6 5 +READY 9 3 10 11 +READ 16 3 18 17 +PLATONISTS 0 3 3 0 +ONLY 75 3 77 76 +OFFICERS 8 3 10 9 +NIGHT 24 3 24 27 +NEVERBEND 3 
3 6 3 +NAOMI 2 3 5 2 +N 1 3 4 1 +MISTER 46 3 48 47 +MAIN 3 3 3 6 +LE 0 3 1 2 +KAFFIR 0 3 0 3 +KAFFAR 0 3 3 0 +ICHTHYOSAURUS 0 3 3 0 +I'LL 12 3 14 13 +HONORABLE 0 3 2 1 +HOLLAND 0 3 0 3 +HOLLAN 0 3 3 0 +HALLO 0 3 2 1 +HALL 9 3 9 12 +GUESTS 3 3 5 4 +GREY 1 3 3 2 +GRAY 4 3 5 6 +FELL 16 3 16 19 +FAIR 6 3 7 8 +EVERYONE 0 3 3 0 +EVER 33 3 35 34 +ESTAFANIA 0 3 3 0 +ENTER 6 3 9 6 +E 0 3 3 0 +DON'T 37 3 38 39 +DO 93 3 94 95 +COURT 10 3 12 11 +COLOUR 0 3 2 1 +COLORS 1 3 3 2 +COLOR 9 3 10 11 +CHANGE 7 3 9 8 +BY 248 3 248 251 +BRITANNULA 0 3 3 0 +BANNISTER 0 3 3 0 +BANISTER 0 3 0 3 +AXE 1 3 1 4 +AROUND 12 3 12 15 +ANOTHER 34 3 36 35 +ALREADY 20 3 22 21 +ZAVIER 0 2 0 2 +YOU'RE 3 2 5 3 +YOU'LL 7 2 9 7 +YARD 3 2 5 3 +YAGO 0 2 0 2 +WYLDER 3 2 5 3 +WORTH 4 2 4 6 +WORSE 5 2 6 6 +WONDERING 1 2 2 2 +WON 2 2 2 4 +WILDER 0 2 0 2 +WIDOWS 0 2 0 2 +WIDOW 1 2 1 3 +WHOLLY 9 2 9 11 +WHITTAWS 0 2 2 0 +WHAT 112 2 113 113 +WHALE 2 2 4 2 +WESTPORT 0 2 2 0 +WEST 6 2 7 7 +WE'RE 1 2 3 1 +WANDERING 1 2 2 2 +VAPOURS 0 2 2 0 +VAPORS 0 2 0 2 +VANDERPOOL 0 2 2 0 +VAN 2 2 2 4 +UPON 93 2 94 94 +UP 108 2 110 108 +UNC 3 2 5 3 +TRAVELLING 0 2 0 2 +TRAVELLERS 0 2 0 2 +TRAVELING 1 2 3 1 +TORCOAL 0 2 0 2 +TONIGHT 0 2 2 0 +TOGETHER 14 2 16 14 +THOUGHT 53 2 54 54 +THOUGH 32 2 33 33 +THINKING 7 2 8 8 +THINK 52 2 52 54 +THESE 68 2 68 70 +THAT'S 13 2 14 14 +T 0 2 1 1 +SYMPOSIUM 0 2 2 0 +STATES 6 2 6 8 +STATE 26 2 27 27 +STAIRS 6 2 8 6 +SPECIAL 1 2 2 2 +SOUTHEY 0 2 2 0 +SOMEONE 1 2 3 1 +SO 196 2 197 197 +SINCE 24 2 25 25 +SIN 12 2 13 13 +SILENCE 7 2 7 9 +SIDE 22 2 23 23 +SHOULD 59 2 61 59 +SHOT 2 2 2 4 +SHIP 7 2 9 7 +SHE'S 4 2 6 4 +SEEM 11 2 13 11 +SEATING 1 2 2 2 +SAID 160 2 161 161 +REMAIN 5 2 6 6 +READER 1 2 1 3 +RE 0 2 1 1 +PUTTING 7 2 7 9 +PLURAL 0 2 2 0 +PLATANISTS 0 2 0 2 +PICK 1 2 3 1 +PERSON 12 2 13 13 +PAUL 13 2 15 13 +PATIENCE 1 2 2 2 +OVER 58 2 59 59 +NULA 0 2 0 2 +NOTTINGHAM 5 2 7 5 +NOTHING 33 2 33 35 +NOR 20 2 21 21 +NET 0 2 1 1 +NEO 1 2 3 1 +NEIGHBOUR 1 2 2 2 +NEIGHBOR 0 2 1 1 +MY 223 2 225 223 +MUNNY 0 2 2 0 +MORE 119 2 119 121 +MONTMARTRE 0 2 2 0 +MONTFICHET 7 2 9 7 +MONTFICHE 0 2 0 2 +MONEY 5 2 5 7 +MISTS 2 2 3 3 +MIST 4 2 5 5 +MISS 17 2 18 18 +MAY 55 2 56 56 +MASTERS 2 2 3 3 +MARSHALL 1 2 3 1 +MARSHAL 1 2 1 3 +MARIVAUX 0 2 2 0 +MANY 40 2 41 41 +LOWER 5 2 6 6 +LOUIS 1 2 2 2 +LOOK 30 2 32 30 +LINE 12 2 13 13 +LILY 2 2 2 4 +LIKE 105 2 105 107 +LIGHT 37 2 39 37 +LEVINWORTH 0 2 0 2 +LEVIN 0 2 0 2 +LEOCADIA'S 0 2 2 0 +LECOMPTE 0 2 2 0 +LARKSPUR 0 2 2 0 +KARL 0 2 0 2 +KAFFAR'S 0 2 2 0 +INNERLOCHY 0 2 2 0 +IMPRESSED 3 2 4 4 +IMPRESS 0 2 0 2 +ILL 6 2 6 8 +IF 129 2 131 129 +I'D 1 2 3 1 +HOT 3 2 3 5 +HOSTESS 1 2 3 1 +HOSTES 0 2 0 2 +HORSE 6 2 6 8 +HONOURABLE 2 2 3 3 +HOLY 1 2 3 1 +HOLBINE 0 2 0 2 +HOLBEIN 0 2 2 0 +HERE 69 2 70 70 +HEAR 18 2 20 18 +HEAD 35 2 36 36 +HANDS 16 2 17 17 +HAIR 5 2 6 6 +GUEST 3 2 4 4 +GREAT 73 2 74 74 +GOAT 5 2 7 5 +GIVE 28 2 30 28 +GILGRIST 0 2 0 2 +GILCHRIST 1 2 3 1 +GIAGO 0 2 0 2 +FOUNDED 3 2 5 3 +FOUND 21 2 21 23 +FOREVER 1 2 2 2 +FIRE 22 2 22 24 +FILLING 0 2 0 2 +FIGHT 3 2 3 5 +FELT 17 2 19 17 +FEELING 9 2 11 9 +FAVOURITE 1 2 1 3 +FAVORITE 2 2 4 2 +FALL 2 2 2 4 +FAIRVIEW 0 2 2 0 +ESPECIAL 0 2 1 1 +DISSENT 0 2 2 0 +DID 66 2 67 67 +DESCENT 2 2 2 4 +DERPOOL 0 2 0 2 +DELIA 0 2 2 0 +DEFINED 1 2 2 2 +DEDALUS 0 2 2 0 +CRITIUS 0 2 0 2 +CRITIAS 0 2 2 0 +CRESWELL 0 2 0 2 +CREIGHTON 0 2 2 0 +CRASWELL 0 2 0 2 +COURTYARD 4 2 4 6 +COULDN'T 5 2 6 6 +COULD 94 2 95 95 +COMMON 8 2 8 10 +COLOURS 0 2 1 1 +COLORIST 0 2 2 0 +CO 0 2 1 1 +CHRISTIAN 5 2 6 6 +CHEQUE 0 2 0 2 +CHECK 6 2 8 6 +CHASE 1 2 1 3 +CHANGED 5 2 6 6 +CHAISE 
0 2 2 0 +CASE 15 2 16 16 +CARL 0 2 2 0 +CANNOT 16 2 16 18 +CAN 64 2 66 64 +BURN 3 2 4 4 +BUL 0 2 2 0 +BREAK 3 2 3 5 +BRAKE 1 2 3 1 +BRAGELONNE 0 2 2 0 +BOYS 5 2 6 6 +BOX 7 2 8 8 +BIT 7 2 9 7 +BERGSON 0 2 2 0 +BEING 39 2 40 40 +BEHAVIOURIST 0 2 2 0 +BEHAVIOUR 0 2 2 0 +BEHAVIOR 0 2 0 2 +BE 314 2 314 316 +B 1 2 2 2 +AYE 0 2 2 0 +AY 0 2 2 0 +ATTENDANTS 0 2 1 1 +ATTENDANCE 0 2 1 1 +ATHENAIS 0 2 2 0 +AMYL 0 2 0 2 +ALTOGETHER 6 2 6 8 +AIR 24 2 25 25 +ZORAS 0 1 0 1 +ZORA'S 0 1 1 0 +ZORA 2 1 3 2 +ZOOF'S 1 1 2 1 +ZEF'S 0 1 0 1 +ZAY 0 1 0 1 +ZAVIOUR 0 1 0 1 +YOKES 0 1 0 1 +YEARNING 1 1 2 1 +YEARNIN 0 1 0 1 +WRITE 4 1 4 5 +WRAPPED 0 1 0 1 +WORST 3 1 4 3 +WORLDLY 0 1 0 1 +WORE 3 1 3 4 +WOODS 2 1 3 2 +WOODCUTTERS 0 1 0 1 +WOODBEGIRT 0 1 1 0 +WOMEN'S 1 1 1 2 +WOMEN 7 1 8 7 +WOMAN'S 1 1 2 1 +WITHES 0 1 1 0 +WINTER 4 1 5 4 +WINNER 0 1 0 1 +WIND 8 1 8 9 +WILLIAM 1 1 1 2 +WIFE 16 1 17 16 +WIDTHS 0 1 0 1 +WHO'S 1 1 2 1 +WHITTAWD 0 1 1 0 +WHITTAW 0 1 1 0 +WHITE 23 1 23 24 +WHIRLPOOL 1 1 2 1 +WHIPPED 1 1 2 1 +WHIP 0 1 0 1 +WHICH 216 1 216 217 +WHETHER 23 1 23 24 +WHERE'S 0 1 0 1 +WHATSOEVER 1 1 1 2 +WHATEVER 12 1 13 12 +WHAT'S 4 1 5 4 +WHARTON 0 1 0 1 +WESTWARD 1 1 1 2 +WESTMERE 0 1 1 0 +WESTMARE 0 1 0 1 +WELCOMED 0 1 1 0 +WELCOME 6 1 6 7 +WEEKLY 0 1 0 1 +WEDNESDAY 1 1 2 1 +WEATHER 5 1 6 5 +WEAR 4 1 5 4 +WEAKLY 0 1 1 0 +WE'VE 2 1 2 3 +WAVES 6 1 7 6 +WATRY 1 1 2 1 +WATRIE 0 1 0 1 +WATERMILL 0 1 1 0 +WATER 19 1 19 20 +WARMONISM 0 1 0 1 +WARLIKE 0 1 1 0 +WARILY 0 1 0 1 +WAR 5 1 5 6 +WAIL 0 1 0 1 +VOUCHED 0 1 1 0 +VOLTAIRE 0 1 1 0 +VOICE 17 1 18 17 +VO 0 1 0 1 +VISITORS 4 1 5 4 +VISITOR 2 1 2 3 +VINEYARD 0 1 0 1 +VINES 0 1 1 0 +VINE 0 1 0 1 +VILLIDESA 0 1 0 1 +VILLEROY 0 1 1 0 +VILLEROI 0 1 0 1 +VILLA 0 1 1 0 +VIGNETTE 0 1 1 0 +VIEW'S 0 1 0 1 +VIEW 2 1 2 3 +VICHAIRLESS 0 1 0 1 +VICARIOUS 3 1 4 3 +VIADUCT 0 1 1 0 +VIA 0 1 0 1 +VERY 82 1 83 82 +VERSE 1 1 2 1 +VERIMENT 0 1 0 1 +VENORS 0 1 0 1 +VEINS 0 1 0 1 +VEILS 1 1 1 2 +VAUDOIS 0 1 1 0 +VANES 0 1 1 0 +VALOR 2 1 3 2 +VALO 0 1 0 1 +VALES 2 1 3 2 +UTAH 1 1 2 1 +UPSTAIRS 3 1 3 4 +UNWARILY 0 1 1 0 +UNTO 2 1 3 2 +UNNOTTINGHAM 0 1 0 1 +UNNOTABLY 0 1 0 1 +UNLIKE 0 1 1 0 +UNDERGROUND 0 1 1 0 +UNCLENCHED 0 1 1 0 +UN 0 1 1 0 +ULYSTS 0 1 0 1 +UG 0 1 0 1 +UDOLPH 0 1 0 1 +UD 0 1 1 0 +TYPENIQUE 0 1 0 1 +TUTRILOGIES 0 1 0 1 +TURNOVER 0 1 1 0 +TURNED 21 1 21 22 +TURN 18 1 18 19 +TUPPENY 0 1 1 0 +TUPPENNY 0 1 0 1 +TUBS 0 1 0 1 +TRUTH 13 1 13 14 +TROUBLE 8 1 8 9 +TROT 4 1 5 4 +TROPIDAS 0 1 0 1 +TRIPPING 0 1 0 1 +TRILOGIES 0 1 1 0 +TREES 19 1 19 20 +TREDDLESTONE 0 1 0 1 +TREDDLESTON 0 1 1 0 +TREBLE 0 1 1 0 +TRAVELERS 0 1 1 0 +TRAVELER 0 1 1 0 +TOWNE 0 1 1 0 +TOWN 6 1 6 7 +TOWELLING 0 1 1 0 +TOWELINGS 0 1 0 1 +TOULD 0 1 1 0 +TOTTY 2 1 3 2 +TORY 0 1 1 0 +TORE 0 1 0 1 +TORCOLE 0 1 0 1 +TOPSY 0 1 0 1 +TOPS 3 1 4 3 +TOPEKA 0 1 1 0 +TOP 11 1 11 12 +TOOMS 0 1 1 0 +TONIET 0 1 0 1 +TONECHAU 0 1 0 1 +TON 0 1 0 1 +TOMBS 0 1 0 1 +TOLLED 0 1 0 1 +TOLL 0 1 0 1 +TOLD 31 1 32 31 +TOILETTE 0 1 1 0 +TOILET 0 1 0 1 +TODDY 0 1 0 1 +TIRESOME 1 1 2 1 +TINTORET 0 1 1 0 +TINTINT 0 1 1 0 +TINTARETTE 0 1 0 1 +TINTANT 0 1 0 1 +TIMIUS 0 1 0 1 +TIMIRAS 0 1 0 1 +TIMES 20 1 21 20 +TIMAIRS 0 1 0 1 +TIBI 0 1 1 0 +TIBBY 0 1 0 1 +THUS 20 1 21 20 +THROWN 4 1 4 5 +THRONE 3 1 4 3 +THRO 0 1 1 0 +THRIVING 0 1 1 0 +THORLEIF 0 1 1 0 +THINKS 2 1 3 2 +THING 21 1 22 21 +THEY'RE 2 1 3 2 +THEREFORE 19 1 20 19 +THERE'S 12 1 12 13 +THEE'S 0 1 1 0 +THANKING 2 1 3 2 +THANK 12 1 13 12 +TERENTIAL 0 1 0 1 +TEPEAKA 0 1 0 1 +TENT 5 1 5 6 +TENEAS 0 1 0 1 +TENACHALANT 0 1 0 1 +TECHNIQUE 0 1 1 0 +TEARS 11 1 11 12 +TEACHERS 0 1 0 1 +TEA 2 1 3 
2 +TARANTULA 0 1 1 0 +TALKERS 0 1 1 0 +TALK 19 1 19 20 +TAKING 7 1 7 8 +TAKES 2 1 3 2 +TAKEN 14 1 15 14 +TABOU 0 1 0 1 +TABOO 0 1 0 1 +TABLECLOTH 0 1 0 1 +TABLE 23 1 24 23 +SWUNG 0 1 0 1 +SWOONS 0 1 1 0 +SWOON 3 1 3 4 +SWEPT 1 1 1 2 +SWEEP 1 1 2 1 +SWAYING 0 1 0 1 +SWAN 0 1 1 0 +SUSPICIONS 2 1 3 2 +SUSPICION 4 1 4 5 +SURVIVED 0 1 0 1 +SURVIVE 1 1 2 1 +SURFACES 0 1 1 0 +SUPPOSITION 0 1 1 0 +SUMNER 0 1 1 0 +SUMMONED 3 1 3 4 +SUMMON 0 1 1 0 +SUMMERS 0 1 1 0 +SUMMER'S 2 1 2 3 +SUMMER 6 1 6 7 +SUM 1 1 1 2 +SUITCASE 0 1 1 0 +SUIT 4 1 4 5 +STUTTON 0 1 0 1 +STUDY 12 1 12 13 +STROLL 2 1 3 2 +STREAMLINE 0 1 1 0 +STREAM 1 1 1 2 +STRAW 1 1 1 2 +STORY'S 0 1 1 0 +STORM 3 1 3 4 +STORIES 3 1 3 4 +STONES 2 1 3 2 +STICK 1 1 1 2 +STEVE 0 1 0 1 +STEP 6 1 7 6 +STEELED 0 1 0 1 +STEEL'D 0 1 1 0 +STEED 1 1 1 2 +STEADY 4 1 5 4 +STAY 11 1 11 12 +STATE'S 1 1 2 1 +STARVED 0 1 1 0 +STARTS 0 1 1 0 +START 3 1 3 4 +STARE 1 1 1 2 +STANDS 2 1 3 2 +STAND 13 1 13 14 +STAIR 0 1 1 0 +STAID 0 1 1 0 +STAGE 5 1 6 5 +SQUI 0 1 0 1 +SQUEAK 1 1 2 1 +SPRING 7 1 8 7 +SPREE 0 1 0 1 +SPRANG 3 1 3 4 +SPRAGUE 0 1 1 0 +SPLENDOUR 0 1 0 1 +SPLENDOR 0 1 1 0 +SPLENDID 9 1 9 10 +SPLENDET 0 1 1 0 +SPIN 0 1 1 0 +SPILLIGOAT 0 1 0 1 +SPEND 2 1 2 3 +SPECIALIZED 0 1 0 1 +SPECIALISED 0 1 1 0 +SP 0 1 0 1 +SOUTHEY'S 0 1 1 0 +SOUS 0 1 0 1 +SOUND 7 1 7 8 +SOU 0 1 1 0 +SORA 0 1 0 1 +SOOTHED 0 1 1 0 +SOOTHE 1 1 1 2 +SON 14 1 15 14 +SOLOQUY 0 1 0 1 +SOLON'S 0 1 1 0 +SOLILOQUY 4 1 5 4 +SOLID 2 1 2 3 +SODELITY 0 1 0 1 +SODALITY 0 1 1 0 +SOCRATIC 0 1 1 0 +SNUBNOSED 0 1 1 0 +SNUB 0 1 0 1 +SMITTEN 1 1 2 1 +SMIT 0 1 0 1 +SMILED 4 1 4 5 +SMILD 0 1 1 0 +SMELLS 1 1 2 1 +SLEEVE 0 1 1 0 +SKURA 0 1 0 1 +SKILLFUL 0 1 1 0 +SKILFUL 0 1 0 1 +SKIES 0 1 0 1 +SKEPTICAL 0 1 1 0 +SIZED 1 1 1 2 +SIZE 4 1 5 4 +SITTING 4 1 4 5 +SIT 11 1 11 12 +SIR 35 1 36 35 +SIMPOSIUM 0 1 0 1 +SIMPOS 0 1 0 1 +SIMPLED 0 1 0 1 +SIMMONS 0 1 0 1 +SILENT 10 1 11 10 +SIGHTSEERS 0 1 0 1 +SIGHT 19 1 20 19 +SIGHED 3 1 4 3 +SIGH 3 1 3 4 +SIFT 0 1 0 1 +SIF 1 1 2 1 +SICK 2 1 2 3 +SHOWRING 0 1 1 0 +SHOWERING 0 1 0 1 +SHOWED 5 1 5 6 +SHOULDST 0 1 0 1 +SHODY 0 1 0 1 +SHODDY 0 1 1 0 +SHIP'S 0 1 0 1 +SHEWD 0 1 1 0 +SHERIFF'S 3 1 3 4 +SHERIFF 3 1 4 3 +SHEBATA 0 1 0 1 +SHARPS 0 1 1 0 +SHARPEST 1 1 1 2 +SHARP'ST 0 1 1 0 +SHARP'S 0 1 0 1 +SHANNON 2 1 3 2 +SHAN'T 0 1 1 0 +SHAN 0 1 0 1 +SHALT 2 1 2 3 +SHALL 43 1 44 43 +SHABATA 0 1 1 0 +SHA'N'T 0 1 0 1 +SEX 2 1 2 3 +SEWED 1 1 1 2 +SEVER 0 1 0 1 +SETTLED 1 1 1 2 +SETTLE 2 1 3 2 +SET 19 1 19 20 +SERVICES 1 1 1 2 +SERVED 4 1 4 5 +SERVE 11 1 12 11 +SERVANTS 4 1 4 5 +SERVANT 11 1 12 11 +SENTENCES 2 1 3 2 +SENTENCE 3 1 3 4 +SENT 5 1 6 5 +SENSE 15 1 16 15 +SENDIARIES 0 1 0 1 +SENCE 0 1 1 0 +SENATE 2 1 2 3 +SEN 0 1 0 1 +SEMON'S 0 1 1 0 +SELVEY 0 1 0 1 +SELF 5 1 5 6 +SEERS 0 1 1 0 +SEEMS 12 1 12 13 +SEEDS 0 1 1 0 +SEED 2 1 2 3 +SECTS 0 1 1 0 +SEATS 3 1 3 4 +SEAT 3 1 4 3 +SEA 18 1 18 19 +SCUTCHEON 0 1 1 0 +SCUSE 0 1 1 0 +SCRAPBOOKS 0 1 1 0 +SCRAP 0 1 0 1 +SCOUTING 0 1 1 0 +SCOUT 5 1 5 6 +SCHOOLS 1 1 1 2 +SCHOOLED 0 1 1 0 +SCHOOLBOYS 0 1 0 1 +SCEVRA 0 1 1 0 +SCEURA 0 1 1 0 +SCEPTICAL 0 1 0 1 +SCENTS 0 1 0 1 +SCAVRA 0 1 0 1 +SCATHE 0 1 1 0 +SCATH 0 1 0 1 +SCAROONS 0 1 1 0 +SCARONS 0 1 0 1 +SAYS 12 1 12 13 +SAYING 15 1 15 16 +SAW 22 1 23 22 +SAVEETTE 0 1 0 1 +SAVED 4 1 5 4 +SAUVEUR 0 1 1 0 +SATITUDE 0 1 0 1 +SATE 0 1 1 0 +SAT 18 1 18 19 +SANG 4 1 5 4 +SAMPLE 0 1 1 0 +SALVI 0 1 0 1 +SALINE 0 1 1 0 +SALIENT 1 1 2 1 +SALE 0 1 0 1 +SAINTS 4 1 5 4 +SAILORED 0 1 0 1 +SAILING 0 1 0 1 +SAILED 0 1 0 1 +SAILD 0 1 1 0 +SAIL 5 1 6 5 +SAGE 0 1 0 1 +SABLE 0 1 0 1 +RUST 0 1 1 0 +RULED 0 1 1 0 +RUFUS 
0 1 1 0 +RUFFUS 0 1 0 1 +RUE 0 1 1 0 +ROSSITUR 0 1 0 1 +ROSSETER 0 1 1 0 +ROOM 40 1 41 40 +ROLLED 3 1 3 4 +ROI 0 1 1 0 +ROERER 0 1 1 0 +RODOLPHO 0 1 0 1 +RODOLFO'S 0 1 1 0 +ROCKED 0 1 1 0 +ROCK 1 1 1 2 +ROBINS 0 1 0 1 +ROBIN'S 0 1 1 0 +ROAR 0 1 0 1 +RIVING 0 1 0 1 +RINGING 0 1 0 1 +RIDOLPHO 0 1 0 1 +REWEIGHED 0 1 1 0 +REWAIED 0 1 0 1 +RETURNED 18 1 18 19 +RETURN 8 1 9 8 +REST 14 1 14 15 +RESIGNED 1 1 2 1 +RESIGN 0 1 0 1 +REMOVED 4 1 4 5 +REMOVE 3 1 4 3 +REMOV'D 0 1 1 0 +REMARKED 4 1 4 5 +REMARK 2 1 3 2 +REMAINING 0 1 0 1 +RELOCATED 0 1 1 0 +RELIES 0 1 1 0 +REINTER 0 1 0 1 +REINFORCEMENTS 0 1 0 1 +REIGNED 1 1 2 1 +REGGIE 1 1 2 1 +REGAINED 0 1 1 0 +REGAIN 0 1 0 1 +REFUSED 7 1 8 7 +REENFORCEMENTS 0 1 1 0 +REEDER 0 1 1 0 +RECORDS 2 1 2 3 +RECORD 6 1 7 6 +RECOGNIZED 3 1 3 4 +RECOGNISED 0 1 1 0 +REBUKED 0 1 0 1 +REBUK'D 0 1 1 0 +REALIZE 4 1 4 5 +RDOLPHAL 0 1 0 1 +RANCH 0 1 0 1 +RAINED 0 1 0 1 +RADOLPHO 0 1 0 1 +RACHEL 15 1 16 15 +RACHAEL 0 1 0 1 +QUINCY 0 1 1 0 +QUINCEY 0 1 0 1 +QUASI 0 1 1 0 +QUASH 0 1 1 0 +QUART 0 1 1 0 +QUADRILLE 1 1 2 1 +QUADRILL 0 1 0 1 +PYTHAGORIANS 0 1 0 1 +PYTHAGOREANS 0 1 1 0 +PUTTIN 0 1 1 0 +PUT 31 1 32 31 +PURSUS 0 1 0 1 +PURSE 1 1 2 1 +PURPOSED 0 1 1 0 +PURPOSE 10 1 10 11 +PURIST 0 1 1 0 +PUREST 0 1 0 1 +PURCHASE 0 1 0 1 +PSALM 2 1 3 2 +PROVIDING 0 1 1 0 +PROVES 1 1 2 1 +PROVED 6 1 6 7 +PROSELYTING 0 1 1 0 +PROSELY 0 1 0 1 +PROSCRIBED 0 1 1 0 +PROOFS 4 1 4 5 +PRODIGAL 0 1 1 0 +PROCEED 1 1 1 2 +PRINCIPLE 3 1 4 3 +PRINCIPAL 4 1 4 5 +PRIESTHOO 0 1 0 1 +PREVENT 0 1 1 0 +PRETENSE 0 1 1 0 +PRETENCE 1 1 1 2 +PRESENT 20 1 20 21 +PRESCRIBED 1 1 1 2 +PRECIEUSES 0 1 1 0 +PRECEDE 0 1 1 0 +PRE 0 1 1 0 +POWER 21 1 22 21 +POSSESSED 3 1 3 4 +POSSESS 2 1 3 2 +PORTES 0 1 1 0 +PORT 1 1 1 2 +POPHAM 4 1 5 4 +POOS 0 1 1 0 +POOL 1 1 1 2 +POND 2 1 3 2 +POLITICS 1 1 1 2 +POISONED 0 1 0 1 +POISON'D 0 1 1 0 +POINT 13 1 14 13 +PLORO 0 1 0 1 +PLEASANTS 0 1 0 1 +PLEASANCE 0 1 1 0 +PLEAD 1 1 1 2 +PLEA 0 1 1 0 +PLAY 12 1 12 13 +PLATINISTS 0 1 0 1 +PLATES 2 1 2 3 +PLAITS 0 1 1 0 +PLACES 1 1 1 2 +PLACE 38 1 39 38 +PIG 2 1 2 3 +PIERCED 1 1 1 2 +PIERC'D 0 1 1 0 +PIECE 1 1 1 2 +PIC 0 1 0 1 +PHILANTHROPIES 0 1 1 0 +PHILADELPHIAN 0 1 1 0 +PHILADELPHIA 0 1 0 1 +PHAEDRUS 0 1 1 0 +PH 0 1 1 0 +PETER 0 1 0 1 +PETALKAN 0 1 0 1 +PET 0 1 1 0 +PERVERTIVES 0 1 0 1 +PERVERTERS 0 1 1 0 +PERSONAL 7 1 7 8 +PERFECT 6 1 7 6 +PERCHES 0 1 1 0 +PEPPEL 0 1 0 1 +PENCH 0 1 0 1 +PEGRIN 0 1 0 1 +PEGRENNE 0 1 1 0 +PEGRE 0 1 1 0 +PEG 0 1 0 1 +PEASE 0 1 1 0 +PEAS 0 1 0 1 +PEARL 12 1 12 13 +PEACE 13 1 14 13 +PAW 0 1 0 1 +PATIENTS 0 1 1 0 +PATIENT 0 1 0 1 +PATCH 3 1 4 3 +PAT 0 1 0 1 +PASSION 3 1 3 4 +PASSAGEWAY 0 1 0 1 +PASSAGE 8 1 9 8 +PASCHAL 0 1 1 0 +PARTICLES 0 1 1 0 +PARTICLE 0 1 0 1 +PARSONS 1 1 2 1 +PARSON 0 1 0 1 +PARRIQUET 0 1 0 1 +PARRICIDES 0 1 0 1 +PAROQUET 0 1 1 0 +PARLOUR 0 1 0 1 +PARLOR 1 1 2 1 +PARASITES 0 1 1 0 +PARALLELOGRAM 0 1 1 0 +PARALLELLOGRAM 0 1 0 1 +PAPAL 0 1 1 0 +PANTS 0 1 1 0 +PANLESS 0 1 0 1 +PALLET 0 1 0 1 +PALATE 0 1 1 0 +P 1 1 1 2 +OWE 0 1 0 1 +OUTRAGED 0 1 0 1 +OUTRAGE 0 1 1 0 +OUGHTER 0 1 1 0 +OTTLEY'S 0 1 1 0 +OTTER 0 1 0 1 +OTHERWORLDLY 0 1 1 0 +OTHERS 22 1 23 22 +OSTENSITY 0 1 0 1 +OSH 0 1 1 0 +OSAGE 0 1 1 0 +ORDERS 3 1 3 4 +ORCHARD 3 1 4 3 +ORANGES 0 1 0 1 +ORANGE 0 1 1 0 +OPT 0 1 0 1 +OPPOSITION 4 1 4 5 +OPIEVIL 0 1 0 1 +OPHELIA 0 1 1 0 +ONTO 0 1 1 0 +OMER 0 1 0 1 +OLOFF 0 1 0 1 +OLIVE'S 2 1 3 2 +OLIVE 3 1 4 3 +OLD 39 1 40 39 +OLAF 1 1 2 1 +OFFICES 0 1 1 0 +OFFICER'S 0 1 0 1 +OFFICER 4 1 4 5 +OFFENSES 0 1 1 0 +OFFENCES 0 1 0 1 +OFF 24 1 25 24 +OCCUPANTS 0 1 1 0 +OCCUPANT 0 1 0 1 +OCCOURANT 0 1 0 
1 +OCCAS 0 1 0 1 +OCCAIA 0 1 0 1 +OBSERVED 5 1 6 5 +OBSERVE 4 1 4 5 +OBROWN 0 1 0 1 +OAKS 0 1 1 0 +OAKLEIGHS 0 1 0 1 +O'ER 0 1 1 0 +NUMIDIA 0 1 1 0 +NOVELS 2 1 2 3 +NOVEL'S 0 1 1 0 +NOUGHT 0 1 1 0 +NOTHIN 0 1 1 0 +NOTABLY 0 1 1 0 +NOSED 0 1 0 1 +NOSE 2 1 2 3 +NORTHWEST 1 1 1 2 +NORTHWARDS 0 1 1 0 +NORTHWARD 1 1 1 2 +NORTHERNERS 0 1 1 0 +NORTH 7 1 8 7 +NOR'S 0 1 0 1 +NOONK 0 1 0 1 +NOON 2 1 3 2 +NINE 10 1 11 10 +NIGHTFALL 0 1 1 0 +NICE 3 1 3 4 +NEWCOMER 0 1 1 0 +NEWBORN 0 1 0 1 +NEVERS 0 1 0 1 +NEVERBEIN 0 1 0 1 +NEVERBAND 0 1 0 1 +NEVER 63 1 63 64 +NELLY 0 1 1 0 +NELLIE 0 1 0 1 +NEIGHBOURHOOD 0 1 0 1 +NEIGHBORHOOD 0 1 1 0 +NEED 12 1 12 13 +NED 1 1 1 2 +NECK 5 1 6 5 +NECHERANT 0 1 0 1 +NEARER 3 1 4 3 +NEAR'S 0 1 0 1 +NEAR 6 1 6 7 +NE'ER 0 1 1 0 +NAY 5 1 5 6 +NAVE 0 1 0 1 +NAUGHT 0 1 0 1 +NATTY 1 1 2 1 +NARES 0 1 1 0 +NANNIE 0 1 0 1 +NAMED 3 1 4 3 +NAME 14 1 14 15 +MYLES 0 1 0 1 +MUSOLEUM 0 1 0 1 +MURCHESON 0 1 0 1 +MUMMERIES 0 1 1 0 +MUMBIE 0 1 0 1 +MUCH 68 1 69 68 +MOVED 10 1 10 11 +MOUNTED 0 1 1 0 +MOUNTAIN 5 1 5 6 +MOUNT 0 1 0 1 +MOULDED 0 1 0 1 +MOST 50 1 51 50 +MORNING 21 1 21 22 +MORNIN 1 1 2 1 +MORMONS 3 1 4 3 +MORMONISM 2 1 3 2 +MORE'S 0 1 1 0 +MONTMARTRA 0 1 0 1 +MONTFICHET'S 0 1 1 0 +MONGOOSE 0 1 1 0 +MONGOO'S 0 1 0 1 +MOMBI 0 1 1 0 +MOLE 0 1 0 1 +MOLDED 0 1 1 0 +MOHICANS 0 1 0 1 +MOHICAN 0 1 1 0 +MO 0 1 1 0 +MITIGATED 0 1 0 1 +MISSOURIENS 0 1 0 1 +MISSOURIANS 1 1 2 1 +MISDEMEANOUR 0 1 0 1 +MISDEMEANOR 0 1 1 0 +MINE 6 1 7 6 +MIND 29 1 29 30 +MILLS 0 1 0 1 +MILLIONED 0 1 0 1 +MILLION'D 0 1 1 0 +MILLIMETRE 0 1 0 1 +MILLIMETER 0 1 1 0 +MILLIGRAM 0 1 1 0 +MILL 0 1 0 1 +MILITATED 0 1 1 0 +MILES 6 1 7 6 +MILAGRAM 0 1 0 1 +MIKE 2 1 2 3 +MICARTLE 0 1 0 1 +METRES 0 1 0 1 +METERS 0 1 1 0 +MET 10 1 10 11 +MESSRS 0 1 1 0 +MESSIERS 0 1 0 1 +MESS 0 1 1 0 +MERSEY 0 1 1 0 +MERRY 0 1 1 0 +MERRIED 0 1 0 1 +MERGANSER 0 1 1 0 +MERGANCER 0 1 0 1 +MERCY 2 1 2 3 +MERCHISTON 0 1 1 0 +MER 0 1 1 0 +MEND 1 1 2 1 +MEN'S 2 1 2 3 +MEMORIES 0 1 0 1 +MELL 0 1 1 0 +MEET 6 1 7 6 +MEDICROFT 0 1 0 1 +MEANHAVED 0 1 0 1 +MEAN 9 1 9 10 +MEALYBACK 0 1 1 0 +MEALLY 0 1 0 1 +MEADOWCROFT 0 1 1 0 +MC 1 1 1 2 +MAUSOLEUM 0 1 1 0 +MAU 0 1 0 1 +MATE 1 1 2 1 +MASTS 0 1 0 1 +MASTER'S 1 1 2 1 +MASTER 14 1 14 15 +MASS 2 1 2 3 +MARY 6 1 6 7 +MARVELOUS 0 1 1 0 +MARVELLOUS 0 1 0 1 +MARTRE 0 1 0 1 +MARTFICHE 0 1 0 1 +MARSHALLED 0 1 1 0 +MARSHALED 0 1 0 1 +MARGOLOTTE 4 1 5 4 +MARGOLOT 0 1 0 1 +MARAY 0 1 0 1 +MARAVAUX 0 1 0 1 +MARAIS 0 1 1 0 +MANIFESTED 0 1 1 0 +MANIFEST 2 1 2 3 +MAIDEN 0 1 0 1 +MAID 5 1 6 5 +MADE 61 1 61 62 +MADAME 4 1 4 5 +MADAM 2 1 3 2 +MACDONALDS 0 1 1 0 +MAC 0 1 1 0 +LUTHERS 0 1 0 1 +LUTHER'S 3 1 4 3 +LUSTY 0 1 0 1 +LURE 0 1 1 0 +LULLS 0 1 1 0 +LUIS 0 1 1 0 +LUCK 3 1 3 4 +LUCADIUS 0 1 0 1 +LUCADIA 0 1 0 1 +LUBRICATE 0 1 1 0 +LUBRICADE 0 1 0 1 +LOWERED 0 1 0 1 +LOWED 0 1 0 1 +LOVES 3 1 3 4 +LOVE'S 0 1 1 0 +LOUISE 4 1 4 5 +LOU'S 0 1 1 0 +LOU 0 1 0 1 +LORNE 0 1 1 0 +LORN 0 1 0 1 +LORD 22 1 23 22 +LOQUES 0 1 0 1 +LOOSE 5 1 5 6 +LOLLS 0 1 0 1 +LOGARTHEMS 0 1 0 1 +LOGARITHMS 0 1 1 0 +LOGALITY 0 1 0 1 +LOCRIS 0 1 1 0 +LOCKY 0 1 0 1 +LOCHRIS 0 1 0 1 +LOCATIA 0 1 0 1 +LOCATEUS 0 1 0 1 +LOCATED 1 1 1 2 +LOCALIA 0 1 0 1 +LOADS 0 1 1 0 +LOAD 0 1 1 0 +LO 1 1 2 1 +LITTLE 101 1 101 102 +LITER 0 1 1 0 +LINKED 0 1 0 1 +LINK'D 0 1 1 0 +LINEN 2 1 2 3 +LILY'S 0 1 0 1 +LILLYS 0 1 1 0 +LILLY 0 1 1 0 +LILBURN 0 1 1 0 +LIE 1 1 1 2 +LETS 0 1 1 0 +LESSER 1 1 2 1 +LESS 28 1 28 29 +LEOCADI 0 1 1 0 +LEND 1 1 2 1 +LEGALITY 1 1 2 1 +LEFRANK 0 1 1 0 +LEEK 0 1 0 1 +LED 7 1 7 8 +LECOMTE 0 1 0 1 +LEAVING 5 1 6 5 +LEAVES 5 1 5 6 +LEAST 15 1 15 16 
+LEASED 0 1 1 0 +LEAPING 3 1 3 4 +LEAF 3 1 3 4 +LEADS 2 1 3 2 +LAURY 0 1 0 1 +LATER 14 1 14 15 +LARKSBURGH 0 1 0 1 +LARKSBURG 0 1 0 1 +LANTHORN 0 1 1 0 +LANTERN 0 1 0 1 +LANDA 0 1 0 1 +LAND 12 1 13 12 +LAMMA 0 1 0 1 +LAMBENT 0 1 1 0 +LALLIE 0 1 1 0 +LAKE 12 1 13 12 +LAID 7 1 8 7 +LADEN 0 1 0 1 +LAD 2 1 3 2 +LABOUR 0 1 1 0 +LABOR 1 1 1 2 +L 1 1 2 1 +KONSTANTINE 0 1 0 1 +KNOWS 6 1 6 7 +KNIFE 9 1 10 9 +KNEEP 0 1 0 1 +KNEED 0 1 1 0 +KNEE 0 1 1 0 +KNAVE 0 1 1 0 +KIVED 0 1 0 1 +KIRTLAND 0 1 1 0 +KINGDOMS 1 1 2 1 +KINGDOM'S 0 1 0 1 +KING'S 4 1 4 5 +KING 25 1 26 25 +KIEV 0 1 0 1 +KIERUSCURISTS 0 1 0 1 +KICKAPOOS 0 1 0 1 +KICK 1 1 2 1 +KEZWICK 0 1 0 1 +KESWICK 0 1 1 0 +KEOGH 0 1 1 0 +KATYA 0 1 0 1 +KANTI 0 1 0 1 +KAFFIRS 0 1 0 1 +KAFFIR'S 0 1 0 1 +JUSTATION 0 1 0 1 +JUST 42 1 42 43 +JUS 0 1 1 0 +JOHNNIEAUGO 0 1 0 1 +JOHN 15 1 16 15 +JOB 4 1 4 5 +JEWELLERS 0 1 0 1 +JEWELER'S 0 1 1 0 +JEFFREY'S 0 1 0 1 +JEFFREY 0 1 0 1 +JAW 1 1 2 1 +JASPER 5 1 6 5 +JAPSER 0 1 0 1 +JAIL 3 1 4 3 +JACKKNIFE 0 1 0 1 +JACK 5 1 6 5 +ITHUS 0 1 0 1 +IT'LL 2 1 2 3 +ISTIATED 0 1 0 1 +IRONS 0 1 0 1 +IRON'S 0 1 1 0 +INVENTORS 1 1 2 1 +INVENTILE 0 1 0 1 +INTRUSTING 0 1 0 1 +INTRENCHMENT 0 1 1 0 +INTERESTS 1 1 2 1 +INTER 0 1 0 1 +INTENTS 0 1 1 0 +INTENT 0 1 1 0 +INTENSE 2 1 2 3 +INSURRECTIONOUS 0 1 0 1 +INSURRECTIONISTS 0 1 1 0 +INSURE 0 1 0 1 +INQUIRIES 1 1 1 2 +INQUIRED 2 1 2 3 +INQUIRE 0 1 0 1 +INIMITIA 0 1 0 1 +INHEOSORIS 0 1 0 1 +INFANTS 2 1 3 2 +INFANTILE 1 1 2 1 +INFANT'S 0 1 0 1 +INERLOCHY 0 1 0 1 +INDEED 29 1 29 30 +INCULCATED 0 1 1 0 +INCLOSED 0 1 1 0 +INCERTAINTY 0 1 1 0 +INCALCATED 0 1 0 1 +INACTION 0 1 1 0 +IMPRESSES 0 1 1 0 +IMPRESS'D 0 1 1 0 +IMPERFECT 0 1 0 1 +IMPELLED 2 1 2 3 +IMPEARLED 0 1 1 0 +IMMIGRATION 0 1 0 1 +IMMIGRANT 0 1 0 1 +IMMATURE 0 1 1 0 +IMMATEUR 0 1 0 1 +IKESORIS 0 1 0 1 +IKE 0 1 1 0 +IDIOSYNCRATICALLY 0 1 1 0 +IDIOS 0 1 0 1 +ID 0 1 0 1 +HYMN 1 1 1 2 +HYDRAST 0 1 0 1 +HYDRAS 0 1 1 0 +HUTTED 0 1 0 1 +HUSBAND 8 1 9 8 +HUSBA 0 1 0 1 +HURRY 4 1 4 5 +HUNTLY 0 1 0 1 +HUNTLEY 0 1 1 0 +HUMPH 0 1 1 0 +HUMID 0 1 1 0 +HUMAN 15 1 15 16 +HULLO 0 1 0 1 +HOW 50 1 50 51 +HOUSECLEANING 0 1 1 0 +HOUSE 34 1 34 35 +HOUND 0 1 1 0 +HOTBED 0 1 1 0 +HOST 2 1 3 2 +HOSE 2 1 2 3 +HORTON 0 1 1 0 +HORSEPLAY 0 1 1 0 +HORNS 2 1 2 3 +HORACE 0 1 1 0 +HOPES 5 1 6 5 +HOPE 9 1 10 9 +HONOURS 0 1 0 1 +HONOURABLY 0 1 1 0 +HONOUR 1 1 2 1 +HONORS 0 1 1 0 +HONORABLY 0 1 0 1 +HONOR 4 1 4 5 +HONED 0 1 0 1 +HOLOCOST 0 1 0 1 +HOLOCAUST 0 1 1 0 +HOLD 7 1 8 7 +HOCKIN 0 1 0 1 +HIT 1 1 2 1 +HIRESUME 0 1 0 1 +HILDA'S 1 1 2 1 +HILDA 8 1 9 8 +HIGHEST 2 1 3 2 +HIDALGO 0 1 1 0 +HID 1 1 1 2 +HEY 0 1 0 1 +HETTY'S 0 1 0 1 +HETTY 0 1 1 0 +HERSELF 19 1 20 19 +HERACLITUS 0 1 0 1 +HERACLEITUS 0 1 1 0 +HENCHMEN 0 1 1 0 +HENCHMAN 0 1 0 1 +HEN 1 1 2 1 +HELPS 0 1 0 1 +HELPED 2 1 3 2 +HELDA'S 0 1 0 1 +HELDA 0 1 0 1 +HELD 14 1 15 14 +HEIR 0 1 0 1 +HEDGES 0 1 1 0 +HEAVEN 14 1 14 15 +HEARTSEASE 0 1 0 1 +HEARTS 8 1 8 9 +HEART'S 0 1 1 0 +HEART 27 1 28 27 +HEARKENED 0 1 0 1 +HE'D 2 1 3 2 +HAZEWRAPPED 0 1 1 0 +HAYES 0 1 0 1 +HAY 0 1 1 0 +HAWTREY 0 1 1 0 +HAWK 6 1 7 6 +HAVING 11 1 12 11 +HAT 1 1 1 2 +HARTS 0 1 1 0 +HAROLD 0 1 0 1 +HARMONIZED 1 1 2 1 +HARMONIZE 0 1 0 1 +HARKENED 0 1 1 0 +HARD 12 1 12 13 +HARBOURING 0 1 0 1 +HARBORING 0 1 1 0 +HARANGUE 0 1 1 0 +HARALD 0 1 1 0 +HAPPENED 6 1 6 7 +HAPPEN 4 1 5 4 +HANNA 0 1 1 0 +HANGINGS 0 1 1 0 +HANGING 2 1 2 3 +HANDSOME 3 1 3 4 +HANDLED 0 1 0 1 +HAMLET 5 1 6 5 +HAM 0 1 1 0 +HALTREE 0 1 0 1 +HALLOA 0 1 1 0 +HALF 19 1 19 20 +HAL 0 1 1 0 +HAKON 0 1 1 0 +HAIRDRESSER 0 1 1 0 +HACITY 0 1 0 1 +GUYRS 0 1 0 1 +GUISE 0 1 1 0 +GUESS 0 1 
0 1 +GRUE 0 1 0 1 +GROWS 1 1 2 1 +GROVES 0 1 0 1 +GROUND 10 1 10 11 +GRINGO 0 1 1 0 +GRIEFS 0 1 1 0 +GREY'S 0 1 1 0 +GRENGO 0 1 0 1 +GREEN 12 1 12 13 +GREEING 0 1 1 0 +GREEDS 0 1 0 1 +GREE 0 1 0 1 +GRAY'S 0 1 0 1 +GRATES 0 1 0 1 +GRATEFUL 2 1 2 3 +GRAPEVINE 0 1 1 0 +GRAPE 0 1 0 1 +GRAND 1 1 2 1 +GRAMOPHONES 0 1 1 0 +GRAMOPHONE 0 1 1 0 +GRAMMAPHONE 0 1 0 1 +GRAMIPHONES 0 1 0 1 +GRAM 0 1 1 0 +GRAHAM 0 1 0 1 +GRAFT 0 1 0 1 +GRAF 0 1 1 0 +GRADES 0 1 1 0 +GOVERNOR 14 1 14 15 +GOVERNMENT 7 1 8 7 +GOVERNED 0 1 1 0 +GOVERN 0 1 0 1 +GOULD 0 1 0 1 +GOOD 69 1 69 70 +GOOBERS 0 1 1 0 +GONE 13 1 14 13 +GOING 26 1 26 27 +GODOLPH'S 0 1 0 1 +GOATS 0 1 0 1 +GOAT'S 1 1 2 1 +GIRARD 0 1 1 0 +GIP 0 1 0 1 +GILLIA 0 1 0 1 +GILCHRIST'S 0 1 1 0 +GILCHER'S 0 1 0 1 +GIER 0 1 1 0 +GIAOURS 0 1 1 0 +GHIP 3 1 4 3 +GESTATION 0 1 1 0 +GERARD 0 1 0 1 +GERALLY 0 1 0 1 +GEOFFREY'S 0 1 1 0 +GEOFFREY 0 1 1 0 +GENTLEMEN 5 1 6 5 +GENTLEMAN 8 1 8 9 +GENERALSHIP 0 1 0 1 +GENERALLY 7 1 8 7 +GENERAL 16 1 17 16 +GEAR 0 1 0 1 +GAYLY 0 1 1 0 +GAVE 31 1 31 32 +GASE 0 1 0 1 +GARDENS 2 1 3 2 +GARDEN'S 0 1 0 1 +GAMEWELL 6 1 7 6 +GAME 4 1 4 5 +GALLOWSNESS 0 1 1 0 +GALLOW'S 0 1 0 1 +GAILY 0 1 0 1 +FUTURISTIC 0 1 1 0 +FUTURE 5 1 5 6 +FURTHEST 0 1 0 1 +FURS 0 1 0 1 +FUN 0 1 0 1 +FULNESS 0 1 1 0 +FULLNESS 0 1 0 1 +FRISKLY 0 1 0 1 +FRISKILY 0 1 1 0 +FRIGHTER 0 1 0 1 +FRIGHTENED 2 1 2 3 +FRIEND 20 1 21 20 +FREEZED 0 1 0 1 +FRANKS 0 1 0 1 +FRANK 2 1 2 3 +FRANCS 0 1 1 0 +FOWLED 0 1 0 1 +FORWARDED 0 1 1 0 +FORTS 0 1 0 1 +FORMERLY 0 1 0 1 +FORMALLY 2 1 3 2 +FORBES'S 0 1 1 0 +FOLLOWED 14 1 15 14 +FOEEDED 0 1 0 1 +FLUFFINOSE 0 1 1 0 +FLOWER 4 1 4 5 +FLOUR 0 1 1 0 +FLIGHT 2 1 3 2 +FLAPHANO'S 0 1 0 1 +FIVE 15 1 15 16 +FITZOOTH'S 0 1 1 0 +FITTOOTH'S 0 1 0 1 +FIT 0 1 0 1 +FIRST 67 1 67 68 +FIRS 0 1 1 0 +FIREBUGS 0 1 1 0 +FIREBALL 0 1 1 0 +FIR 9 1 9 10 +FINE 16 1 17 16 +FILLIOTTED 0 1 0 1 +FILLED 8 1 8 9 +FILL 1 1 2 1 +FIL 0 1 0 1 +FIGHTER 0 1 1 0 +FETE 2 1 3 2 +FERRAGUT 0 1 0 1 +FERDINANDO 4 1 5 4 +FERDINAND 0 1 0 1 +FELLED 1 1 1 2 +FEES 0 1 1 0 +FEELS 1 1 2 1 +FEELINS 0 1 0 1 +FEELIN'S 0 1 1 0 +FEEL 17 1 18 17 +FEATURES 9 1 9 10 +FEARED 2 1 3 2 +FEAR 12 1 12 13 +FAUDOIS 0 1 0 1 +FARTHEST 1 1 2 1 +FARTHER 6 1 6 7 +FARRAGUT 0 1 1 0 +FAR 29 1 30 29 +FANIA 0 1 0 1 +FALLING 2 1 2 3 +FALLEN 2 1 3 2 +FALCON 0 1 1 0 +FAIREST 0 1 0 1 +EYE 14 1 15 14 +EXIST 2 1 3 2 +EXCUSE 3 1 3 4 +EXAGGERUS 0 1 0 1 +EVERYDAY 2 1 3 2 +EVENINGS 1 1 1 2 +EVENING 14 1 15 14 +EVENIN'S 0 1 1 0 +EVEN 51 1 51 52 +EVA 2 1 3 2 +EULO 0 1 0 1 +ETHNEE 0 1 0 1 +ETHINAY 0 1 0 1 +ETHEL 0 1 0 1 +ESTATE 2 1 3 2 +ESTAFFANIA 0 1 0 1 +ESPRIT 0 1 1 0 +ESCHEATED 0 1 1 0 +ER 0 1 1 0 +ENTRUSTING 0 1 1 0 +ENTRENCHMENT 0 1 0 1 +ENTRANCE 4 1 4 5 +ENTHUSIASTS 0 1 1 0 +ENTHUSIAST 0 1 0 1 +ENTHRIBING 0 1 0 1 +ENTHRALMENT 0 1 1 0 +ENTHRALIMENT 0 1 0 1 +ENTERED 21 1 21 22 +ENSURE 0 1 1 0 +ENQUIRIES 0 1 1 0 +ENQUIRED 0 1 1 0 +ENQUIRE 0 1 1 0 +ENDEAVOURED 0 1 1 0 +ENDEAVOUR 1 1 1 2 +ENDEAVORED 0 1 0 1 +ENDEAVOR 0 1 1 0 +ENCLOSED 0 1 0 1 +EMISSIONS 0 1 1 0 +EMIGRATION 0 1 1 0 +EMIGRANT 0 1 1 0 +EMERGED 0 1 0 1 +EMERG'D 0 1 1 0 +EM 0 1 1 0 +ELZINOR 0 1 0 1 +ELSINORE 0 1 1 0 +ELMO'S 0 1 1 0 +ELKO 0 1 0 1 +ELECT 1 1 2 1 +ELEC 0 1 0 1 +ELCHO 0 1 1 0 +ELABORATE 2 1 3 2 +EITHER 8 1 8 9 +EFFECTED 1 1 2 1 +EFFECT 9 1 9 10 +EEN 0 1 0 1 +EDMOST 0 1 0 1 +EDITION 0 1 1 0 +EASE 2 1 3 2 +EARSHOT 0 1 1 0 +EARS 4 1 5 4 +EAR 6 1 6 7 +EA 0 1 0 1 +DYNAS 0 1 0 1 +DYKES 0 1 1 0 +DUSPORT 0 1 0 1 +DURING 11 1 12 11 +DURE 0 1 0 1 +DUNNO 0 1 1 0 +DUMPY 1 1 2 1 +DUMAS 0 1 1 0 +DUERER 0 1 1 0 +DRUGGIST'S 0 1 1 0 +DRUGGIST 0 1 0 1 +DROPIDAS 0 1 
1 0 +DRIPPING 0 1 1 0 +DREWING 0 1 0 1 +DRESSER 0 1 0 1 +DRAWS 1 1 2 1 +DRAWLS 0 1 0 1 +DRALE 0 1 0 1 +DOWNSTAIRS 1 1 1 2 +DOWN 72 1 73 72 +DOVES 0 1 1 0 +DOVE'S 0 1 0 1 +DOUZE 0 1 1 0 +DORNESTE 0 1 0 1 +DOORSTEP 0 1 0 1 +DOOR 35 1 36 35 +DONNYTHORNE 0 1 0 1 +DONNITHORNE 0 1 1 0 +DONATISTS 1 1 2 1 +DONATIST 0 1 0 1 +DONALDS 0 1 0 1 +DONA 1 1 2 1 +DOLL 1 1 2 1 +DOES 14 1 14 15 +DOCTRIS 0 1 0 1 +DOCTRESS 0 1 1 0 +DOC 0 1 0 1 +DISTRICTS 1 1 2 1 +DISTRICT 1 1 1 2 +DISMAYEDESTAFHANIA 0 1 0 1 +DISMAYED 0 1 1 0 +DISHONOURED 0 1 1 0 +DISHONORED 0 1 0 1 +DISCOLOURED 0 1 1 0 +DISCOLORED 0 1 0 1 +DINAH'S 0 1 1 0 +DINAH 1 1 2 1 +DINA 0 1 0 1 +DIKES 0 1 0 1 +DIFFUREATION 0 1 0 1 +DIFFERENTIATION 0 1 1 0 +DIFFERENCES 1 1 2 1 +DIFFERENCE 5 1 5 6 +DIETRIBE 0 1 0 1 +DIATRIBE 0 1 1 0 +DIAS 0 1 1 0 +DIALOGUES 1 1 2 1 +DIALECTS 0 1 0 1 +DEW 1 1 2 1 +DETONICHAUCH 0 1 0 1 +DESSIMA 0 1 0 1 +DENSITY 0 1 1 0 +DELOS 0 1 0 1 +DELLA 1 1 1 2 +DELIGHT 4 1 4 5 +DELICATE 3 1 4 3 +DELIBERATIVE 0 1 1 0 +DELIBERATE 2 1 2 3 +DELEGATE 0 1 0 1 +DELA 0 1 0 1 +DEFINE 4 1 5 4 +DEEDOLUS 0 1 0 1 +DEED 2 1 3 2 +DEDALOS 0 1 1 0 +DECK 1 1 1 2 +DECENCY 3 1 4 3 +DECENCIES 0 1 0 1 +DECEIVING 0 1 1 0 +DEARIE 0 1 0 1 +DEAD 5 1 5 6 +DAWNS 0 1 0 1 +DAWN'S 0 1 1 0 +DARK 16 1 17 16 +DAN 0 1 0 1 +DAL 0 1 0 1 +DAIS 0 1 0 1 +DAIRY 4 1 5 4 +DAEDALUS 0 1 0 1 +D'ESTE 0 1 1 0 +CYN 0 1 1 0 +CUTTERS 0 1 1 0 +CURVED 0 1 1 0 +CURTLIN 0 1 0 1 +CUIRASCURISTS 0 1 0 1 +CRYSTAL 3 1 4 3 +CRUSWELLER 0 1 0 1 +CRUSTWELLS 0 1 0 1 +CROSSTREES 0 1 1 0 +CROSS 5 1 5 6 +CRISTEL 0 1 0 1 +CRIGHTON 0 1 0 1 +CRESTWELLERS 0 1 0 1 +CRESSWELLS 0 1 1 0 +CREDON 0 1 0 1 +CRASWELLERS 0 1 1 0 +CRASWELLER 0 1 1 0 +CRADICALLY 0 1 0 1 +CRADIC 0 1 0 1 +COURTS 2 1 2 3 +COURT'S 1 1 2 1 +COURSE 19 1 19 20 +COURANT 0 1 1 0 +COUNTRY'S 1 1 2 1 +COUNTRY 25 1 25 26 +COUNSELS 0 1 1 0 +COUNSELLED 1 1 2 1 +COUNSEL 1 1 1 2 +COUNCILS 0 1 0 1 +COSTS 0 1 1 0 +COST 4 1 4 5 +CORRELATES 0 1 1 0 +CORRECTORISTIC 0 1 0 1 +COROTS 0 1 0 1 +CORNIERS 0 1 0 1 +CORN 4 1 5 4 +CORLIE 0 1 0 1 +CORALIE 0 1 1 0 +COPE 0 1 0 1 +COOK 1 1 2 1 +CONTROLL 0 1 0 1 +CONTROL 3 1 4 3 +CONTI 0 1 1 0 +CONTENSED 0 1 0 1 +CONTAINING 0 1 0 1 +CONTAINED 2 1 3 2 +CONTACT 0 1 1 0 +CONSTANTINE 0 1 1 0 +CONSIDERATE 0 1 1 0 +CONSIDERABLE 6 1 6 7 +CONSIDER 1 1 1 2 +CONSID'BLE 0 1 1 0 +CONSCIOUS 2 1 3 2 +CONSCIENCE 4 1 4 5 +CONQUERING 0 1 0 1 +CONQUERIN 0 1 1 0 +CONJUROR 0 1 0 1 +CONJURER 0 1 1 0 +CONDUCT 6 1 6 7 +CONDENSE 0 1 1 0 +COMTE 1 1 1 2 +COMPOSSIBLE 0 1 0 1 +COMPOSSER 0 1 1 0 +COMPOSED 1 1 1 2 +COMPOSE 0 1 1 0 +COMMENTATORS 0 1 1 0 +COMMANDS 1 1 1 2 +COMMANDMENTS 0 1 1 0 +COMING 6 1 7 6 +COMER 0 1 0 1 +COLORLESS 0 1 0 1 +COLLARS 1 1 1 2 +COLD 8 1 9 8 +COEXIST 0 1 0 1 +COAL 0 1 1 0 +CLUE 1 1 1 2 +CLOTH 3 1 4 3 +CLEW 0 1 1 0 +CLENCHED 1 1 1 2 +CLEANING 2 1 2 3 +CLAUSE 0 1 1 0 +CLAS 0 1 0 1 +CIVET 0 1 1 0 +CITADELLED 0 1 1 0 +CITADELED 0 1 0 1 +CIGARETTE 1 1 2 1 +CHRONICAL 0 1 0 1 +CHRISTAIN 0 1 1 0 +CHORD 0 1 1 0 +CHOLERIST 0 1 0 1 +CHISH 0 1 0 1 +CHINGACHOOK 0 1 0 1 +CHINGACHGOOK 1 1 2 1 +CHIAROSCURISTS 0 1 1 0 +CHIAROSCURIST 0 1 1 0 +CHEROOT 0 1 1 0 +CHEER 0 1 0 1 +CHECKERBOARD 0 1 0 1 +CHECKER 0 1 1 0 +CHATTERBOX 0 1 1 0 +CHATTER 0 1 0 1 +CHART 0 1 0 1 +CHARLES 2 1 3 2 +CHARACTERISTIC 9 1 10 9 +CHANGES 3 1 3 4 +CERTAINTY 2 1 2 3 +CENTRED 0 1 1 0 +CENTRE 1 1 1 2 +CENTER 1 1 2 1 +CENDENARIES 0 1 1 0 +CELEBRATE 1 1 1 2 +CEASED 1 1 1 2 +CEASD 0 1 1 0 +CAUSH 0 1 0 1 +CAULDWELL 0 1 0 1 +CAUGHT 9 1 10 9 +CAT 6 1 7 6 +CASTS 0 1 1 0 +CAST 8 1 8 9 +CARVED 1 1 1 2 +CARPATIUS 0 1 0 1 +CARPACCIO'S 0 1 1 0 +CAPLICE 0 1 0 1 +CAPLESS 0 1 1 0 
+CAPACITY 2 1 3 2 +CANVASS 0 1 1 0 +CANVAS 4 1 4 5 +CANDLELIGHT 0 1 0 1 +CANDLE 1 1 2 1 +CAME 43 1 44 43 +CALLED 24 1 24 25 +CALDWELL 0 1 1 0 +C 0 1 1 0 +BYE 1 1 2 1 +BUTTED 0 1 1 0 +BURNE 1 1 1 2 +BURGOYNE 0 1 1 0 +BUNNITT 0 1 0 1 +BUNNIT 0 1 1 0 +BUILD 1 1 1 2 +BUGS 0 1 0 1 +BUDDED 0 1 0 1 +BUCHANAN 0 1 1 0 +BROWN 9 1 10 9 +BROTHER 7 1 8 7 +BROCKTON'S 0 1 0 1 +BRITANNULISTS 0 1 1 0 +BRISK 0 1 1 0 +BRINGS 0 1 0 1 +BRINGING 4 1 5 4 +BRETHREN 2 1 2 3 +BREAKFAST 1 1 1 2 +BREAKFAS 0 1 1 0 +BREAD 4 1 5 4 +BRANDS 1 1 2 1 +BRAND 0 1 0 1 +BRANCH 3 1 4 3 +BRAGGELON 0 1 0 1 +BRAGELON 0 1 0 1 +BRACTON'S 0 1 1 0 +BRACE 0 1 0 1 +BOXWOMEN 0 1 0 1 +BOWLS 0 1 0 1 +BOWER 1 1 1 2 +BOUGHT 0 1 0 1 +BOTANY 0 1 1 0 +BOTANICAL 1 1 2 1 +BORN 7 1 8 7 +BOREHOUND 0 1 0 1 +BORDERS 3 1 4 3 +BORDER 3 1 3 4 +BOOT 0 1 0 1 +BOOKS 8 1 8 9 +BOOKKEEPER 0 1 1 0 +BOLLS 0 1 1 0 +BOL 0 1 0 1 +BOAT 2 1 2 3 +BOARD 8 1 9 8 +BOAR 0 1 1 0 +BLUESTS 0 1 0 1 +BLUESKINS 1 1 2 1 +BLUESKIN 0 1 0 1 +BLUE 20 1 21 20 +BLINT 0 1 0 1 +BLESSINGS 2 1 3 2 +BLESSING 2 1 2 3 +BLASTS 0 1 1 0 +BLAST 0 1 0 1 +BITKEEPER 0 1 0 1 +BIRDS 3 1 4 3 +BILLYGOAT 0 1 1 0 +BILLED 0 1 1 0 +BETTER 25 1 25 26 +BERYL 0 1 0 1 +BERKSON 0 1 0 1 +BERGIN 0 1 0 1 +BENCH 3 1 4 3 +BEIN 0 1 1 0 +BEHAVIOURISTS 0 1 0 1 +BEHAVIORIST 0 1 0 1 +BEGIRT 0 1 0 1 +BEGGED 1 1 1 2 +BEG 1 1 2 1 +BEFORE 74 1 75 74 +BEFIT 0 1 1 0 +BEFALL 0 1 0 1 +BEFAL 0 1 1 0 +BEES 0 1 0 1 +BEEDER 0 1 1 0 +BEECHED 0 1 0 1 +BEEBE 0 1 1 0 +BEDIMMED 0 1 1 0 +BEDEMNED 0 1 0 1 +BED 12 1 12 13 +BEATER 0 1 1 0 +BEAR 10 1 11 10 +BEACHED 0 1 1 0 +BAYLIM'S 0 1 0 1 +BATTANICAL 0 1 0 1 +BASKET 1 1 2 1 +BASCULADES 0 1 0 1 +BARTANY 0 1 0 1 +BANDS 4 1 4 5 +BANDERE 0 1 0 1 +BALL 4 1 4 5 +BALAAM'S 0 1 1 0 +BAINS 0 1 1 0 +BADR'D 0 1 0 1 +BADGES 0 1 1 0 +BADGERS 0 1 0 1 +BADAUDERIE 0 1 1 0 +BACK 45 1 45 46 +BABRUSA 0 1 0 1 +BABIRUSA 0 1 1 0 +AX 0 1 0 1 +AWHILE 0 1 1 0 +AWAY 50 1 50 51 +AWARE 5 1 6 5 +AUTTINGHAM 0 1 0 1 +AUNT 4 1 5 4 +AU 0 1 1 0 +ATTORIAN 0 1 0 1 +ATTITUDE 3 1 4 3 +ATTITSON 0 1 0 1 +ATMOSPHERIC 0 1 1 0 +ATMOSPHERE 2 1 2 3 +ATHOLEMEN 0 1 1 0 +ATHLETE 1 1 2 1 +ATCHISON 0 1 1 0 +ASTRO 0 1 0 1 +ASTOR 1 1 2 1 +ASSEMBLED 2 1 3 2 +ASCENDENCY 0 1 1 0 +ASCENDANCY 0 1 0 1 +ARROW 2 1 2 3 +ARRONDISSEMENT 0 1 1 0 +ARRIVING 1 1 2 1 +ARRESTS 0 1 1 0 +ARREST 1 1 1 2 +ARMED 1 1 2 1 +ARM 5 1 5 6 +ARK 0 1 0 1 +ARDOUR 0 1 1 0 +ARDOR 0 1 0 1 +ARDLE 0 1 1 0 +ARCHER 0 1 0 1 +ARC 1 1 2 1 +APPROVES 0 1 1 0 +APPRENTICED 0 1 0 1 +APPRENTICE 2 1 3 2 +APPEALED 0 1 1 0 +APPEAL 1 1 1 2 +ANYWHERE 5 1 6 5 +ANYTHING 17 1 17 18 +ANYMORE 0 1 1 0 +ANTHROPIES 0 1 0 1 +ANTETING 0 1 0 1 +ANTEROOM 0 1 0 1 +ANTEDATING 0 1 1 0 +ANTE 0 1 1 0 +ANSWERED 14 1 14 15 +ANSWERD 0 1 1 0 +ANNE 2 1 2 3 +ANNALS 2 1 3 2 +ANNAL 0 1 0 1 +ANGULATIONS 0 1 0 1 +ANGELS 2 1 3 2 +ANGEL 1 1 1 2 +ANDREW'S 0 1 0 1 +ANDRES 0 1 0 1 +ANDRE'S 0 1 0 1 +ANDRE 0 1 0 1 +ANDDELA 0 1 0 1 +ANAXAGORAS 0 1 1 0 +AMY 0 1 0 1 +AMPHITHEATRE 0 1 0 1 +AMPHITHEATER 0 1 1 0 +AMEN 0 1 0 1 +AMDELLA 0 1 0 1 +AMASS 0 1 1 0 +ALTERNATIVE 0 1 1 0 +ALLUVION 0 1 1 0 +ALLUVIAN 0 1 0 1 +ALGO 0 1 0 1 +ALEXANDRA 2 1 3 2 +ALEXANDER 13 1 13 14 +ALBANS 0 1 1 0 +ALBAN'S 0 1 0 1 +ALAP 0 1 0 1 +AIGNAN 0 1 1 0 +AIDS 0 1 0 1 +AID 1 1 2 1 +AH 6 1 7 6 +AGAIN 38 1 39 38 +AFTERDECK 0 1 1 0 +AFTER 58 1 58 59 +AFFRIGHTENED 0 1 1 0 +AFFILIATED 1 1 2 1 +AFFECTED 3 1 3 4 +AFFECT 0 1 1 0 +ADVENTURER 0 1 0 1 +ADVENTURE 0 1 1 0 +ADONNA 0 1 0 1 +ADONA 0 1 1 0 +ADOCTION 0 1 0 1 +ADMISSIONS 0 1 0 1 +ADETE 0 1 0 1 +ADELAX 0 1 0 1 +ADDITION 0 1 0 1 +ACTOR'S 0 1 0 1 +ACTOR 1 1 2 1 +ACTION 11 1 11 12 +ACKNOWLEDGMENT 0 1 0 1 
+ACKNOWLEDGEMENT 0 1 1 0 +ACCUTMENTS 0 1 0 1 +ACCOUTREMENTS 0 1 1 0 +ACCORD 2 1 2 3 +ABREAD 0 1 0 1 +ABOLITIONISTS 0 1 1 0 +ABOLITIONIST 0 1 0 1 +ABLE'S 0 1 0 1 +ABDUCTION 0 1 1 0 +ABBEY 0 1 0 1 +ABBE 0 1 1 0 +ZOOLOGY 1 0 1 1 +ZOOF 1 0 1 1 +ZION 1 0 1 1 +ZEST 1 0 1 1 +ZEAL 2 0 2 2 +YOUTH 5 0 5 5 +YOURSELVES 1 0 1 1 +YOURSELF 8 0 8 8 +YOURS 3 0 3 3 +YOUR 109 0 109 109 +YOUNGER 1 0 1 1 +YOUNG 43 0 43 43 +YOU'VE 4 0 4 4 +YOU'D 3 0 3 3 +YORKSHIRE 2 0 2 2 +YORK 6 0 6 6 +YONDER 1 0 1 1 +YOKE 1 0 1 1 +YIELDING 3 0 3 3 +YIELDED 2 0 2 2 +YIELD 3 0 3 3 +YET 43 0 43 43 +YESTERDAY 3 0 3 3 +YES 33 0 33 33 +YELLOW 9 0 9 9 +YELL 1 0 1 1 +YEARS 34 0 34 34 +YEARLY 2 0 2 2 +YEAR 5 0 5 5 +YEA 1 0 1 1 +YARN 2 0 2 2 +YAMS 1 0 1 1 +YACHTSMAN 1 0 1 1 +YACHT 3 0 3 3 +WROUGHT 2 0 2 2 +WROTE 6 0 6 6 +WRONGS 1 0 1 1 +WRONG 10 0 10 10 +WRITTEN 7 0 7 7 +WRITS 1 0 1 1 +WRITINGS 2 0 2 2 +WRITING 6 0 6 6 +WRITHING 1 0 1 1 +WRITES 1 0 1 1 +WRITER 2 0 2 2 +WRIT 1 0 1 1 +WRIST 1 0 1 1 +WRETCHEDNESS 2 0 2 2 +WRESTLERS 1 0 1 1 +WRESTLED 1 0 1 1 +WOUNDED 1 0 1 1 +WOUND 1 0 1 1 +WOULDN'T 5 0 5 5 +WORTHY 6 0 6 6 +WORSTED 1 0 1 1 +WORSHIP 3 0 3 3 +WORRY 3 0 3 3 +WORN 1 0 1 1 +WORM 4 0 4 4 +WORLD'S 2 0 2 2 +WORLD 37 0 37 37 +WORKS 8 0 8 8 +WORKMEN 1 0 1 1 +WORKING 3 0 3 3 +WORKERS 1 0 1 1 +WORKED 5 0 5 5 +WORK 34 0 34 34 +WORDS 20 0 20 20 +WORD 20 0 20 20 +WOOL 3 0 3 3 +WOODLEY 3 0 3 3 +WOODEN 3 0 3 3 +WONDERS 1 0 1 1 +WONDERINGLY 1 0 1 1 +WONDERFUL 7 0 7 7 +WONDERED 2 0 2 2 +WONDER 7 0 7 7 +WON'T 15 0 15 15 +WOMAN 28 0 28 28 +WOLF 1 0 1 1 +WOKE 1 0 1 1 +WOE 2 0 2 2 +WIZARD'S 1 0 1 1 +WIZARD 3 0 3 3 +WIVES 3 0 3 3 +WITTY 1 0 1 1 +WITTILY 1 0 1 1 +WITS 1 0 1 1 +WITNESSING 1 0 1 1 +WITNESSES 1 0 1 1 +WITNESS 1 0 1 1 +WITHOUT 37 0 37 37 +WITHIN 23 0 23 23 +WITHERING 1 0 1 1 +WITHERED 1 0 1 1 +WITHDRAWN 2 0 2 2 +WITHDRAW 1 0 1 1 +WITHAL 1 0 1 1 +WITCH 3 0 3 3 +WIT 3 0 3 3 +WISHES 3 0 3 3 +WISHERS 1 0 1 1 +WISHED 6 0 6 6 +WISH 11 0 11 11 +WISE 5 0 5 5 +WISDOM 3 0 3 3 +WIRE 4 0 4 4 +WIPED 1 0 1 1 +WINNING 1 0 1 1 +WINKING 1 0 1 1 +WINK 1 0 1 1 +WINIFRED 1 0 1 1 +WINGS 5 0 5 5 +WING 3 0 3 3 +WINE 7 0 7 7 +WINDY 1 0 1 1 +WINDS 3 0 3 3 +WINDOWS 7 0 7 7 +WINDOW 16 0 16 16 +WINDING 1 0 1 1 +WIN 2 0 2 2 +WILY 1 0 1 1 +WILSON 1 0 1 1 +WILLS 1 0 1 1 +WILLOWY 1 0 1 1 +WILLINGLY 2 0 2 2 +WILLING 2 0 2 2 +WILLED 1 0 1 1 +WILDERNESS 1 0 1 1 +WILD 9 0 9 9 +WIDTH 1 0 1 1 +WIDEST 1 0 1 1 +WIDENING 1 0 1 1 +WIDELY 1 0 1 1 +WIDE 9 0 9 9 +WICKET 1 0 1 1 +WICKEDNESS 1 0 1 1 +WICKEDEST 1 0 1 1 +WICKED 3 0 3 3 +WHY 44 0 44 44 +WHOM 18 0 18 18 +WHOLESOME 1 0 1 1 +WHOLE 25 0 25 25 +WHOEVER 3 0 3 3 +WHITNEY 1 0 1 1 +WHISTLING 1 0 1 1 +WHISTLE 2 0 2 2 +WHISPERED 7 0 7 7 +WHISPER 1 0 1 1 +WHISKERS 1 0 1 1 +WHISK 1 0 1 1 +WHIRLWIND 3 0 3 3 +WHIM 1 0 1 1 +WHILST 3 0 3 3 +WHEREVER 3 0 3 3 +WHEREUPON 3 0 3 3 +WHEREON 1 0 1 1 +WHEREFORE 1 0 1 1 +WHEREBY 1 0 1 1 +WHENEVER 3 0 3 3 +WHELPS 1 0 1 1 +WHEELING 1 0 1 1 +WHEELER 1 0 1 1 +WHEELED 3 0 3 3 +WHEEL 1 0 1 1 +WHEAT 2 0 2 2 +WHARVES 1 0 1 1 +WETTING 1 0 1 1 +WET 9 0 9 9 +WESTERN 1 0 1 1 +WESLEY 2 0 2 2 +WEREN'T 2 0 2 2 +WENT 25 0 25 25 +WELFARE 2 0 2 2 +WEIGHT 2 0 2 2 +WEIGHED 3 0 3 3 +WEIGH 1 0 1 1 +WEEPING 4 0 4 4 +WEEP 1 0 1 1 +WEEKS 4 0 4 4 +WEEK 2 0 2 2 +WEEDS 3 0 3 3 +WEED 1 0 1 1 +WEBS 1 0 1 1 +WEB 1 0 1 1 +WEASEL 1 0 1 1 +WEARY 1 0 1 1 +WEARING 2 0 2 2 +WEARINESS 2 0 2 2 +WEARILY 2 0 2 2 +WEARERS 1 0 1 1 +WEAPON 2 0 2 2 +WEALTH 5 0 5 5 +WEAKNESS 3 0 3 3 +WEAKENED 2 0 2 2 +WEAK 6 0 6 6 +WAYS 1 0 1 1 +WAX 1 0 1 1 +WAVING 2 0 2 2 +WAVERING 2 0 2 2 +WAVED 1 0 1 1 +WATSON 5 0 5 5 +WATERY 2 0 2 2 +WATERS 6 0 6 6 
+WATERCRESS 1 0 1 1 +WATCHING 1 0 1 1 +WATCHFULNESS 1 0 1 1 +WATCHFUL 1 0 1 1 +WATCHED 7 0 7 7 +WATCH 2 0 2 2 +WASTEFUL 4 0 4 4 +WASTED 2 0 2 2 +WASTE 5 0 5 5 +WASN'T 2 0 2 2 +WASHINGTON 1 0 1 1 +WASHING 1 0 1 1 +WASH 1 0 1 1 +WARY 1 0 1 1 +WARRIORS 2 0 2 2 +WARRENTON'S 2 0 2 2 +WARRENTON 4 0 4 4 +WARRANTED 2 0 2 2 +WARRANT 1 0 1 1 +WARN 1 0 1 1 +WARMEST 1 0 1 1 +WARMED 1 0 1 1 +WARM 4 0 4 4 +WARDS 1 0 1 1 +WARD 1 0 1 1 +WANTS 3 0 3 3 +WANTING 3 0 3 3 +WANTED 8 0 8 8 +WANT 19 0 19 19 +WANDERED 2 0 2 2 +WANDER 2 0 2 2 +WAN 1 0 1 1 +WALNUT 1 0 1 1 +WALLS 2 0 2 2 +WALL 6 0 6 6 +WALKS 1 0 1 1 +WALKING 2 0 2 2 +WALKETH 1 0 1 1 +WALKED 6 0 6 6 +WALK 5 0 5 5 +WAITING 7 0 7 7 +WAITERS 1 0 1 1 +WAITER 1 0 1 1 +WAITED 1 0 1 1 +WAIT 8 0 8 8 +WAISTCOAT 1 0 1 1 +WAIST 1 0 1 1 +WAILING 1 0 1 1 +WAGED 1 0 1 1 +WADDLING 1 0 1 1 +W 3 0 3 3 +VULGAR 1 0 1 1 +VOYAGING 2 0 2 2 +VOYAGES 1 0 1 1 +VOYAGE 2 0 2 2 +VOWS 1 0 1 1 +VOTES 1 0 1 1 +VOTERS 1 0 1 1 +VOLUME 1 0 1 1 +VOLTAIRE'S 1 0 1 1 +VOICES 2 0 2 2 +VIVIDLY 2 0 2 2 +VIVID 2 0 2 2 +VIVE 1 0 1 1 +VIVACITY 1 0 1 1 +VITALITY 1 0 1 1 +VITAL 1 0 1 1 +VISTA 1 0 1 1 +VISITED 4 0 4 4 +VISIT 4 0 4 4 +VISION 2 0 2 2 +VISIBLE 2 0 2 2 +VIRTUOUS 1 0 1 1 +VIRTUE 3 0 3 3 +VIRTUALLY 2 0 2 2 +VIRGINS 1 0 1 1 +VIRGIN 2 0 2 2 +VIPER 2 0 2 2 +VIOLET 1 0 1 1 +VIOLENT 5 0 5 5 +VIOLENCE 5 0 5 5 +VIOLATED 1 0 1 1 +VINEGAR 1 0 1 1 +VINDICATION 1 0 1 1 +VINDICATE 1 0 1 1 +VILLAGE 4 0 4 4 +VIKING 3 0 3 3 +VIGOROUSLY 1 0 1 1 +VIGOROUS 1 0 1 1 +VIGILANCE 1 0 1 1 +VIEWS 1 0 1 1 +VIEWED 1 0 1 1 +VIE 1 0 1 1 +VICTUALS 1 0 1 1 +VICTORY 1 0 1 1 +VICTORIES 1 0 1 1 +VICTIM 2 0 2 2 +VICOMTE 1 0 1 1 +VICISSITUDES 1 0 1 1 +VICIOUS 1 0 1 1 +VICINITY 1 0 1 1 +VEXED 1 0 1 1 +VEXATION 1 0 1 1 +VESTURE 1 0 1 1 +VESTIBULE 1 0 1 1 +VESSEL 2 0 2 2 +VERTEBRAL 1 0 1 1 +VERSES 1 0 1 1 +VERSED 3 0 3 3 +VERNE'S 1 0 1 1 +VERIFY 1 0 1 1 +VERGE 1 0 1 1 +VENTURED 1 0 1 1 +VENTURE 1 0 1 1 +VENICE 1 0 1 1 +VENGEANCE 2 0 2 2 +VENERABLE 1 0 1 1 +VELVET 1 0 1 1 +VELOCITY 2 0 2 2 +VEILED 2 0 2 2 +VEHICLE 1 0 1 1 +VEHEMENTLY 1 0 1 1 +VAULT 1 0 1 1 +VASTLY 1 0 1 1 +VAST 5 0 5 5 +VASSALS 1 0 1 1 +VARYING 2 0 2 2 +VARIOUS 7 0 7 7 +VARIETY 2 0 2 2 +VARIETIES 1 0 1 1 +VARIED 1 0 1 1 +VARIATIONS 1 0 1 1 +VARIANCE 1 0 1 1 +VARIABILITY 2 0 2 2 +VANQUISHED 2 0 2 2 +VANITY 1 0 1 1 +VANISHED 2 0 2 2 +VANISH 2 0 2 2 +VANDERPOOLS 1 0 1 1 +VALUE 3 0 3 3 +VALUABLE 2 0 2 2 +VALLEYS 2 0 2 2 +VALLEYED 1 0 1 1 +VALLEY 4 0 4 4 +VALIANTLY 1 0 1 1 +VALHALLA 1 0 1 1 +VALE 1 0 1 1 +VAINLY 1 0 1 1 +VAIN 1 0 1 1 +VAGUELY 1 0 1 1 +VAGUE 3 0 3 3 +VACUUM 1 0 1 1 +VACANT 1 0 1 1 +UTTERLY 4 0 4 4 +UTTERED 1 0 1 1 +UTTERANCE 1 0 1 1 +UTTER 4 0 4 4 +UTMOST 3 0 3 3 +UTILITY 3 0 3 3 +USUALLY 4 0 4 4 +USUAL 5 0 5 5 +USING 3 0 3 3 +USELESS 4 0 4 4 +USEFUL 5 0 5 5 +USED 17 0 17 17 +USE 31 0 31 31 +US 60 0 60 60 +URGING 1 0 1 1 +URGED 3 0 3 3 +UPWARDS 1 0 1 1 +UPTOWN 1 0 1 1 +UPRIGHT 1 0 1 1 +UPRAISED 1 0 1 1 +UPPERMOST 1 0 1 1 +UPPER 2 0 2 2 +UPLIFTED 1 0 1 1 +UPHOLSTERED 1 0 1 1 +UPHEAVAL 1 0 1 1 +UPBRAIDED 1 0 1 1 +UNWORTHY 1 0 1 1 +UNWILLING 2 0 2 2 +UNVARNISHED 1 0 1 1 +UNUSUAL 4 0 4 4 +UNTUTORED 1 0 1 1 +UNTRIED 1 0 1 1 +UNTREATED 1 0 1 1 +UNTOUCHED 1 0 1 1 +UNTIL 16 0 16 16 +UNTIDINESS 1 0 1 1 +UNTASTED 1 0 1 1 +UNSUCCESSFUL 1 0 1 1 +UNSEPARATED 1 0 1 1 +UNSEEN 1 0 1 1 +UNSAID 1 0 1 1 +UNREAL 1 0 1 1 +UNPRECEDENTED 1 0 1 1 +UNPOPULAR 1 0 1 1 +UNPLEASANT 3 0 3 3 +UNPERCEIVED 1 0 1 1 +UNPARALLELED 1 0 1 1 +UNOBSERVED 1 0 1 1 +UNNECESSARY 1 0 1 1 +UNNATURAL 1 0 1 1 +UNMOVED 1 0 1 1 +UNLUCKY 2 0 2 2 +UNLUCKILY 1 0 1 1 +UNLOCKED 1 0 1 1 +UNLOCK 1 0 1 1 
+UNLOADED 1 0 1 1 +UNLIKELY 1 0 1 1 +UNLESS 5 0 5 5 +UNKNOWN 1 0 1 1 +UNJUST 2 0 2 2 +UNIVERSITY 1 0 1 1 +UNIVERSE 1 0 1 1 +UNIVERSAL 3 0 3 3 +UNITED 8 0 8 8 +UNITE 1 0 1 1 +UNION 3 0 3 3 +UNINVITED 1 0 1 1 +UNINTELLIGIBLE 1 0 1 1 +UNIFORMS 2 0 2 2 +UNIFORM 1 0 1 1 +UNICORN 1 0 1 1 +UNHEEDED 1 0 1 1 +UNHAPPY 4 0 4 4 +UNHAPPINESS 1 0 1 1 +UNGRACIOUSLY 1 0 1 1 +UNGRACIOUS 1 0 1 1 +UNFORTUNATELY 2 0 2 2 +UNFORTUNATE 1 0 1 1 +UNFOLD 1 0 1 1 +UNFINISHED 2 0 2 2 +UNFEELING 1 0 1 1 +UNFAMILIAR 1 0 1 1 +UNFAIRLY 1 0 1 1 +UNFAIR 2 0 2 2 +UNFAILING 1 0 1 1 +UNEXPECTEDLY 2 0 2 2 +UNEXPECTED 3 0 3 3 +UNEXCEPTIONABLY 1 0 1 1 +UNEASY 4 0 4 4 +UNEASINESS 1 0 1 1 +UNEASILY 1 0 1 1 +UNEARTHLY 1 0 1 1 +UNDUE 1 0 1 1 +UNDOUBTEDLY 1 0 1 1 +UNDOING 1 0 1 1 +UNDERWATER 1 0 1 1 +UNDERTONE 1 0 1 1 +UNDERTAKING 2 0 2 2 +UNDERSTOOD 6 0 6 6 +UNDERSTANDING 4 0 4 4 +UNDERSTAND 9 0 9 9 +UNDERSCORE 1 0 1 1 +UNDERNEATH 1 0 1 1 +UNDERMINE 1 0 1 1 +UNDERHANDED 1 0 1 1 +UNDER 40 0 40 40 +UNDECEIVED 1 0 1 1 +UNCOUTH 1 0 1 1 +UNCOURTEOUS 1 0 1 1 +UNCONTROLLABLE 1 0 1 1 +UNCONSTITUTIONALITY 1 0 1 1 +UNCOMPROMISING 1 0 1 1 +UNCOMFORTABLE 1 0 1 1 +UNCLE 6 0 6 6 +UNCIVIL 1 0 1 1 +UNCHARITABLENESS 1 0 1 1 +UNCHANGED 1 0 1 1 +UNCERTAIN 2 0 2 2 +UNCASING 1 0 1 1 +UNCAS 10 0 10 10 +UNBUTTONING 1 0 1 1 +UNBROKEN 1 0 1 1 +UNBEARABLE 2 0 2 2 +UNAVOIDABLE 1 0 1 1 +UNAVERRED 1 0 1 1 +UNANIMOUSLY 1 0 1 1 +UNANIMOUS 1 0 1 1 +UNAFFECTED 1 0 1 1 +UNACCOUNTABLE 1 0 1 1 +UNABLE 1 0 1 1 +UGLY 3 0 3 3 +TYRANNY 1 0 1 1 +TYPICAL 1 0 1 1 +TYPES 1 0 1 1 +TYPE 1 0 1 1 +TWIXT 1 0 1 1 +TWITE 1 0 1 1 +TWIRLING 1 0 1 1 +TWIN 1 0 1 1 +TWILIGHT 2 0 2 2 +TWICE 3 0 3 3 +TWENTY 15 0 15 15 +TWELVE 2 0 2 2 +TWELFTH 1 0 1 1 +TWASN'T 1 0 1 1 +TURNS 1 0 1 1 +TURNIPS 1 0 1 1 +TURNING 6 0 6 6 +TURNER'S 1 0 1 1 +TURNER 4 0 4 4 +TURF 1 0 1 1 +TUNE 2 0 2 2 +TUMULTUOUS 1 0 1 1 +TUMULT 3 0 3 3 +TUMBLER 1 0 1 1 +TUMBLED 3 0 3 3 +TUFT 1 0 1 1 +TUESDAY 1 0 1 1 +TUCKED 1 0 1 1 +TUBE 1 0 1 1 +TRYING 5 0 5 5 +TRY 4 0 4 4 +TRUSTY 1 0 1 1 +TRUSTS 1 0 1 1 +TRUST 5 0 5 5 +TRUNKS 2 0 2 2 +TRUNK 2 0 2 2 +TRUMPETS 1 0 1 1 +TRULY 9 0 9 9 +TRUFFLES 1 0 1 1 +TRUE 21 0 21 21 +TRUDGED 1 0 1 1 +TROUT'S 1 0 1 1 +TROUT 1 0 1 1 +TROUBLESOME 1 0 1 1 +TROUBLES 2 0 2 2 +TROUBLED 4 0 4 4 +TROTTING 1 0 1 1 +TROTTED 1 0 1 1 +TROTH 1 0 1 1 +TROPHIES 1 0 1 1 +TROOPS 3 0 3 3 +TROOPER'S 1 0 1 1 +TRIUMPHANTLY 1 0 1 1 +TRIUMPHANT 1 0 1 1 +TRIUMPH 3 0 3 3 +TRIPPED 1 0 1 1 +TRINKET 1 0 1 1 +TRIMNESS 1 0 1 1 +TRIM 1 0 1 1 +TRIGGER 1 0 1 1 +TRIED 9 0 9 9 +TRICKS 2 0 2 2 +TRIBUTE 1 0 1 1 +TRIBES 1 0 1 1 +TRIAL 2 0 2 2 +TREND 2 0 2 2 +TREMULOUSLY 1 0 1 1 +TREMULOUS 1 0 1 1 +TREMOR 1 0 1 1 +TREMENDOUSLY 1 0 1 1 +TREMBLING 5 0 5 5 +TREMBLED 1 0 1 1 +TREMBLE 2 0 2 2 +TREE 35 0 35 35 +TREATY 1 0 1 1 +TREATS 1 0 1 1 +TREATING 1 0 1 1 +TREATED 2 0 2 2 +TREAT 1 0 1 1 +TREASURE 2 0 2 2 +TREAD 1 0 1 1 +TRAY 1 0 1 1 +TRAVESTY 1 0 1 1 +TRAVERSED 1 0 1 1 +TRAVEL 1 0 1 1 +TRASH 1 0 1 1 +TRAP 2 0 2 2 +TRANSPARENT 2 0 2 2 +TRANSLATION 1 0 1 1 +TRANSLATE 1 0 1 1 +TRANSIENT 2 0 2 2 +TRANSFERRED 1 0 1 1 +TRANSCRIPT 1 0 1 1 +TRANQUILLITY 1 0 1 1 +TRANQUIL 1 0 1 1 +TRAINS 1 0 1 1 +TRAINING 3 0 3 3 +TRAINED 1 0 1 1 +TRAIN 1 0 1 1 +TRAIL 2 0 2 2 +TRAFFIC 1 0 1 1 +TRADITIONS 3 0 3 3 +TRADITION 1 0 1 1 +TRADES 1 0 1 1 +TRACK 1 0 1 1 +TRACES 2 0 2 2 +TRACE 1 0 1 1 +TOYS 1 0 1 1 +TOWNS 3 0 3 3 +TOWERS 1 0 1 1 +TOWER 1 0 1 1 +TOUR 1 0 1 1 +TOUCHING 1 0 1 1 +TOUCHES 4 0 4 4 +TOUCH 8 0 8 8 +TOTAL 1 0 1 1 +TOSSING 1 0 1 1 +TOSSED 1 0 1 1 +TORTURED 2 0 2 2 +TORTURE 1 0 1 1 +TORTOISE 1 0 1 1 +TORRENT 3 0 3 3 +TORN 1 0 1 1 +TORCH 1 0 1 1 
+TOPSAILS 1 0 1 1 +TOPMOST 1 0 1 1 +TOPMASTS 1 0 1 1 +TOOTHED 1 0 1 1 +TOOTH 1 0 1 1 +TOOK 33 0 33 33 +TONGUES 1 0 1 1 +TONGUE 8 0 8 8 +TONES 3 0 3 3 +TONED 1 0 1 1 +TONE 5 0 5 5 +TOMMY 1 0 1 1 +TOMB 1 0 1 1 +TOM 4 0 4 4 +TOLERATION 1 0 1 1 +TOLEDANS 1 0 1 1 +TOKEN 2 0 2 2 +TOE 1 0 1 1 +TITLE 3 0 3 3 +TITIAN 1 0 1 1 +TIS 8 0 8 8 +TIRELESS 1 0 1 1 +TIRED 6 0 6 6 +TIRE 1 0 1 1 +TIPTOE 2 0 2 2 +TIPPED 1 0 1 1 +TIP 3 0 3 3 +TINY 3 0 3 3 +TINTS 1 0 1 1 +TINT 1 0 1 1 +TINSEL 1 0 1 1 +TINKLED 1 0 1 1 +TINGLING 1 0 1 1 +TINGE 1 0 1 1 +TIN 1 0 1 1 +TIME'S 2 0 2 2 +TIMASCHEFF'S 1 0 1 1 +TILL 8 0 8 8 +TILES 1 0 1 1 +TIGHTLY 1 0 1 1 +TIGHTEN 1 0 1 1 +TIGHT 1 0 1 1 +TIED 2 0 2 2 +TIE 1 0 1 1 +TIDING 1 0 1 1 +TICKET 1 0 1 1 +THY 17 0 17 17 +THWART 1 0 1 1 +THURSTON 2 0 2 2 +THURSDAY 1 0 1 1 +THRUSTING 1 0 1 1 +THRUST 5 0 5 5 +THROW 2 0 2 2 +THROUGHOUT 5 0 5 5 +THROATS 1 0 1 1 +THROAT 1 0 1 1 +THRILLING 1 0 1 1 +THRILLED 1 0 1 1 +THRILL 1 0 1 1 +THRICE 1 0 1 1 +THREW 5 0 5 5 +THREE 41 0 41 41 +THREATS 1 0 1 1 +THREATENS 2 0 2 2 +THREATENING 3 0 3 3 +THREATENED 1 0 1 1 +THRALLS 2 0 2 2 +THRALL'S 1 0 1 1 +THRALL 2 0 2 2 +THOUSANDS 2 0 2 2 +THOUSAND 13 0 13 13 +THOUGHTS 13 0 13 13 +THOUGHTLESS 1 0 1 1 +THOUGHTFUL 1 0 1 1 +THOSE 37 0 37 37 +THOROUGH 1 0 1 1 +THOR 1 0 1 1 +THOMAS 1 0 1 1 +THIRTY 12 0 12 12 +THIRTIETH 1 0 1 1 +THIRTEENTH 1 0 1 1 +THIRD 7 0 7 7 +THINGS 34 0 34 34 +THIN 2 0 2 2 +THICKEST 1 0 1 1 +THICK 5 0 5 5 +THEY'VE 1 0 1 1 +THERMOMETER 1 0 1 1 +THEREOF 1 0 1 1 +THEREIN 3 0 3 3 +THEREAFTER 1 0 1 1 +THEREABOUTS 1 0 1 1 +THEORY 5 0 5 5 +THEORIES 1 0 1 1 +THEORETICAL 1 0 1 1 +THEOLOGY 1 0 1 1 +THENCE 1 0 1 1 +THEMSELVES 12 0 12 12 +THEME 1 0 1 1 +THEIRS 2 0 2 2 +THEFT 4 0 4 4 +THEATRICAL 1 0 1 1 +THEATRES 1 0 1 1 +THANKS 3 0 3 3 +THANKFUL 1 0 1 1 +THANKED 1 0 1 1 +TEXTURES 1 0 1 1 +TEXT 2 0 2 2 +TESTIMONY 1 0 1 1 +TESTIMONIES 1 0 1 1 +TESTED 1 0 1 1 +TEST 2 0 2 2 +TERROR 2 0 2 2 +TERRITORY 2 0 2 2 +TERRITORIAL 4 0 4 4 +TERRIFIED 2 0 2 2 +TERRIFIC 1 0 1 1 +TERRIBLY 2 0 2 2 +TERRIBLE 8 0 8 8 +TERRACED 1 0 1 1 +TERMS 9 0 9 9 +TERM 6 0 6 6 +TENTS 2 0 2 2 +TENFOLD 2 0 2 2 +TENDERLY 1 0 1 1 +TENDER 3 0 3 3 +TENDED 1 0 1 1 +TEND 2 0 2 2 +TENANTED 1 0 1 1 +TENABILITY 1 0 1 1 +TEN 14 0 14 14 +TEMPTATION 2 0 2 2 +TEMPORARY 2 0 2 2 +TEMPORAL 2 0 2 2 +TEMPLES 1 0 1 1 +TEMPLE 2 0 2 2 +TEMPEST 2 0 2 2 +TEMPERATURE 1 0 1 1 +TEMPER 3 0 3 3 +TELLS 1 0 1 1 +TELLING 3 0 3 3 +TELL 34 0 34 34 +TELESCOPE 2 0 2 2 +TEETH 1 0 1 1 +TEDIOUS 2 0 2 2 +TECHNOLOGY 1 0 1 1 +TECHNICAL 2 0 2 2 +TEAPOT 1 0 1 1 +TEAL 1 0 1 1 +TEACHING 2 0 2 2 +TEACHERY 1 0 1 1 +TEACHER 3 0 3 3 +TEACH 6 0 6 6 +TAYLOR 7 0 7 7 +TAXED 1 0 1 1 +TAWNY 1 0 1 1 +TAUGHT 5 0 5 5 +TASTE 5 0 5 5 +TASKS 1 0 1 1 +TASK 9 0 9 9 +TARTS 3 0 3 3 +TARRY 1 0 1 1 +TAPESTRY 1 0 1 1 +TAPESTRIES 1 0 1 1 +TAPE 1 0 1 1 +TANKARD 1 0 1 1 +TANGLE 1 0 1 1 +TAN 1 0 1 1 +TAMPERING 1 0 1 1 +TAMPERED 1 0 1 1 +TAMES 1 0 1 1 +TAME 1 0 1 1 +TALONS 1 0 1 1 +TALLOW 1 0 1 1 +TALL 9 0 9 9 +TALKS 4 0 4 4 +TALKING 10 0 10 10 +TALKED 1 0 1 1 +TALES 2 0 2 2 +TALENTED 1 0 1 1 +TALENT 5 0 5 5 +TALE 4 0 4 4 +TAKE 34 0 34 34 +TAILORS 1 0 1 1 +TAIL 3 0 3 3 +TAG 1 0 1 1 +TACT 1 0 1 1 +TACK 1 0 1 1 +TABLES 3 0 3 3 +TABBY'S 1 0 1 1 +TABBY 2 0 2 2 +SYSTEM 8 0 8 8 +SYNONYM 1 0 1 1 +SYMPATHY 3 0 3 3 +SYMPATHETIC 2 0 2 2 +SYLLABLE 1 0 1 1 +SWORDS 1 0 1 1 +SWORD 5 0 5 5 +SWOOPED 1 0 1 1 +SWOLLEN 1 0 1 1 +SWIRLING 1 0 1 1 +SWIRL 1 0 1 1 +SWING 1 0 1 1 +SWIMMING 2 0 2 2 +SWIFTNESS 1 0 1 1 +SWIFTLY 4 0 4 4 +SWIFT 1 0 1 1 +SWELLING 1 0 1 1 +SWELL 1 0 1 1 +SWEETS 1 0 1 1 +SWEETNESS 2 0 2 2 +SWEET 6 0 6 6 
+SWEEPING 1 0 1 1 +SWAY 1 0 1 1 +SWARMING 1 0 1 1 +SWAMP 3 0 3 3 +SWAM 1 0 1 1 +SUSPICIOUS 1 0 1 1 +SUSPENDED 1 0 1 1 +SUSPECT 2 0 2 2 +SURVEYOR 1 0 1 1 +SURVEYED 1 0 1 1 +SURROUNDINGS 1 0 1 1 +SURROUNDING 2 0 2 2 +SURROUNDED 2 0 2 2 +SURRENDER 2 0 2 2 +SURPRISED 6 0 6 6 +SURPRISE 4 0 4 4 +SURPASSED 1 0 1 1 +SURMISED 1 0 1 1 +SURGEON 1 0 1 1 +SURGE 1 0 1 1 +SURFACE 8 0 8 8 +SURELY 5 0 5 5 +SURE 16 0 16 16 +SURCHARGED 1 0 1 1 +SUPREME 2 0 2 2 +SUPPRESSING 1 0 1 1 +SUPPRESSED 2 0 2 2 +SUPPOSING 2 0 2 2 +SUPPOSES 1 0 1 1 +SUPPOSED 3 0 3 3 +SUPPOSE 19 0 19 19 +SUPPORTS 1 0 1 1 +SUPPORTING 2 0 2 2 +SUPPORTED 2 0 2 2 +SUPPORT 2 0 2 2 +SUPPLYING 1 0 1 1 +SUPPLY 1 0 1 1 +SUPPLIES 1 0 1 1 +SUPPER 7 0 7 7 +SUPERIORITY 1 0 1 1 +SUPERIOR 8 0 8 8 +SUPERINTENDENCE 1 0 1 1 +SUPERFLUOUS 1 0 1 1 +SUPERFLUITIES 1 0 1 1 +SUNSHINE 3 0 3 3 +SUNSETS 1 0 1 1 +SUNSET 1 0 1 1 +SUNRISE 1 0 1 1 +SUNNY 1 0 1 1 +SUNLIGHT 2 0 2 2 +SUNK 1 0 1 1 +SUNG 2 0 2 2 +SUNDAY 2 0 2 2 +SUNBEAMS 1 0 1 1 +SUN 15 0 15 15 +SUMMONS 2 0 2 2 +SUMMIT 1 0 1 1 +SUMMARY 1 0 1 1 +SULLIED 1 0 1 1 +SULLEN 1 0 1 1 +SUITS 1 0 1 1 +SUITED 1 0 1 1 +SUITABLE 2 0 2 2 +SUGGESTIONS 1 0 1 1 +SUGGESTION 1 0 1 1 +SUGGESTED 3 0 3 3 +SUGGEST 1 0 1 1 +SUGAR 1 0 1 1 +SUFFOCATING 1 0 1 1 +SUFFICIENTLY 1 0 1 1 +SUFFICIENT 3 0 3 3 +SUFFICED 1 0 1 1 +SUFFICE 1 0 1 1 +SUFFERINGS 2 0 2 2 +SUFFERING 2 0 2 2 +SUFFERED 3 0 3 3 +SUFFER 5 0 5 5 +SUDDENLY 15 0 15 15 +SUDDEN 7 0 7 7 +SUCKLING 1 0 1 1 +SUCH 67 0 67 67 +SUCCESSION 3 0 3 3 +SUCCESSFUL 3 0 3 3 +SUCCESS 9 0 9 9 +SUCCEEDED 3 0 3 3 +SUCCEED 1 0 1 1 +SUBURB 1 0 1 1 +SUBTLETIES 1 0 1 1 +SUBSTITUTION 1 0 1 1 +SUBSTITUTED 1 0 1 1 +SUBSTANTIALLY 1 0 1 1 +SUBSTANTIAL 3 0 3 3 +SUBSTANCE 3 0 3 3 +SUBSISTENCE 1 0 1 1 +SUBSIDED 1 0 1 1 +SUBSCRIBE 1 0 1 1 +SUBORDINATION 1 0 1 1 +SUBMITTED 1 0 1 1 +SUBMIT 2 0 2 2 +SUBMISSIVELY 1 0 1 1 +SUBMARINE 3 0 3 3 +SUBJECTS 3 0 3 3 +SUBJECTIVELY 1 0 1 1 +SUBJECTED 1 0 1 1 +SUBJECT 16 0 16 16 +SUBDUING 1 0 1 1 +SUBDUED 2 0 2 2 +STYLE 5 0 5 5 +STUTELEY 4 0 4 4 +STURDY 1 0 1 1 +STUPID 4 0 4 4 +STUPEFIED 2 0 2 2 +STUNG 1 0 1 1 +STUMPED 1 0 1 1 +STUMP 1 0 1 1 +STUFFED 3 0 3 3 +STUFF 1 0 1 1 +STUDYING 2 0 2 2 +STUDIOUS 2 0 2 2 +STUDIES 1 0 1 1 +STUDENTS 3 0 3 3 +STUDENT 2 0 2 2 +STUCCO 1 0 1 1 +STRUGGLES 1 0 1 1 +STRUGGLED 1 0 1 1 +STRUGGLE 6 0 6 6 +STRUCTURE 2 0 2 2 +STRUCK 4 0 4 4 +STROVE 2 0 2 2 +STRONGLY 2 0 2 2 +STRONGHOLD 1 0 1 1 +STRONGEST 2 0 2 2 +STRONGER 1 0 1 1 +STRONG 13 0 13 13 +STROLLERS 1 0 1 1 +STROLLER'S 1 0 1 1 +STROLLER 3 0 3 3 +STROKE 1 0 1 1 +STRIVING 1 0 1 1 +STRIVE 3 0 3 3 +STRIPPING 1 0 1 1 +STRIPPED 1 0 1 1 +STRIPLING 1 0 1 1 +STRIKING 1 0 1 1 +STRIKE 3 0 3 3 +STRIFE 1 0 1 1 +STRICTLY 1 0 1 1 +STRICTEST 1 0 1 1 +STRICT 2 0 2 2 +STRETCHING 1 0 1 1 +STRETCHED 1 0 1 1 +STRETCH 1 0 1 1 +STRENUOUS 1 0 1 1 +STRENGTHENING 1 0 1 1 +STRENGTHENED 2 0 2 2 +STRENGTH 7 0 7 7 +STREETS 1 0 1 1 +STREET 14 0 14 14 +STREAKED 1 0 1 1 +STRAWBERRIES 1 0 1 1 +STRANGERS 2 0 2 2 +STRANGER 1 0 1 1 +STRANGELY 2 0 2 2 +STRANGE 12 0 12 12 +STRAITS 1 0 1 1 +STRAINED 1 0 1 1 +STRAIN 1 0 1 1 +STRAIGHTWAY 2 0 2 2 +STRAIGHT 3 0 3 3 +STORY 25 0 25 25 +STORMY 1 0 1 1 +STORMS 1 0 1 1 +STORES 1 0 1 1 +STORAGE 1 0 1 1 +STOPPING 2 0 2 2 +STOPPED 6 0 6 6 +STOP 8 0 8 8 +STOOPED 1 0 1 1 +STOOP 1 0 1 1 +STOOL 2 0 2 2 +STOOD 22 0 22 22 +STONE 3 0 3 3 +STOLEN 2 0 2 2 +STOICAL 1 0 1 1 +STOCKINGS 1 0 1 1 +STOCKBROKER 1 0 1 1 +STOCK 2 0 2 2 +STIRS 1 0 1 1 +STIRRED 1 0 1 1 +STIR 1 0 1 1 +STINGY 2 0 2 2 +STING 1 0 1 1 +STIMULANTS 1 0 1 1 +STILLNESS 1 0 1 1 +STILL 55 0 55 55 +STIFLING 1 0 1 1 +STIFFNESS 1 0 1 1 
+STIFF 1 0 1 1 +STICKS 1 0 1 1 +STICKING 1 0 1 1 +STEW 1 0 1 1 +STERNEST 1 0 1 1 +STERN 2 0 2 2 +STEPS 1 0 1 1 +STEPPED 1 0 1 1 +STEPHEN'S 1 0 1 1 +STEPHEN 2 0 2 2 +STEPHANOS 2 0 2 2 +STEM 1 0 1 1 +STEEP 1 0 1 1 +STEEL 1 0 1 1 +STEAMING 1 0 1 1 +STEAMED 1 0 1 1 +STEAMBOAT 1 0 1 1 +STEAM 1 0 1 1 +STEAL 1 0 1 1 +STEADILY 2 0 2 2 +STEAD 1 0 1 1 +STAYS 1 0 1 1 +STAYED 2 0 2 2 +STATUS 1 0 1 1 +STATUARY 1 0 1 1 +STATIONS 2 0 2 2 +STATION 6 0 6 6 +STATESMAN 1 0 1 1 +STATEMENT 3 0 3 3 +STATELY 1 0 1 1 +STARTLING 1 0 1 1 +STARTLED 2 0 2 2 +STARTING 2 0 2 2 +STARTED 9 0 9 9 +STARS 1 0 1 1 +STARLIT 1 0 1 1 +STARING 1 0 1 1 +STARED 1 0 1 1 +STAR 2 0 2 2 +STANLEY 1 0 1 1 +STANDING 8 0 8 8 +STANDARD 4 0 4 4 +STAMPING 1 0 1 1 +STAMPED 1 0 1 1 +STALKS 1 0 1 1 +STAKES 1 0 1 1 +STAKE 1 0 1 1 +STAIRCASE 1 0 1 1 +STAINED 1 0 1 1 +STAIN 1 0 1 1 +STAGES 2 0 2 2 +STAGECRAFT 1 0 1 1 +STAFF 1 0 1 1 +STACKED 1 0 1 1 +STABLE 1 0 1 1 +SQUIRE'S 3 0 3 3 +SQUIRE 8 0 8 8 +SQUEEZE 1 0 1 1 +SQUARES 2 0 2 2 +SQUARE 2 0 2 2 +SQUALOR 1 0 1 1 +SQUALID 1 0 1 1 +SPUR 1 0 1 1 +SPRUNG 2 0 2 2 +SPRINKLING 1 0 1 1 +SPRINKLED 1 0 1 1 +SPRINGY 1 0 1 1 +SPRINGS 3 0 3 3 +SPRINGING 1 0 1 1 +SPREADS 1 0 1 1 +SPREAD 5 0 5 5 +SPOTLESS 1 0 1 1 +SPOT 4 0 4 4 +SPORTING 1 0 1 1 +SPOON 1 0 1 1 +SPOKEN 11 0 11 11 +SPOKE 15 0 15 15 +SPOILS 2 0 2 2 +SPLENDORS 1 0 1 1 +SPLENDIDLY 2 0 2 2 +SPLASHES 1 0 1 1 +SPLASHED 2 0 2 2 +SPITE 2 0 2 2 +SPIRITUAL 4 0 4 4 +SPIRITS 3 0 3 3 +SPIRIT 11 0 11 11 +SPINNING 4 0 4 4 +SPIKES 1 0 1 1 +SPIDER 1 0 1 1 +SPICY 1 0 1 1 +SPERM 1 0 1 1 +SPENT 5 0 5 5 +SPENDING 1 0 1 1 +SPELLED 1 0 1 1 +SPELL 1 0 1 1 +SPEEDS 1 0 1 1 +SPEED 3 0 3 3 +SPEECHLESS 1 0 1 1 +SPEECH 6 0 6 6 +SPED 2 0 2 2 +SPECULATE 1 0 1 1 +SPECTATORS 1 0 1 1 +SPECKS 1 0 1 1 +SPECIOUS 1 0 1 1 +SPECIFICATIONS 1 0 1 1 +SPECIFIC 1 0 1 1 +SPECIES 3 0 3 3 +SPECIALTY 1 0 1 1 +SPECIALLY 2 0 2 2 +SPEAR 1 0 1 1 +SPEAKS 1 0 1 1 +SPEAKING 10 0 10 10 +SPEAK 15 0 15 15 +SPASM 1 0 1 1 +SPARKS 1 0 1 1 +SPARKLING 3 0 3 3 +SPARKLES 2 0 2 2 +SPARKLED 1 0 1 1 +SPARK 1 0 1 1 +SPARE 3 0 3 3 +SPACE 5 0 5 5 +SOUTHERNERS 2 0 2 2 +SOUTHBRIDGE 1 0 1 1 +SOUTH 7 0 7 7 +SOURCE 1 0 1 1 +SOUP 1 0 1 1 +SOUNDING 3 0 3 3 +SOUNDED 2 0 2 2 +SOULS 4 0 4 4 +SOUL'S 1 0 1 1 +SOUL 8 0 8 8 +SOUGHT 6 0 6 6 +SORTS 2 0 2 2 +SORT 8 0 8 8 +SORRY 5 0 5 5 +SORROWS 1 0 1 1 +SORROWFULLY 1 0 1 1 +SORROWFUL 1 0 1 1 +SORROW 5 0 5 5 +SORREL 1 0 1 1 +SORE 1 0 1 1 +SORCERESS 1 0 1 1 +SOPHISTRY 1 0 1 1 +SOOTHINGLY 1 0 1 1 +SOOTH 1 0 1 1 +SOON 28 0 28 28 +SONS 3 0 3 3 +SONOROUS 1 0 1 1 +SONG 2 0 2 2 +SOMEWHERE 6 0 6 6 +SOMEWHAT 5 0 5 5 +SOMETIMES 18 0 18 18 +SOMETHING 37 0 37 37 +SOMEHOW 6 0 6 6 +SOMEBODY 3 0 3 3 +SOMBRE 1 0 1 1 +SOLVED 2 0 2 2 +SOLUTION 1 0 1 1 +SOLEMNITY 1 0 1 1 +SOLELY 1 0 1 1 +SOLE 3 0 3 3 +SOLDIERS 6 0 6 6 +SOLD 4 0 4 4 +SOIL 2 0 2 2 +SOFTNESS 2 0 2 2 +SOFTLY 4 0 4 4 +SOFTENED 1 0 1 1 +SOFT 7 0 7 7 +SOFAS 1 0 1 1 +SOCRATES 2 0 2 2 +SOCKS 1 0 1 1 +SOCIETY 7 0 7 7 +SOCIETIES 1 0 1 1 +SOCIAL 8 0 8 8 +SOCIABLE 1 0 1 1 +SOBS 1 0 1 1 +SOARED 1 0 1 1 +SOAR 1 0 1 1 +SNUFFED 1 0 1 1 +SNUFF 4 0 4 4 +SNOW 1 0 1 1 +SNORED 1 0 1 1 +SNEER 1 0 1 1 +SNATCHED 1 0 1 1 +SNATCH 1 0 1 1 +SMUGGLING 1 0 1 1 +SMOOTHER 1 0 1 1 +SMOOTH 1 0 1 1 +SMOKING 1 0 1 1 +SMOKE 5 0 5 5 +SMITH 2 0 2 2 +SMILING 3 0 3 3 +SMILES 3 0 3 3 +SMILE 12 0 12 12 +SMELL 3 0 3 3 +SMARTLY 1 0 1 1 +SMART 1 0 1 1 +SMALLEST 2 0 2 2 +SMALLER 1 0 1 1 +SMALL 20 0 20 20 +SLY 2 0 2 2 +SLUNK 1 0 1 1 +SLUMS 1 0 1 1 +SLUMBERS 1 0 1 1 +SLOWLY 14 0 14 14 +SLOW 4 0 4 4 +SLOPING 1 0 1 1 +SLIPS 1 0 1 1 +SLIPPING 1 0 1 1 +SLIPPED 4 0 4 4 +SLINGS 1 0 1 1 +SLIMY 
1 0 1 1 +SLIMLY 1 0 1 1 +SLIGHTLY 4 0 4 4 +SLIGHTEST 1 0 1 1 +SLIGHTER 1 0 1 1 +SLIGHT 4 0 4 4 +SLEPT 2 0 2 2 +SLENDER 2 0 2 2 +SLEEPING 2 0 2 2 +SLEEP 5 0 5 5 +SLEEK 2 0 2 2 +SLAVES 1 0 1 1 +SLAVERY 3 0 3 3 +SLAVE 1 0 1 1 +SLATED 1 0 1 1 +SLAP 1 0 1 1 +SLANG 12 0 12 12 +SLANDERER 1 0 1 1 +SLAM 1 0 1 1 +SLAKED 1 0 1 1 +SKY 5 0 5 5 +SKIRTS 1 0 1 1 +SKIRT 1 0 1 1 +SKIRMISHES 1 0 1 1 +SKIP 1 0 1 1 +SKINNER 1 0 1 1 +SKINNED 1 0 1 1 +SKIN 3 0 3 3 +SKIMS 1 0 1 1 +SKILL 2 0 2 2 +SKETCHES 1 0 1 1 +SKETCH 1 0 1 1 +SKEPTICISM 1 0 1 1 +SKELETON 1 0 1 1 +SIZZLE 1 0 1 1 +SIXTY 3 0 3 3 +SIXTH 1 0 1 1 +SIXTEENTH 4 0 4 4 +SIXTEEN 1 0 1 1 +SIX 14 0 14 14 +SITUATION 2 0 2 2 +SITE 1 0 1 1 +SISTERS 5 0 5 5 +SISTER'S 1 0 1 1 +SISTER 8 0 8 8 +SIRE 4 0 4 4 +SINS 9 0 9 9 +SINNER 2 0 2 2 +SINKS 1 0 1 1 +SINK 1 0 1 1 +SINGS 1 0 1 1 +SINGLED 1 0 1 1 +SINGLE 5 0 5 5 +SINGING 2 0 2 2 +SINGER'S 1 0 1 1 +SINGER 2 0 2 2 +SING 2 0 2 2 +SINFUL 2 0 2 2 +SIMPLY 10 0 10 10 +SIMPLIFIED 1 0 1 1 +SIMPLICITY 2 0 2 2 +SIMPLE 9 0 9 9 +SIMON 1 0 1 1 +SIMILITUDE 1 0 1 1 +SIMILARLY 1 0 1 1 +SIMILAR 3 0 3 3 +SILVERING 1 0 1 1 +SILVER 8 0 8 8 +SILLINESS 2 0 2 2 +SILKEN 2 0 2 2 +SILK 6 0 6 6 +SILHOUETTE 1 0 1 1 +SILENCES 1 0 1 1 +SILAS 1 0 1 1 +SIGNS 4 0 4 4 +SIGNING 1 0 1 1 +SIGNIFICANTLY 1 0 1 1 +SIGNIFICANCE 3 0 3 3 +SIGNED 1 0 1 1 +SIGNATURE 1 0 1 1 +SIGN 5 0 5 5 +SIDEWAYS 1 0 1 1 +SIDES 6 0 6 6 +SICKNESS 2 0 2 2 +SHY 1 0 1 1 +SHUTTING 2 0 2 2 +SHUTTERS 1 0 1 1 +SHUT 3 0 3 3 +SHUNNING 1 0 1 1 +SHUDDER 2 0 2 2 +SHRUGGED 1 0 1 1 +SHRUBBERY 1 0 1 1 +SHRIVELLED 1 0 1 1 +SHRINE 1 0 1 1 +SHRILL 1 0 1 1 +SHRIEKED 1 0 1 1 +SHREWISH 1 0 1 1 +SHREWDLY 1 0 1 1 +SHREWD 1 0 1 1 +SHOWN 1 0 1 1 +SHOWERED 1 0 1 1 +SHOWER 1 0 1 1 +SHOW 10 0 10 10 +SHOUTINGS 1 0 1 1 +SHOUTING 1 0 1 1 +SHOUTED 3 0 3 3 +SHOUT 3 0 3 3 +SHOULDN'T 1 0 1 1 +SHOULDERS 5 0 5 5 +SHOULDER 5 0 5 5 +SHORTLY 1 0 1 1 +SHORT 11 0 11 11 +SHORES 1 0 1 1 +SHORE 4 0 4 4 +SHOPS 1 0 1 1 +SHOP 2 0 2 2 +SHOOTING 1 0 1 1 +SHOOT 1 0 1 1 +SHOOK 10 0 10 10 +SHONE 7 0 7 7 +SHOES 3 0 3 3 +SHOCK 4 0 4 4 +SHOAL 1 0 1 1 +SHIVERING 1 0 1 1 +SHIVER 2 0 2 2 +SHIRTS 1 0 1 1 +SHIRK 1 0 1 1 +SHIPS 2 0 2 2 +SHIPPING 1 0 1 1 +SHINING 4 0 4 4 +SHINES 2 0 2 2 +SHINE 2 0 2 2 +SHIFTED 1 0 1 1 +SHIELD 1 0 1 1 +SHERWOOD 1 0 1 1 +SHEPHERD 2 0 2 2 +SHELVES 1 0 1 1 +SHELTER 1 0 1 1 +SHELLEY'S 1 0 1 1 +SHELL 1 0 1 1 +SHEETING 1 0 1 1 +SHEET 4 0 4 4 +SHEEP 1 0 1 1 +SHEDDING 2 0 2 2 +SHED 1 0 1 1 +SHEAF 1 0 1 1 +SHAWL 1 0 1 1 +SHAVINGS 1 0 1 1 +SHAVEN 1 0 1 1 +SHARPLY 2 0 2 2 +SHARPENED 1 0 1 1 +SHARP 8 0 8 8 +SHARING 1 0 1 1 +SHARED 1 0 1 1 +SHARE 1 0 1 1 +SHAPEN 1 0 1 1 +SHAPELY 1 0 1 1 +SHAPED 1 0 1 1 +SHAPE 1 0 1 1 +SHAMES 1 0 1 1 +SHAME 3 0 3 3 +SHAM 1 0 1 1 +SHALLOWS 1 0 1 1 +SHALLOW 2 0 2 2 +SHAKING 2 0 2 2 +SHAKEN 1 0 1 1 +SHAGGY 2 0 2 2 +SHAFT 1 0 1 1 +SHADY 2 0 2 2 +SHADOWS 7 0 7 7 +SHADOW 2 0 2 2 +SHADES 1 0 1 1 +SHADE 4 0 4 4 +SHACKLETON 1 0 1 1 +SEXTANT 1 0 1 1 +SEWING 1 0 1 1 +SEVERITY 4 0 4 4 +SEVERITIES 1 0 1 1 +SEVERED 2 0 2 2 +SEVERE 1 0 1 1 +SEVERAL 9 0 9 9 +SEVENTY 2 0 2 2 +SEVENTH 1 0 1 1 +SEVENTEEN 2 0 2 2 +SEVEN 6 0 6 6 +SETTLERS 1 0 1 1 +SETTLER 1 0 1 1 +SETTLEMENTS 1 0 1 1 +SETTLEMENT 3 0 3 3 +SETH 1 0 1 1 +SESSION 1 0 1 1 +SERVITUDE 1 0 1 1 +SERVILE 1 0 1 1 +SERVICEABILITY 1 0 1 1 +SERVICE 12 0 12 12 +SERVADAC'S 1 0 1 1 +SERVADAC 7 0 7 7 +SERIOUSLY 4 0 4 4 +SERIOUS 3 0 3 3 +SERIES 3 0 3 3 +SERENE 1 0 1 1 +SERAPHIC 1 0 1 1 +SEQUEL 1 0 1 1 +SEPARATION 1 0 1 1 +SEPARATED 3 0 3 3 +SENTIMENTS 1 0 1 1 +SENTIMENTAL 1 0 1 1 +SENTIMENT 1 0 1 1 +SENTENTIOUSLY 1 0 1 1 +SENSITIVE 1 0 1 1 +SENSIBLE 1 0 1 1 
+SENSES 2 0 2 2 +SENSELESS 1 0 1 1 +SENSATIONS 1 0 1 1 +SENSATIONAL 1 0 1 1 +SENSATION 2 0 2 2 +SENORA 1 0 1 1 +SENOR 1 0 1 1 +SENIOR 1 0 1 1 +SEND 3 0 3 3 +SENATOR 1 0 1 1 +SELLING 2 0 2 2 +SELECTION 2 0 2 2 +SELECTED 1 0 1 1 +SELDOM 3 0 3 3 +SEIZING 2 0 2 2 +SEIZED 3 0 3 3 +SEIZE 1 0 1 1 +SEGMENT 1 0 1 1 +SEES 1 0 1 1 +SEEN 16 0 16 16 +SEEMINGLY 3 0 3 3 +SEEKERS 1 0 1 1 +SEEK 4 0 4 4 +SEEING 12 0 12 12 +SECURITY 2 0 2 2 +SECURING 1 0 1 1 +SECURED 3 0 3 3 +SECURE 4 0 4 4 +SECRETLY 3 0 3 3 +SECRET 3 0 3 3 +SECRECY 1 0 1 1 +SECONDS 2 0 2 2 +SECONDLY 1 0 1 1 +SECONDED 1 0 1 1 +SECONDARY 1 0 1 1 +SECOND 10 0 10 10 +SECLUSION 1 0 1 1 +SECESSIONISTS 1 0 1 1 +SEATED 3 0 3 3 +SEASONS 2 0 2 2 +SEASONABLE 1 0 1 1 +SEASON 3 0 3 3 +SEAS 2 0 2 2 +SEARCHING 1 0 1 1 +SEARCHED 2 0 2 2 +SEARCH 4 0 4 4 +SCYTHE 2 0 2 2 +SCURRIED 1 0 1 1 +SCUMMED 1 0 1 1 +SCULPTURE 1 0 1 1 +SCRUTINY 1 0 1 1 +SCRUTINIZE 1 0 1 1 +SCRUPLES 1 0 1 1 +SCRUB 1 0 1 1 +SCRIPTURES 1 0 1 1 +SCRIBE 1 0 1 1 +SCRIBBLING 1 0 1 1 +SCRIBBLER 1 0 1 1 +SCREEN 1 0 1 1 +SCREAMED 3 0 3 3 +SCRAMBLED 1 0 1 1 +SCOWLED 1 0 1 1 +SCOTTISH 2 0 2 2 +SCOTS 1 0 1 1 +SCOTLAND 1 0 1 1 +SCORPION 1 0 1 1 +SCORNFUL 2 0 2 2 +SCORE 2 0 2 2 +SCORCHED 1 0 1 1 +SCOPE 1 0 1 1 +SCOLD 1 0 1 1 +SCIENTISTS 2 0 2 2 +SCIENTIST 1 0 1 1 +SCIENTIFIC 1 0 1 1 +SCIENCE 2 0 2 2 +SCHOONER 2 0 2 2 +SCHOOLROOM 1 0 1 1 +SCHOLARSHIP 1 0 1 1 +SCHOLAR 1 0 1 1 +SCHISM 1 0 1 1 +SCHEME 5 0 5 5 +SCEPTICISM 1 0 1 1 +SCENT 1 0 1 1 +SCENES 1 0 1 1 +SCENE 3 0 3 3 +SCATTERS 1 0 1 1 +SCATTERED 3 0 3 3 +SCATTER 1 0 1 1 +SCARLET 3 0 3 3 +SCARE 1 0 1 1 +SCARCELY 9 0 9 9 +SCARCE 1 0 1 1 +SCALP 1 0 1 1 +SCALE 2 0 2 2 +SAY 51 0 51 51 +SAWDUST 1 0 1 1 +SAVIOUR 1 0 1 1 +SAVINGS 1 0 1 1 +SAVING 1 0 1 1 +SAVES 1 0 1 1 +SAVE 9 0 9 9 +SAVAGE 5 0 5 5 +SAUNTERED 1 0 1 1 +SAUCE 1 0 1 1 +SATURDAY 5 0 5 5 +SATISFY 1 0 1 1 +SATISFIED 8 0 8 8 +SATISFACTION 5 0 5 5 +SATANIC 1 0 1 1 +SARCASTIC 1 0 1 1 +SARAH'S 1 0 1 1 +SANK 1 0 1 1 +SANGUINARY 1 0 1 1 +SANDY 2 0 2 2 +SANDWICHES 1 0 1 1 +SANDFORD 1 0 1 1 +SANCTIFYING 1 0 1 1 +SANCTIFIED 1 0 1 1 +SAME 35 0 35 35 +SALVATION 1 0 1 1 +SALUTE 1 0 1 1 +SALUTATION 1 0 1 1 +SALON 1 0 1 1 +SAKE 4 0 4 4 +SAINT 14 0 14 14 +SAILS 2 0 2 2 +SAILORS 1 0 1 1 +SAILORMAN 1 0 1 1 +SAILOR 1 0 1 1 +SAFETY 2 0 2 2 +SAFEST 1 0 1 1 +SAFE 5 0 5 5 +SADLY 2 0 2 2 +SADDLER 1 0 1 1 +SADDLE 1 0 1 1 +SAD 3 0 3 3 +SACRIFICE 2 0 2 2 +SACRED 2 0 2 2 +SACRAMENT 1 0 1 1 +S 1 0 1 1 +RUTH 10 0 10 10 +RUSTY 1 0 1 1 +RUSTLING 2 0 2 2 +RUSTLED 1 0 1 1 +RUSHING 1 0 1 1 +RUSHED 7 0 7 7 +RUSH 4 0 4 4 +RUNS 2 0 2 2 +RUNNING 8 0 8 8 +RUNG 1 0 1 1 +RUN 9 0 9 9 +RUMMAGED 1 0 1 1 +RUMINATED 1 0 1 1 +RULER 1 0 1 1 +RULE 2 0 2 2 +RUINS 1 0 1 1 +RUINED 1 0 1 1 +RUIN 2 0 2 2 +RUFFLED 1 0 1 1 +RUFFIANS 1 0 1 1 +RUFFIAN 2 0 2 2 +RUDELY 1 0 1 1 +RUDE 2 0 2 2 +RUBY 1 0 1 1 +RUBBING 1 0 1 1 +RUBBED 1 0 1 1 +ROYALISTS 2 0 2 2 +ROYAL 9 0 9 9 +ROWS 2 0 2 2 +ROW 5 0 5 5 +ROVING 1 0 1 1 +ROUTINE 1 0 1 1 +ROUTE 2 0 2 2 +ROUT 1 0 1 1 +ROUSES 1 0 1 1 +ROUSED 1 0 1 1 +ROUSE 1 0 1 1 +ROUNDED 1 0 1 1 +ROUGHLY 7 0 7 7 +ROUGHEST 1 0 1 1 +ROUGH 3 0 3 3 +ROSES 2 0 2 2 +ROSE 14 0 14 14 +ROSALIE 4 0 4 4 +ROOTS 1 0 1 1 +ROOMS 3 0 3 3 +ROOFS 1 0 1 1 +ROMANCE 1 0 1 1 +ROMAN 1 0 1 1 +ROLLERS 2 0 2 2 +ROLL 1 0 1 1 +ROGERS'S 1 0 1 1 +ROGERS 2 0 2 2 +ROD 1 0 1 1 +ROCKY 1 0 1 1 +ROCKS 2 0 2 2 +ROCKING 2 0 2 2 +ROBUST 1 0 1 1 +ROBINSON 1 0 1 1 +ROBIN 19 0 19 19 +ROBERT 2 0 2 2 +ROBBING 1 0 1 1 +ROBBER 1 0 1 1 +ROARINGS 1 0 1 1 +ROARED 1 0 1 1 +ROADS 1 0 1 1 +ROAD 4 0 4 4 +RIVULET 2 0 2 2 +RIVER 6 0 6 6 +RIVAL 2 0 2 2 +RISK 2 0 2 2 +RISING 3 0 3 3 
+RISEN 1 0 1 1 +RISE 1 0 1 1 +RIPPLING 1 0 1 1 +RIOTING 2 0 2 2 +RIOT 1 0 1 1 +RINGS 1 0 1 1 +RING 3 0 3 3 +RIGOROUSLY 1 0 1 1 +RIGOROUS 1 0 1 1 +RIGIDLY 1 0 1 1 +RIGIDITY 2 0 2 2 +RIGID 1 0 1 1 +RIGHTS 1 0 1 1 +RIGHTLY 1 0 1 1 +RIGHTEOUSNESS 1 0 1 1 +RIGHTEOUS 1 0 1 1 +RIGHT 25 0 25 25 +RIGGING 1 0 1 1 +RIFLES 1 0 1 1 +RIDGE 1 0 1 1 +RIDES 1 0 1 1 +RIDE 1 0 1 1 +RIDDLE 1 0 1 1 +RICHLY 1 0 1 1 +RICHEST 1 0 1 1 +RICHER 1 0 1 1 +RICH 11 0 11 11 +RHYTHM 1 0 1 1 +RHONE 1 0 1 1 +REYNOLDS 2 0 2 2 +REWARDED 2 0 2 2 +REWARD 3 0 3 3 +REVOLVING 1 0 1 1 +REVIVES 1 0 1 1 +REVERT 1 0 1 1 +REVERSED 1 0 1 1 +REVERSAL 1 0 1 1 +REVERIE 2 0 2 2 +REVEREND 1 0 1 1 +REVERENCE 1 0 1 1 +REVENGE 3 0 3 3 +REVELATION 1 0 1 1 +REVEAL 1 0 1 1 +RETURNING 1 0 1 1 +RETRIEVE 1 0 1 1 +RETREATED 1 0 1 1 +RETREAT 3 0 3 3 +RETRACE 1 0 1 1 +RETIREMENT 2 0 2 2 +RETIRED 2 0 2 2 +RETIRE 1 0 1 1 +RETAINERS 1 0 1 1 +RETAINER 1 0 1 1 +RETAINED 3 0 3 3 +RETAIN 1 0 1 1 +RESURRECTION 3 0 3 3 +RESUMED 2 0 2 2 +RESULTS 4 0 4 4 +RESULTED 3 0 3 3 +RESULT 5 0 5 5 +RESTRAINED 1 0 1 1 +RESTORING 1 0 1 1 +RESTORED 2 0 2 2 +RESTLESS 4 0 4 4 +RESTED 2 0 2 2 +RESPONSIBLE 2 0 2 2 +RESPONSES 1 0 1 1 +RESPONDED 3 0 3 3 +RESPECTS 1 0 1 1 +RESPECTING 1 0 1 1 +RESPECT 3 0 3 3 +RESOURCES 2 0 2 2 +RESOUNDING 1 0 1 1 +RESORT 2 0 2 2 +RESOLVED 3 0 3 3 +RESOLVE 2 0 2 2 +RESOLUTIONS 1 0 1 1 +RESOLUTION 1 0 1 1 +RESOLUTE 1 0 1 1 +RESISTANCE 1 0 1 1 +RESIST 1 0 1 1 +RESIGNATION 2 0 2 2 +RESIDENCES 1 0 1 1 +RESIDENCE 3 0 3 3 +RESIDE 1 0 1 1 +RESERVOIR 1 0 1 1 +RESERVED 1 0 1 1 +RESERVE 2 0 2 2 +RESENTFUL 1 0 1 1 +RESENTED 1 0 1 1 +RESEMBLING 2 0 2 2 +RESEMBLED 1 0 1 1 +RESEMBLE 1 0 1 1 +RESEMBLANCE 2 0 2 2 +RESCUE 2 0 2 2 +REQUISITION 1 0 1 1 +REQUIRING 1 0 1 1 +REQUIREMENTS 1 0 1 1 +REQUIRED 3 0 3 3 +REQUIRE 4 0 4 4 +REQUEST 1 0 1 1 +REPUTE 2 0 2 2 +REPUTATION 1 0 1 1 +REPUGNANT 1 0 1 1 +REPUBLISH 1 0 1 1 +REPUBLICAN 1 0 1 1 +REPUBLIC 5 0 5 5 +REPTILES 2 0 2 2 +REPROOF 1 0 1 1 +REPRODUCE 1 0 1 1 +REPROACHING 1 0 1 1 +REPROACHFULLY 1 0 1 1 +REPROACH 2 0 2 2 +REPRESS 2 0 2 2 +REPRESENTS 1 0 1 1 +REPRESENTING 1 0 1 1 +REPRESENTATIVE 1 0 1 1 +REPRESENTATION 1 0 1 1 +REPRESENT 2 0 2 2 +REPOSE 2 0 2 2 +REPORT 2 0 2 2 +REPLY 7 0 7 7 +REPLIED 20 0 20 20 +REPLACES 1 0 1 1 +REPLACE 1 0 1 1 +REPETITION 1 0 1 1 +REPENTING 1 0 1 1 +REPENTANCE 1 0 1 1 +REPENT 1 0 1 1 +REPELLENT 1 0 1 1 +REPELLED 1 0 1 1 +REPEATED 1 0 1 1 +REPAST 1 0 1 1 +REPARTEES 1 0 1 1 +REPAIRS 1 0 1 1 +REPAIRED 1 0 1 1 +RENTED 1 0 1 1 +RENTAL 1 0 1 1 +RENT 1 0 1 1 +RENEWED 2 0 2 2 +RENEWABLE 1 0 1 1 +RENDING 1 0 1 1 +RENDERING 2 0 2 2 +RENDERED 2 0 2 2 +RENDER 3 0 3 3 +REMOTENESS 1 0 1 1 +REMOTE 2 0 2 2 +REMONSTRANCE 1 0 1 1 +REMNANT 2 0 2 2 +REMISSION 2 0 2 2 +REMIND 1 0 1 1 +REMEMBRANCE 2 0 2 2 +REMEMBERS 1 0 1 1 +REMEMBERING 3 0 3 3 +REMEMBERED 11 0 11 11 +REMEMBER 10 0 10 10 +REMEDY 1 0 1 1 +REMARKS 1 0 1 1 +REMARKING 1 0 1 1 +REMARKABLE 2 0 2 2 +REMAINS 3 0 3 3 +RELY 1 0 1 1 +RELUCTANTLY 1 0 1 1 +RELINQUISHED 1 0 1 1 +RELIGIOUS 1 0 1 1 +RELIGION 4 0 4 4 +RELIEVE 2 0 2 2 +RELIEF 1 0 1 1 +RELIANCE 1 0 1 1 +RELIABLE 1 0 1 1 +RELATIVES 1 0 1 1 +RELATIVE 1 0 1 1 +RELATIONSHIP 1 0 1 1 +RELATIONS 2 0 2 2 +RELATION 4 0 4 4 +RELATED 3 0 3 3 +RELATE 2 0 2 2 +RELAPSES 1 0 1 1 +REJOICING 2 0 2 2 +REJOICED 1 0 1 1 +REJOICE 5 0 5 5 +REINS 1 0 1 1 +REIGNS 1 0 1 1 +REIGN 1 0 1 1 +REGULATOR 1 0 1 1 +REGULATIONS 1 0 1 1 +REGULATION 1 0 1 1 +REGULATED 1 0 1 1 +REGULARLY 1 0 1 1 +REGULARITY 1 0 1 1 +REGULAR 1 0 1 1 +REGRET 1 0 1 1 +REGISTRATION 1 0 1 1 +REGISTERS 1 0 1 1 +REGISTERED 1 0 1 1 +REGIONS 1 0 1 1 +REGION 1 
0 1 1 +REGIMENTS 1 0 1 1 +REGIMENT 1 0 1 1 +REGARDS 2 0 2 2 +REGARDLESS 1 0 1 1 +REGARDED 2 0 2 2 +REGARD 5 0 5 5 +REGAINING 1 0 1 1 +REFUSING 2 0 2 2 +REFUSAL 1 0 1 1 +REFUGEES 1 0 1 1 +REFUGE 1 0 1 1 +REFRESHING 1 0 1 1 +REFRESH 2 0 2 2 +REFRAINED 1 0 1 1 +REFORMS 1 0 1 1 +REFORM 1 0 1 1 +REFLECTIONS 1 0 1 1 +REFLECTION 4 0 4 4 +REFLECTED 3 0 3 3 +REFLECT 1 0 1 1 +REFINEMENTS 1 0 1 1 +REFINEMENT 1 0 1 1 +REFINED 1 0 1 1 +REFERRING 2 0 2 2 +REFER 2 0 2 2 +REED 1 0 1 1 +REDUCED 1 0 1 1 +REDOUBLES 1 0 1 1 +REDOUBLED 1 0 1 1 +REDMAN'S 1 0 1 1 +REDEEMER 1 0 1 1 +RECUR 1 0 1 1 +RECTOR 2 0 2 2 +RECREATION 2 0 2 2 +RECOVERY 1 0 1 1 +RECOVERING 1 0 1 1 +RECOVERED 1 0 1 1 +RECOVER 1 0 1 1 +RECONCILIATION 1 0 1 1 +RECOLLECTIONS 1 0 1 1 +RECOILED 2 0 2 2 +RECOGNIZE 4 0 4 4 +RECOGNITION 9 0 9 9 +RECLINING 1 0 1 1 +RECKONING 2 0 2 2 +RECKONED 1 0 1 1 +RECKON 1 0 1 1 +RECKLESS 1 0 1 1 +RECITED 5 0 5 5 +RECITE 2 0 2 2 +RECESSES 1 0 1 1 +RECEPTION 4 0 4 4 +RECENTLY 1 0 1 1 +RECENT 2 0 2 2 +RECEIVING 2 0 2 2 +RECEIVES 1 0 1 1 +RECEIVER 1 0 1 1 +RECEIVED 9 0 9 9 +RECEIVE 3 0 3 3 +RECAPTURED 1 0 1 1 +RECALLING 1 0 1 1 +RECALLED 4 0 4 4 +RECALL 1 0 1 1 +REBUKES 1 0 1 1 +REBUKE 1 0 1 1 +REBELLION 2 0 2 2 +REBEL 1 0 1 1 +REBATE 1 0 1 1 +REASSURED 1 0 1 1 +REASONS 1 0 1 1 +REASONING 1 0 1 1 +REASON 19 0 19 19 +REAR 1 0 1 1 +REALLY 10 0 10 10 +REALIZED 2 0 2 2 +REALITY 8 0 8 8 +REAL 16 0 16 16 +READING 4 0 4 4 +READINESS 1 0 1 1 +READILY 2 0 2 2 +REACHING 2 0 2 2 +REACHED 12 0 12 12 +REACH 3 0 3 3 +RAVISHING 1 0 1 1 +RAVING 1 0 1 1 +RAVINES 1 0 1 1 +RATS 1 0 1 1 +RATIFY 1 0 1 1 +RATIFICATION 1 0 1 1 +RATHER 23 0 23 23 +RATED 2 0 2 2 +RATE 7 0 7 7 +RASH 1 0 1 1 +RASCAL 1 0 1 1 +RARELY 1 0 1 1 +RARE 5 0 5 5 +RAPTUROUS 1 0 1 1 +RAPTURES 1 0 1 1 +RAPIDS 3 0 3 3 +RAPIDLY 3 0 3 3 +RAPIDITY 2 0 2 2 +RAPID 3 0 3 3 +RAPHAEL 1 0 1 1 +RAOUL 3 0 3 3 +RANSOM 1 0 1 1 +RANKING 1 0 1 1 +RANKED 1 0 1 1 +RANK 5 0 5 5 +RANGERS 1 0 1 1 +RANGED 1 0 1 1 +RANGE 4 0 4 4 +RANG 4 0 4 4 +RANCOR 1 0 1 1 +RAN 12 0 12 12 +RAMPART 1 0 1 1 +RALPH 2 0 2 2 +RAISED 6 0 6 6 +RAISE 1 0 1 1 +RAINS 1 0 1 1 +RAINDROPS 1 0 1 1 +RAINBOW 1 0 1 1 +RAIN 3 0 3 3 +RAILROADS 1 0 1 1 +RAGS 2 0 2 2 +RAGGED 1 0 1 1 +RAGE 3 0 3 3 +RAFT 7 0 7 7 +RADIE 2 0 2 2 +RADICALS 1 0 1 1 +RADICALISM 1 0 1 1 +RADIANCE 1 0 1 1 +RACKED 1 0 1 1 +RACK 1 0 1 1 +RACHEL'S 1 0 1 1 +RACES 6 0 6 6 +RACE 1 0 1 1 +RABBLE 1 0 1 1 +RABBIT 2 0 2 2 +QUOTE 2 0 2 2 +QUIVERING 2 0 2 2 +QUIVERED 1 0 1 1 +QUITTING 1 0 1 1 +QUITTED 1 0 1 1 +QUITE 29 0 29 29 +QUINSON 1 0 1 1 +QUILT 3 0 3 3 +QUIETLY 5 0 5 5 +QUIET 4 0 4 4 +QUICKLY 5 0 5 5 +QUICK 6 0 6 6 +QUESTIONS 4 0 4 4 +QUESTIONING 1 0 1 1 +QUESTIONED 2 0 2 2 +QUESTION 12 0 12 12 +QUEST 1 0 1 1 +QUERIED 1 0 1 1 +QUEER 3 0 3 3 +QUEENSTOWN 1 0 1 1 +QUEENS 1 0 1 1 +QUEEN'S 1 0 1 1 +QUEEN 8 0 8 8 +QUARTERS 3 0 3 3 +QUARTER 7 0 7 7 +QUARRY 2 0 2 2 +QUARREL 1 0 1 1 +QUANTITY 4 0 4 4 +QUANTITIES 1 0 1 1 +QUALITY 1 0 1 1 +QUALITIES 3 0 3 3 +QUALIFICATIONS 2 0 2 2 +QUALIFICATION 1 0 1 1 +QUAKE 1 0 1 1 +QUAINT 1 0 1 1 +QUADRANGLE 1 0 1 1 +PYTHON 1 0 1 1 +PYRAMIDS 2 0 2 2 +PUZZLED 1 0 1 1 +PUSHED 2 0 2 2 +PUSH 2 0 2 2 +PURSUITS 3 0 3 3 +PURSUIT 2 0 2 2 +PURSUER 1 0 1 1 +PURSUED 1 0 1 1 +PURSUE 1 0 1 1 +PURRING 1 0 1 1 +PURPOSES 2 0 2 2 +PURPLE 1 0 1 1 +PURITY 2 0 2 2 +PURELY 1 0 1 1 +PURCHASED 1 0 1 1 +PUPIL 1 0 1 1 +PUNISHMENT 4 0 4 4 +PUNISHED 4 0 4 4 +PUNCH 1 0 1 1 +PUMP 1 0 1 1 +PULPIT 1 0 1 1 +PULLING 2 0 2 2 +PULLED 2 0 2 2 +PULL 1 0 1 1 +PUFFY 1 0 1 1 +PUBLISH 1 0 1 1 +PUBLIC 13 0 13 13 +PSYCHOLOGY 1 0 1 1 +PSYCHE 2 0 2 2 +PRYNNE 4 0 4 4 +PRUDENT 2 0 
2 2 +PROWESS 1 0 1 1 +PROW 1 0 1 1 +PROVOCATION 1 0 1 1 +PROVISIONALLY 1 0 1 1 +PROVISION 2 0 2 2 +PROVINCES 2 0 2 2 +PROVINCE 2 0 2 2 +PROVIDED 3 0 3 3 +PROVEN 1 0 1 1 +PROVE 4 0 4 4 +PROUDLY 1 0 1 1 +PROUD 5 0 5 5 +PROTESTED 1 0 1 1 +PROTEST 1 0 1 1 +PROTECTOR 1 0 1 1 +PROTECTION 3 0 3 3 +PROTECTING 1 0 1 1 +PROTECTED 1 0 1 1 +PROTECT 4 0 4 4 +PROSTRATION 2 0 2 2 +PROSPECTS 1 0 1 1 +PROSELYTES 1 0 1 1 +PROSECUTION 1 0 1 1 +PROSECUTE 1 0 1 1 +PROSE 1 0 1 1 +PROSCRIPTION 1 0 1 1 +PROPRIETY 1 0 1 1 +PROPRIETORS 1 0 1 1 +PROPRIETOR 1 0 1 1 +PROPOSED 2 0 2 2 +PROPOSE 1 0 1 1 +PROPOSALS 1 0 1 1 +PROPORTIONS 3 0 3 3 +PROPHETS 1 0 1 1 +PROPHET 1 0 1 1 +PROPERTY 6 0 6 6 +PROPERTIES 1 0 1 1 +PROPERLY 4 0 4 4 +PROPER 3 0 3 3 +PROPENSITIES 1 0 1 1 +PROOF 3 0 3 3 +PRONUNCIATION 1 0 1 1 +PRONOUNCED 4 0 4 4 +PROMPTLY 1 0 1 1 +PROMPT 1 0 1 1 +PROMOTING 1 0 1 1 +PROMOTED 1 0 1 1 +PROMISES 2 0 2 2 +PROMISED 4 0 4 4 +PROMISE 4 0 4 4 +PROMINENT 2 0 2 2 +PROLIFIC 1 0 1 1 +PROJECTION 1 0 1 1 +PROJECT 2 0 2 2 +PROGRESSING 1 0 1 1 +PROGRESS 6 0 6 6 +PROGRAMME 1 0 1 1 +PROFOUND 2 0 2 2 +PROFITABLE 1 0 1 1 +PROFESSOR 6 0 6 6 +PROFESSIONS 1 0 1 1 +PROFESSION 1 0 1 1 +PROFESSING 1 0 1 1 +PROFESSED 1 0 1 1 +PRODUCTIVE 1 0 1 1 +PRODUCTION 1 0 1 1 +PRODUCT 1 0 1 1 +PRODUCING 2 0 2 2 +PRODUCED 5 0 5 5 +PRODUCE 3 0 3 3 +PROCOPE 2 0 2 2 +PROCESSION 1 0 1 1 +PROCESSES 1 0 1 1 +PROCESS 3 0 3 3 +PROCEEDINGS 2 0 2 2 +PROCEEDING 2 0 2 2 +PROCEEDED 2 0 2 2 +PROBLEM 4 0 4 4 +PROBING 1 0 1 1 +PROBABLY 10 0 10 10 +PROBABLE 2 0 2 2 +PRO 2 0 2 2 +PRIZE 1 0 1 1 +PRIVILEGE 3 0 3 3 +PRIVATION 1 0 1 1 +PRIVATELY 1 0 1 1 +PRIVATE 11 0 11 11 +PRIVACY 1 0 1 1 +PRISTINE 1 0 1 1 +PRISONER 4 0 4 4 +PRISON 4 0 4 4 +PRIOR 1 0 1 1 +PRINTING 1 0 1 1 +PRINTER 2 0 2 2 +PRINCIPLES 2 0 2 2 +PRINCESSES 2 0 2 2 +PRINCESS 9 0 9 9 +PRINCES 2 0 2 2 +PRINCE 3 0 3 3 +PRIMLY 1 0 1 1 +PRIMITIVE 2 0 2 2 +PRIMATE 1 0 1 1 +PRIMARY 1 0 1 1 +PRIMARILY 1 0 1 1 +PRIESTHOOD 1 0 1 1 +PRIEST 1 0 1 1 +PRIDE 9 0 9 9 +PRICE 2 0 2 2 +PREY 2 0 2 2 +PREVIOUSLY 2 0 2 2 +PREVIOUS 2 0 2 2 +PREVENTS 1 0 1 1 +PREVENTING 1 0 1 1 +PREVAILING 1 0 1 1 +PREVAILED 3 0 3 3 +PREVAIL 1 0 1 1 +PRETTY 10 0 10 10 +PRETTIEST 1 0 1 1 +PRETENSION 1 0 1 1 +PRETENDED 1 0 1 1 +PRESUMED 1 0 1 1 +PRESUMABLY 1 0 1 1 +PRESSURE 4 0 4 4 +PRESSING 1 0 1 1 +PRESSED 2 0 2 2 +PRESS 2 0 2 2 +PRESIDENT 5 0 5 5 +PRESIDED 1 0 1 1 +PRESERVED 1 0 1 1 +PRESERVE 1 0 1 1 +PRESENTS 3 0 3 3 +PRESENTLY 7 0 7 7 +PRESENTING 2 0 2 2 +PRESENTED 1 0 1 1 +PRESENCE 6 0 6 6 +PREROGATIVES 1 0 1 1 +PREPARING 1 0 1 1 +PREPARED 3 0 3 3 +PREPARE 1 0 1 1 +PREOCCUPIED 1 0 1 1 +PREOCCUPATION 1 0 1 1 +PREMISES 1 0 1 1 +PRELIMINARY 1 0 1 1 +PREFERENCE 1 0 1 1 +PREDOMINATE 1 0 1 1 +PREDOMINANCE 1 0 1 1 +PREDICTED 1 0 1 1 +PREDATORY 2 0 2 2 +PRECONCEIVED 1 0 1 1 +PRECISION 1 0 1 1 +PRECISELY 2 0 2 2 +PRECIPITATED 1 0 1 1 +PRECIPITATE 1 0 1 1 +PRECIOUS 3 0 3 3 +PRECAUTION 1 0 1 1 +PREACHER 1 0 1 1 +PREACH 3 0 3 3 +PRAYERS 1 0 1 1 +PRAYED 1 0 1 1 +PRAY 7 0 7 7 +PRATTLED 1 0 1 1 +PRAISES 2 0 2 2 +PRAISED 1 0 1 1 +PRAISE 3 0 3 3 +PRAIRIES 2 0 2 2 +PRAIRIE 2 0 2 2 +PRACTISED 1 0 1 1 +PRACTICALLY 4 0 4 4 +PRACTICAL 6 0 6 6 +POYSER 9 0 9 9 +POWERS 3 0 3 3 +POWERLESS 1 0 1 1 +POWERFUL 2 0 2 2 +POWDERY 1 0 1 1 +POWDER 8 0 8 8 +POVERTY 2 0 2 2 +POURING 1 0 1 1 +POURED 3 0 3 3 +POUR 2 0 2 2 +POUNDED 1 0 1 1 +POUND 4 0 4 4 +POTFULS 1 0 1 1 +POTATOES 1 0 1 1 +POT 2 0 2 2 +POSTS 1 0 1 1 +POSTPONEMENT 1 0 1 1 +POSTERITY 2 0 2 2 +POSTED 1 0 1 1 +POST 3 0 3 3 +POSSIBLY 1 0 1 1 +POSSIBLE 6 0 6 6 +POSSESSING 1 0 1 1 +POSSE 2 0 2 2 +POSITIVELY 3 0 3 3 
+POSITIVE 1 0 1 1 +POSITION 9 0 9 9 +PORTRAIT 1 0 1 1 +PORTIONS 3 0 3 3 +PORTION 2 0 2 2 +PORTAL 1 0 1 1 +POPULATION 3 0 3 3 +POPULATED 1 0 1 1 +POPULAR 7 0 7 7 +POPPIES 1 0 1 1 +POPPED 1 0 1 1 +POORLY 1 0 1 1 +POOR 15 0 15 15 +POOLS 1 0 1 1 +PONY 1 0 1 1 +PONDS 1 0 1 1 +PONDERING 1 0 1 1 +POLLY'S 3 0 3 3 +POLLY 4 0 4 4 +POLITICAL 4 0 4 4 +POLITELY 2 0 2 2 +POLITE 1 0 1 1 +POLISHED 3 0 3 3 +POLISH 1 0 1 1 +POLICE 1 0 1 1 +POLE 2 0 2 2 +POLAR 1 0 1 1 +POISON 1 0 1 1 +POISED 1 0 1 1 +POINTS 3 0 3 3 +POINTING 4 0 4 4 +POINTEDLY 1 0 1 1 +POINTED 3 0 3 3 +POETRY 1 0 1 1 +POETIC 1 0 1 1 +POETESS 1 0 1 1 +POET 2 0 2 2 +POEMS 1 0 1 1 +POEM 3 0 3 3 +POCKETS 3 0 3 3 +POCKET 3 0 3 3 +PLURALITY 1 0 1 1 +PLUNGES 1 0 1 1 +PLUNDER 2 0 2 2 +PLUMES 1 0 1 1 +PLUCKING 1 0 1 1 +PLOT 3 0 3 3 +PLIABLE 1 0 1 1 +PLESIOSAURUS 1 0 1 1 +PLENTY 1 0 1 1 +PLENTIFUL 1 0 1 1 +PLEASURE 5 0 5 5 +PLEASING 1 0 1 1 +PLEASES 3 0 3 3 +PLEASED 4 0 4 4 +PLEASE 11 0 11 11 +PLEASANTLY 2 0 2 2 +PLEASANT 5 0 5 5 +PLEADED 1 0 1 1 +PLAYTHINGS 3 0 3 3 +PLAYS 1 0 1 1 +PLAYING 5 0 5 5 +PLAYER 2 0 2 2 +PLAYED 3 0 3 3 +PLATTERS 1 0 1 1 +PLATONIC 1 0 1 1 +PLATO'S 1 0 1 1 +PLATO 6 0 6 6 +PLATFORM 3 0 3 3 +PLATE 1 0 1 1 +PLASTER 1 0 1 1 +PLANTS 4 0 4 4 +PLANTED 3 0 3 3 +PLANTATIONS 1 0 1 1 +PLANT 7 0 7 7 +PLANS 2 0 2 2 +PLANNED 2 0 2 2 +PLAN 6 0 6 6 +PLAINER 1 0 1 1 +PLAIN 4 0 4 4 +PLACING 2 0 2 2 +PLACIDITY 1 0 1 1 +PLACID 1 0 1 1 +PLACED 8 0 8 8 +PITYING 1 0 1 1 +PITY 6 0 6 6 +PITIABLE 1 0 1 1 +PITH 1 0 1 1 +PITCHED 1 0 1 1 +PIT 1 0 1 1 +PISTOL 1 0 1 1 +PIPT 1 0 1 1 +PIPE 2 0 2 2 +PINKIES 3 0 3 3 +PINK 2 0 2 2 +PINIONED 1 0 1 1 +PINING 1 0 1 1 +PINES 1 0 1 1 +PINE 1 0 1 1 +PILLION 1 0 1 1 +PILLARS 1 0 1 1 +PILGRIM'S 1 0 1 1 +PILGRIM 1 0 1 1 +PILED 1 0 1 1 +PIGMENT 1 0 1 1 +PIETY 1 0 1 1 +PIECES 3 0 3 3 +PICTURESQUENESS 1 0 1 1 +PICTURES 3 0 3 3 +PICTURE 6 0 6 6 +PICNIC 1 0 1 1 +PICKED 3 0 3 3 +PIAZZA 3 0 3 3 +PIANO 4 0 4 4 +PHYSIOLOGY 1 0 1 1 +PHYSIOLOGICAL 1 0 1 1 +PHYSICS 1 0 1 1 +PHYSICAL 2 0 2 2 +PHRONSIE 6 0 6 6 +PHRASE 3 0 3 3 +PHOENICIAN 1 0 1 1 +PHILOSOPHY 2 0 2 2 +PHILOSOPHICAL 1 0 1 1 +PHILOSOPHER 5 0 5 5 +PHILOLOGIST 1 0 1 1 +PHILIP'S 1 0 1 1 +PHILIP 9 0 9 9 +PHENOMENON 1 0 1 1 +PHENOMENA 1 0 1 1 +PHEASANT 1 0 1 1 +PHASES 1 0 1 1 +PHASE 1 0 1 1 +PHANTOM 1 0 1 1 +PEWTER 2 0 2 2 +PERVERTED 1 0 1 1 +PERVERSE 1 0 1 1 +PERVADED 1 0 1 1 +PERTH 1 0 1 1 +PERSUASIVE 1 0 1 1 +PERSUADED 1 0 1 1 +PERSUADE 1 0 1 1 +PERSPIRATION 1 0 1 1 +PERSONS 13 0 13 13 +PERSONALLY 2 0 2 2 +PERSONAGE 2 0 2 2 +PERSEVERED 1 0 1 1 +PERSECUTORS 1 0 1 1 +PERSECUTION 2 0 2 2 +PERSECUTED 1 0 1 1 +PERPLEXITY 1 0 1 1 +PERPLEXED 2 0 2 2 +PERPETUATE 1 0 1 1 +PERPETUAL 1 0 1 1 +PERNICIOUS 1 0 1 1 +PERMITTING 1 0 1 1 +PERMITTED 2 0 2 2 +PERMIT 1 0 1 1 +PERMISSION 1 0 1 1 +PERMANENT 2 0 2 2 +PERISHES 1 0 1 1 +PERIODICALS 1 0 1 1 +PERIOD 8 0 8 8 +PERILS 1 0 1 1 +PERIL 2 0 2 2 +PERHAPS 17 0 17 17 +PERFORMING 1 0 1 1 +PERFORMED 1 0 1 1 +PERFORM 3 0 3 3 +PERFECTLY 8 0 8 8 +PERFECTION 4 0 4 4 +PERCY 1 0 1 1 +PERCHANCE 1 0 1 1 +PERCH 2 0 2 2 +PERCEPTION 2 0 2 2 +PERCEIVING 1 0 1 1 +PERCEIVES 1 0 1 1 +PERCEIVED 2 0 2 2 +PERCEIVE 2 0 2 2 +PEPPERS 1 0 1 1 +PEPPERED 1 0 1 1 +PEPPER 1 0 1 1 +PEOPLE 36 0 36 36 +PENSIVE 1 0 1 1 +PENSION 1 0 1 1 +PENINSULA 2 0 2 2 +PENETRATING 1 0 1 1 +PENETRATE 2 0 2 2 +PENDULOUS 1 0 1 1 +PENCILLED 1 0 1 1 +PENCIL 2 0 2 2 +PENANCE 1 0 1 1 +PENALTY 2 0 2 2 +PENAL 1 0 1 1 +PELL 1 0 1 1 +PEERING 3 0 3 3 +PEEPED 2 0 2 2 +PECUNIARY 6 0 6 6 +PECULIARLY 1 0 1 1 +PECULIAR 1 0 1 1 +PEARLY 2 0 2 2 +PEARL'S 1 0 1 1 +PEAKED 1 0 1 1 +PEACEFUL 1 0 1 1 +PEACEABLE 1 
0 1 1 +PAYMENT 1 0 1 1 +PAYING 1 0 1 1 +PAY 3 0 3 3 +PAVEMENT 1 0 1 1 +PAUSED 4 0 4 4 +PAUSE 1 0 1 1 +PATTING 1 0 1 1 +PATRONIZING 1 0 1 1 +PATRON 1 0 1 1 +PATRIARCHAL 1 0 1 1 +PATHS 1 0 1 1 +PATHOLOGICAL 1 0 1 1 +PATH 4 0 4 4 +PATCHWORK 4 0 4 4 +PATCHING 2 0 2 2 +PATCHES 2 0 2 2 +PATCHED 1 0 1 1 +PASTNESS 1 0 1 1 +PASTEBOARD 1 0 1 1 +PAST 12 0 12 12 +PASSIONS 1 0 1 1 +PASSIONATE 1 0 1 1 +PASSING 3 0 3 3 +PASSES 1 0 1 1 +PASSER 1 0 1 1 +PASSED 15 0 15 15 +PASSAGES 1 0 1 1 +PASS 5 0 5 5 +PARTY 9 0 9 9 +PARTS 7 0 7 7 +PARTOOK 2 0 2 2 +PARTNER 1 0 1 1 +PARTLY 7 0 7 7 +PARTITION 1 0 1 1 +PARTISAN 1 0 1 1 +PARTING 2 0 2 2 +PARTIES 4 0 4 4 +PARTICULARS 2 0 2 2 +PARTICULARLY 6 0 6 6 +PARTICULAR 4 0 4 4 +PARTIALLY 1 0 1 1 +PARTED 2 0 2 2 +PART 22 0 22 22 +PARSONAGE 1 0 1 1 +PARROT 2 0 2 2 +PARRIED 1 0 1 1 +PARLIAMENTS 1 0 1 1 +PARLIAMENTARY 1 0 1 1 +PARLIAMENT 5 0 5 5 +PARK 1 0 1 1 +PARIS 9 0 9 9 +PARENTS 4 0 4 4 +PARENT 2 0 2 2 +PARDON 1 0 1 1 +PARAPHERNALIA 2 0 2 2 +PARAGRAPH 1 0 1 1 +PAPERS 8 0 8 8 +PAPER 8 0 8 8 +PANTOMIME 1 0 1 1 +PANTING 1 0 1 1 +PANTED 1 0 1 1 +PANS 1 0 1 1 +PANGS 1 0 1 1 +PANG 1 0 1 1 +PANES 1 0 1 1 +PANEL 1 0 1 1 +PANE 1 0 1 1 +PAN 1 0 1 1 +PALM 3 0 3 3 +PALINGS 1 0 1 1 +PALE 8 0 8 8 +PALAIS 1 0 1 1 +PALACE 4 0 4 4 +PAIR 5 0 5 5 +PAINTING 2 0 2 2 +PAINTER 2 0 2 2 +PAINTED 4 0 4 4 +PAINS 2 0 2 2 +PAINFULLY 1 0 1 1 +PAINFUL 3 0 3 3 +PAIN 7 0 7 7 +PAIL 1 0 1 1 +PAID 7 0 7 7 +PAGES 1 0 1 1 +PAGE 2 0 2 2 +PACKING 1 0 1 1 +PACKET 1 0 1 1 +PACKED 1 0 1 1 +PACK 1 0 1 1 +PACING 1 0 1 1 +PACIFIED 1 0 1 1 +PACED 3 0 3 3 +OZMA 1 0 1 1 +OZ 4 0 4 4 +OWNER 1 0 1 1 +OWNED 2 0 2 2 +OWN 69 0 69 69 +OWLS 1 0 1 1 +OWING 3 0 3 3 +OWEN 1 0 1 1 +OVERWROUGHT 1 0 1 1 +OVERWHELMED 1 0 1 1 +OVERWHELM 1 0 1 1 +OVERTHROW 1 0 1 1 +OVERSTATEMENT 1 0 1 1 +OVERRATED 1 0 1 1 +OVERLOOKER 1 0 1 1 +OVERLOOKED 1 0 1 1 +OVERLEAN 1 0 1 1 +OVERHEAD 1 0 1 1 +OVERHANGING 2 0 2 2 +OVERFLOWING 1 0 1 1 +OVERCOAT 1 0 1 1 +OVERBEARING 1 0 1 1 +OVAL 2 0 2 2 +OUTWARD 1 0 1 1 +OUTSTRIP 1 0 1 1 +OUTSTRETCHED 2 0 2 2 +OUTSKIRTS 1 0 1 1 +OUTSIDE 4 0 4 4 +OUTSET 1 0 1 1 +OUTRIGHT 1 0 1 1 +OUTLINED 1 0 1 1 +OUTLINE 2 0 2 2 +OUTLAWS 3 0 3 3 +OUTFIT 1 0 1 1 +OUTER 2 0 2 2 +OUTCRY 1 0 1 1 +OUTCAST 1 0 1 1 +OUT 100 0 100 100 +OURSELVES 6 0 6 6 +OURS 2 0 2 2 +OUNCE 2 0 2 2 +OUGHT 10 0 10 10 +OTHERWISE 5 0 5 5 +OTHER'S 2 0 2 2 +OSTENSIBLY 2 0 2 2 +OSTENSIBLE 1 0 1 1 +OSCILLATION 1 0 1 1 +ORNAMENTS 1 0 1 1 +ORNAMENTAL 2 0 2 2 +ORNAMENT 3 0 3 3 +ORLEANS 1 0 1 1 +ORIGINATED 1 0 1 1 +ORIGINAL 1 0 1 1 +ORIGIN 7 0 7 7 +ORGANS 1 0 1 1 +ORGANIZED 5 0 5 5 +ORGANIZATIONS 4 0 4 4 +ORGANIZATION 3 0 3 3 +ORGAN 1 0 1 1 +ORDINARY 3 0 3 3 +ORDERLY 1 0 1 1 +ORDERED 3 0 3 3 +ORDER 22 0 22 22 +ORBIT 2 0 2 2 +ORACLE 1 0 1 1 +OPPRESSOR 1 0 1 1 +OPPRESSIVE 1 0 1 1 +OPPRESSION 1 0 1 1 +OPPRESSED 1 0 1 1 +OPPOSITE 2 0 2 2 +OPPOSED 2 0 2 2 +OPPOSE 4 0 4 4 +OPPORTUNITY 4 0 4 4 +OPPORTUNITIES 2 0 2 2 +OPPORTUNE 1 0 1 1 +OPPONENT 2 0 2 2 +OPINIONS 1 0 1 1 +OPINION'S 1 0 1 1 +OPINION 9 0 9 9 +OPERATOR 1 0 1 1 +OPERATIONS 1 0 1 1 +OPERATION 2 0 2 2 +OPERATE 1 0 1 1 +OPENING 6 0 6 6 +OPENED 11 0 11 11 +OPEN 23 0 23 23 +OPAQUE 1 0 1 1 +ONWARD 4 0 4 4 +ONES 2 0 2 2 +ONE'S 1 0 1 1 +ONCE 22 0 22 22 +OMELETTE 1 0 1 1 +OLDEST 1 0 1 1 +OLDER 3 0 3 3 +OLDEN 1 0 1 1 +OJO 7 0 7 7 +OHIO 1 0 1 1 +OFTEN 13 0 13 13 +OFFICIALS 3 0 3 3 +OFFICIAL 1 0 1 1 +OFFICE 11 0 11 11 +OFFERS 1 0 1 1 +OFFERING 1 0 1 1 +OFFER 1 0 1 1 +OFFENDS 1 0 1 1 +OFFENDED 1 0 1 1 +OFFALS 1 0 1 1 +ODORS 1 0 1 1 +ODIOUS 1 0 1 1 +ODIN 1 0 1 1 +ODDLY 1 0 1 1 +ODD 3 0 3 3 +OCTOBER 1 0 1 1 +OCEANOGRAPHER 1 0 1 
1 +OCEAN 5 0 5 5 +OCCURS 3 0 3 3 +OCCURRENCES 2 0 2 2 +OCCURRENCE 3 0 3 3 +OCCURRED 2 0 2 2 +OCCUR 1 0 1 1 +OCCUPY 1 0 1 1 +OCCUPIED 6 0 6 6 +OCCUPATION 2 0 2 2 +OCCASIONS 1 0 1 1 +OCCASIONALLY 1 0 1 1 +OCCASIONAL 1 0 1 1 +OCCASION 13 0 13 13 +OBVIOUS 3 0 3 3 +OBTAINED 1 0 1 1 +OBTAIN 3 0 3 3 +OBSTINATE 1 0 1 1 +OBSTINACY 2 0 2 2 +OBSTACLES 1 0 1 1 +OBSERVING 2 0 2 2 +OBSERVERS 1 0 1 1 +OBSERVATION 3 0 3 3 +OBSERVANCES 1 0 1 1 +OBSCURE 3 0 3 3 +OBNOXIOUS 1 0 1 1 +OBLIVION 1 0 1 1 +OBLITERATED 1 0 1 1 +OBLITERATE 1 0 1 1 +OBLIGED 1 0 1 1 +OBJECTIONS 1 0 1 1 +OBJECTION 1 0 1 1 +OBJECT 16 0 16 16 +OBEYING 2 0 2 2 +OBEYED 2 0 2 2 +OBEY 1 0 1 1 +OBEDIENCE 1 0 1 1 +OATMEAL 1 0 1 1 +OATH 1 0 1 1 +OARS 1 0 1 1 +OAK 2 0 2 2 +O'CLOCK 6 0 6 6 +NURSED 1 0 1 1 +NURSE 1 0 1 1 +NUNKIE 1 0 1 1 +NUMERICAL 1 0 1 1 +NUMBERS 4 0 4 4 +NUMBERED 1 0 1 1 +NUMBER 6 0 6 6 +NUDITY 1 0 1 1 +NUDGED 1 0 1 1 +NOWHERE 1 0 1 1 +NOVEL 1 0 1 1 +NOURISHING 1 0 1 1 +NOTWITHSTANDING 1 0 1 1 +NOTORIOUS 1 0 1 1 +NOTIONS 1 0 1 1 +NOTION 1 0 1 1 +NOTING 1 0 1 1 +NOTICING 1 0 1 1 +NOTICED 1 0 1 1 +NOTICEABLE 1 0 1 1 +NOTICE 3 0 3 3 +NOTED 1 0 1 1 +NOTE 4 0 4 4 +NOTARY 1 0 1 1 +NOTABLE 3 0 3 3 +NORWEGIAN 1 0 1 1 +NORWAY 1 0 1 1 +NORTHERN 1 0 1 1 +NONSENSE 1 0 1 1 +NONE 12 0 12 12 +NON 4 0 4 4 +NOMADS 1 0 1 1 +NOISILY 1 0 1 1 +NOISE 2 0 2 2 +NODS 1 0 1 1 +NOD 1 0 1 1 +NOBODY 6 0 6 6 +NOBLEST 1 0 1 1 +NOBLER 2 0 2 2 +NOBLEMAN'S 1 0 1 1 +NOBLE 10 0 10 10 +NITROGEN 1 0 1 1 +NINTH 1 0 1 1 +NINETY 2 0 2 2 +NIMBLY 1 0 1 1 +NIMBLE 1 0 1 1 +NIGHTS 3 0 3 3 +NIGHTLY 1 0 1 1 +NIGHTINGALE'S 1 0 1 1 +NIECE 1 0 1 1 +NICEST 1 0 1 1 +NICER 1 0 1 1 +NICELY 1 0 1 1 +NEXT 12 0 12 12 +NEWSPAPER 4 0 4 4 +NEWS 2 0 2 2 +NEWLY 1 0 1 1 +NEVERTHELESS 3 0 3 3 +NETTLES 1 0 1 1 +NETTLED 1 0 1 1 +NESTING 1 0 1 1 +NERVOUSNESS 1 0 1 1 +NERVOUSLY 1 0 1 1 +NERVOUS 4 0 4 4 +NERVES 2 0 2 2 +NEMO 4 0 4 4 +NELL 1 0 1 1 +NEITHER 9 0 9 9 +NEIGHBORS 2 0 2 2 +NEGROES 2 0 2 2 +NEGRO 1 0 1 1 +NEGOTIATIONS 1 0 1 1 +NEGLIGENT 1 0 1 1 +NEGLECTED 2 0 2 2 +NEGLECT 1 0 1 1 +NEEDS 2 0 2 2 +NEEDING 1 0 1 1 +NEEDED 5 0 5 5 +NECESSITY 2 0 2 2 +NECESSITIES 1 0 1 1 +NECESSARY 9 0 9 9 +NECESSARILY 2 0 2 2 +NEATLY 2 0 2 2 +NEAT 1 0 1 1 +NEARLY 10 0 10 10 +NEAREST 1 0 1 1 +NEARED 1 0 1 1 +NAVY 1 0 1 1 +NAUTILUS 2 0 2 2 +NATURES 1 0 1 1 +NATURED 1 0 1 1 +NATURE 17 0 17 17 +NATURALLY 1 0 1 1 +NATURALISTS 2 0 2 2 +NATURALIST 1 0 1 1 +NATURAL 10 0 10 10 +NATIVE 5 0 5 5 +NATIONS 2 0 2 2 +NATIONAL 3 0 3 3 +NATION 2 0 2 2 +NASTY 1 0 1 1 +NARWHALE 1 0 1 1 +NARROWS 1 0 1 1 +NARROW 6 0 6 6 +NARRATIVE 2 0 2 2 +NAPIER 1 0 1 1 +NAPE 1 0 1 1 +NANCY'S 2 0 2 2 +NANCY 1 0 1 1 +NAMES 2 0 2 2 +NAMELY 2 0 2 2 +NAKEDNESS 1 0 1 1 +NAKED 1 0 1 1 +MYTHOLOGICAL 1 0 1 1 +MYTHICAL 1 0 1 1 +MYSTIFIED 1 0 1 1 +MYSTERY 5 0 5 5 +MYSTERIOUSLY 1 0 1 1 +MYSTERIOUS 3 0 3 3 +MYSELF 25 0 25 25 +MUTUAL 2 0 2 2 +MUTTON 1 0 1 1 +MUTTERING 1 0 1 1 +MUTILATION 1 0 1 1 +MUTABILITY 1 0 1 1 +MUSTARD 1 0 1 1 +MUST 66 0 66 66 +MUSSULMANS 1 0 1 1 +MUSICIANS 1 0 1 1 +MUSIC 6 0 6 6 +MUSHROOMS 1 0 1 1 +MUSEUM 1 0 1 1 +MURMURED 4 0 4 4 +MURMUR 2 0 2 2 +MURDERS 1 0 1 1 +MURDERERS 1 0 1 1 +MUNCHKINS 2 0 2 2 +MUNCHKIN 1 0 1 1 +MULTIPLE 2 0 2 2 +MUFFLED 1 0 1 1 +MUDDY 1 0 1 1 +MUD 1 0 1 1 +MOWED 2 0 2 2 +MOW 1 0 1 1 +MOVING 1 0 1 1 +MOVES 1 0 1 1 +MOVEMENTS 3 0 3 3 +MOVEMENT 5 0 5 5 +MOVE 4 0 4 4 +MOUTHS 3 0 3 3 +MOUTHED 2 0 2 2 +MOUTH 5 0 5 5 +MOUSE 2 0 2 2 +MOURNFUL 1 0 1 1 +MOURN 1 0 1 1 +MOUNTAINS 2 0 2 2 +MOTTO 1 0 1 1 +MOTTLED 1 0 1 1 +MOTORS 1 0 1 1 +MOTIVES 5 0 5 5 +MOTIONLESS 1 0 1 1 +MOTIONING 1 0 1 1 +MOTIONED 2 0 2 2 +MOTION 1 0 1 1 
+MOTHERS 2 0 2 2 +MOTHER'S 4 0 4 4 +MOTHER 32 0 32 32 +MOSSY 1 0 1 1 +MOSS 1 0 1 1 +MOSAIC 1 0 1 1 +MORTALS 1 0 1 1 +MORTALLY 1 0 1 1 +MORTAL 1 0 1 1 +MORROW 6 0 6 6 +MORRIS 1 0 1 1 +MORNINGS 1 0 1 1 +MORN 1 0 1 1 +MORMON 5 0 5 5 +MOREOVER 1 0 1 1 +MORBID 1 0 1 1 +MORAL 1 0 1 1 +MOONLIGHT 2 0 2 2 +MOONBEAMS 1 0 1 1 +MOON 4 0 4 4 +MOOD 2 0 2 2 +MONTROSE'S 1 0 1 1 +MONTROSE 6 0 6 6 +MONTHS 4 0 4 4 +MONTH 4 0 4 4 +MONTALAIS 4 0 4 4 +MONSTERS 2 0 2 2 +MONSTER 1 0 1 1 +MONSIEUR 1 0 1 1 +MONOTONOUS 1 0 1 1 +MONCEUX 1 0 1 1 +MOMENTS 5 0 5 5 +MOMENTOUS 1 0 1 1 +MOMENTARY 1 0 1 1 +MOMENT 32 0 32 32 +MOLLY 3 0 3 3 +MOLECULES 1 0 1 1 +MOISTURE 1 0 1 1 +MOIST 1 0 1 1 +MOHAMMED 1 0 1 1 +MODIFICATION 1 0 1 1 +MODEST 3 0 3 3 +MODES 2 0 2 2 +MODERNS 1 0 1 1 +MODERN 8 0 8 8 +MODERATE 2 0 2 2 +MODEL 1 0 1 1 +MODE 2 0 2 2 +MOCCASIN 1 0 1 1 +MOBS 1 0 1 1 +MOBILITY 2 0 2 2 +MOB 3 0 3 3 +MOANING 1 0 1 1 +MOAN 1 0 1 1 +MIXTURE 2 0 2 2 +MIXED 4 0 4 4 +MITIGATE 1 0 1 1 +MISUNDERSTANDING 1 0 1 1 +MISTY 1 0 1 1 +MISTRESS 10 0 10 10 +MISTAKEN 2 0 2 2 +MISTAKE 2 0 2 2 +MISSUS 23 0 23 23 +MISSOURI 6 0 6 6 +MISSIONARY 1 0 1 1 +MISSIONARIES 1 0 1 1 +MISSION 3 0 3 3 +MISSED 2 0 2 2 +MISGOVERNMENT 1 0 1 1 +MISFORTUNES 1 0 1 1 +MISFORTUNE 4 0 4 4 +MISERY 3 0 3 3 +MISERABLY 1 0 1 1 +MISERABLE 2 0 2 2 +MISCHIEF 1 0 1 1 +MISCHANCE 1 0 1 1 +MIRROR 2 0 2 2 +MIRACULOUSLY 1 0 1 1 +MIRACLE 2 0 2 2 +MINUTES 6 0 6 6 +MINUTE 2 0 2 2 +MINT 1 0 1 1 +MINORITY 1 0 1 1 +MINISTRY 3 0 3 3 +MINISTERS 1 0 1 1 +MINISTER 3 0 3 3 +MINIATURE 1 0 1 1 +MINGOES 1 0 1 1 +MINGLES 1 0 1 1 +MINGLED 1 0 1 1 +MINDS 3 0 3 3 +MINDFUL 1 0 1 1 +MINDED 1 0 1 1 +MILNER'S 3 0 3 3 +MILLIONS 1 0 1 1 +MILKING 1 0 1 1 +MILKED 1 0 1 1 +MILK 1 0 1 1 +MILITIA 3 0 3 3 +MILITARY 7 0 7 7 +MILE 1 0 1 1 +MILDLY 1 0 1 1 +MILD 2 0 2 2 +MILAN 1 0 1 1 +MIGHTY 4 0 4 4 +MIGHTILY 1 0 1 1 +MIGHT 48 0 48 48 +MIDWIFE 1 0 1 1 +MIDST 2 0 2 2 +MIDDLE 4 0 4 4 +MIDDAY 1 0 1 1 +MICROSCOPE 1 0 1 1 +MICE 5 0 5 5 +METROPOLIS 1 0 1 1 +METHODS 3 0 3 3 +METHOD 3 0 3 3 +METAPHOR 1 0 1 1 +METAMORPHOSIS 1 0 1 1 +METALLIC 1 0 1 1 +METAL 1 0 1 1 +MESSAGE 2 0 2 2 +MESHES 1 0 1 1 +MERITS 2 0 2 2 +MERIT 2 0 2 2 +MERIDIAN 2 0 2 2 +MERELY 5 0 5 5 +MERE 4 0 4 4 +MERCHANT 1 0 1 1 +MENTIONS 1 0 1 1 +MENTIONED 5 0 5 5 +MENTION 1 0 1 1 +MENTAL 2 0 2 2 +MENIAL 1 0 1 1 +MENDING 2 0 2 2 +MENAGERIE 1 0 1 1 +MEMORY 21 0 21 21 +MEMBERS 4 0 4 4 +MEMBER 2 0 2 2 +MELTS 1 0 1 1 +MELODY 1 0 1 1 +MELANCHOLY 2 0 2 2 +MEETING 5 0 5 5 +MEEK 1 0 1 1 +MEDIUM 1 0 1 1 +MEDITERRANEAN 4 0 4 4 +MEDITATIVE 1 0 1 1 +MEDITATION 1 0 1 1 +MEDICINE 6 0 6 6 +MECHANICS 1 0 1 1 +MEAT 1 0 1 1 +MEASURES 2 0 2 2 +MEASURED 2 0 2 2 +MEASURE 6 0 6 6 +MEANWHILE 4 0 4 4 +MEANTIME 2 0 2 2 +MEANT 5 0 5 5 +MEANS 17 0 17 17 +MEANINGS 1 0 1 1 +MEANING 4 0 4 4 +MEALS 4 0 4 4 +MEAL 5 0 5 5 +MEADOWS 1 0 1 1 +MEADOWCROFT'S 1 0 1 1 +MAXIMUM 1 0 1 1 +MAXIMS 1 0 1 1 +MATURE 1 0 1 1 +MATTHEWS 1 0 1 1 +MATTERS 5 0 5 5 +MATTERED 1 0 1 1 +MATTER 20 0 20 20 +MATHEMATICS 1 0 1 1 +MATERIALS 2 0 2 2 +MATERIALLY 1 0 1 1 +MATERIALISM 1 0 1 1 +MATERIAL 3 0 3 3 +MATED 1 0 1 1 +MATCHLESS 1 0 1 1 +MASTERY 1 0 1 1 +MASTERPIECE 1 0 1 1 +MASTERLY 1 0 1 1 +MAST 2 0 2 2 +MASSES 1 0 1 1 +MASSACHUSETTS 1 0 1 1 +MASKS 1 0 1 1 +MARVELS 1 0 1 1 +MARVELLED 1 0 1 1 +MARVEL 2 0 2 2 +MARTIN 2 0 2 2 +MARTIAL 1 0 1 1 +MARTHA 2 0 2 2 +MARRY 1 0 1 1 +MARRIED 2 0 2 2 +MARRIAGE 5 0 5 5 +MARQUIS 1 0 1 1 +MARKS 4 0 4 4 +MARKING 1 0 1 1 +MARKHAM 2 0 2 2 +MARKET 1 0 1 1 +MARKED 3 0 3 3 +MARK 6 0 6 6 +MARINE 2 0 2 2 +MARIE'S 1 0 1 1 +MARIE 6 0 6 6 +MARIANNE 1 0 1 1 +MARIA 1 0 1 1 +MARGIN 1 0 1 1 
+MARGARET 1 0 1 1 +MARCHES 1 0 1 1 +MARCHED 2 0 2 2 +MARCH 7 0 7 7 +MARBLE 2 0 2 2 +MAP 2 0 2 2 +MANUSCRIPT 2 0 2 2 +MANUFACTURER 3 0 3 3 +MANSION 1 0 1 1 +MANSERVANT 1 0 1 1 +MANOEUVRING 1 0 1 1 +MANNERS 1 0 1 1 +MANNER 14 0 14 14 +MANNA 1 0 1 1 +MANKIND 2 0 2 2 +MANIFOLD 1 0 1 1 +MANIFESTLY 1 0 1 1 +MANICAMP 1 0 1 1 +MANHOOD 1 0 1 1 +MANDIBLE 1 0 1 1 +MANAGING 1 0 1 1 +MANAGERS 1 0 1 1 +MANAGEMENT 3 0 3 3 +MANAGED 4 0 4 4 +MANAGE 1 0 1 1 +MAN'S 5 0 5 5 +MAMMY 1 0 1 1 +MALIGNITIES 1 0 1 1 +MALIGNED 1 0 1 1 +MALICIOUS 1 0 1 1 +MALICE 1 0 1 1 +MALADY 1 0 1 1 +MAKING 13 0 13 13 +MAKES 10 0 10 10 +MAKE 40 0 40 40 +MAJESTY'S 2 0 2 2 +MAJESTY 6 0 6 6 +MAINTAINING 1 0 1 1 +MAINTAINED 4 0 4 4 +MAINSAIL 1 0 1 1 +MAINLY 1 0 1 1 +MAIDS 4 0 4 4 +MAID'S 1 0 1 1 +MAHOGANY 1 0 1 1 +MAGNIFIED 1 0 1 1 +MAGNIFICENT 3 0 3 3 +MAGNIFICENCE 1 0 1 1 +MAGISTRACY 1 0 1 1 +MAGICIAN 5 0 5 5 +MAGIC 4 0 4 4 +MAGAZINE 1 0 1 1 +MADNESS 1 0 1 1 +MADEMOISELLE 5 0 5 5 +MADAME'S 1 0 1 1 +MAD 3 0 3 3 +MACHINES 1 0 1 1 +MACHINE 1 0 1 1 +MABEL 1 0 1 1 +LYNCHINGS 1 0 1 1 +LYING 4 0 4 4 +LUXURIES 2 0 2 2 +LUXURIANT 1 0 1 1 +LUTHERAN 2 0 2 2 +LUTHER 3 0 3 3 +LUSTROUS 1 0 1 1 +LUSTRE 1 0 1 1 +LURKING 1 0 1 1 +LURID 1 0 1 1 +LUNGS 1 0 1 1 +LUMPS 1 0 1 1 +LUMP 1 0 1 1 +LUMINOUS 2 0 2 2 +LUKE 1 0 1 1 +LUGUBRIOUS 1 0 1 1 +LUCY 1 0 1 1 +LUCRETIUS 1 0 1 1 +LUCID 1 0 1 1 +LOYALLY 1 0 1 1 +LOYAL 1 0 1 1 +LOWLY 2 0 2 2 +LOW 6 0 6 6 +LOVING 4 0 4 4 +LOVERS 2 0 2 2 +LOVER 1 0 1 1 +LOVELY 7 0 7 7 +LOVED 6 0 6 6 +LOVE 48 0 48 48 +LOUDNESS 1 0 1 1 +LOUDLY 2 0 2 2 +LOUDER 1 0 1 1 +LOUD 2 0 2 2 +LOTUS 1 0 1 1 +LOTS 2 0 2 2 +LOT 6 0 6 6 +LOST 12 0 12 12 +LOSS 6 0 6 6 +LOSING 3 0 3 3 +LOSES 2 0 2 2 +LOSE 3 0 3 3 +LORDS 1 0 1 1 +LORDLY 1 0 1 1 +LORDING 2 0 2 2 +LOPPED 1 0 1 1 +LOOSELY 1 0 1 1 +LOOKS 7 0 7 7 +LOOKING 16 0 16 16 +LOOKED 24 0 24 24 +LONGING 2 0 2 2 +LONGER 9 0 9 9 +LONGED 1 0 1 1 +LONG 29 0 29 29 +LONELY 2 0 2 2 +LONELINESS 1 0 1 1 +LONELIER 2 0 2 2 +LONDON 3 0 3 3 +LOGICALLY 1 0 1 1 +LOGICAL 2 0 2 2 +LOG 2 0 2 2 +LOFTINESS 1 0 1 1 +LOFTIEST 1 0 1 1 +LOFT 2 0 2 2 +LODGING 1 0 1 1 +LODGE 2 0 2 2 +LOCKED 3 0 3 3 +LOCK 1 0 1 1 +LOAF 1 0 1 1 +LOADED 1 0 1 1 +LIVING 5 0 5 5 +LIVID 1 0 1 1 +LIVES 6 0 6 6 +LIVERY 1 0 1 1 +LIVERIES 2 0 2 2 +LIVERIED 1 0 1 1 +LIVELIEST 1 0 1 1 +LIVED 8 0 8 8 +LIVE 9 0 9 9 +LITERATURE 1 0 1 1 +LITERARY 4 0 4 4 +LITERALLY 2 0 2 2 +LITERAL 2 0 2 2 +LISTLESSLY 1 0 1 1 +LISTENING 3 0 3 3 +LISTENED 5 0 5 5 +LISTEN 3 0 3 3 +LIST 1 0 1 1 +LIQUID 2 0 2 2 +LIPS 4 0 4 4 +LINKS 2 0 2 2 +LINGERED 1 0 1 1 +LINES 7 0 7 7 +LINED 2 0 2 2 +LINDENS 1 0 1 1 +LINCOLN 2 0 2 2 +LIMITATION 1 0 1 1 +LIMIT 1 0 1 1 +LIMESTONE 1 0 1 1 +LIMBS 2 0 2 2 +LIKEWISE 1 0 1 1 +LIKES 2 0 2 2 +LIKENESS 1 0 1 1 +LIKELY 2 0 2 2 +LIKED 4 0 4 4 +LIGHTS 5 0 5 5 +LIGHTNING 1 0 1 1 +LIGHTLY 3 0 3 3 +LIGHTING 6 0 6 6 +LIGHTED 5 0 5 5 +LIFTING 2 0 2 2 +LIFTED 1 0 1 1 +LIFT 2 0 2 2 +LIFE'S 1 0 1 1 +LIFE 47 0 47 47 +LIEUTENANT 6 0 6 6 +LIES 8 0 8 8 +LIEDENBROCK 1 0 1 1 +LICHEN 1 0 1 1 +LIBRARY 3 0 3 3 +LIBERTY 3 0 3 3 +LIBERAL 1 0 1 1 +LIABLE 2 0 2 2 +LEXINGTON 1 0 1 1 +LEVIED 2 0 2 2 +LEVELS 1 0 1 1 +LEVEL 3 0 3 3 +LETTING 1 0 1 1 +LETTERS 4 0 4 4 +LETTER 12 0 12 12 +LET'S 2 0 2 2 +LET 27 0 27 27 +LEST 2 0 2 2 +LESSONS 1 0 1 1 +LESSON 1 0 1 1 +LENGTHY 1 0 1 1 +LENGTHS 1 0 1 1 +LENGTH 4 0 4 4 +LEISURELY 1 0 1 1 +LEISURE 11 0 11 11 +LEGS 3 0 3 3 +LEGISLATURE 4 0 4 4 +LEGISLATORS 1 0 1 1 +LEGISLATIVE 1 0 1 1 +LEGATE 1 0 1 1 +LEGAL 1 0 1 1 +LEG 1 0 1 1 +LEFT 34 0 34 34 +LEECH 2 0 2 2 +LECTURES 2 0 2 2 +LECTURE 3 0 3 3 +LECOMPTON 1 0 1 1 +LEAVE 16 0 16 16 
+LEATHER 1 0 1 1 +LEASH 1 0 1 1 +LEARNING 1 0 1 1 +LEARNED 4 0 4 4 +LEARN 4 0 4 4 +LEAPS 2 0 2 2 +LEAP 2 0 2 2 +LEANING 3 0 3 3 +LEANED 5 0 5 5 +LEAN 1 0 1 1 +LEAGUES 1 0 1 1 +LEAGUE 1 0 1 1 +LEADING 3 0 3 3 +LEADERS 1 0 1 1 +LEADER 2 0 2 2 +LEAD 8 0 8 8 +LAZILY 1 0 1 1 +LAYS 1 0 1 1 +LAYMAN 1 0 1 1 +LAYING 1 0 1 1 +LAY 16 0 16 16 +LAWYER 1 0 1 1 +LAWS 9 0 9 9 +LAWRENCE 2 0 2 2 +LAWN 1 0 1 1 +LAWFUL 1 0 1 1 +LAW 13 0 13 13 +LAVISHING 1 0 1 1 +LAUGHTER 2 0 2 2 +LAUGHING 6 0 6 6 +LAUGHED 6 0 6 6 +LAUGH 4 0 4 4 +LATTICE 1 0 1 1 +LATTER 9 0 9 9 +LATIN 3 0 3 3 +LATEST 1 0 1 1 +LATENT 1 0 1 1 +LATE 6 0 6 6 +LATCHED 1 0 1 1 +LAST 41 0 41 41 +LASHED 1 0 1 1 +LARGEST 2 0 2 2 +LARGER 3 0 3 3 +LARGE 16 0 16 16 +LAREN 1 0 1 1 +LARDER 1 0 1 1 +LAPSE 1 0 1 1 +LAP 3 0 3 3 +LANGUISHINGLY 1 0 1 1 +LANGUID 1 0 1 1 +LANGUAGE 11 0 11 11 +LANE 1 0 1 1 +LANDSCAPE 1 0 1 1 +LANDS 2 0 2 2 +LAMPS 3 0 3 3 +LAMP 4 0 4 4 +LAMENTATION 1 0 1 1 +LAMBS 1 0 1 1 +LAMB 1 0 1 1 +LAKES 1 0 1 1 +LAKE'S 1 0 1 1 +LAGOON 4 0 4 4 +LADY 9 0 9 9 +LADLED 1 0 1 1 +LADIES 11 0 11 11 +LADDER 3 0 3 3 +LACKEY 1 0 1 1 +LACK 1 0 1 1 +LA 5 0 5 5 +KNOWN 15 0 15 15 +KNOWLEDGE 15 0 15 15 +KNOWING 5 0 5 5 +KNOT 1 0 1 1 +KNOCKING 1 0 1 1 +KNOCKED 4 0 4 4 +KNOCK 1 0 1 1 +KNITTED 1 0 1 1 +KNIGHT 1 0 1 1 +KNEW 25 0 25 25 +KNEES 3 0 3 3 +KNEELS 1 0 1 1 +KNEELING 1 0 1 1 +KITTEN 1 0 1 1 +KITES 1 0 1 1 +KITE 1 0 1 1 +KITCHEN 4 0 4 4 +KIT 1 0 1 1 +KISSING 1 0 1 1 +KISSES 1 0 1 1 +KISSED 2 0 2 2 +KISS 2 0 2 2 +KINGS 1 0 1 1 +KINGLY 1 0 1 1 +KINGDOM 4 0 4 4 +KINDS 1 0 1 1 +KINDNESS 1 0 1 1 +KINDLY 3 0 3 3 +KINDLED 3 0 3 3 +KINDER 1 0 1 1 +KIND 14 0 14 14 +KILLS 1 0 1 1 +KILLED 1 0 1 1 +KIDNAP 2 0 2 2 +KID 1 0 1 1 +KICKAPOO 1 0 1 1 +KEYNOTE 1 0 1 1 +KEY 5 0 5 5 +KETTLES 2 0 2 2 +KETTLE 1 0 1 1 +KERCHIEFS 1 0 1 1 +KEPT 5 0 5 5 +KENNINGTON 2 0 2 2 +KENNETH 9 0 9 9 +KEEPS 3 0 3 3 +KEEPING 4 0 4 4 +KEEPER 1 0 1 1 +KEEP 10 0 10 10 +KEENNESS 2 0 2 2 +KEENER 1 0 1 1 +KEEN 3 0 3 3 +KATHLEEN 1 0 1 1 +KATE 1 0 1 1 +KANSAS 3 0 3 3 +KANE 1 0 1 1 +JUSTLY 2 0 2 2 +JUSTIFICATION 2 0 2 2 +JUSTICE 3 0 3 3 +JURISDICTION 1 0 1 1 +JUMPING 1 0 1 1 +JUMPED 1 0 1 1 +JUMP 3 0 3 3 +JUDGMENT 6 0 6 6 +JUDGES 1 0 1 1 +JUDGE 5 0 5 5 +JUDAH 1 0 1 1 +JOYOUS 1 0 1 1 +JOYCE 2 0 2 2 +JOY 4 0 4 4 +JOURNEYING 1 0 1 1 +JOURNEY 5 0 5 5 +JONES 3 0 3 3 +JOLLY 5 0 5 5 +JOKED 1 0 1 1 +JOKE 2 0 2 2 +JOINED 1 0 1 1 +JOIN 2 0 2 2 +JOHNSON 1 0 1 1 +JIB 1 0 1 1 +JEWISH 1 0 1 1 +JEWELS 3 0 3 3 +JET 1 0 1 1 +JESUS 7 0 7 7 +JERSEY 1 0 1 1 +JERK 1 0 1 1 +JENKS 1 0 1 1 +JELLIES 1 0 1 1 +JEHOVAH 1 0 1 1 +JEERED 1 0 1 1 +JEALOUS 1 0 1 1 +JAWS 2 0 2 2 +JASPER'S 2 0 2 2 +JAP 1 0 1 1 +JANUARY 2 0 2 2 +JANE'S 1 0 1 1 +JANE 4 0 4 4 +JAMES 2 0 2 2 +JAILER 5 0 5 5 +JACOB'S 2 0 2 2 +JACOB 1 0 1 1 +JACKSON 1 0 1 1 +JACKET 1 0 1 1 +J 2 0 2 2 +IVORY 1 0 1 1 +ITSELF 21 0 21 21 +ITCH 1 0 1 1 +ITALIAN 2 0 2 2 +ISSUED 2 0 2 2 +ISSUE 1 0 1 1 +ISRAEL 1 0 1 1 +ISOLATED 1 0 1 1 +ISN'T 5 0 5 5 +ISLAND 5 0 5 5 +IRWINE 1 0 1 1 +IRRITABLE 1 0 1 1 +IRRESOLUTION 1 0 1 1 +IRREPARABLE 1 0 1 1 +IRREGULARITY 2 0 2 2 +IRONING 1 0 1 1 +IRON 2 0 2 2 +IRISH 2 0 2 2 +IRIDESCENT 1 0 1 1 +IRENE 1 0 1 1 +IRELAND 1 0 1 1 +INWARDLY 1 0 1 1 +INWARD 1 0 1 1 +INVOLVING 1 0 1 1 +INVOLVES 1 0 1 1 +INVOLVED 1 0 1 1 +INVOLVE 1 0 1 1 +INVITED 4 0 4 4 +INVITATION 3 0 3 3 +INVISIBLE 1 0 1 1 +INVIOLATE 1 0 1 1 +INVIDIOUS 1 0 1 1 +INVESTIGATION 1 0 1 1 +INVENTOR 1 0 1 1 +INVENTION 1 0 1 1 +INVENTED 1 0 1 1 +INVASION 1 0 1 1 +INVARIABLY 4 0 4 4 +INVARIABLE 1 0 1 1 +INVALID 1 0 1 1 +INVADER 1 0 1 1 +INVADE 1 0 1 1 +INTRODUCTION 4 0 4 4 +INTRODUCING 1 0 1 1 
+INTRODUCED 3 0 3 3 +INTRODUCE 3 0 3 3 +INTRINSIC 1 0 1 1 +INTRICATE 1 0 1 1 +INTOLERANT 1 0 1 1 +INTOLERANCY 1 0 1 1 +INTOLERABLE 1 0 1 1 +INTIMATELY 2 0 2 2 +INTIMATE 2 0 2 2 +INTERVIEWS 1 0 1 1 +INTERVIEW 3 0 3 3 +INTERSECTED 1 0 1 1 +INTERRUPTED 2 0 2 2 +INTERPRETED 1 0 1 1 +INTERPRETATION 1 0 1 1 +INTERPOSED 1 0 1 1 +INTERNAL 1 0 1 1 +INTERMINGLED 1 0 1 1 +INTERMEDIATE 1 0 1 1 +INTERLACED 1 0 1 1 +INTERFERE 2 0 2 2 +INTERESTING 3 0 3 3 +INTERESTED 4 0 4 4 +INTEREST 10 0 10 10 +INTENTLY 2 0 2 2 +INTENTIONS 1 0 1 1 +INTENTION 1 0 1 1 +INTENSITY 3 0 3 3 +INTENSIFICATION 1 0 1 1 +INTENSELY 1 0 1 1 +INTENDED 1 0 1 1 +INTEND 1 0 1 1 +INTELLIGENT 5 0 5 5 +INTELLIGENCE 7 0 7 7 +INTELLECTS 1 0 1 1 +INTELLECT 1 0 1 1 +INTEGRITY 1 0 1 1 +INTANGIBLE 1 0 1 1 +INSULT 1 0 1 1 +INSTRUMENT 1 0 1 1 +INSTRUCTIONS 4 0 4 4 +INSTITUTION 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCT 1 0 1 1 +INSTEAD 11 0 11 11 +INSTANTLY 6 0 6 6 +INSTANTANEOUS 1 0 1 1 +INSTANT 3 0 3 3 +INSTANCING 1 0 1 1 +INSTANCE 3 0 3 3 +INSTALLED 5 0 5 5 +INSTALL 1 0 1 1 +INSPIRED 1 0 1 1 +INSPIRATION 1 0 1 1 +INSOLENTLY 1 0 1 1 +INSISTS 1 0 1 1 +INSISTENCE 2 0 2 2 +INSISTED 1 0 1 1 +INSIST 1 0 1 1 +INSIPID 1 0 1 1 +INSINUATED 1 0 1 1 +INSIGNIFICANT 2 0 2 2 +INSIGHT 1 0 1 1 +INSIDE 2 0 2 2 +INSERTING 1 0 1 1 +INSENSIBLE 1 0 1 1 +INSECT 1 0 1 1 +INSATIABLE 2 0 2 2 +INNUMERABLE 2 0 2 2 +INNOCENTLY 1 0 1 1 +INNOCENT 2 0 2 2 +INNINGS 1 0 1 1 +INNER 2 0 2 2 +INMATES 1 0 1 1 +INJUSTICE 4 0 4 4 +INJURY 2 0 2 2 +INJURED 2 0 2 2 +INIQUITY 1 0 1 1 +INHUMAN 1 0 1 1 +INHERITANCE 2 0 2 2 +INHABITANTS 3 0 3 3 +INGREDIENTS 1 0 1 1 +INGENUITY 2 0 2 2 +INGENIOUS 2 0 2 2 +INFUSE 1 0 1 1 +INFORMING 1 0 1 1 +INFORMED 3 0 3 3 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENTIAL 2 0 2 2 +INFLUENCES 2 0 2 2 +INFLUENCE 8 0 8 8 +INFLICT 2 0 2 2 +INFLEXIBLE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRMITIES 1 0 1 1 +INFIRMARY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 2 0 2 2 +INFERENCE 1 0 1 1 +INFECTED 1 0 1 1 +INFANTRY 2 0 2 2 +INFANT 1 0 1 1 +INFANCY 1 0 1 1 +INFALLIBLE 1 0 1 1 +INEXPRESSIBLY 1 0 1 1 +INEXPLICABLE 2 0 2 2 +INEXPERIENCE 1 0 1 1 +INEXHAUSTIBLE 1 0 1 1 +INESTIMABLE 1 0 1 1 +INEFFECTUALLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDULGENCE 1 0 1 1 +INDULGED 1 0 1 1 +INDUCED 1 0 1 1 +INDUCE 1 0 1 1 +INDUBITABLE 1 0 1 1 +INDIVIDUALS 1 0 1 1 +INDIVIDUAL 5 0 5 5 +INDISTINGUISHABLE 1 0 1 1 +INDISTINCT 1 0 1 1 +INDISPENSABLE 1 0 1 1 +INDISCREET 1 0 1 1 +INDIRECT 1 0 1 1 +INDIGENCE 1 0 1 1 +INDIFFERENT 3 0 3 3 +INDIFFERENCE 4 0 4 4 +INDIES 1 0 1 1 +INDICATOR 1 0 1 1 +INDICATING 1 0 1 1 +INDICATES 2 0 2 2 +INDICATED 3 0 3 3 +INDICATE 2 0 2 2 +INDIANS 4 0 4 4 +INDIAN 4 0 4 4 +INDIA 1 0 1 1 +INDEPENDENTS 1 0 1 1 +INDEPENDENT 4 0 4 4 +INDEPENDENCE 1 0 1 1 +INCURRING 1 0 1 1 +INCURRED 1 0 1 1 +INCUR 1 0 1 1 +INCREASED 2 0 2 2 +INCREASE 2 0 2 2 +INCONVENIENT 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOMPREHENSIBLE 1 0 1 1 +INCOMPATIBLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 1 0 1 1 +INCLUDED 2 0 2 2 +INCLINES 1 0 1 1 +INCLINED 2 0 2 2 +INCLINATIONS 2 0 2 2 +INCITED 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENTAL 1 0 1 1 +INCIDENT 6 0 6 6 +INCHES 1 0 1 1 +INCH 2 0 2 2 +INCESSANTLY 1 0 1 1 +INCEPTION 1 0 1 1 +INCAPABLE 2 0 2 2 +INCANDESCENT 1 0 1 1 +INASMUCH 1 0 1 1 +INADEQUATE 2 0 2 2 +INADEQUACY 1 0 1 1 +INACCURACY 1 0 1 1 +INACCESSIBLE 1 0 1 1 +IMPULSIVELY 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVING 1 0 1 1 +IMPROVED 3 0 3 3 +IMPRISONMENT 1 0 1 1 +IMPRISONED 3 0 3 3 +IMPRESSIONS 6 0 6 6 +IMPRESSION 2 0 2 2 +IMPOSSIBLE 11 0 11 11 +IMPOSSIBILITY 1 0 1 1 +IMPOSED 1 0 1 1 +IMPOSE 1 0 1 1 
+IMPORTANT 7 0 7 7 +IMPORTANCE 5 0 5 5 +IMPORT 1 0 1 1 +IMPLY 1 0 1 1 +IMPLORES 1 0 1 1 +IMPLORE 1 0 1 1 +IMPLIES 3 0 3 3 +IMPLICIT 1 0 1 1 +IMPLICATION 1 0 1 1 +IMPIETY 1 0 1 1 +IMPETUS 1 0 1 1 +IMPETUOUS 3 0 3 3 +IMPERIOUSLY 1 0 1 1 +IMPERIALIST 1 0 1 1 +IMPERIALISM 1 0 1 1 +IMPERIAL 1 0 1 1 +IMPERFECTLY 1 0 1 1 +IMPERATIVE 1 0 1 1 +IMPENETRABLE 2 0 2 2 +IMPEDIMENT 1 0 1 1 +IMPATIENT 1 0 1 1 +IMPATIENCE 3 0 3 3 +IMPASSIVELY 1 0 1 1 +IMMUNITY 1 0 1 1 +IMMORTALITY 1 0 1 1 +IMMORTAL 1 0 1 1 +IMMENSELY 1 0 1 1 +IMMENSE 1 0 1 1 +IMMEDIATELY 4 0 4 4 +IMMEDIATE 2 0 2 2 +IMMEDIACY 1 0 1 1 +IMBIBING 1 0 1 1 +IMBIBED 1 0 1 1 +IMAGINING 1 0 1 1 +IMAGINED 2 0 2 2 +IMAGINE 2 0 2 2 +IMAGINATIVE 1 0 1 1 +IMAGINATION 3 0 3 3 +IMAGINARY 1 0 1 1 +IMAGINABLE 2 0 2 2 +IMAGES 8 0 8 8 +IMAGE 9 0 9 9 +ILLUSTRIOUS 2 0 2 2 +ILLUSTRATION 1 0 1 1 +ILLUSION 2 0 2 2 +ILLUMINATION 1 0 1 1 +ILLUMINATING 1 0 1 1 +ILLUMINATED 1 0 1 1 +ILLS 1 0 1 1 +ILLNESS 1 0 1 1 +IGNORANCE 2 0 2 2 +IGNOMINY 1 0 1 1 +IGNOBLE 1 0 1 1 +IDOLATRY 1 0 1 1 +IDLY 1 0 1 1 +IDLENESS 1 0 1 1 +IDLE 6 0 6 6 +IDENTITY 1 0 1 1 +IDEAS 11 0 11 11 +IDEAL 3 0 3 3 +IDEA 7 0 7 7 +ICE 1 0 1 1 +HYPOTHESIS 1 0 1 1 +HYPOCRITE 1 0 1 1 +HYPOCRISY 1 0 1 1 +HUT 4 0 4 4 +HUSSY 1 0 1 1 +HUSHED 1 0 1 1 +HUSBAND'S 1 0 1 1 +HURT 1 0 1 1 +HURRYING 2 0 2 2 +HURRIEDLY 3 0 3 3 +HURRIED 6 0 6 6 +HURONS 1 0 1 1 +HURLED 2 0 2 2 +HUNTING 2 0 2 2 +HUNTER 1 0 1 1 +HUNTED 1 0 1 1 +HUNGRY 1 0 1 1 +HUNGER 2 0 2 2 +HUNG 10 0 10 10 +HUNDREDTH 1 0 1 1 +HUNDREDS 2 0 2 2 +HUNDRED 18 0 18 18 +HUMPY 2 0 2 2 +HUMOUR 1 0 1 1 +HUMOROUS 3 0 3 3 +HUMOR 1 0 1 1 +HUMMING 1 0 1 1 +HUMILITY 1 0 1 1 +HUMILIATE 1 0 1 1 +HUMBUG 1 0 1 1 +HUMBLY 2 0 2 2 +HUMBLE 5 0 5 5 +HUMANITY 2 0 2 2 +HUMANE 1 0 1 1 +HUGGED 1 0 1 1 +HUGE 7 0 7 7 +HUES 1 0 1 1 +HUE 1 0 1 1 +HUDSON 1 0 1 1 +HUDDLED 1 0 1 1 +HOWL 1 0 1 1 +HOWEVER 29 0 29 29 +HOVER 1 0 1 1 +HOUSEWORK 1 0 1 1 +HOUSES 1 0 1 1 +HOUSEMAID 2 0 2 2 +HOUSEKEEPER 2 0 2 2 +HOUSEHOLDS 1 0 1 1 +HOUSEHOLD'S 1 0 1 1 +HOUSEHOLD 4 0 4 4 +HOURS 13 0 13 13 +HOUR 12 0 12 12 +HOUNDED 1 0 1 1 +HOTELS 1 0 1 1 +HOTEL 7 0 7 7 +HOSTILITY 1 0 1 1 +HOSPITALITY 4 0 4 4 +HOSPITABLY 1 0 1 1 +HOSPITABLE 1 0 1 1 +HORSES 6 0 6 6 +HORSEMEN 1 0 1 1 +HORROR 2 0 2 2 +HORRID 1 0 1 1 +HORRIBLY 2 0 2 2 +HORRIBLE 3 0 3 3 +HORNFUL 1 0 1 1 +HORIZON 3 0 3 3 +HORATIO 2 0 2 2 +HOPKINSON 2 0 2 2 +HOPKINS'S 1 0 1 1 +HOPKINS 4 0 4 4 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPED 2 0 2 2 +HOOKING 1 0 1 1 +HOOKED 1 0 1 1 +HONOURED 1 0 1 1 +HONORIFIC 2 0 2 2 +HONORED 1 0 1 1 +HONEY 1 0 1 1 +HONESTY 1 0 1 1 +HONESTLY 2 0 2 2 +HONEST 5 0 5 5 +HON 1 0 1 1 +HOMILY 1 0 1 1 +HOMES 2 0 2 2 +HOMELY 3 0 3 3 +HOME 23 0 23 23 +HOLMES 10 0 10 10 +HOLLYHOCKS 1 0 1 1 +HOLLOW 3 0 3 3 +HOLINESS 2 0 2 2 +HOLIDAYS 3 0 3 3 +HOLES 1 0 1 1 +HOLE 1 0 1 1 +HOLDS 2 0 2 2 +HOLDING 1 0 1 1 +HOLBORN 1 0 1 1 +HOBSON'S 1 0 1 1 +HOBS 1 0 1 1 +HO 1 0 1 1 +HITHERTO 1 0 1 1 +HISTORY 5 0 5 5 +HISTORIC 1 0 1 1 +HISTORIANS 1 0 1 1 +HISTORIAN 1 0 1 1 +HISSING 1 0 1 1 +HISS 1 0 1 1 +HIRE 1 0 1 1 +HINT 2 0 2 2 +HINDERED 1 0 1 1 +HIND 1 0 1 1 +HIMSELF 49 0 49 49 +HILLY 1 0 1 1 +HILL 4 0 4 4 +HIGHNESS 1 0 1 1 +HIGHLY 2 0 2 2 +HIGHER 2 0 2 2 +HIGH 18 0 18 18 +HIERARCHY 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HIDE 3 0 3 3 +HIDDEN 3 0 3 3 +HICKEY 1 0 1 1 +HEWN 1 0 1 1 +HESTER 11 0 11 11 +HESITATION 1 0 1 1 +HESITATING 2 0 2 2 +HESITATED 1 0 1 1 +HERS 2 0 2 2 +HERON 1 0 1 1 +HEROINE 1 0 1 1 +HEROIC 2 0 2 2 +HERO 3 0 3 3 +HERMOCRATES 1 0 1 1 +HERETICS 2 0 2 2 +HEREDITY 1 0 1 1 +HEREAFTER 3 0 3 3 +HERE'S 1 0 1 1 +HERALDED 1 0 1 1 +HENRY'S 1 0 1 1 
+HENRY 2 0 2 2 +HENLEY 1 0 1 1 +HENCE 4 0 4 4 +HEMMED 1 0 1 1 +HELPLESS 3 0 3 3 +HELPING 1 0 1 1 +HELP 18 0 18 18 +HELMSMAN 1 0 1 1 +HELLENES 1 0 1 1 +HELL 2 0 2 2 +HEIGHTS 1 0 1 1 +HEIGHTENING 1 0 1 1 +HEIGHT 1 0 1 1 +HEELS 1 0 1 1 +HEDGE 1 0 1 1 +HEAVY 13 0 13 13 +HEAVING 2 0 2 2 +HEAVILY 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVENLY 1 0 1 1 +HEAVED 1 0 1 1 +HEAT 2 0 2 2 +HEARTY 1 0 1 1 +HEARTILY 2 0 2 2 +HEARTHSTONES 1 0 1 1 +HEARTH 3 0 3 3 +HEARTED 1 0 1 1 +HEARSE 2 0 2 2 +HEARS 2 0 2 2 +HEARING 1 0 1 1 +HEARD 19 0 19 19 +HEAP 2 0 2 2 +HEALTH 6 0 6 6 +HEADS 3 0 3 3 +HEADQUARTERS 1 0 1 1 +HEADLONGS 1 0 1 1 +HEADLONG 1 0 1 1 +HEADING 1 0 1 1 +HE'LL 1 0 1 1 +HAWORTH 1 0 1 1 +HAWKS 1 0 1 1 +HAWKEYE 5 0 5 5 +HAWK'S 1 0 1 1 +HAVEN'T 6 0 6 6 +HAUNTED 1 0 1 1 +HAUGHTY 4 0 4 4 +HATS 1 0 1 1 +HATRED 3 0 3 3 +HATH 4 0 4 4 +HATER 1 0 1 1 +HATEFUL 1 0 1 1 +HATED 1 0 1 1 +HATE 1 0 1 1 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 4 0 4 4 +HASTE 1 0 1 1 +HARVEST 1 0 1 1 +HARRYING 1 0 1 1 +HARRY 3 0 3 3 +HARROW 1 0 1 1 +HARRIED 1 0 1 1 +HARPOONER 1 0 1 1 +HARNESS 1 0 1 1 +HARMONY 2 0 2 2 +HARMON 4 0 4 4 +HARM 2 0 2 2 +HARE 1 0 1 1 +HARDSHIPS 1 0 1 1 +HARDLY 14 0 14 14 +HARDER 1 0 1 1 +HARASSING 1 0 1 1 +HARANGUING 1 0 1 1 +HAR 1 0 1 1 +HAPPY 16 0 16 16 +HAPPINESS 6 0 6 6 +HAPPILY 1 0 1 1 +HAPPENS 3 0 3 3 +HAPPENING 2 0 2 2 +HAPLESS 1 0 1 1 +HANSOM 1 0 1 1 +HANS 2 0 2 2 +HANGS 1 0 1 1 +HANGERS 1 0 1 1 +HANG 1 0 1 1 +HANDSOMEST 1 0 1 1 +HANDSOMELY 1 0 1 1 +HANDLE 1 0 1 1 +HANDKERCHIEFS 1 0 1 1 +HANDFUL 1 0 1 1 +HANDED 3 0 3 3 +HAND 29 0 29 29 +HAMMER 3 0 3 3 +HAMLET'S 1 0 1 1 +HAMBURG 1 0 1 1 +HALT 1 0 1 1 +HALLWAY 1 0 1 1 +HALLS 3 0 3 3 +HAIRS 1 0 1 1 +HAILING 1 0 1 1 +HADN'T 3 0 3 3 +HABITUALLY 1 0 1 1 +HABITUAL 2 0 2 2 +HABITS 4 0 4 4 +HABITATION 1 0 1 1 +HABIT 7 0 7 7 +GUTTER 1 0 1 1 +GUSTS 2 0 2 2 +GUST 1 0 1 1 +GUNS 1 0 1 1 +GUN 1 0 1 1 +GULLET 1 0 1 1 +GULF 2 0 2 2 +GUILTY 2 0 2 2 +GUILT 2 0 2 2 +GUIDED 2 0 2 2 +GUIDE 2 0 2 2 +GUERRILLA 1 0 1 1 +GUARDS 3 0 3 3 +GUARD 1 0 1 1 +GRUDGE 1 0 1 1 +GROWTH 5 0 5 5 +GROWN 7 0 7 7 +GROWLED 2 0 2 2 +GROWING 4 0 4 4 +GROW 4 0 4 4 +GROUPS 2 0 2 2 +GROUP 2 0 2 2 +GROUNDS 2 0 2 2 +GROSS 1 0 1 1 +GROPING 1 0 1 1 +GROPE 1 0 1 1 +GROANS 2 0 2 2 +GROANING 1 0 1 1 +GROANED 2 0 2 2 +GROAN 1 0 1 1 +GRINNING 1 0 1 1 +GRINDER 1 0 1 1 +GRIN 1 0 1 1 +GRIM 3 0 3 3 +GRIFFIN 1 0 1 1 +GRIEVED 1 0 1 1 +GRIEF 2 0 2 2 +GREW 5 0 5 5 +GREETINGS 1 0 1 1 +GREETING 4 0 4 4 +GREETED 1 0 1 1 +GREET 1 0 1 1 +GREEK 4 0 4 4 +GREATNESS 2 0 2 2 +GREATLY 10 0 10 10 +GREATEST 7 0 7 7 +GREATER 9 0 9 9 +GRAVEYARD 3 0 3 3 +GRAVES 1 0 1 1 +GRAVE 4 0 4 4 +GRATITUDE 2 0 2 2 +GRATIFICATION 3 0 3 3 +GRATE 1 0 1 1 +GRASS 11 0 11 11 +GRASPING 3 0 3 3 +GRANTED 3 0 3 3 +GRANDSON 1 0 1 1 +GRANDMOTHER 2 0 2 2 +GRANDFATHER 4 0 4 4 +GRANDER 2 0 2 2 +GRAINS 1 0 1 1 +GRADUALLY 5 0 5 5 +GRADATED 1 0 1 1 +GRACIOUSLY 1 0 1 1 +GRACIOUS 2 0 2 2 +GRACES 2 0 2 2 +GRACEFULLY 1 0 1 1 +GRACEFUL 1 0 1 1 +GRACE 12 0 12 12 +GOWN 1 0 1 1 +GOVERNOR'S 1 0 1 1 +GOVERNESS 2 0 2 2 +GOTHIC 3 0 3 3 +GOT 13 0 13 13 +GOSSIP 2 0 2 2 +GOSPEL 2 0 2 2 +GORGEOUS 1 0 1 1 +GORDONS 1 0 1 1 +GORDON 1 0 1 1 +GOODS 5 0 5 5 +GOODNESS 1 0 1 1 +GOODLY 1 0 1 1 +GOLIATH 2 0 2 2 +GOLF 1 0 1 1 +GOLDEN 15 0 15 15 +GOLD 15 0 15 15 +GOES 2 0 2 2 +GODLY 1 0 1 1 +GODLESS 1 0 1 1 +GODDESS 1 0 1 1 +GOD'S 1 0 1 1 +GOD 33 0 33 33 +GOBY 1 0 1 1 +GO 37 0 37 37 +GNARLED 1 0 1 1 +GLUE 1 0 1 1 +GLOWING 3 0 3 3 +GLOW 3 0 3 3 +GLOVES 5 0 5 5 +GLOVED 1 0 1 1 +GLOSSY 2 0 2 2 +GLORY 1 0 1 1 +GLORIOUS 2 0 2 2 +GLORIES 1 0 1 1 +GLOOMY 1 0 1 1 +GLOOMILY 2 0 2 2 +GLOBE 1 0 1 1 
+GLITTERING 4 0 4 4 +GLITTERED 2 0 2 2 +GLINDA 1 0 1 1 +GLIMMERING 1 0 1 1 +GLIDING 1 0 1 1 +GLIDES 1 0 1 1 +GLIDED 2 0 2 2 +GLEANER 1 0 1 1 +GLEAMS 1 0 1 1 +GLEAMING 4 0 4 4 +GLEAMED 1 0 1 1 +GLEAM 1 0 1 1 +GLASS 6 0 6 6 +GLARE 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 5 0 5 5 +GLAMOUR 1 0 1 1 +GLADNESS 1 0 1 1 +GLADLY 1 0 1 1 +GLAD 12 0 12 12 +GIVING 7 0 7 7 +GIVES 3 0 3 3 +GIVEN 15 0 15 15 +GIRLS 7 0 7 7 +GIRL'S 2 0 2 2 +GIRL 25 0 25 25 +GILLIKINS 2 0 2 2 +GILDED 3 0 3 3 +GILD 1 0 1 1 +GIGANTIC 1 0 1 1 +GIFTS 2 0 2 2 +GIFT 6 0 6 6 +GHOSTS 2 0 2 2 +GHOSTLY 1 0 1 1 +GHOST 2 0 2 2 +GHISIZZLE 4 0 4 4 +GHASTLY 1 0 1 1 +GETTING 6 0 6 6 +GET 30 0 30 30 +GESTURES 1 0 1 1 +GESTURE 2 0 2 2 +GERMANTOWN 1 0 1 1 +GERM 1 0 1 1 +GEORGE 3 0 3 3 +GEOMETRICAL 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 5 0 5 5 +GENTLEWOMAN 1 0 1 1 +GENTLENESS 1 0 1 1 +GENTLEMAN'S 1 0 1 1 +GENTLE 7 0 7 7 +GENTILITY 1 0 1 1 +GENIUS 2 0 2 2 +GENEROUSLY 1 0 1 1 +GENEROUS 4 0 4 4 +GENERATION 2 0 2 2 +GENERATED 1 0 1 1 +GENERALS 3 0 3 3 +GENEALOGIES 1 0 1 1 +GAZING 2 0 2 2 +GAZED 2 0 2 2 +GAZE 3 0 3 3 +GAY 1 0 1 1 +GATHERINGS 1 0 1 1 +GATHERING 2 0 2 2 +GATHERED 2 0 2 2 +GATES 6 0 6 6 +GATE 3 0 3 3 +GASPED 1 0 1 1 +GASEOUS 1 0 1 1 +GAS 1 0 1 1 +GARDENING 1 0 1 1 +GARDENER'S 1 0 1 1 +GARDENER 1 0 1 1 +GARDEN 6 0 6 6 +GARB 1 0 1 1 +GAMEWELL'S 1 0 1 1 +GALVANOMETER 1 0 1 1 +GALLEY 1 0 1 1 +GALLANT 4 0 4 4 +GAIN 2 0 2 2 +GABLES 1 0 1 1 +FUZZY 1 0 1 1 +FURTHERED 1 0 1 1 +FURTHER 9 0 9 9 +FURNITURE 4 0 4 4 +FURNISHING 1 0 1 1 +FURNISHED 1 0 1 1 +FURNISH 1 0 1 1 +FURLED 1 0 1 1 +FURIOUSLY 2 0 2 2 +FUNERAL 1 0 1 1 +FUNDAMENTAL 1 0 1 1 +FUNCTIONS 1 0 1 1 +FUNCTION 1 0 1 1 +FUMBLED 1 0 1 1 +FULLY 3 0 3 3 +FULL 18 0 18 18 +FULFILLED 2 0 2 2 +FUGITIVES 1 0 1 1 +FUGITIVE'S 1 0 1 1 +FRUSTRATED 1 0 1 1 +FRUITS 1 0 1 1 +FRUIT 1 0 1 1 +FROZEN 2 0 2 2 +FROWNINGLY 1 0 1 1 +FROWNING 2 0 2 2 +FROWNED 2 0 2 2 +FROWN 1 0 1 1 +FROTHY 1 0 1 1 +FROST 1 0 1 1 +FRONTISPIECE 1 0 1 1 +FRONTIER 3 0 3 3 +FRONT 6 0 6 6 +FROM 187 0 187 187 +FROLIC 2 0 2 2 +FRO 1 0 1 1 +FRIVOLOUS 2 0 2 2 +FRINGED 2 0 2 2 +FRIGHTFUL 1 0 1 1 +FRIGATE 2 0 2 2 +FRIENDSHIP 1 0 1 1 +FRIENDS 8 0 8 8 +FRIENDLY 5 0 5 5 +FRIEND'S 2 0 2 2 +FRIDAY 1 0 1 1 +FRICTION 1 0 1 1 +FRETTING 1 0 1 1 +FRESHENS 1 0 1 1 +FRESH 6 0 6 6 +FREQUENTLY 3 0 3 3 +FREQUENTER 1 0 1 1 +FREQUENT 3 0 3 3 +FRENZY 1 0 1 1 +FRENCH 11 0 11 11 +FREELY 2 0 2 2 +FREEDOM 4 0 4 4 +FREED 1 0 1 1 +FREE 18 0 18 18 +FRECKLES 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCIS 3 0 3 3 +FRANCE 6 0 6 6 +FRAMEWORK 1 0 1 1 +FRAMED 1 0 1 1 +FRAME 4 0 4 4 +FRAIL 1 0 1 1 +FRAGRANCE 1 0 1 1 +FRAGMENT 2 0 2 2 +FRACTURED 1 0 1 1 +FRACTURE 1 0 1 1 +FOX 1 0 1 1 +FOURTH 2 0 2 2 +FOURTEEN 1 0 1 1 +FOUR 12 0 12 12 +FOUNTAINS 1 0 1 1 +FOUNDING 1 0 1 1 +FOUNDATION 1 0 1 1 +FOUL 1 0 1 1 +FOUGHT 1 0 1 1 +FORWARD 5 0 5 5 +FORTY 2 0 2 2 +FORTUNES 2 0 2 2 +FORTUNE 8 0 8 8 +FORTUNATELY 1 0 1 1 +FORTUNATE 2 0 2 2 +FORTUITOUS 1 0 1 1 +FORTNIGHT 1 0 1 1 +FORTIFIED 2 0 2 2 +FORTHWITH 3 0 3 3 +FORTH 9 0 9 9 +FORT 2 0 2 2 +FORSAKE 1 0 1 1 +FORMS 6 0 6 6 +FORMING 1 0 1 1 +FORMIDABLE 1 0 1 1 +FORMER 7 0 7 7 +FORMED 7 0 7 7 +FORMALITY 1 0 1 1 +FORMALITIES 1 0 1 1 +FORMAL 1 0 1 1 +FORM 12 0 12 12 +FORKED 1 0 1 1 +FORGOTTEN 4 0 4 4 +FORGOT 1 0 1 1 +FORGIVE 6 0 6 6 +FORGING 1 0 1 1 +FORGETTING 1 0 1 1 +FORGETFULNESS 1 0 1 1 +FORGET 6 0 6 6 +FORGED 3 0 3 3 +FORGE 1 0 1 1 +FORETOLD 2 0 2 2 +FOREST 6 0 6 6 +FORESEEING 1 0 1 1 +FORESAW 1 0 1 1 +FOREIGNER 1 0 1 1 +FOREIGN 1 0 1 1 +FOREHEAD 1 0 1 1 +FOREFINGER 1 0 1 1 +FORCIBLE 1 0 1 1 +FORCES 2 0 2 2 +FORCED 1 0 1 1 +FORCE 17 0 17 17 
+FORBES 1 0 1 1 +FOOTNOTE 2 0 2 2 +FOOTMEN 1 0 1 1 +FOOTMAN 1 0 1 1 +FOOTED 1 0 1 1 +FOOT 9 0 9 9 +FOOLS 1 0 1 1 +FOOLISHLY 2 0 2 2 +FOOLISH 3 0 3 3 +FOOD 1 0 1 1 +FONDNESS 1 0 1 1 +FOND 5 0 5 5 +FOLLOWS 3 0 3 3 +FOLLOWING 4 0 4 4 +FOLLOWER 1 0 1 1 +FOLLOW 7 0 7 7 +FOLLIES 1 0 1 1 +FOLIAGE 1 0 1 1 +FOLDED 1 0 1 1 +FOLD 2 0 2 2 +FOES 2 0 2 2 +FOCUS 1 0 1 1 +FOAM 4 0 4 4 +FLY 4 0 4 4 +FLUX 2 0 2 2 +FLUSHED 3 0 3 3 +FLUKES 1 0 1 1 +FLOYD'S 1 0 1 1 +FLOWERS 11 0 11 11 +FLOWED 1 0 1 1 +FLOW 2 0 2 2 +FLOURISH 1 0 1 1 +FLOORS 1 0 1 1 +FLOOR 10 0 10 10 +FLOOD 2 0 2 2 +FLOATED 1 0 1 1 +FLOAT 1 0 1 1 +FLINT 1 0 1 1 +FLING 1 0 1 1 +FLIES 1 0 1 1 +FLICKER 2 0 2 2 +FLEW 1 0 1 1 +FLESH 5 0 5 5 +FLEETING 1 0 1 1 +FLEECY 1 0 1 1 +FLEECES 1 0 1 1 +FLEECE 3 0 3 3 +FLED 3 0 3 3 +FLAX 1 0 1 1 +FLATTERY 1 0 1 1 +FLATTERS 2 0 2 2 +FLATTERING 1 0 1 1 +FLATTERED 1 0 1 1 +FLAT 1 0 1 1 +FLASHED 3 0 3 3 +FLASH 3 0 3 3 +FLAPS 1 0 1 1 +FLAP 2 0 2 2 +FLANKED 1 0 1 1 +FLAMES 1 0 1 1 +FLAMED 2 0 2 2 +FLAME 3 0 3 3 +FLAGSTONES 1 0 1 1 +FLAG 1 0 1 1 +FIXES 1 0 1 1 +FIXED 3 0 3 3 +FIX 2 0 2 2 +FITZOOTH 7 0 7 7 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FITLY 1 0 1 1 +FISTS 2 0 2 2 +FISHES 3 0 3 3 +FISHERMAN 1 0 1 1 +FISH 1 0 1 1 +FIRMNESS 1 0 1 1 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRESIDES 1 0 1 1 +FIRESIDE 1 0 1 1 +FIRES 1 0 1 1 +FIREPLACE 2 0 2 2 +FIRED 1 0 1 1 +FINISHED 4 0 4 4 +FINISH 1 0 1 1 +FINGERS 6 0 6 6 +FINGER 2 0 2 2 +FINEST 1 0 1 1 +FINER 1 0 1 1 +FINELY 1 0 1 1 +FINED 1 0 1 1 +FINDS 2 0 2 2 +FINDING 3 0 3 3 +FINANCIAL 1 0 1 1 +FINALLY 8 0 8 8 +FINALE 1 0 1 1 +FINAL 5 0 5 5 +FILLS 2 0 2 2 +FILE 1 0 1 1 +FIGURES 4 0 4 4 +FIGURED 1 0 1 1 +FIGURE 6 0 6 6 +FIGHTING 4 0 4 4 +FIFTY 6 0 6 6 +FIFTH 1 0 1 1 +FIFTEENTH 2 0 2 2 +FIFTEEN 1 0 1 1 +FIERCELY 1 0 1 1 +FIERCE 4 0 4 4 +FIELDS 4 0 4 4 +FIELD 6 0 6 6 +FEWER 1 0 1 1 +FEW 28 0 28 28 +FEVER 2 0 2 2 +FEUDS 1 0 1 1 +FETCH 1 0 1 1 +FESTIVE 1 0 1 1 +FERVENT 1 0 1 1 +FENCE 1 0 1 1 +FEMININE 1 0 1 1 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 3 0 3 3 +FELLOW'S 1 0 1 1 +FELLOW 9 0 9 9 +FELLER 1 0 1 1 +FELICITY 2 0 2 2 +FEET 11 0 11 11 +FEELINGS 3 0 3 3 +FEEDING 1 0 1 1 +FEEDER 1 0 1 1 +FEED 2 0 2 2 +FEEBLE 2 0 2 2 +FEDERAL 3 0 3 3 +FED 1 0 1 1 +FEBRUARY 1 0 1 1 +FEATURE 1 0 1 1 +FEATHERS 1 0 1 1 +FEASTED 1 0 1 1 +FEAST 3 0 3 3 +FEASIBLE 1 0 1 1 +FEARS 3 0 3 3 +FEARLESS 1 0 1 1 +FEARING 2 0 2 2 +FEARFUL 1 0 1 1 +FEAREST 1 0 1 1 +FAVORABLY 1 0 1 1 +FAVOR 2 0 2 2 +FAULTS 1 0 1 1 +FAULTLESS 1 0 1 1 +FAULT 2 0 2 2 +FATTENED 1 0 1 1 +FATIGUE 2 0 2 2 +FATHOMS 6 0 6 6 +FATHOM 1 0 1 1 +FATHERS 2 0 2 2 +FATHER'S 4 0 4 4 +FATHER 28 0 28 28 +FATALITY 2 0 2 2 +FAT 3 0 3 3 +FASTEST 1 0 1 1 +FASTEN 1 0 1 1 +FAST 7 0 7 7 +FASHIONED 1 0 1 1 +FASHIONABLE 1 0 1 1 +FASCINATION 2 0 2 2 +FARMS 1 0 1 1 +FARMHOUSES 1 0 1 1 +FARMERS 1 0 1 1 +FARMER'S 1 0 1 1 +FARMER 5 0 5 5 +FARM 8 0 8 8 +FAREWELL 2 0 2 2 +FANTASY 1 0 1 1 +FANNING 1 0 1 1 +FANCY 3 0 3 3 +FANCIES 2 0 2 2 +FANCIED 2 0 2 2 +FANATIC 1 0 1 1 +FAN 2 0 2 2 +FAMOUSLY 2 0 2 2 +FAMOUS 3 0 3 3 +FAMILY 16 0 16 16 +FAMILIES 3 0 3 3 +FAMILIARITY 3 0 3 3 +FAMILIAR 4 0 4 4 +FAME 2 0 2 2 +FALSE 6 0 6 6 +FALLS 5 0 5 5 +FALCONS 1 0 1 1 +FAITHFUL 1 0 1 1 +FAITH 9 0 9 9 +FAIRLY 4 0 4 4 +FAINTNESS 1 0 1 1 +FAINTLY 3 0 3 3 +FAINTING 4 0 4 4 +FAINT 3 0 3 3 +FAIN 1 0 1 1 +FAILURE 2 0 2 2 +FAILS 1 0 1 1 +FAILING 1 0 1 1 +FAILED 2 0 2 2 +FAIL 4 0 4 4 +FADES 1 0 1 1 +FADED 1 0 1 1 +FADE 4 0 4 4 +FACULTIES 1 0 1 1 +FACTS 4 0 4 4 +FACTORS 1 0 1 1 +FACTOR 1 0 1 1 +FACTIONS 2 0 2 2 +FACTION 1 0 1 1 +FACT 23 0 23 23 +FACILITATED 1 0 1 1 +FACILITATE 1 0 1 1 +FACES 4 0 4 4 +FACED 
3 0 3 3 +FACE 29 0 29 29 +FABULOUS 1 0 1 1 +EYES 44 0 44 44 +EYELIDS 1 0 1 1 +EYED 1 0 1 1 +EXULTING 1 0 1 1 +EXULTATION 1 0 1 1 +EXTREMELY 2 0 2 2 +EXTREME 1 0 1 1 +EXTRAORDINARY 2 0 2 2 +EXTRACT 1 0 1 1 +EXTRA 1 0 1 1 +EXTINGUISHED 2 0 2 2 +EXTINCTION 1 0 1 1 +EXTINCT 1 0 1 1 +EXTERIOR 1 0 1 1 +EXTENT 6 0 6 6 +EXTENSIVE 1 0 1 1 +EXTENDED 3 0 3 3 +EXTEND 2 0 2 2 +EXQUISITE 3 0 3 3 +EXPULSION 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSIVE 1 0 1 1 +EXPRESSIONS 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSING 2 0 2 2 +EXPRESSED 4 0 4 4 +EXPRESS 4 0 4 4 +EXPOSURE 1 0 1 1 +EXPONENT 2 0 2 2 +EXPLOSION 1 0 1 1 +EXPLORE 2 0 2 2 +EXPLOITS 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINED 1 0 1 1 +EXPLAIN 4 0 4 4 +EXPERIMENTALLY 1 0 1 1 +EXPERIENCING 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPERIENCE 7 0 7 7 +EXPENSIVE 1 0 1 1 +EXPENDITURE 4 0 4 4 +EXPEDITION 4 0 4 4 +EXPECTED 3 0 3 3 +EXPECTATIONS 2 0 2 2 +EXPECT 4 0 4 4 +EXPANDED 1 0 1 1 +EXOTICS 1 0 1 1 +EXISTING 3 0 3 3 +EXISTENT 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 4 0 4 4 +EXILE 1 0 1 1 +EXHORT 1 0 1 1 +EXHIBITS 1 0 1 1 +EXHIBITION 2 0 2 2 +EXHIBITED 1 0 1 1 +EXHIBIT 2 0 2 2 +EXHAUSTED 1 0 1 1 +EXHALE 1 0 1 1 +EXERTIONS 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISING 1 0 1 1 +EXERCISED 1 0 1 1 +EXERCISE 1 0 1 1 +EXEMPLIFIES 1 0 1 1 +EXEMPLARY 1 0 1 1 +EXECUTIVE 1 0 1 1 +EXECUTED 1 0 1 1 +EXCLUDED 2 0 2 2 +EXCLAIMED 3 0 3 3 +EXCITING 2 0 2 2 +EXCITEMENT 3 0 3 3 +EXCITE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONS 1 0 1 1 +EXCEPTION 2 0 2 2 +EXCEPT 6 0 6 6 +EXCELLENT 5 0 5 5 +EXCELLENCY'S 1 0 1 1 +EXCELLENCY 2 0 2 2 +EXCELLENCE 1 0 1 1 +EXCEEDING 1 0 1 1 +EXCEEDED 1 0 1 1 +EXCEED 1 0 1 1 +EXAMPLE 2 0 2 2 +EXAMINED 4 0 4 4 +EXAMINE 4 0 4 4 +EXAMINATION 8 0 8 8 +EXALTED 1 0 1 1 +EXALT 1 0 1 1 +EXAGGERATED 1 0 1 1 +EXACTLY 8 0 8 8 +EXACT 5 0 5 5 +EX 2 0 2 2 +EVOLVING 1 0 1 1 +EVOLVED 1 0 1 1 +EVOLUTION 3 0 3 3 +EVOKED 1 0 1 1 +EVOKE 1 0 1 1 +EVIL 6 0 6 6 +EVIDENTLY 4 0 4 4 +EVIDENT 5 0 5 5 +EVIDENCE 5 0 5 5 +EVERYWHERE 7 0 7 7 +EVERYTHING 16 0 16 16 +EVERYBODY 7 0 7 7 +EVERLASTING 2 0 2 2 +EVENTS 8 0 8 8 +EVENT 4 0 4 4 +EVENLY 2 0 2 2 +EVASIVELY 1 0 1 1 +EVAPORATION 1 0 1 1 +EVADED 1 0 1 1 +EVA'S 1 0 1 1 +EUROPE 3 0 3 3 +EUCHARIST 1 0 1 1 +ETHICAL 1 0 1 1 +ETERNITY 2 0 2 2 +ETERNAL 2 0 2 2 +ETCHINGS 1 0 1 1 +ET 3 0 3 3 +ESTRANGEMENT 1 0 1 1 +ESTIMATE 1 0 1 1 +ESTABLISHED 3 0 3 3 +ESSEX 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 2 0 2 2 +ESSENCE 1 0 1 1 +ESQUIRE 2 0 2 2 +ESPECIALLY 6 0 6 6 +ESCORT 4 0 4 4 +ESCAPED 1 0 1 1 +ESCAPE 4 0 4 4 +ESCAPADES 1 0 1 1 +ERRORS 1 0 1 1 +ERRONEOUS 2 0 2 2 +ERRING 1 0 1 1 +ERRAND 2 0 2 2 +ERR 1 0 1 1 +ERNEST 1 0 1 1 +ERIE 1 0 1 1 +ERECTED 1 0 1 1 +ERECT 1 0 1 1 +ERA 1 0 1 1 +EQUIP 1 0 1 1 +EQUATION 1 0 1 1 +EQUALS 1 0 1 1 +EQUALLY 4 0 4 4 +EQUAL 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLE 3 0 3 3 +EPISODE 1 0 1 1 +ENVY 1 0 1 1 +ENVIRONMENT 1 0 1 1 +ENVELOPMENT 1 0 1 1 +ENTRUSTED 1 0 1 1 +ENTRENCHED 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTRANCED 1 0 1 1 +ENTIRELY 6 0 6 6 +ENTIRE 2 0 2 2 +ENTHUSIASTIC 1 0 1 1 +ENTHUSIASM 1 0 1 1 +ENTERTAINS 1 0 1 1 +ENTERTAINMENT 3 0 3 3 +ENTERTAIN 2 0 2 2 +ENTERS 1 0 1 1 +ENTERPRISE 2 0 2 2 +ENTERING 2 0 2 2 +ENTANGLED 1 0 1 1 +ENOUGH 20 0 20 20 +ENORMOUSLY 2 0 2 2 +ENORMOUS 1 0 1 1 +ENNIS 1 0 1 1 +ENLISTED 1 0 1 1 +ENLIST 1 0 1 1 +ENJOYMENT 1 0 1 1 +ENJOYED 1 0 1 1 +ENJOY 2 0 2 2 +ENIGMA 1 0 1 1 +ENGORGED 1 0 1 1 +ENGLISHMAN 3 0 3 3 +ENGLISH 12 0 12 12 +ENGLAND 10 0 10 10 +ENGINEERS 2 0 2 2 +ENGINEER 4 0 4 4 +ENGINE 6 0 6 6 +ENGENDERS 1 0 1 1 +ENGAGEMENTS 1 0 1 1 +ENGAGED 5 0 5 5 +ENGAGE 1 0 1 1 +ENFORCED 1 0 1 1 +ENFORCE 3 0 3 3 +ENERGY 3 
0 3 3 +ENEMY'S 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 3 0 3 3 +ENDURES 1 0 1 1 +ENDURED 1 0 1 1 +ENDURE 1 0 1 1 +ENDS 1 0 1 1 +ENDOWED 1 0 1 1 +ENDLESS 1 0 1 1 +ENDEAVORING 1 0 1 1 +END 18 0 18 18 +ENCYCLOPAEDIA 1 0 1 1 +ENCOURAGED 1 0 1 1 +ENCOURAGE 2 0 2 2 +ENCLOSE 1 0 1 1 +ENACTED 1 0 1 1 +ENABLES 2 0 2 2 +ENABLED 3 0 3 3 +EMULATION 1 0 1 1 +EMPTY 7 0 7 7 +EMPLOYMENTS 2 0 2 2 +EMPLOYMENT 1 0 1 1 +EMPLOYING 1 0 1 1 +EMPLOYERS 1 0 1 1 +EMPLOYER 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPIRE 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMOTIONLESS 1 0 1 1 +EMOTION 1 0 1 1 +EMINENCES 1 0 1 1 +EMERGENCY 1 0 1 1 +EMERGENCE 2 0 2 2 +EMERALD 1 0 1 1 +EMBROIDERED 2 0 2 2 +EMBRACING 2 0 2 2 +EMBRACE 2 0 2 2 +EMBODIED 1 0 1 1 +EMBLEM 1 0 1 1 +EMBITTERING 1 0 1 1 +EMBITTERED 1 0 1 1 +EMBERS 1 0 1 1 +EMBARRASS 1 0 1 1 +ELSEWHERE 4 0 4 4 +ELSE 7 0 7 7 +ELOQUENT 1 0 1 1 +ELMHURST 2 0 2 2 +ELIZABETH 1 0 1 1 +ELIZA 3 0 3 3 +ELICITED 1 0 1 1 +ELF 1 0 1 1 +ELEVEN 4 0 4 4 +ELEMENTS 7 0 7 7 +ELEMENTARY 3 0 3 3 +ELEMENT 4 0 4 4 +ELEGANT 1 0 1 1 +ELEGANCE 2 0 2 2 +ELECTROPLATING 1 0 1 1 +ELECTROLYTIC 2 0 2 2 +ELECTRICITY 5 0 5 5 +ELECTRICAL 2 0 2 2 +ELECTRIC 4 0 4 4 +ELECTIONEERING 1 0 1 1 +ELECTION 3 0 3 3 +ELECTING 1 0 1 1 +ELECTED 1 0 1 1 +ELDERS 1 0 1 1 +ELDERLY 1 0 1 1 +ELDER 2 0 2 2 +ELAPSED 1 0 1 1 +EJACULATIONS 1 0 1 1 +EJACULATED 2 0 2 2 +EITHER'S 1 0 1 1 +EIGHTY 3 0 3 3 +EIGHTH 3 0 3 3 +EIGHTEENTH 2 0 2 2 +EIGHTEEN 14 0 14 14 +EIGHT 7 0 7 7 +EGYPTIAN 1 0 1 1 +EGYPT 1 0 1 1 +EFFORTS 4 0 4 4 +EFFORT 11 0 11 11 +EFFICIENCY 1 0 1 1 +EFFECTUALLY 1 0 1 1 +EFFECTUAL 2 0 2 2 +EFFECTS 3 0 3 3 +EFFECTIVENESS 1 0 1 1 +EFFECTIVELY 1 0 1 1 +EFFECTIVE 2 0 2 2 +EFFECTING 1 0 1 1 +EDWARD 3 0 3 3 +EDUCATION 5 0 5 5 +EDUCATED 2 0 2 2 +EDITORS 1 0 1 1 +EDITOR 1 0 1 1 +EDISONIA 1 0 1 1 +EDISON 16 0 16 16 +EDIFICE 1 0 1 1 +EDICTS 1 0 1 1 +EDICT 1 0 1 1 +EDGES 1 0 1 1 +EDGED 1 0 1 1 +EDGE 5 0 5 5 +EDDYING 1 0 1 1 +ECONOMY 2 0 2 2 +ECONOMIC 3 0 3 3 +ECHOING 1 0 1 1 +ECHOES 1 0 1 1 +ECCLESIASTICAL 1 0 1 1 +ECCENTRICITY 1 0 1 1 +EATEN 1 0 1 1 +EAT 5 0 5 5 +EASY 14 0 14 14 +EASTWARDS 1 0 1 1 +EASTERLY'S 1 0 1 1 +EAST 4 0 4 4 +EASILY 10 0 10 10 +EARTHLY 1 0 1 1 +EARTH'S 3 0 3 3 +EARTH 17 0 17 17 +EARNESTLY 1 0 1 1 +EARNEST 4 0 4 4 +EARNED 1 0 1 1 +EARLY 13 0 13 13 +EARLIER 6 0 6 6 +EAGLES 1 0 1 1 +EAGLE 1 0 1 1 +EAGERNESS 2 0 2 2 +EAGERLY 1 0 1 1 +EAGER 4 0 4 4 +EACH 24 0 24 24 +DYNAMO 2 0 2 2 +DYING 2 0 2 2 +DYIN 1 0 1 1 +DYE 1 0 1 1 +DWELLINGS 3 0 3 3 +DWELL 1 0 1 1 +DUTY 7 0 7 7 +DUTIES 8 0 8 8 +DUST 4 0 4 4 +DUSK 2 0 2 2 +DURATION 1 0 1 1 +DURABLE 1 0 1 1 +DUPLICATE 1 0 1 1 +DUMBFOUNDED 1 0 1 1 +DUMB 1 0 1 1 +DULY 1 0 1 1 +DULL 2 0 2 2 +DUG 1 0 1 1 +DUE 5 0 5 5 +DUDLEY 1 0 1 1 +DUDGEON 1 0 1 1 +DUCKS 1 0 1 1 +DUCKLINGS 1 0 1 1 +DUCHESS 3 0 3 3 +DUBIOUSLY 1 0 1 1 +DRY 4 0 4 4 +DRUNKENNESS 1 0 1 1 +DRUMS 1 0 1 1 +DROWNED 2 0 2 2 +DROPS 3 0 3 3 +DROPPING 1 0 1 1 +DROPPED 5 0 5 5 +DROP 3 0 3 3 +DROOPED 1 0 1 1 +DRIVING 1 0 1 1 +DRIVES 1 0 1 1 +DRIVEN 1 0 1 1 +DRINKS 1 0 1 1 +DRINK 4 0 4 4 +DRIFTS 1 0 1 1 +DRIED 1 0 1 1 +DREW 10 0 10 10 +DRESSES 1 0 1 1 +DRESSED 6 0 6 6 +DRESS 1 0 1 1 +DREDGED 1 0 1 1 +DREARY 1 0 1 1 +DREAMY 1 0 1 1 +DREAMT 1 0 1 1 +DREAMS 2 0 2 2 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 5 0 5 5 +DREADING 1 0 1 1 +DREADFUL 3 0 3 3 +DRAWN 7 0 7 7 +DRAWING 2 0 2 2 +DRAW 4 0 4 4 +DRAUGHT 1 0 1 1 +DRAPERIES 1 0 1 1 +DRANK 1 0 1 1 +DRAMATIST'S 1 0 1 1 +DRAMATIST 1 0 1 1 +DRAMATIC 2 0 2 2 +DRAMA 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGOONS 1 0 1 1 +DRAGON'S 1 0 1 1 +DRAGGING 1 0 1 1 +DRAGGED 2 0 2 2 +DRAG 1 0 1 1 +DOZEN 4 0 4 4 
+DOWNWARD 2 0 2 2 +DOVE 1 0 1 1 +DOUGLAS 4 0 4 4 +DOUGHY 1 0 1 1 +DOUGHNUTS 1 0 1 1 +DOUGH 1 0 1 1 +DOUBTS 2 0 2 2 +DOUBTLESS 2 0 2 2 +DOUBTINGLY 1 0 1 1 +DOUBTING 1 0 1 1 +DOUBTFULLY 1 0 1 1 +DOUBTFUL 2 0 2 2 +DOUBT 11 0 11 11 +DOUBLE 6 0 6 6 +DOTH 5 0 5 5 +DOST 3 0 3 3 +DOROTHY 1 0 1 1 +DORKING 1 0 1 1 +DORCAS 6 0 6 6 +DOORS 3 0 3 3 +DOOM 1 0 1 1 +DONKEY 1 0 1 1 +DONE 24 0 24 24 +DOMINIONS 1 0 1 1 +DOMINION 1 0 1 1 +DOMESTIC 2 0 2 2 +DOME 1 0 1 1 +DOLLS 2 0 2 2 +DOLLARS 7 0 7 7 +DOING 12 0 12 12 +DOGS 1 0 1 1 +DOGGED 1 0 1 1 +DOG 2 0 2 2 +DOESN'T 3 0 3 3 +DOCTRINES 3 0 3 3 +DOCTRINE 4 0 4 4 +DOCTORS 1 0 1 1 +DOCTOR 7 0 7 7 +DOBRYNA 3 0 3 3 +DIZZILY 1 0 1 1 +DIVORCE 1 0 1 1 +DIVISIONS 1 0 1 1 +DIVISION 2 0 2 2 +DIVINITY 1 0 1 1 +DIVING 4 0 4 4 +DIVINE 3 0 3 3 +DIVIDING 1 0 1 1 +DIVIDEND 1 0 1 1 +DIVIDED 4 0 4 4 +DIVIDE 2 0 2 2 +DIVERTING 1 0 1 1 +DIVERT 1 0 1 1 +DIVERSITY 1 0 1 1 +DISUSE 1 0 1 1 +DISUNITED 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRUSTING 2 0 2 2 +DISTRUSTFUL 1 0 1 1 +DISTRUST 1 0 1 1 +DISTRESSED 1 0 1 1 +DISTRESS 1 0 1 1 +DISTORTION 1 0 1 1 +DISTORTED 1 0 1 1 +DISTINGUISH 3 0 3 3 +DISTINCTLY 3 0 3 3 +DISTINCTIVE 1 0 1 1 +DISTINCTION 5 0 5 5 +DISTINCT 2 0 2 2 +DISTANT 4 0 4 4 +DISTANCE 6 0 6 6 +DISSENTERS 1 0 1 1 +DISSENTED 1 0 1 1 +DISQUIETUDE 1 0 1 1 +DISPUTE 3 0 3 3 +DISPOSITIONS 1 0 1 1 +DISPOSITION 3 0 3 3 +DISPOSES 1 0 1 1 +DISPOSED 3 0 3 3 +DISPOSE 1 0 1 1 +DISPOSAL 1 0 1 1 +DISPLEASURE 1 0 1 1 +DISPLAYING 1 0 1 1 +DISPLAYED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 3 0 3 3 +DISPENSE 1 0 1 1 +DISPENSATION 1 0 1 1 +DISPATCH 1 0 1 1 +DISPASSIONATE 1 0 1 1 +DISPARAGE 1 0 1 1 +DISOWN 1 0 1 1 +DISMISS 2 0 2 2 +DISMAY 1 0 1 1 +DISLOYAL 1 0 1 1 +DISLIKE 1 0 1 1 +DISK 1 0 1 1 +DISINCORPORATED 1 0 1 1 +DISHES 2 0 2 2 +DISH 3 0 3 3 +DISGUSTED 1 0 1 1 +DISGUST 3 0 3 3 +DISGUISE 2 0 2 2 +DISGRACE 3 0 3 3 +DISENGAGED 1 0 1 1 +DISEASED 1 0 1 1 +DISDAINFUL 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 1 0 1 1 +DISCUSS 2 0 2 2 +DISCREETLY 1 0 1 1 +DISCREET 1 0 1 1 +DISCOVERY 2 0 2 2 +DISCOVERERS 1 0 1 1 +DISCOVERED 3 0 3 3 +DISCOVER 3 0 3 3 +DISCOURSE 2 0 2 2 +DISCOURAGED 1 0 1 1 +DISCOURAGE 2 0 2 2 +DISCOMFORT 1 0 1 1 +DISCLOSES 1 0 1 1 +DISCIPLINE 5 0 5 5 +DISCERN 1 0 1 1 +DISBURDENED 1 0 1 1 +DISASTER 1 0 1 1 +DISAPPOINTMENT 6 0 6 6 +DISAPPEARS 1 0 1 1 +DISAPPEARED 1 0 1 1 +DISAPPEAR 2 0 2 2 +DISADVANTAGE 1 0 1 1 +DISABILITIES 1 0 1 1 +DIRTY 1 0 1 1 +DIRECTLY 4 0 4 4 +DIRECTIONS 2 0 2 2 +DIRECTION 6 0 6 6 +DIRECTING 1 0 1 1 +DIRECTED 2 0 2 2 +DIRECT 8 0 8 8 +DIP 1 0 1 1 +DIOCLETIAN 1 0 1 1 +DINNER 8 0 8 8 +DINING 1 0 1 1 +DINE 1 0 1 1 +DIMLY 1 0 1 1 +DIMINUTION 2 0 2 2 +DIMINISH 1 0 1 1 +DIMENSIONS 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILEMMA 1 0 1 1 +DILATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGNITARIES 1 0 1 1 +DIGNIFIED 4 0 4 4 +DIGBY 1 0 1 1 +DIG 1 0 1 1 +DIFFUSED 1 0 1 1 +DIFFICULTIES 3 0 3 3 +DIFFICULT 11 0 11 11 +DIFFERS 2 0 2 2 +DIFFERENTLY 1 0 1 1 +DIFFERENT 15 0 15 15 +DIFFER 1 0 1 1 +DIES 1 0 1 1 +DIED 5 0 5 5 +DIE 4 0 4 4 +DIDN'T 12 0 12 12 +DICE 1 0 1 1 +DIAMONDS 1 0 1 1 +DIALOGUE 3 0 3 3 +DIALECT 1 0 1 1 +DIAGRAMS 1 0 1 1 +DEWS 2 0 2 2 +DEVOUR 3 0 3 3 +DEVOTION 1 0 1 1 +DEVOTES 1 0 1 1 +DEVOTED 2 0 2 2 +DEVOTE 1 0 1 1 +DEVOLVE 1 0 1 1 +DEVOID 1 0 1 1 +DEVISING 1 0 1 1 +DEVIL 1 0 1 1 +DEVICES 2 0 2 2 +DEVELOPMENTS 1 0 1 1 +DEVELOPMENT 6 0 6 6 +DEVELOPED 2 0 2 2 +DEVELOP 1 0 1 1 +DETOUR 1 0 1 1 +DETESTS 1 0 1 1 +DETESTED 1 0 1 1 +DETESTABLE 1 0 1 1 +DETERMINING 1 0 1 1 +DETERMINED 5 0 5 5 +DETERMINE 1 0 1 1 +DETERMINATION 1 0 1 1 +DETECT 1 0 1 1 
+DETAINED 2 0 2 2 +DETAILS 2 0 2 2 +DETAIL 1 0 1 1 +DETACHMENT 1 0 1 1 +DESTRUCTIVE 1 0 1 1 +DESTRUCTION 2 0 2 2 +DESTROYED 1 0 1 1 +DESTINY 1 0 1 1 +DESTINED 2 0 2 2 +DESPITE 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 3 0 3 3 +DESPAIRING 1 0 1 1 +DESPAIR 4 0 4 4 +DESOLATION 1 0 1 1 +DESOLATE 1 0 1 1 +DESK 2 0 2 2 +DESIROUS 1 0 1 1 +DESIRES 1 0 1 1 +DESIRED 3 0 3 3 +DESIRE 4 0 4 4 +DESIRABLE 1 0 1 1 +DESIGNERS 1 0 1 1 +DESIGNATED 1 0 1 1 +DESIGN 3 0 3 3 +DESERVES 1 0 1 1 +DESERVED 1 0 1 1 +DESERVE 1 0 1 1 +DESERTS 1 0 1 1 +DESERTED 1 0 1 1 +DESERT 1 0 1 1 +DESCRIPTIONS 1 0 1 1 +DESCRIPTION 2 0 2 2 +DESCRIBING 1 0 1 1 +DESCRIBED 2 0 2 2 +DESCRIBE 3 0 3 3 +DESCENDS 1 0 1 1 +DESCENDING 2 0 2 2 +DESCENDED 2 0 2 2 +DESCENDANTS 1 0 1 1 +DESCEND 4 0 4 4 +DERIVED 1 0 1 1 +DERIVATIVE 1 0 1 1 +DEPUTY 1 0 1 1 +DEPTHS 2 0 2 2 +DEPTH 4 0 4 4 +DEPRIVED 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRESSED 1 0 1 1 +DEPRECIATING 1 0 1 1 +DEPRECATION 3 0 3 3 +DEPRAVED 1 0 1 1 +DEPOSITION 1 0 1 1 +DEPORTMENT 1 0 1 1 +DEPENDS 1 0 1 1 +DEPENDENT 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 2 0 2 2 +DEPARTURE 4 0 4 4 +DEPARTMENT 1 0 1 1 +DEPARTING 2 0 2 2 +DEPARTED 1 0 1 1 +DEPART 1 0 1 1 +DENY 5 0 5 5 +DENUNCIATION 1 0 1 1 +DENSELY 1 0 1 1 +DENSE 2 0 2 2 +DENOTE 1 0 1 1 +DENIES 1 0 1 1 +DENIED 2 0 2 2 +DEMURELY 1 0 1 1 +DEMURE 1 0 1 1 +DEMONSTRATION 1 0 1 1 +DEMOCRATIC 2 0 2 2 +DEMEANOUR 1 0 1 1 +DEMEANOR 1 0 1 1 +DEMANDS 1 0 1 1 +DEMANDED 2 0 2 2 +DELUSIVE 1 0 1 1 +DELIVERY 2 0 2 2 +DELIVERING 1 0 1 1 +DELIVERED 2 0 2 2 +DELIGHTFUL 4 0 4 4 +DELIGHTED 5 0 5 5 +DELICIOUSNESS 1 0 1 1 +DELICIOUS 1 0 1 1 +DELICACY 1 0 1 1 +DELIBERATIONS 1 0 1 1 +DELIBERATION 1 0 1 1 +DELEGATED 1 0 1 1 +DELAYED 2 0 2 2 +DELAY 3 0 3 3 +DELAWARES 1 0 1 1 +DELAWARE 1 0 1 1 +DEIGNED 1 0 1 1 +DEIGN 1 0 1 1 +DEGREES 3 0 3 3 +DEGREE 6 0 6 6 +DEFYING 1 0 1 1 +DEFTLY 1 0 1 1 +DEFORMITIES 1 0 1 1 +DEFINITION 3 0 3 3 +DEFINITE 2 0 2 2 +DEFINES 1 0 1 1 +DEFIED 1 0 1 1 +DEFIANCE 2 0 2 2 +DEFERENCE 2 0 2 2 +DEFENDS 1 0 1 1 +DEFENDERS 1 0 1 1 +DEFENDED 1 0 1 1 +DEFENDANT 1 0 1 1 +DEFENCE 1 0 1 1 +DEFECT 1 0 1 1 +DEER 3 0 3 3 +DEEPLY 4 0 4 4 +DEEPENING 1 0 1 1 +DEEP 11 0 11 11 +DEEDS 1 0 1 1 +DECREES 1 0 1 1 +DECREED 1 0 1 1 +DECREE 1 0 1 1 +DECORATIVE 1 0 1 1 +DECORATED 3 0 3 3 +DECOMPOSE 1 0 1 1 +DECLINING 2 0 2 2 +DECLINED 2 0 2 2 +DECLARES 1 0 1 1 +DECLARED 2 0 2 2 +DECLARE 2 0 2 2 +DECLARATION 1 0 1 1 +DECISION 3 0 3 3 +DECIDEDLY 1 0 1 1 +DECIDED 3 0 3 3 +DECIDE 4 0 4 4 +DECEPTIVE 1 0 1 1 +DECEMBER 1 0 1 1 +DECEIVED 1 0 1 1 +DECEIT 1 0 1 1 +DECANTERS 1 0 1 1 +DECADES 1 0 1 1 +DEBATE 1 0 1 1 +DEATH 19 0 19 19 +DEARS 1 0 1 1 +DEARLY 2 0 2 2 +DEAREST 2 0 2 2 +DEAR 22 0 22 22 +DEALER 1 0 1 1 +DEAL 10 0 10 10 +DEAF 1 0 1 1 +DAZZLING 2 0 2 2 +DAZED 1 0 1 1 +DAYS 16 0 16 16 +DAYLIGHT 2 0 2 2 +DAWN 2 0 2 2 +DAVID 8 0 8 8 +DAUNTLESS 1 0 1 1 +DAUGHTERS 1 0 1 1 +DAUGHTER 9 0 9 9 +DATING 1 0 1 1 +DATE 1 0 1 1 +DATA 2 0 2 2 +DARTED 3 0 3 3 +DARLING 1 0 1 1 +DARKNESS 3 0 3 3 +DARKENED 1 0 1 1 +DARING 2 0 2 2 +DARED 3 0 3 3 +DARE 3 0 3 3 +DANGERS 1 0 1 1 +DANGEROUS 4 0 4 4 +DANGER 9 0 9 9 +DANES 1 0 1 1 +DANCING 2 0 2 2 +DANCES 1 0 1 1 +DANCERS 1 0 1 1 +DANCER 1 0 1 1 +DANCED 2 0 2 2 +DANCE 4 0 4 4 +DAMSEL 1 0 1 1 +DAMNED 1 0 1 1 +DAMNABLE 1 0 1 1 +DAMASK 1 0 1 1 +DAMAGING 1 0 1 1 +DAMAGE 2 0 2 2 +DAINTY 1 0 1 1 +DAILY 3 0 3 3 +DAFT 1 0 1 1 +D 2 0 2 2 +CYRIL 2 0 2 2 +CYPRESS 1 0 1 1 +CYNTHIA'S 1 0 1 1 +CYNTHIA 3 0 3 3 +CYMBALS 1 0 1 1 +CUTTINGS 1 0 1 1 +CUT 5 0 5 5 +CUSTOMS 1 0 1 1 +CUSTOMER'S 1 0 1 1 +CUSTOMARILY 1 0 1 1 +CUSTOM 2 0 2 2 +CUSTODY 2 0 2 2 +CUSHIONED 
1 0 1 1 +CURVE 2 0 2 2 +CURTAINS 1 0 1 1 +CURTAIN 3 0 3 3 +CURSE 1 0 1 1 +CURRENT 8 0 8 8 +CURLY 1 0 1 1 +CURIOUSLY 1 0 1 1 +CURIOUS 4 0 4 4 +CURIOSITY 2 0 2 2 +CURBSTONE 1 0 1 1 +CUPS 1 0 1 1 +CUPBOARD 2 0 2 2 +CUP 3 0 3 3 +CUNNING 3 0 3 3 +CUMBERLAND'S 1 0 1 1 +CULTURE 4 0 4 4 +CULTIVATING 1 0 1 1 +CULTIVATE 1 0 1 1 +CULPRIT 1 0 1 1 +CULMINATING 2 0 2 2 +CUFFS 1 0 1 1 +CUB 1 0 1 1 +CRYSTALLIZE 1 0 1 1 +CRYING 1 0 1 1 +CRY 5 0 5 5 +CRUSHED 1 0 1 1 +CRUSH 3 0 3 3 +CRUMBLED 1 0 1 1 +CRUMBLE 1 0 1 1 +CRUISING 1 0 1 1 +CRUELTY 2 0 2 2 +CRUEL 1 0 1 1 +CRUCIFIX 2 0 2 2 +CRUCIFIED 1 0 1 1 +CROWNS 1 0 1 1 +CROWNING 2 0 2 2 +CROWN 6 0 6 6 +CROWDING 1 0 1 1 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSLY 1 0 1 1 +CROSSING 1 0 1 1 +CROSSED 3 0 3 3 +CROPS 1 0 1 1 +CROP 1 0 1 1 +CROOKED 3 0 3 3 +CRITICISM 1 0 1 1 +CRITICALLY 1 0 1 1 +CRISIS 1 0 1 1 +CRIMSON 1 0 1 1 +CRIMINAL 3 0 3 3 +CRIME 3 0 3 3 +CRIES 3 0 3 3 +CRIED 23 0 23 23 +CRESTED 1 0 1 1 +CREPT 1 0 1 1 +CREEPING 2 0 2 2 +CREEP 1 0 1 1 +CREEK 2 0 2 2 +CREED 1 0 1 1 +CREDIT 2 0 2 2 +CREATURES 2 0 2 2 +CREATURE 8 0 8 8 +CREATOR 1 0 1 1 +CREATIVE 1 0 1 1 +CREATIONS 1 0 1 1 +CREATION 2 0 2 2 +CREATING 1 0 1 1 +CREATES 1 0 1 1 +CREATED 2 0 2 2 +CREATE 3 0 3 3 +CREAM 1 0 1 1 +CREAKED 1 0 1 1 +CRAZY 2 0 2 2 +CRAWLING 1 0 1 1 +CRAWL 1 0 1 1 +CRANED 1 0 1 1 +CRANE 1 0 1 1 +CRAMPNESS 1 0 1 1 +CRAMPED 1 0 1 1 +CRACKING 1 0 1 1 +CRACKED 2 0 2 2 +COZIER 1 0 1 1 +COWARDLY 1 0 1 1 +COWARD 1 0 1 1 +COW 2 0 2 2 +COVERT 1 0 1 1 +COVERING 1 0 1 1 +COVERED 2 0 2 2 +COVER 2 0 2 2 +COVENANTERS 5 0 5 5 +COUSINS 3 0 3 3 +COUSIN'S 2 0 2 2 +COUSIN 7 0 7 7 +COURTIERS 2 0 2 2 +COURTESY 2 0 2 2 +COURAGEOUS 1 0 1 1 +COURAGE 6 0 6 6 +COUPLE 1 0 1 1 +COUNTY 7 0 7 7 +COUNTRIES 1 0 1 1 +COUNTING 1 0 1 1 +COUNTERPART 1 0 1 1 +COUNTERFEITED 1 0 1 1 +COUNTERACT 1 0 1 1 +COUNTENANCE 3 0 3 3 +COUNT 15 0 15 15 +COUCH 1 0 1 1 +COTTON 12 0 12 12 +COTTAGE 2 0 2 2 +COSTUME 2 0 2 2 +CORRUPTION 1 0 1 1 +CORRIDOR 1 0 1 1 +CORRESPONDING 1 0 1 1 +CORRESPOND 1 0 1 1 +CORRECTLY 1 0 1 1 +CORRECTED 1 0 1 1 +CORRECT 3 0 3 3 +CORNERS 4 0 4 4 +CORNER 13 0 13 13 +CORMORANT 1 0 1 1 +CORDIALLY 1 0 1 1 +CORDIALITY 1 0 1 1 +CORAL 1 0 1 1 +COQUETRY 1 0 1 1 +COPY 2 0 2 2 +COPPER 1 0 1 1 +COPIED 2 0 2 2 +COOKERY 2 0 2 2 +COOKED 1 0 1 1 +CONVULSION 1 0 1 1 +CONVIVIALITY 1 0 1 1 +CONVINCING 2 0 2 2 +CONVINCED 2 0 2 2 +CONVICTIONS 2 0 2 2 +CONVICTION 2 0 2 2 +CONVEYED 1 0 1 1 +CONVEXITY 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSION 1 0 1 1 +CONVERSING 1 0 1 1 +CONVERSE 2 0 2 2 +CONVERSATIONS 1 0 1 1 +CONVERSATIONAL 1 0 1 1 +CONVERSATION 6 0 6 6 +CONVENTIONALITY 1 0 1 1 +CONVENTIONAL 1 0 1 1 +CONVENTION 1 0 1 1 +CONTROLLING 1 0 1 1 +CONTRIVED 2 0 2 2 +CONTRIVANCE 2 0 2 2 +CONTRITION 1 0 1 1 +CONTRITE 1 0 1 1 +CONTRIBUTE 1 0 1 1 +CONTRASTING 1 0 1 1 +CONTRAST 4 0 4 4 +CONTRARY 5 0 5 5 +CONTRADICTIONS 1 0 1 1 +CONTRACTION 2 0 2 2 +CONTINUOUSLY 2 0 2 2 +CONTINUOUS 1 0 1 1 +CONTINUED 14 0 14 14 +CONTINUE 1 0 1 1 +CONTINUALLY 2 0 2 2 +CONTINUAL 3 0 3 3 +CONTINGENCY 1 0 1 1 +CONTINENT 1 0 1 1 +CONTESTED 1 0 1 1 +CONTEST 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTMENT 1 0 1 1 +CONTENTEDLY 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPTIBLE 1 0 1 1 +CONTEMPT 2 0 2 2 +CONTEMPORARY 1 0 1 1 +CONTEMPLATION 1 0 1 1 +CONTEMPLATED 1 0 1 1 +CONTAMINATION 1 0 1 1 +CONTAMINATED 1 0 1 1 +CONTAINS 1 0 1 1 +CONTAINERS 1 0 1 1 +CONTAGION 1 0 1 1 +CONSUMPTION 13 0 13 13 +CONSUMERS 2 0 2 2 +CONSUMER'S 1 0 1 1 +CONSUMER 5 0 5 5 +CONSUMED 1 0 1 1 +CONSUME 2 0 2 2 +CONSULTED 1 0 1 1 +CONSULTATION 1 0 1 1 +CONSULT 1 0 1 1 +CONSTRUCTION 4 0 4 4 +CONSTRUCTED 1 0 1 1 
+CONSTRAINEDLY 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTITUTION 3 0 3 3 +CONSTITUTES 1 0 1 1 +CONSTITUTED 1 0 1 1 +CONSTITUTE 1 0 1 1 +CONSTANT 3 0 3 3 +CONSTANCY 1 0 1 1 +CONSPIRACY 2 0 2 2 +CONSPICUOUS 8 0 8 8 +CONSOLE 1 0 1 1 +CONSOLATION 1 0 1 1 +CONSISTENTLY 1 0 1 1 +CONSIDERING 1 0 1 1 +CONSIDERED 5 0 5 5 +CONSIDERATIONS 1 0 1 1 +CONSIDERABLY 1 0 1 1 +CONSERVATION 1 0 1 1 +CONSEQUENTLY 1 0 1 1 +CONSEQUENT 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 5 0 5 5 +CONSENT 4 0 4 4 +CONSEIL 6 0 6 6 +CONSECRATED 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIENCES 1 0 1 1 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 1 0 1 1 +CONNECTED 3 0 3 3 +CONNECT 2 0 2 2 +CONJURATION 1 0 1 1 +CONJUNCTURE 1 0 1 1 +CONJECTURE 1 0 1 1 +CONGRESS 1 0 1 1 +CONGREGATED 1 0 1 1 +CONGRATULATIONS 1 0 1 1 +CONGRATULATION 1 0 1 1 +CONGRATULATE 1 0 1 1 +CONGO 1 0 1 1 +CONGENIAL 1 0 1 1 +CONFUSION 2 0 2 2 +CONFUSES 1 0 1 1 +CONFUSED 3 0 3 3 +CONFOUNDEDLY 1 0 1 1 +CONFLICTING 1 0 1 1 +CONFLICT 3 0 3 3 +CONFISCATED 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 3 0 3 3 +CONFINED 2 0 2 2 +CONFIDENT 1 0 1 1 +CONFIDENCE 7 0 7 7 +CONFIDE 1 0 1 1 +CONFIDANTS 1 0 1 1 +CONFESSION 1 0 1 1 +CONFESSED 1 0 1 1 +CONFESS 4 0 4 4 +CONFERS 1 0 1 1 +CONFEDERATE 1 0 1 1 +CONDUCTS 1 0 1 1 +CONDUCTORS 2 0 2 2 +CONDUCTED 1 0 1 1 +CONDUCIVE 1 0 1 1 +CONDITIONS 3 0 3 3 +CONDITION 11 0 11 11 +CONDENSED 1 0 1 1 +CONDENSATION 1 0 1 1 +CONDEMNATION 2 0 2 2 +CONCUR 1 0 1 1 +CONCOURSE 1 0 1 1 +CONCORD 1 0 1 1 +CONCLUSION 2 0 2 2 +CONCERTING 1 0 1 1 +CONCERNING 4 0 4 4 +CONCERNED 8 0 8 8 +CONCERN 1 0 1 1 +CONCEPTIONS 2 0 2 2 +CONCEPTION 2 0 2 2 +CONCEPT 1 0 1 1 +CONCEIVED 1 0 1 1 +CONCEIVE 2 0 2 2 +CONCEALING 1 0 1 1 +CONCEALED 1 0 1 1 +CONCEAL 1 0 1 1 +COMRADES 3 0 3 3 +COMPULSIVE 1 0 1 1 +COMPULSION 1 0 1 1 +COMPREHENSIVE 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPREHEND 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 2 0 2 2 +COMPORT 1 0 1 1 +COMPONENT 1 0 1 1 +COMPLY 2 0 2 2 +COMPLIMENTARY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICATED 1 0 1 1 +COMPLIANCE 2 0 2 2 +COMPLEXION 2 0 2 2 +COMPLETELY 3 0 3 3 +COMPLETED 2 0 2 2 +COMPLETE 2 0 2 2 +COMPLEMENT 1 0 1 1 +COMPLAINTS 1 0 1 1 +COMPLAINT 1 0 1 1 +COMPLAINING 1 0 1 1 +COMPLAINEST 1 0 1 1 +COMPLAIN 2 0 2 2 +COMPLACENTLY 1 0 1 1 +COMPLACENCY 1 0 1 1 +COMPETITION 1 0 1 1 +COMPETE 1 0 1 1 +COMPENSATION 1 0 1 1 +COMPELS 1 0 1 1 +COMPELLED 2 0 2 2 +COMPEL 1 0 1 1 +COMPASS 1 0 1 1 +COMPARISON 1 0 1 1 +COMPARED 3 0 3 3 +COMPARE 1 0 1 1 +COMPARATIVELY 2 0 2 2 +COMPARATIVE 1 0 1 1 +COMPANY 11 0 11 11 +COMPANIONSHIP 2 0 2 2 +COMPANIONS 2 0 2 2 +COMPANIONLESS 1 0 1 1 +COMPANION 5 0 5 5 +COMPANIES 3 0 3 3 +COMPACT 1 0 1 1 +COMMUNITY 3 0 3 3 +COMMUNITIES 1 0 1 1 +COMMUNION 1 0 1 1 +COMMUNICATED 2 0 2 2 +COMMUNICATE 1 0 1 1 +COMMOTION 1 0 1 1 +COMMONLY 1 0 1 1 +COMMITTING 1 0 1 1 +COMMITTEE 3 0 3 3 +COMMITTED 2 0 2 2 +COMMITTAL 1 0 1 1 +COMMITS 1 0 1 1 +COMMIT 1 0 1 1 +COMMISSIONS 1 0 1 1 +COMMISSIONERS 1 0 1 1 +COMMISSION 1 0 1 1 +COMMISERATION 1 0 1 1 +COMMERCIAL 2 0 2 2 +COMMENTS 1 0 1 1 +COMMENTED 1 0 1 1 +COMMENTARY 2 0 2 2 +COMMENT 1 0 1 1 +COMMENDED 1 0 1 1 +COMMEND 1 0 1 1 +COMMENCEMENT 1 0 1 1 +COMMENCE 1 0 1 1 +COMMANDMENT 2 0 2 2 +COMMANDERS 1 0 1 1 +COMMANDER 2 0 2 2 +COMMANDED 1 0 1 1 +COMMAND 2 0 2 2 +COMFORTS 2 0 2 2 +COMFORTING 1 0 1 1 +COMFORTED 2 0 2 2 +COMFORTABLE 3 0 3 3 +COMFORT 8 0 8 8 +COMETH 1 0 1 1 +COMES 10 0 10 10 +COMELY 1 0 1 1 +COMEDY 1 0 1 1 +COMEDIES 2 0 2 2 +COME 51 0 51 51 +COMBINED 2 0 2 2 +COMBINE 1 0 1 1 +COMBINATIONS 1 0 1 1 +COMBINATION 2 0 2 2 +COMBAT 2 0 2 2 +COMBASH 1 0 1 1 +COMB 1 0 1 1 
+COLUMNS 1 0 1 1 +COLUMN 1 0 1 1 +COLOURED 1 0 1 1 +COLORISTS 2 0 2 2 +COLORED 2 0 2 2 +COLONY 3 0 3 3 +COLONIAL 1 0 1 1 +COLONEL 1 0 1 1 +COLLEGE 4 0 4 4 +COLLECTIONS 1 0 1 1 +COLLECTION 1 0 1 1 +COLLECTING 1 0 1 1 +COLLAR 2 0 2 2 +COLLAPSED 1 0 1 1 +COLBERT 1 0 1 1 +COINED 1 0 1 1 +COINCIDE 1 0 1 1 +COFFEE 6 0 6 6 +COCK 1 0 1 1 +COBBLER 1 0 1 1 +COAXED 1 0 1 1 +COAT 1 0 1 1 +COASTS 1 0 1 1 +COAST 3 0 3 3 +COACHMAN 1 0 1 1 +COACH 1 0 1 1 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUMSINESS 1 0 1 1 +CLOUDS 6 0 6 6 +CLOUD 11 0 11 11 +CLOTTED 1 0 1 1 +CLOTHING 1 0 1 1 +CLOTHES 5 0 5 5 +CLOTHED 1 0 1 1 +CLOSET 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 6 0 6 6 +CLOSED 2 0 2 2 +CLOSE 10 0 10 10 +CLOAKS 2 0 2 2 +CLIMBING 1 0 1 1 +CLIMATE 2 0 2 2 +CLIFF 2 0 2 2 +CLIENTS 1 0 1 1 +CLICKED 1 0 1 1 +CLEVERNESS 3 0 3 3 +CLEVER 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 1 0 1 1 +CLERGYMAN'S 2 0 2 2 +CLERGY 2 0 2 2 +CLEARNESS 1 0 1 1 +CLEARLY 5 0 5 5 +CLEARING 1 0 1 1 +CLEAREST 1 0 1 1 +CLEAR 10 0 10 10 +CLEANED 1 0 1 1 +CLAY 5 0 5 5 +CLAWS 2 0 2 2 +CLASSIFYING 1 0 1 1 +CLASSIFIER 1 0 1 1 +CLASSIFICATION 1 0 1 1 +CLASSIC 2 0 2 2 +CLASSES 1 0 1 1 +CLASSED 3 0 3 3 +CLASS 9 0 9 9 +CLASPING 1 0 1 1 +CLASPED 2 0 2 2 +CLASHING 2 0 2 2 +CLARIFIED 1 0 1 1 +CLAP 1 0 1 1 +CLAMOROUS 1 0 1 1 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CLAIM 2 0 2 2 +CIVILIZATION 2 0 2 2 +CIVIL 3 0 3 3 +CITY 15 0 15 15 +CITIZENS 4 0 4 4 +CITIZEN 2 0 2 2 +CITIES 2 0 2 2 +CIRCUMVENTION 1 0 1 1 +CIRCUMSTANCES 4 0 4 4 +CIRCUMSTANCE 3 0 3 3 +CIRCUMNAVIGATION 1 0 1 1 +CIRCUMFERENCE 1 0 1 1 +CIRCULATED 1 0 1 1 +CIRCUITS 1 0 1 1 +CIRCUITOUS 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLE 7 0 7 7 +CIGARS 1 0 1 1 +CHURNING 1 0 1 1 +CHURCHES 1 0 1 1 +CHURCH 17 0 17 17 +CHUCKLING 1 0 1 1 +CHUBBY 1 0 1 1 +CHRONICLED 1 0 1 1 +CHRISTMAS 4 0 4 4 +CHRISTIANS 1 0 1 1 +CHRISTIANITY 2 0 2 2 +CHRIST'S 1 0 1 1 +CHRIST 22 0 22 22 +CHRISM 1 0 1 1 +CHOSEN 3 0 3 3 +CHOPPED 1 0 1 1 +CHOOSING 1 0 1 1 +CHOOSE 3 0 3 3 +CHOKING 1 0 1 1 +CHOICE 2 0 2 2 +CHOCOLATE 1 0 1 1 +CHIP 1 0 1 1 +CHINA 1 0 1 1 +CHIN 2 0 2 2 +CHIMNEY 2 0 2 2 +CHILDREN'S 1 0 1 1 +CHILDREN 18 0 18 18 +CHILDISH 2 0 2 2 +CHILDHOOD'S 1 0 1 1 +CHILDHOOD 3 0 3 3 +CHILD'S 2 0 2 2 +CHILD 19 0 19 19 +CHIEFTAIN 1 0 1 1 +CHIEFLY 4 0 4 4 +CHIEF 3 0 3 3 +CHESTNUTS 1 0 1 1 +CHESTNUT 3 0 3 3 +CHEST 2 0 2 2 +CHERRY 1 0 1 1 +CHERRIES 3 0 3 3 +CHERISH 1 0 1 1 +CHEMICALS 1 0 1 1 +CHEMICAL 1 0 1 1 +CHELSEA 1 0 1 1 +CHELFORD 4 0 4 4 +CHEERS 1 0 1 1 +CHEERFULLY 2 0 2 2 +CHEERFUL 2 0 2 2 +CHEEKS 1 0 1 1 +CHEEK 2 0 2 2 +CHECKS 1 0 1 1 +CHECKED 3 0 3 3 +CHAUCER'S 1 0 1 1 +CHAUCER 1 0 1 1 +CHASING 1 0 1 1 +CHARTER 1 0 1 1 +CHARMING 1 0 1 1 +CHARMED 1 0 1 1 +CHARM 1 0 1 1 +CHARLOTTE 2 0 2 2 +CHARLESTOWN 1 0 1 1 +CHARLESTON 1 0 1 1 +CHARITY 1 0 1 1 +CHARGED 2 0 2 2 +CHARGE 8 0 8 8 +CHARACTERS 2 0 2 2 +CHARACTERIZES 1 0 1 1 +CHARACTERIZED 1 0 1 1 +CHARACTER 14 0 14 14 +CHAPTERS 1 0 1 1 +CHAPTER 3 0 3 3 +CHAPEL 1 0 1 1 +CHAP 1 0 1 1 +CHAOS 1 0 1 1 +CHANNEL 2 0 2 2 +CHANGING 2 0 2 2 +CHANCES 1 0 1 1 +CHANCE 6 0 6 6 +CHAMBERS 2 0 2 2 +CHAMBER 4 0 4 4 +CHALICE 1 0 1 1 +CHAIRS 4 0 4 4 +CHAIR 15 0 15 15 +CHAIN 1 0 1 1 +CETERA 3 0 3 3 +CERTITUDE 1 0 1 1 +CERTAINLY 8 0 8 8 +CERTAIN 12 0 12 12 +CEREMONIES 2 0 2 2 +CEREMONIAL 1 0 1 1 +CENTURY 1 0 1 1 +CENTURIES 1 0 1 1 +CENTRAL 5 0 5 5 +CENTIPEDE 1 0 1 1 +CELLS 1 0 1 1 +CELLAR 1 0 1 1 +CELL 1 0 1 1 +CELESTIAL 2 0 2 2 +CELEBRITY 1 0 1 1 +CELEBRATION 1 0 1 1 +CELEBRATED 4 0 4 4 +CEDAR 1 0 1 1 +CEASING 1 0 1 1 +CEASE 2 0 2 2 +CAVERN 2 0 2 2 +CAVALRY 2 0 2 2 +CAVALIERS 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 
+CAUSES 1 0 1 1 +CAUSED 5 0 5 5 +CAUSE 9 0 9 9 +CATTLE 1 0 1 1 +CATS 1 0 1 1 +CATHOLIC 3 0 3 3 +CATHEDRAL 1 0 1 1 +CATECHISM 2 0 2 2 +CATCHING 1 0 1 1 +CATCHES 1 0 1 1 +CATCH 3 0 3 3 +CATASTROPHE 1 0 1 1 +CATAPULT 1 0 1 1 +CASTLE 1 0 1 1 +CASES 6 0 6 6 +CASEMATES 1 0 1 1 +CARTS 1 0 1 1 +CART 1 0 1 1 +CARRYING 4 0 4 4 +CARRY 7 0 7 7 +CARROTS 1 0 1 1 +CARRIES 1 0 1 1 +CARRIED 13 0 13 13 +CARRIAGES 1 0 1 1 +CARRIAGE 8 0 8 8 +CARPETED 1 0 1 1 +CARING 1 0 1 1 +CAREY 3 0 3 3 +CARELESSNESS 1 0 1 1 +CARELESS 1 0 1 1 +CAREFULLY 7 0 7 7 +CAREFUL 5 0 5 5 +CAREER 4 0 4 4 +CARED 4 0 4 4 +CARE 13 0 13 13 +CARD 1 0 1 1 +CAPTURED 1 0 1 1 +CAPTIVE 2 0 2 2 +CAPTIVATE 1 0 1 1 +CAPTAIN 27 0 27 27 +CAPSIZE 1 0 1 1 +CAPRICE 1 0 1 1 +CAPITAL 1 0 1 1 +CAPABLE 3 0 3 3 +CAP'N 4 0 4 4 +CAP 7 0 7 7 +CANST 1 0 1 1 +CANS 1 0 1 1 +CANOPY 1 0 1 1 +CANON 1 0 1 1 +CANNON 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLES 2 0 2 2 +CANARY 1 0 1 1 +CANAL 1 0 1 1 +CAN'T 21 0 21 21 +CAMPS 1 0 1 1 +CAMPAIGN 2 0 2 2 +CAMP 1 0 1 1 +CALVINISTIC 1 0 1 1 +CALMNESS 2 0 2 2 +CALMED 1 0 1 1 +CALM 5 0 5 5 +CALLS 5 0 5 5 +CALLOUS 1 0 1 1 +CALLING 2 0 2 2 +CALL 10 0 10 10 +CALHOUN 1 0 1 1 +CAKES 2 0 2 2 +CAKE 1 0 1 1 +CABINET 3 0 3 3 +CABIN 2 0 2 2 +CABALISTIC 1 0 1 1 +BUTTONING 1 0 1 1 +BUTTON 1 0 1 1 +BUTTERFLY 1 0 1 1 +BUTT 1 0 1 1 +BUTLER 2 0 2 2 +BUTCHERY 2 0 2 2 +BUTCHERED 1 0 1 1 +BUSY 1 0 1 1 +BUSINESS 5 0 5 5 +BUSHES 4 0 4 4 +BUSHEL 1 0 1 1 +BURSTS 1 0 1 1 +BURST 5 0 5 5 +BURNT 1 0 1 1 +BURNS 1 0 1 1 +BURNING 2 0 2 2 +BURNED 1 0 1 1 +BURIED 2 0 2 2 +BURGOS 1 0 1 1 +BURGLARS 1 0 1 1 +BURDEN 1 0 1 1 +BUOYANT 2 0 2 2 +BUNDLE 1 0 1 1 +BULL 2 0 2 2 +BUILT 2 0 2 2 +BUILDS 1 0 1 1 +BUILDING 5 0 5 5 +BUGGY 1 0 1 1 +BUDDING 1 0 1 1 +BUCKLING 1 0 1 1 +BUCKLES 1 0 1 1 +BUCKINGHAM 1 0 1 1 +BUBBLING 1 0 1 1 +BUBBLE'S 1 0 1 1 +BRUTE 1 0 1 1 +BRUTALITY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSH 1 0 1 1 +BRUISED 1 0 1 1 +BROWSED 1 0 1 1 +BROW 1 0 1 1 +BROUGHT 14 0 14 14 +BROTHERS 5 0 5 5 +BROTHELS 1 0 1 1 +BROOM 1 0 1 1 +BROOKS 1 0 1 1 +BROOKLYN 1 0 1 1 +BROODING 3 0 3 3 +BRONTES 1 0 1 1 +BROKEN 7 0 7 7 +BROKE 1 0 1 1 +BROADLY 1 0 1 1 +BROADEST 1 0 1 1 +BROAD 11 0 11 11 +BRITISH 2 0 2 2 +BRISTLING 1 0 1 1 +BRING 9 0 9 9 +BRIM 2 0 2 2 +BRILLIANT 5 0 5 5 +BRILLIANCY 1 0 1 1 +BRIGHTNESS 1 0 1 1 +BRIGHTLY 1 0 1 1 +BRIGHTEST 1 0 1 1 +BRIGHTER 1 0 1 1 +BRIGHTENED 2 0 2 2 +BRIGHT 16 0 16 16 +BRIGANTINE 1 0 1 1 +BRIEFLY 1 0 1 1 +BRIDGE 4 0 4 4 +BRIDE 1 0 1 1 +BRICK 2 0 2 2 +BREWING 1 0 1 1 +BREEZE 1 0 1 1 +BRED 1 0 1 1 +BREATHING 4 0 4 4 +BREATH 10 0 10 10 +BREASTPLATE 1 0 1 1 +BREAST 2 0 2 2 +BREAKWATER 1 0 1 1 +BREAKING 2 0 2 2 +BREAKFASTING 1 0 1 1 +BREAKERS 1 0 1 1 +BRAVELY 1 0 1 1 +BRAVE 2 0 2 2 +BRANWELL 1 0 1 1 +BRANDY 1 0 1 1 +BRANDON 4 0 4 4 +BRANDED 1 0 1 1 +BRANCHES 8 0 8 8 +BRAINS 3 0 3 3 +BRAIN 2 0 2 2 +BRAIDS 1 0 1 1 +BRAIDED 1 0 1 1 +BRAID 1 0 1 1 +BRACTON 1 0 1 1 +BRACELETS 1 0 1 1 +BRACELET 1 0 1 1 +BOY'S 3 0 3 3 +BOY 17 0 17 17 +BOXES 1 0 1 1 +BOWING 1 0 1 1 +BOWED 1 0 1 1 +BOW 4 0 4 4 +BOUT 1 0 1 1 +BOUQUETS 1 0 1 1 +BOUND 6 0 6 6 +BOUGHS 1 0 1 1 +BOTTOMS 1 0 1 1 +BOTTOM 7 0 7 7 +BOTTLES 2 0 2 2 +BOTTLE 1 0 1 1 +BOTHER 1 0 1 1 +BOTH 34 0 34 34 +BOSOM 2 0 2 2 +BORE 2 0 2 2 +BORDERING 1 0 1 1 +BORDERED 1 0 1 1 +BOOTS 2 0 2 2 +BOOLOOROO 12 0 12 12 +BOOK 4 0 4 4 +BONY 1 0 1 1 +BONNET 1 0 1 1 +BONES 2 0 2 2 +BONDAGE 1 0 1 1 +BOND 3 0 3 3 +BOLTON 1 0 1 1 +BOLDLY 3 0 3 3 +BOLDEST 1 0 1 1 +BOILED 1 0 1 1 +BOIL 1 0 1 1 +BOGUS 3 0 3 3 +BOGGS 2 0 2 2 +BODY 8 0 8 8 +BODILY 3 0 3 3 +BODIES 3 0 3 3 +BOASTING 2 0 2 2 +BOARDED 2 0 2 2 +BLUSHING 2 0 2 2 +BLUSHED 
1 0 1 1 +BLUSH 1 0 1 1 +BLUNT 1 0 1 1 +BLUFF 1 0 1 1 +BLUES 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 1 0 1 1 +BLOW 2 0 2 2 +BLOOM 1 0 1 1 +BLOODY 1 0 1 1 +BLOODSHED 1 0 1 1 +BLOODED 1 0 1 1 +BLOOD 6 0 6 6 +BLOCKS 1 0 1 1 +BLISS 1 0 1 1 +BLIND 1 0 1 1 +BLEW 1 0 1 1 +BLESSED 3 0 3 3 +BLESS 2 0 2 2 +BLEED 1 0 1 1 +BLEACHED 1 0 1 1 +BLAZING 1 0 1 1 +BLAZED 1 0 1 1 +BLAZE 2 0 2 2 +BLANK 2 0 2 2 +BLAME 1 0 1 1 +BLADE 2 0 2 2 +BLACKSTONE 1 0 1 1 +BLACKNESSES 1 0 1 1 +BLACKNESS 1 0 1 1 +BLACKER 2 0 2 2 +BLACK 22 0 22 22 +BITTER 1 0 1 1 +BITS 1 0 1 1 +BITES 1 0 1 1 +BITE 1 0 1 1 +BISHOPS 5 0 5 5 +BIRTH 2 0 2 2 +BIRMINGHAM 1 0 1 1 +BIRD 4 0 4 4 +BIRCHES 1 0 1 1 +BINDING 1 0 1 1 +BIND 1 0 1 1 +BILL 6 0 6 6 +BIG 12 0 12 12 +BIDDING 1 0 1 1 +BID 1 0 1 1 +BIBLE 1 0 1 1 +BEYOND 6 0 6 6 +BEWILDERMENT 1 0 1 1 +BEWILDERED 6 0 6 6 +BEWARE 1 0 1 1 +BEVERAGES 1 0 1 1 +BETWEEN 25 0 25 25 +BETTING 1 0 1 1 +BETRAYED 1 0 1 1 +BETRAY 1 0 1 1 +BETH 12 0 12 12 +BESTOWED 1 0 1 1 +BESTOW 1 0 1 1 +BEST 22 0 22 22 +BESOUGHT 1 0 1 1 +BESIEGED 1 0 1 1 +BESIDES 8 0 8 8 +BESIDE 5 0 5 5 +BERTIE 1 0 1 1 +BERRIES 1 0 1 1 +BENT 4 0 4 4 +BENIGNANTLY 1 0 1 1 +BENIGHTED 1 0 1 1 +BENEFIT 1 0 1 1 +BENEATH 6 0 6 6 +BEND 1 0 1 1 +BENCHES 3 0 3 3 +BEN 3 0 3 3 +BELT 2 0 2 2 +BELOW 1 0 1 1 +BELOVED 3 0 3 3 +BELONGS 1 0 1 1 +BELONGING 1 0 1 1 +BELONGED 3 0 3 3 +BELONG 2 0 2 2 +BELLY 3 0 3 3 +BELLS 1 0 1 1 +BELLINGHAM 2 0 2 2 +BELL 3 0 3 3 +BELIEVING 2 0 2 2 +BELIEVERS 1 0 1 1 +BELIEVED 5 0 5 5 +BELIEVE 21 0 21 21 +BELIEF 3 0 3 3 +BEINGS 1 0 1 1 +BEHOLDING 1 0 1 1 +BEHOLDERS 1 0 1 1 +BEHOLDER 1 0 1 1 +BEHIND 10 0 10 10 +BEHELD 1 0 1 1 +BEHAVED 1 0 1 1 +BEHAVE 1 0 1 1 +BEHALF 1 0 1 1 +BEGUN 5 0 5 5 +BEGUILING 1 0 1 1 +BEGOT 1 0 1 1 +BEGINS 4 0 4 4 +BEGINNING 4 0 4 4 +BEGIN 9 0 9 9 +BEGGAR'S 1 0 1 1 +BEGGAR 2 0 2 2 +BEGAN 22 0 22 22 +BEFITS 1 0 1 1 +BEFELL 1 0 1 1 +BEEN 137 0 137 137 +BEELZEBUB 1 0 1 1 +BEEHIVES 1 0 1 1 +BEEF 1 0 1 1 +BEDSIDE 1 0 1 1 +BEDROOM 2 0 2 2 +BEDFORD 1 0 1 1 +BECOMING 1 0 1 1 +BECOMES 8 0 8 8 +BECOME 14 0 14 14 +BECKONED 1 0 1 1 +BECKON 1 0 1 1 +BECAUSE 30 0 30 30 +BECAME 12 0 12 12 +BEAUTY 21 0 21 21 +BEAUTIFUL 13 0 13 13 +BEAUTIES 2 0 2 2 +BEATITUDE 2 0 2 2 +BEATING 2 0 2 2 +BEATERS 1 0 1 1 +BEATEN 2 0 2 2 +BEAT 1 0 1 1 +BEASTS 2 0 2 2 +BEARS 4 0 4 4 +BEARING 3 0 3 3 +BEARD 1 0 1 1 +BEAR'S 1 0 1 1 +BEAMS 1 0 1 1 +BEAK 6 0 6 6 +BEADS 1 0 1 1 +BATTLED 1 0 1 1 +BATTERIES 1 0 1 1 +BATTERED 1 0 1 1 +BAT 1 0 1 1 +BASTARD 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASED 1 0 1 1 +BARTLEY 14 0 14 14 +BARS 1 0 1 1 +BARRICADED 1 0 1 1 +BARREN 1 0 1 1 +BARREL 1 0 1 1 +BARRACK 1 0 1 1 +BARNS 1 0 1 1 +BARN 4 0 4 4 +BARGAINS 1 0 1 1 +BAREFOOT 1 0 1 1 +BARE 2 0 2 2 +BARBARITY 1 0 1 1 +BAR 1 0 1 1 +BAPTIZED 1 0 1 1 +BAPTISM 1 0 1 1 +BANTER 1 0 1 1 +BANQUET 1 0 1 1 +BANKS 1 0 1 1 +BANK 3 0 3 3 +BANISHED 1 0 1 1 +BANG 1 0 1 1 +BAND 2 0 2 2 +BALMY 1 0 1 1 +BALLS 2 0 2 2 +BALLET 2 0 2 2 +BALEEN 1 0 1 1 +BAKER 1 0 1 1 +BAGS 1 0 1 1 +BAGGAGE 1 0 1 1 +BAG 1 0 1 1 +BAFFLED 2 0 2 2 +BADLY 2 0 2 2 +BADE 3 0 3 3 +BAD 6 0 6 6 +BACON 1 0 1 1 +BACKWARD 1 0 1 1 +BABY'S 1 0 1 1 +BABY 1 0 1 1 +BABIES 1 0 1 1 +BABE 1 0 1 1 +AZURE 1 0 1 1 +AXIS 1 0 1 1 +AWOKE 3 0 3 3 +AWKWARD 1 0 1 1 +AWFULLY 2 0 2 2 +AWFUL 4 0 4 4 +AWAKE 1 0 1 1 +AWAITING 1 0 1 1 +AWAITED 2 0 2 2 +AVOIDING 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 5 0 5 5 +AVERSION 1 0 1 1 +AVERSE 1 0 1 1 +AVAILABLE 1 0 1 1 +AUTUMN 1 0 1 1 +AUTHORS 1 0 1 1 +AUTHORIZED 1 0 1 1 +AUTHORITY 6 0 6 6 +AUTHORITIES 1 0 1 1 +AUTHORITATIVELY 1 0 1 1 +AUTHOR 1 0 1 1 +AUTHENTICATED 1 0 1 1 +AUNT'S 1 0 1 1 +AUGUST 5 0 5 5 +AUGMENT 1 
0 1 1 +AUDITORY 1 0 1 1 +AUDITORS 1 0 1 1 +AUDIENCE 6 0 6 6 +AUDACIOUS 1 0 1 1 +AUCTION 1 0 1 1 +ATTRIBUTED 1 0 1 1 +ATTRACTIVE 1 0 1 1 +ATTRACTION 1 0 1 1 +ATTRACTED 3 0 3 3 +ATTORNEYS 1 0 1 1 +ATTIRE 1 0 1 1 +ATTENUATING 1 0 1 1 +ATTENTIVELY 2 0 2 2 +ATTENTION 11 0 11 11 +ATTENDED 1 0 1 1 +ATTENDANT 1 0 1 1 +ATTEND 3 0 3 3 +ATTEMPTS 1 0 1 1 +ATTEMPT 5 0 5 5 +ATTAINMENTS 1 0 1 1 +ATTAINMENT 1 0 1 1 +ATTAINED 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 4 0 4 4 +ATTACHED 2 0 2 2 +ATROCIOUS 1 0 1 1 +ATLANTIS 1 0 1 1 +ATLANTIC 3 0 3 3 +ATHENS 1 0 1 1 +ATHENIANS 1 0 1 1 +ATHENIAN 2 0 2 2 +ATE 2 0 2 2 +ASTRONOMY 1 0 1 1 +ASTOUNDING 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHING 1 0 1 1 +ASTONISHED 1 0 1 1 +ASSURES 1 0 1 1 +ASSUREDLY 1 0 1 1 +ASSURED 5 0 5 5 +ASSURE 5 0 5 5 +ASSURANCES 1 0 1 1 +ASSURANCE 3 0 3 3 +ASSUMED 5 0 5 5 +ASSOCIATION 2 0 2 2 +ASSOCIATES 1 0 1 1 +ASSOCIATED 3 0 3 3 +ASSISTED 2 0 2 2 +ASSISTANT 1 0 1 1 +ASSIST 2 0 2 2 +ASSIDUOUSLY 1 0 1 1 +ASSERTS 1 0 1 1 +ASSERTIVE 1 0 1 1 +ASSERTED 3 0 3 3 +ASSENT 1 0 1 1 +ASSEMBLY 2 0 2 2 +ASS 1 0 1 1 +ASPECT 1 0 1 1 +ASLEEP 1 0 1 1 +ASKING 2 0 2 2 +ASKED 22 0 22 22 +ASK 10 0 10 10 +ASIDE 3 0 3 3 +ASIA 1 0 1 1 +ASHORE 1 0 1 1 +ASHAMED 2 0 2 2 +ASCRIBES 1 0 1 1 +ASCERTAINING 1 0 1 1 +ASCERTAIN 2 0 2 2 +ARTILLERY 1 0 1 1 +ARTIFICE 1 0 1 1 +ARTICULATE 2 0 2 2 +ARTICLE 3 0 3 3 +ARTICHOKES 1 0 1 1 +ARTHUR 1 0 1 1 +ART 14 0 14 14 +ARROWS 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVE 1 0 1 1 +ARRIVAL 4 0 4 4 +ARRESTING 1 0 1 1 +ARRAY 1 0 1 1 +ARRANGING 2 0 2 2 +ARRANGEMENTS 1 0 1 1 +ARRANGEMENT 2 0 2 2 +ARRANGED 2 0 2 2 +AROUSE 1 0 1 1 +AROSE 2 0 2 2 +ARONNAX 1 0 1 1 +ARMY 9 0 9 9 +ARMS 15 0 15 15 +ARMOUR 1 0 1 1 +ARISTOCRACY 1 0 1 1 +ARISING 1 0 1 1 +ARISE 1 0 1 1 +ARID 1 0 1 1 +ARIANS 1 0 1 1 +ARGYLE'S 1 0 1 1 +ARGYLE 2 0 2 2 +ARGUS 1 0 1 1 +ARGUING 1 0 1 1 +ARGUE 2 0 2 2 +AREN'T 1 0 1 1 +AREA 1 0 1 1 +ARDUOUS 1 0 1 1 +ARCHIVES 1 0 1 1 +ARCHITECTURAL 1 0 1 1 +ARCHED 1 0 1 1 +ARCH 1 0 1 1 +ARCADIAN 1 0 1 1 +APRON 2 0 2 2 +APRIL 2 0 2 2 +APPROXIMATELY 1 0 1 1 +APPROVING 2 0 2 2 +APPROVE 2 0 2 2 +APPROVAL 1 0 1 1 +APPROPRIATE 1 0 1 1 +APPROBATION 1 0 1 1 +APPROACHING 3 0 3 3 +APPROACHES 2 0 2 2 +APPROACHED 6 0 6 6 +APPROACH 1 0 1 1 +APPRENTICESHIP 1 0 1 1 +APPREHENSION 1 0 1 1 +APPRECIATIVE 1 0 1 1 +APPRECIATE 1 0 1 1 +APPOSITION 1 0 1 1 +APPOINTED 7 0 7 7 +APPLYING 1 0 1 1 +APPLICATION 2 0 2 2 +APPLE 1 0 1 1 +APPLAUSE 2 0 2 2 +APPLAUDED 1 0 1 1 +APPETITES 1 0 1 1 +APPETITE 1 0 1 1 +APPEARS 1 0 1 1 +APPEARED 10 0 10 10 +APPEARANCES 3 0 3 3 +APPEARANCE 9 0 9 9 +APPEAR 3 0 3 3 +APPEALS 1 0 1 1 +APPARENTLY 1 0 1 1 +APPARENT 2 0 2 2 +APPARATUS 1 0 1 1 +APPALLING 1 0 1 1 +APOSTOLICAL 1 0 1 1 +APOSTOLIC 1 0 1 1 +APOSTLES 6 0 6 6 +APOSTLE 4 0 4 4 +APOLLO 1 0 1 1 +APARTMENT 2 0 2 2 +APART 1 0 1 1 +ANYHOW 3 0 3 3 +ANYBODY 3 0 3 3 +ANXIOUS 3 0 3 3 +ANXIETY 4 0 4 4 +ANTIPATHY 2 0 2 2 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANTICHRIST 1 0 1 1 +ANTI 1 0 1 1 +ANTARCTIC 1 0 1 1 +ANSWERS 2 0 2 2 +ANSWER 6 0 6 6 +ANOTHER'S 1 0 1 1 +ANNOYANCE 2 0 2 2 +ANNOUNCED 2 0 2 2 +ANNE'S 2 0 2 2 +ANIMOSITY 2 0 2 2 +ANIMATED 2 0 2 2 +ANIMALS 5 0 5 5 +ANIMAL 8 0 8 8 +ANGRY 5 0 5 5 +ANGRILY 3 0 3 3 +ANGRIER 1 0 1 1 +ANGOR 1 0 1 1 +ANGER 1 0 1 1 +ANECDOTES 1 0 1 1 +ANCIENT 3 0 3 3 +ANALYSIS 2 0 2 2 +ANALOGY 1 0 1 1 +ANALOGUE 1 0 1 1 +AMUSING 2 0 2 2 +AMUSEMENT 3 0 3 3 +AMUSED 1 0 1 1 +AMUSE 2 0 2 2 +AMPLY 2 0 2 2 +AMOUNT 3 0 3 3 +AMONGST 3 0 3 3 +AMONG 29 0 29 29 +AMISS 1 0 1 1 +AMIDST 2 0 2 2 +AMID 1 0 1 1 +AMETHYST 1 0 1 1 +AMERICANS 2 0 2 2 +AMERICAN 10 0 10 10 +AMERICA 2 0 2 2 +AMENDS 2 0 2 2 
+AMENDMENT 1 0 1 1 +AMELIORATION 1 0 1 1 +AMBROSE 4 0 4 4 +AMBITIOUS 1 0 1 1 +AMBITION 2 0 2 2 +AMBASSADOR 1 0 1 1 +AMAZEMENT 2 0 2 2 +AMALGAMATED 1 0 1 1 +ALWAYS 36 0 36 36 +ALTHOUGH 10 0 10 10 +ALTERNATING 3 0 3 3 +ALTERING 2 0 2 2 +ALTERED 2 0 2 2 +ALTERATION 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 1 0 1 1 +ALSO 36 0 36 36 +ALOUD 3 0 3 3 +ALONG 15 0 15 15 +ALONE 23 0 23 23 +ALMS 1 0 1 1 +ALMOST 19 0 19 19 +ALLY 1 0 1 1 +ALLUDE 1 0 1 1 +ALLOWING 2 0 2 2 +ALLOWED 7 0 7 7 +ALLOW 5 0 5 5 +ALLIES 1 0 1 1 +ALLIED 1 0 1 1 +ALLERS 1 0 1 1 +ALLEGED 2 0 2 2 +ALIVE 1 0 1 1 +ALIKE 1 0 1 1 +ALIGHTED 1 0 1 1 +ALICE 4 0 4 4 +ALGERIAN 1 0 1 1 +ALGERIA 2 0 2 2 +ALGEBRA 1 0 1 1 +ALERTNESS 1 0 1 1 +ALE 2 0 2 2 +ALBIGENSES 1 0 1 1 +ALAS 3 0 3 3 +ALARMED 1 0 1 1 +AKIN 1 0 1 1 +AIN'T 2 0 2 2 +AIMED 1 0 1 1 +AIDED 1 0 1 1 +AHEAD 1 0 1 1 +AGREEMENT 1 0 1 1 +AGREED 2 0 2 2 +AGREEABLY 2 0 2 2 +AGREEABLE 5 0 5 5 +AGREE 2 0 2 2 +AGO 4 0 4 4 +AGITATION 4 0 4 4 +AGITATED 2 0 2 2 +AGGRESSIVENESS 1 0 1 1 +AGGRESSIVE 1 0 1 1 +AGGREGATE 1 0 1 1 +AGENCY 1 0 1 1 +AGE 6 0 6 6 +AGAPE 1 0 1 1 +AGAINST 23 0 23 23 +AFTERWARDS 5 0 5 5 +AFTERWARD 2 0 2 2 +AFTERNOON 4 0 4 4 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFLOAT 1 0 1 1 +AFFORD 4 0 4 4 +AFFLICTED 1 0 1 1 +AFFIRMATIVE 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATE 1 0 1 1 +AFFECTION 7 0 7 7 +AFFAIRS 3 0 3 3 +AFFAIR 2 0 2 2 +ADVISER 1 0 1 1 +ADVISED 1 0 1 1 +ADVISABLE 1 0 1 1 +ADVICE 4 0 4 4 +ADVERTISING 1 0 1 1 +ADVERTISEMENT 1 0 1 1 +ADVERSE 2 0 2 2 +ADVANTAGES 2 0 2 2 +ADVANTAGE 3 0 3 3 +ADVANCING 1 0 1 1 +ADVANCE 5 0 5 5 +ADORE 1 0 1 1 +ADORATION 3 0 3 3 +ADOPTED 2 0 2 2 +ADOLESCENCE 1 0 1 1 +ADMITTING 2 0 2 2 +ADMITTED 3 0 3 3 +ADMITTANCE 1 0 1 1 +ADMIT 2 0 2 2 +ADMIRING 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 2 0 2 2 +ADMINISTRATION 3 0 3 3 +ADJUST 1 0 1 1 +ADHERENTS 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 6 0 6 6 +ADDRESS 3 0 3 3 +ADDITIONAL 1 0 1 1 +ADDED 11 0 11 11 +ACUTE 2 0 2 2 +ACTUALLY 3 0 3 3 +ACTUAL 4 0 4 4 +ACTS 2 0 2 2 +ACTRESS 1 0 1 1 +ACTORS 4 0 4 4 +ACTIVITY 1 0 1 1 +ACTIVELY 1 0 1 1 +ACTIVE 2 0 2 2 +ACTING 2 0 2 2 +ACT 6 0 6 6 +ACROSS 13 0 13 13 +ACQUIRES 1 0 1 1 +ACQUIRE 1 0 1 1 +ACQUAINTED 1 0 1 1 +ACQUAINTANCE 3 0 3 3 +ACQUAINT 1 0 1 1 +ACORN 1 0 1 1 +ACKNOWLEDGES 1 0 1 1 +ACKNOWLEDGED 2 0 2 2 +ACKNOWLEDGE 2 0 2 2 +ACHIEVEMENTS 1 0 1 1 +ACHIEVEMENT 1 0 1 1 +ACHIEVED 2 0 2 2 +ACE 2 0 2 2 +ACCUSTOMED 3 0 3 3 +ACCUSE 1 0 1 1 +ACCURATELY 1 0 1 1 +ACCURATE 2 0 2 2 +ACCURACY 3 0 3 3 +ACCRUING 1 0 1 1 +ACCOUNTS 1 0 1 1 +ACCOUNTED 1 0 1 1 +ACCOUNT 9 0 9 9 +ACCORDINGLY 1 0 1 1 +ACCORDING 4 0 4 4 +ACCORDANCE 2 0 2 2 +ACCOMPLISHMENT 1 0 1 1 +ACCOMPLISHED 5 0 5 5 +ACCOMPANY 2 0 2 2 +ACCOMPANIED 3 0 3 3 +ACCOMMODATIONS 1 0 1 1 +ACCOMMODATION 1 0 1 1 +ACCLAMATIONS 1 0 1 1 +ACCIDENTS 2 0 2 2 +ACCIDENT 2 0 2 2 +ACCESSORIES 1 0 1 1 +ACCEPTING 1 0 1 1 +ACCEPTED 1 0 1 1 +ACCEPTABLE 1 0 1 1 +ACCENTS 2 0 2 2 +ACCENT 3 0 3 3 +ABSURDITY 1 0 1 1 +ABSURDITIES 1 0 1 1 +ABSURD 1 0 1 1 +ABSTRACTIONS 1 0 1 1 +ABSTRACTION 1 0 1 1 +ABSORBED 1 0 1 1 +ABSOLUTELY 6 0 6 6 +ABSOLUTE 1 0 1 1 +ABSENT 2 0 2 2 +ABSENCE 1 0 1 1 +ABRUPTLY 2 0 2 2 +ABROAD 4 0 4 4 +ABRAHAM 2 0 2 2 +ABOVE 17 0 17 17 +ABOUT 85 0 85 85 +ABOLITIONISM 1 0 1 1 +ABOARD 2 0 2 2 +ABNER 1 0 1 1 +ABLE 7 0 7 7 +ABJECTLY 1 0 1 1 +ABILITY 2 0 2 2 +ABANDONED 2 0 2 2 diff --git a/log/greedy_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 
index 0000000000000000000000000000000000000000..7e9d46be4692f13a3f1d59f1f5b76dac0fb23295 --- /dev/null +++ b/log/greedy_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,15576 @@ +%WER = 9.29 +Errors: 500 insertions, 491 deletions, 3871 substitutions, over 52343 reference words (47981 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1688-142285-0000-1948: THERE'S IRON THEY SAY IN ALL OUR BLOOD AND A GRAIN OR TWO PERHAPS IS GOOD BUT HIS HE MAKES ME HARSHLY FEEL HAS GOT A LITTLE TOO MUCH OF STEEL ANON +1688-142285-0001-1949: MARGARET SAID MISTER HALE AS HE RETURNED FROM SHOWING HIS GUEST DOWNSTAIRS I COULD NOT HELP WATCHING YOUR FACE WITH SOME ANXIETY WHEN MISTER THORNTON MADE HIS CONFESSION OF HAVING BEEN A SHOP BOY +1688-142285-0002-1950: YOU DON'T MEAN THAT YOU THOUGHT ME SO SILLY +1688-142285-0003-1951: I REALLY (LIKED->LIKE) THAT ACCOUNT OF HIMSELF BETTER THAN ANYTHING ELSE HE SAID +1688-142285-0004-1952: HIS STATEMENT OF HAVING BEEN A SHOP BOY WAS THE THING I (LIKED->LIKE) BEST OF ALL +1688-142285-0005-1953: YOU WHO WERE ALWAYS ACCUSING PEOPLE (OF BEING->HAVE BEEN) SHOPPY AT HELSTONE +1688-142285-0006-1954: I DON'T THINK MISTER HALE YOU HAVE DONE QUITE RIGHT (IN->*) INTRODUCING SUCH A PERSON TO US WITHOUT TELLING US WHAT HE HAD BEEN +1688-142285-0007-1955: I REALLY WAS VERY MUCH AFRAID OF SHOWING HIM HOW MUCH SHOCKED I WAS AT SOME (PARTS->PART) OF WHAT HE SAID +1688-142285-0008-1956: HIS FATHER DYING IN MISERABLE CIRCUMSTANCES +1688-142285-0009-1957: WHY IT MIGHT HAVE BEEN IN THE WORKHOUSE +1688-142285-0010-1958: HIS FATHER SPECULATED WILDLY FAILED AND THEN KILLED HIMSELF BECAUSE HE COULD NOT BEAR THE DISGRACE +1688-142285-0011-1959: ALL HIS FORMER FRIENDS SHRUNK FROM THE DISCLOSURES THAT HAD TO BE MADE OF HIS DISHONEST GAMBLING WILD HOPELESS STRUGGLES MADE WITH OTHER PEOPLE'S MONEY TO REGAIN HIS OWN MODERATE PORTION OF WEALTH +1688-142285-0012-1960: NO ONE CAME FORWARDS TO HELP THE MOTHER AND THIS BOY +1688-142285-0013-1961: AT LEAST NO FRIEND CAME FORWARDS IMMEDIATELY AND (MISSUS->MISTER) THORNTON IS NOT ONE I FANCY TO WAIT TILL (TARDY->TIDY) KINDNESS COMES TO FIND HER OUT +1688-142285-0014-1962: SO THEY LEFT MILTON +1688-142285-0015-1963: HOW TAINTED ASKED HER FATHER +1688-142285-0016-1964: (OH->O) PAPA BY THAT TESTING EVERYTHING BY THE STANDARD OF WEALTH +1688-142285-0017-1965: WHEN HE SPOKE OF THE MECHANICAL POWERS HE EVIDENTLY LOOKED UPON THEM ONLY AS NEW WAYS OF EXTENDING TRADE AND MAKING MONEY +1688-142285-0018-1966: AND THE POOR MEN AROUND HIM (THEY->THERE) WERE POOR BECAUSE THEY WERE VICIOUS OUT OF THE PALE OF HIS SYMPATHIES BECAUSE THEY HAD NOT HIS IRON NATURE AND THE CAPABILITIES THAT IT GIVES HIM FOR BEING RICH +1688-142285-0019-1967: NOT VICIOUS HE NEVER SAID THAT +1688-142285-0020-1968: IMPROVIDENT AND SELF INDULGENT WERE HIS WORDS +1688-142285-0021-1969: MARGARET WAS COLLECTING HER MOTHER'S WORKING MATERIALS AND PREPARING TO GO TO BED +1688-142285-0022-1970: JUST AS SHE WAS LEAVING THE ROOM SHE HESITATED SHE WAS INCLINED TO MAKE AN ACKNOWLEDGMENT WHICH SHE THOUGHT WOULD PLEASE HER FATHER BUT WHICH TO BE FULL AND TRUE MUST INCLUDE A LITTLE ANNOYANCE +1688-142285-0023-1971: HOWEVER OUT IT CAME +1688-142285-0024-1972: PAPA I DO THINK MISTER THORNTON A VERY REMARKABLE MAN BUT PERSONALLY I DON'T LIKE HIM AT ALL +1688-142285-0025-1973: AND I DO SAID HER FATHER LAUGHING 
+1688-142285-0026-1974: PERSONALLY AS YOU CALL IT AND ALL +1688-142285-0027-1975: I DON'T SET HIM UP FOR A HERO OR ANYTHING OF THAT KIND +1688-142285-0028-1976: BUT GOOD NIGHT CHILD +1688-142285-0029-1977: THERE WERE SEVERAL OTHER SIGNS OF SOMETHING WRONG ABOUT MISSUS HALE +1688-142285-0030-1978: SHE AND DIXON HELD MYSTERIOUS CONSULTATIONS IN HER BEDROOM FROM WHICH DIXON WOULD COME OUT CRYING AND CROSS AS WAS (HER->A) CUSTOM WHEN ANY DISTRESS OF HER MISTRESS CALLED UPON HER SYMPATHY +1688-142285-0031-1979: ONCE MARGARET HAD GONE INTO THE CHAMBER SOON AFTER DIXON (LEFT IT->LIFTED) AND FOUND HER MOTHER ON HER KNEES AND AS MARGARET STOLE OUT SHE CAUGHT A FEW WORDS WHICH WERE EVIDENTLY A PRAYER FOR STRENGTH AND PATIENCE TO (ENDURE->INDURE) SEVERE BODILY SUFFERING +1688-142285-0032-1980: BUT THOUGH SHE RECEIVED CARESSES AND FOND WORDS BACK AGAIN IN SUCH PROFUSION AS WOULD HAVE GLADDENED HER FORMERLY YET SHE FELT THAT THERE WAS A SECRET WITHHELD FROM HER AND SHE BELIEVED IT BORE SERIOUS REFERENCE TO HER MOTHER'S HEALTH +1688-142285-0033-1981: SHE LAY AWAKE VERY LONG THIS NIGHT PLANNING HOW TO LESSEN THE EVIL INFLUENCE OF THEIR MILTON LIFE ON HER MOTHER +1688-142285-0034-1982: A SERVANT (TO->*) GIVE DIXON PERMANENT ASSISTANCE SHOULD BE GOT IF SHE GAVE UP (HER->THE) WHOLE TIME TO THE SEARCH AND THEN AT ANY RATE HER MOTHER MIGHT HAVE ALL THE PERSONAL (ATTENTION->ATTENTIONS) SHE REQUIRED AND HAD BEEN ACCUSTOMED TO HER WHOLE LIFE +1688-142285-0035-1983: VISITING REGISTER (OFFICES->OFFICERS) SEEING ALL MANNER OF UNLIKELY PEOPLE AND VERY FEW IN THE LEAST LIKELY ABSORBED MARGARET'S TIME AND THOUGHTS FOR SEVERAL DAYS +1688-142285-0036-1984: ONE AFTERNOON SHE MET BESSY HIGGINS IN THE STREET AND STOPPED TO SPEAK TO HER +1688-142285-0037-1985: WELL (BESSY->BUSY) HOW ARE YOU +1688-142285-0038-1986: BETTER AND NOT BETTER IF (YO->YOU) KNOW WHAT THAT MEANS +1688-142285-0039-1987: NOT EXACTLY REPLIED MARGARET SMILING +1688-142285-0040-1988: I'M BETTER IN NOT BEING TORN TO PIECES (BY->BUT) COUGHING (O'NIGHTS->A KNIGHT'S) BUT I'M WEARY AND TIRED (O->OF) MILTON AND LONGING TO GET AWAY TO THE LAND (O BEULAH->OF BOOLA) AND WHEN I THINK I'M FARTHER AND FARTHER OFF MY HEART SINKS AND I'M NO BETTER I'M WORSE +1688-142285-0041-1989: MARGARET TURNED (ROUND->AROUND) TO WALK ALONGSIDE OF THE GIRL IN HER FEEBLE PROGRESS HOMEWARD +1688-142285-0042-1990: BUT FOR A MINUTE OR TWO SHE DID NOT SPEAK +1688-142285-0043-1991: AT LAST SHE SAID IN A LOW VOICE +1688-142285-0044-1992: BESSY DO YOU WISH TO DIE +1688-142285-0045-1993: BESSY WAS SILENT IN HER TURN FOR A MINUTE OR TWO THEN SHE REPLIED +1688-142285-0046-1994: (NOUGHT->NOT) WORSE THAN MANY OTHERS I RECKON +1688-142285-0047-1995: BUT WHAT WAS IT +1688-142285-0048-1996: YOU KNOW I'M A STRANGER HERE SO PERHAPS I'M NOT SO QUICK AT UNDERSTANDING WHAT YOU MEAN AS IF I'D LIVED ALL MY LIFE (AT->IN) MILTON +1688-142285-0049-1997: I HAD FORGOTTEN WHAT I SAID FOR THE TIME CONTINUED MARGARET QUIETLY +1688-142285-0050-1998: I SHOULD HAVE THOUGHT OF IT AGAIN WHEN I WAS LESS BUSY (MAY I GO->MARGAR) WITH YOU NOW +1688-142285-0051-1999: THE SHARPNESS IN HER EYE TURNED TO A WISTFUL LONGING AS SHE MET MARGARET'S SOFT AND FRIENDLY GAZE +1688-142285-0052-2000: AS THEY TURNED UP INTO A SMALL COURT OPENING OUT (OF->INTO) A SQUALID STREET BESSY SAID +1688-142285-0053-2001: (YO'LL->YOU'LL) NOT BE DAUNTED IF (FATHER'S AT HOME->FATHER SAID HE) AND SPEAKS A BIT GRUFFISH AT FIRST +1688-142285-0054-2002: BUT NICHOLAS WAS NOT AT HOME WHEN THEY ENTERED +1688-142285-0055-2003: GASPED (BESSY->BESSIE) AT LAST 
+1688-142285-0056-2004: BESSY TOOK A LONG AND FEVERISH DRAUGHT AND THEN FELL BACK AND SHUT HER EYES +1688-142285-0057-2005: MARGARET BENT OVER AND SAID BESSY DON'T BE IMPATIENT WITH YOUR LIFE WHATEVER IT IS OR MAY HAVE BEEN +1688-142285-0058-2006: REMEMBER WHO GAVE IT (*->TO) YOU AND MADE IT WHAT IT IS +1688-142285-0059-2007: NOW I'LL NOT HAVE MY WENCH (PREACHED TO->PREACH TOO) +1688-142285-0060-2008: BUT SURELY SAID MARGARET FACING ROUND YOU BELIEVE IN WHAT I SAID THAT GOD GAVE HER LIFE AND ORDERED WHAT KIND OF LIFE IT WAS TO BE +1688-142285-0061-2009: I BELIEVE WHAT I SEE AND NO MORE +1688-142285-0062-2010: THAT'S WHAT I BELIEVE YOUNG WOMAN +1688-142285-0063-2011: I DON'T BELIEVE ALL I HEAR NO NOT BY A BIG DEAL +1688-142285-0064-2012: BUT (HOO'S->WHO'S) COME AT LAST AND (HOO'S->WHO'S) WELCOME AS LONG AS (HOO'LL->HE'LL) KEEP FROM PREACHING ON WHAT (HOO->HE) KNOWS (NOUGHT->NOT) ABOUT +1688-142285-0065-2013: IT'S SIMPLE AND NOT FAR TO FETCH NOR HARD TO WORK +1688-142285-0066-2014: BUT THE GIRL ONLY PLEADED THE MORE WITH MARGARET +1688-142285-0067-2015: DON'T THINK HARDLY ON HIM HE'S A GOOD MAN HE IS +1688-142285-0068-2016: I SOMETIMES THINK I SHALL BE (MOPED WI->MURKED WITH) SORROW EVEN IN THE CITY OF GOD IF (FATHER->EITHER) IS NOT THERE +1688-142285-0069-2017: THE FEVERISH COLOUR CAME INTO HER (CHEEK->CHEEKS) AND THE FEVERISH FLAME INTO HER EYE +1688-142285-0070-2018: BUT YOU WILL BE THERE FATHER YOU SHALL OH MY HEART +1688-142285-0071-2019: SHE PUT HER HAND TO IT AND BECAME GHASTLY PALE +1688-142285-0072-2020: MARGARET HELD HER IN HER ARMS AND PUT THE WEARY HEAD TO REST UPON HER BOSOM +1688-142285-0073-2021: PRESENTLY THE SPASM THAT FORESHADOWED DEATH HAD PASSED AWAY AND BESSY ROUSED HERSELF AND SAID +1688-142285-0074-2022: I'LL GO TO BED IT'S BEST PLACE BUT CATCHING (AT->THAT) MARGARET'S (GOWN YO'LL->GUN YOU'LL) COME AGAIN I KNOW (YO->YOU) WILL BUT JUST SAY IT +1688-142285-0075-2023: I WILL COME TO MORROW SAID MARGARET +1688-142285-0076-2024: MARGARET WENT AWAY VERY SAD AND THOUGHTFUL +1688-142285-0077-2025: SHE WAS LATE FOR TEA AT HOME +1688-142285-0078-2026: HAVE YOU MET WITH A SERVANT DEAR +1688-142285-0079-2027: NO MAMMA THAT ANNE BUCKLEY WOULD NEVER HAVE DONE +1688-142285-0080-2028: (SUPPOSE->S'POSE) I TRY SAID MISTER HALE +1688-142285-0081-2029: EVERYBODY ELSE HAS HAD (THEIR->THEY) TURN AT THIS GREAT DIFFICULTY NOW LET ME TRY +1688-142285-0082-2030: I MAY BE THE (CINDERELLA->CINRILO) TO PUT ON THE SLIPPER AFTER ALL +1688-142285-0083-2031: (WHAT->BUT) WOULD YOU DO PAPA HOW WOULD YOU SET ABOUT IT +1688-142285-0084-2032: WHY I WOULD APPLY (*->IT) TO SOME GOOD HOUSE MOTHER TO RECOMMEND ME ONE KNOWN TO HERSELF OR HER SERVANTS +1688-142285-0085-2033: VERY GOOD BUT WE MUST FIRST CATCH OUR HOUSE MOTHER +1688-142285-0086-2034: THE MOTHER OF WHOM HE SPOKE TO US SAID MARGARET +1688-142285-0087-2035: (MISSUS->MISTER) THORNTON THE ONLY MOTHER HE HAS I BELIEVE SAID MISTER HALE QUIETLY +1688-142285-0088-2036: I SHALL LIKE TO SEE HER SHE MUST BE AN UNCOMMON PERSON HER MOTHER ADDED +1688-142285-0089-2037: PERHAPS SHE MAY HAVE A RELATION WHO MIGHT SUIT US AND BE GLAD OF OUR PLACE +1688-142285-0090-2038: SHE SOUNDED TO BE SUCH A CAREFUL ECONOMICAL PERSON THAT I SHOULD LIKE ANY ONE OUT OF THE SAME FAMILY +1688-142285-0091-2039: MY DEAR SAID MISTER HALE ALARMED PRAY DON'T GO OFF ON THAT IDEA +1688-142285-0092-2040: I AM SURE AT ANY RATE SHE WOULD NOT LIKE STRANGERS TO KNOW ANYTHING ABOUT IT +1688-142285-0093-2041: TAKE NOTICE THAT (*->THIS) IS NOT MY KIND OF HAUGHTINESS PAPA IF I HAVE ANY AT ALL WHICH 
I DON'T AGREE TO THOUGH (YOU'RE->YOU) ALWAYS ACCUSING ME OF IT +1688-142285-0094-2042: I DON'T KNOW POSITIVELY THAT IT IS HERS EITHER BUT FROM LITTLE THINGS I HAVE GATHERED FROM HIM I FANCY SO +1688-142285-0095-2043: THEY CARED TOO LITTLE TO ASK IN WHAT MANNER HER SON HAD SPOKEN ABOUT HER +1998-15444-0000-2204: IF CALLED TO A CASE SUPPOSED (OR->AS) SUSPECTED TO BE ONE OF POISONING THE MEDICAL MAN HAS TWO DUTIES TO PERFORM TO SAVE THE PATIENT'S LIFE AND TO PLACE HIMSELF IN A POSITION TO GIVE EVIDENCE (IF->OF) CALLED (ON TO DO SO->UNTO) +1998-15444-0001-2205: HE SHOULD MAKE INQUIRIES AS TO SYMPTOMS AND TIME AT WHICH FOOD OR MEDICINE (WAS->MUST) LAST TAKEN +1998-15444-0002-2206: HE SHOULD NOTICE THE POSITION AND TEMPERATURE OF THE BODY THE CONDITION OF RIGOR (MORTIS->MORTUS) MARKS OF (VIOLENCE->IDOLENCE) APPEARANCE OF LIPS AND MOUTH +1998-15444-0003-2207: IN MAKING A POST (MORTEM->MODE OF) EXAMINATION THE ALIMENTARY CANAL SHOULD BE REMOVED AND PRESERVED FOR FURTHER INVESTIGATION +1998-15444-0004-2208: THE GUT AND THE (GULLET->GALLANT) BEING CUT ACROSS BETWEEN THESE LIGATURES THE STOMACH MAY BE REMOVED ENTIRE WITHOUT (SPILLING->SPINNING) ITS CONTENTS +1998-15444-0005-2209: IF THE (MEDICAL PRACTITIONER IS IN DOUBT->MEDICA PETITIONERS ENDOWED) ON ANY POINT HE SHOULD OBTAIN TECHNICAL ASSISTANCE FROM (SOMEONE->SOME ONE) WHO HAS PAID ATTENTION TO THE SUBJECT +1998-15444-0006-2210: IN A CASE OF ATTEMPTED SUICIDE BY POISONING IS IT THE DUTY OF THE DOCTOR TO INFORM THE POLICE +1998-15444-0007-2211: THE BEST (EMETIC->AMATIC) IS THAT WHICH IS AT HAND +1998-15444-0008-2212: THE (DOSE FOR AN ADULT->DAYS WERE INDULGE) IS TEN MINIMS +1998-15444-0009-2213: (APOMORPHINE->EPIMORPHIN) IS NOT (ALLIED IN->ALID AND) PHYSIOLOGICAL ACTION TO MORPHINE AND MAY BE GIVEN IN CASES OF (NARCOTIC->NAKOTIC) POISONING +1998-15444-0010-2214: (TICKLING->TITLING) THE (FAUCES->FOSSES) WITH (A->THE) FEATHER MAY EXCITE (VOMITING->WAUM ATTITTING) +1998-15444-0011-2215: IN USING THE (ELASTIC->ELECTIC) STOMACH TUBE SOME FLUID SHOULD BE INTRODUCED INTO THE STOMACH BEFORE ATTEMPTING TO EMPTY IT OR A PORTION OF THE MUCOUS MEMBRANE MAY BE (SUCKED->SACKED) INTO THE APERTURE +1998-15444-0012-2216: THE TUBE SHOULD BE EXAMINED TO SEE THAT IT IS NOT BROKEN OR CRACKED AS ACCIDENTS HAVE HAPPENED FROM NEGLECTING THIS PRECAUTION +1998-15444-0013-2217: (ANTIDOTES->AND HE VOTES) ARE USUALLY GIVEN HYPODERMICALLY OR IF (BY MOUTH->THE MOUSE) IN THE FORM OF TABLETS +1998-15444-0014-2218: IN THE ABSENCE OF (A->THE) HYPODERMIC SYRINGE THE REMEDY MAY BE GIVEN BY THE (RECTUM->RECTIM) +1998-15444-0015-2219: NOTICE THE (SMELL->SMAR) COLOUR AND GENERAL APPEARANCE OF THE MATTER SUBMITTED FOR EXAMINATION +1998-15444-0016-2220: FOR THE SEPARATION OF AN (ALKALOID->AKALOID) THE FOLLOWING IS THE PROCESS OF (STAS OTTO->STARS ARE TOO) +1998-15444-0017-2221: THIS PROCESS IS BASED UPON THE PRINCIPLE THAT THE SALTS OF THE (ALKALOIDS->AKES) ARE SOLUBLE IN (ALCOHOL AND->AKELET) WATER AND INSOLUBLE IN ETHER +1998-15444-0018-2222: THE PURE (ALKALOIDS WITH->IKOLOITS WAS) THE EXCEPTION OF MORPHINE IN ITS (CRYSTALLINE->CHRISTOLINE) FORM (ARE->A) SOLUBLE IN (ETHER->EITHER) +1998-15444-0019-2223: TWO (COOL->CUR) THE (MIXTURE->MIXED) AND FILTER WASH THE RESIDUE WITH STRONG ALCOHOL AND MIX THE (FILTRATES->FUR TRADES) +1998-15444-0020-2224: THE RESIDUE MAY BE SET ASIDE FOR THE DETECTION OF THE METALLIC POISONS (IF->OF) SUSPECTED (EXPEL->EXPELLED) THE (ALCOHOL BY->ALCOHOLD A) CAREFUL EVAPORATION +1998-15444-0021-2225: ON THE EVAPORATION OF THE ALCOHOL THE (RESINOUS->ZENOUS) AND FATTY 
(MATTERS->MATTER) SEPARATE +1998-15444-0022-2226: EVAPORATE THE (FILTRATE->FUDGE) TO A (SYRUP->CYRUP) AND EXTRACT WITH SUCCESSIVE PORTIONS OF ABSOLUTE ALCOHOL +1998-15444-0023-2227: SEPARATE THE ETHEREAL SOLUTION AND EVAPORATE +1998-15444-0024-2228: FIVE A PART OF THIS (ETHEREAL->ASSYRIAL) SOLUTION IS (POURED->PUT) INTO A WATCH GLASS AND (ALLOWED->ALLOW) TO EVAPORATE +1998-15444-0025-2229: TO PURIFY IT (ADD A SMALL->ADDISMA) QUANTITY OF (DILUTE SULPHURIC->DELUDE SUFFERG) ACID AND AFTER EVAPORATING TO THREE QUARTERS OF ITS (BULK->BARK) ADD A SATURATED SOLUTION OF CARBONATE OF POTASH OR SODA +1998-15444-0026-2230: (BOIL->BY) THE (FINELY DIVIDED->FINAL DIVIDE) SUBSTANCE WITH ABOUT ONE (EIGHTH->EIGHTHS) ITS (BULK->BAG) OF PURE HYDROCHLORIC ACID ADD FROM TIME TO TIME POTASSIC (CHLORATE->LOW RAGE) UNTIL THE SOLIDS (ARE REDUCED->HAVE IT USED) TO A STRAW YELLOW FLUID +1998-15444-0027-2231: THE RESIDUE OF THE MATERIAL AFTER DIGESTION (WITH->WAS) HYDROCHLORIC ACID AND (POTASSIUM CHLORATE->POTASSIAN CHLORIDE) MAY HAVE TO BE EXAMINED FOR SILVER LEAD AND (BARIUM->BURIUM) +1998-29454-0000-2157: A THOUSAND BLESSINGS FROM A GRATEFUL HEART +1998-29454-0001-2158: PERUSAL (SAID THE PAWNBROKER->SET UPON BROKER) THAT'S THE WAY TO (PERNOUNCE->PRONOUNCE) IT +1998-29454-0002-2159: HIS BOOKS TOLD HIM (THAT TREASURE IS->THE TREASURES) BEST HIDDEN UNDER LOOSE BOARDS (UNLESS->AND AS) OF COURSE YOUR HOUSE (HAS->HAD) A SECRET (PANEL->PENNEL) WHICH HIS HAD NOT +1998-29454-0003-2160: HE GOT IT UP AND PUSHED HIS TREASURES AS FAR IN AS HE COULD ALONG THE ROUGH (CRUMBLY->CRAMBLY) SURFACE OF THE (LATH->LASS) AND PLASTER +1998-29454-0004-2161: WHEN DICKIE CAME DOWN HIS AUNT (SLIGHTLY->SAT HE) SLAPPED HIM AND HE TOOK THE HALFPENNY AND (LIMPED OFF->LIMP OF) OBEDIENTLY +1998-29454-0005-2162: HE HAD NEVER SEEN ONE BEFORE AND IT INTERESTED HIM EXTREMELY +1998-29454-0006-2163: HE LOOKED ABOUT HIM AND KNEW THAT HE DID NOT AT ALL KNOW WHERE HE WAS +1998-29454-0007-2164: WHAT'S UP (MATEY LOST->MATE ASKED) YOUR WAY DICKIE EXPLAINED +1998-29454-0008-2165: WHEN HE SAID (AVE->HAVE) I (BIN->BEEN) ASLEEP +1998-29454-0009-2166: HERE WE ARE SAID THE MAN +1998-29454-0010-2167: NOT (EXACKLY->EXACTLY) SAID THE MAN BUT IT'S ALL RIGHT +1998-29454-0011-2168: WHEN IT WAS OVER THE (MAN->MEN) ASKED DICKIE IF HE COULD WALK A LITTLE WAY AND WHEN (DICKIE->DICKY) SAID HE COULD THEY SET OUT IN THE MOST FRIENDLY WAY SIDE BY SIDE +1998-29454-0012-2169: AND THE (TEA->TIENO) AND (ALL AN->*) THE EGG +1998-29454-0013-2170: AND THIS IS THE PRETTIEST PLACE EVER I SEE +1998-29454-0014-2171: I SHALL CATCH IT (A FAIR->OF HER) TREAT AS IT IS +1998-29454-0015-2172: SHE WAS (WAITIN->WAITING) FOR THE WOOD TO (BOIL->BOY) THE (KETTLE->CATTLE) WHEN (I->TO) COME OUT MOTHER +1998-29454-0016-2173: (AIN'T->AND) BAD WHEN SHE'S IN A GOOD TEMPER +1998-29454-0017-2174: THAT (AIN'T WHAT SHE'LL->ANNE BUT YE'LL) BE IN WHEN YOU GETS BACK +1998-29454-0018-2175: I GOT (TO STICK IT->A STICKET) SAID (DICKIE->DICKY) SADLY I'D BEST BE GETTING HOME +1998-29454-0019-2176: I WOULDN'T GO (OME->HOME) NOT IF (I->EVER) WAS YOU SAID THE MAN +1998-29454-0020-2177: NO SAID DICKIE OH NO NO I NEVER +1998-29454-0021-2178: (I AIN'T IT YER->AND ADIER) HAVE I LIKE WHAT (YER AUNT DO->YOU AREN'T TO) +1998-29454-0022-2179: WELL (THAT'LL->THOU) SHOW YOU THE SORT OF (MAN->MEN) I AM +1998-29454-0023-2180: THE MAN'S MANNER WAS SO KIND AND HEARTY THE WHOLE ADVENTURE WAS SO WONDERFUL AND NEW IS IT COUNTRY WHERE YOU GOING +1998-29454-0024-2181: THE SUN (SHOT->HAD) LONG GOLDEN BEAMS THROUGH THE GAPS (IN->AND) THE 
HEDGE +1998-29454-0025-2182: A BIRD (PAUSED->PASSED) IN ITS FLIGHT ON A BRANCH QUITE CLOSE AND CLUNG THERE SWAYING +1998-29454-0026-2183: HE TOOK OUT OF HIS POCKET A NEW ENVELOPE (A->AND) NEW SHEET OF PAPER AND A NEW PENCIL READY SHARPENED BY MACHINERY +1998-29454-0027-2184: (AN->AND) I (ASKS->ASK) YOU LET ME COME (ALONGER->ALONG ARE) YOU GOT THAT +1998-29454-0028-2185: (GET IT WROTE->GERT RODE) DOWN THEN DONE +1998-29454-0029-2186: THEN HE FOLDED IT AND PUT IT IN HIS POCKET +1998-29454-0030-2187: NOW (WE'RE->WE ARE) SQUARE HE SAID +1998-29454-0031-2188: THEY COULD PUT A (MAN->MEN) AWAY FOR (LESS->US) THAN THAT +1998-29454-0032-2189: I SEE THAT (THERE IN->THEN) A BOOK SAID (DICKIE CHARMED->DICK HAD SHUMMED) +1998-29454-0033-2190: HE REWARD THE WAKE THE LAST OF THE ENGLISH AND (I WUNNERED->A ONE AT) WHAT IT STOOD FOR +1998-29454-0034-2191: (WILD->WHITE) ONES (AIN'T ALF THE->AND A HALF) SIZE I LAY +1998-29454-0035-2192: ADVENTURES I SHOULD THINK SO +1998-29454-0036-2193: AH SAID (DICKIE->DICKY) AND A (FULL->FOOT) SILENCE FELL BETWEEN THEM +1998-29454-0037-2194: THAT WAS CHARMING BUT IT WAS PLEASANT TOO TO WASH THE (MUD OFF->MAD OF) ON THE WET GRASS +1998-29454-0038-2195: (DICKIE->DICKY) ALWAYS REMEMBERED THAT MOMENT +1998-29454-0039-2196: SO YOU SHALL SAID MISTER BEALE A (REG'LER->REGULAR) WASH ALL OVER THIS VERY NIGHT I ALWAYS LIKE A WASH MESELF +1998-29454-0040-2197: SOME (BLOKES->LOATHS) THINK IT PAYS TO BE DIRTY BUT IT DON'T +1998-29454-0041-2198: IF (YOU'RE->YO) CLEAN THEY SAY (HONEST POVERTY->I DISPOVERTY) AN IF (YOU'RE->YO) DIRTY THEY SAY SERVE YOU RIGHT +1998-29454-0042-2199: YOU ARE GOOD SAID DICKIE I DO LIKE YOU +1998-29454-0043-2200: I KNOW YOU WILL SAID DICKIE WITH ENTHUSIASM I KNOW (OW->HOW) GOOD YOU ARE +1998-29454-0044-2201: BLESS ME SAID MISTER BEALE UNCOMFORTABLY WELL THERE +1998-29454-0045-2202: (STEP OUT SONNY OR WE'LL->SPATANI ALBEA) NEVER GET THERE THIS (SIDE->SORT OF) CHRISTMAS +1998-29454-0046-2203: WELL (YOU'LL->YOU) KNOW ALL ABOUT IT PRESENTLY +1998-29455-0000-2232: THE SINGING AND LAUGHING WENT ON LONG AFTER HE HAD FALLEN ASLEEP AND IF LATER IN THE EVENING (THERE->THEY) WERE (LOUD VOICED->ALL OUTWARDS) ARGUMENTS OR (*->A) QUARRELS EVEN DICKIE DID NOT HEAR THEM +1998-29455-0001-2233: WHAT'S (ALL->ON) THAT THERE (DICKIE->DICKY) ASKED POINTING TO THE ODD (KNOBBLY->NOBLY) BUNDLES OF ALL SORTS AND SHAPES TIED ON TO THE (PERAMBULATOR'S->PRIME RELATOR'S) FRONT +1998-29455-0002-2234: TELL (YER->YOU) WHAT (MATE->MADE) LOOKS TO ME AS IF (I'D->I) TOOK A FANCY TO YOU +1998-29455-0003-2235: (SWELP->SWAB) ME HE SAID HELPLESSLY +1998-29455-0004-2236: (OH->O) LOOK SAID (DICKIE->DICKY) THE FLOWERS +1998-29455-0005-2237: (THEY'RE->THERE) ONLY (WEEDS->READS) SAID (BEALE->BEER) +1998-29455-0006-2238: BUT I (SHALL->SHOULD) HAVE THEM (WHILE THEY'RE->WHETHER) ALIVE SAID (DICKIE->DICKY) AS HE HAD SAID TO THE PAWNBROKER (ABOUT->BUT) THE MOONFLOWERS +1998-29455-0007-2239: (HI->AY) THERE (GOES->WAS) A RABBIT +1998-29455-0008-2240: (SEE IM CROST THE->SEEM QUEST) ROAD THERE (SEE HIM->SEEM) +1998-29455-0009-2241: HOW BEAUTIFUL SAID (DICKIE->DICKY) WRIGGLING WITH DELIGHT +1998-29455-0010-2242: THIS LIFE OF THE RABBIT AS DESCRIBED BY MISTER BEALE WAS THE CHILD'S FIRST GLIMPSE OF FREEDOM I'D LIKE TO BE A RABBIT +1998-29455-0011-2243: (OW'M->AM) I TO (WHEEL->BE AT) THE (BLOOMIN PRAM->ROOM IN PEM) IF (YOU GOES ON->YOUR GONE) LIKE (AS->US) IF YOU WAS A (BAG->PEG) OF EELS +1998-29455-0012-2244: I LIKE YOU (NEXTER->NEXT TO) MY OWN DADDY AND MISTER (BAXTER->BAXT THE) NEXT DOOR +1998-29455-0013-2245: 
THAT'S ALL RIGHT SAID MISTER BEALE AWKWARDLY +1998-29455-0014-2246: (DICKIE QUICK TO->DICKY QUICKLY) IMITATE TOUCHED HIS +1998-29455-0015-2247: POOR LITTLE MAN SAID THE LADY YOU MISS YOUR MOTHER DON'T YOU +1998-29455-0016-2248: OH WELL DONE LITTLE (UN->ONE) SAID MISTER (BEALE->BEE) TO HIMSELF +1998-29455-0017-2249: THE TWO TRAVELLERS WERE LEFT FACING EACH OTHER THE RICHER BY A PENNY AND (OH->O) WONDERFUL GOOD FORTUNE A WHOLE HALF CROWN +1998-29455-0018-2250: NO I NEVER SAID DICKIE (ERE'S->YES) THE (STEEVER->STEVER) +1998-29455-0019-2251: YOU STICK TO THAT SAID (BEALE->BEARD) RADIANT (WITH->WAS) DELIGHT YOU'RE A FAIR MASTERPIECE YOU ARE YOU EARNED IT HONEST IF EVER (A KID->KEPT) DONE +1998-29455-0020-2252: THEY WENT ON UP THE HILL AS HAPPY AS ANY ONE NEED WISH TO BE +1998-29455-0021-2253: PLEASE (DO NOT BE TOO SHOCKED->DON'T A BETOUT) +1998-29455-0022-2254: REMEMBER THAT NEITHER OF THEM KNEW ANY BETTER +1998-29455-0023-2255: TO THE (ELDER->OTHER) TRAMP LIES (AND BEGGING WERE->IN PEGGING WHERE) NATURAL MEANS OF LIVELIHOOD +1998-29455-0024-2256: BUT YOU SAID THE BED (WITH->WAS) THE GREEN CURTAINS (URGED DICKIE->ADDED THE KEI) +1998-29455-0025-2257: WHICH THIS (AIN'T->END) NOT BY NO MEANS +1998-29455-0026-2258: THE NIGHT IS FULL OF INTERESTING LITTLE SOUNDS THAT WILL NOT AT FIRST LET YOU SLEEP THE RUSTLE OF LITTLE (WILD->WHITE) THINGS (IN->ON) THE (HEDGES->HATCHES) THE BARKING OF DOGS (IN->AND) DISTANT FARMS THE CHIRP OF CRICKETS AND THE CROAKING OF FROGS +1998-29455-0027-2259: (THE NEW->THEN YOU) GAME OF BEGGING AND INVENTING STORIES TO INTEREST THE PEOPLE FROM WHOM IT WAS (WORTH->WORSE) WHILE TO BEG WENT ON GAILY DAY BY DAY AND WEEK BY WEEK AND DICKIE BY CONSTANT PRACTICE GREW SO CLEVER AT TAKING HIS PART IN THE ACTING THAT MISTER (BEALE->BEER) WAS QUITE DAZED WITH ADMIRATION +1998-29455-0028-2260: (BLESSED->BLEST) IF I EVER SEE SUCH A NIPPER HE SAID OVER AND OVER AGAIN +1998-29455-0029-2261: CLEVER AS A (TRAINDAWG E->TRAIN DOG) IS (AN->IN) ALL (OUTER IS->OUT OF HIS) OWN (EAD->HEAD) +1998-29455-0030-2262: I (AIN'T->AM) SURE AS I (ADN'T->HADN'T) BETTER STICK TO THE ROAD AND KEEP AWAY FROM OLD (ANDS->ENDS) LIKE (YOU JIM->EUGEN) +1998-29455-0031-2263: (I OPE E'S CLEVER->IOPIUS LOVE) ENOUGH TO DO (WOT E'S TOLD KEEP IS MUG SHUT->WHAT HE STOWED HE WAS MUCH AT) THAT'S ALL +1998-29455-0032-2264: IF (E'S STRAIGHT E'LL->HE STRAYED YOU) DO FOR ME AND IF HE AIN'T I'LL DO FOR (IM->HIM) SEE +1998-29455-0033-2265: SEE THAT (BLOKE JUST->LOCTICE) NOW SAID MISTER BEALE (YUSS->YES) SAID DICKIE +1998-29455-0034-2266: WELL YOU NEVER SEE (IM->HIM) +1998-29455-0035-2267: IF ANY ONE (ARSTS->ASKED) YOU IF YOU EVER SEE (IM->HIM) YOU NEVER SET EYES ON (IM->HIM) IN ALL YOUR BORN NOT TO REMEMBER (IM->HIM) +1998-29455-0036-2268: DICKIE WAS FULL OF QUESTIONS BUT MISTER (BEALE->BEE) HAD NO ANSWERS FOR THEM +1998-29455-0037-2269: NOR WAS IT SUNDAY ON WHICH THEY TOOK A REST AND WASHED THEIR SHIRTS ACCORDING TO MISTER (BEALE'S->BEAT'S) RULE OF LIFE +1998-29455-0038-2270: THEY DID NOT STAY THERE BUT WALKED OUT ACROSS THE DOWNS (WHERE->WITH) THE (SKYLARKS->SKYLIGHTS) WERE SINGING AND ON A DIP OF THE DOWNS CAME UPON GREAT STONE WALLS AND TOWERS (VERY->WHERE) STRONG AND GRAY +1998-29455-0039-2271: WHAT'S THAT THERE SAID (DICKIE->DICKY) +2033-164914-0000-661: REPLIED HE OF A TRUTH I HEARD HIM NOT AND I WOT HIM NOT AND FOLKS ARE ALL SLEEPING +2033-164914-0001-662: BUT SHE SAID WHOMSOEVER THOU SEEST AWAKE HE IS THE RECITER +2033-164914-0002-663: THEN SAID THE EUNUCH ART THOU HE WHO REPEATED POETRY BUT NOW AND MY LADY HEARD HIM 
+2033-164914-0003-664: REJOINED THE EUNUCH WHO THEN WAS THE RECITER POINT HIM OUT TO ME +2033-164914-0004-665: BY ALLAH REPLIED THE FIREMAN I TELL THEE THE TRUTH +2033-164914-0005-666: TELL ME WHAT HAPPENED QUOTH (ZAU AL MAKAN->SAO ALMAN) +2033-164914-0006-667: WHAT AILS THEE THEN THAT THOU MUST NEEDS RECITE VERSES SEEING THAT WE ARE TIRED OUT WITH WALKING AND WATCHING AND ALL THE FOLK ARE ASLEEP FOR THEY REQUIRE SLEEP TO REST THEM OF THEIR FATIGUE +2033-164914-0007-668: AND HE ALSO (IMPROVISED->PROVISED) THE TWO FOLLOWING (DISTICHS->DISTINCTS) +2033-164914-0008-669: WHEN (NUZHAT->NUZHA'S) AL ZAMAN HEARD THE FIRST IMPROVISATION SHE CALLED TO (MIND->MINE) HER FATHER AND HER MOTHER AND HER BROTHER AND THEIR (WHILOME->WILM) HOME THEN SHE WEPT AND CRIED (AT->TO) THE EUNUCH AND SAID TO HIM WOE TO THEE +2033-164914-0009-670: HE WHO (RECITED->RESIDED) THE FIRST TIME (HATH->HAD) RECITED A SECOND TIME AND (I->*) HEARD HIM (HARD->HEART) BY +2033-164914-0010-671: BY ALLAH AN THOU FETCH HIM NOT TO ME I WILL ASSUREDLY ROUSE THE CHAMBERLAIN ON THEE AND HE SHALL BEAT THEE AND CAST THEE OUT +2033-164914-0011-672: BUT TAKE THESE HUNDRED DINERS AND GIVE THEM TO THE SINGER AND BRING HIM TO ME GENTLY AND DO HIM NO HURT +2033-164914-0012-673: RETURN QUICKLY AND LINGER NOT +2033-164914-0013-674: WHEN IT WAS THE SEVENTY THIRD NIGHT +2033-164914-0014-675: BUT THE EUNUCH SAID I WILL NOT LEAVE THEE TILL THOU SHOW ME WHO IT WAS THAT RECITED THE VERSES FOR I DREAD RETURNING TO MY LADY WITHOUT HIM +2033-164914-0015-676: NOW WHEN THE FIREMAN HEARD THESE WORDS HE FEARED FOR (ZAU->ZA) AL MAKAN AND WEPT WITH EXCEEDING WEEPING AND SAID TO THE EUNUCH BY ALLAH IT WAS NOT I AND (I->THEY) KNOW HIM NOT +2033-164914-0016-677: SO GO THOU TO THY STATION AND IF THOU AGAIN (MEET->*) ANY ONE AFTER THIS HOUR RECITING AUGHT OF POETRY WHETHER HE BE NEAR OR FAR IT WILL BE I OR SOME ONE I KNOW AND THOU SHALT NOT LEARN OF HIM BUT BY ME +2033-164914-0017-678: THEN HE KISSED THE EUNUCH'S HEAD AND SPAKE HIM FAIR TILL HE WENT AWAY BUT THE CASTRATO FETCHED (A ROUND->AROUND) AND RETURNING SECRETLY CAME AND STOOD BEHIND THE FIREMAN FEARING TO GO BACK TO HIS MISTRESS WITHOUT (TIDINGS->HIDINGS) +2033-164914-0018-679: I SAY WHAT MADE MY (IGNOMY WHATE'ER->EGOY WHATEVER) THE BITTER (CUP I->CUPIED) DRAIN FAR BE (FRO->FROM) ME (THAT->THE) LAND TO FLEE NOR WILL I BOW TO THOSE WHO BLAME AND FOR SUCH LOVE WOULD DEAL ME SHAME +2033-164914-0019-680: THEN SAID THE EUNUCH TO (ZAU->ZA) AL MAKAN PEACE BE WITH THEE O MY LORD +2033-164914-0020-681: O MY LORD CONTINUED THE EUNUCH AND SHAHRAZAD PERCEIVED (*->THAT) THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164914-0021-682: WE WILL DO THEE NO UPRIGHT O MY SON NOR WRONG THEE IN AUGHT BUT OUR OBJECT IS THAT THOU BEND THY (GRACIOUS->GRECIOUS) STEPS WITH ME TO MY MISTRESS TO RECEIVE HER ANSWER AND (RETURN IN WEAL->RETURNING WHEEL) AND SAFETY AND THOU SHALT HAVE A HANDSOME PRESENT AS ONE WHO BRINGETH GOOD NEWS +2033-164914-0022-683: THEN THE EUNUCH WENT OUT TO (ZAU AL->THOU) MAKAN AND SAID TO HIM RECITE WHAT (VERSES->VERSEST) THOU KNOWEST FOR MY (LADY IS->LADIES) HERE HARD BY LISTENING TO THEE AND AFTER I WILL ASK THEE OF THY NAME AND (THY->THINE) NATIVE COUNTRY AND THY CONDITION +2033-164915-0000-643: AND ALSO THESE +2033-164915-0001-644: THEN SHE THREW HERSELF UPON HIM AND HE GATHERED HER TO HIS BOSOM AND THE TWAIN FELL DOWN IN A FAINTING FIT +2033-164915-0002-645: WHEN THE EUNUCH SAW (THIS CASE->THESE CAVES) HE WONDERED AT THEM AND THROWING OVER THEM SOMEWHAT TO COVER THEM WAITED TILL THEY SHOULD RECOVER 
+2033-164915-0003-646: AFTER A WHILE THEY CAME TO THEMSELVES AND (NUZHAT->UZHAT) AL ZAMAN REJOICED WITH EXCEEDING JOY OPPRESSION AND DEPRESSION LEFT HER AND GLADNESS TOOK THE MASTERY OF HER AND SHE REPEATED THESE VERSES +2033-164915-0004-647: ACCORDINGLY SHE TOLD HIM ALL THAT HAD COME TO HER SINCE THEIR SEPARATION AT THE KHAN AND WHAT HAD HAPPENED TO HER WITH THE (BADAWI->BADAH) HOW THE MERCHANT HAD BOUGHT HER OF HIM AND HAD TAKEN HER TO HER BROTHER (SHARRKAN->SHARKAN) AND HAD SOLD HER TO HIM HOW HE HAD FREED HER AT THE TIME OF BUYING HOW HE HAD MADE (A->HER) MARRIAGE CONTRACT WITH HER AND HAD GONE IN TO HER AND HOW THE KING THEIR SIRE HAD SENT AND ASKED FOR HER FROM (SHARRKAN->SHARKAN) +2033-164915-0005-648: BUT NOW GO TO THY MASTER AND BRING HIM QUICKLY TO ME +2033-164915-0006-649: THE CHAMBERLAIN CALLED THE CASTRATO AND CHARGED HIM TO DO ACCORDINGLY SO HE REPLIED I HEAR AND I OBEY AND HE TOOK HIS PAGES WITH HIM AND WENT OUT IN SEARCH OF THE (STOKER->STOCKER) TILL HE FOUND HIM IN THE REAR OF THE CARAVAN (GIRTHING->GIRDING) HIS ASS AND PREPARING FOR FLIGHT +2033-164915-0007-650: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN THE (STOKER GIRTHED->STOCKER GIRDED) HIS ASS FOR FLIGHT AND BESPAKE HIMSELF SAYING (OH->O) WOULD I KNEW WHAT IS BECOME OF HIM +2033-164915-0008-651: I BELIEVE HE HATH DENOUNCED ME TO THE EUNUCH HENCE THESE PAGES (ET->AT) ABOUT ME AND HE HATH MADE ME AN ACCOMPLICE IN HIS CRIME +2033-164915-0009-652: WHY DIDST THOU SAY I NEVER REPEATED (THESE->THIS) COUPLETS NOR DO I KNOW WHO REPEATED THEM WHEN IT WAS THY COMPANION +2033-164915-0010-653: BUT NOW I WILL NOT LEAVE THEE BETWEEN THIS PLACE AND BAGHDAD AND WHAT BETIDETH THY COMRADE SHALL (BETIDE->BE TIDE) THEE +2033-164915-0011-654: TWAS AS I FEARED THE (COMING ILLS->CARMINALS) DISCERNING BUT UNTO ALLAH WE ARE ALL RETURNING +2033-164915-0012-655: THEN THE EUNUCH CRIED UPON (THE->HIS) PAGES SAYING TAKE HIM OFF THE ASS +2033-164915-0013-656: AND HE ANSWERED I AM THE CHAMBERLAIN OF THE EMIR OF DAMASCUS KING (SHARRKAN SON->SHARKAN SONG) OF OMAR BIN AL (NU'UMAN->NUMAN) LORD OF (BAGHDAD->ADAD) AND OF THE LAND OF KHORASAN AND I BRING TRIBUTE AND PRESENTS FROM HIM TO HIS FATHER IN BAGHDAD +2033-164915-0014-657: (SO FARE YE->SOPHIA HE) FORWARDS NO HARM SHALL (BEFAL->BEFALL) YOU TILL YOU JOIN HIS GRAND WAZIR (DANDAN->TAN) +2033-164915-0015-658: THEN HE BADE HIM BE SEATED AND QUESTIONED HIM AND HE REPLIED THAT HE WAS CHAMBERLAIN TO THE EMIR OF DAMASCUS AND WAS BOUND TO KING OMAR WITH PRESENTS AND THE TRIBUTE OF SYRIA +2033-164915-0016-659: SO IT WAS AGREED THAT WE GO TO DAMASCUS AND FETCH THENCE THE KING'S SON (SHARRKAN->SHARKAN) AND (MAKE->MADE) HIM SULTAN OVER HIS FATHER'S REALM +2033-164915-0017-660: AND AMONGST THEM WERE SOME WHO WOULD HAVE CHOSEN THE CADET (ZAU AL MAKAN->THOU A MACAN) FOR QUOTH THEY HIS NAME BE LIGHT OF THE PLACE AND HE HATH A SISTER NUZHAT AL ZAMAN (HIGHS->HIES) THE DELIGHT OF THE TIME BUT THEY SET OUT FIVE YEARS AGO FOR AL (HIJAZ->HI JARS) AND NONE (WOTTETH->WHATETH) WHAT IS BECOME OF THEM +2033-164916-0000-684: SO HE TURNED TO THE WAZIR DANDAN AND SAID TO HIM VERILY YOUR TALE IS A (WONDER->WANDER) OF WONDERS +2033-164916-0001-685: (KNOW->NO) O CHIEF (WAZIR->WOZZIER) THAT HERE WHERE YOU HAVE ENCOUNTERED ME ALLAH HATH GIVEN YOU REST FROM FATIGUE AND BRINGETH YOU YOUR DESIRE AFTER THE EASIEST OF FASHIONS FOR (THAT->LET) HIS ALMIGHTY WILL (RESTORETH->RESTORE IT) TO YOU (ZAU AL MAKAN->THOU ARMANQUIN) AND (HIS->HE) SISTER (NUZHAT->KNOWSAT) AL ZAMAN WHEREBY WE WILL SETTLE THE MATTER AS WE EASILY CAN 
+2033-164916-0002-686: WHEN THE (MINISTER->MEANESTER) HEARD THESE WORDS HE REJOICED WITH GREAT JOY AND SAID O CHAMBERLAIN TELL ME THE TALE OF THE TWAIN AND WHAT (BEFEL->BEFELL) THEM AND THE CAUSE OF THEIR LONG ABSENCE +2033-164916-0003-687: (ZAU AL MAKAN->ZAWACON) BOWED HIS HEAD AWHILE AND THEN SAID I ACCEPT (THIS->THE) POSITION FOR INDEED THERE WAS NO REFUSING AND HE WAS CERTIFIED THAT THE CHAMBERLAIN HAD COUNSELLED HIM WELL AND WISELY AND (SET->SAID TO) HIM ON THE RIGHT WAY +2033-164916-0004-688: THEN HE ADDED O MY UNCLE HOW SHALL I DO WITH MY BROTHER (SHARRKAN->SHARKAN) +2033-164916-0005-689: AFTER (AWHILE->A WHILE) THE DUST DISPERSED AND THERE APPEARED UNDER IT THE ARMY OF BAGHDAD AND KHORASAN A CONQUERING HOST LIKE THE (FULL TIDE->POOL TIED) SEA AND SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164916-0006-690: WHEN IT WAS THE SEVENTY EIGHTH NIGHT +2033-164916-0007-691: (AND IN IT ALL->ANY NEAT OR) REJOICED AT THE ACCESSION OF THE LIGHT OF THE PLACE +2033-164916-0008-692: LASTLY THE MINISTER WENT IN AND KISSED THE GROUND BEFORE (ZAU AL->ZAO) MAKAN WHO ROSE TO MEET HIM SAYING WELCOME O WAZIR AND (SIRE SANS PEER->SIRES SONSPIER) +2033-164916-0009-693: MOREOVER THE SULTAN COMMANDED HIS WAZIR DANDAN CALL (A->AT) TEN DAYS HALT OF THE ARMY THAT HE MIGHT BE PRIVATE WITH HIM AND LEARN FROM HIM HOW AND WHEREFORE HIS FATHER HAD BEEN SLAIN +2033-164916-0010-694: HE THEN REPAIRED TO THE HEART OF THE ENCAMPMENT AND ORDERED (*->THAT) THE HOST TO HALT TEN DAYS +2414-128291-0000-2689: WHAT HATH HAPPENED (UNTO->TO) ME +2414-128291-0001-2690: HE ASKED HIMSELF SOMETHING (WARM->WRONG) AND LIVING QUICKENETH ME IT MUST BE IN (THE->THAT) NEIGHBOURHOOD +2414-128291-0002-2691: (WHEN->READ) HOWEVER (ZARATHUSTRA->THEIR TWO STRAW) WAS QUITE NIGH (UNTO->AND TO) THEM THEN DID HE HEAR PLAINLY (THAT A->WITH) HUMAN VOICE (SPAKE->PAKE) IN THE MIDST OF THE (KINE->KIND) AND (APPARENTLY->A FRIENDLY) ALL OF THEM HAD TURNED THEIR HEADS TOWARDS THE SPEAKER +2414-128291-0003-2692: (WHAT DO->FOR DIEU) I HERE SEEK +2414-128291-0004-2693: ANSWERED HE THE SAME THAT THOU (SEEKEST->SEEK'ST) THOU MISCHIEF MAKER THAT IS TO SAY HAPPINESS UPON EARTH +2414-128291-0005-2694: FOR I TELL THEE THAT I HAVE (ALREADY->ALREAD) TALKED HALF A MORNING UNTO THEM AND JUST NOW (WERE->WHERE) THEY (*->ARE) ABOUT TO GIVE ME (THEIR->THE) ANSWER +2414-128291-0006-2695: HE WOULD NOT BE RID OF HIS AFFLICTION +2414-128291-0007-2696: WHO (HATH->HAD) NOT AT PRESENT HIS HEART HIS MOUTH AND HIS EYES FULL OF DISGUST +2414-128291-0008-2697: THOU ALSO THOU ALSO +2414-128291-0009-2698: BUT BEHOLD (THESE KINE->HIS KIND) +2414-128291-0010-2699: THE (KINE->KIND) HOWEVER GAZED AT IT ALL AND WONDERED +2414-128291-0011-2700: WANTON (AVIDITY->ADVITY) BILIOUS ENVY CAREWORN REVENGE (POPULACE->POPULOUS) PRIDE ALL (THESE STRUCK MINE->DISTRACT MY) EYE +2414-128291-0012-2701: IT IS NO LONGER TRUE (THAT THE->LITTLE) POOR (ARE->A) BLESSED +2414-128291-0013-2702: THE KINGDOM OF HEAVEN HOWEVER IS WITH THE (KINE->KIND) AND (WHY IS->WAS) IT NOT WITH (THE->A) RICH +2414-128291-0014-2703: WHY (DOST->THOSE) THOU TEMPT ME +2414-128291-0015-2704: ANSWERED (THE OTHER->HER) +2414-128291-0016-2705: THOU KNOWEST IT THYSELF BETTER EVEN THAN I +2414-128291-0017-2706: (THUS SPAKE->DOES BEG) THE PEACEFUL ONE AND PUFFED HIMSELF AND (PERSPIRED->POISPIRED) WITH HIS WORDS (SO THAT THE KINE->TO INTER KIND) WONDERED ANEW +2414-128291-0018-2707: THOU DOEST (VIOLENCE->WILDEST) TO THYSELF THOU PREACHER ON THE (MOUNT WHEN->MOUND AND) THOU USEST SUCH (SEVERE->SAVOUR) WORDS 
+2414-128291-0019-2708: THEY ALSO (ABSTAIN->ABSTAINED) FROM ALL HEAVY THOUGHTS WHICH INFLATE THE HEART +2414-128291-0020-2709: WELL +2414-128291-0021-2710: (SAID ZARATHUSTRA->SAYS ERGUSTARA) THOU SHOULDST ALSO SEE (MINE->MY) ANIMALS (MINE->MY) EAGLE AND MY SERPENT (THEIR->THEY ARE) LIKE DO NOT AT PRESENT EXIST ON EARTH +2414-128291-0022-2711: AND TALK TO (MINE->MY) ANIMALS OF THE HAPPINESS OF ANIMALS +2414-128291-0023-2712: NOW HOWEVER (TAKE->THEY) LEAVE (AT ONCE->IT WAS) OF (THY KINE->THEIR KIND) THOU STRANGE (ONE->WORLD) +2414-128291-0024-2713: THOU AMIABLE ONE +2414-128291-0025-2714: FOR THEY ARE (THY WARMEST FRIENDS->DIVORITES) AND (PRECEPTORS->PERCEPTORS) +2414-128291-0026-2715: THOU EVIL (FLATTERER->SLACKER) +2414-128292-0000-2618: (WHITHER HATH->WITHER HAD) MY (LONESOMENESS GONE->LONESOME DISCOUR) SPAKE HE +2414-128292-0001-2619: MY SHADOW (CALLETH->CAUGHT) ME +2414-128292-0002-2620: WHAT MATTER ABOUT MY SHADOW +2414-128292-0003-2621: (LET IT RUN AFTER->NEKHLUD TRUE ENOUGH TO) ME I (RUN->RAN) AWAY FROM IT +2414-128292-0004-2622: THUS (SPAKE ZARATHUSTRA->BEING THEIR TOO STRIKE) TO HIS HEART AND RAN AWAY +2414-128292-0005-2623: VERILY MY FOLLY HATH GROWN BIG IN THE MOUNTAINS +2414-128292-0006-2624: NOW DO I HEAR SIX OLD (FOOLS->FOOD'S) LEGS RATTLING BEHIND ONE ANOTHER +2414-128292-0007-2625: (BUT DOTH ZARATHUSTRA->BY DOTHAH TOUSTRA) NEED TO BE FRIGHTENED BY (HIS->A) SHADOW +2414-128292-0008-2626: ALSO (METHINKETH->METHINK IT) THAT AFTER ALL IT (HATH LONGER LEGS->HAD LONG OR LESS) THAN MINE +2414-128292-0009-2627: FOR WHEN (ZARATHUSTRA SCRUTINISED->THEY ARE TOO STRESS COGNIZED) HIM (WITH HIS->IT IS) GLANCE HE WAS FRIGHTENED (AS BY->ALBERT) A (SUDDEN->CERTAIN) APPARITION SO SLENDER (SWARTHY->SWALLTY) HOLLOW AND WORN OUT (DID THIS FOLLOWER APPEAR->IT IS FULL OF PEER) +2414-128292-0010-2628: (ASKED ZARATHUSTRA VEHEMENTLY->I TAKE TO EXTRAVE IMAGINE) WHAT (DOEST->DOST) THOU (HERE->HEAR) +2414-128292-0011-2629: AND WHY (CALLEST->COLLEST) THOU THYSELF MY SHADOW +2414-128292-0012-2630: THOU ART NOT PLEASING (UNTO->INTO) ME +2414-128292-0013-2631: MUST I EVER BE ON THE WAY +2414-128292-0014-2632: O (EARTH->ART) THOU HAST BECOME (TOO->TO) ROUND FOR ME +2414-128292-0015-2633: (WHEN->WITH) THE DEVIL (CASTETH->CAST AT) HIS SKIN DOTH NOT HIS NAME ALSO FALL AWAY IT IS ALSO SKIN +2414-128292-0016-2634: THE DEVIL HIMSELF IS PERHAPS SKIN +2414-128292-0017-2635: SOMETIMES I MEANT TO LIE AND BEHOLD +2414-128292-0018-2636: THEN (ONLY->OLD LADY) DID I HIT THE TRUTH +2414-128292-0019-2637: HOW (HAVE->HAIR) I STILL INCLINATION +2414-128292-0020-2638: (HAVE->EH) I STILL A (GOAL->GOLD) +2414-128292-0021-2639: A (HAVEN TOWARDS WHICH->HAIRED TOWARD SPEECH) MY (SAIL IS SET->SAILOR SAID) +2414-128292-0022-2640: FOR IT (DO->TOO) I ASK AND SEEK AND HAVE (SOUGHT BUT HAVE->THOUGHT IT HATH) NOT FOUND IT +2414-128292-0023-2641: (O ETERNAL->I TURNED OUT) EVERYWHERE (O ETERNAL->WHO HAD TURNED OUT) NOWHERE (O ETERNAL->WHO HAD TURNED OUT) IN VAIN +2414-128292-0024-2642: THOU ART MY SHADOW +2414-128292-0025-2643: SAID HE AT LAST SADLY +2414-128292-0026-2644: THY DANGER (IS NOT SMALL->HIS PERCHAL) THOU FREE SPIRIT AND (WANDERER->WONDER) +2414-128292-0027-2645: THEY SLEEP QUIETLY THEY (ENJOY->ENJOYED) THEIR NEW SECURITY +2414-128292-0028-2646: (BEWARE LEST->BE REALIST) IN THE END A NARROW (FAITH->FIT) CAPTURE THEE A HARD (RIGOROUS->RECKLESS) DELUSION +2414-128292-0029-2647: FOR NOW EVERYTHING THAT IS NARROW AND FIXED (SEDUCETH->SEDUCE IT) AND (TEMPTETH->TEMPTED) THEE +2414-128292-0030-2648: THOU HAST LOST (THY GOAL->DAGGULE) 
+2414-128292-0031-2649: (THOU->THOUGH) POOR ROVER AND RAMBLER (THOU->NOW) TIRED (BUTTERFLY->BUT TO FLY) +2414-128292-0032-2650: WILT THOU HAVE A REST (AND A HOME->IN THE WHOLE) THIS EVENING +2414-159411-0000-2653: ONCE UPON (A->HER) TIME A (BRAHMAN->BRAHM IN) WHO WAS WALKING ALONG THE ROAD CAME UPON AN IRON CAGE IN WHICH A GREAT TIGER (HAD BEEN SHUT->AT MONSHADE) UP BY THE (VILLAGERS->VILLAGES) WHO CAUGHT HIM +2414-159411-0001-2654: THE (BRAHMAN->BRAMIAN) ANSWERED NO I WILL NOT FOR IF I LET YOU OUT OF THE CAGE YOU WILL EAT ME +2414-159411-0002-2655: OH FATHER OF MERCY ANSWERED THE TIGER IN TRUTH THAT I WILL NOT +2414-159411-0003-2656: I WILL NEVER BE SO UNGRATEFUL ONLY LET ME OUT THAT I MAY (DRINK->BRING) SOME WATER AND RETURN +2414-159411-0004-2657: (THEN->AND IN) THE (BRAHMAN TOOK->BRAM INTO) PITY ON HIM AND OPENED THE CAGE DOOR BUT NO SOONER HAD HE (DONE->TURNED) SO THAN THE TIGER JUMPING OUT SAID NOW I WILL EAT YOU FIRST AND DRINK THE WATER AFTERWARDS +2414-159411-0005-2658: SO THE (BRAHMAN->BRAMID) AND THE TIGER WALKED ON TILL THEY CAME TO A (BANYAN->BANDON) TREE AND THE (BRAHMAN->BRAMMEN) SAID TO IT (BANYAN->BANION) TREE (BANYAN->BANNING) TREE (HEAR->HERE) AND GIVE (JUDGMENT->JOINTMENT) +2414-159411-0006-2659: ON WHAT MUST I GIVE JUDGMENT ASKED THE (BANYAN->BEN) TREE +2414-159411-0007-2660: (THIS TIGER->DISTAGGER) SAID (THE BRAHMAN->DEBRAMIN) BEGGED ME TO LET HIM OUT OF HIS CAGE TO DRINK A LITTLE WATER AND HE PROMISED NOT TO (HURT->HIDE) ME IF I DID SO BUT NOW THAT I HAVE (LET->LEFT) HIM OUT HE WISHES TO EAT ME +2414-159411-0008-2661: (IS->*) IT (JUST->IS JEALOUS) THAT HE SHOULD DO SO (OR NO->I KNOW) +2414-159411-0009-2662: (LET->LATE) THE TIGER EAT THE MAN FOR MEN ARE (AN->IN) UNGRATEFUL RACE +2414-159411-0010-2663: (SIR->SO) CAMEL SIR CAMEL CRIED THE (BRAHMAN HEAR->BRAMIN HERE) AND GIVE JUDGMENT +2414-159411-0011-2664: AT A LITTLE DISTANCE THEY FOUND A BULLOCK LYING BY THE ROADSIDE +2414-159411-0012-2665: IS IT FAIR THAT HE SHOULD DO SO OR NOT +2414-159411-0013-2666: (LET THE TIGER->LATER TIRE) EAT THE MAN FOR MEN HAVE NO PITY +2414-159411-0014-2667: THREE OUT OF THE SIX (HAD GIVEN->IN GIVING) JUDGMENT (AGAINST THE BRAHMAN BUT->OF EUSTA BRAMIN WHICH) STILL HE DID NOT LOSE ALL HOPE AND (DETERMINED->TURN MIND) TO ASK THE OTHER THREE +2414-159411-0015-2668: ON WHAT MUST I GIVE (JUDGMENT->YOU TENANT) ASKED THE EAGLE +2414-159411-0016-2669: THE (BRAHMAN STATED->BRAM IS SUITED) THE CASE AND THE EAGLE ANSWERED WHENEVER MEN SEE ME THEY TRY TO SHOOT ME (THEY CLIMB->DECLINE) THE ROCKS AND STEAL AWAY MY LITTLE ONES +2414-159411-0017-2670: THEN THE TIGER BEGAN TO ROAR AND SAID (THE->*) JUDGMENT OF ALL IS AGAINST YOU O (BRAHMAN->BRAHMIN) +2414-159411-0018-2671: AFTER THIS THEY SAW AN (ALLIGATOR->ALLEGATOR) AND THE (BRAHMAN->BRAMMER) RELATED THE MATTER TO HIM HOPING FOR A MORE FAVORABLE VERDICT +2414-159411-0019-2672: (BUT->WITH) THE (ALLIGATOR SAID->ADDURE TO THE SIT) WHENEVER (I->A) PUT MY NOSE OUT OF THE WATER (MEN TORMENT->MAYNUM AND) ME AND (TRY->TRIED) TO KILL ME +2414-159411-0020-2673: (THE BRAHMAN->NO GRAMMEN) GAVE HIMSELF UP AS LOST BUT AGAIN HE PRAYED THE TIGER TO HAVE PATIENCE AND LET HIM ASK THE OPINION OF THE SIXTH JUDGE +2414-159411-0021-2674: (NOW->BY) THE SIXTH WAS A JACKAL +2414-159411-0022-2675: THE (BRAHMAN->GRAMMER) TOLD HIS STORY AND SAID TO HIM UNCLE (JACKAL UNCLE JACKAL->JACK HO AND WILL JACK HO) SAY WHAT IS YOUR (JUDGMENT->TEM IT) +2414-159411-0023-2676: SHOW ME THE (PLACE->PACE) +2414-159411-0024-2677: (WHEN THEY GOT->AND THE COURT) THERE THE JACKAL SAID (NOW 
BRAHMAN->NABRAMAN) SHOW ME EXACTLY WHERE YOU STOOD +2414-159411-0025-2678: EXACTLY THERE WAS IT ASKED (THE JACKAL->JACK HO) +2414-159411-0026-2679: EXACTLY HERE REPLIED THE (BRAHMAN->PROMIN) +2414-159411-0027-2680: (WHERE->THERE) WAS THE TIGER THEN +2414-159411-0028-2681: WHY I STOOD SO SAID THE (TIGER->DRAGGER) JUMPING INTO THE CAGE AND MY HEAD WAS ON THIS SIDE +2414-159411-0029-2682: VERY GOOD SAID THE (JACKAL->JACK HOPE) BUT I CANNOT JUDGE WITHOUT UNDERSTANDING THE WHOLE MATTER EXACTLY +2414-159411-0030-2683: (SHUT->SHED) AND BOLTED SAID (THE BRAHMAN->DEBRAMIN) +2414-159411-0031-2684: (THEN SHUT->VEN SHIRT) AND (BOLT IT->BOLTED) SAID (THE->TO) JACKAL +2414-159411-0032-2685: WHEN THE (BRAHMAN->BRAMIN) HAD (DONE->TURNED) THIS THE JACKAL SAID OH YOU WICKED AND UNGRATEFUL (TIGER->TYER) +2414-159411-0033-2686: (WHEN THE->WITH A) GOOD BRAHMAN OPENED (YOUR CAGE->YOU CARED) DOOR IS TO EAT HIM THE ONLY RETURN (YOU->HE) WOULD MAKE +2414-159411-0034-2687: PROCEED ON YOUR JOURNEY (FRIEND BRAHMAN->FRANJAMIN) +2414-159411-0035-2688: (YOUR ROAD->HE RULED) LIES THAT WAY (AND MINE->MIND) THIS +2414-165385-0000-2651: (THUS->THERE'S) ACCOMPLISHED (HE->*) EXCITED (THE->*) ADMIRATION OF EVERY SILLY (COQUETTE->POCKET) AND THE ENVY OF EVERY (FLUTTERING COXCOMB->REFLECTING ACCOUNT) BUT BY ALL YOUNG GENTLEMEN AND LADIES OF UNDERSTANDING HE WAS HEARTILY DESPISED AS A MERE CIVILIZED MONKEY +2414-165385-0001-2652: THAT HIS SOUL MIGHT AFTERWARDS OCCUPY SUCH A STATION AS WOULD BE MOST SUITABLE TO HIS CHARACTER IT WAS (SENTENCED->INTENSE) TO INHABIT (THE->A) BODY OF THAT (FINICAL->PHYNICAL) GRINNING AND (MISCHIEVOUS->MACHINEROUS) LITTLE (MIMICK->MIMIC) WITH (FOUR->FULL) LEGS WHICH (YOU->SHE) NOW BEHOLD BEFORE YOU +2609-156975-0000-2367: THEN MOSES WAS AFRAID AND SAID SURELY THE THING IS KNOWN +2609-156975-0001-2368: (HOLD->OR) ON (HOLD->HER) FAST (HOLD OUT PATIENCE IS->HODE PATENTS AS) GENIUS +2609-156975-0002-2369: LET US HAVE FAITH THAT RIGHT (MAKES->MATRON) MIGHT AND IN THAT FAITH LET (US DARE->STARED) TO DO OUR DUTY (AS->IF) WE UNDERSTAND IT LINCOLN +2609-156975-0003-2370: THE EGYPTIAN BACKGROUND OF THE BONDAGE +2609-156975-0004-2371: EVERY ONE WHO IS (TURBULENT->TREBRANT) HAS BEEN FOUND BY (KING MERNEPTAH->GIMER PATH) THE TESTIMONY OF THE OLDEST (BIBLICAL NARRATIVES->PABRICAL NARRATIVE) REGARDING THE SOJOURN OF THE HEBREWS IN EGYPT (IS->HIS) ALSO IN PERFECT ACCORD WITH THE (PICTURE->PITCHER) WHICH (THE->IT) CONTEMPORARY EGYPTIAN INSCRIPTIONS GIVE (OF THE->THIS) PERIOD +2609-156975-0005-2372: THE ABSENCE OF DETAILED (REFERENCE TO->REFUCER) THE HEBREWS IS THEREFORE PERFECTLY NATURAL +2609-156975-0006-2373: IT SEEMS PROBABLE THAT NOT ALL BUT ONLY PART (OF->*) THE TRIBES WHICH (ULTIMATELY COALESCED->ULTIMATE COLLETS) INTO THE HEBREW NATION FOUND THEIR WAY TO EGYPT +2609-156975-0007-2374: THE STORIES REGARDING JOSEPH (THE->THEIR) TRADITIONAL (FATHER OF EPHRAIM AND MANASSEH IMPLY->FOUNDER THAT FROM IN MANETTE SE INCLINE) THAT (THESE->THE) STRONG CENTRAL TRIBES POSSIBLY TOGETHER WITH (THE->A) SOUTHERN (TRIBES->TRINES) OF BENJAMIN AND JUDAH WERE THE CHIEF ACTORS (IN THIS->*) OPENING SCENE IN (ISRAEL'S HISTORY->ISRA'S) +2609-156975-0008-2375: THE (BIBLICAL->BIBOCO) NARRATIVES APPARENTLY (DISAGREE REGARDING->DISAGREED GUARDING) THE DURATION OF THE SOJOURN IN EGYPT +2609-156975-0009-2376: THE LATER (TRADITIONS TEND TO EXTEND->JUDICINES INTEREST IN) THE PERIOD +2609-156975-0010-2377: (HERE->YOU) WERE FOUND (SEVERAL INSCRIPTIONS BEARING->SEVEREIGN SCRIPS AND SPARED) THE EGYPTIAN NAME OF THE CITY (P ATUM HOUSE->PATUM OUTS) OF 
THE GOD (ATUM->ATOM) +2609-156975-0011-2378: A CONTEMPORARY INSCRIPTION (ALSO->OUT THE) STATES THAT HE (FOUNDED->FOUND A) NEAR (PITHUM->PITTHAM) THE HOUSE OF (RAMSES->RANSES) A CITY WITH (A->THE) ROYAL RESIDENCE (AND->IN) TEMPLES +2609-156975-0012-2379: THAT THE HEBREWS WERE (RESTIVE->WRETS OF) UNDER (THIS TYRANNY->THE STERN) WAS (NATURAL->NATURALLY) INEVITABLE +2609-156975-0013-2380: WAS ANY OTHER PROCEDURE TO BE (EXPECTED->INSPECTRE) FROM (A DESPOTIC RULER->IT THAT SPOTIC ROAR) OF THAT LAND AND DAY +2609-156975-0014-2381: THE MAKING OF (A->THE) LOYAL PATRIOT +2609-156975-0015-2382: THE STORY OF MOSES BIRTH (AND->AN) EARLY CHILDHOOD IS ONE OF THE MOST INTERESTING CHAPTERS IN BIBLICAL HISTORY +2609-156975-0016-2383: (WAS MOSES JUSTIFIED IN RESISTING THE EGYPTIAN TASKMASTER->WE'S MOVES IT'S JEST FUN AND RESISTIN DE GYPTIAN TAX MASTER) +2609-156975-0017-2384: (IS PEONAGE->HIS PINIONS) ALWAYS (DISASTROUS->DISASTRATES) NOT (ONLY->OWING) TO ITS VICTIMS BUT ALSO TO THE GOVERNMENT IMPOSING IT +2609-156975-0018-2385: NATURALLY HE WENT TO THE LAND (OF MIDIAN->A MILLION) +2609-156975-0019-2386: THE WILDERNESS TO THE EAST OF EGYPT (HAD->AND) FOR CENTURIES BEEN THE (PLACE->PLATE) OF (REFUGE FOR->REPUGUE) EGYPTIAN FUGITIVES +2609-156975-0020-2387: FROM ABOUT TWO THOUSAND (B->*) C (*->B) +2609-156975-0021-2388: ON THE BORDERS OF THE WILDERNESS HE FOUND CERTAIN BEDOUIN (HERDSMEN->HERDSMAN) WHO RECEIVED HIM (HOSPITABLY->HALF SPITABLY) +2609-156975-0022-2389: THESE (SAND WANDERERS->SAN JUANS) SENT HIM ON FROM (TRIBE->TIME) TO TRIBE UNTIL HE REACHED THE LAND OF (KEDEM EAST->KIDDAM EACH) OF THE DEAD SEA WHERE HE REMAINED FOR (A->*) YEAR AND A HALF +2609-156975-0023-2390: LATER HE FOUND HIS WAY TO THE COURT OF ONE OF THE LOCAL KINGS (IN->AND) CENTRAL PALESTINE WHERE HE MARRIED AND (BECAME->MICHANG) IN (*->THE) TIME A PROSPEROUS LOCAL PRINCE +2609-156975-0024-2391: THE SCHOOL (OF->AS) THE (WILDERNESS->WEARINESS) +2609-156975-0025-2392: THE STORY OF MOSES IS IN MANY WAYS CLOSELY PARALLEL (TO THAT OF SINUHIT->DID NOT ASSUME IT) +2609-156975-0026-2393: THE PRIEST (OF->*) THE (SUB TRIBE->SUBTRINE) OF THE KENITES RECEIVED HIM INTO HIS HOME AND GAVE HIM HIS DAUGHTER IN MARRIAGE +2609-156975-0027-2394: NOTE THE (CHARACTERISTIC ORIENTAL IDEA->CARE OF A RESTAKE ORIENTOUINE) OF (MARRIAGE->MARES) +2609-156975-0028-2395: HERE MOSES LEARNED (THE->THAT) LESSONS THAT WERE ESSENTIAL FOR HIS (TRAINING->TRAINED IN) AS (THE->A) LEADER AND DELIVERER OF HIS PEOPLE +2609-156975-0029-2396: AFTER THE CAPTURE OF JERICHO CERTAIN OF THEM WENT UP WITH (THE SOUTHERN TRIBES TO->A SUDDEN TRIUMPHS SHE) CONQUER SOUTHERN PALESTINE +2609-156975-0030-2397: MANY MODERN (SCHOLARS DRAW->STARS DRAWN) THE CONCLUSION FROM THE BIBLICAL NARRATIVE THAT IT WAS FROM THE (KENITES->KENNITES) THAT MOSES FIRST LEARNED OF (YAHWEH->YONWAY) OR AS THE DISTINCTIVE NAME OF ISRAEL'S (GOD->GONE) WAS TRANSLATED BY LATER JEWISH (SCRIBES->GRIBES) JEHOVAH +2609-156975-0031-2398: DO THE (EARLIEST HEBREW->ARIOSABOO) TRADITIONS IMPLY THAT (THE ANCESTORS->INSECTORS) OF THE (ISRAELITES->ISRAIT) WERE (WORSHIPPERS->WORSHIPPED) OF JEHOVAH +2609-156975-0032-2399: THE (TITLE->TOWN) OF HIS (FATHER IN LAW->FUND AND ALL) IMPLIES (THAT->AT) THIS (PRIEST->PREACH) MINISTERED AT SOME (WILDERNESS->LEARNAN) SANCTUARY +2609-156975-0033-2400: (MOSES->ROSES) IN THE HOME OF THE (MIDIAN PRIEST->MENDIAN PRIESTS) WAS BROUGHT INTO DIRECT (AND CONSTANT->INCONSTANT) CONTACT WITH THE JEHOVAH WORSHIP +2609-156975-0034-2401: THE CRUEL FATE OF (HIS->THIS) PEOPLE AND THE PAINFUL EXPERIENCE IN EGYPT THAT 
HAD DRIVEN HIM INTO THE WILDERNESS PREPARED HIS MIND TO RECEIVE THIS TRAINING +2609-156975-0035-2402: HIS (QUEST->PRICE) WAS FOR (A JUST->JETS) AND STRONG (GOD->ARM) ABLE TO DELIVER THE OPPRESSED +2609-156975-0036-2403: THE (WILDERNESS->WEDDING IT) WITH ITS LURKING FOES AND THE EVER PRESENT DREAD OF HUNGER AND THIRST (DEEPENED HIS->DEEP INTO) SENSE OF NEED AND OF DEPENDENCE UPON A POWER ABLE TO GUIDE THE (DESTINIES->DEBTS NEEDS) OF MEN +2609-156975-0037-2404: THE PEASANTS OF THE (VAST ANTOLIAN->VATS INTONING) PLAIN (IN->OF) CENTRAL (ASIA->AS A) MINOR (STILL->SO) CALL EVERY LIFE (GIVING->GIVEN) SPRING GOD HATH GIVEN +2609-156975-0038-2405: (THE CONSTANT->THEY CAN'T SENT THE) NECESSITY (OF->A) MEETING THE DANGERS OF THE WILDERNESS AND (OF->THE) DEFENDING THE FLOCKS (ENTRUSTED TO MOSES->AND TRITES OF JEMOSIS) CARE DEVELOPED HIS COURAGE AND POWER OF (LEADERSHIP->LEISURESHIP) AND ACTION +2609-157645-0000-2352: EVIDENTLY THE INTENTION (WAS TO MAKE->WHICH MADE) THINGS (PLEASANT->PRESENT) FOR THE ROYAL (FOE OF->FOLK A) TOBACCO DURING HIS VISIT +2609-157645-0001-2353: THE (PROHIBITION IN->PROBES AND) THE (REGULATION->REGULATING) QUOTED (OF->HER) SMOKING (IN->AND) SAINT MARY'S CHURCH REFERRED (IT->*) MAY BE NOTED TO THE ACT WHICH WAS HELD THEREIN +2609-157645-0002-2354: SOMETIMES TOBACCO (WAS->IS) USED IN CHURCH FOR (DISINFECTING OR DEODORIZING->DISINFECT AN OLD DEAL ARISING) PURPOSES +2609-157645-0003-2355: (BLACKBURN ARCHBISHOP->BLACKBIRD ARCHBISH) OF YORK WAS A GREAT SMOKER +2609-157645-0004-2356: ON ONE OCCASION HE WAS AT SAINT MARY'S CHURCH (NOTTINGHAM->NINE IN HAM) FOR A (CONFIRMATION->CONFIRMATESON) +2609-157645-0005-2357: ANOTHER EIGHTEENTH CENTURY CLERICAL WORTHY THE FAMOUS (DOCTOR PARR->DOCTRIPOIRE) AN INVETERATE SMOKER WAS ACCUSTOMED TO DO (WHAT MISTER DISNEY->AT MIDSER DIDN'T) PREVENTED (ARCHBISHOP->ARCHBISH OF) BLACKBURN FROM DOING HE SMOKED IN HIS (VESTRY->VETERY) AT HATTON +2609-157645-0006-2358: (PARR WAS->PAR WITH) SUCH A (CONTINUAL SMOKER->CONTINUOUS MOCHER) THAT (ANYONE->ANY ONE) WHO CAME INTO HIS COMPANY (IF->*) HE HAD NEVER SMOKED BEFORE (HAD->AND) TO (LEARN->LEARNED) THE USE OF A PIPE AS A MEANS OF SELF DEFENCE +2609-157645-0007-2359: ONE SUNDAY SAYS MISTER (DITCHFIELD->DITZFIELD) HE (HAD AN EXTRA->ENDING THAT SIR) PIPE AND (JOSHUA->JOHNSHAW) THE CLERK TOLD HIM THAT THE PEOPLE WERE GETTING IMPATIENT +2609-157645-0008-2360: (LET->THEM TO) THEM SING (ANOTHER PSALM SAID->AND NOW THE PSALMS SAKE) THE CURATE +2609-157645-0009-2361: THEY HAVE (SIR REPLIED THE->SERVED PARTLY) CLERK +2609-157645-0010-2362: THEN LET THEM SING THE HUNDRED AND NINETEENTH REPLIED THE CURATE +2609-157645-0011-2363: SIX ARMS THE (NEAREST->NOTCH) WITHIN REACH PRESENTED WITH AN OBEDIENT START (*->AND) AS MANY TOBACCO (POUCHES->PIUCHES) TO THE (MAN->MEN) OF OFFICE +2609-157645-0012-2364: DAVID (DEANS HOWEVER->DEAN SAMBERT) DID NOT AT ALL (APPROVE->IMPROVE) THIS IRREVERENCE +2609-157645-0013-2365: (GOING TO->GO INTO) CHURCH (AT HAYES IN THOSE->THAT HATE AND THUS) DAYS (MUST->MISS) HAVE BEEN (QUITE AN EXCITING EXPERIENCE->ACQUAINTED AND THE SIGNING SPIRITS) +2609-157645-0014-2366: WHEN THESE MEN IN THE COURSE OF MY REMONSTRANCE FOUND (*->OUT) THAT (I->*) WAS NOT GOING TO CONTINUE THE (CUSTOM->COTTOM) THEY NO LONGER CARED TO BE COMMUNICANTS +2609-169640-0000-2406: (PROAS->PERHAPS) IN THAT QUARTER (WERE USUALLY DISTRUSTED->WHERE USUAL DISTRUDGED) BY (SHIPS IT->THE STEPS AT) IS TRUE BUT THE (SEA IS FULL OF->SEAS FOR) THEM (AND->*) FAR MORE (ARE->OUR) INNOCENT THAN ARE GUILTY OF ANY ACTS OF VIOLENCE 
+2609-169640-0001-2407: (AN HOUR AFTER->NOW I OUTDREW) THE SUN HAD SET THE WIND FELL (TO A->TURNED) LIGHT AIR (THAT JUST->DAT GITS) KEPT STEERAGE WAY ON THE SHIP +2609-169640-0002-2408: FORTUNATELY THE JOHN WAS NOT ONLY (FAST->FAT) BUT (SHE->SEA) MINDED HER (HELM->HAIL) AS (A LIGHT FOOTED->THE LIGHTFOOTED) GIRL (TURNS->TURNED) IN (A->THE) LIVELY DANCE +2609-169640-0003-2409: I NEVER WAS IN A BETTER (STEERING->STIRRING) SHIP (MOST ESPECIALLY IN MODERATE->POSEY SPENT FREE AND MINDER IT) WEATHER +2609-169640-0004-2410: MISTER MARBLE HE (I DO->OUGHT TO) BELIEVE WAS FAIRLY SNOOZING ON THE (HEN COOPS->INCOUPS) BEING LIKE THE (SAILS->SAILORS) AS ONE MIGHT SAY (BARELY ASLEEP->VARIOUS LEAP) +2609-169640-0005-2411: AT THAT MOMENT I (HEARD->KNOW) A NOISE (ONE->WHEN) FAMILIAR TO (SEAMEN->SEE MEN) THAT OF AN OAR (FALLING->FOLLOWING) IN A BOAT +2609-169640-0006-2412: (I SANG OUT SAIL->AS IN YET SO) HO AND CLOSE (ABOARD->ABROAD) +2609-169640-0007-2413: HE WAS (TOO MUCH->SHIMMERTS) OF (A->THE) SEAMAN TO REQUIRE (A->*) SECOND LOOK IN ORDER TO ASCERTAIN (WHAT->BUT) WAS TO BE DONE +2609-169640-0008-2414: ALTHOUGH THEY WENT THREE FEET TO OUR TWO THIS GAVE US A MOMENT OF BREATHING TIME +2609-169640-0009-2415: AS OUR (SHEETS->SEATS) WERE ALL FLYING FORWARD AND REMAINED SO FOR A FEW MINUTES IT GAVE ME (*->A) LEISURE TO (LOOK->WORK) ABOUT +2609-169640-0010-2416: I SOON SAW BOTH (PROAS->PROTS) AND (GLAD ENOUGH->GRINDING UP) WAS I TO PERCEIVE THAT THEY HAD NOT APPROACHED MATERIALLY (NEARER->IN NEW YORK) +2609-169640-0011-2417: (MISTER KITE OBSERVED->BISHO DESERVED) THIS ALSO (AND REMARKED->IN REMARK) THAT OUR MOVEMENTS HAD BEEN SO PROMPT AS TO TAKE THE (RASCALS->RASCUE WAS) ABACK +2609-169640-0012-2418: A (BREATHLESS STILLNESS->BREATH WHICH STILL IN IT) SUCCEEDED +2609-169640-0013-2419: THE (PROAS->PROITS) DID NOT ALTER (THEIR->THE) COURSE BUT NEARED (US->AT ITS) FAST +2609-169640-0014-2420: I HEARD THE (RATTLING->RIDERING) OF THE BOARDING (PIKES->PIPES) TOO AS THEY WERE CUT ADRIFT FROM THE SPANKER BOOM AND FELL UPON THE DECKS +2609-169640-0015-2421: (KITE->KIND) WENT (AFT->APT) AND RETURNED WITH THREE OR FOUR (MUSKETS->MASKETS) AND AS MANY PIKES +2609-169640-0016-2422: THE STILLNESS THAT (REIGNED->RAINED) ON BOTH SIDES WAS LIKE THAT OF DEATH +2609-169640-0017-2423: (THE->*) JOHN BEHAVED BEAUTIFULLY (AND->HE) CAME ROUND LIKE A TOP +2609-169640-0018-2424: THE QUESTION WAS NOW WHETHER WE COULD PASS (THEM->AND) OR NOT BEFORE THEY GOT NEAR ENOUGH TO (GRAPPLE->GRANTEL) +2609-169640-0019-2425: THE CAPTAIN BEHAVED (PERFECTLY->PERFECTUALLY) WELL IN (THIS->ITS) CRITICAL INSTANT COMMANDING A DEAD (SILENCE AND->SCIENCE IN) THE (CLOSEST ATTENTION->CITIZEN) TO HIS ORDERS +2609-169640-0020-2426: (NOT A SOUL->NOW SO) ON BOARD THE JOHN WAS HURT +2609-169640-0021-2427: (ON OUR SIDE->WHEN OURSAN) WE GAVE THE (GENTLEMEN->GENTLEMAN) THE FOUR (SIXES TWO AT->SIX TO OUT) THE (NEAREST->NEWS) AND TWO AT THE (STERN MOST PROA->STERNMOST PRO) WHICH WAS STILL NEAR A CABLE'S LENGTH (DISTANT->OF ITS END) +2609-169640-0022-2428: THEY WERE (LIKE THE YELLS->NIGHTLY YEARS) OF FIENDS IN (ANGUISH->ANGLES) +2609-169640-0023-2429: (I DOUBT->AND OUT) IF WE TOUCHED (A MAN->THE MAIN) IN THE (NEAREST PROA->NURTURE) +2609-169640-0024-2430: IN (THIS->THAT) STATE THE SHIP PASSED AHEAD (ALL HER->ON FOR A) CANVAS (BEING FULL->BEEN FOR) LEAVING THE (PROA MOTIONLESS->PROCEED) IN HER WAKE +3005-163389-0000-1108: THEY SWARMED UP IN FRONT OF SHERBURN'S PALINGS AS THICK AS THEY COULD JAM TOGETHER AND YOU COULDN'T HEAR YOURSELF THINK FOR THE NOISE 
+3005-163389-0001-1109: SOME SUNG OUT TEAR DOWN THE FENCE TEAR DOWN THE FENCE +3005-163389-0002-1110: THE STILLNESS WAS AWFUL CREEPY AND UNCOMFORTABLE +3005-163389-0003-1111: SHERBURN RUN HIS EYE SLOW ALONG THE CROWD AND WHEREVER IT STRUCK THE PEOPLE TRIED A LITTLE TO (OUT GAZE->OUTGAZE) HIM BUT THEY COULDN'T THEY DROPPED THEIR EYES AND LOOKED SNEAKY +3005-163389-0004-1112: THE AVERAGE MAN'S A COWARD +3005-163389-0005-1113: BECAUSE THEY'RE AFRAID THE MAN'S FRIENDS WILL SHOOT THEM IN THE BACK IN THE (DARKAND->DARK AND) IT'S JUST WHAT THEY WOULD DO +3005-163389-0006-1114: SO THEY ALWAYS ACQUIT AND THEN A MAN GOES IN THE NIGHT WITH A HUNDRED (MASKED->MASSED) COWARDS AT HIS BACK AND LYNCHES THE RASCAL +3005-163389-0007-1115: YOU DIDN'T WANT TO COME +3005-163389-0008-1116: BUT A MOB WITHOUT ANY MAN AT THE HEAD OF IT IS BENEATH PITIFULNESS +3005-163389-0009-1117: NOW (LEAVE->LE) AND TAKE YOUR HALF A MAN WITH YOU TOSSING HIS GUN UP ACROSS HIS LEFT ARM AND COCKING IT WHEN HE SAYS THIS +3005-163389-0010-1118: THE CROWD WASHED BACK SUDDEN AND THEN BROKE ALL APART AND WENT TEARING OFF EVERY WHICH WAY AND BUCK (HARKNESS->HARKINS) HE (HEELED->HEALED) IT AFTER THEM LOOKING TOLERABLE CHEAP +3005-163389-0011-1119: (YOU->HE) CAN'T BE TOO CAREFUL +3005-163389-0012-1120: THEY ARGUED AND TRIED TO KEEP HIM OUT BUT HE WOULDN'T LISTEN AND (THE->A) WHOLE SHOW COME TO A (STANDSTILL->FAN STILL) +3005-163389-0013-1121: AND ONE OR TWO WOMEN (BEGUN->BEGAN) TO SCREAM +3005-163389-0014-1122: SO THEN (THE RINGMASTER->A RING MASTER) HE MADE A LITTLE SPEECH AND SAID HE HOPED THERE WOULDN'T BE NO DISTURBANCE AND IF THE MAN WOULD PROMISE HE WOULDN'T MAKE NO MORE TROUBLE HE WOULD LET HIM RIDE IF HE THOUGHT HE COULD STAY ON THE HORSE +3005-163389-0015-1123: IT WARN'T FUNNY TO ME THOUGH I WAS ALL OF A TREMBLE TO SEE HIS DANGER +3005-163389-0016-1124: AND (THE->A) HORSE A GOING LIKE A HOUSE (AFIRE->AFAR) TOO +3005-163389-0017-1125: HE (SHED->SHARED) THEM SO THICK THEY KIND OF CLOGGED UP THE AIR AND ALTOGETHER HE SHED SEVENTEEN SUITS +3005-163389-0018-1126: WHY IT WAS ONE OF HIS OWN MEN +3005-163390-0000-1185: (ANDBUT->AND BUT) NEVER MIND THE REST OF HIS OUTFIT IT WAS JUST WILD BUT IT WAS AWFUL FUNNY +3005-163390-0001-1186: THE PEOPLE MOST KILLED THEMSELVES LAUGHING AND WHEN THE KING GOT DONE CAPERING AND CAPERED OFF BEHIND THE SCENES THEY ROARED AND CLAPPED AND STORMED AND (HAW HAWED->HAWHAT) TILL HE COME BACK AND DONE IT OVER AGAIN AND AFTER THAT THEY MADE HIM DO IT ANOTHER TIME +3005-163390-0002-1187: TWENTY PEOPLE (SINGS->SANGS) OUT +3005-163390-0003-1188: THE DUKE SAYS YES +3005-163390-0004-1189: EVERYBODY SINGS OUT SOLD +3005-163390-0005-1190: BUT A BIG FINE LOOKING MAN JUMPS UP ON A BENCH AND SHOUTS HOLD ON +3005-163390-0006-1191: JUST A WORD GENTLEMEN THEY STOPPED TO LISTEN +3005-163390-0007-1192: WHAT WE WANT IS TO GO OUT OF HERE QUIET AND TALK THIS SHOW UP AND SELL THE REST (OF->O) THE TOWN +3005-163390-0008-1193: YOU (BET->BADE) IT IS THE (JEDGE->JUDGE) IS RIGHT EVERYBODY SINGS OUT +3005-163390-0009-1194: WE STRUCK THE RAFT AT THE SAME TIME AND IN LESS THAN TWO SECONDS WE WAS GLIDING DOWN STREAM ALL DARK AND STILL AND EDGING TOWARDS THE MIDDLE OF THE RIVER NOBODY SAYING A WORD +3005-163390-0010-1195: WE NEVER SHOWED A LIGHT TILL WE WAS ABOUT TEN MILE BELOW THE VILLAGE +3005-163390-0011-1196: GREENHORNS (FLATHEADS->FLAT HEADS) +3005-163390-0012-1197: NO I (SAYS->SAY IS) IT DON'T +3005-163390-0013-1198: WELL IT DON'T BECAUSE IT'S IN (THE->DE) BREED I RECKON THEY'RE ALL ALIKE +3005-163390-0014-1199: WELL THAT'S WHAT (I'M 
A->I MUST) SAYING ALL KINGS IS MOSTLY (RAPSCALLIONS->RATSKAGGS) AS FUR AS I CAN MAKE OUT IS DAT SO +3005-163390-0015-1200: AND LOOK AT CHARLES SECOND AND LOUIS FOURTEEN AND LOUIS FIFTEEN AND JAMES SECOND AND EDWARD SECOND AND RICHARD (THIRD->*) AND FORTY MORE BESIDES ALL THEM SAXON (HEPTARCHIES->HEPTARK IS) THAT USED TO RIP AROUND SO IN OLD TIMES AND (RAISE CAIN->RAISED GAME) +3005-163390-0016-1201: MY YOU OUGHT TO (SEEN->SEE AN) OLD HENRY THE EIGHT WHEN HE WAS IN BLOOM HE WAS A BLOSSOM +3005-163390-0017-1202: RING UP FAIR (ROSAMUN->ROSAMOND) +3005-163390-0018-1203: WELL HENRY HE TAKES A NOTION HE WANTS TO (GET->GIT) UP SOME TROUBLE WITH THIS COUNTRY +3005-163390-0019-1204: S'POSE HE OPENED HIS (MOUTHWHAT->MOUTH WHAT) THEN +3005-163390-0020-1205: ALL I SAY IS KINGS IS KINGS (AND YOU->AN YE) GOT TO MAKE ALLOWANCES +3005-163390-0021-1206: TAKE THEM ALL AROUND THEY'RE A MIGHTY ORNERY LOT IT'S THE WAY THEY'RE RAISED +3005-163390-0022-1207: WELL THEY ALL DO JIM +3005-163390-0023-1208: NOW (DE DUKE->TO DO) HE'S A (TOLERBLE LIKELY->TOLERABLE LIKE THE) MAN IN SOME WAYS +3005-163390-0024-1209: THIS ONE'S A (MIDDLING->MIDDLIN) HARD LOT FOR A (DUKE->DUPE) +3005-163390-0025-1210: WHEN I WAKED UP (JUST->JEST) AT DAYBREAK HE WAS SITTING THERE WITH HIS HEAD DOWN BETWIXT HIS KNEES MOANING AND MOURNING TO HIMSELF +3005-163390-0026-1211: IT DON'T SEEM NATURAL BUT I RECKON IT'S SO +3005-163390-0027-1212: HE WAS OFTEN MOANING (AND->IN) MOURNING THAT WAY NIGHTS WHEN HE JUDGED I WAS ASLEEP AND SAYING PO LITTLE (LIZABETH->LISBETH) +3005-163390-0028-1213: (DOAN->DON'T) YOU HEAR ME (SHET->SHUT) DE DO +3005-163390-0029-1214: I LAY I MAKE YOU MINE +3005-163390-0030-1215: (JIS AS->*) LOUD AS I COULD YELL +3005-163391-0000-1127: WHICH WAS SOUND ENOUGH JUDGMENT BUT YOU TAKE THE AVERAGE MAN AND HE WOULDN'T WAIT FOR HIM TO HOWL +3005-163391-0001-1128: THE KING'S (DUDS->DERDS) WAS ALL BLACK AND HE DID LOOK REAL SWELL (AND->AN) STARCHY +3005-163391-0002-1129: WHY BEFORE HE LOOKED LIKE THE ORNERIEST OLD RIP THAT EVER WAS BUT NOW WHEN HE'D TAKE OFF HIS NEW WHITE BEAVER AND MAKE A BOW AND DO A SMILE HE LOOKED THAT GRAND AND GOOD AND PIOUS THAT YOU'D SAY (HE HAD->HE'D) WALKED RIGHT OUT OF THE ARK AND MAYBE WAS OLD LEVITICUS HIMSELF +3005-163391-0003-1130: JIM CLEANED UP THE CANOE AND I GOT MY PADDLE READY +3005-163391-0004-1131: (WHER->WERE) YOU BOUND FOR YOUNG MAN +3005-163391-0005-1132: (GIT->GET) ABOARD SAYS THE KING +3005-163391-0006-1133: I DONE SO (AND->AN) THEN WE ALL THREE STARTED ON AGAIN +3005-163391-0007-1134: THE YOUNG CHAP WAS MIGHTY THANKFUL SAID IT WAS TOUGH WORK TOTING HIS BAGGAGE SUCH WEATHER +3005-163391-0008-1135: (HE ASKED->THE AIR) THE KING WHERE HE WAS GOING AND THE KING TOLD HIM HE'D COME DOWN (THE->A) RIVER AND LANDED AT THE OTHER VILLAGE THIS MORNING AND NOW HE WAS GOING UP A FEW MILE TO SEE AN OLD FRIEND ON A FARM UP THERE THE YOUNG FELLOW SAYS +3005-163391-0009-1136: BUT THEN I SAYS AGAIN NO I RECKON IT AIN'T HIM OR ELSE HE WOULDN'T BE (PADDLING->PADDLIN) UP THE RIVER YOU AIN'T HIM ARE YOU +3005-163391-0010-1137: NO MY NAME'S (BLODGETT ELEXANDER BLODGETT->BLODGE IT ALEXANDER BLOT) REVEREND (ELEXANDER BLODGETT->ALEXANDER BLODGET) I (S'POSE->SUPPOSE) I MUST SAY AS I'M ONE (O->OF) THE (LORD'S->LARGE) POOR SERVANTS +3005-163391-0011-1138: YOU SEE HE WAS PRETTY OLD AND (GEORGE'S G'YIRLS->GEORGE IS GUY EARLS) WAS TOO YOUNG TO BE MUCH COMPANY FOR HIM EXCEPT MARY JANE THE RED HEADED ONE AND SO HE WAS KINDER LONESOME AFTER GEORGE AND HIS WIFE DIED AND DIDN'T SEEM TO CARE MUCH TO LIVE +3005-163391-0012-1139: 
TOO BAD TOO BAD HE COULDN'T (A->HAVE) LIVED TO SEE HIS (BROTHERS->BROTHER'S) POOR SOUL +3005-163391-0013-1140: I'M (GOING->GOIN) IN A SHIP NEXT WEDNESDAY FOR (RYO JANEERO->RIO GENERO) WHERE MY UNCLE (LIVES->IS) +3005-163391-0014-1141: BUT IT'LL BE LOVELY (WISHT->WISHED) I WAS A (GOING->GOIN) +3005-163391-0015-1142: MARY JANE'S NINETEEN SUSAN'S FIFTEEN AND JOANNA'S ABOUT (FOURTEENTHAT'S->FOURTEEN THAT'S) THE ONE THAT GIVES HERSELF TO GOOD WORKS AND HAS A (HARE->HAIR) LIP POOR THINGS +3005-163391-0016-1143: WELL THEY COULD BE WORSE OFF +3005-163391-0017-1144: (OLD->O) PETER HAD FRIENDS AND THEY AIN'T GOING TO LET THEM COME TO NO HARM +3005-163391-0018-1145: BLAMED IF HE DIDN'T (INQUIRE->ACQUIRE) ABOUT EVERYBODY AND EVERYTHING (IN->AND) THAT BLESSED TOWN AND ALL ABOUT THE (WILKSES->WILKES) AND ABOUT PETER'S (BUSINESSWHICH->BUSINESS WHICH) WAS A TANNER AND ABOUT (GEORGE'SWHICH->GEORGE'S WHICH) WAS A CARPENTER AND ABOUT (HARVEY'SWHICH->HARVEST WHICH) WAS A DISSENTERING MINISTER AND SO ON AND SO ON THEN HE SAYS +3005-163391-0019-1146: WHEN (THEY'RE->HER) DEEP THEY WON'T STOP FOR A HAIL +3005-163391-0020-1147: WAS PETER (WILKS->WILKES) WELL OFF +3005-163391-0021-1148: WHEN (WE STRUCK->WASTED UP) THE BOAT SHE WAS ABOUT DONE LOADING AND PRETTY SOON SHE GOT OFF +3005-163391-0022-1149: NOW HUSTLE BACK RIGHT OFF AND FETCH THE DUKE UP HERE AND THE NEW CARPET BAGS +3005-163391-0023-1150: SO THEN THEY WAITED FOR A STEAMBOAT +3005-163391-0024-1151: (BUT->THAT) THE KING WAS (CA'M->CALM) HE SAYS +3005-163391-0025-1152: THEY (GIVE->GAVE) A GLANCE AT ONE ANOTHER AND NODDED THEIR HEADS AS MUCH AS TO SAY (WHAT D I->WOULD THEY) TELL YOU +3005-163391-0026-1153: THEN ONE OF THEM SAYS KIND OF SOFT AND GENTLE +3005-163399-0000-1154: PHELPS (WAS->IS) ONE OF THESE LITTLE ONE HORSE COTTON PLANTATIONS AND THEY ALL LOOK ALIKE +3005-163399-0001-1155: I WENT AROUND AND (CLUMB->CLIMB) OVER THE BACK STILE BY THE ASH HOPPER AND STARTED FOR THE KITCHEN +3005-163399-0002-1156: (I->AH) OUT WITH A (YES'M BEFORE->YES AND FORE) I THOUGHT +3005-163399-0003-1157: SO THEN SHE STARTED FOR THE HOUSE LEADING ME BY THE HAND AND THE CHILDREN TAGGING AFTER +3005-163399-0004-1158: WHEN WE GOT THERE SHE SET ME DOWN IN A SPLIT (BOTTOMED->BOTTOM) CHAIR AND SET HERSELF DOWN ON A LITTLE LOW STOOL IN FRONT OF ME HOLDING BOTH OF MY HANDS AND SAYS +3005-163399-0005-1159: WELL IT'S LUCKY BECAUSE SOMETIMES PEOPLE DO GET HURT +3005-163399-0006-1160: AND I THINK HE DIED AFTERWARDS HE WAS A BAPTIST +3005-163399-0007-1161: YES IT WAS (MORTIFICATIONTHAT->MORTIFICATION THAT) WAS IT +3005-163399-0008-1162: YOUR UNCLE'S BEEN UP TO THE TOWN EVERY DAY TO FETCH YOU +3005-163399-0009-1163: YOU MUST (A MET->AMERGE) HIM ON THE ROAD DIDN'T YOU OLDISH MAN WITH A +3005-163399-0010-1164: WHY CHILD (IT LL->IT'LL) BE STOLE +3005-163399-0011-1165: IT WAS (KINDER->KIND OR) THIN (ICE->EYES) BUT I SAYS +3005-163399-0012-1166: I HAD MY MIND ON THE CHILDREN ALL THE TIME I WANTED TO GET THEM OUT TO ONE SIDE AND (PUMP->PUMPED) THEM A LITTLE AND FIND OUT WHO I WAS +3005-163399-0013-1167: PRETTY SOON SHE MADE THE COLD (CHILLS->CHILL) STREAK ALL DOWN MY BACK BECAUSE SHE SAYS +3005-163399-0014-1168: I SEE IT WARN'T A BIT OF USE TO TRY TO GO AHEAD I'D GOT TO THROW UP MY HAND +3005-163399-0015-1169: SO I SAYS TO MYSELF (HERE'S->HERE IS) ANOTHER PLACE WHERE I GOT TO (RESK->REST) THE TRUTH +3005-163399-0016-1170: I OPENED MY MOUTH TO BEGIN BUT SHE GRABBED ME AND HUSTLED ME IN BEHIND THE BED AND SAYS HERE HE COMES +3005-163399-0017-1171: CHILDREN DON'T YOU SAY A WORD 
+3005-163399-0018-1172: I SEE I WAS IN A FIX NOW +3005-163399-0019-1173: MISSUS PHELPS SHE (JUMPS->JUMPED) FOR HIM AND SAYS +3005-163399-0020-1174: HAS HE COME NO SAYS HER HUSBAND +3005-163399-0021-1175: I CAN'T IMAGINE SAYS THE OLD GENTLEMAN AND I MUST SAY IT MAKES ME DREADFUL UNEASY +3005-163399-0022-1176: UNEASY SHE SAYS I'M READY TO GO DISTRACTED +3005-163399-0023-1177: HE MUST (A->HAVE) COME AND YOU'VE MISSED HIM ALONG THE ROAD +3005-163399-0024-1178: OH DON'T DISTRESS ME ANY (MORE'N I'M->MORE NUM) ALREADY DISTRESSED +3005-163399-0025-1179: WHY SILAS LOOK YONDER UP THE ROAD (AIN'T->HAIN'T) THAT SOMEBODY (COMING->COMIN) +3005-163399-0026-1180: THE OLD GENTLEMAN STARED AND SAYS +3005-163399-0027-1181: I HAIN'T NO IDEA WHO IS IT +3005-163399-0028-1182: (IT'S->IS) TOM SAWYER +3005-163399-0029-1183: BEING TOM SAWYER WAS EASY AND COMFORTABLE AND (IT STAYED->ITS STATE) EASY AND COMFORTABLE TILL BY AND BY I HEAR A STEAMBOAT COUGHING ALONG DOWN THE RIVER +3005-163399-0030-1184: THEN I SAYS TO MYSELF S'POSE TOM SAWYER COMES DOWN ON (THAT->MY) BOAT +3080-5032-0000-312: BUT I AM HUGELY PLEASED THAT YOU HAVE SEEN MY LADY +3080-5032-0001-313: I KNEW YOU COULD NOT CHOOSE BUT LIKE HER BUT YET LET ME TELL YOU YOU HAVE SEEN BUT THE WORST OF HER +3080-5032-0002-314: HER CONVERSATION HAS MORE CHARMS THAN CAN BE IN MERE BEAUTY AND (HER->A) HUMOUR AND DISPOSITION WOULD MAKE A DEFORMED PERSON APPEAR LOVELY +3080-5032-0003-315: WHY DID YOU NOT SEND ME THAT NEWS AND A GARLAND +3080-5032-0004-316: (WELL->WHY) THE BEST (ON'T->ON IT) IS (*->THAT) I HAVE A SQUIRE NOW THAT IS AS GOOD AS A KNIGHT +3080-5032-0005-317: IN EARNEST WE HAVE HAD SUCH A SKIRMISH (AND UPON SO->IN A POST OF) FOOLISH AN OCCASION AS I CANNOT TELL WHICH IS (STRANGEST->STRANGERS) +3080-5032-0006-318: ALL THE PEOPLE THAT I HAD EVER IN MY LIFE REFUSED WERE BROUGHT AGAIN UPON THE STAGE LIKE RICHARD THE (THREE S->THIRD) GHOSTS TO REPROACH ME WITHAL (AND->IN) ALL THE KINDNESS HIS DISCOVERIES COULD MAKE I HAD FOR YOU WAS (LAID->LATE) TO MY CHARGE +3080-5032-0007-319: MY BEST QUALITIES IF I HAVE ANY THAT ARE GOOD SERVED BUT FOR AGGRAVATIONS OF MY FAULT AND I WAS ALLOWED TO HAVE WIT AND UNDERSTANDING AND DISCRETION IN OTHER THINGS THAT IT MIGHT APPEAR I HAD NONE IN THIS +3080-5032-0008-320: TIS A STRANGE CHANGE AND I AM VERY SORRY FOR IT BUT I'LL SWEAR I KNOW NOT HOW TO HELP IT +3080-5032-0009-321: MISTER FISH IS (THE->A) SQUIRE OF DAMES AND HAS SO MANY MISTRESSES (THAT->THAN) ANYBODY MAY PRETEND (A->TO) SHARE IN HIM AND BE BELIEVED (BUT->*) THOUGH I HAVE THE HONOUR TO BE HIS NEAR NEIGHBOUR TO SPEAK FREELY I CANNOT BRAG MUCH THAT HE MAKES ANY COURT TO ME AND I KNOW NO YOUNG WOMAN IN THE COUNTRY THAT HE DOES NOT VISIT OFTEN +3080-5032-0010-322: I THINK MY YOUNGEST BROTHER COMES DOWN WITH HIM +3080-5032-0011-323: I CAN NO SOONER GIVE YOU SOME LITTLE HINTS (WHEREABOUTS->WHEREABOUT) THEY LIVE BUT YOU KNOW THEM PRESENTLY AND I MEANT YOU SHOULD BE BEHOLDING TO ME FOR YOUR ACQUAINTANCE +3080-5032-0012-324: BUT IT SEEMS THIS GENTLEMAN IS NOT SO EASY ACCESS BUT YOU MAY ACKNOWLEDGE SOMETHING DUE TO ME IF I INCLINE HIM TO LOOK GRACIOUSLY UPON YOU AND THEREFORE THERE IS NOT MUCH HARM DONE +3080-5032-0013-325: I HAVE MISSED FOUR FITS AND (*->HAVE) HAD BUT FIVE AND HAVE RECOVERED SO MUCH STRENGTH AS MADE ME VENTURE TO MEET YOUR LETTER ON WEDNESDAY A MILE FROM HOME +3080-5032-0014-326: BUT BESIDES I CAN GIVE YOU OTHERS +3080-5032-0015-327: I AM HERE MUCH MORE OUT OF PEOPLE'S WAY THAN IN TOWN WHERE MY (AUNT AND->AUNTS IN) SUCH (AS->HAS) PRETEND AN INTEREST IN ME 
(AND->IN) A POWER OVER ME DO SO PERSECUTE ME (WITH THEIR->MY DEAR) GOOD NATURE (AND->YOU) TAKE IT SO ILL THAT THEY ARE NOT ACCEPTED AS I WOULD LIVE IN A HOLLOW TREE TO AVOID THEM +3080-5032-0016-328: YOU WILL THINK HIM ALTERED AND IF IT BE POSSIBLE MORE MELANCHOLY THAN HE WAS +3080-5032-0017-329: IF MARRIAGE AGREES NO BETTER WITH OTHER PEOPLE THAN IT DOES WITH HIM I SHALL PRAY THAT ALL MY FRIENDS MAY (SCAPE->ESCAPE) IT +3080-5032-0018-330: WELL IN EARNEST IF I WERE A PRINCE THAT LADY SHOULD BE MY MISTRESS BUT I CAN GIVE NO RULE TO ANY ONE ELSE AND PERHAPS THOSE THAT ARE IN NO DANGER OF LOSING THEIR HEARTS TO HER MAY BE INFINITELY TAKEN WITH ONE I SHOULD NOT VALUE (AT->IT) ALL FOR SO SAYS THE JUSTINIAN WISE PROVIDENCE HAS ORDAINED IT THAT BY THEIR DIFFERENT (HUMOURS->HUMANS) EVERYBODY MIGHT FIND SOMETHING TO PLEASE THEMSELVES WITHAL WITHOUT ENVYING THEIR NEIGHBOURS +3080-5032-0019-331: THE MATTER IS NOT GREAT FOR I CONFESS I DO NATURALLY HATE THE NOISE AND TALK OF THE WORLD AND SHOULD BE BEST PLEASED NEVER TO BE KNOWN (IN'T->IN) UPON ANY OCCASION WHATSOEVER YET SINCE IT CAN NEVER BE WHOLLY AVOIDED ONE MUST SATISFY ONESELF BY DOING NOTHING THAT ONE NEED CARE WHO KNOWS +3080-5032-0020-332: IF I HAD A PICTURE THAT WERE FIT FOR YOU YOU SHOULD HAVE IT +3080-5032-0021-333: HOW CAN YOU TALK OF DEFYING FORTUNE NOBODY LIVES WITHOUT IT AND THEREFORE WHY SHOULD YOU IMAGINE YOU COULD +3080-5032-0022-334: I KNOW NOT HOW MY BROTHER COMES TO BE SO WELL INFORMED AS YOU SAY BUT I AM CERTAIN HE KNOWS THE UTMOST OF THE INJURIES YOU HAVE RECEIVED FROM HER +3080-5032-0023-335: WE HAVE HAD ANOTHER DEBATE BUT MUCH MORE CALMLY +3080-5032-0024-336: AND BESIDES THERE WAS A TIME WHEN WE OURSELVES WERE INDIFFERENT TO ONE ANOTHER DID I DO SO THEN OR HAVE I LEARNED IT SINCE +3080-5032-0025-337: I HAVE BEEN STUDYING HOW TOM (CHEEKE->CHEEK) MIGHT COME BY HIS INTELLIGENCE AND I (VERILY BELIEVE->VRAIRIB) HE HAS IT FROM MY COUSIN PETERS +3080-5032-0026-338: HOW KINDLY DO I TAKE (THESE->THE) CIVILITIES OF YOUR (FATHER'S->FATHERS) IN EARNEST YOU CANNOT IMAGINE HOW HIS LETTER PLEASED ME +3080-5040-0000-278: WOULD IT WOULD LEAVE ME AND THEN I COULD BELIEVE I SHALL NOT ALWAYS HAVE OCCASION FOR IT +3080-5040-0001-279: MY POOR LADY (VAVASOUR->VAVERASSEUR) IS (CARRIED TO THE->CHARACTERED A) TOWER (AND->IN) HER GREAT BELLY COULD NOT EXCUSE HER BECAUSE SHE WAS ACQUAINTED BY SOMEBODY THAT THERE WAS A PLOT AGAINST THE PROTECTOR (AND->ANNE) DID NOT DISCOVER IT +3080-5040-0002-280: SHE HAS TOLD NOW ALL THAT WAS TOLD HER BUT VOWS SHE WILL NEVER SAY FROM WHENCE SHE HAD IT WE SHALL SEE WHETHER HER RESOLUTIONS ARE AS UNALTERABLE AS THOSE OF MY LADY (TALMASH->THOMMISH) +3080-5040-0003-281: I WONDER HOW SHE BEHAVED HERSELF WHEN SHE WAS MARRIED +3080-5040-0004-282: I NEVER SAW ANY ONE YET THAT DID NOT LOOK SIMPLY AND OUT OF COUNTENANCE NOR EVER KNEW A WEDDING WELL DESIGNED BUT ONE AND THAT WAS OF TWO PERSONS WHO (HAD->AT) TIME ENOUGH I CONFESS TO CONTRIVE IT AND NOBODY TO PLEASE (IN'T->IN) BUT THEMSELVES +3080-5040-0005-283: THE TRUTH IS I COULD NOT ENDURE TO BE MISSUS BRIDE IN A PUBLIC WEDDING TO BE MADE THE HAPPIEST PERSON ON EARTH +3080-5040-0006-284: DO NOT TAKE IT ILL FOR I WOULD ENDURE IT IF I COULD RATHER THAN FAIL BUT IN EARNEST I DO NOT THINK IT WERE POSSIBLE FOR ME +3080-5040-0007-285: YET IN EARNEST YOUR FATHER WILL NOT FIND MY BROTHER PEYTON WANTING IN CIVILITY THOUGH HE IS NOT A MAN OF MUCH COMPLIMENT UNLESS IT BE IN HIS (LETTERS->LETTER) TO ME (NOR->NO) AN UNREASONABLE PERSON IN ANYTHING SO HE WILL ALLOW HIM OUT OF HIS KINDNESS TO 
HIS WIFE TO SET A HIGHER VALUE UPON HER SISTER THAN SHE DESERVES +3080-5040-0008-286: MY AUNT TOLD ME NO LONGER (AGONE THAN->A GONDON) YESTERDAY THAT I WAS THE MOST WILFUL WOMAN THAT EVER SHE KNEW AND HAD AN OBSTINACY OF SPIRIT NOTHING COULD OVERCOME TAKE HEED +3080-5040-0009-287: YOU SEE I GIVE YOU FAIR WARNING +3080-5040-0010-288: BY THE NEXT I SHALL BE GONE INTO KENT AND MY OTHER JOURNEY IS LAID ASIDE WHICH I AM NOT DISPLEASED AT BECAUSE IT WOULD HAVE BROKEN OUR INTERCOURSE VERY MUCH +3080-5040-0011-289: HERE ARE SOME VERSES OF (COWLEY'S->CARLIS) TELL ME HOW YOU LIKE THEM +3080-5040-0012-290: I TOLD YOU IN MY LAST THAT MY (SUFFOLK->SUFFOLD) JOURNEY WAS LAID ASIDE AND THAT INTO KENT HASTENED +3080-5040-0013-291: IF I DROWN BY THE WAY THIS WILL BE MY LAST LETTER AND LIKE A WILL I BEQUEATH ALL MY KINDNESS TO YOU IN IT WITH A CHARGE NEVER TO BESTOW (IT->AT) ALL UPON ANOTHER MISTRESS LEST MY GHOST RISE AGAIN AND HAUNT YOU +3080-5040-0014-292: INDEED I LIKE HIM EXTREMELY AND HE IS COMMENDED TO ME BY PEOPLE THAT KNOW HIM VERY WELL AND ARE ABLE TO JUDGE FOR A MOST EXCELLENT SERVANT AND FAITHFUL AS POSSIBLE +3080-5040-0015-293: BECAUSE YOU FIND FAULT WITH MY OTHER LETTERS THIS IS LIKE TO BE SHORTER THAN THEY I DID NOT INTEND IT SO THOUGH I CAN ASSURE YOU +3080-5040-0016-294: I DO NOT FIND IT THOUGH I AM TOLD I WAS SO EXTREMELY WHEN I BELIEVED YOU LOVED ME +3080-5040-0017-295: BUT I AM CALLED UPON +3080-5040-0018-296: DIRECTED FOR YOUR MASTER +3080-5040-0019-297: I SEE YOU CAN (CHIDE->CHID) WHEN YOU PLEASE AND WITH AUTHORITY BUT I DESERVE IT I CONFESS AND ALL I CAN SAY FOR MYSELF IS THAT MY FAULT PROCEEDED FROM A VERY GOOD PRINCIPLE IN ME +3080-5040-0020-298: WE DARE NOT LET OUR TONGUES LIE MORE (ON ONE->*) SIDE OF OUR (MOUTHS->MOTHS) THAN (T'OTHER->THE OTHER) FOR FEAR OF OVERTURNING IT +3080-5040-0021-299: YOU ARE SATISFIED I HOPE (ERE->IF) THIS THAT I (SCAPED->ESCAPED) DROWNING +3080-5040-0022-300: BUT I AM TROUBLED MUCH YOU SHOULD MAKE SO ILL A JOURNEY TO SO LITTLE PURPOSE INDEED I (WRIT->WRITE) BY THE FIRST POST AFTER MY ARRIVAL HERE AND CANNOT IMAGINE HOW YOU CAME TO MISS OF MY LETTERS +3080-5040-0023-301: (HOW->OH) WELCOME YOU WILL BE BUT ALAS +3080-5040-0024-302: FOR MY LIFE I CANNOT BEAT INTO THEIR HEADS A PASSION THAT MUST BE SUBJECT TO NO DECAY (AN->AND) EVEN PERFECT KINDNESS THAT MUST LAST PERPETUALLY WITHOUT THE LEAST INTERMISSION +3080-5040-0025-303: THEY LAUGH TO HEAR ME SAY THAT ONE UNKIND WORD WOULD DESTROY ALL THE SATISFACTION OF MY LIFE AND THAT I SHOULD EXPECT OUR KINDNESS SHOULD INCREASE EVERY DAY IF IT WERE POSSIBLE BUT NEVER LESSEN +3080-5040-0026-304: WE GO ABROAD ALL DAY AND PLAY ALL NIGHT AND SAY (OUR PRAYERS->I'LL PRAY AS) WHEN WE HAVE TIME +3080-5040-0027-305: (WELL->WHILE) IN SOBER EARNEST NOW I WOULD NOT LIVE THUS A (TWELVEMONTH->TWELVE MONTHS) TO GAIN ALL THAT (THE->*) KING HAS LOST UNLESS IT WERE TO GIVE IT HIM AGAIN +3080-5040-0028-306: WILL YOU BE SO GOOD NATURED +3080-5040-0029-307: HE HAS ONE SON AND TIS THE FINEST BOY THAT (E'ER->ERE) YOU SAW AND HAS A NOBLE SPIRIT BUT YET STANDS IN THAT AWE OF HIS FATHER THAT ONE WORD FROM HIM IS AS MUCH AS TWENTY WHIPPINGS +3080-5040-0030-308: YOU MUST GIVE ME LEAVE TO ENTERTAIN (YOU THUS->YOURSELVES) WITH DISCOURSES OF THE FAMILY FOR I CAN TELL YOU NOTHING ELSE FROM HENCE +3080-5040-0031-309: NOT TO KNOW WHEN YOU (WOULD->HAD) COME HOME I CAN ASSURE YOU (NOR->NO) FOR ANY OTHER OCCASION (OF->ON) MY OWN BUT WITH A COUSIN OF MINE THAT HAD LONG DESIGNED TO MAKE HERSELF SPORT WITH HIM AND DID NOT MISS OF HER AIM 
+3080-5040-0032-310: IN MY LIFE I NEVER HEARD SO RIDICULOUS A DISCOURSE AS HE MADE US AND NO OLD WOMAN WHO (PASSES->PAUSES) FOR A WITCH COULD HAVE BEEN MORE PUZZLED TO SEEK WHAT TO SAY TO REASONABLE PEOPLE THAN HE WAS +3080-5040-0033-311: EVER SINCE THIS ADVENTURE I HAVE HAD SO GREAT A BELIEF IN ALL THINGS OF THIS NATURE THAT I COULD NOT FORBEAR LAYING A (PEAS COD->PEASE COT) WITH NINE PEAS (IN'T->INTO) UNDER MY DOOR YESTERDAY (AND->IT) WAS INFORMED BY IT THAT MY HUSBAND'S NAME SHOULD BE THOMAS HOW DO YOU LIKE THAT +3331-159605-0000-695: SHE PULLED HER HAIR DOWN TURNED (HER SKIRT->HIS GIRT) BACK PUT HER FEET ON THE FENDER AND TOOK (PUTTEL->PATTERN) INTO HER LAP ALL OF WHICH ARRANGEMENTS SIGNIFIED THAT SOMETHING VERY IMPORTANT HAD GOT TO BE THOUGHT OVER AND SETTLED +3331-159605-0001-696: THE MORE PROPOSALS THE MORE CREDIT +3331-159605-0002-697: (I VE->I'VE) TRIED IT AND LIKED IT AND MAYBE THIS IS THE CONSEQUENCE OF THAT NIGHT'S FUN +3331-159605-0003-698: JUST SUPPOSE IT IS TRUE THAT HE DOES ASK ME AND I SAY YES +3331-159605-0004-699: WHAT A SPITEFUL THING I AM +3331-159605-0005-700: I COULD DO SO MUCH FOR ALL AT HOME HOW I SHOULD ENJOY THAT +3331-159605-0006-701: (LET ME SEE->THAT MISSY) HOW CAN I BEGIN +3331-159605-0007-702: HE HAS KNOWN HER ALL HER LIFE AND HAS A GOOD INFLUENCE OVER HER +3331-159605-0008-703: NOW AS POLLY WAS BY NO MEANS A PERFECT CREATURE I AM FREE TO CONFESS THAT THE OLD TEMPTATION ASSAILED HER MORE THAN ONCE (THAT->THE) WEEK FOR WHEN THE FIRST EXCITEMENT OF THE DODGING REFORM HAD SUBSIDED SHE MISSED THE PLEASANT LITTLE INTERVIEWS THAT USED TO PUT A CERTAIN (FLAVOR->FLAVOUR) OF ROMANCE INTO HER DULL HARD WORKING DAYS +3331-159605-0009-704: I DON'T THINK IT WAS HIS WEALTH (*->THE) ACCOMPLISHMENTS (OR POSITION->OPPOSITION) THAT MOST ATTRACTED POLLY THOUGH THESE DOUBTLESS POSSESSED A GREATER INFLUENCE THAN SHE SUSPECTED +3331-159605-0010-705: IT WAS THAT INDESCRIBABLE SOMETHING WHICH WOMEN ARE QUICK TO SEE AND FEEL IN MEN WHO HAVE BEEN BLESSED (WITH->THE) WISE AND GOOD MOTHERS +3331-159605-0011-706: THIS HAD AN ESPECIAL CHARM TO POLLY FOR SHE SOON FOUND THAT THIS (SIDE->SIGHT) OF HIS CHARACTER WAS NOT SHOWN TO (EVERY ONE->EVERYONE) +3331-159605-0012-707: LATELY THIS HAD CHANGED ESPECIALLY TOWARDS POLLY AND IT (FLATTERED->FURTHER) HER MORE THAN SHE WOULD CONFESS EVEN TO HERSELF +3331-159605-0013-708: AT FIRST SHE TRIED TO THINK SHE COULD BUT UNFORTUNATELY HEARTS ARE SO CONTRARY THAT THEY WON'T BE OBEDIENT TO REASON WILL OR EVEN (GRATITUDE->CREDITU) +3331-159605-0014-709: POLLY FELT A VERY CORDIAL FRIENDSHIP FOR MISTER SYDNEY BUT NOT ONE PARTICLE OF THE (LOVE WHICH IS->LAW PITCHED) THE ONLY COIN IN WHICH LOVE CAN BE TRULY PAID +3331-159605-0015-710: THIS FINISHED POLLY'S INDECISION AND AFTER THAT NIGHT SHE NEVER ALLOWED HERSELF TO DWELL UPON THE PLEASANT TEMPTATION WHICH CAME IN A (GUISE->GUY'S) PARTICULARLY ATTRACTIVE TO A YOUNG GIRL WITH (A SPICE->THE SPIES) OF THE OLD EVE (IN->AND) HER COMPOSITION +3331-159605-0016-711: WHEN (SATURDAY->SAID) CAME POLLY STARTED AS USUAL FOR A VISIT TO BECKY AND BESS BUT (COULD N'T->COULDN'T) RESIST STOPPING AT THE (SHAWS->SHORES) TO LEAVE A LITTLE PARCEL FOR FAN (THOUGH IT->THAT) WAS CALLING TIME +3331-159605-0017-712: A FOOLISH LITTLE SPEECH TO MAKE TO A (DOG->DARK) BUT YOU SEE POLLY WAS ONLY A TENDER HEARTED GIRL TRYING TO (DO->*) HER DUTY +3331-159605-0018-713: TAKE HOLD OF (MASTER CHARLEY'S->MASSR CHARLIE'S) HAND MISS (MAMIE->MAY) AND (WALK->BUCK) PRETTY LIKE (WILLY->BILLY) AND (FLOSSY->FLOSSIE) SAID THE (MAID->MATE) 
+3331-159605-0019-714: (AT->*) A (STREET->DISTRICT) CORNER A BLACK EYED (SCHOOL BOY->SCHOOLBOY) WAS PARTING FROM A ROSY FACED SCHOOL GIRL WHOSE MUSIC ROLL HE WAS RELUCTANTLY SURRENDERING +3331-159605-0020-715: HOW HE GOT THERE WAS NEVER VERY CLEAR TO POLLY BUT THERE HE WAS FLUSHED AND A LITTLE OUT OF BREATH BUT LOOKING SO GLAD TO SEE HER (THAT->TILL) SHE HAD (N'T->NOT) THE HEART TO BE STIFF AND COOL AS SHE HAD FULLY INTENDED TO BE WHEN THEY MET +3331-159605-0021-716: SHE REALLY COULD (N'T->NOT) HELP IT IT WAS SO PLEASANT TO SEE HIM AGAIN JUST WHEN SHE WAS FEELING SO LONELY +3331-159605-0022-717: THAT IS THE WAY I GET TO THE (ROTHS->WORSE) ANSWERED POLLY +3331-159605-0023-718: SHE DID NOT MEAN TO TELL BUT HIS FRANKNESS WAS (SO->TO) AGREEABLE SHE FORGOT HERSELF +3331-159605-0024-719: BUT I KNOW HER BETTER AND I ASSURE YOU THAT SHE (DOES IMPROVE->DOESN'T PROVE) SHE TRIES TO (MEND HER->MEAN TO) FAULTS THOUGH SHE WON'T OWN IT AND WILL SURPRISE YOU SOME DAY BY THE AMOUNT OF HEART AND SENSE AND GOODNESS SHE HAS GOT +3331-159605-0025-720: THANK YOU NO +3331-159605-0026-721: (HOW->HER) LOVELY THE PARK LOOKS SHE SAID IN GREAT CONFUSION +3331-159605-0027-722: ASKED THE ARTFUL YOUNG MAN LAYING A TRAP INTO WHICH POLLY IMMEDIATELY FELL +3331-159605-0028-723: HE WAS QUICKER TO TAKE A HINT THAN SHE HAD EXPECTED AND BEING BOTH PROUD AND GENEROUS (RESOLVED->WE SOFT) TO SETTLE THE MATTER AT ONCE FOR POLLY'S SAKE AS WELL AS HIS OWN +3331-159605-0029-724: SO WHEN SHE MADE HER LAST (BRILLIANT->BUOYANT) REMARK HE SAID QUIETLY WATCHING HER FACE KEENLY ALL THE WHILE I THOUGHT SO WELL (I M->I'M) GOING OUT OF TOWN ON BUSINESS FOR SEVERAL WEEKS SO YOU CAN ENJOY YOUR LITTLE BIT OF COUNTRY WITHOUT BEING ANNOYED BY ME (ANNOYED->ANNOY IT) +3331-159605-0030-725: SHE THOUGHT SHE HAD A GOOD DEAL OF THE (COQUETTE->COQUET) IN HER AND (I VE->I'VE) NO DOUBT THAT WITH TIME AND TRAINING SHE WOULD HAVE BECOME A VERY DANGEROUS LITTLE PERSON BUT NOW SHE WAS FAR (TOO->TO) TRANSPARENT AND STRAIGHTFORWARD BY NATURE EVEN TO TELL A (WHITE LIE CLEVERLY->WIDE LIKE LEVELLY) +3331-159605-0031-726: HE WAS GONE BEFORE SHE COULD DO ANYTHING BUT LOOK UP AT HIM WITH A REMORSEFUL FACE AND SHE WALKED ON FEELING THAT THE FIRST AND PERHAPS THE ONLY (LOVER->LOVE) SHE WOULD EVER HAVE HAD READ HIS ANSWER AND ACCEPTED (IT->*) IN SILENCE +3331-159605-0032-727: POLLY DID NOT RETURN TO HER (FAVORITE->FAVOURITE) WALK TILL SHE LEARNED (FROM->FOR) MINNIE THAT UNCLE HAD REALLY LEFT TOWN AND THEN SHE FOUND THAT HIS FRIENDLY COMPANY AND CONVERSATION WAS WHAT HAD MADE THE WAY SO PLEASANT AFTER ALL +3331-159605-0033-728: (WAGGING->WORKING) TO AND FRO AS USUAL WHAT'S THE NEWS WITH YOU +3331-159605-0034-729: PERHAPS (SHE LL JILT->SHE'LL CHILLED) HIM +3331-159605-0035-730: UTTERLY DONE WITH AND LAID UPON THE SHELF +3331-159605-0036-731: (MINNIE->MANY) SAID THE OTHER DAY SHE WISHED SHE WAS A PIGEON SO SHE COULD PADDLE IN THE (PUDDLES->POTTLES) AND NOT FUSS ABOUT (RUBBERS->WRAPPERS) +3331-159605-0037-732: NOW DON'T BE AFFECTED POLLY BUT JUST TELL ME LIKE A DEAR HAS (N'T->NOT) HE PROPOSED +3331-159605-0038-733: DON'T YOU THINK HE MEANS TO +3331-159605-0039-734: TRULY (TRULY->JULIE) FAN +3331-159605-0040-735: I DON'T MEAN TO BE PRYING BUT I REALLY THOUGHT HE DID +3331-159605-0041-736: WELL I ALWAYS MEANT TO TRY IT IF I GOT A CHANCE AND I HAVE +3331-159605-0042-737: I JUST GAVE HIM A HINT AND HE TOOK IT +3331-159605-0043-738: HE MEANT TO GO AWAY BEFORE THAT SO (*->THEY) DON'T THINK HIS HEART IS BROKEN (OR->OH) MIND WHAT (SILLY TATTLERS->SYTLER) SAY +3331-159605-0044-739: 
HE UNDERSTOOD AND BEING A GENTLEMAN MADE NO FUSS +3331-159605-0045-740: BUT POLLY IT WOULD HAVE BEEN A GRAND THING FOR YOU +3331-159605-0046-741: (I M ODD->I'M NOT) YOU KNOW (AND->I'M) PREFER TO BE AN INDEPENDENT SPINSTER AND TEACH MUSIC ALL MY DAYS +3331-159609-0000-742: NEVER MIND WHAT THE BUSINESS WAS IT (SUFFICES->SURFACES) TO SAY THAT IT WAS A GOOD BEGINNING FOR A YOUNG MAN LIKE TOM WHO HAVING BEEN BORN AND BRED IN THE MOST CONSERVATIVE CLASS OF THE MOST CONCEITED CITY IN NEW ENGLAND NEEDED JUST THE HEALTHY HEARTY SOCIAL INFLUENCES OF THE WEST TO WIDEN HIS VIEWS AND MAKE A MAN OF HIM +3331-159609-0001-743: FORTUNATELY EVERY ONE WAS SO BUSY WITH THE NECESSARY PREPARATIONS THAT THERE WAS NO TIME FOR (ROMANCE->ROMANS) OF ANY SORT AND THE FOUR YOUNG PEOPLE WORKED TOGETHER AS SOBERLY AND SENSIBLY AS IF ALL SORTS OF EMOTIONS WERE NOT (BOTTLED->BOTHERED) UP IN THEIR RESPECTIVE HEARTS +3331-159609-0002-744: PITY THAT THE (END SHOULD->ANCIENT) COME SO SOON BUT THE HOUR DID ITS WORK AND (WENT->WHEN) ITS WAY LEAVING A CLEARER ATMOSPHERE BEHIND (THOUGH->THAN) THE YOUNG FOLKS DID NOT SEE IT THEN FOR THEIR EYES WERE DIM BECAUSE OF THE (PARTINGS THAT->PARTING STEP) MUST BE +3331-159609-0003-745: IF IT HAD NOT BEEN FOR TWO THINGS I FEAR SHE NEVER WOULD HAVE STOOD A SUMMER IN TOWN BUT SYDNEY OFTEN CALLED (TILL->TO) HIS VACATION CAME AND A VOLUMINOUS CORRESPONDENCE WITH POLLY BEGUILED THE LONG DAYS +3331-159609-0004-746: (TOM WROTE ONCE->TUM WOLT ONES) A WEEK TO HIS MOTHER BUT (THE LETTERS->THEY LET US) WERE SHORT AND NOT VERY SATISFACTORY FOR MEN NEVER DO TELL THE INTERESTING LITTLE THINGS THAT WOMEN BEST LIKE TO HEAR +3331-159609-0005-747: NO I (M->AM) ONLY TIRED HAD A GOOD DEAL TO DO LATELY AND THE (DULL->DOLL) WEATHER MAKES ME JUST A (TRIFLE->TRAVEL) BLUE +3331-159609-0006-748: FORGIVE ME POLLY BUT I CAN'T HELP SAYING IT FOR (IT IS->THERE'S) THERE AND I WANT TO BE AS TRUE TO YOU AS YOU WERE TO ME IF I CAN +3331-159609-0007-749: I (TRY->TRIED) NOT TO DECEIVE MYSELF BUT IT DOES SEEM AS IF THERE WAS A CHANCE OF HAPPINESS FOR ME +3331-159609-0008-750: THANK HEAVEN FOR THAT +3331-159609-0009-751: CRIED POLLY WITH THE HEARTIEST SATISFACTION IN HER VOICE +3331-159609-0010-752: POOR (POLLY->PEARLING) WAS SO TAKEN BY SURPRISE THAT SHE HAD NOT A WORD TO SAY +3331-159609-0011-753: NONE WERE NEEDED HER (TELLTALE->TELL HER) FACE ANSWERED FOR HER AS WELL AS THE IMPULSE WHICH MADE HER HIDE HER HEAD IN THE (SOFA->SILVER) CUSHION LIKE A FOOLISH OSTRICH (WHEN->AND) THE (HUNTERS->HANDLES) ARE AFTER IT +3331-159609-0012-754: ONCE OR TWICE (BUT->THAT) SORT OF (JOKINGLY->CHOKINGLY) AND I THOUGHT IT WAS ONLY SOME LITTLE FLIRTATION +3331-159609-0013-755: IT WAS SO STUPID OF ME NOT TO GUESS BEFORE +3331-159609-0014-756: IT WAS SO TENDER EARNEST AND DEFIANT THAT FANNY FORGOT THE DEFENCE OF HER OWN LOVER (IN->AND) ADMIRATION OF POLLY'S LOYALTY TO HERS FOR THIS FAITHFUL ALL ABSORBING LOVE WAS A (NEW REVELATION->NEWER RELATION) TO FANNY WHO WAS USED TO HEARING HER FRIENDS BOAST OF TWO OR THREE LOVERS A YEAR AND CALCULATE THEIR RESPECTIVE VALUES WITH ALMOST AS MUCH COOLNESS AS THE YOUNG MEN DISCUSSED THE FORTUNES OF THE GIRLS THEY WISHED FOR BUT COULD NOT AFFORD TO MARRY +3331-159609-0015-757: I HOPE MARIA BAILEY IS (ALL HE->ONLY) THINKS HER SHE ADDED SOFTLY FOR I COULD (N'T->NOT) BEAR TO HAVE HIM DISAPPOINTED AGAIN +3331-159609-0016-758: SAID FANNY TURNING HOPEFUL ALL AT ONCE +3331-159609-0017-759: SUPPOSE (I->HER) SAY A WORD TO TOM JUST INQUIRE AFTER HIS HEART IN A GENERAL WAY YOU KNOW AND GIVE HIM A CHANCE TO TELL ME IF 
(THERE IS->THERE'S) ANYTHING TO TELL +3331-159609-0018-760: BEAR IT PEOPLE ALWAYS DO BEAR THINGS SOMEHOW ANSWERED POLLY LOOKING AS IF SENTENCE HAD BEEN PASSED UPON HER +3331-159609-0019-761: IT WAS (A->*) VERY DIFFERENT (WINTER->WINDOW) FROM THE LAST (FOR BOTH->ABOVE) THE GIRLS +3331-159609-0020-762: IF (FANNY->ANY) WANTED TO SHOW HIM WHAT SHE COULD DO TOWARD MAKING A PLEASANT HOME SHE CERTAINLY SUCCEEDED (BETTER THAN->BY THEN) SHE SUSPECTED FOR IN SPITE OF MANY FAILURES AND DISCOURAGEMENTS BEHIND THE SCENES THE LITTLE HOUSE BECAME A MOST ATTRACTIVE PLACE TO MISTER (SYDNEY->SIDNEY) AT LEAST FOR HE WAS MORE THE HOUSE FRIEND THAN EVER AND SEEMED DETERMINED TO PROVE THAT CHANGE OF FORTUNE MADE NO DIFFERENCE TO HIM +3331-159609-0021-763: SHE KEPT MUCH AT HOME (WHEN->IN) THE DAY'S WORK WAS DONE FINDING IT PLEASANTER TO SIT DREAMING (OVER->OF A) BOOK OR (SEWING->SOON) ALONE THAN TO EXERT HERSELF EVEN TO GO TO THE (SHAWS->SHORES) +3331-159609-0022-764: POLLY WAS NOT AT ALL LIKE HERSELF THAT (WINTER->WINDOW) AND THOSE NEAREST TO HER SAW (AND WONDERED->INVOLUNTE) AT IT MOST +3331-159609-0023-765: FOR NED WAS SO ABSORBED IN BUSINESS THAT HE IGNORED THE WHOLE (BAILEY->BAILIQUE) QUESTION AND LEFT THEM IN (UTTER->OTHER) DARKNESS +3331-159609-0024-766: (FANNY->WHEN HE) CAME WALKING IN UPON HER ONE DAY LOOKING AS IF SHE (BROUGHT TIDINGS->POURED HIDINGS) OF SUCH GREAT JOY THAT SHE HARDLY KNEW HOW TO TELL THEM +3331-159609-0025-767: BUT IF WORK BASKETS WERE GIFTED WITH POWERS OF SPEECH THEY COULD TELL STORIES MORE TRUE AND TENDER THAN ANY (WE READ->WEED) +3528-168656-0000-864: SHE HAD EVEN BEEN IN SOCIETY BEFORE THE REVOLUTION +3528-168656-0001-865: IT WAS HER PLEASURE AND HER VANITY TO DRAG IN THESE NAMES ON EVERY PRETEXT +3528-168656-0002-866: EVERY YEAR SHE SOLEMNLY RENEWED HER VOWS AND AT THE MOMENT OF TAKING THE OATH SHE SAID TO THE PRIEST MONSEIGNEUR SAINT (FRANCOIS->FROIS) GAVE IT TO MONSEIGNEUR SAINT JULIEN MONSEIGNEUR SAINT (JULIEN->JULIAN) GAVE IT TO MONSEIGNEUR SAINT (EUSEBIUS MONSEIGNEUR->EUSIDES MONSIEUR) SAINT (EUSEBIUS->EUSIBIUS) GAVE IT TO MONSEIGNEUR SAINT PROCOPIUS ET CETERA ET CETERA +3528-168656-0003-867: AND THE (SCHOOL GIRLS->SCHOOLGIRLS) WOULD BEGIN TO LAUGH NOT IN THEIR SLEEVES BUT UNDER (THEIR->THE) VEILS CHARMING LITTLE STIFLED LAUGHS WHICH MADE THE (VOCAL->FULCAL) MOTHERS FROWN +3528-168656-0004-868: IT WAS A CENTURY WHICH SPOKE THROUGH HER BUT IT WAS THE EIGHTEENTH CENTURY +3528-168656-0005-869: THE RULE OF (FONTEVRAULT->FONTREVAL) DID NOT FORBID THIS +3528-168656-0006-870: SHE WOULD NOT SHOW (THIS OBJECT->THE SUBJECT) TO (ANYONE->ANY ONE) +3528-168656-0007-871: THUS IT FURNISHED A SUBJECT OF COMMENT FOR ALL THOSE WHO WERE (UNOCCUPIED->ON OCCUPIED) OR BORED IN THE CONVENT +3528-168656-0008-872: SOME (UNIQUE->EUIK) CHAPLET SOME AUTHENTIC RELIC +3528-168656-0009-873: THEY LOST THEMSELVES IN CONJECTURES +3528-168656-0010-874: WHEN THE POOR OLD WOMAN DIED THEY RUSHED TO HER CUPBOARD MORE HASTILY THAN WAS FITTING PERHAPS AND OPENED IT +3528-168656-0011-875: HE IS RESISTING FLUTTERING HIS TINY WINGS AND STILL MAKING AN EFFORT TO FLY BUT THE (DANCER IS->DANCERS) LAUGHING WITH A SATANICAL AIR +3528-168656-0012-876: MORAL LOVE CONQUERED BY THE COLIC +3528-168669-0000-877: THE (PRIORESS->PRIORS) RETURNED AND SEATED HERSELF ONCE MORE ON HER CHAIR +3528-168669-0001-878: WE WILL PRESENT A STENOGRAPHIC REPORT OF THE DIALOGUE WHICH THEN ENSUED TO THE BEST OF OUR ABILITY +3528-168669-0002-879: FATHER (FAUVENT->VUENT) +3528-168669-0003-880: REVEREND MOTHER DO YOU KNOW THE CHAPEL 
+3528-168669-0004-881: AND YOU HAVE BEEN IN THE CHOIR IN PURSUANCE OF YOUR DUTIES TWO OR THREE TIMES +3528-168669-0005-882: THERE IS A STONE TO BE RAISED HEAVY +3528-168669-0006-883: THE SLAB OF THE PAVEMENT WHICH IS AT THE (SIDE->THOUGHT) OF THE ALTAR +3528-168669-0007-884: THE (SLAB->FLAP) WHICH CLOSES THE VAULT YES +3528-168669-0008-885: IT WOULD BE A GOOD THING TO HAVE TWO MEN FOR IT +3528-168669-0009-886: A WOMAN IS NEVER A MAN +3528-168669-0010-887: BECAUSE (DOM MABILLON->DON MARLAN) GIVES FOUR HUNDRED AND SEVENTEEN EPISTLES OF SAINT BERNARD WHILE (MERLONUS HORSTIUS->MERLINUS HORSES) ONLY GIVES THREE HUNDRED AND SIXTY SEVEN I DO NOT DESPISE (MERLONUS HORSTIUS->MERLINA'S HORSES) NEITHER DO I +3528-168669-0011-888: (MERIT->MARRIAGE) CONSISTS IN WORKING ACCORDING TO ONE'S STRENGTH A CLOISTER IS NOT A (DOCK YARD->DOCKYARD) +3528-168669-0012-889: AND A WOMAN IS NOT A MAN BUT MY BROTHER IS THE STRONG ONE THOUGH +3528-168669-0013-890: AND CAN YOU GET A (LEVER->LOVER) +3528-168669-0014-891: THERE IS A RING IN THE STONE +3528-168669-0015-892: I WILL PUT THE LEVER THROUGH IT +3528-168669-0016-893: THAT IS GOOD REVEREND MOTHER I WILL OPEN THE VAULT +3528-168669-0017-894: WILL THAT BE ALL NO +3528-168669-0018-895: GIVE ME YOUR ORDERS VERY REVEREND MOTHER +3528-168669-0019-896: (FAUVENT->FOR THAT) WE HAVE CONFIDENCE IN YOU +3528-168669-0020-897: I AM HERE TO DO ANYTHING YOU WISH +3528-168669-0021-898: AND TO HOLD YOUR PEACE ABOUT EVERYTHING YES (REVEREND->ROBIN) MOTHER +3528-168669-0022-899: WHEN THE (VAULT->WALL) IS OPEN I WILL CLOSE IT AGAIN +3528-168669-0023-900: BUT BEFORE THAT WHAT REVEREND MOTHER +3528-168669-0024-901: FATHER (FAUVENT->FERVENT) REVEREND MOTHER +3528-168669-0025-902: YOU KNOW THAT A MOTHER DIED THIS MORNING +3528-168669-0026-903: NO DID YOU NOT HEAR THE BELL +3528-168669-0027-904: NOTHING CAN BE HEARD AT THE BOTTOM OF THE GARDEN REALLY +3528-168669-0028-905: AND THEN THE WIND (IS->DOES) NOT BLOWING IN MY DIRECTION THIS MORNING +3528-168669-0029-906: IT WAS MOTHER CRUCIFIXION +3528-168669-0030-907: THREE YEARS AGO MADAME DE (BETHUNE->BESOON) A (JANSENIST->GENTLEST) TURNED ORTHODOX MERELY FROM HAVING SEEN MOTHER CRUCIFIXION AT PRAYER AH +3528-168669-0031-908: THE MOTHERS HAVE TAKEN HER TO THE DEAD ROOM WHICH OPENS ON THE CHURCH I KNOW +3528-168669-0032-909: A FINE SIGHT IT WOULD BE TO SEE A MAN ENTER THE (DEAD ROOM->BEDROOM) MORE OFTEN +3528-168669-0033-910: HEY MORE OFTEN +3528-168669-0034-911: WHAT DO YOU SAY +3528-168669-0035-912: I SAY MORE OFTEN MORE OFTEN THAN WHAT +3528-168669-0036-913: REVEREND MOTHER I DID NOT SAY MORE OFTEN THAN WHAT I SAID MORE OFTEN +3528-168669-0037-914: BUT I DID NOT SAY MORE OFTEN +3528-168669-0038-915: AT THAT MOMENT NINE O'CLOCK STRUCK +3528-168669-0039-916: AT NINE O'CLOCK IN THE MORNING AND AT ALL HOURS PRAISED AND ADORED (*->TO) BE THE MOST HOLY SACRAMENT OF THE ALTAR SAID THE (PRIORESS->PIRATES) +3528-168669-0040-917: IT CUT MORE OFTEN SHORT +3528-168669-0041-918: FAUCHELEVENT MOPPED HIS FOREHEAD +3528-168669-0042-919: IN HER LIFETIME MOTHER CRUCIFIXION MADE CONVERTS AFTER HER DEATH SHE WILL PERFORM MIRACLES SHE WILL +3528-168669-0043-920: FATHER (FAUVENT->VOUVENT) THE COMMUNITY HAS BEEN BLESSED IN MOTHER CRUCIFIXION +3528-168669-0044-921: SHE RETAINED HER CONSCIOUSNESS TO THE VERY LAST MOMENT +3528-168669-0045-922: SHE GAVE US HER LAST COMMANDS +3528-168669-0046-923: IF YOU HAD A LITTLE MORE FAITH AND IF YOU COULD HAVE BEEN IN (HER CELL->HERSELF) SHE WOULD HAVE CURED YOUR LEG MERELY BY TOUCHING IT SHE SMILED +3528-168669-0047-924: THERE 
WAS SOMETHING OF PARADISE IN THAT DEATH +3528-168669-0048-925: FAUCHELEVENT THOUGHT THAT IT WAS AN ORISON WHICH SHE WAS FINISHING +3528-168669-0049-926: FAUCHELEVENT HELD HIS PEACE SHE WENT ON +3528-168669-0050-927: I HAVE CONSULTED UPON THIS POINT MANY ECCLESIASTICS LABORING IN OUR LORD WHO OCCUPY THEMSELVES IN THE EXERCISES OF THE CLERICAL LIFE AND WHO BEAR WONDERFUL FRUIT +3528-168669-0051-928: FORTUNATELY THE (PRIORESS->PIRASS) COMPLETELY ABSORBED IN HER OWN THOUGHTS DID NOT HEAR IT +3528-168669-0052-929: SHE CONTINUED FATHER (FAUVENT->REVENT) +3528-168669-0053-930: YES REVEREND MOTHER +3528-168669-0054-931: SAINT TERENTIUS BISHOP OF PORT WHERE THE MOUTH OF THE TIBER EMPTIES INTO THE SEA REQUESTED THAT ON HIS (TOMB->TWO) MIGHT BE ENGRAVED THE SIGN WHICH WAS PLACED ON THE GRAVES OF (PARRICIDES->PARASITES) IN THE HOPE THAT PASSERS BY WOULD SPIT ON HIS TOMB THIS WAS DONE +3528-168669-0055-932: THE DEAD MUST BE OBEYED SO BE IT +3528-168669-0056-933: FOR THAT MATTER NO REVEREND MOTHER +3528-168669-0057-934: FATHER (FAUVENT->VENT) MOTHER CRUCIFIXION WILL BE INTERRED IN THE COFFIN IN WHICH SHE HAS SLEPT FOR THE LAST TWENTY YEARS THAT IS JUST +3528-168669-0058-935: IT IS A CONTINUATION OF HER SLUMBER +3528-168669-0059-936: SO I SHALL HAVE TO NAIL UP THAT COFFIN YES +3528-168669-0060-937: I AM AT THE ORDERS OF THE VERY REVEREND (COMMUNITY->CUNITY) +3528-168669-0061-938: THE (FOUR MOTHER PRECENTORS->FOREMOTHER PRESENTERS) WILL ASSIST YOU +3528-168669-0062-939: NO (IN LOWERING->INLORING) THE COFFIN +3528-168669-0063-940: WHERE INTO THE VAULT +3528-168669-0064-941: FAUCHELEVENT STARTED THE VAULT UNDER THE ALTAR +3528-168669-0065-942: UNDER THE ALTAR BUT +3528-168669-0066-943: YOU WILL HAVE AN IRON BAR YES BUT +3528-168669-0067-944: YOU WILL RAISE THE STONE WITH THE BAR BY MEANS OF THE RING BUT +3528-168669-0068-945: THE DEAD MUST BE OBEYED TO BE BURIED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL NOT TO GO TO PROFANE EARTH TO REMAIN THERE IN DEATH WHERE SHE PRAYED WHILE LIVING SUCH WAS THE LAST WISH OF MOTHER CRUCIFIXION +3528-168669-0069-946: SHE ASKED IT OF US THAT IS TO SAY COMMANDED US +3528-168669-0070-947: BUT IT IS FORBIDDEN +3528-168669-0071-948: OH I AM A STONE IN YOUR WALLS +3528-168669-0072-949: THINK FATHER (FAUVENT->UVERT) IF SHE WERE TO WORK MIRACLES HERE +3528-168669-0073-950: WHAT A GLORY OF GOD FOR THE COMMUNITY AND MIRACLES ISSUE FROM TOMBS +3528-168669-0074-951: BUT REVEREND MOTHER IF THE AGENT OF THE SANITARY COMMISSION +3528-168669-0075-952: BUT THE COMMISSARY OF POLICE +3528-168669-0076-953: (CHONODEMAIRE->CHATEAU DE MER) ONE OF THE SEVEN GERMAN KINGS WHO ENTERED AMONG THE (GAULS->GULFS) UNDER THE EMPIRE OF CONSTANTIUS EXPRESSLY RECOGNIZED THE RIGHT OF NUNS TO BE BURIED IN RELIGION THAT IS TO SAY BENEATH THE ALTAR +3528-168669-0077-954: THE WORLD IS NOTHING IN THE PRESENCE OF THE CROSS +3528-168669-0078-955: MARTIN THE ELEVENTH GENERAL OF THE CARTHUSIANS GAVE TO HIS ORDER THIS DEVICE STAT (CRUX DUM VOLVITUR ORBIS->CREW DOOM VOLVETER ORBUS) +3528-168669-0079-956: THE (PRIORESS->PYRIS) WHO WAS USUALLY SUBJECTED TO THE BARRIER OF SILENCE AND WHOSE RESERVOIR WAS (OVERFULL->OVER FULL) ROSE AND EXCLAIMED WITH THE (LOQUACITY->LEQUEST) OF A DAM WHICH HAS BROKEN AWAY +3528-168669-0080-957: I HAVE ON MY RIGHT (BENOIT->BENOIS) AND ON MY LEFT BERNARD WHO WAS BERNARD +3528-168669-0081-958: THE FIRST ABBOT OF (CLAIRVAUX->CLERVAL) +3528-168669-0082-959: HIS ORDER HAS PRODUCED FORTY POPES TWO HUNDRED CARDINALS FIFTY PATRIARCHS SIXTEEN HUNDRED ARCHBISHOPS FOUR THOUSAND SIX HUNDRED BISHOPS FOUR 
EMPERORS TWELVE EMPRESSES FORTY SIX KINGS FORTY ONE QUEENS THREE THOUSAND SIX HUNDRED CANONIZED SAINTS AND HAS BEEN IN EXISTENCE FOR FOURTEEN HUNDRED YEARS +3528-168669-0083-960: ON ONE SIDE SAINT BERNARD ON THE OTHER THE AGENT OF THE (SANITARY->SENATORY) DEPARTMENT +3528-168669-0084-961: GOD SUBORDINATED TO THE (COMMISSARY->COMMISSORY) OF POLICE SUCH (IS->WAS) THE AGE SILENCE (FAUVENT->FAVAN) +3528-168669-0085-962: NO ONE DOUBTS THE RIGHT OF THE MONASTERY (TO->CHOOSE) SEPULTURE +3528-168669-0086-963: ONLY FANATICS AND THOSE IN ERROR DENY IT +3528-168669-0087-964: WE LIVE IN TIMES OF TERRIBLE CONFUSION +3528-168669-0088-965: WE ARE IGNORANT AND IMPIOUS +3528-168669-0089-966: AND THEN RELIGION IS ATTACKED WHY +3528-168669-0090-967: BECAUSE THERE HAVE BEEN BAD PRIESTS BECAUSE (SAGITTAIRE->SAGATURE) BISHOP OF GAP WAS THE BROTHER OF (SALONE->SALON) BISHOP OF (EMBRUN->EMBRON) AND BECAUSE BOTH OF THEM FOLLOWED (MOMMOL->MAMMA) +3528-168669-0091-968: THEY PERSECUTE THE SAINTS +3528-168669-0092-969: THEY SHUT THEIR EYES TO THE TRUTH DARKNESS IS THE RULE +3528-168669-0093-970: THE MOST FEROCIOUS BEASTS ARE BEASTS WHICH ARE BLIND +3528-168669-0094-971: OH HOW WICKED PEOPLE ARE +3528-168669-0095-972: BY ORDER OF THE KING SIGNIFIES TO DAY BY ORDER OF THE REVOLUTION +3528-168669-0096-973: ONE NO LONGER KNOWS WHAT IS DUE TO THE LIVING OR TO THE DEAD A HOLY DEATH IS PROHIBITED +3528-168669-0097-974: (GAUTHIER->GATHIER) BISHOP OF (CHALONS->CALON) HELD HIS OWN IN THIS MATTER AGAINST OTHO DUKE OF BURGUNDY +3528-168669-0098-975: THE (PRIORESS->PRIORS) TOOK BREATH THEN TURNED TO FAUCHELEVENT +3528-168669-0099-976: YOU WILL CLOSE THE COFFIN THE SISTERS WILL CARRY IT TO THE CHAPEL +3528-168669-0100-977: THE OFFICE FOR THE DEAD WILL THEN BE SAID +3528-168669-0101-978: BUT SHE WILL HEAR SHE WILL NOT LISTEN +3528-168669-0102-979: BESIDES WHAT THE CLOISTER KNOWS THE WORLD LEARNS NOT +3528-168669-0103-980: A PAUSE (ENSUED->ENSUIT) +3528-168669-0104-981: YOU WILL REMOVE YOUR (BELL->BELT) +3528-168669-0105-982: HAS THE DOCTOR FOR THE DEAD PAID HIS VISIT +3528-168669-0106-983: HE WILL PAY IT AT FOUR O'CLOCK TO DAY +3528-168669-0107-984: THE PEAL WHICH ORDERS THE DOCTOR FOR THE (DEAD->DEBT) TO BE SUMMONED HAS ALREADY BEEN RUNG +3528-168669-0108-985: BUT YOU DO NOT UNDERSTAND ANY OF THE PEALS +3528-168669-0109-986: THAT IS WELL FATHER (FAUVENT->VENT) +3528-168669-0110-987: WHERE WILL YOU OBTAIN IT +3528-168669-0111-988: I HAVE MY HEAP OF OLD IRON AT THE BOTTOM OF THE GARDEN +3528-168669-0112-989: REVEREND MOTHER WHAT +3528-168669-0113-990: IF YOU WERE EVER TO HAVE ANY OTHER JOBS OF THIS SORT MY BROTHER IS THE STRONG MAN FOR YOU A PERFECT TURK +3528-168669-0114-991: YOU WILL DO IT AS SPEEDILY AS POSSIBLE +3528-168669-0115-992: I CANNOT WORK VERY FAST I AM INFIRM THAT IS WHY I REQUIRE AN ASSISTANT I LIMP +3528-168669-0116-993: EVERYTHING MUST HAVE BEEN COMPLETED A GOOD QUARTER OF AN HOUR BEFORE THAT +3528-168669-0117-994: I WILL DO ANYTHING TO PROVE MY ZEAL TOWARDS THE COMMUNITY THESE ARE MY ORDERS I AM TO NAIL UP THE COFFIN +3528-168669-0118-995: AT ELEVEN O'CLOCK EXACTLY I AM TO BE IN THE CHAPEL +3528-168669-0119-996: MOTHER (ASCENSION->ISSUE) WILL BE THERE TWO MEN WOULD BE BETTER +3528-168669-0120-997: HOWEVER NEVER MIND I SHALL HAVE MY (LEVER->LOVER) +3528-168669-0121-998: AFTER WHICH THERE WILL BE NO TRACE OF ANYTHING +3528-168669-0122-999: THE GOVERNMENT WILL HAVE NO SUSPICION +3528-168669-0123-1000: THE EMPTY COFFIN REMAINS THIS PRODUCED A PAUSE +3528-168669-0124-1001: WHAT IS TO BE DONE WITH THAT COFFIN FATHER 
(FAUVENT->VENT) +3528-168669-0125-1002: IT WILL BE GIVEN TO THE EARTH EMPTY +3528-168669-0126-1003: AH (THE DE->LEDA) EXCLAIMED FAUCHELEVENT +3528-168669-0127-1004: THE (VIL->VILLE) STUCK FAST IN HIS THROAT +3528-168669-0128-1005: HE MADE HASTE TO IMPROVISE AN EXPEDIENT TO MAKE HER FORGET THE OATH +3528-168669-0129-1006: I WILL PUT EARTH IN THE COFFIN (REVEREND->REVERED) MOTHER THAT WILL PRODUCE THE EFFECT OF A CORPSE +3528-168669-0130-1007: I WILL MAKE THAT MY SPECIAL BUSINESS +3538-142836-0000-1567: GENERAL OBSERVATIONS ON PRESERVES (CONFECTIONARY->CONFECTIONERY) ICES AND DESSERT DISHES +3538-142836-0001-1568: THE EXPENSE OF PRESERVING THEM WITH SUGAR IS A SERIOUS OBJECTION FOR EXCEPT THE SUGAR IS USED IN CONSIDERABLE (QUANTITIES->QUALITIES) THE SUCCESS IS VERY UNCERTAIN +3538-142836-0002-1569: FRUIT GATHERED IN WET OR FOGGY WEATHER WILL SOON BE (MILDEWED->MELTED) AND BE OF NO SERVICE FOR PRESERVES +3538-142836-0003-1570: BUT TO DISTINGUISH THESE PROPERLY REQUIRES VERY GREAT ATTENTION AND CONSIDERABLE EXPERIENCE +3538-142836-0004-1571: IF YOU DIP THE FINGER INTO THE (SYRUP->SERF) AND APPLY IT TO THE THUMB THE TENACITY OF THE (SYRUP->SERF) WILL ON SEPARATING THE FINGER AND THUMB AFFORD A THREAD WHICH SHORTLY BREAKS THIS IS THE LITTLE THREAD +3538-142836-0005-1572: LET IT BOIL UP AGAIN THEN TAKE IT OFF AND REMOVE CAREFULLY THE SCUM THAT HAS RISEN +3538-142836-0006-1573: IT IS CONSIDERED TO BE SUFFICIENTLY BOILED WHEN SOME TAKEN UP IN A SPOON POURS OUT LIKE OIL +3538-142836-0007-1574: BEFORE SUGAR WAS IN USE HONEY WAS EMPLOYED TO (PRESERVE->PRESENT) MANY VEGETABLE PRODUCTIONS THOUGH THIS SUBSTANCE (HAS->IS) NOW GIVEN WAY TO THE JUICE OF THE SUGAR CANE +3538-142836-0008-1575: FOURTEEN NINETY NINE +3538-142836-0009-1576: BOIL THEM UP THREE DAYS SUCCESSIVELY SKIMMING EACH TIME AND THEY WILL THEN BE FINISHED AND IN A STATE FIT TO BE PUT INTO POTS FOR USE +3538-142836-0010-1577: THE REASON WHY THE FRUIT IS EMPTIED OUT OF THE PRESERVING PAN INTO (AN->OUR) EARTHEN PAN IS THAT THE ACID OF THE FRUIT ACTS UPON THE COPPER OF WHICH THE PRESERVING PANS ARE USUALLY MADE +3538-142836-0011-1578: FROM THIS EXAMPLE THE PROCESS OF PRESERVING FRUITS BY SYRUP (WILL->WOULD) BE EASILY COMPREHENDED +3538-142836-0012-1579: THEY SHOULD BE DRIED IN THE STOVE OR OVEN ON A (SIEVE->SEA) AND TURNED EVERY SIX OR EIGHT HOURS FRESH POWDERED SUGAR BEING SIFTED OVER THEM EVERY TIME THEY (ARE TURNED->RETURNED) +3538-142836-0013-1580: IN THIS WAY IT IS ALSO THAT ORANGE AND (LEMON CHIPS->LINENSHIPS) ARE PRESERVED +3538-142836-0014-1581: MARMALADES JAMS AND FRUIT (PASTES->PACE) ARE OF THE SAME NATURE AND ARE NOW IN VERY GENERAL (REQUEST->QUEST) +3538-142836-0015-1582: (MARMALADES->MARMAL ETS) AND JAMS DIFFER LITTLE FROM EACH OTHER (THEY ARE->THEIR) PRESERVES OF (A->*) HALF LIQUID CONSISTENCY MADE BY BOILING THE PULP OF FRUITS AND SOMETIMES PART OF THE (RINDS->RHINS) WITH SUGAR +3538-142836-0016-1583: THAT THEY MAY KEEP IT IS NECESSARY NOT TO BE SPARING OF SUGAR FIFTEEN O THREE +3538-142836-0017-1584: IN ALL THE OPERATIONS FOR PRESERVE MAKING WHEN THE PRESERVING PAN IS USED IT SHOULD NOT BE PLACED ON THE FIRE BUT ON A (TRIVET->TRIBUT) UNLESS THE JAM IS MADE ON A HOT PLATE WHEN THIS IS NOT NECESSARY +3538-142836-0018-1585: (CONFECTIONARY->CONFECTIONERY) FIFTEEN O EIGHT +3538-142836-0019-1586: IN SPEAKING OF (CONFECTIONARY IT->CONFECTIONERIES) SHOULD BE REMARKED THAT ALL THE VARIOUS PREPARATIONS ABOVE NAMED COME STRICTLY SPEAKING UNDER THAT HEAD FOR THE (VARIOUS FRUITS->VERY SPRUITS) FLOWERS HERBS (ROOTS->RUTHS) AND JUICES WHICH 
(WHEN BOILED->ONE BOIL) WITH SUGAR WERE FORMERLY EMPLOYED IN PHARMACY AS WELL AS FOR SWEETMEATS WERE CALLED CONFECTIONS FROM THE LATIN WORD (CONFICERE->CONFERS) TO MAKE UP BUT THE TERM CONFECTIONARY EMBRACES A VERY LARGE CLASS INDEED OF SWEET FOOD MANY KINDS OF WHICH SHOULD NOT BE ATTEMPTED IN THE ORDINARY (CUISINE->COUISINE) +3538-142836-0020-1587: THE THOUSAND AND ONE ORNAMENTAL DISHES THAT ADORN THE TABLES OF THE WEALTHY SHOULD BE PURCHASED FROM THE CONFECTIONER THEY CANNOT PROFITABLY BE MADE AT HOME +3538-142836-0021-1588: HOWEVER AS LATE AS THE (REIGNS->REIGN) OF OUR TWO LAST GEORGES FABULOUS SUMS WERE OFTEN EXPENDED UPON FANCIFUL (DESSERTS->DESERTS) +3538-142836-0022-1589: THE SHAPE OF THE DISHES VARIES AT DIFFERENT PERIODS THE PREVAILING FASHION AT PRESENT BEING OVAL AND CIRCULAR DISHES ON STEMS +3538-142836-0023-1590: (ICES->ISIS) +3538-142836-0024-1591: (AT->A) DESSERTS OR AT SOME EVENING PARTIES (ICES->IISES) ARE SCARCELY TO BE DISPENSED WITH +3538-142836-0025-1592: THE (SPADDLE->SPADEL) IS GENERALLY MADE OF COPPER KEPT BRIGHT AND CLEAN +3538-142836-0026-1593: THEY SHOULD BE TAKEN IMMEDIATELY AFTER THE REPAST OR SOME HOURS AFTER BECAUSE THE TAKING (*->OF) THESE SUBSTANCES DURING THE PROCESS OF DIGESTION IS APT TO PROVOKE INDISPOSITION +3538-163619-0000-1500: THERE WAS ONCE (ON A->TILL THE) TIME A WIDOWER WHO HAD A SON AND A DAUGHTER BY HIS FIRST (WIFE->WI) +3538-163619-0001-1501: FROM THE VERY DAY THAT THE NEW WIFE CAME INTO THE HOUSE THERE WAS NO PEACE FOR THE MAN'S CHILDREN AND NOT A CORNER TO BE FOUND WHERE THEY COULD GET ANY REST SO THE BOY THOUGHT THAT THE BEST THING HE COULD DO WAS TO GO OUT INTO THE WORLD AND TRY TO EARN HIS OWN BREAD +3538-163619-0002-1502: BUT HIS SISTER WHO WAS STILL AT HOME FARED WORSE AND WORSE +3538-163619-0003-1503: KISS ME (GIRL->GO) SAID THE HEAD +3538-163619-0004-1504: WHEN THE KING ENTERED AND SAW IT HE STOOD STILL AS IF HE WERE IN FETTERS AND COULD NOT STIR FROM THE SPOT FOR THE PICTURE SEEMED TO HIM SO BEAUTIFUL +3538-163619-0005-1505: (THE YOUTH->THESE) PROMISED TO MAKE ALL THE HASTE HE COULD AND SET FORTH FROM THE KING'S PALACE +3538-163619-0006-1506: AT LAST THEY CAME IN SIGHT OF LAND +3538-163619-0007-1507: WELL IF MY BROTHER SAYS SO I MUST DO IT SAID THE MAN'S DAUGHTER AND SHE FLUNG HER CASKET INTO THE SEA +3538-163619-0008-1508: WHAT IS MY BROTHER SAYING ASKED HIS SISTER AGAIN +3538-163619-0009-1509: ON THE FIRST THURSDAY NIGHT AFTER THIS A BEAUTIFUL MAIDEN CAME INTO THE KITCHEN OF THE PALACE AND BEGGED THE KITCHEN MAID WHO SLEPT THERE TO LEND HER A BRUSH +3538-163619-0010-1510: SHE BEGGED VERY PRETTILY AND GOT IT AND THEN SHE BRUSHED HER HAIR AND THE GOLD DROPPED FROM IT +3538-163619-0011-1511: OUT ON THEE UGLY BUSHY BRIDE SLEEPING SO SOFT BY THE YOUNG KING'S SIDE ON SAND AND STONES MY BED I MAKE AND MY (BROTHER->BROTHERS) SLEEPS WITH THE COLD SNAKE UNPITIED AND UNWEPT +3538-163619-0012-1512: I SHALL COME TWICE MORE AND THEN NEVER AGAIN SAID SHE +3538-163619-0013-1513: THIS TIME ALSO AS BEFORE SHE BORROWED A BRUSH AND BRUSHED HER HAIR WITH IT AND THE GOLD DROPPED DOWN AS SHE DID IT AND AGAIN SHE SENT THE DOG OUT THREE TIMES AND WHEN (DAY->THEY) DAWNED SHE DEPARTED BUT AS SHE WAS GOING SHE SAID AS SHE HAD SAID BEFORE I SHALL COME ONCE MORE AND THEN NEVER AGAIN +3538-163619-0014-1514: NO ONE CAN TELL HOW DELIGHTED THE KING WAS TO GET RID OF THAT HIDEOUS BUSHY BRIDE AND GET A QUEEN WHO WAS BRIGHT AND BEAUTIFUL AS DAY (ITSELF->ITSEL) +3538-163622-0000-1515: WILT THOU SERVE ME AND WATCH MY SEVEN (FOALS->FOLDS) ASKED THE KING 
+3538-163622-0001-1516: THE YOUTH THOUGHT THAT IT WAS VERY EASY WORK TO WATCH THE FOALS AND (THAT->*) HE COULD DO IT WELL ENOUGH +3538-163622-0002-1517: HAST THOU (WATCHED->ART) FAITHFULLY AND WELL (THE->BE) WHOLE DAY LONG SAID THE KING WHEN THE LAD CAME INTO HIS PRESENCE IN THE EVENING +3538-163622-0003-1518: YES THAT I HAVE SAID THE YOUTH +3538-163622-0004-1519: HE HAD GONE OUT ONCE TO SEEK A PLACE HE SAID BUT NEVER WOULD HE DO SUCH A THING AGAIN +3538-163622-0005-1520: (THEN->*) THE (KING->MACKING) PROMISED HIM THE SAME PUNISHMENT AND THE SAME REWARD THAT HE HAD PROMISED HIS BROTHER +3538-163622-0006-1521: WHEN HE HAD RUN AFTER THE (FOALS->FOOLS) FOR A LONG LONG TIME AND WAS HOT AND TIRED HE PASSED BY (A CLEFT->CLIFF) IN THE ROCK WHERE AN OLD WOMAN WAS SITTING SPINNING WITH A DISTAFF AND SHE CALLED TO HIM +3538-163622-0007-1522: (COME HITHER->COMMANDER) COME HITHER MY HANDSOME SON AND LET ME COMB YOUR HAIR +3538-163622-0008-1523: THE YOUTH LIKED THE THOUGHT OF THIS LET THE (FOALS RUN->FOLDS WARM) WHERE THEY CHOSE AND SEATED HIMSELF IN THE CLEFT OF THE ROCK BY THE SIDE OF THE OLD HAG +3538-163622-0009-1524: SO THERE HE SAT WITH HIS HEAD ON HER LAP TAKING HIS EASE THE LIVELONG DAY +3538-163622-0010-1525: ON THE THIRD DAY (CINDERLAD->SAID THE LAD) WANTED TO SET OUT +3538-163622-0011-1526: THE TWO BROTHERS LAUGHED AT HIM AND HIS FATHER AND MOTHER BEGGED HIM NOT TO GO BUT ALL TO NO PURPOSE (AND->WHEN) CINDERLAD SET OUT ON HIS WAY +3538-163622-0012-1527: I AM WALKING ABOUT IN SEARCH OF A PLACE SAID (CINDERLAD->SAINTO LAD) +3538-163622-0013-1528: I WOULD MUCH RATHER HAVE THE PRINCESS SAID (CINDERLAD->CINDER LAD) +3538-163622-0014-1529: AND THUS THEY JOURNEYED ONWARDS A LONG LONG WAY +3538-163622-0015-1530: WHEN THEY HAD GONE THUS FOR A LONG LONG WAY THE (FOAL->FULL) AGAIN ASKED DOST THOU SEE ANYTHING NOW +3538-163622-0016-1531: YES NOW I SEE SOMETHING THAT IS WHITE SAID (CINDERLAD->CINDER LAD) +3538-163622-0017-1532: IT LOOKS LIKE THE TRUNK OF A GREAT THICK BIRCH TREE +3538-163622-0018-1533: (CINDERLAD->SOONER LAD) TRIED BUT COULD NOT DO IT SO HE HAD TO TAKE A (DRAUGHT->DROP) FROM THE PITCHER AND THEN ONE MORE AND AFTER THAT STILL ANOTHER AND THEN HE WAS ABLE TO (WIELD->WHEEL) THE SWORD WITH PERFECT EASE +3538-163622-0019-1534: FOR WE ARE BROTHERS OF THE PRINCESS WHOM THOU ART TO HAVE WHEN THOU CANST TELL THE KING WHAT WE EAT AND DRINK BUT THERE IS A MIGHTY TROLL WHO (HAS->IS) CAST A SPELL OVER US +3538-163622-0020-1535: WHEN THEY HAD TRAVELLED (*->ALONG) A LONG (LONG->*) WAY THE FOAL SAID DOST THOU SEE ANYTHING +3538-163622-0021-1536: AND NOW INQUIRED THE (FOAL SEEST->FULL CEASE) THOU NOTHING NOW +3538-163622-0022-1537: NOW THEN SAID THE (FOAL->FOOL) DOST THOU NOT SEE ANYTHING NOW +3538-163622-0023-1538: THAT IS A RIVER SAID THE FOAL AND WE HAVE TO CROSS IT +3538-163622-0024-1539: I HAVE DONE MY BEST REPLIED (CINDERLAD->SIR LAD) +3538-163624-0000-1540: ONCE UPON A TIME THERE WAS A KING IN THE NORTH WHO HAD WON MANY WARS BUT NOW HE WAS OLD +3538-163624-0001-1541: THE OLD KING WENT OUT AND (FOUGHT->THOUGHT) BRAVELY BUT AT LAST HIS SWORD BROKE AND HE WAS WOUNDED AND HIS MEN FLED +3538-163624-0002-1542: BUT IN THE NIGHT WHEN THE BATTLE WAS OVER HIS YOUNG WIFE CAME OUT AND SEARCHED FOR HIM AMONG THE SLAIN AND AT LAST SHE FOUND HIM AND ASKED WHETHER HE MIGHT BE HEALED +3538-163624-0003-1543: SO (HE ASKED->YES) THE QUEEN HOW DO YOU KNOW IN THE DARK OF NIGHT WHETHER THE HOURS ARE WEARING TO THE MORNING AND SHE SAID +3538-163624-0004-1544: THEN THE OLD MAN SAID DRIVE ALL THE HORSES INTO THE RIVER AND 
CHOOSE THE ONE THAT SWIMS ACROSS +3538-163624-0005-1545: HE (IS->HAS) NO BIGGER THAN OTHER DRAGONS SAID THE TUTOR AND IF YOU WERE AS BRAVE AS YOUR FATHER YOU WOULD NOT FEAR HIM +3538-163624-0006-1546: THEN THE PERSON WHO HAD KILLED OTTER WENT DOWN AND CAUGHT THE DWARF WHO OWNED ALL THE TREASURE AND TOOK IT FROM HIM +3538-163624-0007-1547: ONLY ONE RING WAS LEFT WHICH THE DWARF WORE AND EVEN THAT WAS TAKEN FROM HIM +3538-163624-0008-1548: SO (REGIN MADE->WE GET) A SWORD AND (SIGURD->CIGAR) TRIED IT WITH A BLOW (ON->AND) A LUMP OF IRON AND THE SWORD BROKE +3538-163624-0009-1549: THEN (SIGURD->CIGAR) WENT TO HIS MOTHER AND ASKED FOR THE BROKEN PIECES OF HIS FATHER'S BLADE AND GAVE THEM TO (REGIN->REGAN) +3538-163624-0010-1550: SO (SIGURD->CIGARS) SAID THAT SWORD WOULD DO +3538-163624-0011-1551: THEN HE SAW THE TRACK WHICH THE DRAGON (*->HAD) MADE WHEN HE WENT TO A CLIFF TO DRINK AND THE TRACK WAS AS IF A GREAT RIVER HAD ROLLED ALONG AND LEFT A DEEP VALLEY +3538-163624-0012-1552: BUT (SIGURD->CIGARET) WAITED TILL HALF OF HIM HAD CRAWLED OVER THE PIT AND THEN HE THRUST THE SWORD (GRAM->GRAHAM) RIGHT INTO HIS VERY HEART +3538-163624-0013-1553: (SIGURD->CIGAR) SAID I WOULD TOUCH NONE OF IT IF BY LOSING IT I SHOULD NEVER DIE +3538-163624-0014-1554: BUT ALL MEN DIE AND (NO->KNOW) BRAVE MAN LETS DEATH FRIGHTEN HIM FROM HIS DESIRE +3538-163624-0015-1555: (DIE->GUY) THOU (FAFNIR->FAFFNER) AND THEN (FAFNIR->STAFF) DIED +3538-163624-0016-1556: THEN (SIGURD->CIGAR) RODE BACK AND MET (REGIN->RIGAN) AND (REGIN->RIGAN) ASKED HIM TO ROAST (FAFNIR'S->FAFNER'S) HEART AND LET HIM TASTE OF IT +3538-163624-0017-1557: SO (SIGURD->SIR GOD) PUT THE HEART OF (FAFNIR->FAFNER) ON A STAKE AND ROASTED IT +3538-163624-0018-1558: THERE IS (SIGURD->CIGAR) ROASTING (FAFNIR'S->FASTENER'S) HEART FOR ANOTHER WHEN HE SHOULD TASTE OF IT HIMSELF AND LEARN ALL WISDOM +3538-163624-0019-1559: THAT LET HIM DO (AND->*) THEN RIDE OVER (HINDFELL->HINFIELD) TO THE PLACE WHERE (BRYNHILD->BURNHILD) SLEEPS +3538-163624-0020-1560: THERE MUST SHE SLEEP TILL THOU (COMEST->COMES) FOR HER WAKING RISE UP AND RIDE FOR NOW SURE SHE WILL SWEAR THE VOW FEARLESS OF BREAKING +3538-163624-0021-1561: THEN HE TOOK THE HELMET OFF THE HEAD OF THE SLEEPER AND BEHOLD SHE WAS A MOST BEAUTIFUL LADY +3538-163624-0022-1562: THEN (SIGURD->CIGAR) RODE AWAY AND HE CAME TO THE HOUSE OF A KING WHO HAD A FAIR DAUGHTER +3538-163624-0023-1563: (THEN BRYNHILD'S->WHEN BRUNHOLD'S) FATHER TOLD (GUNNAR->GUNNER) THAT SHE WOULD MARRY NONE BUT HIM WHO COULD RIDE THE FLAME IN FRONT OF HER ENCHANTED TOWER AND THITHER THEY RODE AND (GUNNAR->GUNNER) SET HIS HORSE (AT->TO) THE FLAME BUT HE WOULD NOT FACE IT +3538-163624-0024-1564: FOR ONE DAY WHEN (BRYNHILD->BURNEHELD) AND (GUDRUN->GUNDRAIN) WERE BATHING (BRYNHILD WADED->BURNEHELD WAITED) FARTHEST (OUT->SOUTH) INTO THE RIVER AND SAID SHE DID THAT TO SHOW SHE WAS (GUIRUN'S->GUNDERING) SUPERIOR +3538-163624-0025-1565: FOR HER HUSBAND SHE SAID HAD RIDDEN THROUGH THE FLAME WHEN NO OTHER MAN DARED FACE IT +3538-163624-0026-1566: NOT LONG TO WAIT HE SAID TILL THE BITTER SWORD STANDS FAST IN MY HEART AND THOU (WILL->WILT) NOT LIVE LONG WHEN I AM DEAD +367-130732-0000-1466: LOBSTERS AND LOBSTERS +367-130732-0001-1467: WHEN (IS->AS) A LOBSTER NOT A LOBSTER WHEN IT IS A CRAYFISH +367-130732-0002-1468: THIS QUESTION AND ANSWER MIGHT WELL GO INTO THE (PRIMER->PRIMARY) OF INFORMATION FOR THOSE WHO COME (TO->THE) SAN FRANCISCO FROM THE EAST FOR WHAT IS CALLED A LOBSTER IN SAN FRANCISCO IS NOT A (LOBSTER->LOBSOR) AT ALL BUT A CRAYFISH 
+367-130732-0003-1469: THE PACIFIC CRAYFISH (HOWEVER SERVES->HOURSERVES) EVERY PURPOSE AND WHILE MANY (CONTEND THAT->CONTENDED) ITS MEAT IS NOT SO DELICATE IN (FLAVOR->FLAVORIT) AS THAT OF ITS EASTERN COUSIN THE (CALIFORNIAN->CALIFORNIA) WILL AS STRENUOUSLY INSIST THAT IT IS BETTER BUT OF COURSE SOMETHING MUST ALWAYS BE ALLOWED FOR THE PATRIOTISM OF THE (CALIFORNIAN->CALIFORNIA) +367-130732-0004-1470: A BOOK COULD BE WRITTEN ABOUT THIS RESTAURANT AND THEN ALL WOULD NOT BE TOLD FOR ALL ITS SECRETS CAN NEVER BE KNOWN +367-130732-0005-1471: IT WAS HERE THAT MOST MAGNIFICENT DINNERS WERE ARRANGED IT WAS HERE THAT EXTRAORDINARY DISHES WERE CONCOCTED BY CHEFS OF (WORLD WIDE FAME->WOOLWIFE) IT WAS HERE THAT LOBSTER (A LA NEWBERG->ALAD NEWBURG) REACHED ITS HIGHEST PERFECTION AND THIS IS THE RECIPE THAT WAS FOLLOWED WHEN (IT->HE) WAS PREPARED IN THE (DELMONICO->DOMONICO) +367-130732-0006-1472: LOBSTER (A LA NEWBERG->OLY NEWBURG) +367-130732-0007-1473: ONE POUND OF (LOBSTER MEAT->LOBS TO ME) ONE TEASPOONFUL OF BUTTER ONE HALF PINT OF CREAM YOLKS OF FOUR EGGS ONE WINE GLASS OF SHERRY LOBSTER FAT +367-130732-0008-1474: (PUT THIS->PUS) IN A DOUBLE BOILER AND LET COOK UNTIL THICK STIRRING CONSTANTLY +367-130732-0009-1475: SERVE IN A (CHAFING->CHIEFING) DISH WITH (THIN->FLIND) SLICES OF DRY TOAST +367-130732-0010-1476: KING OF (SHELL FISH->SHELLFISH) +367-130732-0011-1477: ONE HAS TO COME TO SAN FRANCISCO TO PARTAKE OF THE KING OF (SHELL FISH->SHELLFISH) THE MAMMOTH PACIFIC CRAB +367-130732-0012-1478: I SAY COME TO SAN FRANCISCO ADVISEDLY FOR WHILE THE CRAB IS FOUND ALL ALONG THE COAST IT IS PREPARED NOWHERE SO DELICIOUSLY AS IN (SAN FRANCISCO->SAMPANCISCO) +367-130732-0013-1479: (GOBEY'S PASSED->GOBYS PASS) WITH THE FIRE AND THE LITTLE RESTAURANT BEARING HIS NAME (AND->*) IN CHARGE OF HIS WIDOW (IN->AND) UNION SQUARE AVENUE HAS NOT ATTAINED THE FAME OF THE OLD PLACE +367-130732-0014-1480: IT IS POSSIBLE THAT SHE KNOWS THE SECRET OF PREPARING CRAB AS IT WAS PREPARED IN THE (GOBEY'S->GOBIES) OF BEFORE THE FIRE BUT HIS (PRESTIGE->PRESAGE) DID NOT DESCEND TO HER +367-130732-0015-1481: (GOBEY'S CRAB STEW->GOBIUS CRABS DO) +367-130732-0016-1482: TAKE THE MEAT OF ONE LARGE CRAB SCRAPING OUT ALL (OF->*) THE (FAT->BAT) FROM THE SHELL +367-130732-0017-1483: SOAK THE CRAB MEAT IN THE SHERRY TWO HOURS BEFORE COOKING +367-130732-0018-1484: CHOP FINE THE ONION SWEET PEPPER AND TOMATO WITH THE ROSEMARY +367-130732-0019-1485: HEAT THIS IN A (STEWPAN->STEWPANT) AND WHEN (SIMMERING ADD->SIBBERING AT) THE SHERRY AND CRAB (MEAT->ME) AND LET ALL COOK TOGETHER WITH A SLOW FIRE FOR EIGHT MINUTES +367-130732-0020-1486: SERVE IN A CHAFING DISH WITH TOASTED CRACKERS OR THIN SLICES OF TOASTED BREAD +367-130732-0021-1487: LOBSTER IN MINIATURE +367-130732-0022-1488: SO FAR IT HAS BEEN USED MOSTLY FOR GARNISHMENT OF OTHER DISHES AND IT IS ONLY RECENTLY THAT THE (HOF BRAU->WHOLE BROW) HAS BEEN MAKING A SPECIALTY OF THEM +367-130732-0023-1489: ALL (OF->*) THE BETTER CLASS RESTAURANTS HOWEVER WILL SERVE THEM IF YOU ORDER THEM +367-130732-0024-1490: THIS IS THE RECIPE FOR EIGHT PEOPLE AND IT IS WELL (*->IT) WORTH TRYING IF YOU ARE GIVING A DINNER OF IMPORTANCE +367-130732-0025-1491: (BISQUE->THIS) OF (CRAWFISH->CROFISH) +367-130732-0026-1492: TAKE THIRTY (CRAWFISH->CROPFISH) FROM WHICH REMOVE THE GUT CONTAINING THE GALL IN THE FOLLOWING MANNER TAKE FIRM HOLD OF THE CRAWFISH WITH THE LEFT HAND SO AS TO AVOID BEING PINCHED BY ITS (CLAWS->CLOTH) WITH THE THUMB AND FOREFINGER OF THE RIGHT HAND PINCH THE EXTREME END OF THE CENTRAL FIN OF THE 
TAIL AND WITH A SUDDEN JERK THE GUT WILL BE WITHDRAWN +367-130732-0027-1493: MINCE (OR->ARE) CUT INTO SMALL DICE A CARROT (AN->AND) ONION ONE HEAD OF CELERY AND A FEW PARSLEY ROOTS AND TO THESE (ADD->AT) A (BAY LEAF->BAILEAF OF) A SPRIG OF THYME A LITTLE (MINIONETTE->MINOR) PEPPER AND TWO (OUNCES->OZ) OF BUTTER +367-130732-0028-1494: PUT THESE INGREDIENTS INTO A STEWPAN AND FRY THEM TEN MINUTES THEN THROW IN THE (CRAWFISH->CROPPISH) AND POUR ON THEM HALF A BOTTLE OF FRENCH WHITE WINE +367-130732-0029-1495: ALLOW (THIS->US) TO BOIL AND THEN ADD A QUART OF STRONG (CONSOMME->CONSUM) AND LET ALL CONTINUE BOILING FOR HALF AN HOUR +367-130732-0030-1496: PICK OUT THE (CRAWFISH->CRAW FISH) AND STRAIN THE BROTH THROUGH A NAPKIN BY PRESSURE INTO A BASIN IN ORDER TO EXTRACT ALL THE ESSENCE FROM THE VEGETABLES +367-130732-0031-1497: PICK THE SHELLS (OFF->OF) TWENTY FIVE OF THE (CRAWFISH->CROFISH) TAILS TRIM THEM NEATLY AND SET THEM ASIDE UNTIL WANTED +367-130732-0032-1498: RESERVE SOME OF THE SPAWN ALSO (HALF OF->HAPPEN) THE BODY SHELLS WITH WHICH TO MAKE THE CRAWFISH BUTTER TO FINISH THE SOUP +367-130732-0033-1499: THIS BUTTER IS MADE AS FOLLOWS PLACE THE SHELLS (ON->IN) A BAKING SHEET IN THE OVEN TO DRY LET THE SHELLS COOL AND THEN POUND THEM IN A MORTAR WITH A LITTLE LOBSTER (CORAL->COAL) AND FOUR OUNCES OF FRESH BUTTER THOROUGHLY BRUISING THE WHOLE TOGETHER SO AS TO MAKE A FINE PASTE +367-293981-0000-1445: I SWEAR (IT->*) ANSWERED SANCHO +367-293981-0001-1446: I SAY SO CONTINUED DON QUIXOTE BECAUSE I HATE TAKING AWAY (ANYONE'S->ANY ONE'S) GOOD NAME +367-293981-0002-1447: I SAY REPLIED SANCHO THAT I SWEAR TO HOLD MY TONGUE ABOUT IT TILL THE END OF YOUR (WORSHIP'S DAYS->WORSHIP STAYS) AND (GOD->GONE) GRANT I MAY BE ABLE TO LET IT OUT (TOMORROW->TO MORROW) +367-293981-0003-1448: THOUGH YOUR WORSHIP WAS NOT SO BADLY OFF HAVING IN YOUR ARMS (THAT INCOMPARABLE->THE INN COMPARABLE) BEAUTY YOU SPOKE OF BUT I WHAT DID I HAVE EXCEPT THE HEAVIEST (WHACKS->WAX THAT) I THINK I HAD IN ALL MY LIFE +367-293981-0004-1449: UNLUCKY ME (AND->INTO) THE MOTHER THAT BORE ME +367-293981-0005-1450: DIDN'T I SAY SO WORSE LUCK TO MY LINE SAID SANCHO +367-293981-0006-1451: IT CANNOT BE THE (MOOR->MORE) ANSWERED DON QUIXOTE FOR THOSE UNDER ENCHANTMENT DO NOT LET THEMSELVES BE SEEN BY ANYONE +367-293981-0007-1452: IF THEY (DON'T->DO NOT) LET THEMSELVES BE SEEN THEY LET THEMSELVES BE FELT SAID SANCHO IF NOT LET MY SHOULDERS SPEAK TO THE POINT +367-293981-0008-1453: (MINE->MIKE) COULD SPEAK TOO SAID DON QUIXOTE BUT THAT IS NOT A (SUFFICIENT->SUSPICION OF) REASON FOR BELIEVING THAT WHAT WE SEE IS THE ENCHANTED MOOR +367-293981-0009-1454: THE (OFFICER->OFFICERS) TURNED TO HIM AND SAID WELL HOW GOES (IT->A) GOOD MAN +367-293981-0010-1455: (SANCHO GOT->SANCHA CUT) UP WITH PAIN ENOUGH IN HIS BONES AND WENT AFTER THE INNKEEPER IN THE DARK AND MEETING THE OFFICER WHO WAS LOOKING TO SEE WHAT HAD BECOME OF HIS ENEMY HE SAID TO HIM SENOR WHOEVER YOU ARE DO US (THE FAVOUR->TO FAVOR) AND KINDNESS TO GIVE US A LITTLE ROSEMARY OIL SALT AND (WINE->WHITE) FOR IT IS (WANTED->WATER) TO CURE ONE OF (THE->OUR) BEST KNIGHTS ERRANT ON EARTH WHO LIES ON YONDER BED WOUNDED BY THE HANDS OF THE ENCHANTED MOOR THAT IS IN THIS INN +367-293981-0011-1456: TO BE BRIEF HE TOOK THE (MATERIALS->MATURES) OF WHICH HE MADE A COMPOUND MIXING THEM ALL (AND->*) BOILING THEM A GOOD WHILE (*->IT) UNTIL IT SEEMED TO HIM THEY HAD COME TO PERFECTION +367-293981-0012-1457: SANCHO PANZA WHO ALSO REGARDED THE AMENDMENT OF HIS MASTER AS MIRACULOUS BEGGED HIM TO GIVE HIM WHAT 
WAS (LEFT->LET) IN (THE PIGSKIN->A PICTION) WHICH WAS NO SMALL QUANTITY +367-293981-0013-1458: DON QUIXOTE CONSENTED AND HE TAKING IT WITH BOTH HANDS IN GOOD FAITH AND WITH A BETTER WILL (GULPED->GO TO) DOWN AND DRAINED (OFF->UP) VERY LITTLE LESS THAN HIS MASTER +367-293981-0014-1459: IF YOUR WORSHIP KNEW THAT RETURNED SANCHO (WOE->WON'T) BETIDE ME AND ALL MY KINDRED WHY DID YOU LET ME TASTE IT +367-293981-0015-1460: SEARCH YOUR MEMORY AND IF YOU FIND ANYTHING OF THIS KIND YOU NEED ONLY TELL ME OF IT AND I PROMISE YOU BY THE ORDER OF KNIGHTHOOD WHICH I HAVE RECEIVED TO PROCURE YOU SATISFACTION (AND->IN) REPARATION TO THE UTMOST OF YOUR DESIRE +367-293981-0016-1461: THEN THIS IS AN (INN->IN) SAID DON QUIXOTE +367-293981-0017-1462: (AND->IN) A VERY RESPECTABLE ONE SAID THE INNKEEPER +367-293981-0018-1463: THE CRIES OF THE POOR (BLANKETED->BLANKET) WRETCH WERE SO LOUD THAT THEY REACHED THE EARS OF HIS MASTER WHO HALTING TO LISTEN (ATTENTIVELY->TINTIVELY) WAS PERSUADED THAT SOME NEW ADVENTURE WAS COMING UNTIL HE CLEARLY PERCEIVED THAT IT WAS (HIS->THE) SQUIRE WHO UTTERED THEM +367-293981-0019-1464: HE SAW HIM RISING AND FALLING IN THE AIR WITH SUCH GRACE AND NIMBLENESS THAT HAD HIS RAGE ALLOWED HIM IT IS MY BELIEF HE WOULD HAVE LAUGHED +367-293981-0020-1465: SANCHO TOOK IT AND AS HE WAS RAISING IT TO HIS MOUTH HE WAS STOPPED BY THE CRIES OF HIS MASTER EXCLAIMING SANCHO MY SON DRINK NOT WATER (DRINK IT NOT->DRINKIN O) MY SON FOR IT WILL KILL THEE SEE HERE I HAVE THE BLESSED BALSAM AND HE HELD UP THE FLASK OF LIQUOR AND WITH DRINKING TWO DROPS (OF IT->WHAT) THOU WILT CERTAINLY BE RESTORED +3764-168670-0000-1666: THE STRIDES OF A LAME MAN ARE LIKE THE OGLING GLANCES OF A ONE EYED MAN THEY DO NOT REACH THEIR GOAL VERY PROMPTLY +3764-168670-0001-1667: COSETTE HAD WAKED UP +3764-168670-0002-1668: JEAN VALJEAN HAD PLACED HER NEAR THE FIRE +3764-168670-0003-1669: YOU WILL WAIT FOR ME AT A LADY'S HOUSE I SHALL COME TO FETCH YOU +3764-168670-0004-1670: EVERYTHING IS (ARRANGED->RANGED) AND NOTHING IS SAID FAUCHELEVENT +3764-168670-0005-1671: I HAVE PERMISSION TO BRING YOU IN BUT BEFORE BRINGING YOU IN YOU MUST BE GOT OUT +3764-168670-0006-1672: THAT'S WHERE THE DIFFICULTY LIES +3764-168670-0007-1673: IT IS EASY ENOUGH WITH THE CHILD YOU WILL CARRY HER OUT +3764-168670-0008-1674: AND SHE WILL HOLD HER TONGUE I ANSWER FOR THAT +3764-168670-0009-1675: FAUCHELEVENT GRUMBLED MORE TO HIMSELF THAN TO JEAN VALJEAN +3764-168670-0010-1676: YOU UNDERSTAND FATHER MADELEINE THE GOVERNMENT WILL NOTICE IT +3764-168670-0011-1677: JEAN VALJEAN STARED HIM STRAIGHT IN THE EYE AND THOUGHT THAT HE WAS RAVING +3764-168670-0012-1678: FAUCHELEVENT WENT ON +3764-168670-0013-1679: IT IS TO MORROW THAT I AM TO BRING YOU IN THE (PRIORESS->PRIORS) EXPECTS YOU +3764-168670-0014-1680: THEN HE EXPLAINED TO JEAN VALJEAN THAT THIS WAS HIS RECOMPENSE FOR A SERVICE WHICH HE (FAUCHELEVENT->FOR CHAUVELIN) WAS TO RENDER TO THE COMMUNITY +3764-168670-0015-1681: THAT THE NUN WHO HAD DIED THAT MORNING HAD REQUESTED TO BE BURIED IN THE COFFIN WHICH HAD SERVED HER FOR A BED AND INTERRED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL +3764-168670-0016-1682: THAT THE (PRIORESS->PRIOR REST) AND THE VOCAL MOTHERS INTENDED TO FULFIL THE WISH OF THE DECEASED +3764-168670-0017-1683: THAT HE (FAUCHELEVENT->FOR SCHLEVENT) WAS TO NAIL UP THE COFFIN IN THE CELL (RAISE->RAISED) THE STONE IN THE CHAPEL AND (LOWER->BLOW) THE CORPSE INTO THE VAULT +3764-168670-0018-1684: AND THEN THAT THERE WAS ANOTHER THE EMPTY COFFIN +3764-168670-0019-1685: WHAT IS THAT EMPTY 
COFFIN +3764-168670-0020-1686: ASKED JEAN VALJEAN FAUCHELEVENT REPLIED +3764-168670-0021-1687: WHAT COFFIN WHAT ADMINISTRATION +3764-168670-0022-1688: FAUCHELEVENT WHO WAS SEATED SPRANG UP AS THOUGH A BOMB HAD BURST UNDER HIS CHAIR YOU +3764-168670-0023-1689: YOU KNOW FAUCHELEVENT WHAT YOU HAVE SAID MOTHER CRUCIFIXION IS DEAD +3764-168670-0024-1690: AND I ADD AND FATHER MADELEINE IS BURIED (AH->*) +3764-168670-0025-1691: YOU ARE NOT LIKE OTHER MEN FATHER MADELEINE +3764-168670-0026-1692: THIS OFFERS THE MEANS BUT GIVE ME SOME INFORMATION IN THE FIRST PLACE +3764-168670-0027-1693: HOW LONG IS THE COFFIN SIX FEET +3764-168670-0028-1694: IT IS A CHAMBER ON THE GROUND FLOOR WHICH HAS A GRATED WINDOW OPENING ON THE GARDEN WHICH IS CLOSED ON THE OUTSIDE BY A SHUTTER AND TWO DOORS ONE LEADS INTO THE CONVENT THE OTHER INTO THE CHURCH (WHAT CHURCH->A WATCH) +3764-168670-0029-1695: THE CHURCH IN THE STREET (*->THOUGH) THE CHURCH WHICH ANY ONE CAN ENTER +3764-168670-0030-1696: HAVE YOU THE KEYS TO THOSE TWO DOORS +3764-168670-0031-1697: (*->AND) NO I HAVE THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CONVENT THE PORTER HAS THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CHURCH +3764-168670-0032-1698: ONLY TO ALLOW THE (UNDERTAKER'S->UNDERTAKERS) MEN TO ENTER WHEN THEY COME TO GET THE COFFIN +3764-168670-0033-1699: WHO NAILS UP THE COFFIN I DO +3764-168670-0034-1700: WHO SPREADS THE (PALL->POOL) OVER IT +3764-168670-0035-1701: NOT ANOTHER MAN EXCEPT THE POLICE DOCTOR CAN ENTER THE (DEAD ROOM->BEDROOM) THAT IS EVEN WRITTEN ON THE WALL +3764-168670-0036-1702: COULD YOU HIDE ME IN THAT ROOM TO NIGHT WHEN EVERY ONE IS ASLEEP +3764-168670-0037-1703: ABOUT THREE O'CLOCK IN THE AFTERNOON +3764-168670-0038-1704: I SHALL BE HUNGRY I WILL BRING YOU SOMETHING +3764-168670-0039-1705: YOU CAN COME AND NAIL ME UP IN THE COFFIN AT TWO O'CLOCK +3764-168670-0040-1706: FAUCHELEVENT RECOILED AND CRACKED HIS FINGER JOINTS BUT THAT IS IMPOSSIBLE +3764-168670-0041-1707: BAH IMPOSSIBLE TO TAKE A HAMMER AND DRIVE SOME NAILS IN A PLANK +3764-168670-0042-1708: JEAN VALJEAN HAD BEEN IN WORSE (STRAITS->STRAIT) THAN THIS +3764-168670-0043-1709: ANY MAN WHO HAS BEEN A PRISONER UNDERSTANDS HOW TO CONTRACT HIMSELF TO FIT THE DIAMETER OF THE ESCAPE +3764-168670-0044-1710: WHAT DOES NOT A MAN UNDERGO FOR THE SAKE OF A CURE +3764-168670-0045-1711: TO HAVE HIMSELF NAILED UP IN A CASE AND CARRIED OFF LIKE A BALE OF GOODS TO LIVE FOR A LONG TIME IN A BOX TO FIND AIR WHERE THERE IS NONE TO ECONOMIZE HIS BREATH FOR HOURS TO KNOW HOW TO STIFLE WITHOUT DYING THIS WAS ONE OF JEAN VALJEAN'S GLOOMY TALENTS +3764-168670-0046-1712: YOU SURELY MUST HAVE A GIMLET YOU WILL MAKE A FEW HOLES HERE AND THERE AROUND MY MOUTH AND YOU WILL NAIL THE TOP PLANK ON LOOSELY GOOD AND WHAT IF YOU SHOULD HAPPEN TO COUGH OR TO SNEEZE +3764-168670-0047-1713: A MAN WHO IS MAKING HIS ESCAPE DOES NOT COUGH OR SNEEZE +3764-168670-0048-1714: WHO IS THERE WHO HAS NOT SAID TO A CAT DO COME IN +3764-168670-0049-1715: THE (OVER PRUDENT CATS->OVERPRUDENT CARTS) AS THEY ARE AND BECAUSE THEY ARE CATS SOMETIMES INCUR MORE DANGER THAN THE AUDACIOUS +3764-168670-0050-1716: BUT JEAN VALJEAN'S COOLNESS PREVAILED OVER HIM IN SPITE OF HIMSELF HE GRUMBLED +3764-168670-0051-1717: IF YOU ARE SURE OF COMING OUT OF THE COFFIN ALL RIGHT I AM SURE OF GETTING (YOU->*) OUT OF THE GRAVE +3764-168670-0052-1718: AN OLD FELLOW OF THE OLD SCHOOL THE GRAVE DIGGER PUTS THE CORPSES IN THE GRAVE AND I PUT THE GRAVE DIGGER IN MY POCKET +3764-168670-0053-1719: I SHALL FOLLOW THAT IS MY BUSINESS 
+3764-168670-0054-1720: THE (HEARSE HALTS->HOUSEHOLTS) THE (UNDERTAKER'S->UNDERTAKERS) MEN (KNOT->NOT) A ROPE AROUND YOUR COFFIN AND LOWER YOU DOWN +3764-168670-0055-1721: THE (PRIEST SAYS->PRIESTS AS) THE PRAYERS MAKES THE SIGN OF THE CROSS SPRINKLES THE HOLY WATER AND TAKES HIS DEPARTURE +3764-168670-0056-1722: ONE OF TWO THINGS WILL HAPPEN HE WILL EITHER BE SOBER OR HE WILL NOT BE SOBER +3764-168670-0057-1723: THAT IS SETTLED FATHER FAUCHELEVENT ALL WILL GO WELL +3764-168671-0000-1724: ON THE FOLLOWING DAY AS THE SUN WAS DECLINING THE VERY RARE PASSERS BY ON THE BOULEVARD DU (MAINE->MIN) PULLED OFF THEIR HATS TO AN OLD FASHIONED HEARSE ORNAMENTED WITH SKULLS CROSS BONES AND TEARS +3764-168671-0001-1725: THIS HEARSE CONTAINED A COFFIN COVERED WITH A WHITE CLOTH OVER WHICH SPREAD A LARGE BLACK CROSS LIKE A HUGE CORPSE WITH DROOPING ARMS +3764-168671-0002-1726: (A MOURNING->THE MORNING) COACH IN WHICH COULD BE SEEN A PRIEST IN HIS SURPLICE AND A CHOIR BOY IN HIS RED CAP FOLLOWED +3764-168671-0003-1727: BEHIND IT CAME AN OLD MAN IN THE GARMENTS OF A LABORER WHO LIMPED ALONG +3764-168671-0004-1728: THE GRAVE DIGGERS BEING THUS BOUND TO SERVICE IN THE EVENING IN SUMMER AND AT NIGHT IN WINTER IN THIS CEMETERY THEY WERE SUBJECTED TO A SPECIAL DISCIPLINE +3764-168671-0005-1729: THESE GATES THEREFORE SWUNG INEXORABLY ON THEIR HINGES AT THE INSTANT WHEN THE SUN DISAPPEARED BEHIND THE DOME OF THE (INVALIDES->INVALIDE) +3764-168671-0006-1730: DAMPNESS WAS INVADING IT THE FLOWERS WERE DESERTING IT +3764-168671-0007-1731: THE BOURGEOIS DID NOT CARE MUCH ABOUT BEING BURIED IN THE (VAUGIRARD->ROGI) IT HINTED AT POVERTY (PERE LACHAISE->PALACHE'S) IF YOU PLEASE +3764-168671-0008-1732: TO BE BURIED IN (PERE LACHAISE->PERFELLAR CHASE) IS EQUIVALENT TO HAVING FURNITURE OF MAHOGANY IT IS RECOGNIZED AS ELEGANT +3764-168671-0009-1733: THE INTERMENT OF MOTHER CRUCIFIXION IN THE VAULT UNDER THE ALTAR THE EXIT OF COSETTE THE INTRODUCTION OF JEAN VALJEAN (TO->INTO) THE DEAD ROOM ALL HAD BEEN EXECUTED WITHOUT DIFFICULTY AND THERE HAD BEEN NO HITCH LET US REMARK IN PASSING THAT THE BURIAL OF MOTHER CRUCIFIXION UNDER THE ALTAR OF THE CONVENT IS A PERFECTLY VENIAL OFFENCE IN OUR SIGHT +3764-168671-0010-1734: IT IS ONE OF THE FAULTS WHICH RESEMBLE A DUTY +3764-168671-0011-1735: THE NUNS HAD COMMITTED IT NOT ONLY WITHOUT DIFFICULTY BUT EVEN WITH THE APPLAUSE OF THEIR OWN CONSCIENCES +3764-168671-0012-1736: IN THE CLOISTER WHAT IS CALLED THE GOVERNMENT IS ONLY AN INTERMEDDLING WITH AUTHORITY AN INTERFERENCE WHICH IS ALWAYS QUESTIONABLE +3764-168671-0013-1737: MAKE AS MANY (LAWS->NOISE) AS YOU PLEASE MEN BUT KEEP THEM FOR YOURSELVES +3764-168671-0014-1738: A PRINCE IS NOTHING IN THE PRESENCE OF A PRINCIPLE +3764-168671-0015-1739: FAUCHELEVENT LIMPED ALONG BEHIND THE HEARSE IN A VERY CONTENTED FRAME OF MIND +3764-168671-0016-1740: JEAN VALJEAN'S COMPOSURE WAS ONE OF THOSE POWERFUL TRANQUILLITIES WHICH ARE CONTAGIOUS +3764-168671-0017-1741: WHAT REMAINED TO BE DONE WAS A MERE NOTHING +3764-168671-0018-1742: HE PLAYED WITH FATHER (MESTIENNE->MESSIAN) +3764-168671-0019-1743: HE DID WHAT HE LIKED WITH HIM HE MADE HIM DANCE ACCORDING TO HIS WHIM +3764-168671-0020-1744: THE PERMISSION FOR INTERMENT MUST BE EXHIBITED +3764-168671-0021-1745: HE WAS A SORT OF LABORING MAN WHO WORE A WAISTCOAT WITH LARGE POCKETS AND CARRIED A MATTOCK UNDER HIS ARM +3764-168671-0022-1746: THE MAN REPLIED THE GRAVE DIGGER +3764-168671-0023-1747: THE (GRAVE->BRAVE) DIGGER YES +3764-168671-0024-1748: YOU I +3764-168671-0025-1749: FATHER 
(MESTIENNE->MISSION) IS THE GRAVE DIGGER HE WAS +3764-168671-0026-1750: FAUCHELEVENT HAD EXPECTED ANYTHING BUT THIS THAT A GRAVE DIGGER COULD DIE +3764-168671-0027-1751: IT IS TRUE NEVERTHELESS THAT GRAVE DIGGERS DO DIE THEMSELVES +3764-168671-0028-1752: HE HAD HARDLY THE STRENGTH TO STAMMER +3764-168671-0029-1753: BUT HE PERSISTED FEEBLY (FATHER MESTIENNE->I'VE A MESSIAN) IS THE GRAVE DIGGER +3764-168671-0030-1754: DO YOU KNOW WHO LITTLE FATHER (LENOIR->LOIS) IS HE IS A JUG OF RED WINE +3764-168671-0031-1755: BUT YOU ARE A JOLLY FELLOW TOO +3764-168671-0032-1756: ARE YOU NOT COMRADE (WE'LL GO AND->WILGHAN) HAVE A DRINK TOGETHER PRESENTLY +3764-168671-0033-1757: THE MAN REPLIED +3764-168671-0034-1758: HE LIMPED MORE OUT OF ANXIETY THAN FROM INFIRMITY +3764-168671-0035-1759: THE GRAVE DIGGER WALKED ON IN FRONT OF HIM +3764-168671-0036-1760: FAUCHELEVENT PASSED THE UNEXPECTED (GRIBIER->CLAVIER) ONCE MORE IN REVIEW +3764-168671-0037-1761: FAUCHELEVENT WHO WAS ILLITERATE BUT VERY SHARP UNDERSTOOD THAT HE HAD TO DEAL WITH A FORMIDABLE SPECIES OF MAN WITH A FINE TALKER HE MUTTERED +3764-168671-0038-1762: (SO->MISS OH) FATHER (MESTIENNE->MESS TEEN) IS DEAD +3764-168671-0039-1763: THE MAN REPLIED COMPLETELY +3764-168671-0040-1764: THE GOOD GOD CONSULTED HIS NOTE BOOK WHICH SHOWS WHEN THE TIME IS UP IT WAS (FATHER MESTIENNE'S->FARTHIAN'S) TURN (FATHER MESTIENNE->FOR THE MESSIAN) DIED +3764-168671-0041-1765: STAMMERED FAUCHELEVENT IT IS MADE +3764-168671-0042-1766: YOU ARE A PEASANT I AM A PARISIAN +3764-168671-0043-1767: FAUCHELEVENT THOUGHT I AM LOST +3764-168671-0044-1768: THEY WERE ONLY A FEW TURNS OF THE WHEEL DISTANT FROM THE SMALL ALLEY LEADING TO THE (NUNS->NUN'S) CORNER +3764-168671-0045-1769: AND HE ADDED WITH THE SATISFACTION OF A SERIOUS MAN WHO IS TURNING A PHRASE WELL +3764-168671-0046-1770: FORTUNATELY THE SOIL WHICH WAS LIGHT AND WET WITH THE WINTER RAINS CLOGGED THE WHEELS AND RETARDED ITS SPEED +3764-168671-0047-1771: MY FATHER WAS A PORTER AT THE (PRYTANEUM->BRITTANNIUM) TOWN HALL +3764-168671-0048-1772: BUT HE HAD REVERSES HE HAD (LOSSES ON CHANGE->LOSES UNCHANGED) I WAS OBLIGED TO RENOUNCE THE PROFESSION OF AUTHOR BUT I AM STILL A PUBLIC WRITER +3764-168671-0049-1773: (*->BUT) SO YOU ARE NOT A GRAVE DIGGER THEN +3764-168671-0050-1774: RETURNED FAUCHELEVENT CLUTCHING AT THIS BRANCH FEEBLE AS IT WAS +3764-168671-0051-1775: HERE A REMARK BECOMES NECESSARY +3764-168671-0052-1776: (*->A) FAUCHELEVENT WHATEVER HIS ANGUISH OFFERED A DRINK BUT HE DID NOT EXPLAIN HIMSELF ON ONE POINT WHO WAS TO PAY +3764-168671-0053-1777: THE GRAVE DIGGER WENT ON WITH (A->THE) SUPERIOR SMILE +3764-168671-0054-1778: ONE MUST EAT +3997-180294-0000-1800: THE DUKE COMES EVERY MORNING THEY WILL TELL HIM WHEN HE COMES THAT I AM ASLEEP AND PERHAPS HE WILL WAIT UNTIL I (WAKE->AWAKE) +3997-180294-0001-1801: YES BUT IF I SHOULD ALREADY ASK FOR SOMETHING WHAT +3997-180294-0002-1802: WELL DO IT FOR ME FOR I SWEAR TO YOU (THAT I->THY) DON'T LOVE YOU AS THE OTHERS HAVE LOVED YOU +3997-180294-0003-1803: THERE ARE BOLTS (ON->IN) THE DOOR WRETCH +3997-180294-0004-1804: I DON'T KNOW HOW IT IS BUT IT SEEMS TO ME AS IF I DO +3997-180294-0005-1805: NOW GO I CAN'T KEEP MY EYES OPEN +3997-180294-0006-1806: IT (SEEMED->SEEMS) TO ME AS IF THIS SLEEPING CITY (BELONGED->BELONGS) TO ME I SEARCHED MY MEMORY FOR THE NAMES OF THOSE WHOSE HAPPINESS I HAD ONCE ENVIED AND I COULD NOT RECALL ONE WITHOUT FINDING MYSELF THE HAPPIER +3997-180294-0007-1807: EDUCATION FAMILY FEELING THE SENSE OF DUTY THE FAMILY ARE STRONG SENTINELS BUT THERE 
ARE NO SENTINELS SO VIGILANT AS NOT TO BE DECEIVED BY A GIRL OF SIXTEEN TO WHOM NATURE BY THE VOICE OF THE MAN SHE LOVES GIVES THE FIRST (COUNSELS->COUNSEL) OF LOVE ALL THE MORE ARDENT BECAUSE THEY SEEM SO PURE +3997-180294-0008-1808: THE MORE (A->*) GIRL BELIEVES IN GOODNESS THE MORE EASILY WILL SHE GIVE WAY IF NOT TO HER LOVER AT LEAST TO LOVE FOR (BEING->BE) WITHOUT MISTRUST SHE IS WITHOUT FORCE AND TO WIN HER LOVE (IS->AS) A TRIUMPH THAT CAN BE GAINED BY ANY YOUNG (MAN->MEN) OF FIVE AND TWENTY SEE HOW YOUNG GIRLS ARE WATCHED AND GUARDED +3997-180294-0009-1809: THEN HOW SURELY MUST THEY DESIRE THE WORLD WHICH IS HIDDEN FROM THEM HOW (SURELY->TRULY) MUST THEY FIND IT TEMPTING HOW SURELY MUST THEY (LISTEN->LISTENED) TO THE FIRST VOICE WHICH COMES TO TELL ITS SECRETS THROUGH THEIR BARS AND BLESS THE HAND WHICH (*->HE) IS THE FIRST TO RAISE A CORNER OF THE (MYSTERIOUS->MYSTERY) VEIL +3997-180294-0010-1810: WITH THEM THE BODY HAS WORN OUT THE SOUL THE SENSES (HAVE->HALF) BURNED UP THE HEART DISSIPATION HAS BLUNTED THE FEELINGS +3997-180294-0011-1811: THEY LOVE BY PROFESSION AND NOT BY INSTINCT +3997-180294-0012-1812: WHEN A CREATURE WHO HAS ALL HER PAST TO REPROACH HERSELF WITH IS TAKEN ALL AT ONCE BY A PROFOUND SINCERE IRRESISTIBLE LOVE OF WHICH SHE HAD NEVER FELT HERSELF CAPABLE WHEN SHE HAS CONFESSED HER LOVE HOW ABSOLUTELY THE MAN WHOM SHE LOVES DOMINATES HER +3997-180294-0013-1813: THEY KNOW NOT WHAT PROOF TO GIVE +3997-180294-0014-1814: IN ORDER TO DISTURB THE (LABOURERS->LABORERS) IN THE FIELD WAS ONE DAY DEVOURED BY A WOLF BECAUSE THOSE WHOM HE HAD SO OFTEN DECEIVED NO LONGER BELIEVED IN HIS CRIES FOR HELP +3997-180294-0015-1815: (IT->THIS) IS THE SAME WITH THESE UNHAPPY WOMEN WHEN (THEY LOVE->HE LOVED) SERIOUSLY +3997-180294-0016-1816: BUT WHEN THE MAN WHO INSPIRES THIS REDEEMING LOVE IS GREAT ENOUGH IN SOUL TO RECEIVE IT WITHOUT REMEMBERING THE PAST WHEN HE GIVES HIMSELF UP TO IT WHEN IN SHORT HE LOVES AS HE IS LOVED THIS MAN DRAINS AT ONE DRAUGHT ALL EARTHLY EMOTIONS AND AFTER SUCH A LOVE HIS HEART WILL BE CLOSED TO EVERY OTHER +3997-180294-0017-1817: BUT TO RETURN TO THE FIRST DAY OF MY (LIAISON->LEAR SONG) +3997-180294-0018-1818: WHEN I REACHED HOME I WAS IN A STATE OF (MAD->MADGE) GAIETY +3997-180294-0019-1819: THE WOMAN BECOMES THE MAN'S MISTRESS AND LOVES HIM +3997-180294-0020-1820: HOW WHY +3997-180294-0021-1821: MY WHOLE BEING WAS EXALTED INTO JOY AT THE MEMORY OF THE WORDS WE HAD EXCHANGED DURING THAT FIRST NIGHT +3997-180294-0022-1822: HERE ARE MY ORDERS TO NIGHT AT THE VAUDEVILLE +3997-180294-0023-1823: (COME->CLER) DURING THE THIRD (ENTR'ACTE->ENTRACT) +3997-180294-0024-1824: THE BOXES FILLED ONE AFTER ANOTHER +3997-180294-0025-1825: ONLY ONE (REMAINED->REMAINS) EMPTY THE STAGE BOX +3997-180294-0026-1826: AT THE BEGINNING OF THE THIRD ACT I HEARD THE DOOR OF THE BOX ON WHICH MY EYES HAD BEEN ALMOST CONSTANTLY FIXED OPEN AND MARGUERITE APPEARED +3997-180294-0027-1827: (DID->THAT) SHE (LOVE->LOVED) ME ENOUGH TO BELIEVE THAT THE MORE BEAUTIFUL SHE LOOKED THE HAPPIER I SHOULD BE +3997-180294-0028-1828: WHAT IS THE MATTER WITH YOU TO NIGHT SAID MARGUERITE RISING AND COMING TO THE BACK OF THE BOX AND KISSING ME ON THE FOREHEAD +3997-180294-0029-1829: (YOU->HE) SHOULD GO TO BED SHE REPLIED WITH THAT (IRONICAL->IRONIC) AIR WHICH WENT SO WELL WITH HER DELICATE AND WITTY FACE +3997-180294-0030-1830: WHERE AT HOME +3997-180294-0031-1831: YOU STILL LOVE ME CAN YOU ASK +3997-180294-0032-1832: BECAUSE YOU DON'T LIKE SEEING HIM +3997-180294-0033-1833: (NONETHELESS->NONE THE LESS) I 
WAS VERY UNHAPPY ALL THE REST OF THE EVENING AND WENT AWAY VERY SADLY AFTER HAVING SEEN PRUDENCE THE COUNT AND MARGUERITE GET INTO THE CARRIAGE WHICH WAS (WAITING->WINNING) FOR THEM AT THE DOOR +3997-180297-0000-1834: I HAVE NOT COME TO HINDER YOU FROM LEAVING PARIS +3997-180297-0001-1835: YOU IN THE WAY MARGUERITE BUT HOW +3997-180297-0002-1836: WELL YOU MIGHT HAVE HAD A WOMAN HERE SAID PRUDENCE AND IT WOULD HARDLY HAVE BEEN AMUSING FOR HER TO SEE TWO MORE ARRIVE +3997-180297-0003-1837: DURING THIS REMARK MARGUERITE LOOKED AT ME ATTENTIVELY +3997-180297-0004-1838: MY DEAR PRUDENCE I ANSWERED YOU DO NOT KNOW WHAT YOU ARE SAYING +3997-180297-0005-1839: YES BUT BESIDES NOT WISHING TO PUT YOU OUT I WAS SURE THAT IF YOU CAME AS FAR AS MY DOOR YOU WOULD WANT TO COME UP AND AS I COULD NOT LET YOU I DID NOT WISH TO LET YOU GO AWAY BLAMING ME FOR SAYING NO +3997-180297-0006-1840: BECAUSE I AM WATCHED AND THE LEAST SUSPICION MIGHT (DO->TO) ME THE GREATEST HARM +3997-180297-0007-1841: IS THAT REALLY THE ONLY REASON +3997-180297-0008-1842: IF THERE WERE ANY OTHER I WOULD TELL YOU FOR WE ARE NOT TO HAVE ANY SECRETS FROM ONE ANOTHER NOW +3997-180297-0009-1843: (HONESTLY->ON THE SUIT) DO YOU CARE FOR ME A LITTLE A GREAT DEAL +3997-180297-0010-1844: I FANCIED FOR A MOMENT THAT I MIGHT GIVE MYSELF THAT HAPPINESS FOR SIX MONTHS YOU WOULD NOT HAVE IT YOU INSISTED ON KNOWING THE MEANS +3997-180297-0011-1845: WELL GOOD HEAVENS THE MEANS WERE EASY ENOUGH TO GUESS +3997-180297-0012-1846: I LISTENED AND I GAZED AT MARGUERITE WITH ADMIRATION +3997-180297-0013-1847: WHEN (I->THEY) THOUGHT THAT THIS MARVELLOUS CREATURE WHOSE FEET I HAD ONCE LONGED TO KISS WAS WILLING TO LET ME TAKE MY PLACE IN HER THOUGHTS (MY->BY) PART IN HER LIFE AND THAT I WAS NOT YET CONTENT WITH WHAT SHE GAVE ME I ASKED IF MAN'S DESIRE (HAS->HAD) INDEED LIMITS WHEN SATISFIED AS PROMPTLY AS MINE HAD BEEN IT REACHED AFTER SOMETHING FURTHER +3997-180297-0014-1848: TRULY SHE CONTINUED WE POOR CREATURES OF CHANCE HAVE FANTASTIC (DESIRES->DESIRE) AND INCONCEIVABLE LOVES +3997-180297-0015-1849: WE ARE NOT ALLOWED TO HAVE HEARTS UNDER PENALTY OF BEING HOOTED DOWN AND OF RUINING OUR CREDIT +3997-180297-0016-1850: WE NO LONGER BELONG TO OURSELVES +3997-180297-0017-1851: WE STAND FIRST IN THEIR SELF ESTEEM LAST IN THEIR ESTEEM +3997-180297-0018-1852: NEVER (DO THEY->DID HE) GIVE YOU ADVICE WHICH IS NOT LUCRATIVE +3997-180297-0019-1853: IT MEANS LITTLE ENOUGH TO THEM THAT WE SHOULD HAVE TEN LOVERS EXTRA AS LONG AS THEY GET DRESSES OR A BRACELET OUT OF THEM AND THAT THEY CAN DRIVE (IN OUR CARRIAGE->AND ARE PARISH) FROM TIME TO TIME OR COME TO OUR BOX AT THE (THEATRE->FUTURE) +3997-180297-0020-1854: SUCH A MAN I FOUND IN THE DUKE BUT THE DUKE IS OLD AND (*->THE) OLD AGE NEITHER PROTECTS NOR CONSOLES +3997-180297-0021-1855: I THOUGHT I COULD ACCEPT THE LIFE WHICH HE OFFERED ME (BUT->OR) WHAT WOULD YOU HAVE +3997-180297-0022-1856: WHAT I LOVED IN YOU WAS NOT THE MAN WHO WAS BUT THE MAN WHO WAS GOING TO BE +3997-180297-0023-1857: MARGUERITE (TIRED->HIRED) OUT WITH THIS LONG CONFESSION THREW HERSELF BACK ON THE SOFA AND TO STIFLE A SLIGHT COUGH (PUT->PULL) UP HER HANDKERCHIEF TO HER LIPS AND FROM THAT TO HER EYES +3997-180297-0024-1858: MARGUERITE DO WITH ME AS YOU WILL I AM YOUR SLAVE YOUR DOG BUT IN THE NAME OF HEAVEN TEAR UP THE LETTER WHICH I WROTE TO YOU AND DO NOT MAKE ME LEAVE YOU TO MORROW IT WOULD KILL ME +3997-180297-0025-1859: MARGUERITE DREW THE LETTER FROM HER BOSOM AND HANDING IT TO ME WITH A SMILE OF INFINITE SWEETNESS SAID 
+3997-180297-0026-1860: HERE IT IS I HAVE BROUGHT IT BACK +3997-180297-0027-1861: I (TORE THE->TOILED A) LETTER INTO FRAGMENTS AND KISSED WITH TEARS THE HAND THAT GAVE IT TO ME +3997-180297-0028-1862: LOOK HERE PRUDENCE DO YOU KNOW WHAT HE WANTS SAID MARGUERITE +3997-180297-0029-1863: HE WANTS YOU TO FORGIVE HIM +3997-180297-0030-1864: ONE HAS (TO->TWO) BUT HE WANTS MORE THAN THAT WHAT THEN +3997-180297-0031-1865: I EMBRACED MARGUERITE UNTIL SHE WAS ALMOST STIFLED +3997-182399-0000-1779: (OL MISTAH->ALL MISTER) BUZZARD GRINNED +3997-182399-0001-1780: THIS SOUNDED LIKE ANOTHER STORY +3997-182399-0002-1781: HE WAS CURIOUS ABOUT THAT BLACK HEADED COUSIN OF (OL MISTAH->OLD MISTER) BUZZARD VERY CURIOUS INDEED +3997-182399-0003-1782: ANYWAY HE WOULD FIND OUT +3997-182399-0004-1783: PLEASE MISTER (BUZZARD->BOZARD) PLEASE TELL US THE STORY HE BEGGED +3997-182399-0005-1784: NOW (OL MISTAH->ALL MISTER) BUZZARD IS NATURALLY GOOD NATURED AND ACCOMMODATING AND WHEN PETER (BEGGED->BAGS) SO HARD HE JUST COULDN'T FIND IT IN HIS HEART TO REFUSE +3997-182399-0006-1785: WAY BACK IN THE DAYS WHEN (GRANDPAP->GRAN'PAP) BUZZARD HAD HIS (LIL->LITTLE) FALLING (OUT->ON) WITH (OL->OLD) KING EAGLE AND DONE (FLY->FLIES) SO HIGH HE (SCO'TCH->SCORCHED) THE FEATHERS (OFFEN->OFF IN) HIS (HAID->HEAD) HE HAD A COUSIN DID (GRANDPAP->GRANDPA) BUZZARD AND THIS COUSIN WAS (JES->JUST) NATURALLY LAZY AND NO COUNT +3997-182399-0007-1786: LIKE MOST NO COUNT PEOPLE HE USED TO MAKE A REGULAR (NUISANCE->NOTICE) OF (HISSELF->HIS SELF) POKING HIS NOSE INTO (EV'YBODY'S->EVERYBODY'S) BUSINESS AND NEVER TENDING TO HIS OWN +3997-182399-0008-1787: WASN'T ANYTHING GOING ON THAT THIS TRIFLING MEMBER OF THE BUZZARD (FAM'LY->FAMILY) DIDN'T FIND OUT ABOUT (AND MEDDLE->A MEDAL) IN HE COULD ASK (MO->MORE) QUESTIONS THAN PETER RABBIT CAN (AN->AND) ANYBODY THAT CAN DO THAT HAS GOT TO ASK A LOT +3997-182399-0009-1788: EVERYBODY LOOKED AT PETER AND LAUGHED +3997-182399-0010-1789: SO WE (UNS SIT->UNSTEAD) ON THE CHIMNEY TOPS WHENEVER (OL->OLD) JACK FROST GETS (TO STRAYING->THE STRAIN) DOWN WHERE HE HAVE NO BUSINESS +3997-182399-0011-1790: ONE DAY THIS NO COUNT TRIFLING COUSIN OF (GRANDPAP->GRANDPA) BUZZARD GET COLD IN HIS FEET +3997-182399-0012-1791: IT WAS ON (A LIL OL->THE LITTLE OLD) HOUSE A (LIL OL->LITTLE OLD) TUMBLE DOWN HOUSE +3997-182399-0013-1792: WHY HE (JES STRETCH->JUST SEARCHED) HIS (FOOL HAID->FULL HEAD) AS FAR DOWN (THAT->THE) CHIMNEY AS HE CAN (AN->AND) LISTEN (AN->AND) LISTEN +3997-182399-0014-1793: BUT HE DON'T MIND THAT +3997-182399-0015-1794: (WILL YO' ALLS->WELL YOU ALL) PLEASE SPEAK A (LIL->LITTLE) LOUDER HE (HOLLER DOWN->HOLLERED ON) THE CHIMNEY (JES->JUST) LIKE THAT +3997-182399-0016-1795: YES (SAH->SAD) SHE (SHO'LY->SURELY) WAS (PLUMB->PLUM) SCARED +3997-182399-0017-1796: THEY (LIKE->LIKED) TO CHOKE THAT NO (COUNT BUZZARD->COMPASSER) TO DEATH +3997-182399-0018-1797: WHEN HE GET HOME HE TRY (AN TRY->AND TRIES) TO BRUSH THAT (SOOT->SUIT) OFF BUT IT (DONE->DON'T) GET INTO THE SKIN (AN->AND) IT STAY THERE +3997-182399-0019-1798: A LITTLE SIGH OF SATISFACTION WENT (AROUND->ROUND) THE CIRCLE OF LISTENERS +3997-182399-0020-1799: IT WAS JUST AS GOOD AS ONE OF GRANDFATHER (FROG'S->FROGS) +4198-12259-0000-203: DRAW REACH FILL MIX GIVE IT ME WITHOUT WATER +4198-12259-0001-204: SO MY FRIEND SO WHIP ME OFF THIS GLASS NEATLY BRING ME HITHER SOME CLARET A FULL WEEPING GLASS TILL IT RUN OVER +4198-12259-0002-205: A CESSATION AND (TRUCE->TRUTH) WITH THIRST +4198-12259-0003-206: YOU HAVE (CATCHED->CAST) A COLD GAMMER YEA FORSOOTH SIR 
+4198-12259-0004-207: BY THE (BELLY->VALLEY) OF (SANCT->SAINT) BUFF LET US TALK OF OUR DRINK I NEVER DRINK (BUT AT->WITHOUT) MY HOURS LIKE THE POPE'S MULE +4198-12259-0005-208: WHICH WAS FIRST (THIRST OR->THOSE) DRINKING +4198-12259-0006-209: WHAT IT SEEMS I DO NOT DRINK BUT (BY->BUY) AN ATTORNEY +4198-12259-0007-210: DRINK ALWAYS AND YOU SHALL NEVER DIE +4198-12259-0008-211: IF I DRINK NOT I AM A GROUND DRY GRAVELLED AND SPENT I AM (STARK->START) DEAD WITHOUT DRINK AND MY SOUL READY TO FLY INTO SOME MARSH AMONGST FROGS THE SOUL NEVER DWELLS IN A DRY PLACE DROUTH (KILLS->KILL) IT +4198-12259-0009-212: HE DRINKS (IN VAIN->THEIR VEIN) THAT (FEELS->FILLS) NOT THE PLEASURE OF IT +4198-12259-0010-213: IT IS ENOUGH TO BREAK BOTH (GIRDS->GUERAGE) AND (PETREL->PETEL) +4198-12259-0011-214: WHAT DIFFERENCE IS THERE BETWEEN A BOTTLE AND A FLAGON +4198-12259-0012-215: BRAVELY AND WELL PLAYED UPON THE WORDS +4198-12259-0013-216: OUR FATHERS DRANK LUSTILY AND EMPTIED (THEIR CANS->THE ACADANS) +4198-12259-0014-217: WELL (CACKED->CAGLED) WELL SUNG +4198-12259-0015-218: COME LET US DRINK WILL YOU SEND NOTHING TO THE RIVER +4198-12259-0016-219: I (DRINK->DRANK) NO MORE THAN (A SPONGE->HIS SPINES) +4198-12259-0017-220: I DRINK LIKE A (TEMPLAR KNIGHT->TENT LAW NIGHT) +4198-12259-0018-221: AND I (TANQUAM SPONSUS->TEN QUALMS BONSES) +4198-12259-0019-222: AND I (SICUT TERRA SINE->SECUT TERRACE IN) AQUA +4198-12259-0020-223: GIVE ME A (SYNONYMON->SNYM) FOR A (GAMMON->GAMIN) OF BACON +4198-12259-0021-224: IT IS THE COMPULSORY OF DRINKERS IT IS A (PULLEY->PULLLY) +4198-12259-0022-225: A LITTLE RAIN (ALLAYS->A LAYS) A GREAT DEAL OF WIND LONG TIPPLING BREAKS (THE->THAT) THUNDER +4198-12259-0023-226: BUT IF THERE CAME SUCH LIQUOR (FROM->FOR) MY (BALLOCK WOULD->BALLIC WILL) YOU NOT WILLINGLY THEREAFTER SUCK THE (UDDER->UTTER) WHENCE IT ISSUED +4198-12259-0024-227: HERE PAGE FILL +4198-12259-0025-228: I APPEAL FROM THIRST AND DISCLAIM ITS (JURISDICTION->JURIS DIXON) +4198-12259-0026-229: I WAS WONT (HERETOFORE->HERE TOFORE) TO DRINK OUT ALL BUT NOW I LEAVE NOTHING +4198-12259-0027-230: (HEYDAY->HEY THEE) HERE (ARE TRIPES->A TRITE) FIT FOR (OUR SPORT->OURSPORT) AND IN EARNEST EXCELLENT (GODEBILLIOS->GO TO BE YOURS) OF THE DUN OX YOU KNOW WITH THE BLACK (STREAK->STREET) +4198-12259-0028-231: (O->OH) FOR GOD'S SAKE LET US (LASH->LAST) THEM SOUNDLY YET (THRIFTILY->DRIFTILY) +4198-12259-0029-232: SPARROWS (WILL->WOULD) NOT EAT UNLESS YOU (BOB->BOBBED) THEM ON THE TAIL NOR CAN I DRINK IF I BE NOT FAIRLY SPOKE TO +4198-12259-0030-233: (HO->OH) THIS (WILL BANG IT SOUNDLY->WAS BENNETT'S ARMY) +4198-12259-0031-234: BUT THIS (SHALL BANISH IT->OUR BANACY) UTTERLY +4198-12259-0032-235: LET US WIND OUR HORNS BY THE SOUND OF FLAGONS AND BOTTLES AND CRY ALOUD THAT WHOEVER HATH LOST HIS THIRST COME (NOT->NIGH) HITHER TO SEEK IT +4198-12259-0033-236: THE GREAT GOD MADE THE PLANETS AND WE MAKE THE PLATTERS NEAT +4198-12259-0034-237: APPETITE COMES WITH EATING SAYS (ANGESTON->ANGERSON) BUT THE (THIRST->DOZ) GOES AWAY WITH DRINKING +4198-12259-0035-238: I HAVE A REMEDY AGAINST THIRST QUITE CONTRARY TO THAT WHICH IS GOOD AGAINST (THE BITING->ABIDING) OF A MAD DOG +4198-12259-0036-239: WHITE (WINE HERE->WIRE) WINE BOYS +4198-12259-0037-240: O (LACHRYMA->LACKRY MOLL) CHRISTI IT IS OF THE BEST GRAPE +4198-12259-0038-241: (I'FAITH->I FAITH) PURE GREEK GREEK O THE FINE WHITE WINE +4198-12259-0039-242: THERE IS NO ENCHANTMENT NOR CHARM THERE EVERY ONE OF YOU HATH SEEN IT +4198-12259-0040-243: MY (PRENTICESHIP->PRENTICE IT) IS OUT (I AM->I'M) A 
FREE MAN AT THIS TRADE +4198-12259-0041-244: (I SHOULD SAY->AS YOU SEE) MASTER (PAST->PASS) +4198-12259-0042-245: (O->OH) THE DRINKERS THOSE THAT ARE (A DRY O->ADD OH) POOR THIRSTY SOULS +4198-12259-0043-246: CLEAR OFF NEAT SUPERNACULUM +4198-12281-0000-187: ALTHOUGH THE PLAGUE WAS THERE IN THE MOST PART OF ALL THE HOUSES THEY NEVERTHELESS ENTERED EVERYWHERE THEN PLUNDERED AND CARRIED AWAY ALL THAT WAS WITHIN AND YET FOR ALL THIS NOT ONE OF THEM TOOK ANY HURT WHICH IS A MOST WONDERFUL CASE +4198-12281-0001-188: I BESEECH YOU THINK UPON IT +4198-12281-0002-189: NEVERTHELESS AT ALL (ADVENTURES->VENTURES) THEY RANG THE BELLS (AD CAPITULUM CAPITULANTES->ERE AT CAPITULAM CAPITULAT DAYS) +4198-12281-0003-190: BY THE VIRTUE OF GOD WHY DO NOT YOU SING (PANNIERS->PENNYERS) FAREWELL VINTAGE IS DONE +4198-12281-0004-191: BY THE BELLY OF (SANCT->SAINT) JAMES WHAT SHALL WE POOR DEVILS DRINK THE WHILE +4198-12281-0005-192: LORD GOD (DA MIHI POTUM->THOU ME HE POT EM) +4198-12281-0006-193: LET HIM BE CARRIED TO PRISON FOR TROUBLING THE DIVINE SERVICE +4198-12281-0007-194: WHEREFORE IS IT THAT OUR DEVOTIONS WERE INSTITUTED TO BE SHORT IN THE TIME OF HARVEST AND VINTAGE AND LONG IN THE ADVENT (AND->IN) ALL THE WINTER +4198-12281-0008-195: HARK YOU MY MASTERS YOU THAT LOVE THE WINE (COP'S->COPSE) BODY FOLLOW ME FOR (SANCT ANTHONY->SAINT AUNT ANY) BURN ME AS FREELY AS A FAGGOT (IF->*) THEY GET LEAVE TO TASTE ONE DROP OF THE LIQUOR THAT (WILL->WOULD) NOT NOW COME AND FIGHT FOR RELIEF OF THE VINE +4198-12281-0009-196: TO OTHERS AGAIN HE UNJOINTED THE (SPONDYLES->SPY MULES) OR KNUCKLES OF THE NECK (DISFIGURED->DIS FIGURED) THEIR CHAPS (GASHED->GASH) THEIR FACES MADE THEIR CHEEKS HANG FLAPPING ON THEIR CHIN AND SO SWINGED AND (BALAMMED->BLAMMED) THEM THAT THEY FELL DOWN BEFORE HIM LIKE HAY BEFORE (A MOWER->HIM OVER) +4198-12281-0010-197: TO SOME (WITH A->WOULD THEY) SMART (SOUSE->SOUS) ON (THE EPIGASTER->THEIR EBERGASTER) HE (WOULD->WILL) MAKE (THEIR MIDRIFF SWAG->THEM MIDRIFTS WAG) THEN REDOUBLING THE BLOW GAVE THEM SUCH A (HOMEPUSH->HOME PUSH) ON THE NAVEL THAT HE MADE THEIR PUDDINGS TO GUSH OUT +4198-12281-0011-198: BELIEVE THAT IT WAS THE MOST HORRIBLE SPECTACLE THAT EVER (ONE->WON) SAW +4198-12281-0012-199: (O->ALL) THE HOLY LADY (NYTOUCH->KNIGHT) SAID ONE THE GOOD (SANCTESS->SANCTIS) O OUR LADY OF (SUCCOURS->SECURS) SAID ANOTHER HELP HELP +4198-12281-0013-200: SOME DIED WITHOUT SPEAKING OTHERS SPOKE WITHOUT DYING SOME DIED IN SPEAKING OTHERS SPOKE (IN->AND) DYING +4198-12281-0014-201: CAN YOU TELL WITH WHAT INSTRUMENTS THEY DID IT +4198-12281-0015-202: IN THE MEANTIME (FRIAR->FRIED) JOHN WITH HIS FORMIDABLE BATON OF THE CROSS GOT TO THE BREACH WHICH THE ENEMIES HAD MADE AND THERE STOOD TO SNATCH UP THOSE THAT ENDEAVOURED TO ESCAPE +4198-61336-0000-247: IT IS SIGNIFICANT TO NOTE IN THIS CONNECTION THAT THE NEW KING WAS AN UNSWERVING ADHERENT OF THE CULT OF (ASHUR->ASHER) BY THE (ADHERENTS->ADHERENCE) OF WHICH HE WAS PROBABLY STRONGLY SUPPORTED +4198-61336-0001-248: AT THE BEGINNING OF HIS REIGN THERE WAS MUCH SOCIAL DISCONTENT AND SUFFERING +4198-61336-0002-249: WELL MIGHT (SHARDURIS->SHOW DORIS) EXCLAIM IN THE WORDS OF THE PROPHET WHERE IS THE KING OF (ARPAD->ARPE) +4198-61336-0003-250: (TIGLATH PILESER->DICK LAUGHED PLEASURE) HOWEVER CROSSED THE (EUPHRATES->EUPHATEES) AND MOVING NORTHWARD DELIVERED AN UNEXPECTED ATTACK ON THE (URARTIAN->GRACIAN) ARMY (IN QUMMUKH->AND CUB) +4198-61336-0004-251: A FIERCE BATTLE ENSUED AND ONE OF (ITS->HIS) DRAMATIC INCIDENTS WAS A SINGLE COMBAT BETWEEN THE RIVAL KINGS 
+4198-61336-0005-252: AN ATTEMPT WAS MADE TO CAPTURE KING (SHARDURIS->SHADORIS) WHO (LEAPT->LEAPED) FROM HIS CHARIOT AND MADE HASTY ESCAPE ON HORSEBACK HOTLY PURSUED IN THE GATHERING DARKNESS BY AN ASSYRIAN (CONTINGENT->CONTENTION) OF CAVALRY +4198-61336-0006-253: DESPITE THE (BLOW->BLUE) DEALT AGAINST (URARTU->URITU) ASSYRIA DID NOT IMMEDIATELY REGAIN POSSESSION OF NORTH SYRIA +4198-61336-0007-254: THE SHIFTY (MATI ILU->MANTI ILIU) EITHER CHERISHED THE HOPE THAT (SHARDURIS->SHALL DORIS) WOULD RECOVER STRENGTH AND AGAIN (INVADE->IN VAIN) NORTH (SYRIA->ASSYRIA) OR THAT HE MIGHT HIMSELF ESTABLISH AN EMPIRE IN THAT REGION +4198-61336-0008-255: (TIGLATH PILESER->T GLASS BE LEISURE) HAD THEREFORE TO MARCH WESTWARD AGAIN +4198-61336-0009-256: FOR THREE YEARS HE CONDUCTED VIGOROUS CAMPAIGNS IN THE WESTERN LAND WHERE HE MET WITH VIGOROUS RESISTANCE +4198-61336-0010-257: (ARPAD->OUR PAD) WAS CAPTURED AND (MATI ILU->MET TO ILL YOU) DEPOSED AND PROBABLY PUT TO DEATH +4198-61336-0011-258: ONCE AGAIN THE HEBREWS CAME INTO CONTACT WITH (ASSYRIA->THE SYRIA) +4198-61336-0012-259: (ITS FALL MAY NOT HAVE->IT'S FOR ME NAT HAV) BEEN UNCONNECTED WITH THE TREND OF EVENTS IN ASSYRIA DURING THE CLOSING YEARS OF THE MIDDLE EMPIRE +4198-61336-0013-260: (JEHOASH->JO ASH) THE GRANDSON OF (JEHU->JEHOV) HAD ACHIEVED SUCCESSES IN CONFLICT WITH DAMASCUS +4198-61336-0014-261: SIX MONTHS (AFTERWARDS->AFTERWARD) HE WAS ASSASSINATED BY (SHALLUM->CELEM) +4198-61336-0015-262: THIS USURPER HELD SWAY AT SAMARIA FOR ONLY A MONTH +4198-61336-0016-263: NO RESISTANCE WAS POSSIBLE ON THE PART OF (MENAHEM->MANY HIM) THE USURPER (WHO WAS->WHOSE) PROBABLY READY TO WELCOME THE ASSYRIAN CONQUEROR SO THAT BY ARRANGING AN ALLIANCE HE MIGHT SECURE HIS OWN POSITION +4198-61336-0017-264: (TIGLATH PILESER->TAKE LAST PLEASE HER) NEXT OPERATED AGAINST THE (MEDIAN->MEDIUM) AND OTHER HILL TRIBES IN THE (NORTH EAST->NORTHEAST) +4198-61336-0018-265: HE OVERTHREW BUILDINGS DESTROYED ORCHARDS AND TRANSPORTED TO NINEVEH THOSE OF THE INHABITANTS HE HAD NOT PUT TO THE SWORD WITH ALL THE LIVE STOCK HE COULD LAY HANDS ON +4198-61336-0019-266: (THUS->THIS) WAS (URARTU->URA TO) CRIPPLED AND HUMILIATED IT NEVER REGAINED ITS (FORMER->FORM OF) PRESTIGE AMONG THE NORTHERN STATES +4198-61336-0020-267: IN THE FOLLOWING YEAR (TIGLATH PILESER RETURNED->TIGGLERS BELEASE HER RETURN) TO SYRIA +4198-61336-0021-268: (MENAHEM->MANY AND) KING OF ISRAEL HAD DIED AND WAS SUCCEEDED BY HIS SON (PEKAHIAH->PECAH) +4198-61336-0022-269: (JUDAH->JULIA) HAD TAKEN ADVANTAGE OF THE DISTURBED CONDITIONS IN ISRAEL TO ASSERT ITS INDEPENDENCE +4198-61336-0023-270: HE CONDEMNED ISRAEL FOR ITS IDOLATRIES AND CRIED +4198-61336-0024-271: FOR (THUS->THIS) SAITH THE LORD UNTO THE HOUSE OF ISRAEL SEEK YE ME (AND->A) YE (SHALL->TO) LIVE HAVE YE OFFERED UNTO ME SACRIFICES AND OFFERINGS IN THE WILDERNESS FORTY YEARS (O->OR) HOUSE OF ISRAEL +4198-61336-0025-272: THE REMNANT OF THE PHILISTINES SHALL PERISH +4198-61336-0026-273: ISRAEL WAS ALSO DEALT WITH +4198-61336-0027-274: HE SWEPT THROUGH ISRAEL LIKE A HURRICANE +4198-61336-0028-275: THE (PHILISTINES->FURTHER STEAMS) AND THE ARABIANS OF THE DESERT WERE ALSO SUBDUED +4198-61336-0029-276: HE INVADED BABYLONIA +4198-61336-0030-277: (UKINZER->A KINDRED) TOOK REFUGE IN HIS CAPITAL (SHAPIA->SHAPEIA) WHICH HELD OUT SUCCESSFULLY ALTHOUGH THE SURROUNDING COUNTRY WAS RAVAGED AND DESPOILED +4294-14317-0000-1866: AS I THOUGHT THAT THIS WAS DUE TO SOME FAULT IN THE EARTH I WANTED TO MAKE THESE FIRST EXPERIMENTS BEFORE (I->AND) UNDERTOOK MY PERSEUS 
+4294-14317-0001-1867: WHEN I SAW (THAT->*) THIS (BUST->FUSS) CAME OUT SHARP AND CLEAN I (SET->SAID) AT ONCE TO CONSTRUCT A LITTLE FURNACE IN THE WORKSHOP ERECTED FOR ME BY THE DUKE AFTER MY OWN PLANS AND DESIGN IN THE HOUSE WHICH THE DUKE HAD GIVEN ME +4294-14317-0002-1868: IT WAS AN EXTREMELY DIFFICULT TASK AND I WAS ANXIOUS TO OBSERVE ALL THE NICETIES OF ART WHICH I HAD LEARNED SO AS NOT TO LAPSE INTO SOME ERROR +4294-14317-0003-1869: I IN MY TURN FEEL THE SAME DESIRE AND HOPE TO PLAY MY PART LIKE THEM THEREFORE MY LORD GIVE ME THE LEAVE TO GO +4294-14317-0004-1870: BUT BEWARE OF LETTING (BANDINELLO->BEND NELLO) QUIT YOU RATHER BESTOW UPON HIM ALWAYS MORE THAN HE DEMANDS FOR IF HE GOES INTO FOREIGN PARTS HIS IGNORANCE IS SO PRESUMPTUOUS THAT HE IS JUST THE MAN TO DISGRACE OUR MOST ILLUSTRIOUS SCHOOL +4294-14317-0005-1871: I (ASK->ASKED) NO FURTHER REWARD FOR MY LABOURS UP TO THIS TIME THAN THE GRACIOUS FAVOUR OF YOUR MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0006-1872: THEN I THANKED HIM AND SAID I HAD NO GREATER DESIRE THAN TO SHOW THOSE ENVIOUS FOLK THAT I HAD IT IN ME TO EXECUTE THE PROMISED WORK +4294-14317-0007-1873: I HAD BETTER LOOK TO MY CONDUCT FOR IT HAD COME TO HIS EARS THAT I RELIED UPON HIS FAVOUR TO TAKE IN FIRST ONE MAN AND THEN ANOTHER +4294-14317-0008-1874: I BEGGED HIS MOST ILLUSTRIOUS EXCELLENCY TO NAME A SINGLE PERSON WHOM I HAD EVER TAKEN IN +4294-14317-0009-1875: I SAID MY LORD I THANK YOU AND BEG YOU TO CONDESCEND SO FAR AS TO LISTEN TO FOUR WORDS IT IS TRUE THAT HE LENT ME A PAIR OF OLD SCALES TWO (ANVILS->AMBILS) AND THREE LITTLE HAMMERS WHICH ARTICLES I BEGGED HIS (WORKMAN GIORGIO DA CORTONA->WORKMEN GEORGIO DECORTUNA) FIFTEEN DAYS AGO TO FETCH BACK +4294-14317-0010-1876: (GIORGIO->GEORGIO) CAME FOR THEM (HIMSELF->HIS HEALTH) +4294-14317-0011-1877: I HOPE TO PROVE ON WHAT ACCOUNT THAT SCOUNDREL TRIES TO BRING ME INTO DISGRACE +4294-14317-0012-1878: WHEN HE HAD HEARD THIS SPEECH THE DUKE ROSE UP IN ANGER AND SENT FOR BERNARDONE WHO WAS FORCED TO TAKE FLIGHT AS FAR AS VENICE HE AND ANTONIO (LANDI->LANDEE) WITH HIM +4294-14317-0013-1879: YOU HAD BETTER PUT THIS TO THE PROOF AND I WILL GO AT ONCE TO THE (BARGELLO->BARGENO) +4294-14317-0014-1880: I AM WILLING TO ENTER INTO COMPETITION WITH THE ANCIENTS AND FEEL ABLE TO SURPASS THEM FOR SINCE THOSE EARLY DAYS IN WHICH I MADE THE MEDALS OF POPE CLEMENT I HAVE LEARNED SO MUCH THAT I CAN NOW PRODUCE FAR BETTER PIECES OF THE KIND I THINK I CAN ALSO OUTDO THE COINS I STRUCK FOR DUKE (ALESSANDRO->ALISANDRO) WHICH ARE STILL HELD IN HIGH ESTEEM IN LIKE MANNER I COULD MAKE FOR YOU LARGE PIECES OF GOLD AND SILVER PLATE AS I DID SO OFTEN FOR THAT NOBLE MONARCH KING FRANCIS OF FRANCE THANKS TO THE GREAT CONVENIENCES HE ALLOWED ME WITHOUT EVER LOSING TIME FOR THE EXECUTION OF COLOSSAL STATUES OR OTHER WORKS OF THE (SCULPTORS->SCULPTOR'S) CRAFT +4294-14317-0015-1881: AFTER SEVERAL MONTHS WERE WASTED AND (PIERO->PIERRE) WOULD NEITHER WORK NOR PUT MEN TO WORK UPON THE PIECE I MADE HIM GIVE IT BACK +4294-14317-0016-1882: AMONG ARTISTS CERTAIN ENRAGED SCULPTORS LAUGHED AT ME AND CALLED ME THE NEW SCULPTOR +4294-14317-0017-1883: NOW I HOPE TO SHOW THEM THAT I AM AN OLD SCULPTOR IF GOD SHALL GRANT ME THE BOON OF FINISHING MY (PERSEUS->PURSES) FOR THAT NOBLE PIAZZA OF HIS MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0018-1884: HAVING THIS EXCELLENT RESOLVE IN HEART I REACHED MY HOME +4294-32859-0000-1942: WYLDER WAS RATHER SURLY AFTER THE LADIES HAD FLOATED AWAY FROM THE SCENE AND HE DRANK HIS LIQUOR DOGGEDLY +4294-32859-0001-1943: IT WAS HIS 
FANCY I SUPPOSE TO REVIVE CERTAIN SENTIMENTAL RELATIONS WHICH HAD IT MAY BE ONCE EXISTED BETWEEN HIM AND MISS LAKE AND HE WAS A PERSON OF THAT COMBATIVE TEMPERAMENT THAT MAGNIFIES AN OBJECT IN PROPORTION AS ITS PURSUIT IS THWARTED +4294-32859-0002-1944: THE STORY OF FRIDOLIN AND (RETZCH'S->WRETCHES) PRETTY (OUTLINES->OUTLINE) +4294-32859-0003-1945: SIT DOWN BESIDE ME AND I'LL TELL YOU THE STORY +4294-32859-0004-1946: HE ASSISTED AT IT BUT TOOK NO PART AND IN FACT WAS LISTENING TO THAT OTHER CONVERSATION WHICH SOUNDED WITH ITS PLEASANT GABBLE AND LAUGHTER LIKE A LITTLE MUSICAL TINKLE OF BELLS IN THE DISTANCE +4294-32859-0005-1947: BUT HONEST MARK FORGOT THAT YOUNG LADIES DO NOT ALWAYS COME OUT QUITE ALONE AND JUMP UNASSISTED INTO THEIR VEHICLES +4294-35475-0000-1885: BUT THE MIDDLE (SON->SUN) WAS LITTLE AND LORN HE WAS NEITHER DARK NOR FAIR HE WAS NEITHER HANDSOME NOR STRONG +4294-35475-0001-1886: THROWING HIMSELF ON HIS KNEES BEFORE THE KING HE CRIED (OH->O) ROYAL SIRE BESTOW UPON ME ALSO A SWORD AND A STEED THAT I MAY UP AND AWAY TO FOLLOW MY BRETHREN +4294-35475-0002-1887: BUT THE KING LAUGHED HIM TO SCORN THOU A SWORD HE QUOTH +4294-35475-0003-1888: IN SOOTH THOU SHALT HAVE ONE BUT IT SHALL BE ONE BEFITTING THY MAIDEN SIZE AND COURAGE (IF->IT) SO SMALL A WEAPON CAN BE FOUND IN ALL MY KINGDOM +4294-35475-0004-1889: FORTHWITH THE GRINNING (JESTER->GESTURE) BEGAN SHRIEKING WITH LAUGHTER SO THAT THE BELLS UPON HIS MOTLEY CAP WERE ALL SET A JANGLING +4294-35475-0005-1890: I DID BUT LAUGH TO THINK THE (SWORD->SORT) OF (ETHELRIED->EPLORRIED) HAD BEEN SO QUICKLY FOUND RESPONDED THE JESTER AND HE POINTED TO THE SCISSORS HANGING FROM THE TAILOR'S GIRDLE +4294-35475-0006-1891: ONE NIGHT AS HE LAY IN A DEEP FOREST (TOO->TWO) UNHAPPY TO SLEEP HE HEARD A NOISE NEAR AT HAND IN THE BUSHES +4294-35475-0007-1892: THOU SHALT HAVE THY LIBERTY HE CRIED EVEN THOUGH THOU SHOULDST (REND->RUN) ME IN PIECES THE MOMENT THOU ART FREE +4294-35475-0008-1893: (IT->HE) HAD (*->HID IT) SUDDENLY DISAPPEARED AND IN ITS PLACE STOOD A BEAUTIFUL FAIRY WITH FILMY WINGS WHICH SHONE LIKE RAINBOWS IN THE MOONLIGHT +4294-35475-0009-1894: AT THIS MOMENT THERE WAS A DISTANT RUMBLING AS OF THUNDER TIS THE OGRE CRIED THE FAIRY WE MUST HASTEN +4294-35475-0010-1895: SCISSORS GROW A GIANT'S HEIGHT AND SAVE US FROM THE OGRE'S MIGHT +4294-35475-0011-1896: HE COULD SEE THE OGRE STANDING POWERLESS TO HURT HIM ON THE OTHER SIDE OF THE CHASM AND GNASHING HIS TEETH EACH ONE OF WHICH WAS AS BIG AS A (MILLSTON->MILLSTONE) +4294-35475-0012-1897: THE SIGHT WAS SO TERRIBLE THAT HE TURNED ON HIS HEEL AND FLED AWAY AS FAST AS HIS FEET COULD CARRY HIM +4294-35475-0013-1898: THOU SHALT NOT BE LEFT A PRISONER IN THIS DISMAL SPOT WHILE I HAVE THE POWER TO HELP THEE +4294-35475-0014-1899: HE LIFTED THE SCISSORS AND WITH ONE STROKE DESTROYED THE WEB AND GAVE THE FLY (ITS FREEDOM->TO READ THEM) +4294-35475-0015-1900: A FAINT GLIMMER OF LIGHT ON THE OPPOSITE WALL SHOWS ME THE KEYHOLE +4294-35475-0016-1901: THE PRINCE SPENT ALL THE FOLLOWING TIME UNTIL MIDNIGHT TRYING TO THINK OF A SUITABLE VERSE TO SAY TO THE SCISSORS +4294-35475-0017-1902: AS HE UTTERED THE WORDS THE SCISSORS LEAPED OUT OF HIS HAND AND BEGAN TO CUT THROUGH THE WOODEN SHUTTERS AS EASILY AS THROUGH A CHEESE +4294-35475-0018-1903: IN (A->THE) VERY SHORT TIME THE PRINCE HAD CRAWLED THROUGH THE OPENING +4294-35475-0019-1904: WHILE HE STOOD LOOKING AROUND HIM IN BEWILDERMENT A FIREFLY (ALIGHTED ON->LIGHTED DOWN) HIS (ARM->HEART) FLASHING ITS LITTLE LANTERN IN THE PRINCE'S FACE IT CRIED THIS WAY 
MY FRIEND THE FLY SENT ME TO GUIDE YOU TO A PLACE OF SAFETY +4294-35475-0020-1905: WHAT IS TO BECOME OF ME CRIED THE POOR PEASANT +4294-35475-0021-1906: (MY->BY) GRAIN MUST FALL (AND->IN) ROT IN THE FIELD FROM (OVERRIPENESS->OVER RIPENESS) BECAUSE I HAVE NOT THE STRENGTH TO RISE AND HARVEST IT THEN INDEED MUST WE ALL STARVE +4294-35475-0022-1907: THE (GRANDAME->GRAND DAME) WHOM HE SUPPLIED WITH FAGOTS THE MERCHANT WHOM HE RESCUED FROM ROBBERS THE KING'S (COUNCILLOR->COUNSELLOR) TO WHOM HE GAVE AID ALL BECAME HIS FRIENDS UP AND DOWN THE LAND TO BEGGAR OR LORD HOMELESS WANDERER OR HIGH BORN DAME HE GLADLY GAVE UNSELFISH SERVICE ALL UNSOUGHT AND SUCH AS HE HELPED STRAIGHTWAY BECAME HIS FRIENDS +4294-35475-0023-1908: TO HIM WHO COULD BRING HER BACK TO HER FATHER'S CASTLE SHOULD BE GIVEN THE THRONE AND KINGDOM AS WELL AS THE PRINCESS HERSELF SO FROM FAR AND NEAR INDEED FROM ALMOST EVERY COUNTRY UNDER THE SUN CAME KNIGHTS AND PRINCES TO FIGHT THE OGRE +4294-35475-0024-1909: AMONG THOSE WHO DREW BACK WERE (ETHELRIED'S->EPILID'S) BROTHERS THE THREE THAT WERE DARK AND THE THREE THAT WERE FAIR +4294-35475-0025-1910: BUT (ETHELRIED HEEDED->ETHELRED HE DID) NOT THEIR TAUNTS +4294-35475-0026-1911: SO THEY ALL CRIED OUT LONG AND LOUD LONG LIVE THE PRINCE PRINCE (CISEAUX->ISAU) +4294-9934-0000-1912: HE FELT (WHAT->WITH) THE EARTH MAY POSSIBLY FEEL AT THE MOMENT WHEN IT IS TORN OPEN WITH THE IRON IN ORDER THAT GRAIN MAY BE DEPOSITED WITHIN IT IT FEELS ONLY THE WOUND THE QUIVER OF THE GERM (AND->*) THE JOY OF THE FRUIT ONLY (ARRIVE->ARRIVES) LATER +4294-9934-0001-1913: HE HAD BUT JUST ACQUIRED A FAITH MUST HE THEN (REJECT IT->REJECTED) ALREADY +4294-9934-0002-1914: HE AFFIRMED TO HIMSELF THAT HE WOULD NOT HE DECLARED TO HIMSELF THAT HE WOULD NOT DOUBT AND HE BEGAN TO DOUBT IN SPITE OF HIMSELF +4294-9934-0003-1915: TO STAND BETWEEN TWO RELIGIONS FROM ONE OF WHICH YOU HAVE NOT AS YET EMERGED AND ANOTHER INTO WHICH YOU HAVE NOT YET ENTERED IS INTOLERABLE AND TWILIGHT IS PLEASING ONLY TO BAT LIKE SOULS +4294-9934-0004-1916: MARIUS WAS CLEAR EYED AND HE REQUIRED THE TRUE LIGHT +4294-9934-0005-1917: THE HALF LIGHTS OF DOUBT PAINED HIM +4294-9934-0006-1918: WHATEVER MAY HAVE BEEN HIS DESIRE TO REMAIN WHERE HE WAS HE COULD NOT (HALT->HELP) THERE HE WAS IRRESISTIBLY CONSTRAINED TO CONTINUE TO ADVANCE TO EXAMINE TO THINK TO MARCH FURTHER +4294-9934-0007-1919: HE FEARED AFTER HAVING TAKEN SO MANY STEPS WHICH HAD BROUGHT HIM NEARER TO HIS FATHER TO NOW TAKE A STEP WHICH SHOULD ESTRANGE HIM FROM THAT FATHER +4294-9934-0008-1920: HIS DISCOMFORT WAS AUGMENTED BY ALL THE REFLECTIONS WHICH OCCURRED TO HIM +4294-9934-0009-1921: IN THE TROUBLED STATE OF HIS CONSCIENCE HE NO LONGER THOUGHT OF CERTAIN SERIOUS SIDES OF EXISTENCE +4294-9934-0010-1922: THEY SOON ELBOWED HIM ABRUPTLY +4294-9934-0011-1923: REQUEST (COURFEYRAC->COURFERAC) TO COME AND TALK WITH ME SAID MARIUS +4294-9934-0012-1924: WHAT IS TO BECOME OF YOU SAID (COURFEYRAC->CURFAC) +4294-9934-0013-1925: WHAT ARE YOU GOING TO DO I DO NOT KNOW +4294-9934-0014-1926: SILVER GOLD HERE IT IS +4294-9934-0015-1927: YOU WILL THEN HAVE ONLY A PAIR OF TROUSERS A (WAISTCOAT->WEST COAT) A HAT AND A COAT AND MY BOOTS +4294-9934-0016-1928: THAT WILL BE ENOUGH +4294-9934-0017-1929: NO IT IS NOT GOOD WHAT (WILL YOU->WE) DO AFTER THAT +4294-9934-0018-1930: DO YOU KNOW GERMAN NO +4294-9934-0019-1931: IT IS BADLY PAID WORK BUT ONE CAN LIVE BY IT +4294-9934-0020-1932: THE CLOTHES DEALER WAS SENT FOR +4294-9934-0021-1933: HE PAID TWENTY FRANCS FOR THE CAST OFF GARMENTS THEY WENT TO THE 
(WATCHMAKER'S->WATCHMAKERS) +4294-9934-0022-1934: HE BOUGHT THE WATCH FOR FORTY FIVE FRANCS +4294-9934-0023-1935: HELLO I HAD FORGOTTEN THAT SAID MARIUS +4294-9934-0024-1936: THE LANDLORD PRESENTED HIS BILL WHICH HAD TO BE PAID ON THE SPOT +4294-9934-0025-1937: I HAVE TEN FRANCS LEFT SAID MARIUS +4294-9934-0026-1938: THAT WILL BE SWALLOWING A TONGUE VERY FAST OR A HUNDRED SOUS VERY SLOWLY +4294-9934-0027-1939: ONE MORNING ON HIS RETURN FROM THE (LAW->LAST) SCHOOL MARIUS FOUND A LETTER FROM HIS AUNT AND THE SIXTY (PISTOLES->PISTOL) THAT IS TO SAY SIX HUNDRED FRANCS IN GOLD (IN->AND) A SEALED BOX +4294-9934-0028-1940: MARIUS SENT BACK (THE->FOR) THIRTY LOUIS TO HIS AUNT WITH (A->THE) RESPECTFUL LETTER IN WHICH HE STATED THAT HE HAD (SUFFICIENT->SUSPICIENT) MEANS OF SUBSISTENCE AND THAT HE SHOULD BE ABLE THENCEFORTH TO SUPPLY ALL HIS NEEDS +4294-9934-0029-1941: AT THAT MOMENT HE HAD THREE FRANCS LEFT +4350-10919-0000-2716: HE PERCEIVED THAT IT WAS NO GOOD TALKING TO THE OLD MAN AND THAT THE PRINCIPAL PERSON IN THE HOUSE WAS THE MOTHER +4350-10919-0001-2717: BEFORE HER HE DECIDED TO SCATTER HIS PEARLS +4350-10919-0002-2718: THE PRINCESS WAS DISTRACTED AND DID NOT KNOW WHAT TO DO SHE FELT SHE HAD SINNED AGAINST KITTY +4350-10919-0003-2719: WELL DOCTOR DECIDE OUR (FATE->PHAETON) SAID THE PRINCESS TELL ME EVERYTHING +4350-10919-0004-2720: IS (THERE->THEIR) HOPE SHE MEANT TO SAY BUT HER LIPS QUIVERED AND SHE COULD NOT UTTER THE QUESTION WELL DOCTOR +4350-10919-0005-2721: AS YOU PLEASE THE PRINCESS WENT OUT WITH A SIGH +4350-10919-0006-2722: THE FAMILY DOCTOR RESPECTFULLY CEASED IN THE MIDDLE OF HIS OBSERVATIONS +4350-10919-0007-2723: AND THERE ARE INDICATIONS (MALNUTRITION->MALTRICIAN) NERVOUS EXCITABILITY AND SO ON +4350-10919-0008-2724: THE QUESTION (STANDS->SENDS) THUS IN PRESENCE OF INDICATIONS OF (TUBERCULOUS->TUBERCUOUS) PROCESS WHAT IS TO BE DONE TO MAINTAIN NUTRITION +4350-10919-0009-2725: YES (THAT'S AN->I CAN) UNDERSTOOD THING RESPONDED THE CELEBRATED PHYSICIAN AGAIN GLANCING AT HIS WATCH +4350-10919-0010-2726: BEG PARDON IS THE (YAUSKY->OWESKEE) BRIDGE DONE YET OR SHALL I HAVE TO DRIVE (AROUND->HER ON) +4350-10919-0011-2727: HE ASKED AH IT IS +4350-10919-0012-2728: OH WELL THEN I CAN DO IT IN TWENTY MINUTES +4350-10919-0013-2729: AND (HOW->*) ABOUT (A TOUR->IT TO) ABROAD ASKED THE FAMILY DOCTOR +4350-10919-0014-2730: WHAT IS WANTED IS (*->THE) MEANS OF IMPROVING NUTRITION AND NOT FOR LOWERING IT +4350-10919-0015-2731: THE FAMILY DOCTOR LISTENED ATTENTIVELY AND RESPECTFULLY +4350-10919-0016-2732: BUT IN (FAVOR->FAVOUR) OF FOREIGN TRAVEL I WOULD URGE THE CHANGE OF HABITS THE REMOVAL FROM CONDITIONS CALLING UP REMINISCENCES +4350-10919-0017-2733: AND THEN THE MOTHER WISHES IT HE ADDED +4350-10919-0018-2734: AH WELL (IN->*) THAT (CASE->HAS) TO BE SURE LET THEM GO ONLY THOSE GERMAN (QUACKS->CLACKS) ARE MISCHIEVOUS +4350-10919-0019-2735: OH TIME'S UP ALREADY AND HE WENT TO THE DOOR +4350-10919-0020-2736: THE CELEBRATED DOCTOR ANNOUNCED TO THE PRINCESS A FEELING OF WHAT WAS DUE FROM HIM DICTATED HIS DOING SO THAT HE OUGHT TO SEE THE PATIENT ONCE MORE +4350-10919-0021-2737: (OH->O) NO ONLY A FEW DETAILS PRINCESS COME THIS WAY +4350-10919-0022-2738: AND THE MOTHER ACCOMPANIED BY THE DOCTOR WENT INTO THE DRAWING ROOM TO KITTY +4350-10919-0023-2739: WHEN THE DOCTOR CAME IN SHE FLUSHED CRIMSON AND HER EYES FILLED WITH TEARS +4350-10919-0024-2740: SHE ANSWERED HIM AND ALL AT ONCE GOT UP FURIOUS +4350-10919-0025-2741: EXCUSE ME DOCTOR BUT THERE IS REALLY NO OBJECT IN THIS +4350-10919-0026-2742: THIS 
IS THE THIRD TIME YOU'VE ASKED ME THE SAME THING +4350-10919-0027-2743: THE (CELEBRATED->CLEARED) DOCTOR DID NOT TAKE (OFFENSE->OFFENCE) +4350-10919-0028-2744: NERVOUS IRRITABILITY HE SAID TO THE PRINCESS WHEN (KITTY->KATY) HAD LEFT THE ROOM HOWEVER I HAD FINISHED +4350-10919-0029-2745: AND THE DOCTOR BEGAN SCIENTIFICALLY EXPLAINING TO THE PRINCESS AS AN EXCEPTIONALLY INTELLIGENT WOMAN THE CONDITION OF THE YOUNG PRINCESS AND CONCLUDED BY INSISTING ON THE DRINKING OF THE WATERS WHICH WERE CERTAINLY HARMLESS +4350-10919-0030-2746: (AT->BUT) THE QUESTION SHOULD THEY GO ABROAD THE DOCTOR PLUNGED INTO DEEP MEDITATION AS THOUGH RESOLVING A WEIGHTY PROBLEM +4350-10919-0031-2747: FINALLY HIS DECISION WAS PRONOUNCED THEY WERE TO GO ABROAD BUT TO PUT NO FAITH IN FOREIGN QUACKS AND TO APPLY TO HIM IN ANY NEED +4350-10919-0032-2748: IT SEEMED AS THOUGH SOME PIECE OF GOOD FORTUNE HAD COME TO PASS AFTER THE DOCTOR HAD GONE +4350-10919-0033-2749: THE MOTHER WAS MUCH MORE CHEERFUL WHEN SHE WENT BACK TO HER DAUGHTER AND KITTY PRETENDED TO BE MORE CHEERFUL +4350-9170-0000-2750: EDUCATED PEOPLE OF THE UPPER CLASSES ARE TRYING TO STIFLE THE (EVER GROWING->EVERGREWING) SENSE OF THE NECESSITY OF TRANSFORMING THE EXISTING SOCIAL ORDER +4350-9170-0001-2751: (THIS IS->MISSUS) ABSOLUTELY INCORRECT +4350-9170-0002-2752: IN THE SOCIAL CONCEPTION OF LIFE IT IS SUPPOSED THAT SINCE THE AIM OF LIFE IS FOUND IN GROUPS OF INDIVIDUALS INDIVIDUALS WILL VOLUNTARILY SACRIFICE THEIR OWN (INTERESTS->INTEREST) FOR THE (INTERESTS->INTEREST) OF THE GROUP +4350-9170-0003-2753: THE CHAMPIONS OF THE SOCIAL CONCEPTION OF LIFE USUALLY TRY TO CONNECT THE IDEA OF AUTHORITY THAT IS OF VIOLENCE WITH THE IDEA OF MORAL INFLUENCE BUT THIS CONNECTION IS QUITE IMPOSSIBLE +4350-9170-0004-2754: THE MAN WHO (IS->WAS) CONTROLLED BY MORAL INFLUENCE ACTS IN ACCORDANCE WITH HIS OWN DESIRES +4350-9170-0005-2755: THE BASIS OF AUTHORITY IS BODILY VIOLENCE +4350-9170-0006-2756: THE POSSIBILITY OF APPLYING BODILY VIOLENCE TO PEOPLE IS PROVIDED ABOVE ALL BY AN ORGANIZATION OF ARMED MEN TRAINED TO ACT IN UNISON (IN->AND) SUBMISSION TO ONE WILL +4350-9170-0007-2757: THESE BANDS OF ARMED MEN SUBMISSIVE TO A SINGLE WILL ARE WHAT CONSTITUTE THE ARMY +4350-9170-0008-2758: THE ARMY HAS ALWAYS BEEN AND STILL IS THE BASIS OF POWER +4350-9170-0009-2759: POWER IS ALWAYS IN THE HANDS OF THOSE WHO CONTROL THE ARMY AND ALL MEN IN POWER FROM THE ROMAN CAESARS TO THE RUSSIAN AND GERMAN EMPERORS TAKE MORE INTEREST IN THEIR ARMY THAN IN ANYTHING AND COURT POPULARITY IN THE ARMY KNOWING THAT IF THAT IS ON THEIR SIDE THEIR POWER IS SECURE +4350-9170-0010-2760: INDEED IT COULD NOT BE OTHERWISE +4350-9170-0011-2761: ONLY UNDER THOSE CONDITIONS COULD THE SOCIAL ORGANIZATION BE JUSTIFIED +4350-9170-0012-2762: BUT SINCE THIS IS NOT THE CASE AND ON THE CONTRARY MEN (IN->AND) POWER ARE ALWAYS FAR FROM BEING SAINTS THROUGH THE VERY FACT OF THEIR POSSESSION OF POWER THE SOCIAL ORGANIZATION BASED ON POWER HAS NO JUSTIFICATION +4350-9170-0013-2763: EVEN IF THERE WAS ONCE A TIME WHEN OWING TO THE LOW (STANDARD->STANDARDS) OF MORALS (AND->WHEN) THE DISPOSITION OF MEN TO VIOLENCE THE EXISTENCE OF AN AUTHORITY TO RESTRAIN SUCH VIOLENCE WAS AN ADVANTAGE BECAUSE THE VIOLENCE OF (*->THE) GOVERNMENT WAS LESS THAN THE VIOLENCE OF INDIVIDUALS ONE CANNOT BUT SEE THAT THIS ADVANTAGE COULD NOT BE LASTING +4350-9170-0014-2764: BETWEEN THE MEMBERS OF ONE STATE SUBJECT TO A SINGLE AUTHORITY THE (STRIFE->STRIPE) BETWEEN (*->THE) INDIVIDUALS (SEEMS->SEEMED) STILL LESS AND (THE->A) LIFE OF THE STATE SEEMS 
EVEN MORE SECURE +4350-9170-0015-2765: IT WAS PRODUCED ON ONE HAND BY THE NATURAL GROWTH OF POPULATION AND ON THE OTHER BY (STRUGGLE AND->STRUGGLING) CONQUEST +4350-9170-0016-2766: AFTER CONQUEST THE POWER OF THE EMPEROR PUTS AN END TO INTERNAL DISSENSIONS AND SO THE STATE CONCEPTION OF LIFE JUSTIFIES ITSELF +4350-9170-0017-2767: BUT THIS JUSTIFICATION IS NEVER MORE THAN TEMPORARY +4350-9170-0018-2768: INTERNAL DISSENSIONS DISAPPEAR ONLY IN PROPORTION TO THE DEGREE OF OPPRESSION EXERTED BY THE AUTHORITY OVER THE (DISSENTIENT->DISINDIAN) INDIVIDUALS +4350-9170-0019-2769: (GOVERNMENT->GOVERN) AUTHORITY EVEN IF IT DOES SUPPRESS PRIVATE VIOLENCE ALWAYS INTRODUCES INTO THE LIFE OF MEN FRESH FORMS OF VIOLENCE WHICH TEND TO BECOME GREATER AND GREATER IN PROPORTION TO THE DURATION AND STRENGTH OF THE GOVERNMENT +4350-9170-0020-2770: AND THEREFORE THE OPPRESSION OF THE OPPRESSED ALWAYS GOES ON GROWING UP TO THE FURTHEST LIMIT BEYOND WHICH IT CANNOT GO WITHOUT KILLING THE GOOSE WITH THE (GOLDEN EGGS->GOLD KNIFE) +4350-9170-0021-2771: THE MOST CONVINCING EXAMPLE OF THIS IS TO BE FOUND IN THE CONDITION OF THE WORKING CLASSES OF OUR EPOCH WHO ARE IN REALITY NO BETTER THAN THE SLAVES OF ANCIENT (TIMES->TIME) SUBDUED BY CONQUEST +4350-9170-0022-2772: SO IT (HAS->IS) ALWAYS (BEEN->THEN) +4350-9170-0023-2773: FOOTNOTE THE FACT THAT IN AMERICA THE ABUSES OF AUTHORITY EXIST IN SPITE OF THE SMALL NUMBER OF THEIR TROOPS NOT ONLY FAILS TO DISPROVE THIS POSITION BUT POSITIVELY CONFIRMS IT +4350-9170-0024-2774: THE UPPER CLASSES KNOW THAT AN ARMY OF FIFTY THOUSAND WILL SOON BE INSUFFICIENT AND NO LONGER RELYING ON (PINKERTON'S->PINKERTENT'S) MEN THEY FEEL THAT THE SECURITY OF THEIR POSITION DEPENDS ON THE INCREASED STRENGTH OF THE ARMY +4350-9170-0025-2775: THE REASON TO WHICH HE GAVE EXPRESSION IS ESSENTIALLY THE SAME AS THAT WHICH MADE THE FRENCH KINGS AND THE POPES ENGAGE SWISS AND SCOTCH GUARDS AND MAKES THE RUSSIAN AUTHORITIES OF TO DAY SO CAREFULLY DISTRIBUTE THE RECRUITS SO THAT THE REGIMENTS FROM THE (FRONTIERS->FRONTIER THEY) ARE STATIONED IN CENTRAL DISTRICTS AND THE REGIMENTS FROM THE (CENTER->CENTRE) ARE STATIONED ON THE FRONTIERS +4350-9170-0026-2776: THE MEANING OF (CAPRIVI'S->CAPRIVY) SPEECH PUT INTO (PLAIN->PLAY AND) LANGUAGE IS THAT (FUNDS->FONDS) ARE NEEDED NOT TO RESIST FOREIGN FOES BUT TO BUY UNDER OFFICERS TO BE READY TO ACT AGAINST THE ENSLAVED TOILING MASSES +4350-9170-0027-2777: AND THIS ABNORMAL ORDER OF (THINGS->THANKS) IS MAINTAINED BY THE ARMY +4350-9170-0028-2778: BUT THERE IS NOT ONLY ONE GOVERNMENT THERE ARE OTHER GOVERNMENTS (EXPLOITING->EXPLODING) THEIR SUBJECTS BY (VIOLENCE IN->VIOLENT AND) THE SAME WAY AND (*->ARE) ALWAYS READY TO POUNCE DOWN ON ANY OTHER GOVERNMENT AND CARRY OFF THE FRUITS OF THE TOIL OF ITS (ENSLAVED->ENSLAVE) SUBJECTS +4350-9170-0029-2779: AND SO EVERY GOVERNMENT NEEDS AN ARMY ALSO TO PROTECT ITS BOOTY FROM ITS (NEIGHBOR->NEIGHBOUR) BRIGANDS +4350-9170-0030-2780: THIS INCREASE IS CONTAGIOUS AS MONTESQUIEU POINTED OUT (ONE->A) HUNDRED FIFTY YEARS AGO +4350-9170-0031-2781: EVERY INCREASE IN THE ARMY OF ONE STATE WITH THE AIM OF SELF DEFENSE AGAINST ITS SUBJECTS BECOMES A (SOURCE->SORT) OF DANGER FOR NEIGHBORING STATES AND CALLS FOR A SIMILAR INCREASE IN THEIR ARMIES +4350-9170-0032-2782: THE DESPOTISM OF (A->THE) GOVERNMENT ALWAYS INCREASES WITH THE STRENGTH OF THE ARMY AND ITS EXTERNAL SUCCESSES AND THE AGGRESSIVENESS OF A GOVERNMENT INCREASES WITH ITS INTERNAL DESPOTISM +4350-9170-0033-2783: THE RIVALRY OF THE EUROPEAN STATES (IN->AND) CONSTANTLY INCREASING 
THEIR FORCES HAS REDUCED THEM TO THE NECESSITY OF HAVING RECOURSE TO UNIVERSAL MILITARY SERVICE SINCE BY THAT MEANS THE GREATEST POSSIBLE NUMBER OF SOLDIERS IS OBTAINED AT THE LEAST POSSIBLE EXPENSE +4350-9170-0034-2784: AND BY THIS MEANS ALL CITIZENS ARE UNDER ARMS TO SUPPORT THE INIQUITIES PRACTICED UPON THEM ALL CITIZENS HAVE BECOME THEIR OWN (OPPRESSORS->IMPRESSORS) +4350-9170-0035-2785: THIS INCONSISTENCY HAS BECOME OBVIOUS (IN->AND) UNIVERSAL MILITARY SERVICE +4350-9170-0036-2786: IN FACT THE WHOLE SIGNIFICANCE OF THE SOCIAL CONCEPTION OF LIFE CONSISTS IN MAN'S RECOGNITION OF THE BARBARITY OF STRIFE BETWEEN INDIVIDUALS AND THE TRANSITORINESS OF PERSONAL LIFE ITSELF AND THE TRANSFERENCE OF THE AIM OF LIFE (TO->THE) GROUPS OF PERSONS +4350-9170-0037-2787: BUT WITH UNIVERSAL MILITARY SERVICE IT COMES TO PASS THAT MEN AFTER MAKING EVERY SACRIFICE TO GET RID OF THE CRUELTY OF STRIFE AND THE INSECURITY OF EXISTENCE ARE CALLED UPON TO FACE ALL THE PERILS THEY HAD MEANT TO AVOID +4350-9170-0038-2788: BUT INSTEAD OF DOING THAT THEY (EXPOSE THE->EXPOSED TO) INDIVIDUALS TO THE SAME NECESSITY OF STRIFE SUBSTITUTING (STRIFE->STRIKE) WITH INDIVIDUALS OF OTHER STATES FOR STRIFE WITH NEIGHBORS +4350-9170-0039-2789: THE TAXES RAISED FROM THE PEOPLE FOR WAR PREPARATIONS ABSORB THE GREATER PART OF THE PRODUCE OF LABOR WHICH THE ARMY OUGHT TO DEFEND +4350-9170-0040-2790: THE DANGER OF WAR EVER READY TO BREAK OUT RENDERS ALL REFORMS OF LIFE SOCIAL LIFE VAIN AND FRUITLESS +4350-9170-0041-2791: BUT THE FATAL SIGNIFICANCE OF UNIVERSAL MILITARY SERVICE AS THE MANIFESTATION OF THE CONTRADICTION INHERENT IN THE SOCIAL CONCEPTION OF LIFE IS NOT ONLY APPARENT IN THAT +4350-9170-0042-2792: (GOVERNMENTS->GOVERNMENT) ASSERT THAT ARMIES ARE NEEDED ABOVE ALL FOR EXTERNAL DEFENSE BUT THAT IS NOT TRUE +4350-9170-0043-2793: (THEY ARE->THERE) NEEDED PRINCIPALLY AGAINST THEIR SUBJECTS AND EVERY MAN UNDER UNIVERSAL MILITARY SERVICE BECOMES AN ACCOMPLICE (IN->AND) ALL (THE->THAT) ACTS OF VIOLENCE OF THE GOVERNMENT AGAINST THE CITIZENS WITHOUT ANY CHOICE OF HIS OWN +4350-9170-0044-2794: AND FOR THE SAKE OF WHAT AM I MAKING THEM +4350-9170-0045-2795: I (AM->*) EXPECTED FOR THE SAKE OF (THE->A) STATE TO MAKE (THESE->THE) SACRIFICES TO RENOUNCE EVERYTHING THAT CAN BE PRECIOUS TO MAN PEACE FAMILY SECURITY AND HUMAN DIGNITY +4350-9170-0046-2796: EXCEPT FOR THE STATE THEY SAY WE SHOULD BE EXPOSED TO THE ATTACKS OF EVIL DISPOSED PERSONS IN OUR OWN COUNTRY +4350-9170-0047-2797: WE (KNOW->*) NOW (*->KNOW) THAT THREATS AND PUNISHMENTS CANNOT DIMINISH THEIR NUMBER THAT THAT CAN ONLY BE DONE BY CHANGE OF ENVIRONMENT AND MORAL INFLUENCE +4350-9170-0048-2798: SO THAT (THE->THIS) JUSTIFICATION OF STATE VIOLENCE ON THE GROUND OF THE PROTECTION IT GIVES US FROM EVIL (DISPOSED->DISPOS) PERSONS EVEN IF (IT->I) HAD SOME FOUNDATION THREE OR FOUR CENTURIES AGO HAS NONE WHATEVER NOW +4350-9170-0049-2799: EXCEPT FOR THE STATE THEY TELL US WE SHOULD NOT HAVE ANY RELIGION EDUCATION CULTURE MEANS OF COMMUNICATION AND SO ON +4350-9170-0050-2800: WITHOUT THE STATE MEN WOULD NOT HAVE BEEN ABLE TO FORM THE SOCIAL INSTITUTIONS NEEDED FOR DOING (ANY THING->ANYTHING) +4350-9170-0051-2801: THIS ARGUMENT TOO WAS WELL FOUNDED ONLY SOME CENTURIES AGO +4350-9170-0052-2802: THE GREAT EXTENSION OF MEANS OF COMMUNICATION AND INTERCHANGE OF IDEAS HAS MADE MEN COMPLETELY ABLE TO DISPENSE WITH STATE AID IN FORMING SOCIETIES ASSOCIATIONS CORPORATIONS AND CONGRESSES FOR SCIENTIFIC ECONOMIC AND POLITICAL OBJECTS +4350-9170-0053-2803: WITHOUT GOVERNMENTS NATIONS WOULD BE 
ENSLAVED BY THEIR NEIGHBORS +4350-9170-0054-2804: THE GOVERNMENT THEY TELL US WITH ITS ARMY IS NECESSARY TO DEFEND US FROM NEIGHBORING STATES WHO MIGHT ENSLAVE US +4350-9170-0055-2805: AND IF (DEFENSE->DEFENCE) AGAINST BARBAROUS NATIONS IS MEANT ONE THOUSANDTH PART OF THE TROOPS NOW UNDER ARMS WOULD BE AMPLY SUFFICIENT FOR THAT PURPOSE +4350-9170-0056-2806: THE POWER OF THE STATE FAR FROM BEING A SECURITY AGAINST THE ATTACKS OF OUR NEIGHBORS EXPOSES US ON THE CONTRARY TO MUCH GREATER DANGER OF SUCH ATTACKS +4350-9170-0057-2807: EVEN LOOKING AT IT PRACTICALLY WEIGHING THAT IS TO SAY ALL THE BURDENS LAID ON HIM BY THE (STATE->STATES) NO MAN CAN FAIL TO SEE THAT FOR HIM PERSONALLY TO COMPLY WITH (*->THE) STATE DEMANDS AND SERVE IN THE ARMY WOULD IN THE MAJORITY OF CASES BE MORE DISADVANTAGEOUS THAN TO REFUSE TO DO SO +4350-9170-0058-2808: TO RESIST WOULD NEED INDEPENDENT THOUGHT AND EFFORT OF WHICH EVERY MAN IS NOT CAPABLE +4350-9170-0059-2809: SO MUCH FOR THE ADVANTAGES (AND->OF) DISADVANTAGES OF BOTH LINES OF CONDUCT FOR A MAN OF THE WEALTHY (CLASSES AN->CLASS AND) OPPRESSOR +4350-9170-0060-2810: FOR A MAN OF THE POOR WORKING CLASS THE ADVANTAGES AND DISADVANTAGES WILL BE THE SAME BUT WITH A GREAT INCREASE OF DISADVANTAGES +4852-28311-0000-2098: SAY YOU KNOW (SUMTHIN->SOMETHING) +4852-28311-0001-2099: CHRIS LOOKED FROM A (NICKEL PLATED FLASHLIGHT->COMPLATED FLASH LIKE) TO A (CAR JACK->CARJACK) AND SPARK PLUG +4852-28311-0002-2100: (KNOW WHO->NO ONE) NEEDS A JOB (BAD->BAN) THAT'S (JAKEY->JAKIE) HARRIS +4852-28311-0003-2101: O K HE SAID +4852-28311-0004-2102: ONLY WHY DIDN'T YOU ASK HIM YOURSELF +4852-28311-0005-2103: MIKE BECAME UNEASY AND FISHED (AN ELASTIC->IT MOLASTIC) BAND OUT OF HIS POCKET MADE A FLICK OF PAPER AND SENT IT SOARING OUT (INTO M->AN ENEM) STREET +4852-28311-0006-2104: WELL HE ADMITTED I DID +4852-28311-0007-2105: CHRIS ASKED (AND->HIM) FOR THE FIRST TIME THAT DAY THE HEAVY WEIGHT HE CARRIED WITHIN HIM LIFTED AND LIGHTENED A LITTLE +4852-28311-0008-2106: (THINK HE->THINKING) REALLY NEEDS IT HE PURSUED +4852-28311-0009-2107: HE WOULD HAVE LIKED TO GET THE JOB FOR (JAKEY->JAKIE) WHO NEEDED IT BUT SOMEHOW THE TASK OF FACING MISTER WICKER ESPECIALLY NOW THAT THE LIGHT WAS GOING AND DUSK (EDGING->EDGED) INTO THE STREETS WAS NOT WHAT (CHRIS HAD->CHRISTEN) INTENDED FOR ENDING THE AFTERNOON +4852-28311-0010-2108: MIKE'S EXPRESSION CHANGED AT (ONCE->ONE'S) TO ONE OF TRIUMPH BUT (CHRIS->BRUCE) WAS ONLY (PARTLY->PARTIALLY) ENCOURAGED +4852-28311-0011-2109: (BETCHA AREN'T->BITCHER AND) GOIN AFTER ALL (CHRIS->THIS) TURNED (ON->TO) HIM +4852-28311-0012-2110: MIKE WAS STANDING ON THE CORNER +4852-28311-0013-2111: (AW SHUCKS->AH SHOCKS) +4852-28311-0014-2112: CHRIS STARTED OFF ONCE MORE PASSING (THE->A) BLEAK LITTLE VICTORIAN CHURCH PERCHED ON THE HILL ABOVE MISTER WICKER'S HOUSE +4852-28311-0015-2113: AN EMPTY LOT CUT (*->IN) INTO BY CHURCH LANE GAVE A LOOK OF ISOLATION TO THE (L->ALE) SHAPED BRICK BUILDING THAT SERVED MISTER (WICKER AS->WICKER'S) BOTH HOUSE AND PLACE OF BUSINESS +4852-28311-0016-2114: (THE->NO) LONGER WING TOWARD THE BACK (HAD->GOT) A BACK DOOR THAT OPENED (ONTO->ON A) WATER STREET THE SPACE BETWEEN THE HOUSE AND WISCONSIN (AVENUE->AVIGUE) HAD BEEN MADE INTO A NEAT OBLONG FLOWER GARDEN FENCED OFF FROM THE SIDEWALK BY BOX (SHRUBS->SHRUGS) AND A WHITE PICKET FENCE +4852-28311-0017-2115: A LIVID YELLOW STAINED THE HORIZON BEYOND THE FACTORIES AND (GRAY->GLAY) CLOUDS LOWERED AND TUMBLED ABOVE +4852-28311-0018-2116: THE AIR WAS GROWING CHILL AND (CHRIS->CHRIST) DECIDED TO 
FINISH (HIS->THE) JOB +4852-28311-0019-2117: ALL AT ONCE (HE->YOU) WONDERED HOW HIS MOTHER WAS AND EVERYTHING IN HIM (PINCHED->IMPINCED) AND TIGHTENED ITSELF +4852-28311-0020-2118: AT THE FOOT OF THE HILL HE REACHED THE HOUSE +4852-28311-0021-2119: THERE WERE THREE THINGS THAT ALWAYS CAUGHT HIS EYE AMID THE LITTER OF DUSTY PIECES +4852-28311-0022-2120: ON THE LEFT THE COIL OF ROPE IN THE (CENTER->CENTRE) THE MODEL OF A SAILING SHIP IN A GREEN GLASS BOTTLE AND ON THE RIGHT THE WOODEN STATUE OF A NEGRO BOY (IN->AND) BAGGY TROUSERS TURKISH JACKET AND WHITE TURBAN +4852-28311-0023-2121: BUT THE NAME STILL SHOWED AT THE PROW AND MANY A TIME CHRIS SAFE AT HOME IN BED (HAD SAILED->EXHALED) IMAGINARY VOYAGES IN THE MIRABELLE +4852-28311-0024-2122: (HE HAD->HE'D) NEVER SEEN (ANYONE->ANY ONE) GO INTO MISTER (WICKER'S->HOOKER'S) SHOP NOW HE THOUGHT OF IT +4852-28311-0025-2123: HOW THEN DID HE LIVE AND WHAT DID HE EVER SELL +4852-28311-0026-2124: A SUDDEN CAR HORN (WOKE HIM->WALKING) FROM (HIS->THIS) DREAM +4852-28312-0000-2125: OF THE MANY TIMES HE HAD EXAMINED MISTER WICKER'S WINDOW AND (PORED->POURED) OVER THE ROPE THE SHIP AND THE NUBIAN BOY HE HAD NEVER GONE INTO MISTER (WICKER'S->ROOKER'S) SHOP +4852-28312-0001-2126: SO NOW ALONE UNTIL (SOMEONE->SOME ONE) SHOULD ANSWER THE BELL (HE->THEY) LOOKED EAGERLY IF UNEASILY AROUND HIM +4852-28312-0002-2127: WHAT WITH THE ONE WINDOW AND THE LOWERING DAY OUTSIDE THE LONG NARROW SHOP WAS (SOMBER->SOMBRE) +4852-28312-0003-2128: HEAVY HAND (HEWN->YOU AND) BEAMS CROSSED IT FROM ONE SIDE TO THE OTHER +4852-28312-0004-2129: MISTER (WICKER'S->OCRE'S) BACK BEING TOWARD THE SOURCE OF LIGHT CHRIS COULD NOT SEE HIS FACE +4852-28312-0005-2130: THE DOUBLE FANS OF MINUTE WRINKLES BREAKING FROM EYE (CORNER TO->CORNERED A) TEMPLE (AND JOINING->ENJOINING) WITH THOSE OVER THE (CHEEKBONES->SHEEP BONES) WERE DRAWN INTO THE HORIZONTAL LINES ACROSS THE DOMED FOREHEAD +4852-28312-0006-2131: LITTLE TUFTS OF WHITE (FUZZ->FUZ) ABOVE THE EARS WERE ALL THAT REMAINED OF THE ANTIQUARIAN'S HAIR BUT WHAT DREW AND HELD CHRIS'S GAZE WERE THE OLD MAN'S EYES +4852-28312-0007-2132: (CHRIS->CRIS) BLINKED AND LOOKED AGAIN YES THEY WERE STILL THERE +4852-28312-0008-2133: CHRIS SWALLOWED AND HIS VOICE CAME BACK TO HIM +4852-28312-0009-2134: YES SIR HE SAID +4852-28312-0010-2135: I SAW YOUR SIGN AND I KNOW A BOY WHO NEEDS THE JOB +4852-28312-0011-2136: HE'S (A->*) SCHOOLMATE OF MINE +4852-28312-0012-2137: (JAKEY HARRIS HIS->GIGERIS'S) NAME IS AND HE REALLY NEEDS THE JOB +4852-28312-0013-2138: I I JUST WONDERED IF THE PLACE WAS STILL OPEN +4852-28312-0014-2139: WHAT HE SAW WAS A FRESH CHEEKED LAD TALL FOR THIRTEEN STURDY WITH SINCERITY AND GOOD (HUMOR->HUMOUR) IN HIS FACE AND SOMETHING SENSITIVE AND APPEALING ABOUT HIS EYES +4852-28312-0015-2140: HE GUESSED THERE (*->IT) MUST BE A LIVELY FIRE IN THAT (ROOM->RUM) BEYOND +4852-28312-0016-2141: WOULD THAT INTERFERE WITH (JAKEY'S->JAKIE GIGS) GETTING THE JOB SIR +4852-28312-0017-2142: BUT EVEN AS HE SLOWLY TURNED THE THOUGHT PIERCED HIS MIND WHY (HAD HE->DO YOU) NOT (SEEN->SEE) THE REFLECTION OF THE (HEADLIGHTS->HEAD LIGHTS) OF THE CARS MOVING UP AROUND THE CORNER OF (WATER STREET AND UP->WALL AT HER STREET NOT) THE HILL TOWARD THE (TRAFFIC->EFFIC) SIGNALS +4852-28312-0018-2143: THE ROOM SEEMED OVERLY STILL +4852-28312-0019-2144: THEN IN THAT SECOND HE TURNED AND FACED ABOUT +4852-28312-0020-2145: THE WIDE BOW WINDOW WAS THERE BEFORE HIM THE THREE OBJECTS HE LIKED BEST SHOWING FROSTY IN THE MOONLIGHT THAT POURED IN FROM ACROSS THE WATER 
+4852-28312-0021-2146: ACROSS THE WATER WHERE WAS THE (FREEWAY->FREE WAY) +4852-28312-0022-2147: IT WAS NO LONGER THERE NOR WERE THE HIGH WALLS AND (SMOKESTACKS->SMOKE STACKS) OF FACTORIES TO BE SEEN +4852-28312-0023-2148: THE WAREHOUSES WERE STILL THERE +4852-28312-0024-2149: (FLABBERGASTED AND->FLABRA GASTENED IN) BREATHLESS CHRIS WAS UNAWARE THAT HE HAD MOVED CLOSER TO PEER OUT THE WINDOW IN EVERY DIRECTION +4852-28312-0025-2150: NO ELECTRIC SIGNS NO LAMPLIT STREETS +4852-28312-0026-2151: WHERE THE PEOPLE'S (DRUGSTORE->DRUG STORE) HAD STOOD BUT (A->*) HALF (*->AN) HOUR BEFORE ROSE THE ROOFS OF WHAT WAS EVIDENTLY AN INN +4852-28312-0027-2152: A COURTYARD WAS (SPARSELY->FIRSTLY) LIT BY A FLARING (TORCH OR->TORTURE) TWO SHOWING (A->THE) SWINGING SIGN HUNG ON (A->THE) POST +4852-28312-0028-2153: THE (POST WAS PLANTED->POSTPLETED) AT THE EDGE OF (WHAT->IT) WAS NOW A BROAD AND MUDDY ROAD +4852-28312-0029-2154: A COACH (WITH ITS TOP->WHICH HAD STOPPED) PILED HIGH WITH (LUGGAGE->LEGGED) STAMPED (TO A->O) HALT BESIDE THE FLAGGED COURTYARD +4852-28312-0030-2155: THEY MOVED INTO THE INN THE COACH RATTLED OFF TO THE STABLE +4852-28312-0031-2156: (MY->BY) WINDOW (HAS->AS) A POWER FOR THOSE FEW WHO ARE TO SEE +4852-28319-0000-2070: THE LEARNING (OF->AND) MAGIC WAS BY NO MEANS EASY +4852-28319-0001-2071: HE HAD TOLD HIS MASTER AT ONCE (ABOUT->HE GOT) SIMON GOSLER HIS (HORDE->HOARD) OF MONEY AND HIS HIDING PLACES FOR IT +4852-28319-0002-2072: CHRIS THEREFORE THREW HIMSELF (INTO->AND) ALL THE PRELIMINARIES OF HIS TASK +4852-28319-0003-2073: ONE AFTERNOON WHEN HE (*->HAD) RETURNED AFTER A REST TO MISTER WICKER'S STUDY HE SAW THAT THERE WAS SOMETHING NEW IN THE ROOM A BOWL WITH A (GOLDFISH->GOLD FISH) IN IT STOOD ON THE TABLE BUT MISTER WICKER WAS NOT TO BE SEEN +4852-28319-0004-2074: WHAT (SHALL->SHOULD) I (*->ALL I) DO FIRST +4852-28319-0005-2075: HOW (YOU HAVE IMPROVED->OFTEN PROVED) MY BOY (HE->IT) EXCLAIMED (IT->*) IS NOW TIME FOR YOU TO TRY (AND THIS->MISSUS) IS (AS->*) GOOD A CHANGE (AS->IS) ANY +4852-28319-0006-2076: SUPPOSE (I->A) CHANGE AND CAN'T (CHANGE->CHANCE) BACK +4852-28319-0007-2077: MISTER WICKER WAITED PATIENTLY BESIDE HIM FOR A FEW MOMENTS FOR CHRIS TO GET UP HIS COURAGE +4852-28319-0008-2078: (THEN AS->THAT IS) NOTHING HAPPENED WITH A VOICE LIKE A WHIP MISTER WICKER SAID START AT ONCE +4852-28319-0009-2079: (THE->*) SENSATION SPREAD FASTER AND FASTER +4852-28319-0010-2080: HIS HEAD SWAM AND HE FELT FAINT (AND->IN) A LITTLE SICK BUT HE PERSISTED THROUGH THE FINAL WORDS +4852-28319-0011-2081: HE THOUGHT NOT WITHOUT A FEELING OF PRIDE AND COMMENCED (*->THE) EXPERIMENTING WITH HIS TAIL AND FINS WITH SUCH ENTHUSIASM AND DELIGHT THAT SOME LITTLE TIME ELAPSED BEFORE MISTER WICKER'S VOICE BOOMED CLOSE BY +4852-28319-0012-2082: SEVENTY FOUR BOOK ONE THE RETURN +4852-28319-0013-2083: THE (FIGURE'S->FIGURES) SHOES CARVED IN SOME EASTERN STYLE HAD CURVED UP POINTING TOES +4852-28319-0014-2084: THEN ALL AT ONCE THE IDEA CAME TO CHRIS +4852-28319-0015-2085: IF HE WAS TO BE A MAGICIAN COULD HE MAKE THIS BOY COME TO LIFE +4852-28319-0016-2086: (HE->IT) SQUATTED ON HIS HAUNCHES (EXAMINING->EXAMINED) THE CARVED WOODEN FIGURE ATTENTIVELY AND FELT CONVINCED THAT ONCE ALIVE THE BOY WOULD BE AN IDEAL AND HAPPY COMPANION +4852-28319-0017-2087: BUT HOW DID ONE (*->A) CHANGE INANIMATE TO ANIMATE +4852-28319-0018-2088: (CHRIS->GRIS) GOT UP AND STOLE BACK TO MISTER WICKER'S DOOR +4852-28319-0019-2089: HE HEARD (THE->THAT) MAGICIAN GOING UP THE SPIRAL STAIRCASE TO HIS ROOM ABOVE AND AFTER CHANGING HIMSELF TO A 
MOUSE TO SLIP UNDER THE DOOR AND SEE THAT THE ROOM WAS REALLY EMPTY (CHRIS RESUMED HIS->MISTER JUNE'S) PROPER SHAPE AND OPENED THE DOORS OF THE CUPBOARD AT THE FAR END OF THE ROOM +4852-28319-0020-2090: THE AFTERNOON (RAINY->RAINING) BEFORE INCREASED IN STORM +4852-28319-0021-2091: (DUSK CAME->THUS GAINED) TWO HOURS BEFORE ITS TIME THUNDER (SNARLED->SNARLS) IN THE SKY +4852-28319-0022-2092: CERTAIN ELEMENTS WERE TO BE MIXED AND POURED AT THE PROPER TIME +4852-28319-0023-2093: MISTER WICKER BEGAN MOVING ABOUT UPSTAIRS THE (FLOORBOARDS->FOREBOARDS) CREAKED AND STILL CHRIS COULD NOT LEAVE UNTIL THE (POTION->FORTUNE) FUMED AND GLOWED +4852-28319-0024-2094: WITH INFINITE CAUTION CHRIS CLOSED THE DOOR SILENTLY BEHIND HIM AND RUNNING LIGHTLY FORWARD REACHED THE FIGURE (OF->AT) THE NEGRO BOY +4852-28319-0025-2095: IT WAS AS IF THE STIFFNESS MELTED +4852-28319-0026-2096: UNDER HIS EYES (THE WOODEN FOLDS->WIDDLED THOES) OF CLOTH BECAME RICH SILK EMBROIDERY GLEAMED IN ITS REALITY UPON THE COAT AND OH THE FACE +4852-28319-0027-2097: THE WOODEN GRIN LOOSENED THE LARGE EYES TURNED THE HAND HOLDING (THE->A) HARD BOUQUET OF CARVED FLOWERS MOVED (AND LET->*) THE BOUQUET FALL +4852-28330-0000-2044: THEY WENT DOWN TO THEIR QUARTERS FIRST +4852-28330-0001-2045: GUESS MISTER FINNEY WENT TO HIS QUARTERS I DON'T REMEMBER SEEING HIM CROSS THE DECK OR COME OVER THAT WAY AT ALL +4852-28330-0002-2046: NEXT NED CILLEY WAS RELIEVED (AT->TO) THE HELM BY (ELBERT->ELBER) JONES WHO TOOK OVER NED WENT ON DOWN +4852-28330-0003-2047: IT LOOKS TO ME AS IF IT COULD (HAVE BEEN->BIT OF IN) ONE OF SEVERAL PEOPLE AND I'LL BE SWITCHED IF I KNOW WHO I'LL KEEP MY EYES (OPEN->UP AND) +4852-28330-0004-2048: THE MIRABELLE WAS NEARING (TAHITI->DAEDI) +4852-28330-0005-2049: (WE'VE WATER->REVOLTA) AND FRESH STORES TO TAKE ON THERE +4852-28330-0006-2050: CHRIS LOST NO TIME AS SOON AS HE COULD DO IT WITHOUT BEING NOTICED (IN->AND) HURRYING DOWN TO HIS CABIN +4852-28330-0007-2051: CERTAINLY MY BOY BOOMED OUT THE CAPTAIN (HIS->AS) BLUE EYES ABRUPTLY KEEN AND PENETRATING +4852-28330-0008-2052: MISTER FINNEY (WILL->WOULD) BE SOME TIME ON DECK WE CANNOT BE (OVERHEARD IN->OWNED HEARD AND) HERE +4852-28330-0009-2053: HIS FACE (FROZE->ROSE) WITH NERVOUSNESS THAT THIS MIGHT (*->DO) NOT DO AS AN ANSWER AND HE STOOD STIFF AND STILL BEFORE CAPTAIN BLIZZARD +4852-28330-0010-2054: THE CAPTAIN SAT FORWARD IN HIS CHAIR LOOKING AT HIM FOR A LONG MOMENT CONSIDERING +4852-28330-0011-2055: THEN HE SAID WELL I DO NOT CARE FOR IT I CANNOT SAY (I->THAT) DO +4852-28330-0012-2056: (THIS->THE) SHIP IS MORE TO ME THAN (WIFE OR->MY FULL) MOTHER OR FAMILY +4852-28330-0013-2057: (HE->AND) PAUSED FINGERING HIS LOWER LIP AND LOOKING SIDEWAYS IN A REFLECTIVE FASHION AT (CHRIS->CRIS) STANDING BEFORE HIM +4852-28330-0014-2058: WE SHALL SAY NO MORE BUT I TRUST YOU UNDERSTAND THE RESPONSIBILITY YOU HAVE +4852-28330-0015-2059: (THIS->THE) SHIP ITS CARGO (AND->IN) ITS (MEN->MAN) WILL BE IN YOUR HANDS +4852-28330-0016-2060: YES SIR I THINK I CAN DO IT SAFELY OR I SHOULD NOT TRY SIR +4852-28330-0017-2061: CAPTAIN BLIZZARD'S ROUND PINK (FACE->FACED) CREASED IN (HIS->ITS) WINNING SMILE +4852-28330-0018-2062: HE THEN WENT ON TO DESCRIBE WHAT ELSE WAS TO FOLLOW THE COVERING OF THE SHIP WITH LEAVES TO MAKE IT BLEND WITH ITS SURROUNDINGS +4852-28330-0019-2063: (*->THE) CAMOUFLAGE WAS NOT A WORD THE CAPTAIN OR (ANYONE->ANY ONE) ELSE OF HIS TIME (YET->HE HAD) UNDERSTOOD +4852-28330-0020-2064: WHAT CAN BE SAID DURING THAT TIME SIR CHRIS THOUGHT TO ASK +4852-28330-0021-2065: I AM SOMEWHAT 
SKILLED IN (MEDICAMENTS->MEDICTS) I HAVE TO BE AS (*->A) CAPTAIN OF (A->*) SHIP AND (THE->*) CREW KNOW IT +4852-28330-0022-2066: I SHALL SAY THAT YOU ARE IN MY OWN CABIN SO THAT I CAN CARE FOR YOU +4852-28330-0023-2067: NOT SINCE HE HAD LEFT MISTER WICKER (HAD->AND) CHRIS FELT SUCH CONFIDENCE AS HE DID IN THE WORDS AND ACTIONS OF CAPTAIN BLIZZARD +4852-28330-0024-2068: HE KNEW NOW THAT HIS ABSENCE FOR AS LONG AS HE HAD (*->HAD) TO BE AWAY WOULD BE COVERED UP (AND->IN) SATISFACTORILY ACCOUNTED FOR +4852-28330-0025-2069: THEIR CONVERSATION HAD TAKEN SOME LITTLE WHILE +533-1066-0000-796: (WHEN->ONE) CHURCHYARDS YAWN +533-1066-0001-797: I KNEW WELL ENOUGH THAT HE MIGHT BE CARRIED (THOUSANDS->THOUSAND) OF MILES (IN THE->INTO) BOX CAR LOCKED IN PERHAPS WITHOUT WATER OR (FOOD->FOOT) +533-1066-0002-798: I AM SURE I KISSED (LIDDY->LADY) AND (I HAVE->I'VE) HAD TERRIBLE MOMENTS SINCE WHEN I (SEEM->SEEMED) TO REMEMBER KISSING MISTER JAMIESON TOO IN THE EXCITEMENT +533-1066-0003-799: FORTUNATELY WARNER (AND->ON) THE (DETECTIVES->DETECTIVE) WERE KEEPING BACHELOR (HALL IN THE->HOLLAND) LODGE +533-1066-0004-800: OUT OF DEFERENCE TO (LIDDY->LIVE) THEY WASHED (THEIR->HER) DISHES ONCE A DAY AND THEY (CONCOCTED->CONCLUDED) QUEER (MESSES->MASSES) ACCORDING TO THEIR SEVERAL ABILITIES +533-1066-0005-801: MISS (INNES->EAMES) HE SAID STOPPING ME AS I WAS ABOUT TO GO TO MY ROOM (UP STAIRS->UPSTAIRS) HOW ARE YOUR NERVES (TONIGHT->TO NIGHT) +533-1066-0006-802: I HAVE NONE I SAID HAPPILY +533-1066-0007-803: I MEAN HE PERSISTED DO YOU FEEL AS THOUGH YOU COULD GO THROUGH WITH SOMETHING RATHER UNUSUAL +533-1066-0008-804: THE MOST UNUSUAL THING I CAN THINK OF WOULD BE A PEACEFUL NIGHT +533-1066-0009-805: SOMETHING IS GOING TO OCCUR HE SAID +533-1066-0010-806: PUT ON HEAVY SHOES AND SOME (OLD->ALL) DARK CLOTHES AND MAKE UP YOUR MIND NOT TO BE SURPRISED AT ANYTHING +533-1066-0011-807: (LIDDY->LEAVING) WAS SLEEPING (THE->*) SLEEP OF THE JUST WHEN I WENT (UP STAIRS->UPSTAIRS) AND I HUNTED OUT MY THINGS CAUTIOUSLY +533-1066-0012-808: (THEY->YOU) WERE TALKING (CONFIDENTIALLY->UFFILIENTLY) TOGETHER BUT WHEN I CAME DOWN THEY CEASED +533-1066-0013-809: THERE WERE A FEW PREPARATIONS TO BE MADE (THE->*) LOCKS TO BE GONE OVER WINTERS TO BE (INSTRUCTED->INSTRUCTIVE) AS TO RENEWED (VIGILANCE->VISUALS) AND THEN AFTER (EXTINGUISHING->DISTINGUISHING) THE (HALL->WHOLE) LIGHT WE CREPT IN THE DARKNESS THROUGH THE FRONT DOOR AND INTO THE NIGHT +533-1066-0014-810: I ASKED NO QUESTIONS +533-1066-0015-811: (ONCE->WAS) ONLY SOMEBODY SPOKE AND THEN IT WAS AN EMPHATIC (BIT->FIT) OF PROFANITY FROM DOCTOR STEWART WHEN HE RAN INTO A WIRE FENCE +533-1066-0016-812: I (HARDLY->ARE TO) KNOW WHAT I EXPECTED +533-1066-0017-813: THE DOCTOR WAS PUFFING SOMEWHAT WHEN WE FINALLY CAME TO A HALT +533-1066-0018-814: I CONFESS THAT JUST AT THAT MINUTE EVEN SUNNYSIDE SEEMED A CHEERFUL SPOT +533-1066-0019-815: IN SPITE OF MYSELF I DREW MY BREATH IN SHARPLY +533-1066-0020-816: IT WAS ALEX (ARMED WITH->I'M THE) TWO LONG HANDLED SPADES +533-1066-0021-817: THE DOCTOR KEPT A (KEEN LOOKOUT->KIN LOOK OUT) BUT NO ONE APPEARED +533-1066-0022-818: THERE'S ONE THING SURE I'LL NOT BE SUSPECTED OF COMPLICITY +533-1066-0023-819: (A->THE) DOCTOR IS GENERALLY SUPPOSED TO BE (*->A) HANDIER AT (BURYING->BERING) FOLKS THAN (AT DIGGING->A TIGGING) THEM UP +533-1066-0024-820: I HELD ON TO HIM FRANTICALLY AND SOMEHOW I GOT (THERE AND->TERRANT) LOOKED DOWN +533-131556-0000-821: BUT HOW AM I TO (GET->ADD) OVER THE (TEN->TOWN) OR TWELVE DAYS THAT MUST YET ELAPSE BEFORE THEY GO 
+533-131556-0001-822: FOR NONE COULD (INJURE->ENDURE) ME AS HE HAS DONE OH +533-131556-0002-823: THE WORD STARES ME IN THE FACE LIKE A GUILTY CONFESSION BUT IT IS TRUE I HATE HIM I HATE HIM +533-131556-0003-824: I SOMETIMES THINK I OUGHT TO GIVE HIM CREDIT FOR THE GOOD FEELING HE (SIMULATES->SIMILATES) SO WELL AND THEN AGAIN I THINK IT IS MY DUTY TO SUSPECT HIM UNDER THE PECULIAR CIRCUMSTANCES IN WHICH I AM PLACED +533-131556-0004-825: (I->*) HAVE DONE WELL TO RECORD (THEM SO MINUTELY->HIM SUMINUTELY) +533-131556-0005-826: (THEY->THE YEAR) HAD (BETAKEN->TAKEN) THEMSELVES TO THEIR WORK I (LESS TO->LEST) DIVERT MY MIND THAN TO (DEPRECATE->THE PROCATE) CONVERSATION HAD PROVIDED MYSELF WITH (A->THE) BOOK +533-131556-0006-827: I AM TOO (WELL ACQUAINTED->EQUANT) WITH YOUR CHARACTER AND CONDUCT TO FEEL ANY REAL FRIENDSHIP FOR YOU AND AS I AM WITHOUT YOUR TALENT FOR DISSIMULATION I CANNOT ASSUME (THE APPEARANCE->YOUR PUREST) OF IT +533-131556-0007-828: (UPON->UP AND) PERUSING THIS SHE TURNED SCARLET AND BIT HER LIP +533-131556-0008-829: YOU MAY GO (MILICENT->MILLSON) AND SHE'LL (FOLLOW IN A WHILE MILICENT->FOLLOWING AWHILE MELLICENT) WENT +533-131556-0009-830: (WILL YOU->OLI') OBLIGE ME (HELEN->ALAN) CONTINUED SHE +533-131556-0010-831: (AH->HA) YOU ARE SUSPICIOUS +533-131556-0011-832: IF I WERE SUSPICIOUS I REPLIED I SHOULD HAVE DISCOVERED YOUR INFAMY LONG BEFORE +533-131556-0012-833: (I->*) ENJOY (A MOONLIGHT->YOU'VE MALE) RAMBLE AS WELL AS YOU I ANSWERED STEADILY FIXING MY EYES (UPON HER->UP ON EARTH) AND (THE SHRUBBERY->FREDERI) HAPPENS TO BE ONE OF MY (FAVOURITE->FAVORITE) RESORTS +533-131556-0013-834: SHE COLOURED AGAIN EXCESSIVELY AND REMAINED SILENT PRESSING HER FINGER AGAINST HER TEETH AND GAZING INTO THE FIRE +533-131556-0014-835: I (WATCHED HER->WATCH FOR) A FEW MOMENTS (WITH A->TO THE) FEELING OF MALEVOLENT GRATIFICATION THEN MOVING TOWARDS THE DOOR I CALMLY ASKED IF SHE HAD ANYTHING MORE TO SAY +533-131556-0015-836: YES YES +533-131556-0016-837: SUPPOSE I DO +533-131556-0017-838: SHE PAUSED IN EVIDENT DISCONCERTION AND PERPLEXITY MINGLED WITH ANGER SHE DARED NOT SHOW +533-131556-0018-839: I CANNOT RENOUNCE WHAT IS DEARER THAN LIFE SHE MUTTERED IN A LOW HURRIED TONE +533-131556-0019-840: IF YOU ARE GENEROUS HERE IS A FITTING OPPORTUNITY FOR THE EXERCISE OF YOUR MAGNANIMITY IF YOU ARE PROUD HERE AM I YOUR RIVAL (READY->RATHER) TO (ACKNOWLEDGE->ANNOUNCE) MYSELF YOUR (DEBTOR->DEPTOR) FOR AN ACT OF (THE->*) MOST NOBLE FORBEARANCE +533-131556-0020-841: I SHALL NOT TELL HIM +533-131556-0021-842: GIVE ME NO THANKS IT IS NOT FOR YOUR SAKE THAT I REFRAIN +533-131556-0022-843: AND (MILICENT->MILLICENT) WILL (YOU->IT) TELL HER +533-131556-0023-844: I (WOULD->WILL) NOT FOR MUCH THAT (SHE->YOU) SHOULD (KNOW THE->NOT) INFAMY AND DISGRACE OF HER RELATION +533-131556-0024-845: YOU USE (HARD->OUR) WORDS MISSUS HUNTINGDON BUT I CAN PARDON YOU +533-131556-0025-846: HOW DARE YOU MENTION HIS NAME TO ME +533-131562-0000-847: IT SEEMS VERY INTERESTING LOVE SAID HE LIFTING HIS HEAD AND (TURNING->SHOWING) TO (WHERE I STOOD->HER EYES TOO) WRINGING MY (HANDS->HAND) IN SILENT (RAGE->RATE) AND ANGUISH BUT IT'S RATHER LONG (I'LL->I) LOOK AT IT SOME OTHER TIME AND MEANWHILE I'LL TROUBLE YOU FOR YOUR KEYS MY DEAR WHAT (KEYS->CASE) +533-131562-0001-848: (THE KEYS->IT ACCUSE) OF YOUR CABINET (DESK DRAWERS->DESKED RAOUL) AND WHATEVER ELSE YOU POSSESS SAID HE RISING AND HOLDING OUT HIS HAND +533-131562-0002-849: THE KEY OF MY (DESK->VES) IN FACT WAS AT THAT MOMENT IN (THE LOCK->LOVE) AND THE OTHERS WERE ATTACHED TO IT 
+533-131562-0003-850: NOW THEN SNEERED HE WE MUST HAVE A CONFISCATION OF PROPERTY +533-131562-0004-851: AND (PUTTING->PUT IN) THE KEYS INTO HIS POCKET HE WALKED INTO THE LIBRARY +533-131562-0005-852: THAT AND ALL REPLIED THE MASTER AND (THE->*) THINGS WERE CLEARED AWAY +533-131562-0006-853: MISTER (HUNTINGDON->HANTINGDON) THEN WENT (UP STAIRS->UPSTAIRS) +533-131562-0007-854: MUTTERED HE STARTING BACK SHE'S (THE->*) VERY DEVIL FOR (*->A) SPITE +533-131562-0008-855: I DIDN'T SAY (I'D->I'VE) BROKEN IT DID I RETURNED HE +533-131562-0009-856: I SHALL PUT YOU (UPON->UP IN) A SMALL (MONTHLY ALLOWANCE->MOUTHFUL LAW AS) IN FUTURE FOR YOUR OWN PRIVATE EXPENSES AND YOU NEEDN'T TROUBLE YOURSELF ANY MORE ABOUT MY CONCERNS I SHALL LOOK OUT FOR A STEWARD MY DEAR I WON'T EXPOSE YOU TO (THE->*) TEMPTATION +533-131562-0010-857: AND AS FOR THE (HOUSEHOLD->HOUSE OR) MATTERS MISSUS (GREAVES->GREEBS) MUST BE VERY PARTICULAR IN KEEPING HER ACCOUNTS WE MUST GO (UPON->UP IN) AN ENTIRELY NEW PLAN +533-131562-0011-858: WHAT GREAT DISCOVERY HAVE YOU MADE NOW MISTER (HUNTINGDON->HARDINGEN) +533-131562-0012-859: (HAVE->IF) I (ATTEMPTED->ATTENDED) TO DEFRAUD YOU +533-131562-0013-860: NOT IN MONEY MATTERS EXACTLY IT SEEMS BUT (IT'S BEST->IS FAST) TO KEEP OUT OF THE WAY OF TEMPTATION +533-131562-0014-861: HERE (BENSON->BUILTON) ENTERED (WITH->*) THE CANDLES AND THERE FOLLOWED (A BRIEF->THE ROOF) INTERVAL OF SILENCE I SITTING (STILL IN->STEALING) MY CHAIR AND HE STANDING WITH HIS BACK TO THE FIRE SILENTLY TRIUMPHING IN MY DESPAIR +533-131562-0015-862: I KNOW THAT DAY AFTER DAY SUCH FEELINGS (WILL->TO) RETURN UPON ME +533-131562-0016-863: I (TRY->TRIED) TO LOOK TO HIM AND RAISE MY HEART TO HEAVEN BUT IT WILL (CLEAVE->CLIFF) TO THE DUST +533-131564-0000-768: VAIN HOPE I FEAR +533-131564-0001-769: (MISTER->MISS) AND MISSUS (HATTERSLEY->HALTERSLEY) HAVE BEEN (STAYING AT THE GROVE->SEEING IT TO GROW) A FORTNIGHT AND AS (MISTER HARGRAVE->MISSUS HARGREAVE) IS STILL ABSENT AND (THE->*) WEATHER WAS REMARKABLY FINE I NEVER PASSED (A->THE) DAY WITHOUT SEEING MY TWO FRIENDS (MILICENT->MILLSON) AND (ESTHER->ASSER) EITHER THERE OR HERE +533-131564-0002-770: NO UNLESS YOU CAN TELL (ME->YOU) WHEN TO EXPECT HIM HOME +533-131564-0003-771: I CAN'T (YOU DON'T WANT->IT ANNOUNCE) HIM DO YOU +533-131564-0004-772: IT IS A RESOLUTION YOU (OUGHT TO HAVE FORMED->ARE REFORMED) LONG AGO +533-131564-0005-773: WE ALL HAVE A BIT OF A LIKING FOR HIM AT THE BOTTOM OF OUR (HEARTS->HEART) THOUGH WE CAN'T RESPECT HIM +533-131564-0006-774: NO I'D RATHER BE LIKE MYSELF (BAD AS->THAT WAS) I AM +533-131564-0007-775: NEVER MIND MY PLAIN SPEAKING SAID I IT IS FROM THE BEST OF MOTIVES +533-131564-0008-776: BUT TELL ME SHOULD YOU WISH (YOUR SONS->YOURSELVES) TO BE LIKE MISTER HUNTINGDON OR EVEN LIKE YOURSELF +533-131564-0009-777: OH NO (I COULDN'T->ECHOLON) STAND THAT +533-131564-0010-778: (FIRE->FAR) AND FURY +533-131564-0011-779: NOW DON'T (BURST->FORCE) INTO A TEMPEST AT THAT +533-131564-0012-780: BUT HANG IT THAT'S NOT MY FAULT +533-131564-0013-781: NOT (YEARS->EARS) FOR SHE'S ONLY FIVE AND TWENTY +533-131564-0014-782: WHAT (WOULD->DID) YOU MAKE OF ME AND THE CHILDREN TO BE SURE THAT (WORRY HER TO->WERE HE HURT) DEATH BETWEEN THEM +533-131564-0015-783: I KNOW THEY ARE BLESS THEM +533-131564-0016-784: (HE FOLLOWED ME INTO->IF I WILL MEAN TO) THE LIBRARY +533-131564-0017-785: I (SOUGHT OUT AND->SAW THEN) PUT INTO HIS (HANDS->HAND) TWO OF (MILICENT'S LETTERS->MILICENT LADDERS) ONE (DATED->DID IT) FROM LONDON AND WRITTEN DURING ONE OF HIS (WILDEST->WALLA'S) SEASONS 
OF RECKLESS DISSIPATION THE OTHER IN THE COUNTRY DURING (A LUCID->ELUSIVE) INTERVAL +533-131564-0018-786: THE FORMER WAS FULL OF TROUBLE AND ANGUISH NOT ACCUSING HIM BUT DEEPLY REGRETTING HIS CONNECTION WITH HIS PROFLIGATE COMPANIONS ABUSING MISTER (GRIMSBY AND->GRIM'S BEING) OTHERS INSINUATING BITTER THINGS AGAINST MISTER HUNTINGDON AND MOST (INGENIOUSLY THROWING->INGENUOUSLY THREW IN) THE BLAME OF HER HUSBAND'S MISCONDUCT ON (TO->THE) OTHER (MEN'S->MAN'S) SHOULDERS +533-131564-0019-787: I'VE BEEN A CURSED RASCAL GOD KNOWS SAID HE AS HE GAVE IT (A HEARTY->EARTHLY) SQUEEZE BUT YOU SEE IF I DON'T MAKE AMENDS FOR IT (D N->THEN) ME IF I DON'T +533-131564-0020-788: IF YOU (INTEND TO->INSENT) REFORM INVOKE GOD'S BLESSING (HIS->IS A) MERCY (AND HIS AID NOT HIS CURSE->IN THIS APE NOR DISCOURSE) +533-131564-0021-789: GOD HELP ME THEN FOR (I'M->I AM) SURE I (NEED IT->NEEDED) +533-131564-0022-790: (WHERE'S->WHERE IS) MILICENT +533-131564-0023-791: NAY NOT I SAID HE TURNING (HER ROUND->AROUND) AND PUSHING (HER->IT) TOWARDS ME +533-131564-0024-792: MILICENT FLEW TO THANK ME (OVERFLOWING WITH->OVERWHELMING ITS) GRATITUDE +533-131564-0025-793: CRIED SHE I COULDN'T HAVE INFLUENCED HIM I'M SURE BY ANYTHING THAT I COULD HAVE SAID +533-131564-0026-794: YOU NEVER TRIED ME (MILLY->MERELY) SAID HE +533-131564-0027-795: AFTER THAT THEY WILL REPAIR TO THEIR COUNTRY HOME +5442-32873-0000-1365: CAPTAIN LAKE DID NOT LOOK AT ALL LIKE A LONDON DANDY NOW +5442-32873-0001-1366: THERE WAS A VERY NATURAL SAVAGERY AND DEJECTION THERE AND A WILD (LEER IN HIS->URNOUS) YELLOW EYES RACHEL SAT DOWN +5442-32873-0002-1367: (A->AND) SLAVE ONLY THINK A SLAVE +5442-32873-0003-1368: OH FRIGHTFUL FRIGHTFUL IS IT A DREAM +5442-32873-0004-1369: OH FRIGHTFUL (FRIGHTFUL->DREADFUL) +5442-32873-0005-1370: STANLEY STANLEY IT WOULD BE MERCY TO KILL ME SHE BROKE OUT AGAIN +5442-32873-0006-1371: BRIGHT AND NATTY (WERE THE CHINTZ->WITH CHIN'S) CURTAINS AND THE LITTLE TOILET SET OUT NOT INELEGANTLY AND HER PET PIPING GOLDFINCH ASLEEP ON HIS PERCH WITH HIS BIT OF SUGAR BETWEEN THE (WIRES->WIVES) OF HIS CAGE HER PILLOW SO WHITE AND UNPRESSED WITH ITS LITTLE EDGING OF LACE +5442-32873-0007-1372: WHEN HE CAME BACK TO THE DRAWING ROOM A (TOILET->TALLED) BOTTLE OF (EAU DE COLOGNE->OVERCLONE) IN HIS HAND WITH HER LACE HANDKERCHIEF HE BATHED HER (TEMPLES->TEMPLE) AND FOREHEAD +5442-32873-0008-1373: THERE WAS NOTHING VERY BROTHERLY IN HIS LOOK AS HE PEERED INTO (HER->A) PALE SHARP FEATURES DURING THE PROCESS +5442-32873-0009-1374: THERE DON'T MIND ME SHE SAID SHARPLY AND GETTING UP SHE LOOKED DOWN AT HER DRESS AND THIN SHOES AND SEEMING TO RECOLLECT HERSELF SHE TOOK THE CANDLE HE HAD JUST SET DOWN AND WENT SWIFTLY TO HER ROOM +5442-32873-0010-1375: AND SHE THREW BACK HER VEIL AND GOING HURRIEDLY TO THE TOILET MECHANICALLY SURVEYED HERSELF (IN->FROM) THE GLASS +5442-32873-0011-1376: (RACHEL LAKE RACHEL LAKE->RICHLY LATER MID) WHAT ARE YOU NOW +5442-32873-0012-1377: I'LL STAY HERE THAT IS IN THE DRAWING ROOM SHE ANSWERED AND THE FACE WAS WITHDRAWN +5442-32873-0013-1378: (HE SLACKENED HIS PACE->HIS CLACK IN THE SPACE) AND (TAPPED->TOP) SHARPLY AT THE LITTLE WINDOW OF (THAT->THE) MODEST POST OFFICE AT WHICH THE YOUNG LADIES IN THE PONY CARRIAGE HAD PULLED UP THE DAY BEFORE AND WITHIN WHICH LUKE (WAGGOT->RAGGED) WAS WONT TO SLEEP IN A SORT OF WOODEN BOX THAT FOLDED UP AND APPEARED TO BE A CHEST OF DRAWERS ALL DAY +5442-32873-0014-1379: (LUKE->LOOK) TOOK CARE OF MISTER LARKIN'S (DOGS->DOG) AND GROOMED MISTER WYLDER'S HORSE AND CLEANED UP HIS (DOG->DOOR) CART 
FOR MARK BEING CLOSE ABOUT MONEY AND FINDING THAT THE THING WAS TO BE DONE MORE CHEAPLY THAT WAY PUT UP HIS HORSE AND (DOG CART->DORCART) IN THE POST OFFICE PREMISES AND SO EVADED THE LIVERY CHARGES OF THE BRANDON ARMS +5442-32873-0015-1380: BUT LUKE WAS (NOT->KNOWN) THERE AND CAPTAIN LAKE RECOLLECTING HIS HABITS AND HIS HAUNT HURRIED ON TO THE SILVER LION WHICH HAS ITS GABLE TOWARDS THE COMMON ONLY ABOUT A HUNDRED STEPS AWAY FOR DISTANCES ARE NOT GREAT IN (GYLINGDEN->GILINGDEN) +5442-32873-0016-1381: HERE WERE THE (FLOW->FLOOR) OF SOUL (AND->UN) OF STOUT LONG PIPES LONG YARNS AND TOLERABLY LONG CREDITS AND THE HUMBLE (SCAPEGRACES->CAPE BRACES) OF THE TOWN RESORTED THITHER FOR THE PLEASURES OF A CLUB LIFE AND OFTEN REVELLED DEEP INTO THE SMALL HOURS OF THE MORNING +5442-32873-0017-1382: LOSE NO TIME (AND->BUT) I'LL GIVE YOU HALF A CROWN +5442-32873-0018-1383: LUKE STUCK ON HIS GREASY (WIDEAWAKE->WIDE AWAKE) AND IN A FEW MINUTES MORE THE (DOG->DOOR) CART WAS (TRUNDLED->TUMBLED) OUT INTO THE LANE AND THE HORSE HARNESSED WENT BETWEEN THE SHAFTS WITH THAT WONDERFUL CHEERFULNESS WITH WHICH (THEY->THEIR) BEAR TO BE CALLED UP (UNDER->AND THE) STARTLING CIRCUMSTANCES (AT->AND) UNSEASONABLE HOURS +5442-32873-0019-1384: IF I THOUGHT YOU'D FAIL ME NOW (TAMAR->TO MORROW) I SHOULD NEVER COME BACK GOOD NIGHT (TAMAR->TO MORROW) +5442-41168-0000-1385: THE ACT SAID THAT IN CASE OF DIFFERENCE OF OPINION THERE MUST BE A BALLOT +5442-41168-0001-1386: HE WENT UP TO THE TABLE AND STRIKING IT WITH HIS FINGER RING HE SHOUTED LOUDLY A BALLOT +5442-41168-0002-1387: HE WAS SHOUTING FOR THE VERY (COURSE SERGEY->COARSE SURGY) IVANOVITCH HAD PROPOSED BUT IT WAS EVIDENT THAT HE HATED HIM AND ALL HIS PARTY AND THIS FEELING OF HATRED SPREAD THROUGH THE WHOLE PARTY AND ROUSED IN OPPOSITION TO IT THE SAME VINDICTIVENESS THOUGH IN A MORE SEEMLY FORM ON THE OTHER SIDE +5442-41168-0003-1388: SHOUTS WERE RAISED AND FOR A MOMENT ALL WAS CONFUSION SO THAT THE MARSHAL OF THE PROVINCE HAD TO CALL FOR (ORDER->ODO) A BALLOT +5442-41168-0004-1389: WE SHED OUR BLOOD FOR OUR COUNTRY +5442-41168-0005-1390: THE CONFIDENCE OF THE MONARCH (*->BUT) NO CHECKING THE ACCOUNTS OF THE (MARSHAL HE'S->MARTIAN IS) NOT A CASHIER BUT THAT'S NOT THE POINT +5442-41168-0006-1391: (VOTES->VAULTS) PLEASE BEASTLY +5442-41168-0007-1392: THEY EXPRESSED THE MOST IMPLACABLE HATRED +5442-41168-0008-1393: LEVIN DID NOT IN THE LEAST UNDERSTAND WHAT WAS THE MATTER AND HE (MARVELED->MARVELLED) AT THE PASSION WITH WHICH IT WAS DISPUTED WHETHER OR NOT THE DECISION ABOUT (FLEROV->FLARE OFF) SHOULD BE PUT TO THE VOTE +5442-41168-0009-1394: HE FORGOT AS (SERGEY IVANOVITCH->SO GIVANOVITCH) EXPLAINED TO HIM AFTERWARDS THIS (SYLLOGISM->DILIGION) THAT IT WAS NECESSARY FOR THE PUBLIC GOOD TO GET RID OF THE MARSHAL OF THE PROVINCE THAT TO GET (RID OF->INTO) THE (MARSHAL->MARTIAN) IT WAS NECESSARY TO HAVE A MAJORITY OF VOTES THAT TO GET A MAJORITY OF (VOTES->BOATS) IT WAS NECESSARY TO SECURE (FLEROV'S->FUROV'S) RIGHT TO VOTE THAT TO SECURE THE RECOGNITION OF (FLEROV'S->FLORA'S) RIGHT TO VOTE THEY MUST DECIDE ON THE INTERPRETATION TO BE PUT ON THE ACT +5442-41168-0010-1395: BUT LEVIN FORGOT ALL THAT AND IT WAS PAINFUL TO HIM TO SEE ALL THESE EXCELLENT PERSONS FOR WHOM HE HAD A RESPECT IN SUCH AN UNPLEASANT AND VICIOUS STATE OF EXCITEMENT +5442-41168-0011-1396: TO ESCAPE FROM THIS PAINFUL FEELING HE WENT AWAY INTO THE OTHER ROOM WHERE THERE WAS NOBODY EXCEPT THE WAITERS AT THE REFRESHMENT BAR +5442-41168-0012-1397: HE PARTICULARLY LIKED THE WAY ONE (GRAY WHISKERED->GREY WHISKIRT) 
WAITER WHO SHOWED HIS (SCORN->CORN) FOR THE OTHER YOUNGER ONES AND WAS (JEERED->JERED) AT BY THEM WAS TEACHING THEM HOW TO FOLD UP NAPKINS PROPERLY +5442-41168-0013-1398: LEVIN ADVANCED BUT UTTERLY FORGETTING WHAT HE WAS TO DO AND MUCH EMBARRASSED HE TURNED TO SERGEY IVANOVITCH WITH THE QUESTION WHERE AM I TO PUT IT +5442-41168-0014-1399: (SERGEY IVANOVITCH->SOJOURNOVITCH) FROWNED +5442-41168-0015-1400: THAT IS A MATTER FOR EACH MAN'S OWN DECISION HE SAID SEVERELY +5442-41168-0016-1401: HAVING PUT IT IN HE RECOLLECTED THAT HE OUGHT TO HAVE THRUST HIS LEFT HAND TOO AND SO HE THRUST IT (IN->*) THOUGH TOO LATE AND STILL MORE OVERCOME WITH CONFUSION HE BEAT A HASTY RETREAT INTO THE BACKGROUND +5442-41168-0017-1402: A HUNDRED AND TWENTY SIX FOR ADMISSION NINETY EIGHT AGAINST +5442-41168-0018-1403: SANG (OUT->ALL) THE VOICE OF THE SECRETARY WHO COULD NOT PRONOUNCE THE LETTER R +5442-41168-0019-1404: THEN THERE WAS A LAUGH (A BUTTON->OF BOTTOM) AND TWO (NUTS->KNOTS) WERE FOUND IN THE BOX +5442-41168-0020-1405: BUT THE OLD PARTY DID NOT CONSIDER THEMSELVES CONQUERED +5442-41168-0021-1406: (IN REPLY SNETKOV->INTERPLIES NED COUGH) SPOKE OF THE TRUST (THE->AND) NOBLEMEN OF THE PROVINCE HAD PLACED (IN->ON) HIM THE (AFFECTION->EFFECT ON) THEY HAD SHOWN HIM WHICH HE DID NOT DESERVE AS HIS ONLY MERIT HAD BEEN HIS ATTACHMENT TO THE NOBILITY TO WHOM HE HAD DEVOTED TWELVE YEARS OF SERVICE +5442-41168-0022-1407: THIS EXPRESSION IN THE MARSHAL'S FACE WAS PARTICULARLY TOUCHING TO LEVIN BECAUSE ONLY THE DAY (BEFORE->FOR) HE HAD BEEN AT HIS HOUSE ABOUT HIS (TRUSTEE->TRUSTY) BUSINESS AND HAD SEEN HIM IN ALL HIS GRANDEUR A KIND HEARTED FATHERLY MAN +5442-41168-0023-1408: IF THERE ARE MEN YOUNGER AND MORE DESERVING THAN I LET THEM SERVE +5442-41168-0024-1409: AND THE MARSHAL DISAPPEARED THROUGH A SIDE DOOR +5442-41168-0025-1410: (THEY->THERE) WERE TO PROCEED IMMEDIATELY TO THE ELECTION +5442-41168-0026-1411: (TWO->DO) NOBLE GENTLEMEN WHO HAD A WEAKNESS (FOR->WAS) STRONG DRINK HAD BEEN MADE DRUNK BY THE PARTISANS OF SNETKOV AND (A->THE) THIRD HAD BEEN ROBBED OF HIS UNIFORM +5442-41168-0027-1412: ON LEARNING THIS THE NEW PARTY HAD MADE HASTE DURING THE DISPUTE ABOUT (FLEROV->FLAREFF) TO SEND SOME OF THEIR MEN IN A SLEDGE TO CLOTHE THE STRIPPED GENTLEMAN AND TO BRING ALONG ONE OF THE INTOXICATED TO THE MEETING +5442-41169-0000-1413: LEVIN DID NOT CARE TO EAT AND HE WAS NOT SMOKING HE DID NOT WANT TO JOIN HIS OWN FRIENDS THAT IS (SERGEY->SO SHE) IVANOVITCH STEPAN ARKADYEVITCH SVIAZHSKY AND THE REST BECAUSE VRONSKY IN (HIS EQUERRY'S->AN EQUERRIES) UNIFORM WAS STANDING WITH THEM IN EAGER CONVERSATION +5442-41169-0001-1414: HE WENT TO THE WINDOW AND SAT DOWN SCANNING THE GROUPS AND LISTENING TO WHAT WAS BEING SAID AROUND HIM +5442-41169-0002-1415: HE'S SUCH A BLACKGUARD +5442-41169-0003-1416: I HAVE TOLD HIM SO BUT IT MAKES NO DIFFERENCE ONLY THINK OF IT +5442-41169-0004-1417: THESE PERSONS WERE UNMISTAKABLY SEEKING A PLACE WHERE THEY COULD TALK WITHOUT BEING OVERHEARD +5442-41169-0005-1418: SHALL WE GO ON YOUR EXCELLENCY FINE CHAMPAGNE +5442-41169-0006-1419: (LAST YEAR AT OUR->MUST YOU ARE A) DISTRICT (MARSHAL NIKOLAY->MARTIAL NICCLAY) IVANOVITCH'S +5442-41169-0007-1420: OH STILL JUST THE SAME ALWAYS AT A LOSS THE LANDOWNER ANSWERED WITH A RESIGNED SMILE BUT WITH AN EXPRESSION OF SERENITY AND CONVICTION THAT SO IT MUST BE +5442-41169-0008-1421: WHY WHAT IS (THERE->THAT) TO UNDERSTAND +5442-41169-0009-1422: (THERE'S->THERE IS) NO MEANING IN IT AT ALL +5442-41169-0010-1423: THEN TOO ONE MUST KEEP UP CONNECTIONS 
+5442-41169-0011-1424: IT'S A MORAL OBLIGATION OF A SORT +5442-41169-0012-1425: AND THEN TO TELL THE TRUTH THERE'S ONE'S OWN (INTERESTS->INTEREST) +5442-41169-0013-1426: (THEY'RE->THEIR) PROPRIETORS OF (A SORT->ASSAULT) BUT (WE'RE->WE ARE) THE LANDOWNERS +5442-41169-0014-1427: THAT IT MAY BE BUT STILL IT OUGHT TO BE TREATED A LITTLE MORE RESPECTFULLY +5442-41169-0015-1428: IF (WE'RE->WE ARE) LAYING OUT A GARDEN (PLANNING->CLANNING) ONE BEFORE THE HOUSE YOU KNOW AND THERE (YOU'VE->YOU HAVE) A TREE (THAT'S->THAT) STOOD (FOR->IN) CENTURIES IN THE VERY SPOT OLD AND GNARLED IT MAY BE AND YET YOU DON'T CUT DOWN THE OLD FELLOW TO MAKE ROOM FOR THE (FLOWERBEDS->FLOWER BEDS) BUT LAY OUT YOUR BEDS SO AS TO TAKE ADVANTAGE OF THE TREE +5442-41169-0016-1429: WELL AND HOW IS YOUR LAND DOING +5442-41169-0017-1430: BUT ONE'S WORK IS THROWN IN FOR NOTHING +5442-41169-0018-1431: OH WELL ONE DOES IT WHAT WOULD YOU HAVE +5442-41169-0019-1432: AND (WHAT'S->ONCE) MORE THE LANDOWNER WENT ON LEANING HIS ELBOWS ON THE WINDOW AND CHATTING ON MY SON I MUST TELL YOU HAS NO TASTE FOR IT +5442-41169-0020-1433: SO THERE'LL BE NO ONE TO KEEP IT UP AND YET ONE DOES IT +5442-41169-0021-1434: WE WALKED ABOUT THE FIELDS AND THE GARDEN NO SAID HE STEPAN (VASSILIEVITCH->MISSILEVITCH) EVERYTHING'S WELL LOOKED AFTER BUT YOUR (GARDEN'S->GARDENS) NEGLECTED +5442-41169-0022-1435: TO MY THINKING I'D (CUT->GOT) DOWN (THAT LIME->THE LINE) TREE +5442-41169-0023-1436: HERE (YOU'VE->YOU) THOUSANDS OF LIMES AND EACH WOULD MAKE TWO GOOD BUNDLES OF BARK +5442-41169-0024-1437: YOU'RE MARRIED (I'VE->I) HEARD SAID THE LANDOWNER +5442-41169-0025-1438: YES (IT'S RATHER->AND JARDA) STRANGE HE WENT ON +5442-41169-0026-1439: THE LANDOWNER CHUCKLED UNDER HIS WHITE (MUSTACHES->MOUSTACHES) +5442-41169-0027-1440: WHY DON'T WE (CUT->GO) DOWN OUR (PARKS->BOX) FOR TIMBER +5442-41169-0028-1441: SAID LEVIN RETURNING TO A THOUGHT THAT HAD STRUCK HIM +5442-41169-0029-1442: THERE'S (A->THE) CLASS INSTINCT TOO OF WHAT ONE OUGHT AND (OUGHTN'T->OUGHT NOT KNOWN) TO DO +5442-41169-0030-1443: THERE'S THE PEASANTS TOO I WONDER AT THEM SOMETIMES ANY GOOD PEASANT TRIES TO TAKE ALL THE LAND HE CAN +5442-41169-0031-1444: WITHOUT A RETURN TOO (AT->ADD) A SIMPLE LOSS +5484-24317-0000-571: WHEN HE CAME FROM THE BATH (PROCLUS->PROCLASS) VISITED HIM AGAIN +5484-24317-0001-572: BUT (HERMON->HARMON) WAS NOT IN THE MOOD TO SHARE A JOYOUS REVEL AND HE FRANKLY SAID SO ALTHOUGH IMMEDIATELY AFTER HIS RETURN HE HAD ACCEPTED THE INVITATION TO THE FESTIVAL WHICH THE WHOLE FELLOWSHIP OF ARTISTS WOULD GIVE THE FOLLOWING DAY (IN HONOUR->AN HONOR) OF THE (SEVENTIETH->SEVENTEENTH) BIRTHDAY OF THE OLD SCULPTOR (EUPHRANOR->EUPHRANER) +5484-24317-0002-573: SHE WOULD APPEAR HERSELF AT DESSERT AND THE BANQUET MUST THEREFORE BEGIN AT AN UNUSUALLY EARLY HOUR +5484-24317-0003-574: SO THE ARTIST FOUND HIMSELF OBLIGED TO RELINQUISH HIS OPPOSITION +5484-24317-0004-575: THE BANQUET WAS TO BEGIN IN A FEW HOURS YET HE COULD NOT LET THE DAY PASS WITHOUT SEEING DAPHNE AND TELLING HER THE WORDS OF THE ORACLE +5484-24317-0005-576: HE LONGED WITH ARDENT YEARNING FOR THE SOUND OF HER VOICE AND STILL MORE TO UNBURDEN HIS SORELY TROUBLED SOUL TO HER +5484-24317-0006-577: SINCE HIS RETURN FROM THE ORACLE THE FEAR THAT THE (RESCUED->RESCUE) DEMETER MIGHT YET BE THE WORK OF (MYRTILUS->MERTOLUS) HAD AGAIN MASTERED HIM +5484-24317-0007-578: THE APPROVAL AS WELL AS THE (DOUBTS->DOUBT) WHICH IT (AROUSED->ARISED) IN OTHERS STRENGTHENED HIS OPINION ALTHOUGH EVEN NOW HE COULD NOT SUCCEED IN BRINGING IT INTO HARMONY WITH 
THE FACTS +5484-24317-0008-579: THEN HE WENT DIRECTLY TO THE (NEIGHBOURING->NEIGHBORING) PALACE THE QUEEN MIGHT HAVE APPEARED ALREADY AND IT WOULD NOT DO TO KEEP HER WAITING +5484-24317-0009-580: HITHERTO THE MERCHANT HAD BEEN INDUCED IT IS TRUE TO ADVANCE LARGE SUMS OF MONEY TO THE QUEEN BUT THE LOYAL DEVOTION WHICH HE SHOWED TO HER ROYAL HUSBAND HAD RENDERED IT IMPOSSIBLE TO GIVE HIM EVEN A HINT OF THE CONSPIRACY +5484-24317-0010-581: WHEN (HERMON ENTERED->HERMAN ANSWERED) THE RESIDENCE OF THE (GRAMMATEUS->GRAMMATIUS) IN THE PALACE THE GUESTS HAD ALREADY ASSEMBLED +5484-24317-0011-582: (THE PLACE->THEY PLACED) BY (HERMON'S->HAHMON'S) SIDE WHICH (ALTHEA->ALTHIE) HAD CHOSEN FOR HERSELF WOULD THEN BE GIVEN UP TO (ARSINOE->ARSENO) +5484-24317-0012-583: TRUE AN INTERESTING CONVERSATION STILL HAD POWER TO CHARM HIM BUT OFTEN DURING ITS CONTINUANCE THE FULL CONSCIOUSNESS OF HIS MISFORTUNE FORCED ITSELF UPON HIS MIND FOR THE MAJORITY OF THE SUBJECTS DISCUSSED BY THE ARTISTS CAME TO THEM THROUGH THE MEDIUM OF SIGHT AND REFERRED TO NEW CREATIONS OF ARCHITECTURE SCULPTURE AND PAINTING FROM WHOSE ENJOYMENT (HIS->IS) BLINDNESS (DEBARRED->DEBARED) HIM +5484-24317-0013-584: A STRANGER OUT OF HIS OWN SPHERE HE (FELT->FELL) CHILLED AMONG THESE CLOSELY UNITED MEN AND WOMEN TO WHOM NO (TIE->TYPE) BOUND HIM SAVE THE PRESENCE OF THE SAME HOST +5484-24317-0014-585: (CRATES->CREEDS) HAD REALLY BEEN INVITED IN ORDER TO WIN HIM OVER TO THE QUEEN'S CAUSE BUT CHARMING FAIR HAIRED (NICO->NIGO) HAD BEEN COMMISSIONED BY THE CONSPIRATORS TO PERSUADE HIM TO SING (ARSINOE'S->ARSENO'S) PRAISES AMONG HIS PROFESSIONAL ASSOCIATES +5484-24317-0015-586: HIS SON HAD BEEN (THIS->THE) ROYAL (DAME'S->JAMES'S) FIRST HUSBAND AND SHE HAD DESERTED HIM TO MARRY (LYSIMACHUS->LISMACHUS) THE AGED KING OF THRACE +5484-24317-0016-587: THE KING'S SISTER THE OBJECT OF HIS LOVE CRIED (HERMON->HARMON) INCREDULOUSLY +5484-24317-0017-588: WE WOMEN ARE ONLY AS OLD AS WE LOOK AND THE LEECHES (AND TIRING WOMEN->ENTIRE WOMAN) OF THIS BEAUTY OF FORTY PRACTISE ARTS WHICH GIVE HER THE APPEARANCE OF TWENTY FIVE YET PERHAPS THE KING VALUES HER INTELLECT MORE THAN HER PERSON AND THE WISDOM OF A HUNDRED SERPENTS IS CERTAINLY UNITED IN THIS WOMAN'S HEAD +5484-24317-0018-589: THE THREE MOST TRUSTWORTHY ONES (ARE HERE AMYNTAS->I HEAR I MEANTIS) THE LEECH CHRYSIPPUS (AND->IN) THE ADMIRABLE (PROCLUS->PROCLASS) +5484-24317-0019-590: LET US HOPE THAT YOU WILL MAKE THIS THREE LEAVED CLOVER THE LUCK PROMISING (FOUR LEAVED->FOLLY TO) ONE +5484-24317-0020-591: YOUR UNCLE TOO HAS OFTEN WITH (PRAISEWORTHY->PRAISED WORTHY) GENEROSITY HELPED (ARSINOE->AUTON) IN MANY (AN->*) EMBARRASSMENT +5484-24317-0021-592: HOW LONG HE KEPT YOU WAITING (FOR->FROM) THE FIRST WORD CONCERNING A WORK WHICH JUSTLY TRANSPORTED THE WHOLE CITY WITH DELIGHT +5484-24317-0022-593: WHEN HE DID FINALLY SUMMON YOU HE SAID THINGS WHICH MUST HAVE WOUNDED YOU +5484-24317-0023-594: THAT IS GOING TOO FAR REPLIED (HERMON->HARMON) +5484-24317-0024-595: HE WINKED AT HER AND MADE A SIGNIFICANT GESTURE AS HE SPOKE AND THEN INFORMED THE BLIND ARTIST HOW GRACIOUSLY (ARSINOE->ARSENO) HAD REMEMBERED HIM WHEN SHE HEARD OF THE REMEDY BY WHOSE AID MANY A WONDERFUL CURE OF BLIND (EYES->EYE) HAD BEEN MADE IN (RHODES->ROADS) +5484-24317-0025-596: THE ROYAL LADY HAD INQUIRED ABOUT HIM AND HIS SUFFERINGS WITH ALMOST SISTERLY INTEREST AND ALTHEA EAGERLY CONFIRMED THE STATEMENT +5484-24317-0026-597: (HERMON->HERMAN) LISTENED TO THE (PAIR IN->PARENT) SILENCE +5484-24317-0027-598: THE (RHODIAN->RADIAN) WAS JUST BEGINNING TO 
PRAISE (ARSINOE->ARSENAL) ALSO AS A SPECIAL FRIEND AND CONNOISSEUR OF THE SCULPTOR'S ART WHEN CRATES (HERMON'S->HERMANN'S) FELLOW STUDENT (ASKED->ASK) THE BLIND ARTIST IN BEHALF OF HIS BEAUTIFUL COMPANION WHY HIS DEMETER WAS PLACED UPON A PEDESTAL (WHICH->WITCH) TO OTHERS AS WELL AS HIMSELF SEEMED TOO HIGH FOR THE SIZE OF THE STATUE +5484-24317-0028-599: YET WHAT MATTERED IT EVEN IF THESE MISERABLE PEOPLE CONSIDERED THEMSELVES DECEIVED AND POINTED THE FINGER OF SCORN AT HIM +5484-24317-0029-600: A WOMAN WHO YEARNS FOR THE REGARD OF ALL MEN AND MAKES LOVE A TOY EASILY LESSENS THE DEMANDS SHE IMPOSES UPON INDIVIDUALS +5484-24317-0030-601: ONLY EVEN THOUGH LOVE HAS WHOLLY DISAPPEARED SHE STILL CLAIMS CONSIDERATION AND (ALTHEA->ALTHIA) DID NOT WISH TO LOSE (HERMON'S->HERMANN'S) REGARD +5484-24317-0031-602: HOW INDIFFERENT YOU LOOK BUT I TELL YOU HER DEEP BLUE EYES FLASHED AS SHE SPOKE THAT SO LONG AS YOU (WERE->WAS) STILL A GENUINE CREATING ARTIST THE CASE WAS DIFFERENT +5484-24317-0032-603: THOUGH SO LOUD A DENIAL IS WRITTEN ON YOUR FACE I PERSIST IN MY CONVICTION AND THAT NO IDLE DELUSION (ENSNARES->AND SNATHS) ME I CAN PROVE +5484-24317-0033-604: IT WAS NAY IT COULD HAVE BEEN NOTHING ELSE THAT VERY SPIDER +5484-24318-0000-605: NOT A SOUND IF YOU VALUE YOUR LIVES +5484-24318-0001-606: TO OFFER RESISTANCE WOULD HAVE BEEN MADNESS FOR EVEN (HERMON->HERMANN) PERCEIVED BY THE LOUD CLANKING OF WEAPONS AROUND THEM (THE->THEY) GREATLY SUPERIOR POWER OF THE ENEMY AND THEY WERE ACTING BY THE ORDERS OF THE KING TO THE PRISON NEAR THE PLACE OF EXECUTION +5484-24318-0002-607: WAS HE TO BE LED TO THE EXECUTIONER'S BLOCK +5484-24318-0003-608: WHAT PLEASURE HAD LIFE TO OFFER HIM THE BLIND MAN WHO WAS ALREADY DEAD TO HIS ART +5484-24318-0004-609: OUGHT HE NOT TO GREET (THIS->HIS) SUDDEN END AS A (BOON->BOOM) FROM THE IMMORTALS +5484-24318-0005-610: DID IT NOT SPARE HIM A HUMILIATION AS GREAT AND PAINFUL AS COULD BE IMAGINED +5484-24318-0006-611: WHATEVER MIGHT AWAIT HIM HE DESIRED NO BETTER FATE +5484-24318-0007-612: IF HE HAD PASSED INTO ANNIHILATION HE (HERMON->HERMAN) WISHED TO FOLLOW HIM THITHER AND ANNIHILATION CERTAINLY MEANT REDEMPTION FROM PAIN AND MISERY +5484-24318-0008-613: BUT IF HE WERE DESTINED TO MEET HIS (MYRTILUS->BURTLES) AND HIS MOTHER IN THE WORLD BEYOND THE GRAVE WHAT HAD HE NOT TO TELL THEM HOW SURE HE WAS (OF->A) FINDING A JOYFUL RECEPTION THERE FROM BOTH +5484-24318-0009-614: THE POWER WHICH DELIVERED HIM OVER TO DEATH JUST AT THAT MOMENT WAS NOT NEMESIS NO IT WAS A KINDLY DEITY +5484-24318-0010-615: YET IT WAS NO ILLUSION THAT DECEIVED HIM +5484-24318-0011-616: AGAIN HE HEARD THE BELOVED VOICE AND THIS TIME IT ADDRESSED NOT ONLY HIM BUT WITH THE UTMOST HASTE THE COMMANDER OF THE SOLDIERS +5484-24318-0012-617: SOMETIMES WITH TOUCHING ENTREATY SOMETIMES WITH IMPERIOUS COMMAND SHE PROTESTED AFTER GIVING HIM HER NAME THAT THIS MATTER COULD BE NOTHING BUT AN UNFORTUNATE MISTAKE +5484-24318-0013-618: LASTLY WITH EARNEST WARMTH SHE BESOUGHT HIM BEFORE TAKING THE PRISONERS AWAY TO PERMIT HER TO SPEAK TO THE COMMANDING GENERAL PHILIPPUS HER FATHER'S GUEST WHO SHE WAS CERTAIN WAS IN THE PALACE +5484-24318-0014-619: CRIED (HERMON->HERMANN) IN GRATEFUL AGITATION BUT SHE WOULD NOT LISTEN TO HIM AND (FOLLOWED->FOLLOW) THE SOLDIER WHOM THE CAPTAIN DETAILED TO GUIDE HER INTO THE PALACE +5484-24318-0015-620: TO MORROW YOU SHALL CONFESS TO ME WHO TREACHEROUSLY DIRECTED YOU TO THIS DANGEROUS PATH +5484-24318-0016-621: DAPHNE AGAIN PLEADED FOR THE LIBERATION OF THE PRISONERS BUT (PHILIPPUS->PHILIP WAS) 
SILENCED HER WITH (THE->A) GRAVE EXCLAMATION THE ORDER OF THE KING +5484-24318-0017-622: AS SOON AS THE CAPTIVE ARTIST WAS ALONE WITH THE WOMAN HE LOVED HE CLASPED HER HAND POURING FORTH INCOHERENT WORDS OF THE MOST ARDENT GRATITUDE AND WHEN HE FELT HER WARMLY (RETURN->RETURNED) THE PRESSURE HE COULD NOT RESTRAIN THE DESIRE TO CLASP HER TO HIS HEART +5484-24318-0018-623: IN SPITE OF HIS DEEP (MENTAL->MANTLE) DISTRESS HE COULD HAVE SHOUTED ALOUD IN HIS DELIGHT AND GRATITUDE +5484-24318-0019-624: HE MIGHT NOW HAVE BEEN PERMITTED TO BIND FOREVER TO HIS LIFE THE WOMAN WHO HAD JUST RESCUED HIM FROM THE GREATEST DANGER BUT THE CONFESSION HE MUST MAKE TO HIS FELLOW ARTISTS IN THE (PALAESTRA->PELLESTRA) THE FOLLOWING MORNING STILL SEALED HIS LIPS YET IN THIS HOUR HE FELT THAT HE WAS UNITED TO HER AND OUGHT NOT TO CONCEAL WHAT AWAITED HIM SO OBEYING A STRONG IMPULSE HE EXCLAIMED YOU KNOW THAT I LOVE YOU +5484-24318-0020-625: I LOVE YOU AND HAVE LOVED YOU ALWAYS +5484-24318-0021-626: (DAPHNE->JAPHANE) EXCLAIMED TENDERLY WHAT MORE IS NEEDED +5484-24318-0022-627: BUT (HERMON->HARMON) WITH DROOPING HEAD MURMURED TO MORROW I SHALL NO LONGER BE WHAT I AM NOW +5484-24318-0023-628: THEN (DAPHNE->JAPANE) RAISED HER FACE TO HIS ASKING SO THE (DEMETER->DEMEANOR) IS THE WORK OF (MYRTILUS->MYRTALIS) +5484-24318-0024-629: WHAT A TERRIBLE ORDEAL AGAIN AWAITS YOU +5484-24318-0025-630: AND I FOOL BLINDED ALSO IN MIND COULD BE VEXED WITH YOU FOR IT +5484-24318-0026-631: BRING THIS BEFORE YOUR MIND AND EVERYTHING ELSE THAT YOU MUST ACCEPT WITH IT IF YOU CONSENT (WHEN->WITH) THE TIME ARRIVES TO BECOME MINE CONCEAL (AND PALLIATE->IMPALION) NOTHING +5484-24318-0027-632: (SO ARCHIAS->SARKAIUS) INTENDED TO LEAVE THE CITY ON ONE OF HIS OWN SHIPS THAT VERY DAY +5484-24318-0028-633: (HE->SHE) HIMSELF ON THE WAY TO EXPOSE HIMSELF TO THE MALICE AND MOCKERY OF THE WHOLE CITY +5484-24318-0029-634: HIS HEART CONTRACTED PAINFULLY AND HIS SOLICITUDE ABOUT HIS UNCLE'S FATE INCREASED WHEN PHILIPPUS INFORMED HIM THAT THE CONSPIRATORS HAD BEEN ARRESTED AT THE BANQUET (AND->END) HEADED BY (AMYNTAS->A MEANTIS) THE (RHODIAN->RODIAN) CHRYSIPPUS AND (PROCLUS->PROCLAS) HAD PERISHED BY THE EXECUTIONER'S SWORD AT SUNRISE +5484-24318-0030-635: BESIDES HE KNEW THAT THE OBJECT OF HIS LOVE WOULD NOT PART FROM HIM WITHOUT GRANTING HIM ONE LAST WORD +5484-24318-0031-636: ON THE WAY HIS HEART THROBBED ALMOST TO BURSTING +5484-24318-0032-637: EVEN (DAPHNE'S->THESE) IMAGE AND WHAT THREATENED HER FATHER AND HER WITH HIM (RECEDED->WAS SEATED) FAR INTO THE BACKGROUND +5484-24318-0033-638: HE WAS APPEARING BEFORE HIS COMPANIONS ONLY TO GIVE TRUTH ITS JUST DUE +5484-24318-0034-639: THE EGYPTIAN OBEYED AND HIS MASTER CROSSED THE WIDE SPACE STREWN WITH SAND AND APPROACHED THE STAGE WHICH HAD BEEN ERECTED FOR THE (FESTAL->FEAST OF) PERFORMANCES EVEN HAD HIS EYES RETAINED THE POWER OF SIGHT HIS BLOOD WAS (COURSING->COARSING) SO (WILDLY->WIDELY) THROUGH HIS VEINS THAT HE MIGHT PERHAPS HAVE BEEN UNABLE TO DISTINGUISH THE STATUES AROUND HIM AND THE THOUSANDS OF SPECTATORS WHO CROWDED CLOSELY TOGETHER RICHLY GARLANDED THEIR CHEEKS GLOWING WITH ENTHUSIASM SURROUNDED THE ARENA (HERMON->HERMANN) +5484-24318-0035-640: SHOUTED HIS FRIEND (SOTELES IN->SARTUOUS AND) JOYFUL SURPRISE IN THE MIDST OF (THIS->HIS) PAINFUL WALK (HERMON->HERE ON) +5484-24318-0036-641: EVEN WHILE HE BELIEVED HIMSELF TO BE THE CREATOR OF THE DEMETER HE HAD BEEN SERIOUSLY TROUBLED BY THE PRAISE OF SO MANY CRITICS BECAUSE IT HAD EXPOSED HIM TO THE SUSPICION OF HAVING BECOME FAITHLESS TO HIS ART AND 
HIS NATURE +5484-24318-0037-642: HONOUR TO (MYRTILUS->MERTELUS) AND HIS ART BUT HE TRUSTED THIS NOBLE (FESTAL ASSEMBLAGE->FEAST A SEMBLAGE) WOULD PARDON THE UNINTENTIONAL DECEPTION AND AID HIS PRAYER FOR RECOVERY +5764-299665-0000-405: AFTERWARD IT WAS SUPPOSED THAT HE WAS SATISFIED WITH THE BLOOD OF OXEN (LAMBS->LAMPS) AND DOVES AND THAT IN EXCHANGE FOR OR (ON->IN) ACCOUNT OF THESE SACRIFICES (THIS->THESE) GOD GAVE (RAIN->REIN) SUNSHINE AND HARVEST +5764-299665-0001-406: WHETHER HE WAS THE CREATOR OF YOURSELF AND MYSELF +5764-299665-0002-407: WHETHER ANY PRAYER WAS EVER ANSWERED +5764-299665-0003-408: WHY DID HE CREATE THE (INTELLECTUALLY->INTELLECTUAL) INFERIOR +5764-299665-0004-409: WHY DID HE CREATE THE DEFORMED AND HELPLESS WHY DID HE CREATE THE CRIMINAL THE IDIOTIC THE INSANE +5764-299665-0005-410: ARE THE FAILURES UNDER OBLIGATION TO THEIR CREATOR +5764-299665-0006-411: (IS HE RESPONSIBLE->HIS IRRESPONSIBLE) FOR ALL THE WARS THAT HAVE BEEN (WAGED->RAGED) FOR ALL THE INNOCENT BLOOD THAT HAS BEEN SHED +5764-299665-0007-412: (IS->IF) HE RESPONSIBLE FOR THE CENTURIES OF SLAVERY FOR THE BACKS THAT HAVE BEEN SCARRED WITH (THE->A) LASH FOR THE (BABES->BABE) THAT HAVE BEEN SOLD FROM THE BREASTS OF MOTHERS FOR THE FAMILIES THAT HAVE BEEN SEPARATED AND DESTROYED +5764-299665-0008-413: IS (THIS GOD->THE SCOTT) RESPONSIBLE FOR RELIGIOUS PERSECUTION FOR THE INQUISITION FOR THE (THUMB->TEMP) SCREW AND RACK AND FOR ALL THE INSTRUMENTS OF TORTURE +5764-299665-0009-414: DID THIS (GOD ALLOW->GOT ALONE) THE CRUEL AND VILE TO DESTROY THE BRAVE AND VIRTUOUS +5764-299665-0010-415: DID HE (ALLOW->ALONE) TYRANTS TO SHED (THE->A) BLOOD OF PATRIOTS +5764-299665-0011-416: CAN WE CONCEIVE OF A DEVIL BASE ENOUGH TO PREFER HIS ENEMIES TO HIS FRIENDS +5764-299665-0012-417: HOW CAN WE ACCOUNT FOR THE WILD BEASTS THAT (DEVOUR->THE FOUR) HUMAN BEINGS FOR THE (FANGED->FACT) SERPENTS WHOSE BITE IS DEATH +5764-299665-0013-418: HOW CAN WE ACCOUNT FOR A WORLD WHERE (LIFE FEEDS->LIE FEATS) ON LIFE +5764-299665-0014-419: (DID->THE) INFINITE WISDOM INTENTIONALLY (PRODUCE THE->PRODUCED A) MICROSCOPIC BEASTS THAT (FEED->FEAT) UPON THE OPTIC (NERVE->NERVES) THINK OF BLINDING A MAN TO SATISFY THE APPETITE OF A MICROBE +5764-299665-0015-420: FEAR (BUILDS->BIDS) THE ALTAR AND OFFERS THE SACRIFICE +5764-299665-0016-421: FEAR ERECTS THE (CATHEDRAL->KITRAL) AND BOWS THE HEAD OF MAN IN WORSHIP +5764-299665-0017-422: LIPS RELIGIOUS AND FEARFUL TREMBLINGLY REPEAT THIS PASSAGE THOUGH HE SLAY ME YET WILL I TRUST HIM +5764-299665-0018-423: CAN WE SAY THAT HE CARED FOR THE CHILDREN OF MEN +5764-299665-0019-424: CAN WE SAY THAT HIS MERCY (ENDURETH->AND DUET) FOREVER +5764-299665-0020-425: DO WE PROVE HIS GOODNESS BY SHOWING THAT HE HAS OPENED THE EARTH AND SWALLOWED (THOUSANDS->THOUSAND) OF HIS HELPLESS CHILDREN (OR->ALL) THAT WITH THE VOLCANOES HE HAS OVERWHELMED THEM WITH RIVERS OF FIRE +5764-299665-0021-426: WAS (THERE->THEIR) GOODNESS WAS (THERE->THEIR) WISDOM IN THIS +5764-299665-0022-427: (OUGHT->ALL) THE SUPERIOR (RACES->RAYS) TO THANK GOD THAT THEY ARE NOT THE INFERIOR +5764-299665-0023-428: MOST PEOPLE (CLING->CLINK) TO THE SUPERNATURAL +5764-299665-0024-429: IF THEY GIVE UP ONE GOD THEY IMAGINE ANOTHER +5764-299665-0025-430: WHAT IS THIS POWER +5764-299665-0026-431: MAN ADVANCES (AND->A) NECESSARILY ADVANCES THROUGH EXPERIENCE +5764-299665-0027-432: A MAN WISHING TO GO TO A CERTAIN PLACE (COMES->COME) TO WHERE THE ROAD DIVIDES +5764-299665-0028-433: HE (HAS->IS) TRIED THAT ROAD AND KNOWS THAT IT IS THE WRONG ROAD 
+5764-299665-0029-434: A CHILD (CHARMED->SHOWN) BY THE BEAUTY OF THE FLAME (GRASPS->GRASPED) IT WITH (ITS->HIS) DIMPLED HAND +5764-299665-0030-435: THE POWER THAT (WORKS->WORK) FOR RIGHTEOUSNESS (HAS->HAD) TAUGHT THE CHILD A LESSON +5764-299665-0031-436: IT IS A RESULT +5764-299665-0032-437: IT IS INSISTED BY THESE THEOLOGIANS AND BY MANY OF THE (SO->SOUL) CALLED PHILOSOPHERS THAT THIS MORAL SENSE THIS SENSE OF DUTY OF OBLIGATION WAS IMPORTED AND THAT CONSCIENCE IS AN EXOTIC +5764-299665-0033-438: WE LIVE TOGETHER IN FAMILIES TRIBES AND NATIONS +5764-299665-0034-439: THEY ARE PRAISED ADMIRED AND RESPECTED +5764-299665-0035-440: THEY ARE REGARDED AS GOOD THAT IS TO SAY (AS->S) MORAL +5764-299665-0036-441: THE MEMBERS WHO ADD TO THE MISERY OF THE FAMILY THE TRIBE (OR->OF) THE NATION ARE CONSIDERED BAD MEMBERS +5764-299665-0037-442: THE GREATEST OF HUMAN BEINGS (HAS->HAD) SAID CONSCIENCE IS BORN OF LOVE +5764-299665-0038-443: AS PEOPLE ADVANCE THE REMOTE CONSEQUENCES ARE PERCEIVED +5764-299665-0039-444: THE IMAGINATION IS CULTIVATED +5764-299665-0040-445: A MAN (PUTS->BITS) HIMSELF IN THE PLACE OF ANOTHER +5764-299665-0041-446: THE SENSE OF DUTY BECOMES STRONGER MORE IMPERATIVE +5764-299665-0042-447: MAN JUDGES HIMSELF +5764-299665-0043-448: IN ALL THIS THERE IS NOTHING SUPERNATURAL +5764-299665-0044-449: MAN HAS DECEIVED HIMSELF +5764-299665-0045-450: (HAS CHRISTIANITY DONE GOOD->HESTERITY DONEGOOD) +5764-299665-0046-451: WHEN THE CHURCH HAD (CONTROL WERE->CONTROLLED WHERE) MEN MADE BETTER AND HAPPIER +5764-299665-0047-452: WHAT HAS RELIGION DONE FOR HUNGARY OR AUSTRIA +5764-299665-0048-453: (COULD->GOOD) THESE COUNTRIES HAVE BEEN WORSE WITHOUT RELIGION +5764-299665-0049-454: COULD THEY HAVE BEEN WORSE HAD THEY HAD ANY OTHER RELIGION THAN CHRISTIANITY +5764-299665-0050-455: WHAT DID CHRISTIANITY DO (FOR->FAULT) THEM +5764-299665-0051-456: THEY HATED PLEASURE +5764-299665-0052-457: THEY MUFFLED ALL THE BELLS OF GLADNESS +5764-299665-0053-458: THE RELIGION OF THE PURITAN WAS AN (UNADULTERATED->AN ADULTERATED) CURSE +5764-299665-0054-459: THE PURITAN BELIEVED THE BIBLE TO BE THE (WORD->WORTH) OF GOD AND THIS BELIEF HAS ALWAYS MADE THOSE WHO HELD IT CRUEL AND WRETCHED +5764-299665-0055-460: LET ME REFER TO JUST ONE FACT SHOWING THE INFLUENCE OF A BELIEF IN THE BIBLE ON HUMAN BEINGS +5764-299665-0056-461: THE QUEEN RECEIVED THE BIBLE KISSED IT AND PLEDGED HERSELF TO DILIGENTLY READ THEREIN +5764-299665-0057-462: IN OTHER WORDS IT WAS JUST AS FIENDISH JUST AS (INFAMOUS->IN FAMOUS) AS THE (CATHOLIC SPIRIT->CATTLE EXPERIOR) +5764-299665-0058-463: HAS THE (BIBLE->DIE BUT) MADE THE PEOPLE OF GEORGIA KIND AND MERCIFUL +5764-299665-0059-464: (*->WHO) RELIGION HAS BEEN TRIED AND IN ALL COUNTRIES IN ALL TIMES HAS FAILED +5764-299665-0060-465: RELIGION (HAS->HATH) ALWAYS BEEN THE ENEMY OF SCIENCE OF INVESTIGATION AND THOUGHT +5764-299665-0061-466: RELIGION (HAS->IS) NEVER MADE (MAN->MEN) FREE +5764-299665-0062-467: (IT HAS->HE JUST) NEVER MADE MAN MORAL TEMPERATE INDUSTRIOUS AND HONEST +5764-299665-0063-468: (ARE CHRISTIANS MORE->AH CHRISTIAN SMALL) TEMPERATE NEARER VIRTUOUS NEARER HONEST THAN SAVAGES +5764-299665-0064-469: CAN WE CURE DISEASE BY SUPPLICATION +5764-299665-0065-470: CAN WE RECEIVE VIRTUE OR (HONOR->HUNGER) AS ALMS +5764-299665-0066-471: RELIGION RESTS ON THE IDEA THAT NATURE HAS A MASTER AND THAT THIS MASTER WILL LISTEN TO PRAYER THAT (THIS->HIS) MASTER PUNISHES AND REWARDS THAT HE LOVES PRAISE AND FLATTERY AND HATES THE BRAVE AND FREE +5764-299665-0067-472: WE MUST HAVE (CORNER->CORN THE) 
STONES +5764-299665-0068-473: THE STRUCTURE MUST HAVE (A BASEMENT->ABASEMENT) +5764-299665-0069-474: IF WE BUILD WE MUST BEGIN AT THE BOTTOM +5764-299665-0070-475: I HAVE (A->IT) THEORY AND I HAVE FOUR CORNER STONES +5764-299665-0071-476: THE FIRST STONE (IS THAT MATTER->EAST AT MATTHOR) SUBSTANCE CANNOT BE DESTROYED CANNOT BE ANNIHILATED +5764-299665-0072-477: IF (THESE CORNER->THIS CORN THE) STONES ARE FACTS IT FOLLOWS AS A NECESSITY THAT MATTER AND FORCE ARE FROM (AND->END) TO ETERNITY THAT THEY CAN NEITHER BE INCREASED NOR DIMINISHED +5764-299665-0073-478: IT FOLLOWS THAT NOTHING HAS BEEN OR CAN BE CREATED THAT THERE NEVER HAS BEEN OR CAN BE A CREATOR +5764-299665-0074-479: IT (FOLLOWS->FOLLOWED) THAT THERE COULD NOT HAVE BEEN ANY INTELLIGENCE (ANY DESIGN->AND A DESIGNED) BACK OF MATTER AND FORCE +5764-299665-0075-480: I SAY WHAT I THINK +5764-299665-0076-481: EVERY EVENT HAS PARENTS +5764-299665-0077-482: THAT WHICH (HAS->HATH) NOT HAPPENED COULD NOT +5764-299665-0078-483: IN THE INFINITE (CHAIN THERE IS->CHANGE WREATHS) AND THERE CAN BE NO BROKEN NO MISSING LINK +5764-299665-0079-484: WE NOW KNOW THAT OUR FIRST PARENTS WERE NOT FOREIGNERS +5764-299665-0080-485: WE NOW KNOW IF WE KNOW ANYTHING THAT THE UNIVERSE IS NATURAL AND THAT (MEN->MAN) AND WOMEN HAVE BEEN NATURALLY PRODUCED +5764-299665-0081-486: WE KNOW THE PATHS THAT LIFE HAS (TRAVELED->TRAVELLED) +5764-299665-0082-487: WE KNOW THE FOOTSTEPS OF ADVANCE THEY HAVE BEEN (TRACED->PRAISED) +5764-299665-0083-488: (FOR->FOUR) THOUSANDS OF YEARS MEN AND WOMEN HAVE BEEN (TRYING->CRYING) TO REFORM THE WORLD +5764-299665-0084-489: WHY HAVE THE (REFORMERS FAILED->REFORMED FAITH) +5764-299665-0085-490: THEY DEPEND ON THE (LORD ON LUCK->LOT UNLUCK) AND CHARITY +5764-299665-0086-491: THEY (LIVE->LEAVE) BY FRAUD AND VIOLENCE AND BEQUEATH THEIR VICES TO THEIR CHILDREN +5764-299665-0087-492: FAILURE SEEMS TO BE THE (TRADEMARK->TRADE MARK) OF NATURE WHY +5764-299665-0088-493: NATURE (PRODUCES->PRODUCED) WITHOUT PURPOSE SUSTAINS WITHOUT INTENTION AND DESTROYS WITHOUT THOUGHT +5764-299665-0089-494: (MUST THE WORLD->MISTER BUILD) FOREVER (REMAIN THE->REMAINED A) VICTIM OF IGNORANT PASSION +5764-299665-0090-495: WHY SHOULD MEN AND WOMEN HAVE CHILDREN THAT THEY CANNOT TAKE CARE OF CHILDREN THAT ARE (BURDENS->A BURDEN) AND CURSES WHY +5764-299665-0091-496: PASSION (IS->EAST) AND (ALWAYS->ALL THIS) HAS BEEN DEAF +5764-299665-0092-497: LAW CAN PUNISH (BUT->THAT) IT CAN NEITHER REFORM CRIMINALS NOR PREVENT CRIME +5764-299665-0093-498: (THIS->THESE) CANNOT BE DONE BY TALK OR EXAMPLE +5764-299665-0094-499: THIS IS THE SOLUTION OF THE WHOLE QUESTION +5764-299665-0095-500: THIS (FREES WOMAN->FREEZWAMEN) +5764-299665-0096-501: (POVERTY->BAVARY) AND CRIME WILL BE CHILDLESS +5764-299665-0097-502: IT IS FAR BETTER TO BE FREE TO LEAVE THE (FORTS->FAULTS) AND BARRICADES OF FEAR TO STAND ERECT AND (FACE->FAITH) THE FUTURE WITH (A SMILE->US MIND) +6070-63485-0000-2599: (THEY'RE->THERE) DONE (FOR->FAR) SAID THE SCHOOLMASTER IN A (LOW KEY->LOKI) TO THE (CHOUETTE->SWEAT) OUT WITH YOUR VITRIOL AND MIND YOUR EYE +6070-63485-0001-2600: THE TWO MONSTERS TOOK OFF THEIR SHOES AND MOVED STEALTHILY ALONG KEEPING IN THE SHADOWS OF THE HOUSES +6070-63485-0002-2601: BY MEANS OF THIS STRATAGEM THEY FOLLOWED SO CLOSELY THAT ALTHOUGH WITHIN A FEW STEPS OF (SARAH AND->SEREN) TOM THEY DID NOT HEAR THEM +6070-63485-0003-2602: SARAH AND HER BROTHER HAVING AGAIN PASSED BY THE (TAPIS FRANC->TAPPY FRANK) ARRIVED CLOSE TO THE DILAPIDATED HOUSE WHICH WAS PARTLY IN RUINS AND ITS 
(OPENED->OPEN) CELLARS FORMED A KIND OF GULF ALONG WHICH THE STREET RAN IN THAT DIRECTION +6070-63485-0004-2603: IN AN INSTANT THE SCHOOLMASTER WITH A LEAP RESEMBLING IN STRENGTH AND AGILITY THE SPRING OF A TIGER SEIZED (SEYTON->SETON) WITH ONE HAND BY THE THROAT AND EXCLAIMED YOUR MONEY OR I WILL FLING YOU INTO THIS HOLE +6070-63485-0005-2604: NO SAID THE OLD BRUTE (GRUMBLINGLY->TREMBLINGLY) NO NOT ONE RING WHAT A SHAME +6070-63485-0006-2605: TOM SEYTON DID NOT LOSE HIS PRESENCE OF MIND DURING THIS SCENE RAPIDLY AND UNEXPECTEDLY AS IT HAD OCCURRED +6070-63485-0007-2606: (OH AH->UH) TO LAY A TRAP TO CATCH US REPLIED THE THIEF +6070-63485-0008-2607: THEN ADDRESSING THOMAS (SEYTON->SETTON) YOU KNOW THE (PLAIN->PLANE) OF SAINT DENIS +6070-63485-0009-2608: DID YOU SEE IN THE CABARET WE (HAVE->HAD) JUST LEFT FOR I KNOW YOU AGAIN THE MAN WHOM THE CHARCOAL MAN CAME TO SEEK +6070-63485-0010-2609: CRIED THE SCHOOLMASTER A THOUSAND FRANCS AND I'LL KILL HIM +6070-63485-0011-2610: WRETCH I DO NOT (SEEK->SEE) HIS LIFE REPLIED SARAH TO THE SCHOOLMASTER +6070-63485-0012-2611: LET'S GO AND MEET HIM +6070-63485-0013-2612: OLD (BOY->BY) IT WILL PAY FOR LOOKING AFTER +6070-63485-0014-2613: WELL MY WIFE SHALL BE THERE SAID THE SCHOOLMASTER YOU WILL TELL HER WHAT YOU WANT AND I SHALL SEE +6070-63485-0015-2614: IN THE PLAIN OF SAINT DENIS +6070-63485-0016-2615: BETWEEN SAINT (OUEN->WAT) AND THE ROAD OF LA (REVOLTE->REVOLT) AT THE END OF THE ROAD AGREED +6070-63485-0017-2616: HE HAD FORGOTTEN THE ADDRESS OF THE SELF STYLED (FAN->PAN) PAINTER +6070-63485-0018-2617: THE FIACRE STARTED +6070-86744-0000-2569: (FRANZ->FRANCE) WHO SEEMED ATTRACTED BY SOME INVISIBLE INFLUENCE TOWARDS THE COUNT IN WHICH TERROR WAS STRANGELY MINGLED FELT AN EXTREME RELUCTANCE TO PERMIT HIS FRIEND TO BE EXPOSED ALONE TO THE SINGULAR FASCINATION THAT THIS MYSTERIOUS PERSONAGE SEEMED TO EXERCISE OVER HIM AND THEREFORE MADE NO OBJECTION TO ALBERT'S REQUEST BUT AT ONCE ACCOMPANIED HIM TO THE DESIRED SPOT AND AFTER A SHORT DELAY THE COUNT JOINED THEM IN THE SALON +6070-86744-0001-2570: MY VERY GOOD FRIEND (AND->AN) EXCELLENT (NEIGHBOR->NEIGHBOUR) REPLIED THE COUNT WITH A SMILE YOU REALLY EXAGGERATE MY TRIFLING EXERTIONS +6070-86744-0002-2571: MY FATHER THE COMTE DE MORCERF ALTHOUGH (OF->A) SPANISH ORIGIN POSSESSES CONSIDERABLE INFLUENCE BOTH AT THE COURT OF FRANCE AND MADRID AND I (UNHESITATINGLY->AM HESITATINGLY) PLACE THE BEST SERVICES OF MYSELF AND ALL TO WHOM MY LIFE IS DEAR AT YOUR DISPOSAL +6070-86744-0003-2572: I CAN SCARCELY CREDIT IT +6070-86744-0004-2573: THEN IT IS SETTLED SAID THE COUNT AND I GIVE YOU MY SOLEMN ASSURANCE THAT I ONLY WAITED (*->IN) AN OPPORTUNITY LIKE THE PRESENT TO REALIZE PLANS THAT I HAVE LONG MEDITATED +6070-86744-0005-2574: (SHALL->SHOW) WE MAKE A POSITIVE APPOINTMENT FOR A PARTICULAR DAY AND HOUR INQUIRED THE COUNT ONLY LET ME WARN YOU THAT I AM PROVERBIAL FOR MY PUNCTILIOUS EXACTITUDE IN KEEPING MY ENGAGEMENTS DAY FOR DAY HOUR FOR HOUR SAID ALBERT THAT WILL SUIT ME TO A DOT +6070-86744-0006-2575: SO BE IT THEN REPLIED THE COUNT AND EXTENDING HIS HAND TOWARDS (A->THE) CALENDAR SUSPENDED NEAR THE CHIMNEY PIECE HE SAID TO DAY IS THE TWENTY FIRST OF FEBRUARY AND DRAWING OUT HIS WATCH (ADDED->I DID) IT IS EXACTLY HALF PAST TEN O'CLOCK NOW PROMISE ME TO REMEMBER THIS AND EXPECT ME (THE->THAT) TWENTY FIRST OF MAY AT THE SAME HOUR IN THE FORENOON +6070-86744-0007-2576: I RESIDE IN MY FATHER'S HOUSE BUT OCCUPY A PAVILION AT THE FARTHER SIDE OF THE (COURT YARD ENTIRELY->COURTYARD AND TIRELY) SEPARATED FROM THE MAIN 
BUILDING +6070-86744-0008-2577: NOW THEN SAID THE COUNT RETURNING (HIS TABLETS->ESTABLETS) TO HIS POCKET MAKE YOURSELF PERFECTLY EASY THE HAND OF YOUR TIME (PIECE->PEACE) WILL NOT BE MORE ACCURATE IN MARKING THE TIME THAN MYSELF +6070-86744-0009-2578: THAT DEPENDS WHEN DO YOU LEAVE +6070-86744-0010-2579: FOR FRANCE NO FOR VENICE I SHALL REMAIN IN ITALY FOR ANOTHER YEAR OR TWO +6070-86744-0011-2580: THEN WE SHALL NOT MEET IN PARIS +6070-86744-0012-2581: I FEAR I SHALL NOT HAVE THAT (HONOR->HONOUR) +6070-86744-0013-2582: WELL SINCE WE MUST PART SAID THE COUNT HOLDING OUT A HAND TO EACH OF THE YOUNG MEN ALLOW ME TO WISH YOU BOTH A SAFE AND PLEASANT JOURNEY +6070-86744-0014-2583: WHAT IS THE MATTER ASKED ALBERT OF FRANZ WHEN THEY HAD RETURNED TO THEIR OWN APARTMENTS YOU (SEEM->SEE) MORE THAN COMMONLY THOUGHTFUL +6070-86744-0015-2584: I WILL CONFESS TO YOU ALBERT REPLIED FRANZ THE COUNT IS A VERY SINGULAR PERSON AND THE APPOINTMENT YOU HAVE MADE TO MEET HIM IN PARIS FILLS ME WITH A THOUSAND APPREHENSIONS +6070-86744-0016-2585: DID YOU EVER MEET HIM PREVIOUSLY TO COMING HITHER +6070-86744-0017-2586: UPON MY (HONOR->HONOUR) THEN LISTEN TO ME +6070-86744-0018-2587: HE DWELT WITH CONSIDERABLE FORCE AND ENERGY ON THE ALMOST MAGICAL HOSPITALITY HE HAD RECEIVED FROM THE COUNT AND THE MAGNIFICENCE OF HIS ENTERTAINMENT IN THE GROTTO OF THE THOUSAND AND ONE NIGHTS HE RECOUNTED WITH CIRCUMSTANTIAL EXACTITUDE ALL THE PARTICULARS OF THE SUPPER THE HASHISH THE STATUES THE DREAM AND HOW AT HIS AWAKENING THERE REMAINED NO PROOF OR TRACE OF ALL THESE EVENTS SAVE THE SMALL YACHT SEEN IN THE DISTANT HORIZON DRIVING UNDER FULL SAIL TOWARD (PORTO VECCHIO->PORT OF QUICKU) +6070-86744-0019-2588: THEN HE DETAILED THE CONVERSATION OVERHEARD BY HIM AT THE (COLOSSEUM->COLISEUM) BETWEEN THE COUNT AND VAMPA IN WHICH THE COUNT HAD PROMISED TO OBTAIN THE RELEASE OF THE BANDIT PEPPINO AN ENGAGEMENT WHICH AS OUR READERS ARE AWARE HE MOST FAITHFULLY FULFILLED +6070-86744-0020-2589: BUT SAID FRANZ THE CORSICAN BANDITS THAT WERE AMONG THE CREW OF HIS VESSEL +6070-86744-0021-2590: WHY REALLY THE THING SEEMS TO ME SIMPLE ENOUGH +6070-86744-0022-2591: TALKING OF COUNTRIES REPLIED FRANZ OF WHAT (COUNTRY IS->COUNTRIES) THE COUNT WHAT IS HIS NATIVE (TONGUE->TONG) WHENCE DOES HE DERIVE HIS IMMENSE FORTUNE AND WHAT WERE THOSE EVENTS OF HIS EARLY LIFE A LIFE AS MARVELLOUS AS UNKNOWN THAT (HAVE->HATH) TINCTURED HIS SUCCEEDING YEARS WITH SO DARK AND GLOOMY A MISANTHROPY +6070-86744-0023-2592: CERTAINLY THESE ARE QUESTIONS THAT IN YOUR PLACE I SHOULD LIKE TO HAVE ANSWERED +6070-86744-0024-2593: MY DEAR (FRANZ->FRIENDS) REPLIED ALBERT WHEN UPON RECEIPT OF MY LETTER YOU FOUND THE NECESSITY OF ASKING THE COUNT'S ASSISTANCE YOU PROMPTLY WENT TO HIM SAYING MY FRIEND ALBERT DE MORCERF IS IN DANGER HELP ME TO DELIVER HIM +6070-86744-0025-2594: WHAT ARE HIS MEANS OF EXISTENCE WHAT IS HIS BIRTHPLACE OF WHAT (COUNTRY IS->COUNTRIES) HE A NATIVE +6070-86744-0026-2595: I CONFESS HE ASKED ME NONE NO HE MERELY CAME AND FREED ME FROM THE HANDS OF (SIGNOR->SENOR) VAMPA WHERE I CAN ASSURE YOU IN SPITE OF ALL MY OUTWARD APPEARANCE OF EASE AND UNCONCERN I DID NOT VERY PARTICULARLY CARE TO REMAIN +6070-86744-0027-2596: AND THIS TIME IT MUST BE CONFESSED THAT CONTRARY TO THE USUAL STATE OF AFFAIRS IN DISCUSSIONS BETWEEN THE YOUNG MEN THE EFFECTIVE ARGUMENTS WERE ALL ON ALBERT'S SIDE +6070-86744-0028-2597: WELL SAID FRANZ WITH A SIGH DO AS YOU PLEASE MY DEAR VISCOUNT FOR YOUR ARGUMENTS ARE BEYOND MY POWERS OF REFUTATION +6070-86744-0029-2598: AND NOW MY DEAR 
(FRANZ->FRANCE) LET US TALK OF SOMETHING ELSE +6070-86745-0000-2549: THEN SHOULD ANYTHING APPEAR TO (MERIT->MARRIT) A MORE MINUTE EXAMINATION ALBERT DE MORCERF COULD FOLLOW UP HIS RESEARCHES BY MEANS OF A SMALL GATE SIMILAR TO THAT CLOSE TO THE CONCIERGE'S DOOR AND WHICH MERITS A PARTICULAR DESCRIPTION +6070-86745-0001-2550: SHRUBS AND CREEPING PLANTS COVERED THE WINDOWS AND HID FROM THE GARDEN AND COURT THESE TWO APARTMENTS THE ONLY ROOMS INTO WHICH AS THEY WERE ON THE GROUND FLOOR THE PRYING EYES OF THE CURIOUS COULD PENETRATE +6070-86745-0002-2551: AT A QUARTER TO TEN A (VALET->VALLEY) ENTERED HE COMPOSED WITH A LITTLE GROOM NAMED JOHN AND WHO ONLY SPOKE ENGLISH ALL ALBERT'S ESTABLISHMENT ALTHOUGH THE COOK OF THE HOTEL WAS ALWAYS AT HIS SERVICE AND ON GREAT OCCASIONS THE COUNT'S CHASSEUR ALSO +6070-86745-0003-2552: WAIT THEN DURING THE DAY TELL ROSA THAT WHEN I LEAVE THE OPERA I WILL SUP WITH HER AS SHE WISHES +6070-86745-0004-2553: VERY WELL AT HALF PAST TEN +6070-86745-0005-2554: IS THE COUNTESS UP YET +6070-86745-0006-2555: THE VALET LEFT THE ROOM +6070-86745-0007-2556: GOOD MORNING LUCIEN GOOD MORNING SAID ALBERT YOUR PUNCTUALITY REALLY ALARMS ME +6070-86745-0008-2557: YOU WHOM I EXPECTED LAST YOU ARRIVE AT FIVE MINUTES TO TEN WHEN THE TIME FIXED WAS HALF PAST +6070-86745-0009-2558: NO NO MY DEAR FELLOW DO NOT CONFOUND OUR PLANS +6070-86745-0010-2559: YES HE HAS NOT MUCH TO COMPLAIN OF (BOURGES->BOURGE) IS THE CAPITAL OF CHARLES (SEVEN->THE SEVENTH) +6070-86745-0011-2560: IT IS FOR THAT REASON YOU SEE ME SO EARLY +6070-86745-0012-2561: I RETURNED HOME AT DAYBREAK AND STROVE TO SLEEP BUT MY HEAD ACHED AND I GOT UP TO HAVE A RIDE FOR AN HOUR +6070-86745-0013-2562: (PESTE->PESTS) I WILL DO NOTHING OF THE KIND THE MOMENT THEY COME FROM GOVERNMENT YOU WOULD FIND THEM EXECRABLE +6070-86745-0014-2563: BESIDES THAT DOES NOT CONCERN THE HOME BUT THE FINANCIAL DEPARTMENT +6070-86745-0015-2564: ABOUT WHAT ABOUT THE PAPERS +6070-86745-0016-2565: IN THE ENTIRE POLITICAL WORLD OF WHICH YOU ARE ONE OF THE LEADERS +6070-86745-0017-2566: THEY SAY THAT IT IS QUITE FAIR AND THAT SOWING SO MUCH RED YOU OUGHT TO (REAP->READ) A LITTLE BLUE +6070-86745-0018-2567: COME COME THAT IS NOT BAD SAID (LUCIEN->LUCIAN) +6070-86745-0019-2568: WITH (YOUR TALENTS YOU->THE OR TALONS HE) WOULD MAKE YOUR FORTUNE IN THREE OR FOUR YEARS +6128-63240-0000-503: THE GENTLEMAN HAD NOT EVEN NEEDED TO SIT DOWN TO BECOME INTERESTED APPARENTLY HE HAD TAKEN UP THE VOLUME FROM A TABLE AS SOON AS HE CAME IN AND STANDING THERE AFTER A SINGLE GLANCE ROUND THE APARTMENT HAD LOST HIMSELF IN ITS PAGES +6128-63240-0001-504: THAT HAS AN UNFLATTERING SOUND FOR ME SAID THE YOUNG MAN +6128-63240-0002-505: SHE IS WILLING TO RISK THAT +6128-63240-0003-506: JUST AS I AM THE VISITOR INQUIRED PRESENTING HIMSELF WITH RATHER A (WORK A DAY->WORKADAY) ASPECT +6128-63240-0004-507: HE WAS TALL AND LEAN AND DRESSED THROUGHOUT IN BLACK HIS SHIRT COLLAR WAS LOW AND WIDE AND THE TRIANGLE OF LINEN A LITTLE CRUMPLED EXHIBITED BY THE OPENING OF HIS WAISTCOAT WAS ADORNED BY A PIN CONTAINING A SMALL RED STONE +6128-63240-0005-508: IN SPITE OF THIS DECORATION THE YOUNG MAN LOOKED POOR AS (POOR->FAR) AS A YOUNG MAN COULD (LOOK->LIVE) WHO HAD SUCH A FINE HEAD AND SUCH MAGNIFICENT EYES +6128-63240-0006-509: THOSE OF (BASIL->BAESON) RANSOM (WERE->WENT) DARK DEEP AND GLOWING HIS HEAD HAD A CHARACTER OF ELEVATION WHICH FAIRLY ADDED TO HIS (STATURE->STATUE) IT WAS A HEAD TO BE SEEN ABOVE THE LEVEL OF A CROWD ON SOME JUDICIAL BENCH OR POLITICAL PLATFORM OR EVEN ON A 
BRONZE MEDAL +6128-63240-0007-510: THESE THINGS THE EYES ESPECIALLY WITH THEIR SMOULDERING FIRE MIGHT HAVE INDICATED THAT HE WAS TO BE (A->*) GREAT AMERICAN STATESMAN OR ON THE OTHER HAND THEY MIGHT SIMPLY HAVE PROVED THAT HE CAME FROM CAROLINA OR ALABAMA +6128-63240-0008-511: AND YET THE READER WHO LIKES A COMPLETE IMAGE WHO DESIRES TO READ WITH THE SENSES AS WELL AS WITH THE REASON IS ENTREATED NOT TO FORGET THAT HE PROLONGED HIS (CONSONANTS->COUNTENANCE) AND SWALLOWED HIS VOWELS THAT HE WAS GUILTY OF (ELISIONS->ELYGIANS) AND INTERPOLATIONS WHICH WERE EQUALLY UNEXPECTED AND THAT HIS DISCOURSE WAS PERVADED BY SOMETHING SULTRY AND VAST SOMETHING ALMOST AFRICAN IN ITS RICH BASKING TONE SOMETHING THAT SUGGESTED THE TEEMING (EXPANSE->EXPOUNDS) OF THE COTTON FIELD +6128-63240-0009-512: AND HE TOOK UP HIS HAT VAGUELY A SOFT BLACK HAT WITH A LOW CROWN AND AN IMMENSE STRAIGHT BRIM +6128-63240-0010-513: WELL SO IT IS THEY ARE ALL WITCHES AND WIZARDS MEDIUMS AND SPIRIT (RAPPERS->WRAPPERS) AND (ROARING->ROWING) RADICALS +6128-63240-0011-514: IF YOU ARE GOING TO DINE WITH HER YOU HAD BETTER KNOW IT OH MURDER +6128-63240-0012-515: HE (LOOKED AT->LIFTED) MISSUS LUNA WITH INTELLIGENT INCREDULITY +6128-63240-0013-516: SHE WAS ATTRACTIVE AND IMPERTINENT ESPECIALLY THE LATTER +6128-63240-0014-517: HAVE YOU BEEN IN EUROPE +6128-63240-0015-518: NO I HAVEN'T BEEN ANYWHERE +6128-63240-0016-519: SHE HATES IT SHE WOULD LIKE TO ABOLISH IT +6128-63240-0017-520: THIS LAST REMARK HE MADE AT A VENTURE FOR HE HAD NATURALLY NOT DEVOTED ANY SUPPOSITION WHATEVER TO MISSUS (LUNA->LENA) +6128-63240-0018-521: ARE YOU VERY AMBITIOUS YOU LOOK AS IF YOU WERE +6128-63240-0019-522: AND MISSUS (LUNA->LENA) ADDED THAT NOW SHE WAS BACK SHE DIDN'T KNOW WHAT SHE SHOULD DO +6128-63240-0020-523: ONE DIDN'T EVEN KNOW WHAT ONE HAD COME BACK FOR +6128-63240-0021-524: BESIDES OLIVE DIDN'T WANT HER IN (BOSTON->BUSTON) AND DIDN'T GO THROUGH THE FORM OF SAYING SO +6128-63240-0022-525: THAT WAS ONE COMFORT WITH OLIVE SHE NEVER (WENT->WON) THROUGH ANY FORMS +6128-63240-0023-526: SHE STOOD THERE LOOKING CONSCIOUSLY AND RATHER SERIOUSLY (AT->AND) MISTER RANSOM A SMILE OF EXCEEDING FAINTNESS PLAYED ABOUT HER LIPS IT WAS JUST PERCEPTIBLE ENOUGH TO LIGHT UP THE NATIVE GRAVITY OF HER FACE +6128-63240-0024-527: HER VOICE WAS LOW AND AGREEABLE A CULTIVATED VOICE AND SHE EXTENDED A SLENDER WHITE HAND TO HER VISITOR (WHO->HER) REMARKED WITH SOME SOLEMNITY HE FELT A CERTAIN GUILT OF PARTICIPATION IN MISSUS (LUNA'S->LUNAR'S) INDISCRETION THAT HE WAS INTENSELY HAPPY TO MAKE HER ACQUAINTANCE +6128-63240-0025-528: HE OBSERVED THAT MISS CHANCELLOR'S HAND WAS AT ONCE (COLD->CALLED) AND LIMP SHE MERELY PLACED IT IN HIS WITHOUT EXERTING THE SMALLEST PRESSURE +6128-63240-0026-529: I SHALL BE BACK VERY LATE (WE ARE GOING TO A THEATRE->WILL DON'T YOU THEATER) PARTY THAT'S WHY WE (DINE->DINED) SO EARLY +6128-63240-0027-530: MISSUS (LUNA'S->LUNDY'S) FAMILIARITY EXTENDED EVEN TO HER SISTER SHE REMARKED TO MISS CHANCELLOR THAT SHE LOOKED AS IF SHE WERE GOT UP FOR A SEA (VOYAGE->VOY EACH) +6128-63241-0000-557: POOR RANSOM ANNOUNCED THIS FACT TO HIMSELF AS IF HE HAD MADE A GREAT DISCOVERY BUT IN REALITY HE HAD NEVER BEEN SO (BOEOTIAN->BE OTIAN) AS AT THAT MOMENT +6128-63241-0001-558: THE WOMEN HE HAD HITHERTO KNOWN HAD BEEN MAINLY OF HIS OWN SOFT (CLIME->CLIMB) AND IT WAS NOT OFTEN THEY EXHIBITED THE TENDENCY HE DETECTED AND (CURSORILY->CURSORY) DEPLORED IN MISSUS LUNA'S SISTER +6128-63241-0002-559: RANSOM WAS PLEASED WITH THE VISION OF THAT REMEDY IT MUST BE REPEATED 
THAT HE WAS VERY PROVINCIAL +6128-63241-0003-560: HE WAS SORRY FOR HER BUT HE SAW IN A FLASH THAT NO ONE COULD HELP HER THAT WAS WHAT MADE HER TRAGIC +6128-63241-0004-561: SHE COULD NOT DEFEND HERSELF AGAINST A RICH ADMIRATION A KIND OF TENDERNESS OF ENVY OF ANY ONE WHO HAD BEEN SO HAPPY AS TO HAVE THAT OPPORTUNITY +6128-63241-0005-562: HIS FAMILY WAS RUINED THEY HAD LOST THEIR SLAVES THEIR PROPERTY THEIR FRIENDS AND RELATIONS THEIR HOME HAD TASTED OF ALL THE CRUELTY OF DEFEAT +6128-63241-0006-563: THE STATE OF MISSISSIPPI SEEMED TO HIM THE STATE OF DESPAIR SO (HE->HIS) SURRENDERED THE REMNANTS OF HIS PATRIMONY TO HIS MOTHER AND SISTERS AND AT NEARLY THIRTY YEARS OF AGE ALIGHTED FOR THE FIRST TIME IN NEW YORK IN THE COSTUME OF HIS PROVINCE WITH FIFTY DOLLARS IN HIS POCKET AND (A GNAWING->ANNAWING) HUNGER IN HIS HEART +6128-63241-0007-564: IT WAS IN THE FEMALE LINE AS (BASIL->BALES AT) RANSOM HAD WRITTEN IN ANSWERING HER LETTER WITH A GOOD DEAL OF FORM AND FLOURISH HE SPOKE AS IF THEY HAD BEEN ROYAL HOUSES +6128-63241-0008-565: IF IT HAD BEEN POSSIBLE TO SEND MISSUS (RANSOM->RANDOM) MONEY OR EVEN CLOTHES SHE WOULD HAVE LIKED THAT BUT SHE HAD NO MEANS OF ASCERTAINING (HOW->HER) SUCH AN OFFERING WOULD BE TAKEN +6128-63241-0009-566: (OLIVE->OLIV) HAD A FEAR OF EVERYTHING BUT HER GREATEST FEAR WAS OF BEING AFRAID +6128-63241-0010-567: SHE HAD ERECTED IT INTO A SORT OF (RULE->ROLE) OF CONDUCT THAT WHENEVER SHE SAW A RISK SHE WAS TO TAKE IT AND SHE HAD FREQUENT HUMILIATIONS AT FINDING HERSELF SAFE AFTER ALL +6128-63241-0011-568: SHE WAS PERFECTLY SAFE AFTER WRITING TO (BASIL->BASE OR) RANSOM AND INDEED IT WAS DIFFICULT TO SEE WHAT HE COULD HAVE DONE TO HER EXCEPT THANK HER HE WAS ONLY EXCEPTIONALLY SUPERLATIVE FOR HER LETTER AND ASSURE HER THAT HE WOULD COME AND SEE HER THE FIRST TIME HIS BUSINESS HE WAS BEGINNING TO GET A LITTLE SHOULD TAKE HIM TO BOSTON +6128-63241-0012-569: HE WAS TOO SIMPLE TOO MISSISSIPPIAN FOR THAT SHE WAS ALMOST DISAPPOINTED +6128-63241-0013-570: OF ALL THINGS IN THE WORLD CONTENTION WAS MOST SWEET TO HER THOUGH WHY IT IS HARD TO IMAGINE FOR IT ALWAYS COST HER TEARS HEADACHES A DAY OR TWO IN BED ACUTE EMOTION AND IT WAS VERY POSSIBLE (BASIL->BEESER) RANSOM WOULD NOT CARE TO (CONTEND->COMPEND) +6128-63244-0000-531: MISS CHANCELLOR HERSELF HAD THOUGHT SO MUCH ON THE VITAL SUBJECT WOULD NOT SHE MAKE A FEW REMARKS AND GIVE THEM SOME OF HER EXPERIENCES +6128-63244-0001-532: HOW DID THE LADIES (ON->AND) BEACON STREET FEEL ABOUT THE BALLOT +6128-63244-0002-533: PERHAPS SHE COULD SPEAK FOR THEM MORE THAN FOR SOME OTHERS +6128-63244-0003-534: WITH HER (IMMENSE->MENST) SYMPATHY FOR REFORM SHE FOUND HERSELF SO OFTEN WISHING THAT (REFORMERS->REFUSE) WERE A LITTLE DIFFERENT +6128-63244-0004-535: (OLIVE->I HAVE) HATED TO HEAR THAT FINE AVENUE (TALKED->TALKS) ABOUT AS IF IT (WERE->WAS) SUCH A REMARKABLE PLACE AND TO LIVE THERE (WERE->WHERE) A PROOF OF WORLDLY GLORY +6128-63244-0005-536: ALL SORTS OF INFERIOR PEOPLE (LIVED->LIFT) THERE AND SO BRILLIANT A WOMAN AS MISSUS (FARRINDER->FARINGDER) WHO LIVED AT (ROXBURY->BRAXBURY) OUGHT NOT TO (MIX THINGS->MAKE SPENCE) UP +6128-63244-0006-537: SHE KNEW HER PLACE IN THE BOSTON (HIERARCHY->HIRAKEE) AND IT WAS NOT WHAT MISSUS (FARRINDER->BARRANGERS) SUPPOSED SO THAT THERE WAS A WANT OF PERSPECTIVE IN TALKING TO HER AS IF SHE HAD BEEN (A REPRESENTATIVE->UNREPRESENTATIVE) OF THE ARISTOCRACY +6128-63244-0007-538: SHE WISHED TO WORK IN ANOTHER FIELD SHE HAD LONG BEEN PREOCCUPIED WITH THE ROMANCE OF THE PEOPLE +6128-63244-0008-539: THIS MIGHT SEEM ONE 
OF THE MOST ACCESSIBLE OF PLEASURES BUT IN POINT OF FACT SHE HAD NOT FOUND IT SO +6128-63244-0009-540: CHARLIE WAS A YOUNG MAN IN A (WHITE->WORLD) OVERCOAT AND A PAPER COLLAR IT WAS FOR HIM IN THE LAST ANALYSIS THAT (THEY->THE) CARED MUCH THE MOST +6128-63244-0010-541: (OLIVE CHANCELLOR WONDERED->OUT OF CHANCELLORED) HOW MISSUS (FARRINDER->THINDER) WOULD TREAT (THAT->THEIR) BRANCH OF THE QUESTION +6128-63244-0011-542: (IF->*) IT (*->WOULD) BE NECESSARY WE ARE PREPARED TO TAKE (CERTAIN STEPS->CIRCUMST) TO CONCILIATE THE SHRINKING +6128-63244-0012-543: OUR MOVEMENT IS FOR ALL IT APPEALS TO THE MOST DELICATE LADIES +6128-63244-0013-544: (RAISE THE STANDARD->FOR INSTANDED) AMONG THEM AND BRING ME A (THOUSAND->SPASM) NAMES +6128-63244-0014-545: (I->AND) LOOK AFTER THE DETAILS AS WELL AS THE BIG (CURRENTS->CURRANTS) MISSUS (FARRINDER->FARNDER) ADDED IN A TONE AS EXPLANATORY AS COULD BE EXPECTED OF SUCH A WOMAN AND WITH A SMILE OF WHICH (THE->THIS) SWEETNESS WAS THRILLING TO HER LISTENER +6128-63244-0015-546: SAID (OLIVE->OLDEST) CHANCELLOR WITH A FACE WHICH SEEMED TO PLEAD FOR A (REMISSION OF->REMISSIONARY'S) RESPONSIBILITY +6128-63244-0016-547: I (WANT->WARNED) TO BE NEAR TO THEM TO HELP THEM +6128-63244-0017-548: IT WAS ONE THING TO CHOOSE FOR HERSELF BUT NOW THE GREAT REPRESENTATIVE OF THE (ENFRANCHISEMENT->ENCOMCHISEMENT) OF THEIR SEX FROM EVERY FORM OF BONDAGE HAD CHOSEN FOR HER +6128-63244-0018-549: THE UNHAPPINESS OF WOMEN +6128-63244-0019-550: THEY WERE (HER->HIS) SISTERS THEY WERE HER OWN AND THE DAY OF THEIR DELIVERY HAD DAWNED +6128-63244-0020-551: THIS WAS THE ONLY SACRED CAUSE THIS WAS THE GREAT (THE JUST REVOLUTION->DESTROVISION) IT (MUST->WAS) TRIUMPH IT (MUST->WAS) SWEEP EVERYTHING BEFORE IT IT MUST EXACT FROM THE OTHER THE BRUTAL (BLOOD STAINED->BLOODSTAINED) RAVENING RACE THE LAST PARTICLE OF EXPIATION +6128-63244-0021-552: (THEY->THERE) WOULD BE NAMES OF WOMEN WEAK INSULTED PERSECUTED BUT DEVOTED IN EVERY (PULSE->PART) OF THEIR BEING TO THE CAUSE AND ASKING NO BETTER FATE THAN TO DIE FOR IT +6128-63244-0022-553: IT WAS NOT CLEAR TO THIS INTERESTING GIRL IN WHAT MANNER SUCH A SACRIFICE (AS->OF) THIS LAST WOULD BE REQUIRED OF HER BUT SHE (SAW THE->SOLDOM) MATTER THROUGH A KIND OF SUNRISE (MIST OF EMOTION->MISTABILATION) WHICH MADE DANGER AS ROSY (AS->IS) SUCCESS +6128-63244-0023-554: WHEN MISS (BIRDSEYE->BIRD'S EYE) APPROACHED IT TRANSFIGURED (HER->*) FAMILIAR (HER COMICAL->HYCOMICAL) SHAPE AND MADE THE POOR LITTLE HUMANITARY HACK SEEM ALREADY A (MARTYR->MASTER) +6128-63244-0024-555: (OLIVE->ONLY IF) CHANCELLOR LOOKED AT HER WITH LOVE REMEMBERED THAT SHE HAD NEVER IN HER LONG (UNREWARDED->IN REWARDED) WEARY LIFE HAD A THOUGHT (OR->OF) AN IMPULSE FOR HERSELF +6128-63244-0025-556: (*->IF) SHE HAD BEEN CONSUMED BY THE PASSION OF SYMPATHY IT HAD (CRUMPLED->CRUMBLED) HER INTO AS MANY CREASES AS AN OLD GLAZED DISTENDED GLOVE +6432-63722-0000-2431: (BUT SCUSE->PUSE) ME (DIDN'T YO FIGGER ON DOIN->THEN YOU'VE GONE DOING) SOME (DETECTIN AN GIVE->DETECTIVE AND GIVIN) UP FISHIN +6432-63722-0001-2432: AND SHAG WITH THE FREEDOM OF AN OLD SERVANT STOOD LOOKING AT HIS MASTER AS IF NOT QUITE UNDERSTANDING THE NEW TWIST THE AFFAIRS HAD TAKEN +6432-63722-0002-2433: I'M (GOING OFF FISHING->GOIN OUR FISHIN) I MAY NOT CATCH ANYTHING (I->AND) MAY NOT WANT TO AFTER I GET THERE +6432-63722-0003-2434: GET READY (SHAG->SHAGG) YES (SAH->A) COLONEL +6432-63722-0004-2435: AND HAVING PUT HIMSELF IN A FAIR WAY AS HE HOPED TO SOLVE SOME OF THE PROBLEMS CONNECTED WITH THE DARCY CASE COLONEL ASHLEY WENT DOWN TO 
POLICE HEADQUARTERS TO LEARN MORE FACTS IN (*->THE) CONNECTION WITH THE MURDER OF THE EAST INDIAN +6432-63722-0005-2436: (PINKUS->PEGAS) AND DONOVAN HAVEN'T THEY CARROLL YEP +6432-63722-0006-2437: (CARROLL->KAL) WAS TOO MUCH ENGAGED IN WATCHING THE BLUE SMOKE (CURL->GIRL) LAZILY UPWARD FROM HIS CIGAR JUST THEN TO SAY MORE +6432-63722-0007-2438: ARE YOU GOING TO WORK ON THAT CASE COLONEL +6432-63722-0008-2439: BUT HE HADN'T ANY MORE TO DO WITH IT COLONEL THAN THAT CAT +6432-63722-0009-2440: PERHAPS NOT ADMITTED COLONEL ASHLEY +6432-63722-0010-2441: WE'VE GOT OUR MAN AND THAT'S ALL WE WANT +6432-63722-0011-2442: YOU'RE ON THE DARCY CASE THEY TELL ME IN A WAY YES +6432-63722-0012-2443: I'M WORKING IN THE (INTERESTS->INTEREST) OF THE YOUNG MAN +6432-63722-0013-2444: IT'S JUST ONE OF THEM COINCIDENCES LIKE +6432-63722-0014-2445: BUSTED HIS HEAD IN WITH A HEAVY CANDLESTICK ONE OF A PAIR +6432-63722-0015-2446: GAD (EXCLAIMED->EXPLAINED) THE COLONEL +6432-63722-0016-2447: THE VERY PAIR I WAS GOING TO BUY +6432-63722-0017-2448: LOOK HERE (COLONEL->CAROL) DO YOU KNOW ANYTHING ABOUT THIS +6432-63722-0018-2449: AND THE DETECTIVE'S PROFESSIONAL INSTINCTS GOT THE UPPER HAND OF HIS FRIENDLINESS NOT THE LEAST IN THE WORLD NOT AS MUCH AS YOU DO WAS THE COOL ANSWER +6432-63722-0019-2450: I (HAPPENED->HAPPEN) TO SEE THOSE CANDLESTICKS IN THE WINDOW OF (SINGA PHUT'S->SINGAFUT'S) SHOP THE OTHER DAY AND I MADE UP MY MIND TO BUY THEM WHEN I HAD A CHANCE +6432-63722-0020-2451: NOW I'M AFRAID I WON'T BUT HOW DID IT HAPPEN +6432-63722-0021-2452: (PHUT->FIVE) I DON'T KNOW WHETHER THAT'S HIS FIRST OR HIS LAST NAME ANYHOW HE HAD A PARTNER NAMED (SHERE->SHEAR) ALI +6432-63722-0022-2453: ANYHOW HE (AND PHUT DIDN'T->INFECTED) GET ALONG VERY WELL IT SEEMS +6432-63722-0023-2454: (NEIGHBORS->LABORS) OFTEN HEARD (EM SCRAPPIN->HIM SCRAP IN) A LOT AND THIS AFTERNOON THEY WENT AT IT AGAIN HOT AND HEAVY +6432-63722-0024-2455: (TOWARD->TO OUR) DARK A MAN WENT IN TO BUY A LAMP +6432-63722-0025-2456: HE FOUND THE PLACE WITHOUT A LIGHT IN IT STUMBLED OVER SOMETHING ON THE FLOOR AND THERE WAS (ALI'S->ALWAYS) BODY WITH THE HEAD BUSTED IN AND THIS HEAVY CANDLESTICK NEAR IT +6432-63722-0026-2457: SURE HELD SO TIGHT WE COULD HARDLY GET IT OUT +6432-63722-0027-2458: MAYBE THE FIGHT WAS ABOUT WHO OWNED THE WATCH FOR THE (DAGOS->DAG WAS) TALKED IN THEIR FOREIGN LINGO AND NONE OF THE NEIGHBORS COULD TELL WHAT THEY WERE (SAYIN->SAYING) I SEE +6432-63722-0028-2459: AND THE WATCH HAVE YOU IT YES IT'S HERE +6432-63722-0029-2460: THAT'S THE WATCH ANNOUNCED THE (HEADQUARTERS->HEADQUARTER'S) DETECTIVE REACHING IN FOR IT GOING (YET->AT) SEE +6432-63722-0030-2461: YOU'RE NOT (AS SQUEAMISH->A SCREAMY) AS ALL THAT ARE YOU JUST BECAUSE IT WAS IN A DEAD MAN'S (HAND->HANDS) AND (IN->*) A WOMAN'S +6432-63722-0031-2462: AND DONOVAN'S VOICE WAS PLAINLY (SKEPTICAL->SCEPTICAL) +6432-63722-0032-2463: YES IT MAY HAVE SOME ROUGH EDGES ON IT +6432-63722-0033-2464: AND I'VE READ ENOUGH ABOUT GERMS TO KNOW THE DANGER I'D ADVISE YOU TO BE CAREFUL +6432-63722-0034-2465: IF YOU DON'T MIND I SHOULD LIKE TO EXAMINE THIS A BIT +6432-63722-0035-2466: BEFORE THE BIG WIND IN IRELAND SUGGESTED THONG WITH A NOD (AT->OF) HIS IRISH COMPATRIOT SLIGHTLY (LAUGHED->THEY'LL HAVE) THE COLONEL +6432-63722-0036-2467: THAT'S RIGHT AGREED THE COLONEL AS HE CONTINUED TO MOVE HIS MAGNIFYING GLASS OVER THE SURFACE OF THE STILL TICKING WATCH +6432-63722-0037-2468: (AND->IN) A CLOSE OBSERVER MIGHT HAVE OBSERVED THAT HE DID NOT TOUCH HIS BARE FINGERS TO THE TIMEPIECE BUT POKED IT ABOUT AND 
TOUCHED IT HERE AND THERE WITH THE END OF A (LEADPENCIL->LEAD PENCIL) +6432-63722-0038-2469: AND (DONOVAN->DONALD) TAKE (A->HER) FRIEND'S ADVICE AND DON'T BE TOO FREE WITH THAT WATCH TOO FREE WITH IT +6432-63722-0039-2470: ASKED THE SURPRISED DETECTIVE YES +6432-63722-0040-2471: DON'T SCRATCH YOURSELF ON IT WHATEVER YOU DO WHY NOT +6432-63722-0041-2472: SIMPLY BECAUSE THIS WATCH +6432-63722-0042-2473: SOME ONE OUT HERE TO SEE YOU +6432-63722-0043-2474: ALL RIGHT BE THERE IN A SECOND +6432-63722-0044-2475: (SINGA PHUT->SHING AFOOT) WAS THE PANTING ANSWER +6432-63722-0045-2476: I WANT TO TALK OVER DARCY'S CASE WITH YOU THE COLONEL HAD SAID AND THE (TWO->JEW) HAD TALKED HAD THOUGHT HAD TALKED AGAIN AND NOW WERE SILENT FOR A TIME +6432-63722-0046-2477: WHAT ARE THE (CHANCES->CHURCHES) OF GETTING HIM OFF LEGALLY IF WE GO AT IT FROM A NEGATIVE STANDPOINT ASKED THE COLONEL +6432-63722-0047-2478: RATHER A HYPOTHETICAL QUESTION COLONEL BUT I SHOULD SAY IT MIGHT BE A FIFTY FIFTY PROPOSITION +6432-63722-0048-2479: AT BEST HE WOULD GET OFF (WITH A->FOR THE) SCOTCH VERDICT OF NOT PROVEN BUT HE DOESN'T WANT THAT NOR DO I +6432-63722-0049-2480: AND YOU I DON'T WANT IT EITHER +6432-63722-0050-2481: BUT I WANT TO KNOW JUST WHERE WE STAND NOW I KNOW +6432-63722-0051-2482: BUT I NEED TO DO A LITTLE MORE SMOKING OUT FIRST NOW I WANT TO THINK +6432-63722-0052-2483: IF YOU'LL EXCUSE ME I'LL PRETEND I'M FISHING AND I MAY CATCH SOMETHING +6432-63722-0053-2484: IN FACT I HAVE A FEELING THAT (I'LL LAND->I ALAN) MY FISH +6432-63722-0054-2485: (I'D->I) RECOMMEND HIM TO YOU INSTEAD OF BLACKSTONE THANKS LAUGHED KENNETH +6432-63722-0055-2486: WHAT IS IT PERHAPS I CAN HELP YOU +6432-63722-0056-2487: THE OLD ADAGE OF TWO HEADS YOU KNOW +6432-63722-0057-2488: YES (IT->IT'S) STILL HOLDS GOOD +6432-63722-0058-2489: NO ALIMONY (REPEATED->REPLIED) THE COLONEL PUZZLED YES JUST THAT +6432-63722-0059-2490: AND THERE'S NO REASON YOU SHOULDN'T KNOW +6432-63723-0000-2491: CHUCKLED THE COLONEL AS HE SKILFULLY PLAYED THE LUCKLESS TROUT NOW STRUGGLING TO GET LOOSE FROM THE HOOK +6432-63723-0001-2492: AND WHEN THE FISH WAS LANDED PANTING ON THE GRASS AND SHAG HAD BEEN ROUSED FROM HIS SLUMBER TO SLIP (THE->A) NOW LIMP FISH INTO THE (CREEL->CREOLE) COLONEL ASHLEY GAVE A SIGH OF RELIEF AND REMARKED I THINK I SEE IT NOW +6432-63723-0002-2493: THE REASON SHE ASKED NO ALIMONY INQUIRED KENNETH +6432-63723-0003-2494: NO I WASN'T THINKING OF THAT +6432-63723-0004-2495: HOWEVER DON'T THINK I'M NOT INTERESTED IN YOUR CASE I'VE (FISHED->FINISHED) ENOUGH FOR TO DAY +6432-63723-0005-2496: WELL I DON'T KNOW THAT YOU CAN +6432-63723-0006-2497: IT ISN'T GENERALLY KNOWN WENT ON THE LAWYER THAT THE HOTEL KEEPER'S WIFE HAS LEFT HIM +6432-63723-0007-2498: IT WAS ONE OF WHAT AT FIRST MIGHT BE CALLED REFINED CRUELTY ON HER HUSBAND'S PART DEGENERATING GRADUALLY INTO THAT OF (THE->A) BASER SORT +6432-63723-0008-2499: (YOU DON'T->IT ALL) MEAN THAT (LARCH->LARGE) STRUCK HER THAT THERE WAS PHYSICAL ABUSE DO YOU ASKED THE COLONEL THAT'S WHAT HE DID +6432-63723-0009-2500: THE COLONEL DID NOT DISCLOSE THE FACT THAT IT WAS NO NEWS TO HIM +6432-63723-0010-2501: AARON (GRAFTON'S->GRAFTON) STATEMENT WAS BEING (UNEXPECTEDLY->UNEXPECTED GREAT) CONFIRMED +6432-63723-0011-2502: HE REMEMBERED THAT CYNTHIA AND GRAFTON HAD ONCE BEEN IN LOVE WITH EACH OTHER +6432-63723-0012-2503: SHE SAID HE HAD STRUCK HER MORE THAN ONCE AND SHE COULD STAND IT NO LONGER +6432-63723-0013-2504: BECAUSE (LARCH->LARGE) MADE NO (DEFENSE->DEFENCE) +6432-63723-0014-2505: (LARCH->LARGE) BY REFUSING TO 
APPEAR PRACTICALLY ADMITTED THE CHARGES AGAINST HIM AND DID NOT OPPOSE THE SEPARATION +6432-63723-0015-2506: SO I HAD TO LET HER HAVE HER WAY AND WE DID NOT ASK THE (COURT->CORP) FOR MONEY THOUGH I HAD NO SUCH SQUEAMISH FEELINGS WHEN IT CAME TO MY COUNSEL FEE +6432-63723-0016-2507: NO BUT HE WILL OR (I'LL SUE HIM->ELSEWOO EM) AND GET JUDGMENT OH HE'LL PAY ALL RIGHT +6432-63723-0017-2508: AND IT TAKES ALL SORTS OF PERSONS TO MAKE IT UP +6432-63723-0018-2509: STILL I WOULD LIKE TO KNOW +6432-63723-0019-2510: THE MURDER OF MISSUS DARCY HAD SOME TIME AGO BEEN SHIFTED OFF THE FRONT PAGE THOUGH IT WOULD GET BACK THERE WHEN THE YOUNG JEWELER WAS TRIED +6432-63723-0020-2511: IT HAD A DOUBLE REPUTATION SO TO SPEAK +6432-63723-0021-2512: GRAVE AND EVEN REVEREND (*->THE) CONVENTIONS ASSEMBLED IN ITS (BALLROOM AND->BALL ROOM IN) POLITICIANS OF THE UPPER IF NOT BETTER CLASS WERE FREQUENTLY SEEN IN ITS DINING ROOM OR CAFE +6432-63723-0022-2513: (LARCH->LARGE) HIMSELF WAS A PECULIAR CHARACTER +6432-63723-0023-2514: IN A SMALLER PLACE HE WOULD HAVE BEEN CALLED A SALOON KEEPER +6432-63723-0024-2515: AND IT WAS THIS MAN RICH (IT WAS->OVER) SAID HANDSOME CERTAINLY THAT (CYNTHIA->SANTIA) RATCHFORD HAD MARRIED +6432-63723-0025-2516: TO THIS WAS THE ANSWER WHISPERED MONEY +6432-63723-0026-2517: AND IN A WAY IT WAS TRUE +6432-63723-0027-2518: SHE ALSO SAW AN OPPORTUNITY OF PAYING OLD DEBTS AND REAPING SOME REVENGES +6432-63723-0028-2519: AFTER THE MARRIAGE WHICH WAS A BRILLIANT AND GAY ONE IF NOT HAPPY THE (LARCH->LARGE) HOTEL IT COULD HARDLY BE CALLED A HOME BECAME THE SCENE OF MANY FESTIVE OCCASIONS +6432-63723-0029-2520: THEN IT WAS SAID OF (LARCH->LARGE) THAT SOON AFTER THE ECHOES OF THE WEDDING CHIMES HAD DIED AWAY HE HAD BEGUN TO TREAT HIS WIFE WITH (*->A) REFINED CRUELTY THAT HIDDEN AWAY FROM THE PUBLIC UNDERNEATH HIS HABITUAL MANNER THERE WAS THE RAWNESS OF THE BRUTE +6432-63723-0030-2521: BUT IT WAS NOTICED THAT THE OLDER AND MORE CONSERVATIVE FAMILIES WERE LESS OFTEN REPRESENTED AND WHEN THEY WERE IT WAS BY SOME OF THE YOUNGER MEMBERS WHOSE REPUTATIONS WERE ALREADY (SMIRCHED->SMARGED) OR WHO HAD NOT YET ACQUIRED ANY AND WERE WILLING TO TAKE A CHANCE +6432-63723-0031-2522: IT WOULDN'T DO YOU KNOW AFTER THAT STORY CAME OUT FOR ME AND THE VICE CHANCELLOR WHO SAT IN (THE->A) CASE AS WELL AS OTHER JUDGES AND MEMBERS OF THE BAR TO BE SEEN THERE KENNETH EXPLAINED TO THE COLONEL +6432-63723-0032-2523: MEANWHILE COLONEL ASHLEY WAS A VERY BUSY MAN AND TO NO ONE DID HE TELL VERY MUCH ABOUT HIS ACTIVITIES HE SAW DARCY FREQUENTLY AT THE JAIL AND TO THAT YOUNG MAN'S PLEADINGS THAT SOMETHING (*->TO) BE DONE ALWAYS RETURNED THE ANSWER +6432-63723-0033-2524: DON'T WORRY IT WILL COME OUT ALL RIGHT +6432-63723-0034-2525: I'M GOING (TO RECTIFY->DIRECT BY) THEM BUT (IT->I) WILL TAKE TIME +6432-63723-0035-2526: (IT'S->HIS) HARD FOR MISS MASON TOO ALTHOUGH SHE'S BEARING UP LIKE A MAJOR +6432-63723-0036-2527: SO KING (GOT->GOD) BAIL WHO PUT IT UP +6432-63723-0037-2528: IT WAS (HIGH LARCH->IRCH) +6432-63723-0038-2529: THEY TOOK HARRY AWAY A WHILE AGO +6432-63723-0039-2530: BUT HIS ARE PRETTY UNCERTAIN SHOES TO BE IN JUST THE SAME +6432-63723-0040-2531: ONLY THAT I DARCY HESITATED AND GREW RED +6432-63723-0041-2532: GOOD EVENING COLONEL HE CALLED GENIALLY WILL YOU JOIN ME IN A WELSH RABBIT +6432-63723-0042-2533: THANK YOU NO +6432-63723-0043-2534: I'M AFRAID MY DIGESTION ISN'T QUITE UP TO THAT AS I'VE HAD TO CUT OUT MY FISHING OF LATE +6432-63723-0044-2535: NOW AS TO CERTAIN MATTERS IN THE STORE ON THE MORNING OF THE MURDER 
+6432-63723-0045-2536: (THE->THEY) STOPPED (CLOCKS->CLUX) FOR INSTANCE HAVE YOU ANY THEORY +6432-63723-0046-2537: THERE WERE THREE OF THEM THE CENTER FIGURE BEING THAT OF HARRY KING AND HE WAS VERY MUCH INTOXICATED +6432-63723-0047-2538: THAT IS NOT ALWAYS BUT SOMETIMES IT HAPPENED TO BE SO NOW +6432-63723-0048-2539: I BEG YOUR PARDON HE SAID IN THE CULTURED TONES HE KNEW SO WELL HOW TO USE YET OF WHICH HE MADE SO LITTLE USE OF LATE +6432-63723-0049-2540: I SAID WHERE HAVE YOU BEEN REMARKED THE OTHER WE'VE MISSED YOU +6432-63723-0050-2541: I SAID I WAS GOLFING HE WENT ON EXCEEDINGLY DISTINCTLY THOUGH WITH AN EFFORT +6432-63723-0051-2542: WHY (POLONIUS->BONIUS) SOME ONE ASKED +6432-63723-0052-2543: BECAUSE DEAR FRIEND REPLIED KING SOFTLY HE SOMEWHAT RESEMBLES A CERTAIN PERSON HERE WHO TALKS TOO MUCH BUT WHO IS NOT SO WISE AS HE THINKS +6432-63723-0053-2544: THERE WAS A RATTLE OF (COINS ON->COIN DOWN) THE MAHOGANY BAR AS KING SOUGHT TO DISENTANGLE A SINGLE BILL FROM THE (WADDED->WATERED) UP CURRENCY IN HIS POCKET +6432-63723-0054-2545: IT'S (IT'S->*) AN ODD COIN AN OLD ROMAN ONE THAT MISSUS DARCY HAD IN HER PRIVATE COLLECTION KEPT IN THE JEWELRY STORE SAFE WAS THE WHISPERED ANSWER +6432-63723-0055-2546: I WENT OVER THEM (*->NEAR) THE (OTHER->*) DAY AND NOTICED SOME WERE MISSING THOUGH I SAW THEM ALL WHEN I PAID A VISIT TO HER JUST A SHORT TIME BEFORE SHE WAS KILLED +6432-63723-0056-2547: THAT WAS HERS WENT ON THE JEWELER +6432-63723-0057-2548: NOW HARRY KING HAS IT EXCLAIMED COLONEL ASHLEY +6938-70848-0000-1216: EVEN THE SUN CAME OUT PALE AND WATERY AT NOON +6938-70848-0001-1217: THE (COLDS->GOLDS) AND RHEUMATISM OF THE RAINY MONTHS VANISHED +6938-70848-0002-1218: (ASKED A->AS TO) WORKER LAST SUNDAY YOU DID IT WHEN THE YUNKERS +6938-70848-0003-1219: WELL DIDN'T THEY SHOOT US ONE MAN EXHIBITED HIS ARM IN A SLING +6938-70848-0004-1220: HAVEN'T I GOT SOMETHING TO REMEMBER THEM BY THE DEVILS +6938-70848-0005-1221: WHO ARE YOU TO DESTROY THE LEGAL GOVERNMENT (WHO IS LENIN->WITH LANY) A GERMAN +6938-70848-0006-1222: WHO ARE YOU A COUNTER (REVOLUTIONIST A PROVOCATOR->REVOLISHNESS APPROCATUR) THEY (BELLOWED->BELOVED) AT HIM +6938-70848-0007-1223: YOU CALL YOURSELVES THE PEOPLE OF (RUSSIA BUT YOU'RE->A SHEPHERD YOU ARE) NOT THE PEOPLE OF (RUSSIA->RUSHIRE) +6938-70848-0008-1224: (THE PEASANTS ARE THE->TO PIECE AND OTHER) PEOPLE OF RUSSIA (WAIT->WRIT) UNTIL THE PEASANTS +6938-70848-0009-1225: WE KNOW WHAT THE PEASANTS WILL SAY AREN'T THEY (WORKINGMEN->WORKING MEN) LIKE OURSELVES +6938-70848-0010-1226: (THESE MEN ESPECIALLY->THIS MAN HAS SPECIALLY) WELCOMED THE CALL TO A CONGRESS OF PEASANTS +6938-70848-0011-1227: (THESE->THIS) LAST (WERE->WHERE) THE YOUNG GENERATION WHO HAD BEEN SERVING IN THE ARMY +6938-70848-0012-1228: WHEREUPON THE OLD (EXECUTIVE->EXECUTED) COMMITTEE LEFT THE HALL +6938-70848-0013-1229: DOWN WITH HIM THEY SHRIEKED +6938-70848-0014-1230: FEARFUL TUMULT (CRIES DOWN->CHRISTOWN) WITH THE (BOLSHEVIKI->PULCHEVIKI) +6938-70848-0015-1231: UPON MY RETURN I VISITED (SMOLNY->MORLEY) NO SUCH ACCUSATION WAS MADE AGAINST ME THERE AFTER A BRIEF CONVERSATION I LEFT AND (THAT'S ALL->THAT SOUL) LET (ANY ONE->ANYONE) PRESENT MAKE SUCH AN ACCUSATION +6938-70848-0016-1232: MEANWHILE THE QUESTION OF THE (STATUS->STRATORS) OF THE EXECUTIVE COMMITTEE WAS AGITATING ALL MINDS +6938-70848-0017-1233: BY (DECLARING THE->DECLINING THEIR) ASSEMBLY EXTRAORDINARY CONFERENCE IT HAD BEEN PLANNED TO (BLOCK->PLOT) THE (REELECTION->RE ELECTION) OF THE EXECUTIVE COMMITTEE +6938-70848-0018-1234: BUT THIS (WORKED->WORTH) 
BOTH WAYS THE (LEFT SOCIALIST REVOLUTIONISTS->LAP SOCIALLY REVOLUTIONIST) DECIDED THAT IF THE CONGRESS HAD NO POWER OVER THE (EXECUTIVE->EXECUTING) COMMITTEE THEN THE EXECUTIVE COMMITTEE HAD NO POWER OVER THE CONGRESS +6938-70848-0019-1235: ON THE TWENTY SEVENTH OCCURRED THE DEBATE ON THE LAND QUESTION WHICH REVEALED THE DIFFERENCES BETWEEN THE (AGRARIAN->AGRIAN) PROGRAMME OF THE BOLSHEVIKI AND THE (LEFT->LAP) SOCIALIST REVOLUTIONARIES +6938-70848-0020-1236: THE (CONSTITUENT->CONSTITUTE) ASSEMBLY WILL NOT DARE TO BREAK WITH THE WILL OF THE PEOPLE +6938-70848-0021-1237: FOLLOWED HIM LENIN LISTENED TO NOW WITH ABSORBING INTENSITY +6938-70848-0022-1238: THE FIRST STAGE WAS (THE->A) CRUSHING OF AUTOCRACY AND THE (CRUSHING->CRASHING) OF THE POWER OF THE INDUSTRIAL (CAPITALISTS->CAPITALIST) AND (LAND OWNERS->THE LANDOWNERS) WHOSE (INTERESTS ARE->INTEREST OUR) CLOSELY RELATED +6938-70848-0023-1239: (THE DUMAS AND ZEMSTVOS->DID YOU ME SEND THEMSELVES) WERE DROPPED +6938-70848-0024-1240: HE KNEW THAT AN AGREEMENT WITH THE BOLSHEVIKI WAS BEING DISCUSSED BUT HE DID NOT KNOW THAT IT HAD BEEN CONCLUDED +6938-70848-0025-1241: HE SPOKE TO THE (RUMP->WRONG) CONVENTION +6938-70848-0026-1242: THE (VILLAGES->RELIGIOUS) WILL SAVE US IN THE END +6938-70848-0027-1243: BUT THE PRESENT (MOVEMENT->MOMENT) IS INTERNATIONAL AND THAT IS WHY IT IS INVINCIBLE +6938-70848-0028-1244: THE (WILL->WIDOW) OF MILLIONS OF WORKERS IS (NOW->SO) CONCENTRATED IN (THIS->THE) HALL +6938-70848-0029-1245: A NEW HUMANITY WILL BE BORN OF THIS WAR +6938-70848-0030-1246: I GREET YOU WITH THE (CHRISTENING->CHRISTIAN) OF A NEW RUSSIAN LIFE AND FREEDOM +7018-75788-0000-135: THEN I TOOK UP A GREAT STONE FROM AMONG THE TREES AND COMING UP TO HIM SMOTE HIM THEREWITH ON THE HEAD WITH ALL MY MIGHT AND CRUSHED IN HIS SKULL AS HE LAY DEAD DRUNK +7018-75788-0001-136: BEHOLD A SHIP WAS MAKING FOR THE ISLAND THROUGH THE DASHING SEA AND CLASHING WAVES +7018-75788-0002-137: HEARING THIS I WAS SORE TROUBLED REMEMBERING WHAT I HAD BEFORE SUFFERED FROM THE APE KIND +7018-75788-0003-138: UPON THIS HE BROUGHT ME A COTTON BAG AND (GIVING->GIVEN) IT TO ME SAID TAKE THIS BAG AND FILL IT WITH PEBBLES FROM THE BEACH AND GO FORTH WITH A COMPANY OF THE TOWNSFOLK TO WHOM I WILL GIVE A CHARGE RESPECTING THEE +7018-75788-0004-139: DO AS THEY DO AND (BELIKE->BE LIKE) THOU SHALT GAIN WHAT MAY FURTHER THY RETURN VOYAGE TO THY NATIVE LAND +7018-75788-0005-140: THEN HE CARRIED ME TO THE BEACH WHERE I FILLED MY BAG WITH PEBBLES LARGE AND SMALL AND PRESENTLY WE SAW A COMPANY OF FOLK ISSUE FROM THE TOWN EACH BEARING A BAG LIKE MINE FILLED WITH PEBBLES +7018-75788-0006-141: TO THESE HE COMMITTED ME COMMENDING ME TO THEIR CARE AND SAYING THIS MAN IS A STRANGER SO TAKE HIM WITH YOU AND TEACH HIM HOW TO GATHER THAT HE MAY GET HIS DAILY BREAD AND YOU WILL EARN YOUR REWARD AND RECOMPENSE IN HEAVEN +7018-75788-0007-142: NOW SLEEPING UNDER THESE TREES WERE MANY (APES->IPES) WHICH WHEN THEY SAW US ROSE AND FLED FROM US AND SWARMED UP AMONG THE BRANCHES WHEREUPON MY COMPANIONS BEGAN TO PELT THEM WITH WHAT THEY HAD IN THEIR BAGS AND THE APES FELL TO PLUCKING OF THE FRUIT OF THE TREES AND CASTING THEM AT THE FOLK +7018-75788-0008-143: WE (WEIGHED->WADE) ANCHOR AND SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED SAYING HER PERMITTED SAY +7018-75788-0009-144: WHEN IT WAS THE FIVE HUNDRED AND FIFTY NINTH NIGHT +7018-75788-0010-145: AND CEASED NOT SAILING TILL WE ARRIVED SAFELY AT (BASSORAH->PUSSARA) +7018-75788-0011-146: THERE I ABODE A LITTLE AND THEN WENT ON TO (BAGHDAD->BAGDAD) WHERE I 
ENTERED MY QUARTER AND FOUND MY HOUSE AND (FOREGATHERED->FOR GATHERED) WITH MY FAMILY AND SALUTED MY FRIENDS WHO GAVE ME JOY OF MY SAFE RETURN AND I LAID UP ALL MY GOODS AND VALUABLES IN MY STOREHOUSES +7018-75788-0012-147: AFTER WHICH I RETURNED TO MY OLD MERRY WAY OF LIFE AND FORGOT ALL I HAD SUFFERED IN THE GREAT PROFIT AND GAIN I HAD MADE +7018-75788-0013-148: NEXT MORNING AS SOON AS IT WAS LIGHT HE PRAYED THE DAWN PRAYER AND AFTER BLESSING MOHAMMED THE CREAM OF ALL CREATURES BETOOK HIMSELF TO THE HOUSE OF (SINDBAD->SINBAD) THE SEAMAN AND WISHED HIM A GOOD DAY +7018-75788-0014-149: HERE I FOUND A GREAT SHIP READY FOR SEA AND FULL OF MERCHANTS AND NOTABLES WHO HAD WITH THEM GOODS OF PRICE SO I EMBARKED MY BALES THEREIN +7018-75788-0015-150: (HAPLY->HAPPILY) AMONGST YOU IS ONE RIGHTEOUS WHOSE PRAYERS THE LORD WILL ACCEPT +7018-75788-0016-151: PRESENTLY THE SHIP STRUCK THE MOUNTAIN AND BROKE UP AND ALL AND EVERYTHING ON BOARD OF HER WERE PLUNGED INTO THE SEA +7018-75788-0017-152: BUT (IT BURNETH->AT BERNETH) IN THEIR BELLIES SO THEY CAST IT UP AGAIN AND IT CONGEALETH ON THE SURFACE OF THE WATER WHEREBY ITS COLOR AND QUANTITIES ARE CHANGED AND AT LAST THE WAVES CAST IT ASHORE AND THE TRAVELLERS AND MERCHANTS WHO KNOW IT (COLLECT IT->COLLECTED) AND SELL IT +7018-75788-0018-153: EACH THAT DIED WE WASHED AND SHROUDED IN SOME OF THE CLOTHES AND LINEN CAST ASHORE BY THE TIDES AND AFTER A LITTLE THE REST OF MY FELLOWS PERISHED ONE BY ONE TILL I HAD BURIED THE LAST OF THE PARTY AND (ABODE->A BOAT) ALONE ON THE ISLAND WITH BUT A LITTLE PROVISION LEFT I WHO WAS WONT TO HAVE SO MUCH +7018-75788-0019-154: BUT THERE IS MAJESTY AND THERE IS NO MIGHT SAVE IN ALLAH THE GLORIOUS THE GREAT +7018-75789-0000-155: WHEN IT WAS THE FIVE HUNDRED AND SIXTY FIRST NIGHT +7018-75789-0001-156: THEN (SIGHING->SIGNED) FOR MYSELF I SET TO WORK COLLECTING A NUMBER OF PIECES OF CHINESE AND (COMORIN ALOES->CORMORRA AND ALLIES) WOOD AND I BOUND THEM TOGETHER WITH ROPES FROM THE WRECKAGE THEN I CHOSE OUT FROM THE BROKEN UP (SHIPS->SHIP) STRAIGHT PLANKS OF EVEN SIZE AND FIXED THEM FIRMLY UPON THE (ALOES->ALLIES) WOOD MAKING ME A BOAT RAFT A LITTLE NARROWER THAN THE CHANNEL OF THE STREAM AND I TIED IT TIGHTLY AND FIRMLY AS THOUGH IT WERE NAILED +7018-75789-0002-157: LAND AFTER LAND SHALT THOU (SEEK AND FIND->SEE CONFINED) BUT NO OTHER LIFE ON THY WISH SHALL WAIT FRET NOT THY SOUL IN THY THOUGHTS (O->A) NIGHT (ALL->OR) WOES SHALL END OR SOONER OR LATE +7018-75789-0003-158: I (ROWED->RIDE) MY CONVEYANCE INTO THE PLACE WHICH WAS INTENSELY DARK AND THE CURRENT CARRIED (*->ME) THE RAFT WITH IT DOWN THE UNDERGROUND CHANNEL +7018-75789-0004-159: AND I THREW MYSELF DOWN UPON MY FACE ON THE RAFT BY REASON OF THE NARROWNESS OF THE CHANNEL WHILST THE STREAM CEASED NOT TO CARRY ME ALONG KNOWING NOT NIGHT FROM DAY FOR THE EXCESS OF THE GLOOM WHICH ENCOMPASSED ME ABOUT (AND->IN) MY TERROR AND CONCERN FOR MYSELF LEST I SHOULD PERISH +7018-75789-0005-160: WHEN I AWOKE AT LAST I FOUND MYSELF IN THE LIGHT OF HEAVEN AND OPENING MY EYES I SAW MYSELF IN A BROAD STREAM AND THE RAFT MOORED TO AN ISLAND IN THE MIDST OF A NUMBER OF INDIANS AND ABYSSINIANS +7018-75789-0006-161: BUT I WAS DELIGHTED AT MY ESCAPE FROM THE RIVER +7018-75789-0007-162: WHEN THEY SAW I UNDERSTOOD THEM NOT AND MADE THEM NO ANSWER ONE OF THEM CAME FORWARD AND SAID TO ME IN ARABIC PEACE BE WITH THEE O MY BROTHER +7018-75789-0008-163: O MY BROTHER ANSWERED HE WE ARE HUSBANDMEN AND (TILLERS->TELLERS) OF THE SOIL WHO CAME OUT TO WATER OUR FIELDS AND PLANTATIONS AND FINDING THEE 
ASLEEP ON THIS RAFT LAID HOLD OF IT AND MADE IT FAST BY US AGAINST THOU (SHOULDST->SHOULDEST) AWAKE AT THY LEISURE +7018-75789-0009-164: I ANSWERED FOR ALLAH'S SAKE (O->AM) MY LORD ERE I SPEAK GIVE ME SOMEWHAT TO EAT FOR I AM STARVING AND AFTER ASK ME WHAT THOU WILT +7018-75789-0010-165: WHEN IT WAS THE FIVE HUNDRED AND SIXTY SECOND NIGHT +7018-75789-0011-166: SHE SAID IT HATH (REACHED->RAGED) ME O AUSPICIOUS KING THAT (SINDBAD->SINBAD) THE SEAMAN CONTINUED WHEN I LANDED AND FOUND MYSELF AMONGST THE INDIANS AND ABYSSINIANS AND HAD TAKEN SOME REST THEY CONSULTED AMONG THEMSELVES AND SAID TO ONE ANOTHER THERE IS NO HELP FOR IT BUT WE CARRY HIM WITH US AND PRESENT HIM TO OUR KING THAT HE MAY ACQUAINT HIM WITH HIS ADVENTURES +7018-75789-0012-167: SO I CONSORTED WITH THE CHIEF OF THE ISLANDERS AND THEY PAID ME THE UTMOST RESPECT +7018-75789-0013-168: SO I ROSE WITHOUT STAY OR DELAY AND KISSED THE KING'S HAND AND ACQUAINTED HIM WITH MY LONGING TO SET OUT WITH THE MERCHANTS FOR THAT I PINED AFTER MY PEOPLE AND MINE OWN LAND +7018-75789-0014-169: QUOTH HE THOU ART THINE OWN MASTER YET IF IT BE THY WILL TO ABIDE WITH US (ON OUR->HONOUR) HEAD AND EYES BE IT FOR THOU GLADDENEST US WITH THY COMPANY +7018-75789-0015-170: BY ALLAH O MY LORD ANSWERED I THOU HAST INDEED OVERWHELMED ME WITH THY FAVOURS AND WELL DOINGS BUT I WEARY FOR A SIGHT OF MY FRIENDS AND FAMILY AND NATIVE COUNTRY +7018-75789-0016-171: THEN I TOOK LEAVE OF HIM AND OF ALL MY INTIMATES AND ACQUAINTANCES IN THE ISLAND AND EMBARKED WITH THE MERCHANTS AFORESAID +7018-75789-0017-172: HE ASKED ME WHENCE THEY CAME AND I SAID TO HIM BY ALLAH (O->A) COMMANDER OF THE FAITHFUL I KNOW NOT THE NAME OF THE CITY NOR THE WAY THITHER +7018-75789-0018-173: FOR STATE PROCESSIONS A THRONE IS SET FOR HIM UPON A HUGE ELEPHANT ELEVEN CUBITS HIGH AND UPON THIS HE SITTETH HAVING HIS GREAT LORDS AND OFFICERS AND GUESTS STANDING IN TWO RANKS ON HIS RIGHT HAND AND ON HIS LEFT +7018-75789-0019-174: HIS LETTER HATH SHOWN ME THIS AND AS FOR THE MIGHTINESS OF HIS DOMINION THOU HAST TOLD US WHAT THOU HAST (EYE->I) WITNESSED +7018-75789-0020-175: PRESENTLY MY FRIENDS CAME TO ME AND I DISTRIBUTED PRESENTS AMONG MY FAMILY AND GAVE ALMS AND LARGESSE AFTER WHICH I YIELDED MYSELF TO JOYANCE AND ENJOYMENT MIRTH AND (MERRY MAKING->MERRYMAKING) AND FORGOT ALL THAT I HAD SUFFERED +7018-75789-0021-176: SUCH THEN O MY BROTHERS IS THE HISTORY OF WHAT (BEFEL->BEFELL) ME IN MY SIXTH VOYAGE AND TO MORROW INSHALLAH +7018-75789-0022-177: I WILL TELL YOU THE STORY OF MY SEVENTH AND LAST VOYAGE WHICH IS STILL MORE WONDROUS AND MARVELLOUS THAN THAT OF THE FIRST SIX +7018-75789-0023-178: WHEN IT WAS THE FIVE HUNDRED AND SIXTY THIRD NIGHT +7018-75789-0024-179: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN (SINDBAD->SINBAD) THE SEAMAN HAD (RELATED->RELIGHTED) THE HISTORY OF WHAT (BEFEL->BEFELL) HIM IN HIS SIXTH VOYAGE AND ALL THE COMPANY HAD DISPERSED (SINDBAD->SINBAD) THE LANDSMAN WENT HOME AND SLEPT AS OF WONT +7018-75789-0025-180: THE SEVENTH VOYAGE OF (SINDBAD->SINBAD) THE (SEAMAN->SALMON) +7018-75789-0026-181: (KNOW->NO) O COMPANY THAT AFTER MY RETURN FROM MY SIXTH VOYAGE WHICH BROUGHT ME ABUNDANT PROFIT I RESUMED MY FORMER LIFE (IN->AND) ALL POSSIBLE JOYANCE AND ENJOYMENT AND MIRTH AND MAKING MERRY DAY AND NIGHT AND I TARRIED SOME TIME IN THIS SOLACE AND SATISFACTION TILL MY SOUL BEGAN ONCE MORE TO LONG TO SAIL THE SEAS AND SEE FOREIGN COUNTRIES (AND->IN) COMPANY WITH MERCHANTS AND (HEAR->HERE) NEW THINGS +7018-75789-0027-182: SO HAVING MADE UP MY MIND I PACKED UP IN BALES A 
QUANTITY OF PRECIOUS STUFFS SUITED FOR SEA TRADE AND REPAIRED WITH THEM FROM BAGHDAD CITY TO (BASSORAH->BASSORA) TOWN WHERE I FOUND A SHIP READY FOR SEA AND IN HER A COMPANY OF CONSIDERABLE MERCHANTS +7018-75789-0028-183: BUT THE CAPTAIN AROSE AND (TIGHTENING->TIGHTENED IN) HIS GIRDLE TUCKED UP HIS SKIRTS AND AFTER TAKING REFUGE WITH ALLAH FROM SATAN THE (STONED CLOMB->STONE CLIMBED) TO THE MAST HEAD WHENCE HE LOOKED OUT RIGHT AND LEFT AND GAZING AT THE PASSENGERS AND CREW FELL TO (BUFFETING->BUFFET IN) HIS FACE AND PLUCKING OUT HIS BEARD +7018-75789-0029-184: THIS HE (SET->SAID) IN A SAUCER WETTED WITH A LITTLE WATER AND AFTER WAITING A SHORT TIME SMELT AND TASTED IT AND THEN HE TOOK OUT OF THE CHEST A BOOKLET WHEREIN HE READ (AWHILE->A WHILE) AND SAID WEEPING KNOW O YE PASSENGERS THAT IN THIS BOOK IS A MARVELLOUS MATTER DENOTING THAT WHOSO (COMETH HITHER->COME THITHER) SHALL SURELY DIE WITHOUT HOPE OF ESCAPE FOR THAT THIS OCEAN IS CALLED THE SEA OF THE CLIME OF THE KING WHEREIN IS (THE->A) SEPULCHRE OF OUR LORD SOLOMON SON OF DAVID ON BOTH BE PEACE +7018-75789-0030-185: A SECOND FISH (MADE->READ) ITS APPEARANCE (THAN->AND) WHICH WE HAD SEEN (NAUGHT->NOUGHT) MORE MONSTROUS +7018-75789-0031-186: WHEN SUDDENLY A VIOLENT SQUALL OF WIND AROSE AND SMOTE THE SHIP WHICH ROSE OUT OF THE WATER AND SETTLED UPON A GREAT REEF THE HAUNT OF SEA MONSTERS WHERE IT BROKE UP AND FELL ASUNDER INTO PLANKS AND ALL AND EVERYTHING ON BOARD WERE PLUNGED INTO THE SEA +7105-2330-0000-2310: UNFORTUNATELY THERE COULD BE NO DOUBT (OR->OUR) MISCONCEPTION AS (TO PLATTERBAFF'S->THE PLATTERBATH'S) GUILT +7105-2330-0001-2311: HE HAD NOT ONLY (PLEADED->PLAYED IT) GUILTY BUT HAD EXPRESSED HIS INTENTION OF REPEATING HIS ESCAPADE IN OTHER DIRECTIONS AS SOON AS CIRCUMSTANCES PERMITTED THROUGHOUT THE TRIAL HE WAS BUSY EXAMINING A SMALL MODEL OF THE FREE TRADE HALL IN MANCHESTER +7105-2330-0002-2312: (THE JURY->VERY CHEERY) COULD NOT POSSIBLY FIND THAT THE PRISONER HAD NOT DELIBERATELY AND INTENTIONALLY BLOWN UP THE ALBERT HALL THE QUESTION WAS COULD THEY FIND ANY (EXTENUATING->EXTINUATING) CIRCUMSTANCES WHICH WOULD PERMIT OF AN ACQUITTAL +7105-2330-0003-2313: OF COURSE ANY SENTENCE (WHICH->REACHED) THE LAW MIGHT FEEL COMPELLED TO INFLICT WOULD BE FOLLOWED BY AN IMMEDIATE PARDON BUT IT WAS HIGHLY DESIRABLE FROM THE (GOVERNMENT'S POINT OF->GOVERNMENT SPINTER) VIEW THAT THE NECESSITY FOR SUCH AN EXERCISE OF CLEMENCY SHOULD NOT ARISE +7105-2330-0004-2314: (A HEADLONG->I HAD LONG) PARDON (ON->AND) THE EVE OF A (BYE ELECTION->BILL) WITH THREATS OF A HEAVY VOTING (DEFECTION->AFFECTION) IF IT WERE WITHHELD OR EVEN DELAYED WOULD NOT NECESSARILY BE A SURRENDER BUT IT WOULD LOOK LIKE ONE +7105-2330-0005-2315: HENCE (THE->THEIR) ANXIETY IN THE CROWDED COURT AND IN THE LITTLE GROUPS GATHERED ROUND THE TAPE MACHINES IN WHITEHALL AND (DOWNING->DAWNING) STREET (AND OTHER->ANOTHER) AFFECTED CENTRES +7105-2330-0006-2316: (THE JURY RETURNED->THEIR CHEERY RETURN) FROM CONSIDERING THEIR VERDICT THERE WAS A FLUTTER AN EXCITED MURMUR A (DEATHLIKE->DEATH LIKE) HUSH +7105-2330-0007-2317: THE (FOREMAN->FOUR MEN) DELIVERED HIS MESSAGE +7105-2330-0008-2318: THE (JURY->CHERRY) FIND THE PRISONER GUILTY OF BLOWING UP THE ALBERT HALL +7105-2330-0009-2319: (THE JURY->THEY JERRY) WISH TO ADD A (RIDER->WRITER) DRAWING ATTENTION TO THE FACT THAT A (BY ELECTION->BILL) IS (PENDING->SPENDING) IN THE PARLIAMENTARY DIVISION OF NEMESIS ON HAND +7105-2330-0010-2320: AND (MAY->MADE) THE (LORD->LARD) HAVE MERCY ON THE (POLL->POLE) A (JUNIOR COUNSEL->GENIOR CONSUL) 
EXCLAIMED IRREVERENTLY +7105-2330-0011-2321: FIFTEEN HUNDRED SAID THE PRIME MINISTER WITH A SHUDDER IT'S TOO HORRIBLE TO THINK OF +7105-2330-0012-2322: OUR MAJORITY LAST TIME WAS ONLY A THOUSAND AND SEVEN +7105-2330-0013-2323: SEVEN THIRTY AMENDED THE PRIME MINISTER WE MUST AVOID ANY APPEARANCE OF PRECIPITANCY +7105-2330-0014-2324: NOT LATER THAN SEVEN THIRTY THEN SAID THE CHIEF (ORGANISER->ORGANIZER) I HAVE PROMISED THE AGENT DOWN THERE THAT HE SHALL BE ABLE TO DISPLAY POSTERS ANNOUNCING PLATTERBAFF IS OUT BEFORE THE (POLL->POLE) OPENS +7105-2330-0015-2325: HE SAID IT WAS (OUR->HER) ONLY CHANCE OF GETTING A TELEGRAM (RADPROP IS IN->RED RAPPA'S INN) TO NIGHT +7105-2330-0016-2326: (DESPITE->THIS SPITE) THE EARLINESS OF THE HOUR A SMALL CROWD HAD GATHERED IN THE STREET OUTSIDE AND THE HORRIBLE MENACING (TRELAWNEY->TREEONER) REFRAIN OF THE FIFTEEN HUNDRED VOTING MEN CAME IN A STEADY MONOTONOUS CHANT +7105-2330-0017-2327: HE EXCLAIMED WON'T GO +7105-2330-0018-2328: HE SAYS HE NEVER HAS LEFT PRISON WITHOUT A (BRASS BAND->BREASTPAND) TO PLAY HIM OUT AND HE'S NOT GOING TO GO WITHOUT ONE NOW +7105-2330-0019-2329: SAID THE PRIME MINISTER WE CAN HARDLY BE SUPPOSED TO SUPPLY A (RELEASED->RELISSE) PRISONER WITH A BRASS BAND HOW ON EARTH COULD WE (DEFEND IT->DEFENDED) ON THE ESTIMATES +7105-2330-0020-2330: (ANYWAY HE->AND AWAY YOU) WON'T GO UNLESS HE HAS A BAND +7105-2330-0021-2331: (POLL OPENS IN->PAUL OPENED THIN) FIVE MINUTES +7105-2330-0022-2332: (IS PLATTERBAFF->HIS FURTHER BATH) OUT YET +7105-2330-0023-2333: IN HEAVEN'S NAME WHY +7105-2330-0024-2334: THE CHIEF (ORGANISER->ORGANIZER) RANG OFF +7105-2330-0025-2335: THIS IS NOT A MOMENT FOR STANDING ON DIGNITY HE OBSERVED BLUNTLY (MUSICIANS->MESSIE'S) MUST BE SUPPLIED AT ONCE +7105-2330-0026-2336: CAN'T YOU GET (A->US) STRIKE PERMIT ASKED THE (ORGANISER->ORGANIZER) +7105-2330-0027-2337: I'LL TRY SAID THE HOME SECRETARY AND WENT TO THE TELEPHONE +7105-2330-0028-2338: EIGHT O'CLOCK STRUCK THE CROWD OUTSIDE CHANTED WITH AN INCREASING VOLUME OF SOUND (WILL VOTE->REVOTE) THE OTHER WAY +7105-2330-0029-2339: (A TELEGRAM WAS->I TELEGRAMAS) BROUGHT IN +7105-2330-0030-2340: IT WAS FROM THE CENTRAL (COMMITTEE->COMEDY) ROOMS AT NEMESIS +7105-2330-0031-2341: WITHOUT A BAND HE WOULD NOT GO AND THEY HAD NO (BAND->BEND) +7105-2330-0032-2342: A QUARTER PAST TEN HALF PAST +7105-2330-0033-2343: HAVE YOU ANY BAND INSTRUMENTS OF AN EASY NATURE TO PLAY +7105-2330-0034-2344: DEMANDED THE CHIEF (ORGANISER->ORGANIZER) OF THE PRISON GOVERNOR DRUMS (CYMBALS->SYMBOLS) THOSE SORT OF THINGS +7105-2330-0035-2345: THE (WARDERS->ORDERS) HAVE A PRIVATE BAND OF THEIR OWN SAID THE GOVERNOR BUT OF COURSE I COULDN'T ALLOW THE MEN THEMSELVES +7105-2330-0036-2346: (LEND US->BLENDEST) THE INSTRUMENTS SAID THE CHIEF (ORGANISER->ORGANIZER) +7105-2330-0037-2347: (THE->THOUGH) POPULAR SONG OF THE MOMENT REPLIED THE AGITATOR AFTER A MOMENT'S REFLECTION +7105-2330-0038-2348: IT WAS A TUNE THEY HAD ALL HEARD HUNDREDS OF TIMES SO THERE (WAS->IS) NO DIFFICULTY IN TURNING OUT A PASSABLE IMITATION OF IT TO THE IMPROVISED (STRAINS->TRAINS) OF I (DIDN'T->DON'T) WANT TO DO IT THE PRISONER STRODE FORTH TO FREEDOM +7105-2330-0039-2349: THE WORD OF THE (SONG->SUN) HAD REFERENCE IT WAS UNDERSTOOD (TO THE->THAT) INCARCERATING GOVERNMENT AND NOT TO THE DESTROYER OF THE ALBERT HALL +7105-2330-0040-2350: (THE SEAT->THIS HEAT) WAS LOST AFTER ALL BY A NARROW (MAJORITY->MATURITY) +7105-2330-0041-2351: THE LOCAL TRADE UNIONISTS TOOK OFFENCE AT THE FACT OF (CABINET MINISTERS->CABINETS) HAVING PERSONALLY ACTED AS 
(STRIKE BREAKERS->STRIKEBREAKERS) AND EVEN THE RELEASE OF (PLATTERBAFF->PLATTERBUFF) FAILED TO PACIFY THEM +7105-2340-0000-2272: WITH THAT NOTORIOUS FAILING OF HIS HE WAS NOT (THE->A) SORT OF PERSON ONE WANTED IN ONE'S HOUSE +7105-2340-0001-2273: WELL THE FAILING STILL EXISTS DOESN'T IT SAID (HER->THE) HUSBAND OR DO YOU SUPPOSE A REFORM OF CHARACTER IS ENTAILED ALONG WITH THE ESTATE +7105-2340-0002-2274: BESIDES (CYNICISM APART HIS->CYS IN A PART IS) BEING RICH (WILL->WE'LL) MAKE A DIFFERENCE IN THE WAY PEOPLE WILL LOOK AT HIS (FAILING->FEELING) +7105-2340-0003-2275: WHEN A MAN IS ABSOLUTELY WEALTHY NOT MERELY WELL TO DO ALL SUSPICION OF (SORDID->SARDID) MOTIVE (NATURALLY->NATURAL) DISAPPEARS THE THING BECOMES MERELY A (TIRESOME->PARASAN) MALADY +7105-2340-0004-2276: (WILFRID PIGEONCOTE->WILFRED DIJIN CODE) HAD SUDDENLY BECOME HEIR TO HIS UNCLE SIR WILFRID (PIGEONCOTE->PIGEON COAT) ON THE DEATH OF HIS COUSIN MAJOR (WILFRID PIGEONCOTE->WILFRED PIGEONOTE) WHO HAD SUCCUMBED (TO->*) THE (*->DAY) AFTER EFFECTS OF (A POLO->APOLLO) ACCIDENT +7105-2340-0005-2277: (A WILFRID PIGEONCOTE->OF WILFRED BEECH AND COURT) HAD COVERED HIMSELF WITH (HONOURS->HONORS) IN THE COURSE OF MARLBOROUGH'S CAMPAIGNS AND THE NAME (WILFRID->LOYAL FRED) HAD BEEN (A BAPTISMAL->ABOVE THE SMALL) WEAKNESS IN THE FAMILY EVER SINCE THE NEW HEIR TO THE FAMILY DIGNITY AND ESTATES WAS A YOUNG MAN OF ABOUT FIVE AND TWENTY WHO WAS KNOWN MORE BY (REPUTATION->REPETITION) THAN BY PERSON TO (A WIDE->AVIDE) CIRCLE OF COUSINS AND KINSFOLK +7105-2340-0006-2278: AND THE REPUTATION WAS AN UNPLEASANT ONE +7105-2340-0007-2279: FROM HIS LATE (SCHOOLDAYS->SCHOOL DAYS) ONWARD HE HAD BEEN POSSESSED BY AN ACUTE AND OBSTINATE FORM OF (KLEPTOMANIA->CLEFTOMANIA) HE HAD THE ACQUISITIVE INSTINCT OF THE COLLECTOR WITHOUT ANY OF THE COLLECTOR'S DISCRIMINATION +7105-2340-0008-2280: (THE->THIS) SEARCH USUALLY (PRODUCED->PRODUCE) A LARGE AND VARIED YIELD THIS IS FUNNY SAID PETER (PIGEONCOTE TO->PIGEON BOLTO) HIS WIFE (SOME->THEM) HALF HOUR AFTER THEIR CONVERSATION (HERE'S->HERE IS) A TELEGRAM FROM (WILFRID->MILFRED) SAYING HE'S PASSING THROUGH HERE IN HIS MOTOR AND WOULD LIKE TO STOP AND PAY US HIS RESPECTS +7105-2340-0009-2281: (SIGNED WILFRID PIGEONCOTE->SIGN WILFRED PEACH AND CO) +7105-2340-0010-2282: I SUPPOSE (HE'S->THIS) BRINGING US A PRESENT FOR THE SILVER WEDDING GOOD GRACIOUS +7105-2340-0011-2283: THE TALK FLITTED NERVOUSLY AND HURRIEDLY FROM ONE IMPERSONAL TOPIC TO ANOTHER +7105-2340-0012-2284: IN THE DRAWING ROOM AFTER DINNER THEIR NERVOUSNESS AND AWKWARDNESS INCREASED +7105-2340-0013-2285: OH WE HAVEN'T SHOWN YOU THE SILVER WEDDING PRESENTS SAID MISSUS PETER SUDDENLY AS THOUGH STRUCK BY A BRILLIANT IDEA FOR ENTERTAINING THE GUEST HERE THEY ALL ARE +7105-2340-0014-2286: SUCH NICE (USEFUL GIFTS->FORGIFTS) A FEW (DUPLICATES->DEPLICATES) OF COURSE +7105-2340-0015-2287: SEVEN (CREAM->QUEEN) JUGS PUT IN PETER +7105-2340-0016-2288: WE FEEL THAT WE MUST LIVE (ON CREAM->UNCREAM) FOR THE REST OF OUR LIVES +7105-2340-0017-2289: OF COURSE SOME OF THEM CAN BE CHANGED +7105-2340-0018-2290: I PUT IT DOWN BY THE (CLARET JUG->CLARGA) SAID (WILFRID->WILFRIED) BUSY WITH ANOTHER OBJECT +7105-2340-0019-2291: (VIGILANCE->EACH A LENS) WAS NOT COMPLETELY CROWNED WITH A SENSE OF VICTORY +7105-2340-0020-2292: AFTER THEY HAD SAID GOOD NIGHT TO THEIR VISITOR MISSUS PETER EXPRESSED HER CONVICTION THAT HE HAD TAKEN SOMETHING +7105-2340-0021-2293: HOW ON EARTH ARE WE TO KNOW SAID PETER THE MEAN PIG HASN'T BROUGHT US A PRESENT AND I'M HANGED IF HE SHALL CARRY ONE OFF 
+7105-2340-0022-2294: (IT'S->IS) THE ONLY THING TO DO +7105-2340-0023-2295: (WILFRID->WILFRED) WAS LATE IN COMING DOWN TO BREAKFAST AND HIS MANNER SHOWED PLAINLY THAT SOMETHING WAS AMISS +7105-2340-0024-2296: IT'S (*->AND) AN UNPLEASANT THING TO HAVE TO SAY HE BLURTED OUT PRESENTLY BUT I'M AFRAID YOU MUST HAVE A THIEF AMONG YOUR SERVANTS SOMETHING'S BEEN TAKEN OUT OF MY (PORTMANTEAU->PARTNENT TOE) +7105-2340-0025-2297: IT WAS A LITTLE PRESENT FROM MY MOTHER AND MYSELF FOR YOUR SILVER WEDDING +7105-2340-0026-2298: I SHOULD HAVE GIVEN IT TO YOU LAST NIGHT AFTER DINNER ONLY IT HAPPENED TO BE A (CREAM->QUEEN) JUG AND YOU SEEMED ANNOYED AT HAVING SO MANY DUPLICATES SO I FELT RATHER AWKWARD (ABOUT->OF A) GIVING YOU ANOTHER +7105-2340-0027-2299: (THE->THIS) SNATCHER HAD BEEN AN ORPHAN (THESE->THIS) MANY YEARS +7105-2340-0028-2300: (LADY ERNESTINE PIGEONCOTE->LAY THE ERNESTON BEECH AND COLT) HIS MOTHER MOVED IN CIRCLES WHICH WERE ENTIRELY BEYOND THEIR COMPASS OR AMBITIONS AND THE (SON->SUN) WOULD PROBABLY ONE DAY BE AN AMBASSADOR +7105-2340-0029-2301: HUSBAND AND WIFE LOOKED BLANKLY AND DESPERATELY AT ONE ANOTHER +7105-2340-0030-2302: IT WAS MISSUS PETER WHO ARRIVED FIRST AT AN INSPIRATION HOW DREADFUL TO THINK THERE ARE THIEVES IN THE HOUSE WE KEEP THE DRAWING ROOM LOCKED UP AT NIGHT OF COURSE BUT ANYTHING MIGHT BE CARRIED OFF WHILE WE ARE AT BREAKFAST +7105-2340-0031-2303: SHE ROSE AND WENT OUT HURRIEDLY AS THOUGH TO ASSURE HERSELF THAT THE DRAWING ROOM WAS NOT BEING STRIPPED OF ITS SILVERWARE AND RETURNED A MOMENT LATER BEARING A CREAM (JUG->CHUG) IN HER HANDS +7105-2340-0032-2304: THE (PIGEONCOTES->PIGEON CORDS) HAD TURNED PALER THAN EVER MISSUS PETER HAD A FINAL INSPIRATION +7105-2340-0033-2305: (PETER->EITHER) DASHED OUT OF THE ROOM WITH GLAD RELIEF HE HAD LIVED SO LONG DURING THE LAST FEW MINUTES THAT A GOLDEN WEDDING SEEMED WITHIN MEASURABLE DISTANCE +7105-2340-0034-2306: MISSUS (PETER->BEATER) TURNED TO HER GUEST WITH CONFIDENTIAL (COYNESS->KINDNESS) +7105-2340-0035-2307: (PETER'S->PETER IS) LITTLE WEAKNESS (IT RUNS->EACH ONES) IN THE FAMILY GOOD LORD +7105-2340-0036-2308: DO YOU MEAN TO SAY HE'S A (KLEPTOMANIAC->CLAPTOMANIA) LIKE COUSIN SNATCHER +7105-2340-0037-2309: (BRAVE->PRETTY) LITTLE WOMAN SAID PETER WITH A GASP OF RELIEF I COULD NEVER HAVE DONE IT +7902-96591-0000-0: (I AM->AND) FROM THE CUTTER LYING OFF THE COAST +7902-96591-0001-1: DON'T CRY HE SAID I WAS OBLIGED TO COME +7902-96591-0002-2: AND AND YOU HAVE NOT FOUND OUT ANYTHING CAME IN QUICK FRIGHTENED TONES +7902-96591-0003-3: I WISH YOU WOULD BELIEVE ME THAT I AM IN AS GREAT TROUBLE ABOUT IT AS YOU ARE +7902-96591-0004-4: THAT MY FATHER SIR RISDON (GRAEME HAS->GRAHAME) SMUGGLED GOODS HERE +7902-96591-0005-5: HE COULD NOT HELP IT HE HATES THE SMUGGLERS YOU SHALL NOT TELL +7902-96591-0006-6: PRAY PRAY SAY YOU WILL NOT (ARCHY->ARCHIE) WAS SILENT +7902-96591-0007-7: THEN AS (ARCHY->ARCHIE) STOOD IN THE DARK LITERALLY AGHAST WITH ASTONISHMENT HE HEARD THE FAINT RUSTLING ONCE MORE AND AGAIN ALL WAS SILENT +7902-96591-0008-8: HE LAUGHED BUT IT WAS A CURIOUS KIND OF LAUGH FULL OF VEXATION INJURED (AMOUR PROPRE->AMORE A PROPER) AS THE FRENCH CALL OUR LOVE OF OUR OWN DIGNITY OF WHICH (ARCHIBALD RAYSTOKE->ARQUEBAUL RAY STROKE) IN THE FULL FLUSH OF HIS YOUNG BELIEF IN HIS IMPORTANCE AS A BRITISH OFFICER HAD A PRETTY GOOD STOCK +7902-96591-0009-9: (IT->AND) ALL COMES OF DRESSING UP IN THIS STUPID WAY LIKE A ROUGH FISHER LAD +7902-96591-0010-10: COLD WATER CAME ON THIS IDEA DIRECTLY AS HE RECALLED THE FACT THAT THE DARKNESS WAS INTENSE 
AND CELIA COULD NOT HAVE SEEN HIM +7902-96591-0011-11: I'LL SOON SHOW THEM THAT I AM NOT GOING TO BE PLAYED WITH +7902-96591-0012-12: FOR IT SUDDENLY OCCURRED TO HIM THAT HE WAS NOT ONLY A PRISONER BUT A PRISONER IN THE POWER OF A VERY RECKLESS SET OF PEOPLE WHO WOULD STOP AT NOTHING +7902-96591-0013-13: NO HE THOUGHT TO HIMSELF I DON'T BELIEVE THEY WOULD KILL ME BUT THEY WOULD KNOCK ME ABOUT +7902-96591-0014-14: THE (KICK HE->KICKIE) HAD RECEIVED WAS A FORETASTE OF WHAT HE MIGHT EXPECT AND AFTER A LITTLE CONSIDERATION HE CAME TO THE CONCLUSION THAT HIS DUTY WAS TO ESCAPE AND GET BACK TO THE CUTTER AS QUICKLY AS HE COULD +7902-96591-0015-15: TO DO THIS HE MUST SCHEME LIE HID TILL MORNING (THEN->THAN) MAKE FOR THE NEAREST POINT AND SIGNAL FOR HELP UNLESS A BOAT'S CREW WERE ALREADY SEARCHING FOR HIM HOW TO ESCAPE +7902-96591-0016-16: THE WINDOW WAS BARRED BUT HE WENT TO IT AND TRIED THE BARS ONE BY ONE TO FIND THEM ALL SOLIDLY FITTED INTO THE STONE SILL +7902-96591-0017-17: NEXT MOMENT AS HE FELT HIS WAY ABOUT HIS HAND TOUCHED AN OLD FASHIONED MARBLE MANTELPIECE FIREPLACE CHIMNEY +7902-96591-0018-18: YES IF OTHER WAYS FAILED HE COULD ESCAPE UP THE CHIMNEY +7902-96591-0019-19: NO THAT WAS TOO BAD HE (COULD NOT->CANNOT) DO THAT +7902-96591-0020-20: SYMPATHY AND PITY FOR THE DWELLERS IN THE (HOZE->HOSE) WERE COMPLETELY GONE NOW AND HE SET HIS TEETH FAST AND MENTALLY CALLED HIMSELF A WEAK IDIOT FOR EVER THINKING ABOUT SUCH PEOPLE +7902-96591-0021-21: A NARROW TABLE AGAINST THE WALL IN TWO PLACES +7902-96591-0022-22: HE WENT AND TRIED TO FORCE HIS HEAD THROUGH RECALLING AS HE DID THAT WHERE A PERSON'S HEAD WOULD GO THE REST OF THE BODY WOULD PASS +7902-96591-0023-23: BUT THERE WAS NO CHANCE FOR HIS BODY THERE THE HEAD WOULD NOT GO FIRST +7902-96591-0024-24: A FELLOW WHO WAS SHUT UP IN PRISON FOR LIFE MIGHT DO IT HE SAID BUT NOT IN A CASE LIKE THIS +7902-96592-0000-25: SURE (YOU'VE LOOKED->YOU LOOK) ROUND EVERYWHERE BOY YES FATHER QUITE +7902-96592-0001-26: I'M GOING HOME TO BREAKFAST +7902-96592-0002-27: SHALL I COME (TOO->TO) FATHER NO +7902-96592-0003-28: STOP HERE TILL SIR RISDON COMES DOWN AND TELL HIM I'M VERY SORRY THAT WE SHOULD HAVE CLEARED OUT LAST NIGHT ONLY A BORN FOOL SAW JERRY (NANDY'S->ANDY'S) LOBSTER BOAT COMING INTO THE COVE AND CAME RUNNING TO SAY IT WAS A PARTY FROM THE CUTTER YES FATHER +7902-96592-0004-29: TELL HIM NOT TO BE UNEASY TIS ALL RIGHT AND I'LL HAVE EVERYTHING CLEAR AWAY TO NIGHT +7902-96592-0005-30: THE DULL SOUND OF DEPARTING STEPS AND A LOW WHISTLING SOUND COMING DOWN THROUGH THE SKYLIGHT WINDOW INTO THE CABIN WHERE (ARCHY RAYSTOKE->ARCHIE RAYSTROKE) LAY WITH HIS HEAVY EYELIDS PRESSED DOWN BY SLEEP +7902-96592-0006-31: WHAT A QUEER DREAM HE THOUGHT TO HIMSELF +7902-96592-0007-32: BUT HOW QUEER FOR MISTER (GURR->GIRD) TO BE TALKING LIKE THAT TO ANDREW (TEAL->TEALE) THE BOY WHO (HELPED->HELPS) THE COOK +7902-96592-0008-33: AND WHY DID ANDY CALL MISTER (GURR FATHER->GERFATHER) +7902-96592-0009-34: THERE WAS AN INTERVAL OF THINKING OVER THIS (KNOTTY->NAUGHTY) QUESTION DURING WHICH THE LOW WHISTLING WENT ON +7902-96592-0010-35: AND (I'M HUNGRY->UNHUNGRY) TOO (TIME I->TELL IT) WAS UP I SUPPOSE +7902-96592-0011-36: NO HE WAS NOT DREAMING FOR HE WAS LOOKING OUT ON THE SEA OVER WHICH A FAINT MIST HUNG LIKE WREATHS OF SMOKE +7902-96592-0012-37: WHAT DID THEY SAY FALSE ALARM TELL SIR (RISDON->RISDEN) THEY WOULD CLEAR ALL AWAY TO NIGHT SEE IF ANYTHING HAD BEEN LEFT ABOUT LOBSTER BOAT +7902-96592-0013-38: ONCE OUT OF THAT ROOM HE COULD (RAN->RUN) AND BY DAYLIGHT THE SMUGGLERS 
(DARE->DARED) NOT HUNT HIM DOWN +7902-96592-0014-39: OH THOSE BARS HE MENTALLY EXCLAIMED AND HE WAS ADVANCING (TOWARD->TOWARDS) THEM WHEN JUST AS HE DREW NEAR THERE WAS A RUSTLING NOISE UNDER THE WINDOW A COUPLE OF HANDS SEIZED THE BARS THERE WAS A SCRATCHING OF BOOT TOES AGAINST STONE WORK AND RAM'S FACE APPEARED TO GAZE INTO THE ROOM BY INTENTION BUT INTO THE ASTONISHED COUNTENANCE OF THE YOUNG MIDSHIPMAN INSTEAD +7902-96592-0015-40: (RAM->ROOM) WAS THE FIRST TO RECOVER FROM HIS SURPRISE +7902-96592-0016-41: HULLO HE SAID WHO ARE YOU +7902-96592-0017-42: GO ROUND AND OPEN THE DOOR I WAS SHUT IN LAST NIGHT BY MISTAKE +7902-96592-0018-43: I SAW YOU LAST NIGHT AND WONDERED WHOSE BOY (YOU->HE) WAS +7902-96592-0019-44: IT WAS (YOU->YOUR) FATHER KICKED FOR SHIRKING AND MY WELL I HARDLY KNOWED YOU +7902-96592-0020-45: NONSENSE +7902-96592-0021-46: WON'T DO SAID RAM GRINNING +7902-96592-0022-47: THINK I DON'T KNOW YOU MISTER (ORFICER->ORFASTER) +7902-96592-0023-48: (WON'T->WELL) DO SAID RAM QUICKLY I KNOW YOU +7902-96592-0024-49: (BEEN PLAYING->COMPLYING) THE SPY THAT'S WHAT YOU'VE BEEN DOING WHO LOCKED YOU IN +7902-96592-0025-50: (ARCHY->ARCHIE) STEPPED BACK TO THE DOOR LISTENING BUT THERE WAS NOT A SOUND +7902-96592-0026-51: HE HAS GONE TO GIVE THE ALARM THOUGHT THE PRISONER AND HE LOOKED EXCITEDLY ROUND FOR A WAY OF ESCAPE +7902-96592-0027-52: NOTHING BUT THE CHIMNEY PRESENTED ITSELF +7902-96592-0028-53: A HAPPY INSPIRATION HAD COME AND PLACING ONE HAND UPON HIS (BREAST->CHEST) HE THRUST IN THE OTHER GAVE A TUG AND DREW OUT HIS LITTLE CURVED DIRK GLANCED AT THE EDGE RAN TO THE WINDOW AND BEGAN TO CUT (AT->IT) ONE OF THE BARS (LABOUR->LABOR) IN VAIN +7902-96592-0029-54: HE DIVIDED THE PAINT AND PRODUCED A FEW SQUEAKS AND GRATING SOUNDS AS HE (REALISED->REALIZED) THAT THE ATTEMPT WAS MADNESS +7902-96592-0030-55: THE RESULT WAS NOT VERY SATISFACTORY BUT SUFFICIENTLY SO TO MAKE HIM ESSAY THE BAR OF THE WINDOW ONCE MORE PRODUCING A GRATING (EAR ASSAILING->IRRES SELLING) SOUND AS HE FOUND THAT NOW HE DID MAKE A LITTLE IMPRESSION SO LITTLE THOUGH THAT THE PROBABILITY WAS IF HE KEPT ON WORKING WELL FOR TWENTY FOUR HOURS HE WOULD NOT GET THROUGH +7902-96592-0031-56: BUT AT THE END OF FIVE MINUTES HE STOPPED AND THRUST BACK THE DIRK INTO ITS SHEATH +7902-96592-0032-57: NO I CAN'T PART WITH THAT HA (HA HA->*) LAUGHED THE BOY JEERINGLY +7902-96592-0033-58: BUT (I'LL->*) YES I'LL GIVE YOU A GUINEA IF YOU WILL LET ME OUT +7902-96592-0034-59: (GUINEA SAID->GUINEAS OF) THE BOY THINK (I'D->I'LL) DO IT FOR A GUINEA WELL THEN (TWO->TOO) +7902-96592-0035-60: BE QUICK THERE'S A GOOD FELLOW I WANT TO GET AWAY AT ONCE +7902-96592-0036-61: NOT YOU ONLY A SHAM +7902-96592-0037-62: WHY YOUR CLOTHES DON'T FIT YOU AND YOUR CAP'S PUT ON ALL (SKEW REW->SCARO) +7902-96592-0038-63: NEVER MIND ABOUT THAT LET ME OUT OF THIS PLACE +7902-96592-0039-64: I TOLD YOU A FISHER BOY CRIED (ARCHY->ARCHIE) IMPATIENTLY BUT TRYING NOT TO OFFEND HIS VISITOR WHO POSSESSED THE POWER OF CONFERRING FREEDOM BY SPEAKING SHARPLY +7902-96592-0040-65: NOT YOU LOOK LIKE A WILD BEAST IN A CAGE LIKE A MONKEY YOU INSOLENT +7902-96592-0041-66: (ARCHY->ARCHIE) CHECKED HIMSELF AND THE BOY LAUGHED +7902-96592-0042-67: IT WAS YOUR TURN YESTERDAY IT'S MINE TO DAY WHAT A GAME +7902-96592-0043-68: YOU LAUGHED AND (FLEERED->FLARED) AT ME WHEN I WAS ON THE CUTTER'S DECK +7902-96592-0044-69: I SAY YOU DO LOOK (*->LIKE) A (RUM UN->ROMAN) JUST LIKE A BIG MONKEY IN A SHOW +7902-96592-0045-70: RAM SHOWED HIS WHITE TEETH AS HE BURST OUT WITH A LONG LOW FIT OF LAUGHTER 
+7902-96592-0046-71: YOU (ROPE'S END->HOPES AND) ME HE SAID +7902-96592-0047-72: WHY I COULD TIE YOU UP IN A KNOT AND HEAVE YOU OFF THE CLIFF ANY DAY WHAT A GAME +7902-96592-0048-73: BIT OF A (MIDDY->MIDDI) FED ON (*->A) SALT TACK AND (WEEVILLY->WEEVILY) BISCUIT TALK OF GIVING ME (ROPE'S END->ROPES AND) +7902-96592-0049-74: ONCE MORE WILL YOU COME AND LET ME OUT NO +7902-96592-0050-75: TO HIS ASTONISHMENT THE BOY DID NOT FLINCH BUT THRUST HIS OWN ARMS THROUGH PLACING (THEM->HIM) ABOUT THE MIDDY'S WAIST CLENCHING HIS (HANDS->HAND) BEHIND AND UTTERING A SHARP WHISTLE +7902-96594-0000-76: (SEEMED IN GOOD SPIRITS->SEEMING AT SPEAR'S) LAST NIGHT MISTER (GURR EH->GARR HEY) +7902-96594-0001-77: YES SIR BUT HE MAY TURN UP ON THE CLIFF AT ANY MOMENT +7902-96594-0002-78: YES MEN QUITE READY YES SIR +7902-96594-0003-79: (THAT'S RIGHT->THE THREAT) OF COURSE (WELL ARMED->WILL ALARMED) +7902-96594-0004-80: SOON AS THE SIGNAL COMES WE SHALL PUSH OFF +7902-96594-0005-81: AWKWARD (BIT O->BITTER) COUNTRY SIR SIX MILES ROW BEFORE YOU CAN FIND A PLACE TO LAND +7902-96594-0006-82: SO SHALL WE YET SIR +7902-96594-0007-83: YOU DON'T THINK MISTER (GURR->GREW) THAT THEY WOULD DARE TO INJURE HIM IF HE WAS SO UNLUCKY AS TO BE CAUGHT +7902-96594-0008-84: WELL SIR SAID THE MASTER HESITATING SMUGGLERS ARE SMUGGLERS +7902-96594-0009-85: CERTAINLY SIR SMUGGLERS ARE SMUGGLERS (INDEED->INDE) +7902-96594-0010-86: (BEG->THEY) PARDON SIR DIDN'T MEAN ANY HARM +7902-96594-0011-87: I'M GETTING VERY ANXIOUS ABOUT MISTER (RAYSTOKE->RAYSTROKE) START AT ONCE SIR +7902-96594-0012-88: NO WAIT ANOTHER (*->AND) HALF HOUR +7902-96594-0013-89: VERY (ILL->*) ADVISED THING TO DO +7902-96594-0014-90: (THEN->THAT) I MUST REQUEST THAT YOU WILL NOT MAKE IT AGAIN VERY TRUE +7902-96594-0015-91: (AWK WARD->AWKWARD) MISTER (GURR->GARR) AWKWARD +7902-96594-0016-92: YES SIR OF COURSE +7902-96594-0017-93: SAY (AWK WARD->AWKWARD) IN (*->THE) FUTURE NOT (AWK'ARD->UPWARD) +7902-96594-0018-94: I MEAN (ALL ALONE->OUR OWN) BY MYSELF SIR +7902-96594-0019-95: WHAT FOR THERE (AREN'T A->ARE TO) PUBLIC HOUSE FOR TEN MILES DIDN'T MEAN THAT +7902-96594-0020-96: THEN WHAT DID YOU MEAN SPEAK OUT AND DON'T DO THE DOUBLE SHUFFLE ALL OVER MY CLEAN DECK NO SIR +7902-96594-0021-97: (HOPPING->HAVING) ABOUT (LIKE A->THE GOOD) CAT ON HOT BRICKS +7902-96594-0022-98: NOW THEN WHY DO YOU WANT TO GO ASHORE +7902-96594-0023-99: (BEG->THEY) PARDON DIDN'T MEAN (NOWT->OUT) SIR SAID THE SAILOR TOUCHING HIS FORELOCK +7902-96594-0024-100: YES SIR SAID THE MAN HUMBLY SHALL I GO AT ONCE SIR +7902-96594-0025-101: NO WAIT +7902-96594-0026-102: (KEEP A->HE WAS) SHARP LOOK OUT ON THE CLIFF TO SEE IF MISTER (RAYSTOKE->RAYSTROKE) IS MAKING SIGNALS FOR A BOAT +7902-96594-0027-103: HE SWUNG ROUND WALKED (AFT->OFF) AND BEGAN SWEEPING (THE SHORE->ASHORE) AGAIN WITH HIS GLASS WHILE THE MASTER AND DICK EXCHANGED GLANCES WHICH MEANT A GREAT DEAL +7902-96594-0028-104: AT LAST THE LITTLE LIEUTENANT COULD BEAR THE ANXIETY NO LONGER +7902-96594-0029-105: (PIPE->PEG) AWAY (THE MEN TO->THEM INTO) THAT BOAT THERE HE SAID AND AS THE CREW SPRANG IN +7902-96594-0030-106: (NOW->NO) MISTER GURR HE SAID I'M ONLY GOING TO SAY ONE THING TO YOU IN THE WAY OF INSTRUCTIONS YES SIR +7902-96594-0031-107: BEG PARDON SIR SAID THE MASTER DEPRECATINGLY +7902-96594-0032-108: STEADY MY (LADS->LAD) STEADY CRIED THE MASTER KEEP STROKE AND THEN HE BEGAN TO MAKE PLANS AS TO HIS FIRST PROCEEDINGS ON GETTING ASHORE +7902-96595-0000-109: SAY (MESTER GURR->MISTER GIRK) SAID DICK AFTER ONE OF THESE SEARCHES HE WOULDN'T RUN AWAY WHAT 
+7902-96595-0001-110: MISTER RAYSTOKE SIR DON'T BE A FOOL +7902-96595-0002-111: WHAT (CHUCKED HIM OFF->SAID DE MORVE) YONDER +7902-96595-0003-112: (GURR->GIRK) GLANCED ROUND TO SEE IF THE MEN WERE LOOKING AND THEN SAID RATHER HUSKILY (BUT->BE) KINDLY +7902-96595-0004-113: AH EJACULATED DICK SADLY +7902-96595-0005-114: SAY (MESTER GURR SIR->MISTER GURSER) WHICH THANKFUL I AM (TO->FOR) YOU FOR SPEAKING SO BUT YOU DON'T REALLY THINK AS HE HAS COME TO HARM +7902-96595-0006-115: I HOPE NOT DICK I (HOPE NOT->OPEN IT) BUT SMUGGLERS DON'T STAND AT ANYTHING SOMETIMES +7902-96595-0007-116: I DO ASSURE YOU THERE'S NOTHING HERE BUT WHAT YOU MAY SEE +7902-96595-0008-117: IF (YOU'D->YOU) LET ME FINISH YOU'D KNOW SAID (GURR GRUFFLY->GRIGGLY) ONE OF OUR BOYS IS MISSING SEEN (HIM->EM) UP HERE +7902-96595-0009-118: BOY (BOUT->ABOUT) SEVENTEEN WITH A RED CAP NO SIR INDEED I'VE NOT +7902-96595-0010-119: DON'T KNOW AS HE HAS BEEN SEEN ABOUT HERE DO YOU SAID GURR LOOKING AT HER SEARCHINGLY NO SIR +7902-96595-0011-120: IF SHE KNEW EVIL HAD COME TO THE POOR LAD HER FACE WOULD TELL TALES LIKE PRINT +7902-96595-0012-121: I (SAID A LAD BOUT->STOOD ALOUD ABOUT) SEVENTEEN (IN->AND) A RED (CAP LIKE YOURS->CAPLICHOS) SAID (GURR->GREW) VERY SHORTLY +7902-96595-0013-122: THE MAN SHOOK HIS HEAD AND STARED AS IF HE DIDN'T HALF UNDERSTAND THE DRIFT OF WHAT WAS SAID +7902-96595-0014-123: HERE MY LAD WHERE'S YOUR MASTER +7902-96595-0015-124: EH I SAY (WHERE'S->WAS) YOUR MASTER +7902-96595-0016-125: (GURR->GERT) TURNED AWAY IMPATIENTLY AGAIN AND (SIGNING->SOUNDING) TO HIS MEN TO FOLLOW THEY ALL BEGAN TO TRAMP UP (THE->A) STEEP (TRACK->CHECK) LEADING TOWARD THE (HOZE->HOSE) WITH THE (RABBITS->RABBIT'S) SCUTTLING AWAY AMONG THE (FURZE->FIRS) AND SHOWING THEIR WHITE COTTONY TAILS FOR A MOMENT AS THEY DARTED DOWN INTO THEIR HOLES +7902-96595-0017-126: I DUNNO MUTTERED DICK AND A (MAN->MEN) CAN'T BE SURE +7902-96595-0018-127: (GURR->GIR) SALUTED AND STATED HIS BUSINESS WHILE THE BARONET WHO HAD TURNED (SALLOWER->SALARY) AND MORE CAREWORN THAN HIS LOT DREW A BREATH (*->OF) FULL OF RELIEF ONE OF YOUR SHIP BOYS HE SAID +7902-96595-0019-128: A LAD LOOKING LIKE A COMMON SAILOR AND WEARING A RED CAP NO SAID SIR RISDON +7902-96595-0020-129: I HAVE SEEN NO ONE ANSWERING TO THE DESCRIPTION HERE +7902-96595-0021-130: (BEG PARDON SIR BUT CAN YOU->BIG PARTISER BECAME) AS (A->*) GENTLEMAN ASSURE ME THAT HE IS NOT HERE CERTAINLY SAID SIR RISDON +7902-96595-0022-131: SURELY CRIED SIR RISDON EXCITEDLY +7902-96595-0023-132: SIR (RISDON->RICHMOND) WAS SILENT +7902-96595-0024-133: LADY (GRAEME->GRAHAM) LOOKED GHASTLY +7902-96595-0025-134: YOU DO NOT KNOW NO +7975-280057-0000-1008: THESE HATREDS WERE SOON TO MAKE TROUBLE FOR ME OF WHICH I HAD NEVER DREAMED +7975-280057-0001-1009: HENRY WASHINGTON YOUNGER MY FATHER REPRESENTED JACKSON COUNTY THREE TIMES IN THE LEGISLATURE AND WAS ALSO (*->A) JUDGE OF THE COUNTY COURT +7975-280057-0002-1010: MY MOTHER WHO WAS (BURSHEBA FRISTOE->PERCEIVER FOR STOVE) OF INDEPENDENCE WAS (THE->A) DAUGHTER OF RICHARD (FRISTOE->FRISTOW) WHO FOUGHT UNDER GENERAL ANDREW JACKSON AT NEW ORLEANS JACKSON COUNTY HAVING BEEN SO NAMED (AT->AND) MY GRANDFATHER (FRISTOE'S INSISTENCE->FIRST DOZE INSISTANTS) +7975-280057-0003-1011: I CANNOT REMEMBER WHEN I DID NOT KNOW HOW TO SHOOT +7975-280057-0004-1012: MY BROTHER JAMES WAS BORN JANUARY (FIFTEENTH->FIFTEEN) EIGHTEEN FORTY EIGHT JOHN IN EIGHTEEN FIFTY ONE AND ROBERT IN DECEMBER EIGHTEEN FIFTY THREE +7975-280057-0005-1013: MY ELDEST BROTHER RICHARD DIED IN EIGHTEEN SIXTY 
+7975-280057-0006-1014: MY FATHER WAS IN THE EMPLOY OF THE UNITED STATES GOVERNMENT AND HAD THE (MAIL->MALE) CONTRACT FOR FIVE HUNDRED MILES +7975-280057-0007-1015: HE HAD STARTED BACK TO HARRISONVILLE IN A BUGGY BUT WAS WAYLAID ONE MILE SOUTH OF (WESTPORT->WESTWARD) A SUBURB OF KANSAS CITY AND BRUTALLY MURDERED FALLING OUT OF HIS BUGGY INTO THE ROAD WITH THREE MORTAL BULLET WOUNDS +7975-280057-0008-1016: (MISSUS->MISS) WASHINGTON (WELLS->WALES) AND HER SON SAMUEL ON THE ROAD HOME FROM KANSAS CITY TO (LEE'S->LEE) SUMMIT RECOGNIZED THE BODY AS THAT OF MY FATHER +7975-280057-0009-1017: (MISSUS WELLS STAYED->MUST WELL STAY) TO GUARD THE REMAINS (WHILE->WHETHER) HER SON CARRIED THE NEWS OF THE MURDER TO COLONEL PEABODY OF THE FEDERAL COMMAND WHO WAS THEN (IN CAMP->ENCAMP) AT KANSAS CITY +7975-280057-0010-1018: (MISSUS MC CORKLE->MISS MICROCLE) JUMPED FROM THE WINDOW OF THE HOUSE AND ESCAPED +7975-280057-0011-1019: AS THE RAIDERS (LEFT->LIVED) ONE OF THEM SHOUTED +7975-280057-0012-1020: NOW (OLD->*) LADY CALL ON YOUR PROTECTORS WHY DON'T YOU CALL (ON COLE->AND CO) YOUNGER NOW +7975-280057-0013-1021: EVERY KNOT REPRESENTED A HUMAN LIFE +7975-280057-0014-1022: BUT SHE FAILED TO (FIND THE->FANCY) COMFORT SHE SOUGHT FOR ANNOYANCES CONTINUED IN A MORE AGGRAVATED FORM +7975-280057-0015-1023: TWO MONTHS AFTER (THIS->THE) INCIDENT THE SAME PERSECUTORS AGAIN ENTERED OUR HOME IN THE (DEAD->DAY) OF THE NIGHT AND AT THE POINT OF A PISTOL TRIED TO FORCE MY MOTHER TO SET FIRE TO HER OWN HOME +7975-280057-0016-1024: I HAVE ALWAYS FELT THAT THE EXPOSURE TO WHICH SHE WAS SUBJECTED ON THIS CRUEL JOURNEY TOO HARD EVEN FOR A MAN TO TAKE WAS (THE->A) DIRECT CAUSE OF HER DEATH +7975-280057-0017-1025: FROM HARRISONVILLE SHE WENT TO (WAVERLY->WAVERLEY) WHERE SHE WAS (HOUNDED->HANDY) CONTINUALLY +7975-280057-0018-1026: ONE OF THE CONDITIONS UPON WHICH HER LIFE WAS SPARED WAS THAT SHE WOULD REPORT (AT LEXINGTON->IT LECINGTON) WEEKLY +7975-280057-0019-1027: ONE OF MY OLD SCHOOL TEACHERS WHOM I HAVE NEVER SEEN SINCE THE SPRING (OR->OF) SUMMER OF EIGHTEEN SIXTY TWO IS STEPHEN B ELKINS SENATOR FROM WEST VIRGINIA +7975-280057-0020-1028: WHEN I WAS (TAKEN->TAKING) PRISONER I EXPECTED TO BE SHOT WITHOUT CEREMONY +7975-280063-0000-1058: WE TOOK THE OATH PERHAPS THREE HUNDRED OF US DOWN ON LUTHER MASON'S FARM A FEW MILES FROM WHERE I NOW WRITE WHERE COLONEL (HAYS->HAYES) HAD ENCAMPED AFTER INDEPENDENCE +7975-280063-0001-1059: (BOONE MUIR->BOOM YOU) AND MYSELF (MET->MAKE) COFFEE AND THE REST BELOW ROSE HILL ON GRAND RIVER +7975-280063-0002-1060: ACCORDINGLY I WAS SHORTLY AWAKENED TO ACCOMPANY HIM (TO LONE JACK->THE LONG JAG) WHERE HE WOULD PERSONALLY MAKE KNOWN THE SITUATION TO THE OTHER COLONELS +7975-280063-0003-1061: FOSTER HAD NEARLY ONE THOUSAND (CAVALRYMEN->CAVERNMENT) AND TWO PIECES OF (RABB'S->RABBS) INDIANA BATTERY THAT HAD ALREADY MADE FOR ITSELF A NAME FOR HARD FIGHTING +7975-280063-0004-1062: (COME IN->COMMONED) COLONEL (HAYS->HAYES) EXCLAIMED COLONEL (COCKRELL->COCKLE) +7975-280063-0005-1063: I THINK HE'LL BE (RATHER TOUGH MEAT->READY TO HAVE MEET) FOR BREAKFAST I REPLIED HE MIGHT BE ALL (RIGHT->RIPE) FOR DINNER +7975-280063-0006-1064: (JACKMAN->JACK WENT) WITH A PARTY OF THIRTY SEASONED MEN CHARGED THE INDIANA GUNS AND CAPTURED THEM BUT MAJOR FOSTER LED A GALLANT CHARGE AGAINST THE INVADERS AND (RECAPTURED->RE CAPTURED) THE PIECES +7975-280063-0007-1065: WE WERE OUT OF AMMUNITION AND WERE HELPLESS HAD THE FIGHT BEEN PRESSED +7975-280063-0008-1066: THEY DID MARK MY CLOTHES IN ONE OR TWO PLACES HOWEVER 
+7975-280063-0009-1067: MAJOR FOSTER IN A LETTER TO (JUDGE->JOE) GEORGE (M BENNETT->I INVITED) OF (MINNEAPOLIS->MANY APOLIS) SAID +7975-280063-0010-1068: I WAS TOLD BY SOME OF OUR MEN FROM THE WESTERN BORDER OF THE STATE THAT THEY RECOGNIZED (THE->A) DARING YOUNG (RIDER AS COLE YOUNGER->RATTERAS COURIER) +7975-280063-0011-1069: ABOUT NINE THIRTY A M I WAS SHOT DOWN +7975-280063-0012-1070: THE (WOUNDED->WOUNDS) OF BOTH FORCES WERE GATHERED UP AND WERE PLACED IN HOUSES +7975-280076-0000-1029: ALTHOUGH EVERY BOOK (PURPORTING->REPORTING) TO NARRATE THE LIVES OF THE YOUNGER BROTHERS (HAS->IS) TOLD OF THE LIBERTY ROBBERY AND IMPLIED THAT WE HAD A PART IN IT THE YOUNGERS WERE NOT SUSPECTED AT THAT TIME NOR FOR A LONG TIME AFTERWARD +7975-280076-0001-1030: IT WAS CLAIMED BY PEOPLE OF LIBERTY THAT THEY POSITIVELY RECOGNIZED AMONG THE ROBBERS (OLL SHEPHERD RED MONKERS->ALL SHEPARD REDMOCKERS) AND BUD (PENCE->PINTS) WHO HAD SEEN SERVICE WITH (QUANTRELL->QUANTREL) +7975-280076-0002-1031: THIS (RAID->RAY) WAS ACCOMPANIED BY (BLOODSHED JUDGE->BLOTCHETTE JOE) MC (LAIN->LANE) THE BANKER BEING SHOT THOUGH NOT FATALLY +7975-280076-0003-1032: (NO->THOUGH) WARRANT WAS ISSUED FOR THE YOUNGERS BUT SUBSEQUENT HISTORIANS HAVE INFERENTIALLY AT LEAST ACCUSED US OF TAKING PART BUT AS I SAID BEFORE THERE IS NO TRUTH IN THE ACCUSATION +7975-280076-0004-1033: JUNE THIRD EIGHTEEN SEVENTY ONE (OBOCOCK BROTHERS->OBEY BROTHER'S) BANK AT (CORYDON IOWA->CROYDEN HOUR) WAS ROBBED OF FORTY THOUSAND DOLLARS BY SEVEN MEN IN BROAD DAYLIGHT +7975-280076-0005-1034: IT WAS (CHARGED->CHARGE) THAT (ARTHUR MC COY->OFTEN MA KOY) OR A (C MC->SEA MAC) COY AND MYSELF HAD BEEN PARTICIPANTS IN THE GAD'S HILL AFFAIR AND THE TWO STAGE ROBBERIES +7975-280076-0006-1035: THE PARTS OF THIS LETTER NOW RELEVANT ARE AS FOLLOWS +7975-280076-0007-1036: YOU MAY USE THIS LETTER IN YOUR OWN WAY +7975-280076-0008-1037: I WILL GIVE YOU THIS OUTLINE AND SKETCH OF MY WHEREABOUTS AND ACTIONS AT THE TIME OF CERTAIN ROBBERIES WITH WHICH I AM CHARGED +7975-280076-0009-1038: (AT->IT'S) THE TIME OF THE (GALLATIN->GALLOP AND) BANK ROBBERY I WAS GATHERING CATTLE (IN ELLIS->AND ILLIS) COUNTY TEXAS (CATTLE THAT I->CATTLEETTA) BOUGHT FROM (PLEAS->PLACE) TAYLOR AND RECTOR +7975-280076-0010-1039: THIS CAN BE PROVED BY BOTH OF THEM ALSO BY (SHERIFF BARKLEY->SIR PARKLEY) AND FIFTY OTHER RESPECTABLE MEN OF THAT COUNTY +7975-280076-0011-1040: I BROUGHT THE CATTLE (TO->THE) KANSAS (THAT->SET) FALL AND REMAINED IN SAINT CLAIR COUNTY UNTIL FEBRUARY +7975-280076-0012-1041: I THEN WENT TO (ARKANSAS AND->OUR CONSOHN) RETURNED TO SAINT CLAIR COUNTY ABOUT THE FIRST OF MAY +7975-280076-0013-1042: (I->AND) WENT TO KANSAS WHERE (OUR->A) CATTLE (WERE IN->BURNED) WOODSON COUNTY AT COLONEL (RIDGE'S->RICHES) +7975-280076-0014-1043: DURING (THE->*) SUMMER I WAS EITHER IN SAINT CLAIR (*->OR) JACKSON OR KANSAS BUT AS THERE WAS NO ROBBERY COMMITTED THAT SUMMER IT MAKES NO DIFFERENCE WHERE I WAS +7975-280076-0015-1044: (I->AND) WENT THROUGH INDEPENDENCE AND FROM THERE TO ACE (WEBB'S->WEBBS) +7975-280076-0016-1045: THERE I TOOK DINNER AND THEN WENT TO DOCTOR (L->OLD) W (TWYMAN'S->TWIMMAN) +7975-280076-0017-1046: OUR BUSINESS THERE WAS TO SEE E P WEST HE WAS NOT AT HOME BUT THE FAMILY WILL REMEMBER THAT WE WERE THERE +7975-280076-0018-1047: WE CROSSED ON THE BRIDGE (STAYED->STATE) IN THE CITY ALL NIGHT AND THE NEXT MORNING WE RODE UP (THROUGH->TO) THE CITY +7975-280076-0019-1048: (I MET->AMID) SEVERAL OF MY FRIENDS AMONG THEM WAS BOB (HUDSPETH->HUSBATH) +7975-280076-0020-1049: WE WERE NOT ON GOOD 
TERMS AT THE TIME NOR HAVE WE BEEN FOR SEVERAL YEARS +7975-280076-0021-1050: POOR JOHN HE HAS BEEN HUNTED DOWN AND SHOT LIKE A WILD BEAST AND NEVER WAS A BOY MORE INNOCENT +7975-280076-0022-1051: DOCTOR (L LEWIS->EL LOUIS) WAS HIS PHYSICIAN +7975-280076-0023-1052: THERE WERE FIFTY OR (A->*) HUNDRED PERSONS THERE WHO WILL TESTIFY IN ANY COURT THAT JOHN AND I WERE THERE +7975-280076-0024-1053: (HELVIN->HELVAN) FICKLE AND WIFE OF (GREENTON->GREENS AND) VALLEY WERE ATTENDING THE SPRINGS AT THAT TIME AND EITHER OF THEM WILL TESTIFY TO THE ABOVE FOR JOHN AND I (SAT->SET) IN FRONT OF MISTER SMITH WHILE HE WAS PREACHING AND WAS IN HIS COMPANY FOR A FEW MOMENTS TOGETHER WITH HIS WIFE AND MISTER AND (MISSUS->MISS) FICKLE AFTER (*->THE) SERVICE +7975-280076-0025-1054: ABOUT THE LAST OF DECEMBER EIGHTEEN SEVENTY THREE I ARRIVED IN (CARROLL->CAROL) PARISH LOUISIANA +7975-280076-0026-1055: I STAYED THERE UNTIL THE EIGHTH OF FEBRUARY EIGHTEEN SEVENTY FOUR +7975-280076-0027-1056: I HAD NOT HEARD OF THAT WHEN I WROTE THE LETTER OF EIGHTEEN SEVENTY FOUR AND TO CORRECT ANY MISAPPREHENSION THAT MIGHT BE CREATED BY OMITTING IT I WILL SAY THAT AT (THAT->THE) TIME I WAS AT (NEOSHO->NEOSH OF) KANSAS WITH A DROVE OF CATTLE WHICH I SOLD TO MAJOR (RAY->WRAYE) +7975-280076-0028-1057: IT WAS IMMEDIATELY FOLLOWING THE ROCK ISLAND ROBBERY AT (ADAIR->EIGHT AIR) IOWA THAT (THERE->THEIR) FIRST APPEARED A (DELIBERATE->DELIVERED) ENLISTMENT OF SOME LOCAL PAPERS (IN->AND) MISSOURI TO CONNECT US WITH THIS ROBBERY +7975-280084-0000-1090: I URGED ON THE BOYS (THAT->AT) WHATEVER HAPPENED WE SHOULD NOT SHOOT ANY ONE +7975-280084-0001-1091: WHEN MILLER AND I CROSSED THE BRIDGE THE THREE WERE ON SOME (DRY GOODS->DRAGOOD) BOXES AT THE CORNER NEAR THE BANK AND AS SOON AS (THEY->I) SAW US WENT RIGHT INTO THE BANK INSTEAD OF WAITING FOR US TO GET THERE +7975-280084-0002-1092: WHEN WE CAME UP (I TOLD->A TOE) MILLER TO SHUT THE BANK DOOR WHICH THEY HAD LEFT OPEN IN THEIR HURRY +7975-280084-0003-1093: J (S ALLEN->HELEN) WHOSE (HARDWARE STORE WAS->HARD WORKED ALWAYS) NEAR TRIED TO GO INTO THE BANK BUT MILLER ORDERED HIM AWAY AND HE RAN (AROUND->ROUND) THE CORNER SHOUTING +7975-280084-0004-1094: GET YOUR GUNS BOYS THEY'RE ROBBING THE BANK +7975-280084-0005-1095: AND I (CALLED->CALL) TO HIM TO GET INSIDE AT THE SAME TIME FIRING A PISTOL SHOT IN THE AIR AS (A->THE) SIGNAL TO THE THREE BOYS AT THE BRIDGE THAT WE HAD BEEN DISCOVERED +7975-280084-0006-1096: ALMOST AT THIS INSTANT I HEARD A PISTOL SHOT IN THE BANK +7975-280084-0007-1097: (CHADWELL->TEDWELL) WOODS AND JIM RODE UP AND (JOINED US->JOINCE) SHOUTING TO (*->THE) PEOPLE IN THE STREET TO GET INSIDE AND FIRING THEIR PISTOLS TO EMPHASIZE THEIR COMMANDS +7975-280084-0008-1098: IF ANY OF OUR PARTY SHOT HIM IT MUST HAVE BEEN WOODS +7975-280084-0009-1099: MEANTIME THE STREET WAS GETTING UNCOMFORTABLY HOT +7975-280084-0010-1100: EVERY TIME I SAW ANY ONE WITH A BEAD ON ME I WOULD DROP OFF MY HORSE AND (TRY->TROT) TO DRIVE THE SHOOTER INSIDE BUT I COULD NOT SEE IN EVERY DIRECTION +7975-280084-0011-1101: DOCTOR (WHEELER->WHALER) WHO HAD GONE UPSTAIRS IN THE HOTEL SHOT MILLER AND HE LAY DYING IN THE STREET +7975-280084-0012-1102: CHANGING HIS PISTOL TO HIS LEFT HAND BOB RAN OUT AND MOUNTED MILLER'S MARE +7975-280084-0013-1103: WHAT KEPT YOU SO LONG I ASKED PITTS +7975-280084-0014-1104: AS TO THE REST OF THE AFFAIR INSIDE THE BANK I TAKE THE ACCOUNT OF A (NORTHFIELD->NORTH FIELD) NARRATOR +7975-280084-0015-1105: WHERE'S THE MONEY OUTSIDE THE SAFE BOB ASKED +7975-280084-0016-1106: THE SHUTTERS WERE 
CLOSED AND THIS CAUSED BUNKER AN INSTANT'S DELAY THAT WAS ALMOST FATAL (PITTS->FITZ) CHASED HIM WITH A BULLET +7975-280084-0017-1107: THE FIRST ONE (MISSED HIM->MISTING) BUT THE SECOND WENT THROUGH HIS RIGHT SHOULDER +7975-280085-0000-1071: THAT NIGHT IT STARTED TO RAIN AND WE WORE OUT OUR HORSES +7975-280085-0001-1072: FRIDAY WE MOVED TOWARD WATERVILLE AND FRIDAY NIGHT WE CAMPED BETWEEN (ELYSIAN->THE LUCIEN) AND GERMAN LAKE +7975-280085-0002-1073: (BOB'S SHATTERED ELBOW WAS->BOB SATURDAIL BOWS) REQUIRING FREQUENT ATTENTION AND THAT NIGHT WE MADE ONLY NINE MILES AND MONDAY MONDAY NIGHT AND TUESDAY WE SPENT IN A DESERTED FARM HOUSE CLOSE TO (MANKATO->MAN CATO) +7975-280085-0003-1074: THAT (DAY->THEY) A MAN NAMED (DUNNING->DINNING) DISCOVERED US AND WE TOOK HIM PRISONER +7975-280085-0004-1075: FINALLY WE ADMINISTERED TO HIM AN OATH NOT TO BETRAY OUR WHEREABOUTS UNTIL WE HAD TIME TO MAKE OUR ESCAPE AND HE AGREED NOT TO +7975-280085-0005-1076: NO SOONER HOWEVER WAS HE RELEASED THAN HE MADE (POSTHASTE->POST TASTE) INTO (MANKATO->MANCAO) TO ANNOUNCE OUR PRESENCE AND IN A FEW MINUTES ANOTHER POSSE WAS LOOKING FOR US +7975-280085-0006-1077: THE WHISTLE ON THE (OIL->ORE) MILL BLEW AND WE FEARED THAT IT WAS A SIGNAL THAT HAD BEEN AGREED UPON TO ALARM THE TOWN IN CASE WE WERE OBSERVED BUT WE WERE NOT MOLESTED +7975-280085-0007-1078: HE HAD TO SLEEP WITH (IT PILLOWED->A PILLART) ON MY BREAST JIM BEING ALSO (*->A) CRIPPLED WITH A WOUND IN HIS SHOULDER AND WE COULD NOT GET MUCH SLEEP +7975-280085-0008-1079: BUT THEY SOON AFTER GOT CLOSE ENOUGH SO THAT ONE OF THEM BROKE MY WALKING STICK WITH A SHOT +7975-280085-0009-1080: WE WERE (IN SIGHT->INSIDE) OF OUR LONG (SOUGHT->SOWED) HORSES WHEN THEY CUT US OFF FROM THE ANIMALS AND OUR LAST HOPE WAS GONE +7975-280085-0010-1081: SIX (STEPPED->STEPS) TO THE FRONT SHERIFF (GLISPIN->CLISPIN) COLONEL T L (VOUGHT->WALT) B (M->AND) RICE G (A->*) BRADFORD C A (POMEROY->POMMEROI) AND S J SEVERSON +7975-280085-0011-1082: FORMING (IN->A) LINE FOUR PACES APART HE ORDERED THEM TO ADVANCE RAPIDLY AND CONCENTRATE THE FIRE OF THE WHOLE LINE THE INSTANT THE ROBBERS WERE DISCOVERED +7975-280085-0012-1083: MAKE FOR THE HORSES I SAID EVERY MAN FOR HIMSELF +7975-280085-0013-1084: THERE IS NO USE STOPPING TO PICK UP A COMRADE HERE (FOR->TILL) WE CAN'T GET HIM THROUGH THE LINE JUST (CHARGE->SHARS) THEM AND MAKE IT IF WE CAN +7975-280085-0014-1085: I GOT UP AS (THE->A) SIGNAL FOR THE CHARGE AND WE FIRED ONE VOLLEY +7975-280085-0015-1086: ONE OF THE FELLOWS IN THE OUTER (LINE->LAND) NOT BRAVE ENOUGH HIMSELF TO JOIN THE VOLUNTEERS WHO HAD COME IN TO BEAT US OUT WAS NOT DISPOSED TO BELIEVE IN THE SURRENDER AND HAD HIS GUN LEVELLED ON BOB IN SPITE OF THE HANDKERCHIEF WHICH WAS WAVING AS A FLAG OF TRUCE +7975-280085-0016-1087: (SHERIFF->SURE OF) GLISPIN OF (WATONWAN->WATERWAM) COUNTY WHO WAS TAKING BOB'S PISTOL FROM HIM WAS ALSO SHOUTING TO THE FELLOW +7975-280085-0017-1088: INCLUDING THOSE RECEIVED IN AND ON THE WAY FROM (NORTHFIELD->NORTH FIELD) I HAD ELEVEN (WOUNDS->ONES) +7975-280085-0018-1089: (AND SHERIFF->IN CHEER) GLISPIN'S ORDER NOT TO SHOOT WAS THE BEGINNING OF THE (PROTECTORATE->PROTECTOR) THAT MINNESOTA PEOPLE ESTABLISHED OVER US +8131-117016-0000-1303: CAPTAIN (MURDOCH->MURDOCK) +8131-117016-0001-1304: BUT MARSPORT HAD FLOURISHED ENOUGH TO KILL IT OFF +8131-117016-0002-1305: SOME OF MARS LAWS DATED FROM THE TIME WHEN (LAW ENFORCEMENT->LAWN FORCEMENT) HAD BEEN HAMPERED BY LACK OF MEN RATHER THAN BY THE TYPE OF MEN +8131-117016-0003-1306: THE (STONEWALL->STONE WALL) GANG NUMBERED 
PERHAPS FIVE HUNDRED +8131-117016-0004-1307: EVEN (DERELICTS AND->DEAR ALEXAM) FAILURES HAD TO EAT THERE WERE (STORES->STORIES) AND SHOPS THROUGHOUT THE DISTRICT WHICH EKED OUT SOME KIND OF A MARGINAL LIVING +8131-117016-0005-1308: THEY WERE SAFE FROM PROTECTION (RACKETEERS->RAGATIRS) THERE NONE BOTHERED TO COME SO FAR OUT +8131-117016-0006-1309: THE SHOPKEEPERS AND SOME OF THE LESS UNFORTUNATE PEOPLE THERE HAD PROTESTED LOUD ENOUGH TO REACH CLEAR BACK TO EARTH +8131-117016-0007-1310: CAPTAIN (MURDOCH->MURDOCK) WAS AN UNKNOWN FACTOR AND NOW WAS ASKING FOR MORE MEN +8131-117016-0008-1311: THE PRESSURE WAS ENOUGH TO GET THEM FOR HIM +8131-117016-0009-1312: GORDON REPORTED FOR WORK WITH A SENSE OF THE BOTTOM FALLING OUT MIXED WITH A VAGUE RELIEF +8131-117016-0010-1313: I'VE GOT A FREE HAND AND WE'RE GOING TO RUN THIS THE WAY WE WOULD ON EARTH +8131-117016-0011-1314: YOUR JOB IS TO PROTECT THE CITIZENS HERE AND THAT MEANS (EVERYONE->EVERY ONE) NOT BREAKING THE LAWS WHETHER YOU FEEL LIKE IT OR NOT NO GRAFT +8131-117016-0012-1315: THE FIRST MAN MAKING A (SHAKEDOWN->SHAKE DOWN) WILL GET THE SAME TREATMENT WE'RE GOING TO USE ON THE (STONEWALL->STONE WALL) BOYS YOU'LL GET DOUBLE PAY HERE AND YOU CAN LIVE ON IT +8131-117016-0013-1316: HE PICKED OUT FIVE OF THE MEN INCLUDING GORDON YOU FIVE WILL COME WITH ME +8131-117016-0014-1317: THE REST OF YOU CAN (TEAM->TEEM) UP ANY WAY YOU WANT (TONIGHT->TO NIGHT) PICK ANY (ROUTE->ROUGH) THAT'S OPEN (OKAY MEN->OH CAME AND) LET'S GO +8131-117016-0015-1318: (BRUCE->BRUSH) GORDON GRINNED SLOWLY AS HE SWUNG THE STICK AND (MURDOCH'S->MARDOC'S) EYES FELL ON HIM EARTH COP +8131-117016-0016-1319: TWO YEARS GORDON ADMITTED +8131-117016-0017-1320: FOR A SECOND GORDON CURSED HIMSELF +8131-117016-0018-1321: HE BEGAN WONDERING ABOUT SECURITY THEN +8131-117016-0019-1322: NOBODY HAD TRIED TO GET IN TOUCH WITH HIM +8131-117016-0020-1323: THERE WAS A CRUDE LIGHTING SYSTEM HERE PUT UP BY THE CITIZENS AT THE FRONT OF EACH BUILDING A DIM (PHOSPHOR->PHOSPHER) BULB GLOWED WHEN DARKNESS FELL THEY WOULD HAVE NOTHING ELSE TO SEE BY +8131-117016-0021-1324: MOVING IN TWO GROUPS OF THREES (AT->IT) OPPOSITE SIDES OF THE STREET THEY BEGAN THEIR BEAT +8131-117016-0022-1325: THERE WAS NO CHANCE TO SAVE THE CITIZEN WHO WAS DYING FROM LACK OF AIR +8131-117016-0023-1326: GORDON FELT THE SOLID PLEASURE OF THE FINELY TURNED CLUB IN HIS HANDS +8131-117016-0024-1327: GORDON'S EYES POPPED AT THAT +8131-117016-0025-1328: HE SWALLOWED THE SENTIMENT HIS OWN CLUB WAS MOVING NOW +8131-117016-0026-1329: THE OTHER (FOUR COPS->FUPS) HAD COME IN RELUCTANTLY +8131-117016-0027-1330: HE BROUGHT HIM TO THE GROUND WITH A SINGLE BLOW ACROSS THE KIDNEYS +8131-117016-0028-1331: THEY (ROUNDED->ROUTED) UP THE MEN OF THE GANG AND ONE OF THE (COPS->CUPS) STARTED OFF +8131-117016-0029-1332: TO FIND A PHONE AND CALL THE WAGON +8131-117016-0030-1333: (WE'RE->WERE) NOT USING WAGONS (MURDOCH->MURDOCK) TOLD HIM (LINE->LYING) THEM UP +8131-117016-0031-1334: IF THEY TRIED TO RUN THEY WERE HIT FROM BEHIND (IF->THAT) THEY STOOD STILL THEY WERE CLUBBED CAREFULLY +8131-117016-0032-1335: (MURDOCH->MURDOCK) INDICATED ONE WHO STOOD WITH HIS (SHOULDERS->SHOULDER) SHAKING AND TEARS RUNNING DOWN HIS CHEEKS +8131-117016-0033-1336: THE CAPTAIN'S FACE WAS AS SICK AS (GORDON->GORDON'S) FELT +8131-117016-0034-1337: I WANT THE NAME OF EVERY MAN IN THE GANG YOU CAN REMEMBER HE TOLD THE MAN +8131-117016-0035-1338: COLONEL THEY'D KILL ME I DON'T KNOW +8131-117016-0036-1339: (MURDOCH->MURDOCK) TOOK HIS NOD AS EVIDENCE ENOUGH AND TURNED TO THE 
WRETCHED (TOUGHS->TUFTS) +8131-117016-0037-1340: IF HE SHOULD TURN UP DEAD I'LL KNOW YOU BOYS ARE RESPONSIBLE AND I'LL FIND YOU +8131-117016-0038-1341: TROUBLE BEGAN BREWING SHORTLY AFTER THOUGH +8131-117016-0039-1342: (MURDOCH SENT->MARDOX SAT) ONE OF THE MEN TO PICK UP A SECOND SQUAD OF SIX AND THEN A THIRD +8131-117016-0040-1343: (IN->AND) THE THIRD ONE BRUCE GORDON SPOTTED ONE OF THE MEN (WHO'D->WHO HAD) BEEN BEATEN BEFORE +8131-117016-0041-1344: GET A STRETCHER AND TAKE HIM WHEREVER HE BELONGS HE ORDERED +8131-117016-0042-1345: BUT THE CAPTAIN STIRRED FINALLY SIGHING +8131-117016-0043-1346: (NO->NOW) THE (COPS THEY'RE->CAPS ARE) GIVING ME WE'RE COVERED GORDON +8131-117016-0044-1347: BUT THE (STONEWALL->STERN WALL) GANG IS (BACKING WAYNE->BACK IN WAIN) +8131-117016-0045-1348: BUT IT'S GOING TO BE TOUGH ON THEM +8131-117016-0046-1349: BRUCE (GORDON->GORD AND) GRIMACED I'VE GOT A YELLOW TICKET FROM SECURITY +8131-117016-0047-1350: (MURDOCH->MARDOCK) BLINKED HE DROPPED HIS EYES SLOWLY +8131-117016-0048-1351: WHAT MAKES YOU THINK (WAYNE->WAIN) WILL BE RE ELECTED +8131-117016-0049-1352: NOBODY WANTS HIM EXCEPT A GANG OF (CROOKS->COOKS) AND THOSE IN POWER +8131-117016-0050-1353: EVER SEE A MARTIAN ELECTION +8131-117016-0051-1354: NO (YOU'RE A FIRSTER->YOU ARE FIRST) HE CAN'T LOSE +8131-117016-0052-1355: AND THEN HELL IS GOING TO POP AND THIS WHOLE PLANET MAY BE BLOWN WIDE OPEN +8131-117016-0053-1356: (IT->YET) FITTED WITH THE (DIRE->DIA) PREDICTIONS OF SECURITY AND WITH (THE->A) SPYING GORDON WAS GOING TO DO ACCORDING TO THEM +8131-117016-0054-1357: HE WAS GETTING EVEN FATTER NOW THAT HE WAS EATING BETTER FOOD FROM THE FAIR RESTAURANT AROUND THE CORNER +8131-117016-0055-1358: (COST EM->COSTUM) MORE BUT THEY'D BE RESPECTABLE +8131-117016-0056-1359: BECAUSE (IZZY->IZZIE) IS ALWAYS HONEST ACCORDING TO HOW HE SEES IT +8131-117016-0057-1360: BUT YOU GOT EARTH IDEAS OF THE STUFF LIKE I HAD ONCE +8131-117016-0058-1361: THE GROUPS GREW MORE EXPERIENCED AND (MURDOCH->MURDOCK) WAS TRAINING A NEW SQUAD EVERY NIGHT +8131-117016-0059-1362: IT WASN'T EXACTLY LEGAL BUT NOTHING WAS HERE +8131-117016-0060-1363: THIS COULD LEAD TO ABUSES AS HE'D SEEN ON EARTH +8131-117016-0061-1364: BUT (THERE->THEIR) PROBABLY WOULDN'T BE TIME FOR IT IF MAYOR (WAYNE->WAIN) WAS RE ELECTED +8131-117017-0000-1270: IT WAS NIGHT OUTSIDE AND THE (PHOSPHOR BULBS->PHOSPHOBS) AT THE CORNERS GLOWED DIMLY GIVING HIM BARELY ENOUGH LIGHT BY WHICH TO LOCATE THE WAY TO THE EXTEMPORIZED PRECINCT HOUSE +8131-117017-0001-1271: IT HAD PROBABLY BEEN YEARS SINCE ANY HAD DARED RISK IT AFTER THE SUN WENT DOWN +8131-117017-0002-1272: AND THE SLOW DOUBTFUL RESPECT ON THE FACES OF THE CITIZENS AS THEY NODDED TO HIM WAS EVEN MORE PROOF THAT (HALEY'S->HALELY) SYSTEM WAS WORKING +8131-117017-0003-1273: GORDON HIT THE SIGNAL SWITCH AND THE (MARSPEAKER LET->MARKEER LED) OUT A SHRILL WHISTLE +8131-117017-0004-1274: (GUNS->GUN) SUDDENLY SEEMED TO BE FLOURISHING EVERYWHERE +8131-117017-0005-1275: YOU CAN'T DO IT TO ME +8131-117017-0006-1276: (I'M->I AM) REFORMED I'M GOING STRAIGHT +8131-117017-0007-1277: YOU DAMNED (COPS->COPSE) CAN'T (O'NEILL->O'NEIA) WAS BLUBBERING +8131-117017-0008-1278: ONE LOOK WAS ENOUGH THE WORK PAPERS HAD THE (TELLTALE->TELL TALE) OVER THICKENING OF THE SIGNATURE (THAT->THEY) HAD SHOWED UP ON OTHER PAPERS OBVIOUSLY FORGERIES +8131-117017-0009-1279: SOME TURNED AWAY AS GORDON AND THE OTHER (COP->COPP) WENT TO WORK BUT MOST OF THEM WEREN'T SQUEAMISH +8131-117017-0010-1280: WHEN IT WAS OVER THE TWO PICKED UP THEIR WHIMPERING CAPTIVE 
+8131-117017-0011-1281: JENKINS THE OTHER COP HAD BEEN HOLDING THE WALLET +8131-117017-0012-1282: MUST (OF->HAVE) BEEN MAKING A BIG CONTACT IN SOMETHING FIFTY FIFTY +8131-117017-0013-1283: THERE MUST HAVE BEEN OVER TWO THOUSAND CREDITS IN THE WALLET +8131-117017-0014-1284: WHEN GORDON AND JENKINS CAME BACK (MURDOCH->MURDOCK) TOSSED THE MONEY TO THEM SPLIT IT +8131-117017-0015-1285: WHATEVER COMES TO HAND (GOV'NOR->GOVERNOR) +8131-117017-0016-1286: LIKE THIS SOCIAL CALL GORDON ASKED HIM +8131-117017-0017-1287: THE LITTLE MAN SHOOK HIS HEAD HIS ANCIENT EIGHTEEN YEAR OLD FACE TURNING SOBER (NOPE->NOTE) +8131-117017-0018-1288: YOU (OWE->ARE) ME SOME BILLS (GOV'NOR->GUV'NER) +8131-117017-0019-1289: ELEVEN HUNDRED FIFTY CREDITS +8131-117017-0020-1290: YOU DIDN'T PAY UP YOUR PLEDGE TO THE (CAMPAIGN->CAPTAIN) FUND SO I (HADDA->HAD A) FILL IN +8131-117017-0021-1291: A THOUSAND INTEREST AT TEN PER CENT A WEEK STANDARD RIGHT +8131-117017-0022-1292: GORDON HAD HEARD OF THE FRIENDLY INTEREST CHARGED ON THE SIDE HERE BUT HE SHOOK HIS HEAD WRONG (IZZY->IS HE) +8131-117017-0023-1293: (HUH IZZY->HOW AS HE) TURNED IT OVER AND SHOOK HIS HEAD +8131-117017-0024-1294: NOW SHOW ME WHERE I SIGNED ANY AGREEMENT SAYING I'D PAY YOU BACK +8131-117017-0025-1295: FOR A SECOND (IZZY'S->IZZIE'S) FACE WENT BLANK THEN HE CHUCKLED +8131-117017-0026-1296: HE (PULLED->POURED) OUT THE BILLS AND HANDED THEM OVER +8131-117017-0027-1297: THANKS (IZZY->IS HE) THANKS YOURSELF +8131-117017-0028-1298: THE KID POCKETED THE MONEY CHEERFULLY NODDING +8131-117017-0029-1299: THE LITTLE GUY KNEW MARS AS FEW OTHERS DID APPARENTLY FROM ALL SIDES +8131-117017-0030-1300: AND IF ANY OF THE OTHER (COPS->CUPS) HAD PRIVATE RACKETS OF THEIR OWN (IZZY->IZZIE) WAS UNDOUBTEDLY THE MAN TO FIND IT OUT AND (USE->USED) THE INFORMATION WITH A BEAT SUCH AS THAT EVEN GOING HALVES AND WITH ALL THE GRAFT (TO->AT) THE UPPER BRACKETS HE'D STILL BE ABLE TO MAKE HIS PILE IN A MATTER OF MONTHS +8131-117017-0031-1301: THE CAPTAIN LOOKED COMPLETELY BEATEN AS HE CAME INTO THE ROOM AND DROPPED (ONTO->INTO) THE BENCH +8131-117017-0032-1302: GO ON (ACCEPT DAMN IT->EXCEPT DEAR MIN) +8131-117029-0000-1247: THERE WAS A MAN COMING FROM EARTH ON A SECOND SHIP WHO WOULD SEE HIM +8131-117029-0001-1248: THE LITTLE PUBLISHER WAS BACK AT THE CRUSADER AGAIN +8131-117029-0002-1249: ONLY GORDON AND SHEILA WERE LEFT +8131-117029-0003-1250: CREDIT HAD BEEN ESTABLISHED AGAIN AND THE BUSINESSES WERE OPEN +8131-117029-0004-1251: GORDON CAME TO A ROW OF TEMPORARY BUBBLES INDIVIDUAL DWELLINGS BUILT LIKE THE DOME BUT OPAQUE FOR PRIVACY +8131-117029-0005-1252: THEY HAD BEEN LUCKY +8131-117029-0006-1253: (SCHULBERG'S->SHUBERG'S) VOLUNTEERS WERE OFFICIAL NOW +8131-117029-0007-1254: (FATS->FAT'S) PLACE WAS STILL OPEN THOUGH THE CROOKED TABLES HAD BEEN REMOVED GORDON DROPPED TO A STOOL SLIPPING OFF HIS HELMET +8131-117029-0008-1255: HE REACHED AUTOMATICALLY FOR THE GLASS OF ETHER (NEEDLED->NEEDLE) BEER +8131-117029-0009-1256: THOUGHT (YOU'D->YE'D) BE IN THE CHIPS +8131-117029-0010-1257: THAT'S MARS GORDON ECHOED (THE OTHER'S COMMENT->OTHERS COMMENTS) WHY DON'T YOU PULL OFF THE PLANET FATS YOU COULD GO BACK TO EARTH I'D GUESS THE OTHER NODDED +8131-117029-0011-1258: (GUESS->GES) A MAN GETS USED TO ANYTHING HELL MAYBE I CAN HIRE SOME BUMS TO SIT AROUND AND WHOOP IT UP WHEN THE SHIPS COME IN AND (BILL->BUILD) THIS (AS->IS) A REAL OLD MARTIAN DEN OF SIN +8131-117029-0012-1259: THERE WAS A GRIN ON THE OTHER'S FACE +8131-117029-0013-1260: FINALLY GOT OUR ORDERS FOR YOU IT'S MERCURY 
+8131-117029-0014-1261: WE SENT TWENTY OTHERS THE SAME WAY AND THEY FAILED +8131-117029-0015-1262: (LET'S SAY YOU'VE->LET SAVE) SHIFTED SOME OF THE MISERY AROUND A BIT AND GIVEN THEM A CHANCE TO DO BETTER +8131-117029-0016-1263: YOU CAN'T STAY HERE +8131-117029-0017-1264: THERE'S A ROCKET WAITING TO (TRANSSHIP->TRANSHIP) YOU TO THE MOON ON THE WAY TO MERCURY RIGHT NOW GORDON SIGHED +8131-117029-0018-1265: AND (I'VE->I) PAID HER THE PAY WE OWE YOU FROM THE TIME YOU (BEGAN->BEGIN) USING YOUR BADGE SHE'S OUT SHOPPING +8131-117029-0019-1266: BUT HIS OLD EYES WERE GLINTING +8131-117029-0020-1267: DID YOU THINK WE'D LET YOU GO WITHOUT SEEING YOU OFF COBBER HE ASKED +8131-117029-0021-1268: I I (OH DRAT->DREAD) IT I'M GETTING OLD (IZZY->AS HE) YOU TELL HIM +8131-117029-0022-1269: HE GRABBED GORDON'S HAND AND WADDLED DOWN THE LANDING PLANK (IZZY->IZZIE) SHOOK HIS HEAD +8188-269288-0000-2881: (ANNIE->ANY) COLCHESTER HAD BEGUN TO MAKE FRIENDS WITH (LESLIE->LISLEY) +8188-269288-0001-2882: LESLIE DETERMINED TO (TRY FOR->TRIFLE) HONORS IN ENGLISH LANGUAGE AND LITERATURE +8188-269288-0002-2883: HER TASTES ALL LAY IN THIS DIRECTION HER IDEA BEING BY AND BY TO FOLLOW HER MOTHER'S PROFESSION OF JOURNALISM FOR WHICH SHE ALREADY SHOWED CONSIDERABLE APTITUDE +8188-269288-0003-2884: SHE HAD NO IDEA OF ALLOWING HERSELF TO BREAK DOWN +8188-269288-0004-2885: WHAT DO YOU MEAN REPLIED LESLIE +8188-269288-0005-2886: WHY YOU WILL BE PARTING FROM ME YOU KNOW +8188-269288-0006-2887: I (WON'T->WOULD) BE THE CONSTANT WORRY (AND->IN) PLAGUE OF YOUR LIFE +8188-269288-0007-2888: IT IS THIS IF BY ANY CHANCE YOU DON'T LEAVE SAINT (WODE'S->WORDS) ANNIE I HOPE YOU WILL ALLOW ME TO BE YOUR (ROOMFELLOW->ROOM FELLOW) AGAIN NEXT TERM +8188-269288-0008-2889: SAID ANNIE A FLASH OF LIGHT COMING INTO HER EYES AND THEN LEAVING THEM +8188-269288-0009-2890: BUT SHE ADDED ABRUPTLY YOU SPEAK OF SOMETHING WHICH MUST NOT TAKE PLACE +8188-269288-0010-2891: I MUST PASS (IN HONORS->ON HONOURS) IF I DON'T I SHALL DIE +8188-269288-0011-2892: A FEW MOMENTS LATER THERE CAME A TAP AT THE DOOR +8188-269288-0012-2893: LESLIE OPENED THE DOOR +8188-269288-0013-2894: JANE HERIOT STOOD WITHOUT +8188-269288-0014-2895: THESE (LETTERS->LITTLE) HAVE JUST COME FOR YOU AND (ANNIE->ANY) COLCHESTER SHE SAID AND AS I WAS COMING (UPSTAIRS->UP STAIRS) I THOUGHT I WOULD LEAVE THEM WITH YOU +8188-269288-0015-2896: (LESLIE->LISLEY) THANKED HER AND EAGERLY GRASPED THE LITTLE PARCEL +8188-269288-0016-2897: HER EYES SHONE WITH PLEASURE AT THE ANTICIPATION OF THE DELIGHTFUL TIME SHE WOULD HAVE (REVELING->REVELLING) IN THE HOME NEWS THE OTHER LETTER WAS DIRECTED TO (ANNIE->ANY) COLCHESTER +8188-269288-0017-2898: HERE IS A LETTER FOR YOU ANNIE CRIED (LESLIE->LIZZIE) +8188-269288-0018-2899: HER FACE GREW SUDDENLY WHITE AS DEATH WHAT IS IT DEAR +8188-269288-0019-2900: I HAVE BEEN (STARVING->STARLING) OR RATHER I HAVE BEEN THIRSTING +8188-269288-0020-2901: WELL READ IT IN PEACE SAID (LESLIE->LINLESILY) I WON'T DISTURB YOU +8188-269288-0021-2902: I AM TRULY GLAD IT HAS COME +8188-269288-0022-2903: (LESLIE->LISALLY) SEATED HERSELF WITH HER BACK TO HER COMPANION AND OPENED HER (OWN->ON) LETTERS +8188-269288-0023-2904: DON'T NOTICE ME REPLIED ANNIE +8188-269288-0024-2905: I MUST GO INTO THE GROUNDS THE AIR IS STIFLING +8188-269288-0025-2906: BUT THEY ARE JUST SHUTTING UP +8188-269288-0026-2907: I SHALL GO I KNOW A WAY +8188-269288-0027-2908: JUST AFTER MIDNIGHT SHE ROSE WITH A SIGH TO PREPARE FOR BED +8188-269288-0028-2909: SHE LOOKED ROUND THE ROOM +8188-269288-0029-2910: NOW I 
REMEMBER SHE GOT A LETTER WHICH UPSET HER VERY MUCH AND WENT OUT +8188-269288-0030-2911: (LESLIE->LISLEY) WENT TO THE WINDOW AND FLUNG IT OPEN SHE PUT HER HEAD OUT AND TRIED TO PEER INTO THE DARKNESS BUT THE MOON HAD ALREADY SET AND SHE COULD NOT SEE MORE THAN A COUPLE OF YARDS IN FRONT OF HER +8188-269288-0031-2912: SHE IS A VERY QUEER ERRATIC CREATURE AND THAT LETTER THERE (WAS->IS) BAD NEWS IN THAT LETTER +8188-269288-0032-2913: WHAT (CAN SHE->CAN'T YOU) BE DOING OUT BY HERSELF +8188-269288-0033-2914: (LESLIE LEFT->THIS LILY LIT) THE ROOM BUT SHE HAD SCARCELY GONE A DOZEN (PACES->PLACES) DOWN THE CORRIDOR BEFORE SHE MET (ANNIE->ANY) RETURNING +8188-269288-0034-2915: (ANNIE'S->AND HIS) EYES WERE VERY BRIGHT HER CHEEKS WERE NO LONGER PALE AND THERE WAS A BRILLIANT COLOR IN THEM +8188-269288-0035-2916: SHE DID NOT TAKE THE LEAST NOTICE OF (LESLIE->LIZZLING) BUT GOING INTO THE ROOM SHUT THE DOOR +8188-269288-0036-2917: DON'T BEGIN SAID ANNIE +8188-269288-0037-2918: DON'T BEGIN WHAT DO YOU MEAN +8188-269288-0038-2919: I MEAN THAT I DON'T WANT YOU TO BEGIN TO ASK QUESTIONS +8188-269288-0039-2920: I WALKED UP AND DOWN AS FAST AS EVER I COULD OUTSIDE IN ORDER TO MAKE MYSELF SLEEPY +8188-269288-0040-2921: DON'T TALK TO ME LESLIE DON'T SAY A SINGLE WORD +8188-269288-0041-2922: I SHALL GO OFF TO SLEEP THAT IS ALL I CARE FOR +8188-269288-0042-2923: DON'T SAID ANNIE +8188-269288-0043-2924: NOW DRINK THIS AT ONCE SHE SAID IN A VOICE OF AUTHORITY IF YOU REALLY WISH TO SLEEP +8188-269288-0044-2925: (ANNIE STARED->ANY STEERED) VACANTLY AT THE (COCOA THEN SHE->COOKER DIDN'T) UTTERED A LAUGH +8188-269288-0045-2926: DRINK THAT SHE SAID +8188-269288-0046-2927: DO YOU WANT TO KILL ME DON'T TALK ANY MORE +8188-269288-0047-2928: I (AM SLEEPY->AMY) I SHALL SLEEP +8188-269288-0048-2929: SHE GOT INTO BED AS SHE SPOKE AND WRAPPED THE CLOTHES TIGHTLY ROUND HER +8188-269288-0049-2930: (CAN'T->COULD) YOU MANAGE WITH A CANDLE JUST FOR ONCE +8188-269288-0050-2931: CERTAINLY SAID (LESLIE->IT EASILY) +8188-269288-0051-2932: SHE TURNED OFF THE LIGHT AND LIT A CANDLE WHICH (SHE->HE) PUT BEHIND HER SCREEN THEN PREPARED TO GET INTO BED +8188-269288-0052-2933: (ANNIE'S->ANY) MANNER WAS VERY MYSTERIOUS +8188-269288-0053-2934: (ANNIE->AND HE) DID NOT MEAN TO (CONFIDE->CONFINE) IN (ANYONE->ANY ONE) THAT NIGHT AND THE KINDEST THING WAS TO LEAVE HER ALONE +8188-269288-0054-2935: (TIRED->TIE IT) OUT (LESLIE->LIZZLEY) HERSELF DROPPED ASLEEP +8188-269288-0055-2936: ANNIE IS THAT YOU SHE CALLED OUT +8188-269288-0056-2937: THERE WAS NO REPLY BUT THE SOUND OF HURRYING STEPS CAME QUICKER AND QUICKER NOW AND THEN (THEY WERE->THEIR) INTERRUPTED BY A GROAN +8188-269288-0057-2938: OH THIS WILL KILL ME MY HEART WILL BREAK THIS WILL KILL ME +8188-269290-0000-2823: THE (GUILD->GOLD) OF SAINT ELIZABETH +8188-269290-0001-2824: IMMEDIATELY AFTER DINNER THAT EVENING LESLIE RAN UP TO HER ROOM TO MAKE PREPARATIONS FOR HER VISIT TO EAST HALL +8188-269290-0002-2825: I'M NOT COMING SAID ANNIE +8188-269290-0003-2826: EVERY STUDENT IS TO BE (IN->AN) EAST HALL AT HALF PAST EIGHT +8188-269290-0004-2827: IT DOESN'T MATTER REPLIED ANNIE WHETHER IT IS AN ORDER OR NOT I'M NOT COMING SAY NOTHING ABOUT ME PLEASE +8188-269290-0005-2828: IT BURNED AS IF WITH FEVER +8188-269290-0006-2829: YOU DON'T KNOW WHAT A TRIAL IT IS FOR ME TO HAVE YOU HERE +8188-269290-0007-2830: I WANT TO BE ALONE GO +8188-269290-0008-2831: I KNOW YOU DON'T QUITE MEAN WHAT YOU SAY SAID LESLIE BUT OF COURSE IF YOU REALLY WISH ME +8188-269290-0009-2832: YOU (FRET->FRITTEN) ME BEYOND ENDURANCE 
+8188-269290-0010-2833: WRAPPING A PRETTY BLUE SHAWL (ROUND HER HEAD AND->AROUND A HIDDEN) SHOULDERS SHE TURNED TO ANNIE +8188-269290-0011-2834: LESLIE WAS JUST CLOSING THE DOOR BEHIND HER WHEN (ANNIE->ANY) CALLED AFTER HER +8188-269290-0012-2835: I TOOK IT OUT SAID (LESLIE->LIZZIE) TOOK IT OUT +8188-269290-0013-2836: HAVE THE GOODNESS TO FIND IT AND PUT IT BACK +8188-269290-0014-2837: BUT DON'T LOCK ME OUT PLEASE ANNIE +8188-269290-0015-2838: OH I WON'T (LOCK->LOOK) YOU OUT SHE SAID BUT I MUST HAVE THE KEY +8188-269290-0016-2839: JANE (HERIOT'S->HERETT'S) VOICE WAS HEARD IN THE PASSAGE +8188-269290-0017-2840: AS SHE WALKED (DOWN->ROUND) THE CORRIDOR SHE HEARD IT BEING TURNED (IN->TO) THE LOCK +8188-269290-0018-2841: WHAT CAN THIS MEAN SHE SAID TO HERSELF +8188-269290-0019-2842: OH I (WON'T->WOULD) PRESS YOU REPLIED JANE +8188-269290-0020-2843: OH I SHALL NEVER DO THAT REPLIED LESLIE +8188-269290-0021-2844: YOU SEE ALL THE GIRLS EXCEPT (EILEEN->AILEEN) AND MARJORIE LAUGH AT HER AND THAT SEEMS TO ME TO MAKE HER WORSE +8188-269290-0022-2845: SOME DAY JANE YOU MUST SEE HER +8188-269290-0023-2846: IF YOU (ARE->*) IN LONDON DURING THE SUMMER YOU MUST COME (AND PAY US->A PAIRS) A VISIT WILL YOU +8188-269290-0024-2847: THAT IS IF YOU CARE TO CONFIDE IN ME +8188-269290-0025-2848: I BELIEVE POOR ANNIE IS DREADFULLY UNHAPPY +8188-269290-0026-2849: THAT'S JUST (IT JANE->A CHANT) THAT IS WHAT (FRIGHTENS->BRIGHTENS) ME SHE REFUSES TO COME +8188-269290-0027-2850: REFUSES TO COME SHE CRIED +8188-269290-0028-2851: (SHE WILL->SHE'LL) GET (INTO->IN) AN AWFUL SCRAPE +8188-269290-0029-2852: I AM SURE SHE IS ILL SHE WORKS TOO HARD AND SHE BUT THERE I DON'T KNOW THAT I OUGHT TO SAY ANY MORE +8188-269290-0030-2853: I'LL WAIT FOR YOU HERE SAID (LESLIE->LIZZIE) +8188-269290-0031-2854: DO COME (ANNIE->ANY) DO +8188-269290-0032-2855: SCARCELY LIKELY REPLIED LESLIE SHE TOLD ME SHE WAS DETERMINED NOT TO COME TO THE MEETING +8188-269290-0033-2856: BUT (MARJORIE->MARGERY) AND (EILEEN->AILEEN) HAD ALREADY DEPARTED AND LESLIE AND JANE FOUND THEMSELVES AMONG THE LAST STUDENTS TO ARRIVE AT THE GREAT EAST HALL +8188-269290-0034-2857: MISS (LAUDERDALE->LAUDIDAL) WAS STANDING WITH THE OTHER TUTORS AND (PRINCIPALS->PRINCIPLES) OF THE DIFFERENT HALLS ON A RAISED PLATFORM +8188-269290-0035-2858: THEN A (ROLL CALL->ROCCALL) WAS GONE THROUGH BY ONE OF THE TUTORS THE ONLY (ABSENTEE->ABSENTE) WAS (ANNIE->ANY) COLCHESTER +8188-269290-0036-2859: THE PHYSICAL PART OF (YOUR->THE OLD) TRAINING AND ALSO THE MENTAL PART ARE ABUNDANTLY SUPPLIED IN THIS GREAT HOUSE OF LEARNING SHE CONTINUED BUT THE SPIRITUAL PART IT SEEMS TO ME OUGHT NOW TO BE STRENGTHENED +8188-269290-0037-2860: (HEAR HEAR->HAIR HAIR) AND ONCE AGAIN (HEAR->HAIR) +8188-269290-0038-2861: SHE UTTERED (HER STRANGE->A STRAIN) REMARK STANDING UP +8188-269290-0039-2862: MARJORIE AND (EILEEN->AILEEN) WERE CLOSE TO HER +8188-269290-0040-2863: I WILL TALK WITH YOU (BELLE ACHESON->BELL ARCHISON) PRESENTLY SHE SAID +8188-269290-0041-2864: THE NAMES OF (*->THE) PROPOSED MEMBERS ARE TO BE SUBMITTED TO ME BEFORE THIS DAY WEEK +8188-269290-0042-2865: AM I MY BROTHER'S KEEPER +8188-269290-0043-2866: YOU ASK SHE CONTINUED +8188-269290-0044-2867: GOD (ANSWERS TO->ADDEST) EACH OF YOU YOU ARE +8188-269290-0045-2868: THE WORLD (SAYS->TEETH) NO I AM NOT BUT GOD SAYS YES YOU ARE +8188-269290-0046-2869: ALL MEN ARE YOUR BROTHERS +8188-269290-0047-2870: FOR ALL WHO SIN ALL WHO SUFFER YOU ARE TO (A CERTAIN->EXERT AN) EXTENT RESPONSIBLE +8188-269290-0048-2871: AFTER THE ADDRESS THE GIRLS THEMSELVES 
WERE ENCOURAGED TO SPEAK AND A VERY ANIMATED DISCUSSION FOLLOWED +8188-269290-0049-2872: IT WAS PAST TEN O'CLOCK WHEN SHE LEFT THE HALL +8188-269290-0050-2873: JUST AS SHE WAS DOING SO MISS FRERE CAME UP +8188-269290-0051-2874: (ANNIE COLCHESTER IS YOUR ROOMFELLOW->ANY COLCHESTER'S ROOM FELLOW) IS SHE NOT SHE SAID +8188-269290-0052-2875: I SEE BY YOUR FACE MISS GILROY THAT YOU ARE DISTRESSED ABOUT SOMETHING ARE YOU KEEPING ANYTHING BACK +8188-269290-0053-2876: (I AM->I'M) AFRAID I AM REPLIED (LESLIE DISTRESS->LIZZIE DISTRESSED) NOW IN HER TONE +8188-269290-0054-2877: I MUST SEE HER MYSELF EARLY IN THE MORNING AND I AM QUITE SURE THAT NOTHING WILL SATISFY MISS (LAUDERDALE->LAURA) EXCEPT A VERY AMPLE APOLOGY AND A FULL EXPLANATION OF THE REASON WHY SHE ABSENTED HERSELF +8188-269290-0055-2878: EXCUSES MAKE NO DIFFERENCE +8188-269290-0056-2879: THE GIRL WHO BREAKS THE RULES (HAS->HAVE) TO BE PUNISHED +8188-269290-0057-2880: I WILL TELL HER +8188-274364-0000-2811: THE COMMONS ALSO VOTED THAT THE NEW CREATED PEERS OUGHT TO HAVE NO VOICE IN THIS TRIAL BECAUSE THE ACCUSATION BEING AGREED TO WHILE THEY WERE COMMONERS (THEIR->THEY) CONSENT TO IT WAS IMPLIED WITH THAT OF ALL THE COMMONS OF ENGLAND +8188-274364-0001-2812: (IN->*) THE GOVERNMENT OF IRELAND HIS ADMINISTRATION HAD BEEN EQUALLY (PROMOTIVE->PROMOTED) OF HIS MASTER'S INTEREST AND THAT OF THE SUBJECTS COMMITTED TO HIS CARE +8188-274364-0002-2813: THE CASE OF LORD (MOUNTNORRIS->MONTORAS) OF ALL THOSE WHICH WERE COLLECTED WITH SO MUCH INDUSTRY IS THE MOST FLAGRANT AND THE LEAST EXCUSABLE +8188-274364-0003-2814: THE COURT WHICH CONSISTED OF THE (CHIEF OFFICERS->CHIE OFFICIALS) OF THE ARMY FOUND THE CRIME TO BE (CAPITAL->CAPT ON) AND CONDEMNED THAT NOBLEMAN TO LOSE HIS HEAD +8188-274364-0004-2815: WHERE THE TOKEN BY WHICH I (SHOULD->SHALL) DISCOVER IT +8188-274364-0005-2816: IT IS NOW (*->A) FULL TWO HUNDRED AND FORTY YEARS SINCE TREASONS WERE DEFINED AND SO LONG HAS IT BEEN SINCE ANY MAN WAS TOUCHED TO THIS EXTENT UPON THIS CRIME BEFORE MYSELF +8188-274364-0006-2817: LET US NOT TO (OUR OWN DESTRUCTION->UNDISTRUC) AWAKE THOSE SLEEPING LIONS BY RATTLING UP A COMPANY OF OLD (RECORDS->RICARDS) WHICH HAVE LAIN FOR SO MANY AGES BY THE (WALL->WAR) FORGOTTEN AND NEGLECTED +8188-274364-0007-2818: (HOWEVER->HERBERT) THESE (GENTLEMEN->GENTLEMAN) AT THE BAR (SAY->SO) THEY SPEAK FOR THE (COMMONWEALTH->CONWEALTH) AND THEY BELIEVE SO YET UNDER (FAVOR->FAVOUR) IT IS I WHO IN THIS PARTICULAR SPEAK FOR THE (COMMONWEALTH->CORNWEALTH) +8188-274364-0008-2819: MY LORDS I HAVE NOW TROUBLED YOUR LORDSHIPS A GREAT DEAL LONGER THAN I SHOULD HAVE DONE +8188-274364-0009-2820: YOUNG (VANE->VAIN) FALLING UPON THIS PAPER OF NOTES DEEMED THE MATTER OF THE UTMOST IMPORTANCE AND IMMEDIATELY COMMUNICATED IT TO (PYM->POEM) WHO NOW PRODUCED THE PAPER BEFORE THE HOUSE OF COMMONS +8188-274364-0010-2821: THE KING PROPOSES THIS DIFFICULTY BUT HOW CAN I UNDERTAKE OFFENSIVE (WAR->FOR) IF I HAVE NO MORE MONEY +8188-274364-0011-2822: YOUR MAJESTY HAVING TRIED THE AFFECTIONS OF YOUR PEOPLE YOU ARE (ABSOLVED->ABSORBED) AND LOOSE FROM ALL RULES OF GOVERNMENT AND MAY DO WHAT POWER WILL ADMIT +8280-266249-0000-339: OLD MISTER DINSMORE HAD ACCEPTED A PRESSING INVITATION FROM HIS GRANDDAUGHTER AND HER HUSBAND TO JOIN THE PARTY AND WITH THE ADDITION OF SERVANTS IT WAS A LARGE ONE +8280-266249-0001-340: AS THEY WERE IN NO HASTE AND THE CONFINEMENT OF A RAILROAD CAR WOULD BE VERY IRKSOME TO THE YOUNGER CHILDREN IT HAD BEEN DECIDED TO MAKE THE JOURNEY BY WATER +8280-266249-0002-341: THERE WERE NO 
SAD LEAVE TAKINGS TO MAR THEIR PLEASURE THE CHILDREN WERE IN WILD SPIRITS AND ALL SEEMED CHEERFUL AND HAPPY AS THEY SAT OR STOOD UPON THE DECK WATCHING THE RECEDING SHORE AS THE VESSEL STEAMED OUT OF THE HARBOR +8280-266249-0003-342: AT LENGTH THE LAND HAD QUITE DISAPPEARED NOTHING COULD BE SEEN BUT THE SKY OVERHEAD AND A VAST EXPANSE OF WATER ALL (AROUND->ROUND) AND THE PASSENGERS FOUND LEISURE TO TURN THEIR ATTENTION UPON EACH OTHER +8280-266249-0004-343: THERE ARE SOME NICE LOOKING PEOPLE ON BOARD REMARKED MISTER TRAVILLA IN AN UNDERTONE TO HIS WIFE +8280-266249-0005-344: (BESIDE->BESIDES) OURSELVES ADDED COUSIN (RONALD->RANALD) LAUGHING +8280-266249-0006-345: YES SHE ANSWERED THAT LITTLE GROUP YONDER A YOUNG MINISTER AND HIS WIFE AND CHILD I SUPPOSE +8280-266249-0007-346: AND WHAT A DEAR LITTLE FELLOW HE IS JUST ABOUT THE AGE OF OUR (HAROLD->HERALD) I SHOULD JUDGE +8280-266249-0008-347: DO YOU SON WAS THE SMILING REJOINDER +8280-266249-0009-348: HE CERTAINLY LOOKS LIKE A VERY NICE LITTLE BOY +8280-266249-0010-349: SUPPOSE YOU AND HE SHAKE HANDS FRANK +8280-266249-0011-350: I DO INDEED (THOUGH->THE) PROBABLY COMPARATIVELY FEW ARE AWARE THAT TOBACCO IS THE CAUSE OF THEIR AILMENTS +8280-266249-0012-351: DOUBTLESS THAT IS THE CASE REMARKED MISTER DINSMORE +8280-266249-0013-352: WITH ALL MY HEART IF YOU WILL STEP INTO THE (GENTLEMEN'S->GENTLEMAN'S) CABIN WHERE THERE'S A LIGHT +8280-266249-0014-353: HE LED THE WAY THE OTHERS ALL FOLLOWING AND TAKING OUT A SLIP OF PAPER READ FROM IT IN A DISTINCT TONE LOUD ENOUGH TO BE HEARD BY THOSE (*->ALL) ABOUT HIM WITHOUT DISTURBING THE OTHER PASSENGERS +8280-266249-0015-354: ONE DROP OF NICOTINE (EXTRACT OF->EXTRACTED) TOBACCO PLACED ON THE TONGUE OF (A->THE) DOG WILL KILL HIM IN A MINUTE THE HUNDREDTH PART OF (A->THE) GRAIN (PICKED->PRICKED) UNDER THE SKIN OF A MAN'S ARM WILL PRODUCE NAUSEA AND FAINTING +8280-266249-0016-355: THE HALF DOZEN CIGARS WHICH MOST SMOKERS (USE->YEARS) A DAY CONTAIN SIX OR SEVEN GRAINS ENOUGH IF CONCENTRATED AND ABSORBED TO KILL THREE MEN AND A POUND OF TOBACCO ACCORDING TO ITS QUALITY CONTAINS FROM ONE QUARTER TO ONE AND A QUARTER OUNCES +8280-266249-0017-356: IS IT STRANGE THEN THAT SMOKERS AND CHEWERS HAVE A THOUSAND AILMENTS +8280-266249-0018-357: THAT THE FRENCH (POLYTECHNIC->POLY TECHNIC AT) INSTITUTE HAD TO PROHIBIT ITS (*->THE) USE ON ACCOUNT OF ITS EFFECTS (ON->UPON) THE MIND +8280-266249-0019-358: (NOTICE->NOTICED) THE MULTITUDE OF SUDDEN DEATHS AND SEE HOW MANY ARE SMOKERS AND CHEWERS +8280-266249-0020-359: (IN->AND) A SMALL COUNTRY TOWN SEVEN OF THESE MYSTERIOUS PROVIDENCES OCCURRED WITHIN THE CIRCUIT OF A MILE ALL DIRECTLY TRACEABLE TO TOBACCO AND ANY PHYSICIAN ON A FEW MOMENTS REFLECTION CAN MATCH THIS FACT BY HIS OWN OBSERVATION +8280-266249-0021-360: AND THEN SUCH POWERFUL ACIDS PRODUCE INTENSE IRRITATION AND THIRST THIRST WHICH WATER DOES NOT QUENCH +8280-266249-0022-361: HENCE A RESORT TO CIDER AND BEER +8280-266249-0023-362: NO SIR WHAT (KNOW->NO) YE NOT THAT YOUR BODY IS THE TEMPLE OF THE HOLY GHOST WHICH IS IN YOU WHICH YE HAVE OF GOD AND YE ARE NOT YOUR OWN +8280-266249-0024-363: FOR (YE->YOU) ARE BOUGHT WITH A PRICE THEREFORE GLORIFY GOD IN YOUR BODY AND IN YOUR SPIRIT WHICH ARE GOD'S +8280-266249-0025-364: WE CERTAINLY HAVE NO RIGHT TO INJURE OUR BODIES EITHER BY NEGLECT OR SELF INDULGENCE +8280-266249-0026-365: AND AGAIN I BESEECH YOU THEREFORE BRETHREN BY THE MERCIES OF GOD THAT YE PRESENT YOUR BODIES A LIVING SACRIFICE (HOLY->WHOLLY) ACCEPTABLE UNTO GOD WHICH IS YOUR REASONABLE SERVICE 
+8280-266249-0027-366: (IT MUST REQUIRE->EMMA STACCOY) A GOOD DEAL OF RESOLUTION FOR ONE WHO HAS BECOME FOND OF THE INDULGENCE TO GIVE IT UP REMARKED MISTER (DALY->DALEY) +8280-266249-0028-367: NO DOUBT NO DOUBT RETURNED MISTER (LILBURN->LOWBOURNE) BUT IF (THY RIGHT EYE->I WRITE I) OFFEND THEE PLUCK IT (OUT->UP) AND CAST IT FROM (THEE->ME) FOR IT IS PROFITABLE FOR THEE THAT ONE OF THY MEMBERS SHOULD PERISH AND NOT THAT THY WHOLE BODY SHOULD BE CAST INTO HELL +8280-266249-0029-368: THERE WAS A PAUSE BROKEN BY YOUNG HORACE WHO HAD BEEN WATCHING A GROUP OF MEN GATHERED ABOUT A TABLE AT THE FURTHER END OF THE ROOM +8280-266249-0030-369: THEY ARE GAMBLING YONDER AND I'M AFRAID THAT YOUNG FELLOW IS BEING BADLY FLEECED BY (THAT->THE) MIDDLE AGED MAN OPPOSITE +8280-266249-0031-370: THE EYES OF THE WHOLE PARTY WERE AT ONCE TURNED IN THAT DIRECTION +8280-266249-0032-371: NO SIR HE IS NOT HERE +8280-266249-0033-372: (AND->AS) THE DOOR WAS SLAMMED VIOLENTLY (TO->TOO) +8280-266249-0034-373: NOW THE VOICE CAME FROM THE SKYLIGHT OVERHEAD APPARENTLY AND WITH A FIERCE IMPRECATION THE IRATE GAMESTER RUSHED UPON DECK AND RAN HITHER AND THITHER IN SEARCH OF HIS TORMENTOR +8280-266249-0035-374: HIS VICTIM WHO HAD BEEN LOOKING ON DURING THE LITTLE SCENE AND LISTENING TO THE MYSTERIOUS VOICE (IN->AND) SILENT WIDE EYED WONDER AND FEAR NOW (ROSE->AROSE) HASTILY HIS FACE (DEATHLY->DEFTLY) PALE WITH TREMBLING HANDS GATHERED UP THE MONEY HE HAD STAKED AND HURRYING (INTO->TO) HIS (STATE ROOM->STATEROOM) LOCKED HIMSELF IN +8280-266249-0036-375: WHAT DOES IT MEAN CRIED ONE +8280-266249-0037-376: A (VENTRILOQUIST ABOARD->VENTILLA QUESTED BORN) OF COURSE RETURNED ANOTHER LET'S FOLLOW AND SEE THE FUN +8280-266249-0038-377: I WONDER WHICH OF US IT IS REMARKED THE FIRST LOOKING HARD AT OUR PARTY I DON'T KNOW BUT COME ON +8280-266249-0039-378: THAT FELLOW NICK WARD IS A NOTED (BLACKLEG AND->BLACK LAG IN) RUFFIAN HAD HIS NOSE BROKEN IN A FIGHT AND IS SENSITIVE ON THE SUBJECT WAS CHEATING OF COURSE +8280-266249-0040-379: WHO ASKED THE MATE I'VE SEEN (NONE UP->NO NOT) HERE THOUGH THERE ARE SOME IN THE STEERAGE +8280-266249-0041-380: THEY HEARD HIM IN SILENCE WITH A COOL PHLEGMATIC INDIFFERENCE MOST EXASPERATING TO ONE IN HIS PRESENT MOOD +8280-266249-0042-381: A MAN OF GIANT SIZE AND HERCULEAN STRENGTH HAD LAID ASIDE HIS PIPE AND SLOWLY RISING TO HIS FEET SEIZED THE SCOUNDREL IN HIS POWERFUL GRASP +8280-266249-0043-382: LET ME GO YELLED WARD MAKING A DESPERATE EFFORT TO FREE HIS ARMS +8280-266249-0044-383: I (DINKS->DENZ) NO I (DINKS->THINK) I (DEACH->DID) YOU VON (LESSON->MESS') RETURNED HIS CAPTOR NOT RELAXING HIS GRASP IN THE LEAST +8280-266249-0045-384: THE GERMAN RELEASED HIS PRISONER AND THE LATTER (SLUNK->SUNK) AWAY WITH MUTTERED THREATS AND IMPRECATIONS UPON THE HEAD OF HIS TORMENTOR +8280-266249-0046-385: MISTER LILBURN AND MISTER (DALY->DALEY) EACH (AT->HAD) A DIFFERENT TIME SOUGHT OUT THE YOUNG MAN (WARD'S->WORDS) INTENDED VICTIM AND TRIED TO INFLUENCE HIM FOR GOOD +8280-266249-0047-386: YET THERE WAS GAMBLING AGAIN THE SECOND NIGHT BETWEEN WARD AND SEVERAL OTHERS OF HIS (PROFESSION->PROFESSIONS) +8280-266249-0048-387: THEY KEPT IT UP TILL AFTER MIDNIGHT +8280-266249-0049-388: THEN MISTER (LILBURN->LOWBORN) WAKING FROM HIS FIRST SLEEP IN A STATEROOM NEAR BY THOUGHT HE WOULD BREAK IT UP ONCE MORE +8280-266249-0050-389: AN INTENSE VOICELESS EXCITEMENT POSSESSED THE PLAYERS FOR THE GAME WAS A CLOSE ONE AND (THE STAKES->MISTAKES) WERE VERY HEAVY +8280-266249-0051-390: THEY BENT EAGERLY OVER THE BOARD EACH WATCHING WITH 
FEVERISH ANXIETY HIS COMPANION'S MOVEMENTS EACH CASTING NOW AND AGAIN A GLOATING EYE UPON THE HEAP OF GOLD AND (GREENBACKS->GREEN BACKS) THAT LAY BETWEEN THEM AND AT TIMES HALF STRETCHING OUT HIS HAND TO CLUTCH IT +8280-266249-0052-391: A DEEP (GROAN->GROUND) STARTLED THEM AND THEY SPRANG TO THEIR FEET PALE AND TREMBLING WITH SUDDEN TERROR EACH HOLDING HIS BREATH AND STRAINING HIS EAR TO CATCH A REPETITION OF THE DREAD SOUND +8280-266249-0053-392: BUT (ALL WAS->ALWAYS) SILENT AND AFTER A MOMENT OF ANXIOUS WAITING THEY SAT DOWN TO THEIR GAME AGAIN TRYING TO CONCEAL AND SHAKE OFF THEIR FEARS (WITH A->FOR THE) FORCED UNNATURAL LAUGH +8280-266249-0054-393: IT CAME FROM UNDER THE TABLE GASPED (WARD->TOWARD) LOOK WHAT'S THERE (LOOK->LOOKED) YOURSELF +8280-266249-0055-394: WHAT CAN IT HAVE BEEN THEY ASKED EACH OTHER +8280-266249-0056-395: OH NONSENSE WHAT FOOLS WE ARE +8280-266249-0057-396: IT WAS THE LAST GAME OF CARDS FOR THAT TRIP +8280-266249-0058-397: THE CAPTAIN COMING IN SHORTLY AFTER THE SUDDEN FLIGHT OF THE GAMBLERS TOOK CHARGE OF THE MONEY AND THE NEXT DAY RESTORED IT TO THE OWNERS +8280-266249-0059-398: TO ELSIE'S OBSERVANT EYES IT PRESENTLY BECAME EVIDENT THAT THE (DALYS WERE IN->DAILIES RAN) VERY (STRAITENED->STRAIGHTENED) CIRCUMSTANCES +8280-266249-0060-399: OH HOW KIND HOW VERY KIND MISSUS (DALY->DALEY) SAID WITH TEARS OF JOY AND GRATITUDE WE HAVE HARDLY KNOWN HOW WE SHOULD MEET THE MOST NECESSARY EXPENSES OF THIS TRIP BUT HAVE BEEN TRYING TO CAST OUR CARE UPON THE LORD ASKING HIM TO PROVIDE +8280-266249-0061-400: AND HOW WONDERFULLY HE HAS ANSWERED OUR PETITIONS +8280-266249-0062-401: ELSIE ANSWERED PRESSING HER HAND AFFECTIONATELY (ART->ARE) WE NOT SISTERS IN CHRIST +8280-266249-0063-402: YE ARE ALL THE CHILDREN OF GOD BY FAITH IN CHRIST JESUS +8280-266249-0064-403: YE ARE ALL ONE (IN->AND) CHRIST JESUS +8280-266249-0065-404: WE (FEEL->SEE ON) MY HUSBAND AND I THAT WE ARE ONLY THE STEWARDS OF HIS BOUNTY AND (THAT->*) BECAUSE HE HAS SAID INASMUCH AS YE HAVE DONE IT UNTO ONE OF THE LEAST OF THESE MY BRETHREN (YE->YOU) HAVE DONE IT UNTO ME IT IS THE GREATEST PRIVILEGE AND DELIGHT TO DO ANYTHING FOR HIS PEOPLE +8461-258277-0000-1649: WHEN IT WAS THE SEVEN HUNDRED AND EIGHTEENTH NIGHT +8461-258277-0001-1650: BUT HE ANSWERED NEEDS (MUST I->MY STY) HAVE (ZAYNAB ALSO->THINE ABBS SO) NOW SUDDENLY THERE CAME A RAP AT THE DOOR AND THE MAID SAID WHO IS AT THE DOOR +8461-258277-0002-1651: THE KNOCKER REPLIED (KAMAR->COME ON) DAUGHTER (OF AZARIAH->VAZARRE) THE JEW SAY ME IS (ALI OF CAIRO->ALIO KARA) WITH YOU +8461-258277-0003-1652: REPLIED THE BROKER'S DAUGHTER O THOU DAUGHTER OF A DOG +8461-258277-0004-1653: AND HAVING THUS (ISLAMISED->ISLAMMISED) SHE ASKED HIM (DO->TWO) MEN IN THE FAITH OF (AL ISLAM GIVE->ALICELA GAVE) MARRIAGE PORTIONS TO WOMEN OR (DO->TWO) WOMEN DOWER MEN +8461-258277-0005-1654: AND SHE THREW DOWN THE JEW'S HEAD BEFORE HIM +8461-258277-0006-1655: NOW THE (CAUSE->COURSE) OF HER SLAYING HER SIRE WAS AS FOLLOWS +8461-258277-0007-1656: THEN HE (SET->SAT) OUT REJOICING TO RETURN TO THE BARRACK OF THE FORTY +8461-258277-0008-1657: SO HE ATE AND FELL DOWN SENSELESS FOR THE SWEETMEATS WERE DRUGGED WITH (BHANG->BANG) WHEREUPON THE KAZI BUNDLED HIM INTO THE SACK AND MADE OFF WITH (HIM->THEM) CHARGER AND CHEST AND ALL TO THE BARRACK OF THE (FORTY->FORTE) +8461-258277-0009-1658: PRESENTLY (HASAN SHUMAN->HER SON SCHUMANN) CAME OUT OF A (CLOSET->CLOTH) AND SAID TO HIM HAST THOU GOTTEN THE GEAR O ALI +8461-258277-0010-1659: SO HE TOLD HIM WHAT HAD BEFALLEN HIM AND ADDED IF I KNOW 
(WHITHER->WHETHER) THE RASCAL IS GONE AND WHERE TO FIND THE KNAVE I WOULD PAY HIM OUT +8461-258277-0011-1660: KNOWEST THOU WHITHER HE WENT +8461-258277-0012-1661: ANSWERED HASAN I KNOW WHERE HE IS AND OPENING THE DOOR OF THE CLOSET SHOWED HIM THE SWEETMEAT (SELLER->CELLAR) WITHIN DRUGGED AND SENSELESS +8461-258277-0013-1662: SO I WENT ROUND ABOUT THE HIGHWAYS OF THE CITY TILL I MET A SWEETMEAT (SELLER->CELLAR) AND BUYING HIS CLOTHES AND (STOCK IN->STOCKING) TRADE AND GEAR FOR TEN (DINARS->HOURS) DID WHAT WAS DONE +8461-258277-0014-1663: QUOTH (AL->A) RASHID WHOSE HEAD IS THIS +8461-258277-0015-1664: SO (ALI->I) RELATED TO HIM ALL THAT (HAD->*) PASSED FROM FIRST (TO->*) LAST AND THE CALIPH SAID I HAD NOT THOUGHT THOU WOULDST KILL HIM FOR THAT HE WAS A SORCERER +8461-258277-0016-1665: HE REPLIED I HAVE FORTY LADS BUT THEY ARE IN CAIRO +8461-278226-0000-1633: AND LAURA HAD HER OWN PET PLANS +8461-278226-0001-1634: SHE MEANT TO BE SCRUPULOUSLY CONSCIENTIOUS IN THE ADMINISTRATION OF (HER TALENTS->ITALIANS) AND SOMETIMES AT CHURCH ON A SUNDAY WHEN THE (SERMON->SAME) WAS PARTICULARLY AWAKENING SHE MENTALLY DEBATED (THE->A) SERIOUS QUESTION AS TO WHETHER NEW BONNETS AND A PAIR OF (JOUVIN'S->JUBANCE) GLOVES DAILY WERE NOT (SINFUL->SENT FOR) BUT I THINK SHE DECIDED THAT THE NEW BONNETS AND GLOVES WERE ON THE WHOLE A (PARDONABLE->PURCHANGLE) WEAKNESS AS BEING GOOD FOR TRADE +8461-278226-0002-1635: ONE MORNING LAURA TOLD HER HUSBAND WITH A GAY LAUGH THAT SHE WAS GOING TO (VICTIMIZE->VICTIMISE) HIM BUT HE WAS TO PROMISE TO BE PATIENT AND BEAR WITH HER FOR ONCE IN A WAY +8461-278226-0003-1636: I WANT TO SEE ALL THE PICTURES THE MODERN PICTURES ESPECIALLY +8461-278226-0004-1637: I REMEMBER ALL THE (RUBENSES AT->REUBEN SAYS THAT) THE LOUVRE FOR I SAW THEM THREE YEARS AGO WHEN I WAS STAYING IN PARIS WITH GRANDPAPA +8461-278226-0005-1638: SHE RETURNED IN A LITTLE MORE THAN TEN MINUTES IN THE FRESHEST TOILETTE ALL PALE SHIMMERING BLUE LIKE THE SPRING SKY WITH (PEARL GREY->POOR GRAY) GLOVES AND BOOTS AND PARASOL AND A BONNET THAT SEEMED MADE OF AZURE BUTTERFLIES +8461-278226-0006-1639: (IT->HE) WAS DRAWING TOWARDS THE CLOSE OF THIS DELIGHTFUL HONEYMOON TOUR AND IT WAS A BRIGHT SUNSHINY MORNING EARLY IN FEBRUARY BUT FEBRUARY IN PARIS IS SOMETIMES BETTER THAN APRIL IN LONDON +8461-278226-0007-1640: BUT SHE FIXED UPON A PICTURE WHICH SHE SAID SHE PREFERRED TO ANYTHING SHE HAD SEEN IN THE GALLERY +8461-278226-0008-1641: PHILIP (JOCELYN->JOSCELYN) WAS EXAMINING SOME PICTURES ON THE OTHER SIDE OF THE ROOM WHEN HIS WIFE MADE (THIS->THE) DISCOVERY +8461-278226-0009-1642: HOW I WISH YOU COULD GET ME A COPY OF THAT PICTURE PHILIP LAURA SAID ENTREATINGLY +8461-278226-0010-1643: I SHOULD SO LIKE ONE TO HANG IN MY MORNING ROOM (AT JOCELYN'S ROCK->A JOCELYN STROKE) +8461-278226-0011-1644: SHE TURNED TO THE (FRENCH ARTIST->FRENCHARD THIS) PRESENTLY AND ASKED (HIM->THEM) WHERE THE ELDER MISTER (KERSTALL->CURSON) LIVED AND IF THERE WAS ANY POSSIBILITY OF SEEING HIM +8461-278226-0012-1645: THEY HAVE SAID THAT HE IS EVEN A LITTLE IMBECILE THAT HE DOES NOT REMEMBER HIMSELF OF THE MOST COMMON EVENTS OF HIS LIFE +8461-278226-0013-1646: BUT THERE ARE SOME OTHERS WHO SAY THAT HIS MEMORY HAS NOT ALTOGETHER FAILED AND THAT HE (IS->*) STILL ENOUGH HARSHLY CRITICAL TOWARDS THE WORKS OF OTHERS +8461-278226-0014-1647: I DON'T THINK YOU WILL HAVE ANY DIFFICULTY IN FINDING THE HOUSE +8461-278226-0015-1648: YOU (WILL BE DOING->WERE BETWEEN) ME SUCH A FAVOUR (PHILIP->FELLOW) IF YOU'LL SAY YES +8461-281231-0000-1594: HIS FOLLOWERS 
(RUSHED->RUSH) FORWARD TO WHERE HE LAY AND (THEIR->THE) UNITED FORCE COMPELLING THE BLACK (KNIGHT->NIGHT) TO PAUSE THEY DRAGGED (THEIR->THE) WOUNDED LEADER WITHIN THE WALLS +8461-281231-0001-1595: IT WAS ON THEIR JOURNEY TO THAT TOWN THAT THEY WERE OVERTAKEN ON THE ROAD BY (CEDRIC->SEDRRICK) AND HIS PARTY IN WHOSE COMPANY THEY WERE AFTERWARDS CARRIED CAPTIVE TO THE (CASTLE->COUNCIL) OF (TORQUILSTONE->TORCHLESTONE) +8461-281231-0002-1596: (AS HE->I SEE) LAY UPON HIS BED (RACKED->WRAPPED) WITH PAIN AND MENTAL AGONY AND FILLED WITH THE FEAR OF RAPIDLY APPROACHING DEATH HE HEARD A VOICE ADDRESS HIM +8461-281231-0003-1597: WHAT ART THOU HE EXCLAIMED IN TERROR +8461-281231-0004-1598: LEAVE ME AND SEEK THE SAXON (WITCH ULRICA->WHICH OIKA) WHO WAS MY TEMPTRESS LET HER AS WELL AS I (TASTE->CASE) THE TORTURES WHICH ANTICIPATE HELL +8461-281231-0005-1599: EXCLAIMED THE NORMAN (HO->OH) +8461-281231-0006-1600: (REMEMBEREST->REMEMBER AS) THOU THE MAGAZINE OF FUEL THAT IS (STORED->STOLE) BENEATH THESE APARTMENTS WOMAN +8461-281231-0007-1601: THEY ARE FAST RISING AT LEAST SAID (ULRICA->EUREKA) AND A SIGNAL SHALL SOON WAVE (TO WARN->TOWARD) THE BESIEGERS TO PRESS HARD UPON THOSE WHO WOULD EXTINGUISH THEM +8461-281231-0008-1602: MEANWHILE THE BLACK KNIGHT HAD LED HIS FORCES AGAIN TO THE ATTACK AND SO VIGOROUS WAS THEIR ASSAULT THAT BEFORE LONG THE GATE OF THE CASTLE ALONE SEPARATED THEM FROM THOSE WITHIN +8461-281231-0009-1603: THE DEFENDERS (FINDING->FIND IN) THE CASTLE TO BE ON FIRE NOW DETERMINED TO SELL THEIR LIVES AS (DEARLY->DAILY) AS THEY COULD AND HEADED BY (DE BRACY->THE BRACEE) THEY THREW OPEN THE GATE AND WERE AT ONCE INVOLVED IN A TERRIFIC CONFLICT WITH THOSE OUTSIDE +8461-281231-0010-1604: THE BLACK (KNIGHT->NIGHT) WITH (PORTENTOUS->POTENT OF) STRENGTH (FORCED->FORCE) HIS WAY INWARD IN DESPITE OF (DE BRACY->THE BRAZY) AND HIS FOLLOWERS +8461-281231-0011-1605: TWO OF THE FOREMOST (INSTANTLY FELL->THING) AND THE REST GAVE WAY NOTWITHSTANDING ALL (THEIR LEADERS EFFORTS->THE LEADER'S EFFORT) TO STOP THEM +8461-281231-0012-1606: THE BLACK (KNIGHT->NIGHT) WAS SOON ENGAGED IN DESPERATE COMBAT WITH THE NORMAN CHIEF AND THE VAULTED ROOF OF THE HALL (RUNG->RANG) WITH (THEIR->THE) FURIOUS BLOWS +8461-281231-0013-1607: AT LENGTH (DE BRACY->THE BRACEY) FELL +8461-281231-0014-1608: TELL ME THY NAME (OR->A) WORK THY PLEASURE ON ME +8461-281231-0015-1609: YET FIRST LET ME SAY SAID (DE BRACY->DEBRACY) WHAT IT IMPORTS THEE TO KNOW +8461-281231-0016-1610: EXCLAIMED THE BLACK KNIGHT PRISONER AND PERISH +8461-281231-0017-1611: THE LIFE OF EVERY MAN IN THE CASTLE SHALL ANSWER IT IF A HAIR OF HIS HEAD BE SINGED SHOW ME HIS CHAMBER +8461-281231-0018-1612: RAISING THE WOUNDED MAN WITH EASE THE BLACK KNIGHT RUSHED WITH (HIM->THEM) TO THE (POSTERN->PASSING) GATE AND HAVING THERE DELIVERED HIS BURDEN TO THE CARE OF TWO (YEOMEN->YEOMAN) HE AGAIN ENTERED THE CASTLE TO ASSIST IN THE RESCUE OF (THE OTHER->THEIR) PRISONERS +8461-281231-0019-1613: BUT IN OTHER PARTS THE BESIEGERS PURSUED THE DEFENDERS OF THE CASTLE FROM CHAMBER TO CHAMBER AND SATIATED IN (THEIR->THE) BLOOD THE VENGEANCE WHICH HAD LONG ANIMATED THEM AGAINST THE SOLDIERS OF THE TYRANT (FRONT->FROM) DE BOEUF +8461-281231-0020-1614: AS THE FIRE (COMMENCED->COMMANDS) TO SPREAD RAPIDLY THROUGH ALL PARTS OF THE CASTLE (ULRICA->OR RICHA) APPEARED ON ONE OF THE TURRETS +8461-281231-0021-1615: BEFORE LONG THE TOWERING FLAMES HAD SURMOUNTED EVERY OBSTRUCTION AND ROSE TO THE EVENING SKIES (ONE->WHEN) HUGE AND BURNING BEACON (SEEN->SEEMED) FAR AND WIDE THROUGH THE ADJACENT 
COUNTRY (TOWER->TOWERED) AFTER TOWER CRASHED DOWN WITH BLAZING ROOF AND RAFTER +8461-281231-0022-1616: AT LENGTH WITH A TERRIFIC CRASH THE WHOLE (TURRET->TOWER) GAVE WAY AND SHE PERISHED IN (THE->*) FLAMES WHICH HAD CONSUMED HER TYRANT +8461-281231-0023-1617: WHEN THE OUTLAWS (HAD->ARE) DIVIDED THE SPOILS WHICH THEY HAD TAKEN FROM THE CASTLE OF (TORQUILSTONE->TORCHLESTONE) CEDRIC PREPARED TO TAKE HIS DEPARTURE +8461-281231-0024-1618: HE LEFT THE GALLANT BAND OF FORESTERS SORROWING DEEPLY FOR HIS LOST FRIEND THE LORD OF (CONINGSBURGH->CONIGSBURG) AND HE AND HIS FOLLOWERS HAD SCARCE DEPARTED WHEN A PROCESSION MOVED SLOWLY FROM UNDER THE GREENWOOD BRANCHES IN THE DIRECTION WHICH HE HAD TAKEN IN THE CENTRE OF WHICH WAS THE CAR IN WHICH THE BODY OF (ATHELSTANE->ADDLESTEIN) WAS LAID +8461-281231-0025-1619: (DE BRACY->DEBRACY) BOWED LOW AND IN SILENCE THREW HIMSELF UPON A HORSE AND GALLOPED OFF THROUGH THE (WOOD->WOODS) +8461-281231-0026-1620: HERE IS A BUGLE WHICH AN ENGLISH YEOMAN HAS ONCE WORN I PRAY YOU TO KEEP IT AS A MEMORIAL OF YOUR GALLANT BEARING +8461-281231-0027-1621: SO SAYING HE MOUNTED HIS STRONG WAR HORSE AND RODE OFF THROUGH THE FOREST +8461-281231-0028-1622: DURING ALL THIS TIME (ISAAC->MISERC) OF YORK SAT MOURNFULLY APART GRIEVING FOR THE LOSS OF HIS (DEARLY->STILLIE) LOVED DAUGHTER REBECCA +8461-281231-0029-1623: AND WITH THIS EPISTLE (THE UNHAPPY->THEN HAPPY) OLD MAN SET OUT TO PROCURE HIS DAUGHTER'S LIBERATION +8461-281231-0030-1624: THE (TEMPLAR->TEMPLE) IS FLED SAID (DE BRACY->THE BRACEE) IN ANSWER TO THE PRINCE'S EAGER QUESTIONS (FRONT->FROM) DE BOEUF YOU WILL NEVER SEE MORE AND HE ADDED IN A LOW AND EMPHATIC TONE (RICHARD->WRETCHED) IS (IN->AN) ENGLAND I HAVE SEEN HIM AND SPOKEN WITH HIM +8461-281231-0031-1625: HE (APPEALED->APPEARED) TO (DE BRACY->THE BRACELET) TO ASSIST HIM IN THIS PROJECT AND BECAME AT ONCE DEEPLY SUSPICIOUS OF THE (KNIGHT'S->NIGHT'S) LOYALTY TOWARDS HIM WHEN HE DECLINED TO LIFT HAND AGAINST THE MAN WHO HAD SPARED HIS OWN LIFE +8461-281231-0032-1626: BEFORE REACHING HIS DESTINATION HE WAS (TOLD->STOLE) THAT LUCAS (DE BEAUMANOIR->THE BURMANOIS) THE GRAND MASTER OF THE ORDER OF THE TEMPLARS WAS THEN ON VISIT TO THE PRECEPTORY +8461-281231-0033-1627: HE HAD NOT UNTIL THEN BEEN INFORMED (OF->TO) THE PRESENCE OF THE JEWISH MAIDEN IN THE ABODE OF THE TEMPLARS AND GREAT WAS HIS FURY AND INDIGNATION ON LEARNING THAT SHE WAS AMONGST THEM +8461-281231-0034-1628: POOR ISAAC WAS HURRIED OFF ACCORDINGLY AND EXPELLED FROM THE PRECEPTORY ALL HIS ENTREATIES AND EVEN HIS OFFERS UNHEARD AND DISREGARDED +8461-281231-0035-1629: THE ASSURANCE THAT SHE POSSESSED SOME FRIEND IN THIS AWFUL ASSEMBLY GAVE HER COURAGE TO LOOK AROUND AND TO MARK INTO WHOSE PRESENCE SHE HAD BEEN CONDUCTED +8461-281231-0036-1630: SHE GAZED ACCORDINGLY UPON A SCENE WHICH MIGHT WELL HAVE STRUCK TERROR INTO A BOLDER HEART THAN HERS +8461-281231-0037-1631: AT HIS FEET WAS PLACED (A->THE) TABLE OCCUPIED BY TWO SCRIBES WHOSE DUTY (IT->*) WAS TO RECORD THE PROCEEDINGS OF THE DAY +8461-281231-0038-1632: THE PRECEPTORS OF WHOM THERE WERE FOUR PRESENT OCCUPIED SEATS BEHIND (THEIR->THE) SUPERIORS AND BEHIND THEM STOOD THE ESQUIRES OF THE ORDER ROBED IN WHITE + +SUBSTITUTIONS: count ref -> hyp +35 THE -> A +33 IN -> AND +33 A -> THE +26 AND -> IN +11 AN -> AND +10 THIS -> THE +10 THEIR -> THE +10 DICKIE -> DICKY +9 THAT -> THE +9 ANNIE -> ANY +8 I -> AND +7 TO -> THE +7 MURDOCH -> MURDOCK +7 MAN -> MEN +6 WERE -> WHERE +6 THE -> THEY +6 THE -> THEIR +6 THE -> THAT +6 SIGURD -> CIGAR +6 OH -> O +6 LARCH -> 
LARGE +6 HAS -> IS +6 DE -> THE +6 AT -> IT +6 ARCHY -> ARCHIE +5 WILFRID -> WILFRED +5 THESE -> THIS +5 THERE -> THEIR +5 THE -> TO +5 THE -> THIS +5 SINDBAD -> SINBAD +5 SHARRKAN -> SHARKAN +5 ORGANISER -> ORGANIZER +5 OL -> OLD +5 OF -> A +5 KINE -> KIND +5 IS -> HIS +5 INTERESTS -> INTEREST +5 IM -> HIM +5 HER -> A +5 ANYONE -> ANY +5 AND -> AN +4 YOU -> HE +4 WITH -> WAS +4 WILL -> WOULD +4 WAS -> IS +4 UPON -> UP +4 THEY -> THERE +4 ON -> AND +4 N'T -> NOT +4 LIL -> LITTLE +4 LESLIE -> LIZZIE +4 KNOW -> NO +4 IT -> HE +4 I -> I'M +4 I -> A +4 HERMON -> HARMON +4 HATH -> HAD +4 HAS -> HAD +4 AS -> IS +4 AND -> A +3 ZAU -> THOU +3 YOU'VE -> YOU +3 YOU'RE -> YOU +3 YOU -> IT +3 WOULD -> WILL +3 WHEN -> WITH +3 WHEN -> AND +3 WHAT -> BUT +3 WE'RE -> WE +3 UP -> UPSTAIRS +3 TRY -> TRIED +3 TOO -> TO +3 TO -> A +3 THIS -> THESE +3 THIS -> HIS +3 THEY -> THEIR +3 THEY -> THE +3 THESE -> THE +3 THEIR -> THEY +3 THAT -> AT +3 SET -> SAID +3 SANCT -> SAINT +3 RAYSTOKE -> RAYSTROKE +3 PRIORESS -> PRIORS +3 OR -> OF +3 ON -> IN +3 OFF -> OF +3 O -> OF +3 MY -> BY +3 MISTAH -> MISTER +3 MISSUS -> MISS +3 MINE -> MY +3 MEN -> MAN +3 LESLIE -> LISLEY +3 KNIGHT -> NIGHT +3 JES -> JUST +3 IZZY -> IZZIE +3 IT'S -> IS +3 IT -> A +3 IS -> WAS +3 IN -> ON +3 IN -> AN +3 I'M -> I +3 I -> I'VE +3 HIS -> THIS +3 HIS -> IS +3 HIM -> THEM +3 HERMON -> HERMANN +3 HERMON -> HERMAN +3 HEAR -> HERE +3 HEAR -> HAIR +3 HANDS -> HAND +3 HAD -> AND +3 GOING -> GOIN +3 FAUVENT -> VENT +3 EILEEN -> AILEEN +3 DO -> TO +3 DALY -> DALEY +3 BUT -> THAT +3 BRAHMAN -> BRAMIN +3 BEFEL -> BEFELL +3 AT -> TO +3 AT -> AND +3 AT -> A +3 AROUND -> ROUND +3 ARE -> A +3 A -> TO +3 A -> OF +3 A -> IT +3 A -> HER +2 ZAU -> ZA +2 ZARATHUSTRA -> THEIR +2 YOUR -> THE +2 YOU'RE -> YO +2 YOU -> YOUR +2 YO'LL -> YOU'LL +2 YO -> YOU +2 YER -> YOU +2 YE -> YOU +2 WON'T -> WOULD +2 WITH -> THE +2 WITH -> FOR +2 WINTER -> WINDOW +2 WILD -> WHITE +2 WHILE -> WHETHER +2 WHEN -> ONE +2 WERE -> WAS +2 WAYNE -> WAIN +2 UNDERTAKER'S -> UNDERTAKERS +2 TORQUILSTONE -> TORCHLESTONE +2 TONIGHT -> TO +2 TO -> TOO +2 TO -> INTO +2 TIDINGS -> HIDINGS +2 THUS -> THIS +2 THOUSANDS -> THOUSAND +2 THEY'RE -> THERE +2 THEY -> HE +2 THEN -> THAT +2 THEM -> HIM +2 THE -> THEN +2 THE -> NO +2 THE -> IT +2 THE -> DEBRAMIN +2 THE -> AND +2 THAT'S -> THAT +2 TELLTALE -> TELL +2 TAMAR -> TO +2 SYRUP -> SERF +2 STONEWALL -> STONE +2 STOKER -> STOCKER +2 STAYED -> STATE +2 SON -> SUN +2 SOMEONE -> SOME +2 SO -> TO +2 SHELL -> SHELLFISH +2 SHE -> YOU +2 SHE -> SHE'LL +2 SHAWS -> SHORES +2 SHALL -> SHOULD +2 SERGEY -> SO +2 SELLER -> CELLAR +2 SEEN -> SEE +2 SEEK -> SEE +2 SEE -> SEEM +2 ROUND -> AROUND +2 REGIN -> RIGAN +2 RAISE -> RAISED +2 PROCLUS -> PROCLASS +2 PRIEST -> PRIESTS +2 POLL -> POLE +2 PIGEONCOTE -> PIGEON +2 OUR -> A +2 ONE -> WHEN +2 ONCE -> WAS +2 ON -> DOWN +2 OL -> ALL +2 OF -> HAVE +2 O -> WHO +2 O -> OH +2 O -> A +2 NOUGHT -> NOT +2 NORTHFIELD -> NORTH +2 NOR -> NO +2 NO -> KNOW +2 NEIGHBOR -> NEIGHBOUR +2 MUST -> WAS +2 MISSUS -> MISTER +2 MILICENT -> MILLSON +2 MESTER -> MISTER +2 MENAHEM -> MANY +2 MARSHAL -> MARTIAN +2 LUNA -> LENA +2 LOVE -> LOVED +2 LIKED -> LIKE +2 LEVER -> LOVER +2 LEFT -> LAP +2 LA -> NEWBURG +2 KEEP -> HE +2 JURY -> CHEERY +2 JUDGE -> JOE +2 JAKEY -> JAKIE +2 JACKAL -> JACK +2 IZZY -> IS +2 IZZY -> AS +2 ITS -> HIS +2 IT -> I +2 IT -> AT +2 IS -> EAST +2 IS -> AS +2 IN'T -> IN +2 IF -> OF +2 I'VE -> I +2 I'LL -> I +2 I'D -> I +2 I -> THEY +2 I -> AS +2 HOZE -> HOSE +2 HOW -> HER +2 HORSTIUS -> HORSES +2 HOO'S -> WHO'S +2 
HONOR -> HONOUR +2 HO -> OH +2 HIS -> THE +2 HIS -> A +2 HERMON'S -> HERMANN'S +2 HERE'S -> HERE +2 HERE -> HEAR +2 HER -> THE +2 HER -> HIS +2 HER -> FOR +2 HE -> YOU +2 HE -> IT +2 HE -> HIS +2 HE -> HE'D +2 HAYS -> HAYES +2 HAVE -> HATH +2 HAS -> HATH +2 HAID -> HEAD +2 HAD -> AT +2 GURR -> GREW +2 GURR -> GIRK +2 GURR -> GARR +2 GUNNAR -> GUNNER +2 GRANDPAP -> GRANDPA +2 GOD -> GONE +2 GIVING -> GIVEN +2 GIORGIO -> GEORGIO +2 GENTLEMEN -> GENTLEMAN +2 FRONT -> FROM +2 FROM -> FOR +2 FRANZ -> FRANCE +2 FOALS -> FOLDS +2 FOAL -> FULL +2 FESTAL -> FEAST +2 FAVOR -> FAVOUR +2 FAUCHELEVENT -> FOR +2 EYE -> I +2 ETERNAL -> HAD +2 END -> AND +2 E'S -> HE +2 DONE -> TURNED +2 DOG -> DOOR +2 DO -> TWO +2 DEFENSE -> DEFENCE +2 DEAD -> BEDROOM +2 DE -> DEBRACY +2 DAY -> THEY +2 CREAM -> QUEEN +2 CRAWFISH -> CROFISH +2 COUNTRY -> COUNTRIES +2 CORNER -> CORN +2 COPS -> CUPS +2 CONFECTIONARY -> CONFECTIONERY +2 CINDERLAD -> CINDER +2 CHRIS -> CRIS +2 CENTER -> CENTRE +2 CALIFORNIAN -> CALIFORNIA +2 BRYNHILD -> BURNEHELD +2 BROTHERS -> BROTHER'S +2 BRAHMAN -> BRAM +2 BRACY -> BRACEE +2 BEING -> BEEN +2 BEG -> THEY +2 BEALE -> BEER +2 BEALE -> BEE +2 AWK -> AWKWARD +2 AWHILE -> A +2 AT -> THAT +2 ARSINOE -> ARSENO +2 ARE -> OUR +2 ARCHBISHOP -> ARCHBISH +2 AND -> WHEN +2 AND -> END +2 AN -> IN +2 ALLOW -> ALONE +2 ALL -> ON +2 AL -> A +2 AIN'T -> AND +2 ADD -> AT +2 A -> US +2 A -> I +2 A -> HAVE +2 A -> AND +1 ZEMSTVOS -> SEND +1 ZAYNAB -> THINE +1 ZAU -> ZAWACON +1 ZAU -> ZAO +1 ZAU -> SAO +1 ZARATHUSTRA -> TOUSTRA +1 ZARATHUSTRA -> THEY +1 ZARATHUSTRA -> TAKE +1 ZARATHUSTRA -> ERGUSTARA +1 YUSS -> YES +1 YOUR -> YOURSELVES +1 YOUR -> YOU +1 YOUR -> HE +1 YOUR -> FELLOW +1 YOU'LL -> YOU +1 YOU'D -> YOU +1 YOU'D -> YE'D +1 YOU -> YOURSELVES +1 YOU -> YE +1 YOU -> TALONS +1 YOU -> SHE +1 YOU -> OFTEN +1 YOU -> EUGEN +1 YO' -> YOU +1 YO -> YOU'VE +1 YET -> HE +1 YET -> AT +1 YES'M -> YES +1 YEOMEN -> YEOMAN +1 YEARS -> EARS +1 YEAR -> YOU +1 YAUSKY -> OWESKEE +1 YARD -> AND +1 YAHWEH -> YONWAY +1 WUNNERED -> ONE +1 WROTE -> WOLT +1 WRIT -> WRITE +1 WOUNDS -> ONES +1 WOUNDED -> WOUNDS +1 WOULD -> HAD +1 WOULD -> DID +1 WOTTETH -> WHATETH +1 WOT -> WHAT +1 WORTH -> WORSE +1 WORSHIPPERS -> WORSHIPPED +1 WORSHIP'S -> WORSHIP +1 WORRY -> WERE +1 WORLD -> WOOLWIFE +1 WORKS -> WORK +1 WORKMAN -> WORKMEN +1 WORKINGMEN -> WORKING +1 WORKED -> WORTH +1 WORK -> WORKADAY +1 WORD -> WORTH +1 WOODEN -> THOES +1 WOOD -> WOODS +1 WONDERED -> CHANCELLORED +1 WONDER -> WANDER +1 WON'T -> WELL +1 WOKE -> WALKING +1 WOE -> WON'T +1 WODE'S -> WORDS +1 WITH -> WOULD +1 WITH -> WHICH +1 WITH -> TO +1 WITH -> MY +1 WITH -> ITS +1 WITH -> IT +1 WITCH -> WHICH +1 WISHT -> WISHED +1 WIRES -> WIVES +1 WINE -> WIRE +1 WINE -> WHITE +1 WILLY -> BILLY +1 WILL -> WILT +1 WILL -> WIDOW +1 WILL -> WERE +1 WILL -> WELL +1 WILL -> WE'LL +1 WILL -> WE +1 WILL -> WAS +1 WILL -> TO +1 WILL -> REVOTE +1 WILL -> OLI' +1 WILKSES -> WILKES +1 WILKS -> WILKES +1 WILFRID -> WILFRIED +1 WILFRID -> MILFRED +1 WILFRID -> LOYAL +1 WILDLY -> WIDELY +1 WILDEST -> WALLA'S +1 WILDERNESS -> WEDDING +1 WILDERNESS -> WEARINESS +1 WILDERNESS -> LEARNAN +1 WIFE -> WI +1 WIFE -> MY +1 WIELD -> WHEEL +1 WIDEAWAKE -> WIDE +1 WICKER'S -> ROOKER'S +1 WICKER'S -> OCRE'S +1 WICKER'S -> HOOKER'S +1 WICKER -> WICKER'S +1 WI -> WITH +1 WHY -> WAS +1 WHO'D -> WHO +1 WHO -> WITH +1 WHO -> WHOSE +1 WHO -> ONE +1 WHO -> HER +1 WHITHER -> WITHER +1 WHITHER -> WHETHER +1 WHITE -> WORLD +1 WHITE -> WIDE +1 WHISKERED -> WHISKIRT +1 WHILOME -> WILM +1 WHICH -> WITCH +1 WHICH -> 
SPEECH +1 WHICH -> REACHED +1 WHICH -> PITCHED +1 WHEREABOUTS -> WHEREABOUT +1 WHERE'S -> WHERE +1 WHERE'S -> WAS +1 WHERE -> WITH +1 WHERE -> THERE +1 WHERE -> HER +1 WHER -> WERE +1 WHEN -> READ +1 WHEN -> IN +1 WHEELER -> WHALER +1 WHEEL -> BE +1 WHATE'ER -> WHATEVER +1 WHAT'S -> ONCE +1 WHAT -> WOULD +1 WHAT -> WITH +1 WHAT -> IT +1 WHAT -> FOR +1 WHAT -> AT +1 WHAT -> A +1 WHACKS -> WAX +1 WESTPORT -> WESTWARD +1 WERE -> WITH +1 WERE -> WENT +1 WERE -> RAN +1 WERE -> BURNED +1 WENT -> WON +1 WENT -> WHEN +1 WELLS -> WELL +1 WELLS -> WALES +1 WELL -> WILL +1 WELL -> WHY +1 WELL -> WHILE +1 WELL -> EQUANT +1 WEIGHED -> WADE +1 WEEVILLY -> WEEVILY +1 WEEDS -> READS +1 WEBB'S -> WEBBS +1 WE'VE -> REVOLTA +1 WE'RE -> WERE +1 WE'LL -> WILGHAN +1 WE -> WILL +1 WE -> WEED +1 WE -> WASTED +1 WAZIR -> WOZZIER +1 WAYNE -> IN +1 WAVERLY -> WAVERLEY +1 WATONWAN -> WATERWAM +1 WATER -> WALL +1 WATCHMAKER'S -> WATCHMAKERS +1 WATCHED -> WATCH +1 WATCHED -> ART +1 WAS -> WITH +1 WAS -> WHICH +1 WAS -> WE'S +1 WAS -> MUST +1 WAS -> ALWAYS +1 WARM -> WRONG +1 WARDERS -> ORDERS +1 WARD'S -> WORDS +1 WARD -> TOWARD +1 WAR -> FOR +1 WANTED -> WATER +1 WANT -> WARNED +1 WANDERERS -> JUANS +1 WANDERER -> WONDER +1 WALL -> WAR +1 WALK -> BUCK +1 WAKE -> AWAKE +1 WAITING -> WINNING +1 WAITIN -> WAITING +1 WAIT -> WRIT +1 WAISTCOAT -> WEST +1 WAGGOT -> RAGGED +1 WAGGING -> WORKING +1 WAGED -> RAGED +1 WADED -> WAITED +1 WADDED -> WATERED +1 VOYAGE -> VOY +1 VOUGHT -> WALT +1 VOTES -> VAULTS +1 VOTES -> BOATS +1 VOMITING -> WAUM +1 VOLVITUR -> VOLVETER +1 VOICED -> OUTWARDS +1 VOCAL -> FULCAL +1 VIOLENCE -> WILDEST +1 VIOLENCE -> VIOLENT +1 VIOLENCE -> IDOLENCE +1 VILLAGES -> RELIGIOUS +1 VILLAGERS -> VILLAGES +1 VIL -> VILLE +1 VIGILANCE -> VISUALS +1 VIGILANCE -> EACH +1 VICTIMIZE -> VICTIMISE +1 VESTRY -> VETERY +1 VERY -> WHERE +1 VERSES -> VERSEST +1 VERILY -> VRAIRIB +1 VENTRILOQUIST -> VENTILLA +1 VEHEMENTLY -> TO +1 VECCHIO -> OF +1 VAVASOUR -> VAVERASSEUR +1 VAULT -> WALL +1 VAUGIRARD -> ROGI +1 VAST -> VATS +1 VASSILIEVITCH -> MISSILEVITCH +1 VARIOUS -> VERY +1 VANE -> VAIN +1 VALET -> VALLEY +1 VAIN -> VEIN +1 UTTER -> OTHER +1 USUALLY -> USUAL +1 USEFUL -> FORGIFTS +1 USE -> YEARS +1 USE -> USED +1 US -> STARED +1 US -> AT +1 URGED -> ADDED +1 URARTU -> URITU +1 URARTU -> URA +1 URARTIAN -> GRACIAN +1 UPSTAIRS -> UP +1 UPON -> A +1 UP -> STREET +1 UP -> NOT +1 UNTO -> TO +1 UNTO -> INTO +1 UNTO -> AND +1 UNS -> UNSTEAD +1 UNREWARDED -> IN +1 UNOCCUPIED -> ON +1 UNLESS -> AND +1 UNIQUE -> EUIK +1 UNHESITATINGLY -> AM +1 UNHAPPY -> HAPPY +1 UNEXPECTEDLY -> UNEXPECTED +1 UNDER -> AND +1 UNCLE -> HO +1 UNADULTERATED -> AN +1 UN -> ONE +1 ULTIMATELY -> ULTIMATE +1 ULRICA -> OR +1 ULRICA -> OIKA +1 ULRICA -> EUREKA +1 UKINZER -> A +1 UDDER -> UTTER +1 TYRANNY -> STERN +1 TWYMAN'S -> TWIMMAN +1 TWO -> TOO +1 TWO -> TO +1 TWO -> JEW +1 TWO -> DO +1 TWELVEMONTH -> TWELVE +1 TURRET -> TOWER +1 TURNS -> TURNED +1 TURNING -> SHOWING +1 TURBULENT -> TREBRANT +1 TUBERCULOUS -> TUBERCUOUS +1 TRYING -> CRYING +1 TRY -> TROT +1 TRY -> TRIFLE +1 TRY -> TRIES +1 TRUSTEE -> TRUSTY +1 TRUNDLED -> TUMBLED +1 TRULY -> JULIE +1 TRUCE -> TRUTH +1 TRIVET -> TRIBUT +1 TRIPES -> TRITE +1 TRIFLE -> TRAVEL +1 TRIBES -> TRIUMPHS +1 TRIBES -> TRINES +1 TRIBE -> TIME +1 TRELAWNEY -> TREEONER +1 TREASURE -> TREASURES +1 TRAVELED -> TRAVELLED +1 TRANSSHIP -> TRANSHIP +1 TRAINING -> TRAINED +1 TRAINDAWG -> TRAIN +1 TRAFFIC -> EFFIC +1 TRADITIONS -> JUDICINES +1 TRADEMARK -> TRADE +1 TRACK -> CHECK +1 TRACED -> PRAISED +1 TOWER -> 
TOWERED +1 TOWARDS -> TOWARD +1 TOWARD -> TOWARDS +1 TOWARD -> TO +1 TOUR -> TO +1 TOUGHS -> TUFTS +1 TOUGH -> TO +1 TORMENT -> AND +1 TORE -> TOILED +1 TORCH -> TORTURE +1 TOP -> STOPPED +1 TOOK -> INTO +1 TOO -> TWO +1 TOO -> SHIMMERTS +1 TONGUE -> TONG +1 TOMORROW -> TO +1 TOMB -> TWO +1 TOM -> TUM +1 TOLERBLE -> TOLERABLE +1 TOLD -> TOE +1 TOLD -> STOWED +1 TOLD -> STOLE +1 TOILET -> TALLED +1 TO -> TWO +1 TO -> TURNED +1 TO -> TRITES +1 TO -> TOWARD +1 TO -> THEATER +1 TO -> THAT +1 TO -> SHE +1 TO -> REFORMED +1 TO -> O +1 TO -> MADE +1 TO -> IN +1 TO -> HURT +1 TO -> FOR +1 TO -> DIRECT +1 TO -> DID +1 TO -> CHOOSE +1 TO -> BOLTO +1 TO -> AT +1 TITLE -> TOWN +1 TIRING -> WOMAN +1 TIRESOME -> PARASAN +1 TIRED -> TIE +1 TIRED -> HIRED +1 TIMES -> TIME +1 TIME -> TELL +1 TILLERS -> TELLERS +1 TILL -> TO +1 TIGLATH -> TIGGLERS +1 TIGLATH -> TAKE +1 TIGLATH -> T +1 TIGLATH -> DICK +1 TIGHTENING -> TIGHTENED +1 TIGER -> TYER +1 TIGER -> DRAGGER +1 TIE -> TYPE +1 TIDE -> TIED +1 TICKLING -> TITLING +1 THY -> THINE +1 THY -> THEIR +1 THY -> I +1 THY -> DIVORITES +1 THY -> DAGGULE +1 THUS -> THERE'S +1 THUS -> DOES +1 THUMB -> TEMP +1 THROWING -> THREW +1 THROUGH -> TO +1 THRIFTILY -> DRIFTILY +1 THREE -> THIRD +1 THOUSAND -> SPASM +1 THOUGH -> THE +1 THOUGH -> THAT +1 THOUGH -> THAN +1 THOU -> THOUGH +1 THOU -> NOW +1 THOSE -> THUS +1 THIS -> US +1 THIS -> THAT +1 THIS -> MISSUS +1 THIS -> ITS +1 THIS -> IS +1 THIS -> DISTAGGER +1 THIRST -> THOSE +1 THIRST -> DOZ +1 THINK -> THINKING +1 THINGS -> THANKS +1 THINGS -> SPENCE +1 THIN -> FLIND +1 THEY'RE -> THEIR +1 THEY'RE -> HER +1 THEY'RE -> ARE +1 THEY -> YOU +1 THEY -> I +1 THEY -> DECLINE +1 THESE -> HIS +1 THESE -> DISTRACT +1 THERE'S -> THERE +1 THERE -> WREATHS +1 THERE -> THEY +1 THERE -> THERE'S +1 THERE -> THEN +1 THERE -> THAT +1 THERE -> TERRANT +1 THEN -> WHEN +1 THEN -> VEN +1 THEN -> THAN +1 THEN -> DIDN'T +1 THEN -> AND +1 THEM -> AND +1 THEIR -> THEM +1 THEIR -> HER +1 THEIR -> DEAR +1 THEE -> ME +1 THEATRE -> FUTURE +1 THE -> YOUR +1 THE -> YEARS +1 THE -> WIDDLED +1 THE -> VERY +1 THE -> UPON +1 THE -> TIRE +1 THE -> THOUGH +1 THE -> THESE +1 THE -> THEM +1 THE -> OUR +1 THE -> OTHERS +1 THE -> OTHER +1 THE -> MISTAKES +1 THE -> LOVE +1 THE -> LEDA +1 THE -> KIND +1 THE -> JACK +1 THE -> INSTANDED +1 THE -> INSECTORS +1 THE -> HIS +1 THE -> HER +1 THE -> HALF +1 THE -> FREDERI +1 THE -> FOR +1 THE -> EUSTA +1 THE -> DID +1 THE -> DESTROVISION +1 THE -> DE +1 THE -> CHIN'S +1 THE -> BUILD +1 THE -> BE +1 THE -> ASHORE +1 THE -> ABIDING +1 THAT'S -> THE +1 THAT'S -> I +1 THAT'LL -> THOU +1 THAT -> WITH +1 THAT -> TILL +1 THAT -> THY +1 THAT -> THEY +1 THAT -> THEIR +1 THAT -> THAN +1 THAT -> STEP +1 THAT -> SET +1 THAT -> NOT +1 THAT -> MY +1 THAT -> LITTLE +1 THAT -> LET +1 THAT -> INTER +1 THAT -> DAT +1 THAN -> THEN +1 THAN -> GONDON +1 THAN -> AND +1 TERRA -> TERRACE +1 TEND -> INTEREST +1 TEN -> TOWN +1 TEMPTETH -> TEMPTED +1 TEMPLES -> TEMPLE +1 TEMPLAR -> TENT +1 TEMPLAR -> TEMPLE +1 TELEGRAM -> TELEGRAMAS +1 TEAM -> TEEM +1 TEAL -> TEALE +1 TEA -> TIENO +1 TASTE -> CASE +1 TASKMASTER -> DE +1 TARDY -> TIDY +1 TAPPED -> TOP +1 TAPIS -> TAPPY +1 TANQUAM -> TEN +1 TALMASH -> THOMMISH +1 TALKED -> TALKS +1 TALENTS -> OR +1 TAKEN -> TAKING +1 TAKE -> THEY +1 TAHITI -> DAEDI +1 T'OTHER -> THE +1 SYRUP -> CYRUP +1 SYRIA -> ASSYRIA +1 SYNONYMON -> SNYM +1 SYLLOGISM -> DILIGION +1 SYDNEY -> SIDNEY +1 SWORD -> SORT +1 SWELP -> SWAB +1 SWARTHY -> SWALLTY +1 SWAG -> WAG +1 SURELY -> TRULY +1 SUPPOSE -> S'POSE +1 SUMTHIN -> 
SOMETHING +1 SULPHURIC -> SUFFERG +1 SUFFOLK -> SUFFOLD +1 SUFFICIENT -> SUSPICION +1 SUFFICIENT -> SUSPICIENT +1 SUFFICES -> SURFACES +1 SUE -> EM +1 SUDDEN -> CERTAIN +1 SUCKED -> SACKED +1 SUCCOURS -> SECURS +1 SUB -> SUBTRINE +1 STRUGGLE -> STRUGGLING +1 STRUCK -> UP +1 STRUCK -> MY +1 STRIKE -> STRIKEBREAKERS +1 STRIFE -> STRIPE +1 STRIFE -> STRIKE +1 STRETCH -> SEARCHED +1 STREET -> DISTRICT +1 STREET -> AT +1 STREAK -> STREET +1 STRAYING -> STRAIN +1 STRANGEST -> STRANGERS +1 STRANGE -> STRAIN +1 STRAITS -> STRAIT +1 STRAITENED -> STRAIGHTENED +1 STRAINS -> TRAINS +1 STRAIGHT -> STRAYED +1 STORES -> STORIES +1 STORED -> STOLE +1 STORE -> WORKED +1 STOOD -> TOO +1 STONEWALL -> STERN +1 STONED -> STONE +1 STOCK -> STOCKING +1 STILLNESS -> WHICH +1 STILL -> STEALING +1 STILL -> SO +1 STICK -> STICKET +1 STEWPAN -> STEWPANT +1 STEW -> DO +1 STERN -> STERNMOST +1 STEPPED -> STEPS +1 STEP -> SPATANI +1 STEEVER -> STEVER +1 STEERING -> STIRRING +1 STAYING -> SEEING +1 STAYED -> STAY +1 STATUS -> STRATORS +1 STATURE -> STATUE +1 STATED -> IS +1 STATE -> STATES +1 STATE -> STATEROOM +1 STAS -> STARS +1 STARVING -> STARLING +1 STARK -> START +1 STARED -> STEERED +1 STANDSTILL -> FAN +1 STANDS -> SENDS +1 STANDARD -> STANDARDS +1 SQUEAMISH -> SCREAMY +1 SPONSUS -> QUALMS +1 SPONGE -> SPINES +1 SPONDYLES -> SPY +1 SPIRIT -> EXPERIOR +1 SPILLING -> SPINNING +1 SPICE -> SPIES +1 SPARSELY -> FIRSTLY +1 SPAKE -> PAKE +1 SPAKE -> BEING +1 SPAKE -> BEG +1 SPADDLE -> SPADEL +1 SOUTHERN -> SUDDEN +1 SOUSE -> SOUS +1 SOURCE -> SORT +1 SOUGHT -> THOUGHT +1 SOUGHT -> SOWED +1 SOUGHT -> SAW +1 SOTELES -> SARTUOUS +1 SORDID -> SARDID +1 SOOT -> SUIT +1 SONG -> SUN +1 SON -> SONG +1 SOME -> THEM +1 SOMBER -> SOMBRE +1 SOFA -> SILVER +1 SOCIALIST -> SOCIALLY +1 SO -> SUMINUTELY +1 SO -> SOUL +1 SO -> SOPHIA +1 SO -> SARKAIUS +1 SO -> POST +1 SO -> MISS +1 SNETKOV -> COUGH +1 SNARLED -> SNARLS +1 SMOLNY -> MORLEY +1 SMOKESTACKS -> SMOKE +1 SMOKER -> MOCHER +1 SMIRCHED -> SMARGED +1 SMILE -> MIND +1 SMELL -> SMAR +1 SLUNK -> SUNK +1 SLIGHTLY -> SAT +1 SLACKENED -> CLACK +1 SLAB -> FLAP +1 SKYLARKS -> SKYLIGHTS +1 SKIRT -> GIRT +1 SKEW -> SCARO +1 SKEPTICAL -> SCEPTICAL +1 SIXES -> SIX +1 SIRE -> SIRES +1 SIR -> SO +1 SIR -> SERVED +1 SIR -> BECAME +1 SINUHIT -> IT +1 SINGS -> SANGS +1 SINGA -> SINGAFUT'S +1 SINGA -> SHING +1 SINFUL -> SENT +1 SINE -> IN +1 SIMULATES -> SIMILATES +1 SIMMERING -> SIBBERING +1 SILLY -> SYTLER +1 SILENCE -> SCIENCE +1 SIGURD -> SIR +1 SIGURD -> CIGARS +1 SIGURD -> CIGARET +1 SIGNOR -> SENOR +1 SIGNING -> SOUNDING +1 SIGNED -> SIGN +1 SIGHING -> SIGNED +1 SIEVE -> SEA +1 SIDE -> THOUGHT +1 SIDE -> SORT +1 SIDE -> SIGHT +1 SICUT -> SECUT +1 SHUT -> SHIRT +1 SHUT -> SHED +1 SHUT -> AT +1 SHUMAN -> SON +1 SHUCKS -> SHOCKS +1 SHRUBS -> SHRUGS +1 SHOULDST -> SHOULDEST +1 SHOULDERS -> SHOULDER +1 SHOULD -> YOU +1 SHOULD -> SHALL +1 SHOT -> HAD +1 SHO'LY -> SURELY +1 SHIPS -> THE +1 SHIPS -> SHIP +1 SHET -> SHUT +1 SHERIFF -> SURE +1 SHERIFF -> SIR +1 SHERIFF -> CHEER +1 SHERE -> SHEAR +1 SHEPHERD -> SHEPARD +1 SHEETS -> SEATS +1 SHED -> SHARED +1 SHE'LL -> YE'LL +1 SHE -> SEA +1 SHE -> HE +1 SHATTERED -> SATURDAIL +1 SHARDURIS -> SHOW +1 SHARDURIS -> SHALL +1 SHARDURIS -> SHADORIS +1 SHAPIA -> SHAPEIA +1 SHALLUM -> CELEM +1 SHALL -> TO +1 SHALL -> SHOW +1 SHALL -> OUR +1 SHAKEDOWN -> SHAKE +1 SHAG -> SHAGG +1 SEYTON -> SETTON +1 SEYTON -> SETON +1 SEWING -> SOON +1 SEVERE -> SAVOUR +1 SEVERAL -> SEVEREIGN +1 SEVENTIETH -> SEVENTEENTH +1 SEVEN -> THE +1 SET -> SAT +1 SERMON -> SAME +1 
SERGEY -> SURGY +1 SERGEY -> SOJOURNOVITCH +1 SENTENCED -> INTENSE +1 SENT -> SAT +1 SEEST -> CEASE +1 SEEN -> SEEMED +1 SEEMS -> SEEMED +1 SEEMED -> SEEMS +1 SEEMED -> SEEMING +1 SEEM -> SEEMED +1 SEEM -> SEE +1 SEEKEST -> SEEK'ST +1 SEDUCETH -> SEDUCE +1 SEAT -> HEAT +1 SEAMEN -> SEE +1 SEAMAN -> SALMON +1 SEA -> SEAS +1 SCULPTORS -> SCULPTOR'S +1 SCRUTINISED -> ARE +1 SCRIBES -> GRIBES +1 SCRAPPIN -> SCRAP +1 SCORN -> CORN +1 SCO'TCH -> SCORCHED +1 SCHULBERG'S -> SHUBERG'S +1 SCHOOLDAYS -> SCHOOL +1 SCHOOL -> SCHOOLGIRLS +1 SCHOOL -> SCHOOLBOY +1 SCHOLARS -> STARS +1 SCAPEGRACES -> CAPE +1 SCAPED -> ESCAPED +1 SCAPE -> ESCAPE +1 SAYS -> TEETH +1 SAYS -> SAY +1 SAYS -> AS +1 SAYIN -> SAYING +1 SAY -> SO +1 SAY -> SEE +1 SAY -> SAVE +1 SAW -> SOLDOM +1 SATURDAY -> SAID +1 SAT -> SET +1 SARAH -> SEREN +1 SANS -> SONSPIER +1 SANITARY -> SENATORY +1 SANG -> IN +1 SAND -> SAN +1 SANCTESS -> SANCTIS +1 SANCHO -> SANCHA +1 SAN -> SAMPANCISCO +1 SALONE -> SALON +1 SALLOWER -> SALARY +1 SAILS -> SAILORS +1 SAIL -> SO +1 SAIL -> SAILOR +1 SAID -> TO +1 SAID -> THE +1 SAID -> STOOD +1 SAID -> SET +1 SAID -> SAYS +1 SAID -> OF +1 SAH -> SAD +1 SAH -> A +1 SAGITTAIRE -> SAGATURE +1 S'POSE -> SUPPOSE +1 S -> HELEN +1 RYO -> RIO +1 RUSSIA -> RUSHIRE +1 RUSSIA -> A +1 RUSHED -> RUSH +1 RUNS -> ONES +1 RUNG -> RANG +1 RUN -> WARM +1 RUN -> RAN +1 RUN -> ENOUGH +1 RUMP -> WRONG +1 RUM -> ROMAN +1 RULER -> SPOTIC +1 RULE -> ROLE +1 RUBENSES -> REUBEN +1 RUBBERS -> WRAPPERS +1 ROXBURY -> BRAXBURY +1 ROWED -> RIDE +1 ROUTE -> ROUGH +1 ROUNDED -> ROUTED +1 ROTHS -> WORSE +1 ROSE -> AROSE +1 ROSAMUN -> ROSAMOND +1 ROPE'S -> ROPES +1 ROPE'S -> HOPES +1 ROOTS -> RUTHS +1 ROOMFELLOW -> ROOM +1 ROOM -> RUM +1 RONALD -> RANALD +1 ROMANCE -> ROMANS +1 ROLL -> ROCCALL +1 ROCK -> STROKE +1 ROARING -> ROWING +1 ROAD -> RULED +1 RISDON -> RISDEN +1 RISDON -> RICHMOND +1 RINGMASTER -> RING +1 RINDS -> RHINS +1 RIGOROUS -> RECKLESS +1 RIGHT -> WRITE +1 RIGHT -> THREAT +1 RIGHT -> RIPE +1 RIDGE'S -> RICHES +1 RIDER -> WRITER +1 RIDER -> RATTERAS +1 RID -> INTO +1 RICHARD -> WRETCHED +1 RHODIAN -> RODIAN +1 RHODIAN -> RADIAN +1 RHODES -> ROADS +1 REVOLUTIONISTS -> REVOLUTIONIST +1 REVOLUTIONIST -> REVOLISHNESS +1 REVOLTE -> REVOLT +1 REVEREND -> ROBIN +1 REVEREND -> REVERED +1 REVELING -> REVELLING +1 REVELATION -> RELATION +1 RETZCH'S -> WRETCHES +1 RETURNED -> RETURN +1 RETURNED -> HER +1 RETURN -> RETURNING +1 RETURN -> RETURNED +1 RESUMED -> JUNE'S +1 RESTORETH -> RESTORE +1 RESTIVE -> WRETS +1 RESOLVED -> WE +1 RESK -> REST +1 RESISTING -> FUN +1 RESINOUS -> ZENOUS +1 RESCUED -> RESCUE +1 REQUEST -> QUEST +1 REPUTATION -> REPETITION +1 REPLY -> NED +1 REPLIED -> PARTLY +1 REPEATED -> REPLIED +1 REND -> RUN +1 REMISSION -> REMISSIONARY'S +1 REMEMBEREST -> REMEMBER +1 REMARKED -> REMARK +1 REMAINED -> REMAINS +1 REMAIN -> REMAINED +1 RELEASED -> RELISSE +1 RELATED -> RELIGHTED +1 REJECT -> REJECTED +1 REIGNS -> REIGN +1 REIGNED -> RAINED +1 REGULATION -> REGULATING +1 REGIN -> WE +1 REGIN -> REGAN +1 REGARDING -> GUARDING +1 REG'LER -> REGULAR +1 REFUGE -> REPUGUE +1 REFORMERS -> REFUSE +1 REFORMERS -> REFORMED +1 REFERENCE -> REFUCER +1 REELECTION -> RE +1 REDUCED -> IT +1 RED -> REDMOCKERS +1 RECTUM -> RECTIM +1 RECTIFY -> BY +1 RECORDS -> RICARDS +1 RECITED -> RESIDED +1 RECEDED -> WAS +1 RECAPTURED -> RE +1 REAP -> READ +1 REALISED -> REALIZED +1 READY -> RATHER +1 REACHED -> RAGED +1 RAYSTOKE -> RAY +1 RAY -> WRAYE +1 RATTLING -> RIDERING +1 RATHER -> READY +1 RATHER -> JARDA +1 RASCALS -> RASCUE +1 RAPSCALLIONS -> 
RATSKAGGS +1 RAPPERS -> WRAPPERS +1 RANSOM -> RANDOM +1 RAN -> RUN +1 RAMSES -> RANSES +1 RAM -> ROOM +1 RAISE -> FOR +1 RAINY -> RAINING +1 RAIN -> REIN +1 RAID -> RAY +1 RAGE -> RATE +1 RADPROP -> RED +1 RACKETEERS -> RAGATIRS +1 RACKED -> WRAPPED +1 RACHEL -> RICHLY +1 RACHEL -> MID +1 RACES -> RAYS +1 RABBITS -> RABBIT'S +1 RABB'S -> RABBS +1 QUMMUKH -> CUB +1 QUITE -> ACQUAINTED +1 QUICK -> QUICKLY +1 QUEST -> PRICE +1 QUANTRELL -> QUANTREL +1 QUANTITIES -> QUALITIES +1 QUACKS -> CLACKS +1 PYM -> POEM +1 PUTTING -> PUT +1 PUTTEL -> PATTERN +1 PUTS -> BITS +1 PUT -> PUS +1 PUT -> PULL +1 PURPORTING -> REPORTING +1 PUMP -> PUMPED +1 PULSE -> PART +1 PULLEY -> PULLLY +1 PULLED -> POURED +1 PUDDLES -> POTTLES +1 PSALM -> NOW +1 PRYTANEUM -> BRITTANNIUM +1 PRUDENT -> CARTS +1 PROTECTORATE -> PROTECTOR +1 PROPRE -> A +1 PROMOTIVE -> PROMOTED +1 PROHIBITION -> PROBES +1 PROFESSION -> PROFESSIONS +1 PRODUCES -> PRODUCED +1 PRODUCED -> PRODUCE +1 PRODUCE -> PRODUCED +1 PROCLUS -> PROCLAS +1 PROAS -> PROTS +1 PROAS -> PROITS +1 PROAS -> PERHAPS +1 PROA -> PROCEED +1 PRIORESS -> PYRIS +1 PRIORESS -> PRIOR +1 PRIORESS -> PIRATES +1 PRIORESS -> PIRASS +1 PRINCIPALS -> PRINCIPLES +1 PRIMER -> PRIMARY +1 PRIEST -> PREACH +1 PRESTIGE -> PRESAGE +1 PRESERVE -> PRESENT +1 PRENTICESHIP -> PRENTICE +1 PRECEPTORS -> PERCEPTORS +1 PREACHED -> PREACH +1 PRAYERS -> PRAY +1 PRAM -> IN +1 PRAISEWORTHY -> PRAISED +1 PRACTITIONER -> PETITIONERS +1 POVERTY -> DISPOVERTY +1 POVERTY -> BAVARY +1 POURED -> PUT +1 POUCHES -> PIUCHES +1 POTUM -> HE +1 POTION -> FORTUNE +1 POTASSIUM -> POTASSIAN +1 POSTHASTE -> POST +1 POSTERN -> PASSING +1 POST -> POSTPLETED +1 PORTO -> PORT +1 PORTMANTEAU -> PARTNENT +1 PORTENTOUS -> POTENT +1 PORED -> POURED +1 POPULACE -> POPULOUS +1 POOR -> FAR +1 POMEROY -> POMMEROI +1 POLYTECHNIC -> POLY +1 POLONIUS -> BONIUS +1 POLLY -> PEARLING +1 POLL -> PAUL +1 POINT -> SPINTER +1 PLUMB -> PLUM +1 PLEASANT -> PRESENT +1 PLEAS -> PLACE +1 PLEADED -> PLAYED +1 PLATTERBAFF'S -> PLATTERBATH'S +1 PLATTERBAFF -> PLATTERBUFF +1 PLATTERBAFF -> FURTHER +1 PLATED -> FLASH +1 PLANNING -> CLANNING +1 PLAIN -> PLAY +1 PLAIN -> PLANE +1 PLACE -> PLATE +1 PLACE -> PLACED +1 PLACE -> PACE +1 PITTS -> FITZ +1 PITHUM -> PITTHAM +1 PISTOLES -> PISTOL +1 PIPE -> PEG +1 PINKUS -> PEGAS +1 PINKERTON'S -> PINKERTENT'S +1 PINCHED -> IMPINCED +1 PILLOWED -> PILLART +1 PILESER -> LAUGHED +1 PILESER -> LAST +1 PILESER -> GLASS +1 PILESER -> BELEASE +1 PIKES -> PIPES +1 PIGSKIN -> PICTION +1 PIGEONCOTES -> PIGEON +1 PIGEONCOTE -> PIGEONOTE +1 PIGEONCOTE -> PEACH +1 PIGEONCOTE -> ERNESTON +1 PIGEONCOTE -> DIJIN +1 PIGEONCOTE -> BEECH +1 PIERO -> PIERRE +1 PIECE -> PEACE +1 PICTURE -> PITCHER +1 PICKED -> PRICKED +1 PHUT -> FIVE +1 PHUT -> AFOOT +1 PHOSPHOR -> PHOSPHOBS +1 PHOSPHOR -> PHOSPHER +1 PHILISTINES -> FURTHER +1 PHILIPPUS -> PHILIP +1 PHILIP -> FELLOW +1 PETREL -> PETEL +1 PETER'S -> PETER +1 PETER -> EITHER +1 PETER -> BEATER +1 PESTE -> PESTS +1 PERSPIRED -> POISPIRED +1 PERSEUS -> PURSES +1 PERNOUNCE -> PRONOUNCE +1 PERFECTLY -> PERFECTUALLY +1 PERE -> PERFELLAR +1 PERE -> PALACHE'S +1 PERAMBULATOR'S -> PRIME +1 PEONAGE -> PINIONS +1 PENDING -> SPENDING +1 PENCE -> PINTS +1 PEKAHIAH -> PECAH +1 PEASANTS -> PIECE +1 PEAS -> PEASE +1 PEARL -> POOR +1 PAY -> PAIRS +1 PAWNBROKER -> BROKER +1 PAUSED -> PASSED +1 PATIENCE -> AS +1 PASTES -> PACE +1 PAST -> PASS +1 PASSES -> PAUSES +1 PASSED -> PASS +1 PARTS -> PART +1 PARTLY -> PARTIALLY +1 PARTINGS -> PARTING +1 PARRICIDES -> PARASITES +1 PARR -> PAR +1 PARKS -> 
BOX +1 PARDONABLE -> PURCHANGLE +1 PARDON -> PARTISER +1 PANNIERS -> PENNYERS +1 PANEL -> PENNEL +1 PALL -> POOL +1 PALAESTRA -> PELLESTRA +1 PAIR -> PARENT +1 PADDLING -> PADDLIN +1 PACES -> PLACES +1 PACE -> THE +1 P -> PATUM +1 OWNERS -> LANDOWNERS +1 OWN -> ON +1 OWE -> ARE +1 OW'M -> AM +1 OW -> HOW +1 OVERRIPENESS -> OVER +1 OVERHEARD -> OWNED +1 OVERFULL -> OVER +1 OVERFLOWING -> OVERWHELMING +1 OVER -> OVERPRUDENT +1 OVER -> OF +1 OUTLINES -> OUTLINE +1 OUTER -> OUT +1 OUT -> YET +1 OUT -> UP +1 OUT -> THEN +1 OUT -> SOUTH +1 OUT -> PATENTS +1 OUT -> OUTGAZE +1 OUT -> ON +1 OUT -> ALL +1 OUT -> ALBEA +1 OUR -> UNDISTRUC +1 OUR -> OURSPORT +1 OUR -> OURSAN +1 OUR -> I'LL +1 OUR -> HER +1 OUR -> ARE +1 OUNCES -> OZ +1 OUGHTN'T -> OUGHT +1 OUGHT -> ARE +1 OUGHT -> ALL +1 OUEN -> WAT +1 OTTO -> ARE +1 OTHER'S -> COMMENTS +1 ORIENTAL -> OF +1 ORFICER -> ORFASTER +1 ORDER -> ODO +1 ORBIS -> ORBUS +1 OR -> OUR +1 OR -> OPPOSITION +1 OR -> OH +1 OR -> I +1 OR -> FULL +1 OR -> AS +1 OR -> ARE +1 OR -> AN +1 OR -> ALL +1 OR -> A +1 OPPRESSORS -> IMPRESSORS +1 OPENS -> OPENED +1 OPENED -> OPEN +1 OPEN -> UP +1 OPE -> LOVE +1 ONTO -> ON +1 ONTO -> INTO +1 ONLY -> OWING +1 ONLY -> OLD +1 ONE -> WORLD +1 ONE -> WON +1 ONE -> A +1 ONCE -> ONES +1 ONCE -> ONE'S +1 ON'T -> ON +1 ON -> WHEN +1 ON -> UPON +1 ON -> UNTO +1 ON -> UNLUCK +1 ON -> UNCREAM +1 ON -> UNCHANGED +1 ON -> TO +1 ON -> TILL +1 ON -> HONOUR +1 ON -> DOING +1 OME -> HOME +1 OLL -> ALL +1 OLIVE -> OUT +1 OLIVE -> ONLY +1 OLIVE -> OLIV +1 OLIVE -> OLDEST +1 OLIVE -> I +1 OLD -> O +1 OLD -> ALL +1 OKAY -> OH +1 OIL -> ORE +1 OH -> UH +1 OH -> DREAD +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICIALS +1 OFFICER -> OFFICERS +1 OFFENSE -> OFFENCE +1 OFFEN -> OFF +1 OFF -> UP +1 OFF -> OUR +1 OFF -> MORVE +1 OF -> WHAT +1 OF -> VAZARRE +1 OF -> TO +1 OF -> THIS +1 OF -> THE +1 OF -> THAT +1 OF -> ON +1 OF -> O +1 OF -> KARA +1 OF -> INTO +1 OF -> HER +1 OF -> AT +1 OF -> ASSUME +1 OF -> AS +1 OF -> AND +1 OBOCOCK -> OBEY +1 OBJECT -> SUBJECT +1 O'NIGHTS -> A +1 O'NEILL -> O'NEIA +1 O -> OR +1 O -> I +1 O -> AM +1 O -> ALL +1 NYTOUCH -> KNIGHT +1 NUZHAT -> UZHAT +1 NUZHAT -> NUZHA'S +1 NUZHAT -> KNOWSAT +1 NUTS -> KNOTS +1 NUNS -> NUN'S +1 NUISANCE -> NOTICE +1 NU'UMAN -> NUMAN +1 NOWT -> OUT +1 NOW -> SO +1 NOW -> NO +1 NOW -> NABRAMAN +1 NOW -> BY +1 NOTTINGHAM -> NINE +1 NOTICE -> NOTICED +1 NOT -> PERCHAL +1 NOT -> NOW +1 NOT -> NOR +1 NOT -> NIGH +1 NOT -> NAT +1 NOT -> KNOWN +1 NOT -> IT +1 NOT -> A +1 NORTH -> NORTHEAST +1 NOPE -> NOTE +1 NONETHELESS -> NONE +1 NONE -> NO +1 NO -> THOUGH +1 NO -> NOW +1 NIKOLAY -> NICCLAY +1 NICO -> NIGO +1 NICKEL -> COMPLATED +1 NEXTER -> NEXT +1 NEW -> YOU +1 NEW -> NEWER +1 NERVE -> NERVES +1 NEOSHO -> NEOSH +1 NEIGHBOURING -> NEIGHBORING +1 NEIGHBORS -> LABORS +1 NEEDLED -> NEEDLE +1 NEED -> NEEDED +1 NEAREST -> NURTURE +1 NEAREST -> NOTCH +1 NEAREST -> NEWS +1 NEARER -> IN +1 NAUGHT -> NOUGHT +1 NATURALLY -> NATURAL +1 NATURAL -> NATURALLY +1 NARRATIVES -> NARRATIVE +1 NARCOTIC -> NAKOTIC +1 NANDY'S -> ANDY'S +1 MYSTERIOUS -> MYSTERY +1 MYRTILUS -> MYRTALIS +1 MYRTILUS -> MERTOLUS +1 MYRTILUS -> MERTELUS +1 MYRTILUS -> BURTLES +1 MUSTACHES -> MOUSTACHES +1 MUST -> STACCOY +1 MUST -> MY +1 MUST -> MISTER +1 MUST -> MISS +1 MUSKETS -> MASKETS +1 MUSICIANS -> MESSIE'S +1 MURDOCH'S -> MARDOC'S +1 MURDOCH -> MARDOX +1 MURDOCH -> MARDOCK +1 MUIR -> YOU +1 MUG -> MUCH +1 MUD -> MAD +1 MOWER -> OVER +1 MOVEMENT -> MOMENT +1 MOUTHWHAT -> MOUTH +1 MOUTHS -> MOTHS +1 MOUTH -> MOUSE +1 MOURNING -> MORNING +1 
MOUNTNORRIS -> MONTORAS +1 MOUNT -> MOUND +1 MOTHER -> PRESENTERS +1 MOST -> PRO +1 MOST -> POSEY +1 MOSES -> ROSES +1 MOSES -> OF +1 MOSES -> MOVES +1 MORTIS -> MORTUS +1 MORTIFICATIONTHAT -> MORTIFICATION +1 MORTEM -> MODE +1 MORE'N -> MORE +1 MORE -> SMALL +1 MOPED -> MURKED +1 MOOR -> MORE +1 MOONLIGHT -> MALE +1 MONTHLY -> MOUTHFUL +1 MONSEIGNEUR -> MONSIEUR +1 MOMMOL -> MAMMA +1 MODERATE -> AND +1 MO -> MORE +1 MIXTURE -> MIXED +1 MIX -> MAKE +1 MISTER -> MISSUS +1 MISTER -> MISS +1 MISTER -> MIDSER +1 MISTER -> BISHO +1 MIST -> MISTABILATION +1 MISSUS -> MUST +1 MISSED -> MISTING +1 MISCHIEVOUS -> MACHINEROUS +1 MINNIE -> MANY +1 MINNEAPOLIS -> MANY +1 MINISTER -> MEANESTER +1 MINIONETTE -> MINOR +1 MINE -> MIKE +1 MIND -> MINE +1 MIMICK -> MIMIC +1 MILLY -> MERELY +1 MILLSTON -> MILLSTONE +1 MILICENT'S -> MILICENT +1 MILICENT -> MILLICENT +1 MILDEWED -> MELTED +1 MIHI -> ME +1 MIDRIFF -> MIDRIFTS +1 MIDIAN -> MILLION +1 MIDIAN -> MENDIAN +1 MIDDY -> MIDDI +1 MIDDLING -> MIDDLIN +1 METHINKETH -> METHINK +1 MET -> MAKE +1 MESTIENNE -> THE +1 MESTIENNE -> MISSION +1 MESTIENNE -> MESSIAN +1 MESTIENNE -> MESS +1 MESTIENNE -> A +1 MESSES -> MASSES +1 MERRY -> MERRYMAKING +1 MERNEPTAH -> PATH +1 MERLONUS -> MERLINUS +1 MERLONUS -> MERLINA'S +1 MERIT -> MARRIT +1 MERIT -> MARRIAGE +1 MENTAL -> MANTLE +1 MEND -> MEAN +1 MEN'S -> MAN'S +1 MEN -> MAYNUM +1 MEN -> INTO +1 MEN -> CAME +1 MEDICAMENTS -> MEDICTS +1 MEDICAL -> MEDICA +1 MEDIAN -> MEDIUM +1 MEDDLE -> MEDAL +1 MEAT -> TO +1 MEAT -> ME +1 MEAT -> HAVE +1 ME -> YOU +1 ME -> WILL +1 ME -> MISSY +1 MC -> MICROCLE +1 MC -> MAC +1 MC -> MA +1 MAY -> ME +1 MAY -> MARGAR +1 MAY -> MADE +1 MATTERS -> MATTER +1 MATTER -> MATTHOR +1 MATI -> MET +1 MATI -> MANTI +1 MATEY -> MATE +1 MATERIALS -> MATURES +1 MATE -> MADE +1 MASTER -> MASSR +1 MASKED -> MASSED +1 MARVELED -> MARVELLED +1 MARTYR -> MASTER +1 MARSPEAKER -> MARKEER +1 MARSHAL -> MARTIAL +1 MARRIAGE -> MARES +1 MARMALADES -> MARMAL +1 MARJORIE -> MARGERY +1 MANKATO -> MANCAO +1 MANKATO -> MAN +1 MANASSEH -> MANETTE +1 MAN -> MAIN +1 MAMIE -> MAY +1 MALNUTRITION -> MALTRICIAN +1 MAKES -> MATRON +1 MAKE -> MADE +1 MAKAN -> MACAN +1 MAJORITY -> MATURITY +1 MAINE -> MIN +1 MAIL -> MALE +1 MAID -> MATE +1 MADE -> READ +1 MADE -> GET +1 MAD -> MADGE +1 MABILLON -> MARLAN +1 M -> NOT +1 M -> I +1 M -> ENEM +1 M -> AND +1 M -> AM +1 LYSIMACHUS -> LISMACHUS +1 LUNA'S -> LUNDY'S +1 LUNA'S -> LUNAR'S +1 LUKE -> LOOK +1 LUGGAGE -> LEGGED +1 LUCIEN -> LUCIAN +1 LOWER -> BLOW +1 LOW -> LOKI +1 LOVER -> LOVE +1 LOVE -> LAW +1 LOUD -> ALL +1 LOST -> ASKED +1 LOSSES -> LOSES +1 LORD'S -> LARGE +1 LORD -> LOT +1 LORD -> LARD +1 LOQUACITY -> LEQUEST +1 LOOKOUT -> LOOK +1 LOOKED -> LOOK +1 LOOKED -> LIFTED +1 LOOK -> WORK +1 LOOK -> LOOKED +1 LOOK -> LIVE +1 LONGER -> LONG +1 LONESOMENESS -> LONESOME +1 LONE -> LONG +1 LOCK -> LOOK +1 LOBSTER -> LOBSOR +1 LOBSTER -> LOBS +1 LL -> CHILLED +1 LIZABETH -> LISBETH +1 LIVES -> IS +1 LIVED -> LIFT +1 LIVE -> LEAVE +1 LISTEN -> LISTENED +1 LINE -> LYING +1 LINE -> LAND +1 LIMPED -> LIMP +1 LIME -> LINE +1 LILBURN -> LOWBOURNE +1 LILBURN -> LOWBORN +1 LIKELY -> LIKE +1 LIKE -> THE +1 LIKE -> NIGHTLY +1 LIKE -> LIKED +1 LIGHT -> LIGHTFOOTED +1 LIFE -> LIE +1 LIE -> LIKE +1 LIDDY -> LIVE +1 LIDDY -> LEAVING +1 LIDDY -> LADY +1 LIAISON -> LEAR +1 LEXINGTON -> LECINGTON +1 LEWIS -> LOUIS +1 LETTERS -> LITTLE +1 LETTERS -> LETTER +1 LETTERS -> LET +1 LETTERS -> LADDERS +1 LET'S -> LET +1 LET -> THEM +1 LET -> THAT +1 LET -> NEKHLUD +1 LET -> LEFT +1 LET -> LED +1 LET 
-> LATER +1 LET -> LATE +1 LEST -> REALIST +1 LESSON -> MESS' +1 LESS -> US +1 LESS -> LEST +1 LESLIE -> THIS +1 LESLIE -> LIZZLING +1 LESLIE -> LIZZLEY +1 LESLIE -> LISALLY +1 LESLIE -> LINLESILY +1 LESLIE -> IT +1 LENOIR -> LOIS +1 LEND -> BLENDEST +1 LEMON -> LINENSHIPS +1 LEGS -> OR +1 LEFT -> LIVED +1 LEFT -> LILY +1 LEFT -> LIFTED +1 LEFT -> LET +1 LEER -> URNOUS +1 LEE'S -> LEE +1 LEAVED -> TO +1 LEAVE -> LE +1 LEARN -> LEARNED +1 LEAPT -> LEAPED +1 LEAF -> OF +1 LEADPENCIL -> LEAD +1 LEADERSHIP -> LEISURESHIP +1 LEADERS -> LEADER'S +1 LAWS -> NOISE +1 LAW -> LAWN +1 LAW -> LAST +1 LAW -> ALL +1 LAUGHED -> THEY'LL +1 LAUDERDALE -> LAURA +1 LAUDERDALE -> LAUDIDAL +1 LATH -> LASS +1 LAST -> MUST +1 LASH -> LAST +1 LANDI -> LANDEE +1 LAND -> THE +1 LAND -> ALAN +1 LAMBS -> LAMPS +1 LAKE -> LATER +1 LAIN -> LANE +1 LAID -> LATE +1 LADY -> LAY +1 LADY -> LADIES +1 LADS -> LAD +1 LAD -> ABOUT +1 LACHRYMA -> LACKRY +1 LACHAISE -> CHASE +1 LABOURERS -> LABORERS +1 LABOUR -> LABOR +1 L -> OLD +1 L -> EL +1 L -> ALE +1 KNOW -> NOT +1 KNOTTY -> NAUGHTY +1 KNOT -> NOT +1 KNOBBLY -> NOBLY +1 KNIGHT'S -> NIGHT'S +1 KNIGHT -> LAW +1 KLEPTOMANIAC -> CLAPTOMANIA +1 KLEPTOMANIA -> CLEFTOMANIA +1 KITTY -> KATY +1 KITE -> KIND +1 KITE -> DESERVED +1 KING -> MACKING +1 KING -> GIMER +1 KINDER -> KIND +1 KILLS -> KILL +1 KICK -> KICKIE +1 KEYS -> CASE +1 KEYS -> ACCUSE +1 KETTLE -> CATTLE +1 KERSTALL -> CURSON +1 KENITES -> KENNITES +1 KEEN -> KIN +1 KEDEM -> KIDDAM +1 KAMAR -> COME +1 JUSTIFIED -> IT'S +1 JUST -> JEST +1 JUST -> IS +1 JUST -> GITS +1 JURY -> JERRY +1 JURY -> CHERRY +1 JURISDICTION -> JURIS +1 JUNIOR -> GENIOR +1 JUMPS -> JUMPED +1 JULIEN -> JULIAN +1 JUG -> CHUG +1 JUDGMENT -> YOU +1 JUDGMENT -> TEM +1 JUDGMENT -> JOINTMENT +1 JUDAH -> JULIA +1 JOUVIN'S -> JUBANCE +1 JOSHUA -> JOHNSHAW +1 JOKINGLY -> CHOKINGLY +1 JOINED -> JOINCE +1 JOCELYN'S -> JOCELYN +1 JOCELYN -> JOSCELYN +1 JESTER -> GESTURE +1 JEHU -> JEHOV +1 JEHOASH -> JO +1 JEERED -> JERED +1 JEDGE -> JUDGE +1 JANSENIST -> GENTLEST +1 JANEERO -> GENERO +1 JANE -> CHANT +1 JAKEY'S -> JAKIE +1 JAKEY -> GIGERIS'S +1 JACKMAN -> JACK +1 JACKAL -> HO +1 JACKAL -> AND +1 JACK -> JAG +1 IZZY'S -> IZZIE'S +1 IVANOVITCH -> GIVANOVITCH +1 ITSELF -> ITSEL +1 ITS -> TO +1 ITS -> IT'S +1 ITS -> HAD +1 IT'S -> HIS +1 IT'S -> AND +1 IT -> YET +1 IT -> TRUE +1 IT -> THIS +1 IT -> THERE'S +1 IT -> STEPS +1 IT -> RODE +1 IT -> OVER +1 IT -> OR +1 IT -> O +1 IT -> MIN +1 IT -> ITS +1 IT -> IT'S +1 IT -> IT'LL +1 IT -> EMMA +1 IT -> EACH +1 IT -> ARMY +1 IT -> AND +1 ISRAELITES -> ISRAIT +1 ISRAEL'S -> ISRA'S +1 ISLAMISED -> ISLAMMISED +1 ISLAM -> GAVE +1 ISAAC -> MISERC +1 IS -> SAID +1 IS -> ROOM +1 IS -> RAPPA'S +1 IS -> OF +1 IS -> LANY +1 IS -> IF +1 IS -> HAS +1 IS -> FOR +1 IS -> ENDOWED +1 IS -> DOES +1 IRONICAL -> IRONIC +1 IOWA -> HOUR +1 INVALIDES -> INVALIDE +1 INVADE -> IN +1 INTO -> TO +1 INTO -> MEAN +1 INTO -> IN +1 INTO -> AND +1 INTO -> AN +1 INTEND -> INSENT +1 INTELLECTUALLY -> INTELLECTUAL +1 INSTRUCTED -> INSTRUCTIVE +1 INSTANTLY -> THING +1 INSISTENCE -> DOZE +1 INSCRIPTIONS -> SCRIPS +1 INQUIRE -> ACQUIRE +1 INNES -> EAMES +1 INN -> IN +1 INJURE -> ENDURE +1 INGENIOUSLY -> INGENUOUSLY +1 INFAMOUS -> IN +1 INDEED -> INDE +1 INCOMPARABLE -> INN +1 IN'T -> INTO +1 IN -> WHEEL +1 IN -> TO +1 IN -> THIN +1 IN -> THEIR +1 IN -> OF +1 IN -> NEAT +1 IN -> JEST +1 IN -> INTO +1 IN -> INTERPLIES +1 IN -> INSIDE +1 IN -> INN +1 IN -> INLORING +1 IN -> HEARD +1 IN -> FROM +1 IN -> FREE +1 IN -> ENCAMP +1 IN -> AWHILE +1 IN -> AT +1 IN 
-> A +1 IMPROVISED -> PROVISED +1 IMPROVE -> PROVE +1 IMPLY -> SE +1 IMMENSE -> MENST +1 IM -> QUEST +1 ILU -> TO +1 ILU -> ILIU +1 IGNOMY -> EGOY +1 IF -> THAT +1 IF -> IT +1 IDEA -> A +1 ICES -> ISIS +1 ICES -> IISES +1 ICE -> EYES +1 I'M -> UNHUNGRY +1 I'M -> NUM +1 I'LL -> ELSEWOO +1 I'FAITH -> I +1 I'D -> I'VE +1 I'D -> I'LL +1 I -> TO +1 I -> THAT +1 I -> STY +1 I -> OUGHT +1 I -> IT +1 I -> IOPIUS +1 I -> HER +1 I -> EYES +1 I -> EVER +1 I -> ECHOLON +1 I -> AMID +1 I -> AH +1 HURT -> HIDE +1 HUNTINGDON -> HARDINGEN +1 HUNTINGDON -> HANTINGDON +1 HUNTERS -> HANDLES +1 HUMOURS -> HUMANS +1 HUMOR -> HUMOUR +1 HUH -> HOW +1 HUDSPETH -> HUSBATH +1 HOWEVER -> SAMBERT +1 HOWEVER -> HOURSERVES +1 HOWEVER -> HERBERT +1 HOW -> OH +1 HOUSEHOLD -> HOUSE +1 HOUR -> I +1 HOUNDED -> HANDY +1 HOSPITABLY -> HALF +1 HORDE -> HOARD +1 HOPPING -> HAVING +1 HOPE -> OPEN +1 HOO'LL -> HE'LL +1 HOO -> HE +1 HONOURS -> HONORS +1 HONOUR -> HONOR +1 HONORS -> HONOURS +1 HONOR -> HUNGER +1 HONESTLY -> ON +1 HONEST -> I +1 HOMEPUSH -> HOME +1 HOME -> WHOLE +1 HOME -> HE +1 HOLY -> WHOLLY +1 HOLLER -> HOLLERED +1 HOLD -> OR +1 HOLD -> HODE +1 HOLD -> HER +1 HOF -> WHOLE +1 HITHER -> THITHER +1 HISSELF -> HIS +1 HIS -> ITS +1 HIS -> INTO +1 HIS -> IN +1 HIS -> HE +1 HIS -> ESTABLETS +1 HIS -> DISCOURSE +1 HIS -> AS +1 HIS -> AN +1 HINDFELL -> HINFIELD +1 HIMSELF -> HIS +1 HIM -> EM +1 HIM -> DE +1 HIJAZ -> HI +1 HIGHS -> HIES +1 HIGH -> IRCH +1 HIERARCHY -> HIRAKEE +1 HI -> AY +1 HEYDAY -> HEY +1 HEWN -> YOU +1 HERMON'S -> HAHMON'S +1 HERMON -> HERE +1 HERIOT'S -> HERETT'S +1 HERETOFORE -> HERE +1 HERE -> YOU +1 HERDSMEN -> HERDSMAN +1 HER -> TO +1 HER -> ON +1 HER -> ITALIANS +1 HER -> IT +1 HER -> HYCOMICAL +1 HER -> HERSELF +1 HER -> HE +1 HER -> AROUND +1 HEPTARCHIES -> HEPTARK +1 HEN -> INCOUPS +1 HELVIN -> HELVAN +1 HELPED -> HELPS +1 HELM -> HAIL +1 HELEN -> ALAN +1 HEELED -> HEALED +1 HEEDED -> HE +1 HEDGES -> HATCHES +1 HEARTS -> HEART +1 HEARSE -> HOUSEHOLTS +1 HEARD -> KNOW +1 HEADQUARTERS -> HEADQUARTER'S +1 HEADLONG -> HAD +1 HEADLIGHTS -> HEAD +1 HEAD -> HIDDEN +1 HE'S -> THIS +1 HE'S -> IS +1 HE -> YES +1 HE -> THEY +1 HE -> THE +1 HE -> SHE +1 HE -> SEE +1 HE -> IRRESPONSIBLE +1 HE -> IF +1 HE -> AWAY +1 HE -> AND +1 HAYES -> HATE +1 HAW -> HAWHAT +1 HAVEN -> HAIRED +1 HAVE -> PROVED +1 HAVE -> IF +1 HAVE -> HAV +1 HAVE -> HALF +1 HAVE -> HAIR +1 HAVE -> HAD +1 HAVE -> EH +1 HAVE -> BIT +1 HATTERSLEY -> HALTERSLEY +1 HASAN -> HER +1 HAS -> JUST +1 HAS -> HESTERITY +1 HAS -> HAVE +1 HAS -> AS +1 HARVEY'SWHICH -> HARVEST +1 HAROLD -> HERALD +1 HARKNESS -> HARKINS +1 HARGRAVE -> HARGREAVE +1 HARE -> HAIR +1 HARDWARE -> HARD +1 HARDLY -> ARE +1 HARD -> OUR +1 HARD -> HEART +1 HAPPENED -> HAPPEN +1 HAPLY -> HAPPILY +1 HAND -> HANDS +1 HALT -> HELP +1 HALL -> WHOLE +1 HALL -> HOLLAND +1 HALF -> HAPPEN +1 HALEY'S -> HALELY +1 HADDA -> HAD +1 HAD -> IN +1 HAD -> GOT +1 HAD -> EXHALED +1 HAD -> ENDING +1 HAD -> DO +1 HAD -> ARE +1 GYLINGDEN -> GILINGDEN +1 GURR -> GURSER +1 GURR -> GRIGGLY +1 GURR -> GIRD +1 GURR -> GIR +1 GURR -> GERT +1 GURR -> GERFATHER +1 GUNS -> GUN +1 GULPED -> GO +1 GULLET -> GALLANT +1 GUISE -> GUY'S +1 GUIRUN'S -> GUNDERING +1 GUINEA -> GUINEAS +1 GUILD -> GOLD +1 GUESS -> GES +1 GUDRUN -> GUNDRAIN +1 GRUMBLINGLY -> TREMBLINGLY +1 GROVE -> GROW +1 GROAN -> GROUND +1 GRIMSBY -> GRIM'S +1 GRIBIER -> CLAVIER +1 GREY -> GRAY +1 GREENTON -> GREENS +1 GREENBACKS -> GREEN +1 GREAVES -> GREEBS +1 GRAY -> GREY +1 GRAY -> GLAY +1 GRAVE -> BRAVE +1 GRATITUDE -> CREDITU +1 GRASPS -> GRASPED 
+1 GRAPPLE -> GRANTEL +1 GRANDPAP -> GRAN'PAP +1 GRANDAME -> GRAND +1 GRAMMATEUS -> GRAMMATIUS +1 GRAM -> GRAHAM +1 GRAFTON'S -> GRAFTON +1 GRAEME -> GRAHAME +1 GRAEME -> GRAHAM +1 GRACIOUS -> GRECIOUS +1 GOWN -> GUN +1 GOVERNMENTS -> GOVERNMENT +1 GOVERNMENT'S -> GOVERNMENT +1 GOVERNMENT -> GOVERN +1 GOV'NOR -> GUV'NER +1 GOV'NOR -> GOVERNOR +1 GOT -> GOD +1 GOT -> CUT +1 GOT -> COURT +1 GORDON -> GORDON'S +1 GORDON -> GORD +1 GOOD -> SPEAR'S +1 GONE -> DISCOUR +1 GOLDFISH -> GOLD +1 GOLDEN -> GOLD +1 GOING -> YOU +1 GOING -> GO +1 GOES -> WAS +1 GOES -> GONE +1 GODEBILLIOS -> GO +1 GOD -> SCOTT +1 GOD -> GOT +1 GOD -> ARM +1 GOBEY'S -> GOBYS +1 GOBEY'S -> GOBIUS +1 GOBEY'S -> GOBIES +1 GOAL -> GOLD +1 GLISPIN -> CLISPIN +1 GLAD -> GRINDING +1 GIVEN -> GIVING +1 GIVE -> GIVIN +1 GIVE -> GAVE +1 GIT -> GET +1 GIRTHING -> GIRDING +1 GIRTHED -> GIRDED +1 GIRL -> GO +1 GIRDS -> GUERAGE +1 GET -> GIT +1 GET -> GERT +1 GET -> ADD +1 GEORGE'SWHICH -> GEORGE'S +1 GEORGE'S -> GEORGE +1 GENTLEMEN'S -> GENTLEMAN'S +1 GAUTHIER -> GATHIER +1 GAULS -> GULFS +1 GASHED -> GASH +1 GARDEN'S -> GARDENS +1 GAMMON -> GAMIN +1 GALLATIN -> GALLOP +1 G'YIRLS -> IS +1 FUZZ -> FUZ +1 FURZE -> FIRS +1 FUNDS -> FONDS +1 FULL -> POOL +1 FULL -> FOR +1 FULL -> FOOT +1 FRUITS -> SPRUITS +1 FROZE -> ROSE +1 FRONTIERS -> FRONTIER +1 FROG'S -> FROGS +1 FRO -> FROM +1 FRISTOE'S -> FIRST +1 FRISTOE -> FRISTOW +1 FRISTOE -> FOR +1 FRIGHTFUL -> DREADFUL +1 FRIGHTENS -> BRIGHTENS +1 FRIEND -> FRANJAMIN +1 FRIAR -> FRIED +1 FRET -> FRITTEN +1 FRENCH -> FRENCHARD +1 FREEWAY -> FREE +1 FREES -> FREEZWAMEN +1 FREEDOM -> READ +1 FRANZ -> FRIENDS +1 FRANCOIS -> FROIS +1 FRANC -> FRANK +1 FOURTEENTHAT'S -> FOURTEEN +1 FOUR -> FUPS +1 FOUR -> FULL +1 FOUR -> FOREMOTHER +1 FOUR -> FOLLY +1 FOUNDED -> FOUND +1 FOUGHT -> THOUGHT +1 FORTY -> FORTE +1 FORTS -> FAULTS +1 FORMER -> FORM +1 FOREMAN -> FOUR +1 FOREGATHERED -> FOR +1 FORCED -> FORCE +1 FOR -> WERE +1 FOR -> WAS +1 FOR -> TILL +1 FOR -> IN +1 FOR -> FROM +1 FOR -> FOUR +1 FOR -> FAULT +1 FOR -> FAR +1 FOR -> ABOVE +1 FOOLS -> FOOD'S +1 FOOL -> FULL +1 FOOD -> FOOT +1 FONTEVRAULT -> FONTREVAL +1 FOLLOWS -> FOLLOWED +1 FOLLOWER -> FULL +1 FOLLOWED -> I +1 FOLLOWED -> FOLLOW +1 FOLLOW -> FOLLOWING +1 FOE -> FOLK +1 FOALS -> FOOLS +1 FOAL -> FOOL +1 FLY -> FLIES +1 FLUTTERING -> REFLECTING +1 FLOWERBEDS -> FLOWER +1 FLOW -> FLOOR +1 FLOSSY -> FLOSSIE +1 FLOORBOARDS -> FOREBOARDS +1 FLEROV'S -> FUROV'S +1 FLEROV'S -> FLORA'S +1 FLEROV -> FLAREFF +1 FLEROV -> FLARE +1 FLEERED -> FLARED +1 FLAVOR -> FLAVOUR +1 FLAVOR -> FLAVORIT +1 FLATTERER -> SLACKER +1 FLATTERED -> FURTHER +1 FLATHEADS -> FLAT +1 FLASHLIGHT -> LIKE +1 FLABBERGASTED -> FLABRA +1 FISHING -> FISHIN +1 FISHED -> FINISHED +1 FIRSTER -> FIRST +1 FIRE -> FAR +1 FINICAL -> PHYNICAL +1 FINELY -> FINAL +1 FINDING -> FIND +1 FIND -> FANCY +1 FILTRATES -> FUR +1 FILTRATE -> FUDGE +1 FIGURE'S -> FIGURES +1 FIGGER -> GONE +1 FIFTEENTH -> FIFTEEN +1 FELT -> FELL +1 FEELS -> FILLS +1 FEEL -> SEE +1 FEEDS -> FEATS +1 FEED -> FEAT +1 FAVOURITE -> FAVORITE +1 FAVOUR -> FAVOR +1 FAVORITE -> FAVOURITE +1 FAUVENT -> VUENT +1 FAUVENT -> VOUVENT +1 FAUVENT -> UVERT +1 FAUVENT -> REVENT +1 FAUVENT -> FOR +1 FAUVENT -> FERVENT +1 FAUVENT -> FAVAN +1 FAUCES -> FOSSES +1 FATS -> FAT'S +1 FATHER'S -> FATHERS +1 FATHER'S -> FATHER +1 FATHER -> I'VE +1 FATHER -> FUND +1 FATHER -> FOUNDER +1 FATHER -> FOR +1 FATHER -> FARTHIAN'S +1 FATHER -> EITHER +1 FATE -> PHAETON +1 FAT -> BAT +1 FAST -> FAT +1 FARRINDER -> THINDER +1 FARRINDER -> FARNDER +1 
FARRINDER -> FARINGDER +1 FARRINDER -> BARRANGERS +1 FARE -> HE +1 FANNY -> WHEN +1 FANNY -> ANY +1 FANGED -> FACT +1 FAN -> PAN +1 FAM'LY -> FAMILY +1 FALLING -> FOLLOWING +1 FALL -> FOR +1 FAITH -> FIT +1 FAIR -> HER +1 FAILING -> FEELING +1 FAILED -> FAITH +1 FAFNIR'S -> FASTENER'S +1 FAFNIR'S -> FAFNER'S +1 FAFNIR -> STAFF +1 FAFNIR -> FAFNER +1 FAFNIR -> FAFFNER +1 FACE -> FAITH +1 FACE -> FACED +1 EYES -> EYE +1 EXTRACT -> EXTRACTED +1 EXTRA -> SIR +1 EXTINGUISHING -> DISTINGUISHING +1 EXTENUATING -> EXTINUATING +1 EXPOSE -> EXPOSED +1 EXPLOITING -> EXPLODING +1 EXPERIENCE -> SIGNING +1 EXPEL -> EXPELLED +1 EXPECTED -> INSPECTRE +1 EXPANSE -> EXPOUNDS +1 EXECUTIVE -> EXECUTING +1 EXECUTIVE -> EXECUTED +1 EXCLAIMED -> EXPLAINED +1 EXCITING -> THE +1 EXAMINING -> EXAMINED +1 EXACKLY -> EXACTLY +1 EVERYONE -> EVERY +1 EVERY -> EVERYONE +1 EVER -> EVERGREWING +1 EV'YBODY'S -> EVERYBODY'S +1 EUSEBIUS -> EUSIDES +1 EUSEBIUS -> EUSIBIUS +1 EUPHRATES -> EUPHATEES +1 EUPHRANOR -> EUPHRANER +1 ETHEREAL -> ASSYRIAL +1 ETHER -> EITHER +1 ETHELRIED'S -> EPILID'S +1 ETHELRIED -> ETHELRED +1 ETHELRIED -> EPLORRIED +1 ETERNAL -> TURNED +1 ET -> AT +1 ESTHER -> ASSER +1 ESPECIALLY -> SPENT +1 ESPECIALLY -> HAS +1 ERNESTINE -> THE +1 ERE'S -> YES +1 ERE -> IF +1 EQUERRY'S -> EQUERRIES +1 EPIGASTER -> EBERGASTER +1 EPHRAIM -> FROM +1 ENTRUSTED -> AND +1 ENTR'ACTE -> ENTRACT +1 ENTIRELY -> TIRELY +1 ENTERED -> ANSWERED +1 ENSUED -> ENSUIT +1 ENSNARES -> AND +1 ENSLAVED -> ENSLAVE +1 ENOUGH -> UP +1 ENJOY -> ENJOYED +1 ENFRANCHISEMENT -> ENCOMCHISEMENT +1 ENFORCEMENT -> FORCEMENT +1 ENDURETH -> AND +1 ENDURE -> INDURE +1 END -> ANCIENT +1 EMETIC -> AMATIC +1 EMBRUN -> EMBRON +1 EM -> HIM +1 ELYSIAN -> THE +1 ELLIS -> ILLIS +1 ELISIONS -> ELYGIANS +1 ELEXANDER -> IT +1 ELEXANDER -> ALEXANDER +1 ELDER -> OTHER +1 ELBOW -> BOWS +1 ELBERT -> ELBER +1 ELASTIC -> MOLASTIC +1 ELASTIC -> ELECTIC +1 EIGHTH -> EIGHTHS +1 EH -> HEY +1 EGYPTIAN -> RESISTIN +1 EGGS -> KNIFE +1 EFFORTS -> EFFORT +1 EDGING -> EDGED +1 EAU -> OVERCLONE +1 EAST -> EACH +1 EARTH -> ART +1 EARLIEST -> ARIOSABOO +1 EAR -> IRRES +1 EAD -> HEAD +1 E'LL -> YOU +1 E'ER -> ERE +1 E -> DOG +1 DUSK -> THUS +1 DUPLICATES -> DEPLICATES +1 DUNNING -> DINNING +1 DUMAS -> YOU +1 DUM -> DOOM +1 DULL -> DOLL +1 DUKE -> DUPE +1 DUKE -> DO +1 DUDS -> DERDS +1 DRY -> OH +1 DRY -> DRAGOOD +1 DRUGSTORE -> DRUG +1 DRINK -> DRINKIN +1 DRINK -> DRANK +1 DRINK -> BRING +1 DRAWERS -> RAOUL +1 DRAW -> DRAWN +1 DRAUGHT -> DROP +1 DOWNING -> DAWNING +1 DOWN -> ROUND +1 DOWN -> ON +1 DOUBTS -> DOUBT +1 DOUBT -> OUT +1 DOTH -> DOTHAH +1 DOST -> THOSE +1 DOSE -> DAYS +1 DONOVAN -> DONALD +1 DONE -> DON'T +1 DON'T -> DO +1 DON'T -> ANNOUNCE +1 DON'T -> ALL +1 DOM -> DON +1 DOGS -> DOG +1 DOG -> DORCART +1 DOG -> DARK +1 DOEST -> DOST +1 DOES -> DOESN'T +1 DOCTOR -> DOCTRIPOIRE +1 DOCK -> DOCKYARD +1 DOAN -> DON'T +1 DO -> TOO +1 DO -> DON'T +1 DO -> DIEU +1 DO -> DID +1 DIVIDED -> DIVIDE +1 DITCHFIELD -> DITZFIELD +1 DISTRUSTED -> DISTRUDGED +1 DISTRESS -> DISTRESSED +1 DISTICHS -> DISTINCTS +1 DISTANT -> OF +1 DISSENTIENT -> DISINDIAN +1 DISPOSED -> DISPOS +1 DISNEY -> DIDN'T +1 DISINFECTING -> DISINFECT +1 DISFIGURED -> DIS +1 DISASTROUS -> DISASTRATES +1 DISAGREE -> DISAGREED +1 DIRE -> DIA +1 DINKS -> THINK +1 DINKS -> DENZ +1 DINE -> DINED +1 DINARS -> HOURS +1 DILUTE -> DELUDE +1 DIGGING -> TIGGING +1 DIE -> GUY +1 DIDN'T -> THEN +1 DIDN'T -> DON'T +1 DID -> THE +1 DID -> THAT +1 DID -> IT +1 DICKIE -> THE +1 DICKIE -> DICK +1 DEVOUR -> THE +1 DETERMINED -> TURN +1 
DETECTIVES -> DETECTIVE +1 DETECTIN -> DETECTIVE +1 DESTINIES -> DEBTS +1 DESSERTS -> DESERTS +1 DESPOTIC -> THAT +1 DESPITE -> THIS +1 DESK -> VES +1 DESK -> DESKED +1 DESIRES -> DESIRE +1 DESIGN -> A +1 DERELICTS -> DEAR +1 DEPRECATE -> THE +1 DEODORIZING -> OLD +1 DEMETER -> DEMEANOR +1 DELMONICO -> DOMONICO +1 DELIBERATE -> DELIVERED +1 DEFEND -> DEFENDED +1 DEFECTION -> AFFECTION +1 DEEPENED -> DEEP +1 DECLARING -> DECLINING +1 DEBTOR -> DEPTOR +1 DEBARRED -> DEBARED +1 DEATHLY -> DEFTLY +1 DEATHLIKE -> DEATH +1 DEARLY -> STILLIE +1 DEARLY -> DAILY +1 DEANS -> DEAN +1 DEAD -> DEBT +1 DEAD -> DAY +1 DEACH -> DID +1 DE -> TO +1 DAYS -> STAYS +1 DATED -> DID +1 DARKAND -> DARK +1 DARE -> DARED +1 DAPHNE'S -> THESE +1 DAPHNE -> JAPHANE +1 DAPHNE -> JAPANE +1 DANDAN -> TAN +1 DANCER -> DANCERS +1 DAMN -> DEAR +1 DAME'S -> JAMES'S +1 DALYS -> DAILIES +1 DAGOS -> DAG +1 DA -> THOU +1 DA -> DECORTUNA +1 D -> THEY +1 D -> THEN +1 CYNTHIA -> SANTIA +1 CYNICISM -> CYS +1 CYMBALS -> SYMBOLS +1 CUT -> GOT +1 CUT -> GO +1 CUSTOM -> COTTOM +1 CURSORILY -> CURSORY +1 CURRENTS -> CURRANTS +1 CURL -> GIRL +1 CUP -> CUPIED +1 CUISINE -> COUISINE +1 CRYSTALLINE -> CHRISTOLINE +1 CRUX -> CREW +1 CRUSHING -> CRASHING +1 CRUMPLED -> CRUMBLED +1 CRUMBLY -> CRAMBLY +1 CROOKS -> COOKS +1 CRIES -> CHRISTOWN +1 CREEL -> CREOLE +1 CRAWFISH -> CROPPISH +1 CRAWFISH -> CROPFISH +1 CRAWFISH -> CRAW +1 CRATES -> CREEDS +1 CRAB -> CRABS +1 COYNESS -> KINDNESS +1 COY -> KOY +1 COXCOMB -> ACCOUNT +1 COWLEY'S -> CARLIS +1 COURT -> COURTYARD +1 COURT -> CORP +1 COURSING -> COARSING +1 COURSE -> COARSE +1 COURFEYRAC -> CURFAC +1 COURFEYRAC -> COURFERAC +1 COUNT -> COMPASSER +1 COUNSELS -> COUNSEL +1 COUNSEL -> CONSUL +1 COUNCILLOR -> COUNSELLOR +1 COULD -> GOOD +1 COULD -> COULDN'T +1 COULD -> CANNOT +1 COST -> COSTUM +1 CORYDON -> CROYDEN +1 CORNER -> CORNERED +1 CORAL -> COAL +1 COQUETTE -> POCKET +1 COQUETTE -> COQUET +1 COPS -> COPSE +1 COPS -> CAPS +1 COP'S -> COPSE +1 COP -> COPP +1 COOL -> CUR +1 CONTROL -> CONTROLLED +1 CONTINUAL -> CONTINUOUS +1 CONTINGENT -> CONTENTION +1 CONTEND -> CONTENDED +1 CONTEND -> COMPEND +1 CONSTITUENT -> CONSTITUTE +1 CONSTANT -> CAN'T +1 CONSONANTS -> COUNTENANCE +1 CONSOMME -> CONSUM +1 CONINGSBURGH -> CONIGSBURG +1 CONFIRMATION -> CONFIRMATESON +1 CONFIDENTIALLY -> UFFILIENTLY +1 CONFIDE -> CONFINE +1 CONFICERE -> CONFERS +1 CONFECTIONARY -> CONFECTIONERIES +1 CONCOCTED -> CONCLUDED +1 COMORIN -> CORMORRA +1 COMMUNITY -> CUNITY +1 COMMONWEALTH -> CORNWEALTH +1 COMMONWEALTH -> CONWEALTH +1 COMMITTEE -> COMEDY +1 COMMISSARY -> COMMISSORY +1 COMMENCED -> COMMANDS +1 COMING -> COMIN +1 COMING -> CARMINALS +1 COMETH -> COME +1 COMEST -> COMES +1 COMES -> COME +1 COME -> COMMONED +1 COME -> COMMANDER +1 COME -> CLER +1 COLOSSEUM -> COLISEUM +1 COLONEL -> CAROL +1 COLLECT -> COLLECTED +1 COLE -> CO +1 COLDS -> GOLDS +1 COLD -> CALLED +1 COLCHESTER -> COLCHESTER'S +1 COINS -> COIN +1 COD -> COT +1 COCOA -> COOKER +1 COCKRELL -> COCKLE +1 COALESCED -> COLLETS +1 CLUMB -> CLIMB +1 CLOSET -> CLOTH +1 CLOSEST -> CITIZEN +1 CLOMB -> CLIMBED +1 CLOCKS -> CLUX +1 CLING -> CLINK +1 CLIME -> CLIMB +1 CLEVERLY -> LEVELLY +1 CLEAVE -> CLIFF +1 CLAWS -> CLOTH +1 CLASSES -> CLASS +1 CLARET -> CLARGA +1 CLAIRVAUX -> CLERVAL +1 CISEAUX -> ISAU +1 CINDERLAD -> SOONER +1 CINDERLAD -> SIR +1 CINDERLAD -> SAINTO +1 CINDERLAD -> SAID +1 CINDERELLA -> CINRILO +1 CHURCH -> WATCH +1 CHUCKED -> SAID +1 CHRISTIANS -> CHRISTIAN +1 CHRISTIANITY -> DONEGOOD +1 CHRISTENING -> CHRISTIAN +1 CHRIS -> THIS +1 CHRIS -> 
MISTER +1 CHRIS -> GRIS +1 CHRIS -> CHRISTEN +1 CHRIS -> CHRIST +1 CHRIS -> BRUCE +1 CHOUETTE -> SWEAT +1 CHONODEMAIRE -> CHATEAU +1 CHLORATE -> LOW +1 CHLORATE -> CHLORIDE +1 CHILLS -> CHILL +1 CHIEF -> CHIE +1 CHIDE -> CHID +1 CHEEKE -> CHEEK +1 CHEEKBONES -> SHEEP +1 CHEEK -> CHEEKS +1 CHARMED -> SHOWN +1 CHARMED -> HAD +1 CHARLEY'S -> CHARLIE'S +1 CHARGED -> CHARGE +1 CHARGE -> SHARS +1 CHARACTERISTIC -> CARE +1 CHANGE -> CHANCE +1 CHANCES -> CHURCHES +1 CHANCELLOR -> OF +1 CHALONS -> CALON +1 CHAIN -> CHANGE +1 CHAFING -> CHIEFING +1 CHADWELL -> TEDWELL +1 CERTAIN -> CIRCUMST +1 CERTAIN -> AN +1 CELEBRATED -> CLEARED +1 CEDRIC -> SEDRRICK +1 CAVALRYMEN -> CAVERNMENT +1 CAUSE -> COURSE +1 CATTLE -> CATTLEETTA +1 CATHOLIC -> CATTLE +1 CATHEDRAL -> KITRAL +1 CATCHED -> CAST +1 CASTLE -> COUNCIL +1 CASTETH -> CAST +1 CASE -> HAS +1 CASE -> CAVES +1 CARROLL -> KAL +1 CARROLL -> CAROL +1 CARRIED -> CHARACTERED +1 CARRIAGE -> PARISH +1 CAR -> CARJACK +1 CAPRIVI'S -> CAPRIVY +1 CAPITULUM -> AT +1 CAPITULANTES -> CAPITULAM +1 CAPITALISTS -> CAPITALIST +1 CAPITAL -> CAPT +1 CAP -> CAPLICHOS +1 CANS -> ACADANS +1 CAN'T -> COULD +1 CAN -> CAN'T +1 CAMPAIGN -> CAPTAIN +1 CAME -> GAINED +1 CALLETH -> CAUGHT +1 CALLEST -> COLLEST +1 CALLED -> CALL +1 CAIN -> GAME +1 CAGE -> CARED +1 CACKED -> CAGLED +1 CABINET -> CABINETS +1 CA'M -> CALM +1 C -> SEA +1 BYE -> BILL +1 BY -> THE +1 BY -> BUY +1 BY -> BUT +1 BY -> BILL +1 BY -> A +1 BUZZARD -> BOZARD +1 BUTTON -> BOTTOM +1 BUTTERFLY -> BUT +1 BUT -> WITHOUT +1 BUT -> WITH +1 BUT -> WHICH +1 BUT -> SHEPHERD +1 BUT -> PUSE +1 BUT -> OR +1 BUT -> IT +1 BUT -> BY +1 BUT -> BE +1 BUST -> FUSS +1 BUSINESSWHICH -> BUSINESS +1 BURYING -> BERING +1 BURST -> FORCE +1 BURSHEBA -> PERCEIVER +1 BURNETH -> BERNETH +1 BURDENS -> A +1 BULK -> BARK +1 BULK -> BAG +1 BUILDS -> BIDS +1 BUFFETING -> BUFFET +1 BRYNHILD'S -> BRUNHOLD'S +1 BRYNHILD -> BURNHILD +1 BRUCE -> BRUSH +1 BROUGHT -> POURED +1 BROTHER -> BROTHERS +1 BRILLIANT -> BUOYANT +1 BRIEF -> ROOF +1 BREATHLESS -> BREATH +1 BREAST -> CHEST +1 BRAVE -> PRETTY +1 BRAU -> BROW +1 BRASS -> BREASTPAND +1 BRAHMAN -> PROMIN +1 BRAHMAN -> GRAMMER +1 BRAHMAN -> GRAMMEN +1 BRAHMAN -> BRAMMER +1 BRAHMAN -> BRAMMEN +1 BRAHMAN -> BRAMID +1 BRAHMAN -> BRAMIAN +1 BRAHMAN -> BRAHMIN +1 BRAHMAN -> BRAHM +1 BRACY -> BRAZY +1 BRACY -> BRACEY +1 BRACY -> BRACELET +1 BOY -> BY +1 BOUT -> ABOUT +1 BOURGES -> BOURGE +1 BOTTOMED -> BOTTOM +1 BOTTLED -> BOTHERED +1 BOSTON -> BUSTON +1 BOONE -> BOOM +1 BOON -> BOOM +1 BOLT -> BOLTED +1 BOLSHEVIKI -> PULCHEVIKI +1 BOILED -> BOIL +1 BOIL -> BY +1 BOIL -> BOY +1 BOEOTIAN -> BE +1 BOB'S -> BOB +1 BOB -> BOBBED +1 BLOW -> BLUE +1 BLOOMIN -> ROOM +1 BLOODSHED -> BLOTCHETTE +1 BLOOD -> BLOODSTAINED +1 BLOKES -> LOATHS +1 BLOKE -> LOCTICE +1 BLODGETT -> BLODGET +1 BLODGETT -> BLODGE +1 BLODGETT -> ALEXANDER +1 BLOCK -> PLOT +1 BLESSED -> BLEST +1 BLANKETED -> BLANKET +1 BLACKLEG -> BLACK +1 BLACKBURN -> BLACKBIRD +1 BIT -> FIT +1 BIT -> BITTER +1 BISQUE -> THIS +1 BIRDSEYE -> BIRD'S +1 BIN -> BEEN +1 BILL -> BUILD +1 BIBLICAL -> PABRICAL +1 BIBLICAL -> BIBOCO +1 BIBLE -> DIE +1 BHANG -> BANG +1 BEWARE -> BE +1 BEULAH -> BOOLA +1 BETTER -> BY +1 BETIDE -> BE +1 BETHUNE -> BESOON +1 BETCHA -> BITCHER +1 BETAKEN -> TAKEN +1 BET -> BADE +1 BEST -> FAST +1 BESSY -> BUSY +1 BESSY -> BESSIE +1 BESIDE -> BESIDES +1 BENSON -> BUILTON +1 BENOIT -> BENOIS +1 BENNETT -> INVITED +1 BELONGED -> BELONGS +1 BELLY -> VALLEY +1 BELLOWED -> BELOVED +1 BELLE -> BELL +1 BELL -> BELT +1 BELIKE -> BE +1 BEING -> BE 
+1 BEGUN -> BEGAN +1 BEGGING -> PEGGING +1 BEGGED -> BAGS +1 BEGAN -> BEGIN +1 BEG -> BIG +1 BEFORE -> FOR +1 BEFORE -> AND +1 BEFAL -> BEFALL +1 BEEN -> THEN +1 BEEN -> OF +1 BEEN -> MONSHADE +1 BEEN -> COMPLYING +1 BECAME -> MICHANG +1 BEAUMANOIR -> BURMANOIS +1 BEARING -> AND +1 BEALE'S -> BEAT'S +1 BEALE -> BEARD +1 BE -> BETWEEN +1 BE -> BETOUT +1 BAY -> BAILEAF +1 BAXTER -> BAXT +1 BASSORAH -> PUSSARA +1 BASSORAH -> BASSORA +1 BASIL -> BEESER +1 BASIL -> BASE +1 BASIL -> BALES +1 BASIL -> BAESON +1 BARKLEY -> PARKLEY +1 BARIUM -> BURIUM +1 BARGELLO -> BARGENO +1 BARELY -> VARIOUS +1 BAPTISMAL -> THE +1 BANYAN -> BEN +1 BANYAN -> BANNING +1 BANYAN -> BANION +1 BANYAN -> BANDON +1 BANISH -> BANACY +1 BANG -> BENNETT'S +1 BANDINELLO -> BEND +1 BAND -> BEND +1 BALLROOM -> BALL +1 BALLOCK -> BALLIC +1 BALAMMED -> BLAMMED +1 BAILEY -> BAILIQUE +1 BAGHDAD -> BAGDAD +1 BAGHDAD -> ADAD +1 BAG -> PEG +1 BADAWI -> BADAH +1 BAD -> THAT +1 BAD -> BAN +1 BACKING -> BACK +1 BABES -> BABE +1 AWK'ARD -> UPWARD +1 AW -> AH +1 AVIDITY -> ADVITY +1 AVENUE -> AVIGUE +1 AVE -> HAVE +1 AUNT -> AUNTS +1 AUNT -> AREN'T +1 ATUM -> OUTS +1 ATUM -> ATOM +1 ATTENTIVELY -> TINTIVELY +1 ATTENTION -> ATTENTIONS +1 ATTEMPTED -> ATTENDED +1 ATHELSTANE -> ADDLESTEIN +1 AT -> SAYS +1 AT -> SAID +1 AT -> OUT +1 AT -> OF +1 AT -> IT'S +1 AT -> IN +1 AT -> HAD +1 AT -> BUT +1 AT -> ARE +1 AT -> ADD +1 ASSYRIA -> THE +1 ASSEMBLAGE -> A +1 ASSAILING -> SELLING +1 ASLEEP -> LEAP +1 ASKS -> ASK +1 ASKED -> I +1 ASKED -> ASK +1 ASKED -> AS +1 ASKED -> AIR +1 ASK -> ASKED +1 ASIA -> AS +1 ASHUR -> ASHER +1 ASCENSION -> ISSUE +1 AS -> WAS +1 AS -> US +1 AS -> S +1 AS -> OF +1 AS -> IF +1 AS -> I +1 AS -> HAS +1 AS -> COURIER +1 AS -> ALBERT +1 AS -> A +1 ARTIST -> THIS +1 ARTHUR -> OFTEN +1 ART -> ARE +1 ARSTS -> ASKED +1 ARSINOE'S -> ARSENO'S +1 ARSINOE -> AUTON +1 ARSINOE -> ARSENAL +1 ARRIVE -> ARRIVES +1 ARRANGED -> RANGED +1 ARPAD -> OUR +1 ARPAD -> ARPE +1 AROUSED -> ARISED +1 AROUND -> HER +1 ARMED -> I'M +1 ARMED -> ALARMED +1 ARM -> HEART +1 ARKANSAS -> OUR +1 AREN'T -> ARE +1 AREN'T -> AND +1 ARE -> RETURNED +1 ARE -> I +1 ARE -> HAVE +1 ARE -> DON'T +1 ARE -> AND +1 ARE -> AH +1 ARCHIBALD -> ARQUEBAUL +1 APPROVE -> IMPROVE +1 APPEARANCE -> PUREST +1 APPEAR -> OF +1 APPEALED -> APPEARED +1 APPARENTLY -> A +1 APOMORPHINE -> EPIMORPHIN +1 APES -> IPES +1 APART -> IN +1 ANYWAY -> AND +1 ANYONE'S -> ANY +1 ANY -> ANYTHING +1 ANY -> ANYONE +1 ANY -> AND +1 ANVILS -> AMBILS +1 ANTOLIAN -> INTONING +1 ANTIDOTES -> AND +1 ANTHONY -> AUNT +1 ANSWERS -> ADDEST +1 ANOTHER -> AND +1 ANNOYED -> ANNOY +1 ANNIE'S -> ANY +1 ANNIE'S -> AND +1 ANNIE -> AND +1 ANGUISH -> ANGLES +1 ANGESTON -> ANGERSON +1 ANDS -> ENDS +1 ANDBUT -> AND +1 AND -> YOU +1 AND -> UN +1 AND -> ROOM +1 AND -> ON +1 AND -> OF +1 AND -> MISSUS +1 AND -> MIND +1 AND -> ME +1 AND -> LAG +1 AND -> IT +1 AND -> INVOLUNTE +1 AND -> INTO +1 AND -> INFECTED +1 AND -> INCONSTANT +1 AND -> IMPALION +1 AND -> I'M +1 AND -> HIM +1 AND -> HER +1 AND -> HE +1 AND -> GASTENED +1 AND -> ENTIRE +1 AND -> ENJOINING +1 AND -> CONSOHN +1 AND -> CONFINED +1 AND -> BUT +1 AND -> BEING +1 AND -> AS +1 AND -> ANY +1 AND -> ANOTHER +1 AND -> ANNE +1 AND -> ALEXAM +1 AN -> THAT +1 AN -> OUR +1 AN -> NOW +1 AN -> IT +1 AN -> INDULGE +1 AN -> CAN +1 AMYNTAS -> I +1 AMYNTAS -> A +1 AMOUR -> AMORE +1 AM -> AMY +1 ALWAYS -> ALL +1 ALTHEA -> ALTHIE +1 ALTHEA -> ALTHIA +1 ALSO -> OUT +1 ALSO -> ABBS +1 ALREADY -> ALREAD +1 ALONGER -> ALONG +1 ALONE -> OWN +1 ALOES -> AND +1 ALOES -> ALLIES +1 
ALLS -> ALL +1 ALLOWED -> ALLOW +1 ALLOWANCE -> LAW +1 ALLIGATOR -> ALLEGATOR +1 ALLIGATOR -> ADDURE +1 ALLIED -> ALID +1 ALLAYS -> A +1 ALL -> SOUL +1 ALL -> OUR +1 ALL -> OR +1 ALL -> ONLY +1 ALL -> ALWAYS +1 ALKALOIDS -> IKOLOITS +1 ALKALOIDS -> AKES +1 ALKALOID -> AKALOID +1 ALIGHTED -> LIGHTED +1 ALI'S -> ALWAYS +1 ALI -> I +1 ALI -> ALIO +1 ALF -> A +1 ALESSANDRO -> ALISANDRO +1 ALCOHOL -> ALCOHOLD +1 ALCOHOL -> AKELET +1 AL -> ARMANQUIN +1 AL -> ALMAN +1 AL -> ALICELA +1 AIN'T -> HAIN'T +1 AIN'T -> END +1 AIN'T -> ANNE +1 AIN'T -> AM +1 AIN'T -> ADIER +1 AID -> APE +1 AH -> HA +1 AGRARIAN -> AGRIAN +1 AGONE -> A +1 AGAINST -> OF +1 AFTERWARDS -> AFTERWARD +1 AFTER -> TO +1 AFTER -> OUTDREW +1 AFT -> OFF +1 AFT -> APT +1 AFIRE -> AFAR +1 AFFECTION -> EFFECT +1 ADVENTURES -> VENTURES +1 ADN'T -> HADN'T +1 ADHERENTS -> ADHERENCE +1 ADDED -> I +1 ADD -> ADDISMA +1 ADAIR -> EIGHT +1 AD -> ERE +1 ACKNOWLEDGE -> ANNOUNCE +1 ACHESON -> ARCHISON +1 ACCEPT -> EXCEPT +1 ABSTAIN -> ABSTAINED +1 ABSOLVED -> ABSORBED +1 ABSENTEE -> ABSENTE +1 ABOUT -> OF +1 ABOUT -> HE +1 ABOUT -> BUT +1 ABODE -> A +1 ABOARD -> QUESTED +1 ABOARD -> ABROAD +1 A -> YOU'VE +1 A -> WAS +1 A -> UNREPRESENTATIVE +1 A -> THEY +1 A -> SO +1 A -> OLY +1 A -> MUST +1 A -> MELLICENT +1 A -> KEPT +1 A -> JETS +1 A -> HIS +1 A -> HIM +1 A -> GOOD +1 A -> EXERT +1 A -> ELUSIVE +1 A -> EARTHLY +1 A -> CLIFF +1 A -> AVIDE +1 A -> AT +1 A -> ASSAULT +1 A -> AROUND +1 A -> ARE +1 A -> APPROCATUR +1 A -> APOLLO +1 A -> ANNAWING +1 A -> AMERGE +1 A -> ALOUD +1 A -> ALAD +1 A -> ADD +1 A -> ABOVE +1 A -> ABASEMENT + +DELETIONS: count ref +26 THE +18 A +17 IT +16 IN +14 IS +13 AND +11 OF +10 TO +9 I +6 WAS +5 THAT +4 YOU +4 OTHER +4 HIM +4 HE +4 HAD +4 BRAHMAN +4 AM +3 US +3 THIS +3 STAIRS +3 ROOM +3 OR +3 ONE +3 MAKAN +3 JUST +3 IF +3 HIS +3 AT +3 AS +3 ARE +3 AL +2 WIDE +2 WARD +2 VE +2 TIGER +2 SMALL +2 ROUND +2 PROA +2 ON +2 O +2 NOT +2 NEWBERG +2 MINE +2 MET +2 HAVE +2 HA +2 GO +2 FOR +2 FISH +2 ELECTION +2 DO +2 DE +2 BUT +2 BRACY +2 AN +2 ALL +2 AH +1 YOUTH +1 YOURS +1 YOUNGER +1 YOU'VE +1 YER +1 YELLS +1 YE +1 YARD +1 WROTE +1 WORLD +1 WONDERED +1 WOMEN +1 WOMAN +1 WITH +1 WILL +1 WHILE +1 WERE +1 WEAL +1 WE'LL +1 WATER +1 WARN +1 WARMEST +1 WANT +1 VOTE +1 UN +1 TURNED +1 TRIBE +1 TOO +1 THUS +1 THIRD +1 THING +1 THEY'RE +1 THEN +1 THEATRE +1 TATTLERS +1 TALENTS +1 TABLETS +1 STEPS +1 STANDARD +1 STAKES +1 STAINED +1 SPORT +1 SPIRITS +1 SOUNDLY +1 SOUL +1 SORT +1 SONS +1 SONNY +1 SO +1 SLEEPY +1 SIT +1 SIR +1 SIGHT +1 SIDE +1 SHUT +1 SHRUBBERY +1 SHOULD +1 SHORE +1 SHOCKED +1 SHE +1 SET +1 SERVES +1 SEE +1 SCUSE +1 SAILED +1 S +1 ROOMFELLOW +1 REW +1 REVOLUTION +1 RESPONSIBLE +1 REQUIRE +1 REPRESENTATIVE +1 READ +1 PROVOCATOR +1 PRECENTORS +1 POSITION +1 POLO +1 PLAYING +1 PLANTED +1 PHUT'S +1 PHUT +1 PEER +1 PARR +1 PALLIATE +1 OWN +1 OUR +1 OLD +1 ODD +1 OBSERVED +1 N'T +1 N +1 MUCH +1 MOTIONLESS +1 MONKERS +1 MINUTELY +1 MINISTERS +1 MILICENT +1 MESTIENNE'S +1 MEET +1 MAKING +1 MAKE +1 M +1 LUCK +1 LUCID +1 LOWERING +1 LONG +1 LOCK +1 LL +1 LIKE +1 LET +1 LENIN +1 LARCH +1 LAKE +1 LACHAISE +1 KNOW +1 KINE +1 KID +1 KEY +1 JUG +1 JOINING +1 JIS +1 JIM +1 JILT +1 JACK +1 IVANOVITCH +1 IT'S +1 IMPROVED +1 ILLS +1 ILL +1 I'LL +1 HUNGRY +1 HOW +1 HOUSE +1 HITHER +1 HISTORY +1 HERE +1 HER +1 HEBREW +1 HEARTY +1 HAWED +1 HAS +1 HARRIS +1 HALTS +1 GRUFFLY +1 GROWING +1 GOODS +1 GOOD +1 GOAL +1 GNAWING +1 GIVE +1 GIRLS +1 GIFTS +1 GAZE +1 FULL +1 FRIENDS +1 FRANCISCO +1 FORMED +1 FOOTED +1 FOLDS +1 FIND +1 FELL +1 FATHER +1 FAME +1 
EXTEND +1 EMOTION +1 EM +1 EAST +1 E'S +1 DRAT +1 DOWN +1 DOUBT +1 DONE +1 DOING +1 DOIN +1 DIDN'T +1 DESTRUCTION +1 DAY +1 DARE +1 CURSE +1 CROST +1 CREAM +1 COULDN'T +1 CORTONA +1 CORKLE +1 COPS +1 COOPS +1 CONSTANT +1 COMMENT +1 COMICAL +1 COLOGNE +1 COLE +1 CLIMB +1 CLEVER +1 CLEFT +1 CHIPS +1 CHINTZ +1 CHANGE +1 CELL +1 CATS +1 CART +1 CAN +1 CAMP +1 CALL +1 CAIRO +1 BY +1 BUZZARD +1 BULBS +1 BREAKERS +1 BOY +1 BOUT +1 BOTH +1 BITING +1 BELIEVE +1 BASEMENT +1 BAND +1 B +1 AZARIAH +1 ATTENTION +1 ASKED +1 ARCHIAS +1 ANCESTORS +1 ALLEN +1 ADULT +1 ACQUAINTED + +INSERTIONS: count hyp +27 THE +21 A +20 IN +18 IT +18 AND +13 TO +13 OF +10 HE +8 ONE +8 IS +8 ARE +7 THAT +6 ON +6 LAD +6 AT +5 OUT +5 HAD +4 OR +4 HAVE +4 BUT +4 AS +3 WHICH +3 WAS +3 WALL +3 TOO +3 NOT +3 NIGHT +3 MORROW +3 MEN +3 LIKE +2 YOU +2 WHILE +2 TURNED +2 THIS +2 THEY +2 STILL +2 MESSIAN +2 MEANTIS +2 ME +2 MASTER +2 LESS +2 ITS +2 IF +2 HIS +2 HER +2 FISH +2 FIELD +2 DORIS +2 DID +2 DAYS +2 COAT +2 BE +2 AN +2 AM +2 ALL +1 YOURS +1 YORK +1 YEAR +1 WOULD +1 WORTHY +1 WILL +1 WHO +1 WHAT +1 WENT +1 WAY +1 WAIN +1 VOTES +1 VAIN +1 USED +1 US +1 TWO +1 TRADES +1 TOFORE +1 TOE +1 TIDE +1 THOUGH +1 THEMSELVES +1 THEM +1 THEE +1 THAT'S +1 TENANT +1 TEEN +1 TECHNIC +1 TAX +1 TASTE +1 TALE +1 SYRIA +1 SUITED +1 SUIT +1 STROKE +1 STRIKE +1 STRESS +1 STRAW +1 STOVE +1 STORE +1 STEAMS +1 STAIRS +1 STACKS +1 SPITE +1 SPITABLY +1 SPIRITS +1 SPECIALLY +1 SPARED +1 SPACE +1 SONG +1 SOFT +1 SO +1 SNATHS +1 SMALL +1 SIT +1 SHUMMED +1 SHE +1 SEVENTH +1 SENT +1 SEMBLAGE +1 SELF +1 SEATED +1 SCHUMANN +1 SCHLEVENT +1 SAKE +1 ROAR +1 RIPENESS +1 RICHA +1 REWARDED +1 RETURN +1 RESTAKE +1 REST +1 RELATOR'S +1 RAGE +1 QUICKU +1 PUSH +1 PSALMS +1 PROPER +1 PROCATE +1 POT +1 PLEASURE +1 PLEASE +1 PENCIL +1 PEM +1 PEER +1 PART +1 PAD +1 OUR +1 OTIAN +1 OTHER +1 ORIENTOUINE +1 ONE'S +1 OLD +1 OH +1 OFF +1 OCCUPIED +1 NEW +1 NELLO +1 NEEDS +1 NEAR +1 MULES +1 MONTHS +1 MOLL +1 MINDER +1 MIND +1 MER +1 MEET +1 MARK +1 LUCIEN +1 LONG +1 LIT +1 LIGHTS +1 LENS +1 LEISURE +1 LAYS +1 LADY +1 KNOWN +1 KNOW +1 KNIGHT'S +1 KINDRED +1 KEI +1 JEMOSIS +1 JEALOUS +1 JARS +1 JACK +1 INSISTANTS +1 INCLINE +1 IMAGINE +1 ILL +1 I +1 HOPE +1 HO +1 HIM +1 HID +1 HESITATINGLY +1 HEALTH +1 HEADS +1 HAM +1 GYPTIAN +1 GUY +1 GREAT +1 GOT +1 GOD +1 GIGS +1 GATHERED +1 FULL +1 FRIENDLY +1 FRED +1 FOUR +1 FORE +1 FOR +1 FLY +1 FIGURED +1 FELLOW +1 FAMOUS +1 FAITH +1 EYE +1 EXTRAVE +1 ETS +1 END +1 EM +1 ELECTION +1 EASILY +1 EARTH +1 EARLS +1 EACH +1 DUET +1 DOWN +1 DO +1 DIXON +1 DESIGNED +1 DEAL +1 DE +1 DAY +1 DAME +1 COURT +1 CORDS +1 COMPARABLE +1 COLT +1 COGNIZED +1 CODE +1 CO +1 CHAUVELIN +1 CATO +1 CAPTURED +1 CAPITULAT +1 BURDEN +1 BRACES +1 BORN +1 BONSES +1 BONES +1 BOAT +1 BLOT +1 BEECH +1 BEDS +1 BATH +1 BACKS +1 B +1 AWAKE +1 ATTITTING +1 ASH +1 ARISING +1 APOLIS +1 ANY +1 ALONG +1 ALLIES +1 AIR +1 ADULTERATED + +PER-WORD STATS: word corr tot_errs count_in_ref count_in_hyp +THE 3002 270 3134 3140 +A 1043 222 1145 1163 +AND 1705 205 1788 1827 +IN 734 144 808 804 +TO 1402 103 1444 1463 +IT 615 93 660 663 +OF 1353 77 1386 1397 +I 809 77 853 842 +IS 379 72 415 415 +THAT 651 65 682 685 +HE 672 57 693 708 +AT 249 56 279 275 +YOU 494 54 513 529 +THIS 221 50 246 246 +HIS 472 43 493 494 +ON 260 42 281 281 +HER 269 41 289 290 +WAS 638 39 653 662 +THEY 224 39 239 248 +HAD 360 38 375 383 +THEIR 96 36 112 116 +AS 322 36 339 341 +AN 104 36 125 119 +ARE 145 35 159 166 +FOR 420 33 431 442 +OR 93 28 109 105 +WITH 370 26 385 381 +BUT 356 26 370 368 +O 26 25 41 36 +NOT 391 24 
401 405 +HAVE 221 24 233 233 +ALL 227 24 236 242 +WILL 151 22 166 158 +ANY 73 22 76 92 +OUR 61 21 70 73 +ONE 208 21 216 221 +HAS 85 21 102 89 +OUT 157 20 166 168 +HIM 296 20 305 307 +THERE 163 19 174 171 +WHEN 152 18 162 160 +WERE 153 18 166 158 +SO 202 18 211 211 +BRAHMAN 1 18 19 1 +OH 27 17 35 36 +INTO 112 17 117 124 +DO 142 17 153 148 +THEN 145 16 153 153 +MEN 60 16 66 70 +UP 140 15 145 150 +THESE 41 15 51 46 +DE 7 15 18 11 +BY 193 15 199 202 +WOULD 124 14 129 133 +TOO 35 14 41 43 +SAID 246 14 252 254 +NO 186 14 190 196 +IF 160 14 167 167 +MISTER 70 13 74 79 +LESLIE 10 13 23 10 +WHICH 187 12 191 195 +WHAT 183 12 192 186 +SEE 74 12 77 83 +OLD 54 12 57 63 +MAN 106 12 114 110 +LIKE 85 12 89 93 +LET 56 12 64 60 +GURR 2 12 14 2 +DICKIE 11 12 23 11 +BE 311 12 313 321 +AM 57 12 62 64 +WHERE 54 11 57 62 +US 53 11 58 59 +THEM 141 11 144 149 +SHE 285 11 292 289 +MY 245 11 248 253 +ITS 52 11 57 58 +I'M 29 11 34 35 +HERMON 0 11 11 0 +HERE 65 11 69 72 +FULL 14 11 18 21 +DID 92 11 95 100 +TWO 58 10 62 64 +MUST 71 10 77 75 +ME 257 10 260 264 +KNOW 89 10 95 93 +JUST 51 10 57 55 +IT'S 18 10 24 22 +FAUVENT 0 10 10 0 +DICKY 0 10 0 10 +ANNIE 12 10 22 12 +YOUR 98 9 104 101 +WE 148 9 151 154 +SIGURD 0 9 9 0 +ROOM 33 9 37 38 +OFF 47 9 53 50 +NOW 112 9 116 117 +MURDOCH 0 9 9 0 +MISSUS 25 9 31 28 +ZAU 0 8 8 0 +WILFRID 1 8 9 1 +WHO 147 8 151 151 +TURNED 30 8 31 37 +SIR 36 8 40 40 +SET 26 8 31 29 +ROUND 14 8 18 18 +OTHER 65 8 69 69 +LAD 6 8 7 13 +KIND 19 8 19 27 +HEAR 15 8 21 17 +HATH 13 8 17 17 +FROM 177 8 179 183 +FATHER 44 8 51 45 +DON'T 70 8 73 75 +CHRIS 15 8 23 15 +ASKED 41 8 46 44 +AROUND 16 8 20 20 +AL 8 8 16 8 +WELL 81 7 85 84 +UPON 66 7 71 68 +THUS 8 7 13 10 +THOUGH 36 7 39 40 +THOU 66 7 68 71 +SHALL 57 7 62 59 +PRIORESS 0 7 7 0 +PIGEONCOTE 0 7 7 0 +OL 0 7 7 0 +MURDOCK 0 7 0 7 +MINE 9 7 15 10 +LOOK 30 7 33 34 +LEFT 32 7 38 33 +LARGE 10 7 10 17 +LARCH 0 7 7 0 +JACK 1 7 3 6 +IZZY 0 7 7 0 +I'VE 8 7 10 13 +GOT 37 7 40 41 +GOD 24 7 29 26 +GO 59 7 61 64 +FOUR 20 7 24 23 +END 12 7 15 16 +BRACY 0 7 7 0 +BEEN 129 7 133 132 +AIN'T 4 7 11 4 +ZARATHUSTRA 0 6 6 0 +YOU'VE 3 6 7 5 +WHILE 19 6 22 22 +WALL 3 6 4 8 +TRY 11 6 17 11 +THY 23 6 28 24 +THEY'RE 5 6 11 5 +THAN 71 6 74 74 +OVER 47 6 49 51 +NIGHT 49 6 49 55 +MISS 15 6 15 21 +MADE 75 6 77 79 +M 1 6 7 1 +LOVE 26 6 29 29 +LITTLE 91 6 91 97 +LAW 2 6 5 5 +KINE 0 6 6 0 +INTEREST 7 6 7 13 +IM 0 6 6 0 +I'LL 20 6 24 22 +HOW 77 6 81 79 +DOWN 65 6 68 68 +DOG 5 6 9 7 +COME 68 6 71 71 +CINDERLAD 1 6 7 1 +CIGAR 1 6 1 7 +ARCHY 0 6 6 0 +ARCHIE 0 6 0 6 +ANYONE 1 6 6 2 +AH 6 6 9 9 +ADD 6 6 9 9 +YOU'RE 4 5 9 4 +YO 0 5 3 2 +WILFRED 0 5 0 5 +WHITE 16 5 18 19 +TOWARD 7 5 9 10 +THAT'S 19 5 23 20 +SINDBAD 0 5 5 0 +SINBAD 0 5 0 5 +SHUT 6 5 10 7 +SHOULD 69 5 72 71 +SHARRKAN 0 5 5 0 +SHARKAN 0 5 0 5 +SEEMED 16 5 18 19 +SAYS 25 5 28 27 +RUN 6 5 9 8 +READ 7 5 8 11 +ORGANIZER 0 5 0 5 +ORGANISER 0 5 5 0 +ONCE 52 5 56 53 +OLIVE 2 5 7 2 +N'T 0 5 5 0 +MILICENT 2 5 6 3 +MESTIENNE 0 5 5 0 +KNIGHT 4 5 8 5 +INTERESTS 0 5 5 0 +HO 1 5 3 4 +HEAD 40 5 41 44 +HAIR 6 5 6 11 +GONE 16 5 17 20 +GOING 34 5 39 34 +GET 50 5 53 52 +EM 0 5 2 3 +DIDN'T 18 5 21 20 +DAY 58 5 61 60 +CRAWFISH 2 5 7 2 +COPS 0 5 5 0 +BEING 36 5 39 38 +BEALE 5 5 10 5 +ABOUT 76 5 79 78 +YET 32 4 34 34 +YES 42 4 42 46 +YE 11 4 14 12 +WORLD 12 4 14 14 +WON'T 10 4 13 11 +WIDE 6 4 8 8 +WICKER'S 5 4 8 6 +WENT 74 4 76 76 +WE'RE 3 4 7 3 +UPSTAIRS 2 4 3 5 +UNTO 7 4 10 8 +TILL 23 4 24 26 +TIGLATH 0 4 4 0 +TIGER 9 4 13 9 +THERE'S 10 4 11 13 +STREET 13 4 15 15 +STILL 39 4 41 41 +STATE 22 4 24 24 +STAIRS 0 4 3 1 +SORT 11 4 12 14 +SON 15 4 18 16 +SMALL 
15 4 17 17 +SIDE 18 4 22 18 +SERGEY 1 4 5 1 +SEEM 6 4 8 8 +SEA 14 4 15 17 +SAY 80 4 83 81 +SAT 8 4 9 11 +RETURNED 17 4 19 19 +RETURN 16 4 18 18 +REGIN 0 4 4 0 +RAYSTOKE 1 4 5 1 +PUT 38 4 40 40 +POURED 2 4 3 5 +PLACE 36 4 39 37 +PILESER 0 4 4 0 +OUGHT 14 4 16 16 +ONLY 71 4 73 73 +MYRTILUS 0 4 4 0 +MORE 98 4 99 101 +MIND 23 4 24 26 +MET 8 4 11 9 +MAY 40 4 43 41 +MASTER 22 4 23 25 +MANY 27 4 27 31 +MAKE 64 4 66 66 +MAKAN 4 4 8 4 +LONG 51 4 52 54 +LIZZIE 0 4 0 4 +LIL 0 4 4 0 +LETTERS 3 4 7 3 +LESS 7 4 9 9 +LAST 46 4 47 49 +LADY 13 4 15 15 +JURY 0 4 4 0 +JACKAL 4 4 8 4 +I'D 9 4 13 9 +HONOUR 2 4 3 5 +HONOR 0 4 3 1 +HOME 32 4 34 34 +HARMON 0 4 0 4 +HANDS 12 4 15 13 +HAND 39 4 40 42 +HALF 22 4 23 25 +GOOD 65 4 67 67 +GOLD 6 4 6 10 +FISH 5 4 7 7 +FARRINDER 0 4 4 0 +FAITH 8 4 9 11 +EYE 9 4 11 11 +EAST 6 4 8 8 +EACH 18 4 18 22 +DONE 35 4 39 35 +DEAD 17 4 21 17 +DAYS 12 4 13 15 +COURT 10 4 12 12 +COULD 111 4 114 112 +CASE 18 4 20 20 +BEG 5 4 8 6 +BASIL 0 4 4 0 +BANYAN 0 4 4 0 +ARSINOE 0 4 4 0 +ALWAYS 33 4 34 36 +YOU'LL 3 3 4 5 +YER 0 3 3 0 +YEARS 19 3 20 21 +WORTH 1 3 2 3 +WORK 25 3 26 27 +WILDERNESS 5 3 8 5 +WHOLE 22 3 22 25 +WHETHER 12 3 12 15 +WHEEL 1 3 2 3 +WE'LL 0 3 2 1 +WAYNE 0 3 3 0 +WATER 20 3 22 21 +WARD 3 3 6 3 +WAIN 0 3 0 3 +VOTES 1 3 3 2 +VIOLENCE 13 3 16 13 +VERY 83 3 84 85 +VENT 0 3 0 3 +VAIN 4 3 5 6 +UN 0 3 2 1 +ULRICA 0 3 3 0 +TRIED 19 3 19 22 +TOLD 23 3 26 23 +TIME 82 3 83 84 +THOUSAND 15 3 16 17 +THOUGHT 39 3 39 42 +THOSE 37 3 38 39 +TELL 52 3 52 55 +TAKE 44 3 45 46 +SYRUP 1 3 4 1 +SUN 7 3 7 10 +STRIKE 1 3 2 3 +STONEWALL 0 3 3 0 +STONE 11 3 11 14 +STERN 0 3 1 2 +STEPS 6 3 7 8 +STAYED 1 3 4 1 +SPAKE 2 3 5 2 +SOUL 10 3 11 12 +SOUGHT 3 3 6 3 +SONG 1 3 2 3 +SOME 76 3 77 78 +SHERIFF 1 3 4 1 +SHE'LL 1 3 2 3 +SHARDURIS 0 3 3 0 +SENT 9 3 10 11 +SEEN 29 3 32 29 +SCHOOL 6 3 8 7 +SANCT 0 3 3 0 +SAINT 19 3 19 22 +S 1 3 3 2 +RIGHT 30 3 33 30 +RAYSTROKE 0 3 0 3 +RAY 0 3 1 2 +RATHER 14 3 16 15 +RAN 8 3 9 10 +RAISE 3 3 6 3 +QUEST 0 3 1 2 +PRODUCED 6 3 7 8 +PROCLUS 0 3 3 0 +PROAS 0 3 3 0 +PROA 0 3 3 0 +PRIORS 0 3 0 3 +PRIEST 3 3 6 3 +POST 5 3 6 7 +POLL 0 3 3 0 +PIGEON 1 3 1 4 +PHUT 0 3 3 0 +PETER 13 3 15 14 +PART 21 3 21 24 +PACE 0 3 1 2 +OWN 43 3 45 44 +OPEN 14 3 15 16 +ONES 4 3 4 7 +OFFICERS 2 3 3 4 +NUZHAT 1 3 4 1 +NOUGHT 0 3 2 1 +NORTH 3 3 4 5 +NOR 19 3 21 20 +NEW 28 3 30 29 +NEAREST 2 3 5 2 +MOSES 5 3 8 5 +MORROW 6 3 6 9 +MISTAH 0 3 3 0 +MESSIAN 0 3 0 3 +MEAT 3 3 6 3 +MC 1 3 4 1 +MATE 1 3 2 3 +MARSHAL 3 3 6 3 +LOVER 2 3 3 4 +LOOKED 23 3 25 24 +LIVE 16 3 17 18 +LISLEY 0 3 0 3 +LINE 5 3 7 6 +LIKED 7 3 9 8 +LIDDY 0 3 3 0 +LAND 18 3 20 19 +L 1 3 4 1 +JUDGMENT 6 3 9 6 +JUDGE 5 3 7 6 +JES 0 3 3 0 +JAKIE 0 3 0 3 +JAKEY 0 3 3 0 +IZZIE 0 3 0 3 +INN 3 3 4 5 +IN'T 0 3 3 0 +HOWEVER 13 3 16 13 +HOLD 7 3 10 7 +HERMON'S 0 3 3 0 +HERMANN 0 3 0 3 +HERMAN 0 3 0 3 +HEART 28 3 28 31 +HAYES 0 3 1 2 +HARD 14 3 16 15 +HA 1 3 3 2 +GRAY 1 3 3 2 +GRANDPAP 0 3 3 0 +GOVERNMENT 19 3 20 21 +GOIN 1 3 1 4 +GOBEY'S 0 3 3 0 +GIVING 6 3 8 7 +GIVEN 12 3 13 14 +GIVE 44 3 47 44 +FURTHER 6 3 6 9 +FRANZ 5 3 8 5 +FOLLOWED 8 3 10 9 +FOLDS 0 3 1 2 +FOALS 1 3 4 1 +FOAL 2 3 5 2 +FIND 23 3 25 24 +FELLOW 13 3 13 16 +FAVOUR 3 3 4 5 +FAVOR 0 3 2 1 +FAR 21 3 21 24 +FAFNIR 0 3 3 0 +EYES 34 3 35 36 +ETERNAL 0 3 3 0 +ERE 1 3 2 3 +ELECTION 2 3 4 3 +EITHER 8 3 8 11 +EILEEN 0 3 3 0 +E'S 0 3 3 0 +DRINK 21 3 24 21 +DOUBT 7 3 9 8 +DOES 14 3 15 16 +DEAR 14 3 14 17 +DALY 0 3 3 0 +DALEY 0 3 0 3 +CUT 10 3 12 11 +CREAM 3 3 6 3 +CORNER 10 3 13 10 +CORN 0 3 0 3 +CONFECTIONARY 1 3 4 1 +CLIMB 0 3 1 2 +CHANGE 7 3 9 8 +CERTAIN 14 3 16 15 +CATTLE 4 3 5 6 +CAN'T 15 
3 16 17 +CAN 78 3 80 79 +BRYNHILD 0 3 3 0 +BROTHERS 6 3 8 7 +BRAMIN 0 3 0 3 +BOY 25 3 27 26 +BOIL 3 3 5 4 +BLODGETT 0 3 3 0 +BIT 9 3 11 10 +BILL 2 3 3 4 +BEFELL 0 3 0 3 +BEFEL 0 3 3 0 +AWHILE 1 3 3 2 +AUNT 4 3 6 5 +ASK 16 3 17 18 +ART 12 3 13 14 +AREN'T 1 3 3 2 +ALONE 9 3 10 11 +ALLOW 6 3 8 7 +AILEEN 0 3 0 3 +ZA 0 2 0 2 +YOURSELVES 2 2 2 4 +YOURS 0 2 1 1 +YOU'D 3 2 5 3 +YO'LL 0 2 2 0 +YEAR 6 2 7 7 +YARD 0 2 2 0 +WROTE 2 2 4 2 +WRONG 4 2 4 6 +WRITE 1 2 1 3 +WRIT 0 2 1 1 +WRAPPERS 0 2 0 2 +WOUNDS 1 2 2 2 +WORSE 10 2 10 12 +WORKING 8 2 8 10 +WORKED 1 2 2 2 +WORDS 18 2 18 20 +WONDERED 6 2 8 6 +WONDER 4 2 5 5 +WON 1 2 1 3 +WOMAN 18 2 19 19 +WITCH 1 2 2 2 +WINTER 3 2 5 3 +WINE 6 2 8 6 +WINDOW 17 2 17 19 +WILKES 0 2 0 2 +WILDEST 0 2 1 1 +WILD 7 2 9 7 +WIFE 14 2 16 14 +WI 0 2 1 1 +WHY 46 2 47 47 +WHO'S 0 2 0 2 +WHITHER 1 2 3 1 +WHERE'S 2 2 4 2 +WELLS 0 2 2 0 +WATCHED 2 2 4 2 +WATCH 12 2 12 14 +WARM 0 2 1 1 +WAR 4 2 5 5 +WANT 24 2 26 24 +WAITING 6 2 7 7 +VILLAGES 0 2 1 1 +VIGILANCE 0 2 2 0 +VE 0 2 2 0 +VARIOUS 1 2 2 2 +VALLEY 2 2 2 4 +UTTER 1 2 2 2 +USED 9 2 9 11 +USE 11 2 13 11 +URARTU 0 2 2 0 +UNDERTAKERS 0 2 0 2 +UNDERTAKER'S 0 2 2 0 +TRULY 4 2 5 5 +TRIFLE 0 2 1 1 +TRIBES 4 2 6 4 +TRIBE 2 2 4 2 +TOWN 14 2 14 16 +TOWER 3 2 4 4 +TOWARDS 12 2 13 13 +TORQUILSTONE 0 2 2 0 +TORCHLESTONE 0 2 0 2 +TOP 2 2 3 3 +TONIGHT 0 2 2 0 +TOE 0 2 0 2 +TIRED 5 2 7 5 +TIE 1 2 2 2 +TIDINGS 0 2 2 0 +TIDE 0 2 1 1 +THOUSANDS 3 2 5 3 +THIRST 7 2 9 7 +THIRD 10 2 11 11 +THINK 45 2 46 46 +THINGS 19 2 21 19 +THING 20 2 21 21 +THINE 1 2 1 3 +THIN 3 2 4 4 +THEE 26 2 27 27 +THEATRE 0 2 2 0 +TEN 17 2 18 18 +TEMPLE 2 2 2 4 +TEMPLAR 0 2 2 0 +TELLTALE 0 2 2 0 +TASTE 5 2 6 6 +TAMAR 0 2 2 0 +TALENTS 1 2 3 1 +TAKEN 21 2 22 22 +SYRIA 3 2 4 4 +SURELY 7 2 8 8 +SUPPOSE 10 2 11 11 +SUIT 2 2 2 4 +SUFFICIENT 1 2 3 1 +SUDDEN 7 2 8 8 +STRUCK 11 2 13 11 +STROKE 2 2 2 4 +STRIFE 4 2 6 4 +STRAIN 1 2 1 3 +STORE 2 2 3 3 +STOOD 21 2 22 22 +STOLE 3 2 3 5 +STOKER 0 2 2 0 +STOCKER 0 2 0 2 +STEP 2 2 3 3 +STARS 0 2 0 2 +STARED 3 2 4 4 +STANDARD 2 2 4 2 +SPIRITS 1 2 2 2 +SOMEONE 0 2 2 0 +SIT 4 2 5 5 +SINGA 0 2 2 0 +SIGNING 0 2 1 1 +SIGNED 1 2 2 2 +SIGHT 7 2 8 8 +SHOW 16 2 16 18 +SHORES 0 2 0 2 +SHIPS 2 2 4 2 +SHEPHERD 0 2 1 1 +SHELLFISH 0 2 0 2 +SHELL 1 2 3 1 +SHED 4 2 5 5 +SHAWS 0 2 2 0 +SEYTON 1 2 3 1 +SERF 0 2 0 2 +SELLER 0 2 2 0 +SEEMS 12 2 13 13 +SEEK 8 2 10 8 +SAW 28 2 29 29 +SAN 4 2 5 5 +SAIL 2 2 4 2 +SAH 0 2 2 0 +S'POSE 2 2 3 3 +RUSSIA 1 2 3 1 +RUM 0 2 1 1 +ROSE 11 2 12 12 +ROPE'S 0 2 2 0 +ROOMFELLOW 0 2 2 0 +RISDON 5 2 7 5 +RIGAN 0 2 0 2 +RIDER 0 2 2 0 +RHODIAN 0 2 2 0 +REVOLUTIONIST 0 2 1 1 +REVEREND 13 2 15 13 +REST 19 2 19 21 +REPLIED 38 2 39 39 +REMAINED 7 2 8 8 +REFORMERS 0 2 2 0 +REFORMED 1 2 1 3 +RED 9 2 10 10 +READY 12 2 13 13 +REACHED 11 2 12 12 +RE 2 2 2 4 +RAISED 6 2 6 8 +RAGED 0 2 0 2 +RAGE 1 2 2 2 +RACHEL 1 2 3 1 +QUEEN 5 2 5 7 +PRODUCE 5 2 6 6 +PROCLASS 0 2 0 2 +PRIESTS 1 2 1 3 +PRESENT 16 2 16 18 +PREACH 0 2 0 2 +PRAISED 2 2 2 4 +POVERTY 1 2 3 1 +POOR 24 2 25 25 +POOL 0 2 0 2 +POLE 0 2 0 2 +PLATTERBAFF 1 2 3 1 +PLAIN 3 2 5 3 +PIECE 3 2 4 4 +PHOSPHOR 0 2 2 0 +PHILIP 2 2 3 3 +PERE 0 2 2 0 +PEG 0 2 0 2 +PEER 2 2 3 3 +PASSED 9 2 10 10 +PASS 6 2 6 8 +PARTLY 1 2 2 2 +PARR 0 2 2 0 +OPENED 10 2 11 11 +ONTO 0 2 2 0 +ONE'S 5 2 5 7 +OFTEN 21 2 21 23 +NOTICE 6 2 7 7 +NORTHFIELD 0 2 2 0 +NONE 12 2 13 13 +NEWBURG 0 2 0 2 +NEWBERG 0 2 2 0 +NEIGHBOUR 1 2 1 3 +NEIGHBOR 0 2 2 0 +NATURALLY 6 2 7 7 +NATURAL 6 2 7 7 +MUCH 40 2 41 41 +MOUTH 6 2 7 7 +MOST 42 2 44 42 +MIN 0 2 0 2 +MILLSON 0 2 0 2 +MIDIAN 0 2 2 0 +MESTER 0 2 2 0 +MERLONUS 0 2 2 0 +MERIT 1 2 
3 1 +MENAHEM 0 2 2 0 +MEET 8 2 9 9 +MEANTIS 0 2 0 2 +MEAN 20 2 20 22 +MATTER 21 2 22 22 +MATI 0 2 2 0 +MARTIAN 2 2 2 4 +MARRIAGE 5 2 6 6 +MANKATO 0 2 2 0 +MALE 0 2 0 2 +MAD 1 2 2 2 +LUNA'S 1 2 3 1 +LUNA 1 2 3 1 +LUCIEN 1 2 2 2 +LOW 12 2 13 13 +LOVED 7 2 7 9 +LORD 17 2 19 17 +LOCK 2 2 4 2 +LOBSTER 10 2 12 10 +LL 0 2 2 0 +LIVED 5 2 6 6 +LILBURN 1 2 3 1 +LIFTED 2 2 2 4 +LIE 3 2 4 4 +LEVER 1 2 3 1 +LEST 2 2 3 3 +LENA 0 2 0 2 +LEAVE 21 2 22 22 +LAUGHED 10 2 11 11 +LAUDERDALE 0 2 2 0 +LATER 8 2 8 10 +LATE 9 2 9 11 +LAP 2 2 2 4 +LAKE 4 2 6 4 +LACHAISE 0 2 2 0 +LA 1 2 3 1 +KNOWN 10 2 10 12 +KNIGHT'S 0 2 1 1 +KITE 0 2 2 0 +KING 43 2 45 43 +KEYS 3 2 5 3 +KEEP 14 2 16 14 +JUG 2 2 4 2 +JOE 0 2 0 2 +JOCELYN 0 2 1 1 +JEST 0 2 0 2 +IVANOVITCH 3 2 5 3 +IMPROVE 0 2 1 1 +ILU 0 2 2 0 +ILL 4 2 5 5 +ICES 1 2 3 1 +HURT 5 2 6 6 +HUNTINGDON 3 2 5 3 +HOZE 0 2 2 0 +HOUSE 36 2 37 37 +HOUR 15 2 16 16 +HOSE 0 2 0 2 +HORSTIUS 0 2 2 0 +HORSES 4 2 4 6 +HOPE 15 2 16 16 +HOO'S 0 2 2 0 +HONOURS 0 2 1 1 +HONORS 1 2 2 2 +HITHER 5 2 7 5 +HIDINGS 0 2 0 2 +HI 0 2 1 1 +HEY 1 2 1 3 +HERMANN'S 0 2 0 2 +HERE'S 0 2 2 0 +HELEN 0 2 1 1 +HEARD 27 2 28 28 +HE'S 7 2 9 7 +HE'D 4 2 4 6 +HAYS 0 2 2 0 +HAPPEN 3 2 3 5 +HALL 12 2 14 12 +HAID 0 2 2 0 +GUY 1 2 1 3 +GUNNER 0 2 0 2 +GUNNAR 0 2 2 0 +GUN 2 2 2 4 +GREY 0 2 1 1 +GREW 4 2 4 6 +GRANDPA 0 2 0 2 +GRAHAM 0 2 0 2 +GRAEME 0 2 2 0 +GOV'NOR 0 2 2 0 +GORDON 19 2 21 19 +GOES 5 2 7 5 +GOAL 1 2 3 1 +GIT 0 2 1 1 +GIRL 10 2 11 11 +GIRK 0 2 0 2 +GIORGIO 0 2 2 0 +GERT 0 2 0 2 +GEORGIO 0 2 0 2 +GEORGE'S 0 2 1 1 +GENTLEMEN 3 2 5 3 +GENTLEMAN 7 2 7 9 +GAVE 32 2 32 34 +GARR 0 2 0 2 +FRONT 11 2 13 11 +FRISTOE 0 2 2 0 +FRIENDS 16 2 17 17 +FREE 12 2 12 14 +FRANCE 3 2 3 5 +FORCE 7 2 7 9 +FOOT 1 2 1 3 +FOOLS 1 2 2 2 +FOOL 3 2 4 4 +FOLLOWING 10 2 10 12 +FOLLOW 9 2 10 10 +FLY 4 2 5 5 +FLEROV'S 0 2 2 0 +FLEROV 0 2 2 0 +FLAVOR 0 2 2 0 +FIT 7 2 7 9 +FIRST 54 2 54 56 +FIELD 4 2 4 6 +FESTAL 0 2 2 0 +FELL 14 2 15 15 +FEAST 0 2 0 2 +FAVOURITE 0 2 1 1 +FAVORITE 0 2 1 1 +FAUCHELEVENT 22 2 24 22 +FATHER'S 5 2 7 5 +FAT 1 2 2 2 +FAST 11 2 12 12 +FANNY 3 2 5 3 +FAN 2 2 3 3 +FAFNIR'S 0 2 2 0 +FACE 31 2 33 31 +EXECUTIVE 3 2 5 3 +EVERYONE 0 2 1 1 +EVERY 37 2 38 38 +EVER 26 2 27 27 +EUSEBIUS 0 2 2 0 +ETHELRIED 0 2 2 0 +ESPECIALLY 5 2 7 5 +ENOUGH 30 2 31 31 +ENDURE 2 2 3 3 +ELEXANDER 0 2 2 0 +ELASTIC 0 2 2 0 +EH 1 2 2 2 +EARTH 19 2 20 20 +DUKE 10 2 12 10 +DRY 4 2 6 4 +DOST 3 2 4 4 +DORIS 0 2 0 2 +DOOR 30 2 30 32 +DOING 9 2 10 10 +DINKS 0 2 2 0 +DIE 9 2 10 10 +DICK 5 2 5 7 +DETECTIVE 2 2 2 4 +DESK 0 2 2 0 +DEFENSE 2 2 4 2 +DEFENCE 2 2 2 4 +DEBRAMIN 0 2 0 2 +DEBRACY 0 2 0 2 +DEARLY 0 2 2 0 +DARK 11 2 11 13 +DARE 4 2 6 4 +DAPHNE 2 2 4 2 +DA 0 2 2 0 +D 0 2 2 0 +CUPS 0 2 0 2 +CROFISH 0 2 0 2 +CRIS 0 2 0 2 +COURSE 15 2 16 16 +COURFEYRAC 0 2 2 0 +COUNTRY 15 2 17 15 +COUNTRIES 4 2 4 6 +COUNSEL 1 2 2 2 +COULDN'T 6 2 7 7 +COQUETTE 0 2 2 0 +COPSE 0 2 0 2 +CONTEND 0 2 2 0 +CONSTANT 2 2 4 2 +CONFECTIONERY 0 2 0 2 +COMMONWEALTH 0 2 2 0 +COMING 14 2 16 14 +COMES 14 2 15 15 +COLE 0 2 2 0 +COAT 2 2 2 4 +CO 0 2 0 2 +CLOTH 2 2 2 4 +CLIFF 4 2 4 6 +CINDER 0 2 0 2 +CHRISTIAN 0 2 0 2 +CHLORATE 0 2 2 0 +CHEERY 0 2 0 2 +CHEEK 0 2 1 1 +CHARMED 0 2 2 0 +CHARGE 7 2 8 8 +CENTRE 1 2 1 3 +CENTER 1 2 3 1 +CELLAR 0 2 0 2 +CAST 9 2 9 11 +CARROLL 1 2 3 1 +CAROL 0 2 0 2 +CAME 65 2 66 66 +CALLED 23 2 24 24 +CALL 12 2 13 13 +CALIFORNIAN 0 2 2 0 +CALIFORNIA 0 2 0 2 +BUZZARD 7 2 9 7 +BURNEHELD 0 2 0 2 +BULK 0 2 2 0 +BUILD 1 2 1 3 +BRUCE 2 2 3 3 +BROTHER'S 1 2 1 3 +BRAVE 5 2 6 6 +BRAM 0 2 0 2 +BRACEE 0 2 0 2 +BOUT 0 2 2 0 +BOTTOM 5 2 5 7 +BOOM 1 2 1 3 +BOB 4 2 5 5 +BLOW 
3 2 4 4 +BIBLICAL 2 2 4 2 +BESSY 7 2 9 7 +BEND 1 2 1 3 +BELL 2 2 3 3 +BEGAN 15 2 16 16 +BEFORE 54 2 56 54 +BEER 2 2 2 4 +BEECH 0 2 0 2 +BEE 0 2 0 2 +BEDROOM 1 2 1 3 +BECAME 9 2 10 10 +BASSORAH 0 2 2 0 +BANG 0 2 1 1 +BAND 7 2 9 7 +BAGHDAD 4 2 6 4 +BAG 4 2 5 5 +BAD 8 2 10 8 +B 2 2 3 3 +AWKWARD 3 2 3 5 +AWK 0 2 2 0 +AWAKE 4 2 4 6 +ATUM 0 2 2 0 +ATTENTION 5 2 7 5 +ASSYRIA 2 2 3 3 +ARSENO 0 2 0 2 +ARPAD 0 2 2 0 +ARMED 2 2 4 2 +ARM 4 2 5 5 +ARCHBISHOP 0 2 2 0 +ARCHBISH 0 2 0 2 +ANOTHER 30 2 31 31 +ANNOUNCE 1 2 1 3 +ANNIE'S 0 2 2 0 +ANNE 1 2 1 3 +AMYNTAS 0 2 2 0 +ALTHEA 1 2 3 1 +ALSO 31 2 33 31 +ALONG 15 2 15 17 +ALOES 0 2 2 0 +ALLIGATOR 0 2 2 0 +ALLIES 0 2 0 2 +ALKALOIDS 0 2 2 0 +ALI 2 2 4 2 +ALEXANDER 0 2 0 2 +ALCOHOL 3 2 5 3 +ALAN 0 2 0 2 +AIR 10 2 10 12 +AFTER 95 2 97 95 +AFT 0 2 2 0 +AFFECTION 0 2 1 1 +ADDED 12 2 13 13 +ACQUAINTED 2 2 3 3 +ABOVE 9 2 9 11 +ABOARD 1 2 3 1 +ZENOUS 0 1 0 1 +ZEMSTVOS 0 1 1 0 +ZAYNAB 0 1 1 0 +ZAWACON 0 1 0 1 +ZAO 0 1 0 1 +YUSS 0 1 1 0 +YOUTH 3 1 4 3 +YOUNGER 7 1 8 7 +YORK 3 1 3 4 +YONWAY 0 1 0 1 +YO' 0 1 1 0 +YES'M 0 1 1 0 +YEOMEN 0 1 1 0 +YEOMAN 1 1 1 2 +YELLS 0 1 1 0 +YE'LL 0 1 0 1 +YE'D 0 1 0 1 +YAUSKY 0 1 1 0 +YAHWEH 0 1 1 0 +WUNNERED 0 1 1 0 +WRITER 1 1 1 2 +WRETS 0 1 0 1 +WRETCHES 0 1 0 1 +WRETCHED 2 1 2 3 +WREATHS 1 1 1 2 +WRAYE 0 1 0 1 +WRAPPED 1 1 1 2 +WOZZIER 0 1 0 1 +WOUNDED 5 1 6 5 +WOTTETH 0 1 1 0 +WOT 1 1 2 1 +WORTHY 1 1 1 2 +WORSHIPPERS 0 1 1 0 +WORSHIPPED 0 1 0 1 +WORSHIP'S 0 1 1 0 +WORSHIP 4 1 4 5 +WORRY 2 1 3 2 +WORKS 4 1 5 4 +WORKMEN 0 1 0 1 +WORKMAN 0 1 1 0 +WORKINGMEN 0 1 1 0 +WORKADAY 0 1 0 1 +WORD 14 1 15 14 +WOOLWIFE 0 1 0 1 +WOODS 2 1 2 3 +WOODEN 5 1 6 5 +WOOD 3 1 4 3 +WOMEN 14 1 15 14 +WOLT 0 1 0 1 +WOKE 0 1 1 0 +WOE 1 1 2 1 +WODE'S 0 1 1 0 +WIVES 0 1 0 1 +WITHOUT 51 1 51 52 +WITHER 0 1 0 1 +WISHT 0 1 1 0 +WISHED 5 1 5 6 +WIRES 0 1 1 0 +WIRE 1 1 1 2 +WINNING 1 1 1 2 +WILT 4 1 4 5 +WILM 0 1 0 1 +WILLY 0 1 1 0 +WILKSES 0 1 1 0 +WILKS 0 1 1 0 +WILGHAN 0 1 0 1 +WILFRIED 0 1 0 1 +WILDLY 1 1 2 1 +WIELD 0 1 1 0 +WIDOW 1 1 1 2 +WIDELY 0 1 0 1 +WIDEAWAKE 0 1 1 0 +WIDDLED 0 1 0 1 +WICKER 6 1 7 6 +WHOSE 16 1 16 17 +WHOLLY 2 1 2 3 +WHO'D 0 1 1 0 +WHISKIRT 0 1 0 1 +WHISKERED 0 1 1 0 +WHILOME 0 1 1 0 +WHEREABOUTS 2 1 3 2 +WHEREABOUT 0 1 0 1 +WHER 0 1 1 0 +WHEELER 0 1 1 0 +WHATEVER 10 1 10 11 +WHATETH 0 1 0 1 +WHATE'ER 0 1 1 0 +WHAT'S 5 1 6 5 +WHALER 0 1 0 1 +WHACKS 0 1 1 0 +WESTWARD 1 1 1 2 +WESTPORT 0 1 1 0 +WEST 3 1 3 4 +WEIGHED 0 1 1 0 +WEEVILY 0 1 0 1 +WEEVILLY 0 1 1 0 +WEEDS 0 1 1 0 +WEED 0 1 0 1 +WEDDING 7 1 7 8 +WEBBS 0 1 0 1 +WEBB'S 0 1 1 0 +WEARINESS 0 1 0 1 +WEAL 0 1 1 0 +WE'VE 2 1 3 2 +WE'S 0 1 0 1 +WAZIR 4 1 5 4 +WAY 62 1 62 63 +WAX 0 1 0 1 +WAVERLY 0 1 1 0 +WAVERLEY 0 1 0 1 +WAUM 0 1 0 1 +WATONWAN 0 1 1 0 +WATERWAM 0 1 0 1 +WATERED 0 1 0 1 +WATCHMAKERS 0 1 0 1 +WATCHMAKER'S 0 1 1 0 +WAT 0 1 0 1 +WASTED 1 1 1 2 +WARNED 0 1 0 1 +WARN 1 1 2 1 +WARMEST 0 1 1 0 +WARDERS 0 1 1 0 +WARD'S 0 1 1 0 +WANTED 7 1 8 7 +WANDERERS 0 1 1 0 +WANDERER 1 1 2 1 +WANDER 0 1 0 1 +WALT 0 1 0 1 +WALLA'S 0 1 0 1 +WALKING 5 1 5 6 +WALK 4 1 5 4 +WALES 0 1 0 1 +WAKE 2 1 3 2 +WAITIN 0 1 1 0 +WAITED 5 1 5 6 +WAIT 10 1 11 10 +WAISTCOAT 2 1 3 2 +WAGGOT 0 1 1 0 +WAGGING 0 1 1 0 +WAGED 0 1 1 0 +WAG 0 1 0 1 +WADED 0 1 1 0 +WADE 0 1 0 1 +WADDED 0 1 1 0 +VUENT 0 1 0 1 +VRAIRIB 0 1 0 1 +VOYAGE 6 1 7 6 +VOY 0 1 0 1 +VOUVENT 0 1 0 1 +VOUGHT 0 1 1 0 +VOTE 3 1 4 3 +VOMITING 0 1 1 0 +VOLVITUR 0 1 1 0 +VOLVETER 0 1 0 1 +VOICED 0 1 1 0 +VOCAL 1 1 2 1 +VISUALS 0 1 0 1 +VIOLENT 1 1 1 2 +VILLE 0 1 0 1 +VILLAGERS 0 1 1 0 +VIL 0 1 1 0 +VICTIMIZE 0 1 1 0 +VICTIMISE 0 1 0 1 +VETERY 0 1 0 1 
+VESTRY 0 1 1 0 +VES 0 1 0 1 +VERSEST 0 1 0 1 +VERSES 4 1 5 4 +VERILY 2 1 3 2 +VENTURES 0 1 0 1 +VENTRILOQUIST 0 1 1 0 +VENTILLA 0 1 0 1 +VEN 0 1 0 1 +VEIN 0 1 0 1 +VEHEMENTLY 0 1 1 0 +VECCHIO 0 1 1 0 +VAZARRE 0 1 0 1 +VAVERASSEUR 0 1 0 1 +VAVASOUR 0 1 1 0 +VAULTS 0 1 0 1 +VAULT 8 1 9 8 +VAUGIRARD 0 1 1 0 +VATS 0 1 0 1 +VAST 2 1 3 2 +VASSILIEVITCH 0 1 1 0 +VANE 0 1 1 0 +VALET 1 1 2 1 +UZHAT 0 1 0 1 +UVERT 0 1 0 1 +USUALLY 5 1 6 5 +USUAL 3 1 3 4 +USEFUL 0 1 1 0 +URNOUS 0 1 0 1 +URITU 0 1 0 1 +URGED 1 1 2 1 +URARTIAN 0 1 1 0 +URA 0 1 0 1 +UPWARD 1 1 1 2 +UNSTEAD 0 1 0 1 +UNS 0 1 1 0 +UNREWARDED 0 1 1 0 +UNREPRESENTATIVE 0 1 0 1 +UNOCCUPIED 0 1 1 0 +UNLUCK 0 1 0 1 +UNLESS 7 1 8 7 +UNIQUE 0 1 1 0 +UNHUNGRY 0 1 0 1 +UNHESITATINGLY 0 1 1 0 +UNHAPPY 4 1 5 4 +UNEXPECTEDLY 1 1 2 1 +UNEXPECTED 3 1 3 4 +UNDISTRUC 0 1 0 1 +UNDER 36 1 37 36 +UNCREAM 0 1 0 1 +UNCLE 6 1 7 6 +UNCHANGED 0 1 0 1 +UNADULTERATED 0 1 1 0 +ULTIMATELY 0 1 1 0 +ULTIMATE 0 1 0 1 +UKINZER 0 1 1 0 +UH 0 1 0 1 +UFFILIENTLY 0 1 0 1 +UDDER 0 1 1 0 +TYRANNY 0 1 1 0 +TYPE 1 1 1 2 +TYER 0 1 0 1 +TWYMAN'S 0 1 1 0 +TWIMMAN 0 1 0 1 +TWELVEMONTH 0 1 1 0 +TWELVE 3 1 3 4 +TURRET 0 1 1 0 +TURNS 1 1 2 1 +TURNING 5 1 6 5 +TURN 8 1 8 9 +TURBULENT 0 1 1 0 +TUMBLED 1 1 1 2 +TUM 0 1 0 1 +TUFTS 1 1 1 2 +TUBERCUOUS 0 1 0 1 +TUBERCULOUS 0 1 1 0 +TRYING 7 1 8 7 +TRUTH 10 1 10 11 +TRUSTY 0 1 0 1 +TRUSTEE 0 1 1 0 +TRUNDLED 0 1 1 0 +TRUE 15 1 15 16 +TRUCE 1 1 2 1 +TROT 0 1 0 1 +TRIVET 0 1 1 0 +TRIUMPHS 0 1 0 1 +TRITES 0 1 0 1 +TRITE 0 1 0 1 +TRIPES 0 1 1 0 +TRINES 0 1 0 1 +TRIES 3 1 3 4 +TRIBUT 0 1 0 1 +TREMBLINGLY 1 1 1 2 +TRELAWNEY 0 1 1 0 +TREEONER 0 1 0 1 +TREBRANT 0 1 0 1 +TREASURES 1 1 1 2 +TREASURE 1 1 2 1 +TRAVELLED 1 1 1 2 +TRAVELED 0 1 1 0 +TRAVEL 1 1 1 2 +TRANSSHIP 0 1 1 0 +TRANSHIP 0 1 0 1 +TRAINS 0 1 0 1 +TRAINING 4 1 5 4 +TRAINED 1 1 1 2 +TRAINDAWG 0 1 1 0 +TRAIN 0 1 0 1 +TRAFFIC 0 1 1 0 +TRADITIONS 1 1 2 1 +TRADES 0 1 0 1 +TRADEMARK 0 1 1 0 +TRADE 7 1 7 8 +TRACK 2 1 3 2 +TRACED 0 1 1 0 +TOWERED 0 1 0 1 +TOUSTRA 0 1 0 1 +TOUR 1 1 2 1 +TOUGHS 0 1 1 0 +TOUGH 2 1 3 2 +TORTURE 1 1 1 2 +TORMENT 0 1 1 0 +TORE 0 1 1 0 +TORCH 0 1 1 0 +TOOK 34 1 35 34 +TONGUE 4 1 5 4 +TONG 0 1 0 1 +TOMORROW 0 1 1 0 +TOMB 1 1 2 1 +TOM 8 1 9 8 +TOLERBLE 0 1 1 0 +TOLERABLE 1 1 1 2 +TOILET 2 1 3 2 +TOILED 0 1 0 1 +TOFORE 0 1 0 1 +TITLING 0 1 0 1 +TITLE 0 1 1 0 +TIRING 0 1 1 0 +TIRESOME 0 1 1 0 +TIRELY 0 1 0 1 +TIRE 0 1 0 1 +TINTIVELY 0 1 0 1 +TIMES 9 1 10 9 +TILLERS 0 1 1 0 +TIGHTENING 0 1 1 0 +TIGHTENED 1 1 1 2 +TIGGLERS 0 1 0 1 +TIGGING 0 1 0 1 +TIENO 0 1 0 1 +TIED 2 1 2 3 +TIDY 0 1 0 1 +TICKLING 0 1 1 0 +THUMB 3 1 4 3 +THROWING 2 1 3 2 +THROUGH 36 1 37 36 +THRIFTILY 0 1 1 0 +THREW 8 1 8 9 +THREE 36 1 37 36 +THREAT 0 1 0 1 +THOMMISH 0 1 0 1 +THOES 0 1 0 1 +THITHER 5 1 5 6 +THINKING 4 1 4 5 +THINDER 0 1 0 1 +THEY'LL 0 1 0 1 +THEMSELVES 17 1 17 18 +THEATER 0 1 0 1 +THAT'LL 0 1 1 0 +THANKS 5 1 5 6 +TERRANT 0 1 0 1 +TERRACE 0 1 0 1 +TERRA 0 1 1 0 +TENT 0 1 0 1 +TEND 1 1 2 1 +TENANT 0 1 0 1 +TEMPTETH 0 1 1 0 +TEMPTED 0 1 0 1 +TEMPLES 1 1 2 1 +TEMP 0 1 0 1 +TEM 0 1 0 1 +TELLERS 0 1 0 1 +TELEGRAMAS 0 1 0 1 +TELEGRAM 2 1 3 2 +TEETH 4 1 4 5 +TEEN 0 1 0 1 +TEEM 0 1 0 1 +TEDWELL 0 1 0 1 +TECHNIC 0 1 0 1 +TEAM 0 1 1 0 +TEALE 0 1 0 1 +TEAL 0 1 1 0 +TEA 1 1 2 1 +TAX 0 1 0 1 +TATTLERS 0 1 1 0 +TASKMASTER 0 1 1 0 +TARDY 0 1 1 0 +TAPPY 0 1 0 1 +TAPPED 0 1 1 0 +TAPIS 0 1 1 0 +TANQUAM 0 1 1 0 +TAN 0 1 0 1 +TALONS 0 1 0 1 +TALMASH 0 1 1 0 +TALLED 0 1 0 1 +TALKS 1 1 1 2 +TALKED 4 1 5 4 +TALE 2 1 2 3 +TAKING 11 1 11 12 +TAHITI 0 1 1 0 +TABLETS 1 1 2 1 +T'OTHER 0 1 1 0 +T 1 1 1 2 +SYTLER 0 1 0 
1 +SYNONYMON 0 1 1 0 +SYMBOLS 0 1 0 1 +SYLLOGISM 0 1 1 0 +SYDNEY 2 1 3 2 +SWORD 11 1 12 11 +SWELP 0 1 1 0 +SWEAT 0 1 0 1 +SWARTHY 0 1 1 0 +SWALLTY 0 1 0 1 +SWAG 0 1 1 0 +SWAB 0 1 0 1 +SUSPICION 4 1 4 5 +SUSPICIENT 0 1 0 1 +SURGY 0 1 0 1 +SURFACES 0 1 0 1 +SURE 18 1 18 19 +SUNK 0 1 0 1 +SUMTHIN 0 1 1 0 +SUMINUTELY 0 1 0 1 +SULPHURIC 0 1 1 0 +SUITED 1 1 1 2 +SUFFOLK 0 1 1 0 +SUFFOLD 0 1 0 1 +SUFFICES 0 1 1 0 +SUFFERG 0 1 0 1 +SUE 0 1 1 0 +SUCKED 0 1 1 0 +SUCCOURS 0 1 1 0 +SUBTRINE 0 1 0 1 +SUBJECT 6 1 6 7 +SUB 0 1 1 0 +STY 0 1 0 1 +STRUGGLING 1 1 1 2 +STRUGGLE 0 1 1 0 +STRIPE 0 1 0 1 +STRIKEBREAKERS 0 1 0 1 +STRETCH 0 1 1 0 +STRESS 0 1 0 1 +STREAK 1 1 2 1 +STRAYING 0 1 1 0 +STRAYED 0 1 0 1 +STRAW 1 1 1 2 +STRATORS 0 1 0 1 +STRANGEST 0 1 1 0 +STRANGERS 1 1 1 2 +STRANGE 4 1 5 4 +STRAITS 0 1 1 0 +STRAITENED 0 1 1 0 +STRAIT 0 1 0 1 +STRAINS 0 1 1 0 +STRAIGHTENED 0 1 0 1 +STRAIGHT 4 1 5 4 +STOWED 0 1 0 1 +STOVE 1 1 1 2 +STORIES 3 1 3 4 +STORES 1 1 2 1 +STORED 0 1 1 0 +STOPPED 5 1 5 6 +STONED 0 1 1 0 +STOCKING 0 1 0 1 +STOCK 2 1 3 2 +STIRRING 1 1 1 2 +STILLNESS 2 1 3 2 +STILLIE 0 1 0 1 +STICKET 0 1 0 1 +STICK 4 1 5 4 +STEWPANT 0 1 0 1 +STEWPAN 1 1 2 1 +STEW 0 1 1 0 +STEVER 0 1 0 1 +STERNMOST 0 1 0 1 +STEPPED 1 1 2 1 +STEEVER 0 1 1 0 +STEERING 0 1 1 0 +STEERED 0 1 0 1 +STEAMS 0 1 0 1 +STEALING 0 1 0 1 +STAYS 0 1 0 1 +STAYING 1 1 2 1 +STAY 6 1 6 7 +STATUS 0 1 1 0 +STATURE 0 1 1 0 +STATUE 2 1 2 3 +STATES 7 1 7 8 +STATEROOM 1 1 1 2 +STATED 2 1 3 2 +STAS 0 1 1 0 +STARVING 1 1 2 1 +START 3 1 3 4 +STARLING 0 1 0 1 +STARK 0 1 1 0 +STANDSTILL 0 1 1 0 +STANDS 2 1 3 2 +STANDARDS 0 1 0 1 +STAKES 0 1 1 0 +STAINED 1 1 2 1 +STAFF 0 1 0 1 +STACKS 0 1 0 1 +STACCOY 0 1 0 1 +SQUEAMISH 2 1 3 2 +SPY 1 1 1 2 +SPRUITS 0 1 0 1 +SPOTIC 0 1 0 1 +SPORT 1 1 2 1 +SPONSUS 0 1 1 0 +SPONGE 0 1 1 0 +SPONDYLES 0 1 1 0 +SPITE 10 1 10 11 +SPITABLY 0 1 0 1 +SPIRIT 5 1 6 5 +SPINTER 0 1 0 1 +SPINNING 1 1 1 2 +SPINES 0 1 0 1 +SPILLING 0 1 1 0 +SPIES 0 1 0 1 +SPICE 0 1 1 0 +SPENT 3 1 3 4 +SPENDING 0 1 0 1 +SPENCE 0 1 0 1 +SPEECH 5 1 5 6 +SPECIALLY 0 1 0 1 +SPEAR'S 0 1 0 1 +SPATANI 0 1 0 1 +SPASM 1 1 1 2 +SPARSELY 0 1 1 0 +SPARED 2 1 2 3 +SPADEL 0 1 0 1 +SPADDLE 0 1 1 0 +SPACE 2 1 2 3 +SOWED 0 1 0 1 +SOUTHERN 2 1 3 2 +SOUTH 1 1 1 2 +SOUSE 0 1 1 0 +SOUS 1 1 1 2 +SOURCE 1 1 2 1 +SOUNDLY 1 1 2 1 +SOUNDING 0 1 0 1 +SOTELES 0 1 1 0 +SORDID 0 1 1 0 +SOPHIA 0 1 0 1 +SOOT 0 1 1 0 +SOONER 4 1 4 5 +SOON 22 1 22 23 +SONSPIER 0 1 0 1 +SONS 0 1 1 0 +SONNY 0 1 1 0 +SOMETHING 29 1 29 30 +SOMBRE 0 1 0 1 +SOMBER 0 1 1 0 +SOLDOM 0 1 0 1 +SOJOURNOVITCH 0 1 0 1 +SOFT 5 1 5 6 +SOFA 1 1 2 1 +SOCIALLY 0 1 0 1 +SOCIALIST 1 1 2 1 +SNYM 0 1 0 1 +SNETKOV 1 1 2 1 +SNATHS 0 1 0 1 +SNARLS 0 1 0 1 +SNARLED 0 1 1 0 +SMOLNY 0 1 1 0 +SMOKESTACKS 0 1 1 0 +SMOKER 2 1 3 2 +SMOKE 2 1 2 3 +SMIRCHED 0 1 1 0 +SMILE 8 1 9 8 +SMELL 0 1 1 0 +SMARGED 0 1 0 1 +SMAR 0 1 0 1 +SLUNK 0 1 1 0 +SLIGHTLY 1 1 2 1 +SLEEPY 1 1 2 1 +SLACKER 0 1 0 1 +SLACKENED 0 1 1 0 +SLAB 1 1 2 1 +SKYLIGHTS 0 1 0 1 +SKYLARKS 0 1 1 0 +SKIRT 0 1 1 0 +SKEW 0 1 1 0 +SKEPTICAL 0 1 1 0 +SIXES 0 1 1 0 +SIX 17 1 17 18 +SIRES 0 1 0 1 +SIRE 3 1 4 3 +SINUHIT 0 1 1 0 +SINGS 2 1 3 2 +SINGAFUT'S 0 1 0 1 +SINFUL 0 1 1 0 +SINE 0 1 1 0 +SIMULATES 0 1 1 0 +SIMMERING 0 1 1 0 +SIMILATES 0 1 0 1 +SILVER 7 1 7 8 +SILLY 2 1 3 2 +SILENCE 8 1 9 8 +SIGNOR 0 1 1 0 +SIGN 4 1 4 5 +SIGHING 1 1 2 1 +SIEVE 0 1 1 0 +SIDNEY 0 1 0 1 +SICUT 0 1 1 0 +SIBBERING 0 1 0 1 +SHUMMED 0 1 0 1 +SHUMAN 0 1 1 0 +SHUCKS 0 1 1 0 +SHUBERG'S 0 1 0 1 +SHRUGS 0 1 0 1 +SHRUBS 1 1 2 1 +SHRUBBERY 0 1 1 0 +SHOWN 4 1 4 5 +SHOWING 7 1 7 8 +SHOULDST 2 1 3 2 +SHOULDEST 0 
1 0 1 +SHOULDERS 3 1 4 3 +SHOULDER 2 1 2 3 +SHOT 9 1 10 9 +SHORE 1 1 2 1 +SHOCKS 0 1 0 1 +SHOCKED 1 1 2 1 +SHO'LY 0 1 1 0 +SHIRT 1 1 1 2 +SHIP 17 1 17 18 +SHING 0 1 0 1 +SHIMMERTS 0 1 0 1 +SHET 0 1 1 0 +SHERE 0 1 1 0 +SHEPARD 0 1 0 1 +SHEETS 0 1 1 0 +SHEEP 0 1 0 1 +SHEAR 0 1 0 1 +SHATTERED 0 1 1 0 +SHARS 0 1 0 1 +SHARED 0 1 0 1 +SHAPIA 0 1 1 0 +SHAPEIA 0 1 0 1 +SHALLUM 0 1 1 0 +SHAKEDOWN 0 1 1 0 +SHAKE 2 1 2 3 +SHAGG 0 1 0 1 +SHAG 2 1 3 2 +SHADORIS 0 1 0 1 +SEWING 0 1 1 0 +SEVEREIGN 0 1 0 1 +SEVERE 1 1 2 1 +SEVERAL 9 1 10 9 +SEVENTIETH 0 1 1 0 +SEVENTH 3 1 3 4 +SEVENTEENTH 0 1 0 1 +SEVEN 11 1 12 11 +SETTON 0 1 0 1 +SETON 0 1 0 1 +SERVES 0 1 1 0 +SERVED 3 1 3 4 +SERMON 0 1 1 0 +SEREN 0 1 0 1 +SENTENCED 0 1 1 0 +SENOR 1 1 1 2 +SENDS 0 1 0 1 +SEND 4 1 4 5 +SENATORY 0 1 0 1 +SEMBLAGE 0 1 0 1 +SELLING 0 1 0 1 +SELF 6 1 6 7 +SEEST 1 1 2 1 +SEEMING 1 1 1 2 +SEEKEST 0 1 1 0 +SEEK'ST 0 1 0 1 +SEEING 8 1 8 9 +SEDUCETH 0 1 1 0 +SEDUCE 0 1 0 1 +SEDRRICK 0 1 0 1 +SECUT 0 1 0 1 +SECURS 0 1 0 1 +SEATS 1 1 1 2 +SEATED 5 1 5 6 +SEAT 0 1 1 0 +SEAS 1 1 1 2 +SEARCHED 2 1 2 3 +SEAMEN 0 1 1 0 +SEAMAN 4 1 5 4 +SE 0 1 0 1 +SCUSE 0 1 1 0 +SCULPTORS 1 1 2 1 +SCULPTOR'S 1 1 1 2 +SCRUTINISED 0 1 1 0 +SCRIPS 0 1 0 1 +SCRIBES 1 1 2 1 +SCREAMY 0 1 0 1 +SCRAPPIN 0 1 1 0 +SCRAP 0 1 0 1 +SCOTT 0 1 0 1 +SCORN 2 1 3 2 +SCORCHED 0 1 0 1 +SCO'TCH 0 1 1 0 +SCIENCE 1 1 1 2 +SCHUMANN 0 1 0 1 +SCHULBERG'S 0 1 1 0 +SCHOOLGIRLS 0 1 0 1 +SCHOOLDAYS 0 1 1 0 +SCHOOLBOY 0 1 0 1 +SCHOLARS 0 1 1 0 +SCHLEVENT 0 1 0 1 +SCEPTICAL 0 1 0 1 +SCARO 0 1 0 1 +SCAPEGRACES 0 1 1 0 +SCAPED 0 1 1 0 +SCAPE 0 1 1 0 +SAYING 17 1 17 18 +SAYIN 0 1 1 0 +SAVOUR 0 1 0 1 +SAVE 7 1 7 8 +SATURDAY 0 1 1 0 +SATURDAIL 0 1 0 1 +SARTUOUS 0 1 0 1 +SARKAIUS 0 1 0 1 +SARDID 0 1 0 1 +SARAH 2 1 3 2 +SAO 0 1 0 1 +SANTIA 0 1 0 1 +SANS 0 1 1 0 +SANITARY 1 1 2 1 +SANGS 0 1 0 1 +SANG 1 1 2 1 +SAND 2 1 3 2 +SANCTIS 0 1 0 1 +SANCTESS 0 1 1 0 +SANCHO 8 1 9 8 +SANCHA 0 1 0 1 +SAMPANCISCO 0 1 0 1 +SAME 22 1 22 23 +SAMBERT 0 1 0 1 +SALONE 0 1 1 0 +SALON 1 1 1 2 +SALMON 0 1 0 1 +SALLOWER 0 1 1 0 +SALARY 0 1 0 1 +SAKE 7 1 7 8 +SAINTO 0 1 0 1 +SAILS 0 1 1 0 +SAILORS 0 1 0 1 +SAILOR 2 1 2 3 +SAILED 0 1 1 0 +SAGITTAIRE 0 1 1 0 +SAGATURE 0 1 0 1 +SAD 2 1 2 3 +SACKED 0 1 0 1 +RYO 0 1 1 0 +RUTHS 0 1 0 1 +RUSHIRE 0 1 0 1 +RUSHED 3 1 4 3 +RUSH 0 1 0 1 +RUNS 0 1 1 0 +RUNG 1 1 2 1 +RUMP 0 1 1 0 +RULER 0 1 1 0 +RULED 0 1 0 1 +RULE 4 1 5 4 +RUBENSES 0 1 1 0 +RUBBERS 0 1 1 0 +ROXBURY 0 1 1 0 +ROWING 0 1 0 1 +ROWED 0 1 1 0 +ROUTED 0 1 0 1 +ROUTE 0 1 1 0 +ROUNDED 0 1 1 0 +ROUGH 3 1 3 4 +ROTHS 0 1 1 0 +ROSES 0 1 0 1 +ROSAMUN 0 1 1 0 +ROSAMOND 0 1 0 1 +ROPES 1 1 1 2 +ROOTS 1 1 2 1 +ROOKER'S 0 1 0 1 +ROOF 2 1 2 3 +RONALD 0 1 1 0 +ROMANS 0 1 0 1 +ROMANCE 2 1 3 2 +ROMAN 2 1 2 3 +ROLL 1 1 2 1 +ROLE 0 1 0 1 +ROGI 0 1 0 1 +RODIAN 0 1 0 1 +RODE 6 1 6 7 +ROCK 3 1 4 3 +ROCCALL 0 1 0 1 +ROBIN 0 1 0 1 +ROARING 0 1 1 0 +ROAR 1 1 1 2 +ROADS 0 1 0 1 +ROAD 15 1 16 15 +RISDEN 0 1 0 1 +RIPENESS 0 1 0 1 +RIPE 0 1 0 1 +RIO 0 1 0 1 +RINGMASTER 0 1 1 0 +RING 6 1 6 7 +RINDS 0 1 1 0 +RIGOROUS 0 1 1 0 +RIDGE'S 0 1 1 0 +RIDERING 0 1 0 1 +RIDE 5 1 5 6 +RID 4 1 5 4 +RICHMOND 0 1 0 1 +RICHLY 1 1 1 2 +RICHES 0 1 0 1 +RICHARD 4 1 5 4 +RICHA 0 1 0 1 +RICARDS 0 1 0 1 +RHODES 0 1 1 0 +RHINS 0 1 0 1 +REWARDED 0 1 0 1 +REW 0 1 1 0 +REVOTE 0 1 0 1 +REVOLUTIONISTS 0 1 1 0 +REVOLUTION 2 1 3 2 +REVOLTE 0 1 1 0 +REVOLTA 0 1 0 1 +REVOLT 0 1 0 1 +REVOLISHNESS 0 1 0 1 +REVERED 0 1 0 1 +REVENT 0 1 0 1 +REVELLING 0 1 0 1 +REVELING 0 1 1 0 +REVELATION 0 1 1 0 +REUBEN 0 1 0 1 +RETZCH'S 0 1 1 0 +RETURNING 6 1 6 7 +RESUMED 1 1 2 1 +RESTORETH 0 1 1 0 
+RESTORE 0 1 0 1 +RESTIVE 0 1 1 0 +RESTAKE 0 1 0 1 +RESPONSIBLE 4 1 5 4 +RESOLVED 0 1 1 0 +RESK 0 1 1 0 +RESISTING 1 1 2 1 +RESISTIN 0 1 0 1 +RESINOUS 0 1 1 0 +RESIDED 0 1 0 1 +RESCUED 2 1 3 2 +RESCUE 1 1 1 2 +REQUIRE 3 1 4 3 +REQUEST 3 1 4 3 +REPUTATION 2 1 3 2 +REPUGUE 0 1 0 1 +REPRESENTATIVE 1 1 2 1 +REPORTING 0 1 0 1 +REPLY 1 1 2 1 +REPETITION 1 1 1 2 +REPEATED 5 1 6 5 +REND 0 1 1 0 +REMISSIONARY'S 0 1 0 1 +REMISSION 0 1 1 0 +REMEMBEREST 0 1 1 0 +REMEMBER 13 1 13 14 +REMARKED 9 1 10 9 +REMARK 6 1 6 7 +REMAINS 2 1 2 3 +REMAIN 4 1 5 4 +RELISSE 0 1 0 1 +RELIGIOUS 2 1 2 3 +RELIGHTED 0 1 0 1 +RELEASED 2 1 3 2 +RELATOR'S 0 1 0 1 +RELATION 2 1 2 3 +RELATED 3 1 4 3 +REJECTED 0 1 0 1 +REJECT 0 1 1 0 +REIN 0 1 0 1 +REIGNS 0 1 1 0 +REIGNED 0 1 1 0 +REIGN 1 1 1 2 +REGULATION 0 1 1 0 +REGULATING 0 1 0 1 +REGULAR 1 1 1 2 +REGARDING 2 1 3 2 +REGAN 0 1 0 1 +REG'LER 0 1 1 0 +REFUSE 2 1 2 3 +REFUGE 2 1 3 2 +REFUCER 0 1 0 1 +REFLECTING 0 1 0 1 +REFERENCE 2 1 3 2 +REELECTION 0 1 1 0 +REDUCED 1 1 2 1 +REDMOCKERS 0 1 0 1 +RECTUM 0 1 1 0 +RECTIM 0 1 0 1 +RECTIFY 0 1 1 0 +RECORDS 0 1 1 0 +RECKLESS 2 1 2 3 +RECITED 2 1 3 2 +RECEDED 0 1 1 0 +RECAPTURED 0 1 1 0 +REAP 0 1 1 0 +REALIZED 0 1 0 1 +REALIST 0 1 0 1 +REALISED 0 1 1 0 +READS 0 1 0 1 +RAYS 0 1 0 1 +RATTLING 2 1 3 2 +RATTERAS 0 1 0 1 +RATSKAGGS 0 1 0 1 +RATE 2 1 2 3 +RASCUE 0 1 0 1 +RASCALS 0 1 1 0 +RAPSCALLIONS 0 1 1 0 +RAPPERS 0 1 1 0 +RAPPA'S 0 1 0 1 +RAOUL 0 1 0 1 +RANSOM 7 1 8 7 +RANSES 0 1 0 1 +RANGED 0 1 0 1 +RANG 2 1 2 3 +RANDOM 0 1 0 1 +RANALD 0 1 0 1 +RAMSES 0 1 1 0 +RAM 3 1 4 3 +RAINY 1 1 2 1 +RAINING 0 1 0 1 +RAINED 0 1 0 1 +RAIN 2 1 3 2 +RAID 0 1 1 0 +RAGGED 0 1 0 1 +RAGATIRS 0 1 0 1 +RADPROP 0 1 1 0 +RADIAN 0 1 0 1 +RACKETEERS 0 1 1 0 +RACKED 0 1 1 0 +RACES 0 1 1 0 +RABBS 0 1 0 1 +RABBITS 0 1 1 0 +RABBIT'S 0 1 0 1 +RABB'S 0 1 1 0 +QUMMUKH 0 1 1 0 +QUITE 15 1 16 15 +QUICKU 0 1 0 1 +QUICKLY 5 1 5 6 +QUICK 4 1 5 4 +QUESTED 0 1 0 1 +QUANTRELL 0 1 1 0 +QUANTREL 0 1 0 1 +QUANTITIES 1 1 2 1 +QUALMS 0 1 0 1 +QUALITIES 1 1 1 2 +QUACKS 1 1 2 1 +PYRIS 0 1 0 1 +PYM 0 1 1 0 +PUTTING 0 1 1 0 +PUTTEL 0 1 1 0 +PUTS 2 1 3 2 +PUSSARA 0 1 0 1 +PUSH 1 1 1 2 +PUSE 0 1 0 1 +PUS 0 1 0 1 +PURSES 0 1 0 1 +PURPORTING 0 1 1 0 +PUREST 0 1 0 1 +PURCHANGLE 0 1 0 1 +PUMPED 0 1 0 1 +PUMP 0 1 1 0 +PULSE 0 1 1 0 +PULLLY 0 1 0 1 +PULLEY 0 1 1 0 +PULLED 3 1 4 3 +PULL 1 1 1 2 +PULCHEVIKI 0 1 0 1 +PUDDLES 0 1 1 0 +PSALMS 0 1 0 1 +PSALM 0 1 1 0 +PRYTANEUM 0 1 1 0 +PRUDENT 0 1 1 0 +PROVOCATOR 0 1 1 0 +PROVISED 0 1 0 1 +PROVED 2 1 2 3 +PROVE 5 1 5 6 +PROTS 0 1 0 1 +PROTECTORATE 0 1 1 0 +PROTECTOR 1 1 1 2 +PROPRE 0 1 1 0 +PROPER 2 1 2 3 +PRONOUNCE 1 1 1 2 +PROMOTIVE 0 1 1 0 +PROMOTED 0 1 0 1 +PROMIN 0 1 0 1 +PROITS 0 1 0 1 +PROHIBITION 0 1 1 0 +PROFESSIONS 0 1 0 1 +PROFESSION 3 1 4 3 +PRODUCES 0 1 1 0 +PROCLAS 0 1 0 1 +PROCEED 2 1 2 3 +PROCATE 0 1 0 1 +PROBES 0 1 0 1 +PRO 0 1 0 1 +PRIOR 0 1 0 1 +PRINCIPLES 0 1 0 1 +PRINCIPALS 0 1 1 0 +PRIMER 0 1 1 0 +PRIME 3 1 3 4 +PRIMARY 0 1 0 1 +PRICKED 0 1 0 1 +PRICE 2 1 2 3 +PRETTY 8 1 8 9 +PRESTIGE 1 1 2 1 +PRESERVE 1 1 2 1 +PRESENTERS 0 1 0 1 +PRESAGE 0 1 0 1 +PRENTICESHIP 0 1 1 0 +PRENTICE 0 1 0 1 +PRECEPTORS 1 1 2 1 +PRECENTORS 0 1 1 0 +PREACHED 0 1 1 0 +PRAYERS 2 1 3 2 +PRAY 5 1 5 6 +PRAM 0 1 1 0 +PRAISEWORTHY 0 1 1 0 +PRACTITIONER 0 1 1 0 +POUCHES 0 1 1 0 +POTUM 0 1 1 0 +POTTLES 0 1 0 1 +POTION 0 1 1 0 +POTENT 0 1 0 1 +POTASSIUM 0 1 1 0 +POTASSIAN 0 1 0 1 +POT 0 1 0 1 +POSTPLETED 0 1 0 1 +POSTHASTE 0 1 1 0 +POSTERN 0 1 1 0 +POSITION 6 1 7 6 +POSEY 0 1 0 1 +PORTO 0 1 1 0 +PORTMANTEAU 0 1 1 0 +PORTENTOUS 0 1 1 0 +PORT 1 1 1 2 +PORED 0 1 1 0 
+POPULOUS 0 1 0 1 +POPULACE 0 1 1 0 +POMMEROI 0 1 0 1 +POMEROY 0 1 1 0 +POLYTECHNIC 0 1 1 0 +POLY 0 1 0 1 +POLONIUS 0 1 1 0 +POLO 0 1 1 0 +POLLY 18 1 19 18 +POISPIRED 0 1 0 1 +POINT 9 1 10 9 +POEM 0 1 0 1 +POCKET 8 1 8 9 +PLUMB 0 1 1 0 +PLUM 0 1 0 1 +PLOT 1 1 1 2 +PLEASURE 8 1 8 9 +PLEASE 15 1 15 16 +PLEASANT 8 1 9 8 +PLEAS 0 1 1 0 +PLEADED 2 1 3 2 +PLAYING 0 1 1 0 +PLAYED 5 1 5 6 +PLAY 4 1 4 5 +PLATTERBUFF 0 1 0 1 +PLATTERBATH'S 0 1 0 1 +PLATTERBAFF'S 0 1 1 0 +PLATED 0 1 1 0 +PLATE 2 1 2 3 +PLANTED 0 1 1 0 +PLANNING 1 1 2 1 +PLANE 0 1 0 1 +PLACES 3 1 3 4 +PLACED 10 1 10 11 +PIUCHES 0 1 0 1 +PITTS 1 1 2 1 +PITTHAM 0 1 0 1 +PITHUM 0 1 1 0 +PITCHER 1 1 1 2 +PITCHED 0 1 0 1 +PISTOLES 0 1 1 0 +PISTOL 5 1 5 6 +PIRATES 0 1 0 1 +PIRASS 0 1 0 1 +PIPES 1 1 1 2 +PIPE 3 1 4 3 +PINTS 0 1 0 1 +PINKUS 0 1 1 0 +PINKERTON'S 0 1 1 0 +PINKERTENT'S 0 1 0 1 +PINIONS 0 1 0 1 +PINCHED 1 1 2 1 +PILLOWED 0 1 1 0 +PILLART 0 1 0 1 +PIKES 1 1 2 1 +PIGSKIN 0 1 1 0 +PIGEONOTE 0 1 0 1 +PIGEONCOTES 0 1 1 0 +PIERRE 0 1 0 1 +PIERO 0 1 1 0 +PICTURE 4 1 5 4 +PICTION 0 1 0 1 +PICKED 2 1 3 2 +PHYNICAL 0 1 0 1 +PHUT'S 0 1 1 0 +PHOSPHOBS 0 1 0 1 +PHOSPHER 0 1 0 1 +PHILISTINES 1 1 2 1 +PHILIPPUS 2 1 3 2 +PHAETON 0 1 0 1 +PETREL 0 1 1 0 +PETITIONERS 0 1 0 1 +PETER'S 1 1 2 1 +PETEL 0 1 0 1 +PESTS 0 1 0 1 +PESTE 0 1 1 0 +PERSPIRED 0 1 1 0 +PERSEUS 1 1 2 1 +PERNOUNCE 0 1 1 0 +PERHAPS 17 1 17 18 +PERFELLAR 0 1 0 1 +PERFECTUALLY 0 1 0 1 +PERFECTLY 4 1 5 4 +PERCHAL 0 1 0 1 +PERCEPTORS 0 1 0 1 +PERCEIVER 0 1 0 1 +PERAMBULATOR'S 0 1 1 0 +PEONAGE 0 1 1 0 +PENNYERS 0 1 0 1 +PENNEL 0 1 0 1 +PENDING 0 1 1 0 +PENCIL 1 1 1 2 +PENCE 0 1 1 0 +PEM 0 1 0 1 +PELLESTRA 0 1 0 1 +PEKAHIAH 0 1 1 0 +PEGGING 0 1 0 1 +PEGAS 0 1 0 1 +PECAH 0 1 0 1 +PEASE 0 1 0 1 +PEASANTS 5 1 6 5 +PEAS 1 1 2 1 +PEARLING 0 1 0 1 +PEARL 0 1 1 0 +PEACH 0 1 0 1 +PEACE 8 1 8 9 +PAY 10 1 11 10 +PAWNBROKER 1 1 2 1 +PAUSES 0 1 0 1 +PAUSED 2 1 3 2 +PAUL 0 1 0 1 +PATUM 0 1 0 1 +PATTERN 0 1 0 1 +PATIENCE 2 1 3 2 +PATH 1 1 1 2 +PATENTS 0 1 0 1 +PASTES 0 1 1 0 +PAST 9 1 10 9 +PASSING 3 1 3 4 +PASSES 0 1 1 0 +PARTS 4 1 5 4 +PARTNENT 0 1 0 1 +PARTISER 0 1 0 1 +PARTINGS 0 1 1 0 +PARTING 2 1 2 3 +PARTIALLY 0 1 0 1 +PARRICIDES 0 1 1 0 +PARKS 0 1 1 0 +PARKLEY 0 1 0 1 +PARISH 1 1 1 2 +PARENT 0 1 0 1 +PARDONABLE 0 1 1 0 +PARDON 9 1 10 9 +PARASITES 0 1 0 1 +PARASAN 0 1 0 1 +PAR 0 1 0 1 +PANNIERS 0 1 1 0 +PANEL 0 1 1 0 +PAN 3 1 3 4 +PALLIATE 0 1 1 0 +PALL 0 1 1 0 +PALAESTRA 0 1 1 0 +PALACHE'S 0 1 0 1 +PAKE 0 1 0 1 +PAIRS 0 1 0 1 +PAIR 5 1 6 5 +PADDLING 0 1 1 0 +PADDLIN 0 1 0 1 +PAD 0 1 0 1 +PACES 1 1 2 1 +PABRICAL 0 1 0 1 +P 1 1 2 1 +OZ 0 1 0 1 +OWNERS 1 1 2 1 +OWNED 2 1 2 3 +OWING 1 1 1 2 +OWESKEE 0 1 0 1 +OWE 1 1 2 1 +OW'M 0 1 1 0 +OW 0 1 1 0 +OVERWHELMING 0 1 0 1 +OVERRIPENESS 0 1 1 0 +OVERPRUDENT 0 1 0 1 +OVERHEARD 2 1 3 2 +OVERFULL 0 1 1 0 +OVERFLOWING 0 1 1 0 +OVERCLONE 0 1 0 1 +OUTWARDS 0 1 0 1 +OUTS 0 1 0 1 +OUTLINES 0 1 1 0 +OUTLINE 1 1 1 2 +OUTGAZE 0 1 0 1 +OUTER 1 1 2 1 +OUTDREW 0 1 0 1 +OURSPORT 0 1 0 1 +OURSAN 0 1 0 1 +OUNCES 2 1 3 2 +OUGHTN'T 0 1 1 0 +OUEN 0 1 1 0 +OTTO 0 1 1 0 +OTIAN 0 1 0 1 +OTHERS 17 1 17 18 +OTHER'S 1 1 2 1 +ORIENTOUINE 0 1 0 1 +ORIENTAL 0 1 1 0 +ORFICER 0 1 1 0 +ORFASTER 0 1 0 1 +ORE 0 1 0 1 +ORDERS 8 1 8 9 +ORDER 19 1 20 19 +ORBUS 0 1 0 1 +ORBIS 0 1 1 0 +OPPRESSORS 0 1 1 0 +OPPOSITION 2 1 2 3 +OPENS 2 1 3 2 +OPE 0 1 1 0 +ON'T 0 1 1 0 +OME 0 1 1 0 +OLY 0 1 0 1 +OLL 0 1 1 0 +OLIV 0 1 0 1 +OLI' 0 1 0 1 +OLDEST 1 1 1 2 +OKAY 0 1 1 0 +OIL 2 1 3 2 +OIKA 0 1 0 1 +OFFICIALS 0 1 0 1 +OFFICES 0 1 1 0 +OFFICER 2 1 3 2 +OFFENSE 0 1 1 0 +OFFENCE 2 1 2 3 +OFFEN 0 1 1 0 
+ODO 0 1 0 1 +ODD 2 1 3 2 +OCRE'S 0 1 0 1 +OCCUPIED 2 1 2 3 +OBSERVED 4 1 5 4 +OBOCOCK 0 1 1 0 +OBJECT 6 1 7 6 +OBEY 1 1 1 2 +O'NIGHTS 0 1 1 0 +O'NEILL 0 1 1 0 +O'NEIA 0 1 0 1 +NYTOUCH 0 1 1 0 +NUZHA'S 0 1 0 1 +NUTS 0 1 1 0 +NURTURE 0 1 0 1 +NUNS 2 1 3 2 +NUN'S 0 1 0 1 +NUMAN 0 1 0 1 +NUM 0 1 0 1 +NUISANCE 0 1 1 0 +NU'UMAN 0 1 1 0 +NOWT 0 1 1 0 +NOTTINGHAM 0 1 1 0 +NOTICED 3 1 3 4 +NOTE 3 1 3 4 +NOTCH 0 1 0 1 +NORTHEAST 0 1 0 1 +NOPE 0 1 1 0 +NONETHELESS 0 1 1 0 +NOISE 5 1 5 6 +NOBLY 0 1 0 1 +NINE 6 1 6 7 +NIKOLAY 0 1 1 0 +NIGO 0 1 0 1 +NIGHTLY 0 1 0 1 +NIGHT'S 1 1 1 2 +NIGH 1 1 1 2 +NICO 0 1 1 0 +NICKEL 0 1 1 0 +NICCLAY 0 1 0 1 +NEXTER 0 1 1 0 +NEXT 10 1 10 11 +NEWS 7 1 7 8 +NEWER 0 1 0 1 +NERVES 1 1 1 2 +NERVE 0 1 1 0 +NEOSHO 0 1 1 0 +NEOSH 0 1 0 1 +NELLO 0 1 0 1 +NEKHLUD 0 1 0 1 +NEIGHBOURING 0 1 1 0 +NEIGHBORS 4 1 5 4 +NEIGHBORING 2 1 2 3 +NEEDS 8 1 8 9 +NEEDLED 0 1 1 0 +NEEDLE 0 1 0 1 +NEEDED 9 1 9 10 +NEED 8 1 9 8 +NED 3 1 3 4 +NEAT 3 1 3 4 +NEARER 3 1 4 3 +NEAR 16 1 16 17 +NAUGHTY 0 1 0 1 +NAUGHT 0 1 1 0 +NAT 0 1 0 1 +NARRATIVES 1 1 2 1 +NARRATIVE 1 1 1 2 +NARCOTIC 0 1 1 0 +NANDY'S 0 1 1 0 +NAKOTIC 0 1 0 1 +NABRAMAN 0 1 0 1 +N 0 1 1 0 +MYSTERY 0 1 0 1 +MYSTERIOUS 5 1 6 5 +MYRTALIS 0 1 0 1 +MUSTACHES 0 1 1 0 +MUSKETS 0 1 1 0 +MUSICIANS 0 1 1 0 +MURKED 0 1 0 1 +MURDOCH'S 0 1 1 0 +MULES 0 1 0 1 +MUIR 0 1 1 0 +MUG 0 1 1 0 +MUD 0 1 1 0 +MOWER 0 1 1 0 +MOVES 0 1 0 1 +MOVEMENT 1 1 2 1 +MOUTHWHAT 0 1 1 0 +MOUTHS 0 1 1 0 +MOUTHFUL 0 1 0 1 +MOUSTACHES 0 1 0 1 +MOUSE 1 1 1 2 +MOURNING 2 1 3 2 +MOUNTNORRIS 0 1 1 0 +MOUNT 0 1 1 0 +MOUND 0 1 0 1 +MOTIONLESS 0 1 1 0 +MOTHS 0 1 0 1 +MOTHER 51 1 52 51 +MORVE 0 1 0 1 +MORTUS 0 1 0 1 +MORTIS 0 1 1 0 +MORTIFICATIONTHAT 0 1 1 0 +MORTIFICATION 0 1 0 1 +MORTEM 0 1 1 0 +MORNING 21 1 21 22 +MORLEY 0 1 0 1 +MORE'N 0 1 1 0 +MOPED 0 1 1 0 +MOOR 2 1 3 2 +MOONLIGHT 2 1 3 2 +MONTORAS 0 1 0 1 +MONTHS 6 1 6 7 +MONTHLY 0 1 1 0 +MONSIEUR 0 1 0 1 +MONSHADE 0 1 0 1 +MONSEIGNEUR 5 1 6 5 +MONKERS 0 1 1 0 +MOMMOL 0 1 1 0 +MOMENT 24 1 24 25 +MOLL 0 1 0 1 +MOLASTIC 0 1 0 1 +MODERATE 1 1 2 1 +MODE 0 1 0 1 +MOCHER 0 1 0 1 +MO 0 1 1 0 +MIXTURE 0 1 1 0 +MIXED 2 1 2 3 +MIX 2 1 3 2 +MISTING 0 1 0 1 +MISTAKES 0 1 0 1 +MISTABILATION 0 1 0 1 +MIST 1 1 2 1 +MISSY 0 1 0 1 +MISSION 0 1 0 1 +MISSILEVITCH 0 1 0 1 +MISSED 4 1 5 4 +MISERC 0 1 0 1 +MISCHIEVOUS 1 1 2 1 +MINUTELY 0 1 1 0 +MINOR 1 1 1 2 +MINNIE 1 1 2 1 +MINNEAPOLIS 0 1 1 0 +MINISTERS 0 1 1 0 +MINISTER 6 1 7 6 +MINIONETTE 0 1 1 0 +MINDER 0 1 0 1 +MIMICK 0 1 1 0 +MIMIC 0 1 0 1 +MILLY 0 1 1 0 +MILLSTONE 0 1 0 1 +MILLSTON 0 1 1 0 +MILLION 0 1 0 1 +MILLICENT 0 1 0 1 +MILICENT'S 0 1 1 0 +MILFRED 0 1 0 1 +MILDEWED 0 1 1 0 +MIKE 2 1 2 3 +MIHI 0 1 1 0 +MIDSER 0 1 0 1 +MIDRIFTS 0 1 0 1 +MIDRIFF 0 1 1 0 +MIDDY 0 1 1 0 +MIDDLING 0 1 1 0 +MIDDLIN 0 1 0 1 +MIDDI 0 1 0 1 +MID 0 1 0 1 +MICROCLE 0 1 0 1 +MICHANG 0 1 0 1 +METHINKETH 0 1 1 0 +METHINK 0 1 0 1 +MESTIENNE'S 0 1 1 0 +MESSIE'S 0 1 0 1 +MESSES 0 1 1 0 +MESS' 0 1 0 1 +MESS 0 1 0 1 +MERTOLUS 0 1 0 1 +MERTELUS 0 1 0 1 +MERRYMAKING 0 1 0 1 +MERRY 2 1 3 2 +MERNEPTAH 0 1 1 0 +MERLINUS 0 1 0 1 +MERLINA'S 0 1 0 1 +MERELY 6 1 6 7 +MER 0 1 0 1 +MENTAL 2 1 3 2 +MENST 0 1 0 1 +MENDIAN 0 1 0 1 +MEND 0 1 1 0 +MEN'S 0 1 1 0 +MELTED 1 1 1 2 +MELLICENT 0 1 0 1 +MEDIUM 1 1 1 2 +MEDICTS 0 1 0 1 +MEDICAMENTS 0 1 1 0 +MEDICAL 1 1 2 1 +MEDICA 0 1 0 1 +MEDIAN 0 1 1 0 +MEDDLE 0 1 1 0 +MEDAL 1 1 1 2 +MEANESTER 0 1 0 1 +MAYNUM 0 1 0 1 +MATURITY 0 1 0 1 +MATURES 0 1 0 1 +MATTHOR 0 1 0 1 +MATTERS 3 1 4 3 +MATRON 0 1 0 1 +MATEY 0 1 1 0 +MATERIALS 1 1 2 1 +MASSR 0 1 0 1 +MASSES 1 1 1 2 +MASSED 0 1 0 1 +MASKETS 0 1 
0 1 +MASKED 0 1 1 0 +MARVELLED 0 1 0 1 +MARVELED 0 1 1 0 +MARTYR 0 1 1 0 +MARTIAL 0 1 0 1 +MARSPEAKER 0 1 1 0 +MARRIT 0 1 0 1 +MARMALADES 1 1 2 1 +MARMAL 0 1 0 1 +MARLAN 0 1 0 1 +MARKEER 0 1 0 1 +MARK 4 1 4 5 +MARJORIE 2 1 3 2 +MARGERY 0 1 0 1 +MARGAR 0 1 0 1 +MARES 0 1 0 1 +MARDOX 0 1 0 1 +MARDOCK 0 1 0 1 +MARDOC'S 0 1 0 1 +MANTLE 0 1 0 1 +MANTI 0 1 0 1 +MANETTE 0 1 0 1 +MANCAO 0 1 0 1 +MANASSEH 0 1 1 0 +MAN'S 13 1 13 14 +MAMMA 1 1 1 2 +MAMIE 0 1 1 0 +MALTRICIAN 0 1 0 1 +MALNUTRITION 0 1 1 0 +MAKING 17 1 18 17 +MAKES 10 1 11 10 +MAJORITY 5 1 6 5 +MAINE 0 1 1 0 +MAIN 1 1 1 2 +MAIL 0 1 1 0 +MAID 2 1 3 2 +MADGE 0 1 0 1 +MACKING 0 1 0 1 +MACHINEROUS 0 1 0 1 +MACAN 0 1 0 1 +MAC 0 1 0 1 +MABILLON 0 1 1 0 +MA 0 1 0 1 +LYSIMACHUS 0 1 1 0 +LYING 2 1 2 3 +LUNDY'S 0 1 0 1 +LUNAR'S 0 1 0 1 +LUKE 3 1 4 3 +LUGGAGE 0 1 1 0 +LUCK 2 1 3 2 +LUCID 0 1 1 0 +LUCIAN 0 1 0 1 +LOYAL 2 1 2 3 +LOWERING 2 1 3 2 +LOWER 2 1 3 2 +LOWBOURNE 0 1 0 1 +LOWBORN 0 1 0 1 +LOUIS 3 1 3 4 +LOUD 7 1 8 7 +LOT 6 1 6 7 +LOST 11 1 12 11 +LOSSES 0 1 1 0 +LOSES 0 1 0 1 +LORD'S 0 1 1 0 +LOQUACITY 0 1 1 0 +LOOKOUT 0 1 1 0 +LONGER 15 1 16 15 +LONESOMENESS 0 1 1 0 +LONESOME 1 1 1 2 +LONE 0 1 1 0 +LOKI 0 1 0 1 +LOIS 0 1 0 1 +LOCTICE 0 1 0 1 +LOBSOR 0 1 0 1 +LOBS 0 1 0 1 +LOATHS 0 1 0 1 +LIZZLING 0 1 0 1 +LIZZLEY 0 1 0 1 +LIZABETH 0 1 1 0 +LIVES 5 1 6 5 +LIT 2 1 2 3 +LISTENED 4 1 4 5 +LISTEN 10 1 11 10 +LISMACHUS 0 1 0 1 +LISBETH 0 1 0 1 +LISALLY 0 1 0 1 +LINLESILY 0 1 0 1 +LINENSHIPS 0 1 0 1 +LIMPED 3 1 4 3 +LIMP 3 1 3 4 +LIME 0 1 1 0 +LILY 0 1 0 1 +LIKELY 2 1 3 2 +LIGHTS 1 1 1 2 +LIGHTFOOTED 0 1 0 1 +LIGHTED 0 1 0 1 +LIGHT 18 1 19 18 +LIFT 1 1 1 2 +LIFE 54 1 55 54 +LIAISON 0 1 1 0 +LEXINGTON 0 1 1 0 +LEWIS 0 1 1 0 +LEVELLY 0 1 0 1 +LETTER 22 1 22 23 +LET'S 3 1 4 3 +LESSON 1 1 2 1 +LEQUEST 0 1 0 1 +LENS 0 1 0 1 +LENOIR 0 1 1 0 +LENIN 1 1 2 1 +LEND 1 1 2 1 +LEMON 0 1 1 0 +LEISURESHIP 0 1 0 1 +LEISURE 3 1 3 4 +LEGS 2 1 3 2 +LEGGED 0 1 0 1 +LEER 0 1 1 0 +LEE'S 0 1 1 0 +LEE 0 1 0 1 +LEDA 0 1 0 1 +LED 4 1 4 5 +LECINGTON 0 1 0 1 +LEAVING 5 1 5 6 +LEAVED 1 1 2 1 +LEARNED 6 1 6 7 +LEARNAN 0 1 0 1 +LEARN 4 1 5 4 +LEAR 0 1 0 1 +LEAPT 0 1 1 0 +LEAPED 1 1 1 2 +LEAP 1 1 1 2 +LEAF 0 1 1 0 +LEADPENCIL 0 1 1 0 +LEADERSHIP 0 1 1 0 +LEADERS 1 1 2 1 +LEADER'S 0 1 0 1 +LEAD 2 1 2 3 +LE 0 1 0 1 +LAYS 0 1 0 1 +LAY 14 1 14 15 +LAWS 2 1 3 2 +LAWN 0 1 0 1 +LAURA 3 1 3 4 +LAUDIDAL 0 1 0 1 +LATH 0 1 1 0 +LASS 0 1 0 1 +LASH 1 1 2 1 +LARD 0 1 0 1 +LANY 0 1 0 1 +LANE 2 1 2 3 +LANDOWNERS 1 1 1 2 +LANDI 0 1 1 0 +LANDEE 0 1 0 1 +LAMPS 0 1 0 1 +LAMBS 0 1 1 0 +LAIN 1 1 2 1 +LAID 8 1 9 8 +LAG 0 1 0 1 +LADS 1 1 2 1 +LADIES 6 1 6 7 +LADDERS 0 1 0 1 +LACKRY 0 1 0 1 +LACHRYMA 0 1 1 0 +LABOURERS 0 1 1 0 +LABOUR 0 1 1 0 +LABORS 0 1 0 1 +LABORERS 0 1 0 1 +LABOR 1 1 1 2 +KOY 0 1 0 1 +KNOWSAT 0 1 0 1 +KNOTTY 0 1 1 0 +KNOTS 0 1 0 1 +KNOT 2 1 3 2 +KNOBBLY 0 1 1 0 +KNIFE 0 1 0 1 +KLEPTOMANIAC 0 1 1 0 +KLEPTOMANIA 0 1 1 0 +KITTY 3 1 4 3 +KITRAL 0 1 0 1 +KINDRED 1 1 1 2 +KINDNESS 7 1 7 8 +KINDER 1 1 2 1 +KIN 0 1 0 1 +KILLS 0 1 1 0 +KILL 14 1 14 15 +KIDDAM 0 1 0 1 +KID 1 1 2 1 +KICKIE 0 1 0 1 +KICK 0 1 1 0 +KEY 4 1 5 4 +KETTLE 0 1 1 0 +KERSTALL 0 1 1 0 +KEPT 9 1 9 10 +KENNITES 0 1 0 1 +KENITES 1 1 2 1 +KEI 0 1 0 1 +KEEN 1 1 2 1 +KEDEM 0 1 1 0 +KATY 0 1 0 1 +KARA 0 1 0 1 +KAMAR 0 1 1 0 +KAL 0 1 0 1 +JUSTIFIED 1 1 2 1 +JURISDICTION 0 1 1 0 +JURIS 0 1 0 1 +JUNIOR 0 1 1 0 +JUNE'S 0 1 0 1 +JUMPS 1 1 2 1 +JUMPED 1 1 1 2 +JULIEN 1 1 2 1 +JULIE 0 1 0 1 +JULIAN 0 1 0 1 +JULIA 0 1 0 1 +JUDICINES 0 1 0 1 +JUDAH 1 1 2 1 +JUBANCE 0 1 0 1 +JUANS 0 1 0 1 +JOUVIN'S 0 1 1 0 +JOSHUA 0 1 1 0 +JOSCELYN 0 1 0 
1 +JOKINGLY 0 1 1 0 +JOINTMENT 0 1 0 1 +JOINING 0 1 1 0 +JOINED 1 1 2 1 +JOINCE 0 1 0 1 +JOHNSHAW 0 1 0 1 +JOCELYN'S 0 1 1 0 +JO 0 1 0 1 +JIS 0 1 1 0 +JIM 4 1 5 4 +JILT 0 1 1 0 +JEW 1 1 1 2 +JETS 0 1 0 1 +JESTER 1 1 2 1 +JERRY 1 1 1 2 +JERED 0 1 0 1 +JEMOSIS 0 1 0 1 +JEHU 0 1 1 0 +JEHOV 0 1 0 1 +JEHOASH 0 1 1 0 +JEERED 0 1 1 0 +JEDGE 0 1 1 0 +JEALOUS 0 1 0 1 +JARS 0 1 0 1 +JARDA 0 1 0 1 +JAPHANE 0 1 0 1 +JAPANE 0 1 0 1 +JANSENIST 0 1 1 0 +JANEERO 0 1 1 0 +JANE 6 1 7 6 +JAMES'S 0 1 0 1 +JAKEY'S 0 1 1 0 +JAG 0 1 0 1 +JACKMAN 0 1 1 0 +IZZY'S 0 1 1 0 +IZZIE'S 0 1 0 1 +ITSELF 6 1 7 6 +ITSEL 0 1 0 1 +ITALIANS 0 1 0 1 +IT'LL 1 1 1 2 +ISSUE 2 1 2 3 +ISRAIT 0 1 0 1 +ISRAELITES 0 1 1 0 +ISRAEL'S 1 1 2 1 +ISRA'S 0 1 0 1 +ISLAMMISED 0 1 0 1 +ISLAMISED 0 1 1 0 +ISLAM 0 1 1 0 +ISIS 0 1 0 1 +ISAU 0 1 0 1 +ISAAC 1 1 2 1 +IRRESPONSIBLE 0 1 0 1 +IRRES 0 1 0 1 +IRONICAL 0 1 1 0 +IRONIC 0 1 0 1 +IRCH 0 1 0 1 +IPES 0 1 0 1 +IOWA 1 1 2 1 +IOPIUS 0 1 0 1 +INVOLUNTE 0 1 0 1 +INVITED 1 1 1 2 +INVALIDES 0 1 1 0 +INVALIDE 0 1 0 1 +INVADE 0 1 1 0 +INTONING 0 1 0 1 +INTERPLIES 0 1 0 1 +INTER 0 1 0 1 +INTENSE 3 1 3 4 +INTEND 1 1 2 1 +INTELLECTUALLY 0 1 1 0 +INTELLECTUAL 0 1 0 1 +INSTRUCTIVE 0 1 0 1 +INSTRUCTED 0 1 1 0 +INSTANTLY 0 1 1 0 +INSTANDED 0 1 0 1 +INSPECTRE 0 1 0 1 +INSISTENCE 0 1 1 0 +INSISTANTS 0 1 0 1 +INSIDE 4 1 4 5 +INSENT 0 1 0 1 +INSECTORS 0 1 0 1 +INSCRIPTIONS 1 1 2 1 +INQUIRE 1 1 2 1 +INNES 0 1 1 0 +INLORING 0 1 0 1 +INJURE 2 1 3 2 +INGENUOUSLY 0 1 0 1 +INGENIOUSLY 0 1 1 0 +INFECTED 0 1 0 1 +INFAMOUS 0 1 1 0 +INDURE 0 1 0 1 +INDULGE 0 1 0 1 +INDEED 13 1 14 13 +INDE 0 1 0 1 +INCOUPS 0 1 0 1 +INCONSTANT 0 1 0 1 +INCOMPARABLE 0 1 1 0 +INCLINE 1 1 1 2 +IMPROVISED 1 1 2 1 +IMPROVED 0 1 1 0 +IMPRESSORS 0 1 0 1 +IMPLY 1 1 2 1 +IMPINCED 0 1 0 1 +IMPALION 0 1 0 1 +IMMENSE 2 1 3 2 +IMAGINE 6 1 6 7 +ILLS 0 1 1 0 +ILLIS 0 1 0 1 +ILIU 0 1 0 1 +IKOLOITS 0 1 0 1 +IISES 0 1 0 1 +IGNOMY 0 1 1 0 +IDOLENCE 0 1 0 1 +IDEA 10 1 11 10 +ICE 0 1 1 0 +I'FAITH 0 1 1 0 +HYCOMICAL 0 1 0 1 +HUSBATH 0 1 0 1 +HUNTERS 0 1 1 0 +HUNGRY 1 1 2 1 +HUNGER 2 1 2 3 +HUMOURS 0 1 1 0 +HUMOUR 1 1 1 2 +HUMOR 0 1 1 0 +HUMANS 0 1 0 1 +HUH 0 1 1 0 +HUDSPETH 0 1 1 0 +HOUSEHOLTS 0 1 0 1 +HOUSEHOLD 0 1 1 0 +HOURSERVES 0 1 0 1 +HOURS 12 1 12 13 +HOUNDED 0 1 1 0 +HOSPITABLY 0 1 1 0 +HORDE 0 1 1 0 +HOPPING 0 1 1 0 +HOPES 0 1 0 1 +HOOKER'S 0 1 0 1 +HOO'LL 0 1 1 0 +HOO 0 1 1 0 +HONESTLY 0 1 1 0 +HONEST 5 1 6 5 +HOMEPUSH 0 1 1 0 +HOLY 5 1 6 5 +HOLLERED 0 1 0 1 +HOLLER 0 1 1 0 +HOLLAND 0 1 0 1 +HOF 0 1 1 0 +HODE 0 1 0 1 +HOARD 0 1 0 1 +HISTORY 3 1 4 3 +HISSELF 0 1 1 0 +HIRED 0 1 0 1 +HIRAKEE 0 1 0 1 +HINFIELD 0 1 0 1 +HINDFELL 0 1 1 0 +HIMSELF 51 1 52 51 +HIJAZ 0 1 1 0 +HIGHS 0 1 1 0 +HIGH 7 1 8 7 +HIES 0 1 0 1 +HIERARCHY 0 1 1 0 +HIDE 2 1 2 3 +HIDDEN 3 1 3 4 +HID 2 1 2 3 +HEYDAY 0 1 1 0 +HEWN 0 1 1 0 +HESTERITY 0 1 0 1 +HESITATINGLY 0 1 0 1 +HERSELF 35 1 35 36 +HERIOT'S 0 1 1 0 +HERETT'S 0 1 0 1 +HERETOFORE 0 1 1 0 +HERDSMEN 0 1 1 0 +HERDSMAN 0 1 0 1 +HERBERT 0 1 0 1 +HERALD 0 1 0 1 +HEPTARK 0 1 0 1 +HEPTARCHIES 0 1 1 0 +HEN 0 1 1 0 +HELVIN 0 1 1 0 +HELVAN 0 1 0 1 +HELPS 0 1 0 1 +HELPED 2 1 3 2 +HELP 17 1 17 18 +HELM 1 1 2 1 +HEELED 0 1 1 0 +HEEDED 0 1 1 0 +HEDGES 0 1 1 0 +HEBREW 1 1 2 1 +HEAT 1 1 1 2 +HEARTY 2 1 3 2 +HEARTS 4 1 5 4 +HEARSE 3 1 4 3 +HEALTH 1 1 1 2 +HEALED 1 1 1 2 +HEADS 4 1 4 5 +HEADQUARTERS 1 1 2 1 +HEADQUARTER'S 0 1 0 1 +HEADLONG 0 1 1 0 +HEADLIGHTS 0 1 1 0 +HE'LL 2 1 2 3 +HAWHAT 0 1 0 1 +HAWED 0 1 1 0 +HAW 0 1 1 0 +HAVING 22 1 22 23 +HAVEN 0 1 1 0 +HAV 0 1 0 1 +HATTERSLEY 0 1 1 0 +HATE 4 1 4 5 +HATCHES 0 1 0 1 +HASAN 1 1 2 1 
+HARVEY'SWHICH 0 1 1 0 +HARVEST 3 1 3 4 +HARRIS 1 1 2 1 +HAROLD 0 1 1 0 +HARKNESS 0 1 1 0 +HARKINS 0 1 0 1 +HARGREAVE 0 1 0 1 +HARGRAVE 0 1 1 0 +HARE 0 1 1 0 +HARDWARE 0 1 1 0 +HARDLY 9 1 10 9 +HARDINGEN 0 1 0 1 +HAPPY 7 1 7 8 +HAPPILY 1 1 1 2 +HAPPENED 9 1 10 9 +HAPLY 0 1 1 0 +HANTINGDON 0 1 0 1 +HANDY 0 1 0 1 +HANDLES 0 1 0 1 +HAM 0 1 0 1 +HALTS 0 1 1 0 +HALTERSLEY 0 1 0 1 +HALT 4 1 5 4 +HALEY'S 0 1 1 0 +HALELY 0 1 0 1 +HAIRED 1 1 1 2 +HAIN'T 1 1 1 2 +HAIL 1 1 1 2 +HAHMON'S 0 1 0 1 +HADN'T 1 1 1 2 +HADDA 0 1 1 0 +GYPTIAN 0 1 0 1 +GYLINGDEN 0 1 1 0 +GUY'S 0 1 0 1 +GUV'NER 0 1 0 1 +GURSER 0 1 0 1 +GUNS 2 1 3 2 +GUNDRAIN 0 1 0 1 +GUNDERING 0 1 0 1 +GULPED 0 1 1 0 +GULLET 0 1 1 0 +GULFS 0 1 0 1 +GUISE 0 1 1 0 +GUIRUN'S 0 1 1 0 +GUINEAS 0 1 0 1 +GUINEA 2 1 3 2 +GUILD 0 1 1 0 +GUESS 4 1 5 4 +GUERAGE 0 1 0 1 +GUDRUN 0 1 1 0 +GUARDING 0 1 0 1 +GRUMBLINGLY 0 1 1 0 +GRUFFLY 0 1 1 0 +GROWING 2 1 3 2 +GROW 1 1 1 2 +GROVE 0 1 1 0 +GROUND 6 1 6 7 +GROAN 1 1 2 1 +GRIS 0 1 0 1 +GRINDING 0 1 0 1 +GRIMSBY 0 1 1 0 +GRIM'S 0 1 0 1 +GRIGGLY 0 1 0 1 +GRIBIER 0 1 1 0 +GRIBES 0 1 0 1 +GREENTON 0 1 1 0 +GREENS 0 1 0 1 +GREENBACKS 0 1 1 0 +GREEN 2 1 2 3 +GREEBS 0 1 0 1 +GRECIOUS 0 1 0 1 +GREAVES 0 1 1 0 +GREAT 40 1 40 41 +GRAVE 16 1 17 16 +GRATITUDE 4 1 5 4 +GRASPS 0 1 1 0 +GRASPED 1 1 1 2 +GRAPPLE 0 1 1 0 +GRANTEL 0 1 0 1 +GRANDAME 0 1 1 0 +GRAND 5 1 5 6 +GRAN'PAP 0 1 0 1 +GRAMMER 0 1 0 1 +GRAMMEN 0 1 0 1 +GRAMMATIUS 0 1 0 1 +GRAMMATEUS 0 1 1 0 +GRAM 0 1 1 0 +GRAHAME 0 1 0 1 +GRAFTON'S 0 1 1 0 +GRAFTON 1 1 1 2 +GRACIOUS 2 1 3 2 +GRACIAN 0 1 0 1 +GOWN 0 1 1 0 +GOVERNOR 2 1 2 3 +GOVERNMENTS 2 1 3 2 +GOVERNMENT'S 0 1 1 0 +GOVERN 0 1 0 1 +GORDON'S 2 1 2 3 +GORD 0 1 0 1 +GOODS 4 1 5 4 +GONDON 0 1 0 1 +GOLDS 0 1 0 1 +GOLDFISH 0 1 1 0 +GOLDEN 2 1 3 2 +GODEBILLIOS 0 1 1 0 +GOBYS 0 1 0 1 +GOBIUS 0 1 0 1 +GOBIES 0 1 0 1 +GNAWING 0 1 1 0 +GLISPIN 1 1 2 1 +GLAY 0 1 0 1 +GLASS 9 1 9 10 +GLAD 4 1 5 4 +GIVIN 0 1 0 1 +GIVANOVITCH 0 1 0 1 +GITS 0 1 0 1 +GIRTHING 0 1 1 0 +GIRTHED 0 1 1 0 +GIRT 0 1 0 1 +GIRLS 5 1 6 5 +GIRDS 0 1 1 0 +GIRDING 0 1 0 1 +GIRDED 0 1 0 1 +GIRD 0 1 0 1 +GIR 0 1 0 1 +GIMER 0 1 0 1 +GILINGDEN 0 1 0 1 +GIGS 0 1 0 1 +GIGERIS'S 0 1 0 1 +GIFTS 0 1 1 0 +GESTURE 1 1 1 2 +GES 0 1 0 1 +GERFATHER 0 1 0 1 +GEORGE'SWHICH 0 1 1 0 +GEORGE 2 1 2 3 +GENTLEST 0 1 0 1 +GENTLEMEN'S 0 1 1 0 +GENTLEMAN'S 0 1 0 1 +GENIOR 0 1 0 1 +GENERO 0 1 0 1 +GAZE 3 1 4 3 +GAUTHIER 0 1 1 0 +GAULS 0 1 1 0 +GATHIER 0 1 0 1 +GATHERED 8 1 8 9 +GASTENED 0 1 0 1 +GASHED 0 1 1 0 +GASH 0 1 0 1 +GARDENS 0 1 0 1 +GARDEN'S 0 1 1 0 +GAMMON 0 1 1 0 +GAMIN 0 1 0 1 +GAME 6 1 6 7 +GALLOP 0 1 0 1 +GALLATIN 0 1 1 0 +GALLANT 3 1 3 4 +GAINED 1 1 1 2 +G'YIRLS 0 1 1 0 +FUZZ 0 1 1 0 +FUZ 0 1 0 1 +FUTURE 3 1 3 4 +FUSS 2 1 2 3 +FURZE 0 1 1 0 +FUROV'S 0 1 0 1 +FUR 1 1 1 2 +FUPS 0 1 0 1 +FUNDS 0 1 1 0 +FUND 1 1 1 2 +FUN 2 1 2 3 +FULCAL 0 1 0 1 +FUDGE 0 1 0 1 +FRUITS 3 1 4 3 +FROZE 0 1 1 0 +FRONTIERS 1 1 2 1 +FRONTIER 0 1 0 1 +FROIS 0 1 0 1 +FROGS 2 1 2 3 +FROG'S 0 1 1 0 +FRO 1 1 2 1 +FRITTEN 0 1 0 1 +FRISTOW 0 1 0 1 +FRISTOE'S 0 1 1 0 +FRIGHTFUL 3 1 4 3 +FRIGHTENS 0 1 1 0 +FRIENDLY 4 1 4 5 +FRIEND 13 1 14 13 +FRIED 0 1 0 1 +FRIAR 0 1 1 0 +FRET 1 1 2 1 +FRENCHARD 0 1 0 1 +FRENCH 4 1 5 4 +FREEZWAMEN 0 1 0 1 +FREEWAY 0 1 1 0 +FREES 0 1 1 0 +FREEDOM 5 1 6 5 +FREDERI 0 1 0 1 +FRED 0 1 0 1 +FRANK 1 1 1 2 +FRANJAMIN 0 1 0 1 +FRANCOIS 0 1 1 0 +FRANCISCO 4 1 5 4 +FRANC 0 1 1 0 +FOURTEENTHAT'S 0 1 1 0 +FOURTEEN 3 1 3 4 +FOUNDER 0 1 0 1 +FOUNDED 1 1 2 1 +FOUND 37 1 37 38 +FOUGHT 1 1 2 1 +FOSSES 0 1 0 1 +FORTY 12 1 13 12 +FORTUNE 6 1 6 7 +FORTS 0 1 1 0 +FORTE 0 1 0 1 +FORMER 3 1 
4 3 +FORMED 1 1 2 1 +FORM 9 1 9 10 +FORGIFTS 0 1 0 1 +FOREMOTHER 0 1 0 1 +FOREMAN 0 1 1 0 +FOREGATHERED 0 1 1 0 +FOREBOARDS 0 1 0 1 +FORE 0 1 0 1 +FORCEMENT 0 1 0 1 +FORCED 3 1 4 3 +FOOTED 0 1 1 0 +FOOD'S 0 1 0 1 +FOOD 3 1 4 3 +FONTREVAL 0 1 0 1 +FONTEVRAULT 0 1 1 0 +FONDS 0 1 0 1 +FOLLY 1 1 1 2 +FOLLOWS 5 1 6 5 +FOLLOWER 0 1 1 0 +FOLK 4 1 4 5 +FOE 0 1 1 0 +FLUTTERING 1 1 2 1 +FLOWERBEDS 0 1 1 0 +FLOWER 1 1 1 2 +FLOW 0 1 1 0 +FLOSSY 0 1 1 0 +FLOSSIE 0 1 0 1 +FLORA'S 0 1 0 1 +FLOORBOARDS 0 1 1 0 +FLOOR 3 1 3 4 +FLIND 0 1 0 1 +FLIES 0 1 0 1 +FLEERED 0 1 1 0 +FLAVOUR 0 1 0 1 +FLAVORIT 0 1 0 1 +FLATTERER 0 1 1 0 +FLATTERED 0 1 1 0 +FLATHEADS 0 1 1 0 +FLAT 0 1 0 1 +FLASHLIGHT 0 1 1 0 +FLASH 2 1 2 3 +FLAREFF 0 1 0 1 +FLARED 0 1 0 1 +FLARE 0 1 0 1 +FLAP 0 1 0 1 +FLABRA 0 1 0 1 +FLABBERGASTED 0 1 1 0 +FIVE 20 1 20 21 +FITZ 0 1 0 1 +FISHING 2 1 3 2 +FISHIN 1 1 1 2 +FISHED 1 1 2 1 +FIRSTLY 0 1 0 1 +FIRSTER 0 1 1 0 +FIRS 0 1 0 1 +FIRE 14 1 15 14 +FINISHED 3 1 3 4 +FINICAL 0 1 1 0 +FINELY 1 1 2 1 +FINDING 7 1 8 7 +FINAL 2 1 2 3 +FILTRATES 0 1 1 0 +FILTRATE 0 1 1 0 +FILLS 1 1 1 2 +FIGURES 0 1 0 1 +FIGURED 0 1 0 1 +FIGURE'S 0 1 1 0 +FIGGER 0 1 1 0 +FIFTEENTH 0 1 1 0 +FIFTEEN 7 1 7 8 +FERVENT 0 1 0 1 +FELT 18 1 19 18 +FEELS 1 1 2 1 +FEELING 10 1 10 11 +FEEL 12 1 13 12 +FEEDS 0 1 1 0 +FEED 0 1 1 0 +FEATS 0 1 0 1 +FEAT 0 1 0 1 +FAVAN 0 1 0 1 +FAULTS 2 1 2 3 +FAULT 5 1 5 6 +FAUCES 0 1 1 0 +FATS 1 1 2 1 +FATHERS 1 1 1 2 +FATE 4 1 5 4 +FAT'S 0 1 0 1 +FASTENER'S 0 1 0 1 +FARTHIAN'S 0 1 0 1 +FARNDER 0 1 0 1 +FARINGDER 0 1 0 1 +FARE 0 1 1 0 +FANGED 0 1 1 0 +FANCY 4 1 4 5 +FAMOUS 1 1 1 2 +FAMILY 18 1 18 19 +FAME 1 1 2 1 +FAM'LY 0 1 1 0 +FALLING 5 1 6 5 +FALL 4 1 5 4 +FAIR 12 1 13 12 +FAILING 2 1 3 2 +FAILED 7 1 8 7 +FAFNER'S 0 1 0 1 +FAFNER 0 1 0 1 +FAFFNER 0 1 0 1 +FACT 14 1 14 15 +FACED 2 1 2 3 +EXTRAVE 0 1 0 1 +EXTRACTED 0 1 0 1 +EXTRACT 2 1 3 2 +EXTRA 1 1 2 1 +EXTINUATING 0 1 0 1 +EXTINGUISHING 0 1 1 0 +EXTENUATING 0 1 1 0 +EXTEND 0 1 1 0 +EXPOUNDS 0 1 0 1 +EXPOSED 3 1 3 4 +EXPOSE 2 1 3 2 +EXPLOITING 0 1 1 0 +EXPLODING 0 1 0 1 +EXPLAINED 4 1 4 5 +EXPERIOR 0 1 0 1 +EXPERIENCE 3 1 4 3 +EXPELLED 1 1 1 2 +EXPEL 0 1 1 0 +EXPECTED 7 1 8 7 +EXPANSE 1 1 2 1 +EXHALED 0 1 0 1 +EXERT 1 1 1 2 +EXECUTING 0 1 0 1 +EXECUTED 1 1 1 2 +EXCLAIMED 14 1 15 14 +EXCITING 0 1 1 0 +EXCEPT 11 1 11 12 +EXAMINING 2 1 3 2 +EXAMINED 3 1 3 4 +EXACTLY 9 1 9 10 +EXACKLY 0 1 1 0 +EVERYBODY'S 0 1 0 1 +EVERGREWING 0 1 0 1 +EV'YBODY'S 0 1 1 0 +EUSTA 0 1 0 1 +EUSIDES 0 1 0 1 +EUSIBIUS 0 1 0 1 +EUREKA 0 1 0 1 +EUPHRATES 0 1 1 0 +EUPHRANOR 0 1 1 0 +EUPHRANER 0 1 0 1 +EUPHATEES 0 1 0 1 +EUIK 0 1 0 1 +EUGEN 0 1 0 1 +ETS 0 1 0 1 +ETHEREAL 1 1 2 1 +ETHER 2 1 3 2 +ETHELRIED'S 0 1 1 0 +ETHELRED 0 1 0 1 +ET 2 1 3 2 +ESTHER 0 1 1 0 +ESTABLETS 0 1 0 1 +ESCAPED 1 1 1 2 +ESCAPE 12 1 12 13 +ERNESTON 0 1 0 1 +ERNESTINE 0 1 1 0 +ERGUSTARA 0 1 0 1 +ERE'S 0 1 1 0 +EQUERRY'S 0 1 1 0 +EQUERRIES 0 1 0 1 +EQUANT 0 1 0 1 +EPLORRIED 0 1 0 1 +EPIMORPHIN 0 1 0 1 +EPILID'S 0 1 0 1 +EPIGASTER 0 1 1 0 +EPHRAIM 0 1 1 0 +ENTRUSTED 0 1 1 0 +ENTRACT 0 1 0 1 +ENTR'ACTE 0 1 1 0 +ENTIRELY 2 1 3 2 +ENTIRE 2 1 2 3 +ENTERED 10 1 11 10 +ENSUIT 0 1 0 1 +ENSUED 2 1 3 2 +ENSNARES 0 1 1 0 +ENSLAVED 2 1 3 2 +ENSLAVE 1 1 1 2 +ENJOYED 0 1 0 1 +ENJOY 3 1 4 3 +ENJOINING 0 1 0 1 +ENFRANCHISEMENT 0 1 1 0 +ENFORCEMENT 0 1 1 0 +ENEM 0 1 0 1 +ENDURETH 0 1 1 0 +ENDS 0 1 0 1 +ENDOWED 0 1 0 1 +ENDING 1 1 1 2 +ENCOMCHISEMENT 0 1 0 1 +ENCAMP 0 1 0 1 +EMOTION 1 1 2 1 +EMMA 0 1 0 1 +EMETIC 0 1 1 0 +EMBRUN 0 1 1 0 +EMBRON 0 1 0 1 +ELYSIAN 0 1 1 0 +ELYGIANS 0 1 0 1 +ELUSIVE 0 1 0 1 +ELSEWOO 0 1 0 1 +ELLIS 0 
1 1 0 +ELISIONS 0 1 1 0 +ELECTIC 0 1 0 1 +ELDER 1 1 2 1 +ELBOW 0 1 1 0 +ELBERT 0 1 1 0 +ELBER 0 1 0 1 +EL 0 1 0 1 +EIGHTHS 0 1 0 1 +EIGHTH 2 1 3 2 +EIGHT 9 1 9 10 +EGYPTIAN 5 1 6 5 +EGOY 0 1 0 1 +EGGS 1 1 2 1 +EFFORTS 0 1 1 0 +EFFORT 4 1 4 5 +EFFIC 0 1 0 1 +EFFECT 1 1 1 2 +EDGING 2 1 3 2 +EDGED 0 1 0 1 +ECHOLON 0 1 0 1 +EBERGASTER 0 1 0 1 +EAU 0 1 1 0 +EASILY 5 1 5 6 +EARTHLY 1 1 1 2 +EARS 3 1 3 4 +EARLS 0 1 0 1 +EARLIEST 0 1 1 0 +EAR 1 1 2 1 +EAMES 0 1 0 1 +EAD 0 1 1 0 +E'LL 0 1 1 0 +E'ER 0 1 1 0 +E 1 1 2 1 +DUSK 1 1 2 1 +DUPLICATES 1 1 2 1 +DUPE 0 1 0 1 +DUNNING 0 1 1 0 +DUMAS 0 1 1 0 +DUM 0 1 1 0 +DULL 2 1 3 2 +DUET 0 1 0 1 +DUDS 0 1 1 0 +DRUGSTORE 0 1 1 0 +DRUG 0 1 0 1 +DROP 3 1 3 4 +DRINKIN 0 1 0 1 +DRIFTILY 0 1 0 1 +DREADFUL 2 1 2 3 +DREAD 3 1 3 4 +DRAWN 1 1 1 2 +DRAWERS 1 1 2 1 +DRAW 1 1 2 1 +DRAUGHT 2 1 3 2 +DRAT 0 1 1 0 +DRANK 2 1 2 3 +DRAGOOD 0 1 0 1 +DRAGGER 0 1 0 1 +DOZE 0 1 0 1 +DOZ 0 1 0 1 +DOWNING 0 1 1 0 +DOUBTS 1 1 2 1 +DOTHAH 0 1 0 1 +DOTH 1 1 2 1 +DOSE 0 1 1 0 +DORCART 0 1 0 1 +DOOM 0 1 0 1 +DONOVAN 1 1 2 1 +DONEGOOD 0 1 0 1 +DONALD 0 1 0 1 +DON 5 1 5 6 +DOMONICO 0 1 0 1 +DOM 0 1 1 0 +DOLL 0 1 0 1 +DOIN 0 1 1 0 +DOGS 1 1 2 1 +DOEST 1 1 2 1 +DOESN'T 3 1 3 4 +DOCTRIPOIRE 0 1 0 1 +DOCTOR 24 1 25 24 +DOCKYARD 0 1 0 1 +DOCK 0 1 1 0 +DOAN 0 1 1 0 +DIXON 4 1 4 5 +DIVORITES 0 1 0 1 +DIVIDED 2 1 3 2 +DIVIDE 0 1 0 1 +DITZFIELD 0 1 0 1 +DITCHFIELD 0 1 1 0 +DISTRUSTED 0 1 1 0 +DISTRUDGED 0 1 0 1 +DISTRICT 2 1 2 3 +DISTRESSED 2 1 2 3 +DISTRESS 3 1 4 3 +DISTRACT 0 1 0 1 +DISTINGUISHING 0 1 0 1 +DISTINCTS 0 1 0 1 +DISTICHS 0 1 1 0 +DISTANT 4 1 5 4 +DISTAGGER 0 1 0 1 +DISSENTIENT 0 1 1 0 +DISPOVERTY 0 1 0 1 +DISPOSED 2 1 3 2 +DISPOS 0 1 0 1 +DISNEY 0 1 1 0 +DISINFECTING 0 1 1 0 +DISINFECT 0 1 0 1 +DISINDIAN 0 1 0 1 +DISFIGURED 0 1 1 0 +DISCOURSE 2 1 2 3 +DISCOUR 0 1 0 1 +DISASTROUS 0 1 1 0 +DISASTRATES 0 1 0 1 +DISAGREED 0 1 0 1 +DISAGREE 0 1 1 0 +DIS 0 1 0 1 +DIRECT 2 1 2 3 +DIRE 0 1 1 0 +DINNING 0 1 0 1 +DINED 0 1 0 1 +DINE 1 1 2 1 +DINARS 0 1 1 0 +DILUTE 0 1 1 0 +DILIGION 0 1 0 1 +DIJIN 0 1 0 1 +DIGGING 0 1 1 0 +DIEU 0 1 0 1 +DIA 0 1 0 1 +DEVOUR 0 1 1 0 +DETERMINED 4 1 5 4 +DETECTIVES 0 1 1 0 +DETECTIN 0 1 1 0 +DESTRUCTION 0 1 1 0 +DESTROVISION 0 1 0 1 +DESTINIES 0 1 1 0 +DESSERTS 1 1 2 1 +DESPOTIC 0 1 1 0 +DESPITE 2 1 3 2 +DESKED 0 1 0 1 +DESIRES 2 1 3 2 +DESIRE 9 1 9 10 +DESIGNED 2 1 2 3 +DESIGN 1 1 2 1 +DESERVED 0 1 0 1 +DESERTS 0 1 0 1 +DERELICTS 0 1 1 0 +DERDS 0 1 0 1 +DEPTOR 0 1 0 1 +DEPRECATE 0 1 1 0 +DEPLICATES 0 1 0 1 +DEODORIZING 0 1 1 0 +DENZ 0 1 0 1 +DEMETER 3 1 4 3 +DEMEANOR 0 1 0 1 +DELUDE 0 1 0 1 +DELMONICO 0 1 1 0 +DELIVERED 4 1 4 5 +DELIBERATE 0 1 1 0 +DEFTLY 0 1 0 1 +DEFENDED 0 1 0 1 +DEFEND 3 1 4 3 +DEFECTION 0 1 1 0 +DEEPENED 0 1 1 0 +DEEP 9 1 9 10 +DECORTUNA 0 1 0 1 +DECLINING 1 1 1 2 +DECLINE 0 1 0 1 +DECLARING 0 1 1 0 +DEBTS 1 1 1 2 +DEBTOR 0 1 1 0 +DEBT 0 1 0 1 +DEBARRED 0 1 1 0 +DEBARED 0 1 0 1 +DEATHLY 0 1 1 0 +DEATHLIKE 0 1 1 0 +DEATH 16 1 16 17 +DEANS 0 1 1 0 +DEAN 0 1 0 1 +DEAL 11 1 11 12 +DEACH 0 1 1 0 +DAWNING 0 1 0 1 +DATED 1 1 2 1 +DAT 1 1 1 2 +DARKAND 0 1 1 0 +DARED 3 1 3 4 +DAPHNE'S 0 1 1 0 +DANDAN 2 1 3 2 +DANCERS 0 1 0 1 +DANCER 0 1 1 0 +DAMN 0 1 1 0 +DAME'S 0 1 1 0 +DAME 1 1 1 2 +DALYS 0 1 1 0 +DAILY 2 1 2 3 +DAILIES 0 1 0 1 +DAGOS 0 1 1 0 +DAGGULE 0 1 0 1 +DAG 0 1 0 1 +DAEDI 0 1 0 1 +CYS 0 1 0 1 +CYRUP 0 1 0 1 +CYNTHIA 1 1 2 1 +CYNICISM 0 1 1 0 +CYMBALS 0 1 1 0 +CUSTOM 1 1 2 1 +CURSORY 0 1 0 1 +CURSORILY 0 1 1 0 +CURSON 0 1 0 1 +CURSE 1 1 2 1 +CURRENTS 0 1 1 0 +CURRANTS 0 1 0 1 +CURL 0 1 1 0 +CURFAC 0 1 0 1 +CUR 0 1 0 1 +CUPIED 0 1 0 1 +CUP 0 1 
1 0 +CUNITY 0 1 0 1 +CUISINE 0 1 1 0 +CUB 0 1 0 1 +CRYSTALLINE 0 1 1 0 +CRYING 1 1 1 2 +CRUX 0 1 1 0 +CRUSHING 1 1 2 1 +CRUMPLED 1 1 2 1 +CRUMBLY 0 1 1 0 +CRUMBLED 0 1 0 1 +CROYDEN 0 1 0 1 +CROST 0 1 1 0 +CROPPISH 0 1 0 1 +CROPFISH 0 1 0 1 +CROOKS 0 1 1 0 +CRIES 3 1 4 3 +CREW 5 1 5 6 +CREOLE 0 1 0 1 +CREEL 0 1 1 0 +CREEDS 0 1 0 1 +CREDITU 0 1 0 1 +CRAW 0 1 0 1 +CRATES 1 1 2 1 +CRASHING 0 1 0 1 +CRAMBLY 0 1 0 1 +CRABS 0 1 0 1 +CRAB 6 1 7 6 +COYNESS 0 1 1 0 +COY 1 1 2 1 +COXCOMB 0 1 1 0 +COWLEY'S 0 1 1 0 +COURTYARD 2 1 2 3 +COURSING 0 1 1 0 +COURIER 0 1 0 1 +COURFERAC 0 1 0 1 +COUNTENANCE 2 1 2 3 +COUNT 17 1 18 17 +COUNSELS 0 1 1 0 +COUNSELLOR 0 1 0 1 +COUNCILLOR 0 1 1 0 +COUNCIL 0 1 0 1 +COUISINE 0 1 0 1 +COUGH 3 1 3 4 +COTTOM 0 1 0 1 +COT 0 1 0 1 +COSTUM 0 1 0 1 +COST 1 1 2 1 +CORYDON 0 1 1 0 +CORTONA 0 1 1 0 +CORP 0 1 0 1 +CORNWEALTH 0 1 0 1 +CORNERED 0 1 0 1 +CORMORRA 0 1 0 1 +CORKLE 0 1 1 0 +CORDS 0 1 0 1 +CORAL 0 1 1 0 +COQUET 0 1 0 1 +COPP 0 1 0 1 +COP'S 0 1 1 0 +COP 2 1 3 2 +COOPS 0 1 1 0 +COOL 4 1 5 4 +COOKS 0 1 0 1 +COOKER 0 1 0 1 +CONWEALTH 0 1 0 1 +CONTROLLED 1 1 1 2 +CONTROL 1 1 2 1 +CONTINUOUS 0 1 0 1 +CONTINUAL 0 1 1 0 +CONTINGENT 0 1 1 0 +CONTENTION 1 1 1 2 +CONTENDED 0 1 0 1 +CONSUM 0 1 0 1 +CONSUL 0 1 0 1 +CONSTITUTE 1 1 1 2 +CONSTITUENT 0 1 1 0 +CONSONANTS 0 1 1 0 +CONSOMME 0 1 1 0 +CONSOHN 0 1 0 1 +CONINGSBURGH 0 1 1 0 +CONIGSBURG 0 1 0 1 +CONFIRMATION 0 1 1 0 +CONFIRMATESON 0 1 0 1 +CONFINED 0 1 0 1 +CONFINE 0 1 0 1 +CONFIDENTIALLY 0 1 1 0 +CONFIDE 1 1 2 1 +CONFICERE 0 1 1 0 +CONFERS 0 1 0 1 +CONFECTIONERIES 0 1 0 1 +CONCOCTED 1 1 2 1 +CONCLUDED 2 1 2 3 +COMPLYING 0 1 0 1 +COMPLATED 0 1 0 1 +COMPEND 0 1 0 1 +COMPASSER 0 1 0 1 +COMPARABLE 0 1 0 1 +COMORIN 0 1 1 0 +COMMUNITY 4 1 5 4 +COMMONED 0 1 0 1 +COMMITTEE 5 1 6 5 +COMMISSORY 0 1 0 1 +COMMISSARY 1 1 2 1 +COMMENTS 0 1 0 1 +COMMENT 1 1 2 1 +COMMENCED 1 1 2 1 +COMMANDS 2 1 2 3 +COMMANDER 2 1 2 3 +COMIN 0 1 0 1 +COMICAL 0 1 1 0 +COMETH 0 1 1 0 +COMEST 0 1 1 0 +COMEDY 0 1 0 1 +COLT 0 1 0 1 +COLOSSEUM 0 1 1 0 +COLONEL 27 1 28 27 +COLOGNE 0 1 1 0 +COLLETS 0 1 0 1 +COLLEST 0 1 0 1 +COLLECTED 1 1 1 2 +COLLECT 0 1 1 0 +COLISEUM 0 1 0 1 +COLDS 0 1 1 0 +COLD 5 1 6 5 +COLCHESTER'S 0 1 0 1 +COLCHESTER 4 1 5 4 +COINS 1 1 2 1 +COIN 2 1 2 3 +COGNIZED 0 1 0 1 +CODE 0 1 0 1 +COD 0 1 1 0 +COCOA 0 1 1 0 +COCKRELL 0 1 1 0 +COCKLE 0 1 0 1 +COARSING 0 1 0 1 +COARSE 0 1 0 1 +COALESCED 0 1 1 0 +COAL 0 1 0 1 +CLUX 0 1 0 1 +CLUMB 0 1 1 0 +CLOSET 1 1 2 1 +CLOSEST 0 1 1 0 +CLOMB 0 1 1 0 +CLOCKS 0 1 1 0 +CLISPIN 0 1 0 1 +CLINK 0 1 0 1 +CLING 0 1 1 0 +CLIME 1 1 2 1 +CLIMBED 0 1 0 1 +CLEVERLY 0 1 1 0 +CLEVER 2 1 3 2 +CLERVAL 0 1 0 1 +CLER 0 1 0 1 +CLEFTOMANIA 0 1 0 1 +CLEFT 1 1 2 1 +CLEAVE 0 1 1 0 +CLEARED 2 1 2 3 +CLAWS 0 1 1 0 +CLAVIER 0 1 0 1 +CLASSES 3 1 4 3 +CLASS 6 1 6 7 +CLARGA 0 1 0 1 +CLARET 1 1 2 1 +CLAPTOMANIA 0 1 0 1 +CLANNING 0 1 0 1 +CLAIRVAUX 0 1 1 0 +CLACKS 0 1 0 1 +CLACK 0 1 0 1 +CITIZEN 1 1 1 2 +CISEAUX 0 1 1 0 +CIRCUMST 0 1 0 1 +CINRILO 0 1 0 1 +CINDERELLA 0 1 1 0 +CIGARS 1 1 1 2 +CIGARET 0 1 0 1 +CHURCHES 0 1 0 1 +CHURCH 13 1 14 13 +CHUG 0 1 0 1 +CHUCKED 0 1 1 0 +CHRISTOWN 0 1 0 1 +CHRISTOLINE 0 1 0 1 +CHRISTIANS 0 1 1 0 +CHRISTIANITY 2 1 3 2 +CHRISTENING 0 1 1 0 +CHRISTEN 0 1 0 1 +CHRIST 3 1 3 4 +CHOUETTE 0 1 1 0 +CHOOSE 3 1 3 4 +CHONODEMAIRE 0 1 1 0 +CHOKINGLY 0 1 0 1 +CHLORIDE 0 1 0 1 +CHIPS 1 1 2 1 +CHINTZ 0 1 1 0 +CHIN'S 0 1 0 1 +CHILLS 0 1 1 0 +CHILLED 1 1 1 2 +CHILL 1 1 1 2 +CHIEFING 0 1 0 1 +CHIEF 8 1 9 8 +CHIE 0 1 0 1 +CHIDE 0 1 1 0 +CHID 0 1 0 1 +CHEST 3 1 3 4 +CHERRY 0 1 0 1 +CHEER 0 1 0 1 +CHEEKS 4 1 4 5 +CHEEKE 0 1 1 
0 +CHEEKBONES 0 1 1 0 +CHECK 0 1 0 1 +CHAUVELIN 0 1 0 1 +CHATEAU 0 1 0 1 +CHASE 0 1 0 1 +CHARLIE'S 0 1 0 1 +CHARLEY'S 0 1 1 0 +CHARGED 4 1 5 4 +CHARACTERISTIC 0 1 1 0 +CHARACTERED 0 1 0 1 +CHANT 1 1 1 2 +CHANCES 0 1 1 0 +CHANCELLORED 0 1 0 1 +CHANCELLOR 5 1 6 5 +CHANCE 11 1 11 12 +CHALONS 0 1 1 0 +CHAIN 0 1 1 0 +CHAFING 1 1 2 1 +CHADWELL 0 1 1 0 +CELL 1 1 2 1 +CELEM 0 1 0 1 +CELEBRATED 2 1 3 2 +CEDRIC 1 1 2 1 +CEASE 0 1 0 1 +CAVES 0 1 0 1 +CAVERNMENT 0 1 0 1 +CAVALRYMEN 0 1 1 0 +CAUSE 6 1 7 6 +CAUGHT 5 1 5 6 +CATTLEETTA 0 1 0 1 +CATS 1 1 2 1 +CATO 0 1 0 1 +CATHOLIC 0 1 1 0 +CATHEDRAL 0 1 1 0 +CATCHED 0 1 1 0 +CASTLE 8 1 9 8 +CASTETH 0 1 1 0 +CARTS 0 1 0 1 +CART 2 1 3 2 +CARRIED 11 1 12 11 +CARRIAGE 2 1 3 2 +CARMINALS 0 1 0 1 +CARLIS 0 1 0 1 +CARJACK 0 1 0 1 +CARED 4 1 4 5 +CARE 18 1 18 19 +CAR 4 1 5 4 +CAPTURED 2 1 2 3 +CAPTAIN 17 1 17 18 +CAPT 0 1 0 1 +CAPS 0 1 0 1 +CAPRIVY 0 1 0 1 +CAPRIVI'S 0 1 1 0 +CAPLICHOS 0 1 0 1 +CAPITULUM 0 1 1 0 +CAPITULAT 0 1 0 1 +CAPITULANTES 0 1 1 0 +CAPITULAM 0 1 0 1 +CAPITALISTS 0 1 1 0 +CAPITALIST 0 1 0 1 +CAPITAL 2 1 3 2 +CAPE 0 1 0 1 +CAP 4 1 5 4 +CANS 0 1 1 0 +CANNOT 21 1 21 22 +CAMPAIGN 0 1 1 0 +CAMP 0 1 1 0 +CALON 0 1 0 1 +CALM 0 1 0 1 +CALLETH 0 1 1 0 +CALLEST 0 1 1 0 +CAIRO 1 1 2 1 +CAIN 0 1 1 0 +CAGLED 0 1 0 1 +CAGE 7 1 8 7 +CACKED 0 1 1 0 +CABINETS 0 1 0 1 +CABINET 1 1 2 1 +CA'M 0 1 1 0 +C 2 1 3 2 +BYE 0 1 1 0 +BUY 4 1 4 5 +BUTTON 0 1 1 0 +BUTTERFLY 0 1 1 0 +BUSY 5 1 5 6 +BUSTON 0 1 0 1 +BUST 0 1 1 0 +BUSINESSWHICH 0 1 1 0 +BUSINESS 12 1 12 13 +BURYING 0 1 1 0 +BURTLES 0 1 0 1 +BURST 2 1 3 2 +BURSHEBA 0 1 1 0 +BURNHILD 0 1 0 1 +BURNETH 0 1 1 0 +BURNED 2 1 2 3 +BURMANOIS 0 1 0 1 +BURIUM 0 1 0 1 +BURDENS 1 1 2 1 +BURDEN 1 1 1 2 +BUOYANT 0 1 0 1 +BULBS 0 1 1 0 +BUILTON 0 1 0 1 +BUILDS 0 1 1 0 +BUFFETING 0 1 1 0 +BUFFET 0 1 0 1 +BUCK 1 1 1 2 +BRYNHILD'S 0 1 1 0 +BRUSH 3 1 3 4 +BRUNHOLD'S 0 1 0 1 +BROW 0 1 0 1 +BROUGHT 10 1 11 10 +BROTHER 17 1 18 17 +BROKER 0 1 0 1 +BRITTANNIUM 0 1 0 1 +BRING 12 1 12 13 +BRILLIANT 4 1 5 4 +BRIGHTENS 0 1 0 1 +BRIEF 2 1 3 2 +BREATHLESS 1 1 2 1 +BREATH 6 1 6 7 +BREASTPAND 0 1 0 1 +BREAST 1 1 2 1 +BREAKERS 0 1 1 0 +BRAZY 0 1 0 1 +BRAXBURY 0 1 0 1 +BRAU 0 1 1 0 +BRASS 1 1 2 1 +BRAMMER 0 1 0 1 +BRAMMEN 0 1 0 1 +BRAMID 0 1 0 1 +BRAMIAN 0 1 0 1 +BRAHMIN 0 1 0 1 +BRAHM 0 1 0 1 +BRACEY 0 1 0 1 +BRACES 0 1 0 1 +BRACELET 1 1 1 2 +BOZARD 0 1 0 1 +BOX 10 1 10 11 +BOWS 1 1 1 2 +BOURGES 0 1 1 0 +BOURGE 0 1 0 1 +BOTTOMED 0 1 1 0 +BOTTLED 0 1 1 0 +BOTHERED 1 1 1 2 +BOTH 16 1 17 16 +BOSTON 2 1 3 2 +BORN 7 1 7 8 +BOONE 0 1 1 0 +BOON 1 1 2 1 +BOOLA 0 1 0 1 +BONSES 0 1 0 1 +BONIUS 0 1 0 1 +BONES 2 1 2 3 +BOLTO 0 1 0 1 +BOLTED 1 1 1 2 +BOLT 0 1 1 0 +BOLSHEVIKI 2 1 3 2 +BOILED 1 1 2 1 +BOEOTIAN 0 1 1 0 +BOBBED 0 1 0 1 +BOB'S 1 1 2 1 +BOATS 0 1 0 1 +BOAT 8 1 8 9 +BLUE 7 1 7 8 +BLOTCHETTE 0 1 0 1 +BLOT 0 1 0 1 +BLOOMIN 0 1 1 0 +BLOODSTAINED 0 1 0 1 +BLOODSHED 0 1 1 0 +BLOOD 7 1 8 7 +BLOKES 0 1 1 0 +BLOKE 0 1 1 0 +BLODGET 0 1 0 1 +BLODGE 0 1 0 1 +BLOCK 1 1 2 1 +BLEST 0 1 0 1 +BLESSED 5 1 6 5 +BLENDEST 0 1 0 1 +BLANKETED 0 1 1 0 +BLANKET 0 1 0 1 +BLAMMED 0 1 0 1 +BLACKLEG 0 1 1 0 +BLACKBURN 1 1 2 1 +BLACKBIRD 0 1 0 1 +BLACK 13 1 13 14 +BITTER 3 1 3 4 +BITS 0 1 0 1 +BITING 0 1 1 0 +BITCHER 0 1 0 1 +BISQUE 0 1 1 0 +BISHO 0 1 0 1 +BIRDSEYE 0 1 1 0 +BIRD'S 0 1 0 1 +BIN 1 1 2 1 +BILLY 0 1 0 1 +BIG 8 1 8 9 +BIDS 0 1 0 1 +BIBOCO 0 1 0 1 +BIBLE 3 1 4 3 +BHANG 0 1 1 0 +BEWARE 1 1 2 1 +BEULAH 0 1 1 0 +BETWEEN 21 1 21 22 +BETTER 28 1 29 28 +BETOUT 0 1 0 1 +BETIDE 1 1 2 1 +BETHUNE 0 1 1 0 +BETCHA 0 1 1 0 +BETAKEN 0 1 1 0 +BET 0 1 1 0 +BEST 18 1 19 18 +BESSIE 
0 1 0 1 +BESOON 0 1 0 1 +BESIDES 9 1 9 10 +BESIDE 3 1 4 3 +BERNETH 0 1 0 1 +BERING 0 1 0 1 +BENSON 0 1 1 0 +BENOIT 0 1 1 0 +BENOIS 0 1 0 1 +BENNETT'S 0 1 0 1 +BENNETT 0 1 1 0 +BEN 0 1 0 1 +BELT 0 1 0 1 +BELOVED 1 1 1 2 +BELONGS 1 1 1 2 +BELONGED 0 1 1 0 +BELLY 2 1 3 2 +BELLOWED 0 1 1 0 +BELLE 0 1 1 0 +BELIKE 0 1 1 0 +BELIEVE 15 1 16 15 +BELEASE 0 1 0 1 +BEGUN 2 1 3 2 +BEGIN 9 1 9 10 +BEGGING 1 1 2 1 +BEGGED 8 1 9 8 +BEFALL 0 1 0 1 +BEFAL 0 1 1 0 +BEESER 0 1 0 1 +BEDS 1 1 1 2 +BEAUMANOIR 0 1 1 0 +BEATER 0 1 0 1 +BEAT'S 0 1 0 1 +BEARING 5 1 6 5 +BEARD 1 1 1 2 +BEALE'S 0 1 1 0 +BAY 0 1 1 0 +BAXTER 0 1 1 0 +BAXT 0 1 0 1 +BAVARY 0 1 0 1 +BATH 1 1 1 2 +BAT 1 1 1 2 +BASSORA 0 1 0 1 +BASEMENT 0 1 1 0 +BASE 1 1 1 2 +BARRANGERS 0 1 0 1 +BARKLEY 0 1 1 0 +BARK 1 1 1 2 +BARIUM 0 1 1 0 +BARGENO 0 1 0 1 +BARGELLO 0 1 1 0 +BARELY 1 1 2 1 +BAPTISMAL 0 1 1 0 +BANNING 0 1 0 1 +BANISH 0 1 1 0 +BANION 0 1 0 1 +BANDON 0 1 0 1 +BANDINELLO 0 1 1 0 +BANACY 0 1 0 1 +BAN 0 1 0 1 +BALLROOM 0 1 1 0 +BALLOCK 0 1 1 0 +BALLIC 0 1 0 1 +BALL 0 1 0 1 +BALES 2 1 2 3 +BALAMMED 0 1 1 0 +BAILIQUE 0 1 0 1 +BAILEY 1 1 2 1 +BAILEAF 0 1 0 1 +BAGS 2 1 2 3 +BAGDAD 0 1 0 1 +BAESON 0 1 0 1 +BADE 1 1 1 2 +BADAWI 0 1 1 0 +BADAH 0 1 0 1 +BACKS 1 1 1 2 +BACKING 0 1 1 0 +BACK 51 1 51 52 +BABES 0 1 1 0 +BABE 0 1 0 1 +AZARIAH 0 1 1 0 +AY 0 1 0 1 +AWK'ARD 0 1 1 0 +AWAY 39 1 39 40 +AW 0 1 1 0 +AVIGUE 0 1 0 1 +AVIDITY 0 1 1 0 +AVIDE 0 1 0 1 +AVENUE 2 1 3 2 +AVE 0 1 1 0 +AUTON 0 1 0 1 +AUNTS 0 1 0 1 +ATTITTING 0 1 0 1 +ATTENTIVELY 3 1 4 3 +ATTENTIONS 0 1 0 1 +ATTENDED 0 1 0 1 +ATTEMPTED 2 1 3 2 +ATOM 0 1 0 1 +ATHELSTANE 0 1 1 0 +ASSYRIAL 0 1 0 1 +ASSUME 1 1 1 2 +ASSER 0 1 0 1 +ASSEMBLAGE 0 1 1 0 +ASSAULT 1 1 1 2 +ASSAILING 0 1 1 0 +ASLEEP 9 1 10 9 +ASKS 0 1 1 0 +ASIA 0 1 1 0 +ASHUR 0 1 1 0 +ASHORE 4 1 4 5 +ASHER 0 1 0 1 +ASH 1 1 1 2 +ASCENSION 0 1 1 0 +ARTIST 5 1 6 5 +ARTHUR 0 1 1 0 +ARSTS 0 1 1 0 +ARSINOE'S 0 1 1 0 +ARSENO'S 0 1 0 1 +ARSENAL 0 1 0 1 +ARRIVES 1 1 1 2 +ARRIVE 3 1 4 3 +ARRANGED 1 1 2 1 +ARQUEBAUL 0 1 0 1 +ARPE 0 1 0 1 +AROUSED 0 1 1 0 +AROSE 2 1 2 3 +ARMY 19 1 19 20 +ARMANQUIN 0 1 0 1 +ARKANSAS 0 1 1 0 +ARISING 0 1 0 1 +ARISED 0 1 0 1 +ARIOSABOO 0 1 0 1 +ARCHISON 0 1 0 1 +ARCHIBALD 0 1 1 0 +ARCHIAS 0 1 1 0 +APT 1 1 1 2 +APPROVE 0 1 1 0 +APPROCATUR 0 1 0 1 +APPEARED 8 1 8 9 +APPEARANCE 6 1 7 6 +APPEAR 5 1 6 5 +APPEALED 0 1 1 0 +APPARENTLY 4 1 5 4 +APOMORPHINE 0 1 1 0 +APOLLO 0 1 0 1 +APOLIS 0 1 0 1 +APES 1 1 2 1 +APE 1 1 1 2 +APART 3 1 4 3 +ANYWAY 1 1 2 1 +ANYTHING 31 1 31 32 +ANYONE'S 0 1 1 0 +ANVILS 0 1 1 0 +ANTOLIAN 0 1 1 0 +ANTIDOTES 0 1 1 0 +ANTHONY 0 1 1 0 +ANSWERS 1 1 2 1 +ANSWERED 26 1 26 27 +ANNOYED 2 1 3 2 +ANNOY 0 1 0 1 +ANNAWING 0 1 0 1 +ANGUISH 3 1 4 3 +ANGLES 0 1 0 1 +ANGESTON 0 1 1 0 +ANGERSON 0 1 0 1 +ANDY'S 0 1 0 1 +ANDS 0 1 1 0 +ANDBUT 0 1 1 0 +ANCIENT 2 1 2 3 +ANCESTORS 0 1 1 0 +AMY 0 1 0 1 +AMOUR 0 1 1 0 +AMORE 0 1 0 1 +AMID 1 1 1 2 +AMERGE 0 1 0 1 +AMBILS 0 1 0 1 +AMATIC 0 1 0 1 +ALTHIE 0 1 0 1 +ALTHIA 0 1 0 1 +ALREADY 15 1 16 15 +ALREAD 0 1 0 1 +ALOUD 2 1 2 3 +ALONGER 0 1 1 0 +ALMAN 0 1 0 1 +ALLS 0 1 1 0 +ALLOWED 6 1 7 6 +ALLOWANCE 0 1 1 0 +ALLIED 0 1 1 0 +ALLEN 0 1 1 0 +ALLEGATOR 0 1 0 1 +ALLAYS 0 1 1 0 +ALKALOID 0 1 1 0 +ALISANDRO 0 1 0 1 +ALIO 0 1 0 1 +ALIGHTED 1 1 2 1 +ALID 0 1 0 1 +ALICELA 0 1 0 1 +ALI'S 0 1 1 0 +ALF 0 1 1 0 +ALEXAM 0 1 0 1 +ALESSANDRO 0 1 1 0 +ALE 0 1 0 1 +ALCOHOLD 0 1 0 1 +ALBERT 10 1 10 11 +ALBEA 0 1 0 1 +ALARMED 1 1 1 2 +ALAD 0 1 0 1 +AKES 0 1 0 1 +AKELET 0 1 0 1 +AKALOID 0 1 0 1 +AID 4 1 5 4 +AGRIAN 0 1 0 1 +AGRARIAN 0 1 1 0 +AGONE 0 1 1 0 +AGAINST 26 1 27 26 +AFTERWARDS 5 1 6 5 
+AFTERWARD 2 1 2 3 +AFOOT 0 1 0 1 +AFIRE 0 1 1 0 +AFAR 0 1 0 1 +ADVITY 0 1 0 1 +ADVENTURES 2 1 3 2 +ADULTERATED 0 1 0 1 +ADULT 0 1 1 0 +ADN'T 0 1 1 0 +ADIER 0 1 0 1 +ADHERENTS 0 1 1 0 +ADHERENCE 0 1 0 1 +ADDURE 0 1 0 1 +ADDLESTEIN 0 1 0 1 +ADDISMA 0 1 0 1 +ADDEST 0 1 0 1 +ADAIR 0 1 1 0 +ADAD 0 1 0 1 +AD 0 1 1 0 +ACQUIRE 0 1 0 1 +ACKNOWLEDGE 1 1 2 1 +ACHESON 0 1 1 0 +ACCUSE 0 1 0 1 +ACCOUNT 7 1 7 8 +ACCEPT 4 1 5 4 +ACADANS 0 1 0 1 +ABSTAINED 0 1 0 1 +ABSTAIN 0 1 1 0 +ABSORBED 4 1 4 5 +ABSOLVED 0 1 1 0 +ABSENTEE 0 1 1 0 +ABSENTE 0 1 0 1 +ABROAD 4 1 4 5 +ABODE 2 1 3 2 +ABIDING 0 1 0 1 +ABBS 0 1 0 1 +ABASEMENT 0 1 0 1 +ZEAL 1 0 1 1 +ZAMAN 4 0 4 4 +YUNKERS 1 0 1 1 +YOURSELF 9 0 9 9 +YOUNGEST 1 0 1 1 +YOUNGERS 2 0 2 2 +YOUNG 39 0 39 39 +YONDER 5 0 5 5 +YOLKS 1 0 1 1 +YIELDED 1 0 1 1 +YIELD 1 0 1 1 +YESTERDAY 3 0 3 3 +YEP 1 0 1 1 +YELLOW 4 0 4 4 +YELLED 1 0 1 1 +YELL 1 0 1 1 +YEARNS 1 0 1 1 +YEARNING 1 0 1 1 +YEA 1 0 1 1 +YAWN 1 0 1 1 +YARNS 1 0 1 1 +YARDS 1 0 1 1 +YACHT 1 0 1 1 +WYLDER'S 1 0 1 1 +WYLDER 1 0 1 1 +WRITTEN 5 0 5 5 +WRITING 1 0 1 1 +WRINKLES 1 0 1 1 +WRINGING 1 0 1 1 +WRIGGLING 1 0 1 1 +WRETCH 3 0 3 3 +WRECKAGE 1 0 1 1 +WRAPPING 1 0 1 1 +WOUND 2 0 2 2 +WOULDST 1 0 1 1 +WOULDN'T 9 0 9 9 +WORST 1 0 1 1 +WORN 3 0 3 3 +WORLDLY 1 0 1 1 +WORKSHOP 1 0 1 1 +WORKHOUSE 1 0 1 1 +WORKERS 1 0 1 1 +WORKER 1 0 1 1 +WORE 3 0 3 3 +WOODSON 1 0 1 1 +WONT 4 0 4 4 +WONDROUS 1 0 1 1 +WONDERS 1 0 1 1 +WONDERING 1 0 1 1 +WONDERFULLY 1 0 1 1 +WONDERFUL 6 0 6 6 +WOMAN'S 2 0 2 2 +WOLF 1 0 1 1 +WOES 1 0 1 1 +WIZARDS 1 0 1 1 +WITTY 1 0 1 1 +WITNESSED 1 0 1 1 +WITHIN 11 0 11 11 +WITHHELD 2 0 2 2 +WITHDRAWN 2 0 2 2 +WITHAL 2 0 2 2 +WITCHES 1 0 1 1 +WIT 1 0 1 1 +WISTFUL 1 0 1 1 +WISHING 3 0 3 3 +WISHES 3 0 3 3 +WISH 15 0 15 15 +WISELY 1 0 1 1 +WISE 3 0 3 3 +WISDOM 4 0 4 4 +WISCONSIN 1 0 1 1 +WINTERS 1 0 1 1 +WINKED 1 0 1 1 +WINGS 2 0 2 2 +WING 1 0 1 1 +WINDOWS 1 0 1 1 +WIND 6 0 6 6 +WIN 2 0 2 2 +WILLINGLY 1 0 1 1 +WILLING 4 0 4 4 +WILFUL 1 0 1 1 +WIDOWER 1 0 1 1 +WIDEN 1 0 1 1 +WICKED 2 0 2 2 +WHOSO 1 0 1 1 +WHOOP 1 0 1 1 +WHOMSOEVER 1 0 1 1 +WHOM 20 0 20 20 +WHOEVER 2 0 2 2 +WHITEHALL 1 0 1 1 +WHISTLING 2 0 2 2 +WHISTLE 3 0 3 3 +WHISPERED 2 0 2 2 +WHIPPINGS 1 0 1 1 +WHIP 2 0 2 2 +WHIMPERING 1 0 1 1 +WHIM 1 0 1 1 +WHILST 1 0 1 1 +WHEREVER 2 0 2 2 +WHEREUPON 3 0 3 3 +WHEREIN 2 0 2 2 +WHEREFORE 2 0 2 2 +WHEREBY 2 0 2 2 +WHENEVER 4 0 4 4 +WHENCE 5 0 5 5 +WHEELS 1 0 1 1 +WHATSOEVER 1 0 1 1 +WETTED 1 0 1 1 +WET 3 0 3 3 +WESTERN 2 0 2 2 +WEREN'T 1 0 1 1 +WEPT 2 0 2 2 +WENCH 1 0 1 1 +WELSH 1 0 1 1 +WELCOMED 1 0 1 1 +WELCOME 4 0 4 4 +WEIGHTY 1 0 1 1 +WEIGHT 1 0 1 1 +WEIGHING 1 0 1 1 +WEEPING 3 0 3 3 +WEEKS 1 0 1 1 +WEEKLY 1 0 1 1 +WEEK 6 0 6 6 +WEDNESDAY 2 0 2 2 +WEB 1 0 1 1 +WEATHER 5 0 5 5 +WEARY 4 0 4 4 +WEARING 2 0 2 2 +WEAPONS 1 0 1 1 +WEAPON 1 0 1 1 +WEALTHY 3 0 3 3 +WEALTH 3 0 3 3 +WEAKNESS 4 0 4 4 +WEAK 2 0 2 2 +WE'D 1 0 1 1 +WAYS 5 0 5 5 +WAYLAID 1 0 1 1 +WAVING 1 0 1 1 +WAVES 2 0 2 2 +WAVE 1 0 1 1 +WATERY 1 0 1 1 +WATERVILLE 1 0 1 1 +WATERS 1 0 1 1 +WATCHING 7 0 7 7 +WASN'T 3 0 3 3 +WASHINGTON 2 0 2 2 +WASHED 4 0 4 4 +WASH 4 0 4 4 +WARS 2 0 2 2 +WARRANT 1 0 1 1 +WARNING 1 0 1 1 +WARNER 1 0 1 1 +WARN'T 2 0 2 2 +WARMTH 1 0 1 1 +WARMLY 1 0 1 1 +WAREHOUSES 1 0 1 1 +WANTS 5 0 5 5 +WANTON 1 0 1 1 +WANTING 1 0 1 1 +WALLS 4 0 4 4 +WALLET 2 0 2 2 +WALKED 10 0 10 10 +WAKING 2 0 2 2 +WAKED 2 0 2 2 +WAITERS 1 0 1 1 +WAITER 1 0 1 1 +WAIST 1 0 1 1 +WAGONS 1 0 1 1 +WAGON 1 0 1 1 +WADDLED 1 0 1 1 +W 1 0 1 1 +VRONSKY 1 0 1 1 +VOYAGES 1 0 1 1 +VOWS 2 0 2 2 +VOWELS 1 0 1 1 +VOW 1 0 1 1 +VOTING 2 0 2 2 +VOTED 1 0 1 1 +VON 1 0 1 1 
+VOLUNTEERS 2 0 2 2 +VOLUNTARILY 1 0 1 1 +VOLUMINOUS 1 0 1 1 +VOLUME 2 0 2 2 +VOLLEY 1 0 1 1 +VOLCANOES 1 0 1 1 +VOICELESS 1 0 1 1 +VOICE 20 0 20 20 +VITRIOL 1 0 1 1 +VITAL 1 0 1 1 +VISITOR 4 0 4 4 +VISITING 1 0 1 1 +VISITED 2 0 2 2 +VISIT 8 0 8 8 +VISION 1 0 1 1 +VISCOUNT 1 0 1 1 +VIRTUOUS 2 0 2 2 +VIRTUE 2 0 2 2 +VIRGINIA 1 0 1 1 +VIOLENTLY 1 0 1 1 +VINTAGE 2 0 2 2 +VINE 1 0 1 1 +VINDICTIVENESS 1 0 1 1 +VILLAGE 2 0 2 2 +VILE 1 0 1 1 +VIGOROUS 3 0 3 3 +VIGILANT 1 0 1 1 +VIEWS 1 0 1 1 +VIEW 1 0 1 1 +VICTORY 1 0 1 1 +VICTORIAN 1 0 1 1 +VICTIMS 1 0 1 1 +VICTIM 3 0 3 3 +VICIOUS 3 0 3 3 +VICES 1 0 1 1 +VICE 1 0 1 1 +VEXED 1 0 1 1 +VEXATION 1 0 1 1 +VESSEL 2 0 2 2 +VERSE 1 0 1 1 +VERDICT 3 0 3 3 +VENTURE 2 0 2 2 +VENICE 2 0 2 2 +VENIAL 1 0 1 1 +VENGEANCE 1 0 1 1 +VEINS 1 0 1 1 +VEILS 1 0 1 1 +VEIL 2 0 2 2 +VEHICLES 1 0 1 1 +VEGETABLES 1 0 1 1 +VEGETABLE 1 0 1 1 +VAULTED 1 0 1 1 +VAUDEVILLE 1 0 1 1 +VARIES 1 0 1 1 +VARIED 1 0 1 1 +VANITY 1 0 1 1 +VANISHED 1 0 1 1 +VAMPA 2 0 2 2 +VALUES 2 0 2 2 +VALUE 3 0 3 3 +VALUABLES 1 0 1 1 +VALJEAN'S 3 0 3 3 +VALJEAN 7 0 7 7 +VAGUELY 1 0 1 1 +VAGUE 1 0 1 1 +VACATION 1 0 1 1 +VACANTLY 1 0 1 1 +UTTERLY 3 0 3 3 +UTTERING 1 0 1 1 +UTTERED 4 0 4 4 +UTMOST 5 0 5 5 +USURPER 2 0 2 2 +USING 3 0 3 3 +USEST 1 0 1 1 +URGE 1 0 1 1 +UPSET 1 0 1 1 +UPRIGHT 1 0 1 1 +UPPER 5 0 5 5 +UNWEPT 1 0 1 1 +UNUSUALLY 1 0 1 1 +UNUSUAL 2 0 2 2 +UNTIL 16 0 16 16 +UNSWERVING 1 0 1 1 +UNSOUGHT 1 0 1 1 +UNSELFISH 1 0 1 1 +UNSEASONABLE 1 0 1 1 +UNREASONABLE 1 0 1 1 +UNPRESSED 1 0 1 1 +UNPLEASANT 3 0 3 3 +UNPITIED 1 0 1 1 +UNNATURAL 1 0 1 1 +UNMISTAKABLY 1 0 1 1 +UNLUCKY 2 0 2 2 +UNLIKELY 1 0 1 1 +UNKNOWN 2 0 2 2 +UNKIND 1 0 1 1 +UNJOINTED 1 0 1 1 +UNIVERSE 1 0 1 1 +UNIVERSAL 5 0 5 5 +UNITED 5 0 5 5 +UNISON 1 0 1 1 +UNIONISTS 1 0 1 1 +UNION 1 0 1 1 +UNINTENTIONAL 1 0 1 1 +UNIFORM 2 0 2 2 +UNHEARD 1 0 1 1 +UNHAPPINESS 1 0 1 1 +UNGRATEFUL 3 0 3 3 +UNFORTUNATELY 2 0 2 2 +UNFORTUNATE 2 0 2 2 +UNFLATTERING 1 0 1 1 +UNEASY 4 0 4 4 +UNEASILY 1 0 1 1 +UNDOUBTEDLY 1 0 1 1 +UNDERTOOK 1 0 1 1 +UNDERTONE 1 0 1 1 +UNDERTAKE 1 0 1 1 +UNDERSTOOD 6 0 6 6 +UNDERSTANDS 1 0 1 1 +UNDERSTANDING 5 0 5 5 +UNDERSTAND 7 0 7 7 +UNDERNEATH 1 0 1 1 +UNDERGROUND 1 0 1 1 +UNDERGO 1 0 1 1 +UNCONNECTED 1 0 1 1 +UNCONCERN 1 0 1 1 +UNCOMMON 1 0 1 1 +UNCOMFORTABLY 2 0 2 2 +UNCOMFORTABLE 1 0 1 1 +UNCLE'S 2 0 2 2 +UNCERTAIN 2 0 2 2 +UNBURDEN 1 0 1 1 +UNAWARE 1 0 1 1 +UNASSISTED 1 0 1 1 +UNALTERABLE 1 0 1 1 +UNABLE 1 0 1 1 +UGLY 1 0 1 1 +TYRANTS 1 0 1 1 +TYRANT 2 0 2 2 +TWIST 1 0 1 1 +TWILIGHT 1 0 1 1 +TWICE 2 0 2 2 +TWENTY 16 0 16 16 +TWAS 1 0 1 1 +TWAIN 2 0 2 2 +TUTORS 2 0 2 2 +TUTOR 1 0 1 1 +TURRETS 1 0 1 1 +TURKISH 1 0 1 1 +TURK 1 0 1 1 +TURBAN 1 0 1 1 +TUNE 1 0 1 1 +TUMULT 1 0 1 1 +TUMBLE 1 0 1 1 +TUG 1 0 1 1 +TUESDAY 1 0 1 1 +TUCKED 1 0 1 1 +TUBE 2 0 2 2 +TRUSTWORTHY 1 0 1 1 +TRUSTED 1 0 1 1 +TRUST 3 0 3 3 +TRUNK 1 0 1 1 +TROUT 1 0 1 1 +TROUSERS 2 0 2 2 +TROUBLING 1 0 1 1 +TROUBLED 6 0 6 6 +TROUBLE 8 0 8 8 +TROOPS 2 0 2 2 +TROLL 1 0 1 1 +TRIUMPHING 1 0 1 1 +TRIUMPH 3 0 3 3 +TRIP 2 0 2 2 +TRIM 1 0 1 1 +TRIFLING 3 0 3 3 +TRIBUTE 2 0 2 2 +TRIANGLE 1 0 1 1 +TRIAL 3 0 3 3 +TREND 1 0 1 1 +TREMBLING 2 0 2 2 +TREMBLE 1 0 1 1 +TREES 3 0 3 3 +TREE 9 0 9 9 +TREATMENT 1 0 1 1 +TREATED 1 0 1 1 +TREAT 3 0 3 3 +TREASONS 1 0 1 1 +TREACHEROUSLY 1 0 1 1 +TRAVILLA 1 0 1 1 +TRAVELLERS 2 0 2 2 +TRAP 2 0 2 2 +TRANSPORTED 2 0 2 2 +TRANSPARENT 1 0 1 1 +TRANSLATED 1 0 1 1 +TRANSITORINESS 1 0 1 1 +TRANSFORMING 1 0 1 1 +TRANSFIGURED 1 0 1 1 +TRANSFERENCE 1 0 1 1 +TRANQUILLITIES 1 0 1 1 +TRAMP 2 0 2 2 +TRAGIC 1 0 1 1 +TRADITIONAL 1 0 1 1 +TRACEABLE 1 
0 1 1 +TRACE 2 0 2 2 +TOY 1 0 1 1 +TOWNSFOLK 1 0 1 1 +TOWERS 1 0 1 1 +TOWERING 1 0 1 1 +TOUCHING 4 0 4 4 +TOUCHED 5 0 5 5 +TOUCH 3 0 3 3 +TOTING 1 0 1 1 +TOSSING 1 0 1 1 +TOSSED 1 0 1 1 +TORTURES 1 0 1 1 +TORN 2 0 2 2 +TORMENTOR 2 0 2 2 +TOPS 1 0 1 1 +TOPIC 1 0 1 1 +TONGUES 1 0 1 1 +TONES 2 0 2 2 +TONE 6 0 6 6 +TOMBS 1 0 1 1 +TOMATO 1 0 1 1 +TOLERABLY 1 0 1 1 +TOKEN 1 0 1 1 +TOILING 1 0 1 1 +TOILETTE 1 0 1 1 +TOIL 1 0 1 1 +TOGETHER 11 0 11 11 +TOES 2 0 2 2 +TOBACCO 7 0 7 7 +TOASTED 2 0 2 2 +TOAST 1 0 1 1 +TIS 4 0 4 4 +TIPPLING 1 0 1 1 +TINY 1 0 1 1 +TINKLE 1 0 1 1 +TINCTURED 1 0 1 1 +TIMEPIECE 1 0 1 1 +TIME'S 1 0 1 1 +TIMBER 1 0 1 1 +TIGHTLY 2 0 2 2 +TIGHT 1 0 1 1 +TIDES 1 0 1 1 +TICKING 1 0 1 1 +TICKET 1 0 1 1 +TIBER 1 0 1 1 +THYSELF 3 0 3 3 +THYME 1 0 1 1 +THWARTED 1 0 1 1 +THURSDAY 1 0 1 1 +THUNDER 3 0 3 3 +THRUST 6 0 6 6 +THROWN 1 0 1 1 +THROW 2 0 2 2 +THROUGHOUT 3 0 3 3 +THRONE 2 0 2 2 +THROBBED 1 0 1 1 +THROAT 2 0 2 2 +THRILLING 1 0 1 1 +THREES 1 0 1 1 +THREATS 3 0 3 3 +THREATENED 1 0 1 1 +THREAD 2 0 2 2 +THRACE 1 0 1 1 +THOUSANDTH 1 0 1 1 +THOUGHTS 5 0 5 5 +THOUGHTFUL 2 0 2 2 +THOROUGHLY 1 0 1 1 +THORNTON 4 0 4 4 +THONG 1 0 1 1 +THOMAS 2 0 2 2 +THIRTY 7 0 7 7 +THIRTEEN 1 0 1 1 +THIRSTY 1 0 1 1 +THIRSTING 1 0 1 1 +THINKS 2 0 2 2 +THIEVES 1 0 1 1 +THIEF 2 0 2 2 +THICKENING 1 0 1 1 +THICK 4 0 4 4 +THEY'D 2 0 2 2 +THEREWITH 1 0 1 1 +THEREIN 3 0 3 3 +THEREFORE 12 0 12 12 +THEREAFTER 1 0 1 1 +THERE'LL 1 0 1 1 +THEORY 2 0 2 2 +THEOLOGIANS 1 0 1 1 +THENCEFORTH 1 0 1 1 +THENCE 1 0 1 1 +THANKFUL 2 0 2 2 +THANKED 2 0 2 2 +THANK 7 0 7 7 +TEXAS 1 0 1 1 +TESTING 1 0 1 1 +TESTIMONY 1 0 1 1 +TESTIFY 2 0 2 2 +TERROR 5 0 5 5 +TERRIFIC 2 0 2 2 +TERRIBLE 4 0 4 4 +TERMS 1 0 1 1 +TERM 2 0 2 2 +TERENTIUS 1 0 1 1 +TENDING 1 0 1 1 +TENDERNESS 1 0 1 1 +TENDERLY 1 0 1 1 +TENDER 3 0 3 3 +TENDENCY 1 0 1 1 +TENACITY 1 0 1 1 +TEMPTRESS 1 0 1 1 +TEMPTING 1 0 1 1 +TEMPTATION 4 0 4 4 +TEMPT 1 0 1 1 +TEMPORARY 2 0 2 2 +TEMPLARS 2 0 2 2 +TEMPEST 1 0 1 1 +TEMPERATURE 1 0 1 1 +TEMPERATE 2 0 2 2 +TEMPERAMENT 1 0 1 1 +TEMPER 1 0 1 1 +TELLING 2 0 2 2 +TELEPHONE 1 0 1 1 +TEEMING 1 0 1 1 +TECHNICAL 1 0 1 1 +TEASPOONFUL 1 0 1 1 +TEARS 6 0 6 6 +TEARING 1 0 1 1 +TEAR 3 0 3 3 +TEACHING 1 0 1 1 +TEACHERS 1 0 1 1 +TEACH 2 0 2 2 +TAYLOR 1 0 1 1 +TAXES 1 0 1 1 +TAUNTS 1 0 1 1 +TAUGHT 1 0 1 1 +TASTES 1 0 1 1 +TASTED 2 0 2 2 +TASK 3 0 3 3 +TARRIED 1 0 1 1 +TAPE 1 0 1 1 +TAP 1 0 1 1 +TANNER 1 0 1 1 +TALL 2 0 2 2 +TALKING 5 0 5 5 +TALKER 1 0 1 1 +TALK 15 0 15 15 +TALES 1 0 1 1 +TALENT 1 0 1 1 +TAKINGS 1 0 1 1 +TAKES 3 0 3 3 +TAINTED 1 0 1 1 +TAILS 2 0 2 2 +TAILOR'S 1 0 1 1 +TAIL 3 0 3 3 +TAGGING 1 0 1 1 +TACK 1 0 1 1 +TABLES 2 0 2 2 +TABLE 7 0 7 7 +SYSTEM 2 0 2 2 +SYRINGE 1 0 1 1 +SYMPTOMS 1 0 1 1 +SYMPATHY 4 0 4 4 +SYMPATHIES 1 0 1 1 +SWUNG 3 0 3 3 +SWITCHED 1 0 1 1 +SWITCH 1 0 1 1 +SWISS 1 0 1 1 +SWINGING 1 0 1 1 +SWINGED 1 0 1 1 +SWIMS 1 0 1 1 +SWIFTLY 1 0 1 1 +SWEPT 1 0 1 1 +SWELL 1 0 1 1 +SWEETNESS 2 0 2 2 +SWEETMEATS 2 0 2 2 +SWEETMEAT 2 0 2 2 +SWEET 3 0 3 3 +SWEEPING 1 0 1 1 +SWEEP 1 0 1 1 +SWEAR 5 0 5 5 +SWAYING 1 0 1 1 +SWAY 1 0 1 1 +SWARMED 2 0 2 2 +SWAM 1 0 1 1 +SWALLOWING 1 0 1 1 +SWALLOWED 4 0 4 4 +SVIAZHSKY 1 0 1 1 +SUSTAINS 1 0 1 1 +SUSPICIOUS 3 0 3 3 +SUSPENDED 1 0 1 1 +SUSPECTED 6 0 6 6 +SUSPECT 1 0 1 1 +SUSAN'S 1 0 1 1 +SURVEYED 1 0 1 1 +SURROUNDINGS 1 0 1 1 +SURROUNDING 1 0 1 1 +SURROUNDED 1 0 1 1 +SURRENDERING 1 0 1 1 +SURRENDERED 1 0 1 1 +SURRENDER 2 0 2 2 +SURPRISED 2 0 2 2 +SURPRISE 4 0 4 4 +SURPLICE 1 0 1 1 +SURPASS 1 0 1 1 +SURMOUNTED 1 0 1 1 +SURLY 1 0 1 1 +SURFACE 3 0 3 3 +SUPPRESS 1 0 1 1 +SUPPOSITION 1 0 1 1 
+SUPPOSED 6 0 6 6 +SUPPORTED 1 0 1 1 +SUPPORT 1 0 1 1 +SUPPLY 2 0 2 2 +SUPPLIED 3 0 3 3 +SUPPLICATION 1 0 1 1 +SUPPER 1 0 1 1 +SUPERNATURAL 2 0 2 2 +SUPERNACULUM 1 0 1 1 +SUPERLATIVE 1 0 1 1 +SUPERIORS 1 0 1 1 +SUPERIOR 4 0 4 4 +SUP 1 0 1 1 +SUNSHINY 1 0 1 1 +SUNSHINE 1 0 1 1 +SUNRISE 2 0 2 2 +SUNNYSIDE 1 0 1 1 +SUNG 2 0 2 2 +SUNDAY 4 0 4 4 +SUMS 2 0 2 2 +SUMMONED 1 0 1 1 +SUMMON 1 0 1 1 +SUMMIT 1 0 1 1 +SUMMER 6 0 6 6 +SULTRY 1 0 1 1 +SULTAN 2 0 2 2 +SUITS 1 0 1 1 +SUITABLE 2 0 2 2 +SUICIDE 1 0 1 1 +SUGGESTED 2 0 2 2 +SUGAR 9 0 9 9 +SUFFICIENTLY 2 0 2 2 +SUFFERINGS 1 0 1 1 +SUFFERING 2 0 2 2 +SUFFERED 3 0 3 3 +SUFFER 1 0 1 1 +SUDDENLY 8 0 8 8 +SUCK 1 0 1 1 +SUCH 44 0 44 44 +SUCCUMBED 1 0 1 1 +SUCCESSIVELY 1 0 1 1 +SUCCESSIVE 1 0 1 1 +SUCCESSFULLY 1 0 1 1 +SUCCESSES 2 0 2 2 +SUCCESS 2 0 2 2 +SUCCEEDING 1 0 1 1 +SUCCEEDED 3 0 3 3 +SUCCEED 1 0 1 1 +SUBURB 1 0 1 1 +SUBSTITUTING 1 0 1 1 +SUBSTANCES 1 0 1 1 +SUBSTANCE 3 0 3 3 +SUBSISTENCE 1 0 1 1 +SUBSIDED 1 0 1 1 +SUBSEQUENT 1 0 1 1 +SUBORDINATED 1 0 1 1 +SUBMITTED 2 0 2 2 +SUBMISSIVE 1 0 1 1 +SUBMISSION 1 0 1 1 +SUBJECTS 6 0 6 6 +SUBJECTED 3 0 3 3 +SUBDUED 2 0 2 2 +STYLED 1 0 1 1 +STYLE 1 0 1 1 +STURDY 1 0 1 1 +STUPID 2 0 2 2 +STUMBLED 1 0 1 1 +STUFFS 1 0 1 1 +STUFF 1 0 1 1 +STUDYING 1 0 1 1 +STUDY 1 0 1 1 +STUDENTS 1 0 1 1 +STUDENT 2 0 2 2 +STUCK 2 0 2 2 +STRUGGLES 1 0 1 1 +STRUCTURE 1 0 1 1 +STROVE 1 0 1 1 +STRONGLY 1 0 1 1 +STRONGER 1 0 1 1 +STRONG 12 0 12 12 +STRODE 1 0 1 1 +STRIPPED 2 0 2 2 +STRIKING 1 0 1 1 +STRIDES 1 0 1 1 +STRICTLY 1 0 1 1 +STREWN 1 0 1 1 +STRETCHING 1 0 1 1 +STRETCHER 1 0 1 1 +STRENUOUSLY 1 0 1 1 +STRENGTHENED 2 0 2 2 +STRENGTH 12 0 12 12 +STREETS 2 0 2 2 +STREAM 4 0 4 4 +STRATAGEM 1 0 1 1 +STRANGER 3 0 3 3 +STRANGELY 1 0 1 1 +STRAINING 1 0 1 1 +STRAIGHTWAY 1 0 1 1 +STRAIGHTFORWARD 1 0 1 1 +STOUT 1 0 1 1 +STORY 9 0 9 9 +STORMED 1 0 1 1 +STORM 1 0 1 1 +STOREHOUSES 1 0 1 1 +STOPPING 3 0 3 3 +STOP 5 0 5 5 +STOOL 2 0 2 2 +STONES 4 0 4 4 +STOMACH 3 0 3 3 +STIRRED 1 0 1 1 +STIR 1 0 1 1 +STILE 1 0 1 1 +STIFLING 1 0 1 1 +STIFLED 2 0 2 2 +STIFLE 3 0 3 3 +STIFFNESS 1 0 1 1 +STIFF 2 0 2 2 +STEWART 1 0 1 1 +STEWARDS 1 0 1 1 +STEWARD 1 0 1 1 +STEPHEN 1 0 1 1 +STEPAN 2 0 2 2 +STENOGRAPHIC 1 0 1 1 +STEMS 1 0 1 1 +STEERAGE 2 0 2 2 +STEEP 1 0 1 1 +STEEL 1 0 1 1 +STEED 1 0 1 1 +STEAMED 1 0 1 1 +STEAMBOAT 2 0 2 2 +STEALTHILY 1 0 1 1 +STEAL 1 0 1 1 +STEADY 3 0 3 3 +STEADILY 1 0 1 1 +STATUES 3 0 3 3 +STATIONED 2 0 2 2 +STATION 2 0 2 2 +STATESMAN 1 0 1 1 +STATEMENT 3 0 3 3 +STAT 1 0 1 1 +STARVE 1 0 1 1 +STARTLING 1 0 1 1 +STARTLED 1 0 1 1 +STARTING 1 0 1 1 +STARTED 10 0 10 10 +STARES 1 0 1 1 +STARCHY 1 0 1 1 +STANLEY 2 0 2 2 +STANDPOINT 1 0 1 1 +STANDING 10 0 10 10 +STAND 7 0 7 7 +STAMPED 1 0 1 1 +STAMMERED 1 0 1 1 +STAMMER 1 0 1 1 +STAKED 1 0 1 1 +STAKE 1 0 1 1 +STAIRCASE 1 0 1 1 +STAGE 5 0 5 5 +STABLE 1 0 1 1 +SQUIRE 3 0 3 3 +SQUEEZE 1 0 1 1 +SQUEAKS 1 0 1 1 +SQUATTED 1 0 1 1 +SQUARE 2 0 2 2 +SQUALL 1 0 1 1 +SQUALID 1 0 1 1 +SQUAD 2 0 2 2 +SPYING 1 0 1 1 +SPRINKLES 1 0 1 1 +SPRINGS 1 0 1 1 +SPRING 4 0 4 4 +SPRIG 1 0 1 1 +SPREADS 1 0 1 1 +SPREAD 4 0 4 4 +SPRANG 3 0 3 3 +SPOTTED 1 0 1 1 +SPOT 6 0 6 6 +SPOON 1 0 1 1 +SPOKEN 2 0 2 2 +SPOKE 15 0 15 15 +SPOILS 1 0 1 1 +SPLIT 2 0 2 2 +SPITEFUL 1 0 1 1 +SPIT 1 0 1 1 +SPIRITUAL 1 0 1 1 +SPIRAL 1 0 1 1 +SPINSTER 1 0 1 1 +SPIDER 1 0 1 1 +SPHERE 1 0 1 1 +SPELL 1 0 1 1 +SPEEDILY 1 0 1 1 +SPEED 1 0 1 1 +SPECULATED 1 0 1 1 +SPECTATORS 1 0 1 1 +SPECTACLE 1 0 1 1 +SPECIES 1 0 1 1 +SPECIALTY 1 0 1 1 +SPECIAL 3 0 3 3 +SPEAKS 1 0 1 1 +SPEAKING 7 0 7 7 +SPEAKER 1 0 1 1 +SPEAK 15 0 15 15 +SPAWN 1 0 1 1 
+SPARROWS 1 0 1 1 +SPARK 1 0 1 1 +SPARING 1 0 1 1 +SPARE 1 0 1 1 +SPANKER 1 0 1 1 +SPANISH 1 0 1 1 +SPADES 1 0 1 1 +SOWING 1 0 1 1 +SOUP 1 0 1 1 +SOUNDS 2 0 2 2 +SOUNDED 3 0 3 3 +SOUND 12 0 12 12 +SOULS 2 0 2 2 +SORTS 4 0 4 4 +SORRY 3 0 3 3 +SORROWING 1 0 1 1 +SORROW 1 0 1 1 +SORELY 1 0 1 1 +SORE 1 0 1 1 +SORCERER 1 0 1 1 +SOOTH 1 0 1 1 +SOMEWHAT 5 0 5 5 +SOMETIMES 14 0 14 14 +SOMETHING'S 1 0 1 1 +SOMEHOW 3 0 3 3 +SOMEBODY 3 0 3 3 +SOLVE 1 0 1 1 +SOLUTION 4 0 4 4 +SOLUBLE 2 0 2 2 +SOLOMON 1 0 1 1 +SOLIDS 1 0 1 1 +SOLIDLY 1 0 1 1 +SOLID 1 0 1 1 +SOLICITUDE 1 0 1 1 +SOLEMNLY 1 0 1 1 +SOLEMNITY 1 0 1 1 +SOLEMN 1 0 1 1 +SOLDIERS 3 0 3 3 +SOLDIER 1 0 1 1 +SOLD 4 0 4 4 +SOLACE 1 0 1 1 +SOJOURN 2 0 2 2 +SOIL 2 0 2 2 +SOFTLY 2 0 2 2 +SODA 1 0 1 1 +SOCIETY 1 0 1 1 +SOCIETIES 1 0 1 1 +SOCIAL 12 0 12 12 +SOBERLY 1 0 1 1 +SOBER 4 0 4 4 +SOARING 1 0 1 1 +SOAK 1 0 1 1 +SNOOZING 1 0 1 1 +SNEEZE 2 0 2 2 +SNEERED 1 0 1 1 +SNEAKY 1 0 1 1 +SNATCHER 2 0 2 2 +SNATCH 1 0 1 1 +SNAKE 1 0 1 1 +SMUGGLERS 7 0 7 7 +SMUGGLED 1 0 1 1 +SMOULDERING 1 0 1 1 +SMOTE 2 0 2 2 +SMOKING 3 0 3 3 +SMOKERS 3 0 3 3 +SMOKED 2 0 2 2 +SMITH 1 0 1 1 +SMILING 2 0 2 2 +SMILED 1 0 1 1 +SMELT 1 0 1 1 +SMART 1 0 1 1 +SMALLEST 1 0 1 1 +SMALLER 1 0 1 1 +SLUMBER 2 0 2 2 +SLOWLY 6 0 6 6 +SLOW 3 0 3 3 +SLIPPING 1 0 1 1 +SLIPPER 1 0 1 1 +SLIP 3 0 3 3 +SLING 1 0 1 1 +SLIGHT 1 0 1 1 +SLICES 2 0 2 2 +SLEPT 3 0 3 3 +SLENDER 2 0 2 2 +SLEEVES 1 0 1 1 +SLEEPS 2 0 2 2 +SLEEPING 6 0 6 6 +SLEEPER 1 0 1 1 +SLEEP 15 0 15 15 +SLEDGE 1 0 1 1 +SLAYING 1 0 1 1 +SLAY 1 0 1 1 +SLAVES 2 0 2 2 +SLAVERY 1 0 1 1 +SLAVE 3 0 3 3 +SLAPPED 1 0 1 1 +SLAMMED 1 0 1 1 +SLAIN 2 0 2 2 +SKYLIGHT 2 0 2 2 +SKY 3 0 3 3 +SKULLS 1 0 1 1 +SKULL 1 0 1 1 +SKIRTS 1 0 1 1 +SKIRMISH 1 0 1 1 +SKIN 5 0 5 5 +SKIMMING 1 0 1 1 +SKILLED 1 0 1 1 +SKILFULLY 1 0 1 1 +SKIES 1 0 1 1 +SKETCH 1 0 1 1 +SIZE 5 0 5 5 +SIXTY 7 0 7 7 +SIXTH 5 0 5 5 +SIXTEEN 2 0 2 2 +SITUATION 1 0 1 1 +SITTING 3 0 3 3 +SITTETH 1 0 1 1 +SISTERS 4 0 4 4 +SISTERLY 1 0 1 1 +SISTER 8 0 8 8 +SINNED 1 0 1 1 +SINKS 1 0 1 1 +SINGULAR 2 0 2 2 +SINGLE 8 0 8 8 +SINGING 2 0 2 2 +SINGER 1 0 1 1 +SINGED 1 0 1 1 +SING 4 0 4 4 +SINCERITY 1 0 1 1 +SINCERE 1 0 1 1 +SINCE 17 0 17 17 +SIN 2 0 2 2 +SIMPLY 3 0 3 3 +SIMPLE 4 0 4 4 +SIMON 1 0 1 1 +SIMILAR 2 0 2 2 +SILVERWARE 1 0 1 1 +SILL 1 0 1 1 +SILK 1 0 1 1 +SILENTLY 2 0 2 2 +SILENT 9 0 9 9 +SILENCED 1 0 1 1 +SILAS 1 0 1 1 +SIGNS 2 0 2 2 +SIGNIFIES 1 0 1 1 +SIGNIFIED 1 0 1 1 +SIGNIFICANT 2 0 2 2 +SIGNIFICANCE 2 0 2 2 +SIGNATURE 1 0 1 1 +SIGNALS 2 0 2 2 +SIGNAL 7 0 7 7 +SIGHED 1 0 1 1 +SIGH 5 0 5 5 +SIFTED 1 0 1 1 +SIDEWAYS 1 0 1 1 +SIDEWALK 1 0 1 1 +SIDES 4 0 4 4 +SICK 2 0 2 2 +SHUTTING 1 0 1 1 +SHUTTERS 2 0 2 2 +SHUTTER 1 0 1 1 +SHUFFLE 1 0 1 1 +SHUDDER 1 0 1 1 +SHRUNK 1 0 1 1 +SHROUDED 1 0 1 1 +SHRINKING 1 0 1 1 +SHRILL 1 0 1 1 +SHRIEKING 1 0 1 1 +SHRIEKED 1 0 1 1 +SHOWS 2 0 2 2 +SHOWED 9 0 9 9 +SHOUTS 2 0 2 2 +SHOUTING 4 0 4 4 +SHOUTED 4 0 4 4 +SHOULDN'T 1 0 1 1 +SHORTLY 5 0 5 5 +SHORTER 1 0 1 1 +SHORT 8 0 8 8 +SHOPS 1 0 1 1 +SHOPPY 1 0 1 1 +SHOPPING 1 0 1 1 +SHOPKEEPERS 1 0 1 1 +SHOP 6 0 6 6 +SHOOTER 1 0 1 1 +SHOOT 6 0 6 6 +SHOOK 5 0 5 5 +SHONE 2 0 2 2 +SHOES 5 0 5 5 +SHIRTS 1 0 1 1 +SHIRKING 1 0 1 1 +SHIMMERING 1 0 1 1 +SHIFTY 1 0 1 1 +SHIFTED 2 0 2 2 +SHERRY 3 0 3 3 +SHERBURN'S 1 0 1 1 +SHERBURN 1 0 1 1 +SHELLS 4 0 4 4 +SHELF 1 0 1 1 +SHEILA 1 0 1 1 +SHEET 2 0 2 2 +SHEATH 1 0 1 1 +SHE'S 5 0 5 5 +SHAWL 1 0 1 1 +SHARPNESS 1 0 1 1 +SHARPLY 4 0 4 4 +SHARPENED 1 0 1 1 +SHARP 5 0 5 5 +SHARE 2 0 2 2 +SHAPES 1 0 1 1 +SHAPED 1 0 1 1 +SHAPE 3 0 3 3 +SHAME 2 0 2 2 +SHAM 1 0 1 1 +SHALT 7 0 7 7 +SHAKING 
1 0 1 1 +SHAHRAZAD 3 0 3 3 +SHAFTS 1 0 1 1 +SHADOWS 1 0 1 1 +SHADOW 5 0 5 5 +SEX 1 0 1 1 +SEVERSON 1 0 1 1 +SEVERELY 1 0 1 1 +SEVENTY 7 0 7 7 +SEVENTEEN 4 0 4 4 +SETTLED 4 0 4 4 +SETTLE 2 0 2 2 +SERVING 1 0 1 1 +SERVICES 1 0 1 1 +SERVICE 15 0 15 15 +SERVE 7 0 7 7 +SERVANTS 4 0 4 4 +SERVANT 4 0 4 4 +SERPENTS 2 0 2 2 +SERPENT 1 0 1 1 +SERIOUSLY 3 0 3 3 +SERIOUS 5 0 5 5 +SERENITY 1 0 1 1 +SEPULTURE 1 0 1 1 +SEPULCHRE 1 0 1 1 +SEPARATION 3 0 3 3 +SEPARATING 1 0 1 1 +SEPARATED 3 0 3 3 +SEPARATE 2 0 2 2 +SENTINELS 2 0 2 2 +SENTIMENTAL 1 0 1 1 +SENTIMENT 1 0 1 1 +SENTENCE 2 0 2 2 +SENSITIVE 2 0 2 2 +SENSIBLY 1 0 1 1 +SENSES 2 0 2 2 +SENSELESS 2 0 2 2 +SENSE 9 0 9 9 +SENSATION 1 0 1 1 +SENATOR 1 0 1 1 +SELL 4 0 4 4 +SEIZED 3 0 3 3 +SEES 1 0 1 1 +SEEMLY 1 0 1 1 +SEEKING 1 0 1 1 +SECURITY 7 0 7 7 +SECURE 5 0 5 5 +SECRETS 3 0 3 3 +SECRETLY 1 0 1 1 +SECRETARY 2 0 2 2 +SECRET 3 0 3 3 +SECONDS 1 0 1 1 +SECOND 15 0 15 15 +SEASONS 1 0 1 1 +SEASONED 1 0 1 1 +SEARCHINGLY 1 0 1 1 +SEARCHING 1 0 1 1 +SEARCHES 1 0 1 1 +SEARCH 6 0 6 6 +SEALED 2 0 2 2 +SCUTTLING 1 0 1 1 +SCUM 1 0 1 1 +SCULPTURE 1 0 1 1 +SCULPTOR 3 0 3 3 +SCRUPULOUSLY 1 0 1 1 +SCREW 1 0 1 1 +SCREEN 1 0 1 1 +SCREAM 1 0 1 1 +SCRATCHING 1 0 1 1 +SCRATCH 1 0 1 1 +SCRAPING 1 0 1 1 +SCRAPE 1 0 1 1 +SCOUNDREL 2 0 2 2 +SCOTCH 2 0 2 2 +SCISSORS 5 0 5 5 +SCIENTIFICALLY 1 0 1 1 +SCIENTIFIC 1 0 1 1 +SCHOOLMATE 1 0 1 1 +SCHOOLMASTER 5 0 5 5 +SCHEME 1 0 1 1 +SCENES 2 0 2 2 +SCENE 6 0 6 6 +SCATTER 1 0 1 1 +SCARRED 1 0 1 1 +SCARLET 1 0 1 1 +SCARED 1 0 1 1 +SCARCELY 4 0 4 4 +SCARCE 1 0 1 1 +SCANNING 1 0 1 1 +SCALES 1 0 1 1 +SAXON 2 0 2 2 +SAWYER 3 0 3 3 +SAVAGES 1 0 1 1 +SAVAGERY 1 0 1 1 +SAUCER 1 0 1 1 +SATURATED 1 0 1 1 +SATISFY 3 0 3 3 +SATISFIED 3 0 3 3 +SATISFACTORY 2 0 2 2 +SATISFACTORILY 1 0 1 1 +SATISFACTION 6 0 6 6 +SATIATED 1 0 1 1 +SATANICAL 1 0 1 1 +SATAN 1 0 1 1 +SANCTUARY 1 0 1 1 +SAMUEL 1 0 1 1 +SAMARIA 1 0 1 1 +SALUTED 2 0 2 2 +SALTS 1 0 1 1 +SALT 2 0 2 2 +SALOON 1 0 1 1 +SAITH 1 0 1 1 +SAINTS 3 0 3 3 +SAILING 2 0 2 2 +SAFETY 2 0 2 2 +SAFELY 2 0 2 2 +SAFE 8 0 8 8 +SADLY 4 0 4 4 +SACRIFICES 3 0 3 3 +SACRIFICE 5 0 5 5 +SACRED 1 0 1 1 +SACRAMENT 1 0 1 1 +SACK 1 0 1 1 +RUSTLING 2 0 2 2 +RUSTLE 1 0 1 1 +RUSSIAN 3 0 3 3 +RUNNING 3 0 3 3 +RUMBLING 1 0 1 1 +RULES 2 0 2 2 +RUINS 1 0 1 1 +RUINING 1 0 1 1 +RUINED 1 0 1 1 +RUFFIAN 1 0 1 1 +ROYAL 7 0 7 7 +ROW 2 0 2 2 +ROVER 1 0 1 1 +ROUSED 3 0 3 3 +ROUSE 1 0 1 1 +ROT 1 0 1 1 +ROSY 2 0 2 2 +ROSEMARY 2 0 2 2 +ROSA 1 0 1 1 +ROPE 3 0 3 3 +ROOMS 2 0 2 2 +ROOFS 1 0 1 1 +ROLLED 1 0 1 1 +ROCKS 1 0 1 1 +ROCKET 1 0 1 1 +ROBERT 1 0 1 1 +ROBED 1 0 1 1 +ROBBING 1 0 1 1 +ROBBERY 5 0 5 5 +ROBBERS 3 0 3 3 +ROBBERIES 2 0 2 2 +ROBBED 2 0 2 2 +ROASTING 1 0 1 1 +ROASTED 1 0 1 1 +ROAST 1 0 1 1 +ROARED 1 0 1 1 +ROADSIDE 1 0 1 1 +RIVERS 1 0 1 1 +RIVER 11 0 11 11 +RIVALRY 1 0 1 1 +RIVAL 2 0 2 2 +RISK 3 0 3 3 +RISING 5 0 5 5 +RISEN 1 0 1 1 +RISE 3 0 3 3 +RIP 2 0 2 2 +RIGOR 1 0 1 1 +RIGHTEOUSNESS 1 0 1 1 +RIGHTEOUS 1 0 1 1 +RIDICULOUS 1 0 1 1 +RIDDEN 1 0 1 1 +RICHER 1 0 1 1 +RICH 7 0 7 7 +RICE 1 0 1 1 +RHEUMATISM 1 0 1 1 +REWARDS 1 0 1 1 +REWARD 4 0 4 4 +REVOLUTIONARIES 1 0 1 1 +REVIVE 1 0 1 1 +REVIEW 1 0 1 1 +REVERSES 1 0 1 1 +REVENGES 1 0 1 1 +REVENGE 1 0 1 1 +REVELLED 1 0 1 1 +REVEL 1 0 1 1 +REVEALED 1 0 1 1 +RETREAT 1 0 1 1 +RETARDED 1 0 1 1 +RETAINED 2 0 2 2 +RESULT 2 0 2 2 +RESTS 1 0 1 1 +RESTRAIN 2 0 2 2 +RESTORED 2 0 2 2 +RESTAURANTS 1 0 1 1 +RESTAURANT 3 0 3 3 +RESPONSIBILITY 2 0 2 2 +RESPONDED 2 0 2 2 +RESPECTS 1 0 1 1 +RESPECTIVE 2 0 2 2 +RESPECTING 1 0 1 1 +RESPECTFULLY 3 0 3 3 +RESPECTFUL 1 0 1 1 +RESPECTED 1 0 1 1 
+RESPECTABLE 3 0 3 3 +RESPECT 4 0 4 4 +RESORTS 1 0 1 1 +RESORTED 1 0 1 1 +RESORT 1 0 1 1 +RESOLVING 1 0 1 1 +RESOLVE 1 0 1 1 +RESOLUTIONS 1 0 1 1 +RESOLUTION 2 0 2 2 +RESISTANCE 3 0 3 3 +RESIST 3 0 3 3 +RESIGNED 1 0 1 1 +RESIDUE 3 0 3 3 +RESIDENCE 2 0 2 2 +RESIDE 1 0 1 1 +RESERVOIR 1 0 1 1 +RESERVE 1 0 1 1 +RESEMBLING 1 0 1 1 +RESEMBLES 1 0 1 1 +RESEMBLE 1 0 1 1 +RESEARCHES 1 0 1 1 +REQUIRING 1 0 1 1 +REQUIRES 1 0 1 1 +REQUIRED 3 0 3 3 +REQUESTED 2 0 2 2 +REPUTATIONS 1 0 1 1 +REPROACH 2 0 2 2 +REPRESENTED 3 0 3 3 +REPORTED 1 0 1 1 +REPORT 2 0 2 2 +REPEATING 1 0 1 1 +REPEAT 1 0 1 1 +REPAST 1 0 1 1 +REPARATION 1 0 1 1 +REPAIRED 2 0 2 2 +REPAIR 1 0 1 1 +RENOUNCE 3 0 3 3 +RENEWED 2 0 2 2 +RENDERS 1 0 1 1 +RENDERED 1 0 1 1 +RENDER 1 0 1 1 +REMOVED 3 0 3 3 +REMOVE 3 0 3 3 +REMOVAL 1 0 1 1 +REMOTE 1 0 1 1 +REMORSEFUL 1 0 1 1 +REMONSTRANCE 1 0 1 1 +REMNANTS 1 0 1 1 +REMNANT 1 0 1 1 +REMINISCENCES 1 0 1 1 +REMEMBERING 2 0 2 2 +REMEMBERED 4 0 4 4 +REMEDY 4 0 4 4 +REMARKS 1 0 1 1 +REMARKABLY 1 0 1 1 +REMARKABLE 2 0 2 2 +RELYING 1 0 1 1 +RELUCTANTLY 2 0 2 2 +RELUCTANCE 1 0 1 1 +RELINQUISH 1 0 1 1 +RELIGIONS 1 0 1 1 +RELIGION 11 0 11 11 +RELIEVED 1 0 1 1 +RELIEF 6 0 6 6 +RELIED 1 0 1 1 +RELIC 1 0 1 1 +RELEVANT 1 0 1 1 +RELEASE 2 0 2 2 +RELAXING 1 0 1 1 +RELATIONS 2 0 2 2 +REJOINED 1 0 1 1 +REJOINDER 1 0 1 1 +REJOICING 1 0 1 1 +REJOICED 3 0 3 3 +REGRETTING 1 0 1 1 +REGISTER 1 0 1 1 +REGION 1 0 1 1 +REGIMENTS 2 0 2 2 +REGARDED 2 0 2 2 +REGARD 2 0 2 2 +REGAINED 1 0 1 1 +REGAIN 2 0 2 2 +REFUTATION 1 0 1 1 +REFUSING 2 0 2 2 +REFUSES 2 0 2 2 +REFUSED 1 0 1 1 +REFRESHMENT 1 0 1 1 +REFRAIN 2 0 2 2 +REFORMS 1 0 1 1 +REFORM 6 0 6 6 +REFLECTIVE 1 0 1 1 +REFLECTIONS 1 0 1 1 +REFLECTION 3 0 3 3 +REFINED 2 0 2 2 +REFERRED 2 0 2 2 +REFER 1 0 1 1 +REEF 1 0 1 1 +REDOUBLING 1 0 1 1 +REDEMPTION 1 0 1 1 +REDEEMING 1 0 1 1 +RECTOR 1 0 1 1 +RECRUITS 1 0 1 1 +RECOVERY 1 0 1 1 +RECOVERED 1 0 1 1 +RECOVER 3 0 3 3 +RECOURSE 1 0 1 1 +RECOUNTED 1 0 1 1 +RECORD 2 0 2 2 +RECOMPENSE 2 0 2 2 +RECOMMEND 2 0 2 2 +RECOLLECTING 1 0 1 1 +RECOLLECTED 1 0 1 1 +RECOLLECT 1 0 1 1 +RECOILED 1 0 1 1 +RECOGNIZED 5 0 5 5 +RECOGNITION 2 0 2 2 +RECKON 4 0 4 4 +RECITING 1 0 1 1 +RECITER 2 0 2 2 +RECITE 2 0 2 2 +RECIPE 2 0 2 2 +RECEPTION 1 0 1 1 +RECENTLY 1 0 1 1 +RECEIVED 9 0 9 9 +RECEIVE 4 0 4 4 +RECEIPT 1 0 1 1 +RECEDING 1 0 1 1 +RECALLING 1 0 1 1 +RECALLED 1 0 1 1 +RECALL 1 0 1 1 +REBECCA 1 0 1 1 +REASONABLE 2 0 2 2 +REASON 11 0 11 11 +REAR 1 0 1 1 +REAPING 1 0 1 1 +REALM 1 0 1 1 +REALLY 18 0 18 18 +REALIZE 1 0 1 1 +REALITY 3 0 3 3 +REAL 3 0 3 3 +READERS 1 0 1 1 +READER 1 0 1 1 +REACHING 2 0 2 2 +REACH 4 0 4 4 +RAWNESS 1 0 1 1 +RAVING 1 0 1 1 +RAVENING 1 0 1 1 +RAVAGED 1 0 1 1 +RATTLED 1 0 1 1 +RATTLE 1 0 1 1 +RATCHFORD 1 0 1 1 +RASHID 1 0 1 1 +RASCAL 3 0 3 3 +RARE 1 0 1 1 +RAPIDLY 4 0 4 4 +RAP 1 0 1 1 +RANKS 1 0 1 1 +RAMBLER 1 0 1 1 +RAMBLE 1 0 1 1 +RAM'S 1 0 1 1 +RAISING 2 0 2 2 +RAINS 1 0 1 1 +RAINBOWS 1 0 1 1 +RAILROAD 1 0 1 1 +RAIDERS 1 0 1 1 +RAFTER 1 0 1 1 +RAFT 6 0 6 6 +RADICALS 1 0 1 1 +RADIANT 1 0 1 1 +RACKETS 1 0 1 1 +RACK 1 0 1 1 +RACE 2 0 2 2 +RABBIT 5 0 5 5 +R 1 0 1 1 +QUOTH 5 0 5 5 +QUOTED 1 0 1 1 +QUIXOTE 5 0 5 5 +QUIVERED 1 0 1 1 +QUIVER 1 0 1 1 +QUIT 1 0 1 1 +QUIETLY 4 0 4 4 +QUIET 1 0 1 1 +QUICKER 3 0 3 3 +QUICKENETH 1 0 1 1 +QUESTIONS 6 0 6 6 +QUESTIONED 1 0 1 1 +QUESTIONABLE 1 0 1 1 +QUESTION 15 0 15 15 +QUENCH 1 0 1 1 +QUEER 4 0 4 4 +QUEENS 1 0 1 1 +QUEEN'S 1 0 1 1 +QUARTERS 3 0 3 3 +QUARTER 7 0 7 7 +QUART 1 0 1 1 +QUARRELS 1 0 1 1 +QUANTITY 3 0 3 3 +QUALITY 1 0 1 1 +PUZZLED 2 0 2 2 +PUSHING 1 0 1 1 +PUSHED 1 0 1 1 +PURSUIT 
1 0 1 1 +PURSUED 3 0 3 3 +PURSUANCE 1 0 1 1 +PURPOSES 1 0 1 1 +PURPOSE 5 0 5 5 +PURITAN 2 0 2 2 +PURIFY 1 0 1 1 +PURE 4 0 4 4 +PURCHASED 1 0 1 1 +PUNISHMENTS 1 0 1 1 +PUNISHMENT 1 0 1 1 +PUNISHES 1 0 1 1 +PUNISHED 1 0 1 1 +PUNISH 1 0 1 1 +PUNCTUALITY 1 0 1 1 +PUNCTILIOUS 1 0 1 1 +PULP 1 0 1 1 +PUFFING 1 0 1 1 +PUFFED 1 0 1 1 +PUDDINGS 1 0 1 1 +PUBLISHER 1 0 1 1 +PUBLIC 5 0 5 5 +PRYING 2 0 2 2 +PRUDENCE 4 0 4 4 +PROW 1 0 1 1 +PROVOKE 1 0 1 1 +PROVISION 1 0 1 1 +PROVINCIAL 1 0 1 1 +PROVINCE 4 0 4 4 +PROVIDENCES 1 0 1 1 +PROVIDENCE 1 0 1 1 +PROVIDED 2 0 2 2 +PROVIDE 1 0 1 1 +PROVERBIAL 1 0 1 1 +PROVEN 1 0 1 1 +PROUD 2 0 2 2 +PROTESTED 2 0 2 2 +PROTECTS 1 0 1 1 +PROTECTORS 1 0 1 1 +PROTECTION 2 0 2 2 +PROTECT 2 0 2 2 +PROSPEROUS 1 0 1 1 +PROPRIETORS 1 0 1 1 +PROPOSITION 1 0 1 1 +PROPOSES 1 0 1 1 +PROPOSED 3 0 3 3 +PROPOSALS 1 0 1 1 +PROPORTION 3 0 3 3 +PROPHET 1 0 1 1 +PROPERTY 2 0 2 2 +PROPERLY 2 0 2 2 +PROOF 5 0 5 5 +PRONOUNCED 1 0 1 1 +PROMPTLY 3 0 3 3 +PROMPT 1 0 1 1 +PROMISING 1 0 1 1 +PROMISED 7 0 7 7 +PROMISE 4 0 4 4 +PROLONGED 1 0 1 1 +PROJECT 1 0 1 1 +PROHIBITED 1 0 1 1 +PROHIBIT 1 0 1 1 +PROGRESS 1 0 1 1 +PROGRAMME 1 0 1 1 +PROFUSION 1 0 1 1 +PROFOUND 1 0 1 1 +PROFLIGATE 1 0 1 1 +PROFITABLY 1 0 1 1 +PROFITABLE 1 0 1 1 +PROFIT 2 0 2 2 +PROFESSIONAL 2 0 2 2 +PROFANITY 1 0 1 1 +PROFANE 1 0 1 1 +PRODUCTIONS 1 0 1 1 +PRODUCING 1 0 1 1 +PROCURE 2 0 2 2 +PROCOPIUS 1 0 1 1 +PROCESSIONS 1 0 1 1 +PROCESSION 1 0 1 1 +PROCESS 6 0 6 6 +PROCEEDINGS 2 0 2 2 +PROCEEDED 1 0 1 1 +PROCEDURE 1 0 1 1 +PROBLEMS 1 0 1 1 +PROBLEM 1 0 1 1 +PROBABLY 7 0 7 7 +PROBABLE 1 0 1 1 +PROBABILITY 1 0 1 1 +PRIVILEGE 1 0 1 1 +PRIVATE 6 0 6 6 +PRIVACY 1 0 1 1 +PRISONERS 3 0 3 3 +PRISONER 13 0 13 13 +PRISON 5 0 5 5 +PRINT 1 0 1 1 +PRINCIPLE 3 0 3 3 +PRINCIPALLY 1 0 1 1 +PRINCIPAL 1 0 1 1 +PRINCESS 11 0 11 11 +PRINCES 1 0 1 1 +PRINCE'S 2 0 2 2 +PRINCE 7 0 7 7 +PRIDE 2 0 2 2 +PREVIOUSLY 1 0 1 1 +PREVENTED 1 0 1 1 +PREVENT 1 0 1 1 +PREVAILING 1 0 1 1 +PREVAILED 1 0 1 1 +PRETTILY 1 0 1 1 +PRETTIEST 1 0 1 1 +PRETEXT 1 0 1 1 +PRETENDED 1 0 1 1 +PRETEND 3 0 3 3 +PRESUMPTUOUS 1 0 1 1 +PRESSURE 4 0 4 4 +PRESSING 3 0 3 3 +PRESSED 2 0 2 2 +PRESS 2 0 2 2 +PRESERVING 5 0 5 5 +PRESERVES 3 0 3 3 +PRESERVED 2 0 2 2 +PRESENTS 4 0 4 4 +PRESENTLY 12 0 12 12 +PRESENTING 1 0 1 1 +PRESENTED 3 0 3 3 +PRESENCE 9 0 9 9 +PREPARING 3 0 3 3 +PREPARED 7 0 7 7 +PREPARE 1 0 1 1 +PREPARATIONS 5 0 5 5 +PREOCCUPIED 1 0 1 1 +PREMISES 1 0 1 1 +PRELIMINARIES 1 0 1 1 +PREFERRED 1 0 1 1 +PREFER 2 0 2 2 +PREDICTIONS 1 0 1 1 +PRECIPITANCY 1 0 1 1 +PRECIOUS 2 0 2 2 +PRECINCT 1 0 1 1 +PRECEPTORY 2 0 2 2 +PRECAUTION 1 0 1 1 +PREACHING 2 0 2 2 +PREACHER 1 0 1 1 +PRAYER 6 0 6 6 +PRAYED 3 0 3 3 +PRAISES 1 0 1 1 +PRAISE 3 0 3 3 +PRACTISE 1 0 1 1 +PRACTICED 1 0 1 1 +PRACTICE 1 0 1 1 +PRACTICALLY 2 0 2 2 +POWERS 3 0 3 3 +POWERLESS 1 0 1 1 +POWERFUL 3 0 3 3 +POWER 27 0 27 27 +POWDERED 1 0 1 1 +POURS 1 0 1 1 +POURING 1 0 1 1 +POUR 1 0 1 1 +POUND 3 0 3 3 +POUNCE 1 0 1 1 +POTS 1 0 1 1 +POTASSIC 1 0 1 1 +POTASH 1 0 1 1 +POSTERS 1 0 1 1 +POSSIBLY 3 0 3 3 +POSSIBLE 12 0 12 12 +POSSIBILITY 2 0 2 2 +POSSESSION 2 0 2 2 +POSSESSES 1 0 1 1 +POSSESSED 5 0 5 5 +POSSESS 1 0 1 1 +POSSE 1 0 1 1 +POSITIVELY 3 0 3 3 +POSITIVE 1 0 1 1 +PORTIONS 2 0 2 2 +PORTION 2 0 2 2 +PORTER 2 0 2 2 +POPULATION 1 0 1 1 +POPULARITY 1 0 1 1 +POPULAR 1 0 1 1 +POPPED 1 0 1 1 +POPES 2 0 2 2 +POPE'S 1 0 1 1 +POPE 1 0 1 1 +POP 1 0 1 1 +PONY 1 0 1 1 +POLLY'S 3 0 3 3 +POLITICIANS 1 0 1 1 +POLITICAL 3 0 3 3 +POLICE 5 0 5 5 +POKING 1 0 1 1 +POKED 1 0 1 1 +POISONS 1 0 1 1 +POISONING 3 0 3 3 +POINTING 2 0 2 2 +POINTED 
3 0 3 3 +POETRY 2 0 2 2 +POCKETS 1 0 1 1 +POCKETED 1 0 1 1 +PO 1 0 1 1 +PLUNGED 3 0 3 3 +PLUNDERED 1 0 1 1 +PLUG 1 0 1 1 +PLUCKING 2 0 2 2 +PLUCK 1 0 1 1 +PLEDGED 1 0 1 1 +PLEDGE 1 0 1 1 +PLEASURES 2 0 2 2 +PLEASING 2 0 2 2 +PLEASED 4 0 4 4 +PLEASANTER 1 0 1 1 +PLEADINGS 1 0 1 1 +PLEAD 1 0 1 1 +PLAYERS 1 0 1 1 +PLATTERS 1 0 1 1 +PLATFORM 2 0 2 2 +PLASTER 1 0 1 1 +PLANTS 1 0 1 1 +PLANTATIONS 2 0 2 2 +PLANS 5 0 5 5 +PLANNED 1 0 1 1 +PLANKS 2 0 2 2 +PLANK 3 0 3 3 +PLANETS 1 0 1 1 +PLANET 2 0 2 2 +PLAN 1 0 1 1 +PLAINLY 3 0 3 3 +PLAGUE 2 0 2 2 +PLACING 2 0 2 2 +PITY 4 0 4 4 +PITIFULNESS 1 0 1 1 +PIT 1 0 1 1 +PISTOLS 1 0 1 1 +PIPING 1 0 1 1 +PIOUS 1 0 1 1 +PINT 1 0 1 1 +PINK 1 0 1 1 +PINED 1 0 1 1 +PINCH 1 0 1 1 +PIN 1 0 1 1 +PILLOW 1 0 1 1 +PILED 1 0 1 1 +PILE 1 0 1 1 +PIG 1 0 1 1 +PIERCED 1 0 1 1 +PIECES 9 0 9 9 +PICTURES 3 0 3 3 +PICKET 1 0 1 1 +PICK 5 0 5 5 +PIAZZA 1 0 1 1 +PHYSIOLOGICAL 1 0 1 1 +PHYSICIAN 3 0 3 3 +PHYSICAL 2 0 2 2 +PHRASE 1 0 1 1 +PHONE 1 0 1 1 +PHLEGMATIC 1 0 1 1 +PHILOSOPHERS 1 0 1 1 +PHELPS 2 0 2 2 +PHARMACY 1 0 1 1 +PEYTON 1 0 1 1 +PETITIONS 1 0 1 1 +PETERS 1 0 1 1 +PET 2 0 2 2 +PERVADED 1 0 1 1 +PERUSING 1 0 1 1 +PERUSAL 1 0 1 1 +PERSUADED 1 0 1 1 +PERSUADE 1 0 1 1 +PERSPECTIVE 1 0 1 1 +PERSONS 8 0 8 8 +PERSONALLY 5 0 5 5 +PERSONAL 2 0 2 2 +PERSONAGE 1 0 1 1 +PERSON'S 1 0 1 1 +PERSON 16 0 16 16 +PERSISTED 3 0 3 3 +PERSIST 1 0 1 1 +PERSECUTORS 1 0 1 1 +PERSECUTION 1 0 1 1 +PERSECUTED 1 0 1 1 +PERSECUTE 2 0 2 2 +PERPLEXITY 1 0 1 1 +PERPETUALLY 1 0 1 1 +PERMITTED 5 0 5 5 +PERMIT 4 0 4 4 +PERMISSION 2 0 2 2 +PERMANENT 1 0 1 1 +PERISHED 3 0 3 3 +PERISH 4 0 4 4 +PERIODS 1 0 1 1 +PERIOD 2 0 2 2 +PERILS 1 0 1 1 +PERFORMANCES 1 0 1 1 +PERFORM 2 0 2 2 +PERFECTION 2 0 2 2 +PERFECT 5 0 5 5 +PERCHED 1 0 1 1 +PERCH 1 0 1 1 +PERCEPTIBLE 1 0 1 1 +PERCEIVED 7 0 7 7 +PERCEIVE 1 0 1 1 +PER 1 0 1 1 +PEPPINO 1 0 1 1 +PEPPER 2 0 2 2 +PEOPLE'S 3 0 3 3 +PEOPLE 44 0 44 44 +PENNY 1 0 1 1 +PENETRATING 1 0 1 1 +PENETRATE 1 0 1 1 +PENALTY 1 0 1 1 +PELT 1 0 1 1 +PEERS 1 0 1 1 +PEERED 1 0 1 1 +PEDESTAL 1 0 1 1 +PECULIAR 2 0 2 2 +PEBBLES 3 0 3 3 +PEASANT 3 0 3 3 +PEARLS 1 0 1 1 +PEALS 1 0 1 1 +PEAL 1 0 1 1 +PEACEFUL 2 0 2 2 +PEABODY 1 0 1 1 +PAYS 1 0 1 1 +PAYING 1 0 1 1 +PAVILION 1 0 1 1 +PAVEMENT 1 0 1 1 +PAUSE 4 0 4 4 +PATRIOTS 1 0 1 1 +PATRIOTISM 1 0 1 1 +PATRIOT 1 0 1 1 +PATRIMONY 1 0 1 1 +PATRIARCHS 1 0 1 1 +PATIENTLY 1 0 1 1 +PATIENT'S 1 0 1 1 +PATIENT 2 0 2 2 +PATHS 1 0 1 1 +PASTE 1 0 1 1 +PASSION 5 0 5 5 +PASSERS 2 0 2 2 +PASSENGERS 4 0 4 4 +PASSAGE 2 0 2 2 +PASSABLE 1 0 1 1 +PARTY 13 0 13 13 +PARTNER 1 0 1 1 +PARTISANS 1 0 1 1 +PARTIES 1 0 1 1 +PARTICULARS 1 0 1 1 +PARTICULARLY 5 0 5 5 +PARTICULAR 4 0 4 4 +PARTICLE 2 0 2 2 +PARTICIPATION 1 0 1 1 +PARTICIPANTS 1 0 1 1 +PARTAKE 1 0 1 1 +PARSLEY 1 0 1 1 +PARLIAMENTARY 1 0 1 1 +PARK 1 0 1 1 +PARISIAN 1 0 1 1 +PARIS 5 0 5 5 +PARENTS 2 0 2 2 +PARCEL 2 0 2 2 +PARASOL 1 0 1 1 +PARALLEL 1 0 1 1 +PARADISE 1 0 1 1 +PAPERS 4 0 4 4 +PAPER 6 0 6 6 +PAPA 4 0 4 4 +PANZA 1 0 1 1 +PANTING 2 0 2 2 +PANS 1 0 1 1 +PALINGS 1 0 1 1 +PALESTINE 2 0 2 2 +PALER 1 0 1 1 +PALE 8 0 8 8 +PALACE 6 0 6 6 +PAINTING 1 0 1 1 +PAINTER 1 0 1 1 +PAINT 1 0 1 1 +PAINFULLY 1 0 1 1 +PAINFUL 5 0 5 5 +PAINED 1 0 1 1 +PAIN 3 0 3 3 +PAID 9 0 9 9 +PAGES 4 0 4 4 +PAGE 2 0 2 2 +PADDLE 2 0 2 2 +PACKED 1 0 1 1 +PACIFY 1 0 1 1 +PACIFIC 2 0 2 2 +OXEN 1 0 1 1 +OX 1 0 1 1 +OVERWHELMED 2 0 2 2 +OVERTURNING 1 0 1 1 +OVERTHREW 1 0 1 1 +OVERTAKEN 1 0 1 1 +OVERLY 1 0 1 1 +OVERHEAD 2 0 2 2 +OVERCOME 2 0 2 2 +OVERCOAT 1 0 1 1 +OVEN 2 0 2 2 +OVAL 1 0 1 1 +OUTWARD 1 0 1 1 +OUTSIDE 8 0 8 8 +OUTLAWS 1 0 1 1 
+OUTFIT 1 0 1 1 +OUTDO 1 0 1 1 +OURSELVES 4 0 4 4 +OTTER 1 0 1 1 +OTHO 1 0 1 1 +OTHERWISE 1 0 1 1 +OSTRICH 1 0 1 1 +ORTHODOX 1 0 1 1 +ORPHAN 1 0 1 1 +ORNERY 1 0 1 1 +ORNERIEST 1 0 1 1 +ORNAMENTED 1 0 1 1 +ORNAMENTAL 1 0 1 1 +ORLEANS 1 0 1 1 +ORISON 1 0 1 1 +ORIGIN 1 0 1 1 +ORGANIZATION 3 0 3 3 +ORDINARY 1 0 1 1 +ORDERED 5 0 5 5 +ORDEAL 1 0 1 1 +ORDAINED 1 0 1 1 +ORCHARDS 1 0 1 1 +ORANGE 1 0 1 1 +ORACLE 2 0 2 2 +OPTIC 1 0 1 1 +OPPRESSOR 1 0 1 1 +OPPRESSION 3 0 3 3 +OPPRESSED 2 0 2 2 +OPPOSITE 3 0 3 3 +OPPOSE 1 0 1 1 +OPPORTUNITY 4 0 4 4 +OPINION 3 0 3 3 +OPERATIONS 1 0 1 1 +OPERATED 1 0 1 1 +OPERA 1 0 1 1 +OPENING 7 0 7 7 +OPAQUE 1 0 1 1 +ONWARDS 1 0 1 1 +ONWARD 1 0 1 1 +ONION 2 0 2 2 +ONESELF 1 0 1 1 +OMITTING 1 0 1 1 +OMAR 2 0 2 2 +OLDISH 1 0 1 1 +OLDER 1 0 1 1 +OGRE'S 1 0 1 1 +OGRE 3 0 3 3 +OGLING 1 0 1 1 +OFFICIAL 1 0 1 1 +OFFICE 4 0 4 4 +OFFERS 3 0 3 3 +OFFERINGS 1 0 1 1 +OFFERING 1 0 1 1 +OFFERED 3 0 3 3 +OFFER 2 0 2 2 +OFFENSIVE 1 0 1 1 +OFFEND 2 0 2 2 +OCEAN 1 0 1 1 +OCCURRED 5 0 5 5 +OCCUR 1 0 1 1 +OCCUPY 3 0 3 3 +OCCASIONS 2 0 2 2 +OCCASION 5 0 5 5 +OBVIOUSLY 1 0 1 1 +OBVIOUS 1 0 1 1 +OBTAINED 1 0 1 1 +OBTAIN 3 0 3 3 +OBSTRUCTION 1 0 1 1 +OBSTINATE 1 0 1 1 +OBSTINACY 1 0 1 1 +OBSERVER 1 0 1 1 +OBSERVE 1 0 1 1 +OBSERVATIONS 2 0 2 2 +OBSERVATION 1 0 1 1 +OBSERVANT 1 0 1 1 +OBLONG 1 0 1 1 +OBLIGED 3 0 3 3 +OBLIGE 1 0 1 1 +OBLIGATION 3 0 3 3 +OBJECTS 2 0 2 2 +OBJECTION 2 0 2 2 +OBEYING 1 0 1 1 +OBEYED 3 0 3 3 +OBEDIENTLY 1 0 1 1 +OBEDIENT 2 0 2 2 +OATH 4 0 4 4 +OAR 1 0 1 1 +O'CLOCK 9 0 9 9 +NUTRITION 2 0 2 2 +NUN 1 0 1 1 +NUMBERED 1 0 1 1 +NUMBER 5 0 5 5 +NUBIAN 1 0 1 1 +NOWHERE 2 0 2 2 +NOTWITHSTANDING 1 0 1 1 +NOTORIOUS 1 0 1 1 +NOTION 1 0 1 1 +NOTHING 28 0 28 28 +NOTES 1 0 1 1 +NOTED 2 0 2 2 +NOTABLES 1 0 1 1 +NOSE 3 0 3 3 +NORTHWARD 1 0 1 1 +NORTHERN 1 0 1 1 +NORMAN 2 0 2 2 +NOON 1 0 1 1 +NONSENSE 2 0 2 2 +NODDING 1 0 1 1 +NODDED 3 0 3 3 +NOD 2 0 2 2 +NOBODY 6 0 6 6 +NOBLEMEN 1 0 1 1 +NOBLEMAN 1 0 1 1 +NOBLE 6 0 6 6 +NOBILITY 1 0 1 1 +NIPPER 1 0 1 1 +NINTH 1 0 1 1 +NINEVEH 1 0 1 1 +NINETY 2 0 2 2 +NINETEENTH 1 0 1 1 +NINETEEN 1 0 1 1 +NIMBLENESS 1 0 1 1 +NIGHTS 2 0 2 2 +NICOTINE 1 0 1 1 +NICK 1 0 1 1 +NICHOLAS 1 0 1 1 +NICETIES 1 0 1 1 +NICE 3 0 3 3 +NEVERTHELESS 3 0 3 3 +NEVER 61 0 61 61 +NERVOUSNESS 2 0 2 2 +NERVOUSLY 1 0 1 1 +NERVOUS 2 0 2 2 +NEMESIS 3 0 3 3 +NEITHER 8 0 8 8 +NEIGHBOURS 1 0 1 1 +NEIGHBOURHOOD 1 0 1 1 +NEGRO 2 0 2 2 +NEGLECTING 1 0 1 1 +NEGLECTED 2 0 2 2 +NEGLECT 1 0 1 1 +NEGATIVE 1 0 1 1 +NEEDN'T 1 0 1 1 +NECK 1 0 1 1 +NECESSITY 7 0 7 7 +NECESSARY 10 0 10 10 +NECESSARILY 2 0 2 2 +NEATLY 2 0 2 2 +NEARLY 2 0 2 2 +NEARING 1 0 1 1 +NEARED 1 0 1 1 +NAY 2 0 2 2 +NAVEL 1 0 1 1 +NAUSEA 1 0 1 1 +NATURED 2 0 2 2 +NATURE 11 0 11 11 +NATTY 1 0 1 1 +NATIVE 6 0 6 6 +NATIONS 3 0 3 3 +NATION 2 0 2 2 +NARROWNESS 1 0 1 1 +NARROWER 1 0 1 1 +NARROW 5 0 5 5 +NARRATOR 1 0 1 1 +NARRATE 1 0 1 1 +NAPKINS 1 0 1 1 +NAPKIN 1 0 1 1 +NAMES 5 0 5 5 +NAMED 5 0 5 5 +NAME'S 1 0 1 1 +NAME 21 0 21 21 +NAILS 2 0 2 2 +NAILED 2 0 2 2 +NAIL 5 0 5 5 +MYSELF 27 0 27 27 +MUTTERED 5 0 5 5 +MUSICAL 1 0 1 1 +MUSIC 2 0 2 2 +MURMURED 1 0 1 1 +MURMUR 1 0 1 1 +MURDERED 1 0 1 1 +MURDER 5 0 5 5 +MULTITUDE 1 0 1 1 +MULE 1 0 1 1 +MUFFLED 1 0 1 1 +MUDDY 1 0 1 1 +MUCOUS 1 0 1 1 +MOVING 6 0 6 6 +MOVEMENTS 2 0 2 2 +MOVED 7 0 7 7 +MOVE 1 0 1 1 +MOURNFULLY 1 0 1 1 +MOUNTED 2 0 2 2 +MOUNTAINS 1 0 1 1 +MOUNTAIN 1 0 1 1 +MOTOR 1 0 1 1 +MOTLEY 1 0 1 1 +MOTIVES 1 0 1 1 +MOTIVE 1 0 1 1 +MOTHERS 5 0 5 5 +MOTHER'S 3 0 3 3 +MOSTLY 2 0 2 2 +MORTAR 1 0 1 1 +MORTAL 1 0 1 1 +MORPHINE 2 0 2 2 +MOREOVER 1 0 1 1 +MORCERF 3 0 3 3 
+MORALS 1 0 1 1 +MORAL 8 0 8 8 +MOPPED 1 0 1 1 +MOORED 1 0 1 1 +MOONFLOWERS 1 0 1 1 +MOON 2 0 2 2 +MOOD 2 0 2 2 +MONTH 1 0 1 1 +MONTESQUIEU 1 0 1 1 +MONSTROUS 1 0 1 1 +MONSTERS 2 0 2 2 +MONOTONOUS 1 0 1 1 +MONKEY 3 0 3 3 +MONEY 16 0 16 16 +MONDAY 2 0 2 2 +MONASTERY 1 0 1 1 +MONARCH 2 0 2 2 +MOMENTS 6 0 6 6 +MOMENT'S 1 0 1 1 +MOLESTED 1 0 1 1 +MOHAMMED 1 0 1 1 +MODEST 1 0 1 1 +MODERN 2 0 2 2 +MODEL 2 0 2 2 +MOCKERY 1 0 1 1 +MOB 1 0 1 1 +MOANING 2 0 2 2 +MIXING 1 0 1 1 +MISTRUST 1 0 1 1 +MISTRESSES 1 0 1 1 +MISTRESS 6 0 6 6 +MISTAKE 2 0 2 2 +MISSOURI 1 0 1 1 +MISSISSIPPIAN 1 0 1 1 +MISSISSIPPI 1 0 1 1 +MISSING 3 0 3 3 +MISFORTUNE 1 0 1 1 +MISERY 3 0 3 3 +MISERABLE 2 0 2 2 +MISCONDUCT 1 0 1 1 +MISCONCEPTION 1 0 1 1 +MISCHIEF 1 0 1 1 +MISAPPREHENSION 1 0 1 1 +MISANTHROPY 1 0 1 1 +MIRTH 2 0 2 2 +MIRACULOUS 1 0 1 1 +MIRACLES 3 0 3 3 +MIRABELLE 2 0 2 2 +MINUTES 11 0 11 11 +MINUTE 6 0 6 6 +MINNESOTA 1 0 1 1 +MINISTERED 1 0 1 1 +MINIMS 1 0 1 1 +MINIATURE 1 0 1 1 +MINGLED 2 0 2 2 +MINDS 1 0 1 1 +MINDED 1 0 1 1 +MINCE 1 0 1 1 +MILTON 4 0 4 4 +MILLIONS 1 0 1 1 +MILLER'S 1 0 1 1 +MILLER 4 0 4 4 +MILL 1 0 1 1 +MILITARY 5 0 5 5 +MILES 6 0 6 6 +MILE 5 0 5 5 +MIKE'S 1 0 1 1 +MIGHTY 3 0 3 3 +MIGHTINESS 1 0 1 1 +MIGHT 43 0 43 43 +MIDST 3 0 3 3 +MIDSHIPMAN 1 0 1 1 +MIDNIGHT 3 0 3 3 +MIDDY'S 1 0 1 1 +MIDDLE 5 0 5 5 +MICROSCOPIC 1 0 1 1 +MICROBE 1 0 1 1 +METALLIC 1 0 1 1 +MESSAGE 1 0 1 1 +MESELF 1 0 1 1 +MERITS 1 0 1 1 +MERE 3 0 3 3 +MERCY 5 0 5 5 +MERCURY 2 0 2 2 +MERCIFUL 1 0 1 1 +MERCIES 1 0 1 1 +MERCHANTS 6 0 6 6 +MERCHANT 3 0 3 3 +MENTION 1 0 1 1 +MENTALLY 3 0 3 3 +MENACING 1 0 1 1 +MEMORY 4 0 4 4 +MEMORIAL 1 0 1 1 +MEMBRANE 1 0 1 1 +MEMBERS 7 0 7 7 +MEMBER 1 0 1 1 +MELANCHOLY 1 0 1 1 +MEETING 4 0 4 4 +MEDIUMS 1 0 1 1 +MEDITATION 1 0 1 1 +MEDITATED 1 0 1 1 +MEDICINE 1 0 1 1 +MEDALS 1 0 1 1 +MECHANICALLY 1 0 1 1 +MECHANICAL 1 0 1 1 +MEASURABLE 1 0 1 1 +MEANWHILE 4 0 4 4 +MEANTIME 2 0 2 2 +MEANT 10 0 10 10 +MEANS 23 0 23 23 +MEANING 2 0 2 2 +MAYOR 1 0 1 1 +MAYBE 4 0 4 4 +MATTOCK 1 0 1 1 +MATTERED 1 0 1 1 +MATERIALLY 1 0 1 1 +MATERIAL 1 0 1 1 +MATCH 1 0 1 1 +MASTERY 1 0 1 1 +MASTERS 1 0 1 1 +MASTERPIECE 1 0 1 1 +MASTERED 1 0 1 1 +MASTER'S 1 0 1 1 +MAST 1 0 1 1 +MASON'S 1 0 1 1 +MASON 1 0 1 1 +MARY'S 2 0 2 2 +MARY 2 0 2 2 +MARVELLOUS 4 0 4 4 +MARTIN 1 0 1 1 +MARSPORT 1 0 1 1 +MARSHAL'S 1 0 1 1 +MARSH 1 0 1 1 +MARS 3 0 3 3 +MARRY 3 0 3 3 +MARRIED 4 0 4 4 +MARLBOROUGH'S 1 0 1 1 +MARKS 1 0 1 1 +MARKING 1 0 1 1 +MARIUS 6 0 6 6 +MARIA 1 0 1 1 +MARGUERITE 11 0 11 11 +MARGINAL 1 0 1 1 +MARGARET'S 3 0 3 3 +MARGARET 14 0 14 14 +MARE 1 0 1 1 +MARCH 2 0 2 2 +MARBLE 2 0 2 2 +MAR 1 0 1 1 +MANTELPIECE 1 0 1 1 +MANNER 9 0 9 9 +MANIFESTATION 1 0 1 1 +MANCHESTER 1 0 1 1 +MANAGE 1 0 1 1 +MAMMOTH 1 0 1 1 +MALICE 1 0 1 1 +MALEVOLENT 1 0 1 1 +MALADY 1 0 1 1 +MAKER 1 0 1 1 +MAJOR 5 0 5 5 +MAJESTY 2 0 2 2 +MAINTAINED 1 0 1 1 +MAINTAIN 1 0 1 1 +MAINLY 1 0 1 1 +MAIDEN 3 0 3 3 +MAHOGANY 2 0 2 2 +MAGNIFYING 1 0 1 1 +MAGNIFIES 1 0 1 1 +MAGNIFICENT 2 0 2 2 +MAGNIFICENCE 1 0 1 1 +MAGNANIMITY 1 0 1 1 +MAGICIAN 2 0 2 2 +MAGICAL 1 0 1 1 +MAGIC 1 0 1 1 +MAGAZINE 1 0 1 1 +MADRID 1 0 1 1 +MADNESS 2 0 2 2 +MADELEINE 3 0 3 3 +MADAME 1 0 1 1 +MACHINES 1 0 1 1 +MACHINERY 1 0 1 1 +LYNCHES 1 0 1 1 +LUTHER 1 0 1 1 +LUSTILY 1 0 1 1 +LURKING 1 0 1 1 +LUMP 1 0 1 1 +LUCRATIVE 1 0 1 1 +LUCKY 2 0 2 2 +LUCKLESS 1 0 1 1 +LUCAS 1 0 1 1 +LOYALTY 2 0 2 2 +LOWERED 1 0 1 1 +LOVES 6 0 6 6 +LOVERS 2 0 2 2 +LOVELY 3 0 3 3 +LOUVRE 1 0 1 1 +LOUISIANA 1 0 1 1 +LOUDLY 1 0 1 1 +LOUDER 1 0 1 1 +LOSS 3 0 3 3 +LOSING 3 0 3 3 +LOSE 6 0 6 6 +LORN 1 0 1 1 +LORDSHIPS 1 0 1 1 
+LORDS 2 0 2 2 +LOOSENED 1 0 1 1 +LOOSELY 1 0 1 1 +LOOSE 3 0 3 3 +LOOKS 5 0 5 5 +LOOKING 21 0 21 21 +LONGING 3 0 3 3 +LONGED 2 0 2 2 +LONELY 1 0 1 1 +LONDON 4 0 4 4 +LODGE 1 0 1 1 +LOCKS 1 0 1 1 +LOCKED 4 0 4 4 +LOCATE 1 0 1 1 +LOCAL 4 0 4 4 +LOBSTERS 2 0 2 2 +LOADING 1 0 1 1 +LIVING 5 0 5 5 +LIVID 1 0 1 1 +LIVERY 1 0 1 1 +LIVELY 2 0 2 2 +LIVELONG 1 0 1 1 +LIVELIHOOD 1 0 1 1 +LITTER 1 0 1 1 +LITERATURE 1 0 1 1 +LITERALLY 1 0 1 1 +LISTENING 5 0 5 5 +LISTENERS 1 0 1 1 +LISTENER 1 0 1 1 +LIQUOR 4 0 4 4 +LIQUID 1 0 1 1 +LIPS 6 0 6 6 +LIP 3 0 3 3 +LIONS 1 0 1 1 +LION 1 0 1 1 +LINK 1 0 1 1 +LINGO 1 0 1 1 +LINGER 1 0 1 1 +LINES 2 0 2 2 +LINEN 2 0 2 2 +LINCOLN 1 0 1 1 +LIMITS 1 0 1 1 +LIMIT 1 0 1 1 +LIMES 1 0 1 1 +LIKING 1 0 1 1 +LIKES 1 0 1 1 +LIGHTLY 1 0 1 1 +LIGHTING 1 0 1 1 +LIGHTENED 1 0 1 1 +LIGATURES 1 0 1 1 +LIFTING 1 0 1 1 +LIFETIME 1 0 1 1 +LIEUTENANT 1 0 1 1 +LIES 4 0 4 4 +LIBRARY 2 0 2 2 +LIBERTY 3 0 3 3 +LIBERATION 2 0 2 2 +LEVITICUS 1 0 1 1 +LEVIN 6 0 6 6 +LEVELLED 1 0 1 1 +LEVEL 1 0 1 1 +LETTING 1 0 1 1 +LETS 1 0 1 1 +LESSONS 1 0 1 1 +LESSENS 1 0 1 1 +LESSEN 2 0 2 2 +LENT 1 0 1 1 +LENGTH 4 0 4 4 +LEGISLATURE 1 0 1 1 +LEGALLY 1 0 1 1 +LEGAL 2 0 2 2 +LEG 1 0 1 1 +LEECHES 1 0 1 1 +LEECH 1 0 1 1 +LEAVES 1 0 1 1 +LEAST 15 0 15 15 +LEARNS 1 0 1 1 +LEARNING 4 0 4 4 +LEANING 1 0 1 1 +LEAN 1 0 1 1 +LEADS 1 0 1 1 +LEADING 3 0 3 3 +LEADER 2 0 2 2 +LAZY 1 0 1 1 +LAZILY 1 0 1 1 +LAYING 3 0 3 3 +LAWYER 1 0 1 1 +LAUGHTER 3 0 3 3 +LAUGHS 1 0 1 1 +LAUGHING 5 0 5 5 +LAUGH 9 0 9 9 +LATTER 2 0 2 2 +LATIN 1 0 1 1 +LATELY 2 0 2 2 +LASTLY 2 0 2 2 +LASTING 1 0 1 1 +LARKIN'S 1 0 1 1 +LARGESSE 1 0 1 1 +LAPSE 1 0 1 1 +LANTERN 1 0 1 1 +LANGUAGE 2 0 2 2 +LANDSMAN 1 0 1 1 +LANDOWNER 4 0 4 4 +LANDLORD 1 0 1 1 +LANDING 1 0 1 1 +LANDED 3 0 3 3 +LAMPLIT 1 0 1 1 +LAMP 1 0 1 1 +LAME 1 0 1 1 +LADY'S 1 0 1 1 +LACK 2 0 2 2 +LACE 2 0 2 2 +LABOURS 1 0 1 1 +LABORING 2 0 2 2 +LABORER 1 0 1 1 +KNUCKLES 1 0 1 1 +KNOWS 8 0 8 8 +KNOWING 3 0 3 3 +KNOWEST 3 0 3 3 +KNOWED 1 0 1 1 +KNOCKER 1 0 1 1 +KNOCK 1 0 1 1 +KNIGHTS 2 0 2 2 +KNIGHTHOOD 1 0 1 1 +KNEW 16 0 16 16 +KNEES 3 0 3 3 +KNAVE 1 0 1 1 +KITCHEN 3 0 3 3 +KISSING 2 0 2 2 +KISSED 6 0 6 6 +KISS 2 0 2 2 +KINSFOLK 1 0 1 1 +KINGS 8 0 8 8 +KINGDOM 3 0 3 3 +KING'S 7 0 7 7 +KINDS 1 0 1 1 +KINDLY 3 0 3 3 +KINDEST 1 0 1 1 +KILLING 1 0 1 1 +KILLED 4 0 4 4 +KIDNEYS 1 0 1 1 +KICKED 1 0 1 1 +KHORASAN 2 0 2 2 +KHAN 1 0 1 1 +KEYHOLE 1 0 1 1 +KENT 2 0 2 2 +KENNETH 3 0 3 3 +KEEPING 5 0 5 5 +KEEPER'S 1 0 1 1 +KEEPER 2 0 2 2 +KEENLY 1 0 1 1 +KAZI 1 0 1 1 +KANSAS 7 0 7 7 +K 1 0 1 1 +JUSTLY 1 0 1 1 +JUSTINIAN 1 0 1 1 +JUSTIFIES 1 0 1 1 +JUSTIFICATION 3 0 3 3 +JUNE 1 0 1 1 +JUMPING 2 0 2 2 +JUMP 1 0 1 1 +JUICES 1 0 1 1 +JUICE 1 0 1 1 +JUGS 1 0 1 1 +JUDICIAL 1 0 1 1 +JUDGES 2 0 2 2 +JUDGED 1 0 1 1 +JOYOUS 1 0 1 1 +JOYFUL 2 0 2 2 +JOYANCE 2 0 2 2 +JOY 7 0 7 7 +JOURNEYED 1 0 1 1 +JOURNEY 8 0 8 8 +JOURNALISM 1 0 1 1 +JOSEPH 1 0 1 1 +JONES 1 0 1 1 +JOLLY 1 0 1 1 +JOINTS 1 0 1 1 +JOIN 5 0 5 5 +JOHN 9 0 9 9 +JOBS 1 0 1 1 +JOB 7 0 7 7 +JOANNA'S 1 0 1 1 +JEWISH 2 0 2 2 +JEWELRY 1 0 1 1 +JEWELER 2 0 2 2 +JEW'S 1 0 1 1 +JESUS 2 0 2 2 +JERK 1 0 1 1 +JERICHO 1 0 1 1 +JENKINS 2 0 2 2 +JEHOVAH 3 0 3 3 +JEERINGLY 1 0 1 1 +JEAN 10 0 10 10 +JANUARY 1 0 1 1 +JANGLING 1 0 1 1 +JANE'S 1 0 1 1 +JAMS 2 0 2 2 +JAMIESON 1 0 1 1 +JAMES 3 0 3 3 +JAM 2 0 2 2 +JAIL 1 0 1 1 +JACKSON 4 0 4 4 +JACKET 1 0 1 1 +J 2 0 2 2 +IVANOVITCH'S 1 0 1 1 +ITALY 1 0 1 1 +ISSUED 2 0 2 2 +ISRAEL 7 0 7 7 +ISOLATION 1 0 1 1 +ISN'T 2 0 2 2 +ISLANDERS 1 0 1 1 +ISLAND 5 0 5 5 +IRRITATION 1 0 1 1 +IRRITABILITY 1 0 1 1 +IRREVERENTLY 1 0 1 1 +IRREVERENCE 
1 0 1 1 +IRRESISTIBLY 1 0 1 1 +IRRESISTIBLE 1 0 1 1 +IRON 7 0 7 7 +IRKSOME 1 0 1 1 +IRISH 1 0 1 1 +IRELAND 2 0 2 2 +IRATE 1 0 1 1 +INWARD 1 0 1 1 +INVOLVED 1 0 1 1 +INVOKE 1 0 1 1 +INVITATION 2 0 2 2 +INVISIBLE 1 0 1 1 +INVINCIBLE 1 0 1 1 +INVETERATE 1 0 1 1 +INVESTIGATION 2 0 2 2 +INVENTING 1 0 1 1 +INVADING 1 0 1 1 +INVADERS 1 0 1 1 +INVADED 1 0 1 1 +INTRODUCTION 1 0 1 1 +INTRODUCING 1 0 1 1 +INTRODUCES 1 0 1 1 +INTRODUCED 1 0 1 1 +INTOXICATED 2 0 2 2 +INTOLERABLE 1 0 1 1 +INTIMATES 1 0 1 1 +INTERVIEWS 1 0 1 1 +INTERVAL 3 0 3 3 +INTERRUPTED 1 0 1 1 +INTERRED 2 0 2 2 +INTERPRETATION 1 0 1 1 +INTERPOLATIONS 1 0 1 1 +INTERNATIONAL 1 0 1 1 +INTERNAL 3 0 3 3 +INTERMISSION 1 0 1 1 +INTERMENT 2 0 2 2 +INTERMEDDLING 1 0 1 1 +INTERFERENCE 1 0 1 1 +INTERFERE 1 0 1 1 +INTERESTING 6 0 6 6 +INTERESTED 3 0 3 3 +INTERCOURSE 1 0 1 1 +INTERCHANGE 1 0 1 1 +INTENTIONALLY 2 0 2 2 +INTENTION 4 0 4 4 +INTENSITY 1 0 1 1 +INTENSELY 2 0 2 2 +INTENDED 5 0 5 5 +INTELLIGENT 2 0 2 2 +INTELLIGENCE 2 0 2 2 +INTELLECT 1 0 1 1 +INSULTED 1 0 1 1 +INSUFFICIENT 1 0 1 1 +INSTRUMENTS 4 0 4 4 +INSTRUCTIONS 1 0 1 1 +INSTITUTIONS 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCTS 1 0 1 1 +INSTINCT 3 0 3 3 +INSTEAD 4 0 4 4 +INSTANT'S 1 0 1 1 +INSTANT 5 0 5 5 +INSTANCE 1 0 1 1 +INSPIRES 1 0 1 1 +INSPIRATION 3 0 3 3 +INSOLUBLE 1 0 1 1 +INSOLENT 1 0 1 1 +INSISTING 1 0 1 1 +INSISTED 2 0 2 2 +INSIST 1 0 1 1 +INSINUATING 1 0 1 1 +INSHALLAH 1 0 1 1 +INSECURITY 1 0 1 1 +INSCRIPTION 1 0 1 1 +INSANE 1 0 1 1 +INQUISITION 1 0 1 1 +INQUIRIES 1 0 1 1 +INQUIRED 5 0 5 5 +INNOCENT 3 0 3 3 +INNKEEPER 2 0 2 2 +INJURIES 1 0 1 1 +INJURED 1 0 1 1 +INIQUITIES 1 0 1 1 +INHERENT 1 0 1 1 +INHABITANTS 1 0 1 1 +INHABIT 1 0 1 1 +INGREDIENTS 1 0 1 1 +INFORMED 5 0 5 5 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENCES 1 0 1 1 +INFLUENCED 1 0 1 1 +INFLUENCE 10 0 10 10 +INFLICT 1 0 1 1 +INFLATE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRM 1 0 1 1 +INFINITELY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 3 0 3 3 +INFERENTIALLY 1 0 1 1 +INFAMY 2 0 2 2 +INEXORABLY 1 0 1 1 +INEVITABLE 1 0 1 1 +INELEGANTLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDUSTRIAL 1 0 1 1 +INDULGENT 1 0 1 1 +INDULGENCE 2 0 2 2 +INDUCED 1 0 1 1 +INDIVIDUALS 9 0 9 9 +INDIVIDUAL 1 0 1 1 +INDISPOSITION 1 0 1 1 +INDISCRETION 1 0 1 1 +INDIGNATION 1 0 1 1 +INDIFFERENT 2 0 2 2 +INDIFFERENCE 1 0 1 1 +INDICATIONS 2 0 2 2 +INDICATED 2 0 2 2 +INDIANS 2 0 2 2 +INDIANA 2 0 2 2 +INDIAN 1 0 1 1 +INDESCRIBABLE 1 0 1 1 +INDEPENDENT 2 0 2 2 +INDEPENDENCE 4 0 4 4 +INDECISION 1 0 1 1 +INCUR 1 0 1 1 +INCREDULOUSLY 1 0 1 1 +INCREDULITY 1 0 1 1 +INCREASING 2 0 2 2 +INCREASES 2 0 2 2 +INCREASED 5 0 5 5 +INCREASE 5 0 5 5 +INCORRECT 1 0 1 1 +INCONSISTENCY 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 2 0 2 2 +INCLUDE 1 0 1 1 +INCLINED 1 0 1 1 +INCLINATION 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENT 1 0 1 1 +INCARCERATING 1 0 1 1 +INASMUCH 1 0 1 1 +INANIMATE 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVISE 1 0 1 1 +IMPROVISATION 1 0 1 1 +IMPROVING 1 0 1 1 +IMPROVIDENT 1 0 1 1 +IMPRESSION 1 0 1 1 +IMPRECATIONS 1 0 1 1 +IMPRECATION 1 0 1 1 +IMPOSSIBLE 4 0 4 4 +IMPOSING 1 0 1 1 +IMPOSES 1 0 1 1 +IMPORTS 1 0 1 1 +IMPORTED 1 0 1 1 +IMPORTANT 1 0 1 1 +IMPORTANCE 3 0 3 3 +IMPLIES 1 0 1 1 +IMPLIED 2 0 2 2 +IMPLACABLE 1 0 1 1 +IMPIOUS 1 0 1 1 +IMPERTINENT 1 0 1 1 +IMPERSONAL 1 0 1 1 +IMPERIOUS 1 0 1 1 +IMPERATIVE 1 0 1 1 +IMPATIENTLY 2 0 2 2 +IMPATIENT 2 0 2 2 +IMMORTALS 1 0 1 1 +IMMEDIATELY 9 0 9 9 +IMMEDIATE 1 0 1 1 +IMITATION 1 0 1 1 +IMITATE 1 0 1 1 +IMBECILE 1 0 1 1 +IMAGINED 1 0 1 1 +IMAGINATION 1 0 1 1 +IMAGINARY 1 0 1 1 +IMAGE 2 
0 2 2 +ILLUSTRIOUS 4 0 4 4 +ILLUSION 1 0 1 1 +ILLITERATE 1 0 1 1 +IGNORED 1 0 1 1 +IGNORANT 2 0 2 2 +IGNORANCE 1 0 1 1 +IDOLATRIES 1 0 1 1 +IDLE 1 0 1 1 +IDIOTIC 1 0 1 1 +IDIOT 1 0 1 1 +IDEAS 2 0 2 2 +IDEAL 1 0 1 1 +HYPOTHETICAL 1 0 1 1 +HYPODERMICALLY 1 0 1 1 +HYPODERMIC 1 0 1 1 +HYDROCHLORIC 2 0 2 2 +HUSTLED 1 0 1 1 +HUSTLE 1 0 1 1 +HUSKILY 1 0 1 1 +HUSH 1 0 1 1 +HUSBANDMEN 1 0 1 1 +HUSBAND'S 3 0 3 3 +HUSBAND 9 0 9 9 +HURRYING 3 0 3 3 +HURRY 1 0 1 1 +HURRIEDLY 3 0 3 3 +HURRIED 3 0 3 3 +HURRICANE 1 0 1 1 +HUNTED 2 0 2 2 +HUNT 1 0 1 1 +HUNGARY 1 0 1 1 +HUNG 2 0 2 2 +HUNDREDTH 1 0 1 1 +HUNDREDS 1 0 1 1 +HUNDRED 29 0 29 29 +HUMILIATIONS 1 0 1 1 +HUMILIATION 1 0 1 1 +HUMILIATED 1 0 1 1 +HUMBLY 1 0 1 1 +HUMBLE 1 0 1 1 +HUMANITY 1 0 1 1 +HUMANITARY 1 0 1 1 +HUMAN 6 0 6 6 +HULLO 1 0 1 1 +HUGELY 1 0 1 1 +HUGE 3 0 3 3 +HOWL 1 0 1 1 +HOUSES 4 0 4 4 +HOTLY 1 0 1 1 +HOTEL 4 0 4 4 +HOT 5 0 5 5 +HOST 3 0 3 3 +HOSPITALITY 1 0 1 1 +HORSEBACK 1 0 1 1 +HORSE 10 0 10 10 +HORRIBLE 3 0 3 3 +HORNS 1 0 1 1 +HORN 1 0 1 1 +HORIZONTAL 1 0 1 1 +HORIZON 2 0 2 2 +HORACE 1 0 1 1 +HOPPER 1 0 1 1 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPEFUL 1 0 1 1 +HOPED 2 0 2 2 +HOOTED 1 0 1 1 +HOOK 1 0 1 1 +HONEYMOON 1 0 1 1 +HONEY 1 0 1 1 +HOMEWARD 1 0 1 1 +HOMELESS 1 0 1 1 +HOLLOW 2 0 2 2 +HOLES 2 0 2 2 +HOLE 1 0 1 1 +HOLDS 1 0 1 1 +HOLDING 6 0 6 6 +HITHERTO 2 0 2 2 +HITCH 1 0 1 1 +HIT 3 0 3 3 +HISTORIANS 1 0 1 1 +HIRE 1 0 1 1 +HINTS 1 0 1 1 +HINTED 1 0 1 1 +HINT 3 0 3 3 +HINGES 1 0 1 1 +HINDER 1 0 1 1 +HILL 7 0 7 7 +HIGHWAYS 1 0 1 1 +HIGHLY 1 0 1 1 +HIGHEST 1 0 1 1 +HIGHER 1 0 1 1 +HIGGINS 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HESITATING 1 0 1 1 +HESITATED 2 0 2 2 +HERS 4 0 4 4 +HERO 1 0 1 1 +HERIOT 1 0 1 1 +HERCULEAN 1 0 1 1 +HERBS 1 0 1 1 +HENRY 3 0 3 3 +HENCE 4 0 4 4 +HELSTONE 1 0 1 1 +HELPLESSLY 1 0 1 1 +HELPLESS 3 0 3 3 +HELMET 2 0 2 2 +HELLO 1 0 1 1 +HELL 4 0 4 4 +HELD 13 0 13 13 +HEIR 2 0 2 2 +HEIGHT 1 0 1 1 +HEEL 1 0 1 1 +HEED 1 0 1 1 +HEDGE 1 0 1 1 +HEBREWS 4 0 4 4 +HEAVY 11 0 11 11 +HEAVIEST 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVEN'S 1 0 1 1 +HEAVEN 6 0 6 6 +HEAVE 1 0 1 1 +HEARTILY 1 0 1 1 +HEARTIEST 1 0 1 1 +HEARTED 2 0 2 2 +HEARING 2 0 2 2 +HEAP 2 0 2 2 +HEALTHY 1 0 1 1 +HEADED 4 0 4 4 +HEADACHES 1 0 1 1 +HAY 1 0 1 1 +HAVEN'T 4 0 4 4 +HAUNT 3 0 3 3 +HAUNCHES 1 0 1 1 +HAUGHTINESS 1 0 1 1 +HATTON 1 0 1 1 +HATS 1 0 1 1 +HATREDS 1 0 1 1 +HATRED 2 0 2 2 +HATES 3 0 3 3 +HATED 3 0 3 3 +HAT 3 0 3 3 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 1 0 1 1 +HASTEN 1 0 1 1 +HASTE 5 0 5 5 +HAST 7 0 7 7 +HASN'T 1 0 1 1 +HASHISH 1 0 1 1 +HARSHLY 2 0 2 2 +HARRY 3 0 3 3 +HARRISONVILLE 2 0 2 2 +HARNESSED 1 0 1 1 +HARMONY 1 0 1 1 +HARMLESS 1 0 1 1 +HARM 6 0 6 6 +HARK 1 0 1 1 +HARBOR 1 0 1 1 +HAPPINESS 5 0 5 5 +HAPPIEST 1 0 1 1 +HAPPIER 3 0 3 3 +HAPPENS 1 0 1 1 +HANGING 1 0 1 1 +HANGED 1 0 1 1 +HANG 3 0 3 3 +HANDSOME 4 0 4 4 +HANDLED 1 0 1 1 +HANDKERCHIEF 3 0 3 3 +HANDING 1 0 1 1 +HANDIER 1 0 1 1 +HANDED 1 0 1 1 +HAMPERED 1 0 1 1 +HAMMERS 1 0 1 1 +HAMMER 1 0 1 1 +HALVES 1 0 1 1 +HALTING 1 0 1 1 +HALLS 1 0 1 1 +HALFPENNY 1 0 1 1 +HALE 6 0 6 6 +HAG 1 0 1 1 +HACK 1 0 1 1 +HABITUAL 1 0 1 1 +HABITS 2 0 2 2 +GUT 3 0 3 3 +GUSH 1 0 1 1 +GULF 1 0 1 1 +GUILTY 5 0 5 5 +GUILT 2 0 2 2 +GUIDE 3 0 3 3 +GUESTS 2 0 2 2 +GUEST 4 0 4 4 +GUESSED 1 0 1 1 +GUARDS 1 0 1 1 +GUARDED 1 0 1 1 +GUARD 1 0 1 1 +GRUMBLED 2 0 2 2 +GRUFFISH 1 0 1 1 +GROWTH 1 0 1 1 +GROWN 1 0 1 1 +GROUPS 6 0 6 6 +GROUP 3 0 3 3 +GROUNDS 1 0 1 1 +GROTTO 1 0 1 1 +GROOMED 1 0 1 1 +GROOM 1 0 1 1 +GRINNING 3 0 3 3 +GRINNED 2 0 2 2 +GRIN 2 0 2 2 +GRIMACED 1 0 1 1 +GRIEVING 1 0 1 1 +GREET 2 0 2 2 +GREENWOOD 
1 0 1 1 +GREENHORNS 1 0 1 1 +GREEK 2 0 2 2 +GREATLY 1 0 1 1 +GREATEST 6 0 6 6 +GREATER 6 0 6 6 +GREASY 1 0 1 1 +GRAVITY 1 0 1 1 +GRAVES 1 0 1 1 +GRAVELLED 1 0 1 1 +GRATING 2 0 2 2 +GRATIFICATION 1 0 1 1 +GRATEFUL 2 0 2 2 +GRATED 1 0 1 1 +GRASS 2 0 2 2 +GRASP 2 0 2 2 +GRAPE 1 0 1 1 +GRANTING 1 0 1 1 +GRANT 2 0 2 2 +GRANDSON 1 0 1 1 +GRANDPAPA 1 0 1 1 +GRANDFATHER 2 0 2 2 +GRANDEUR 1 0 1 1 +GRANDDAUGHTER 1 0 1 1 +GRAINS 1 0 1 1 +GRAIN 4 0 4 4 +GRAFT 2 0 2 2 +GRADUALLY 1 0 1 1 +GRACIOUSLY 2 0 2 2 +GRACE 1 0 1 1 +GRABBED 2 0 2 2 +GOTTEN 1 0 1 1 +GOSLER 1 0 1 1 +GOOSE 1 0 1 1 +GOODNESS 5 0 5 5 +GOLFING 1 0 1 1 +GOLDFINCH 1 0 1 1 +GOD'S 3 0 3 3 +GNASHING 1 0 1 1 +GNARLED 1 0 1 1 +GLOWING 2 0 2 2 +GLOWED 3 0 3 3 +GLOVES 3 0 3 3 +GLOVE 1 0 1 1 +GLORY 2 0 2 2 +GLORIOUS 1 0 1 1 +GLORIFY 1 0 1 1 +GLOOMY 2 0 2 2 +GLOOM 1 0 1 1 +GLOATING 1 0 1 1 +GLISPIN'S 1 0 1 1 +GLINTING 1 0 1 1 +GLIMPSE 1 0 1 1 +GLIMMER 1 0 1 1 +GLIDING 1 0 1 1 +GLEAMED 1 0 1 1 +GLAZED 1 0 1 1 +GLANCING 1 0 1 1 +GLANCES 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 3 0 3 3 +GLADNESS 2 0 2 2 +GLADLY 1 0 1 1 +GLADDENEST 1 0 1 1 +GLADDENED 1 0 1 1 +GIVES 7 0 7 7 +GIRDLE 2 0 2 2 +GIMLET 1 0 1 1 +GILROY 1 0 1 1 +GIFTED 1 0 1 1 +GIANT'S 1 0 1 1 +GIANT 1 0 1 1 +GHOSTS 1 0 1 1 +GHOST 2 0 2 2 +GHASTLY 2 0 2 2 +GETTING 12 0 12 12 +GETS 3 0 3 3 +GERMS 1 0 1 1 +GERMAN 7 0 7 7 +GERM 1 0 1 1 +GEORGIA 1 0 1 1 +GEORGES 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 1 0 1 1 +GENTLE 1 0 1 1 +GENIUS 1 0 1 1 +GENIALLY 1 0 1 1 +GENEROUS 2 0 2 2 +GENEROSITY 1 0 1 1 +GENERATION 1 0 1 1 +GENERALLY 3 0 3 3 +GENERAL 7 0 7 7 +GEAR 2 0 2 2 +GAZING 2 0 2 2 +GAZED 3 0 3 3 +GAY 2 0 2 2 +GATHERING 2 0 2 2 +GATHER 1 0 1 1 +GATES 1 0 1 1 +GATE 4 0 4 4 +GASPED 2 0 2 2 +GASP 1 0 1 1 +GARNISHMENT 1 0 1 1 +GARMENTS 2 0 2 2 +GARLANDED 1 0 1 1 +GARLAND 1 0 1 1 +GARDEN 7 0 7 7 +GAPS 1 0 1 1 +GAP 1 0 1 1 +GANG 5 0 5 5 +GAMMER 1 0 1 1 +GAMESTER 1 0 1 1 +GAMBLING 3 0 3 3 +GAMBLERS 1 0 1 1 +GALLOPED 1 0 1 1 +GALLERY 1 0 1 1 +GALL 1 0 1 1 +GAIN 3 0 3 3 +GAILY 1 0 1 1 +GAIETY 1 0 1 1 +GAD'S 1 0 1 1 +GAD 1 0 1 1 +GABLE 1 0 1 1 +GABBLE 1 0 1 1 +G 1 0 1 1 +FURY 2 0 2 2 +FURTHEST 1 0 1 1 +FURNITURE 1 0 1 1 +FURNISHED 1 0 1 1 +FURNACE 1 0 1 1 +FURIOUS 2 0 2 2 +FUNNY 3 0 3 3 +FUMED 1 0 1 1 +FULLY 1 0 1 1 +FULFILLED 1 0 1 1 +FULFIL 1 0 1 1 +FUGITIVES 1 0 1 1 +FUEL 1 0 1 1 +FRY 1 0 1 1 +FRUITLESS 1 0 1 1 +FRUIT 7 0 7 7 +FROWNED 1 0 1 1 +FROWN 1 0 1 1 +FROSTY 1 0 1 1 +FROST 1 0 1 1 +FRIGHTENED 3 0 3 3 +FRIGHTEN 1 0 1 1 +FRIENDSHIP 2 0 2 2 +FRIENDLINESS 1 0 1 1 +FRIEND'S 1 0 1 1 +FRIDOLIN 1 0 1 1 +FRIDAY 2 0 2 2 +FRESHEST 1 0 1 1 +FRESH 5 0 5 5 +FRERE 1 0 1 1 +FREQUENTLY 2 0 2 2 +FREQUENT 2 0 2 2 +FREELY 2 0 2 2 +FREED 2 0 2 2 +FRAUD 1 0 1 1 +FRANTICALLY 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCS 6 0 6 6 +FRANCIS 1 0 1 1 +FRAME 1 0 1 1 +FRAGMENTS 1 0 1 1 +FOUNDATION 1 0 1 1 +FOSTER 3 0 3 3 +FORWARDS 3 0 3 3 +FORWARD 5 0 5 5 +FORTUNES 1 0 1 1 +FORTUNATELY 5 0 5 5 +FORTNIGHT 1 0 1 1 +FORTHWITH 1 0 1 1 +FORTH 4 0 4 4 +FORSOOTH 1 0 1 1 +FORMS 2 0 2 2 +FORMING 2 0 2 2 +FORMIDABLE 2 0 2 2 +FORMERLY 2 0 2 2 +FORGOTTEN 4 0 4 4 +FORGOT 7 0 7 7 +FORGIVE 2 0 2 2 +FORGETTING 1 0 1 1 +FORGET 2 0 2 2 +FORGERIES 1 0 1 1 +FOREVER 3 0 3 3 +FORETASTE 1 0 1 1 +FORESTERS 1 0 1 1 +FOREST 2 0 2 2 +FORESHADOWED 1 0 1 1 +FORENOON 1 0 1 1 +FOREMOST 1 0 1 1 +FORELOCK 1 0 1 1 +FOREIGNERS 1 0 1 1 +FOREIGN 6 0 6 6 +FOREHEAD 4 0 4 4 +FOREFINGER 1 0 1 1 +FORCES 3 0 3 3 +FORBIDDEN 1 0 1 1 +FORBID 1 0 1 1 +FORBEARANCE 1 0 1 1 +FORBEAR 1 0 1 1 +FOOTSTEPS 1 0 1 1 +FOOTNOTE 1 0 1 1 +FOOLISH 3 0 3 3 +FOND 2 0 2 2 +FOLLOWERS 3 0 3 3 +FOLKS 3 
0 3 3 +FOLDED 2 0 2 2 +FOLD 1 0 1 1 +FOGGY 1 0 1 1 +FOES 2 0 2 2 +FLYING 1 0 1 1 +FLUTTER 1 0 1 1 +FLUSHED 2 0 2 2 +FLUSH 1 0 1 1 +FLUNG 2 0 2 2 +FLUID 2 0 2 2 +FLOWERS 4 0 4 4 +FLOURISHING 1 0 1 1 +FLOURISHED 1 0 1 1 +FLOURISH 1 0 1 1 +FLOCKS 1 0 1 1 +FLOATED 1 0 1 1 +FLITTED 1 0 1 1 +FLIRTATION 1 0 1 1 +FLING 1 0 1 1 +FLINCH 1 0 1 1 +FLIGHT 5 0 5 5 +FLICK 1 0 1 1 +FLEW 1 0 1 1 +FLEECED 1 0 1 1 +FLEE 1 0 1 1 +FLED 4 0 4 4 +FLATTERY 1 0 1 1 +FLASK 1 0 1 1 +FLASHING 1 0 1 1 +FLASHED 1 0 1 1 +FLARING 1 0 1 1 +FLAPPING 1 0 1 1 +FLAMES 2 0 2 2 +FLAME 5 0 5 5 +FLAGRANT 1 0 1 1 +FLAGONS 1 0 1 1 +FLAGON 1 0 1 1 +FLAGGED 1 0 1 1 +FLAG 1 0 1 1 +FIXING 1 0 1 1 +FIXED 5 0 5 5 +FIX 1 0 1 1 +FITTING 2 0 2 2 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FISHER 2 0 2 2 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRING 2 0 2 2 +FIREPLACE 1 0 1 1 +FIREMAN 3 0 3 3 +FIREFLY 1 0 1 1 +FIRED 1 0 1 1 +FINS 1 0 1 1 +FINNEY 2 0 2 2 +FINISHING 2 0 2 2 +FINISH 3 0 3 3 +FINGERS 1 0 1 1 +FINGERING 1 0 1 1 +FINGER 6 0 6 6 +FINEST 1 0 1 1 +FINE 10 0 10 10 +FINANCIAL 1 0 1 1 +FINALLY 6 0 6 6 +FIN 1 0 1 1 +FILTER 1 0 1 1 +FILMY 1 0 1 1 +FILLED 5 0 5 5 +FILL 4 0 4 4 +FIGURE 3 0 3 3 +FIGHTING 1 0 1 1 +FIGHT 5 0 5 5 +FIFTY 14 0 14 14 +FIERCE 2 0 2 2 +FIENDS 1 0 1 1 +FIENDISH 1 0 1 1 +FIELDS 2 0 2 2 +FICKLE 2 0 2 2 +FIACRE 1 0 1 1 +FEW 26 0 26 26 +FEVERISH 4 0 4 4 +FEVER 1 0 1 1 +FETTERS 1 0 1 1 +FETCHED 1 0 1 1 +FETCH 7 0 7 7 +FESTIVE 1 0 1 1 +FESTIVAL 1 0 1 1 +FEROCIOUS 1 0 1 1 +FENDER 1 0 1 1 +FENCED 1 0 1 1 +FENCE 4 0 4 4 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 2 0 2 2 +FEET 9 0 9 9 +FEELINGS 3 0 3 3 +FEEBLY 1 0 1 1 +FEEBLE 2 0 2 2 +FEE 1 0 1 1 +FEDERAL 1 0 1 1 +FED 1 0 1 1 +FEBRUARY 5 0 5 5 +FEATURES 1 0 1 1 +FEATHERS 1 0 1 1 +FEATHER 1 0 1 1 +FEARS 1 0 1 1 +FEARLESS 1 0 1 1 +FEARING 1 0 1 1 +FEARFUL 2 0 2 2 +FEARED 4 0 4 4 +FEAR 13 0 13 13 +FAVOURS 1 0 1 1 +FAVORABLE 1 0 1 1 +FATTY 1 0 1 1 +FATTER 1 0 1 1 +FATIGUE 2 0 2 2 +FATHERLY 1 0 1 1 +FATALLY 1 0 1 1 +FATAL 2 0 2 2 +FASTER 2 0 2 2 +FASHIONS 1 0 1 1 +FASHIONED 2 0 2 2 +FASHION 2 0 2 2 +FASCINATION 1 0 1 1 +FARTHEST 1 0 1 1 +FARTHER 3 0 3 3 +FARMS 1 0 1 1 +FARM 3 0 3 3 +FAREWELL 1 0 1 1 +FARED 1 0 1 1 +FANTASTIC 1 0 1 1 +FANS 1 0 1 1 +FANCIFUL 1 0 1 1 +FANCIED 1 0 1 1 +FANATICS 1 0 1 1 +FAMILIES 3 0 3 3 +FAMILIARITY 1 0 1 1 +FAMILIAR 2 0 2 2 +FALSE 1 0 1 1 +FALLEN 1 0 1 1 +FAITHLESS 1 0 1 1 +FAITHFULLY 2 0 2 2 +FAITHFUL 3 0 3 3 +FAIRY 2 0 2 2 +FAIRLY 3 0 3 3 +FAINTNESS 1 0 1 1 +FAINTING 2 0 2 2 +FAINT 4 0 4 4 +FAILURES 3 0 3 3 +FAILURE 1 0 1 1 +FAILS 1 0 1 1 +FAIL 3 0 3 3 +FAGOTS 1 0 1 1 +FAGGOT 1 0 1 1 +FACTS 3 0 3 3 +FACTORIES 2 0 2 2 +FACTOR 1 0 1 1 +FACING 3 0 3 3 +FACES 2 0 2 2 +FABULOUS 1 0 1 1 +EYELIDS 1 0 1 1 +EYED 4 0 4 4 +EXTREMELY 4 0 4 4 +EXTREME 2 0 2 2 +EXTRAORDINARY 2 0 2 2 +EXTINGUISH 1 0 1 1 +EXTERNAL 2 0 2 2 +EXTENT 2 0 2 2 +EXTENSION 1 0 1 1 +EXTENDING 2 0 2 2 +EXTENDED 2 0 2 2 +EXTEMPORIZED 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSED 3 0 3 3 +EXPOSURE 1 0 1 1 +EXPOSES 1 0 1 1 +EXPLANATORY 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINING 1 0 1 1 +EXPLAIN 1 0 1 1 +EXPIATION 1 0 1 1 +EXPERIMENTS 1 0 1 1 +EXPERIMENTING 1 0 1 1 +EXPERIENCES 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPENSES 2 0 2 2 +EXPENSE 2 0 2 2 +EXPENDED 1 0 1 1 +EXPEDIENT 1 0 1 1 +EXPECTS 1 0 1 1 +EXPECT 4 0 4 4 +EXOTIC 1 0 1 1 +EXIT 1 0 1 1 +EXISTS 1 0 1 1 +EXISTING 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 1 0 1 1 +EXIST 2 0 2 2 +EXHIBITED 4 0 4 4 +EXERTIONS 1 0 1 1 +EXERTING 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISES 1 0 1 1 +EXERCISE 3 0 3 3 +EXECUTIONER'S 2 0 2 2 +EXECUTION 2 0 2 2 +EXECUTE 1 0 1 1 +EXECRABLE 1 0 1 
1 +EXCUSES 1 0 1 1 +EXCUSE 3 0 3 3 +EXCUSABLE 1 0 1 1 +EXCLAMATION 1 0 1 1 +EXCLAIMING 1 0 1 1 +EXCLAIM 1 0 1 1 +EXCITEMENT 4 0 4 4 +EXCITEDLY 2 0 2 2 +EXCITED 2 0 2 2 +EXCITE 1 0 1 1 +EXCITABILITY 1 0 1 1 +EXCHANGED 2 0 2 2 +EXCHANGE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONALLY 2 0 2 2 +EXCEPTION 1 0 1 1 +EXCELLENT 5 0 5 5 +EXCELLENCY 4 0 4 4 +EXCEEDINGLY 1 0 1 1 +EXCEEDING 3 0 3 3 +EXASPERATING 1 0 1 1 +EXAMPLE 3 0 3 3 +EXAMINE 2 0 2 2 +EXAMINATION 3 0 3 3 +EXALTED 1 0 1 1 +EXAGGERATE 1 0 1 1 +EXACTITUDE 2 0 2 2 +EXACT 1 0 1 1 +EVIL 5 0 5 5 +EVIDENTLY 4 0 4 4 +EVIDENT 3 0 3 3 +EVIDENCE 2 0 2 2 +EVERYWHERE 4 0 4 4 +EVERYTHING'S 1 0 1 1 +EVERYTHING 15 0 15 15 +EVERYBODY 6 0 6 6 +EVENTS 4 0 4 4 +EVENT 1 0 1 1 +EVENING 9 0 9 9 +EVEN 46 0 46 46 +EVE 2 0 2 2 +EVAPORATION 2 0 2 2 +EVAPORATING 1 0 1 1 +EVAPORATE 3 0 3 3 +EVADED 1 0 1 1 +EUROPEAN 1 0 1 1 +EUROPE 1 0 1 1 +EUNUCH'S 1 0 1 1 +EUNUCH 11 0 11 11 +ETERNITY 1 0 1 1 +ESTRANGE 1 0 1 1 +ESTIMATES 1 0 1 1 +ESTEEM 3 0 3 3 +ESTATES 1 0 1 1 +ESTATE 1 0 1 1 +ESTABLISHMENT 1 0 1 1 +ESTABLISHED 2 0 2 2 +ESTABLISH 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 1 0 1 1 +ESSENCE 1 0 1 1 +ESSAY 1 0 1 1 +ESQUIRES 1 0 1 1 +ESPECIAL 1 0 1 1 +ESCAPADE 1 0 1 1 +ERROR 2 0 2 2 +ERRATIC 1 0 1 1 +ERRANT 1 0 1 1 +ERECTS 1 0 1 1 +ERECTED 3 0 3 3 +ERECT 1 0 1 1 +EQUIVALENT 1 0 1 1 +EQUALLY 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLES 1 0 1 1 +EPISTLE 1 0 1 1 +ENVYING 1 0 1 1 +ENVY 3 0 3 3 +ENVIRONMENT 1 0 1 1 +ENVIOUS 1 0 1 1 +ENVIED 1 0 1 1 +ENVELOPE 1 0 1 1 +ENTREATY 1 0 1 1 +ENTREATINGLY 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTREATED 1 0 1 1 +ENTHUSIASM 3 0 3 3 +ENTERTAINMENT 1 0 1 1 +ENTERTAINING 1 0 1 1 +ENTERTAIN 1 0 1 1 +ENTER 5 0 5 5 +ENTAILED 1 0 1 1 +ENRAGED 1 0 1 1 +ENLISTMENT 1 0 1 1 +ENJOYMENT 3 0 3 3 +ENGRAVED 1 0 1 1 +ENGLISH 4 0 4 4 +ENGLAND 3 0 3 3 +ENGAGEMENTS 1 0 1 1 +ENGAGEMENT 1 0 1 1 +ENGAGED 2 0 2 2 +ENGAGE 1 0 1 1 +ENERGY 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 2 0 2 2 +ENDURANCE 1 0 1 1 +ENDEAVOURED 1 0 1 1 +ENCOURAGED 2 0 2 2 +ENCOUNTERED 1 0 1 1 +ENCOMPASSED 1 0 1 1 +ENCHANTMENT 2 0 2 2 +ENCHANTED 3 0 3 3 +ENCAMPMENT 1 0 1 1 +ENCAMPED 1 0 1 1 +EMPTY 8 0 8 8 +EMPTIES 1 0 1 1 +EMPTIED 2 0 2 2 +EMPRESSES 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPLOY 1 0 1 1 +EMPIRE 3 0 3 3 +EMPHATIC 2 0 2 2 +EMPHASIZE 1 0 1 1 +EMPERORS 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMIR 2 0 2 2 +EMERGED 1 0 1 1 +EMBROIDERY 1 0 1 1 +EMBRACES 1 0 1 1 +EMBRACED 1 0 1 1 +EMBARRASSMENT 1 0 1 1 +EMBARRASSED 1 0 1 1 +EMBARKED 2 0 2 2 +ELSIE'S 1 0 1 1 +ELSIE 1 0 1 1 +ELSE 12 0 12 12 +ELKINS 1 0 1 1 +ELIZABETH 1 0 1 1 +ELEVENTH 1 0 1 1 +ELEVEN 4 0 4 4 +ELEVATION 1 0 1 1 +ELEPHANT 1 0 1 1 +ELEMENTS 1 0 1 1 +ELEGANT 1 0 1 1 +ELECTRIC 1 0 1 1 +ELECTED 2 0 2 2 +ELDEST 1 0 1 1 +ELBOWS 1 0 1 1 +ELBOWED 1 0 1 1 +ELAPSED 1 0 1 1 +ELAPSE 1 0 1 1 +EKED 1 0 1 1 +EJACULATED 1 0 1 1 +EIGHTEENTH 3 0 3 3 +EIGHTEEN 10 0 10 10 +EGYPT 5 0 5 5 +EGG 1 0 1 1 +EFFECTS 2 0 2 2 +EFFECTIVE 1 0 1 1 +EELS 1 0 1 1 +EDWARD 1 0 1 1 +EDUCATION 2 0 2 2 +EDUCATED 1 0 1 1 +EDGES 1 0 1 1 +EDGE 2 0 2 2 +ECONOMIZE 1 0 1 1 +ECONOMICAL 1 0 1 1 +ECONOMIC 1 0 1 1 +ECHOES 1 0 1 1 +ECHOED 1 0 1 1 +ECCLESIASTICS 1 0 1 1 +EATING 2 0 2 2 +EAT 12 0 12 12 +EASY 9 0 9 9 +EASTERN 2 0 2 2 +EASIEST 1 0 1 1 +EASE 4 0 4 4 +EARTHEN 1 0 1 1 +EARNEST 9 0 9 9 +EARNED 1 0 1 1 +EARN 2 0 2 2 +EARLY 8 0 8 8 +EARLINESS 1 0 1 1 +EAGLE 4 0 4 4 +EAGERLY 4 0 4 4 +EAGER 2 0 2 2 +DYING 6 0 6 6 +DWELT 1 0 1 1 +DWELLS 1 0 1 1 +DWELLINGS 1 0 1 1 +DWELLERS 1 0 1 1 +DWELL 1 0 1 1 +DWARF 2 0 2 2 +DUTY 10 0 10 10 +DUTIES 2 0 2 2 +DUSTY 1 0 1 1 +DUST 2 0 2 2 +DURING 20 0 20 
20 +DURATION 2 0 2 2 +DUNNO 1 0 1 1 +DUN 1 0 1 1 +DUE 5 0 5 5 +DU 1 0 1 1 +DRUNK 2 0 2 2 +DRUMS 1 0 1 1 +DRUGGED 2 0 2 2 +DROWNING 1 0 1 1 +DROWN 1 0 1 1 +DROVE 1 0 1 1 +DROUTH 1 0 1 1 +DROPS 1 0 1 1 +DROPPED 8 0 8 8 +DROOPING 2 0 2 2 +DRIVING 1 0 1 1 +DRIVEN 1 0 1 1 +DRIVE 5 0 5 5 +DRINKS 1 0 1 1 +DRINKING 4 0 4 4 +DRINKERS 2 0 2 2 +DRIFT 1 0 1 1 +DRIED 1 0 1 1 +DREW 7 0 7 7 +DRESSING 1 0 1 1 +DRESSES 1 0 1 1 +DRESSED 1 0 1 1 +DRESS 1 0 1 1 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 4 0 4 4 +DREADFULLY 1 0 1 1 +DRAWING 9 0 9 9 +DRAMATIC 1 0 1 1 +DRAINS 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGONS 1 0 1 1 +DRAGON 1 0 1 1 +DRAGGED 1 0 1 1 +DRAG 1 0 1 1 +DOZEN 2 0 2 2 +DOWNSTAIRS 1 0 1 1 +DOWNS 2 0 2 2 +DOWER 1 0 1 1 +DOVES 1 0 1 1 +DOUBTLESS 2 0 2 2 +DOUBTFUL 1 0 1 1 +DOUBLE 5 0 5 5 +DOT 1 0 1 1 +DOORS 3 0 3 3 +DONOVAN'S 1 0 1 1 +DOMINION 1 0 1 1 +DOMINATES 1 0 1 1 +DOMED 1 0 1 1 +DOME 2 0 2 2 +DOLLARS 2 0 2 2 +DOINGS 1 0 1 1 +DOGGEDLY 1 0 1 1 +DODGING 1 0 1 1 +DIVISION 1 0 1 1 +DIVINE 1 0 1 1 +DIVIDES 1 0 1 1 +DIVERT 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBED 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRICTS 1 0 1 1 +DISTRIBUTED 1 0 1 1 +DISTRIBUTE 1 0 1 1 +DISTRACTED 2 0 2 2 +DISTINGUISH 2 0 2 2 +DISTINCTLY 1 0 1 1 +DISTINCTIVE 1 0 1 1 +DISTINCT 1 0 1 1 +DISTENDED 1 0 1 1 +DISTANCES 1 0 1 1 +DISTANCE 3 0 3 3 +DISTAFF 1 0 1 1 +DISSIPATION 2 0 2 2 +DISSIMULATION 1 0 1 1 +DISSENTERING 1 0 1 1 +DISSENSIONS 2 0 2 2 +DISREGARDED 1 0 1 1 +DISPUTED 1 0 1 1 +DISPUTE 1 0 1 1 +DISPROVE 1 0 1 1 +DISPOSITION 2 0 2 2 +DISPOSAL 1 0 1 1 +DISPLEASED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 2 0 2 2 +DISPENSED 1 0 1 1 +DISPENSE 1 0 1 1 +DISMAL 1 0 1 1 +DISHONEST 1 0 1 1 +DISHES 7 0 7 7 +DISH 2 0 2 2 +DISGUST 1 0 1 1 +DISGRACE 4 0 4 4 +DISENTANGLE 1 0 1 1 +DISEASE 1 0 1 1 +DISCUSSIONS 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 3 0 3 3 +DISCRIMINATION 1 0 1 1 +DISCRETION 1 0 1 1 +DISCOVERY 3 0 3 3 +DISCOVERIES 1 0 1 1 +DISCOVERED 4 0 4 4 +DISCOVER 2 0 2 2 +DISCOURSES 1 0 1 1 +DISCOURAGEMENTS 1 0 1 1 +DISCONTENT 1 0 1 1 +DISCONCERTION 1 0 1 1 +DISCOMFORT 1 0 1 1 +DISCLOSURES 1 0 1 1 +DISCLOSE 1 0 1 1 +DISCLAIM 1 0 1 1 +DISCIPLINE 1 0 1 1 +DISCERNING 1 0 1 1 +DISAPPOINTED 2 0 2 2 +DISAPPEARS 1 0 1 1 +DISAPPEARED 5 0 5 5 +DISAPPEAR 1 0 1 1 +DISADVANTAGES 3 0 3 3 +DISADVANTAGEOUS 1 0 1 1 +DIRTY 2 0 2 2 +DIRK 2 0 2 2 +DIRECTLY 3 0 3 3 +DIRECTIONS 1 0 1 1 +DIRECTION 7 0 7 7 +DIRECTED 3 0 3 3 +DIP 2 0 2 2 +DINSMORE 2 0 2 2 +DINNERS 1 0 1 1 +DINNER 6 0 6 6 +DINING 1 0 1 1 +DINERS 1 0 1 1 +DIMPLED 1 0 1 1 +DIMLY 1 0 1 1 +DIMINISHED 1 0 1 1 +DIMINISH 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILAPIDATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGGERS 2 0 2 2 +DIGGER 10 0 10 10 +DIGESTION 3 0 3 3 +DIFFICULTY 7 0 7 7 +DIFFICULT 2 0 2 2 +DIFFERENT 7 0 7 7 +DIFFERENCES 1 0 1 1 +DIFFERENCE 7 0 7 7 +DIFFER 1 0 1 1 +DIED 13 0 13 13 +DIDST 1 0 1 1 +DICTATED 1 0 1 1 +DICE 1 0 1 1 +DIAMETER 1 0 1 1 +DIALOGUE 1 0 1 1 +DEVOURED 1 0 1 1 +DEVOTIONS 1 0 1 1 +DEVOTION 1 0 1 1 +DEVOTED 3 0 3 3 +DEVILS 2 0 2 2 +DEVIL 4 0 4 4 +DEVICE 1 0 1 1 +DEVELOPED 1 0 1 1 +DETECTIVE'S 1 0 1 1 +DETECTION 1 0 1 1 +DETECTED 1 0 1 1 +DETAILS 2 0 2 2 +DETAILED 3 0 3 3 +DESTROYS 1 0 1 1 +DESTROYER 1 0 1 1 +DESTROYED 4 0 4 4 +DESTROY 3 0 3 3 +DESTINED 1 0 1 1 +DESTINATION 1 0 1 1 +DESSERT 2 0 2 2 +DESPOTISM 2 0 2 2 +DESPOILED 1 0 1 1 +DESPISED 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 2 0 2 2 +DESPAIR 2 0 2 2 +DESIRED 2 0 2 2 +DESIRABLE 1 0 1 1 +DESERVING 1 0 1 1 +DESERVES 1 0 1 1 +DESERVE 2 0 2 2 +DESERTING 1 0 1 1 +DESERTED 2 0 2 2 +DESERT 1 0 1 1 
+DESCRIPTION 2 0 2 2 +DESCRIBED 1 0 1 1 +DESCRIBE 1 0 1 1 +DESCEND 1 0 1 1 +DERIVE 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRECATINGLY 1 0 1 1 +DEPOSITED 1 0 1 1 +DEPOSED 1 0 1 1 +DEPLORED 1 0 1 1 +DEPENDS 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 1 0 1 1 +DEPARTURE 2 0 2 2 +DEPARTMENT 2 0 2 2 +DEPARTING 1 0 1 1 +DEPARTED 3 0 3 3 +DENY 1 0 1 1 +DENOUNCED 1 0 1 1 +DENOTING 1 0 1 1 +DENIS 2 0 2 2 +DENIAL 1 0 1 1 +DEN 1 0 1 1 +DEMANDS 3 0 3 3 +DEMANDED 1 0 1 1 +DELUSION 2 0 2 2 +DELIVERY 1 0 1 1 +DELIVERER 1 0 1 1 +DELIVER 2 0 2 2 +DELIGHTFUL 2 0 2 2 +DELIGHTED 2 0 2 2 +DELIGHT 7 0 7 7 +DELICIOUSLY 1 0 1 1 +DELICATE 3 0 3 3 +DELIBERATELY 1 0 1 1 +DELAYED 1 0 1 1 +DELAY 3 0 3 3 +DEJECTION 1 0 1 1 +DEITY 1 0 1 1 +DEGREE 1 0 1 1 +DEGENERATING 1 0 1 1 +DEFYING 1 0 1 1 +DEFRAUD 1 0 1 1 +DEFORMED 2 0 2 2 +DEFINED 1 0 1 1 +DEFIANT 1 0 1 1 +DEFERENCE 1 0 1 1 +DEFENDING 1 0 1 1 +DEFENDERS 2 0 2 2 +DEFEAT 1 0 1 1 +DEEPLY 3 0 3 3 +DEEMED 1 0 1 1 +DECORATION 1 0 1 1 +DECLINED 1 0 1 1 +DECLARED 1 0 1 1 +DECKS 1 0 1 1 +DECK 6 0 6 6 +DECISION 3 0 3 3 +DECIDED 5 0 5 5 +DECIDE 2 0 2 2 +DECEPTION 1 0 1 1 +DECEMBER 2 0 2 2 +DECEIVED 5 0 5 5 +DECEIVE 1 0 1 1 +DECEASED 1 0 1 1 +DECAY 1 0 1 1 +DEBATED 1 0 1 1 +DEBATE 2 0 2 2 +DEATHS 1 0 1 1 +DEARER 1 0 1 1 +DEALT 2 0 2 2 +DEALER 1 0 1 1 +DEAF 1 0 1 1 +DAZED 1 0 1 1 +DAYLIGHT 2 0 2 2 +DAYBREAK 2 0 2 2 +DAY'S 1 0 1 1 +DAWNED 2 0 2 2 +DAWN 4 0 4 4 +DAVID 2 0 2 2 +DAUNTED 1 0 1 1 +DAUGHTER'S 1 0 1 1 +DAUGHTER 10 0 10 10 +DASHING 1 0 1 1 +DASHED 1 0 1 1 +DARTED 1 0 1 1 +DARKNESS 7 0 7 7 +DARING 1 0 1 1 +DARCY'S 1 0 1 1 +DARCY 6 0 6 6 +DANGERS 1 0 1 1 +DANGEROUS 2 0 2 2 +DANGER 11 0 11 11 +DANDY 1 0 1 1 +DANCE 2 0 2 2 +DAMPNESS 1 0 1 1 +DAMNED 1 0 1 1 +DAMES 1 0 1 1 +DAMASCUS 4 0 4 4 +DAM 1 0 1 1 +DADDY 1 0 1 1 +CUTTER'S 1 0 1 1 +CUTTER 3 0 3 3 +CUSHION 1 0 1 1 +CURVED 2 0 2 2 +CURTAINS 2 0 2 2 +CURSES 1 0 1 1 +CURSED 2 0 2 2 +CURRENT 1 0 1 1 +CURRENCY 1 0 1 1 +CURIOUS 4 0 4 4 +CURED 1 0 1 1 +CURE 4 0 4 4 +CURATE 2 0 2 2 +CUPBOARD 2 0 2 2 +CULTURED 1 0 1 1 +CULTURE 1 0 1 1 +CULTIVATED 2 0 2 2 +CULT 1 0 1 1 +CUBITS 1 0 1 1 +CRY 2 0 2 2 +CRUSHED 1 0 1 1 +CRUSADER 1 0 1 1 +CRUELTY 4 0 4 4 +CRUEL 4 0 4 4 +CRUDE 1 0 1 1 +CRUCIFIXION 9 0 9 9 +CROWNED 1 0 1 1 +CROWN 3 0 3 3 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSED 5 0 5 5 +CROSS 8 0 8 8 +CROOKED 1 0 1 1 +CROAKING 1 0 1 1 +CRITICS 1 0 1 1 +CRITICAL 2 0 2 2 +CRIPPLED 2 0 2 2 +CRIMSON 1 0 1 1 +CRIMINALS 1 0 1 1 +CRIMINAL 1 0 1 1 +CRIME 5 0 5 5 +CRIED 21 0 21 21 +CRICKETS 1 0 1 1 +CREPT 1 0 1 1 +CREEPY 1 0 1 1 +CREEPING 1 0 1 1 +CREDITS 3 0 3 3 +CREDIT 5 0 5 5 +CREATURES 2 0 2 2 +CREATURE 4 0 4 4 +CREATOR 4 0 4 4 +CREATIONS 1 0 1 1 +CREATING 1 0 1 1 +CREATED 3 0 3 3 +CREATE 3 0 3 3 +CREASES 1 0 1 1 +CREASED 1 0 1 1 +CREAKED 1 0 1 1 +CRAYFISH 3 0 3 3 +CRAWLED 2 0 2 2 +CRASHED 1 0 1 1 +CRASH 1 0 1 1 +CRAFT 1 0 1 1 +CRACKERS 1 0 1 1 +CRACKED 2 0 2 2 +COWARDS 1 0 1 1 +COWARD 1 0 1 1 +COVERING 1 0 1 1 +COVERED 5 0 5 5 +COVER 1 0 1 1 +COVE 1 0 1 1 +COUSINS 1 0 1 1 +COUSIN 10 0 10 10 +COURAGE 4 0 4 4 +COUPLETS 1 0 1 1 +COUPLE 2 0 2 2 +COUNTY 9 0 9 9 +COUNTESS 1 0 1 1 +COUNTER 1 0 1 1 +COUNT'S 2 0 2 2 +COUNSELLED 1 0 1 1 +COUGHING 2 0 2 2 +COTTONY 1 0 1 1 +COTTON 3 0 3 3 +COSTUME 1 0 1 1 +COSETTE 2 0 2 2 +CORSICAN 1 0 1 1 +CORRIDOR 2 0 2 2 +CORRESPONDENCE 1 0 1 1 +CORRECT 1 0 1 1 +CORPSES 1 0 1 1 +CORPSE 3 0 3 3 +CORPORATIONS 1 0 1 1 +CORNERS 1 0 1 1 +CORDIAL 1 0 1 1 +COPY 1 0 1 1 +COPPER 2 0 2 2 +COOLNESS 2 0 2 2 +COOKING 1 0 1 1 +COOK 4 0 4 4 +CONVINCING 1 0 1 1 +CONVINCED 1 0 1 1 +CONVICTION 3 0 3 3 +CONVEYANCE 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSATION 
10 0 10 10 +CONVENTIONS 1 0 1 1 +CONVENTION 1 0 1 1 +CONVENT 4 0 4 4 +CONVENIENCES 1 0 1 1 +CONTRIVE 1 0 1 1 +CONTRARY 5 0 5 5 +CONTRADICTION 1 0 1 1 +CONTRACTED 1 0 1 1 +CONTRACT 3 0 3 3 +CONTINUED 11 0 11 11 +CONTINUE 3 0 3 3 +CONTINUATION 1 0 1 1 +CONTINUANCE 1 0 1 1 +CONTINUALLY 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTED 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPORARY 2 0 2 2 +CONTAINS 1 0 1 1 +CONTAINING 2 0 2 2 +CONTAINED 1 0 1 1 +CONTAIN 1 0 1 1 +CONTAGIOUS 2 0 2 2 +CONTACT 3 0 3 3 +CONSUMED 2 0 2 2 +CONSULTED 3 0 3 3 +CONSULTATIONS 1 0 1 1 +CONSTRUCT 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTANTLY 3 0 3 3 +CONSTANTIUS 1 0 1 1 +CONSPIRATORS 2 0 2 2 +CONSPIRACY 1 0 1 1 +CONSORTED 1 0 1 1 +CONSOLES 1 0 1 1 +CONSISTS 2 0 2 2 +CONSISTENCY 1 0 1 1 +CONSISTED 1 0 1 1 +CONSIDERING 2 0 2 2 +CONSIDERED 3 0 3 3 +CONSIDERATION 2 0 2 2 +CONSIDERABLE 6 0 6 6 +CONSIDER 1 0 1 1 +CONSERVATIVE 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 1 0 1 1 +CONSENTED 1 0 1 1 +CONSENT 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIOUSLY 1 0 1 1 +CONSCIENTIOUS 1 0 1 1 +CONSCIENCES 1 0 1 1 +CONSCIENCE 3 0 3 3 +CONQUEST 3 0 3 3 +CONQUEROR 1 0 1 1 +CONQUERING 1 0 1 1 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNOISSEUR 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 4 0 4 4 +CONNECTED 1 0 1 1 +CONNECT 2 0 2 2 +CONJECTURES 1 0 1 1 +CONGRESSES 1 0 1 1 +CONGRESS 3 0 3 3 +CONGEALETH 1 0 1 1 +CONFUSION 4 0 4 4 +CONFOUND 1 0 1 1 +CONFLICT 2 0 2 2 +CONFISCATION 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 2 0 2 2 +CONFINEMENT 1 0 1 1 +CONFIDENTIAL 1 0 1 1 +CONFIDENCE 3 0 3 3 +CONFESSION 4 0 4 4 +CONFESSED 2 0 2 2 +CONFESS 9 0 9 9 +CONFERRING 1 0 1 1 +CONFERENCE 1 0 1 1 +CONFECTIONS 1 0 1 1 +CONFECTIONER 1 0 1 1 +CONDUCTED 2 0 2 2 +CONDUCT 4 0 4 4 +CONDITIONS 4 0 4 4 +CONDITION 4 0 4 4 +CONDESCEND 1 0 1 1 +CONDEMNED 2 0 2 2 +CONCLUSION 2 0 2 2 +CONCILIATE 1 0 1 1 +CONCIERGE'S 1 0 1 1 +CONCERNS 1 0 1 1 +CONCERNING 1 0 1 1 +CONCERN 2 0 2 2 +CONCEPTION 5 0 5 5 +CONCENTRATED 2 0 2 2 +CONCENTRATE 1 0 1 1 +CONCEIVE 1 0 1 1 +CONCEITED 1 0 1 1 +CONCEAL 3 0 3 3 +COMTE 1 0 1 1 +COMRADE 3 0 3 3 +COMPULSORY 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 1 0 1 1 +COMPOSITION 1 0 1 1 +COMPOSED 1 0 1 1 +COMPLY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICITY 1 0 1 1 +COMPLETELY 6 0 6 6 +COMPLETED 1 0 1 1 +COMPLETE 1 0 1 1 +COMPLAIN 1 0 1 1 +COMPETITION 1 0 1 1 +COMPELLING 1 0 1 1 +COMPELLED 1 0 1 1 +COMPATRIOT 1 0 1 1 +COMPASS 1 0 1 1 +COMPARATIVELY 1 0 1 1 +COMPANY 13 0 13 13 +COMPANIONS 3 0 3 3 +COMPANION'S 1 0 1 1 +COMPANION 4 0 4 4 +COMMUNICATION 2 0 2 2 +COMMUNICATES 2 0 2 2 +COMMUNICATED 1 0 1 1 +COMMUNICANTS 1 0 1 1 +COMMONS 3 0 3 3 +COMMONLY 1 0 1 1 +COMMONERS 1 0 1 1 +COMMON 3 0 3 3 +COMMITTED 4 0 4 4 +COMMISSIONED 1 0 1 1 +COMMISSION 1 0 1 1 +COMMENDING 1 0 1 1 +COMMENDED 1 0 1 1 +COMMANDING 2 0 2 2 +COMMANDED 2 0 2 2 +COMMAND 2 0 2 2 +COMFORTABLE 2 0 2 2 +COMFORT 2 0 2 2 +COMBATIVE 1 0 1 1 +COMBAT 2 0 2 2 +COMB 1 0 1 1 +COLOURED 1 0 1 1 +COLOUR 2 0 2 2 +COLOSSAL 1 0 1 1 +COLOR 2 0 2 2 +COLONELS 1 0 1 1 +COLLECTOR'S 1 0 1 1 +COLLECTOR 1 0 1 1 +COLLECTION 1 0 1 1 +COLLECTING 2 0 2 2 +COLLAR 2 0 2 2 +COLIC 1 0 1 1 +COINCIDENCES 1 0 1 1 +COIL 1 0 1 1 +COFFIN 20 0 20 20 +COFFEE 1 0 1 1 +COCKING 1 0 1 1 +COBBER 1 0 1 1 +COAST 2 0 2 2 +COACH 3 0 3 3 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUBBED 1 0 1 1 +CLUB 3 0 3 3 +CLOVER 1 0 1 1 +CLOUDS 1 0 1 1 +CLOTHES 8 0 8 8 +CLOTHE 1 0 1 1 +CLOSING 2 0 2 2 +CLOSES 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 5 0 5 5 +CLOSED 4 0 4 4 +CLOSE 14 0 14 14 +CLOISTER 3 0 3 3 +CLOGGED 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 2 0 2 2 +CLENCHING 1 0 1 1 
+CLEMENT 1 0 1 1 +CLEMENCY 1 0 1 1 +CLEARLY 1 0 1 1 +CLEARER 1 0 1 1 +CLEAR 7 0 7 7 +CLEANED 2 0 2 2 +CLEAN 4 0 4 4 +CLASPED 1 0 1 1 +CLASP 1 0 1 1 +CLASHING 1 0 1 1 +CLAPPED 1 0 1 1 +CLANKING 1 0 1 1 +CLAIR 3 0 3 3 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CIVILIZED 1 0 1 1 +CIVILITY 1 0 1 1 +CIVILITIES 1 0 1 1 +CITY 16 0 16 16 +CITIZENS 6 0 6 6 +CIRCUMSTANTIAL 1 0 1 1 +CIRCUMSTANCES 6 0 6 6 +CIRCULAR 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLES 1 0 1 1 +CIRCLE 2 0 2 2 +CILLEY 1 0 1 1 +CIDER 1 0 1 1 +CHURCHYARDS 1 0 1 1 +CHUCKLED 3 0 3 3 +CHRYSIPPUS 2 0 2 2 +CHRISTMAS 1 0 1 1 +CHRISTI 1 0 1 1 +CHRIS'S 1 0 1 1 +CHOSEN 3 0 3 3 +CHOSE 2 0 2 2 +CHOP 1 0 1 1 +CHOKE 1 0 1 1 +CHOIR 2 0 2 2 +CHOICE 1 0 1 1 +CHIRP 1 0 1 1 +CHINESE 1 0 1 1 +CHIN 1 0 1 1 +CHIMNEY 7 0 7 7 +CHIMES 1 0 1 1 +CHILDREN 13 0 13 13 +CHILDLESS 1 0 1 1 +CHILDHOOD 1 0 1 1 +CHILD'S 1 0 1 1 +CHILD 6 0 6 6 +CHEWERS 2 0 2 2 +CHERISHED 1 0 1 1 +CHEFS 1 0 1 1 +CHEESE 1 0 1 1 +CHEERFULNESS 1 0 1 1 +CHEERFULLY 1 0 1 1 +CHEERFUL 4 0 4 4 +CHEEKED 1 0 1 1 +CHECKING 1 0 1 1 +CHECKED 1 0 1 1 +CHEATING 1 0 1 1 +CHEAPLY 1 0 1 1 +CHEAP 1 0 1 1 +CHATTING 1 0 1 1 +CHASSEUR 1 0 1 1 +CHASM 1 0 1 1 +CHASED 1 0 1 1 +CHARMS 1 0 1 1 +CHARMING 3 0 3 3 +CHARM 3 0 3 3 +CHARLIE 1 0 1 1 +CHARLES 2 0 2 2 +CHARITY 1 0 1 1 +CHARIOT 1 0 1 1 +CHARGES 2 0 2 2 +CHARGER 1 0 1 1 +CHARCOAL 1 0 1 1 +CHARACTER 6 0 6 6 +CHAPTERS 1 0 1 1 +CHAPS 1 0 1 1 +CHAPLET 1 0 1 1 +CHAPEL 6 0 6 6 +CHAP 1 0 1 1 +CHANTED 1 0 1 1 +CHANNEL 3 0 3 3 +CHANGING 2 0 2 2 +CHANGED 4 0 4 4 +CHANCELLOR'S 1 0 1 1 +CHAMPIONS 1 0 1 1 +CHAMPAGNE 1 0 1 1 +CHAMBERLAIN 6 0 6 6 +CHAMBER 5 0 5 5 +CHAIR 5 0 5 5 +CETERA 2 0 2 2 +CESSATION 1 0 1 1 +CERTIFIED 1 0 1 1 +CERTAINLY 13 0 13 13 +CEREMONY 1 0 1 1 +CENTURY 3 0 3 3 +CENTURIES 5 0 5 5 +CENTRES 1 0 1 1 +CENTRAL 6 0 6 6 +CENT 1 0 1 1 +CEMETERY 1 0 1 1 +CELLARS 1 0 1 1 +CELIA 1 0 1 1 +CELERY 1 0 1 1 +CEASED 7 0 7 7 +CAVALRY 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 +CAUSED 1 0 1 1 +CATCHING 1 0 1 1 +CATCH 6 0 6 6 +CAT 3 0 3 3 +CASTRATO 2 0 2 2 +CASTING 2 0 2 2 +CASKET 1 0 1 1 +CASHIER 1 0 1 1 +CASES 2 0 2 2 +CARVED 3 0 3 3 +CARTHUSIANS 1 0 1 1 +CARS 1 0 1 1 +CARRY 7 0 7 7 +CARROT 1 0 1 1 +CARPET 1 0 1 1 +CARPENTER 1 0 1 1 +CAROLINA 1 0 1 1 +CARGO 1 0 1 1 +CAREWORN 2 0 2 2 +CARESSES 1 0 1 1 +CAREFULLY 3 0 3 3 +CAREFUL 4 0 4 4 +CARDS 1 0 1 1 +CARDINALS 1 0 1 1 +CARBONATE 1 0 1 1 +CARAVAN 1 0 1 1 +CAPTURE 3 0 3 3 +CAPTOR 1 0 1 1 +CAPTIVE 3 0 3 3 +CAPTAIN'S 1 0 1 1 +CAPERING 1 0 1 1 +CAPERED 1 0 1 1 +CAPABLE 2 0 2 2 +CAPABILITIES 1 0 1 1 +CAP'S 1 0 1 1 +CANVAS 1 0 1 1 +CANST 1 0 1 1 +CANONIZED 1 0 1 1 +CANOE 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLESTICK 2 0 2 2 +CANDLES 1 0 1 1 +CANDLE 3 0 3 3 +CANAL 1 0 1 1 +CAMPED 1 0 1 1 +CAMPAIGNS 2 0 2 2 +CAMOUFLAGE 1 0 1 1 +CAMEL 2 0 2 2 +CALMLY 2 0 2 2 +CALLS 1 0 1 1 +CALLING 2 0 2 2 +CALIPH 1 0 1 1 +CALENDAR 1 0 1 1 +CALCULATE 1 0 1 1 +CAFE 1 0 1 1 +CAESARS 1 0 1 1 +CADET 1 0 1 1 +CABLE'S 1 0 1 1 +CABIN 4 0 4 4 +CABARET 1 0 1 1 +BUYING 2 0 2 2 +BUTTERFLIES 1 0 1 1 +BUTTER 5 0 5 5 +BUSTED 2 0 2 2 +BUSINESSES 1 0 1 1 +BUSHY 2 0 2 2 +BUSHES 1 0 1 1 +BURSTING 1 0 1 1 +BURNING 1 0 1 1 +BURN 1 0 1 1 +BURIED 7 0 7 7 +BURIAL 1 0 1 1 +BURGUNDY 1 0 1 1 +BUNKER 1 0 1 1 +BUNDLES 2 0 2 2 +BUNDLED 1 0 1 1 +BUMS 1 0 1 1 +BULLOCK 1 0 1 1 +BULLET 2 0 2 2 +BULB 1 0 1 1 +BUILT 1 0 1 1 +BUILDINGS 1 0 1 1 +BUILDING 3 0 3 3 +BUGLE 1 0 1 1 +BUGGY 2 0 2 2 +BUFF 1 0 1 1 +BUD 1 0 1 1 +BUCKLEY 1 0 1 1 +BUBBLES 1 0 1 1 +BRUTE 2 0 2 2 +BRUTALLY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSHED 2 0 2 2 +BRUISING 1 0 1 1 +BROTHERLY 1 0 1 1 +BROTH 1 0 1 1 +BRONZE 1 0 
1 1 +BROKER'S 1 0 1 1 +BROKEN 10 0 10 10 +BROKE 7 0 7 7 +BROAD 3 0 3 3 +BRITISH 1 0 1 1 +BRINGING 3 0 3 3 +BRINGETH 2 0 2 2 +BRIM 1 0 1 1 +BRIGHT 5 0 5 5 +BRIGANDS 1 0 1 1 +BRIDGE 4 0 4 4 +BRIDE 3 0 3 3 +BRICKS 1 0 1 1 +BRICK 1 0 1 1 +BREWING 1 0 1 1 +BRETHREN 3 0 3 3 +BREED 1 0 1 1 +BRED 1 0 1 1 +BREATHING 1 0 1 1 +BREASTS 1 0 1 1 +BREAKS 3 0 3 3 +BREAKING 3 0 3 3 +BREAKFAST 4 0 4 4 +BREAK 6 0 6 6 +BREAD 3 0 3 3 +BREACH 1 0 1 1 +BRAVELY 2 0 2 2 +BRANDON 1 0 1 1 +BRANCHES 2 0 2 2 +BRANCH 3 0 3 3 +BRAG 1 0 1 1 +BRADFORD 1 0 1 1 +BRACKETS 1 0 1 1 +BOYS 8 0 8 8 +BOXES 2 0 2 2 +BOWL 1 0 1 1 +BOWED 2 0 2 2 +BOW 3 0 3 3 +BOURGEOIS 1 0 1 1 +BOUQUET 2 0 2 2 +BOUNTY 1 0 1 1 +BOUND 5 0 5 5 +BOULEVARD 1 0 1 1 +BOUGHT 4 0 4 4 +BOTTLES 1 0 1 1 +BOTTLE 4 0 4 4 +BOSOM 3 0 3 3 +BORROWED 1 0 1 1 +BORED 1 0 1 1 +BORE 2 0 2 2 +BORDERS 1 0 1 1 +BORDER 1 0 1 1 +BOOTY 1 0 1 1 +BOOTS 2 0 2 2 +BOOT 1 0 1 1 +BOOMED 2 0 2 2 +BOOKS 1 0 1 1 +BOOKLET 1 0 1 1 +BOOK 8 0 8 8 +BONNETS 2 0 2 2 +BONNET 1 0 1 1 +BONDAGE 2 0 2 2 +BOMB 1 0 1 1 +BOLTS 1 0 1 1 +BOLDER 1 0 1 1 +BOILING 3 0 3 3 +BOILER 1 0 1 1 +BOEUF 2 0 2 2 +BODY 13 0 13 13 +BODILY 3 0 3 3 +BODIES 2 0 2 2 +BOAT'S 1 0 1 1 +BOAST 1 0 1 1 +BOARDS 1 0 1 1 +BOARDING 1 0 1 1 +BOARD 5 0 5 5 +BLURTED 1 0 1 1 +BLUNTLY 1 0 1 1 +BLUNTED 1 0 1 1 +BLUBBERING 1 0 1 1 +BLOWS 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 2 0 2 2 +BLOSSOM 1 0 1 1 +BLOOM 1 0 1 1 +BLIZZARD'S 1 0 1 1 +BLIZZARD 2 0 2 2 +BLINKED 2 0 2 2 +BLINDNESS 1 0 1 1 +BLINDING 1 0 1 1 +BLINDED 1 0 1 1 +BLIND 5 0 5 5 +BLEW 1 0 1 1 +BLESSINGS 1 0 1 1 +BLESSING 2 0 2 2 +BLESS 3 0 3 3 +BLEND 1 0 1 1 +BLEAK 1 0 1 1 +BLAZING 1 0 1 1 +BLANKLY 1 0 1 1 +BLANK 1 0 1 1 +BLAMING 1 0 1 1 +BLAMED 1 0 1 1 +BLAME 2 0 2 2 +BLADE 1 0 1 1 +BLACKSTONE 1 0 1 1 +BLACKGUARD 1 0 1 1 +BITE 1 0 1 1 +BISHOPS 1 0 1 1 +BISHOP 4 0 4 4 +BISCUIT 1 0 1 1 +BIRTHPLACE 1 0 1 1 +BIRTHDAY 1 0 1 1 +BIRTH 1 0 1 1 +BIRD 1 0 1 1 +BIRCH 1 0 1 1 +BIND 1 0 1 1 +BILLS 2 0 2 2 +BILIOUS 1 0 1 1 +BIGGER 1 0 1 1 +BEYOND 7 0 7 7 +BEWILDERMENT 1 0 1 1 +BETWIXT 1 0 1 1 +BETRAY 1 0 1 1 +BETOOK 1 0 1 1 +BETIDETH 1 0 1 1 +BESTOW 3 0 3 3 +BESS 1 0 1 1 +BESPAKE 1 0 1 1 +BESOUGHT 1 0 1 1 +BESIEGERS 2 0 2 2 +BESEECH 2 0 2 2 +BERNARDONE 1 0 1 1 +BERNARD 4 0 4 4 +BEQUEATH 2 0 2 2 +BENT 2 0 2 2 +BENJAMIN 1 0 1 1 +BENEATH 3 0 3 3 +BENCH 3 0 3 3 +BELOW 2 0 2 2 +BELONG 1 0 1 1 +BELLS 4 0 4 4 +BELLIES 1 0 1 1 +BELIEVING 1 0 1 1 +BELIEVES 1 0 1 1 +BELIEVED 6 0 6 6 +BELIEF 5 0 5 5 +BEINGS 3 0 3 3 +BEHOLDING 1 0 1 1 +BEHOLD 5 0 5 5 +BEHIND 16 0 16 16 +BEHAVED 3 0 3 3 +BEHALF 1 0 1 1 +BEGUILED 1 0 1 1 +BEGINNING 6 0 6 6 +BEGGAR 1 0 1 1 +BEFITTING 1 0 1 1 +BEFALLEN 1 0 1 1 +BEDOUIN 1 0 1 1 +BED 14 0 14 14 +BECOMES 6 0 6 6 +BECOME 15 0 15 15 +BECKY 1 0 1 1 +BECAUSE 34 0 34 34 +BEAVER 1 0 1 1 +BEAUTY 4 0 4 4 +BEAUTIFULLY 1 0 1 1 +BEAUTIFUL 8 0 8 8 +BEATEN 2 0 2 2 +BEAT 6 0 6 6 +BEASTS 4 0 4 4 +BEASTLY 1 0 1 1 +BEAST 2 0 2 2 +BEAR 8 0 8 8 +BEAMS 2 0 2 2 +BEAD 1 0 1 1 +BEACON 2 0 2 2 +BEACH 2 0 2 2 +BATTLE 2 0 2 2 +BATTERY 1 0 1 1 +BATON 1 0 1 1 +BATHING 1 0 1 1 +BATHED 1 0 1 1 +BASKING 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASIN 1 0 1 1 +BASER 1 0 1 1 +BASED 2 0 2 2 +BARS 5 0 5 5 +BARRIER 1 0 1 1 +BARRICADES 1 0 1 1 +BARRED 1 0 1 1 +BARRACK 2 0 2 2 +BARONET 1 0 1 1 +BARKING 1 0 1 1 +BARE 1 0 1 1 +BARBAROUS 1 0 1 1 +BARBARITY 1 0 1 1 +BAR 7 0 7 7 +BAPTIST 1 0 1 1 +BANQUET 3 0 3 3 +BANKER 1 0 1 1 +BANK 9 0 9 9 +BANDS 1 0 1 1 +BANDITS 1 0 1 1 +BANDIT 1 0 1 1 +BALSAM 1 0 1 1 +BALLOT 4 0 4 4 +BALE 1 0 1 1 +BAKING 1 0 1 1 +BAIL 1 0 1 1 +BAH 1 0 1 1 +BAGGY 1 0 1 1 +BAGGAGE 1 0 1 1 +BADLY 3 0 3 3 +BADGE 1 
0 1 1 +BACON 1 0 1 1 +BACKGROUND 3 0 3 3 +BACHELOR 1 0 1 1 +BABYLONIA 1 0 1 1 +AZURE 1 0 1 1 +AWOKE 1 0 1 1 +AWKWARDNESS 1 0 1 1 +AWKWARDLY 1 0 1 1 +AWFUL 4 0 4 4 +AWE 1 0 1 1 +AWARE 2 0 2 2 +AWAKENING 2 0 2 2 +AWAKENED 1 0 1 1 +AWAITS 1 0 1 1 +AWAITED 1 0 1 1 +AWAIT 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 4 0 4 4 +AVERAGE 2 0 2 2 +AUTOMATICALLY 1 0 1 1 +AUTOCRACY 1 0 1 1 +AUTHORITY 10 0 10 10 +AUTHORITIES 1 0 1 1 +AUTHOR 1 0 1 1 +AUTHENTIC 1 0 1 1 +AUSTRIA 1 0 1 1 +AUSPICIOUS 3 0 3 3 +AUGMENTED 1 0 1 1 +AUGHT 2 0 2 2 +AUDACIOUS 1 0 1 1 +ATTRACTIVE 3 0 3 3 +ATTRACTED 2 0 2 2 +ATTORNEY 1 0 1 1 +ATTENDING 1 0 1 1 +ATTEMPTING 1 0 1 1 +ATTEMPT 2 0 2 2 +ATTAINED 1 0 1 1 +ATTACKS 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 2 0 2 2 +ATTACHMENT 1 0 1 1 +ATTACHED 1 0 1 1 +ATMOSPHERE 1 0 1 1 +ATE 1 0 1 1 +ASUNDER 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHED 1 0 1 1 +ASSYRIAN 2 0 2 2 +ASSUREDLY 1 0 1 1 +ASSURE 8 0 8 8 +ASSURANCE 2 0 2 2 +ASSOCIATIONS 1 0 1 1 +ASSOCIATES 1 0 1 1 +ASSISTED 1 0 1 1 +ASSISTANT 1 0 1 1 +ASSISTANCE 3 0 3 3 +ASSIST 3 0 3 3 +ASSERT 2 0 2 2 +ASSEMBLY 3 0 3 3 +ASSEMBLED 2 0 2 2 +ASSASSINATED 1 0 1 1 +ASSAILED 1 0 1 1 +ASS 3 0 3 3 +ASPECT 1 0 1 1 +ASKING 5 0 5 5 +ASIDE 5 0 5 5 +ASHLEY 5 0 5 5 +ASCERTAINING 1 0 1 1 +ASCERTAIN 1 0 1 1 +ARTS 1 0 1 1 +ARTISTS 4 0 4 4 +ARTICLES 1 0 1 1 +ARTFUL 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVAL 1 0 1 1 +ARRESTED 1 0 1 1 +ARRANGING 1 0 1 1 +ARRANGEMENTS 1 0 1 1 +ARMS 9 0 9 9 +ARMIES 2 0 2 2 +ARKADYEVITCH 1 0 1 1 +ARK 1 0 1 1 +ARISTOCRACY 1 0 1 1 +ARISE 1 0 1 1 +ARGUMENTS 3 0 3 3 +ARGUMENT 1 0 1 1 +ARGUED 1 0 1 1 +ARENA 1 0 1 1 +ARDENT 3 0 3 3 +ARCHITECTURE 1 0 1 1 +ARCHBISHOPS 1 0 1 1 +ARABIC 1 0 1 1 +ARABIANS 1 0 1 1 +AQUA 1 0 1 1 +APTITUDE 1 0 1 1 +APRIL 1 0 1 1 +APPROVAL 1 0 1 1 +APPROACHING 1 0 1 1 +APPROACHED 3 0 3 3 +APPREHENSIONS 1 0 1 1 +APPOINTMENT 2 0 2 2 +APPLYING 1 0 1 1 +APPLY 3 0 3 3 +APPLAUSE 1 0 1 1 +APPETITE 2 0 2 2 +APPEARING 1 0 1 1 +APPEALS 1 0 1 1 +APPEALING 1 0 1 1 +APPEAL 1 0 1 1 +APPARITION 1 0 1 1 +APPARENT 1 0 1 1 +APOLOGY 1 0 1 1 +APERTURE 1 0 1 1 +APARTMENTS 3 0 3 3 +APARTMENT 1 0 1 1 +ANYWHERE 1 0 1 1 +ANYHOW 2 0 2 2 +ANYBODY 2 0 2 2 +ANXIOUS 3 0 3 3 +ANXIETY 5 0 5 5 +ANTONIO 1 0 1 1 +ANTIQUARIAN'S 1 0 1 1 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANSWERING 2 0 2 2 +ANSWER 15 0 15 15 +ANON 1 0 1 1 +ANNOYANCES 1 0 1 1 +ANNOYANCE 1 0 1 1 +ANNOUNCING 1 0 1 1 +ANNOUNCED 3 0 3 3 +ANNIHILATION 2 0 2 2 +ANNIHILATED 1 0 1 1 +ANIMATED 2 0 2 2 +ANIMATE 1 0 1 1 +ANIMALS 4 0 4 4 +ANGER 2 0 2 2 +ANEW 1 0 1 1 +ANDY 1 0 1 1 +ANDREW 2 0 2 2 +ANCIENTS 1 0 1 1 +ANCHOR 1 0 1 1 +ANALYSIS 1 0 1 1 +AMUSING 1 0 1 1 +AMPLY 1 0 1 1 +AMPLE 1 0 1 1 +AMOUNT 1 0 1 1 +AMONGST 5 0 5 5 +AMONG 18 0 18 18 +AMMUNITION 1 0 1 1 +AMISS 1 0 1 1 +AMIABLE 1 0 1 1 +AMERICAN 1 0 1 1 +AMERICA 1 0 1 1 +AMENDS 1 0 1 1 +AMENDMENT 1 0 1 1 +AMENDED 1 0 1 1 +AMBITIOUS 1 0 1 1 +AMBITIONS 1 0 1 1 +AMBASSADOR 1 0 1 1 +ALTOGETHER 2 0 2 2 +ALTHOUGH 10 0 10 10 +ALTERED 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 10 0 10 10 +ALONGSIDE 1 0 1 1 +ALMS 2 0 2 2 +ALMOST 11 0 11 11 +ALMIGHTY 1 0 1 1 +ALLOWING 1 0 1 1 +ALLOWANCES 1 0 1 1 +ALLIANCE 1 0 1 1 +ALLEY 1 0 1 1 +ALLAH'S 1 0 1 1 +ALLAH 9 0 9 9 +ALIVE 2 0 2 2 +ALIMONY 2 0 2 2 +ALIMENTARY 1 0 1 1 +ALIKE 2 0 2 2 +ALEX 1 0 1 1 +ALBERT'S 3 0 3 3 +ALAS 1 0 1 1 +ALARMS 1 0 1 1 +ALARM 3 0 3 3 +ALABAMA 1 0 1 1 +AIM 4 0 4 4 +AILS 1 0 1 1 +AILMENTS 2 0 2 2 +AHEAD 2 0 2 2 +AGREES 1 0 1 1 +AGREEMENT 2 0 2 2 +AGREED 6 0 6 6 +AGREEABLE 2 0 2 2 +AGREE 1 0 1 1 +AGONY 1 0 1 1 +AGO 10 0 10 10 +AGITATOR 1 0 1 1 +AGITATION 1 0 1 1 +AGITATING 1 0 1 1 +AGILITY 1 0 1 1 +AGHAST 1 0 1 1 
+AGGRESSIVENESS 1 0 1 1 +AGGRAVATIONS 1 0 1 1 +AGGRAVATED 1 0 1 1 +AGES 1 0 1 1 +AGENT 3 0 3 3 +AGED 2 0 2 2 +AGE 4 0 4 4 +AGAIN 56 0 56 56 +AFTERNOON 6 0 6 6 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFORESAID 1 0 1 1 +AFFORD 2 0 2 2 +AFFLICTION 1 0 1 1 +AFFIRMED 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATELY 1 0 1 1 +AFFECTED 2 0 2 2 +AFFAIRS 2 0 2 2 +AFFAIR 2 0 2 2 +ADVISEDLY 1 0 1 1 +ADVISED 1 0 1 1 +ADVISE 1 0 1 1 +ADVICE 2 0 2 2 +ADVENTURE 3 0 3 3 +ADVENT 1 0 1 1 +ADVANTAGES 2 0 2 2 +ADVANTAGE 4 0 4 4 +ADVANCING 1 0 1 1 +ADVANCES 2 0 2 2 +ADVANCED 1 0 1 1 +ADVANCE 5 0 5 5 +ADRIFT 1 0 1 1 +ADORNED 1 0 1 1 +ADORN 1 0 1 1 +ADORED 1 0 1 1 +ADMITTED 4 0 4 4 +ADMIT 1 0 1 1 +ADMISSION 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 5 0 5 5 +ADMIRABLE 1 0 1 1 +ADMINISTRATION 3 0 3 3 +ADMINISTERED 1 0 1 1 +ADJACENT 1 0 1 1 +ADHERENT 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 1 0 1 1 +ADDRESS 3 0 3 3 +ADDITION 1 0 1 1 +ADAGE 1 0 1 1 +ACUTE 2 0 2 2 +ACTS 4 0 4 4 +ACTORS 1 0 1 1 +ACTIVITIES 1 0 1 1 +ACTIONS 2 0 2 2 +ACTION 2 0 2 2 +ACTING 2 0 2 2 +ACTED 1 0 1 1 +ACT 7 0 7 7 +ACROSS 8 0 8 8 +ACQUITTAL 1 0 1 1 +ACQUIT 1 0 1 1 +ACQUISITIVE 1 0 1 1 +ACQUIRED 2 0 2 2 +ACQUAINTANCES 1 0 1 1 +ACQUAINTANCE 2 0 2 2 +ACQUAINT 1 0 1 1 +ACKNOWLEDGMENT 1 0 1 1 +ACIDS 1 0 1 1 +ACID 4 0 4 4 +ACHIEVED 1 0 1 1 +ACHED 1 0 1 1 +ACE 1 0 1 1 +ACCUSTOMED 2 0 2 2 +ACCUSING 3 0 3 3 +ACCUSED 1 0 1 1 +ACCUSATION 4 0 4 4 +ACCURATE 1 0 1 1 +ACCOUNTS 2 0 2 2 +ACCOUNTED 1 0 1 1 +ACCORDINGLY 5 0 5 5 +ACCORDING 7 0 7 7 +ACCORDANCE 1 0 1 1 +ACCORD 1 0 1 1 +ACCOMPLISHMENTS 1 0 1 1 +ACCOMPLISHED 1 0 1 1 +ACCOMPLICE 2 0 2 2 +ACCOMPANY 1 0 1 1 +ACCOMPANIED 3 0 3 3 +ACCOMMODATING 1 0 1 1 +ACCIDENTS 1 0 1 1 +ACCIDENT 1 0 1 1 +ACCESSION 1 0 1 1 +ACCESSIBLE 1 0 1 1 +ACCESS 1 0 1 1 +ACCEPTED 4 0 4 4 +ACCEPTABLE 1 0 1 1 +ABYSSINIANS 2 0 2 2 +ABUSING 1 0 1 1 +ABUSES 2 0 2 2 +ABUSE 1 0 1 1 +ABUNDANTLY 1 0 1 1 +ABUNDANT 1 0 1 1 +ABSORBING 2 0 2 2 +ABSORB 1 0 1 1 +ABSOLUTELY 3 0 3 3 +ABSOLUTE 1 0 1 1 +ABSENTED 1 0 1 1 +ABSENT 1 0 1 1 +ABSENCE 4 0 4 4 +ABRUPTLY 3 0 3 3 +ABOLISH 1 0 1 1 +ABNORMAL 1 0 1 1 +ABLE 11 0 11 11 +ABILITY 1 0 1 1 +ABILITIES 1 0 1 1 +ABIDE 1 0 1 1 +ABBOT 1 0 1 1 +ABACK 1 0 1 1 +AARON 1 0 1 1 diff --git a/log/greedy_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model-2023-04-04-09-35-47 b/log/greedy_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model-2023-04-04-09-35-47 new file mode 100644 index 0000000000000000000000000000000000000000..65412fd3916ca015b195dc48671d3babb29e640b --- /dev/null +++ b/log/greedy_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model-2023-04-04-09-35-47 @@ -0,0 +1,33 @@ +2023-04-04 09:35:47,107 INFO [decode.py:649] Decoding started +2023-04-04 09:35:47,108 INFO [decode.py:655] Device: cuda:0 +2023-04-04 09:35:47,110 INFO [decode.py:665] {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 
'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r7n04', 'IP address': '10.1.7.4'}, 'epoch': 30, 'iter': 0, 'avg': 9, 'use_averaged_model': True, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'lang_dir': PosixPath('data/lang_bpe_500'), 'decoding_method': 'greedy_search', 'beam_size': 4, 'beam': 20.0, 'ngram_lm_scale': 0.01, 'max_contexts': 4, 'max_states': 8, 'context_size': 2, 'max_sym_per_frame': 1, 'num_paths': 200, 'nbest_scale': 0.5, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'res_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2/greedy_search'), 'suffix': 'epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model', 'blank_id': 0, 'unk_id': 2, 'vocab_size': 500} +2023-04-04 09:35:47,110 INFO [decode.py:667] About to create model +2023-04-04 09:35:47,453 INFO [zipformer.py:405] At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-04-04 09:35:47,461 INFO [decode.py:738] Calculating the averaged model over epoch range from 21 (excluded) to 30 +2023-04-04 09:35:49,999 INFO [decode.py:772] Number of model parameters: 20697573 +2023-04-04 09:35:49,999 INFO [asr_datamodule.py:454] About to get test-clean cuts +2023-04-04 09:35:50,001 INFO [asr_datamodule.py:461] About to get test-other cuts +2023-04-04 09:35:53,538 INFO [decode.py:560] batch 0/?, cuts processed until now is 36 +2023-04-04 09:36:38,926 INFO [decode.py:560] batch 50/?, cuts processed until now is 2609 +2023-04-04 09:36:40,063 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/greedy_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt +2023-04-04 09:36:40,141 INFO [utils.py:560] [test-clean-greedy_search] %WER 3.58% [1881 / 52576, 211 ins, 163 del, 1507 sub ] +2023-04-04 09:36:40,301 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/greedy_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt +2023-04-04 09:36:40,302 INFO [decode.py:599] +For test-clean, WER of different settings are: +greedy_search 3.58 best for test-clean + +2023-04-04 09:36:41,891 INFO [decode.py:560] batch 0/?, cuts processed until now is 43 +2023-04-04 09:36:45,297 INFO [zipformer.py:2441] attn_weights_entropy = tensor([3.1587, 1.3790, 1.6006, 1.5477, 2.7838, 1.2051, 2.2557, 3.1271], + device='cuda:0'), covar=tensor([0.0570, 0.2876, 0.2844, 0.1718, 0.0661, 0.2407, 0.1209, 0.0258], + device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0371, 0.0391, 0.0347, 0.0375, 0.0351, 0.0386, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-04 09:37:23,322 INFO [decode.py:560] batch 50/?, cuts processed until now is 2939 +2023-04-04 09:37:23,429 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/greedy_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt +2023-04-04 09:37:23,509 INFO [utils.py:560] [test-other-greedy_search] %WER 9.29% [4862 / 52343, 500 ins, 491 del, 3871 sub ] +2023-04-04 09:37:23,678 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/greedy_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt +2023-04-04 09:37:23,679 INFO [decode.py:599] +For test-other, WER of different settings are: +greedy_search 9.29 best for test-other + +2023-04-04 09:37:23,679 INFO [decode.py:803] Done! 
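
The log above reports WER as `(insertions + deletions + substitutions) / number of reference words` — e.g. 1881 errors over 52,576 test-clean reference words gives 3.58%. The snippet below is a minimal sketch, not the icefall `utils.py` routine referenced in the log: it reproduces the reported percentages from the logged counts and shows the word-level edit distance underlying them. The helper names (`wer_from_counts`, `edit_distance`) are illustrative only.

```python
# Minimal sketch (not the icefall implementation) of how the WER figures in
# the log above are derived, and of the word-level edit distance behind them.

def wer_from_counts(ins: int, dels: int, subs: int, num_ref_words: int) -> float:
    """WER (%) = (insertions + deletions + substitutions) / #reference words."""
    return 100.0 * (ins + dels + subs) / num_ref_words

# Counts taken verbatim from the greedy_search log above.
assert round(wer_from_counts(211, 163, 1507, 52576), 2) == 3.58   # test-clean
assert round(wer_from_counts(500, 491, 3871, 52343), 2) == 9.29   # test-other


def edit_distance(ref: list, hyp: list) -> int:
    """Total word-level edits (substitutions + insertions + deletions) needed
    to turn `ref` into `hyp`, via a standard Levenshtein DP with unit costs."""
    prev = list(range(len(hyp) + 1))            # i = 0: insert every hyp word
    for i in range(1, len(ref) + 1):
        cur = [i] + [0] * len(hyp)              # j = 0: delete every ref word
        for j in range(1, len(hyp) + 1):
            cost = 0 if ref[i - 1] == hyp[j - 1] else 1
            cur[j] = min(prev[j - 1] + cost,    # match / substitution
                         prev[j] + 1,           # deletion
                         cur[j - 1] + 1)        # insertion
        prev = cur
    return prev[len(hyp)]


# Example taken from the recogs file below: 'FLOUR' -> 'FLOWER' is one substitution.
ref = ['THICK', 'PEPPERED', 'FLOUR', 'FATTENED', 'SAUCE']
hyp = ['THICK', 'PEPPERED', 'FLOWER', 'FATTENED', 'SAUCE']
assert edit_distance(ref, hyp) == 1
```
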
diff --git a/log/greedy_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..b939f655a368bf2c60fec77f4a404a035b19999e --- /dev/null +++ b/log/greedy_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,5240 @@ +1089-134686-0000-1733: ref=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOUR', 'FATTENED', 'SAUCE'] +1089-134686-0000-1733: hyp=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOWER', 'FATTENED', 'SAUCE'] +1089-134686-0001-1734: ref=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0001-1734: hyp=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0002-1735: ref=['AFTER', 'EARLY', 'NIGHTFALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0002-1735: hyp=['AFTER', 'EARLY', 'NIGHT', 'FALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0003-1736: ref=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0003-1736: hyp=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0004-1737: ref=['NUMBER', 'TEN', 'FRESH', 'NELLY', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0004-1737: hyp=['NUMBER', 'TEN', 'FRESH', 'NELLIE', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0005-1738: ref=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0005-1738: hyp=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0006-1739: ref=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] +1089-134686-0006-1739: hyp=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] +1089-134686-0007-1740: ref=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0007-1740: hyp=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0008-1741: ref=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOUR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 'INDIFFERENT', 'KNOWLEDGE', 'OF', 'HIMSELF'] +1089-134686-0008-1741: hyp=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 'INDIFFERENT', 'KNOWLEDGE', 'OF', 
'HIMSELF'] +1089-134686-0009-1742: ref=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0009-1742: hyp=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0010-1743: ref=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0010-1743: hyp=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0011-1744: ref=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODALITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0011-1744: hyp=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODELITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0012-1745: ref=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0012-1745: hyp=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0013-1746: ref=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0013-1746: hyp=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0014-1747: ref=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0014-1747: hyp=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0015-1748: ref=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOLROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0015-1748: hyp=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOLROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0016-1749: ref=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 'CATECHISM', 'DEDALUS'] +1089-134686-0016-1749: hyp=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 'CATECHISM', 'DEEDOLUS'] +1089-134686-0017-1750: ref=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0017-1750: hyp=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 
'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0018-1751: ref=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0018-1751: hyp=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0019-1752: ref=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0019-1752: hyp=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0020-1753: ref=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'AMASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0020-1753: hyp=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'A', 'MASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0021-1754: ref=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0021-1754: hyp=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0022-1755: ref=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 'LAND'] +1089-134686-0022-1755: hyp=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 'LAND'] +1089-134686-0023-1756: ref=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 
'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0023-1756: hyp=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0024-1757: ref=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0024-1757: hyp=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0025-1758: ref=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0025-1758: hyp=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0026-1759: ref=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0026-1759: hyp=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0027-1760: ref=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0027-1760: hyp=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0028-1761: ref=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOUR', 'OF', 'SAINT', 'FRANCIS', 'XAVIER', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0028-1761: hyp=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOR', 'OF', 'SAINT', 'FRANCIS', 'ZAVIOUR', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0029-1762: ref=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0029-1762: hyp=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0030-1763: ref=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0030-1763: hyp=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0031-1764: ref=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0031-1764: hyp=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0032-1765: ref=['HE', 'IS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] +1089-134686-0032-1765: hyp=['HE', 'HAS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] +1089-134686-0033-1766: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0033-1766: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZAVIER'] +1089-134686-0034-1767: ref=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 'SHAKING', 'HIS', 'CLASPED', 'HANDS', 'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0034-1767: hyp=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 'SHAKING', 'HIS', 'CLASPED', 'HANDS', 
'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0035-1768: ref=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0035-1768: hyp=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0036-1769: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0036-1769: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZAVIER'] +1089-134686-0037-1770: ref=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134686-0037-1770: hyp=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134691-0000-1707: ref=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0000-1707: hyp=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: ref=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: hyp=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0002-1709: ref=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0002-1709: hyp=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0003-1710: ref=['THE', 'UNIVERSITY'] +1089-134691-0003-1710: hyp=['THE', 'UNIVERSITY'] +1089-134691-0004-1711: ref=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0004-1711: hyp=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0005-1712: ref=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0005-1712: hyp=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HEARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0006-1713: ref=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0006-1713: hyp=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0007-1714: ref=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0007-1714: hyp=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0008-1715: ref=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 'WITH', 'EASE', 'AND', 'INDIFFERENCE', 'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0008-1715: hyp=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 'WITH', 'EASE', 'AND', 'INDIFFERENCE', 
'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0009-1716: ref=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0009-1716: hyp=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0010-1717: ref=['BROTHER', 'MAC', 'ARDLE', 'BROTHER', 'KEOGH'] +1089-134691-0010-1717: hyp=['BROTHER', 'MICARTLE', 'BROTHER', 'KIEV'] +1089-134691-0011-1718: ref=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0011-1718: hyp=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0012-1719: ref=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0012-1719: hyp=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0013-1720: ref=['IDLE', 'AND', 'EMBITTERING', 'FINALLY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 'TO', 'LOVE', 'OUR', 'NEIGHBOUR', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0013-1720: hyp=['IDLE', 'AND', 'EMBITTERING', 'FINALLY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 'TO', 'LOVE', 'OUR', 'NEIGHBOUR', 'AS', 
'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0014-1721: ref=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'A', 'CHORD'] +1089-134691-0014-1721: hyp=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'ACCORD'] +1089-134691-0015-1722: ref=['WORDS', 'WAS', 'IT', 'THEIR', 'COLOURS'] +1089-134691-0015-1722: hyp=['WORDS', 'WAS', 'IT', 'THEIR', 'COLORS'] +1089-134691-0016-1723: ref=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0016-1723: hyp=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0017-1724: ref=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOODBEGIRT', 'AND', 'CITADELLED', 'AND', 'OF', 'ENTRENCHED', 'AND', 'MARSHALLED', 'RACES'] +1089-134691-0017-1724: hyp=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOOD', 'BEGIRT', 'AND', 'CITADELED', 'AND', 'OF', 'ENTRENCHED', 'AND', 'MARSHALED', 'RACES'] +1089-134691-0018-1725: ref=['AGAIN', 'AGAIN'] +1089-134691-0018-1725: hyp=['AGAIN', 'AGAIN'] +1089-134691-0019-1726: ref=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0019-1726: hyp=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0020-1727: ref=['HELLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DEDALUS'] +1089-134691-0020-1727: hyp=['HALLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DAEDALUS'] +1089-134691-0021-1728: ref=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSEPLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0021-1728: hyp=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSE', 'PLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0022-1729: ref=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] +1089-134691-0022-1729: hyp=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] +1089-134691-0023-1730: ref=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0023-1730: hyp=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0024-1731: ref=['STEPHANOS', 'DEDALOS'] +1089-134691-0024-1731: 
hyp=['STEPHANOS', 'DELOS'] +1089-134691-0025-1732: ref=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAZEWRAPPED', 'CITY'] +1089-134691-0025-1732: hyp=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAYES', 'WRAPPED', 'CITY'] +1188-133604-0000-1771: ref=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBEIN', 'TURNER', 'AND', 'TINTORET', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0000-1771: hyp=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBINE', 'TURNER', 'AND', 'TINTARETTE', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0001-1772: ref=['THEY', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'CHIAROSCURISTS'] +1188-133604-0001-1772: hyp=['THEY', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'KIERUSCURISTS'] +1188-133604-0002-1773: ref=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CHIAROSCURIST', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0002-1773: hyp=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOUR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CUIRASCURISTS', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0003-1774: ref=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0003-1774: hyp=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0004-1775: ref=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 'AT', 'THE', 'EDGE'] +1188-133604-0004-1775: hyp=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 'AT', 'THE', 'EDGE'] +1188-133604-0005-1776: ref=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', "CARPACCIO'S", 'ONE', 'OF', 'HIS', 'SERIES', 'OF', 
'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0005-1776: hyp=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', 'CARPATIUS', 'ONE', 'OF', 'HIS', 'SERIES', 'OF', 'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0006-1777: ref=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0006-1777: hyp=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0007-1778: ref=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TWO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GRAY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0007-1778: hyp=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TOO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GREY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0008-1779: ref=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'RED', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'IN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0008-1779: hyp=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'READ', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'AN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0009-1780: ref=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'THAT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'INCLOSED', 'PROPERLY', 'AND', 'HARMONIZED', 'WITH', 'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0009-1780: hyp=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'THAT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'ENCLOSED', 'PROPERLY', 'AND', 'HARMONIZE', 'WITH', 'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0010-1781: ref=['BUT', 'IN', 'THIS', 'VIGNETTE', 'COPIED', 'FROM', 'TURNER', 
'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0010-1781: hyp=['BUT', 'IN', 'THIS', 'VINEYARD', 'COPIED', 'FROM', 'TURNER', 'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0011-1782: ref=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAT', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVORITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0011-1782: hyp=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAN', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVOURITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0012-1783: ref=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORIST', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0012-1783: hyp=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORLESS', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0013-1784: ref=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0013-1784: hyp=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0014-1785: ref=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOL', 'IS', 'AN', 'EASY', 'ONE'] +1188-133604-0014-1785: hyp=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOLS', 'AN', 'EASY', 'ONE'] +1188-133604-0015-1786: ref=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0015-1786: hyp=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0016-1787: ref=['THIS', 'AT', 'ONCE', 'COMPELS', 'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', "JEWELER'S", 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0016-1787: hyp=['THIS', 'AT', 'ONCE', 'COMPELS', 'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 
'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', 'JEWELLERS', 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0017-1788: ref=['THAT', 'A', 'STYLE', 'IS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0017-1788: hyp=['THAT', 'A', 'STYLE', 'WAS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0018-1789: ref=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0018-1789: hyp=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0019-1790: ref=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'AND', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0019-1790: hyp=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'IN', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0020-1791: ref=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0020-1791: hyp=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0021-1792: ref=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PURIST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0021-1792: hyp=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PUREST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0022-1793: ref=['YOU', 'MUST', 'LOOK', 'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATHE', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0022-1793: hyp=['YOU', 'MUST', 'LOOK', 'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATH', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0023-1794: ref=['THE', 'COLORIST', 'SAYS', 'FIRST', 'OF', 'ALL', 
'AS', 'MY', 'DELICIOUS', 'PAROQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'IS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0023-1794: hyp=['THE', 'CHOLERIST', 'SAYS', 'FIRST', 'OF', 'ALL', 'AS', 'MY', 'DELICIOUS', 'PARRIQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'AS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0024-1795: ref=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0024-1795: hyp=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0025-1796: ref=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'AND', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0025-1796: hyp=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'AND', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0026-1797: ref=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'AND', 'THE', 'PYTHON'] +1188-133604-0026-1797: hyp=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATSOEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELLOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'IN', 'THE', 'PYTHON'] +1188-133604-0027-1798: ref=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 'THE', 'DISTANCE'] +1188-133604-0027-1798: hyp=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 'THE', 'DISTANCE'] 
+1188-133604-0028-1799: ref=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0028-1799: hyp=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: ref=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: hyp=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'IN', 'FIRE'] +1188-133604-0030-1801: ref=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0030-1801: hyp=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0031-1802: ref=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0031-1802: hyp=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0032-1803: ref=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0032-1803: hyp=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0033-1804: ref=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0033-1804: hyp=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0034-1805: ref=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRYSTAL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0034-1805: hyp=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRISTEL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0035-1806: ref=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0035-1806: hyp=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0036-1807: ref=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 'DIGNIFIED', 'AND', 
'BEAUTIFUL'] +1188-133604-0036-1807: hyp=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 'DIGNIFIED', 'AND', 'BEAUTIFUL'] +1188-133604-0037-1808: ref=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBEIN', 'OR', 'DUERER'] +1188-133604-0037-1808: hyp=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBINE', 'OR', 'DURE'] +1188-133604-0038-1809: ref=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0038-1809: hyp=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0039-1810: ref=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'AND', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0039-1810: hyp=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'IN', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0040-1811: ref=['THE', 'CRAMPNESS', 'AND', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0040-1811: hyp=['THE', 'CRAMPNESS', 'IN', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0041-1812: ref=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATERMILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0041-1812: hyp=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATER', 'MILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0042-1813: ref=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0042-1813: hyp=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0043-1814: ref=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0043-1814: hyp=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0044-1815: ref=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 'SHEEP', 'LIVE', 'THE', 'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'AND', 'THE', 'GIER', 'EAGLE'] +1188-133604-0044-1815: hyp=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 'SHEEP', 'LIVE', 'THE', 
'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'IN', 'THE', 'GEAR', 'EAGLE'] +121-121726-0000-2558: ref=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0000-2558: hyp=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0001-2559: ref=['HARANGUE', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0001-2559: hyp=['HURRY', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0002-2560: ref=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0002-2560: hyp=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0003-2561: ref=['HAY', 'FEVER', 'A', 'HEART', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0003-2561: hyp=['HEY', 'FEVER', 'A', 'HARD', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0004-2562: ref=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0004-2562: hyp=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0005-2563: ref=['HEDGE', 'A', 'FENCE'] +121-121726-0005-2563: hyp=['HEDGE', 'A', 'FENCE'] +121-121726-0006-2564: ref=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0006-2564: hyp=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0007-2565: ref=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0007-2565: hyp=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0008-2566: ref=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0008-2566: hyp=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0009-2567: ref=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0009-2567: hyp=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0010-2568: ref=['HOUSECLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0010-2568: hyp=['HOUSE', 'CLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0011-2569: ref=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0011-2569: hyp=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0012-2570: ref=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0012-2570: hyp=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0013-2571: ref=['TIED', 'TO', 'A', 'WOMAN'] +121-121726-0013-2571: hyp=['TIED', 'TO', 'A', 'WOMAN'] +121-121726-0014-2572: ref=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-121726-0014-2572: hyp=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-123852-0000-2615: ref=['THOSE', 'PRETTY', 'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOMETIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0000-2615: hyp=['THOSE', 'PRETTY', 
'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOME', 'TIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0001-2616: ref=['AY', 'ME'] +121-123852-0001-2616: hyp=['I', 'ME'] +121-123852-0002-2617: ref=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', "REMOV'D", 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0002-2617: hyp=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', 'REMOVED', 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0003-2618: ref=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'THOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NOUGHT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGES', 'OF', "EITHER'S", 'WOE'] +121-123852-0003-2618: hyp=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'BOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NAUGHT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGERS', 'OF', "EITHER'S", 'WOE'] +121-123852-0004-2619: ref=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', "PIERC'D", 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEA', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123852-0004-2619: hyp=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', 'PIERCED', 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEAD', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123859-0000-2573: ref=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', "STEEL'D", 'SENSE', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0000-2573: hyp=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', 'STEELED', 'SCENTS', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0001-2574: ref=['O', 'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 'MY', 'SEEING', 'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEING', 'AND', 'TO', 'HIS', 'PALATE', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', "POISON'D", 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0001-2574: hyp=['OH', 'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 'MY', 'SEEING', 
'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEN', 'AND', 'TO', 'HIS', 'PALLET', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', 'POISONED', 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0002-2575: ref=['BUT', 'RECKONING', 'TIME', 'WHOSE', "MILLION'D", 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', "SHARP'ST", 'INTENTS', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', "O'ER", 'INCERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0002-2575: hyp=['BUT', 'RECKONING', 'TIME', 'WHOSE', 'MILLIONED', 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', 'SHARPEST', 'INTENSE', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', 'OR', 'IN', 'CERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0003-2576: ref=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0003-2576: hyp=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0004-2577: ref=['SO', 'I', 'RETURN', "REBUK'D", 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-123859-0004-2577: hyp=['SO', 'I', 'RETURNED', 'REBUKED', 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-127105-0000-2578: ref=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0000-2578: hyp=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0001-2579: ref=['SOMEONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0001-2579: hyp=['SOME', 'ONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0002-2580: ref=['CRIED', 'ONE', 'OF', 'THE', 'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0002-2580: hyp=['CRIED', 'ONE', 'OF', 'THE', 'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0003-2581: ref=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 'AFTER', 'WHICH', 
'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0003-2581: hyp=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 'AFTER', 'WHICH', 'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0004-2582: ref=['THE', "STORY'S", 'WRITTEN'] +121-127105-0004-2582: hyp=['THE', 'STORIES', 'WRITTEN'] +121-127105-0005-2583: ref=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0005-2583: hyp=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0006-2584: ref=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0006-2584: hyp=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0007-2585: ref=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'OH', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0007-2585: hyp=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'O', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0008-2586: ref=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'S"] +121-127105-0008-2586: hyp=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'S"] +121-127105-0009-2587: ref=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0009-2587: hyp=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0010-2588: ref=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0010-2588: hyp=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0011-2589: ref=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0011-2589: hyp=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0012-2590: ref=['IT', "WASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0012-2590: hyp=["WASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0013-2591: ref=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0013-2591: hyp=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0014-2592: ref=['YOU', 'ARE', 'ACUTE'] +121-127105-0014-2592: hyp=['YOU', 'ARE', 'ACUTE'] +121-127105-0015-2593: ref=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0015-2593: hyp=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0016-2594: ref=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0016-2594: hyp=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0017-2595: ref=['IT', 'WAS', 'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0017-2595: hyp=['IT', 'WAS', 
'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0018-2596: ref=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0018-2596: hyp=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0019-2597: ref=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0019-2597: hyp=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0020-2598: ref=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "MORE'S", 'THE', 'PITY', 'THEN'] +121-127105-0020-2598: hyp=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "NOR'S", 'THE', 'PITY', 'THEN'] +121-127105-0021-2599: ref=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0021-2599: hyp=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0022-2600: ref=['WELL', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0022-2600: hyp=['FOR', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0023-2601: ref=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0023-2601: hyp=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0024-2602: ref=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0024-2602: hyp=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0025-2603: ref=['THE', 'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 'THEY', 'DEPARTED', 'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0025-2603: hyp=['THE', 'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 'THEY', 'DEPARTED', 
'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0026-2604: ref=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0026-2604: hyp=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0027-2605: ref=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0027-2605: hyp=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0028-2606: ref=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0028-2606: hyp=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0029-2607: ref=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0029-2607: hyp=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0030-2608: ref=['I', "DON'T", 'ANTICIPATE'] +121-127105-0030-2608: hyp=['I', "DON'T", 'ANTICIPATE'] +121-127105-0031-2609: ref=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0031-2609: hyp=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0032-2610: ref=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0032-2610: hyp=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0033-2611: ref=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 'IT'] +121-127105-0033-2611: hyp=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 'IT'] +121-127105-0034-2612: ref=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0034-2612: hyp=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0035-2613: ref=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 'MENTIONED', 'TO', 'ME', 
'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0035-2613: hyp=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 'MENTIONED', 'TO', 'ME', 'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0036-2614: ref=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +121-127105-0036-2614: hyp=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +1221-135766-0000-1305: ref=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0000-1305: hyp=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0001-1306: ref=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONOURED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOR', 'EVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0001-1306: hyp=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONORED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOREVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0002-1307: ref=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0002-1307: hyp=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0003-1308: ref=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'CO', 'EXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0003-1308: hyp=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'COEXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0004-1309: ref=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 'OF', 'HER', 'INNER', 'LIFE'] +1221-135766-0004-1309: hyp=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 'OF', 'HER', 'INNER', 
'LIFE'] +1221-135766-0005-1310: ref=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0005-1310: hyp=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0006-1311: ref=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0006-1311: hyp=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0007-1312: ref=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0007-1312: hyp=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0008-1313: ref=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0008-1313: hyp=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0009-1314: ref=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 'RULED', 'THE', 'MOMENT'] +1221-135766-0009-1314: hyp=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 'ROLLED', 'THE', 'MOMENT'] +1221-135766-0010-1315: ref=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 'MALICIOUS', 'BUT', 'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0010-1315: hyp=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 'MALICIOUS', 'BUT', 
'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0011-1316: ref=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0011-1316: hyp=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0012-1317: ref=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0012-1317: hyp=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0013-1318: ref=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INFANTILE', 'WORLD'] +1221-135766-0013-1318: hyp=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INVENTILE', 'WORLD'] +1221-135766-0014-1319: ref=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0014-1319: hyp=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0015-1320: ref=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135766-0015-1320: hyp=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135767-0000-1280: ref=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 'WORN', 'ON', 'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONOURABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0000-1280: hyp=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 'WORN', 'ON', 
'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONORABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0001-1281: ref=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0001-1281: hyp=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0002-1282: ref=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0002-1282: hyp=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0003-1283: ref=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0003-1283: hyp=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0004-1284: ref=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 'ALREADY', 'OF', 'A', 'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0004-1284: hyp=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 'ALREADY', 'OF', 'A', 
'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0005-1285: ref=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0005-1285: hyp=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0006-1286: ref=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0006-1286: hyp=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0007-1287: ref=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0007-1287: hyp=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0008-1288: ref=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0008-1288: hyp=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0009-1289: ref=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0009-1289: hyp=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0010-1290: ref=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0010-1290: hyp=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0011-1291: ref=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 'TO', 'THE', 'QUAINT', 
'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0011-1291: hyp=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 'TO', 'THE', 'QUAINT', 'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0012-1292: ref=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0012-1292: hyp=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0013-1293: ref=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANT', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0013-1293: hyp=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANTS', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0014-1294: ref=['YEA', 'HIS', 'HONOURABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0014-1294: hyp=['YEA', 'HIS', 'HONOURABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0015-1295: ref=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0015-1295: hyp=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0016-1296: ref=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIR', 'ESTATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0016-1296: hyp=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIREST', 'STATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0017-1297: ref=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 'TANKARD', 'AT', 'THE', 
'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0017-1297: hyp=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 'TANKARD', 'AT', 'THE', 'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0018-1298: ref=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOUR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0018-1298: hyp=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOUR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0019-1299: ref=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK', 'LOOK'] +1221-135767-0019-1299: hyp=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK'] +1221-135767-0020-1300: ref=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0020-1300: hyp=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0021-1301: ref=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATURE', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0021-1301: hyp=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATEUR', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0022-1302: ref=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALREADY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0022-1302: hyp=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALL', 'READY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0023-1303: ref=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 'BACK', 'OF', 'A', 'BULL'] 
+1221-135767-0023-1303: hyp=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 'BACK', 'OF', 'A', 'BULL'] +1221-135767-0024-1304: ref=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1221-135767-0024-1304: hyp=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1284-1180-0000-829: ref=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEE', 'PANTS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0000-829: hyp=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEEP', 'HANDS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0001-830: ref=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0001-830: hyp=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0002-831: ref=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURNOVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0002-831: hyp=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURN', 'OVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0003-832: ref=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0003-832: hyp=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0004-833: ref=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0004-833: hyp=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0005-834: ref=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANYONE', 'CAME', 'SO', 'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0005-834: hyp=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANY', 'ONE', 'CAME', 'SO', 'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0006-835: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] +1284-1180-0006-835: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] +1284-1180-0007-836: ref=['HE', 
'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THEIR', 'NEAREST', 'NEIGHBOR'] +1284-1180-0007-836: hyp=['HE', 'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THERE', 'NEAREST', 'NEIGHBOUR'] +1284-1180-0008-837: ref=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOON', 'UNC', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0008-837: hyp=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOONK', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0009-838: ref=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0009-838: hyp=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0010-839: ref=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'AND', 'A', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0010-839: hyp=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'AND', 'A', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0011-840: ref=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0011-840: hyp=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0012-841: ref=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0012-841: hyp=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0013-842: ref=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0013-842: hyp=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0014-843: ref=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0014-843: hyp=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0015-844: ref=['WE', 'ARE', 'TRAVELING', 'REPLIED', 'OJO', 'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0015-844: hyp=['WE', 'ARE', 'TRAVELING', 'REPLIED', 'OJO', 'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0016-845: ref=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0016-845: hyp=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0017-846: ref=['AT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 'STEAMING', 'AT', 'A', 'GREAT', 
'RATE'] +1284-1180-0017-846: hyp=['AT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 'STEAMING', 'AT', 'A', 'GREAT', 'RATE'] +1284-1180-0018-847: ref=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0018-847: hyp=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0019-848: ref=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MOMBI', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0019-848: hyp=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MUMBIE', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0020-849: ref=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'CAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0020-849: hyp=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'HAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0021-850: ref=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'MAY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0021-850: hyp=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'MAY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0022-851: ref=["I'M", 'AFRAID', 'I', "DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0022-851: hyp=["I'M", 'AFRAID', 'I', "DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0023-852: ref=['YOU', 'SEE', "I'VE", 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0023-852: hyp=['YOU', 'SEE', "I'VE", 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0024-853: ref=['THAT', 'IS', 'ONE', 'REASON', 'YOU', 'ARE', 'OJO', 'THE', 
'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'A', 'SYMPATHETIC', 'TONE'] +1284-1180-0024-853: hyp=['THAT', 'IS', 'ONE', 'REASON', 'YOU', 'ARE', 'OJO', 'THE', 'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'SYMPATHETIC', 'TONE'] +1284-1180-0025-854: ref=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOTTE', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0025-854: hyp=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOT', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0026-855: ref=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COOK', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0026-855: hyp=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COPE', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0027-856: ref=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0027-856: hyp=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0028-857: ref=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLORS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0028-857: hyp=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLLARS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0029-858: ref=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0029-858: hyp=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0030-859: ref=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLORS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 'MUNCHKINS', 'ARE'] +1284-1180-0030-859: hyp=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLOURS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 'MUNCHKINS', 'ARE'] +1284-1180-0031-860: ref=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0031-860: hyp=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0032-861: ref=['I', 'WILL', 'SHOW', 'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] +1284-1180-0032-861: hyp=['I', 'WILL', 'SHOW', 
'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] +1284-1181-0000-807: ref=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0000-807: hyp=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0001-808: ref=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0001-808: hyp=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0002-809: ref=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0002-809: hyp=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0003-810: ref=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0003-810: hyp=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0004-811: ref=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0004-811: hyp=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0005-812: ref=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0005-812: hyp=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0006-813: ref=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0006-813: hyp=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0007-814: ref=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0007-814: hyp=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0008-815: ref=['I', 'THINK', 'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0008-815: hyp=['I', 'THINK', 'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0009-816: ref=['SHE', 'RAN', 'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0009-816: hyp=['SHE', 'RAN', 'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 
'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0010-817: ref=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0010-817: hyp=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0011-818: ref=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALL', 'TOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0011-818: hyp=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALTOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0012-819: ref=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0012-819: hyp=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0013-820: ref=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0013-820: hyp=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0014-821: ref=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0014-821: hyp=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0015-822: ref=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0015-822: hyp=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0016-823: ref=['I', 'AM', 'NOT', 'ALLOWED', 'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0016-823: hyp=['I', 'AM', 'NOT', 'ALLOWED', 'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0017-824: ref=['THE', 'WIZARD', 'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 
'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0017-824: hyp=['THE', 'WIZARD', 'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0018-825: ref=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0018-825: hyp=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0019-826: ref=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0019-826: hyp=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0020-827: ref=['DEAR', 'ME', 'WHAT', 'A', 'CHATTERBOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'UNC', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0020-827: hyp=['DEAR', 'ME', 'WHAT', 'A', 'CHATTER', 'BOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'UG', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0021-828: ref=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENTS'] +1284-1181-0021-828: hyp=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENTS'] +1284-134647-0000-862: ref=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0000-862: hyp=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0001-863: ref=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0001-863: hyp=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0002-864: ref=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SECTS', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0002-864: hyp=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SEX', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0003-865: ref=['CONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 
'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0003-865: hyp=['KONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0004-866: ref=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELT', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0004-866: hyp=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELLED', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0005-867: ref=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'INFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0005-867: hyp=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'INFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0006-868: ref=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0006-868: hyp=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0007-869: ref=['PROSCRIBED', 'BY', 'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 'THE', 'EMPIRE', 'THE', 'DONATISTS', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'IN', 'NUMIDIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1284-134647-0007-869: hyp=['PRESCRIBED', 'BY', 'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 
'THE', 'EMPIRE', 'THE', 'DONATIST', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'INIMITIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1320-122612-0000-120: ref=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'AND', 'THRIVING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0000-120: hyp=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'ENTHRIBING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0001-121: ref=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0001-121: hyp=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELLERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0002-122: ref=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0002-122: hyp=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0003-123: ref=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0003-123: hyp=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0004-124: ref=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHGOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0004-124: hyp=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHGOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0005-125: ref=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCAROONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0005-125: hyp=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCARONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0006-126: ref=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0006-126: hyp=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0007-127: ref=['CHINGACHGOOK', 'HAD', 'CAUGHT', 'THE', 'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 
'BADE', 'HIM', 'SPEAK'] +1320-122612-0007-127: hyp=['CHINGACHOOK', 'HAD', 'CAUGHT', 'THE', 'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 'BADE', 'HIM', 'SPEAK'] +1320-122612-0008-128: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0008-128: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0009-129: ref=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0009-129: hyp=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0010-130: ref=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'HAIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0010-130: hyp=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'AIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0011-131: ref=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLEW', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0011-131: hyp=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLUE', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0012-132: ref=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0012-132: hyp=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0013-133: ref=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0013-133: hyp=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0014-134: ref=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0014-134: hyp=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0015-135: ref=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 'THE', 'MOIST', 'ALLUVION'] +1320-122612-0015-135: 
hyp=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 'THE', 'MOIST', 'ALLUVIAN'] +1320-122612-0016-136: ref=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122612-0016-136: hyp=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122617-0000-78: ref=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0000-78: hyp=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0001-79: ref=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0001-79: hyp=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0002-80: ref=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BALAAM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SKEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0002-80: hyp=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BAYLIM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SCEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0003-81: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0003-81: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0004-82: ref=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0004-82: hyp=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0005-83: ref=['THE', 'BEAR', 'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 
'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0005-83: hyp=['THE', 'BEAR', 'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0006-84: ref=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0006-84: hyp=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0007-85: ref=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0007-85: hyp=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0008-86: ref=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0008-86: hyp=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0009-87: ref=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0009-87: hyp=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0010-88: ref=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0010-88: hyp=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0011-89: ref=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0011-89: hyp=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0012-90: ref=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 
'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0012-90: hyp=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0013-91: ref=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'IN', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0013-91: hyp=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'AND', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0014-92: ref=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJURER', 'TO', 'ENTER'] +1320-122617-0014-92: hyp=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJUROR', 'TO', 'ENTER'] +1320-122617-0015-93: ref=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEAT', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0015-93: hyp=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEED', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0016-94: ref=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0016-94: hyp=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0017-95: ref=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0017-95: hyp=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0018-96: ref=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSED', 'OF', 'COOKERY'] +1320-122617-0018-96: hyp=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSE', 'OF', 'COOKERY'] +1320-122617-0019-97: ref=['UNCAS', 'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WITHES'] +1320-122617-0019-97: hyp=['UNCAS', 'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WIDTHS'] +1320-122617-0020-98: ref=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 
'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0020-98: hyp=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0021-99: ref=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THIS', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0021-99: hyp=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THE', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0022-100: ref=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THEY', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0022-100: hyp=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THE', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0023-101: ref=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0023-101: hyp=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0024-102: ref=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0024-102: hyp=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0025-103: ref=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0025-103: hyp=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0026-104: ref=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'IN', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0026-104: hyp=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'AND', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0027-105: ref=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0027-105: hyp=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0028-106: ref=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 'THE', 'GREATEST', 'STRAITS'] +1320-122617-0028-106: 
hyp=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 'THE', 'GREATEST', 'STRAITS'] +1320-122617-0029-107: ref=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSER', 'WILL', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0029-107: hyp=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSIBLE', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0030-108: ref=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0030-108: hyp=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0031-109: ref=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0031-109: hyp=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0032-110: ref=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0032-110: hyp=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0033-111: ref=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'AS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0033-111: hyp=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'IS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0034-112: ref=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 'REVENGE'] +1320-122617-0034-112: hyp=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 'REVENGE'] +1320-122617-0035-113: ref=['THEN', 'HEAVING', 
'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0035-113: hyp=['THEN', 'HEAVING', 'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0036-114: ref=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'IS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0036-114: hyp=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'HAS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0037-115: ref=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0037-115: hyp=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0038-116: ref=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0038-116: hyp=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0039-117: ref=['THE', 'MOHICAN', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0039-117: hyp=['THE', 'MOHICANS', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0040-118: ref=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0040-118: hyp=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0041-119: ref=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 'PROPORTIONS'] +1320-122617-0041-119: hyp=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 'PROPORTIONS'] +1580-141083-0000-1949: ref=['I', 'WILL', 'ENDEAVOUR', 
'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0000-1949: hyp=['I', 'WILL', 'ENDEAVOUR', 'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0001-1950: ref=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0001-1950: hyp=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0002-1951: ref=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0002-1951: hyp=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0003-1952: ref=['WITHOUT', 'HIS', 'SCRAPBOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0003-1952: hyp=['WITHOUT', 'HIS', 'SCRAP', 'BOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0004-1953: ref=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0004-1953: hyp=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0005-1954: ref=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0005-1954: hyp=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0006-1955: ref=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANNISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0006-1955: hyp=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0007-1956: ref=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPERS'] +1580-141083-0007-1956: hyp=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPERS'] +1580-141083-0008-1957: ref=['THE', 'PROOF', 'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 'ALL', 'TOGETHER'] +1580-141083-0008-1957: hyp=['THE', 'PROOF', 'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 
'ALTOGETHER'] +1580-141083-0009-1958: ref=['THE', 'ALTERNATIVE', 'WAS', 'THAT', 'SOMEONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0009-1958: hyp=['THEY', 'ALL', 'TURNED', 'OF', 'WAS', 'THAT', 'SOME', 'ONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0010-1959: ref=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0010-1959: hyp=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0011-1960: ref=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0011-1960: hyp=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0012-1961: ref=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0012-1961: hyp=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0013-1962: ref=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0013-1962: hyp=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0014-1963: ref=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0014-1963: hyp=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0015-1964: ref=['DID', 'ANYONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0015-1964: hyp=['DID', 'ANY', 'ONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0016-1965: ref=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0016-1965: hyp=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0017-1966: ref=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0017-1966: hyp=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0018-1967: ref=['NOW', 'MISTER', 'SOAMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0018-1967: hyp=['NOW', 'MISTER', 'SOLMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0019-1968: ref=['ABOVE', 'WERE', 'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0019-1968: hyp=['ABOVE', 'WERE', 'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0020-1969: ref=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NECK', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] +1580-141083-0020-1969: hyp=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NET', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] +1580-141083-0021-1970: ref=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 
'THE', 'ONE', 'PANE', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0021-1970: hyp=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 'THE', 'ONE', 'PANE', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0022-1971: ref=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0022-1971: hyp=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0023-1972: ref=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0023-1972: hyp=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0024-1973: ref=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0024-1973: hyp=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0025-1974: ref=['THE', 'MAN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0025-1974: hyp=['THE', 'MEN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0026-1975: ref=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOAMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0026-1975: hyp=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOLMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0027-1976: ref=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0027-1976: hyp=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0028-1977: ref=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0028-1977: hyp=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0029-1978: ref=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0029-1978: hyp=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0030-1979: ref=['MISTER', 'SOAMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] +1580-141083-0030-1979: hyp=['MISTER', 'SOLMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] +1580-141083-0031-1980: ref=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0031-1980: hyp=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0032-1981: ref=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] +1580-141083-0032-1981: hyp=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 
'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] +1580-141083-0033-1982: ref=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0033-1982: hyp=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0034-1983: ref=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0034-1983: hyp=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'AN', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0035-1984: ref=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HALLOA', "WHAT'S", 'THIS'] +1580-141083-0035-1984: hyp=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HULLO', 'WHAT', 'IS', 'THIS'] +1580-141083-0036-1985: ref=['HOLMES', 'HELD', 'IT', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0036-1985: hyp=['HOLMES', 'HUTTED', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0037-1986: ref=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0037-1986: hyp=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0038-1987: ref=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STAIR', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0038-1987: hyp=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STARE', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0039-1988: ref=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0039-1988: hyp=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0040-1989: ref=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0040-1989: hyp=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0041-1990: ref=['LET', 'US', 'HEAR', 'THE', 'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0041-1990: hyp=['LET', 'US', 'SEE', 'THE', 'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0042-1991: ref=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0042-1991: hyp=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'A', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0043-1992: ref=['THE', 'TOP', 'FLOOR', 
'BELONGS', 'TO', 'MILES', 'MC', 'LAREN'] +1580-141083-0043-1992: hyp=['THE', 'TOP', 'FLOOR', 'BELONGS', 'TO', 'MYLES', 'MC', 'LAREN'] +1580-141083-0044-1993: ref=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0044-1993: hyp=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0045-1994: ref=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0045-1994: hyp=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0046-1995: ref=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0046-1995: hyp=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0047-1996: ref=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0047-1996: hyp=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0048-1997: ref=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0048-1997: hyp=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0049-1998: ref=['ANYONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0049-1998: hyp=['ANY', 'ONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0050-1999: ref=['I', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0050-1999: hyp=['I', 'HAVE', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0051-2000: ref=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0051-2000: hyp=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0052-2001: ref=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0052-2001: hyp=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0053-2002: ref=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141083-0053-2002: hyp=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141084-0000-2003: ref=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0000-2003: hyp=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0001-2004: ref=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0001-2004: hyp=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0002-2005: ref=['THIS', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0002-2005: hyp=['THE', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0003-2006: ref=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHRIST'S", 'DOOR'] +1580-141084-0003-2006: hyp=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHER'S", 'DOOR'] +1580-141084-0004-2007: ref=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 
'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0004-2007: hyp=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0005-2008: ref=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0005-2008: hyp=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0006-2009: ref=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0006-2009: hyp=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0007-2010: ref=['TO', 'MORROW', 'IS', 'THE', 'EXAMINATION'] +1580-141084-0007-2010: hyp=['TO', 'MORROW', 'WAS', 'THE', 'EXAMINATION'] +1580-141084-0008-2011: ref=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0008-2011: hyp=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0009-2012: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0009-2012: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0010-2013: ref=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0010-2013: hyp=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0011-2014: ref=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0011-2014: hyp=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0012-2015: ref=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0012-2015: hyp=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0013-2016: ref=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0013-2016: hyp=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0014-2017: ref=['WHY', 'BANNISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0014-2017: hyp=['WHY', 'BANISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0015-2018: ref=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0015-2018: hyp=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0016-2019: ref=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'IN', 'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0016-2019: hyp=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'AND', 'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0017-2020: ref=['NO', 'GOOD', 'MY', 
'DEAR', 'WATSON'] +1580-141084-0017-2020: hyp=['NO', 'GOOD', 'MY', 'DEAR', 'WATSON'] +1580-141084-0018-2021: ref=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0018-2021: hyp=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0019-2022: ref=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0019-2022: hyp=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0020-2023: ref=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0020-2023: hyp=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0021-2024: ref=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0021-2024: hyp=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0022-2025: ref=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0022-2025: hyp=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0023-2026: ref=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0023-2026: hyp=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0024-2027: ref=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TWO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0024-2027: hyp=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TOO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0025-2028: ref=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0025-2028: hyp=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0026-2029: ref=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0026-2029: hyp=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0027-2030: ref=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0027-2030: hyp=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0028-2031: ref=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0028-2031: hyp=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0029-2032: ref=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANNISTER', 'IN', 'THE', 'FARTHER', 'CORNER'] +1580-141084-0029-2032: hyp=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANISTER', 'IN', 'THE', 'FARTHER', 'CORNER'] +1580-141084-0030-2033: ref=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 
'HOLMES'] +1580-141084-0030-2033: hyp=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 'HOLMES'] +1580-141084-0031-2034: ref=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILCHRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0031-2034: hyp=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILGRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0032-2035: ref=['FOR', 'A', 'MOMENT', 'GILCHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0032-2035: hyp=['FOR', 'A', 'MOMENT', 'GILCHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0033-2036: ref=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0033-2036: hyp=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0034-2037: ref=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'NO', 'INJUSTICE'] +1580-141084-0034-2037: hyp=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'KNOW', 'INJUSTICE'] +1580-141084-0035-2038: ref=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0035-2038: hyp=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0036-2039: ref=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0036-2039: hyp=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0037-2040: ref=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0037-2040: hyp=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0038-2041: ref=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0038-2041: hyp=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0039-2042: ref=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0039-2042: hyp=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0040-2043: ref=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'AWARE', 'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0040-2043: hyp=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'WHERE', 'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0041-2044: ref=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] +1580-141084-0041-2044: hyp=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] 
+1580-141084-0042-2045: ref=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0042-2045: hyp=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0043-2046: ref=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0043-2046: hyp=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0044-2047: ref=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0044-2047: hyp=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0045-2048: ref=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0045-2048: hyp=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0046-2049: ref=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILCHRIST'] +1580-141084-0046-2049: hyp=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILGRIST'] +1580-141084-0047-2050: ref=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOAMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0047-2050: hyp=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOLMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0048-2051: ref=['IT', 'WILL', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0048-2051: hyp=['IT', 'WOULD', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0049-2052: ref=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0049-2052: hyp=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0050-2053: ref=['IF', 'MISTER', 'SOAMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1580-141084-0050-2053: hyp=['IF', 'MISTER', 'SOLMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1995-1826-0000-750: ref=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 'ENTHUSIASM'] +1995-1826-0000-750: hyp=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 'ENTHUSIASM'] +1995-1826-0001-751: ref=['THE', 'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0001-751: hyp=['THE', 
'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0002-752: ref=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0002-752: hyp=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0003-753: ref=['BETTER', 'GO', 'HE', 'HAD', 'COUNSELLED', 'SENTENTIOUSLY'] +1995-1826-0003-753: hyp=['BETTER', 'GO', 'HE', 'HAD', 'COUNSEL', 'SENTENTIOUSLY'] +1995-1826-0004-754: ref=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0004-754: hyp=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0005-755: ref=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0005-755: hyp=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0006-756: ref=['BEEN', 'LOOKING', 'UP', 'TOOMS', 'COUNTY'] +1995-1826-0006-756: hyp=['BEEN', 'LOOKING', 'UP', 'TOMBS', 'COUNTY'] +1995-1826-0007-757: ref=['FIND', 'SOME', 'CRESSWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0007-757: hyp=['FIVE', 'SOME', 'CRUSTWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0008-758: ref=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0008-758: hyp=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0009-759: ref=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0009-759: hyp=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0010-760: ref=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0010-760: hyp=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0011-761: ref=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0011-761: hyp=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0012-762: ref=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GREAT', 'PHILANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0012-762: hyp=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GRATEFUL', 'ANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0013-763: ref=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 'WAS', 'THINKING', 'TODAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0013-763: hyp=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 'WAS', 'THINKING', 'TO', 'DAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0014-764: ref=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0014-764: hyp=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0015-765: ref=['SHE', 'HAD', 
'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'AND', 'SIGHT'] +1995-1826-0015-765: hyp=['SHE', 'HAD', 'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'IN', 'SIGHT'] +1995-1826-0016-766: ref=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0016-766: hyp=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0017-767: ref=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0017-767: hyp=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0018-768: ref=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0018-768: hyp=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0019-769: ref=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0019-769: hyp=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0020-770: ref=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARK', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0020-770: hyp=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARKED', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0021-771: ref=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0021-771: hyp=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0022-772: ref=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0022-772: hyp=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0023-773: ref=['GOOBERS', "DON'T", 'GROW', 'ON', 'THE', 'TOPS', 'OF', 'VINES', 'BUT', 'UNDERGROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] +1995-1826-0023-773: hyp=['GOULD', 'WAS', "DON'T", 'GROW', 'ON', 'THE', 'TOPSY', 'BANDS', 'BUT', 'ON', 'THE', 'GROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] +1995-1826-0024-774: ref=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HARKENED'] +1995-1826-0024-774: hyp=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HEARKENED'] +1995-1826-0025-775: ref=['SOME', 'TIME', "YOU'LL", 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0025-775: hyp=['SOMETIME', 'YOU', 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0026-776: ref=['NOW', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 
'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1826-0026-776: hyp=['THOU', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1836-0000-735: ref=['THE', 'HON', 'CHARLES', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0000-735: hyp=['THE', 'HON', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0001-736: ref=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0001-736: hyp=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0002-737: ref=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0002-737: hyp=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0003-738: ref=['SHE', 'WAS', 'NOT', 'HERSELF', 'A', 'NOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0003-738: hyp=['SHE', 'WAS', 'NOT', 'HERSELF', 'UNNOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0004-739: ref=['AS', 'SHE', 'AWAITED', 'HER', 'GUESTS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 'FEW', 'TONIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CREIGHTON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CREIGHTON', 'MISTER', 'AND', 'MISSUS', 'VANDERPOOL', 'MISTER', 'HARRY', 'CRESSWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0004-739: hyp=['AS', 'SHE', 'AWAITED', 'HER', 'GUESS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 'FEW', 'TO', 'NIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CREDON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CRIGHTON', 'MISTER', 'AND', 'MISSUS', 'VAN', 'DERPOOL', 'MISTER', 'HARRY', 'CRESWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 
'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0005-740: ref=['MISSUS', 'GREY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0005-740: hyp=['MISSUS', 'GRAY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0006-741: ref=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESSWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0006-741: hyp=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0007-742: ref=['BUT', 'YOU', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0007-742: hyp=['DO', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0008-743: ref=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HIGHEST', 'CAPACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0008-743: hyp=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0009-744: ref=['BUT', 'CRESSWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0009-744: hyp=['BUT', 'CRASWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0010-745: ref=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'OF', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GREY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0010-745: hyp=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GRAY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0011-746: ref=['POSITIVELY', 'HEROIC', 'ADDED', 'CRESSWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0011-746: hyp=['POSITIVELY', 'HEROIC', 'ADDED', 'CRASWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0012-747: ref=['BUT', "WE'RE", 'NOT', 'ER', 'EXACTLY', 'WELCOMED'] +1995-1836-0012-747: hyp=['BUT', 'WE', 'ARE', 'NOT', 'A', 'EXACTLY', 'WELCOME'] 
+1995-1836-0013-748: ref=['MARY', 'TAYLOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GREY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0013-748: hyp=['MARY', 'TAYLOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GRAY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0014-749: ref=['FORTUNATELY', 'SAID', 'MISTER', 'VANDERPOOL', 'NORTHERNERS', 'AND', 'SOUTHERNERS', 'ARE', 'ARRIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1836-0014-749: hyp=['FORTUNATELY', 'SAID', 'MISTER', 'VAN', 'DERPOOL', 'NOR', 'THE', 'NOSE', 'AND', 'SOUTHERNERS', 'ALL', 'RIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1837-0000-777: ref=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', "ZORA'S", 'MUST', 'BE', 'RUINED'] +1995-1837-0000-777: hyp=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', 'ZORAS', 'MUST', 'BE', 'RUINED'] +1995-1837-0001-778: ref=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0001-778: hyp=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0002-779: ref=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0002-779: hyp=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0003-780: ref=['THE', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0003-780: hyp=['WHO', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0004-781: ref=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0004-781: hyp=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0005-782: ref=['SHE', 'WAS', 'SO', 'STRANGE', 'AND', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0005-782: hyp=['SHE', 'WAS', 'SO', 'STRANGE', 'IN', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0006-783: ref=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0006-783: hyp=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0007-784: ref=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] +1995-1837-0007-784: hyp=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] +1995-1837-0008-785: ref=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0008-785: hyp=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0009-786: ref=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 'WITH', 'THE', 'DYKES', 'A', 'WEEK', 'AGO', 'AND', 'NOW'] +1995-1837-0009-786: hyp=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 
'WITH', 'THE', 'DIKES', 'A', 'WEEK', 'AGO', 'AND', 'NOW'] +1995-1837-0010-787: ref=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0010-787: hyp=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0011-788: ref=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0011-788: hyp=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0012-789: ref=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0012-789: hyp=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0013-790: ref=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0013-790: hyp=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0014-791: ref=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0014-791: hyp=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0015-792: ref=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOLLS'] +1995-1837-0015-792: hyp=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOWLS'] +1995-1837-0016-793: ref=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'A', 'TREE'] +1995-1837-0016-793: hyp=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'A', 'TREE'] +1995-1837-0017-794: ref=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0017-794: hyp=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0018-795: ref=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0018-795: hyp=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0019-796: ref=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 'THOUGHT', 'WAS', 'UPPERMOST', 'ZORA'] +1995-1837-0019-796: hyp=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 'THOUGHT', 'WAS', 'UPPERMOST', 'SORA'] +1995-1837-0020-797: ref=['THE', 'YEARS', 'OF', 
'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0020-797: hyp=['THE', 'YEARS', 'OF', 'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0021-798: ref=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0021-798: hyp=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0022-799: ref=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0022-799: hyp=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0023-800: ref=['THE', 'NET', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEAKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0023-800: hyp=['THE', 'NED', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEEKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0024-801: ref=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0024-801: hyp=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0025-802: ref=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'ROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0025-802: hyp=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'AROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0026-803: ref=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0026-803: hyp=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDER', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0027-804: ref=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0027-804: hyp=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0028-805: ref=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0028-805: hyp=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0029-806: ref=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +1995-1837-0029-806: hyp=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +2094-142345-0000-308: ref=['IT', 'IS', 'A', 'VERY', 'FINE', 
'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'THE', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0000-308: hyp=['IT', 'IS', 'A', 'VERY', 'FINE', 'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'A', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0001-309: ref=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0001-309: hyp=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0002-310: ref=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'A', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0002-310: hyp=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'THE', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0003-311: ref=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0003-311: hyp=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0004-312: ref=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0004-312: hyp=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0005-313: ref=['SEVERAL', 'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0005-313: hyp=['SEVERAL', 'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0006-314: ref=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] +2094-142345-0006-314: hyp=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 
'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] +2094-142345-0007-315: ref=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0007-315: hyp=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0008-316: ref=['BUT', 'THERE', 'IS', 'ALWAYS', 'A', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BILLED', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0008-316: hyp=['BUT', 'THERE', 'IS', 'ALWAYS', 'AS', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BUILD', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0009-317: ref=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WHITTAW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTON', 'GOSSIP'] +2094-142345-0009-317: hyp=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WIDOW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTONE', 'GOSSIP'] +2094-142345-0010-318: ref=['HETTY', 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SURFACES', 'FOR', 'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0010-318: hyp=["HETTY'S", 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SERVICES', 'FOR', 
'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0011-319: ref=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0011-319: hyp=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0012-320: ref=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINAH', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', "DINAH'S", 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0012-320: hyp=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINA', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', 'DYNAS', 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0013-321: ref=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EARSHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0013-321: hyp=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EAR', 'SHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0014-322: ref=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WHITTAWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0014-322: hyp=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WIDOWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0015-323: ref=['TO', 'ALL', 'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0015-323: hyp=['TO', 'ALL', 
'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0016-324: ref=['SPINNING', 'INDEED'] +2094-142345-0016-324: hyp=['SPINNING', 'INDEED'] +2094-142345-0017-325: ref=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', 'GALLOWSNESS'] +2094-142345-0017-325: hyp=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', "GALLOW'S", 'NICE'] +2094-142345-0018-326: ref=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0018-326: hyp=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0019-327: ref=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WHITTAWS', 'INDEED'] +2094-142345-0019-327: hyp=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WIDOWS', 'INDEED'] +2094-142345-0020-328: ref=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0020-328: hyp=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0021-329: ref=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0021-329: hyp=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0022-330: ref=['MISTER', "OTTLEY'S", 'INDEED'] +2094-142345-0022-330: hyp=['MISTER', 'OAKLEIGHS', 'INDEED'] +2094-142345-0023-331: ref=["YOU'RE", 'A', 'RARE', 'UN', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', "IT'S", 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0023-331: hyp=['YOU', 'ARE', 'A', 'RARE', 'AND', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', 'ITS', 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0024-332: ref=['MUNNY', 'MY', "IRON'S", 'TWITE', 'TOLD', 'PEASE', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0024-332: hyp=['MONEY', 'MY', 'IRONS', 'TWITE', 'TOLLED', 'PEAS', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0025-333: ref=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0025-333: hyp=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0026-334: ref=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0026-334: hyp=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0027-335: ref=['MUNNY', 'I', 'TOULD', 'IKE', 'TO', 'DO', 'INTO', 'DE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'DE', 'WHITTAWD'] +2094-142345-0027-335: hyp=['MONEY', 'I', 'DID', 'LIKE', 'TO', 'DO', 'INTO', 'THE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'THE', 'WIDOW'] +2094-142345-0028-336: ref=['NO', 'NO', 'NO', 'TOTTY', 'UD', 'GET', 'HER', 'FEET', 'WET', 'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0028-336: hyp=['NO', 'NO', 'TODDY', 'HAD', 'GET', 'HER', 'FEET', 'WET', 'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0029-337: ref=['DID', 'EVER', 'ANYBODY', 'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0029-337: hyp=['DID', 'EVER', 'ANYBODY', 'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 
'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0030-338: ref=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0030-338: hyp=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0031-339: ref=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0031-339: hyp=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0032-340: ref=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0032-340: hyp=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0033-341: ref=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0033-341: hyp=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0034-342: ref=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', "I'VE", 'GOT', 'LOTS', 'O', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELLING', 'AS', "ISN'T", 'MADE', 'UP'] +2094-142345-0034-342: hyp=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', 'I', 'GOT', 'LOTS', 'OF', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELINGS', "ISN'T", 'MADE', 'UP'] +2094-142345-0035-343: ref=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AUNT', 'SAID', 'DINAH'] +2094-142345-0035-343: hyp=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AND', 'SAID', 'DINAH'] +2094-142345-0036-344: ref=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 'FAMILIES'] +2094-142345-0036-344: hyp=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 'FAMILIES'] +2094-142345-0037-345: ref=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] +2094-142345-0037-345: hyp=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 
'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] +2094-142345-0038-346: ref=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THAN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0038-346: hyp=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THEN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0039-347: ref=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', "I'VE", 'DONE'] +2094-142345-0039-347: hyp=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', 'I', 'HAVE', 'DONE'] +2094-142345-0040-348: ref=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0040-348: hyp=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0041-349: ref=['DIRECTION'] +2094-142345-0041-349: hyp=['DIRECTION'] +2094-142345-0042-350: ref=['I', 'HANNA', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0042-350: hyp=['I', 'HAD', 'A', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0043-351: ref=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0043-351: hyp=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0044-352: ref=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0044-352: hyp=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0045-353: ref=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0045-353: hyp=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0046-354: ref=['I', 'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0046-354: hyp=['I', 'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0047-355: ref=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0047-355: hyp=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0048-356: ref=['SAID', 'CAPTAIN', 'DONNITHORNE', 'SEATING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0048-356: hyp=['SAID', 'CAPTAIN', 'DONNYTHORNE', 'SITTING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 
'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0049-357: ref=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSETER', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0049-357: hyp=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSITUR', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0050-358: ref=['BUT', "THERE'S", 'FATHER', 'THE', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0050-358: hyp=['BUT', "THERE'S", 'FATHER', 'IN', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0051-359: ref=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0051-359: hyp=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0052-360: ref=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0052-360: hyp=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0053-361: ref=['FOR', 'IF', "HE'S", 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0053-361: hyp=['FOR', 'IF', 'IS', 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0054-362: ref=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0054-362: hyp=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0055-363: ref=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0055-363: hyp=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0056-364: ref=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0056-364: hyp=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0057-365: ref=['I', 'KNOW', 'HIS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] +2094-142345-0057-365: hyp=['I', 'KNOWS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] +2094-142345-0058-366: ref=['BY', 'THE', 'BY', "I'VE", 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DAIRY', 'MISSUS', 'POYSER'] +2094-142345-0058-366: hyp=['BY', 'THE', 'BY', 'I', 'HAVE', 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DEARIE', 'MISSUS', 'POYSER'] +2094-142345-0059-367: ref=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] 
+2094-142345-0059-367: hyp=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] +2094-142345-0060-368: ref=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2094-142345-0060-368: hyp=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2300-131720-0000-1816: ref=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0000-1816: hyp=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0001-1817: ref=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIADUCT'] +2300-131720-0001-1817: hyp=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIA', 'DOC'] +2300-131720-0002-1818: ref=['THERE', 'MESSRS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0002-1818: hyp=['THERE', 'MESSIERS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0003-1819: ref=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0003-1819: hyp=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0004-1820: ref=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0004-1820: hyp=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 
'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0005-1821: ref=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0005-1821: hyp=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0006-1822: ref=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0006-1822: hyp=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0007-1823: ref=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEEP', 'THEM', 'HE', 'IS', 'IDIOSYNCRATICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0007-1823: hyp=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEPT', 'THEM', 'HE', 'IS', 'IDIOS', 'AND', 'CRADICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0008-1824: ref=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0008-1824: hyp=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0009-1825: ref=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'IT', 'SERIOUSLY'] +2300-131720-0009-1825: hyp=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'ITS', 'SERIOUSLY'] +2300-131720-0010-1826: ref=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0010-1826: hyp=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0011-1827: ref=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] +2300-131720-0011-1827: hyp=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] 
+2300-131720-0012-1828: ref=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODDY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0012-1828: hyp=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0013-1829: ref=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERNED', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVAILABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0013-1829: hyp=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERN', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVAILABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0014-1830: ref=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0014-1830: hyp=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0015-1831: ref=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOAD', 'WITH', 'A', 'FRICTION', 'BRAKE', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0015-1831: hyp=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOWED', 'WITH', 'A', 'FRICTION', 'BREAK', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0016-1832: ref=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICATE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0016-1832: hyp=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICADE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0017-1833: ref=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 'THE', 'MULTIPLE', 'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0017-1833: hyp=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 'THE', 'MULTIPLE', 'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 
'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0018-1834: ref=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICTS', 'OF', 'LARGE', 'CITIES'] +2300-131720-0018-1834: hyp=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICT', 'OF', 'LARGE', 'CITIES'] +2300-131720-0019-1835: ref=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0019-1835: hyp=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0020-1836: ref=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'POWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0020-1836: hyp=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'BOWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0021-1837: ref=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AND', 'EQUIP', 'CENTRAL', 'STATIONS'] +2300-131720-0021-1837: hyp=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AN', 'EQUIP', 'CENTRAL', 'STATIONS'] +2300-131720-0022-1838: ref=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'INVENTORS', 'AT', 'PRACTICALLY', 'THE', 'SAME', 'TIME'] +2300-131720-0022-1838: hyp=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'IN', 'VENORS', 'AT', 'PRACTICALLY', 'THE', 'SAME', 'TIME'] +2300-131720-0023-1839: ref=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 
'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0023-1839: hyp=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0024-1840: ref=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0024-1840: hyp=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0025-1841: ref=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IN', 'PRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'WHEN', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0025-1841: hyp=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IN', 'PRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'ONE', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0026-1842: ref=['THE', 'ARC', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0026-1842: hyp=['THE', 'ARK', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0027-1843: ref=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0027-1843: hyp=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0028-1844: ref=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 'THOUGHT', 'THE', 'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0028-1844: hyp=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 'THOUGHT', 'THE', 
'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0029-1845: ref=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0029-1845: hyp=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0030-1846: ref=['THE', 'PRINCIPLE', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0030-1846: hyp=['THE', 'PRINCIPAL', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0031-1847: ref=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0031-1847: hyp=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0032-1848: ref=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0032-1848: hyp=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0033-1849: ref=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METER', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0033-1849: hyp=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METRE', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0034-1850: ref=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] +2300-131720-0034-1850: hyp=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 
'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] +2300-131720-0035-1851: ref=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METER'] +2300-131720-0035-1851: hyp=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METRE'] +2300-131720-0036-1852: ref=['THE', 'METER', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0036-1852: hyp=['THE', 'METRE', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0037-1853: ref=['HE', 'WEIGHED', 'AND', 'REWEIGHED', 'THE', 'METER', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0037-1853: hyp=['HE', 'WEIGHED', 'AND', 'REWAIED', 'THE', 'METRE', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0038-1854: ref=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0038-1854: hyp=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0039-1855: ref=['THE', 'PROBLEM', 'WAS', 'SOLVED'] +2300-131720-0039-1855: hyp=['THE', 'PROBLEM', 'WAS', 'SOLVED'] +2300-131720-0040-1856: ref=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0040-1856: hyp=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0041-1857: ref=['WE', 'HAD', 'METERS', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 'LIQUID'] +2300-131720-0041-1857: hyp=['WE', 'HAD', 'METRES', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 'LIQUID'] +237-126133-0000-2407: ref=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHED', 'AMONG', 'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 'ALL', 'TIME'] +237-126133-0000-2407: hyp=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHE', 'AMONG', 
'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 'ALL', 'TIME'] +237-126133-0001-2408: ref=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0001-2408: hyp=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0002-2409: ref=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0002-2409: hyp=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0003-2410: ref=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0003-2410: hyp=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0004-2411: ref=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0004-2411: hyp=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0005-2412: ref=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0005-2412: hyp=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0006-2413: ref=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0006-2413: hyp=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0007-2414: ref=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] +237-126133-0007-2414: hyp=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] +237-126133-0008-2415: ref=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0008-2415: hyp=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0009-2416: ref=['NOW', "YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0009-2416: hyp=['NOW', 
"YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0010-2417: ref=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0010-2417: hyp=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0011-2418: ref=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'IN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0011-2418: hyp=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'AN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0012-2419: ref=['THERE', 'THERE', 'HE', 'SAID', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0012-2419: hyp=['THERE', 'THERE', 'HE', 'SAID', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0013-2420: ref=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0013-2420: hyp=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0014-2421: ref=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0014-2421: hyp=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0015-2422: ref=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0015-2422: hyp=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0016-2423: ref=['OH', 'NO', 'JASPER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0016-2423: hyp=['OH', 'NO', 'JAPSER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0017-2424: ref=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0017-2424: hyp=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0018-2425: ref=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0018-2425: hyp=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0019-2426: ref=['DEAR', 'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0019-2426: hyp=['DEAR', 'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0020-2427: ref=['HOW', 'DID', 'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0020-2427: hyp=['HOW', 'DID', 'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0021-2428: ref=['SHE', 'ASKED', 'IMPULSIVELY', 'I', 
"DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0021-2428: hyp=['SHE', 'ASKED', 'IMPULSIVELY', 'I', "DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0022-2429: ref=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0022-2429: hyp=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0023-2430: ref=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0023-2430: hyp=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0024-2431: ref=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0024-2431: hyp=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0025-2432: ref=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-126133-0025-2432: hyp=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-134493-0000-2388: ref=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERGSON', 'DIED'] +237-134493-0000-2388: hyp=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERKSON', 'DIED'] +237-134493-0001-2389: ref=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0001-2389: hyp=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0002-2390: ref=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHECKER', 'BOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'DARK', 'AND', 'LIGHT'] +237-134493-0002-2390: hyp=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHECKERBOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'AND', 'LIGHT'] +237-134493-0003-2391: ref=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAYLY', 'PAINTED', 'FARMHOUSES', 'THE', 'GILDED', 'WEATHER', 'VANES', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] +237-134493-0003-2391: hyp=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAILY', 'PAINTED', 'FARMHOUSES', 'THE', 'GILDED', 'WEATHER', 'VEINS', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] +237-134493-0004-2392: ref=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 
'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0004-2392: hyp=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0005-2393: ref=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GRAY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0005-2393: hyp=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GRAY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0006-2394: ref=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0006-2394: hyp=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0007-2395: ref=['ALEXANDRA', 'LETS', 'YOU', 'SLEEP', 'LATE'] +237-134493-0007-2395: hyp=['ALEXANDRA', "THAT'S", 'YOU', 'SLEEP', 'LATE'] +237-134493-0008-2396: ref=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0008-2396: hyp=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0009-2397: ref=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'EMIL', 'COAXED'] +237-134493-0009-2397: hyp=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'AMYL', 'COAXED'] +237-134493-0010-2398: ref=['I', 'NEVER', 'SEE', "LOU'S", 'SCYTHE', 'OVER', 'HERE'] +237-134493-0010-2398: hyp=['I', 'NEVER', 'SEE', 'LOOSE', 'SCYTHE', 'OVER', 'HERE'] +237-134493-0011-2399: ref=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ATHLETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0011-2399: hyp=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ADETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0012-2400: ref=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PICK', 'CHERRIES'] +237-134493-0012-2400: hyp=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PIC', 'CHERRIES'] +237-134493-0013-2401: ref=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0013-2401: hyp=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0014-2402: ref=['THEY', 'THINK', "YOU'RE", 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0014-2402: hyp=['THEY', 'THINK', 'YOU', 'ARE', 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0015-2403: ref=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0015-2403: hyp=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0016-2404: ref=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'OSAGE', 'ORANGE', 'HEDGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] +237-134493-0016-2404: hyp=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 
'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'O', 'SAGE', 'ORANGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] +237-134493-0017-2405: ref=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGSON'] +237-134493-0017-2405: hyp=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGIN'] +237-134493-0018-2406: ref=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEEHIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134493-0018-2406: hyp=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEEHIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134500-0000-2345: ref=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0000-2345: hyp=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0001-2346: ref=['MARIE', 'SIGHED'] +237-134500-0001-2346: hyp=['MARIE', 'SIGHED'] +237-134500-0002-2347: ref=['A', 'BRISK', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0002-2347: hyp=['A', 'BRACE', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0003-2348: ref=['THE', 'ORCHARD', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0003-2348: hyp=['THE', 'ARCHER', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0004-2349: ref=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0004-2349: hyp=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0005-2350: ref=['OH', 'BUT', "I'M", 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0005-2350: hyp=['OH', 'BUT', 'I', 'AM', 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0006-2351: ref=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0006-2351: hyp=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0007-2352: ref=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0007-2352: hyp=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0008-2353: ref=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 'TOO'] +237-134500-0008-2353: hyp=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 'TOO'] +237-134500-0009-2354: ref=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0009-2354: hyp=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0010-2355: ref=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] +237-134500-0010-2355: hyp=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] +237-134500-0011-2356: ref=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0011-2356: 
hyp=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0012-2357: ref=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0012-2357: hyp=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0013-2358: ref=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'BRANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'CAUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0013-2358: hyp=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'RANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'THOUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0014-2359: ref=['AND', 'EMIL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0014-2359: hyp=['AND', 'AMYL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0015-2360: ref=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHABATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0015-2360: hyp=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHEBATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0016-2361: ref=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0016-2361: hyp=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0017-2362: ref=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0017-2362: hyp=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0018-2363: ref=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0018-2363: hyp=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0019-2364: ref=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0019-2364: hyp=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0020-2365: ref=['YES', "DON'T", 'YOU'] +237-134500-0020-2365: hyp=['YES', "DON'T", 'YOU'] +237-134500-0021-2366: ref=['OH', 'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAID', 'AND', 'SCHOOL', 'TEACHERY'] +237-134500-0021-2366: hyp=['OH', 
'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAY', 'AT', 'IN', 'SCHOOL', 'TEACHERY'] +237-134500-0022-2367: ref=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0022-2367: hyp=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0023-2368: ref=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'CARL'] +237-134500-0023-2368: hyp=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'KARL'] +237-134500-0024-2369: ref=['I', 'LIKE', 'TO', 'TALK', 'TO', 'CARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0024-2369: hyp=['I', 'LIKE', 'TO', 'TALK', 'TO', 'KARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0025-2370: ref=['OH', 'EMIL'] +237-134500-0025-2370: hyp=['OH', 'AMY', 'ILL'] +237-134500-0026-2371: ref=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0026-2371: hyp=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0027-2372: ref=["MARIE'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0027-2372: hyp=["MARIE'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0028-2373: ref=["I'M", 'SURE', 'ALEXANDRA', 'HOPES', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0028-2373: hyp=['I', 'AM', 'SURE', 'ALEXANDER', 'HELPS', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0029-2374: ref=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0029-2374: hyp=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0030-2375: ref=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0030-2375: hyp=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0031-2376: ref=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLE', 'CLOTH'] +237-134500-0031-2376: hyp=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLECLOTH'] +237-134500-0032-2377: ref=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MEN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 'AND', 'DOWN'] +237-134500-0032-2377: hyp=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MAN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 'AND', 'DOWN'] +237-134500-0033-2378: ref=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 'SAID', 'SADLY'] +237-134500-0033-2378: hyp=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 'SAID', 'SADLY'] +237-134500-0034-2379: ref=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] +237-134500-0034-2379: hyp=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] 
+237-134500-0035-2380: ref=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0035-2380: hyp=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0036-2381: ref=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARIE'] +237-134500-0036-2381: hyp=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARIE'] +237-134500-0037-2382: ref=['BUT', 'EMIL', 'IF', 'I', 'UNDERSTAND', 'THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0037-2382: hyp=['BUT', 'AM', 'ILL', 'IF', 'I', 'UNDERSTAND', 'IN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0038-2383: ref=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0038-2383: hyp=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0039-2384: ref=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0039-2384: hyp=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0040-2385: ref=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0040-2385: hyp=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0041-2386: ref=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0041-2386: hyp=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0042-2387: ref=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +237-134500-0042-2387: hyp=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +260-123286-0000-200: ref=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0000-200: hyp=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0001-201: ref=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0001-201: hyp=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0002-202: ref=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 'RESUMED', 'ITS', 'SWAY'] +260-123286-0002-202: hyp=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 'RESUMED', 'ITS', 'SWAY'] +260-123286-0003-203: ref=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] +260-123286-0003-203: hyp=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] 
+260-123286-0004-204: ref=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0004-204: hyp=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0005-205: ref=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0005-205: hyp=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0006-206: ref=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0006-206: hyp=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0007-207: ref=['HE', 'CALLED', 'THIS', 'SEA', 'A', 'POND', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0007-207: hyp=['HE', 'CALLED', 'THIS', 'SEA', 'UPON', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0008-208: ref=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0008-208: hyp=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0009-209: ref=['I', 'TAKE', 'THIS', 'AS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0009-209: hyp=['I', 'TAKE', 'THIS', 'AS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0010-210: ref=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0010-210: hyp=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0011-211: ref=['NOTHING', 'NEW', 'WEATHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0011-211: hyp=['NOTHING', 'NEW', 'WHETHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0012-212: ref=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'TO', 'FEAR'] +260-123286-0012-212: hyp=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'OF', 'FEAR'] +260-123286-0013-213: ref=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0013-213: hyp=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0014-214: ref=['TRULY', 'THIS', 'SEA', 'IS', 'OF', 'INFINITE', 'WIDTH'] +260-123286-0014-214: hyp=['TRULY', 'THE', 'SEA', 'IS', 'OF', 'INFINITE', 'WIDTH'] +260-123286-0015-215: ref=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0015-215: hyp=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0016-216: ref=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0016-216: hyp=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0017-217: ref=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] +260-123286-0017-217: hyp=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] +260-123286-0018-218: ref=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 
'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0018-218: hyp=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0019-219: ref=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PICK', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0019-219: hyp=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PIG', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0020-220: ref=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0020-220: hyp=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0021-221: ref=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0021-221: hyp=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0022-222: ref=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0022-222: hyp=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0023-223: ref=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0023-223: hyp=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0024-224: ref=["THERE'S", 'A', 'WHALE', 'A', 'WHALE', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0024-224: hyp=["THERE'S", 'A', 'WAIL', 'A', 'WELL', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0025-225: ref=['FLIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0025-225: hyp=['FIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0026-226: ref=['TWO', 'MONSTERS', 'ONLY', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'ARE', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0026-226: hyp=['TWO', 'MONSTERS', 'OMER', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'OUR', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0027-227: ref=['I', 'CAN', 'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ICHTHYOSAURUS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'COAL', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0027-227: hyp=['I', 'CAN', 'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ITHUS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'CO', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0028-228: ref=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0028-228: hyp=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0029-229: ref=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 
'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0029-229: hyp=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0030-230: ref=['SUDDENLY', 'THE', 'ICHTHYOSAURUS', 'AND', 'THE', 'PLESIOSAURUS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WHIRLPOOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0030-230: hyp=['SUDDENLY', 'THE', 'IKESORIS', 'AND', 'THE', 'PLESIOSAURUS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WAR', 'POOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0031-231: ref=['AS', 'FOR', 'THE', 'ICHTHYOSAURUS', 'HAS', 'HE', 'RETURNED', 'TO', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123286-0031-231: hyp=['AS', 'FOR', 'THE', 'INHEOSORIS', 'HAS', 'HE', 'RETURNED', 'TO', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123288-0000-232: ref=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0000-232: hyp=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0001-233: ref=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THAT', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0001-233: hyp=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THE', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0002-234: ref=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPOURS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SALINE', 'WATERS'] +260-123288-0002-234: hyp=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPORS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SAILING', 'WATERS'] +260-123288-0003-235: ref=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'THROUGH', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'HAS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0003-235: hyp=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'TO', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'IS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0004-236: ref=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0004-236: hyp=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0005-237: ref=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MIST', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GREY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0005-237: hyp=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MISTS', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GREY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0006-238: ref=['THE', 'ATMOSPHERE', 'IS', 'EVIDENTLY', 'CHARGED', 'AND', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0006-238: hyp=['THE', 'ATMOSPHERE', 'AS', 'EVIDENTLY', 'CHARGED', 'IN', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0007-239: ref=['THE', 'WIND', 'NEVER', 'LULLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 'RUSHING', 'STORMS'] +260-123288-0007-239: hyp=['THE', 'WIND', 'NEVER', 'LOLLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 
'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 'RUSHING', 'STORMS'] +260-123288-0008-240: ref=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0008-240: hyp=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0009-241: ref=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0009-241: hyp=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0010-242: ref=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMBENT', 'SAINT', "ELMO'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0010-242: hyp=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMMA', 'SAINT', "ABLE'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0011-243: ref=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'THE', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0011-243: hyp=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'A', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0012-244: ref=['THAT', 'WILL', 'BE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0012-244: hyp=['THAT', 'WILL', 'BE', 'THE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0013-245: ref=['THE', 'PILED', 'UP', 'VAPOURS', 'CONDENSE', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MISTS', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0013-245: hyp=['THAT', 'PILED', 'UP', 'VAPORS', 'CONTENSED', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MIST', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0014-246: ref=['HANS', 'STIRS', 'NOT'] +260-123288-0014-246: hyp=['HANS', 'STIRS', 'NOT'] +260-123288-0015-247: ref=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'EMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0015-247: hyp=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'ADMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0016-248: ref=['I', 'REFER', 'TO', 'THE', 
'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0016-248: hyp=['I', 'REFER', 'TO', 'THE', 'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0017-249: ref=['IS', 'THE', 'ATMOSPHERIC', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'THIS', 'DENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0017-249: hyp=['IS', 'THE', 'ATMOSPHERE', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'OSTENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0018-250: ref=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0018-250: hyp=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0019-251: ref=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0019-251: hyp=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0020-252: ref=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0020-252: hyp=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0021-253: ref=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0021-253: hyp=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0022-254: ref=['THEY', 'SEEM', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0022-254: hyp=['THEY', 'SEEMED', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0023-255: ref=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0023-255: hyp=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0024-256: ref=['THE', 'FIREBALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIPPED', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0024-256: hyp=['THE', 'FIRE', 'BALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIP', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0025-257: ref=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] +260-123288-0025-257: hyp=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] +260-123288-0026-258: ref=['WE', 'SHALL', 'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0026-258: hyp=['WE', 'SHALL', 'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 
'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0027-259: ref=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0027-259: hyp=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0028-260: ref=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123288-0028-260: hyp=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123440-0000-179: ref=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0000-179: hyp=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0001-180: ref=['POOR', 'ALICE'] +260-123440-0001-180: hyp=['POOR', 'ALICE'] +260-123440-0002-181: ref=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0002-181: hyp=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0003-182: ref=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0003-182: hyp=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0004-183: ref=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0004-183: hyp=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0005-184: ref=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0005-184: hyp=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0006-185: ref=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0006-185: hyp=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0007-186: ref=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'A', 'LITTLE', 'DIFFERENT'] +260-123440-0007-186: hyp=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'LITTLE', 'DIFFERENT'] +260-123440-0008-187: ref=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0008-187: hyp=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0009-188: ref=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] +260-123440-0009-188: hyp=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] +260-123440-0010-189: ref=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 
'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0010-189: hyp=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0011-190: ref=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0011-190: hyp=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0012-191: ref=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0012-191: hyp=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0013-192: ref=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0013-192: hyp=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0014-193: ref=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0014-193: hyp=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0015-194: ref=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0015-194: hyp=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0016-195: ref=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0016-195: hyp=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0017-196: ref=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0017-196: hyp=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0018-197: ref=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'O', 'MOUSE'] +260-123440-0018-197: hyp=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'OH', 'MOUSE'] +260-123440-0019-198: ref=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0019-198: hyp=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0020-199: ref=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 'NOT', 'WE', 'INDEED'] +260-123440-0020-199: hyp=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 'NOT', 'WE', 'INDEED'] +2830-3979-0000-1120: ref=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', "LUTHER'S", 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] +2830-3979-0000-1120: hyp=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', 'LUTHERS', 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] +2830-3979-0001-1121: ref=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAMLINE', 'HIM', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 
'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TODAY', 'TO', 'AMERICANS'] +2830-3979-0001-1121: hyp=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAM', 'LINE', 'HYMN', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TO', 'DAY', 'TO', 'AMERICANS'] +2830-3979-0002-1122: ref=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0002-1122: hyp=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALLATIONS'] +2830-3979-0003-1123: ref=['THE', 'UNDERTAKING', 'WHICH', 'SEEMED', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0003-1123: hyp=['THE', 'UNDERTAKING', 'WHICH', 'SEEMS', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0004-1124: ref=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0004-1124: hyp=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0005-1125: ref=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0005-1125: hyp=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0006-1126: ref=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0006-1126: hyp=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ANGULATIONS'] +2830-3979-0007-1127: ref=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'EDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALATIANS', 'FOR', 'INSTANCE'] +2830-3979-0007-1127: hyp=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'ADDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALLATIONS', 'FOR', 'INSTANCE'] +2830-3979-0008-1128: ref=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALATIANS', 'AND', 'ROERER', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 'THE', 'PRINTER'] +2830-3979-0008-1128: hyp=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALLATIONS', 'AND', 'ROAR', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 'THE', 'PRINTER'] +2830-3979-0009-1129: ref=['IT', 'PRESENTS', 'LIKE', 'NO', 'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0009-1129: hyp=['IT', 'PRESENTS', 'LIKE', 'NO', 
'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0010-1130: ref=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0010-1130: hyp=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0011-1131: ref=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0011-1131: hyp=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0012-1132: ref=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOREVER'] +2830-3979-0012-1132: hyp=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOR', 'EVER'] +2830-3980-0000-1043: ref=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0000-1043: hyp=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0001-1044: ref=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0001-1044: hyp=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0002-1045: ref=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0002-1045: hyp=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0003-1046: ref=['PAUL', 'CAME', 'LATER', 'AND', 'IS', 'BENEATH', 'US'] +2830-3980-0003-1046: hyp=['PAW', 'CAME', 'LATER', 'IN', 'HIS', 'BENEATH', 'US'] +2830-3980-0004-1047: ref=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0004-1047: hyp=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0005-1048: ref=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0005-1048: hyp=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0006-1049: ref=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'AND', 'MINISTRY'] +2830-3980-0006-1049: hyp=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'AND', 'MINISTRY'] +2830-3980-0007-1050: ref=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0007-1050: hyp=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0008-1051: ref=['PAUL', 'TAKES', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] 
+2830-3980-0008-1051: hyp=['POLITICS', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] +2830-3980-0009-1052: ref=['PAUL', 'AN', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0009-1052: hyp=['PAUL', 'AND', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0010-1053: ref=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0010-1053: hyp=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0011-1054: ref=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0011-1054: hyp=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0012-1055: ref=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0012-1055: hyp=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0013-1056: ref=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0013-1056: hyp=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0014-1057: ref=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0014-1057: hyp=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0015-1058: ref=['FOR', 'A', 'PERSON', 'TO', 'POSSESS', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0015-1058: hyp=['FOR', 'A', 'PERSON', 'TO', 'POSSESSED', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0016-1059: ref=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0016-1059: hyp=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0017-1060: ref=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0017-1060: hyp=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0018-1061: ref=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0018-1061: hyp=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0019-1062: ref=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0019-1062: hyp=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0020-1063: ref=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'HOLY', 'PRIDE'] +2830-3980-0020-1063: hyp=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'WHOLLY', 'PRIDE'] +2830-3980-0021-1064: ref=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 'DEAD'] +2830-3980-0021-1064: hyp=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 'DEAD'] +2830-3980-0022-1065: ref=['THE', 'CLAUSE', 'SEEMS', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] 
+2830-3980-0022-1065: hyp=['THE', 'CLAS', 'SEEMS', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] +2830-3980-0023-1066: ref=['THESE', 'PERVERTERS', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0023-1066: hyp=['THESE', 'PERVERTIVES', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0024-1067: ref=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0024-1067: hyp=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0025-1068: ref=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0025-1068: hyp=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0026-1069: ref=['VERSE', 'TWO'] +2830-3980-0026-1069: hyp=['FIRST', 'TOO'] +2830-3980-0027-1070: ref=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0027-1070: hyp=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0028-1071: ref=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0028-1071: hyp=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0029-1072: ref=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0029-1072: hyp=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0030-1073: ref=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0030-1073: hyp=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0031-1074: ref=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0031-1074: hyp=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0032-1075: ref=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'EYE', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0032-1075: hyp=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'I', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0033-1076: ref=['NOT', 'ALL', 'THE', 'GALATIANS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0033-1076: hyp=['NOT', 'ALL', 'THE', 'GALLATIONS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0034-1077: ref=['THESE', 'MEANS', 'CANNOT', 'BE', 
'CONTAMINATED'] +2830-3980-0034-1077: hyp=['THESE', 'MEANS', 'CANNOT', 'BE', 'CONTAMINATED'] +2830-3980-0035-1078: ref=['THEY', 'REMAIN', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0035-1078: hyp=['THEY', 'REMAINED', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0036-1079: ref=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0036-1079: hyp=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0037-1080: ref=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0037-1080: hyp=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0038-1081: ref=['GRACE', 'BE', 'TO', 'YOU', 'AND', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0038-1081: hyp=['GRACE', 'BE', 'TO', 'YOU', 'IN', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0039-1082: ref=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0039-1082: hyp=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0040-1083: ref=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0040-1083: hyp=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0041-1084: ref=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0041-1084: hyp=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0042-1085: ref=['THE', 'WORLD', 'BRANDS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0042-1085: hyp=['THE', 'WORLD', 'BRINGS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0043-1086: ref=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0043-1086: hyp=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0044-1087: ref=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0044-1087: hyp=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0045-1088: ref=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0045-1088: hyp=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0046-1089: ref=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] +2830-3980-0046-1089: hyp=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] +2830-3980-0047-1090: ref=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'AND', 'TO', 'COMPREHEND', 'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0047-1090: hyp=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'INTO', 'COMPREHEND', 'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0048-1091: ref=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 
'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0048-1091: hyp=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0049-1092: ref=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0049-1092: hyp=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0050-1093: ref=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0050-1093: hyp=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0051-1094: ref=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0051-1094: hyp=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0052-1095: ref=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0052-1095: hyp=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0053-1096: ref=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0053-1096: hyp=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0054-1097: ref=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'TO', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0054-1097: hyp=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'DOES', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0055-1098: ref=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0055-1098: hyp=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0056-1099: ref=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0056-1099: hyp=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0057-1100: ref=['THE', 'ARIANS', 'TOOK', 'CHRIST', 'FOR', 'A', 'NOBLE', 'AND', 'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0057-1100: hyp=['THE', 'ARIANS', 'TOOK', 'CHRIST', 
'FOR', 'A', 'NOBLE', 'AND', 'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0058-1101: ref=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0058-1101: hyp=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0059-1102: ref=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0059-1102: hyp=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0060-1103: ref=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0060-1103: hyp=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0061-1104: ref=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASCHAL', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0061-1104: hyp=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASSION', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0062-1105: ref=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'OUR', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0062-1105: hyp=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'A', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0063-1106: ref=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0063-1106: hyp=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0064-1107: ref=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0064-1107: hyp=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0065-1108: ref=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0065-1108: hyp=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0066-1109: ref=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0066-1109: hyp=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0067-1110: ref=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0067-1110: hyp=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0068-1111: ref=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0068-1111: hyp=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0069-1112: ref=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0069-1112: hyp=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0070-1113: ref=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0070-1113: 
hyp=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0071-1114: ref=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'SIN'] +2830-3980-0071-1114: hyp=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'IN'] +2830-3980-0072-1115: ref=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0072-1115: hyp=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0073-1116: ref=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKEN', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'UNTO', 'GOD', 'WITH', 'A', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0073-1116: hyp=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKING', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'INTO', 'GOD', 'WITH', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0074-1117: ref=['THIS', 'ATTITUDE', 'IS', 'UNIVERSAL', 'AND', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0074-1117: hyp=['THE', 'SATITUDE', 'IS', 'UNIVERSAL', 'IN', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0075-1118: ref=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0075-1118: hyp=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0076-1119: ref=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2830-3980-0076-1119: hyp=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2961-960-0000-497: ref=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0000-497: hyp=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0001-498: ref=['THE', 'INFLUENCE', 'WITH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 
'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0001-498: hyp=['THE', 'INFLUENCE', 'WHICH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0002-499: ref=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATONISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'AND', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0002-499: hyp=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATINISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'IN', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0003-500: ref=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CHARACTERISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0003-500: hyp=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CORRECTORISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0004-501: ref=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMENTATORS', 'ON', 'THE', 'TIMAEUS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEO', 'PLATONISTS'] +2961-960-0004-501: hyp=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMON', 'TEACHERS', 'ON', 'THE', 'TIMIRAS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEW', 'PLATANISTS'] +2961-960-0005-502: ref=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0005-502: hyp=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0006-503: ref=['THE', 'FANCIES', 'OF', 'THE', 'NEO', 'PLATONISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0006-503: hyp=['THE', 'FANCIES', 'OF', 'THE', 'NEW', 'PLATANISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0007-504: ref=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 'PLATO', 'AND', 'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0007-504: hyp=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 'PLATO', 'AND', 
'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0008-505: ref=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALOGUES', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TWO', 'TRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0008-505: hyp=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALECTS', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TUTRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0009-506: ref=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0009-506: hyp=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0010-507: ref=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'DEFINED', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0010-507: hyp=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'THE', 'FIND', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0011-508: ref=['WITH', 'HERACLEITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'ANAXAGORAS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGOREANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 'NUMBER'] +2961-960-0011-508: hyp=['WITH', 'HERACLITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'AN', 'EXAGGERUS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGORIANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 'NUMBER'] +2961-960-0012-509: ref=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRE', 'SOCRATIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMAEUS'] +2961-960-0012-509: hyp=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRIESTHOO', 'CRADIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMIUS'] +2961-960-0013-510: ref=['IT', 'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0013-510: hyp=['IT', 
'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0014-511: ref=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0014-511: hyp=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0015-512: ref=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TIMAEUS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0015-512: hyp=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TENEAS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0016-513: ref=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MASTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'PHAEDRUS', 'OR', 'SYMPOSIUM'] +2961-960-0016-513: hyp=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MASTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'FEATURES', 'OR', 'SIMPOS', 'HIM'] +2961-960-0017-514: ref=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'THE', 'INTRODUCTION', 'IN', 'WHICH', 'HE', 'IS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0017-514: hyp=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'INTRODUCTION', 'IN', 'WHICH', 'HIS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0018-515: ref=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0018-515: hyp=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0019-516: ref=['HE', 'COULD', 'WRITE', 'IN', 'ONE', 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'AND', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0019-516: hyp=['HE', 'COULD', 'WRITE', 'IN', 'ONE', 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0020-517: ref=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAEUS', 'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0020-517: hyp=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAIRS', 'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0021-518: ref=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 'OF', 'APPOSITION', 'AND', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] +2961-960-0021-518: hyp=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 
'OF', 'APPOSITION', 'IN', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] +2961-960-0022-519: ref=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-960-0022-519: hyp=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-961-0000-520: ref=['SOCRATES', 'BEGINS', 'THE', 'TIMAEUS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0000-520: hyp=['SOCRATES', 'BEGINS', 'TO', 'TEARS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0001-521: ref=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0001-521: hyp=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0002-522: ref=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TIMAEUS', 'CITIZEN', 'OF', 'LOCRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'AND', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITIAS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHO', 'IS', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0002-522: hyp=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TO', 'ME', 'AS', 'CITIZEN', 'OF', 'LOCHRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'IN', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITIUS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHOSE', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0003-523: ref=['I', 'WILL', 'IF', 'TIMAEUS', 'APPROVES', 'I', 'APPROVE'] +2961-961-0003-523: hyp=['I', 'WILL', 'IF', 'TO', 'ME', 'AS', 'A', 'PROOFS', 'I', 'APPROVE'] +2961-961-0004-524: ref=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', "SOLON'S", 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'DROPIDAS', 'MY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIAS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0004-524: hyp=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', 'SILENCE', 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'TROPIDAS', 'BY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIUS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0005-525: ref=['SOME', 'POEMS', 'OF', 'SOLON', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0005-525: hyp=['SOME', 'POEMS', 'OF', 'SOLEMN', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0006-526: ref=['AND', 'WHAT', 'WAS', 'THE', 'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0006-526: hyp=['AND', 'WHAT', 'WAS', 'THE', 'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0007-527: ref=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 'EVER', 'ENGAGED'] +2961-961-0007-527: hyp=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 
'EVER', 'ENGAGED'] +2961-961-0008-528: ref=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAS', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0008-528: hyp=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAD', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0009-529: ref=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0009-529: hyp=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0010-530: ref=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0010-530: hyp=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0011-531: ref=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNALS', 'SOLON', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0011-531: hyp=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNAL', 'SOLEMN', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0012-532: ref=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0012-532: hyp=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0013-533: ref=['SOLON', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0013-533: hyp=['SOLEMN', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0014-534: ref=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUNDED', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'SHE', 'FOUNDED', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0014-534: hyp=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUND', 'IT', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'YOU', 'FOUND', 'IT', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0015-535: ref=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0015-535: hyp=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0016-536: ref=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'THEM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0016-536: hyp=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'HIM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0017-537: ref=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 
'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MAN'] +2961-961-0017-537: hyp=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MEN'] +2961-961-0018-538: ref=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0018-538: hyp=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0019-539: ref=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTHER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0019-539: hyp=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTTER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0020-540: ref=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0020-540: hyp=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0021-541: ref=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0021-541: hyp=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0022-542: ref=['THEN', 'NOW', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MAN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORD', 'SPOKE'] +2961-961-0022-542: hyp=['THEN', 'THOU', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MEN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORDS', 'SPOKE'] +3570-5694-0000-2433: ref=['BUT', 'ALREADY', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 'ANTEDATING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALISED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM'] +3570-5694-0000-2433: hyp=['BETTER', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 
'ANTETING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALIZED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'CELEBRATE', 'SYSTEM'] +3570-5694-0001-2434: ref=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0001-2434: hyp=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0002-2435: ref=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'TO', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOUR', 'AND', 'NOT', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULNESS', 'OF', 'LIFE'] +3570-5694-0002-2435: hyp=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'THROUGH', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOR', 'AND', 'NOT', 'TO', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULLNESS', 'OF', 'LIFE'] +3570-5694-0003-2436: ref=['WITH', 'A', 'FURTHER', 'ADVANCE', 'IN', 'CULTURE', 'THIS', 'TABU', 'MAY', 'CHANGE', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'A', 'TABU', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0003-2436: hyp=['WITH', 'A', 'FURTHER', 'ADVANCE', 'AND', 'CULTURE', 'THIS', 'TABOU', 'MAY', 'CHANGED', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'AT', 'A', 'BOOT', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0004-2437: ref=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0004-2437: hyp=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0005-2438: ref=['UNDER', 'THE', 'TABU', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0005-2438: hyp=['UNDER', 'THE', 'TABOO', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0006-2439: ref=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0006-2439: hyp=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 
'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0007-2440: ref=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0007-2440: hyp=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0008-2441: ref=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0008-2441: hyp=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0009-2442: ref=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0009-2442: hyp=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0010-2443: ref=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0010-2443: hyp=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0011-2444: ref=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'BUT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0011-2444: hyp=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'THAT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0012-2445: ref=['THERE', 'IS', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRADES'] +3570-5694-0012-2445: hyp=['THERE', 'IS', 'A', 'MORE', 'OR', 
'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRATES'] +3570-5694-0013-2446: ref=['THIS', 'DIFFERENTIATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0013-2446: hyp=['THIS', 'DIFFUREATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0014-2447: ref=['MANY', 'OF', 'THESE', 'AFFILIATED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESSER', 'MEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0014-2447: hyp=['MANY', 'OF', 'THESE', 'ARE', 'FILLIOTTED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESS', 'AMEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0015-2448: ref=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0015-2448: hyp=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0016-2449: ref=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0016-2449: hyp=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0017-2450: ref=['THE', 'WEARING', 'OF', 'UNIFORMS', 'OR', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0017-2450: hyp=['THE', 'WEARING', 'OF', 'UNIFORMS', 'ARE', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0018-2451: ref=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 'AND', 'THE', 'IGNOBLE'] +3570-5694-0018-2451: hyp=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 'AND', 'THE', 'IGNOBLE'] +3570-5694-0019-2452: ref=['BUT', 'THE', 'GENERAL', 'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0019-2452: hyp=['BUT', 'THE', 'GENERAL', 
'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0020-2453: ref=['SO', 'THOSE', 'OFFICES', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCOUTREMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0020-2453: hyp=['SO', 'THOSE', 'OFFICERS', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCUTMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0021-2454: ref=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'THE', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0021-2454: hyp=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'A', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0022-2455: ref=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5694-0022-2455: hyp=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5695-0000-2456: ref=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0000-2456: hyp=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0001-2457: ref=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] +3570-5695-0001-2457: hyp=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'A', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] +3570-5695-0002-2458: ref=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VICARIOUS', 'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0002-2458: hyp=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VICHAIRLESS', 'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0003-2459: ref=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 
'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0003-2459: hyp=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0004-2460: ref=['IF', 'BEAUTY', 'OR', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0004-2460: hyp=['IF', 'BEAUTY', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0005-2461: ref=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0005-2461: hyp=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0006-2462: ref=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENSE', 'OF', 'PECUNIARY', 'DECENCY', 'IS', 'PUT', 'AWAY'] +3570-5695-0006-2462: hyp=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENCE', 'OF', 'PECUNIARY', 'DECENCIES', 'PUT', 'AWAY'] +3570-5695-0007-2463: ref=['THERE', 'IS', 'NO', 'CLASS', 'AND', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0007-2463: hyp=['THERE', 'IS', 'NO', 'CLASS', 'IN', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0008-2464: ref=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'AFFECT'] +3570-5695-0008-2464: hyp=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'EFFECT'] +3570-5695-0009-2465: 
ref=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0009-2465: hyp=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0010-2466: ref=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0010-2466: hyp=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0011-2467: ref=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0011-2467: hyp=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0012-2468: ref=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONTACT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0012-2468: hyp=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONDUCT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0013-2469: ref=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0013-2469: hyp=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0014-2470: ref=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACE', 'IS', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBORHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0014-2470: hyp=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACES', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBOURHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0015-2471: ref=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 'OF', 'WORKMEN'] +3570-5695-0015-2471: hyp=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 
'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 'OF', 'WORKMEN'] +3570-5696-0000-2472: ref=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0000-2472: hyp=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0001-2473: ref=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'HOLD', 'A', 'RANK', 'VERY', 'MUCH', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'QUASI', 'PEACEABLE', 'CULTURE'] +3570-5696-0001-2473: hyp=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'ALL', 'THE', 'RANK', 'VERIMENT', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'COURSE', 'I', 'PEACEABLE', 'CULTURE'] +3570-5696-0002-2474: ref=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0002-2474: hyp=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0003-2475: ref=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'EFFECTED', 'BY', 'A', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MANY', 'AND', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALKERS', 'MAY', 'NOT', 'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0003-2475: hyp=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'AFFECTED', 'BY', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MEN', 'IN', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALK', 'IS', 'NOT', 
'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0004-2476: ref=['THE', 'SALIENT', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0004-2476: hyp=['THE', 'SAILORED', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0005-2477: ref=['THROUGHOUT', 'THE', 'ENTIRE', 'EVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0005-2477: hyp=['THROUGHOUT', 'THE', 'ENTIRE', 'EVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0006-2478: ref=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERYDAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0006-2478: hyp=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERY', 'DAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0007-2479: ref=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0007-2479: hyp=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0008-2480: ref=['BUT', 'IT', 'IS', 'ON', 'OTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTE', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0008-2480: hyp=['BUT', 'IT', 'IS', 'ANOTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTE', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0009-2481: ref=['IN', 'STRICT', 'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0009-2481: hyp=['IN', 'STRICT', 'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0010-2482: ref=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 'UP', 'OF', 'USE', 'AND', 'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3570-5696-0010-2482: hyp=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 
'UP', 'OF', 'USE', 'AND', 'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3575-170457-0000-369: ref=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0000-369: hyp=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0001-370: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0001-370: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0002-371: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0002-371: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0003-372: ref=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'IN', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0003-372: hyp=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'AND', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0004-373: ref=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0004-373: hyp=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0005-374: ref=['SHE', 'A', 'TORY', 'AND', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DISSENT', 'AND', 'RADICALISM'] +3575-170457-0005-374: hyp=['SHE', 'ATTORIAN', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DESCENT', 'AND', 'RADICALISM'] +3575-170457-0006-375: ref=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0006-375: hyp=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0007-376: ref=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0007-376: hyp=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0008-377: ref=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0008-377: hyp=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0009-378: ref=['JANUARY', 'AND', 
'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SOUTHEY'] +3575-170457-0009-378: hyp=['JANUARY', 'AND', 'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SALVI'] +3575-170457-0010-379: ref=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0010-379: hyp=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0011-380: ref=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0011-380: hyp=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0012-381: ref=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0012-381: hyp=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0013-382: ref=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0013-382: hyp=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0014-383: ref=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0014-383: hyp=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0015-384: ref=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0015-384: hyp=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0016-385: ref=['FAREWELL', 'MADAM'] +3575-170457-0016-385: hyp=['FAREWELL', 'MADAM'] +3575-170457-0017-386: ref=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SOUTHEY'] +3575-170457-0017-386: hyp=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 
'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SELVEY'] +3575-170457-0018-387: ref=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0018-387: hyp=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0019-388: ref=['I', 'HAD', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDERATE', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0019-388: hyp=['I', 'HAVE', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDER', 'IT', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0020-389: ref=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0020-389: hyp=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0021-390: ref=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0021-390: hyp=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0022-391: ref=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0022-391: hyp=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0023-392: ref=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0023-392: hyp=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0024-393: ref=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'OR', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 'PRIVATION'] +3575-170457-0024-393: hyp=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'A', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 'PRIVATION'] +3575-170457-0025-394: ref=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0025-394: hyp=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0026-395: ref=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 
'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED', 'C', 'B'] +3575-170457-0026-395: hyp=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED'] +3575-170457-0027-396: ref=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', "SOUTHEY'S", 'REPLY'] +3575-170457-0027-396: hyp=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', 'SO', 'THESE', 'REPLY'] +3575-170457-0028-397: ref=['KESWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAM'] +3575-170457-0028-397: hyp=['KEZWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAM'] +3575-170457-0029-398: ref=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0029-398: hyp=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0030-399: ref=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0030-399: hyp=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0031-400: ref=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0031-400: hyp=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0032-401: ref=['COME', 'COME', 'I', 'AM', 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0032-401: hyp=['COME', 'COME', "I'M", 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0033-402: ref=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'ROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISS', 'E', 'IS', 'COME', 'OH', 'DEAR'] +3575-170457-0033-402: hyp=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'AROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISS', 'EA', 'IS', 'COME', 'OH', 'DEAR'] +3575-170457-0034-403: ref=['IN', 'THIS', 'MONOTONOUS', 'LIFE', 'OF', 'MINE', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0034-403: hyp=['IN', 'THIS', 'MONOTONOUS', 'LIFE', 'OF', 'MIND', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0035-404: ref=['I', 'WISH', 'IT', 'WOULD', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 
'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0035-404: hyp=['I', 'WISH', 'YOU', 'WERE', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0036-405: ref=['MY', 'EYES', 'FILL', 'WITH', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WANDERING', 'IN', 'THOUGHT', 'AND', 'DEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMITTEN', 'AT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'IN', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0036-405: hyp=['MY', 'EYES', 'FILLED', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WONDERING', 'IN', 'THOUGHT', 'INDEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMIT', 'IN', 'THAT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'AND', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0037-406: ref=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOTBED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'DIRECTION'] +3575-170457-0037-406: hyp=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOT', 'BED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'A', 'DIRECTION'] +3575-170457-0038-407: ref=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHRISTIAN', 'FAITH', 'MY', 'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0038-407: hyp=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHISH', 'FAITH', 'MY', 'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0039-408: ref=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] +3575-170457-0039-408: hyp=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 
'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] +3575-170457-0040-409: ref=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0040-409: hyp=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0041-410: ref=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'IT', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0041-410: hyp=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'HE', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0042-411: ref=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'OUR', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0042-411: hyp=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'HER', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0043-412: ref=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'IT', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 'BEST'] +3575-170457-0043-412: hyp=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'AND', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 'BEST'] +3575-170457-0044-413: ref=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0044-413: hyp=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0045-414: ref=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 
'FROM', 'THE', 'CONTAMINATION', 'OF', 'TOO', 'INTIMATE', 'SOCIETY'] +3575-170457-0045-414: hyp=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 'FROM', 'THE', 'CONTAMINATION', 'OF', 'TWO', 'INTIMATE', 'SOCIETY'] +3575-170457-0046-415: ref=['A', 'GOOD', 'NEIGHBOUR', 'OF', 'THE', 'BRONTES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', "DRUGGIST'S", 'SHOP', 'IN', 'HAWORTH', 'AND', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRESS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLDS', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0046-415: hyp=['A', 'GOOD', 'NEIGHBOR', 'OF', 'THE', 'BRONTES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', 'DRUGGIST', 'SHOP', 'IN', 'HAWORTH', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRIS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLDS', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0047-416: ref=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0047-416: hyp=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0048-417: ref=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0048-417: hyp=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0049-418: ref=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0049-418: hyp=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0050-419: ref=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'AND', 'AGE'] +3575-170457-0050-419: hyp=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'IN', 'AGE'] +3575-170457-0051-420: ref=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0051-420: hyp=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0052-421: ref=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0052-421: hyp=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0053-422: ref=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'THE', 'JEALOUS', 'VIGILANCE', 'OF', 
'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0053-422: hyp=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'A', 'JEALOUS', 'VIGILANCE', 'OF', 'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0054-423: ref=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0054-423: hyp=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0055-424: ref=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0055-424: hyp=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0056-425: ref=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3575-170457-0056-425: hyp=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3729-6852-0000-1660: ref=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SILVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0000-1660: hyp=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SYLVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0001-1661: ref=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 'AN', 'INSECT'] +3729-6852-0001-1661: hyp=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 'AN', 'INSECT'] +3729-6852-0002-1662: ref=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBE', 'CONTI', 'AND', 'I', 'HAD', 'OCCASION', 'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0002-1662: hyp=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBEY', 'KANTI', 'AND', 'I', 'HAD', 'OCCASION', 'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0003-1663: ref=['MADAM', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCEVRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 
'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SCEURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0003-1663: hyp=['MADAME', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCAVRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SKURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0004-1664: ref=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERYONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0004-1664: hyp=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERY', 'ONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0005-1665: ref=['SILVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARIVAUX', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0005-1665: hyp=['SYLVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARY', 'VO', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0006-1666: ref=['SILVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 'BY', 'THEIR', 'VIRTUE'] +3729-6852-0006-1666: hyp=['SYLVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 'BY', 'THEIR', 'VIRTUE'] +3729-6852-0007-1667: ref=['TWO', 'YEARS', 
'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARIVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0007-1667: hyp=['TWO', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARAVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0008-1668: ref=['SHE', 'WAS', 'HONOURABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SAUVEUR', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTAIN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'THE', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0008-1668: hyp=['SHE', 'WAS', 'HONORABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SEVER', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTIAN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'A', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0009-1669: ref=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SILVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SILVIA', 'DIES'] +3729-6852-0009-1669: hyp=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SYLVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SYLVIA', 'DIES'] +3729-6852-0010-1670: ref=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0010-1670: hyp=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0011-1671: ref=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0011-1671: hyp=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0012-1672: ref=['I', 'SHALL', 'CALL', 'YOU', 'ESPRIT'] +3729-6852-0012-1672: hyp=['I', 'SHALL', 'CALL', 'YOU', 'A', 'SPREE'] +3729-6852-0013-1673: ref=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0013-1673: hyp=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0014-1674: ref=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 
'SIR'] +3729-6852-0014-1674: hyp=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 'SIR'] +3729-6852-0015-1675: ref=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0015-1675: hyp=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0016-1676: ref=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'ENQUIRIES'] +3729-6852-0016-1676: hyp=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'INQUIRIES'] +3729-6852-0017-1677: ref=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOU', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0017-1677: hyp=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOUS', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0018-1678: ref=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'ENQUIRE', 'MY', 'WISHES'] +3729-6852-0018-1678: hyp=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'INQUIRE', 'MY', 'WISHES'] +3729-6852-0019-1679: ref=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0019-1679: hyp=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0020-1680: ref=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'OF', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0020-1680: hyp=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0021-1681: ref=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0021-1681: hyp=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0022-1682: ref=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 'WHY'] +3729-6852-0022-1682: hyp=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 'WHY'] +3729-6852-0023-1683: ref=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0023-1683: hyp=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 
'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0024-1684: ref=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0024-1684: hyp=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0025-1685: ref=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0025-1685: hyp=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0026-1686: ref=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0026-1686: hyp=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0027-1687: ref=['THAT', 'IS', 'TRUE', 'BADAUDERIE'] +3729-6852-0027-1687: hyp=['THAT', 'IS', 'TRUE', "BADR'D", 'GREE'] +3729-6852-0028-1688: ref=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0028-1688: hyp=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0029-1689: ref=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'CIVET', 'CAT'] +3729-6852-0029-1689: hyp=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'SAVEETTE', 'CAT'] +3729-6852-0030-1690: ref=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0030-1690: hyp=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0031-1691: ref=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0031-1691: hyp=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0032-1692: ref=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0032-1692: hyp=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0033-1693: ref=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 'OR', 'A', 'FALSE', 'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0033-1693: hyp=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 'OR', 'A', 'FALSE', 
'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0034-1694: ref=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0034-1694: hyp=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0035-1695: ref=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0035-1695: hyp=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0036-1696: ref=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LE', 'ROI'] +3729-6852-0036-1696: hyp=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LAURY'] +3729-6852-0037-1697: ref=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0037-1697: hyp=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0038-1698: ref=['WHAT', 'SIR', 'I', 'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0038-1698: hyp=['WHAT', 'SIR', 'I', 'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0039-1699: ref=['HE', 'HIMSELF', 'RECITED', 'THE', 'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0039-1699: hyp=['HE', 'HIMSELF', 'RECITED', 'THE', 'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0040-1700: ref=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 
'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0040-1700: hyp=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0041-1701: ref=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0041-1701: hyp=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0042-1702: ref=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0042-1702: hyp=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0043-1703: ref=['I', 'RESIDE', 'IN', 'THE', 'MARAIS', 'RUE', 'DE', 'DOUZE', 'PORTES'] +3729-6852-0043-1703: hyp=['I', 'RESIDE', 'IN', 'THE', 'MARAY', 'GRUE', 'DE', 'DUSPORT'] +3729-6852-0044-1704: ref=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0044-1704: hyp=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0045-1705: ref=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WAS', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANYONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0045-1705: hyp=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WITH', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANY', 'ONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0046-1706: ref=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 
'ACCOUNTS'] +3729-6852-0046-1706: hyp=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 'ACCOUNTS'] +4077-13751-0000-1258: ref=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMALLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0000-1258: hyp=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMERLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0001-1259: ref=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0001-1259: hyp=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0002-1260: ref=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TODAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0002-1260: hyp=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TO', 'DAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0003-1261: ref=['IN', 'PLACE', 'OF', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0003-1261: hyp=['IN', 'PLACE', 'HAVE', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0004-1262: ref=['THE', 'PRACTISE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0004-1262: hyp=['THE', 'PRACTICE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0005-1263: ref=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEEDS', 'HAS', 'ATTAINED', 'THE', 'PROPORTIONS', 
'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'AN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FOR', 'TRUTH'] +4077-13751-0005-1263: hyp=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEATS', 'HAS', 'ATTAINED', 'THAT', 'PROPORTIONS', 'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'IN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FIR', 'TRUTH'] +4077-13751-0006-1264: ref=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELYTING', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THERE', 'ESTABLISHED'] +4077-13751-0006-1264: hyp=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELY', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THERE', 'ESTABLISHED'] +4077-13751-0007-1265: ref=['THE', 'BOOK', 'OF', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0007-1265: hyp=['THE', 'BOOK', 'O', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0008-1266: ref=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GENERALLY', 'REGARDED', 'THE', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'OF', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 'OWN', 'TRADITIONS'] +4077-13751-0008-1266: hyp=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GERALLY', 'REGARDED', 'THEIR', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'O', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 'OWN', 'TRADITIONS'] +4077-13751-0009-1267: ref=['THE', 'FIRST', 'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'KIRTLAND', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] +4077-13751-0009-1267: hyp=['THE', 'FIRST', 'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'CURTLIN', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] 
+4077-13751-0010-1268: ref=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0010-1268: hyp=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0011-1269: ref=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTER', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIANS'] +4077-13751-0011-1269: hyp=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTRE', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIENS'] +4077-13751-0012-1270: ref=['THE', 'LIEUTENANT', 'GOVERNOR', 'LILBURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFESTED', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0012-1270: hyp=['THE', 'LIEUTENANT', 'GOVERNOR', 'LITTLE', 'BURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFEST', 'HIS', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0013-1271: ref=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0013-1271: hyp=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0014-1272: ref=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CALDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0014-1272: hyp=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CAULDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0015-1273: ref=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0015-1273: hyp=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0016-1274: ref=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGNED', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 
'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0016-1274: hyp=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGN', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0017-1275: ref=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0017-1275: hyp=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0018-1276: ref=['AMERICAN', 'SCHOOL', 'BOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PAPAL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'VAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0018-1276: hyp=['AMERICAN', 'SCHOOLBOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PEPPEL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'FAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0019-1277: ref=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0019-1277: hyp=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0020-1278: ref=['AS', 'A', 'SAMPLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0020-1278: hyp=['AS', 'THE', 'SABLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCEY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0021-1279: ref=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] +4077-13751-0021-1279: hyp=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] 
+4077-13754-0000-1241: ref=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0000-1241: hyp=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0001-1242: ref=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0001-1242: hyp=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0002-1243: ref=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'BUCHANAN', 'ORDERED', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'INTERESTS', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0002-1243: hyp=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'YOU', 'CANNOT', 'ORDERED', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'ENTRANCE', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0003-1244: ref=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREATER', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0003-1244: hyp=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREATER', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0004-1245: ref=['ALREADY', 'A', 'NORTH', 'AND', 'A', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'A', 'WEST'] +4077-13754-0004-1245: hyp=['ALREADY', 'A', 'NORTH', 'AND', 'THE', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'WEST'] +4077-13754-0005-1246: ref=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 'TO', 'FURNISH', 'MEN', 'AND', 'ARMS', 'FOR', 'THEIR', "COUNTRY'S", 'NEED'] +4077-13754-0005-1246: hyp=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 'TO', 'FURNISH', 'MEN', 'IN', 'ARMS', 'FOR', 'THE', "COUNTRY'S", 'NEED'] +4077-13754-0006-1247: ref=['WHAT', 'THE', 'LATTER', 'DAY', 'SAINTS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTISE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0006-1247: hyp=['WHAT', 'THE', 'LATTER', 'DAY', 'SAYS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTICE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 
'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0007-1248: ref=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATE', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BROTHER', 'AND', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0007-1248: hyp=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATES', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BRETHREN', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0008-1249: ref=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'TO', 'FIND', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0008-1249: hyp=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'DEFINED', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0009-1250: ref=['AT', 'THE', 'INCEPTION', 'OF', 'PLURAL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTISE'] +4077-13754-0009-1250: hyp=['AT', 'THE', 'INCEPTION', 'OF', 'PEARL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTICE'] +4077-13754-0010-1251: ref=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'THE', 'PURPOSE', 'OF', 'SUPPRESSING', 'PLURAL', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 'DEAD', 'LETTER'] +4077-13754-0010-1251: hyp=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'A', 'PURPOSE', 'OF', 'SUPPRESSING', 'PLORO', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 'DEAD', 'LETTER'] +4077-13754-0011-1252: ref=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'IN', 'UTAH', 'WHO', 'WERE', 'NOT', 'MORMONS', 'NOR', 'LOVERS', 'OF', 'MORMONISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] +4077-13754-0011-1252: hyp=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'AND', 'NEW', 'TOP', 'WHO', 'WERE', 'NOT', 'MORE', "MEN'S", 'NOR', 'LOVERS', 'OF', 'WARMONISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 
'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] +4077-13754-0012-1253: ref=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'AYE', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0012-1253: hyp=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOUR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'I', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0013-1254: ref=['BEFORE', 'THIS', 'TRAVESTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'BROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MEET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCES', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0013-1254: hyp=['BEFORE', 'THIS', 'TRAVESTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'BROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCE', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0014-1255: ref=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0014-1255: hyp=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0015-1256: ref=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ESCHEATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', "COURT'S", 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 'PROPERTY'] +4077-13754-0015-1256: hyp=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ISTIATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', 'COURTS', 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 
'PROPERTY'] +4077-13754-0016-1257: ref=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4077-13754-0016-1257: hyp=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4446-2271-0000-1133: ref=['MAINHALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0000-1133: hyp=['MAIN', 'HALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0001-1134: ref=['HE', 'HAD', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0001-1134: hyp=['WE', 'NOT', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0002-1135: ref=["IT'S", 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0002-1135: hyp=['ITS', 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0003-1136: ref=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0003-1136: hyp=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0004-1137: ref=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAINHALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0004-1137: hyp=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAIN', 'HALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0005-1138: ref=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', "SHE'S", 'AT', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0005-1138: hyp=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', 'SHE', 'SAID', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0006-1139: ref=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0006-1139: hyp=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0007-1140: ref=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0007-1140: hyp=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0008-1141: ref=['IRENE', 'BURGOYNE', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0008-1141: hyp=['IRENE', 'WERE', 'GOING', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0009-1142: ref=['MAINHALL', 'VOUCHED', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 
'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0009-1142: hyp=['MEANHAVED', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0010-1143: ref=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0010-1143: hyp=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0011-1144: ref=['SIR', 'HARRY', 'TOWNE', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0011-1144: hyp=['SIR', 'HARRY', 'TOWN', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0012-1145: ref=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', "GIRL'S", 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0012-1145: hyp=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', "GIRL'S", 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0013-1146: ref=['DO', 'YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'A', 'BIT', 'CONSCIOUS', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0013-1146: hyp=['YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'OF', 'GOOD', 'CONSCIENCE', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0014-1147: ref=['WESTMERE', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0014-1147: hyp=['WESTMARE', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0015-1148: ref=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0015-1148: hyp=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0016-1149: ref=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'A', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0016-1149: hyp=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'THE', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0017-1150: ref=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THAT', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0017-1150: hyp=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THE', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0018-1151: ref=['SHE', 'CONSIDERED', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THOUGH', 'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0018-1151: hyp=['SHE', 'CONSIDERED', 'FOR', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THE', 'WAY', 
'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0019-1152: ref=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0019-1152: hyp=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0020-1153: ref=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0020-1153: hyp=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0021-1154: ref=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0021-1154: hyp=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0022-1155: ref=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SINCE'] +4446-2271-0022-1155: hyp=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SEN'] +4446-2271-0023-1156: ref=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0023-1156: hyp=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0024-1157: ref=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2271-0024-1157: hyp=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2273-0000-1158: ref=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0000-1158: hyp=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0001-1159: ref=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0001-1159: hyp=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0002-1160: ref=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0002-1160: hyp=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0003-1161: ref=['WHEN', 'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0003-1161: hyp=['WHEN', 'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0004-1162: ref=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 'ENGLISH', 'COOKERY'] +4446-2273-0004-1162: hyp=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 'ENGLISH', 'COOKERY'] +4446-2273-0005-1163: ref=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 
'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0005-1163: hyp=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0006-1164: ref=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VILLA', "D'ESTE", 'YOU', 'SEE'] +4446-2273-0006-1164: hyp=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VILLIDESA', 'YOU', 'SEE'] +4446-2273-0007-1165: ref=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAINHALL'] +4446-2273-0007-1165: hyp=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAIN', 'HALL'] +4446-2273-0008-1166: ref=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0008-1166: hyp=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0009-1167: ref=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0009-1167: hyp=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0010-1168: ref=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0010-1168: hyp=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0011-1169: ref=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0011-1169: hyp=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0012-1170: ref=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0012-1170: hyp=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0013-1171: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0013-1171: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0014-1172: ref=['THERE', 'ARE', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] +4446-2273-0014-1172: hyp=['THERE', 'ARE', 'A', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] +4446-2273-0015-1173: ref=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0015-1173: hyp=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0016-1174: ref=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 'AND', 'HER', 'BLUE', 'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BAINS', 'DE', 'MER'] +4446-2273-0016-1174: hyp=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 
'AND', 'HER', 'BLUE', 'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BANDERE'] +4446-2273-0017-1175: ref=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0017-1175: hyp=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0018-1176: ref=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0018-1176: hyp=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0019-1177: ref=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0019-1177: hyp=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0020-1178: ref=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0020-1178: hyp=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0021-1179: ref=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANCS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0021-1179: hyp=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANKS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0022-1180: ref=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0022-1180: hyp=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0023-1181: ref=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0023-1181: hyp=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0024-1182: ref=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0024-1182: hyp=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0025-1183: ref=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0025-1183: hyp=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0026-1184: ref=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] +4446-2273-0026-1184: hyp=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] +4446-2273-0027-1185: ref=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0027-1185: hyp=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0028-1186: ref=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0028-1186: hyp=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0029-1187: ref=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 
'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0029-1187: hyp=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0030-1188: ref=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0030-1188: hyp=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0031-1189: ref=['THERE', 'JUST', 'IN', 'FRONT'] +4446-2273-0031-1189: hyp=['THERE', 'JUST', 'IN', 'FRONT'] +4446-2273-0032-1190: ref=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0032-1190: hyp=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0033-1191: ref=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPOKE'] +4446-2273-0033-1191: hyp=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPOKE'] +4446-2273-0034-1192: ref=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0034-1192: hyp=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0035-1193: ref=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0035-1193: hyp=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0036-1194: ref=['ALEXANDER', 'UNCLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2273-0036-1194: hyp=['ALEXANDER', 'CLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2275-0000-1195: ref=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'UP', 'THE', 'MERSEY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0000-1195: hyp=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'OF', 'THE', 'MERCY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0001-1196: ref=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0001-1196: hyp=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0002-1197: ref=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0002-1197: hyp=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0003-1198: ref=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 'HE', 'ENTERED'] +4446-2275-0003-1198: hyp=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 'HE', 'ENTERED'] +4446-2275-0004-1199: ref=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0004-1199: hyp=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0005-1200: ref=['I', 'FELT', 'IT', 'IN', 'MY', 
'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0005-1200: hyp=['I', 'FELT', 'IT', 'IN', 'MY', 'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0006-1201: ref=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0006-1201: hyp=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0007-1202: ref=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0007-1202: hyp=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0008-1203: ref=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0008-1203: hyp=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0009-1204: ref=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0009-1204: hyp=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0010-1205: ref=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0010-1205: hyp=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0011-1206: ref=['BARTLEY', 'BENT', 'LOWER', 'OVER', 'THE', 'FIRE'] +4446-2275-0011-1206: hyp=['BARTLEY', 'BENT', 'LOWERED', 'OVER', 'THE', 'FIRE'] +4446-2275-0012-1207: ref=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0012-1207: hyp=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0013-1208: ref=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0013-1208: hyp=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0014-1209: ref=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0014-1209: hyp=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0015-1210: ref=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 'HEAVY'] +4446-2275-0015-1210: hyp=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 'HEAVY'] +4446-2275-0016-1211: ref=['HILDA', 'WATCHED', 'HIM', 'FROM', 'HER', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0016-1211: hyp=['HILDA', 'WATCHED', 'HIM', 'FROM', 'THE', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0017-1212: ref=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] +4446-2275-0017-1212: hyp=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] 
+4446-2275-0018-1213: ref=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0018-1213: hyp=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0019-1214: ref=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0019-1214: hyp=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0020-1215: ref=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0020-1215: hyp=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0021-1216: ref=["HILDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0021-1216: hyp=["HELDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0022-1217: ref=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0022-1217: hyp=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0023-1218: ref=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0023-1218: hyp=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0024-1219: ref=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0024-1219: hyp=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0025-1220: ref=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0025-1220: hyp=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0026-1221: ref=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0026-1221: hyp=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0027-1222: ref=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0027-1222: hyp=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0028-1223: ref=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0028-1223: hyp=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0029-1224: ref=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0029-1224: hyp=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0030-1225: ref=['YES', 'HILDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0030-1225: hyp=['YES', 'HELDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0031-1226: ref=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0031-1226: hyp=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0032-1227: ref=['BUT', 'I', "DIDN'T", 'KNOW', "YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0032-1227: hyp=['BUT', 'I', "DIDN'T", 'KNOW', "YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0033-1228: ref=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 
'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0033-1228: hyp=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0034-1229: ref=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0034-1229: hyp=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0035-1230: ref=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0035-1230: hyp=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0036-1231: ref=['HE', 'TOOK', 'HER', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0036-1231: hyp=['HE', 'TOOK', 'A', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0037-1232: ref=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0037-1232: hyp=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0038-1233: ref=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0038-1233: hyp=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0039-1234: ref=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0039-1234: hyp=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0040-1235: ref=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0040-1235: hyp=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0041-1236: ref=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0041-1236: hyp=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0042-1237: ref=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0042-1237: hyp=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0043-1238: ref=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0043-1238: hyp=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0044-1239: ref=["DON'T", 'CRY', "DON'T", 'CRY', 'HE', 'WHISPERED'] +4446-2275-0044-1239: hyp=['A', 'TOLL', 'CRY', "DON'T", 'CRY', 'HE', 'WHISPERED'] +4446-2275-0045-1240: ref=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TONIGHT'] +4446-2275-0045-1240: hyp=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TO', 'NIGHT'] +4507-16021-0000-1469: ref=['CHAPTER', 'ONE', 'ORIGIN'] +4507-16021-0000-1469: hyp=['CHAPTER', 'ONE', 'ORIGIN'] +4507-16021-0001-1470: ref=['IT', 'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEGRE', 'FOR', 'WHICH', 'READ', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRENNE', 'FOR', 'WHICH', 'READ', 'HUNGER'] +4507-16021-0001-1470: hyp=['IT', 
'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEG', 'FOR', 'WHICH', 'RED', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRIN', 'FOR', 'WHICH', 'RED', 'HUNGER'] +4507-16021-0002-1471: ref=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0002-1471: hyp=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0003-1472: ref=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0003-1472: hyp=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0004-1473: ref=['WHAT', 'IS', 'SLANG'] +4507-16021-0004-1473: hyp=['WHAT', 'IS', 'SLANG'] +4507-16021-0005-1474: ref=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0005-1474: hyp=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0006-1475: ref=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0006-1475: hyp=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0007-1476: ref=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0007-1476: hyp=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0008-1477: ref=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0008-1477: hyp=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0009-1478: ref=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'WHEN', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0009-1478: hyp=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'ONE', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0010-1479: ref=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'AND', 'FULFILLED', 'MERITS'] +4507-16021-0010-1479: hyp=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'IN', 'FULFILLED', 'MERITS'] +4507-16021-0011-1480: ref=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0011-1480: hyp=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0012-1481: ref=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0012-1481: hyp=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0013-1482: ref=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0013-1482: hyp=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0014-1483: ref=['NOW', 'WHEN', 'HAS', 'HORROR', 'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0014-1483: hyp=['NO', 'WHEN', 'HAS', 'HORROR', 'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0015-1484: ref=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0015-1484: hyp=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0016-1485: ref=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 
'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TARANTULA', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'OH', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0016-1485: hyp=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TERENTIAL', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'O', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0017-1486: ref=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0017-1486: hyp=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0018-1487: ref=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0018-1487: hyp=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0019-1488: ref=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0019-1488: hyp=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0020-1489: ref=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0020-1489: hyp=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0021-1490: ref=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIRDRESSER', 'WHO', 'SAYS', 'MY', 'MEALYBACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0021-1490: hyp=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIR', 'DRESSER', 'WHO', 'SAYS', 'MY', 'MEALLY', 'BACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0022-1491: ref=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 'AS', 'WELL', 'AS', 'OF', 'THE', 'PRECIEUSES'] +4507-16021-0022-1491: hyp=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 'AS', 'WELL', 'AS', 'OF', 'THE', 'PURSUS'] +4507-16021-0023-1492: ref=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0023-1492: hyp=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0024-1493: ref=['ALGEBRA', 'MEDICINE', 'BOTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0024-1493: hyp=['ALGEBRA', 'MEDICINE', 'BARTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0025-1494: ref=['TO', 
'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0025-1494: hyp=['TO', 'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0026-1495: ref=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0026-1495: hyp=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0027-1496: ref=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0027-1496: hyp=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0028-1497: ref=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0028-1497: hyp=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0029-1498: ref=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0029-1498: hyp=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0030-1499: ref=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0030-1499: hyp=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0031-1500: ref=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICE', 'IS', 'PERMITTED'] +4507-16021-0031-1500: hyp=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICE', 'IS', 'PERMITTED'] +4507-16021-0032-1501: ref=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 'A', 'BROTHER', 'AND', 'AS', 'A', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WHERE', 'CRAWL', 'PELL', 'MELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'AND', 
'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0032-1501: hyp=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 'A', 'BROTHER', 'AND', 'AS', 'HE', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WERE', 'CRAWL', 'PELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'IN', 'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0033-1502: ref=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0033-1502: hyp=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0034-1503: ref=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0034-1503: hyp=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0035-1504: ref=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0035-1504: hyp=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0036-1505: ref=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0036-1505: hyp=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0037-1506: ref=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0037-1506: hyp=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0038-1507: ref=['IN', 'THIS', 'GUISE', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0038-1507: hyp=['IN', 'THE', 'SKIES', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0039-1508: ref=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0039-1508: hyp=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0040-1509: ref=['ONE', 'THINKS', 'ONE', 'HEARS', 'HYDRAS', 'TALKING'] +4507-16021-0040-1509: hyp=['ONE', 'THINKS', 'ONE', 'HEARS', 'HYDRAST', 'TALKING'] +4507-16021-0041-1510: ref=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0041-1510: hyp=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0042-1511: ref=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'IN', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 'AMALGAMATED', 'COMPOSE', 'SLANG'] +4507-16021-0042-1511: hyp=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'IN', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 'AMALGAMATED', 'COMPOSED', 'SLANG'] +4507-16021-0043-1512: ref=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 
'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0043-1512: hyp=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0044-1513: ref=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0044-1513: hyp=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0045-1514: ref=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0045-1514: hyp=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0046-1515: ref=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'OR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0046-1515: hyp=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'OR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0047-1516: ref=['YESTERDAY', 'YOU', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIATRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0047-1516: hyp=['YESTERDAY', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIETRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0048-1517: ref=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'IT', 'GOES', 'ON'] +4507-16021-0048-1517: hyp=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'TO', 'GOES', 'ON'] +4507-16021-0049-1518: ref=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0049-1518: hyp=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0050-1519: ref=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'HAPPY'] +4507-16021-0050-1519: hyp=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'A', 'HAPPY'] +4507-16021-0051-1520: ref=['IN', 'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0051-1520: hyp=['IN', 'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0052-1521: ref=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 'AND', 'THE', 'SHADY'] +4507-16021-0052-1521: hyp=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 'AND', 'THE', 'SHADY'] +4507-16021-0053-1522: ref=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 
'THE', 'OBJECT'] +4507-16021-0053-1522: hyp=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 'THE', 'OBJECT'] +4507-16021-0054-1523: ref=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0054-1523: hyp=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0055-1524: ref=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'LIGHT', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0055-1524: hyp=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'WRITE', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0056-1525: ref=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0056-1525: hyp=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0057-1526: ref=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0057-1526: hyp=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0058-1527: ref=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0058-1527: hyp=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0059-1528: ref=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4507-16021-0059-1528: hyp=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4970-29093-0000-2093: ref=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTOR', 'LIBRARY'] +4970-29093-0000-2093: hyp=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTRO', 'LIBRARY'] +4970-29093-0001-2094: ref=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0001-2094: hyp=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0002-2095: ref=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0002-2095: hyp=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0003-2096: ref=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILLFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 'DOCTOR', 'KANE'] +4970-29093-0003-2096: hyp=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 
'DOCTOR', 'KANE'] +4970-29093-0004-2097: ref=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0004-2097: hyp=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0005-2098: ref=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROWS', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BUL', 'BUL', 'SINGS', 'ON', 'THE', 'OFF', 'NIGHTS'] +4970-29093-0005-2098: hyp=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROVES', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BOL', 'SINGS', 'ON', 'THE', 'OPT', 'NIGHTS'] +4970-29093-0006-2099: ref=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'IN', 'THE', 'ANTE', 'ROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLE', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'FEES'] +4970-29093-0006-2099: hyp=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'AND', 'THE', 'ANTEROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLED', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'BEES'] +4970-29093-0007-2100: ref=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0007-2100: hyp=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0008-2101: ref=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 'THE', 'LADDER'] +4970-29093-0008-2101: hyp=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 'THE', 'LADDER'] +4970-29093-0009-2102: ref=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 
'NURSED', 'HIS', 'GENIUS'] +4970-29093-0009-2102: hyp=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 'NURSED', 'HIS', 'GENIUS'] +4970-29093-0010-2103: ref=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THEN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPEVINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0010-2103: hyp=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THAN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPE', 'VINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0011-2104: ref=['O', 'VERY', 'WELL', 'SAID', 'GRINGO', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0011-2104: hyp=['OH', 'VERY', 'WELL', 'SAID', 'GRENGO', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0012-2105: ref=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THANKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0012-2105: hyp=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THINKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0013-2106: ref=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0013-2106: hyp=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0014-2107: ref=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'CAN', 'GO', 'AS', 'ONE'] +4970-29093-0014-2107: hyp=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'COULD', 'GO', 'AS', 'ONE'] +4970-29093-0015-2108: ref=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 'FIGURES'] +4970-29093-0015-2108: hyp=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 'FIGURES'] +4970-29093-0016-2109: ref=['NO', 'ITS', 'NOT', 'TOO', 'SOON'] +4970-29093-0016-2109: hyp=['NO', "IT'S", 'NOT', 'TOO', 'SOON'] +4970-29093-0017-2110: ref=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0017-2110: hyp=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0018-2111: ref=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURE', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 
'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0018-2111: hyp=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURER', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0019-2112: ref=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0019-2112: hyp=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0020-2113: ref=['WHY', "IT'S", 'IN', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0020-2113: hyp=['WHY', "IT'S", 'A', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0021-2114: ref=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0021-2114: hyp=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0022-2115: ref=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0022-2115: hyp=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0023-2116: ref=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29093-0023-2116: hyp=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29095-0000-2054: ref=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0000-2054: hyp=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0001-2055: ref=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SANG', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0001-2055: hyp=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SAYING', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0002-2056: ref=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0002-2056: hyp=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0003-2057: ref=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 'FOR', 'THE', 'PIANO', 'AND', 'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0003-2057: hyp=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 'FOR', 'THE', 'PIANO', 'AND', 
'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0004-2058: ref=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0004-2058: hyp=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0005-2059: ref=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0005-2059: hyp=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0006-2060: ref=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0006-2060: hyp=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0007-2061: ref=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0007-2061: hyp=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0008-2062: ref=['MOTHER', "I'M", 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0008-2062: hyp=['MOTHER', 'I', 'AM', 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0009-2063: ref=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0009-2063: hyp=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0010-2064: ref=['THEE', 'STUDY', 'MEDICINE'] +4970-29095-0010-2064: hyp=['THE', 'STUDY', 'MEDICINE'] +4970-29095-0011-2065: ref=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0011-2065: hyp=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0012-2066: ref=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0012-2066: hyp=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0013-2067: ref=['I', 'WILL', 'PRACTICE', 'IT'] +4970-29095-0013-2067: hyp=['I', 'WILL', 'PRACTISE', 'IT'] +4970-29095-0014-2068: ref=['WHERE', 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0014-2068: hyp=["WHERE'S", 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0015-2069: ref=['IF', 'I', 'CAN', 'GET', 'PATIENTS'] +4970-29095-0015-2069: hyp=['IF', 'I', 'CAN', 'GET', 'PATIENCE'] +4970-29095-0016-2070: ref=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'INTENT', 'AND', 'FLUSHED', 'IT', 'WAS', 'OUT', 'NOW'] +4970-29095-0016-2070: hyp=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'AND', 'TENT', 'AND', 'FLUSHED', 'IT', 
'WAS', 'OUT', 'NOW'] +4970-29095-0017-2071: ref=['THE', 'SIGHT', 'SEERS', 'RETURNED', 'IN', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0017-2071: hyp=['THE', 'SIGHTSEERS', 'RETURNED', 'IN', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0018-2072: ref=['RUTH', 'ASKED', 'THE', 'ENTHUSIASTS', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MAUSOLEUM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0018-2072: hyp=['RUTH', 'ASKED', 'THE', 'ENTHUSIAST', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MUSOLEUM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0019-2073: ref=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0019-2073: hyp=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0020-2074: ref=['THERE', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIAN', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'POINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0020-2074: hyp=['THERE', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIA', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'BLINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0021-2075: ref=['BUT', 'NEITHER', 'SAINT', 'GIRARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MINT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESSED', 'THE', 'VISITORS', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0021-2075: hyp=['BUT', 'NEITHER', 'SAINT', 'GERARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MINT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESS', 'THE', 'VISITOR', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0022-2076: ref=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0022-2076: hyp=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0023-2077: ref=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THAT', 'DEMURE', 'PERSON'] +4970-29095-0023-2077: hyp=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THE', 'DEMURE', 'PERSON'] +4970-29095-0024-2078: ref=['IT', 'HAS', 'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'AT', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0024-2078: hyp=['IT', 'HAS', 'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0025-2079: ref=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0025-2079: hyp=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0026-2080: 
ref=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HEAR', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0026-2080: hyp=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HERE', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0027-2081: ref=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LINE', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0027-2081: hyp=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LIE', 'IN', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0028-2082: ref=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WEDNESDAY', 'MEETING', 'HUMPH', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0028-2082: hyp=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WIND', 'ZAY', 'MEETING', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0029-2083: ref=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICK', 'A', 'POOS'] +4970-29095-0029-2083: hyp=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICKAPOOS'] +4970-29095-0030-2084: ref=['FATHER', "THEE'S", 'UNJUST', 'TO', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0030-2084: hyp=['FATHER', 'THESE', 'UNJUST', 'TO', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0031-2085: ref=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'THEE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0031-2085: hyp=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'HE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0032-2086: ref=['BUT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0032-2086: hyp=['THAT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0033-2087: ref=['WHAT', 'A', 'BOX', 'WOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUT', 'IN', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 
'DISABILITIES'] +4970-29095-0033-2087: hyp=['WHAT', 'A', 'BOXWOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUTTING', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 'DISABILITIES'] +4970-29095-0034-2088: ref=['WHY', 'SHOULD', 'I', 'RUST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'INACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0034-2088: hyp=['WHY', 'SHOULD', 'I', 'REST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'AN', 'ACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0035-2089: ref=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0035-2089: hyp=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0036-2090: ref=['HAS', 'THEE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'THEE', 'WANTS'] +4970-29095-0036-2090: hyp=['HAS', 'THE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'OF', 'THEE', 'WANTS'] +4970-29095-0037-2091: ref=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0037-2091: hyp=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0038-2092: ref=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'A', 'WAY', 'FOR', 'HIM'] +4970-29095-0038-2092: hyp=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'AWAY', 'FOR', 'HIM'] +4992-23283-0000-2140: ref=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0000-2140: hyp=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0001-2141: ref=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0001-2141: hyp=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0002-2142: ref=['SAID', 'MISSUS', 'HORTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0002-2142: hyp=['SAID', 'MISSUS', 'WHARTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0003-2143: ref=['SO', 'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0003-2143: hyp=['SO', 'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0004-2144: ref=['AND', 'YET', 'YOU', 'MUST', 'OWN', 'HER', 'BEHAVIOUR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0004-2144: hyp=['AND', 'YET', 'YOU', 'MUST', 'OWN', 'HER', 'BEHAVIOR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0005-2145: ref=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 
'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0005-2145: hyp=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0006-2146: ref=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0006-2146: hyp=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0007-2147: ref=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0007-2147: hyp=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0008-2148: ref=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0008-2148: hyp=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0009-2149: ref=['OH', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSON', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'ENTRUSTING'] +4992-23283-0009-2149: hyp=['O', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSONAL', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'INTRUSTING'] +4992-23283-0010-2150: ref=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0010-2150: hyp=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0011-2151: ref=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0011-2151: hyp=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0012-2152: ref=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'A', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0012-2152: hyp=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'OF', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0013-2153: ref=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0013-2153: hyp=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0014-2154: ref=['WHAT', 'CAN', 
'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0014-2154: hyp=['WHAT', 'CAN', 'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0015-2155: ref=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0015-2155: hyp=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0016-2156: ref=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0016-2156: hyp=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0017-2157: ref=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MILITATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0017-2157: hyp=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MITIGATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0018-2158: ref=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0018-2158: hyp=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0019-2159: ref=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0019-2159: hyp=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0020-2160: ref=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-23283-0020-2160: hyp=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-41797-0000-2117: ref=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AN', 'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0000-2117: hyp=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AND', 'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0001-2118: ref=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'INTO', 'JAIL', 'AND', 'THE', 'MURDERERS', 'AND', 'THE', 'WIFE', 'BEATERS', "I'VE", 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 'PUNISHMENT', 'FOR', 'A', 'WIFE', 'BEATER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'O', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIREBUGS', "CAN'T", 'THINK', 'O', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'CENDENARIES', 'AN', 'THE', 'BREAKERS', 'O', 'THE', 'PEACE', 'AN', 'WHAT', 'NOT', 
'AN', 'YET', 'THE', 'LAW', 'HAS', 'NOTHIN', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HEN', 'LORD'] +4992-41797-0001-2118: hyp=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'AND', 'JAIL', 'THE', 'MURDERERS', 'IN', 'THE', 'WHITE', 'BEATERS', 'I', 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 'PUNISHMENT', 'FOR', 'A', 'WIFE', 'PETER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'OF', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIRE', 'BUGS', "CAN'T", 'THINK', 'OF', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'SENDIARIES', 'AND', 'THE', 'BREAKERS', 'OF', 'THE', 'PIECE', 'AND', 'WHAT', 'NOT', 'AND', 'YET', 'THE', 'LAW', 'HAS', 'NOTHING', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HANDLED'] +4992-41797-0002-2119: ref=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0002-2119: hyp=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0003-2120: ref=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0003-2120: hyp=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0004-2121: ref=['I', 'SWAN', 'TO', 'MAN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TIMES', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0004-2121: hyp=['I', 'SWAYING', 'TO', 'MAN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TUBS', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0005-2122: ref=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', "HE'D", 'OUGHTER', 'SENCE', 'HE', 'WAS', 'BORN'] +4992-41797-0005-2122: hyp=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', 'HE', 'ORDERS', 'SINCE', 'HE', 'WAS', 'BORN'] +4992-41797-0006-2123: ref=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMANDMENTS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0006-2123: hyp=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMANDS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0007-2124: ref=['HE', 'GIVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SO', 'T', 'HE', 'COULD', 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0007-2124: hyp=['HE', 'GAVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SEWED', 'HE', "COULDN'T", 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0008-2125: ref=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 'LORDS'] +4992-41797-0008-2125: hyp=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 'LORDS'] +4992-41797-0009-2126: ref=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'PH', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] +4992-41797-0009-2126: hyp=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'P', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] 
+4992-41797-0010-2127: ref=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0010-2127: hyp=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0011-2128: ref=['WHATEVER', 'APPEALED', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0011-2128: hyp=['WHATEVER', 'APPEAL', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0012-2129: ref=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THINGS'] +4992-41797-0012-2129: hyp=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THINGS'] +4992-41797-0013-2130: ref=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0013-2130: hyp=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0014-2131: ref=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0014-2131: hyp=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0015-2132: ref=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0015-2132: hyp=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0016-2133: ref=['THEY', "COULDN'T", 'RUN', 'NOR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0016-2133: hyp=['THEY', "COULDN'T", 'RUN', 'OR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0017-2134: ref=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0017-2134: hyp=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0018-2135: ref=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0018-2135: hyp=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0019-2136: ref=["OLIVE'S", 'MOURNFUL', 'BLACK', 'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0019-2136: hyp=['ALL', 'OF', 'HIS', 'MOURNFUL', 'BLACK', 
'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0020-2137: ref=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLAITS', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0020-2137: hyp=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLATES', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0021-2138: ref=["SHE'S", 'WONDERFUL', 'MORE', 'WONDERFUL', 'THAN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0021-2138: hyp=['SHE', 'IS', 'WONDERFUL', 'MORE', 'WONDERFUL', 'IN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWLS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0022-2139: ref=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41797-0022-2139: hyp=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41806-0000-2161: ref=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0000-2161: hyp=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0001-2162: ref=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0001-2162: hyp=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0002-2163: ref=['THEY', 'BEGAN', 'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NATTY', 'AND', 'RUFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LALLIE', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0002-2163: hyp=['THEY', 'BEGAN', 'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NANNIE', 'AND', 'RUFFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LILY', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0003-2164: ref=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] 
+4992-41806-0003-2164: hyp=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] +4992-41806-0004-2165: ref=['BURN', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0004-2165: hyp=['BURNE', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0005-2166: ref=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0005-2166: hyp=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0006-2167: ref=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORACE', 'TIBI', 'SPLENDET', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0006-2167: hyp=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORNS', 'TIBBY', 'SPLENDID', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0007-2168: ref=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0007-2168: hyp=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0008-2169: ref=['OLIVE', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0008-2169: hyp=['ALAP', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0009-2170: ref=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0009-2170: hyp=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0010-2171: ref=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0010-2171: hyp=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0011-2172: ref=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHERS', 'HELPED', 'SERVE', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0011-2172: hyp=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHER', 'SELF', 'SERVED', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0012-2173: ref=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 'GREEN', 'FELT'] +4992-41806-0012-2173: hyp=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 'GREEN', 'FIL'] +4992-41806-0013-2174: ref=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 
'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0013-2174: hyp=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0014-2175: ref=['THINKS', 'I', 'TO', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'OSH', 'POPHAM', "COULDN'T", 'MEND', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AN', "I'VE", 'SPENT', 'ELEVEN', "EVENIN'S", 'PUTTIN', 'HIM', 'TOGETHER'] +4992-41806-0014-2175: hyp=['THINK', 'SOUND', 'OF', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'ID', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AND', "I'VE", 'SPENT', 'ELEVEN', 'EVENINGS', 'PUTTING', 'HIM', 'TOGETHER'] +4992-41806-0015-2176: ref=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBAND', 'PRIVATELY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OVER', 'HIS', 'HEAD', 'MOST', 'OF', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0015-2176: hyp=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBA', 'PRIVATELY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OF', 'HIS', 'EDMOST', 'OF', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0016-2177: ref=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0016-2177: hyp=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0017-2178: ref=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 'CORN', 'EARS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CASTS', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +4992-41806-0017-2178: hyp=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 'CORNIERS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CAST', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +5105-28233-0000-1649: ref=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0000-1649: hyp=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0001-1650: ref=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] +5105-28233-0001-1650: hyp=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] 
+5105-28233-0002-1651: ref=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0002-1651: hyp=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0003-1652: ref=['WE', "DON'T", 'SPIN', 'TOPS', 'IS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0003-1652: hyp=['WE', "DON'T", 'SPEND', 'TOPS', 'AS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0004-1653: ref=['ONCE', 'IN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'INTRENCHMENT'] +5105-28233-0004-1653: hyp=['ONCE', 'AN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'ENTRENCHMENT'] +5105-28233-0005-1654: ref=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0005-1654: hyp=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0006-1655: ref=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MONTMARTRE'] +5105-28233-0006-1655: hyp=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MOUNT', 'MARTRE'] +5105-28233-0007-1656: ref=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARRONDISSEMENT', 'OF', 'PARIS', 'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRE', 'WITHOUT', 'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0007-1656: hyp=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARE', 'ON', 'DESSIMA', 'OF', 'PARIS', 'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRA', 'WITHOUT', 
'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0008-1657: ref=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0008-1657: hyp=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0009-1658: ref=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZOOF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONORS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0009-1658: hyp=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZEF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONOURS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0010-1659: ref=['UNLIKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28233-0010-1659: hyp=['I', 'MIKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28240-0000-1624: ref=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0000-1624: hyp=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0001-1625: ref=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 'THE', 'WATER'] +5105-28240-0001-1625: hyp=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 'THE', 'WATER'] +5105-28240-0002-1626: ref=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0002-1626: hyp=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0003-1627: ref=['SHE', 'IS', 'UNDER', 'SAIL', 'BUT', 'SHE', 'IS', 'COUNT', 
"TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0003-1627: hyp=['SHE', 'IS', 'UNDER', 'SALE', 'BUT', 'SHE', 'IS', 'COUNT', "TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0004-1628: ref=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0004-1628: hyp=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0005-1629: ref=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0005-1629: hyp=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0006-1630: ref=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0006-1630: hyp=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0007-1631: ref=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0007-1631: hyp=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0008-1632: ref=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'ENSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0008-1632: hyp=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'INSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0009-1633: ref=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0009-1633: hyp=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0010-1634: ref=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARDS', 'HIM'] +5105-28240-0010-1634: hyp=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARD', 'HIM'] +5105-28240-0011-1635: ref=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] +5105-28240-0011-1635: hyp=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] 
+5105-28240-0012-1636: ref=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0012-1636: hyp=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0013-1637: ref=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0013-1637: hyp=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0014-1638: ref=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0014-1638: hyp=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0015-1639: ref=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0015-1639: hyp=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'AND', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0016-1640: ref=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0016-1640: hyp=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0017-1641: ref=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0017-1641: hyp=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0018-1642: ref=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0018-1642: hyp=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0019-1643: ref=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'ROUND', 'THE', 'WORLD'] +5105-28240-0019-1643: hyp=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'AROUND', 'THE', 'WORLD'] +5105-28240-0020-1644: ref=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0020-1644: hyp=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0021-1645: ref=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] +5105-28240-0021-1645: hyp=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] +5105-28240-0022-1646: ref=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] +5105-28240-0022-1646: hyp=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] +5105-28240-0023-1647: ref=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 
'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0023-1647: hyp=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0024-1648: ref=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28240-0024-1648: hyp=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28241-0000-1604: ref=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0000-1604: hyp=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0001-1605: ref=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FAR', 'THE', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0001-1605: hyp=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FARTHER', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0002-1606: ref=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] +5105-28241-0002-1606: hyp=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] +5105-28241-0003-1607: ref=['STEAM', 'UP', 'AND', 'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARDS'] +5105-28241-0003-1607: hyp=['STEAM', 'UP', 'AND', 'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARDS'] +5105-28241-0004-1608: ref=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 
'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLES', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0004-1608: hyp=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLE', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0005-1609: ref=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0005-1609: hyp=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0006-1610: ref=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0006-1610: hyp=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0007-1611: ref=['THERE', 'IS', 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0007-1611: hyp=["THERE'S", 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0008-1612: ref=['THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ONTO', 'THE', 'SUN'] +5105-28241-0008-1612: hyp=['THAT', 'THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ON', 'TO', 'THE', 'SUN'] +5105-28241-0009-1613: ref=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0009-1613: hyp=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0010-1614: ref=['OCEAN', 'REIGNED', 'SUPREME'] +5105-28241-0010-1614: hyp=['OCEAN', 'RAINED', 'SUPREME'] +5105-28241-0011-1615: ref=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 'THOUGHTS', 'SPED', 'AWAY', 'TO', 'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0011-1615: hyp=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 'THOUGHTS', 'SPED', 'AWAY', 'TO', 'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0012-1616: ref=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 
'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0012-1616: hyp=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0013-1617: ref=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0013-1617: hyp=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0014-1618: ref=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0014-1618: hyp=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0015-1619: ref=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATER'] +5105-28241-0015-1619: hyp=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATRE'] +5105-28241-0016-1620: ref=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0016-1620: hyp=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0017-1621: ref=['AFTER', 'PONDERING', 'AWHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0017-1621: hyp=['AFTER', 'PONDERING', 'A', 'WHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0018-1622: ref=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 'TYPE'] +5105-28241-0018-1622: hyp=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 'TYPE'] +5105-28241-0019-1623: 
ref=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'IN', 'DISAPPOINTMENT', 'TOWARDS', 'THE', 'NORTH'] +5105-28241-0019-1623: hyp=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'AND', 'DISAPPOINTMENT', 'TOWARD', 'THE', 'NORTH'] +5142-33396-0000-898: ref=['AT', 'ANOTHER', 'TIME', 'HARALD', 'ASKED'] +5142-33396-0000-898: hyp=['AT', 'ANOTHER', 'TIME', 'HAROLD', 'ASKED'] +5142-33396-0001-899: ref=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0001-899: hyp=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0002-900: ref=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0002-900: hyp=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0003-901: ref=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0003-901: hyp=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0004-902: ref=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0004-902: hyp=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0005-903: ref=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0005-903: hyp=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0006-904: ref=['I', 'MADE', 'HER', 'FOR', 'ONLY', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0006-904: hyp=['I', 'MADE', 'HER', 'FALLING', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0007-905: ref=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0007-905: hyp=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0008-906: ref=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0008-906: hyp=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0009-907: ref=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0009-907: hyp=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0010-908: ref=['IN', 'THE', 'STERN', 'I', 'CURVED', 'THE', 'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0010-908: hyp=['IN', 'THE', 'STERN', 'I', 'CARVED', 'THE', 'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0011-909: ref=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] +5142-33396-0011-909: hyp=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] +5142-33396-0012-910: ref=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', 'WILL', 'WINTER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0012-910: hyp=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', "WE'LL", 'WINNER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0013-911: 
ref=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MEN', 'SAID'] +5142-33396-0013-911: hyp=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MAN', 'SAID'] +5142-33396-0014-912: ref=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0014-912: hyp=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0015-913: ref=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0015-913: hyp=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0016-914: ref=['SO', 'WE', 'HARRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0016-914: hyp=['SO', 'WE', 'HARRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0017-915: ref=['WE', 'ATE', 'AT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0017-915: hyp=['WE', 'ATE', 'IT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0018-916: ref=['MY', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOLD'] +5142-33396-0018-916: hyp=['I', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOLD'] +5142-33396-0019-917: ref=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0019-917: hyp=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0020-918: ref=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', 'SHIP', 'SMELLS', 'OF', 'FROLIC'] +5142-33396-0020-918: hyp=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', "SHIP'S", 'MILLS', 'OF', 'FROLIC'] +5142-33396-0021-919: ref=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0021-919: hyp=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0022-920: ref=['WHAT', 'OF', 'THE', 'FARM', 'OLAF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0022-920: hyp=['WHAT', 'IS', 'THE', 'FARM', 'OLOFF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0023-921: ref=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0023-921: hyp=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0024-922: ref=['I', 'STOOD', 'WITH', 'MY', 'BACK', 'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0024-922: hyp=['I', 'STOOD', 'WITH', 'MY', 'BACK', 'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0025-923: ref=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 'FIRE'] +5142-33396-0025-923: hyp=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 'FIRE'] +5142-33396-0026-924: ref=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOST'] +5142-33396-0026-924: hyp=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOSE'] +5142-33396-0027-925: ref=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'HAD', 'NOT', 'EXPECTED', 'US'] +5142-33396-0027-925: hyp=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'IS', 'NOT', 
'EXPECTED', 'US'] +5142-33396-0028-926: ref=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0028-926: hyp=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0029-927: ref=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0029-927: hyp=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0030-928: ref=['THE', 'THRALLS', 'WERE', 'BRINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0030-928: hyp=['THE', 'THRALLS', 'WERE', 'RINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0031-929: ref=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0031-929: hyp=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0032-930: ref=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASKET', 'LOADS', 'OF', 'BREAD', 'INTO', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0032-930: hyp=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASCULADES', 'OF', 'BREAD', 'AND', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0033-931: ref=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0033-931: hyp=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0034-932: ref=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SENT', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0034-932: hyp=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SET', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0035-933: ref=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0035-933: hyp=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0036-934: ref=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALONE'] +5142-33396-0036-934: hyp=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALONE'] +5142-33396-0037-935: ref=['HAKON', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0037-935: hyp=['HOCKIN', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0038-936: ref=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 'SLEEPING'] +5142-33396-0038-936: hyp=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 
'SLEEPING'] +5142-33396-0039-937: ref=['I', 'NAMED', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0039-937: hyp=['I', 'NAME', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0040-938: ref=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0040-938: hyp=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0041-939: ref=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0041-939: hyp=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0042-940: ref=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0042-940: hyp=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0043-941: ref=['THEIR', 'EYES', 'DANCED', 'BIG', 'THORLEIF', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0043-941: hyp=['THEIR', 'EYES', 'DANCED', 'BIG', 'TORE', 'LEAF', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0044-942: ref=['I', 'AM', 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0044-942: hyp=["I'M", 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0045-943: ref=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0045-943: hyp=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0046-944: ref=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0046-944: hyp=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0047-945: ref=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0047-945: hyp=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0048-946: ref=['BY', 'THE', 'HAMMER', 'OF', 'THOR', 'SHOUTED', 'GRIM', 'HERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0048-946: hyp=['BY', 'THE', 'HAMMER', 'A', 'THOR', 'SHOUTED', 'GRIM', 'THERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0049-947: ref=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0049-947: hyp=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0050-948: ref=['MAY', 'YOU', 'DRINK', "HEART'S", 'EASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] +5142-33396-0050-948: hyp=['MAY', 'YOU', 'DRINK', 'HEARTSEASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] +5142-33396-0051-949: ref=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIF', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 'DRINK', 'WITH', 'YOU', 'SOMETIME', 'IN', 'VALHALLA'] +5142-33396-0051-949: hyp=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIFT', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 'DRINK', 'WITH', 'YOU', 'SOME', 'TIME', 'IN', 'VALHALLA'] +5142-33396-0052-950: ref=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIF', 'THE', 'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0052-950: hyp=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIF', 'THE', 
'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'AND', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0053-951: ref=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO', 'HIM'] +5142-33396-0053-951: hyp=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO', 'HIM'] +5142-33396-0054-952: ref=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0054-952: hyp=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0055-953: ref=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0055-953: hyp=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0056-954: ref=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0056-954: hyp=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0057-955: ref=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0057-955: hyp=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0058-956: ref=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'SCOWLED', 'AT', 'ME'] +5142-33396-0058-956: hyp=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'HE', 'SCOWLED', 'AT', 'ME'] +5142-33396-0059-957: ref=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0059-957: hyp=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0060-958: ref=['TAKE', 'HIM', 'OUT', 'THORKEL', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0060-958: hyp=['TAKE', 'HIM', 'OUT', 'TORCOLE', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0061-959: ref=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0061-959: hyp=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0062-960: ref=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0062-960: hyp=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0063-961: ref=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0063-961: hyp=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0064-962: ref=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'THEN', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMILED'] +5142-33396-0064-962: hyp=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'AND', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMILED'] +5142-33396-0065-963: ref=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'THORKEL', 'WELL', 'LET', 'HIM', 'GO', 'THORKEL'] +5142-33396-0065-963: hyp=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'TORCOAL', 'WELL', 'LET', 'HIM', 'GO', 'TORCOAL'] +5142-33396-0066-964: ref=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 
'FROWNING'] +5142-33396-0066-964: hyp=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 'FROWNING'] +5142-33396-0067-965: ref=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', 'WE', 'HAVE', 'CAUGHT', 'YOU', 'WE', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0067-965: hyp=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', "WE'VE", 'CAUGHT', 'YOU', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0068-966: ref=['SO', 'I', 'LIVED', 'AND', 'NOW', 'AM', 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-33396-0068-966: hyp=['SO', 'I', 'LIVED', 'AND', 'NOW', "I'M", 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-36377-0000-870: ref=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DUMAS', 'THE', 'ELDER'] +5142-36377-0000-870: hyp=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DE', 'MAU', 'THE', 'ELDER'] +5142-36377-0001-871: ref=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0001-871: hyp=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0002-872: ref=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0002-872: hyp=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0003-873: ref=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0003-873: hyp=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0004-874: ref=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0004-874: hyp=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0005-875: ref=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0005-875: hyp=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0006-876: ref=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0006-876: hyp=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0007-877: ref=['A', 'LITTLE', 'CRACKED', 'THAT', 'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0007-877: hyp=['A', 'LITTLE', 'CRACKED', 'THAT', 
'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0008-878: ref=['MISTER', 'MEADOWCROFT', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEWCOMER', 'TO', 'ME', 'WITH', 'A', 'SIDE', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0008-878: hyp=['MISTER', 'MEDICROFT', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEW', 'COMER', 'TO', 'ME', 'WITH', 'A', 'SIGH', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0009-879: ref=['PHILIP', 'LEFRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'JAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0009-879: hyp=['PHILIP', 'LE', 'FRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'YAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0010-880: ref=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGE', 'OF', 'SCENE'] +5142-36377-0010-880: hyp=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGES', 'SCENE'] +5142-36377-0011-881: ref=['MISTER', 'JAGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0011-881: hyp=['THIS', 'GIAGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0012-882: ref=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISTER', 'JAGO', 'SIT', 'TOGETHER'] +5142-36377-0012-882: hyp=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISS', 'GIAGO', 'SIT', 'TOGETHER'] +5142-36377-0013-883: ref=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'JAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'TO', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0013-883: hyp=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'YAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0014-884: ref=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0014-884: hyp=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 
'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0015-885: ref=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0015-885: hyp=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0016-886: ref=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0016-886: hyp=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0017-887: ref=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0017-887: hyp=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0018-888: ref=['HE', 'LOOKED', 'UP', 'AT', 'NAOMI', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0018-888: hyp=['HE', 'LOOKED', 'UP', 'AND', 'NOW', 'ON', 'ME', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0019-889: ref=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0019-889: hyp=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0020-890: ref=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0020-890: hyp=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0021-891: ref=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'A', 'SENSE', 'OF', 'PROPRIETY', 'AND', 'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'IN', 'NAOMI', 'AND', 'MY', 'OTHER', 'INTEREST', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 'SUPPER'] +5142-36377-0021-891: hyp=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'THE', 'SENSE', 'OF', 'PROPRIETY', 'AND', 'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'IN', 'NAY', 'OWE', 'ME', 'AND', 'MY', 'OTHER', 'INTEREST', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 'SUPPER'] +5142-36377-0022-892: ref=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 
'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0022-892: hyp=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0023-893: ref=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHN', 'JAGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0023-893: hyp=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHNNIEAUGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0024-894: ref=['NAOMI', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0024-894: hyp=['THEY', 'ONLY', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0025-895: ref=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WITH', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36377-0025-895: hyp=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WHEN', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36586-0000-967: ref=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0000-967: hyp=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0001-968: ref=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0001-968: hyp=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0002-969: ref=['THE', 'VARIABILITY', 'OF', 'MULTIPLE', 'PARTS'] +5142-36586-0002-969: hyp=['THE', 'VARIABILITY', 'OF', 'MULTIPLE', 'PARTS'] +5142-36586-0003-970: ref=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0003-970: hyp=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0004-971: ref=['EFFECTS', 'OF', 'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36586-0004-971: hyp=['EFFECTS', 'OF', 'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36600-0000-896: ref=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 'MAN'] +5142-36600-0000-896: hyp=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 'MAN'] +5142-36600-0001-897: ref=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'AS', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 
'WHETHER', 'SUCH', 'DIFFERENCES', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5142-36600-0001-897: hyp=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'A', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 'WHETHER', 'SUCH', 'DIFFERENCE', 'IS', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5639-40744-0000-137: ref=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THEY', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THEIR', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0000-137: hyp=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THERE', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THE', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0001-138: ref=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HIDALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFAL', 'HIS', 'FAMILY'] +5639-40744-0001-138: hyp=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HAD', 'ALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFALL', 'HIS', 'FAMILY'] +5639-40744-0002-139: ref=['RODOLFO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0002-139: hyp=['RUDOLPHO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0003-140: ref=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 'HIS', 'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RODOLFO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARASITES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'LEOCADIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0003-140: hyp=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 'HIS', 
'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RUDOLPHO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARRICIDES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'THE', 'OCCAIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0004-141: ref=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0004-141: hyp=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0005-142: ref=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0005-142: hyp=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0006-143: ref=['RODOLFO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'AND', "LEOCADIA'S", 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0006-143: hyp=['RODOLPHO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'A', 'LOCATEUS', 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0007-144: ref=['MEANWHILE', 'RODOLFO', 'HAD', 'LEOCADIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0007-144: hyp=['MEANWHILE', 'RUDOLPHO', 'HAD', 'LOCALIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0008-145: ref=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0008-145: hyp=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0009-146: ref=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0009-146: hyp=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0010-147: ref=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] +5639-40744-0010-147: hyp=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] +5639-40744-0011-148: ref=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0011-148: hyp=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0012-149: ref=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOUR', 'OF', 'SOME', 'DAMASK', 'HANGINGS', 'IN', 'THE', 'ROOM'] +5639-40744-0012-149: hyp=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOR', 'OF', 'SOME', 
'DAMASK', 'HANGING', 'IN', 'THE', 'ROOM'] +5639-40744-0013-150: ref=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'RATHER', 'THAN', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0013-150: hyp=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'THE', 'RATHER', 'THAT', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0014-151: ref=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'SHE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0014-151: hyp=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'HE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0015-152: ref=['THIS', 'PERSON', 'WAS', 'RODOLFO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOT', 'THINKING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0015-152: hyp=['THIS', 'PERSON', 'WAS', 'RIDOLPHO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOTHING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0016-153: ref=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'HER', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0016-153: hyp=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'A', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0017-154: ref=['CHOKING', 'WITH', 'EMOTION', 'LEOCADI', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0017-154: hyp=['CHOKING', 'WITH', 'EMOTION', 'LUCADIA', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0018-155: ref=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WERE', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAS', 'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0018-155: hyp=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WOULD', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAD', 'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0019-156: ref=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 
'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0019-156: hyp=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0020-157: ref=['THUS', 'DID', 'THIS', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'HER', 'FEELINGS'] +5639-40744-0020-157: hyp=['THUS', 'DID', 'THE', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'A', 'FEELINGS'] +5639-40744-0021-158: ref=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0021-158: hyp=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0022-159: ref=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AND', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0022-159: hyp=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AS', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0023-160: ref=['WHEN', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSINGS', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0023-160: hyp=['AND', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSING', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0024-161: ref=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 'HORSEMEN'] +5639-40744-0024-161: hyp=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 
'HORSEMEN'] +5639-40744-0025-162: ref=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0025-162: hyp=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0026-163: ref=['LUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DURING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0026-163: hyp=['LOUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DREWING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0027-164: ref=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'AS', 'NATURAL', 'AS', 'CRUELTY', 'IS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LEOCADIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0027-164: hyp=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'A', 'NATURAL', 'AS', 'CRUELTY', 'AS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LOCATIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0028-165: ref=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DONA', 'ESTAFANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THAT', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0028-165: hyp=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DORNESTE', 'FANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THE', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0029-166: ref=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0029-166: hyp=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0030-167: 
ref=['JUST', 'THEN', 'LEOCADIA', 'CAME', 'TO', 'HERSELF', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINED', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', "LEOCADIA'S", 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0030-167: hyp=['JUST', 'THEN', 'LOQUES', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINING', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', 'LUCADIUS', 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0031-168: ref=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'AND', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STROLL', 'WITH', 'RODOLFO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ABDUCTION', 'OF', 'A', 'GIRL', 'WHOM', 'RODOLFO', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0031-168: hyp=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'IN', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STRAW', 'WITH', 'RADOLPHO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ADOCTION', 'OF', 'A', 'GIRL', 'WHOM', 'UDOLPH', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0032-169: ref=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0032-169: hyp=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0033-170: ref=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'AND', 'ANIMATED', 'SHE', 'LED', 'HER', 
'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0033-170: hyp=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'ANIMATED', 'SHE', 'LED', 'HER', 'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0034-171: ref=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0034-171: hyp=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0035-172: ref=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOUR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYED', 'ESTAFANIA'] +5639-40744-0035-172: hyp=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYEDESTAFHANIA'] +5639-40744-0036-173: ref=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0036-173: hyp=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0037-174: ref=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 'FOR', 'YOU', 'AND', 'THE', 'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0037-174: hyp=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 'FOR', 'YOU', 'AND', 'A', 'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0038-175: ref=['JUST', 'AT', 'THE', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THEIR', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'LEOCADIA', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 
'HEARTS', 'OF', 'ALL'] +5639-40744-0038-175: hyp=['JUST', 'AT', 'A', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THERE', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'THE', 'OCCAS', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 'HEARTS', 'OF', 'ALL'] +5639-40744-0039-176: ref=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "RODOLFO'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0039-176: hyp=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "GODOLPH'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0040-177: ref=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'WHEN', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0040-177: hyp=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'WITH', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0041-178: ref=['NOR', 'WAS', 'RODOLFO', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LEOCADIA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5639-40744-0041-178: hyp=['NOR', 'WAS', 'RDOLPHAL', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LOU', 'KATYA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5683-32865-0000-2483: ref=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0000-2483: hyp=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0001-2484: ref=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0001-2484: hyp=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0002-2485: ref=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0002-2485: hyp=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0003-2486: ref=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] +5683-32865-0003-2486: hyp=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] +5683-32865-0004-2487: ref=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0004-2487: hyp=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 
'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0005-2488: ref=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LAKE', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0005-2488: hyp=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LEEK', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0006-2489: ref=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0006-2489: hyp=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0007-2490: ref=["I'M", 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WYLDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0007-2490: hyp=['I', 'AM', 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WILDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0008-2491: ref=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0008-2491: hyp=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0009-2492: ref=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0009-2492: hyp=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELLOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0010-2493: ref=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0010-2493: hyp=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0011-2494: ref=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0011-2494: hyp=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0012-2495: ref=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0012-2495: hyp=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0013-2496: 
ref=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0013-2496: hyp=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0014-2497: ref=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0014-2497: hyp=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0015-2498: ref=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0015-2498: hyp=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0016-2499: ref=['OH', 'I', 'KNOW', "THAT'S", 'LORNE', 'BRANDON'] +5683-32865-0016-2499: hyp=['OH', 'I', 'KNOW', "THAT'S", 'LORN', 'BRANDON'] +5683-32865-0017-2500: ref=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32865-0017-2500: hyp=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32866-0000-2527: ref=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0000-2527: hyp=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0001-2528: ref=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0001-2528: hyp=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0002-2529: ref=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0002-2529: hyp=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0003-2530: ref=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0003-2530: hyp=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0004-2531: ref=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERAL', 'SHIP'] +5683-32866-0004-2531: hyp=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERALSHIP'] +5683-32866-0005-2532: ref=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0005-2532: hyp=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0006-2533: ref=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0006-2533: hyp=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0007-2534: ref=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', "HE'S", 'BEELZEBUB', 'AT', 'ONCE'] +5683-32866-0007-2534: hyp=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', 'HE', 'IS', 'BEELZEBUB', 'AT', 'ONCE'] +5683-32866-0008-2535: ref=["BRACTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0008-2535: hyp=["BROCKTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0009-2536: ref=['I', "DON'T", 'KNOW', 'AND', "CAN'T", 'SAY', 'HOW', 'YOU', 'FINE', 'GENTLEMEN', 'DEFINE', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 
'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0009-2536: hyp=['I', "DON'T", 'KNOW', 'ONE', "CAN'T", 'SAY', 'HOW', 'YOU', 'FIND', 'GENTLEMEN', 'TO', 'FIND', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0010-2537: ref=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0010-2537: hyp=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0011-2538: ref=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0011-2538: hyp=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0012-2539: ref=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0012-2539: hyp=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0013-2540: ref=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0013-2540: hyp=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0014-2541: ref=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 'MORNING'] +5683-32866-0014-2541: hyp=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 'MORNING'] +5683-32866-0015-2542: ref=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUNT'] +5683-32866-0015-2542: hyp=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUNT'] +5683-32866-0016-2543: ref=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'AYE', 'AND', 'TOO', 'DEEP'] +5683-32866-0016-2543: hyp=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'I', 'AND', 'TOO', 'DEEP'] +5683-32866-0017-2544: ref=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0017-2544: hyp=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 
'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0018-2545: ref=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0018-2545: hyp=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0019-2546: ref=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STEADY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVE', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'A', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'A', 'SPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0019-2546: hyp=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STUDY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVED', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'THE', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'ESPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0020-2547: ref=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0020-2547: hyp=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0021-2548: ref=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0021-2548: hyp=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0022-2549: ref=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0022-2549: hyp=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0023-2550: ref=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0023-2550: hyp=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0024-2551: ref=['I', "SHAN'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0024-2551: hyp=['I', "SHA'N'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0025-2552: ref=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0025-2552: hyp=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0026-2553: ref=['I', 'BOLDLY', 'LIGHTED', 'MY', 'CHEROOT'] +5683-32866-0026-2553: hyp=['I', 'BOLDLY', 'LIGHTED', 'MY', 'TRUTH'] +5683-32866-0027-2554: ref=['A', 'COLD', 'BRIGHT', 'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0027-2554: hyp=['A', 'COLD', 'BRIGHT', 
'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0028-2555: ref=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0028-2555: hyp=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0029-2556: ref=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0029-2556: hyp=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0030-2557: ref=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLED', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32866-0030-2557: hyp=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLED', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32879-0000-2501: ref=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0000-2501: hyp=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0001-2502: ref=['WELL', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0001-2502: hyp=['WHILE', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0002-2503: ref=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0002-2503: hyp=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0003-2504: ref=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0003-2504: hyp=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0004-2505: ref=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0004-2505: hyp=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0005-2506: ref=['THIS', 'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0005-2506: hyp=['THIS', 'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0006-2507: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 
'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0006-2507: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0007-2508: ref=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0007-2508: hyp=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0008-2509: ref=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0008-2509: hyp=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0009-2510: ref=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0009-2510: hyp=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0010-2511: ref=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0010-2511: hyp=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0011-2512: ref=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'OF', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0011-2512: hyp=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'A', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0012-2513: ref=['THANK', 'YOU', 'RACHEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0012-2513: hyp=['THANK', 'YOU', 'RACHAEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0013-2514: ref=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WYLDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'IS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0013-2514: hyp=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WILDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'WAS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0014-2515: ref=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0014-2515: hyp=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0015-2516: ref=['YES', 'SAID', 'RACHEL'] +5683-32879-0015-2516: hyp=['YES', 'SAID', 'RACHEL'] +5683-32879-0016-2517: ref=['AND', 'THE', 'WAN', 'ORACLE', 'HAVING', 'SPOKEN', 'SHE', 'SATE', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0016-2517: hyp=['AND', 'THE', 'WAN', 'ORACLE', 
'HAVING', 'SPOKEN', 'SHE', 'SAT', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0017-2518: ref=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0017-2518: hyp=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0018-2519: ref=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0018-2519: hyp=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0019-2520: ref=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0019-2520: hyp=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0020-2521: ref=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0020-2521: hyp=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0021-2522: ref=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0021-2522: hyp=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0022-2523: ref=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0022-2523: hyp=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0023-2524: ref=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0023-2524: hyp=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0024-2525: ref=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0024-2525: hyp=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0025-2526: ref=['THANK', 'YOU', 'DORCAS', 'DEAR'] +5683-32879-0025-2526: hyp=['THANK', 'YOU', 'DORCAS', 'DEAR'] +61-70968-0000-2179: ref=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0000-2179: hyp=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0001-2180: ref=['GIVE', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MUMMERIES', 'CHILD'] +61-70968-0001-2180: hyp=['KIVED', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MEMORIES', 'CHILD'] +61-70968-0002-2181: ref=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] 
+61-70968-0002-2181: hyp=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] +61-70968-0003-2182: ref=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0003-2182: hyp=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0004-2183: ref=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0004-2183: hyp=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0005-2184: ref=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0005-2184: hyp=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0006-2185: ref=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0006-2185: hyp=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0007-2186: ref=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0007-2186: hyp=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0008-2187: ref=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0008-2187: hyp=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0009-2188: ref=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0009-2188: hyp=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0010-2189: ref=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0010-2189: hyp=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0011-2190: ref=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0011-2190: hyp=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0012-2191: ref=['CRIES', 'OF', 'A', 'NOTTINGHAM', 'A', 'NOTTINGHAM'] +61-70968-0012-2191: hyp=['CRIES', 'OF', 'UNNOTTINGHAM', 'ARE', 'NOTTINGHAM'] +61-70968-0013-2192: ref=['BEFORE', 'THEM', 'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLESS', 'AND', 'TERRIFIED'] +61-70968-0013-2192: hyp=['BEFORE', 'THEM', 'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLICE', 'AND', 'TERRIFIED'] +61-70968-0014-2193: ref=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'A', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] +61-70968-0014-2193: hyp=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'THE', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] 
+61-70968-0015-2194: ref=['NAY', 'WE', 'REFUSED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0015-2194: hyp=['NAY', 'WE', 'WERE', 'FREEZED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0016-2195: ref=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0016-2195: hyp=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0017-2196: ref=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0017-2196: hyp=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0018-2197: ref=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0018-2197: hyp=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0019-2198: ref=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AND', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0019-2198: hyp=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AS', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0020-2199: ref=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0020-2199: hyp=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0021-2200: ref=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0021-2200: hyp=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0022-2201: ref=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0022-2201: hyp=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0023-2202: ref=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0023-2202: hyp=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0024-2203: ref=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENCE', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PURSE', 'IN', 'CONSOLATION'] +61-70968-0024-2203: hyp=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENT', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PERSON', 'CONSOLATION'] +61-70968-0025-2204: ref=['COME', 'TO', 'ME', 'MEN', 'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0025-2204: hyp=['COME', 'TO', 'ME', 'MEN', 'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0026-2205: ref=['THE', 'STROLLERS', 'TOOK', 'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HEARTY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0026-2205: hyp=['THE', 'STROLLERS', 'TOOK', 'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HEARTY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0027-2206: ref=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DID', 'THESE', 'TWO', 'COMPORT', 
'THEMSELVES'] +61-70968-0027-2206: hyp=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DID', 'THESE', 'TWO', 'COMPORT', 'THEMSELVES'] +61-70968-0028-2207: ref=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'THE', 'NOTTINGHAM', 'APPRENTICE', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0028-2207: hyp=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'IN', 'AUTTINGHAM', 'APPRENTICED', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0029-2208: ref=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0029-2208: hyp=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0030-2209: ref=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0030-2209: hyp=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0031-2210: ref=['SILENCE', 'YOU', 'KNAVE', 'CRIED', 'MONTFICHET'] +61-70968-0031-2210: hyp=['SILENCE', 'YOU', 'NAVE', 'CRIED', 'MONTFICHET'] +61-70968-0032-2211: ref=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0032-2211: hyp=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0033-2212: ref=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0033-2212: hyp=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0034-2213: ref=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0034-2213: hyp=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0035-2214: ref=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', "ROBIN'S", 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0035-2214: hyp=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', 'ROBINS', 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0036-2215: ref=['GEORGE', 'MONTFICHET', 'WILL', 'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0036-2215: hyp=['GEORGE', 'MONTFICHET', 'WILL', 'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0037-2216: ref=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0037-2216: hyp=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0038-2217: ref=['ROBIN', 'FITZOOTH'] +61-70968-0038-2217: hyp=['ROBIN', 'FITZOOTH'] +61-70968-0039-2218: ref=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0039-2218: hyp=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0040-2219: ref=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 
'ANSWERED', 'ROBIN'] +61-70968-0040-2219: hyp=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 'ANSWERED', 'ROBIN'] +61-70968-0041-2220: ref=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0041-2220: hyp=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0042-2221: ref=['MONTFICHET', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0042-2221: hyp=['MARTFICHE', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0043-2222: ref=['FRIENDS', 'SAID', 'MONTFICHET', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0043-2222: hyp=['FRIENDS', 'SAID', 'MONTFICHE', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0044-2223: ref=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0044-2223: hyp=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0045-2224: ref=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'AND', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0045-2224: hyp=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'IN', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0046-2225: ref=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0046-2225: hyp=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0047-2226: ref=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0047-2226: hyp=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0048-2227: ref=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0048-2227: hyp=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0049-2228: ref=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0049-2228: hyp=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0050-2229: ref=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 'HIS', 'ARM'] +61-70968-0050-2229: hyp=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 'HIS', 'ARM'] +61-70968-0051-2230: ref=['BEG', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0051-2230: hyp=['BEGGED', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0052-2231: ref=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'SLEEVE'] +61-70968-0052-2231: hyp=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'STEVE'] +61-70968-0053-2232: ref=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 
'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0053-2232: hyp=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0054-2233: ref=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0054-2233: hyp=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0055-2234: ref=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0055-2234: hyp=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0056-2235: ref=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0056-2235: hyp=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0057-2236: ref=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAMEWELL', 'LAD', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0057-2236: hyp=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAME', 'WELL', 'LED', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0058-2237: ref=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0058-2237: hyp=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0059-2238: ref=['IT', 'WILL', 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0059-2238: hyp=["IT'LL", 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0060-2239: ref=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0060-2239: hyp=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0061-2240: ref=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0061-2240: hyp=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0062-2241: ref=['AY', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70968-0062-2241: hyp=['I', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70970-0000-2242: ref=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0000-2242: hyp=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0001-2243: ref=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0001-2243: hyp=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0002-2244: ref=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0002-2244: hyp=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0003-2245: 
ref=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0003-2245: hyp=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0004-2246: ref=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WEAR', "MONTFICHET'S", 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', "NE'ER", 'DISGRACE', 'OUR', 'SCUTCHEON', 'I', 'WARRANT', 'ME'] +61-70970-0004-2246: hyp=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WHERE', 'MONTFICHE', 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', 'NEVER', 'DISGRACE', 'OUR', 'STUTTON', 'I', 'WARRANT', 'ME'] +61-70970-0005-2247: ref=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0005-2247: hyp=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0006-2248: ref=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0006-2248: hyp=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0007-2249: ref=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0007-2249: hyp=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0008-2250: ref=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0008-2250: hyp=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0009-2251: ref=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0009-2251: hyp=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0010-2252: ref=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'E', 'E', 'N'] +61-70970-0010-2252: hyp=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'EEN'] +61-70970-0011-2253: ref=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0011-2253: hyp=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0012-2254: ref=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0012-2254: hyp=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0013-2255: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0013-2255: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0014-2256: ref=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] +61-70970-0014-2256: hyp=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] +61-70970-0015-2257: ref=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0015-2257: hyp=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0016-2258: ref=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 
'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0016-2258: hyp=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0017-2259: ref=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0017-2259: hyp=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0018-2260: ref=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0018-2260: hyp=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0019-2261: ref=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0019-2261: hyp=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0020-2262: ref=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0020-2262: hyp=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0021-2263: ref=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0021-2263: hyp=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0022-2264: ref=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0022-2264: hyp=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0023-2265: ref=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0023-2265: hyp=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0024-2266: ref=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0024-2266: hyp=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0025-2267: ref=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0025-2267: hyp=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0026-2268: ref=["FITZOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] +61-70970-0026-2268: hyp=["FITTOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] +61-70970-0027-2269: ref=['ROBIN', 'CAREFULLY', 'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0027-2269: hyp=['ROBIN', 'CAREFULLY', 
'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0028-2270: ref=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0028-2270: hyp=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0029-2271: ref=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0029-2271: hyp=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0030-2272: ref=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0030-2272: hyp=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0031-2273: ref=['CRIED', 'HE', 'WAVING', 'THE', 'LANTHORN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0031-2273: hyp=['CRIED', 'HE', 'WAVING', 'THE', 'LANTERN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0032-2274: ref=['ENQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICIONS', 'STILL', 'UPON', 'HIM'] +61-70970-0032-2274: hyp=['INQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICION', 'STILL', 'UPON', 'HIM'] +61-70970-0033-2275: ref=['TRULY', 'SUCH', 'A', 'HORSE', 'SHOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0033-2275: hyp=['TRULY', 'SUCH', 'A', 'HORSE', 'WOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0034-2276: ref=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0034-2276: hyp=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0035-2277: ref=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "GEOFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0035-2277: hyp=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "JEFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0036-2278: ref=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBORING', 'THEM'] +61-70970-0036-2278: hyp=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBOURING', 'THEM'] +61-70970-0037-2279: ref=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'A', 'GOOD', 'FELLOWSHIP', 'WAS', 'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0037-2279: hyp=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'THE', 'GOOD', 'FELLOWSHIP', 'WAS', 'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0038-2280: ref=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGE', 'WAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANCE', 'TO', 'SHERWOOD', 'AND', 'THAT', 'GEOFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 
'FOREST'] +61-70970-0038-2280: hyp=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGEWAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANTS', 'TO', 'SHERWOOD', 'AND', 'THAT', 'JEFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 'FOREST'] +61-70970-0039-2281: ref=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0039-2281: hyp=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0040-2282: ref=['THEY', 'REGAINED', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +61-70970-0040-2282: hyp=['THEY', 'REGAIN', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +672-122797-0000-1529: ref=['OUT', 'IN', 'THE', 'WOODS', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0000-1529: hyp=['OUT', 'IN', 'THE', 'WOOD', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0001-1530: ref=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FIRS'] +672-122797-0001-1530: hyp=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FURS'] +672-122797-0002-1531: ref=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'AND', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0002-1531: hyp=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'IN', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0003-1532: ref=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0003-1532: hyp=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0004-1533: ref=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 'LITTLE', 'TREE'] +672-122797-0004-1533: hyp=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 'LITTLE', 'TREE'] +672-122797-0005-1534: ref=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0005-1534: hyp=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0006-1535: ref=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 
'WORLD'] +672-122797-0006-1535: hyp=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 'WORLD'] +672-122797-0007-1536: ref=['IN', 'AUTUMN', 'THE', 'WOOD', 'CUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0007-1536: hyp=['IN', 'AUTUMN', 'THE', 'WOODCUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0008-1537: ref=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZE', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNISED', 'AND', 'THEN', 'THEY', 'WERE', 'LAID', 'IN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0008-1537: hyp=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZED', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNIZED', 'AND', 'THEN', 'THEY', 'WERE', 'LADEN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0009-1538: ref=['HAVE', 'YOU', 'NOT', 'MET', 'THEM', 'ANYWHERE'] +672-122797-0009-1538: hyp=['HAVE', 'YOU', 'NOT', 'MET', 'THE', 'MANY', 'WHERE'] +672-122797-0010-1539: ref=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0010-1539: hyp=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0011-1540: ref=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0011-1540: hyp=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0012-1541: ref=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0012-1541: hyp=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0013-1542: ref=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] +672-122797-0013-1542: hyp=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] +672-122797-0014-1543: ref=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0014-1543: hyp=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0015-1544: ref=['WERE', 'I', 'IN', 'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOR', 'AND', 'MAGNIFICENCE'] +672-122797-0015-1544: hyp=['WHERE', 'I', 'IN', 'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOUR', 'AND', 'MAGNIFICENCE'] +672-122797-0016-1545: ref=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0016-1545: hyp=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 
'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0017-1546: ref=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0017-1546: hyp=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0018-1547: ref=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'AIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0018-1547: hyp=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'HEIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0019-1548: ref=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0019-1548: hyp=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0020-1549: ref=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0020-1549: hyp=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0021-1550: ref=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0021-1550: hyp=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0022-1551: ref=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0022-1551: hyp=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0023-1552: ref=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANYMORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0023-1552: hyp=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANY', 'MORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0024-1553: ref=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0024-1553: hyp=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0025-1554: ref=['THE', 'TREE', 'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 'UNLOADED', 'IN', 'A', 'COURT', 'YARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0025-1554: hyp=['THE', 'TREE', 'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 'UNLOADED', 'IN', 'A', 'COURTYARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0026-1555: ref=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 
'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0026-1555: hyp=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0027-1556: ref=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0027-1556: hyp=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0028-1557: ref=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0028-1557: hyp=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0029-1558: ref=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0029-1558: hyp=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0030-1559: ref=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0030-1559: hyp=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0031-1560: ref=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0031-1560: hyp=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0032-1561: ref=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0032-1561: hyp=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0033-1562: ref=['A', 'STORY'] +672-122797-0033-1562: hyp=['A', 'STORY'] +672-122797-0034-1563: ref=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0034-1563: hyp=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0035-1564: ref=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0035-1564: hyp=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0036-1565: ref=['HUMPY', 'DUMPY', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0036-1565: hyp=['HUMPY', "DON'T", 'BE', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0037-1566: ref=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0037-1566: hyp=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0038-1567: ref=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] +672-122797-0038-1567: hyp=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] +672-122797-0039-1568: ref=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0039-1568: hyp=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0040-1569: ref=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] +672-122797-0040-1569: hyp=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] +672-122797-0041-1570: ref=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0041-1570: hyp=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0042-1571: 
ref=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0042-1571: hyp=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0043-1572: ref=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0043-1572: hyp=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0044-1573: ref=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0044-1573: hyp=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0045-1574: ref=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0045-1574: hyp=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0046-1575: ref=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0046-1575: hyp=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0047-1576: ref=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0047-1576: hyp=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0048-1577: ref=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0048-1577: hyp=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0049-1578: ref=['SQUEAK', 'SQUEAK'] +672-122797-0049-1578: hyp=['SQUEAK', 'SQUI'] +672-122797-0050-1579: ref=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0050-1579: hyp=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0051-1580: ref=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0051-1580: hyp=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0052-1581: ref=["THERE'S", 'MANY', 'A', 'ONE', 'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0052-1581: hyp=["THERE'S", 'MANY', 'A', 'ONE', 'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0053-1582: ref=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0053-1582: hyp=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0054-1583: ref=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 'SAID', 'THE', 'TREE'] +672-122797-0054-1583: hyp=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 'SAID', 'THE', 'TREE'] +672-122797-0055-1584: ref=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0055-1584: hyp=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 
'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0056-1585: ref=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0056-1585: hyp=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0057-1586: ref=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0057-1586: hyp=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0058-1587: ref=['WHO', 'IS', 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0058-1587: hyp=['WHO', "IT'S", 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0059-1588: ref=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0059-1588: hyp=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0060-1589: ref=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0060-1589: hyp=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0061-1590: ref=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0061-1590: hyp=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0062-1591: ref=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0062-1591: hyp=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0063-1592: ref=['THEN', 'GOOD', 'BYE', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0063-1592: hyp=['THEN', 'GOOD', 'BY', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0064-1593: ref=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0064-1593: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0065-1594: ref=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0065-1594: hyp=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0066-1595: ref=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0066-1595: hyp=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0067-1596: ref=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 'SHONE'] +672-122797-0067-1596: hyp=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 'SHONE'] +672-122797-0068-1597: ref=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0068-1597: hyp=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0069-1598: ref=['IT', 'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0069-1598: hyp=['IT', 
'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0070-1599: ref=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0070-1599: hyp=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0071-1600: ref=['IN', 'THE', 'COURT', 'YARD', 'SOME', 'OF', 'THE', 'MERRY', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0071-1600: hyp=['IN', 'THE', 'COURTYARD', 'SOME', 'OF', 'THE', 'MERRIED', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0072-1601: ref=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0072-1601: hyp=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0073-1602: ref=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'IT', 'SIGHED', 'SO', 'DEEPLY'] +672-122797-0073-1602: hyp=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'ITS', 'SIDE', 'SO', 'DEEPLY'] +672-122797-0074-1603: ref=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +672-122797-0074-1603: hyp=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +6829-68769-0000-1858: ref=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHECK'] +6829-68769-0000-1858: hyp=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHEQUE'] +6829-68769-0001-1859: ref=['IT', 'WAS', 'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0001-1859: hyp=['IT', 'WAS', 'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', 'THE', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0002-1860: ref=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 'LAWYER'] +6829-68769-0002-1860: hyp=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 'LAWYER'] +6829-68769-0003-1861: ref=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0003-1861: hyp=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 
'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0004-1862: ref=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'CASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'MERE', 'BLUFF'] +6829-68769-0004-1862: hyp=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'GASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'A', 'MERE', 'BLUFF'] +6829-68769-0005-1863: ref=['HE', 'WAS', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0005-1863: hyp=['HE', 'WAS', 'A', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0006-1864: ref=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0006-1864: hyp=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0007-1865: ref=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'IF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0007-1865: hyp=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'OF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0008-1866: ref=['FAIRVIEW', 'WAS', 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'JAIL'] +6829-68769-0008-1866: hyp=['FAIR', "VIEW'S", 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'DRALE'] +6829-68769-0009-1867: ref=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0009-1867: hyp=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0010-1868: ref=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0010-1868: hyp=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0011-1869: ref=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0011-1869: hyp=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0012-1870: ref=['OH', 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 'ALTERING', 'HIS', 'DEMEANOR'] +6829-68769-0012-1870: hyp=["I'LL", 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 'ALTERING', 'HIS', 'DEMEANOR'] +6829-68769-0013-1871: ref=['MAY', 'WE', 'SEE', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] +6829-68769-0013-1871: hyp=['MAY', 'WE', 'SEA', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] +6829-68769-0014-1872: ref=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'A', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0014-1872: hyp=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'THE', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0015-1873: ref=['SOMETIMES', "I'M", 'THAT', 'YEARNING', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AN', 'I', 'DUNNO', 'WHICH', 'IS', 
'WORST', 'DYIN', 'ONE', 'WAY', 'OR', 'ANOTHER'] +6829-68769-0015-1873: hyp=['SOMETIMES', 'ON', 'THAT', 'YEARNIN', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AND', 'I', "DON'T", 'KNOW', 'WHICH', 'IS', 'WORSE', 'DYIN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER'] +6829-68769-0016-1874: ref=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0016-1874: hyp=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0017-1875: ref=['WORSE', 'TOM', 'WORSE', 'N', 'EVER', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0017-1875: hyp=['HORSE', 'TOM', 'WORSE', 'THAN', 'ARROW', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0018-1876: ref=['MISS', 'DE', 'GRAF', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0018-1876: hyp=['MISTER', 'GRAFT', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0019-1877: ref=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0019-1877: hyp=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0020-1878: ref=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'BENCH', 'HERE'] +6829-68769-0020-1878: hyp=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'PENCH', 'HERE'] +6829-68769-0021-1879: ref=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0021-1879: hyp=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0022-1880: ref=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'ARE', 'INTERESTED', 'IN', 'IT'] +6829-68769-0022-1880: hyp=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'OUR', 'INTERESTED', 'IN', 'IT'] +6829-68769-0023-1881: ref=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID'] +6829-68769-0023-1881: hyp=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID', 'IT'] +6829-68769-0024-1882: ref=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] +6829-68769-0024-1882: hyp=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] +6829-68769-0025-1883: ref=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0025-1883: hyp=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0026-1884: ref=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] 
+6829-68769-0026-1884: hyp=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] +6829-68769-0027-1885: ref=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0027-1885: hyp=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0028-1886: ref=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCERN'] +6829-68769-0028-1886: hyp=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCERN'] +6829-68769-0029-1887: ref=["IT'S", 'A', 'STOCK', 'COMPANY', 'AND', 'RICH'] +6829-68769-0029-1887: hyp=["IT'S", 'A', 'STOCK', 'COMPANY', 'IN', 'RICH'] +6829-68769-0030-1888: ref=['I', 'WAS', 'BOOKKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0030-1888: hyp=['I', 'WAS', 'BITKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0031-1889: ref=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'THEM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0031-1889: hyp=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'HIM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0032-1890: ref=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0032-1890: hyp=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0033-1891: ref=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0033-1891: hyp=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0034-1892: ref=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHALL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0034-1892: hyp=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHAL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0035-1893: ref=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0035-1893: hyp=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0036-1894: ref=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0036-1894: hyp=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0037-1895: ref=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 'DAY'] +6829-68769-0037-1895: hyp=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 'DAY'] +6829-68769-0038-1896: ref=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0038-1896: hyp=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0039-1897: ref=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0039-1897: hyp=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 
'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0040-1898: ref=['SOME', 'GIRL', 'HAS', 'BEEN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0040-1898: hyp=['SOME', 'GIRL', 'HAS', 'BEEN', 'IN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0041-1899: ref=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0041-1899: hyp=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0042-1900: ref=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0042-1900: hyp=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0043-1901: ref=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'IN', "STATE'S", 'PRISON'] +6829-68769-0043-1901: hyp=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'AND', 'STATES', 'PRISON'] +6829-68769-0044-1902: ref=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'IN', 'ANNOYANCE'] +6829-68769-0044-1902: hyp=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'AN', 'ANNOYANCE'] +6829-68769-0045-1903: ref=["I'LL", 'PAY', 'ALL', 'THE', 'COSTS', 'BESIDES'] +6829-68769-0045-1903: hyp=["I'LL", 'PAY', 'ALL', 'THE', 'COST', 'BESIDES'] +6829-68769-0046-1904: ref=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0046-1904: hyp=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0047-1905: ref=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHALL'] +6829-68769-0047-1905: hyp=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHAL'] +6829-68769-0048-1906: ref=['GIVE', 'ME', 'A', 'CHECK', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'QUASH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0048-1906: hyp=['GIVE', 'ME', 'A', 'CHEQUE', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'CAUSH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0049-1907: ref=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0049-1907: hyp=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOUR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0050-1908: ref=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0050-1908: hyp=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0051-1909: ref=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0051-1909: hyp=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0052-1910: ref=['HE', 'MIGHT', 'HAVE', 'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0052-1910: hyp=['HE', 'MIGHT', 'HAVE', 'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0053-1911: ref=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68769-0053-1911: hyp=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68771-0000-1912: ref=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 
'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', "FORBES'S", 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0000-1912: hyp=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', 'FORTS', 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0001-1913: ref=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMEN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVASS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0001-1913: hyp=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMAN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVAS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0002-1914: ref=['THE', 'WEAK', 'KNEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'ANOTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0002-1914: hyp=['THE', 'WEAK', 'NEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0003-1915: ref=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0003-1915: hyp=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0004-1916: ref=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELECT', 'HOPKINS'] +6829-68771-0004-1916: hyp=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELEC', 'HOPKINS'] +6829-68771-0005-1917: ref=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0005-1917: hyp=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0006-1918: ref=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0006-1918: hyp=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0007-1919: ref=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 'COMING', 'RECEPTION', 'TO', 'THE', "WOMAN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANTS', 'OF', 'THE', 'OTHER', 'VEHICLE', 
'WERE'] +6829-68771-0007-1919: hyp=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 'COMING', 'RECEPTION', 'TO', 'THE', "WOMEN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANT', 'OF', 'THE', 'OTHER', 'VEHICLE', 'WERE'] +6829-68771-0008-1920: ref=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0008-1920: hyp=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0009-1921: ref=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0009-1921: hyp=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0010-1922: ref=['THE', 'FAIRVIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0010-1922: hyp=['THE', 'FAIR', 'VIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0011-1923: ref=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0011-1923: hyp=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0012-1924: ref=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WHERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'INVITED', 'GUESTS'] +6829-68771-0012-1924: hyp=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'THE', 'INVITED', 'GUEST'] +6829-68771-0013-1925: ref=['THE', 'ATTENDANCE', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FETE'] +6829-68771-0013-1925: hyp=['THE', 'ATTENDANTS', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FIGHT'] +6829-68771-0014-1926: ref=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANTS', 'BETH', 'SAID', 'LOUISE', 
'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0014-1926: hyp=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANCE', 'BETH', 'SAID', 'LOUISE', 'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0015-1927: ref=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0015-1927: hyp=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0016-1928: ref=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THE', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'ESPECIAL', 'FAVORITE'] +6829-68771-0016-1928: hyp=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THEIR', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'SPECIAL', 'FAVOURITE'] +6829-68771-0017-1929: ref=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'AND', 'BETH', 'FOLLOWED'] +6829-68771-0017-1929: hyp=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'IN', 'BETH', 'FOLLOWED'] +6829-68771-0018-1930: ref=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'A', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0018-1930: hyp=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'OF', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0019-1931: ref=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0019-1931: hyp=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'A', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0020-1932: ref=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0020-1932: hyp=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0021-1933: ref=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0021-1933: hyp=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0022-1934: ref=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0022-1934: hyp=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0023-1935: ref=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 'HOME'] +6829-68771-0023-1935: hyp=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 'HOME'] +6829-68771-0024-1936: ref=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAID', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 'VISITOR'] +6829-68771-0024-1936: hyp=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAID', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 'VISITOR'] +6829-68771-0025-1937: ref=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCKED', 'SLOWLY', 'BACK', 
'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0025-1937: hyp=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCK', 'SLOWLY', 'BACK', 'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0026-1938: ref=['ELIZA', 'PARSONS', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0026-1938: hyp=['ELIZA', 'PARSON', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0027-1939: ref=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0027-1939: hyp=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0028-1940: ref=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0028-1940: hyp=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0029-1941: ref=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'IN', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0029-1941: hyp=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'A', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0030-1942: ref=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0030-1942: hyp=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0031-1943: ref=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0031-1943: hyp=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0032-1944: ref=['HOWEVER', 'HER', 'FEATURES', 'AND', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0032-1944: hyp=['HOWEVER', 'HER', 'FEATURES', 'AND', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0033-1945: ref=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0033-1945: hyp=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0034-1946: ref=['I', 'WISH', 'I', 'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0034-1946: hyp=['I', 'WISH', 'I', 'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0035-1947: ref=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 'ESCAPE', 'YOU'] +6829-68771-0035-1947: hyp=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 'ESCAPE', 'YOU'] +6829-68771-0036-1948: ref=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6829-68771-0036-1948: hyp=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6930-75918-0000-0: 
ref=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0000-0: hyp=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0001-1: ref=['THE', 'ENGLISH', 'FORWARDED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0001-1: hyp=['THE', 'ENGLISH', 'FOEEDED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0002-2: ref=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0002-2: hyp=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0003-3: ref=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0003-3: hyp=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0004-4: ref=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 'RECEIVED', 'AND', 'WHERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'BEFORE', 'THEM'] +6930-75918-0004-4: hyp=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 'RECEIVED', 'AND', 'WHERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'FOR', 'THEM'] +6930-75918-0005-5: ref=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 'LIMBS'] +6930-75918-0005-5: hyp=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 
'LIMBS'] +6930-75918-0006-6: ref=['THIS', 'HAS', 'INDEED', 'BEEN', 'A', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0006-6: hyp=['THIS', 'HAS', 'INDEED', 'BEEN', 'AN', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0007-7: ref=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0007-7: hyp=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0008-8: ref=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0008-8: hyp=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0009-9: ref=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0009-9: hyp=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0010-10: ref=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0010-10: hyp=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0011-11: ref=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0011-11: hyp=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0012-12: ref=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0012-12: hyp=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0013-13: ref=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0013-13: hyp=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0014-14: ref=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BEAR', 'ON', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0014-14: hyp=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BERYL', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0015-15: ref=['THUS', 'IT', 'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', "COUNTRY'S", 'OUR', "MASTER'S", 'AND', 'OUR', 'OWN'] +6930-75918-0015-15: hyp=['THUS', 'IT', 'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', 'COUNTRY', 'OUR', 'MASTERS', 'AND', 'OUR', 'OWN'] +6930-75918-0016-16: ref=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0016-16: hyp=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0017-17: ref=['BUT', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 
'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0017-17: hyp=['BY', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0018-18: ref=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0018-18: hyp=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0019-19: ref=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0019-19: hyp=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0020-20: ref=['BRAGELONNE', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-75918-0020-20: hyp=['BRAGGELON', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-76324-0000-21: ref=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0000-21: hyp=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0001-22: ref=['THEY', 'WERE', 'CERTAINLY', 'NO', 'NEARER', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0001-22: hyp=['THERE', 'WERE', 'CERTAINLY', 'NO', 'NEAR', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0002-23: ref=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0002-23: hyp=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0003-24: ref=['NOW', 'WHAT', 'WAS', 'THE', 'SENSE', 'OF', 'IT', 'TWO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0003-24: hyp=['NOW', 'WHAT', 'IS', 'THE', 'SENSE', 'OF', 'IT', 'TOO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0004-25: ref=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0004-25: hyp=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0005-26: ref=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 
'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0005-26: hyp=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0006-27: ref=['HERS', 'HAPPENED', 'TO', 'BE', 'IN', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'THAT'] +6930-76324-0006-27: hyp=['HERS', 'HAPPENED', 'TO', 'BE', 'ON', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'IT'] +6930-76324-0007-28: ref=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SPRAGUE'] +6930-76324-0007-28: hyp=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SP'] +6930-76324-0008-29: ref=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0008-29: hyp=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0009-30: ref=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0009-30: hyp=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0010-31: ref=['WHAT', 'IN', 'THE', 'WORLD', 'IS', 'THAT', 'QUERIED', 'JOYCE'] +6930-76324-0010-31: hyp=['WHEN', 'IN', 'THE', 'WORLD', 'IS', 'IT', 'QUERIED', 'JOYCE'] +6930-76324-0011-32: ref=['THEY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'CYN'] +6930-76324-0011-32: hyp=['MAY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'SIN'] +6930-76324-0012-33: ref=["WE'LL", 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HAVE', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0012-33: hyp=['WILL', 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HALF', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0013-34: ref=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0013-34: hyp=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0014-35: ref=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0014-35: hyp=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0015-36: ref=['SMUGGLING', 'THE', 'HOUSE', 'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISK', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0015-36: hyp=['SMUGGLING', 'THE', 'HOUSE', 'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISK', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0016-37: ref=['THE', 'LURE', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKILY', 
'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0016-37: hyp=['THE', 'LOWER', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKLY', 'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0017-38: ref=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0017-38: hyp=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0018-39: ref=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0018-39: hyp=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0019-40: ref=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0019-40: hyp=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0020-41: ref=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0020-41: hyp=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0021-42: ref=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0021-42: hyp=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0022-43: ref=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0022-43: hyp=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0023-44: ref=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0023-44: hyp=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0024-45: ref=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLE', 'LIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0024-45: hyp=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLELIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0025-46: ref=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0025-46: hyp=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0026-47: ref=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0026-47: hyp=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0027-48: ref=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'AN', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0027-48: hyp=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'ON', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0028-49: ref=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] 
+6930-76324-0028-49: hyp=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] +6930-81414-0000-50: ref=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0000-50: hyp=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0001-51: ref=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFAR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0001-51: hyp=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFIR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0002-52: ref=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0002-52: hyp=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0003-53: ref=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0003-53: hyp=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0004-54: ref=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0004-54: hyp=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0005-55: ref=['WHAT', 'WAS', 'THAT'] +6930-81414-0005-55: hyp=['WHAT', 'WAS', 'THAT'] +6930-81414-0006-56: ref=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0006-56: hyp=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0007-57: ref=['NOTHING', 'MORE', 'NOT', 'EVEN', 'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0007-57: hyp=['NOTHING', 'MORE', 'NOT', 'EVEN', 'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0008-58: ref=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0008-58: hyp=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0009-59: ref=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'A', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 
'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFAR'] +6930-81414-0009-59: hyp=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'THE', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFIR'] +6930-81414-0010-60: ref=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0010-60: hyp=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0011-61: ref=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0011-61: hyp=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0012-62: ref=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFAR'] +6930-81414-0012-62: hyp=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFIR'] +6930-81414-0013-63: ref=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0013-63: hyp=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0014-64: ref=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLOURED'] +6930-81414-0014-64: hyp=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLORED'] +6930-81414-0015-65: ref=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0015-65: hyp=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0016-66: ref=['BUT', 'THAT', 'IS', "KAFFAR'S", 'KNIFE'] +6930-81414-0016-66: hyp=['BUT', 'THAT', 'IS', "KAFFIR'S", 'KNIFE'] +6930-81414-0017-67: ref=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVENING'] +6930-81414-0017-67: hyp=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVEN'] +6930-81414-0018-68: ref=['I', 'REMEMBER', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0018-68: hyp=['I', 'REMEMBER', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0019-69: ref=['VOLTAIRE', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0019-69: hyp=['WHILE', 'CHEER', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0020-70: ref=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0020-70: hyp=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0021-71: ref=['A', 'TERRIBLE', 'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0021-71: hyp=['A', 'TERRIBLE', 'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0022-72: ref=['I', 'HAD', 'AGAIN', 'BEEN', 'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0022-72: hyp=['I', 'HAD', 'AGAIN', 'BEEN', 'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0023-73: ref=['PERCHANCE', 'TOO', "KAFFAR'S", 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0023-73: hyp=['PERCHANCE', 'TOO', 'KAFFIRS', 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0024-74: ref=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 
'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0024-74: hyp=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0025-75: ref=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0025-75: hyp=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0026-76: ref=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0026-76: hyp=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0027-77: ref=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +6930-81414-0027-77: hyp=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +7021-79730-0000-1399: ref=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0000-1399: hyp=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0001-1400: ref=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0001-1400: hyp=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0002-1401: ref=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0002-1401: hyp=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0003-1402: ref=['AS', 'THE', 'CHAISE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOOR', 'STEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELICATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0003-1402: hyp=['AS', 'THE', 'CHASE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOORSTEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELEGATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0004-1403: ref=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHAISE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'UPON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0004-1403: hyp=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHASE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'ON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0005-1404: ref=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 
'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0005-1404: hyp=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0006-1405: ref=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'RELIES', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0006-1405: hyp=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'REALIZE', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0007-1406: ref=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0007-1406: hyp=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0008-1407: ref=['BUT', 'THIS', 'LAST', 'SUPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0008-1407: hyp=['BUT', 'THIS', 'LAST', 'OPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0009-1408: ref=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CAN', 'NOT', 'BE', 'OVERRATED'] +7021-79730-0009-1408: hyp=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CANNOT', 'BE', 'OVERRATED'] +7021-79740-0000-1384: ref=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0000-1384: hyp=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0001-1385: ref=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0001-1385: hyp=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0002-1386: ref=['NOW', 'DELIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 'INFLUENCE', 'AND', 'ASCENDENCY', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0002-1386: hyp=['NOW', 'GILLIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 
'INFLUENCE', 'AND', 'ASCENDANCY', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0003-1387: ref=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0003-1387: hyp=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0004-1388: ref=['YOU', 'HAVE', 'COME', 'ANDELLA', 'ANDELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DOLL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0004-1388: hyp=['YOU', 'HAVE', 'COME', 'AMDELLA', 'AND', 'DELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DAL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0005-1389: ref=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0005-1389: hyp=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0006-1390: ref=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANDELLA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0006-1390: hyp=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANNE', 'DELA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0007-1391: ref=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0007-1391: hyp=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0008-1392: ref=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0008-1392: hyp=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0009-1393: ref=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOR'] +7021-79740-0009-1393: hyp=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOR'] +7021-79740-0010-1394: ref=['DELIA', 'CAME', 'TO', 'THE', 'PARLOR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDELLA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] +7021-79740-0010-1394: hyp=['DELIGHT', 'CAME', 'TO', 'THE', 'PARLOUR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDDELA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] +7021-79740-0011-1395: ref=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0011-1395: hyp=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 
'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0012-1396: ref=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0012-1396: hyp=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0013-1397: ref=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANY', 'THING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0013-1397: hyp=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANYTHING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0014-1398: ref=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79740-0014-1398: hyp=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79759-0000-1378: ref=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0000-1378: hyp=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0001-1379: ref=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0001-1379: hyp=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0002-1380: ref=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0002-1380: hyp=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0003-1381: ref=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0003-1381: hyp=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0004-1382: ref=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AN', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'IN', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESSES', 'UPON', 'THE', 'MIND'] +7021-79759-0004-1382: hyp=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AN', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'AND', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESS', 'UPON', 'THE', 'MIND'] +7021-79759-0005-1383: ref=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 
'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-79759-0005-1383: hyp=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-85628-0000-1409: ref=['BUT', 'ANDERS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0000-1409: hyp=['BUT', 'ANDREWS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0001-1410: ref=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0001-1410: hyp=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0002-1411: ref=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0002-1411: hyp=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: ref=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: hyp=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACKKNIFE'] +7021-85628-0004-1413: ref=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDERS'] +7021-85628-0004-1413: hyp=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDREWS'] +7021-85628-0005-1414: ref=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0005-1414: hyp=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0006-1415: ref=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDERS'] +7021-85628-0006-1415: hyp=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDRES'] +7021-85628-0007-1416: ref=['AND', 'SHE', 'TOOK', 'ANDERS', 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] +7021-85628-0007-1416: hyp=['AND', 'SHE', 'TOOK', "ANDRE'S", 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] +7021-85628-0008-1417: ref=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] +7021-85628-0008-1417: hyp=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] 
+7021-85628-0009-1418: ref=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0009-1418: hyp=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0010-1419: ref=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0010-1419: hyp=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0011-1420: ref=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDERS', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0011-1420: hyp=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDRE', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0012-1421: ref=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0012-1421: hyp=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0013-1422: ref=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0013-1422: hyp=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0014-1423: ref=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0014-1423: hyp=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0015-1424: ref=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'AROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0015-1424: hyp=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'AROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0016-1425: ref=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] +7021-85628-0016-1425: hyp=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] +7021-85628-0017-1426: ref=['SO', 'IT', 'IS', 'SAID', 'ANDERS'] +7021-85628-0017-1426: hyp=['SO', 'IT', 'IS', 'SAID', 'ANDREWS'] +7021-85628-0018-1427: ref=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0018-1427: hyp=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0019-1428: ref=['WITH', 'ONE', 'JUMP', 
'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0019-1428: hyp=['WITH', 'ONE', 'JUMP', 'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0020-1429: ref=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0020-1429: hyp=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0021-1430: ref=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0021-1430: hyp=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0022-1431: ref=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0022-1431: hyp=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0023-1432: ref=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDERS', 'WAS', 'A', 'STUPID'] +7021-85628-0023-1432: hyp=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDREWS', 'WAS', 'A', 'STUPID'] +7021-85628-0024-1433: ref=['ANDERS', 'FACE', 'GREW', 'RED'] +7021-85628-0024-1433: hyp=["ANDREW'S", 'FACE', 'GREW', 'RED'] +7021-85628-0025-1434: ref=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0025-1434: hyp=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0026-1435: ref=['NO', 'MY', 'LITTLE', 'SON', 'SHE', 'SAID'] +7021-85628-0026-1435: hyp=['NO', 'MY', 'LITTLE', 'FUN', 'SHE', 'SAID'] +7021-85628-0027-1436: ref=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7021-85628-0027-1436: hyp=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7127-75946-0000-467: ref=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0000-467: hyp=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0001-468: ref=['AH', 'VERY', 'WELL'] +7127-75946-0001-468: hyp=['AH', 'VERY', 
'WELL'] +7127-75946-0002-469: ref=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'AU', 'COURANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0002-469: hyp=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'OCCOURANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0003-470: ref=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'AIGNAN', 'AND', 'VILLEROY', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0003-470: hyp=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'DAN', 'AND', 'VILLEROI', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0004-471: ref=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0004-471: hyp=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0005-472: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUIS'] +7127-75946-0005-472: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUISE'] +7127-75946-0006-473: ref=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0006-473: hyp=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0007-474: ref=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0007-474: hyp=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0008-475: ref=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0008-475: hyp=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0009-476: ref=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0009-476: hyp=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0010-477: ref=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0010-477: hyp=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0011-478: ref=['YOU', 'WILL', 'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0011-478: hyp=['YOU', 'WILL', 'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0012-479: ref=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0012-479: hyp=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0013-480: ref=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILETTE', 'BY', 
'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0013-480: hyp=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILET', 'BY', 'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0014-481: ref=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DIAS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATER', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0014-481: hyp=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DAIS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATRE', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0015-482: ref=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRING', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0015-482: hyp=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRANG', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0016-483: ref=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0016-483: hyp=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0017-484: ref=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLORED', 'SILKEN', 'HOSE', 'OF', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0017-484: hyp=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLORED', 'SILKEN', 'HOSE', 'A', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0018-485: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEM', 'TO', 'SOAR', 'ALONG'] +7127-75946-0018-485: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEMED', 'TO', 'SOAR', 'ALONG'] +7127-75946-0019-486: ref=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0019-486: hyp=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0020-487: ref=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HAVING', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] +7127-75946-0020-487: hyp=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HEAVEN', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] +7127-75946-0021-488: ref=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] +7127-75946-0021-488: hyp=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] 
+7127-75946-0022-489: ref=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0022-489: hyp=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0023-490: ref=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0023-490: hyp=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0024-491: ref=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0024-491: hyp=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0025-492: ref=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0025-492: hyp=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0026-493: ref=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THE', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATER', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0026-493: hyp=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THIS', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATRE', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0027-494: ref=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGEMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0027-494: hyp=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0028-495: ref=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 'SO', 'THAT', 'HIS', 'LIMBS', 'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0028-495: hyp=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 'SO', 'THAT', 'HIS', 'LIMBS', 'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0029-496: ref=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILLE', 'CAST', 'A', 
'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75946-0029-496: hyp=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILL', 'CAST', 'A', 'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75947-0000-426: ref=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDIMMED'] +7127-75947-0000-426: hyp=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDEMNED'] +7127-75947-0001-427: ref=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0001-427: hyp=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0002-428: ref=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0002-428: hyp=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0003-429: ref=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0003-429: hyp=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0004-430: ref=['EXPLAIN', 'YOURSELF'] +7127-75947-0004-430: hyp=['EXPLAIN', 'YOURSELF'] +7127-75947-0005-431: ref=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0005-431: hyp=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0006-432: ref=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0006-432: hyp=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0007-433: ref=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0007-433: hyp=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0008-434: ref=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0008-434: hyp=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0009-435: ref=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 'RETURNED', 'TO', 'THE', 'THEATER', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THE', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0009-435: hyp=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 'RETURNED', 'TO', 'THE', 'THEATRE', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 
'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THEIR', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0010-436: ref=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0010-436: hyp=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0011-437: ref=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0011-437: hyp=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0012-438: ref=['INDEED', 'AH'] +7127-75947-0012-438: hyp=['INDEED', 'A'] +7127-75947-0013-439: ref=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0013-439: hyp=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0014-440: ref=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0014-440: hyp=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0015-441: ref=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0015-441: hyp=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0016-442: ref=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0016-442: hyp=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0017-443: ref=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0017-443: hyp=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0018-444: ref=['I', 'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERE'] +7127-75947-0018-444: hyp=['I', 'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERS'] +7127-75947-0019-445: ref=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0019-445: hyp=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0020-446: ref=['NO', 'MORE', 'THAN', 'THE', 'DANCING'] +7127-75947-0020-446: hyp=['NO', 'MORE', 'THAN', 'THE', 'DANCING'] +7127-75947-0021-447: ref=['LA', 'VALLIERE', 'IS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONNAY', 'CHARENTE'] +7127-75947-0021-447: hyp=['LA', 'VALLIERS', 'IS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONIET', 'CHART'] +7127-75947-0022-448: ref=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 
'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0022-448: hyp=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0023-449: ref=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'LAUGHING', 'LOUDLY'] +7127-75947-0023-449: hyp=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DETONICHAUCH', 'LAUGHING', 'LOUDLY'] +7127-75947-0024-450: ref=['LOOK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'OAKS'] +7127-75947-0024-450: hyp=['LUCK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'YOKES'] +7127-75947-0025-451: ref=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0025-451: hyp=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0026-452: ref=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0026-452: hyp=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONECHAU', 'AND', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0027-453: ref=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THINKS', 'ATHENAIS', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0027-453: hyp=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TO', 'NECHERANT', 'THINKS', 'ETHNEE', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0028-454: ref=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ATHENAIS', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0028-454: hyp=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ETHINAY', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0029-455: ref=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0029-455: hyp=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0030-456: ref=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 'COUNT'] +7127-75947-0030-456: hyp=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 'COUNT'] +7127-75947-0031-457: ref=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0031-457: hyp=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0032-458: ref=['YES', 'BUT', 'PERHAPS', 'I', 'FRIGHTENED', 'HER', 'IN', 'WHAT', 'WAY'] +7127-75947-0032-458: hyp=['YES', 'BUT', 'PERHAPS', 'I', 'FRIGHTENED', 'HER', 'AND', 'WHAT', 'WAY'] +7127-75947-0033-459: ref=['HOW', 'IS', 'IT', 'LA', 'VALLIERE', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THAT', 'THE', 'VICOMTE', 'DE', 'BRAGELONNE', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0033-459: hyp=['HOW', 'IS', 'IT', 'LA', 'VALLIERS', 'SAID', 'MADEMOISELLE', 'DE', 'TENACHALANT', 'THAT', 
'THE', 'VICOMTE', 'DE', 'BRAGELON', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0034-460: ref=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0034-460: hyp=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0035-461: ref=['GOOD', 'GRACIOUS', 'HAS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0035-461: hyp=['GOOD', 'GRACIOUS', 'AS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0036-462: ref=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0036-462: hyp=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0037-463: ref=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0037-463: hyp=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0038-464: ref=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0038-464: hyp=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0039-465: ref=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTED', 'CAVALIERS'] +7127-75947-0039-465: hyp=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTAIN', 'CAVALIERS'] +7127-75947-0040-466: ref=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7127-75947-0040-466: hyp=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7176-88083-0000-707: ref=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0000-707: hyp=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0001-708: ref=['THE', 'MERGANSER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 
'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'IN', 'BLACK', 'AND', 'WHITE', 'AND', 'A', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0001-708: hyp=['THE', 'MERGANCER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'AND', 'BLACK', 'AND', 'WHITE', 'AND', 'HER', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0002-709: ref=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SAW', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0002-709: hyp=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SOLID', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0003-710: ref=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0003-710: hyp=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0004-711: ref=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0004-711: hyp=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0005-712: ref=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0005-712: hyp=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0006-713: ref=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0006-713: hyp=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0007-714: ref=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0007-714: hyp=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0008-715: ref=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] +7176-88083-0008-715: hyp=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] +7176-88083-0009-716: ref=['THE', 'GREAT', 'HAWK', 'FOLLOWED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] +7176-88083-0009-716: hyp=['THE', 'GREAT', 'HAWK', 'FOWLED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] 
+7176-88083-0010-717: ref=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0010-717: hyp=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0011-718: ref=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0011-718: hyp=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0012-719: ref=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0012-719: hyp=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0013-720: ref=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0013-720: hyp=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0014-721: ref=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0014-721: hyp=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0015-722: ref=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TOP'] +7176-88083-0015-722: hyp=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TOP'] +7176-88083-0016-723: ref=['STRAIGHTWAY', 'THE', 'HAWK', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0016-723: hyp=['STRAIGHTWAY', 'THE', 'HOT', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0017-724: ref=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0017-724: hyp=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0018-725: ref=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0018-725: hyp=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 
'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0019-726: ref=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0019-726: hyp=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0020-727: ref=['WHERE', 'THE', 'WAVES', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0020-727: hyp=['WHERE', 'THE', 'WAY', 'IS', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0021-728: ref=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0021-728: hyp=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0022-729: ref=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0022-729: hyp=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0023-730: ref=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0023-730: hyp=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0024-731: ref=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0024-731: hyp=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0025-732: ref=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0025-732: hyp=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0026-733: ref=['THE', 'DRAG', 'UPON', 'HIS', 
'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0026-733: hyp=['THE', 'DRAG', 'UPON', 'HIS', 'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0027-734: ref=['THEN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-88083-0027-734: hyp=['THAN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-92135-0000-661: ref=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SYMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0000-661: hyp=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SIMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0001-662: ref=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0001-662: hyp=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0002-663: ref=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0002-663: hyp=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0003-664: ref=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0003-664: hyp=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0004-665: ref=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0004-665: hyp=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0005-666: ref=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0005-666: hyp=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0006-667: ref=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', "I'D", 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] +7176-92135-0006-667: hyp=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', 'I', 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] +7176-92135-0007-668: ref=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0007-668: hyp=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0008-669: ref=['LEND', 'ME', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 
'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0008-669: hyp=['LINEN', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0009-670: ref=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0009-670: hyp=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0010-671: ref=['HAM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0010-671: hyp=['HIM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0011-672: ref=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLILOQUY', 'IS', 'PLAIN'] +7176-92135-0011-672: hyp=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLOQUY', 'IS', 'PLAIN'] +7176-92135-0012-673: ref=['INDEED', 'IRRESOLUTION', 'BEING', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENT', 'WORKING', 'OF', 'THE', 'JAW', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0012-673: hyp=['INDEED', 'IRRESOLUTION', 'MEAN', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENCE', 'WORKING', 'OF', 'THE', 'JOB', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0013-674: ref=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0013-674: hyp=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0014-675: ref=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TECHNIQUE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0014-675: hyp=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TYPENIQUE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0015-676: ref=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'TO', 'THE', 'END', 'WHEN', 'OPHELIA', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0015-676: hyp=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'THE', 'END', 'ONE', 'OF', 'WILLIAM', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0016-677: ref=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0016-677: hyp=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0017-678: 
ref=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0017-678: hyp=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0018-679: ref=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0018-679: hyp=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0019-680: ref=['I', 'WANT', 'DOUBLE', 'NINE', 'HAL', 'LO'] +7176-92135-0019-680: hyp=['I', 'WANT', 'DOUBLE', 'NINE', 'HELLO'] +7176-92135-0020-681: ref=['DOUBLE', 'NINE', 'TWO', 'THREE', 'ELSINORE', 'DOUBLE', 'NINE', 'YES', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0020-681: hyp=['DOUBLE', 'NINE', 'TO', 'THREE', 'ELZINOR', 'DOUBLE', 'NOT', 'YES', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'PANLESS', 'SPEAKING'] +7176-92135-0021-682: ref=['I', 'SAY', "I'VE", 'BEEN', 'WONDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0021-682: hyp=['I', 'SAY', "I'VE", 'BEEN', 'WANDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0022-683: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0022-683: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: ref=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: hyp=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0024-685: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0024-685: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0025-686: ref=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPEN', 'TO', 'BE', 'THE', 'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PET', 'FALCON', 'MONGOOSE', 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0025-686: hyp=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPENED', 'TO', 'BE', 'THE', 'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PETALKAN', "MONGOO'S", 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0026-687: ref=['ENTER', 
'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOAR', 'HOUND'] +7176-92135-0026-687: hyp=['INTER', 'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOREHOUND'] +7176-92135-0027-688: ref=['LADY', 'LARKSPUR', 'STARTS', 'SUDDENLY', 'AND', 'TURNS', 'TOWARDS', 'HIM'] +7176-92135-0027-688: hyp=['LADY', 'LARKSBURG', 'START', 'SUDDENLY', 'AND', 'TURNS', 'TOWARD', 'HIM'] +7176-92135-0028-689: ref=['LARKSPUR', 'BIT', 'ME', 'AGAIN', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0028-689: hyp=['LARKSBURGH', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0029-690: ref=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOONS'] +7176-92135-0029-690: hyp=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOON'] +7176-92135-0030-691: ref=['ENTER', 'LORD', 'ARTHUR', 'FLUFFINOSE'] +7176-92135-0030-691: hyp=['ENTERED', 'LORD', 'ARTHUR', "FLAPHANO'S"] +7176-92135-0031-692: ref=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0031-692: hyp=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0032-693: ref=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0032-693: hyp=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0033-694: ref=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0033-694: hyp=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0034-695: ref=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'OH', 'REGGIE', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0034-695: hyp=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'O', 'READY', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0035-696: ref=['THEN', 'LORD', 'TUPPENY', 'WELL', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0035-696: hyp=['THEN', 'LORD', 'TUPPENNY', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0036-697: ref=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAVING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0036-697: hyp=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAPING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0037-698: ref=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] +7176-92135-0037-698: hyp=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] +7176-92135-0038-699: ref=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVES', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HAWTREY', 'OR', 'OWEN', 'NARES', 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 'AND', 'ME'] +7176-92135-0038-699: hyp=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVED', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HALTREE', 'OR', 'OWEN', "NEAR'S", 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 'AND', 'ME'] +7176-92135-0039-700: ref=['TEA', 'PLEASE', 'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0039-700: hyp=['T', 'PLEASE', 
'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0040-701: ref=['HOSTESS', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'A', 'MOMENT', 'THEN', 'HANDS', 'HIM', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'THUS', 'DECEIVING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0040-701: hyp=['HOSTES', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'MOMENT', 'THEN', 'HANDSOME', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'LUSTY', 'SEATING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0041-702: ref=['RE', 'ENTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'REMOVE', 'THE', 'TEA', 'THINGS', 'HOSTESS', 'TO', 'GUEST'] +7176-92135-0041-702: hyp=['REINTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'MOVED', 'THE', 'TEA', 'THINGS', 'HOSTES', 'TWO', 'GUEST'] +7176-92135-0042-703: ref=['IN', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STAGE', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0042-703: hyp=['AND', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STEED', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0043-704: ref=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'BREAD', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUEST', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MO', 'OLD', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', 'ACTOR', 'IS', 'BETTER', 'SCHOOLED', 'THAN', 'THIS'] +7176-92135-0043-704: hyp=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'ABREAD', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUESTS', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MOLE', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', "ACTOR'S", 'BETTER', 'SCHOOL', 'THAN', 'THIS'] +7176-92135-0044-705: ref=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0044-705: hyp=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0045-706: ref=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'CIGARETTE', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7176-92135-0045-706: hyp=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'SICK', 'RED', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7729-102255-0000-261: ref=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0000-261: hyp=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0001-262: ref=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 'FIFTY', 'FIVE'] +7729-102255-0001-262: hyp=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 'FIFTY', 'FIVE'] +7729-102255-0002-263: ref=['THAT', "SUMMER'S", 'EMIGRATION', 'HOWEVER', 'BEING', 'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGED', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0002-263: hyp=['THAT', "SUMMER'S", 'IMMIGRATION', 'HOWEVER', 'BEING', 
'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGE', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0003-264: ref=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0003-264: hyp=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0004-265: ref=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0004-265: hyp=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0005-266: ref=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0005-266: hyp=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0006-267: ref=['COMING', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOLDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0006-267: hyp=['COMMON', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOULDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0007-268: ref=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTPORT', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0007-268: hyp=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTWARD', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0008-269: ref=['ALL', 'THE', 'TERRITORIAL', 
'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHANNON', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONISTS', 'SUPPORTING', 'THE', 'TOPEKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LECOMPTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0008-269: hyp=['ALL', 'THE', 'TERRITORIAL', 'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHAN', 'AND', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONIST', 'SUPPORTING', 'THE', 'TEPEAKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LE', 'COMTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0009-270: ref=['ALL', 'DISSENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEAVENWORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0009-270: hyp=['ALL', 'DESCENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEVIN', 'WORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0010-271: ref=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0010-271: hyp=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0011-272: ref=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'WERE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0011-272: hyp=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'ARE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'A', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0012-273: ref=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] +7729-102255-0012-273: hyp=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] +7729-102255-0013-274: ref=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0013-274: hyp=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0014-275: ref=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 
'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0014-275: hyp=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0015-276: ref=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', 'SHARPS', 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEAVENWORTH'] +7729-102255-0015-276: hyp=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', "SHARP'S", 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEVINWORTH'] +7729-102255-0016-277: ref=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LEGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0016-277: hyp=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LOGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0017-278: ref=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0017-278: hyp=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0018-279: ref=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0018-279: hyp=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0019-280: ref=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMPTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] +7729-102255-0019-280: hyp=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] +7729-102255-0020-281: ref=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 'MADE', 'AND', 'REEDER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0020-281: hyp=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 
'MADE', 'AND', 'READER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0021-282: ref=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONISTS', 'AND', 'OUTLAWS'] +7729-102255-0021-282: hyp=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONOUS', 'AND', 'OUTLAWS'] +7729-102255-0022-283: ref=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUTING', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0022-283: hyp=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUT', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0023-284: ref=['THEIR', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0023-284: hyp=['THERE', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0024-285: ref=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0024-285: hyp=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0025-286: ref=['THEIR', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0025-286: hyp=['THERE', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0026-287: ref=['IN', 'THE', 'SHOOTING', 'OF', 'SHERIFF', 'JONES', 'IN', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'BEEDER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENSES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0026-287: hyp=['IN', 'THE', 'SHOOTING', 'OF', "SHERIFF'S", 'JONES', 'AND', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'READER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENCES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'THE', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0027-288: ref=['FOOTNOTE', 'SUMNER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] +7729-102255-0027-288: hyp=['FOOTNOTE', 'SUMMER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] +7729-102255-0028-289: ref=['PRIVATE', 'PERSONS', 'WHO', 'HAD', 'LEASED', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 'PREVENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0028-289: hyp=['PRIVATE', 'PERSONS', 'WHO', 'AT', 'LEAST', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 'PRESENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0029-290: ref=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 
'REFUSED', 'TO', 'YIELD'] +7729-102255-0029-290: hyp=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 'REFUSED', 'TO', 'YIELD'] +7729-102255-0030-291: ref=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0030-291: hyp=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0031-292: ref=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TWO', 'ARRESTS'] +7729-102255-0031-292: hyp=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TO', 'ARREST'] +7729-102255-0032-293: ref=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0032-293: hyp=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0033-294: ref=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0033-294: hyp=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0034-295: ref=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0034-295: hyp=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0035-296: ref=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0035-296: hyp=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0036-297: ref=['HE', 'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0036-297: hyp=['HE', 'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0037-298: ref=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0037-298: hyp=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 
'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0038-299: ref=['ATCHISON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0038-299: hyp=['ATTITSON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0039-300: ref=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0039-300: hyp=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0040-301: ref=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'EMIGRANT', 'AID', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONORABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0040-301: hyp=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'IMMIGRANT', 'AIDS', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONOURABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0041-302: ref=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGE', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0041-302: hyp=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGED', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0042-303: ref=['RELOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0042-303: hyp=['RE', 'LOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELLING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0043-304: ref=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LAND', 'TO', 'WESTPORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'AND', 'LEAVENWORTH'] +7729-102255-0043-304: hyp=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LANDA', 'WEST', 'PORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'IN', 'LEVINWORTH'] +7729-102255-0044-305: ref=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 'KICKAPOO', 'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEAVENWORTH', 'UNDER', 'A', 'REIGN', 'OF', 'TERROR'] +7729-102255-0044-305: hyp=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 'KICKAPOO', 
'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEVIN', 'WORTH', 'UNDER', 'THE', 'REIGN', 'OF', 'TERROR'] +7729-102255-0045-306: ref=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0045-306: hyp=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0046-307: ref=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +7729-102255-0046-307: hyp=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +8224-274381-0000-1451: ref=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0000-1451: hyp=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0001-1452: ref=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MERCHISTON', 'SON', 'OF', 'THE', 'FAMOUS', 'INVENTOR', 'OF', 'THE', 'LOGARITHMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0001-1452: hyp=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MURCHESON', 'SON', 'OF', 'THE', 'FAMOUS', 'INVENTOR', 'OF', 'THE', 'LOGARTHEMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0002-1453: ref=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCULCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 
'PERHAPS', 'INCLINED'] +8224-274381-0002-1453: hyp=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCALCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 'PERHAPS', 'INCLINED'] +8224-274381-0003-1454: ref=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNSELS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0003-1454: hyp=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNCILS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0004-1455: ref=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELCHO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0004-1455: hyp=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELKO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0005-1456: ref=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARDS', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLEY', 'AND', 'THE', 'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0005-1456: hyp=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARD', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLY', 'AND', 'THE', 'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0006-1457: ref=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0006-1457: hyp=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 
'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0007-1458: ref=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0007-1458: hyp=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0008-1459: ref=['WITH', 'THESE', 'AND', 'SOME', 'REENFORCEMENTS', 'OF', 'THE', 'ATHOLEMEN', 'AND', 'MACDONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0008-1459: hyp=['WITH', 'THESE', 'AND', 'SOME', 'REINFORCEMENTS', 'OF', 'THE', 'ETHEL', 'MEN', 'AND', 'MC', 'DONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0009-1460: ref=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'ARGYLE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INNERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0009-1460: hyp=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'ARGYLE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0010-1461: ref=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'INNERLOCHY', 'AND', 'PRESENTED', 'HIMSELF', 'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'AFFRIGHTENED', 'COVENANTERS'] +8224-274381-0010-1461: hyp=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'IN', 'A', 'LOCKY', 'AND', 'PRESENTED', 'HIMSELF', 'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'A', 'FRIGHTENED', 'COVENANTERS'] +8224-274381-0011-1462: ref=['HIS', 'CONDUCT', 'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0011-1462: hyp=['HIS', 'CONDUCT', 'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0012-1463: ref=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 
'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0012-1463: hyp=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0013-1464: ref=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0013-1464: hyp=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0014-1465: ref=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'UNWARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0014-1465: hyp=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'THEN', 'WARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0015-1466: ref=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0015-1466: hyp=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0016-1467: ref=['VALOR', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0016-1467: hyp=['VALO', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0017-1468: ref=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 
'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WHERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274381-0017-1468: hyp=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274384-0000-1437: ref=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', 'ALBANS', 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0000-1437: hyp=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', "ALBAN'S", 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0001-1438: ref=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0001-1438: hyp=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0002-1439: ref=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0002-1439: hyp=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0003-1440: ref=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0003-1440: hyp=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0004-1441: ref=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] +8224-274384-0004-1441: hyp=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] +8224-274384-0005-1442: ref=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 'ORDERED', 'THIS', 'PSALM', 'TO', 'BE', 'SUNG'] +8224-274384-0005-1442: hyp=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 
'ORDERED', 'THIS', 'SUM', 'TO', 'BE', 'SUNG'] +8224-274384-0006-1443: ref=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0006-1443: hyp=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0007-1444: ref=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0007-1444: hyp=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0008-1445: ref=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALLEN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0008-1445: hyp=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALL', 'IN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0009-1446: ref=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0009-1446: hyp=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0010-1447: ref=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0010-1447: hyp=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0011-1448: ref=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0011-1448: hyp=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0012-1449: ref=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0012-1449: hyp=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 
'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0013-1450: ref=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8224-274384-0013-1450: hyp=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8230-279154-0000-617: ref=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0000-617: hyp=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0001-618: ref=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'CORRELATES', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0001-618: hyp=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'COROTS', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0002-619: ref=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERYONE', 'WOULD', 'AGREE', 'THAT', 'IS', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0002-619: hyp=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERY', 'ONE', 'WOULD', 'AGREE', 'THAT', 'IT', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0003-620: ref=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0003-620: hyp=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0004-621: ref=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'A', 'POPULATION', 'THAT', 'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0004-621: hyp=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'THE', 'POPULATION', 'THAT', 'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0005-622: ref=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0005-622: hyp=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 
'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0006-623: ref=['THE', 'BEHAVIOURIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOUR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0006-623: hyp=['THE', 'BEHAVIORIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0007-624: ref=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIOURIST', 'FEELS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0007-624: hyp=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIOURISTS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0008-625: ref=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0008-625: hyp=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0009-626: ref=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0009-626: hyp=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0010-627: ref=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0010-627: hyp=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0011-628: ref=['SOME', 'IMAGES', 'LIKE', 'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0011-628: hyp=['SOME', 'IMAGES', 'LIKE', 'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0012-629: ref=['FAMILIARITY', 'IS', 'A', 'FEELING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0012-629: hyp=['FAMILIARITY', 'IS', 'A', 'FILLING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0013-630: ref=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 
'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0013-630: hyp=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0014-631: ref=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0014-631: hyp=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: ref=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: hyp=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0016-633: ref=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0016-633: hyp=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0017-634: ref=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FEELING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0017-634: hyp=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FILLING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0018-635: ref=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] +8230-279154-0018-635: hyp=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] +8230-279154-0019-636: ref=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 'SUCH', 'FEELINGS', 'MUST', 'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0019-636: hyp=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 'SUCH', 'FEELINGS', 'MUST', 
'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0020-637: ref=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0020-637: hyp=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0021-638: ref=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0021-638: hyp=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0022-639: ref=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0022-639: hyp=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0023-640: ref=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0023-640: hyp=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0024-641: ref=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0024-641: hyp=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0025-642: ref=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0025-642: hyp=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0026-643: ref=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 'IN', 'THEORY'] +8230-279154-0026-643: hyp=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 'IN', 'THEORY'] +8230-279154-0027-644: ref=['A', 'GRAMOPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 
'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMOPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0027-644: hyp=['A', 'GRAMMAPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMIPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0028-645: ref=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0028-645: hyp=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0029-646: ref=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0029-646: hyp=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0030-647: ref=["SEMON'S", 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0030-647: hyp=['SIMMONS', 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0031-648: ref=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0031-648: hyp=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0032-649: ref=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0032-649: hyp=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0033-650: ref=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0033-650: hyp=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0034-651: ref=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEADS', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 
'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0034-651: hyp=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEAVES', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0035-652: ref=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0035-652: hyp=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0036-653: ref=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0036-653: hyp=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0037-654: ref=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0037-654: hyp=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0038-655: ref=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 'THE', 'WORD'] +8230-279154-0038-655: hyp=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 'THE', 'WORD'] +8230-279154-0039-656: ref=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 'NOT'] +8230-279154-0039-656: hyp=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 'NOT'] +8230-279154-0040-657: ref=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0040-657: hyp=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 
'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0041-658: ref=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0041-658: hyp=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0042-659: ref=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0042-659: hyp=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0043-660: ref=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'A', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 'OCCURRENCE', 'AS', 'RECOGNITION'] +8230-279154-0043-660: hyp=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 'OCCURRENCE', 'AS', 'RECOGNITION'] +8455-210777-0000-972: ref=['I', 'REMAINED', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0000-972: hyp=['I', 'REMAIN', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0001-973: ref=['HAD', 'EVA', 'CRASWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 
'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNIT', 'AND', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0001-973: hyp=['HAD', 'EITHER', 'CRUSWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNITT', 'IN', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0002-974: ref=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0002-974: hyp=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0003-975: ref=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0003-975: hyp=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0004-976: ref=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0004-976: hyp=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0005-977: ref=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0005-977: hyp=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0006-978: ref=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLEAX', 'SAID', 'MY', 'WIFE'] +8455-210777-0006-978: hyp=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLE', 'AX', 'SAID', 'MY', 'WIFE'] +8455-210777-0007-979: ref=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0007-979: hyp=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0008-980: ref=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0008-980: hyp=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0009-981: ref=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] 
+8455-210777-0009-981: hyp=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] +8455-210777-0010-982: ref=['THEIR', 'MASTERS', 'SAID', 'MISSUS', 'NEVERBEND'] +8455-210777-0010-982: hyp=['THEIR', 'MASTER', 'SAID', 'MISSUS', 'NEVERBAND'] +8455-210777-0011-983: ref=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLEAX', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0011-983: hyp=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0012-984: ref=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0012-984: hyp=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0013-985: ref=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0013-985: hyp=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0014-986: ref=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0014-986: hyp=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0015-987: ref=['I', 'AND', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRASWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0015-987: hyp=['I', 'AM', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRESTWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0016-988: ref=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVOURED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BEFIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0016-988: hyp=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVORED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BE', 'FIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0017-989: ref=['MY', 'WIFE', 'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMEN', 'A', 'VERY', 'GOOD', 'DINNER'] +8455-210777-0017-989: hyp=['MY', 'WIFE', 'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMAN', 'A', 'VERY', 'GOOD', 'DINNER'] +8455-210777-0018-990: ref=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'HER'] +8455-210777-0018-990: hyp=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'THAT'] +8455-210777-0019-991: ref=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 
'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0019-991: hyp=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0020-992: ref=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0020-992: hyp=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0021-993: ref=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0021-993: hyp=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0022-994: ref=['OF', 'WHAT', 'MISSUS', 'NEVERBEND', 'HAD', 'GONE', 'THROUGH', 'IN', 'PROVIDING', 'BIRDS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0022-994: hyp=['OF', 'WHAT', 'MISSUS', 'NEVERS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0023-995: ref=['WE', 'SAT', 'WITH', 'THE', 'OFFICERS', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0023-995: hyp=['WE', 'SAT', 'WITH', 'THE', 'OFFICER', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0024-996: ref=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0024-996: hyp=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0025-997: ref=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0025-997: hyp=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0026-998: ref=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0026-998: hyp=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0027-999: ref=['WHEN', 'THIS', 'CAPTAIN', 'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FARTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0027-999: hyp=['WHEN', 'THIS', 'CAPTAIN', 'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FURTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0028-1000: ref=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0028-1000: 
hyp=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0029-1001: ref=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0029-1001: hyp=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0030-1002: ref=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVED', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0030-1002: hyp=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVE', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0031-1003: ref=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'IN', 'ENGLAND', 'STANDS', 'SO', 'HIGH'] +8455-210777-0031-1003: hyp=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'AND', 'IN', 'ENGLAND', 'STAND', 'SO', 'HIGH'] +8455-210777-0032-1004: ref=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0032-1004: hyp=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0033-1005: ref=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0033-1005: hyp=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0034-1006: ref=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TREBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0034-1006: hyp=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TROUBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0035-1007: ref=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINANDO', 'BROWN'] +8455-210777-0035-1007: hyp=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINAND', 'OBROWN'] +8455-210777-0036-1008: ref=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'IN', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0036-1008: hyp=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'OF', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0037-1009: ref=['YOU', 'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0037-1009: hyp=['YOU', 
'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0038-1010: ref=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0038-1010: hyp=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0039-1011: ref=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0039-1011: hyp=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0040-1012: ref=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'HE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0040-1012: hyp=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'WE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0041-1013: ref=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0041-1013: hyp=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0042-1014: ref=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLEAX'] +8455-210777-0042-1014: hyp=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLE', 'AXE'] +8455-210777-0043-1015: ref=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0043-1015: hyp=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0044-1016: ref=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0044-1016: hyp=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0045-1017: ref=['THEN', 'THE', 'REPUBLIC', 'OF', 'BRITANNULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] +8455-210777-0045-1017: hyp=['THEN', 'THE', 'REPUBLIC', 'OF', 'BRITAIN', 'NULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] +8455-210777-0046-1018: ref=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', "IT'S", 'THERE', 'SAID', 'CAPTAIN', 'BATTLEAX', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0046-1018: hyp=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', 'TO', 'THERE', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 
'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0047-1019: ref=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0047-1019: hyp=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0048-1020: ref=['WHAT', 'WOULD', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0048-1020: hyp=['WHAT', 'WILL', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0049-1021: ref=['LIEUTENANT', 'CROSSTREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0049-1021: hyp=['LIEUTENANT', 'CROSS', 'TREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0050-1022: ref=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0050-1022: hyp=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0051-1023: ref=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'CENTRED', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0051-1023: hyp=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'SENATE', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0052-1024: ref=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0052-1024: hyp=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0053-1025: ref=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0053-1025: hyp=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0054-1026: ref=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0054-1026: hyp=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0055-1027: ref=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITANNULA'] +8455-210777-0055-1027: hyp=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITAIN', 'NULA'] +8455-210777-0056-1028: ref=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0056-1028: hyp=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0057-1029: ref=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITANNULISTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATIVE', 'ASSEMBLY', 'AND', 'HENCE', 'HAVE', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0057-1029: 
hyp=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITAIN', 'ULYSTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATE', 'ASSEMBLY', 'AND', 'HENCE', 'HAS', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0058-1030: ref=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0058-1030: hyp=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0059-1031: ref=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNMENT'] +8455-210777-0059-1031: hyp=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNOR'] +8455-210777-0060-1032: ref=['THE', 'JOHN', 'BRIGHT', 'IS', 'ARMED', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITANNULA', 'SHOULD', 'PREVAIL'] +8455-210777-0060-1032: hyp=['THE', 'JOHN', 'BRIGHT', 'IS', 'ARM', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITAIN', 'EULO', 'SHOULD', 'PREVAIL'] +8455-210777-0061-1033: ref=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTH', 'WEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBEND', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0061-1033: hyp=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTHWEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBEIN', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0062-1034: ref=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0062-1034: hyp=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0063-1035: ref=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0063-1035: hyp=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0064-1036: ref=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 'THE', 'GOVERNMENT'] +8455-210777-0064-1036: hyp=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 'THE', 'GOVERNMENT'] +8455-210777-0065-1037: ref=['I', 'SHALL', 'BE', 'HAPPY', 'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0065-1037: hyp=['I', 'SHALL', 'BE', 'HAPPY', 'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0066-1038: ref=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0066-1038: hyp=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0067-1039: ref=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] 
+8455-210777-0067-1039: hyp=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] +8455-210777-0068-1040: ref=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0068-1040: hyp=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0069-1041: ref=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'BATTLEAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0069-1041: hyp=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'ADELAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0070-1042: ref=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8455-210777-0070-1042: hyp=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8463-287645-0000-543: ref=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0000-543: hyp=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0001-544: ref=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0001-544: hyp=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0002-545: ref=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'HE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAN', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALMENT', 'WHILE', 'UNDER', 'HOLLAN', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0002-545: hyp=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'SHE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAND', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALIMENT', 'WHILE', 'UNDER', 'HOLLAND', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0003-546: ref=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAN'] +8463-287645-0003-546: hyp=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAND'] +8463-287645-0004-547: ref=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] +8463-287645-0004-547: hyp=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] +8463-287645-0005-548: ref=['A', 'FEW', 'YEARS', 'BACK', 'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'COLD', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0005-548: hyp=['A', 'FEW', 'YEARS', 'BACK', 
'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'CALLED', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0006-549: ref=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0006-549: hyp=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'THE', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0007-550: ref=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UP', 'STAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HIT', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0007-550: hyp=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UPSTAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HID', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0008-551: ref=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0008-551: hyp=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0009-552: ref=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0009-552: hyp=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0010-553: ref=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0010-553: hyp=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0011-554: ref=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] +8463-287645-0011-554: hyp=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] +8463-287645-0012-555: ref=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0012-555: hyp=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0013-556: ref=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0013-556: 
hyp=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0014-557: ref=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-287645-0014-557: hyp=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-294825-0000-558: ref=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0000-558: hyp=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0001-559: ref=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHERWORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0001-559: hyp=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHER', 'WORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0002-560: ref=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0002-560: hyp=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0003-561: ref=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTURISTIC', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0003-561: hyp=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTURE', 'STICK', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0004-562: ref=['IN', 'ALL', 'THE', 'NOVEL', 'HAD', 'A', 'DIFFICULT', 'GESTATION'] +8463-294825-0004-562: hyp=['IN', 'ALL', 'THE', 'NOVEL', 'HEAD', 'A', 'DIFFICULT', 'JUSTATION'] +8463-294825-0005-563: ref=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0005-563: hyp=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0006-564: ref=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINED', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0006-564: hyp=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINING', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0007-565: ref=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERNE'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0007-565: hyp=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 
'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERNE'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0008-566: ref=['BUT', 'MUCH', 'OF', 'THE', "NOVEL'S", 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0008-566: hyp=['BUT', 'MUCH', 'OF', 'THE', 'NOVELS', 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0009-567: ref=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', "HE'S", 'A', 'FIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'ARE', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCAUST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0009-567: hyp=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', 'HE', 'IS', 'A', 'FRIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'OR', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCOST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0010-568: ref=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0010-568: hyp=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0011-569: ref=["HE'S", 'SWIFTLY', 'PUNISHED'] +8463-294825-0011-569: hyp=['HE', 'IS', 'SWIFTLY', 'PUNISHED'] +8463-294825-0012-570: ref=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0012-570: hyp=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0013-571: ref=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'BEEBE', 'POLAR', 'TRAVELER', 'SIR', 'ERNEST', 'SHACKLETON'] +8463-294825-0013-571: hyp=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'B', 'POLAR', 'TRAVELLERS', 'ARE', 'ERNEST', 'SHACKLETON'] +8463-294825-0014-572: ref=['FATHOM', 'SIX', 'FEET'] +8463-294825-0014-572: hyp=['FATHOM', 'SIX', 'FEET'] +8463-294825-0015-573: ref=['GRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0015-573: hyp=['GRAHAM', 'ROUGHLY', 'WON', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: ref=['MILLIGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSAND', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: hyp=['MILAGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSAND', 'OF', 'AN', 'OUNCE'] +8463-294825-0017-575: ref=['LITER', 'ROUGHLY', 'ONE', 'QUART'] +8463-294825-0017-575: hyp=['LATER', 'ROUGHLY', 'WON', 'COURT'] +8463-294825-0018-576: ref=['METER', 'ROUGHLY', 'ONE', 
'YARD', 'THREE', 'INCHES'] +8463-294825-0018-576: hyp=['METER', 'ROUGHLY', 'ONE', 'YARD', 'THREE', 'INCHES'] +8463-294825-0019-577: ref=['MILLIMETER', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294825-0019-577: hyp=['MILLIMETRE', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294828-0000-578: ref=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0000-578: hyp=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0001-579: ref=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'NO', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTHWEST', 'PASSAGE'] +8463-294828-0001-579: hyp=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'KNOW', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTHWEST', 'PASSAGE'] +8463-294828-0002-580: ref=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'A', 'REST'] +8463-294828-0002-580: hyp=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'A', 'REST'] +8463-294828-0003-581: ref=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0003-581: hyp=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BATTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0004-582: ref=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0004-582: hyp=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0005-583: ref=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0005-583: hyp=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0006-584: ref=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0006-584: hyp=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0007-585: ref=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'THE', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALEEN', 'WHALE'] +8463-294828-0007-585: hyp=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'A', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALEEN', 'WHALE'] +8463-294828-0008-586: ref=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LAD'] +8463-294828-0008-586: hyp=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LAD'] +8463-294828-0009-587: ref=['NOT', 'ONCE', 'DID', 'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'A', 'JOURNEY'] +8463-294828-0009-587: hyp=['NOT', 'ONCE', 'DID', 
'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'THE', 'JOURNEY'] +8463-294828-0010-588: ref=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUITCASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0010-588: hyp=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUIT', 'CASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0011-589: ref=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0011-589: hyp=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0012-590: ref=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0012-590: hyp=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'THAT', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0013-591: ref=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TIRESOME'] +8463-294828-0013-591: hyp=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TO', 'HIRESUME'] +8463-294828-0014-592: ref=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0014-592: hyp=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0015-593: ref=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TIME', 'CONSEIL', 'APPEARED'] +8463-294828-0015-593: hyp=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TON', 'CONSEIL', 'APPEARED'] +8463-294828-0016-594: ref=['DID', 'MASTER', 'SUMMON', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0016-594: hyp=['DEAD', 'MASTER', 'SUMMONED', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0017-595: ref=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0017-595: hyp=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELLING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0018-596: ref=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0018-596: hyp=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0019-597: ref=['ANYHOW', "WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 'FRANCE'] +8463-294828-0019-597: hyp=['ANYHOW', "WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 'FRANCE'] +8463-294828-0020-598: ref=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0020-598: hyp=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0021-599: ref=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0021-599: hyp=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0022-600: ref=["WE'RE", 'LEAVING', 
'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0022-600: hyp=["WE'RE", 'LEAVING', 'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0023-601: ref=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0023-601: hyp=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0024-602: ref=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0024-602: hyp=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0025-603: ref=['BUT', "WE'RE", 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0025-603: hyp=['BUT', 'WERE', 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0026-604: ref=['WE', 'HAVE', 'A', 'COMMANDER', "WHO'S", 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0026-604: hyp=['WE', 'HAVE', 'A', 'COMMANDER', 'WHOSE', 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0027-605: ref=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0027-605: hyp=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0028-606: ref=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABIRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0028-606: hyp=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0029-607: ref=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0029-607: hyp=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0030-608: ref=['I', 'ASKED', 'FOR', 'COMMANDER', 'FARRAGUT'] +8463-294828-0030-608: hyp=['I', 'ASKED', 'FOR', 'COMMANDER', 'FERRAGUT'] +8463-294828-0031-609: ref=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTERDECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0031-609: hyp=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTER', 'DECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0032-610: ref=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] +8463-294828-0032-610: hyp=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] +8463-294828-0033-611: ref=['I', 'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', 'OFFICERS', 'MESS'] +8463-294828-0033-611: hyp=['I', 'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', "OFFICER'S", 'MASTS'] +8463-294828-0034-612: ref=["WE'LL", 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0034-612: hyp=['WILL', 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0035-613: ref=['AND', 'SO', 'IF', "I'D", 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 
'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0035-613: hyp=['AND', 'SO', 'IF', 'I', 'HAD', 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0036-614: ref=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0036-614: hyp=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0037-615: ref=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0037-615: hyp=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0038-616: ref=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8463-294828-0038-616: hyp=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8555-284447-0000-2299: ref=['THEN', 'HE', 'RUSHED', 'DOWN', 'STAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'IF', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0000-2299: hyp=['THEN', 'HE', 'RUSHED', 'DOWNSTAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'AT', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0001-2300: ref=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', "I'VE", 'HAD', 'MY', 'COFFEE', 'AND', 'OATMEAL', "I'LL", 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 'PATCH', 'HIM'] +8555-284447-0001-2300: hyp=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', 'I', 'HAD', 'MY', 'COFFEE', 'AN', 'OATMEAL', 'I', 'WILL', 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 'PAT', 'HIM'] +8555-284447-0002-2301: ref=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'O', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', "I'VE", 'HAD', "CONSID'BLE", 'EXERCISE', 'THIS', 'MORNIN', 'AND', "I'M", 'ALL', 'READY', 'FOR', 'BREAKFAS'] +8555-284447-0002-2301: hyp=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'OF', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', 'I', 'HAVE', 'HAD', 'CONSIDERABLE', 'EXERCISE', 'THIS', 'MORNIN', 'AN', "I'M", 'ALREADY', 'FOR', 'BREAKFAST'] +8555-284447-0003-2302: ref=['BUT', "CAP'N", 
'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0003-2302: hyp=['BUT', "CAP'N", 'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0004-2303: ref=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0004-2303: hyp=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0005-2304: ref=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0005-2304: hyp=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0006-2305: ref=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THRONE', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'FOR', 'THE', 'SNUBNOSED', 'PRINCESSES'] +8555-284447-0006-2305: hyp=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THROWN', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'WITH', 'A', 'SNUB', 'NOSED', 'PRINCESSES'] +8555-284447-0007-2306: ref=['THEREFORE', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANYONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0007-2306: hyp=['THEY', 'ARE', 'FOR', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANY', 'ONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0008-2307: ref=['RICH', 'JEWELS', 'OF', 'BLUE', 'STONES', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THEY', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0008-2307: hyp=['RICH', 'JEWELS', 'OF', 'BLUESTS', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THEY', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0009-2308: ref=['MORNIN', 'GIRLS', 'HOPE', 'YE', 'FEEL', 'AS', 'WELL', 'AS', 'YE', 'LOOK'] +8555-284447-0009-2308: hyp=['MORNING', 'GIRLS', 'OPIEVIL', 'AS', 'WELL', 'AS', 'YOU', 'LOOK'] +8555-284447-0010-2309: ref=['CONTROL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANYONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', "YOU'LL", 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0010-2309: hyp=['CONTROLL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANY', 'ONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', 'YOU', 'WILL', 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0011-2310: ref=['SUPPOSE', "IT'S", 'A', 'FRIEND'] +8555-284447-0011-2310: hyp=['SUPPOSE', "IT'S", 'OF', 'BRAND'] +8555-284447-0012-2311: ref=['THE', 'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0012-2311: hyp=['THE', 
'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0013-2312: ref=['WHY', 'YOU', 'SAID', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THIS', 'BILLYGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0013-2312: hyp=['WHY', 'YOU', 'SIT', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THE', 'SPILLIGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0014-2313: ref=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0014-2313: hyp=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0015-2314: ref=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', "THEY'RE", 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'GOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'EM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0015-2314: hyp=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', 'THEY', 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'BOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'THEM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0016-2315: ref=['FINE', 'GLORIOUS'] +8555-284447-0016-2315: hyp=['FINE', 'GLORIOUS'] +8555-284447-0017-2316: ref=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0017-2316: hyp=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0018-2317: ref=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0018-2317: hyp=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0019-2318: ref=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUTTED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', 'KING', 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 
'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0019-2318: hyp=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUDDED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', "KING'S", 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0020-2319: ref=['THE', "GOAT'S", 'WARLIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0020-2319: hyp=['THE', 'GOATS', 'WORE', 'LIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0021-2320: ref=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERYONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0021-2320: hyp=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERY', 'ONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0022-2321: ref=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MATE', 'AS', 'SAVED', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0022-2321: hyp=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MADE', 'TO', 'SEE', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0023-2322: ref=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEIN', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', "I'M", 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUS', 'ONE', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', "FEELIN'S"] +8555-284447-0023-2322: hyp=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEING', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', 'I', 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUST', 'SWUNG', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', 'FEELINS'] +8555-284447-0024-2323: ref=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284447-0024-2323: hyp=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284449-0000-2324: ref=['SO', 'THEY', 'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLASTS', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'ASSEMBLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0000-2324: hyp=['SO', 'THEY', 'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLAST', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'A', 'SIMPLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0001-2325: ref=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0001-2325: hyp=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 
'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0002-2326: ref=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORALIE', 'AND', 'CAPTAIN', 'TINTINT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0002-2326: hyp=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORLIE', 'AND', 'CAPTAIN', 'TINTANT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0003-2327: ref=['WHEN', 'THE', 'BLUESKINS', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVORITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0003-2327: hyp=['WHEN', 'THE', 'BLUESKIN', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVORITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0004-2328: ref=['SINCE', 'LAST', 'THURSDAY', 'I', 'GHIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0004-2328: hyp=['SINCE', 'LAST', 'THURSDAY', 'I', 'GIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0005-2329: ref=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0005-2329: hyp=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0006-2330: ref=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0006-2330: hyp=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0007-2331: ref=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AN', 'ENJOY', 'THE', 'GRAND', 'FEAST', "THAT'S", 'BEING', 'COOKED', "I'M", 'NEARLY', 'STARVED', 'MYSELF', 'FOR', 'THIS', 'CONQUERIN', 'KINGDOMS', 'IS', 'HARD', 'WORK'] +8555-284449-0007-2331: hyp=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AND', 'ENJOY', 'THE', 'GREAT', 'FEAST', 'AT', 'BEING', 'COOKED', "I'M", 'NEARLY', 'STORM', 'MYSELF', 'FOR', 'THIS', 'CONQUERING', "KINGDOM'S", 'IS', 'HARD', 'WORK'] +8555-284449-0008-2332: ref=['THEN', 'SHE', 'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0008-2332: hyp=['THEN', 'SHE', 'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0009-2333: ref=['YOU', 'ARE', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0009-2333: hyp=['YOU', 'ARE', 'A', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0010-2334: ref=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0010-2334: hyp=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0011-2335: ref=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 
'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0011-2335: hyp=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0012-2336: ref=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONORABLE', 'GOAT', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0012-2336: hyp=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONED', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0013-2337: ref=['SCUSE', 'ME', 'SAID', 'TROT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0013-2337: hyp=['EXCUSE', 'ME', 'SAID', 'SHOT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0014-2338: ref=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0014-2338: hyp=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0015-2339: ref=["I'LL", 'NOT', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0015-2339: hyp=['HOW', 'NOW', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0016-2340: ref=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0016-2340: hyp=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0017-2341: ref=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] +8555-284449-0017-2341: hyp=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] +8555-284449-0018-2342: ref=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 
'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0018-2342: hyp=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0019-2343: ref=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'AND', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0019-2343: hyp=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'AND', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0020-2344: ref=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-284449-0020-2344: hyp=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-292519-0000-2283: ref=['BRIGHTER', 'THAN', 'EARLY', "DAWN'S", 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0000-2283: hyp=['BRIGHTER', 'THAN', 'EARLY', 'DAWNS', 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0001-2284: ref=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', 'GARDENS', 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'LILY', 'PLANTS'] +8555-292519-0001-2284: hyp=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', "GARDEN'S", 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'A', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'THE', 'LILY', 'PLANTS'] +8555-292519-0002-2285: ref=['VENICE'] +8555-292519-0002-2285: hyp=['VENICE'] +8555-292519-0003-2286: ref=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEACHED', "KING'S", 'GALLEY', 'WHOSE', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0003-2286: hyp=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEECHED', "KING'S", 'GALLEY', 'WHO', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0004-2287: ref=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0004-2287: hyp=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0005-2288: ref=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 
'GLOW'] +8555-292519-0005-2288: hyp=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 'GLOW'] +8555-292519-0006-2289: ref=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0006-2289: hyp=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0007-2290: ref=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0007-2290: hyp=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0008-2291: ref=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MEN', 'THE', 'GRINNING', 'MEN', 'PASS'] +8555-292519-0008-2291: hyp=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MAN', 'THE', 'GRINNING', 'MAN', 'PASS'] +8555-292519-0009-2292: ref=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WANDER', 'IN', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'YE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0009-2292: hyp=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WANDER', 'IN', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'HE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0010-2293: ref=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0010-2293: hyp=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0011-2294: ref=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0011-2294: hyp=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0012-2295: ref=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0012-2295: hyp=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0013-2296: ref=['THAT', 'WAS', 'BUT', 'RUSTLING', 'OF', 'DRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0013-2296: hyp=['THAT', 'WAS', 'BUT', 'RUSTLING', 'OF', 'TRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0014-2297: ref=['SHE', 'WAS', 'ALONE', 'THAT', 'NIGHT'] +8555-292519-0014-2297: hyp=['SHE', 'WAS', 'ALONE', 'THAT', 'NIGHT'] +8555-292519-0015-2298: ref=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0015-2298: hyp=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +908-157963-0000-1321: ref=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] 
+908-157963-0000-1321: hyp=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONNA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] +908-157963-0001-1322: ref=['O', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0001-1322: hyp=['OH', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0002-1323: ref=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0002-1323: hyp=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0003-1324: ref=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0003-1324: hyp=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0004-1325: ref=['THEL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', 'INFANTS', 'FACE'] +908-157963-0004-1325: hyp=['FELL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', "INFANT'S", 'FACE'] +908-157963-0005-1326: ref=['LIKE', 'THE', 'DOVES', 'VOICE', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0005-1326: hyp=['LIKE', 'THE', "DOVE'S", 'BOYS', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0006-1327: ref=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0006-1327: hyp=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0007-1328: ref=['THE', 'LILLY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERD', 'THE', 'LOVELY', 'MAID', 'AND', 'SAID', 'I', 'AM', 'A', 'WATRY', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PERCHES', 'ON', 'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEW', 'BORN', 'LILY', 'FLOWER'] +908-157963-0007-1328: hyp=['THE', 'LILY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERED', 'THE', 'LOVELY', 'MAIDEN', 'SAID', 'I', 'AM', 'A', 'WATRIE', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PURCHASE', 'ON', 'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEWBORN', 'LILY', 'FLOWER'] +908-157963-0008-1329: ref=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALL', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', 'SUMMERS', 'HEAT', 
'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULD', 'THEL', 'COMPLAIN'] +908-157963-0008-1329: hyp=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALT', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', "SUMMER'S", 'HEAT', 'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULDST', 'THOU', 'COMPLAIN'] +908-157963-0009-1330: ref=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VALES', 'OF', 'HAR', 'UTTER', 'A', 'SIGH'] +908-157963-0009-1330: hyp=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VEILS', 'OF', 'HAR', 'UTTER', 'A', 'SIGH'] +908-157963-0010-1331: ref=['SHE', 'CEASD', 'AND', 'SMILD', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0010-1331: hyp=['SHE', 'CEASED', 'AND', 'SMILED', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0011-1332: ref=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0011-1332: hyp=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0012-1333: ref=['BUT', 'THEL', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0012-1333: hyp=['BUT', 'THOUGH', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0013-1334: ref=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THRO', 'THE', 'HUMID', 'AIR'] +908-157963-0013-1334: hyp=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THROUGH', 'THE', 'HUMAN', 'AIR'] +908-157963-0014-1335: ref=['DESCEND', 'O', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'THEL'] +908-157963-0014-1335: hyp=['DESCEND', 'A', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'FELL'] +908-157963-0015-1336: ref=['O', 'LITTLE', 'CLOUD', 'THE', 'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'THEL', 'IS', 'LIKE', 'TO', 'THEE'] +908-157963-0015-1336: hyp=['O', 'LITTLE', 'CLOUD', 'THE', 'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'FELL', 'IS', 'LIKE', 'TO', 'THEE'] +908-157963-0016-1337: ref=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0016-1337: hyp=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0017-1338: ref=['THE', 'CLOUD', 'THEN', 'SHEWD', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', "EMERG'D"] +908-157963-0017-1338: hyp=['THE', 'CLOUD', 'THEN', 'SHOWED', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', 'EMERGED'] 
+908-157963-0018-1339: ref=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0018-1339: hyp=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0019-1340: ref=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'HOLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DEW', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0019-1340: hyp=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'WHOLLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DO', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0020-1341: ref=['TILL', 'WE', 'ARISE', "LINK'D", 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0020-1341: hyp=['TILL', 'WE', 'ARISE', 'LINKED', 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0021-1342: ref=['LIVES', 'NOT', 'ALONE', 'NOR', 'OR', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0021-1342: hyp=['LIVES', 'NOT', 'ALONE', 'NOR', 'OF', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0022-1343: ref=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0022-1343: hyp=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0023-1344: ref=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', 'LILLYS', 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUD', 'SAILD', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0023-1344: hyp=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', "LILY'S", 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUD', 'SAILED', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0024-1345: ref=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0024-1345: hyp=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0025-1346: ref=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', 'MOTHERS', 'SMILES'] +908-157963-0025-1346: hyp=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', 'MOTHERS', 'SMILES'] +908-157963-0026-1347: ref=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0026-1347: hyp=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0027-1348: ref=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 
'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0027-1348: hyp=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0028-1349: ref=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWRING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0028-1349: hyp=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWERING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0029-1350: ref=['WHY', 'A', 'TONGUE', "IMPRESS'D", 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0029-1350: hyp=['WHY', 'A', 'TONGUE', 'IMPRESSED', 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0030-1351: ref=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-157963-0030-1351: hyp=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-31957-0000-1352: ref=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0000-1352: hyp=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0001-1353: ref=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'PRODIGAL', 'INWARD', 'JOY'] +908-31957-0001-1353: hyp=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'CHRONICAL', 'INWARD', 'JOY'] +908-31957-0002-1354: ref=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0002-1354: hyp=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0003-1355: ref=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BRAKE', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0003-1355: hyp=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BREAK', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0004-1356: ref=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0004-1356: hyp=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0005-1357: ref=['ALAS', 'I', 'HAVE', 'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0005-1357: hyp=['ALAS', 'I', 'HAVE', 'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0006-1358: ref=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0006-1358: hyp=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0007-1359: ref=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0007-1359: hyp=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0008-1360: ref=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 
'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEARED', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0008-1360: hyp=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEAR', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0009-1361: ref=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0009-1361: hyp=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0010-1362: ref=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0010-1362: hyp=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0011-1363: ref=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0011-1363: hyp=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0012-1364: ref=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0012-1364: hyp=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0013-1365: ref=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGELS', 'SPEAK'] +908-31957-0013-1365: hyp=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGEL', 'SPEAK'] +908-31957-0014-1366: ref=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0014-1366: hyp=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0015-1367: ref=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', "LOVE'S", 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PRECEDE', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IN', 'PERFECT', 'PURPLE', 'STATE', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0015-1367: hyp=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', 'LOVES', 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PROCEED', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IMPERFECT', 'PURPLE', 'STATE', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0016-1368: ref=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0016-1368: hyp=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0017-1369: ref=['MUSSULMANS', 'AND', 'GIAOURS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0017-1369: hyp=['MUSSULMANS', 'AND', 'GUYRS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0018-1370: ref=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0018-1370: hyp=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0019-1371: ref=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0019-1371: hyp=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 
'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0020-1372: ref=['I', 'THANK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0020-1372: hyp=['I', 'THINK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0021-1373: ref=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0021-1373: hyp=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0022-1374: ref=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPEARLED'] +908-31957-0022-1374: hyp=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPELLED'] +908-31957-0023-1375: ref=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0023-1375: hyp=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0024-1376: ref=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GRIEFS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0024-1376: hyp=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GREEDS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0025-1377: ref=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] +908-31957-0025-1377: hyp=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] diff --git a/log/greedy_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..031e8f17717e58657e78c41a0e5c948f3d79e3ef --- /dev/null +++ b/log/greedy_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,5878 @@ +1688-142285-0000-1948: ref=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 
'STEEL', 'ANON'] +1688-142285-0000-1948: hyp=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 'STEEL', 'ANON'] +1688-142285-0001-1949: ref=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUEST', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0001-1949: hyp=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUEST', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0002-1950: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0002-1950: hyp=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0003-1951: ref=['I', 'REALLY', 'LIKED', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0003-1951: hyp=['I', 'REALLY', 'LIKE', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0004-1952: ref=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKED', 'BEST', 'OF', 'ALL'] +1688-142285-0004-1952: hyp=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKE', 'BEST', 'OF', 'ALL'] +1688-142285-0005-1953: ref=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'OF', 'BEING', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0005-1953: hyp=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'HAVE', 'BEEN', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0006-1954: ref=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'IN', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0006-1954: hyp=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0007-1955: ref=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PARTS', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0007-1955: hyp=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PART', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0008-1956: ref=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0008-1956: hyp=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0009-1957: ref=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0009-1957: hyp=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0010-1958: ref=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0010-1958: hyp=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0011-1959: ref=['ALL', 
'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0011-1959: hyp=['ALL', 'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0012-1960: ref=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0012-1960: hyp=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0013-1961: ref=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISSUS', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TARDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0013-1961: hyp=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISTER', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TIDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0014-1962: ref=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0014-1962: hyp=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0015-1963: ref=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0015-1963: hyp=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0016-1964: ref=['OH', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0016-1964: hyp=['O', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0017-1965: ref=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0017-1965: hyp=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0018-1966: ref=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THEY', 'WERE', 'POOR', 'BECAUSE', 'THEY', 'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0018-1966: hyp=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THERE', 'WERE', 'POOR', 'BECAUSE', 'THEY', 'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0019-1967: ref=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 'THAT'] +1688-142285-0019-1967: hyp=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 'THAT'] +1688-142285-0020-1968: ref=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0020-1968: hyp=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0021-1969: ref=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0021-1969: hyp=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 
'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0022-1970: ref=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0022-1970: hyp=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0023-1971: ref=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0023-1971: hyp=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0024-1972: ref=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0024-1972: hyp=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0025-1973: ref=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0025-1973: hyp=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0026-1974: ref=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0026-1974: hyp=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0027-1975: ref=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0027-1975: hyp=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0028-1976: ref=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0028-1976: hyp=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0029-1977: ref=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0029-1977: hyp=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0030-1978: ref=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'HER', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 'UPON', 'HER', 'SYMPATHY'] +1688-142285-0030-1978: hyp=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'A', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 'UPON', 'HER', 'SYMPATHY'] +1688-142285-0031-1979: ref=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LEFT', 'IT', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'ENDURE', 'SEVERE', 'BODILY', 'SUFFERING'] +1688-142285-0031-1979: hyp=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LIFTED', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'INDURE', 'SEVERE', 'BODILY', 'SUFFERING'] 
+1688-142285-0032-1980: ref=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0032-1980: hyp=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0033-1981: ref=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0033-1981: hyp=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0034-1982: ref=['A', 'SERVANT', 'TO', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'HER', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTION', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0034-1982: hyp=['A', 'SERVANT', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'THE', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTIONS', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0035-1983: ref=['VISITING', 'REGISTER', 'OFFICES', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0035-1983: hyp=['VISITING', 'REGISTER', 'OFFICERS', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0036-1984: ref=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 'SPEAK', 'TO', 'HER'] +1688-142285-0036-1984: hyp=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 'SPEAK', 'TO', 'HER'] +1688-142285-0037-1985: ref=['WELL', 'BESSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0037-1985: hyp=['WELL', 'BUSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0038-1986: ref=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YO', 'KNOW', 'WHAT', 'THAT', 'MEANS'] +1688-142285-0038-1986: hyp=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YOU', 'KNOW', 'WHAT', 'THAT', 'MEANS'] +1688-142285-0039-1987: ref=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0039-1987: hyp=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0040-1988: ref=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BY', 'COUGHING', "O'NIGHTS", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'O', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'O', 'BEULAH', 'AND', 'WHEN', 'I', 'THINK', 
"I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0040-1988: hyp=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BUT', 'COUGHING', 'A', "KNIGHT'S", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'OF', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'OF', 'BOOLA', 'AND', 'WHEN', 'I', 'THINK', "I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0041-1989: ref=['MARGARET', 'TURNED', 'ROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0041-1989: hyp=['MARGARET', 'TURNED', 'AROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0042-1990: ref=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0042-1990: hyp=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0043-1991: ref=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0043-1991: hyp=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0044-1992: ref=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0044-1992: hyp=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0045-1993: ref=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0045-1993: hyp=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0046-1994: ref=['NOUGHT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0046-1994: hyp=['NOT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0047-1995: ref=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0047-1995: hyp=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0048-1996: ref=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'AT', 'MILTON'] +1688-142285-0048-1996: hyp=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'IN', 'MILTON'] +1688-142285-0049-1997: ref=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0049-1997: hyp=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0050-1998: ref=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 'LESS', 'BUSY', 'MAY', 'I', 'GO', 'WITH', 'YOU', 'NOW'] +1688-142285-0050-1998: hyp=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 'LESS', 'BUSY', 'MARGAR', 'WITH', 'YOU', 'NOW'] +1688-142285-0051-1999: ref=['THE', 'SHARPNESS', 'IN', 'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0051-1999: hyp=['THE', 'SHARPNESS', 'IN', 'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0052-2000: ref=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 'COURT', 'OPENING', 'OUT', 'OF', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0052-2000: hyp=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 
'COURT', 'OPENING', 'OUT', 'INTO', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0053-2001: ref=["YO'LL", 'NOT', 'BE', 'DAUNTED', 'IF', "FATHER'S", 'AT', 'HOME', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0053-2001: hyp=["YOU'LL", 'NOT', 'BE', 'DAUNTED', 'IF', 'FATHER', 'SAID', 'HE', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0054-2002: ref=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0054-2002: hyp=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0055-2003: ref=['GASPED', 'BESSY', 'AT', 'LAST'] +1688-142285-0055-2003: hyp=['GASPED', 'BESSIE', 'AT', 'LAST'] +1688-142285-0056-2004: ref=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0056-2004: hyp=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0057-2005: ref=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0057-2005: hyp=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0058-2006: ref=['REMEMBER', 'WHO', 'GAVE', 'IT', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0058-2006: hyp=['REMEMBER', 'WHO', 'GAVE', 'IT', 'TO', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0059-2007: ref=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACHED', 'TO'] +1688-142285-0059-2007: hyp=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACH', 'TOO'] +1688-142285-0060-2008: ref=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0060-2008: hyp=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0061-2009: ref=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0061-2009: hyp=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0062-2010: ref=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0062-2010: hyp=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0063-2011: ref=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0063-2011: hyp=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0064-2012: ref=['BUT', "HOO'S", 'COME', 'AT', 'LAST', 'AND', "HOO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HOO'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HOO', 'KNOWS', 'NOUGHT', 'ABOUT'] +1688-142285-0064-2012: hyp=['BUT', "WHO'S", 'COME', 'AT', 'LAST', 'AND', "WHO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HE'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HE', 'KNOWS', 'NOT', 'ABOUT'] +1688-142285-0065-2013: ref=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0065-2013: hyp=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0066-2014: ref=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 
'MARGARET'] +1688-142285-0066-2014: hyp=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 'MARGARET'] +1688-142285-0067-2015: ref=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0067-2015: hyp=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0068-2016: ref=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MOPED', 'WI', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'FATHER', 'IS', 'NOT', 'THERE'] +1688-142285-0068-2016: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MURKED', 'WITH', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'EITHER', 'IS', 'NOT', 'THERE'] +1688-142285-0069-2017: ref=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEK', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0069-2017: hyp=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEKS', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0070-2018: ref=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0070-2018: hyp=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0071-2019: ref=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0071-2019: hyp=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0072-2020: ref=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0072-2020: hyp=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0073-2021: ref=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0073-2021: hyp=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0074-2022: ref=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'AT', "MARGARET'S", 'GOWN', "YO'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YO', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0074-2022: hyp=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'THAT', "MARGARET'S", 'GUN', "YOU'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YOU', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0075-2023: ref=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0075-2023: hyp=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0076-2024: ref=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 'THOUGHTFUL'] +1688-142285-0076-2024: hyp=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 'THOUGHTFUL'] +1688-142285-0077-2025: ref=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 'HOME'] +1688-142285-0077-2025: hyp=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 'HOME'] +1688-142285-0078-2026: ref=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0078-2026: hyp=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0079-2027: ref=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0079-2027: hyp=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0080-2028: ref=['SUPPOSE', 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] +1688-142285-0080-2028: hyp=["S'POSE", 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] 
+1688-142285-0081-2029: ref=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEIR', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0081-2029: hyp=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEY', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0082-2030: ref=['I', 'MAY', 'BE', 'THE', 'CINDERELLA', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0082-2030: hyp=['I', 'MAY', 'BE', 'THE', 'CINRILO', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0083-2031: ref=['WHAT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0083-2031: hyp=['BUT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0084-2032: ref=['WHY', 'I', 'WOULD', 'APPLY', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0084-2032: hyp=['WHY', 'I', 'WOULD', 'APPLY', 'IT', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0085-2033: ref=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0085-2033: hyp=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0086-2034: ref=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0086-2034: hyp=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0087-2035: ref=['MISSUS', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0087-2035: hyp=['MISTER', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0088-2036: ref=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0088-2036: hyp=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0089-2037: ref=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0089-2037: hyp=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0090-2038: ref=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0090-2038: hyp=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0091-2039: ref=['MY', 'DEAR', 'SAID', 'MISTER', 'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0091-2039: hyp=['MY', 'DEAR', 'SAID', 'MISTER', 'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0092-2040: ref=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0092-2040: hyp=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0093-2041: ref=['TAKE', 'NOTICE', 'THAT', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', 
"DON'T", 'AGREE', 'TO', 'THOUGH', "YOU'RE", 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0093-2041: hyp=['TAKE', 'NOTICE', 'THAT', 'THIS', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', "DON'T", 'AGREE', 'TO', 'THOUGH', 'YOU', 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0094-2042: ref=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0094-2042: hyp=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0095-2043: ref=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1688-142285-0095-2043: hyp=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1998-15444-0000-2204: ref=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'OR', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'IF', 'CALLED', 'ON', 'TO', 'DO', 'SO'] +1998-15444-0000-2204: hyp=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'AS', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'OF', 'CALLED', 'UNTO'] +1998-15444-0001-2205: ref=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'WAS', 'LAST', 'TAKEN'] +1998-15444-0001-2205: hyp=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'MUST', 'LAST', 'TAKEN'] +1998-15444-0002-2206: ref=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTIS', 'MARKS', 'OF', 'VIOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0002-2206: hyp=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTUS', 'MARKS', 'OF', 'IDOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0003-2207: ref=['IN', 'MAKING', 'A', 'POST', 'MORTEM', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] +1998-15444-0003-2207: hyp=['IN', 'MAKING', 'A', 'POST', 'MODE', 'OF', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] +1998-15444-0004-2208: ref=['THE', 'GUT', 'AND', 'THE', 'GULLET', 'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPILLING', 'ITS', 'CONTENTS'] +1998-15444-0004-2208: hyp=['THE', 'GUT', 'AND', 'THE', 'GALLANT', 'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPINNING', 'ITS', 'CONTENTS'] +1998-15444-0005-2209: ref=['IF', 'THE', 'MEDICAL', 'PRACTITIONER', 'IS', 'IN', 'DOUBT', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOMEONE', 
'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0005-2209: hyp=['IF', 'THE', 'MEDICA', 'PETITIONERS', 'ENDOWED', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOME', 'ONE', 'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0006-2210: ref=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0006-2210: hyp=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0007-2211: ref=['THE', 'BEST', 'EMETIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0007-2211: hyp=['THE', 'BEST', 'AMATIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0008-2212: ref=['THE', 'DOSE', 'FOR', 'AN', 'ADULT', 'IS', 'TEN', 'MINIMS'] +1998-15444-0008-2212: hyp=['THE', 'DAYS', 'WERE', 'INDULGE', 'IS', 'TEN', 'MINIMS'] +1998-15444-0009-2213: ref=['APOMORPHINE', 'IS', 'NOT', 'ALLIED', 'IN', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NARCOTIC', 'POISONING'] +1998-15444-0009-2213: hyp=['EPIMORPHIN', 'IS', 'NOT', 'ALID', 'AND', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NAKOTIC', 'POISONING'] +1998-15444-0010-2214: ref=['TICKLING', 'THE', 'FAUCES', 'WITH', 'A', 'FEATHER', 'MAY', 'EXCITE', 'VOMITING'] +1998-15444-0010-2214: hyp=['TITLING', 'THE', 'FOSSES', 'WITH', 'THE', 'FEATHER', 'MAY', 'EXCITE', 'WAUM', 'ATTITTING'] +1998-15444-0011-2215: ref=['IN', 'USING', 'THE', 'ELASTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SUCKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0011-2215: hyp=['IN', 'USING', 'THE', 'ELECTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SACKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0012-2216: ref=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0012-2216: hyp=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0013-2217: ref=['ANTIDOTES', 'ARE', 'USUALLY', 'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'BY', 'MOUTH', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] +1998-15444-0013-2217: hyp=['AND', 'HE', 'VOTES', 'ARE', 'USUALLY', 'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'THE', 'MOUSE', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] +1998-15444-0014-2218: ref=['IN', 'THE', 'ABSENCE', 'OF', 'A', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTUM'] +1998-15444-0014-2218: hyp=['IN', 'THE', 'ABSENCE', 'OF', 'THE', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTIM'] +1998-15444-0015-2219: ref=['NOTICE', 'THE', 'SMELL', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0015-2219: hyp=['NOTICE', 'THE', 'SMAR', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 
'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0016-2220: ref=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'ALKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STAS', 'OTTO'] +1998-15444-0016-2220: hyp=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'AKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STARS', 'ARE', 'TOO'] +1998-15444-0017-2221: ref=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'ALKALOIDS', 'ARE', 'SOLUBLE', 'IN', 'ALCOHOL', 'AND', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0017-2221: hyp=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'AKES', 'ARE', 'SOLUBLE', 'IN', 'AKELET', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: ref=['THE', 'PURE', 'ALKALOIDS', 'WITH', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CRYSTALLINE', 'FORM', 'ARE', 'SOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: hyp=['THE', 'PURE', 'IKOLOITS', 'WAS', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CHRISTOLINE', 'FORM', 'A', 'SOLUBLE', 'IN', 'EITHER'] +1998-15444-0019-2223: ref=['TWO', 'COOL', 'THE', 'MIXTURE', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FILTRATES'] +1998-15444-0019-2223: hyp=['TWO', 'CUR', 'THE', 'MIXED', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FUR', 'TRADES'] +1998-15444-0020-2224: ref=['THE', 'RESIDUE', 'MAY', 'BE', 'SET', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'IF', 'SUSPECTED', 'EXPEL', 'THE', 'ALCOHOL', 'BY', 'CAREFUL', 'EVAPORATION'] +1998-15444-0020-2224: hyp=['THE', 'RESIDUE', 'MAY', 'BE', 'SET', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'OF', 'SUSPECTED', 'EXPELLED', 'THE', 'ALCOHOLD', 'A', 'CAREFUL', 'EVAPORATION'] +1998-15444-0021-2225: ref=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'RESINOUS', 'AND', 'FATTY', 'MATTERS', 'SEPARATE'] +1998-15444-0021-2225: hyp=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'ZENOUS', 'AND', 'FATTY', 'MATTER', 'SEPARATE'] +1998-15444-0022-2226: ref=['EVAPORATE', 'THE', 'FILTRATE', 'TO', 'A', 'SYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0022-2226: hyp=['EVAPORATE', 'THE', 'FUDGE', 'TO', 'A', 'CYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0023-2227: ref=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'EVAPORATE'] +1998-15444-0023-2227: hyp=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'EVAPORATE'] +1998-15444-0024-2228: ref=['FIVE', 'A', 'PART', 'OF', 'THIS', 'ETHEREAL', 'SOLUTION', 'IS', 'POURED', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOWED', 'TO', 'EVAPORATE'] +1998-15444-0024-2228: hyp=['FIVE', 'A', 'PART', 'OF', 'THIS', 'ASSYRIAL', 'SOLUTION', 'IS', 'PUT', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOW', 'TO', 'EVAPORATE'] +1998-15444-0025-2229: ref=['TO', 'PURIFY', 'IT', 'ADD', 'A', 'SMALL', 'QUANTITY', 'OF', 'DILUTE', 'SULPHURIC', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BULK', 'ADD', 'A', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 'SODA'] +1998-15444-0025-2229: hyp=['TO', 'PURIFY', 'IT', 'ADDISMA', 'QUANTITY', 'OF', 'DELUDE', 'SUFFERG', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BARK', 'ADD', 'A', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 
'SODA'] +1998-15444-0026-2230: ref=['BOIL', 'THE', 'FINELY', 'DIVIDED', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTH', 'ITS', 'BULK', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'CHLORATE', 'UNTIL', 'THE', 'SOLIDS', 'ARE', 'REDUCED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0026-2230: hyp=['BY', 'THE', 'FINAL', 'DIVIDE', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTHS', 'ITS', 'BAG', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'LOW', 'RAGE', 'UNTIL', 'THE', 'SOLIDS', 'HAVE', 'IT', 'USED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0027-2231: ref=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WITH', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIUM', 'CHLORATE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BARIUM'] +1998-15444-0027-2231: hyp=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WAS', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIAN', 'CHLORIDE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BURIUM'] +1998-29454-0000-2157: ref=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0000-2157: hyp=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0001-2158: ref=['PERUSAL', 'SAID', 'THE', 'PAWNBROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PERNOUNCE', 'IT'] +1998-29454-0001-2158: hyp=['PERUSAL', 'SET', 'UPON', 'BROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PRONOUNCE', 'IT'] +1998-29454-0002-2159: ref=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THAT', 'TREASURE', 'IS', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'UNLESS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAS', 'A', 'SECRET', 'PANEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0002-2159: hyp=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THE', 'TREASURES', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'AND', 'AS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAD', 'A', 'SECRET', 'PENNEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0003-2160: ref=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRUMBLY', 'SURFACE', 'OF', 'THE', 'LATH', 'AND', 'PLASTER'] +1998-29454-0003-2160: hyp=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRAMBLY', 'SURFACE', 'OF', 'THE', 'LASS', 'AND', 'PLASTER'] +1998-29454-0004-2161: ref=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SLIGHTLY', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 'HALFPENNY', 'AND', 'LIMPED', 'OFF', 'OBEDIENTLY'] +1998-29454-0004-2161: hyp=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SAT', 'HE', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 'HALFPENNY', 'AND', 'LIMP', 'OF', 'OBEDIENTLY'] +1998-29454-0005-2162: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0005-2162: hyp=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0006-2163: ref=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0006-2163: hyp=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0007-2164: ref=["WHAT'S", 'UP', 'MATEY', 'LOST', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0007-2164: hyp=["WHAT'S", 'UP', 'MATE', 'ASKED', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0008-2165: 
ref=['WHEN', 'HE', 'SAID', 'AVE', 'I', 'BIN', 'ASLEEP'] +1998-29454-0008-2165: hyp=['WHEN', 'HE', 'SAID', 'HAVE', 'I', 'BEEN', 'ASLEEP'] +1998-29454-0009-2166: ref=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0009-2166: hyp=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0010-2167: ref=['NOT', 'EXACKLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0010-2167: hyp=['NOT', 'EXACTLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0011-2168: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MAN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKIE', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0011-2168: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MEN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKY', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0012-2169: ref=['AND', 'THE', 'TEA', 'AND', 'ALL', 'AN', 'THE', 'EGG'] +1998-29454-0012-2169: hyp=['AND', 'THE', 'TIENO', 'AND', 'THE', 'EGG'] +1998-29454-0013-2170: ref=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0013-2170: hyp=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0014-2171: ref=['I', 'SHALL', 'CATCH', 'IT', 'A', 'FAIR', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0014-2171: hyp=['I', 'SHALL', 'CATCH', 'IT', 'OF', 'HER', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0015-2172: ref=['SHE', 'WAS', 'WAITIN', 'FOR', 'THE', 'WOOD', 'TO', 'BOIL', 'THE', 'KETTLE', 'WHEN', 'I', 'COME', 'OUT', 'MOTHER'] +1998-29454-0015-2172: hyp=['SHE', 'WAS', 'WAITING', 'FOR', 'THE', 'WOOD', 'TO', 'BOY', 'THE', 'CATTLE', 'WHEN', 'TO', 'COME', 'OUT', 'MOTHER'] +1998-29454-0016-2173: ref=["AIN'T", 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0016-2173: hyp=['AND', 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0017-2174: ref=['THAT', "AIN'T", 'WHAT', "SHE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0017-2174: hyp=['THAT', 'ANNE', 'BUT', "YE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0018-2175: ref=['I', 'GOT', 'TO', 'STICK', 'IT', 'SAID', 'DICKIE', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0018-2175: hyp=['I', 'GOT', 'A', 'STICKET', 'SAID', 'DICKY', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0019-2176: ref=['I', "WOULDN'T", 'GO', 'OME', 'NOT', 'IF', 'I', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0019-2176: hyp=['I', "WOULDN'T", 'GO', 'HOME', 'NOT', 'IF', 'EVER', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0020-2177: ref=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0020-2177: hyp=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0021-2178: ref=['I', "AIN'T", 'IT', 'YER', 'HAVE', 'I', 'LIKE', 'WHAT', 'YER', 'AUNT', 'DO'] +1998-29454-0021-2178: hyp=['AND', 'ADIER', 'HAVE', 'I', 'LIKE', 'WHAT', 'YOU', "AREN'T", 'TO'] +1998-29454-0022-2179: ref=['WELL', "THAT'LL", 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MAN', 'I', 'AM'] +1998-29454-0022-2179: hyp=['WELL', 'THOU', 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MEN', 'I', 'AM'] +1998-29454-0023-2180: ref=['THE', "MAN'S", 'MANNER', 'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0023-2180: hyp=['THE', "MAN'S", 'MANNER', 
'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0024-2181: ref=['THE', 'SUN', 'SHOT', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'IN', 'THE', 'HEDGE'] +1998-29454-0024-2181: hyp=['THE', 'SUN', 'HAD', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'AND', 'THE', 'HEDGE'] +1998-29454-0025-2182: ref=['A', 'BIRD', 'PAUSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0025-2182: hyp=['A', 'BIRD', 'PASSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0026-2183: ref=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'A', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0026-2183: hyp=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'AND', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0027-2184: ref=['AN', 'I', 'ASKS', 'YOU', 'LET', 'ME', 'COME', 'ALONGER', 'YOU', 'GOT', 'THAT'] +1998-29454-0027-2184: hyp=['AND', 'I', 'ASK', 'YOU', 'LET', 'ME', 'COME', 'ALONG', 'ARE', 'YOU', 'GOT', 'THAT'] +1998-29454-0028-2185: ref=['GET', 'IT', 'WROTE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0028-2185: hyp=['GERT', 'RODE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0029-2186: ref=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0029-2186: hyp=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0030-2187: ref=['NOW', "WE'RE", 'SQUARE', 'HE', 'SAID'] +1998-29454-0030-2187: hyp=['NOW', 'WE', 'ARE', 'SQUARE', 'HE', 'SAID'] +1998-29454-0031-2188: ref=['THEY', 'COULD', 'PUT', 'A', 'MAN', 'AWAY', 'FOR', 'LESS', 'THAN', 'THAT'] +1998-29454-0031-2188: hyp=['THEY', 'COULD', 'PUT', 'A', 'MEN', 'AWAY', 'FOR', 'US', 'THAN', 'THAT'] +1998-29454-0032-2189: ref=['I', 'SEE', 'THAT', 'THERE', 'IN', 'A', 'BOOK', 'SAID', 'DICKIE', 'CHARMED'] +1998-29454-0032-2189: hyp=['I', 'SEE', 'THAT', 'THEN', 'A', 'BOOK', 'SAID', 'DICK', 'HAD', 'SHUMMED'] +1998-29454-0033-2190: ref=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'I', 'WUNNERED', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0033-2190: hyp=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'A', 'ONE', 'AT', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0034-2191: ref=['WILD', 'ONES', "AIN'T", 'ALF', 'THE', 'SIZE', 'I', 'LAY'] +1998-29454-0034-2191: hyp=['WHITE', 'ONES', 'AND', 'A', 'HALF', 'SIZE', 'I', 'LAY'] +1998-29454-0035-2192: ref=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0035-2192: hyp=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0036-2193: ref=['AH', 'SAID', 'DICKIE', 'AND', 'A', 'FULL', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0036-2193: hyp=['AH', 'SAID', 'DICKY', 'AND', 'A', 'FOOT', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0037-2194: ref=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MUD', 'OFF', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0037-2194: hyp=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MAD', 'OF', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0038-2195: ref=['DICKIE', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] +1998-29454-0038-2195: hyp=['DICKY', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] 
+1998-29454-0039-2196: ref=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', "REG'LER", 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0039-2196: hyp=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', 'REGULAR', 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0040-2197: ref=['SOME', 'BLOKES', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0040-2197: hyp=['SOME', 'LOATHS', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0041-2198: ref=['IF', "YOU'RE", 'CLEAN', 'THEY', 'SAY', 'HONEST', 'POVERTY', 'AN', 'IF', "YOU'RE", 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0041-2198: hyp=['IF', 'YO', 'CLEAN', 'THEY', 'SAY', 'I', 'DISPOVERTY', 'AN', 'IF', 'YO', 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0042-2199: ref=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0042-2199: hyp=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0043-2200: ref=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'OW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0043-2200: hyp=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'HOW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0044-2201: ref=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0044-2201: hyp=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0045-2202: ref=['STEP', 'OUT', 'SONNY', 'OR', "WE'LL", 'NEVER', 'GET', 'THERE', 'THIS', 'SIDE', 'CHRISTMAS'] +1998-29454-0045-2202: hyp=['SPATANI', 'ALBEA', 'NEVER', 'GET', 'THERE', 'THIS', 'SORT', 'OF', 'CHRISTMAS'] +1998-29454-0046-2203: ref=['WELL', "YOU'LL", 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29454-0046-2203: hyp=['WELL', 'YOU', 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29455-0000-2232: ref=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THERE', 'WERE', 'LOUD', 'VOICED', 'ARGUMENTS', 'OR', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0000-2232: hyp=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THEY', 'WERE', 'ALL', 'OUTWARDS', 'ARGUMENTS', 'OR', 'A', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0001-2233: ref=["WHAT'S", 'ALL', 'THAT', 'THERE', 'DICKIE', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'KNOBBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 'TIED', 'ON', 'TO', 'THE', "PERAMBULATOR'S", 'FRONT'] +1998-29455-0001-2233: hyp=["WHAT'S", 'ON', 'THAT', 'THERE', 'DICKY', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'NOBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 'TIED', 'ON', 'TO', 'THE', 'PRIME', "RELATOR'S", 'FRONT'] +1998-29455-0002-2234: ref=['TELL', 'YER', 'WHAT', 'MATE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', "I'D", 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0002-2234: hyp=['TELL', 'YOU', 'WHAT', 'MADE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'I', 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0003-2235: ref=['SWELP', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0003-2235: hyp=['SWAB', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0004-2236: ref=['OH', 'LOOK', 'SAID', 'DICKIE', 'THE', 'FLOWERS'] +1998-29455-0004-2236: hyp=['O', 'LOOK', 
'SAID', 'DICKY', 'THE', 'FLOWERS'] +1998-29455-0005-2237: ref=["THEY'RE", 'ONLY', 'WEEDS', 'SAID', 'BEALE'] +1998-29455-0005-2237: hyp=['THERE', 'ONLY', 'READS', 'SAID', 'BEER'] +1998-29455-0006-2238: ref=['BUT', 'I', 'SHALL', 'HAVE', 'THEM', 'WHILE', "THEY'RE", 'ALIVE', 'SAID', 'DICKIE', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PAWNBROKER', 'ABOUT', 'THE', 'MOONFLOWERS'] +1998-29455-0006-2238: hyp=['BUT', 'I', 'SHOULD', 'HAVE', 'THEM', 'WHETHER', 'ALIVE', 'SAID', 'DICKY', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PAWNBROKER', 'BUT', 'THE', 'MOONFLOWERS'] +1998-29455-0007-2239: ref=['HI', 'THERE', 'GOES', 'A', 'RABBIT'] +1998-29455-0007-2239: hyp=['AY', 'THERE', 'WAS', 'A', 'RABBIT'] +1998-29455-0008-2240: ref=['SEE', 'IM', 'CROST', 'THE', 'ROAD', 'THERE', 'SEE', 'HIM'] +1998-29455-0008-2240: hyp=['SEEM', 'QUEST', 'ROAD', 'THERE', 'SEEM'] +1998-29455-0009-2241: ref=['HOW', 'BEAUTIFUL', 'SAID', 'DICKIE', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0009-2241: hyp=['HOW', 'BEAUTIFUL', 'SAID', 'DICKY', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0010-2242: ref=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0010-2242: hyp=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0011-2243: ref=["OW'M", 'I', 'TO', 'WHEEL', 'THE', 'BLOOMIN', 'PRAM', 'IF', 'YOU', 'GOES', 'ON', 'LIKE', 'AS', 'IF', 'YOU', 'WAS', 'A', 'BAG', 'OF', 'EELS'] +1998-29455-0011-2243: hyp=['AM', 'I', 'TO', 'BE', 'AT', 'THE', 'ROOM', 'IN', 'PEM', 'IF', 'YOUR', 'GONE', 'LIKE', 'US', 'IF', 'YOU', 'WAS', 'A', 'PEG', 'OF', 'EELS'] +1998-29455-0012-2244: ref=['I', 'LIKE', 'YOU', 'NEXTER', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXTER', 'NEXT', 'DOOR'] +1998-29455-0012-2244: hyp=['I', 'LIKE', 'YOU', 'NEXT', 'TO', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXT', 'THE', 'NEXT', 'DOOR'] +1998-29455-0013-2245: ref=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0013-2245: hyp=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0014-2246: ref=['DICKIE', 'QUICK', 'TO', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0014-2246: hyp=['DICKY', 'QUICKLY', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0015-2247: ref=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0015-2247: hyp=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0016-2248: ref=['OH', 'WELL', 'DONE', 'LITTLE', 'UN', 'SAID', 'MISTER', 'BEALE', 'TO', 'HIMSELF'] +1998-29455-0016-2248: hyp=['OH', 'WELL', 'DONE', 'LITTLE', 'ONE', 'SAID', 'MISTER', 'BEE', 'TO', 'HIMSELF'] +1998-29455-0017-2249: ref=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 'BY', 'A', 'PENNY', 'AND', 'OH', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0017-2249: hyp=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 'BY', 'A', 'PENNY', 'AND', 'O', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0018-2250: ref=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', "ERE'S", 'THE', 'STEEVER'] +1998-29455-0018-2250: hyp=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', 'YES', 'THE', 'STEVER'] +1998-29455-0019-2251: ref=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 
'BEALE', 'RADIANT', 'WITH', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'A', 'KID', 'DONE'] +1998-29455-0019-2251: hyp=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 'BEARD', 'RADIANT', 'WAS', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'KEPT', 'DONE'] +1998-29455-0020-2252: ref=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0020-2252: hyp=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0021-2253: ref=['PLEASE', 'DO', 'NOT', 'BE', 'TOO', 'SHOCKED'] +1998-29455-0021-2253: hyp=['PLEASE', "DON'T", 'A', 'BETOUT'] +1998-29455-0022-2254: ref=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0022-2254: hyp=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0023-2255: ref=['TO', 'THE', 'ELDER', 'TRAMP', 'LIES', 'AND', 'BEGGING', 'WERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0023-2255: hyp=['TO', 'THE', 'OTHER', 'TRAMP', 'LIES', 'IN', 'PEGGING', 'WHERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0024-2256: ref=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WITH', 'THE', 'GREEN', 'CURTAINS', 'URGED', 'DICKIE'] +1998-29455-0024-2256: hyp=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WAS', 'THE', 'GREEN', 'CURTAINS', 'ADDED', 'THE', 'KEI'] +1998-29455-0025-2257: ref=['WHICH', 'THIS', "AIN'T", 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0025-2257: hyp=['WHICH', 'THIS', 'END', 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0026-2258: ref=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WILD', 'THINGS', 'IN', 'THE', 'HEDGES', 'THE', 'BARKING', 'OF', 'DOGS', 'IN', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0026-2258: hyp=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WHITE', 'THINGS', 'ON', 'THE', 'HATCHES', 'THE', 'BARKING', 'OF', 'DOGS', 'AND', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0027-2259: ref=['THE', 'NEW', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 'FROM', 'WHOM', 'IT', 'WAS', 'WORTH', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEALE', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0027-2259: hyp=['THEN', 'YOU', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 'FROM', 'WHOM', 'IT', 'WAS', 'WORSE', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEER', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0028-2260: ref=['BLESSED', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 'AND', 'OVER', 'AGAIN'] +1998-29455-0028-2260: hyp=['BLEST', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 
'AND', 'OVER', 'AGAIN'] +1998-29455-0029-2261: ref=['CLEVER', 'AS', 'A', 'TRAINDAWG', 'E', 'IS', 'AN', 'ALL', 'OUTER', 'IS', 'OWN', 'EAD'] +1998-29455-0029-2261: hyp=['CLEVER', 'AS', 'A', 'TRAIN', 'DOG', 'IS', 'IN', 'ALL', 'OUT', 'OF', 'HIS', 'OWN', 'HEAD'] +1998-29455-0030-2262: ref=['I', "AIN'T", 'SURE', 'AS', 'I', "ADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ANDS', 'LIKE', 'YOU', 'JIM'] +1998-29455-0030-2262: hyp=['I', 'AM', 'SURE', 'AS', 'I', "HADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ENDS', 'LIKE', 'EUGEN'] +1998-29455-0031-2263: ref=['I', 'OPE', "E'S", 'CLEVER', 'ENOUGH', 'TO', 'DO', 'WOT', "E'S", 'TOLD', 'KEEP', 'IS', 'MUG', 'SHUT', "THAT'S", 'ALL'] +1998-29455-0031-2263: hyp=['IOPIUS', 'LOVE', 'ENOUGH', 'TO', 'DO', 'WHAT', 'HE', 'STOWED', 'HE', 'WAS', 'MUCH', 'AT', "THAT'S", 'ALL'] +1998-29455-0032-2264: ref=['IF', "E'S", 'STRAIGHT', "E'LL", 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', "AIN'T", "I'LL", 'DO', 'FOR', 'IM', 'SEE'] +1998-29455-0032-2264: hyp=['IF', 'HE', 'STRAYED', 'YOU', 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', "AIN'T", "I'LL", 'DO', 'FOR', 'HIM', 'SEE'] +1998-29455-0033-2265: ref=['SEE', 'THAT', 'BLOKE', 'JUST', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YUSS', 'SAID', 'DICKIE'] +1998-29455-0033-2265: hyp=['SEE', 'THAT', 'LOCTICE', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YES', 'SAID', 'DICKIE'] +1998-29455-0034-2266: ref=['WELL', 'YOU', 'NEVER', 'SEE', 'IM'] +1998-29455-0034-2266: hyp=['WELL', 'YOU', 'NEVER', 'SEE', 'HIM'] +1998-29455-0035-2267: ref=['IF', 'ANY', 'ONE', 'ARSTS', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'IM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'IM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'IM'] +1998-29455-0035-2267: hyp=['IF', 'ANY', 'ONE', 'ASKED', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'HIM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'HIM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'HIM'] +1998-29455-0036-2268: ref=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEALE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0036-2268: hyp=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0037-2269: ref=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEALE'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0037-2269: hyp=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEAT'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0038-2270: ref=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WHERE', 'THE', 'SKYLARKS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'VERY', 'STRONG', 'AND', 'GRAY'] +1998-29455-0038-2270: hyp=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WITH', 'THE', 'SKYLIGHTS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'WHERE', 'STRONG', 'AND', 'GRAY'] +1998-29455-0039-2271: ref=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKIE'] +1998-29455-0039-2271: hyp=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKY'] +2033-164914-0000-661: ref=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] 
+2033-164914-0000-661: hyp=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] +2033-164914-0001-662: ref=['BUT', 'SHE', 'SAID', 'WHOMSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0001-662: hyp=['BUT', 'SHE', 'SAID', 'WHOMSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0002-663: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0002-663: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0003-664: ref=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0003-664: hyp=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0004-665: ref=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0004-665: hyp=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0005-666: ref=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOTH', 'ZAU', 'AL', 'MAKAN'] +2033-164914-0005-666: hyp=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOTH', 'SAO', 'ALMAN'] +2033-164914-0006-667: ref=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0006-667: hyp=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0007-668: ref=['AND', 'HE', 'ALSO', 'IMPROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTICHS'] +2033-164914-0007-668: hyp=['AND', 'HE', 'ALSO', 'PROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTINCTS'] +2033-164914-0008-669: ref=['WHEN', 'NUZHAT', 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MIND', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 'HER', 'BROTHER', 'AND', 'THEIR', 'WHILOME', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'AT', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0008-669: hyp=['WHEN', "NUZHA'S", 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MINE', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 'HER', 'BROTHER', 'AND', 'THEIR', 'WILM', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'TO', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0009-670: ref=['HE', 'WHO', 'RECITED', 'THE', 'FIRST', 'TIME', 'HATH', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'I', 'HEARD', 'HIM', 'HARD', 'BY'] +2033-164914-0009-670: hyp=['HE', 'WHO', 'RESIDED', 'THE', 'FIRST', 'TIME', 'HAD', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'HEARD', 'HIM', 'HEART', 'BY'] +2033-164914-0010-671: ref=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0010-671: hyp=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 
'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0011-672: ref=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0011-672: hyp=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0012-673: ref=['RETURN', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0012-673: hyp=['RETURN', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0013-674: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0013-674: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0014-675: ref=['BUT', 'THE', 'EUNUCH', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0014-675: hyp=['BUT', 'THE', 'EUNUCH', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0015-676: ref=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZAU', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'I', 'KNOW', 'HIM', 'NOT'] +2033-164914-0015-676: hyp=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZA', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'THEY', 'KNOW', 'HIM', 'NOT'] +2033-164914-0016-677: ref=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'MEET', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0016-677: hyp=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0017-678: ref=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'A', 'ROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 'MISTRESS', 'WITHOUT', 'TIDINGS'] +2033-164914-0017-678: hyp=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'AROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 'MISTRESS', 'WITHOUT', 'HIDINGS'] +2033-164914-0018-679: ref=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'IGNOMY', "WHATE'ER", 'THE', 'BITTER', 'CUP', 'I', 'DRAIN', 'FAR', 'BE', 'FRO', 'ME', 'THAT', 'LAND', 
'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0018-679: hyp=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'EGOY', 'WHATEVER', 'THE', 'BITTER', 'CUPIED', 'DRAIN', 'FAR', 'BE', 'FROM', 'ME', 'THE', 'LAND', 'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0019-680: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZAU', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0019-680: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZA', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0020-681: ref=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0020-681: hyp=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THAT', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0021-682: ref=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRACIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURN', 'IN', 'WEAL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0021-682: hyp=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRECIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURNING', 'WHEEL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0022-683: ref=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'ZAU', 'AL', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSES', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADY', 'IS', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THY', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164914-0022-683: hyp=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'THOU', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSEST', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADIES', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THINE', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164915-0000-643: ref=['AND', 'ALSO', 'THESE'] +2033-164915-0000-643: hyp=['AND', 'ALSO', 'THESE'] +2033-164915-0001-644: ref=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 'A', 'FAINTING', 'FIT'] +2033-164915-0001-644: hyp=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 'A', 'FAINTING', 'FIT'] +2033-164915-0002-645: ref=['WHEN', 'THE', 'EUNUCH', 'SAW', 'THIS', 'CASE', 'HE', 'WONDERED', 'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0002-645: hyp=['WHEN', 'THE', 'EUNUCH', 'SAW', 'THESE', 'CAVES', 'HE', 'WONDERED', 
'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0003-646: ref=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'NUZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MASTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0003-646: hyp=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'UZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MASTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0004-647: ref=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADAWI', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARRKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'A', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARRKAN'] +2033-164915-0004-647: hyp=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADAH', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'HER', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARKAN'] +2033-164915-0005-648: ref=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0005-648: hyp=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0006-649: ref=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRTHING', 'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0006-649: hyp=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOCKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRDING', 'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0007-650: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOKER', 'GIRTHED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'OH', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] 
+2033-164915-0007-650: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOCKER', 'GIRDED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'O', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] +2033-164915-0008-651: ref=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'ET', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0008-651: hyp=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'AT', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0009-652: ref=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THESE', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0009-652: hyp=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THIS', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0010-653: ref=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BETIDE', 'THEE'] +2033-164915-0010-653: hyp=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BE', 'TIDE', 'THEE'] +2033-164915-0011-654: ref=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'COMING', 'ILLS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0011-654: hyp=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'CARMINALS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0012-655: ref=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'THE', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0012-655: hyp=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'HIS', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0013-656: ref=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARRKAN', 'SON', 'OF', 'OMAR', 'BIN', 'AL', "NU'UMAN", 'LORD', 'OF', 'BAGHDAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0013-656: hyp=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARKAN', 'SONG', 'OF', 'OMAR', 'BIN', 'AL', 'NUMAN', 'LORD', 'OF', 'ADAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0014-657: ref=['SO', 'FARE', 'YE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFAL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'DANDAN'] +2033-164915-0014-657: hyp=['SOPHIA', 'HE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFALL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'TAN'] +2033-164915-0015-658: ref=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0015-658: hyp=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 
'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0016-659: ref=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARRKAN', 'AND', 'MAKE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0016-659: hyp=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARKAN', 'AND', 'MADE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0017-660: ref=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'ZAU', 'AL', 'MAKAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIGHS', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HIJAZ', 'AND', 'NONE', 'WOTTETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164915-0017-660: hyp=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'THOU', 'A', 'MACAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIES', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HI', 'JARS', 'AND', 'NONE', 'WHATETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164916-0000-684: ref=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WONDER', 'OF', 'WONDERS'] +2033-164916-0000-684: hyp=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WANDER', 'OF', 'WONDERS'] +2033-164916-0001-685: ref=['KNOW', 'O', 'CHIEF', 'WAZIR', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'THAT', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORETH', 'TO', 'YOU', 'ZAU', 'AL', 'MAKAN', 'AND', 'HIS', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0001-685: hyp=['NO', 'O', 'CHIEF', 'WOZZIER', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'LET', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORE', 'IT', 'TO', 'YOU', 'THOU', 'ARMANQUIN', 'AND', 'HE', 'SISTER', 'KNOWSAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0002-686: ref=['WHEN', 'THE', 'MINISTER', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFEL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0002-686: hyp=['WHEN', 'THE', 'MEANESTER', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFELL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0003-687: ref=['ZAU', 'AL', 'MAKAN', 
'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THIS', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SET', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0003-687: hyp=['ZAWACON', 'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THE', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SAID', 'TO', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0004-688: ref=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARRKAN'] +2033-164916-0004-688: hyp=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARKAN'] +2033-164916-0005-689: ref=['AFTER', 'AWHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'FULL', 'TIDE', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0005-689: hyp=['AFTER', 'A', 'WHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'POOL', 'TIED', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0006-690: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0006-690: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0007-691: ref=['AND', 'IN', 'IT', 'ALL', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0007-691: hyp=['ANY', 'NEAT', 'OR', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0008-692: ref=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAU', 'AL', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRE', 'SANS', 'PEER'] +2033-164916-0008-692: hyp=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAO', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRES', 'SONSPIER'] +2033-164916-0009-693: ref=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'A', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0009-693: hyp=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'AT', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0010-694: ref=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2033-164916-0010-694: hyp=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THAT', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2414-128291-0000-2689: ref=['WHAT', 'HATH', 
'HAPPENED', 'UNTO', 'ME'] +2414-128291-0000-2689: hyp=['WHAT', 'HATH', 'HAPPENED', 'TO', 'ME'] +2414-128291-0001-2690: ref=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WARM', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THE', 'NEIGHBOURHOOD'] +2414-128291-0001-2690: hyp=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WRONG', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THAT', 'NEIGHBOURHOOD'] +2414-128291-0002-2691: ref=['WHEN', 'HOWEVER', 'ZARATHUSTRA', 'WAS', 'QUITE', 'NIGH', 'UNTO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'THAT', 'A', 'HUMAN', 'VOICE', 'SPAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KINE', 'AND', 'APPARENTLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0002-2691: hyp=['READ', 'HOWEVER', 'THEIR', 'TWO', 'STRAW', 'WAS', 'QUITE', 'NIGH', 'AND', 'TO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'WITH', 'HUMAN', 'VOICE', 'PAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KIND', 'AND', 'A', 'FRIENDLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0003-2692: ref=['WHAT', 'DO', 'I', 'HERE', 'SEEK'] +2414-128291-0003-2692: hyp=['FOR', 'DIEU', 'I', 'HERE', 'SEEK'] +2414-128291-0004-2693: ref=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', 'SEEKEST', 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0004-2693: hyp=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', "SEEK'ST", 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0005-2694: ref=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALREADY', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WERE', 'THEY', 'ABOUT', 'TO', 'GIVE', 'ME', 'THEIR', 'ANSWER'] +2414-128291-0005-2694: hyp=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALREAD', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WHERE', 'THEY', 'ARE', 'ABOUT', 'TO', 'GIVE', 'ME', 'THE', 'ANSWER'] +2414-128291-0006-2695: ref=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICTION'] +2414-128291-0006-2695: hyp=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICTION'] +2414-128291-0007-2696: ref=['WHO', 'HATH', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0007-2696: hyp=['WHO', 'HAD', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0008-2697: ref=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0008-2697: hyp=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0009-2698: ref=['BUT', 'BEHOLD', 'THESE', 'KINE'] +2414-128291-0009-2698: hyp=['BUT', 'BEHOLD', 'HIS', 'KIND'] +2414-128291-0010-2699: ref=['THE', 'KINE', 'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0010-2699: hyp=['THE', 'KIND', 'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0011-2700: ref=['WANTON', 'AVIDITY', 'BILIOUS', 'ENVY', 'CAREWORN', 'REVENGE', 'POPULACE', 'PRIDE', 'ALL', 'THESE', 'STRUCK', 'MINE', 'EYE'] +2414-128291-0011-2700: hyp=['WANTON', 'ADVITY', 'BILIOUS', 'ENVY', 'CAREWORN', 'REVENGE', 'POPULOUS', 'PRIDE', 'ALL', 'DISTRACT', 'MY', 'EYE'] +2414-128291-0012-2701: ref=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'THAT', 'THE', 'POOR', 'ARE', 'BLESSED'] +2414-128291-0012-2701: hyp=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'LITTLE', 'POOR', 'A', 'BLESSED'] +2414-128291-0013-2702: ref=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 
'THE', 'KINE', 'AND', 'WHY', 'IS', 'IT', 'NOT', 'WITH', 'THE', 'RICH'] +2414-128291-0013-2702: hyp=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 'THE', 'KIND', 'AND', 'WAS', 'IT', 'NOT', 'WITH', 'A', 'RICH'] +2414-128291-0014-2703: ref=['WHY', 'DOST', 'THOU', 'TEMPT', 'ME'] +2414-128291-0014-2703: hyp=['WHY', 'THOSE', 'THOU', 'TEMPT', 'ME'] +2414-128291-0015-2704: ref=['ANSWERED', 'THE', 'OTHER'] +2414-128291-0015-2704: hyp=['ANSWERED', 'HER'] +2414-128291-0016-2705: ref=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0016-2705: hyp=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0017-2706: ref=['THUS', 'SPAKE', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'PERSPIRED', 'WITH', 'HIS', 'WORDS', 'SO', 'THAT', 'THE', 'KINE', 'WONDERED', 'ANEW'] +2414-128291-0017-2706: hyp=['DOES', 'BEG', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'POISPIRED', 'WITH', 'HIS', 'WORDS', 'TO', 'INTER', 'KIND', 'WONDERED', 'ANEW'] +2414-128291-0018-2707: ref=['THOU', 'DOEST', 'VIOLENCE', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUNT', 'WHEN', 'THOU', 'USEST', 'SUCH', 'SEVERE', 'WORDS'] +2414-128291-0018-2707: hyp=['THOU', 'DOEST', 'WILDEST', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUND', 'AND', 'THOU', 'USEST', 'SUCH', 'SAVOUR', 'WORDS'] +2414-128291-0019-2708: ref=['THEY', 'ALSO', 'ABSTAIN', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0019-2708: hyp=['THEY', 'ALSO', 'ABSTAINED', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0020-2709: ref=['WELL'] +2414-128291-0020-2709: hyp=['WELL'] +2414-128291-0021-2710: ref=['SAID', 'ZARATHUSTRA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MINE', 'ANIMALS', 'MINE', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEIR', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0021-2710: hyp=['SAYS', 'ERGUSTARA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MY', 'ANIMALS', 'MY', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEY', 'ARE', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0022-2711: ref=['AND', 'TALK', 'TO', 'MINE', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0022-2711: hyp=['AND', 'TALK', 'TO', 'MY', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0023-2712: ref=['NOW', 'HOWEVER', 'TAKE', 'LEAVE', 'AT', 'ONCE', 'OF', 'THY', 'KINE', 'THOU', 'STRANGE', 'ONE'] +2414-128291-0023-2712: hyp=['NOW', 'HOWEVER', 'THEY', 'LEAVE', 'IT', 'WAS', 'OF', 'THEIR', 'KIND', 'THOU', 'STRANGE', 'WORLD'] +2414-128291-0024-2713: ref=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0024-2713: hyp=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0025-2714: ref=['FOR', 'THEY', 'ARE', 'THY', 'WARMEST', 'FRIENDS', 'AND', 'PRECEPTORS'] +2414-128291-0025-2714: hyp=['FOR', 'THEY', 'ARE', 'DIVORITES', 'AND', 'PERCEPTORS'] +2414-128291-0026-2715: ref=['THOU', 'EVIL', 'FLATTERER'] +2414-128291-0026-2715: hyp=['THOU', 'EVIL', 'SLACKER'] +2414-128292-0000-2618: ref=['WHITHER', 'HATH', 'MY', 'LONESOMENESS', 'GONE', 'SPAKE', 'HE'] +2414-128292-0000-2618: hyp=['WITHER', 'HAD', 'MY', 'LONESOME', 'DISCOUR', 'SPAKE', 'HE'] +2414-128292-0001-2619: ref=['MY', 'SHADOW', 'CALLETH', 'ME'] +2414-128292-0001-2619: hyp=['MY', 'SHADOW', 'CAUGHT', 'ME'] +2414-128292-0002-2620: ref=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0002-2620: hyp=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0003-2621: ref=['LET', 'IT', 'RUN', 'AFTER', 'ME', 'I', 'RUN', 'AWAY', 
'FROM', 'IT'] +2414-128292-0003-2621: hyp=['NEKHLUD', 'TRUE', 'ENOUGH', 'TO', 'ME', 'I', 'RAN', 'AWAY', 'FROM', 'IT'] +2414-128292-0004-2622: ref=['THUS', 'SPAKE', 'ZARATHUSTRA', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0004-2622: hyp=['THUS', 'BEING', 'THEIR', 'TOO', 'STRIKE', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0005-2623: ref=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0005-2623: hyp=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0006-2624: ref=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', 'FOOLS', 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0006-2624: hyp=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', "FOOD'S", 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0007-2625: ref=['BUT', 'DOTH', 'ZARATHUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'HIS', 'SHADOW'] +2414-128292-0007-2625: hyp=['BY', 'DOTHAH', 'TOUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'A', 'SHADOW'] +2414-128292-0008-2626: ref=['ALSO', 'METHINKETH', 'THAT', 'AFTER', 'ALL', 'IT', 'HATH', 'LONGER', 'LEGS', 'THAN', 'MINE'] +2414-128292-0008-2626: hyp=['ALSO', 'METHINK', 'IT', 'THAT', 'AFTER', 'ALL', 'IT', 'HAD', 'LONG', 'OR', 'LESS', 'THAN', 'MINE'] +2414-128292-0009-2627: ref=['FOR', 'WHEN', 'ZARATHUSTRA', 'SCRUTINISED', 'HIM', 'WITH', 'HIS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'AS', 'BY', 'A', 'SUDDEN', 'APPARITION', 'SO', 'SLENDER', 'SWARTHY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'DID', 'THIS', 'FOLLOWER', 'APPEAR'] +2414-128292-0009-2627: hyp=['FOR', 'WHEN', 'THEY', 'ARE', 'TOO', 'STRESS', 'COGNIZED', 'HIM', 'IT', 'IS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'ALBERT', 'A', 'CERTAIN', 'APPARITION', 'SO', 'SLENDER', 'SWALLTY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'IT', 'IS', 'FULL', 'OF', 'PEER'] +2414-128292-0010-2628: ref=['ASKED', 'ZARATHUSTRA', 'VEHEMENTLY', 'WHAT', 'DOEST', 'THOU', 'HERE'] +2414-128292-0010-2628: hyp=['I', 'TAKE', 'TO', 'EXTRAVE', 'IMAGINE', 'WHAT', 'DOST', 'THOU', 'HEAR'] +2414-128292-0011-2629: ref=['AND', 'WHY', 'CALLEST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0011-2629: hyp=['AND', 'WHY', 'COLLEST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0012-2630: ref=['THOU', 'ART', 'NOT', 'PLEASING', 'UNTO', 'ME'] +2414-128292-0012-2630: hyp=['THOU', 'ART', 'NOT', 'PLEASING', 'INTO', 'ME'] +2414-128292-0013-2631: ref=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0013-2631: hyp=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0014-2632: ref=['O', 'EARTH', 'THOU', 'HAST', 'BECOME', 'TOO', 'ROUND', 'FOR', 'ME'] +2414-128292-0014-2632: hyp=['O', 'ART', 'THOU', 'HAST', 'BECOME', 'TO', 'ROUND', 'FOR', 'ME'] +2414-128292-0015-2633: ref=['WHEN', 'THE', 'DEVIL', 'CASTETH', 'HIS', 'SKIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0015-2633: hyp=['WITH', 'THE', 'DEVIL', 'CAST', 'AT', 'HIS', 'SKIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0016-2634: ref=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0016-2634: hyp=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0017-2635: ref=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0017-2635: hyp=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0018-2636: ref=['THEN', 'ONLY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] +2414-128292-0018-2636: hyp=['THEN', 'OLD', 'LADY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] 
+2414-128292-0019-2637: ref=['HOW', 'HAVE', 'I', 'STILL', 'INCLINATION'] +2414-128292-0019-2637: hyp=['HOW', 'HAIR', 'I', 'STILL', 'INCLINATION'] +2414-128292-0020-2638: ref=['HAVE', 'I', 'STILL', 'A', 'GOAL'] +2414-128292-0020-2638: hyp=['EH', 'I', 'STILL', 'A', 'GOLD'] +2414-128292-0021-2639: ref=['A', 'HAVEN', 'TOWARDS', 'WHICH', 'MY', 'SAIL', 'IS', 'SET'] +2414-128292-0021-2639: hyp=['A', 'HAIRED', 'TOWARD', 'SPEECH', 'MY', 'SAILOR', 'SAID'] +2414-128292-0022-2640: ref=['FOR', 'IT', 'DO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'SOUGHT', 'BUT', 'HAVE', 'NOT', 'FOUND', 'IT'] +2414-128292-0022-2640: hyp=['FOR', 'IT', 'TOO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'THOUGHT', 'IT', 'HATH', 'NOT', 'FOUND', 'IT'] +2414-128292-0023-2641: ref=['O', 'ETERNAL', 'EVERYWHERE', 'O', 'ETERNAL', 'NOWHERE', 'O', 'ETERNAL', 'IN', 'VAIN'] +2414-128292-0023-2641: hyp=['I', 'TURNED', 'OUT', 'EVERYWHERE', 'WHO', 'HAD', 'TURNED', 'OUT', 'NOWHERE', 'WHO', 'HAD', 'TURNED', 'OUT', 'IN', 'VAIN'] +2414-128292-0024-2642: ref=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0024-2642: hyp=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0025-2643: ref=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0025-2643: hyp=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0026-2644: ref=['THY', 'DANGER', 'IS', 'NOT', 'SMALL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WANDERER'] +2414-128292-0026-2644: hyp=['THY', 'DANGER', 'HIS', 'PERCHAL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WONDER'] +2414-128292-0027-2645: ref=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOY', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0027-2645: hyp=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOYED', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0028-2646: ref=['BEWARE', 'LEST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FAITH', 'CAPTURE', 'THEE', 'A', 'HARD', 'RIGOROUS', 'DELUSION'] +2414-128292-0028-2646: hyp=['BE', 'REALIST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FIT', 'CAPTURE', 'THEE', 'A', 'HARD', 'RECKLESS', 'DELUSION'] +2414-128292-0029-2647: ref=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCETH', 'AND', 'TEMPTETH', 'THEE'] +2414-128292-0029-2647: hyp=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCE', 'IT', 'AND', 'TEMPTED', 'THEE'] +2414-128292-0030-2648: ref=['THOU', 'HAST', 'LOST', 'THY', 'GOAL'] +2414-128292-0030-2648: hyp=['THOU', 'HAST', 'LOST', 'DAGGULE'] +2414-128292-0031-2649: ref=['THOU', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'THOU', 'TIRED', 'BUTTERFLY'] +2414-128292-0031-2649: hyp=['THOUGH', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'NOW', 'TIRED', 'BUT', 'TO', 'FLY'] +2414-128292-0032-2650: ref=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'AND', 'A', 'HOME', 'THIS', 'EVENING'] +2414-128292-0032-2650: hyp=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'IN', 'THE', 'WHOLE', 'THIS', 'EVENING'] +2414-159411-0000-2653: ref=['ONCE', 'UPON', 'A', 'TIME', 'A', 'BRAHMAN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'HAD', 'BEEN', 'SHUT', 'UP', 'BY', 'THE', 'VILLAGERS', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0000-2653: hyp=['ONCE', 'UPON', 'HER', 'TIME', 'A', 'BRAHM', 'IN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'AT', 'MONSHADE', 'UP', 'BY', 'THE', 'VILLAGES', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0001-2654: ref=['THE', 'BRAHMAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0001-2654: 
hyp=['THE', 'BRAMIAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0002-2655: ref=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0002-2655: hyp=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0003-2656: ref=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'DRINK', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0003-2656: hyp=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'BRING', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0004-2657: ref=['THEN', 'THE', 'BRAHMAN', 'TOOK', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'DONE', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0004-2657: hyp=['AND', 'IN', 'THE', 'BRAM', 'INTO', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'TURNED', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0005-2658: ref=['SO', 'THE', 'BRAHMAN', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANYAN', 'TREE', 'AND', 'THE', 'BRAHMAN', 'SAID', 'TO', 'IT', 'BANYAN', 'TREE', 'BANYAN', 'TREE', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0005-2658: hyp=['SO', 'THE', 'BRAMID', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANDON', 'TREE', 'AND', 'THE', 'BRAMMEN', 'SAID', 'TO', 'IT', 'BANION', 'TREE', 'BANNING', 'TREE', 'HERE', 'AND', 'GIVE', 'JOINTMENT'] +2414-159411-0006-2659: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BANYAN', 'TREE'] +2414-159411-0006-2659: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BEN', 'TREE'] +2414-159411-0007-2660: ref=['THIS', 'TIGER', 'SAID', 'THE', 'BRAHMAN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HURT', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LET', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] +2414-159411-0007-2660: hyp=['DISTAGGER', 'SAID', 'DEBRAMIN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HIDE', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LEFT', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] +2414-159411-0008-2661: ref=['IS', 'IT', 'JUST', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NO'] +2414-159411-0008-2661: hyp=['IT', 'IS', 'JEALOUS', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'I', 'KNOW'] +2414-159411-0009-2662: ref=['LET', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'AN', 'UNGRATEFUL', 'RACE'] +2414-159411-0009-2662: hyp=['LATE', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'IN', 'UNGRATEFUL', 'RACE'] +2414-159411-0010-2663: ref=['SIR', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAHMAN', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0010-2663: hyp=['SO', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAMIN', 'HERE', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0011-2664: ref=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 
'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0011-2664: hyp=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0012-2665: ref=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0012-2665: hyp=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0013-2666: ref=['LET', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0013-2666: hyp=['LATER', 'TIRE', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0014-2667: ref=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'HAD', 'GIVEN', 'JUDGMENT', 'AGAINST', 'THE', 'BRAHMAN', 'BUT', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'DETERMINED', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0014-2667: hyp=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'IN', 'GIVING', 'JUDGMENT', 'OF', 'EUSTA', 'BRAMIN', 'WHICH', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'TURN', 'MIND', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0015-2668: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0015-2668: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'YOU', 'TENANT', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0016-2669: ref=['THE', 'BRAHMAN', 'STATED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'THEY', 'CLIMB', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0016-2669: hyp=['THE', 'BRAM', 'IS', 'SUITED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'DECLINE', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0017-2670: ref=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'THE', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMAN'] +2414-159411-0017-2670: hyp=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMIN'] +2414-159411-0018-2671: ref=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLIGATOR', 'AND', 'THE', 'BRAHMAN', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVORABLE', 'VERDICT'] +2414-159411-0018-2671: hyp=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLEGATOR', 'AND', 'THE', 'BRAMMER', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVORABLE', 'VERDICT'] +2414-159411-0019-2672: ref=['BUT', 'THE', 'ALLIGATOR', 'SAID', 'WHENEVER', 'I', 'PUT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', 'MEN', 'TORMENT', 'ME', 'AND', 'TRY', 'TO', 'KILL', 'ME'] +2414-159411-0019-2672: hyp=['WITH', 'THE', 'ADDURE', 'TO', 'THE', 'SIT', 'WHENEVER', 'A', 'PUT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', 'MAYNUM', 'AND', 'ME', 'AND', 'TRIED', 'TO', 'KILL', 'ME'] +2414-159411-0020-2673: ref=['THE', 'BRAHMAN', 'GAVE', 'HIMSELF', 'UP', 'AS', 'LOST', 'BUT', 'AGAIN', 'HE', 'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0020-2673: hyp=['NO', 'GRAMMEN', 'GAVE', 'HIMSELF', 'UP', 'AS', 'LOST', 'BUT', 'AGAIN', 'HE', 'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0021-2674: ref=['NOW', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0021-2674: hyp=['BY', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0022-2675: 
ref=['THE', 'BRAHMAN', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACKAL', 'UNCLE', 'JACKAL', 'SAY', 'WHAT', 'IS', 'YOUR', 'JUDGMENT'] +2414-159411-0022-2675: hyp=['THE', 'GRAMMER', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACK', 'HO', 'AND', 'WILL', 'JACK', 'HO', 'SAY', 'WHAT', 'IS', 'YOUR', 'TEM', 'IT'] +2414-159411-0023-2676: ref=['SHOW', 'ME', 'THE', 'PLACE'] +2414-159411-0023-2676: hyp=['SHOW', 'ME', 'THE', 'PACE'] +2414-159411-0024-2677: ref=['WHEN', 'THEY', 'GOT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NOW', 'BRAHMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0024-2677: hyp=['AND', 'THE', 'COURT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NABRAMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0025-2678: ref=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'THE', 'JACKAL'] +2414-159411-0025-2678: hyp=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'JACK', 'HO'] +2414-159411-0026-2679: ref=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'BRAHMAN'] +2414-159411-0026-2679: hyp=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'PROMIN'] +2414-159411-0027-2680: ref=['WHERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0027-2680: hyp=['THERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0028-2681: ref=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'TIGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0028-2681: hyp=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'DRAGGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0029-2682: ref=['VERY', 'GOOD', 'SAID', 'THE', 'JACKAL', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0029-2682: hyp=['VERY', 'GOOD', 'SAID', 'THE', 'JACK', 'HOPE', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0030-2683: ref=['SHUT', 'AND', 'BOLTED', 'SAID', 'THE', 'BRAHMAN'] +2414-159411-0030-2683: hyp=['SHED', 'AND', 'BOLTED', 'SAID', 'DEBRAMIN'] +2414-159411-0031-2684: ref=['THEN', 'SHUT', 'AND', 'BOLT', 'IT', 'SAID', 'THE', 'JACKAL'] +2414-159411-0031-2684: hyp=['VEN', 'SHIRT', 'AND', 'BOLTED', 'SAID', 'TO', 'JACKAL'] +2414-159411-0032-2685: ref=['WHEN', 'THE', 'BRAHMAN', 'HAD', 'DONE', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TIGER'] +2414-159411-0032-2685: hyp=['WHEN', 'THE', 'BRAMIN', 'HAD', 'TURNED', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TYER'] +2414-159411-0033-2686: ref=['WHEN', 'THE', 'GOOD', 'BRAHMAN', 'OPENED', 'YOUR', 'CAGE', 'DOOR', 'IS', 'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'YOU', 'WOULD', 'MAKE'] +2414-159411-0033-2686: hyp=['WITH', 'A', 'GOOD', 'BRAHMAN', 'OPENED', 'YOU', 'CARED', 'DOOR', 'IS', 'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'HE', 'WOULD', 'MAKE'] +2414-159411-0034-2687: ref=['PROCEED', 'ON', 'YOUR', 'JOURNEY', 'FRIEND', 'BRAHMAN'] +2414-159411-0034-2687: hyp=['PROCEED', 'ON', 'YOUR', 'JOURNEY', 'FRANJAMIN'] +2414-159411-0035-2688: ref=['YOUR', 'ROAD', 'LIES', 'THAT', 'WAY', 'AND', 'MINE', 'THIS'] +2414-159411-0035-2688: hyp=['HE', 'RULED', 'LIES', 'THAT', 'WAY', 'MIND', 'THIS'] +2414-165385-0000-2651: ref=['THUS', 'ACCOMPLISHED', 'HE', 'EXCITED', 'THE', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'COQUETTE', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'FLUTTERING', 'COXCOMB', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 
'CIVILIZED', 'MONKEY'] +2414-165385-0000-2651: hyp=["THERE'S", 'ACCOMPLISHED', 'EXCITED', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'POCKET', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'REFLECTING', 'ACCOUNT', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 'CIVILIZED', 'MONKEY'] +2414-165385-0001-2652: ref=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'SENTENCED', 'TO', 'INHABIT', 'THE', 'BODY', 'OF', 'THAT', 'FINICAL', 'GRINNING', 'AND', 'MISCHIEVOUS', 'LITTLE', 'MIMICK', 'WITH', 'FOUR', 'LEGS', 'WHICH', 'YOU', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2414-165385-0001-2652: hyp=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'INTENSE', 'TO', 'INHABIT', 'A', 'BODY', 'OF', 'THAT', 'PHYNICAL', 'GRINNING', 'AND', 'MACHINEROUS', 'LITTLE', 'MIMIC', 'WITH', 'FULL', 'LEGS', 'WHICH', 'SHE', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2609-156975-0000-2367: ref=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0000-2367: hyp=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0001-2368: ref=['HOLD', 'ON', 'HOLD', 'FAST', 'HOLD', 'OUT', 'PATIENCE', 'IS', 'GENIUS'] +2609-156975-0001-2368: hyp=['OR', 'ON', 'HER', 'FAST', 'HODE', 'PATENTS', 'AS', 'GENIUS'] +2609-156975-0002-2369: ref=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MAKES', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'US', 'DARE', 'TO', 'DO', 'OUR', 'DUTY', 'AS', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0002-2369: hyp=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MATRON', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'STARED', 'TO', 'DO', 'OUR', 'DUTY', 'IF', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0003-2370: ref=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0003-2370: hyp=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0004-2371: ref=['EVERY', 'ONE', 'WHO', 'IS', 'TURBULENT', 'HAS', 'BEEN', 'FOUND', 'BY', 'KING', 'MERNEPTAH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'BIBLICAL', 'NARRATIVES', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 'HEBREWS', 'IN', 'EGYPT', 'IS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PICTURE', 'WHICH', 'THE', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'OF', 'THE', 'PERIOD'] +2609-156975-0004-2371: hyp=['EVERY', 'ONE', 'WHO', 'IS', 'TREBRANT', 'HAS', 'BEEN', 'FOUND', 'BY', 'GIMER', 'PATH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'PABRICAL', 'NARRATIVE', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 'HEBREWS', 'IN', 'EGYPT', 'HIS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PITCHER', 'WHICH', 'IT', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'THIS', 'PERIOD'] +2609-156975-0005-2372: ref=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFERENCE', 'TO', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0005-2372: hyp=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFUCER', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0006-2373: ref=['IT', 'SEEMS', 'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'OF', 'THE', 'TRIBES', 'WHICH', 'ULTIMATELY', 'COALESCED', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0006-2373: hyp=['IT', 'SEEMS', 
'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'THE', 'TRIBES', 'WHICH', 'ULTIMATE', 'COLLETS', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0007-2374: ref=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THE', 'TRADITIONAL', 'FATHER', 'OF', 'EPHRAIM', 'AND', 'MANASSEH', 'IMPLY', 'THAT', 'THESE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'IN', 'THIS', 'OPENING', 'SCENE', 'IN', "ISRAEL'S", 'HISTORY'] +2609-156975-0007-2374: hyp=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THEIR', 'TRADITIONAL', 'FOUNDER', 'THAT', 'FROM', 'IN', 'MANETTE', 'SE', 'INCLINE', 'THAT', 'THE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'A', 'SOUTHERN', 'TRINES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'OPENING', 'SCENE', 'IN', "ISRA'S"] +2609-156975-0008-2375: ref=['THE', 'BIBLICAL', 'NARRATIVES', 'APPARENTLY', 'DISAGREE', 'REGARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0008-2375: hyp=['THE', 'BIBOCO', 'NARRATIVES', 'APPARENTLY', 'DISAGREED', 'GUARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0009-2376: ref=['THE', 'LATER', 'TRADITIONS', 'TEND', 'TO', 'EXTEND', 'THE', 'PERIOD'] +2609-156975-0009-2376: hyp=['THE', 'LATER', 'JUDICINES', 'INTEREST', 'IN', 'THE', 'PERIOD'] +2609-156975-0010-2377: ref=['HERE', 'WERE', 'FOUND', 'SEVERAL', 'INSCRIPTIONS', 'BEARING', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'P', 'ATUM', 'HOUSE', 'OF', 'THE', 'GOD', 'ATUM'] +2609-156975-0010-2377: hyp=['YOU', 'WERE', 'FOUND', 'SEVEREIGN', 'SCRIPS', 'AND', 'SPARED', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'PATUM', 'OUTS', 'OF', 'THE', 'GOD', 'ATOM'] +2609-156975-0011-2378: ref=['A', 'CONTEMPORARY', 'INSCRIPTION', 'ALSO', 'STATES', 'THAT', 'HE', 'FOUNDED', 'NEAR', 'PITHUM', 'THE', 'HOUSE', 'OF', 'RAMSES', 'A', 'CITY', 'WITH', 'A', 'ROYAL', 'RESIDENCE', 'AND', 'TEMPLES'] +2609-156975-0011-2378: hyp=['A', 'CONTEMPORARY', 'INSCRIPTION', 'OUT', 'THE', 'STATES', 'THAT', 'HE', 'FOUND', 'A', 'NEAR', 'PITTHAM', 'THE', 'HOUSE', 'OF', 'RANSES', 'A', 'CITY', 'WITH', 'THE', 'ROYAL', 'RESIDENCE', 'IN', 'TEMPLES'] +2609-156975-0012-2379: ref=['THAT', 'THE', 'HEBREWS', 'WERE', 'RESTIVE', 'UNDER', 'THIS', 'TYRANNY', 'WAS', 'NATURAL', 'INEVITABLE'] +2609-156975-0012-2379: hyp=['THAT', 'THE', 'HEBREWS', 'WERE', 'WRETS', 'OF', 'UNDER', 'THE', 'STERN', 'WAS', 'NATURALLY', 'INEVITABLE'] +2609-156975-0013-2380: ref=['WAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'EXPECTED', 'FROM', 'A', 'DESPOTIC', 'RULER', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0013-2380: hyp=['WAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'INSPECTRE', 'FROM', 'IT', 'THAT', 'SPOTIC', 'ROAR', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0014-2381: ref=['THE', 'MAKING', 'OF', 'A', 'LOYAL', 'PATRIOT'] +2609-156975-0014-2381: hyp=['THE', 'MAKING', 'OF', 'THE', 'LOYAL', 'PATRIOT'] +2609-156975-0015-2382: ref=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AND', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0015-2382: hyp=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AN', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0016-2383: ref=['WAS', 'MOSES', 'JUSTIFIED', 'IN', 'RESISTING', 'THE', 'EGYPTIAN', 'TASKMASTER'] +2609-156975-0016-2383: hyp=["WE'S", 
'MOVES', "IT'S", 'JEST', 'FUN', 'AND', 'RESISTIN', 'DE', 'GYPTIAN', 'TAX', 'MASTER'] +2609-156975-0017-2384: ref=['IS', 'PEONAGE', 'ALWAYS', 'DISASTROUS', 'NOT', 'ONLY', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0017-2384: hyp=['HIS', 'PINIONS', 'ALWAYS', 'DISASTRATES', 'NOT', 'OWING', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0018-2385: ref=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'OF', 'MIDIAN'] +2609-156975-0018-2385: hyp=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'A', 'MILLION'] +2609-156975-0019-2386: ref=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'HAD', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLACE', 'OF', 'REFUGE', 'FOR', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0019-2386: hyp=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'AND', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLATE', 'OF', 'REPUGUE', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0020-2387: ref=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'B', 'C'] +2609-156975-0020-2387: hyp=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'C', 'B'] +2609-156975-0021-2388: ref=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMEN', 'WHO', 'RECEIVED', 'HIM', 'HOSPITABLY'] +2609-156975-0021-2388: hyp=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMAN', 'WHO', 'RECEIVED', 'HIM', 'HALF', 'SPITABLY'] +2609-156975-0022-2389: ref=['THESE', 'SAND', 'WANDERERS', 'SENT', 'HIM', 'ON', 'FROM', 'TRIBE', 'TO', 'TRIBE', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KEDEM', 'EAST', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'A', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0022-2389: hyp=['THESE', 'SAN', 'JUANS', 'SENT', 'HIM', 'ON', 'FROM', 'TIME', 'TO', 'TRIBE', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KIDDAM', 'EACH', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0023-2390: ref=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'IN', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'BECAME', 'IN', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0023-2390: hyp=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'AND', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'MICHANG', 'IN', 'THE', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0024-2391: ref=['THE', 'SCHOOL', 'OF', 'THE', 'WILDERNESS'] +2609-156975-0024-2391: hyp=['THE', 'SCHOOL', 'AS', 'THE', 'WEARINESS'] +2609-156975-0025-2392: ref=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'TO', 'THAT', 'OF', 'SINUHIT'] +2609-156975-0025-2392: hyp=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'DID', 'NOT', 'ASSUME', 'IT'] +2609-156975-0026-2393: ref=['THE', 'PRIEST', 'OF', 'THE', 'SUB', 'TRIBE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0026-2393: hyp=['THE', 'PRIEST', 'THE', 'SUBTRINE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0027-2394: ref=['NOTE', 'THE', 'CHARACTERISTIC', 'ORIENTAL', 'IDEA', 'OF', 'MARRIAGE'] +2609-156975-0027-2394: hyp=['NOTE', 'THE', 'CARE', 'OF', 'A', 'RESTAKE', 'ORIENTOUINE', 'OF', 'MARES'] 
+2609-156975-0028-2395: ref=['HERE', 'MOSES', 'LEARNED', 'THE', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINING', 'AS', 'THE', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0028-2395: hyp=['HERE', 'MOSES', 'LEARNED', 'THAT', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINED', 'IN', 'AS', 'A', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0029-2396: ref=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAIN', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'TO', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0029-2396: hyp=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAIN', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'A', 'SUDDEN', 'TRIUMPHS', 'SHE', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0030-2397: ref=['MANY', 'MODERN', 'SCHOLARS', 'DRAW', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YAHWEH', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GOD', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'SCRIBES', 'JEHOVAH'] +2609-156975-0030-2397: hyp=['MANY', 'MODERN', 'STARS', 'DRAWN', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENNITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YONWAY', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GONE', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'GRIBES', 'JEHOVAH'] +2609-156975-0031-2398: ref=['DO', 'THE', 'EARLIEST', 'HEBREW', 'TRADITIONS', 'IMPLY', 'THAT', 'THE', 'ANCESTORS', 'OF', 'THE', 'ISRAELITES', 'WERE', 'WORSHIPPERS', 'OF', 'JEHOVAH'] +2609-156975-0031-2398: hyp=['DO', 'THE', 'ARIOSABOO', 'TRADITIONS', 'IMPLY', 'THAT', 'INSECTORS', 'OF', 'THE', 'ISRAIT', 'WERE', 'WORSHIPPED', 'OF', 'JEHOVAH'] +2609-156975-0032-2399: ref=['THE', 'TITLE', 'OF', 'HIS', 'FATHER', 'IN', 'LAW', 'IMPLIES', 'THAT', 'THIS', 'PRIEST', 'MINISTERED', 'AT', 'SOME', 'WILDERNESS', 'SANCTUARY'] +2609-156975-0032-2399: hyp=['THE', 'TOWN', 'OF', 'HIS', 'FUND', 'AND', 'ALL', 'IMPLIES', 'AT', 'THIS', 'PREACH', 'MINISTERED', 'AT', 'SOME', 'LEARNAN', 'SANCTUARY'] +2609-156975-0033-2400: ref=['MOSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MIDIAN', 'PRIEST', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'AND', 'CONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0033-2400: hyp=['ROSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MENDIAN', 'PRIESTS', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'INCONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0034-2401: ref=['THE', 'CRUEL', 'FATE', 'OF', 'HIS', 'PEOPLE', 'AND', 'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0034-2401: hyp=['THE', 'CRUEL', 'FATE', 'OF', 'THIS', 'PEOPLE', 'AND', 'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0035-2402: ref=['HIS', 'QUEST', 'WAS', 'FOR', 'A', 'JUST', 'AND', 'STRONG', 'GOD', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0035-2402: hyp=['HIS', 'PRICE', 'WAS', 'FOR', 'JETS', 'AND', 'STRONG', 'ARM', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0036-2403: ref=['THE', 'WILDERNESS', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEPENED', 'HIS', 'SENSE', 'OF', 
'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'A', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DESTINIES', 'OF', 'MEN'] +2609-156975-0036-2403: hyp=['THE', 'WEDDING', 'IT', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEP', 'INTO', 'SENSE', 'OF', 'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'A', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DEBTS', 'NEEDS', 'OF', 'MEN'] +2609-156975-0037-2404: ref=['THE', 'PEASANTS', 'OF', 'THE', 'VAST', 'ANTOLIAN', 'PLAIN', 'IN', 'CENTRAL', 'ASIA', 'MINOR', 'STILL', 'CALL', 'EVERY', 'LIFE', 'GIVING', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0037-2404: hyp=['THE', 'PEASANTS', 'OF', 'THE', 'VATS', 'INTONING', 'PLAIN', 'OF', 'CENTRAL', 'AS', 'A', 'MINOR', 'SO', 'CALL', 'EVERY', 'LIFE', 'GIVEN', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0038-2405: ref=['THE', 'CONSTANT', 'NECESSITY', 'OF', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WILDERNESS', 'AND', 'OF', 'DEFENDING', 'THE', 'FLOCKS', 'ENTRUSTED', 'TO', 'MOSES', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEADERSHIP', 'AND', 'ACTION'] +2609-156975-0038-2405: hyp=['THEY', "CAN'T", 'SENT', 'THE', 'NECESSITY', 'A', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WILDERNESS', 'AND', 'THE', 'DEFENDING', 'THE', 'FLOCKS', 'AND', 'TRITES', 'OF', 'JEMOSIS', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEISURESHIP', 'AND', 'ACTION'] +2609-157645-0000-2352: ref=['EVIDENTLY', 'THE', 'INTENTION', 'WAS', 'TO', 'MAKE', 'THINGS', 'PLEASANT', 'FOR', 'THE', 'ROYAL', 'FOE', 'OF', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0000-2352: hyp=['EVIDENTLY', 'THE', 'INTENTION', 'WHICH', 'MADE', 'THINGS', 'PRESENT', 'FOR', 'THE', 'ROYAL', 'FOLK', 'A', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0001-2353: ref=['THE', 'PROHIBITION', 'IN', 'THE', 'REGULATION', 'QUOTED', 'OF', 'SMOKING', 'IN', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'IT', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0001-2353: hyp=['THE', 'PROBES', 'AND', 'THE', 'REGULATING', 'QUOTED', 'HER', 'SMOKING', 'AND', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0002-2354: ref=['SOMETIMES', 'TOBACCO', 'WAS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECTING', 'OR', 'DEODORIZING', 'PURPOSES'] +2609-157645-0002-2354: hyp=['SOMETIMES', 'TOBACCO', 'IS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECT', 'AN', 'OLD', 'DEAL', 'ARISING', 'PURPOSES'] +2609-157645-0003-2355: ref=['BLACKBURN', 'ARCHBISHOP', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0003-2355: hyp=['BLACKBIRD', 'ARCHBISH', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0004-2356: ref=['ON', 'ONE', 'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NOTTINGHAM', 'FOR', 'A', 'CONFIRMATION'] +2609-157645-0004-2356: hyp=['ON', 'ONE', 'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NINE', 'IN', 'HAM', 'FOR', 'A', 'CONFIRMATESON'] +2609-157645-0005-2357: ref=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTOR', 'PARR', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'WHAT', 'MISTER', 'DISNEY', 'PREVENTED', 'ARCHBISHOP', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VESTRY', 'AT', 'HATTON'] +2609-157645-0005-2357: hyp=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTRIPOIRE', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'AT', 'MIDSER', "DIDN'T", 
'PREVENTED', 'ARCHBISH', 'OF', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VETERY', 'AT', 'HATTON'] +2609-157645-0006-2358: ref=['PARR', 'WAS', 'SUCH', 'A', 'CONTINUAL', 'SMOKER', 'THAT', 'ANYONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'IF', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'HAD', 'TO', 'LEARN', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0006-2358: hyp=['PAR', 'WITH', 'SUCH', 'A', 'CONTINUOUS', 'MOCHER', 'THAT', 'ANY', 'ONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'AND', 'TO', 'LEARNED', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0007-2359: ref=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITCHFIELD', 'HE', 'HAD', 'AN', 'EXTRA', 'PIPE', 'AND', 'JOSHUA', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0007-2359: hyp=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITZFIELD', 'HE', 'ENDING', 'THAT', 'SIR', 'PIPE', 'AND', 'JOHNSHAW', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0008-2360: ref=['LET', 'THEM', 'SING', 'ANOTHER', 'PSALM', 'SAID', 'THE', 'CURATE'] +2609-157645-0008-2360: hyp=['THEM', 'TO', 'THEM', 'SING', 'AND', 'NOW', 'THE', 'PSALMS', 'SAKE', 'THE', 'CURATE'] +2609-157645-0009-2361: ref=['THEY', 'HAVE', 'SIR', 'REPLIED', 'THE', 'CLERK'] +2609-157645-0009-2361: hyp=['THEY', 'HAVE', 'SERVED', 'PARTLY', 'CLERK'] +2609-157645-0010-2362: ref=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0010-2362: hyp=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0011-2363: ref=['SIX', 'ARMS', 'THE', 'NEAREST', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AS', 'MANY', 'TOBACCO', 'POUCHES', 'TO', 'THE', 'MAN', 'OF', 'OFFICE'] +2609-157645-0011-2363: hyp=['SIX', 'ARMS', 'THE', 'NOTCH', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AND', 'AS', 'MANY', 'TOBACCO', 'PIUCHES', 'TO', 'THE', 'MEN', 'OF', 'OFFICE'] +2609-157645-0012-2364: ref=['DAVID', 'DEANS', 'HOWEVER', 'DID', 'NOT', 'AT', 'ALL', 'APPROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0012-2364: hyp=['DAVID', 'DEAN', 'SAMBERT', 'DID', 'NOT', 'AT', 'ALL', 'IMPROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0013-2365: ref=['GOING', 'TO', 'CHURCH', 'AT', 'HAYES', 'IN', 'THOSE', 'DAYS', 'MUST', 'HAVE', 'BEEN', 'QUITE', 'AN', 'EXCITING', 'EXPERIENCE'] +2609-157645-0013-2365: hyp=['GO', 'INTO', 'CHURCH', 'THAT', 'HATE', 'AND', 'THUS', 'DAYS', 'MISS', 'HAVE', 'BEEN', 'ACQUAINTED', 'AND', 'THE', 'SIGNING', 'SPIRITS'] +2609-157645-0014-2366: ref=['WHEN', 'THESE', 'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'THAT', 'I', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'CUSTOM', 'THEY', 'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-157645-0014-2366: hyp=['WHEN', 'THESE', 'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'OUT', 'THAT', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'COTTOM', 'THEY', 'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-169640-0000-2406: ref=['PROAS', 'IN', 'THAT', 'QUARTER', 'WERE', 'USUALLY', 'DISTRUSTED', 'BY', 'SHIPS', 'IT', 'IS', 'TRUE', 'BUT', 'THE', 'SEA', 'IS', 'FULL', 'OF', 'THEM', 'AND', 'FAR', 'MORE', 'ARE', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0000-2406: hyp=['PERHAPS', 'IN', 'THAT', 
'QUARTER', 'WHERE', 'USUAL', 'DISTRUDGED', 'BY', 'THE', 'STEPS', 'AT', 'IS', 'TRUE', 'BUT', 'THE', 'SEAS', 'FOR', 'THEM', 'FAR', 'MORE', 'OUR', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0001-2407: ref=['AN', 'HOUR', 'AFTER', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TO', 'A', 'LIGHT', 'AIR', 'THAT', 'JUST', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0001-2407: hyp=['NOW', 'I', 'OUTDREW', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TURNED', 'LIGHT', 'AIR', 'DAT', 'GITS', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0002-2408: ref=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAST', 'BUT', 'SHE', 'MINDED', 'HER', 'HELM', 'AS', 'A', 'LIGHT', 'FOOTED', 'GIRL', 'TURNS', 'IN', 'A', 'LIVELY', 'DANCE'] +2609-169640-0002-2408: hyp=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAT', 'BUT', 'SEA', 'MINDED', 'HER', 'HAIL', 'AS', 'THE', 'LIGHTFOOTED', 'GIRL', 'TURNED', 'IN', 'THE', 'LIVELY', 'DANCE'] +2609-169640-0003-2409: ref=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STEERING', 'SHIP', 'MOST', 'ESPECIALLY', 'IN', 'MODERATE', 'WEATHER'] +2609-169640-0003-2409: hyp=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STIRRING', 'SHIP', 'POSEY', 'SPENT', 'FREE', 'AND', 'MINDER', 'IT', 'WEATHER'] +2609-169640-0004-2410: ref=['MISTER', 'MARBLE', 'HE', 'I', 'DO', 'BELIEVE', 'WAS', 'FAIRLY', 'SNOOZING', 'ON', 'THE', 'HEN', 'COOPS', 'BEING', 'LIKE', 'THE', 'SAILS', 'AS', 'ONE', 'MIGHT', 'SAY', 'BARELY', 'ASLEEP'] +2609-169640-0004-2410: hyp=['MISTER', 'MARBLE', 'HE', 'OUGHT', 'TO', 'BELIEVE', 'WAS', 'FAIRLY', 'SNOOZING', 'ON', 'THE', 'INCOUPS', 'BEING', 'LIKE', 'THE', 'SAILORS', 'AS', 'ONE', 'MIGHT', 'SAY', 'VARIOUS', 'LEAP'] +2609-169640-0005-2411: ref=['AT', 'THAT', 'MOMENT', 'I', 'HEARD', 'A', 'NOISE', 'ONE', 'FAMILIAR', 'TO', 'SEAMEN', 'THAT', 'OF', 'AN', 'OAR', 'FALLING', 'IN', 'A', 'BOAT'] +2609-169640-0005-2411: hyp=['AT', 'THAT', 'MOMENT', 'I', 'KNOW', 'A', 'NOISE', 'WHEN', 'FAMILIAR', 'TO', 'SEE', 'MEN', 'THAT', 'OF', 'AN', 'OAR', 'FOLLOWING', 'IN', 'A', 'BOAT'] +2609-169640-0006-2412: ref=['I', 'SANG', 'OUT', 'SAIL', 'HO', 'AND', 'CLOSE', 'ABOARD'] +2609-169640-0006-2412: hyp=['AS', 'IN', 'YET', 'SO', 'HO', 'AND', 'CLOSE', 'ABROAD'] +2609-169640-0007-2413: ref=['HE', 'WAS', 'TOO', 'MUCH', 'OF', 'A', 'SEAMAN', 'TO', 'REQUIRE', 'A', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'WHAT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0007-2413: hyp=['HE', 'WAS', 'SHIMMERTS', 'OF', 'THE', 'SEAMAN', 'TO', 'REQUIRE', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'BUT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0008-2414: ref=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'US', 'A', 'MOMENT', 'OF', 'BREATHING', 'TIME'] +2609-169640-0008-2414: hyp=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'US', 'A', 'MOMENT', 'OF', 'BREATHING', 'TIME'] +2609-169640-0009-2415: ref=['AS', 'OUR', 'SHEETS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'LEISURE', 'TO', 'LOOK', 'ABOUT'] +2609-169640-0009-2415: hyp=['AS', 'OUR', 'SEATS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'A', 'LEISURE', 'TO', 'WORK', 'ABOUT'] +2609-169640-0010-2416: ref=['I', 'SOON', 'SAW', 'BOTH', 'PROAS', 'AND', 'GLAD', 'ENOUGH', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'NEARER'] 
+2609-169640-0010-2416: hyp=['I', 'SOON', 'SAW', 'BOTH', 'PROTS', 'AND', 'GRINDING', 'UP', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'IN', 'NEW', 'YORK'] +2609-169640-0011-2417: ref=['MISTER', 'KITE', 'OBSERVED', 'THIS', 'ALSO', 'AND', 'REMARKED', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCALS', 'ABACK'] +2609-169640-0011-2417: hyp=['BISHO', 'DESERVED', 'THIS', 'ALSO', 'IN', 'REMARK', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCUE', 'WAS', 'ABACK'] +2609-169640-0012-2418: ref=['A', 'BREATHLESS', 'STILLNESS', 'SUCCEEDED'] +2609-169640-0012-2418: hyp=['A', 'BREATH', 'WHICH', 'STILL', 'IN', 'IT', 'SUCCEEDED'] +2609-169640-0013-2419: ref=['THE', 'PROAS', 'DID', 'NOT', 'ALTER', 'THEIR', 'COURSE', 'BUT', 'NEARED', 'US', 'FAST'] +2609-169640-0013-2419: hyp=['THE', 'PROITS', 'DID', 'NOT', 'ALTER', 'THE', 'COURSE', 'BUT', 'NEARED', 'AT', 'ITS', 'FAST'] +2609-169640-0014-2420: ref=['I', 'HEARD', 'THE', 'RATTLING', 'OF', 'THE', 'BOARDING', 'PIKES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0014-2420: hyp=['I', 'HEARD', 'THE', 'RIDERING', 'OF', 'THE', 'BOARDING', 'PIPES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0015-2421: ref=['KITE', 'WENT', 'AFT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MUSKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0015-2421: hyp=['KIND', 'WENT', 'APT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MASKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0016-2422: ref=['THE', 'STILLNESS', 'THAT', 'REIGNED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEATH'] +2609-169640-0016-2422: hyp=['THE', 'STILLNESS', 'THAT', 'RAINED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEATH'] +2609-169640-0017-2423: ref=['THE', 'JOHN', 'BEHAVED', 'BEAUTIFULLY', 'AND', 'CAME', 'ROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0017-2423: hyp=['JOHN', 'BEHAVED', 'BEAUTIFULLY', 'HE', 'CAME', 'ROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0018-2424: ref=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'THEM', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRAPPLE'] +2609-169640-0018-2424: hyp=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'AND', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRANTEL'] +2609-169640-0019-2425: ref=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTLY', 'WELL', 'IN', 'THIS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 'SILENCE', 'AND', 'THE', 'CLOSEST', 'ATTENTION', 'TO', 'HIS', 'ORDERS'] +2609-169640-0019-2425: hyp=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTUALLY', 'WELL', 'IN', 'ITS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 'SCIENCE', 'IN', 'THE', 'CITIZEN', 'TO', 'HIS', 'ORDERS'] +2609-169640-0020-2426: ref=['NOT', 'A', 'SOUL', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0020-2426: hyp=['NOW', 'SO', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0021-2427: ref=['ON', 'OUR', 'SIDE', 'WE', 'GAVE', 'THE', 'GENTLEMEN', 'THE', 'FOUR', 'SIXES', 'TWO', 'AT', 'THE', 'NEAREST', 'AND', 'TWO', 'AT', 'THE', 'STERN', 'MOST', 'PROA', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'DISTANT'] +2609-169640-0021-2427: hyp=['WHEN', 'OURSAN', 'WE', 'GAVE', 'THE', 'GENTLEMAN', 'THE', 'FOUR', 'SIX', 'TO', 'OUT', 'THE', 'NEWS', 'AND', 'TWO', 'AT', 
'THE', 'STERNMOST', 'PRO', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'OF', 'ITS', 'END'] +2609-169640-0022-2428: ref=['THEY', 'WERE', 'LIKE', 'THE', 'YELLS', 'OF', 'FIENDS', 'IN', 'ANGUISH'] +2609-169640-0022-2428: hyp=['THEY', 'WERE', 'NIGHTLY', 'YEARS', 'OF', 'FIENDS', 'IN', 'ANGLES'] +2609-169640-0023-2429: ref=['I', 'DOUBT', 'IF', 'WE', 'TOUCHED', 'A', 'MAN', 'IN', 'THE', 'NEAREST', 'PROA'] +2609-169640-0023-2429: hyp=['AND', 'OUT', 'IF', 'WE', 'TOUCHED', 'THE', 'MAIN', 'IN', 'THE', 'NURTURE'] +2609-169640-0024-2430: ref=['IN', 'THIS', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ALL', 'HER', 'CANVAS', 'BEING', 'FULL', 'LEAVING', 'THE', 'PROA', 'MOTIONLESS', 'IN', 'HER', 'WAKE'] +2609-169640-0024-2430: hyp=['IN', 'THAT', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ON', 'FOR', 'A', 'CANVAS', 'BEEN', 'FOR', 'LEAVING', 'THE', 'PROCEED', 'IN', 'HER', 'WAKE'] +3005-163389-0000-1108: ref=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURN'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0000-1108: hyp=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURN'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0001-1109: ref=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEAR', 'DOWN', 'THE', 'FENCE'] +3005-163389-0001-1109: hyp=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEAR', 'DOWN', 'THE', 'FENCE'] +3005-163389-0002-1110: ref=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0002-1110: hyp=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0003-1111: ref=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUT', 'GAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0003-1111: hyp=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUTGAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0004-1112: ref=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0004-1112: hyp=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0005-1113: ref=['BECAUSE', "THEY'RE", 'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARKAND', "IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0005-1113: hyp=['BECAUSE', "THEY'RE", 'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARK', 'AND', "IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0006-1114: ref=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASKED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0006-1114: hyp=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASSED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0007-1115: ref=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0007-1115: hyp=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0008-1116: ref=['BUT', 'A', 'MOB', 'WITHOUT', 
'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0008-1116: hyp=['BUT', 'A', 'MOB', 'WITHOUT', 'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0009-1117: ref=['NOW', 'LEAVE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0009-1117: hyp=['NOW', 'LE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0010-1118: ref=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKNESS', 'HE', 'HEELED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0010-1118: hyp=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKINS', 'HE', 'HEALED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0011-1119: ref=['YOU', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0011-1119: hyp=['HE', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0012-1120: ref=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'THE', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'STANDSTILL'] +3005-163389-0012-1120: hyp=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'A', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'FAN', 'STILL'] +3005-163389-0013-1121: ref=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGUN', 'TO', 'SCREAM'] +3005-163389-0013-1121: hyp=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGAN', 'TO', 'SCREAM'] +3005-163389-0014-1122: ref=['SO', 'THEN', 'THE', 'RINGMASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0014-1122: hyp=['SO', 'THEN', 'A', 'RING', 'MASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0015-1123: ref=['IT', "WARN'T", 'FUNNY', 'TO', 'ME', 'THOUGH', 'I', 'WAS', 'ALL', 'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0015-1123: hyp=['IT', "WARN'T", 'FUNNY', 'TO', 'ME', 'THOUGH', 'I', 'WAS', 'ALL', 'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0016-1124: ref=['AND', 'THE', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFIRE', 'TOO'] +3005-163389-0016-1124: hyp=['AND', 'A', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFAR', 'TOO'] +3005-163389-0017-1125: ref=['HE', 'SHED', 'THEM', 'SO', 'THICK', 'THEY', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0017-1125: hyp=['HE', 'SHARED', 'THEM', 'SO', 'THICK', 'THEY', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0018-1126: ref=['WHY', 'IT', 
'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163389-0018-1126: hyp=['WHY', 'IT', 'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163390-0000-1185: ref=['ANDBUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0000-1185: hyp=['AND', 'BUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0001-1186: ref=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAW', 'HAWED', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0001-1186: hyp=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAWHAT', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0002-1187: ref=['TWENTY', 'PEOPLE', 'SINGS', 'OUT'] +3005-163390-0002-1187: hyp=['TWENTY', 'PEOPLE', 'SANGS', 'OUT'] +3005-163390-0003-1188: ref=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0003-1188: hyp=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0004-1189: ref=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0004-1189: hyp=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0005-1190: ref=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0005-1190: hyp=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0006-1191: ref=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0006-1191: hyp=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0007-1192: ref=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'OF', 'THE', 'TOWN'] +3005-163390-0007-1192: hyp=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'O', 'THE', 'TOWN'] +3005-163390-0008-1193: ref=['YOU', 'BET', 'IT', 'IS', 'THE', 'JEDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] +3005-163390-0008-1193: hyp=['YOU', 'BADE', 'IT', 'IS', 'THE', 'JUDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] +3005-163390-0009-1194: ref=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0009-1194: hyp=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0010-1195: ref=['WE', 'NEVER', 'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0010-1195: hyp=['WE', 'NEVER', 
'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0011-1196: ref=['GREENHORNS', 'FLATHEADS'] +3005-163390-0011-1196: hyp=['GREENHORNS', 'FLAT', 'HEADS'] +3005-163390-0012-1197: ref=['NO', 'I', 'SAYS', 'IT', "DON'T"] +3005-163390-0012-1197: hyp=['NO', 'I', 'SAY', 'IS', 'IT', "DON'T"] +3005-163390-0013-1198: ref=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'THE', 'BREED', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0013-1198: hyp=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'DE', 'BREED', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0014-1199: ref=['WELL', "THAT'S", 'WHAT', "I'M", 'A', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RAPSCALLIONS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', 'IS', 'DAT', 'SO'] +3005-163390-0014-1199: hyp=['WELL', "THAT'S", 'WHAT', 'I', 'MUST', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RATSKAGGS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', 'IS', 'DAT', 'SO'] +3005-163390-0015-1200: ref=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'THIRD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARCHIES', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'IN', 'OLD', 'TIMES', 'AND', 'RAISE', 'CAIN'] +3005-163390-0015-1200: hyp=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARK', 'IS', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'IN', 'OLD', 'TIMES', 'AND', 'RAISED', 'GAME'] +3005-163390-0016-1201: ref=['MY', 'YOU', 'OUGHT', 'TO', 'SEEN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0016-1201: hyp=['MY', 'YOU', 'OUGHT', 'TO', 'SEE', 'AN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0017-1202: ref=['RING', 'UP', 'FAIR', 'ROSAMUN'] +3005-163390-0017-1202: hyp=['RING', 'UP', 'FAIR', 'ROSAMOND'] +3005-163390-0018-1203: ref=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GET', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0018-1203: hyp=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GIT', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0019-1204: ref=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTHWHAT', 'THEN'] +3005-163390-0019-1204: hyp=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTH', 'WHAT', 'THEN'] +3005-163390-0020-1205: ref=['ALL', 'I', 'SAY', 'IS', 'KINGS', 'IS', 'KINGS', 'AND', 'YOU', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0020-1205: hyp=['ALL', 'I', 'SAY', 'IS', 'KINGS', 'IS', 'KINGS', 'AN', 'YE', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0021-1206: ref=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0021-1206: hyp=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0022-1207: ref=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0022-1207: hyp=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0023-1208: ref=['NOW', 'DE', 'DUKE', "HE'S", 'A', 'TOLERBLE', 'LIKELY', 'MAN', 'IN', 'SOME', 'WAYS'] +3005-163390-0023-1208: hyp=['NOW', 'TO', 'DO', "HE'S", 'A', 'TOLERABLE', 'LIKE', 'THE', 'MAN', 
'IN', 'SOME', 'WAYS'] +3005-163390-0024-1209: ref=['THIS', "ONE'S", 'A', 'MIDDLING', 'HARD', 'LOT', 'FOR', 'A', 'DUKE'] +3005-163390-0024-1209: hyp=['THIS', "ONE'S", 'A', 'MIDDLIN', 'HARD', 'LOT', 'FOR', 'A', 'DUPE'] +3005-163390-0025-1210: ref=['WHEN', 'I', 'WAKED', 'UP', 'JUST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0025-1210: hyp=['WHEN', 'I', 'WAKED', 'UP', 'JEST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0026-1211: ref=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0026-1211: hyp=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0027-1212: ref=['HE', 'WAS', 'OFTEN', 'MOANING', 'AND', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'LIZABETH'] +3005-163390-0027-1212: hyp=['HE', 'WAS', 'OFTEN', 'MOANING', 'IN', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'LISBETH'] +3005-163390-0028-1213: ref=['DOAN', 'YOU', 'HEAR', 'ME', 'SHET', 'DE', 'DO'] +3005-163390-0028-1213: hyp=["DON'T", 'YOU', 'HEAR', 'ME', 'SHUT', 'DE', 'DO'] +3005-163390-0029-1214: ref=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0029-1214: hyp=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0030-1215: ref=['JIS', 'AS', 'LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163390-0030-1215: hyp=['LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163391-0000-1127: ref=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0000-1127: hyp=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0001-1128: ref=['THE', "KING'S", 'DUDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AND', 'STARCHY'] +3005-163391-0001-1128: hyp=['THE', "KING'S", 'DERDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AN', 'STARCHY'] +3005-163391-0002-1129: ref=['WHY', 'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', 'HE', 'HAD', 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LEVITICUS', 'HIMSELF'] +3005-163391-0002-1129: hyp=['WHY', 'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', "HE'D", 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LEVITICUS', 'HIMSELF'] +3005-163391-0003-1130: ref=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0003-1130: hyp=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0004-1131: ref=['WHER', 'YOU', 'BOUND', 'FOR', 'YOUNG', 'MAN'] 
+3005-163391-0004-1131: hyp=['WERE', 'YOU', 'BOUND', 'FOR', 'YOUNG', 'MAN'] +3005-163391-0005-1132: ref=['GIT', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0005-1132: hyp=['GET', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0006-1133: ref=['I', 'DONE', 'SO', 'AND', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0006-1133: hyp=['I', 'DONE', 'SO', 'AN', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0007-1134: ref=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0007-1134: hyp=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0008-1135: ref=['HE', 'ASKED', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'THE', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILE', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0008-1135: hyp=['THE', 'AIR', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'A', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILE', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0009-1136: ref=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLING', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0009-1136: hyp=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLIN', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0010-1137: ref=['NO', 'MY', "NAME'S", 'BLODGETT', 'ELEXANDER', 'BLODGETT', 'REVEREND', 'ELEXANDER', 'BLODGETT', 'I', "S'POSE", 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'O', 'THE', "LORD'S", 'POOR', 'SERVANTS'] +3005-163391-0010-1137: hyp=['NO', 'MY', "NAME'S", 'BLODGE', 'IT', 'ALEXANDER', 'BLOT', 'REVEREND', 'ALEXANDER', 'BLODGET', 'I', 'SUPPOSE', 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'OF', 'THE', 'LARGE', 'POOR', 'SERVANTS'] +3005-163391-0011-1138: ref=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', "GEORGE'S", "G'YIRLS", 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 'LIVE'] +3005-163391-0011-1138: hyp=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', 'GEORGE', 'IS', 'GUY', 'EARLS', 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 'LIVE'] +3005-163391-0012-1139: ref=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'A', 'LIVED', 'TO', 'SEE', 'HIS', 'BROTHERS', 'POOR', 'SOUL'] +3005-163391-0012-1139: hyp=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'HAVE', 'LIVED', 'TO', 'SEE', 'HIS', "BROTHER'S", 'POOR', 'SOUL'] +3005-163391-0013-1140: ref=["I'M", 'GOING', 
'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FOR', 'RYO', 'JANEERO', 'WHERE', 'MY', 'UNCLE', 'LIVES'] +3005-163391-0013-1140: hyp=["I'M", 'GOIN', 'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FOR', 'RIO', 'GENERO', 'WHERE', 'MY', 'UNCLE', 'IS'] +3005-163391-0014-1141: ref=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHT', 'I', 'WAS', 'A', 'GOING'] +3005-163391-0014-1141: hyp=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHED', 'I', 'WAS', 'A', 'GOIN'] +3005-163391-0015-1142: ref=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', "FOURTEENTHAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HARE', 'LIP', 'POOR', 'THINGS'] +3005-163391-0015-1142: hyp=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', 'FOURTEEN', "THAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HAIR', 'LIP', 'POOR', 'THINGS'] +3005-163391-0016-1143: ref=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0016-1143: hyp=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0017-1144: ref=['OLD', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0017-1144: hyp=['O', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0018-1145: ref=['BLAMED', 'IF', 'HE', "DIDN'T", 'INQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'IN', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKSES', 'AND', 'ABOUT', "PETER'S", 'BUSINESSWHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'SWHICH", 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', "HARVEY'SWHICH", 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0018-1145: hyp=['BLAMED', 'IF', 'HE', "DIDN'T", 'ACQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'AND', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKES', 'AND', 'ABOUT', "PETER'S", 'BUSINESS', 'WHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'S", 'WHICH', 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', 'HARVEST', 'WHICH', 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0019-1146: ref=['WHEN', "THEY'RE", 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0019-1146: hyp=['WHEN', 'HER', 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0020-1147: ref=['WAS', 'PETER', 'WILKS', 'WELL', 'OFF'] +3005-163391-0020-1147: hyp=['WAS', 'PETER', 'WILKES', 'WELL', 'OFF'] +3005-163391-0021-1148: ref=['WHEN', 'WE', 'STRUCK', 'THE', 'BOAT', 'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 'SHE', 'GOT', 'OFF'] +3005-163391-0021-1148: hyp=['WHEN', 'WASTED', 'UP', 'THE', 'BOAT', 'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 'SHE', 'GOT', 'OFF'] +3005-163391-0022-1149: ref=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0022-1149: hyp=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0023-1150: ref=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0023-1150: hyp=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0024-1151: ref=['BUT', 'THE', 'KING', 'WAS', "CA'M", 'HE', 'SAYS'] +3005-163391-0024-1151: hyp=['THAT', 'THE', 'KING', 'WAS', 'CALM', 'HE', 'SAYS'] 
+3005-163391-0025-1152: ref=['THEY', 'GIVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WHAT', 'D', 'I', 'TELL', 'YOU'] +3005-163391-0025-1152: hyp=['THEY', 'GAVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WOULD', 'THEY', 'TELL', 'YOU'] +3005-163391-0026-1153: ref=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163391-0026-1153: hyp=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163399-0000-1154: ref=['PHELPS', 'WAS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0000-1154: hyp=['PHELPS', 'IS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0001-1155: ref=['I', 'WENT', 'AROUND', 'AND', 'CLUMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0001-1155: hyp=['I', 'WENT', 'AROUND', 'AND', 'CLIMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0002-1156: ref=['I', 'OUT', 'WITH', 'A', "YES'M", 'BEFORE', 'I', 'THOUGHT'] +3005-163399-0002-1156: hyp=['AH', 'OUT', 'WITH', 'A', 'YES', 'AND', 'FORE', 'I', 'THOUGHT'] +3005-163399-0003-1157: ref=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0003-1157: hyp=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0004-1158: ref=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOMED', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0004-1158: hyp=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOM', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0005-1159: ref=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0005-1159: hyp=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0006-1160: ref=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 'BAPTIST'] +3005-163399-0006-1160: hyp=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 'BAPTIST'] +3005-163399-0007-1161: ref=['YES', 'IT', 'WAS', 'MORTIFICATIONTHAT', 'WAS', 'IT'] +3005-163399-0007-1161: hyp=['YES', 'IT', 'WAS', 'MORTIFICATION', 'THAT', 'WAS', 'IT'] +3005-163399-0008-1162: ref=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH', 'YOU'] +3005-163399-0008-1162: hyp=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH', 'YOU'] +3005-163399-0009-1163: ref=['YOU', 'MUST', 'A', 'MET', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0009-1163: hyp=['YOU', 'MUST', 'AMERGE', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0010-1164: ref=['WHY', 'CHILD', 'IT', 'LL', 'BE', 'STOLE'] +3005-163399-0010-1164: hyp=['WHY', 'CHILD', "IT'LL", 'BE', 
'STOLE'] +3005-163399-0011-1165: ref=['IT', 'WAS', 'KINDER', 'THIN', 'ICE', 'BUT', 'I', 'SAYS'] +3005-163399-0011-1165: hyp=['IT', 'WAS', 'KIND', 'OR', 'THIN', 'EYES', 'BUT', 'I', 'SAYS'] +3005-163399-0012-1166: ref=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMP', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0012-1166: hyp=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMPED', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0013-1167: ref=['PRETTY', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILLS', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0013-1167: hyp=['PRETTY', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILL', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0014-1168: ref=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0014-1168: hyp=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0015-1169: ref=['SO', 'I', 'SAYS', 'TO', 'MYSELF', "HERE'S", 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'RESK', 'THE', 'TRUTH'] +3005-163399-0015-1169: hyp=['SO', 'I', 'SAYS', 'TO', 'MYSELF', 'HERE', 'IS', 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'REST', 'THE', 'TRUTH'] +3005-163399-0016-1170: ref=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0016-1170: hyp=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0017-1171: ref=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0017-1171: hyp=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0018-1172: ref=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0018-1172: hyp=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0019-1173: ref=['MISSUS', 'PHELPS', 'SHE', 'JUMPS', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0019-1173: hyp=['MISSUS', 'PHELPS', 'SHE', 'JUMPED', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0020-1174: ref=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0020-1174: hyp=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0021-1175: ref=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] +3005-163399-0021-1175: hyp=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] +3005-163399-0022-1176: ref=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0022-1176: hyp=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0023-1177: ref=['HE', 'MUST', 'A', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0023-1177: hyp=['HE', 'MUST', 'HAVE', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0024-1178: ref=['OH', "DON'T", 'DISTRESS', 'ME', 'ANY', "MORE'N", "I'M", 'ALREADY', 'DISTRESSED'] +3005-163399-0024-1178: hyp=['OH', 
"DON'T", 'DISTRESS', 'ME', 'ANY', 'MORE', 'NUM', 'ALREADY', 'DISTRESSED'] +3005-163399-0025-1179: ref=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "AIN'T", 'THAT', 'SOMEBODY', 'COMING'] +3005-163399-0025-1179: hyp=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "HAIN'T", 'THAT', 'SOMEBODY', 'COMIN'] +3005-163399-0026-1180: ref=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0026-1180: hyp=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0027-1181: ref=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0027-1181: hyp=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0028-1182: ref=["IT'S", 'TOM', 'SAWYER'] +3005-163399-0028-1182: hyp=['IS', 'TOM', 'SAWYER'] +3005-163399-0029-1183: ref=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'IT', 'STAYED', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0029-1183: hyp=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'ITS', 'STATE', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0030-1184: ref=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'THAT', 'BOAT'] +3005-163399-0030-1184: hyp=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'MY', 'BOAT'] +3080-5032-0000-312: ref=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0000-312: hyp=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0001-313: ref=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0001-313: hyp=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0002-314: ref=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'HER', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0002-314: hyp=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'A', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0003-315: ref=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0003-315: hyp=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0004-316: ref=['WELL', 'THE', 'BEST', "ON'T", 'IS', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0004-316: hyp=['WHY', 'THE', 'BEST', 'ON', 'IT', 'IS', 'THAT', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0005-317: ref=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'AND', 'UPON', 'SO', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGEST'] +3080-5032-0005-317: hyp=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'IN', 'A', 'POST', 'OF', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGERS'] +3080-5032-0006-318: ref=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 
'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THREE', 'S', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'AND', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LAID', 'TO', 'MY', 'CHARGE'] +3080-5032-0006-318: hyp=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THIRD', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'IN', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LATE', 'TO', 'MY', 'CHARGE'] +3080-5032-0007-319: ref=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0007-319: hyp=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0008-320: ref=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0008-320: hyp=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0009-321: ref=['MISTER', 'FISH', 'IS', 'THE', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAT', 'ANYBODY', 'MAY', 'PRETEND', 'A', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'BUT', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOUR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0009-321: hyp=['MISTER', 'FISH', 'IS', 'A', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAN', 'ANYBODY', 'MAY', 'PRETEND', 'TO', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOUR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0010-322: ref=['I', 'THINK', 'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0010-322: hyp=['I', 'THINK', 'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0011-323: ref=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUTS', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0011-323: hyp=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUT', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0012-324: ref=['BUT', 
'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0012-324: hyp=['BUT', 'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0013-325: ref=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0013-325: hyp=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAVE', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0014-326: ref=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0014-326: hyp=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0015-327: ref=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNT', 'AND', 'SUCH', 'AS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'AND', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'WITH', 'THEIR', 'GOOD', 'NATURE', 'AND', 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0015-327: hyp=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNTS', 'IN', 'SUCH', 'HAS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'IN', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'MY', 'DEAR', 'GOOD', 'NATURE', 'YOU', 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0016-328: ref=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0016-328: hyp=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0017-329: ref=['IF', 'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'SCAPE', 'IT'] +3080-5032-0017-329: hyp=['IF', 'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'ESCAPE', 'IT'] +3080-5032-0018-330: ref=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'AT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMOURS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 
'ENVYING', 'THEIR', 'NEIGHBOURS'] +3080-5032-0018-330: hyp=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'IT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMANS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 'ENVYING', 'THEIR', 'NEIGHBOURS'] +3080-5032-0019-331: ref=['THE', 'MATTER', 'IS', 'NOT', 'GREAT', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', "IN'T", 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0019-331: hyp=['THE', 'MATTER', 'IS', 'NOT', 'GREAT', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', 'IN', 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0020-332: ref=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0020-332: hyp=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0021-333: ref=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0021-333: hyp=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0022-334: ref=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] +3080-5032-0022-334: hyp=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] +3080-5032-0023-335: ref=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0023-335: hyp=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0024-336: ref=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0024-336: hyp=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0025-337: ref=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEKE', 'MIGHT', 
'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VERILY', 'BELIEVE', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0025-337: hyp=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEK', 'MIGHT', 'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VRAIRIB', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0026-338: ref=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THESE', 'CIVILITIES', 'OF', 'YOUR', "FATHER'S", 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5032-0026-338: hyp=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THE', 'CIVILITIES', 'OF', 'YOUR', 'FATHERS', 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5040-0000-278: ref=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0000-278: hyp=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0001-279: ref=['MY', 'POOR', 'LADY', 'VAVASOUR', 'IS', 'CARRIED', 'TO', 'THE', 'TOWER', 'AND', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'AND', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0001-279: hyp=['MY', 'POOR', 'LADY', 'VAVERASSEUR', 'IS', 'CHARACTERED', 'A', 'TOWER', 'IN', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'ANNE', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0002-280: ref=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'TALMASH'] +3080-5040-0002-280: hyp=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'THOMMISH'] +3080-5040-0003-281: ref=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0003-281: hyp=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0004-282: ref=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 'WHO', 'HAD', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', "IN'T", 'BUT', 'THEMSELVES'] +3080-5040-0004-282: hyp=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 'WHO', 'AT', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', 'IN', 'BUT', 'THEMSELVES'] +3080-5040-0005-283: ref=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] 
+3080-5040-0005-283: hyp=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] +3080-5040-0006-284: ref=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0006-284: hyp=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0007-285: ref=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTERS', 'TO', 'ME', 'NOR', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0007-285: hyp=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTER', 'TO', 'ME', 'NO', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0008-286: ref=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'AGONE', 'THAN', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0008-286: hyp=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'A', 'GONDON', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0009-287: ref=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0009-287: hyp=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0010-288: ref=['BY', 'THE', 'NEXT', 'I', 'SHALL', 'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0010-288: hyp=['BY', 'THE', 'NEXT', 'I', 'SHALL', 'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0011-289: ref=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', "COWLEY'S", 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0011-289: hyp=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', 'CARLIS', 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0012-290: ref=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLK', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] +3080-5040-0012-290: hyp=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLD', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] 
+3080-5040-0013-291: ref=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'IT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0013-291: hyp=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'AT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0014-292: ref=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0014-292: hyp=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0015-293: ref=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0015-293: hyp=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0016-294: ref=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0016-294: hyp=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0017-295: ref=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0017-295: hyp=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0018-296: ref=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0018-296: hyp=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0019-297: ref=['I', 'SEE', 'YOU', 'CAN', 'CHIDE', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0019-297: hyp=['I', 'SEE', 'YOU', 'CAN', 'CHID', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0020-298: ref=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'ON', 'ONE', 'SIDE', 'OF', 'OUR', 'MOUTHS', 'THAN', "T'OTHER", 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0020-298: hyp=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'SIDE', 'OF', 'OUR', 'MOTHS', 'THAN', 'THE', 'OTHER', 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0021-299: ref=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'ERE', 'THIS', 'THAT', 'I', 'SCAPED', 'DROWNING'] +3080-5040-0021-299: hyp=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'IF', 'THIS', 'THAT', 'I', 'ESCAPED', 
'DROWNING'] +3080-5040-0022-300: ref=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRIT', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0022-300: hyp=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRITE', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0023-301: ref=['HOW', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0023-301: hyp=['OH', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0024-302: ref=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AN', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0024-302: hyp=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AND', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0025-303: ref=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0025-303: hyp=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0026-304: ref=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', 'OUR', 'PRAYERS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0026-304: hyp=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', "I'LL", 'PRAY', 'AS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0027-305: ref=['WELL', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'A', 'TWELVEMONTH', 'TO', 'GAIN', 'ALL', 'THAT', 'THE', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0027-305: hyp=['WHILE', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'A', 'TWELVE', 'MONTHS', 'TO', 'GAIN', 'ALL', 'THAT', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0028-306: ref=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0028-306: hyp=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0029-307: ref=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', "E'ER", 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0029-307: hyp=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', 'ERE', 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 
'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0030-308: ref=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOU', 'THUS', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0030-308: hyp=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOURSELVES', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0031-309: ref=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'WOULD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NOR', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'OF', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0031-309: hyp=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'HAD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NO', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'ON', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0032-310: ref=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PASSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0032-310: hyp=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PAUSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0033-311: ref=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'OF', 'THIS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEAS', 'COD', 'WITH', 'NINE', 'PEAS', "IN'T", 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'AND', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] +3080-5040-0033-311: hyp=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'OF', 'THIS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEASE', 'COT', 'WITH', 'NINE', 'PEAS', 'INTO', 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'IT', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] +3331-159605-0000-695: ref=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HER', 'SKIRT', 'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PUTTEL', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0000-695: hyp=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HIS', 'GIRT', 'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PATTERN', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0001-696: ref=['THE', 'MORE', 'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0001-696: hyp=['THE', 'MORE', 
'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0002-697: ref=['I', 'VE', 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0002-697: hyp=["I'VE", 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0003-698: ref=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0003-698: hyp=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0004-699: ref=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0004-699: hyp=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0005-700: ref=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0005-700: hyp=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0006-701: ref=['LET', 'ME', 'SEE', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0006-701: hyp=['THAT', 'MISSY', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0007-702: ref=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0007-702: hyp=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0008-703: ref=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THAT', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0008-703: hyp=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THE', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOUR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0009-704: ref=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'ACCOMPLISHMENTS', 'OR', 'POSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] +3331-159605-0009-704: hyp=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'THE', 'ACCOMPLISHMENTS', 'OPPOSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] +3331-159605-0010-705: ref=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'WITH', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0010-705: hyp=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'THE', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0011-706: ref=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 
'FOUND', 'THAT', 'THIS', 'SIDE', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERY', 'ONE'] +3331-159605-0011-706: hyp=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 'FOUND', 'THAT', 'THIS', 'SIGHT', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERYONE'] +3331-159605-0012-707: ref=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FLATTERED', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0012-707: hyp=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FURTHER', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0013-708: ref=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'GRATITUDE'] +3331-159605-0013-708: hyp=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'CREDITU'] +3331-159605-0014-709: ref=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LOVE', 'WHICH', 'IS', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0014-709: hyp=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LAW', 'PITCHED', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0015-710: ref=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', 'GUISE', 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'WITH', 'A', 'SPICE', 'OF', 'THE', 'OLD', 'EVE', 'IN', 'HER', 'COMPOSITION'] +3331-159605-0015-710: hyp=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', "GUY'S", 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'WITH', 'THE', 'SPIES', 'OF', 'THE', 'OLD', 'EVE', 'AND', 'HER', 'COMPOSITION'] +3331-159605-0016-711: ref=['WHEN', 'SATURDAY', 'CAME', 'POLLY', 'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', 'COULD', "N'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHAWS', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THOUGH', 'IT', 'WAS', 'CALLING', 'TIME'] +3331-159605-0016-711: hyp=['WHEN', 'SAID', 'CAME', 'POLLY', 'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', "COULDN'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHORES', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THAT', 'WAS', 'CALLING', 'TIME'] +3331-159605-0017-712: ref=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DOG', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'DO', 'HER', 'DUTY'] +3331-159605-0017-712: hyp=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DARK', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'HER', 'DUTY'] +3331-159605-0018-713: ref=['TAKE', 'HOLD', 'OF', 'MASTER', "CHARLEY'S", 'HAND', 'MISS', 
'MAMIE', 'AND', 'WALK', 'PRETTY', 'LIKE', 'WILLY', 'AND', 'FLOSSY', 'SAID', 'THE', 'MAID'] +3331-159605-0018-713: hyp=['TAKE', 'HOLD', 'OF', 'MASSR', "CHARLIE'S", 'HAND', 'MISS', 'MAY', 'AND', 'BUCK', 'PRETTY', 'LIKE', 'BILLY', 'AND', 'FLOSSIE', 'SAID', 'THE', 'MATE'] +3331-159605-0019-714: ref=['AT', 'A', 'STREET', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOL', 'BOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0019-714: hyp=['A', 'DISTRICT', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOLBOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0020-715: ref=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'A', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'THAT', 'SHE', 'HAD', "N'T", 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0020-715: hyp=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'A', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'TILL', 'SHE', 'HAD', 'NOT', 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0021-716: ref=['SHE', 'REALLY', 'COULD', "N'T", 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0021-716: hyp=['SHE', 'REALLY', 'COULD', 'NOT', 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0022-717: ref=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'ROTHS', 'ANSWERED', 'POLLY'] +3331-159605-0022-717: hyp=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'WORSE', 'ANSWERED', 'POLLY'] +3331-159605-0023-718: ref=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'SO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0023-718: hyp=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'TO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0024-719: ref=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', 'DOES', 'IMPROVE', 'SHE', 'TRIES', 'TO', 'MEND', 'HER', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] +3331-159605-0024-719: hyp=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', "DOESN'T", 'PROVE', 'SHE', 'TRIES', 'TO', 'MEAN', 'TO', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] +3331-159605-0025-720: ref=['THANK', 'YOU', 'NO'] +3331-159605-0025-720: hyp=['THANK', 'YOU', 'NO'] +3331-159605-0026-721: ref=['HOW', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0026-721: hyp=['HER', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0027-722: ref=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 
'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0027-722: hyp=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0028-723: ref=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'RESOLVED', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0028-723: hyp=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'WE', 'SOFT', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0029-724: ref=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BRILLIANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', 'I', 'M', 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOYED'] +3331-159605-0029-724: hyp=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BUOYANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', "I'M", 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOY', 'IT'] +3331-159605-0030-725: ref=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUETTE', 'IN', 'HER', 'AND', 'I', 'VE', 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TOO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WHITE', 'LIE', 'CLEVERLY'] +3331-159605-0030-725: hyp=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUET', 'IN', 'HER', 'AND', "I'VE", 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WIDE', 'LIKE', 'LEVELLY'] +3331-159605-0031-726: ref=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 'LOVER', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 'ANSWER', 'AND', 'ACCEPTED', 'IT', 'IN', 'SILENCE'] +3331-159605-0031-726: hyp=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 'LOVE', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 'ANSWER', 'AND', 'ACCEPTED', 'IN', 'SILENCE'] +3331-159605-0032-727: ref=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVORITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FROM', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 
'AFTER', 'ALL'] +3331-159605-0032-727: hyp=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVOURITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FOR', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 'AFTER', 'ALL'] +3331-159605-0033-728: ref=['WAGGING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0033-728: hyp=['WORKING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0034-729: ref=['PERHAPS', 'SHE', 'LL', 'JILT', 'HIM'] +3331-159605-0034-729: hyp=['PERHAPS', "SHE'LL", 'CHILLED', 'HIM'] +3331-159605-0035-730: ref=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0035-730: hyp=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0036-731: ref=['MINNIE', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'PUDDLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'RUBBERS'] +3331-159605-0036-731: hyp=['MANY', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'POTTLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'WRAPPERS'] +3331-159605-0037-732: ref=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', "N'T", 'HE', 'PROPOSED'] +3331-159605-0037-732: hyp=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', 'NOT', 'HE', 'PROPOSED'] +3331-159605-0038-733: ref=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0038-733: hyp=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0039-734: ref=['TRULY', 'TRULY', 'FAN'] +3331-159605-0039-734: hyp=['TRULY', 'JULIE', 'FAN'] +3331-159605-0040-735: ref=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0040-735: hyp=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0041-736: ref=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0041-736: hyp=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0042-737: ref=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0042-737: hyp=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0043-738: ref=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 'BROKEN', 'OR', 'MIND', 'WHAT', 'SILLY', 'TATTLERS', 'SAY'] +3331-159605-0043-738: hyp=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', 'THEY', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 'BROKEN', 'OH', 'MIND', 'WHAT', 'SYTLER', 'SAY'] +3331-159605-0044-739: ref=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0044-739: hyp=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0045-740: ref=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0045-740: hyp=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0046-741: ref=['I', 'M', 'ODD', 'YOU', 'KNOW', 'AND', 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 
'ALL', 'MY', 'DAYS'] +3331-159605-0046-741: hyp=["I'M", 'NOT', 'YOU', 'KNOW', "I'M", 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 'ALL', 'MY', 'DAYS'] +3331-159609-0000-742: ref=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SUFFICES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0000-742: hyp=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SURFACES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0001-743: ref=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANCE', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BOTTLED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0001-743: hyp=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANS', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BOTHERED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0002-744: ref=['PITY', 'THAT', 'THE', 'END', 'SHOULD', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WENT', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THOUGH', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 'PARTINGS', 'THAT', 'MUST', 'BE'] +3331-159609-0002-744: hyp=['PITY', 'THAT', 'THE', 'ANCIENT', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WHEN', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THAN', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 'PARTING', 'STEP', 'MUST', 'BE'] +3331-159609-0003-745: ref=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TILL', 'HIS', 'VACATION', 'CAME', 'AND', 'A', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0003-745: hyp=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TO', 'HIS', 'VACATION', 'CAME', 'AND', 'A', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0004-746: ref=['TOM', 'WROTE', 'ONCE', 'A', 'WEEK', 
'TO', 'HIS', 'MOTHER', 'BUT', 'THE', 'LETTERS', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0004-746: hyp=['TUM', 'WOLT', 'ONES', 'A', 'WEEK', 'TO', 'HIS', 'MOTHER', 'BUT', 'THEY', 'LET', 'US', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0005-747: ref=['NO', 'I', 'M', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DULL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRIFLE', 'BLUE'] +3331-159609-0005-747: hyp=['NO', 'I', 'AM', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DOLL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRAVEL', 'BLUE'] +3331-159609-0006-748: ref=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', 'IT', 'IS', 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0006-748: hyp=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', "THERE'S", 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0007-749: ref=['I', 'TRY', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0007-749: hyp=['I', 'TRIED', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0008-750: ref=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0008-750: hyp=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0009-751: ref=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0009-751: hyp=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0010-752: ref=['POOR', 'POLLY', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0010-752: hyp=['POOR', 'PEARLING', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0011-753: ref=['NONE', 'WERE', 'NEEDED', 'HER', 'TELLTALE', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SOFA', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'WHEN', 'THE', 'HUNTERS', 'ARE', 'AFTER', 'IT'] +3331-159609-0011-753: hyp=['NONE', 'WERE', 'NEEDED', 'HER', 'TELL', 'HER', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SILVER', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'AND', 'THE', 'HANDLES', 'ARE', 'AFTER', 'IT'] +3331-159609-0012-754: ref=['ONCE', 'OR', 'TWICE', 'BUT', 'SORT', 'OF', 'JOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0012-754: hyp=['ONCE', 'OR', 'TWICE', 'THAT', 'SORT', 'OF', 'CHOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0013-755: ref=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0013-755: hyp=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0014-756: ref=['IT', 'WAS', 
'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'IN', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEW', 'REVELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0014-756: hyp=['IT', 'WAS', 'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'AND', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEWER', 'RELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0015-757: ref=['I', 'HOPE', 'MARIA', 'BAILEY', 'IS', 'ALL', 'HE', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', "N'T", 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0015-757: hyp=['I', 'HOPE', 'MARIA', 'BAILEY', 'IS', 'ONLY', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', 'NOT', 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0016-758: ref=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0016-758: hyp=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0017-759: ref=['SUPPOSE', 'I', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', 'THERE', 'IS', 'ANYTHING', 'TO', 'TELL'] +3331-159609-0017-759: hyp=['SUPPOSE', 'HER', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', "THERE'S", 'ANYTHING', 'TO', 'TELL'] +3331-159609-0018-760: ref=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0018-760: hyp=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0019-761: ref=['IT', 'WAS', 'A', 'VERY', 'DIFFERENT', 'WINTER', 'FROM', 'THE', 'LAST', 'FOR', 'BOTH', 'THE', 'GIRLS'] +3331-159609-0019-761: hyp=['IT', 'WAS', 'VERY', 'DIFFERENT', 'WINDOW', 'FROM', 'THE', 'LAST', 'ABOVE', 'THE', 'GIRLS'] +3331-159609-0020-762: ref=['IF', 'FANNY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BETTER', 'THAN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SYDNEY', 'AT', 'LEAST', 'FOR', 'HE', 
'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0020-762: hyp=['IF', 'ANY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BY', 'THEN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SIDNEY', 'AT', 'LEAST', 'FOR', 'HE', 'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0021-763: ref=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'WHEN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OVER', 'BOOK', 'OR', 'SEWING', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHAWS'] +3331-159609-0021-763: hyp=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'IN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OF', 'A', 'BOOK', 'OR', 'SOON', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHORES'] +3331-159609-0022-764: ref=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINTER', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'AND', 'WONDERED', 'AT', 'IT', 'MOST'] +3331-159609-0022-764: hyp=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINDOW', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'INVOLUNTE', 'AT', 'IT', 'MOST'] +3331-159609-0023-765: ref=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILEY', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'UTTER', 'DARKNESS'] +3331-159609-0023-765: hyp=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILIQUE', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'OTHER', 'DARKNESS'] +3331-159609-0024-766: ref=['FANNY', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'BROUGHT', 'TIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0024-766: hyp=['WHEN', 'HE', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'POURED', 'HIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0025-767: ref=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 'THAN', 'ANY', 'WE', 'READ'] +3331-159609-0025-767: hyp=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 'THAN', 'ANY', 'WEED'] +3528-168656-0000-864: ref=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0000-864: hyp=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0001-865: ref=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0001-865: hyp=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0002-866: 
ref=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FRANCOIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0002-866: hyp=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FROIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'MONSEIGNEUR', 'SAINT', 'JULIAN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSIDES', 'MONSIEUR', 'SAINT', 'EUSIBIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0003-867: ref=['AND', 'THE', 'SCHOOL', 'GIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THEIR', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'VOCAL', 'MOTHERS', 'FROWN'] +3528-168656-0003-867: hyp=['AND', 'THE', 'SCHOOLGIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THE', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'FULCAL', 'MOTHERS', 'FROWN'] +3528-168656-0004-868: ref=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0004-868: hyp=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0005-869: ref=['THE', 'RULE', 'OF', 'FONTEVRAULT', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0005-869: hyp=['THE', 'RULE', 'OF', 'FONTREVAL', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0006-870: ref=['SHE', 'WOULD', 'NOT', 'SHOW', 'THIS', 'OBJECT', 'TO', 'ANYONE'] +3528-168656-0006-870: hyp=['SHE', 'WOULD', 'NOT', 'SHOW', 'THE', 'SUBJECT', 'TO', 'ANY', 'ONE'] +3528-168656-0007-871: ref=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'UNOCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0007-871: hyp=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'ON', 'OCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0008-872: ref=['SOME', 'UNIQUE', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0008-872: hyp=['SOME', 'EUIK', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0009-873: ref=['THEY', 'LOST', 'THEMSELVES', 'IN', 'CONJECTURES'] +3528-168656-0009-873: hyp=['THEY', 'LOST', 'THEMSELVES', 'IN', 'CONJECTURES'] +3528-168656-0010-874: ref=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0010-874: hyp=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0011-875: ref=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCER', 'IS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0011-875: hyp=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 
'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCERS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0012-876: ref=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168656-0012-876: hyp=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168669-0000-877: ref=['THE', 'PRIORESS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0000-877: hyp=['THE', 'PRIORS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0001-878: ref=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0001-878: hyp=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0002-879: ref=['FATHER', 'FAUVENT'] +3528-168669-0002-879: hyp=['FATHER', 'VUENT'] +3528-168669-0003-880: ref=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0003-880: hyp=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0004-881: ref=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0004-881: hyp=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0005-882: ref=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0005-882: hyp=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0006-883: ref=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'SIDE', 'OF', 'THE', 'ALTAR'] +3528-168669-0006-883: hyp=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'THOUGHT', 'OF', 'THE', 'ALTAR'] +3528-168669-0007-884: ref=['THE', 'SLAB', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0007-884: hyp=['THE', 'FLAP', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0008-885: ref=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0008-885: hyp=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0009-886: ref=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0009-886: hyp=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0010-887: ref=['BECAUSE', 'DOM', 'MABILLON', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLONUS', 'HORSTIUS', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', 'MERLONUS', 'HORSTIUS', 'NEITHER', 'DO', 'I'] +3528-168669-0010-887: hyp=['BECAUSE', 'DON', 'MARLAN', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLINUS', 'HORSES', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', "MERLINA'S", 'HORSES', 'NEITHER', 'DO', 'I'] +3528-168669-0011-888: ref=['MERIT', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCK', 'YARD'] +3528-168669-0011-888: hyp=['MARRIAGE', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCKYARD'] +3528-168669-0012-889: ref=['AND', 'A', 'WOMAN', 'IS', 'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0012-889: hyp=['AND', 'A', 'WOMAN', 'IS', 
'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0013-890: ref=['AND', 'CAN', 'YOU', 'GET', 'A', 'LEVER'] +3528-168669-0013-890: hyp=['AND', 'CAN', 'YOU', 'GET', 'A', 'LOVER'] +3528-168669-0014-891: ref=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0014-891: hyp=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0015-892: ref=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0015-892: hyp=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0016-893: ref=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0016-893: hyp=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0017-894: ref=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0017-894: hyp=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0018-895: ref=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0018-895: hyp=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0019-896: ref=['FAUVENT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0019-896: hyp=['FOR', 'THAT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0020-897: ref=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0020-897: hyp=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0021-898: ref=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'REVEREND', 'MOTHER'] +3528-168669-0021-898: hyp=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'ROBIN', 'MOTHER'] +3528-168669-0022-899: ref=['WHEN', 'THE', 'VAULT', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0022-899: hyp=['WHEN', 'THE', 'WALL', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0023-900: ref=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0023-900: hyp=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: ref=['FATHER', 'FAUVENT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: hyp=['FATHER', 'FERVENT', 'REVEREND', 'MOTHER'] +3528-168669-0025-902: ref=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0025-902: hyp=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0026-903: ref=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0026-903: hyp=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0027-904: ref=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] +3528-168669-0027-904: hyp=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] +3528-168669-0028-905: ref=['AND', 'THEN', 'THE', 'WIND', 'IS', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0028-905: hyp=['AND', 'THEN', 'THE', 'WIND', 'DOES', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0029-906: ref=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0029-906: hyp=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0030-907: ref=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BETHUNE', 'A', 'JANSENIST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0030-907: hyp=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BESOON', 'A', 'GENTLEST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0031-908: ref=['THE', 
'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0031-908: hyp=['THE', 'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0032-909: ref=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'MORE', 'OFTEN'] +3528-168669-0032-909: hyp=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'BEDROOM', 'MORE', 'OFTEN'] +3528-168669-0033-910: ref=['HEY', 'MORE', 'OFTEN'] +3528-168669-0033-910: hyp=['HEY', 'MORE', 'OFTEN'] +3528-168669-0034-911: ref=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0034-911: hyp=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0035-912: ref=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0035-912: hyp=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0036-913: ref=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0036-913: hyp=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0037-914: ref=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0037-914: hyp=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0038-915: ref=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0038-915: hyp=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0039-916: ref=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PRIORESS'] +3528-168669-0039-916: hyp=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'TO', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PIRATES'] +3528-168669-0040-917: ref=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0040-917: hyp=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0041-918: ref=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0041-918: hyp=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0042-919: ref=['IN', 'HER', 'LIFETIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0042-919: hyp=['IN', 'HER', 'LIFETIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0043-920: ref=['FATHER', 'FAUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0043-920: hyp=['FATHER', 'VOUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0044-921: ref=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0044-921: hyp=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0045-922: ref=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0045-922: hyp=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0046-923: ref=['IF', 'YOU', 'HAD', 'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HER', 'CELL', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0046-923: hyp=['IF', 'YOU', 'HAD', 
'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HERSELF', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0047-924: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0047-924: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0048-925: ref=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0048-925: hyp=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0049-926: ref=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0049-926: hyp=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0050-927: ref=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0050-927: hyp=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0051-928: ref=['FORTUNATELY', 'THE', 'PRIORESS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0051-928: hyp=['FORTUNATELY', 'THE', 'PIRASS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0052-929: ref=['SHE', 'CONTINUED', 'FATHER', 'FAUVENT'] +3528-168669-0052-929: hyp=['SHE', 'CONTINUED', 'FATHER', 'REVENT'] +3528-168669-0053-930: ref=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0053-930: hyp=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0054-931: ref=['SAINT', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TOMB', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARRICIDES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 'WAS', 'DONE'] +3528-168669-0054-931: hyp=['SAINT', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TWO', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARASITES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 'WAS', 'DONE'] +3528-168669-0055-932: ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0055-932: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0056-933: ref=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0056-933: hyp=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0057-934: ref=['FATHER', 'FAUVENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0057-934: hyp=['FATHER', 'VENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0058-935: ref=['IT', 'IS', 
'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0058-935: hyp=['IT', 'IS', 'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0059-936: ref=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0059-936: hyp=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0060-937: ref=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'COMMUNITY'] +3528-168669-0060-937: hyp=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'CUNITY'] +3528-168669-0061-938: ref=['THE', 'FOUR', 'MOTHER', 'PRECENTORS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0061-938: hyp=['THE', 'FOREMOTHER', 'PRESENTERS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0062-939: ref=['NO', 'IN', 'LOWERING', 'THE', 'COFFIN'] +3528-168669-0062-939: hyp=['NO', 'INLORING', 'THE', 'COFFIN'] +3528-168669-0063-940: ref=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0063-940: hyp=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0064-941: ref=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0064-941: hyp=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0065-942: ref=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0065-942: hyp=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0066-943: ref=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0066-943: hyp=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0067-944: ref=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0067-944: hyp=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0068-945: ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0068-945: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0069-946: ref=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0069-946: hyp=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0070-947: ref=['BUT', 'IT', 'IS', 'FORBIDDEN'] +3528-168669-0070-947: hyp=['BUT', 'IT', 'IS', 'FORBIDDEN'] +3528-168669-0071-948: ref=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0071-948: hyp=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0072-949: ref=['THINK', 'FATHER', 'FAUVENT', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0072-949: hyp=['THINK', 'FATHER', 'UVERT', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0073-950: ref=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0073-950: hyp=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0074-951: ref=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 
'COMMISSION'] +3528-168669-0074-951: hyp=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'COMMISSION'] +3528-168669-0075-952: ref=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0075-952: hyp=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0076-953: ref=['CHONODEMAIRE', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GAULS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0076-953: hyp=['CHATEAU', 'DE', 'MER', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GULFS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0077-954: ref=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0077-954: hyp=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0078-955: ref=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CRUX', 'DUM', 'VOLVITUR', 'ORBIS'] +3528-168669-0078-955: hyp=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CREW', 'DOOM', 'VOLVETER', 'ORBUS'] +3528-168669-0079-956: ref=['THE', 'PRIORESS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVERFULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LOQUACITY', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0079-956: hyp=['THE', 'PYRIS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVER', 'FULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LEQUEST', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0080-957: ref=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIT', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0080-957: hyp=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIS', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0081-958: ref=['THE', 'FIRST', 'ABBOT', 'OF', 'CLAIRVAUX'] +3528-168669-0081-958: hyp=['THE', 'FIRST', 'ABBOT', 'OF', 'CLERVAL'] +3528-168669-0082-959: ref=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 'FOUR', 'EMPERORS', 'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0082-959: hyp=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 'FOUR', 'EMPERORS', 'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0083-960: ref=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 
'SANITARY', 'DEPARTMENT'] +3528-168669-0083-960: hyp=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 'SENATORY', 'DEPARTMENT'] +3528-168669-0084-961: ref=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSARY', 'OF', 'POLICE', 'SUCH', 'IS', 'THE', 'AGE', 'SILENCE', 'FAUVENT'] +3528-168669-0084-961: hyp=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSORY', 'OF', 'POLICE', 'SUCH', 'WAS', 'THE', 'AGE', 'SILENCE', 'FAVAN'] +3528-168669-0085-962: ref=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'TO', 'SEPULTURE'] +3528-168669-0085-962: hyp=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'CHOOSE', 'SEPULTURE'] +3528-168669-0086-963: ref=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0086-963: hyp=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0087-964: ref=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0087-964: hyp=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0088-965: ref=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0088-965: hyp=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0089-966: ref=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0089-966: hyp=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0090-967: ref=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGITTAIRE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALONE', 'BISHOP', 'OF', 'EMBRUN', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MOMMOL'] +3528-168669-0090-967: hyp=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGATURE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALON', 'BISHOP', 'OF', 'EMBRON', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MAMMA'] +3528-168669-0091-968: ref=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0091-968: hyp=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0092-969: ref=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0092-969: hyp=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0093-970: ref=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0093-970: hyp=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0094-971: ref=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0094-971: hyp=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0095-972: ref=['BY', 'ORDER', 'OF', 'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0095-972: hyp=['BY', 'ORDER', 'OF', 'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0096-973: ref=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0096-973: hyp=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0097-974: ref=['GAUTHIER', 'BISHOP', 'OF', 'CHALONS', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0097-974: hyp=['GATHIER', 'BISHOP', 'OF', 'CALON', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0098-975: ref=['THE', 'PRIORESS', 
'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0098-975: hyp=['THE', 'PRIORS', 'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0099-976: ref=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0099-976: hyp=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0100-977: ref=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0100-977: hyp=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0101-978: ref=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0101-978: hyp=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0102-979: ref=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0102-979: hyp=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0103-980: ref=['A', 'PAUSE', 'ENSUED'] +3528-168669-0103-980: hyp=['A', 'PAUSE', 'ENSUIT'] +3528-168669-0104-981: ref=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BELL'] +3528-168669-0104-981: hyp=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BELT'] +3528-168669-0105-982: ref=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0105-982: hyp=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0106-983: ref=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0106-983: hyp=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0107-984: ref=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0107-984: hyp=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEBT', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0108-985: ref=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0108-985: hyp=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0109-986: ref=['THAT', 'IS', 'WELL', 'FATHER', 'FAUVENT'] +3528-168669-0109-986: hyp=['THAT', 'IS', 'WELL', 'FATHER', 'VENT'] +3528-168669-0110-987: ref=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0110-987: hyp=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0111-988: ref=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0111-988: hyp=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0112-989: ref=['REVEREND', 'MOTHER', 'WHAT'] +3528-168669-0112-989: hyp=['REVEREND', 'MOTHER', 'WHAT'] +3528-168669-0113-990: ref=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0113-990: hyp=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0114-991: ref=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0114-991: hyp=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0115-992: ref=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] 
+3528-168669-0115-992: hyp=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] +3528-168669-0116-993: ref=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0116-993: hyp=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0117-994: ref=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0117-994: hyp=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0118-995: ref=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0118-995: hyp=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0119-996: ref=['MOTHER', 'ASCENSION', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0119-996: hyp=['MOTHER', 'ISSUE', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0120-997: ref=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LEVER'] +3528-168669-0120-997: hyp=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LOVER'] +3528-168669-0121-998: ref=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0121-998: hyp=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0122-999: ref=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0122-999: hyp=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0123-1000: ref=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0123-1000: hyp=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0124-1001: ref=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'FAUVENT'] +3528-168669-0124-1001: hyp=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'VENT'] +3528-168669-0125-1002: ref=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0125-1002: hyp=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0126-1003: ref=['AH', 'THE', 'DE', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0126-1003: hyp=['AH', 'LEDA', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0127-1004: ref=['THE', 'VIL', 'STUCK', 'FAST', 'IN', 'HIS', 'THROAT'] +3528-168669-0127-1004: hyp=['THE', 'VILLE', 'STUCK', 'FAST', 'IN', 'HIS', 'THROAT'] +3528-168669-0128-1005: ref=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0128-1005: hyp=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0129-1006: ref=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVEREND', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0129-1006: hyp=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVERED', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0130-1007: ref=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] +3528-168669-0130-1007: hyp=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] 
+3538-142836-0000-1567: ref=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONARY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0000-1567: hyp=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONERY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0001-1568: ref=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUANTITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0001-1568: hyp=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUALITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0002-1569: ref=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MILDEWED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0002-1569: hyp=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MELTED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0003-1570: ref=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0003-1570: hyp=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0004-1571: ref=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SYRUP', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SYRUP', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0004-1571: hyp=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SERF', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SERF', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0005-1572: ref=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0005-1572: hyp=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0006-1573: ref=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 'LIKE', 'OIL'] +3538-142836-0006-1573: hyp=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 'LIKE', 'OIL'] +3538-142836-0007-1574: ref=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESERVE', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'HAS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0007-1574: hyp=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESENT', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'IS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0008-1575: ref=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0008-1575: hyp=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0009-1576: ref=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 
'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0009-1576: hyp=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0010-1577: ref=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'AN', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0010-1577: hyp=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'OUR', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0011-1578: ref=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WILL', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0011-1578: hyp=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WOULD', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0012-1579: ref=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SIEVE', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'ARE', 'TURNED'] +3538-142836-0012-1579: hyp=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SEA', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'RETURNED'] +3538-142836-0013-1580: ref=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LEMON', 'CHIPS', 'ARE', 'PRESERVED'] +3538-142836-0013-1580: hyp=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LINENSHIPS', 'ARE', 'PRESERVED'] +3538-142836-0014-1581: ref=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PASTES', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'REQUEST'] +3538-142836-0014-1581: hyp=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PACE', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'QUEST'] +3538-142836-0015-1582: ref=['MARMALADES', 'AND', 'JAMS', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEY', 'ARE', 'PRESERVES', 'OF', 'A', 'HALF', 'LIQUID', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RINDS', 'WITH', 'SUGAR'] +3538-142836-0015-1582: hyp=['MARMAL', 'ETS', 'AND', 'JAMS', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEIR', 'PRESERVES', 'OF', 'HALF', 'LIQUID', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RHINS', 'WITH', 'SUGAR'] +3538-142836-0016-1583: ref=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0016-1583: hyp=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0017-1584: ref=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 
'FIRE', 'BUT', 'ON', 'A', 'TRIVET', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0017-1584: hyp=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 'FIRE', 'BUT', 'ON', 'A', 'TRIBUT', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0018-1585: ref=['CONFECTIONARY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0018-1585: hyp=['CONFECTIONERY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0019-1586: ref=['IN', 'SPEAKING', 'OF', 'CONFECTIONARY', 'IT', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VARIOUS', 'FRUITS', 'FLOWERS', 'HERBS', 'ROOTS', 'AND', 'JUICES', 'WHICH', 'WHEN', 'BOILED', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFICERE', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONARY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'CUISINE'] +3538-142836-0019-1586: hyp=['IN', 'SPEAKING', 'OF', 'CONFECTIONERIES', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VERY', 'SPRUITS', 'FLOWERS', 'HERBS', 'RUTHS', 'AND', 'JUICES', 'WHICH', 'ONE', 'BOIL', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFERS', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONARY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'COUISINE'] +3538-142836-0020-1587: ref=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0020-1587: hyp=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0021-1588: ref=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGNS', 'OF', 'OUR', 'TWO', 'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESSERTS'] +3538-142836-0021-1588: hyp=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGN', 'OF', 'OUR', 'TWO', 'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESERTS'] +3538-142836-0022-1589: ref=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0022-1589: hyp=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0023-1590: ref=['ICES'] +3538-142836-0023-1590: 
hyp=['ISIS'] +3538-142836-0024-1591: ref=['AT', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'ICES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0024-1591: hyp=['A', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'IISES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0025-1592: ref=['THE', 'SPADDLE', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0025-1592: hyp=['THE', 'SPADEL', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0026-1593: ref=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-142836-0026-1593: hyp=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'OF', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-163619-0000-1500: ref=['THERE', 'WAS', 'ONCE', 'ON', 'A', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WIFE'] +3538-163619-0000-1500: hyp=['THERE', 'WAS', 'ONCE', 'TILL', 'THE', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WI'] +3538-163619-0001-1501: ref=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0001-1501: hyp=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0002-1502: ref=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0002-1502: hyp=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0003-1503: ref=['KISS', 'ME', 'GIRL', 'SAID', 'THE', 'HEAD'] +3538-163619-0003-1503: hyp=['KISS', 'ME', 'GO', 'SAID', 'THE', 'HEAD'] +3538-163619-0004-1504: ref=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0004-1504: hyp=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0005-1505: ref=['THE', 'YOUTH', 'PROMISED', 'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0005-1505: hyp=['THESE', 'PROMISED', 
'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0006-1506: ref=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0006-1506: hyp=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0007-1507: ref=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0007-1507: hyp=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0008-1508: ref=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0008-1508: hyp=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0009-1509: ref=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0009-1509: hyp=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0010-1510: ref=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0010-1510: hyp=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0011-1511: ref=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHER', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0011-1511: hyp=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHERS', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0012-1512: ref=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0012-1512: hyp=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0013-1513: ref=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 'HER', 'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'DAY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0013-1513: hyp=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 'HER', 'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'THEY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 
'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0014-1514: ref=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSELF'] +3538-163619-0014-1514: hyp=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSEL'] +3538-163622-0000-1515: ref=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOALS', 'ASKED', 'THE', 'KING'] +3538-163622-0000-1515: hyp=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOLDS', 'ASKED', 'THE', 'KING'] +3538-163622-0001-1516: ref=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'THAT', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0001-1516: hyp=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0002-1517: ref=['HAST', 'THOU', 'WATCHED', 'FAITHFULLY', 'AND', 'WELL', 'THE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0002-1517: hyp=['HAST', 'THOU', 'ART', 'FAITHFULLY', 'AND', 'WELL', 'BE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0003-1518: ref=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0003-1518: hyp=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0004-1519: ref=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0004-1519: hyp=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0005-1520: ref=['THEN', 'THE', 'KING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0005-1520: hyp=['THE', 'MACKING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0006-1521: ref=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOALS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'A', 'CLEFT', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 'OLD', 'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0006-1521: hyp=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOOLS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'CLIFF', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 'OLD', 'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0007-1522: ref=['COME', 'HITHER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0007-1522: hyp=['COMMANDER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0008-1523: ref=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOALS', 
'RUN', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0008-1523: hyp=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOLDS', 'WARM', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0009-1524: ref=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0009-1524: hyp=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0010-1525: ref=['ON', 'THE', 'THIRD', 'DAY', 'CINDERLAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0010-1525: hyp=['ON', 'THE', 'THIRD', 'DAY', 'SAID', 'THE', 'LAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0011-1526: ref=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'AND', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0011-1526: hyp=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'WHEN', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0012-1527: ref=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'CINDERLAD'] +3538-163622-0012-1527: hyp=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'SAINTO', 'LAD'] +3538-163622-0013-1528: ref=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDERLAD'] +3538-163622-0013-1528: hyp=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDER', 'LAD'] +3538-163622-0014-1529: ref=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0014-1529: hyp=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0015-1530: ref=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0015-1530: hyp=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FULL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0016-1531: ref=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDERLAD'] +3538-163622-0016-1531: hyp=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDER', 'LAD'] +3538-163622-0017-1532: ref=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 'A', 'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0017-1532: hyp=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 'A', 'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0018-1533: ref=['CINDERLAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DRAUGHT', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WIELD', 'THE', 'SWORD', 'WITH', 'PERFECT', 'EASE'] +3538-163622-0018-1533: hyp=['SOONER', 'LAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DROP', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WHEEL', 'THE', 'SWORD', 'WITH', 'PERFECT', 
'EASE'] +3538-163622-0019-1534: ref=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'HAS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0019-1534: hyp=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'IS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0020-1535: ref=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0020-1535: hyp=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'ALONG', 'A', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0021-1536: ref=['AND', 'NOW', 'INQUIRED', 'THE', 'FOAL', 'SEEST', 'THOU', 'NOTHING', 'NOW'] +3538-163622-0021-1536: hyp=['AND', 'NOW', 'INQUIRED', 'THE', 'FULL', 'CEASE', 'THOU', 'NOTHING', 'NOW'] +3538-163622-0022-1537: ref=['NOW', 'THEN', 'SAID', 'THE', 'FOAL', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0022-1537: hyp=['NOW', 'THEN', 'SAID', 'THE', 'FOOL', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0023-1538: ref=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0023-1538: hyp=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0024-1539: ref=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'CINDERLAD'] +3538-163622-0024-1539: hyp=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'SIR', 'LAD'] +3538-163624-0000-1540: ref=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0000-1540: hyp=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0001-1541: ref=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'FOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0001-1541: hyp=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'THOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0002-1542: ref=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0002-1542: hyp=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0003-1543: ref=['SO', 'HE', 'ASKED', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 'SHE', 'SAID'] +3538-163624-0003-1543: hyp=['SO', 'YES', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 
'SHE', 'SAID'] +3538-163624-0004-1544: ref=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0004-1544: hyp=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0005-1545: ref=['HE', 'IS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0005-1545: hyp=['HE', 'HAS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0006-1546: ref=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0006-1546: hyp=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0007-1547: ref=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0007-1547: hyp=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0008-1548: ref=['SO', 'REGIN', 'MADE', 'A', 'SWORD', 'AND', 'SIGURD', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'ON', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0008-1548: hyp=['SO', 'WE', 'GET', 'A', 'SWORD', 'AND', 'CIGAR', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'AND', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0009-1549: ref=['THEN', 'SIGURD', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGIN'] +3538-163624-0009-1549: hyp=['THEN', 'CIGAR', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGAN'] +3538-163624-0010-1550: ref=['SO', 'SIGURD', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0010-1550: hyp=['SO', 'CIGARS', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0011-1551: ref=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 'DEEP', 'VALLEY'] +3538-163624-0011-1551: hyp=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'HAD', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 'DEEP', 'VALLEY'] +3538-163624-0012-1552: ref=['BUT', 'SIGURD', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0012-1552: hyp=['BUT', 'CIGARET', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAHAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0013-1553: ref=['SIGURD', 'SAID', 'I', 
'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0013-1553: hyp=['CIGAR', 'SAID', 'I', 'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0014-1554: ref=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'NO', 'BRAVE', 'MAN', 'LETS', 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0014-1554: hyp=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'KNOW', 'BRAVE', 'MAN', 'LETS', 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0015-1555: ref=['DIE', 'THOU', 'FAFNIR', 'AND', 'THEN', 'FAFNIR', 'DIED'] +3538-163624-0015-1555: hyp=['GUY', 'THOU', 'FAFFNER', 'AND', 'THEN', 'STAFF', 'DIED'] +3538-163624-0016-1556: ref=['THEN', 'SIGURD', 'RODE', 'BACK', 'AND', 'MET', 'REGIN', 'AND', 'REGIN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNIR'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0016-1556: hyp=['THEN', 'CIGAR', 'RODE', 'BACK', 'AND', 'MET', 'RIGAN', 'AND', 'RIGAN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNER'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0017-1557: ref=['SO', 'SIGURD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNIR', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0017-1557: hyp=['SO', 'SIR', 'GOD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNER', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0018-1558: ref=['THERE', 'IS', 'SIGURD', 'ROASTING', "FAFNIR'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0018-1558: hyp=['THERE', 'IS', 'CIGAR', 'ROASTING', "FASTENER'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0019-1559: ref=['THAT', 'LET', 'HIM', 'DO', 'AND', 'THEN', 'RIDE', 'OVER', 'HINDFELL', 'TO', 'THE', 'PLACE', 'WHERE', 'BRYNHILD', 'SLEEPS'] +3538-163624-0019-1559: hyp=['THAT', 'LET', 'HIM', 'DO', 'THEN', 'RIDE', 'OVER', 'HINFIELD', 'TO', 'THE', 'PLACE', 'WHERE', 'BURNHILD', 'SLEEPS'] +3538-163624-0020-1560: ref=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMEST', 'FOR', 'HER', 'WAKING', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0020-1560: hyp=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMES', 'FOR', 'HER', 'WAKING', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0021-1561: ref=['THEN', 'HE', 'TOOK', 'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0021-1561: hyp=['THEN', 'HE', 'TOOK', 'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0022-1562: ref=['THEN', 'SIGURD', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0022-1562: hyp=['THEN', 'CIGAR', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0023-1563: ref=['THEN', "BRYNHILD'S", 'FATHER', 'TOLD', 'GUNNAR', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNAR', 'SET', 'HIS', 'HORSE', 'AT', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 
'FACE', 'IT'] +3538-163624-0023-1563: hyp=['WHEN', "BRUNHOLD'S", 'FATHER', 'TOLD', 'GUNNER', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNER', 'SET', 'HIS', 'HORSE', 'TO', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 'FACE', 'IT'] +3538-163624-0024-1564: ref=['FOR', 'ONE', 'DAY', 'WHEN', 'BRYNHILD', 'AND', 'GUDRUN', 'WERE', 'BATHING', 'BRYNHILD', 'WADED', 'FARTHEST', 'OUT', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', "GUIRUN'S", 'SUPERIOR'] +3538-163624-0024-1564: hyp=['FOR', 'ONE', 'DAY', 'WHEN', 'BURNEHELD', 'AND', 'GUNDRAIN', 'WERE', 'BATHING', 'BURNEHELD', 'WAITED', 'FARTHEST', 'SOUTH', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', 'GUNDERING', 'SUPERIOR'] +3538-163624-0025-1565: ref=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0025-1565: hyp=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0026-1566: ref=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILL', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +3538-163624-0026-1566: hyp=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILT', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +367-130732-0000-1466: ref=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0000-1466: hyp=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0001-1467: ref=['WHEN', 'IS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0001-1467: hyp=['WHEN', 'AS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0002-1468: ref=['THIS', 'QUESTION', 'AND', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMER', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSTER', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] +367-130732-0002-1468: hyp=['THIS', 'QUESTION', 'AND', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMARY', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'THE', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSOR', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] +367-130732-0003-1469: ref=['THE', 'PACIFIC', 'CRAYFISH', 'HOWEVER', 'SERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTEND', 'THAT', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'IN', 'FLAVOR', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIAN', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIAN'] +367-130732-0003-1469: hyp=['THE', 'PACIFIC', 'CRAYFISH', 'HOURSERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTENDED', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'IN', 'FLAVORIT', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIA', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 
'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIA'] +367-130732-0004-1470: ref=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0004-1470: hyp=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0005-1471: ref=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WORLD', 'WIDE', 'FAME', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'A', 'LA', 'NEWBERG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'DELMONICO'] +367-130732-0005-1471: hyp=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WOOLWIFE', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'ALAD', 'NEWBURG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'HE', 'WAS', 'PREPARED', 'IN', 'THE', 'DOMONICO'] +367-130732-0006-1472: ref=['LOBSTER', 'A', 'LA', 'NEWBERG'] +367-130732-0006-1472: hyp=['LOBSTER', 'OLY', 'NEWBURG'] +367-130732-0007-1473: ref=['ONE', 'POUND', 'OF', 'LOBSTER', 'MEAT', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0007-1473: hyp=['ONE', 'POUND', 'OF', 'LOBS', 'TO', 'ME', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0008-1474: ref=['PUT', 'THIS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0008-1474: hyp=['PUS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0009-1475: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'THIN', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0009-1475: hyp=['SERVE', 'IN', 'A', 'CHIEFING', 'DISH', 'WITH', 'FLIND', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0010-1476: ref=['KING', 'OF', 'SHELL', 'FISH'] +367-130732-0010-1476: hyp=['KING', 'OF', 'SHELLFISH'] +367-130732-0011-1477: ref=['ONE', 'HAS', 'TO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELL', 'FISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0011-1477: hyp=['ONE', 'HAS', 'TO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELLFISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0012-1478: ref=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAN', 'FRANCISCO'] +367-130732-0012-1478: hyp=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAMPANCISCO'] +367-130732-0013-1479: ref=["GOBEY'S", 
'PASSED', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'AND', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'IN', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0013-1479: hyp=['GOBYS', 'PASS', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'AND', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0014-1480: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', "GOBEY'S", 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESTIGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0014-1480: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'GOBIES', 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESAGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0015-1481: ref=["GOBEY'S", 'CRAB', 'STEW'] +367-130732-0015-1481: hyp=['GOBIUS', 'CRABS', 'DO'] +367-130732-0016-1482: ref=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'OF', 'THE', 'FAT', 'FROM', 'THE', 'SHELL'] +367-130732-0016-1482: hyp=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'THE', 'BAT', 'FROM', 'THE', 'SHELL'] +367-130732-0017-1483: ref=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0017-1483: hyp=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0018-1484: ref=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0018-1484: hyp=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0019-1485: ref=['HEAT', 'THIS', 'IN', 'A', 'STEWPAN', 'AND', 'WHEN', 'SIMMERING', 'ADD', 'THE', 'SHERRY', 'AND', 'CRAB', 'MEAT', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0019-1485: hyp=['HEAT', 'THIS', 'IN', 'A', 'STEWPANT', 'AND', 'WHEN', 'SIBBERING', 'AT', 'THE', 'SHERRY', 'AND', 'CRAB', 'ME', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0020-1486: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] +367-130732-0020-1486: hyp=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] +367-130732-0021-1487: ref=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0021-1487: hyp=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0022-1488: ref=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'HOF', 'BRAU', 'HAS', 'BEEN', 'MAKING', 'A', 'SPECIALTY', 'OF', 'THEM'] +367-130732-0022-1488: hyp=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'WHOLE', 'BROW', 'HAS', 'BEEN', 'MAKING', 'A', 'SPECIALTY', 'OF', 'THEM'] +367-130732-0023-1489: ref=['ALL', 'OF', 'THE', 'BETTER', 'CLASS', 'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0023-1489: hyp=['ALL', 'THE', 'BETTER', 'CLASS', 
'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0024-1490: ref=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0024-1490: hyp=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'IT', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0025-1491: ref=['BISQUE', 'OF', 'CRAWFISH'] +367-130732-0025-1491: hyp=['THIS', 'OF', 'CROFISH'] +367-130732-0026-1492: ref=['TAKE', 'THIRTY', 'CRAWFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLAWS', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0026-1492: hyp=['TAKE', 'THIRTY', 'CROPFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLOTH', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0027-1493: ref=['MINCE', 'OR', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AN', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'ADD', 'A', 'BAY', 'LEAF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MINIONETTE', 'PEPPER', 'AND', 'TWO', 'OUNCES', 'OF', 'BUTTER'] +367-130732-0027-1493: hyp=['MINCE', 'ARE', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AND', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'AT', 'A', 'BAILEAF', 'OF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MINOR', 'PEPPER', 'AND', 'TWO', 'OZ', 'OF', 'BUTTER'] +367-130732-0028-1494: ref=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 'TEN', 'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CRAWFISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0028-1494: hyp=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 'TEN', 'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CROPPISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0029-1495: ref=['ALLOW', 'THIS', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSOMME', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0029-1495: hyp=['ALLOW', 'US', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSUM', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0030-1496: ref=['PICK', 'OUT', 'THE', 'CRAWFISH', 'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0030-1496: hyp=['PICK', 'OUT', 'THE', 'CRAW', 'FISH', 
'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0031-1497: ref=['PICK', 'THE', 'SHELLS', 'OFF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CRAWFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0031-1497: hyp=['PICK', 'THE', 'SHELLS', 'OF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CROFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0032-1498: ref=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HALF', 'OF', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0032-1498: hyp=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HAPPEN', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0033-1499: ref=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'ON', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'CORAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-130732-0033-1499: hyp=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'IN', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'COAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-293981-0000-1445: ref=['I', 'SWEAR', 'IT', 'ANSWERED', 'SANCHO'] +367-293981-0000-1445: hyp=['I', 'SWEAR', 'ANSWERED', 'SANCHO'] +367-293981-0001-1446: ref=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', "ANYONE'S", 'GOOD', 'NAME'] +367-293981-0001-1446: hyp=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', 'ANY', "ONE'S", 'GOOD', 'NAME'] +367-293981-0002-1447: ref=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 'END', 'OF', 'YOUR', "WORSHIP'S", 'DAYS', 'AND', 'GOD', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TOMORROW'] +367-293981-0002-1447: hyp=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 'END', 'OF', 'YOUR', 'WORSHIP', 'STAYS', 'AND', 'GONE', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TO', 'MORROW'] +367-293981-0003-1448: ref=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THAT', 'INCOMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WHACKS', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0003-1448: hyp=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THE', 'INN', 'COMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WAX', 'THAT', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0004-1449: ref=['UNLUCKY', 'ME', 'AND', 'THE', 'MOTHER', 'THAT', 
'BORE', 'ME'] +367-293981-0004-1449: hyp=['UNLUCKY', 'ME', 'INTO', 'THE', 'MOTHER', 'THAT', 'BORE', 'ME'] +367-293981-0005-1450: ref=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0005-1450: hyp=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0006-1451: ref=['IT', 'CANNOT', 'BE', 'THE', 'MOOR', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANYONE'] +367-293981-0006-1451: hyp=['IT', 'CANNOT', 'BE', 'THE', 'MORE', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANYONE'] +367-293981-0007-1452: ref=['IF', 'THEY', "DON'T", 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0007-1452: hyp=['IF', 'THEY', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0008-1453: ref=['MINE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUFFICIENT', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0008-1453: hyp=['MIKE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUSPICION', 'OF', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0009-1454: ref=['THE', 'OFFICER', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'IT', 'GOOD', 'MAN'] +367-293981-0009-1454: hyp=['THE', 'OFFICERS', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'A', 'GOOD', 'MAN'] +367-293981-0010-1455: ref=['SANCHO', 'GOT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SENOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'THE', 'FAVOUR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WINE', 'FOR', 'IT', 'IS', 'WANTED', 'TO', 'CURE', 'ONE', 'OF', 'THE', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0010-1455: hyp=['SANCHA', 'CUT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SENOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'TO', 'FAVOR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WHITE', 'FOR', 'IT', 'IS', 'WATER', 'TO', 'CURE', 'ONE', 'OF', 'OUR', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0011-1456: ref=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATERIALS', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'AND', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 
'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0011-1456: hyp=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATURES', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'IT', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0012-1457: ref=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LEFT', 'IN', 'THE', 'PIGSKIN', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0012-1457: hyp=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LET', 'IN', 'A', 'PICTION', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0013-1458: ref=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GULPED', 'DOWN', 'AND', 'DRAINED', 'OFF', 'VERY', 'LITTLE', 'LESS', 'THAN', 'HIS', 'MASTER'] +367-293981-0013-1458: hyp=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GO', 'TO', 'DOWN', 'AND', 'DRAINED', 'UP', 'VERY', 'LITTLE', 'LESS', 'THAN', 'HIS', 'MASTER'] +367-293981-0014-1459: ref=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', 'WOE', 'BETIDE', 'ME', 'AND', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0014-1459: hyp=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', "WON'T", 'BETIDE', 'ME', 'AND', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0015-1460: ref=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'AND', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0015-1460: hyp=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'IN', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0016-1461: ref=['THEN', 'THIS', 'IS', 'AN', 'INN', 'SAID', 'DON', 'QUIXOTE'] +367-293981-0016-1461: hyp=['THEN', 'THIS', 'IS', 'AN', 'IN', 'SAID', 'DON', 'QUIXOTE'] +367-293981-0017-1462: ref=['AND', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0017-1462: hyp=['IN', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0018-1463: ref=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKETED', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'ATTENTIVELY', 'WAS', 'PERSUADED', 'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'HIS', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0018-1463: hyp=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKET', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'TINTIVELY', 'WAS', 'PERSUADED', 
'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'THE', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0019-1464: ref=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0019-1464: hyp=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0020-1465: ref=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINK', 'IT', 'NOT', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'OF', 'IT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +367-293981-0020-1465: hyp=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINKIN', 'O', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'WHAT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +3764-168670-0000-1666: ref=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0000-1666: hyp=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0001-1667: ref=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0001-1667: hyp=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0002-1668: ref=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0002-1668: hyp=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0003-1669: ref=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 'HOUSE', 'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0003-1669: hyp=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 'HOUSE', 'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0004-1670: ref=['EVERYTHING', 'IS', 'ARRANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0004-1670: hyp=['EVERYTHING', 'IS', 'RANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0005-1671: ref=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0005-1671: hyp=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0006-1672: ref=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0006-1672: hyp=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0007-1673: ref=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 
'OUT'] +3764-168670-0007-1673: hyp=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 'OUT'] +3764-168670-0008-1674: ref=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0008-1674: hyp=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0009-1675: ref=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0009-1675: hyp=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0010-1676: ref=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0010-1676: hyp=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0011-1677: ref=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0011-1677: hyp=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0012-1678: ref=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0012-1678: hyp=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0013-1679: ref=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORESS', 'EXPECTS', 'YOU'] +3764-168670-0013-1679: hyp=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORS', 'EXPECTS', 'YOU'] +3764-168670-0014-1680: ref=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0014-1680: hyp=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FOR', 'CHAUVELIN', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0015-1681: ref=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0015-1681: hyp=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0016-1682: ref=['THAT', 'THE', 'PRIORESS', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 'THE', 'DECEASED'] +3764-168670-0016-1682: hyp=['THAT', 'THE', 'PRIOR', 'REST', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 'THE', 'DECEASED'] +3764-168670-0017-1683: ref=['THAT', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISE', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'LOWER', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0017-1683: hyp=['THAT', 'HE', 'FOR', 'SCHLEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISED', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'BLOW', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0018-1684: ref=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] +3764-168670-0018-1684: hyp=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] 
+3764-168670-0019-1685: ref=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0019-1685: hyp=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0020-1686: ref=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0020-1686: hyp=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0021-1687: ref=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0021-1687: hyp=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0022-1688: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BOMB', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0022-1688: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BOMB', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0023-1689: ref=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0023-1689: hyp=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0024-1690: ref=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED', 'AH'] +3764-168670-0024-1690: hyp=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED'] +3764-168670-0025-1691: ref=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0025-1691: hyp=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0026-1692: ref=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0026-1692: hyp=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0027-1693: ref=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0027-1693: hyp=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0028-1694: ref=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'WHAT', 'CHURCH'] +3764-168670-0028-1694: hyp=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'A', 'WATCH'] +3764-168670-0029-1695: ref=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 'CAN', 'ENTER'] +3764-168670-0029-1695: hyp=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THOUGH', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 'CAN', 'ENTER'] +3764-168670-0030-1696: ref=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0030-1696: hyp=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0031-1697: ref=['NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0031-1697: hyp=['AND', 'NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0032-1698: ref=['ONLY', 
'TO', 'ALLOW', 'THE', "UNDERTAKER'S", 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0032-1698: hyp=['ONLY', 'TO', 'ALLOW', 'THE', 'UNDERTAKERS', 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0033-1699: ref=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0033-1699: hyp=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0034-1700: ref=['WHO', 'SPREADS', 'THE', 'PALL', 'OVER', 'IT'] +3764-168670-0034-1700: hyp=['WHO', 'SPREADS', 'THE', 'POOL', 'OVER', 'IT'] +3764-168670-0035-1701: ref=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0035-1701: hyp=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'BEDROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0036-1702: ref=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0036-1702: hyp=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0037-1703: ref=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0037-1703: hyp=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0038-1704: ref=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0038-1704: hyp=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0039-1705: ref=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0039-1705: hyp=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0040-1706: ref=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0040-1706: hyp=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0041-1707: ref=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0041-1707: hyp=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0042-1708: ref=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAITS', 'THAN', 'THIS'] +3764-168670-0042-1708: hyp=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAIT', 'THAN', 'THIS'] +3764-168670-0043-1709: ref=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 'CONTRACT', 'HIMSELF', 'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0043-1709: hyp=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 'CONTRACT', 'HIMSELF', 'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0044-1710: ref=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0044-1710: hyp=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0045-1711: ref=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 
'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0045-1711: hyp=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0046-1712: ref=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0046-1712: hyp=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0047-1713: ref=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0047-1713: hyp=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0048-1714: ref=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0048-1714: hyp=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0049-1715: ref=['THE', 'OVER', 'PRUDENT', 'CATS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0049-1715: hyp=['THE', 'OVERPRUDENT', 'CARTS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0050-1716: ref=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0050-1716: hyp=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0051-1717: ref=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 'YOU', 'OUT', 'OF', 'THE', 'GRAVE'] +3764-168670-0051-1717: hyp=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 'OUT', 'OF', 'THE', 'GRAVE'] +3764-168670-0052-1718: ref=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0052-1718: hyp=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0053-1719: ref=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0053-1719: hyp=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0054-1720: ref=['THE', 'HEARSE', 'HALTS', 'THE', "UNDERTAKER'S", 'MEN', 'KNOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0054-1720: hyp=['THE', 'HOUSEHOLTS', 'THE', 
'UNDERTAKERS', 'MEN', 'NOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0055-1721: ref=['THE', 'PRIEST', 'SAYS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0055-1721: hyp=['THE', 'PRIESTS', 'AS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0056-1722: ref=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0056-1722: hyp=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0057-1723: ref=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168670-0057-1723: hyp=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168671-0000-1724: ref=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MAINE', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSS', 'BONES', 'AND', 'TEARS'] +3764-168671-0000-1724: hyp=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MIN', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSS', 'BONES', 'AND', 'TEARS'] +3764-168671-0001-1725: ref=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0001-1725: hyp=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0002-1726: ref=['A', 'MOURNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0002-1726: hyp=['THE', 'MORNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0003-1727: ref=['BEHIND', 'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] +3764-168671-0003-1727: hyp=['BEHIND', 'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] +3764-168671-0004-1728: ref=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'IN', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0004-1728: hyp=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'IN', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0005-1729: ref=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 
'DOME', 'OF', 'THE', 'INVALIDES'] +3764-168671-0005-1729: hyp=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 'DOME', 'OF', 'THE', 'INVALIDE'] +3764-168671-0006-1730: ref=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0006-1730: hyp=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0007-1731: ref=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'VAUGIRARD', 'IT', 'HINTED', 'AT', 'POVERTY', 'PERE', 'LACHAISE', 'IF', 'YOU', 'PLEASE'] +3764-168671-0007-1731: hyp=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'ROGI', 'IT', 'HINTED', 'AT', 'POVERTY', "PALACHE'S", 'IF', 'YOU', 'PLEASE'] +3764-168671-0008-1732: ref=['TO', 'BE', 'BURIED', 'IN', 'PERE', 'LACHAISE', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0008-1732: hyp=['TO', 'BE', 'BURIED', 'IN', 'PERFELLAR', 'CHASE', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0009-1733: ref=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'TO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0009-1733: hyp=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'INTO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0010-1734: ref=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0010-1734: hyp=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0011-1735: ref=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0011-1735: hyp=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0012-1736: ref=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'IS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0012-1736: hyp=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'IS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0013-1737: ref=['MAKE', 'AS', 'MANY', 'LAWS', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0013-1737: 
hyp=['MAKE', 'AS', 'MANY', 'NOISE', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0014-1738: ref=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0014-1738: hyp=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0015-1739: ref=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0015-1739: hyp=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0016-1740: ref=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0016-1740: hyp=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0017-1741: ref=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0017-1741: hyp=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0018-1742: ref=['HE', 'PLAYED', 'WITH', 'FATHER', 'MESTIENNE'] +3764-168671-0018-1742: hyp=['HE', 'PLAYED', 'WITH', 'FATHER', 'MESSIAN'] +3764-168671-0019-1743: ref=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0019-1743: hyp=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0020-1744: ref=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0020-1744: hyp=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0021-1745: ref=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABORING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0021-1745: hyp=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABORING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0022-1746: ref=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0022-1746: hyp=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0023-1747: ref=['THE', 'GRAVE', 'DIGGER', 'YES'] +3764-168671-0023-1747: hyp=['THE', 'BRAVE', 'DIGGER', 'YES'] +3764-168671-0024-1748: ref=['YOU', 'I'] +3764-168671-0024-1748: hyp=['YOU', 'I'] +3764-168671-0025-1749: ref=['FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER', 'HE', 'WAS'] +3764-168671-0025-1749: hyp=['FATHER', 'MISSION', 'IS', 'THE', 'GRAVE', 'DIGGER', 'HE', 'WAS'] +3764-168671-0026-1750: ref=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0026-1750: hyp=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0027-1751: ref=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0027-1751: hyp=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0028-1752: ref=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0028-1752: hyp=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0029-1753: ref=['BUT', 'HE', 'PERSISTED', 'FEEBLY', 'FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0029-1753: 
hyp=['BUT', 'HE', 'PERSISTED', 'FEEBLY', "I'VE", 'A', 'MESSIAN', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0030-1754: ref=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'LENOIR', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0030-1754: hyp=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'LOIS', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0031-1755: ref=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0031-1755: hyp=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0032-1756: ref=['ARE', 'YOU', 'NOT', 'COMRADE', "WE'LL", 'GO', 'AND', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0032-1756: hyp=['ARE', 'YOU', 'NOT', 'COMRADE', 'WILGHAN', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0033-1757: ref=['THE', 'MAN', 'REPLIED'] +3764-168671-0033-1757: hyp=['THE', 'MAN', 'REPLIED'] +3764-168671-0034-1758: ref=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0034-1758: hyp=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0035-1759: ref=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0035-1759: hyp=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0036-1760: ref=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'GRIBIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0036-1760: hyp=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'CLAVIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0037-1761: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0037-1761: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0038-1762: ref=['SO', 'FATHER', 'MESTIENNE', 'IS', 'DEAD'] +3764-168671-0038-1762: hyp=['MISS', 'OH', 'FATHER', 'MESS', 'TEEN', 'IS', 'DEAD'] +3764-168671-0039-1763: ref=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0039-1763: hyp=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0040-1764: ref=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 'WAS', 'FATHER', "MESTIENNE'S", 'TURN', 'FATHER', 'MESTIENNE', 'DIED'] +3764-168671-0040-1764: hyp=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 'WAS', "FARTHIAN'S", 'TURN', 'FOR', 'THE', 'MESSIAN', 'DIED'] +3764-168671-0041-1765: ref=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0041-1765: hyp=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0042-1766: ref=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0042-1766: hyp=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0043-1767: ref=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0043-1767: hyp=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0044-1768: ref=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 'LEADING', 'TO', 'THE', 'NUNS', 'CORNER'] +3764-168671-0044-1768: hyp=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 
'LEADING', 'TO', 'THE', "NUN'S", 'CORNER'] +3764-168671-0045-1769: ref=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0045-1769: hyp=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0046-1770: ref=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0046-1770: hyp=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0047-1771: ref=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'PRYTANEUM', 'TOWN', 'HALL'] +3764-168671-0047-1771: hyp=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'BRITTANNIUM', 'TOWN', 'HALL'] +3764-168671-0048-1772: ref=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSSES', 'ON', 'CHANGE', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0048-1772: hyp=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSES', 'UNCHANGED', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0049-1773: ref=['SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0049-1773: hyp=['BUT', 'SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0050-1774: ref=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0050-1774: hyp=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0051-1775: ref=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0051-1775: hyp=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0052-1776: ref=['FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0052-1776: hyp=['A', 'FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0053-1777: ref=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'A', 'SUPERIOR', 'SMILE'] +3764-168671-0053-1777: hyp=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'THE', 'SUPERIOR', 'SMILE'] +3764-168671-0054-1778: ref=['ONE', 'MUST', 'EAT'] +3764-168671-0054-1778: hyp=['ONE', 'MUST', 'EAT'] +3997-180294-0000-1800: ref=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'WAKE'] +3997-180294-0000-1800: hyp=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'AWAKE'] +3997-180294-0001-1801: ref=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0001-1801: hyp=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0002-1802: ref=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THAT', 'I', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] 
+3997-180294-0002-1802: hyp=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THY', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] +3997-180294-0003-1803: ref=['THERE', 'ARE', 'BOLTS', 'ON', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0003-1803: hyp=['THERE', 'ARE', 'BOLTS', 'IN', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0004-1804: ref=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0004-1804: hyp=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0005-1805: ref=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0005-1805: hyp=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0006-1806: ref=['IT', 'SEEMED', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGED', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0006-1806: hyp=['IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGS', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0007-1807: ref=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNSELS', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENT', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0007-1807: hyp=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNSEL', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENT', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0008-1808: ref=['THE', 'MORE', 'A', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BEING', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 'IS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MAN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0008-1808: hyp=['THE', 'MORE', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BE', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 'AS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MEN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0009-1809: ref=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'SURELY', 'MUST', 'THEY', 
'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTEN', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERIOUS', 'VEIL'] +3997-180294-0009-1809: hyp=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'TRULY', 'MUST', 'THEY', 'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTENED', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'HE', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERY', 'VEIL'] +3997-180294-0010-1810: ref=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HAVE', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0010-1810: hyp=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HALF', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0011-1811: ref=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0011-1811: hyp=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0012-1812: ref=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0012-1812: hyp=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0013-1813: ref=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0013-1813: hyp=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0014-1814: ref=['IN', 'ORDER', 'TO', 'DISTURB', 'THE', 'LABOURERS', 'IN', 'THE', 'FIELD', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 'CRIES', 'FOR', 'HELP'] +3997-180294-0014-1814: hyp=['IN', 'ORDER', 'TO', 'DISTURB', 'THE', 'LABORERS', 'IN', 'THE', 'FIELD', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 'CRIES', 'FOR', 'HELP'] +3997-180294-0015-1815: ref=['IT', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'THEY', 'LOVE', 'SERIOUSLY'] +3997-180294-0015-1815: hyp=['THIS', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'HE', 'LOVED', 'SERIOUSLY'] +3997-180294-0016-1816: ref=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 
'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DRAUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0016-1816: hyp=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DRAUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0017-1817: ref=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'LIAISON'] +3997-180294-0017-1817: hyp=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'LEAR', 'SONG'] +3997-180294-0018-1818: ref=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MAD', 'GAIETY'] +3997-180294-0018-1818: hyp=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MADGE', 'GAIETY'] +3997-180294-0019-1819: ref=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0019-1819: hyp=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0020-1820: ref=['HOW', 'WHY'] +3997-180294-0020-1820: hyp=['HOW', 'WHY'] +3997-180294-0021-1821: ref=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0021-1821: hyp=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0022-1822: ref=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0022-1822: hyp=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0023-1823: ref=['COME', 'DURING', 'THE', 'THIRD', "ENTR'ACTE"] +3997-180294-0023-1823: hyp=['CLER', 'DURING', 'THE', 'THIRD', 'ENTRACT'] +3997-180294-0024-1824: ref=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0024-1824: hyp=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0025-1825: ref=['ONLY', 'ONE', 'REMAINED', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0025-1825: hyp=['ONLY', 'ONE', 'REMAINS', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0026-1826: ref=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0026-1826: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0027-1827: ref=['DID', 'SHE', 'LOVE', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0027-1827: hyp=['THAT', 'SHE', 'LOVED', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0028-1828: ref=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 
'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0028-1828: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0029-1829: ref=['YOU', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONICAL', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0029-1829: hyp=['HE', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONIC', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0030-1830: ref=['WHERE', 'AT', 'HOME'] +3997-180294-0030-1830: hyp=['WHERE', 'AT', 'HOME'] +3997-180294-0031-1831: ref=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0031-1831: hyp=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0032-1832: ref=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0032-1832: hyp=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0033-1833: ref=['NONETHELESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WAITING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180294-0033-1833: hyp=['NONE', 'THE', 'LESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WINNING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180297-0000-1834: ref=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0000-1834: hyp=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0001-1835: ref=['YOU', 'IN', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0001-1835: hyp=['YOU', 'IN', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0002-1836: ref=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0002-1836: hyp=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0003-1837: ref=['DURING', 'THIS', 'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] +3997-180297-0003-1837: hyp=['DURING', 'THIS', 'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] +3997-180297-0004-1838: ref=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0004-1838: hyp=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0005-1839: ref=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0005-1839: hyp=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 
'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0006-1840: ref=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'DO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0006-1840: hyp=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'TO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0007-1841: ref=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0007-1841: hyp=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0008-1842: ref=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0008-1842: hyp=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0009-1843: ref=['HONESTLY', 'DO', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0009-1843: hyp=['ON', 'THE', 'SUIT', 'DO', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0010-1844: ref=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0010-1844: hyp=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0011-1845: ref=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0011-1845: hyp=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0012-1846: ref=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0012-1846: hyp=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0013-1847: ref=['WHEN', 'I', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'MY', 'PART', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', "MAN'S", 'DESIRE', 'HAS', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0013-1847: hyp=['WHEN', 'THEY', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'BY', 'PART', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', "MAN'S", 'DESIRE', 'HAD', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0014-1848: ref=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRES', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0014-1848: 
hyp=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRE', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0015-1849: ref=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0015-1849: hyp=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0016-1850: ref=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0016-1850: hyp=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0017-1851: ref=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0017-1851: hyp=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0018-1852: ref=['NEVER', 'DO', 'THEY', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0018-1852: hyp=['NEVER', 'DID', 'HE', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0019-1853: ref=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'IN', 'OUR', 'CARRIAGE', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'THEATRE'] +3997-180297-0019-1853: hyp=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'AND', 'ARE', 'PARISH', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'FUTURE'] +3997-180297-0020-1854: ref=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0020-1854: hyp=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'THE', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0021-1855: ref=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'BUT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0021-1855: hyp=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'OR', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0022-1856: ref=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] +3997-180297-0022-1856: hyp=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] +3997-180297-0023-1857: ref=['MARGUERITE', 'TIRED', 'OUT', 'WITH', 'THIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PUT', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0023-1857: hyp=['MARGUERITE', 'HIRED', 'OUT', 'WITH', 'THIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PULL', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0024-1858: ref=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 
'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0024-1858: hyp=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0025-1859: ref=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0025-1859: hyp=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0026-1860: ref=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0026-1860: hyp=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0027-1861: ref=['I', 'TORE', 'THE', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0027-1861: hyp=['I', 'TOILED', 'A', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0028-1862: ref=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0028-1862: hyp=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0029-1863: ref=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0029-1863: hyp=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0030-1864: ref=['ONE', 'HAS', 'TO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0030-1864: hyp=['ONE', 'HAS', 'TWO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0031-1865: ref=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-180297-0031-1865: hyp=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-182399-0000-1779: ref=['OL', 'MISTAH', 'BUZZARD', 'GRINNED'] +3997-182399-0000-1779: hyp=['ALL', 'MISTER', 'BUZZARD', 'GRINNED'] +3997-182399-0001-1780: ref=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0001-1780: hyp=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0002-1781: ref=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 'BLACK', 'HEADED', 'COUSIN', 'OF', 'OL', 'MISTAH', 'BUZZARD', 'VERY', 'CURIOUS', 'INDEED'] +3997-182399-0002-1781: hyp=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 'BLACK', 'HEADED', 'COUSIN', 'OF', 'OLD', 'MISTER', 'BUZZARD', 'VERY', 'CURIOUS', 'INDEED'] +3997-182399-0003-1782: ref=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0003-1782: hyp=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0004-1783: ref=['PLEASE', 'MISTER', 'BUZZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0004-1783: hyp=['PLEASE', 'MISTER', 'BOZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0005-1784: ref=['NOW', 'OL', 'MISTAH', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BEGGED', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0005-1784: hyp=['NOW', 'ALL', 
'MISTER', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BAGS', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0006-1785: ref=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', 'GRANDPAP', 'BUZZARD', 'HAD', 'HIS', 'LIL', 'FALLING', 'OUT', 'WITH', 'OL', 'KING', 'EAGLE', 'AND', 'DONE', 'FLY', 'SO', 'HIGH', 'HE', "SCO'TCH", 'THE', 'FEATHERS', 'OFFEN', 'HIS', 'HAID', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPAP', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JES', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0006-1785: hyp=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', "GRAN'PAP", 'BUZZARD', 'HAD', 'HIS', 'LITTLE', 'FALLING', 'ON', 'WITH', 'OLD', 'KING', 'EAGLE', 'AND', 'DONE', 'FLIES', 'SO', 'HIGH', 'HE', 'SCORCHED', 'THE', 'FEATHERS', 'OFF', 'IN', 'HIS', 'HEAD', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPA', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JUST', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0007-1786: ref=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NUISANCE', 'OF', 'HISSELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EV'YBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0007-1786: hyp=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NOTICE', 'OF', 'HIS', 'SELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EVERYBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0008-1787: ref=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', "FAM'LY", "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'AND', 'MEDDLE', 'IN', 'HE', 'COULD', 'ASK', 'MO', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AN', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0008-1787: hyp=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', 'FAMILY', "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'A', 'MEDAL', 'IN', 'HE', 'COULD', 'ASK', 'MORE', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AND', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0009-1788: ref=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0009-1788: hyp=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0010-1789: ref=['SO', 'WE', 'UNS', 'SIT', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OL', 'JACK', 'FROST', 'GETS', 'TO', 'STRAYING', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0010-1789: hyp=['SO', 'WE', 'UNSTEAD', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OLD', 'JACK', 'FROST', 'GETS', 'THE', 'STRAIN', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0011-1790: ref=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPAP', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0011-1790: hyp=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPA', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0012-1791: ref=['IT', 'WAS', 'ON', 'A', 'LIL', 'OL', 'HOUSE', 'A', 'LIL', 'OL', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0012-1791: hyp=['IT', 'WAS', 'ON', 'THE', 'LITTLE', 'OLD', 'HOUSE', 'A', 'LITTLE', 'OLD', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0013-1792: ref=['WHY', 'HE', 'JES', 'STRETCH', 'HIS', 'FOOL', 'HAID', 'AS', 'FAR', 'DOWN', 'THAT', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AN', 'LISTEN', 'AN', 'LISTEN'] +3997-182399-0013-1792: 
hyp=['WHY', 'HE', 'JUST', 'SEARCHED', 'HIS', 'FULL', 'HEAD', 'AS', 'FAR', 'DOWN', 'THE', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AND', 'LISTEN', 'AND', 'LISTEN'] +3997-182399-0014-1793: ref=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0014-1793: hyp=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0015-1794: ref=['WILL', "YO'", 'ALLS', 'PLEASE', 'SPEAK', 'A', 'LIL', 'LOUDER', 'HE', 'HOLLER', 'DOWN', 'THE', 'CHIMNEY', 'JES', 'LIKE', 'THAT'] +3997-182399-0015-1794: hyp=['WELL', 'YOU', 'ALL', 'PLEASE', 'SPEAK', 'A', 'LITTLE', 'LOUDER', 'HE', 'HOLLERED', 'ON', 'THE', 'CHIMNEY', 'JUST', 'LIKE', 'THAT'] +3997-182399-0016-1795: ref=['YES', 'SAH', 'SHE', "SHO'LY", 'WAS', 'PLUMB', 'SCARED'] +3997-182399-0016-1795: hyp=['YES', 'SAD', 'SHE', 'SURELY', 'WAS', 'PLUM', 'SCARED'] +3997-182399-0017-1796: ref=['THEY', 'LIKE', 'TO', 'CHOKE', 'THAT', 'NO', 'COUNT', 'BUZZARD', 'TO', 'DEATH'] +3997-182399-0017-1796: hyp=['THEY', 'LIKED', 'TO', 'CHOKE', 'THAT', 'NO', 'COMPASSER', 'TO', 'DEATH'] +3997-182399-0018-1797: ref=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AN', 'TRY', 'TO', 'BRUSH', 'THAT', 'SOOT', 'OFF', 'BUT', 'IT', 'DONE', 'GET', 'INTO', 'THE', 'SKIN', 'AN', 'IT', 'STAY', 'THERE'] +3997-182399-0018-1797: hyp=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AND', 'TRIES', 'TO', 'BRUSH', 'THAT', 'SUIT', 'OFF', 'BUT', 'IT', "DON'T", 'GET', 'INTO', 'THE', 'SKIN', 'AND', 'IT', 'STAY', 'THERE'] +3997-182399-0019-1798: ref=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'AROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0019-1798: hyp=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'ROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0020-1799: ref=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', "FROG'S"] +3997-182399-0020-1799: hyp=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', 'FROGS'] +4198-12259-0000-203: ref=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0000-203: hyp=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0001-204: ref=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0001-204: hyp=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0002-205: ref=['A', 'CESSATION', 'AND', 'TRUCE', 'WITH', 'THIRST'] +4198-12259-0002-205: hyp=['A', 'CESSATION', 'AND', 'TRUTH', 'WITH', 'THIRST'] +4198-12259-0003-206: ref=['YOU', 'HAVE', 'CATCHED', 'A', 'COLD', 'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0003-206: hyp=['YOU', 'HAVE', 'CAST', 'A', 'COLD', 'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0004-207: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'BUT', 'AT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0004-207: hyp=['BY', 'THE', 'VALLEY', 'OF', 'SAINT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'WITHOUT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0005-208: ref=['WHICH', 'WAS', 'FIRST', 'THIRST', 'OR', 'DRINKING'] +4198-12259-0005-208: hyp=['WHICH', 'WAS', 'FIRST', 'THOSE', 'DRINKING'] +4198-12259-0006-209: ref=['WHAT', 'IT', 'SEEMS', 'I', 'DO', 'NOT', 'DRINK', 'BUT', 'BY', 'AN', 'ATTORNEY'] +4198-12259-0006-209: hyp=['WHAT', 'IT', 'SEEMS', 'I', 
'DO', 'NOT', 'DRINK', 'BUT', 'BUY', 'AN', 'ATTORNEY'] +4198-12259-0007-210: ref=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0007-210: hyp=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0008-211: ref=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'STARK', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'AMONGST', 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILLS', 'IT'] +4198-12259-0008-211: hyp=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'START', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'AMONGST', 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILL', 'IT'] +4198-12259-0009-212: ref=['HE', 'DRINKS', 'IN', 'VAIN', 'THAT', 'FEELS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0009-212: hyp=['HE', 'DRINKS', 'THEIR', 'VEIN', 'THAT', 'FILLS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0010-213: ref=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GIRDS', 'AND', 'PETREL'] +4198-12259-0010-213: hyp=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GUERAGE', 'AND', 'PETEL'] +4198-12259-0011-214: ref=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0011-214: hyp=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0012-215: ref=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0012-215: hyp=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0013-216: ref=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THEIR', 'CANS'] +4198-12259-0013-216: hyp=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THE', 'ACADANS'] +4198-12259-0014-217: ref=['WELL', 'CACKED', 'WELL', 'SUNG'] +4198-12259-0014-217: hyp=['WELL', 'CAGLED', 'WELL', 'SUNG'] +4198-12259-0015-218: ref=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0015-218: hyp=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0016-219: ref=['I', 'DRINK', 'NO', 'MORE', 'THAN', 'A', 'SPONGE'] +4198-12259-0016-219: hyp=['I', 'DRANK', 'NO', 'MORE', 'THAN', 'HIS', 'SPINES'] +4198-12259-0017-220: ref=['I', 'DRINK', 'LIKE', 'A', 'TEMPLAR', 'KNIGHT'] +4198-12259-0017-220: hyp=['I', 'DRINK', 'LIKE', 'A', 'TENT', 'LAW', 'NIGHT'] +4198-12259-0018-221: ref=['AND', 'I', 'TANQUAM', 'SPONSUS'] +4198-12259-0018-221: hyp=['AND', 'I', 'TEN', 'QUALMS', 'BONSES'] +4198-12259-0019-222: ref=['AND', 'I', 'SICUT', 'TERRA', 'SINE', 'AQUA'] +4198-12259-0019-222: hyp=['AND', 'I', 'SECUT', 'TERRACE', 'IN', 'AQUA'] +4198-12259-0020-223: ref=['GIVE', 'ME', 'A', 'SYNONYMON', 'FOR', 'A', 'GAMMON', 'OF', 'BACON'] +4198-12259-0020-223: hyp=['GIVE', 'ME', 'A', 'SNYM', 'FOR', 'A', 'GAMIN', 'OF', 'BACON'] +4198-12259-0021-224: ref=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'PULLEY'] +4198-12259-0021-224: hyp=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'PULLLY'] +4198-12259-0022-225: ref=['A', 'LITTLE', 'RAIN', 'ALLAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THE', 'THUNDER'] +4198-12259-0022-225: hyp=['A', 'LITTLE', 'RAIN', 'A', 'LAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THAT', 'THUNDER'] 
+4198-12259-0023-226: ref=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FROM', 'MY', 'BALLOCK', 'WOULD', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UDDER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0023-226: hyp=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FOR', 'MY', 'BALLIC', 'WILL', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UTTER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0024-227: ref=['HERE', 'PAGE', 'FILL'] +4198-12259-0024-227: hyp=['HERE', 'PAGE', 'FILL'] +4198-12259-0025-228: ref=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURISDICTION'] +4198-12259-0025-228: hyp=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURIS', 'DIXON'] +4198-12259-0026-229: ref=['I', 'WAS', 'WONT', 'HERETOFORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0026-229: hyp=['I', 'WAS', 'WONT', 'HERE', 'TOFORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0027-230: ref=['HEYDAY', 'HERE', 'ARE', 'TRIPES', 'FIT', 'FOR', 'OUR', 'SPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GODEBILLIOS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREAK'] +4198-12259-0027-230: hyp=['HEY', 'THEE', 'HERE', 'A', 'TRITE', 'FIT', 'FOR', 'OURSPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GO', 'TO', 'BE', 'YOURS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREET'] +4198-12259-0028-231: ref=['O', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LASH', 'THEM', 'SOUNDLY', 'YET', 'THRIFTILY'] +4198-12259-0028-231: hyp=['OH', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LAST', 'THEM', 'SOUNDLY', 'YET', 'DRIFTILY'] +4198-12259-0029-232: ref=['SPARROWS', 'WILL', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOB', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0029-232: hyp=['SPARROWS', 'WOULD', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOBBED', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0030-233: ref=['HO', 'THIS', 'WILL', 'BANG', 'IT', 'SOUNDLY'] +4198-12259-0030-233: hyp=['OH', 'THIS', 'WAS', "BENNETT'S", 'ARMY'] +4198-12259-0031-234: ref=['BUT', 'THIS', 'SHALL', 'BANISH', 'IT', 'UTTERLY'] +4198-12259-0031-234: hyp=['BUT', 'THIS', 'OUR', 'BANACY', 'UTTERLY'] +4198-12259-0032-235: ref=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NOT', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0032-235: hyp=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NIGH', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0033-236: ref=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0033-236: hyp=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0034-237: ref=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGESTON', 'BUT', 'THE', 'THIRST', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0034-237: hyp=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGERSON', 'BUT', 'THE', 'DOZ', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0035-238: ref=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'THE', 'BITING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0035-238: 
hyp=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'ABIDING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0036-239: ref=['WHITE', 'WINE', 'HERE', 'WINE', 'BOYS'] +4198-12259-0036-239: hyp=['WHITE', 'WIRE', 'WINE', 'BOYS'] +4198-12259-0037-240: ref=['O', 'LACHRYMA', 'CHRISTI', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0037-240: hyp=['O', 'LACKRY', 'MOLL', 'CHRISTI', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0038-241: ref=["I'FAITH", 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0038-241: hyp=['I', 'FAITH', 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0039-242: ref=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0039-242: hyp=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0040-243: ref=['MY', 'PRENTICESHIP', 'IS', 'OUT', 'I', 'AM', 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0040-243: hyp=['MY', 'PRENTICE', 'IT', 'IS', 'OUT', "I'M", 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0041-244: ref=['I', 'SHOULD', 'SAY', 'MASTER', 'PAST'] +4198-12259-0041-244: hyp=['AS', 'YOU', 'SEE', 'MASTER', 'PASS'] +4198-12259-0042-245: ref=['O', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'A', 'DRY', 'O', 'POOR', 'THIRSTY', 'SOULS'] +4198-12259-0042-245: hyp=['OH', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'ADD', 'OH', 'POOR', 'THIRSTY', 'SOULS'] +4198-12259-0043-246: ref=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12259-0043-246: hyp=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12281-0000-187: ref=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0000-187: hyp=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0001-188: ref=['I', 'BESEECH', 'YOU', 'THINK', 'UPON', 'IT'] +4198-12281-0001-188: hyp=['I', 'BESEECH', 'YOU', 'THINK', 'UPON', 'IT'] +4198-12281-0002-189: ref=['NEVERTHELESS', 'AT', 'ALL', 'ADVENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'AD', 'CAPITULUM', 'CAPITULANTES'] +4198-12281-0002-189: hyp=['NEVERTHELESS', 'AT', 'ALL', 'VENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'ERE', 'AT', 'CAPITULAM', 'CAPITULAT', 'DAYS'] +4198-12281-0003-190: ref=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'PANNIERS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0003-190: hyp=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'PENNYERS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0004-191: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0004-191: hyp=['BY', 'THE', 'BELLY', 'OF', 'SAINT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0005-192: ref=['LORD', 'GOD', 'DA', 'MIHI', 
'POTUM'] +4198-12281-0005-192: hyp=['LORD', 'GOD', 'THOU', 'ME', 'HE', 'POT', 'EM'] +4198-12281-0006-193: ref=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0006-193: hyp=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0007-194: ref=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'AND', 'ALL', 'THE', 'WINTER'] +4198-12281-0007-194: hyp=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'IN', 'ALL', 'THE', 'WINTER'] +4198-12281-0008-195: ref=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', "COP'S", 'BODY', 'FOLLOW', 'ME', 'FOR', 'SANCT', 'ANTHONY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'IF', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WILL', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0008-195: hyp=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', 'COPSE', 'BODY', 'FOLLOW', 'ME', 'FOR', 'SAINT', 'AUNT', 'ANY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WOULD', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0009-196: ref=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPONDYLES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'DISFIGURED', 'THEIR', 'CHAPS', 'GASHED', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BALAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'A', 'MOWER'] +4198-12281-0009-196: hyp=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPY', 'MULES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'DIS', 'FIGURED', 'THEIR', 'CHAPS', 'GASH', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BLAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'HIM', 'OVER'] +4198-12281-0010-197: ref=['TO', 'SOME', 'WITH', 'A', 'SMART', 'SOUSE', 'ON', 'THE', 'EPIGASTER', 'HE', 'WOULD', 'MAKE', 'THEIR', 'MIDRIFF', 'SWAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOMEPUSH', 'ON', 'THE', 'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 'OUT'] +4198-12281-0010-197: hyp=['TO', 'SOME', 'WOULD', 'THEY', 'SMART', 'SOUS', 'ON', 'THEIR', 'EBERGASTER', 'HE', 'WILL', 'MAKE', 'THEM', 'MIDRIFTS', 'WAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOME', 'PUSH', 'ON', 'THE', 'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 'OUT'] +4198-12281-0011-198: ref=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'ONE', 'SAW'] +4198-12281-0011-198: hyp=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'WON', 'SAW'] +4198-12281-0012-199: ref=['O', 'THE', 'HOLY', 'LADY', 'NYTOUCH', 'SAID', 'ONE', 'THE', 'GOOD', 'SANCTESS', 'O', 'OUR', 'LADY', 'OF', 'SUCCOURS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0012-199: hyp=['ALL', 'THE', 'HOLY', 'LADY', 'KNIGHT', 'SAID', 'ONE', 'THE', 
'GOOD', 'SANCTIS', 'O', 'OUR', 'LADY', 'OF', 'SECURS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0013-200: ref=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'IN', 'DYING'] +4198-12281-0013-200: hyp=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'AND', 'DYING'] +4198-12281-0014-201: ref=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0014-201: hyp=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0015-202: ref=['IN', 'THE', 'MEANTIME', 'FRIAR', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BATON', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVOURED', 'TO', 'ESCAPE'] +4198-12281-0015-202: hyp=['IN', 'THE', 'MEANTIME', 'FRIED', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BATON', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVOURED', 'TO', 'ESCAPE'] +4198-61336-0000-247: ref=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'ASHUR', 'BY', 'THE', 'ADHERENTS', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0000-247: hyp=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'ASHER', 'BY', 'THE', 'ADHERENCE', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0001-248: ref=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0001-248: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0002-249: ref=['WELL', 'MIGHT', 'SHARDURIS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARPAD'] +4198-61336-0002-249: hyp=['WELL', 'MIGHT', 'SHOW', 'DORIS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARPE'] +4198-61336-0003-250: ref=['TIGLATH', 'PILESER', 'HOWEVER', 'CROSSED', 'THE', 'EUPHRATES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'URARTIAN', 'ARMY', 'IN', 'QUMMUKH'] +4198-61336-0003-250: hyp=['DICK', 'LAUGHED', 'PLEASURE', 'HOWEVER', 'CROSSED', 'THE', 'EUPHATEES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'GRACIAN', 'ARMY', 'AND', 'CUB'] +4198-61336-0004-251: ref=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'ITS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0004-251: hyp=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'HIS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0005-252: ref=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHARDURIS', 'WHO', 'LEAPT', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTINGENT', 'OF', 'CAVALRY'] +4198-61336-0005-252: 
hyp=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHADORIS', 'WHO', 'LEAPED', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTENTION', 'OF', 'CAVALRY'] +4198-61336-0006-253: ref=['DESPITE', 'THE', 'BLOW', 'DEALT', 'AGAINST', 'URARTU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0006-253: hyp=['DESPITE', 'THE', 'BLUE', 'DEALT', 'AGAINST', 'URITU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0007-254: ref=['THE', 'SHIFTY', 'MATI', 'ILU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHARDURIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'INVADE', 'NORTH', 'SYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0007-254: hyp=['THE', 'SHIFTY', 'MANTI', 'ILIU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHALL', 'DORIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'IN', 'VAIN', 'NORTH', 'ASSYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0008-255: ref=['TIGLATH', 'PILESER', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0008-255: hyp=['T', 'GLASS', 'BE', 'LEISURE', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0009-256: ref=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0009-256: hyp=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0010-257: ref=['ARPAD', 'WAS', 'CAPTURED', 'AND', 'MATI', 'ILU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0010-257: hyp=['OUR', 'PAD', 'WAS', 'CAPTURED', 'AND', 'MET', 'TO', 'ILL', 'YOU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0011-258: ref=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'ASSYRIA'] +4198-61336-0011-258: hyp=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'THE', 'SYRIA'] +4198-61336-0012-259: ref=['ITS', 'FALL', 'MAY', 'NOT', 'HAVE', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0012-259: hyp=["IT'S", 'FOR', 'ME', 'NAT', 'HAV', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0013-260: ref=['JEHOASH', 'THE', 'GRANDSON', 'OF', 'JEHU', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0013-260: hyp=['JO', 'ASH', 'THE', 'GRANDSON', 'OF', 'JEHOV', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0014-261: ref=['SIX', 'MONTHS', 'AFTERWARDS', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'SHALLUM'] +4198-61336-0014-261: hyp=['SIX', 'MONTHS', 'AFTERWARD', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'CELEM'] +4198-61336-0015-262: ref=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0015-262: hyp=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0016-263: ref=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MENAHEM', 'THE', 'USURPER', 'WHO', 
'WAS', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0016-263: hyp=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MANY', 'HIM', 'THE', 'USURPER', 'WHOSE', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0017-264: ref=['TIGLATH', 'PILESER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIAN', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTH', 'EAST'] +4198-61336-0017-264: hyp=['TAKE', 'LAST', 'PLEASE', 'HER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIUM', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTHEAST'] +4198-61336-0018-265: ref=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0018-265: hyp=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0019-266: ref=['THUS', 'WAS', 'URARTU', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORMER', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0019-266: hyp=['THIS', 'WAS', 'URA', 'TO', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORM', 'OF', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0020-267: ref=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIGLATH', 'PILESER', 'RETURNED', 'TO', 'SYRIA'] +4198-61336-0020-267: hyp=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIGGLERS', 'BELEASE', 'HER', 'RETURN', 'TO', 'SYRIA'] +4198-61336-0021-268: ref=['MENAHEM', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PEKAHIAH'] +4198-61336-0021-268: hyp=['MANY', 'AND', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PECAH'] +4198-61336-0022-269: ref=['JUDAH', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0022-269: hyp=['JULIA', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0023-270: ref=['HE', 'CONDEMNED', 'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0023-270: hyp=['HE', 'CONDEMNED', 'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0024-271: ref=['FOR', 'THUS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'AND', 'YE', 'SHALL', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'O', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0024-271: hyp=['FOR', 'THIS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'A', 'YE', 'TO', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'OR', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0025-272: ref=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 'PERISH'] +4198-61336-0025-272: hyp=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 
'PERISH'] +4198-61336-0026-273: ref=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0026-273: hyp=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0027-274: ref=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0027-274: hyp=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0028-275: ref=['THE', 'PHILISTINES', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0028-275: hyp=['THE', 'FURTHER', 'STEAMS', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0029-276: ref=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0029-276: hyp=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0030-277: ref=['UKINZER', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4198-61336-0030-277: hyp=['A', 'KINDRED', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPEIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4294-14317-0000-1866: ref=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'I', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0000-1866: hyp=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'AND', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0001-1867: ref=['WHEN', 'I', 'SAW', 'THAT', 'THIS', 'BUST', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SET', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0001-1867: hyp=['WHEN', 'I', 'SAW', 'THIS', 'FUSS', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SAID', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0002-1868: ref=['IT', 'WAS', 'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0002-1868: hyp=['IT', 'WAS', 'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0003-1869: ref=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0003-1869: hyp=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0004-1870: ref=['BUT', 'BEWARE', 'OF', 'LETTING', 'BANDINELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 
'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0004-1870: hyp=['BUT', 'BEWARE', 'OF', 'LETTING', 'BEND', 'NELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0005-1871: ref=['I', 'ASK', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABOURS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0005-1871: hyp=['I', 'ASKED', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABOURS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0006-1872: ref=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0006-1872: hyp=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0007-1873: ref=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0007-1873: hyp=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0008-1874: ref=['I', 'BEGGED', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0008-1874: hyp=['I', 'BEGGED', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0009-1875: ref=['I', 'SAID', 'MY', 'LORD', 'I', 'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'ANVILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMAN', 'GIORGIO', 'DA', 'CORTONA', 'FIFTEEN', 'DAYS', 'AGO', 'TO', 'FETCH', 'BACK'] +4294-14317-0009-1875: hyp=['I', 'SAID', 'MY', 'LORD', 'I', 'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'AMBILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMEN', 'GEORGIO', 'DECORTUNA', 'FIFTEEN', 'DAYS', 'AGO', 'TO', 'FETCH', 'BACK'] +4294-14317-0010-1876: ref=['GIORGIO', 'CAME', 'FOR', 'THEM', 'HIMSELF'] +4294-14317-0010-1876: hyp=['GEORGIO', 'CAME', 'FOR', 'THEM', 'HIS', 'HEALTH'] +4294-14317-0011-1877: ref=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] +4294-14317-0011-1877: hyp=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] 
+4294-14317-0012-1878: ref=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDI', 'WITH', 'HIM'] +4294-14317-0012-1878: hyp=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDEE', 'WITH', 'HIM'] +4294-14317-0013-1879: ref=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGELLO'] +4294-14317-0013-1879: hyp=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGENO'] +4294-14317-0014-1880: ref=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALESSANDRO', 'WHICH', 'ARE', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 'FRANCIS', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', 'SCULPTORS', 'CRAFT'] +4294-14317-0014-1880: hyp=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALISANDRO', 'WHICH', 'ARE', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 'FRANCIS', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', "SCULPTOR'S", 'CRAFT'] +4294-14317-0015-1881: ref=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERO', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0015-1881: hyp=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERRE', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0016-1882: ref=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGED', 'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0016-1882: hyp=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGED', 
'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0017-1883: ref=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PERSEUS', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0017-1883: hyp=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PURSES', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0018-1884: ref=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-14317-0018-1884: hyp=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-32859-0000-1942: ref=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0000-1942: hyp=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0001-1943: ref=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0001-1943: hyp=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0002-1944: ref=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', "RETZCH'S", 'PRETTY', 'OUTLINES'] +4294-32859-0002-1944: hyp=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', 'WRETCHES', 'PRETTY', 'OUTLINE'] +4294-32859-0003-1945: ref=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0003-1945: hyp=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0004-1946: ref=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0004-1946: hyp=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0005-1947: ref=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] +4294-32859-0005-1947: hyp=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] 
+4294-35475-0000-1885: ref=['BUT', 'THE', 'MIDDLE', 'SON', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0000-1885: hyp=['BUT', 'THE', 'MIDDLE', 'SUN', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0001-1886: ref=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'OH', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0001-1886: hyp=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'O', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0002-1887: ref=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0002-1887: hyp=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0003-1888: ref=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IF', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0003-1888: hyp=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IT', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0004-1889: ref=['FORTHWITH', 'THE', 'GRINNING', 'JESTER', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0004-1889: hyp=['FORTHWITH', 'THE', 'GRINNING', 'GESTURE', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0005-1890: ref=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SWORD', 'OF', 'ETHELRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', "TAILOR'S", 'GIRDLE'] +4294-35475-0005-1890: hyp=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SORT', 'OF', 'EPLORRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', "TAILOR'S", 'GIRDLE'] +4294-35475-0006-1891: ref=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 'FOREST', 'TOO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0006-1891: hyp=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 'FOREST', 'TWO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0007-1892: ref=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'REND', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0007-1892: hyp=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'RUN', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0008-1893: ref=['IT', 'HAD', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 
'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0008-1893: hyp=['HE', 'HAD', 'HID', 'IT', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0009-1894: ref=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0009-1894: hyp=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0010-1895: ref=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0010-1895: hyp=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0011-1896: ref=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTON'] +4294-35475-0011-1896: hyp=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTONE'] +4294-35475-0012-1897: ref=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0012-1897: hyp=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0013-1898: ref=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0013-1898: hyp=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0014-1899: ref=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'ITS', 'FREEDOM'] +4294-35475-0014-1899: hyp=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'TO', 'READ', 'THEM'] +4294-35475-0015-1900: ref=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 'KEYHOLE'] +4294-35475-0015-1900: hyp=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 'KEYHOLE'] +4294-35475-0016-1901: ref=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0016-1901: hyp=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0017-1902: ref=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] 
+4294-35475-0017-1902: hyp=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] +4294-35475-0018-1903: ref=['IN', 'A', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'HAD', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0018-1903: hyp=['IN', 'THE', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'HAD', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0019-1904: ref=['WHILE', 'HE', 'STOOD', 'LOOKING', 'AROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'ALIGHTED', 'ON', 'HIS', 'ARM', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0019-1904: hyp=['WHILE', 'HE', 'STOOD', 'LOOKING', 'AROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'LIGHTED', 'DOWN', 'HIS', 'HEART', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0020-1905: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0020-1905: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0021-1906: ref=['MY', 'GRAIN', 'MUST', 'FALL', 'AND', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVERRIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0021-1906: hyp=['BY', 'GRAIN', 'MUST', 'FALL', 'IN', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVER', 'RIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0022-1907: ref=['THE', 'GRANDAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 'COUNCILLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 'TO', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0022-1907: hyp=['THE', 'GRAND', 'DAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 'COUNSELLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 'TO', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0023-1908: ref=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0023-1908: hyp=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 
'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0024-1909: ref=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "ETHELRIED'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0024-1909: hyp=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "EPILID'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0025-1910: ref=['BUT', 'ETHELRIED', 'HEEDED', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0025-1910: hyp=['BUT', 'ETHELRED', 'HE', 'DID', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0026-1911: ref=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'CISEAUX'] +4294-35475-0026-1911: hyp=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'ISAU'] +4294-9934-0000-1912: ref=['HE', 'FELT', 'WHAT', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'AND', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVE', 'LATER'] +4294-9934-0000-1912: hyp=['HE', 'FELT', 'WITH', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVES', 'LATER'] +4294-9934-0001-1913: ref=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECT', 'IT', 'ALREADY'] +4294-9934-0001-1913: hyp=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECTED', 'ALREADY'] +4294-9934-0002-1914: ref=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0002-1914: hyp=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0003-1915: ref=['TO', 'STAND', 'BETWEEN', 'TWO', 'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0003-1915: hyp=['TO', 'STAND', 'BETWEEN', 'TWO', 'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0004-1916: ref=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0004-1916: hyp=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0005-1917: ref=['THE', 'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0005-1917: hyp=['THE', 
'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0006-1918: ref=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HALT', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0006-1918: hyp=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HELP', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0007-1919: ref=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0007-1919: hyp=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0008-1920: ref=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0008-1920: hyp=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0009-1921: ref=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0009-1921: hyp=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0010-1922: ref=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0010-1922: hyp=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0011-1923: ref=['REQUEST', 'COURFEYRAC', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0011-1923: hyp=['REQUEST', 'COURFERAC', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0012-1924: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'COURFEYRAC'] +4294-9934-0012-1924: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'CURFAC'] +4294-9934-0013-1925: ref=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0013-1925: hyp=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0014-1926: ref=['SILVER', 'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0014-1926: hyp=['SILVER', 'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0015-1927: ref=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 'TROUSERS', 'A', 'WAISTCOAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0015-1927: hyp=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 'TROUSERS', 'A', 'WEST', 'COAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0016-1928: ref=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0016-1928: hyp=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0017-1929: ref=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WILL', 'YOU', 'DO', 'AFTER', 'THAT'] +4294-9934-0017-1929: hyp=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WE', 'DO', 'AFTER', 'THAT'] +4294-9934-0018-1930: ref=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0018-1930: hyp=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0019-1931: ref=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] 
+4294-9934-0019-1931: hyp=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] +4294-9934-0020-1932: ref=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0020-1932: hyp=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0021-1933: ref=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', "WATCHMAKER'S"] +4294-9934-0021-1933: hyp=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', 'WATCHMAKERS'] +4294-9934-0022-1934: ref=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0022-1934: hyp=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0023-1935: ref=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0023-1935: hyp=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0024-1936: ref=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0024-1936: hyp=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0025-1937: ref=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0025-1937: hyp=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0026-1938: ref=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0026-1938: hyp=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0027-1939: ref=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAW', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOLES', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'IN', 'A', 'SEALED', 'BOX'] +4294-9934-0027-1939: hyp=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAST', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOL', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'AND', 'A', 'SEALED', 'BOX'] +4294-9934-0028-1940: ref=['MARIUS', 'SENT', 'BACK', 'THE', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 'WITH', 'A', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUFFICIENT', 'MEANS', 'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0028-1940: hyp=['MARIUS', 'SENT', 'BACK', 'FOR', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 'WITH', 'THE', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUSPICIENT', 'MEANS', 'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0029-1941: ref=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4294-9934-0029-1941: hyp=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4350-10919-0000-2716: ref=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0000-2716: hyp=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0001-2717: ref=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 
'HIS', 'PEARLS'] +4350-10919-0001-2717: hyp=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 'HIS', 'PEARLS'] +4350-10919-0002-2718: ref=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0002-2718: hyp=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0003-2719: ref=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'FATE', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0003-2719: hyp=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'PHAETON', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0004-2720: ref=['IS', 'THERE', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0004-2720: hyp=['IS', 'THEIR', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0005-2721: ref=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0005-2721: hyp=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0006-2722: ref=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0006-2722: hyp=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0007-2723: ref=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALNUTRITION', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0007-2723: hyp=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALTRICIAN', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0008-2724: ref=['THE', 'QUESTION', 'STANDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERCULOUS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0008-2724: hyp=['THE', 'QUESTION', 'SENDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERCUOUS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0009-2725: ref=['YES', "THAT'S", 'AN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0009-2725: hyp=['YES', 'I', 'CAN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0010-2726: ref=['BEG', 'PARDON', 'IS', 'THE', 'YAUSKY', 'BRIDGE', 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'AROUND'] +4350-10919-0010-2726: hyp=['BEG', 'PARDON', 'IS', 'THE', 'OWESKEE', 'BRIDGE', 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'HER', 'ON'] +4350-10919-0011-2727: ref=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0011-2727: hyp=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0012-2728: ref=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0012-2728: hyp=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0013-2729: ref=['AND', 'HOW', 'ABOUT', 'A', 'TOUR', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0013-2729: hyp=['AND', 'ABOUT', 'IT', 'TO', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0014-2730: ref=['WHAT', 'IS', 'WANTED', 'IS', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0014-2730: 
hyp=['WHAT', 'IS', 'WANTED', 'IS', 'THE', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0015-2731: ref=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0015-2731: hyp=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0016-2732: ref=['BUT', 'IN', 'FAVOR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0016-2732: hyp=['BUT', 'IN', 'FAVOUR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0017-2733: ref=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0017-2733: hyp=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0018-2734: ref=['AH', 'WELL', 'IN', 'THAT', 'CASE', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'QUACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0018-2734: hyp=['AH', 'WELL', 'THAT', 'HAS', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'CLACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0019-2735: ref=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0019-2735: hyp=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0020-2736: ref=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0020-2736: hyp=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0021-2737: ref=['OH', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0021-2737: hyp=['O', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0022-2738: ref=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0022-2738: hyp=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0023-2739: ref=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 'FILLED', 'WITH', 'TEARS'] +4350-10919-0023-2739: hyp=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 'FILLED', 'WITH', 'TEARS'] +4350-10919-0024-2740: ref=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0024-2740: hyp=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0025-2741: ref=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0025-2741: hyp=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0026-2742: ref=['THIS', 'IS', 'THE', 'THIRD', 'TIME', "YOU'VE", 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0026-2742: hyp=['THIS', 'IS', 'THE', 'THIRD', 'TIME', "YOU'VE", 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0027-2743: ref=['THE', 'CELEBRATED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENSE'] 
+4350-10919-0027-2743: hyp=['THE', 'CLEARED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENCE'] +4350-10919-0028-2744: ref=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KITTY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0028-2744: hyp=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KATY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0029-2745: ref=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WERE', 'CERTAINLY', 'HARMLESS'] +4350-10919-0029-2745: hyp=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WERE', 'CERTAINLY', 'HARMLESS'] +4350-10919-0030-2746: ref=['AT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0030-2746: hyp=['BUT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0031-2747: ref=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0031-2747: hyp=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0032-2748: ref=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0032-2748: hyp=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0033-2749: ref=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 'CHEERFUL'] +4350-10919-0033-2749: hyp=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 'CHEERFUL'] +4350-9170-0000-2750: ref=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVER', 'GROWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0000-2750: hyp=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVERGREWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0001-2751: ref=['THIS', 'IS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0001-2751: hyp=['MISSUS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0002-2752: ref=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 
'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTERESTS', 'FOR', 'THE', 'INTERESTS', 'OF', 'THE', 'GROUP'] +4350-9170-0002-2752: hyp=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTEREST', 'FOR', 'THE', 'INTEREST', 'OF', 'THE', 'GROUP'] +4350-9170-0003-2753: ref=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0003-2753: hyp=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0004-2754: ref=['THE', 'MAN', 'WHO', 'IS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0004-2754: hyp=['THE', 'MAN', 'WHO', 'WAS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0005-2755: ref=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0005-2755: hyp=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0006-2756: ref=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'IN', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0006-2756: hyp=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'AND', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0007-2757: ref=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0007-2757: hyp=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0008-2758: ref=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0008-2758: hyp=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0009-2759: ref=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0009-2759: hyp=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 
'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0010-2760: ref=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0010-2760: hyp=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0011-2761: ref=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0011-2761: hyp=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0012-2762: ref=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'IN', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0012-2762: hyp=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'AND', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0013-2763: ref=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARD', 'OF', 'MORALS', 'AND', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0013-2763: hyp=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARDS', 'OF', 'MORALS', 'WHEN', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0014-2764: ref=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'SEEMS', 'STILL', 'LESS', 'AND', 'THE', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 'MORE', 'SECURE'] +4350-9170-0014-2764: hyp=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIPE', 'BETWEEN', 'THE', 'INDIVIDUALS', 'SEEMED', 'STILL', 'LESS', 'AND', 'A', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 'MORE', 'SECURE'] +4350-9170-0015-2765: ref=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLE', 'AND', 'CONQUEST'] +4350-9170-0015-2765: hyp=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLING', 'CONQUEST'] +4350-9170-0016-2766: ref=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0016-2766: hyp=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 
'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0017-2767: ref=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0017-2767: hyp=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0018-2768: ref=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISSENTIENT', 'INDIVIDUALS'] +4350-9170-0018-2768: hyp=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISINDIAN', 'INDIVIDUALS'] +4350-9170-0019-2769: ref=['GOVERNMENT', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0019-2769: hyp=['GOVERN', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0020-2770: ref=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLDEN', 'EGGS'] +4350-9170-0020-2770: hyp=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLD', 'KNIFE'] +4350-9170-0021-2771: ref=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIMES', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0021-2771: hyp=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIME', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0022-2772: ref=['SO', 'IT', 'HAS', 'ALWAYS', 'BEEN'] +4350-9170-0022-2772: hyp=['SO', 'IT', 'IS', 'ALWAYS', 'THEN'] +4350-9170-0023-2773: ref=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXIST', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0023-2773: hyp=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXIST', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0024-2774: ref=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTON'S", 'MEN', 'THEY', 'FEEL', 'THAT', 
'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0024-2774: hyp=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTENT'S", 'MEN', 'THEY', 'FEEL', 'THAT', 'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0025-2775: ref=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIERS', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTER', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0025-2775: hyp=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIER', 'THEY', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTRE', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0026-2776: ref=['THE', 'MEANING', 'OF', "CAPRIVI'S", 'SPEECH', 'PUT', 'INTO', 'PLAIN', 'LANGUAGE', 'IS', 'THAT', 'FUNDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0026-2776: hyp=['THE', 'MEANING', 'OF', 'CAPRIVY', 'SPEECH', 'PUT', 'INTO', 'PLAY', 'AND', 'LANGUAGE', 'IS', 'THAT', 'FONDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0027-2777: ref=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THINGS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0027-2777: hyp=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THANKS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0028-2778: ref=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 'OTHER', 'GOVERNMENTS', 'EXPLOITING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENCE', 'IN', 'THE', 'SAME', 'WAY', 'AND', 'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVED', 'SUBJECTS'] +4350-9170-0028-2778: hyp=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 'OTHER', 'GOVERNMENTS', 'EXPLODING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENT', 'AND', 'THE', 'SAME', 'WAY', 'AND', 'ARE', 'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVE', 'SUBJECTS'] +4350-9170-0029-2779: ref=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOR', 'BRIGANDS'] +4350-9170-0029-2779: hyp=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 
'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOUR', 'BRIGANDS'] +4350-9170-0030-2780: ref=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'ONE', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0030-2780: hyp=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'A', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0031-2781: ref=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SOURCE', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0031-2781: hyp=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SORT', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0032-2782: ref=['THE', 'DESPOTISM', 'OF', 'A', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0032-2782: hyp=['THE', 'DESPOTISM', 'OF', 'THE', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0033-2783: ref=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'IN', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0033-2783: hyp=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'AND', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0034-2784: ref=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALL', 'CITIZENS', 'HAVE', 'BECOME', 'THEIR', 'OWN', 'OPPRESSORS'] +4350-9170-0034-2784: hyp=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALL', 'CITIZENS', 'HAVE', 'BECOME', 'THEIR', 'OWN', 'IMPRESSORS'] +4350-9170-0035-2785: ref=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'IN', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0035-2785: hyp=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'AND', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0036-2786: ref=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'TO', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0036-2786: 
hyp=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'THE', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0037-2787: ref=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0037-2787: hyp=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0038-2788: ref=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSE', 'THE', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIFE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0038-2788: hyp=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSED', 'TO', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIKE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0039-2789: ref=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0039-2789: hyp=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0040-2790: ref=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0040-2790: hyp=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0041-2791: ref=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0041-2791: hyp=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0042-2792: ref=['GOVERNMENTS', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENSE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0042-2792: hyp=['GOVERNMENT', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENSE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0043-2793: ref=['THEY', 'ARE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 
'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'IN', 'ALL', 'THE', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0043-2793: hyp=['THERE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'AND', 'ALL', 'THAT', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0044-2794: ref=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0044-2794: hyp=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0045-2795: ref=['I', 'AM', 'EXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'THE', 'STATE', 'TO', 'MAKE', 'THESE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0045-2795: hyp=['I', 'EXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'STATE', 'TO', 'MAKE', 'THE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0046-2796: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0046-2796: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0047-2797: ref=['WE', 'KNOW', 'NOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0047-2797: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0048-2798: ref=['SO', 'THAT', 'THE', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOSED', 'PERSONS', 'EVEN', 'IF', 'IT', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0048-2798: hyp=['SO', 'THAT', 'THIS', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOS', 'PERSONS', 'EVEN', 'IF', 'I', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0049-2799: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0049-2799: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0050-2800: ref=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 'NEEDED', 'FOR', 'DOING', 'ANY', 'THING'] +4350-9170-0050-2800: hyp=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 
'NEEDED', 'FOR', 'DOING', 'ANYTHING'] +4350-9170-0051-2801: ref=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0051-2801: hyp=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0052-2802: ref=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0052-2802: hyp=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0053-2803: ref=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0053-2803: hyp=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0054-2804: ref=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0054-2804: hyp=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0055-2805: ref=['AND', 'IF', 'DEFENSE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0055-2805: hyp=['AND', 'IF', 'DEFENCE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0056-2806: ref=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 'OF', 'SUCH', 'ATTACKS'] +4350-9170-0056-2806: hyp=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 'OF', 'SUCH', 'ATTACKS'] +4350-9170-0057-2807: ref=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATE', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0057-2807: hyp=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATES', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'THE', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 
'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0058-2808: ref=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0058-2808: hyp=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0059-2809: ref=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASSES', 'AN', 'OPPRESSOR'] +4350-9170-0059-2809: hyp=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'OF', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASS', 'AND', 'OPPRESSOR'] +4350-9170-0060-2810: ref=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4350-9170-0060-2810: hyp=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4852-28311-0000-2098: ref=['SAY', 'YOU', 'KNOW', 'SUMTHIN'] +4852-28311-0000-2098: hyp=['SAY', 'YOU', 'KNOW', 'SOMETHING'] +4852-28311-0001-2099: ref=['CHRIS', 'LOOKED', 'FROM', 'A', 'NICKEL', 'PLATED', 'FLASHLIGHT', 'TO', 'A', 'CAR', 'JACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0001-2099: hyp=['CHRIS', 'LOOKED', 'FROM', 'A', 'COMPLATED', 'FLASH', 'LIKE', 'TO', 'A', 'CARJACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0002-2100: ref=['KNOW', 'WHO', 'NEEDS', 'A', 'JOB', 'BAD', "THAT'S", 'JAKEY', 'HARRIS'] +4852-28311-0002-2100: hyp=['NO', 'ONE', 'NEEDS', 'A', 'JOB', 'BAN', "THAT'S", 'JAKIE', 'HARRIS'] +4852-28311-0003-2101: ref=['O', 'K', 'HE', 'SAID'] +4852-28311-0003-2101: hyp=['O', 'K', 'HE', 'SAID'] +4852-28311-0004-2102: ref=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0004-2102: hyp=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0005-2103: ref=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'AN', 'ELASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'INTO', 'M', 'STREET'] +4852-28311-0005-2103: hyp=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'IT', 'MOLASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'AN', 'ENEM', 'STREET'] +4852-28311-0006-2104: ref=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] +4852-28311-0006-2104: hyp=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] +4852-28311-0007-2105: ref=['CHRIS', 'ASKED', 'AND', 'FOR', 'THE', 'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0007-2105: hyp=['CHRIS', 'ASKED', 'HIM', 'FOR', 'THE', 'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0008-2106: ref=['THINK', 'HE', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0008-2106: hyp=['THINKING', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0009-2107: ref=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKEY', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGING', 'INTO', 'THE', 'STREETS', 
'WAS', 'NOT', 'WHAT', 'CHRIS', 'HAD', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0009-2107: hyp=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKIE', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGED', 'INTO', 'THE', 'STREETS', 'WAS', 'NOT', 'WHAT', 'CHRISTEN', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0010-2108: ref=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', 'ONCE', 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'CHRIS', 'WAS', 'ONLY', 'PARTLY', 'ENCOURAGED'] +4852-28311-0010-2108: hyp=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', "ONE'S", 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'BRUCE', 'WAS', 'ONLY', 'PARTIALLY', 'ENCOURAGED'] +4852-28311-0011-2109: ref=['BETCHA', "AREN'T", 'GOIN', 'AFTER', 'ALL', 'CHRIS', 'TURNED', 'ON', 'HIM'] +4852-28311-0011-2109: hyp=['BITCHER', 'AND', 'GOIN', 'AFTER', 'ALL', 'THIS', 'TURNED', 'TO', 'HIM'] +4852-28311-0012-2110: ref=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0012-2110: hyp=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0013-2111: ref=['AW', 'SHUCKS'] +4852-28311-0013-2111: hyp=['AH', 'SHOCKS'] +4852-28311-0014-2112: ref=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'THE', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0014-2112: hyp=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'A', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0015-2113: ref=['AN', 'EMPTY', 'LOT', 'CUT', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'L', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', 'WICKER', 'AS', 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0015-2113: hyp=['AN', 'EMPTY', 'LOT', 'CUT', 'IN', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'ALE', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', "WICKER'S", 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0016-2114: ref=['THE', 'LONGER', 'WING', 'TOWARD', 'THE', 'BACK', 'HAD', 'A', 'BACK', 'DOOR', 'THAT', 'OPENED', 'ONTO', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVENUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUBS', 'AND', 'A', 'WHITE', 'PICKET', 'FENCE'] +4852-28311-0016-2114: hyp=['NO', 'LONGER', 'WING', 'TOWARD', 'THE', 'BACK', 'GOT', 'A', 'BACK', 'DOOR', 'THAT', 'OPENED', 'ON', 'A', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVIGUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUGS', 'AND', 'A', 'WHITE', 'PICKET', 'FENCE'] +4852-28311-0017-2115: ref=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'AND', 'GRAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0017-2115: hyp=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'AND', 'GLAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0018-2116: ref=['THE', 'AIR', 'WAS', 'GROWING', 'CHILL', 'AND', 'CHRIS', 'DECIDED', 'TO', 'FINISH', 'HIS', 'JOB'] +4852-28311-0018-2116: hyp=['THE', 'AIR', 'WAS', 'GROWING', 
'CHILL', 'AND', 'CHRIST', 'DECIDED', 'TO', 'FINISH', 'THE', 'JOB'] +4852-28311-0019-2117: ref=['ALL', 'AT', 'ONCE', 'HE', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'PINCHED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0019-2117: hyp=['ALL', 'AT', 'ONCE', 'YOU', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'IMPINCED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0020-2118: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0020-2118: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0021-2119: ref=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0021-2119: hyp=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0022-2120: ref=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTER', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'IN', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0022-2120: hyp=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTRE', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'AND', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0023-2121: ref=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'HAD', 'SAILED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0023-2121: hyp=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'EXHALED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0024-2122: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ANYONE', 'GO', 'INTO', 'MISTER', "WICKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0024-2122: hyp=["HE'D", 'NEVER', 'SEEN', 'ANY', 'ONE', 'GO', 'INTO', 'MISTER', "HOOKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0025-2123: ref=['HOW', 'THEN', 'DID', 'HE', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 'SELL'] +4852-28311-0025-2123: hyp=['HOW', 'THEN', 'DID', 'HE', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 'SELL'] +4852-28311-0026-2124: ref=['A', 'SUDDEN', 'CAR', 'HORN', 'WOKE', 'HIM', 'FROM', 'HIS', 'DREAM'] +4852-28311-0026-2124: hyp=['A', 'SUDDEN', 'CAR', 'HORN', 'WALKING', 'FROM', 'THIS', 'DREAM'] +4852-28312-0000-2125: ref=['OF', 'THE', 'MANY', 'TIMES', 'HE', 'HAD', 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'PORED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "WICKER'S", 'SHOP'] +4852-28312-0000-2125: hyp=['OF', 'THE', 'MANY', 'TIMES', 'HE', 'HAD', 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'POURED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "ROOKER'S", 'SHOP'] +4852-28312-0001-2126: ref=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOMEONE', 'SHOULD', 'ANSWER', 'THE', 'BELL', 'HE', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0001-2126: hyp=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOME', 'ONE', 
'SHOULD', 'ANSWER', 'THE', 'BELL', 'THEY', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0002-2127: ref=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBER'] +4852-28312-0002-2127: hyp=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBRE'] +4852-28312-0003-2128: ref=['HEAVY', 'HAND', 'HEWN', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0003-2128: hyp=['HEAVY', 'HAND', 'YOU', 'AND', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0004-2129: ref=['MISTER', "WICKER'S", 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0004-2129: hyp=['MISTER', "OCRE'S", 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0005-2130: ref=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNER', 'TO', 'TEMPLE', 'AND', 'JOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'CHEEKBONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0005-2130: hyp=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNERED', 'A', 'TEMPLE', 'ENJOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'SHEEP', 'BONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0006-2131: ref=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0006-2131: hyp=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0007-2132: ref=['CHRIS', 'BLINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0007-2132: hyp=['CRIS', 'BLINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0008-2133: ref=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0008-2133: hyp=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0009-2134: ref=['YES', 'SIR', 'HE', 'SAID'] +4852-28312-0009-2134: hyp=['YES', 'SIR', 'HE', 'SAID'] +4852-28312-0010-2135: ref=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0010-2135: hyp=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0011-2136: ref=["HE'S", 'A', 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0011-2136: hyp=["HE'S", 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0012-2137: ref=['JAKEY', 'HARRIS', 'HIS', 'NAME', 'IS', 'AND', 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0012-2137: hyp=["GIGERIS'S", 'NAME', 'IS', 'AND', 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0013-2138: ref=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0013-2138: hyp=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0014-2139: ref=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 
'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SENSITIVE', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0014-2139: hyp=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOUR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SENSITIVE', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0015-2140: ref=['HE', 'GUESSED', 'THERE', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'ROOM', 'BEYOND'] +4852-28312-0015-2140: hyp=['HE', 'GUESSED', 'THERE', 'IT', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'RUM', 'BEYOND'] +4852-28312-0016-2141: ref=['WOULD', 'THAT', 'INTERFERE', 'WITH', "JAKEY'S", 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0016-2141: hyp=['WOULD', 'THAT', 'INTERFERE', 'WITH', 'JAKIE', 'GIGS', 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0017-2142: ref=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'HAD', 'HE', 'NOT', 'SEEN', 'THE', 'REFLECTION', 'OF', 'THE', 'HEADLIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WATER', 'STREET', 'AND', 'UP', 'THE', 'HILL', 'TOWARD', 'THE', 'TRAFFIC', 'SIGNALS'] +4852-28312-0017-2142: hyp=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'REFLECTION', 'OF', 'THE', 'HEAD', 'LIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WALL', 'AT', 'HER', 'STREET', 'NOT', 'THE', 'HILL', 'TOWARD', 'THE', 'EFFIC', 'SIGNALS'] +4852-28312-0018-2143: ref=['THE', 'ROOM', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0018-2143: hyp=['THE', 'ROOM', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0019-2144: ref=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0019-2144: hyp=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0020-2145: ref=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0020-2145: hyp=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0021-2146: ref=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREEWAY'] +4852-28312-0021-2146: hyp=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREE', 'WAY'] +4852-28312-0022-2147: ref=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKESTACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0022-2147: hyp=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKE', 'STACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0023-2148: ref=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0023-2148: hyp=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0024-2149: ref=['FLABBERGASTED', 'AND', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0024-2149: hyp=['FLABRA', 'GASTENED', 'IN', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0025-2150: 
ref=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0025-2150: hyp=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0026-2151: ref=['WHERE', 'THE', "PEOPLE'S", 'DRUGSTORE', 'HAD', 'STOOD', 'BUT', 'A', 'HALF', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0026-2151: hyp=['WHERE', 'THE', "PEOPLE'S", 'DRUG', 'STORE', 'HAD', 'STOOD', 'BUT', 'HALF', 'AN', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0027-2152: ref=['A', 'COURTYARD', 'WAS', 'SPARSELY', 'LIT', 'BY', 'A', 'FLARING', 'TORCH', 'OR', 'TWO', 'SHOWING', 'A', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'A', 'POST'] +4852-28312-0027-2152: hyp=['A', 'COURTYARD', 'WAS', 'FIRSTLY', 'LIT', 'BY', 'A', 'FLARING', 'TORTURE', 'TWO', 'SHOWING', 'THE', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'THE', 'POST'] +4852-28312-0028-2153: ref=['THE', 'POST', 'WAS', 'PLANTED', 'AT', 'THE', 'EDGE', 'OF', 'WHAT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0028-2153: hyp=['THE', 'POSTPLETED', 'AT', 'THE', 'EDGE', 'OF', 'IT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0029-2154: ref=['A', 'COACH', 'WITH', 'ITS', 'TOP', 'PILED', 'HIGH', 'WITH', 'LUGGAGE', 'STAMPED', 'TO', 'A', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0029-2154: hyp=['A', 'COACH', 'WHICH', 'HAD', 'STOPPED', 'PILED', 'HIGH', 'WITH', 'LEGGED', 'STAMPED', 'O', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0030-2155: ref=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0030-2155: hyp=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0031-2156: ref=['MY', 'WINDOW', 'HAS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28312-0031-2156: hyp=['BY', 'WINDOW', 'AS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28319-0000-2070: ref=['THE', 'LEARNING', 'OF', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0000-2070: hyp=['THE', 'LEARNING', 'AND', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0001-2071: ref=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'ABOUT', 'SIMON', 'GOSLER', 'HIS', 'HORDE', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] +4852-28319-0001-2071: hyp=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'HE', 'GOT', 'SIMON', 'GOSLER', 'HIS', 'HOARD', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] +4852-28319-0002-2072: ref=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'INTO', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] +4852-28319-0002-2072: hyp=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'AND', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] +4852-28319-0003-2073: ref=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLDFISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] +4852-28319-0003-2073: hyp=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'HAD', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLD', 'FISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] 
+4852-28319-0004-2074: ref=['WHAT', 'SHALL', 'I', 'DO', 'FIRST'] +4852-28319-0004-2074: hyp=['WHAT', 'SHOULD', 'I', 'ALL', 'I', 'DO', 'FIRST'] +4852-28319-0005-2075: ref=['HOW', 'YOU', 'HAVE', 'IMPROVED', 'MY', 'BOY', 'HE', 'EXCLAIMED', 'IT', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'AND', 'THIS', 'IS', 'AS', 'GOOD', 'A', 'CHANGE', 'AS', 'ANY'] +4852-28319-0005-2075: hyp=['HOW', 'OFTEN', 'PROVED', 'MY', 'BOY', 'IT', 'EXCLAIMED', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'MISSUS', 'IS', 'GOOD', 'A', 'CHANGE', 'IS', 'ANY'] +4852-28319-0006-2076: ref=['SUPPOSE', 'I', 'CHANGE', 'AND', "CAN'T", 'CHANGE', 'BACK'] +4852-28319-0006-2076: hyp=['SUPPOSE', 'A', 'CHANGE', 'AND', "CAN'T", 'CHANCE', 'BACK'] +4852-28319-0007-2077: ref=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0007-2077: hyp=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0008-2078: ref=['THEN', 'AS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0008-2078: hyp=['THAT', 'IS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0009-2079: ref=['THE', 'SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0009-2079: hyp=['SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0010-2080: ref=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'AND', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0010-2080: hyp=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'IN', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0011-2081: ref=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0011-2081: hyp=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'THE', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0012-2082: ref=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] +4852-28319-0012-2082: hyp=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] +4852-28319-0013-2083: ref=['THE', "FIGURE'S", 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0013-2083: hyp=['THE', 'FIGURES', 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0014-2084: ref=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0014-2084: hyp=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0015-2085: ref=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0015-2085: hyp=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0016-2086: ref=['HE', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINING', 'THE', 'CARVED', 
'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0016-2086: hyp=['IT', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINED', 'THE', 'CARVED', 'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0017-2087: ref=['BUT', 'HOW', 'DID', 'ONE', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0017-2087: hyp=['BUT', 'HOW', 'DID', 'ONE', 'A', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0018-2088: ref=['CHRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0018-2088: hyp=['GRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0019-2089: ref=['HE', 'HEARD', 'THE', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'CHRIS', 'RESUMED', 'HIS', 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0019-2089: hyp=['HE', 'HEARD', 'THAT', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'MISTER', "JUNE'S", 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0020-2090: ref=['THE', 'AFTERNOON', 'RAINY', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0020-2090: hyp=['THE', 'AFTERNOON', 'RAINING', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0021-2091: ref=['DUSK', 'CAME', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLED', 'IN', 'THE', 'SKY'] +4852-28319-0021-2091: hyp=['THUS', 'GAINED', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLS', 'IN', 'THE', 'SKY'] +4852-28319-0022-2092: ref=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0022-2092: hyp=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0023-2093: ref=['MISTER', 'WICKER', 'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FLOORBOARDS', 'CREAKED', 'AND', 'STILL', 'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'POTION', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0023-2093: hyp=['MISTER', 'WICKER', 'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FOREBOARDS', 'CREAKED', 'AND', 'STILL', 'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'FORTUNE', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0024-2094: ref=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'OF', 'THE', 'NEGRO', 'BOY'] +4852-28319-0024-2094: hyp=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'AT', 'THE', 'NEGRO', 'BOY'] +4852-28319-0025-2095: ref=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0025-2095: hyp=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0026-2096: ref=['UNDER', 'HIS', 'EYES', 'THE', 'WOODEN', 
'FOLDS', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0026-2096: hyp=['UNDER', 'HIS', 'EYES', 'WIDDLED', 'THOES', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0027-2097: ref=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'THE', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'AND', 'LET', 'THE', 'BOUQUET', 'FALL'] +4852-28319-0027-2097: hyp=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'A', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'THE', 'BOUQUET', 'FALL'] +4852-28330-0000-2044: ref=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0000-2044: hyp=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0001-2045: ref=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0001-2045: hyp=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0002-2046: ref=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'AT', 'THE', 'HELM', 'BY', 'ELBERT', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0002-2046: hyp=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'TO', 'THE', 'HELM', 'BY', 'ELBER', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0003-2047: ref=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'HAVE', 'BEEN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'OPEN'] +4852-28330-0003-2047: hyp=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'BIT', 'OF', 'IN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'UP', 'AND'] +4852-28330-0004-2048: ref=['THE', 'MIRABELLE', 'WAS', 'NEARING', 'TAHITI'] +4852-28330-0004-2048: hyp=['THE', 'MIRABELLE', 'WAS', 'NEARING', 'DAEDI'] +4852-28330-0005-2049: ref=["WE'VE", 'WATER', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0005-2049: hyp=['REVOLTA', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0006-2050: ref=['CHRIS', 'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'IN', 'HURRYING', 'DOWN', 'TO', 'HIS', 'CABIN'] +4852-28330-0006-2050: hyp=['CHRIS', 'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'AND', 'HURRYING', 'DOWN', 'TO', 'HIS', 'CABIN'] +4852-28330-0007-2051: ref=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'HIS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0007-2051: hyp=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'AS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0008-2052: ref=['MISTER', 'FINNEY', 'WILL', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OVERHEARD', 'IN', 'HERE'] +4852-28330-0008-2052: hyp=['MISTER', 'FINNEY', 'WOULD', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OWNED', 'HEARD', 'AND', 'HERE'] +4852-28330-0009-2053: ref=['HIS', 'FACE', 
'FROZE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0009-2053: hyp=['HIS', 'FACE', 'ROSE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'DO', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0010-2054: ref=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0010-2054: hyp=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0011-2055: ref=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'I', 'DO'] +4852-28330-0011-2055: hyp=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'THAT', 'DO'] +4852-28330-0012-2056: ref=['THIS', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'WIFE', 'OR', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0012-2056: hyp=['THE', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'MY', 'FULL', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0013-2057: ref=['HE', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CHRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0013-2057: hyp=['AND', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0014-2058: ref=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0014-2058: hyp=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0015-2059: ref=['THIS', 'SHIP', 'ITS', 'CARGO', 'AND', 'ITS', 'MEN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0015-2059: hyp=['THE', 'SHIP', 'ITS', 'CARGO', 'IN', 'ITS', 'MAN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0016-2060: ref=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0016-2060: hyp=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0017-2061: ref=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACE', 'CREASED', 'IN', 'HIS', 'WINNING', 'SMILE'] +4852-28330-0017-2061: hyp=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACED', 'CREASED', 'IN', 'ITS', 'WINNING', 'SMILE'] +4852-28330-0018-2062: ref=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0018-2062: hyp=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0019-2063: ref=['CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANYONE', 'ELSE', 'OF', 'HIS', 'TIME', 'YET', 'UNDERSTOOD'] +4852-28330-0019-2063: hyp=['THE', 'CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANY', 'ONE', 'ELSE', 'OF', 'HIS', 'TIME', 'HE', 'HAD', 'UNDERSTOOD'] +4852-28330-0020-2064: ref=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0020-2064: 
hyp=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0021-2065: ref=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICAMENTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'CAPTAIN', 'OF', 'A', 'SHIP', 'AND', 'THE', 'CREW', 'KNOW', 'IT'] +4852-28330-0021-2065: hyp=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'A', 'CAPTAIN', 'OF', 'SHIP', 'AND', 'CREW', 'KNOW', 'IT'] +4852-28330-0022-2066: ref=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0022-2066: hyp=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0023-2067: ref=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'HAD', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0023-2067: hyp=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'AND', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0024-2068: ref=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'AND', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0024-2068: hyp=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'IN', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0025-2069: ref=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +4852-28330-0025-2069: hyp=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +533-1066-0000-796: ref=['WHEN', 'CHURCHYARDS', 'YAWN'] +533-1066-0000-796: hyp=['ONE', 'CHURCHYARDS', 'YAWN'] +533-1066-0001-797: ref=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSANDS', 'OF', 'MILES', 'IN', 'THE', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOD'] +533-1066-0001-797: hyp=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSAND', 'OF', 'MILES', 'INTO', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOT'] +533-1066-0002-798: ref=['I', 'AM', 'SURE', 'I', 'KISSED', 'LIDDY', 'AND', 'I', 'HAVE', 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEM', 'TO', 'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] +533-1066-0002-798: hyp=['I', 'AM', 'SURE', 'I', 'KISSED', 'LADY', 'AND', "I'VE", 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEMED', 'TO', 'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] +533-1066-0003-799: ref=['FORTUNATELY', 'WARNER', 'AND', 'THE', 'DETECTIVES', 'WERE', 'KEEPING', 'BACHELOR', 'HALL', 'IN', 'THE', 'LODGE'] +533-1066-0003-799: hyp=['FORTUNATELY', 'WARNER', 'ON', 'THE', 'DETECTIVE', 'WERE', 'KEEPING', 'BACHELOR', 'HOLLAND', 'LODGE'] +533-1066-0004-800: ref=['OUT', 'OF', 'DEFERENCE', 'TO', 'LIDDY', 'THEY', 'WASHED', 'THEIR', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCOCTED', 'QUEER', 'MESSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0004-800: hyp=['OUT', 'OF', 'DEFERENCE', 'TO', 'LIVE', 'THEY', 'WASHED', 'HER', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCLUDED', 'QUEER', 'MASSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0005-801: ref=['MISS', 'INNES', 'HE', 
'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UP', 'STAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TONIGHT'] +533-1066-0005-801: hyp=['MISS', 'EAMES', 'HE', 'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UPSTAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TO', 'NIGHT'] +533-1066-0006-802: ref=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0006-802: hyp=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0007-803: ref=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0007-803: hyp=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0008-804: ref=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0008-804: hyp=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0009-805: ref=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0009-805: hyp=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0010-806: ref=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'OLD', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0010-806: hyp=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'ALL', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0011-807: ref=['LIDDY', 'WAS', 'SLEEPING', 'THE', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UP', 'STAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0011-807: hyp=['LEAVING', 'WAS', 'SLEEPING', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UPSTAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0012-808: ref=['THEY', 'WERE', 'TALKING', 'CONFIDENTIALLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0012-808: hyp=['YOU', 'WERE', 'TALKING', 'UFFILIENTLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0013-809: ref=['THERE', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'THE', 'LOCKS', 'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTED', 'AS', 'TO', 'RENEWED', 'VIGILANCE', 'AND', 'THEN', 'AFTER', 'EXTINGUISHING', 'THE', 'HALL', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0013-809: hyp=['THERE', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'LOCKS', 'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTIVE', 'AS', 'TO', 'RENEWED', 'VISUALS', 'AND', 'THEN', 'AFTER', 'DISTINGUISHING', 'THE', 'WHOLE', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0014-810: ref=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0014-810: hyp=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0015-811: ref=['ONCE', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'BIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0015-811: hyp=['WAS', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'FIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0016-812: 
ref=['I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0016-812: hyp=['I', 'ARE', 'TO', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0017-813: ref=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'WHEN', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0017-813: hyp=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'WHEN', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0018-814: ref=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0018-814: hyp=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0019-815: ref=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0019-815: hyp=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0020-816: ref=['IT', 'WAS', 'ALEX', 'ARMED', 'WITH', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0020-816: hyp=['IT', 'WAS', 'ALEX', "I'M", 'THE', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0021-817: ref=['THE', 'DOCTOR', 'KEPT', 'A', 'KEEN', 'LOOKOUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0021-817: hyp=['THE', 'DOCTOR', 'KEPT', 'A', 'KIN', 'LOOK', 'OUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0022-818: ref=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0022-818: hyp=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0023-819: ref=['A', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'HANDIER', 'AT', 'BURYING', 'FOLKS', 'THAN', 'AT', 'DIGGING', 'THEM', 'UP'] +533-1066-0023-819: hyp=['THE', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'A', 'HANDIER', 'AT', 'BERING', 'FOLKS', 'THAN', 'A', 'TIGGING', 'THEM', 'UP'] +533-1066-0024-820: ref=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'THERE', 'AND', 'LOOKED', 'DOWN'] +533-1066-0024-820: hyp=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'TERRANT', 'LOOKED', 'DOWN'] +533-131556-0000-821: ref=['BUT', 'HOW', 'AM', 'I', 'TO', 'GET', 'OVER', 'THE', 'TEN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0000-821: hyp=['BUT', 'HOW', 'AM', 'I', 'TO', 'ADD', 'OVER', 'THE', 'TOWN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0001-822: ref=['FOR', 'NONE', 'COULD', 'INJURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0001-822: hyp=['FOR', 'NONE', 'COULD', 'ENDURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0002-823: ref=['THE', 'WORD', 'STARES', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0002-823: hyp=['THE', 'WORD', 'STARES', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0003-824: ref=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMULATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0003-824: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMILATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 
'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0004-825: ref=['I', 'HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'THEM', 'SO', 'MINUTELY'] +533-131556-0004-825: hyp=['HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'HIM', 'SUMINUTELY'] +533-131556-0005-826: ref=['THEY', 'HAD', 'BETAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LESS', 'TO', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'DEPRECATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'A', 'BOOK'] +533-131556-0005-826: hyp=['THE', 'YEAR', 'HAD', 'TAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LEST', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'THE', 'PROCATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'THE', 'BOOK'] +533-131556-0006-827: ref=['I', 'AM', 'TOO', 'WELL', 'ACQUAINTED', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'THE', 'APPEARANCE', 'OF', 'IT'] +533-131556-0006-827: hyp=['I', 'AM', 'TOO', 'EQUANT', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'YOUR', 'PUREST', 'OF', 'IT'] +533-131556-0007-828: ref=['UPON', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0007-828: hyp=['UP', 'AND', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0008-829: ref=['YOU', 'MAY', 'GO', 'MILICENT', 'AND', "SHE'LL", 'FOLLOW', 'IN', 'A', 'WHILE', 'MILICENT', 'WENT'] +533-131556-0008-829: hyp=['YOU', 'MAY', 'GO', 'MILLSON', 'AND', "SHE'LL", 'FOLLOWING', 'AWHILE', 'MELLICENT', 'WENT'] +533-131556-0009-830: ref=['WILL', 'YOU', 'OBLIGE', 'ME', 'HELEN', 'CONTINUED', 'SHE'] +533-131556-0009-830: hyp=["OLI'", 'OBLIGE', 'ME', 'ALAN', 'CONTINUED', 'SHE'] +533-131556-0010-831: ref=['AH', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0010-831: hyp=['HA', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0011-832: ref=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0011-832: hyp=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0012-833: ref=['I', 'ENJOY', 'A', 'MOONLIGHT', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UPON', 'HER', 'AND', 'THE', 'SHRUBBERY', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVOURITE', 'RESORTS'] +533-131556-0012-833: hyp=['ENJOY', "YOU'VE", 'MALE', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UP', 'ON', 'EARTH', 'AND', 'FREDERI', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVORITE', 'RESORTS'] +533-131556-0013-834: ref=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0013-834: hyp=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0014-835: ref=['I', 'WATCHED', 'HER', 'A', 'FEW', 'MOMENTS', 'WITH', 'A', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] 
+533-131556-0014-835: hyp=['I', 'WATCH', 'FOR', 'A', 'FEW', 'MOMENTS', 'TO', 'THE', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] +533-131556-0015-836: ref=['YES', 'YES'] +533-131556-0015-836: hyp=['YES', 'YES'] +533-131556-0016-837: ref=['SUPPOSE', 'I', 'DO'] +533-131556-0016-837: hyp=['SUPPOSE', 'I', 'DO'] +533-131556-0017-838: ref=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0017-838: hyp=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0018-839: ref=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0018-839: hyp=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0019-840: ref=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'READY', 'TO', 'ACKNOWLEDGE', 'MYSELF', 'YOUR', 'DEBTOR', 'FOR', 'AN', 'ACT', 'OF', 'THE', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0019-840: hyp=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'RATHER', 'TO', 'ANNOUNCE', 'MYSELF', 'YOUR', 'DEPTOR', 'FOR', 'AN', 'ACT', 'OF', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0020-841: ref=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0020-841: hyp=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0021-842: ref=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0021-842: hyp=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0022-843: ref=['AND', 'MILICENT', 'WILL', 'YOU', 'TELL', 'HER'] +533-131556-0022-843: hyp=['AND', 'MILLICENT', 'WILL', 'IT', 'TELL', 'HER'] +533-131556-0023-844: ref=['I', 'WOULD', 'NOT', 'FOR', 'MUCH', 'THAT', 'SHE', 'SHOULD', 'KNOW', 'THE', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] +533-131556-0023-844: hyp=['I', 'WILL', 'NOT', 'FOR', 'MUCH', 'THAT', 'YOU', 'SHOULD', 'NOT', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] +533-131556-0024-845: ref=['YOU', 'USE', 'HARD', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0024-845: hyp=['YOU', 'USE', 'OUR', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0025-846: ref=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131556-0025-846: hyp=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131562-0000-847: ref=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'TURNING', 'TO', 'WHERE', 'I', 'STOOD', 'WRINGING', 'MY', 'HANDS', 'IN', 'SILENT', 'RAGE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', "I'LL", 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'KEYS'] +533-131562-0000-847: hyp=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'SHOWING', 'TO', 'HER', 'EYES', 'TOO', 
'WRINGING', 'MY', 'HAND', 'IN', 'SILENT', 'RATE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', 'I', 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'CASE'] +533-131562-0001-848: ref=['THE', 'KEYS', 'OF', 'YOUR', 'CABINET', 'DESK', 'DRAWERS', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0001-848: hyp=['IT', 'ACCUSE', 'OF', 'YOUR', 'CABINET', 'DESKED', 'RAOUL', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0002-849: ref=['THE', 'KEY', 'OF', 'MY', 'DESK', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'THE', 'LOCK', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0002-849: hyp=['THE', 'KEY', 'OF', 'MY', 'VES', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'LOVE', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0003-850: ref=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0003-850: hyp=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0004-851: ref=['AND', 'PUTTING', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0004-851: hyp=['AND', 'PUT', 'IN', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0005-852: ref=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THE', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0005-852: hyp=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0006-853: ref=['MISTER', 'HUNTINGDON', 'THEN', 'WENT', 'UP', 'STAIRS'] +533-131562-0006-853: hyp=['MISTER', 'HANTINGDON', 'THEN', 'WENT', 'UPSTAIRS'] +533-131562-0007-854: ref=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'THE', 'VERY', 'DEVIL', 'FOR', 'SPITE'] +533-131562-0007-854: hyp=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'VERY', 'DEVIL', 'FOR', 'A', 'SPITE'] +533-131562-0008-855: ref=['I', "DIDN'T", 'SAY', "I'D", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0008-855: hyp=['I', "DIDN'T", 'SAY', "I'VE", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0009-856: ref=['I', 'SHALL', 'PUT', 'YOU', 'UPON', 'A', 'SMALL', 'MONTHLY', 'ALLOWANCE', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'THE', 'TEMPTATION'] +533-131562-0009-856: hyp=['I', 'SHALL', 'PUT', 'YOU', 'UP', 'IN', 'A', 'SMALL', 'MOUTHFUL', 'LAW', 'AS', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'TEMPTATION'] +533-131562-0010-857: ref=['AND', 'AS', 'FOR', 'THE', 'HOUSEHOLD', 'MATTERS', 'MISSUS', 'GREAVES', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UPON', 'AN', 'ENTIRELY', 'NEW', 'PLAN'] +533-131562-0010-857: hyp=['AND', 'AS', 'FOR', 'THE', 'HOUSE', 'OR', 'MATTERS', 'MISSUS', 'GREEBS', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UP', 'IN', 'AN', 'ENTIRELY', 'NEW', 
'PLAN'] +533-131562-0011-858: ref=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HUNTINGDON'] +533-131562-0011-858: hyp=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HARDINGEN'] +533-131562-0012-859: ref=['HAVE', 'I', 'ATTEMPTED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0012-859: hyp=['IF', 'I', 'ATTENDED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0013-860: ref=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', "IT'S", 'BEST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0013-860: hyp=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', 'IS', 'FAST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0014-861: ref=['HERE', 'BENSON', 'ENTERED', 'WITH', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'A', 'BRIEF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STILL', 'IN', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0014-861: hyp=['HERE', 'BUILTON', 'ENTERED', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'THE', 'ROOF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STEALING', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0015-862: ref=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'WILL', 'RETURN', 'UPON', 'ME'] +533-131562-0015-862: hyp=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'TO', 'RETURN', 'UPON', 'ME'] +533-131562-0016-863: ref=['I', 'TRY', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLEAVE', 'TO', 'THE', 'DUST'] +533-131562-0016-863: hyp=['I', 'TRIED', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLIFF', 'TO', 'THE', 'DUST'] +533-131564-0000-768: ref=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0000-768: hyp=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0001-769: ref=['MISTER', 'AND', 'MISSUS', 'HATTERSLEY', 'HAVE', 'BEEN', 'STAYING', 'AT', 'THE', 'GROVE', 'A', 'FORTNIGHT', 'AND', 'AS', 'MISTER', 'HARGRAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'THE', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'A', 'DAY', 'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILICENT', 'AND', 'ESTHER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0001-769: hyp=['MISS', 'AND', 'MISSUS', 'HALTERSLEY', 'HAVE', 'BEEN', 'SEEING', 'IT', 'TO', 'GROW', 'A', 'FORTNIGHT', 'AND', 'AS', 'MISSUS', 'HARGREAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'THE', 'DAY', 'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILLSON', 'AND', 'ASSER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0002-770: ref=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'ME', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0002-770: hyp=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'YOU', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0003-771: ref=['I', "CAN'T", 'YOU', "DON'T", 'WANT', 'HIM', 'DO', 'YOU'] +533-131564-0003-771: hyp=['I', "CAN'T", 'IT', 'ANNOUNCE', 'HIM', 'DO', 'YOU'] +533-131564-0004-772: ref=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'OUGHT', 'TO', 'HAVE', 'FORMED', 'LONG', 'AGO'] +533-131564-0004-772: hyp=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'ARE', 'REFORMED', 'LONG', 'AGO'] +533-131564-0005-773: ref=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEARTS', 'THOUGH', 
'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0005-773: hyp=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEART', 'THOUGH', 'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0006-774: ref=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'BAD', 'AS', 'I', 'AM'] +533-131564-0006-774: hyp=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'THAT', 'WAS', 'I', 'AM'] +533-131564-0007-775: ref=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0007-775: hyp=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0008-776: ref=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOUR', 'SONS', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0008-776: hyp=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOURSELVES', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0009-777: ref=['OH', 'NO', 'I', "COULDN'T", 'STAND', 'THAT'] +533-131564-0009-777: hyp=['OH', 'NO', 'ECHOLON', 'STAND', 'THAT'] +533-131564-0010-778: ref=['FIRE', 'AND', 'FURY'] +533-131564-0010-778: hyp=['FAR', 'AND', 'FURY'] +533-131564-0011-779: ref=['NOW', "DON'T", 'BURST', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0011-779: hyp=['NOW', "DON'T", 'FORCE', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0012-780: ref=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0012-780: hyp=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0013-781: ref=['NOT', 'YEARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0013-781: hyp=['NOT', 'EARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0014-782: ref=['WHAT', 'WOULD', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WORRY', 'HER', 'TO', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0014-782: hyp=['WHAT', 'DID', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WERE', 'HE', 'HURT', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0015-783: ref=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0015-783: hyp=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0016-784: ref=['HE', 'FOLLOWED', 'ME', 'INTO', 'THE', 'LIBRARY'] +533-131564-0016-784: hyp=['IF', 'I', 'WILL', 'MEAN', 'TO', 'THE', 'LIBRARY'] +533-131564-0017-785: ref=['I', 'SOUGHT', 'OUT', 'AND', 'PUT', 'INTO', 'HIS', 'HANDS', 'TWO', 'OF', "MILICENT'S", 'LETTERS', 'ONE', 'DATED', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', 'WILDEST', 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'A', 'LUCID', 'INTERVAL'] +533-131564-0017-785: hyp=['I', 'SAW', 'THEN', 'PUT', 'INTO', 'HIS', 'HAND', 'TWO', 'OF', 'MILICENT', 'LADDERS', 'ONE', 'DID', 'IT', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', "WALLA'S", 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'ELUSIVE', 'INTERVAL'] +533-131564-0018-786: ref=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', 'GRIMSBY', 'AND', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENIOUSLY', 'THROWING', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'TO', 'OTHER', "MEN'S", 
'SHOULDERS'] +533-131564-0018-786: hyp=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', "GRIM'S", 'BEING', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENUOUSLY', 'THREW', 'IN', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'THE', 'OTHER', "MAN'S", 'SHOULDERS'] +533-131564-0019-787: ref=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'A', 'HEARTY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'D', 'N', 'ME', 'IF', 'I', "DON'T"] +533-131564-0019-787: hyp=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'EARTHLY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'THEN', 'ME', 'IF', 'I', "DON'T"] +533-131564-0020-788: ref=['IF', 'YOU', 'INTEND', 'TO', 'REFORM', 'INVOKE', "GOD'S", 'BLESSING', 'HIS', 'MERCY', 'AND', 'HIS', 'AID', 'NOT', 'HIS', 'CURSE'] +533-131564-0020-788: hyp=['IF', 'YOU', 'INSENT', 'REFORM', 'INVOKE', "GOD'S", 'BLESSING', 'IS', 'A', 'MERCY', 'IN', 'THIS', 'APE', 'NOR', 'DISCOURSE'] +533-131564-0021-789: ref=['GOD', 'HELP', 'ME', 'THEN', 'FOR', "I'M", 'SURE', 'I', 'NEED', 'IT'] +533-131564-0021-789: hyp=['GOD', 'HELP', 'ME', 'THEN', 'FOR', 'I', 'AM', 'SURE', 'I', 'NEEDED'] +533-131564-0022-790: ref=["WHERE'S", 'MILICENT'] +533-131564-0022-790: hyp=['WHERE', 'IS', 'MILICENT'] +533-131564-0023-791: ref=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'HER', 'ROUND', 'AND', 'PUSHING', 'HER', 'TOWARDS', 'ME'] +533-131564-0023-791: hyp=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'AROUND', 'AND', 'PUSHING', 'IT', 'TOWARDS', 'ME'] +533-131564-0024-792: ref=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERFLOWING', 'WITH', 'GRATITUDE'] +533-131564-0024-792: hyp=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERWHELMING', 'ITS', 'GRATITUDE'] +533-131564-0025-793: ref=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0025-793: hyp=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0026-794: ref=['YOU', 'NEVER', 'TRIED', 'ME', 'MILLY', 'SAID', 'HE'] +533-131564-0026-794: hyp=['YOU', 'NEVER', 'TRIED', 'ME', 'MERELY', 'SAID', 'HE'] +533-131564-0027-795: ref=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +533-131564-0027-795: hyp=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +5442-32873-0000-1365: ref=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0000-1365: hyp=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0001-1366: ref=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THERE', 'AND', 'A', 'WILD', 'LEER', 'IN', 'HIS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0001-1366: hyp=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THERE', 'AND', 'A', 'WILD', 'URNOUS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0002-1367: ref=['A', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0002-1367: hyp=['AND', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0003-1368: 
ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0003-1368: hyp=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0004-1369: ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL'] +5442-32873-0004-1369: hyp=['OH', 'FRIGHTFUL', 'DREADFUL'] +5442-32873-0005-1370: ref=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0005-1370: hyp=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0006-1371: ref=['BRIGHT', 'AND', 'NATTY', 'WERE', 'THE', 'CHINTZ', 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIRES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0006-1371: hyp=['BRIGHT', 'AND', 'NATTY', 'WITH', "CHIN'S", 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIVES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0007-1372: ref=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TOILET', 'BOTTLE', 'OF', 'EAU', 'DE', 'COLOGNE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLES', 'AND', 'FOREHEAD'] +5442-32873-0007-1372: hyp=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TALLED', 'BOTTLE', 'OF', 'OVERCLONE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLE', 'AND', 'FOREHEAD'] +5442-32873-0008-1373: ref=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'HER', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0008-1373: hyp=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'A', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0009-1374: ref=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0009-1374: hyp=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0010-1375: ref=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'IN', 'THE', 'GLASS'] +5442-32873-0010-1375: hyp=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'FROM', 'THE', 'GLASS'] +5442-32873-0011-1376: ref=['RACHEL', 'LAKE', 'RACHEL', 'LAKE', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0011-1376: hyp=['RICHLY', 'LATER', 'MID', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0012-1377: ref=["I'LL", 'STAY', 'HERE', 'THAT', 
'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0012-1377: hyp=["I'LL", 'STAY', 'HERE', 'THAT', 'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0013-1378: ref=['HE', 'SLACKENED', 'HIS', 'PACE', 'AND', 'TAPPED', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THAT', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'WAGGOT', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0013-1378: hyp=['HIS', 'CLACK', 'IN', 'THE', 'SPACE', 'AND', 'TOP', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THE', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'RAGGED', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0014-1379: ref=['LUKE', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOGS', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOG', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DOG', 'CART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0014-1379: hyp=['LOOK', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOG', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOOR', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DORCART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0015-1380: ref=['BUT', 'LUKE', 'WAS', 'NOT', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GYLINGDEN'] +5442-32873-0015-1380: hyp=['BUT', 'LUKE', 'WAS', 'KNOWN', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GILINGDEN'] +5442-32873-0016-1381: ref=['HERE', 'WERE', 'THE', 'FLOW', 'OF', 'SOUL', 'AND', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'SCAPEGRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0016-1381: hyp=['HERE', 'WERE', 'THE', 'FLOOR', 'OF', 'SOUL', 'UN', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 
'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'CAPE', 'BRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0017-1382: ref=['LOSE', 'NO', 'TIME', 'AND', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0017-1382: hyp=['LOSE', 'NO', 'TIME', 'BUT', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0018-1383: ref=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDEAWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOG', 'CART', 'WAS', 'TRUNDLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEY', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'UNDER', 'STARTLING', 'CIRCUMSTANCES', 'AT', 'UNSEASONABLE', 'HOURS'] +5442-32873-0018-1383: hyp=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDE', 'AWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOOR', 'CART', 'WAS', 'TUMBLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEIR', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'AND', 'THE', 'STARTLING', 'CIRCUMSTANCES', 'AND', 'UNSEASONABLE', 'HOURS'] +5442-32873-0019-1384: ref=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TAMAR', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TAMAR'] +5442-32873-0019-1384: hyp=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TO', 'MORROW', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TO', 'MORROW'] +5442-41168-0000-1385: ref=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0000-1385: hyp=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0001-1386: ref=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0001-1386: hyp=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0002-1387: ref=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COURSE', 'SERGEY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0002-1387: hyp=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COARSE', 'SURGY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0003-1388: ref=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ORDER', 'A', 'BALLOT'] +5442-41168-0003-1388: hyp=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 
'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ODO', 'A', 'BALLOT'] +5442-41168-0004-1389: ref=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0004-1389: hyp=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0005-1390: ref=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARSHAL', "HE'S", 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0005-1390: hyp=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'BUT', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARTIAN', 'IS', 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0006-1391: ref=['VOTES', 'PLEASE', 'BEASTLY'] +5442-41168-0006-1391: hyp=['VAULTS', 'PLEASE', 'BEASTLY'] +5442-41168-0007-1392: ref=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0007-1392: hyp=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0008-1393: ref=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLEROV', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0008-1393: hyp=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELLED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLARE', 'OFF', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0009-1394: ref=['HE', 'FORGOT', 'AS', 'SERGEY', 'IVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'SYLLOGISM', 'THAT', 'IT', 'WAS', 'NECESSARY', 'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'VOTES', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURE', 'THE', 'RECOGNITION', 'OF', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0009-1394: hyp=['HE', 'FORGOT', 'AS', 'SO', 'GIVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'DILIGION', 'THAT', 'IT', 'WAS', 'NECESSARY', 'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 'INTO', 'THE', 'MARTIAN', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'BOATS', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FUROV'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURE', 'THE', 'RECOGNITION', 'OF', "FLORA'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0010-1395: ref=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0010-1395: hyp=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 
'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0011-1396: ref=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0011-1396: hyp=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0012-1397: ref=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GRAY', 'WHISKERED', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'SCORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JEERED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0012-1397: hyp=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GREY', 'WHISKIRT', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'CORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JERED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0013-1398: ref=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0013-1398: hyp=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0014-1399: ref=['SERGEY', 'IVANOVITCH', 'FROWNED'] +5442-41168-0014-1399: hyp=['SOJOURNOVITCH', 'FROWNED'] +5442-41168-0015-1400: ref=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0015-1400: hyp=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0016-1401: ref=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'IN', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0016-1401: hyp=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0017-1402: ref=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0017-1402: hyp=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0018-1403: ref=['SANG', 'OUT', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0018-1403: hyp=['SANG', 'ALL', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0019-1404: ref=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'A', 'BUTTON', 'AND', 'TWO', 'NUTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] +5442-41168-0019-1404: hyp=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'OF', 'BOTTOM', 'AND', 'TWO', 'KNOTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] 
+5442-41168-0020-1405: ref=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0020-1405: hyp=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0021-1406: ref=['IN', 'REPLY', 'SNETKOV', 'SPOKE', 'OF', 'THE', 'TRUST', 'THE', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'IN', 'HIM', 'THE', 'AFFECTION', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0021-1406: hyp=['INTERPLIES', 'NED', 'COUGH', 'SPOKE', 'OF', 'THE', 'TRUST', 'AND', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'ON', 'HIM', 'THE', 'EFFECT', 'ON', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0022-1407: ref=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'BEFORE', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTEE', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0022-1407: hyp=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'FOR', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTY', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0023-1408: ref=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0023-1408: hyp=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0024-1409: ref=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0024-1409: hyp=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0025-1410: ref=['THEY', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0025-1410: hyp=['THERE', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0026-1411: ref=['TWO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'FOR', 'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'A', 'THIRD', 'HAD', 'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0026-1411: hyp=['DO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'WAS', 'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'THE', 'THIRD', 'HAD', 'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0027-1412: ref=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLEROV', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41168-0027-1412: hyp=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLAREFF', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 
'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41169-0000-1413: ref=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SERGEY', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'HIS', "EQUERRY'S", 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0000-1413: hyp=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SO', 'SHE', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'AN', 'EQUERRIES', 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0001-1414: ref=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0001-1414: hyp=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0002-1415: ref=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0002-1415: hyp=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0003-1416: ref=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0003-1416: hyp=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0004-1417: ref=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0004-1417: hyp=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0005-1418: ref=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0005-1418: hyp=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0006-1419: ref=['LAST', 'YEAR', 'AT', 'OUR', 'DISTRICT', 'MARSHAL', 'NIKOLAY', "IVANOVITCH'S"] +5442-41169-0006-1419: hyp=['MUST', 'YOU', 'ARE', 'A', 'DISTRICT', 'MARTIAL', 'NICCLAY', "IVANOVITCH'S"] +5442-41169-0007-1420: ref=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0007-1420: hyp=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0008-1421: ref=['WHY', 'WHAT', 'IS', 'THERE', 'TO', 'UNDERSTAND'] +5442-41169-0008-1421: hyp=['WHY', 'WHAT', 'IS', 'THAT', 'TO', 'UNDERSTAND'] +5442-41169-0009-1422: ref=["THERE'S", 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0009-1422: hyp=['THERE', 'IS', 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0010-1423: ref=['THEN', 'TOO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0010-1423: hyp=['THEN', 'TOO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0011-1424: ref=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] 
+5442-41169-0011-1424: hyp=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] +5442-41169-0012-1425: ref=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTERESTS'] +5442-41169-0012-1425: hyp=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTEREST'] +5442-41169-0013-1426: ref=["THEY'RE", 'PROPRIETORS', 'OF', 'A', 'SORT', 'BUT', "WE'RE", 'THE', 'LANDOWNERS'] +5442-41169-0013-1426: hyp=['THEIR', 'PROPRIETORS', 'OF', 'ASSAULT', 'BUT', 'WE', 'ARE', 'THE', 'LANDOWNERS'] +5442-41169-0014-1427: ref=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0014-1427: hyp=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0015-1428: ref=['IF', "WE'RE", 'LAYING', 'OUT', 'A', 'GARDEN', 'PLANNING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', "YOU'VE", 'A', 'TREE', "THAT'S", 'STOOD', 'FOR', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWERBEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0015-1428: hyp=['IF', 'WE', 'ARE', 'LAYING', 'OUT', 'A', 'GARDEN', 'CLANNING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', 'YOU', 'HAVE', 'A', 'TREE', 'THAT', 'STOOD', 'IN', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWER', 'BEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0016-1429: ref=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0016-1429: hyp=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0017-1430: ref=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0017-1430: hyp=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0018-1431: ref=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0018-1431: hyp=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0019-1432: ref=['AND', "WHAT'S", 'MORE', 'THE', 'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0019-1432: hyp=['AND', 'ONCE', 'MORE', 'THE', 'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0020-1433: ref=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0020-1433: hyp=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0021-1434: ref=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'VASSILIEVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', "GARDEN'S", 'NEGLECTED'] +5442-41169-0021-1434: hyp=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'MISSILEVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', 'GARDENS', 'NEGLECTED'] 
+5442-41169-0022-1435: ref=['TO', 'MY', 'THINKING', "I'D", 'CUT', 'DOWN', 'THAT', 'LIME', 'TREE'] +5442-41169-0022-1435: hyp=['TO', 'MY', 'THINKING', "I'D", 'GOT', 'DOWN', 'THE', 'LINE', 'TREE'] +5442-41169-0023-1436: ref=['HERE', "YOU'VE", 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0023-1436: hyp=['HERE', 'YOU', 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0024-1437: ref=["YOU'RE", 'MARRIED', "I'VE", 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0024-1437: hyp=["YOU'RE", 'MARRIED', 'I', 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0025-1438: ref=['YES', "IT'S", 'RATHER', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0025-1438: hyp=['YES', 'AND', 'JARDA', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0026-1439: ref=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MUSTACHES'] +5442-41169-0026-1439: hyp=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MOUSTACHES'] +5442-41169-0027-1440: ref=['WHY', "DON'T", 'WE', 'CUT', 'DOWN', 'OUR', 'PARKS', 'FOR', 'TIMBER'] +5442-41169-0027-1440: hyp=['WHY', "DON'T", 'WE', 'GO', 'DOWN', 'OUR', 'BOX', 'FOR', 'TIMBER'] +5442-41169-0028-1441: ref=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0028-1441: hyp=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0029-1442: ref=["THERE'S", 'A', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', "OUGHTN'T", 'TO', 'DO'] +5442-41169-0029-1442: hyp=["THERE'S", 'THE', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', 'OUGHT', 'NOT', 'KNOWN', 'TO', 'DO'] +5442-41169-0030-1443: ref=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0030-1443: hyp=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0031-1444: ref=['WITHOUT', 'A', 'RETURN', 'TOO', 'AT', 'A', 'SIMPLE', 'LOSS'] +5442-41169-0031-1444: hyp=['WITHOUT', 'A', 'RETURN', 'TOO', 'ADD', 'A', 'SIMPLE', 'LOSS'] +5484-24317-0000-571: ref=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLUS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0000-571: hyp=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLASS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0001-572: ref=['BUT', 'HERMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'IN', 'HONOUR', 'OF', 'THE', 'SEVENTIETH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANOR'] +5484-24317-0001-572: hyp=['BUT', 'HARMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'AN', 'HONOR', 'OF', 'THE', 'SEVENTEENTH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANER'] +5484-24317-0002-573: ref=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 
'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0002-573: hyp=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0003-574: ref=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0003-574: hyp=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0004-575: ref=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0004-575: hyp=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0005-576: ref=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0005-576: hyp=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0006-577: ref=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUED', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MYRTILUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0006-577: hyp=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUE', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MERTOLUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0007-578: ref=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'THE', 'DOUBTS', 'WHICH', 'IT', 'AROUSED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0007-578: hyp=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'THE', 'DOUBT', 'WHICH', 'IT', 'ARISED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0008-579: ref=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBOURING', 'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] +5484-24317-0008-579: hyp=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBORING', 'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] +5484-24317-0009-580: ref=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0009-580: hyp=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 
'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0010-581: ref=['WHEN', 'HERMON', 'ENTERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATEUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0010-581: hyp=['WHEN', 'HERMAN', 'ANSWERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATIUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0011-582: ref=['THE', 'PLACE', 'BY', "HERMON'S", 'SIDE', 'WHICH', 'ALTHEA', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSINOE'] +5484-24317-0011-582: hyp=['THEY', 'PLACED', 'BY', "HAHMON'S", 'SIDE', 'WHICH', 'ALTHIE', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSENO'] +5484-24317-0012-583: ref=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'HIS', 'BLINDNESS', 'DEBARRED', 'HIM'] +5484-24317-0012-583: hyp=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'IS', 'BLINDNESS', 'DEBARED', 'HIM'] +5484-24317-0013-584: ref=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELT', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TIE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0013-584: hyp=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELL', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TYPE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0014-585: ref=['CRATES', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 'CHARMING', 'FAIR', 'HAIRED', 'NICO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSINOE'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0014-585: hyp=['CREEDS', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 'CHARMING', 'FAIR', 'HAIRED', 'NIGO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSENO'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0015-586: ref=['HIS', 'SON', 'HAD', 'BEEN', 'THIS', 'ROYAL', "DAME'S", 'FIRST', 'HUSBAND', 'AND', 'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LYSIMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0015-586: hyp=['HIS', 'SON', 'HAD', 'BEEN', 'THE', 'ROYAL', "JAMES'S", 'FIRST', 'HUSBAND', 'AND', 
'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LISMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0016-587: ref=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HERMON', 'INCREDULOUSLY'] +5484-24317-0016-587: hyp=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HARMON', 'INCREDULOUSLY'] +5484-24317-0017-588: ref=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECHES', 'AND', 'TIRING', 'WOMEN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0017-588: hyp=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECHES', 'ENTIRE', 'WOMAN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0018-589: ref=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'ARE', 'HERE', 'AMYNTAS', 'THE', 'LEECH', 'CHRYSIPPUS', 'AND', 'THE', 'ADMIRABLE', 'PROCLUS'] +5484-24317-0018-589: hyp=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'I', 'HEAR', 'I', 'MEANTIS', 'THE', 'LEECH', 'CHRYSIPPUS', 'IN', 'THE', 'ADMIRABLE', 'PROCLASS'] +5484-24317-0019-590: ref=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOUR', 'LEAVED', 'ONE'] +5484-24317-0019-590: hyp=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOLLY', 'TO', 'ONE'] +5484-24317-0020-591: ref=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISEWORTHY', 'GENEROSITY', 'HELPED', 'ARSINOE', 'IN', 'MANY', 'AN', 'EMBARRASSMENT'] +5484-24317-0020-591: hyp=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISED', 'WORTHY', 'GENEROSITY', 'HELPED', 'AUTON', 'IN', 'MANY', 'EMBARRASSMENT'] +5484-24317-0021-592: ref=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FOR', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 'DELIGHT'] +5484-24317-0021-592: hyp=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FROM', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 'DELIGHT'] +5484-24317-0022-593: ref=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0022-593: hyp=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0023-594: ref=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HERMON'] +5484-24317-0023-594: hyp=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HARMON'] +5484-24317-0024-595: ref=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSINOE', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYES', 
'HAD', 'BEEN', 'MADE', 'IN', 'RHODES'] +5484-24317-0024-595: hyp=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSENO', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYE', 'HAD', 'BEEN', 'MADE', 'IN', 'ROADS'] +5484-24317-0025-596: ref=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHEA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0025-596: hyp=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHEA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0026-597: ref=['HERMON', 'LISTENED', 'TO', 'THE', 'PAIR', 'IN', 'SILENCE'] +5484-24317-0026-597: hyp=['HERMAN', 'LISTENED', 'TO', 'THE', 'PARENT', 'SILENCE'] +5484-24317-0027-598: ref=['THE', 'RHODIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSINOE', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMON'S", 'FELLOW', 'STUDENT', 'ASKED', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WHICH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0027-598: hyp=['THE', 'RADIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSENAL', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMANN'S", 'FELLOW', 'STUDENT', 'ASK', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WITCH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0028-599: ref=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 'HIM'] +5484-24317-0028-599: hyp=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 'HIM'] +5484-24317-0029-600: ref=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0029-600: hyp=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0030-601: ref=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHEA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HERMON'S", 'REGARD'] +5484-24317-0030-601: hyp=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHIA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HERMANN'S", 'REGARD'] +5484-24317-0031-602: ref=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 
'AS', 'YOU', 'WERE', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0031-602: hyp=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 'AS', 'YOU', 'WAS', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0032-603: ref=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'ENSNARES', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0032-603: hyp=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'AND', 'SNATHS', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0033-604: ref=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24317-0033-604: hyp=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24318-0000-605: ref=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0000-605: hyp=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0001-606: ref=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMON', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THE', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0001-606: hyp=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMANN', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THEY', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0002-607: ref=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0002-607: hyp=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0003-608: ref=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0003-608: hyp=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0004-609: ref=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 'THIS', 'SUDDEN', 'END', 'AS', 'A', 'BOON', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0004-609: hyp=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 'HIS', 'SUDDEN', 'END', 'AS', 'A', 'BOOM', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0005-610: ref=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0005-610: hyp=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0006-611: ref=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0006-611: hyp=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0007-612: ref=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMON', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 
'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0007-612: hyp=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMAN', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0008-613: ref=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'MYRTILUS', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'OF', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0008-613: hyp=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'BURTLES', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'A', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0009-614: ref=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0009-614: hyp=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0010-615: ref=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0010-615: hyp=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0011-616: ref=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0011-616: hyp=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0012-617: ref=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0012-617: hyp=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0013-618: ref=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0013-618: hyp=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0014-619: ref=['CRIED', 'HERMON', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOWED', 'THE', 'SOLDIER', 'WHOM', 'THE', 'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0014-619: hyp=['CRIED', 'HERMANN', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOW', 'THE', 'SOLDIER', 'WHOM', 'THE', 
'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0015-620: ref=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0015-620: hyp=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0016-621: ref=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIPPUS', 'SILENCED', 'HER', 'WITH', 'THE', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0016-621: hyp=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIP', 'WAS', 'SILENCED', 'HER', 'WITH', 'A', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0017-622: ref=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'THE', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURN', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0017-622: hyp=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'THE', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURNED', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0018-623: ref=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MENTAL', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0018-623: hyp=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MANTLE', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0019-624: ref=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PALAESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0019-624: hyp=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PELLESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0020-625: ref=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0020-625: hyp=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0021-626: ref=['DAPHNE', 'EXCLAIMED', 
'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0021-626: hyp=['JAPHANE', 'EXCLAIMED', 'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0022-627: ref=['BUT', 'HERMON', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0022-627: hyp=['BUT', 'HARMON', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0023-628: ref=['THEN', 'DAPHNE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMETER', 'IS', 'THE', 'WORK', 'OF', 'MYRTILUS'] +5484-24318-0023-628: hyp=['THEN', 'JAPANE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMEANOR', 'IS', 'THE', 'WORK', 'OF', 'MYRTALIS'] +5484-24318-0024-629: ref=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0024-629: hyp=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0025-630: ref=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0025-630: hyp=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0026-631: ref=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WHEN', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'AND', 'PALLIATE', 'NOTHING'] +5484-24318-0026-631: hyp=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WITH', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'IMPALION', 'NOTHING'] +5484-24318-0027-632: ref=['SO', 'ARCHIAS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0027-632: hyp=['SARKAIUS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0028-633: ref=['HE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0028-633: hyp=['SHE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0029-634: ref=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', "UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'AND', 'HEADED', 'BY', 'AMYNTAS', 'THE', 'RHODIAN', 'CHRYSIPPUS', 'AND', 'PROCLUS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0029-634: hyp=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', "UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'END', 'HEADED', 'BY', 'A', 'MEANTIS', 'THE', 'RODIAN', 'CHRYSIPPUS', 'AND', 'PROCLAS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0030-635: ref=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0030-635: hyp=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 
'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0031-636: ref=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0031-636: hyp=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0032-637: ref=['EVEN', "DAPHNE'S", 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'RECEDED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0032-637: hyp=['EVEN', 'THESE', 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'WAS', 'SEATED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0033-638: ref=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0033-638: hyp=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0034-639: ref=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FESTAL', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COURSING', 'SO', 'WILDLY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMON'] +5484-24318-0034-639: hyp=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FEAST', 'OF', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COARSING', 'SO', 'WIDELY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMANN'] +5484-24318-0035-640: ref=['SHOUTED', 'HIS', 'FRIEND', 'SOTELES', 'IN', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'THIS', 'PAINFUL', 'WALK', 'HERMON'] +5484-24318-0035-640: hyp=['SHOUTED', 'HIS', 'FRIEND', 'SARTUOUS', 'AND', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'HIS', 'PAINFUL', 'WALK', 'HERE', 'ON'] +5484-24318-0036-641: ref=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0036-641: hyp=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0037-642: ref=['HONOUR', 'TO', 'MYRTILUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FESTAL', 
'ASSEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5484-24318-0037-642: hyp=['HONOUR', 'TO', 'MERTELUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FEAST', 'A', 'SEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5764-299665-0000-405: ref=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMBS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'ON', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THIS', 'GOD', 'GAVE', 'RAIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0000-405: hyp=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMPS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'IN', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THESE', 'GOD', 'GAVE', 'REIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0001-406: ref=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0001-406: hyp=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0002-407: ref=['WHETHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0002-407: hyp=['WHETHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0003-408: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUALLY', 'INFERIOR'] +5764-299665-0003-408: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUAL', 'INFERIOR'] +5764-299665-0004-409: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0004-409: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0005-410: ref=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0005-410: hyp=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0006-411: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'WAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0006-411: hyp=['HIS', 'IRRESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'RAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0007-412: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'THE', 'LASH', 'FOR', 'THE', 'BABES', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0007-412: hyp=['IF', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'A', 'LASH', 'FOR', 'THE', 'BABE', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0008-413: ref=['IS', 'THIS', 'GOD', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'THUMB', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0008-413: hyp=['IS', 'THE', 'SCOTT', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 
'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'TEMP', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0009-414: ref=['DID', 'THIS', 'GOD', 'ALLOW', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0009-414: hyp=['DID', 'THIS', 'GOT', 'ALONE', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0010-415: ref=['DID', 'HE', 'ALLOW', 'TYRANTS', 'TO', 'SHED', 'THE', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0010-415: hyp=['DID', 'HE', 'ALONE', 'TYRANTS', 'TO', 'SHED', 'A', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0011-416: ref=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0011-416: hyp=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0012-417: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'DEVOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FANGED', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0012-417: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'THE', 'FOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FACT', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0013-418: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WHERE', 'LIFE', 'FEEDS', 'ON', 'LIFE'] +5764-299665-0013-418: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WHERE', 'LIE', 'FEATS', 'ON', 'LIFE'] +5764-299665-0014-419: ref=['DID', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCE', 'THE', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEED', 'UPON', 'THE', 'OPTIC', 'NERVE', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0014-419: hyp=['THE', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCED', 'A', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEAT', 'UPON', 'THE', 'OPTIC', 'NERVES', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0015-420: ref=['FEAR', 'BUILDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFICE'] +5764-299665-0015-420: hyp=['FEAR', 'BIDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFICE'] +5764-299665-0016-421: ref=['FEAR', 'ERECTS', 'THE', 'CATHEDRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0016-421: hyp=['FEAR', 'ERECTS', 'THE', 'KITRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0017-422: ref=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0017-422: hyp=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0018-423: ref=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0018-423: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0019-424: ref=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'ENDURETH', 'FOREVER'] +5764-299665-0019-424: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'AND', 'DUET', 'FOREVER'] +5764-299665-0020-425: ref=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSANDS', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'OR', 'THAT', 'WITH', 'THE', 
'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0020-425: hyp=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSAND', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'ALL', 'THAT', 'WITH', 'THE', 'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0021-426: ref=['WAS', 'THERE', 'GOODNESS', 'WAS', 'THERE', 'WISDOM', 'IN', 'THIS'] +5764-299665-0021-426: hyp=['WAS', 'THEIR', 'GOODNESS', 'WAS', 'THEIR', 'WISDOM', 'IN', 'THIS'] +5764-299665-0022-427: ref=['OUGHT', 'THE', 'SUPERIOR', 'RACES', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0022-427: hyp=['ALL', 'THE', 'SUPERIOR', 'RAYS', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0023-428: ref=['MOST', 'PEOPLE', 'CLING', 'TO', 'THE', 'SUPERNATURAL'] +5764-299665-0023-428: hyp=['MOST', 'PEOPLE', 'CLINK', 'TO', 'THE', 'SUPERNATURAL'] +5764-299665-0024-429: ref=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0024-429: hyp=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0025-430: ref=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0025-430: hyp=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0026-431: ref=['MAN', 'ADVANCES', 'AND', 'NECESSARILY', 'ADVANCES', 'THROUGH', 'EXPERIENCE'] +5764-299665-0026-431: hyp=['MAN', 'ADVANCES', 'A', 'NECESSARILY', 'ADVANCES', 'THROUGH', 'EXPERIENCE'] +5764-299665-0027-432: ref=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COMES', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0027-432: hyp=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COME', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0028-433: ref=['HE', 'HAS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0028-433: hyp=['HE', 'IS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0029-434: ref=['A', 'CHILD', 'CHARMED', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPS', 'IT', 'WITH', 'ITS', 'DIMPLED', 'HAND'] +5764-299665-0029-434: hyp=['A', 'CHILD', 'SHOWN', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPED', 'IT', 'WITH', 'HIS', 'DIMPLED', 'HAND'] +5764-299665-0030-435: ref=['THE', 'POWER', 'THAT', 'WORKS', 'FOR', 'RIGHTEOUSNESS', 'HAS', 'TAUGHT', 'THE', 'CHILD', 'A', 'LESSON'] +5764-299665-0030-435: hyp=['THE', 'POWER', 'THAT', 'WORK', 'FOR', 'RIGHTEOUSNESS', 'HAD', 'TAUGHT', 'THE', 'CHILD', 'A', 'LESSON'] +5764-299665-0031-436: ref=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0031-436: hyp=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0032-437: ref=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SO', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0032-437: hyp=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SOUL', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0033-438: ref=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 'NATIONS'] +5764-299665-0033-438: hyp=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 
'NATIONS'] +5764-299665-0034-439: ref=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0034-439: hyp=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0035-440: ref=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'AS', 'MORAL'] +5764-299665-0035-440: hyp=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'S', 'MORAL'] +5764-299665-0036-441: ref=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OR', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0036-441: hyp=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OF', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0037-442: ref=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAS', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0037-442: hyp=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAD', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0038-443: ref=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0038-443: hyp=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0039-444: ref=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0039-444: hyp=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0040-445: ref=['A', 'MAN', 'PUTS', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0040-445: hyp=['A', 'MAN', 'BITS', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0041-446: ref=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0041-446: hyp=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0042-447: ref=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0042-447: hyp=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0043-448: ref=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0043-448: hyp=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0044-449: ref=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0044-449: hyp=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0045-450: ref=['HAS', 'CHRISTIANITY', 'DONE', 'GOOD'] +5764-299665-0045-450: hyp=['HESTERITY', 'DONEGOOD'] +5764-299665-0046-451: ref=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROL', 'WERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] +5764-299665-0046-451: hyp=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROLLED', 'WHERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] +5764-299665-0047-452: ref=['WHAT', 'HAS', 'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0047-452: hyp=['WHAT', 'HAS', 'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0048-453: ref=['COULD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0048-453: hyp=['GOOD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0049-454: ref=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0049-454: hyp=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0050-455: ref=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FOR', 'THEM'] +5764-299665-0050-455: hyp=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FAULT', 'THEM'] +5764-299665-0051-456: ref=['THEY', 'HATED', 'PLEASURE'] +5764-299665-0051-456: hyp=['THEY', 'HATED', 'PLEASURE'] 
+5764-299665-0052-457: ref=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0052-457: hyp=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0053-458: ref=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'UNADULTERATED', 'CURSE'] +5764-299665-0053-458: hyp=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'AN', 'ADULTERATED', 'CURSE'] +5764-299665-0054-459: ref=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORD', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0054-459: hyp=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORTH', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0055-460: ref=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0055-460: hyp=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0056-461: ref=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0056-461: hyp=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0057-462: ref=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'INFAMOUS', 'AS', 'THE', 'CATHOLIC', 'SPIRIT'] +5764-299665-0057-462: hyp=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'IN', 'FAMOUS', 'AS', 'THE', 'CATTLE', 'EXPERIOR'] +5764-299665-0058-463: ref=['HAS', 'THE', 'BIBLE', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0058-463: hyp=['HAS', 'THE', 'DIE', 'BUT', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0059-464: ref=['RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'HAS', 'FAILED'] +5764-299665-0059-464: hyp=['WHO', 'RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'HAS', 'FAILED'] +5764-299665-0060-465: ref=['RELIGION', 'HAS', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 'AND', 'THOUGHT'] +5764-299665-0060-465: hyp=['RELIGION', 'HATH', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 'AND', 'THOUGHT'] +5764-299665-0061-466: ref=['RELIGION', 'HAS', 'NEVER', 'MADE', 'MAN', 'FREE'] +5764-299665-0061-466: hyp=['RELIGION', 'IS', 'NEVER', 'MADE', 'MEN', 'FREE'] +5764-299665-0062-467: ref=['IT', 'HAS', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0062-467: hyp=['HE', 'JUST', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0063-468: ref=['ARE', 'CHRISTIANS', 'MORE', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0063-468: hyp=['AH', 'CHRISTIAN', 'SMALL', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0064-469: ref=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0064-469: hyp=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0065-470: ref=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HONOR', 'AS', 'ALMS'] 
+5764-299665-0065-470: hyp=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HUNGER', 'AS', 'ALMS'] +5764-299665-0066-471: ref=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'THIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0066-471: hyp=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'HIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0067-472: ref=['WE', 'MUST', 'HAVE', 'CORNER', 'STONES'] +5764-299665-0067-472: hyp=['WE', 'MUST', 'HAVE', 'CORN', 'THE', 'STONES'] +5764-299665-0068-473: ref=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'A', 'BASEMENT'] +5764-299665-0068-473: hyp=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'ABASEMENT'] +5764-299665-0069-474: ref=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0069-474: hyp=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0070-475: ref=['I', 'HAVE', 'A', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0070-475: hyp=['I', 'HAVE', 'IT', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0071-476: ref=['THE', 'FIRST', 'STONE', 'IS', 'THAT', 'MATTER', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0071-476: hyp=['THE', 'FIRST', 'STONE', 'EAST', 'AT', 'MATTHOR', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0072-477: ref=['IF', 'THESE', 'CORNER', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'AND', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0072-477: hyp=['IF', 'THIS', 'CORN', 'THE', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'END', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0073-478: ref=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'A', 'CREATOR'] +5764-299665-0073-478: hyp=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'A', 'CREATOR'] +5764-299665-0074-479: ref=['IT', 'FOLLOWS', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'ANY', 'DESIGN', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0074-479: hyp=['IT', 'FOLLOWED', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'AND', 'A', 'DESIGNED', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0075-480: ref=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0075-480: hyp=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0076-481: ref=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0076-481: hyp=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0077-482: ref=['THAT', 'WHICH', 'HAS', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0077-482: hyp=['THAT', 'WHICH', 'HATH', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0078-483: ref=['IN', 'THE', 'INFINITE', 'CHAIN', 'THERE', 'IS', 'AND', 'THERE', 'CAN', 
'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0078-483: hyp=['IN', 'THE', 'INFINITE', 'CHANGE', 'WREATHS', 'AND', 'THERE', 'CAN', 'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0079-484: ref=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0079-484: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0080-485: ref=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0080-485: hyp=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MAN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0081-486: ref=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELED'] +5764-299665-0081-486: hyp=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELLED'] +5764-299665-0082-487: ref=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'TRACED'] +5764-299665-0082-487: hyp=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'PRAISED'] +5764-299665-0083-488: ref=['FOR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'TRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0083-488: hyp=['FOUR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'CRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0084-489: ref=['WHY', 'HAVE', 'THE', 'REFORMERS', 'FAILED'] +5764-299665-0084-489: hyp=['WHY', 'HAVE', 'THE', 'REFORMED', 'FAITH'] +5764-299665-0085-490: ref=['THEY', 'DEPEND', 'ON', 'THE', 'LORD', 'ON', 'LUCK', 'AND', 'CHARITY'] +5764-299665-0085-490: hyp=['THEY', 'DEPEND', 'ON', 'THE', 'LOT', 'UNLUCK', 'AND', 'CHARITY'] +5764-299665-0086-491: ref=['THEY', 'LIVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0086-491: hyp=['THEY', 'LEAVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0087-492: ref=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADEMARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0087-492: hyp=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADE', 'MARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0088-493: ref=['NATURE', 'PRODUCES', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 'WITHOUT', 'THOUGHT'] +5764-299665-0088-493: hyp=['NATURE', 'PRODUCED', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 'WITHOUT', 'THOUGHT'] +5764-299665-0089-494: ref=['MUST', 'THE', 'WORLD', 'FOREVER', 'REMAIN', 'THE', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0089-494: hyp=['MISTER', 'BUILD', 'FOREVER', 'REMAINED', 'A', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0090-495: ref=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'BURDENS', 'AND', 'CURSES', 'WHY'] +5764-299665-0090-495: hyp=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'A', 'BURDEN', 'AND', 'CURSES', 'WHY'] +5764-299665-0091-496: ref=['PASSION', 'IS', 'AND', 'ALWAYS', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0091-496: hyp=['PASSION', 'EAST', 'AND', 'ALL', 'THIS', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0092-497: ref=['LAW', 'CAN', 'PUNISH', 'BUT', 'IT', 'CAN', 'NEITHER', 
'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0092-497: hyp=['LAW', 'CAN', 'PUNISH', 'THAT', 'IT', 'CAN', 'NEITHER', 'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0093-498: ref=['THIS', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0093-498: hyp=['THESE', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0094-499: ref=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0094-499: hyp=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0095-500: ref=['THIS', 'FREES', 'WOMAN'] +5764-299665-0095-500: hyp=['THIS', 'FREEZWAMEN'] +5764-299665-0096-501: ref=['POVERTY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0096-501: hyp=['BAVARY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0097-502: ref=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FORTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FACE', 'THE', 'FUTURE', 'WITH', 'A', 'SMILE'] +5764-299665-0097-502: hyp=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FAULTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FAITH', 'THE', 'FUTURE', 'WITH', 'US', 'MIND'] +6070-63485-0000-2599: ref=["THEY'RE", 'DONE', 'FOR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOW', 'KEY', 'TO', 'THE', 'CHOUETTE', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0000-2599: hyp=['THERE', 'DONE', 'FAR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOKI', 'TO', 'THE', 'SWEAT', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0001-2600: ref=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0001-2600: hyp=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0002-2601: ref=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SARAH', 'AND', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] +6070-63485-0002-2601: hyp=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SEREN', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] +6070-63485-0003-2602: ref=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPIS', 'FRANC', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPENED', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0003-2602: hyp=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPPY', 'FRANK', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPEN', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0004-2603: ref=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SEYTON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0004-2603: 
hyp=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SETON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0005-2604: ref=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'GRUMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0005-2604: hyp=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'TREMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0006-2605: ref=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0006-2605: hyp=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0007-2606: ref=['OH', 'AH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0007-2606: hyp=['UH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0008-2607: ref=['THEN', 'ADDRESSING', 'THOMAS', 'SEYTON', 'YOU', 'KNOW', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0008-2607: hyp=['THEN', 'ADDRESSING', 'THOMAS', 'SETTON', 'YOU', 'KNOW', 'THE', 'PLANE', 'OF', 'SAINT', 'DENIS'] +6070-63485-0009-2608: ref=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAVE', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0009-2608: hyp=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAD', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0010-2609: ref=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0010-2609: hyp=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0011-2610: ref=['WRETCH', 'I', 'DO', 'NOT', 'SEEK', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0011-2610: hyp=['WRETCH', 'I', 'DO', 'NOT', 'SEE', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0012-2611: ref=["LET'S", 'GO', 'AND', 'MEET', 'HIM'] +6070-63485-0012-2611: hyp=["LET'S", 'GO', 'AND', 'MEET', 'HIM'] +6070-63485-0013-2612: ref=['OLD', 'BOY', 'IT', 'WILL', 'PAY', 'FOR', 'LOOKING', 'AFTER'] +6070-63485-0013-2612: hyp=['OLD', 'BY', 'IT', 'WILL', 'PAY', 'FOR', 'LOOKING', 'AFTER'] +6070-63485-0014-2613: ref=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0014-2613: hyp=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0015-2614: ref=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0015-2614: hyp=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0016-2615: ref=['BETWEEN', 'SAINT', 'OUEN', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLTE', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0016-2615: hyp=['BETWEEN', 'SAINT', 'WAT', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLT', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0017-2616: ref=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 
'THE', 'SELF', 'STYLED', 'FAN', 'PAINTER'] +6070-63485-0017-2616: hyp=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 'THE', 'SELF', 'STYLED', 'PAN', 'PAINTER'] +6070-63485-0018-2617: ref=['THE', 'FIACRE', 'STARTED'] +6070-63485-0018-2617: hyp=['THE', 'FIACRE', 'STARTED'] +6070-86744-0000-2569: ref=['FRANZ', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0000-2569: hyp=['FRANCE', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0001-2570: ref=['MY', 'VERY', 'GOOD', 'FRIEND', 'AND', 'EXCELLENT', 'NEIGHBOR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0001-2570: hyp=['MY', 'VERY', 'GOOD', 'FRIEND', 'AN', 'EXCELLENT', 'NEIGHBOUR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0002-2571: ref=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 'MORCERF', 'ALTHOUGH', 'OF', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 'UNHESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0002-2571: hyp=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 'MORCERF', 'ALTHOUGH', 'A', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 'AM', 'HESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0003-2572: ref=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0003-2572: hyp=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0004-2573: ref=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0004-2573: hyp=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'IN', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0005-2574: ref=['SHALL', 'WE', 'MAKE', 
'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0005-2574: hyp=['SHOW', 'WE', 'MAKE', 'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0006-2575: ref=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'A', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'ADDED', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THE', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0006-2575: hyp=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'THE', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'I', 'DID', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THAT', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0007-2576: ref=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURT', 'YARD', 'ENTIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0007-2576: hyp=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURTYARD', 'AND', 'TIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0008-2577: ref=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'HIS', 'TABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PIECE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] +6070-86744-0008-2577: hyp=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'ESTABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PEACE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] +6070-86744-0009-2578: ref=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0009-2578: hyp=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0010-2579: ref=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0010-2579: hyp=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0011-2580: ref=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] +6070-86744-0011-2580: hyp=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] 
+6070-86744-0012-2581: ref=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOR'] +6070-86744-0012-2581: hyp=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOUR'] +6070-86744-0013-2582: ref=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0013-2582: hyp=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0014-2583: ref=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEEM', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0014-2583: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEE', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0015-2584: ref=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0015-2584: hyp=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0016-2585: ref=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0016-2585: hyp=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0017-2586: ref=['UPON', 'MY', 'HONOR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0017-2586: hyp=['UPON', 'MY', 'HONOUR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0018-2587: ref=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'GROTTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 'SAIL', 'TOWARD', 'PORTO', 'VECCHIO'] +6070-86744-0018-2587: hyp=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'GROTTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 
'SAIL', 'TOWARD', 'PORT', 'OF', 'QUICKU'] +6070-86744-0019-2588: ref=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLOSSEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0019-2588: hyp=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLISEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0020-2589: ref=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0020-2589: hyp=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0021-2590: ref=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0021-2590: hyp=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0022-2591: ref=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRY', 'IS', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONGUE', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HAVE', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0022-2591: hyp=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRIES', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONG', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HATH', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0023-2592: ref=['CERTAINLY', 'THESE', 'ARE', 'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0023-2592: hyp=['CERTAINLY', 'THESE', 'ARE', 'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0024-2593: ref=['MY', 'DEAR', 'FRANZ', 'REPLIED', 'ALBERT', 'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0024-2593: hyp=['MY', 'DEAR', 'FRIENDS', 'REPLIED', 'ALBERT', 'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0025-2594: ref=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRY', 'IS', 'HE', 'A', 'NATIVE'] +6070-86744-0025-2594: hyp=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 
'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRIES', 'HE', 'A', 'NATIVE'] +6070-86744-0026-2595: ref=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SIGNOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0026-2595: hyp=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SENOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0027-2596: ref=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0027-2596: hyp=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0028-2597: ref=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0028-2597: hyp=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0029-2598: ref=['AND', 'NOW', 'MY', 'DEAR', 'FRANZ', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86744-0029-2598: hyp=['AND', 'NOW', 'MY', 'DEAR', 'FRANCE', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86745-0000-2549: ref=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MERIT', 'A', 'MORE', 'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0000-2549: hyp=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MARRIT', 'A', 'MORE', 'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0001-2550: ref=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0001-2550: hyp=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0002-2551: ref=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALET', 'ENTERED', 'HE', 'COMPOSED', 
'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0002-2551: hyp=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALLEY', 'ENTERED', 'HE', 'COMPOSED', 'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0003-2552: ref=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0003-2552: hyp=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0004-2553: ref=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0004-2553: hyp=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0005-2554: ref=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0005-2554: hyp=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0006-2555: ref=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0006-2555: hyp=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0007-2556: ref=['GOOD', 'MORNING', 'LUCIEN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0007-2556: hyp=['GOOD', 'MORNING', 'LUCIEN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0008-2557: ref=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0008-2557: hyp=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0009-2558: ref=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0009-2558: hyp=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0010-2559: ref=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGES', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'SEVEN'] +6070-86745-0010-2559: hyp=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGE', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'THE', 'SEVENTH'] +6070-86745-0011-2560: ref=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0011-2560: hyp=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0012-2561: ref=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0012-2561: hyp=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0013-2562: ref=['PESTE', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0013-2562: hyp=['PESTS', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 
'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0014-2563: ref=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0014-2563: hyp=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0015-2564: ref=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0015-2564: hyp=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0016-2565: ref=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0016-2565: hyp=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0017-2566: ref=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'REAP', 'A', 'LITTLE', 'BLUE'] +6070-86745-0017-2566: hyp=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'READ', 'A', 'LITTLE', 'BLUE'] +6070-86745-0018-2567: ref=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIEN'] +6070-86745-0018-2567: hyp=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIAN'] +6070-86745-0019-2568: ref=['WITH', 'YOUR', 'TALENTS', 'YOU', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6070-86745-0019-2568: hyp=['WITH', 'THE', 'OR', 'TALONS', 'HE', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6128-63240-0000-503: ref=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0000-503: hyp=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0001-504: ref=['THAT', 'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0001-504: hyp=['THAT', 'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0002-505: ref=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] +6128-63240-0002-505: hyp=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] +6128-63240-0003-506: ref=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'RATHER', 'A', 'WORK', 'A', 'DAY', 'ASPECT'] +6128-63240-0003-506: hyp=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'RATHER', 'A', 'WORKADAY', 'ASPECT'] +6128-63240-0004-507: ref=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRUMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0004-507: hyp=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 
'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRUMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0005-508: ref=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'POOR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LOOK', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0005-508: hyp=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'FAR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LIVE', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0006-509: ref=['THOSE', 'OF', 'BASIL', 'RANSOM', 'WERE', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATURE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0006-509: hyp=['THOSE', 'OF', 'BAESON', 'RANSOM', 'WENT', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATUE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0007-510: ref=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'A', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0007-510: hyp=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0008-511: ref=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'CONSONANTS', 'AND', 'SWALLOWED', 'HIS', 'VOWELS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 'ELISIONS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPANSE', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0008-511: hyp=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'COUNTENANCE', 'AND', 'SWALLOWED', 'HIS', 'VOWELS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 'ELYGIANS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 
'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPOUNDS', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0009-512: ref=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0009-512: hyp=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0010-513: ref=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'RAPPERS', 'AND', 'ROARING', 'RADICALS'] +6128-63240-0010-513: hyp=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'WRAPPERS', 'AND', 'ROWING', 'RADICALS'] +6128-63240-0011-514: ref=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0011-514: hyp=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0012-515: ref=['HE', 'LOOKED', 'AT', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0012-515: hyp=['HE', 'LIFTED', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0013-516: ref=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0013-516: hyp=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0014-517: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0014-517: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0015-518: ref=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0015-518: hyp=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0016-519: ref=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0016-519: hyp=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0017-520: ref=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'AT', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LUNA'] +6128-63240-0017-520: hyp=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'AT', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LENA'] +6128-63240-0018-521: ref=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 'IF', 'YOU', 'WERE'] +6128-63240-0018-521: hyp=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 'IF', 'YOU', 'WERE'] +6128-63240-0019-522: ref=['AND', 'MISSUS', 'LUNA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0019-522: hyp=['AND', 'MISSUS', 'LENA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0020-523: ref=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0020-523: hyp=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0021-524: ref=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BOSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0021-524: hyp=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BUSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0022-525: ref=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WENT', 'THROUGH', 
'ANY', 'FORMS'] +6128-63240-0022-525: hyp=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WON', 'THROUGH', 'ANY', 'FORMS'] +6128-63240-0023-526: ref=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AT', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0023-526: hyp=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AND', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0024-527: ref=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'WHO', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNA'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0024-527: hyp=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'HER', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNAR'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0025-528: ref=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'COLD', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0025-528: hyp=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'CALLED', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0026-529: ref=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WE', 'ARE', 'GOING', 'TO', 'A', 'THEATRE', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINE', 'SO', 'EARLY'] +6128-63240-0026-529: hyp=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WILL', "DON'T", 'YOU', 'THEATER', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINED', 'SO', 'EARLY'] +6128-63240-0027-530: ref=['MISSUS', "LUNA'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOYAGE'] +6128-63240-0027-530: hyp=['MISSUS', "LUNDY'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOY', 'EACH'] +6128-63241-0000-557: ref=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BOEOTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0000-557: hyp=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BE', 'OTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0001-558: ref=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 
'HIS', 'OWN', 'SOFT', 'CLIME', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORILY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0001-558: hyp=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 'HIS', 'OWN', 'SOFT', 'CLIMB', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0002-559: ref=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0002-559: hyp=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0003-560: ref=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0003-560: hyp=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0004-561: ref=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0004-561: hyp=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0005-562: ref=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0005-562: hyp=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0006-563: ref=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HE', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'A', 'GNAWING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0006-563: hyp=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HIS', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'ANNAWING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0007-564: ref=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BASIL', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] 
+6128-63241-0007-564: hyp=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BALES', 'AT', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] +6128-63241-0008-565: ref=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANSOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HOW', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0008-565: hyp=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANDOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HER', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0009-566: ref=['OLIVE', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0009-566: hyp=['OLIV', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0010-567: ref=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'RULE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0010-567: hyp=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'ROLE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0011-568: ref=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASIL', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0011-568: hyp=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASE', 'OR', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0012-569: ref=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0012-569: hyp=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0013-570: ref=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THOUGH', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BASIL', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'CONTEND'] +6128-63241-0013-570: hyp=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 
'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THOUGH', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BEESER', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'COMPEND'] +6128-63244-0000-531: ref=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0000-531: hyp=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0001-532: ref=['HOW', 'DID', 'THE', 'LADIES', 'ON', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOT'] +6128-63244-0001-532: hyp=['HOW', 'DID', 'THE', 'LADIES', 'AND', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOT'] +6128-63244-0002-533: ref=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0002-533: hyp=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0003-534: ref=['WITH', 'HER', 'IMMENSE', 'SYMPATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFORMERS', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0003-534: hyp=['WITH', 'HER', 'MENST', 'SYMPATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFUSE', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0004-535: ref=['OLIVE', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKED', 'ABOUT', 'AS', 'IF', 'IT', 'WERE', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0004-535: hyp=['I', 'HAVE', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKS', 'ABOUT', 'AS', 'IF', 'IT', 'WAS', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WHERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0005-536: ref=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'LIVED', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 'MISSUS', 'FARRINDER', 'WHO', 'LIVED', 'AT', 'ROXBURY', 'OUGHT', 'NOT', 'TO', 'MIX', 'THINGS', 'UP'] +6128-63244-0005-536: hyp=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'LIFT', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 'MISSUS', 'FARINGDER', 'WHO', 'LIVED', 'AT', 'BRAXBURY', 'OUGHT', 'NOT', 'TO', 'MAKE', 'SPENCE', 'UP'] +6128-63244-0006-537: ref=['SHE', 'KNEW', 'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIERARCHY', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'FARRINDER', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'A', 'REPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0006-537: hyp=['SHE', 'KNEW', 'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIRAKEE', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'BARRANGERS', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'UNREPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0007-538: ref=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0007-538: hyp=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 
'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0008-539: ref=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0008-539: hyp=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0009-540: ref=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WHITE', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LAST', 'ANALYSIS', 'THAT', 'THEY', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0009-540: hyp=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WORLD', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LAST', 'ANALYSIS', 'THAT', 'THE', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0010-541: ref=['OLIVE', 'CHANCELLOR', 'WONDERED', 'HOW', 'MISSUS', 'FARRINDER', 'WOULD', 'TREAT', 'THAT', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0010-541: hyp=['OUT', 'OF', 'CHANCELLORED', 'HOW', 'MISSUS', 'THINDER', 'WOULD', 'TREAT', 'THEIR', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0011-542: ref=['IF', 'IT', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CERTAIN', 'STEPS', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0011-542: hyp=['IT', 'WOULD', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CIRCUMST', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0012-543: ref=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0012-543: hyp=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0013-544: ref=['RAISE', 'THE', 'STANDARD', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'A', 'THOUSAND', 'NAMES'] +6128-63244-0013-544: hyp=['FOR', 'INSTANDED', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'A', 'SPASM', 'NAMES'] +6128-63244-0014-545: ref=['I', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 'WELL', 'AS', 'THE', 'BIG', 'CURRENTS', 'MISSUS', 'FARRINDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THE', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0014-545: hyp=['AND', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 'WELL', 'AS', 'THE', 'BIG', 'CURRANTS', 'MISSUS', 'FARNDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THIS', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0015-546: ref=['SAID', 'OLIVE', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', 'REMISSION', 'OF', 'RESPONSIBILITY'] +6128-63244-0015-546: hyp=['SAID', 'OLDEST', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', "REMISSIONARY'S", 'RESPONSIBILITY'] +6128-63244-0016-547: ref=['I', 'WANT', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0016-547: hyp=['I', 'WARNED', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0017-548: ref=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENFRANCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] 
+6128-63244-0017-548: hyp=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENCOMCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] +6128-63244-0018-549: ref=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0018-549: hyp=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0019-550: ref=['THEY', 'WERE', 'HER', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0019-550: hyp=['THEY', 'WERE', 'HIS', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0020-551: ref=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'THE', 'JUST', 'REVOLUTION', 'IT', 'MUST', 'TRIUMPH', 'IT', 'MUST', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOOD', 'STAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0020-551: hyp=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'DESTROVISION', 'IT', 'WAS', 'TRIUMPH', 'IT', 'WAS', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOODSTAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0021-552: ref=['THEY', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PULSE', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0021-552: hyp=['THERE', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PART', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0022-553: ref=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'AS', 'THIS', 'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SAW', 'THE', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MIST', 'OF', 'EMOTION', 'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'AS', 'SUCCESS'] +6128-63244-0022-553: hyp=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'OF', 'THIS', 'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SOLDOM', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MISTABILATION', 'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'IS', 'SUCCESS'] +6128-63244-0023-554: ref=['WHEN', 'MISS', 'BIRDSEYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'HER', 'FAMILIAR', 'HER', 'COMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITARY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MARTYR'] +6128-63244-0023-554: hyp=['WHEN', 'MISS', "BIRD'S", 'EYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'FAMILIAR', 'HYCOMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITARY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MASTER'] +6128-63244-0024-555: ref=['OLIVE', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'UNREWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OR', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0024-555: hyp=['ONLY', 'IF', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'IN', 
'REWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OF', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0025-556: ref=['SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMPLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6128-63244-0025-556: hyp=['IF', 'SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMBLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6432-63722-0000-2431: ref=['BUT', 'SCUSE', 'ME', "DIDN'T", 'YO', 'FIGGER', 'ON', 'DOIN', 'SOME', 'DETECTIN', 'AN', 'GIVE', 'UP', 'FISHIN'] +6432-63722-0000-2431: hyp=['PUSE', 'ME', 'THEN', "YOU'VE", 'GONE', 'DOING', 'SOME', 'DETECTIVE', 'AND', 'GIVIN', 'UP', 'FISHIN'] +6432-63722-0001-2432: ref=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0001-2432: hyp=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0002-2433: ref=["I'M", 'GOING', 'OFF', 'FISHING', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'I', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0002-2433: hyp=["I'M", 'GOIN', 'OUR', 'FISHIN', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'AND', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0003-2434: ref=['GET', 'READY', 'SHAG', 'YES', 'SAH', 'COLONEL'] +6432-63722-0003-2434: hyp=['GET', 'READY', 'SHAGG', 'YES', 'A', 'COLONEL'] +6432-63722-0004-2435: ref=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 'FACTS', 'IN', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0004-2435: hyp=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 'FACTS', 'IN', 'THE', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0005-2436: ref=['PINKUS', 'AND', 'DONOVAN', "HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0005-2436: hyp=['PEGAS', 'AND', 'DONOVAN', "HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0006-2437: ref=['CARROLL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'CURL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0006-2437: hyp=['KAL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'GIRL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0007-2438: ref=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0007-2438: hyp=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0008-2439: ref=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0008-2439: hyp=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 
'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0009-2440: ref=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0009-2440: hyp=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0010-2441: ref=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0010-2441: hyp=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0011-2442: ref=["YOU'RE", 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0011-2442: hyp=["YOU'RE", 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0012-2443: ref=["I'M", 'WORKING', 'IN', 'THE', 'INTERESTS', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0012-2443: hyp=["I'M", 'WORKING', 'IN', 'THE', 'INTEREST', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0013-2444: ref=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCES', 'LIKE'] +6432-63722-0013-2444: hyp=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCES', 'LIKE'] +6432-63722-0014-2445: ref=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0014-2445: hyp=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0015-2446: ref=['GAD', 'EXCLAIMED', 'THE', 'COLONEL'] +6432-63722-0015-2446: hyp=['GAD', 'EXPLAINED', 'THE', 'COLONEL'] +6432-63722-0016-2447: ref=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0016-2447: hyp=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0017-2448: ref=['LOOK', 'HERE', 'COLONEL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0017-2448: hyp=['LOOK', 'HERE', 'CAROL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0018-2449: ref=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0018-2449: hyp=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0019-2450: ref=['I', 'HAPPENED', 'TO', 'SEE', 'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', 'SINGA', "PHUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0019-2450: hyp=['I', 'HAPPEN', 'TO', 'SEE', 'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', "SINGAFUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0020-2451: ref=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0020-2451: hyp=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0021-2452: ref=['PHUT', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'SHERE', 'ALI'] +6432-63722-0021-2452: hyp=['FIVE', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'SHEAR', 'ALI'] +6432-63722-0022-2453: ref=['ANYHOW', 'HE', 'AND', 'PHUT', "DIDN'T", 'GET', 'ALONG', 'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0022-2453: hyp=['ANYHOW', 'HE', 'INFECTED', 'GET', 'ALONG', 
'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0023-2454: ref=['NEIGHBORS', 'OFTEN', 'HEARD', 'EM', 'SCRAPPIN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0023-2454: hyp=['LABORS', 'OFTEN', 'HEARD', 'HIM', 'SCRAP', 'IN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0024-2455: ref=['TOWARD', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0024-2455: hyp=['TO', 'OUR', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0025-2456: ref=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', "ALI'S", 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0025-2456: hyp=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', 'ALWAYS', 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0026-2457: ref=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0026-2457: hyp=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0027-2458: ref=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAGOS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBORS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYIN', 'I', 'SEE'] +6432-63722-0027-2458: hyp=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAG', 'WAS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBORS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYING', 'I', 'SEE'] +6432-63722-0028-2459: ref=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0028-2459: hyp=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0029-2460: ref=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', 'HEADQUARTERS', 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'YET', 'SEE'] +6432-63722-0029-2460: hyp=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', "HEADQUARTER'S", 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'AT', 'SEE'] +6432-63722-0030-2461: ref=["YOU'RE", 'NOT', 'AS', 'SQUEAMISH', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HAND', 'AND', 'IN', 'A', "WOMAN'S"] +6432-63722-0030-2461: hyp=["YOU'RE", 'NOT', 'A', 'SCREAMY', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HANDS', 'AND', 'A', "WOMAN'S"] +6432-63722-0031-2462: ref=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SKEPTICAL'] +6432-63722-0031-2462: hyp=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SCEPTICAL'] +6432-63722-0032-2463: ref=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0032-2463: hyp=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0033-2464: ref=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0033-2464: hyp=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0034-2465: ref=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 
'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0034-2465: hyp=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0035-2466: ref=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'AT', 'HIS', 'IRISH', 'COMPATRIOT', 'SLIGHTLY', 'LAUGHED', 'THE', 'COLONEL'] +6432-63722-0035-2466: hyp=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'OF', 'HIS', 'IRISH', 'COMPATRIOT', 'SLIGHTLY', "THEY'LL", 'HAVE', 'THE', 'COLONEL'] +6432-63722-0036-2467: ref=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0036-2467: hyp=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0037-2468: ref=['AND', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEADPENCIL'] +6432-63722-0037-2468: hyp=['IN', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEAD', 'PENCIL'] +6432-63722-0038-2469: ref=['AND', 'DONOVAN', 'TAKE', 'A', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0038-2469: hyp=['AND', 'DONALD', 'TAKE', 'HER', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0039-2470: ref=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0039-2470: hyp=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0040-2471: ref=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0040-2471: hyp=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0041-2472: ref=['SIMPLY', 'BECAUSE', 'THIS', 'WATCH'] +6432-63722-0041-2472: hyp=['SIMPLY', 'BECAUSE', 'THIS', 'WATCH'] +6432-63722-0042-2473: ref=['SOME', 'ONE', 'OUT', 'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0042-2473: hyp=['SOME', 'ONE', 'OUT', 'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0043-2474: ref=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0043-2474: hyp=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0044-2475: ref=['SINGA', 'PHUT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0044-2475: hyp=['SHING', 'AFOOT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0045-2476: ref=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'TWO', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0045-2476: hyp=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'JEW', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0046-2477: ref=['WHAT', 'ARE', 'THE', 'CHANCES', 'OF', 'GETTING', 'HIM', 'OFF', 
'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0046-2477: hyp=['WHAT', 'ARE', 'THE', 'CHURCHES', 'OF', 'GETTING', 'HIM', 'OFF', 'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0047-2478: ref=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0047-2478: hyp=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0048-2479: ref=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'WITH', 'A', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0048-2479: hyp=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'FOR', 'THE', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0049-2480: ref=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0049-2480: hyp=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0050-2481: ref=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0050-2481: hyp=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0051-2482: ref=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0051-2482: hyp=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0052-2483: ref=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0052-2483: hyp=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0053-2484: ref=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', "I'LL", 'LAND', 'MY', 'FISH'] +6432-63722-0053-2484: hyp=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', 'I', 'ALAN', 'MY', 'FISH'] +6432-63722-0054-2485: ref=["I'D", 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 'KENNETH'] +6432-63722-0054-2485: hyp=['I', 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 'KENNETH'] +6432-63722-0055-2486: ref=['WHAT', 'IS', 'IT', 'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0055-2486: hyp=['WHAT', 'IS', 'IT', 'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0056-2487: ref=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0056-2487: hyp=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0057-2488: ref=['YES', 'IT', 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0057-2488: hyp=['YES', "IT'S", 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0058-2489: ref=['NO', 'ALIMONY', 'REPEATED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0058-2489: hyp=['NO', 'ALIMONY', 'REPLIED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0059-2490: ref=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63722-0059-2490: hyp=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63723-0000-2491: ref=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0000-2491: 
hyp=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0001-2492: ref=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'THE', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREEL', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0001-2492: hyp=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'A', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREOLE', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0002-2493: ref=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0002-2493: hyp=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0003-2494: ref=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0003-2494: hyp=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0004-2495: ref=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0004-2495: hyp=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FINISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0005-2496: ref=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0005-2496: hyp=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0006-2497: ref=['IT', "ISN'T", 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0006-2497: hyp=['IT', "ISN'T", 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0007-2498: ref=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 'INTO', 'THAT', 'OF', 'THE', 'BASER', 'SORT'] +6432-63723-0007-2498: hyp=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 'INTO', 'THAT', 'OF', 'A', 'BASER', 'SORT'] +6432-63723-0008-2499: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'LARCH', 'STRUCK', 'HER', 'THAT', 'THERE', 'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0008-2499: hyp=['IT', 'ALL', 'MEAN', 'THAT', 'LARGE', 'STRUCK', 'HER', 'THAT', 'THERE', 'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0009-2500: ref=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0009-2500: hyp=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0010-2501: ref=['AARON', "GRAFTON'S", 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTEDLY', 'CONFIRMED'] +6432-63723-0010-2501: hyp=['AARON', 'GRAFTON', 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTED', 'GREAT', 'CONFIRMED'] +6432-63723-0011-2502: ref=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0011-2502: 
hyp=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0012-2503: ref=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0012-2503: hyp=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0013-2504: ref=['BECAUSE', 'LARCH', 'MADE', 'NO', 'DEFENSE'] +6432-63723-0013-2504: hyp=['BECAUSE', 'LARGE', 'MADE', 'NO', 'DEFENCE'] +6432-63723-0014-2505: ref=['LARCH', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0014-2505: hyp=['LARGE', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0015-2506: ref=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'COURT', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0015-2506: hyp=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'CORP', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0016-2507: ref=['NO', 'BUT', 'HE', 'WILL', 'OR', "I'LL", 'SUE', 'HIM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0016-2507: hyp=['NO', 'BUT', 'HE', 'WILL', 'OR', 'ELSEWOO', 'EM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0017-2508: ref=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0017-2508: hyp=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0018-2509: ref=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0018-2509: hyp=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0019-2510: ref=['THE', 'MURDER', 'OF', 'MISSUS', 'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0019-2510: hyp=['THE', 'MURDER', 'OF', 'MISSUS', 'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0020-2511: ref=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0020-2511: hyp=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0021-2512: ref=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALLROOM', 'AND', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0021-2512: hyp=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'THE', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALL', 'ROOM', 'IN', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0022-2513: ref=['LARCH', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0022-2513: hyp=['LARGE', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0023-2514: 
ref=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0023-2514: hyp=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0024-2515: ref=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'IT', 'WAS', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'CYNTHIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0024-2515: hyp=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'OVER', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'SANTIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0025-2516: ref=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0025-2516: hyp=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0026-2517: ref=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0026-2517: hyp=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0027-2518: ref=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0027-2518: hyp=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0028-2519: ref=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARCH', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0028-2519: hyp=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARGE', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0029-2520: ref=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARCH', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0029-2520: hyp=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARGE', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'A', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0030-2521: ref=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMIRCHED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0030-2521: hyp=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMARGED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0031-2522: ref=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'THE', 
'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0031-2522: hyp=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'A', 'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0032-2523: ref=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0032-2523: hyp=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'TO', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0033-2524: ref=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0033-2524: hyp=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0034-2525: ref=["I'M", 'GOING', 'TO', 'RECTIFY', 'THEM', 'BUT', 'IT', 'WILL', 'TAKE', 'TIME'] +6432-63723-0034-2525: hyp=["I'M", 'GOING', 'DIRECT', 'BY', 'THEM', 'BUT', 'I', 'WILL', 'TAKE', 'TIME'] +6432-63723-0035-2526: ref=["IT'S", 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0035-2526: hyp=['HIS', 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0036-2527: ref=['SO', 'KING', 'GOT', 'BAIL', 'WHO', 'PUT', 'IT', 'UP'] +6432-63723-0036-2527: hyp=['SO', 'KING', 'GOD', 'BAIL', 'WHO', 'PUT', 'IT', 'UP'] +6432-63723-0037-2528: ref=['IT', 'WAS', 'HIGH', 'LARCH'] +6432-63723-0037-2528: hyp=['IT', 'WAS', 'IRCH'] +6432-63723-0038-2529: ref=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0038-2529: hyp=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0039-2530: ref=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 'TO', 'BE', 'IN', 'JUST', 'THE', 'SAME'] +6432-63723-0039-2530: hyp=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 'TO', 'BE', 'IN', 'JUST', 'THE', 'SAME'] +6432-63723-0040-2531: ref=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0040-2531: hyp=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0041-2532: ref=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0041-2532: hyp=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0042-2533: ref=['THANK', 'YOU', 'NO'] +6432-63723-0042-2533: hyp=['THANK', 'YOU', 'NO'] +6432-63723-0043-2534: ref=["I'M", 'AFRAID', 'MY', 'DIGESTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0043-2534: hyp=["I'M", 'AFRAID', 'MY', 'DIGESTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0044-2535: 
ref=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0044-2535: hyp=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0045-2536: ref=['THE', 'STOPPED', 'CLOCKS', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0045-2536: hyp=['THEY', 'STOPPED', 'CLUX', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0046-2537: ref=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0046-2537: hyp=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0047-2538: ref=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0047-2538: hyp=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0048-2539: ref=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0048-2539: hyp=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0049-2540: ref=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0049-2540: hyp=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0050-2541: ref=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0050-2541: hyp=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0051-2542: ref=['WHY', 'POLONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0051-2542: hyp=['WHY', 'BONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0052-2543: ref=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0052-2543: hyp=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0053-2544: ref=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COINS', 'ON', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WADDED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0053-2544: hyp=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COIN', 'DOWN', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WATERED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0054-2545: ref=["IT'S", "IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0054-2545: hyp=["IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 
'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0055-2546: ref=['I', 'WENT', 'OVER', 'THEM', 'THE', 'OTHER', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0055-2546: hyp=['I', 'WENT', 'OVER', 'THEM', 'NEAR', 'THE', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0056-2547: ref=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0056-2547: hyp=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0057-2548: ref=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6432-63723-0057-2548: hyp=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6938-70848-0000-1216: ref=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0000-1216: hyp=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0001-1217: ref=['THE', 'COLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0001-1217: hyp=['THE', 'GOLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0002-1218: ref=['ASKED', 'A', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0002-1218: hyp=['AS', 'TO', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0003-1219: ref=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0003-1219: hyp=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0004-1220: ref=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0004-1220: hyp=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0005-1221: ref=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WHO', 'IS', 'LENIN', 'A', 'GERMAN'] +6938-70848-0005-1221: hyp=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WITH', 'LANY', 'A', 'GERMAN'] +6938-70848-0006-1222: ref=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLUTIONIST', 'A', 'PROVOCATOR', 'THEY', 'BELLOWED', 'AT', 'HIM'] +6938-70848-0006-1222: hyp=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLISHNESS', 'APPROCATUR', 'THEY', 'BELOVED', 'AT', 'HIM'] +6938-70848-0007-1223: ref=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'BUT', "YOU'RE", 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSSIA'] +6938-70848-0007-1223: hyp=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'A', 'SHEPHERD', 'YOU', 'ARE', 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSHIRE'] +6938-70848-0008-1224: ref=['THE', 'PEASANTS', 'ARE', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'WAIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0008-1224: hyp=['TO', 'PIECE', 'AND', 'OTHER', 'PEOPLE', 'OF', 'RUSSIA', 'WRIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0009-1225: ref=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKINGMEN', 'LIKE', 'OURSELVES'] +6938-70848-0009-1225: hyp=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKING', 'MEN', 'LIKE', 'OURSELVES'] 
+6938-70848-0010-1226: ref=['THESE', 'MEN', 'ESPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0010-1226: hyp=['THIS', 'MAN', 'HAS', 'SPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0011-1227: ref=['THESE', 'LAST', 'WERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0011-1227: hyp=['THIS', 'LAST', 'WHERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0012-1228: ref=['WHEREUPON', 'THE', 'OLD', 'EXECUTIVE', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0012-1228: hyp=['WHEREUPON', 'THE', 'OLD', 'EXECUTED', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0013-1229: ref=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0013-1229: hyp=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0014-1230: ref=['FEARFUL', 'TUMULT', 'CRIES', 'DOWN', 'WITH', 'THE', 'BOLSHEVIKI'] +6938-70848-0014-1230: hyp=['FEARFUL', 'TUMULT', 'CHRISTOWN', 'WITH', 'THE', 'PULCHEVIKI'] +6938-70848-0015-1231: ref=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'SMOLNY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', "THAT'S", 'ALL', 'LET', 'ANY', 'ONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0015-1231: hyp=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'MORLEY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', 'THAT', 'SOUL', 'LET', 'ANYONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0016-1232: ref=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STATUS', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0016-1232: hyp=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STRATORS', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0017-1233: ref=['BY', 'DECLARING', 'THE', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'BLOCK', 'THE', 'REELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0017-1233: hyp=['BY', 'DECLINING', 'THEIR', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'PLOT', 'THE', 'RE', 'ELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0018-1234: ref=['BUT', 'THIS', 'WORKED', 'BOTH', 'WAYS', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONISTS', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTIVE', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0018-1234: hyp=['BUT', 'THIS', 'WORTH', 'BOTH', 'WAYS', 'THE', 'LAP', 'SOCIALLY', 'REVOLUTIONIST', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTING', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0019-1235: ref=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRARIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONARIES'] +6938-70848-0019-1235: hyp=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LAP', 'SOCIALIST', 
'REVOLUTIONARIES'] +6938-70848-0020-1236: ref=['THE', 'CONSTITUENT', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0020-1236: hyp=['THE', 'CONSTITUTE', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0021-1237: ref=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0021-1237: hyp=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0022-1238: ref=['THE', 'FIRST', 'STAGE', 'WAS', 'THE', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'THE', 'CRUSHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALISTS', 'AND', 'LAND', 'OWNERS', 'WHOSE', 'INTERESTS', 'ARE', 'CLOSELY', 'RELATED'] +6938-70848-0022-1238: hyp=['THE', 'FIRST', 'STAGE', 'WAS', 'A', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'THE', 'CRASHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALIST', 'AND', 'THE', 'LANDOWNERS', 'WHOSE', 'INTEREST', 'OUR', 'CLOSELY', 'RELATED'] +6938-70848-0023-1239: ref=['THE', 'DUMAS', 'AND', 'ZEMSTVOS', 'WERE', 'DROPPED'] +6938-70848-0023-1239: hyp=['DID', 'YOU', 'ME', 'SEND', 'THEMSELVES', 'WERE', 'DROPPED'] +6938-70848-0024-1240: ref=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0024-1240: hyp=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0025-1241: ref=['HE', 'SPOKE', 'TO', 'THE', 'RUMP', 'CONVENTION'] +6938-70848-0025-1241: hyp=['HE', 'SPOKE', 'TO', 'THE', 'WRONG', 'CONVENTION'] +6938-70848-0026-1242: ref=['THE', 'VILLAGES', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0026-1242: hyp=['THE', 'RELIGIOUS', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0027-1243: ref=['BUT', 'THE', 'PRESENT', 'MOVEMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0027-1243: hyp=['BUT', 'THE', 'PRESENT', 'MOMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0028-1244: ref=['THE', 'WILL', 'OF', 'MILLIONS', 'OF', 'WORKERS', 'IS', 'NOW', 'CONCENTRATED', 'IN', 'THIS', 'HALL'] +6938-70848-0028-1244: hyp=['THE', 'WIDOW', 'OF', 'MILLIONS', 'OF', 'WORKERS', 'IS', 'SO', 'CONCENTRATED', 'IN', 'THE', 'HALL'] +6938-70848-0029-1245: ref=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] +6938-70848-0029-1245: hyp=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] +6938-70848-0030-1246: ref=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTENING', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +6938-70848-0030-1246: hyp=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTIAN', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +7018-75788-0000-135: ref=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0000-135: hyp=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 
'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0001-136: ref=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0001-136: hyp=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0002-137: ref=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0002-137: hyp=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0003-138: ref=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COTTON', 'BAG', 'AND', 'GIVING', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0003-138: hyp=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COTTON', 'BAG', 'AND', 'GIVEN', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0004-139: ref=['DO', 'AS', 'THEY', 'DO', 'AND', 'BELIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0004-139: hyp=['DO', 'AS', 'THEY', 'DO', 'AND', 'BE', 'LIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0005-140: ref=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0005-140: hyp=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0006-141: ref=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0006-141: hyp=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0007-142: ref=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'APES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 
'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0007-142: hyp=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'IPES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0008-143: ref=['WE', 'WEIGHED', 'ANCHOR', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0008-143: hyp=['WE', 'WADE', 'ANCHOR', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0009-144: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0009-144: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0010-145: ref=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'BASSORAH'] +7018-75788-0010-145: hyp=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'PUSSARA'] +7018-75788-0011-146: ref=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGHDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOREGATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0011-146: hyp=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOR', 'GATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0012-147: ref=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 'MADE'] +7018-75788-0012-147: hyp=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 'MADE'] +7018-75788-0013-148: ref=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINDBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0013-148: hyp=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0014-149: ref=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 
'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0014-149: hyp=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0015-150: ref=['HAPLY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0015-150: hyp=['HAPPILY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0016-151: ref=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0016-151: hyp=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0017-152: ref=['BUT', 'IT', 'BURNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECT', 'IT', 'AND', 'SELL', 'IT'] +7018-75788-0017-152: hyp=['BUT', 'AT', 'BERNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECTED', 'AND', 'SELL', 'IT'] +7018-75788-0018-153: ref=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'ABODE', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0018-153: hyp=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'A', 'BOAT', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0019-154: ref=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75788-0019-154: hyp=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75789-0000-155: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0000-155: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0001-156: ref=['THEN', 'SIGHING', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'COMORIN', 'ALOES', 'WOOD', 'AND', 
'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIPS', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALOES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0001-156: hyp=['THEN', 'SIGNED', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'CORMORRA', 'AND', 'ALLIES', 'WOOD', 'AND', 'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIP', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALLIES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0002-157: ref=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEEK', 'AND', 'FIND', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'O', 'NIGHT', 'ALL', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0002-157: hyp=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEE', 'CONFINED', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'A', 'NIGHT', 'OR', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0003-158: ref=['I', 'ROWED', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0003-158: hyp=['I', 'RIDE', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'ME', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0004-159: ref=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'AND', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 'SHOULD', 'PERISH'] +7018-75789-0004-159: hyp=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'IN', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 'SHOULD', 'PERISH'] +7018-75789-0005-160: ref=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0005-160: hyp=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 
'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0006-161: ref=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0006-161: hyp=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0007-162: ref=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0007-162: hyp=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0008-163: ref=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TILLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0008-163: hyp=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TELLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDEST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0009-164: ref=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'O', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0009-164: hyp=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'AM', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0010-165: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 'NIGHT'] +7018-75789-0010-165: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 'NIGHT'] +7018-75789-0011-166: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINDBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0011-166: hyp=['SHE', 'SAID', 'IT', 'HATH', 'RAGED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 
'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0012-167: ref=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0012-167: hyp=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0013-168: ref=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0013-168: hyp=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0014-169: ref=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'ON', 'OUR', 'HEAD', 'AND', 'EYES', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0014-169: hyp=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'HONOUR', 'HEAD', 'AND', 'EYES', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0015-170: ref=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0015-170: hyp=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0016-171: ref=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0016-171: hyp=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0017-172: ref=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'O', 'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0017-172: hyp=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'A', 'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0018-173: ref=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0018-173: hyp=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 
'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0019-174: ref=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'EYE', 'WITNESSED'] +7018-75789-0019-174: hyp=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'I', 'WITNESSED'] +7018-75789-0020-175: ref=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ALMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRY', 'MAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0020-175: hyp=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ALMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRYMAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0021-176: ref=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0021-176: hyp=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0022-177: ref=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0022-177: hyp=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0023-178: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0023-178: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0024-179: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINDBAD', 'THE', 'SEAMAN', 'HAD', 'RELATED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 'COMPANY', 'HAD', 'DISPERSED', 'SINDBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0024-179: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINBAD', 'THE', 'SEAMAN', 'HAD', 'RELIGHTED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 'COMPANY', 'HAD', 'DISPERSED', 'SINBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0025-180: ref=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINDBAD', 'THE', 'SEAMAN'] +7018-75789-0025-180: hyp=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINBAD', 'THE', 'SALMON'] +7018-75789-0026-181: ref=['KNOW', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'IN', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 
'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'AND', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HEAR', 'NEW', 'THINGS'] +7018-75789-0026-181: hyp=['NO', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'AND', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'IN', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HERE', 'NEW', 'THINGS'] +7018-75789-0027-182: ref=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSORAH', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0027-182: hyp=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSORA', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0028-183: ref=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENING', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 'STONED', 'CLOMB', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFETING', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0028-183: hyp=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENED', 'IN', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 'STONE', 'CLIMBED', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFET', 'IN', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0029-184: ref=['THIS', 'HE', 'SET', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'AWHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COMETH', 'HITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'THE', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0029-184: hyp=['THIS', 'HE', 'SAID', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 
'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'A', 'WHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COME', 'THITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'A', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0030-185: ref=['A', 'SECOND', 'FISH', 'MADE', 'ITS', 'APPEARANCE', 'THAN', 'WHICH', 'WE', 'HAD', 'SEEN', 'NAUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0030-185: hyp=['A', 'SECOND', 'FISH', 'READ', 'ITS', 'APPEARANCE', 'AND', 'WHICH', 'WE', 'HAD', 'SEEN', 'NOUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0031-186: ref=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75789-0031-186: hyp=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7105-2330-0000-2310: ref=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OR', 'MISCONCEPTION', 'AS', 'TO', "PLATTERBAFF'S", 'GUILT'] +7105-2330-0000-2310: hyp=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OUR', 'MISCONCEPTION', 'AS', 'THE', "PLATTERBATH'S", 'GUILT'] +7105-2330-0001-2311: ref=['HE', 'HAD', 'NOT', 'ONLY', 'PLEADED', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0001-2311: hyp=['HE', 'HAD', 'NOT', 'ONLY', 'PLAYED', 'IT', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0002-2312: ref=['THE', 'JURY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTENUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] +7105-2330-0002-2312: hyp=['VERY', 'CHEERY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTINUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] 
+7105-2330-0003-2313: ref=['OF', 'COURSE', 'ANY', 'SENTENCE', 'WHICH', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', "GOVERNMENT'S", 'POINT', 'OF', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0003-2313: hyp=['OF', 'COURSE', 'ANY', 'SENTENCE', 'REACHED', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', 'GOVERNMENT', 'SPINTER', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0004-2314: ref=['A', 'HEADLONG', 'PARDON', 'ON', 'THE', 'EVE', 'OF', 'A', 'BYE', 'ELECTION', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'DEFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0004-2314: hyp=['I', 'HAD', 'LONG', 'PARDON', 'AND', 'THE', 'EVE', 'OF', 'A', 'BILL', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'AFFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0005-2315: ref=['HENCE', 'THE', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITEHALL', 'AND', 'DOWNING', 'STREET', 'AND', 'OTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0005-2315: hyp=['HENCE', 'THEIR', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITEHALL', 'AND', 'DAWNING', 'STREET', 'ANOTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0006-2316: ref=['THE', 'JURY', 'RETURNED', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATHLIKE', 'HUSH'] +7105-2330-0006-2316: hyp=['THEIR', 'CHEERY', 'RETURN', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATH', 'LIKE', 'HUSH'] +7105-2330-0007-2317: ref=['THE', 'FOREMAN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0007-2317: hyp=['THE', 'FOUR', 'MEN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0008-2318: ref=['THE', 'JURY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] +7105-2330-0008-2318: hyp=['THE', 'CHERRY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] +7105-2330-0009-2319: ref=['THE', 'JURY', 'WISH', 'TO', 'ADD', 'A', 'RIDER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BY', 'ELECTION', 'IS', 'PENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0009-2319: hyp=['THEY', 'JERRY', 'WISH', 'TO', 'ADD', 'A', 'WRITER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BILL', 'IS', 'SPENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0010-2320: ref=['AND', 'MAY', 'THE', 'LORD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLL', 'A', 'JUNIOR', 'COUNSEL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0010-2320: hyp=['AND', 'MADE', 'THE', 'LARD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLE', 'A', 'GENIOR', 'CONSUL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0011-2321: ref=['FIFTEEN', 'HUNDRED', 
'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0011-2321: hyp=['FIFTEEN', 'HUNDRED', 'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0012-2322: ref=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0012-2322: hyp=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0013-2323: ref=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0013-2323: hyp=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0014-2324: ref=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANISER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLL', 'OPENS'] +7105-2330-0014-2324: hyp=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANIZER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLE', 'OPENS'] +7105-2330-0015-2325: ref=['HE', 'SAID', 'IT', 'WAS', 'OUR', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'RADPROP', 'IS', 'IN', 'TO', 'NIGHT'] +7105-2330-0015-2325: hyp=['HE', 'SAID', 'IT', 'WAS', 'HER', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'RED', "RAPPA'S", 'INN', 'TO', 'NIGHT'] +7105-2330-0016-2326: ref=['DESPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TRELAWNEY', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0016-2326: hyp=['THIS', 'SPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TREEONER', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0017-2327: ref=['HE', 'EXCLAIMED', "WON'T", 'GO'] +7105-2330-0017-2327: hyp=['HE', 'EXCLAIMED', "WON'T", 'GO'] +7105-2330-0018-2328: ref=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BRASS', 'BAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0018-2328: hyp=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BREASTPAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0019-2329: ref=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'RELEASED', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFEND', 'IT', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0019-2329: hyp=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'RELISSE', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFENDED', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0020-2330: ref=['ANYWAY', 'HE', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0020-2330: hyp=['AND', 'AWAY', 
'YOU', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0021-2331: ref=['POLL', 'OPENS', 'IN', 'FIVE', 'MINUTES'] +7105-2330-0021-2331: hyp=['PAUL', 'OPENED', 'THIN', 'FIVE', 'MINUTES'] +7105-2330-0022-2332: ref=['IS', 'PLATTERBAFF', 'OUT', 'YET'] +7105-2330-0022-2332: hyp=['HIS', 'FURTHER', 'BATH', 'OUT', 'YET'] +7105-2330-0023-2333: ref=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0023-2333: hyp=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0024-2334: ref=['THE', 'CHIEF', 'ORGANISER', 'RANG', 'OFF'] +7105-2330-0024-2334: hyp=['THE', 'CHIEF', 'ORGANIZER', 'RANG', 'OFF'] +7105-2330-0025-2335: ref=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', 'MUSICIANS', 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0025-2335: hyp=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', "MESSIE'S", 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0026-2336: ref=["CAN'T", 'YOU', 'GET', 'A', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANISER'] +7105-2330-0026-2336: hyp=["CAN'T", 'YOU', 'GET', 'US', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANIZER'] +7105-2330-0027-2337: ref=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0027-2337: hyp=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0028-2338: ref=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'WILL', 'VOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0028-2338: hyp=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'REVOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0029-2339: ref=['A', 'TELEGRAM', 'WAS', 'BROUGHT', 'IN'] +7105-2330-0029-2339: hyp=['I', 'TELEGRAMAS', 'BROUGHT', 'IN'] +7105-2330-0030-2340: ref=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMMITTEE', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0030-2340: hyp=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMEDY', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0031-2341: ref=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BAND'] +7105-2330-0031-2341: hyp=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BEND'] +7105-2330-0032-2342: ref=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] +7105-2330-0032-2342: hyp=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] +7105-2330-0033-2343: ref=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] +7105-2330-0033-2343: hyp=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] +7105-2330-0034-2344: ref=['DEMANDED', 'THE', 'CHIEF', 'ORGANISER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'CYMBALS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0034-2344: hyp=['DEMANDED', 'THE', 'CHIEF', 'ORGANIZER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'SYMBOLS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0035-2345: ref=['THE', 'WARDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0035-2345: hyp=['THE', 'ORDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0036-2346: ref=['LEND', 'US', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANISER'] +7105-2330-0036-2346: 
hyp=['BLENDEST', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANIZER'] +7105-2330-0037-2347: ref=['THE', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0037-2347: hyp=['THOUGH', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0038-2348: ref=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'WAS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'STRAINS', 'OF', 'I', "DIDN'T", 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0038-2348: hyp=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'IS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'TRAINS', 'OF', 'I', "DON'T", 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0039-2349: ref=['THE', 'WORD', 'OF', 'THE', 'SONG', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'TO', 'THE', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0039-2349: hyp=['THE', 'WORD', 'OF', 'THE', 'SUN', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'THAT', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0040-2350: ref=['THE', 'SEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MAJORITY'] +7105-2330-0040-2350: hyp=['THIS', 'HEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MATURITY'] +7105-2330-0041-2351: ref=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINET', 'MINISTERS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKE', 'BREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBAFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2330-0041-2351: hyp=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINETS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKEBREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBUFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2340-0000-2272: ref=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'THE', 'SORT', 'OF', 'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0000-2272: hyp=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'A', 'SORT', 'OF', 'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0001-2273: ref=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'HER', 'HUSBAND', 'OR', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0001-2273: hyp=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'THE', 'HUSBAND', 'OR', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0002-2274: ref=['BESIDES', 'CYNICISM', 'APART', 'HIS', 'BEING', 'RICH', 'WILL', 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FAILING'] +7105-2340-0002-2274: hyp=['BESIDES', 'CYS', 'IN', 'A', 'PART', 'IS', 'BEING', 'RICH', "WE'LL", 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FEELING'] +7105-2340-0003-2275: ref=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 
'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SORDID', 'MOTIVE', 'NATURALLY', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'TIRESOME', 'MALADY'] +7105-2340-0003-2275: hyp=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SARDID', 'MOTIVE', 'NATURAL', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'PARASAN', 'MALADY'] +7105-2340-0004-2276: ref=['WILFRID', 'PIGEONCOTE', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEONCOTE', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRID', 'PIGEONCOTE', 'WHO', 'HAD', 'SUCCUMBED', 'TO', 'THE', 'AFTER', 'EFFECTS', 'OF', 'A', 'POLO', 'ACCIDENT'] +7105-2340-0004-2276: hyp=['WILFRED', 'DIJIN', 'CODE', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEON', 'COAT', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRED', 'PIGEONOTE', 'WHO', 'HAD', 'SUCCUMBED', 'THE', 'DAY', 'AFTER', 'EFFECTS', 'OF', 'APOLLO', 'ACCIDENT'] +7105-2340-0005-2277: ref=['A', 'WILFRID', 'PIGEONCOTE', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONOURS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'WILFRID', 'HAD', 'BEEN', 'A', 'BAPTISMAL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPUTATION', 'THAN', 'BY', 'PERSON', 'TO', 'A', 'WIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0005-2277: hyp=['OF', 'WILFRED', 'BEECH', 'AND', 'COURT', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONORS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'LOYAL', 'FRED', 'HAD', 'BEEN', 'ABOVE', 'THE', 'SMALL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPETITION', 'THAN', 'BY', 'PERSON', 'TO', 'AVIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0006-2278: ref=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0006-2278: hyp=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0007-2279: ref=['FROM', 'HIS', 'LATE', 'SCHOOLDAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'KLEPTOMANIA', 'HE', 'HAD', 'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0007-2279: hyp=['FROM', 'HIS', 'LATE', 'SCHOOL', 'DAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'CLEFTOMANIA', 'HE', 'HAD', 'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0008-2280: ref=['THE', 'SEARCH', 'USUALLY', 'PRODUCED', 'A', 'LARGE', 'AND', 'VARIED', 'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEONCOTE', 'TO', 'HIS', 'WIFE', 'SOME', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', "HERE'S", 'A', 'TELEGRAM', 'FROM', 'WILFRID', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0008-2280: hyp=['THIS', 'SEARCH', 'USUALLY', 'PRODUCE', 'A', 'LARGE', 'AND', 'VARIED', 
'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEON', 'BOLTO', 'HIS', 'WIFE', 'THEM', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', 'HERE', 'IS', 'A', 'TELEGRAM', 'FROM', 'MILFRED', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0009-2281: ref=['SIGNED', 'WILFRID', 'PIGEONCOTE'] +7105-2340-0009-2281: hyp=['SIGN', 'WILFRED', 'PEACH', 'AND', 'CO'] +7105-2340-0010-2282: ref=['I', 'SUPPOSE', "HE'S", 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0010-2282: hyp=['I', 'SUPPOSE', 'THIS', 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0011-2283: ref=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0011-2283: hyp=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0012-2284: ref=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0012-2284: hyp=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0013-2285: ref=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVER', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'FOR', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0013-2285: hyp=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVER', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'FOR', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0014-2286: ref=['SUCH', 'NICE', 'USEFUL', 'GIFTS', 'A', 'FEW', 'DUPLICATES', 'OF', 'COURSE'] +7105-2340-0014-2286: hyp=['SUCH', 'NICE', 'FORGIFTS', 'A', 'FEW', 'DEPLICATES', 'OF', 'COURSE'] +7105-2340-0015-2287: ref=['SEVEN', 'CREAM', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0015-2287: hyp=['SEVEN', 'QUEEN', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0016-2288: ref=['WE', 'FEEL', 'THAT', 'WE', 'MUST', 'LIVE', 'ON', 'CREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0016-2288: hyp=['WE', 'FEEL', 'THAT', 'WE', 'MUST', 'LIVE', 'UNCREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0017-2289: ref=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0017-2289: hyp=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0018-2290: ref=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARET', 'JUG', 'SAID', 'WILFRID', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0018-2290: hyp=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARGA', 'SAID', 'WILFRIED', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0019-2291: ref=['VIGILANCE', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0019-2291: hyp=['EACH', 'A', 'LENS', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0020-2292: ref=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] +7105-2340-0020-2292: hyp=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] 
+7105-2340-0021-2293: ref=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OFF'] +7105-2340-0021-2293: hyp=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OFF'] +7105-2340-0022-2294: ref=["IT'S", 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0022-2294: hyp=['IS', 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0023-2295: ref=['WILFRID', 'WAS', 'LATE', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0023-2295: hyp=['WILFRED', 'WAS', 'LATE', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0024-2296: ref=["IT'S", 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PORTMANTEAU'] +7105-2340-0024-2296: hyp=["IT'S", 'AND', 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PARTNENT', 'TOE'] +7105-2340-0025-2297: ref=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0025-2297: hyp=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0026-2298: ref=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'CREAM', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 'FELT', 'RATHER', 'AWKWARD', 'ABOUT', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0026-2298: hyp=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'QUEEN', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 'FELT', 'RATHER', 'AWKWARD', 'OF', 'A', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0027-2299: ref=['THE', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THESE', 'MANY', 'YEARS'] +7105-2340-0027-2299: hyp=['THIS', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THIS', 'MANY', 'YEARS'] +7105-2340-0028-2300: ref=['LADY', 'ERNESTINE', 'PIGEONCOTE', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SON', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0028-2300: hyp=['LAY', 'THE', 'ERNESTON', 'BEECH', 'AND', 'COLT', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SUN', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0029-2301: ref=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0029-2301: hyp=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0030-2302: ref=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 
'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0030-2302: hyp=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0031-2303: ref=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVERWARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'JUG', 'IN', 'HER', 'HANDS'] +7105-2340-0031-2303: hyp=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVERWARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'CHUG', 'IN', 'HER', 'HANDS'] +7105-2340-0032-2304: ref=['THE', 'PIGEONCOTES', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0032-2304: hyp=['THE', 'PIGEON', 'CORDS', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0033-2305: ref=['PETER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0033-2305: hyp=['EITHER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0034-2306: ref=['MISSUS', 'PETER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'COYNESS'] +7105-2340-0034-2306: hyp=['MISSUS', 'BEATER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'KINDNESS'] +7105-2340-0035-2307: ref=["PETER'S", 'LITTLE', 'WEAKNESS', 'IT', 'RUNS', 'IN', 'THE', 'FAMILY', 'GOOD', 'LORD'] +7105-2340-0035-2307: hyp=['PETER', 'IS', 'LITTLE', 'WEAKNESS', 'EACH', 'ONES', 'IN', 'THE', 'FAMILY', 'GOOD', 'LORD'] +7105-2340-0036-2308: ref=['DO', 'YOU', 'MEAN', 'TO', 'SAY', "HE'S", 'A', 'KLEPTOMANIAC', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0036-2308: hyp=['DO', 'YOU', 'MEAN', 'TO', 'SAY', "HE'S", 'A', 'CLAPTOMANIA', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0037-2309: ref=['BRAVE', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7105-2340-0037-2309: hyp=['PRETTY', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7902-96591-0000-0: ref=['I', 'AM', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0000-0: hyp=['AND', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0001-1: ref=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0001-1: hyp=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0002-2: ref=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 
'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0002-2: hyp=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0003-3: ref=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0003-3: hyp=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0004-4: ref=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAEME', 'HAS', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0004-4: hyp=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAHAME', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0005-5: ref=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0005-5: hyp=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0006-6: ref=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHY', 'WAS', 'SILENT'] +7902-96591-0006-6: hyp=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHIE', 'WAS', 'SILENT'] +7902-96591-0007-7: ref=['THEN', 'AS', 'ARCHY', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0007-7: hyp=['THEN', 'AS', 'ARCHIE', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0008-8: ref=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMOUR', 'PROPRE', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARCHIBALD', 'RAYSTOKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0008-8: hyp=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMORE', 'A', 'PROPER', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARQUEBAUL', 'RAY', 'STROKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0009-9: ref=['IT', 'ALL', 'COMES', 'OF', 'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0009-9: hyp=['AND', 'ALL', 'COMES', 'OF', 'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0010-10: ref=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0010-10: hyp=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0011-11: ref=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0011-11: hyp=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0012-12: ref=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 
'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0012-12: hyp=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0013-13: ref=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0013-13: hyp=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0014-14: ref=['THE', 'KICK', 'HE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0014-14: hyp=['THE', 'KICKIE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0015-15: ref=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THEN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0015-15: hyp=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THAN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0016-16: ref=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0016-16: hyp=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0017-17: ref=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0017-17: hyp=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0018-18: ref=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0018-18: hyp=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0019-19: ref=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'COULD', 'NOT', 'DO', 'THAT'] +7902-96591-0019-19: hyp=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'CANNOT', 'DO', 'THAT'] +7902-96591-0020-20: ref=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOZE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 
'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0020-20: hyp=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOSE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0021-21: ref=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0021-21: hyp=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0022-22: ref=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0022-22: hyp=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0023-23: ref=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0023-23: hyp=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0024-24: ref=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISON', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96591-0024-24: hyp=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISON', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96592-0000-25: ref=['SURE', "YOU'VE", 'LOOKED', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0000-25: hyp=['SURE', 'YOU', 'LOOK', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0001-26: ref=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0001-26: hyp=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0002-27: ref=['SHALL', 'I', 'COME', 'TOO', 'FATHER', 'NO'] +7902-96592-0002-27: hyp=['SHALL', 'I', 'COME', 'TO', 'FATHER', 'NO'] +7902-96592-0003-28: ref=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 'SAW', 'JERRY', "NANDY'S", 'LOBSTER', 'BOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0003-28: hyp=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 'SAW', 'JERRY', "ANDY'S", 'LOBSTER', 'BOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0004-29: ref=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0004-29: hyp=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0005-30: ref=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 
'ARCHY', 'RAYSTOKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0005-30: hyp=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 'ARCHIE', 'RAYSTROKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0006-31: ref=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0006-31: hyp=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0007-32: ref=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GURR', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEAL', 'THE', 'BOY', 'WHO', 'HELPED', 'THE', 'COOK'] +7902-96592-0007-32: hyp=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GIRD', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEALE', 'THE', 'BOY', 'WHO', 'HELPS', 'THE', 'COOK'] +7902-96592-0008-33: ref=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GURR', 'FATHER'] +7902-96592-0008-33: hyp=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GERFATHER'] +7902-96592-0009-34: ref=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'KNOTTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0009-34: hyp=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'NAUGHTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0010-35: ref=['AND', "I'M", 'HUNGRY', 'TOO', 'TIME', 'I', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0010-35: hyp=['AND', 'UNHUNGRY', 'TOO', 'TELL', 'IT', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0011-36: ref=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0011-36: hyp=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0012-37: ref=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDON', 'THEY', 'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] +7902-96592-0012-37: hyp=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDEN', 'THEY', 'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] +7902-96592-0013-38: ref=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RAN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARE', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0013-38: hyp=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RUN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARED', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0014-39: ref=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 'WAS', 'ADVANCING', 'TOWARD', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0014-39: hyp=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 
'WAS', 'ADVANCING', 'TOWARDS', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0015-40: ref=['RAM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0015-40: hyp=['ROOM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0016-41: ref=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0016-41: hyp=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0017-42: ref=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0017-42: hyp=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0018-43: ref=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'YOU', 'WAS'] +7902-96592-0018-43: hyp=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'HE', 'WAS'] +7902-96592-0019-44: ref=['IT', 'WAS', 'YOU', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0019-44: hyp=['IT', 'WAS', 'YOUR', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0020-45: ref=['NONSENSE'] +7902-96592-0020-45: hyp=['NONSENSE'] +7902-96592-0021-46: ref=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0021-46: hyp=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0022-47: ref=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFICER'] +7902-96592-0022-47: hyp=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFASTER'] +7902-96592-0023-48: ref=["WON'T", 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0023-48: hyp=['WELL', 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0024-49: ref=['BEEN', 'PLAYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0024-49: hyp=['COMPLYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0025-50: ref=['ARCHY', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] +7902-96592-0025-50: hyp=['ARCHIE', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] +7902-96592-0026-51: ref=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0026-51: hyp=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0027-52: ref=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0027-52: hyp=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0028-53: ref=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'BREAST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'AT', 
'ONE', 'OF', 'THE', 'BARS', 'LABOUR', 'IN', 'VAIN'] +7902-96592-0028-53: hyp=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'CHEST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'IT', 'ONE', 'OF', 'THE', 'BARS', 'LABOR', 'IN', 'VAIN'] +7902-96592-0029-54: ref=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALISED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0029-54: hyp=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALIZED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0030-55: ref=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'EAR', 'ASSAILING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0030-55: hyp=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'IRRES', 'SELLING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0031-56: ref=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0031-56: hyp=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0032-57: ref=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'HA', 'HA', 'LAUGHED', 'THE', 'BOY', 'JEERINGLY'] +7902-96592-0032-57: hyp=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'LAUGHED', 'THE', 'BOY', 'JEERINGLY'] +7902-96592-0033-58: ref=['BUT', "I'LL", 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0033-58: hyp=['BUT', 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0034-59: ref=['GUINEA', 'SAID', 'THE', 'BOY', 'THINK', "I'D", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TWO'] +7902-96592-0034-59: hyp=['GUINEAS', 'OF', 'THE', 'BOY', 'THINK', "I'LL", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TOO'] +7902-96592-0035-60: ref=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0035-60: hyp=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0036-61: ref=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0036-61: hyp=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0037-62: ref=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 'ON', 'ALL', 'SKEW', 'REW'] +7902-96592-0037-62: hyp=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 
'ON', 'ALL', 'SCARO'] +7902-96592-0038-63: ref=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0038-63: hyp=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0039-64: ref=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHY', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0039-64: hyp=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHIE', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0040-65: ref=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0040-65: hyp=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0041-66: ref=['ARCHY', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0041-66: hyp=['ARCHIE', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0042-67: ref=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0042-67: hyp=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0043-68: ref=['YOU', 'LAUGHED', 'AND', 'FLEERED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0043-68: hyp=['YOU', 'LAUGHED', 'AND', 'FLARED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0044-69: ref=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'A', 'RUM', 'UN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0044-69: hyp=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'LIKE', 'A', 'ROMAN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0045-70: ref=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0045-70: hyp=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0046-71: ref=['YOU', "ROPE'S", 'END', 'ME', 'HE', 'SAID'] +7902-96592-0046-71: hyp=['YOU', 'HOPES', 'AND', 'ME', 'HE', 'SAID'] +7902-96592-0047-72: ref=['WHY', 'I', 'COULD', 'TIE', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0047-72: hyp=['WHY', 'I', 'COULD', 'TIE', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0048-73: ref=['BIT', 'OF', 'A', 'MIDDY', 'FED', 'ON', 'SALT', 'TACK', 'AND', 'WEEVILLY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', "ROPE'S", 'END'] +7902-96592-0048-73: hyp=['BIT', 'OF', 'A', 'MIDDI', 'FED', 'ON', 'A', 'SALT', 'TACK', 'AND', 'WEEVILY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', 'ROPES', 'AND'] +7902-96592-0049-74: ref=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0049-74: hyp=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0050-75: ref=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'THEM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HANDS', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96592-0050-75: 
hyp=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'HIM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HAND', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96594-0000-76: ref=['SEEMED', 'IN', 'GOOD', 'SPIRITS', 'LAST', 'NIGHT', 'MISTER', 'GURR', 'EH'] +7902-96594-0000-76: hyp=['SEEMING', 'AT', "SPEAR'S", 'LAST', 'NIGHT', 'MISTER', 'GARR', 'HEY'] +7902-96594-0001-77: ref=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0001-77: hyp=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0002-78: ref=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0002-78: hyp=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0003-79: ref=["THAT'S", 'RIGHT', 'OF', 'COURSE', 'WELL', 'ARMED'] +7902-96594-0003-79: hyp=['THE', 'THREAT', 'OF', 'COURSE', 'WILL', 'ALARMED'] +7902-96594-0004-80: ref=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0004-80: hyp=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0005-81: ref=['AWKWARD', 'BIT', 'O', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0005-81: hyp=['AWKWARD', 'BITTER', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0006-82: ref=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0006-82: hyp=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0007-83: ref=['YOU', "DON'T", 'THINK', 'MISTER', 'GURR', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0007-83: hyp=['YOU', "DON'T", 'THINK', 'MISTER', 'GREW', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0008-84: ref=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0008-84: hyp=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0009-85: ref=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDEED'] +7902-96594-0009-85: hyp=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDE'] +7902-96594-0010-86: ref=['BEG', 'PARDON', 'SIR', "DIDN'T", 'MEAN', 'ANY', 'HARM'] +7902-96594-0010-86: hyp=['THEY', 'PARDON', 'SIR', "DIDN'T", 'MEAN', 'ANY', 'HARM'] +7902-96594-0011-87: ref=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTOKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0011-87: hyp=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTROKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0012-88: ref=['NO', 'WAIT', 'ANOTHER', 'HALF', 'HOUR'] +7902-96594-0012-88: hyp=['NO', 'WAIT', 'ANOTHER', 'AND', 'HALF', 'HOUR'] +7902-96594-0013-89: ref=['VERY', 'ILL', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0013-89: hyp=['VERY', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0014-90: ref=['THEN', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0014-90: hyp=['THAT', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0015-91: ref=['AWK', 'WARD', 'MISTER', 'GURR', 'AWKWARD'] +7902-96594-0015-91: hyp=['AWKWARD', 'MISTER', 'GARR', 'AWKWARD'] +7902-96594-0016-92: ref=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0016-92: 
hyp=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0017-93: ref=['SAY', 'AWK', 'WARD', 'IN', 'FUTURE', 'NOT', "AWK'ARD"] +7902-96594-0017-93: hyp=['SAY', 'AWKWARD', 'IN', 'THE', 'FUTURE', 'NOT', 'UPWARD'] +7902-96594-0018-94: ref=['I', 'MEAN', 'ALL', 'ALONE', 'BY', 'MYSELF', 'SIR'] +7902-96594-0018-94: hyp=['I', 'MEAN', 'OUR', 'OWN', 'BY', 'MYSELF', 'SIR'] +7902-96594-0019-95: ref=['WHAT', 'FOR', 'THERE', "AREN'T", 'A', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0019-95: hyp=['WHAT', 'FOR', 'THERE', 'ARE', 'TO', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0020-96: ref=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0020-96: hyp=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0021-97: ref=['HOPPING', 'ABOUT', 'LIKE', 'A', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0021-97: hyp=['HAVING', 'ABOUT', 'THE', 'GOOD', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0022-98: ref=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0022-98: hyp=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0023-99: ref=['BEG', 'PARDON', "DIDN'T", 'MEAN', 'NOWT', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0023-99: hyp=['THEY', 'PARDON', "DIDN'T", 'MEAN', 'OUT', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0024-100: ref=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0024-100: hyp=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0025-101: ref=['NO', 'WAIT'] +7902-96594-0025-101: hyp=['NO', 'WAIT'] +7902-96594-0026-102: ref=['KEEP', 'A', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTOKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BOAT'] +7902-96594-0026-102: hyp=['HE', 'WAS', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTROKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BOAT'] +7902-96594-0027-103: ref=['HE', 'SWUNG', 'ROUND', 'WALKED', 'AFT', 'AND', 'BEGAN', 'SWEEPING', 'THE', 'SHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] +7902-96594-0027-103: hyp=['HE', 'SWUNG', 'ROUND', 'WALKED', 'OFF', 'AND', 'BEGAN', 'SWEEPING', 'ASHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] +7902-96594-0028-104: ref=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0028-104: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0029-105: ref=['PIPE', 'AWAY', 'THE', 'MEN', 'TO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0029-105: hyp=['PEG', 'AWAY', 'THEM', 'INTO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0030-106: ref=['NOW', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0030-106: hyp=['NO', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 
'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0031-107: ref=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0031-107: hyp=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0032-108: ref=['STEADY', 'MY', 'LADS', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96594-0032-108: hyp=['STEADY', 'MY', 'LAD', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96595-0000-109: ref=['SAY', 'MESTER', 'GURR', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0000-109: hyp=['SAY', 'MISTER', 'GIRK', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0001-110: ref=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0001-110: hyp=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0002-111: ref=['WHAT', 'CHUCKED', 'HIM', 'OFF', 'YONDER'] +7902-96595-0002-111: hyp=['WHAT', 'SAID', 'DE', 'MORVE', 'YONDER'] +7902-96595-0003-112: ref=['GURR', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BUT', 'KINDLY'] +7902-96595-0003-112: hyp=['GIRK', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BE', 'KINDLY'] +7902-96595-0004-113: ref=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0004-113: hyp=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0005-114: ref=['SAY', 'MESTER', 'GURR', 'SIR', 'WHICH', 'THANKFUL', 'I', 'AM', 'TO', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0005-114: hyp=['SAY', 'MISTER', 'GURSER', 'WHICH', 'THANKFUL', 'I', 'AM', 'FOR', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0006-115: ref=['I', 'HOPE', 'NOT', 'DICK', 'I', 'HOPE', 'NOT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 'ANYTHING', 'SOMETIMES'] +7902-96595-0006-115: hyp=['I', 'HOPE', 'NOT', 'DICK', 'I', 'OPEN', 'IT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 'ANYTHING', 'SOMETIMES'] +7902-96595-0007-116: ref=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0007-116: hyp=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0008-117: ref=['IF', "YOU'D", 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GURR', 'GRUFFLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'HIM', 'UP', 'HERE'] +7902-96595-0008-117: hyp=['IF', 'YOU', 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GRIGGLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'EM', 'UP', 'HERE'] +7902-96595-0009-118: ref=['BOY', 'BOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0009-118: hyp=['BOY', 'ABOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0010-119: ref=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GURR', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] 
+7902-96595-0010-119: hyp=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GURR', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] +7902-96595-0011-120: ref=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0011-120: hyp=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0012-121: ref=['I', 'SAID', 'A', 'LAD', 'BOUT', 'SEVENTEEN', 'IN', 'A', 'RED', 'CAP', 'LIKE', 'YOURS', 'SAID', 'GURR', 'VERY', 'SHORTLY'] +7902-96595-0012-121: hyp=['I', 'STOOD', 'ALOUD', 'ABOUT', 'SEVENTEEN', 'AND', 'A', 'RED', 'CAPLICHOS', 'SAID', 'GREW', 'VERY', 'SHORTLY'] +7902-96595-0013-122: ref=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0013-122: hyp=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0014-123: ref=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0014-123: hyp=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: ref=['EH', 'I', 'SAY', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: hyp=['EH', 'I', 'SAY', 'WAS', 'YOUR', 'MASTER'] +7902-96595-0016-125: ref=['GURR', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SIGNING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'THE', 'STEEP', 'TRACK', 'LEADING', 'TOWARD', 'THE', 'HOZE', 'WITH', 'THE', 'RABBITS', 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FURZE', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0016-125: hyp=['GERT', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SOUNDING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'A', 'STEEP', 'CHECK', 'LEADING', 'TOWARD', 'THE', 'HOSE', 'WITH', 'THE', "RABBIT'S", 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FIRS', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0017-126: ref=['I', 'DUNNO', 'MUTTERED', 'DICK', 'AND', 'A', 'MAN', "CAN'T", 'BE', 'SURE'] +7902-96595-0017-126: hyp=['I', 'DUNNO', 'MUTTERED', 'DICK', 'AND', 'A', 'MEN', "CAN'T", 'BE', 'SURE'] +7902-96595-0018-127: ref=['GURR', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALLOWER', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0018-127: hyp=['GIR', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALARY', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'OF', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0019-128: ref=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0019-128: hyp=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0020-129: ref=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0020-129: 
hyp=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0021-130: ref=['BEG', 'PARDON', 'SIR', 'BUT', 'CAN', 'YOU', 'AS', 'A', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0021-130: hyp=['BIG', 'PARTISER', 'BECAME', 'AS', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0022-131: ref=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0022-131: hyp=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0023-132: ref=['SIR', 'RISDON', 'WAS', 'SILENT'] +7902-96595-0023-132: hyp=['SIR', 'RICHMOND', 'WAS', 'SILENT'] +7902-96595-0024-133: ref=['LADY', 'GRAEME', 'LOOKED', 'GHASTLY'] +7902-96595-0024-133: hyp=['LADY', 'GRAHAM', 'LOOKED', 'GHASTLY'] +7902-96595-0025-134: ref=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7902-96595-0025-134: hyp=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7975-280057-0000-1008: ref=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0000-1008: hyp=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0001-1009: ref=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0001-1009: hyp=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'A', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0002-1010: ref=['MY', 'MOTHER', 'WHO', 'WAS', 'BURSHEBA', 'FRISTOE', 'OF', 'INDEPENDENCE', 'WAS', 'THE', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOE', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AT', 'MY', 'GRANDFATHER', "FRISTOE'S", 'INSISTENCE'] +7975-280057-0002-1010: hyp=['MY', 'MOTHER', 'WHO', 'WAS', 'PERCEIVER', 'FOR', 'STOVE', 'OF', 'INDEPENDENCE', 'WAS', 'A', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOW', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AND', 'MY', 'GRANDFATHER', 'FIRST', 'DOZE', 'INSISTANTS'] +7975-280057-0003-1011: ref=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0003-1011: hyp=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0004-1012: ref=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEENTH', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'IN', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0004-1012: hyp=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEEN', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'IN', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0005-1013: ref=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0005-1013: hyp=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0006-1014: ref=['MY', 'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MAIL', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0006-1014: hyp=['MY', 
'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MALE', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0007-1015: ref=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTPORT', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0007-1015: hyp=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTWARD', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0008-1016: ref=['MISSUS', 'WASHINGTON', 'WELLS', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', "LEE'S", 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0008-1016: hyp=['MISS', 'WASHINGTON', 'WALES', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', 'LEE', 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0009-1017: ref=['MISSUS', 'WELLS', 'STAYED', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHILE', 'HER', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'IN', 'CAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0009-1017: hyp=['MUST', 'WELL', 'STAY', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHETHER', 'HER', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'ENCAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0010-1018: ref=['MISSUS', 'MC', 'CORKLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0010-1018: hyp=['MISS', 'MICROCLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0011-1019: ref=['AS', 'THE', 'RAIDERS', 'LEFT', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0011-1019: hyp=['AS', 'THE', 'RAIDERS', 'LIVED', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0012-1020: ref=['NOW', 'OLD', 'LADY', 'CALL', 'ON', 'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'ON', 'COLE', 'YOUNGER', 'NOW'] +7975-280057-0012-1020: hyp=['NOW', 'LADY', 'CALL', 'ON', 'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'AND', 'CO', 'YOUNGER', 'NOW'] +7975-280057-0013-1021: ref=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0013-1021: hyp=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0014-1022: ref=['BUT', 'SHE', 'FAILED', 'TO', 'FIND', 'THE', 'COMFORT', 'SHE', 'SOUGHT', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FORM'] +7975-280057-0014-1022: hyp=['BUT', 'SHE', 'FAILED', 'TO', 'FANCY', 'COMFORT', 'SHE', 'SOUGHT', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FORM'] +7975-280057-0015-1023: ref=['TWO', 'MONTHS', 'AFTER', 'THIS', 'INCIDENT', 'THE', 'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DEAD', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0015-1023: hyp=['TWO', 'MONTHS', 'AFTER', 'THE', 'INCIDENT', 'THE', 
'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DAY', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0016-1024: ref=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'THE', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0016-1024: hyp=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'A', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0017-1025: ref=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLY', 'WHERE', 'SHE', 'WAS', 'HOUNDED', 'CONTINUALLY'] +7975-280057-0017-1025: hyp=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLEY', 'WHERE', 'SHE', 'WAS', 'HANDY', 'CONTINUALLY'] +7975-280057-0018-1026: ref=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'AT', 'LEXINGTON', 'WEEKLY'] +7975-280057-0018-1026: hyp=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'IT', 'LECINGTON', 'WEEKLY'] +7975-280057-0019-1027: ref=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OR', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0019-1027: hyp=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OF', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0020-1028: ref=['WHEN', 'I', 'WAS', 'TAKEN', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280057-0020-1028: hyp=['WHEN', 'I', 'WAS', 'TAKING', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280063-0000-1058: ref=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'WRITE', 'WHERE', 'COLONEL', 'HAYS', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] +7975-280063-0000-1058: hyp=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'WRITE', 'WHERE', 'COLONEL', 'HAYES', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] +7975-280063-0001-1059: ref=['BOONE', 'MUIR', 'AND', 'MYSELF', 'MET', 'COFFEE', 'AND', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0001-1059: hyp=['BOOM', 'YOU', 'AND', 'MYSELF', 'MAKE', 'COFFEE', 'AND', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0002-1060: ref=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'TO', 'LONE', 'JACK', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0002-1060: hyp=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'THE', 'LONG', 'JAG', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0003-1061: 
ref=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVALRYMEN', 'AND', 'TWO', 'PIECES', 'OF', "RABB'S", 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0003-1061: hyp=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVERNMENT', 'AND', 'TWO', 'PIECES', 'OF', 'RABBS', 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0004-1062: ref=['COME', 'IN', 'COLONEL', 'HAYS', 'EXCLAIMED', 'COLONEL', 'COCKRELL'] +7975-280063-0004-1062: hyp=['COMMONED', 'COLONEL', 'HAYES', 'EXCLAIMED', 'COLONEL', 'COCKLE'] +7975-280063-0005-1063: ref=['I', 'THINK', "HE'LL", 'BE', 'RATHER', 'TOUGH', 'MEAT', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIGHT', 'FOR', 'DINNER'] +7975-280063-0005-1063: hyp=['I', 'THINK', "HE'LL", 'BE', 'READY', 'TO', 'HAVE', 'MEET', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIPE', 'FOR', 'DINNER'] +7975-280063-0006-1064: ref=['JACKMAN', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RECAPTURED', 'THE', 'PIECES'] +7975-280063-0006-1064: hyp=['JACK', 'WENT', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RE', 'CAPTURED', 'THE', 'PIECES'] +7975-280063-0007-1065: ref=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0007-1065: hyp=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0008-1066: ref=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0008-1066: hyp=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0009-1067: ref=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JUDGE', 'GEORGE', 'M', 'BENNETT', 'OF', 'MINNEAPOLIS', 'SAID'] +7975-280063-0009-1067: hyp=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JOE', 'GEORGE', 'I', 'INVITED', 'OF', 'MANY', 'APOLIS', 'SAID'] +7975-280063-0010-1068: ref=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'THE', 'DARING', 'YOUNG', 'RIDER', 'AS', 'COLE', 'YOUNGER'] +7975-280063-0010-1068: hyp=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'A', 'DARING', 'YOUNG', 'RATTERAS', 'COURIER'] +7975-280063-0011-1069: ref=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0011-1069: hyp=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0012-1070: ref=['THE', 'WOUNDED', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280063-0012-1070: hyp=['THE', 'WOUNDS', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280076-0000-1029: ref=['ALTHOUGH', 'EVERY', 'BOOK', 'PURPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'HAS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 
'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0000-1029: hyp=['ALTHOUGH', 'EVERY', 'BOOK', 'REPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'IS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0001-1030: ref=['IT', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'OLL', 'SHEPHERD', 'RED', 'MONKERS', 'AND', 'BUD', 'PENCE', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTRELL'] +7975-280076-0001-1030: hyp=['IT', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'ALL', 'SHEPARD', 'REDMOCKERS', 'AND', 'BUD', 'PINTS', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTREL'] +7975-280076-0002-1031: ref=['THIS', 'RAID', 'WAS', 'ACCOMPANIED', 'BY', 'BLOODSHED', 'JUDGE', 'MC', 'LAIN', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0002-1031: hyp=['THIS', 'RAY', 'WAS', 'ACCOMPANIED', 'BY', 'BLOTCHETTE', 'JOE', 'MC', 'LANE', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0003-1032: ref=['NO', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0003-1032: hyp=['THOUGH', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0004-1033: ref=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBOCOCK', 'BROTHERS', 'BANK', 'AT', 'CORYDON', 'IOWA', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0004-1033: hyp=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBEY', "BROTHER'S", 'BANK', 'AT', 'CROYDEN', 'HOUR', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0005-1034: ref=['IT', 'WAS', 'CHARGED', 'THAT', 'ARTHUR', 'MC', 'COY', 'OR', 'A', 'C', 'MC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0005-1034: hyp=['IT', 'WAS', 'CHARGE', 'THAT', 'OFTEN', 'MA', 'KOY', 'OR', 'A', 'SEA', 'MAC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0006-1035: ref=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0006-1035: hyp=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0007-1036: ref=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0007-1036: hyp=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0008-1037: ref=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 
'CHARGED'] +7975-280076-0008-1037: hyp=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 'CHARGED'] +7975-280076-0009-1038: ref=['AT', 'THE', 'TIME', 'OF', 'THE', 'GALLATIN', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'IN', 'ELLIS', 'COUNTY', 'TEXAS', 'CATTLE', 'THAT', 'I', 'BOUGHT', 'FROM', 'PLEAS', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0009-1038: hyp=["IT'S", 'THE', 'TIME', 'OF', 'THE', 'GALLOP', 'AND', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'AND', 'ILLIS', 'COUNTY', 'TEXAS', 'CATTLEETTA', 'BOUGHT', 'FROM', 'PLACE', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0010-1039: ref=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SHERIFF', 'BARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0010-1039: hyp=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SIR', 'PARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0011-1040: ref=['I', 'BROUGHT', 'THE', 'CATTLE', 'TO', 'KANSAS', 'THAT', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0011-1040: hyp=['I', 'BROUGHT', 'THE', 'CATTLE', 'THE', 'KANSAS', 'SET', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0012-1041: ref=['I', 'THEN', 'WENT', 'TO', 'ARKANSAS', 'AND', 'RETURNED', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0012-1041: hyp=['I', 'THEN', 'WENT', 'TO', 'OUR', 'CONSOHN', 'RETURNED', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0013-1042: ref=['I', 'WENT', 'TO', 'KANSAS', 'WHERE', 'OUR', 'CATTLE', 'WERE', 'IN', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', "RIDGE'S"] +7975-280076-0013-1042: hyp=['AND', 'WENT', 'TO', 'KANSAS', 'WHERE', 'A', 'CATTLE', 'BURNED', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', 'RICHES'] +7975-280076-0014-1043: ref=['DURING', 'THE', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 'SAINT', 'CLAIR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0014-1043: hyp=['DURING', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 'SAINT', 'CLAIR', 'OR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0015-1044: ref=['I', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', "WEBB'S"] +7975-280076-0015-1044: hyp=['AND', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', 'WEBBS'] +7975-280076-0016-1045: ref=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'L', 'W', "TWYMAN'S"] +7975-280076-0016-1045: hyp=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'OLD', 'W', 'TWIMMAN'] +7975-280076-0017-1046: ref=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0017-1046: hyp=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0018-1047: ref=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STAYED', 'IN', 'THE', 'CITY', 
'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'THROUGH', 'THE', 'CITY'] +7975-280076-0018-1047: hyp=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STATE', 'IN', 'THE', 'CITY', 'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'TO', 'THE', 'CITY'] +7975-280076-0019-1048: ref=['I', 'MET', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUDSPETH'] +7975-280076-0019-1048: hyp=['AMID', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUSBATH'] +7975-280076-0020-1049: ref=['WE', 'WERE', 'NOT', 'ON', 'GOOD', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0020-1049: hyp=['WE', 'WERE', 'NOT', 'ON', 'GOOD', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0021-1050: ref=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0021-1050: hyp=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0022-1051: ref=['DOCTOR', 'L', 'LEWIS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0022-1051: hyp=['DOCTOR', 'EL', 'LOUIS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0023-1052: ref=['THERE', 'WERE', 'FIFTY', 'OR', 'A', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0023-1052: hyp=['THERE', 'WERE', 'FIFTY', 'OR', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0024-1053: ref=['HELVIN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENTON', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SAT', 'IN', 'FRONT', 'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISSUS', 'FICKLE', 'AFTER', 'SERVICE'] +7975-280076-0024-1053: hyp=['HELVAN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENS', 'AND', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SET', 'IN', 'FRONT', 'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISS', 'FICKLE', 'AFTER', 'THE', 'SERVICE'] +7975-280076-0025-1054: ref=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CARROLL', 'PARISH', 'LOUISIANA'] +7975-280076-0025-1054: hyp=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CAROL', 'PARISH', 'LOUISIANA'] +7975-280076-0026-1055: ref=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0026-1055: hyp=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0027-1056: ref=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 
'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THAT', 'TIME', 'I', 'WAS', 'AT', 'NEOSHO', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'RAY'] +7975-280076-0027-1056: hyp=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THE', 'TIME', 'I', 'WAS', 'AT', 'NEOSH', 'OF', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'WRAYE'] +7975-280076-0028-1057: ref=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'ADAIR', 'IOWA', 'THAT', 'THERE', 'FIRST', 'APPEARED', 'A', 'DELIBERATE', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'IN', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280076-0028-1057: hyp=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'EIGHT', 'AIR', 'IOWA', 'THAT', 'THEIR', 'FIRST', 'APPEARED', 'A', 'DELIVERED', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'AND', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280084-0000-1090: ref=['I', 'URGED', 'ON', 'THE', 'BOYS', 'THAT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0000-1090: hyp=['I', 'URGED', 'ON', 'THE', 'BOYS', 'AT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0001-1091: ref=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRY', 'GOODS', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'THEY', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0001-1091: hyp=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRAGOOD', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'I', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0002-1092: ref=['WHEN', 'WE', 'CAME', 'UP', 'I', 'TOLD', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] +7975-280084-0002-1092: hyp=['WHEN', 'WE', 'CAME', 'UP', 'A', 'TOE', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] +7975-280084-0003-1093: ref=['J', 'S', 'ALLEN', 'WHOSE', 'HARDWARE', 'STORE', 'WAS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'AROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0003-1093: hyp=['J', 'HELEN', 'WHOSE', 'HARD', 'WORKED', 'ALWAYS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'ROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0004-1094: ref=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0004-1094: hyp=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0005-1095: ref=['AND', 'I', 'CALLED', 'TO', 'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'A', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0005-1095: hyp=['AND', 'I', 'CALL', 'TO', 
'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'THE', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0006-1096: ref=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0006-1096: hyp=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0007-1097: ref=['CHADWELL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JOINED', 'US', 'SHOUTING', 'TO', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0007-1097: hyp=['TEDWELL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JOINCE', 'SHOUTING', 'TO', 'THE', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0008-1098: ref=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0008-1098: hyp=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0009-1099: ref=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0009-1099: hyp=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0010-1100: ref=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TRY', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0010-1100: hyp=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TROT', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0011-1101: ref=['DOCTOR', 'WHEELER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0011-1101: hyp=['DOCTOR', 'WHALER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0012-1102: ref=['CHANGING', 'HIS', 'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0012-1102: hyp=['CHANGING', 'HIS', 'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0013-1103: ref=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0013-1103: hyp=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0014-1104: ref=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTHFIELD', 'NARRATOR'] +7975-280084-0014-1104: hyp=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTH', 'FIELD', 'NARRATOR'] +7975-280084-0015-1105: ref=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0015-1105: hyp=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0016-1106: ref=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'PITTS', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0016-1106: 
hyp=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'FITZ', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0017-1107: ref=['THE', 'FIRST', 'ONE', 'MISSED', 'HIM', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280084-0017-1107: hyp=['THE', 'FIRST', 'ONE', 'MISTING', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280085-0000-1071: ref=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0000-1071: hyp=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0001-1072: ref=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAMPED', 'BETWEEN', 'ELYSIAN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0001-1072: hyp=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAMPED', 'BETWEEN', 'THE', 'LUCIEN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0002-1073: ref=["BOB'S", 'SHATTERED', 'ELBOW', 'WAS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MANKATO'] +7975-280085-0002-1073: hyp=['BOB', 'SATURDAIL', 'BOWS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MAN', 'CATO'] +7975-280085-0003-1074: ref=['THAT', 'DAY', 'A', 'MAN', 'NAMED', 'DUNNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0003-1074: hyp=['THAT', 'THEY', 'A', 'MAN', 'NAMED', 'DINNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0004-1075: ref=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 'AGREED', 'NOT', 'TO'] +7975-280085-0004-1075: hyp=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 'AGREED', 'NOT', 'TO'] +7975-280085-0005-1076: ref=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POSTHASTE', 'INTO', 'MANKATO', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0005-1076: hyp=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POST', 'TASTE', 'INTO', 'MANCAO', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0006-1077: ref=['THE', 'WHISTLE', 'ON', 'THE', 'OIL', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0006-1077: hyp=['THE', 'WHISTLE', 'ON', 'THE', 'ORE', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0007-1078: ref=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 
'IT', 'PILLOWED', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0007-1078: hyp=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 'A', 'PILLART', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'A', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0008-1079: ref=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0008-1079: hyp=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0009-1080: ref=['WE', 'WERE', 'IN', 'SIGHT', 'OF', 'OUR', 'LONG', 'SOUGHT', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0009-1080: hyp=['WE', 'WERE', 'INSIDE', 'OF', 'OUR', 'LONG', 'SOWED', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0010-1081: ref=['SIX', 'STEPPED', 'TO', 'THE', 'FRONT', 'SHERIFF', 'GLISPIN', 'COLONEL', 'T', 'L', 'VOUGHT', 'B', 'M', 'RICE', 'G', 'A', 'BRADFORD', 'C', 'A', 'POMEROY', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0010-1081: hyp=['SIX', 'STEPS', 'TO', 'THE', 'FRONT', 'SHERIFF', 'CLISPIN', 'COLONEL', 'T', 'L', 'WALT', 'B', 'AND', 'RICE', 'G', 'BRADFORD', 'C', 'A', 'POMMEROI', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0011-1082: ref=['FORMING', 'IN', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0011-1082: hyp=['FORMING', 'A', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0012-1083: ref=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0012-1083: hyp=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0013-1084: ref=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'FOR', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 'LINE', 'JUST', 'CHARGE', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0013-1084: hyp=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'TILL', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 'LINE', 'JUST', 'SHARS', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0014-1085: ref=['I', 'GOT', 'UP', 'AS', 'THE', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0014-1085: hyp=['I', 'GOT', 'UP', 'AS', 'A', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0015-1086: ref=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LINE', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0015-1086: 
hyp=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LAND', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0016-1087: ref=['SHERIFF', 'GLISPIN', 'OF', 'WATONWAN', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0016-1087: hyp=['SURE', 'OF', 'GLISPIN', 'OF', 'WATERWAM', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0017-1088: ref=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTHFIELD', 'I', 'HAD', 'ELEVEN', 'WOUNDS'] +7975-280085-0017-1088: hyp=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTH', 'FIELD', 'I', 'HAD', 'ELEVEN', 'ONES'] +7975-280085-0018-1089: ref=['AND', 'SHERIFF', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTORATE', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +7975-280085-0018-1089: hyp=['IN', 'CHEER', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTOR', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +8131-117016-0000-1303: ref=['CAPTAIN', 'MURDOCH'] +8131-117016-0000-1303: hyp=['CAPTAIN', 'MURDOCK'] +8131-117016-0001-1304: ref=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0001-1304: hyp=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0002-1305: ref=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAW', 'ENFORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0002-1305: hyp=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAWN', 'FORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0003-1306: ref=['THE', 'STONEWALL', 'GANG', 'NUMBERED', 'PERHAPS', 'FIVE', 'HUNDRED'] +8131-117016-0003-1306: hyp=['THE', 'STONE', 'WALL', 'GANG', 'NUMBERED', 'PERHAPS', 'FIVE', 'HUNDRED'] +8131-117016-0004-1307: ref=['EVEN', 'DERELICTS', 'AND', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0004-1307: hyp=['EVEN', 'DEAR', 'ALEXAM', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORIES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0005-1308: ref=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RACKETEERS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0005-1308: hyp=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RAGATIRS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0006-1309: ref=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0006-1309: hyp=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 
'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0007-1310: ref=['CAPTAIN', 'MURDOCH', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0007-1310: hyp=['CAPTAIN', 'MURDOCK', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0008-1311: ref=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0008-1311: hyp=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0009-1312: ref=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0009-1312: hyp=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0010-1313: ref=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0010-1313: hyp=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0011-1314: ref=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERYONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0011-1314: hyp=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERY', 'ONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0012-1315: ref=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKEDOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONEWALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0012-1315: hyp=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKE', 'DOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONE', 'WALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0013-1316: ref=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0013-1316: hyp=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0014-1317: ref=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEAM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TONIGHT', 'PICK', 'ANY', 'ROUTE', "THAT'S", 'OPEN', 'OKAY', 'MEN', "LET'S", 'GO'] +8131-117016-0014-1317: hyp=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEEM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TO', 'NIGHT', 'PICK', 'ANY', 'ROUGH', "THAT'S", 'OPEN', 'OH', 'CAME', 'AND', "LET'S", 'GO'] +8131-117016-0015-1318: ref=['BRUCE', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MURDOCH'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0015-1318: hyp=['BRUSH', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MARDOC'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0016-1319: ref=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0016-1319: hyp=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0017-1320: ref=['FOR', 'A', 'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0017-1320: hyp=['FOR', 'A', 
'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0018-1321: ref=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0018-1321: hyp=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0019-1322: ref=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0019-1322: hyp=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0020-1323: ref=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHOR', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0020-1323: hyp=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHER', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0021-1324: ref=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'AT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0021-1324: hyp=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'IT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0022-1325: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0022-1325: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0023-1326: ref=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0023-1326: hyp=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0024-1327: ref=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0024-1327: hyp=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0025-1328: ref=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0025-1328: hyp=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0026-1329: ref=['THE', 'OTHER', 'FOUR', 'COPS', 'HAD', 'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0026-1329: hyp=['THE', 'OTHER', 'FUPS', 'HAD', 'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0027-1330: ref=['HE', 'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0027-1330: hyp=['HE', 'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0028-1331: ref=['THEY', 'ROUNDED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'COPS', 'STARTED', 'OFF'] +8131-117016-0028-1331: hyp=['THEY', 'ROUTED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'CUPS', 'STARTED', 'OFF'] +8131-117016-0029-1332: ref=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0029-1332: hyp=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0030-1333: ref=["WE'RE", 'NOT', 'USING', 'WAGONS', 'MURDOCH', 'TOLD', 'HIM', 'LINE', 'THEM', 'UP'] +8131-117016-0030-1333: hyp=['WERE', 'NOT', 'USING', 'WAGONS', 'MURDOCK', 'TOLD', 'HIM', 'LYING', 'THEM', 'UP'] +8131-117016-0031-1334: ref=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'IF', 'THEY', 
'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0031-1334: hyp=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'THAT', 'THEY', 'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0032-1335: ref=['MURDOCH', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDERS', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0032-1335: hyp=['MURDOCK', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDER', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0033-1336: ref=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', 'GORDON', 'FELT'] +8131-117016-0033-1336: hyp=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', "GORDON'S", 'FELT'] +8131-117016-0034-1337: ref=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0034-1337: hyp=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0035-1338: ref=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0035-1338: hyp=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0036-1339: ref=['MURDOCH', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TOUGHS'] +8131-117016-0036-1339: hyp=['MURDOCK', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TUFTS'] +8131-117016-0037-1340: ref=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0037-1340: hyp=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0038-1341: ref=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0038-1341: hyp=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0039-1342: ref=['MURDOCH', 'SENT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] +8131-117016-0039-1342: hyp=['MARDOX', 'SAT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] +8131-117016-0040-1343: ref=['IN', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', "WHO'D", 'BEEN', 'BEATEN', 'BEFORE'] +8131-117016-0040-1343: hyp=['AND', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', 'WHO', 'HAD', 'BEEN', 'BEATEN', 'BEFORE'] +8131-117016-0041-1344: ref=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0041-1344: hyp=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0042-1345: ref=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0042-1345: hyp=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0043-1346: ref=['NO', 'THE', 'COPS', "THEY'RE", 'GIVING', 'ME', "WE'RE", 'COVERED', 'GORDON'] +8131-117016-0043-1346: hyp=['NOW', 'THE', 'CAPS', 'ARE', 'GIVING', 'ME', "WE'RE", 'COVERED', 'GORDON'] +8131-117016-0044-1347: ref=['BUT', 'THE', 'STONEWALL', 'GANG', 'IS', 'BACKING', 'WAYNE'] +8131-117016-0044-1347: hyp=['BUT', 'THE', 'STERN', 'WALL', 'GANG', 'IS', 'BACK', 'IN', 'WAIN'] +8131-117016-0045-1348: ref=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 
'ON', 'THEM'] +8131-117016-0045-1348: hyp=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 'ON', 'THEM'] +8131-117016-0046-1349: ref=['BRUCE', 'GORDON', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0046-1349: hyp=['BRUCE', 'GORD', 'AND', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0047-1350: ref=['MURDOCH', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0047-1350: hyp=['MARDOCK', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0048-1351: ref=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAYNE', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0048-1351: hyp=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAIN', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0049-1352: ref=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'CROOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0049-1352: hyp=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'COOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0050-1353: ref=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0050-1353: hyp=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0051-1354: ref=['NO', "YOU'RE", 'A', 'FIRSTER', 'HE', "CAN'T", 'LOSE'] +8131-117016-0051-1354: hyp=['NO', 'YOU', 'ARE', 'FIRST', 'HE', "CAN'T", 'LOSE'] +8131-117016-0052-1355: ref=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLANET', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0052-1355: hyp=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLANET', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0053-1356: ref=['IT', 'FITTED', 'WITH', 'THE', 'DIRE', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'THE', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0053-1356: hyp=['YET', 'FITTED', 'WITH', 'THE', 'DIA', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'A', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0054-1357: ref=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 'AROUND', 'THE', 'CORNER'] +8131-117016-0054-1357: hyp=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 'AROUND', 'THE', 'CORNER'] +8131-117016-0055-1358: ref=['COST', 'EM', 'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0055-1358: hyp=['COSTUM', 'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0056-1359: ref=['BECAUSE', 'IZZY', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0056-1359: hyp=['BECAUSE', 'IZZIE', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0057-1360: ref=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0057-1360: hyp=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0058-1361: ref=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCH', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0058-1361: hyp=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCK', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0059-1362: ref=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0059-1362: hyp=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0060-1363: 
ref=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0060-1363: hyp=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0061-1364: ref=['BUT', 'THERE', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAYNE', 'WAS', 'RE', 'ELECTED'] +8131-117016-0061-1364: hyp=['BUT', 'THEIR', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAIN', 'WAS', 'RE', 'ELECTED'] +8131-117017-0000-1270: ref=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOR', 'BULBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0000-1270: hyp=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0001-1271: ref=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0001-1271: hyp=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0002-1272: ref=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', "HALEY'S", 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0002-1272: hyp=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', 'HALELY', 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0003-1273: ref=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARSPEAKER', 'LET', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0003-1273: hyp=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARKEER', 'LED', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0004-1274: ref=['GUNS', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0004-1274: hyp=['GUN', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0005-1275: ref=['YOU', "CAN'T", 'DO', 'IT', 'TO', 'ME'] +8131-117017-0005-1275: hyp=['YOU', "CAN'T", 'DO', 'IT', 'TO', 'ME'] +8131-117017-0006-1276: ref=["I'M", 'REFORMED', "I'M", 'GOING', 'STRAIGHT'] +8131-117017-0006-1276: hyp=['I', 'AM', 'REFORMED', "I'M", 'GOING', 'STRAIGHT'] +8131-117017-0007-1277: ref=['YOU', 'DAMNED', 'COPS', "CAN'T", "O'NEILL", 'WAS', 'BLUBBERING'] +8131-117017-0007-1277: hyp=['YOU', 'DAMNED', 'COPSE', "CAN'T", "O'NEIA", 'WAS', 'BLUBBERING'] +8131-117017-0008-1278: ref=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELLTALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THAT', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0008-1278: hyp=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELL', 'TALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THEY', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0009-1279: ref=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COP', 'WENT', 'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0009-1279: hyp=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COPP', 'WENT', 
'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0010-1280: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0010-1280: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0011-1281: ref=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0011-1281: hyp=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0012-1282: ref=['MUST', 'OF', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0012-1282: hyp=['MUST', 'HAVE', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0013-1283: ref=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0013-1283: hyp=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0014-1284: ref=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MURDOCH', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0014-1284: hyp=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MURDOCK', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0015-1285: ref=['WHATEVER', 'COMES', 'TO', 'HAND', "GOV'NOR"] +8131-117017-0015-1285: hyp=['WHATEVER', 'COMES', 'TO', 'HAND', 'GOVERNOR'] +8131-117017-0016-1286: ref=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0016-1286: hyp=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0017-1287: ref=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOPE'] +8131-117017-0017-1287: hyp=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOTE'] +8131-117017-0018-1288: ref=['YOU', 'OWE', 'ME', 'SOME', 'BILLS', "GOV'NOR"] +8131-117017-0018-1288: hyp=['YOU', 'ARE', 'ME', 'SOME', 'BILLS', "GUV'NER"] +8131-117017-0019-1289: ref=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0019-1289: hyp=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0020-1290: ref=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 'CAMPAIGN', 'FUND', 'SO', 'I', 'HADDA', 'FILL', 'IN'] +8131-117017-0020-1290: hyp=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 'CAPTAIN', 'FUND', 'SO', 'I', 'HAD', 'A', 'FILL', 'IN'] +8131-117017-0021-1291: ref=['A', 'THOUSAND', 'INTEREST', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0021-1291: hyp=['A', 'THOUSAND', 'INTEREST', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0022-1292: ref=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IZZY'] +8131-117017-0022-1292: hyp=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IS', 'HE'] +8131-117017-0023-1293: ref=['HUH', 'IZZY', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0023-1293: hyp=['HOW', 'AS', 'HE', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0024-1294: ref=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0024-1294: hyp=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 
'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0025-1295: ref=['FOR', 'A', 'SECOND', "IZZY'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0025-1295: hyp=['FOR', 'A', 'SECOND', "IZZIE'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0026-1296: ref=['HE', 'PULLED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0026-1296: hyp=['HE', 'POURED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0027-1297: ref=['THANKS', 'IZZY', 'THANKS', 'YOURSELF'] +8131-117017-0027-1297: hyp=['THANKS', 'IS', 'HE', 'THANKS', 'YOURSELF'] +8131-117017-0028-1298: ref=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0028-1298: hyp=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0029-1299: ref=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0029-1299: hyp=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0030-1300: ref=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'COPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IZZY', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USE', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'TO', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0030-1300: hyp=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'CUPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IZZIE', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USED', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'AT', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0031-1301: ref=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 'THE', 'ROOM', 'AND', 'DROPPED', 'ONTO', 'THE', 'BENCH'] +8131-117017-0031-1301: hyp=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 'THE', 'ROOM', 'AND', 'DROPPED', 'INTO', 'THE', 'BENCH'] +8131-117017-0032-1302: ref=['GO', 'ON', 'ACCEPT', 'DAMN', 'IT'] +8131-117017-0032-1302: hyp=['GO', 'ON', 'EXCEPT', 'DEAR', 'MIN'] +8131-117029-0000-1247: ref=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0000-1247: hyp=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0001-1248: ref=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0001-1248: hyp=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0002-1249: ref=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0002-1249: hyp=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0003-1250: ref=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0003-1250: hyp=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0004-1251: ref=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 
'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0004-1251: hyp=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0005-1252: ref=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0005-1252: hyp=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0006-1253: ref=["SCHULBERG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0006-1253: hyp=["SHUBERG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0007-1254: ref=['FATS', 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0007-1254: hyp=["FAT'S", 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0008-1255: ref=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLED', 'BEER'] +8131-117029-0008-1255: hyp=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLE', 'BEER'] +8131-117029-0009-1256: ref=['THOUGHT', "YOU'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0009-1256: hyp=['THOUGHT', "YE'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0010-1257: ref=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'THE', "OTHER'S", 'COMMENT', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0010-1257: hyp=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'OTHERS', 'COMMENTS', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0011-1258: ref=['GUESS', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BILL', 'THIS', 'AS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0011-1258: hyp=['GES', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BUILD', 'THIS', 'IS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0012-1259: ref=['THERE', 'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0012-1259: hyp=['THERE', 'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0013-1260: ref=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0013-1260: hyp=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0014-1261: ref=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0014-1261: hyp=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0015-1262: ref=["LET'S", 'SAY', "YOU'VE", 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0015-1262: hyp=['LET', 'SAVE', 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0016-1263: ref=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0016-1263: hyp=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0017-1264: 
ref=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0017-1264: hyp=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0018-1265: ref=['AND', "I'VE", 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGAN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0018-1265: hyp=['AND', 'I', 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGIN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0019-1266: ref=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0019-1266: hyp=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0020-1267: ref=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COBBER', 'HE', 'ASKED'] +8131-117029-0020-1267: hyp=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COBBER', 'HE', 'ASKED'] +8131-117029-0021-1268: ref=['I', 'I', 'OH', 'DRAT', 'IT', "I'M", 'GETTING', 'OLD', 'IZZY', 'YOU', 'TELL', 'HIM'] +8131-117029-0021-1268: hyp=['I', 'I', 'DREAD', 'IT', "I'M", 'GETTING', 'OLD', 'AS', 'HE', 'YOU', 'TELL', 'HIM'] +8131-117029-0022-1269: ref=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZY', 'SHOOK', 'HIS', 'HEAD'] +8131-117029-0022-1269: hyp=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZIE', 'SHOOK', 'HIS', 'HEAD'] +8188-269288-0000-2881: ref=['ANNIE', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LESLIE'] +8188-269288-0000-2881: hyp=['ANY', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LISLEY'] +8188-269288-0001-2882: ref=['LESLIE', 'DETERMINED', 'TO', 'TRY', 'FOR', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0001-2882: hyp=['LESLIE', 'DETERMINED', 'TO', 'TRIFLE', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0002-2883: ref=['HER', 'TASTES', 'ALL', 'LAY', 'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', "MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0002-2883: hyp=['HER', 'TASTES', 'ALL', 'LAY', 'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', "MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0003-2884: ref=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0003-2884: hyp=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0004-2885: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0004-2885: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0005-2886: ref=['WHY', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0005-2886: hyp=['WHY', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0006-2887: ref=['I', "WON'T", 'BE', 'THE', 'CONSTANT', 'WORRY', 'AND', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0006-2887: hyp=['I', 'WOULD', 'BE', 'THE', 'CONSTANT', 'WORRY', 'IN', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0007-2888: ref=['IT', 'IS', 
'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', "WODE'S", 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOMFELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0007-2888: hyp=['IT', 'IS', 'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', 'WORDS', 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOM', 'FELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0008-2889: ref=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0008-2889: hyp=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0009-2890: ref=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0009-2890: hyp=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0010-2891: ref=['I', 'MUST', 'PASS', 'IN', 'HONORS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0010-2891: hyp=['I', 'MUST', 'PASS', 'ON', 'HONOURS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0011-2892: ref=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0011-2892: hyp=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0012-2893: ref=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0012-2893: hyp=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0013-2894: ref=['JANE', 'HERIOT', 'STOOD', 'WITHOUT'] +8188-269288-0013-2894: hyp=['JANE', 'HERIOT', 'STOOD', 'WITHOUT'] +8188-269288-0014-2895: ref=['THESE', 'LETTERS', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANNIE', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UPSTAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0014-2895: hyp=['THESE', 'LITTLE', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANY', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UP', 'STAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0015-2896: ref=['LESLIE', 'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0015-2896: hyp=['LISLEY', 'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0016-2897: ref=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANNIE', 'COLCHESTER'] +8188-269288-0016-2897: hyp=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELLING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANY', 'COLCHESTER'] +8188-269288-0017-2898: ref=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LESLIE'] +8188-269288-0017-2898: hyp=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LIZZIE'] +8188-269288-0018-2899: ref=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0018-2899: hyp=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0019-2900: ref=['I', 'HAVE', 'BEEN', 'STARVING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0019-2900: hyp=['I', 'HAVE', 'BEEN', 
'STARLING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0020-2901: ref=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LESLIE', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0020-2901: hyp=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LINLESILY', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0021-2902: ref=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0021-2902: hyp=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0022-2903: ref=['LESLIE', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'OWN', 'LETTERS'] +8188-269288-0022-2903: hyp=['LISALLY', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'ON', 'LETTERS'] +8188-269288-0023-2904: ref=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0023-2904: hyp=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0024-2905: ref=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0024-2905: hyp=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0025-2906: ref=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0025-2906: hyp=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0026-2907: ref=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0026-2907: hyp=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0027-2908: ref=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0027-2908: hyp=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0028-2909: ref=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0028-2909: hyp=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0029-2910: ref=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0029-2910: hyp=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0030-2911: ref=['LESLIE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0030-2911: hyp=['LISLEY', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0031-2912: ref=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'WAS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0031-2912: hyp=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'IS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0032-2913: ref=['WHAT', 'CAN', 'SHE', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0032-2913: hyp=['WHAT', "CAN'T", 'YOU', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0033-2914: ref=['LESLIE', 'LEFT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANNIE', 'RETURNING'] +8188-269288-0033-2914: hyp=['THIS', 
'LILY', 'LIT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PLACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANY', 'RETURNING'] +8188-269288-0034-2915: ref=["ANNIE'S", 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0034-2915: hyp=['AND', 'HIS', 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0035-2916: ref=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LESLIE', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0035-2916: hyp=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LIZZLING', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0036-2917: ref=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0036-2917: hyp=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0037-2918: ref=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0037-2918: hyp=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0038-2919: ref=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0038-2919: hyp=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0039-2920: ref=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0039-2920: hyp=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0040-2921: ref=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0040-2921: hyp=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0041-2922: ref=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0041-2922: hyp=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0042-2923: ref=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0042-2923: hyp=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0043-2924: ref=['NOW', 'DRINK', 'THIS', 'AT', 'ONCE', 'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0043-2924: hyp=['NOW', 'DRINK', 'THIS', 'AT', 'ONCE', 'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0044-2925: ref=['ANNIE', 'STARED', 'VACANTLY', 'AT', 'THE', 'COCOA', 'THEN', 'SHE', 'UTTERED', 'A', 'LAUGH'] +8188-269288-0044-2925: hyp=['ANY', 'STEERED', 'VACANTLY', 'AT', 'THE', 'COOKER', "DIDN'T", 'UTTERED', 'A', 'LAUGH'] +8188-269288-0045-2926: ref=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0045-2926: hyp=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0046-2927: ref=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0046-2927: hyp=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0047-2928: ref=['I', 'AM', 'SLEEPY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0047-2928: hyp=['I', 'AMY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0048-2929: ref=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0048-2929: hyp=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 
'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0049-2930: ref=["CAN'T", 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0049-2930: hyp=['COULD', 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0050-2931: ref=['CERTAINLY', 'SAID', 'LESLIE'] +8188-269288-0050-2931: hyp=['CERTAINLY', 'SAID', 'IT', 'EASILY'] +8188-269288-0051-2932: ref=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'SHE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0051-2932: hyp=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'HE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0052-2933: ref=["ANNIE'S", 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0052-2933: hyp=['ANY', 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0053-2934: ref=['ANNIE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFIDE', 'IN', 'ANYONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0053-2934: hyp=['AND', 'HE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFINE', 'IN', 'ANY', 'ONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0054-2935: ref=['TIRED', 'OUT', 'LESLIE', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0054-2935: hyp=['TIE', 'IT', 'OUT', 'LIZZLEY', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0055-2936: ref=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0055-2936: hyp=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0056-2937: ref=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEY', 'WERE', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0056-2937: hyp=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEIR', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0057-2938: ref=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269288-0057-2938: hyp=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269290-0000-2823: ref=['THE', 'GUILD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0000-2823: hyp=['THE', 'GOLD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0001-2824: ref=['IMMEDIATELY', 'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0001-2824: hyp=['IMMEDIATELY', 'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0002-2825: ref=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0002-2825: hyp=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0003-2826: ref=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'IN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0003-2826: hyp=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'AN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0004-2827: ref=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0004-2827: hyp=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 
'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0005-2828: ref=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0005-2828: hyp=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0006-2829: ref=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0006-2829: hyp=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0007-2830: ref=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0007-2830: hyp=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0008-2831: ref=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0008-2831: hyp=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0009-2832: ref=['YOU', 'FRET', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0009-2832: hyp=['YOU', 'FRITTEN', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0010-2833: ref=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'ROUND', 'HER', 'HEAD', 'AND', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0010-2833: hyp=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'AROUND', 'A', 'HIDDEN', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0011-2834: ref=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANNIE', 'CALLED', 'AFTER', 'HER'] +8188-269290-0011-2834: hyp=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANY', 'CALLED', 'AFTER', 'HER'] +8188-269290-0012-2835: ref=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LESLIE', 'TOOK', 'IT', 'OUT'] +8188-269290-0012-2835: hyp=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LIZZIE', 'TOOK', 'IT', 'OUT'] +8188-269290-0013-2836: ref=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0013-2836: hyp=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0014-2837: ref=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANNIE'] +8188-269290-0014-2837: hyp=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANNIE'] +8188-269290-0015-2838: ref=['OH', 'I', "WON'T", 'LOCK', 'YOU', 'OUT', 'SHE', 'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 'KEY'] +8188-269290-0015-2838: hyp=['OH', 'I', "WON'T", 'LOOK', 'YOU', 'OUT', 'SHE', 'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 'KEY'] +8188-269290-0016-2839: ref=['JANE', "HERIOT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0016-2839: hyp=['JANE', "HERETT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0017-2840: ref=['AS', 'SHE', 'WALKED', 'DOWN', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'IN', 'THE', 'LOCK'] +8188-269290-0017-2840: hyp=['AS', 'SHE', 'WALKED', 'ROUND', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'TO', 'THE', 'LOCK'] +8188-269290-0018-2841: ref=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0018-2841: hyp=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0019-2842: ref=['OH', 'I', "WON'T", 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0019-2842: hyp=['OH', 'I', 'WOULD', 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0020-2843: ref=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] +8188-269290-0020-2843: hyp=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] 
+8188-269290-0021-2844: ref=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'EILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0021-2844: hyp=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'AILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0022-2845: ref=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0022-2845: hyp=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0023-2846: ref=['IF', 'YOU', 'ARE', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'AND', 'PAY', 'US', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0023-2846: hyp=['IF', 'YOU', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'A', 'PAIRS', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0024-2847: ref=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0024-2847: hyp=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0025-2848: ref=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0025-2848: hyp=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0026-2849: ref=["THAT'S", 'JUST', 'IT', 'JANE', 'THAT', 'IS', 'WHAT', 'FRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0026-2849: hyp=["THAT'S", 'JUST', 'A', 'CHANT', 'THAT', 'IS', 'WHAT', 'BRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0027-2850: ref=['REFUSES', 'TO', 'COME', 'SHE', 'CRIED'] +8188-269290-0027-2850: hyp=['REFUSES', 'TO', 'COME', 'SHE', 'CRIED'] +8188-269290-0028-2851: ref=['SHE', 'WILL', 'GET', 'INTO', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0028-2851: hyp=["SHE'LL", 'GET', 'IN', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0029-2852: ref=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0029-2852: hyp=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0030-2853: ref=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 'SAID', 'LESLIE'] +8188-269290-0030-2853: hyp=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 'SAID', 'LIZZIE'] +8188-269290-0031-2854: ref=['DO', 'COME', 'ANNIE', 'DO'] +8188-269290-0031-2854: hyp=['DO', 'COME', 'ANY', 'DO'] +8188-269290-0032-2855: ref=['SCARCELY', 'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0032-2855: hyp=['SCARCELY', 'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0033-2856: ref=['BUT', 'MARJORIE', 'AND', 'EILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0033-2856: hyp=['BUT', 'MARGERY', 'AND', 'AILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0034-2857: ref=['MISS', 'LAUDERDALE', 'WAS', 'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPALS', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0034-2857: hyp=['MISS', 'LAUDIDAL', 'WAS', 
'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPLES', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0035-2858: ref=['THEN', 'A', 'ROLL', 'CALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTEE', 'WAS', 'ANNIE', 'COLCHESTER'] +8188-269290-0035-2858: hyp=['THEN', 'A', 'ROCCALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTE', 'WAS', 'ANY', 'COLCHESTER'] +8188-269290-0036-2859: ref=['THE', 'PHYSICAL', 'PART', 'OF', 'YOUR', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0036-2859: hyp=['THE', 'PHYSICAL', 'PART', 'OF', 'THE', 'OLD', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0037-2860: ref=['HEAR', 'HEAR', 'AND', 'ONCE', 'AGAIN', 'HEAR'] +8188-269290-0037-2860: hyp=['HAIR', 'HAIR', 'AND', 'ONCE', 'AGAIN', 'HAIR'] +8188-269290-0038-2861: ref=['SHE', 'UTTERED', 'HER', 'STRANGE', 'REMARK', 'STANDING', 'UP'] +8188-269290-0038-2861: hyp=['SHE', 'UTTERED', 'A', 'STRAIN', 'REMARK', 'STANDING', 'UP'] +8188-269290-0039-2862: ref=['MARJORIE', 'AND', 'EILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0039-2862: hyp=['MARJORIE', 'AND', 'AILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0040-2863: ref=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELLE', 'ACHESON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0040-2863: hyp=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELL', 'ARCHISON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0041-2864: ref=['THE', 'NAMES', 'OF', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0041-2864: hyp=['THE', 'NAMES', 'OF', 'THE', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0042-2865: ref=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0042-2865: hyp=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0043-2866: ref=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0043-2866: hyp=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0044-2867: ref=['GOD', 'ANSWERS', 'TO', 'EACH', 'OF', 'YOU', 'YOU', 'ARE'] +8188-269290-0044-2867: hyp=['GOD', 'ADDEST', 'EACH', 'OF', 'YOU', 'YOU', 'ARE'] +8188-269290-0045-2868: ref=['THE', 'WORLD', 'SAYS', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'YES', 'YOU', 'ARE'] +8188-269290-0045-2868: hyp=['THE', 'WORLD', 'TEETH', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'YES', 'YOU', 'ARE'] +8188-269290-0046-2869: ref=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0046-2869: hyp=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0047-2870: ref=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'A', 'CERTAIN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0047-2870: hyp=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'EXERT', 'AN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0048-2871: ref=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0048-2871: hyp=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 
'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0049-2872: ref=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0049-2872: hyp=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0050-2873: ref=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0050-2873: hyp=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0051-2874: ref=['ANNIE', 'COLCHESTER', 'IS', 'YOUR', 'ROOMFELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0051-2874: hyp=['ANY', "COLCHESTER'S", 'ROOM', 'FELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0052-2875: ref=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0052-2875: hyp=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0053-2876: ref=['I', 'AM', 'AFRAID', 'I', 'AM', 'REPLIED', 'LESLIE', 'DISTRESS', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0053-2876: hyp=["I'M", 'AFRAID', 'I', 'AM', 'REPLIED', 'LIZZIE', 'DISTRESSED', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0054-2877: ref=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAUDERDALE', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0054-2877: hyp=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAURA', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0055-2878: ref=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0055-2878: hyp=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0056-2879: ref=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAS', 'TO', 'BE', 'PUNISHED'] +8188-269290-0056-2879: hyp=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAVE', 'TO', 'BE', 'PUNISHED'] +8188-269290-0057-2880: ref=['I', 'WILL', 'TELL', 'HER'] +8188-269290-0057-2880: hyp=['I', 'WILL', 'TELL', 'HER'] +8188-274364-0000-2811: ref=['THE', 'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEIR', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0000-2811: hyp=['THE', 'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEY', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0001-2812: ref=['IN', 'THE', 'GOVERNMENT', 'OF', 'IRELAND', 'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTIVE', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0001-2812: hyp=['THE', 'GOVERNMENT', 'OF', 'IRELAND', 
'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTED', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0002-2813: ref=['THE', 'CASE', 'OF', 'LORD', 'MOUNTNORRIS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0002-2813: hyp=['THE', 'CASE', 'OF', 'LORD', 'MONTORAS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0003-2814: ref=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHIEF', 'OFFICERS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPITAL', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0003-2814: hyp=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHIE', 'OFFICIALS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPT', 'ON', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0004-2815: ref=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHOULD', 'DISCOVER', 'IT'] +8188-274364-0004-2815: hyp=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHALL', 'DISCOVER', 'IT'] +8188-274364-0005-2816: ref=['IT', 'IS', 'NOW', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0005-2816: hyp=['IT', 'IS', 'NOW', 'A', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0006-2817: ref=['LET', 'US', 'NOT', 'TO', 'OUR', 'OWN', 'DESTRUCTION', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 'OF', 'OLD', 'RECORDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WALL', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0006-2817: hyp=['LET', 'US', 'NOT', 'TO', 'UNDISTRUC', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 'OF', 'OLD', 'RICARDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WAR', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0007-2818: ref=['HOWEVER', 'THESE', 'GENTLEMEN', 'AT', 'THE', 'BAR', 'SAY', 'THEY', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH'] +8188-274364-0007-2818: hyp=['HERBERT', 'THESE', 'GENTLEMAN', 'AT', 'THE', 'BAR', 'SO', 'THEY', 'SPEAK', 'FOR', 'THE', 'CONWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOUR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'CORNWEALTH'] +8188-274364-0008-2819: ref=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0008-2819: hyp=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0009-2820: ref=['YOUNG', 'VANE', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 
'COMMUNICATED', 'IT', 'TO', 'PYM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0009-2820: hyp=['YOUNG', 'VAIN', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 'COMMUNICATED', 'IT', 'TO', 'POEM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0010-2821: ref=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENSIVE', 'WAR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0010-2821: hyp=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENSIVE', 'FOR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0011-2822: ref=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSOLVED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8188-274364-0011-2822: hyp=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSORBED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8280-266249-0000-339: ref=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0000-339: hyp=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0001-340: ref=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0001-340: hyp=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0002-341: ref=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0002-341: hyp=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0003-342: ref=['AT', 'LENGTH', 'THE', 'LAND', 'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'AROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0003-342: hyp=['AT', 'LENGTH', 'THE', 'LAND', 
'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'ROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0004-343: ref=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0004-343: hyp=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0005-344: ref=['BESIDE', 'OURSELVES', 'ADDED', 'COUSIN', 'RONALD', 'LAUGHING'] +8280-266249-0005-344: hyp=['BESIDES', 'OURSELVES', 'ADDED', 'COUSIN', 'RANALD', 'LAUGHING'] +8280-266249-0006-345: ref=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0006-345: hyp=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0007-346: ref=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HAROLD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0007-346: hyp=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HERALD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0008-347: ref=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0008-347: hyp=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0009-348: ref=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0009-348: hyp=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0010-349: ref=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0010-349: hyp=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0011-350: ref=['I', 'DO', 'INDEED', 'THOUGH', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0011-350: hyp=['I', 'DO', 'INDEED', 'THE', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0012-351: ref=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] +8280-266249-0012-351: hyp=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] +8280-266249-0013-352: ref=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMEN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0013-352: hyp=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMAN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0014-353: ref=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 'PASSENGERS'] +8280-266249-0014-353: hyp=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ALL', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 
'PASSENGERS'] +8280-266249-0015-354: ref=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACT', 'OF', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'A', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'A', 'GRAIN', 'PICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0015-354: hyp=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACTED', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'THE', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'THE', 'GRAIN', 'PRICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0016-355: ref=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'USE', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0016-355: hyp=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'YEARS', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0017-356: ref=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0017-356: hyp=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0018-357: ref=['THAT', 'THE', 'FRENCH', 'POLYTECHNIC', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'ON', 'THE', 'MIND'] +8280-266249-0018-357: hyp=['THAT', 'THE', 'FRENCH', 'POLY', 'TECHNIC', 'AT', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'THE', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'UPON', 'THE', 'MIND'] +8280-266249-0019-358: ref=['NOTICE', 'THE', 'MULTITUDE', 'OF', 'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0019-358: hyp=['NOTICED', 'THE', 'MULTITUDE', 'OF', 'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0020-359: ref=['IN', 'A', 'SMALL', 'COUNTRY', 'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0020-359: hyp=['AND', 'A', 'SMALL', 'COUNTRY', 'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0021-360: ref=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0021-360: hyp=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0022-361: ref=['HENCE', 
'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0022-361: hyp=['HENCE', 'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0023-362: ref=['NO', 'SIR', 'WHAT', 'KNOW', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0023-362: hyp=['NO', 'SIR', 'WHAT', 'NO', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0024-363: ref=['FOR', 'YE', 'ARE', 'BOUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0024-363: hyp=['FOR', 'YOU', 'ARE', 'BOUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0025-364: ref=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0025-364: hyp=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0026-365: ref=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'HOLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0026-365: hyp=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'WHOLLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0027-366: ref=['IT', 'MUST', 'REQUIRE', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALY'] +8280-266249-0027-366: hyp=['EMMA', 'STACCOY', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALEY'] +8280-266249-0028-367: ref=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LILBURN', 'BUT', 'IF', 'THY', 'RIGHT', 'EYE', 'OFFEND', 'THEE', 'PLUCK', 'IT', 'OUT', 'AND', 'CAST', 'IT', 'FROM', 'THEE', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0028-367: hyp=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LOWBOURNE', 'BUT', 'IF', 'I', 'WRITE', 'I', 'OFFEND', 'THEE', 'PLUCK', 'IT', 'UP', 'AND', 'CAST', 'IT', 'FROM', 'ME', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0029-368: ref=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0029-368: hyp=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 
'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0030-369: ref=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THAT', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0030-369: hyp=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THE', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0031-370: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0031-370: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0032-371: ref=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0032-371: hyp=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0033-372: ref=['AND', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TO'] +8280-266249-0033-372: hyp=['AS', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TOO'] +8280-266249-0034-373: ref=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0034-373: hyp=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0035-374: ref=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'IN', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'ROSE', 'HASTILY', 'HIS', 'FACE', 'DEATHLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 'HURRYING', 'INTO', 'HIS', 'STATE', 'ROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0035-374: hyp=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'AND', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'AROSE', 'HASTILY', 'HIS', 'FACE', 'DEFTLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 'HURRYING', 'TO', 'HIS', 'STATEROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0036-375: ref=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0036-375: hyp=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0037-376: ref=['A', 'VENTRILOQUIST', 'ABOARD', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0037-376: hyp=['A', 'VENTILLA', 'QUESTED', 'BORN', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0038-377: ref=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0038-377: hyp=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0039-378: ref=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACKLEG', 'AND', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 
'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0039-378: hyp=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACK', 'LAG', 'IN', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0040-379: ref=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NONE', 'UP', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0040-379: hyp=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NO', 'NOT', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0041-380: ref=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0041-380: hyp=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0042-381: ref=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0042-381: hyp=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0043-382: ref=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0043-382: hyp=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0044-383: ref=['I', 'DINKS', 'NO', 'I', 'DINKS', 'I', 'DEACH', 'YOU', 'VON', 'LESSON', 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0044-383: hyp=['I', 'DENZ', 'NO', 'I', 'THINK', 'I', 'DID', 'YOU', 'VON', "MESS'", 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0045-384: ref=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SLUNK', 'AWAY', 'WITH', 'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0045-384: hyp=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SUNK', 'AWAY', 'WITH', 'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0046-385: ref=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALY', 'EACH', 'AT', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', "WARD'S", 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0046-385: hyp=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALEY', 'EACH', 'HAD', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', 'WORDS', 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0047-386: ref=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSION'] +8280-266249-0047-386: hyp=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSIONS'] +8280-266249-0048-387: ref=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 'MIDNIGHT'] +8280-266249-0048-387: hyp=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 
'MIDNIGHT'] +8280-266249-0049-388: ref=['THEN', 'MISTER', 'LILBURN', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0049-388: hyp=['THEN', 'MISTER', 'LOWBORN', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0050-389: ref=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'THE', 'STAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0050-389: hyp=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'MISTAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0051-390: ref=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREENBACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0051-390: hyp=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREEN', 'BACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0052-391: ref=['A', 'DEEP', 'GROAN', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0052-391: hyp=['A', 'DEEP', 'GROUND', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0053-392: ref=['BUT', 'ALL', 'WAS', 'SILENT', 'AND', 'AFTER', 'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 'SHAKE', 'OFF', 'THEIR', 'FEARS', 'WITH', 'A', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0053-392: hyp=['BUT', 'ALWAYS', 'SILENT', 'AND', 'AFTER', 'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 'SHAKE', 'OFF', 'THEIR', 'FEARS', 'FOR', 'THE', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0054-393: ref=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'WARD', 'LOOK', "WHAT'S", 'THERE', 'LOOK', 'YOURSELF'] +8280-266249-0054-393: hyp=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'TOWARD', 'LOOK', "WHAT'S", 'THERE', 'LOOKED', 'YOURSELF'] +8280-266249-0055-394: ref=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0055-394: hyp=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0056-395: ref=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0056-395: hyp=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0057-396: ref=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 
'TRIP'] +8280-266249-0057-396: hyp=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 'TRIP'] +8280-266249-0058-397: ref=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0058-397: hyp=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0059-398: ref=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DALYS', 'WERE', 'IN', 'VERY', 'STRAITENED', 'CIRCUMSTANCES'] +8280-266249-0059-398: hyp=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DAILIES', 'RAN', 'VERY', 'STRAIGHTENED', 'CIRCUMSTANCES'] +8280-266249-0060-399: ref=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0060-399: hyp=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALEY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0061-400: ref=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0061-400: hyp=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0062-401: ref=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ART', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0062-401: hyp=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ARE', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0063-402: ref=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0063-402: hyp=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0064-403: ref=['YE', 'ARE', 'ALL', 'ONE', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0064-403: hyp=['YE', 'ARE', 'ALL', 'ONE', 'AND', 'CHRIST', 'JESUS'] +8280-266249-0065-404: ref=['WE', 'FEEL', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'THAT', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] +8280-266249-0065-404: hyp=['WE', 'SEE', 'ON', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YOU', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] 
+8461-258277-0000-1649: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0000-1649: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0001-1650: ref=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MUST', 'I', 'HAVE', 'ZAYNAB', 'ALSO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0001-1650: hyp=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MY', 'STY', 'HAVE', 'THINE', 'ABBS', 'SO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0002-1651: ref=['THE', 'KNOCKER', 'REPLIED', 'KAMAR', 'DAUGHTER', 'OF', 'AZARIAH', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALI', 'OF', 'CAIRO', 'WITH', 'YOU'] +8461-258277-0002-1651: hyp=['THE', 'KNOCKER', 'REPLIED', 'COME', 'ON', 'DAUGHTER', 'VAZARRE', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALIO', 'KARA', 'WITH', 'YOU'] +8461-258277-0003-1652: ref=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0003-1652: hyp=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0004-1653: ref=['AND', 'HAVING', 'THUS', 'ISLAMISED', 'SHE', 'ASKED', 'HIM', 'DO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'AL', 'ISLAM', 'GIVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'DO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0004-1653: hyp=['AND', 'HAVING', 'THUS', 'ISLAMMISED', 'SHE', 'ASKED', 'HIM', 'TWO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'ALICELA', 'GAVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'TWO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0005-1654: ref=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0005-1654: hyp=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0006-1655: ref=['NOW', 'THE', 'CAUSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0006-1655: hyp=['NOW', 'THE', 'COURSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0007-1656: ref=['THEN', 'HE', 'SET', 'OUT', 'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0007-1656: hyp=['THEN', 'HE', 'SAT', 'OUT', 'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: ref=['SO', 'HE', 'ATE', 'AND', 'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BHANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'HIM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: hyp=['SO', 'HE', 'ATE', 'AND', 'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'THEM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTE'] +8461-258277-0009-1658: ref=['PRESENTLY', 'HASAN', 'SHUMAN', 'CAME', 'OUT', 'OF', 'A', 'CLOSET', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0009-1658: hyp=['PRESENTLY', 'HER', 'SON', 'SCHUMANN', 'CAME', 'OUT', 'OF', 'A', 'CLOTH', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0010-1659: ref=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 
'I', 'KNOW', 'WHITHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0010-1659: hyp=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 'I', 'KNOW', 'WHETHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0011-1660: ref=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0011-1660: hyp=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0012-1661: ref=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'SELLER', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0012-1661: hyp=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'CELLAR', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0013-1662: ref=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'SELLER', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCK', 'IN', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'DINARS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0013-1662: hyp=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'CELLAR', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCKING', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'HOURS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0014-1663: ref=['QUOTH', 'AL', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0014-1663: hyp=['QUOTH', 'A', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0015-1664: ref=['SO', 'ALI', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'HAD', 'PASSED', 'FROM', 'FIRST', 'TO', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0015-1664: hyp=['SO', 'I', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'PASSED', 'FROM', 'FIRST', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0016-1665: ref=['HE', 'REPLIED', 'I', 'HAVE', 'FORTY', 'LADS', 'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-258277-0016-1665: hyp=['HE', 'REPLIED', 'I', 'HAVE', 'FORTY', 'LADS', 'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-278226-0000-1633: ref=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0000-1633: hyp=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0001-1634: ref=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'HER', 'TALENTS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SERMON', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 'DEBATED', 'THE', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', "JOUVIN'S", 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SINFUL', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PARDONABLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0001-1634: hyp=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'ITALIANS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SAME', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 
'DEBATED', 'A', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', 'JUBANCE', 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SENT', 'FOR', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PURCHANGLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0002-1635: ref=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMIZE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0002-1635: hyp=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMISE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0003-1636: ref=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0003-1636: hyp=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0004-1637: ref=['I', 'REMEMBER', 'ALL', 'THE', 'RUBENSES', 'AT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0004-1637: hyp=['I', 'REMEMBER', 'ALL', 'THE', 'REUBEN', 'SAYS', 'THAT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0005-1638: ref=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'PEARL', 'GREY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 'AZURE', 'BUTTERFLIES'] +8461-278226-0005-1638: hyp=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'POOR', 'GRAY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 'AZURE', 'BUTTERFLIES'] +8461-278226-0006-1639: ref=['IT', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0006-1639: hyp=['HE', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0007-1640: ref=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0007-1640: hyp=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0008-1641: ref=['PHILIP', 'JOCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THIS', 'DISCOVERY'] +8461-278226-0008-1641: hyp=['PHILIP', 
'JOSCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THE', 'DISCOVERY'] +8461-278226-0009-1642: ref=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0009-1642: hyp=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0010-1643: ref=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'AT', "JOCELYN'S", 'ROCK'] +8461-278226-0010-1643: hyp=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'A', 'JOCELYN', 'STROKE'] +8461-278226-0011-1644: ref=['SHE', 'TURNED', 'TO', 'THE', 'FRENCH', 'ARTIST', 'PRESENTLY', 'AND', 'ASKED', 'HIM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'KERSTALL', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0011-1644: hyp=['SHE', 'TURNED', 'TO', 'THE', 'FRENCHARD', 'THIS', 'PRESENTLY', 'AND', 'ASKED', 'THEM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'CURSON', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0012-1645: ref=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0012-1645: hyp=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0013-1646: ref=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'IS', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0013-1646: hyp=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0014-1647: ref=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 'THE', 'HOUSE'] +8461-278226-0014-1647: hyp=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 'THE', 'HOUSE'] +8461-278226-0015-1648: ref=['YOU', 'WILL', 'BE', 'DOING', 'ME', 'SUCH', 'A', 'FAVOUR', 'PHILIP', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-278226-0015-1648: hyp=['YOU', 'WERE', 'BETWEEN', 'ME', 'SUCH', 'A', 'FAVOUR', 'FELLOW', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-281231-0000-1594: ref=['HIS', 'FOLLOWERS', 'RUSHED', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THEIR', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'KNIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THEIR', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0000-1594: hyp=['HIS', 'FOLLOWERS', 'RUSH', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THE', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'NIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THE', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0001-1595: ref=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'CEDRIC', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE'] +8461-281231-0001-1595: 
hyp=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'SEDRRICK', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'COUNCIL', 'OF', 'TORCHLESTONE'] +8461-281231-0002-1596: ref=['AS', 'HE', 'LAY', 'UPON', 'HIS', 'BED', 'RACKED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'THE', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0002-1596: hyp=['I', 'SEE', 'LAY', 'UPON', 'HIS', 'BED', 'WRAPPED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'THE', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0003-1597: ref=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0003-1597: hyp=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0004-1598: ref=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WITCH', 'ULRICA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'TASTE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0004-1598: hyp=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WHICH', 'OIKA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'CASE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0005-1599: ref=['EXCLAIMED', 'THE', 'NORMAN', 'HO'] +8461-281231-0005-1599: hyp=['EXCLAIMED', 'THE', 'NORMAN', 'OH'] +8461-281231-0006-1600: ref=['REMEMBEREST', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'IS', 'STORED', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0006-1600: hyp=['REMEMBER', 'AS', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'IS', 'STOLE', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0007-1601: ref=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'ULRICA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TO', 'WARN', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0007-1601: hyp=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'EUREKA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TOWARD', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0008-1602: ref=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0008-1602: hyp=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0009-1603: ref=['THE', 'DEFENDERS', 'FINDING', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DEARLY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 'BY', 'DE', 'BRACY', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0009-1603: hyp=['THE', 'DEFENDERS', 'FIND', 'IN', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DAILY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 
'BY', 'THE', 'BRACEE', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0010-1604: ref=['THE', 'BLACK', 'KNIGHT', 'WITH', 'PORTENTOUS', 'STRENGTH', 'FORCED', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'DE', 'BRACY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0010-1604: hyp=['THE', 'BLACK', 'NIGHT', 'WITH', 'POTENT', 'OF', 'STRENGTH', 'FORCE', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'THE', 'BRAZY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0011-1605: ref=['TWO', 'OF', 'THE', 'FOREMOST', 'INSTANTLY', 'FELL', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THEIR', 'LEADERS', 'EFFORTS', 'TO', 'STOP', 'THEM'] +8461-281231-0011-1605: hyp=['TWO', 'OF', 'THE', 'FOREMOST', 'THING', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THE', "LEADER'S", 'EFFORT', 'TO', 'STOP', 'THEM'] +8461-281231-0012-1606: ref=['THE', 'BLACK', 'KNIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'THE', 'VAULTED', 'ROOF', 'OF', 'THE', 'HALL', 'RUNG', 'WITH', 'THEIR', 'FURIOUS', 'BLOWS'] +8461-281231-0012-1606: hyp=['THE', 'BLACK', 'NIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'THE', 'VAULTED', 'ROOF', 'OF', 'THE', 'HALL', 'RANG', 'WITH', 'THE', 'FURIOUS', 'BLOWS'] +8461-281231-0013-1607: ref=['AT', 'LENGTH', 'DE', 'BRACY', 'FELL'] +8461-281231-0013-1607: hyp=['AT', 'LENGTH', 'THE', 'BRACEY', 'FELL'] +8461-281231-0014-1608: ref=['TELL', 'ME', 'THY', 'NAME', 'OR', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0014-1608: hyp=['TELL', 'ME', 'THY', 'NAME', 'A', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0015-1609: ref=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DE', 'BRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0015-1609: hyp=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DEBRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0016-1610: ref=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0016-1610: hyp=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0017-1611: ref=['THE', 'LIFE', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0017-1611: hyp=['THE', 'LIFE', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0018-1612: ref=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'HIM', 'TO', 'THE', 'POSTERN', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YEOMEN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THE', 'OTHER', 'PRISONERS'] +8461-281231-0018-1612: hyp=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'THEM', 'TO', 'THE', 'PASSING', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YEOMAN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THEIR', 'PRISONERS'] +8461-281231-0019-1613: ref=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 
'SATIATED', 'IN', 'THEIR', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FRONT', 'DE', 'BOEUF'] +8461-281231-0019-1613: hyp=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 'SATIATED', 'IN', 'THE', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FROM', 'DE', 'BOEUF'] +8461-281231-0020-1614: ref=['AS', 'THE', 'FIRE', 'COMMENCED', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'ULRICA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0020-1614: hyp=['AS', 'THE', 'FIRE', 'COMMANDS', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'OR', 'RICHA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0021-1615: ref=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'HAD', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'ONE', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEN', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWER', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0021-1615: hyp=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'HAD', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'WHEN', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEMED', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWERED', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0022-1616: ref=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TURRET', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'THE', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0022-1616: hyp=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TOWER', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0023-1617: ref=['WHEN', 'THE', 'OUTLAWS', 'HAD', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE', 'CEDRIC', 'PREPARED', 'TO', 'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0023-1617: hyp=['WHEN', 'THE', 'OUTLAWS', 'ARE', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 'THE', 'CASTLE', 'OF', 'TORCHLESTONE', 'CEDRIC', 'PREPARED', 'TO', 'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0024-1618: ref=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONINGSBURGH', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 'THE', 'BODY', 'OF', 'ATHELSTANE', 'WAS', 'LAID'] +8461-281231-0024-1618: hyp=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONIGSBURG', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 
'THE', 'BODY', 'OF', 'ADDLESTEIN', 'WAS', 'LAID'] +8461-281231-0025-1619: ref=['DE', 'BRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOOD'] +8461-281231-0025-1619: hyp=['DEBRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOODS'] +8461-281231-0026-1620: ref=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0026-1620: hyp=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0027-1621: ref=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0027-1621: hyp=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0028-1622: ref=['DURING', 'ALL', 'THIS', 'TIME', 'ISAAC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'DEARLY', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0028-1622: hyp=['DURING', 'ALL', 'THIS', 'TIME', 'MISERC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'STILLIE', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0029-1623: ref=['AND', 'WITH', 'THIS', 'EPISTLE', 'THE', 'UNHAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0029-1623: hyp=['AND', 'WITH', 'THIS', 'EPISTLE', 'THEN', 'HAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0030-1624: ref=['THE', 'TEMPLAR', 'IS', 'FLED', 'SAID', 'DE', 'BRACY', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FRONT', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'RICHARD', 'IS', 'IN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0030-1624: hyp=['THE', 'TEMPLE', 'IS', 'FLED', 'SAID', 'THE', 'BRACEE', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FROM', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'WRETCHED', 'IS', 'AN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0031-1625: ref=['HE', 'APPEALED', 'TO', 'DE', 'BRACY', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "KNIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0031-1625: hyp=['HE', 'APPEARED', 'TO', 'THE', 'BRACELET', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "NIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0032-1626: ref=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'TOLD', 'THAT', 'LUCAS', 'DE', 'BEAUMANOIR', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] 
+8461-281231-0032-1626: hyp=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'STOLE', 'THAT', 'LUCAS', 'THE', 'BURMANOIS', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] +8461-281231-0033-1627: ref=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'OF', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0033-1627: hyp=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'TO', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0034-1628: ref=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0034-1628: hyp=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0035-1629: ref=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'HER', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0035-1629: hyp=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'HER', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0036-1630: ref=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0036-1630: hyp=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0037-1631: ref=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 'A', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'IT', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0037-1631: hyp=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 'THE', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0038-1632: ref=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THEIR', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] +8461-281231-0038-1632: hyp=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THE', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] diff --git a/log/greedy_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..a398478c9c6708973a836de892f3b394b9acc942 --- /dev/null +++ 
b/log/greedy_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +greedy_search 3.58 diff --git a/log/greedy_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt b/log/greedy_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..eee8ebc0b2569d2504970e56df9c97ebb10a5546 --- /dev/null +++ b/log/greedy_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-context-2-max-sym-per-frame-1-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +greedy_search 9.29 diff --git a/log/log-train-2023-03-31-18-51-54-0 b/log/log-train-2023-03-31-18-51-54-0 new file mode 100644 index 0000000000000000000000000000000000000000..623391716329b0802dc1151b0478c2bd502b1a9f --- /dev/null +++ b/log/log-train-2023-03-31-18-51-54-0 @@ -0,0 +1,25561 @@ +2023-03-31 18:51:54,767 INFO [train.py:975] (0/4) Training started +2023-03-31 18:51:54,771 INFO [train.py:985] (0/4) Device: cuda:0 +2023-03-31 18:51:54,825 INFO [train.py:994] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r2n03', 'IP address': '10.1.2.3'}, 'world_size': 4, 'master_port': 54321, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 10, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 
'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-31 18:51:54,826 INFO [train.py:996] (0/4) About to create model +2023-03-31 18:51:55,663 INFO [zipformer.py:405] (0/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. +2023-03-31 18:51:55,687 INFO [train.py:1000] (0/4) Number of model parameters: 20697573 +2023-03-31 18:52:03,347 INFO [train.py:1019] (0/4) Using DDP +2023-03-31 18:52:03,649 INFO [asr_datamodule.py:429] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts, combined with their reverberated versions +2023-03-31 18:52:03,689 INFO [asr_datamodule.py:224] (0/4) Enable MUSAN +2023-03-31 18:52:03,689 INFO [asr_datamodule.py:225] (0/4) About to get Musan cuts +2023-03-31 18:52:05,928 INFO [asr_datamodule.py:249] (0/4) Enable SpecAugment +2023-03-31 18:52:05,928 INFO [asr_datamodule.py:250] (0/4) Time warp factor: 80 +2023-03-31 18:52:05,928 INFO [asr_datamodule.py:260] (0/4) Num frame mask: 10 +2023-03-31 18:52:05,928 INFO [asr_datamodule.py:273] (0/4) About to create train dataset +2023-03-31 18:52:05,928 INFO [asr_datamodule.py:300] (0/4) Using DynamicBucketingSampler. +2023-03-31 18:52:08,259 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:09,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:09,538 INFO [asr_datamodule.py:315] (0/4) About to create train dataloader +2023-03-31 18:52:09,538 INFO [asr_datamodule.py:440] (0/4) About to get dev-clean cuts +2023-03-31 18:52:09,540 INFO [asr_datamodule.py:447] (0/4) About to get dev-other cuts +2023-03-31 18:52:09,540 INFO [asr_datamodule.py:346] (0/4) About to create dev dataset +2023-03-31 18:52:09,987 INFO [asr_datamodule.py:363] (0/4) About to create dev dataloader +2023-03-31 18:52:23,164 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:24,169 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:36,261 INFO [train.py:903] (0/4) Epoch 1, batch 0, loss[loss=7.146, simple_loss=6.465, pruned_loss=6.793, over 19712.00 frames. ], tot_loss[loss=7.146, simple_loss=6.465, pruned_loss=6.793, over 19712.00 frames. ], batch size: 46, lr: 2.50e-02, grad_scale: 2.0 +2023-03-31 18:52:36,262 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 18:52:49,139 INFO [train.py:937] (0/4) Epoch 1, validation: loss=6.888, simple_loss=6.229, pruned_loss=6.575, over 944034.00 frames. +2023-03-31 18:52:49,140 INFO [train.py:938] (0/4) Maximum memory allocated so far is 11725MB +2023-03-31 18:53:03,044 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 18:53:58,990 INFO [train.py:903] (0/4) Epoch 1, batch 50, loss[loss=1.375, simple_loss=1.22, pruned_loss=1.381, over 17522.00 frames. ], tot_loss[loss=2.154, simple_loss=1.946, pruned_loss=2.001, over 868928.52 frames. 
], batch size: 101, lr: 2.75e-02, grad_scale: 0.125 +2023-03-31 18:54:00,549 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 18:54:26,751 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.4230, 4.4169, 4.4217, 4.4202, 4.4120, 4.4227, 4.4136, 4.4169], + device='cuda:0'), covar=tensor([0.0023, 0.0099, 0.0043, 0.0073, 0.0042, 0.0048, 0.0045, 0.0039], + device='cuda:0'), in_proj_covar=tensor([0.0014, 0.0014, 0.0014, 0.0015, 0.0014, 0.0014, 0.0015, 0.0014], + device='cuda:0'), out_proj_covar=tensor([9.4309e-06, 9.5826e-06, 9.4325e-06, 9.2044e-06, 9.7229e-06, 9.3247e-06, + 9.7607e-06, 9.4217e-06], device='cuda:0') +2023-03-31 18:54:36,661 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 18:54:41,866 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 18:54:51,554 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=17.36 vs. limit=2.0 +2023-03-31 18:55:11,351 INFO [train.py:903] (0/4) Epoch 1, batch 100, loss[loss=1.066, simple_loss=0.9158, pruned_loss=1.191, over 19727.00 frames. ], tot_loss[loss=1.62, simple_loss=1.442, pruned_loss=1.606, over 1527941.54 frames. ], batch size: 51, lr: 3.00e-02, grad_scale: 0.25 +2023-03-31 18:55:11,538 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 18:55:17,834 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.479e+01 1.678e+02 3.237e+02 1.260e+03 8.630e+04, threshold=6.475e+02, percent-clipped=0.0 +2023-03-31 18:55:25,890 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 18:55:46,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=21.98 vs. limit=2.0 +2023-03-31 18:56:16,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=7.37 vs. limit=2.0 +2023-03-31 18:56:20,069 INFO [train.py:903] (0/4) Epoch 1, batch 150, loss[loss=1.071, simple_loss=0.911, pruned_loss=1.156, over 19727.00 frames. ], tot_loss[loss=1.389, simple_loss=1.22, pruned_loss=1.424, over 2038834.94 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 0.25 +2023-03-31 18:56:21,697 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=3.96 vs. limit=2.0 +2023-03-31 18:56:46,162 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=59.42 vs. limit=5.0 +2023-03-31 18:57:32,417 INFO [train.py:903] (0/4) Epoch 1, batch 200, loss[loss=0.8904, simple_loss=0.7512, pruned_loss=0.9287, over 19058.00 frames. ], tot_loss[loss=1.242, simple_loss=1.081, pruned_loss=1.281, over 2447046.74 frames. ], batch size: 42, lr: 3.50e-02, grad_scale: 0.5 +2023-03-31 18:57:32,453 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 18:57:39,439 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.067e+01 1.186e+02 1.653e+02 2.090e+02 5.158e+02, threshold=3.307e+02, percent-clipped=0.0 +2023-03-31 18:57:50,595 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=10.94 vs. limit=5.0 +2023-03-31 18:58:03,185 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=12.78 vs. 
limit=5.0 +2023-03-31 18:58:15,856 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=9.66 vs. limit=5.0 +2023-03-31 18:58:43,202 INFO [train.py:903] (0/4) Epoch 1, batch 250, loss[loss=0.9894, simple_loss=0.8377, pruned_loss=0.9593, over 18157.00 frames. ], tot_loss[loss=1.154, simple_loss=0.9975, pruned_loss=1.181, over 2756766.97 frames. ], batch size: 83, lr: 3.75e-02, grad_scale: 0.5 +2023-03-31 18:58:45,588 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.08 vs. limit=2.0 +2023-03-31 18:59:35,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9899, 3.9901, 3.9886, 3.9893, 3.9897, 3.9906, 3.9848, 3.9905], + device='cuda:0'), covar=tensor([0.0035, 0.0045, 0.0049, 0.0054, 0.0051, 0.0049, 0.0057, 0.0049], + device='cuda:0'), in_proj_covar=tensor([0.0013, 0.0013, 0.0013, 0.0014, 0.0013, 0.0013, 0.0014, 0.0013], + device='cuda:0'), out_proj_covar=tensor([9.0738e-06, 9.0535e-06, 9.0144e-06, 8.7269e-06, 8.9411e-06, 8.9467e-06, + 8.8238e-06, 8.7904e-06], device='cuda:0') +2023-03-31 18:59:51,886 INFO [train.py:903] (0/4) Epoch 1, batch 300, loss[loss=0.8041, simple_loss=0.6691, pruned_loss=0.7872, over 19816.00 frames. ], tot_loss[loss=1.095, simple_loss=0.9391, pruned_loss=1.108, over 2991484.96 frames. ], batch size: 49, lr: 4.00e-02, grad_scale: 1.0 +2023-03-31 18:59:56,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.717e+01 1.166e+02 1.521e+02 1.991e+02 3.277e+02, threshold=3.043e+02, percent-clipped=0.0 +2023-03-31 18:59:58,372 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=306.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:00:09,458 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=314.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:00:58,531 INFO [train.py:903] (0/4) Epoch 1, batch 350, loss[loss=0.9322, simple_loss=0.7719, pruned_loss=0.884, over 19676.00 frames. ], tot_loss[loss=1.057, simple_loss=0.9003, pruned_loss=1.054, over 3165631.78 frames. ], batch size: 53, lr: 4.25e-02, grad_scale: 1.0 +2023-03-31 19:01:05,504 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 19:02:08,281 INFO [train.py:903] (0/4) Epoch 1, batch 400, loss[loss=0.8909, simple_loss=0.7342, pruned_loss=0.8213, over 19745.00 frames. ], tot_loss[loss=1.023, simple_loss=0.8651, pruned_loss=1.004, over 3318503.55 frames. ], batch size: 51, lr: 4.50e-02, grad_scale: 2.0 +2023-03-31 19:02:13,389 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.278e+02 1.546e+02 1.978e+02 5.474e+02, threshold=3.091e+02, percent-clipped=7.0 +2023-03-31 19:02:13,648 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=405.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:02:33,547 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=421.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:03:05,037 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=445.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:03:07,167 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=5.48 vs. limit=5.0 +2023-03-31 19:03:12,886 INFO [train.py:903] (0/4) Epoch 1, batch 450, loss[loss=0.9719, simple_loss=0.8015, pruned_loss=0.8608, over 18138.00 frames. ], tot_loss[loss=0.9995, simple_loss=0.8406, pruned_loss=0.9633, over 3437746.07 frames. 
], batch size: 83, lr: 4.75e-02, grad_scale: 2.0 +2023-03-31 19:03:26,470 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8292, 3.4111, 4.8035, 3.3214, 5.9386, 3.2284, 5.9385, 2.9695], + device='cuda:0'), covar=tensor([0.0049, 0.1632, 0.0288, 0.1983, 0.0055, 0.1800, 0.0054, 0.1742], + device='cuda:0'), in_proj_covar=tensor([0.0016, 0.0018, 0.0016, 0.0019, 0.0015, 0.0018, 0.0016, 0.0018], + device='cuda:0'), out_proj_covar=tensor([1.0884e-05, 1.2206e-05, 1.1148e-05, 1.1953e-05, 1.0952e-05, 1.1976e-05, + 1.0788e-05, 1.3087e-05], device='cuda:0') +2023-03-31 19:03:46,825 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6804, 4.6723, 4.6801, 4.6671, 4.5651, 4.6717, 4.6791, 4.6772], + device='cuda:0'), covar=tensor([0.0088, 0.0064, 0.0084, 0.0071, 0.0091, 0.0083, 0.0096, 0.0082], + device='cuda:0'), in_proj_covar=tensor([0.0014, 0.0013, 0.0014, 0.0013, 0.0014, 0.0013, 0.0014, 0.0014], + device='cuda:0'), out_proj_covar=tensor([9.1936e-06, 8.7371e-06, 9.0572e-06, 8.9951e-06, 9.5862e-06, 9.0783e-06, + 8.8703e-06, 9.3240e-06], device='cuda:0') +2023-03-31 19:03:49,660 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 19:03:51,779 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 19:04:19,402 INFO [train.py:903] (0/4) Epoch 1, batch 500, loss[loss=1.004, simple_loss=0.8314, pruned_loss=0.8508, over 18132.00 frames. ], tot_loss[loss=0.981, simple_loss=0.8219, pruned_loss=0.9235, over 3518521.20 frames. ], batch size: 83, lr: 4.99e-02, grad_scale: 2.0 +2023-03-31 19:04:22,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=4.04 vs. limit=2.0 +2023-03-31 19:04:25,197 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.386e+02 1.860e+02 2.529e+02 4.736e+02, threshold=3.719e+02, percent-clipped=12.0 +2023-03-31 19:05:06,540 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6726, 1.3132, 1.6530, 1.2703, 2.1970, 1.7914, 1.1416, 1.4102], + device='cuda:0'), covar=tensor([0.6733, 0.8468, 0.6514, 0.9552, 0.5075, 0.6402, 0.6547, 0.7141], + device='cuda:0'), in_proj_covar=tensor([0.0041, 0.0043, 0.0040, 0.0044, 0.0035, 0.0046, 0.0041, 0.0040], + device='cuda:0'), out_proj_covar=tensor([2.6551e-05, 2.9653e-05, 2.5329e-05, 2.3989e-05, 2.1307e-05, 2.8475e-05, + 2.4338e-05, 2.2266e-05], device='cuda:0') +2023-03-31 19:05:27,834 INFO [train.py:903] (0/4) Epoch 1, batch 550, loss[loss=0.901, simple_loss=0.7521, pruned_loss=0.7258, over 19530.00 frames. ], tot_loss[loss=0.9627, simple_loss=0.805, pruned_loss=0.8816, over 3589697.23 frames. ], batch size: 54, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:05:40,607 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=560.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:03,851 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=580.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:06:12,412 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=586.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:06:32,552 INFO [train.py:903] (0/4) Epoch 1, batch 600, loss[loss=0.8931, simple_loss=0.7527, pruned_loss=0.6844, over 19674.00 frames. ], tot_loss[loss=0.9393, simple_loss=0.7856, pruned_loss=0.8344, over 3657401.10 frames. 
], batch size: 60, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:06:36,898 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 2.910e+02 4.086e+02 6.136e+02 1.097e+03, threshold=8.173e+02, percent-clipped=60.0 +2023-03-31 19:06:40,860 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=608.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:07:11,480 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 19:07:13,974 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.92 vs. limit=2.0 +2023-03-31 19:07:38,076 INFO [train.py:903] (0/4) Epoch 1, batch 650, loss[loss=0.6846, simple_loss=0.581, pruned_loss=0.5043, over 19778.00 frames. ], tot_loss[loss=0.9158, simple_loss=0.7673, pruned_loss=0.7881, over 3686324.51 frames. ], batch size: 47, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:07:47,314 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=658.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:08:11,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=677.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:41,839 INFO [train.py:903] (0/4) Epoch 1, batch 700, loss[loss=0.7609, simple_loss=0.6523, pruned_loss=0.5364, over 19533.00 frames. ], tot_loss[loss=0.886, simple_loss=0.7446, pruned_loss=0.7384, over 3721873.14 frames. ], batch size: 54, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:08:42,981 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=702.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:46,604 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.173e+02 6.580e+02 8.914e+02 3.039e+03, threshold=1.316e+03, percent-clipped=29.0 +2023-03-31 19:09:04,075 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.99 vs. limit=2.0 +2023-03-31 19:09:43,554 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=749.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:09:45,317 INFO [train.py:903] (0/4) Epoch 1, batch 750, loss[loss=0.7706, simple_loss=0.6647, pruned_loss=0.5266, over 19657.00 frames. ], tot_loss[loss=0.8585, simple_loss=0.724, pruned_loss=0.6927, over 3741912.36 frames. ], batch size: 60, lr: 4.97e-02, grad_scale: 2.0 +2023-03-31 19:10:09,419 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1803, 0.8552, 1.0174, 1.0734, 1.2735, 1.7623, 0.9972, 1.2472], + device='cuda:0'), covar=tensor([0.9400, 1.4627, 1.5910, 1.2003, 0.5534, 1.1186, 1.2665, 0.9890], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0099, 0.0107, 0.0091, 0.0061, 0.0112, 0.0090, 0.0087], + device='cuda:0'), out_proj_covar=tensor([5.3175e-05, 6.5926e-05, 6.9401e-05, 5.2876e-05, 3.5719e-05, 7.4215e-05, + 5.5713e-05, 5.1770e-05], device='cuda:0') +2023-03-31 19:10:14,495 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=773.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:10:48,969 INFO [train.py:903] (0/4) Epoch 1, batch 800, loss[loss=0.7628, simple_loss=0.6544, pruned_loss=0.52, over 19668.00 frames. ], tot_loss[loss=0.8324, simple_loss=0.7045, pruned_loss=0.6515, over 3768970.45 frames. 
], batch size: 55, lr: 4.97e-02, grad_scale: 4.0 +2023-03-31 19:10:53,085 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.827e+02 7.991e+02 1.030e+03 2.888e+03, threshold=1.598e+03, percent-clipped=14.0 +2023-03-31 19:11:01,626 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 19:11:08,320 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=816.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:11:39,603 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=841.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:11:40,561 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 19:11:52,174 INFO [train.py:903] (0/4) Epoch 1, batch 850, loss[loss=0.6531, simple_loss=0.564, pruned_loss=0.4328, over 19771.00 frames. ], tot_loss[loss=0.8074, simple_loss=0.6858, pruned_loss=0.614, over 3773175.14 frames. ], batch size: 47, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:10,469 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=864.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:12:11,228 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=865.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:12:15,097 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-31 19:12:43,328 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 19:12:43,565 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5622, 2.0142, 1.3027, 2.9773, 3.7648, 2.0498, 3.2785, 3.4262], + device='cuda:0'), covar=tensor([0.3922, 0.7327, 1.3081, 0.3319, 0.3507, 1.0428, 0.3158, 0.2801], + device='cuda:0'), in_proj_covar=tensor([0.0063, 0.0071, 0.0096, 0.0058, 0.0072, 0.0090, 0.0074, 0.0059], + device='cuda:0'), out_proj_covar=tensor([3.9235e-05, 5.0024e-05, 6.6715e-05, 3.8274e-05, 4.3811e-05, 6.2202e-05, + 4.4756e-05, 4.0852e-05], device='cuda:0') +2023-03-31 19:12:46,097 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5870, 1.2061, 1.7887, 1.3091, 1.8403, 1.7956, 1.6903, 1.6813], + device='cuda:0'), covar=tensor([0.3519, 0.5668, 0.2932, 0.4128, 0.3150, 0.2355, 0.3173, 0.3253], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0064, 0.0057, 0.0063, 0.0061, 0.0052, 0.0061, 0.0054], + device='cuda:0'), out_proj_covar=tensor([3.8451e-05, 4.6669e-05, 3.4982e-05, 4.4182e-05, 3.7468e-05, 3.0544e-05, + 3.8210e-05, 3.4099e-05], device='cuda:0') +2023-03-31 19:12:54,615 INFO [train.py:903] (0/4) Epoch 1, batch 900, loss[loss=0.7565, simple_loss=0.6521, pruned_loss=0.497, over 19308.00 frames. ], tot_loss[loss=0.7855, simple_loss=0.6699, pruned_loss=0.5804, over 3784655.08 frames. 
], batch size: 66, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:59,599 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 6.072e+02 7.456e+02 9.579e+02 1.181e+04, threshold=1.491e+03, percent-clipped=3.0 +2023-03-31 19:13:21,564 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=924.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:30,344 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=930.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:53,706 INFO [train.py:903] (0/4) Epoch 1, batch 950, loss[loss=0.6747, simple_loss=0.5881, pruned_loss=0.4289, over 19495.00 frames. ], tot_loss[loss=0.7674, simple_loss=0.6569, pruned_loss=0.5519, over 3792702.56 frames. ], batch size: 49, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:13:53,741 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 19:13:55,996 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=952.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:14:00,790 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.97 vs. limit=5.0 +2023-03-31 19:14:51,788 INFO [train.py:903] (0/4) Epoch 1, batch 1000, loss[loss=0.673, simple_loss=0.593, pruned_loss=0.415, over 17676.00 frames. ], tot_loss[loss=0.7484, simple_loss=0.643, pruned_loss=0.5251, over 3798892.75 frames. ], batch size: 102, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:14:56,987 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 5.980e+02 7.509e+02 1.052e+03 2.029e+03, threshold=1.502e+03, percent-clipped=4.0 +2023-03-31 19:15:14,919 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2826, 1.4962, 1.3002, 2.1021, 1.6819, 2.3933, 2.5087, 1.4233], + device='cuda:0'), covar=tensor([0.4102, 0.9753, 1.0359, 0.4361, 1.4447, 0.4055, 0.3919, 0.8111], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0057, 0.0071, 0.0044, 0.0081, 0.0044, 0.0046, 0.0051], + device='cuda:0'), out_proj_covar=tensor([2.5678e-05, 4.0111e-05, 5.0702e-05, 2.8625e-05, 6.0590e-05, 2.5738e-05, + 2.7663e-05, 3.5199e-05], device='cuda:0') +2023-03-31 19:15:25,830 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1029.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:15:38,832 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1039.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:15:41,741 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 19:15:45,014 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1045.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:52,666 INFO [train.py:903] (0/4) Epoch 1, batch 1050, loss[loss=0.5841, simple_loss=0.516, pruned_loss=0.3555, over 19366.00 frames. ], tot_loss[loss=0.7296, simple_loss=0.6293, pruned_loss=0.5002, over 3791228.60 frames. ], batch size: 47, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:15:56,716 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1054.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:12,357 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1067.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:20,733 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-03-31 19:16:53,093 INFO [train.py:903] (0/4) Epoch 1, batch 1100, loss[loss=0.5984, simple_loss=0.5341, pruned_loss=0.355, over 19766.00 frames. ], tot_loss[loss=0.7141, simple_loss=0.6181, pruned_loss=0.479, over 3787483.63 frames. ], batch size: 51, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:16:57,405 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.036e+02 7.117e+02 8.563e+02 1.068e+03 2.368e+03, threshold=1.713e+03, percent-clipped=4.0 +2023-03-31 19:17:14,920 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1120.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:17:23,436 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-31 19:17:44,289 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1145.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:17:51,776 INFO [train.py:903] (0/4) Epoch 1, batch 1150, loss[loss=0.6175, simple_loss=0.544, pruned_loss=0.3721, over 19708.00 frames. ], tot_loss[loss=0.6988, simple_loss=0.6071, pruned_loss=0.4593, over 3789169.86 frames. ], batch size: 51, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:18:13,009 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1171.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:18:47,616 INFO [train.py:903] (0/4) Epoch 1, batch 1200, loss[loss=0.5968, simple_loss=0.5419, pruned_loss=0.3398, over 19526.00 frames. ], tot_loss[loss=0.6888, simple_loss=0.5999, pruned_loss=0.4448, over 3797569.53 frames. ], batch size: 56, lr: 4.93e-02, grad_scale: 8.0 +2023-03-31 19:18:52,223 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 7.433e+02 9.314e+02 1.239e+03 3.000e+03, threshold=1.863e+03, percent-clipped=16.0 +2023-03-31 19:18:56,108 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1209.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:19:16,396 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 19:19:42,377 INFO [train.py:903] (0/4) Epoch 1, batch 1250, loss[loss=0.6163, simple_loss=0.5506, pruned_loss=0.3589, over 19661.00 frames. ], tot_loss[loss=0.6732, simple_loss=0.5894, pruned_loss=0.4262, over 3811134.84 frames. ], batch size: 53, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:19:55,590 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3130, 1.5912, 1.2793, 2.6692, 3.0474, 1.5798, 2.7197, 2.9856], + device='cuda:0'), covar=tensor([0.1155, 0.5102, 0.9183, 0.2436, 0.1545, 0.8546, 0.1752, 0.1691], + device='cuda:0'), in_proj_covar=tensor([0.0077, 0.0099, 0.0129, 0.0083, 0.0101, 0.0153, 0.0094, 0.0079], + device='cuda:0'), out_proj_covar=tensor([4.4735e-05, 6.8349e-05, 8.9177e-05, 5.7078e-05, 6.1210e-05, 1.0065e-04, + 5.8989e-05, 5.3781e-05], device='cuda:0') +2023-03-31 19:20:32,301 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1295.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:20:39,090 INFO [train.py:903] (0/4) Epoch 1, batch 1300, loss[loss=0.6739, simple_loss=0.585, pruned_loss=0.4072, over 19538.00 frames. ], tot_loss[loss=0.6612, simple_loss=0.5807, pruned_loss=0.4119, over 3818189.75 frames. 
], batch size: 56, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:39,463 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1301.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:20:43,725 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+02 7.699e+02 1.048e+03 1.379e+03 4.741e+03, threshold=2.097e+03, percent-clipped=13.0 +2023-03-31 19:20:59,996 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1320.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:03,670 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1323.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:04,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1324.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:06,781 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1326.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:33,403 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1348.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:35,927 INFO [train.py:903] (0/4) Epoch 1, batch 1350, loss[loss=0.6695, simple_loss=0.5923, pruned_loss=0.3916, over 19400.00 frames. ], tot_loss[loss=0.6537, simple_loss=0.576, pruned_loss=0.4013, over 3814240.90 frames. ], batch size: 70, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:31,179 INFO [train.py:903] (0/4) Epoch 1, batch 1400, loss[loss=0.6235, simple_loss=0.5669, pruned_loss=0.3493, over 19668.00 frames. ], tot_loss[loss=0.6409, simple_loss=0.5678, pruned_loss=0.3869, over 3829234.77 frames. ], batch size: 58, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:35,192 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.620e+02 9.515e+02 1.230e+03 4.278e+03, threshold=1.903e+03, percent-clipped=3.0 +2023-03-31 19:23:24,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 19:23:25,213 INFO [train.py:903] (0/4) Epoch 1, batch 1450, loss[loss=0.6027, simple_loss=0.5475, pruned_loss=0.3371, over 18242.00 frames. ], tot_loss[loss=0.6327, simple_loss=0.562, pruned_loss=0.3775, over 3828520.83 frames. ], batch size: 83, lr: 4.90e-02, grad_scale: 8.0 +2023-03-31 19:23:26,441 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1452.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:23:34,536 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.21 vs. limit=2.0 +2023-03-31 19:23:56,780 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.25 vs. limit=2.0 +2023-03-31 19:24:18,018 INFO [train.py:903] (0/4) Epoch 1, batch 1500, loss[loss=0.4681, simple_loss=0.4393, pruned_loss=0.2497, over 19382.00 frames. ], tot_loss[loss=0.6247, simple_loss=0.5569, pruned_loss=0.3682, over 3822547.94 frames. ], batch size: 47, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:24:23,055 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+02 9.104e+02 1.060e+03 1.370e+03 5.981e+03, threshold=2.119e+03, percent-clipped=12.0 +2023-03-31 19:24:35,369 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1515.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:25:14,430 INFO [train.py:903] (0/4) Epoch 1, batch 1550, loss[loss=0.6301, simple_loss=0.5559, pruned_loss=0.3631, over 13548.00 frames. 
], tot_loss[loss=0.6153, simple_loss=0.5512, pruned_loss=0.3582, over 3829774.35 frames. ], batch size: 136, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:25:27,687 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-31 19:25:43,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1580.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:25:47,315 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1584.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:05,877 INFO [train.py:903] (0/4) Epoch 1, batch 1600, loss[loss=0.5265, simple_loss=0.484, pruned_loss=0.2879, over 19796.00 frames. ], tot_loss[loss=0.6061, simple_loss=0.5451, pruned_loss=0.349, over 3835439.24 frames. ], batch size: 47, lr: 4.88e-02, grad_scale: 8.0 +2023-03-31 19:26:10,815 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.811e+02 9.198e+02 1.152e+03 1.497e+03 2.578e+03, threshold=2.303e+03, percent-clipped=3.0 +2023-03-31 19:26:11,200 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1605.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:25,444 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 19:26:35,458 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1629.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:36,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1630.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:57,914 INFO [train.py:903] (0/4) Epoch 1, batch 1650, loss[loss=0.4915, simple_loss=0.462, pruned_loss=0.2612, over 19604.00 frames. ], tot_loss[loss=0.5993, simple_loss=0.5409, pruned_loss=0.3417, over 3830103.64 frames. ], batch size: 50, lr: 4.87e-02, grad_scale: 8.0 +2023-03-31 19:27:52,350 INFO [train.py:903] (0/4) Epoch 1, batch 1700, loss[loss=0.561, simple_loss=0.5255, pruned_loss=0.2992, over 19520.00 frames. ], tot_loss[loss=0.5891, simple_loss=0.5341, pruned_loss=0.3326, over 3829326.22 frames. ], batch size: 56, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:27:56,185 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.719e+02 9.402e+02 1.223e+03 1.535e+03 2.582e+03, threshold=2.447e+03, percent-clipped=3.0 +2023-03-31 19:28:26,602 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 19:28:46,835 INFO [train.py:903] (0/4) Epoch 1, batch 1750, loss[loss=0.5212, simple_loss=0.4845, pruned_loss=0.2801, over 19727.00 frames. ], tot_loss[loss=0.5819, simple_loss=0.5299, pruned_loss=0.3257, over 3823502.02 frames. ], batch size: 51, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:29:37,345 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1796.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:29:43,454 INFO [train.py:903] (0/4) Epoch 1, batch 1800, loss[loss=0.5464, simple_loss=0.5096, pruned_loss=0.2924, over 19779.00 frames. ], tot_loss[loss=0.5735, simple_loss=0.5238, pruned_loss=0.3187, over 3818986.45 frames. ], batch size: 54, lr: 4.85e-02, grad_scale: 8.0 +2023-03-31 19:29:47,613 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.927e+02 9.266e+02 1.209e+03 1.539e+03 2.564e+03, threshold=2.418e+03, percent-clipped=2.0 +2023-03-31 19:30:36,189 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-03-31 19:30:40,571 INFO [train.py:903] (0/4) Epoch 1, batch 1850, loss[loss=0.5994, simple_loss=0.552, pruned_loss=0.3247, over 19735.00 frames. ], tot_loss[loss=0.5677, simple_loss=0.5207, pruned_loss=0.3131, over 3810574.60 frames. ], batch size: 63, lr: 4.84e-02, grad_scale: 8.0 +2023-03-31 19:30:45,827 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1856.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:06,943 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1875.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:11,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 19:31:20,275 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1886.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:22,125 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1888.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:36,186 INFO [train.py:903] (0/4) Epoch 1, batch 1900, loss[loss=0.4577, simple_loss=0.442, pruned_loss=0.2363, over 19751.00 frames. ], tot_loss[loss=0.5635, simple_loss=0.5186, pruned_loss=0.3088, over 3810490.24 frames. ], batch size: 46, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:31:40,294 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 9.078e+02 1.104e+03 1.499e+03 2.754e+03, threshold=2.207e+03, percent-clipped=2.0 +2023-03-31 19:31:47,404 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1911.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:47,424 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1911.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:52,273 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 19:31:56,268 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 19:32:06,353 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:32:19,691 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 19:32:31,510 INFO [train.py:903] (0/4) Epoch 1, batch 1950, loss[loss=0.5672, simple_loss=0.5261, pruned_loss=0.3044, over 18824.00 frames. ], tot_loss[loss=0.5604, simple_loss=0.5174, pruned_loss=0.3053, over 3818182.08 frames. ], batch size: 74, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:32:57,186 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1973.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:33:27,214 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-2000.pt +2023-03-31 19:33:29,175 INFO [train.py:903] (0/4) Epoch 1, batch 2000, loss[loss=0.597, simple_loss=0.5495, pruned_loss=0.3222, over 19640.00 frames. ], tot_loss[loss=0.5567, simple_loss=0.5154, pruned_loss=0.3018, over 3829921.80 frames. 
], batch size: 60, lr: 4.82e-02, grad_scale: 8.0 +2023-03-31 19:33:33,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.917e+02 1.007e+03 1.260e+03 1.703e+03 3.255e+03, threshold=2.521e+03, percent-clipped=11.0 +2023-03-31 19:34:18,968 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2043.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:34:25,230 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 19:34:27,591 INFO [train.py:903] (0/4) Epoch 1, batch 2050, loss[loss=0.5722, simple_loss=0.5363, pruned_loss=0.304, over 18240.00 frames. ], tot_loss[loss=0.5458, simple_loss=0.5096, pruned_loss=0.2932, over 3843484.33 frames. ], batch size: 83, lr: 4.81e-02, grad_scale: 16.0 +2023-03-31 19:34:43,694 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 19:34:43,730 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 19:35:06,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 19:35:12,669 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2088.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:35:27,372 INFO [train.py:903] (0/4) Epoch 1, batch 2100, loss[loss=0.561, simple_loss=0.5319, pruned_loss=0.295, over 19670.00 frames. ], tot_loss[loss=0.5382, simple_loss=0.5056, pruned_loss=0.2871, over 3841917.07 frames. ], batch size: 60, lr: 4.80e-02, grad_scale: 16.0 +2023-03-31 19:35:31,663 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+02 9.211e+02 1.091e+03 1.524e+03 2.851e+03, threshold=2.182e+03, percent-clipped=6.0 +2023-03-31 19:35:56,703 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 19:36:17,081 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 19:36:24,974 INFO [train.py:903] (0/4) Epoch 1, batch 2150, loss[loss=0.4437, simple_loss=0.4417, pruned_loss=0.2229, over 19748.00 frames. ], tot_loss[loss=0.5305, simple_loss=0.5014, pruned_loss=0.2811, over 3836517.68 frames. ], batch size: 46, lr: 4.79e-02, grad_scale: 16.0 +2023-03-31 19:36:38,452 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 19:36:45,564 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2167.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:37:13,898 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2192.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:37:24,838 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:37:25,812 INFO [train.py:903] (0/4) Epoch 1, batch 2200, loss[loss=0.429, simple_loss=0.4263, pruned_loss=0.2159, over 19762.00 frames. ], tot_loss[loss=0.5242, simple_loss=0.498, pruned_loss=0.2762, over 3828303.93 frames. 
], batch size: 45, lr: 4.78e-02, grad_scale: 16.0 +2023-03-31 19:37:31,687 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.347e+02 9.332e+02 1.145e+03 1.435e+03 3.303e+03, threshold=2.290e+03, percent-clipped=7.0 +2023-03-31 19:37:49,456 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2219.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:03,593 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:27,470 INFO [train.py:903] (0/4) Epoch 1, batch 2250, loss[loss=0.4595, simple_loss=0.4455, pruned_loss=0.2367, over 19747.00 frames. ], tot_loss[loss=0.5199, simple_loss=0.4961, pruned_loss=0.2727, over 3842504.62 frames. ], batch size: 46, lr: 4.77e-02, grad_scale: 16.0 +2023-03-31 19:39:24,157 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2299.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:39:25,909 INFO [train.py:903] (0/4) Epoch 1, batch 2300, loss[loss=0.3842, simple_loss=0.3978, pruned_loss=0.1853, over 19706.00 frames. ], tot_loss[loss=0.5143, simple_loss=0.4927, pruned_loss=0.2686, over 3830064.83 frames. ], batch size: 45, lr: 4.77e-02, grad_scale: 8.0 +2023-03-31 19:39:31,324 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+02 9.458e+02 1.205e+03 1.557e+03 3.326e+03, threshold=2.410e+03, percent-clipped=10.0 +2023-03-31 19:39:39,116 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-03-31 19:39:41,661 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2315.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:39:52,808 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6341, 1.2358, 1.3032, 1.4180, 1.5748, 2.0309, 1.6730, 1.7201], + device='cuda:0'), covar=tensor([0.1632, 0.2489, 0.3255, 0.2157, 0.4948, 0.1066, 0.2747, 0.1710], + device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0165, 0.0209, 0.0161, 0.0256, 0.0159, 0.0188, 0.0169], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-31 19:39:53,803 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8115, 3.3905, 4.0015, 3.9181, 1.6348, 3.8286, 3.4032, 4.0125], + device='cuda:0'), covar=tensor([0.0232, 0.0484, 0.0243, 0.0148, 0.2734, 0.0215, 0.0411, 0.0271], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0117, 0.0108, 0.0082, 0.0210, 0.0082, 0.0115, 0.0105], + device='cuda:0'), out_proj_covar=tensor([5.3152e-05, 8.0129e-05, 6.1727e-05, 4.8572e-05, 1.2797e-04, 4.8900e-05, + 7.2126e-05, 6.2226e-05], device='cuda:0') +2023-03-31 19:39:53,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2324.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:04,946 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2334.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:17,232 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2344.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:20,632 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2347.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:24,828 INFO [train.py:903] (0/4) Epoch 1, batch 2350, loss[loss=0.4817, simple_loss=0.4831, pruned_loss=0.2402, over 19668.00 frames. 
], tot_loss[loss=0.5085, simple_loss=0.4891, pruned_loss=0.2645, over 3826569.52 frames. ], batch size: 55, lr: 4.76e-02, grad_scale: 8.0 +2023-03-31 19:40:33,247 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2358.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:48,353 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2369.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:55,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.08 vs. limit=2.0 +2023-03-31 19:41:07,244 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 19:41:23,365 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 19:41:26,485 INFO [train.py:903] (0/4) Epoch 1, batch 2400, loss[loss=0.3963, simple_loss=0.4059, pruned_loss=0.1933, over 19712.00 frames. ], tot_loss[loss=0.5031, simple_loss=0.4858, pruned_loss=0.2606, over 3811040.67 frames. ], batch size: 46, lr: 4.75e-02, grad_scale: 8.0 +2023-03-31 19:41:33,163 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+02 9.458e+02 1.226e+03 1.613e+03 2.603e+03, threshold=2.451e+03, percent-clipped=4.0 +2023-03-31 19:42:26,157 INFO [train.py:903] (0/4) Epoch 1, batch 2450, loss[loss=0.4956, simple_loss=0.4921, pruned_loss=0.2495, over 19692.00 frames. ], tot_loss[loss=0.4989, simple_loss=0.4839, pruned_loss=0.2573, over 3808027.52 frames. ], batch size: 59, lr: 4.74e-02, grad_scale: 8.0 +2023-03-31 19:42:30,970 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6255, 1.7265, 1.7256, 0.9989, 2.2592, 2.9343, 2.6946, 2.6101], + device='cuda:0'), covar=tensor([0.2409, 0.2267, 0.1689, 0.3475, 0.0945, 0.0192, 0.0331, 0.0429], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0162, 0.0139, 0.0197, 0.0135, 0.0078, 0.0101, 0.0086], + device='cuda:0'), out_proj_covar=tensor([1.2660e-04, 1.0630e-04, 8.9364e-05, 1.2763e-04, 9.4105e-05, 4.3811e-05, + 5.8731e-05, 5.0579e-05], device='cuda:0') +2023-03-31 19:43:20,049 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0522, 1.1387, 1.5920, 1.3550, 2.0389, 2.7786, 2.3732, 1.6335], + device='cuda:0'), covar=tensor([0.3209, 0.2079, 0.2486, 0.3159, 0.1235, 0.0370, 0.0804, 0.2015], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0108, 0.0126, 0.0150, 0.0139, 0.0066, 0.0115, 0.0140], + device='cuda:0'), out_proj_covar=tensor([9.2116e-05, 7.1223e-05, 8.7536e-05, 1.0373e-04, 9.4163e-05, 4.0542e-05, + 7.7590e-05, 9.2573e-05], device='cuda:0') +2023-03-31 19:43:24,722 INFO [train.py:903] (0/4) Epoch 1, batch 2500, loss[loss=0.5268, simple_loss=0.4844, pruned_loss=0.2846, over 19405.00 frames. ], tot_loss[loss=0.495, simple_loss=0.4814, pruned_loss=0.2546, over 3804397.54 frames. 
], batch size: 48, lr: 4.73e-02, grad_scale: 8.0 +2023-03-31 19:43:30,995 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.049e+02 1.082e+03 1.390e+03 1.742e+03 4.873e+03, threshold=2.779e+03, percent-clipped=5.0 +2023-03-31 19:43:41,913 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6601, 3.2049, 1.9137, 2.8510, 1.4318, 3.3909, 2.9559, 3.0899], + device='cuda:0'), covar=tensor([0.0665, 0.1270, 0.2628, 0.0679, 0.2884, 0.0480, 0.0743, 0.0819], + device='cuda:0'), in_proj_covar=tensor([0.0167, 0.0200, 0.0208, 0.0139, 0.0220, 0.0129, 0.0133, 0.0128], + device='cuda:0'), out_proj_covar=tensor([1.3689e-04, 1.5780e-04, 1.3921e-04, 1.0297e-04, 1.5396e-04, 9.3544e-05, + 9.5972e-05, 9.3732e-05], device='cuda:0') +2023-03-31 19:44:22,068 INFO [train.py:903] (0/4) Epoch 1, batch 2550, loss[loss=0.5195, simple_loss=0.5046, pruned_loss=0.2672, over 19550.00 frames. ], tot_loss[loss=0.4949, simple_loss=0.4817, pruned_loss=0.2543, over 3799435.64 frames. ], batch size: 56, lr: 4.72e-02, grad_scale: 8.0 +2023-03-31 19:44:47,124 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2571.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:09,013 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2590.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:14,138 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 19:45:15,592 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2596.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:21,665 INFO [train.py:903] (0/4) Epoch 1, batch 2600, loss[loss=0.4601, simple_loss=0.4629, pruned_loss=0.2287, over 19540.00 frames. ], tot_loss[loss=0.4888, simple_loss=0.4784, pruned_loss=0.2497, over 3812652.54 frames. ], batch size: 56, lr: 4.71e-02, grad_scale: 8.0 +2023-03-31 19:45:24,457 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2603.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:28,255 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+02 9.154e+02 1.259e+03 1.710e+03 2.682e+03, threshold=2.519e+03, percent-clipped=0.0 +2023-03-31 19:45:39,460 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2615.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:45,370 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-31 19:45:49,965 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-03-31 19:45:55,203 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2628.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:46:22,931 INFO [train.py:903] (0/4) Epoch 1, batch 2650, loss[loss=0.4675, simple_loss=0.477, pruned_loss=0.2289, over 19677.00 frames. ], tot_loss[loss=0.4836, simple_loss=0.4754, pruned_loss=0.246, over 3811747.55 frames. ], batch size: 60, lr: 4.70e-02, grad_scale: 8.0 +2023-03-31 19:46:39,346 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 19:47:23,190 INFO [train.py:903] (0/4) Epoch 1, batch 2700, loss[loss=0.3989, simple_loss=0.4141, pruned_loss=0.1919, over 19767.00 frames. ], tot_loss[loss=0.48, simple_loss=0.4734, pruned_loss=0.2433, over 3826453.55 frames. 
], batch size: 48, lr: 4.69e-02, grad_scale: 8.0 +2023-03-31 19:47:24,580 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2702.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:47:25,677 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2703.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:47:29,292 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 19:47:29,729 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+02 8.490e+02 1.133e+03 1.436e+03 3.154e+03, threshold=2.267e+03, percent-clipped=3.0 +2023-03-31 19:47:49,723 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.91 vs. limit=2.0 +2023-03-31 19:48:24,813 INFO [train.py:903] (0/4) Epoch 1, batch 2750, loss[loss=0.4561, simple_loss=0.4627, pruned_loss=0.2248, over 19536.00 frames. ], tot_loss[loss=0.4762, simple_loss=0.4712, pruned_loss=0.2406, over 3817492.91 frames. ], batch size: 56, lr: 4.68e-02, grad_scale: 8.0 +2023-03-31 19:49:25,712 INFO [train.py:903] (0/4) Epoch 1, batch 2800, loss[loss=0.4176, simple_loss=0.4175, pruned_loss=0.2089, over 19077.00 frames. ], tot_loss[loss=0.4739, simple_loss=0.4701, pruned_loss=0.2389, over 3817851.16 frames. ], batch size: 42, lr: 4.67e-02, grad_scale: 8.0 +2023-03-31 19:49:31,054 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 1.002e+03 1.265e+03 1.511e+03 4.462e+03, threshold=2.529e+03, percent-clipped=7.0 +2023-03-31 19:49:44,664 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9834, 1.4751, 1.4038, 1.6058, 1.8916, 2.0391, 2.1340, 1.8861], + device='cuda:0'), covar=tensor([0.1329, 0.2093, 0.2483, 0.2545, 0.3126, 0.2307, 0.3219, 0.1657], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0244, 0.0255, 0.0251, 0.0341, 0.0242, 0.0306, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 19:49:45,688 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2817.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:50:06,590 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7005, 1.3421, 1.7658, 0.9726, 2.8271, 2.5730, 2.5508, 2.3098], + device='cuda:0'), covar=tensor([0.1926, 0.2493, 0.1559, 0.3074, 0.0489, 0.0230, 0.0312, 0.0396], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0195, 0.0177, 0.0240, 0.0168, 0.0095, 0.0118, 0.0101], + device='cuda:0'), out_proj_covar=tensor([1.5737e-04, 1.3078e-04, 1.1887e-04, 1.5954e-04, 1.2695e-04, 5.8135e-05, + 7.5061e-05, 6.6786e-05], device='cuda:0') +2023-03-31 19:50:15,441 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2842.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:50:26,221 INFO [train.py:903] (0/4) Epoch 1, batch 2850, loss[loss=0.445, simple_loss=0.4562, pruned_loss=0.2169, over 19367.00 frames. ], tot_loss[loss=0.4721, simple_loss=0.4688, pruned_loss=0.2377, over 3810427.28 frames. ], batch size: 70, lr: 4.66e-02, grad_scale: 8.0 +2023-03-31 19:51:16,291 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.02 vs. limit=2.0 +2023-03-31 19:51:22,260 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 19:51:25,259 INFO [train.py:903] (0/4) Epoch 1, batch 2900, loss[loss=0.506, simple_loss=0.4885, pruned_loss=0.2617, over 18913.00 frames. 
], tot_loss[loss=0.4699, simple_loss=0.4671, pruned_loss=0.2363, over 3825749.22 frames. ], batch size: 74, lr: 4.65e-02, grad_scale: 8.0 +2023-03-31 19:51:30,481 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.237e+02 1.045e+03 1.349e+03 1.754e+03 3.463e+03, threshold=2.699e+03, percent-clipped=4.0 +2023-03-31 19:52:25,011 INFO [train.py:903] (0/4) Epoch 1, batch 2950, loss[loss=0.4398, simple_loss=0.4507, pruned_loss=0.2144, over 19651.00 frames. ], tot_loss[loss=0.4651, simple_loss=0.464, pruned_loss=0.2331, over 3818577.77 frames. ], batch size: 55, lr: 4.64e-02, grad_scale: 8.0 +2023-03-31 19:52:30,817 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2955.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:53:26,138 INFO [train.py:903] (0/4) Epoch 1, batch 3000, loss[loss=0.4467, simple_loss=0.4632, pruned_loss=0.2151, over 18729.00 frames. ], tot_loss[loss=0.4631, simple_loss=0.4633, pruned_loss=0.2315, over 3825880.41 frames. ], batch size: 74, lr: 4.63e-02, grad_scale: 8.0 +2023-03-31 19:53:26,139 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 19:53:38,708 INFO [train.py:937] (0/4) Epoch 1, validation: loss=0.3995, simple_loss=0.4801, pruned_loss=0.1594, over 944034.00 frames. +2023-03-31 19:53:38,709 INFO [train.py:938] (0/4) Maximum memory allocated so far is 15735MB +2023-03-31 19:53:43,194 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 19:53:45,670 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 9.060e+02 1.151e+03 1.550e+03 2.691e+03, threshold=2.303e+03, percent-clipped=0.0 +2023-03-31 19:54:23,238 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3037.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:35,488 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3047.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:40,047 INFO [train.py:903] (0/4) Epoch 1, batch 3050, loss[loss=0.4571, simple_loss=0.4725, pruned_loss=0.2208, over 19530.00 frames. ], tot_loss[loss=0.4611, simple_loss=0.462, pruned_loss=0.2301, over 3831683.75 frames. ], batch size: 56, lr: 4.62e-02, grad_scale: 8.0 +2023-03-31 19:54:50,176 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3353, 0.9244, 1.0992, 1.2790, 1.1405, 1.7529, 1.4913, 1.4117], + device='cuda:0'), covar=tensor([0.1308, 0.2754, 0.2399, 0.1703, 0.2965, 0.1098, 0.1807, 0.1537], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0156, 0.0167, 0.0139, 0.0199, 0.0128, 0.0141, 0.0129], + device='cuda:0'), out_proj_covar=tensor([8.4845e-05, 1.1301e-04, 1.1499e-04, 1.0105e-04, 1.4184e-04, 9.1249e-05, + 9.8527e-05, 9.1916e-05], device='cuda:0') +2023-03-31 19:54:52,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-31 19:55:07,342 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3073.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:55:36,288 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3098.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:55:39,212 INFO [train.py:903] (0/4) Epoch 1, batch 3100, loss[loss=0.4429, simple_loss=0.4367, pruned_loss=0.2245, over 19691.00 frames. ], tot_loss[loss=0.4623, simple_loss=0.4622, pruned_loss=0.2312, over 3827373.90 frames. 
], batch size: 53, lr: 4.61e-02, grad_scale: 8.0 +2023-03-31 19:55:45,833 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+02 1.021e+03 1.362e+03 1.815e+03 5.785e+03, threshold=2.723e+03, percent-clipped=14.0 +2023-03-31 19:56:17,182 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6042, 1.9688, 2.3053, 1.7458, 1.8235, 0.9392, 0.9093, 1.4751], + device='cuda:0'), covar=tensor([0.1669, 0.0710, 0.0383, 0.1112, 0.1382, 0.1474, 0.2307, 0.1462], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0128, 0.0125, 0.0156, 0.0125, 0.0173, 0.0196, 0.0177], + device='cuda:0'), out_proj_covar=tensor([1.5921e-04, 9.9970e-05, 9.8031e-05, 1.2227e-04, 1.0202e-04, 1.3412e-04, + 1.4646e-04, 1.3607e-04], device='cuda:0') +2023-03-31 19:56:21,061 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8101, 1.1784, 1.6889, 1.1726, 2.5839, 3.3961, 2.9821, 3.0194], + device='cuda:0'), covar=tensor([0.2483, 0.3235, 0.2193, 0.3496, 0.0876, 0.0163, 0.0336, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0212, 0.0199, 0.0256, 0.0179, 0.0101, 0.0127, 0.0103], + device='cuda:0'), out_proj_covar=tensor([1.7110e-04, 1.4508e-04, 1.3720e-04, 1.7537e-04, 1.4149e-04, 6.5771e-05, + 8.7744e-05, 7.2562e-05], device='cuda:0') +2023-03-31 19:56:41,856 INFO [train.py:903] (0/4) Epoch 1, batch 3150, loss[loss=0.4726, simple_loss=0.4749, pruned_loss=0.2352, over 19639.00 frames. ], tot_loss[loss=0.4586, simple_loss=0.4599, pruned_loss=0.2287, over 3829200.19 frames. ], batch size: 60, lr: 4.60e-02, grad_scale: 8.0 +2023-03-31 19:56:54,520 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3162.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:08,989 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 19:57:24,072 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3186.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:30,846 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-03-31 19:57:40,829 INFO [train.py:903] (0/4) Epoch 1, batch 3200, loss[loss=0.3935, simple_loss=0.4167, pruned_loss=0.1851, over 19500.00 frames. ], tot_loss[loss=0.4558, simple_loss=0.4585, pruned_loss=0.2265, over 3835957.49 frames. ], batch size: 49, lr: 4.59e-02, grad_scale: 8.0 +2023-03-31 19:57:46,448 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+02 9.158e+02 1.127e+03 1.418e+03 2.574e+03, threshold=2.253e+03, percent-clipped=0.0 +2023-03-31 19:58:41,775 INFO [train.py:903] (0/4) Epoch 1, batch 3250, loss[loss=0.4397, simple_loss=0.4546, pruned_loss=0.2124, over 18766.00 frames. ], tot_loss[loss=0.4536, simple_loss=0.4571, pruned_loss=0.225, over 3830299.68 frames. ], batch size: 74, lr: 4.58e-02, grad_scale: 8.0 +2023-03-31 19:59:40,386 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3299.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:42,435 INFO [train.py:903] (0/4) Epoch 1, batch 3300, loss[loss=0.4811, simple_loss=0.487, pruned_loss=0.2376, over 19329.00 frames. ], tot_loss[loss=0.4496, simple_loss=0.4548, pruned_loss=0.2223, over 3810273.52 frames. 
], batch size: 66, lr: 4.57e-02, grad_scale: 8.0 +2023-03-31 19:59:42,803 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3301.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:48,777 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+02 9.991e+02 1.183e+03 1.562e+03 4.237e+03, threshold=2.366e+03, percent-clipped=7.0 +2023-03-31 19:59:48,826 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 19:59:49,156 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3306.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:59:54,697 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2965, 1.0628, 1.0108, 1.2678, 1.2097, 1.3980, 1.2427, 1.3417], + device='cuda:0'), covar=tensor([0.1132, 0.1895, 0.1920, 0.1551, 0.2102, 0.1668, 0.2166, 0.1278], + device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0254, 0.0256, 0.0271, 0.0361, 0.0245, 0.0319, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 20:00:43,740 INFO [train.py:903] (0/4) Epoch 1, batch 3350, loss[loss=0.3835, simple_loss=0.4044, pruned_loss=0.1814, over 18987.00 frames. ], tot_loss[loss=0.448, simple_loss=0.4538, pruned_loss=0.2211, over 3809269.82 frames. ], batch size: 42, lr: 4.56e-02, grad_scale: 8.0 +2023-03-31 20:00:49,625 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0428, 4.2892, 5.6605, 5.3730, 1.8778, 4.9086, 4.5654, 5.1230], + device='cuda:0'), covar=tensor([0.0169, 0.0393, 0.0222, 0.0134, 0.3079, 0.0324, 0.0410, 0.0431], + device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0176, 0.0199, 0.0132, 0.0307, 0.0116, 0.0167, 0.0182], + device='cuda:0'), out_proj_covar=tensor([9.3834e-05, 1.2303e-04, 1.2869e-04, 8.2271e-05, 1.7637e-04, 7.6271e-05, + 1.1318e-04, 1.1617e-04], device='cuda:0') +2023-03-31 20:01:22,256 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3381.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:01:46,107 INFO [train.py:903] (0/4) Epoch 1, batch 3400, loss[loss=0.4229, simple_loss=0.4161, pruned_loss=0.2149, over 19773.00 frames. ], tot_loss[loss=0.4471, simple_loss=0.4534, pruned_loss=0.2204, over 3816417.29 frames. ], batch size: 45, lr: 4.55e-02, grad_scale: 8.0 +2023-03-31 20:01:46,920 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-31 20:01:48,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-03-31 20:01:52,903 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.525e+02 9.967e+02 1.253e+03 1.611e+03 4.007e+03, threshold=2.507e+03, percent-clipped=3.0 +2023-03-31 20:02:02,054 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3414.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:06,791 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3418.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:02:38,902 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3443.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:48,480 INFO [train.py:903] (0/4) Epoch 1, batch 3450, loss[loss=0.4488, simple_loss=0.4712, pruned_loss=0.2132, over 19658.00 frames. ], tot_loss[loss=0.4484, simple_loss=0.4545, pruned_loss=0.2211, over 3812897.10 frames. 
], batch size: 58, lr: 4.54e-02, grad_scale: 8.0 +2023-03-31 20:02:50,748 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 20:03:43,982 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:03:50,182 INFO [train.py:903] (0/4) Epoch 1, batch 3500, loss[loss=0.4489, simple_loss=0.4651, pruned_loss=0.2164, over 19662.00 frames. ], tot_loss[loss=0.4509, simple_loss=0.456, pruned_loss=0.2229, over 3803575.46 frames. ], batch size: 60, lr: 4.53e-02, grad_scale: 8.0 +2023-03-31 20:03:56,677 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+02 9.703e+02 1.213e+03 1.703e+03 9.610e+03, threshold=2.427e+03, percent-clipped=9.0 +2023-03-31 20:04:48,394 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-31 20:04:52,060 INFO [train.py:903] (0/4) Epoch 1, batch 3550, loss[loss=0.5704, simple_loss=0.5257, pruned_loss=0.3076, over 13170.00 frames. ], tot_loss[loss=0.4486, simple_loss=0.4547, pruned_loss=0.2213, over 3793975.90 frames. ], batch size: 137, lr: 4.51e-02, grad_scale: 8.0 +2023-03-31 20:05:00,240 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3557.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:05:07,862 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3564.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:05:16,035 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.46 vs. limit=5.0 +2023-03-31 20:05:30,977 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3582.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:05:53,929 INFO [train.py:903] (0/4) Epoch 1, batch 3600, loss[loss=0.4809, simple_loss=0.4817, pruned_loss=0.24, over 19364.00 frames. ], tot_loss[loss=0.4588, simple_loss=0.4609, pruned_loss=0.2284, over 3806675.61 frames. ], batch size: 70, lr: 4.50e-02, grad_scale: 8.0 +2023-03-31 20:06:00,961 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+02 9.459e+02 1.417e+03 1.964e+03 2.103e+04, threshold=2.834e+03, percent-clipped=17.0 +2023-03-31 20:06:39,631 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7945, 1.8325, 1.6856, 1.1837, 1.0480, 1.5303, 0.3630, 1.0178], + device='cuda:0'), covar=tensor([0.0904, 0.0401, 0.0555, 0.1044, 0.1163, 0.1163, 0.2374, 0.1593], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0100, 0.0115, 0.0136, 0.0145, 0.0141, 0.0168, 0.0158], + device='cuda:0'), out_proj_covar=tensor([8.7750e-05, 7.6651e-05, 8.8901e-05, 1.0320e-04, 1.1117e-04, 1.0574e-04, + 1.2572e-04, 1.2421e-04], device='cuda:0') +2023-03-31 20:06:55,026 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3650.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:06:55,794 INFO [train.py:903] (0/4) Epoch 1, batch 3650, loss[loss=0.5615, simple_loss=0.5245, pruned_loss=0.2993, over 19669.00 frames. ], tot_loss[loss=0.4577, simple_loss=0.4608, pruned_loss=0.2272, over 3807279.50 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 8.0 +2023-03-31 20:07:18,704 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3670.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:40,586 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. 
limit=2.0 +2023-03-31 20:07:49,495 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3694.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:07:50,816 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3695.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:56,860 INFO [train.py:903] (0/4) Epoch 1, batch 3700, loss[loss=0.4792, simple_loss=0.4771, pruned_loss=0.2407, over 19313.00 frames. ], tot_loss[loss=0.4609, simple_loss=0.4626, pruned_loss=0.2296, over 3815459.11 frames. ], batch size: 66, lr: 4.48e-02, grad_scale: 8.0 +2023-03-31 20:08:05,842 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.255e+02 1.022e+03 1.666e+03 2.666e+03 1.441e+04, threshold=3.331e+03, percent-clipped=22.0 +2023-03-31 20:09:01,162 INFO [train.py:903] (0/4) Epoch 1, batch 3750, loss[loss=0.4905, simple_loss=0.5051, pruned_loss=0.2379, over 19046.00 frames. ], tot_loss[loss=0.4577, simple_loss=0.4609, pruned_loss=0.2273, over 3817826.15 frames. ], batch size: 69, lr: 4.47e-02, grad_scale: 8.0 +2023-03-31 20:09:02,741 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3752.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:09:19,346 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3765.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:09:34,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3777.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:09:53,230 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8234, 1.9141, 1.3393, 2.1012, 1.6666, 0.8033, 1.0295, 1.3723], + device='cuda:0'), covar=tensor([0.1035, 0.0841, 0.1189, 0.0847, 0.1220, 0.1387, 0.2485, 0.1627], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0091, 0.0127, 0.0137, 0.0142, 0.0081, 0.0135, 0.0122], + device='cuda:0'), out_proj_covar=tensor([7.4978e-05, 6.7821e-05, 8.5290e-05, 9.6979e-05, 9.6875e-05, 5.1451e-05, + 1.0229e-04, 8.7621e-05], device='cuda:0') +2023-03-31 20:09:57,039 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 20:10:05,361 INFO [train.py:903] (0/4) Epoch 1, batch 3800, loss[loss=0.4145, simple_loss=0.4436, pruned_loss=0.1927, over 19651.00 frames. ], tot_loss[loss=0.4532, simple_loss=0.4583, pruned_loss=0.2241, over 3829235.14 frames. ], batch size: 58, lr: 4.46e-02, grad_scale: 8.0 +2023-03-31 20:10:12,575 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.844e+02 1.035e+03 1.394e+03 1.973e+03 4.112e+03, threshold=2.788e+03, percent-clipped=1.0 +2023-03-31 20:10:15,317 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2520, 1.0461, 1.1187, 1.3175, 1.2756, 1.3372, 1.2687, 1.2914], + device='cuda:0'), covar=tensor([0.1123, 0.1880, 0.1760, 0.1544, 0.2111, 0.1820, 0.2026, 0.1329], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0274, 0.0276, 0.0294, 0.0378, 0.0266, 0.0330, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 20:10:41,863 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-03-31 20:11:08,747 INFO [train.py:903] (0/4) Epoch 1, batch 3850, loss[loss=0.4337, simple_loss=0.4544, pruned_loss=0.2065, over 19534.00 frames. ], tot_loss[loss=0.4476, simple_loss=0.4544, pruned_loss=0.2204, over 3843992.31 frames. 
], batch size: 54, lr: 4.45e-02, grad_scale: 8.0 +2023-03-31 20:11:15,011 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9493, 1.6075, 1.2469, 1.3864, 1.0445, 1.2175, 0.6264, 1.0840], + device='cuda:0'), covar=tensor([0.0786, 0.0641, 0.0610, 0.0882, 0.1408, 0.1246, 0.2475, 0.1556], + device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0113, 0.0122, 0.0146, 0.0160, 0.0153, 0.0180, 0.0173], + device='cuda:0'), out_proj_covar=tensor([9.5432e-05, 8.7071e-05, 9.6328e-05, 1.1030e-04, 1.2398e-04, 1.1514e-04, + 1.3441e-04, 1.3575e-04], device='cuda:0') +2023-03-31 20:12:13,123 INFO [train.py:903] (0/4) Epoch 1, batch 3900, loss[loss=0.4476, simple_loss=0.4553, pruned_loss=0.22, over 19510.00 frames. ], tot_loss[loss=0.445, simple_loss=0.4525, pruned_loss=0.2187, over 3845526.11 frames. ], batch size: 64, lr: 4.44e-02, grad_scale: 8.0 +2023-03-31 20:12:22,004 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.917e+02 1.152e+03 1.441e+03 1.935e+03 3.736e+03, threshold=2.883e+03, percent-clipped=2.0 +2023-03-31 20:12:23,355 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3908.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:13:07,839 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:13:18,194 INFO [train.py:903] (0/4) Epoch 1, batch 3950, loss[loss=0.3641, simple_loss=0.3992, pruned_loss=0.1645, over 19629.00 frames. ], tot_loss[loss=0.4471, simple_loss=0.4535, pruned_loss=0.2204, over 3845411.23 frames. ], batch size: 50, lr: 4.43e-02, grad_scale: 8.0 +2023-03-31 20:13:23,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 20:14:21,835 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-4000.pt +2023-03-31 20:14:23,994 INFO [train.py:903] (0/4) Epoch 1, batch 4000, loss[loss=0.457, simple_loss=0.4578, pruned_loss=0.2281, over 13288.00 frames. ], tot_loss[loss=0.4425, simple_loss=0.4505, pruned_loss=0.2173, over 3830160.22 frames. ], batch size: 136, lr: 4.42e-02, grad_scale: 8.0 +2023-03-31 20:14:30,937 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.942e+02 1.053e+03 1.358e+03 1.948e+03 9.883e+03, threshold=2.717e+03, percent-clipped=12.0 +2023-03-31 20:14:49,301 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4021.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:14:51,511 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4023.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:11,863 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4038.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:12,778 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 20:15:22,641 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4046.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:15:28,266 INFO [train.py:903] (0/4) Epoch 1, batch 4050, loss[loss=0.3981, simple_loss=0.4169, pruned_loss=0.1896, over 19535.00 frames. ], tot_loss[loss=0.4397, simple_loss=0.4481, pruned_loss=0.2156, over 3811991.79 frames. 
], batch size: 54, lr: 4.41e-02, grad_scale: 8.0 +2023-03-31 20:16:32,898 INFO [train.py:903] (0/4) Epoch 1, batch 4100, loss[loss=0.4153, simple_loss=0.4401, pruned_loss=0.1952, over 19724.00 frames. ], tot_loss[loss=0.4398, simple_loss=0.4485, pruned_loss=0.2156, over 3806729.48 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 8.0 +2023-03-31 20:16:41,803 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.649e+02 1.198e+03 1.458e+03 1.833e+03 3.490e+03, threshold=2.915e+03, percent-clipped=3.0 +2023-03-31 20:17:08,499 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 20:17:38,861 INFO [train.py:903] (0/4) Epoch 1, batch 4150, loss[loss=0.3858, simple_loss=0.4108, pruned_loss=0.1804, over 19871.00 frames. ], tot_loss[loss=0.4384, simple_loss=0.4476, pruned_loss=0.2146, over 3793583.29 frames. ], batch size: 52, lr: 4.39e-02, grad_scale: 8.0 +2023-03-31 20:17:41,670 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4153.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:17:49,786 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4159.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:17:58,237 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6162, 1.5301, 1.6696, 2.5717, 3.3161, 1.3906, 2.1397, 3.3681], + device='cuda:0'), covar=tensor([0.0351, 0.2607, 0.2908, 0.1589, 0.0353, 0.2841, 0.1245, 0.0366], + device='cuda:0'), in_proj_covar=tensor([0.0165, 0.0263, 0.0254, 0.0264, 0.0190, 0.0321, 0.0226, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:18:06,904 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0891, 1.9513, 1.5685, 2.2938, 1.7963, 0.5663, 1.1694, 1.5231], + device='cuda:0'), covar=tensor([0.1112, 0.0988, 0.1041, 0.0960, 0.1420, 0.1938, 0.2671, 0.1530], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0078, 0.0107, 0.0114, 0.0119, 0.0080, 0.0122, 0.0104], + device='cuda:0'), out_proj_covar=tensor([6.3573e-05, 5.3425e-05, 6.8721e-05, 7.5055e-05, 7.5545e-05, 4.9392e-05, + 9.0291e-05, 7.0564e-05], device='cuda:0') +2023-03-31 20:18:14,049 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.24 vs. limit=5.0 +2023-03-31 20:18:44,276 INFO [train.py:903] (0/4) Epoch 1, batch 4200, loss[loss=0.4719, simple_loss=0.4829, pruned_loss=0.2305, over 19494.00 frames. ], tot_loss[loss=0.4342, simple_loss=0.4449, pruned_loss=0.2117, over 3796359.61 frames. ], batch size: 64, lr: 4.38e-02, grad_scale: 8.0 +2023-03-31 20:18:46,652 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 20:18:51,465 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.919e+02 1.098e+03 1.489e+03 3.268e+03, threshold=2.196e+03, percent-clipped=3.0 +2023-03-31 20:19:43,641 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0 +2023-03-31 20:19:47,735 INFO [train.py:903] (0/4) Epoch 1, batch 4250, loss[loss=0.482, simple_loss=0.4817, pruned_loss=0.2412, over 19778.00 frames. ], tot_loss[loss=0.4349, simple_loss=0.4453, pruned_loss=0.2123, over 3785945.18 frames. ], batch size: 56, lr: 4.36e-02, grad_scale: 8.0 +2023-03-31 20:20:02,077 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-03-31 20:20:15,225 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 20:20:25,100 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4279.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:20:36,567 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:20:52,968 INFO [train.py:903] (0/4) Epoch 1, batch 4300, loss[loss=0.3873, simple_loss=0.4076, pruned_loss=0.1835, over 19628.00 frames. ], tot_loss[loss=0.4335, simple_loss=0.4439, pruned_loss=0.2115, over 3795109.33 frames. ], batch size: 50, lr: 4.35e-02, grad_scale: 8.0 +2023-03-31 20:20:57,802 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4304.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:21:00,179 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4306.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:21:02,056 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.021e+02 1.171e+03 1.478e+03 2.100e+03 3.660e+03, threshold=2.957e+03, percent-clipped=20.0 +2023-03-31 20:21:44,163 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6484, 3.3520, 1.9167, 3.0791, 1.2756, 3.2982, 3.0312, 2.9783], + device='cuda:0'), covar=tensor([0.0633, 0.1190, 0.2275, 0.0624, 0.3180, 0.0753, 0.0558, 0.0833], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0231, 0.0256, 0.0205, 0.0273, 0.0207, 0.0155, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-03-31 20:21:46,488 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 20:21:59,415 INFO [train.py:903] (0/4) Epoch 1, batch 4350, loss[loss=0.4244, simple_loss=0.443, pruned_loss=0.2028, over 19535.00 frames. ], tot_loss[loss=0.4308, simple_loss=0.4429, pruned_loss=0.2094, over 3793305.98 frames. ], batch size: 56, lr: 4.34e-02, grad_scale: 8.0 +2023-03-31 20:22:20,197 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7679, 1.1824, 1.4409, 0.7814, 2.6004, 2.8400, 2.7358, 2.6387], + device='cuda:0'), covar=tensor([0.1636, 0.2831, 0.2278, 0.2904, 0.0490, 0.0183, 0.0263, 0.0254], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0269, 0.0279, 0.0304, 0.0208, 0.0127, 0.0158, 0.0121], + device='cuda:0'), out_proj_covar=tensor([2.2508e-04, 2.0494e-04, 2.1278e-04, 2.3036e-04, 1.8519e-04, 9.1116e-05, + 1.2465e-04, 1.0075e-04], device='cuda:0') +2023-03-31 20:23:03,132 INFO [train.py:903] (0/4) Epoch 1, batch 4400, loss[loss=0.4095, simple_loss=0.4339, pruned_loss=0.1925, over 19576.00 frames. ], tot_loss[loss=0.4284, simple_loss=0.4414, pruned_loss=0.2077, over 3799435.99 frames. 
], batch size: 61, lr: 4.33e-02, grad_scale: 8.0 +2023-03-31 20:23:05,845 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4403.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:23:11,485 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.454e+02 9.391e+02 1.114e+03 1.514e+03 3.216e+03, threshold=2.228e+03, percent-clipped=1.0 +2023-03-31 20:23:14,571 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4409.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:23:30,201 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 20:23:39,401 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 20:23:46,823 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4434.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:24:06,851 INFO [train.py:903] (0/4) Epoch 1, batch 4450, loss[loss=0.5332, simple_loss=0.5091, pruned_loss=0.2787, over 13491.00 frames. ], tot_loss[loss=0.4282, simple_loss=0.4413, pruned_loss=0.2076, over 3798897.60 frames. ], batch size: 136, lr: 4.32e-02, grad_scale: 8.0 +2023-03-31 20:24:10,359 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5166, 1.2301, 1.2535, 1.6075, 1.3156, 1.4140, 1.2663, 1.5917], + device='cuda:0'), covar=tensor([0.1231, 0.2377, 0.1877, 0.1303, 0.2049, 0.1053, 0.1837, 0.1008], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0276, 0.0246, 0.0208, 0.0276, 0.0201, 0.0226, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 20:24:29,565 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 20:25:09,799 INFO [train.py:903] (0/4) Epoch 1, batch 4500, loss[loss=0.3622, simple_loss=0.3919, pruned_loss=0.1662, over 19593.00 frames. ], tot_loss[loss=0.4274, simple_loss=0.4413, pruned_loss=0.2067, over 3819646.46 frames. ], batch size: 52, lr: 4.31e-02, grad_scale: 8.0 +2023-03-31 20:25:12,517 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4503.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:25:18,130 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.500e+02 1.049e+03 1.357e+03 1.620e+03 3.962e+03, threshold=2.713e+03, percent-clipped=8.0 +2023-03-31 20:26:14,040 INFO [train.py:903] (0/4) Epoch 1, batch 4550, loss[loss=0.4148, simple_loss=0.4336, pruned_loss=0.198, over 18699.00 frames. ], tot_loss[loss=0.4238, simple_loss=0.4387, pruned_loss=0.2045, over 3823733.93 frames. ], batch size: 74, lr: 4.30e-02, grad_scale: 8.0 +2023-03-31 20:26:24,214 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 20:26:47,306 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-03-31 20:27:04,912 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4705, 0.9841, 1.3617, 1.7375, 2.1825, 1.3682, 1.8597, 2.0524], + device='cuda:0'), covar=tensor([0.0375, 0.2571, 0.2437, 0.1309, 0.0440, 0.1790, 0.0755, 0.0656], + device='cuda:0'), in_proj_covar=tensor([0.0173, 0.0267, 0.0256, 0.0258, 0.0194, 0.0315, 0.0225, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:27:16,034 INFO [train.py:903] (0/4) Epoch 1, batch 4600, loss[loss=0.4428, simple_loss=0.4605, pruned_loss=0.2125, over 18792.00 frames. ], tot_loss[loss=0.4244, simple_loss=0.4392, pruned_loss=0.2048, over 3810891.59 frames. ], batch size: 75, lr: 4.29e-02, grad_scale: 4.0 +2023-03-31 20:27:24,059 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.210e+02 9.691e+02 1.279e+03 1.723e+03 8.130e+03, threshold=2.557e+03, percent-clipped=7.0 +2023-03-31 20:27:36,627 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4618.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:27:54,576 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6032, 2.6742, 2.1692, 2.9033, 1.9958, 2.5664, 2.1711, 1.9642], + device='cuda:0'), covar=tensor([0.1073, 0.0855, 0.0894, 0.0909, 0.1394, 0.0771, 0.2022, 0.1241], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0108, 0.0138, 0.0154, 0.0162, 0.0090, 0.0163, 0.0133], + device='cuda:0'), out_proj_covar=tensor([8.7656e-05, 7.4996e-05, 9.1657e-05, 1.0148e-04, 1.0391e-04, 5.5804e-05, + 1.1812e-04, 9.0435e-05], device='cuda:0') +2023-03-31 20:28:17,144 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4650.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:18,171 INFO [train.py:903] (0/4) Epoch 1, batch 4650, loss[loss=0.4609, simple_loss=0.4698, pruned_loss=0.2259, over 19611.00 frames. ], tot_loss[loss=0.4224, simple_loss=0.4376, pruned_loss=0.2036, over 3813322.71 frames. ], batch size: 61, lr: 4.28e-02, grad_scale: 4.0 +2023-03-31 20:28:27,851 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4659.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:33,642 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5965, 4.2585, 2.2465, 3.6460, 1.5327, 3.9913, 3.7255, 3.9716], + device='cuda:0'), covar=tensor([0.0534, 0.1048, 0.2426, 0.0659, 0.3131, 0.0748, 0.0538, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0226, 0.0268, 0.0214, 0.0281, 0.0218, 0.0164, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-03-31 20:28:34,610 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 20:28:44,788 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 20:28:58,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4684.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:29:18,946 INFO [train.py:903] (0/4) Epoch 1, batch 4700, loss[loss=0.4106, simple_loss=0.4279, pruned_loss=0.1967, over 19512.00 frames. ], tot_loss[loss=0.4202, simple_loss=0.4361, pruned_loss=0.2022, over 3811753.40 frames. 
], batch size: 56, lr: 4.27e-02, grad_scale: 4.0 +2023-03-31 20:29:27,997 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+02 9.658e+02 1.202e+03 1.526e+03 2.859e+03, threshold=2.405e+03, percent-clipped=1.0 +2023-03-31 20:29:39,223 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 20:30:21,813 INFO [train.py:903] (0/4) Epoch 1, batch 4750, loss[loss=0.4233, simple_loss=0.4349, pruned_loss=0.2058, over 19708.00 frames. ], tot_loss[loss=0.4171, simple_loss=0.4335, pruned_loss=0.2004, over 3808871.83 frames. ], batch size: 63, lr: 4.26e-02, grad_scale: 4.0 +2023-03-31 20:30:39,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4765.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:31:23,846 INFO [train.py:903] (0/4) Epoch 1, batch 4800, loss[loss=0.4119, simple_loss=0.4259, pruned_loss=0.199, over 17561.00 frames. ], tot_loss[loss=0.4188, simple_loss=0.4351, pruned_loss=0.2012, over 3813173.89 frames. ], batch size: 101, lr: 4.25e-02, grad_scale: 8.0 +2023-03-31 20:31:32,985 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.504e+02 1.038e+03 1.224e+03 1.522e+03 3.175e+03, threshold=2.447e+03, percent-clipped=5.0 +2023-03-31 20:32:13,379 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.1207, 3.7950, 1.9968, 3.4665, 1.4572, 3.5450, 3.3186, 3.6026], + device='cuda:0'), covar=tensor([0.0634, 0.1266, 0.2606, 0.0705, 0.3658, 0.1028, 0.0684, 0.0671], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0236, 0.0264, 0.0214, 0.0288, 0.0220, 0.0167, 0.0172], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-03-31 20:32:24,211 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 20:32:25,500 INFO [train.py:903] (0/4) Epoch 1, batch 4850, loss[loss=0.3844, simple_loss=0.3937, pruned_loss=0.1876, over 19394.00 frames. ], tot_loss[loss=0.4169, simple_loss=0.4334, pruned_loss=0.2002, over 3826733.91 frames. ], batch size: 48, lr: 4.24e-02, grad_scale: 8.0 +2023-03-31 20:32:46,763 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 20:32:54,326 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4874.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:04,558 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4470, 1.7365, 1.5310, 1.4395, 1.4845, 1.0019, 1.0925, 1.4791], + device='cuda:0'), covar=tensor([0.1087, 0.0445, 0.0739, 0.0932, 0.0873, 0.1373, 0.1353, 0.0908], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0149, 0.0176, 0.0212, 0.0152, 0.0237, 0.0244, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:33:06,426 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-03-31 20:33:12,764 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 20:33:12,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 20:33:23,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-03-31 20:33:25,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4899.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:27,241 INFO [train.py:903] (0/4) Epoch 1, batch 4900, loss[loss=0.3553, simple_loss=0.3906, pruned_loss=0.16, over 19665.00 frames. ], tot_loss[loss=0.4137, simple_loss=0.431, pruned_loss=0.1982, over 3821467.67 frames. ], batch size: 53, lr: 4.23e-02, grad_scale: 8.0 +2023-03-31 20:33:36,363 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8082, 1.7053, 1.3147, 1.3626, 1.4245, 1.2762, 0.2733, 1.0140], + device='cuda:0'), covar=tensor([0.0753, 0.0561, 0.0436, 0.0715, 0.0916, 0.0835, 0.1712, 0.1364], + device='cuda:0'), in_proj_covar=tensor([0.0141, 0.0145, 0.0144, 0.0182, 0.0206, 0.0188, 0.0211, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:33:37,035 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.319e+02 9.845e+02 1.167e+03 1.485e+03 2.856e+03, threshold=2.333e+03, percent-clipped=2.0 +2023-03-31 20:33:44,813 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 20:34:29,508 INFO [train.py:903] (0/4) Epoch 1, batch 4950, loss[loss=0.5644, simple_loss=0.5311, pruned_loss=0.2989, over 19768.00 frames. ], tot_loss[loss=0.4134, simple_loss=0.431, pruned_loss=0.1979, over 3815176.53 frames. ], batch size: 56, lr: 4.21e-02, grad_scale: 8.0 +2023-03-31 20:34:38,643 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4203, 1.3842, 1.1946, 1.3709, 1.2744, 1.4526, 1.2536, 1.4455], + device='cuda:0'), covar=tensor([0.0815, 0.1358, 0.1327, 0.0970, 0.1575, 0.0709, 0.1199, 0.0674], + device='cuda:0'), in_proj_covar=tensor([0.0205, 0.0307, 0.0262, 0.0224, 0.0293, 0.0219, 0.0247, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:34:40,549 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 20:35:05,330 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-31 20:35:05,872 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 20:35:31,811 INFO [train.py:903] (0/4) Epoch 1, batch 5000, loss[loss=0.3839, simple_loss=0.3992, pruned_loss=0.1843, over 19741.00 frames. ], tot_loss[loss=0.4119, simple_loss=0.4299, pruned_loss=0.1969, over 3813177.26 frames. ], batch size: 45, lr: 4.20e-02, grad_scale: 8.0 +2023-03-31 20:35:35,556 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 20:35:40,125 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.692e+02 8.720e+02 1.063e+03 1.451e+03 3.452e+03, threshold=2.125e+03, percent-clipped=4.0 +2023-03-31 20:35:46,918 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-03-31 20:35:56,135 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5021.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:18,949 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3566, 3.8898, 2.4864, 3.5852, 1.2670, 3.8963, 3.4202, 3.4909], + device='cuda:0'), covar=tensor([0.0770, 0.1444, 0.2394, 0.0800, 0.3853, 0.0841, 0.0747, 0.0692], + device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0242, 0.0270, 0.0223, 0.0288, 0.0223, 0.0163, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-03-31 20:36:27,130 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5046.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:33,279 INFO [train.py:903] (0/4) Epoch 1, batch 5050, loss[loss=0.4203, simple_loss=0.4476, pruned_loss=0.1965, over 19664.00 frames. ], tot_loss[loss=0.4114, simple_loss=0.4301, pruned_loss=0.1963, over 3826132.29 frames. ], batch size: 55, lr: 4.19e-02, grad_scale: 8.0 +2023-03-31 20:36:41,705 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7763, 2.1753, 2.4870, 2.1339, 1.8227, 2.1265, 0.3707, 1.7279], + device='cuda:0'), covar=tensor([0.1138, 0.0663, 0.0320, 0.0710, 0.1043, 0.0880, 0.2108, 0.1881], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0144, 0.0147, 0.0181, 0.0209, 0.0191, 0.0213, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:37:02,502 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 20:37:34,522 INFO [train.py:903] (0/4) Epoch 1, batch 5100, loss[loss=0.4921, simple_loss=0.4828, pruned_loss=0.2507, over 19658.00 frames. ], tot_loss[loss=0.4101, simple_loss=0.4291, pruned_loss=0.1956, over 3826830.44 frames. ], batch size: 60, lr: 4.18e-02, grad_scale: 8.0 +2023-03-31 20:37:39,766 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 20:37:43,116 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+02 1.065e+03 1.254e+03 1.490e+03 3.647e+03, threshold=2.509e+03, percent-clipped=6.0 +2023-03-31 20:37:43,189 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 20:37:47,668 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-03-31 20:38:26,587 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2238, 3.3012, 3.6972, 3.4799, 1.1726, 3.2820, 2.9811, 3.1371], + device='cuda:0'), covar=tensor([0.0328, 0.0497, 0.0418, 0.0285, 0.3247, 0.0249, 0.0421, 0.1002], + device='cuda:0'), in_proj_covar=tensor([0.0174, 0.0224, 0.0273, 0.0191, 0.0366, 0.0137, 0.0201, 0.0284], + device='cuda:0'), out_proj_covar=tensor([1.1615e-04, 1.4757e-04, 1.8180e-04, 1.1621e-04, 2.0351e-04, 9.0705e-05, + 1.2792e-04, 1.6713e-04], device='cuda:0') +2023-03-31 20:38:33,532 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8662, 1.3250, 1.5369, 1.0470, 2.7291, 3.1549, 3.1207, 3.4918], + device='cuda:0'), covar=tensor([0.1733, 0.2778, 0.2477, 0.2731, 0.0537, 0.0198, 0.0232, 0.0178], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0277, 0.0294, 0.0304, 0.0205, 0.0124, 0.0171, 0.0126], + device='cuda:0'), out_proj_covar=tensor([2.3973e-04, 2.2267e-04, 2.3410e-04, 2.4378e-04, 1.8999e-04, 9.8038e-05, + 1.4029e-04, 1.1110e-04], device='cuda:0') +2023-03-31 20:38:36,973 INFO [train.py:903] (0/4) Epoch 1, batch 5150, loss[loss=0.3708, simple_loss=0.3904, pruned_loss=0.1756, over 15154.00 frames. ], tot_loss[loss=0.4103, simple_loss=0.429, pruned_loss=0.1957, over 3827175.25 frames. ], batch size: 33, lr: 4.17e-02, grad_scale: 8.0 +2023-03-31 20:38:46,431 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 20:39:08,042 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-31 20:39:20,325 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 20:39:33,876 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-31 20:39:37,961 INFO [train.py:903] (0/4) Epoch 1, batch 5200, loss[loss=0.3755, simple_loss=0.3922, pruned_loss=0.1794, over 19720.00 frames. ], tot_loss[loss=0.4104, simple_loss=0.4289, pruned_loss=0.196, over 3832722.06 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 8.0 +2023-03-31 20:39:45,882 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+02 1.028e+03 1.252e+03 1.630e+03 4.880e+03, threshold=2.504e+03, percent-clipped=1.0 +2023-03-31 20:39:50,382 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 20:40:32,320 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 20:40:39,295 INFO [train.py:903] (0/4) Epoch 1, batch 5250, loss[loss=0.4409, simple_loss=0.4598, pruned_loss=0.211, over 18801.00 frames. ], tot_loss[loss=0.4099, simple_loss=0.4288, pruned_loss=0.1955, over 3828000.66 frames. 
], batch size: 74, lr: 4.15e-02, grad_scale: 8.0 +2023-03-31 20:41:24,115 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9224, 4.3564, 5.5941, 5.2400, 1.5825, 5.0440, 4.6878, 4.7335], + device='cuda:0'), covar=tensor([0.0237, 0.0470, 0.0395, 0.0254, 0.3654, 0.0199, 0.0340, 0.0995], + device='cuda:0'), in_proj_covar=tensor([0.0182, 0.0228, 0.0281, 0.0197, 0.0368, 0.0142, 0.0206, 0.0293], + device='cuda:0'), out_proj_covar=tensor([1.2050e-04, 1.5085e-04, 1.8654e-04, 1.1817e-04, 2.0418e-04, 9.2051e-05, + 1.3018e-04, 1.7155e-04], device='cuda:0') +2023-03-31 20:41:39,595 INFO [train.py:903] (0/4) Epoch 1, batch 5300, loss[loss=0.4195, simple_loss=0.4456, pruned_loss=0.1967, over 19677.00 frames. ], tot_loss[loss=0.409, simple_loss=0.4281, pruned_loss=0.1949, over 3832457.56 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 8.0 +2023-03-31 20:41:48,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.962e+02 9.394e+02 1.191e+03 1.647e+03 4.206e+03, threshold=2.383e+03, percent-clipped=5.0 +2023-03-31 20:41:53,347 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 20:42:06,837 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9401, 1.0561, 1.5918, 1.2522, 2.2098, 2.2189, 2.3324, 1.4211], + device='cuda:0'), covar=tensor([0.1759, 0.2019, 0.1410, 0.1716, 0.0830, 0.0770, 0.0846, 0.1518], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0252, 0.0245, 0.0273, 0.0281, 0.0214, 0.0290, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:42:41,824 INFO [train.py:903] (0/4) Epoch 1, batch 5350, loss[loss=0.3744, simple_loss=0.4162, pruned_loss=0.1663, over 19675.00 frames. ], tot_loss[loss=0.4077, simple_loss=0.4275, pruned_loss=0.194, over 3836743.67 frames. ], batch size: 55, lr: 4.13e-02, grad_scale: 8.0 +2023-03-31 20:43:07,200 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9308, 1.4029, 1.6641, 1.3428, 2.6490, 3.1896, 2.9271, 3.1747], + device='cuda:0'), covar=tensor([0.1500, 0.2553, 0.2096, 0.2338, 0.0526, 0.0141, 0.0265, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0276, 0.0292, 0.0299, 0.0199, 0.0118, 0.0173, 0.0125], + device='cuda:0'), out_proj_covar=tensor([2.3952e-04, 2.2431e-04, 2.3378e-04, 2.4216e-04, 1.8555e-04, 9.3408e-05, + 1.4158e-04, 1.1072e-04], device='cuda:0') +2023-03-31 20:43:13,707 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 20:43:43,593 INFO [train.py:903] (0/4) Epoch 1, batch 5400, loss[loss=0.3997, simple_loss=0.4262, pruned_loss=0.1866, over 19763.00 frames. ], tot_loss[loss=0.4093, simple_loss=0.4288, pruned_loss=0.1949, over 3836785.10 frames. ], batch size: 54, lr: 4.12e-02, grad_scale: 8.0 +2023-03-31 20:43:51,063 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.073e+02 9.364e+02 1.084e+03 1.611e+03 4.795e+03, threshold=2.168e+03, percent-clipped=7.0 +2023-03-31 20:44:38,333 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-31 20:44:44,645 INFO [train.py:903] (0/4) Epoch 1, batch 5450, loss[loss=0.3671, simple_loss=0.4071, pruned_loss=0.1635, over 19660.00 frames. ], tot_loss[loss=0.4078, simple_loss=0.4275, pruned_loss=0.194, over 3822688.65 frames. 
], batch size: 55, lr: 4.11e-02, grad_scale: 8.0 +2023-03-31 20:45:33,606 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9011, 0.9721, 1.4539, 1.0347, 2.0206, 1.8432, 1.8709, 1.0512], + device='cuda:0'), covar=tensor([0.1886, 0.2000, 0.1360, 0.1877, 0.0710, 0.0857, 0.0768, 0.1479], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0262, 0.0250, 0.0280, 0.0289, 0.0223, 0.0301, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:45:46,527 INFO [train.py:903] (0/4) Epoch 1, batch 5500, loss[loss=0.3573, simple_loss=0.3944, pruned_loss=0.1601, over 19743.00 frames. ], tot_loss[loss=0.4074, simple_loss=0.4272, pruned_loss=0.1938, over 3824435.73 frames. ], batch size: 51, lr: 4.10e-02, grad_scale: 8.0 +2023-03-31 20:45:54,009 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+02 9.504e+02 1.107e+03 1.412e+03 4.004e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 20:46:08,674 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 20:46:46,625 INFO [train.py:903] (0/4) Epoch 1, batch 5550, loss[loss=0.3367, simple_loss=0.3739, pruned_loss=0.1497, over 18181.00 frames. ], tot_loss[loss=0.4062, simple_loss=0.4261, pruned_loss=0.1931, over 3827132.10 frames. ], batch size: 40, lr: 4.09e-02, grad_scale: 8.0 +2023-03-31 20:46:53,467 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-03-31 20:47:42,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 20:47:47,482 INFO [train.py:903] (0/4) Epoch 1, batch 5600, loss[loss=0.441, simple_loss=0.4545, pruned_loss=0.2138, over 18213.00 frames. ], tot_loss[loss=0.405, simple_loss=0.4253, pruned_loss=0.1923, over 3829139.35 frames. ], batch size: 83, lr: 4.08e-02, grad_scale: 8.0 +2023-03-31 20:47:56,555 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.862e+02 1.009e+03 1.185e+03 1.400e+03 2.216e+03, threshold=2.370e+03, percent-clipped=2.0 +2023-03-31 20:48:12,710 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4273, 2.0506, 2.3324, 2.1189, 3.0387, 3.6877, 3.3637, 3.6761], + device='cuda:0'), covar=tensor([0.1036, 0.1871, 0.1558, 0.1613, 0.0462, 0.0121, 0.0166, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0274, 0.0295, 0.0300, 0.0193, 0.0118, 0.0166, 0.0122], + device='cuda:0'), out_proj_covar=tensor([2.3614e-04, 2.2589e-04, 2.3904e-04, 2.4745e-04, 1.8182e-04, 9.6208e-05, + 1.3747e-04, 1.0952e-04], device='cuda:0') +2023-03-31 20:48:48,825 INFO [train.py:903] (0/4) Epoch 1, batch 5650, loss[loss=0.3308, simple_loss=0.3679, pruned_loss=0.1468, over 19377.00 frames. ], tot_loss[loss=0.4056, simple_loss=0.4255, pruned_loss=0.1929, over 3814040.42 frames. ], batch size: 47, lr: 4.07e-02, grad_scale: 8.0 +2023-03-31 20:49:08,755 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5668.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:49:32,789 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 20:49:49,723 INFO [train.py:903] (0/4) Epoch 1, batch 5700, loss[loss=0.3851, simple_loss=0.4223, pruned_loss=0.1739, over 19505.00 frames. ], tot_loss[loss=0.4077, simple_loss=0.4271, pruned_loss=0.1941, over 3822422.22 frames. 
], batch size: 64, lr: 4.06e-02, grad_scale: 8.0 +2023-03-31 20:49:57,492 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.193e+02 1.084e+03 1.385e+03 1.754e+03 4.325e+03, threshold=2.770e+03, percent-clipped=14.0 +2023-03-31 20:49:57,788 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1855, 3.1952, 3.6306, 3.4206, 1.2986, 3.0414, 2.8334, 3.0134], + device='cuda:0'), covar=tensor([0.0329, 0.0519, 0.0428, 0.0329, 0.3130, 0.0365, 0.0489, 0.0998], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0223, 0.0271, 0.0194, 0.0367, 0.0142, 0.0205, 0.0287], + device='cuda:0'), out_proj_covar=tensor([1.2092e-04, 1.4406e-04, 1.7789e-04, 1.1421e-04, 2.0079e-04, 9.2285e-05, + 1.2764e-04, 1.6519e-04], device='cuda:0') +2023-03-31 20:50:51,579 INFO [train.py:903] (0/4) Epoch 1, batch 5750, loss[loss=0.4694, simple_loss=0.4633, pruned_loss=0.2377, over 13594.00 frames. ], tot_loss[loss=0.4046, simple_loss=0.4253, pruned_loss=0.192, over 3834142.32 frames. ], batch size: 135, lr: 4.05e-02, grad_scale: 8.0 +2023-03-31 20:50:51,602 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 20:50:59,677 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 20:51:06,045 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 20:51:18,541 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5773.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:51:52,609 INFO [train.py:903] (0/4) Epoch 1, batch 5800, loss[loss=0.3816, simple_loss=0.428, pruned_loss=0.1676, over 19294.00 frames. ], tot_loss[loss=0.4021, simple_loss=0.4236, pruned_loss=0.1903, over 3843984.68 frames. ], batch size: 66, lr: 4.04e-02, grad_scale: 8.0 +2023-03-31 20:52:02,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.310e+02 8.969e+02 1.169e+03 1.352e+03 2.735e+03, threshold=2.337e+03, percent-clipped=0.0 +2023-03-31 20:52:16,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 20:52:53,453 INFO [train.py:903] (0/4) Epoch 1, batch 5850, loss[loss=0.4171, simple_loss=0.4466, pruned_loss=0.1937, over 19580.00 frames. ], tot_loss[loss=0.404, simple_loss=0.4251, pruned_loss=0.1915, over 3849203.83 frames. ], batch size: 61, lr: 4.03e-02, grad_scale: 8.0 +2023-03-31 20:53:23,809 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5876.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:53:55,089 INFO [train.py:903] (0/4) Epoch 1, batch 5900, loss[loss=0.3919, simple_loss=0.4291, pruned_loss=0.1773, over 19617.00 frames. ], tot_loss[loss=0.4025, simple_loss=0.4243, pruned_loss=0.1903, over 3851201.67 frames. ], batch size: 57, lr: 4.02e-02, grad_scale: 8.0 +2023-03-31 20:53:58,589 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 20:54:03,114 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.633e+02 8.668e+02 1.127e+03 1.397e+03 3.736e+03, threshold=2.255e+03, percent-clipped=4.0 +2023-03-31 20:54:21,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-03-31 20:54:32,234 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4081, 2.4159, 1.8346, 1.6319, 1.8821, 0.8795, 0.9966, 1.6698], + device='cuda:0'), covar=tensor([0.1217, 0.0349, 0.0813, 0.0682, 0.0786, 0.1458, 0.1375, 0.0814], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0148, 0.0199, 0.0221, 0.0156, 0.0254, 0.0250, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:54:50,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-31 20:54:55,344 INFO [train.py:903] (0/4) Epoch 1, batch 5950, loss[loss=0.3951, simple_loss=0.4225, pruned_loss=0.1839, over 19681.00 frames. ], tot_loss[loss=0.4027, simple_loss=0.4245, pruned_loss=0.1905, over 3846178.79 frames. ], batch size: 53, lr: 4.01e-02, grad_scale: 8.0 +2023-03-31 20:55:55,469 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-6000.pt +2023-03-31 20:55:57,492 INFO [train.py:903] (0/4) Epoch 1, batch 6000, loss[loss=0.4388, simple_loss=0.4688, pruned_loss=0.2044, over 19664.00 frames. ], tot_loss[loss=0.4042, simple_loss=0.4257, pruned_loss=0.1914, over 3843480.00 frames. ], batch size: 58, lr: 4.00e-02, grad_scale: 8.0 +2023-03-31 20:55:57,493 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 20:56:10,587 INFO [train.py:937] (0/4) Epoch 1, validation: loss=0.2784, simple_loss=0.3626, pruned_loss=0.09714, over 944034.00 frames. +2023-03-31 20:56:10,588 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18153MB +2023-03-31 20:56:19,577 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.849e+02 9.012e+02 1.240e+03 1.620e+03 2.952e+03, threshold=2.480e+03, percent-clipped=5.0 +2023-03-31 20:56:19,902 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:56:24,067 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6012.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:57:10,741 INFO [train.py:903] (0/4) Epoch 1, batch 6050, loss[loss=0.5052, simple_loss=0.4994, pruned_loss=0.2555, over 19546.00 frames. ], tot_loss[loss=0.4069, simple_loss=0.4274, pruned_loss=0.1932, over 3830296.51 frames. ], batch size: 61, lr: 3.99e-02, grad_scale: 8.0 +2023-03-31 20:57:24,316 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:57:59,147 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0033, 1.1159, 1.7429, 1.2978, 2.3970, 2.2370, 2.4764, 1.2654], + device='cuda:0'), covar=tensor([0.1252, 0.1489, 0.0996, 0.1218, 0.0598, 0.0600, 0.0649, 0.1239], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0284, 0.0265, 0.0288, 0.0308, 0.0245, 0.0330, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 20:58:12,892 INFO [train.py:903] (0/4) Epoch 1, batch 6100, loss[loss=0.3818, simple_loss=0.4163, pruned_loss=0.1737, over 19766.00 frames. ], tot_loss[loss=0.4046, simple_loss=0.4259, pruned_loss=0.1917, over 3834135.57 frames. 
], batch size: 54, lr: 3.98e-02, grad_scale: 8.0 +2023-03-31 20:58:20,961 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+02 9.509e+02 1.169e+03 1.489e+03 2.977e+03, threshold=2.338e+03, percent-clipped=4.0 +2023-03-31 20:58:32,474 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6117.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:58:44,702 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6127.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:59:13,360 INFO [train.py:903] (0/4) Epoch 1, batch 6150, loss[loss=0.4191, simple_loss=0.4473, pruned_loss=0.1954, over 19666.00 frames. ], tot_loss[loss=0.401, simple_loss=0.4232, pruned_loss=0.1893, over 3839302.54 frames. ], batch size: 60, lr: 3.97e-02, grad_scale: 8.0 +2023-03-31 20:59:15,665 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6153.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:59:28,307 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-31 20:59:42,671 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 20:59:52,046 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-31 20:59:58,491 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6188.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:13,786 INFO [train.py:903] (0/4) Epoch 1, batch 6200, loss[loss=0.3562, simple_loss=0.3973, pruned_loss=0.1576, over 19596.00 frames. ], tot_loss[loss=0.4021, simple_loss=0.4236, pruned_loss=0.1903, over 3822706.56 frames. ], batch size: 52, lr: 3.96e-02, grad_scale: 8.0 +2023-03-31 21:00:20,830 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8445, 1.3835, 1.2043, 1.7998, 1.2967, 1.8217, 1.7556, 1.6728], + device='cuda:0'), covar=tensor([0.0863, 0.1713, 0.1908, 0.1387, 0.2143, 0.1357, 0.1898, 0.1134], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0305, 0.0301, 0.0322, 0.0391, 0.0280, 0.0343, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:00:22,726 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 9.585e+02 1.181e+03 1.511e+03 2.920e+03, threshold=2.362e+03, percent-clipped=2.0 +2023-03-31 21:00:38,370 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:39,666 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:41,250 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 21:00:49,011 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6229.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:52,526 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:01:16,025 INFO [train.py:903] (0/4) Epoch 1, batch 6250, loss[loss=0.4073, simple_loss=0.4277, pruned_loss=0.1935, over 19571.00 frames. ], tot_loss[loss=0.4008, simple_loss=0.4228, pruned_loss=0.1894, over 3820827.23 frames. 
], batch size: 61, lr: 3.95e-02, grad_scale: 8.0 +2023-03-31 21:01:46,722 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 21:01:56,437 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6284.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:02:18,554 INFO [train.py:903] (0/4) Epoch 1, batch 6300, loss[loss=0.3701, simple_loss=0.399, pruned_loss=0.1706, over 19468.00 frames. ], tot_loss[loss=0.3976, simple_loss=0.4211, pruned_loss=0.1871, over 3813678.02 frames. ], batch size: 49, lr: 3.94e-02, grad_scale: 8.0 +2023-03-31 21:02:26,556 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.984e+02 8.812e+02 1.125e+03 1.363e+03 2.149e+03, threshold=2.249e+03, percent-clipped=0.0 +2023-03-31 21:02:45,504 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-31 21:02:58,273 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6335.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:06,727 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8188, 1.2901, 1.4316, 1.0233, 2.3941, 3.2153, 2.8436, 3.3749], + device='cuda:0'), covar=tensor([0.1899, 0.4121, 0.3749, 0.3031, 0.0653, 0.0220, 0.0454, 0.0270], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0287, 0.0313, 0.0303, 0.0198, 0.0114, 0.0177, 0.0121], + device='cuda:0'), out_proj_covar=tensor([2.5964e-04, 2.4466e-04, 2.6142e-04, 2.5841e-04, 1.9106e-04, 9.6840e-05, + 1.4820e-04, 1.1165e-04], device='cuda:0') +2023-03-31 21:03:17,629 INFO [train.py:903] (0/4) Epoch 1, batch 6350, loss[loss=0.3822, simple_loss=0.4196, pruned_loss=0.1724, over 19485.00 frames. ], tot_loss[loss=0.3992, simple_loss=0.4219, pruned_loss=0.1883, over 3831716.59 frames. ], batch size: 64, lr: 3.93e-02, grad_scale: 8.0 +2023-03-31 21:03:18,874 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6352.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:50,810 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6866, 1.3438, 1.2215, 1.8167, 1.5442, 1.7998, 1.9163, 1.6664], + device='cuda:0'), covar=tensor([0.0902, 0.1495, 0.1626, 0.1220, 0.1790, 0.1212, 0.1395, 0.0922], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0302, 0.0294, 0.0314, 0.0380, 0.0279, 0.0337, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:03:58,092 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6383.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:04:11,220 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4715, 1.0275, 1.1570, 0.4076, 2.3096, 2.1184, 1.8671, 2.0358], + device='cuda:0'), covar=tensor([0.1307, 0.2543, 0.2507, 0.2445, 0.0372, 0.0197, 0.0338, 0.0248], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0285, 0.0316, 0.0303, 0.0200, 0.0114, 0.0179, 0.0122], + device='cuda:0'), out_proj_covar=tensor([2.5990e-04, 2.4399e-04, 2.6353e-04, 2.5817e-04, 1.9322e-04, 9.6715e-05, + 1.5059e-04, 1.1409e-04], device='cuda:0') +2023-03-31 21:04:12,771 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.34 vs. limit=5.0 +2023-03-31 21:04:18,887 INFO [train.py:903] (0/4) Epoch 1, batch 6400, loss[loss=0.4028, simple_loss=0.437, pruned_loss=0.1843, over 19618.00 frames. 
], tot_loss[loss=0.3982, simple_loss=0.4217, pruned_loss=0.1873, over 3824453.68 frames. ], batch size: 57, lr: 3.92e-02, grad_scale: 8.0 +2023-03-31 21:04:24,485 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6405.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:04:27,872 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.880e+02 9.359e+02 1.206e+03 1.547e+03 5.333e+03, threshold=2.412e+03, percent-clipped=7.0 +2023-03-31 21:04:28,271 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6408.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:05:09,509 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-31 21:05:19,076 INFO [train.py:903] (0/4) Epoch 1, batch 6450, loss[loss=0.4841, simple_loss=0.4883, pruned_loss=0.24, over 17944.00 frames. ], tot_loss[loss=0.399, simple_loss=0.4222, pruned_loss=0.1879, over 3813154.84 frames. ], batch size: 83, lr: 3.91e-02, grad_scale: 8.0 +2023-03-31 21:05:39,895 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:05:55,618 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6480.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:01,942 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6485.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:05,061 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 21:06:05,478 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6488.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:15,201 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6497.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:21,198 INFO [train.py:903] (0/4) Epoch 1, batch 6500, loss[loss=0.4885, simple_loss=0.4846, pruned_loss=0.2462, over 18763.00 frames. ], tot_loss[loss=0.3985, simple_loss=0.4222, pruned_loss=0.1874, over 3817228.47 frames. ], batch size: 74, lr: 3.90e-02, grad_scale: 8.0 +2023-03-31 21:06:25,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 21:06:28,780 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+02 9.704e+02 1.201e+03 1.443e+03 2.205e+03, threshold=2.402e+03, percent-clipped=0.0 +2023-03-31 21:06:35,097 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6513.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:43,916 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6520.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:58,399 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:15,431 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-31 21:07:21,792 INFO [train.py:903] (0/4) Epoch 1, batch 6550, loss[loss=0.3841, simple_loss=0.4182, pruned_loss=0.1749, over 19673.00 frames. ], tot_loss[loss=0.3982, simple_loss=0.4219, pruned_loss=0.1872, over 3812635.48 frames. 
], batch size: 58, lr: 3.89e-02, grad_scale: 8.0 +2023-03-31 21:07:39,359 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6565.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:45,286 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1785, 1.2627, 1.0073, 0.9246, 1.0326, 0.7693, 0.4130, 1.0647], + device='cuda:0'), covar=tensor([0.0676, 0.0541, 0.1110, 0.0741, 0.0821, 0.1407, 0.1377, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0156, 0.0227, 0.0236, 0.0163, 0.0265, 0.0253, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:07:48,404 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6573.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:10,418 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6591.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:22,325 INFO [train.py:903] (0/4) Epoch 1, batch 6600, loss[loss=0.428, simple_loss=0.4511, pruned_loss=0.2024, over 19662.00 frames. ], tot_loss[loss=0.3946, simple_loss=0.4193, pruned_loss=0.1849, over 3820048.89 frames. ], batch size: 58, lr: 3.89e-02, grad_scale: 16.0 +2023-03-31 21:08:31,047 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.089e+02 8.736e+02 1.082e+03 1.231e+03 3.386e+03, threshold=2.164e+03, percent-clipped=2.0 +2023-03-31 21:08:36,154 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6612.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:40,729 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6616.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:56,077 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6628.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:05,509 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9993, 1.0847, 0.9038, 0.8808, 0.8220, 1.0241, 0.0619, 0.4902], + device='cuda:0'), covar=tensor([0.0461, 0.0510, 0.0330, 0.0417, 0.0914, 0.0461, 0.1190, 0.0942], + device='cuda:0'), in_proj_covar=tensor([0.0177, 0.0178, 0.0179, 0.0223, 0.0252, 0.0227, 0.0234, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:09:19,944 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:20,015 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:24,251 INFO [train.py:903] (0/4) Epoch 1, batch 6650, loss[loss=0.4575, simple_loss=0.4507, pruned_loss=0.2322, over 12854.00 frames. ], tot_loss[loss=0.3953, simple_loss=0.42, pruned_loss=0.1852, over 3817105.37 frames. 
], batch size: 136, lr: 3.88e-02, grad_scale: 4.0 +2023-03-31 21:09:42,128 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3226, 1.0028, 1.0486, 1.3616, 1.0081, 1.2976, 1.2609, 1.2422], + device='cuda:0'), covar=tensor([0.0792, 0.1508, 0.1593, 0.0993, 0.1509, 0.1151, 0.1297, 0.0959], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0303, 0.0296, 0.0330, 0.0379, 0.0272, 0.0341, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:09:56,611 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9964, 1.6552, 1.5094, 1.7933, 1.5526, 1.7824, 1.8959, 1.8407], + device='cuda:0'), covar=tensor([0.0793, 0.1451, 0.1533, 0.1431, 0.2168, 0.1309, 0.1798, 0.0926], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0305, 0.0299, 0.0334, 0.0382, 0.0274, 0.0345, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:09:59,883 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6680.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:09,261 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6688.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:11,575 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8688, 1.4262, 1.5888, 1.2061, 2.7443, 3.3211, 3.1851, 3.5750], + device='cuda:0'), covar=tensor([0.1708, 0.2912, 0.2758, 0.2632, 0.0538, 0.0145, 0.0214, 0.0154], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0294, 0.0323, 0.0309, 0.0205, 0.0111, 0.0180, 0.0123], + device='cuda:0'), out_proj_covar=tensor([2.6290e-04, 2.5506e-04, 2.7242e-04, 2.6599e-04, 2.0068e-04, 9.8375e-05, + 1.5637e-04, 1.1721e-04], device='cuda:0') +2023-03-31 21:10:23,711 INFO [train.py:903] (0/4) Epoch 1, batch 6700, loss[loss=0.4138, simple_loss=0.4396, pruned_loss=0.194, over 19509.00 frames. ], tot_loss[loss=0.3955, simple_loss=0.4196, pruned_loss=0.1857, over 3828002.39 frames. 
], batch size: 64, lr: 3.87e-02, grad_scale: 4.0 +2023-03-31 21:10:35,447 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+02 8.354e+02 1.101e+03 1.693e+03 1.016e+04, threshold=2.202e+03, percent-clipped=16.0 +2023-03-31 21:10:51,612 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6723.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:14,953 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6743.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:19,487 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9753, 3.6917, 2.2900, 3.2950, 1.4769, 3.4693, 3.2486, 3.3720], + device='cuda:0'), covar=tensor([0.0725, 0.1021, 0.2015, 0.0865, 0.3081, 0.0867, 0.0689, 0.0884], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0244, 0.0282, 0.0242, 0.0304, 0.0237, 0.0185, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 21:11:19,670 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5159, 1.5340, 1.1411, 1.5667, 1.4253, 1.4624, 1.3508, 1.6604], + device='cuda:0'), covar=tensor([0.0830, 0.1634, 0.1382, 0.1038, 0.1542, 0.0751, 0.1225, 0.0619], + device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0365, 0.0271, 0.0244, 0.0310, 0.0238, 0.0268, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:11:20,754 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6748.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:23,799 INFO [train.py:903] (0/4) Epoch 1, batch 6750, loss[loss=0.3761, simple_loss=0.4097, pruned_loss=0.1713, over 19373.00 frames. ], tot_loss[loss=0.3926, simple_loss=0.4182, pruned_loss=0.1835, over 3837255.19 frames. ], batch size: 48, lr: 3.86e-02, grad_scale: 4.0 +2023-03-31 21:11:51,614 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:18,952 INFO [train.py:903] (0/4) Epoch 1, batch 6800, loss[loss=0.4861, simple_loss=0.4739, pruned_loss=0.2491, over 13014.00 frames. ], tot_loss[loss=0.3927, simple_loss=0.4184, pruned_loss=0.1835, over 3824598.94 frames. ], batch size: 135, lr: 3.85e-02, grad_scale: 8.0 +2023-03-31 21:12:19,292 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6801.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:28,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.487e+02 9.125e+02 1.072e+03 1.412e+03 3.162e+03, threshold=2.143e+03, percent-clipped=4.0 +2023-03-31 21:12:43,729 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6824.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:48,499 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-1.pt +2023-03-31 21:13:03,356 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 21:13:04,444 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 21:13:06,742 INFO [train.py:903] (0/4) Epoch 2, batch 0, loss[loss=0.4562, simple_loss=0.4637, pruned_loss=0.2244, over 19043.00 frames. ], tot_loss[loss=0.4562, simple_loss=0.4637, pruned_loss=0.2244, over 19043.00 frames. 
], batch size: 69, lr: 3.77e-02, grad_scale: 8.0 +2023-03-31 21:13:06,743 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 21:13:18,419 INFO [train.py:937] (0/4) Epoch 2, validation: loss=0.2802, simple_loss=0.3637, pruned_loss=0.09835, over 944034.00 frames. +2023-03-31 21:13:18,420 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18153MB +2023-03-31 21:13:18,572 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6829.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:28,805 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 21:14:06,576 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6868.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:20,898 INFO [train.py:903] (0/4) Epoch 2, batch 50, loss[loss=0.3667, simple_loss=0.3922, pruned_loss=0.1706, over 19843.00 frames. ], tot_loss[loss=0.3882, simple_loss=0.4138, pruned_loss=0.1813, over 867290.57 frames. ], batch size: 52, lr: 3.76e-02, grad_scale: 8.0 +2023-03-31 21:14:37,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6893.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:41,557 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6896.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:14:49,439 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6903.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:54,401 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 21:14:57,930 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.488e+02 9.076e+02 1.150e+03 1.515e+03 2.802e+03, threshold=2.301e+03, percent-clipped=3.0 +2023-03-31 21:15:19,035 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6927.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,147 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,723 INFO [train.py:903] (0/4) Epoch 2, batch 100, loss[loss=0.5355, simple_loss=0.5121, pruned_loss=0.2794, over 17444.00 frames. ], tot_loss[loss=0.3987, simple_loss=0.4214, pruned_loss=0.1879, over 1515834.46 frames. ], batch size: 101, lr: 3.75e-02, grad_scale: 8.0 +2023-03-31 21:15:29,299 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6936.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:32,189 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 21:15:33,689 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6939.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,518 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:43,891 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.54 vs. 
limit=5.0 +2023-03-31 21:16:01,540 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6961.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:10,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6969.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:23,040 INFO [train.py:903] (0/4) Epoch 2, batch 150, loss[loss=0.3371, simple_loss=0.3823, pruned_loss=0.146, over 19775.00 frames. ], tot_loss[loss=0.3923, simple_loss=0.4172, pruned_loss=0.1837, over 2027462.14 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 4.0 +2023-03-31 21:16:38,559 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6991.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:49,224 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6999.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:03,027 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.870e+02 9.855e+02 1.288e+03 4.108e+03, threshold=1.971e+03, percent-clipped=4.0 +2023-03-31 21:17:19,658 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7024.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:24,032 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 21:17:25,200 INFO [train.py:903] (0/4) Epoch 2, batch 200, loss[loss=0.4042, simple_loss=0.4352, pruned_loss=0.1866, over 19307.00 frames. ], tot_loss[loss=0.3856, simple_loss=0.4128, pruned_loss=0.1792, over 2437375.74 frames. ], batch size: 66, lr: 3.73e-02, grad_scale: 4.0 +2023-03-31 21:17:54,551 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.1090, 3.7815, 4.5865, 4.4044, 1.6319, 4.0600, 3.6830, 4.0447], + device='cuda:0'), covar=tensor([0.0249, 0.0485, 0.0365, 0.0186, 0.2798, 0.0189, 0.0356, 0.0761], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0257, 0.0316, 0.0220, 0.0398, 0.0155, 0.0238, 0.0342], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 21:18:11,606 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7066.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:18:29,133 INFO [train.py:903] (0/4) Epoch 2, batch 250, loss[loss=0.3867, simple_loss=0.4201, pruned_loss=0.1766, over 19528.00 frames. ], tot_loss[loss=0.3831, simple_loss=0.4111, pruned_loss=0.1776, over 2753309.82 frames. ], batch size: 54, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:19:03,847 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:19:09,455 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 7.747e+02 9.740e+02 1.157e+03 2.695e+03, threshold=1.948e+03, percent-clipped=1.0 +2023-03-31 21:19:33,822 INFO [train.py:903] (0/4) Epoch 2, batch 300, loss[loss=0.2842, simple_loss=0.3301, pruned_loss=0.1192, over 19737.00 frames. ], tot_loss[loss=0.3833, simple_loss=0.412, pruned_loss=0.1773, over 2984945.74 frames. ], batch size: 46, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:20:35,435 INFO [train.py:903] (0/4) Epoch 2, batch 350, loss[loss=0.3827, simple_loss=0.4197, pruned_loss=0.1729, over 19657.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.4129, pruned_loss=0.1789, over 3175006.36 frames. 
], batch size: 60, lr: 3.71e-02, grad_scale: 4.0 +2023-03-31 21:20:37,925 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7181.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:20:39,829 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 21:20:54,814 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7195.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:01,700 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:15,795 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 9.781e+02 1.245e+03 1.512e+03 3.081e+03, threshold=2.489e+03, percent-clipped=8.0 +2023-03-31 21:21:26,506 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:32,335 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7225.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:37,138 INFO [train.py:903] (0/4) Epoch 2, batch 400, loss[loss=0.3932, simple_loss=0.4007, pruned_loss=0.1928, over 19763.00 frames. ], tot_loss[loss=0.3868, simple_loss=0.4135, pruned_loss=0.1801, over 3316961.96 frames. ], batch size: 45, lr: 3.70e-02, grad_scale: 8.0 +2023-03-31 21:21:51,080 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7240.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:22:30,090 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7271.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:22:40,124 INFO [train.py:903] (0/4) Epoch 2, batch 450, loss[loss=0.41, simple_loss=0.4378, pruned_loss=0.1911, over 19686.00 frames. ], tot_loss[loss=0.3854, simple_loss=0.4129, pruned_loss=0.179, over 3444386.82 frames. ], batch size: 59, lr: 3.69e-02, grad_scale: 8.0 +2023-03-31 21:22:58,508 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:23:14,127 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 21:23:15,292 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 21:23:19,468 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.877e+02 8.763e+02 1.192e+03 1.491e+03 2.950e+03, threshold=2.384e+03, percent-clipped=4.0 +2023-03-31 21:23:43,129 INFO [train.py:903] (0/4) Epoch 2, batch 500, loss[loss=0.3243, simple_loss=0.3543, pruned_loss=0.1472, over 19779.00 frames. ], tot_loss[loss=0.3847, simple_loss=0.4124, pruned_loss=0.1784, over 3535667.02 frames. 
], batch size: 48, lr: 3.68e-02, grad_scale: 8.0 +2023-03-31 21:24:14,671 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7355.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:24:23,762 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7362.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:37,496 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2387, 3.3390, 3.6636, 3.6151, 1.3006, 3.1628, 3.0620, 3.1328], + device='cuda:0'), covar=tensor([0.0330, 0.0582, 0.0533, 0.0318, 0.3082, 0.0269, 0.0404, 0.1009], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0255, 0.0323, 0.0218, 0.0396, 0.0155, 0.0231, 0.0332], + device='cuda:0'), out_proj_covar=tensor([1.3372e-04, 1.5776e-04, 2.0237e-04, 1.2543e-04, 2.1244e-04, 9.9618e-05, + 1.3496e-04, 1.8547e-04], device='cuda:0') +2023-03-31 21:24:45,279 INFO [train.py:903] (0/4) Epoch 2, batch 550, loss[loss=0.3556, simple_loss=0.4002, pruned_loss=0.1555, over 19658.00 frames. ], tot_loss[loss=0.3858, simple_loss=0.4135, pruned_loss=0.179, over 3606109.16 frames. ], batch size: 55, lr: 3.67e-02, grad_scale: 8.0 +2023-03-31 21:24:53,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7386.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:55,088 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7387.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:24,040 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7410.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:25:26,117 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+02 9.206e+02 1.127e+03 1.377e+03 2.659e+03, threshold=2.254e+03, percent-clipped=2.0 +2023-03-31 21:25:43,338 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:47,659 INFO [train.py:903] (0/4) Epoch 2, batch 600, loss[loss=0.3646, simple_loss=0.3793, pruned_loss=0.1749, over 19743.00 frames. ], tot_loss[loss=0.3852, simple_loss=0.4129, pruned_loss=0.1788, over 3668463.98 frames. ], batch size: 45, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:13,260 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0541, 3.9347, 4.5480, 4.4863, 1.4713, 4.0333, 3.7156, 3.9842], + device='cuda:0'), covar=tensor([0.0265, 0.0447, 0.0388, 0.0194, 0.3081, 0.0219, 0.0358, 0.0906], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0256, 0.0325, 0.0217, 0.0398, 0.0157, 0.0233, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 21:26:26,674 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8169, 1.3272, 1.3040, 1.8682, 1.3948, 1.8009, 2.0529, 2.0713], + device='cuda:0'), covar=tensor([0.1132, 0.1776, 0.1823, 0.1693, 0.2209, 0.1376, 0.1787, 0.0907], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0307, 0.0303, 0.0328, 0.0376, 0.0277, 0.0340, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:26:29,908 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 21:26:50,215 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-31 21:26:50,577 INFO [train.py:903] (0/4) Epoch 2, batch 650, loss[loss=0.3356, simple_loss=0.3785, pruned_loss=0.1463, over 19734.00 frames. ], tot_loss[loss=0.3837, simple_loss=0.4119, pruned_loss=0.1777, over 3699557.05 frames. ], batch size: 51, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:27:07,972 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9658, 5.4437, 2.9988, 4.6588, 1.6755, 5.4665, 5.2931, 5.4825], + device='cuda:0'), covar=tensor([0.0492, 0.0977, 0.2084, 0.0716, 0.3533, 0.0768, 0.0509, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0268, 0.0305, 0.0249, 0.0320, 0.0256, 0.0202, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:27:30,827 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 8.519e+02 1.041e+03 1.431e+03 3.840e+03, threshold=2.082e+03, percent-clipped=3.0 +2023-03-31 21:27:49,374 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7525.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:27:49,584 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7525.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:27:53,956 INFO [train.py:903] (0/4) Epoch 2, batch 700, loss[loss=0.3813, simple_loss=0.4264, pruned_loss=0.1681, over 19661.00 frames. ], tot_loss[loss=0.3815, simple_loss=0.4099, pruned_loss=0.1766, over 3731587.76 frames. ], batch size: 58, lr: 3.65e-02, grad_scale: 8.0 +2023-03-31 21:28:56,975 INFO [train.py:903] (0/4) Epoch 2, batch 750, loss[loss=0.4171, simple_loss=0.4345, pruned_loss=0.1999, over 18455.00 frames. ], tot_loss[loss=0.3844, simple_loss=0.4121, pruned_loss=0.1783, over 3753916.13 frames. ], batch size: 84, lr: 3.64e-02, grad_scale: 8.0 +2023-03-31 21:29:24,821 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6233, 2.8547, 3.0089, 2.9428, 1.1499, 2.7175, 2.4952, 2.5493], + device='cuda:0'), covar=tensor([0.0521, 0.0648, 0.0628, 0.0406, 0.3135, 0.0322, 0.0502, 0.1341], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0261, 0.0330, 0.0224, 0.0398, 0.0162, 0.0234, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 21:29:35,963 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.266e+02 8.676e+02 1.032e+03 1.220e+03 3.020e+03, threshold=2.064e+03, percent-clipped=5.0 +2023-03-31 21:29:36,435 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7611.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:29:55,584 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6872, 1.5532, 1.1953, 1.7705, 1.5343, 1.5378, 1.5204, 1.9568], + device='cuda:0'), covar=tensor([0.0950, 0.2151, 0.1741, 0.1208, 0.1792, 0.0765, 0.1352, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0370, 0.0290, 0.0253, 0.0327, 0.0259, 0.0285, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:29:58,614 INFO [train.py:903] (0/4) Epoch 2, batch 800, loss[loss=0.3877, simple_loss=0.4156, pruned_loss=0.1799, over 19691.00 frames. ], tot_loss[loss=0.3829, simple_loss=0.4113, pruned_loss=0.1772, over 3773181.13 frames. 
], batch size: 53, lr: 3.63e-02, grad_scale: 8.0 +2023-03-31 21:30:01,363 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1400, 1.6394, 1.2706, 2.0925, 1.3805, 2.2003, 2.1142, 1.7614], + device='cuda:0'), covar=tensor([0.0923, 0.1383, 0.1861, 0.1210, 0.2087, 0.0990, 0.1671, 0.0935], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0303, 0.0311, 0.0329, 0.0378, 0.0274, 0.0340, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 21:30:07,109 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7636.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:30:09,131 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7637.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:11,708 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9943, 1.1799, 1.7178, 1.2112, 2.2492, 2.3533, 2.4399, 1.1114], + device='cuda:0'), covar=tensor([0.1397, 0.1755, 0.1207, 0.1416, 0.0794, 0.0739, 0.0870, 0.1596], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0321, 0.0304, 0.0314, 0.0352, 0.0286, 0.0408, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 21:30:12,812 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7640.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,236 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,378 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:16,157 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 21:30:46,859 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7667.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:31:01,250 INFO [train.py:903] (0/4) Epoch 2, batch 850, loss[loss=0.3752, simple_loss=0.4087, pruned_loss=0.1709, over 19843.00 frames. ], tot_loss[loss=0.3842, simple_loss=0.4128, pruned_loss=0.1778, over 3797338.71 frames. ], batch size: 52, lr: 3.62e-02, grad_scale: 8.0 +2023-03-31 21:31:41,593 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+02 9.016e+02 1.057e+03 1.450e+03 5.160e+03, threshold=2.114e+03, percent-clipped=6.0 +2023-03-31 21:31:56,564 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 21:32:02,492 INFO [train.py:903] (0/4) Epoch 2, batch 900, loss[loss=0.3395, simple_loss=0.3657, pruned_loss=0.1567, over 18131.00 frames. ], tot_loss[loss=0.3848, simple_loss=0.4128, pruned_loss=0.1784, over 3794843.97 frames. ], batch size: 40, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:32:04,688 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. 
limit=2.0 +2023-03-31 21:32:26,842 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2082, 1.7613, 1.4606, 1.2026, 1.5722, 0.8850, 0.5274, 1.6857], + device='cuda:0'), covar=tensor([0.0785, 0.0414, 0.0962, 0.0720, 0.0544, 0.1395, 0.1190, 0.0512], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0174, 0.0246, 0.0238, 0.0178, 0.0277, 0.0258, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:32:32,755 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7752.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:43,835 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:44,208 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-31 21:32:53,062 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7769.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:58,707 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6798, 1.4046, 1.1650, 1.5630, 1.1980, 1.3837, 1.3791, 1.5981], + device='cuda:0'), covar=tensor([0.0855, 0.1737, 0.1429, 0.1101, 0.1595, 0.0776, 0.1194, 0.0718], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0362, 0.0280, 0.0252, 0.0317, 0.0249, 0.0281, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:33:06,311 INFO [train.py:903] (0/4) Epoch 2, batch 950, loss[loss=0.4446, simple_loss=0.4608, pruned_loss=0.2142, over 19297.00 frames. ], tot_loss[loss=0.384, simple_loss=0.4123, pruned_loss=0.1779, over 3815827.83 frames. ], batch size: 66, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:33:09,105 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7781.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:33:10,900 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 21:33:39,978 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7806.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:33:46,539 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.660e+02 8.698e+02 1.089e+03 1.494e+03 2.916e+03, threshold=2.178e+03, percent-clipped=6.0 +2023-03-31 21:33:58,941 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7820.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:34:09,630 INFO [train.py:903] (0/4) Epoch 2, batch 1000, loss[loss=0.3629, simple_loss=0.4067, pruned_loss=0.1595, over 19664.00 frames. ], tot_loss[loss=0.3822, simple_loss=0.4114, pruned_loss=0.1765, over 3824385.25 frames. ], batch size: 58, lr: 3.60e-02, grad_scale: 4.0 +2023-03-31 21:35:04,945 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 21:35:11,833 INFO [train.py:903] (0/4) Epoch 2, batch 1050, loss[loss=0.3442, simple_loss=0.371, pruned_loss=0.1587, over 19765.00 frames. ], tot_loss[loss=0.3804, simple_loss=0.41, pruned_loss=0.1754, over 3802499.69 frames. 
], batch size: 45, lr: 3.59e-02, grad_scale: 4.0 +2023-03-31 21:35:18,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7884.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:33,736 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7896.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:46,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 21:35:53,415 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+02 8.878e+02 9.952e+02 1.228e+03 3.126e+03, threshold=1.990e+03, percent-clipped=5.0 +2023-03-31 21:36:05,089 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7921.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:36:14,154 INFO [train.py:903] (0/4) Epoch 2, batch 1100, loss[loss=0.438, simple_loss=0.447, pruned_loss=0.2144, over 17495.00 frames. ], tot_loss[loss=0.3815, simple_loss=0.4106, pruned_loss=0.1762, over 3801683.46 frames. ], batch size: 101, lr: 3.58e-02, grad_scale: 4.0 +2023-03-31 21:36:31,415 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7545, 2.9408, 3.0644, 3.0692, 1.1108, 2.7590, 2.6037, 2.7097], + device='cuda:0'), covar=tensor([0.0483, 0.0566, 0.0635, 0.0391, 0.3120, 0.0320, 0.0461, 0.1253], + device='cuda:0'), in_proj_covar=tensor([0.0228, 0.0251, 0.0331, 0.0228, 0.0391, 0.0157, 0.0228, 0.0353], + device='cuda:0'), out_proj_covar=tensor([1.3955e-04, 1.5496e-04, 2.0549e-04, 1.2859e-04, 2.0777e-04, 9.9032e-05, + 1.3016e-04, 1.9373e-04], device='cuda:0') +2023-03-31 21:36:40,181 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2634, 1.1042, 1.3561, 0.3240, 2.6102, 2.2788, 1.7213, 2.2712], + device='cuda:0'), covar=tensor([0.1433, 0.2671, 0.2434, 0.2567, 0.0302, 0.0212, 0.0468, 0.0245], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0282, 0.0324, 0.0297, 0.0196, 0.0111, 0.0186, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 21:37:16,838 INFO [train.py:903] (0/4) Epoch 2, batch 1150, loss[loss=0.492, simple_loss=0.4811, pruned_loss=0.2515, over 19587.00 frames. ], tot_loss[loss=0.3824, simple_loss=0.4112, pruned_loss=0.1768, over 3800276.00 frames. ], batch size: 61, lr: 3.57e-02, grad_scale: 4.0 +2023-03-31 21:37:26,671 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7986.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:43,808 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-8000.pt +2023-03-31 21:37:54,467 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:58,738 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+02 8.681e+02 1.035e+03 1.273e+03 2.854e+03, threshold=2.070e+03, percent-clipped=5.0 +2023-03-31 21:38:21,506 INFO [train.py:903] (0/4) Epoch 2, batch 1200, loss[loss=0.4473, simple_loss=0.4448, pruned_loss=0.2249, over 13376.00 frames. ], tot_loss[loss=0.3814, simple_loss=0.4105, pruned_loss=0.1762, over 3791795.22 frames. 
], batch size: 136, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:38:26,392 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8033.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:38:52,432 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 21:39:22,668 INFO [train.py:903] (0/4) Epoch 2, batch 1250, loss[loss=0.372, simple_loss=0.4097, pruned_loss=0.1671, over 19619.00 frames. ], tot_loss[loss=0.3825, simple_loss=0.4112, pruned_loss=0.177, over 3796588.26 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:39:50,757 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8101.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:39:57,180 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:40:05,139 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.392e+02 1.041e+03 1.254e+03 3.427e+03, threshold=2.083e+03, percent-clipped=3.0 +2023-03-31 21:40:25,457 INFO [train.py:903] (0/4) Epoch 2, batch 1300, loss[loss=0.3869, simple_loss=0.4219, pruned_loss=0.1759, over 19317.00 frames. ], tot_loss[loss=0.3803, simple_loss=0.4096, pruned_loss=0.1755, over 3795935.85 frames. ], batch size: 66, lr: 3.55e-02, grad_scale: 8.0 +2023-03-31 21:40:39,774 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8140.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:09,939 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8164.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:11,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8165.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:28,092 INFO [train.py:903] (0/4) Epoch 2, batch 1350, loss[loss=0.3858, simple_loss=0.422, pruned_loss=0.1748, over 19298.00 frames. ], tot_loss[loss=0.38, simple_loss=0.409, pruned_loss=0.1755, over 3809187.03 frames. ], batch size: 66, lr: 3.54e-02, grad_scale: 8.0 +2023-03-31 21:41:31,735 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8759, 1.3831, 1.6613, 1.4212, 2.7452, 3.0915, 3.1457, 3.3129], + device='cuda:0'), covar=tensor([0.1323, 0.2483, 0.2362, 0.2031, 0.0463, 0.0154, 0.0227, 0.0135], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0280, 0.0321, 0.0290, 0.0199, 0.0110, 0.0183, 0.0111], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 21:41:42,272 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 21:42:08,819 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.809e+02 9.088e+02 1.109e+03 1.527e+03 2.312e+03, threshold=2.218e+03, percent-clipped=6.0 +2023-03-31 21:42:19,621 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:42:30,883 INFO [train.py:903] (0/4) Epoch 2, batch 1400, loss[loss=0.334, simple_loss=0.3814, pruned_loss=0.1433, over 19724.00 frames. ], tot_loss[loss=0.3764, simple_loss=0.4067, pruned_loss=0.1731, over 3829762.47 frames. ], batch size: 51, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:32,681 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-03-31 21:43:33,840 INFO [train.py:903] (0/4) Epoch 2, batch 1450, loss[loss=0.3765, simple_loss=0.4028, pruned_loss=0.1751, over 19470.00 frames. ], tot_loss[loss=0.3749, simple_loss=0.4058, pruned_loss=0.1721, over 3833242.76 frames. ], batch size: 49, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:34,146 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8279.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:43:38,616 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8283.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:44:15,421 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+02 8.724e+02 1.078e+03 1.353e+03 2.729e+03, threshold=2.156e+03, percent-clipped=3.0 +2023-03-31 21:44:30,498 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6645, 1.8203, 1.2636, 1.2062, 1.2048, 1.4419, 0.0978, 0.7758], + device='cuda:0'), covar=tensor([0.0499, 0.0465, 0.0326, 0.0434, 0.0940, 0.0552, 0.1068, 0.0857], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0199, 0.0190, 0.0226, 0.0266, 0.0236, 0.0240, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:44:35,646 INFO [train.py:903] (0/4) Epoch 2, batch 1500, loss[loss=0.3766, simple_loss=0.3989, pruned_loss=0.1771, over 19576.00 frames. ], tot_loss[loss=0.3766, simple_loss=0.4065, pruned_loss=0.1733, over 3820863.57 frames. ], batch size: 52, lr: 3.52e-02, grad_scale: 8.0 +2023-03-31 21:45:10,884 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8357.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:45:38,213 INFO [train.py:903] (0/4) Epoch 2, batch 1550, loss[loss=0.3911, simple_loss=0.4228, pruned_loss=0.1797, over 19588.00 frames. ], tot_loss[loss=0.3758, simple_loss=0.4059, pruned_loss=0.1728, over 3830874.48 frames. ], batch size: 52, lr: 3.51e-02, grad_scale: 8.0 +2023-03-31 21:45:42,320 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8382.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:46:19,242 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.712e+02 9.525e+02 1.175e+03 1.582e+03 3.285e+03, threshold=2.351e+03, percent-clipped=5.0 +2023-03-31 21:46:41,077 INFO [train.py:903] (0/4) Epoch 2, batch 1600, loss[loss=0.3627, simple_loss=0.394, pruned_loss=0.1657, over 19841.00 frames. ], tot_loss[loss=0.3747, simple_loss=0.4054, pruned_loss=0.172, over 3808778.47 frames. ], batch size: 52, lr: 3.50e-02, grad_scale: 8.0 +2023-03-31 21:47:02,053 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 21:47:39,723 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:47:42,841 INFO [train.py:903] (0/4) Epoch 2, batch 1650, loss[loss=0.332, simple_loss=0.3704, pruned_loss=0.1468, over 19576.00 frames. ], tot_loss[loss=0.3723, simple_loss=0.4035, pruned_loss=0.1705, over 3812333.28 frames. 
], batch size: 52, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:47:56,679 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7657, 4.3566, 2.5378, 3.9938, 1.5313, 4.1533, 3.8967, 4.1299], + device='cuda:0'), covar=tensor([0.0470, 0.0922, 0.1881, 0.0618, 0.3306, 0.0789, 0.0683, 0.0610], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0268, 0.0301, 0.0242, 0.0320, 0.0258, 0.0201, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 21:48:10,065 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:48:23,186 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.971e+02 8.791e+02 1.048e+03 1.403e+03 4.696e+03, threshold=2.096e+03, percent-clipped=2.0 +2023-03-31 21:48:44,117 INFO [train.py:903] (0/4) Epoch 2, batch 1700, loss[loss=0.3832, simple_loss=0.4168, pruned_loss=0.1749, over 19690.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.4048, pruned_loss=0.1721, over 3819875.70 frames. ], batch size: 59, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:51,035 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8535.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:17,272 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9430, 2.0422, 1.5988, 1.6429, 1.5684, 1.6323, 0.7244, 1.3635], + device='cuda:0'), covar=tensor([0.0390, 0.0383, 0.0253, 0.0334, 0.0537, 0.0447, 0.0790, 0.0651], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0199, 0.0190, 0.0231, 0.0270, 0.0239, 0.0242, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:49:23,079 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8560.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,924 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 21:49:31,719 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3334, 0.9762, 1.2231, 1.1482, 2.0540, 1.0978, 1.8212, 1.9503], + device='cuda:0'), covar=tensor([0.0652, 0.2940, 0.2707, 0.1877, 0.0738, 0.1984, 0.1039, 0.0863], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0293, 0.0281, 0.0272, 0.0240, 0.0314, 0.0256, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:49:46,288 INFO [train.py:903] (0/4) Epoch 2, batch 1750, loss[loss=0.3709, simple_loss=0.4124, pruned_loss=0.1646, over 19525.00 frames. ], tot_loss[loss=0.3759, simple_loss=0.4059, pruned_loss=0.173, over 3812070.02 frames. ], batch size: 54, lr: 3.48e-02, grad_scale: 8.0 +2023-03-31 21:50:27,276 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.711e+02 8.589e+02 1.062e+03 1.367e+03 2.706e+03, threshold=2.124e+03, percent-clipped=6.0 +2023-03-31 21:50:46,999 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8627.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:50:48,943 INFO [train.py:903] (0/4) Epoch 2, batch 1800, loss[loss=0.3974, simple_loss=0.4168, pruned_loss=0.189, over 19663.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4035, pruned_loss=0.1709, over 3822914.00 frames. 
], batch size: 53, lr: 3.47e-02, grad_scale: 8.0 +2023-03-31 21:51:48,862 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 21:51:52,355 INFO [train.py:903] (0/4) Epoch 2, batch 1850, loss[loss=0.3081, simple_loss=0.3531, pruned_loss=0.1316, over 19351.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4039, pruned_loss=0.1707, over 3820789.65 frames. ], batch size: 47, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:52:26,719 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 21:52:32,412 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.683e+02 8.545e+02 1.022e+03 1.402e+03 2.945e+03, threshold=2.044e+03, percent-clipped=4.0 +2023-03-31 21:52:53,437 INFO [train.py:903] (0/4) Epoch 2, batch 1900, loss[loss=0.5349, simple_loss=0.4922, pruned_loss=0.2889, over 13397.00 frames. ], tot_loss[loss=0.3717, simple_loss=0.403, pruned_loss=0.1702, over 3817412.42 frames. ], batch size: 138, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:53:10,022 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:13,048 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 21:53:19,062 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 21:53:44,522 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 21:53:51,870 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8445, 5.4256, 2.9988, 4.6512, 1.7866, 5.4857, 5.0549, 5.3056], + device='cuda:0'), covar=tensor([0.0410, 0.0865, 0.1672, 0.0589, 0.3167, 0.0612, 0.0536, 0.0576], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0267, 0.0300, 0.0243, 0.0320, 0.0260, 0.0206, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 21:53:55,409 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8778.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:57,531 INFO [train.py:903] (0/4) Epoch 2, batch 1950, loss[loss=0.3861, simple_loss=0.4163, pruned_loss=0.1779, over 19746.00 frames. ], tot_loss[loss=0.3698, simple_loss=0.402, pruned_loss=0.1688, over 3827961.52 frames. ], batch size: 63, lr: 3.45e-02, grad_scale: 8.0 +2023-03-31 21:54:38,567 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.007e+02 8.441e+02 1.013e+03 1.280e+03 2.038e+03, threshold=2.026e+03, percent-clipped=0.0 +2023-03-31 21:55:00,983 INFO [train.py:903] (0/4) Epoch 2, batch 2000, loss[loss=0.3455, simple_loss=0.3718, pruned_loss=0.1596, over 18612.00 frames. ], tot_loss[loss=0.3697, simple_loss=0.4022, pruned_loss=0.1686, over 3826922.78 frames. ], batch size: 41, lr: 3.44e-02, grad_scale: 8.0 +2023-03-31 21:56:00,559 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 21:56:01,573 INFO [train.py:903] (0/4) Epoch 2, batch 2050, loss[loss=0.3429, simple_loss=0.3732, pruned_loss=0.1562, over 19125.00 frames. ], tot_loss[loss=0.3715, simple_loss=0.4033, pruned_loss=0.1698, over 3827805.17 frames. 
], batch size: 42, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:56:19,561 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 21:56:20,748 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 21:56:41,655 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 21:56:44,118 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.978e+02 1.029e+03 1.194e+03 1.427e+03 4.040e+03, threshold=2.389e+03, percent-clipped=7.0 +2023-03-31 21:56:51,785 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-31 21:57:05,505 INFO [train.py:903] (0/4) Epoch 2, batch 2100, loss[loss=0.3194, simple_loss=0.3698, pruned_loss=0.1345, over 19741.00 frames. ], tot_loss[loss=0.3722, simple_loss=0.4041, pruned_loss=0.1701, over 3828279.55 frames. ], batch size: 51, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:57:25,717 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8945.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:57:36,778 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 21:57:59,444 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 21:58:07,609 INFO [train.py:903] (0/4) Epoch 2, batch 2150, loss[loss=0.322, simple_loss=0.3653, pruned_loss=0.1393, over 19649.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4014, pruned_loss=0.1676, over 3828995.49 frames. ], batch size: 53, lr: 3.42e-02, grad_scale: 8.0 +2023-03-31 21:58:30,522 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-31 21:58:32,326 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8998.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:40,102 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9004.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:49,979 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+02 7.392e+02 9.171e+02 1.181e+03 2.165e+03, threshold=1.834e+03, percent-clipped=0.0 +2023-03-31 21:59:04,434 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9023.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:11,992 INFO [train.py:903] (0/4) Epoch 2, batch 2200, loss[loss=0.333, simple_loss=0.3716, pruned_loss=0.1472, over 19770.00 frames. ], tot_loss[loss=0.3668, simple_loss=0.4, pruned_loss=0.1668, over 3824046.26 frames. 
], batch size: 48, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 21:59:14,465 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9031.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:14,585 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4665, 2.1338, 1.6080, 1.6203, 1.6030, 1.4751, 0.3069, 1.0612], + device='cuda:0'), covar=tensor([0.0477, 0.0404, 0.0335, 0.0484, 0.0863, 0.0642, 0.1132, 0.0796], + device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0208, 0.0201, 0.0245, 0.0286, 0.0253, 0.0256, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 22:00:11,531 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-31 22:00:13,986 INFO [train.py:903] (0/4) Epoch 2, batch 2250, loss[loss=0.4021, simple_loss=0.4317, pruned_loss=0.1863, over 18681.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4017, pruned_loss=0.1674, over 3813707.48 frames. ], batch size: 74, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 22:00:56,131 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.993e+02 8.727e+02 9.943e+02 1.293e+03 2.077e+03, threshold=1.989e+03, percent-clipped=4.0 +2023-03-31 22:01:08,135 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9122.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:01:17,887 INFO [train.py:903] (0/4) Epoch 2, batch 2300, loss[loss=0.3751, simple_loss=0.4078, pruned_loss=0.1711, over 19673.00 frames. ], tot_loss[loss=0.3665, simple_loss=0.4007, pruned_loss=0.1662, over 3829350.59 frames. ], batch size: 59, lr: 3.40e-02, grad_scale: 8.0 +2023-03-31 22:01:29,610 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6821, 1.8317, 2.2615, 1.9485, 3.0236, 3.4419, 3.4705, 3.5562], + device='cuda:0'), covar=tensor([0.1586, 0.2012, 0.2008, 0.1635, 0.0517, 0.0143, 0.0170, 0.0151], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0280, 0.0323, 0.0288, 0.0197, 0.0107, 0.0188, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 22:01:30,529 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-03-31 22:02:11,734 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8288, 1.1758, 1.3524, 1.5425, 2.4692, 1.1540, 1.7629, 2.4146], + device='cuda:0'), covar=tensor([0.0576, 0.2980, 0.2610, 0.1657, 0.0576, 0.2199, 0.1126, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0302, 0.0280, 0.0269, 0.0250, 0.0318, 0.0257, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 22:02:19,087 INFO [train.py:903] (0/4) Epoch 2, batch 2350, loss[loss=0.4073, simple_loss=0.4275, pruned_loss=0.1935, over 18757.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4014, pruned_loss=0.1673, over 3818127.56 frames. ], batch size: 74, lr: 3.39e-02, grad_scale: 8.0 +2023-03-31 22:03:00,735 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.988e+02 9.117e+02 1.090e+03 1.432e+03 2.529e+03, threshold=2.180e+03, percent-clipped=5.0 +2023-03-31 22:03:00,800 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 22:03:18,246 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-03-31 22:03:22,888 INFO [train.py:903] (0/4) Epoch 2, batch 2400, loss[loss=0.4009, simple_loss=0.4336, pruned_loss=0.1841, over 19504.00 frames. ], tot_loss[loss=0.3706, simple_loss=0.4035, pruned_loss=0.1689, over 3813640.02 frames. ], batch size: 64, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:03:32,266 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9237.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:24,306 INFO [train.py:903] (0/4) Epoch 2, batch 2450, loss[loss=0.4605, simple_loss=0.4584, pruned_loss=0.2314, over 13617.00 frames. ], tot_loss[loss=0.3701, simple_loss=0.4031, pruned_loss=0.1685, over 3808790.52 frames. ], batch size: 135, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:04:38,145 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9289.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:57,186 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-31 22:05:06,391 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 8.791e+02 1.096e+03 1.484e+03 3.289e+03, threshold=2.192e+03, percent-clipped=7.0 +2023-03-31 22:05:27,995 INFO [train.py:903] (0/4) Epoch 2, batch 2500, loss[loss=0.3447, simple_loss=0.3936, pruned_loss=0.1479, over 19537.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.4028, pruned_loss=0.1677, over 3818691.58 frames. ], batch size: 56, lr: 3.37e-02, grad_scale: 8.0 +2023-03-31 22:05:52,423 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9348.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:25,085 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9375.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:29,686 INFO [train.py:903] (0/4) Epoch 2, batch 2550, loss[loss=0.4652, simple_loss=0.4568, pruned_loss=0.2368, over 13373.00 frames. ], tot_loss[loss=0.3689, simple_loss=0.4019, pruned_loss=0.1679, over 3813956.48 frames. ], batch size: 136, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:01,775 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9404.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:07:11,556 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+02 8.134e+02 9.492e+02 1.275e+03 2.544e+03, threshold=1.898e+03, percent-clipped=3.0 +2023-03-31 22:07:12,954 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8418, 5.1677, 2.3863, 4.6086, 1.4547, 5.2615, 5.0664, 5.1846], + device='cuda:0'), covar=tensor([0.0509, 0.1151, 0.2403, 0.0628, 0.3767, 0.0760, 0.0534, 0.0623], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0280, 0.0314, 0.0259, 0.0328, 0.0267, 0.0214, 0.0233], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 22:07:25,225 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 22:07:33,274 INFO [train.py:903] (0/4) Epoch 2, batch 2600, loss[loss=0.3651, simple_loss=0.4067, pruned_loss=0.1617, over 19770.00 frames. ], tot_loss[loss=0.3664, simple_loss=0.4004, pruned_loss=0.1661, over 3832392.05 frames. 
], batch size: 56, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:36,924 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:14,519 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9463.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:34,691 INFO [train.py:903] (0/4) Epoch 2, batch 2650, loss[loss=0.3679, simple_loss=0.4088, pruned_loss=0.1635, over 19250.00 frames. ], tot_loss[loss=0.3673, simple_loss=0.4011, pruned_loss=0.1668, over 3823923.92 frames. ], batch size: 66, lr: 3.35e-02, grad_scale: 8.0 +2023-03-31 22:08:49,014 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9490.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:52,630 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6156, 1.2220, 1.5941, 1.2154, 2.8166, 3.3828, 3.4791, 3.5928], + device='cuda:0'), covar=tensor([0.1504, 0.2748, 0.2634, 0.2242, 0.0407, 0.0145, 0.0170, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0283, 0.0321, 0.0292, 0.0194, 0.0108, 0.0183, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 22:08:52,710 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9493.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:53,487 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 22:09:09,376 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.83 vs. limit=5.0 +2023-03-31 22:09:16,684 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.258e+02 8.887e+02 1.023e+03 1.383e+03 3.476e+03, threshold=2.047e+03, percent-clipped=7.0 +2023-03-31 22:09:24,004 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9518.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:09:36,963 INFO [train.py:903] (0/4) Epoch 2, batch 2700, loss[loss=0.4111, simple_loss=0.44, pruned_loss=0.1911, over 19690.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.3995, pruned_loss=0.1655, over 3841283.22 frames. ], batch size: 60, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:10:39,553 INFO [train.py:903] (0/4) Epoch 2, batch 2750, loss[loss=0.345, simple_loss=0.3847, pruned_loss=0.1526, over 19753.00 frames. ], tot_loss[loss=0.366, simple_loss=0.4, pruned_loss=0.166, over 3839921.12 frames. ], batch size: 54, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:11:08,785 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9602.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:11:20,990 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.046e+02 9.047e+02 1.065e+03 1.297e+03 2.590e+03, threshold=2.130e+03, percent-clipped=3.0 +2023-03-31 22:11:43,322 INFO [train.py:903] (0/4) Epoch 2, batch 2800, loss[loss=0.4125, simple_loss=0.4196, pruned_loss=0.2027, over 19665.00 frames. ], tot_loss[loss=0.364, simple_loss=0.3984, pruned_loss=0.1648, over 3835929.53 frames. 
], batch size: 53, lr: 3.33e-02, grad_scale: 8.0 +2023-03-31 22:11:56,240 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0419, 1.8546, 1.8612, 2.8072, 2.0373, 2.6834, 2.1398, 1.8532], + device='cuda:0'), covar=tensor([0.1087, 0.0946, 0.0636, 0.0529, 0.1082, 0.0326, 0.1191, 0.0869], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0296, 0.0315, 0.0407, 0.0385, 0.0214, 0.0414, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:12:20,914 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9660.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:41,292 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9676.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:44,609 INFO [train.py:903] (0/4) Epoch 2, batch 2850, loss[loss=0.3159, simple_loss=0.3532, pruned_loss=0.1393, over 19730.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.3972, pruned_loss=0.1641, over 3840014.17 frames. ], batch size: 46, lr: 3.32e-02, grad_scale: 8.0 +2023-03-31 22:12:51,698 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9685.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:26,587 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.408e+02 8.898e+02 1.125e+03 1.384e+03 2.599e+03, threshold=2.251e+03, percent-clipped=6.0 +2023-03-31 22:13:35,219 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:45,574 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 22:13:46,841 INFO [train.py:903] (0/4) Epoch 2, batch 2900, loss[loss=0.3726, simple_loss=0.4078, pruned_loss=0.1687, over 19778.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.398, pruned_loss=0.1642, over 3828873.78 frames. ], batch size: 56, lr: 3.31e-02, grad_scale: 16.0 +2023-03-31 22:13:58,075 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.34 vs. limit=5.0 +2023-03-31 22:14:05,839 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9744.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:09,036 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9746.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:27,836 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:39,277 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:45,585 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:49,525 INFO [train.py:903] (0/4) Epoch 2, batch 2950, loss[loss=0.414, simple_loss=0.433, pruned_loss=0.1975, over 19365.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.3996, pruned_loss=0.1654, over 3827686.46 frames. ], batch size: 70, lr: 3.31e-02, grad_scale: 8.0 +2023-03-31 22:15:31,703 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+02 8.475e+02 1.131e+03 1.413e+03 3.215e+03, threshold=2.262e+03, percent-clipped=4.0 +2023-03-31 22:15:53,069 INFO [train.py:903] (0/4) Epoch 2, batch 3000, loss[loss=0.4689, simple_loss=0.4646, pruned_loss=0.2366, over 13128.00 frames. 
], tot_loss[loss=0.3656, simple_loss=0.3999, pruned_loss=0.1656, over 3835819.41 frames. ], batch size: 137, lr: 3.30e-02, grad_scale: 4.0 +2023-03-31 22:15:53,070 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 22:16:04,424 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7867, 1.2247, 1.4847, 1.6075, 2.5024, 1.2682, 1.8054, 2.4733], + device='cuda:0'), covar=tensor([0.0580, 0.3315, 0.3190, 0.2045, 0.0596, 0.2661, 0.1274, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0293, 0.0284, 0.0271, 0.0242, 0.0309, 0.0252, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 22:16:06,238 INFO [train.py:937] (0/4) Epoch 2, validation: loss=0.2513, simple_loss=0.3423, pruned_loss=0.08019, over 944034.00 frames. +2023-03-31 22:16:06,240 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18153MB +2023-03-31 22:16:12,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 22:16:53,686 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0664, 1.1118, 1.9409, 1.3096, 2.6445, 2.4787, 3.0451, 1.3158], + device='cuda:0'), covar=tensor([0.1378, 0.2011, 0.1099, 0.1310, 0.0833, 0.0783, 0.0999, 0.1787], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0372, 0.0340, 0.0345, 0.0396, 0.0318, 0.0465, 0.0355], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:17:08,087 INFO [train.py:903] (0/4) Epoch 2, batch 3050, loss[loss=0.3934, simple_loss=0.4155, pruned_loss=0.1857, over 13285.00 frames. ], tot_loss[loss=0.3646, simple_loss=0.399, pruned_loss=0.1651, over 3832343.06 frames. ], batch size: 137, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:17:22,786 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9891.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:17:50,735 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 8.806e+02 1.158e+03 1.402e+03 3.282e+03, threshold=2.315e+03, percent-clipped=2.0 +2023-03-31 22:18:10,032 INFO [train.py:903] (0/4) Epoch 2, batch 3100, loss[loss=0.4447, simple_loss=0.4463, pruned_loss=0.2215, over 13484.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.3993, pruned_loss=0.1657, over 3814990.23 frames. ], batch size: 135, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:18:29,665 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9946.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:19:11,238 INFO [train.py:903] (0/4) Epoch 2, batch 3150, loss[loss=0.3929, simple_loss=0.418, pruned_loss=0.1839, over 19483.00 frames. ], tot_loss[loss=0.3645, simple_loss=0.3992, pruned_loss=0.1649, over 3823152.07 frames. ], batch size: 64, lr: 3.28e-02, grad_scale: 4.0 +2023-03-31 22:19:39,329 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-10000.pt +2023-03-31 22:19:42,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-03-31 22:19:48,737 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7861, 3.8843, 4.3066, 4.1500, 1.3647, 3.7845, 3.4299, 3.8068], + device='cuda:0'), covar=tensor([0.0362, 0.0493, 0.0412, 0.0288, 0.3150, 0.0212, 0.0363, 0.0832], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0278, 0.0367, 0.0265, 0.0413, 0.0178, 0.0252, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 22:19:50,333 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-31 22:19:56,512 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 7.345e+02 9.404e+02 1.247e+03 3.615e+03, threshold=1.881e+03, percent-clipped=3.0 +2023-03-31 22:20:03,763 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10020.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:15,208 INFO [train.py:903] (0/4) Epoch 2, batch 3200, loss[loss=0.3473, simple_loss=0.3856, pruned_loss=0.1545, over 19579.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.3989, pruned_loss=0.1644, over 3821589.12 frames. ], batch size: 52, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:20:50,849 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10057.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:55,736 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:21:19,931 INFO [train.py:903] (0/4) Epoch 2, batch 3250, loss[loss=0.4166, simple_loss=0.4348, pruned_loss=0.1992, over 19664.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.3989, pruned_loss=0.1639, over 3826267.97 frames. ], batch size: 53, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:21:21,258 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10080.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:21:41,233 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3190, 1.4532, 1.4850, 1.9350, 2.9991, 1.2752, 1.9238, 2.8162], + device='cuda:0'), covar=tensor([0.0339, 0.2628, 0.2782, 0.1461, 0.0451, 0.2252, 0.1306, 0.0541], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0299, 0.0287, 0.0272, 0.0246, 0.0315, 0.0253, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:21:50,583 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:02,472 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+02 8.486e+02 1.037e+03 1.291e+03 3.604e+03, threshold=2.074e+03, percent-clipped=6.0 +2023-03-31 22:22:20,719 INFO [train.py:903] (0/4) Epoch 2, batch 3300, loss[loss=0.3509, simple_loss=0.3913, pruned_loss=0.1552, over 19728.00 frames. ], tot_loss[loss=0.3633, simple_loss=0.3987, pruned_loss=0.1639, over 3839872.92 frames. ], batch size: 51, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:22:25,200 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-03-31 22:22:27,887 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10135.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:42,679 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10147.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:14,925 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10172.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:22,721 INFO [train.py:903] (0/4) Epoch 2, batch 3350, loss[loss=0.3901, simple_loss=0.4221, pruned_loss=0.179, over 19518.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.3983, pruned_loss=0.1636, over 3839546.30 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:23:51,296 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9639, 1.2507, 1.5379, 1.9932, 1.5275, 1.9626, 2.1140, 1.9016], + device='cuda:0'), covar=tensor([0.0822, 0.1620, 0.1455, 0.1170, 0.1655, 0.1001, 0.1265, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0309, 0.0304, 0.0323, 0.0363, 0.0269, 0.0327, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 22:24:07,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.361e+02 9.909e+02 1.198e+03 2.844e+03, threshold=1.982e+03, percent-clipped=3.0 +2023-03-31 22:24:14,899 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6936, 1.8912, 1.7108, 2.5064, 4.3641, 1.4395, 2.1960, 3.9048], + device='cuda:0'), covar=tensor([0.0281, 0.2536, 0.2522, 0.1578, 0.0315, 0.2274, 0.1127, 0.0466], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0298, 0.0286, 0.0274, 0.0247, 0.0316, 0.0254, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:24:14,941 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:24:25,935 INFO [train.py:903] (0/4) Epoch 2, batch 3400, loss[loss=0.3441, simple_loss=0.397, pruned_loss=0.1456, over 19337.00 frames. ], tot_loss[loss=0.3617, simple_loss=0.3979, pruned_loss=0.1627, over 3832583.61 frames. ], batch size: 66, lr: 3.25e-02, grad_scale: 8.0 +2023-03-31 22:25:29,366 INFO [train.py:903] (0/4) Epoch 2, batch 3450, loss[loss=0.3218, simple_loss=0.3718, pruned_loss=0.1359, over 19673.00 frames. ], tot_loss[loss=0.3639, simple_loss=0.3996, pruned_loss=0.1642, over 3834807.66 frames. ], batch size: 53, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:25:32,668 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 22:26:13,346 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+02 9.409e+02 1.166e+03 1.453e+03 2.796e+03, threshold=2.333e+03, percent-clipped=9.0 +2023-03-31 22:26:17,036 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10317.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:26:31,778 INFO [train.py:903] (0/4) Epoch 2, batch 3500, loss[loss=0.3795, simple_loss=0.3963, pruned_loss=0.1814, over 19356.00 frames. ], tot_loss[loss=0.3644, simple_loss=0.3997, pruned_loss=0.1645, over 3821311.26 frames. 
], batch size: 47, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:26:47,007 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10342.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:27:33,760 INFO [train.py:903] (0/4) Epoch 2, batch 3550, loss[loss=0.3438, simple_loss=0.393, pruned_loss=0.1473, over 19787.00 frames. ], tot_loss[loss=0.3605, simple_loss=0.3968, pruned_loss=0.1621, over 3815299.35 frames. ], batch size: 56, lr: 3.23e-02, grad_scale: 4.0 +2023-03-31 22:27:49,477 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10391.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:27:50,564 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3588, 1.0142, 1.4067, 0.3459, 2.6952, 2.3015, 2.0539, 2.4480], + device='cuda:0'), covar=tensor([0.1224, 0.2681, 0.2640, 0.2284, 0.0281, 0.0169, 0.0377, 0.0186], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0274, 0.0316, 0.0280, 0.0191, 0.0101, 0.0184, 0.0110], + device='cuda:0'), out_proj_covar=tensor([2.6526e-04, 2.7567e-04, 2.9945e-04, 2.7519e-04, 2.0554e-04, 9.9056e-05, + 1.7069e-04, 1.1471e-04], device='cuda:0') +2023-03-31 22:28:04,073 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10401.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:20,155 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+02 7.776e+02 1.013e+03 1.369e+03 3.978e+03, threshold=2.027e+03, percent-clipped=2.0 +2023-03-31 22:28:21,557 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10416.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:30,580 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10424.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:28:35,007 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10427.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:37,126 INFO [train.py:903] (0/4) Epoch 2, batch 3600, loss[loss=0.3096, simple_loss=0.3663, pruned_loss=0.1265, over 19601.00 frames. ], tot_loss[loss=0.3585, simple_loss=0.3958, pruned_loss=0.1606, over 3822131.71 frames. ], batch size: 57, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:32,507 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10472.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:37,366 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:41,348 INFO [train.py:903] (0/4) Epoch 2, batch 3650, loss[loss=0.4036, simple_loss=0.4334, pruned_loss=0.1869, over 19753.00 frames. ], tot_loss[loss=0.3558, simple_loss=0.3935, pruned_loss=0.1591, over 3835726.81 frames. ], batch size: 63, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:48,911 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-31 22:30:09,087 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:26,009 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+02 8.454e+02 1.072e+03 1.396e+03 2.688e+03, threshold=2.143e+03, percent-clipped=6.0 +2023-03-31 22:30:27,454 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10516.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:45,121 INFO [train.py:903] (0/4) Epoch 2, batch 3700, loss[loss=0.3153, simple_loss=0.3689, pruned_loss=0.1308, over 19417.00 frames. ], tot_loss[loss=0.3574, simple_loss=0.3946, pruned_loss=0.1602, over 3841434.90 frames. ], batch size: 48, lr: 3.21e-02, grad_scale: 8.0 +2023-03-31 22:30:51,539 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0174, 0.9905, 2.0680, 1.2399, 2.7113, 2.6590, 3.0486, 1.2613], + device='cuda:0'), covar=tensor([0.1417, 0.2074, 0.1080, 0.1349, 0.0717, 0.0739, 0.0955, 0.1848], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0373, 0.0345, 0.0346, 0.0397, 0.0327, 0.0474, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:30:57,266 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10539.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:31:47,575 INFO [train.py:903] (0/4) Epoch 2, batch 3750, loss[loss=0.4319, simple_loss=0.4514, pruned_loss=0.2062, over 19705.00 frames. ], tot_loss[loss=0.3556, simple_loss=0.3935, pruned_loss=0.1589, over 3845011.67 frames. ], batch size: 59, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:32:33,531 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+02 8.986e+02 1.057e+03 1.326e+03 2.585e+03, threshold=2.114e+03, percent-clipped=3.0 +2023-03-31 22:32:50,179 INFO [train.py:903] (0/4) Epoch 2, batch 3800, loss[loss=0.5113, simple_loss=0.4989, pruned_loss=0.2619, over 19502.00 frames. ], tot_loss[loss=0.3564, simple_loss=0.3936, pruned_loss=0.1596, over 3843504.08 frames. ], batch size: 64, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:33:19,196 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-31 22:33:23,948 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-03-31 22:33:51,757 INFO [train.py:903] (0/4) Epoch 2, batch 3850, loss[loss=0.3575, simple_loss=0.4012, pruned_loss=0.1569, over 19656.00 frames. ], tot_loss[loss=0.3561, simple_loss=0.3932, pruned_loss=0.1595, over 3830941.12 frames. ], batch size: 58, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:34:37,544 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.106e+02 8.604e+02 1.077e+03 1.442e+03 2.985e+03, threshold=2.155e+03, percent-clipped=6.0 +2023-03-31 22:34:56,856 INFO [train.py:903] (0/4) Epoch 2, batch 3900, loss[loss=0.3087, simple_loss=0.3677, pruned_loss=0.1249, over 19590.00 frames. ], tot_loss[loss=0.3567, simple_loss=0.3938, pruned_loss=0.1598, over 3824548.44 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:35:36,172 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10762.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:43,253 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-31 22:35:49,448 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:50,801 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10772.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:58,736 INFO [train.py:903] (0/4) Epoch 2, batch 3950, loss[loss=0.3444, simple_loss=0.3965, pruned_loss=0.1461, over 19784.00 frames. ], tot_loss[loss=0.3566, simple_loss=0.3938, pruned_loss=0.1597, over 3818501.29 frames. ], batch size: 56, lr: 3.18e-02, grad_scale: 8.0 +2023-03-31 22:36:04,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 22:36:15,281 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10792.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:18,964 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10795.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:36:21,098 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10797.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:44,827 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.316e+02 7.972e+02 1.008e+03 1.220e+03 2.629e+03, threshold=2.016e+03, percent-clipped=1.0 +2023-03-31 22:36:46,226 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10816.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:51,361 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10820.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:37:01,757 INFO [train.py:903] (0/4) Epoch 2, batch 4000, loss[loss=0.3128, simple_loss=0.3544, pruned_loss=0.1356, over 19741.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.3923, pruned_loss=0.1586, over 3824394.88 frames. ], batch size: 51, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:37:49,240 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 22:38:04,673 INFO [train.py:903] (0/4) Epoch 2, batch 4050, loss[loss=0.3351, simple_loss=0.3884, pruned_loss=0.1409, over 19563.00 frames. ], tot_loss[loss=0.3572, simple_loss=0.3943, pruned_loss=0.16, over 3818591.19 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:38:15,742 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10886.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:38:35,397 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10903.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:38:49,942 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.194e+02 9.186e+02 1.101e+03 1.338e+03 4.215e+03, threshold=2.201e+03, percent-clipped=7.0 +2023-03-31 22:39:10,606 INFO [train.py:903] (0/4) Epoch 2, batch 4100, loss[loss=0.3089, simple_loss=0.3458, pruned_loss=0.136, over 19744.00 frames. ], tot_loss[loss=0.3548, simple_loss=0.393, pruned_loss=0.1584, over 3818074.13 frames. ], batch size: 47, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:39:13,344 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10931.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:39:45,312 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-03-31 22:39:54,833 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6934, 1.4247, 1.4349, 2.0925, 1.8395, 1.6253, 1.5324, 1.9039], + device='cuda:0'), covar=tensor([0.0871, 0.1724, 0.1234, 0.0825, 0.1139, 0.0615, 0.1007, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0374, 0.0280, 0.0254, 0.0310, 0.0266, 0.0276, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:40:04,230 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5287, 1.2616, 1.4012, 2.1353, 3.1007, 1.8425, 2.2264, 2.8849], + device='cuda:0'), covar=tensor([0.0519, 0.3082, 0.2699, 0.1431, 0.0522, 0.1997, 0.1496, 0.0672], + device='cuda:0'), in_proj_covar=tensor([0.0223, 0.0305, 0.0286, 0.0280, 0.0259, 0.0321, 0.0262, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:40:13,106 INFO [train.py:903] (0/4) Epoch 2, batch 4150, loss[loss=0.3726, simple_loss=0.4132, pruned_loss=0.166, over 17288.00 frames. ], tot_loss[loss=0.3556, simple_loss=0.3939, pruned_loss=0.1587, over 3808129.85 frames. ], batch size: 101, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:40:59,342 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 7.731e+02 1.007e+03 1.258e+03 2.097e+03, threshold=2.015e+03, percent-clipped=0.0 +2023-03-31 22:41:15,490 INFO [train.py:903] (0/4) Epoch 2, batch 4200, loss[loss=0.4689, simple_loss=0.461, pruned_loss=0.2384, over 12750.00 frames. ], tot_loss[loss=0.3572, simple_loss=0.3947, pruned_loss=0.1599, over 3800510.25 frames. ], batch size: 137, lr: 3.15e-02, grad_scale: 8.0 +2023-03-31 22:41:18,940 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 22:41:27,259 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6802, 1.2859, 1.2288, 1.8293, 1.5767, 1.5048, 1.4352, 1.6280], + device='cuda:0'), covar=tensor([0.0844, 0.1718, 0.1422, 0.0893, 0.1130, 0.0621, 0.1016, 0.0639], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0383, 0.0287, 0.0255, 0.0314, 0.0264, 0.0281, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:42:10,710 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8387, 1.2844, 1.2514, 1.8828, 1.5133, 1.9197, 2.1259, 1.9991], + device='cuda:0'), covar=tensor([0.0941, 0.1454, 0.1794, 0.1316, 0.1742, 0.1034, 0.1271, 0.0837], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0305, 0.0294, 0.0322, 0.0354, 0.0262, 0.0320, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 22:42:18,010 INFO [train.py:903] (0/4) Epoch 2, batch 4250, loss[loss=0.4325, simple_loss=0.4407, pruned_loss=0.2121, over 19541.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.394, pruned_loss=0.1595, over 3795109.10 frames. ], batch size: 56, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:42:35,128 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 22:42:45,402 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-03-31 22:42:52,660 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:03,987 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.611e+02 8.485e+02 1.107e+03 1.406e+03 3.284e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 22:43:21,882 INFO [train.py:903] (0/4) Epoch 2, batch 4300, loss[loss=0.4145, simple_loss=0.4359, pruned_loss=0.1965, over 18268.00 frames. ], tot_loss[loss=0.3533, simple_loss=0.3916, pruned_loss=0.1575, over 3813621.99 frames. ], batch size: 83, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:43:30,381 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11136.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:37,578 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11142.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:45,978 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-03-31 22:43:50,382 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.36 vs. limit=5.0 +2023-03-31 22:43:53,033 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 22:44:08,988 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11167.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:16,377 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 22:44:23,251 INFO [train.py:903] (0/4) Epoch 2, batch 4350, loss[loss=0.4015, simple_loss=0.4231, pruned_loss=0.19, over 13087.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3908, pruned_loss=0.1561, over 3824043.90 frames. ], batch size: 136, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:44:32,827 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11187.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:39,297 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11192.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:04,729 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11212.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:09,081 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.029e+02 7.841e+02 9.559e+02 1.160e+03 2.939e+03, threshold=1.912e+03, percent-clipped=2.0 +2023-03-31 22:45:15,635 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:24,371 INFO [train.py:903] (0/4) Epoch 2, batch 4400, loss[loss=0.335, simple_loss=0.3652, pruned_loss=0.1524, over 19732.00 frames. ], tot_loss[loss=0.3541, simple_loss=0.3927, pruned_loss=0.1578, over 3815346.00 frames. ], batch size: 46, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:45:48,410 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11247.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:45:50,515 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 22:45:54,201 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11251.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:46:00,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-03-31 22:46:20,829 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3996, 1.1349, 1.3105, 2.0173, 1.6066, 1.6850, 2.7828, 1.7099], + device='cuda:0'), covar=tensor([0.0993, 0.2296, 0.2072, 0.1819, 0.2112, 0.1806, 0.1475, 0.1393], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0303, 0.0295, 0.0329, 0.0353, 0.0265, 0.0318, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 22:46:27,487 INFO [train.py:903] (0/4) Epoch 2, batch 4450, loss[loss=0.3942, simple_loss=0.4188, pruned_loss=0.1848, over 19764.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.3945, pruned_loss=0.159, over 3829713.03 frames. ], batch size: 54, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:47:14,189 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.820e+02 1.081e+03 1.372e+03 2.333e+03, threshold=2.162e+03, percent-clipped=5.0 +2023-03-31 22:47:31,602 INFO [train.py:903] (0/4) Epoch 2, batch 4500, loss[loss=0.3639, simple_loss=0.405, pruned_loss=0.1614, over 19517.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.3917, pruned_loss=0.1569, over 3829811.04 frames. ], batch size: 54, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:47:35,926 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.16 vs. limit=2.0 +2023-03-31 22:48:12,940 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11362.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:48:14,066 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0136, 1.3867, 1.2528, 1.9328, 1.4896, 1.8500, 1.9387, 1.8331], + device='cuda:0'), covar=tensor([0.0694, 0.1355, 0.1506, 0.1121, 0.1485, 0.1119, 0.1280, 0.0739], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0306, 0.0294, 0.0326, 0.0349, 0.0267, 0.0320, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-03-31 22:48:34,554 INFO [train.py:903] (0/4) Epoch 2, batch 4550, loss[loss=0.3699, simple_loss=0.4069, pruned_loss=0.1665, over 19784.00 frames. ], tot_loss[loss=0.3533, simple_loss=0.3919, pruned_loss=0.1573, over 3831977.74 frames. ], batch size: 56, lr: 3.11e-02, grad_scale: 8.0 +2023-03-31 22:48:45,431 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 22:49:08,241 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 22:49:21,599 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.316e+02 7.788e+02 1.003e+03 1.220e+03 2.356e+03, threshold=2.005e+03, percent-clipped=1.0 +2023-03-31 22:49:37,109 INFO [train.py:903] (0/4) Epoch 2, batch 4600, loss[loss=0.3891, simple_loss=0.4163, pruned_loss=0.1809, over 17265.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3914, pruned_loss=0.1565, over 3841315.25 frames. 
], batch size: 101, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:06,616 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4153, 1.7419, 1.7933, 1.9104, 3.0457, 4.5655, 4.7153, 5.0952], + device='cuda:0'), covar=tensor([0.1025, 0.2180, 0.2471, 0.1630, 0.0373, 0.0091, 0.0095, 0.0053], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0273, 0.0321, 0.0275, 0.0187, 0.0103, 0.0183, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 22:50:19,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1767, 2.0778, 1.8875, 3.0512, 2.2120, 3.2429, 2.4081, 1.7014], + device='cuda:0'), covar=tensor([0.0914, 0.0723, 0.0470, 0.0495, 0.0873, 0.0190, 0.0842, 0.0793], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0327, 0.0346, 0.0452, 0.0412, 0.0244, 0.0454, 0.0357], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:50:37,512 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11477.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:50:39,645 INFO [train.py:903] (0/4) Epoch 2, batch 4650, loss[loss=0.305, simple_loss=0.3559, pruned_loss=0.1271, over 19656.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3925, pruned_loss=0.1576, over 3827373.55 frames. ], batch size: 53, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:59,945 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 22:51:09,648 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11502.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:10,451 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 22:51:15,594 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11507.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:26,520 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.039e+02 8.212e+02 1.126e+03 1.409e+03 2.689e+03, threshold=2.252e+03, percent-clipped=6.0 +2023-03-31 22:51:42,949 INFO [train.py:903] (0/4) Epoch 2, batch 4700, loss[loss=0.3552, simple_loss=0.4026, pruned_loss=0.1539, over 18214.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3905, pruned_loss=0.1551, over 3831038.33 frames. ], batch size: 84, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:51:47,850 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:52,264 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11536.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:04,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8689, 1.3493, 1.4200, 2.1371, 1.3319, 1.7646, 1.7284, 1.8385], + device='cuda:0'), covar=tensor([0.0761, 0.1890, 0.1319, 0.0813, 0.1390, 0.0591, 0.0838, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0379, 0.0295, 0.0258, 0.0324, 0.0264, 0.0278, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 22:52:05,035 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-03-31 22:52:08,641 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11550.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:18,079 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7306, 1.2987, 1.3305, 1.8349, 1.4740, 1.9996, 2.0428, 1.6372], + device='cuda:0'), covar=tensor([0.0964, 0.1448, 0.1425, 0.1142, 0.1282, 0.0871, 0.1076, 0.0813], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0312, 0.0295, 0.0326, 0.0351, 0.0269, 0.0319, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-03-31 22:52:46,145 INFO [train.py:903] (0/4) Epoch 2, batch 4750, loss[loss=0.4276, simple_loss=0.4415, pruned_loss=0.2069, over 19539.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3905, pruned_loss=0.1545, over 3836630.12 frames. ], batch size: 54, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:52:58,627 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-31 22:53:32,495 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+02 7.433e+02 1.012e+03 1.332e+03 3.283e+03, threshold=2.025e+03, percent-clipped=2.0 +2023-03-31 22:53:35,185 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11618.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:53:47,420 INFO [train.py:903] (0/4) Epoch 2, batch 4800, loss[loss=0.2923, simple_loss=0.3399, pruned_loss=0.1224, over 19773.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3885, pruned_loss=0.1535, over 3830714.43 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:54:04,594 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11643.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:54:15,067 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11651.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:19,421 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:49,929 INFO [train.py:903] (0/4) Epoch 2, batch 4850, loss[loss=0.2865, simple_loss=0.3412, pruned_loss=0.1159, over 19482.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3882, pruned_loss=0.1534, over 3824511.49 frames. ], batch size: 49, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:55:14,985 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 22:55:34,576 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-03-31 22:55:36,942 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.585e+02 1.008e+03 1.288e+03 2.592e+03, threshold=2.016e+03, percent-clipped=4.0 +2023-03-31 22:55:38,521 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:55:40,632 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 22:55:41,791 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 22:55:51,154 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-03-31 22:55:53,218 INFO [train.py:903] (0/4) Epoch 2, batch 4900, loss[loss=0.3169, simple_loss=0.3593, pruned_loss=0.1373, over 19754.00 frames. ], tot_loss[loss=0.3472, simple_loss=0.3873, pruned_loss=0.1535, over 3820881.96 frames. ], batch size: 51, lr: 3.07e-02, grad_scale: 8.0 +2023-03-31 22:56:13,254 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 22:56:55,831 INFO [train.py:903] (0/4) Epoch 2, batch 4950, loss[loss=0.3505, simple_loss=0.3858, pruned_loss=0.1576, over 19607.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3875, pruned_loss=0.1537, over 3827756.53 frames. ], batch size: 50, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:57:12,053 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 22:57:37,547 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 22:57:41,841 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+02 9.257e+02 1.136e+03 1.412e+03 3.441e+03, threshold=2.272e+03, percent-clipped=4.0 +2023-03-31 22:57:57,804 INFO [train.py:903] (0/4) Epoch 2, batch 5000, loss[loss=0.3891, simple_loss=0.4073, pruned_loss=0.1854, over 19668.00 frames. ], tot_loss[loss=0.3476, simple_loss=0.3875, pruned_loss=0.1538, over 3824023.44 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:58:04,608 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:58:07,424 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 22:58:16,638 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 22:58:42,460 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11864.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:00,548 INFO [train.py:903] (0/4) Epoch 2, batch 5050, loss[loss=0.2931, simple_loss=0.341, pruned_loss=0.1226, over 19728.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.3877, pruned_loss=0.1542, over 3822364.59 frames. ], batch size: 46, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 22:59:19,011 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11894.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:32,446 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. limit=5.0 +2023-03-31 22:59:36,628 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:38,652 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 22:59:48,010 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 8.836e+02 1.119e+03 1.460e+03 3.605e+03, threshold=2.237e+03, percent-clipped=7.0 +2023-03-31 23:00:03,339 INFO [train.py:903] (0/4) Epoch 2, batch 5100, loss[loss=0.3023, simple_loss=0.3522, pruned_loss=0.1262, over 19481.00 frames. ], tot_loss[loss=0.3473, simple_loss=0.3874, pruned_loss=0.1536, over 3823468.92 frames. 
], batch size: 49, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 23:00:08,016 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11932.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:09,170 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7408, 1.0719, 1.3720, 1.0454, 2.7369, 3.2887, 3.2567, 3.6471], + device='cuda:0'), covar=tensor([0.1374, 0.2851, 0.2893, 0.2256, 0.0445, 0.0141, 0.0201, 0.0101], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0276, 0.0317, 0.0278, 0.0193, 0.0104, 0.0190, 0.0108], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 23:00:16,215 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11938.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:19,216 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 23:00:22,846 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 23:00:26,271 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 23:00:50,293 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11966.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:01,941 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9871, 1.9820, 1.4122, 1.5185, 1.3276, 1.4451, 0.1523, 0.9526], + device='cuda:0'), covar=tensor([0.0358, 0.0299, 0.0201, 0.0250, 0.0634, 0.0421, 0.0639, 0.0499], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0231, 0.0229, 0.0244, 0.0308, 0.0264, 0.0257, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 23:01:07,384 INFO [train.py:903] (0/4) Epoch 2, batch 5150, loss[loss=0.3209, simple_loss=0.3739, pruned_loss=0.1339, over 19779.00 frames. ], tot_loss[loss=0.3479, simple_loss=0.3878, pruned_loss=0.154, over 3817631.63 frames. ], batch size: 54, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:01:20,955 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:01:28,128 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-31 23:01:29,876 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11997.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:33,200 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-12000.pt +2023-03-31 23:01:36,604 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12002.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:44,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:54,416 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 8.254e+02 9.989e+02 1.287e+03 2.673e+03, threshold=1.998e+03, percent-clipped=4.0 +2023-03-31 23:01:57,693 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:02:10,373 INFO [train.py:903] (0/4) Epoch 2, batch 5200, loss[loss=0.3688, simple_loss=0.4009, pruned_loss=0.1684, over 17467.00 frames. 
], tot_loss[loss=0.3477, simple_loss=0.3875, pruned_loss=0.1539, over 3816083.55 frames. ], batch size: 101, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:02:25,397 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 23:02:52,111 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:01,733 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3058, 2.1804, 1.9501, 3.5828, 2.5437, 4.4702, 3.4862, 1.9807], + device='cuda:0'), covar=tensor([0.1133, 0.0828, 0.0518, 0.0471, 0.0938, 0.0124, 0.0759, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0369, 0.0336, 0.0355, 0.0466, 0.0421, 0.0261, 0.0455, 0.0360], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:03:11,163 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 23:03:13,773 INFO [train.py:903] (0/4) Epoch 2, batch 5250, loss[loss=0.3042, simple_loss=0.3498, pruned_loss=0.1294, over 19297.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3877, pruned_loss=0.1539, over 3817098.03 frames. ], batch size: 44, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:03:27,392 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9821, 1.9229, 1.4737, 1.2316, 1.4277, 1.5006, 0.1494, 0.8157], + device='cuda:0'), covar=tensor([0.0286, 0.0289, 0.0200, 0.0293, 0.0536, 0.0380, 0.0635, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0231, 0.0223, 0.0246, 0.0309, 0.0262, 0.0261, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 23:03:32,701 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12094.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:55,673 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12112.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:59,697 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.115e+02 8.666e+02 1.057e+03 1.421e+03 4.195e+03, threshold=2.115e+03, percent-clipped=5.0 +2023-03-31 23:04:14,783 INFO [train.py:903] (0/4) Epoch 2, batch 5300, loss[loss=0.3272, simple_loss=0.3837, pruned_loss=0.1354, over 19778.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3898, pruned_loss=0.1554, over 3810112.88 frames. ], batch size: 56, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:04:16,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0435, 1.9373, 2.1953, 2.6624, 4.6642, 1.5329, 2.2598, 4.4562], + device='cuda:0'), covar=tensor([0.0168, 0.2035, 0.1958, 0.1204, 0.0237, 0.1899, 0.1013, 0.0319], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0297, 0.0286, 0.0272, 0.0257, 0.0309, 0.0260, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:04:24,809 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. 
limit=5.0 +2023-03-31 23:04:29,181 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5486, 3.7713, 4.0197, 3.9771, 1.4085, 3.4970, 3.3707, 3.5422], + device='cuda:0'), covar=tensor([0.0438, 0.0477, 0.0473, 0.0294, 0.3068, 0.0250, 0.0388, 0.0971], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0285, 0.0399, 0.0294, 0.0428, 0.0196, 0.0273, 0.0396], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 23:04:35,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 23:05:13,665 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12176.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:15,907 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12178.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:16,930 INFO [train.py:903] (0/4) Epoch 2, batch 5350, loss[loss=0.3118, simple_loss=0.3637, pruned_loss=0.13, over 19851.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3892, pruned_loss=0.1552, over 3821393.49 frames. ], batch size: 52, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:05:53,517 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 23:05:53,639 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12208.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:06:03,423 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 9.122e+02 1.169e+03 1.506e+03 3.477e+03, threshold=2.338e+03, percent-clipped=13.0 +2023-03-31 23:06:20,776 INFO [train.py:903] (0/4) Epoch 2, batch 5400, loss[loss=0.3335, simple_loss=0.3701, pruned_loss=0.1484, over 19730.00 frames. ], tot_loss[loss=0.3501, simple_loss=0.3892, pruned_loss=0.1555, over 3824388.23 frames. ], batch size: 51, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:07:06,371 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12265.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:23,450 INFO [train.py:903] (0/4) Epoch 2, batch 5450, loss[loss=0.3694, simple_loss=0.4073, pruned_loss=0.1658, over 18644.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3886, pruned_loss=0.154, over 3814794.39 frames. 
], batch size: 74, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:07:27,018 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12282.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:36,430 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12290.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:39,788 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:01,525 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12310.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:02,767 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0865, 2.8653, 2.0283, 2.6420, 1.4544, 2.6879, 2.5294, 2.6338], + device='cuda:0'), covar=tensor([0.0846, 0.0954, 0.1628, 0.0779, 0.2391, 0.0979, 0.0730, 0.0945], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0284, 0.0311, 0.0262, 0.0328, 0.0278, 0.0222, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 23:08:08,443 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 7.539e+02 9.464e+02 1.137e+03 1.898e+03, threshold=1.893e+03, percent-clipped=0.0 +2023-03-31 23:08:17,817 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12323.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:23,992 INFO [train.py:903] (0/4) Epoch 2, batch 5500, loss[loss=0.3768, simple_loss=0.406, pruned_loss=0.1738, over 19751.00 frames. ], tot_loss[loss=0.349, simple_loss=0.3891, pruned_loss=0.1545, over 3810369.28 frames. ], batch size: 54, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:08:47,584 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12346.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:49,878 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 23:09:14,231 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12368.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:28,513 INFO [train.py:903] (0/4) Epoch 2, batch 5550, loss[loss=0.3137, simple_loss=0.3663, pruned_loss=0.1305, over 19660.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.3886, pruned_loss=0.1538, over 3799075.78 frames. ], batch size: 53, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:09:36,187 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-03-31 23:09:46,100 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12393.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:50,647 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12397.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:14,306 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+02 8.577e+02 1.018e+03 1.198e+03 2.956e+03, threshold=2.037e+03, percent-clipped=3.0 +2023-03-31 23:10:26,765 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 23:10:27,083 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:29,728 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.18 vs. 
limit=5.0 +2023-03-31 23:10:31,326 INFO [train.py:903] (0/4) Epoch 2, batch 5600, loss[loss=0.2953, simple_loss=0.3472, pruned_loss=0.1217, over 19750.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3879, pruned_loss=0.1529, over 3801541.26 frames. ], batch size: 51, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:10:35,282 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:41,856 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12438.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:06,549 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12457.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:11,966 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12461.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:16,317 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12464.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:33,647 INFO [train.py:903] (0/4) Epoch 2, batch 5650, loss[loss=0.3338, simple_loss=0.3794, pruned_loss=0.1441, over 18181.00 frames. ], tot_loss[loss=0.3457, simple_loss=0.3866, pruned_loss=0.1524, over 3797451.41 frames. ], batch size: 83, lr: 2.99e-02, grad_scale: 8.0 +2023-03-31 23:12:19,592 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.393e+02 8.152e+02 1.040e+03 1.340e+03 2.595e+03, threshold=2.080e+03, percent-clipped=4.0 +2023-03-31 23:12:21,928 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 23:12:35,556 INFO [train.py:903] (0/4) Epoch 2, batch 5700, loss[loss=0.3426, simple_loss=0.3892, pruned_loss=0.148, over 19526.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3887, pruned_loss=0.1548, over 3808766.02 frames. ], batch size: 56, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:02,373 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:06,807 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12553.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:32,333 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12574.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:37,666 INFO [train.py:903] (0/4) Epoch 2, batch 5750, loss[loss=0.3577, simple_loss=0.3915, pruned_loss=0.162, over 19585.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.3873, pruned_loss=0.1534, over 3812538.41 frames. ], batch size: 52, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:38,090 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12579.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:42,210 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 23:13:50,267 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-03-31 23:13:56,018 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9161, 1.0061, 1.3256, 1.5642, 2.6184, 1.4157, 1.8528, 2.5918], + device='cuda:0'), covar=tensor([0.0383, 0.2450, 0.2307, 0.1408, 0.0440, 0.1815, 0.0942, 0.0526], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0302, 0.0285, 0.0273, 0.0260, 0.0318, 0.0258, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-31 23:13:56,869 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 23:14:10,383 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:14:23,848 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+02 8.083e+02 9.516e+02 1.322e+03 3.330e+03, threshold=1.903e+03, percent-clipped=5.0 +2023-03-31 23:14:40,962 INFO [train.py:903] (0/4) Epoch 2, batch 5800, loss[loss=0.4148, simple_loss=0.4435, pruned_loss=0.193, over 19635.00 frames. ], tot_loss[loss=0.3458, simple_loss=0.3863, pruned_loss=0.1526, over 3837665.42 frames. ], batch size: 57, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:14:58,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6510, 1.4721, 1.2955, 1.8303, 1.6442, 1.6092, 1.2923, 1.7533], + device='cuda:0'), covar=tensor([0.0829, 0.1667, 0.1318, 0.0925, 0.1181, 0.0580, 0.1127, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0376, 0.0281, 0.0252, 0.0319, 0.0260, 0.0277, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:15:10,924 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:42,712 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12678.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:43,511 INFO [train.py:903] (0/4) Epoch 2, batch 5850, loss[loss=0.3034, simple_loss=0.344, pruned_loss=0.1315, over 19732.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3861, pruned_loss=0.1521, over 3829495.26 frames. ], batch size: 45, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:46,286 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12681.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:18,388 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12706.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:30,350 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 7.980e+02 9.827e+02 1.217e+03 2.781e+03, threshold=1.965e+03, percent-clipped=6.0 +2023-03-31 23:16:31,907 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:46,106 INFO [train.py:903] (0/4) Epoch 2, batch 5900, loss[loss=0.3675, simple_loss=0.4078, pruned_loss=0.1636, over 19700.00 frames. ], tot_loss[loss=0.3438, simple_loss=0.3847, pruned_loss=0.1515, over 3809982.40 frames. ], batch size: 59, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:16:49,438 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-03-31 23:17:03,709 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:17:11,474 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 23:17:49,032 INFO [train.py:903] (0/4) Epoch 2, batch 5950, loss[loss=0.3404, simple_loss=0.3923, pruned_loss=0.1442, over 19283.00 frames. ], tot_loss[loss=0.346, simple_loss=0.3864, pruned_loss=0.1528, over 3812110.12 frames. ], batch size: 66, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:17:51,524 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12781.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:25,354 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12808.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:26,806 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12809.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:34,471 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+02 9.090e+02 1.139e+03 1.452e+03 3.383e+03, threshold=2.279e+03, percent-clipped=8.0 +2023-03-31 23:18:51,783 INFO [train.py:903] (0/4) Epoch 2, batch 6000, loss[loss=0.337, simple_loss=0.388, pruned_loss=0.143, over 19753.00 frames. ], tot_loss[loss=0.3457, simple_loss=0.386, pruned_loss=0.1527, over 3818779.62 frames. ], batch size: 63, lr: 2.95e-02, grad_scale: 8.0 +2023-03-31 23:18:51,784 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 23:19:06,008 INFO [train.py:937] (0/4) Epoch 2, validation: loss=0.246, simple_loss=0.337, pruned_loss=0.07745, over 944034.00 frames. +2023-03-31 23:19:06,009 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18153MB +2023-03-31 23:19:13,259 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:20:08,251 INFO [train.py:903] (0/4) Epoch 2, batch 6050, loss[loss=0.4387, simple_loss=0.4483, pruned_loss=0.2145, over 13499.00 frames. ], tot_loss[loss=0.3465, simple_loss=0.3865, pruned_loss=0.1533, over 3814646.94 frames. ], batch size: 136, lr: 2.95e-02, grad_scale: 4.0 +2023-03-31 23:20:56,514 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+02 7.840e+02 9.937e+02 1.323e+03 8.220e+03, threshold=1.987e+03, percent-clipped=9.0 +2023-03-31 23:21:03,468 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12923.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:21:03,953 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-31 23:21:10,259 INFO [train.py:903] (0/4) Epoch 2, batch 6100, loss[loss=0.3465, simple_loss=0.3886, pruned_loss=0.1522, over 17456.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3863, pruned_loss=0.1523, over 3806695.36 frames. ], batch size: 101, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:05,400 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-31 23:22:11,863 INFO [train.py:903] (0/4) Epoch 2, batch 6150, loss[loss=0.3229, simple_loss=0.3623, pruned_loss=0.1417, over 19595.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3875, pruned_loss=0.153, over 3818908.06 frames. 
], batch size: 52, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:13,426 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3839, 1.2462, 2.3647, 1.8483, 3.3814, 3.4641, 3.7729, 1.6472], + device='cuda:0'), covar=tensor([0.1271, 0.2001, 0.1096, 0.1072, 0.0738, 0.0695, 0.0924, 0.1893], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0397, 0.0366, 0.0364, 0.0443, 0.0353, 0.0512, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:22:43,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 23:23:00,059 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2575, 2.2971, 1.4542, 1.5088, 1.4150, 1.5503, 0.2640, 0.9276], + device='cuda:0'), covar=tensor([0.0251, 0.0210, 0.0192, 0.0249, 0.0630, 0.0344, 0.0569, 0.0490], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0227, 0.0225, 0.0245, 0.0314, 0.0259, 0.0251, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-31 23:23:00,761 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+02 7.749e+02 1.029e+03 1.287e+03 3.235e+03, threshold=2.059e+03, percent-clipped=7.0 +2023-03-31 23:23:04,771 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3352, 1.2759, 1.9064, 1.5927, 2.6251, 2.6863, 2.7815, 1.1765], + device='cuda:0'), covar=tensor([0.1376, 0.2063, 0.1219, 0.1232, 0.0922, 0.0826, 0.1185, 0.2074], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0397, 0.0366, 0.0365, 0.0439, 0.0350, 0.0514, 0.0386], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:23:13,346 INFO [train.py:903] (0/4) Epoch 2, batch 6200, loss[loss=0.3272, simple_loss=0.3819, pruned_loss=0.1362, over 19663.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3877, pruned_loss=0.1537, over 3812515.69 frames. ], batch size: 55, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:15,386 INFO [train.py:903] (0/4) Epoch 2, batch 6250, loss[loss=0.3118, simple_loss=0.3694, pruned_loss=0.1271, over 19689.00 frames. ], tot_loss[loss=0.3496, simple_loss=0.3896, pruned_loss=0.1547, over 3802567.88 frames. ], batch size: 59, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:47,223 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 23:25:04,356 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.358e+02 8.552e+02 1.023e+03 1.333e+03 3.705e+03, threshold=2.046e+03, percent-clipped=2.0 +2023-03-31 23:25:04,789 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9896, 2.0673, 1.6366, 2.7745, 2.0912, 3.0287, 2.3318, 1.8145], + device='cuda:0'), covar=tensor([0.0778, 0.0623, 0.0461, 0.0393, 0.0689, 0.0171, 0.0715, 0.0590], + device='cuda:0'), in_proj_covar=tensor([0.0388, 0.0367, 0.0380, 0.0492, 0.0445, 0.0277, 0.0474, 0.0375], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:25:13,024 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13125.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:25:17,708 INFO [train.py:903] (0/4) Epoch 2, batch 6300, loss[loss=0.3719, simple_loss=0.4072, pruned_loss=0.1683, over 19765.00 frames. 
], tot_loss[loss=0.3491, simple_loss=0.3891, pruned_loss=0.1546, over 3823542.79 frames. ], batch size: 56, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:25:59,096 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6766, 2.3898, 2.1995, 3.6809, 2.4750, 4.4123, 3.5125, 2.3151], + device='cuda:0'), covar=tensor([0.0838, 0.0684, 0.0401, 0.0430, 0.0863, 0.0116, 0.0554, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0381, 0.0365, 0.0373, 0.0484, 0.0441, 0.0276, 0.0467, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:26:19,874 INFO [train.py:903] (0/4) Epoch 2, batch 6350, loss[loss=0.2986, simple_loss=0.3608, pruned_loss=0.1182, over 19749.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3893, pruned_loss=0.1544, over 3826012.09 frames. ], batch size: 54, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:20,284 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13179.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:26:49,643 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13204.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:26:51,304 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.90 vs. limit=5.0 +2023-03-31 23:27:06,688 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.673e+02 8.539e+02 1.071e+03 1.407e+03 4.202e+03, threshold=2.141e+03, percent-clipped=6.0 +2023-03-31 23:27:08,226 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5649, 1.4380, 1.3788, 1.8058, 1.5320, 1.3950, 1.3005, 1.6887], + device='cuda:0'), covar=tensor([0.0961, 0.1809, 0.1486, 0.0960, 0.1367, 0.0908, 0.1443, 0.0790], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0382, 0.0291, 0.0261, 0.0317, 0.0264, 0.0282, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:27:14,123 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5294, 1.5619, 2.0451, 2.6107, 2.0767, 2.1026, 1.7239, 2.5192], + device='cuda:0'), covar=tensor([0.0774, 0.2323, 0.1326, 0.0919, 0.1485, 0.0542, 0.1355, 0.0604], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0382, 0.0291, 0.0261, 0.0319, 0.0264, 0.0283, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:27:19,358 INFO [train.py:903] (0/4) Epoch 2, batch 6400, loss[loss=0.3798, simple_loss=0.4091, pruned_loss=0.1752, over 19566.00 frames. ], tot_loss[loss=0.3476, simple_loss=0.3876, pruned_loss=0.1538, over 3835053.16 frames. ], batch size: 52, lr: 2.92e-02, grad_scale: 8.0 +2023-03-31 23:27:33,772 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13240.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:27:52,433 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-31 23:28:12,858 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-31 23:28:22,168 INFO [train.py:903] (0/4) Epoch 2, batch 6450, loss[loss=0.3555, simple_loss=0.3959, pruned_loss=0.1576, over 19700.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3888, pruned_loss=0.1547, over 3835366.51 frames. 
], batch size: 59, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:07,670 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0269, 3.9286, 2.3439, 2.9968, 3.3232, 1.5819, 1.3372, 1.8004], + device='cuda:0'), covar=tensor([0.1191, 0.0243, 0.0887, 0.0496, 0.0414, 0.1212, 0.1074, 0.0794], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0216, 0.0307, 0.0256, 0.0208, 0.0310, 0.0274, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-31 23:29:09,603 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 23:29:10,633 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+02 7.874e+02 9.907e+02 1.203e+03 2.411e+03, threshold=1.981e+03, percent-clipped=4.0 +2023-03-31 23:29:24,102 INFO [train.py:903] (0/4) Epoch 2, batch 6500, loss[loss=0.2663, simple_loss=0.3314, pruned_loss=0.1006, over 19574.00 frames. ], tot_loss[loss=0.3482, simple_loss=0.3884, pruned_loss=0.154, over 3826642.73 frames. ], batch size: 52, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:29,952 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8401, 4.7326, 5.6667, 5.6112, 1.9779, 5.1178, 4.6325, 5.0743], + device='cuda:0'), covar=tensor([0.0432, 0.0416, 0.0366, 0.0211, 0.3006, 0.0153, 0.0307, 0.0762], + device='cuda:0'), in_proj_covar=tensor([0.0321, 0.0302, 0.0406, 0.0307, 0.0445, 0.0204, 0.0274, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 23:29:30,898 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 23:30:26,693 INFO [train.py:903] (0/4) Epoch 2, batch 6550, loss[loss=0.348, simple_loss=0.3921, pruned_loss=0.1519, over 18018.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3867, pruned_loss=0.1521, over 3835955.45 frames. ], batch size: 83, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:31:14,493 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+02 7.924e+02 9.507e+02 1.218e+03 2.525e+03, threshold=1.901e+03, percent-clipped=3.0 +2023-03-31 23:31:27,085 INFO [train.py:903] (0/4) Epoch 2, batch 6600, loss[loss=0.371, simple_loss=0.4075, pruned_loss=0.1673, over 18152.00 frames. ], tot_loss[loss=0.3457, simple_loss=0.3866, pruned_loss=0.1524, over 3824927.12 frames. ], batch size: 83, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:32:29,087 INFO [train.py:903] (0/4) Epoch 2, batch 6650, loss[loss=0.4237, simple_loss=0.4398, pruned_loss=0.2038, over 19673.00 frames. ], tot_loss[loss=0.3467, simple_loss=0.3871, pruned_loss=0.1532, over 3829570.31 frames. ], batch size: 58, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:32:50,439 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:08,242 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-31 23:33:17,438 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 9.096e+02 1.181e+03 1.471e+03 3.411e+03, threshold=2.361e+03, percent-clipped=10.0 +2023-03-31 23:33:21,111 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13521.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:29,657 INFO [train.py:903] (0/4) Epoch 2, batch 6700, loss[loss=0.3724, simple_loss=0.4135, pruned_loss=0.1657, over 19528.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3877, pruned_loss=0.1531, over 3836526.49 frames. ], batch size: 56, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:34:27,765 INFO [train.py:903] (0/4) Epoch 2, batch 6750, loss[loss=0.3826, simple_loss=0.4202, pruned_loss=0.1724, over 19254.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3876, pruned_loss=0.153, over 3835548.81 frames. ], batch size: 66, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:34:38,864 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-31 23:34:51,595 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8614, 4.8414, 5.7286, 5.5934, 2.0302, 5.3092, 4.6756, 5.1404], + device='cuda:0'), covar=tensor([0.0366, 0.0424, 0.0324, 0.0201, 0.2718, 0.0136, 0.0254, 0.0708], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0306, 0.0410, 0.0307, 0.0444, 0.0210, 0.0272, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 23:35:12,679 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.052e+02 8.174e+02 9.996e+02 1.293e+03 2.664e+03, threshold=1.999e+03, percent-clipped=1.0 +2023-03-31 23:35:25,228 INFO [train.py:903] (0/4) Epoch 2, batch 6800, loss[loss=0.3522, simple_loss=0.3921, pruned_loss=0.1562, over 19736.00 frames. ], tot_loss[loss=0.3482, simple_loss=0.3882, pruned_loss=0.1541, over 3822076.75 frames. ], batch size: 63, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:35:54,699 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-2.pt +2023-03-31 23:36:09,998 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 23:36:11,043 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 23:36:13,727 INFO [train.py:903] (0/4) Epoch 3, batch 0, loss[loss=0.3343, simple_loss=0.3632, pruned_loss=0.1527, over 19725.00 frames. ], tot_loss[loss=0.3343, simple_loss=0.3632, pruned_loss=0.1527, over 19725.00 frames. ], batch size: 46, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:36:13,727 INFO [train.py:928] (0/4) Computing validation loss +2023-03-31 23:36:24,488 INFO [train.py:937] (0/4) Epoch 3, validation: loss=0.241, simple_loss=0.3346, pruned_loss=0.07374, over 944034.00 frames. +2023-03-31 23:36:24,489 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18153MB +2023-03-31 23:36:37,425 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-03-31 23:36:45,585 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1523, 0.9135, 1.0139, 1.5048, 1.1118, 1.2184, 1.2620, 1.1744], + device='cuda:0'), covar=tensor([0.1106, 0.1800, 0.1578, 0.0859, 0.1274, 0.1199, 0.1259, 0.1120], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0308, 0.0293, 0.0325, 0.0338, 0.0270, 0.0309, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-03-31 23:37:25,524 INFO [train.py:903] (0/4) Epoch 3, batch 50, loss[loss=0.3896, simple_loss=0.4165, pruned_loss=0.1813, over 17457.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.3848, pruned_loss=0.151, over 865249.12 frames. ], batch size: 101, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:37:38,309 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 7.787e+02 9.326e+02 1.115e+03 3.182e+03, threshold=1.865e+03, percent-clipped=5.0 +2023-03-31 23:37:58,962 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 23:38:23,958 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13755.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:38:25,858 INFO [train.py:903] (0/4) Epoch 3, batch 100, loss[loss=0.4201, simple_loss=0.4386, pruned_loss=0.2008, over 19237.00 frames. ], tot_loss[loss=0.3415, simple_loss=0.3849, pruned_loss=0.1491, over 1536049.28 frames. ], batch size: 69, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:38:35,102 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 23:39:27,419 INFO [train.py:903] (0/4) Epoch 3, batch 150, loss[loss=0.3332, simple_loss=0.3853, pruned_loss=0.1406, over 19465.00 frames. ], tot_loss[loss=0.3407, simple_loss=0.3841, pruned_loss=0.1486, over 2055816.99 frames. ], batch size: 64, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:39:30,164 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4818, 0.9886, 1.2917, 1.3026, 2.2360, 1.1883, 1.7991, 2.1205], + device='cuda:0'), covar=tensor([0.0412, 0.2271, 0.1959, 0.1314, 0.0514, 0.1390, 0.0724, 0.0557], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0303, 0.0287, 0.0275, 0.0273, 0.0318, 0.0265, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:39:40,067 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+02 7.521e+02 1.009e+03 1.351e+03 3.530e+03, threshold=2.018e+03, percent-clipped=10.0 +2023-03-31 23:40:28,875 INFO [train.py:903] (0/4) Epoch 3, batch 200, loss[loss=0.4297, simple_loss=0.4499, pruned_loss=0.2048, over 19308.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3844, pruned_loss=0.1496, over 2441726.99 frames. ], batch size: 66, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:40:28,917 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 23:41:28,996 INFO [train.py:903] (0/4) Epoch 3, batch 250, loss[loss=0.2713, simple_loss=0.3269, pruned_loss=0.1079, over 19768.00 frames. ], tot_loss[loss=0.3416, simple_loss=0.3838, pruned_loss=0.1497, over 2754507.22 frames. 
], batch size: 46, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:41:44,255 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+02 8.729e+02 1.056e+03 1.304e+03 3.760e+03, threshold=2.113e+03, percent-clipped=6.0 +2023-03-31 23:41:49,454 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-31 23:42:33,072 INFO [train.py:903] (0/4) Epoch 3, batch 300, loss[loss=0.2925, simple_loss=0.3554, pruned_loss=0.1147, over 19512.00 frames. ], tot_loss[loss=0.3386, simple_loss=0.3808, pruned_loss=0.1481, over 2988693.02 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:43:25,964 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-14000.pt +2023-03-31 23:43:34,502 INFO [train.py:903] (0/4) Epoch 3, batch 350, loss[loss=0.369, simple_loss=0.3933, pruned_loss=0.1724, over 19587.00 frames. ], tot_loss[loss=0.3386, simple_loss=0.3814, pruned_loss=0.1479, over 3176080.10 frames. ], batch size: 52, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:43:40,055 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:43:46,963 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 7.628e+02 9.853e+02 1.217e+03 3.369e+03, threshold=1.971e+03, percent-clipped=3.0 +2023-03-31 23:44:34,933 INFO [train.py:903] (0/4) Epoch 3, batch 400, loss[loss=0.4001, simple_loss=0.4169, pruned_loss=0.1916, over 13128.00 frames. ], tot_loss[loss=0.339, simple_loss=0.3813, pruned_loss=0.1483, over 3313546.68 frames. ], batch size: 138, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:45:05,470 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7859, 1.5383, 1.5337, 1.7143, 1.7599, 1.7488, 1.7138, 2.0766], + device='cuda:0'), covar=tensor([0.0846, 0.1714, 0.1293, 0.0916, 0.1170, 0.0534, 0.0883, 0.0559], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0381, 0.0288, 0.0260, 0.0319, 0.0272, 0.0273, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:45:17,145 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14090.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:27,292 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14099.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:27,630 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8702, 1.7761, 1.7142, 2.6007, 1.7508, 2.3097, 2.1655, 1.6575], + device='cuda:0'), covar=tensor([0.0819, 0.0664, 0.0438, 0.0337, 0.0717, 0.0257, 0.0766, 0.0686], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0379, 0.0388, 0.0510, 0.0454, 0.0285, 0.0484, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-31 23:45:36,306 INFO [train.py:903] (0/4) Epoch 3, batch 450, loss[loss=0.3443, simple_loss=0.3927, pruned_loss=0.1479, over 19662.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3806, pruned_loss=0.1475, over 3427201.02 frames. 
], batch size: 55, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:45:52,383 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+02 8.213e+02 1.015e+03 1.206e+03 3.609e+03, threshold=2.029e+03, percent-clipped=6.0 +2023-03-31 23:45:55,949 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14121.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:46:10,162 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 23:46:11,132 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 23:46:14,944 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14138.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 23:46:38,896 INFO [train.py:903] (0/4) Epoch 3, batch 500, loss[loss=0.2997, simple_loss=0.3525, pruned_loss=0.1235, over 19374.00 frames. ], tot_loss[loss=0.339, simple_loss=0.3812, pruned_loss=0.1484, over 3516381.84 frames. ], batch size: 47, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:47:38,213 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6804, 1.3312, 1.7899, 1.4928, 2.8076, 4.2540, 4.3199, 4.7014], + device='cuda:0'), covar=tensor([0.1289, 0.2558, 0.2481, 0.1871, 0.0472, 0.0109, 0.0138, 0.0074], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0277, 0.0324, 0.0274, 0.0192, 0.0103, 0.0195, 0.0109], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-31 23:47:38,988 INFO [train.py:903] (0/4) Epoch 3, batch 550, loss[loss=0.3121, simple_loss=0.3721, pruned_loss=0.1261, over 19707.00 frames. ], tot_loss[loss=0.3386, simple_loss=0.3813, pruned_loss=0.148, over 3576647.13 frames. ], batch size: 63, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:47:47,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14214.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:47:51,334 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+02 8.063e+02 9.949e+02 1.307e+03 2.222e+03, threshold=1.990e+03, percent-clipped=3.0 +2023-03-31 23:47:55,490 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.20 vs. limit=5.0 +2023-03-31 23:48:38,813 INFO [train.py:903] (0/4) Epoch 3, batch 600, loss[loss=0.3774, simple_loss=0.4076, pruned_loss=0.1736, over 19776.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3807, pruned_loss=0.1475, over 3637875.06 frames. ], batch size: 56, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:16,706 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 23:49:39,186 INFO [train.py:903] (0/4) Epoch 3, batch 650, loss[loss=0.3193, simple_loss=0.366, pruned_loss=0.1363, over 19536.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3812, pruned_loss=0.1474, over 3671102.82 frames. ], batch size: 54, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:54,624 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.103e+02 8.257e+02 1.098e+03 1.322e+03 3.191e+03, threshold=2.197e+03, percent-clipped=10.0 +2023-03-31 23:50:41,496 INFO [train.py:903] (0/4) Epoch 3, batch 700, loss[loss=0.3372, simple_loss=0.3739, pruned_loss=0.1502, over 19866.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3808, pruned_loss=0.1473, over 3717364.12 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:43,785 INFO [train.py:903] (0/4) Epoch 3, batch 750, loss[loss=0.3306, simple_loss=0.395, pruned_loss=0.1331, over 19605.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3799, pruned_loss=0.1456, over 3756478.95 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:56,444 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.798e+02 9.551e+02 1.191e+03 2.807e+03, threshold=1.910e+03, percent-clipped=6.0 +2023-03-31 23:52:11,634 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14431.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:14,916 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14434.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:44,747 INFO [train.py:903] (0/4) Epoch 3, batch 800, loss[loss=0.3001, simple_loss=0.3416, pruned_loss=0.1294, over 19430.00 frames. ], tot_loss[loss=0.3341, simple_loss=0.3782, pruned_loss=0.145, over 3770625.82 frames. ], batch size: 48, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:52:53,861 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14465.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:54,899 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:52:59,795 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14470.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:15,236 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14482.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:53:31,015 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14495.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:42,648 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14505.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:44,740 INFO [train.py:903] (0/4) Epoch 3, batch 850, loss[loss=0.3574, simple_loss=0.4076, pruned_loss=0.1536, over 19518.00 frames. ], tot_loss[loss=0.3355, simple_loss=0.3792, pruned_loss=0.1459, over 3793879.79 frames. ], batch size: 64, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:53:58,373 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.292e+02 8.633e+02 1.105e+03 1.534e+03 3.114e+03, threshold=2.210e+03, percent-clipped=11.0 +2023-03-31 23:54:32,134 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 23:54:35,855 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:54:45,563 INFO [train.py:903] (0/4) Epoch 3, batch 900, loss[loss=0.2984, simple_loss=0.358, pruned_loss=0.1194, over 19616.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3799, pruned_loss=0.1461, over 3800957.25 frames. 
], batch size: 57, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:55:15,225 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14580.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:55:23,135 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5691, 2.9020, 2.9751, 2.9973, 1.1656, 2.7074, 2.4640, 2.5951], + device='cuda:0'), covar=tensor([0.0721, 0.0690, 0.0601, 0.0479, 0.2838, 0.0360, 0.0506, 0.1134], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0313, 0.0440, 0.0328, 0.0469, 0.0223, 0.0292, 0.0434], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-31 23:55:33,870 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14597.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:55:39,424 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7624, 1.3813, 1.2933, 1.8296, 1.4096, 1.9020, 1.8268, 1.8667], + device='cuda:0'), covar=tensor([0.0748, 0.1170, 0.1355, 0.1016, 0.1169, 0.0863, 0.1031, 0.0692], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0299, 0.0287, 0.0324, 0.0322, 0.0267, 0.0301, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-03-31 23:55:46,733 INFO [train.py:903] (0/4) Epoch 3, batch 950, loss[loss=0.3533, simple_loss=0.4022, pruned_loss=0.1522, over 19624.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3798, pruned_loss=0.1461, over 3813776.51 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:55:46,747 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 23:56:00,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+02 7.381e+02 9.246e+02 1.263e+03 4.500e+03, threshold=1.849e+03, percent-clipped=5.0 +2023-03-31 23:56:21,391 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-31 23:56:46,895 INFO [train.py:903] (0/4) Epoch 3, batch 1000, loss[loss=0.3005, simple_loss=0.3535, pruned_loss=0.1237, over 19616.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.381, pruned_loss=0.1465, over 3830613.78 frames. ], batch size: 50, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:57:38,693 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 23:57:47,678 INFO [train.py:903] (0/4) Epoch 3, batch 1050, loss[loss=0.3566, simple_loss=0.3884, pruned_loss=0.1624, over 19738.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3806, pruned_loss=0.1456, over 3837529.43 frames. ], batch size: 45, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:01,055 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.305e+02 8.951e+02 1.118e+03 2.421e+03, threshold=1.790e+03, percent-clipped=2.0 +2023-03-31 23:58:15,792 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.50 vs. limit=5.0 +2023-03-31 23:58:17,620 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 23:58:48,366 INFO [train.py:903] (0/4) Epoch 3, batch 1100, loss[loss=0.371, simple_loss=0.4042, pruned_loss=0.1689, over 19583.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.38, pruned_loss=0.1453, over 3835569.75 frames. 
], batch size: 61, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:49,821 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14758.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:10,908 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14775.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:46,865 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14805.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:49,456 INFO [train.py:903] (0/4) Epoch 3, batch 1150, loss[loss=0.3683, simple_loss=0.3997, pruned_loss=0.1684, over 19715.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3788, pruned_loss=0.1448, over 3836916.93 frames. ], batch size: 63, lr: 2.64e-02, grad_scale: 4.0 +2023-04-01 00:00:03,884 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+02 7.465e+02 1.021e+03 1.238e+03 3.548e+03, threshold=2.043e+03, percent-clipped=7.0 +2023-04-01 00:00:16,780 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:23,653 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:40,916 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:45,731 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14853.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:00:49,934 INFO [train.py:903] (0/4) Epoch 3, batch 1200, loss[loss=0.3489, simple_loss=0.4013, pruned_loss=0.1482, over 19558.00 frames. ], tot_loss[loss=0.3348, simple_loss=0.3795, pruned_loss=0.1451, over 3831137.21 frames. ], batch size: 61, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:00:55,819 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:15,101 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14878.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:01:17,851 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 00:01:21,324 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2205, 1.2668, 2.0708, 1.6304, 3.0376, 3.1552, 3.7332, 1.5210], + device='cuda:0'), covar=tensor([0.1419, 0.2215, 0.1235, 0.1253, 0.0889, 0.0823, 0.0959, 0.2033], + device='cuda:0'), in_proj_covar=tensor([0.0404, 0.0435, 0.0402, 0.0391, 0.0473, 0.0386, 0.0553, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:01:31,250 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14890.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:51,485 INFO [train.py:903] (0/4) Epoch 3, batch 1250, loss[loss=0.3512, simple_loss=0.3799, pruned_loss=0.1613, over 19609.00 frames. ], tot_loss[loss=0.3334, simple_loss=0.3789, pruned_loss=0.1439, over 3831980.39 frames. 
], batch size: 50, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:02:00,410 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2302, 1.2601, 1.9591, 1.4892, 3.1144, 2.7560, 3.1302, 0.9951], + device='cuda:0'), covar=tensor([0.1166, 0.1880, 0.1011, 0.1021, 0.0600, 0.0712, 0.0801, 0.1894], + device='cuda:0'), in_proj_covar=tensor([0.0400, 0.0434, 0.0402, 0.0387, 0.0471, 0.0383, 0.0554, 0.0401], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:02:05,938 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+02 7.701e+02 1.002e+03 1.250e+03 2.941e+03, threshold=2.004e+03, percent-clipped=3.0 +2023-04-01 00:02:53,120 INFO [train.py:903] (0/4) Epoch 3, batch 1300, loss[loss=0.2944, simple_loss=0.3611, pruned_loss=0.1138, over 19807.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.38, pruned_loss=0.1447, over 3815231.49 frames. ], batch size: 56, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:02,029 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:03:54,923 INFO [train.py:903] (0/4) Epoch 3, batch 1350, loss[loss=0.3516, simple_loss=0.3911, pruned_loss=0.156, over 18753.00 frames. ], tot_loss[loss=0.3381, simple_loss=0.3825, pruned_loss=0.1469, over 3799285.29 frames. ], batch size: 74, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:57,804 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.94 vs. limit=5.0 +2023-04-01 00:04:10,657 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+02 8.351e+02 9.883e+02 1.225e+03 3.360e+03, threshold=1.977e+03, percent-clipped=2.0 +2023-04-01 00:04:57,758 INFO [train.py:903] (0/4) Epoch 3, batch 1400, loss[loss=0.3787, simple_loss=0.4123, pruned_loss=0.1725, over 17116.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3808, pruned_loss=0.1458, over 3799081.01 frames. ], batch size: 101, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:05:08,569 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 00:05:53,731 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15102.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:05:59,088 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 00:06:00,251 INFO [train.py:903] (0/4) Epoch 3, batch 1450, loss[loss=0.349, simple_loss=0.3893, pruned_loss=0.1544, over 19446.00 frames. ], tot_loss[loss=0.3365, simple_loss=0.381, pruned_loss=0.146, over 3799700.06 frames. ], batch size: 64, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:06:13,786 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+02 7.923e+02 9.351e+02 1.150e+03 2.880e+03, threshold=1.870e+03, percent-clipped=3.0 +2023-04-01 00:06:49,290 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:07:01,533 INFO [train.py:903] (0/4) Epoch 3, batch 1500, loss[loss=0.2737, simple_loss=0.3285, pruned_loss=0.1095, over 19491.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3788, pruned_loss=0.1445, over 3808481.93 frames. 
], batch size: 49, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:07:20,407 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15171.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:03,674 INFO [train.py:903] (0/4) Epoch 3, batch 1550, loss[loss=0.3174, simple_loss=0.3739, pruned_loss=0.1304, over 19130.00 frames. ], tot_loss[loss=0.3323, simple_loss=0.3772, pruned_loss=0.1437, over 3812875.38 frames. ], batch size: 69, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:08:06,003 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2913, 0.9669, 1.5305, 1.0508, 2.2659, 2.9050, 2.8843, 3.1457], + device='cuda:0'), covar=tensor([0.1785, 0.3973, 0.3604, 0.2478, 0.0613, 0.0235, 0.0309, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0282, 0.0329, 0.0278, 0.0191, 0.0103, 0.0198, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 00:08:12,797 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:18,397 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:20,277 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.825e+02 9.432e+02 1.149e+03 3.008e+03, threshold=1.886e+03, percent-clipped=3.0 +2023-04-01 00:08:21,877 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:52,221 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:09:08,812 INFO [train.py:903] (0/4) Epoch 3, batch 1600, loss[loss=0.2928, simple_loss=0.346, pruned_loss=0.1198, over 19385.00 frames. ], tot_loss[loss=0.3323, simple_loss=0.3774, pruned_loss=0.1436, over 3819798.78 frames. ], batch size: 47, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:09:32,830 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 00:09:38,988 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3301, 2.8661, 1.8794, 2.4365, 2.1443, 2.0399, 0.5930, 2.2975], + device='cuda:0'), covar=tensor([0.0285, 0.0207, 0.0271, 0.0246, 0.0456, 0.0438, 0.0554, 0.0440], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0237, 0.0242, 0.0257, 0.0319, 0.0266, 0.0254, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 00:10:10,173 INFO [train.py:903] (0/4) Epoch 3, batch 1650, loss[loss=0.3535, simple_loss=0.3763, pruned_loss=0.1654, over 19779.00 frames. ], tot_loss[loss=0.3304, simple_loss=0.3761, pruned_loss=0.1424, over 3827579.81 frames. 
], batch size: 48, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:10:24,984 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+02 8.096e+02 9.310e+02 1.117e+03 2.889e+03, threshold=1.862e+03, percent-clipped=6.0 +2023-04-01 00:10:35,606 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5207, 1.0868, 1.0532, 1.7393, 1.2196, 1.7084, 1.7597, 1.4276], + device='cuda:0'), covar=tensor([0.0762, 0.1308, 0.1413, 0.0978, 0.1195, 0.0878, 0.1029, 0.0875], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0294, 0.0285, 0.0318, 0.0324, 0.0262, 0.0302, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 00:11:11,890 INFO [train.py:903] (0/4) Epoch 3, batch 1700, loss[loss=0.2716, simple_loss=0.3193, pruned_loss=0.112, over 19330.00 frames. ], tot_loss[loss=0.3304, simple_loss=0.3757, pruned_loss=0.1426, over 3827694.70 frames. ], batch size: 44, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:11:30,482 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7277, 1.7103, 1.6166, 2.2860, 1.5730, 1.9454, 1.9628, 1.5531], + device='cuda:0'), covar=tensor([0.0811, 0.0629, 0.0461, 0.0326, 0.0671, 0.0283, 0.0757, 0.0681], + device='cuda:0'), in_proj_covar=tensor([0.0432, 0.0405, 0.0412, 0.0548, 0.0482, 0.0316, 0.0505, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:11:50,333 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 00:12:13,187 INFO [train.py:903] (0/4) Epoch 3, batch 1750, loss[loss=0.2628, simple_loss=0.3169, pruned_loss=0.1043, over 19763.00 frames. ], tot_loss[loss=0.331, simple_loss=0.3758, pruned_loss=0.143, over 3817518.84 frames. ], batch size: 46, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:12:30,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+02 8.359e+02 1.065e+03 1.276e+03 4.198e+03, threshold=2.129e+03, percent-clipped=6.0 +2023-04-01 00:13:17,302 INFO [train.py:903] (0/4) Epoch 3, batch 1800, loss[loss=0.3056, simple_loss=0.3663, pruned_loss=0.1224, over 18683.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3751, pruned_loss=0.142, over 3822164.55 frames. ], batch size: 74, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:13:37,278 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15473.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:07,872 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:14,391 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 00:14:17,663 INFO [train.py:903] (0/4) Epoch 3, batch 1850, loss[loss=0.2696, simple_loss=0.3197, pruned_loss=0.1097, over 19744.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3743, pruned_loss=0.141, over 3825430.47 frames. ], batch size: 46, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:14:32,125 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+02 7.448e+02 9.407e+02 1.169e+03 2.273e+03, threshold=1.881e+03, percent-clipped=1.0 +2023-04-01 00:14:50,377 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 00:15:18,052 INFO [train.py:903] (0/4) Epoch 3, batch 1900, loss[loss=0.4264, simple_loss=0.4337, pruned_loss=0.2095, over 19673.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3753, pruned_loss=0.142, over 3810316.16 frames. ], batch size: 53, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:15:18,210 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:15:36,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 00:15:41,730 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 00:16:04,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 00:16:19,662 INFO [train.py:903] (0/4) Epoch 3, batch 1950, loss[loss=0.4068, simple_loss=0.4305, pruned_loss=0.1916, over 13528.00 frames. ], tot_loss[loss=0.3316, simple_loss=0.3766, pruned_loss=0.1433, over 3801971.69 frames. ], batch size: 136, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:16:31,838 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 00:16:36,885 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+02 7.789e+02 9.617e+02 1.296e+03 2.448e+03, threshold=1.923e+03, percent-clipped=3.0 +2023-04-01 00:17:22,855 INFO [train.py:903] (0/4) Epoch 3, batch 2000, loss[loss=0.2821, simple_loss=0.3302, pruned_loss=0.117, over 19759.00 frames. ], tot_loss[loss=0.3317, simple_loss=0.3769, pruned_loss=0.1432, over 3806604.49 frames. ], batch size: 48, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:17:41,425 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:18:20,112 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 00:18:23,586 INFO [train.py:903] (0/4) Epoch 3, batch 2050, loss[loss=0.3066, simple_loss=0.3702, pruned_loss=0.1215, over 19781.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.3766, pruned_loss=0.1429, over 3812689.17 frames. ], batch size: 63, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:18:38,231 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+02 7.410e+02 9.269e+02 1.172e+03 2.915e+03, threshold=1.854e+03, percent-clipped=8.0 +2023-04-01 00:18:38,299 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 00:18:39,611 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 00:18:58,658 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 00:19:24,999 INFO [train.py:903] (0/4) Epoch 3, batch 2100, loss[loss=0.3852, simple_loss=0.4282, pruned_loss=0.1711, over 19488.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.3772, pruned_loss=0.1428, over 3812064.95 frames. ], batch size: 64, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:19:52,227 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-01 00:20:09,577 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.2397, 4.8653, 6.0474, 5.8231, 1.9670, 5.5368, 4.9068, 5.4227], + device='cuda:0'), covar=tensor([0.0461, 0.0503, 0.0294, 0.0239, 0.3159, 0.0166, 0.0310, 0.0725], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0336, 0.0458, 0.0345, 0.0476, 0.0236, 0.0306, 0.0439], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 00:20:13,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 00:20:25,854 INFO [train.py:903] (0/4) Epoch 3, batch 2150, loss[loss=0.3456, simple_loss=0.3928, pruned_loss=0.1492, over 19476.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3763, pruned_loss=0.1424, over 3817305.10 frames. ], batch size: 49, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:20:42,351 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 7.356e+02 9.022e+02 1.284e+03 2.686e+03, threshold=1.804e+03, percent-clipped=4.0 +2023-04-01 00:21:28,851 INFO [train.py:903] (0/4) Epoch 3, batch 2200, loss[loss=0.2741, simple_loss=0.3449, pruned_loss=0.1016, over 19669.00 frames. ], tot_loss[loss=0.331, simple_loss=0.377, pruned_loss=0.1426, over 3818165.50 frames. ], batch size: 53, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:23,669 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:22:30,390 INFO [train.py:903] (0/4) Epoch 3, batch 2250, loss[loss=0.3916, simple_loss=0.4093, pruned_loss=0.1869, over 13319.00 frames. ], tot_loss[loss=0.3302, simple_loss=0.3762, pruned_loss=0.142, over 3790084.53 frames. ], batch size: 136, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:44,786 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+02 7.012e+02 9.226e+02 1.146e+03 2.721e+03, threshold=1.845e+03, percent-clipped=4.0 +2023-04-01 00:22:55,658 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:08,212 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 00:23:26,925 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:31,838 INFO [train.py:903] (0/4) Epoch 3, batch 2300, loss[loss=0.3393, simple_loss=0.3839, pruned_loss=0.1473, over 19707.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.375, pruned_loss=0.1406, over 3811776.66 frames. ], batch size: 59, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:23:44,534 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 00:23:57,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 00:24:24,635 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-16000.pt +2023-04-01 00:24:33,426 INFO [train.py:903] (0/4) Epoch 3, batch 2350, loss[loss=0.4111, simple_loss=0.4177, pruned_loss=0.2022, over 13784.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3761, pruned_loss=0.1414, over 3811430.76 frames. 
], batch size: 136, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:24:48,805 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 7.519e+02 9.138e+02 1.115e+03 3.205e+03, threshold=1.828e+03, percent-clipped=8.0 +2023-04-01 00:24:49,111 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:25:07,118 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9965, 3.7229, 2.0468, 2.8067, 3.1147, 1.7671, 1.2743, 2.0071], + device='cuda:0'), covar=tensor([0.1218, 0.0336, 0.1008, 0.0508, 0.0558, 0.1052, 0.0969, 0.0738], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0227, 0.0309, 0.0259, 0.0214, 0.0309, 0.0274, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 00:25:15,405 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 00:25:31,102 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 00:25:34,314 INFO [train.py:903] (0/4) Epoch 3, batch 2400, loss[loss=0.3421, simple_loss=0.3844, pruned_loss=0.1499, over 19574.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3772, pruned_loss=0.1423, over 3815405.00 frames. ], batch size: 52, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:25:58,796 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5703, 1.2804, 1.5247, 1.8512, 3.0938, 1.3551, 1.9760, 2.9000], + device='cuda:0'), covar=tensor([0.0311, 0.2526, 0.2423, 0.1397, 0.0472, 0.2066, 0.1125, 0.0500], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0295, 0.0293, 0.0269, 0.0280, 0.0319, 0.0261, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:26:33,074 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:26:37,324 INFO [train.py:903] (0/4) Epoch 3, batch 2450, loss[loss=0.2919, simple_loss=0.3595, pruned_loss=0.1122, over 19539.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3761, pruned_loss=0.1414, over 3818734.29 frames. ], batch size: 56, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:51,589 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+02 8.374e+02 9.822e+02 1.305e+03 3.634e+03, threshold=1.964e+03, percent-clipped=9.0 +2023-04-01 00:27:02,707 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 00:27:38,195 INFO [train.py:903] (0/4) Epoch 3, batch 2500, loss[loss=0.342, simple_loss=0.3914, pruned_loss=0.1464, over 19791.00 frames. ], tot_loss[loss=0.329, simple_loss=0.3757, pruned_loss=0.1411, over 3808524.45 frames. ], batch size: 56, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:27:50,929 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:28:01,914 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. 
limit=5.0 +2023-04-01 00:28:14,684 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2159, 1.0077, 1.4770, 1.2109, 1.8109, 1.9349, 1.9583, 0.4611], + device='cuda:0'), covar=tensor([0.1318, 0.2229, 0.1168, 0.1314, 0.0852, 0.0988, 0.0834, 0.2000], + device='cuda:0'), in_proj_covar=tensor([0.0398, 0.0437, 0.0403, 0.0380, 0.0472, 0.0391, 0.0556, 0.0403], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:28:40,060 INFO [train.py:903] (0/4) Epoch 3, batch 2550, loss[loss=0.3674, simple_loss=0.41, pruned_loss=0.1624, over 18197.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.3756, pruned_loss=0.1413, over 3812605.52 frames. ], batch size: 83, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:28:56,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+02 7.641e+02 9.209e+02 1.283e+03 2.881e+03, threshold=1.842e+03, percent-clipped=1.0 +2023-04-01 00:29:28,552 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:29:36,513 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 00:29:44,020 INFO [train.py:903] (0/4) Epoch 3, batch 2600, loss[loss=0.305, simple_loss=0.3676, pruned_loss=0.1212, over 19742.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3754, pruned_loss=0.1417, over 3822441.43 frames. ], batch size: 54, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:30:46,336 INFO [train.py:903] (0/4) Epoch 3, batch 2650, loss[loss=0.3387, simple_loss=0.3826, pruned_loss=0.1474, over 19605.00 frames. ], tot_loss[loss=0.328, simple_loss=0.3747, pruned_loss=0.1407, over 3822391.54 frames. ], batch size: 61, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:30:47,125 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 00:31:00,230 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+02 7.570e+02 8.863e+02 1.283e+03 4.568e+03, threshold=1.773e+03, percent-clipped=9.0 +2023-04-01 00:31:06,930 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 00:31:42,825 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7939, 3.9817, 4.4515, 4.3681, 1.5470, 3.9768, 3.6914, 3.8525], + device='cuda:0'), covar=tensor([0.0635, 0.0676, 0.0431, 0.0300, 0.3292, 0.0257, 0.0348, 0.0944], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0336, 0.0460, 0.0351, 0.0475, 0.0242, 0.0300, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 00:31:47,144 INFO [train.py:903] (0/4) Epoch 3, batch 2700, loss[loss=0.3812, simple_loss=0.4204, pruned_loss=0.171, over 18666.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3741, pruned_loss=0.1405, over 3823160.39 frames. 
], batch size: 74, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:51,937 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:31:54,873 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:26,204 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16389.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:47,343 INFO [train.py:903] (0/4) Epoch 3, batch 2750, loss[loss=0.3611, simple_loss=0.3907, pruned_loss=0.1657, over 19121.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3745, pruned_loss=0.1409, over 3827958.75 frames. ], batch size: 69, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:33:01,694 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.986e+02 7.672e+02 9.838e+02 1.209e+03 2.463e+03, threshold=1.968e+03, percent-clipped=5.0 +2023-04-01 00:33:35,971 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:33:43,140 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.23 vs. limit=5.0 +2023-04-01 00:33:46,965 INFO [train.py:903] (0/4) Epoch 3, batch 2800, loss[loss=0.3561, simple_loss=0.3958, pruned_loss=0.1582, over 13842.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3761, pruned_loss=0.142, over 3815117.50 frames. ], batch size: 136, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:03,332 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2777, 2.1080, 1.5887, 1.5211, 1.5640, 1.6154, 0.2566, 1.0196], + device='cuda:0'), covar=tensor([0.0159, 0.0201, 0.0159, 0.0214, 0.0397, 0.0260, 0.0429, 0.0353], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0239, 0.0234, 0.0260, 0.0317, 0.0268, 0.0253, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 00:34:13,545 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:34:20,506 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 00:34:48,737 INFO [train.py:903] (0/4) Epoch 3, batch 2850, loss[loss=0.3964, simple_loss=0.4278, pruned_loss=0.1825, over 19623.00 frames. ], tot_loss[loss=0.33, simple_loss=0.376, pruned_loss=0.142, over 3823169.80 frames. ], batch size: 57, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:54,286 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16511.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:35:03,282 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+02 7.862e+02 1.093e+03 1.433e+03 3.382e+03, threshold=2.185e+03, percent-clipped=3.0 +2023-04-01 00:35:41,674 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 00:35:47,695 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 00:35:49,903 INFO [train.py:903] (0/4) Epoch 3, batch 2900, loss[loss=0.3205, simple_loss=0.373, pruned_loss=0.134, over 19517.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3753, pruned_loss=0.1417, over 3818731.29 frames. 
], batch size: 54, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:35:57,068 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16562.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:36:38,540 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7610, 1.7997, 1.3948, 1.3182, 1.1784, 1.3552, 0.0587, 0.7782], + device='cuda:0'), covar=tensor([0.0183, 0.0188, 0.0125, 0.0164, 0.0412, 0.0223, 0.0345, 0.0290], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0242, 0.0240, 0.0261, 0.0318, 0.0272, 0.0255, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 00:36:51,760 INFO [train.py:903] (0/4) Epoch 3, batch 2950, loss[loss=0.3053, simple_loss=0.3473, pruned_loss=0.1316, over 19744.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.377, pruned_loss=0.1429, over 3800576.62 frames. ], batch size: 51, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:02,816 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16616.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:07,331 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 7.271e+02 9.405e+02 1.170e+03 2.853e+03, threshold=1.881e+03, percent-clipped=4.0 +2023-04-01 00:37:15,863 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:33,960 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:52,197 INFO [train.py:903] (0/4) Epoch 3, batch 3000, loss[loss=0.3213, simple_loss=0.3551, pruned_loss=0.1437, over 19386.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3753, pruned_loss=0.1417, over 3816657.31 frames. ], batch size: 47, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:52,198 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 00:38:05,265 INFO [train.py:937] (0/4) Epoch 3, validation: loss=0.231, simple_loss=0.3246, pruned_loss=0.06867, over 944034.00 frames. +2023-04-01 00:38:05,266 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 00:38:08,702 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 00:38:17,985 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 00:39:07,683 INFO [train.py:903] (0/4) Epoch 3, batch 3050, loss[loss=0.307, simple_loss=0.3578, pruned_loss=0.1281, over 19749.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3742, pruned_loss=0.1405, over 3822313.90 frames. ], batch size: 51, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:39:22,586 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+02 7.860e+02 1.014e+03 1.267e+03 1.851e+03, threshold=2.027e+03, percent-clipped=0.0 +2023-04-01 00:39:37,069 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.22 vs. limit=5.0 +2023-04-01 00:39:38,697 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16733.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:39:40,019 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:08,118 INFO [train.py:903] (0/4) Epoch 3, batch 3100, loss[loss=0.2653, simple_loss=0.3265, pruned_loss=0.1021, over 19731.00 frames. 
], tot_loss[loss=0.3254, simple_loss=0.3724, pruned_loss=0.1392, over 3827666.25 frames. ], batch size: 45, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:40:11,530 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:31,755 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3915, 1.0599, 1.4025, 0.9617, 2.4798, 2.9878, 2.8444, 3.2114], + device='cuda:0'), covar=tensor([0.1504, 0.2924, 0.3176, 0.2168, 0.0474, 0.0158, 0.0257, 0.0119], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0278, 0.0332, 0.0270, 0.0196, 0.0107, 0.0200, 0.0112], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 00:40:32,044 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 00:40:42,530 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4472, 1.3030, 1.2507, 1.7525, 1.3891, 1.7170, 1.8832, 1.5353], + device='cuda:0'), covar=tensor([0.0824, 0.1030, 0.1136, 0.0822, 0.0968, 0.0783, 0.0968, 0.0729], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0283, 0.0271, 0.0310, 0.0314, 0.0262, 0.0290, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 00:41:08,259 INFO [train.py:903] (0/4) Epoch 3, batch 3150, loss[loss=0.2872, simple_loss=0.3352, pruned_loss=0.1196, over 19712.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3741, pruned_loss=0.1403, over 3820316.70 frames. ], batch size: 46, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:41:20,755 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0756, 1.0971, 1.6337, 1.2653, 2.0792, 1.9863, 2.3024, 0.7791], + device='cuda:0'), covar=tensor([0.1650, 0.2599, 0.1419, 0.1446, 0.1020, 0.1204, 0.1067, 0.2317], + device='cuda:0'), in_proj_covar=tensor([0.0403, 0.0442, 0.0408, 0.0383, 0.0483, 0.0391, 0.0564, 0.0410], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:41:21,849 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16818.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:22,526 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.096e+02 7.784e+02 9.820e+02 1.270e+03 2.923e+03, threshold=1.964e+03, percent-clipped=2.0 +2023-04-01 00:41:31,802 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 00:41:51,823 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:57,406 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:07,316 INFO [train.py:903] (0/4) Epoch 3, batch 3200, loss[loss=0.3938, simple_loss=0.4207, pruned_loss=0.1834, over 19614.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3743, pruned_loss=0.1403, over 3832477.02 frames. ], batch size: 57, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:42:26,199 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. 
limit=5.0 +2023-04-01 00:42:39,227 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16882.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:43,582 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16886.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:43:08,575 INFO [train.py:903] (0/4) Epoch 3, batch 3250, loss[loss=0.3453, simple_loss=0.3909, pruned_loss=0.1499, over 19576.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3744, pruned_loss=0.1403, over 3824923.03 frames. ], batch size: 61, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:43:08,965 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:43:24,383 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+02 8.507e+02 1.021e+03 1.288e+03 2.328e+03, threshold=2.042e+03, percent-clipped=1.0 +2023-04-01 00:43:29,307 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:43:50,170 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 00:44:09,582 INFO [train.py:903] (0/4) Epoch 3, batch 3300, loss[loss=0.2954, simple_loss=0.3522, pruned_loss=0.1193, over 19858.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3733, pruned_loss=0.1393, over 3822830.25 frames. ], batch size: 52, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:44:16,150 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 00:45:10,105 INFO [train.py:903] (0/4) Epoch 3, batch 3350, loss[loss=0.3393, simple_loss=0.3876, pruned_loss=0.1456, over 19306.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3731, pruned_loss=0.1395, over 3816824.54 frames. ], batch size: 66, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:45:24,559 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+02 7.847e+02 9.419e+02 1.175e+03 3.710e+03, threshold=1.884e+03, percent-clipped=5.0 +2023-04-01 00:46:10,035 INFO [train.py:903] (0/4) Epoch 3, batch 3400, loss[loss=0.2959, simple_loss=0.3482, pruned_loss=0.1218, over 19458.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3737, pruned_loss=0.1399, over 3813284.43 frames. ], batch size: 49, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:46:21,762 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6367, 1.2499, 1.2862, 1.7873, 1.6951, 1.8327, 2.1351, 1.7092], + device='cuda:0'), covar=tensor([0.0882, 0.1268, 0.1378, 0.1124, 0.1023, 0.0998, 0.0930, 0.0752], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0285, 0.0276, 0.0314, 0.0315, 0.0274, 0.0289, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 00:47:08,445 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:47:12,305 INFO [train.py:903] (0/4) Epoch 3, batch 3450, loss[loss=0.3395, simple_loss=0.3894, pruned_loss=0.1448, over 19440.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.374, pruned_loss=0.1399, over 3800361.30 frames. ], batch size: 70, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:14,325 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-01 00:47:28,182 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 8.544e+02 1.014e+03 1.278e+03 1.988e+03, threshold=2.028e+03, percent-clipped=3.0 +2023-04-01 00:47:39,729 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:48:12,439 INFO [train.py:903] (0/4) Epoch 3, batch 3500, loss[loss=0.3647, simple_loss=0.4108, pruned_loss=0.1593, over 18878.00 frames. ], tot_loss[loss=0.3302, simple_loss=0.3762, pruned_loss=0.1421, over 3790845.00 frames. ], batch size: 74, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,366 INFO [train.py:903] (0/4) Epoch 3, batch 3550, loss[loss=0.2831, simple_loss=0.3363, pruned_loss=0.115, over 19741.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3764, pruned_loss=0.1423, over 3798581.34 frames. ], batch size: 51, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,657 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:49:26,840 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.377e+02 8.429e+02 1.068e+03 1.302e+03 2.755e+03, threshold=2.137e+03, percent-clipped=4.0 +2023-04-01 00:49:40,870 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17230.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:49:49,121 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 00:50:11,407 INFO [train.py:903] (0/4) Epoch 3, batch 3600, loss[loss=0.2778, simple_loss=0.3279, pruned_loss=0.1139, over 19486.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3754, pruned_loss=0.141, over 3823129.00 frames. ], batch size: 49, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:50:24,836 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:50:56,817 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.99 vs. limit=5.0 +2023-04-01 00:51:11,855 INFO [train.py:903] (0/4) Epoch 3, batch 3650, loss[loss=0.3136, simple_loss=0.3722, pruned_loss=0.1275, over 19674.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3738, pruned_loss=0.1397, over 3820200.22 frames. ], batch size: 58, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:51:27,524 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.083e+02 7.894e+02 9.345e+02 1.119e+03 1.949e+03, threshold=1.869e+03, percent-clipped=0.0 +2023-04-01 00:51:57,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17345.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:52:12,711 INFO [train.py:903] (0/4) Epoch 3, batch 3700, loss[loss=0.3327, simple_loss=0.3655, pruned_loss=0.1499, over 19749.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3713, pruned_loss=0.1379, over 3826459.54 frames. 
], batch size: 46, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:52:38,311 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4159, 1.2118, 1.6496, 0.8288, 2.5676, 2.9243, 2.6934, 3.0942], + device='cuda:0'), covar=tensor([0.1118, 0.2311, 0.2158, 0.1858, 0.0324, 0.0122, 0.0218, 0.0102], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0276, 0.0325, 0.0270, 0.0192, 0.0107, 0.0199, 0.0113], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 00:52:39,342 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:52:42,842 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:53:05,601 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17401.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:53:12,843 INFO [train.py:903] (0/4) Epoch 3, batch 3750, loss[loss=0.4477, simple_loss=0.4496, pruned_loss=0.2229, over 13148.00 frames. ], tot_loss[loss=0.3278, simple_loss=0.3744, pruned_loss=0.1406, over 3821082.86 frames. ], batch size: 136, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:53:27,566 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 9.192e+02 1.060e+03 1.489e+03 3.397e+03, threshold=2.120e+03, percent-clipped=7.0 +2023-04-01 00:54:12,636 INFO [train.py:903] (0/4) Epoch 3, batch 3800, loss[loss=0.334, simple_loss=0.3815, pruned_loss=0.1433, over 17410.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3721, pruned_loss=0.1387, over 3819024.50 frames. ], batch size: 101, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:54:23,028 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2653, 1.1637, 1.2958, 0.7903, 2.5237, 2.8232, 2.6974, 3.0739], + device='cuda:0'), covar=tensor([0.1385, 0.2696, 0.2981, 0.2241, 0.0396, 0.0137, 0.0251, 0.0114], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0281, 0.0331, 0.0276, 0.0195, 0.0107, 0.0201, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 00:54:45,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 00:55:11,733 INFO [train.py:903] (0/4) Epoch 3, batch 3850, loss[loss=0.307, simple_loss=0.3519, pruned_loss=0.131, over 19737.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3733, pruned_loss=0.1394, over 3829207.18 frames. 
], batch size: 45, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:55:28,331 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+02 7.944e+02 9.720e+02 1.209e+03 3.103e+03, threshold=1.944e+03, percent-clipped=2.0 +2023-04-01 00:56:01,956 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4680, 1.0891, 1.5689, 1.1808, 2.5674, 3.3517, 3.2868, 3.6515], + device='cuda:0'), covar=tensor([0.1294, 0.2751, 0.2743, 0.1976, 0.0443, 0.0123, 0.0182, 0.0086], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0280, 0.0326, 0.0274, 0.0191, 0.0107, 0.0200, 0.0113], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 00:56:04,060 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17551.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:56:12,747 INFO [train.py:903] (0/4) Epoch 3, batch 3900, loss[loss=0.3141, simple_loss=0.3718, pruned_loss=0.1282, over 19784.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3736, pruned_loss=0.1396, over 3823323.23 frames. ], batch size: 56, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:56:16,667 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4473, 2.1289, 1.6154, 1.7051, 1.5032, 1.6528, 0.2161, 1.0981], + device='cuda:0'), covar=tensor([0.0173, 0.0220, 0.0177, 0.0266, 0.0438, 0.0293, 0.0441, 0.0351], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0246, 0.0239, 0.0266, 0.0324, 0.0259, 0.0250, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 00:57:05,241 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17601.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:57:11,568 INFO [train.py:903] (0/4) Epoch 3, batch 3950, loss[loss=0.3056, simple_loss=0.3665, pruned_loss=0.1223, over 19588.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.374, pruned_loss=0.1399, over 3817730.71 frames. ], batch size: 61, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:18,151 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 00:57:27,263 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+02 7.211e+02 9.089e+02 1.148e+03 2.193e+03, threshold=1.818e+03, percent-clipped=2.0 +2023-04-01 00:57:34,442 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17626.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:57:51,660 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:03,602 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7087, 1.7390, 1.4273, 1.4966, 1.3630, 1.5401, 0.5910, 1.2366], + device='cuda:0'), covar=tensor([0.0182, 0.0200, 0.0119, 0.0165, 0.0300, 0.0210, 0.0386, 0.0264], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0247, 0.0243, 0.0270, 0.0325, 0.0265, 0.0256, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 00:58:08,354 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-04-01 00:58:12,126 INFO [train.py:903] (0/4) Epoch 3, batch 4000, loss[loss=0.3975, simple_loss=0.4184, pruned_loss=0.1883, over 14071.00 frames. 
], tot_loss[loss=0.3268, simple_loss=0.374, pruned_loss=0.1398, over 3810694.19 frames. ], batch size: 136, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:58:20,391 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:22,671 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:59,486 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 00:59:11,471 INFO [train.py:903] (0/4) Epoch 3, batch 4050, loss[loss=0.299, simple_loss=0.3554, pruned_loss=0.1213, over 19680.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3731, pruned_loss=0.1384, over 3818966.55 frames. ], batch size: 58, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:59:23,448 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2154, 2.1134, 2.0077, 3.3967, 2.2299, 3.9023, 3.0666, 1.9090], + device='cuda:0'), covar=tensor([0.1307, 0.0956, 0.0519, 0.0625, 0.1257, 0.0223, 0.0856, 0.0811], + device='cuda:0'), in_proj_covar=tensor([0.0478, 0.0445, 0.0449, 0.0594, 0.0527, 0.0358, 0.0548, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 00:59:25,591 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:28,910 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+02 7.398e+02 9.459e+02 1.250e+03 4.446e+03, threshold=1.892e+03, percent-clipped=10.0 +2023-04-01 00:59:32,663 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:57,743 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17745.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:00:01,012 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17748.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:00:02,019 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5619, 3.8864, 4.1129, 4.0730, 1.5098, 3.8068, 3.3780, 3.7209], + device='cuda:0'), covar=tensor([0.0598, 0.0527, 0.0423, 0.0317, 0.3023, 0.0242, 0.0426, 0.0812], + device='cuda:0'), in_proj_covar=tensor([0.0379, 0.0346, 0.0476, 0.0367, 0.0491, 0.0251, 0.0317, 0.0459], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 01:00:10,715 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1099, 1.0064, 1.4719, 1.2191, 1.8558, 1.7764, 1.9609, 0.4815], + device='cuda:0'), covar=tensor([0.1463, 0.2334, 0.1213, 0.1347, 0.0876, 0.1206, 0.0824, 0.2079], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0453, 0.0407, 0.0386, 0.0487, 0.0399, 0.0571, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 01:00:12,216 INFO [train.py:903] (0/4) Epoch 3, batch 4100, loss[loss=0.3527, simple_loss=0.3993, pruned_loss=0.153, over 19541.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3736, pruned_loss=0.139, over 3820790.24 frames. ], batch size: 54, lr: 2.43e-02, grad_scale: 4.0 +2023-04-01 01:00:48,448 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-01 01:01:11,680 INFO [train.py:903] (0/4) Epoch 3, batch 4150, loss[loss=0.341, simple_loss=0.3869, pruned_loss=0.1475, over 19467.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.373, pruned_loss=0.1384, over 3818844.92 frames. ], batch size: 64, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:01:28,530 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+02 7.784e+02 9.706e+02 1.186e+03 3.618e+03, threshold=1.941e+03, percent-clipped=3.0 +2023-04-01 01:01:49,967 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:02:10,745 INFO [train.py:903] (0/4) Epoch 3, batch 4200, loss[loss=0.3191, simple_loss=0.3624, pruned_loss=0.1379, over 19846.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.373, pruned_loss=0.1387, over 3808465.95 frames. ], batch size: 52, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:02:14,251 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17860.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:02:14,921 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 01:02:25,250 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0743, 1.2166, 1.3066, 1.6364, 2.6417, 1.2583, 1.8455, 2.6163], + device='cuda:0'), covar=tensor([0.0291, 0.2102, 0.2010, 0.1196, 0.0470, 0.1676, 0.0879, 0.0441], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0303, 0.0295, 0.0273, 0.0281, 0.0318, 0.0272, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:02:38,329 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7613, 1.5007, 1.8183, 1.6387, 2.9127, 4.5434, 4.4721, 4.9366], + device='cuda:0'), covar=tensor([0.1407, 0.2435, 0.2530, 0.1817, 0.0454, 0.0106, 0.0136, 0.0067], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0280, 0.0326, 0.0273, 0.0198, 0.0109, 0.0202, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 01:03:09,259 INFO [train.py:903] (0/4) Epoch 3, batch 4250, loss[loss=0.3471, simple_loss=0.3956, pruned_loss=0.1493, over 18776.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3718, pruned_loss=0.1378, over 3805527.78 frames. ], batch size: 74, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:03:11,737 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6272, 4.1959, 2.6513, 3.6433, 1.5063, 3.6965, 3.8257, 3.8740], + device='cuda:0'), covar=tensor([0.0546, 0.1072, 0.1816, 0.0736, 0.3272, 0.1022, 0.0572, 0.0788], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0292, 0.0337, 0.0271, 0.0339, 0.0293, 0.0243, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:03:26,775 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.782e+02 8.334e+02 9.808e+02 1.259e+03 2.577e+03, threshold=1.962e+03, percent-clipped=5.0 +2023-04-01 01:03:26,832 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-01 01:03:28,267 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:03:37,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 01:03:57,655 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:08,380 INFO [train.py:903] (0/4) Epoch 3, batch 4300, loss[loss=0.3845, simple_loss=0.4141, pruned_loss=0.1775, over 18226.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3722, pruned_loss=0.1376, over 3804076.37 frames. ], batch size: 84, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:04:35,072 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:38,680 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5312, 1.7783, 2.1423, 2.7284, 2.2343, 2.2243, 2.1610, 2.6619], + device='cuda:0'), covar=tensor([0.0690, 0.2028, 0.1236, 0.0784, 0.1143, 0.0531, 0.0893, 0.0575], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0383, 0.0287, 0.0256, 0.0317, 0.0267, 0.0274, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:05:00,891 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-18000.pt +2023-04-01 01:05:06,041 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 01:05:10,734 INFO [train.py:903] (0/4) Epoch 3, batch 4350, loss[loss=0.3226, simple_loss=0.3796, pruned_loss=0.1327, over 19682.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.3703, pruned_loss=0.1363, over 3812075.67 frames. ], batch size: 60, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:05:27,020 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+02 7.530e+02 9.485e+02 1.282e+03 2.824e+03, threshold=1.897e+03, percent-clipped=4.0 +2023-04-01 01:06:11,119 INFO [train.py:903] (0/4) Epoch 3, batch 4400, loss[loss=0.3213, simple_loss=0.3565, pruned_loss=0.1431, over 19614.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3702, pruned_loss=0.1366, over 3813121.93 frames. ], batch size: 50, lr: 2.41e-02, grad_scale: 8.0 +2023-04-01 01:06:17,178 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:32,857 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 01:06:43,596 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 01:06:53,801 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18092.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:57,446 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:10,934 INFO [train.py:903] (0/4) Epoch 3, batch 4450, loss[loss=0.3522, simple_loss=0.4007, pruned_loss=0.1519, over 19671.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.371, pruned_loss=0.1368, over 3819376.77 frames. 
], batch size: 60, lr: 2.40e-02, grad_scale: 8.0 +2023-04-01 01:07:16,929 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3120, 1.5131, 1.7040, 2.3050, 1.9220, 2.5871, 2.5059, 2.3679], + device='cuda:0'), covar=tensor([0.0737, 0.1173, 0.1226, 0.1293, 0.1237, 0.0716, 0.1131, 0.0715], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0283, 0.0277, 0.0311, 0.0313, 0.0266, 0.0283, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 01:07:21,271 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18116.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:07:26,625 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:28,235 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.891e+02 7.625e+02 9.248e+02 1.171e+03 2.408e+03, threshold=1.850e+03, percent-clipped=4.0 +2023-04-01 01:07:33,876 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8297, 3.4772, 2.2481, 3.0844, 1.1545, 3.0833, 3.0453, 3.2860], + device='cuda:0'), covar=tensor([0.0813, 0.1215, 0.2110, 0.0979, 0.3809, 0.1167, 0.0979, 0.0985], + device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0286, 0.0335, 0.0278, 0.0343, 0.0297, 0.0248, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:07:53,433 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18141.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:08:05,272 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.80 vs. limit=2.0 +2023-04-01 01:08:11,158 INFO [train.py:903] (0/4) Epoch 3, batch 4500, loss[loss=0.3214, simple_loss=0.3668, pruned_loss=0.1379, over 19743.00 frames. ], tot_loss[loss=0.32, simple_loss=0.369, pruned_loss=0.1355, over 3819553.85 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:08:25,323 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.63 vs. limit=5.0 +2023-04-01 01:08:37,479 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:54,038 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18192.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:55,176 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:11,760 INFO [train.py:903] (0/4) Epoch 3, batch 4550, loss[loss=0.3594, simple_loss=0.4072, pruned_loss=0.1558, over 19752.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.3696, pruned_loss=0.1358, over 3812257.02 frames. ], batch size: 63, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:09:12,105 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:18,463 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 01:09:29,444 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.087e+02 7.616e+02 9.596e+02 1.201e+03 2.125e+03, threshold=1.919e+03, percent-clipped=4.0 +2023-04-01 01:09:42,216 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 01:10:11,971 INFO [train.py:903] (0/4) Epoch 3, batch 4600, loss[loss=0.3821, simple_loss=0.4146, pruned_loss=0.1748, over 18114.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3699, pruned_loss=0.1362, over 3825813.93 frames. ], batch size: 83, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:11,634 INFO [train.py:903] (0/4) Epoch 3, batch 4650, loss[loss=0.3231, simple_loss=0.3773, pruned_loss=0.1345, over 19483.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3704, pruned_loss=0.1367, over 3824816.52 frames. ], batch size: 64, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:27,794 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 01:11:28,860 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+02 7.851e+02 9.796e+02 1.308e+03 3.825e+03, threshold=1.959e+03, percent-clipped=6.0 +2023-04-01 01:11:29,035 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:11:38,749 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 01:12:06,729 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7558, 4.2710, 2.3685, 3.8850, 1.2782, 4.0240, 3.9623, 4.1797], + device='cuda:0'), covar=tensor([0.0473, 0.0915, 0.1991, 0.0622, 0.3470, 0.0780, 0.0591, 0.0706], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0281, 0.0324, 0.0268, 0.0332, 0.0283, 0.0238, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:12:10,876 INFO [train.py:903] (0/4) Epoch 3, batch 4700, loss[loss=0.3548, simple_loss=0.3883, pruned_loss=0.1606, over 19494.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3716, pruned_loss=0.1377, over 3821717.98 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:12:33,343 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 01:13:13,556 INFO [train.py:903] (0/4) Epoch 3, batch 4750, loss[loss=0.3542, simple_loss=0.3978, pruned_loss=0.1553, over 19674.00 frames. ], tot_loss[loss=0.3247, simple_loss=0.3725, pruned_loss=0.1384, over 3827693.85 frames. ], batch size: 58, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:13:31,290 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.455e+02 7.810e+02 9.309e+02 1.222e+03 2.382e+03, threshold=1.862e+03, percent-clipped=4.0 +2023-04-01 01:13:44,348 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18433.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:13:48,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:11,429 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 01:14:15,107 INFO [train.py:903] (0/4) Epoch 3, batch 4800, loss[loss=0.3186, simple_loss=0.37, pruned_loss=0.1336, over 19335.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3711, pruned_loss=0.1375, over 3808979.16 frames. 
], batch size: 66, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:14:16,595 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:22,279 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18463.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:53,523 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18488.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:57,073 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4357, 1.3962, 2.5206, 1.6688, 3.1088, 3.0070, 3.5968, 1.3159], + device='cuda:0'), covar=tensor([0.1500, 0.2460, 0.1200, 0.1347, 0.1062, 0.1062, 0.1231, 0.2454], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0459, 0.0419, 0.0394, 0.0498, 0.0404, 0.0581, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 01:15:16,093 INFO [train.py:903] (0/4) Epoch 3, batch 4850, loss[loss=0.2963, simple_loss=0.3529, pruned_loss=0.1199, over 19483.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3716, pruned_loss=0.1371, over 3809939.85 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:15:17,551 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:34,581 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+02 7.295e+02 9.130e+02 1.063e+03 1.681e+03, threshold=1.826e+03, percent-clipped=0.0 +2023-04-01 01:15:38,194 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 01:15:40,614 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18526.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:15:52,386 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:53,567 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18537.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:16:00,134 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 01:16:04,624 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 01:16:05,762 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 01:16:14,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 01:16:15,642 INFO [train.py:903] (0/4) Epoch 3, batch 4900, loss[loss=0.3714, simple_loss=0.4115, pruned_loss=0.1656, over 19688.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3718, pruned_loss=0.1373, over 3819998.38 frames. ], batch size: 59, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:16:34,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 01:17:14,802 INFO [train.py:903] (0/4) Epoch 3, batch 4950, loss[loss=0.3265, simple_loss=0.3762, pruned_loss=0.1384, over 17509.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3723, pruned_loss=0.1379, over 3820503.77 frames. 
], batch size: 101, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:17:30,520 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 01:17:34,977 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.825e+02 8.688e+02 1.059e+03 1.337e+03 3.400e+03, threshold=2.119e+03, percent-clipped=10.0 +2023-04-01 01:17:52,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 01:18:09,818 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18651.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:10,944 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:17,099 INFO [train.py:903] (0/4) Epoch 3, batch 5000, loss[loss=0.4049, simple_loss=0.4277, pruned_loss=0.1911, over 14078.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3715, pruned_loss=0.1371, over 3818691.83 frames. ], batch size: 135, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:18:21,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 01:18:32,453 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 01:18:59,277 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:00,335 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18693.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:16,433 INFO [train.py:903] (0/4) Epoch 3, batch 5050, loss[loss=0.3191, simple_loss=0.3759, pruned_loss=0.1311, over 19397.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.371, pruned_loss=0.1366, over 3830222.65 frames. ], batch size: 66, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:19:29,215 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:35,283 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.108e+02 7.776e+02 1.028e+03 1.229e+03 3.550e+03, threshold=2.057e+03, percent-clipped=2.0 +2023-04-01 01:19:48,428 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 01:20:16,939 INFO [train.py:903] (0/4) Epoch 3, batch 5100, loss[loss=0.3294, simple_loss=0.3787, pruned_loss=0.14, over 19664.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3717, pruned_loss=0.1366, over 3824819.00 frames. ], batch size: 55, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:20:24,654 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 01:20:27,859 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 01:20:30,035 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-04-01 01:20:32,462 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 01:21:19,959 INFO [train.py:903] (0/4) Epoch 3, batch 5150, loss[loss=0.3224, simple_loss=0.3761, pruned_loss=0.1343, over 19732.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3691, pruned_loss=0.1349, over 3826517.13 frames. 
], batch size: 63, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:21:31,925 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 01:21:40,054 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.924e+02 8.047e+02 1.080e+03 1.932e+03, threshold=1.609e+03, percent-clipped=0.0 +2023-04-01 01:22:06,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 01:22:09,583 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.81 vs. limit=5.0 +2023-04-01 01:22:14,743 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18852.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:22:20,356 INFO [train.py:903] (0/4) Epoch 3, batch 5200, loss[loss=0.3627, simple_loss=0.4073, pruned_loss=0.159, over 19603.00 frames. ], tot_loss[loss=0.3204, simple_loss=0.3696, pruned_loss=0.1356, over 3833209.29 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 8.0 +2023-04-01 01:22:37,386 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 01:22:37,520 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18870.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:22:59,117 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3925, 2.4316, 2.1768, 3.5532, 2.2754, 3.9313, 3.6377, 2.1954], + device='cuda:0'), covar=tensor([0.1319, 0.0882, 0.0493, 0.0543, 0.1194, 0.0222, 0.0735, 0.0751], + device='cuda:0'), in_proj_covar=tensor([0.0486, 0.0454, 0.0458, 0.0608, 0.0534, 0.0372, 0.0555, 0.0452], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 01:23:21,224 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 01:23:23,224 INFO [train.py:903] (0/4) Epoch 3, batch 5250, loss[loss=0.3435, simple_loss=0.3825, pruned_loss=0.1522, over 19657.00 frames. ], tot_loss[loss=0.321, simple_loss=0.37, pruned_loss=0.136, over 3819728.28 frames. ], batch size: 60, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:23:23,663 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:24,823 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:42,374 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+02 7.388e+02 9.793e+02 1.246e+03 4.620e+03, threshold=1.959e+03, percent-clipped=9.0 +2023-04-01 01:23:53,525 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:54,536 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:12,744 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:22,204 INFO [train.py:903] (0/4) Epoch 3, batch 5300, loss[loss=0.285, simple_loss=0.3369, pruned_loss=0.1165, over 19433.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3691, pruned_loss=0.1353, over 3815059.60 frames. 
], batch size: 48, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:24:35,223 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:39,538 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 01:24:56,468 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18985.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:25:22,318 INFO [train.py:903] (0/4) Epoch 3, batch 5350, loss[loss=0.296, simple_loss=0.3509, pruned_loss=0.1206, over 19515.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3696, pruned_loss=0.1359, over 3820214.36 frames. ], batch size: 54, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:25:42,784 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+02 7.837e+02 1.011e+03 1.314e+03 3.062e+03, threshold=2.023e+03, percent-clipped=6.0 +2023-04-01 01:25:55,450 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 01:25:56,692 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19036.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:26:20,418 INFO [train.py:903] (0/4) Epoch 3, batch 5400, loss[loss=0.2819, simple_loss=0.3343, pruned_loss=0.1148, over 19740.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3689, pruned_loss=0.1352, over 3819446.33 frames. ], batch size: 51, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:27:22,179 INFO [train.py:903] (0/4) Epoch 3, batch 5450, loss[loss=0.315, simple_loss=0.3697, pruned_loss=0.1302, over 19587.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3693, pruned_loss=0.1353, over 3811951.85 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:27:41,223 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+02 7.941e+02 9.480e+02 1.159e+03 2.761e+03, threshold=1.896e+03, percent-clipped=1.0 +2023-04-01 01:27:49,159 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1281, 3.4775, 1.9783, 2.7210, 2.8199, 1.5439, 1.0911, 1.8073], + device='cuda:0'), covar=tensor([0.0984, 0.0384, 0.0957, 0.0387, 0.0499, 0.1085, 0.1009, 0.0661], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0233, 0.0310, 0.0251, 0.0215, 0.0309, 0.0275, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:28:04,314 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7891, 4.3007, 2.3895, 3.8410, 1.2171, 3.8936, 3.8011, 4.1418], + device='cuda:0'), covar=tensor([0.0490, 0.0993, 0.2004, 0.0631, 0.3715, 0.0820, 0.0699, 0.0709], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0299, 0.0346, 0.0280, 0.0348, 0.0295, 0.0252, 0.0284], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:28:08,997 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9741, 1.3378, 0.9188, 1.0306, 1.1721, 0.8227, 0.4774, 1.2179], + device='cuda:0'), covar=tensor([0.0508, 0.0492, 0.1124, 0.0434, 0.0474, 0.1249, 0.0853, 0.0434], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0232, 0.0309, 0.0250, 0.0216, 0.0311, 0.0276, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:28:10,062 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, 
warmup_end=2000.0, batch_count=19147.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:28:14,848 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:28:21,132 INFO [train.py:903] (0/4) Epoch 3, batch 5500, loss[loss=0.2894, simple_loss=0.3372, pruned_loss=0.1208, over 19754.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3691, pruned_loss=0.1351, over 3809092.25 frames. ], batch size: 46, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:28:45,301 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 01:28:58,487 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.4794, 4.0236, 2.5024, 3.6066, 1.2614, 3.5783, 3.6942, 3.8994], + device='cuda:0'), covar=tensor([0.0586, 0.1020, 0.1902, 0.0752, 0.3733, 0.0979, 0.0697, 0.0844], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0296, 0.0344, 0.0281, 0.0348, 0.0296, 0.0253, 0.0286], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:29:11,725 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2128, 1.1395, 1.1127, 1.4989, 1.1590, 1.3343, 1.2892, 1.2542], + device='cuda:0'), covar=tensor([0.1089, 0.1327, 0.1440, 0.0945, 0.1084, 0.1109, 0.1160, 0.0887], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0283, 0.0271, 0.0310, 0.0311, 0.0259, 0.0282, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 01:29:20,074 INFO [train.py:903] (0/4) Epoch 3, batch 5550, loss[loss=0.3056, simple_loss=0.3598, pruned_loss=0.1258, over 19857.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3694, pruned_loss=0.1352, over 3819117.10 frames. ], batch size: 52, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:29:26,556 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3784, 1.0379, 1.3346, 1.2534, 2.1157, 1.1889, 1.7980, 2.0889], + device='cuda:0'), covar=tensor([0.0559, 0.2404, 0.2170, 0.1336, 0.0607, 0.1573, 0.0905, 0.0615], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0310, 0.0303, 0.0278, 0.0290, 0.0316, 0.0276, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:29:27,430 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 01:29:32,666 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 01:29:41,442 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:29:42,151 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 7.947e+02 9.800e+02 1.342e+03 2.993e+03, threshold=1.960e+03, percent-clipped=6.0 +2023-04-01 01:30:02,026 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19241.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:30:09,705 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:30:15,624 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-01 01:30:21,461 INFO [train.py:903] (0/4) Epoch 3, batch 5600, loss[loss=0.3407, simple_loss=0.3873, pruned_loss=0.1471, over 19063.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.367, pruned_loss=0.1336, over 3829164.57 frames. ], batch size: 69, lr: 2.34e-02, grad_scale: 8.0 +2023-04-01 01:30:33,673 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19266.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:31:03,988 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:31:18,606 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 01:31:22,068 INFO [train.py:903] (0/4) Epoch 3, batch 5650, loss[loss=0.3378, simple_loss=0.3787, pruned_loss=0.1485, over 18220.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3665, pruned_loss=0.1335, over 3830550.92 frames. ], batch size: 83, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:31:41,038 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 7.403e+02 9.102e+02 1.185e+03 3.385e+03, threshold=1.820e+03, percent-clipped=3.0 +2023-04-01 01:32:09,534 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 01:32:15,629 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0431, 2.5271, 1.7809, 2.2705, 1.8391, 1.8328, 0.5436, 2.0725], + device='cuda:0'), covar=tensor([0.0215, 0.0256, 0.0247, 0.0280, 0.0434, 0.0379, 0.0565, 0.0428], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0255, 0.0254, 0.0281, 0.0339, 0.0268, 0.0266, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:32:21,816 INFO [train.py:903] (0/4) Epoch 3, batch 5700, loss[loss=0.2805, simple_loss=0.3319, pruned_loss=0.1145, over 19492.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3663, pruned_loss=0.1331, over 3833235.61 frames. ], batch size: 49, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:32:38,177 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 01:32:47,051 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4078, 1.3393, 1.0222, 1.3389, 1.1605, 1.2167, 1.0927, 1.2296], + device='cuda:0'), covar=tensor([0.0893, 0.1187, 0.1351, 0.0842, 0.0993, 0.0615, 0.0984, 0.0775], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0376, 0.0280, 0.0248, 0.0310, 0.0262, 0.0267, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:33:22,521 INFO [train.py:903] (0/4) Epoch 3, batch 5750, loss[loss=0.3201, simple_loss=0.3722, pruned_loss=0.134, over 19677.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3676, pruned_loss=0.1336, over 3833148.95 frames. ], batch size: 59, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:33:22,872 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:22,942 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:24,898 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-01 01:33:30,054 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:34,047 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 01:33:39,378 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 01:33:43,561 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.525e+02 9.538e+02 1.231e+03 3.556e+03, threshold=1.908e+03, percent-clipped=6.0 +2023-04-01 01:33:53,866 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:34:22,185 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1653, 2.0817, 1.5499, 1.6766, 1.5064, 1.6681, 0.2214, 0.8457], + device='cuda:0'), covar=tensor([0.0203, 0.0190, 0.0143, 0.0189, 0.0410, 0.0246, 0.0481, 0.0368], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0247, 0.0247, 0.0272, 0.0331, 0.0259, 0.0257, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:34:22,889 INFO [train.py:903] (0/4) Epoch 3, batch 5800, loss[loss=0.3447, simple_loss=0.3911, pruned_loss=0.1492, over 19345.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3679, pruned_loss=0.1341, over 3828606.68 frames. ], batch size: 66, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:34:49,395 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9785, 2.0270, 2.1461, 2.6588, 2.0665, 2.7065, 2.9071, 2.9312], + device='cuda:0'), covar=tensor([0.0584, 0.1025, 0.1023, 0.1234, 0.1240, 0.0801, 0.0853, 0.0532], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0275, 0.0266, 0.0300, 0.0307, 0.0249, 0.0274, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 01:35:03,997 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19491.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:35:07,671 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.94 vs. limit=5.0 +2023-04-01 01:35:23,138 INFO [train.py:903] (0/4) Epoch 3, batch 5850, loss[loss=0.3044, simple_loss=0.3653, pruned_loss=0.1217, over 19756.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3673, pruned_loss=0.1346, over 3835829.68 frames. ], batch size: 54, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:43,379 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+02 8.354e+02 1.035e+03 1.319e+03 5.609e+03, threshold=2.070e+03, percent-clipped=8.0 +2023-04-01 01:35:54,668 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2784, 1.3122, 1.0830, 1.0120, 0.9411, 1.2592, 0.0115, 0.3904], + device='cuda:0'), covar=tensor([0.0233, 0.0215, 0.0137, 0.0164, 0.0475, 0.0157, 0.0368, 0.0353], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0250, 0.0251, 0.0278, 0.0332, 0.0264, 0.0257, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:36:23,588 INFO [train.py:903] (0/4) Epoch 3, batch 5900, loss[loss=0.2997, simple_loss=0.3558, pruned_loss=0.1218, over 19843.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3683, pruned_loss=0.1354, over 3825049.82 frames. 
], batch size: 52, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:36:26,955 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 01:36:46,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 01:37:21,768 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19606.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:37:22,519 INFO [train.py:903] (0/4) Epoch 3, batch 5950, loss[loss=0.298, simple_loss=0.3611, pruned_loss=0.1174, over 19607.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3691, pruned_loss=0.1351, over 3825069.77 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:37:45,645 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.894e+02 9.030e+02 1.163e+03 3.004e+03, threshold=1.806e+03, percent-clipped=5.0 +2023-04-01 01:37:59,423 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:38:24,426 INFO [train.py:903] (0/4) Epoch 3, batch 6000, loss[loss=0.3386, simple_loss=0.3618, pruned_loss=0.1577, over 17772.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3693, pruned_loss=0.1354, over 3821389.79 frames. ], batch size: 39, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:38:24,426 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 01:38:37,335 INFO [train.py:937] (0/4) Epoch 3, validation: loss=0.2218, simple_loss=0.3182, pruned_loss=0.06273, over 944034.00 frames. +2023-04-01 01:38:37,335 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 01:38:45,538 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:15,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:37,218 INFO [train.py:903] (0/4) Epoch 3, batch 6050, loss[loss=0.3333, simple_loss=0.3822, pruned_loss=0.1422, over 18322.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3686, pruned_loss=0.1349, over 3812244.67 frames. ], batch size: 84, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:39:59,308 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+02 7.768e+02 9.451e+02 1.306e+03 2.772e+03, threshold=1.890e+03, percent-clipped=6.0 +2023-04-01 01:40:09,625 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 01:40:37,790 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:40:38,845 INFO [train.py:903] (0/4) Epoch 3, batch 6100, loss[loss=0.3236, simple_loss=0.3629, pruned_loss=0.1422, over 19409.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3681, pruned_loss=0.1345, over 3809771.56 frames. ], batch size: 48, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:41:38,886 INFO [train.py:903] (0/4) Epoch 3, batch 6150, loss[loss=0.3069, simple_loss=0.3722, pruned_loss=0.1208, over 19285.00 frames. ], tot_loss[loss=0.3204, simple_loss=0.3697, pruned_loss=0.1355, over 3810557.88 frames. 
], batch size: 66, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:01,425 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+02 8.602e+02 1.123e+03 1.510e+03 2.312e+03, threshold=2.246e+03, percent-clipped=7.0 +2023-04-01 01:42:04,685 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 01:42:38,569 INFO [train.py:903] (0/4) Epoch 3, batch 6200, loss[loss=0.268, simple_loss=0.3224, pruned_loss=0.1068, over 19820.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.3674, pruned_loss=0.1346, over 3810416.10 frames. ], batch size: 47, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:46,184 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19862.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:42:56,179 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:43:15,863 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19887.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:43:39,798 INFO [train.py:903] (0/4) Epoch 3, batch 6250, loss[loss=0.3011, simple_loss=0.3599, pruned_loss=0.1212, over 19531.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3667, pruned_loss=0.1338, over 3816534.33 frames. ], batch size: 56, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:01,767 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+02 7.100e+02 9.208e+02 1.133e+03 3.490e+03, threshold=1.842e+03, percent-clipped=3.0 +2023-04-01 01:44:09,643 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 01:44:40,497 INFO [train.py:903] (0/4) Epoch 3, batch 6300, loss[loss=0.2416, simple_loss=0.3036, pruned_loss=0.08987, over 19726.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3661, pruned_loss=0.1331, over 3827202.04 frames. ], batch size: 46, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:55,611 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. 
limit=5.0 +2023-04-01 01:45:07,281 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19980.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:45:21,579 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4106, 0.9346, 1.1764, 1.3121, 2.0735, 1.0499, 1.7849, 2.0331], + device='cuda:0'), covar=tensor([0.0551, 0.2492, 0.2258, 0.1340, 0.0635, 0.1658, 0.0839, 0.0637], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0309, 0.0301, 0.0278, 0.0295, 0.0315, 0.0278, 0.0288], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:45:32,462 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-20000.pt +2023-04-01 01:45:41,421 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1744, 1.9034, 1.8954, 2.2918, 2.1539, 2.0250, 1.9476, 2.0925], + device='cuda:0'), covar=tensor([0.0570, 0.1107, 0.0902, 0.0541, 0.0740, 0.0394, 0.0703, 0.0438], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0381, 0.0288, 0.0245, 0.0310, 0.0261, 0.0270, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:45:42,110 INFO [train.py:903] (0/4) Epoch 3, batch 6350, loss[loss=0.369, simple_loss=0.4038, pruned_loss=0.1671, over 13031.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3661, pruned_loss=0.133, over 3831010.26 frames. ], batch size: 136, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:46:03,318 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.424e+02 9.293e+02 1.158e+03 2.968e+03, threshold=1.859e+03, percent-clipped=5.0 +2023-04-01 01:46:20,197 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20038.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:46:42,148 INFO [train.py:903] (0/4) Epoch 3, batch 6400, loss[loss=0.3183, simple_loss=0.381, pruned_loss=0.1278, over 19276.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3667, pruned_loss=0.1331, over 3837961.01 frames. ], batch size: 66, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:46:47,220 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5892, 1.6182, 1.5125, 2.2912, 1.4614, 1.9080, 1.9860, 1.5093], + device='cuda:0'), covar=tensor([0.1095, 0.0906, 0.0578, 0.0432, 0.0942, 0.0406, 0.0992, 0.0902], + device='cuda:0'), in_proj_covar=tensor([0.0508, 0.0480, 0.0476, 0.0641, 0.0553, 0.0400, 0.0571, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 01:47:29,302 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:47:43,490 INFO [train.py:903] (0/4) Epoch 3, batch 6450, loss[loss=0.2783, simple_loss=0.3523, pruned_loss=0.1022, over 19654.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3653, pruned_loss=0.1316, over 3847948.26 frames. ], batch size: 55, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:43,803 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:47:51,714 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-04-01 01:47:53,546 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3169, 3.9165, 2.4082, 3.5171, 1.0447, 3.5337, 3.4346, 3.7387], + device='cuda:0'), covar=tensor([0.0689, 0.1322, 0.2129, 0.0794, 0.4374, 0.1077, 0.0907, 0.1078], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0295, 0.0339, 0.0268, 0.0342, 0.0293, 0.0246, 0.0288], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:48:05,562 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.028e+02 7.833e+02 9.380e+02 1.145e+03 2.427e+03, threshold=1.876e+03, percent-clipped=3.0 +2023-04-01 01:48:08,304 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:12,594 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:29,105 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 01:48:39,367 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:42,970 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 01:48:44,549 INFO [train.py:903] (0/4) Epoch 3, batch 6500, loss[loss=0.3027, simple_loss=0.373, pruned_loss=0.1162, over 19541.00 frames. ], tot_loss[loss=0.3142, simple_loss=0.3652, pruned_loss=0.1316, over 3834721.63 frames. ], batch size: 56, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:48:52,306 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 01:49:45,353 INFO [train.py:903] (0/4) Epoch 3, batch 6550, loss[loss=0.3194, simple_loss=0.358, pruned_loss=0.1404, over 19382.00 frames. ], tot_loss[loss=0.3137, simple_loss=0.3647, pruned_loss=0.1314, over 3844589.29 frames. ], batch size: 47, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:50:03,598 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:50:07,448 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.085e+02 9.596e+02 1.224e+03 2.377e+03, threshold=1.919e+03, percent-clipped=5.0 +2023-04-01 01:50:46,685 INFO [train.py:903] (0/4) Epoch 3, batch 6600, loss[loss=0.3421, simple_loss=0.3935, pruned_loss=0.1453, over 19087.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.3658, pruned_loss=0.1319, over 3837422.34 frames. ], batch size: 69, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:51:19,942 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8441, 1.8616, 2.1142, 3.0267, 2.3464, 2.5398, 2.3354, 2.9620], + device='cuda:0'), covar=tensor([0.0630, 0.1779, 0.1239, 0.0664, 0.1133, 0.0429, 0.0910, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0383, 0.0288, 0.0251, 0.0320, 0.0265, 0.0275, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:51:35,733 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-04-01 01:51:47,898 INFO [train.py:903] (0/4) Epoch 3, batch 6650, loss[loss=0.3085, simple_loss=0.3685, pruned_loss=0.1243, over 19679.00 frames. 
], tot_loss[loss=0.3152, simple_loss=0.3661, pruned_loss=0.1322, over 3839115.63 frames. ], batch size: 55, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:52:10,192 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+02 7.672e+02 8.992e+02 1.144e+03 3.524e+03, threshold=1.798e+03, percent-clipped=4.0 +2023-04-01 01:52:42,304 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:52:49,537 INFO [train.py:903] (0/4) Epoch 3, batch 6700, loss[loss=0.2793, simple_loss=0.3261, pruned_loss=0.1163, over 19736.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3669, pruned_loss=0.1329, over 3808837.96 frames. ], batch size: 45, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:53:11,820 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:17,971 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:44,504 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:46,033 INFO [train.py:903] (0/4) Epoch 3, batch 6750, loss[loss=0.3045, simple_loss=0.3616, pruned_loss=0.1237, over 19686.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3669, pruned_loss=0.1335, over 3808230.25 frames. ], batch size: 59, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:54:05,359 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+02 7.667e+02 1.002e+03 1.269e+03 2.908e+03, threshold=2.004e+03, percent-clipped=6.0 +2023-04-01 01:54:17,377 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:26,459 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-01 01:54:34,793 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:41,604 INFO [train.py:903] (0/4) Epoch 3, batch 6800, loss[loss=0.3072, simple_loss=0.3667, pruned_loss=0.1239, over 19558.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3655, pruned_loss=0.1324, over 3812023.67 frames. ], batch size: 54, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:55:00,655 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:55:04,509 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6305, 1.6485, 1.9267, 1.7963, 2.6057, 2.8096, 2.8464, 3.0257], + device='cuda:0'), covar=tensor([0.1181, 0.2133, 0.2029, 0.1488, 0.0545, 0.0210, 0.0231, 0.0132], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0276, 0.0322, 0.0264, 0.0192, 0.0106, 0.0196, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 01:55:11,008 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-3.pt +2023-04-01 01:55:26,349 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 01:55:26,796 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. 
Duration: 26.6166875 +2023-04-01 01:55:29,041 INFO [train.py:903] (0/4) Epoch 4, batch 0, loss[loss=0.3576, simple_loss=0.4, pruned_loss=0.1577, over 19750.00 frames. ], tot_loss[loss=0.3576, simple_loss=0.4, pruned_loss=0.1577, over 19750.00 frames. ], batch size: 63, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:55:29,041 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 01:55:40,521 INFO [train.py:937] (0/4) Epoch 4, validation: loss=0.2245, simple_loss=0.3205, pruned_loss=0.06426, over 944034.00 frames. +2023-04-01 01:55:40,521 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 01:55:53,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 01:55:55,117 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20497.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:56:27,860 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.855e+02 8.790e+02 1.166e+03 2.960e+03, threshold=1.758e+03, percent-clipped=3.0 +2023-04-01 01:56:41,009 INFO [train.py:903] (0/4) Epoch 4, batch 50, loss[loss=0.2927, simple_loss=0.3635, pruned_loss=0.1109, over 19534.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3649, pruned_loss=0.1325, over 871116.95 frames. ], batch size: 56, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:56:50,168 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.01 vs. limit=5.0 +2023-04-01 01:56:54,047 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1088, 5.4775, 2.8471, 4.9270, 1.4283, 5.2270, 5.1771, 5.4814], + device='cuda:0'), covar=tensor([0.0409, 0.0999, 0.1922, 0.0498, 0.3982, 0.0725, 0.0608, 0.0582], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0298, 0.0336, 0.0270, 0.0346, 0.0291, 0.0247, 0.0285], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:57:04,734 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4353, 1.1255, 1.3800, 1.7313, 3.0555, 1.2246, 2.1086, 3.1214], + device='cuda:0'), covar=tensor([0.0360, 0.2514, 0.2234, 0.1362, 0.0459, 0.2005, 0.1036, 0.0376], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0308, 0.0307, 0.0280, 0.0293, 0.0315, 0.0276, 0.0284], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:57:14,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 01:57:15,796 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:16,005 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:23,251 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1299, 5.4082, 2.9952, 4.9719, 1.3403, 5.3902, 5.3414, 5.6359], + device='cuda:0'), covar=tensor([0.0357, 0.0915, 0.1507, 0.0467, 0.3511, 0.0567, 0.0426, 0.0503], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0294, 0.0332, 0.0266, 0.0341, 0.0287, 0.0244, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 01:57:40,114 INFO [train.py:903] (0/4) Epoch 4, batch 100, loss[loss=0.2696, simple_loss=0.3185, pruned_loss=0.1104, over 18124.00 frames. 
], tot_loss[loss=0.3149, simple_loss=0.3644, pruned_loss=0.1328, over 1529178.06 frames. ], batch size: 40, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:46,144 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20589.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:52,772 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 01:58:05,903 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:58:29,320 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 7.763e+02 9.275e+02 1.175e+03 2.763e+03, threshold=1.855e+03, percent-clipped=7.0 +2023-04-01 01:58:41,724 INFO [train.py:903] (0/4) Epoch 4, batch 150, loss[loss=0.282, simple_loss=0.3411, pruned_loss=0.1115, over 19742.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3614, pruned_loss=0.1292, over 2033537.98 frames. ], batch size: 51, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:58:43,271 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0082, 1.8397, 1.3624, 1.3256, 1.6604, 0.8844, 0.6760, 1.5162], + device='cuda:0'), covar=tensor([0.0734, 0.0528, 0.0929, 0.0543, 0.0495, 0.1189, 0.0775, 0.0377], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0238, 0.0308, 0.0248, 0.0211, 0.0306, 0.0269, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 01:59:35,903 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20681.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:59:40,169 INFO [train.py:903] (0/4) Epoch 4, batch 200, loss[loss=0.2762, simple_loss=0.3337, pruned_loss=0.1093, over 19740.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3619, pruned_loss=0.1294, over 2436469.65 frames. ], batch size: 51, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:41,292 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 01:59:43,164 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 02:00:16,602 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1350, 1.1809, 1.9491, 1.3045, 2.3465, 2.3289, 2.6391, 0.8502], + device='cuda:0'), covar=tensor([0.1564, 0.2531, 0.1174, 0.1338, 0.1029, 0.1069, 0.1021, 0.2286], + device='cuda:0'), in_proj_covar=tensor([0.0424, 0.0471, 0.0437, 0.0399, 0.0517, 0.0414, 0.0593, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 02:00:28,682 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 7.097e+02 9.184e+02 1.257e+03 2.857e+03, threshold=1.837e+03, percent-clipped=5.0 +2023-04-01 02:00:39,435 INFO [train.py:903] (0/4) Epoch 4, batch 250, loss[loss=0.3275, simple_loss=0.3883, pruned_loss=0.1333, over 19566.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3636, pruned_loss=0.1307, over 2740885.29 frames. 
], batch size: 61, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:00:43,909 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7513, 1.0024, 1.3080, 2.1059, 1.6887, 2.0082, 2.1021, 1.6291], + device='cuda:0'), covar=tensor([0.0796, 0.1373, 0.1371, 0.0987, 0.1163, 0.0756, 0.1135, 0.0876], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0276, 0.0267, 0.0295, 0.0305, 0.0248, 0.0268, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:00:53,622 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4847, 1.1196, 1.1412, 1.7356, 1.3939, 1.7049, 1.7821, 1.4039], + device='cuda:0'), covar=tensor([0.0745, 0.1161, 0.1304, 0.0813, 0.0967, 0.0690, 0.0881, 0.0789], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0274, 0.0266, 0.0294, 0.0304, 0.0247, 0.0266, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:00:57,775 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:02,455 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:32,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:34,841 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:41,109 INFO [train.py:903] (0/4) Epoch 4, batch 300, loss[loss=0.3494, simple_loss=0.3946, pruned_loss=0.1521, over 19747.00 frames. ], tot_loss[loss=0.3117, simple_loss=0.3635, pruned_loss=0.13, over 2988407.39 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:02:15,290 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 02:02:25,184 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:29,141 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 7.559e+02 9.012e+02 1.206e+03 2.235e+03, threshold=1.802e+03, percent-clipped=6.0 +2023-04-01 02:02:40,179 INFO [train.py:903] (0/4) Epoch 4, batch 350, loss[loss=0.4024, simple_loss=0.4269, pruned_loss=0.1889, over 13341.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3638, pruned_loss=0.1302, over 3160274.76 frames. ], batch size: 136, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:02:45,648 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 02:02:52,654 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:54,955 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20847.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:18,340 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20865.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:20,549 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0577, 1.9441, 1.3599, 1.3706, 1.7611, 0.9431, 0.8902, 1.5735], + device='cuda:0'), covar=tensor([0.0631, 0.0364, 0.0875, 0.0422, 0.0325, 0.1025, 0.0621, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0245, 0.0314, 0.0246, 0.0212, 0.0314, 0.0277, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:03:23,857 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:27,190 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3975, 1.6793, 1.8371, 2.4398, 1.8660, 2.6698, 2.9327, 2.6804], + device='cuda:0'), covar=tensor([0.0776, 0.1228, 0.1228, 0.1254, 0.1368, 0.0739, 0.1043, 0.0653], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0272, 0.0266, 0.0297, 0.0304, 0.0248, 0.0267, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:03:34,713 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3157, 2.4054, 1.5751, 1.7950, 2.1563, 1.1951, 1.1822, 1.5876], + device='cuda:0'), covar=tensor([0.0713, 0.0320, 0.0883, 0.0408, 0.0364, 0.0975, 0.0602, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0241, 0.0312, 0.0244, 0.0211, 0.0310, 0.0273, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:03:40,719 INFO [train.py:903] (0/4) Epoch 4, batch 400, loss[loss=0.3864, simple_loss=0.4201, pruned_loss=0.1764, over 19753.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3635, pruned_loss=0.1303, over 3307434.16 frames. ], batch size: 63, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:03:52,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:27,878 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.529e+02 9.870e+02 1.265e+03 2.610e+03, threshold=1.974e+03, percent-clipped=3.0 +2023-04-01 02:04:39,096 INFO [train.py:903] (0/4) Epoch 4, batch 450, loss[loss=0.3524, simple_loss=0.394, pruned_loss=0.1554, over 18233.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3639, pruned_loss=0.1307, over 3410484.38 frames. 
], batch size: 84, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:04:41,848 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:58,412 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:02,976 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8396, 4.4872, 2.6621, 3.9905, 1.5237, 4.1128, 3.9366, 3.9894], + device='cuda:0'), covar=tensor([0.0460, 0.0839, 0.1766, 0.0578, 0.3132, 0.0758, 0.0613, 0.0859], + device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0303, 0.0345, 0.0275, 0.0343, 0.0294, 0.0250, 0.0294], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:05:12,742 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 02:05:13,091 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:13,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 02:05:40,636 INFO [train.py:903] (0/4) Epoch 4, batch 500, loss[loss=0.3377, simple_loss=0.3928, pruned_loss=0.1413, over 19545.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.3624, pruned_loss=0.1299, over 3507794.81 frames. ], batch size: 56, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:06:27,513 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.944e+02 7.370e+02 9.144e+02 1.191e+03 3.185e+03, threshold=1.829e+03, percent-clipped=4.0 +2023-04-01 02:06:27,865 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8049, 1.5474, 1.5470, 1.8462, 3.3371, 1.2410, 2.2007, 3.4309], + device='cuda:0'), covar=tensor([0.0315, 0.2128, 0.2111, 0.1353, 0.0420, 0.2046, 0.1033, 0.0365], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0304, 0.0301, 0.0282, 0.0295, 0.0316, 0.0281, 0.0285], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:06:40,256 INFO [train.py:903] (0/4) Epoch 4, batch 550, loss[loss=0.2578, simple_loss=0.3262, pruned_loss=0.09475, over 19585.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3618, pruned_loss=0.1289, over 3585643.90 frames. ], batch size: 52, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:07:08,543 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3497, 2.3799, 1.6128, 1.5574, 2.1035, 1.0518, 1.0411, 1.8452], + device='cuda:0'), covar=tensor([0.0815, 0.0455, 0.0965, 0.0541, 0.0360, 0.1147, 0.0730, 0.0377], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0241, 0.0306, 0.0241, 0.0209, 0.0305, 0.0267, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:07:08,877 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 02:07:17,078 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:23,301 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:24,893 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-01 02:07:39,554 INFO [train.py:903] (0/4) Epoch 4, batch 600, loss[loss=0.3488, simple_loss=0.4005, pruned_loss=0.1485, over 19471.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3613, pruned_loss=0.1285, over 3646176.26 frames. ], batch size: 64, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:19,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 02:08:23,052 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21121.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:27,137 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.199e+02 8.023e+02 1.001e+03 1.305e+03 2.804e+03, threshold=2.003e+03, percent-clipped=3.0 +2023-04-01 02:08:39,444 INFO [train.py:903] (0/4) Epoch 4, batch 650, loss[loss=0.2466, simple_loss=0.3036, pruned_loss=0.09479, over 19737.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.3603, pruned_loss=0.1275, over 3684096.38 frames. ], batch size: 45, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:40,602 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:52,206 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:00,276 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:29,101 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21176.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:38,635 INFO [train.py:903] (0/4) Epoch 4, batch 700, loss[loss=0.3206, simple_loss=0.3542, pruned_loss=0.1435, over 19385.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3601, pruned_loss=0.1271, over 3724784.32 frames. ], batch size: 47, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:26,834 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 7.118e+02 8.929e+02 1.162e+03 2.438e+03, threshold=1.786e+03, percent-clipped=3.0 +2023-04-01 02:10:40,519 INFO [train.py:903] (0/4) Epoch 4, batch 750, loss[loss=0.3277, simple_loss=0.3812, pruned_loss=0.1371, over 19570.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3605, pruned_loss=0.1273, over 3750877.99 frames. ], batch size: 61, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:11:40,043 INFO [train.py:903] (0/4) Epoch 4, batch 800, loss[loss=0.3132, simple_loss=0.3769, pruned_loss=0.1247, over 19659.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3625, pruned_loss=0.1291, over 3758970.39 frames. ], batch size: 55, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:11:56,119 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 02:12:25,416 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:30,559 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.379e+02 8.113e+02 1.001e+03 1.207e+03 2.017e+03, threshold=2.002e+03, percent-clipped=2.0 +2023-04-01 02:12:35,494 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:41,604 INFO [train.py:903] (0/4) Epoch 4, batch 850, loss[loss=0.3478, simple_loss=0.3914, pruned_loss=0.152, over 18785.00 frames. 
], tot_loss[loss=0.3096, simple_loss=0.3621, pruned_loss=0.1286, over 3767929.43 frames. ], batch size: 74, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:12:42,260 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 02:12:54,252 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:54,350 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:13:30,947 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 02:13:39,983 INFO [train.py:903] (0/4) Epoch 4, batch 900, loss[loss=0.3283, simple_loss=0.367, pruned_loss=0.1448, over 13443.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3637, pruned_loss=0.1302, over 3769789.89 frames. ], batch size: 137, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:13:42,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5024, 1.3380, 1.0761, 1.2711, 1.1594, 1.3206, 1.1058, 1.3336], + device='cuda:0'), covar=tensor([0.0851, 0.1104, 0.1402, 0.0865, 0.0956, 0.0576, 0.1032, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0382, 0.0286, 0.0256, 0.0317, 0.0266, 0.0270, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:14:14,503 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:14:28,712 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+02 7.656e+02 9.192e+02 1.106e+03 2.022e+03, threshold=1.838e+03, percent-clipped=1.0 +2023-04-01 02:14:40,651 INFO [train.py:903] (0/4) Epoch 4, batch 950, loss[loss=0.2991, simple_loss=0.3484, pruned_loss=0.1249, over 19671.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3628, pruned_loss=0.1293, over 3791745.30 frames. ], batch size: 53, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:43,013 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 02:15:35,413 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:15:40,648 INFO [train.py:903] (0/4) Epoch 4, batch 1000, loss[loss=0.358, simple_loss=0.395, pruned_loss=0.1605, over 19771.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3615, pruned_loss=0.1287, over 3801690.28 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:29,780 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.436e+02 9.242e+02 1.292e+03 2.692e+03, threshold=1.848e+03, percent-clipped=7.0 +2023-04-01 02:16:33,308 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 02:16:33,621 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:16:40,203 INFO [train.py:903] (0/4) Epoch 4, batch 1050, loss[loss=0.2853, simple_loss=0.3447, pruned_loss=0.1129, over 19846.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3614, pruned_loss=0.1284, over 3814422.65 frames. 
], batch size: 52, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:14,232 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 02:17:40,177 INFO [train.py:903] (0/4) Epoch 4, batch 1100, loss[loss=0.2963, simple_loss=0.3657, pruned_loss=0.1134, over 19772.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3625, pruned_loss=0.129, over 3818749.66 frames. ], batch size: 56, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:52,953 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:03,973 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2087, 1.3155, 0.9371, 0.9989, 0.9963, 1.1497, 0.0090, 0.3861], + device='cuda:0'), covar=tensor([0.0250, 0.0243, 0.0186, 0.0199, 0.0490, 0.0203, 0.0426, 0.0389], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0262, 0.0258, 0.0285, 0.0338, 0.0275, 0.0264, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:18:15,033 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:30,559 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+02 7.921e+02 9.622e+02 1.275e+03 2.981e+03, threshold=1.924e+03, percent-clipped=6.0 +2023-04-01 02:18:36,434 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21631.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:43,699 INFO [train.py:903] (0/4) Epoch 4, batch 1150, loss[loss=0.2655, simple_loss=0.307, pruned_loss=0.112, over 19743.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.362, pruned_loss=0.1288, over 3810166.27 frames. ], batch size: 45, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:30,748 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:45,550 INFO [train.py:903] (0/4) Epoch 4, batch 1200, loss[loss=0.3354, simple_loss=0.377, pruned_loss=0.1469, over 19726.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3606, pruned_loss=0.1278, over 3810808.14 frames. ], batch size: 63, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:51,412 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:54,755 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.5027, 5.0181, 2.9964, 4.3813, 1.4298, 4.5795, 4.5511, 4.8256], + device='cuda:0'), covar=tensor([0.0438, 0.0891, 0.1497, 0.0581, 0.3882, 0.0722, 0.0615, 0.0672], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0302, 0.0342, 0.0278, 0.0348, 0.0295, 0.0259, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:20:14,889 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 02:20:35,528 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+02 7.515e+02 9.038e+02 1.111e+03 2.454e+03, threshold=1.808e+03, percent-clipped=2.0 +2023-04-01 02:20:45,434 INFO [train.py:903] (0/4) Epoch 4, batch 1250, loss[loss=0.2433, simple_loss=0.303, pruned_loss=0.09183, over 19743.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3603, pruned_loss=0.1276, over 3800286.15 frames. 
], batch size: 47, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:39,797 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:44,776 INFO [train.py:903] (0/4) Epoch 4, batch 1300, loss[loss=0.2831, simple_loss=0.3348, pruned_loss=0.1157, over 19291.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3587, pruned_loss=0.1262, over 3805951.96 frames. ], batch size: 44, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:45,186 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:49,660 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:08,377 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 02:22:10,049 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:15,818 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:34,649 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 7.410e+02 9.034e+02 1.121e+03 1.849e+03, threshold=1.807e+03, percent-clipped=1.0 +2023-04-01 02:22:45,118 INFO [train.py:903] (0/4) Epoch 4, batch 1350, loss[loss=0.3001, simple_loss=0.3537, pruned_loss=0.1233, over 19666.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3589, pruned_loss=0.1262, over 3802811.55 frames. ], batch size: 53, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:23:04,038 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:23,930 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:33,929 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:45,407 INFO [train.py:903] (0/4) Epoch 4, batch 1400, loss[loss=0.2864, simple_loss=0.332, pruned_loss=0.1204, over 19025.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3599, pruned_loss=0.1272, over 3803445.56 frames. ], batch size: 42, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:24:15,793 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.77 vs. limit=5.0 +2023-04-01 02:24:35,708 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.358e+02 7.364e+02 9.517e+02 1.310e+03 2.254e+03, threshold=1.903e+03, percent-clipped=6.0 +2023-04-01 02:24:42,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 02:24:46,194 INFO [train.py:903] (0/4) Epoch 4, batch 1450, loss[loss=0.3118, simple_loss=0.3675, pruned_loss=0.1281, over 19690.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3589, pruned_loss=0.1264, over 3811577.21 frames. 
], batch size: 60, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:13,206 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:34,255 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:40,061 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7790, 1.6435, 1.5590, 1.9058, 3.3217, 1.3171, 2.1583, 3.5882], + device='cuda:0'), covar=tensor([0.0322, 0.2119, 0.2010, 0.1360, 0.0426, 0.2035, 0.1128, 0.0271], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0302, 0.0303, 0.0281, 0.0286, 0.0320, 0.0278, 0.0284], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:25:45,923 INFO [train.py:903] (0/4) Epoch 4, batch 1500, loss[loss=0.319, simple_loss=0.3714, pruned_loss=0.1333, over 19673.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3594, pruned_loss=0.1272, over 3818592.35 frames. ], batch size: 55, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:46,306 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0648, 2.7763, 1.8614, 2.1037, 1.8263, 2.1475, 0.7845, 2.1320], + device='cuda:0'), covar=tensor([0.0249, 0.0244, 0.0250, 0.0328, 0.0452, 0.0339, 0.0518, 0.0423], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0264, 0.0264, 0.0288, 0.0347, 0.0276, 0.0265, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:25:49,629 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:50,920 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1697, 1.1374, 1.5704, 1.2874, 2.1774, 2.0975, 2.3312, 0.7726], + device='cuda:0'), covar=tensor([0.1644, 0.2774, 0.1430, 0.1411, 0.1039, 0.1279, 0.1084, 0.2436], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0480, 0.0446, 0.0407, 0.0522, 0.0420, 0.0604, 0.0430], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 02:25:55,271 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2130, 5.5774, 3.0143, 4.9905, 1.4357, 5.4154, 5.4116, 5.5288], + device='cuda:0'), covar=tensor([0.0340, 0.0825, 0.1681, 0.0451, 0.3608, 0.0598, 0.0560, 0.0578], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0300, 0.0344, 0.0278, 0.0346, 0.0298, 0.0259, 0.0292], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:26:05,040 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-22000.pt +2023-04-01 02:26:20,386 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6991, 1.2899, 1.3808, 2.2332, 1.6375, 1.9355, 2.0198, 1.8931], + device='cuda:0'), covar=tensor([0.0866, 0.1222, 0.1339, 0.0948, 0.1081, 0.0870, 0.1004, 0.0765], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0266, 0.0262, 0.0296, 0.0298, 0.0243, 0.0263, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:26:36,550 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+02 6.805e+02 9.241e+02 1.170e+03 2.581e+03, threshold=1.848e+03, percent-clipped=2.0 
+2023-04-01 02:26:47,124 INFO [train.py:903] (0/4) Epoch 4, batch 1550, loss[loss=0.2669, simple_loss=0.3354, pruned_loss=0.09919, over 19667.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3586, pruned_loss=0.1266, over 3829033.86 frames. ], batch size: 53, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:01,608 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:19,398 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:30,474 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:32,751 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:49,225 INFO [train.py:903] (0/4) Epoch 4, batch 1600, loss[loss=0.3109, simple_loss=0.3691, pruned_loss=0.1264, over 19779.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3596, pruned_loss=0.1272, over 3835901.46 frames. ], batch size: 56, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:50,741 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22086.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:55,242 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:10,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 02:28:37,639 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:39,601 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+02 8.565e+02 1.081e+03 1.346e+03 3.673e+03, threshold=2.162e+03, percent-clipped=6.0 +2023-04-01 02:28:48,843 INFO [train.py:903] (0/4) Epoch 4, batch 1650, loss[loss=0.2888, simple_loss=0.3491, pruned_loss=0.1143, over 19600.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3593, pruned_loss=0.1271, over 3835287.04 frames. ], batch size: 50, lr: 2.05e-02, grad_scale: 4.0 +2023-04-01 02:29:04,251 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22148.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:29:49,257 INFO [train.py:903] (0/4) Epoch 4, batch 1700, loss[loss=0.3194, simple_loss=0.3738, pruned_loss=0.1325, over 19603.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3596, pruned_loss=0.1268, over 3841634.57 frames. ], batch size: 61, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:00,399 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22194.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:23,329 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22212.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:27,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 02:30:40,332 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.920e+02 6.276e+02 7.753e+02 9.050e+02 1.909e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 02:30:48,057 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22233.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:49,928 INFO [train.py:903] (0/4) Epoch 4, batch 1750, loss[loss=0.3165, simple_loss=0.3718, pruned_loss=0.1307, over 19569.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.36, pruned_loss=0.1272, over 3833302.08 frames. ], batch size: 61, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:56,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-01 02:30:57,711 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:15,623 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22255.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:52,442 INFO [train.py:903] (0/4) Epoch 4, batch 1800, loss[loss=0.3398, simple_loss=0.3878, pruned_loss=0.1459, over 19619.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3597, pruned_loss=0.1268, over 3825258.49 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:32:43,154 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+02 7.313e+02 8.961e+02 1.128e+03 3.443e+03, threshold=1.792e+03, percent-clipped=8.0 +2023-04-01 02:32:43,412 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:44,458 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:46,332 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 02:32:48,645 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22332.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:52,003 INFO [train.py:903] (0/4) Epoch 4, batch 1850, loss[loss=0.264, simple_loss=0.3264, pruned_loss=0.1008, over 19849.00 frames. ], tot_loss[loss=0.3074, simple_loss=0.3605, pruned_loss=0.1271, over 3829521.37 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:33:04,841 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:13,899 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:24,820 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 02:33:36,220 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:51,849 INFO [train.py:903] (0/4) Epoch 4, batch 1900, loss[loss=0.3326, simple_loss=0.3823, pruned_loss=0.1414, over 19747.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3602, pruned_loss=0.1267, over 3822415.05 frames. ], batch size: 51, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:34:09,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-01 02:34:14,789 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 02:34:15,042 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:34:39,429 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 02:34:42,867 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.044e+02 7.549e+02 9.520e+02 1.192e+03 3.384e+03, threshold=1.904e+03, percent-clipped=5.0 +2023-04-01 02:34:51,945 INFO [train.py:903] (0/4) Epoch 4, batch 1950, loss[loss=0.283, simple_loss=0.3462, pruned_loss=0.1099, over 19757.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.359, pruned_loss=0.1258, over 3826945.02 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:35:08,637 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:35:53,487 INFO [train.py:903] (0/4) Epoch 4, batch 2000, loss[loss=0.3054, simple_loss=0.3615, pruned_loss=0.1246, over 19523.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3601, pruned_loss=0.1268, over 3804381.24 frames. ], batch size: 56, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:01,809 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22492.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:06,594 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:37,391 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:45,515 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+02 6.735e+02 8.799e+02 1.102e+03 2.294e+03, threshold=1.760e+03, percent-clipped=1.0 +2023-04-01 02:36:46,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 02:36:47,091 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4185, 1.1697, 1.5416, 0.9574, 2.5720, 3.1510, 3.0729, 3.3689], + device='cuda:0'), covar=tensor([0.1184, 0.2765, 0.2551, 0.1892, 0.0375, 0.0118, 0.0205, 0.0106], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0280, 0.0323, 0.0260, 0.0192, 0.0110, 0.0201, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 02:36:54,493 INFO [train.py:903] (0/4) Epoch 4, batch 2050, loss[loss=0.3877, simple_loss=0.4286, pruned_loss=0.1734, over 19660.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3603, pruned_loss=0.1267, over 3795184.52 frames. ], batch size: 55, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:58,154 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:04,905 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 02:37:06,035 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-01 02:37:20,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2279, 1.2163, 1.4098, 1.5459, 2.7859, 1.0505, 2.0155, 2.9317], + device='cuda:0'), covar=tensor([0.0422, 0.2544, 0.2358, 0.1521, 0.0539, 0.2237, 0.1035, 0.0446], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0308, 0.0304, 0.0278, 0.0292, 0.0320, 0.0283, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:37:27,448 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 02:37:45,369 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22577.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:52,351 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:54,201 INFO [train.py:903] (0/4) Epoch 4, batch 2100, loss[loss=0.2733, simple_loss=0.342, pruned_loss=0.1023, over 19715.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3594, pruned_loss=0.1263, over 3809685.80 frames. ], batch size: 59, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:38:10,689 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:21,642 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 02:38:21,978 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:23,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:42,450 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 02:38:44,808 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.772e+02 6.954e+02 8.861e+02 1.126e+03 2.028e+03, threshold=1.772e+03, percent-clipped=3.0 +2023-04-01 02:38:53,760 INFO [train.py:903] (0/4) Epoch 4, batch 2150, loss[loss=0.3753, simple_loss=0.4017, pruned_loss=0.1745, over 13370.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3597, pruned_loss=0.1264, over 3796708.32 frames. ], batch size: 136, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:39:17,732 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22653.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:22,223 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22657.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:56,339 INFO [train.py:903] (0/4) Epoch 4, batch 2200, loss[loss=0.2792, simple_loss=0.3358, pruned_loss=0.1112, over 19742.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3578, pruned_loss=0.1249, over 3807163.23 frames. 
], batch size: 51, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:40:05,445 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:08,931 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3434, 1.0591, 1.1670, 1.4177, 1.1545, 1.3611, 1.3838, 1.2470], + device='cuda:0'), covar=tensor([0.0852, 0.1190, 0.1154, 0.0764, 0.0874, 0.0819, 0.0828, 0.0733], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0266, 0.0264, 0.0301, 0.0300, 0.0249, 0.0264, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:40:18,035 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:30,220 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:47,465 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+02 6.866e+02 8.417e+02 1.098e+03 2.160e+03, threshold=1.683e+03, percent-clipped=4.0 +2023-04-01 02:40:49,078 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:56,709 INFO [train.py:903] (0/4) Epoch 4, batch 2250, loss[loss=0.3016, simple_loss=0.3664, pruned_loss=0.1184, over 19701.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.3583, pruned_loss=0.1257, over 3803527.88 frames. ], batch size: 59, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:41:10,258 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:41:50,575 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3831, 2.2741, 1.5290, 1.6845, 1.9936, 1.0211, 1.2224, 1.6816], + device='cuda:0'), covar=tensor([0.0771, 0.0441, 0.0965, 0.0534, 0.0370, 0.1095, 0.0659, 0.0413], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0245, 0.0321, 0.0245, 0.0221, 0.0308, 0.0284, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:41:57,020 INFO [train.py:903] (0/4) Epoch 4, batch 2300, loss[loss=0.3809, simple_loss=0.4156, pruned_loss=0.1731, over 19735.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3576, pruned_loss=0.1254, over 3807093.59 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:42:09,271 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 02:42:31,173 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22812.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:42:47,875 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+02 7.223e+02 9.387e+02 1.200e+03 1.860e+03, threshold=1.877e+03, percent-clipped=8.0 +2023-04-01 02:42:56,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.89 vs. limit=5.0 +2023-04-01 02:42:56,885 INFO [train.py:903] (0/4) Epoch 4, batch 2350, loss[loss=0.311, simple_loss=0.3661, pruned_loss=0.1279, over 19569.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3591, pruned_loss=0.126, over 3810634.02 frames. 
], batch size: 61, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:43:06,743 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:31,035 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:32,278 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:38,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 02:43:54,378 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 02:43:57,673 INFO [train.py:903] (0/4) Epoch 4, batch 2400, loss[loss=0.4557, simple_loss=0.4562, pruned_loss=0.2276, over 12795.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3585, pruned_loss=0.1258, over 3806463.45 frames. ], batch size: 135, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:44:03,301 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:27,618 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:38,976 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0002, 5.3119, 2.6931, 4.6513, 1.3758, 5.2829, 5.2206, 5.3605], + device='cuda:0'), covar=tensor([0.0462, 0.1110, 0.2239, 0.0593, 0.4376, 0.0678, 0.0628, 0.0744], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0297, 0.0353, 0.0275, 0.0350, 0.0296, 0.0259, 0.0298], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 02:44:49,611 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+02 7.425e+02 9.466e+02 1.182e+03 3.064e+03, threshold=1.893e+03, percent-clipped=2.0 +2023-04-01 02:44:58,904 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:59,530 INFO [train.py:903] (0/4) Epoch 4, batch 2450, loss[loss=0.3638, simple_loss=0.3977, pruned_loss=0.1649, over 19655.00 frames. ], tot_loss[loss=0.3037, simple_loss=0.3573, pruned_loss=0.1251, over 3812642.06 frames. ], batch size: 53, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:45:14,042 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:40,891 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:44,395 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:57,738 INFO [train.py:903] (0/4) Epoch 4, batch 2500, loss[loss=0.3713, simple_loss=0.4102, pruned_loss=0.1662, over 19675.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3583, pruned_loss=0.1264, over 3820946.56 frames. 
], batch size: 58, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:46:09,304 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22995.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:16,257 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:46,585 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9893, 1.4490, 1.3976, 2.0866, 1.6363, 2.1178, 2.2532, 1.9836], + device='cuda:0'), covar=tensor([0.0691, 0.0990, 0.1058, 0.0853, 0.0992, 0.0683, 0.0746, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0262, 0.0258, 0.0292, 0.0296, 0.0243, 0.0256, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:46:46,610 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0394, 1.4605, 1.5239, 1.9867, 1.8188, 1.7421, 1.8473, 1.9000], + device='cuda:0'), covar=tensor([0.0699, 0.1568, 0.1130, 0.0809, 0.1047, 0.0473, 0.0783, 0.0559], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0365, 0.0280, 0.0247, 0.0313, 0.0256, 0.0270, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:46:48,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 7.308e+02 8.619e+02 1.083e+03 2.930e+03, threshold=1.724e+03, percent-clipped=3.0 +2023-04-01 02:46:57,730 INFO [train.py:903] (0/4) Epoch 4, batch 2550, loss[loss=0.2929, simple_loss=0.3583, pruned_loss=0.1137, over 19789.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.3596, pruned_loss=0.1265, over 3824909.65 frames. ], batch size: 56, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:47:18,940 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-04-01 02:47:20,074 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 02:47:43,520 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-04-01 02:47:49,704 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 02:47:57,382 INFO [train.py:903] (0/4) Epoch 4, batch 2600, loss[loss=0.2917, simple_loss=0.3406, pruned_loss=0.1213, over 19791.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3596, pruned_loss=0.1266, over 3829727.75 frames. ], batch size: 49, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:48:16,538 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. 
limit=5.0 +2023-04-01 02:48:20,333 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5699, 1.0714, 1.2360, 1.7793, 1.3948, 1.6431, 1.9093, 1.5827], + device='cuda:0'), covar=tensor([0.0843, 0.1205, 0.1229, 0.1009, 0.1077, 0.0797, 0.0856, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0265, 0.0259, 0.0294, 0.0300, 0.0244, 0.0262, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 02:48:29,108 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7036, 1.6948, 1.8151, 1.9831, 4.1965, 1.2429, 2.2570, 4.3147], + device='cuda:0'), covar=tensor([0.0238, 0.2251, 0.2367, 0.1465, 0.0441, 0.2230, 0.1248, 0.0289], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0304, 0.0302, 0.0276, 0.0290, 0.0315, 0.0279, 0.0288], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 02:48:33,813 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23116.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:36,188 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:42,577 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.04 vs. limit=5.0 +2023-04-01 02:48:46,631 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:48,301 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+02 6.946e+02 8.555e+02 1.074e+03 2.756e+03, threshold=1.711e+03, percent-clipped=6.0 +2023-04-01 02:48:58,041 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 02:48:58,589 INFO [train.py:903] (0/4) Epoch 4, batch 2650, loss[loss=0.2745, simple_loss=0.3318, pruned_loss=0.1086, over 19386.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3589, pruned_loss=0.1259, over 3839529.30 frames. ], batch size: 48, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:49:08,302 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:17,081 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 02:49:21,041 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.95 vs. limit=5.0 +2023-04-01 02:49:22,933 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:43,849 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-01 02:49:58,047 INFO [train.py:903] (0/4) Epoch 4, batch 2700, loss[loss=0.2949, simple_loss=0.357, pruned_loss=0.1164, over 19683.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3586, pruned_loss=0.1254, over 3840806.07 frames. 
], batch size: 59, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:00,479 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23187.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:50:48,019 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.628e+02 7.002e+02 8.947e+02 1.091e+03 2.361e+03, threshold=1.789e+03, percent-clipped=7.0 +2023-04-01 02:50:57,145 INFO [train.py:903] (0/4) Epoch 4, batch 2750, loss[loss=0.2727, simple_loss=0.3431, pruned_loss=0.1012, over 19793.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.359, pruned_loss=0.1256, over 3838940.20 frames. ], batch size: 56, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:57,412 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:11,551 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:24,729 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2977, 1.1991, 1.8896, 1.3962, 2.3891, 2.2929, 2.6460, 0.9733], + device='cuda:0'), covar=tensor([0.1566, 0.2697, 0.1360, 0.1361, 0.1020, 0.1199, 0.1130, 0.2366], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0481, 0.0448, 0.0397, 0.0521, 0.0424, 0.0602, 0.0429], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 02:51:33,819 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-01 02:51:39,835 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8830, 4.2802, 4.5873, 4.5300, 1.5092, 4.1630, 3.7780, 4.1883], + device='cuda:0'), covar=tensor([0.0687, 0.0468, 0.0408, 0.0326, 0.3578, 0.0256, 0.0376, 0.0891], + device='cuda:0'), in_proj_covar=tensor([0.0435, 0.0391, 0.0525, 0.0411, 0.0531, 0.0298, 0.0344, 0.0498], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 02:51:41,053 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:56,446 INFO [train.py:903] (0/4) Epoch 4, batch 2800, loss[loss=0.249, simple_loss=0.3078, pruned_loss=0.09514, over 19284.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3575, pruned_loss=0.1245, over 3838758.42 frames. ], batch size: 44, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:52:17,025 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:52:45,199 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+02 7.909e+02 1.044e+03 1.347e+03 2.323e+03, threshold=2.087e+03, percent-clipped=7.0 +2023-04-01 02:52:56,802 INFO [train.py:903] (0/4) Epoch 4, batch 2850, loss[loss=0.3148, simple_loss=0.3552, pruned_loss=0.1372, over 19298.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3578, pruned_loss=0.125, over 3834127.51 frames. 
], batch size: 44, lr: 1.99e-02, grad_scale: 8.0 +2023-04-01 02:53:41,821 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23372.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:53:42,859 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4640, 3.8580, 4.0375, 4.0077, 1.4282, 3.6957, 3.1949, 3.5738], + device='cuda:0'), covar=tensor([0.0815, 0.0564, 0.0554, 0.0401, 0.3718, 0.0361, 0.0519, 0.1164], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0395, 0.0530, 0.0415, 0.0529, 0.0297, 0.0349, 0.0506], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 02:53:56,271 INFO [train.py:903] (0/4) Epoch 4, batch 2900, loss[loss=0.2593, simple_loss=0.3243, pruned_loss=0.09722, over 19830.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3567, pruned_loss=0.1242, over 3835338.63 frames. ], batch size: 49, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:53:56,292 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 02:54:10,032 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:54:45,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 7.852e+02 1.023e+03 1.284e+03 2.319e+03, threshold=2.047e+03, percent-clipped=2.0 +2023-04-01 02:54:53,634 INFO [train.py:903] (0/4) Epoch 4, batch 2950, loss[loss=0.2823, simple_loss=0.3425, pruned_loss=0.111, over 19758.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3581, pruned_loss=0.1252, over 3832371.70 frames. ], batch size: 54, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:35,105 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:55:52,223 INFO [train.py:903] (0/4) Epoch 4, batch 3000, loss[loss=0.2851, simple_loss=0.3496, pruned_loss=0.1103, over 19590.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3567, pruned_loss=0.1239, over 3830742.31 frames. ], batch size: 57, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:52,224 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 02:56:05,135 INFO [train.py:937] (0/4) Epoch 4, validation: loss=0.2145, simple_loss=0.3118, pruned_loss=0.05862, over 944034.00 frames. +2023-04-01 02:56:05,136 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 02:56:09,790 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 02:56:32,363 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,208 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23527.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,886 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+02 6.234e+02 7.977e+02 1.046e+03 2.333e+03, threshold=1.595e+03, percent-clipped=2.0 +2023-04-01 02:57:05,046 INFO [train.py:903] (0/4) Epoch 4, batch 3050, loss[loss=0.2401, simple_loss=0.3042, pruned_loss=0.08794, over 19757.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3582, pruned_loss=0.1245, over 3826443.00 frames. 
], batch size: 46, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:57:26,963 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23552.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:28,443 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 02:57:33,673 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:53,409 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.01 vs. limit=5.0 +2023-04-01 02:57:58,283 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:04,138 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:06,132 INFO [train.py:903] (0/4) Epoch 4, batch 3100, loss[loss=0.2349, simple_loss=0.3002, pruned_loss=0.08484, over 16407.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3566, pruned_loss=0.1233, over 3815242.64 frames. ], batch size: 36, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:58:06,485 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:13,277 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:55,942 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.915e+02 8.546e+02 1.092e+03 2.878e+03, threshold=1.709e+03, percent-clipped=7.0 +2023-04-01 02:59:03,925 INFO [train.py:903] (0/4) Epoch 4, batch 3150, loss[loss=0.3385, simple_loss=0.4006, pruned_loss=0.1382, over 19371.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3577, pruned_loss=0.1245, over 3801547.81 frames. ], batch size: 70, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:59:28,011 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 03:00:02,537 INFO [train.py:903] (0/4) Epoch 4, batch 3200, loss[loss=0.2542, simple_loss=0.3149, pruned_loss=0.09678, over 19405.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3587, pruned_loss=0.1252, over 3817771.11 frames. ], batch size: 48, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:00:11,296 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 03:00:13,184 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23694.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:29,404 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:53,668 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 7.379e+02 9.197e+02 1.143e+03 1.957e+03, threshold=1.839e+03, percent-clipped=5.0 +2023-04-01 03:01:02,105 INFO [train.py:903] (0/4) Epoch 4, batch 3250, loss[loss=0.3393, simple_loss=0.3826, pruned_loss=0.148, over 17668.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3593, pruned_loss=0.126, over 3803560.20 frames. ], batch size: 101, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:01,901 INFO [train.py:903] (0/4) Epoch 4, batch 3300, loss[loss=0.3276, simple_loss=0.3747, pruned_loss=0.1402, over 19663.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3575, pruned_loss=0.1251, over 3814808.89 frames. 
], batch size: 58, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:04,049 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 03:02:54,104 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 7.772e+02 9.614e+02 1.210e+03 2.492e+03, threshold=1.923e+03, percent-clipped=5.0 +2023-04-01 03:03:02,120 INFO [train.py:903] (0/4) Epoch 4, batch 3350, loss[loss=0.2692, simple_loss=0.3289, pruned_loss=0.1048, over 19628.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.357, pruned_loss=0.1243, over 3811970.61 frames. ], batch size: 50, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:03:09,291 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:19,200 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:40,431 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:41,830 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 03:04:01,843 INFO [train.py:903] (0/4) Epoch 4, batch 3400, loss[loss=0.3049, simple_loss=0.3564, pruned_loss=0.1267, over 19579.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3552, pruned_loss=0.1219, over 3818485.13 frames. ], batch size: 52, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:04:42,896 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8304, 1.8684, 2.2066, 2.9748, 2.1741, 2.7937, 2.2382, 2.7349], + device='cuda:0'), covar=tensor([0.0634, 0.1705, 0.1075, 0.0721, 0.1141, 0.0347, 0.0845, 0.0501], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0373, 0.0280, 0.0245, 0.0312, 0.0259, 0.0271, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:04:53,666 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.081e+02 7.350e+02 9.318e+02 1.202e+03 2.145e+03, threshold=1.864e+03, percent-clipped=3.0 +2023-04-01 03:05:01,728 INFO [train.py:903] (0/4) Epoch 4, batch 3450, loss[loss=0.3137, simple_loss=0.3703, pruned_loss=0.1286, over 19329.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.356, pruned_loss=0.1226, over 3814321.49 frames. ], batch size: 70, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:05:01,755 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 03:05:22,668 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:36,148 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:39,417 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:50,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:04,982 INFO [train.py:903] (0/4) Epoch 4, batch 3500, loss[loss=0.3126, simple_loss=0.363, pruned_loss=0.131, over 18805.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3574, pruned_loss=0.124, over 3815799.85 frames. 
], batch size: 74, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:06:07,779 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:22,278 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-24000.pt +2023-04-01 03:06:58,145 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+02 7.196e+02 8.612e+02 1.120e+03 2.630e+03, threshold=1.722e+03, percent-clipped=5.0 +2023-04-01 03:07:06,277 INFO [train.py:903] (0/4) Epoch 4, batch 3550, loss[loss=0.3369, simple_loss=0.3859, pruned_loss=0.144, over 19572.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3563, pruned_loss=0.1233, over 3829407.34 frames. ], batch size: 61, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:07:28,425 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:08:05,265 INFO [train.py:903] (0/4) Epoch 4, batch 3600, loss[loss=0.3536, simple_loss=0.4094, pruned_loss=0.1489, over 19732.00 frames. ], tot_loss[loss=0.3037, simple_loss=0.3576, pruned_loss=0.1249, over 3828935.01 frames. ], batch size: 63, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:08:56,915 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+02 7.148e+02 8.733e+02 1.077e+03 2.339e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 03:09:04,849 INFO [train.py:903] (0/4) Epoch 4, batch 3650, loss[loss=0.337, simple_loss=0.384, pruned_loss=0.145, over 18748.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3567, pruned_loss=0.1242, over 3837657.12 frames. ], batch size: 74, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:10:05,500 INFO [train.py:903] (0/4) Epoch 4, batch 3700, loss[loss=0.2673, simple_loss=0.333, pruned_loss=0.1008, over 19686.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3548, pruned_loss=0.1231, over 3831429.24 frames. ], batch size: 53, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:10:48,446 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:10:58,949 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.207e+02 9.640e+02 1.134e+03 2.323e+03, threshold=1.928e+03, percent-clipped=6.0 +2023-04-01 03:11:05,386 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6108, 1.6106, 1.7715, 2.3868, 4.1035, 1.3995, 2.1459, 4.2157], + device='cuda:0'), covar=tensor([0.0307, 0.2455, 0.2311, 0.1252, 0.0442, 0.2169, 0.1348, 0.0278], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0308, 0.0300, 0.0278, 0.0293, 0.0314, 0.0282, 0.0288], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:11:07,279 INFO [train.py:903] (0/4) Epoch 4, batch 3750, loss[loss=0.3058, simple_loss=0.3704, pruned_loss=0.1206, over 19562.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.3551, pruned_loss=0.123, over 3827834.00 frames. ], batch size: 61, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:11:20,323 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:11:20,612 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 03:12:07,328 INFO [train.py:903] (0/4) Epoch 4, batch 3800, loss[loss=0.2695, simple_loss=0.3206, pruned_loss=0.1092, over 19742.00 frames. 
], tot_loss[loss=0.2994, simple_loss=0.3541, pruned_loss=0.1223, over 3826624.73 frames. ], batch size: 45, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:12:38,483 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 03:13:00,196 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.239e+02 7.507e+02 9.076e+02 1.248e+03 3.254e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-04-01 03:13:07,202 INFO [train.py:903] (0/4) Epoch 4, batch 3850, loss[loss=0.2187, simple_loss=0.2845, pruned_loss=0.07643, over 19738.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3535, pruned_loss=0.1223, over 3835676.77 frames. ], batch size: 46, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:13:12,285 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 03:14:06,613 INFO [train.py:903] (0/4) Epoch 4, batch 3900, loss[loss=0.3028, simple_loss=0.3679, pruned_loss=0.1189, over 19507.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3543, pruned_loss=0.1225, over 3829780.51 frames. ], batch size: 64, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:14:25,899 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:14:28,371 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1344, 1.2295, 1.0171, 0.9448, 0.8915, 1.0973, 0.0809, 0.4408], + device='cuda:0'), covar=tensor([0.0271, 0.0235, 0.0159, 0.0187, 0.0525, 0.0194, 0.0435, 0.0394], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0271, 0.0268, 0.0294, 0.0357, 0.0286, 0.0276, 0.0286], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 03:14:46,519 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1862, 2.0492, 1.5556, 1.4060, 1.9952, 1.0198, 1.0778, 1.6723], + device='cuda:0'), covar=tensor([0.0673, 0.0377, 0.0704, 0.0495, 0.0339, 0.0931, 0.0576, 0.0348], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0249, 0.0306, 0.0240, 0.0216, 0.0308, 0.0280, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:15:00,797 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+02 8.238e+02 9.729e+02 1.230e+03 4.971e+03, threshold=1.946e+03, percent-clipped=9.0 +2023-04-01 03:15:09,421 INFO [train.py:903] (0/4) Epoch 4, batch 3950, loss[loss=0.3706, simple_loss=0.401, pruned_loss=0.1701, over 13081.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3543, pruned_loss=0.122, over 3820031.08 frames. ], batch size: 136, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:15:17,167 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 03:16:10,702 INFO [train.py:903] (0/4) Epoch 4, batch 4000, loss[loss=0.3239, simple_loss=0.3748, pruned_loss=0.1365, over 19298.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3534, pruned_loss=0.121, over 3823523.10 frames. ], batch size: 66, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:16:44,991 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24514.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:16:58,863 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 03:17:03,421 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.288e+02 6.850e+02 8.525e+02 1.041e+03 2.187e+03, threshold=1.705e+03, percent-clipped=1.0 +2023-04-01 03:17:09,918 INFO [train.py:903] (0/4) Epoch 4, batch 4050, loss[loss=0.3466, simple_loss=0.3902, pruned_loss=0.1515, over 19682.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3554, pruned_loss=0.1224, over 3828205.45 frames. ], batch size: 58, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:17:47,517 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24565.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:10,053 INFO [train.py:903] (0/4) Epoch 4, batch 4100, loss[loss=0.2862, simple_loss=0.3483, pruned_loss=0.1121, over 19677.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3542, pruned_loss=0.1217, over 3831769.32 frames. ], batch size: 55, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:18:12,616 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:49,640 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 03:19:04,403 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+02 6.813e+02 8.989e+02 1.047e+03 2.179e+03, threshold=1.798e+03, percent-clipped=2.0 +2023-04-01 03:19:12,691 INFO [train.py:903] (0/4) Epoch 4, batch 4150, loss[loss=0.2689, simple_loss=0.3426, pruned_loss=0.09759, over 19532.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3539, pruned_loss=0.1217, over 3831720.21 frames. ], batch size: 54, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:13,435 INFO [train.py:903] (0/4) Epoch 4, batch 4200, loss[loss=0.2321, simple_loss=0.2955, pruned_loss=0.08437, over 19717.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3536, pruned_loss=0.1205, over 3844835.35 frames. ], batch size: 45, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:19,920 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 03:21:05,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 7.220e+02 8.850e+02 1.090e+03 2.101e+03, threshold=1.770e+03, percent-clipped=3.0 +2023-04-01 03:21:12,813 INFO [train.py:903] (0/4) Epoch 4, batch 4250, loss[loss=0.2893, simple_loss=0.3502, pruned_loss=0.1142, over 19795.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3543, pruned_loss=0.1214, over 3840376.80 frames. ], batch size: 56, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:21:29,776 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 03:21:41,558 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 03:21:56,470 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:09,998 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2354, 2.1052, 1.7407, 1.6685, 1.4952, 1.7010, 0.4089, 1.0995], + device='cuda:0'), covar=tensor([0.0247, 0.0239, 0.0166, 0.0236, 0.0479, 0.0301, 0.0463, 0.0409], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0274, 0.0265, 0.0291, 0.0354, 0.0278, 0.0269, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 03:22:10,847 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24783.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:12,643 INFO [train.py:903] (0/4) Epoch 4, batch 4300, loss[loss=0.3185, simple_loss=0.3704, pruned_loss=0.1333, over 19139.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3549, pruned_loss=0.1218, over 3824646.93 frames. ], batch size: 69, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:22:26,914 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:58,045 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 03:23:06,670 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.009e+02 7.093e+02 8.869e+02 1.163e+03 2.104e+03, threshold=1.774e+03, percent-clipped=1.0 +2023-04-01 03:23:08,973 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 03:23:14,538 INFO [train.py:903] (0/4) Epoch 4, batch 4350, loss[loss=0.3109, simple_loss=0.3624, pruned_loss=0.1297, over 19733.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3545, pruned_loss=0.1218, over 3831777.76 frames. ], batch size: 51, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:23:52,196 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:05,092 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2939, 3.8890, 2.5350, 3.6146, 1.0095, 3.5153, 3.3832, 3.6578], + device='cuda:0'), covar=tensor([0.0639, 0.1145, 0.1729, 0.0689, 0.3894, 0.0904, 0.0700, 0.0846], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0302, 0.0356, 0.0280, 0.0356, 0.0306, 0.0266, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 03:24:15,073 INFO [train.py:903] (0/4) Epoch 4, batch 4400, loss[loss=0.3399, simple_loss=0.3886, pruned_loss=0.1456, over 17642.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3539, pruned_loss=0.1212, over 3827992.48 frames. ], batch size: 101, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:24:40,897 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 03:24:44,302 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:50,759 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-01 03:25:09,651 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.331e+02 8.078e+02 9.889e+02 1.280e+03 3.768e+03, threshold=1.978e+03, percent-clipped=10.0 +2023-04-01 03:25:12,010 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:25:16,497 INFO [train.py:903] (0/4) Epoch 4, batch 4450, loss[loss=0.3351, simple_loss=0.386, pruned_loss=0.1421, over 19494.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3543, pruned_loss=0.1214, over 3830332.44 frames. ], batch size: 64, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:17,090 INFO [train.py:903] (0/4) Epoch 4, batch 4500, loss[loss=0.3064, simple_loss=0.369, pruned_loss=0.1219, over 19662.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3546, pruned_loss=0.1214, over 3839765.16 frames. ], batch size: 60, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:52,422 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:04,622 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:11,028 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.968e+02 6.473e+02 7.865e+02 1.057e+03 2.211e+03, threshold=1.573e+03, percent-clipped=1.0 +2023-04-01 03:27:16,558 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9238, 0.8168, 0.7761, 1.0799, 0.8249, 0.9418, 1.0433, 0.8972], + device='cuda:0'), covar=tensor([0.0636, 0.0903, 0.0915, 0.0620, 0.0750, 0.0726, 0.0723, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0269, 0.0255, 0.0293, 0.0293, 0.0249, 0.0256, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 03:27:18,546 INFO [train.py:903] (0/4) Epoch 4, batch 4550, loss[loss=0.2696, simple_loss=0.3366, pruned_loss=0.1013, over 19854.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.3533, pruned_loss=0.1205, over 3820960.01 frames. ], batch size: 52, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:27:27,109 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 03:27:31,943 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:50,578 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 03:28:18,971 INFO [train.py:903] (0/4) Epoch 4, batch 4600, loss[loss=0.2973, simple_loss=0.3595, pruned_loss=0.1176, over 19687.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3556, pruned_loss=0.1223, over 3830800.19 frames. ], batch size: 59, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:29:10,640 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:29:12,893 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+02 7.418e+02 9.211e+02 1.176e+03 2.853e+03, threshold=1.842e+03, percent-clipped=7.0 +2023-04-01 03:29:20,333 INFO [train.py:903] (0/4) Epoch 4, batch 4650, loss[loss=0.3381, simple_loss=0.3559, pruned_loss=0.1602, over 19769.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3555, pruned_loss=0.1222, over 3820687.13 frames. 
], batch size: 47, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:29:37,195 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 03:29:37,785 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 03:29:46,695 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 03:30:19,351 INFO [train.py:903] (0/4) Epoch 4, batch 4700, loss[loss=0.2698, simple_loss=0.3293, pruned_loss=0.1051, over 19411.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3564, pruned_loss=0.1238, over 3818511.98 frames. ], batch size: 48, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:30:42,836 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 03:30:50,627 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:13,778 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+02 7.587e+02 9.394e+02 1.259e+03 3.233e+03, threshold=1.879e+03, percent-clipped=11.0 +2023-04-01 03:31:21,429 INFO [train.py:903] (0/4) Epoch 4, batch 4750, loss[loss=0.3316, simple_loss=0.3908, pruned_loss=0.1362, over 19684.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3574, pruned_loss=0.1236, over 3825177.37 frames. ], batch size: 60, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:31:30,697 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25242.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:15,372 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:21,436 INFO [train.py:903] (0/4) Epoch 4, batch 4800, loss[loss=0.2885, simple_loss=0.338, pruned_loss=0.1195, over 19580.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3556, pruned_loss=0.1226, over 3838612.22 frames. ], batch size: 52, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:32:41,259 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:44,297 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:09,536 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:11,901 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:13,790 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+02 7.095e+02 8.715e+02 1.261e+03 2.828e+03, threshold=1.743e+03, percent-clipped=4.0 +2023-04-01 03:33:21,568 INFO [train.py:903] (0/4) Epoch 4, batch 4850, loss[loss=0.3444, simple_loss=0.3956, pruned_loss=0.1466, over 19663.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.357, pruned_loss=0.1233, over 3821961.51 frames. 
], batch size: 58, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:33:22,957 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8355, 1.8217, 2.2125, 2.8752, 1.8887, 2.7791, 2.8568, 2.5760], + device='cuda:0'), covar=tensor([0.0682, 0.1165, 0.1016, 0.1091, 0.1322, 0.0770, 0.0927, 0.0727], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0263, 0.0253, 0.0293, 0.0289, 0.0247, 0.0255, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 03:33:45,829 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 03:33:48,272 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:34:04,841 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 03:34:11,320 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 03:34:12,497 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 03:34:21,580 INFO [train.py:903] (0/4) Epoch 4, batch 4900, loss[loss=0.2573, simple_loss=0.3107, pruned_loss=0.102, over 19807.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3556, pruned_loss=0.1225, over 3825889.90 frames. ], batch size: 48, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:34:21,593 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 03:34:41,692 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 03:35:16,120 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.171e+02 7.094e+02 8.660e+02 1.069e+03 1.655e+03, threshold=1.732e+03, percent-clipped=0.0 +2023-04-01 03:35:23,612 INFO [train.py:903] (0/4) Epoch 4, batch 4950, loss[loss=0.2975, simple_loss=0.3642, pruned_loss=0.1154, over 19369.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3556, pruned_loss=0.1224, over 3823772.25 frames. ], batch size: 66, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:35:37,177 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25446.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:35:41,273 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 03:36:04,439 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 03:36:08,196 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:24,198 INFO [train.py:903] (0/4) Epoch 4, batch 5000, loss[loss=0.2931, simple_loss=0.353, pruned_loss=0.1166, over 19670.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3555, pruned_loss=0.1223, over 3825799.46 frames. ], batch size: 55, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:36:33,155 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 03:36:40,153 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:44,426 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 03:36:44,672 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8036, 1.2298, 1.4299, 1.6420, 2.4689, 1.2919, 1.7398, 2.4122], + device='cuda:0'), covar=tensor([0.0467, 0.2387, 0.2211, 0.1332, 0.0584, 0.1868, 0.1338, 0.0565], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0306, 0.0306, 0.0280, 0.0297, 0.0316, 0.0283, 0.0290], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:36:50,730 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 03:37:10,957 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25523.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:37:17,567 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.823e+02 8.824e+02 1.078e+03 2.588e+03, threshold=1.765e+03, percent-clipped=9.0 +2023-04-01 03:37:24,345 INFO [train.py:903] (0/4) Epoch 4, batch 5050, loss[loss=0.3066, simple_loss=0.3528, pruned_loss=0.1302, over 19816.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3554, pruned_loss=0.1221, over 3827848.07 frames. ], batch size: 49, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:02,129 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 03:38:21,779 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:38:25,842 INFO [train.py:903] (0/4) Epoch 4, batch 5100, loss[loss=0.404, simple_loss=0.43, pruned_loss=0.189, over 13835.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3547, pruned_loss=0.1209, over 3827156.78 frames. ], batch size: 135, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:33,129 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.85 vs. limit=5.0 +2023-04-01 03:38:36,820 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 03:38:40,932 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 03:38:44,399 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-01 03:38:52,400 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:38:55,771 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3508, 1.1924, 1.4616, 1.2485, 2.6503, 3.4585, 3.4160, 3.7045], + device='cuda:0'), covar=tensor([0.1378, 0.2757, 0.2790, 0.1866, 0.0429, 0.0124, 0.0201, 0.0104], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0278, 0.0317, 0.0256, 0.0194, 0.0113, 0.0202, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 03:39:01,138 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7621, 1.4835, 1.5995, 1.7468, 3.2366, 1.0886, 1.9970, 3.3484], + device='cuda:0'), covar=tensor([0.0296, 0.2290, 0.2218, 0.1422, 0.0485, 0.2300, 0.1281, 0.0383], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0305, 0.0303, 0.0278, 0.0299, 0.0313, 0.0281, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:39:19,438 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.903e+02 6.354e+02 8.688e+02 1.168e+03 2.387e+03, threshold=1.738e+03, percent-clipped=4.0 +2023-04-01 03:39:27,222 INFO [train.py:903] (0/4) Epoch 4, batch 5150, loss[loss=0.2788, simple_loss=0.3459, pruned_loss=0.1058, over 18752.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3557, pruned_loss=0.1213, over 3824835.92 frames. ], batch size: 74, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:39:39,295 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 03:40:12,881 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 03:40:28,104 INFO [train.py:903] (0/4) Epoch 4, batch 5200, loss[loss=0.293, simple_loss=0.3493, pruned_loss=0.1184, over 19598.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.356, pruned_loss=0.1221, over 3829019.86 frames. ], batch size: 52, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:40:42,782 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 03:41:21,034 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:41:21,774 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 7.304e+02 9.145e+02 1.165e+03 2.884e+03, threshold=1.829e+03, percent-clipped=6.0 +2023-04-01 03:41:25,406 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 03:41:28,709 INFO [train.py:903] (0/4) Epoch 4, batch 5250, loss[loss=0.2993, simple_loss=0.3678, pruned_loss=0.1154, over 19542.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3552, pruned_loss=0.121, over 3823943.11 frames. ], batch size: 54, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:41:51,406 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:42:30,030 INFO [train.py:903] (0/4) Epoch 4, batch 5300, loss[loss=0.2442, simple_loss=0.3064, pruned_loss=0.09095, over 19745.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3547, pruned_loss=0.1206, over 3827317.84 frames. 
], batch size: 45, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:42:35,917 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25790.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:42:49,035 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 03:43:11,855 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 03:43:23,319 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 7.857e+02 9.756e+02 1.201e+03 3.803e+03, threshold=1.951e+03, percent-clipped=8.0 +2023-04-01 03:43:31,863 INFO [train.py:903] (0/4) Epoch 4, batch 5350, loss[loss=0.275, simple_loss=0.3213, pruned_loss=0.1144, over 19024.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3543, pruned_loss=0.1211, over 3839357.51 frames. ], batch size: 42, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:04,171 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 03:44:21,907 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25876.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:44:32,189 INFO [train.py:903] (0/4) Epoch 4, batch 5400, loss[loss=0.3103, simple_loss=0.3658, pruned_loss=0.1274, over 19529.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3553, pruned_loss=0.1221, over 3826137.61 frames. ], batch size: 54, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:50,041 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5759, 1.3078, 1.2681, 1.8895, 1.5293, 1.8802, 2.0784, 1.6835], + device='cuda:0'), covar=tensor([0.0890, 0.1059, 0.1179, 0.1056, 0.1128, 0.0746, 0.0967, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0260, 0.0251, 0.0284, 0.0287, 0.0242, 0.0250, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 03:44:56,707 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:45:13,667 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 03:45:22,512 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8523, 1.9339, 1.8616, 2.7736, 1.8946, 2.5038, 2.4745, 1.6785], + device='cuda:0'), covar=tensor([0.1783, 0.1355, 0.0822, 0.0784, 0.1552, 0.0538, 0.1426, 0.1409], + device='cuda:0'), in_proj_covar=tensor([0.0574, 0.0553, 0.0520, 0.0716, 0.0623, 0.0476, 0.0624, 0.0535], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 03:45:26,555 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.180e+02 6.939e+02 8.636e+02 1.056e+03 2.577e+03, threshold=1.727e+03, percent-clipped=2.0 +2023-04-01 03:45:33,333 INFO [train.py:903] (0/4) Epoch 4, batch 5450, loss[loss=0.3229, simple_loss=0.3739, pruned_loss=0.136, over 19723.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3542, pruned_loss=0.1211, over 3844527.22 frames. ], batch size: 63, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:46:32,923 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 03:46:34,627 INFO [train.py:903] (0/4) Epoch 4, batch 5500, loss[loss=0.2949, simple_loss=0.3628, pruned_loss=0.1135, over 19708.00 frames. 
], tot_loss[loss=0.2976, simple_loss=0.3538, pruned_loss=0.1207, over 3832183.37 frames. ], batch size: 63, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:46:53,477 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-26000.pt +2023-04-01 03:46:58,090 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 03:47:31,758 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.568e+02 9.013e+02 1.115e+03 1.816e+03, threshold=1.803e+03, percent-clipped=1.0 +2023-04-01 03:47:37,475 INFO [train.py:903] (0/4) Epoch 4, batch 5550, loss[loss=0.2516, simple_loss=0.3276, pruned_loss=0.08782, over 19775.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3528, pruned_loss=0.1197, over 3832404.31 frames. ], batch size: 54, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:47:45,444 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 03:47:46,216 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-04-01 03:48:33,399 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 03:48:38,944 INFO [train.py:903] (0/4) Epoch 4, batch 5600, loss[loss=0.3239, simple_loss=0.3742, pruned_loss=0.1369, over 17327.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3535, pruned_loss=0.121, over 3817834.60 frames. ], batch size: 101, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:48:42,550 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4138, 1.9880, 1.8411, 2.6436, 2.0748, 2.6034, 2.8556, 2.8249], + device='cuda:0'), covar=tensor([0.0758, 0.1033, 0.1045, 0.0958, 0.1007, 0.0713, 0.0920, 0.0572], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0258, 0.0250, 0.0285, 0.0285, 0.0238, 0.0251, 0.0233], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 03:48:45,930 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5264, 1.5352, 2.1257, 2.4375, 2.2710, 2.3972, 2.0731, 2.3934], + device='cuda:0'), covar=tensor([0.0671, 0.1757, 0.1107, 0.0771, 0.1010, 0.0372, 0.0827, 0.0515], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0366, 0.0284, 0.0242, 0.0307, 0.0255, 0.0270, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:49:06,273 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-01 03:49:14,931 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4075, 1.2446, 1.0216, 1.2852, 1.1415, 1.2780, 1.0150, 1.2500], + device='cuda:0'), covar=tensor([0.0886, 0.1017, 0.1349, 0.0828, 0.0982, 0.0519, 0.0978, 0.0729], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0366, 0.0286, 0.0242, 0.0307, 0.0255, 0.0271, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 03:49:22,517 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:49:34,176 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+02 7.669e+02 9.359e+02 1.114e+03 3.409e+03, threshold=1.872e+03, percent-clipped=3.0 +2023-04-01 03:49:36,813 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7806, 4.2230, 4.3510, 4.3203, 1.5727, 3.9196, 3.5719, 3.9974], + device='cuda:0'), covar=tensor([0.0821, 0.0518, 0.0474, 0.0378, 0.3916, 0.0333, 0.0473, 0.0913], + device='cuda:0'), in_proj_covar=tensor([0.0460, 0.0408, 0.0539, 0.0438, 0.0537, 0.0315, 0.0357, 0.0510], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 03:49:40,064 INFO [train.py:903] (0/4) Epoch 4, batch 5650, loss[loss=0.2838, simple_loss=0.3358, pruned_loss=0.1159, over 19620.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3526, pruned_loss=0.1205, over 3817420.65 frames. ], batch size: 50, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:12,597 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26161.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:50:25,371 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 03:50:38,348 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4706, 1.5125, 1.4978, 2.0919, 1.4878, 1.7406, 1.8406, 1.3681], + device='cuda:0'), covar=tensor([0.1430, 0.0988, 0.0662, 0.0504, 0.1089, 0.0477, 0.1197, 0.1118], + device='cuda:0'), in_proj_covar=tensor([0.0577, 0.0556, 0.0517, 0.0711, 0.0622, 0.0475, 0.0629, 0.0536], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 03:50:40,061 INFO [train.py:903] (0/4) Epoch 4, batch 5700, loss[loss=0.3673, simple_loss=0.4038, pruned_loss=0.1654, over 19659.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3528, pruned_loss=0.1208, over 3817185.92 frames. ], batch size: 58, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:42,167 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:50:42,270 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26186.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:12,083 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. 
limit=5.0 +2023-04-01 03:51:23,017 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26220.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:51:35,339 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.235e+02 7.760e+02 9.507e+02 1.157e+03 2.773e+03, threshold=1.901e+03, percent-clipped=5.0 +2023-04-01 03:51:39,939 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 03:51:40,944 INFO [train.py:903] (0/4) Epoch 4, batch 5750, loss[loss=0.3454, simple_loss=0.3941, pruned_loss=0.1484, over 17591.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3533, pruned_loss=0.1212, over 3820276.58 frames. ], batch size: 101, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:51:48,679 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 03:51:52,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 03:52:02,105 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 03:52:20,371 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:52:40,302 INFO [train.py:903] (0/4) Epoch 4, batch 5800, loss[loss=0.3612, simple_loss=0.3958, pruned_loss=0.1633, over 13247.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3541, pruned_loss=0.1223, over 3833241.76 frames. ], batch size: 137, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:52:44,287 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 03:53:36,644 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.787e+02 8.542e+02 1.114e+03 2.576e+03, threshold=1.708e+03, percent-clipped=4.0 +2023-04-01 03:53:41,254 INFO [train.py:903] (0/4) Epoch 4, batch 5850, loss[loss=0.303, simple_loss=0.3588, pruned_loss=0.1236, over 19766.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3534, pruned_loss=0.1214, over 3830760.55 frames. ], batch size: 54, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:53:41,584 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26335.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:54:40,799 INFO [train.py:903] (0/4) Epoch 4, batch 5900, loss[loss=0.2841, simple_loss=0.351, pruned_loss=0.1086, over 19688.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3528, pruned_loss=0.1209, over 3820920.44 frames. ], batch size: 60, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:54:41,856 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 03:55:03,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 03:55:37,697 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+02 6.673e+02 8.574e+02 1.115e+03 3.080e+03, threshold=1.715e+03, percent-clipped=4.0 +2023-04-01 03:55:39,799 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:55:43,006 INFO [train.py:903] (0/4) Epoch 4, batch 5950, loss[loss=0.286, simple_loss=0.3487, pruned_loss=0.1116, over 19382.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3516, pruned_loss=0.1203, over 3831208.68 frames. 
], batch size: 70, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:56:17,508 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:56:43,853 INFO [train.py:903] (0/4) Epoch 4, batch 6000, loss[loss=0.2806, simple_loss=0.3484, pruned_loss=0.1063, over 17078.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3507, pruned_loss=0.1198, over 3820541.46 frames. ], batch size: 101, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:56:43,854 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 03:56:57,354 INFO [train.py:937] (0/4) Epoch 4, validation: loss=0.2103, simple_loss=0.3081, pruned_loss=0.05622, over 944034.00 frames. +2023-04-01 03:56:57,355 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 03:57:04,045 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 03:57:51,996 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:57:52,992 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+02 7.085e+02 8.722e+02 1.145e+03 2.334e+03, threshold=1.744e+03, percent-clipped=4.0 +2023-04-01 03:57:54,583 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1003, 1.1228, 1.4465, 1.2384, 1.9359, 1.7555, 2.0140, 0.5065], + device='cuda:0'), covar=tensor([0.1568, 0.2585, 0.1423, 0.1406, 0.0931, 0.1374, 0.0892, 0.2402], + device='cuda:0'), in_proj_covar=tensor([0.0431, 0.0485, 0.0453, 0.0403, 0.0529, 0.0431, 0.0602, 0.0429], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 03:57:55,656 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4820, 1.2974, 1.4458, 0.9545, 2.6946, 3.2165, 2.9711, 3.3698], + device='cuda:0'), covar=tensor([0.1220, 0.2688, 0.2747, 0.2082, 0.0359, 0.0127, 0.0225, 0.0120], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0282, 0.0316, 0.0258, 0.0196, 0.0114, 0.0201, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 03:57:57,465 INFO [train.py:903] (0/4) Epoch 4, batch 6050, loss[loss=0.2713, simple_loss=0.3281, pruned_loss=0.1073, over 19614.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3514, pruned_loss=0.12, over 3813706.27 frames. ], batch size: 50, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:58:49,947 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:58:57,306 INFO [train.py:903] (0/4) Epoch 4, batch 6100, loss[loss=0.2498, simple_loss=0.3115, pruned_loss=0.09399, over 19468.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.352, pruned_loss=0.1208, over 3796992.65 frames. 
], batch size: 49, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:59:04,716 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26591.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:59:29,873 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:59:34,651 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26616.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:59:51,598 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 6.768e+02 7.963e+02 1.032e+03 2.370e+03, threshold=1.593e+03, percent-clipped=5.0 +2023-04-01 03:59:56,256 INFO [train.py:903] (0/4) Epoch 4, batch 6150, loss[loss=0.267, simple_loss=0.3295, pruned_loss=0.1023, over 19862.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3529, pruned_loss=0.1208, over 3795416.12 frames. ], batch size: 52, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:00:08,621 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:00:23,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 04:00:54,796 INFO [train.py:903] (0/4) Epoch 4, batch 6200, loss[loss=0.2996, simple_loss=0.3615, pruned_loss=0.1189, over 19543.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3532, pruned_loss=0.121, over 3810278.98 frames. ], batch size: 56, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:01:45,232 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26727.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:01:51,166 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+02 7.340e+02 8.907e+02 1.200e+03 2.630e+03, threshold=1.781e+03, percent-clipped=5.0 +2023-04-01 04:01:55,635 INFO [train.py:903] (0/4) Epoch 4, batch 6250, loss[loss=0.2551, simple_loss=0.313, pruned_loss=0.09861, over 16908.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3535, pruned_loss=0.1217, over 3800926.31 frames. ], batch size: 37, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:02:24,768 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 04:02:44,566 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:02:55,315 INFO [train.py:903] (0/4) Epoch 4, batch 6300, loss[loss=0.3254, simple_loss=0.382, pruned_loss=0.1344, over 19523.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3536, pruned_loss=0.1212, over 3820141.44 frames. 
], batch size: 54, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:03:06,891 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4142, 2.3065, 2.0975, 3.3164, 2.3592, 3.6747, 3.1733, 2.1813], + device='cuda:0'), covar=tensor([0.1703, 0.1298, 0.0740, 0.0900, 0.1607, 0.0406, 0.1161, 0.1143], + device='cuda:0'), in_proj_covar=tensor([0.0584, 0.0567, 0.0531, 0.0731, 0.0632, 0.0483, 0.0636, 0.0539], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:03:49,878 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.898e+02 7.473e+02 9.358e+02 1.215e+03 3.413e+03, threshold=1.872e+03, percent-clipped=4.0 +2023-04-01 04:03:54,564 INFO [train.py:903] (0/4) Epoch 4, batch 6350, loss[loss=0.2715, simple_loss=0.3248, pruned_loss=0.1091, over 18669.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3549, pruned_loss=0.1216, over 3818788.94 frames. ], batch size: 41, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:03:54,968 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:26,060 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:55,432 INFO [train.py:903] (0/4) Epoch 4, batch 6400, loss[loss=0.3205, simple_loss=0.3794, pruned_loss=0.1308, over 19725.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3539, pruned_loss=0.1206, over 3823879.77 frames. ], batch size: 63, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:05:05,504 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26891.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:16,536 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:41,164 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26922.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:05:45,906 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:52,842 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+02 6.991e+02 8.678e+02 1.023e+03 2.915e+03, threshold=1.736e+03, percent-clipped=2.0 +2023-04-01 04:05:58,078 INFO [train.py:903] (0/4) Epoch 4, batch 6450, loss[loss=0.3224, simple_loss=0.3643, pruned_loss=0.1402, over 19588.00 frames. ], tot_loss[loss=0.2952, simple_loss=0.3519, pruned_loss=0.1193, over 3824607.43 frames. ], batch size: 52, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:06:36,874 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:39,715 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 04:06:56,677 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:58,602 INFO [train.py:903] (0/4) Epoch 4, batch 6500, loss[loss=0.3, simple_loss=0.3591, pruned_loss=0.1204, over 19736.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3519, pruned_loss=0.1196, over 3821405.89 frames. ], batch size: 63, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:07:03,006 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 04:07:25,281 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27008.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:07:42,483 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-01 04:07:53,581 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+02 7.867e+02 9.982e+02 1.245e+03 2.621e+03, threshold=1.996e+03, percent-clipped=6.0 +2023-04-01 04:07:57,612 INFO [train.py:903] (0/4) Epoch 4, batch 6550, loss[loss=0.3415, simple_loss=0.387, pruned_loss=0.148, over 19668.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3531, pruned_loss=0.1212, over 3830637.31 frames. ], batch size: 59, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:08:18,787 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4003, 2.1206, 1.5620, 1.5026, 2.0201, 1.0783, 1.1277, 1.6296], + device='cuda:0'), covar=tensor([0.0597, 0.0453, 0.0853, 0.0485, 0.0321, 0.0998, 0.0647, 0.0356], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0255, 0.0314, 0.0236, 0.0213, 0.0308, 0.0284, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:08:34,242 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4464, 1.3110, 1.5902, 1.2290, 2.7082, 3.4532, 3.3370, 3.7114], + device='cuda:0'), covar=tensor([0.1353, 0.2775, 0.2685, 0.1930, 0.0379, 0.0154, 0.0186, 0.0096], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0279, 0.0314, 0.0259, 0.0194, 0.0114, 0.0202, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 04:08:57,147 INFO [train.py:903] (0/4) Epoch 4, batch 6600, loss[loss=0.3072, simple_loss=0.3697, pruned_loss=0.1223, over 19601.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3518, pruned_loss=0.1201, over 3827811.62 frames. ], batch size: 57, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:09:48,364 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6050, 1.2855, 1.8946, 1.4465, 2.9408, 4.2356, 4.4123, 4.8713], + device='cuda:0'), covar=tensor([0.1342, 0.2847, 0.2538, 0.1854, 0.0398, 0.0132, 0.0146, 0.0079], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0277, 0.0314, 0.0257, 0.0194, 0.0114, 0.0199, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-04-01 04:09:53,358 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+02 7.635e+02 9.605e+02 1.185e+03 2.942e+03, threshold=1.921e+03, percent-clipped=6.0 +2023-04-01 04:09:58,812 INFO [train.py:903] (0/4) Epoch 4, batch 6650, loss[loss=0.2567, simple_loss=0.3182, pruned_loss=0.09759, over 19613.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3531, pruned_loss=0.1208, over 3822160.13 frames. 
], batch size: 50, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:10:13,141 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:27,427 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0174, 1.5680, 1.5290, 2.1529, 1.7559, 1.8859, 1.6443, 1.9639], + device='cuda:0'), covar=tensor([0.0725, 0.1532, 0.1220, 0.0689, 0.1076, 0.0421, 0.0937, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0367, 0.0279, 0.0238, 0.0304, 0.0253, 0.0270, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:10:35,294 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:42,750 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:58,824 INFO [train.py:903] (0/4) Epoch 4, batch 6700, loss[loss=0.2831, simple_loss=0.3457, pruned_loss=0.1103, over 19676.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3543, pruned_loss=0.1219, over 3822559.75 frames. ], batch size: 53, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:11:52,644 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.097e+02 7.210e+02 9.176e+02 1.266e+03 4.477e+03, threshold=1.835e+03, percent-clipped=7.0 +2023-04-01 04:11:57,026 INFO [train.py:903] (0/4) Epoch 4, batch 6750, loss[loss=0.2724, simple_loss=0.3342, pruned_loss=0.1053, over 19612.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3555, pruned_loss=0.1225, over 3810126.95 frames. ], batch size: 50, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:12:31,956 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27266.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:12:52,954 INFO [train.py:903] (0/4) Epoch 4, batch 6800, loss[loss=0.3137, simple_loss=0.3741, pruned_loss=0.1266, over 19661.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3553, pruned_loss=0.1225, over 3818227.78 frames. ], batch size: 58, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:13:21,175 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-4.pt +2023-04-01 04:13:37,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 04:13:37,988 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 04:13:40,505 INFO [train.py:903] (0/4) Epoch 5, batch 0, loss[loss=0.3105, simple_loss=0.3681, pruned_loss=0.1265, over 19743.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.3681, pruned_loss=0.1265, over 19743.00 frames. ], batch size: 63, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:13:40,505 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 04:13:52,268 INFO [train.py:937] (0/4) Epoch 5, validation: loss=0.2121, simple_loss=0.3102, pruned_loss=0.05704, over 944034.00 frames. 
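The recurring `WARNING [train.py:1073] Exclude cut with ID ... from training. Duration: ...` lines throughout this log come from a duration-based filter applied to the training cuts before batching: utterances longer than roughly 25 s and fragments under about 1 s are dropped to keep memory use bounded and avoid degenerate examples. A minimal sketch of such a filter is shown below, assuming lhotse-style `CutSet` objects; the function name and the exact duration bounds are illustrative guesses inferred from the warnings, not values taken from this run's configuration.

```python
# Illustrative duration filter in the spirit of the
# "Exclude cut with ID ... from training. Duration: ..." warnings above.
# Assumes lhotse is installed; the bounds are assumptions inferred from
# the logged exclusions, not the configuration of this training run.
import logging

from lhotse import CutSet

MIN_DURATION = 1.0   # seconds (assumed lower bound)
MAX_DURATION = 25.0  # seconds (assumed upper bound)


def remove_short_and_long_utt(cuts: CutSet) -> CutSet:
    """Drop cuts whose duration falls outside the allowed range."""

    def keep(cut) -> bool:
        if cut.duration < MIN_DURATION or cut.duration > MAX_DURATION:
            logging.warning(
                f"Exclude cut with ID {cut.id} from training. "
                f"Duration: {cut.duration}"
            )
            return False
        return True

    # CutSet.filter returns a lazily filtered CutSet, so the exclusions
    # are logged as batches are drawn rather than up front.
    return cuts.filter(keep)
```

Filtering at the cut level, rather than truncating audio, keeps the surviving training examples intact; each augmented copy of a cut (the `_sp0.9`, `_sp1.1`, and `_rvb` variants seen in the warnings) is checked against the same bounds, which is why closely related IDs appear several times in the log.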
+2023-04-01 04:13:52,269 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 04:13:52,407 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:13:55,669 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5892, 4.2059, 2.5719, 3.8282, 1.2790, 3.9287, 3.7847, 3.9840], + device='cuda:0'), covar=tensor([0.0563, 0.1020, 0.1732, 0.0674, 0.3582, 0.0691, 0.0685, 0.0780], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0303, 0.0352, 0.0280, 0.0347, 0.0293, 0.0263, 0.0297], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 04:14:04,636 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 04:14:13,183 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 04:14:16,053 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.218e+02 7.861e+02 9.859e+02 1.236e+03 2.711e+03, threshold=1.972e+03, percent-clipped=3.0 +2023-04-01 04:14:52,474 INFO [train.py:903] (0/4) Epoch 5, batch 50, loss[loss=0.2489, simple_loss=0.3167, pruned_loss=0.09053, over 19836.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3529, pruned_loss=0.1203, over 869766.43 frames. ], batch size: 52, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:15:15,084 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27381.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:15:22,381 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 04:15:26,076 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 04:15:26,406 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1268, 0.9638, 1.0425, 1.4986, 1.2129, 1.2013, 1.2868, 1.2365], + device='cuda:0'), covar=tensor([0.0965, 0.1256, 0.1234, 0.0719, 0.0854, 0.0946, 0.0920, 0.0851], + device='cuda:0'), in_proj_covar=tensor([0.0238, 0.0254, 0.0250, 0.0288, 0.0280, 0.0233, 0.0247, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 04:15:53,859 INFO [train.py:903] (0/4) Epoch 5, batch 100, loss[loss=0.2809, simple_loss=0.346, pruned_loss=0.1079, over 19626.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3487, pruned_loss=0.1167, over 1523886.11 frames. ], batch size: 61, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:16:05,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 04:16:11,123 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:16:15,199 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.953e+02 8.679e+02 1.081e+03 2.199e+03, threshold=1.736e+03, percent-clipped=1.0 +2023-04-01 04:16:53,736 INFO [train.py:903] (0/4) Epoch 5, batch 150, loss[loss=0.2876, simple_loss=0.3417, pruned_loss=0.1167, over 19719.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3472, pruned_loss=0.116, over 2049724.30 frames. 
], batch size: 51, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:52,379 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:17:54,460 INFO [train.py:903] (0/4) Epoch 5, batch 200, loss[loss=0.3076, simple_loss=0.3657, pruned_loss=0.1247, over 19655.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.35, pruned_loss=0.1184, over 2433482.82 frames. ], batch size: 55, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:54,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 04:18:05,479 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.84 vs. limit=5.0 +2023-04-01 04:18:19,387 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+02 6.870e+02 8.382e+02 1.064e+03 2.606e+03, threshold=1.676e+03, percent-clipped=3.0 +2023-04-01 04:18:40,863 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8646, 1.4554, 1.5631, 2.1078, 1.9206, 1.8279, 2.1149, 1.9031], + device='cuda:0'), covar=tensor([0.0560, 0.0847, 0.0855, 0.0718, 0.0734, 0.0713, 0.0693, 0.0578], + device='cuda:0'), in_proj_covar=tensor([0.0238, 0.0254, 0.0250, 0.0289, 0.0283, 0.0234, 0.0245, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 04:18:43,141 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2688, 2.0823, 1.5722, 1.4403, 1.9298, 1.0365, 1.1731, 1.5793], + device='cuda:0'), covar=tensor([0.0643, 0.0487, 0.0859, 0.0463, 0.0372, 0.0994, 0.0555, 0.0364], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0260, 0.0322, 0.0241, 0.0221, 0.0312, 0.0283, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:18:56,503 INFO [train.py:903] (0/4) Epoch 5, batch 250, loss[loss=0.289, simple_loss=0.3518, pruned_loss=0.1131, over 19659.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3506, pruned_loss=0.118, over 2749329.30 frames. ], batch size: 58, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:19:51,634 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 04:19:58,944 INFO [train.py:903] (0/4) Epoch 5, batch 300, loss[loss=0.301, simple_loss=0.3693, pruned_loss=0.1163, over 19680.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3504, pruned_loss=0.1181, over 2988419.38 frames. ], batch size: 59, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:20:15,393 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:20:22,963 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+02 6.581e+02 8.607e+02 1.103e+03 1.922e+03, threshold=1.721e+03, percent-clipped=6.0 +2023-04-01 04:20:28,958 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27637.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:20:43,959 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 04:21:01,020 INFO [train.py:903] (0/4) Epoch 5, batch 350, loss[loss=0.3139, simple_loss=0.3806, pruned_loss=0.1235, over 19705.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3491, pruned_loss=0.1175, over 3185611.28 frames. 
], batch size: 59, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:21:01,367 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27662.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:21:07,197 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 04:21:11,013 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8634, 1.3655, 0.9820, 1.0107, 1.2194, 0.8618, 0.8606, 1.2238], + device='cuda:0'), covar=tensor([0.0440, 0.0535, 0.0870, 0.0426, 0.0370, 0.0955, 0.0517, 0.0314], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0258, 0.0313, 0.0238, 0.0217, 0.0314, 0.0282, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:21:26,386 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:21:58,719 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:22:02,931 INFO [train.py:903] (0/4) Epoch 5, batch 400, loss[loss=0.2833, simple_loss=0.3436, pruned_loss=0.1115, over 19674.00 frames. ], tot_loss[loss=0.2903, simple_loss=0.3479, pruned_loss=0.1164, over 3337691.28 frames. ], batch size: 60, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:22:27,976 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.948e+02 7.305e+02 9.017e+02 1.065e+03 1.815e+03, threshold=1.803e+03, percent-clipped=3.0 +2023-04-01 04:22:28,315 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27732.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:23:04,213 INFO [train.py:903] (0/4) Epoch 5, batch 450, loss[loss=0.2879, simple_loss=0.3505, pruned_loss=0.1126, over 19617.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3474, pruned_loss=0.1157, over 3446463.61 frames. ], batch size: 57, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:23:46,004 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 04:23:47,123 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 04:24:07,122 INFO [train.py:903] (0/4) Epoch 5, batch 500, loss[loss=0.2766, simple_loss=0.3345, pruned_loss=0.1094, over 19805.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3469, pruned_loss=0.1151, over 3546593.16 frames. ], batch size: 49, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:24:29,747 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4185, 1.0357, 1.2053, 1.2842, 2.0598, 1.1252, 1.7961, 2.0968], + device='cuda:0'), covar=tensor([0.0599, 0.2676, 0.2511, 0.1446, 0.0776, 0.1787, 0.0951, 0.0661], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0310, 0.0309, 0.0285, 0.0304, 0.0316, 0.0283, 0.0297], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:24:31,792 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+02 6.150e+02 8.318e+02 1.057e+03 1.987e+03, threshold=1.664e+03, percent-clipped=1.0 +2023-04-01 04:25:11,321 INFO [train.py:903] (0/4) Epoch 5, batch 550, loss[loss=0.3533, simple_loss=0.3968, pruned_loss=0.1549, over 19539.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3481, pruned_loss=0.1156, over 3616510.61 frames. 
], batch size: 54, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:25:35,615 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:07,938 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:14,562 INFO [train.py:903] (0/4) Epoch 5, batch 600, loss[loss=0.3217, simple_loss=0.3821, pruned_loss=0.1306, over 19500.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3463, pruned_loss=0.1141, over 3676515.85 frames. ], batch size: 64, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:26:38,769 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.595e+02 8.388e+02 1.023e+03 2.578e+03, threshold=1.678e+03, percent-clipped=3.0 +2023-04-01 04:27:02,826 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 04:27:17,957 INFO [train.py:903] (0/4) Epoch 5, batch 650, loss[loss=0.3042, simple_loss=0.3685, pruned_loss=0.12, over 19486.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3472, pruned_loss=0.1147, over 3718564.89 frames. ], batch size: 64, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:27:35,648 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7672, 1.8882, 1.7566, 2.6062, 1.6660, 2.3672, 2.2830, 1.7406], + device='cuda:0'), covar=tensor([0.1575, 0.1255, 0.0770, 0.0618, 0.1469, 0.0531, 0.1268, 0.1218], + device='cuda:0'), in_proj_covar=tensor([0.0593, 0.0576, 0.0533, 0.0733, 0.0639, 0.0494, 0.0649, 0.0549], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:27:36,863 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1816, 2.4232, 2.1378, 3.5797, 2.3228, 3.7066, 3.3254, 2.2417], + device='cuda:0'), covar=tensor([0.1918, 0.1330, 0.0732, 0.0763, 0.1764, 0.0429, 0.1075, 0.1139], + device='cuda:0'), in_proj_covar=tensor([0.0592, 0.0575, 0.0532, 0.0733, 0.0638, 0.0493, 0.0648, 0.0548], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:28:05,333 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-28000.pt +2023-04-01 04:28:20,088 INFO [train.py:903] (0/4) Epoch 5, batch 700, loss[loss=0.3808, simple_loss=0.4172, pruned_loss=0.1722, over 18848.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3499, pruned_loss=0.1164, over 3746228.73 frames. ], batch size: 74, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:47,045 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 7.486e+02 9.333e+02 1.140e+03 2.488e+03, threshold=1.867e+03, percent-clipped=5.0 +2023-04-01 04:28:48,570 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0661, 3.4503, 3.6345, 3.6013, 1.4828, 3.2787, 2.9679, 3.2668], + device='cuda:0'), covar=tensor([0.1099, 0.0946, 0.0591, 0.0525, 0.3845, 0.0539, 0.0595, 0.1085], + device='cuda:0'), in_proj_covar=tensor([0.0477, 0.0426, 0.0567, 0.0450, 0.0552, 0.0323, 0.0367, 0.0525], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 04:29:25,959 INFO [train.py:903] (0/4) Epoch 5, batch 750, loss[loss=0.2616, simple_loss=0.3135, pruned_loss=0.1049, over 19369.00 frames. 
], tot_loss[loss=0.2916, simple_loss=0.3494, pruned_loss=0.1169, over 3745790.62 frames. ], batch size: 47, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:29:43,610 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28076.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:30:27,878 INFO [train.py:903] (0/4) Epoch 5, batch 800, loss[loss=0.2572, simple_loss=0.3155, pruned_loss=0.09949, over 19402.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3488, pruned_loss=0.1164, over 3769501.47 frames. ], batch size: 48, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:30:48,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 04:30:52,981 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.190e+02 6.427e+02 8.567e+02 1.032e+03 2.729e+03, threshold=1.713e+03, percent-clipped=3.0 +2023-04-01 04:31:32,201 INFO [train.py:903] (0/4) Epoch 5, batch 850, loss[loss=0.305, simple_loss=0.3653, pruned_loss=0.1223, over 19658.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3484, pruned_loss=0.1163, over 3779897.05 frames. ], batch size: 58, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:31:55,401 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9746, 2.0102, 1.7643, 2.7777, 1.7629, 2.5573, 2.5649, 1.8991], + device='cuda:0'), covar=tensor([0.1680, 0.1264, 0.0774, 0.0824, 0.1636, 0.0565, 0.1275, 0.1173], + device='cuda:0'), in_proj_covar=tensor([0.0590, 0.0574, 0.0530, 0.0733, 0.0633, 0.0496, 0.0639, 0.0544], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:32:08,717 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28191.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:32:29,192 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 04:32:33,625 INFO [train.py:903] (0/4) Epoch 5, batch 900, loss[loss=0.2655, simple_loss=0.3393, pruned_loss=0.09585, over 19662.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.348, pruned_loss=0.1158, over 3794012.63 frames. ], batch size: 58, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:32:59,335 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+02 6.938e+02 8.196e+02 1.125e+03 2.658e+03, threshold=1.639e+03, percent-clipped=4.0 +2023-04-01 04:33:17,411 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:33:36,689 INFO [train.py:903] (0/4) Epoch 5, batch 950, loss[loss=0.2932, simple_loss=0.3591, pruned_loss=0.1136, over 19288.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3486, pruned_loss=0.1166, over 3787412.55 frames. ], batch size: 70, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:33:42,366 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 04:33:49,413 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28273.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:34:28,478 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:34:36,190 INFO [train.py:903] (0/4) Epoch 5, batch 1000, loss[loss=0.2746, simple_loss=0.3388, pruned_loss=0.1052, over 19831.00 frames. 
], tot_loss[loss=0.2915, simple_loss=0.349, pruned_loss=0.117, over 3792616.61 frames. ], batch size: 52, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:34:51,484 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.93 vs. limit=5.0 +2023-04-01 04:34:59,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 7.143e+02 8.888e+02 1.138e+03 2.880e+03, threshold=1.778e+03, percent-clipped=9.0 +2023-04-01 04:35:30,262 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 04:35:36,616 INFO [train.py:903] (0/4) Epoch 5, batch 1050, loss[loss=0.2889, simple_loss=0.3353, pruned_loss=0.1213, over 19709.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.349, pruned_loss=0.1173, over 3799515.43 frames. ], batch size: 45, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:36:09,501 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 04:36:36,350 INFO [train.py:903] (0/4) Epoch 5, batch 1100, loss[loss=0.3588, simple_loss=0.405, pruned_loss=0.1563, over 19525.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3485, pruned_loss=0.1172, over 3813379.45 frames. ], batch size: 54, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:36:56,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2162, 1.1930, 1.4312, 1.3238, 1.8175, 1.8652, 1.8936, 0.4491], + device='cuda:0'), covar=tensor([0.1849, 0.2944, 0.1716, 0.1472, 0.1143, 0.1535, 0.1115, 0.2849], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0495, 0.0467, 0.0408, 0.0538, 0.0439, 0.0616, 0.0430], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:37:01,573 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+02 6.917e+02 8.996e+02 1.190e+03 3.192e+03, threshold=1.799e+03, percent-clipped=6.0 +2023-04-01 04:37:19,643 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28447.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:37:37,666 INFO [train.py:903] (0/4) Epoch 5, batch 1150, loss[loss=0.3022, simple_loss=0.3611, pruned_loss=0.1217, over 19614.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3464, pruned_loss=0.115, over 3827959.53 frames. ], batch size: 57, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:50,278 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28472.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:38:11,081 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7895, 1.8923, 1.8533, 2.9047, 2.0918, 2.5928, 3.0533, 2.8570], + device='cuda:0'), covar=tensor([0.0651, 0.1013, 0.1102, 0.0948, 0.1050, 0.0724, 0.0899, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0254, 0.0245, 0.0279, 0.0280, 0.0237, 0.0242, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 04:38:29,188 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:36,122 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:38,083 INFO [train.py:903] (0/4) Epoch 5, batch 1200, loss[loss=0.2677, simple_loss=0.3284, pruned_loss=0.1035, over 19725.00 frames. 
], tot_loss[loss=0.289, simple_loss=0.3475, pruned_loss=0.1153, over 3835695.38 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:39:01,763 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.666e+02 7.999e+02 1.012e+03 1.920e+03, threshold=1.600e+03, percent-clipped=0.0 +2023-04-01 04:39:12,237 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 04:39:37,132 INFO [train.py:903] (0/4) Epoch 5, batch 1250, loss[loss=0.2976, simple_loss=0.3596, pruned_loss=0.1178, over 19402.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3482, pruned_loss=0.1157, over 3835710.92 frames. ], batch size: 66, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:13,037 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:40:37,856 INFO [train.py:903] (0/4) Epoch 5, batch 1300, loss[loss=0.2711, simple_loss=0.3428, pruned_loss=0.09967, over 19635.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.348, pruned_loss=0.1157, over 3832412.32 frames. ], batch size: 57, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:43,789 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28617.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:41:03,192 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 7.072e+02 8.596e+02 1.219e+03 1.879e+03, threshold=1.719e+03, percent-clipped=8.0 +2023-04-01 04:41:11,313 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9512, 2.8228, 1.7350, 2.1352, 1.7966, 2.0941, 0.6564, 2.1611], + device='cuda:0'), covar=tensor([0.0274, 0.0253, 0.0321, 0.0388, 0.0494, 0.0359, 0.0602, 0.0450], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0277, 0.0275, 0.0296, 0.0360, 0.0277, 0.0273, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 04:41:23,365 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:41:39,858 INFO [train.py:903] (0/4) Epoch 5, batch 1350, loss[loss=0.337, simple_loss=0.3765, pruned_loss=0.1488, over 13235.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3492, pruned_loss=0.1169, over 3803158.54 frames. ], batch size: 136, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:42:34,039 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:42:40,549 INFO [train.py:903] (0/4) Epoch 5, batch 1400, loss[loss=0.3283, simple_loss=0.3767, pruned_loss=0.1399, over 17588.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3494, pruned_loss=0.117, over 3801432.51 frames. 
], batch size: 101, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:01,030 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5345, 3.9484, 4.1822, 4.1392, 1.5472, 3.7636, 3.3865, 3.7335], + device='cuda:0'), covar=tensor([0.0940, 0.0626, 0.0470, 0.0408, 0.3648, 0.0375, 0.0526, 0.0931], + device='cuda:0'), in_proj_covar=tensor([0.0495, 0.0445, 0.0587, 0.0466, 0.0564, 0.0337, 0.0379, 0.0543], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 04:43:04,133 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 6.792e+02 9.169e+02 1.129e+03 1.829e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-04-01 04:43:04,516 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28732.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:43:40,836 INFO [train.py:903] (0/4) Epoch 5, batch 1450, loss[loss=0.2367, simple_loss=0.3009, pruned_loss=0.08624, over 19774.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3476, pruned_loss=0.1158, over 3805596.52 frames. ], batch size: 47, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:43,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 04:43:43,500 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28764.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:44:41,296 INFO [train.py:903] (0/4) Epoch 5, batch 1500, loss[loss=0.2887, simple_loss=0.3435, pruned_loss=0.1169, over 19829.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3469, pruned_loss=0.1152, over 3816649.33 frames. ], batch size: 52, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:44:55,667 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4897, 1.1857, 1.6021, 1.0492, 2.4713, 2.7454, 2.6754, 2.8837], + device='cuda:0'), covar=tensor([0.1089, 0.2624, 0.2445, 0.1898, 0.0442, 0.0304, 0.0246, 0.0163], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0281, 0.0320, 0.0256, 0.0197, 0.0116, 0.0206, 0.0138], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 04:45:06,145 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.912e+02 8.562e+02 1.022e+03 2.509e+03, threshold=1.712e+03, percent-clipped=1.0 +2023-04-01 04:45:24,295 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:31,721 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:32,286 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 04:45:42,352 INFO [train.py:903] (0/4) Epoch 5, batch 1550, loss[loss=0.2712, simple_loss=0.324, pruned_loss=0.1093, over 19314.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3458, pruned_loss=0.1147, over 3817385.99 frames. ], batch size: 44, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:46:02,329 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28879.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:46:41,474 INFO [train.py:903] (0/4) Epoch 5, batch 1600, loss[loss=0.3224, simple_loss=0.3709, pruned_loss=0.137, over 19511.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3473, pruned_loss=0.1157, over 3802716.21 frames. 
], batch size: 54, lr: 1.67e-02, grad_scale: 8.0 +2023-04-01 04:47:01,760 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8626, 1.9110, 1.8148, 2.6817, 1.7100, 2.4593, 2.4792, 1.7734], + device='cuda:0'), covar=tensor([0.1643, 0.1307, 0.0754, 0.0819, 0.1575, 0.0564, 0.1263, 0.1285], + device='cuda:0'), in_proj_covar=tensor([0.0605, 0.0591, 0.0544, 0.0751, 0.0651, 0.0507, 0.0656, 0.0561], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:47:04,538 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+02 6.984e+02 8.420e+02 1.102e+03 2.946e+03, threshold=1.684e+03, percent-clipped=4.0 +2023-04-01 04:47:04,565 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 04:47:41,583 INFO [train.py:903] (0/4) Epoch 5, batch 1650, loss[loss=0.3094, simple_loss=0.3661, pruned_loss=0.1264, over 18828.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3471, pruned_loss=0.1156, over 3812322.11 frames. ], batch size: 74, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:47:42,028 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:43,145 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28963.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:49,892 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:12,452 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:13,613 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28988.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:48:42,351 INFO [train.py:903] (0/4) Epoch 5, batch 1700, loss[loss=0.3343, simple_loss=0.3788, pruned_loss=0.1449, over 17310.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3463, pruned_loss=0.1154, over 3817670.04 frames. ], batch size: 101, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:48:43,895 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29013.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:48:51,927 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:52,070 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:08,766 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+02 6.699e+02 8.227e+02 1.083e+03 2.721e+03, threshold=1.645e+03, percent-clipped=4.0 +2023-04-01 04:49:16,951 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2218, 2.1881, 2.1709, 2.9890, 2.3831, 2.9687, 2.7354, 1.9956], + device='cuda:0'), covar=tensor([0.1401, 0.1105, 0.0596, 0.0635, 0.1136, 0.0365, 0.0946, 0.1010], + device='cuda:0'), in_proj_covar=tensor([0.0599, 0.0589, 0.0536, 0.0743, 0.0645, 0.0502, 0.0657, 0.0557], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:49:21,047 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 04:49:22,493 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:36,747 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-01 04:49:40,680 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9745, 2.0119, 1.8856, 2.7413, 1.7938, 2.7814, 2.5119, 1.8195], + device='cuda:0'), covar=tensor([0.1696, 0.1293, 0.0757, 0.0800, 0.1574, 0.0480, 0.1385, 0.1281], + device='cuda:0'), in_proj_covar=tensor([0.0594, 0.0584, 0.0531, 0.0737, 0.0640, 0.0498, 0.0651, 0.0554], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:49:42,474 INFO [train.py:903] (0/4) Epoch 5, batch 1750, loss[loss=0.284, simple_loss=0.3596, pruned_loss=0.1042, over 19291.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3451, pruned_loss=0.1144, over 3814772.84 frames. ], batch size: 66, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:26,370 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 04:50:43,924 INFO [train.py:903] (0/4) Epoch 5, batch 1800, loss[loss=0.2576, simple_loss=0.3325, pruned_loss=0.09142, over 19766.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3442, pruned_loss=0.1143, over 3815482.89 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:59,177 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0545, 1.1806, 1.5576, 0.8160, 2.4901, 2.8511, 2.6955, 3.0803], + device='cuda:0'), covar=tensor([0.1423, 0.2811, 0.2724, 0.2003, 0.0382, 0.0144, 0.0262, 0.0145], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0278, 0.0314, 0.0253, 0.0197, 0.0115, 0.0205, 0.0136], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 04:51:07,718 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.740e+02 6.050e+02 7.849e+02 1.011e+03 2.328e+03, threshold=1.570e+03, percent-clipped=7.0 +2023-04-01 04:51:39,754 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 04:51:44,191 INFO [train.py:903] (0/4) Epoch 5, batch 1850, loss[loss=0.2514, simple_loss=0.3181, pruned_loss=0.09233, over 19500.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3446, pruned_loss=0.1141, over 3807896.22 frames. ], batch size: 49, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:17,660 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 04:52:19,834 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 04:52:43,728 INFO [train.py:903] (0/4) Epoch 5, batch 1900, loss[loss=0.3243, simple_loss=0.3824, pruned_loss=0.1331, over 19699.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3464, pruned_loss=0.1153, over 3818231.57 frames. 
], batch size: 59, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:53,184 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29219.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:52:58,359 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:01,679 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:02,418 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 04:53:07,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 04:53:11,315 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+02 6.988e+02 9.073e+02 1.156e+03 1.890e+03, threshold=1.815e+03, percent-clipped=5.0 +2023-04-01 04:53:23,884 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:28,884 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 04:53:30,457 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:35,730 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:44,967 INFO [train.py:903] (0/4) Epoch 5, batch 1950, loss[loss=0.2492, simple_loss=0.3005, pruned_loss=0.09895, over 19751.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3467, pruned_loss=0.1157, over 3819574.64 frames. ], batch size: 46, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:54:46,885 INFO [train.py:903] (0/4) Epoch 5, batch 2000, loss[loss=0.2637, simple_loss=0.3229, pruned_loss=0.1022, over 19828.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3465, pruned_loss=0.1154, over 3805304.89 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:10,279 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.142e+02 7.102e+02 9.141e+02 1.135e+03 3.050e+03, threshold=1.828e+03, percent-clipped=2.0 +2023-04-01 04:55:10,958 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-01 04:55:16,857 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:43,166 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 04:55:46,566 INFO [train.py:903] (0/4) Epoch 5, batch 2050, loss[loss=0.2711, simple_loss=0.3391, pruned_loss=0.1015, over 19578.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3455, pruned_loss=0.1144, over 3802581.34 frames. ], batch size: 61, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:49,002 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:59,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 04:56:00,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-01 04:56:23,069 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 04:56:37,161 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1789, 1.2074, 1.5533, 1.2709, 2.1913, 1.9146, 2.3501, 0.7804], + device='cuda:0'), covar=tensor([0.1648, 0.2723, 0.1464, 0.1428, 0.1056, 0.1396, 0.1104, 0.2576], + device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0502, 0.0472, 0.0405, 0.0538, 0.0434, 0.0620, 0.0436], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 04:56:45,838 INFO [train.py:903] (0/4) Epoch 5, batch 2100, loss[loss=0.3063, simple_loss=0.3601, pruned_loss=0.1263, over 13473.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3481, pruned_loss=0.1162, over 3795799.01 frames. ], batch size: 136, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:57:07,818 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 04:57:12,304 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 7.145e+02 9.347e+02 1.270e+03 4.921e+03, threshold=1.869e+03, percent-clipped=10.0 +2023-04-01 04:57:13,529 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 04:57:20,913 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1587, 2.1065, 1.5188, 1.2266, 1.9421, 0.8394, 1.1743, 1.7254], + device='cuda:0'), covar=tensor([0.0616, 0.0339, 0.0696, 0.0530, 0.0298, 0.1017, 0.0489, 0.0267], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0260, 0.0306, 0.0236, 0.0212, 0.0306, 0.0280, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 04:57:34,281 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 04:57:46,686 INFO [train.py:903] (0/4) Epoch 5, batch 2150, loss[loss=0.2506, simple_loss=0.3074, pruned_loss=0.09697, over 19781.00 frames. ], tot_loss[loss=0.2916, simple_loss=0.3493, pruned_loss=0.117, over 3797948.58 frames. ], batch size: 47, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:58:08,901 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29479.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:58:48,389 INFO [train.py:903] (0/4) Epoch 5, batch 2200, loss[loss=0.2935, simple_loss=0.3532, pruned_loss=0.1168, over 17541.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.3494, pruned_loss=0.1171, over 3793764.44 frames. ], batch size: 101, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:11,936 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+02 6.760e+02 8.285e+02 1.117e+03 1.782e+03, threshold=1.657e+03, percent-clipped=0.0 +2023-04-01 04:59:47,491 INFO [train.py:903] (0/4) Epoch 5, batch 2250, loss[loss=0.3097, simple_loss=0.37, pruned_loss=0.1247, over 19544.00 frames. ], tot_loss[loss=0.2903, simple_loss=0.3484, pruned_loss=0.1161, over 3807165.73 frames. 
], batch size: 56, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:55,522 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:28,197 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:31,586 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5070, 1.1138, 1.1800, 1.6222, 1.3272, 1.5800, 1.7592, 1.4357], + device='cuda:0'), covar=tensor([0.0833, 0.1159, 0.1173, 0.0910, 0.0909, 0.0839, 0.0919, 0.0811], + device='cuda:0'), in_proj_covar=tensor([0.0234, 0.0253, 0.0245, 0.0283, 0.0277, 0.0230, 0.0240, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 05:00:32,399 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:48,024 INFO [train.py:903] (0/4) Epoch 5, batch 2300, loss[loss=0.2865, simple_loss=0.3357, pruned_loss=0.1186, over 19828.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3475, pruned_loss=0.1152, over 3816865.51 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 05:00:56,315 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29619.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:01:03,670 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 05:01:15,152 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.694e+02 6.380e+02 7.654e+02 9.572e+02 1.859e+03, threshold=1.531e+03, percent-clipped=2.0 +2023-04-01 05:01:48,834 INFO [train.py:903] (0/4) Epoch 5, batch 2350, loss[loss=0.3285, simple_loss=0.3691, pruned_loss=0.1439, over 13536.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3471, pruned_loss=0.1148, over 3800785.09 frames. ], batch size: 136, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:11,351 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29680.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:02:28,866 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 05:02:45,269 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 05:02:49,427 INFO [train.py:903] (0/4) Epoch 5, batch 2400, loss[loss=0.261, simple_loss=0.3324, pruned_loss=0.0948, over 19590.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3467, pruned_loss=0.1143, over 3807691.22 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:50,597 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:12,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.376e+02 6.843e+02 7.889e+02 1.103e+03 3.246e+03, threshold=1.578e+03, percent-clipped=5.0 +2023-04-01 05:03:15,272 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:46,774 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:48,490 INFO [train.py:903] (0/4) Epoch 5, batch 2450, loss[loss=0.3431, simple_loss=0.3765, pruned_loss=0.1548, over 18074.00 frames. 
], tot_loss[loss=0.2888, simple_loss=0.3477, pruned_loss=0.115, over 3821359.58 frames. ], batch size: 83, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:48,603 INFO [train.py:903] (0/4) Epoch 5, batch 2500, loss[loss=0.2989, simple_loss=0.3524, pruned_loss=0.1227, over 19586.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3475, pruned_loss=0.1145, over 3819633.86 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:57,774 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:05:14,587 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 6.790e+02 8.500e+02 1.086e+03 2.138e+03, threshold=1.700e+03, percent-clipped=3.0 +2023-04-01 05:05:44,700 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.58 vs. limit=5.0 +2023-04-01 05:05:48,271 INFO [train.py:903] (0/4) Epoch 5, batch 2550, loss[loss=0.3128, simple_loss=0.3669, pruned_loss=0.1293, over 19762.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3472, pruned_loss=0.1145, over 3818443.76 frames. ], batch size: 54, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:24,469 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 05:06:34,350 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4263, 1.4698, 2.0021, 1.5995, 3.0083, 2.5029, 2.9848, 1.3823], + device='cuda:0'), covar=tensor([0.1596, 0.2744, 0.1549, 0.1294, 0.1168, 0.1266, 0.1415, 0.2678], + device='cuda:0'), in_proj_covar=tensor([0.0439, 0.0504, 0.0472, 0.0403, 0.0546, 0.0435, 0.0620, 0.0437], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:06:40,389 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 05:06:48,588 INFO [train.py:903] (0/4) Epoch 5, batch 2600, loss[loss=0.2857, simple_loss=0.3529, pruned_loss=0.1093, over 19536.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3483, pruned_loss=0.1151, over 3821463.48 frames. ], batch size: 56, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:50,697 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:07:13,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.882e+02 9.175e+02 1.239e+03 1.828e+03, threshold=1.835e+03, percent-clipped=5.0 +2023-04-01 05:07:50,213 INFO [train.py:903] (0/4) Epoch 5, batch 2650, loss[loss=0.2624, simple_loss=0.3279, pruned_loss=0.09843, over 19599.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3471, pruned_loss=0.1142, over 3816453.55 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:07:58,269 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:08,357 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 05:08:29,809 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:36,443 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-30000.pt +2023-04-01 05:08:50,701 INFO [train.py:903] (0/4) Epoch 5, batch 2700, loss[loss=0.279, simple_loss=0.3511, pruned_loss=0.1034, over 19666.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3462, pruned_loss=0.114, over 3809458.75 frames. ], batch size: 55, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:09:04,697 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:10,274 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:17,306 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.255e+02 6.674e+02 8.375e+02 9.964e+02 1.932e+03, threshold=1.675e+03, percent-clipped=1.0 +2023-04-01 05:09:49,950 INFO [train.py:903] (0/4) Epoch 5, batch 2750, loss[loss=0.2557, simple_loss=0.3157, pruned_loss=0.09792, over 19481.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3446, pruned_loss=0.1131, over 3812991.49 frames. ], batch size: 49, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:10:20,401 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 05:10:50,796 INFO [train.py:903] (0/4) Epoch 5, batch 2800, loss[loss=0.3156, simple_loss=0.3746, pruned_loss=0.1283, over 19612.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3455, pruned_loss=0.1136, over 3809537.60 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:11:17,048 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.279e+02 7.041e+02 9.072e+02 1.191e+03 2.188e+03, threshold=1.814e+03, percent-clipped=6.0 +2023-04-01 05:11:22,801 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30139.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:27,364 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:51,983 INFO [train.py:903] (0/4) Epoch 5, batch 2850, loss[loss=0.339, simple_loss=0.3767, pruned_loss=0.1507, over 13278.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3446, pruned_loss=0.1134, over 3805378.40 frames. ], batch size: 136, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:11:54,299 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30164.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:12:17,089 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5855, 1.4401, 1.4292, 1.7271, 1.6095, 1.4764, 1.4463, 1.5247], + device='cuda:0'), covar=tensor([0.0681, 0.1026, 0.0944, 0.0544, 0.0732, 0.0427, 0.0745, 0.0488], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0371, 0.0287, 0.0246, 0.0309, 0.0255, 0.0276, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:12:51,657 INFO [train.py:903] (0/4) Epoch 5, batch 2900, loss[loss=0.2719, simple_loss=0.3423, pruned_loss=0.1008, over 19611.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3449, pruned_loss=0.1132, over 3808450.81 frames. 
], batch size: 57, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:12:51,692 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 05:12:53,196 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7352, 1.6057, 2.1218, 1.8696, 3.0575, 2.4519, 3.0932, 1.9770], + device='cuda:0'), covar=tensor([0.1514, 0.2617, 0.1499, 0.1324, 0.1004, 0.1314, 0.1183, 0.2190], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0508, 0.0480, 0.0408, 0.0559, 0.0443, 0.0630, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:13:09,289 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:13:14,407 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7500, 1.8334, 1.4977, 1.3298, 1.3386, 1.4811, 0.1145, 0.7930], + device='cuda:0'), covar=tensor([0.0242, 0.0222, 0.0168, 0.0233, 0.0542, 0.0241, 0.0480, 0.0409], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0281, 0.0280, 0.0301, 0.0365, 0.0289, 0.0275, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:13:20,144 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.023e+02 7.237e+02 9.091e+02 1.153e+03 2.755e+03, threshold=1.818e+03, percent-clipped=7.0 +2023-04-01 05:13:51,429 INFO [train.py:903] (0/4) Epoch 5, batch 2950, loss[loss=0.3084, simple_loss=0.3621, pruned_loss=0.1273, over 18854.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3457, pruned_loss=0.1136, over 3804197.01 frames. ], batch size: 74, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:12,808 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:18,203 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30284.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:46,074 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30309.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:50,603 INFO [train.py:903] (0/4) Epoch 5, batch 3000, loss[loss=0.2502, simple_loss=0.307, pruned_loss=0.09668, over 19351.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3462, pruned_loss=0.1148, over 3805226.02 frames. ], batch size: 47, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:50,604 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 05:15:03,127 INFO [train.py:937] (0/4) Epoch 5, validation: loss=0.2047, simple_loss=0.3034, pruned_loss=0.05296, over 944034.00 frames. +2023-04-01 05:15:03,128 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 05:15:05,715 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 05:15:33,552 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 7.155e+02 8.736e+02 1.085e+03 2.346e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 05:16:06,357 INFO [train.py:903] (0/4) Epoch 5, batch 3050, loss[loss=0.3837, simple_loss=0.4022, pruned_loss=0.1826, over 13340.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3461, pruned_loss=0.1141, over 3814815.78 frames. 
], batch size: 136, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:16:26,373 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:16:45,458 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30395.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:16:57,139 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7618, 1.5919, 1.7898, 1.9661, 4.1926, 1.1254, 2.2088, 4.2016], + device='cuda:0'), covar=tensor([0.0313, 0.2367, 0.2439, 0.1509, 0.0511, 0.2417, 0.1370, 0.0324], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0313, 0.0314, 0.0290, 0.0311, 0.0319, 0.0292, 0.0306], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:17:07,570 INFO [train.py:903] (0/4) Epoch 5, batch 3100, loss[loss=0.3571, simple_loss=0.3942, pruned_loss=0.16, over 19506.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3465, pruned_loss=0.1143, over 3812366.00 frames. ], batch size: 64, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:17:17,061 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30420.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:33,697 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+02 6.847e+02 8.274e+02 1.001e+03 3.134e+03, threshold=1.655e+03, percent-clipped=2.0 +2023-04-01 05:18:06,709 INFO [train.py:903] (0/4) Epoch 5, batch 3150, loss[loss=0.2443, simple_loss=0.3158, pruned_loss=0.0864, over 19586.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3461, pruned_loss=0.115, over 3803169.13 frames. ], batch size: 52, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:18:34,302 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 05:18:37,220 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30487.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:19:06,289 INFO [train.py:903] (0/4) Epoch 5, batch 3200, loss[loss=0.314, simple_loss=0.3613, pruned_loss=0.1333, over 13467.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3463, pruned_loss=0.1149, over 3799671.60 frames. ], batch size: 135, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:19:30,541 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-04-01 05:19:35,546 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 7.005e+02 8.588e+02 1.128e+03 3.335e+03, threshold=1.718e+03, percent-clipped=13.0 +2023-04-01 05:19:35,978 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30535.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:04,445 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:06,327 INFO [train.py:903] (0/4) Epoch 5, batch 3250, loss[loss=0.3371, simple_loss=0.3846, pruned_loss=0.1447, over 19530.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3462, pruned_loss=0.1149, over 3813924.97 frames. ], batch size: 54, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:20:19,561 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:35,356 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-01 05:20:55,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:59,221 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7880, 1.2637, 1.3153, 1.6822, 1.5120, 1.4844, 1.4009, 1.6293], + device='cuda:0'), covar=tensor([0.0790, 0.1253, 0.1241, 0.0803, 0.0936, 0.0470, 0.0912, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0364, 0.0278, 0.0235, 0.0301, 0.0245, 0.0266, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:21:08,680 INFO [train.py:903] (0/4) Epoch 5, batch 3300, loss[loss=0.3505, simple_loss=0.3958, pruned_loss=0.1526, over 19621.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3467, pruned_loss=0.115, over 3803473.17 frames. ], batch size: 60, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:21:16,490 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 05:21:35,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 6.699e+02 7.755e+02 9.198e+02 2.155e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 05:21:35,952 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1503, 1.9232, 1.4671, 1.2267, 1.7765, 0.9799, 1.0818, 1.6175], + device='cuda:0'), covar=tensor([0.0503, 0.0413, 0.0811, 0.0482, 0.0307, 0.0949, 0.0415, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0271, 0.0316, 0.0242, 0.0220, 0.0313, 0.0284, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:21:46,021 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1331, 2.2549, 2.0340, 3.3365, 2.0865, 3.4102, 3.0317, 2.0125], + device='cuda:0'), covar=tensor([0.2118, 0.1645, 0.0853, 0.0957, 0.2116, 0.0539, 0.1549, 0.1439], + device='cuda:0'), in_proj_covar=tensor([0.0618, 0.0615, 0.0552, 0.0770, 0.0663, 0.0527, 0.0676, 0.0574], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:22:09,093 INFO [train.py:903] (0/4) Epoch 5, batch 3350, loss[loss=0.2812, simple_loss=0.3496, pruned_loss=0.1064, over 19673.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3452, pruned_loss=0.1136, over 3806883.00 frames. ], batch size: 60, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:22:21,622 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:22:38,263 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:09,773 INFO [train.py:903] (0/4) Epoch 5, batch 3400, loss[loss=0.2587, simple_loss=0.3268, pruned_loss=0.09533, over 19668.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3455, pruned_loss=0.1143, over 3786438.40 frames. 
], batch size: 53, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:23:22,328 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:35,530 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8730, 1.5194, 1.5103, 2.0717, 1.8105, 1.5827, 1.6608, 1.8630], + device='cuda:0'), covar=tensor([0.0791, 0.1692, 0.1242, 0.0843, 0.1079, 0.0506, 0.0885, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0361, 0.0277, 0.0234, 0.0300, 0.0243, 0.0266, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:23:39,931 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+02 6.893e+02 8.724e+02 1.073e+03 2.213e+03, threshold=1.745e+03, percent-clipped=7.0 +2023-04-01 05:24:11,822 INFO [train.py:903] (0/4) Epoch 5, batch 3450, loss[loss=0.3161, simple_loss=0.3719, pruned_loss=0.1301, over 19361.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3461, pruned_loss=0.1146, over 3789220.74 frames. ], batch size: 66, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:24:15,158 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 05:24:27,413 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1122, 1.8830, 1.4235, 1.2293, 1.7165, 1.0510, 0.9878, 1.5825], + device='cuda:0'), covar=tensor([0.0538, 0.0422, 0.0752, 0.0482, 0.0285, 0.0796, 0.0498, 0.0265], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0265, 0.0310, 0.0240, 0.0218, 0.0306, 0.0279, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:25:04,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 05:25:13,755 INFO [train.py:903] (0/4) Epoch 5, batch 3500, loss[loss=0.2628, simple_loss=0.3412, pruned_loss=0.09218, over 19342.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.344, pruned_loss=0.1129, over 3797961.42 frames. ], batch size: 66, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:25:39,223 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 7.015e+02 8.195e+02 1.097e+03 2.546e+03, threshold=1.639e+03, percent-clipped=5.0 +2023-04-01 05:25:41,894 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:25:50,112 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9499, 1.2865, 0.9746, 0.8705, 1.1988, 0.9110, 0.7146, 1.2422], + device='cuda:0'), covar=tensor([0.0516, 0.0596, 0.0932, 0.0518, 0.0400, 0.0941, 0.0636, 0.0337], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0270, 0.0316, 0.0244, 0.0219, 0.0312, 0.0287, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:26:09,305 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30858.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:13,372 INFO [train.py:903] (0/4) Epoch 5, batch 3550, loss[loss=0.2835, simple_loss=0.3441, pruned_loss=0.1114, over 19297.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3458, pruned_loss=0.1139, over 3807538.47 frames. 
], batch size: 66, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:26:37,866 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:13,895 INFO [train.py:903] (0/4) Epoch 5, batch 3600, loss[loss=0.3155, simple_loss=0.3622, pruned_loss=0.1344, over 19785.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3461, pruned_loss=0.1138, over 3822350.32 frames. ], batch size: 56, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:27:24,174 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30921.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:33,968 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2788, 1.2361, 1.4722, 0.7677, 2.3102, 2.8822, 2.6568, 2.9574], + device='cuda:0'), covar=tensor([0.1245, 0.2782, 0.2823, 0.2026, 0.0420, 0.0172, 0.0245, 0.0160], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0280, 0.0318, 0.0252, 0.0196, 0.0119, 0.0205, 0.0136], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:27:43,240 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+02 6.769e+02 8.289e+02 1.066e+03 2.218e+03, threshold=1.658e+03, percent-clipped=4.0 +2023-04-01 05:27:51,217 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:52,165 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:02,186 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:13,490 INFO [train.py:903] (0/4) Epoch 5, batch 3650, loss[loss=0.3178, simple_loss=0.3689, pruned_loss=0.1334, over 19378.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.346, pruned_loss=0.1139, over 3816240.54 frames. ], batch size: 70, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:28:20,113 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:14,323 INFO [train.py:903] (0/4) Epoch 5, batch 3700, loss[loss=0.2991, simple_loss=0.354, pruned_loss=0.1221, over 19507.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3468, pruned_loss=0.1147, over 3820951.34 frames. 
], batch size: 64, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:29:21,016 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:27,635 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1534, 1.2490, 1.7453, 1.4553, 2.1039, 1.7826, 2.1325, 0.7708], + device='cuda:0'), covar=tensor([0.1845, 0.2878, 0.1479, 0.1448, 0.1153, 0.1648, 0.1369, 0.2816], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0504, 0.0481, 0.0406, 0.0549, 0.0442, 0.0625, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:29:34,011 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31029.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:29:40,640 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+02 7.099e+02 8.962e+02 1.140e+03 3.223e+03, threshold=1.792e+03, percent-clipped=9.0 +2023-04-01 05:30:15,308 INFO [train.py:903] (0/4) Epoch 5, batch 3750, loss[loss=0.2809, simple_loss=0.3482, pruned_loss=0.1068, over 19297.00 frames. ], tot_loss[loss=0.286, simple_loss=0.345, pruned_loss=0.1135, over 3820974.14 frames. ], batch size: 66, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:30:53,657 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:15,743 INFO [train.py:903] (0/4) Epoch 5, batch 3800, loss[loss=0.3161, simple_loss=0.3737, pruned_loss=0.1292, over 19331.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3451, pruned_loss=0.1133, over 3816063.96 frames. ], batch size: 70, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:31:22,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:40,724 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:44,605 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 5.856e+02 8.467e+02 1.115e+03 2.554e+03, threshold=1.693e+03, percent-clipped=5.0 +2023-04-01 05:31:46,154 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5026, 1.9325, 1.4782, 1.5266, 1.8248, 1.2371, 1.2991, 1.5559], + device='cuda:0'), covar=tensor([0.0499, 0.0396, 0.0596, 0.0402, 0.0262, 0.0648, 0.0428, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0271, 0.0316, 0.0240, 0.0221, 0.0312, 0.0283, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:31:49,130 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 05:32:15,367 INFO [train.py:903] (0/4) Epoch 5, batch 3850, loss[loss=0.2521, simple_loss=0.3112, pruned_loss=0.09647, over 19039.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3441, pruned_loss=0.1126, over 3807614.54 frames. ], batch size: 42, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:32:34,617 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-01 05:33:19,169 INFO [train.py:903] (0/4) Epoch 5, batch 3900, loss[loss=0.2309, simple_loss=0.2945, pruned_loss=0.0837, over 19791.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3428, pruned_loss=0.1116, over 3813112.51 frames. 
], batch size: 47, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:33:45,289 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.653e+02 6.709e+02 8.069e+02 1.055e+03 2.198e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 05:34:18,959 INFO [train.py:903] (0/4) Epoch 5, batch 3950, loss[loss=0.338, simple_loss=0.3897, pruned_loss=0.1432, over 19768.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3422, pruned_loss=0.1111, over 3821302.60 frames. ], batch size: 54, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:34:22,387 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:34:24,564 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 05:34:47,793 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:34:52,307 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8611, 1.8755, 1.8015, 2.8736, 1.9424, 2.6013, 2.6065, 1.8093], + device='cuda:0'), covar=tensor([0.2125, 0.1696, 0.0894, 0.0905, 0.1864, 0.0684, 0.1599, 0.1631], + device='cuda:0'), in_proj_covar=tensor([0.0622, 0.0613, 0.0553, 0.0767, 0.0659, 0.0529, 0.0676, 0.0580], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:35:00,482 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31296.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:18,263 INFO [train.py:903] (0/4) Epoch 5, batch 4000, loss[loss=0.3295, simple_loss=0.3778, pruned_loss=0.1406, over 19587.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3435, pruned_loss=0.1122, over 3822176.49 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:35:48,960 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+02 7.061e+02 8.767e+02 1.081e+03 2.366e+03, threshold=1.753e+03, percent-clipped=7.0 +2023-04-01 05:36:06,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 05:36:18,325 INFO [train.py:903] (0/4) Epoch 5, batch 4050, loss[loss=0.257, simple_loss=0.3173, pruned_loss=0.09833, over 19788.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3439, pruned_loss=0.1125, over 3811102.74 frames. 
], batch size: 48, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:36:26,545 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1702, 2.7727, 1.9032, 2.1287, 1.8087, 2.2451, 0.7252, 2.0487], + device='cuda:0'), covar=tensor([0.0272, 0.0231, 0.0293, 0.0384, 0.0471, 0.0402, 0.0579, 0.0483], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0280, 0.0282, 0.0301, 0.0375, 0.0293, 0.0277, 0.0292], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:36:33,369 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31373.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:36:42,165 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:36:51,300 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:04,926 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4055, 2.0526, 1.8279, 1.6676, 1.4530, 1.7660, 0.3343, 1.2594], + device='cuda:0'), covar=tensor([0.0225, 0.0268, 0.0191, 0.0306, 0.0531, 0.0297, 0.0511, 0.0383], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0282, 0.0283, 0.0303, 0.0377, 0.0294, 0.0278, 0.0292], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:37:07,163 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:18,594 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:19,401 INFO [train.py:903] (0/4) Epoch 5, batch 4100, loss[loss=0.2531, simple_loss=0.3228, pruned_loss=0.09167, over 19568.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.344, pruned_loss=0.1122, over 3812082.64 frames. ], batch size: 52, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:37:20,890 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:37,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:48,918 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.964e+02 7.397e+02 9.605e+02 3.908e+03, threshold=1.479e+03, percent-clipped=5.0 +2023-04-01 05:37:53,727 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 05:38:20,925 INFO [train.py:903] (0/4) Epoch 5, batch 4150, loss[loss=0.2722, simple_loss=0.3379, pruned_loss=0.1032, over 19326.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3438, pruned_loss=0.1118, over 3804753.61 frames. ], batch size: 66, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:38:26,188 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-04-01 05:38:50,977 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31488.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:39:16,850 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31509.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:39:20,061 INFO [train.py:903] (0/4) Epoch 5, batch 4200, loss[loss=0.3089, simple_loss=0.3588, pruned_loss=0.1295, over 19495.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3421, pruned_loss=0.1107, over 3818550.60 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:39:23,674 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 05:39:50,936 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.017e+02 6.856e+02 8.101e+02 1.045e+03 3.023e+03, threshold=1.620e+03, percent-clipped=6.0 +2023-04-01 05:40:15,021 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1381, 3.9624, 2.3595, 2.6180, 3.4605, 1.7373, 1.1993, 2.0181], + device='cuda:0'), covar=tensor([0.0778, 0.0270, 0.0626, 0.0492, 0.0255, 0.0893, 0.0853, 0.0507], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0266, 0.0315, 0.0235, 0.0217, 0.0309, 0.0282, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:40:19,241 INFO [train.py:903] (0/4) Epoch 5, batch 4250, loss[loss=0.36, simple_loss=0.3999, pruned_loss=0.16, over 17512.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3417, pruned_loss=0.1104, over 3814178.00 frames. ], batch size: 101, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:40:35,452 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 05:40:44,450 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9209, 1.9596, 1.8888, 2.8817, 1.9795, 2.6925, 2.5977, 1.8178], + device='cuda:0'), covar=tensor([0.1926, 0.1602, 0.0820, 0.0831, 0.1765, 0.0601, 0.1473, 0.1423], + device='cuda:0'), in_proj_covar=tensor([0.0619, 0.0612, 0.0553, 0.0768, 0.0656, 0.0529, 0.0681, 0.0579], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:40:46,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 05:41:20,811 INFO [train.py:903] (0/4) Epoch 5, batch 4300, loss[loss=0.2632, simple_loss=0.3352, pruned_loss=0.0956, over 18784.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3413, pruned_loss=0.1105, over 3805214.41 frames. 
], batch size: 74, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:41:35,094 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6873, 1.3626, 1.4526, 1.7424, 1.6125, 1.5634, 1.4655, 1.6434], + device='cuda:0'), covar=tensor([0.0856, 0.1591, 0.1231, 0.0825, 0.1063, 0.0508, 0.1024, 0.0654], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0363, 0.0277, 0.0233, 0.0300, 0.0240, 0.0266, 0.0226], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:41:51,080 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:41:51,772 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 7.144e+02 8.425e+02 1.091e+03 2.021e+03, threshold=1.685e+03, percent-clipped=5.0 +2023-04-01 05:42:13,881 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 05:42:18,582 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:21,711 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:22,335 INFO [train.py:903] (0/4) Epoch 5, batch 4350, loss[loss=0.3268, simple_loss=0.3696, pruned_loss=0.142, over 19852.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3416, pruned_loss=0.1109, over 3811683.91 frames. ], batch size: 52, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:42:28,415 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:39,497 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7764, 3.0913, 3.2436, 3.2479, 1.1474, 3.0211, 2.7490, 2.9262], + device='cuda:0'), covar=tensor([0.1050, 0.0790, 0.0798, 0.0602, 0.3789, 0.0490, 0.0658, 0.1279], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0443, 0.0590, 0.0490, 0.0575, 0.0358, 0.0388, 0.0553], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 05:42:45,917 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:53,936 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 05:42:57,763 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:03,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. limit=5.0 +2023-04-01 05:43:22,083 INFO [train.py:903] (0/4) Epoch 5, batch 4400, loss[loss=0.2299, simple_loss=0.2916, pruned_loss=0.08404, over 19345.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3423, pruned_loss=0.1118, over 3795660.72 frames. 
], batch size: 47, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:43:28,260 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6387, 2.5273, 1.8408, 1.8585, 1.7893, 2.0484, 0.9740, 1.9411], + device='cuda:0'), covar=tensor([0.0289, 0.0280, 0.0278, 0.0415, 0.0468, 0.0379, 0.0536, 0.0430], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0281, 0.0279, 0.0301, 0.0375, 0.0293, 0.0275, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:43:33,977 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:44,927 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 05:43:53,319 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.183e+02 7.191e+02 8.879e+02 1.082e+03 1.961e+03, threshold=1.776e+03, percent-clipped=1.0 +2023-04-01 05:43:55,417 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 05:44:02,701 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31744.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:44:22,696 INFO [train.py:903] (0/4) Epoch 5, batch 4450, loss[loss=0.3098, simple_loss=0.3623, pruned_loss=0.1287, over 19308.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.344, pruned_loss=0.1126, over 3809518.99 frames. ], batch size: 66, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:44:30,926 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31769.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:44:32,872 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:44:40,335 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8777, 1.9054, 1.9099, 3.0206, 1.9674, 2.9336, 2.8146, 1.9819], + device='cuda:0'), covar=tensor([0.2213, 0.1788, 0.0897, 0.0939, 0.2061, 0.0649, 0.1530, 0.1539], + device='cuda:0'), in_proj_covar=tensor([0.0622, 0.0613, 0.0554, 0.0770, 0.0664, 0.0533, 0.0678, 0.0584], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:45:16,689 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:23,178 INFO [train.py:903] (0/4) Epoch 5, batch 4500, loss[loss=0.2606, simple_loss=0.3176, pruned_loss=0.1018, over 19386.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3433, pruned_loss=0.1121, over 3818752.52 frames. ], batch size: 47, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:45:31,277 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-04-01 05:45:53,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.666e+02 8.776e+02 1.149e+03 2.550e+03, threshold=1.755e+03, percent-clipped=7.0 +2023-04-01 05:45:54,070 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9237, 1.9385, 1.4796, 1.4583, 1.3348, 1.4176, 0.4228, 1.1533], + device='cuda:0'), covar=tensor([0.0324, 0.0322, 0.0281, 0.0375, 0.0630, 0.0427, 0.0602, 0.0471], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0280, 0.0279, 0.0301, 0.0374, 0.0293, 0.0273, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:45:58,550 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:46:12,844 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:46:24,490 INFO [train.py:903] (0/4) Epoch 5, batch 4550, loss[loss=0.3264, simple_loss=0.3684, pruned_loss=0.1422, over 19680.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3441, pruned_loss=0.1129, over 3817643.72 frames. ], batch size: 59, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:46:30,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 05:46:51,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 05:46:52,338 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:04,882 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 05:47:05,643 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31896.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:25,666 INFO [train.py:903] (0/4) Epoch 5, batch 4600, loss[loss=0.2546, simple_loss=0.3163, pruned_loss=0.09647, over 19778.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3427, pruned_loss=0.1121, over 3808094.05 frames. ], batch size: 47, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:47:49,549 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:55,745 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 6.775e+02 7.918e+02 9.900e+02 3.222e+03, threshold=1.584e+03, percent-clipped=3.0 +2023-04-01 05:48:25,674 INFO [train.py:903] (0/4) Epoch 5, batch 4650, loss[loss=0.3316, simple_loss=0.3948, pruned_loss=0.1341, over 19532.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3439, pruned_loss=0.1126, over 3801359.44 frames. 
], batch size: 56, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:48:32,740 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:48:32,807 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31968.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:48:38,525 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1823, 1.1167, 1.4164, 1.3063, 1.7523, 1.7126, 1.7949, 0.4639], + device='cuda:0'), covar=tensor([0.1645, 0.2903, 0.1514, 0.1453, 0.1046, 0.1550, 0.1013, 0.2757], + device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0502, 0.0477, 0.0405, 0.0549, 0.0445, 0.0618, 0.0440], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:48:41,065 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 05:48:52,756 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 05:49:11,288 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-32000.pt +2023-04-01 05:49:24,516 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9770, 1.8734, 2.5161, 2.5418, 2.6163, 2.5770, 2.2385, 2.8624], + device='cuda:0'), covar=tensor([0.0598, 0.1656, 0.1015, 0.0807, 0.0966, 0.0382, 0.0856, 0.0419], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0361, 0.0279, 0.0237, 0.0299, 0.0239, 0.0265, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:49:25,340 INFO [train.py:903] (0/4) Epoch 5, batch 4700, loss[loss=0.2451, simple_loss=0.3085, pruned_loss=0.09084, over 19389.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3456, pruned_loss=0.1138, over 3797962.10 frames. ], batch size: 47, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:49:48,441 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 05:49:56,506 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.386e+02 6.540e+02 8.637e+02 1.061e+03 2.519e+03, threshold=1.727e+03, percent-clipped=5.0 +2023-04-01 05:50:07,060 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4850, 0.9888, 1.1838, 1.2467, 2.1242, 0.9571, 1.8825, 2.1201], + device='cuda:0'), covar=tensor([0.0561, 0.2554, 0.2412, 0.1459, 0.0741, 0.1898, 0.0906, 0.0603], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0312, 0.0310, 0.0282, 0.0299, 0.0310, 0.0285, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:50:27,317 INFO [train.py:903] (0/4) Epoch 5, batch 4750, loss[loss=0.3271, simple_loss=0.3796, pruned_loss=0.1373, over 19468.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3451, pruned_loss=0.1126, over 3807124.78 frames. 
], batch size: 64, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:50:32,825 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:50:38,990 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7144, 1.7536, 1.7379, 2.3977, 1.4491, 2.2222, 2.2367, 1.8003], + device='cuda:0'), covar=tensor([0.1859, 0.1550, 0.0860, 0.0841, 0.1888, 0.0693, 0.1558, 0.1449], + device='cuda:0'), in_proj_covar=tensor([0.0626, 0.0615, 0.0550, 0.0774, 0.0662, 0.0532, 0.0680, 0.0581], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:51:28,010 INFO [train.py:903] (0/4) Epoch 5, batch 4800, loss[loss=0.2863, simple_loss=0.3501, pruned_loss=0.1112, over 19291.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3447, pruned_loss=0.1125, over 3812084.94 frames. ], batch size: 66, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:51:57,642 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+02 7.051e+02 8.763e+02 1.042e+03 3.094e+03, threshold=1.753e+03, percent-clipped=4.0 +2023-04-01 05:52:04,790 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32142.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:14,227 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:27,936 INFO [train.py:903] (0/4) Epoch 5, batch 4850, loss[loss=0.3365, simple_loss=0.3924, pruned_loss=0.1403, over 19789.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.345, pruned_loss=0.1132, over 3808102.86 frames. ], batch size: 56, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:52:34,156 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:45,383 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8456, 1.8583, 1.7826, 2.7296, 1.7855, 2.6957, 2.4674, 1.8779], + device='cuda:0'), covar=tensor([0.2037, 0.1623, 0.0881, 0.0901, 0.1843, 0.0625, 0.1627, 0.1455], + device='cuda:0'), in_proj_covar=tensor([0.0625, 0.0614, 0.0550, 0.0770, 0.0659, 0.0533, 0.0679, 0.0578], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 05:52:51,460 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:54,108 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 05:52:56,527 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:53:12,906 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 05:53:17,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 05:53:18,683 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 05:53:26,876 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 05:53:27,946 INFO [train.py:903] (0/4) Epoch 5, batch 4900, loss[loss=0.2623, simple_loss=0.3342, pruned_loss=0.09516, over 19672.00 frames. 
], tot_loss[loss=0.2842, simple_loss=0.3441, pruned_loss=0.1121, over 3816462.20 frames. ], batch size: 55, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:53:44,427 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32224.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:53:48,965 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 05:53:59,194 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.307e+02 6.585e+02 7.912e+02 1.039e+03 2.328e+03, threshold=1.582e+03, percent-clipped=1.0 +2023-04-01 05:54:02,588 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:13,268 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32249.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:54:29,207 INFO [train.py:903] (0/4) Epoch 5, batch 4950, loss[loss=0.2544, simple_loss=0.3192, pruned_loss=0.09485, over 19731.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3424, pruned_loss=0.1108, over 3834760.16 frames. ], batch size: 51, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:54:33,924 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:48,143 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32277.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:49,213 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 05:55:12,831 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 05:55:15,535 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:30,488 INFO [train.py:903] (0/4) Epoch 5, batch 5000, loss[loss=0.3158, simple_loss=0.3624, pruned_loss=0.1346, over 19780.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3423, pruned_loss=0.1106, over 3841079.65 frames. ], batch size: 54, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:55:30,639 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:40,413 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 05:55:50,535 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 05:55:58,317 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+02 7.030e+02 8.789e+02 1.165e+03 2.289e+03, threshold=1.758e+03, percent-clipped=7.0 +2023-04-01 05:56:21,773 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:56:29,553 INFO [train.py:903] (0/4) Epoch 5, batch 5050, loss[loss=0.2821, simple_loss=0.3422, pruned_loss=0.111, over 18825.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3425, pruned_loss=0.1105, over 3843172.70 frames. ], batch size: 74, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:57:05,115 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-01 05:57:07,784 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:57:29,759 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 05:57:30,263 INFO [train.py:903] (0/4) Epoch 5, batch 5100, loss[loss=0.246, simple_loss=0.3234, pruned_loss=0.08426, over 19312.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3415, pruned_loss=0.11, over 3833550.38 frames. ], batch size: 66, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:57:33,080 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8318, 1.3332, 1.5079, 1.8075, 1.5315, 1.4967, 1.5854, 1.7232], + device='cuda:0'), covar=tensor([0.0815, 0.1432, 0.1222, 0.0873, 0.1092, 0.0513, 0.0946, 0.0622], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0359, 0.0282, 0.0240, 0.0304, 0.0242, 0.0268, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:57:40,591 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 05:57:44,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 05:57:51,245 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 05:57:51,520 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:02,216 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 6.879e+02 8.267e+02 1.044e+03 2.791e+03, threshold=1.653e+03, percent-clipped=3.0 +2023-04-01 05:58:02,636 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:16,439 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2711, 2.9769, 2.2167, 2.7656, 0.9610, 2.8020, 2.7326, 2.8216], + device='cuda:0'), covar=tensor([0.0910, 0.1264, 0.1765, 0.0882, 0.3578, 0.1040, 0.0879, 0.1055], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0310, 0.0360, 0.0284, 0.0352, 0.0301, 0.0286, 0.0308], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 05:58:32,010 INFO [train.py:903] (0/4) Epoch 5, batch 5150, loss[loss=0.3915, simple_loss=0.4173, pruned_loss=0.1829, over 16980.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3411, pruned_loss=0.1096, over 3828750.57 frames. ], batch size: 100, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:58:32,371 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32462.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:36,864 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7639, 1.3494, 1.4075, 1.7625, 1.4676, 1.4950, 1.5665, 1.6083], + device='cuda:0'), covar=tensor([0.0840, 0.1627, 0.1376, 0.0861, 0.1280, 0.0519, 0.0887, 0.0684], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0362, 0.0283, 0.0241, 0.0307, 0.0241, 0.0266, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 05:58:45,282 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 05:59:19,198 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 05:59:33,452 INFO [train.py:903] (0/4) Epoch 5, batch 5200, loss[loss=0.29, simple_loss=0.353, pruned_loss=0.1135, over 19545.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3422, pruned_loss=0.1103, over 3823153.25 frames. ], batch size: 64, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:59:43,879 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:59:45,953 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 05:59:50,748 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2119, 1.2051, 1.7921, 1.4569, 2.6233, 2.2410, 2.8556, 1.0200], + device='cuda:0'), covar=tensor([0.1942, 0.3397, 0.1831, 0.1558, 0.1308, 0.1579, 0.1376, 0.3100], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0507, 0.0479, 0.0407, 0.0554, 0.0448, 0.0622, 0.0437], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:00:02,763 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 6.175e+02 7.903e+02 1.065e+03 1.799e+03, threshold=1.581e+03, percent-clipped=1.0 +2023-04-01 06:00:15,185 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:26,285 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:29,346 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 06:00:32,702 INFO [train.py:903] (0/4) Epoch 5, batch 5250, loss[loss=0.2604, simple_loss=0.3204, pruned_loss=0.1002, over 19767.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3422, pruned_loss=0.1102, over 3810555.16 frames. ], batch size: 48, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:00:34,028 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5467, 4.1443, 2.4262, 3.6938, 1.0073, 3.6864, 3.6947, 3.9362], + device='cuda:0'), covar=tensor([0.0604, 0.1151, 0.1938, 0.0738, 0.4346, 0.0848, 0.0785, 0.0649], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0314, 0.0365, 0.0287, 0.0359, 0.0305, 0.0290, 0.0313], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:00:55,222 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,127 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,913 INFO [train.py:903] (0/4) Epoch 5, batch 5300, loss[loss=0.3912, simple_loss=0.4085, pruned_loss=0.1869, over 13114.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3427, pruned_loss=0.1104, over 3814768.54 frames. ], batch size: 135, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:01:52,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-01 06:02:04,545 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:05,310 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 6.583e+02 8.041e+02 1.027e+03 2.106e+03, threshold=1.608e+03, percent-clipped=4.0 +2023-04-01 06:02:17,833 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32648.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:19,376 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-04-01 06:02:34,326 INFO [train.py:903] (0/4) Epoch 5, batch 5350, loss[loss=0.2793, simple_loss=0.3498, pruned_loss=0.1044, over 19715.00 frames. ], tot_loss[loss=0.282, simple_loss=0.343, pruned_loss=0.1104, over 3816177.52 frames. ], batch size: 63, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:02:48,887 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:59,137 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2013, 1.3076, 1.0761, 0.9801, 1.0455, 1.0901, 0.0441, 0.4126], + device='cuda:0'), covar=tensor([0.0263, 0.0256, 0.0159, 0.0212, 0.0514, 0.0209, 0.0491, 0.0444], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0293, 0.0286, 0.0310, 0.0378, 0.0301, 0.0281, 0.0298], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:03:00,320 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:06,684 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 06:03:31,090 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:35,125 INFO [train.py:903] (0/4) Epoch 5, batch 5400, loss[loss=0.2889, simple_loss=0.3504, pruned_loss=0.1137, over 19656.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3425, pruned_loss=0.1102, over 3822800.26 frames. ], batch size: 58, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:03:56,047 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-04-01 06:04:03,209 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.192e+02 6.673e+02 8.431e+02 1.064e+03 2.658e+03, threshold=1.686e+03, percent-clipped=8.0 +2023-04-01 06:04:34,478 INFO [train.py:903] (0/4) Epoch 5, batch 5450, loss[loss=0.2657, simple_loss=0.3382, pruned_loss=0.09658, over 19650.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3429, pruned_loss=0.1103, over 3828300.53 frames. ], batch size: 55, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:05:34,683 INFO [train.py:903] (0/4) Epoch 5, batch 5500, loss[loss=0.2552, simple_loss=0.3236, pruned_loss=0.09338, over 19845.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3438, pruned_loss=0.1113, over 3814501.96 frames. ], batch size: 52, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:05:57,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 06:06:05,437 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+02 6.283e+02 7.782e+02 1.000e+03 2.107e+03, threshold=1.556e+03, percent-clipped=4.0 +2023-04-01 06:06:34,470 INFO [train.py:903] (0/4) Epoch 5, batch 5550, loss[loss=0.2518, simple_loss=0.308, pruned_loss=0.09779, over 19380.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3422, pruned_loss=0.1105, over 3817854.19 frames. ], batch size: 47, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:06:40,877 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 06:06:55,829 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 06:07:29,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 06:07:36,771 INFO [train.py:903] (0/4) Epoch 5, batch 5600, loss[loss=0.2705, simple_loss=0.347, pruned_loss=0.09698, over 19663.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3421, pruned_loss=0.1108, over 3813794.84 frames. ], batch size: 55, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:07:51,836 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2432, 1.2192, 1.9418, 1.3978, 3.1261, 2.4630, 3.2136, 1.4218], + device='cuda:0'), covar=tensor([0.1948, 0.3366, 0.1807, 0.1618, 0.1337, 0.1595, 0.1613, 0.2982], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0505, 0.0482, 0.0407, 0.0554, 0.0446, 0.0628, 0.0444], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:08:06,606 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 7.188e+02 9.159e+02 1.163e+03 2.158e+03, threshold=1.832e+03, percent-clipped=9.0 +2023-04-01 06:08:23,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1150, 1.1916, 1.7132, 1.3026, 2.3716, 1.9238, 2.4219, 0.9262], + device='cuda:0'), covar=tensor([0.1797, 0.2969, 0.1515, 0.1484, 0.1175, 0.1511, 0.1239, 0.2774], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0500, 0.0479, 0.0403, 0.0550, 0.0444, 0.0622, 0.0439], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:08:38,293 INFO [train.py:903] (0/4) Epoch 5, batch 5650, loss[loss=0.24, simple_loss=0.3101, pruned_loss=0.08498, over 19576.00 frames. ], tot_loss[loss=0.281, simple_loss=0.341, pruned_loss=0.1105, over 3819943.04 frames. ], batch size: 52, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:08:54,135 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-01 06:09:04,595 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:09:24,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 06:09:38,288 INFO [train.py:903] (0/4) Epoch 5, batch 5700, loss[loss=0.2765, simple_loss=0.3438, pruned_loss=0.1046, over 19153.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3394, pruned_loss=0.1093, over 3838881.64 frames. ], batch size: 69, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:09:42,493 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-01 06:10:09,259 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.491e+02 7.117e+02 8.783e+02 1.086e+03 2.576e+03, threshold=1.757e+03, percent-clipped=4.0 +2023-04-01 06:10:38,536 INFO [train.py:903] (0/4) Epoch 5, batch 5750, loss[loss=0.3329, simple_loss=0.3761, pruned_loss=0.1449, over 19680.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3388, pruned_loss=0.109, over 3830787.62 frames. ], batch size: 58, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:10:39,678 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 06:10:47,556 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 06:10:52,572 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 06:11:01,624 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1624, 1.1946, 1.7812, 1.4164, 2.4057, 2.1933, 2.6559, 0.9932], + device='cuda:0'), covar=tensor([0.1898, 0.3097, 0.1634, 0.1516, 0.1236, 0.1427, 0.1306, 0.2840], + device='cuda:0'), in_proj_covar=tensor([0.0442, 0.0496, 0.0476, 0.0402, 0.0549, 0.0437, 0.0617, 0.0437], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:11:09,105 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33087.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:11:40,202 INFO [train.py:903] (0/4) Epoch 5, batch 5800, loss[loss=0.2751, simple_loss=0.3298, pruned_loss=0.1102, over 19452.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.34, pruned_loss=0.1096, over 3831952.07 frames. ], batch size: 49, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:08,910 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 7.070e+02 8.520e+02 1.129e+03 2.712e+03, threshold=1.704e+03, percent-clipped=8.0 +2023-04-01 06:12:40,678 INFO [train.py:903] (0/4) Epoch 5, batch 5850, loss[loss=0.2867, simple_loss=0.3495, pruned_loss=0.112, over 19329.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3403, pruned_loss=0.1096, over 3837955.63 frames. ], batch size: 66, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:53,479 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:13:20,550 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8022, 1.9554, 1.7386, 3.1154, 1.9195, 2.6980, 2.5600, 1.6589], + device='cuda:0'), covar=tensor([0.2173, 0.1635, 0.0886, 0.0899, 0.2032, 0.0695, 0.1650, 0.1619], + device='cuda:0'), in_proj_covar=tensor([0.0642, 0.0629, 0.0559, 0.0790, 0.0675, 0.0556, 0.0695, 0.0593], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:13:40,936 INFO [train.py:903] (0/4) Epoch 5, batch 5900, loss[loss=0.2763, simple_loss=0.3352, pruned_loss=0.1087, over 19548.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3404, pruned_loss=0.1098, over 3839407.43 frames. ], batch size: 56, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:13:43,338 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 06:14:04,496 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-01 06:14:11,990 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.345e+02 7.066e+02 8.762e+02 1.130e+03 2.300e+03, threshold=1.752e+03, percent-clipped=6.0 +2023-04-01 06:14:41,680 INFO [train.py:903] (0/4) Epoch 5, batch 5950, loss[loss=0.2787, simple_loss=0.3498, pruned_loss=0.1037, over 19663.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.34, pruned_loss=0.1091, over 3838346.93 frames. ], batch size: 60, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:08,525 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2381, 1.1993, 1.5474, 0.8232, 2.3930, 2.9488, 2.7377, 3.1184], + device='cuda:0'), covar=tensor([0.1356, 0.2977, 0.2806, 0.2024, 0.0434, 0.0158, 0.0260, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0284, 0.0316, 0.0251, 0.0200, 0.0118, 0.0204, 0.0144], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:15:43,948 INFO [train.py:903] (0/4) Epoch 5, batch 6000, loss[loss=0.2485, simple_loss=0.3155, pruned_loss=0.09073, over 19733.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3403, pruned_loss=0.1091, over 3813988.18 frames. ], batch size: 51, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:43,949 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 06:15:56,881 INFO [train.py:937] (0/4) Epoch 5, validation: loss=0.203, simple_loss=0.3017, pruned_loss=0.05213, over 944034.00 frames. +2023-04-01 06:15:56,882 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 06:16:18,149 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:16:28,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.050e+02 6.893e+02 8.503e+02 1.056e+03 1.945e+03, threshold=1.701e+03, percent-clipped=4.0 +2023-04-01 06:16:59,410 INFO [train.py:903] (0/4) Epoch 5, batch 6050, loss[loss=0.2692, simple_loss=0.334, pruned_loss=0.1022, over 18111.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3402, pruned_loss=0.1095, over 3822805.39 frames. ], batch size: 83, lr: 1.56e-02, grad_scale: 16.0 +2023-04-01 06:18:00,558 INFO [train.py:903] (0/4) Epoch 5, batch 6100, loss[loss=0.2741, simple_loss=0.33, pruned_loss=0.1091, over 19742.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3404, pruned_loss=0.11, over 3823670.95 frames. ], batch size: 51, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:18:21,940 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6523, 4.1484, 2.7370, 3.7971, 1.4339, 3.5991, 3.8553, 3.9311], + device='cuda:0'), covar=tensor([0.0626, 0.1303, 0.1941, 0.0694, 0.3769, 0.1131, 0.0814, 0.1373], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0318, 0.0371, 0.0289, 0.0360, 0.0312, 0.0294, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 06:18:23,019 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33431.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:18:28,996 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-04-01 06:18:32,606 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.262e+02 7.464e+02 9.853e+02 2.581e+03, threshold=1.493e+03, percent-clipped=2.0 +2023-04-01 06:18:39,251 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:19:00,969 INFO [train.py:903] (0/4) Epoch 5, batch 6150, loss[loss=0.3074, simple_loss=0.3638, pruned_loss=0.1255, over 19675.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3412, pruned_loss=0.1107, over 3813347.69 frames. ], batch size: 60, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:19:29,653 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 06:19:35,805 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:01,182 INFO [train.py:903] (0/4) Epoch 5, batch 6200, loss[loss=0.3141, simple_loss=0.3622, pruned_loss=0.133, over 19353.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3406, pruned_loss=0.1106, over 3811727.77 frames. ], batch size: 66, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:20:08,862 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:25,364 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3069, 3.0327, 2.0008, 2.8064, 0.9413, 2.7691, 2.7653, 2.9169], + device='cuda:0'), covar=tensor([0.0907, 0.1199, 0.1982, 0.0864, 0.3525, 0.1079, 0.0990, 0.1049], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0308, 0.0364, 0.0285, 0.0350, 0.0303, 0.0287, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:20:34,185 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.213e+02 6.728e+02 8.677e+02 1.165e+03 2.777e+03, threshold=1.735e+03, percent-clipped=13.0 +2023-04-01 06:20:40,236 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1705, 2.1664, 1.6627, 1.5820, 1.5233, 1.7180, 0.2701, 1.0382], + device='cuda:0'), covar=tensor([0.0226, 0.0254, 0.0204, 0.0249, 0.0492, 0.0313, 0.0479, 0.0414], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0279, 0.0282, 0.0298, 0.0372, 0.0293, 0.0272, 0.0291], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:20:43,552 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:21:03,399 INFO [train.py:903] (0/4) Epoch 5, batch 6250, loss[loss=0.3276, simple_loss=0.3748, pruned_loss=0.1402, over 13160.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3413, pruned_loss=0.1107, over 3799152.21 frames. ], batch size: 136, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:21:31,243 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 06:22:03,940 INFO [train.py:903] (0/4) Epoch 5, batch 6300, loss[loss=0.2872, simple_loss=0.3541, pruned_loss=0.1101, over 17669.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3423, pruned_loss=0.1118, over 3795507.24 frames. 
], batch size: 101, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:22:28,455 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:22:35,563 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 6.435e+02 8.030e+02 9.840e+02 2.632e+03, threshold=1.606e+03, percent-clipped=3.0 +2023-04-01 06:23:04,054 INFO [train.py:903] (0/4) Epoch 5, batch 6350, loss[loss=0.2287, simple_loss=0.303, pruned_loss=0.07724, over 19848.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3419, pruned_loss=0.1117, over 3782971.73 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:23:45,940 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:23:50,615 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:05,044 INFO [train.py:903] (0/4) Epoch 5, batch 6400, loss[loss=0.2338, simple_loss=0.2949, pruned_loss=0.08638, over 19718.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3411, pruned_loss=0.1104, over 3807214.67 frames. ], batch size: 46, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:24:20,318 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:37,366 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.704e+02 6.874e+02 8.420e+02 1.031e+03 3.616e+03, threshold=1.684e+03, percent-clipped=3.0 +2023-04-01 06:25:05,851 INFO [train.py:903] (0/4) Epoch 5, batch 6450, loss[loss=0.2677, simple_loss=0.3165, pruned_loss=0.1094, over 19414.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3409, pruned_loss=0.1104, over 3813682.68 frames. ], batch size: 48, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:25:18,508 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6566, 1.2772, 1.5654, 1.2107, 2.5275, 3.1528, 3.0194, 3.2942], + device='cuda:0'), covar=tensor([0.1180, 0.2931, 0.2928, 0.1929, 0.0460, 0.0301, 0.0215, 0.0141], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0282, 0.0316, 0.0250, 0.0198, 0.0116, 0.0203, 0.0143], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:25:48,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 06:25:54,084 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:06,364 INFO [train.py:903] (0/4) Epoch 5, batch 6500, loss[loss=0.2924, simple_loss=0.3566, pruned_loss=0.114, over 19608.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3418, pruned_loss=0.1107, over 3827457.61 frames. ], batch size: 61, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:26:12,175 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 06:26:24,758 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:32,491 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33834.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:36,709 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.680e+02 8.171e+02 1.132e+03 2.519e+03, threshold=1.634e+03, percent-clipped=6.0 +2023-04-01 06:27:07,202 INFO [train.py:903] (0/4) Epoch 5, batch 6550, loss[loss=0.2908, simple_loss=0.3543, pruned_loss=0.1137, over 19665.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.341, pruned_loss=0.1109, over 3830240.58 frames. ], batch size: 58, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:27:38,512 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:27:59,331 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8556, 1.3310, 0.9729, 0.9653, 1.2214, 0.9053, 0.7505, 1.2269], + device='cuda:0'), covar=tensor([0.0442, 0.0542, 0.0944, 0.0419, 0.0355, 0.0945, 0.0480, 0.0343], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0269, 0.0312, 0.0237, 0.0222, 0.0303, 0.0285, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 06:28:06,987 INFO [train.py:903] (0/4) Epoch 5, batch 6600, loss[loss=0.3068, simple_loss=0.3652, pruned_loss=0.1242, over 19386.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3409, pruned_loss=0.1104, over 3836026.27 frames. ], batch size: 70, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:28:08,463 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:40,229 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.407e+02 7.502e+02 9.137e+02 1.060e+03 2.817e+03, threshold=1.827e+03, percent-clipped=6.0 +2023-04-01 06:28:52,881 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33949.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:01,830 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:09,036 INFO [train.py:903] (0/4) Epoch 5, batch 6650, loss[loss=0.2891, simple_loss=0.3573, pruned_loss=0.1105, over 19673.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.341, pruned_loss=0.1104, over 3814120.65 frames. ], batch size: 55, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:29:54,080 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-34000.pt +2023-04-01 06:30:10,052 INFO [train.py:903] (0/4) Epoch 5, batch 6700, loss[loss=0.2135, simple_loss=0.2814, pruned_loss=0.07279, over 19747.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3395, pruned_loss=0.1092, over 3828593.57 frames. 
], batch size: 48, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:40,326 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 7.257e+02 9.131e+02 1.100e+03 2.314e+03, threshold=1.826e+03, percent-clipped=7.0 +2023-04-01 06:30:40,470 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:06,265 INFO [train.py:903] (0/4) Epoch 5, batch 6750, loss[loss=0.3088, simple_loss=0.3718, pruned_loss=0.1229, over 19141.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3425, pruned_loss=0.1113, over 3826430.96 frames. ], batch size: 69, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:31:06,502 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:22,417 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3951, 1.2132, 1.2788, 1.7773, 1.3886, 1.6579, 1.7325, 1.4586], + device='cuda:0'), covar=tensor([0.0891, 0.1102, 0.1176, 0.0859, 0.0986, 0.0780, 0.0859, 0.0752], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0251, 0.0249, 0.0278, 0.0268, 0.0233, 0.0231, 0.0224], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 06:32:02,677 INFO [train.py:903] (0/4) Epoch 5, batch 6800, loss[loss=0.3043, simple_loss=0.3548, pruned_loss=0.1269, over 19764.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3425, pruned_loss=0.111, over 3827199.72 frames. ], batch size: 54, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:32:30,647 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.271e+02 6.317e+02 7.808e+02 9.230e+02 1.582e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-04-01 06:32:32,097 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-5.pt +2023-04-01 06:32:48,125 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 06:32:48,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 06:32:50,739 INFO [train.py:903] (0/4) Epoch 6, batch 0, loss[loss=0.2949, simple_loss=0.3453, pruned_loss=0.1222, over 19600.00 frames. ], tot_loss[loss=0.2949, simple_loss=0.3453, pruned_loss=0.1222, over 19600.00 frames. ], batch size: 52, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:32:50,740 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 06:33:02,092 INFO [train.py:937] (0/4) Epoch 6, validation: loss=0.2022, simple_loss=0.3015, pruned_loss=0.05149, over 944034.00 frames. +2023-04-01 06:33:02,093 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 06:33:15,327 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 06:33:20,318 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:33:30,302 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:03,526 INFO [train.py:903] (0/4) Epoch 6, batch 50, loss[loss=0.2153, simple_loss=0.2815, pruned_loss=0.07455, over 19754.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3403, pruned_loss=0.1095, over 870008.59 frames. 
], batch size: 47, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:34:22,182 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34205.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:40,711 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 06:34:53,240 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34230.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:35:05,224 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.721e+02 5.756e+02 7.149e+02 1.025e+03 3.166e+03, threshold=1.430e+03, percent-clipped=7.0 +2023-04-01 06:35:06,289 INFO [train.py:903] (0/4) Epoch 6, batch 100, loss[loss=0.3104, simple_loss=0.3727, pruned_loss=0.124, over 19688.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3385, pruned_loss=0.1076, over 1538250.44 frames. ], batch size: 59, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:35:18,596 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 06:35:23,478 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7170, 0.8430, 0.9827, 0.9929, 1.5366, 0.7876, 1.3455, 1.5794], + device='cuda:0'), covar=tensor([0.0451, 0.1692, 0.1575, 0.0975, 0.0512, 0.1331, 0.0840, 0.0401], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0311, 0.0313, 0.0286, 0.0306, 0.0316, 0.0283, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 06:35:26,668 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2916, 3.9396, 2.5049, 3.5125, 1.2537, 3.5252, 3.6275, 3.7331], + device='cuda:0'), covar=tensor([0.0591, 0.0948, 0.1728, 0.0648, 0.3306, 0.0745, 0.0685, 0.0715], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0313, 0.0371, 0.0287, 0.0353, 0.0304, 0.0287, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:36:06,452 INFO [train.py:903] (0/4) Epoch 6, batch 150, loss[loss=0.2661, simple_loss=0.3325, pruned_loss=0.09987, over 19382.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.3398, pruned_loss=0.1087, over 2051831.82 frames. ], batch size: 70, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:36:19,454 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:37:08,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.164e+02 6.478e+02 8.283e+02 9.993e+02 1.951e+03, threshold=1.657e+03, percent-clipped=7.0 +2023-04-01 06:37:08,905 INFO [train.py:903] (0/4) Epoch 6, batch 200, loss[loss=0.2655, simple_loss=0.3302, pruned_loss=0.1004, over 19485.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3374, pruned_loss=0.1065, over 2451775.14 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:37:08,924 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-01 06:37:44,159 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2050, 1.3006, 1.6500, 1.3905, 2.1163, 2.0101, 2.2916, 0.7456], + device='cuda:0'), covar=tensor([0.1878, 0.3079, 0.1620, 0.1520, 0.1127, 0.1592, 0.1259, 0.2921], + device='cuda:0'), in_proj_covar=tensor([0.0450, 0.0503, 0.0485, 0.0411, 0.0553, 0.0448, 0.0627, 0.0444], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:38:12,102 INFO [train.py:903] (0/4) Epoch 6, batch 250, loss[loss=0.2783, simple_loss=0.3457, pruned_loss=0.1055, over 18358.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3371, pruned_loss=0.1069, over 2753470.04 frames. ], batch size: 83, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:38:33,066 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:38,003 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:38,438 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.90 vs. limit=5.0 +2023-04-01 06:38:44,813 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2451, 3.8757, 2.4699, 3.5495, 1.1028, 3.4491, 3.5226, 3.6990], + device='cuda:0'), covar=tensor([0.0705, 0.1088, 0.1938, 0.0749, 0.3919, 0.1048, 0.0843, 0.0959], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0310, 0.0369, 0.0286, 0.0355, 0.0308, 0.0286, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:38:44,979 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:08,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:14,113 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 6.949e+02 8.663e+02 1.115e+03 2.860e+03, threshold=1.733e+03, percent-clipped=3.0 +2023-04-01 06:39:14,131 INFO [train.py:903] (0/4) Epoch 6, batch 300, loss[loss=0.2992, simple_loss=0.3601, pruned_loss=0.1191, over 19666.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3372, pruned_loss=0.1071, over 2997294.16 frames. ], batch size: 60, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:39:15,636 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3432, 1.0575, 1.2471, 1.2147, 2.0073, 0.9211, 1.6921, 2.0062], + device='cuda:0'), covar=tensor([0.0585, 0.2509, 0.2351, 0.1448, 0.0764, 0.2038, 0.1048, 0.0640], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0316, 0.0320, 0.0288, 0.0311, 0.0318, 0.0289, 0.0307], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 06:40:17,215 INFO [train.py:903] (0/4) Epoch 6, batch 350, loss[loss=0.224, simple_loss=0.284, pruned_loss=0.08205, over 19322.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3372, pruned_loss=0.107, over 3189702.66 frames. ], batch size: 44, lr: 1.43e-02, grad_scale: 4.0 +2023-04-01 06:40:22,965 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 06:40:37,839 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:40:55,542 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:41:18,579 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+02 6.755e+02 8.258e+02 9.979e+02 1.871e+03, threshold=1.652e+03, percent-clipped=1.0 +2023-04-01 06:41:18,598 INFO [train.py:903] (0/4) Epoch 6, batch 400, loss[loss=0.278, simple_loss=0.3448, pruned_loss=0.1056, over 19763.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3375, pruned_loss=0.1071, over 3325758.11 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:20,652 INFO [train.py:903] (0/4) Epoch 6, batch 450, loss[loss=0.2641, simple_loss=0.3276, pruned_loss=0.1003, over 19748.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3397, pruned_loss=0.1093, over 3430988.89 frames. ], batch size: 51, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:48,992 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:42:54,487 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 06:42:55,450 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 06:43:01,502 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:23,817 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.432e+02 6.970e+02 8.269e+02 1.071e+03 2.551e+03, threshold=1.654e+03, percent-clipped=6.0 +2023-04-01 06:43:23,840 INFO [train.py:903] (0/4) Epoch 6, batch 500, loss[loss=0.2721, simple_loss=0.3431, pruned_loss=0.1006, over 19774.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3398, pruned_loss=0.109, over 3524004.18 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:43:24,103 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34640.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:29,888 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:03,457 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:27,130 INFO [train.py:903] (0/4) Epoch 6, batch 550, loss[loss=0.2612, simple_loss=0.328, pruned_loss=0.09714, over 19732.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3401, pruned_loss=0.1093, over 3585220.77 frames. ], batch size: 51, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:44:37,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:45:17,136 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34728.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:45:31,829 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.191e+02 8.093e+02 9.820e+02 1.880e+03, threshold=1.619e+03, percent-clipped=2.0 +2023-04-01 06:45:31,848 INFO [train.py:903] (0/4) Epoch 6, batch 600, loss[loss=0.2841, simple_loss=0.3398, pruned_loss=0.1143, over 19737.00 frames. 
], tot_loss[loss=0.2786, simple_loss=0.34, pruned_loss=0.1086, over 3644102.81 frames. ], batch size: 51, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:13,316 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 06:46:15,091 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 06:46:20,218 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34777.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:46:32,653 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.98 vs. limit=5.0 +2023-04-01 06:46:35,474 INFO [train.py:903] (0/4) Epoch 6, batch 650, loss[loss=0.2711, simple_loss=0.3201, pruned_loss=0.111, over 19813.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.339, pruned_loss=0.1079, over 3690163.13 frames. ], batch size: 48, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:51,790 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:47:38,631 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 6.280e+02 8.587e+02 1.153e+03 3.497e+03, threshold=1.717e+03, percent-clipped=9.0 +2023-04-01 06:47:38,650 INFO [train.py:903] (0/4) Epoch 6, batch 700, loss[loss=0.2729, simple_loss=0.3419, pruned_loss=0.102, over 17385.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3386, pruned_loss=0.1076, over 3723561.96 frames. ], batch size: 101, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:48:27,730 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:48:43,535 INFO [train.py:903] (0/4) Epoch 6, batch 750, loss[loss=0.2742, simple_loss=0.3264, pruned_loss=0.111, over 17781.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3379, pruned_loss=0.1071, over 3746373.60 frames. ], batch size: 39, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:49:00,037 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:49:06,815 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4672, 1.2782, 1.5122, 1.0503, 2.5349, 3.5133, 3.2750, 3.6232], + device='cuda:0'), covar=tensor([0.1321, 0.2849, 0.2719, 0.1900, 0.0417, 0.0165, 0.0191, 0.0123], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0281, 0.0311, 0.0246, 0.0198, 0.0118, 0.0201, 0.0145], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 06:49:45,092 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.784e+02 6.254e+02 7.861e+02 1.094e+03 2.828e+03, threshold=1.572e+03, percent-clipped=5.0 +2023-04-01 06:49:45,111 INFO [train.py:903] (0/4) Epoch 6, batch 800, loss[loss=0.2831, simple_loss=0.3436, pruned_loss=0.1113, over 17467.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3363, pruned_loss=0.1066, over 3762699.73 frames. ], batch size: 101, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:50:02,472 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 06:50:03,702 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:21,726 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-01 06:50:41,616 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:46,214 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:48,468 INFO [train.py:903] (0/4) Epoch 6, batch 850, loss[loss=0.3071, simple_loss=0.366, pruned_loss=0.1241, over 18867.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3378, pruned_loss=0.1073, over 3769992.58 frames. ], batch size: 74, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:51:32,723 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1318, 1.2032, 1.7023, 1.4273, 2.3693, 2.0647, 2.5437, 0.9043], + device='cuda:0'), covar=tensor([0.1873, 0.2914, 0.1529, 0.1374, 0.1112, 0.1482, 0.1204, 0.2728], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0511, 0.0488, 0.0412, 0.0556, 0.0449, 0.0625, 0.0448], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:51:42,520 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 06:51:49,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 6.223e+02 7.935e+02 9.772e+02 2.166e+03, threshold=1.587e+03, percent-clipped=2.0 +2023-04-01 06:51:49,548 INFO [train.py:903] (0/4) Epoch 6, batch 900, loss[loss=0.2866, simple_loss=0.3417, pruned_loss=0.1157, over 19297.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3385, pruned_loss=0.1077, over 3780519.94 frames. ], batch size: 44, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:51:51,218 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8357, 1.5882, 1.5523, 1.6248, 3.3750, 1.0646, 2.1531, 3.5447], + device='cuda:0'), covar=tensor([0.0299, 0.2155, 0.2117, 0.1428, 0.0540, 0.2268, 0.1185, 0.0307], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0314, 0.0313, 0.0284, 0.0310, 0.0315, 0.0290, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 06:52:28,231 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:52:30,301 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35072.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:52:51,734 INFO [train.py:903] (0/4) Epoch 6, batch 950, loss[loss=0.3201, simple_loss=0.3684, pruned_loss=0.1359, over 19400.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3382, pruned_loss=0.1073, over 3792512.54 frames. ], batch size: 70, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:57,559 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 06:53:04,298 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:09,177 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:18,080 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:55,217 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 7.194e+02 8.589e+02 1.083e+03 2.096e+03, threshold=1.718e+03, percent-clipped=5.0 +2023-04-01 06:53:55,236 INFO [train.py:903] (0/4) Epoch 6, batch 1000, loss[loss=0.3145, simple_loss=0.3641, pruned_loss=0.1324, over 19676.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3371, pruned_loss=0.1067, over 3800089.39 frames. ], batch size: 55, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:54:48,390 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 06:54:52,210 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35187.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:54:55,179 INFO [train.py:903] (0/4) Epoch 6, batch 1050, loss[loss=0.2696, simple_loss=0.3343, pruned_loss=0.1024, over 17466.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3381, pruned_loss=0.1081, over 3795982.80 frames. ], batch size: 101, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:55:31,003 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 06:55:49,745 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 06:55:57,165 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.102e+02 7.066e+02 8.619e+02 1.238e+03 3.302e+03, threshold=1.724e+03, percent-clipped=8.0 +2023-04-01 06:55:57,184 INFO [train.py:903] (0/4) Epoch 6, batch 1100, loss[loss=0.227, simple_loss=0.2912, pruned_loss=0.08138, over 19736.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3391, pruned_loss=0.1088, over 3804929.15 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:56:09,825 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9621, 2.0232, 2.0482, 1.9972, 4.4279, 1.1071, 2.4709, 4.5797], + device='cuda:0'), covar=tensor([0.0242, 0.2129, 0.2257, 0.1465, 0.0503, 0.2404, 0.1175, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0315, 0.0317, 0.0286, 0.0310, 0.0315, 0.0288, 0.0307], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 06:56:48,011 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-04-01 06:56:59,593 INFO [train.py:903] (0/4) Epoch 6, batch 1150, loss[loss=0.2856, simple_loss=0.3508, pruned_loss=0.1102, over 19756.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3397, pruned_loss=0.1092, over 3792027.51 frames. 
], batch size: 54, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:57:45,474 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:04,197 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.124e+02 7.469e+02 9.050e+02 1.884e+03, threshold=1.494e+03, percent-clipped=1.0 +2023-04-01 06:58:04,215 INFO [train.py:903] (0/4) Epoch 6, batch 1200, loss[loss=0.3346, simple_loss=0.3898, pruned_loss=0.1397, over 19749.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3382, pruned_loss=0.1077, over 3812562.66 frames. ], batch size: 63, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:58:17,318 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:23,057 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:27,691 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:32,182 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35363.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:58:36,603 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 06:58:55,103 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:56,253 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1323, 1.0579, 1.7143, 1.2693, 2.7931, 2.1528, 2.8553, 1.1406], + device='cuda:0'), covar=tensor([0.2066, 0.3449, 0.1871, 0.1788, 0.1113, 0.1637, 0.1332, 0.2995], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0508, 0.0494, 0.0413, 0.0562, 0.0451, 0.0626, 0.0452], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 06:58:59,588 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:05,734 INFO [train.py:903] (0/4) Epoch 6, batch 1250, loss[loss=0.3123, simple_loss=0.3612, pruned_loss=0.1317, over 19654.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.339, pruned_loss=0.1079, over 3802460.86 frames. ], batch size: 55, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:59:09,384 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:00:08,109 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+02 6.685e+02 8.523e+02 1.077e+03 2.432e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-04-01 07:00:08,128 INFO [train.py:903] (0/4) Epoch 6, batch 1300, loss[loss=0.2523, simple_loss=0.3078, pruned_loss=0.0984, over 19356.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3376, pruned_loss=0.1071, over 3821045.00 frames. 
], batch size: 44, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 07:00:12,135 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35443.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:00:26,225 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35454.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:00:43,964 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35468.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:01:11,031 INFO [train.py:903] (0/4) Epoch 6, batch 1350, loss[loss=0.2646, simple_loss=0.3208, pruned_loss=0.1042, over 19749.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3375, pruned_loss=0.107, over 3814628.61 frames. ], batch size: 45, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:01:40,785 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6233, 1.2039, 1.8972, 1.9182, 2.7367, 4.6752, 4.4846, 4.8875], + device='cuda:0'), covar=tensor([0.1324, 0.3113, 0.2746, 0.1525, 0.0514, 0.0103, 0.0165, 0.0075], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0285, 0.0312, 0.0249, 0.0201, 0.0122, 0.0202, 0.0146], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:02:13,019 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+02 6.783e+02 8.495e+02 1.071e+03 2.340e+03, threshold=1.699e+03, percent-clipped=3.0 +2023-04-01 07:02:13,038 INFO [train.py:903] (0/4) Epoch 6, batch 1400, loss[loss=0.2458, simple_loss=0.3095, pruned_loss=0.09105, over 19837.00 frames. ], tot_loss[loss=0.2745, simple_loss=0.3369, pruned_loss=0.106, over 3829642.53 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:02:22,777 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:02:47,214 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:03:13,362 INFO [train.py:903] (0/4) Epoch 6, batch 1450, loss[loss=0.3225, simple_loss=0.3847, pruned_loss=0.1301, over 19273.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3368, pruned_loss=0.1063, over 3833781.17 frames. ], batch size: 66, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:03:13,400 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 07:03:52,454 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4591, 1.1485, 1.2935, 1.2538, 2.1236, 0.9181, 1.7394, 2.0783], + device='cuda:0'), covar=tensor([0.0592, 0.2396, 0.2373, 0.1358, 0.0831, 0.1934, 0.0952, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0313, 0.0316, 0.0286, 0.0313, 0.0312, 0.0286, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:04:15,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+02 6.609e+02 8.520e+02 1.101e+03 2.891e+03, threshold=1.704e+03, percent-clipped=3.0 +2023-04-01 07:04:15,942 INFO [train.py:903] (0/4) Epoch 6, batch 1500, loss[loss=0.2596, simple_loss=0.3294, pruned_loss=0.09491, over 19667.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.338, pruned_loss=0.1071, over 3830828.13 frames. 
], batch size: 55, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:17,132 INFO [train.py:903] (0/4) Epoch 6, batch 1550, loss[loss=0.2228, simple_loss=0.2852, pruned_loss=0.08023, over 19393.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3378, pruned_loss=0.1074, over 3820694.60 frames. ], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:39,881 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35707.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:06:16,826 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:06:22,436 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 6.450e+02 9.026e+02 1.093e+03 2.835e+03, threshold=1.805e+03, percent-clipped=5.0 +2023-04-01 07:06:22,455 INFO [train.py:903] (0/4) Epoch 6, batch 1600, loss[loss=0.2656, simple_loss=0.335, pruned_loss=0.09806, over 18698.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3376, pruned_loss=0.1074, over 3802520.94 frames. ], batch size: 74, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:06:24,317 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.87 vs. limit=5.0 +2023-04-01 07:06:44,234 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 07:06:53,836 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7941, 1.7255, 1.8735, 2.0315, 4.2691, 1.1052, 2.1699, 4.1515], + device='cuda:0'), covar=tensor([0.0265, 0.2372, 0.2425, 0.1408, 0.0496, 0.2320, 0.1280, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0310, 0.0313, 0.0286, 0.0308, 0.0309, 0.0283, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:07:23,507 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:07:24,377 INFO [train.py:903] (0/4) Epoch 6, batch 1650, loss[loss=0.2001, simple_loss=0.2788, pruned_loss=0.06064, over 19754.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3369, pruned_loss=0.1071, over 3804936.83 frames. ], batch size: 45, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:07:35,338 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5286, 1.5905, 1.6385, 2.0808, 1.3475, 1.7335, 1.9865, 1.5695], + device='cuda:0'), covar=tensor([0.2354, 0.1795, 0.1031, 0.0987, 0.1994, 0.0877, 0.2167, 0.1819], + device='cuda:0'), in_proj_covar=tensor([0.0659, 0.0649, 0.0575, 0.0807, 0.0686, 0.0568, 0.0701, 0.0612], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 07:07:50,596 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 07:07:51,783 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. 
limit=2.0 +2023-04-01 07:08:05,697 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35822.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:08:09,351 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:13,994 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35829.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:27,430 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.226e+02 6.593e+02 7.720e+02 9.793e+02 2.227e+03, threshold=1.544e+03, percent-clipped=1.0 +2023-04-01 07:08:27,449 INFO [train.py:903] (0/4) Epoch 6, batch 1700, loss[loss=0.2993, simple_loss=0.3526, pruned_loss=0.123, over 13590.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3374, pruned_loss=0.107, over 3780200.08 frames. ], batch size: 136, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:38,889 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:40,027 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:09:06,051 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 07:09:25,824 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 07:09:29,406 INFO [train.py:903] (0/4) Epoch 6, batch 1750, loss[loss=0.2388, simple_loss=0.3045, pruned_loss=0.0866, over 19802.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3379, pruned_loss=0.107, over 3802380.32 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:09:31,972 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35892.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:10:33,848 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+02 6.695e+02 8.372e+02 1.116e+03 2.634e+03, threshold=1.674e+03, percent-clipped=7.0 +2023-04-01 07:10:33,867 INFO [train.py:903] (0/4) Epoch 6, batch 1800, loss[loss=0.2817, simple_loss=0.3358, pruned_loss=0.1138, over 19730.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3381, pruned_loss=0.1073, over 3795439.27 frames. ], batch size: 51, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:11:31,918 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 07:11:36,738 INFO [train.py:903] (0/4) Epoch 6, batch 1850, loss[loss=0.2675, simple_loss=0.3322, pruned_loss=0.1014, over 19521.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3381, pruned_loss=0.1074, over 3787836.74 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:11:49,650 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-36000.pt +2023-04-01 07:11:59,150 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36007.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:12:11,431 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 07:12:40,879 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.024e+02 7.330e+02 9.053e+02 1.086e+03 1.723e+03, threshold=1.811e+03, percent-clipped=2.0 +2023-04-01 07:12:40,903 INFO [train.py:903] (0/4) Epoch 6, batch 1900, loss[loss=0.2588, simple_loss=0.336, pruned_loss=0.09076, over 19785.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3381, pruned_loss=0.1071, over 3797805.02 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:12:57,334 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 07:13:04,088 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 07:13:27,625 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 07:13:29,151 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36078.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:13:42,466 INFO [train.py:903] (0/4) Epoch 6, batch 1950, loss[loss=0.2786, simple_loss=0.3483, pruned_loss=0.1044, over 19533.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3393, pruned_loss=0.1073, over 3800999.77 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:13:49,847 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6417, 1.3629, 2.1245, 1.6022, 3.1758, 4.8250, 4.6551, 5.0497], + device='cuda:0'), covar=tensor([0.1322, 0.2777, 0.2463, 0.1654, 0.0367, 0.0124, 0.0135, 0.0060], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0283, 0.0317, 0.0248, 0.0204, 0.0124, 0.0203, 0.0148], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:14:00,163 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36103.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:14:04,984 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:21,453 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-04-01 07:14:28,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1521, 1.2101, 1.5131, 1.2795, 2.1903, 1.8451, 2.3296, 0.7321], + device='cuda:0'), covar=tensor([0.1826, 0.2954, 0.1594, 0.1505, 0.1066, 0.1658, 0.1116, 0.2810], + device='cuda:0'), in_proj_covar=tensor([0.0459, 0.0519, 0.0499, 0.0420, 0.0567, 0.0456, 0.0636, 0.0455], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 07:14:35,237 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,138 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36133.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,296 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7678, 4.3003, 4.5781, 4.5166, 1.5458, 4.2073, 3.6066, 4.1082], + device='cuda:0'), covar=tensor([0.1120, 0.0557, 0.0464, 0.0455, 0.4258, 0.0369, 0.0558, 0.0981], + device='cuda:0'), in_proj_covar=tensor([0.0544, 0.0478, 0.0630, 0.0524, 0.0604, 0.0389, 0.0403, 0.0594], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:14:45,007 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.894e+02 6.352e+02 7.868e+02 9.806e+02 1.510e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-04-01 07:14:45,036 INFO [train.py:903] (0/4) Epoch 6, batch 2000, loss[loss=0.2709, simple_loss=0.3405, pruned_loss=0.1006, over 19674.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3389, pruned_loss=0.107, over 3800910.30 frames. ], batch size: 58, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:24,944 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:15:42,582 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 07:15:46,082 INFO [train.py:903] (0/4) Epoch 6, batch 2050, loss[loss=0.263, simple_loss=0.3349, pruned_loss=0.09549, over 19570.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3383, pruned_loss=0.1067, over 3817871.99 frames. ], batch size: 61, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:57,096 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36199.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:16:01,825 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 07:16:03,077 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 07:16:22,967 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 07:16:23,197 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2411, 2.9654, 2.0227, 2.7492, 1.0044, 2.7620, 2.6454, 2.7744], + device='cuda:0'), covar=tensor([0.1033, 0.1470, 0.2103, 0.0875, 0.3543, 0.1044, 0.0942, 0.1063], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0321, 0.0377, 0.0296, 0.0360, 0.0317, 0.0297, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 07:16:37,655 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8725, 1.2817, 0.9748, 0.9572, 1.1847, 0.8756, 0.6082, 1.2321], + device='cuda:0'), covar=tensor([0.0485, 0.0535, 0.0902, 0.0428, 0.0380, 0.0981, 0.0649, 0.0403], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0276, 0.0310, 0.0240, 0.0228, 0.0309, 0.0287, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:16:47,774 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.690e+02 8.798e+02 1.173e+03 2.442e+03, threshold=1.760e+03, percent-clipped=12.0 +2023-04-01 07:16:47,792 INFO [train.py:903] (0/4) Epoch 6, batch 2100, loss[loss=0.3079, simple_loss=0.3623, pruned_loss=0.1268, over 19316.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.339, pruned_loss=0.107, over 3818599.85 frames. ], batch size: 70, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:16:57,882 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:17,819 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36263.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:18,609 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 07:17:39,999 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 07:17:49,959 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:50,005 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:51,848 INFO [train.py:903] (0/4) Epoch 6, batch 2150, loss[loss=0.3113, simple_loss=0.3486, pruned_loss=0.137, over 19764.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3389, pruned_loss=0.107, over 3807505.26 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 16.0 +2023-04-01 07:18:39,882 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6531, 1.2110, 1.2657, 1.8989, 1.6086, 1.9132, 2.0942, 1.6879], + device='cuda:0'), covar=tensor([0.0883, 0.1197, 0.1269, 0.1070, 0.1099, 0.0820, 0.0968, 0.0793], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0247, 0.0241, 0.0275, 0.0269, 0.0230, 0.0226, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 07:18:53,906 INFO [train.py:903] (0/4) Epoch 6, batch 2200, loss[loss=0.2385, simple_loss=0.3012, pruned_loss=0.08784, over 19802.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3391, pruned_loss=0.1074, over 3800101.49 frames. 
], batch size: 48, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:18:55,064 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 6.300e+02 8.031e+02 1.073e+03 2.013e+03, threshold=1.606e+03, percent-clipped=1.0 +2023-04-01 07:19:57,016 INFO [train.py:903] (0/4) Epoch 6, batch 2250, loss[loss=0.2491, simple_loss=0.3084, pruned_loss=0.09493, over 19740.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3375, pruned_loss=0.1059, over 3818150.56 frames. ], batch size: 51, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:40,675 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 07:20:58,383 INFO [train.py:903] (0/4) Epoch 6, batch 2300, loss[loss=0.2359, simple_loss=0.3079, pruned_loss=0.08198, over 19679.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3367, pruned_loss=0.1056, over 3814499.37 frames. ], batch size: 60, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:59,563 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.689e+02 7.482e+02 9.822e+02 1.768e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-04-01 07:21:12,179 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8568, 4.3072, 2.5176, 3.8222, 0.9972, 3.8455, 4.0424, 4.1764], + device='cuda:0'), covar=tensor([0.0532, 0.1097, 0.2077, 0.0680, 0.4112, 0.0933, 0.0668, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0362, 0.0321, 0.0375, 0.0295, 0.0358, 0.0319, 0.0294, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 07:21:12,407 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3889, 2.2394, 1.7972, 1.7406, 1.6275, 1.9006, 0.2608, 1.1921], + device='cuda:0'), covar=tensor([0.0261, 0.0267, 0.0237, 0.0349, 0.0528, 0.0373, 0.0616, 0.0509], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0300, 0.0297, 0.0316, 0.0384, 0.0307, 0.0288, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:21:14,333 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 07:22:00,468 INFO [train.py:903] (0/4) Epoch 6, batch 2350, loss[loss=0.2261, simple_loss=0.2968, pruned_loss=0.07767, over 19132.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3352, pruned_loss=0.1042, over 3830803.61 frames. ], batch size: 42, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:22:19,324 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:22:43,460 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 07:22:49,414 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:00,577 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 07:23:02,880 INFO [train.py:903] (0/4) Epoch 6, batch 2400, loss[loss=0.2696, simple_loss=0.344, pruned_loss=0.09758, over 19645.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3347, pruned_loss=0.1035, over 3824590.02 frames. 
], batch size: 58, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:23:04,017 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.742e+02 5.696e+02 7.249e+02 9.157e+02 1.479e+03, threshold=1.450e+03, percent-clipped=0.0 +2023-04-01 07:23:07,411 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:08,776 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36544.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:39,050 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:06,969 INFO [train.py:903] (0/4) Epoch 6, batch 2450, loss[loss=0.301, simple_loss=0.3593, pruned_loss=0.1213, over 17419.00 frames. ], tot_loss[loss=0.272, simple_loss=0.336, pruned_loss=0.104, over 3813282.99 frames. ], batch size: 101, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:24:27,654 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 07:24:46,725 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:51,039 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:56,976 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:07,680 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-01 07:25:07,911 INFO [train.py:903] (0/4) Epoch 6, batch 2500, loss[loss=0.3053, simple_loss=0.3671, pruned_loss=0.1218, over 19611.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3362, pruned_loss=0.1047, over 3816080.04 frames. ], batch size: 57, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:25:09,078 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.265e+02 8.006e+02 9.274e+02 1.564e+03, threshold=1.601e+03, percent-clipped=1.0 +2023-04-01 07:25:30,299 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:39,443 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1352, 0.9929, 0.9895, 1.2265, 1.1470, 1.2251, 1.2990, 1.2347], + device='cuda:0'), covar=tensor([0.0956, 0.1116, 0.1225, 0.0748, 0.0840, 0.0899, 0.0870, 0.0771], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0244, 0.0235, 0.0268, 0.0259, 0.0226, 0.0227, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 07:26:09,592 INFO [train.py:903] (0/4) Epoch 6, batch 2550, loss[loss=0.3042, simple_loss=0.359, pruned_loss=0.1248, over 19687.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3358, pruned_loss=0.1044, over 3822704.08 frames. 
], batch size: 58, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:26:44,788 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36718.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:26:46,975 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0939, 1.2428, 1.7440, 1.4583, 2.7608, 4.4060, 4.4446, 4.9526], + device='cuda:0'), covar=tensor([0.1548, 0.3051, 0.2894, 0.1867, 0.0480, 0.0143, 0.0132, 0.0062], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0282, 0.0312, 0.0247, 0.0200, 0.0122, 0.0200, 0.0148], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:26:53,889 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9934, 5.3749, 2.8385, 4.7716, 0.9231, 5.0821, 5.2173, 5.3478], + device='cuda:0'), covar=tensor([0.0419, 0.0861, 0.1974, 0.0494, 0.4471, 0.0721, 0.0530, 0.0659], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0314, 0.0372, 0.0291, 0.0353, 0.0313, 0.0294, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 07:27:05,101 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 07:27:10,912 INFO [train.py:903] (0/4) Epoch 6, batch 2600, loss[loss=0.2353, simple_loss=0.2996, pruned_loss=0.08548, over 19393.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3367, pruned_loss=0.1049, over 3814954.94 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:27:12,677 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.492e+02 8.253e+02 1.085e+03 2.742e+03, threshold=1.651e+03, percent-clipped=10.0 +2023-04-01 07:28:14,515 INFO [train.py:903] (0/4) Epoch 6, batch 2650, loss[loss=0.2894, simple_loss=0.3578, pruned_loss=0.1105, over 19787.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3363, pruned_loss=0.1048, over 3808836.02 frames. ], batch size: 56, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:28:37,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 07:28:54,596 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36823.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:29:05,797 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8114, 1.3479, 0.9570, 0.9248, 1.2166, 0.8750, 0.8523, 1.1787], + device='cuda:0'), covar=tensor([0.0488, 0.0490, 0.0881, 0.0456, 0.0400, 0.0980, 0.0497, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0278, 0.0312, 0.0240, 0.0226, 0.0312, 0.0282, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:29:16,460 INFO [train.py:903] (0/4) Epoch 6, batch 2700, loss[loss=0.3397, simple_loss=0.3861, pruned_loss=0.1467, over 19145.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3367, pruned_loss=0.1049, over 3807229.39 frames. ], batch size: 69, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:29:17,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.342e+02 7.427e+02 9.812e+02 2.890e+03, threshold=1.485e+03, percent-clipped=2.0 +2023-04-01 07:30:18,807 INFO [train.py:903] (0/4) Epoch 6, batch 2750, loss[loss=0.2964, simple_loss=0.3553, pruned_loss=0.1187, over 19774.00 frames. 
], tot_loss[loss=0.2741, simple_loss=0.3373, pruned_loss=0.1054, over 3809799.63 frames. ], batch size: 56, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:30:51,769 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36914.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:04,563 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9885, 1.0089, 1.4046, 0.4349, 2.2833, 2.3972, 2.1139, 2.5395], + device='cuda:0'), covar=tensor([0.1405, 0.3241, 0.2992, 0.2208, 0.0407, 0.0202, 0.0372, 0.0225], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0284, 0.0312, 0.0247, 0.0202, 0.0123, 0.0202, 0.0152], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:31:09,568 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:22,223 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:23,026 INFO [train.py:903] (0/4) Epoch 6, batch 2800, loss[loss=0.212, simple_loss=0.2825, pruned_loss=0.07072, over 15677.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3361, pruned_loss=0.1045, over 3808613.15 frames. ], batch size: 34, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:31:24,219 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+02 7.168e+02 8.701e+02 1.243e+03 3.330e+03, threshold=1.740e+03, percent-clipped=17.0 +2023-04-01 07:31:56,914 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:00,390 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:06,257 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:08,504 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:26,414 INFO [train.py:903] (0/4) Epoch 6, batch 2850, loss[loss=0.2361, simple_loss=0.3139, pruned_loss=0.07917, over 19653.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3361, pruned_loss=0.1042, over 3808058.09 frames. ], batch size: 55, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:32:34,757 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2345, 2.1485, 2.1274, 3.4530, 2.0058, 3.2954, 2.9926, 2.0480], + device='cuda:0'), covar=tensor([0.2522, 0.1991, 0.0920, 0.1098, 0.2574, 0.0738, 0.1745, 0.1750], + device='cuda:0'), in_proj_covar=tensor([0.0661, 0.0653, 0.0573, 0.0804, 0.0685, 0.0567, 0.0701, 0.0604], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 07:32:48,220 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1751, 3.7086, 3.8085, 3.7852, 1.2379, 3.5354, 3.1335, 3.4546], + device='cuda:0'), covar=tensor([0.1051, 0.0572, 0.0550, 0.0529, 0.4183, 0.0461, 0.0595, 0.1063], + device='cuda:0'), in_proj_covar=tensor([0.0546, 0.0471, 0.0641, 0.0527, 0.0601, 0.0395, 0.0410, 0.0601], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:32:52,096 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. 
limit=2.0 +2023-04-01 07:33:05,605 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:33:05,639 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3086, 1.3390, 1.3500, 1.6020, 2.8726, 0.9789, 1.8894, 2.9745], + device='cuda:0'), covar=tensor([0.0367, 0.2274, 0.2343, 0.1331, 0.0552, 0.2197, 0.1250, 0.0393], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0315, 0.0315, 0.0290, 0.0307, 0.0314, 0.0289, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:33:28,652 INFO [train.py:903] (0/4) Epoch 6, batch 2900, loss[loss=0.2901, simple_loss=0.3524, pruned_loss=0.1139, over 19349.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3371, pruned_loss=0.1047, over 3811091.80 frames. ], batch size: 66, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:33:28,669 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 07:33:28,987 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6781, 4.1221, 4.3932, 4.3212, 1.5587, 3.9718, 3.5548, 3.9777], + device='cuda:0'), covar=tensor([0.1027, 0.0659, 0.0558, 0.0480, 0.4297, 0.0446, 0.0578, 0.1073], + device='cuda:0'), in_proj_covar=tensor([0.0546, 0.0470, 0.0639, 0.0526, 0.0598, 0.0394, 0.0407, 0.0598], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:33:29,873 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.105e+02 7.947e+02 1.025e+03 2.308e+03, threshold=1.589e+03, percent-clipped=2.0 +2023-04-01 07:33:55,100 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37062.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:33:58,580 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8741, 1.4930, 1.4229, 1.8322, 1.6849, 1.7091, 1.5447, 1.7760], + device='cuda:0'), covar=tensor([0.0751, 0.1270, 0.1205, 0.0817, 0.1006, 0.0419, 0.0929, 0.0562], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0355, 0.0279, 0.0235, 0.0299, 0.0238, 0.0263, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:34:20,465 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:24,021 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37085.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:28,758 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37089.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:29,562 INFO [train.py:903] (0/4) Epoch 6, batch 2950, loss[loss=0.2339, simple_loss=0.3096, pruned_loss=0.07912, over 19762.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3376, pruned_loss=0.1052, over 3796960.27 frames. 
], batch size: 51, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:34:58,745 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2737, 3.6637, 3.7832, 3.7945, 1.4793, 3.5239, 3.1811, 3.4470], + device='cuda:0'), covar=tensor([0.1085, 0.0764, 0.0637, 0.0513, 0.3981, 0.0504, 0.0574, 0.1193], + device='cuda:0'), in_proj_covar=tensor([0.0558, 0.0482, 0.0651, 0.0532, 0.0607, 0.0398, 0.0414, 0.0610], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:35:31,165 INFO [train.py:903] (0/4) Epoch 6, batch 3000, loss[loss=0.2682, simple_loss=0.336, pruned_loss=0.1002, over 19772.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3378, pruned_loss=0.1055, over 3811803.86 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,166 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 07:35:43,641 INFO [train.py:937] (0/4) Epoch 6, validation: loss=0.1968, simple_loss=0.2962, pruned_loss=0.04867, over 944034.00 frames. +2023-04-01 07:35:43,644 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 07:35:44,844 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.497e+02 6.001e+02 7.289e+02 9.626e+02 1.809e+03, threshold=1.458e+03, percent-clipped=5.0 +2023-04-01 07:35:48,627 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 07:36:18,742 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37167.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:36:30,242 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37177.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:36:33,604 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8723, 1.7837, 1.8134, 2.0787, 4.2306, 1.2529, 2.5143, 4.2966], + device='cuda:0'), covar=tensor([0.0308, 0.2445, 0.2345, 0.1391, 0.0530, 0.2206, 0.1132, 0.0303], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0318, 0.0318, 0.0290, 0.0312, 0.0315, 0.0290, 0.0309], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:36:45,878 INFO [train.py:903] (0/4) Epoch 6, batch 3050, loss[loss=0.2593, simple_loss=0.3128, pruned_loss=0.1029, over 19784.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3376, pruned_loss=0.1061, over 3800805.14 frames. ], batch size: 47, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:34,453 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7676, 1.8115, 1.5042, 1.3523, 1.2771, 1.4530, 0.0883, 0.6458], + device='cuda:0'), covar=tensor([0.0221, 0.0238, 0.0165, 0.0220, 0.0527, 0.0230, 0.0504, 0.0478], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0298, 0.0295, 0.0312, 0.0384, 0.0308, 0.0283, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:37:34,884 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 07:37:48,506 INFO [train.py:903] (0/4) Epoch 6, batch 3100, loss[loss=0.2859, simple_loss=0.3519, pruned_loss=0.11, over 18784.00 frames. ], tot_loss[loss=0.2744, simple_loss=0.3375, pruned_loss=0.1056, over 3810487.45 frames. 
], batch size: 74, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:49,789 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 6.699e+02 8.375e+02 1.038e+03 2.239e+03, threshold=1.675e+03, percent-clipped=7.0 +2023-04-01 07:38:28,933 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37273.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:41,342 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37282.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:38:46,565 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37286.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:50,728 INFO [train.py:903] (0/4) Epoch 6, batch 3150, loss[loss=0.2407, simple_loss=0.3038, pruned_loss=0.08884, over 19486.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3377, pruned_loss=0.105, over 3815780.66 frames. ], batch size: 49, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:38:52,157 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7189, 4.0965, 4.3609, 4.3189, 1.5653, 3.9210, 3.5817, 3.9849], + device='cuda:0'), covar=tensor([0.1105, 0.0713, 0.0491, 0.0488, 0.4257, 0.0465, 0.0542, 0.0976], + device='cuda:0'), in_proj_covar=tensor([0.0549, 0.0472, 0.0642, 0.0531, 0.0604, 0.0392, 0.0407, 0.0598], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:38:54,763 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 07:39:13,511 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 07:39:27,509 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37320.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:29,898 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7845, 1.4239, 1.6355, 2.2437, 1.9494, 1.7165, 2.1008, 1.7152], + device='cuda:0'), covar=tensor([0.0927, 0.1442, 0.1179, 0.0920, 0.1041, 0.1179, 0.1041, 0.1008], + device='cuda:0'), in_proj_covar=tensor([0.0228, 0.0243, 0.0237, 0.0273, 0.0262, 0.0229, 0.0226, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 07:39:49,074 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:51,054 INFO [train.py:903] (0/4) Epoch 6, batch 3200, loss[loss=0.2729, simple_loss=0.3382, pruned_loss=0.1038, over 17331.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3362, pruned_loss=0.1045, over 3807847.46 frames. 
], batch size: 101, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:39:52,147 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.326e+02 6.253e+02 8.171e+02 9.916e+02 1.975e+03, threshold=1.634e+03, percent-clipped=4.0 +2023-04-01 07:39:52,616 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:58,078 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37345.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:00,253 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6965, 1.4816, 1.2789, 1.5172, 3.1305, 0.9462, 2.0880, 3.3614], + device='cuda:0'), covar=tensor([0.0381, 0.2410, 0.2626, 0.1526, 0.0597, 0.2431, 0.1248, 0.0349], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0318, 0.0317, 0.0294, 0.0313, 0.0316, 0.0291, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:40:20,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,434 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:29,132 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37370.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:42,587 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:50,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:52,610 INFO [train.py:903] (0/4) Epoch 6, batch 3250, loss[loss=0.2818, simple_loss=0.3517, pruned_loss=0.1059, over 19534.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3359, pruned_loss=0.1044, over 3827124.21 frames. ], batch size: 56, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:30,770 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9504, 1.3554, 1.0202, 0.9216, 1.2019, 0.8352, 0.8273, 1.2695], + device='cuda:0'), covar=tensor([0.0403, 0.0478, 0.0891, 0.0433, 0.0361, 0.0933, 0.0460, 0.0289], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0276, 0.0309, 0.0241, 0.0221, 0.0310, 0.0280, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:41:45,866 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37433.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:41:48,020 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:41:53,540 INFO [train.py:903] (0/4) Epoch 6, batch 3300, loss[loss=0.2632, simple_loss=0.3308, pruned_loss=0.09778, over 19608.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3352, pruned_loss=0.104, over 3843328.06 frames. 
], batch size: 52, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:57,421 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 6.108e+02 8.102e+02 1.002e+03 3.053e+03, threshold=1.620e+03, percent-clipped=3.0 +2023-04-01 07:42:01,121 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 07:42:17,161 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37458.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:42:44,790 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:42:56,307 INFO [train.py:903] (0/4) Epoch 6, batch 3350, loss[loss=0.2963, simple_loss=0.3533, pruned_loss=0.1197, over 18263.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3351, pruned_loss=0.1046, over 3841669.93 frames. ], batch size: 83, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:43:56,162 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37538.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:43:57,998 INFO [train.py:903] (0/4) Epoch 6, batch 3400, loss[loss=0.2812, simple_loss=0.3492, pruned_loss=0.1066, over 19493.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3346, pruned_loss=0.1041, over 3856386.85 frames. ], batch size: 64, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:44:00,246 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.377e+02 8.364e+02 1.096e+03 2.128e+03, threshold=1.673e+03, percent-clipped=5.0 +2023-04-01 07:44:26,239 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37563.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:44:59,262 INFO [train.py:903] (0/4) Epoch 6, batch 3450, loss[loss=0.2476, simple_loss=0.3152, pruned_loss=0.09002, over 19611.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3356, pruned_loss=0.1048, over 3827651.45 frames. ], batch size: 50, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:45:07,201 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 07:45:12,647 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 07:45:39,230 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8611, 3.4834, 2.2152, 3.2294, 0.8188, 3.2051, 3.2031, 3.3197], + device='cuda:0'), covar=tensor([0.0824, 0.1142, 0.2227, 0.0787, 0.4022, 0.0971, 0.0852, 0.1051], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0324, 0.0384, 0.0294, 0.0360, 0.0319, 0.0297, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 07:45:49,554 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:02,045 INFO [train.py:903] (0/4) Epoch 6, batch 3500, loss[loss=0.242, simple_loss=0.3032, pruned_loss=0.09035, over 19759.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3351, pruned_loss=0.1043, over 3831427.40 frames. 
], batch size: 46, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:46:04,580 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.340e+02 8.060e+02 1.060e+03 3.220e+03, threshold=1.612e+03, percent-clipped=3.0 +2023-04-01 07:46:07,389 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:23,301 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5914, 1.8802, 1.6922, 2.2032, 1.8951, 2.5498, 2.7438, 2.3249], + device='cuda:0'), covar=tensor([0.0667, 0.0949, 0.1088, 0.1150, 0.1022, 0.0734, 0.0861, 0.0677], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0244, 0.0237, 0.0274, 0.0262, 0.0230, 0.0226, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:0') +2023-04-01 07:46:38,285 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:54,379 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9784, 1.0726, 1.2748, 1.4772, 2.5517, 0.9827, 1.8945, 2.6750], + device='cuda:0'), covar=tensor([0.0490, 0.2764, 0.2522, 0.1437, 0.0673, 0.2314, 0.1083, 0.0473], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0316, 0.0311, 0.0287, 0.0311, 0.0312, 0.0288, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:47:04,856 INFO [train.py:903] (0/4) Epoch 6, batch 3550, loss[loss=0.3101, simple_loss=0.3836, pruned_loss=0.1184, over 18738.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3351, pruned_loss=0.1042, over 3842628.93 frames. ], batch size: 74, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:47:07,128 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37691.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:12,717 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7490, 0.7652, 0.8909, 0.9637, 1.5533, 0.7546, 1.4222, 1.5215], + device='cuda:0'), covar=tensor([0.0459, 0.1812, 0.1704, 0.1002, 0.0562, 0.1379, 0.1094, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0318, 0.0315, 0.0288, 0.0312, 0.0313, 0.0288, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 07:47:34,876 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37716.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:48,282 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:03,026 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:05,908 INFO [train.py:903] (0/4) Epoch 6, batch 3600, loss[loss=0.3157, simple_loss=0.3694, pruned_loss=0.131, over 19770.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3363, pruned_loss=0.1057, over 3839293.78 frames. ], batch size: 63, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:48:06,563 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-01 07:48:08,231 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.040e+02 8.572e+02 1.227e+03 4.209e+03, threshold=1.714e+03, percent-clipped=12.0 +2023-04-01 07:48:12,136 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:33,220 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:49:08,254 INFO [train.py:903] (0/4) Epoch 6, batch 3650, loss[loss=0.274, simple_loss=0.3451, pruned_loss=0.1015, over 19519.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3363, pruned_loss=0.1056, over 3828778.13 frames. ], batch size: 56, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:08,368 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4020, 2.9863, 1.9386, 2.4126, 2.3429, 2.6269, 0.7658, 2.2270], + device='cuda:0'), covar=tensor([0.0326, 0.0270, 0.0328, 0.0399, 0.0513, 0.0408, 0.0688, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0297, 0.0295, 0.0312, 0.0384, 0.0305, 0.0282, 0.0298], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 07:50:10,246 INFO [train.py:903] (0/4) Epoch 6, batch 3700, loss[loss=0.2759, simple_loss=0.3401, pruned_loss=0.1058, over 18738.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3379, pruned_loss=0.1067, over 3809110.81 frames. ], batch size: 74, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:11,756 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:50:12,527 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 5.969e+02 7.272e+02 1.002e+03 1.787e+03, threshold=1.454e+03, percent-clipped=1.0 +2023-04-01 07:50:27,705 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 07:51:00,200 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6557, 4.0853, 4.3095, 4.2933, 1.6164, 3.9488, 3.5321, 3.9581], + device='cuda:0'), covar=tensor([0.0990, 0.0649, 0.0453, 0.0463, 0.3985, 0.0413, 0.0512, 0.0913], + device='cuda:0'), in_proj_covar=tensor([0.0551, 0.0473, 0.0651, 0.0534, 0.0606, 0.0398, 0.0411, 0.0603], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 07:51:13,478 INFO [train.py:903] (0/4) Epoch 6, batch 3750, loss[loss=0.3025, simple_loss=0.3551, pruned_loss=0.1249, over 12912.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3382, pruned_loss=0.1066, over 3812285.83 frames. ], batch size: 135, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:16,185 INFO [train.py:903] (0/4) Epoch 6, batch 3800, loss[loss=0.3125, simple_loss=0.3657, pruned_loss=0.1296, over 17195.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3377, pruned_loss=0.1058, over 3812551.00 frames. ], batch size: 101, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:18,422 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.762e+02 7.572e+02 9.178e+02 2.007e+03, threshold=1.514e+03, percent-clipped=4.0 +2023-04-01 07:52:47,795 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 07:53:13,676 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. 
limit=2.0 +2023-04-01 07:53:17,773 INFO [train.py:903] (0/4) Epoch 6, batch 3850, loss[loss=0.2365, simple_loss=0.31, pruned_loss=0.08154, over 19735.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3364, pruned_loss=0.1048, over 3821760.03 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:53:25,802 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37997.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:53:30,386 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-38000.pt +2023-04-01 07:53:32,719 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:03,070 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:20,062 INFO [train.py:903] (0/4) Epoch 6, batch 3900, loss[loss=0.3228, simple_loss=0.3735, pruned_loss=0.136, over 13042.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3367, pruned_loss=0.1055, over 3816061.04 frames. ], batch size: 136, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:54:22,365 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.032e+02 6.773e+02 7.927e+02 9.711e+02 2.220e+03, threshold=1.585e+03, percent-clipped=5.0 +2023-04-01 07:54:59,167 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 07:55:24,014 INFO [train.py:903] (0/4) Epoch 6, batch 3950, loss[loss=0.2846, simple_loss=0.3513, pruned_loss=0.1089, over 18335.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3365, pruned_loss=0.1053, over 3810103.93 frames. ], batch size: 83, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:55:27,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 07:55:33,054 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:55:35,397 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8174, 1.8812, 1.8444, 2.5563, 1.8445, 2.4054, 2.3785, 1.7767], + device='cuda:0'), covar=tensor([0.2254, 0.1765, 0.0972, 0.1151, 0.2017, 0.0869, 0.1929, 0.1761], + device='cuda:0'), in_proj_covar=tensor([0.0673, 0.0671, 0.0585, 0.0828, 0.0699, 0.0593, 0.0717, 0.0626], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 07:56:03,022 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:25,972 INFO [train.py:903] (0/4) Epoch 6, batch 4000, loss[loss=0.2878, simple_loss=0.3603, pruned_loss=0.1076, over 19499.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3351, pruned_loss=0.1041, over 3808104.52 frames. ], batch size: 64, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:56:28,272 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 6.005e+02 7.523e+02 9.814e+02 1.567e+03, threshold=1.505e+03, percent-clipped=0.0 +2023-04-01 07:56:52,235 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 07:57:10,251 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 07:57:25,998 INFO [train.py:903] (0/4) Epoch 6, batch 4050, loss[loss=0.2336, simple_loss=0.2925, pruned_loss=0.08735, over 19777.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3348, pruned_loss=0.1036, over 3819049.21 frames. ], batch size: 45, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:23,646 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-01 07:58:27,533 INFO [train.py:903] (0/4) Epoch 6, batch 4100, loss[loss=0.2737, simple_loss=0.3294, pruned_loss=0.1091, over 19612.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3338, pruned_loss=0.1032, over 3817225.32 frames. ], batch size: 50, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:30,599 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.486e+02 7.817e+02 9.790e+02 2.532e+03, threshold=1.563e+03, percent-clipped=8.0 +2023-04-01 07:59:04,836 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 07:59:30,609 INFO [train.py:903] (0/4) Epoch 6, batch 4150, loss[loss=0.2572, simple_loss=0.3333, pruned_loss=0.09058, over 19552.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3337, pruned_loss=0.1032, over 3824076.63 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:34,887 INFO [train.py:903] (0/4) Epoch 6, batch 4200, loss[loss=0.2961, simple_loss=0.3561, pruned_loss=0.1181, over 19375.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3338, pruned_loss=0.1033, over 3828444.48 frames. ], batch size: 70, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:36,308 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:00:37,311 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.104e+02 7.462e+02 9.650e+02 2.123e+03, threshold=1.492e+03, percent-clipped=5.0 +2023-04-01 08:00:38,135 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. limit=5.0 +2023-04-01 08:00:39,355 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 08:01:21,315 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:01:34,728 INFO [train.py:903] (0/4) Epoch 6, batch 4250, loss[loss=0.3687, simple_loss=0.4032, pruned_loss=0.167, over 19195.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3338, pruned_loss=0.103, over 3827846.27 frames. ], batch size: 69, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:01:48,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 08:02:01,581 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 08:02:34,849 INFO [train.py:903] (0/4) Epoch 6, batch 4300, loss[loss=0.236, simple_loss=0.3071, pruned_loss=0.0825, over 19857.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3317, pruned_loss=0.1019, over 3836522.57 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:02:37,125 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+02 6.582e+02 8.580e+02 1.078e+03 2.349e+03, threshold=1.716e+03, percent-clipped=8.0 +2023-04-01 08:02:56,002 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38456.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:03:23,344 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6681, 3.3634, 2.4917, 3.0429, 1.5548, 3.0229, 3.0083, 3.1277], + device='cuda:0'), covar=tensor([0.0811, 0.1039, 0.1719, 0.0719, 0.2733, 0.0940, 0.0870, 0.1110], + device='cuda:0'), in_proj_covar=tensor([0.0367, 0.0320, 0.0379, 0.0291, 0.0358, 0.0317, 0.0302, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 08:03:27,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 08:03:35,923 INFO [train.py:903] (0/4) Epoch 6, batch 4350, loss[loss=0.2436, simple_loss=0.3104, pruned_loss=0.08844, over 19478.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3322, pruned_loss=0.1022, over 3828948.62 frames. ], batch size: 49, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:10,315 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2759, 3.7058, 3.8542, 3.8674, 1.3758, 3.5775, 3.2033, 3.4930], + device='cuda:0'), covar=tensor([0.1056, 0.0717, 0.0555, 0.0474, 0.4502, 0.0530, 0.0579, 0.1070], + device='cuda:0'), in_proj_covar=tensor([0.0560, 0.0479, 0.0657, 0.0528, 0.0616, 0.0398, 0.0414, 0.0608], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 08:04:33,255 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5994, 1.3202, 1.4414, 1.6668, 3.1830, 0.9578, 1.9721, 3.2801], + device='cuda:0'), covar=tensor([0.0326, 0.2226, 0.2180, 0.1296, 0.0533, 0.2222, 0.1188, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0316, 0.0319, 0.0292, 0.0316, 0.0311, 0.0291, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:04:40,411 INFO [train.py:903] (0/4) Epoch 6, batch 4400, loss[loss=0.2841, simple_loss=0.3547, pruned_loss=0.1068, over 19643.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3324, pruned_loss=0.1025, over 3831713.98 frames. ], batch size: 55, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:40,965 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 08:04:42,536 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+02 6.588e+02 8.122e+02 1.160e+03 2.348e+03, threshold=1.624e+03, percent-clipped=4.0 +2023-04-01 08:05:05,910 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 08:05:13,676 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 08:05:30,775 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-01 08:05:40,416 INFO [train.py:903] (0/4) Epoch 6, batch 4450, loss[loss=0.2691, simple_loss=0.3393, pruned_loss=0.0995, over 19787.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3326, pruned_loss=0.1023, over 3838033.36 frames. 
], batch size: 56, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:05:42,850 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3933, 1.1390, 1.3426, 0.8557, 2.3495, 2.9864, 2.7568, 3.1858], + device='cuda:0'), covar=tensor([0.1318, 0.3123, 0.3217, 0.2168, 0.0462, 0.0143, 0.0256, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0283, 0.0316, 0.0248, 0.0201, 0.0128, 0.0203, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:06:42,162 INFO [train.py:903] (0/4) Epoch 6, batch 4500, loss[loss=0.2729, simple_loss=0.3339, pruned_loss=0.1059, over 19595.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3318, pruned_loss=0.1019, over 3817444.34 frames. ], batch size: 50, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:44,523 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 6.139e+02 7.598e+02 9.442e+02 2.713e+03, threshold=1.520e+03, percent-clipped=3.0 +2023-04-01 08:07:28,071 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3929, 1.2397, 1.7113, 1.5634, 3.0603, 4.2514, 4.4076, 4.8280], + device='cuda:0'), covar=tensor([0.1508, 0.3234, 0.3046, 0.1914, 0.0437, 0.0242, 0.0131, 0.0081], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0286, 0.0318, 0.0250, 0.0202, 0.0130, 0.0204, 0.0156], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:07:42,587 INFO [train.py:903] (0/4) Epoch 6, batch 4550, loss[loss=0.2745, simple_loss=0.3459, pruned_loss=0.1015, over 19682.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3315, pruned_loss=0.1019, over 3825459.51 frames. ], batch size: 55, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:07:53,123 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 08:08:10,876 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38712.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:17,336 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 08:08:22,019 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:41,573 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:45,393 INFO [train.py:903] (0/4) Epoch 6, batch 4600, loss[loss=0.2638, simple_loss=0.3383, pruned_loss=0.09462, over 19712.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3323, pruned_loss=0.1023, over 3816978.42 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:08:47,715 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 6.588e+02 8.068e+02 1.040e+03 1.807e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 08:09:22,048 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 08:09:45,876 INFO [train.py:903] (0/4) Epoch 6, batch 4650, loss[loss=0.3364, simple_loss=0.3844, pruned_loss=0.1442, over 17481.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3324, pruned_loss=0.102, over 3819536.90 frames. ], batch size: 101, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:01,541 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-04-01 08:10:10,760 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0725, 5.3447, 2.8666, 4.6591, 1.6195, 5.3028, 5.1947, 5.3958], + device='cuda:0'), covar=tensor([0.0388, 0.0843, 0.1712, 0.0491, 0.3242, 0.0559, 0.0612, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0324, 0.0385, 0.0291, 0.0357, 0.0319, 0.0299, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 08:10:11,707 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 08:10:43,233 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:46,234 INFO [train.py:903] (0/4) Epoch 6, batch 4700, loss[loss=0.2784, simple_loss=0.3392, pruned_loss=0.1088, over 19769.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3337, pruned_loss=0.1033, over 3819478.03 frames. ], batch size: 54, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:46,872 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 08:10:47,718 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:48,620 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.701e+02 8.528e+02 1.085e+03 2.106e+03, threshold=1.706e+03, percent-clipped=3.0 +2023-04-01 08:11:07,249 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 08:11:46,444 INFO [train.py:903] (0/4) Epoch 6, batch 4750, loss[loss=0.209, simple_loss=0.2809, pruned_loss=0.06851, over 19778.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3335, pruned_loss=0.1028, over 3819821.92 frames. ], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:40,567 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8125, 1.2752, 1.3400, 1.4215, 2.4426, 0.9406, 1.7694, 2.5296], + device='cuda:0'), covar=tensor([0.0426, 0.2230, 0.2223, 0.1328, 0.0598, 0.2022, 0.0974, 0.0442], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0309, 0.0313, 0.0289, 0.0309, 0.0308, 0.0288, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:12:47,940 INFO [train.py:903] (0/4) Epoch 6, batch 4800, loss[loss=0.2965, simple_loss=0.3538, pruned_loss=0.1196, over 19660.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.335, pruned_loss=0.1046, over 3816529.18 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:52,286 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 6.641e+02 7.688e+02 1.063e+03 2.770e+03, threshold=1.538e+03, percent-clipped=4.0 +2023-04-01 08:13:49,887 INFO [train.py:903] (0/4) Epoch 6, batch 4850, loss[loss=0.2539, simple_loss=0.3267, pruned_loss=0.09052, over 19650.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3352, pruned_loss=0.1047, over 3817537.86 frames. 
], batch size: 55, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:13:56,377 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0442, 2.8849, 1.8359, 2.0030, 1.8112, 2.1486, 0.4811, 1.9351], + device='cuda:0'), covar=tensor([0.0354, 0.0337, 0.0400, 0.0655, 0.0623, 0.0643, 0.0810, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0298, 0.0300, 0.0318, 0.0383, 0.0312, 0.0286, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:14:14,736 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 08:14:17,462 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4501, 1.1492, 1.6376, 1.3473, 2.8848, 3.6859, 3.4916, 3.9114], + device='cuda:0'), covar=tensor([0.1392, 0.3214, 0.3001, 0.1897, 0.0406, 0.0158, 0.0190, 0.0125], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0283, 0.0316, 0.0246, 0.0201, 0.0129, 0.0201, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:14:36,514 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 08:14:42,310 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 08:14:42,351 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 08:14:51,529 INFO [train.py:903] (0/4) Epoch 6, batch 4900, loss[loss=0.3132, simple_loss=0.3757, pruned_loss=0.1253, over 19591.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3342, pruned_loss=0.1038, over 3814380.34 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:51,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 08:14:55,127 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 6.820e+02 8.455e+02 1.082e+03 3.554e+03, threshold=1.691e+03, percent-clipped=3.0 +2023-04-01 08:15:13,710 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 08:15:52,286 INFO [train.py:903] (0/4) Epoch 6, batch 4950, loss[loss=0.2342, simple_loss=0.3073, pruned_loss=0.08055, over 19624.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3342, pruned_loss=0.1034, over 3821751.62 frames. ], batch size: 50, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:15:56,199 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:11,576 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 08:16:27,947 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:34,752 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 08:16:54,830 INFO [train.py:903] (0/4) Epoch 6, batch 5000, loss[loss=0.2884, simple_loss=0.3434, pruned_loss=0.1168, over 19766.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3343, pruned_loss=0.1039, over 3819567.90 frames. 
], batch size: 54, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:16:58,439 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8621, 1.3645, 1.4505, 1.5278, 3.3435, 0.9245, 2.1391, 3.4949], + device='cuda:0'), covar=tensor([0.0340, 0.2416, 0.2401, 0.1631, 0.0603, 0.2516, 0.1173, 0.0356], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0317, 0.0320, 0.0297, 0.0318, 0.0316, 0.0296, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:16:59,190 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.473e+02 6.699e+02 7.802e+02 1.019e+03 2.317e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 08:17:05,631 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 08:17:14,754 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 08:17:51,526 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:17:57,114 INFO [train.py:903] (0/4) Epoch 6, batch 5050, loss[loss=0.2343, simple_loss=0.3173, pruned_loss=0.0756, over 19538.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.334, pruned_loss=0.1039, over 3801850.12 frames. ], batch size: 56, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:18:09,742 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2618, 2.9288, 2.0184, 2.7103, 0.8665, 2.7880, 2.6748, 2.7989], + device='cuda:0'), covar=tensor([0.1092, 0.1484, 0.2211, 0.0965, 0.4134, 0.1175, 0.1024, 0.1398], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0318, 0.0381, 0.0290, 0.0356, 0.0316, 0.0297, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 08:18:12,171 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39203.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:18:29,793 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 08:18:57,179 INFO [train.py:903] (0/4) Epoch 6, batch 5100, loss[loss=0.3398, simple_loss=0.3908, pruned_loss=0.1444, over 19736.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3345, pruned_loss=0.1041, over 3790503.68 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:19:00,429 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.152e+02 6.811e+02 8.395e+02 1.062e+03 1.934e+03, threshold=1.679e+03, percent-clipped=3.0 +2023-04-01 08:19:05,462 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 08:19:08,950 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 08:19:13,355 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 08:19:57,721 INFO [train.py:903] (0/4) Epoch 6, batch 5150, loss[loss=0.2622, simple_loss=0.3376, pruned_loss=0.09339, over 19484.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3354, pruned_loss=0.1045, over 3783939.13 frames. ], batch size: 64, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:20:08,431 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 08:20:11,734 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:13,849 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:43,363 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 08:21:00,837 INFO [train.py:903] (0/4) Epoch 6, batch 5200, loss[loss=0.2354, simple_loss=0.296, pruned_loss=0.0874, over 19776.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3348, pruned_loss=0.1042, over 3788830.75 frames. ], batch size: 48, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:21:04,236 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.495e+02 6.191e+02 7.598e+02 1.014e+03 2.218e+03, threshold=1.520e+03, percent-clipped=2.0 +2023-04-01 08:21:14,285 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 08:21:45,102 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8355, 1.8853, 1.9244, 2.8513, 1.9646, 2.7117, 2.4115, 1.7405], + device='cuda:0'), covar=tensor([0.2725, 0.2163, 0.1157, 0.1241, 0.2317, 0.0878, 0.2247, 0.1998], + device='cuda:0'), in_proj_covar=tensor([0.0679, 0.0675, 0.0581, 0.0825, 0.0703, 0.0587, 0.0718, 0.0627], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 08:21:56,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 08:22:03,032 INFO [train.py:903] (0/4) Epoch 6, batch 5250, loss[loss=0.2622, simple_loss=0.3218, pruned_loss=0.1013, over 19586.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3347, pruned_loss=0.1042, over 3780214.77 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:05,659 INFO [train.py:903] (0/4) Epoch 6, batch 5300, loss[loss=0.2522, simple_loss=0.3088, pruned_loss=0.09775, over 19765.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3349, pruned_loss=0.1045, over 3777307.38 frames. ], batch size: 46, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:10,367 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.313e+02 6.384e+02 7.596e+02 9.799e+02 2.153e+03, threshold=1.519e+03, percent-clipped=7.0 +2023-04-01 08:23:22,443 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 08:23:56,530 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:24:06,722 INFO [train.py:903] (0/4) Epoch 6, batch 5350, loss[loss=0.2665, simple_loss=0.3354, pruned_loss=0.09882, over 19286.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3338, pruned_loss=0.1035, over 3797394.48 frames. ], batch size: 66, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:24:41,850 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 08:25:07,433 INFO [train.py:903] (0/4) Epoch 6, batch 5400, loss[loss=0.2628, simple_loss=0.3343, pruned_loss=0.09562, over 19737.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3339, pruned_loss=0.1034, over 3811293.06 frames. 
], batch size: 63, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:25:12,677 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-04-01 08:25:15,198 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.754e+02 6.102e+02 7.285e+02 1.015e+03 2.320e+03, threshold=1.457e+03, percent-clipped=6.0 +2023-04-01 08:25:18,913 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39547.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:30,309 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:00,334 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:08,227 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3048, 1.3171, 1.1534, 1.0928, 0.9875, 1.1702, 0.2397, 0.6103], + device='cuda:0'), covar=tensor([0.0206, 0.0222, 0.0134, 0.0171, 0.0377, 0.0206, 0.0419, 0.0341], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0302, 0.0299, 0.0315, 0.0385, 0.0312, 0.0288, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:26:13,207 INFO [train.py:903] (0/4) Epoch 6, batch 5450, loss[loss=0.2536, simple_loss=0.3228, pruned_loss=0.09222, over 19573.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3336, pruned_loss=0.1032, over 3797010.86 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:26:44,785 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2986, 3.8542, 2.4724, 3.4900, 1.2031, 3.4414, 3.5910, 3.6198], + device='cuda:0'), covar=tensor([0.0682, 0.1097, 0.2032, 0.0771, 0.3616, 0.1063, 0.0767, 0.1143], + device='cuda:0'), in_proj_covar=tensor([0.0373, 0.0330, 0.0386, 0.0297, 0.0365, 0.0322, 0.0304, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 08:27:13,643 INFO [train.py:903] (0/4) Epoch 6, batch 5500, loss[loss=0.2592, simple_loss=0.3267, pruned_loss=0.09586, over 19607.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3343, pruned_loss=0.104, over 3802067.58 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:18,176 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.238e+02 6.796e+02 8.775e+02 1.086e+03 2.226e+03, threshold=1.755e+03, percent-clipped=13.0 +2023-04-01 08:27:20,740 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:27:33,892 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39657.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:27:36,852 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 08:27:40,434 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39662.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:28:14,678 INFO [train.py:903] (0/4) Epoch 6, batch 5550, loss[loss=0.3542, simple_loss=0.3998, pruned_loss=0.1543, over 19347.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3342, pruned_loss=0.1042, over 3818516.33 frames. ], batch size: 66, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:28:21,847 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 08:28:28,790 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5588, 3.7775, 4.3017, 4.4376, 1.5142, 3.9960, 3.4074, 3.5410], + device='cuda:0'), covar=tensor([0.1814, 0.1541, 0.1144, 0.1004, 0.6558, 0.1124, 0.1098, 0.2178], + device='cuda:0'), in_proj_covar=tensor([0.0562, 0.0482, 0.0655, 0.0540, 0.0613, 0.0407, 0.0413, 0.0607], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 08:28:30,190 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.23 vs. limit=5.0 +2023-04-01 08:28:47,144 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0120, 1.1198, 1.3937, 0.8455, 2.4467, 2.9709, 2.7697, 3.1696], + device='cuda:0'), covar=tensor([0.1445, 0.3062, 0.2905, 0.2036, 0.0386, 0.0164, 0.0246, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0283, 0.0313, 0.0247, 0.0198, 0.0128, 0.0201, 0.0152], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:28:50,403 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0293, 1.9530, 1.6550, 1.5375, 1.5291, 1.7933, 0.3283, 0.8354], + device='cuda:0'), covar=tensor([0.0278, 0.0270, 0.0183, 0.0293, 0.0564, 0.0298, 0.0535, 0.0480], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0300, 0.0301, 0.0314, 0.0385, 0.0313, 0.0288, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:29:11,948 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 08:29:15,371 INFO [train.py:903] (0/4) Epoch 6, batch 5600, loss[loss=0.2505, simple_loss=0.3122, pruned_loss=0.09433, over 19604.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3336, pruned_loss=0.1035, over 3818374.08 frames. ], batch size: 50, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:29:20,720 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 6.221e+02 7.656e+02 9.358e+02 1.388e+03, threshold=1.531e+03, percent-clipped=0.0 +2023-04-01 08:29:43,080 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:11,505 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:19,675 INFO [train.py:903] (0/4) Epoch 6, batch 5650, loss[loss=0.2737, simple_loss=0.3354, pruned_loss=0.1059, over 19091.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3349, pruned_loss=0.1043, over 3798645.60 frames. ], batch size: 69, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:01,144 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:08,882 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 08:31:12,745 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2451, 1.2879, 1.7170, 1.4279, 2.3755, 1.9589, 2.5020, 0.9755], + device='cuda:0'), covar=tensor([0.1856, 0.3124, 0.1682, 0.1532, 0.1149, 0.1602, 0.1201, 0.2992], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0516, 0.0503, 0.0406, 0.0564, 0.0447, 0.0629, 0.0453], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 08:31:13,795 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39833.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:17,320 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7302, 4.2177, 2.6063, 3.7129, 1.1912, 3.9899, 3.8692, 4.2038], + device='cuda:0'), covar=tensor([0.0576, 0.1014, 0.1884, 0.0749, 0.3883, 0.0772, 0.0708, 0.0797], + device='cuda:0'), in_proj_covar=tensor([0.0367, 0.0323, 0.0377, 0.0290, 0.0357, 0.0310, 0.0297, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:31:21,778 INFO [train.py:903] (0/4) Epoch 6, batch 5700, loss[loss=0.2696, simple_loss=0.3391, pruned_loss=0.1001, over 19671.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3344, pruned_loss=0.1034, over 3809636.40 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:26,540 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.066e+02 6.807e+02 8.639e+02 1.032e+03 2.369e+03, threshold=1.728e+03, percent-clipped=2.0 +2023-04-01 08:31:54,453 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6038, 1.6170, 1.7412, 2.1136, 1.2637, 1.7576, 2.1019, 1.6858], + device='cuda:0'), covar=tensor([0.2343, 0.1872, 0.0978, 0.1024, 0.2110, 0.0934, 0.2152, 0.1735], + device='cuda:0'), in_proj_covar=tensor([0.0681, 0.0675, 0.0581, 0.0825, 0.0706, 0.0591, 0.0718, 0.0622], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 08:32:21,661 INFO [train.py:903] (0/4) Epoch 6, batch 5750, loss[loss=0.3201, simple_loss=0.3709, pruned_loss=0.1346, over 19649.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3357, pruned_loss=0.1042, over 3811446.69 frames. ], batch size: 58, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:32:23,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 08:32:30,902 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 08:32:36,528 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 08:32:42,524 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1942, 2.1088, 1.7132, 1.5766, 1.4237, 1.7384, 0.3401, 0.9821], + device='cuda:0'), covar=tensor([0.0279, 0.0275, 0.0216, 0.0306, 0.0629, 0.0365, 0.0577, 0.0491], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0303, 0.0301, 0.0316, 0.0388, 0.0313, 0.0291, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:32:58,453 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:23,262 INFO [train.py:903] (0/4) Epoch 6, batch 5800, loss[loss=0.3066, simple_loss=0.3772, pruned_loss=0.118, over 19532.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3361, pruned_loss=0.1046, over 3790608.99 frames. ], batch size: 56, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:33:23,652 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:28,185 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:29,042 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+02 6.519e+02 7.840e+02 9.394e+02 2.455e+03, threshold=1.568e+03, percent-clipped=4.0 +2023-04-01 08:34:26,995 INFO [train.py:903] (0/4) Epoch 6, batch 5850, loss[loss=0.2115, simple_loss=0.2776, pruned_loss=0.07268, over 17258.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3348, pruned_loss=0.1032, over 3806277.60 frames. ], batch size: 38, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:34:38,529 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-40000.pt +2023-04-01 08:34:40,698 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40001.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:34:46,871 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 08:34:59,466 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:28,999 INFO [train.py:903] (0/4) Epoch 6, batch 5900, loss[loss=0.2872, simple_loss=0.3525, pruned_loss=0.111, over 17582.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3345, pruned_loss=0.1034, over 3821386.66 frames. ], batch size: 101, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:35:31,453 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 08:35:31,825 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:33,716 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.177e+02 6.123e+02 7.695e+02 9.772e+02 2.844e+03, threshold=1.539e+03, percent-clipped=4.0 +2023-04-01 08:35:53,054 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-01 08:36:21,308 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2952, 2.1856, 1.5867, 1.3646, 2.0814, 1.0404, 1.1504, 1.7745], + device='cuda:0'), covar=tensor([0.0846, 0.0562, 0.0949, 0.0604, 0.0389, 0.1118, 0.0673, 0.0425], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0280, 0.0315, 0.0237, 0.0226, 0.0310, 0.0288, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:36:30,103 INFO [train.py:903] (0/4) Epoch 6, batch 5950, loss[loss=0.2536, simple_loss=0.3232, pruned_loss=0.092, over 19680.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3352, pruned_loss=0.1037, over 3814684.34 frames. ], batch size: 60, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:03,174 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40116.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:37:14,645 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0384, 5.0349, 5.9808, 5.8770, 1.6708, 5.4932, 4.7202, 5.4770], + device='cuda:0'), covar=tensor([0.1098, 0.0598, 0.0390, 0.0403, 0.4976, 0.0366, 0.0489, 0.0854], + device='cuda:0'), in_proj_covar=tensor([0.0564, 0.0481, 0.0657, 0.0541, 0.0613, 0.0408, 0.0417, 0.0604], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 08:37:18,121 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:37:27,905 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-01 08:37:30,832 INFO [train.py:903] (0/4) Epoch 6, batch 6000, loss[loss=0.2782, simple_loss=0.3396, pruned_loss=0.1084, over 19597.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3334, pruned_loss=0.103, over 3808929.07 frames. ], batch size: 52, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:30,833 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 08:37:43,221 INFO [train.py:937] (0/4) Epoch 6, validation: loss=0.1955, simple_loss=0.2951, pruned_loss=0.04789, over 944034.00 frames. +2023-04-01 08:37:43,222 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 08:37:47,775 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 6.298e+02 7.514e+02 9.544e+02 1.960e+03, threshold=1.503e+03, percent-clipped=1.0 +2023-04-01 08:38:29,636 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:38:44,844 INFO [train.py:903] (0/4) Epoch 6, batch 6050, loss[loss=0.2819, simple_loss=0.3516, pruned_loss=0.106, over 19776.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3339, pruned_loss=0.1033, over 3790955.51 frames. 
], batch size: 56, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:38:45,115 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7996, 1.4222, 1.6246, 2.2918, 2.1608, 1.8466, 2.0790, 1.7958], + device='cuda:0'), covar=tensor([0.0673, 0.1009, 0.0856, 0.0623, 0.0628, 0.0737, 0.0765, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0238, 0.0234, 0.0267, 0.0255, 0.0223, 0.0221, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 08:38:53,724 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:14,895 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:24,383 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:43,586 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40236.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:48,081 INFO [train.py:903] (0/4) Epoch 6, batch 6100, loss[loss=0.3143, simple_loss=0.3691, pruned_loss=0.1298, over 19696.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3345, pruned_loss=0.1041, over 3790016.00 frames. ], batch size: 59, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:39:54,269 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 6.367e+02 7.728e+02 1.144e+03 2.582e+03, threshold=1.546e+03, percent-clipped=10.0 +2023-04-01 08:39:54,682 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:51,268 INFO [train.py:903] (0/4) Epoch 6, batch 6150, loss[loss=0.3003, simple_loss=0.3591, pruned_loss=0.1208, over 19086.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3329, pruned_loss=0.1031, over 3803434.61 frames. ], batch size: 69, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:40:51,598 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40290.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:54,070 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:57,798 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0540, 3.6975, 2.1068, 1.9708, 3.1943, 1.6837, 1.0637, 1.8739], + device='cuda:0'), covar=tensor([0.1004, 0.0347, 0.0817, 0.0634, 0.0446, 0.1002, 0.0975, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0280, 0.0314, 0.0234, 0.0223, 0.0308, 0.0284, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:41:19,454 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-01 08:41:36,484 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.1108, 3.7209, 2.5632, 3.3660, 1.2373, 3.4596, 3.4484, 3.6414], + device='cuda:0'), covar=tensor([0.0745, 0.1205, 0.1685, 0.0728, 0.3482, 0.0768, 0.0733, 0.0810], + device='cuda:0'), in_proj_covar=tensor([0.0374, 0.0327, 0.0384, 0.0297, 0.0363, 0.0316, 0.0304, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 08:41:52,313 INFO [train.py:903] (0/4) Epoch 6, batch 6200, loss[loss=0.3088, simple_loss=0.3687, pruned_loss=0.1245, over 19463.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.334, pruned_loss=0.1039, over 3795635.29 frames. ], batch size: 64, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:41:57,219 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 6.810e+02 8.218e+02 1.008e+03 2.334e+03, threshold=1.644e+03, percent-clipped=5.0 +2023-04-01 08:42:01,794 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:42:33,815 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40372.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:42:54,272 INFO [train.py:903] (0/4) Epoch 6, batch 6250, loss[loss=0.3002, simple_loss=0.359, pruned_loss=0.1207, over 19374.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3328, pruned_loss=0.1033, over 3784074.75 frames. ], batch size: 70, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:43:04,605 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:43:27,278 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 08:43:58,137 INFO [train.py:903] (0/4) Epoch 6, batch 6300, loss[loss=0.2328, simple_loss=0.3077, pruned_loss=0.07892, over 19659.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3324, pruned_loss=0.1027, over 3795742.07 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:44:03,775 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.389e+02 8.265e+02 1.061e+03 2.633e+03, threshold=1.653e+03, percent-clipped=7.0 +2023-04-01 08:44:07,500 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0305, 1.4844, 1.6677, 1.9657, 1.9132, 1.8196, 1.5791, 2.0643], + device='cuda:0'), covar=tensor([0.0825, 0.1673, 0.1247, 0.0930, 0.1127, 0.0489, 0.1152, 0.0571], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0360, 0.0284, 0.0237, 0.0300, 0.0247, 0.0273, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:45:00,760 INFO [train.py:903] (0/4) Epoch 6, batch 6350, loss[loss=0.2621, simple_loss=0.3354, pruned_loss=0.09443, over 19595.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3331, pruned_loss=0.1029, over 3818275.81 frames. 
], batch size: 61, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:45:12,818 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:24,298 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:45,004 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40525.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:02,598 INFO [train.py:903] (0/4) Epoch 6, batch 6400, loss[loss=0.2614, simple_loss=0.3336, pruned_loss=0.09462, over 19695.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3333, pruned_loss=0.1029, over 3811925.79 frames. ], batch size: 59, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:46:07,242 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.528e+02 8.039e+02 1.077e+03 1.980e+03, threshold=1.608e+03, percent-clipped=3.0 +2023-04-01 08:46:12,185 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:23,189 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:44,906 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:46,462 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 08:46:52,950 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40580.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:53,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7779, 1.3718, 1.3638, 2.0119, 1.6258, 2.0495, 2.1364, 1.9714], + device='cuda:0'), covar=tensor([0.0796, 0.1010, 0.1116, 0.0933, 0.0986, 0.0735, 0.0919, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0222, 0.0238, 0.0236, 0.0266, 0.0260, 0.0221, 0.0220, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 08:47:04,384 INFO [train.py:903] (0/4) Epoch 6, batch 6450, loss[loss=0.3428, simple_loss=0.3849, pruned_loss=0.1503, over 13550.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.333, pruned_loss=0.1026, over 3810708.29 frames. ], batch size: 136, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:47:38,283 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-01 08:47:50,040 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 08:47:59,669 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:07,614 INFO [train.py:903] (0/4) Epoch 6, batch 6500, loss[loss=0.2574, simple_loss=0.3283, pruned_loss=0.09325, over 19705.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.333, pruned_loss=0.1024, over 3823882.48 frames. ], batch size: 59, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:48:13,023 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 6.039e+02 7.833e+02 1.001e+03 2.233e+03, threshold=1.567e+03, percent-clipped=5.0 +2023-04-01 08:48:15,437 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 08:48:44,317 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:47,930 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:55,147 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 08:49:12,721 INFO [train.py:903] (0/4) Epoch 6, batch 6550, loss[loss=0.2927, simple_loss=0.3554, pruned_loss=0.115, over 19697.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3322, pruned_loss=0.102, over 3826250.00 frames. ], batch size: 59, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:49:15,168 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:18,782 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:50:15,129 INFO [train.py:903] (0/4) Epoch 6, batch 6600, loss[loss=0.2952, simple_loss=0.3502, pruned_loss=0.1201, over 19673.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3326, pruned_loss=0.1024, over 3826215.81 frames. ], batch size: 53, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:50:19,765 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+02 6.030e+02 7.716e+02 9.913e+02 2.888e+03, threshold=1.543e+03, percent-clipped=3.0 +2023-04-01 08:50:26,104 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40749.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:51:17,189 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-01 08:51:17,593 INFO [train.py:903] (0/4) Epoch 6, batch 6650, loss[loss=0.222, simple_loss=0.2945, pruned_loss=0.07475, over 14488.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3319, pruned_loss=0.1019, over 3820213.69 frames. ], batch size: 32, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:51:40,497 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40807.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:51:55,839 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 08:52:19,377 INFO [train.py:903] (0/4) Epoch 6, batch 6700, loss[loss=0.2793, simple_loss=0.3492, pruned_loss=0.1047, over 19612.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3327, pruned_loss=0.1025, over 3811805.76 frames. ], batch size: 57, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:52:24,141 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.981e+02 6.315e+02 8.385e+02 9.952e+02 2.559e+03, threshold=1.677e+03, percent-clipped=5.0 +2023-04-01 08:52:39,403 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:13,414 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:21,378 INFO [train.py:903] (0/4) Epoch 6, batch 6750, loss[loss=0.2474, simple_loss=0.325, pruned_loss=0.08485, over 19606.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3319, pruned_loss=0.1018, over 3817664.94 frames. 
], batch size: 57, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:53:27,466 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7288, 1.4535, 1.4481, 1.6036, 3.2668, 1.0454, 2.1922, 3.5138], + device='cuda:0'), covar=tensor([0.0335, 0.2296, 0.2355, 0.1541, 0.0619, 0.2304, 0.1199, 0.0291], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0310, 0.0315, 0.0292, 0.0315, 0.0310, 0.0289, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:53:42,286 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3453, 2.2236, 1.7488, 1.7487, 1.6253, 1.7154, 0.3554, 1.0697], + device='cuda:0'), covar=tensor([0.0250, 0.0290, 0.0260, 0.0384, 0.0591, 0.0400, 0.0646, 0.0524], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0305, 0.0300, 0.0322, 0.0394, 0.0318, 0.0293, 0.0308], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 08:53:53,173 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:54,385 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9507, 1.5949, 1.5255, 1.9344, 1.8713, 1.8102, 1.5774, 1.8741], + device='cuda:0'), covar=tensor([0.0843, 0.1462, 0.1346, 0.0828, 0.1037, 0.0389, 0.1030, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0358, 0.0283, 0.0236, 0.0299, 0.0241, 0.0270, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:54:04,563 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:14,788 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 08:54:17,527 INFO [train.py:903] (0/4) Epoch 6, batch 6800, loss[loss=0.3028, simple_loss=0.3598, pruned_loss=0.1229, over 19757.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.333, pruned_loss=0.1027, over 3815441.02 frames. ], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:54:23,021 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.879e+02 7.609e+02 1.019e+03 2.150e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 08:54:31,380 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:33,645 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:49,204 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-6.pt +2023-04-01 08:55:04,819 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 08:55:06,219 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 08:55:08,788 INFO [train.py:903] (0/4) Epoch 7, batch 0, loss[loss=0.2292, simple_loss=0.297, pruned_loss=0.08067, over 19368.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.297, pruned_loss=0.08067, over 19368.00 frames. 
], batch size: 47, lr: 1.24e-02, grad_scale: 8.0 +2023-04-01 08:55:08,789 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 08:55:20,409 INFO [train.py:937] (0/4) Epoch 7, validation: loss=0.1957, simple_loss=0.2957, pruned_loss=0.04779, over 944034.00 frames. +2023-04-01 08:55:20,410 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 08:55:21,930 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:30,928 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:32,930 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 08:56:04,635 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:08,594 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 08:56:15,085 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4435, 2.4248, 1.7218, 1.7241, 2.4053, 1.2779, 1.1095, 1.7916], + device='cuda:0'), covar=tensor([0.0790, 0.0499, 0.0849, 0.0574, 0.0340, 0.0983, 0.0726, 0.0411], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0278, 0.0314, 0.0237, 0.0225, 0.0309, 0.0283, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 08:56:15,981 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:18,316 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41015.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:22,012 INFO [train.py:903] (0/4) Epoch 7, batch 50, loss[loss=0.329, simple_loss=0.3885, pruned_loss=0.1347, over 19318.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3305, pruned_loss=0.1008, over 855737.61 frames. ], batch size: 70, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:56:36,108 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41030.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:51,947 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.786e+02 6.089e+02 7.435e+02 1.027e+03 3.072e+03, threshold=1.487e+03, percent-clipped=7.0 +2023-04-01 08:56:56,506 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 08:57:17,945 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:18,085 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:23,454 INFO [train.py:903] (0/4) Epoch 7, batch 100, loss[loss=0.2237, simple_loss=0.2929, pruned_loss=0.07727, over 19579.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3309, pruned_loss=0.1011, over 1515414.81 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:57:34,761 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-01 08:57:46,196 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41088.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:24,483 INFO [train.py:903] (0/4) Epoch 7, batch 150, loss[loss=0.2514, simple_loss=0.3286, pruned_loss=0.08712, over 19778.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3308, pruned_loss=0.1008, over 2029761.39 frames. ], batch size: 56, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:58:26,862 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:36,370 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:56,746 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.181e+02 6.219e+02 8.190e+02 1.094e+03 2.901e+03, threshold=1.638e+03, percent-clipped=4.0 +2023-04-01 08:59:22,647 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 08:59:23,718 INFO [train.py:903] (0/4) Epoch 7, batch 200, loss[loss=0.2711, simple_loss=0.3368, pruned_loss=0.1027, over 19477.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3295, pruned_loss=0.1006, over 2439607.47 frames. ], batch size: 64, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:59:44,056 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5662, 1.5046, 2.0517, 1.6808, 3.2274, 4.7499, 4.6865, 5.0720], + device='cuda:0'), covar=tensor([0.1353, 0.2821, 0.2508, 0.1683, 0.0390, 0.0097, 0.0130, 0.0059], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0284, 0.0314, 0.0247, 0.0205, 0.0131, 0.0202, 0.0156], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:00:27,762 INFO [train.py:903] (0/4) Epoch 7, batch 250, loss[loss=0.2649, simple_loss=0.3363, pruned_loss=0.09678, over 19660.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3317, pruned_loss=0.102, over 2746449.77 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:00:30,418 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-01 09:00:37,150 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 09:00:37,984 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:40,037 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:59,430 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.194e+02 6.866e+02 8.894e+02 1.080e+03 3.290e+03, threshold=1.779e+03, percent-clipped=6.0 +2023-04-01 09:01:06,541 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:22,636 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:30,752 INFO [train.py:903] (0/4) Epoch 7, batch 300, loss[loss=0.2936, simple_loss=0.3514, pruned_loss=0.1179, over 19777.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3315, pruned_loss=0.102, over 2969771.38 frames. 
], batch size: 56, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:31,489 INFO [train.py:903] (0/4) Epoch 7, batch 350, loss[loss=0.2336, simple_loss=0.309, pruned_loss=0.07912, over 19760.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3316, pruned_loss=0.1019, over 3160259.60 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:34,017 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 09:03:02,033 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:04,935 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.797e+02 7.460e+02 9.435e+02 2.818e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-04-01 09:03:22,751 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:32,953 INFO [train.py:903] (0/4) Epoch 7, batch 400, loss[loss=0.3551, simple_loss=0.3975, pruned_loss=0.1563, over 19372.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3309, pruned_loss=0.1012, over 3306103.28 frames. ], batch size: 70, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:03:43,454 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41377.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:54,220 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:20,757 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:23,150 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:34,709 INFO [train.py:903] (0/4) Epoch 7, batch 450, loss[loss=0.2456, simple_loss=0.3163, pruned_loss=0.08739, over 19669.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3307, pruned_loss=0.1009, over 3426697.45 frames. ], batch size: 53, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:05:02,709 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 09:05:03,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 09:05:06,101 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 5.824e+02 7.719e+02 9.807e+02 3.448e+03, threshold=1.544e+03, percent-clipped=7.0 +2023-04-01 09:05:30,639 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:05:37,396 INFO [train.py:903] (0/4) Epoch 7, batch 500, loss[loss=0.2757, simple_loss=0.341, pruned_loss=0.1052, over 19533.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3302, pruned_loss=0.1008, over 3528891.33 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 16.0 +2023-04-01 09:05:44,880 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:06:38,625 INFO [train.py:903] (0/4) Epoch 7, batch 550, loss[loss=0.2175, simple_loss=0.2908, pruned_loss=0.07207, over 19863.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3286, pruned_loss=0.09911, over 3591518.01 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:06:43,702 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41522.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:07:09,851 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 6.032e+02 7.411e+02 9.275e+02 1.625e+03, threshold=1.482e+03, percent-clipped=1.0 +2023-04-01 09:07:37,920 INFO [train.py:903] (0/4) Epoch 7, batch 600, loss[loss=0.2376, simple_loss=0.2974, pruned_loss=0.08887, over 19012.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3297, pruned_loss=0.09982, over 3635467.65 frames. ], batch size: 42, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:07:50,892 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:13,417 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 09:08:16,131 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:39,327 INFO [train.py:903] (0/4) Epoch 7, batch 650, loss[loss=0.2614, simple_loss=0.3214, pruned_loss=0.1006, over 19686.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3302, pruned_loss=0.09987, over 3683613.09 frames. ], batch size: 53, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:08:45,330 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:00,381 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41633.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:13,503 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 5.719e+02 7.418e+02 1.065e+03 4.334e+03, threshold=1.484e+03, percent-clipped=7.0 +2023-04-01 09:09:28,655 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:39,246 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 09:09:40,774 INFO [train.py:903] (0/4) Epoch 7, batch 700, loss[loss=0.2636, simple_loss=0.3318, pruned_loss=0.09768, over 19670.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3317, pruned_loss=0.1006, over 3708762.52 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:09:48,552 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:10:11,474 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 09:10:37,099 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 09:10:45,014 INFO [train.py:903] (0/4) Epoch 7, batch 750, loss[loss=0.2282, simple_loss=0.2973, pruned_loss=0.07954, over 19405.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3298, pruned_loss=0.09925, over 3735532.36 frames. 
], batch size: 48, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:10:59,407 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:15,964 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.751e+02 6.886e+02 8.721e+02 1.519e+03, threshold=1.377e+03, percent-clipped=2.0 +2023-04-01 09:11:31,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:46,763 INFO [train.py:903] (0/4) Epoch 7, batch 800, loss[loss=0.2523, simple_loss=0.3091, pruned_loss=0.09769, over 19765.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3296, pruned_loss=0.09957, over 3746218.48 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:11:56,187 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 09:11:58,683 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:31,056 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:42,769 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0037, 2.0319, 2.1381, 3.1339, 2.0834, 3.1686, 2.9584, 2.0407], + device='cuda:0'), covar=tensor([0.3035, 0.2422, 0.1093, 0.1442, 0.2880, 0.0893, 0.2089, 0.1991], + device='cuda:0'), in_proj_covar=tensor([0.0702, 0.0694, 0.0598, 0.0845, 0.0712, 0.0606, 0.0730, 0.0643], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 09:12:48,118 INFO [train.py:903] (0/4) Epoch 7, batch 850, loss[loss=0.2689, simple_loss=0.3424, pruned_loss=0.09777, over 19519.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3296, pruned_loss=0.09936, over 3759810.44 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:13:10,238 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:23,186 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.000e+02 6.514e+02 8.083e+02 9.646e+02 1.896e+03, threshold=1.617e+03, percent-clipped=5.0 +2023-04-01 09:13:38,252 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 09:13:40,926 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:41,462 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 09:13:50,188 INFO [train.py:903] (0/4) Epoch 7, batch 900, loss[loss=0.2823, simple_loss=0.3571, pruned_loss=0.1037, over 19602.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3282, pruned_loss=0.09874, over 3774396.47 frames. 
], batch size: 57, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:14:29,634 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2268, 1.3242, 1.1589, 0.9781, 1.0480, 1.0728, 0.0246, 0.3403], + device='cuda:0'), covar=tensor([0.0305, 0.0329, 0.0195, 0.0259, 0.0652, 0.0258, 0.0576, 0.0527], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0303, 0.0295, 0.0323, 0.0390, 0.0317, 0.0287, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:14:49,701 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2284, 1.1676, 1.6596, 0.9685, 2.5506, 3.2971, 3.0816, 3.5139], + device='cuda:0'), covar=tensor([0.1363, 0.3059, 0.2721, 0.2079, 0.0415, 0.0149, 0.0206, 0.0142], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0279, 0.0308, 0.0245, 0.0201, 0.0133, 0.0200, 0.0156], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:14:51,573 INFO [train.py:903] (0/4) Epoch 7, batch 950, loss[loss=0.3015, simple_loss=0.3598, pruned_loss=0.1216, over 19647.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3282, pruned_loss=0.09873, over 3790516.15 frames. ], batch size: 55, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:14:55,086 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 09:15:01,482 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:15:02,799 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0218, 3.4948, 2.0082, 2.2640, 3.1490, 1.6768, 0.9925, 1.8036], + device='cuda:0'), covar=tensor([0.0859, 0.0346, 0.0814, 0.0538, 0.0356, 0.0859, 0.0933, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0280, 0.0310, 0.0237, 0.0223, 0.0305, 0.0287, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:15:26,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.318e+02 6.505e+02 7.519e+02 9.487e+02 1.757e+03, threshold=1.504e+03, percent-clipped=1.0 +2023-04-01 09:15:53,965 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41966.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:15:55,891 INFO [train.py:903] (0/4) Epoch 7, batch 1000, loss[loss=0.2283, simple_loss=0.2976, pruned_loss=0.07953, over 19708.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3291, pruned_loss=0.09896, over 3800275.68 frames. ], batch size: 51, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:16:36,144 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-42000.pt +2023-04-01 09:16:45,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8400, 1.6410, 1.3821, 1.8801, 1.9480, 1.6574, 1.5414, 1.8613], + device='cuda:0'), covar=tensor([0.0832, 0.1302, 0.1256, 0.0735, 0.0882, 0.0462, 0.0956, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0361, 0.0289, 0.0238, 0.0303, 0.0248, 0.0274, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:16:48,575 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-01 09:16:56,929 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:16:59,085 INFO [train.py:903] (0/4) Epoch 7, batch 1050, loss[loss=0.2816, simple_loss=0.3379, pruned_loss=0.1127, over 19730.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3286, pruned_loss=0.099, over 3799701.93 frames. ], batch size: 51, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:17:14,962 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6102, 1.2850, 1.3712, 1.8659, 1.4541, 1.7907, 1.7951, 1.5937], + device='cuda:0'), covar=tensor([0.0837, 0.1050, 0.1115, 0.0858, 0.0913, 0.0734, 0.0895, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0234, 0.0234, 0.0265, 0.0255, 0.0217, 0.0218, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 09:17:19,836 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 09:17:29,374 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 09:17:33,746 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+02 5.894e+02 7.129e+02 9.584e+02 2.561e+03, threshold=1.426e+03, percent-clipped=5.0 +2023-04-01 09:18:00,554 INFO [train.py:903] (0/4) Epoch 7, batch 1100, loss[loss=0.2932, simple_loss=0.3314, pruned_loss=0.1275, over 19758.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.329, pruned_loss=0.09931, over 3788807.01 frames. ], batch size: 45, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:18:29,521 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7109, 1.2305, 1.4246, 1.5119, 3.1947, 0.9379, 2.1290, 3.4541], + device='cuda:0'), covar=tensor([0.0369, 0.2576, 0.2426, 0.1679, 0.0636, 0.2498, 0.1210, 0.0316], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0318, 0.0325, 0.0297, 0.0320, 0.0317, 0.0299, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:19:03,370 INFO [train.py:903] (0/4) Epoch 7, batch 1150, loss[loss=0.3056, simple_loss=0.3605, pruned_loss=0.1254, over 18784.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3288, pruned_loss=0.09844, over 3797883.45 frames. ], batch size: 74, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:21,126 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:19:37,737 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.892e+02 7.369e+02 1.011e+03 1.805e+03, threshold=1.474e+03, percent-clipped=4.0 +2023-04-01 09:20:05,775 INFO [train.py:903] (0/4) Epoch 7, batch 1200, loss[loss=0.2189, simple_loss=0.2996, pruned_loss=0.06906, over 19666.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3287, pruned_loss=0.09857, over 3800539.35 frames. ], batch size: 53, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:20:30,599 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 09:21:08,017 INFO [train.py:903] (0/4) Epoch 7, batch 1250, loss[loss=0.248, simple_loss=0.313, pruned_loss=0.09154, over 19613.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3298, pruned_loss=0.09923, over 3805921.59 frames. 
], batch size: 50, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:21:43,467 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 6.237e+02 7.824e+02 1.034e+03 2.254e+03, threshold=1.565e+03, percent-clipped=6.0 +2023-04-01 09:22:09,623 INFO [train.py:903] (0/4) Epoch 7, batch 1300, loss[loss=0.2346, simple_loss=0.2935, pruned_loss=0.08782, over 19715.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3292, pruned_loss=0.09953, over 3810905.47 frames. ], batch size: 45, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:22:09,768 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:22:53,544 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9262, 1.9692, 1.9384, 2.8286, 1.8432, 2.6346, 2.5640, 1.8565], + device='cuda:0'), covar=tensor([0.2550, 0.2105, 0.1059, 0.1270, 0.2433, 0.0872, 0.2057, 0.1938], + device='cuda:0'), in_proj_covar=tensor([0.0698, 0.0700, 0.0595, 0.0843, 0.0716, 0.0608, 0.0729, 0.0641], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 09:23:02,502 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42310.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:23:12,318 INFO [train.py:903] (0/4) Epoch 7, batch 1350, loss[loss=0.2828, simple_loss=0.3447, pruned_loss=0.1104, over 19690.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3296, pruned_loss=0.09979, over 3812727.46 frames. ], batch size: 59, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:23:47,351 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.606e+02 6.578e+02 7.819e+02 1.024e+03 2.032e+03, threshold=1.564e+03, percent-clipped=3.0 +2023-04-01 09:24:15,699 INFO [train.py:903] (0/4) Epoch 7, batch 1400, loss[loss=0.2475, simple_loss=0.3198, pruned_loss=0.08757, over 19775.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3287, pruned_loss=0.09965, over 3815492.25 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:24:34,374 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:39,136 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42387.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:50,678 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:10,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:13,363 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 09:25:17,936 INFO [train.py:903] (0/4) Epoch 7, batch 1450, loss[loss=0.249, simple_loss=0.3266, pruned_loss=0.08568, over 19676.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3297, pruned_loss=0.1, over 3818201.49 frames. 
], batch size: 53, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:25:26,410 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42425.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:25:53,249 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.957e+02 5.854e+02 7.466e+02 9.791e+02 2.115e+03, threshold=1.493e+03, percent-clipped=4.0 +2023-04-01 09:26:19,589 INFO [train.py:903] (0/4) Epoch 7, batch 1500, loss[loss=0.2311, simple_loss=0.303, pruned_loss=0.07959, over 19573.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3288, pruned_loss=0.09906, over 3813539.50 frames. ], batch size: 52, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:01,855 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1953, 2.1632, 2.2564, 3.3981, 2.0868, 3.4757, 3.0516, 2.2332], + device='cuda:0'), covar=tensor([0.2779, 0.2327, 0.0976, 0.1323, 0.2813, 0.0799, 0.2021, 0.1862], + device='cuda:0'), in_proj_covar=tensor([0.0690, 0.0691, 0.0587, 0.0827, 0.0707, 0.0605, 0.0725, 0.0632], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 09:27:04,040 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5121, 3.6738, 3.9722, 3.9518, 2.2723, 3.6572, 3.4388, 3.6953], + device='cuda:0'), covar=tensor([0.0935, 0.1885, 0.0520, 0.0511, 0.3056, 0.0688, 0.0488, 0.0875], + device='cuda:0'), in_proj_covar=tensor([0.0569, 0.0495, 0.0668, 0.0552, 0.0622, 0.0422, 0.0426, 0.0618], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 09:27:15,281 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8410, 1.9996, 2.0101, 2.7697, 2.4520, 2.3390, 1.9214, 2.5788], + device='cuda:0'), covar=tensor([0.0591, 0.1574, 0.1197, 0.0780, 0.1041, 0.0401, 0.0995, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0355, 0.0285, 0.0235, 0.0301, 0.0245, 0.0265, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:27:20,493 INFO [train.py:903] (0/4) Epoch 7, batch 1550, loss[loss=0.2355, simple_loss=0.3049, pruned_loss=0.08304, over 19620.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3287, pruned_loss=0.09909, over 3813860.23 frames. ], batch size: 50, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:29,090 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:27:55,725 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 6.427e+02 8.071e+02 9.919e+02 2.182e+03, threshold=1.614e+03, percent-clipped=7.0 +2023-04-01 09:28:23,017 INFO [train.py:903] (0/4) Epoch 7, batch 1600, loss[loss=0.2562, simple_loss=0.3294, pruned_loss=0.09146, over 18266.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3286, pruned_loss=0.09855, over 3818172.66 frames. ], batch size: 83, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:28:41,292 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 09:29:02,627 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7272, 1.7847, 1.4480, 1.3777, 1.2601, 1.4702, 0.0777, 0.7043], + device='cuda:0'), covar=tensor([0.0291, 0.0296, 0.0201, 0.0253, 0.0584, 0.0289, 0.0528, 0.0461], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0303, 0.0299, 0.0323, 0.0388, 0.0318, 0.0285, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:29:24,654 INFO [train.py:903] (0/4) Epoch 7, batch 1650, loss[loss=0.2908, simple_loss=0.3549, pruned_loss=0.1133, over 18083.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3301, pruned_loss=0.09986, over 3818016.60 frames. ], batch size: 83, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:29:50,117 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:29:59,743 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.059e+02 6.175e+02 7.945e+02 9.824e+02 2.630e+03, threshold=1.589e+03, percent-clipped=4.0 +2023-04-01 09:30:23,042 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:26,305 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:27,187 INFO [train.py:903] (0/4) Epoch 7, batch 1700, loss[loss=0.3161, simple_loss=0.3593, pruned_loss=0.1364, over 13250.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3304, pruned_loss=0.09974, over 3800739.93 frames. ], batch size: 135, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:30:42,949 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42681.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:31:02,744 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 09:31:15,437 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42706.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 09:31:29,069 INFO [train.py:903] (0/4) Epoch 7, batch 1750, loss[loss=0.2539, simple_loss=0.3184, pruned_loss=0.09468, over 19560.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3283, pruned_loss=0.09869, over 3807044.88 frames. ], batch size: 61, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:31:59,553 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:32:05,278 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.682e+02 7.177e+02 9.179e+02 1.731e+03, threshold=1.435e+03, percent-clipped=1.0 +2023-04-01 09:32:23,200 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7380, 3.1538, 3.2128, 3.2238, 1.2177, 3.0349, 2.6738, 2.8972], + device='cuda:0'), covar=tensor([0.1303, 0.0844, 0.0755, 0.0725, 0.4202, 0.0725, 0.0797, 0.1388], + device='cuda:0'), in_proj_covar=tensor([0.0567, 0.0491, 0.0669, 0.0551, 0.0621, 0.0424, 0.0425, 0.0625], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 09:32:33,267 INFO [train.py:903] (0/4) Epoch 7, batch 1800, loss[loss=0.2503, simple_loss=0.3019, pruned_loss=0.09934, over 19791.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3267, pruned_loss=0.09761, over 3825896.45 frames. 
], batch size: 48, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:33:27,633 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 09:33:35,071 INFO [train.py:903] (0/4) Epoch 7, batch 1850, loss[loss=0.3018, simple_loss=0.3487, pruned_loss=0.1274, over 19537.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3277, pruned_loss=0.09872, over 3809231.93 frames. ], batch size: 54, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:33:54,700 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8582, 1.3184, 1.0094, 0.9141, 1.1681, 0.8701, 0.6660, 1.1730], + device='cuda:0'), covar=tensor([0.0479, 0.0628, 0.0927, 0.0489, 0.0391, 0.0953, 0.0586, 0.0340], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0284, 0.0321, 0.0242, 0.0226, 0.0316, 0.0289, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:34:04,789 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 09:34:09,013 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 6.596e+02 7.909e+02 1.066e+03 2.536e+03, threshold=1.582e+03, percent-clipped=10.0 +2023-04-01 09:34:22,419 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:37,124 INFO [train.py:903] (0/4) Epoch 7, batch 1900, loss[loss=0.3461, simple_loss=0.3804, pruned_loss=0.1559, over 13355.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3283, pruned_loss=0.09957, over 3816761.41 frames. ], batch size: 136, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:37,261 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:48,032 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:51,096 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 09:34:55,747 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 09:35:14,054 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:35:21,945 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 09:35:38,556 INFO [train.py:903] (0/4) Epoch 7, batch 1950, loss[loss=0.2348, simple_loss=0.3015, pruned_loss=0.08405, over 19785.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3278, pruned_loss=0.09961, over 3813222.16 frames. ], batch size: 49, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:36:15,248 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.682e+02 8.244e+02 9.665e+02 2.689e+03, threshold=1.649e+03, percent-clipped=3.0 +2023-04-01 09:36:41,126 INFO [train.py:903] (0/4) Epoch 7, batch 2000, loss[loss=0.228, simple_loss=0.2949, pruned_loss=0.08053, over 19752.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.329, pruned_loss=0.1001, over 3807590.14 frames. 
], batch size: 46, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:00,285 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:35,121 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:36,087 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 09:37:43,657 INFO [train.py:903] (0/4) Epoch 7, batch 2050, loss[loss=0.296, simple_loss=0.3552, pruned_loss=0.1184, over 19599.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3287, pruned_loss=0.09983, over 3802105.23 frames. ], batch size: 57, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:56,189 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 09:37:57,386 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 09:38:08,467 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3183, 1.5366, 1.8391, 2.3393, 1.6305, 2.4190, 2.4697, 2.0899], + device='cuda:0'), covar=tensor([0.0760, 0.1057, 0.1081, 0.1147, 0.1092, 0.0733, 0.0899, 0.0673], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0236, 0.0232, 0.0264, 0.0254, 0.0221, 0.0216, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 09:38:17,401 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.956e+02 6.149e+02 7.669e+02 9.586e+02 2.177e+03, threshold=1.534e+03, percent-clipped=1.0 +2023-04-01 09:38:18,627 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 09:38:46,694 INFO [train.py:903] (0/4) Epoch 7, batch 2100, loss[loss=0.224, simple_loss=0.2995, pruned_loss=0.07426, over 19863.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.329, pruned_loss=0.09959, over 3799807.50 frames. ], batch size: 52, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:39:13,037 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 09:39:16,682 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5357, 1.0174, 1.2409, 1.2098, 2.1529, 0.9509, 1.8936, 2.1405], + device='cuda:0'), covar=tensor([0.0514, 0.2491, 0.2408, 0.1388, 0.0750, 0.1889, 0.0870, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0316, 0.0326, 0.0295, 0.0322, 0.0318, 0.0298, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:39:36,358 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 09:39:41,425 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:39:48,067 INFO [train.py:903] (0/4) Epoch 7, batch 2150, loss[loss=0.2291, simple_loss=0.3042, pruned_loss=0.07698, over 19784.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3291, pruned_loss=0.09965, over 3799549.49 frames. 
], batch size: 54, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:39:57,615 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:12,343 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:23,158 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 6.527e+02 7.673e+02 9.951e+02 2.226e+03, threshold=1.535e+03, percent-clipped=3.0 +2023-04-01 09:40:40,206 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 09:40:49,521 INFO [train.py:903] (0/4) Epoch 7, batch 2200, loss[loss=0.2411, simple_loss=0.306, pruned_loss=0.08807, over 19578.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3296, pruned_loss=0.09985, over 3797253.78 frames. ], batch size: 52, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:16,781 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2714, 1.1038, 1.5290, 1.0996, 2.7959, 3.4899, 3.3282, 3.6916], + device='cuda:0'), covar=tensor([0.1403, 0.3143, 0.2951, 0.1921, 0.0399, 0.0131, 0.0205, 0.0150], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0284, 0.0314, 0.0247, 0.0203, 0.0134, 0.0204, 0.0161], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:41:53,420 INFO [train.py:903] (0/4) Epoch 7, batch 2250, loss[loss=0.2364, simple_loss=0.3098, pruned_loss=0.08152, over 19588.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3273, pruned_loss=0.09839, over 3824165.71 frames. ], batch size: 52, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:57,890 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:19,712 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:21,846 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43241.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:27,686 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+02 6.206e+02 7.577e+02 9.251e+02 2.641e+03, threshold=1.515e+03, percent-clipped=5.0 +2023-04-01 09:42:31,784 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3485, 1.5061, 1.9941, 1.7022, 3.1447, 2.4610, 3.4515, 1.3243], + device='cuda:0'), covar=tensor([0.2186, 0.3685, 0.2235, 0.1686, 0.1416, 0.1808, 0.1503, 0.3324], + device='cuda:0'), in_proj_covar=tensor([0.0462, 0.0523, 0.0524, 0.0418, 0.0576, 0.0464, 0.0639, 0.0459], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 09:42:36,529 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3129, 2.3930, 1.6940, 1.4469, 2.1413, 1.1479, 1.1942, 1.8141], + device='cuda:0'), covar=tensor([0.0806, 0.0484, 0.0792, 0.0616, 0.0371, 0.0916, 0.0673, 0.0369], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0280, 0.0318, 0.0239, 0.0222, 0.0313, 0.0281, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:42:52,514 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:56,677 INFO [train.py:903] (0/4) Epoch 7, 
batch 2300, loss[loss=0.2577, simple_loss=0.3267, pruned_loss=0.09435, over 19607.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3291, pruned_loss=0.09911, over 3818590.85 frames. ], batch size: 57, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:43:10,467 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 09:43:59,263 INFO [train.py:903] (0/4) Epoch 7, batch 2350, loss[loss=0.2508, simple_loss=0.3213, pruned_loss=0.09012, over 19541.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3287, pruned_loss=0.09873, over 3824561.40 frames. ], batch size: 54, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:44:07,555 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8446, 1.9462, 1.9852, 2.8001, 1.7964, 2.6112, 2.5611, 1.9433], + device='cuda:0'), covar=tensor([0.2883, 0.2263, 0.1142, 0.1250, 0.2616, 0.0953, 0.2252, 0.1998], + device='cuda:0'), in_proj_covar=tensor([0.0696, 0.0692, 0.0599, 0.0837, 0.0713, 0.0611, 0.0729, 0.0635], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 09:44:20,934 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:34,241 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 6.151e+02 7.487e+02 9.533e+02 1.563e+03, threshold=1.497e+03, percent-clipped=2.0 +2023-04-01 09:44:43,148 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 09:44:46,804 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43356.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:59,568 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 09:45:00,596 INFO [train.py:903] (0/4) Epoch 7, batch 2400, loss[loss=0.292, simple_loss=0.3492, pruned_loss=0.1174, over 13339.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3277, pruned_loss=0.09789, over 3828688.68 frames. ], batch size: 136, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:45:20,051 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:45:49,711 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:46:04,429 INFO [train.py:903] (0/4) Epoch 7, batch 2450, loss[loss=0.2203, simple_loss=0.2912, pruned_loss=0.07468, over 19462.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3264, pruned_loss=0.0971, over 3832083.30 frames. ], batch size: 49, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:46:38,146 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.802e+02 7.669e+02 8.855e+02 2.284e+03, threshold=1.534e+03, percent-clipped=5.0 +2023-04-01 09:46:43,558 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. 
limit=2.0 +2023-04-01 09:47:02,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2652, 1.5534, 1.9293, 1.7575, 2.9712, 4.3552, 4.3754, 4.7712], + device='cuda:0'), covar=tensor([0.1465, 0.2806, 0.2762, 0.1779, 0.0452, 0.0245, 0.0158, 0.0101], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0286, 0.0316, 0.0250, 0.0205, 0.0135, 0.0205, 0.0161], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:47:06,555 INFO [train.py:903] (0/4) Epoch 7, batch 2500, loss[loss=0.2946, simple_loss=0.3525, pruned_loss=0.1183, over 18139.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3265, pruned_loss=0.09702, over 3831669.11 frames. ], batch size: 83, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:47:38,462 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 09:48:09,515 INFO [train.py:903] (0/4) Epoch 7, batch 2550, loss[loss=0.2445, simple_loss=0.3153, pruned_loss=0.08681, over 19741.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3264, pruned_loss=0.09691, over 3818273.08 frames. ], batch size: 51, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:48:25,709 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:48:44,537 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.143e+02 6.070e+02 7.288e+02 8.830e+02 1.707e+03, threshold=1.458e+03, percent-clipped=2.0 +2023-04-01 09:49:01,566 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2723, 1.0931, 1.4986, 1.1419, 2.4588, 3.1671, 2.9964, 3.4794], + device='cuda:0'), covar=tensor([0.1561, 0.4311, 0.3892, 0.2173, 0.0531, 0.0219, 0.0303, 0.0183], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0284, 0.0315, 0.0247, 0.0202, 0.0133, 0.0203, 0.0160], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:49:05,669 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 09:49:10,238 INFO [train.py:903] (0/4) Epoch 7, batch 2600, loss[loss=0.2454, simple_loss=0.318, pruned_loss=0.08638, over 19664.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3269, pruned_loss=0.09756, over 3803463.67 frames. ], batch size: 55, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:49:41,285 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:49:59,882 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-01 09:50:05,431 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,240 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,993 INFO [train.py:903] (0/4) Epoch 7, batch 2650, loss[loss=0.2923, simple_loss=0.3491, pruned_loss=0.1177, over 18136.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3277, pruned_loss=0.09852, over 3786872.03 frames. 
], batch size: 83, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:50:28,013 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8786, 1.8480, 1.6248, 1.4931, 1.4205, 1.6362, 0.3136, 0.9189], + device='cuda:0'), covar=tensor([0.0282, 0.0305, 0.0187, 0.0294, 0.0523, 0.0305, 0.0592, 0.0472], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0310, 0.0303, 0.0329, 0.0396, 0.0322, 0.0291, 0.0306], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:50:35,411 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 09:50:38,352 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43637.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:49,495 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 6.243e+02 7.316e+02 9.618e+02 1.411e+03, threshold=1.463e+03, percent-clipped=0.0 +2023-04-01 09:51:16,495 INFO [train.py:903] (0/4) Epoch 7, batch 2700, loss[loss=0.2258, simple_loss=0.2953, pruned_loss=0.07811, over 19826.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3271, pruned_loss=0.0982, over 3806138.69 frames. ], batch size: 49, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:51:25,752 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1857, 1.1837, 1.1623, 1.3256, 1.0496, 1.3424, 1.3425, 1.2918], + device='cuda:0'), covar=tensor([0.0901, 0.1007, 0.1112, 0.0738, 0.0851, 0.0839, 0.0824, 0.0769], + device='cuda:0'), in_proj_covar=tensor([0.0228, 0.0241, 0.0236, 0.0271, 0.0258, 0.0222, 0.0219, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 09:51:26,106 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. limit=5.0 +2023-04-01 09:51:44,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 09:52:07,225 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9840, 1.9439, 2.2594, 2.9938, 2.7096, 2.5192, 1.8708, 2.8029], + device='cuda:0'), covar=tensor([0.0668, 0.1707, 0.1213, 0.0737, 0.1030, 0.0398, 0.1164, 0.0533], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0357, 0.0291, 0.0237, 0.0304, 0.0247, 0.0271, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:52:19,231 INFO [train.py:903] (0/4) Epoch 7, batch 2750, loss[loss=0.3068, simple_loss=0.3712, pruned_loss=0.1212, over 19481.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3272, pruned_loss=0.09838, over 3807823.33 frames. 
], batch size: 64, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:52:43,620 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4993, 1.0553, 1.2399, 1.2369, 2.1726, 0.9072, 1.7188, 2.2609], + device='cuda:0'), covar=tensor([0.0560, 0.2507, 0.2403, 0.1409, 0.0741, 0.1949, 0.1019, 0.0528], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0313, 0.0322, 0.0291, 0.0319, 0.0312, 0.0297, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:52:55,474 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.869e+02 8.299e+02 1.091e+03 2.331e+03, threshold=1.660e+03, percent-clipped=8.0 +2023-04-01 09:53:20,556 INFO [train.py:903] (0/4) Epoch 7, batch 2800, loss[loss=0.2485, simple_loss=0.3091, pruned_loss=0.09395, over 19771.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3273, pruned_loss=0.09805, over 3819070.34 frames. ], batch size: 46, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:53:35,880 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3235, 1.0920, 1.5845, 1.3593, 2.7110, 3.7593, 3.6050, 4.0154], + device='cuda:0'), covar=tensor([0.1410, 0.2995, 0.2700, 0.1887, 0.0458, 0.0148, 0.0179, 0.0123], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0283, 0.0311, 0.0245, 0.0200, 0.0132, 0.0203, 0.0160], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 09:54:07,559 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:54:22,954 INFO [train.py:903] (0/4) Epoch 7, batch 2850, loss[loss=0.2555, simple_loss=0.3141, pruned_loss=0.09842, over 19389.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3264, pruned_loss=0.09786, over 3813589.94 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:59,107 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.316e+02 8.520e+02 1.005e+03 1.613e+03, threshold=1.704e+03, percent-clipped=0.0 +2023-04-01 09:55:02,921 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5345, 1.2564, 1.3742, 1.2253, 2.1986, 0.8769, 1.8417, 2.3325], + device='cuda:0'), covar=tensor([0.0518, 0.2202, 0.2149, 0.1332, 0.0763, 0.1794, 0.0849, 0.0467], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0315, 0.0326, 0.0296, 0.0323, 0.0316, 0.0300, 0.0319], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:55:25,665 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 09:55:26,287 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 09:55:26,752 INFO [train.py:903] (0/4) Epoch 7, batch 2900, loss[loss=0.1859, simple_loss=0.263, pruned_loss=0.05437, over 19346.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.326, pruned_loss=0.09756, over 3800226.60 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:55:36,388 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:56:28,085 INFO [train.py:903] (0/4) Epoch 7, batch 2950, loss[loss=0.2729, simple_loss=0.3487, pruned_loss=0.09857, over 19486.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3251, pruned_loss=0.09668, over 3813732.83 frames. 
], batch size: 64, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:56:39,830 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:57:04,995 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.923e+02 7.218e+02 9.278e+02 2.092e+03, threshold=1.444e+03, percent-clipped=1.0 +2023-04-01 09:57:30,521 INFO [train.py:903] (0/4) Epoch 7, batch 3000, loss[loss=0.2041, simple_loss=0.279, pruned_loss=0.06457, over 19350.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3247, pruned_loss=0.09664, over 3825992.05 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:57:30,521 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 09:57:43,098 INFO [train.py:937] (0/4) Epoch 7, validation: loss=0.1917, simple_loss=0.2919, pruned_loss=0.04574, over 944034.00 frames. +2023-04-01 09:57:43,099 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 09:57:49,866 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 09:57:54,933 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:08,008 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5423, 2.5078, 1.6863, 1.6407, 2.2339, 1.2321, 1.2965, 1.8624], + device='cuda:0'), covar=tensor([0.0794, 0.0511, 0.0859, 0.0576, 0.0326, 0.0963, 0.0624, 0.0342], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0277, 0.0315, 0.0237, 0.0224, 0.0305, 0.0276, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 09:58:13,799 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:24,506 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-44000.pt +2023-04-01 09:58:47,326 INFO [train.py:903] (0/4) Epoch 7, batch 3050, loss[loss=0.2485, simple_loss=0.3162, pruned_loss=0.09036, over 19623.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3255, pruned_loss=0.09704, over 3830704.28 frames. ], batch size: 50, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:59:24,086 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+02 5.923e+02 7.306e+02 8.819e+02 1.422e+03, threshold=1.461e+03, percent-clipped=0.0 +2023-04-01 09:59:50,240 INFO [train.py:903] (0/4) Epoch 7, batch 3100, loss[loss=0.2248, simple_loss=0.2931, pruned_loss=0.07825, over 19737.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3253, pruned_loss=0.09688, over 3836292.04 frames. ], batch size: 46, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:00:17,741 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2767, 1.4203, 1.8988, 1.5341, 2.7470, 2.2410, 2.8759, 1.1758], + device='cuda:0'), covar=tensor([0.1814, 0.3031, 0.1678, 0.1413, 0.1129, 0.1479, 0.1265, 0.2835], + device='cuda:0'), in_proj_covar=tensor([0.0457, 0.0524, 0.0518, 0.0409, 0.0562, 0.0457, 0.0631, 0.0460], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 10:00:50,927 INFO [train.py:903] (0/4) Epoch 7, batch 3150, loss[loss=0.251, simple_loss=0.3169, pruned_loss=0.09253, over 18132.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3272, pruned_loss=0.09808, over 3826725.25 frames. 
], batch size: 83, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:01:18,570 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 10:01:26,086 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.470e+02 8.010e+02 1.018e+03 2.357e+03, threshold=1.602e+03, percent-clipped=4.0 +2023-04-01 10:01:28,521 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:01:31,187 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 10:01:51,333 INFO [train.py:903] (0/4) Epoch 7, batch 3200, loss[loss=0.3008, simple_loss=0.3608, pruned_loss=0.1204, over 19658.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3264, pruned_loss=0.09784, over 3820600.36 frames. ], batch size: 60, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:02:48,450 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1765, 1.2082, 1.8477, 1.5039, 2.8476, 4.4916, 4.4553, 5.0130], + device='cuda:0'), covar=tensor([0.1531, 0.3138, 0.2704, 0.1813, 0.0456, 0.0151, 0.0134, 0.0069], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0284, 0.0312, 0.0247, 0.0202, 0.0133, 0.0204, 0.0162], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:02:51,382 INFO [train.py:903] (0/4) Epoch 7, batch 3250, loss[loss=0.2708, simple_loss=0.3398, pruned_loss=0.1009, over 19612.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3263, pruned_loss=0.09787, over 3818231.07 frames. ], batch size: 61, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:27,995 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 6.408e+02 8.261e+02 1.024e+03 1.757e+03, threshold=1.652e+03, percent-clipped=4.0 +2023-04-01 10:03:28,411 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:49,182 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:53,343 INFO [train.py:903] (0/4) Epoch 7, batch 3300, loss[loss=0.2544, simple_loss=0.3372, pruned_loss=0.08579, over 19686.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3271, pruned_loss=0.09784, over 3812602.91 frames. ], batch size: 60, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:59,175 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:00,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 10:04:00,594 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44272.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:57,173 INFO [train.py:903] (0/4) Epoch 7, batch 3350, loss[loss=0.2655, simple_loss=0.33, pruned_loss=0.1005, over 19592.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3273, pruned_loss=0.09799, over 3812840.87 frames. 
], batch size: 52, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:05:00,795 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:05:32,241 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 6.124e+02 7.865e+02 1.014e+03 2.362e+03, threshold=1.573e+03, percent-clipped=3.0 +2023-04-01 10:05:58,470 INFO [train.py:903] (0/4) Epoch 7, batch 3400, loss[loss=0.2457, simple_loss=0.302, pruned_loss=0.09473, over 19740.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3276, pruned_loss=0.09823, over 3821664.52 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:06:05,548 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.1970, 5.1069, 6.0423, 5.9473, 1.8888, 5.6138, 4.8794, 5.5583], + device='cuda:0'), covar=tensor([0.1033, 0.0567, 0.0375, 0.0331, 0.4616, 0.0255, 0.0424, 0.0900], + device='cuda:0'), in_proj_covar=tensor([0.0575, 0.0507, 0.0682, 0.0555, 0.0631, 0.0426, 0.0432, 0.0627], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 10:06:20,848 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:01,356 INFO [train.py:903] (0/4) Epoch 7, batch 3450, loss[loss=0.1958, simple_loss=0.2644, pruned_loss=0.06353, over 19406.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3277, pruned_loss=0.09829, over 3827351.05 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:07:06,920 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 10:07:09,602 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5636, 1.9089, 2.1405, 2.4774, 1.8124, 2.3844, 2.7259, 2.5316], + device='cuda:0'), covar=tensor([0.0738, 0.0961, 0.1011, 0.1049, 0.1080, 0.0789, 0.0876, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0238, 0.0234, 0.0269, 0.0253, 0.0220, 0.0218, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 10:07:20,235 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3758, 1.2360, 1.6826, 1.2925, 2.7292, 3.7173, 3.5220, 3.9222], + device='cuda:0'), covar=tensor([0.1371, 0.2940, 0.2688, 0.1809, 0.0430, 0.0157, 0.0197, 0.0137], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0284, 0.0314, 0.0248, 0.0203, 0.0134, 0.0202, 0.0162], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:07:25,067 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:39,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 5.624e+02 7.209e+02 9.228e+02 1.461e+03, threshold=1.442e+03, percent-clipped=0.0 +2023-04-01 10:08:03,657 INFO [train.py:903] (0/4) Epoch 7, batch 3500, loss[loss=0.2566, simple_loss=0.3309, pruned_loss=0.09112, over 19595.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3278, pruned_loss=0.09825, over 3833364.64 frames. ], batch size: 61, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:07,864 INFO [train.py:903] (0/4) Epoch 7, batch 3550, loss[loss=0.2428, simple_loss=0.3013, pruned_loss=0.09217, over 19741.00 frames. 
], tot_loss[loss=0.2622, simple_loss=0.3278, pruned_loss=0.0983, over 3821433.01 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:10,459 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,592 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,714 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:43,711 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 5.736e+02 7.487e+02 9.426e+02 2.431e+03, threshold=1.497e+03, percent-clipped=5.0 +2023-04-01 10:09:52,445 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3335, 2.2680, 1.4902, 1.3022, 1.8331, 1.0478, 1.1874, 1.8154], + device='cuda:0'), covar=tensor([0.0695, 0.0472, 0.0876, 0.0595, 0.0441, 0.1009, 0.0622, 0.0358], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0280, 0.0317, 0.0242, 0.0224, 0.0314, 0.0288, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 10:10:10,023 INFO [train.py:903] (0/4) Epoch 7, batch 3600, loss[loss=0.2376, simple_loss=0.3195, pruned_loss=0.07787, over 19417.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3289, pruned_loss=0.09898, over 3816922.01 frames. ], batch size: 70, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:10:10,221 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:12,083 INFO [train.py:903] (0/4) Epoch 7, batch 3650, loss[loss=0.2774, simple_loss=0.3333, pruned_loss=0.1107, over 19673.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3288, pruned_loss=0.09914, over 3817038.23 frames. ], batch size: 53, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:11:42,351 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:49,797 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 6.222e+02 7.769e+02 9.647e+02 2.431e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:12:12,892 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:13,721 INFO [train.py:903] (0/4) Epoch 7, batch 3700, loss[loss=0.2468, simple_loss=0.3197, pruned_loss=0.08696, over 19676.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3276, pruned_loss=0.09838, over 3820726.15 frames. ], batch size: 53, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:12:37,014 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2332, 5.5419, 3.1483, 4.8847, 1.0866, 5.4465, 5.4247, 5.7870], + device='cuda:0'), covar=tensor([0.0397, 0.0855, 0.1778, 0.0620, 0.4135, 0.0658, 0.0628, 0.0687], + device='cuda:0'), in_proj_covar=tensor([0.0380, 0.0337, 0.0393, 0.0296, 0.0362, 0.0322, 0.0307, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 10:12:45,598 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:49,113 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. 
limit=2.0 +2023-04-01 10:13:16,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,975 INFO [train.py:903] (0/4) Epoch 7, batch 3750, loss[loss=0.2902, simple_loss=0.3567, pruned_loss=0.1119, over 19662.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3281, pruned_loss=0.09889, over 3815828.18 frames. ], batch size: 58, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:13:53,510 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+02 6.149e+02 7.382e+02 9.468e+02 1.650e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-04-01 10:14:18,151 INFO [train.py:903] (0/4) Epoch 7, batch 3800, loss[loss=0.2692, simple_loss=0.3331, pruned_loss=0.1027, over 19768.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3281, pruned_loss=0.09823, over 3819726.90 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:14:50,696 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 10:14:59,337 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0102, 1.2062, 1.3164, 1.5552, 2.6515, 1.0061, 1.8605, 2.7381], + device='cuda:0'), covar=tensor([0.0429, 0.2399, 0.2542, 0.1327, 0.0584, 0.2098, 0.1161, 0.0423], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0314, 0.0325, 0.0292, 0.0318, 0.0311, 0.0294, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 10:15:06,260 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1506, 1.2105, 1.4395, 1.3070, 1.8481, 1.8339, 1.8775, 0.4860], + device='cuda:0'), covar=tensor([0.1997, 0.3489, 0.1915, 0.1614, 0.1169, 0.1721, 0.1176, 0.3274], + device='cuda:0'), in_proj_covar=tensor([0.0460, 0.0531, 0.0526, 0.0416, 0.0572, 0.0460, 0.0636, 0.0464], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 10:15:19,311 INFO [train.py:903] (0/4) Epoch 7, batch 3850, loss[loss=0.2544, simple_loss=0.3287, pruned_loss=0.09007, over 19427.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3292, pruned_loss=0.09916, over 3800593.85 frames. ], batch size: 70, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:15:57,069 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 6.658e+02 8.285e+02 1.095e+03 3.075e+03, threshold=1.657e+03, percent-clipped=10.0 +2023-04-01 10:16:20,993 INFO [train.py:903] (0/4) Epoch 7, batch 3900, loss[loss=0.2736, simple_loss=0.3219, pruned_loss=0.1126, over 19798.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3287, pruned_loss=0.0984, over 3806541.64 frames. 
], batch size: 46, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:16:38,265 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2536, 1.0768, 1.1939, 1.3637, 0.9875, 1.3120, 1.3082, 1.2865], + device='cuda:0'), covar=tensor([0.0888, 0.1092, 0.1135, 0.0738, 0.0935, 0.0808, 0.0873, 0.0804], + device='cuda:0'), in_proj_covar=tensor([0.0222, 0.0234, 0.0231, 0.0263, 0.0250, 0.0217, 0.0215, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 10:16:48,321 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44889.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:15,621 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:24,388 INFO [train.py:903] (0/4) Epoch 7, batch 3950, loss[loss=0.2769, simple_loss=0.3358, pruned_loss=0.1091, over 19682.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3284, pruned_loss=0.09791, over 3819823.07 frames. ], batch size: 60, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:17:29,146 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 10:18:00,741 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+02 5.655e+02 7.377e+02 9.527e+02 2.304e+03, threshold=1.475e+03, percent-clipped=3.0 +2023-04-01 10:18:26,676 INFO [train.py:903] (0/4) Epoch 7, batch 4000, loss[loss=0.2419, simple_loss=0.302, pruned_loss=0.09084, over 19807.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3276, pruned_loss=0.09754, over 3826813.76 frames. ], batch size: 48, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:02,821 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6426, 1.6500, 1.9211, 1.7585, 2.6097, 2.2173, 2.6974, 1.5310], + device='cuda:0'), covar=tensor([0.1426, 0.2511, 0.1442, 0.1167, 0.0944, 0.1238, 0.0914, 0.2323], + device='cuda:0'), in_proj_covar=tensor([0.0460, 0.0530, 0.0526, 0.0416, 0.0572, 0.0464, 0.0633, 0.0461], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 10:19:07,590 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9022, 4.2888, 4.6060, 4.5664, 1.6025, 4.2179, 3.6736, 4.2118], + device='cuda:0'), covar=tensor([0.1108, 0.0648, 0.0451, 0.0454, 0.4438, 0.0441, 0.0582, 0.1027], + device='cuda:0'), in_proj_covar=tensor([0.0588, 0.0514, 0.0691, 0.0566, 0.0643, 0.0432, 0.0439, 0.0645], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 10:19:11,966 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45004.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:19:15,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 10:19:27,795 INFO [train.py:903] (0/4) Epoch 7, batch 4050, loss[loss=0.2598, simple_loss=0.3389, pruned_loss=0.09034, over 18158.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3282, pruned_loss=0.09788, over 3825474.80 frames. 
], batch size: 83, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:39,276 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:05,075 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 6.019e+02 6.941e+02 8.524e+02 1.564e+03, threshold=1.388e+03, percent-clipped=1.0 +2023-04-01 10:20:28,979 INFO [train.py:903] (0/4) Epoch 7, batch 4100, loss[loss=0.2757, simple_loss=0.3424, pruned_loss=0.1045, over 18379.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3276, pruned_loss=0.09758, over 3820558.25 frames. ], batch size: 84, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:20:36,276 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:05,600 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 10:21:07,064 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:30,598 INFO [train.py:903] (0/4) Epoch 7, batch 4150, loss[loss=0.2802, simple_loss=0.3422, pruned_loss=0.1091, over 19588.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.3284, pruned_loss=0.09837, over 3818469.22 frames. ], batch size: 61, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:07,170 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 6.430e+02 8.235e+02 1.004e+03 2.607e+03, threshold=1.647e+03, percent-clipped=6.0 +2023-04-01 10:22:14,699 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2030, 1.1815, 1.4488, 1.3283, 1.8059, 1.7351, 1.7742, 0.5392], + device='cuda:0'), covar=tensor([0.2332, 0.3901, 0.2150, 0.1879, 0.1376, 0.2100, 0.1333, 0.3646], + device='cuda:0'), in_proj_covar=tensor([0.0464, 0.0536, 0.0528, 0.0417, 0.0575, 0.0466, 0.0640, 0.0464], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 10:22:32,969 INFO [train.py:903] (0/4) Epoch 7, batch 4200, loss[loss=0.2593, simple_loss=0.3142, pruned_loss=0.1023, over 16826.00 frames. ], tot_loss[loss=0.262, simple_loss=0.328, pruned_loss=0.09802, over 3815836.31 frames. ], batch size: 37, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:38,319 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 10:22:41,074 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9776, 1.9379, 1.5872, 1.4995, 1.2489, 1.4029, 0.2992, 0.9298], + device='cuda:0'), covar=tensor([0.0481, 0.0478, 0.0334, 0.0512, 0.0991, 0.0631, 0.0830, 0.0733], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0309, 0.0297, 0.0326, 0.0402, 0.0320, 0.0287, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:23:33,350 INFO [train.py:903] (0/4) Epoch 7, batch 4250, loss[loss=0.2186, simple_loss=0.303, pruned_loss=0.06715, over 19772.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.327, pruned_loss=0.09722, over 3821559.65 frames. ], batch size: 56, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:23:49,629 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 10:24:01,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 10:24:12,490 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.934e+02 6.110e+02 8.094e+02 1.072e+03 2.309e+03, threshold=1.619e+03, percent-clipped=5.0 +2023-04-01 10:24:27,303 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:37,360 INFO [train.py:903] (0/4) Epoch 7, batch 4300, loss[loss=0.2226, simple_loss=0.2891, pruned_loss=0.07806, over 19755.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3264, pruned_loss=0.09707, over 3823591.97 frames. ], batch size: 45, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:24:57,127 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45283.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:59,375 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45285.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:27,023 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:32,122 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 10:25:39,637 INFO [train.py:903] (0/4) Epoch 7, batch 4350, loss[loss=0.2672, simple_loss=0.3355, pruned_loss=0.09948, over 19773.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3265, pruned_loss=0.09735, over 3824994.49 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:25:54,093 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.18 vs. limit=5.0 +2023-04-01 10:26:16,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 6.479e+02 7.493e+02 1.002e+03 2.532e+03, threshold=1.499e+03, percent-clipped=5.0 +2023-04-01 10:26:39,963 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45366.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:26:42,861 INFO [train.py:903] (0/4) Epoch 7, batch 4400, loss[loss=0.2536, simple_loss=0.3226, pruned_loss=0.09231, over 17330.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3264, pruned_loss=0.09726, over 3834003.73 frames. ], batch size: 101, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:27:06,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 10:27:14,898 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 10:27:44,661 INFO [train.py:903] (0/4) Epoch 7, batch 4450, loss[loss=0.2676, simple_loss=0.332, pruned_loss=0.1016, over 19685.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3271, pruned_loss=0.09781, over 3824260.80 frames. ], batch size: 58, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:27:44,795 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:16,040 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:22,922 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.630e+02 6.151e+02 7.769e+02 9.586e+02 4.695e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:28:46,109 INFO [train.py:903] (0/4) Epoch 7, batch 4500, loss[loss=0.2726, simple_loss=0.3429, pruned_loss=0.1011, over 19656.00 frames. 
], tot_loss[loss=0.2615, simple_loss=0.3272, pruned_loss=0.09793, over 3830585.80 frames. ], batch size: 60, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:28:46,489 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5278, 1.3879, 1.1581, 1.4758, 1.4644, 1.2118, 1.1352, 1.3463], + device='cuda:0'), covar=tensor([0.1076, 0.1527, 0.1737, 0.1128, 0.1289, 0.0939, 0.1449, 0.1019], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0350, 0.0281, 0.0232, 0.0294, 0.0239, 0.0263, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 10:29:48,542 INFO [train.py:903] (0/4) Epoch 7, batch 4550, loss[loss=0.3161, simple_loss=0.3609, pruned_loss=0.1356, over 12993.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.328, pruned_loss=0.09817, over 3835636.78 frames. ], batch size: 136, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:30:00,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 10:30:09,079 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:23,722 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 10:30:27,043 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.783e+02 6.251e+02 7.662e+02 9.726e+02 2.453e+03, threshold=1.532e+03, percent-clipped=6.0 +2023-04-01 10:30:38,114 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:42,540 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1832, 5.5791, 2.8843, 4.7279, 1.4248, 5.3322, 5.4591, 5.5225], + device='cuda:0'), covar=tensor([0.0368, 0.0723, 0.1658, 0.0505, 0.3451, 0.0499, 0.0549, 0.0519], + device='cuda:0'), in_proj_covar=tensor([0.0378, 0.0328, 0.0386, 0.0291, 0.0353, 0.0317, 0.0304, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 10:30:51,057 INFO [train.py:903] (0/4) Epoch 7, batch 4600, loss[loss=0.2318, simple_loss=0.3118, pruned_loss=0.07588, over 18779.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3287, pruned_loss=0.09814, over 3835483.07 frames. ], batch size: 74, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:21,632 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6762, 1.7496, 1.8623, 2.5266, 1.5905, 2.2130, 2.2702, 1.8555], + device='cuda:0'), covar=tensor([0.2746, 0.2164, 0.1168, 0.1196, 0.2384, 0.1009, 0.2412, 0.1937], + device='cuda:0'), in_proj_covar=tensor([0.0711, 0.0709, 0.0600, 0.0851, 0.0728, 0.0622, 0.0743, 0.0646], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 10:31:48,241 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 10:31:54,302 INFO [train.py:903] (0/4) Epoch 7, batch 4650, loss[loss=0.3372, simple_loss=0.3781, pruned_loss=0.1481, over 13505.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3285, pruned_loss=0.09811, over 3820551.00 frames. ], batch size: 135, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:57,361 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-01 10:32:11,875 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 10:32:24,508 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 10:32:33,275 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.859e+02 7.403e+02 8.847e+02 2.429e+03, threshold=1.481e+03, percent-clipped=2.0 +2023-04-01 10:32:55,949 INFO [train.py:903] (0/4) Epoch 7, batch 4700, loss[loss=0.2416, simple_loss=0.3209, pruned_loss=0.08117, over 19745.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3263, pruned_loss=0.09676, over 3839251.87 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:33:09,339 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 10:33:18,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 10:33:47,800 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45710.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:33:58,622 INFO [train.py:903] (0/4) Epoch 7, batch 4750, loss[loss=0.2532, simple_loss=0.3272, pruned_loss=0.08964, over 19708.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3276, pruned_loss=0.09773, over 3828241.97 frames. ], batch size: 59, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:34:01,206 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:34:11,839 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1067, 2.1178, 1.6130, 1.6019, 1.3310, 1.5526, 0.3515, 1.0136], + device='cuda:0'), covar=tensor([0.0415, 0.0401, 0.0345, 0.0542, 0.0853, 0.0645, 0.0764, 0.0700], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0313, 0.0310, 0.0332, 0.0404, 0.0327, 0.0288, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:34:36,015 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 6.576e+02 8.008e+02 9.322e+02 2.457e+03, threshold=1.602e+03, percent-clipped=6.0 +2023-04-01 10:35:01,470 INFO [train.py:903] (0/4) Epoch 7, batch 4800, loss[loss=0.3273, simple_loss=0.379, pruned_loss=0.1377, over 19288.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3271, pruned_loss=0.09785, over 3821779.53 frames. ], batch size: 66, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:35:14,193 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 10:35:26,434 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:29,614 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:30,839 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8671, 4.2525, 4.4646, 4.4419, 1.5318, 4.1510, 3.7721, 4.1251], + device='cuda:0'), covar=tensor([0.1079, 0.0577, 0.0500, 0.0477, 0.4518, 0.0437, 0.0464, 0.0985], + device='cuda:0'), in_proj_covar=tensor([0.0574, 0.0504, 0.0681, 0.0557, 0.0633, 0.0428, 0.0433, 0.0632], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 10:35:37,407 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6550, 2.0757, 2.0885, 2.9151, 2.0105, 2.5695, 2.6005, 2.5190], + device='cuda:0'), covar=tensor([0.0615, 0.0862, 0.0916, 0.0891, 0.0963, 0.0676, 0.0924, 0.0591], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0237, 0.0233, 0.0264, 0.0253, 0.0220, 0.0217, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 10:35:58,068 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,111 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:02,109 INFO [train.py:903] (0/4) Epoch 7, batch 4850, loss[loss=0.2647, simple_loss=0.3358, pruned_loss=0.09681, over 19655.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3274, pruned_loss=0.09805, over 3815419.51 frames. ], batch size: 58, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:36:10,635 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45825.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:36:23,726 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 10:36:25,100 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 10:36:27,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:40,692 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.989e+02 6.823e+02 8.718e+02 1.115e+03 2.265e+03, threshold=1.744e+03, percent-clipped=6.0 +2023-04-01 10:36:48,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 10:36:54,254 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 10:36:54,274 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 10:37:03,554 INFO [train.py:903] (0/4) Epoch 7, batch 4900, loss[loss=0.2458, simple_loss=0.3195, pruned_loss=0.08604, over 19581.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3273, pruned_loss=0.09807, over 3814466.42 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:37:04,798 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 10:37:14,483 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:37:26,132 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 10:38:05,113 INFO [train.py:903] (0/4) Epoch 7, batch 4950, loss[loss=0.2553, simple_loss=0.3376, pruned_loss=0.08651, over 19684.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3254, pruned_loss=0.097, over 3820836.96 frames. ], batch size: 59, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:38:24,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 10:38:44,273 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+02 6.277e+02 7.738e+02 9.356e+02 2.304e+03, threshold=1.548e+03, percent-clipped=1.0 +2023-04-01 10:38:44,961 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-01 10:38:47,739 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 10:39:09,686 INFO [train.py:903] (0/4) Epoch 7, batch 5000, loss[loss=0.3003, simple_loss=0.3546, pruned_loss=0.123, over 19620.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3246, pruned_loss=0.09647, over 3821574.96 frames. ], batch size: 57, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:39:18,583 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8555, 1.5180, 1.7615, 2.2958, 1.9381, 1.8409, 2.0600, 1.8816], + device='cuda:0'), covar=tensor([0.0863, 0.1370, 0.1098, 0.0925, 0.0996, 0.1039, 0.1053, 0.0859], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0235, 0.0234, 0.0264, 0.0252, 0.0217, 0.0217, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 10:39:20,559 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 10:39:30,928 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 10:39:46,818 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-46000.pt +2023-04-01 10:40:12,336 INFO [train.py:903] (0/4) Epoch 7, batch 5050, loss[loss=0.2046, simple_loss=0.2694, pruned_loss=0.06991, over 19763.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3255, pruned_loss=0.097, over 3825625.76 frames. ], batch size: 46, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:40:49,612 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 10:40:51,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 6.208e+02 7.894e+02 9.516e+02 2.052e+03, threshold=1.579e+03, percent-clipped=3.0 +2023-04-01 10:41:08,544 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46064.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:41:13,137 INFO [train.py:903] (0/4) Epoch 7, batch 5100, loss[loss=0.2315, simple_loss=0.289, pruned_loss=0.08694, over 19734.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3261, pruned_loss=0.09732, over 3805449.43 frames. ], batch size: 46, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:41:24,913 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-01 10:41:28,151 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 10:41:28,587 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46081.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:41:32,521 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 10:42:00,011 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46106.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:42:08,110 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4077, 1.2754, 1.7839, 1.7545, 3.2806, 4.6451, 4.5617, 5.0696], + device='cuda:0'), covar=tensor([0.1459, 0.3148, 0.2831, 0.1666, 0.0381, 0.0139, 0.0132, 0.0078], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0244, 0.0204, 0.0133, 0.0203, 0.0164], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:42:13,351 INFO [train.py:903] (0/4) Epoch 7, batch 5150, loss[loss=0.2199, simple_loss=0.2871, pruned_loss=0.07633, over 19369.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3255, pruned_loss=0.09664, over 3813036.06 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:42:26,341 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 10:42:37,776 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:42:53,637 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.315e+02 6.102e+02 7.228e+02 8.853e+02 1.806e+03, threshold=1.446e+03, percent-clipped=2.0 +2023-04-01 10:43:00,541 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 10:43:15,668 INFO [train.py:903] (0/4) Epoch 7, batch 5200, loss[loss=0.329, simple_loss=0.3859, pruned_loss=0.1361, over 19648.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3259, pruned_loss=0.0968, over 3817572.12 frames. ], batch size: 55, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:43:30,858 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46179.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:43:31,678 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 10:44:18,557 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 10:44:19,699 INFO [train.py:903] (0/4) Epoch 7, batch 5250, loss[loss=0.2576, simple_loss=0.3293, pruned_loss=0.09296, over 19510.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3266, pruned_loss=0.09693, over 3815877.90 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:44:23,422 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:44:52,954 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0903, 1.4594, 1.7086, 1.9731, 1.8766, 1.8003, 1.9556, 1.9574], + device='cuda:0'), covar=tensor([0.0739, 0.1600, 0.1153, 0.0856, 0.1046, 0.0446, 0.0794, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0356, 0.0285, 0.0240, 0.0299, 0.0242, 0.0272, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 10:44:58,429 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 5.998e+02 7.245e+02 8.826e+02 1.462e+03, threshold=1.449e+03, percent-clipped=1.0 +2023-04-01 10:45:00,912 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:45:21,289 INFO [train.py:903] (0/4) Epoch 7, batch 5300, loss[loss=0.2665, simple_loss=0.3406, pruned_loss=0.09619, over 19676.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3265, pruned_loss=0.09695, over 3803668.98 frames. ], batch size: 55, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:45:34,743 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 10:45:39,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 10:45:42,180 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4170, 1.7646, 2.0036, 2.2439, 1.7149, 2.5021, 2.6796, 2.3445], + device='cuda:0'), covar=tensor([0.0731, 0.1031, 0.1051, 0.1211, 0.1145, 0.0732, 0.0912, 0.0681], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0234, 0.0234, 0.0263, 0.0253, 0.0219, 0.0216, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 10:45:53,407 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46294.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:46:22,052 INFO [train.py:903] (0/4) Epoch 7, batch 5350, loss[loss=0.3002, simple_loss=0.365, pruned_loss=0.1177, over 19280.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3255, pruned_loss=0.09628, over 3795295.15 frames. ], batch size: 66, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:46:46,793 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:47:00,313 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 10:47:03,770 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.681e+02 7.157e+02 9.578e+02 3.754e+03, threshold=1.431e+03, percent-clipped=4.0 +2023-04-01 10:47:12,116 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2475, 2.1627, 1.7767, 1.6413, 1.5403, 1.7705, 0.3111, 1.0640], + device='cuda:0'), covar=tensor([0.0354, 0.0369, 0.0280, 0.0439, 0.0728, 0.0472, 0.0718, 0.0645], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0307, 0.0304, 0.0328, 0.0397, 0.0317, 0.0287, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:47:26,532 INFO [train.py:903] (0/4) Epoch 7, batch 5400, loss[loss=0.276, simple_loss=0.3437, pruned_loss=0.1041, over 19658.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3256, pruned_loss=0.0958, over 3799717.83 frames. ], batch size: 55, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:47:37,298 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.96 vs. limit=5.0 +2023-04-01 10:48:29,996 INFO [train.py:903] (0/4) Epoch 7, batch 5450, loss[loss=0.2864, simple_loss=0.358, pruned_loss=0.1074, over 19534.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3269, pruned_loss=0.09686, over 3808009.56 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:48:35,958 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0174, 5.1011, 5.8762, 5.7648, 1.6711, 5.5020, 4.7131, 5.4373], + device='cuda:0'), covar=tensor([0.1065, 0.0615, 0.0443, 0.0356, 0.5100, 0.0304, 0.0417, 0.0935], + device='cuda:0'), in_proj_covar=tensor([0.0578, 0.0511, 0.0691, 0.0573, 0.0642, 0.0438, 0.0437, 0.0644], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 10:48:49,874 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:10,731 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 7.102e+02 8.361e+02 1.036e+03 2.875e+03, threshold=1.672e+03, percent-clipped=7.0 +2023-04-01 10:49:23,094 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46460.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:31,708 INFO [train.py:903] (0/4) Epoch 7, batch 5500, loss[loss=0.2413, simple_loss=0.3184, pruned_loss=0.0821, over 19801.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.327, pruned_loss=0.09706, over 3824087.74 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:49:59,136 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 10:50:20,916 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:50:33,168 INFO [train.py:903] (0/4) Epoch 7, batch 5550, loss[loss=0.2811, simple_loss=0.3465, pruned_loss=0.1078, over 19587.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3272, pruned_loss=0.09726, over 3822511.84 frames. ], batch size: 61, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:50:43,607 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 10:50:51,456 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:51:02,836 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 10:51:15,055 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 5.815e+02 7.044e+02 9.146e+02 3.032e+03, threshold=1.409e+03, percent-clipped=3.0 +2023-04-01 10:51:31,073 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 10:51:35,768 INFO [train.py:903] (0/4) Epoch 7, batch 5600, loss[loss=0.2036, simple_loss=0.2739, pruned_loss=0.0666, over 19575.00 frames. ], tot_loss[loss=0.261, simple_loss=0.327, pruned_loss=0.09753, over 3816646.87 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:06,759 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:32,928 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:38,706 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:40,323 INFO [train.py:903] (0/4) Epoch 7, batch 5650, loss[loss=0.2686, simple_loss=0.3348, pruned_loss=0.1012, over 19679.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3289, pruned_loss=0.09897, over 3810613.24 frames. ], batch size: 53, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:42,899 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:58,471 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.98 vs. limit=5.0 +2023-04-01 10:53:03,523 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46638.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:20,360 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.976e+02 7.674e+02 9.559e+02 1.706e+03, threshold=1.535e+03, percent-clipped=4.0 +2023-04-01 10:53:28,315 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 10:53:42,094 INFO [train.py:903] (0/4) Epoch 7, batch 5700, loss[loss=0.26, simple_loss=0.3212, pruned_loss=0.09942, over 18598.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3286, pruned_loss=0.0985, over 3819362.90 frames. ], batch size: 41, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:53:53,874 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5742, 1.2517, 1.7518, 1.6710, 3.1137, 4.4411, 4.4370, 4.8649], + device='cuda:0'), covar=tensor([0.1486, 0.3307, 0.3078, 0.1841, 0.0430, 0.0159, 0.0154, 0.0087], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0284, 0.0315, 0.0245, 0.0205, 0.0135, 0.0203, 0.0168], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 10:54:43,275 INFO [train.py:903] (0/4) Epoch 7, batch 5750, loss[loss=0.2756, simple_loss=0.3359, pruned_loss=0.1077, over 19797.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3277, pruned_loss=0.09815, over 3830201.43 frames. 
], batch size: 56, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:54:45,654 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 10:54:55,173 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 10:55:00,701 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 10:55:25,496 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+02 6.148e+02 7.021e+02 8.191e+02 1.564e+03, threshold=1.404e+03, percent-clipped=1.0 +2023-04-01 10:55:28,126 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:55:28,163 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9588, 1.6468, 1.5604, 2.0728, 1.9236, 1.7314, 1.6632, 1.9295], + device='cuda:0'), covar=tensor([0.0815, 0.1603, 0.1363, 0.0898, 0.1120, 0.0499, 0.1044, 0.0583], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0355, 0.0284, 0.0237, 0.0297, 0.0241, 0.0269, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 10:55:45,406 INFO [train.py:903] (0/4) Epoch 7, batch 5800, loss[loss=0.2839, simple_loss=0.352, pruned_loss=0.1078, over 19590.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.328, pruned_loss=0.09824, over 3825341.56 frames. ], batch size: 61, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:56:49,193 INFO [train.py:903] (0/4) Epoch 7, batch 5850, loss[loss=0.228, simple_loss=0.3097, pruned_loss=0.07321, over 19658.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3282, pruned_loss=0.0984, over 3827360.83 frames. ], batch size: 53, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:02,082 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:57:29,390 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.374e+02 9.303e+02 2.879e+03, threshold=1.475e+03, percent-clipped=6.0 +2023-04-01 10:57:51,527 INFO [train.py:903] (0/4) Epoch 7, batch 5900, loss[loss=0.2802, simple_loss=0.3469, pruned_loss=0.1068, over 19580.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3276, pruned_loss=0.09806, over 3827620.04 frames. ], batch size: 61, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:57,284 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 10:58:16,945 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 10:58:52,935 INFO [train.py:903] (0/4) Epoch 7, batch 5950, loss[loss=0.2172, simple_loss=0.2991, pruned_loss=0.06768, over 19760.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3278, pruned_loss=0.09778, over 3826114.27 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:59:34,059 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.644e+02 7.805e+02 9.690e+02 1.794e+03, threshold=1.561e+03, percent-clipped=4.0 +2023-04-01 10:59:40,943 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:48,548 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:52,255 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-01 10:59:52,799 INFO [train.py:903] (0/4) Epoch 7, batch 6000, loss[loss=0.2656, simple_loss=0.3359, pruned_loss=0.09761, over 19791.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3281, pruned_loss=0.09813, over 3829656.07 frames. ], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 10:59:52,799 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 11:00:05,285 INFO [train.py:937] (0/4) Epoch 7, validation: loss=0.1903, simple_loss=0.2902, pruned_loss=0.04516, over 944034.00 frames. +2023-04-01 11:00:05,286 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 11:00:57,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47009.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:09,015 INFO [train.py:903] (0/4) Epoch 7, batch 6050, loss[loss=0.2508, simple_loss=0.3263, pruned_loss=0.08768, over 19699.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3257, pruned_loss=0.09654, over 3830388.48 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:01:30,910 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:49,944 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.317e+02 6.018e+02 7.577e+02 1.005e+03 2.434e+03, threshold=1.515e+03, percent-clipped=7.0 +2023-04-01 11:02:13,575 INFO [train.py:903] (0/4) Epoch 7, batch 6100, loss[loss=0.2146, simple_loss=0.2804, pruned_loss=0.07436, over 19729.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.326, pruned_loss=0.09669, over 3808883.05 frames. ], batch size: 45, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:02:18,714 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:26,714 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:41,737 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9548, 4.4122, 4.7608, 4.6991, 1.7983, 4.3815, 3.8023, 4.3944], + device='cuda:0'), covar=tensor([0.1117, 0.0572, 0.0416, 0.0399, 0.4123, 0.0381, 0.0519, 0.0879], + device='cuda:0'), in_proj_covar=tensor([0.0584, 0.0511, 0.0694, 0.0574, 0.0647, 0.0446, 0.0440, 0.0655], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 11:03:15,755 INFO [train.py:903] (0/4) Epoch 7, batch 6150, loss[loss=0.2524, simple_loss=0.3336, pruned_loss=0.0856, over 19789.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3253, pruned_loss=0.09613, over 3811389.92 frames. 
], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:03:36,425 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4520, 2.2813, 1.5952, 1.4074, 1.9358, 1.0987, 1.1432, 1.7004], + device='cuda:0'), covar=tensor([0.0768, 0.0502, 0.0764, 0.0588, 0.0449, 0.0953, 0.0672, 0.0406], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0283, 0.0314, 0.0239, 0.0226, 0.0311, 0.0287, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:03:41,636 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 11:03:56,564 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.986e+02 7.602e+02 9.492e+02 2.168e+03, threshold=1.520e+03, percent-clipped=5.0 +2023-04-01 11:04:01,805 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-01 11:04:15,667 INFO [train.py:903] (0/4) Epoch 7, batch 6200, loss[loss=0.2791, simple_loss=0.3399, pruned_loss=0.1092, over 19678.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.325, pruned_loss=0.09603, over 3813528.68 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:04:20,338 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:05:15,435 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8507, 1.5414, 1.6016, 2.0737, 1.6954, 2.1902, 2.2248, 2.0079], + device='cuda:0'), covar=tensor([0.0769, 0.1087, 0.1026, 0.0892, 0.0937, 0.0714, 0.0856, 0.0647], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0238, 0.0233, 0.0264, 0.0253, 0.0219, 0.0213, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:05:17,523 INFO [train.py:903] (0/4) Epoch 7, batch 6250, loss[loss=0.2622, simple_loss=0.3315, pruned_loss=0.09644, over 19537.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3239, pruned_loss=0.09536, over 3827812.66 frames. ], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:05:47,058 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 11:05:57,375 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 6.379e+02 7.672e+02 9.726e+02 2.182e+03, threshold=1.534e+03, percent-clipped=2.0 +2023-04-01 11:06:08,287 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:06:15,454 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0370, 3.5947, 2.0138, 1.5262, 3.0171, 1.3608, 1.1780, 2.1377], + device='cuda:0'), covar=tensor([0.0738, 0.0231, 0.0610, 0.0634, 0.0322, 0.0907, 0.0766, 0.0397], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0285, 0.0317, 0.0241, 0.0226, 0.0314, 0.0288, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:06:19,223 INFO [train.py:903] (0/4) Epoch 7, batch 6300, loss[loss=0.2228, simple_loss=0.2949, pruned_loss=0.07537, over 19837.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.324, pruned_loss=0.0952, over 3843584.07 frames. 
], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:06:43,112 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:21,227 INFO [train.py:903] (0/4) Epoch 7, batch 6350, loss[loss=0.2103, simple_loss=0.288, pruned_loss=0.06635, over 19474.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3239, pruned_loss=0.09496, over 3839197.58 frames. ], batch size: 49, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:07:33,170 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:36,026 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 11:07:41,269 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47335.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:49,423 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.25 vs. limit=5.0 +2023-04-01 11:08:02,949 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 5.885e+02 7.911e+02 1.072e+03 3.322e+03, threshold=1.582e+03, percent-clipped=7.0 +2023-04-01 11:08:05,647 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:13,857 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:22,753 INFO [train.py:903] (0/4) Epoch 7, batch 6400, loss[loss=0.266, simple_loss=0.3363, pruned_loss=0.09787, over 19668.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3238, pruned_loss=0.09482, over 3845430.45 frames. ], batch size: 58, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:08:48,782 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-04-01 11:09:00,195 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:09:25,569 INFO [train.py:903] (0/4) Epoch 7, batch 6450, loss[loss=0.2811, simple_loss=0.3522, pruned_loss=0.105, over 19677.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3257, pruned_loss=0.09583, over 3823431.72 frames. ], batch size: 55, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:05,689 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 6.283e+02 7.588e+02 1.008e+03 1.535e+03, threshold=1.518e+03, percent-clipped=0.0 +2023-04-01 11:10:08,031 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 11:10:26,639 INFO [train.py:903] (0/4) Epoch 7, batch 6500, loss[loss=0.237, simple_loss=0.3141, pruned_loss=0.0799, over 19662.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3241, pruned_loss=0.09496, over 3813782.07 frames. ], batch size: 58, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:29,886 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 11:11:27,822 INFO [train.py:903] (0/4) Epoch 7, batch 6550, loss[loss=0.2614, simple_loss=0.3323, pruned_loss=0.09524, over 18127.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3252, pruned_loss=0.09563, over 3803273.80 frames. 
], batch size: 83, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:11:58,213 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:12:10,160 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+02 6.347e+02 8.071e+02 1.082e+03 2.174e+03, threshold=1.614e+03, percent-clipped=4.0 +2023-04-01 11:12:29,838 INFO [train.py:903] (0/4) Epoch 7, batch 6600, loss[loss=0.2858, simple_loss=0.3489, pruned_loss=0.1113, over 19527.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3257, pruned_loss=0.09591, over 3816702.64 frames. ], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:12:30,228 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:00,157 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 11:13:13,378 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:31,696 INFO [train.py:903] (0/4) Epoch 7, batch 6650, loss[loss=0.2592, simple_loss=0.3323, pruned_loss=0.09308, over 19496.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3262, pruned_loss=0.09628, over 3825756.89 frames. ], batch size: 64, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:14:11,831 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 6.649e+02 7.986e+02 1.008e+03 1.623e+03, threshold=1.597e+03, percent-clipped=0.0 +2023-04-01 11:14:28,114 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2161, 1.1180, 1.1502, 1.2663, 0.9256, 1.3354, 1.3781, 1.1774], + device='cuda:0'), covar=tensor([0.0893, 0.1043, 0.1094, 0.0770, 0.0904, 0.0826, 0.0823, 0.0789], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0237, 0.0234, 0.0267, 0.0251, 0.0221, 0.0216, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:14:32,252 INFO [train.py:903] (0/4) Epoch 7, batch 6700, loss[loss=0.2952, simple_loss=0.351, pruned_loss=0.1197, over 19674.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3263, pruned_loss=0.09595, over 3824288.28 frames. ], batch size: 53, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:30,802 INFO [train.py:903] (0/4) Epoch 7, batch 6750, loss[loss=0.28, simple_loss=0.3317, pruned_loss=0.1142, over 19463.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3257, pruned_loss=0.09582, over 3818778.44 frames. ], batch size: 49, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:31,149 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:15:31,676 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 11:15:52,143 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. 
limit=2.0 +2023-04-01 11:15:58,012 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:09,116 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.932e+02 5.831e+02 7.200e+02 8.445e+02 1.747e+03, threshold=1.440e+03, percent-clipped=3.0 +2023-04-01 11:16:15,147 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:18,707 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6240, 2.3582, 1.7582, 1.6939, 2.0410, 1.2860, 1.3961, 1.8332], + device='cuda:0'), covar=tensor([0.0690, 0.0448, 0.0714, 0.0509, 0.0465, 0.0889, 0.0605, 0.0425], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0282, 0.0314, 0.0241, 0.0230, 0.0310, 0.0286, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:16:28,694 INFO [train.py:903] (0/4) Epoch 7, batch 6800, loss[loss=0.2133, simple_loss=0.2824, pruned_loss=0.07207, over 19756.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3253, pruned_loss=0.0954, over 3822335.85 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:16:33,961 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 11:16:59,276 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-7.pt +2023-04-01 11:17:14,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 11:17:15,773 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 11:17:18,393 INFO [train.py:903] (0/4) Epoch 8, batch 0, loss[loss=0.2561, simple_loss=0.332, pruned_loss=0.09009, over 19671.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.332, pruned_loss=0.09009, over 19671.00 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:17:18,394 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 11:17:30,977 INFO [train.py:937] (0/4) Epoch 8, validation: loss=0.1916, simple_loss=0.2915, pruned_loss=0.0458, over 944034.00 frames. +2023-04-01 11:17:30,978 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 11:17:41,959 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 11:17:43,286 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:07,516 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47827.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:18:15,587 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5316, 1.6156, 1.7261, 2.1369, 1.3676, 1.6799, 2.0452, 1.6525], + device='cuda:0'), covar=tensor([0.2619, 0.2063, 0.1097, 0.1194, 0.2478, 0.1162, 0.2516, 0.1961], + device='cuda:0'), in_proj_covar=tensor([0.0713, 0.0720, 0.0605, 0.0849, 0.0731, 0.0631, 0.0750, 0.0652], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:18:32,756 INFO [train.py:903] (0/4) Epoch 8, batch 50, loss[loss=0.2463, simple_loss=0.3236, pruned_loss=0.08452, over 19543.00 frames. 
], tot_loss[loss=0.2651, simple_loss=0.3315, pruned_loss=0.0993, over 868754.44 frames. ], batch size: 56, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:18:38,615 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 5.925e+02 7.453e+02 9.478e+02 2.348e+03, threshold=1.491e+03, percent-clipped=8.0 +2023-04-01 11:18:44,797 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:19:06,101 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 11:19:32,492 INFO [train.py:903] (0/4) Epoch 8, batch 100, loss[loss=0.3056, simple_loss=0.366, pruned_loss=0.1226, over 19295.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3276, pruned_loss=0.09801, over 1530344.41 frames. ], batch size: 66, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:19:42,598 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 11:20:32,672 INFO [train.py:903] (0/4) Epoch 8, batch 150, loss[loss=0.2677, simple_loss=0.3445, pruned_loss=0.0955, over 19674.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3287, pruned_loss=0.09788, over 2035841.21 frames. ], batch size: 58, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:20:38,423 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.629e+02 6.208e+02 7.626e+02 9.440e+02 2.273e+03, threshold=1.525e+03, percent-clipped=3.0 +2023-04-01 11:20:38,856 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2618, 2.9356, 1.9394, 2.0277, 1.8987, 2.4042, 0.7259, 2.0499], + device='cuda:0'), covar=tensor([0.0296, 0.0315, 0.0407, 0.0561, 0.0636, 0.0487, 0.0720, 0.0623], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0314, 0.0307, 0.0325, 0.0398, 0.0322, 0.0285, 0.0311], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:21:07,721 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:21:29,220 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 11:21:32,695 INFO [train.py:903] (0/4) Epoch 8, batch 200, loss[loss=0.2513, simple_loss=0.3199, pruned_loss=0.09131, over 17501.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3271, pruned_loss=0.09732, over 2422774.94 frames. ], batch size: 101, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:21:36,553 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:21:37,382 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-48000.pt +2023-04-01 11:22:15,558 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1211, 1.2279, 1.6910, 1.2982, 2.7300, 3.5729, 3.3441, 3.8082], + device='cuda:0'), covar=tensor([0.1530, 0.3190, 0.2871, 0.2014, 0.0467, 0.0150, 0.0224, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0247, 0.0208, 0.0136, 0.0204, 0.0172], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:22:35,824 INFO [train.py:903] (0/4) Epoch 8, batch 250, loss[loss=0.2497, simple_loss=0.3271, pruned_loss=0.08615, over 19525.00 frames. 
], tot_loss[loss=0.2584, simple_loss=0.3258, pruned_loss=0.09553, over 2743390.47 frames. ], batch size: 56, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:22:42,343 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 6.073e+02 7.344e+02 8.973e+02 2.163e+03, threshold=1.469e+03, percent-clipped=4.0 +2023-04-01 11:23:30,358 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:31,723 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2216, 1.2806, 1.6916, 1.4071, 2.6467, 2.1609, 2.8278, 1.1478], + device='cuda:0'), covar=tensor([0.1988, 0.3461, 0.1929, 0.1623, 0.1348, 0.1789, 0.1401, 0.3192], + device='cuda:0'), in_proj_covar=tensor([0.0455, 0.0533, 0.0530, 0.0416, 0.0572, 0.0467, 0.0624, 0.0459], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:23:36,839 INFO [train.py:903] (0/4) Epoch 8, batch 300, loss[loss=0.2957, simple_loss=0.3642, pruned_loss=0.1136, over 19696.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3257, pruned_loss=0.09555, over 2992738.77 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:23:41,627 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:55,920 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:59,587 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2900, 1.3280, 1.7482, 1.4860, 2.6836, 2.2515, 2.8190, 1.0948], + device='cuda:0'), covar=tensor([0.1915, 0.3512, 0.1942, 0.1589, 0.1305, 0.1642, 0.1444, 0.3179], + device='cuda:0'), in_proj_covar=tensor([0.0458, 0.0536, 0.0532, 0.0418, 0.0575, 0.0468, 0.0629, 0.0461], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:24:14,370 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1325, 2.1329, 2.2066, 3.3030, 2.2397, 3.2909, 2.8210, 1.9113], + device='cuda:0'), covar=tensor([0.3031, 0.2552, 0.1107, 0.1397, 0.2961, 0.0973, 0.2331, 0.2171], + device='cuda:0'), in_proj_covar=tensor([0.0721, 0.0723, 0.0607, 0.0852, 0.0734, 0.0632, 0.0748, 0.0657], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:24:27,759 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:37,683 INFO [train.py:903] (0/4) Epoch 8, batch 350, loss[loss=0.2285, simple_loss=0.307, pruned_loss=0.07501, over 19686.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3235, pruned_loss=0.09402, over 3185719.24 frames. ], batch size: 59, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:24:39,936 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 11:24:42,101 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:43,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.994e+02 6.192e+02 7.149e+02 9.949e+02 1.629e+03, threshold=1.430e+03, percent-clipped=6.0 +2023-04-01 11:25:08,554 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48171.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:25:22,139 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9245, 0.7280, 0.9645, 1.3702, 1.0350, 0.8681, 1.1388, 0.8721], + device='cuda:0'), covar=tensor([0.1078, 0.1839, 0.1319, 0.0718, 0.0903, 0.1395, 0.1036, 0.1133], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0235, 0.0232, 0.0262, 0.0246, 0.0217, 0.0211, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:25:33,638 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3381, 1.2622, 1.7732, 1.6584, 2.8980, 4.3132, 4.4153, 4.8663], + device='cuda:0'), covar=tensor([0.1551, 0.3349, 0.3038, 0.1835, 0.0503, 0.0186, 0.0160, 0.0105], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0284, 0.0313, 0.0244, 0.0207, 0.0136, 0.0202, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:25:37,730 INFO [train.py:903] (0/4) Epoch 8, batch 400, loss[loss=0.3206, simple_loss=0.3825, pruned_loss=0.1293, over 19727.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.325, pruned_loss=0.09516, over 3321570.49 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:25:57,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5934, 3.2057, 2.3990, 2.4150, 2.1031, 2.5762, 0.9838, 2.3571], + device='cuda:0'), covar=tensor([0.0345, 0.0272, 0.0347, 0.0508, 0.0615, 0.0479, 0.0673, 0.0545], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0311, 0.0306, 0.0324, 0.0397, 0.0318, 0.0285, 0.0308], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:26:03,257 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5821, 4.1320, 2.5801, 3.6910, 0.9616, 3.7039, 3.8366, 4.0098], + device='cuda:0'), covar=tensor([0.0567, 0.0969, 0.1812, 0.0635, 0.3782, 0.0730, 0.0700, 0.0781], + device='cuda:0'), in_proj_covar=tensor([0.0390, 0.0340, 0.0395, 0.0297, 0.0364, 0.0322, 0.0316, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 11:26:03,419 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:26:13,139 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7399, 1.8391, 1.9307, 2.6215, 1.6740, 2.3531, 2.4565, 1.9008], + device='cuda:0'), covar=tensor([0.2857, 0.2367, 0.1173, 0.1319, 0.2663, 0.1112, 0.2348, 0.2062], + device='cuda:0'), in_proj_covar=tensor([0.0726, 0.0730, 0.0612, 0.0861, 0.0737, 0.0638, 0.0756, 0.0660], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:26:14,296 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0303, 2.1313, 2.1705, 3.0596, 2.2464, 2.9332, 2.8722, 2.0702], + device='cuda:0'), 
covar=tensor([0.2937, 0.2254, 0.1101, 0.1483, 0.2636, 0.1054, 0.2093, 0.1898], + device='cuda:0'), in_proj_covar=tensor([0.0726, 0.0730, 0.0612, 0.0861, 0.0737, 0.0638, 0.0756, 0.0660], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:26:21,192 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48230.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:26:38,938 INFO [train.py:903] (0/4) Epoch 8, batch 450, loss[loss=0.2047, simple_loss=0.2813, pruned_loss=0.06401, over 19490.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3242, pruned_loss=0.09499, over 3437183.67 frames. ], batch size: 49, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:45,631 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.626e+02 7.034e+02 8.568e+02 1.629e+03, threshold=1.407e+03, percent-clipped=1.0 +2023-04-01 11:26:47,795 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1754, 1.1790, 1.5371, 0.9445, 2.3804, 3.0207, 2.7281, 3.1826], + device='cuda:0'), covar=tensor([0.1425, 0.3112, 0.2862, 0.2043, 0.0442, 0.0169, 0.0261, 0.0186], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0283, 0.0314, 0.0244, 0.0207, 0.0135, 0.0202, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:27:03,656 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:27:04,763 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1852, 1.2867, 1.5507, 1.2662, 2.6765, 3.4298, 3.2571, 3.6593], + device='cuda:0'), covar=tensor([0.1466, 0.3007, 0.2819, 0.1954, 0.0458, 0.0202, 0.0193, 0.0144], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0283, 0.0314, 0.0244, 0.0207, 0.0135, 0.0202, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:27:05,778 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6085, 4.7275, 5.2726, 5.2245, 2.0665, 4.8434, 4.3266, 4.9109], + device='cuda:0'), covar=tensor([0.1027, 0.0886, 0.0451, 0.0398, 0.4208, 0.0406, 0.0441, 0.0884], + device='cuda:0'), in_proj_covar=tensor([0.0591, 0.0515, 0.0700, 0.0588, 0.0645, 0.0447, 0.0446, 0.0647], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 11:27:11,123 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 11:27:12,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 11:27:27,259 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48286.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:27:41,080 INFO [train.py:903] (0/4) Epoch 8, batch 500, loss[loss=0.2983, simple_loss=0.3695, pruned_loss=0.1135, over 19096.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.324, pruned_loss=0.09515, over 3522595.75 frames. ], batch size: 69, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:23,295 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. 
limit=2.0 +2023-04-01 11:28:37,096 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:28:41,343 INFO [train.py:903] (0/4) Epoch 8, batch 550, loss[loss=0.2565, simple_loss=0.321, pruned_loss=0.09603, over 19757.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3231, pruned_loss=0.09394, over 3602439.07 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:47,050 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.262e+02 6.228e+02 7.563e+02 9.337e+02 1.593e+03, threshold=1.513e+03, percent-clipped=3.0 +2023-04-01 11:28:56,627 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5632, 1.9121, 1.9675, 2.5969, 2.3899, 2.3101, 2.1056, 2.4602], + device='cuda:0'), covar=tensor([0.0820, 0.1858, 0.1300, 0.0931, 0.1246, 0.0447, 0.1008, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0354, 0.0285, 0.0241, 0.0301, 0.0243, 0.0273, 0.0233], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:29:42,852 INFO [train.py:903] (0/4) Epoch 8, batch 600, loss[loss=0.2307, simple_loss=0.2927, pruned_loss=0.08436, over 19755.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3227, pruned_loss=0.09356, over 3650258.95 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:29:49,152 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1538, 2.1270, 2.2518, 3.2982, 2.1804, 3.0796, 2.8895, 2.0998], + device='cuda:0'), covar=tensor([0.2976, 0.2479, 0.1095, 0.1490, 0.3053, 0.1100, 0.2251, 0.2072], + device='cuda:0'), in_proj_covar=tensor([0.0721, 0.0725, 0.0605, 0.0852, 0.0730, 0.0630, 0.0746, 0.0656], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:30:05,034 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:26,079 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 11:30:31,032 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:39,383 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:44,669 INFO [train.py:903] (0/4) Epoch 8, batch 650, loss[loss=0.2626, simple_loss=0.3395, pruned_loss=0.09281, over 19797.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3229, pruned_loss=0.09389, over 3689233.15 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:30:50,371 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 6.183e+02 7.491e+02 9.829e+02 2.830e+03, threshold=1.498e+03, percent-clipped=3.0 +2023-04-01 11:31:17,303 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48471.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:31:45,468 INFO [train.py:903] (0/4) Epoch 8, batch 700, loss[loss=0.2844, simple_loss=0.3604, pruned_loss=0.1042, over 19492.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3234, pruned_loss=0.09389, over 3720679.03 frames. 
], batch size: 64, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:31:45,812 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:04,038 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2510, 1.2391, 1.4399, 1.3467, 1.6974, 1.8039, 1.8365, 0.5300], + device='cuda:0'), covar=tensor([0.1807, 0.3114, 0.1861, 0.1502, 0.1240, 0.1671, 0.1139, 0.3013], + device='cuda:0'), in_proj_covar=tensor([0.0463, 0.0540, 0.0535, 0.0421, 0.0577, 0.0469, 0.0635, 0.0466], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:32:17,584 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:44,030 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48542.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:32:48,881 INFO [train.py:903] (0/4) Epoch 8, batch 750, loss[loss=0.2744, simple_loss=0.3338, pruned_loss=0.1075, over 19843.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3265, pruned_loss=0.09592, over 3737391.59 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:32:49,288 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:52,552 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48549.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:55,669 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.054e+02 7.636e+02 9.380e+02 1.990e+03, threshold=1.527e+03, percent-clipped=3.0 +2023-04-01 11:33:10,158 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6900, 1.3310, 1.3122, 1.8072, 1.3553, 1.8194, 1.8563, 1.6114], + device='cuda:0'), covar=tensor([0.0774, 0.1023, 0.1069, 0.0865, 0.0905, 0.0759, 0.0821, 0.0706], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0234, 0.0231, 0.0261, 0.0248, 0.0218, 0.0211, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:33:13,733 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48567.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:33:17,070 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3534, 1.1767, 1.3047, 1.6079, 2.9130, 0.9244, 2.0578, 3.1524], + device='cuda:0'), covar=tensor([0.0425, 0.2731, 0.2752, 0.1518, 0.0685, 0.2528, 0.1086, 0.0355], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0321, 0.0328, 0.0301, 0.0325, 0.0321, 0.0298, 0.0321], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:33:21,435 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48574.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:33:49,547 INFO [train.py:903] (0/4) Epoch 8, batch 800, loss[loss=0.3181, simple_loss=0.3648, pruned_loss=0.1357, over 19482.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3252, pruned_loss=0.09553, over 3753021.61 frames. ], batch size: 64, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:02,654 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 11:34:19,554 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8945, 1.9803, 1.9505, 2.7610, 1.8479, 2.3572, 2.3780, 1.8783], + device='cuda:0'), covar=tensor([0.2596, 0.2061, 0.1080, 0.1182, 0.2400, 0.1035, 0.2392, 0.2024], + device='cuda:0'), in_proj_covar=tensor([0.0722, 0.0727, 0.0609, 0.0856, 0.0731, 0.0634, 0.0755, 0.0661], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:34:29,987 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2204, 1.3974, 1.7611, 1.4788, 2.7133, 2.2744, 2.8502, 1.1658], + device='cuda:0'), covar=tensor([0.2126, 0.3488, 0.2053, 0.1615, 0.1348, 0.1753, 0.1415, 0.3305], + device='cuda:0'), in_proj_covar=tensor([0.0457, 0.0534, 0.0530, 0.0417, 0.0574, 0.0468, 0.0628, 0.0460], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:34:34,562 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4390, 1.7428, 2.1718, 2.6067, 1.9255, 2.8630, 2.6741, 2.4081], + device='cuda:0'), covar=tensor([0.0676, 0.0968, 0.0916, 0.1023, 0.1011, 0.0578, 0.0784, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0232, 0.0230, 0.0261, 0.0248, 0.0215, 0.0209, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:34:48,687 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 11:34:51,085 INFO [train.py:903] (0/4) Epoch 8, batch 850, loss[loss=0.2625, simple_loss=0.3366, pruned_loss=0.09419, over 19615.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3244, pruned_loss=0.09423, over 3788448.76 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:57,942 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 5.922e+02 7.936e+02 9.993e+02 1.897e+03, threshold=1.587e+03, percent-clipped=5.0 +2023-04-01 11:35:39,764 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 11:35:39,886 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:35:43,325 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48689.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:35:50,882 INFO [train.py:903] (0/4) Epoch 8, batch 900, loss[loss=0.2524, simple_loss=0.3233, pruned_loss=0.09076, over 19684.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3245, pruned_loss=0.09455, over 3798392.15 frames. ], batch size: 59, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:54,671 INFO [train.py:903] (0/4) Epoch 8, batch 950, loss[loss=0.1965, simple_loss=0.268, pruned_loss=0.06253, over 19325.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.323, pruned_loss=0.09369, over 3798843.27 frames. ], batch size: 44, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:56,564 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 11:36:57,657 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9003, 3.5084, 2.3382, 3.3094, 0.8886, 3.1806, 3.2912, 3.3406], + device='cuda:0'), covar=tensor([0.0929, 0.1399, 0.2242, 0.0876, 0.4388, 0.1017, 0.0963, 0.1185], + device='cuda:0'), in_proj_covar=tensor([0.0395, 0.0342, 0.0401, 0.0301, 0.0366, 0.0329, 0.0318, 0.0352], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 11:37:03,057 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.996e+02 5.931e+02 7.048e+02 8.289e+02 1.665e+03, threshold=1.410e+03, percent-clipped=1.0 +2023-04-01 11:37:11,345 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:42,371 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:57,225 INFO [train.py:903] (0/4) Epoch 8, batch 1000, loss[loss=0.3041, simple_loss=0.3631, pruned_loss=0.1226, over 19590.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3229, pruned_loss=0.09351, over 3796899.09 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:38:02,369 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8384, 2.6869, 1.7131, 1.8827, 1.4795, 1.9260, 0.7719, 1.8848], + device='cuda:0'), covar=tensor([0.0472, 0.0487, 0.0573, 0.0789, 0.1098, 0.0846, 0.0911, 0.0925], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0318, 0.0317, 0.0334, 0.0409, 0.0329, 0.0296, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:38:03,518 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:04,675 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3897, 1.2164, 1.1656, 1.4744, 1.3014, 1.3057, 1.1953, 1.3904], + device='cuda:0'), covar=tensor([0.0713, 0.1077, 0.0924, 0.0571, 0.0834, 0.0400, 0.0865, 0.0511], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0353, 0.0280, 0.0236, 0.0299, 0.0238, 0.0270, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:38:08,245 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:16,351 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1863, 1.3219, 1.7377, 1.4245, 2.6247, 2.1202, 2.7875, 1.0238], + device='cuda:0'), covar=tensor([0.2092, 0.3599, 0.2034, 0.1709, 0.1299, 0.1788, 0.1403, 0.3383], + device='cuda:0'), in_proj_covar=tensor([0.0454, 0.0530, 0.0528, 0.0417, 0.0570, 0.0463, 0.0625, 0.0458], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:38:40,109 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:50,179 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-01 11:38:51,451 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:59,281 INFO [train.py:903] (0/4) Epoch 8, batch 1050, loss[loss=0.2166, simple_loss=0.2765, pruned_loss=0.07839, over 19726.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3237, pruned_loss=0.09411, over 3810327.51 frames. ], batch size: 45, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:39:06,227 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.500e+02 6.561e+02 8.180e+02 1.521e+03, threshold=1.312e+03, percent-clipped=1.0 +2023-04-01 11:39:11,093 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:31,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 11:39:33,597 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 11:39:34,233 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:58,991 INFO [train.py:903] (0/4) Epoch 8, batch 1100, loss[loss=0.2363, simple_loss=0.3148, pruned_loss=0.07885, over 19761.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3239, pruned_loss=0.09417, over 3815582.05 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:40:04,016 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48900.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:41:00,104 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48945.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:41:00,823 INFO [train.py:903] (0/4) Epoch 8, batch 1150, loss[loss=0.2628, simple_loss=0.3351, pruned_loss=0.09522, over 19124.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3228, pruned_loss=0.09368, over 3794413.60 frames. ], batch size: 69, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:41:09,121 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.943e+02 6.952e+02 8.882e+02 1.618e+03, threshold=1.390e+03, percent-clipped=5.0 +2023-04-01 11:41:20,470 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8882, 4.3073, 4.6122, 4.5677, 1.7097, 4.2367, 3.6415, 4.1817], + device='cuda:0'), covar=tensor([0.1256, 0.0686, 0.0532, 0.0502, 0.4705, 0.0546, 0.0589, 0.0998], + device='cuda:0'), in_proj_covar=tensor([0.0593, 0.0519, 0.0705, 0.0593, 0.0656, 0.0448, 0.0449, 0.0654], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 11:41:30,935 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:42:04,106 INFO [train.py:903] (0/4) Epoch 8, batch 1200, loss[loss=0.2683, simple_loss=0.3397, pruned_loss=0.09844, over 19281.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3224, pruned_loss=0.09353, over 3795196.09 frames. ], batch size: 70, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:42:32,694 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 11:43:05,011 INFO [train.py:903] (0/4) Epoch 8, batch 1250, loss[loss=0.2889, simple_loss=0.353, pruned_loss=0.1124, over 18261.00 frames. 
], tot_loss[loss=0.255, simple_loss=0.3231, pruned_loss=0.09349, over 3816939.23 frames. ], batch size: 83, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:43:11,736 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.529e+02 6.811e+02 8.521e+02 1.008e+03 2.064e+03, threshold=1.704e+03, percent-clipped=4.0 +2023-04-01 11:43:17,993 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:43:50,278 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:44:06,032 INFO [train.py:903] (0/4) Epoch 8, batch 1300, loss[loss=0.2725, simple_loss=0.346, pruned_loss=0.09953, over 19607.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3226, pruned_loss=0.09282, over 3820243.68 frames. ], batch size: 61, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:44:49,025 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:07,324 INFO [train.py:903] (0/4) Epoch 8, batch 1350, loss[loss=0.231, simple_loss=0.3006, pruned_loss=0.0807, over 19416.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3229, pruned_loss=0.0936, over 3813764.20 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:45:16,537 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.835e+02 7.092e+02 8.908e+02 2.388e+03, threshold=1.418e+03, percent-clipped=3.0 +2023-04-01 11:45:20,471 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:22,451 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:26,736 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9690, 3.5839, 1.7816, 2.0614, 2.9568, 1.6652, 1.2236, 1.9177], + device='cuda:0'), covar=tensor([0.1115, 0.0403, 0.1048, 0.0696, 0.0470, 0.1015, 0.0954, 0.0675], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0292, 0.0318, 0.0245, 0.0233, 0.0311, 0.0286, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:45:52,070 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:54,116 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49183.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:46:10,824 INFO [train.py:903] (0/4) Epoch 8, batch 1400, loss[loss=0.2528, simple_loss=0.3159, pruned_loss=0.09483, over 19836.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3214, pruned_loss=0.09278, over 3832131.90 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:46:17,525 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49200.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:47:01,994 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-04-01 11:47:12,312 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 11:47:13,437 INFO [train.py:903] (0/4) Epoch 8, batch 1450, loss[loss=0.2417, simple_loss=0.3199, pruned_loss=0.0817, over 19529.00 frames. 
], tot_loss[loss=0.2543, simple_loss=0.3223, pruned_loss=0.09319, over 3822123.91 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:47:17,037 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3016, 1.2079, 1.3747, 1.3628, 1.7368, 1.9206, 1.8313, 0.4781], + device='cuda:0'), covar=tensor([0.1851, 0.3314, 0.1959, 0.1615, 0.1220, 0.1637, 0.1165, 0.3226], + device='cuda:0'), in_proj_covar=tensor([0.0458, 0.0532, 0.0530, 0.0415, 0.0574, 0.0463, 0.0628, 0.0462], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:47:19,901 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 6.215e+02 8.146e+02 9.729e+02 2.293e+03, threshold=1.629e+03, percent-clipped=2.0 +2023-04-01 11:47:22,550 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:47:56,333 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5605, 2.4153, 1.5912, 1.5793, 2.1207, 1.2756, 1.3362, 1.7487], + device='cuda:0'), covar=tensor([0.0845, 0.0545, 0.0975, 0.0607, 0.0479, 0.1001, 0.0673, 0.0490], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0290, 0.0317, 0.0242, 0.0230, 0.0310, 0.0283, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:48:14,505 INFO [train.py:903] (0/4) Epoch 8, batch 1500, loss[loss=0.24, simple_loss=0.3011, pruned_loss=0.08945, over 19397.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3232, pruned_loss=0.09363, over 3808271.37 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:48:16,780 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49298.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:38,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49315.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:49:12,447 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3627, 1.3135, 1.3583, 1.3333, 2.9425, 0.8847, 2.0360, 3.0522], + device='cuda:0'), covar=tensor([0.0429, 0.2632, 0.2583, 0.1743, 0.0642, 0.2589, 0.1185, 0.0403], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0323, 0.0330, 0.0301, 0.0328, 0.0320, 0.0301, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 11:49:14,530 INFO [train.py:903] (0/4) Epoch 8, batch 1550, loss[loss=0.2735, simple_loss=0.3438, pruned_loss=0.1015, over 19296.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3231, pruned_loss=0.09388, over 3808338.54 frames. 
], batch size: 66, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:49:23,152 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+02 6.392e+02 7.844e+02 9.464e+02 1.840e+03, threshold=1.569e+03, percent-clipped=1.0 +2023-04-01 11:49:24,615 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1226, 0.9430, 1.0729, 1.4649, 1.1198, 1.2469, 1.3410, 1.1103], + device='cuda:0'), covar=tensor([0.1126, 0.1448, 0.1347, 0.0764, 0.0936, 0.1118, 0.0971, 0.1018], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0235, 0.0233, 0.0265, 0.0252, 0.0222, 0.0212, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:50:17,464 INFO [train.py:903] (0/4) Epoch 8, batch 1600, loss[loss=0.2454, simple_loss=0.3187, pruned_loss=0.08602, over 19653.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3223, pruned_loss=0.09323, over 3807148.77 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:50:38,475 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 11:50:49,074 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0840, 0.9799, 1.0719, 1.3299, 0.9968, 1.2740, 1.3447, 1.1732], + device='cuda:0'), covar=tensor([0.1087, 0.1222, 0.1237, 0.0857, 0.0942, 0.0927, 0.0916, 0.0887], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0234, 0.0233, 0.0264, 0.0252, 0.0223, 0.0212, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:51:12,181 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:51:20,002 INFO [train.py:903] (0/4) Epoch 8, batch 1650, loss[loss=0.2821, simple_loss=0.348, pruned_loss=0.1081, over 18814.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3228, pruned_loss=0.09377, over 3806726.42 frames. ], batch size: 74, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:51:24,784 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49450.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:51:26,741 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 6.230e+02 7.478e+02 9.229e+02 3.510e+03, threshold=1.496e+03, percent-clipped=3.0 +2023-04-01 11:52:16,281 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6497, 4.1625, 4.3918, 4.3954, 1.5816, 4.1349, 3.5642, 3.9753], + device='cuda:0'), covar=tensor([0.1231, 0.0659, 0.0519, 0.0494, 0.4807, 0.0480, 0.0577, 0.1185], + device='cuda:0'), in_proj_covar=tensor([0.0606, 0.0537, 0.0718, 0.0602, 0.0671, 0.0462, 0.0457, 0.0671], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 11:52:21,802 INFO [train.py:903] (0/4) Epoch 8, batch 1700, loss[loss=0.2219, simple_loss=0.2897, pruned_loss=0.07704, over 19769.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.322, pruned_loss=0.09317, over 3813406.26 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:02,523 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 11:53:03,215 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 11:53:06,461 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4739, 1.4809, 1.9306, 1.6147, 2.6744, 2.5049, 2.7763, 1.3517], + device='cuda:0'), covar=tensor([0.1854, 0.3314, 0.1906, 0.1558, 0.1321, 0.1480, 0.1457, 0.3012], + device='cuda:0'), in_proj_covar=tensor([0.0461, 0.0538, 0.0534, 0.0418, 0.0578, 0.0467, 0.0631, 0.0465], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 11:53:23,395 INFO [train.py:903] (0/4) Epoch 8, batch 1750, loss[loss=0.2861, simple_loss=0.3483, pruned_loss=0.112, over 19662.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3219, pruned_loss=0.09311, over 3816672.21 frames. ], batch size: 60, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:26,506 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 11:53:31,457 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.147e+02 7.390e+02 1.012e+03 1.809e+03, threshold=1.478e+03, percent-clipped=6.0 +2023-04-01 11:53:35,100 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49554.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:50,020 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-01 11:53:56,591 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:58,883 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7691, 1.9804, 2.3223, 2.1154, 3.0641, 3.6364, 3.6674, 3.9199], + device='cuda:0'), covar=tensor([0.1226, 0.2268, 0.2094, 0.1688, 0.0681, 0.0358, 0.0167, 0.0158], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0282, 0.0311, 0.0245, 0.0205, 0.0135, 0.0203, 0.0169], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 11:54:06,005 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:27,936 INFO [train.py:903] (0/4) Epoch 8, batch 1800, loss[loss=0.3099, simple_loss=0.368, pruned_loss=0.1259, over 17390.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3214, pruned_loss=0.09303, over 3821521.18 frames. ], batch size: 101, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:54:28,351 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:31,405 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:55:25,420 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 11:55:30,998 INFO [train.py:903] (0/4) Epoch 8, batch 1850, loss[loss=0.3566, simple_loss=0.3963, pruned_loss=0.1585, over 12901.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3225, pruned_loss=0.09399, over 3822947.29 frames. ], batch size: 136, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:55:38,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.845e+02 7.519e+02 8.649e+02 2.522e+03, threshold=1.504e+03, percent-clipped=4.0 +2023-04-01 11:56:02,470 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 11:56:31,214 INFO [train.py:903] (0/4) Epoch 8, batch 1900, loss[loss=0.2664, simple_loss=0.3415, pruned_loss=0.09567, over 19543.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3236, pruned_loss=0.09472, over 3829276.81 frames. ], batch size: 56, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:56:48,649 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 11:56:52,377 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:56:54,379 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 11:57:18,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 11:57:33,019 INFO [train.py:903] (0/4) Epoch 8, batch 1950, loss[loss=0.2406, simple_loss=0.3218, pruned_loss=0.07972, over 19542.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09429, over 3823350.01 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:57:40,110 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.492e+02 5.402e+02 6.624e+02 8.916e+02 2.925e+03, threshold=1.325e+03, percent-clipped=4.0 +2023-04-01 11:58:20,344 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:20,492 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:26,588 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2485, 1.6154, 2.1675, 2.8779, 1.8641, 2.2389, 2.6203, 2.2988], + device='cuda:0'), covar=tensor([0.0763, 0.0984, 0.0855, 0.0832, 0.1000, 0.0757, 0.0886, 0.0620], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0231, 0.0229, 0.0261, 0.0248, 0.0217, 0.0210, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 11:58:32,981 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49794.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:58:35,922 INFO [train.py:903] (0/4) Epoch 8, batch 2000, loss[loss=0.2441, simple_loss=0.3247, pruned_loss=0.08178, over 19788.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3238, pruned_loss=0.09426, over 3823424.87 frames. ], batch size: 56, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:35,609 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 11:59:38,067 INFO [train.py:903] (0/4) Epoch 8, batch 2050, loss[loss=0.1924, simple_loss=0.2778, pruned_loss=0.05356, over 19576.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.322, pruned_loss=0.09373, over 3820674.95 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:45,925 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.411e+02 7.156e+02 9.098e+02 3.444e+03, threshold=1.431e+03, percent-clipped=9.0 +2023-04-01 11:59:53,811 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 11:59:55,135 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-01 12:00:17,460 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 12:00:39,973 INFO [train.py:903] (0/4) Epoch 8, batch 2100, loss[loss=0.2579, simple_loss=0.3288, pruned_loss=0.09344, over 19770.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3224, pruned_loss=0.09386, over 3827346.29 frames. ], batch size: 56, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:00:43,806 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:00:55,217 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49909.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:01:09,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 12:01:32,556 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 12:01:41,833 INFO [train.py:903] (0/4) Epoch 8, batch 2150, loss[loss=0.3029, simple_loss=0.364, pruned_loss=0.1209, over 19425.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3213, pruned_loss=0.09348, over 3819410.12 frames. ], batch size: 70, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:01:48,292 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 5.997e+02 7.086e+02 8.659e+02 2.224e+03, threshold=1.417e+03, percent-clipped=8.0 +2023-04-01 12:02:03,058 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7751, 1.8887, 1.9971, 2.7504, 1.8151, 2.5972, 2.4897, 1.8896], + device='cuda:0'), covar=tensor([0.2958, 0.2374, 0.1150, 0.1279, 0.2564, 0.0981, 0.2411, 0.2195], + device='cuda:0'), in_proj_covar=tensor([0.0722, 0.0732, 0.0610, 0.0856, 0.0728, 0.0633, 0.0756, 0.0655], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 12:02:05,611 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 12:02:12,033 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:40,672 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:43,585 INFO [train.py:903] (0/4) Epoch 8, batch 2200, loss[loss=0.2399, simple_loss=0.3066, pruned_loss=0.08658, over 19854.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3214, pruned_loss=0.09391, over 3815483.83 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:02:49,452 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-50000.pt +2023-04-01 12:03:48,609 INFO [train.py:903] (0/4) Epoch 8, batch 2250, loss[loss=0.245, simple_loss=0.3113, pruned_loss=0.08937, over 19475.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3224, pruned_loss=0.09414, over 3809943.17 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:55,476 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+02 6.238e+02 7.806e+02 1.014e+03 2.092e+03, threshold=1.561e+03, percent-clipped=8.0 +2023-04-01 12:04:51,015 INFO [train.py:903] (0/4) Epoch 8, batch 2300, loss[loss=0.2523, simple_loss=0.3094, pruned_loss=0.09756, over 16052.00 frames. 
], tot_loss[loss=0.2573, simple_loss=0.3242, pruned_loss=0.09521, over 3810612.82 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:04,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 12:05:31,266 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:52,929 INFO [train.py:903] (0/4) Epoch 8, batch 2350, loss[loss=0.2654, simple_loss=0.3351, pruned_loss=0.09786, over 19531.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3248, pruned_loss=0.09528, over 3824001.65 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:53,240 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:59,870 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.938e+02 7.771e+02 9.106e+02 1.869e+03, threshold=1.554e+03, percent-clipped=2.0 +2023-04-01 12:06:03,818 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:18,096 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50165.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:06:31,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 12:06:36,669 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:38,683 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 12:06:47,995 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50190.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:06:54,683 INFO [train.py:903] (0/4) Epoch 8, batch 2400, loss[loss=0.2611, simple_loss=0.328, pruned_loss=0.0971, over 19598.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3241, pruned_loss=0.09526, over 3823104.74 frames. ], batch size: 57, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:06:54,692 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 12:07:20,904 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50215.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:07:53,620 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:07:58,090 INFO [train.py:903] (0/4) Epoch 8, batch 2450, loss[loss=0.3001, simple_loss=0.3659, pruned_loss=0.1172, over 19307.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3249, pruned_loss=0.09596, over 3796436.39 frames. ], batch size: 66, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:08:05,247 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.846e+02 6.354e+02 7.339e+02 9.160e+02 2.255e+03, threshold=1.468e+03, percent-clipped=3.0 +2023-04-01 12:09:00,867 INFO [train.py:903] (0/4) Epoch 8, batch 2500, loss[loss=0.3442, simple_loss=0.3805, pruned_loss=0.154, over 13582.00 frames. ], tot_loss[loss=0.258, simple_loss=0.325, pruned_loss=0.09551, over 3771460.92 frames. 
], batch size: 136, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:03,030 INFO [train.py:903] (0/4) Epoch 8, batch 2550, loss[loss=0.2742, simple_loss=0.3452, pruned_loss=0.1016, over 19308.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3239, pruned_loss=0.09405, over 3795804.26 frames. ], batch size: 66, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:09,527 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.479e+02 6.803e+02 8.076e+02 1.672e+03, threshold=1.361e+03, percent-clipped=2.0 +2023-04-01 12:10:59,275 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 12:11:05,094 INFO [train.py:903] (0/4) Epoch 8, batch 2600, loss[loss=0.2451, simple_loss=0.3005, pruned_loss=0.09483, over 17389.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3228, pruned_loss=0.09368, over 3807753.41 frames. ], batch size: 38, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:09,324 INFO [train.py:903] (0/4) Epoch 8, batch 2650, loss[loss=0.2902, simple_loss=0.356, pruned_loss=0.1122, over 19789.00 frames. ], tot_loss[loss=0.255, simple_loss=0.323, pruned_loss=0.09356, over 3814469.00 frames. ], batch size: 56, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:15,976 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.098e+02 6.836e+02 8.198e+02 1.046e+03 1.620e+03, threshold=1.640e+03, percent-clipped=8.0 +2023-04-01 12:12:27,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 12:12:34,601 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5904, 4.0299, 4.2218, 4.2098, 1.5506, 3.9275, 3.4424, 3.8762], + device='cuda:0'), covar=tensor([0.1107, 0.0607, 0.0500, 0.0491, 0.4634, 0.0523, 0.0602, 0.0992], + device='cuda:0'), in_proj_covar=tensor([0.0592, 0.0524, 0.0707, 0.0597, 0.0661, 0.0454, 0.0445, 0.0657], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 12:13:04,636 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:09,322 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0384, 5.3595, 2.8515, 4.5727, 1.0759, 5.3231, 5.2825, 5.5728], + device='cuda:0'), covar=tensor([0.0417, 0.0923, 0.1946, 0.0611, 0.4099, 0.0517, 0.0571, 0.0819], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0338, 0.0401, 0.0293, 0.0361, 0.0322, 0.0313, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 12:13:11,330 INFO [train.py:903] (0/4) Epoch 8, batch 2700, loss[loss=0.2632, simple_loss=0.3145, pruned_loss=0.1059, over 14693.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.322, pruned_loss=0.0932, over 3814877.03 frames. ], batch size: 32, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:13:16,508 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50499.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:46,656 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:15,300 INFO [train.py:903] (0/4) Epoch 8, batch 2750, loss[loss=0.2798, simple_loss=0.3463, pruned_loss=0.1066, over 19652.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.322, pruned_loss=0.09292, over 3830869.18 frames. 
], batch size: 55, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:14:20,713 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4837, 1.5298, 1.6174, 1.9786, 1.3537, 1.7122, 1.8825, 1.6042], + device='cuda:0'), covar=tensor([0.2505, 0.2032, 0.1065, 0.1057, 0.2208, 0.1004, 0.2399, 0.1917], + device='cuda:0'), in_proj_covar=tensor([0.0731, 0.0738, 0.0617, 0.0859, 0.0740, 0.0642, 0.0768, 0.0662], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 12:14:23,786 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 5.912e+02 7.181e+02 9.047e+02 1.864e+03, threshold=1.436e+03, percent-clipped=1.0 +2023-04-01 12:14:29,751 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3194, 1.6380, 2.0255, 2.5035, 1.8829, 2.9105, 2.6727, 2.3312], + device='cuda:0'), covar=tensor([0.0662, 0.0939, 0.0908, 0.0998, 0.0967, 0.0572, 0.0800, 0.0639], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0233, 0.0231, 0.0260, 0.0247, 0.0218, 0.0210, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 12:14:30,887 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50559.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:14:37,732 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50564.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:15:18,012 INFO [train.py:903] (0/4) Epoch 8, batch 2800, loss[loss=0.3304, simple_loss=0.3659, pruned_loss=0.1474, over 13579.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3232, pruned_loss=0.09389, over 3805480.39 frames. ], batch size: 135, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:15:29,724 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50605.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:16:21,990 INFO [train.py:903] (0/4) Epoch 8, batch 2850, loss[loss=0.2282, simple_loss=0.291, pruned_loss=0.08274, over 19751.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3232, pruned_loss=0.09394, over 3813513.95 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:16:31,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 5.617e+02 7.073e+02 8.787e+02 1.544e+03, threshold=1.415e+03, percent-clipped=2.0 +2023-04-01 12:16:57,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50674.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:17:25,989 INFO [train.py:903] (0/4) Epoch 8, batch 2900, loss[loss=0.2277, simple_loss=0.2864, pruned_loss=0.08447, over 19287.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3218, pruned_loss=0.09291, over 3825853.60 frames. ], batch size: 44, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:17:26,079 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 12:18:29,372 INFO [train.py:903] (0/4) Epoch 8, batch 2950, loss[loss=0.247, simple_loss=0.316, pruned_loss=0.08903, over 19864.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3213, pruned_loss=0.09214, over 3837620.02 frames. 
], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:18:37,517 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 6.074e+02 7.951e+02 1.027e+03 2.467e+03, threshold=1.590e+03, percent-clipped=7.0 +2023-04-01 12:19:31,424 INFO [train.py:903] (0/4) Epoch 8, batch 3000, loss[loss=0.3076, simple_loss=0.3625, pruned_loss=0.1263, over 19272.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3228, pruned_loss=0.09342, over 3834783.41 frames. ], batch size: 66, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:19:31,425 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 12:19:38,491 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1338, 1.2677, 1.5660, 0.9745, 2.2626, 2.8823, 2.6386, 3.0138], + device='cuda:0'), covar=tensor([0.1579, 0.3070, 0.2863, 0.2203, 0.0484, 0.0244, 0.0287, 0.0205], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0287, 0.0316, 0.0249, 0.0209, 0.0137, 0.0207, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 12:19:44,060 INFO [train.py:937] (0/4) Epoch 8, validation: loss=0.1875, simple_loss=0.2879, pruned_loss=0.04358, over 944034.00 frames. +2023-04-01 12:19:44,061 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 12:19:46,424 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 12:19:49,186 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50800.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:20:21,695 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50826.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:20:45,735 INFO [train.py:903] (0/4) Epoch 8, batch 3050, loss[loss=0.286, simple_loss=0.346, pruned_loss=0.113, over 17624.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3236, pruned_loss=0.09374, over 3830890.95 frames. ], batch size: 101, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:20:55,183 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 5.734e+02 7.199e+02 9.163e+02 1.650e+03, threshold=1.440e+03, percent-clipped=2.0 +2023-04-01 12:21:06,927 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:22,874 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2141, 1.1662, 1.4937, 1.3795, 2.0964, 1.9193, 2.1007, 0.8601], + device='cuda:0'), covar=tensor([0.2404, 0.4147, 0.2362, 0.2030, 0.1458, 0.2048, 0.1565, 0.3620], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0540, 0.0537, 0.0422, 0.0576, 0.0471, 0.0637, 0.0468], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 12:21:24,278 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. 
limit=2.0 +2023-04-01 12:21:36,759 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:44,445 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2653, 1.3631, 1.4056, 1.5206, 2.7764, 0.8505, 2.2081, 3.0608], + device='cuda:0'), covar=tensor([0.0457, 0.2368, 0.2608, 0.1465, 0.0752, 0.2370, 0.1098, 0.0395], + device='cuda:0'), in_proj_covar=tensor([0.0321, 0.0326, 0.0340, 0.0302, 0.0334, 0.0320, 0.0308, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:21:49,959 INFO [train.py:903] (0/4) Epoch 8, batch 3100, loss[loss=0.2044, simple_loss=0.2836, pruned_loss=0.0626, over 19648.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3236, pruned_loss=0.09405, over 3818211.99 frames. ], batch size: 55, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:22:04,733 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:22:32,054 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50930.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:22:52,354 INFO [train.py:903] (0/4) Epoch 8, batch 3150, loss[loss=0.2501, simple_loss=0.3291, pruned_loss=0.08553, over 18462.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3234, pruned_loss=0.09392, over 3815959.08 frames. ], batch size: 84, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:23:00,494 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 5.915e+02 7.023e+02 8.955e+02 1.571e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-01 12:23:03,163 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50955.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:23:14,706 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:23:20,378 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 12:23:32,483 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-01 12:23:35,052 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2996, 2.9797, 2.0954, 2.7824, 0.9532, 2.8117, 2.7671, 2.8543], + device='cuda:0'), covar=tensor([0.1069, 0.1446, 0.2094, 0.0836, 0.3609, 0.1106, 0.0942, 0.1235], + device='cuda:0'), in_proj_covar=tensor([0.0397, 0.0345, 0.0402, 0.0294, 0.0365, 0.0328, 0.0323, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 12:23:54,314 INFO [train.py:903] (0/4) Epoch 8, batch 3200, loss[loss=0.2546, simple_loss=0.3262, pruned_loss=0.09146, over 19403.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3232, pruned_loss=0.0938, over 3827178.92 frames. ], batch size: 48, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:24:30,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51023.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:24:57,559 INFO [train.py:903] (0/4) Epoch 8, batch 3250, loss[loss=0.2321, simple_loss=0.3076, pruned_loss=0.07827, over 19617.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3222, pruned_loss=0.09299, over 3831501.40 frames. 
], batch size: 50, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:25:03,797 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7347, 1.4924, 1.6295, 1.6922, 3.2800, 1.0824, 2.3107, 3.5924], + device='cuda:0'), covar=tensor([0.0423, 0.2416, 0.2410, 0.1567, 0.0697, 0.2390, 0.1157, 0.0271], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0322, 0.0335, 0.0301, 0.0329, 0.0319, 0.0307, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:25:05,780 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 6.087e+02 7.883e+02 9.942e+02 3.174e+03, threshold=1.577e+03, percent-clipped=7.0 +2023-04-01 12:25:40,489 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:26:00,812 INFO [train.py:903] (0/4) Epoch 8, batch 3300, loss[loss=0.3066, simple_loss=0.3557, pruned_loss=0.1288, over 13878.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3223, pruned_loss=0.09329, over 3821833.78 frames. ], batch size: 135, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:26:01,209 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6180, 1.6854, 1.4275, 1.3344, 1.2090, 1.4368, 0.3970, 0.9040], + device='cuda:0'), covar=tensor([0.0287, 0.0304, 0.0226, 0.0289, 0.0580, 0.0328, 0.0597, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0320, 0.0320, 0.0336, 0.0413, 0.0336, 0.0297, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 12:26:08,900 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 12:26:17,151 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6990, 1.1079, 1.3344, 1.5275, 2.9610, 0.8619, 2.2607, 3.3566], + device='cuda:0'), covar=tensor([0.0521, 0.3562, 0.3306, 0.1892, 0.1090, 0.3025, 0.1329, 0.0516], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0324, 0.0335, 0.0301, 0.0331, 0.0321, 0.0307, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:27:02,456 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51144.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:27:04,793 INFO [train.py:903] (0/4) Epoch 8, batch 3350, loss[loss=0.2615, simple_loss=0.3131, pruned_loss=0.105, over 19289.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3224, pruned_loss=0.09367, over 3809564.79 frames. ], batch size: 44, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:27:12,715 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.942e+02 7.377e+02 9.279e+02 2.136e+03, threshold=1.475e+03, percent-clipped=2.0 +2023-04-01 12:27:34,023 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51170.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:28:00,636 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51190.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:28:07,181 INFO [train.py:903] (0/4) Epoch 8, batch 3400, loss[loss=0.252, simple_loss=0.3275, pruned_loss=0.08826, over 19781.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3234, pruned_loss=0.09411, over 3810066.47 frames. 
], batch size: 56, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:10,666 INFO [train.py:903] (0/4) Epoch 8, batch 3450, loss[loss=0.2108, simple_loss=0.2816, pruned_loss=0.07001, over 19855.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3227, pruned_loss=0.09419, over 3801831.88 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:16,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 12:29:16,430 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0014, 5.3962, 2.8562, 4.6654, 1.5852, 5.2869, 5.2587, 5.3854], + device='cuda:0'), covar=tensor([0.0370, 0.0791, 0.1801, 0.0586, 0.3424, 0.0521, 0.0564, 0.0680], + device='cuda:0'), in_proj_covar=tensor([0.0399, 0.0346, 0.0405, 0.0300, 0.0373, 0.0330, 0.0323, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 12:29:18,575 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+02 6.280e+02 7.477e+02 9.686e+02 1.820e+03, threshold=1.495e+03, percent-clipped=3.0 +2023-04-01 12:29:27,081 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:29:52,057 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:00,073 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51285.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:30:12,716 INFO [train.py:903] (0/4) Epoch 8, batch 3500, loss[loss=0.2763, simple_loss=0.3242, pruned_loss=0.1142, over 18970.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3238, pruned_loss=0.09484, over 3810367.64 frames. ], batch size: 42, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:30:25,469 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51304.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:26,781 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8631, 1.3501, 1.0113, 0.9786, 1.1594, 0.8688, 0.8269, 1.2539], + device='cuda:0'), covar=tensor([0.0472, 0.0695, 0.0916, 0.0509, 0.0436, 0.1071, 0.0566, 0.0361], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0291, 0.0314, 0.0242, 0.0230, 0.0309, 0.0283, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:30:30,172 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:31:19,058 INFO [train.py:903] (0/4) Epoch 8, batch 3550, loss[loss=0.3049, simple_loss=0.3658, pruned_loss=0.122, over 19137.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.323, pruned_loss=0.09415, over 3817276.46 frames. ], batch size: 69, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:31:27,385 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.209e+02 6.396e+02 8.561e+02 1.899e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-01 12:32:00,660 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-01 12:32:21,070 INFO [train.py:903] (0/4) Epoch 8, batch 3600, loss[loss=0.2513, simple_loss=0.3214, pruned_loss=0.09064, over 19765.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3224, pruned_loss=0.09369, over 3830063.64 frames. 
], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:32:26,101 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51400.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:38,763 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:54,976 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:56,810 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:33:16,720 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0658, 2.2885, 2.4197, 2.4235, 1.0859, 2.2249, 2.0590, 2.2131], + device='cuda:0'), covar=tensor([0.1180, 0.1872, 0.0659, 0.0646, 0.3329, 0.0936, 0.0621, 0.1023], + device='cuda:0'), in_proj_covar=tensor([0.0598, 0.0527, 0.0711, 0.0595, 0.0659, 0.0460, 0.0450, 0.0655], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 12:33:22,342 INFO [train.py:903] (0/4) Epoch 8, batch 3650, loss[loss=0.22, simple_loss=0.2913, pruned_loss=0.07437, over 19842.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3239, pruned_loss=0.09439, over 3828372.37 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:33:27,496 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8842, 2.0119, 2.0222, 3.0091, 1.9314, 2.7124, 2.4732, 1.9289], + device='cuda:0'), covar=tensor([0.2939, 0.2206, 0.1118, 0.1228, 0.2603, 0.1026, 0.2463, 0.2098], + device='cuda:0'), in_proj_covar=tensor([0.0724, 0.0733, 0.0611, 0.0855, 0.0728, 0.0641, 0.0748, 0.0660], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 12:33:31,510 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 6.415e+02 7.779e+02 9.918e+02 2.619e+03, threshold=1.556e+03, percent-clipped=14.0 +2023-04-01 12:34:24,464 INFO [train.py:903] (0/4) Epoch 8, batch 3700, loss[loss=0.2273, simple_loss=0.2956, pruned_loss=0.07945, over 19726.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.323, pruned_loss=0.09392, over 3830706.75 frames. 
], batch size: 46, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:34:46,880 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:34:49,336 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51515.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:12,246 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51534.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:35:18,924 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51539.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:20,178 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51540.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:21,369 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51541.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:35:27,845 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8121, 1.5983, 1.4842, 1.8574, 1.6965, 1.5543, 1.6151, 1.7046], + device='cuda:0'), covar=tensor([0.0877, 0.1492, 0.1317, 0.0849, 0.1020, 0.0489, 0.1037, 0.0658], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0356, 0.0290, 0.0238, 0.0299, 0.0242, 0.0271, 0.0236], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:35:28,605 INFO [train.py:903] (0/4) Epoch 8, batch 3750, loss[loss=0.2637, simple_loss=0.3341, pruned_loss=0.09665, over 17543.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3237, pruned_loss=0.09407, over 3830990.94 frames. ], batch size: 101, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:35:36,643 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.985e+02 5.907e+02 7.282e+02 9.270e+02 2.268e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:35:52,197 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51566.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:36:30,191 INFO [train.py:903] (0/4) Epoch 8, batch 3800, loss[loss=0.2366, simple_loss=0.2991, pruned_loss=0.08698, over 19732.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3229, pruned_loss=0.0933, over 3829307.21 frames. ], batch size: 45, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:02,478 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 12:37:31,376 INFO [train.py:903] (0/4) Epoch 8, batch 3850, loss[loss=0.2703, simple_loss=0.3479, pruned_loss=0.09631, over 19296.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3236, pruned_loss=0.0936, over 3821238.67 frames. ], batch size: 70, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:35,032 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51649.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:37:40,054 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.568e+02 6.155e+02 7.716e+02 1.023e+03 2.199e+03, threshold=1.543e+03, percent-clipped=8.0 +2023-04-01 12:38:13,457 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51679.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:38:33,778 INFO [train.py:903] (0/4) Epoch 8, batch 3900, loss[loss=0.2513, simple_loss=0.332, pruned_loss=0.08525, over 19616.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3217, pruned_loss=0.09219, over 3830347.93 frames. 
], batch size: 57, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:38:44,978 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51704.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:33,677 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51744.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:37,156 INFO [train.py:903] (0/4) Epoch 8, batch 3950, loss[loss=0.2199, simple_loss=0.2861, pruned_loss=0.07685, over 19757.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3227, pruned_loss=0.09266, over 3827851.33 frames. ], batch size: 46, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:39:38,716 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3886, 2.2468, 1.5988, 1.5179, 2.1098, 1.2605, 1.1184, 1.8647], + device='cuda:0'), covar=tensor([0.0831, 0.0594, 0.0832, 0.0609, 0.0373, 0.0976, 0.0705, 0.0385], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0286, 0.0310, 0.0237, 0.0226, 0.0305, 0.0279, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:39:41,705 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 12:39:45,225 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+02 5.815e+02 7.280e+02 9.203e+02 2.422e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:39:46,646 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:52,176 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 12:39:59,557 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51765.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:37,935 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:38,730 INFO [train.py:903] (0/4) Epoch 8, batch 4000, loss[loss=0.254, simple_loss=0.33, pruned_loss=0.08902, over 19661.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3238, pruned_loss=0.09323, over 3819679.77 frames. ], batch size: 59, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:09,869 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:27,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 12:41:29,436 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7052, 2.1719, 1.7386, 1.6283, 2.0758, 1.5807, 1.4985, 1.7909], + device='cuda:0'), covar=tensor([0.0599, 0.0440, 0.0563, 0.0462, 0.0364, 0.0696, 0.0483, 0.0375], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0290, 0.0314, 0.0239, 0.0227, 0.0308, 0.0282, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:41:41,697 INFO [train.py:903] (0/4) Epoch 8, batch 4050, loss[loss=0.2824, simple_loss=0.3434, pruned_loss=0.1107, over 18211.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3231, pruned_loss=0.09303, over 3816072.80 frames. ], batch size: 83, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:45,837 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. 
limit=2.0 +2023-04-01 12:41:50,762 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.576e+02 5.742e+02 7.614e+02 9.901e+02 2.045e+03, threshold=1.523e+03, percent-clipped=5.0 +2023-04-01 12:41:55,471 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:58,910 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51859.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:11,936 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51869.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:14,876 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-04-01 12:42:43,856 INFO [train.py:903] (0/4) Epoch 8, batch 4100, loss[loss=0.2979, simple_loss=0.3596, pruned_loss=0.1181, over 19112.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3218, pruned_loss=0.09237, over 3822449.97 frames. ], batch size: 69, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:42:56,230 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:43:21,448 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 12:43:26,437 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51930.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:43:47,771 INFO [train.py:903] (0/4) Epoch 8, batch 4150, loss[loss=0.2462, simple_loss=0.3189, pruned_loss=0.08673, over 19287.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3215, pruned_loss=0.09179, over 3840447.65 frames. ], batch size: 66, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:43:56,804 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.129e+02 6.343e+02 7.798e+02 9.790e+02 2.215e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 12:44:19,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:44:50,689 INFO [train.py:903] (0/4) Epoch 8, batch 4200, loss[loss=0.1953, simple_loss=0.2673, pruned_loss=0.06169, over 19719.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3204, pruned_loss=0.0914, over 3845691.57 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:44:55,600 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-52000.pt +2023-04-01 12:44:57,654 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 12:45:28,968 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 12:45:51,380 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5030, 1.2279, 1.1038, 1.4174, 1.2167, 1.2770, 1.0508, 1.3447], + device='cuda:0'), covar=tensor([0.0867, 0.1104, 0.1326, 0.0738, 0.0883, 0.0528, 0.1178, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0350, 0.0283, 0.0234, 0.0295, 0.0239, 0.0268, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:45:53,317 INFO [train.py:903] (0/4) Epoch 8, batch 4250, loss[loss=0.2422, simple_loss=0.3041, pruned_loss=0.09013, over 19495.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3202, pruned_loss=0.09144, over 3829915.22 frames. 
], batch size: 49, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:46:01,321 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 5.455e+02 6.472e+02 8.916e+02 2.597e+03, threshold=1.294e+03, percent-clipped=4.0 +2023-04-01 12:46:08,431 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:46:11,491 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 12:46:21,927 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 12:46:55,489 INFO [train.py:903] (0/4) Epoch 8, batch 4300, loss[loss=0.2305, simple_loss=0.2997, pruned_loss=0.08067, over 19815.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.321, pruned_loss=0.09175, over 3813540.92 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:47:14,054 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52109.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:22,315 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:34,074 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:50,788 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 12:47:53,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:57,575 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9836, 1.9291, 1.5798, 1.4983, 1.2551, 1.4901, 0.5042, 1.0276], + device='cuda:0'), covar=tensor([0.0476, 0.0459, 0.0357, 0.0550, 0.0904, 0.0619, 0.0794, 0.0709], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0316, 0.0315, 0.0332, 0.0412, 0.0331, 0.0296, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 12:48:00,440 INFO [train.py:903] (0/4) Epoch 8, batch 4350, loss[loss=0.2383, simple_loss=0.3153, pruned_loss=0.08068, over 19658.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3197, pruned_loss=0.09122, over 3823152.68 frames. ], batch size: 55, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:48:06,421 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:09,409 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.672e+02 7.291e+02 9.101e+02 1.997e+03, threshold=1.458e+03, percent-clipped=8.0 +2023-04-01 12:48:29,737 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7569, 4.1450, 4.4258, 4.3994, 1.6732, 4.1385, 3.6774, 4.0292], + device='cuda:0'), covar=tensor([0.1128, 0.0724, 0.0534, 0.0476, 0.4551, 0.0485, 0.0559, 0.1069], + device='cuda:0'), in_proj_covar=tensor([0.0610, 0.0532, 0.0720, 0.0610, 0.0672, 0.0467, 0.0457, 0.0670], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 12:49:03,039 INFO [train.py:903] (0/4) Epoch 8, batch 4400, loss[loss=0.3244, simple_loss=0.3718, pruned_loss=0.1385, over 19728.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3202, pruned_loss=0.09201, over 3814588.33 frames. 
], batch size: 63, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:49:21,164 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52211.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:26,769 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 12:49:37,191 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4790, 1.5034, 1.8690, 1.9161, 2.9348, 3.9305, 3.9469, 4.3911], + device='cuda:0'), covar=tensor([0.1494, 0.2969, 0.2865, 0.1730, 0.0539, 0.0316, 0.0176, 0.0127], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0286, 0.0317, 0.0247, 0.0207, 0.0138, 0.0205, 0.0175], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 12:49:38,214 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 12:49:38,565 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:43,352 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:05,229 INFO [train.py:903] (0/4) Epoch 8, batch 4450, loss[loss=0.2032, simple_loss=0.2784, pruned_loss=0.06404, over 19464.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3199, pruned_loss=0.09129, over 3822211.52 frames. ], batch size: 49, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:50:13,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.880e+02 7.086e+02 8.839e+02 1.936e+03, threshold=1.417e+03, percent-clipped=3.0 +2023-04-01 12:50:13,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:51:06,387 INFO [train.py:903] (0/4) Epoch 8, batch 4500, loss[loss=0.2962, simple_loss=0.3578, pruned_loss=0.1173, over 19722.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3193, pruned_loss=0.0906, over 3826568.00 frames. ], batch size: 63, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:51:50,107 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:52:10,352 INFO [train.py:903] (0/4) Epoch 8, batch 4550, loss[loss=0.2086, simple_loss=0.2789, pruned_loss=0.06913, over 19134.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3179, pruned_loss=0.08989, over 3819991.34 frames. ], batch size: 42, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:52:18,695 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.922e+02 7.010e+02 8.869e+02 1.679e+03, threshold=1.402e+03, percent-clipped=2.0 +2023-04-01 12:52:18,736 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 12:52:25,128 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 12:52:41,946 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 12:53:11,447 INFO [train.py:903] (0/4) Epoch 8, batch 4600, loss[loss=0.2326, simple_loss=0.308, pruned_loss=0.07863, over 19590.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3183, pruned_loss=0.09038, over 3817668.43 frames. 
], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:53:18,524 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:54:12,886 INFO [train.py:903] (0/4) Epoch 8, batch 4650, loss[loss=0.2488, simple_loss=0.3158, pruned_loss=0.09087, over 19610.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3178, pruned_loss=0.08994, over 3816246.66 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:54:21,262 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 5.664e+02 6.903e+02 8.285e+02 1.576e+03, threshold=1.381e+03, percent-clipped=2.0 +2023-04-01 12:54:30,516 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 12:54:42,682 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 12:54:56,955 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:15,223 INFO [train.py:903] (0/4) Epoch 8, batch 4700, loss[loss=0.3027, simple_loss=0.3553, pruned_loss=0.1251, over 13451.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3184, pruned_loss=0.09019, over 3809243.98 frames. ], batch size: 138, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:55:27,863 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52505.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:39,729 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 12:55:43,532 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:18,368 INFO [train.py:903] (0/4) Epoch 8, batch 4750, loss[loss=0.2577, simple_loss=0.3362, pruned_loss=0.08957, over 19544.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3186, pruned_loss=0.09009, over 3812161.28 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:56:29,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.555e+02 6.316e+02 7.348e+02 9.529e+02 1.491e+03, threshold=1.470e+03, percent-clipped=3.0 +2023-04-01 12:56:31,055 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:52,281 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52573.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:57:22,243 INFO [train.py:903] (0/4) Epoch 8, batch 4800, loss[loss=0.2585, simple_loss=0.3374, pruned_loss=0.08975, over 18084.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3195, pruned_loss=0.09104, over 3811695.84 frames. ], batch size: 83, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:22,763 INFO [train.py:903] (0/4) Epoch 8, batch 4850, loss[loss=0.2485, simple_loss=0.3244, pruned_loss=0.08634, over 19747.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3192, pruned_loss=0.09106, over 3810103.56 frames. ], batch size: 63, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:32,082 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.021e+02 7.604e+02 9.872e+02 2.114e+03, threshold=1.521e+03, percent-clipped=8.0 +2023-04-01 12:58:45,992 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 12:58:52,918 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52670.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:58:55,093 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2826, 1.3176, 1.4224, 1.5062, 2.8168, 0.9940, 2.1323, 3.1068], + device='cuda:0'), covar=tensor([0.0520, 0.2498, 0.2563, 0.1582, 0.0812, 0.2311, 0.1119, 0.0350], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0324, 0.0335, 0.0303, 0.0329, 0.0319, 0.0308, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 12:58:58,258 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:59:08,333 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 12:59:14,079 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 12:59:14,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 12:59:23,232 INFO [train.py:903] (0/4) Epoch 8, batch 4900, loss[loss=0.2377, simple_loss=0.2947, pruned_loss=0.09036, over 19775.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.32, pruned_loss=0.09154, over 3811413.77 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:59:24,415 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 12:59:44,290 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 13:00:22,669 INFO [train.py:903] (0/4) Epoch 8, batch 4950, loss[loss=0.2352, simple_loss=0.297, pruned_loss=0.08667, over 19770.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3195, pruned_loss=0.0915, over 3811592.74 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 13:00:35,713 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 6.374e+02 8.178e+02 1.048e+03 2.702e+03, threshold=1.636e+03, percent-clipped=11.0 +2023-04-01 13:00:39,449 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4681, 1.2865, 1.4285, 1.5627, 2.9647, 0.8317, 2.1968, 3.2745], + device='cuda:0'), covar=tensor([0.0447, 0.2522, 0.2553, 0.1513, 0.0786, 0.2546, 0.1218, 0.0334], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0326, 0.0338, 0.0305, 0.0332, 0.0324, 0.0311, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:00:40,338 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 13:00:53,291 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:00:56,997 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:04,907 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-01 13:01:14,501 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3456, 2.2396, 1.7754, 1.6425, 1.5323, 1.7911, 0.3337, 1.0351], + device='cuda:0'), covar=tensor([0.0302, 0.0326, 0.0268, 0.0429, 0.0687, 0.0450, 0.0693, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0317, 0.0313, 0.0331, 0.0407, 0.0328, 0.0295, 0.0312], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:01:17,589 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:19,453 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-01 13:01:26,610 INFO [train.py:903] (0/4) Epoch 8, batch 5000, loss[loss=0.2345, simple_loss=0.3136, pruned_loss=0.0777, over 19695.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.32, pruned_loss=0.09162, over 3802334.20 frames. ], batch size: 53, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:01:29,398 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:35,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 13:01:47,278 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 13:02:28,523 INFO [train.py:903] (0/4) Epoch 8, batch 5050, loss[loss=0.24, simple_loss=0.3149, pruned_loss=0.08251, over 19664.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3187, pruned_loss=0.09059, over 3807730.58 frames. ], batch size: 53, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:02:39,042 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 5.648e+02 7.062e+02 8.811e+02 1.795e+03, threshold=1.412e+03, percent-clipped=2.0 +2023-04-01 13:03:04,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 13:03:15,058 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 13:03:30,501 INFO [train.py:903] (0/4) Epoch 8, batch 5100, loss[loss=0.2386, simple_loss=0.3042, pruned_loss=0.08652, over 18573.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3194, pruned_loss=0.09102, over 3807128.65 frames. ], batch size: 41, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:03:30,934 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0299, 2.0294, 1.6371, 1.6131, 1.4291, 1.5969, 0.1901, 0.7642], + device='cuda:0'), covar=tensor([0.0332, 0.0350, 0.0252, 0.0365, 0.0779, 0.0426, 0.0732, 0.0656], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0321, 0.0318, 0.0335, 0.0413, 0.0331, 0.0297, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:03:41,003 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 13:03:45,397 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. 
Duration: 25.035 +2023-04-01 13:03:48,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9053, 1.7089, 1.5522, 1.9708, 1.9011, 1.7151, 1.7442, 1.8633], + device='cuda:0'), covar=tensor([0.0937, 0.1622, 0.1394, 0.0992, 0.1211, 0.0516, 0.1049, 0.0652], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0358, 0.0291, 0.0240, 0.0303, 0.0245, 0.0277, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:03:50,849 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 13:03:59,135 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52917.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:04:09,616 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:32,212 INFO [train.py:903] (0/4) Epoch 8, batch 5150, loss[loss=0.2546, simple_loss=0.3313, pruned_loss=0.08893, over 19779.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.318, pruned_loss=0.0901, over 3813150.31 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:04:40,975 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:45,910 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.391e+02 6.060e+02 7.983e+02 1.041e+03 2.368e+03, threshold=1.597e+03, percent-clipped=6.0 +2023-04-01 13:04:47,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:04:55,784 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8763, 1.5932, 1.4155, 2.1438, 1.6493, 2.2041, 2.2651, 1.9187], + device='cuda:0'), covar=tensor([0.0716, 0.0864, 0.0966, 0.0748, 0.0903, 0.0585, 0.0700, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0235, 0.0233, 0.0260, 0.0249, 0.0219, 0.0210, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 13:05:19,594 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:05:37,037 INFO [train.py:903] (0/4) Epoch 8, batch 5200, loss[loss=0.2281, simple_loss=0.2981, pruned_loss=0.07909, over 16082.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.32, pruned_loss=0.09116, over 3806822.66 frames. ], batch size: 35, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:05:51,328 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 13:05:51,705 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3885, 1.3847, 1.8960, 1.4918, 2.6282, 2.1172, 2.6833, 1.1890], + device='cuda:0'), covar=tensor([0.1901, 0.3195, 0.1750, 0.1534, 0.1175, 0.1574, 0.1187, 0.2967], + device='cuda:0'), in_proj_covar=tensor([0.0468, 0.0538, 0.0542, 0.0416, 0.0576, 0.0472, 0.0637, 0.0469], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 13:06:21,619 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53032.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:06:36,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-01 13:06:38,303 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:06:39,061 INFO [train.py:903] (0/4) Epoch 8, batch 5250, loss[loss=0.2108, simple_loss=0.2818, pruned_loss=0.06993, over 19760.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3197, pruned_loss=0.09102, over 3807935.48 frames. ], batch size: 46, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:06:49,012 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.973e+02 7.081e+02 8.822e+02 3.028e+03, threshold=1.416e+03, percent-clipped=2.0 +2023-04-01 13:07:08,270 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:13,746 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:39,281 INFO [train.py:903] (0/4) Epoch 8, batch 5300, loss[loss=0.2492, simple_loss=0.323, pruned_loss=0.08771, over 19542.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3205, pruned_loss=0.09138, over 3820378.49 frames. ], batch size: 56, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:07:57,420 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 13:08:03,077 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:08:25,885 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 13:08:41,249 INFO [train.py:903] (0/4) Epoch 8, batch 5350, loss[loss=0.2053, simple_loss=0.2711, pruned_loss=0.06975, over 19790.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3204, pruned_loss=0.09123, over 3818400.06 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:08:52,775 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 6.176e+02 7.478e+02 9.376e+02 1.338e+03, threshold=1.496e+03, percent-clipped=0.0 +2023-04-01 13:09:14,589 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7935, 1.5740, 1.4038, 1.6445, 1.5345, 1.4994, 1.5465, 1.5863], + device='cuda:0'), covar=tensor([0.0848, 0.1310, 0.1430, 0.0978, 0.1207, 0.0765, 0.1143, 0.0837], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0354, 0.0287, 0.0240, 0.0299, 0.0242, 0.0273, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:09:18,557 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 13:09:44,002 INFO [train.py:903] (0/4) Epoch 8, batch 5400, loss[loss=0.2692, simple_loss=0.3288, pruned_loss=0.1048, over 19507.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3205, pruned_loss=0.09135, over 3802611.57 frames. ], batch size: 64, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:24,037 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:10:47,209 INFO [train.py:903] (0/4) Epoch 8, batch 5450, loss[loss=0.211, simple_loss=0.2837, pruned_loss=0.06911, over 19721.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3198, pruned_loss=0.09122, over 3811099.17 frames. 
], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:57,345 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.768e+02 7.150e+02 9.218e+02 2.127e+03, threshold=1.430e+03, percent-clipped=3.0 +2023-04-01 13:11:02,279 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:11:40,463 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53288.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:11:48,913 INFO [train.py:903] (0/4) Epoch 8, batch 5500, loss[loss=0.2646, simple_loss=0.3165, pruned_loss=0.1063, over 19424.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3208, pruned_loss=0.09185, over 3806684.69 frames. ], batch size: 43, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:11,184 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53313.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:12:16,567 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 13:12:23,256 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:34,450 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1614, 1.0838, 1.4589, 0.8226, 2.3178, 3.0249, 2.7939, 3.2349], + device='cuda:0'), covar=tensor([0.1443, 0.3264, 0.2960, 0.2185, 0.0459, 0.0169, 0.0234, 0.0173], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0287, 0.0315, 0.0247, 0.0207, 0.0139, 0.0205, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:12:35,592 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53333.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:50,196 INFO [train.py:903] (0/4) Epoch 8, batch 5550, loss[loss=0.2696, simple_loss=0.3393, pruned_loss=0.09992, over 18467.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3211, pruned_loss=0.09195, over 3814016.63 frames. ], batch size: 84, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:59,710 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 13:13:03,078 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 6.394e+02 7.821e+02 9.803e+02 2.197e+03, threshold=1.564e+03, percent-clipped=2.0 +2023-04-01 13:13:40,357 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:13:50,338 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 13:13:53,916 INFO [train.py:903] (0/4) Epoch 8, batch 5600, loss[loss=0.2418, simple_loss=0.3186, pruned_loss=0.08248, over 19678.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3217, pruned_loss=0.09212, over 3809015.22 frames. ], batch size: 59, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:14:21,324 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:14:57,495 INFO [train.py:903] (0/4) Epoch 8, batch 5650, loss[loss=0.2711, simple_loss=0.3318, pruned_loss=0.1052, over 19665.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3227, pruned_loss=0.09313, over 3799226.30 frames. 
], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:15:07,839 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.837e+02 5.961e+02 7.306e+02 9.270e+02 2.985e+03, threshold=1.461e+03, percent-clipped=1.0 +2023-04-01 13:15:45,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 13:15:46,228 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:15:58,761 INFO [train.py:903] (0/4) Epoch 8, batch 5700, loss[loss=0.2297, simple_loss=0.3106, pruned_loss=0.07437, over 19676.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3232, pruned_loss=0.09307, over 3798425.28 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:16:15,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:44,952 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:59,704 INFO [train.py:903] (0/4) Epoch 8, batch 5750, loss[loss=0.2562, simple_loss=0.3234, pruned_loss=0.09447, over 19531.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3215, pruned_loss=0.0914, over 3803214.10 frames. ], batch size: 54, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:17:00,958 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 13:17:10,455 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 13:17:11,664 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 5.699e+02 6.647e+02 8.184e+02 1.829e+03, threshold=1.329e+03, percent-clipped=1.0 +2023-04-01 13:17:15,820 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 13:18:02,113 INFO [train.py:903] (0/4) Epoch 8, batch 5800, loss[loss=0.2668, simple_loss=0.3316, pruned_loss=0.101, over 18904.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3216, pruned_loss=0.09147, over 3811813.88 frames. ], batch size: 74, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:18:12,767 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:04,522 INFO [train.py:903] (0/4) Epoch 8, batch 5850, loss[loss=0.2541, simple_loss=0.33, pruned_loss=0.08907, over 19489.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3212, pruned_loss=0.09113, over 3811327.19 frames. ], batch size: 64, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:19:15,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.371e+02 7.746e+02 9.220e+02 2.993e+03, threshold=1.549e+03, percent-clipped=10.0 +2023-04-01 13:19:27,604 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:41,999 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:05,080 INFO [train.py:903] (0/4) Epoch 8, batch 5900, loss[loss=0.2596, simple_loss=0.3364, pruned_loss=0.09143, over 19746.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3214, pruned_loss=0.09104, over 3806240.67 frames. 
], batch size: 63, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:20:09,559 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 13:20:30,235 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 13:20:32,887 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:47,010 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53729.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:21:06,831 INFO [train.py:903] (0/4) Epoch 8, batch 5950, loss[loss=0.2163, simple_loss=0.2945, pruned_loss=0.06907, over 19609.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3201, pruned_loss=0.09042, over 3818894.35 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:21:10,425 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1594, 3.8304, 2.1701, 2.1822, 3.3184, 2.0075, 1.4639, 1.9358], + device='cuda:0'), covar=tensor([0.0965, 0.0301, 0.0793, 0.0653, 0.0376, 0.0941, 0.0823, 0.0587], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0293, 0.0319, 0.0245, 0.0228, 0.0319, 0.0291, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:21:19,050 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 5.886e+02 7.195e+02 1.025e+03 2.007e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-04-01 13:21:50,757 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:00,077 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:04,770 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:09,975 INFO [train.py:903] (0/4) Epoch 8, batch 6000, loss[loss=0.2806, simple_loss=0.3371, pruned_loss=0.1121, over 19598.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3205, pruned_loss=0.0911, over 3797427.81 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:22:09,975 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 13:22:22,632 INFO [train.py:937] (0/4) Epoch 8, validation: loss=0.1864, simple_loss=0.2865, pruned_loss=0.04314, over 944034.00 frames. +2023-04-01 13:22:22,633 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 13:22:34,008 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-04-01 13:22:48,334 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:06,483 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1188, 5.4760, 3.1817, 4.6940, 1.1380, 5.2220, 5.3378, 5.4438], + device='cuda:0'), covar=tensor([0.0415, 0.0851, 0.1664, 0.0613, 0.3929, 0.0561, 0.0681, 0.0884], + device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0348, 0.0410, 0.0305, 0.0373, 0.0337, 0.0329, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 13:23:10,013 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0335, 1.7773, 1.5947, 1.9944, 1.8593, 1.8659, 1.6498, 1.9883], + device='cuda:0'), covar=tensor([0.0850, 0.1501, 0.1307, 0.0977, 0.1188, 0.0482, 0.1109, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0355, 0.0289, 0.0238, 0.0298, 0.0241, 0.0272, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:23:24,801 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53844.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:26,621 INFO [train.py:903] (0/4) Epoch 8, batch 6050, loss[loss=0.2853, simple_loss=0.3504, pruned_loss=0.1101, over 19444.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3203, pruned_loss=0.09081, over 3794606.88 frames. ], batch size: 64, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:23:39,145 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.583e+02 7.013e+02 9.917e+02 2.418e+03, threshold=1.403e+03, percent-clipped=8.0 +2023-04-01 13:24:06,029 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6279, 2.4973, 1.6751, 1.5264, 2.2547, 1.3073, 1.3266, 1.8556], + device='cuda:0'), covar=tensor([0.0803, 0.0543, 0.0953, 0.0663, 0.0403, 0.1115, 0.0705, 0.0456], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0292, 0.0314, 0.0244, 0.0227, 0.0316, 0.0290, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:24:30,159 INFO [train.py:903] (0/4) Epoch 8, batch 6100, loss[loss=0.2404, simple_loss=0.3254, pruned_loss=0.07776, over 19683.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3197, pruned_loss=0.0902, over 3790984.85 frames. ], batch size: 58, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:25:31,558 INFO [train.py:903] (0/4) Epoch 8, batch 6150, loss[loss=0.2354, simple_loss=0.3063, pruned_loss=0.08229, over 19748.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3206, pruned_loss=0.09102, over 3796070.61 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:25:42,179 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.923e+02 5.703e+02 7.303e+02 8.849e+02 1.874e+03, threshold=1.461e+03, percent-clipped=4.0 +2023-04-01 13:25:56,449 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 13:26:07,872 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:33,363 INFO [train.py:903] (0/4) Epoch 8, batch 6200, loss[loss=0.2609, simple_loss=0.329, pruned_loss=0.09644, over 19742.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3198, pruned_loss=0.09097, over 3808490.00 frames. 
], batch size: 63, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:26:35,345 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 13:26:37,419 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:39,237 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-54000.pt +2023-04-01 13:27:25,486 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54037.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:37,604 INFO [train.py:903] (0/4) Epoch 8, batch 6250, loss[loss=0.2861, simple_loss=0.3467, pruned_loss=0.1128, over 17600.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3185, pruned_loss=0.09005, over 3816522.50 frames. ], batch size: 101, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:27:39,096 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0161, 1.2045, 1.3174, 1.3595, 2.5677, 0.9142, 1.9109, 2.8518], + device='cuda:0'), covar=tensor([0.0521, 0.2534, 0.2633, 0.1609, 0.0820, 0.2325, 0.1201, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0323, 0.0334, 0.0304, 0.0330, 0.0319, 0.0311, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:27:40,341 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:49,335 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.888e+02 5.723e+02 7.068e+02 9.572e+02 2.133e+03, threshold=1.414e+03, percent-clipped=6.0 +2023-04-01 13:27:58,109 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:04,584 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 13:28:09,556 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6237, 1.3801, 1.4585, 1.9444, 1.5197, 1.8512, 2.0097, 1.7811], + device='cuda:0'), covar=tensor([0.0811, 0.1015, 0.1032, 0.0844, 0.0933, 0.0756, 0.0838, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0228, 0.0226, 0.0254, 0.0243, 0.0211, 0.0206, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 13:28:10,804 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:40,884 INFO [train.py:903] (0/4) Epoch 8, batch 6300, loss[loss=0.2241, simple_loss=0.2884, pruned_loss=0.07991, over 19749.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3185, pruned_loss=0.08994, over 3817370.86 frames. ], batch size: 46, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:28:45,594 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:16,207 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:41,755 INFO [train.py:903] (0/4) Epoch 8, batch 6350, loss[loss=0.2223, simple_loss=0.294, pruned_loss=0.07534, over 19338.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3189, pruned_loss=0.09028, over 3831023.47 frames. 
], batch size: 44, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:29:52,040 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.055e+02 7.520e+02 8.667e+02 2.456e+03, threshold=1.504e+03, percent-clipped=3.0 +2023-04-01 13:30:19,246 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2418, 3.6905, 3.8605, 3.8280, 1.3341, 3.6045, 3.1747, 3.5752], + device='cuda:0'), covar=tensor([0.1280, 0.0779, 0.0614, 0.0598, 0.4635, 0.0548, 0.0656, 0.1122], + device='cuda:0'), in_proj_covar=tensor([0.0617, 0.0543, 0.0734, 0.0611, 0.0675, 0.0479, 0.0463, 0.0677], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 13:30:43,420 INFO [train.py:903] (0/4) Epoch 8, batch 6400, loss[loss=0.2391, simple_loss=0.306, pruned_loss=0.08612, over 19847.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3191, pruned_loss=0.09052, over 3836734.88 frames. ], batch size: 52, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:45,983 INFO [train.py:903] (0/4) Epoch 8, batch 6450, loss[loss=0.2204, simple_loss=0.2817, pruned_loss=0.07955, over 19774.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3188, pruned_loss=0.09035, over 3819923.89 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:58,335 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 5.571e+02 6.761e+02 8.500e+02 1.702e+03, threshold=1.352e+03, percent-clipped=3.0 +2023-04-01 13:32:30,248 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 13:32:35,376 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 13:32:48,754 INFO [train.py:903] (0/4) Epoch 8, batch 6500, loss[loss=0.247, simple_loss=0.3248, pruned_loss=0.08458, over 19673.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3176, pruned_loss=0.08938, over 3832084.01 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:32:54,434 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 13:33:50,179 INFO [train.py:903] (0/4) Epoch 8, batch 6550, loss[loss=0.2514, simple_loss=0.3239, pruned_loss=0.08949, over 19680.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3193, pruned_loss=0.09036, over 3832635.75 frames. ], batch size: 59, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:33:53,971 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3118, 1.7554, 1.9057, 2.6661, 1.8620, 2.6935, 2.5880, 2.3405], + device='cuda:0'), covar=tensor([0.0726, 0.0924, 0.0988, 0.0857, 0.0966, 0.0583, 0.0840, 0.0597], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0234, 0.0232, 0.0256, 0.0246, 0.0216, 0.0207, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 13:34:00,552 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+02 6.329e+02 8.151e+02 1.084e+03 2.341e+03, threshold=1.630e+03, percent-clipped=12.0 +2023-04-01 13:34:26,067 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-01 13:34:51,138 INFO [train.py:903] (0/4) Epoch 8, batch 6600, loss[loss=0.2179, simple_loss=0.2938, pruned_loss=0.07098, over 19765.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.318, pruned_loss=0.08977, over 3840995.77 frames. 
], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:35:16,706 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-01 13:35:19,566 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9629, 1.4766, 1.5217, 1.9522, 1.5790, 1.7631, 1.6290, 1.8073], + device='cuda:0'), covar=tensor([0.0820, 0.1576, 0.1270, 0.0915, 0.1165, 0.0453, 0.1003, 0.0615], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0359, 0.0290, 0.0241, 0.0300, 0.0244, 0.0270, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:35:32,053 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 13:35:46,650 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2607, 1.3633, 1.6561, 1.4236, 2.5229, 2.0686, 2.4921, 0.9753], + device='cuda:0'), covar=tensor([0.2041, 0.3471, 0.2042, 0.1719, 0.1171, 0.1832, 0.1366, 0.3345], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0545, 0.0549, 0.0420, 0.0573, 0.0474, 0.0640, 0.0474], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 13:35:53,837 INFO [train.py:903] (0/4) Epoch 8, batch 6650, loss[loss=0.2365, simple_loss=0.3025, pruned_loss=0.0852, over 19740.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3179, pruned_loss=0.08969, over 3829148.19 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:36:04,915 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.746e+02 5.865e+02 7.808e+02 1.010e+03 1.907e+03, threshold=1.562e+03, percent-clipped=2.0 +2023-04-01 13:36:55,565 INFO [train.py:903] (0/4) Epoch 8, batch 6700, loss[loss=0.2175, simple_loss=0.2804, pruned_loss=0.07735, over 19742.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3179, pruned_loss=0.0894, over 3823104.86 frames. ], batch size: 46, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:37:52,318 INFO [train.py:903] (0/4) Epoch 8, batch 6750, loss[loss=0.2348, simple_loss=0.3125, pruned_loss=0.0786, over 19589.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3182, pruned_loss=0.09015, over 3809203.82 frames. ], batch size: 52, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:38:03,632 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.992e+02 6.380e+02 7.224e+02 9.353e+02 2.017e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 13:38:12,145 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 13:38:50,509 INFO [train.py:903] (0/4) Epoch 8, batch 6800, loss[loss=0.2128, simple_loss=0.2887, pruned_loss=0.06844, over 19750.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3179, pruned_loss=0.09001, over 3821022.05 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:39:13,885 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.44 vs. limit=5.0 +2023-04-01 13:39:19,159 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-8.pt +2023-04-01 13:39:34,794 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 13:39:35,256 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. 
Duration: 26.6166875 +2023-04-01 13:39:38,465 INFO [train.py:903] (0/4) Epoch 9, batch 0, loss[loss=0.2728, simple_loss=0.3405, pruned_loss=0.1026, over 19748.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3405, pruned_loss=0.1026, over 19748.00 frames. ], batch size: 63, lr: 9.56e-03, grad_scale: 8.0 +2023-04-01 13:39:38,465 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 13:39:49,509 INFO [train.py:937] (0/4) Epoch 9, validation: loss=0.1866, simple_loss=0.2872, pruned_loss=0.04294, over 944034.00 frames. +2023-04-01 13:39:49,510 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 13:40:03,807 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 13:40:28,239 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.879e+02 5.500e+02 7.208e+02 8.930e+02 1.459e+03, threshold=1.442e+03, percent-clipped=1.0 +2023-04-01 13:40:51,457 INFO [train.py:903] (0/4) Epoch 9, batch 50, loss[loss=0.2627, simple_loss=0.3248, pruned_loss=0.1004, over 13579.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3249, pruned_loss=0.09508, over 855116.27 frames. ], batch size: 136, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:41:02,921 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54682.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:41:26,435 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 13:41:53,043 INFO [train.py:903] (0/4) Epoch 9, batch 100, loss[loss=0.2272, simple_loss=0.3062, pruned_loss=0.07409, over 17393.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3213, pruned_loss=0.09162, over 1527801.42 frames. ], batch size: 101, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:42:05,367 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 13:42:31,167 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.564e+02 7.190e+02 9.001e+02 2.877e+03, threshold=1.438e+03, percent-clipped=2.0 +2023-04-01 13:42:53,291 INFO [train.py:903] (0/4) Epoch 9, batch 150, loss[loss=0.2723, simple_loss=0.3338, pruned_loss=0.1054, over 17383.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.32, pruned_loss=0.0905, over 2050338.23 frames. ], batch size: 101, lr: 9.54e-03, grad_scale: 16.0 +2023-04-01 13:43:53,898 INFO [train.py:903] (0/4) Epoch 9, batch 200, loss[loss=0.2331, simple_loss=0.3024, pruned_loss=0.08186, over 19847.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3201, pruned_loss=0.09137, over 2443493.41 frames. ], batch size: 52, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:43:56,317 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-01 13:44:03,669 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6873, 2.0618, 2.3307, 2.7385, 2.4713, 2.3372, 2.2070, 2.8269], + device='cuda:0'), covar=tensor([0.0689, 0.1618, 0.1180, 0.0796, 0.1155, 0.0419, 0.0972, 0.0492], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0355, 0.0290, 0.0239, 0.0297, 0.0244, 0.0269, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:44:36,456 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.883e+02 7.738e+02 9.204e+02 1.688e+03, threshold=1.548e+03, percent-clipped=2.0 +2023-04-01 13:44:47,188 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6533, 1.5575, 1.5894, 1.5862, 3.1477, 1.1040, 2.1764, 3.5139], + device='cuda:0'), covar=tensor([0.0441, 0.2441, 0.2465, 0.1732, 0.0713, 0.2361, 0.1236, 0.0294], + device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0330, 0.0339, 0.0309, 0.0339, 0.0323, 0.0313, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:44:57,170 INFO [train.py:903] (0/4) Epoch 9, batch 250, loss[loss=0.2198, simple_loss=0.2847, pruned_loss=0.07748, over 19720.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3183, pruned_loss=0.09024, over 2755074.32 frames. ], batch size: 46, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:45:06,563 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:45:39,327 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4040, 2.1970, 1.6614, 1.3776, 1.9761, 1.2474, 1.3560, 1.8358], + device='cuda:0'), covar=tensor([0.0748, 0.0530, 0.0788, 0.0585, 0.0459, 0.0941, 0.0585, 0.0353], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0290, 0.0321, 0.0241, 0.0228, 0.0310, 0.0287, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:45:57,845 INFO [train.py:903] (0/4) Epoch 9, batch 300, loss[loss=0.2799, simple_loss=0.3539, pruned_loss=0.1029, over 18771.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.318, pruned_loss=0.08978, over 2988915.34 frames. ], batch size: 74, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:46:39,713 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 5.660e+02 7.032e+02 9.457e+02 2.087e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-04-01 13:47:01,351 INFO [train.py:903] (0/4) Epoch 9, batch 350, loss[loss=0.2461, simple_loss=0.3155, pruned_loss=0.08836, over 19682.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3168, pruned_loss=0.0888, over 3185257.41 frames. ], batch size: 53, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:47:07,236 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:48:03,059 INFO [train.py:903] (0/4) Epoch 9, batch 400, loss[loss=0.2455, simple_loss=0.3186, pruned_loss=0.08624, over 19796.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3178, pruned_loss=0.08938, over 3336028.45 frames. 
], batch size: 56, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:48:06,563 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55026.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:48:38,439 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 13:48:44,522 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.195e+02 5.719e+02 7.898e+02 1.018e+03 2.327e+03, threshold=1.580e+03, percent-clipped=4.0 +2023-04-01 13:49:04,321 INFO [train.py:903] (0/4) Epoch 9, batch 450, loss[loss=0.2825, simple_loss=0.3463, pruned_loss=0.1094, over 19344.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.318, pruned_loss=0.08971, over 3436635.60 frames. ], batch size: 70, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:49:42,336 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 13:49:43,530 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 13:50:00,608 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3962, 2.1573, 1.5938, 1.4145, 2.0027, 1.1610, 1.2115, 1.7096], + device='cuda:0'), covar=tensor([0.0806, 0.0573, 0.0890, 0.0632, 0.0397, 0.1023, 0.0640, 0.0414], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0290, 0.0321, 0.0242, 0.0229, 0.0312, 0.0286, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 13:50:01,631 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2090, 1.1889, 1.6595, 0.9712, 2.5369, 3.2899, 3.0428, 3.4794], + device='cuda:0'), covar=tensor([0.1530, 0.3315, 0.2920, 0.2148, 0.0472, 0.0183, 0.0229, 0.0174], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0288, 0.0317, 0.0249, 0.0209, 0.0141, 0.0206, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:50:07,774 INFO [train.py:903] (0/4) Epoch 9, batch 500, loss[loss=0.199, simple_loss=0.2758, pruned_loss=0.06109, over 19421.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3191, pruned_loss=0.09014, over 3525361.88 frames. ], batch size: 48, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:50:30,907 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55141.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:50:47,945 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 6.183e+02 7.261e+02 8.870e+02 1.589e+03, threshold=1.452e+03, percent-clipped=1.0 +2023-04-01 13:51:00,576 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7641, 4.2124, 2.7273, 3.6935, 1.2086, 4.0215, 4.0087, 4.2316], + device='cuda:0'), covar=tensor([0.0579, 0.0978, 0.1965, 0.0810, 0.3898, 0.0773, 0.0716, 0.1028], + device='cuda:0'), in_proj_covar=tensor([0.0406, 0.0347, 0.0412, 0.0307, 0.0373, 0.0340, 0.0329, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 13:51:01,627 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:51:04,366 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 13:51:10,560 INFO [train.py:903] (0/4) Epoch 9, batch 550, loss[loss=0.2744, simple_loss=0.3479, pruned_loss=0.1004, over 19696.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.319, pruned_loss=0.09014, over 3593621.64 frames. ], batch size: 59, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:14,550 INFO [train.py:903] (0/4) Epoch 9, batch 600, loss[loss=0.2774, simple_loss=0.3371, pruned_loss=0.1088, over 17418.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3193, pruned_loss=0.09016, over 3645097.06 frames. ], batch size: 101, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:15,824 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:21,725 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:47,918 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3428, 1.3467, 1.6400, 1.2953, 2.4795, 3.0857, 2.9041, 3.2319], + device='cuda:0'), covar=tensor([0.1537, 0.3350, 0.3133, 0.2212, 0.0682, 0.0293, 0.0300, 0.0261], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0290, 0.0319, 0.0250, 0.0211, 0.0142, 0.0208, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:52:55,194 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.838e+02 6.783e+02 8.586e+02 3.812e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 13:52:57,862 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2546, 3.8044, 3.8924, 3.8476, 1.4844, 3.5870, 3.2426, 3.5895], + device='cuda:0'), covar=tensor([0.1231, 0.0632, 0.0576, 0.0610, 0.4390, 0.0626, 0.0614, 0.1053], + device='cuda:0'), in_proj_covar=tensor([0.0619, 0.0543, 0.0732, 0.0613, 0.0668, 0.0476, 0.0462, 0.0671], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 13:52:58,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 13:53:13,356 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2247, 2.1160, 1.7660, 1.6450, 1.5550, 1.7364, 0.3807, 0.9628], + device='cuda:0'), covar=tensor([0.0304, 0.0328, 0.0237, 0.0398, 0.0703, 0.0380, 0.0681, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0313, 0.0313, 0.0333, 0.0408, 0.0334, 0.0296, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 13:53:16,399 INFO [train.py:903] (0/4) Epoch 9, batch 650, loss[loss=0.2864, simple_loss=0.3492, pruned_loss=0.1117, over 19450.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.318, pruned_loss=0.08978, over 3685869.05 frames. ], batch size: 70, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:19,294 INFO [train.py:903] (0/4) Epoch 9, batch 700, loss[loss=0.2387, simple_loss=0.3006, pruned_loss=0.08837, over 19383.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3178, pruned_loss=0.08922, over 3720194.94 frames. 
], batch size: 47, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:41,883 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55339.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:55:02,469 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.553e+02 6.808e+02 9.255e+02 2.546e+03, threshold=1.362e+03, percent-clipped=4.0 +2023-04-01 13:55:22,970 INFO [train.py:903] (0/4) Epoch 9, batch 750, loss[loss=0.2268, simple_loss=0.2944, pruned_loss=0.0796, over 19754.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3183, pruned_loss=0.08983, over 3736593.00 frames. ], batch size: 48, lr: 9.49e-03, grad_scale: 4.0 +2023-04-01 13:55:53,610 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:56:25,969 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55422.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:26,675 INFO [train.py:903] (0/4) Epoch 9, batch 800, loss[loss=0.2532, simple_loss=0.325, pruned_loss=0.09074, over 19776.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3181, pruned_loss=0.08931, over 3765973.51 frames. ], batch size: 56, lr: 9.49e-03, grad_scale: 8.0 +2023-04-01 13:56:41,973 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:57:07,881 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.631e+02 7.179e+02 9.586e+02 1.610e+03, threshold=1.436e+03, percent-clipped=4.0 +2023-04-01 13:57:28,780 INFO [train.py:903] (0/4) Epoch 9, batch 850, loss[loss=0.3247, simple_loss=0.3647, pruned_loss=0.1424, over 13200.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3163, pruned_loss=0.08805, over 3779019.43 frames. ], batch size: 135, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:58:10,412 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9386, 1.9701, 2.0861, 2.8454, 1.9112, 2.6632, 2.5580, 1.9066], + device='cuda:0'), covar=tensor([0.3121, 0.2661, 0.1248, 0.1407, 0.2831, 0.1120, 0.2638, 0.2322], + device='cuda:0'), in_proj_covar=tensor([0.0743, 0.0756, 0.0627, 0.0872, 0.0751, 0.0658, 0.0774, 0.0677], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 13:58:14,346 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:58:22,713 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 13:58:29,605 INFO [train.py:903] (0/4) Epoch 9, batch 900, loss[loss=0.2578, simple_loss=0.3332, pruned_loss=0.09123, over 19681.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3173, pruned_loss=0.08855, over 3797810.27 frames. ], batch size: 58, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:12,471 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.962e+02 5.974e+02 7.143e+02 8.825e+02 2.413e+03, threshold=1.429e+03, percent-clipped=6.0 +2023-04-01 13:59:32,019 INFO [train.py:903] (0/4) Epoch 9, batch 950, loss[loss=0.2901, simple_loss=0.3613, pruned_loss=0.1095, over 19295.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3168, pruned_loss=0.08832, over 3810386.40 frames. 
], batch size: 66, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:32,202 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:59:37,619 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 14:00:01,046 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:32,376 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:35,409 INFO [train.py:903] (0/4) Epoch 9, batch 1000, loss[loss=0.2506, simple_loss=0.3136, pruned_loss=0.09382, over 19855.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3179, pruned_loss=0.08913, over 3814548.98 frames. ], batch size: 52, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:00:37,956 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:01:18,081 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.465e+02 5.596e+02 6.834e+02 8.838e+02 1.578e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 14:01:29,803 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 14:01:39,305 INFO [train.py:903] (0/4) Epoch 9, batch 1050, loss[loss=0.2519, simple_loss=0.3241, pruned_loss=0.08978, over 19368.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3197, pruned_loss=0.09068, over 3800873.01 frames. ], batch size: 66, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:01:57,352 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:02:10,954 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 14:02:42,621 INFO [train.py:903] (0/4) Epoch 9, batch 1100, loss[loss=0.2528, simple_loss=0.3299, pruned_loss=0.08785, over 19579.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.319, pruned_loss=0.09036, over 3796415.37 frames. ], batch size: 61, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:02:54,472 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2926, 1.3372, 1.6378, 1.4343, 2.2377, 2.1346, 2.3057, 0.7409], + device='cuda:0'), covar=tensor([0.1823, 0.3261, 0.1823, 0.1418, 0.1162, 0.1556, 0.1201, 0.3190], + device='cuda:0'), in_proj_covar=tensor([0.0476, 0.0552, 0.0557, 0.0425, 0.0579, 0.0478, 0.0641, 0.0477], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 14:03:25,902 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 6.007e+02 7.412e+02 9.315e+02 2.515e+03, threshold=1.482e+03, percent-clipped=6.0 +2023-04-01 14:03:36,838 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55766.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:03:45,596 INFO [train.py:903] (0/4) Epoch 9, batch 1150, loss[loss=0.1871, simple_loss=0.2635, pruned_loss=0.05533, over 18592.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.32, pruned_loss=0.09091, over 3786183.29 frames. ], batch size: 41, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:04:16,530 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-01 14:04:50,174 INFO [train.py:903] (0/4) Epoch 9, batch 1200, loss[loss=0.313, simple_loss=0.3684, pruned_loss=0.1288, over 19607.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3199, pruned_loss=0.09055, over 3800885.11 frames. ], batch size: 61, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:04:55,602 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 14:05:19,005 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 14:05:31,643 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.284e+02 5.926e+02 7.645e+02 1.010e+03 3.329e+03, threshold=1.529e+03, percent-clipped=6.0 +2023-04-01 14:05:44,721 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:05:53,923 INFO [train.py:903] (0/4) Epoch 9, batch 1250, loss[loss=0.287, simple_loss=0.3515, pruned_loss=0.1112, over 19329.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3195, pruned_loss=0.09061, over 3806453.25 frames. ], batch size: 66, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:06:03,230 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:17,962 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6045, 1.3188, 1.6743, 1.2943, 2.7526, 3.7544, 3.5673, 4.0322], + device='cuda:0'), covar=tensor([0.1174, 0.3050, 0.2852, 0.1980, 0.0445, 0.0150, 0.0184, 0.0133], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0286, 0.0315, 0.0248, 0.0207, 0.0141, 0.0206, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:06:35,442 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:55,962 INFO [train.py:903] (0/4) Epoch 9, batch 1300, loss[loss=0.2123, simple_loss=0.2946, pruned_loss=0.06501, over 19539.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3189, pruned_loss=0.09049, over 3803745.90 frames. ], batch size: 56, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:07:07,846 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.94 vs. limit=5.0 +2023-04-01 14:07:23,915 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:39,624 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.793e+02 6.909e+02 8.321e+02 2.022e+03, threshold=1.382e+03, percent-clipped=2.0 +2023-04-01 14:07:54,312 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:58,594 INFO [train.py:903] (0/4) Epoch 9, batch 1350, loss[loss=0.2457, simple_loss=0.3228, pruned_loss=0.08429, over 19647.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3176, pruned_loss=0.08956, over 3808471.22 frames. ], batch size: 58, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:08:33,446 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-56000.pt +2023-04-01 14:08:50,498 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-01 14:09:02,607 INFO [train.py:903] (0/4) Epoch 9, batch 1400, loss[loss=0.2211, simple_loss=0.3079, pruned_loss=0.06711, over 19078.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.318, pruned_loss=0.08965, over 3815240.61 frames. ], batch size: 69, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:09:47,083 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.536e+02 7.314e+02 8.995e+02 2.483e+03, threshold=1.463e+03, percent-clipped=9.0 +2023-04-01 14:10:07,183 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 14:10:08,161 INFO [train.py:903] (0/4) Epoch 9, batch 1450, loss[loss=0.2641, simple_loss=0.3308, pruned_loss=0.09871, over 19729.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3181, pruned_loss=0.08995, over 3818585.15 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:10:33,652 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9667, 4.3247, 4.5994, 4.5816, 1.5198, 4.1827, 3.7586, 4.2154], + device='cuda:0'), covar=tensor([0.1196, 0.0680, 0.0522, 0.0493, 0.5028, 0.0546, 0.0624, 0.1029], + device='cuda:0'), in_proj_covar=tensor([0.0615, 0.0543, 0.0739, 0.0621, 0.0680, 0.0486, 0.0467, 0.0675], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 14:10:55,227 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:11:11,143 INFO [train.py:903] (0/4) Epoch 9, batch 1500, loss[loss=0.264, simple_loss=0.3403, pruned_loss=0.09382, over 19676.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3178, pruned_loss=0.08981, over 3834083.05 frames. ], batch size: 55, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:11:29,349 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8267, 1.3171, 1.4181, 1.5967, 3.2806, 0.9442, 2.2205, 3.4938], + device='cuda:0'), covar=tensor([0.0375, 0.2747, 0.2768, 0.1684, 0.0745, 0.2664, 0.1250, 0.0337], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0328, 0.0339, 0.0308, 0.0333, 0.0325, 0.0315, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:11:52,401 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.776e+02 6.008e+02 7.164e+02 9.066e+02 2.093e+03, threshold=1.433e+03, percent-clipped=3.0 +2023-04-01 14:12:01,104 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:12:12,299 INFO [train.py:903] (0/4) Epoch 9, batch 1550, loss[loss=0.2258, simple_loss=0.3129, pruned_loss=0.06933, over 19775.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3202, pruned_loss=0.09134, over 3824422.48 frames. 
], batch size: 56, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:12:26,264 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7393, 4.2322, 4.4163, 4.3976, 1.5633, 4.1318, 3.5724, 4.0535], + device='cuda:0'), covar=tensor([0.1281, 0.0638, 0.0525, 0.0522, 0.5049, 0.0530, 0.0631, 0.0995], + device='cuda:0'), in_proj_covar=tensor([0.0608, 0.0538, 0.0731, 0.0615, 0.0678, 0.0479, 0.0460, 0.0671], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 14:12:59,851 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:15,311 INFO [train.py:903] (0/4) Epoch 9, batch 1600, loss[loss=0.2815, simple_loss=0.348, pruned_loss=0.1075, over 18064.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3194, pruned_loss=0.09057, over 3827840.75 frames. ], batch size: 83, lr: 9.42e-03, grad_scale: 8.0 +2023-04-01 14:13:18,936 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:41,614 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 14:13:59,884 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.781e+02 6.769e+02 8.617e+02 2.222e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-01 14:14:19,016 INFO [train.py:903] (0/4) Epoch 9, batch 1650, loss[loss=0.363, simple_loss=0.3912, pruned_loss=0.1674, over 13009.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.319, pruned_loss=0.09068, over 3799348.09 frames. ], batch size: 136, lr: 9.42e-03, grad_scale: 4.0 +2023-04-01 14:14:21,806 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6393, 1.6658, 1.8070, 1.7299, 4.0809, 1.0362, 2.2956, 4.2888], + device='cuda:0'), covar=tensor([0.0349, 0.2449, 0.2429, 0.1685, 0.0653, 0.2639, 0.1421, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0321, 0.0335, 0.0301, 0.0328, 0.0322, 0.0311, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:15:20,633 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1609, 2.0700, 1.7288, 1.6105, 1.4940, 1.6804, 0.3499, 1.0008], + device='cuda:0'), covar=tensor([0.0360, 0.0399, 0.0297, 0.0448, 0.0845, 0.0459, 0.0747, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0317, 0.0316, 0.0333, 0.0409, 0.0335, 0.0295, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:15:22,541 INFO [train.py:903] (0/4) Epoch 9, batch 1700, loss[loss=0.2302, simple_loss=0.302, pruned_loss=0.0792, over 19480.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3179, pruned_loss=0.0897, over 3802636.48 frames. ], batch size: 49, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:15:25,173 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:16:02,378 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 14:16:05,921 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.619e+02 6.768e+02 8.904e+02 2.101e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-04-01 14:16:24,627 INFO [train.py:903] (0/4) Epoch 9, batch 1750, loss[loss=0.2459, simple_loss=0.3167, pruned_loss=0.08756, over 19588.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3177, pruned_loss=0.08943, over 3812679.59 frames. ], batch size: 52, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:17:24,011 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 14:17:26,706 INFO [train.py:903] (0/4) Epoch 9, batch 1800, loss[loss=0.2306, simple_loss=0.304, pruned_loss=0.07856, over 19699.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3187, pruned_loss=0.09008, over 3807014.92 frames. ], batch size: 53, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:09,955 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.839e+02 7.002e+02 8.564e+02 1.629e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 14:18:25,592 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 14:18:29,985 INFO [train.py:903] (0/4) Epoch 9, batch 1850, loss[loss=0.2745, simple_loss=0.3417, pruned_loss=0.1036, over 19778.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.318, pruned_loss=0.08933, over 3806344.92 frames. ], batch size: 54, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:40,529 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:02,326 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 14:19:10,212 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:12,018 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:32,999 INFO [train.py:903] (0/4) Epoch 9, batch 1900, loss[loss=0.2685, simple_loss=0.3335, pruned_loss=0.1017, over 19600.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3176, pruned_loss=0.08909, over 3801894.86 frames. ], batch size: 61, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:19:34,870 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-01 14:19:45,165 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3160, 2.2217, 1.8405, 1.7076, 1.6559, 1.7373, 0.3368, 1.0608], + device='cuda:0'), covar=tensor([0.0290, 0.0324, 0.0287, 0.0396, 0.0646, 0.0479, 0.0762, 0.0611], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0314, 0.0311, 0.0332, 0.0405, 0.0331, 0.0292, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:19:48,396 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 14:19:54,885 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-01 14:19:55,172 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56541.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:20:16,633 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.665e+02 6.760e+02 8.403e+02 1.758e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-04-01 14:20:19,007 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 14:20:35,965 INFO [train.py:903] (0/4) Epoch 9, batch 1950, loss[loss=0.2074, simple_loss=0.2773, pruned_loss=0.06878, over 19476.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3177, pruned_loss=0.08892, over 3813467.00 frames. ], batch size: 49, lr: 9.39e-03, grad_scale: 4.0 +2023-04-01 14:20:46,754 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:18,991 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:22,317 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7621, 1.4107, 1.9054, 1.6848, 4.1855, 0.8221, 2.3487, 4.4082], + device='cuda:0'), covar=tensor([0.0353, 0.2559, 0.2504, 0.1657, 0.0710, 0.2774, 0.1414, 0.0250], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0322, 0.0335, 0.0301, 0.0331, 0.0319, 0.0310, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:21:38,550 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:39,347 INFO [train.py:903] (0/4) Epoch 9, batch 2000, loss[loss=0.2105, simple_loss=0.272, pruned_loss=0.07452, over 19717.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3162, pruned_loss=0.08756, over 3823197.60 frames. ], batch size: 46, lr: 9.39e-03, grad_scale: 8.0 +2023-04-01 14:21:41,563 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-04-01 14:22:22,879 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.506e+02 7.081e+02 9.090e+02 3.144e+03, threshold=1.416e+03, percent-clipped=7.0 +2023-04-01 14:22:36,118 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 14:22:42,524 INFO [train.py:903] (0/4) Epoch 9, batch 2050, loss[loss=0.2323, simple_loss=0.3041, pruned_loss=0.08018, over 19561.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.317, pruned_loss=0.08889, over 3793298.77 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:22:56,339 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 14:22:57,521 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 14:23:19,236 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 14:23:44,278 INFO [train.py:903] (0/4) Epoch 9, batch 2100, loss[loss=0.2633, simple_loss=0.3403, pruned_loss=0.09318, over 19313.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3173, pruned_loss=0.08911, over 3805234.24 frames. 
], batch size: 66, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:24:12,024 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 14:24:29,325 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 5.573e+02 6.906e+02 8.990e+02 1.566e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-01 14:24:35,337 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 14:24:48,316 INFO [train.py:903] (0/4) Epoch 9, batch 2150, loss[loss=0.2298, simple_loss=0.3059, pruned_loss=0.07686, over 19690.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3169, pruned_loss=0.08871, over 3814966.00 frames. ], batch size: 55, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:25:48,735 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56821.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:25:50,905 INFO [train.py:903] (0/4) Epoch 9, batch 2200, loss[loss=0.2575, simple_loss=0.3308, pruned_loss=0.09207, over 19299.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.317, pruned_loss=0.08865, over 3814714.40 frames. ], batch size: 70, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:26:36,144 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 5.854e+02 7.300e+02 9.690e+02 2.298e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 14:26:46,346 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.53 vs. limit=5.0 +2023-04-01 14:26:57,258 INFO [train.py:903] (0/4) Epoch 9, batch 2250, loss[loss=0.2457, simple_loss=0.3027, pruned_loss=0.09428, over 19394.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3166, pruned_loss=0.089, over 3810859.83 frames. ], batch size: 48, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:27:03,664 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:27:11,764 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56885.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:27:34,064 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:27:49,936 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3308, 3.9545, 2.6246, 3.5173, 1.1669, 3.6842, 3.7502, 3.7874], + device='cuda:0'), covar=tensor([0.0699, 0.1061, 0.1933, 0.0757, 0.3575, 0.0786, 0.0708, 0.1056], + device='cuda:0'), in_proj_covar=tensor([0.0402, 0.0343, 0.0408, 0.0299, 0.0369, 0.0336, 0.0327, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 14:28:00,647 INFO [train.py:903] (0/4) Epoch 9, batch 2300, loss[loss=0.2368, simple_loss=0.315, pruned_loss=0.07934, over 19547.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3159, pruned_loss=0.08826, over 3805294.34 frames. ], batch size: 56, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:28:05,878 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:13,784 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 14:28:22,612 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9737, 2.0912, 1.9292, 1.9565, 4.3951, 1.0600, 2.3577, 4.7625], + device='cuda:0'), covar=tensor([0.0320, 0.2387, 0.2560, 0.1646, 0.0655, 0.2746, 0.1448, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0325, 0.0341, 0.0304, 0.0334, 0.0322, 0.0312, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:28:46,769 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.809e+02 7.207e+02 9.233e+02 1.673e+03, threshold=1.441e+03, percent-clipped=4.0 +2023-04-01 14:29:05,085 INFO [train.py:903] (0/4) Epoch 9, batch 2350, loss[loss=0.2579, simple_loss=0.3468, pruned_loss=0.08449, over 19670.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3154, pruned_loss=0.08753, over 3813269.83 frames. ], batch size: 55, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:29:40,063 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57000.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:29:46,435 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 14:29:53,601 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:29:55,742 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:30:01,352 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 14:30:07,034 INFO [train.py:903] (0/4) Epoch 9, batch 2400, loss[loss=0.2254, simple_loss=0.2955, pruned_loss=0.07763, over 19828.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3165, pruned_loss=0.08863, over 3806512.14 frames. ], batch size: 52, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:30:46,603 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 14:30:51,617 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.434e+02 5.803e+02 7.402e+02 8.988e+02 1.700e+03, threshold=1.480e+03, percent-clipped=2.0 +2023-04-01 14:31:11,540 INFO [train.py:903] (0/4) Epoch 9, batch 2450, loss[loss=0.2846, simple_loss=0.3426, pruned_loss=0.1133, over 13565.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3164, pruned_loss=0.08874, over 3801453.23 frames. 
], batch size: 135, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:31:32,269 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3325, 2.0800, 1.8515, 1.6979, 1.6301, 1.8284, 0.3272, 1.1781], + device='cuda:0'), covar=tensor([0.0361, 0.0368, 0.0320, 0.0502, 0.0772, 0.0486, 0.0796, 0.0630], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0321, 0.0319, 0.0335, 0.0416, 0.0335, 0.0296, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:31:59,444 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2121, 1.3816, 1.8908, 1.6300, 3.1572, 4.6309, 4.5462, 4.9907], + device='cuda:0'), covar=tensor([0.1550, 0.3096, 0.2799, 0.1817, 0.0415, 0.0137, 0.0128, 0.0086], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0285, 0.0316, 0.0245, 0.0205, 0.0143, 0.0203, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:32:09,327 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. limit=5.0 +2023-04-01 14:32:15,744 INFO [train.py:903] (0/4) Epoch 9, batch 2500, loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.0679, over 19662.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3171, pruned_loss=0.08898, over 3811082.28 frames. ], batch size: 53, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:32:17,608 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 14:33:00,908 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.325e+02 6.954e+02 9.918e+02 1.981e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 14:33:09,578 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57165.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:33:19,761 INFO [train.py:903] (0/4) Epoch 9, batch 2550, loss[loss=0.1913, simple_loss=0.2599, pruned_loss=0.06134, over 18661.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3164, pruned_loss=0.08849, over 3813615.54 frames. ], batch size: 41, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:33:59,329 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57204.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:13,711 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:15,579 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 14:34:22,474 INFO [train.py:903] (0/4) Epoch 9, batch 2600, loss[loss=0.2556, simple_loss=0.3244, pruned_loss=0.09347, over 19763.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3175, pruned_loss=0.08908, over 3819799.05 frames. 
], batch size: 54, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:05,258 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57256.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:35:07,156 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.816e+02 6.932e+02 7.774e+02 1.592e+03, threshold=1.386e+03, percent-clipped=3.0 +2023-04-01 14:35:11,174 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9833, 1.9191, 1.6983, 1.4932, 1.3490, 1.4520, 0.3536, 0.7807], + device='cuda:0'), covar=tensor([0.0360, 0.0352, 0.0241, 0.0353, 0.0799, 0.0430, 0.0673, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0318, 0.0316, 0.0331, 0.0413, 0.0333, 0.0295, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:35:12,154 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:22,400 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57270.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:25,692 INFO [train.py:903] (0/4) Epoch 9, batch 2650, loss[loss=0.354, simple_loss=0.3917, pruned_loss=0.1581, over 13558.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3178, pruned_loss=0.08913, over 3818283.65 frames. ], batch size: 135, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:36,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:37,701 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57281.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:35:45,902 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 14:36:31,185 INFO [train.py:903] (0/4) Epoch 9, batch 2700, loss[loss=0.247, simple_loss=0.3089, pruned_loss=0.09255, over 19785.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3177, pruned_loss=0.08931, over 3814027.31 frames. ], batch size: 49, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:12,070 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:14,460 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:15,518 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.587e+02 5.592e+02 7.209e+02 8.892e+02 3.755e+03, threshold=1.442e+03, percent-clipped=7.0 +2023-04-01 14:37:33,721 INFO [train.py:903] (0/4) Epoch 9, batch 2750, loss[loss=0.2409, simple_loss=0.3075, pruned_loss=0.0872, over 19623.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3176, pruned_loss=0.08935, over 3805133.71 frames. ], batch size: 50, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:49,356 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:30,625 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:38,520 INFO [train.py:903] (0/4) Epoch 9, batch 2800, loss[loss=0.2514, simple_loss=0.3283, pruned_loss=0.08727, over 19576.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3156, pruned_loss=0.08807, over 3806582.36 frames. 
], batch size: 61, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:01,670 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5894, 1.2599, 1.2067, 1.4362, 1.2467, 1.3771, 1.1751, 1.3978], + device='cuda:0'), covar=tensor([0.0904, 0.1072, 0.1332, 0.0843, 0.1024, 0.0546, 0.1170, 0.0705], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0352, 0.0288, 0.0237, 0.0297, 0.0243, 0.0270, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:39:11,834 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9973, 1.9321, 1.6569, 1.5242, 1.3637, 1.5247, 0.2789, 0.7960], + device='cuda:0'), covar=tensor([0.0311, 0.0355, 0.0242, 0.0390, 0.0775, 0.0441, 0.0733, 0.0646], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0318, 0.0315, 0.0330, 0.0412, 0.0333, 0.0293, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:39:23,160 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.525e+02 6.599e+02 8.446e+02 1.316e+03, threshold=1.320e+03, percent-clipped=0.0 +2023-04-01 14:39:38,392 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:40,784 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:42,543 INFO [train.py:903] (0/4) Epoch 9, batch 2850, loss[loss=0.231, simple_loss=0.3038, pruned_loss=0.07917, over 19617.00 frames. ], tot_loss[loss=0.245, simple_loss=0.315, pruned_loss=0.08743, over 3815584.55 frames. ], batch size: 50, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:51,249 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:33,749 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:37,262 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9616, 4.2555, 4.5727, 4.5702, 1.5878, 4.2930, 3.7380, 4.2295], + device='cuda:0'), covar=tensor([0.1045, 0.0782, 0.0512, 0.0460, 0.4900, 0.0489, 0.0580, 0.0962], + device='cuda:0'), in_proj_covar=tensor([0.0618, 0.0546, 0.0734, 0.0612, 0.0681, 0.0485, 0.0463, 0.0681], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 14:40:41,921 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 14:40:46,553 INFO [train.py:903] (0/4) Epoch 9, batch 2900, loss[loss=0.2743, simple_loss=0.3421, pruned_loss=0.1033, over 19285.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3161, pruned_loss=0.08827, over 3831751.65 frames. 
], batch size: 66, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:41:03,475 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:16,926 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:30,791 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.787e+02 5.829e+02 7.475e+02 8.920e+02 2.516e+03, threshold=1.495e+03, percent-clipped=6.0 +2023-04-01 14:41:32,218 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57559.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:34,862 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57561.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:45,174 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5333, 2.3409, 1.6185, 1.5827, 2.1214, 1.2251, 1.2371, 1.7904], + device='cuda:0'), covar=tensor([0.0824, 0.0532, 0.0920, 0.0668, 0.0418, 0.1031, 0.0695, 0.0442], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0291, 0.0320, 0.0239, 0.0230, 0.0316, 0.0287, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 14:41:49,407 INFO [train.py:903] (0/4) Epoch 9, batch 2950, loss[loss=0.2253, simple_loss=0.2979, pruned_loss=0.07636, over 19615.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3159, pruned_loss=0.08812, over 3830173.48 frames. ], batch size: 50, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:32,112 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:42:53,540 INFO [train.py:903] (0/4) Epoch 9, batch 3000, loss[loss=0.2761, simple_loss=0.3435, pruned_loss=0.1044, over 18790.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3171, pruned_loss=0.08858, over 3833178.54 frames. ], batch size: 74, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:53,541 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 14:43:06,212 INFO [train.py:937] (0/4) Epoch 9, validation: loss=0.1831, simple_loss=0.2838, pruned_loss=0.04122, over 944034.00 frames. +2023-04-01 14:43:06,213 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 14:43:08,521 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 14:43:28,364 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:43:50,440 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.061e+02 7.914e+02 9.800e+02 2.087e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-04-01 14:43:56,627 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:00,204 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:08,767 INFO [train.py:903] (0/4) Epoch 9, batch 3050, loss[loss=0.2529, simple_loss=0.3053, pruned_loss=0.1003, over 19758.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3169, pruned_loss=0.0885, over 3838069.73 frames. 
], batch size: 47, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:44:10,222 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:10,359 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8256, 1.9021, 1.9843, 2.8572, 1.8868, 2.5277, 2.3449, 1.7805], + device='cuda:0'), covar=tensor([0.3404, 0.2842, 0.1361, 0.1637, 0.3265, 0.1350, 0.3138, 0.2602], + device='cuda:0'), in_proj_covar=tensor([0.0746, 0.0759, 0.0623, 0.0872, 0.0748, 0.0663, 0.0770, 0.0675], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 14:45:09,113 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:11,162 INFO [train.py:903] (0/4) Epoch 9, batch 3100, loss[loss=0.2279, simple_loss=0.2884, pruned_loss=0.08373, over 19763.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3161, pruned_loss=0.08777, over 3836410.49 frames. ], batch size: 47, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:45:16,040 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:18,345 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,443 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,546 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:49,783 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:54,924 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.860e+02 5.906e+02 7.408e+02 1.029e+03 2.368e+03, threshold=1.482e+03, percent-clipped=3.0 +2023-04-01 14:45:57,424 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:46:14,158 INFO [train.py:903] (0/4) Epoch 9, batch 3150, loss[loss=0.2323, simple_loss=0.3186, pruned_loss=0.073, over 19655.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3147, pruned_loss=0.08684, over 3844818.68 frames. ], batch size: 58, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:46:42,120 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 14:47:14,703 INFO [train.py:903] (0/4) Epoch 9, batch 3200, loss[loss=0.2428, simple_loss=0.3113, pruned_loss=0.08709, over 19780.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3149, pruned_loss=0.08711, over 3855103.13 frames. 
], batch size: 54, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:47:15,996 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57824.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:55,929 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:56,961 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.791e+02 7.100e+02 9.261e+02 4.038e+03, threshold=1.420e+03, percent-clipped=7.0 +2023-04-01 14:48:14,915 INFO [train.py:903] (0/4) Epoch 9, batch 3250, loss[loss=0.3447, simple_loss=0.386, pruned_loss=0.1517, over 13715.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3164, pruned_loss=0.08791, over 3835449.25 frames. ], batch size: 136, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:48:18,561 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:12,232 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57919.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:16,425 INFO [train.py:903] (0/4) Epoch 9, batch 3300, loss[loss=0.2632, simple_loss=0.333, pruned_loss=0.09668, over 19504.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3156, pruned_loss=0.08714, over 3834217.00 frames. ], batch size: 64, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:49:23,979 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 14:49:25,456 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57930.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:36,421 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:43,152 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:56,563 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:59,470 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.470e+02 6.783e+02 8.234e+02 1.579e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 14:50:17,131 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:17,931 INFO [train.py:903] (0/4) Epoch 9, batch 3350, loss[loss=0.2585, simple_loss=0.331, pruned_loss=0.09301, over 19618.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3171, pruned_loss=0.08825, over 3834085.96 frames. 
], batch size: 61, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:50:22,855 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:24,996 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.6508, 5.0819, 2.8263, 4.4547, 1.4122, 4.7503, 4.9263, 5.0677], + device='cuda:0'), covar=tensor([0.0402, 0.0829, 0.1994, 0.0616, 0.3558, 0.0707, 0.0611, 0.0920], + device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0346, 0.0414, 0.0312, 0.0367, 0.0341, 0.0331, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 14:50:34,088 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8758, 2.7881, 1.7529, 1.8153, 1.6892, 2.0403, 0.9245, 1.8950], + device='cuda:0'), covar=tensor([0.0655, 0.0521, 0.0608, 0.0994, 0.1035, 0.1125, 0.0959, 0.0902], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0321, 0.0317, 0.0334, 0.0408, 0.0335, 0.0297, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:50:50,894 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-58000.pt +2023-04-01 14:50:54,314 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58002.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:51:10,709 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-01 14:51:20,068 INFO [train.py:903] (0/4) Epoch 9, batch 3400, loss[loss=0.2546, simple_loss=0.3256, pruned_loss=0.09184, over 17489.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3173, pruned_loss=0.08848, over 3842500.26 frames. ], batch size: 101, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:51:35,092 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 14:52:02,957 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.309e+02 6.592e+02 8.930e+02 1.711e+03, threshold=1.318e+03, percent-clipped=4.0 +2023-04-01 14:52:20,108 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7234, 1.4343, 1.4288, 2.2292, 1.8005, 2.1667, 2.2027, 1.8404], + device='cuda:0'), covar=tensor([0.0845, 0.1007, 0.1070, 0.0784, 0.0844, 0.0681, 0.0878, 0.0642], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0232, 0.0229, 0.0256, 0.0243, 0.0216, 0.0204, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 14:52:20,816 INFO [train.py:903] (0/4) Epoch 9, batch 3450, loss[loss=0.262, simple_loss=0.3216, pruned_loss=0.1012, over 19386.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3184, pruned_loss=0.08891, over 3840396.91 frames. ], batch size: 48, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:52:25,124 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 14:52:48,534 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:22,370 INFO [train.py:903] (0/4) Epoch 9, batch 3500, loss[loss=0.2946, simple_loss=0.3505, pruned_loss=0.1193, over 18643.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3175, pruned_loss=0.08848, over 3831508.95 frames. 
], batch size: 74, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:53:25,170 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9497, 4.3835, 4.6906, 4.6725, 1.5725, 4.3035, 3.8056, 4.2933], + device='cuda:0'), covar=tensor([0.1197, 0.0704, 0.0522, 0.0475, 0.5225, 0.0487, 0.0560, 0.1031], + device='cuda:0'), in_proj_covar=tensor([0.0624, 0.0550, 0.0750, 0.0620, 0.0686, 0.0491, 0.0467, 0.0685], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 14:53:33,956 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:36,489 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 14:54:03,568 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:05,462 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.015e+02 7.407e+02 9.414e+02 2.837e+03, threshold=1.481e+03, percent-clipped=3.0 +2023-04-01 14:54:06,979 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58159.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:08,244 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2292, 1.2020, 1.4066, 1.3634, 1.8790, 1.7930, 1.8310, 0.5052], + device='cuda:0'), covar=tensor([0.2033, 0.3498, 0.2121, 0.1616, 0.1187, 0.1830, 0.1196, 0.3418], + device='cuda:0'), in_proj_covar=tensor([0.0468, 0.0548, 0.0558, 0.0422, 0.0580, 0.0473, 0.0638, 0.0474], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 14:54:10,314 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:24,272 INFO [train.py:903] (0/4) Epoch 9, batch 3550, loss[loss=0.2166, simple_loss=0.2867, pruned_loss=0.07327, over 19499.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3171, pruned_loss=0.08811, over 3837666.04 frames. ], batch size: 49, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:54:37,410 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0148, 2.0679, 1.7159, 1.5354, 1.3182, 1.5451, 0.4324, 0.8775], + device='cuda:0'), covar=tensor([0.0601, 0.0512, 0.0367, 0.0577, 0.1011, 0.0658, 0.0855, 0.0872], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0322, 0.0319, 0.0337, 0.0409, 0.0334, 0.0299, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 14:54:50,570 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:08,378 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:20,402 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:24,266 INFO [train.py:903] (0/4) Epoch 9, batch 3600, loss[loss=0.2729, simple_loss=0.3449, pruned_loss=0.1005, over 19783.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3191, pruned_loss=0.08963, over 3830008.84 frames. 
], batch size: 56, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:55:30,332 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:01,038 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:06,153 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.487e+02 6.500e+02 7.997e+02 2.924e+03, threshold=1.300e+03, percent-clipped=2.0 +2023-04-01 14:56:23,768 INFO [train.py:903] (0/4) Epoch 9, batch 3650, loss[loss=0.2572, simple_loss=0.3262, pruned_loss=0.09412, over 19362.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3179, pruned_loss=0.08893, over 3829797.39 frames. ], batch size: 66, lr: 9.26e-03, grad_scale: 16.0 +2023-04-01 14:57:24,454 INFO [train.py:903] (0/4) Epoch 9, batch 3700, loss[loss=0.2085, simple_loss=0.2837, pruned_loss=0.06662, over 19716.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3173, pruned_loss=0.08833, over 3838350.19 frames. ], batch size: 46, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:57:59,834 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 14:58:07,749 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 6.060e+02 7.736e+02 9.843e+02 2.060e+03, threshold=1.547e+03, percent-clipped=9.0 +2023-04-01 14:58:23,948 INFO [train.py:903] (0/4) Epoch 9, batch 3750, loss[loss=0.2229, simple_loss=0.3015, pruned_loss=0.07218, over 19597.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3177, pruned_loss=0.08871, over 3830777.63 frames. ], batch size: 52, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:59:24,635 INFO [train.py:903] (0/4) Epoch 9, batch 3800, loss[loss=0.2303, simple_loss=0.3069, pruned_loss=0.07679, over 19837.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.318, pruned_loss=0.08916, over 3814045.02 frames. ], batch size: 52, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 14:59:54,350 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 15:00:08,584 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 5.230e+02 6.601e+02 8.580e+02 1.875e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-04-01 15:00:11,128 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2376, 1.3891, 2.0137, 1.5316, 3.2006, 2.4375, 3.5661, 1.6517], + device='cuda:0'), covar=tensor([0.2269, 0.3850, 0.2275, 0.1727, 0.1406, 0.1857, 0.1486, 0.3278], + device='cuda:0'), in_proj_covar=tensor([0.0475, 0.0554, 0.0568, 0.0426, 0.0584, 0.0480, 0.0643, 0.0477], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 15:00:17,587 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58466.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:00:24,958 INFO [train.py:903] (0/4) Epoch 9, batch 3850, loss[loss=0.2259, simple_loss=0.2988, pruned_loss=0.07647, over 19484.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.318, pruned_loss=0.08853, over 3800789.67 frames. 
], batch size: 49, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:00:46,676 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:01,846 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:06,094 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:25,313 INFO [train.py:903] (0/4) Epoch 9, batch 3900, loss[loss=0.2485, simple_loss=0.3123, pruned_loss=0.09238, over 19486.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3176, pruned_loss=0.0882, over 3802377.38 frames. ], batch size: 49, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:02:08,183 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 6.109e+02 7.308e+02 8.933e+02 2.200e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 15:02:26,008 INFO [train.py:903] (0/4) Epoch 9, batch 3950, loss[loss=0.2994, simple_loss=0.3547, pruned_loss=0.1221, over 13913.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3172, pruned_loss=0.08763, over 3805361.74 frames. ], batch size: 136, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:02:28,084 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 15:02:51,957 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5937, 1.2196, 1.1727, 1.4573, 1.1682, 1.3113, 1.1712, 1.3926], + device='cuda:0'), covar=tensor([0.0904, 0.1106, 0.1451, 0.0816, 0.1053, 0.0562, 0.1217, 0.0707], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0351, 0.0290, 0.0239, 0.0298, 0.0242, 0.0270, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:03:21,977 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58618.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:25,464 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:28,118 INFO [train.py:903] (0/4) Epoch 9, batch 4000, loss[loss=0.2379, simple_loss=0.3003, pruned_loss=0.08777, over 19814.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3164, pruned_loss=0.08751, over 3798179.84 frames. ], batch size: 48, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:04:10,731 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.392e+02 5.642e+02 6.845e+02 8.578e+02 2.087e+03, threshold=1.369e+03, percent-clipped=3.0 +2023-04-01 15:04:10,793 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 15:04:27,023 INFO [train.py:903] (0/4) Epoch 9, batch 4050, loss[loss=0.263, simple_loss=0.3345, pruned_loss=0.09573, over 19666.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3169, pruned_loss=0.08754, over 3801731.60 frames. ], batch size: 55, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:04:59,250 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:05:10,974 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.94 vs. limit=5.0 +2023-04-01 15:05:28,158 INFO [train.py:903] (0/4) Epoch 9, batch 4100, loss[loss=0.2634, simple_loss=0.3387, pruned_loss=0.094, over 19322.00 frames. 
], tot_loss[loss=0.2459, simple_loss=0.3168, pruned_loss=0.0875, over 3814478.86 frames. ], batch size: 66, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:03,196 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 15:06:11,190 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.572e+02 6.953e+02 8.904e+02 1.888e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 15:06:28,257 INFO [train.py:903] (0/4) Epoch 9, batch 4150, loss[loss=0.2496, simple_loss=0.3263, pruned_loss=0.08644, over 19744.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3178, pruned_loss=0.08813, over 3828558.34 frames. ], batch size: 63, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:29,678 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 15:06:54,404 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58793.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:07:30,861 INFO [train.py:903] (0/4) Epoch 9, batch 4200, loss[loss=0.2026, simple_loss=0.2701, pruned_loss=0.06753, over 19329.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3171, pruned_loss=0.08765, over 3825966.08 frames. ], batch size: 44, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:07:34,201 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 15:08:14,670 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.798e+02 5.991e+02 7.229e+02 9.145e+02 1.621e+03, threshold=1.446e+03, percent-clipped=3.0 +2023-04-01 15:08:32,533 INFO [train.py:903] (0/4) Epoch 9, batch 4250, loss[loss=0.2026, simple_loss=0.2899, pruned_loss=0.0576, over 19677.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3175, pruned_loss=0.08804, over 3830023.58 frames. ], batch size: 58, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:08:34,169 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:37,487 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:48,673 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 15:08:59,995 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 15:09:03,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:07,328 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:32,792 INFO [train.py:903] (0/4) Epoch 9, batch 4300, loss[loss=0.2298, simple_loss=0.2825, pruned_loss=0.08855, over 18679.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3176, pruned_loss=0.08866, over 3814861.25 frames. 
], batch size: 41, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:09:34,185 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:10:17,294 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.584e+02 7.321e+02 9.114e+02 2.155e+03, threshold=1.464e+03, percent-clipped=5.0 +2023-04-01 15:10:27,340 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 15:10:33,830 INFO [train.py:903] (0/4) Epoch 9, batch 4350, loss[loss=0.2784, simple_loss=0.3423, pruned_loss=0.1072, over 19590.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3167, pruned_loss=0.08794, over 3819278.21 frames. ], batch size: 52, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:10:43,759 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 15:11:10,113 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6021, 4.1434, 2.7215, 3.7403, 1.2679, 3.9142, 3.8770, 4.0675], + device='cuda:0'), covar=tensor([0.0695, 0.0968, 0.2003, 0.0701, 0.3743, 0.0804, 0.0815, 0.0951], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0350, 0.0415, 0.0311, 0.0369, 0.0344, 0.0335, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 15:11:34,348 INFO [train.py:903] (0/4) Epoch 9, batch 4400, loss[loss=0.2542, simple_loss=0.3315, pruned_loss=0.08842, over 19689.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3164, pruned_loss=0.08755, over 3819875.03 frames. ], batch size: 59, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:58,694 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 15:11:58,793 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:12:08,639 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 15:12:18,355 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.626e+02 6.682e+02 1.014e+03 2.125e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-01 15:12:35,839 INFO [train.py:903] (0/4) Epoch 9, batch 4450, loss[loss=0.208, simple_loss=0.2805, pruned_loss=0.06772, over 15188.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3157, pruned_loss=0.08748, over 3814682.92 frames. ], batch size: 33, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:05,750 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 15:13:36,625 INFO [train.py:903] (0/4) Epoch 9, batch 4500, loss[loss=0.2212, simple_loss=0.3031, pruned_loss=0.06964, over 19649.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3163, pruned_loss=0.08747, over 3816116.54 frames. ], batch size: 58, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:54,290 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:17,431 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-04-01 15:14:20,682 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59158.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:21,457 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.665e+02 5.638e+02 7.209e+02 9.049e+02 2.430e+03, threshold=1.442e+03, percent-clipped=5.0 +2023-04-01 15:14:38,158 INFO [train.py:903] (0/4) Epoch 9, batch 4550, loss[loss=0.2406, simple_loss=0.3082, pruned_loss=0.08651, over 19742.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3165, pruned_loss=0.08778, over 3800857.58 frames. ], batch size: 51, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:14:46,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 15:14:49,756 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59182.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:15:11,382 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 15:15:39,255 INFO [train.py:903] (0/4) Epoch 9, batch 4600, loss[loss=0.281, simple_loss=0.344, pruned_loss=0.109, over 19744.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3155, pruned_loss=0.08742, over 3805114.06 frames. ], batch size: 63, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:12,646 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 15:16:15,388 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:24,166 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 5.555e+02 6.855e+02 8.371e+02 1.742e+03, threshold=1.371e+03, percent-clipped=2.0 +2023-04-01 15:16:35,108 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:37,663 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1650, 1.2224, 1.1570, 0.9804, 1.0027, 1.0412, 0.0463, 0.3435], + device='cuda:0'), covar=tensor([0.0401, 0.0400, 0.0253, 0.0306, 0.0813, 0.0344, 0.0784, 0.0708], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0322, 0.0321, 0.0337, 0.0413, 0.0339, 0.0298, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 15:16:40,775 INFO [train.py:903] (0/4) Epoch 9, batch 4650, loss[loss=0.2049, simple_loss=0.2712, pruned_loss=0.06929, over 19738.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3165, pruned_loss=0.08797, over 3798430.91 frames. ], batch size: 45, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:56,382 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 15:17:09,162 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 15:17:41,937 INFO [train.py:903] (0/4) Epoch 9, batch 4700, loss[loss=0.2554, simple_loss=0.318, pruned_loss=0.09638, over 18122.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.315, pruned_loss=0.08703, over 3803124.02 frames. ], batch size: 83, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:03,983 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-01 15:18:13,119 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7942, 1.3109, 1.4025, 2.2234, 1.8225, 1.9220, 2.0341, 1.7967], + device='cuda:0'), covar=tensor([0.0866, 0.1235, 0.1163, 0.0935, 0.0884, 0.0881, 0.0877, 0.0763], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0230, 0.0228, 0.0257, 0.0240, 0.0213, 0.0202, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 15:18:25,859 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.586e+02 6.716e+02 8.168e+02 1.499e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-01 15:18:32,956 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9833, 2.1486, 2.1864, 1.7965, 4.4888, 0.9714, 2.3465, 4.7774], + device='cuda:0'), covar=tensor([0.0323, 0.2216, 0.2130, 0.1727, 0.0726, 0.2610, 0.1305, 0.0194], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0329, 0.0338, 0.0311, 0.0343, 0.0326, 0.0315, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:18:41,885 INFO [train.py:903] (0/4) Epoch 9, batch 4750, loss[loss=0.2211, simple_loss=0.285, pruned_loss=0.07864, over 19408.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3144, pruned_loss=0.08653, over 3802910.17 frames. ], batch size: 48, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:54,722 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:32,444 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:43,259 INFO [train.py:903] (0/4) Epoch 9, batch 4800, loss[loss=0.3011, simple_loss=0.3587, pruned_loss=0.1217, over 17691.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3152, pruned_loss=0.08702, over 3795917.05 frames. ], batch size: 101, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:19:44,867 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. limit=5.0 +2023-04-01 15:19:58,756 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1223, 3.6508, 2.1442, 2.2601, 3.1919, 1.7115, 1.3561, 1.9723], + device='cuda:0'), covar=tensor([0.1086, 0.0377, 0.0845, 0.0621, 0.0424, 0.0974, 0.0860, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0293, 0.0319, 0.0242, 0.0230, 0.0315, 0.0287, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:20:03,392 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59439.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:20:27,560 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.762e+02 6.893e+02 8.818e+02 1.836e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-01 15:20:43,943 INFO [train.py:903] (0/4) Epoch 9, batch 4850, loss[loss=0.2228, simple_loss=0.2986, pruned_loss=0.07351, over 19542.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3166, pruned_loss=0.08827, over 3797619.60 frames. ], batch size: 56, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:08,915 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 15:21:27,115 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:29,004 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 15:21:35,282 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 15:21:36,464 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 15:21:42,242 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:45,372 INFO [train.py:903] (0/4) Epoch 9, batch 4900, loss[loss=0.2344, simple_loss=0.3048, pruned_loss=0.08202, over 19676.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3165, pruned_loss=0.08798, over 3805920.55 frames. ], batch size: 60, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:46,550 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 15:21:49,052 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59526.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:56,482 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 15:21:56,952 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:22:05,289 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 15:22:29,457 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 5.274e+02 6.528e+02 8.023e+02 2.606e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-01 15:22:45,478 INFO [train.py:903] (0/4) Epoch 9, batch 4950, loss[loss=0.2761, simple_loss=0.3404, pruned_loss=0.1059, over 19536.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3158, pruned_loss=0.08747, over 3812576.57 frames. ], batch size: 54, lr: 9.15e-03, grad_scale: 8.0 +2023-04-01 15:23:01,068 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 15:23:24,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 15:23:44,258 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5613, 2.3526, 1.7025, 1.5771, 2.1132, 1.3986, 1.2132, 1.8185], + device='cuda:0'), covar=tensor([0.0815, 0.0563, 0.0929, 0.0636, 0.0451, 0.1009, 0.0728, 0.0447], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0291, 0.0318, 0.0241, 0.0230, 0.0314, 0.0286, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:23:46,154 INFO [train.py:903] (0/4) Epoch 9, batch 5000, loss[loss=0.2008, simple_loss=0.2799, pruned_loss=0.0608, over 19612.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3151, pruned_loss=0.08685, over 3810664.66 frames. ], batch size: 50, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:23:53,576 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 15:24:04,788 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 15:24:06,356 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:08,576 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:30,504 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.818e+02 6.890e+02 9.185e+02 2.943e+03, threshold=1.378e+03, percent-clipped=3.0 +2023-04-01 15:24:35,487 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:46,097 INFO [train.py:903] (0/4) Epoch 9, batch 5050, loss[loss=0.2329, simple_loss=0.3139, pruned_loss=0.0759, over 19541.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3152, pruned_loss=0.08706, over 3821939.47 frames. ], batch size: 56, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:25:21,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 15:25:47,327 INFO [train.py:903] (0/4) Epoch 9, batch 5100, loss[loss=0.257, simple_loss=0.3291, pruned_loss=0.09244, over 19277.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3143, pruned_loss=0.0863, over 3826981.09 frames. ], batch size: 66, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:25:56,483 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 15:25:59,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 15:26:01,325 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1963, 1.8160, 1.3813, 1.1242, 1.6423, 1.0634, 1.1133, 1.7131], + device='cuda:0'), covar=tensor([0.0669, 0.0576, 0.0960, 0.0625, 0.0377, 0.1086, 0.0540, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0294, 0.0321, 0.0242, 0.0231, 0.0317, 0.0288, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:26:05,154 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 15:26:33,075 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 5.962e+02 7.316e+02 9.170e+02 1.645e+03, threshold=1.463e+03, percent-clipped=5.0 +2023-04-01 15:26:47,508 INFO [train.py:903] (0/4) Epoch 9, batch 5150, loss[loss=0.3467, simple_loss=0.3804, pruned_loss=0.1565, over 13305.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3141, pruned_loss=0.08648, over 3822087.77 frames. ], batch size: 136, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:26:55,208 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9835, 1.1868, 1.2790, 1.4008, 2.3055, 0.9739, 2.0659, 2.7291], + device='cuda:0'), covar=tensor([0.0647, 0.2884, 0.2835, 0.1689, 0.1146, 0.2477, 0.1107, 0.0483], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0327, 0.0337, 0.0310, 0.0340, 0.0324, 0.0316, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:26:58,279 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 15:27:04,620 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3616, 1.4649, 1.8424, 1.5550, 3.0354, 2.4569, 3.3630, 1.5730], + device='cuda:0'), covar=tensor([0.2126, 0.3610, 0.2320, 0.1723, 0.1466, 0.1815, 0.1525, 0.3269], + device='cuda:0'), in_proj_covar=tensor([0.0476, 0.0555, 0.0570, 0.0425, 0.0588, 0.0477, 0.0641, 0.0476], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 15:27:32,217 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 15:27:49,828 INFO [train.py:903] (0/4) Epoch 9, batch 5200, loss[loss=0.2532, simple_loss=0.3305, pruned_loss=0.08798, over 19762.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3151, pruned_loss=0.08664, over 3820771.39 frames. ], batch size: 54, lr: 9.14e-03, grad_scale: 8.0 +2023-04-01 15:27:55,803 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0519, 1.1867, 1.4185, 1.4299, 2.6553, 0.9407, 1.9019, 2.7910], + device='cuda:0'), covar=tensor([0.0542, 0.2620, 0.2539, 0.1547, 0.0743, 0.2414, 0.1196, 0.0395], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0328, 0.0338, 0.0310, 0.0338, 0.0324, 0.0316, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:27:59,743 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 15:28:34,608 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 5.545e+02 6.726e+02 8.723e+02 1.623e+03, threshold=1.345e+03, percent-clipped=2.0 +2023-04-01 15:28:39,319 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59864.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:41,618 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 15:28:41,857 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:50,361 INFO [train.py:903] (0/4) Epoch 9, batch 5250, loss[loss=0.2788, simple_loss=0.3465, pruned_loss=0.1056, over 17232.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3151, pruned_loss=0.08694, over 3813929.01 frames. ], batch size: 101, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:29:19,337 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:31,279 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,067 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,797 INFO [train.py:903] (0/4) Epoch 9, batch 5300, loss[loss=0.2126, simple_loss=0.2883, pruned_loss=0.06847, over 19761.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3157, pruned_loss=0.08697, over 3821033.25 frames. ], batch size: 54, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:30:04,494 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-01 15:30:36,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.525e+02 7.204e+02 8.995e+02 2.228e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-01 15:30:50,970 INFO [train.py:903] (0/4) Epoch 9, batch 5350, loss[loss=0.2695, simple_loss=0.3393, pruned_loss=0.09981, over 19695.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3148, pruned_loss=0.08662, over 3830951.69 frames. ], batch size: 59, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:30:58,318 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:31:24,018 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 15:31:25,304 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-60000.pt +2023-04-01 15:31:52,148 INFO [train.py:903] (0/4) Epoch 9, batch 5400, loss[loss=0.2181, simple_loss=0.2822, pruned_loss=0.07704, over 19317.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.315, pruned_loss=0.08698, over 3831000.48 frames. ], batch size: 44, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:32:16,759 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2206, 1.2785, 1.1671, 1.0183, 1.0118, 1.0934, 0.0893, 0.3763], + device='cuda:0'), covar=tensor([0.0411, 0.0400, 0.0246, 0.0308, 0.0918, 0.0349, 0.0762, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0316, 0.0318, 0.0332, 0.0412, 0.0334, 0.0296, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 15:32:34,990 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:37,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 5.587e+02 7.112e+02 9.365e+02 1.948e+03, threshold=1.422e+03, percent-clipped=3.0 +2023-04-01 15:32:49,966 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:53,196 INFO [train.py:903] (0/4) Epoch 9, batch 5450, loss[loss=0.2087, simple_loss=0.2854, pruned_loss=0.06606, over 19829.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3155, pruned_loss=0.08668, over 3833944.16 frames. ], batch size: 52, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:33:04,727 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 15:33:06,639 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60083.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:21,780 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60096.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:40,910 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 15:33:55,434 INFO [train.py:903] (0/4) Epoch 9, batch 5500, loss[loss=0.2664, simple_loss=0.3482, pruned_loss=0.09232, over 19766.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3166, pruned_loss=0.08777, over 3817592.77 frames. ], batch size: 56, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:34:17,030 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 15:34:40,127 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.485e+02 6.858e+02 8.862e+02 1.983e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-04-01 15:34:56,047 INFO [train.py:903] (0/4) Epoch 9, batch 5550, loss[loss=0.2834, simple_loss=0.3506, pruned_loss=0.1081, over 19709.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3168, pruned_loss=0.08756, over 3815427.34 frames. ], batch size: 59, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:34:59,783 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2197, 3.6975, 3.8344, 3.8373, 1.4420, 3.5896, 3.1343, 3.5251], + device='cuda:0'), covar=tensor([0.1306, 0.0877, 0.0628, 0.0641, 0.4858, 0.0756, 0.0690, 0.1085], + device='cuda:0'), in_proj_covar=tensor([0.0623, 0.0559, 0.0743, 0.0629, 0.0683, 0.0497, 0.0456, 0.0691], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 15:35:01,731 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 15:35:42,126 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:48,978 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60216.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:49,884 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 15:35:57,517 INFO [train.py:903] (0/4) Epoch 9, batch 5600, loss[loss=0.2381, simple_loss=0.3168, pruned_loss=0.0797, over 18023.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3163, pruned_loss=0.08759, over 3818509.95 frames. ], batch size: 83, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:36:12,194 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:16,416 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 15:36:31,693 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:41,623 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.287e+02 5.903e+02 7.092e+02 9.595e+02 1.671e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 15:36:42,004 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:58,107 INFO [train.py:903] (0/4) Epoch 9, batch 5650, loss[loss=0.2456, simple_loss=0.3207, pruned_loss=0.08525, over 19775.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3153, pruned_loss=0.08691, over 3829739.05 frames. ], batch size: 56, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:37:38,577 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7903, 4.3664, 2.8725, 3.8309, 0.8904, 4.0202, 4.0770, 4.2250], + device='cuda:0'), covar=tensor([0.0595, 0.0950, 0.1692, 0.0719, 0.3871, 0.0751, 0.0705, 0.0784], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0347, 0.0418, 0.0304, 0.0368, 0.0344, 0.0332, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 15:37:45,043 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 15:38:00,337 INFO [train.py:903] (0/4) Epoch 9, batch 5700, loss[loss=0.2289, simple_loss=0.3027, pruned_loss=0.07755, over 19760.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.315, pruned_loss=0.08658, over 3826637.72 frames. ], batch size: 54, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:38:02,934 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:45,031 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.631e+02 6.670e+02 7.834e+02 2.342e+03, threshold=1.334e+03, percent-clipped=2.0 +2023-04-01 15:38:52,986 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:59,661 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 15:39:00,754 INFO [train.py:903] (0/4) Epoch 9, batch 5750, loss[loss=0.2503, simple_loss=0.3337, pruned_loss=0.08347, over 19338.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3151, pruned_loss=0.08694, over 3818424.55 frames. ], batch size: 66, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:39:07,411 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 15:39:12,833 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 15:39:36,133 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:39:50,843 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:00,918 INFO [train.py:903] (0/4) Epoch 9, batch 5800, loss[loss=0.2347, simple_loss=0.303, pruned_loss=0.08325, over 19589.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3165, pruned_loss=0.08784, over 3812031.70 frames. ], batch size: 52, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:40:06,314 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:12,303 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 15:40:22,430 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:45,425 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.333e+02 6.303e+02 7.771e+02 9.979e+02 2.257e+03, threshold=1.554e+03, percent-clipped=12.0 +2023-04-01 15:41:01,366 INFO [train.py:903] (0/4) Epoch 9, batch 5850, loss[loss=0.2025, simple_loss=0.2708, pruned_loss=0.06715, over 19093.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3167, pruned_loss=0.08833, over 3822375.09 frames. ], batch size: 42, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:41:55,996 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:02,422 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 15:42:03,580 INFO [train.py:903] (0/4) Epoch 9, batch 5900, loss[loss=0.2128, simple_loss=0.2797, pruned_loss=0.0729, over 19737.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3156, pruned_loss=0.08751, over 3801626.76 frames. 
], batch size: 47, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:42:10,666 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:22,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 15:42:25,414 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60542.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:42,845 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:46,108 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5662, 1.1188, 1.1898, 2.1503, 1.4605, 1.5129, 1.7756, 1.6011], + device='cuda:0'), covar=tensor([0.0972, 0.1432, 0.1259, 0.0856, 0.1128, 0.1060, 0.1053, 0.0915], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0232, 0.0234, 0.0259, 0.0247, 0.0218, 0.0204, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 15:42:47,829 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.460e+02 7.799e+02 9.723e+02 2.713e+03, threshold=1.560e+03, percent-clipped=1.0 +2023-04-01 15:42:47,991 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:03,509 INFO [train.py:903] (0/4) Epoch 9, batch 5950, loss[loss=0.2425, simple_loss=0.3123, pruned_loss=0.08631, over 19587.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3162, pruned_loss=0.08813, over 3809454.58 frames. ], batch size: 52, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:43:10,771 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8736, 1.3285, 1.0708, 0.9026, 1.1589, 0.9945, 0.8339, 1.2596], + device='cuda:0'), covar=tensor([0.0489, 0.0619, 0.0910, 0.0519, 0.0428, 0.1010, 0.0531, 0.0376], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0298, 0.0326, 0.0245, 0.0233, 0.0324, 0.0292, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:43:13,134 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:20,760 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5647, 1.7135, 2.0807, 1.6301, 3.0619, 4.8904, 4.7084, 5.2105], + device='cuda:0'), covar=tensor([0.1384, 0.2810, 0.2682, 0.1764, 0.0442, 0.0133, 0.0135, 0.0101], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0288, 0.0317, 0.0245, 0.0209, 0.0145, 0.0202, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 15:43:45,240 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:03,655 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:04,413 INFO [train.py:903] (0/4) Epoch 9, batch 6000, loss[loss=0.2033, simple_loss=0.2847, pruned_loss=0.06092, over 19692.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3172, pruned_loss=0.08852, over 3790258.44 frames. 
], batch size: 53, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:44:04,413 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 15:44:16,875 INFO [train.py:937] (0/4) Epoch 9, validation: loss=0.1828, simple_loss=0.2835, pruned_loss=0.04105, over 944034.00 frames. +2023-04-01 15:44:16,876 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 15:44:46,052 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:45:02,309 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.785e+02 7.329e+02 9.590e+02 1.552e+03, threshold=1.466e+03, percent-clipped=0.0 +2023-04-01 15:45:17,224 INFO [train.py:903] (0/4) Epoch 9, batch 6050, loss[loss=0.2707, simple_loss=0.3372, pruned_loss=0.1021, over 17232.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3157, pruned_loss=0.08759, over 3797462.23 frames. ], batch size: 101, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:45:19,785 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:46:18,277 INFO [train.py:903] (0/4) Epoch 9, batch 6100, loss[loss=0.2006, simple_loss=0.2703, pruned_loss=0.06539, over 19720.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3139, pruned_loss=0.08642, over 3807160.13 frames. ], batch size: 45, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:46:34,561 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:03,004 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.339e+02 6.367e+02 8.531e+02 1.806e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-01 15:47:18,986 INFO [train.py:903] (0/4) Epoch 9, batch 6150, loss[loss=0.2569, simple_loss=0.3262, pruned_loss=0.0938, over 19656.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3151, pruned_loss=0.0876, over 3803873.55 frames. ], batch size: 55, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:47:19,435 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:33,339 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:44,095 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 15:47:48,612 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:48,641 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:03,536 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:04,567 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60811.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:18,466 INFO [train.py:903] (0/4) Epoch 9, batch 6200, loss[loss=0.2601, simple_loss=0.3366, pruned_loss=0.09176, over 19645.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3155, pruned_loss=0.08786, over 3802121.18 frames. 
], batch size: 55, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:48:18,860 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:34,296 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:49:03,644 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.706e+02 5.965e+02 7.614e+02 9.330e+02 2.107e+03, threshold=1.523e+03, percent-clipped=6.0 +2023-04-01 15:49:19,598 INFO [train.py:903] (0/4) Epoch 9, batch 6250, loss[loss=0.2578, simple_loss=0.331, pruned_loss=0.09225, over 19658.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3164, pruned_loss=0.08787, over 3810409.55 frames. ], batch size: 55, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:49:49,664 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 15:50:19,515 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7568, 1.7730, 1.6269, 1.5314, 1.3720, 1.5801, 0.8950, 1.1673], + device='cuda:0'), covar=tensor([0.0300, 0.0333, 0.0210, 0.0309, 0.0567, 0.0387, 0.0613, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0319, 0.0321, 0.0334, 0.0415, 0.0339, 0.0298, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 15:50:20,300 INFO [train.py:903] (0/4) Epoch 9, batch 6300, loss[loss=0.3091, simple_loss=0.3601, pruned_loss=0.1291, over 12418.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3169, pruned_loss=0.08817, over 3793135.10 frames. ], batch size: 135, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:50:30,434 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:01,525 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60956.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:05,846 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.456e+02 6.999e+02 8.896e+02 1.665e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 15:51:21,557 INFO [train.py:903] (0/4) Epoch 9, batch 6350, loss[loss=0.2857, simple_loss=0.3466, pruned_loss=0.1125, over 19737.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3163, pruned_loss=0.08774, over 3791256.64 frames. ], batch size: 63, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:05,028 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2997, 3.7421, 3.8250, 3.8545, 1.5135, 3.5816, 3.1919, 3.5113], + device='cuda:0'), covar=tensor([0.1207, 0.0868, 0.0601, 0.0561, 0.4687, 0.0663, 0.0635, 0.1045], + device='cuda:0'), in_proj_covar=tensor([0.0629, 0.0564, 0.0756, 0.0629, 0.0693, 0.0501, 0.0466, 0.0698], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 15:52:22,396 INFO [train.py:903] (0/4) Epoch 9, batch 6400, loss[loss=0.187, simple_loss=0.2703, pruned_loss=0.0519, over 19767.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3176, pruned_loss=0.08809, over 3798433.20 frames. 
], batch size: 54, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:47,690 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4470, 1.1805, 1.2753, 1.6472, 1.2999, 1.5813, 1.6979, 1.4926], + device='cuda:0'), covar=tensor([0.0884, 0.1122, 0.1122, 0.0842, 0.0972, 0.0880, 0.0811, 0.0773], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0230, 0.0232, 0.0257, 0.0245, 0.0216, 0.0203, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 15:53:07,177 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 6.196e+02 7.384e+02 8.672e+02 1.804e+03, threshold=1.477e+03, percent-clipped=4.0 +2023-04-01 15:53:21,075 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 15:53:22,488 INFO [train.py:903] (0/4) Epoch 9, batch 6450, loss[loss=0.2646, simple_loss=0.3347, pruned_loss=0.09728, over 19575.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3181, pruned_loss=0.08871, over 3806168.18 frames. ], batch size: 61, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:53:30,332 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:31,420 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:54:05,086 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 15:54:22,079 INFO [train.py:903] (0/4) Epoch 9, batch 6500, loss[loss=0.2146, simple_loss=0.2838, pruned_loss=0.07273, over 19735.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.318, pruned_loss=0.08866, over 3804547.33 frames. ], batch size: 51, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:54:27,555 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 15:55:05,286 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5106, 1.8999, 1.9102, 1.9675, 4.0086, 1.2779, 2.5624, 4.2072], + device='cuda:0'), covar=tensor([0.0386, 0.2161, 0.2234, 0.1485, 0.0616, 0.2242, 0.1161, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0327, 0.0338, 0.0310, 0.0338, 0.0323, 0.0317, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 15:55:06,067 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.706e+02 8.302e+02 1.012e+03 2.679e+03, threshold=1.660e+03, percent-clipped=7.0 +2023-04-01 15:55:22,000 INFO [train.py:903] (0/4) Epoch 9, batch 6550, loss[loss=0.2727, simple_loss=0.3437, pruned_loss=0.1009, over 19706.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3178, pruned_loss=0.08826, over 3810855.61 frames. ], batch size: 59, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:55:50,226 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:56:10,802 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 15:56:24,270 INFO [train.py:903] (0/4) Epoch 9, batch 6600, loss[loss=0.2033, simple_loss=0.2704, pruned_loss=0.06812, over 19822.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3167, pruned_loss=0.08757, over 3813269.83 frames. 
], batch size: 49, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:09,253 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 5.654e+02 6.784e+02 8.392e+02 1.741e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 15:57:24,976 INFO [train.py:903] (0/4) Epoch 9, batch 6650, loss[loss=0.2541, simple_loss=0.3377, pruned_loss=0.08523, over 19700.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3162, pruned_loss=0.08735, over 3814270.16 frames. ], batch size: 59, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:51,114 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 15:58:25,582 INFO [train.py:903] (0/4) Epoch 9, batch 6700, loss[loss=0.2363, simple_loss=0.317, pruned_loss=0.07778, over 19751.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3169, pruned_loss=0.08774, over 3819265.96 frames. ], batch size: 63, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:08,505 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.683e+02 7.501e+02 9.618e+02 2.603e+03, threshold=1.500e+03, percent-clipped=7.0 +2023-04-01 15:59:23,018 INFO [train.py:903] (0/4) Epoch 9, batch 6750, loss[loss=0.2047, simple_loss=0.286, pruned_loss=0.06163, over 19597.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3155, pruned_loss=0.08676, over 3830871.09 frames. ], batch size: 50, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:43,448 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3252, 2.3793, 1.8173, 1.5653, 2.1847, 1.3998, 1.2919, 1.8372], + device='cuda:0'), covar=tensor([0.0914, 0.0587, 0.0770, 0.0652, 0.0386, 0.0986, 0.0681, 0.0397], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0298, 0.0321, 0.0244, 0.0232, 0.0322, 0.0287, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:00:18,968 INFO [train.py:903] (0/4) Epoch 9, batch 6800, loss[loss=0.2503, simple_loss=0.3255, pruned_loss=0.08755, over 19678.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3153, pruned_loss=0.08703, over 3814591.80 frames. ], batch size: 60, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:19,084 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:00:40,936 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 16:00:48,222 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-9.pt +2023-04-01 16:01:03,773 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 16:01:04,209 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 16:01:06,621 INFO [train.py:903] (0/4) Epoch 10, batch 0, loss[loss=0.2248, simple_loss=0.3153, pruned_loss=0.06717, over 19770.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3153, pruned_loss=0.06717, over 19770.00 frames. ], batch size: 56, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:01:06,622 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 16:01:17,500 INFO [train.py:937] (0/4) Epoch 10, validation: loss=0.1825, simple_loss=0.2836, pruned_loss=0.04072, over 944034.00 frames. 
+2023-04-01 16:01:17,501 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 16:01:17,949 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:27,607 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.689e+02 6.760e+02 8.116e+02 1.440e+03, threshold=1.352e+03, percent-clipped=0.0 +2023-04-01 16:01:29,706 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 16:01:47,126 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:02:17,429 INFO [train.py:903] (0/4) Epoch 10, batch 50, loss[loss=0.2402, simple_loss=0.3189, pruned_loss=0.08078, over 17954.00 frames. ], tot_loss[loss=0.249, simple_loss=0.319, pruned_loss=0.08951, over 861154.67 frames. ], batch size: 83, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:02:50,337 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 16:02:55,416 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9352, 1.3663, 1.0568, 1.0308, 1.2150, 1.0116, 1.0569, 1.2362], + device='cuda:0'), covar=tensor([0.0487, 0.0704, 0.1002, 0.0521, 0.0460, 0.1134, 0.0472, 0.0407], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0300, 0.0324, 0.0245, 0.0234, 0.0326, 0.0289, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:03:03,257 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:03:18,483 INFO [train.py:903] (0/4) Epoch 10, batch 100, loss[loss=0.2552, simple_loss=0.3334, pruned_loss=0.08846, over 19140.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3179, pruned_loss=0.08988, over 1503468.67 frames. ], batch size: 69, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:03:24,178 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 16:03:29,318 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.266e+02 7.755e+02 9.384e+02 2.029e+03, threshold=1.551e+03, percent-clipped=6.0 +2023-04-01 16:04:19,612 INFO [train.py:903] (0/4) Epoch 10, batch 150, loss[loss=0.2296, simple_loss=0.2913, pruned_loss=0.08394, over 19361.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.315, pruned_loss=0.08717, over 2022825.03 frames. ], batch size: 47, lr: 8.56e-03, grad_scale: 16.0 +2023-04-01 16:05:12,384 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 16:05:20,103 INFO [train.py:903] (0/4) Epoch 10, batch 200, loss[loss=0.25, simple_loss=0.3252, pruned_loss=0.08736, over 19541.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3132, pruned_loss=0.08604, over 2424694.62 frames. 
], batch size: 54, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:05:32,331 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.325e+02 6.934e+02 9.117e+02 1.602e+03, threshold=1.387e+03, percent-clipped=3.0 +2023-04-01 16:05:34,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6916, 2.4432, 2.0330, 2.8614, 2.7278, 2.3746, 2.3204, 2.5244], + device='cuda:0'), covar=tensor([0.0743, 0.1398, 0.1296, 0.0829, 0.1019, 0.0414, 0.0912, 0.0543], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0354, 0.0289, 0.0238, 0.0297, 0.0243, 0.0274, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:05:35,096 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 16:05:45,459 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8887, 5.0673, 5.7451, 5.7284, 2.1443, 5.4255, 4.6875, 5.3106], + device='cuda:0'), covar=tensor([0.1284, 0.0671, 0.0490, 0.0463, 0.4820, 0.0546, 0.0513, 0.1073], + device='cuda:0'), in_proj_covar=tensor([0.0632, 0.0563, 0.0751, 0.0633, 0.0694, 0.0506, 0.0465, 0.0701], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 16:06:20,989 INFO [train.py:903] (0/4) Epoch 10, batch 250, loss[loss=0.264, simple_loss=0.3393, pruned_loss=0.09432, over 19582.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3126, pruned_loss=0.08569, over 2749852.63 frames. ], batch size: 61, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:07:19,641 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:07:20,496 INFO [train.py:903] (0/4) Epoch 10, batch 300, loss[loss=0.1969, simple_loss=0.2643, pruned_loss=0.06475, over 19734.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3135, pruned_loss=0.08614, over 2990418.77 frames. ], batch size: 46, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:07:32,774 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.563e+02 6.785e+02 8.281e+02 1.821e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 16:07:33,018 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:12,876 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61794.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:20,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 16:08:21,872 INFO [train.py:903] (0/4) Epoch 10, batch 350, loss[loss=0.218, simple_loss=0.2947, pruned_loss=0.07068, over 19547.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3111, pruned_loss=0.08458, over 3176511.28 frames. ], batch size: 54, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:08:44,854 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61819.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:09:23,122 INFO [train.py:903] (0/4) Epoch 10, batch 400, loss[loss=0.2494, simple_loss=0.3232, pruned_loss=0.08786, over 19581.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3114, pruned_loss=0.08516, over 3322295.20 frames. 
], batch size: 52, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:09:36,146 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+02 5.438e+02 6.846e+02 8.745e+02 2.106e+03, threshold=1.369e+03, percent-clipped=7.0 +2023-04-01 16:10:26,712 INFO [train.py:903] (0/4) Epoch 10, batch 450, loss[loss=0.2516, simple_loss=0.3285, pruned_loss=0.08734, over 19679.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3123, pruned_loss=0.08578, over 3434267.60 frames. ], batch size: 60, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:10:29,708 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 16:10:50,966 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 16:10:51,004 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 16:11:05,170 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:11:19,241 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0882, 5.1018, 5.8658, 5.8179, 1.8979, 5.5153, 4.7241, 5.3879], + device='cuda:0'), covar=tensor([0.1158, 0.0682, 0.0495, 0.0447, 0.5102, 0.0446, 0.0495, 0.1073], + device='cuda:0'), in_proj_covar=tensor([0.0624, 0.0559, 0.0743, 0.0631, 0.0687, 0.0500, 0.0460, 0.0690], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 16:11:27,993 INFO [train.py:903] (0/4) Epoch 10, batch 500, loss[loss=0.2202, simple_loss=0.3171, pruned_loss=0.06165, over 19609.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3121, pruned_loss=0.08505, over 3528203.21 frames. ], batch size: 57, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:11:30,101 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 16:11:39,884 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.473e+02 6.472e+02 7.882e+02 1.512e+03, threshold=1.294e+03, percent-clipped=2.0 +2023-04-01 16:12:28,789 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-62000.pt +2023-04-01 16:12:30,817 INFO [train.py:903] (0/4) Epoch 10, batch 550, loss[loss=0.2303, simple_loss=0.3121, pruned_loss=0.07427, over 19695.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3112, pruned_loss=0.08424, over 3610131.51 frames. ], batch size: 59, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:32,123 INFO [train.py:903] (0/4) Epoch 10, batch 600, loss[loss=0.2354, simple_loss=0.3148, pruned_loss=0.07801, over 19765.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3115, pruned_loss=0.08462, over 3661987.19 frames. ], batch size: 54, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:42,210 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 16:13:46,035 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.499e+02 5.366e+02 7.096e+02 8.541e+02 1.663e+03, threshold=1.419e+03, percent-clipped=2.0 +2023-04-01 16:14:11,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-01 16:14:26,432 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:14:35,312 INFO [train.py:903] (0/4) Epoch 10, batch 650, loss[loss=0.2312, simple_loss=0.3019, pruned_loss=0.08022, over 19732.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3115, pruned_loss=0.08471, over 3712516.32 frames. ], batch size: 51, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:14:40,022 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62105.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:24,559 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:39,368 INFO [train.py:903] (0/4) Epoch 10, batch 700, loss[loss=0.2378, simple_loss=0.306, pruned_loss=0.08482, over 19484.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3114, pruned_loss=0.08459, over 3735832.33 frames. ], batch size: 49, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:15:51,155 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.410e+02 5.986e+02 7.007e+02 9.230e+02 2.462e+03, threshold=1.401e+03, percent-clipped=6.0 +2023-04-01 16:15:51,631 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1899, 1.5088, 2.1686, 1.6801, 3.0655, 2.1328, 3.1401, 1.4320], + device='cuda:0'), covar=tensor([0.2363, 0.3917, 0.2145, 0.1704, 0.1422, 0.2154, 0.1696, 0.3634], + device='cuda:0'), in_proj_covar=tensor([0.0476, 0.0557, 0.0575, 0.0426, 0.0585, 0.0480, 0.0639, 0.0475], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:16:41,939 INFO [train.py:903] (0/4) Epoch 10, batch 750, loss[loss=0.2091, simple_loss=0.2943, pruned_loss=0.06199, over 19681.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.313, pruned_loss=0.08516, over 3764743.73 frames. ], batch size: 53, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:16:51,056 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:04,546 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:11,148 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2987, 1.3070, 1.4780, 1.7032, 2.8623, 1.2017, 2.0650, 3.0500], + device='cuda:0'), covar=tensor([0.0460, 0.2461, 0.2557, 0.1468, 0.0722, 0.2211, 0.1276, 0.0378], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0326, 0.0337, 0.0311, 0.0338, 0.0322, 0.0317, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:17:35,232 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.81 vs. limit=5.0 +2023-04-01 16:17:42,573 INFO [train.py:903] (0/4) Epoch 10, batch 800, loss[loss=0.2658, simple_loss=0.3403, pruned_loss=0.09561, over 19532.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.314, pruned_loss=0.08586, over 3778246.86 frames. ], batch size: 56, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:17:54,855 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.237e+02 6.930e+02 8.487e+02 1.526e+03, threshold=1.386e+03, percent-clipped=2.0 +2023-04-01 16:17:58,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 16:18:14,952 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:32,371 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:43,709 INFO [train.py:903] (0/4) Epoch 10, batch 850, loss[loss=0.2393, simple_loss=0.3035, pruned_loss=0.08754, over 18269.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.314, pruned_loss=0.08583, over 3803257.89 frames. ], batch size: 40, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:37,716 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 16:19:45,735 INFO [train.py:903] (0/4) Epoch 10, batch 900, loss[loss=0.2421, simple_loss=0.3164, pruned_loss=0.08386, over 19590.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3129, pruned_loss=0.08525, over 3818284.60 frames. ], batch size: 61, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:59,161 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 6.023e+02 7.076e+02 9.770e+02 2.916e+03, threshold=1.415e+03, percent-clipped=7.0 +2023-04-01 16:20:16,734 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4901, 1.5552, 1.5739, 1.9819, 1.4668, 1.8381, 1.7939, 1.5146], + device='cuda:0'), covar=tensor([0.2438, 0.2117, 0.1068, 0.1132, 0.2140, 0.0955, 0.2223, 0.1801], + device='cuda:0'), in_proj_covar=tensor([0.0762, 0.0769, 0.0629, 0.0879, 0.0759, 0.0674, 0.0774, 0.0687], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:20:35,892 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:20:48,986 INFO [train.py:903] (0/4) Epoch 10, batch 950, loss[loss=0.207, simple_loss=0.2888, pruned_loss=0.06261, over 19730.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.313, pruned_loss=0.08511, over 3816894.26 frames. ], batch size: 51, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:20:50,166 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 16:20:58,641 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:21:50,230 INFO [train.py:903] (0/4) Epoch 10, batch 1000, loss[loss=0.3064, simple_loss=0.3574, pruned_loss=0.1277, over 13658.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3146, pruned_loss=0.08627, over 3801219.44 frames. 
], batch size: 137, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:01,606 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 5.469e+02 6.659e+02 8.311e+02 1.987e+03, threshold=1.332e+03, percent-clipped=4.0 +2023-04-01 16:22:07,600 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:21,491 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:31,351 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62484.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:38,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:43,055 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 16:22:51,163 INFO [train.py:903] (0/4) Epoch 10, batch 1050, loss[loss=0.2157, simple_loss=0.2957, pruned_loss=0.06785, over 19657.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.313, pruned_loss=0.08526, over 3820405.46 frames. ], batch size: 55, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:51,588 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62501.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:23:23,823 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 16:23:54,501 INFO [train.py:903] (0/4) Epoch 10, batch 1100, loss[loss=0.3099, simple_loss=0.377, pruned_loss=0.1214, over 18283.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3122, pruned_loss=0.08462, over 3838327.00 frames. ], batch size: 83, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:24:07,777 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.569e+02 6.927e+02 9.101e+02 1.941e+03, threshold=1.385e+03, percent-clipped=3.0 +2023-04-01 16:24:10,206 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62563.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:54,352 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:56,403 INFO [train.py:903] (0/4) Epoch 10, batch 1150, loss[loss=0.3073, simple_loss=0.3607, pruned_loss=0.127, over 19281.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3135, pruned_loss=0.08586, over 3840477.87 frames. ], batch size: 66, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:24:59,681 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:15,631 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2402, 2.1044, 1.6472, 1.3326, 1.9179, 1.2423, 1.1120, 1.7504], + device='cuda:0'), covar=tensor([0.0682, 0.0598, 0.0846, 0.0667, 0.0366, 0.1013, 0.0576, 0.0357], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0296, 0.0321, 0.0239, 0.0231, 0.0325, 0.0285, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:25:37,687 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:47,587 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. 
limit=2.0 +2023-04-01 16:25:53,850 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:58,140 INFO [train.py:903] (0/4) Epoch 10, batch 1200, loss[loss=0.2512, simple_loss=0.3149, pruned_loss=0.09373, over 19467.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3136, pruned_loss=0.0863, over 3827660.32 frames. ], batch size: 49, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:26:09,506 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.581e+02 6.902e+02 9.588e+02 2.703e+03, threshold=1.380e+03, percent-clipped=8.0 +2023-04-01 16:26:24,950 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:32,533 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 16:26:39,683 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62684.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:50,277 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-01 16:26:52,183 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5652, 2.0731, 1.9930, 2.6186, 2.3407, 2.0103, 2.1173, 2.4940], + device='cuda:0'), covar=tensor([0.0772, 0.1649, 0.1278, 0.0818, 0.1106, 0.0501, 0.1018, 0.0588], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0355, 0.0288, 0.0241, 0.0297, 0.0244, 0.0273, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:26:53,328 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1275, 1.7052, 1.5734, 2.0776, 1.8773, 1.7538, 1.7238, 1.9685], + device='cuda:0'), covar=tensor([0.0834, 0.1554, 0.1421, 0.0971, 0.1210, 0.0525, 0.1069, 0.0687], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0355, 0.0288, 0.0241, 0.0297, 0.0244, 0.0273, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:26:59,723 INFO [train.py:903] (0/4) Epoch 10, batch 1250, loss[loss=0.2549, simple_loss=0.3178, pruned_loss=0.09598, over 19659.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3129, pruned_loss=0.08537, over 3835740.93 frames. ], batch size: 53, lr: 8.49e-03, grad_scale: 4.0 +2023-04-01 16:27:08,208 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4447, 1.9618, 2.0252, 2.9459, 2.1764, 2.4848, 2.6954, 2.4558], + device='cuda:0'), covar=tensor([0.0683, 0.0923, 0.0978, 0.0810, 0.0805, 0.0754, 0.0820, 0.0641], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0229, 0.0227, 0.0255, 0.0239, 0.0215, 0.0199, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 16:28:00,861 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:01,690 INFO [train.py:903] (0/4) Epoch 10, batch 1300, loss[loss=0.2447, simple_loss=0.3206, pruned_loss=0.08442, over 18130.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3141, pruned_loss=0.08588, over 3822021.37 frames. 
], batch size: 83, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:28:05,030 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:16,588 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.064e+02 6.596e+02 8.862e+02 1.920e+03, threshold=1.319e+03, percent-clipped=1.0 +2023-04-01 16:28:51,844 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:29:05,061 INFO [train.py:903] (0/4) Epoch 10, batch 1350, loss[loss=0.2245, simple_loss=0.292, pruned_loss=0.07853, over 19762.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3143, pruned_loss=0.0863, over 3809231.39 frames. ], batch size: 47, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:29:39,490 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 16:30:07,945 INFO [train.py:903] (0/4) Epoch 10, batch 1400, loss[loss=0.2637, simple_loss=0.3397, pruned_loss=0.09389, over 19713.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3136, pruned_loss=0.08579, over 3814397.52 frames. ], batch size: 63, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:30:13,090 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62855.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:20,939 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.591e+02 6.749e+02 8.217e+02 1.554e+03, threshold=1.350e+03, percent-clipped=4.0 +2023-04-01 16:30:27,994 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:44,858 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:47,426 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 16:31:07,169 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 16:31:09,388 INFO [train.py:903] (0/4) Epoch 10, batch 1450, loss[loss=0.2699, simple_loss=0.3371, pruned_loss=0.1014, over 19584.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3135, pruned_loss=0.08565, over 3811674.18 frames. ], batch size: 61, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:31:16,513 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:07,384 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:11,887 INFO [train.py:903] (0/4) Epoch 10, batch 1500, loss[loss=0.2442, simple_loss=0.3213, pruned_loss=0.08352, over 18083.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3138, pruned_loss=0.08565, over 3802877.86 frames. 
], batch size: 84, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:32:27,768 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 5.675e+02 6.883e+02 8.252e+02 2.690e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-01 16:33:13,784 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2936, 3.0186, 2.2030, 2.7784, 0.8778, 2.8771, 2.7925, 2.8810], + device='cuda:0'), covar=tensor([0.1065, 0.1302, 0.1919, 0.0898, 0.3500, 0.1009, 0.0954, 0.1293], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0356, 0.0425, 0.0312, 0.0372, 0.0352, 0.0345, 0.0377], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 16:33:17,108 INFO [train.py:903] (0/4) Epoch 10, batch 1550, loss[loss=0.287, simple_loss=0.3559, pruned_loss=0.109, over 19092.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3134, pruned_loss=0.08517, over 3816228.04 frames. ], batch size: 69, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:33:23,634 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:40,047 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 16:33:42,939 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:49,615 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:53,291 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:20,244 INFO [train.py:903] (0/4) Epoch 10, batch 1600, loss[loss=0.2799, simple_loss=0.3346, pruned_loss=0.1126, over 19674.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3123, pruned_loss=0.08463, over 3824701.91 frames. ], batch size: 53, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:34:33,009 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.609e+02 5.367e+02 6.713e+02 8.573e+02 1.582e+03, threshold=1.343e+03, percent-clipped=2.0 +2023-04-01 16:34:33,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:42,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 16:35:21,351 INFO [train.py:903] (0/4) Epoch 10, batch 1650, loss[loss=0.2368, simple_loss=0.3144, pruned_loss=0.07964, over 18179.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.313, pruned_loss=0.08534, over 3832436.42 frames. 
], batch size: 83, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:35:51,856 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:35:58,416 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6905, 1.4881, 1.5206, 1.5356, 3.1868, 1.0536, 2.3964, 3.6218], + device='cuda:0'), covar=tensor([0.0435, 0.2392, 0.2463, 0.1697, 0.0728, 0.2457, 0.1132, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0328, 0.0342, 0.0314, 0.0341, 0.0327, 0.0321, 0.0343], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:36:05,480 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:14,724 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:21,904 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:23,875 INFO [train.py:903] (0/4) Epoch 10, batch 1700, loss[loss=0.2675, simple_loss=0.3361, pruned_loss=0.0995, over 19788.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3125, pruned_loss=0.08519, over 3832597.78 frames. ], batch size: 56, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:36:38,455 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.819e+02 7.178e+02 9.040e+02 2.117e+03, threshold=1.436e+03, percent-clipped=7.0 +2023-04-01 16:37:04,086 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 16:37:28,665 INFO [train.py:903] (0/4) Epoch 10, batch 1750, loss[loss=0.2403, simple_loss=0.3109, pruned_loss=0.08486, over 19604.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3122, pruned_loss=0.08494, over 3835918.06 frames. ], batch size: 50, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:14,984 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,205 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,878 INFO [train.py:903] (0/4) Epoch 10, batch 1800, loss[loss=0.2316, simple_loss=0.2926, pruned_loss=0.08532, over 19347.00 frames. ], tot_loss[loss=0.241, simple_loss=0.312, pruned_loss=0.08495, over 3817969.38 frames. ], batch size: 47, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:44,564 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.870e+02 7.659e+02 9.293e+02 2.596e+03, threshold=1.532e+03, percent-clipped=8.0 +2023-04-01 16:39:03,354 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:03,697 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.84 vs. 
limit=5.0 +2023-04-01 16:39:26,422 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8020, 3.3604, 1.8450, 1.6731, 3.0979, 1.4700, 1.0948, 2.0012], + device='cuda:0'), covar=tensor([0.1056, 0.0392, 0.0845, 0.0798, 0.0479, 0.1126, 0.0900, 0.0575], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0301, 0.0329, 0.0248, 0.0236, 0.0326, 0.0291, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:39:30,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 16:39:32,968 INFO [train.py:903] (0/4) Epoch 10, batch 1850, loss[loss=0.1908, simple_loss=0.2612, pruned_loss=0.06021, over 19075.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3124, pruned_loss=0.08485, over 3810662.97 frames. ], batch size: 42, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:39:35,801 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:54,102 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:10,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 16:40:19,588 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0911, 1.6632, 1.6132, 2.0450, 1.8795, 1.7609, 1.6405, 2.0718], + device='cuda:0'), covar=tensor([0.0816, 0.1507, 0.1372, 0.0905, 0.1116, 0.0500, 0.1098, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0354, 0.0289, 0.0239, 0.0296, 0.0244, 0.0275, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 16:40:26,743 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:35,819 INFO [train.py:903] (0/4) Epoch 10, batch 1900, loss[loss=0.2081, simple_loss=0.2928, pruned_loss=0.06171, over 19771.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3135, pruned_loss=0.08531, over 3807755.32 frames. ], batch size: 56, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:40:52,657 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 5.562e+02 7.038e+02 8.618e+02 1.834e+03, threshold=1.408e+03, percent-clipped=3.0 +2023-04-01 16:40:55,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 16:41:01,876 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 16:41:24,930 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 16:41:37,239 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:41:40,406 INFO [train.py:903] (0/4) Epoch 10, batch 1950, loss[loss=0.2575, simple_loss=0.3316, pruned_loss=0.09167, over 19524.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3134, pruned_loss=0.08568, over 3810458.08 frames. 
], batch size: 64, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:42:10,547 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:27,969 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:31,447 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:44,768 INFO [train.py:903] (0/4) Epoch 10, batch 2000, loss[loss=0.2836, simple_loss=0.347, pruned_loss=0.1101, over 19743.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3127, pruned_loss=0.08503, over 3808631.63 frames. ], batch size: 63, lr: 8.44e-03, grad_scale: 8.0 +2023-04-01 16:43:00,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.731e+02 5.221e+02 6.641e+02 8.776e+02 2.044e+03, threshold=1.328e+03, percent-clipped=3.0 +2023-04-01 16:43:43,937 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 16:43:47,379 INFO [train.py:903] (0/4) Epoch 10, batch 2050, loss[loss=0.2263, simple_loss=0.3079, pruned_loss=0.07237, over 19496.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3131, pruned_loss=0.08538, over 3801173.95 frames. ], batch size: 64, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:43:53,614 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:03,935 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 16:44:05,931 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 16:44:26,404 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:28,433 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 16:44:50,581 INFO [train.py:903] (0/4) Epoch 10, batch 2100, loss[loss=0.2863, simple_loss=0.3581, pruned_loss=0.1073, over 19331.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3132, pruned_loss=0.08518, over 3799589.35 frames. ], batch size: 66, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:45:06,405 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.645e+02 7.348e+02 9.688e+02 2.351e+03, threshold=1.470e+03, percent-clipped=4.0 +2023-04-01 16:45:26,369 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 16:45:32,492 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:37,346 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:46,623 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 16:45:54,834 INFO [train.py:903] (0/4) Epoch 10, batch 2150, loss[loss=0.2231, simple_loss=0.3019, pruned_loss=0.0722, over 19516.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3142, pruned_loss=0.08597, over 3803232.44 frames. 
], batch size: 56, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:46:00,127 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63604.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:46:58,775 INFO [train.py:903] (0/4) Epoch 10, batch 2200, loss[loss=0.1835, simple_loss=0.2572, pruned_loss=0.05488, over 19759.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3134, pruned_loss=0.08586, over 3787790.90 frames. ], batch size: 45, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:47:13,920 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.200e+02 5.616e+02 6.875e+02 8.680e+02 1.983e+03, threshold=1.375e+03, percent-clipped=4.0 +2023-04-01 16:47:36,985 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9896, 2.2053, 2.3051, 2.3160, 1.0020, 2.1515, 1.9912, 2.1172], + device='cuda:0'), covar=tensor([0.1083, 0.1735, 0.0647, 0.0660, 0.3534, 0.0830, 0.0562, 0.0978], + device='cuda:0'), in_proj_covar=tensor([0.0635, 0.0575, 0.0767, 0.0642, 0.0704, 0.0512, 0.0469, 0.0703], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 16:47:50,343 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 16:47:59,060 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3360, 1.4201, 1.7967, 1.5684, 2.6710, 2.3088, 2.9261, 1.2183], + device='cuda:0'), covar=tensor([0.2139, 0.3709, 0.2163, 0.1729, 0.1397, 0.1715, 0.1384, 0.3465], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0564, 0.0583, 0.0428, 0.0587, 0.0483, 0.0646, 0.0482], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:48:00,173 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:48:03,389 INFO [train.py:903] (0/4) Epoch 10, batch 2250, loss[loss=0.222, simple_loss=0.2891, pruned_loss=0.07748, over 19379.00 frames. ], tot_loss[loss=0.242, simple_loss=0.313, pruned_loss=0.08547, over 3801664.35 frames. ], batch size: 48, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:48:38,898 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7352, 3.8856, 4.1997, 4.2016, 2.4646, 3.9136, 3.6576, 3.9713], + device='cuda:0'), covar=tensor([0.1038, 0.2305, 0.0541, 0.0481, 0.3461, 0.0764, 0.0496, 0.0823], + device='cuda:0'), in_proj_covar=tensor([0.0634, 0.0575, 0.0767, 0.0639, 0.0704, 0.0512, 0.0470, 0.0701], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 16:48:44,201 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 16:48:51,732 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3263, 1.4083, 1.7469, 1.4638, 3.0792, 2.4786, 3.4651, 1.5667], + device='cuda:0'), covar=tensor([0.2290, 0.3820, 0.2404, 0.1899, 0.1399, 0.1767, 0.1349, 0.3257], + device='cuda:0'), in_proj_covar=tensor([0.0479, 0.0560, 0.0581, 0.0427, 0.0583, 0.0481, 0.0643, 0.0481], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:49:04,920 INFO [train.py:903] (0/4) Epoch 10, batch 2300, loss[loss=0.2349, simple_loss=0.3143, pruned_loss=0.07781, over 19669.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3137, pruned_loss=0.08568, over 3812151.80 frames. 
], batch size: 55, lr: 8.42e-03, grad_scale: 4.0 +2023-04-01 16:49:18,691 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 16:49:23,050 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.837e+02 6.965e+02 8.642e+02 2.205e+03, threshold=1.393e+03, percent-clipped=3.0 +2023-04-01 16:49:45,517 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63782.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:49:47,898 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:09,267 INFO [train.py:903] (0/4) Epoch 10, batch 2350, loss[loss=0.3091, simple_loss=0.3597, pruned_loss=0.1292, over 13099.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3131, pruned_loss=0.08485, over 3822079.33 frames. ], batch size: 135, lr: 8.41e-03, grad_scale: 4.0 +2023-04-01 16:50:44,281 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:50,877 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 16:51:10,038 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 16:51:11,956 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-01 16:51:13,622 INFO [train.py:903] (0/4) Epoch 10, batch 2400, loss[loss=0.2174, simple_loss=0.294, pruned_loss=0.0704, over 19764.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3125, pruned_loss=0.08485, over 3836030.40 frames. ], batch size: 54, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:51:29,387 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:51:30,159 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.763e+02 5.915e+02 7.022e+02 9.244e+02 2.907e+03, threshold=1.404e+03, percent-clipped=10.0 +2023-04-01 16:51:55,405 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7415, 1.8116, 2.0581, 2.4885, 1.6429, 2.2478, 2.3492, 1.9599], + device='cuda:0'), covar=tensor([0.3370, 0.2749, 0.1367, 0.1440, 0.2936, 0.1336, 0.3050, 0.2393], + device='cuda:0'), in_proj_covar=tensor([0.0772, 0.0780, 0.0638, 0.0882, 0.0763, 0.0683, 0.0781, 0.0695], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:52:03,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 16:52:12,814 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:15,904 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:17,970 INFO [train.py:903] (0/4) Epoch 10, batch 2450, loss[loss=0.2536, simple_loss=0.3303, pruned_loss=0.08849, over 17447.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3121, pruned_loss=0.08425, over 3837621.75 frames. 
], batch size: 101, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:52:55,521 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:59,271 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:16,593 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:19,963 INFO [train.py:903] (0/4) Epoch 10, batch 2500, loss[loss=0.2166, simple_loss=0.2987, pruned_loss=0.06725, over 19577.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3114, pruned_loss=0.08364, over 3826870.46 frames. ], batch size: 52, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:53:23,766 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:35,922 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.566e+02 5.261e+02 6.826e+02 9.233e+02 1.687e+03, threshold=1.365e+03, percent-clipped=6.0 +2023-04-01 16:53:56,230 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:54:00,224 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 16:54:21,195 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-64000.pt +2023-04-01 16:54:23,068 INFO [train.py:903] (0/4) Epoch 10, batch 2550, loss[loss=0.2505, simple_loss=0.3307, pruned_loss=0.08518, over 19590.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3118, pruned_loss=0.08409, over 3829920.57 frames. ], batch size: 57, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:13,541 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:15,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 16:55:20,769 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:26,459 INFO [train.py:903] (0/4) Epoch 10, batch 2600, loss[loss=0.244, simple_loss=0.3167, pruned_loss=0.08568, over 19683.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3124, pruned_loss=0.0848, over 3812273.05 frames. ], batch size: 59, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:41,492 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:42,249 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.625e+02 5.188e+02 6.411e+02 7.864e+02 1.888e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 16:56:27,597 INFO [train.py:903] (0/4) Epoch 10, batch 2650, loss[loss=0.298, simple_loss=0.3608, pruned_loss=0.1176, over 19516.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3143, pruned_loss=0.0863, over 3800690.99 frames. ], batch size: 54, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:56:45,343 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 16:57:23,814 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6894, 4.1818, 4.4348, 4.4187, 1.4674, 4.1120, 3.6069, 4.1294], + device='cuda:0'), covar=tensor([0.1375, 0.0765, 0.0573, 0.0573, 0.5506, 0.0578, 0.0613, 0.1083], + device='cuda:0'), in_proj_covar=tensor([0.0640, 0.0579, 0.0767, 0.0639, 0.0713, 0.0517, 0.0475, 0.0706], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 16:57:29,267 INFO [train.py:903] (0/4) Epoch 10, batch 2700, loss[loss=0.2551, simple_loss=0.3252, pruned_loss=0.09254, over 17625.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.314, pruned_loss=0.0862, over 3804523.25 frames. ], batch size: 101, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:57:32,906 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64153.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:35,308 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:45,502 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.891e+02 6.747e+02 8.779e+02 3.257e+03, threshold=1.349e+03, percent-clipped=11.0 +2023-04-01 16:57:56,898 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:04,097 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64178.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:07,373 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:33,076 INFO [train.py:903] (0/4) Epoch 10, batch 2750, loss[loss=0.2737, simple_loss=0.3411, pruned_loss=0.1032, over 18209.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3124, pruned_loss=0.08501, over 3803178.35 frames. ], batch size: 83, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:58:41,438 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:57,441 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2154, 1.2427, 1.6131, 1.3926, 2.5941, 2.0486, 2.7639, 1.0840], + device='cuda:0'), covar=tensor([0.2473, 0.4116, 0.2419, 0.1957, 0.1412, 0.1997, 0.1384, 0.3751], + device='cuda:0'), in_proj_covar=tensor([0.0481, 0.0564, 0.0583, 0.0428, 0.0587, 0.0483, 0.0646, 0.0484], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 16:59:36,101 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,916 INFO [train.py:903] (0/4) Epoch 10, batch 2800, loss[loss=0.2448, simple_loss=0.314, pruned_loss=0.08779, over 17223.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3112, pruned_loss=0.08403, over 3815804.52 frames. 
], batch size: 101, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 16:59:52,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.243e+02 6.695e+02 7.923e+02 2.040e+03, threshold=1.339e+03, percent-clipped=2.0 +2023-04-01 17:00:10,647 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:13,398 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3627, 1.3995, 1.6535, 1.5209, 2.4942, 2.1329, 2.5263, 1.0234], + device='cuda:0'), covar=tensor([0.2130, 0.3762, 0.2259, 0.1733, 0.1367, 0.1820, 0.1396, 0.3589], + device='cuda:0'), in_proj_covar=tensor([0.0479, 0.0564, 0.0582, 0.0427, 0.0587, 0.0481, 0.0644, 0.0483], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 17:00:21,802 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:40,323 INFO [train.py:903] (0/4) Epoch 10, batch 2850, loss[loss=0.2077, simple_loss=0.283, pruned_loss=0.06619, over 19700.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3107, pruned_loss=0.08358, over 3821818.90 frames. ], batch size: 53, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:00:41,952 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:59,518 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3622, 1.8155, 1.6233, 2.5711, 1.9083, 2.4996, 2.6904, 2.5406], + device='cuda:0'), covar=tensor([0.0691, 0.0900, 0.0980, 0.0979, 0.0942, 0.0693, 0.0783, 0.0571], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0223, 0.0223, 0.0252, 0.0236, 0.0212, 0.0199, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 17:01:03,234 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:06,434 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:11,307 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5474, 1.2037, 1.1279, 1.3458, 1.1694, 1.1983, 0.9967, 1.2814], + device='cuda:0'), covar=tensor([0.0984, 0.1194, 0.1520, 0.0948, 0.1152, 0.0776, 0.1480, 0.0857], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0352, 0.0289, 0.0239, 0.0299, 0.0243, 0.0275, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:01:12,442 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:35,570 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:39,732 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 17:01:43,220 INFO [train.py:903] (0/4) Epoch 10, batch 2900, loss[loss=0.2243, simple_loss=0.2857, pruned_loss=0.08142, over 19793.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3104, pruned_loss=0.08316, over 3829573.24 frames. 
], batch size: 49, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:01:58,206 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:58,969 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 6.015e+02 7.349e+02 1.031e+03 2.008e+03, threshold=1.470e+03, percent-clipped=12.0 +2023-04-01 17:02:15,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3721, 1.2904, 1.4322, 1.5945, 2.9133, 1.1496, 2.0592, 3.1794], + device='cuda:0'), covar=tensor([0.0418, 0.2729, 0.2734, 0.1645, 0.0751, 0.2324, 0.1275, 0.0330], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0331, 0.0345, 0.0315, 0.0339, 0.0328, 0.0321, 0.0343], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:02:26,618 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:36,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64393.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:46,691 INFO [train.py:903] (0/4) Epoch 10, batch 2950, loss[loss=0.2665, simple_loss=0.3404, pruned_loss=0.09631, over 19670.00 frames. ], tot_loss[loss=0.239, simple_loss=0.311, pruned_loss=0.08346, over 3833240.30 frames. ], batch size: 58, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:06,095 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.75 vs. limit=5.0 +2023-04-01 17:03:48,251 INFO [train.py:903] (0/4) Epoch 10, batch 3000, loss[loss=0.2213, simple_loss=0.3006, pruned_loss=0.07094, over 19684.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3111, pruned_loss=0.08336, over 3836714.05 frames. ], batch size: 59, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,252 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 17:04:00,870 INFO [train.py:937] (0/4) Epoch 10, validation: loss=0.1811, simple_loss=0.2816, pruned_loss=0.04036, over 944034.00 frames. +2023-04-01 17:04:00,871 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 17:04:02,458 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6359, 1.6512, 1.5022, 1.2761, 1.1309, 1.3138, 0.2286, 0.6043], + device='cuda:0'), covar=tensor([0.0551, 0.0554, 0.0322, 0.0484, 0.1052, 0.0605, 0.0868, 0.0889], + device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0327, 0.0325, 0.0344, 0.0416, 0.0340, 0.0303, 0.0321], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 17:04:04,335 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 17:04:13,260 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. 
limit=2.0 +2023-04-01 17:04:18,125 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 5.533e+02 6.661e+02 8.174e+02 1.809e+03, threshold=1.332e+03, percent-clipped=2.0 +2023-04-01 17:04:41,351 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64483.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:03,197 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:04,019 INFO [train.py:903] (0/4) Epoch 10, batch 3050, loss[loss=0.2013, simple_loss=0.2681, pruned_loss=0.06726, over 19737.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.311, pruned_loss=0.08335, over 3846287.81 frames. ], batch size: 46, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:05:48,629 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8133, 1.5131, 1.5203, 2.1264, 1.8139, 2.0913, 2.0458, 1.9003], + device='cuda:0'), covar=tensor([0.0791, 0.0996, 0.1024, 0.0825, 0.0845, 0.0728, 0.0854, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0253, 0.0239, 0.0216, 0.0202, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 17:05:57,723 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3974, 1.8969, 1.8467, 2.6883, 2.2213, 2.5289, 2.6477, 2.4290], + device='cuda:0'), covar=tensor([0.0691, 0.0890, 0.0948, 0.0836, 0.0868, 0.0654, 0.0812, 0.0580], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0254, 0.0240, 0.0216, 0.0202, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 17:05:57,751 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:06,591 INFO [train.py:903] (0/4) Epoch 10, batch 3100, loss[loss=0.2764, simple_loss=0.342, pruned_loss=0.1054, over 18744.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3111, pruned_loss=0.08342, over 3845952.05 frames. ], batch size: 74, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:06:22,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.961e+02 7.216e+02 8.690e+02 2.208e+03, threshold=1.443e+03, percent-clipped=3.0 +2023-04-01 17:06:27,980 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:40,849 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64578.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:01,269 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:10,478 INFO [train.py:903] (0/4) Epoch 10, batch 3150, loss[loss=0.1899, simple_loss=0.2638, pruned_loss=0.05796, over 19285.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3101, pruned_loss=0.08287, over 3848473.92 frames. 
], batch size: 44, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:07:13,030 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:17,450 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:21,738 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5961, 4.0847, 4.2512, 4.2380, 1.4986, 3.9442, 3.5080, 3.9467], + device='cuda:0'), covar=tensor([0.1268, 0.0672, 0.0511, 0.0522, 0.4757, 0.0570, 0.0545, 0.0962], + device='cuda:0'), in_proj_covar=tensor([0.0645, 0.0581, 0.0768, 0.0645, 0.0713, 0.0518, 0.0476, 0.0712], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 17:07:31,625 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 17:07:39,773 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 17:08:10,662 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:12,600 INFO [train.py:903] (0/4) Epoch 10, batch 3200, loss[loss=0.2534, simple_loss=0.3329, pruned_loss=0.087, over 19673.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3098, pruned_loss=0.08243, over 3853060.87 frames. ], batch size: 60, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:08:30,168 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.483e+02 6.754e+02 8.204e+02 1.644e+03, threshold=1.351e+03, percent-clipped=2.0 +2023-04-01 17:08:32,795 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:43,793 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:16,028 INFO [train.py:903] (0/4) Epoch 10, batch 3250, loss[loss=0.1996, simple_loss=0.2743, pruned_loss=0.06249, over 19372.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3103, pruned_loss=0.08267, over 3849712.44 frames. ], batch size: 47, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:09:24,031 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64707.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:26,661 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:42,842 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:20,336 INFO [train.py:903] (0/4) Epoch 10, batch 3300, loss[loss=0.2704, simple_loss=0.3405, pruned_loss=0.1001, over 19530.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3114, pruned_loss=0.08319, over 3837856.25 frames. ], batch size: 56, lr: 8.35e-03, grad_scale: 8.0 +2023-04-01 17:10:26,800 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:28,776 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-01 17:10:35,495 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:37,431 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.384e+02 6.605e+02 8.281e+02 2.311e+03, threshold=1.321e+03, percent-clipped=9.0 +2023-04-01 17:10:43,924 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:57,727 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:23,426 INFO [train.py:903] (0/4) Epoch 10, batch 3350, loss[loss=0.2219, simple_loss=0.2846, pruned_loss=0.07961, over 19783.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3116, pruned_loss=0.08371, over 3844464.08 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:11:49,545 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:55,098 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:12:04,623 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6602, 1.4782, 1.3916, 1.9392, 1.5556, 1.9430, 1.8900, 1.8012], + device='cuda:0'), covar=tensor([0.0788, 0.0916, 0.1008, 0.0774, 0.0840, 0.0746, 0.0823, 0.0652], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0227, 0.0225, 0.0253, 0.0239, 0.0215, 0.0201, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 17:12:24,011 INFO [train.py:903] (0/4) Epoch 10, batch 3400, loss[loss=0.2378, simple_loss=0.3151, pruned_loss=0.08026, over 18156.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3119, pruned_loss=0.08461, over 3844819.41 frames. ], batch size: 83, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:12:40,676 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2419, 1.3211, 1.9342, 1.5002, 2.4434, 1.9916, 2.5631, 1.1686], + device='cuda:0'), covar=tensor([0.2313, 0.3923, 0.2052, 0.1836, 0.1656, 0.2157, 0.1892, 0.3680], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0566, 0.0583, 0.0428, 0.0587, 0.0483, 0.0645, 0.0482], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 17:12:42,252 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.893e+02 5.724e+02 7.342e+02 8.665e+02 1.913e+03, threshold=1.468e+03, percent-clipped=6.0 +2023-04-01 17:12:59,015 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9574, 1.1585, 1.3505, 1.2892, 2.4548, 0.9226, 1.6991, 2.7933], + device='cuda:0'), covar=tensor([0.0633, 0.2852, 0.2901, 0.1872, 0.0968, 0.2606, 0.1529, 0.0449], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0330, 0.0344, 0.0316, 0.0341, 0.0329, 0.0322, 0.0343], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:13:27,824 INFO [train.py:903] (0/4) Epoch 10, batch 3450, loss[loss=0.2398, simple_loss=0.3193, pruned_loss=0.0802, over 19763.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3123, pruned_loss=0.08455, over 3826594.76 frames. 
], batch size: 54, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:13:35,102 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 17:14:20,585 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:30,662 INFO [train.py:903] (0/4) Epoch 10, batch 3500, loss[loss=0.2759, simple_loss=0.3342, pruned_loss=0.1088, over 18091.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3124, pruned_loss=0.0847, over 3817309.52 frames. ], batch size: 83, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:14:31,778 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:48,653 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 5.616e+02 6.822e+02 8.996e+02 1.764e+03, threshold=1.364e+03, percent-clipped=1.0 +2023-04-01 17:14:49,030 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:59,244 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0778, 5.0723, 5.8934, 5.8740, 1.8904, 5.5885, 4.7529, 5.4690], + device='cuda:0'), covar=tensor([0.1267, 0.0663, 0.0521, 0.0469, 0.5194, 0.0405, 0.0499, 0.1100], + device='cuda:0'), in_proj_covar=tensor([0.0633, 0.0573, 0.0756, 0.0637, 0.0700, 0.0513, 0.0470, 0.0697], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 17:15:19,880 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:25,269 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:34,048 INFO [train.py:903] (0/4) Epoch 10, batch 3550, loss[loss=0.2443, simple_loss=0.3033, pruned_loss=0.09271, over 19742.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3121, pruned_loss=0.08436, over 3822359.35 frames. ], batch size: 46, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:15:44,361 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:34,666 INFO [train.py:903] (0/4) Epoch 10, batch 3600, loss[loss=0.2451, simple_loss=0.3326, pruned_loss=0.07878, over 19676.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3112, pruned_loss=0.08418, over 3806267.30 frames. 
], batch size: 55, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:16:52,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.791e+02 6.813e+02 8.568e+02 1.743e+03, threshold=1.363e+03, percent-clipped=4.0 +2023-04-01 17:16:52,163 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:53,551 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:02,326 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:08,111 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:35,956 INFO [train.py:903] (0/4) Epoch 10, batch 3650, loss[loss=0.217, simple_loss=0.2981, pruned_loss=0.06797, over 19846.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3115, pruned_loss=0.08422, over 3789383.56 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:17:38,604 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:42,739 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:51,899 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:05,484 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:36,262 INFO [train.py:903] (0/4) Epoch 10, batch 3700, loss[loss=0.2061, simple_loss=0.2808, pruned_loss=0.06569, over 19763.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3122, pruned_loss=0.08503, over 3791401.76 frames. ], batch size: 54, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:18:53,971 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 5.629e+02 7.088e+02 8.754e+02 1.818e+03, threshold=1.418e+03, percent-clipped=5.0 +2023-04-01 17:19:02,242 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 17:19:12,177 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:33,192 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65198.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:37,055 INFO [train.py:903] (0/4) Epoch 10, batch 3750, loss[loss=0.2524, simple_loss=0.3264, pruned_loss=0.08923, over 18114.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3137, pruned_loss=0.08607, over 3792879.63 frames. ], batch size: 83, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:19:49,767 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-04-01 17:20:02,498 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:03,645 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:10,495 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:36,858 INFO [train.py:903] (0/4) Epoch 10, batch 3800, loss[loss=0.2409, simple_loss=0.3147, pruned_loss=0.08348, over 19647.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3151, pruned_loss=0.08674, over 3787142.28 frames. ], batch size: 55, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:54,008 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 5.620e+02 6.620e+02 8.005e+02 1.692e+03, threshold=1.324e+03, percent-clipped=4.0 +2023-04-01 17:21:06,349 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 17:21:20,019 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.23 vs. limit=5.0 +2023-04-01 17:21:20,174 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 17:21:37,764 INFO [train.py:903] (0/4) Epoch 10, batch 3850, loss[loss=0.2007, simple_loss=0.2747, pruned_loss=0.06337, over 15143.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3151, pruned_loss=0.08648, over 3793996.90 frames. ], batch size: 33, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:21:40,381 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:03,660 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:21,617 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:33,961 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:37,972 INFO [train.py:903] (0/4) Epoch 10, batch 3900, loss[loss=0.2139, simple_loss=0.2822, pruned_loss=0.07281, over 19749.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3142, pruned_loss=0.08576, over 3797376.85 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:22:55,833 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 5.653e+02 7.180e+02 9.093e+02 1.633e+03, threshold=1.436e+03, percent-clipped=2.0 +2023-04-01 17:23:14,896 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65381.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:32,382 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 17:23:40,380 INFO [train.py:903] (0/4) Epoch 10, batch 3950, loss[loss=0.2106, simple_loss=0.2771, pruned_loss=0.07207, over 19073.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.312, pruned_loss=0.08472, over 3796155.07 frames. ], batch size: 42, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:23:40,415 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 17:23:46,482 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:58,940 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:23,326 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:41,325 INFO [train.py:903] (0/4) Epoch 10, batch 4000, loss[loss=0.2348, simple_loss=0.3039, pruned_loss=0.0828, over 19576.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.313, pruned_loss=0.08482, over 3798300.31 frames. ], batch size: 52, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:24:44,028 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65453.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:54,635 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65461.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:58,706 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.016e+02 5.448e+02 7.265e+02 9.508e+02 1.942e+03, threshold=1.453e+03, percent-clipped=2.0 +2023-04-01 17:25:14,537 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:23,061 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 17:25:23,384 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:41,795 INFO [train.py:903] (0/4) Epoch 10, batch 4050, loss[loss=0.2789, simple_loss=0.3441, pruned_loss=0.1068, over 18099.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.313, pruned_loss=0.08485, over 3790469.53 frames. ], batch size: 83, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:25:45,297 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:53,252 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:19,256 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:40,676 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65550.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:26:41,431 INFO [train.py:903] (0/4) Epoch 10, batch 4100, loss[loss=0.2768, simple_loss=0.3472, pruned_loss=0.1032, over 18047.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3148, pruned_loss=0.08633, over 3794512.31 frames. ], batch size: 83, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:26:59,041 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.925e+02 7.032e+02 8.463e+02 2.911e+03, threshold=1.406e+03, percent-clipped=6.0 +2023-04-01 17:27:11,838 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 17:27:42,545 INFO [train.py:903] (0/4) Epoch 10, batch 4150, loss[loss=0.1897, simple_loss=0.2642, pruned_loss=0.0576, over 19734.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3127, pruned_loss=0.08518, over 3801043.28 frames. 
], batch size: 46, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:39,191 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:28:43,357 INFO [train.py:903] (0/4) Epoch 10, batch 4200, loss[loss=0.2434, simple_loss=0.3007, pruned_loss=0.09303, over 19689.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3111, pruned_loss=0.08399, over 3814854.05 frames. ], batch size: 53, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:43,405 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 17:28:59,415 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.075e+02 5.851e+02 6.725e+02 9.221e+02 2.199e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-01 17:29:02,913 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. limit=5.0 +2023-04-01 17:29:42,450 INFO [train.py:903] (0/4) Epoch 10, batch 4250, loss[loss=0.2419, simple_loss=0.3086, pruned_loss=0.08757, over 19623.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3107, pruned_loss=0.08377, over 3833753.76 frames. ], batch size: 50, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:29:54,090 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:29:55,765 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 17:30:06,600 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 17:30:22,834 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:30:43,321 INFO [train.py:903] (0/4) Epoch 10, batch 4300, loss[loss=0.2533, simple_loss=0.3332, pruned_loss=0.08664, over 19658.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3117, pruned_loss=0.08412, over 3844038.70 frames. ], batch size: 55, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:30:56,215 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:00,063 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 5.358e+02 7.223e+02 8.854e+02 2.636e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 17:31:27,986 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:35,317 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 17:31:43,143 INFO [train.py:903] (0/4) Epoch 10, batch 4350, loss[loss=0.252, simple_loss=0.3233, pruned_loss=0.09037, over 19741.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3117, pruned_loss=0.08445, over 3852377.15 frames. ], batch size: 63, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:31:58,631 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:32:44,687 INFO [train.py:903] (0/4) Epoch 10, batch 4400, loss[loss=0.2155, simple_loss=0.2801, pruned_loss=0.07541, over 19716.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3107, pruned_loss=0.08412, over 3856636.92 frames. 
], batch size: 46, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:32:50,620 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65856.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:33:00,118 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.748e+02 7.554e+02 9.760e+02 1.805e+03, threshold=1.511e+03, percent-clipped=4.0 +2023-04-01 17:33:07,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7509, 1.7533, 1.5128, 1.3627, 1.2769, 1.4119, 0.2263, 0.6739], + device='cuda:0'), covar=tensor([0.0386, 0.0371, 0.0233, 0.0355, 0.0816, 0.0446, 0.0752, 0.0655], + device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0326, 0.0325, 0.0347, 0.0418, 0.0344, 0.0303, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 17:33:10,962 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 17:33:16,848 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6705, 2.0910, 1.9121, 2.8325, 2.6329, 2.4177, 2.2199, 2.9493], + device='cuda:0'), covar=tensor([0.0810, 0.1736, 0.1442, 0.0930, 0.1181, 0.0416, 0.1050, 0.0465], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0352, 0.0293, 0.0240, 0.0297, 0.0244, 0.0276, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:33:19,853 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 17:33:36,071 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65894.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:33:44,051 INFO [train.py:903] (0/4) Epoch 10, batch 4450, loss[loss=0.2403, simple_loss=0.3219, pruned_loss=0.07941, over 19411.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3119, pruned_loss=0.08443, over 3850163.06 frames. ], batch size: 66, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:34:45,020 INFO [train.py:903] (0/4) Epoch 10, batch 4500, loss[loss=0.2657, simple_loss=0.3317, pruned_loss=0.09986, over 13985.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3111, pruned_loss=0.08412, over 3834668.79 frames. 
], batch size: 136, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:35:01,306 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.376e+02 6.626e+02 8.325e+02 1.832e+03, threshold=1.325e+03, percent-clipped=3.0 +2023-04-01 17:35:14,782 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7596, 4.2946, 2.4056, 3.7984, 1.0562, 4.0811, 4.0326, 4.1710], + device='cuda:0'), covar=tensor([0.0612, 0.1150, 0.2105, 0.0727, 0.3997, 0.0774, 0.0805, 0.0957], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0362, 0.0425, 0.0316, 0.0376, 0.0355, 0.0351, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 17:35:43,036 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2363, 1.2957, 1.4275, 1.4102, 1.8023, 1.8282, 1.7874, 0.5818], + device='cuda:0'), covar=tensor([0.2167, 0.3733, 0.2307, 0.1742, 0.1323, 0.1961, 0.1296, 0.3567], + device='cuda:0'), in_proj_covar=tensor([0.0481, 0.0561, 0.0585, 0.0430, 0.0587, 0.0486, 0.0645, 0.0484], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 17:35:44,998 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-66000.pt +2023-04-01 17:35:47,224 INFO [train.py:903] (0/4) Epoch 10, batch 4550, loss[loss=0.2519, simple_loss=0.3288, pruned_loss=0.0875, over 19337.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3112, pruned_loss=0.08363, over 3829356.53 frames. ], batch size: 66, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:35:56,014 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 17:35:56,360 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66009.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:36:06,973 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:17,574 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 17:36:36,243 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:45,879 INFO [train.py:903] (0/4) Epoch 10, batch 4600, loss[loss=0.3007, simple_loss=0.3611, pruned_loss=0.1201, over 19602.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.311, pruned_loss=0.08393, over 3819003.65 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:37:00,874 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 5.689e+02 6.858e+02 9.462e+02 1.667e+03, threshold=1.372e+03, percent-clipped=8.0 +2023-04-01 17:37:02,832 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 17:37:43,792 INFO [train.py:903] (0/4) Epoch 10, batch 4650, loss[loss=0.2088, simple_loss=0.2813, pruned_loss=0.06811, over 19391.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.311, pruned_loss=0.08414, over 3826920.17 frames. ], batch size: 48, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:38:00,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 17:38:10,782 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 17:38:44,405 INFO [train.py:903] (0/4) Epoch 10, batch 4700, loss[loss=0.2563, simple_loss=0.3318, pruned_loss=0.09044, over 19617.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3114, pruned_loss=0.08414, over 3824352.92 frames. ], batch size: 57, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:38:57,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:39:00,973 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.007e+02 6.063e+02 7.892e+02 1.039e+03 2.104e+03, threshold=1.578e+03, percent-clipped=6.0 +2023-04-01 17:39:05,614 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 17:39:17,535 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66178.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:39:43,291 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66200.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:39:44,104 INFO [train.py:903] (0/4) Epoch 10, batch 4750, loss[loss=0.2657, simple_loss=0.3314, pruned_loss=0.09995, over 19479.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3121, pruned_loss=0.08475, over 3829694.80 frames. ], batch size: 64, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:40:43,453 INFO [train.py:903] (0/4) Epoch 10, batch 4800, loss[loss=0.23, simple_loss=0.3103, pruned_loss=0.07483, over 19432.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3124, pruned_loss=0.08513, over 3816029.84 frames. ], batch size: 70, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:41:01,217 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.914e+02 7.080e+02 8.206e+02 1.527e+03, threshold=1.416e+03, percent-clipped=0.0 +2023-04-01 17:41:01,646 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66265.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:41:15,548 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:41:31,322 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66290.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:41:35,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9848, 3.6564, 2.1967, 1.5390, 3.4117, 1.4042, 1.1421, 2.1607], + device='cuda:0'), covar=tensor([0.0991, 0.0316, 0.0669, 0.0793, 0.0338, 0.1049, 0.0883, 0.0494], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0296, 0.0325, 0.0241, 0.0235, 0.0318, 0.0287, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:41:44,772 INFO [train.py:903] (0/4) Epoch 10, batch 4850, loss[loss=0.1851, simple_loss=0.2639, pruned_loss=0.05312, over 19761.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3115, pruned_loss=0.0844, over 3795160.16 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:41:56,283 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 17:42:01,790 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66315.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:42:10,353 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 17:42:18,476 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2129, 1.8190, 1.9814, 2.5279, 1.9139, 2.5223, 2.5829, 2.3718], + device='cuda:0'), covar=tensor([0.0716, 0.0897, 0.0885, 0.0849, 0.0893, 0.0589, 0.0754, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0226, 0.0222, 0.0250, 0.0240, 0.0213, 0.0199, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 17:42:30,019 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 17:42:35,765 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 17:42:35,790 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 17:42:45,282 INFO [train.py:903] (0/4) Epoch 10, batch 4900, loss[loss=0.2662, simple_loss=0.3281, pruned_loss=0.1021, over 13770.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3125, pruned_loss=0.08464, over 3791815.81 frames. ], batch size: 136, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:42:45,297 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 17:43:01,825 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.269e+02 9.008e+02 2.184e+03, threshold=1.454e+03, percent-clipped=11.0 +2023-04-01 17:43:03,057 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 17:43:25,418 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1070, 1.2044, 1.6525, 1.0568, 2.4728, 3.2982, 3.0121, 3.4573], + device='cuda:0'), covar=tensor([0.1606, 0.3372, 0.2872, 0.2161, 0.0480, 0.0160, 0.0216, 0.0184], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0293, 0.0318, 0.0247, 0.0211, 0.0151, 0.0203, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 17:43:45,941 INFO [train.py:903] (0/4) Epoch 10, batch 4950, loss[loss=0.2474, simple_loss=0.3154, pruned_loss=0.08973, over 19541.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3139, pruned_loss=0.08563, over 3797298.65 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:03,262 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 17:44:26,211 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 17:44:44,945 INFO [train.py:903] (0/4) Epoch 10, batch 5000, loss[loss=0.2347, simple_loss=0.3034, pruned_loss=0.08296, over 19422.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3131, pruned_loss=0.08554, over 3803639.32 frames. 
], batch size: 48, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:54,555 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8811, 2.1335, 2.3684, 2.6568, 2.6603, 2.4213, 2.3350, 2.8432], + device='cuda:0'), covar=tensor([0.0750, 0.1704, 0.1234, 0.0905, 0.1144, 0.0435, 0.1001, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0352, 0.0293, 0.0240, 0.0294, 0.0245, 0.0277, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:44:56,393 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 17:45:01,936 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.419e+02 6.464e+02 8.305e+02 1.628e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 17:45:06,368 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 17:45:45,174 INFO [train.py:903] (0/4) Epoch 10, batch 5050, loss[loss=0.2241, simple_loss=0.2981, pruned_loss=0.0751, over 19739.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3132, pruned_loss=0.08558, over 3810089.50 frames. ], batch size: 51, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:45:51,070 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:46:10,883 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66522.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:46:19,330 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 17:46:28,956 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2339, 1.4583, 1.6215, 1.8219, 2.8708, 1.3229, 2.1821, 3.1038], + device='cuda:0'), covar=tensor([0.0481, 0.2460, 0.2482, 0.1492, 0.0685, 0.2229, 0.1172, 0.0333], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0330, 0.0348, 0.0314, 0.0339, 0.0329, 0.0318, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:46:31,331 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66539.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:46:44,531 INFO [train.py:903] (0/4) Epoch 10, batch 5100, loss[loss=0.2099, simple_loss=0.2926, pruned_loss=0.06358, over 19841.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3137, pruned_loss=0.08624, over 3792495.76 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:46:57,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 17:46:59,528 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 17:47:01,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.864e+02 7.142e+02 9.030e+02 2.803e+03, threshold=1.428e+03, percent-clipped=6.0 +2023-04-01 17:47:04,008 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-01 17:47:08,937 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66571.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:39,596 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66596.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:45,738 INFO [train.py:903] (0/4) Epoch 10, batch 5150, loss[loss=0.2583, simple_loss=0.3271, pruned_loss=0.09473, over 19451.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3129, pruned_loss=0.08549, over 3783637.47 frames. ], batch size: 70, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:47:56,055 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2211, 2.0900, 2.0473, 2.4095, 2.2990, 1.9209, 1.9748, 2.2495], + device='cuda:0'), covar=tensor([0.0850, 0.1498, 0.1292, 0.0845, 0.1050, 0.0729, 0.1159, 0.0723], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0349, 0.0291, 0.0237, 0.0291, 0.0244, 0.0274, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:47:56,922 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 17:48:00,632 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 17:48:09,059 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:10,460 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:29,345 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66637.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:48:33,111 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 17:48:45,644 INFO [train.py:903] (0/4) Epoch 10, batch 5200, loss[loss=0.2487, simple_loss=0.323, pruned_loss=0.08721, over 18820.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3117, pruned_loss=0.08426, over 3801532.89 frames. ], batch size: 74, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:49:00,681 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 17:49:02,885 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 5.688e+02 7.032e+02 8.430e+02 1.656e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-04-01 17:49:44,615 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 17:49:46,899 INFO [train.py:903] (0/4) Epoch 10, batch 5250, loss[loss=0.2165, simple_loss=0.2792, pruned_loss=0.07695, over 19803.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.313, pruned_loss=0.08511, over 3802017.36 frames. ], batch size: 48, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:50:28,104 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:50:44,976 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66749.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:50:47,100 INFO [train.py:903] (0/4) Epoch 10, batch 5300, loss[loss=0.202, simple_loss=0.2736, pruned_loss=0.06519, over 19765.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3128, pruned_loss=0.08502, over 3789788.08 frames. 
], batch size: 46, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:51:03,216 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 17:51:04,350 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.985e+02 7.858e+02 1.069e+03 1.957e+03, threshold=1.572e+03, percent-clipped=7.0 +2023-04-01 17:51:47,684 INFO [train.py:903] (0/4) Epoch 10, batch 5350, loss[loss=0.2444, simple_loss=0.3225, pruned_loss=0.08316, over 19785.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3135, pruned_loss=0.0853, over 3799848.53 frames. ], batch size: 56, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:52:19,369 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 17:52:46,929 INFO [train.py:903] (0/4) Epoch 10, batch 5400, loss[loss=0.2384, simple_loss=0.3079, pruned_loss=0.08445, over 19576.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3136, pruned_loss=0.08542, over 3814149.15 frames. ], batch size: 52, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:52:52,114 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 17:53:05,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.633e+02 6.871e+02 8.512e+02 1.525e+03, threshold=1.374e+03, percent-clipped=0.0 +2023-04-01 17:53:18,082 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:53:26,403 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66883.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:53:37,766 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66893.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:53:47,839 INFO [train.py:903] (0/4) Epoch 10, batch 5450, loss[loss=0.2489, simple_loss=0.322, pruned_loss=0.08789, over 19487.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3139, pruned_loss=0.08561, over 3827517.57 frames. ], batch size: 64, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:49,495 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:54:08,206 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66918.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:54:15,781 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9896, 1.6810, 1.5653, 2.0122, 1.8730, 1.7911, 1.7190, 1.8843], + device='cuda:0'), covar=tensor([0.0875, 0.1573, 0.1309, 0.0959, 0.1194, 0.0491, 0.1074, 0.0682], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0353, 0.0291, 0.0239, 0.0295, 0.0244, 0.0278, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 17:54:47,623 INFO [train.py:903] (0/4) Epoch 10, batch 5500, loss[loss=0.2181, simple_loss=0.293, pruned_loss=0.07161, over 19831.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3133, pruned_loss=0.08512, over 3832639.28 frames. ], batch size: 52, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:06,146 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.851e+02 7.428e+02 9.674e+02 2.066e+03, threshold=1.486e+03, percent-clipped=6.0 +2023-04-01 17:55:10,682 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 17:55:16,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:36,746 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:41,770 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 17:55:44,766 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66998.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:55:48,023 INFO [train.py:903] (0/4) Epoch 10, batch 5550, loss[loss=0.1961, simple_loss=0.2648, pruned_loss=0.06371, over 19735.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3122, pruned_loss=0.08434, over 3828887.87 frames. ], batch size: 45, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:54,309 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 17:56:05,789 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:56:42,310 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 17:56:47,795 INFO [train.py:903] (0/4) Epoch 10, batch 5600, loss[loss=0.2566, simple_loss=0.333, pruned_loss=0.09009, over 18187.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3126, pruned_loss=0.08465, over 3824495.07 frames. ], batch size: 83, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:56:52,340 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-01 17:57:06,482 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 5.918e+02 7.804e+02 1.015e+03 2.269e+03, threshold=1.561e+03, percent-clipped=7.0 +2023-04-01 17:57:38,369 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67093.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:57:47,808 INFO [train.py:903] (0/4) Epoch 10, batch 5650, loss[loss=0.2582, simple_loss=0.3315, pruned_loss=0.09243, over 19776.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3133, pruned_loss=0.08473, over 3840106.26 frames. ], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:48,111 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:58:33,181 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 17:58:47,425 INFO [train.py:903] (0/4) Epoch 10, batch 5700, loss[loss=0.2116, simple_loss=0.2868, pruned_loss=0.06814, over 19614.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3138, pruned_loss=0.08582, over 3830730.23 frames. 
], batch size: 50, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:05,254 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 6.145e+02 7.592e+02 1.064e+03 2.520e+03, threshold=1.518e+03, percent-clipped=7.0 +2023-04-01 17:59:06,717 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2573, 1.2828, 1.5108, 0.9746, 2.5103, 3.3396, 2.9785, 3.4704], + device='cuda:0'), covar=tensor([0.1428, 0.3225, 0.2955, 0.2145, 0.0455, 0.0157, 0.0211, 0.0196], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0290, 0.0313, 0.0245, 0.0206, 0.0150, 0.0202, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 17:59:47,165 INFO [train.py:903] (0/4) Epoch 10, batch 5750, loss[loss=0.2609, simple_loss=0.3354, pruned_loss=0.09323, over 19748.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.314, pruned_loss=0.08572, over 3816540.56 frames. ], batch size: 63, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:48,359 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 17:59:48,838 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1318, 2.1142, 2.2546, 3.1013, 2.0725, 2.9899, 2.7638, 2.1395], + device='cuda:0'), covar=tensor([0.3557, 0.3157, 0.1374, 0.1963, 0.3658, 0.1463, 0.3006, 0.2549], + device='cuda:0'), in_proj_covar=tensor([0.0773, 0.0785, 0.0639, 0.0885, 0.0762, 0.0690, 0.0773, 0.0697], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 17:59:57,179 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67208.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:59:57,974 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 18:00:00,570 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9755, 4.5450, 2.6991, 3.9194, 1.0069, 4.1883, 4.2531, 4.4046], + device='cuda:0'), covar=tensor([0.0549, 0.0932, 0.1886, 0.0755, 0.4092, 0.0649, 0.0750, 0.0908], + device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0358, 0.0424, 0.0311, 0.0372, 0.0352, 0.0346, 0.0377], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 18:00:02,616 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 18:00:03,008 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2861, 2.8605, 2.0499, 2.1098, 2.0278, 2.3676, 0.7260, 1.9867], + device='cuda:0'), covar=tensor([0.0407, 0.0468, 0.0502, 0.0760, 0.0754, 0.0713, 0.0967, 0.0840], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0328, 0.0323, 0.0348, 0.0416, 0.0344, 0.0302, 0.0322], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:00:47,882 INFO [train.py:903] (0/4) Epoch 10, batch 5800, loss[loss=0.2201, simple_loss=0.2932, pruned_loss=0.07353, over 19840.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3137, pruned_loss=0.08539, over 3825075.70 frames. 
], batch size: 52, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 18:00:51,529 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67254.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:01:06,015 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.662e+02 5.628e+02 6.923e+02 8.923e+02 2.275e+03, threshold=1.385e+03, percent-clipped=6.0 +2023-04-01 18:01:20,781 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67279.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:01:47,312 INFO [train.py:903] (0/4) Epoch 10, batch 5850, loss[loss=0.209, simple_loss=0.2789, pruned_loss=0.06955, over 19743.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3137, pruned_loss=0.0854, over 3830643.91 frames. ], batch size: 46, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:09,366 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:02:48,435 INFO [train.py:903] (0/4) Epoch 10, batch 5900, loss[loss=0.2124, simple_loss=0.2836, pruned_loss=0.07058, over 19419.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3122, pruned_loss=0.08449, over 3835281.29 frames. ], batch size: 48, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:52,959 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 18:03:05,153 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.688e+02 6.588e+02 8.588e+02 1.646e+03, threshold=1.318e+03, percent-clipped=2.0 +2023-04-01 18:03:11,570 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 18:03:26,047 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6179, 1.2807, 1.3203, 2.0427, 1.6315, 1.8567, 2.0190, 1.7835], + device='cuda:0'), covar=tensor([0.0837, 0.1029, 0.1136, 0.0843, 0.0821, 0.0694, 0.0782, 0.0646], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0226, 0.0224, 0.0251, 0.0237, 0.0214, 0.0198, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 18:03:47,172 INFO [train.py:903] (0/4) Epoch 10, batch 5950, loss[loss=0.248, simple_loss=0.314, pruned_loss=0.09096, over 19657.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3119, pruned_loss=0.08441, over 3831613.26 frames. ], batch size: 55, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:04:26,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:38,296 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67445.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:45,815 INFO [train.py:903] (0/4) Epoch 10, batch 6000, loss[loss=0.2135, simple_loss=0.2812, pruned_loss=0.07287, over 19765.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3117, pruned_loss=0.08467, over 3820260.96 frames. ], batch size: 47, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:04:45,816 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 18:04:58,243 INFO [train.py:937] (0/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2805, pruned_loss=0.03952, over 944034.00 frames. 
+2023-04-01 18:04:58,244 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 18:05:08,753 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4513, 1.6076, 2.0306, 1.7338, 3.4002, 2.8255, 3.5489, 1.6622], + device='cuda:0'), covar=tensor([0.2054, 0.3472, 0.2091, 0.1554, 0.1241, 0.1572, 0.1411, 0.3167], + device='cuda:0'), in_proj_covar=tensor([0.0476, 0.0561, 0.0582, 0.0427, 0.0580, 0.0483, 0.0638, 0.0478], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 18:05:15,104 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67464.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:05:17,959 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.414e+02 6.867e+02 8.657e+02 1.897e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 18:05:22,907 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0895, 1.7936, 1.6793, 2.1376, 1.8554, 1.8195, 1.7441, 2.0489], + device='cuda:0'), covar=tensor([0.0813, 0.1484, 0.1303, 0.0837, 0.1152, 0.0481, 0.1144, 0.0619], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0357, 0.0294, 0.0241, 0.0297, 0.0246, 0.0282, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:05:44,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67489.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:05:59,559 INFO [train.py:903] (0/4) Epoch 10, batch 6050, loss[loss=0.217, simple_loss=0.2958, pruned_loss=0.06907, over 19590.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3111, pruned_loss=0.08395, over 3832977.02 frames. ], batch size: 61, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:06:35,143 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:06:59,156 INFO [train.py:903] (0/4) Epoch 10, batch 6100, loss[loss=0.1872, simple_loss=0.2694, pruned_loss=0.05251, over 19853.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3109, pruned_loss=0.08365, over 3845395.03 frames. ], batch size: 52, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:07:10,593 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:07:19,957 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.925e+02 6.739e+02 8.410e+02 2.337e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-01 18:07:59,365 INFO [train.py:903] (0/4) Epoch 10, batch 6150, loss[loss=0.2412, simple_loss=0.3086, pruned_loss=0.08692, over 19482.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3119, pruned_loss=0.08462, over 3840247.99 frames. ], batch size: 49, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:08:28,208 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 18:08:59,762 INFO [train.py:903] (0/4) Epoch 10, batch 6200, loss[loss=0.2583, simple_loss=0.3342, pruned_loss=0.09122, over 19595.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3125, pruned_loss=0.08483, over 3834406.74 frames. ], batch size: 61, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:09:02,991 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-01 18:09:20,095 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.059e+02 6.776e+02 9.553e+02 2.024e+03, threshold=1.355e+03, percent-clipped=7.0 +2023-04-01 18:09:46,790 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:56,453 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:59,569 INFO [train.py:903] (0/4) Epoch 10, batch 6250, loss[loss=0.2118, simple_loss=0.2961, pruned_loss=0.0638, over 19741.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.312, pruned_loss=0.08456, over 3836152.01 frames. ], batch size: 51, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:10:09,268 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 18:10:09,897 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67710.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:15,754 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67715.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:30,842 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 18:10:59,334 INFO [train.py:903] (0/4) Epoch 10, batch 6300, loss[loss=0.2118, simple_loss=0.2823, pruned_loss=0.07069, over 19744.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3113, pruned_loss=0.08388, over 3839829.09 frames. ], batch size: 46, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:11:19,322 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 5.371e+02 6.798e+02 8.698e+02 1.915e+03, threshold=1.360e+03, percent-clipped=7.0 +2023-04-01 18:11:58,679 INFO [train.py:903] (0/4) Epoch 10, batch 6350, loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06697, over 19746.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3115, pruned_loss=0.08418, over 3833161.50 frames. ], batch size: 51, lr: 8.16e-03, grad_scale: 4.0 +2023-04-01 18:12:16,854 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67816.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:47,253 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:58,567 INFO [train.py:903] (0/4) Epoch 10, batch 6400, loss[loss=0.2505, simple_loss=0.325, pruned_loss=0.08802, over 18778.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3122, pruned_loss=0.08483, over 3828975.85 frames. ], batch size: 74, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:13:18,749 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.283e+02 6.601e+02 9.298e+02 1.582e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-01 18:13:26,972 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:13:59,329 INFO [train.py:903] (0/4) Epoch 10, batch 6450, loss[loss=0.2355, simple_loss=0.3057, pruned_loss=0.0826, over 19341.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3122, pruned_loss=0.08507, over 3821701.44 frames. ], batch size: 66, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:14:42,673 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-01 18:14:59,317 INFO [train.py:903] (0/4) Epoch 10, batch 6500, loss[loss=0.1986, simple_loss=0.2718, pruned_loss=0.06274, over 19361.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3116, pruned_loss=0.08435, over 3817681.95 frames. ], batch size: 47, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:15:05,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 18:15:19,435 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.752e+02 5.758e+02 7.165e+02 9.309e+02 2.290e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 18:15:47,551 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:15:57,624 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:15:59,435 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-68000.pt +2023-04-01 18:16:01,758 INFO [train.py:903] (0/4) Epoch 10, batch 6550, loss[loss=0.2413, simple_loss=0.3116, pruned_loss=0.08555, over 19833.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3124, pruned_loss=0.08436, over 3824903.94 frames. ], batch size: 52, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:16:51,299 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:02,075 INFO [train.py:903] (0/4) Epoch 10, batch 6600, loss[loss=0.2514, simple_loss=0.3232, pruned_loss=0.0898, over 19780.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3128, pruned_loss=0.08413, over 3822535.25 frames. ], batch size: 56, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:17:05,653 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68054.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:23,334 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.670e+02 5.913e+02 7.324e+02 8.709e+02 1.479e+03, threshold=1.465e+03, percent-clipped=1.0 +2023-04-01 18:18:02,824 INFO [train.py:903] (0/4) Epoch 10, batch 6650, loss[loss=0.2715, simple_loss=0.3345, pruned_loss=0.1042, over 19685.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3133, pruned_loss=0.08511, over 3808588.66 frames. ], batch size: 58, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:18:07,728 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68105.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:19:02,662 INFO [train.py:903] (0/4) Epoch 10, batch 6700, loss[loss=0.2294, simple_loss=0.287, pruned_loss=0.08588, over 19787.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3122, pruned_loss=0.08435, over 3818069.66 frames. ], batch size: 48, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:19:09,883 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:19:22,650 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.762e+02 5.919e+02 7.224e+02 9.287e+02 2.274e+03, threshold=1.445e+03, percent-clipped=4.0 +2023-04-01 18:19:24,149 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:00,032 INFO [train.py:903] (0/4) Epoch 10, batch 6750, loss[loss=0.2128, simple_loss=0.2977, pruned_loss=0.06396, over 19702.00 frames. 
], tot_loss[loss=0.2402, simple_loss=0.3117, pruned_loss=0.08434, over 3822842.65 frames. ], batch size: 59, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:20:51,822 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:56,750 INFO [train.py:903] (0/4) Epoch 10, batch 6800, loss[loss=0.2616, simple_loss=0.3351, pruned_loss=0.09405, over 19313.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.312, pruned_loss=0.08437, over 3820577.22 frames. ], batch size: 66, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:21:15,044 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.210e+02 6.150e+02 7.610e+02 9.082e+02 1.904e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 18:21:18,274 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:21:25,413 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-10.pt +2023-04-01 18:21:41,028 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 18:21:42,080 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 18:21:44,655 INFO [train.py:903] (0/4) Epoch 11, batch 0, loss[loss=0.2342, simple_loss=0.3074, pruned_loss=0.08051, over 19677.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3074, pruned_loss=0.08051, over 19677.00 frames. ], batch size: 53, lr: 7.77e-03, grad_scale: 8.0 +2023-04-01 18:21:44,656 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 18:21:56,759 INFO [train.py:937] (0/4) Epoch 11, validation: loss=0.181, simple_loss=0.2818, pruned_loss=0.04012, over 944034.00 frames. +2023-04-01 18:21:56,759 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 18:22:09,351 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 18:22:57,934 INFO [train.py:903] (0/4) Epoch 11, batch 50, loss[loss=0.2649, simple_loss=0.3319, pruned_loss=0.09889, over 19532.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3096, pruned_loss=0.08311, over 877066.71 frames. ], batch size: 54, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:23:15,230 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:23:35,577 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 18:23:46,499 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 18:23:46,895 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 5.747e+02 7.027e+02 9.557e+02 1.564e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 18:24:00,205 INFO [train.py:903] (0/4) Epoch 11, batch 100, loss[loss=0.2341, simple_loss=0.312, pruned_loss=0.07808, over 19702.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3067, pruned_loss=0.08086, over 1536018.84 frames. ], batch size: 59, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:24:13,707 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-01 18:24:18,543 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0745, 1.2355, 1.5982, 0.8853, 2.5214, 3.0708, 2.7522, 3.2348], + device='cuda:0'), covar=tensor([0.1506, 0.3191, 0.2795, 0.2170, 0.0417, 0.0177, 0.0215, 0.0189], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0294, 0.0319, 0.0250, 0.0211, 0.0154, 0.0203, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:24:42,541 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:47,923 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:57,051 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68425.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:01,043 INFO [train.py:903] (0/4) Epoch 11, batch 150, loss[loss=0.3229, simple_loss=0.3663, pruned_loss=0.1397, over 13682.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3094, pruned_loss=0.08313, over 2035474.63 frames. ], batch size: 136, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:25:11,923 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:25,283 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68449.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:25:26,556 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68450.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:36,604 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:47,788 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.281e+02 6.719e+02 8.986e+02 1.619e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-01 18:26:00,691 INFO [train.py:903] (0/4) Epoch 11, batch 200, loss[loss=0.1813, simple_loss=0.258, pruned_loss=0.05231, over 19047.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3068, pruned_loss=0.08146, over 2443258.04 frames. ], batch size: 42, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:26:02,031 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 18:27:03,595 INFO [train.py:903] (0/4) Epoch 11, batch 250, loss[loss=0.2386, simple_loss=0.3101, pruned_loss=0.08351, over 19737.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3078, pruned_loss=0.08161, over 2753531.97 frames. ], batch size: 63, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:27:46,843 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68564.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:27:51,041 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.512e+02 6.613e+02 8.406e+02 1.798e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 18:28:07,069 INFO [train.py:903] (0/4) Epoch 11, batch 300, loss[loss=0.2379, simple_loss=0.3179, pruned_loss=0.07892, over 19441.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3066, pruned_loss=0.08043, over 3004924.86 frames. 
], batch size: 64, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:28:27,736 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:29:09,812 INFO [train.py:903] (0/4) Epoch 11, batch 350, loss[loss=0.2125, simple_loss=0.288, pruned_loss=0.06853, over 19568.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3085, pruned_loss=0.08117, over 3183840.95 frames. ], batch size: 52, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:29:16,655 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 18:29:44,107 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3811, 2.1364, 2.1982, 2.4253, 2.2042, 1.9563, 2.1204, 2.4327], + device='cuda:0'), covar=tensor([0.0681, 0.1275, 0.0971, 0.0705, 0.0951, 0.0455, 0.0892, 0.0475], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0353, 0.0291, 0.0239, 0.0298, 0.0247, 0.0277, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:29:57,394 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.409e+02 6.235e+02 7.842e+02 1.948e+03, threshold=1.247e+03, percent-clipped=7.0 +2023-04-01 18:30:09,871 INFO [train.py:903] (0/4) Epoch 11, batch 400, loss[loss=0.2203, simple_loss=0.2914, pruned_loss=0.07457, over 19276.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3099, pruned_loss=0.08232, over 3321243.23 frames. ], batch size: 44, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:30:40,464 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:30:54,254 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:11,175 INFO [train.py:903] (0/4) Epoch 11, batch 450, loss[loss=0.2258, simple_loss=0.3047, pruned_loss=0.07347, over 19693.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3095, pruned_loss=0.08225, over 3442150.22 frames. ], batch size: 60, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:31:25,452 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68739.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:49,567 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 18:31:50,636 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 18:31:51,642 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:59,522 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.180e+02 6.491e+02 8.751e+02 1.660e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-01 18:32:13,200 INFO [train.py:903] (0/4) Epoch 11, batch 500, loss[loss=0.2434, simple_loss=0.321, pruned_loss=0.08285, over 19472.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3095, pruned_loss=0.08181, over 3538974.82 frames. ], batch size: 64, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:04,547 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68820.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:33:17,983 INFO [train.py:903] (0/4) Epoch 11, batch 550, loss[loss=0.1806, simple_loss=0.2565, pruned_loss=0.05232, over 19387.00 frames. 
], tot_loss[loss=0.2364, simple_loss=0.3094, pruned_loss=0.08174, over 3601617.82 frames. ], batch size: 47, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:36,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68845.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:34:06,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.206e+02 6.523e+02 8.559e+02 1.532e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 18:34:18,030 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:34:21,111 INFO [train.py:903] (0/4) Epoch 11, batch 600, loss[loss=0.2737, simple_loss=0.3271, pruned_loss=0.1101, over 19497.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3104, pruned_loss=0.08275, over 3642355.37 frames. ], batch size: 49, lr: 7.73e-03, grad_scale: 8.0 +2023-04-01 18:35:07,359 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 18:35:23,591 INFO [train.py:903] (0/4) Epoch 11, batch 650, loss[loss=0.2831, simple_loss=0.3482, pruned_loss=0.1091, over 19300.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3106, pruned_loss=0.08274, over 3676877.04 frames. ], batch size: 66, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:35:36,868 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:36:11,511 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0346, 1.9305, 2.0229, 1.7700, 4.4383, 1.1383, 2.5046, 4.7869], + device='cuda:0'), covar=tensor([0.0343, 0.2428, 0.2384, 0.1685, 0.0774, 0.2584, 0.1258, 0.0197], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0336, 0.0351, 0.0318, 0.0346, 0.0332, 0.0330, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:36:14,702 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 5.561e+02 7.157e+02 8.945e+02 2.089e+03, threshold=1.431e+03, percent-clipped=8.0 +2023-04-01 18:36:26,502 INFO [train.py:903] (0/4) Epoch 11, batch 700, loss[loss=0.2256, simple_loss=0.2988, pruned_loss=0.07622, over 19487.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3098, pruned_loss=0.0818, over 3706788.90 frames. ], batch size: 49, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:36:41,605 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.36 vs. limit=5.0 +2023-04-01 18:37:30,468 INFO [train.py:903] (0/4) Epoch 11, batch 750, loss[loss=0.2195, simple_loss=0.3089, pruned_loss=0.06508, over 19652.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3103, pruned_loss=0.08233, over 3730359.02 frames. 
], batch size: 58, lr: 7.72e-03, grad_scale: 4.0 +2023-04-01 18:37:44,838 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3750, 2.2121, 1.8726, 1.7428, 1.4741, 1.7163, 0.5555, 1.2502], + device='cuda:0'), covar=tensor([0.0396, 0.0443, 0.0379, 0.0617, 0.0923, 0.0759, 0.0905, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0329, 0.0330, 0.0352, 0.0425, 0.0349, 0.0309, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:37:52,518 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69047.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:01,875 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:09,149 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:20,856 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.289e+02 6.423e+02 8.063e+02 1.861e+03, threshold=1.285e+03, percent-clipped=2.0 +2023-04-01 18:38:33,469 INFO [train.py:903] (0/4) Epoch 11, batch 800, loss[loss=0.2172, simple_loss=0.302, pruned_loss=0.06618, over 19307.00 frames. ], tot_loss[loss=0.238, simple_loss=0.311, pruned_loss=0.08253, over 3739259.52 frames. ], batch size: 66, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:38:49,975 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 18:39:34,238 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6084, 4.1606, 2.6826, 3.7216, 1.1418, 3.8965, 3.9266, 3.9913], + device='cuda:0'), covar=tensor([0.0578, 0.1034, 0.1909, 0.0759, 0.3593, 0.0808, 0.0727, 0.0946], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0362, 0.0429, 0.0315, 0.0373, 0.0361, 0.0349, 0.0382], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 18:39:35,198 INFO [train.py:903] (0/4) Epoch 11, batch 850, loss[loss=0.1842, simple_loss=0.259, pruned_loss=0.05467, over 19329.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3098, pruned_loss=0.08169, over 3773559.54 frames. ], batch size: 44, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:39:39,307 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:39:41,085 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-04-01 18:40:04,976 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7958, 1.8267, 2.0037, 2.5302, 1.6301, 2.3238, 2.2853, 1.9172], + device='cuda:0'), covar=tensor([0.3388, 0.2936, 0.1486, 0.1488, 0.3096, 0.1444, 0.3338, 0.2641], + device='cuda:0'), in_proj_covar=tensor([0.0779, 0.0795, 0.0641, 0.0886, 0.0771, 0.0698, 0.0780, 0.0704], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 18:40:09,383 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3565, 3.6853, 3.9870, 4.1158, 1.5380, 3.8280, 3.3916, 3.3984], + device='cuda:0'), covar=tensor([0.1775, 0.1489, 0.0940, 0.0975, 0.6325, 0.1159, 0.1064, 0.1876], + device='cuda:0'), in_proj_covar=tensor([0.0651, 0.0589, 0.0772, 0.0660, 0.0711, 0.0531, 0.0476, 0.0716], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 18:40:12,541 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:18,364 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:26,036 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.808e+02 7.359e+02 9.451e+02 2.011e+03, threshold=1.472e+03, percent-clipped=12.0 +2023-04-01 18:40:32,104 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 18:40:38,098 INFO [train.py:903] (0/4) Epoch 11, batch 900, loss[loss=0.2463, simple_loss=0.324, pruned_loss=0.0843, over 19542.00 frames. ], tot_loss[loss=0.237, simple_loss=0.31, pruned_loss=0.08204, over 3790686.06 frames. ], batch size: 56, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:40:39,576 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:47,291 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:41:28,531 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 18:41:42,669 INFO [train.py:903] (0/4) Epoch 11, batch 950, loss[loss=0.205, simple_loss=0.2799, pruned_loss=0.06501, over 19612.00 frames. ], tot_loss[loss=0.237, simple_loss=0.31, pruned_loss=0.08197, over 3793919.32 frames. ], batch size: 50, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:41:49,486 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 18:42:01,254 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:42:11,228 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 18:42:24,823 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69263.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:42:33,741 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.204e+02 6.315e+02 7.601e+02 2.315e+03, threshold=1.263e+03, percent-clipped=1.0 +2023-04-01 18:42:47,223 INFO [train.py:903] (0/4) Epoch 11, batch 1000, loss[loss=0.2165, simple_loss=0.3016, pruned_loss=0.06568, over 19521.00 frames. 
], tot_loss[loss=0.2373, simple_loss=0.3106, pruned_loss=0.08205, over 3786082.25 frames. ], batch size: 54, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:01,237 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:27,058 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:43,508 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 18:43:48,898 INFO [train.py:903] (0/4) Epoch 11, batch 1050, loss[loss=0.3022, simple_loss=0.3516, pruned_loss=0.1263, over 13095.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3101, pruned_loss=0.082, over 3795293.17 frames. ], batch size: 136, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:56,987 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,148 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7995, 4.3295, 2.5265, 3.8105, 1.3100, 4.1024, 4.0904, 4.1225], + device='cuda:0'), covar=tensor([0.0508, 0.0866, 0.2003, 0.0788, 0.3561, 0.0735, 0.0731, 0.1015], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0361, 0.0428, 0.0312, 0.0375, 0.0361, 0.0350, 0.0383], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 18:43:59,236 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,354 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1218, 2.1020, 1.7117, 1.5180, 1.3729, 1.6111, 0.4847, 1.0775], + device='cuda:0'), covar=tensor([0.0669, 0.0589, 0.0475, 0.0842, 0.1215, 0.0877, 0.1084, 0.0973], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0329, 0.0331, 0.0352, 0.0425, 0.0348, 0.0309, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:44:02,804 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8846, 1.3309, 1.0404, 1.0085, 1.1376, 0.9687, 0.9725, 1.2357], + device='cuda:0'), covar=tensor([0.0555, 0.0699, 0.1007, 0.0545, 0.0490, 0.1071, 0.0500, 0.0400], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0301, 0.0328, 0.0246, 0.0237, 0.0318, 0.0289, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:44:24,559 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 18:44:38,148 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.222e+02 6.475e+02 7.672e+02 1.315e+03, threshold=1.295e+03, percent-clipped=1.0 +2023-04-01 18:44:49,372 INFO [train.py:903] (0/4) Epoch 11, batch 1100, loss[loss=0.1938, simple_loss=0.2663, pruned_loss=0.06066, over 19753.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3107, pruned_loss=0.08248, over 3810337.12 frames. 
], batch size: 46, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:44:57,866 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:24,080 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69405.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:32,705 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:39,566 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:53,092 INFO [train.py:903] (0/4) Epoch 11, batch 1150, loss[loss=0.2348, simple_loss=0.3137, pruned_loss=0.07795, over 19763.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3103, pruned_loss=0.08197, over 3829273.20 frames. ], batch size: 56, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:45:59,884 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 18:46:11,223 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:46:41,982 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.885e+02 7.181e+02 8.495e+02 1.651e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-01 18:46:55,768 INFO [train.py:903] (0/4) Epoch 11, batch 1200, loss[loss=0.2735, simple_loss=0.3391, pruned_loss=0.1039, over 18181.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3125, pruned_loss=0.08322, over 3827823.81 frames. ], batch size: 83, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:46:59,269 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5941, 1.3955, 1.3766, 2.1610, 1.5980, 2.0209, 2.1531, 1.8298], + device='cuda:0'), covar=tensor([0.0740, 0.0910, 0.0972, 0.0720, 0.0838, 0.0604, 0.0710, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0227, 0.0224, 0.0250, 0.0238, 0.0213, 0.0199, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 18:47:27,273 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 18:47:48,958 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:53,380 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:59,006 INFO [train.py:903] (0/4) Epoch 11, batch 1250, loss[loss=0.2233, simple_loss=0.2892, pruned_loss=0.07868, over 16944.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3134, pruned_loss=0.08401, over 3800305.82 frames. ], batch size: 37, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:48:00,168 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:48:49,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.575e+02 6.899e+02 8.428e+02 1.860e+03, threshold=1.380e+03, percent-clipped=3.0 +2023-04-01 18:49:00,828 INFO [train.py:903] (0/4) Epoch 11, batch 1300, loss[loss=0.2362, simple_loss=0.3077, pruned_loss=0.08234, over 19770.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3127, pruned_loss=0.08396, over 3807082.15 frames. 
], batch size: 54, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:49:10,187 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:49:38,567 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69607.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:49:39,102 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 18:50:04,325 INFO [train.py:903] (0/4) Epoch 11, batch 1350, loss[loss=0.2316, simple_loss=0.3166, pruned_loss=0.07325, over 19712.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3117, pruned_loss=0.08328, over 3815692.28 frames. ], batch size: 59, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:50:13,495 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:19,156 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:26,756 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:54,481 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.709e+02 6.895e+02 8.476e+02 1.895e+03, threshold=1.379e+03, percent-clipped=5.0 +2023-04-01 18:51:08,228 INFO [train.py:903] (0/4) Epoch 11, batch 1400, loss[loss=0.2764, simple_loss=0.3386, pruned_loss=0.1071, over 13037.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3116, pruned_loss=0.08368, over 3809371.32 frames. ], batch size: 136, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:51:11,985 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69682.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:51:14,172 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0973, 1.3139, 1.5546, 1.5418, 2.6632, 1.0152, 2.0826, 2.8808], + device='cuda:0'), covar=tensor([0.0536, 0.2597, 0.2416, 0.1506, 0.0731, 0.2337, 0.1094, 0.0400], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0334, 0.0346, 0.0314, 0.0343, 0.0330, 0.0327, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:51:25,008 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69692.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:51:36,450 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69702.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:03,526 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69722.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:52:09,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 18:52:10,949 INFO [train.py:903] (0/4) Epoch 11, batch 1450, loss[loss=0.3296, simple_loss=0.3803, pruned_loss=0.1394, over 19490.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3128, pruned_loss=0.08446, over 3799801.73 frames. 
], batch size: 64, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:52:12,374 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:26,628 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2578, 1.2054, 1.6866, 1.0850, 2.5952, 3.6047, 3.2110, 3.7391], + device='cuda:0'), covar=tensor([0.1513, 0.3540, 0.2975, 0.2120, 0.0527, 0.0130, 0.0211, 0.0185], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0297, 0.0321, 0.0250, 0.0215, 0.0154, 0.0203, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:52:36,731 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:44,780 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:49,242 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4083, 1.3674, 1.4188, 1.5122, 2.9796, 1.0478, 2.1663, 3.2595], + device='cuda:0'), covar=tensor([0.0452, 0.2302, 0.2395, 0.1521, 0.0638, 0.2258, 0.1109, 0.0293], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0330, 0.0344, 0.0312, 0.0340, 0.0328, 0.0326, 0.0345], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:53:01,499 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.344e+02 6.531e+02 8.357e+02 2.062e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 18:53:10,356 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:13,432 INFO [train.py:903] (0/4) Epoch 11, batch 1500, loss[loss=0.29, simple_loss=0.3444, pruned_loss=0.1178, over 12945.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3135, pruned_loss=0.08499, over 3804981.38 frames. ], batch size: 136, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:53:36,789 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:43,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:54:15,941 INFO [train.py:903] (0/4) Epoch 11, batch 1550, loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06523, over 19513.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3121, pruned_loss=0.08406, over 3822346.66 frames. ], batch size: 54, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:54:34,319 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-01 18:54:35,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5729, 1.2471, 1.1608, 1.4011, 1.1578, 1.3646, 1.0892, 1.3813], + device='cuda:0'), covar=tensor([0.0969, 0.1178, 0.1503, 0.0965, 0.1180, 0.0581, 0.1385, 0.0791], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0350, 0.0292, 0.0241, 0.0300, 0.0244, 0.0278, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:54:38,372 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:07,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 5.814e+02 6.999e+02 8.951e+02 2.972e+03, threshold=1.400e+03, percent-clipped=5.0 +2023-04-01 18:55:09,075 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:20,441 INFO [train.py:903] (0/4) Epoch 11, batch 1600, loss[loss=0.2578, simple_loss=0.3313, pruned_loss=0.09214, over 19544.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3109, pruned_loss=0.08331, over 3825416.30 frames. ], batch size: 56, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:55:36,919 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 18:55:41,192 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:46,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 18:55:48,135 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:07,298 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6816, 1.7540, 1.8542, 1.8139, 4.1603, 0.9710, 2.5556, 4.3768], + device='cuda:0'), covar=tensor([0.0316, 0.2459, 0.2407, 0.1629, 0.0669, 0.2669, 0.1255, 0.0211], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0335, 0.0348, 0.0316, 0.0344, 0.0331, 0.0329, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 18:56:12,820 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69920.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:20,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:23,826 INFO [train.py:903] (0/4) Epoch 11, batch 1650, loss[loss=0.19, simple_loss=0.263, pruned_loss=0.05855, over 18603.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3108, pruned_loss=0.08355, over 3815255.18 frames. 
], batch size: 41, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:56:33,402 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9928, 1.9486, 1.7028, 1.5508, 1.4203, 1.5642, 0.2576, 0.8861], + device='cuda:0'), covar=tensor([0.0434, 0.0409, 0.0290, 0.0426, 0.0861, 0.0541, 0.0858, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0325, 0.0326, 0.0349, 0.0422, 0.0346, 0.0306, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 18:57:00,739 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69958.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:16,658 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 5.294e+02 6.504e+02 8.314e+02 1.576e+03, threshold=1.301e+03, percent-clipped=2.0 +2023-04-01 18:57:26,314 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69978.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:57:27,016 INFO [train.py:903] (0/4) Epoch 11, batch 1700, loss[loss=0.2672, simple_loss=0.3412, pruned_loss=0.09661, over 18883.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3107, pruned_loss=0.08339, over 3803005.75 frames. ], batch size: 74, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:57:32,055 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:53,386 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-70000.pt +2023-04-01 18:57:59,049 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70003.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:58:03,148 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:09,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 18:58:29,317 INFO [train.py:903] (0/4) Epoch 11, batch 1750, loss[loss=0.2655, simple_loss=0.3309, pruned_loss=0.1001, over 17204.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3116, pruned_loss=0.08385, over 3795844.67 frames. ], batch size: 101, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:58:31,967 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:39,008 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70036.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:59:02,476 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70053.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:22,247 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 6.480e+02 8.127e+02 9.904e+02 1.581e+03, threshold=1.625e+03, percent-clipped=5.0 +2023-04-01 18:59:34,154 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:34,848 INFO [train.py:903] (0/4) Epoch 11, batch 1800, loss[loss=0.2867, simple_loss=0.3618, pruned_loss=0.1058, over 19674.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3124, pruned_loss=0.08388, over 3784349.41 frames. 
], batch size: 58, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 19:00:02,644 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:34,759 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:35,503 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 19:00:35,892 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:37,763 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:38,699 INFO [train.py:903] (0/4) Epoch 11, batch 1850, loss[loss=0.2138, simple_loss=0.2776, pruned_loss=0.07503, over 14845.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3109, pruned_loss=0.08288, over 3791802.72 frames. ], batch size: 32, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:05,157 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70151.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 19:01:06,365 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:01:13,686 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 19:01:30,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.171e+02 6.551e+02 7.968e+02 1.780e+03, threshold=1.310e+03, percent-clipped=1.0 +2023-04-01 19:01:41,528 INFO [train.py:903] (0/4) Epoch 11, batch 1900, loss[loss=0.2606, simple_loss=0.339, pruned_loss=0.09113, over 18257.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3109, pruned_loss=0.08311, over 3798416.88 frames. ], batch size: 83, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:57,132 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 19:02:04,772 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 19:02:28,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 19:02:42,955 INFO [train.py:903] (0/4) Epoch 11, batch 1950, loss[loss=0.2482, simple_loss=0.3219, pruned_loss=0.0872, over 19505.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3107, pruned_loss=0.0832, over 3795215.14 frames. ], batch size: 64, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:03:35,375 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.987e+02 6.114e+02 7.804e+02 3.131e+03, threshold=1.223e+03, percent-clipped=9.0 +2023-04-01 19:03:45,733 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:03:46,666 INFO [train.py:903] (0/4) Epoch 11, batch 2000, loss[loss=0.2808, simple_loss=0.3434, pruned_loss=0.109, over 18865.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3099, pruned_loss=0.08233, over 3806901.94 frames. 
], batch size: 74, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:04:04,122 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4326, 3.9023, 4.1187, 4.1143, 1.7369, 3.8706, 3.3944, 3.7677], + device='cuda:0'), covar=tensor([0.1434, 0.1038, 0.0634, 0.0637, 0.4933, 0.0837, 0.0646, 0.1196], + device='cuda:0'), in_proj_covar=tensor([0.0665, 0.0598, 0.0789, 0.0669, 0.0720, 0.0544, 0.0480, 0.0723], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 19:04:48,761 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 19:04:51,059 INFO [train.py:903] (0/4) Epoch 11, batch 2050, loss[loss=0.2197, simple_loss=0.2884, pruned_loss=0.07549, over 19015.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.309, pruned_loss=0.0817, over 3799368.35 frames. ], batch size: 42, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:05:07,040 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 19:05:08,140 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 19:05:27,817 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 19:05:43,665 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.481e+02 7.126e+02 9.531e+02 2.002e+03, threshold=1.425e+03, percent-clipped=10.0 +2023-04-01 19:05:54,412 INFO [train.py:903] (0/4) Epoch 11, batch 2100, loss[loss=0.2373, simple_loss=0.3196, pruned_loss=0.07754, over 19538.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3101, pruned_loss=0.08251, over 3801319.42 frames. ], batch size: 56, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:06:24,749 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 19:06:30,846 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70407.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 19:06:48,478 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 19:06:57,573 INFO [train.py:903] (0/4) Epoch 11, batch 2150, loss[loss=0.2511, simple_loss=0.3243, pruned_loss=0.08896, over 17451.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3104, pruned_loss=0.08294, over 3799387.50 frames. 
], batch size: 101, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:07:01,541 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70432.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 19:07:45,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1894, 1.3523, 1.7033, 1.4104, 2.6452, 2.0860, 2.8203, 0.9745], + device='cuda:0'), covar=tensor([0.2198, 0.3576, 0.2165, 0.1752, 0.1362, 0.1886, 0.1381, 0.3638], + device='cuda:0'), in_proj_covar=tensor([0.0481, 0.0569, 0.0594, 0.0433, 0.0590, 0.0487, 0.0645, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:07:51,365 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.585e+02 6.733e+02 8.759e+02 1.859e+03, threshold=1.347e+03, percent-clipped=4.0 +2023-04-01 19:07:52,797 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:08:02,117 INFO [train.py:903] (0/4) Epoch 11, batch 2200, loss[loss=0.2338, simple_loss=0.2982, pruned_loss=0.08467, over 19751.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3083, pruned_loss=0.08184, over 3812804.25 frames. ], batch size: 47, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:09:06,392 INFO [train.py:903] (0/4) Epoch 11, batch 2250, loss[loss=0.2286, simple_loss=0.3009, pruned_loss=0.07818, over 19761.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3077, pruned_loss=0.08163, over 3821556.01 frames. ], batch size: 54, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:00,993 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.990e+02 6.839e+02 8.349e+02 1.575e+03, threshold=1.368e+03, percent-clipped=2.0 +2023-04-01 19:10:10,414 INFO [train.py:903] (0/4) Epoch 11, batch 2300, loss[loss=0.195, simple_loss=0.2728, pruned_loss=0.0586, over 19394.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3073, pruned_loss=0.08117, over 3820700.19 frames. ], batch size: 48, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:19,765 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:10:23,155 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 19:11:05,003 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:11:13,192 INFO [train.py:903] (0/4) Epoch 11, batch 2350, loss[loss=0.1924, simple_loss=0.2684, pruned_loss=0.05824, over 19771.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3072, pruned_loss=0.08096, over 3811411.25 frames. ], batch size: 46, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:11:56,886 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 19:12:06,973 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.270e+02 6.524e+02 8.351e+02 2.247e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 19:12:12,603 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 19:12:16,798 INFO [train.py:903] (0/4) Epoch 11, batch 2400, loss[loss=0.2515, simple_loss=0.3184, pruned_loss=0.09233, over 19739.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3075, pruned_loss=0.0809, over 3814616.95 frames. 
], batch size: 63, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:13:20,230 INFO [train.py:903] (0/4) Epoch 11, batch 2450, loss[loss=0.2457, simple_loss=0.3193, pruned_loss=0.08607, over 19736.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3071, pruned_loss=0.08068, over 3815846.91 frames. ], batch size: 63, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:13:32,391 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:13:52,888 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3880, 1.3846, 1.7407, 1.5107, 2.5925, 2.2734, 2.7666, 1.1037], + device='cuda:0'), covar=tensor([0.2160, 0.3887, 0.2325, 0.1776, 0.1357, 0.1780, 0.1336, 0.3769], + device='cuda:0'), in_proj_covar=tensor([0.0485, 0.0574, 0.0599, 0.0437, 0.0594, 0.0490, 0.0649, 0.0491], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:14:09,923 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4415, 2.4430, 1.7285, 1.6958, 2.2597, 1.2803, 1.2103, 1.8931], + device='cuda:0'), covar=tensor([0.0970, 0.0567, 0.0971, 0.0668, 0.0445, 0.1142, 0.0802, 0.0442], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0297, 0.0324, 0.0242, 0.0232, 0.0317, 0.0286, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:14:14,278 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.717e+02 6.752e+02 8.721e+02 4.215e+03, threshold=1.350e+03, percent-clipped=8.0 +2023-04-01 19:14:25,709 INFO [train.py:903] (0/4) Epoch 11, batch 2500, loss[loss=0.1948, simple_loss=0.2689, pruned_loss=0.06032, over 19323.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3078, pruned_loss=0.08065, over 3820274.35 frames. ], batch size: 44, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:27,558 INFO [train.py:903] (0/4) Epoch 11, batch 2550, loss[loss=0.2455, simple_loss=0.322, pruned_loss=0.08445, over 19338.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3083, pruned_loss=0.08063, over 3832046.08 frames. 
], batch size: 66, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:45,656 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:15:52,562 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7538, 3.1917, 3.2474, 3.3000, 1.2478, 3.1392, 2.7568, 2.9850], + device='cuda:0'), covar=tensor([0.1596, 0.0954, 0.0818, 0.0859, 0.5031, 0.0822, 0.0806, 0.1415], + device='cuda:0'), in_proj_covar=tensor([0.0665, 0.0597, 0.0793, 0.0676, 0.0723, 0.0544, 0.0482, 0.0733], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 19:15:58,216 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7320, 1.4808, 1.4038, 2.1340, 1.6420, 2.1842, 2.0932, 1.9058], + device='cuda:0'), covar=tensor([0.0808, 0.0932, 0.1025, 0.0859, 0.0873, 0.0600, 0.0867, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0250, 0.0237, 0.0212, 0.0198, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 19:16:17,501 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:20,507 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.586e+02 5.428e+02 6.511e+02 7.969e+02 1.423e+03, threshold=1.302e+03, percent-clipped=4.0 +2023-04-01 19:16:26,507 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 19:16:30,145 INFO [train.py:903] (0/4) Epoch 11, batch 2600, loss[loss=0.2533, simple_loss=0.3272, pruned_loss=0.08967, over 19302.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3088, pruned_loss=0.08087, over 3830942.06 frames. ], batch size: 66, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:10,817 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2383, 5.5236, 3.0263, 4.9437, 1.1914, 5.5522, 5.5476, 5.7256], + device='cuda:0'), covar=tensor([0.0380, 0.0859, 0.1744, 0.0627, 0.3788, 0.0492, 0.0599, 0.0752], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0367, 0.0438, 0.0321, 0.0381, 0.0369, 0.0360, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:17:34,833 INFO [train.py:903] (0/4) Epoch 11, batch 2650, loss[loss=0.1905, simple_loss=0.2649, pruned_loss=0.05803, over 19763.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3089, pruned_loss=0.08098, over 3829612.94 frames. ], batch size: 48, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:41,049 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8832, 1.9345, 2.0737, 2.8319, 1.9028, 2.5083, 2.3836, 1.9428], + device='cuda:0'), covar=tensor([0.3451, 0.2970, 0.1495, 0.1693, 0.3293, 0.1504, 0.3306, 0.2609], + device='cuda:0'), in_proj_covar=tensor([0.0786, 0.0800, 0.0646, 0.0889, 0.0773, 0.0694, 0.0781, 0.0706], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:17:56,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 19:18:28,056 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 5.322e+02 6.264e+02 9.085e+02 2.401e+03, threshold=1.253e+03, percent-clipped=10.0 +2023-04-01 19:18:39,372 INFO [train.py:903] (0/4) Epoch 11, batch 2700, loss[loss=0.2392, simple_loss=0.3167, pruned_loss=0.08085, over 19635.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.31, pruned_loss=0.08223, over 3804402.00 frames. ], batch size: 57, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:18:56,213 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70993.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:18,212 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:30,050 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:43,440 INFO [train.py:903] (0/4) Epoch 11, batch 2750, loss[loss=0.1725, simple_loss=0.2474, pruned_loss=0.04882, over 19719.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.08123, over 3805440.20 frames. ], batch size: 46, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:20:08,207 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:20:15,976 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 19:20:37,001 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.301e+02 6.565e+02 7.951e+02 1.458e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 19:20:45,882 INFO [train.py:903] (0/4) Epoch 11, batch 2800, loss[loss=0.2436, simple_loss=0.3203, pruned_loss=0.08338, over 19759.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3095, pruned_loss=0.08225, over 3807531.24 frames. ], batch size: 63, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:21:51,269 INFO [train.py:903] (0/4) Epoch 11, batch 2850, loss[loss=0.1941, simple_loss=0.2703, pruned_loss=0.05894, over 19753.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3078, pruned_loss=0.08106, over 3815455.96 frames. ], batch size: 48, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:22:45,075 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.466e+02 6.713e+02 8.640e+02 2.236e+03, threshold=1.343e+03, percent-clipped=7.0 +2023-04-01 19:22:54,319 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 19:22:55,475 INFO [train.py:903] (0/4) Epoch 11, batch 2900, loss[loss=0.276, simple_loss=0.3412, pruned_loss=0.1054, over 19454.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3078, pruned_loss=0.08119, over 3804644.79 frames. ], batch size: 64, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:24:00,138 INFO [train.py:903] (0/4) Epoch 11, batch 2950, loss[loss=0.2977, simple_loss=0.3549, pruned_loss=0.1202, over 19297.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3083, pruned_loss=0.08164, over 3786424.77 frames. ], batch size: 66, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:24:53,525 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.471e+02 6.960e+02 8.532e+02 1.699e+03, threshold=1.392e+03, percent-clipped=7.0 +2023-04-01 19:25:02,835 INFO [train.py:903] (0/4) Epoch 11, batch 3000, loss[loss=0.2348, simple_loss=0.3236, pruned_loss=0.07301, over 18844.00 frames. 
], tot_loss[loss=0.2352, simple_loss=0.3084, pruned_loss=0.08098, over 3809299.87 frames. ], batch size: 74, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:25:02,836 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 19:25:16,083 INFO [train.py:937] (0/4) Epoch 11, validation: loss=0.1785, simple_loss=0.2793, pruned_loss=0.0389, over 944034.00 frames. +2023-04-01 19:25:16,084 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 19:25:20,543 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 19:26:20,108 INFO [train.py:903] (0/4) Epoch 11, batch 3050, loss[loss=0.2588, simple_loss=0.3296, pruned_loss=0.09402, over 18857.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3094, pruned_loss=0.08189, over 3792220.42 frames. ], batch size: 74, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:26:41,478 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 19:26:51,662 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:27:13,585 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.484e+02 7.005e+02 9.170e+02 2.820e+03, threshold=1.401e+03, percent-clipped=8.0 +2023-04-01 19:27:23,002 INFO [train.py:903] (0/4) Epoch 11, batch 3100, loss[loss=0.2289, simple_loss=0.3061, pruned_loss=0.07583, over 19523.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3093, pruned_loss=0.08222, over 3786807.37 frames. ], batch size: 54, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:27:36,327 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-01 19:27:39,073 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:28:24,248 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6040, 1.3046, 1.4412, 1.7788, 1.4506, 1.7853, 1.7835, 1.6444], + device='cuda:0'), covar=tensor([0.0742, 0.0977, 0.0961, 0.0684, 0.0794, 0.0710, 0.0837, 0.0667], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0226, 0.0223, 0.0250, 0.0236, 0.0214, 0.0196, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 19:28:26,265 INFO [train.py:903] (0/4) Epoch 11, batch 3150, loss[loss=0.2061, simple_loss=0.2884, pruned_loss=0.0619, over 19595.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3088, pruned_loss=0.08214, over 3804644.39 frames. ], batch size: 52, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:28:55,931 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 19:29:17,613 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:29:19,605 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.450e+02 6.508e+02 8.826e+02 1.534e+03, threshold=1.302e+03, percent-clipped=1.0 +2023-04-01 19:29:31,367 INFO [train.py:903] (0/4) Epoch 11, batch 3200, loss[loss=0.1904, simple_loss=0.2591, pruned_loss=0.06081, over 19348.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3064, pruned_loss=0.08048, over 3814495.90 frames. 
], batch size: 47, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:03,841 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7951, 1.5738, 1.4220, 1.8852, 1.6238, 1.5619, 1.5075, 1.7856], + device='cuda:0'), covar=tensor([0.0938, 0.1464, 0.1428, 0.0866, 0.1212, 0.0546, 0.1178, 0.0691], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0348, 0.0288, 0.0236, 0.0296, 0.0244, 0.0277, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:30:05,881 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:30:32,439 INFO [train.py:903] (0/4) Epoch 11, batch 3250, loss[loss=0.2786, simple_loss=0.3496, pruned_loss=0.1038, over 18176.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3084, pruned_loss=0.08172, over 3822689.22 frames. ], batch size: 83, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:26,706 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.701e+02 5.157e+02 6.415e+02 8.641e+02 1.397e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-04-01 19:31:36,061 INFO [train.py:903] (0/4) Epoch 11, batch 3300, loss[loss=0.228, simple_loss=0.3081, pruned_loss=0.07391, over 18105.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3072, pruned_loss=0.08115, over 3820258.81 frames. ], batch size: 83, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:41,739 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 19:32:11,536 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-01 19:32:39,941 INFO [train.py:903] (0/4) Epoch 11, batch 3350, loss[loss=0.2241, simple_loss=0.3046, pruned_loss=0.07176, over 18771.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3081, pruned_loss=0.08152, over 3815552.86 frames. ], batch size: 74, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:32:46,619 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 19:32:47,351 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:33:34,249 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.202e+02 6.815e+02 8.173e+02 2.322e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 19:33:44,450 INFO [train.py:903] (0/4) Epoch 11, batch 3400, loss[loss=0.2706, simple_loss=0.3388, pruned_loss=0.1012, over 19581.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3085, pruned_loss=0.08154, over 3815470.65 frames. ], batch size: 52, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:44,104 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71725.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:34:48,499 INFO [train.py:903] (0/4) Epoch 11, batch 3450, loss[loss=0.2267, simple_loss=0.3073, pruned_loss=0.07301, over 18107.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3085, pruned_loss=0.08132, over 3817720.32 frames. ], batch size: 83, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:52,170 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-01 19:35:15,992 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:33,028 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:42,112 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.649e+02 6.835e+02 8.667e+02 1.565e+03, threshold=1.367e+03, percent-clipped=4.0 +2023-04-01 19:35:47,962 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7978, 4.3742, 2.8415, 3.9104, 1.1678, 4.0918, 4.1440, 4.2809], + device='cuda:0'), covar=tensor([0.0689, 0.1164, 0.1842, 0.0713, 0.3984, 0.0742, 0.0791, 0.1098], + device='cuda:0'), in_proj_covar=tensor([0.0424, 0.0360, 0.0431, 0.0313, 0.0373, 0.0365, 0.0350, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 19:35:52,286 INFO [train.py:903] (0/4) Epoch 11, batch 3500, loss[loss=0.2405, simple_loss=0.3158, pruned_loss=0.08257, over 19665.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3092, pruned_loss=0.0815, over 3826415.29 frames. ], batch size: 60, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:36:04,486 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:36:30,082 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5820, 1.5792, 1.6665, 1.6923, 3.1151, 1.1097, 2.4372, 3.5867], + device='cuda:0'), covar=tensor([0.0448, 0.2298, 0.2409, 0.1541, 0.0688, 0.2428, 0.1160, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0332, 0.0349, 0.0315, 0.0343, 0.0329, 0.0328, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:36:56,411 INFO [train.py:903] (0/4) Epoch 11, batch 3550, loss[loss=0.2272, simple_loss=0.3114, pruned_loss=0.0715, over 19481.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3084, pruned_loss=0.08097, over 3820080.94 frames. ], batch size: 64, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:37:49,026 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.129e+02 6.236e+02 8.069e+02 1.994e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-01 19:37:58,903 INFO [train.py:903] (0/4) Epoch 11, batch 3600, loss[loss=0.2897, simple_loss=0.3455, pruned_loss=0.117, over 19786.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3087, pruned_loss=0.08122, over 3812609.96 frames. ], batch size: 56, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:03,005 INFO [train.py:903] (0/4) Epoch 11, batch 3650, loss[loss=0.2327, simple_loss=0.3026, pruned_loss=0.0814, over 19482.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3077, pruned_loss=0.08114, over 3795828.50 frames. 
], batch size: 49, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:13,487 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:39:55,946 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.422e+02 6.726e+02 7.997e+02 1.955e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-04-01 19:40:05,223 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:40:06,260 INFO [train.py:903] (0/4) Epoch 11, batch 3700, loss[loss=0.2836, simple_loss=0.3428, pruned_loss=0.1122, over 13240.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3083, pruned_loss=0.08104, over 3797937.33 frames. ], batch size: 136, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:40:33,202 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-72000.pt +2023-04-01 19:41:02,232 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-01 19:41:11,920 INFO [train.py:903] (0/4) Epoch 11, batch 3750, loss[loss=0.2372, simple_loss=0.3183, pruned_loss=0.07804, over 19755.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3071, pruned_loss=0.08086, over 3796651.55 frames. ], batch size: 63, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:06,342 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.141e+02 6.053e+02 7.322e+02 1.150e+03, threshold=1.211e+03, percent-clipped=0.0 +2023-04-01 19:42:17,090 INFO [train.py:903] (0/4) Epoch 11, batch 3800, loss[loss=0.1878, simple_loss=0.2673, pruned_loss=0.05418, over 19383.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3079, pruned_loss=0.08095, over 3791128.66 frames. ], batch size: 48, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:22,598 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-01 19:42:35,662 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:42:46,962 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 19:43:21,151 INFO [train.py:903] (0/4) Epoch 11, batch 3850, loss[loss=0.2437, simple_loss=0.3205, pruned_loss=0.08343, over 19318.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3083, pruned_loss=0.08063, over 3799761.12 frames. ], batch size: 66, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:43:45,116 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:44:16,373 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 5.600e+02 6.929e+02 8.545e+02 2.074e+03, threshold=1.386e+03, percent-clipped=5.0 +2023-04-01 19:44:25,945 INFO [train.py:903] (0/4) Epoch 11, batch 3900, loss[loss=0.2336, simple_loss=0.3186, pruned_loss=0.07428, over 19532.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3097, pruned_loss=0.08118, over 3807134.01 frames. 
], batch size: 56, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:04,300 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1768, 1.2913, 1.7556, 1.2774, 2.6285, 3.5280, 3.2199, 3.7249], + device='cuda:0'), covar=tensor([0.1501, 0.3254, 0.2814, 0.2032, 0.0469, 0.0167, 0.0203, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0294, 0.0321, 0.0251, 0.0213, 0.0154, 0.0206, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 19:45:30,660 INFO [train.py:903] (0/4) Epoch 11, batch 3950, loss[loss=0.255, simple_loss=0.3251, pruned_loss=0.09241, over 18932.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3096, pruned_loss=0.08116, over 3814366.99 frames. ], batch size: 75, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:31,930 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 19:46:23,815 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 5.760e+02 7.487e+02 9.416e+02 2.541e+03, threshold=1.497e+03, percent-clipped=9.0 +2023-04-01 19:46:34,101 INFO [train.py:903] (0/4) Epoch 11, batch 4000, loss[loss=0.2416, simple_loss=0.3157, pruned_loss=0.08373, over 19677.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3097, pruned_loss=0.08143, over 3816384.52 frames. ], batch size: 53, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:46:36,413 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72281.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:47:22,613 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 19:47:37,789 INFO [train.py:903] (0/4) Epoch 11, batch 4050, loss[loss=0.2192, simple_loss=0.3057, pruned_loss=0.06633, over 19678.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3084, pruned_loss=0.08066, over 3830994.41 frames. ], batch size: 60, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:47:57,848 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:05,153 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:33,046 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.561e+02 4.924e+02 6.616e+02 8.581e+02 2.031e+03, threshold=1.323e+03, percent-clipped=3.0 +2023-04-01 19:48:37,060 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:43,655 INFO [train.py:903] (0/4) Epoch 11, batch 4100, loss[loss=0.2374, simple_loss=0.3085, pruned_loss=0.08313, over 19587.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3074, pruned_loss=0.08014, over 3826809.36 frames. ], batch size: 52, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:49:05,224 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72396.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:49:18,922 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 19:49:48,315 INFO [train.py:903] (0/4) Epoch 11, batch 4150, loss[loss=0.2532, simple_loss=0.3249, pruned_loss=0.09081, over 18787.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3088, pruned_loss=0.081, over 3814702.02 frames. 
], batch size: 74, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:50:01,639 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2489, 3.7831, 3.8849, 3.8780, 1.5612, 3.6676, 3.2049, 3.6386], + device='cuda:0'), covar=tensor([0.1421, 0.0723, 0.0612, 0.0670, 0.4851, 0.0623, 0.0651, 0.1109], + device='cuda:0'), in_proj_covar=tensor([0.0665, 0.0601, 0.0793, 0.0674, 0.0723, 0.0550, 0.0485, 0.0733], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 19:50:42,220 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.902e+02 6.015e+02 8.099e+02 1.569e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-01 19:50:42,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1241, 2.8645, 2.2081, 2.6278, 1.1522, 2.7157, 2.6926, 2.7649], + device='cuda:0'), covar=tensor([0.1156, 0.1287, 0.1796, 0.0740, 0.2730, 0.0932, 0.0890, 0.1149], + device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0370, 0.0440, 0.0320, 0.0379, 0.0373, 0.0358, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:50:47,136 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8898, 4.3586, 4.6188, 4.5708, 1.7026, 4.2602, 3.7288, 4.3004], + device='cuda:0'), covar=tensor([0.1333, 0.0719, 0.0472, 0.0568, 0.4910, 0.0682, 0.0556, 0.0918], + device='cuda:0'), in_proj_covar=tensor([0.0668, 0.0602, 0.0795, 0.0676, 0.0724, 0.0552, 0.0485, 0.0734], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 19:50:51,446 INFO [train.py:903] (0/4) Epoch 11, batch 4200, loss[loss=0.2582, simple_loss=0.3285, pruned_loss=0.094, over 18083.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3083, pruned_loss=0.08066, over 3815810.70 frames. ], batch size: 83, lr: 7.54e-03, grad_scale: 16.0 +2023-04-01 19:50:57,210 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 19:51:07,191 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:51:56,560 INFO [train.py:903] (0/4) Epoch 11, batch 4250, loss[loss=0.2587, simple_loss=0.3336, pruned_loss=0.09186, over 19667.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3081, pruned_loss=0.08058, over 3817058.58 frames. ], batch size: 55, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:52:17,354 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-01 19:52:23,704 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8051, 3.2310, 3.2950, 3.2959, 1.2928, 3.1358, 2.7484, 3.0454], + device='cuda:0'), covar=tensor([0.1514, 0.0879, 0.0757, 0.0860, 0.4708, 0.0854, 0.0725, 0.1232], + device='cuda:0'), in_proj_covar=tensor([0.0671, 0.0603, 0.0795, 0.0679, 0.0726, 0.0554, 0.0485, 0.0735], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 19:52:23,906 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6764, 1.7211, 1.8415, 2.0390, 1.4282, 1.8610, 2.0414, 1.7775], + device='cuda:0'), covar=tensor([0.3265, 0.2705, 0.1531, 0.1614, 0.2811, 0.1498, 0.3647, 0.2596], + device='cuda:0'), in_proj_covar=tensor([0.0792, 0.0811, 0.0651, 0.0896, 0.0781, 0.0707, 0.0790, 0.0713], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:52:28,268 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 19:52:51,987 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.433e+02 6.415e+02 7.675e+02 1.468e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-01 19:52:58,543 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5000, 1.5759, 2.0220, 1.6802, 3.0542, 2.5570, 3.2200, 1.6325], + device='cuda:0'), covar=tensor([0.2261, 0.3658, 0.2241, 0.1812, 0.1499, 0.1840, 0.1726, 0.3583], + device='cuda:0'), in_proj_covar=tensor([0.0490, 0.0572, 0.0596, 0.0435, 0.0589, 0.0489, 0.0645, 0.0493], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:53:01,653 INFO [train.py:903] (0/4) Epoch 11, batch 4300, loss[loss=0.1962, simple_loss=0.2802, pruned_loss=0.05612, over 19724.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3082, pruned_loss=0.08112, over 3808599.88 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:53:26,277 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4927, 1.4237, 1.4675, 1.8412, 3.0505, 1.0571, 2.3337, 3.4247], + device='cuda:0'), covar=tensor([0.0470, 0.2467, 0.2593, 0.1433, 0.0695, 0.2437, 0.1143, 0.0273], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0336, 0.0350, 0.0319, 0.0345, 0.0331, 0.0333, 0.0352], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 19:53:36,383 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:00,212 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 19:54:04,822 INFO [train.py:903] (0/4) Epoch 11, batch 4350, loss[loss=0.2728, simple_loss=0.3312, pruned_loss=0.1071, over 19536.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3077, pruned_loss=0.08128, over 3811887.28 frames. 
], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:54:33,998 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:58,901 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.206e+02 6.524e+02 8.372e+02 1.509e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-01 19:55:06,320 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:08,340 INFO [train.py:903] (0/4) Epoch 11, batch 4400, loss[loss=0.1902, simple_loss=0.2622, pruned_loss=0.05909, over 19752.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3076, pruned_loss=0.08121, over 3819753.77 frames. ], batch size: 48, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:55:20,139 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:31,030 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8670, 1.9891, 2.0947, 2.6956, 1.8520, 2.5030, 2.4148, 1.9778], + device='cuda:0'), covar=tensor([0.3451, 0.2949, 0.1412, 0.1834, 0.3387, 0.1500, 0.3141, 0.2481], + device='cuda:0'), in_proj_covar=tensor([0.0788, 0.0806, 0.0646, 0.0890, 0.0776, 0.0705, 0.0787, 0.0706], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 19:55:37,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 19:55:47,758 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 19:56:12,269 INFO [train.py:903] (0/4) Epoch 11, batch 4450, loss[loss=0.268, simple_loss=0.3288, pruned_loss=0.1036, over 19778.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3065, pruned_loss=0.08014, over 3823053.91 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:57:06,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.681e+02 5.500e+02 6.922e+02 8.863e+02 1.965e+03, threshold=1.384e+03, percent-clipped=10.0 +2023-04-01 19:57:14,267 INFO [train.py:903] (0/4) Epoch 11, batch 4500, loss[loss=0.2296, simple_loss=0.3039, pruned_loss=0.07769, over 19704.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3076, pruned_loss=0.08123, over 3823413.67 frames. ], batch size: 59, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:57:46,498 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:58:19,529 INFO [train.py:903] (0/4) Epoch 11, batch 4550, loss[loss=0.2442, simple_loss=0.3166, pruned_loss=0.0859, over 19673.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3079, pruned_loss=0.0812, over 3807593.58 frames. ], batch size: 58, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:58:28,676 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 19:58:53,317 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 19:59:02,253 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:59:15,579 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.895e+02 7.318e+02 9.053e+02 1.617e+03, threshold=1.464e+03, percent-clipped=1.0 +2023-04-01 19:59:16,613 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 19:59:23,893 INFO [train.py:903] (0/4) Epoch 11, batch 4600, loss[loss=0.2148, simple_loss=0.296, pruned_loss=0.06679, over 19610.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.307, pruned_loss=0.08067, over 3809184.05 frames. ], batch size: 57, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:59:34,658 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72887.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:04,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 20:00:06,131 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:27,747 INFO [train.py:903] (0/4) Epoch 11, batch 4650, loss[loss=0.2772, simple_loss=0.3475, pruned_loss=0.1035, over 18180.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3067, pruned_loss=0.08051, over 3806005.96 frames. ], batch size: 83, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 20:00:46,738 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 20:00:57,807 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 20:01:22,039 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.839e+02 5.234e+02 6.593e+02 8.122e+02 1.324e+03, threshold=1.319e+03, percent-clipped=0.0 +2023-04-01 20:01:30,191 INFO [train.py:903] (0/4) Epoch 11, batch 4700, loss[loss=0.2057, simple_loss=0.2711, pruned_loss=0.07012, over 19098.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3063, pruned_loss=0.08001, over 3807708.74 frames. ], batch size: 42, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:01:34,977 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72982.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:01:55,722 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 20:02:34,869 INFO [train.py:903] (0/4) Epoch 11, batch 4750, loss[loss=0.2983, simple_loss=0.3563, pruned_loss=0.1201, over 18048.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3068, pruned_loss=0.08049, over 3797010.72 frames. ], batch size: 83, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:11,979 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:03:28,794 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.728e+02 6.958e+02 8.518e+02 2.013e+03, threshold=1.392e+03, percent-clipped=2.0 +2023-04-01 20:03:37,833 INFO [train.py:903] (0/4) Epoch 11, batch 4800, loss[loss=0.2325, simple_loss=0.3147, pruned_loss=0.07513, over 19668.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3068, pruned_loss=0.0803, over 3801556.05 frames. ], batch size: 60, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:38,476 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-01 20:03:44,153 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73084.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:04:18,422 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9628, 3.6021, 2.3618, 3.2819, 0.9593, 3.4683, 3.3582, 3.4213], + device='cuda:0'), covar=tensor([0.0800, 0.1065, 0.1996, 0.0759, 0.3699, 0.0843, 0.0890, 0.1192], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0364, 0.0433, 0.0315, 0.0376, 0.0370, 0.0356, 0.0389], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:04:26,323 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2850, 1.2962, 1.6463, 1.4400, 2.1630, 1.9188, 2.1087, 0.8478], + device='cuda:0'), covar=tensor([0.2348, 0.4224, 0.2459, 0.1974, 0.1528, 0.2239, 0.1606, 0.3993], + device='cuda:0'), in_proj_covar=tensor([0.0490, 0.0577, 0.0601, 0.0438, 0.0591, 0.0493, 0.0645, 0.0497], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 20:04:40,996 INFO [train.py:903] (0/4) Epoch 11, batch 4850, loss[loss=0.2508, simple_loss=0.3238, pruned_loss=0.08885, over 19466.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3071, pruned_loss=0.08068, over 3791715.14 frames. ], batch size: 64, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:05:01,952 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 20:05:22,868 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 20:05:29,925 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 20:05:29,953 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 20:05:35,783 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.053e+02 5.592e+02 6.791e+02 8.044e+02 1.982e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 20:05:39,477 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 20:05:44,089 INFO [train.py:903] (0/4) Epoch 11, batch 4900, loss[loss=0.2352, simple_loss=0.3145, pruned_loss=0.07793, over 18730.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3076, pruned_loss=0.08095, over 3794059.53 frames. ], batch size: 74, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:05:57,050 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3349, 1.2275, 1.4333, 1.3799, 2.8463, 0.8932, 2.0516, 3.2301], + device='cuda:0'), covar=tensor([0.0488, 0.2702, 0.2715, 0.1809, 0.0769, 0.2679, 0.1338, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0337, 0.0352, 0.0321, 0.0344, 0.0332, 0.0331, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:06:01,202 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 20:06:48,389 INFO [train.py:903] (0/4) Epoch 11, batch 4950, loss[loss=0.2058, simple_loss=0.2867, pruned_loss=0.06244, over 19667.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3067, pruned_loss=0.08007, over 3798806.81 frames. 
], batch size: 53, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:00,929 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 20:07:21,265 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73256.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:07:24,672 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 20:07:42,671 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.726e+02 5.448e+02 7.218e+02 1.051e+03 2.092e+03, threshold=1.444e+03, percent-clipped=5.0 +2023-04-01 20:07:51,005 INFO [train.py:903] (0/4) Epoch 11, batch 5000, loss[loss=0.2209, simple_loss=0.3035, pruned_loss=0.06913, over 19670.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3065, pruned_loss=0.08004, over 3808639.65 frames. ], batch size: 58, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:55,889 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 20:08:08,619 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 20:08:13,781 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3529, 2.0419, 2.4980, 2.7682, 2.1622, 2.7222, 2.7974, 2.6731], + device='cuda:0'), covar=tensor([0.0762, 0.0914, 0.0832, 0.0936, 0.0914, 0.0672, 0.0876, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0223, 0.0250, 0.0235, 0.0213, 0.0194, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 20:08:32,026 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:08:50,649 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73326.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:08:54,859 INFO [train.py:903] (0/4) Epoch 11, batch 5050, loss[loss=0.2363, simple_loss=0.3226, pruned_loss=0.07497, over 19695.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3054, pruned_loss=0.07879, over 3827870.63 frames. ], batch size: 59, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:09:28,969 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 20:09:47,509 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:09:48,292 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.337e+02 6.487e+02 8.820e+02 1.986e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-01 20:09:56,474 INFO [train.py:903] (0/4) Epoch 11, batch 5100, loss[loss=0.2338, simple_loss=0.3116, pruned_loss=0.07804, over 19668.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.307, pruned_loss=0.08035, over 3808419.56 frames. ], batch size: 60, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:10:04,900 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 20:10:10,277 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 20:10:13,775 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-01 20:10:22,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3125, 3.8858, 2.5435, 3.4992, 0.8793, 3.7784, 3.6453, 3.8111], + device='cuda:0'), covar=tensor([0.0678, 0.1011, 0.1992, 0.0738, 0.4139, 0.0756, 0.0802, 0.1015], + device='cuda:0'), in_proj_covar=tensor([0.0432, 0.0363, 0.0437, 0.0317, 0.0379, 0.0373, 0.0357, 0.0390], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:11:00,241 INFO [train.py:903] (0/4) Epoch 11, batch 5150, loss[loss=0.2571, simple_loss=0.3302, pruned_loss=0.09205, over 19541.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3063, pruned_loss=0.07978, over 3808817.39 frames. ], batch size: 54, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:11:09,671 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 20:11:16,019 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73441.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:11:31,498 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3140, 1.1407, 1.2020, 1.1539, 2.0161, 0.8602, 1.7634, 2.1782], + device='cuda:0'), covar=tensor([0.0657, 0.2308, 0.2453, 0.1478, 0.0824, 0.1998, 0.1066, 0.0512], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0334, 0.0348, 0.0320, 0.0344, 0.0331, 0.0328, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:11:43,077 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:11:50,701 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-01 20:11:54,720 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.282e+02 5.910e+02 7.077e+02 9.438e+02 2.777e+03, threshold=1.415e+03, percent-clipped=4.0 +2023-04-01 20:12:04,001 INFO [train.py:903] (0/4) Epoch 11, batch 5200, loss[loss=0.2273, simple_loss=0.3103, pruned_loss=0.07217, over 19529.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3065, pruned_loss=0.07996, over 3818899.60 frames. ], batch size: 54, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:12:16,643 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 20:13:00,031 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 20:13:07,094 INFO [train.py:903] (0/4) Epoch 11, batch 5250, loss[loss=0.257, simple_loss=0.3297, pruned_loss=0.09213, over 19597.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.307, pruned_loss=0.07996, over 3830431.31 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:14:02,591 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 5.338e+02 6.658e+02 8.757e+02 1.866e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-04-01 20:14:11,102 INFO [train.py:903] (0/4) Epoch 11, batch 5300, loss[loss=0.2227, simple_loss=0.303, pruned_loss=0.07122, over 19696.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3073, pruned_loss=0.08, over 3829531.15 frames. ], batch size: 59, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:14:26,095 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-01 20:14:29,971 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3461, 2.2364, 1.8985, 1.7363, 1.5580, 1.7495, 0.5974, 1.2245], + device='cuda:0'), covar=tensor([0.0400, 0.0382, 0.0327, 0.0519, 0.0874, 0.0623, 0.0750, 0.0689], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0326, 0.0329, 0.0348, 0.0424, 0.0349, 0.0304, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 20:14:43,191 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2929, 3.7916, 3.9053, 3.8932, 1.5659, 3.6903, 3.1999, 3.6318], + device='cuda:0'), covar=tensor([0.1427, 0.0759, 0.0594, 0.0667, 0.4777, 0.0659, 0.0668, 0.1089], + device='cuda:0'), in_proj_covar=tensor([0.0666, 0.0600, 0.0794, 0.0678, 0.0724, 0.0553, 0.0485, 0.0730], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 20:14:49,204 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:12,298 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73627.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:15,335 INFO [train.py:903] (0/4) Epoch 11, batch 5350, loss[loss=0.2382, simple_loss=0.2999, pruned_loss=0.08831, over 19479.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3085, pruned_loss=0.08045, over 3823666.16 frames. ], batch size: 49, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:15:46,113 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:46,881 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 20:15:49,348 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73655.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:16:09,903 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.285e+02 6.573e+02 8.256e+02 1.333e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-04-01 20:16:19,929 INFO [train.py:903] (0/4) Epoch 11, batch 5400, loss[loss=0.2879, simple_loss=0.3458, pruned_loss=0.115, over 18905.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3087, pruned_loss=0.08063, over 3812441.81 frames. ], batch size: 75, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:16:41,368 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73697.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:17:13,022 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:17:20,603 INFO [train.py:903] (0/4) Epoch 11, batch 5450, loss[loss=0.2773, simple_loss=0.3423, pruned_loss=0.1062, over 19544.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3096, pruned_loss=0.08169, over 3821586.68 frames. 
], batch size: 64, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:17:45,878 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:11,991 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:14,014 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.707e+02 7.323e+02 9.356e+02 2.024e+03, threshold=1.465e+03, percent-clipped=8.0 +2023-04-01 20:18:23,353 INFO [train.py:903] (0/4) Epoch 11, batch 5500, loss[loss=0.1991, simple_loss=0.2855, pruned_loss=0.05635, over 19678.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3096, pruned_loss=0.08147, over 3836822.73 frames. ], batch size: 59, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:18:50,423 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 20:19:25,475 INFO [train.py:903] (0/4) Epoch 11, batch 5550, loss[loss=0.2097, simple_loss=0.2929, pruned_loss=0.06329, over 19614.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3095, pruned_loss=0.08158, over 3831852.34 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:19:35,069 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 20:20:04,808 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1156, 1.2672, 1.7328, 1.1226, 2.4868, 3.3275, 3.1161, 3.5655], + device='cuda:0'), covar=tensor([0.1525, 0.3281, 0.2870, 0.2106, 0.0457, 0.0162, 0.0209, 0.0180], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0248, 0.0212, 0.0156, 0.0205, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 20:20:20,656 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.761e+02 6.752e+02 8.536e+02 1.570e+03, threshold=1.350e+03, percent-clipped=1.0 +2023-04-01 20:20:26,340 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 20:20:29,855 INFO [train.py:903] (0/4) Epoch 11, batch 5600, loss[loss=0.3334, simple_loss=0.3865, pruned_loss=0.1402, over 17883.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3081, pruned_loss=0.081, over 3839827.88 frames. ], batch size: 83, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:24,344 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6217, 1.2524, 1.5513, 1.4427, 3.1010, 1.0262, 2.2111, 3.5302], + device='cuda:0'), covar=tensor([0.0388, 0.2727, 0.2504, 0.1797, 0.0709, 0.2413, 0.1169, 0.0260], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0338, 0.0349, 0.0322, 0.0347, 0.0331, 0.0332, 0.0353], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:21:34,078 INFO [train.py:903] (0/4) Epoch 11, batch 5650, loss[loss=0.226, simple_loss=0.3052, pruned_loss=0.07343, over 19525.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3075, pruned_loss=0.0806, over 3826488.54 frames. ], batch size: 54, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:22:02,981 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:22:23,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 20:22:28,536 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.880e+02 7.006e+02 8.632e+02 1.687e+03, threshold=1.401e+03, percent-clipped=2.0 +2023-04-01 20:22:37,807 INFO [train.py:903] (0/4) Epoch 11, batch 5700, loss[loss=0.3035, simple_loss=0.361, pruned_loss=0.123, over 13508.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3081, pruned_loss=0.08084, over 3813582.92 frames. ], batch size: 136, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:03,726 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-74000.pt +2023-04-01 20:23:13,090 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4500, 1.5698, 2.0544, 1.8337, 3.3838, 2.7189, 3.6442, 1.6856], + device='cuda:0'), covar=tensor([0.2182, 0.3696, 0.2225, 0.1560, 0.1290, 0.1698, 0.1363, 0.3287], + device='cuda:0'), in_proj_covar=tensor([0.0492, 0.0574, 0.0599, 0.0436, 0.0594, 0.0492, 0.0645, 0.0494], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 20:23:38,867 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:23:42,023 INFO [train.py:903] (0/4) Epoch 11, batch 5750, loss[loss=0.2306, simple_loss=0.3112, pruned_loss=0.07494, over 19592.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3083, pruned_loss=0.08059, over 3821377.56 frames. ], batch size: 61, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:43,286 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 20:23:51,324 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 20:23:56,774 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 20:24:11,299 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74051.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:30,647 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74067.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:36,267 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.413e+02 6.547e+02 7.617e+02 1.298e+03, threshold=1.309e+03, percent-clipped=0.0 +2023-04-01 20:24:45,221 INFO [train.py:903] (0/4) Epoch 11, batch 5800, loss[loss=0.2374, simple_loss=0.3026, pruned_loss=0.08615, over 19336.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3087, pruned_loss=0.08094, over 3813397.04 frames. 
], batch size: 47, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:24:50,554 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:04,170 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:46,671 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2602, 1.3251, 1.2582, 1.0677, 1.1133, 1.1341, 0.0199, 0.4067], + device='cuda:0'), covar=tensor([0.0436, 0.0416, 0.0271, 0.0336, 0.0798, 0.0379, 0.0771, 0.0720], + device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0325, 0.0327, 0.0346, 0.0421, 0.0344, 0.0305, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 20:25:48,487 INFO [train.py:903] (0/4) Epoch 11, batch 5850, loss[loss=0.1963, simple_loss=0.27, pruned_loss=0.06133, over 19791.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3084, pruned_loss=0.08099, over 3826823.06 frames. ], batch size: 48, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:25:52,588 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 20:26:04,395 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 20:26:41,999 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.778e+02 5.734e+02 7.251e+02 9.100e+02 2.751e+03, threshold=1.450e+03, percent-clipped=7.0 +2023-04-01 20:26:51,303 INFO [train.py:903] (0/4) Epoch 11, batch 5900, loss[loss=0.2281, simple_loss=0.3065, pruned_loss=0.07485, over 19751.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3082, pruned_loss=0.08131, over 3833387.96 frames. ], batch size: 54, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:26:53,554 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 20:27:15,038 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 20:27:21,460 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4096, 1.2391, 1.6129, 1.3024, 2.8151, 3.8336, 3.6108, 4.0352], + device='cuda:0'), covar=tensor([0.1386, 0.3318, 0.3013, 0.2028, 0.0444, 0.0140, 0.0156, 0.0156], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0295, 0.0324, 0.0250, 0.0214, 0.0158, 0.0207, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 20:27:30,970 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:27:55,157 INFO [train.py:903] (0/4) Epoch 11, batch 5950, loss[loss=0.1868, simple_loss=0.2772, pruned_loss=0.04821, over 19545.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3071, pruned_loss=0.08033, over 3823294.72 frames. ], batch size: 56, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:33,399 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74258.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:28:49,683 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 4.883e+02 6.160e+02 7.607e+02 1.521e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-01 20:28:59,179 INFO [train.py:903] (0/4) Epoch 11, batch 6000, loss[loss=0.187, simple_loss=0.2637, pruned_loss=0.05514, over 19732.00 frames. 
], tot_loss[loss=0.234, simple_loss=0.3072, pruned_loss=0.0804, over 3805595.23 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:59,180 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 20:29:11,916 INFO [train.py:937] (0/4) Epoch 11, validation: loss=0.1778, simple_loss=0.2787, pruned_loss=0.03847, over 944034.00 frames. +2023-04-01 20:29:11,918 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 20:30:09,775 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74323.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:30:18,373 INFO [train.py:903] (0/4) Epoch 11, batch 6050, loss[loss=0.2261, simple_loss=0.3097, pruned_loss=0.07125, over 19672.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3061, pruned_loss=0.07973, over 3815810.75 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:30:28,818 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9175, 4.3319, 4.6687, 4.6326, 1.8660, 4.3517, 3.7187, 4.3356], + device='cuda:0'), covar=tensor([0.1446, 0.0810, 0.0496, 0.0595, 0.4831, 0.0598, 0.0615, 0.1007], + device='cuda:0'), in_proj_covar=tensor([0.0672, 0.0600, 0.0800, 0.0682, 0.0725, 0.0557, 0.0485, 0.0732], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 20:30:42,877 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:31:12,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.290e+02 5.870e+02 7.295e+02 8.913e+02 1.728e+03, threshold=1.459e+03, percent-clipped=8.0 +2023-04-01 20:31:21,970 INFO [train.py:903] (0/4) Epoch 11, batch 6100, loss[loss=0.2423, simple_loss=0.318, pruned_loss=0.08332, over 18299.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.306, pruned_loss=0.07938, over 3822682.20 frames. ], batch size: 83, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:22,429 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74426.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:32:25,923 INFO [train.py:903] (0/4) Epoch 11, batch 6150, loss[loss=0.2426, simple_loss=0.3229, pruned_loss=0.0812, over 19676.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3063, pruned_loss=0.07922, over 3822878.06 frames. ], batch size: 58, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:37,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8699, 2.0162, 2.1132, 2.7042, 1.8855, 2.5536, 2.2983, 1.9887], + device='cuda:0'), covar=tensor([0.3493, 0.2889, 0.1431, 0.1848, 0.3459, 0.1465, 0.3384, 0.2573], + device='cuda:0'), in_proj_covar=tensor([0.0790, 0.0811, 0.0644, 0.0894, 0.0779, 0.0699, 0.0783, 0.0708], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 20:32:54,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-01 20:33:12,550 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:33:20,926 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.576e+02 6.382e+02 8.608e+02 2.367e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-01 20:33:29,309 INFO [train.py:903] (0/4) Epoch 11, batch 6200, loss[loss=0.2161, simple_loss=0.3049, pruned_loss=0.06362, over 19667.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.307, pruned_loss=0.07957, over 3823602.25 frames. ], batch size: 58, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:33:43,940 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:34:31,916 INFO [train.py:903] (0/4) Epoch 11, batch 6250, loss[loss=0.2729, simple_loss=0.344, pruned_loss=0.1009, over 19705.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3075, pruned_loss=0.08038, over 3798651.14 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:34:48,583 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74541.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:35:02,959 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 20:35:30,743 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 5.334e+02 6.699e+02 8.918e+02 1.691e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-01 20:35:38,660 INFO [train.py:903] (0/4) Epoch 11, batch 6300, loss[loss=0.2575, simple_loss=0.3116, pruned_loss=0.1017, over 19763.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3078, pruned_loss=0.08067, over 3792859.16 frames. ], batch size: 47, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:36:06,782 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:21,216 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:40,491 INFO [train.py:903] (0/4) Epoch 11, batch 6350, loss[loss=0.2164, simple_loss=0.292, pruned_loss=0.07039, over 19759.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3074, pruned_loss=0.08051, over 3805498.34 frames. ], batch size: 51, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:36,488 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 5.622e+02 6.880e+02 9.256e+02 3.776e+03, threshold=1.376e+03, percent-clipped=5.0 +2023-04-01 20:37:44,840 INFO [train.py:903] (0/4) Epoch 11, batch 6400, loss[loss=0.2272, simple_loss=0.3101, pruned_loss=0.07216, over 19670.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3066, pruned_loss=0.08016, over 3800749.44 frames. ], batch size: 58, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:56,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-01 20:38:01,361 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9274, 1.6120, 1.5187, 1.9007, 1.7647, 1.6829, 1.5261, 1.7947], + device='cuda:0'), covar=tensor([0.0917, 0.1406, 0.1325, 0.0933, 0.1138, 0.0497, 0.1214, 0.0693], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0351, 0.0291, 0.0238, 0.0298, 0.0243, 0.0278, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:38:33,487 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:38:46,859 INFO [train.py:903] (0/4) Epoch 11, batch 6450, loss[loss=0.2441, simple_loss=0.3186, pruned_loss=0.08477, over 19658.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3072, pruned_loss=0.08016, over 3801458.79 frames. ], batch size: 60, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:39:25,226 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6748, 1.4503, 1.4478, 1.7100, 1.6589, 1.5002, 1.4365, 1.6677], + device='cuda:0'), covar=tensor([0.0809, 0.1218, 0.1143, 0.0710, 0.0941, 0.0486, 0.1060, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0351, 0.0293, 0.0239, 0.0298, 0.0244, 0.0279, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:39:28,506 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 20:39:44,918 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 5.590e+02 7.165e+02 9.175e+02 2.194e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 20:39:53,337 INFO [train.py:903] (0/4) Epoch 11, batch 6500, loss[loss=0.2479, simple_loss=0.3184, pruned_loss=0.08867, over 19280.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3083, pruned_loss=0.08063, over 3794632.89 frames. ], batch size: 66, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:39:54,549 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 20:40:16,064 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:48,466 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:56,365 INFO [train.py:903] (0/4) Epoch 11, batch 6550, loss[loss=0.2228, simple_loss=0.299, pruned_loss=0.07334, over 19603.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3072, pruned_loss=0.08022, over 3798366.51 frames. 
], batch size: 50, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:40:56,799 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1896, 2.0994, 1.8171, 1.6761, 1.5366, 1.6727, 0.3788, 1.0303], + device='cuda:0'), covar=tensor([0.0483, 0.0422, 0.0310, 0.0512, 0.0893, 0.0549, 0.0884, 0.0766], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0331, 0.0333, 0.0354, 0.0429, 0.0353, 0.0312, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 20:41:52,115 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 5.036e+02 6.201e+02 8.020e+02 1.527e+03, threshold=1.240e+03, percent-clipped=1.0 +2023-04-01 20:41:59,118 INFO [train.py:903] (0/4) Epoch 11, batch 6600, loss[loss=0.1826, simple_loss=0.2573, pruned_loss=0.05389, over 19796.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3063, pruned_loss=0.07953, over 3817225.14 frames. ], batch size: 46, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:42:42,899 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 20:43:01,202 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:43:03,149 INFO [train.py:903] (0/4) Epoch 11, batch 6650, loss[loss=0.2254, simple_loss=0.3051, pruned_loss=0.07286, over 19752.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3056, pruned_loss=0.07905, over 3815154.78 frames. ], batch size: 63, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:43:40,234 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:00,891 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.506e+02 7.046e+02 9.043e+02 1.602e+03, threshold=1.409e+03, percent-clipped=2.0 +2023-04-01 20:44:01,356 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:07,814 INFO [train.py:903] (0/4) Epoch 11, batch 6700, loss[loss=0.2597, simple_loss=0.3298, pruned_loss=0.09479, over 19588.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3067, pruned_loss=0.07998, over 3812536.23 frames. 
], batch size: 61, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:44:12,414 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74982.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:29,466 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6054, 1.4349, 1.4079, 1.9891, 1.6316, 1.9015, 2.0169, 1.7371], + device='cuda:0'), covar=tensor([0.0780, 0.0925, 0.0978, 0.0713, 0.0788, 0.0644, 0.0737, 0.0596], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0227, 0.0224, 0.0247, 0.0237, 0.0214, 0.0197, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 20:44:31,726 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74998.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:46,244 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7005, 4.3231, 2.9234, 3.7408, 1.3863, 4.0669, 4.0851, 4.1567], + device='cuda:0'), covar=tensor([0.0568, 0.0921, 0.1809, 0.0770, 0.3357, 0.0709, 0.0701, 0.1055], + device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0369, 0.0438, 0.0317, 0.0379, 0.0369, 0.0357, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:45:06,539 INFO [train.py:903] (0/4) Epoch 11, batch 6750, loss[loss=0.246, simple_loss=0.3105, pruned_loss=0.09069, over 19844.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3068, pruned_loss=0.0804, over 3808794.21 frames. ], batch size: 52, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:45:55,367 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:56,170 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 5.649e+02 7.002e+02 8.709e+02 1.691e+03, threshold=1.400e+03, percent-clipped=2.0 +2023-04-01 20:46:04,204 INFO [train.py:903] (0/4) Epoch 11, batch 6800, loss[loss=0.2081, simple_loss=0.2848, pruned_loss=0.06566, over 19569.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3065, pruned_loss=0.07998, over 3813694.76 frames. ], batch size: 52, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:46:36,713 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-11.pt +2023-04-01 20:46:52,796 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 20:46:53,242 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 20:46:56,121 INFO [train.py:903] (0/4) Epoch 12, batch 0, loss[loss=0.2534, simple_loss=0.3278, pruned_loss=0.08946, over 19773.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3278, pruned_loss=0.08946, over 19773.00 frames. 
], batch size: 56, lr: 7.10e-03, grad_scale: 8.0 +2023-04-01 20:46:56,122 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 20:47:04,986 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8579, 3.4850, 2.6127, 3.3424, 0.8746, 3.2446, 3.2848, 3.4875], + device='cuda:0'), covar=tensor([0.0722, 0.0833, 0.1925, 0.0855, 0.4086, 0.1101, 0.0807, 0.0988], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0361, 0.0432, 0.0312, 0.0374, 0.0364, 0.0352, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-01 20:47:08,131 INFO [train.py:937] (0/4) Epoch 12, validation: loss=0.1777, simple_loss=0.2788, pruned_loss=0.03825, over 944034.00 frames. +2023-04-01 20:47:08,132 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 20:47:20,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 20:48:11,523 INFO [train.py:903] (0/4) Epoch 12, batch 50, loss[loss=0.2351, simple_loss=0.3145, pruned_loss=0.07789, over 19731.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3072, pruned_loss=0.07964, over 853262.88 frames. ], batch size: 63, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:48:20,830 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3016, 1.9703, 2.0751, 2.7843, 2.1061, 2.4987, 2.3448, 2.6629], + device='cuda:0'), covar=tensor([0.0684, 0.0840, 0.0832, 0.0829, 0.0899, 0.0644, 0.0856, 0.0515], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0225, 0.0222, 0.0246, 0.0235, 0.0212, 0.0195, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 20:48:29,484 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.251e+02 6.823e+02 1.011e+03 3.055e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-01 20:48:41,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 20:49:13,865 INFO [train.py:903] (0/4) Epoch 12, batch 100, loss[loss=0.2268, simple_loss=0.3105, pruned_loss=0.07154, over 19348.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3098, pruned_loss=0.08133, over 1516561.91 frames. ], batch size: 70, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:49:22,080 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 20:50:02,889 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:16,779 INFO [train.py:903] (0/4) Epoch 12, batch 150, loss[loss=0.2246, simple_loss=0.2859, pruned_loss=0.08161, over 18185.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3084, pruned_loss=0.08148, over 2023507.05 frames. 
], batch size: 40, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:50:34,009 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:36,118 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.206e+02 6.373e+02 8.343e+02 1.576e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-01 20:50:56,177 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:15,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 20:51:18,701 INFO [train.py:903] (0/4) Epoch 12, batch 200, loss[loss=0.2299, simple_loss=0.304, pruned_loss=0.07792, over 19530.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.308, pruned_loss=0.08109, over 2427130.32 frames. ], batch size: 54, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:51:42,606 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:45,311 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:46,567 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9046, 2.0059, 2.1842, 2.7736, 1.8678, 2.6106, 2.4650, 1.9854], + device='cuda:0'), covar=tensor([0.3713, 0.3183, 0.1418, 0.1829, 0.3550, 0.1498, 0.3507, 0.2751], + device='cuda:0'), in_proj_covar=tensor([0.0789, 0.0808, 0.0642, 0.0891, 0.0775, 0.0703, 0.0777, 0.0703], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 20:52:16,202 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:22,155 INFO [train.py:903] (0/4) Epoch 12, batch 250, loss[loss=0.1948, simple_loss=0.2768, pruned_loss=0.05644, over 19483.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3087, pruned_loss=0.08163, over 2727327.64 frames. ], batch size: 49, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:52:41,710 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.714e+02 6.835e+02 8.470e+02 1.829e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 20:52:56,682 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:53:25,598 INFO [train.py:903] (0/4) Epoch 12, batch 300, loss[loss=0.2322, simple_loss=0.3139, pruned_loss=0.07526, over 19612.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3073, pruned_loss=0.08098, over 2964912.88 frames. 
], batch size: 57, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:53:34,062 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4114, 1.4114, 1.5294, 1.6787, 2.9635, 1.0148, 2.2004, 3.2590], + device='cuda:0'), covar=tensor([0.0414, 0.2393, 0.2449, 0.1456, 0.0671, 0.2366, 0.1161, 0.0298], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0340, 0.0352, 0.0320, 0.0346, 0.0332, 0.0336, 0.0356], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:54:07,330 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:54:28,458 INFO [train.py:903] (0/4) Epoch 12, batch 350, loss[loss=0.3003, simple_loss=0.3553, pruned_loss=0.1227, over 13814.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3079, pruned_loss=0.08159, over 3157645.60 frames. ], batch size: 136, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:31,961 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:54:45,903 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.463e+02 6.814e+02 8.627e+02 1.955e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-01 20:55:24,622 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 20:55:30,815 INFO [train.py:903] (0/4) Epoch 12, batch 400, loss[loss=0.2277, simple_loss=0.2933, pruned_loss=0.08111, over 19408.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3076, pruned_loss=0.08093, over 3321172.99 frames. ], batch size: 48, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:56:16,856 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2614, 1.3775, 1.6648, 1.4652, 2.4146, 2.0984, 2.5006, 0.9151], + device='cuda:0'), covar=tensor([0.2147, 0.3618, 0.2066, 0.1648, 0.1330, 0.1762, 0.1338, 0.3689], + device='cuda:0'), in_proj_covar=tensor([0.0492, 0.0577, 0.0604, 0.0438, 0.0598, 0.0493, 0.0646, 0.0492], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 20:56:32,362 INFO [train.py:903] (0/4) Epoch 12, batch 450, loss[loss=0.2708, simple_loss=0.3396, pruned_loss=0.101, over 19602.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3072, pruned_loss=0.08071, over 3449664.64 frames. ], batch size: 61, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:56:51,536 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.172e+02 6.423e+02 7.976e+02 2.291e+03, threshold=1.285e+03, percent-clipped=4.0 +2023-04-01 20:57:09,457 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 20:57:10,696 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 20:57:13,220 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75590.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:57:17,020 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0613, 4.4031, 4.7356, 4.7140, 1.4675, 4.3887, 3.7960, 4.4006], + device='cuda:0'), covar=tensor([0.1263, 0.0788, 0.0564, 0.0554, 0.5654, 0.0682, 0.0630, 0.1071], + device='cuda:0'), in_proj_covar=tensor([0.0677, 0.0603, 0.0804, 0.0688, 0.0731, 0.0559, 0.0492, 0.0741], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 20:57:36,542 INFO [train.py:903] (0/4) Epoch 12, batch 500, loss[loss=0.2689, simple_loss=0.3383, pruned_loss=0.09974, over 19389.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3081, pruned_loss=0.08112, over 3525489.18 frames. ], batch size: 66, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:06,137 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:17,921 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:38,555 INFO [train.py:903] (0/4) Epoch 12, batch 550, loss[loss=0.2625, simple_loss=0.322, pruned_loss=0.1015, over 17271.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3079, pruned_loss=0.0808, over 3587399.09 frames. ], batch size: 101, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:50,382 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:56,742 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.816e+02 6.939e+02 9.327e+02 2.224e+03, threshold=1.388e+03, percent-clipped=13.0 +2023-04-01 20:59:09,128 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 20:59:24,531 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9531, 1.5573, 1.7347, 1.9920, 4.3750, 1.0996, 2.4018, 4.6619], + device='cuda:0'), covar=tensor([0.0346, 0.2620, 0.2666, 0.1723, 0.0775, 0.2707, 0.1400, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0340, 0.0351, 0.0319, 0.0347, 0.0330, 0.0333, 0.0354], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 20:59:26,865 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:38,276 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75705.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:41,416 INFO [train.py:903] (0/4) Epoch 12, batch 600, loss[loss=0.2567, simple_loss=0.3334, pruned_loss=0.08999, over 19548.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3078, pruned_loss=0.08047, over 3645522.00 frames. ], batch size: 61, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:59:57,482 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:22,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-01 21:00:30,155 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:42,500 INFO [train.py:903] (0/4) Epoch 12, batch 650, loss[loss=0.1955, simple_loss=0.2696, pruned_loss=0.0607, over 19764.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3082, pruned_loss=0.08056, over 3688637.76 frames. ], batch size: 48, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 21:01:01,243 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.224e+02 6.354e+02 7.929e+02 1.382e+03, threshold=1.271e+03, percent-clipped=0.0 +2023-04-01 21:01:22,978 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5380, 2.3891, 1.7888, 1.5348, 2.2227, 1.2770, 1.2578, 1.9652], + device='cuda:0'), covar=tensor([0.0906, 0.0598, 0.0840, 0.0673, 0.0380, 0.1000, 0.0690, 0.0392], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0304, 0.0329, 0.0249, 0.0237, 0.0320, 0.0292, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:01:24,545 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-01 21:01:29,683 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4017, 1.2674, 1.7825, 1.1052, 2.5011, 3.3683, 3.0789, 3.4964], + device='cuda:0'), covar=tensor([0.1356, 0.3389, 0.2783, 0.2185, 0.0488, 0.0154, 0.0202, 0.0211], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0293, 0.0323, 0.0250, 0.0214, 0.0156, 0.0206, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:01:45,574 INFO [train.py:903] (0/4) Epoch 12, batch 700, loss[loss=0.2101, simple_loss=0.2914, pruned_loss=0.06442, over 19666.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3073, pruned_loss=0.07997, over 3717818.73 frames. ], batch size: 55, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:02:04,537 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. limit=5.0 +2023-04-01 21:02:46,468 INFO [train.py:903] (0/4) Epoch 12, batch 750, loss[loss=0.2312, simple_loss=0.3027, pruned_loss=0.07988, over 19497.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3072, pruned_loss=0.07994, over 3745926.15 frames. ], batch size: 49, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:03:02,335 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1836, 2.2085, 2.3245, 3.3463, 2.2198, 3.0667, 2.7779, 2.2420], + device='cuda:0'), covar=tensor([0.3845, 0.3378, 0.1456, 0.1895, 0.3805, 0.1520, 0.3450, 0.2810], + device='cuda:0'), in_proj_covar=tensor([0.0792, 0.0813, 0.0647, 0.0893, 0.0781, 0.0704, 0.0781, 0.0711], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 21:03:05,206 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 5.547e+02 6.814e+02 8.571e+02 2.504e+03, threshold=1.363e+03, percent-clipped=8.0 +2023-04-01 21:03:40,164 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 21:03:49,650 INFO [train.py:903] (0/4) Epoch 12, batch 800, loss[loss=0.2065, simple_loss=0.2768, pruned_loss=0.0681, over 19373.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3071, pruned_loss=0.08006, over 3758665.71 frames. 
], batch size: 47, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:07,048 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 21:04:50,832 INFO [train.py:903] (0/4) Epoch 12, batch 850, loss[loss=0.2152, simple_loss=0.3014, pruned_loss=0.06448, over 19572.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.307, pruned_loss=0.07958, over 3785229.65 frames. ], batch size: 61, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:54,842 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75961.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:10,029 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.208e+02 6.361e+02 7.722e+02 1.579e+03, threshold=1.272e+03, percent-clipped=2.0 +2023-04-01 21:05:25,762 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75986.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:29,792 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5526, 1.4183, 1.8187, 1.6491, 2.9911, 4.6127, 4.5098, 4.9003], + device='cuda:0'), covar=tensor([0.1434, 0.3320, 0.3040, 0.1913, 0.0496, 0.0137, 0.0146, 0.0128], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0294, 0.0324, 0.0251, 0.0214, 0.0157, 0.0206, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:05:43,537 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-76000.pt +2023-04-01 21:05:46,886 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 21:05:48,501 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:53,895 INFO [train.py:903] (0/4) Epoch 12, batch 900, loss[loss=0.2618, simple_loss=0.3318, pruned_loss=0.09587, over 17173.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3064, pruned_loss=0.07887, over 3807209.94 frames. ], batch size: 101, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:06:13,717 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4571, 1.2648, 1.3068, 1.7167, 1.3945, 1.6982, 1.6925, 1.5566], + device='cuda:0'), covar=tensor([0.0792, 0.0984, 0.1028, 0.0713, 0.0880, 0.0694, 0.0875, 0.0658], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0222, 0.0244, 0.0235, 0.0212, 0.0195, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:06:20,498 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:06:57,921 INFO [train.py:903] (0/4) Epoch 12, batch 950, loss[loss=0.247, simple_loss=0.3191, pruned_loss=0.08744, over 18208.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.07899, over 3807544.93 frames. ], batch size: 83, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:07:02,628 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 21:07:17,511 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 5.102e+02 6.493e+02 8.387e+02 1.985e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 21:07:59,842 INFO [train.py:903] (0/4) Epoch 12, batch 1000, loss[loss=0.3108, simple_loss=0.3661, pruned_loss=0.1278, over 17526.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3055, pruned_loss=0.07878, over 3811921.83 frames. ], batch size: 101, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:08:42,442 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1900, 1.9459, 1.5030, 1.3139, 1.8214, 1.1288, 1.0504, 1.7174], + device='cuda:0'), covar=tensor([0.0843, 0.0698, 0.0948, 0.0620, 0.0428, 0.1035, 0.0680, 0.0374], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0303, 0.0326, 0.0247, 0.0233, 0.0314, 0.0289, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:08:54,949 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 21:09:02,680 INFO [train.py:903] (0/4) Epoch 12, batch 1050, loss[loss=0.2281, simple_loss=0.2962, pruned_loss=0.08002, over 19399.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3057, pruned_loss=0.07921, over 3803183.64 frames. ], batch size: 48, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:09:20,679 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 5.178e+02 6.428e+02 8.624e+02 1.751e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-01 21:09:35,979 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 21:10:05,080 INFO [train.py:903] (0/4) Epoch 12, batch 1100, loss[loss=0.2563, simple_loss=0.3297, pruned_loss=0.0915, over 18715.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3053, pruned_loss=0.07911, over 3816590.01 frames. ], batch size: 74, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:10:12,398 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9130, 2.0931, 2.3546, 2.8294, 2.7318, 2.3919, 2.2425, 2.8056], + device='cuda:0'), covar=tensor([0.0691, 0.1593, 0.1079, 0.0821, 0.1061, 0.0426, 0.1117, 0.0495], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0352, 0.0292, 0.0241, 0.0296, 0.0243, 0.0280, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:11:08,891 INFO [train.py:903] (0/4) Epoch 12, batch 1150, loss[loss=0.2029, simple_loss=0.2777, pruned_loss=0.064, over 19420.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3048, pruned_loss=0.0789, over 3820217.27 frames. ], batch size: 48, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:11:27,481 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 5.432e+02 6.664e+02 8.568e+02 1.731e+03, threshold=1.333e+03, percent-clipped=3.0 +2023-04-01 21:11:44,823 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76287.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:12:10,722 INFO [train.py:903] (0/4) Epoch 12, batch 1200, loss[loss=0.199, simple_loss=0.2739, pruned_loss=0.06202, over 19478.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3047, pruned_loss=0.07847, over 3820850.07 frames. 
], batch size: 49, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:12:15,520 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:12:45,736 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 21:12:52,921 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76341.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:13:14,059 INFO [train.py:903] (0/4) Epoch 12, batch 1250, loss[loss=0.1889, simple_loss=0.2585, pruned_loss=0.05961, over 18605.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3046, pruned_loss=0.07849, over 3816983.39 frames. ], batch size: 41, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:13:31,221 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 5.242e+02 6.279e+02 7.669e+02 1.575e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-01 21:13:40,368 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6834, 2.2834, 2.1602, 2.7642, 2.4569, 2.2127, 2.2205, 2.4807], + device='cuda:0'), covar=tensor([0.0759, 0.1518, 0.1226, 0.0818, 0.1132, 0.0509, 0.1083, 0.0627], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0353, 0.0292, 0.0241, 0.0298, 0.0245, 0.0280, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:14:15,607 INFO [train.py:903] (0/4) Epoch 12, batch 1300, loss[loss=0.2123, simple_loss=0.2803, pruned_loss=0.07216, over 19422.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07837, over 3817503.76 frames. ], batch size: 48, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:15:18,797 INFO [train.py:903] (0/4) Epoch 12, batch 1350, loss[loss=0.2163, simple_loss=0.2973, pruned_loss=0.0676, over 19673.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.305, pruned_loss=0.07859, over 3810320.27 frames. ], batch size: 60, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:15:37,069 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 5.247e+02 6.486e+02 7.910e+02 1.390e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 21:16:20,438 INFO [train.py:903] (0/4) Epoch 12, batch 1400, loss[loss=0.2497, simple_loss=0.322, pruned_loss=0.08866, over 19588.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3038, pruned_loss=0.07757, over 3812835.18 frames. ], batch size: 52, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:16:44,277 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 21:17:23,464 INFO [train.py:903] (0/4) Epoch 12, batch 1450, loss[loss=0.2545, simple_loss=0.3273, pruned_loss=0.09081, over 19301.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.304, pruned_loss=0.07726, over 3824948.15 frames. ], batch size: 66, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:17:25,871 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 21:17:26,560 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 21:17:40,980 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 4.935e+02 6.510e+02 8.250e+02 1.774e+03, threshold=1.302e+03, percent-clipped=3.0 +2023-04-01 21:18:24,877 INFO [train.py:903] (0/4) Epoch 12, batch 1500, loss[loss=0.2035, simple_loss=0.2803, pruned_loss=0.0633, over 19735.00 frames. 
], tot_loss[loss=0.2306, simple_loss=0.3047, pruned_loss=0.07822, over 3818298.44 frames. ], batch size: 51, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:18:53,773 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76631.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:19:24,220 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76656.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:19:27,181 INFO [train.py:903] (0/4) Epoch 12, batch 1550, loss[loss=0.2822, simple_loss=0.3427, pruned_loss=0.1108, over 19319.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07837, over 3829667.02 frames. ], batch size: 66, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:19:46,336 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 5.709e+02 6.905e+02 9.210e+02 1.884e+03, threshold=1.381e+03, percent-clipped=4.0 +2023-04-01 21:20:01,202 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76685.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:20:19,370 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 21:20:23,675 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 21:20:29,928 INFO [train.py:903] (0/4) Epoch 12, batch 1600, loss[loss=0.2679, simple_loss=0.3357, pruned_loss=0.1001, over 18330.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3049, pruned_loss=0.07777, over 3833174.27 frames. ], batch size: 83, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:20:53,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 21:21:16,444 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76746.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:21:23,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4284, 2.3843, 1.6993, 1.4587, 2.2768, 1.2131, 1.2013, 2.0147], + device='cuda:0'), covar=tensor([0.1037, 0.0567, 0.0888, 0.0751, 0.0393, 0.1111, 0.0824, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0300, 0.0321, 0.0243, 0.0231, 0.0316, 0.0289, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:21:29,664 INFO [train.py:903] (0/4) Epoch 12, batch 1650, loss[loss=0.2724, simple_loss=0.3424, pruned_loss=0.1012, over 18766.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3059, pruned_loss=0.07848, over 3833398.01 frames. ], batch size: 74, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:21:30,358 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. 
limit=2.0 +2023-04-01 21:21:46,971 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:21:49,944 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.672e+02 5.088e+02 6.360e+02 7.707e+02 1.579e+03, threshold=1.272e+03, percent-clipped=3.0 +2023-04-01 21:22:06,337 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7300, 1.8551, 1.6824, 2.8498, 1.8857, 2.7034, 2.0081, 1.4598], + device='cuda:0'), covar=tensor([0.4385, 0.3611, 0.2251, 0.2117, 0.3760, 0.1638, 0.4978, 0.4209], + device='cuda:0'), in_proj_covar=tensor([0.0791, 0.0816, 0.0646, 0.0889, 0.0781, 0.0706, 0.0779, 0.0709], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 21:22:22,685 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76800.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:22:33,406 INFO [train.py:903] (0/4) Epoch 12, batch 1700, loss[loss=0.2386, simple_loss=0.3215, pruned_loss=0.07781, over 19539.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3059, pruned_loss=0.07855, over 3835514.26 frames. ], batch size: 56, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:22:59,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5588, 1.3533, 1.3866, 1.9154, 1.5319, 1.8386, 1.9231, 1.6608], + device='cuda:0'), covar=tensor([0.0799, 0.0946, 0.0996, 0.0783, 0.0881, 0.0712, 0.0851, 0.0651], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0246, 0.0236, 0.0213, 0.0195, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:23:10,291 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 21:23:16,988 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3555, 2.1412, 1.9215, 1.8267, 1.5742, 1.8178, 0.6045, 1.2396], + device='cuda:0'), covar=tensor([0.0419, 0.0454, 0.0384, 0.0601, 0.0940, 0.0710, 0.0947, 0.0772], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0329, 0.0332, 0.0356, 0.0427, 0.0354, 0.0309, 0.0322], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:23:33,286 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:23:34,183 INFO [train.py:903] (0/4) Epoch 12, batch 1750, loss[loss=0.2176, simple_loss=0.2801, pruned_loss=0.07758, over 19765.00 frames. ], tot_loss[loss=0.233, simple_loss=0.307, pruned_loss=0.07951, over 3825957.59 frames. ], batch size: 46, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:23:36,417 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 21:23:53,640 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.587e+02 7.065e+02 8.864e+02 2.096e+03, threshold=1.413e+03, percent-clipped=6.0 +2023-04-01 21:24:37,122 INFO [train.py:903] (0/4) Epoch 12, batch 1800, loss[loss=0.2511, simple_loss=0.3221, pruned_loss=0.09002, over 19670.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3046, pruned_loss=0.07813, over 3833385.88 frames. 
], batch size: 60, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:24:44,446 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3050, 1.9540, 1.5350, 1.4004, 1.8359, 1.1324, 1.2942, 1.7986], + device='cuda:0'), covar=tensor([0.0775, 0.0672, 0.0888, 0.0628, 0.0421, 0.1039, 0.0566, 0.0325], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0300, 0.0320, 0.0243, 0.0231, 0.0317, 0.0289, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:25:32,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 21:25:38,680 INFO [train.py:903] (0/4) Epoch 12, batch 1850, loss[loss=0.202, simple_loss=0.2784, pruned_loss=0.06285, over 19738.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3041, pruned_loss=0.07784, over 3829528.37 frames. ], batch size: 51, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:25:45,874 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.06 vs. limit=5.0 +2023-04-01 21:25:59,472 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 5.640e+02 7.026e+02 8.457e+02 1.689e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 21:26:12,306 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 21:26:34,345 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77002.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:26:41,893 INFO [train.py:903] (0/4) Epoch 12, batch 1900, loss[loss=0.22, simple_loss=0.2891, pruned_loss=0.07544, over 19719.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3059, pruned_loss=0.07888, over 3832637.94 frames. ], batch size: 51, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:27:00,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 21:27:04,920 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 21:27:06,367 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:27:06,384 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:28,514 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 21:27:36,567 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77052.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:42,293 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77056.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:27:43,966 INFO [train.py:903] (0/4) Epoch 12, batch 1950, loss[loss=0.2153, simple_loss=0.2809, pruned_loss=0.07487, over 19383.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3066, pruned_loss=0.07934, over 3836868.64 frames. 
], batch size: 47, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:28:03,223 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.700e+02 5.468e+02 7.092e+02 9.065e+02 1.810e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 21:28:11,815 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77081.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:28:34,064 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 21:28:45,461 INFO [train.py:903] (0/4) Epoch 12, batch 2000, loss[loss=0.2919, simple_loss=0.3433, pruned_loss=0.1203, over 13330.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3079, pruned_loss=0.08031, over 3818409.36 frames. ], batch size: 136, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:28:48,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. limit=5.0 +2023-04-01 21:28:51,499 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77113.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:29:15,575 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5838, 1.2828, 1.2083, 1.5158, 1.1418, 1.3687, 1.2140, 1.4044], + device='cuda:0'), covar=tensor([0.0932, 0.1072, 0.1400, 0.0794, 0.1112, 0.0542, 0.1263, 0.0725], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0349, 0.0292, 0.0238, 0.0294, 0.0242, 0.0278, 0.0236], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:29:43,044 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 21:29:46,528 INFO [train.py:903] (0/4) Epoch 12, batch 2050, loss[loss=0.2049, simple_loss=0.2956, pruned_loss=0.05709, over 19627.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3079, pruned_loss=0.08068, over 3816317.13 frames. ], batch size: 57, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:30:02,209 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 21:30:03,448 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 21:30:06,833 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.225e+02 5.837e+02 7.308e+02 9.815e+02 2.165e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 21:30:26,552 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 21:30:40,584 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77201.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:30:49,626 INFO [train.py:903] (0/4) Epoch 12, batch 2100, loss[loss=0.2634, simple_loss=0.3337, pruned_loss=0.09649, over 19672.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3067, pruned_loss=0.08012, over 3815922.64 frames. ], batch size: 58, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:31:10,325 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-01 21:31:12,333 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2144, 2.9459, 2.1533, 2.6913, 0.8656, 2.8560, 2.7080, 2.8318], + device='cuda:0'), covar=tensor([0.1155, 0.1364, 0.2097, 0.1044, 0.3847, 0.1087, 0.1194, 0.1441], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0369, 0.0444, 0.0321, 0.0384, 0.0378, 0.0364, 0.0397], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:31:17,042 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:31:20,288 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 21:31:28,747 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2024, 1.1564, 1.1523, 1.3359, 1.1183, 1.3815, 1.3149, 1.2534], + device='cuda:0'), covar=tensor([0.0882, 0.0996, 0.1077, 0.0713, 0.0856, 0.0774, 0.0833, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0226, 0.0226, 0.0249, 0.0236, 0.0215, 0.0197, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:31:40,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 21:31:52,760 INFO [train.py:903] (0/4) Epoch 12, batch 2150, loss[loss=0.315, simple_loss=0.3578, pruned_loss=0.1361, over 13324.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3063, pruned_loss=0.07944, over 3819315.21 frames. ], batch size: 136, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:32:13,144 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.202e+02 5.327e+02 7.168e+02 9.347e+02 2.125e+03, threshold=1.434e+03, percent-clipped=4.0 +2023-04-01 21:32:33,243 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:39,469 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77295.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:51,909 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-01 21:32:53,884 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1120, 1.1353, 1.6674, 1.1037, 2.6077, 3.5729, 3.3009, 3.7237], + device='cuda:0'), covar=tensor([0.1586, 0.3654, 0.3064, 0.2177, 0.0465, 0.0139, 0.0196, 0.0175], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0295, 0.0324, 0.0250, 0.0215, 0.0158, 0.0205, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:32:55,903 INFO [train.py:903] (0/4) Epoch 12, batch 2200, loss[loss=0.196, simple_loss=0.2684, pruned_loss=0.06184, over 19777.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3058, pruned_loss=0.07927, over 3823843.55 frames. ], batch size: 47, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:33:05,527 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77316.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:33:25,217 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 21:33:57,354 INFO [train.py:903] (0/4) Epoch 12, batch 2250, loss[loss=0.2327, simple_loss=0.3053, pruned_loss=0.08002, over 19604.00 frames. 
], tot_loss[loss=0.2319, simple_loss=0.3055, pruned_loss=0.07914, over 3835137.35 frames. ], batch size: 50, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:34:08,706 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0349, 1.2780, 1.7051, 1.2469, 2.8455, 3.7168, 3.4695, 3.9189], + device='cuda:0'), covar=tensor([0.1686, 0.3299, 0.3009, 0.2128, 0.0482, 0.0140, 0.0187, 0.0174], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0294, 0.0325, 0.0250, 0.0215, 0.0158, 0.0206, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:34:18,146 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.298e+02 6.341e+02 7.997e+02 1.542e+03, threshold=1.268e+03, percent-clipped=2.0 +2023-04-01 21:34:27,379 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4834, 4.0223, 4.1978, 4.2037, 1.4501, 3.9070, 3.4183, 3.8715], + device='cuda:0'), covar=tensor([0.1468, 0.0886, 0.0618, 0.0607, 0.5442, 0.0810, 0.0687, 0.1179], + device='cuda:0'), in_proj_covar=tensor([0.0683, 0.0613, 0.0812, 0.0690, 0.0738, 0.0565, 0.0498, 0.0749], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 21:34:35,671 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 21:34:58,603 INFO [train.py:903] (0/4) Epoch 12, batch 2300, loss[loss=0.2098, simple_loss=0.2806, pruned_loss=0.06948, over 19136.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3071, pruned_loss=0.08067, over 3811292.12 frames. ], batch size: 42, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:35:10,948 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 21:35:59,716 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77457.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:36:00,828 INFO [train.py:903] (0/4) Epoch 12, batch 2350, loss[loss=0.2286, simple_loss=0.3002, pruned_loss=0.07848, over 19587.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3075, pruned_loss=0.08096, over 3815896.66 frames. ], batch size: 52, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:36:22,285 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.245e+02 6.457e+02 8.417e+02 4.507e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-01 21:36:41,243 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 21:36:59,073 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 21:37:02,548 INFO [train.py:903] (0/4) Epoch 12, batch 2400, loss[loss=0.1711, simple_loss=0.2536, pruned_loss=0.0443, over 19810.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3071, pruned_loss=0.08035, over 3832654.52 frames. 
], batch size: 49, lr: 6.99e-03, grad_scale: 8.0 +2023-04-01 21:37:17,451 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5489, 1.3490, 1.3198, 1.8915, 1.5776, 1.7543, 1.9561, 1.6162], + device='cuda:0'), covar=tensor([0.0815, 0.0951, 0.1049, 0.0802, 0.0844, 0.0694, 0.0747, 0.0708], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0224, 0.0224, 0.0248, 0.0234, 0.0214, 0.0196, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:37:36,476 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8437, 1.8521, 2.1622, 2.6770, 1.7914, 2.4892, 2.4084, 1.9587], + device='cuda:0'), covar=tensor([0.3682, 0.3271, 0.1457, 0.1639, 0.3459, 0.1435, 0.3470, 0.2733], + device='cuda:0'), in_proj_covar=tensor([0.0802, 0.0823, 0.0653, 0.0897, 0.0788, 0.0711, 0.0789, 0.0719], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 21:38:04,502 INFO [train.py:903] (0/4) Epoch 12, batch 2450, loss[loss=0.2235, simple_loss=0.3052, pruned_loss=0.07094, over 19621.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3061, pruned_loss=0.07952, over 3834325.47 frames. ], batch size: 61, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:38:21,436 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:21,508 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:22,405 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:24,483 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.530e+02 6.529e+02 8.263e+02 1.639e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 21:38:52,856 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77597.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:56,122 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6051, 4.1817, 2.5901, 3.8084, 1.1041, 3.9786, 3.9436, 4.0585], + device='cuda:0'), covar=tensor([0.0584, 0.0951, 0.2062, 0.0693, 0.3915, 0.0715, 0.0799, 0.0967], + device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0365, 0.0440, 0.0319, 0.0380, 0.0373, 0.0360, 0.0392], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:39:05,893 INFO [train.py:903] (0/4) Epoch 12, batch 2500, loss[loss=0.2849, simple_loss=0.3542, pruned_loss=0.1078, over 19515.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3067, pruned_loss=0.07984, over 3846961.01 frames. 
], batch size: 64, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:39:27,209 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4280, 1.2973, 1.2912, 1.9114, 1.5174, 1.7073, 1.9036, 1.6593], + device='cuda:0'), covar=tensor([0.0874, 0.0982, 0.1068, 0.0786, 0.0882, 0.0742, 0.0793, 0.0640], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0244, 0.0232, 0.0211, 0.0193, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:39:39,443 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,513 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,758 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6668, 1.4673, 1.5359, 2.1864, 1.6747, 2.0775, 2.1524, 2.0573], + device='cuda:0'), covar=tensor([0.0798, 0.0921, 0.0980, 0.0789, 0.0875, 0.0691, 0.0813, 0.0551], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0221, 0.0243, 0.0231, 0.0211, 0.0193, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 21:39:45,169 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:08,014 INFO [train.py:903] (0/4) Epoch 12, batch 2550, loss[loss=0.2786, simple_loss=0.3378, pruned_loss=0.1097, over 19716.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3057, pruned_loss=0.07891, over 3845954.14 frames. ], batch size: 63, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:40:30,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.340e+02 6.709e+02 8.508e+02 1.809e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-01 21:40:46,046 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:54,790 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:41:05,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 21:41:10,773 INFO [train.py:903] (0/4) Epoch 12, batch 2600, loss[loss=0.2011, simple_loss=0.2731, pruned_loss=0.06458, over 19000.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3059, pruned_loss=0.07885, over 3836731.19 frames. ], batch size: 42, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:41:54,844 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 21:42:02,077 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:07,395 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:11,589 INFO [train.py:903] (0/4) Epoch 12, batch 2650, loss[loss=0.2057, simple_loss=0.2756, pruned_loss=0.06787, over 19801.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3047, pruned_loss=0.0781, over 3829279.78 frames. 
], batch size: 48, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:42:32,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.775e+02 6.860e+02 8.613e+02 1.817e+03, threshold=1.372e+03, percent-clipped=5.0 +2023-04-01 21:42:33,300 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 21:43:06,917 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9781, 3.5955, 2.4028, 3.3093, 0.7206, 3.4013, 3.4110, 3.4688], + device='cuda:0'), covar=tensor([0.0852, 0.1299, 0.2117, 0.0815, 0.4296, 0.0904, 0.0883, 0.1286], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0374, 0.0450, 0.0326, 0.0386, 0.0379, 0.0367, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:43:12,307 INFO [train.py:903] (0/4) Epoch 12, batch 2700, loss[loss=0.2164, simple_loss=0.2816, pruned_loss=0.07561, over 19415.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3047, pruned_loss=0.0783, over 3837281.09 frames. ], batch size: 48, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:43:38,253 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:08,848 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77853.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:14,412 INFO [train.py:903] (0/4) Epoch 12, batch 2750, loss[loss=0.22, simple_loss=0.3049, pruned_loss=0.06754, over 19664.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3049, pruned_loss=0.0785, over 3827940.22 frames. ], batch size: 58, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:44:36,443 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.785e+02 6.935e+02 8.560e+02 1.739e+03, threshold=1.387e+03, percent-clipped=4.0 +2023-04-01 21:45:15,202 INFO [train.py:903] (0/4) Epoch 12, batch 2800, loss[loss=0.1985, simple_loss=0.2663, pruned_loss=0.06538, over 19743.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3054, pruned_loss=0.07862, over 3820176.95 frames. ], batch size: 47, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:46:01,262 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:19,703 INFO [train.py:903] (0/4) Epoch 12, batch 2850, loss[loss=0.2064, simple_loss=0.2779, pruned_loss=0.06742, over 19395.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3048, pruned_loss=0.0782, over 3815943.57 frames. 
], batch size: 48, lr: 6.97e-03, grad_scale: 4.0 +2023-04-01 21:46:22,361 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6778, 1.2883, 1.5044, 1.5925, 3.2164, 0.9485, 2.1786, 3.5105], + device='cuda:0'), covar=tensor([0.0482, 0.2688, 0.2654, 0.1701, 0.0771, 0.2672, 0.1267, 0.0293], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0333, 0.0345, 0.0315, 0.0340, 0.0330, 0.0331, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:46:32,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:41,213 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.493e+02 5.663e+02 6.634e+02 8.773e+02 3.814e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-01 21:46:43,717 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:11,981 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-78000.pt +2023-04-01 21:47:20,361 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:22,310 INFO [train.py:903] (0/4) Epoch 12, batch 2900, loss[loss=0.1793, simple_loss=0.2587, pruned_loss=0.04991, over 19410.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3046, pruned_loss=0.07799, over 3818076.19 frames. ], batch size: 48, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:47:22,334 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 21:47:25,105 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:39,767 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2502, 1.3789, 1.6770, 1.4736, 2.4434, 2.1287, 2.5534, 0.9067], + device='cuda:0'), covar=tensor([0.2132, 0.3742, 0.2294, 0.1765, 0.1296, 0.1834, 0.1312, 0.3718], + device='cuda:0'), in_proj_covar=tensor([0.0489, 0.0578, 0.0608, 0.0437, 0.0594, 0.0489, 0.0644, 0.0495], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 21:47:52,652 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:57,902 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78035.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:02,313 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:25,033 INFO [train.py:903] (0/4) Epoch 12, batch 2950, loss[loss=0.2411, simple_loss=0.3148, pruned_loss=0.0837, over 19672.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3052, pruned_loss=0.07828, over 3820003.79 frames. 
], batch size: 53, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:48:48,726 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.583e+02 6.866e+02 9.102e+02 1.641e+03, threshold=1.373e+03, percent-clipped=7.0 +2023-04-01 21:49:09,996 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:49:28,174 INFO [train.py:903] (0/4) Epoch 12, batch 3000, loss[loss=0.2336, simple_loss=0.3185, pruned_loss=0.07429, over 19628.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3048, pruned_loss=0.07779, over 3838199.14 frames. ], batch size: 57, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:49:28,175 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 21:49:40,666 INFO [train.py:937] (0/4) Epoch 12, validation: loss=0.1772, simple_loss=0.2779, pruned_loss=0.0383, over 944034.00 frames. +2023-04-01 21:49:40,667 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18224MB +2023-04-01 21:49:45,494 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 21:50:38,050 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:50:42,293 INFO [train.py:903] (0/4) Epoch 12, batch 3050, loss[loss=0.2393, simple_loss=0.3167, pruned_loss=0.081, over 19625.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3047, pruned_loss=0.07773, over 3842159.65 frames. ], batch size: 57, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:51:04,779 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 5.238e+02 6.566e+02 8.426e+02 1.854e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 21:51:20,716 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9253, 1.9663, 2.1789, 2.6371, 1.7554, 2.5518, 2.4671, 2.0804], + device='cuda:0'), covar=tensor([0.3281, 0.2882, 0.1338, 0.1591, 0.3144, 0.1359, 0.3302, 0.2531], + device='cuda:0'), in_proj_covar=tensor([0.0795, 0.0822, 0.0648, 0.0891, 0.0783, 0.0708, 0.0785, 0.0715], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 21:51:43,590 INFO [train.py:903] (0/4) Epoch 12, batch 3100, loss[loss=0.2148, simple_loss=0.3012, pruned_loss=0.06422, over 19531.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3062, pruned_loss=0.07909, over 3829418.72 frames. ], batch size: 56, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:14,923 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3817, 2.1532, 1.6232, 1.4625, 2.0152, 1.2215, 1.4350, 1.8876], + device='cuda:0'), covar=tensor([0.0928, 0.0744, 0.1128, 0.0742, 0.0480, 0.1242, 0.0592, 0.0351], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0301, 0.0323, 0.0244, 0.0234, 0.0322, 0.0286, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 21:52:18,458 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 21:52:36,393 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:52:45,562 INFO [train.py:903] (0/4) Epoch 12, batch 3150, loss[loss=0.2465, simple_loss=0.3002, pruned_loss=0.09639, over 19359.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3062, pruned_loss=0.07895, over 3835173.60 frames. 
], batch size: 44, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:53,234 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.18 vs. limit=5.0 +2023-04-01 21:53:07,630 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.745e+02 5.622e+02 7.364e+02 8.683e+02 1.879e+03, threshold=1.473e+03, percent-clipped=3.0 +2023-04-01 21:53:15,176 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 21:53:32,902 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0159, 1.3574, 1.9423, 1.4695, 2.9376, 4.4628, 4.4629, 4.9154], + device='cuda:0'), covar=tensor([0.1753, 0.3499, 0.2981, 0.2058, 0.0529, 0.0173, 0.0155, 0.0122], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0298, 0.0326, 0.0252, 0.0216, 0.0159, 0.0205, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 21:53:47,084 INFO [train.py:903] (0/4) Epoch 12, batch 3200, loss[loss=0.2363, simple_loss=0.3076, pruned_loss=0.08253, over 18784.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3075, pruned_loss=0.07989, over 3833620.45 frames. ], batch size: 74, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:39,708 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:54:50,776 INFO [train.py:903] (0/4) Epoch 12, batch 3250, loss[loss=0.2213, simple_loss=0.3014, pruned_loss=0.07055, over 19539.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3061, pruned_loss=0.07917, over 3830141.26 frames. ], batch size: 56, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:11,630 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:13,651 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.227e+02 6.164e+02 7.465e+02 1.234e+03, threshold=1.233e+03, percent-clipped=0.0 +2023-04-01 21:55:54,299 INFO [train.py:903] (0/4) Epoch 12, batch 3300, loss[loss=0.2152, simple_loss=0.288, pruned_loss=0.07123, over 19740.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3062, pruned_loss=0.07935, over 3825121.59 frames. ], batch size: 51, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:57,128 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:57,908 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 21:56:26,566 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:56:55,610 INFO [train.py:903] (0/4) Epoch 12, batch 3350, loss[loss=0.2244, simple_loss=0.3045, pruned_loss=0.07216, over 18194.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3061, pruned_loss=0.07911, over 3828532.45 frames. 
], batch size: 83, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:57:18,021 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 5.251e+02 6.145e+02 6.896e+02 1.617e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-01 21:57:20,569 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:57:57,211 INFO [train.py:903] (0/4) Epoch 12, batch 3400, loss[loss=0.2275, simple_loss=0.3087, pruned_loss=0.07315, over 19413.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3055, pruned_loss=0.07839, over 3824288.00 frames. ], batch size: 70, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:00,757 INFO [train.py:903] (0/4) Epoch 12, batch 3450, loss[loss=0.2128, simple_loss=0.2992, pruned_loss=0.06321, over 19760.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3049, pruned_loss=0.0777, over 3824971.00 frames. ], batch size: 54, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:06,193 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 21:59:21,424 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78574.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:59:23,278 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.692e+02 7.472e+02 9.495e+02 2.057e+03, threshold=1.494e+03, percent-clipped=9.0 +2023-04-01 21:59:44,669 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:00:03,311 INFO [train.py:903] (0/4) Epoch 12, batch 3500, loss[loss=0.2269, simple_loss=0.3064, pruned_loss=0.07371, over 19675.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3061, pruned_loss=0.07877, over 3804879.02 frames. ], batch size: 58, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 22:00:34,900 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-01 22:00:55,180 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9063, 1.7328, 1.5441, 2.0008, 1.6057, 1.6840, 1.5253, 1.8559], + device='cuda:0'), covar=tensor([0.0938, 0.1421, 0.1402, 0.0901, 0.1293, 0.0502, 0.1238, 0.0649], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0349, 0.0293, 0.0239, 0.0294, 0.0241, 0.0278, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:01:05,331 INFO [train.py:903] (0/4) Epoch 12, batch 3550, loss[loss=0.2165, simple_loss=0.302, pruned_loss=0.06547, over 19601.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3064, pruned_loss=0.07874, over 3796264.76 frames. ], batch size: 61, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:01:26,755 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 5.737e+02 7.042e+02 1.025e+03 1.962e+03, threshold=1.408e+03, percent-clipped=6.0 +2023-04-01 22:02:07,318 INFO [train.py:903] (0/4) Epoch 12, batch 3600, loss[loss=0.273, simple_loss=0.3321, pruned_loss=0.1069, over 13452.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3071, pruned_loss=0.07939, over 3787924.54 frames. 
], batch size: 137, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:02:07,771 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6871, 1.6582, 1.5739, 1.3090, 1.2299, 1.4259, 0.2168, 0.6414], + device='cuda:0'), covar=tensor([0.0467, 0.0483, 0.0286, 0.0454, 0.0920, 0.0487, 0.0895, 0.0787], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0336, 0.0334, 0.0360, 0.0432, 0.0359, 0.0317, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:02:08,900 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:03:09,180 INFO [train.py:903] (0/4) Epoch 12, batch 3650, loss[loss=0.2472, simple_loss=0.3209, pruned_loss=0.08673, over 19243.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3065, pruned_loss=0.07941, over 3785547.92 frames. ], batch size: 66, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:03:33,805 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.318e+02 6.562e+02 8.219e+02 2.478e+03, threshold=1.312e+03, percent-clipped=4.0 +2023-04-01 22:03:37,877 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2708, 1.3076, 1.5571, 1.4874, 2.1962, 1.9411, 2.2745, 0.7072], + device='cuda:0'), covar=tensor([0.2308, 0.3930, 0.2475, 0.1778, 0.1434, 0.2041, 0.1373, 0.3876], + device='cuda:0'), in_proj_covar=tensor([0.0488, 0.0577, 0.0606, 0.0437, 0.0592, 0.0488, 0.0643, 0.0492], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:04:14,201 INFO [train.py:903] (0/4) Epoch 12, batch 3700, loss[loss=0.2531, simple_loss=0.3272, pruned_loss=0.08952, over 18720.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3065, pruned_loss=0.07902, over 3793218.10 frames. ], batch size: 74, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:04:31,377 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:05:15,862 INFO [train.py:903] (0/4) Epoch 12, batch 3750, loss[loss=0.2173, simple_loss=0.2958, pruned_loss=0.06944, over 19837.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3057, pruned_loss=0.07872, over 3817007.10 frames. ], batch size: 52, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:05:18,441 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3202, 3.8926, 2.6399, 3.5022, 0.8965, 3.6726, 3.6360, 3.8286], + device='cuda:0'), covar=tensor([0.0704, 0.1074, 0.2058, 0.0798, 0.4185, 0.0831, 0.0880, 0.1085], + device='cuda:0'), in_proj_covar=tensor([0.0444, 0.0369, 0.0445, 0.0321, 0.0381, 0.0376, 0.0364, 0.0399], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:05:37,732 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.608e+02 5.233e+02 6.179e+02 8.242e+02 1.500e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-01 22:06:16,424 INFO [train.py:903] (0/4) Epoch 12, batch 3800, loss[loss=0.2418, simple_loss=0.3125, pruned_loss=0.08562, over 19847.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3046, pruned_loss=0.07826, over 3832904.71 frames. 
], batch size: 52, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:06:29,201 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:06:53,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 22:06:54,058 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:17,949 INFO [train.py:903] (0/4) Epoch 12, batch 3850, loss[loss=0.2453, simple_loss=0.3223, pruned_loss=0.08412, over 19786.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.305, pruned_loss=0.07825, over 3832037.70 frames. ], batch size: 56, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:07:19,947 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 22:07:27,650 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 22:07:28,312 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:40,738 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.982e+02 6.977e+02 9.373e+02 2.137e+03, threshold=1.395e+03, percent-clipped=8.0 +2023-04-01 22:07:49,874 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:58,091 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:08:21,045 INFO [train.py:903] (0/4) Epoch 12, batch 3900, loss[loss=0.2533, simple_loss=0.3227, pruned_loss=0.09196, over 19291.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3049, pruned_loss=0.07797, over 3831341.63 frames. ], batch size: 66, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:08:51,031 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79033.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:09:22,295 INFO [train.py:903] (0/4) Epoch 12, batch 3950, loss[loss=0.2406, simple_loss=0.3179, pruned_loss=0.08161, over 19659.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.305, pruned_loss=0.07825, over 3828807.52 frames. ], batch size: 58, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:09:29,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 22:09:40,906 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5397, 2.3966, 1.6548, 1.4920, 2.1490, 1.1867, 1.4226, 1.9006], + device='cuda:0'), covar=tensor([0.0970, 0.0614, 0.1065, 0.0739, 0.0498, 0.1227, 0.0766, 0.0452], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0300, 0.0326, 0.0244, 0.0235, 0.0318, 0.0287, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:09:43,636 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.132e+02 6.993e+02 8.356e+02 2.478e+03, threshold=1.399e+03, percent-clipped=5.0 +2023-04-01 22:10:04,420 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.52 vs. 
limit=5.0 +2023-04-01 22:10:18,509 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4554, 1.3022, 1.3221, 1.8517, 1.5832, 1.6784, 1.7806, 1.5435], + device='cuda:0'), covar=tensor([0.0873, 0.0985, 0.1094, 0.0684, 0.0790, 0.0726, 0.0780, 0.0710], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0225, 0.0223, 0.0247, 0.0236, 0.0211, 0.0195, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 22:10:22,895 INFO [train.py:903] (0/4) Epoch 12, batch 4000, loss[loss=0.2497, simple_loss=0.3335, pruned_loss=0.08293, over 19532.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3055, pruned_loss=0.07847, over 3840746.55 frames. ], batch size: 64, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:13,119 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 22:11:24,601 INFO [train.py:903] (0/4) Epoch 12, batch 4050, loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.05999, over 19601.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.305, pruned_loss=0.07793, over 3845793.41 frames. ], batch size: 52, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:47,121 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.067e+02 6.190e+02 7.758e+02 2.001e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-01 22:12:07,801 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:12:26,660 INFO [train.py:903] (0/4) Epoch 12, batch 4100, loss[loss=0.2481, simple_loss=0.3202, pruned_loss=0.08803, over 19773.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3048, pruned_loss=0.07807, over 3835907.70 frames. ], batch size: 56, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:12:39,286 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79218.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:13:03,848 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 22:13:27,006 INFO [train.py:903] (0/4) Epoch 12, batch 4150, loss[loss=0.1758, simple_loss=0.2493, pruned_loss=0.0511, over 19749.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3042, pruned_loss=0.07774, over 3833463.70 frames. 
], batch size: 47, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:13:45,778 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9341, 1.9920, 2.1749, 2.8840, 1.9423, 2.5956, 2.4927, 1.9981], + device='cuda:0'), covar=tensor([0.3675, 0.3335, 0.1503, 0.1700, 0.3497, 0.1537, 0.3525, 0.2771], + device='cuda:0'), in_proj_covar=tensor([0.0801, 0.0826, 0.0654, 0.0897, 0.0788, 0.0714, 0.0793, 0.0715], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 22:13:49,455 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 5.552e+02 6.928e+02 9.304e+02 2.111e+03, threshold=1.386e+03, percent-clipped=6.0 +2023-04-01 22:14:06,919 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79289.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:23,416 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79303.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:14:28,774 INFO [train.py:903] (0/4) Epoch 12, batch 4200, loss[loss=0.1948, simple_loss=0.2724, pruned_loss=0.05861, over 16070.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.305, pruned_loss=0.07838, over 3821801.54 frames. ], batch size: 35, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:14:33,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 22:14:35,749 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79314.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:53,761 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:22,560 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:31,218 INFO [train.py:903] (0/4) Epoch 12, batch 4250, loss[loss=0.2752, simple_loss=0.3509, pruned_loss=0.09975, over 17532.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3053, pruned_loss=0.07832, over 3822920.82 frames. ], batch size: 101, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:15:43,239 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 22:15:52,361 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 4.925e+02 6.557e+02 8.003e+02 1.515e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-01 22:15:54,751 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 22:16:00,677 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:16:13,150 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79393.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:16:33,044 INFO [train.py:903] (0/4) Epoch 12, batch 4300, loss[loss=0.2011, simple_loss=0.2808, pruned_loss=0.06064, over 19752.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3055, pruned_loss=0.07859, over 3837519.65 frames. ], batch size: 51, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:13,283 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79442.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:17:24,354 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-01 22:17:29,506 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2234, 1.9625, 1.8456, 2.0837, 1.9468, 1.8826, 1.9009, 2.0997], + device='cuda:0'), covar=tensor([0.0758, 0.1249, 0.1198, 0.0922, 0.1137, 0.0444, 0.1066, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0352, 0.0292, 0.0240, 0.0297, 0.0242, 0.0281, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:17:32,597 INFO [train.py:903] (0/4) Epoch 12, batch 4350, loss[loss=0.2489, simple_loss=0.3199, pruned_loss=0.08898, over 18236.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3067, pruned_loss=0.079, over 3834055.75 frames. ], batch size: 83, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:52,327 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5075, 3.6694, 4.0068, 3.9896, 2.2018, 3.7467, 3.4179, 3.7201], + device='cuda:0'), covar=tensor([0.1151, 0.2850, 0.0551, 0.0628, 0.4101, 0.1028, 0.0558, 0.0970], + device='cuda:0'), in_proj_covar=tensor([0.0682, 0.0618, 0.0812, 0.0691, 0.0740, 0.0567, 0.0496, 0.0749], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 22:17:54,382 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.544e+02 6.866e+02 8.754e+02 2.036e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 22:18:34,725 INFO [train.py:903] (0/4) Epoch 12, batch 4400, loss[loss=0.2381, simple_loss=0.3168, pruned_loss=0.07971, over 18269.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3081, pruned_loss=0.08016, over 3807590.93 frames. ], batch size: 83, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:18:58,949 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 22:19:07,280 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 22:19:38,007 INFO [train.py:903] (0/4) Epoch 12, batch 4450, loss[loss=0.221, simple_loss=0.3015, pruned_loss=0.07026, over 19674.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3075, pruned_loss=0.07983, over 3796525.94 frames. ], batch size: 53, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:20:00,025 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.129e+02 5.261e+02 6.909e+02 8.531e+02 1.990e+03, threshold=1.382e+03, percent-clipped=5.0 +2023-04-01 22:20:00,429 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1177, 1.2987, 1.7811, 0.8795, 2.4301, 3.0805, 2.7743, 3.2594], + device='cuda:0'), covar=tensor([0.1553, 0.3438, 0.2862, 0.2334, 0.0484, 0.0188, 0.0251, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0300, 0.0329, 0.0252, 0.0219, 0.0161, 0.0207, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:20:33,583 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:20:41,077 INFO [train.py:903] (0/4) Epoch 12, batch 4500, loss[loss=0.2107, simple_loss=0.2828, pruned_loss=0.06926, over 19776.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3052, pruned_loss=0.07838, over 3821235.12 frames. 
], batch size: 48, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:21:29,749 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79647.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:21:43,346 INFO [train.py:903] (0/4) Epoch 12, batch 4550, loss[loss=0.2141, simple_loss=0.292, pruned_loss=0.06813, over 19753.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3049, pruned_loss=0.07827, over 3826864.07 frames. ], batch size: 54, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:21:52,482 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 22:22:01,888 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9779, 1.5655, 1.4261, 1.6283, 1.5073, 1.4118, 1.2428, 1.6787], + device='cuda:0'), covar=tensor([0.0856, 0.1187, 0.1458, 0.0977, 0.1243, 0.0709, 0.1588, 0.0759], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0353, 0.0293, 0.0241, 0.0298, 0.0243, 0.0283, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:22:04,157 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:04,981 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.458e+02 6.277e+02 7.572e+02 1.495e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-01 22:22:08,805 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 22:22:15,964 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 22:22:23,291 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3863, 1.4649, 1.7097, 1.5829, 2.6889, 2.2649, 2.8463, 1.0486], + device='cuda:0'), covar=tensor([0.2085, 0.3500, 0.2127, 0.1637, 0.1296, 0.1745, 0.1255, 0.3613], + device='cuda:0'), in_proj_covar=tensor([0.0491, 0.0578, 0.0608, 0.0438, 0.0593, 0.0493, 0.0643, 0.0494], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:22:28,829 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9292, 1.5694, 1.4335, 1.7389, 1.6273, 1.5898, 1.4085, 1.7525], + device='cuda:0'), covar=tensor([0.0863, 0.1197, 0.1389, 0.0965, 0.1052, 0.0501, 0.1321, 0.0673], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0353, 0.0292, 0.0240, 0.0297, 0.0242, 0.0281, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:22:29,747 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:33,576 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:44,778 INFO [train.py:903] (0/4) Epoch 12, batch 4600, loss[loss=0.2313, simple_loss=0.3156, pruned_loss=0.07346, over 19659.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3054, pruned_loss=0.07861, over 3816125.91 frames. 
], batch size: 55, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:04,274 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79723.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:07,617 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:18,661 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1405, 1.9893, 1.8297, 1.6435, 1.3751, 1.6361, 0.3423, 0.9610], + device='cuda:0'), covar=tensor([0.0393, 0.0436, 0.0330, 0.0567, 0.0947, 0.0680, 0.0958, 0.0786], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0330, 0.0330, 0.0355, 0.0430, 0.0357, 0.0310, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:23:21,764 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79737.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:23:46,648 INFO [train.py:903] (0/4) Epoch 12, batch 4650, loss[loss=0.2361, simple_loss=0.3009, pruned_loss=0.08562, over 19806.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.306, pruned_loss=0.07908, over 3820760.54 frames. ], batch size: 48, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:51,764 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79762.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:24:06,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 22:24:09,613 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 5.614e+02 7.010e+02 8.934e+02 1.991e+03, threshold=1.402e+03, percent-clipped=7.0 +2023-04-01 22:24:14,656 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:24:15,648 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 22:24:37,945 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 22:24:48,915 INFO [train.py:903] (0/4) Epoch 12, batch 4700, loss[loss=0.2274, simple_loss=0.3041, pruned_loss=0.07539, over 19785.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3054, pruned_loss=0.079, over 3824784.28 frames. ], batch size: 56, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:24:52,486 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:12,482 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 22:25:30,356 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:45,609 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79852.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:25:52,118 INFO [train.py:903] (0/4) Epoch 12, batch 4750, loss[loss=0.2282, simple_loss=0.3011, pruned_loss=0.07766, over 18283.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3056, pruned_loss=0.07879, over 3819946.27 frames. 
], batch size: 84, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:14,209 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.848e+02 5.173e+02 6.366e+02 7.623e+02 1.625e+03, threshold=1.273e+03, percent-clipped=2.0 +2023-04-01 22:26:54,256 INFO [train.py:903] (0/4) Epoch 12, batch 4800, loss[loss=0.2123, simple_loss=0.2979, pruned_loss=0.06338, over 19531.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3039, pruned_loss=0.07795, over 3819359.02 frames. ], batch size: 64, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:27:42,673 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:27:56,277 INFO [train.py:903] (0/4) Epoch 12, batch 4850, loss[loss=0.2194, simple_loss=0.2997, pruned_loss=0.06952, over 19497.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3042, pruned_loss=0.07816, over 3810593.53 frames. ], batch size: 64, lr: 6.88e-03, grad_scale: 16.0 +2023-04-01 22:28:19,179 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 5.432e+02 6.685e+02 9.186e+02 1.976e+03, threshold=1.337e+03, percent-clipped=11.0 +2023-04-01 22:28:23,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 22:28:43,826 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 22:28:48,541 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-80000.pt +2023-04-01 22:28:49,943 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 22:28:51,011 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 22:28:57,919 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1060, 1.8270, 1.3947, 1.1268, 1.5644, 1.0737, 1.1182, 1.6515], + device='cuda:0'), covar=tensor([0.0766, 0.0721, 0.1059, 0.0761, 0.0532, 0.1204, 0.0643, 0.0323], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0298, 0.0324, 0.0244, 0.0237, 0.0315, 0.0287, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:28:59,976 INFO [train.py:903] (0/4) Epoch 12, batch 4900, loss[loss=0.2087, simple_loss=0.2868, pruned_loss=0.06534, over 19653.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3031, pruned_loss=0.07734, over 3812688.06 frames. ], batch size: 53, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:29:02,310 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 22:29:13,755 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80018.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:29:14,564 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:29:21,258 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 22:29:43,698 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80043.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:30:03,749 INFO [train.py:903] (0/4) Epoch 12, batch 4950, loss[loss=0.2759, simple_loss=0.3398, pruned_loss=0.106, over 19347.00 frames. 
], tot_loss[loss=0.2297, simple_loss=0.3039, pruned_loss=0.07778, over 3814812.71 frames. ], batch size: 66, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:30:08,680 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:13,639 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:20,457 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 22:30:26,823 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.263e+02 6.788e+02 8.958e+02 2.034e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 22:30:44,554 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80091.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:45,319 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 22:30:52,457 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:04,653 INFO [train.py:903] (0/4) Epoch 12, batch 5000, loss[loss=0.2109, simple_loss=0.2962, pruned_loss=0.06282, over 19645.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3045, pruned_loss=0.07823, over 3818982.13 frames. ], batch size: 60, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:31:05,093 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80108.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:31:13,747 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 22:31:22,084 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:24,052 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:25,187 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 22:31:37,099 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80133.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:31:38,193 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:54,278 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0596, 1.6184, 1.6932, 2.0235, 1.8522, 1.7731, 1.6704, 1.8913], + device='cuda:0'), covar=tensor([0.0865, 0.1517, 0.1393, 0.0895, 0.1207, 0.0498, 0.1198, 0.0689], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0351, 0.0294, 0.0240, 0.0298, 0.0243, 0.0281, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:32:06,837 INFO [train.py:903] (0/4) Epoch 12, batch 5050, loss[loss=0.2399, simple_loss=0.3173, pruned_loss=0.0812, over 19652.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3057, pruned_loss=0.07889, over 3826245.31 frames. 
], batch size: 58, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:32:30,894 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 5.736e+02 7.301e+02 9.465e+02 2.500e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 22:32:33,496 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.7013, 0.9540, 1.2048, 0.5569, 1.4686, 1.7113, 1.4984, 1.8017], + device='cuda:0'), covar=tensor([0.1223, 0.2513, 0.2214, 0.2054, 0.0825, 0.0365, 0.0299, 0.0302], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0299, 0.0332, 0.0253, 0.0218, 0.0162, 0.0207, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:32:41,351 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 22:33:08,740 INFO [train.py:903] (0/4) Epoch 12, batch 5100, loss[loss=0.2173, simple_loss=0.3013, pruned_loss=0.06666, over 18089.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3054, pruned_loss=0.07884, over 3823286.76 frames. ], batch size: 83, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:33:21,044 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 22:33:23,204 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 22:33:26,508 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 22:33:47,569 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:33:52,237 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9878, 1.9012, 1.8314, 1.6774, 1.6080, 1.7310, 0.9451, 1.3159], + device='cuda:0'), covar=tensor([0.0379, 0.0461, 0.0267, 0.0401, 0.0641, 0.0572, 0.0756, 0.0595], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0330, 0.0331, 0.0356, 0.0429, 0.0358, 0.0311, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:34:11,774 INFO [train.py:903] (0/4) Epoch 12, batch 5150, loss[loss=0.3211, simple_loss=0.3712, pruned_loss=0.1355, over 18730.00 frames. ], tot_loss[loss=0.231, simple_loss=0.305, pruned_loss=0.0785, over 3824207.72 frames. ], batch size: 74, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:34:23,660 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 22:34:34,583 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.377e+02 5.205e+02 6.062e+02 7.794e+02 1.645e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-01 22:34:59,508 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 22:35:05,763 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:13,712 INFO [train.py:903] (0/4) Epoch 12, batch 5200, loss[loss=0.2052, simple_loss=0.2738, pruned_loss=0.06828, over 19730.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3042, pruned_loss=0.07824, over 3825743.65 frames. 
], batch size: 46, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:35:26,505 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:27,311 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 22:35:58,809 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:36:00,979 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8181, 4.1875, 4.5159, 4.5163, 1.8060, 4.2109, 3.6911, 4.1870], + device='cuda:0'), covar=tensor([0.1589, 0.0974, 0.0629, 0.0637, 0.5655, 0.0782, 0.0619, 0.1195], + device='cuda:0'), in_proj_covar=tensor([0.0682, 0.0614, 0.0811, 0.0690, 0.0730, 0.0561, 0.0490, 0.0735], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 22:36:13,580 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 22:36:16,622 INFO [train.py:903] (0/4) Epoch 12, batch 5250, loss[loss=0.249, simple_loss=0.3269, pruned_loss=0.0855, over 19406.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3045, pruned_loss=0.07803, over 3824053.63 frames. ], batch size: 70, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:36:40,819 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 22:36:41,380 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.664e+02 5.603e+02 6.465e+02 8.351e+02 1.434e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 22:36:56,989 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80390.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:18,481 INFO [train.py:903] (0/4) Epoch 12, batch 5300, loss[loss=0.2106, simple_loss=0.2955, pruned_loss=0.06288, over 19532.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3053, pruned_loss=0.07853, over 3830598.84 frames. ], batch size: 54, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:37:22,932 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:26,625 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-01 22:37:28,731 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:42,145 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 22:38:23,298 INFO [train.py:903] (0/4) Epoch 12, batch 5350, loss[loss=0.2003, simple_loss=0.2804, pruned_loss=0.06005, over 19832.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3048, pruned_loss=0.07815, over 3832011.77 frames. ], batch size: 52, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:38:44,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.085e+02 6.728e+02 8.660e+02 2.071e+03, threshold=1.346e+03, percent-clipped=4.0 +2023-04-01 22:38:50,333 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:38:59,002 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 22:38:59,483 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2294, 2.2720, 2.5364, 3.2675, 2.2725, 3.2461, 2.7102, 2.2531], + device='cuda:0'), covar=tensor([0.3661, 0.3301, 0.1431, 0.1917, 0.3672, 0.1451, 0.3436, 0.2709], + device='cuda:0'), in_proj_covar=tensor([0.0805, 0.0826, 0.0656, 0.0894, 0.0790, 0.0716, 0.0790, 0.0718], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 22:39:08,254 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80495.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:23,541 INFO [train.py:903] (0/4) Epoch 12, batch 5400, loss[loss=0.2322, simple_loss=0.3116, pruned_loss=0.07637, over 19675.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3038, pruned_loss=0.07736, over 3842680.37 frames. ], batch size: 53, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:39:28,316 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80512.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:38,446 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:40:24,799 INFO [train.py:903] (0/4) Epoch 12, batch 5450, loss[loss=0.1769, simple_loss=0.2544, pruned_loss=0.04964, over 19741.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3037, pruned_loss=0.07759, over 3841002.23 frames. ], batch size: 46, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:40:35,127 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1468, 2.0790, 1.8415, 1.7020, 1.5018, 1.6870, 0.5339, 1.1262], + device='cuda:0'), covar=tensor([0.0414, 0.0475, 0.0353, 0.0552, 0.0922, 0.0732, 0.0982, 0.0830], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0330, 0.0333, 0.0356, 0.0428, 0.0358, 0.0312, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:40:49,072 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 5.165e+02 6.319e+02 8.444e+02 1.726e+03, threshold=1.264e+03, percent-clipped=5.0 +2023-04-01 22:41:26,455 INFO [train.py:903] (0/4) Epoch 12, batch 5500, loss[loss=0.279, simple_loss=0.3457, pruned_loss=0.1062, over 19304.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3046, pruned_loss=0.07812, over 3837573.61 frames. ], batch size: 66, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:41:53,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 22:42:12,355 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:42:29,082 INFO [train.py:903] (0/4) Epoch 12, batch 5550, loss[loss=0.2113, simple_loss=0.2931, pruned_loss=0.06475, over 19525.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3053, pruned_loss=0.07856, over 3833065.37 frames. ], batch size: 54, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:42:38,034 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 22:42:51,877 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.910e+02 5.291e+02 6.725e+02 8.423e+02 1.958e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-01 22:42:53,857 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-04-01 22:43:28,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 22:43:31,866 INFO [train.py:903] (0/4) Epoch 12, batch 5600, loss[loss=0.2455, simple_loss=0.3222, pruned_loss=0.08441, over 19304.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3049, pruned_loss=0.07763, over 3836538.59 frames. ], batch size: 66, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:44:10,246 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 22:44:29,082 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:30,092 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:33,519 INFO [train.py:903] (0/4) Epoch 12, batch 5650, loss[loss=0.1987, simple_loss=0.2791, pruned_loss=0.0591, over 19476.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3042, pruned_loss=0.07745, over 3834868.73 frames. ], batch size: 49, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:44:36,179 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:57,730 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.237e+02 6.303e+02 7.862e+02 2.175e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-01 22:45:15,165 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:45:18,720 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3672, 1.3358, 1.7734, 1.2846, 2.4457, 3.1315, 2.9388, 3.3256], + device='cuda:0'), covar=tensor([0.1481, 0.3345, 0.2927, 0.2215, 0.0785, 0.0273, 0.0231, 0.0263], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0299, 0.0330, 0.0252, 0.0219, 0.0161, 0.0207, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 22:45:20,711 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 22:45:35,185 INFO [train.py:903] (0/4) Epoch 12, batch 5700, loss[loss=0.2522, simple_loss=0.3178, pruned_loss=0.09326, over 19524.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3046, pruned_loss=0.07781, over 3833432.02 frames. ], batch size: 54, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:45:57,696 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:21,578 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7924, 1.3660, 1.4633, 1.6183, 3.2944, 1.1198, 2.1577, 3.6579], + device='cuda:0'), covar=tensor([0.0415, 0.2552, 0.2716, 0.1795, 0.0754, 0.2513, 0.1416, 0.0281], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0341, 0.0353, 0.0320, 0.0347, 0.0332, 0.0336, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:46:35,800 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:38,760 INFO [train.py:903] (0/4) Epoch 12, batch 5750, loss[loss=0.2657, simple_loss=0.3364, pruned_loss=0.09745, over 19343.00 frames. 
], tot_loss[loss=0.2289, simple_loss=0.304, pruned_loss=0.07685, over 3843521.30 frames. ], batch size: 70, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:46:39,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 22:46:44,887 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4345, 2.3626, 1.7584, 1.4704, 2.1030, 1.2686, 1.2821, 1.8377], + device='cuda:0'), covar=tensor([0.0973, 0.0646, 0.0978, 0.0733, 0.0460, 0.1211, 0.0716, 0.0460], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0302, 0.0323, 0.0244, 0.0239, 0.0318, 0.0284, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:46:47,879 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 22:46:52,463 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 22:46:52,781 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:47:00,405 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.415e+02 6.686e+02 8.336e+02 1.819e+03, threshold=1.337e+03, percent-clipped=1.0 +2023-04-01 22:47:13,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4454, 1.5039, 1.8627, 1.6455, 2.8100, 2.3712, 2.9665, 1.2036], + device='cuda:0'), covar=tensor([0.2126, 0.3686, 0.2247, 0.1678, 0.1322, 0.1732, 0.1321, 0.3535], + device='cuda:0'), in_proj_covar=tensor([0.0497, 0.0587, 0.0617, 0.0440, 0.0599, 0.0501, 0.0651, 0.0500], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 22:47:40,322 INFO [train.py:903] (0/4) Epoch 12, batch 5800, loss[loss=0.2864, simple_loss=0.3445, pruned_loss=0.1141, over 13893.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3051, pruned_loss=0.07774, over 3822587.48 frames. ], batch size: 139, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:16,645 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 22:48:21,624 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:48:34,430 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8812, 1.8817, 2.2239, 1.9863, 3.0389, 2.4900, 3.0736, 2.0685], + device='cuda:0'), covar=tensor([0.1936, 0.3293, 0.2127, 0.1630, 0.1238, 0.1843, 0.1279, 0.3006], + device='cuda:0'), in_proj_covar=tensor([0.0496, 0.0586, 0.0615, 0.0440, 0.0597, 0.0500, 0.0649, 0.0499], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 22:48:41,885 INFO [train.py:903] (0/4) Epoch 12, batch 5850, loss[loss=0.2048, simple_loss=0.2911, pruned_loss=0.05921, over 19756.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.306, pruned_loss=0.07843, over 3827241.77 frames. 
], batch size: 54, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:57,974 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80971.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:06,079 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.387e+02 6.409e+02 7.183e+02 1.679e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 22:49:10,724 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2677, 3.8708, 2.6359, 3.5370, 1.0581, 3.5970, 3.6282, 3.6992], + device='cuda:0'), covar=tensor([0.0777, 0.1106, 0.1920, 0.0764, 0.3958, 0.0881, 0.0870, 0.1217], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0372, 0.0442, 0.0320, 0.0379, 0.0375, 0.0366, 0.0397], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:49:18,793 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5674, 2.9564, 3.1195, 3.1737, 1.2071, 2.9248, 2.6681, 2.6372], + device='cuda:0'), covar=tensor([0.2895, 0.1748, 0.1496, 0.1808, 0.7214, 0.1894, 0.1318, 0.2804], + device='cuda:0'), in_proj_covar=tensor([0.0687, 0.0617, 0.0814, 0.0700, 0.0738, 0.0567, 0.0500, 0.0750], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 22:49:43,638 INFO [train.py:903] (0/4) Epoch 12, batch 5900, loss[loss=0.2426, simple_loss=0.3258, pruned_loss=0.07976, over 19676.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3067, pruned_loss=0.07913, over 3823556.51 frames. ], batch size: 60, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:49:47,131 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 22:49:55,116 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4506, 2.4140, 1.6828, 1.5466, 2.1857, 1.2518, 1.2269, 1.8489], + device='cuda:0'), covar=tensor([0.1055, 0.0616, 0.0987, 0.0690, 0.0440, 0.1162, 0.0746, 0.0472], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0307, 0.0328, 0.0249, 0.0242, 0.0323, 0.0287, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:49:55,137 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:09,727 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 22:50:25,032 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:35,662 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0714, 1.6862, 1.6077, 1.9781, 1.7084, 1.7415, 1.6170, 1.7901], + device='cuda:0'), covar=tensor([0.0802, 0.1259, 0.1345, 0.0892, 0.1202, 0.0476, 0.1183, 0.0719], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0358, 0.0298, 0.0242, 0.0304, 0.0246, 0.0284, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:50:47,169 INFO [train.py:903] (0/4) Epoch 12, batch 5950, loss[loss=0.2379, simple_loss=0.3136, pruned_loss=0.08108, over 19765.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3064, pruned_loss=0.07873, over 3828132.51 frames. 
], batch size: 63, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:10,057 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.377e+02 6.760e+02 8.757e+02 1.989e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-01 22:51:36,957 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81098.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:51:49,665 INFO [train.py:903] (0/4) Epoch 12, batch 6000, loss[loss=0.2187, simple_loss=0.3026, pruned_loss=0.06742, over 19530.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3059, pruned_loss=0.07845, over 3825622.15 frames. ], batch size: 56, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:49,666 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 22:52:03,357 INFO [train.py:937] (0/4) Epoch 12, validation: loss=0.1765, simple_loss=0.2774, pruned_loss=0.03779, over 944034.00 frames. +2023-04-01 22:52:03,358 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18321MB +2023-04-01 22:52:25,557 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:34,805 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:35,837 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:57,877 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:05,457 INFO [train.py:903] (0/4) Epoch 12, batch 6050, loss[loss=0.2124, simple_loss=0.2882, pruned_loss=0.06828, over 19469.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3039, pruned_loss=0.07789, over 3815011.63 frames. ], batch size: 49, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:53:27,708 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.041e+02 6.677e+02 8.260e+02 1.738e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-01 22:53:52,518 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:57,464 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 22:54:05,930 INFO [train.py:903] (0/4) Epoch 12, batch 6100, loss[loss=0.2049, simple_loss=0.287, pruned_loss=0.06144, over 19590.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3044, pruned_loss=0.07795, over 3815192.68 frames. 
], batch size: 52, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:54:11,795 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:20,933 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:28,710 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:57,826 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:00,301 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:03,881 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9114, 1.3443, 1.0557, 0.9638, 1.1506, 0.9450, 0.9808, 1.2725], + device='cuda:0'), covar=tensor([0.0524, 0.0640, 0.0945, 0.0548, 0.0501, 0.1103, 0.0456, 0.0379], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0303, 0.0325, 0.0247, 0.0240, 0.0320, 0.0282, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:55:06,768 INFO [train.py:903] (0/4) Epoch 12, batch 6150, loss[loss=0.2182, simple_loss=0.2884, pruned_loss=0.07403, over 19604.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3039, pruned_loss=0.0775, over 3826798.63 frames. ], batch size: 50, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:55:31,812 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 22:55:33,032 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 5.279e+02 6.402e+02 8.020e+02 2.167e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-04-01 22:55:33,435 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4968, 1.0453, 1.1466, 1.3467, 1.0618, 1.3190, 1.1881, 1.3017], + device='cuda:0'), covar=tensor([0.0950, 0.1226, 0.1386, 0.0874, 0.1079, 0.0540, 0.1185, 0.0750], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0352, 0.0293, 0.0240, 0.0296, 0.0242, 0.0280, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:55:39,180 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 22:56:12,035 INFO [train.py:903] (0/4) Epoch 12, batch 6200, loss[loss=0.2807, simple_loss=0.3408, pruned_loss=0.1103, over 13324.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3032, pruned_loss=0.07681, over 3822437.72 frames. 
], batch size: 137, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:56:22,821 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81317.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:56:25,114 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:56:32,149 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9905, 3.5925, 2.3598, 3.2569, 0.8622, 3.3862, 3.4062, 3.4998], + device='cuda:0'), covar=tensor([0.0794, 0.1248, 0.2170, 0.0854, 0.3961, 0.0891, 0.0907, 0.1081], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0373, 0.0442, 0.0320, 0.0380, 0.0376, 0.0365, 0.0396], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:56:57,108 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3559, 2.0675, 1.6180, 1.4194, 1.9229, 1.2746, 1.3465, 1.7999], + device='cuda:0'), covar=tensor([0.0970, 0.0655, 0.0879, 0.0681, 0.0467, 0.1098, 0.0634, 0.0430], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0301, 0.0323, 0.0246, 0.0239, 0.0318, 0.0282, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:57:11,771 INFO [train.py:903] (0/4) Epoch 12, batch 6250, loss[loss=0.219, simple_loss=0.2958, pruned_loss=0.07116, over 18148.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3031, pruned_loss=0.07675, over 3819107.33 frames. ], batch size: 83, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:57:33,904 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.420e+02 5.051e+02 6.163e+02 7.297e+02 1.401e+03, threshold=1.233e+03, percent-clipped=3.0 +2023-04-01 22:57:44,021 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 22:58:13,175 INFO [train.py:903] (0/4) Epoch 12, batch 6300, loss[loss=0.2494, simple_loss=0.3229, pruned_loss=0.08792, over 19541.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3024, pruned_loss=0.07653, over 3818131.90 frames. ], batch size: 54, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:58:48,844 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8350, 1.2785, 1.4251, 1.5515, 3.2931, 1.0635, 2.2715, 3.7784], + device='cuda:0'), covar=tensor([0.0398, 0.2685, 0.2891, 0.1799, 0.0773, 0.2531, 0.1378, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0339, 0.0351, 0.0318, 0.0345, 0.0327, 0.0335, 0.0358], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 22:59:14,521 INFO [train.py:903] (0/4) Epoch 12, batch 6350, loss[loss=0.2464, simple_loss=0.3226, pruned_loss=0.0851, over 19525.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3028, pruned_loss=0.07662, over 3830178.32 frames. 
], batch size: 56, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 22:59:28,523 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:39,342 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 5.412e+02 6.997e+02 8.497e+02 1.750e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-04-01 22:59:39,898 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8185, 1.9363, 2.0583, 2.4461, 1.7274, 2.3469, 2.3168, 1.9753], + device='cuda:0'), covar=tensor([0.3373, 0.2747, 0.1432, 0.1626, 0.2977, 0.1423, 0.3390, 0.2542], + device='cuda:0'), in_proj_covar=tensor([0.0803, 0.0826, 0.0650, 0.0889, 0.0788, 0.0715, 0.0786, 0.0717], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 22:59:40,742 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:59,507 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81494.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:14,198 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:16,025 INFO [train.py:903] (0/4) Epoch 12, batch 6400, loss[loss=0.2327, simple_loss=0.3088, pruned_loss=0.07836, over 19531.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3036, pruned_loss=0.07725, over 3832781.48 frames. ], batch size: 56, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:00:45,261 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:01:19,141 INFO [train.py:903] (0/4) Epoch 12, batch 6450, loss[loss=0.208, simple_loss=0.2903, pruned_loss=0.06291, over 19671.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3041, pruned_loss=0.0774, over 3835658.94 frames. ], batch size: 53, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:01:41,365 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 5.839e+02 6.972e+02 8.326e+02 2.886e+03, threshold=1.394e+03, percent-clipped=3.0 +2023-04-01 23:02:03,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81593.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:02:06,515 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 23:02:20,371 INFO [train.py:903] (0/4) Epoch 12, batch 6500, loss[loss=0.2117, simple_loss=0.2909, pruned_loss=0.06626, over 19689.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3035, pruned_loss=0.07724, over 3819171.77 frames. ], batch size: 59, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:02:27,383 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 23:03:22,584 INFO [train.py:903] (0/4) Epoch 12, batch 6550, loss[loss=0.2442, simple_loss=0.3198, pruned_loss=0.08425, over 19457.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3028, pruned_loss=0.07686, over 3819912.31 frames. 
], batch size: 64, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:03:26,197 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:28,570 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:47,197 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 5.338e+02 6.617e+02 7.892e+02 1.534e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 23:04:00,918 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:04:08,904 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9192, 3.5588, 2.5030, 3.1744, 0.8754, 3.4148, 3.2755, 3.4797], + device='cuda:0'), covar=tensor([0.0797, 0.1085, 0.1906, 0.0874, 0.3806, 0.0834, 0.0942, 0.1072], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0372, 0.0440, 0.0317, 0.0378, 0.0372, 0.0366, 0.0397], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:04:24,219 INFO [train.py:903] (0/4) Epoch 12, batch 6600, loss[loss=0.2163, simple_loss=0.2964, pruned_loss=0.06812, over 19611.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3037, pruned_loss=0.07749, over 3814554.26 frames. ], batch size: 61, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:04:56,138 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 23:05:14,743 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1689, 1.2551, 1.8350, 1.4618, 3.1604, 4.5656, 4.4749, 4.9769], + device='cuda:0'), covar=tensor([0.1655, 0.3714, 0.3177, 0.2131, 0.0492, 0.0179, 0.0163, 0.0132], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0299, 0.0328, 0.0252, 0.0220, 0.0163, 0.0206, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:05:26,102 INFO [train.py:903] (0/4) Epoch 12, batch 6650, loss[loss=0.2136, simple_loss=0.3014, pruned_loss=0.06291, over 19617.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3039, pruned_loss=0.07734, over 3816318.43 frames. ], batch size: 57, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:46,833 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:05:47,591 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.145e+02 6.461e+02 8.134e+02 1.737e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-01 23:05:49,127 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:06:26,739 INFO [train.py:903] (0/4) Epoch 12, batch 6700, loss[loss=0.201, simple_loss=0.2681, pruned_loss=0.06695, over 19763.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3046, pruned_loss=0.0776, over 3821710.58 frames. ], batch size: 46, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:07:16,949 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:26,538 INFO [train.py:903] (0/4) Epoch 12, batch 6750, loss[loss=0.27, simple_loss=0.3333, pruned_loss=0.1034, over 18173.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3053, pruned_loss=0.07861, over 3813604.32 frames. 
], batch size: 83, lr: 6.80e-03, grad_scale: 4.0 +2023-04-01 23:07:37,174 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3395, 2.0827, 1.6233, 1.3439, 1.8892, 1.2217, 1.3579, 1.8572], + device='cuda:0'), covar=tensor([0.0800, 0.0660, 0.1025, 0.0718, 0.0490, 0.1191, 0.0551, 0.0359], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0306, 0.0325, 0.0246, 0.0240, 0.0325, 0.0288, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:07:45,296 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:48,725 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7831, 1.4820, 1.4214, 1.7794, 1.5469, 1.6078, 1.4195, 1.6600], + device='cuda:0'), covar=tensor([0.0953, 0.1274, 0.1406, 0.0853, 0.1075, 0.0490, 0.1229, 0.0713], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0349, 0.0293, 0.0237, 0.0293, 0.0240, 0.0278, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:07:49,449 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+02 6.276e+02 7.333e+02 1.082e+03 2.540e+03, threshold=1.467e+03, percent-clipped=11.0 +2023-04-01 23:08:23,231 INFO [train.py:903] (0/4) Epoch 12, batch 6800, loss[loss=0.2101, simple_loss=0.2877, pruned_loss=0.0662, over 19674.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3048, pruned_loss=0.07789, over 3820689.19 frames. ], batch size: 53, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:08:53,196 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-12.pt +2023-04-01 23:09:08,550 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 23:09:09,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 23:09:12,677 INFO [train.py:903] (0/4) Epoch 13, batch 0, loss[loss=0.2348, simple_loss=0.3162, pruned_loss=0.07675, over 19662.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3162, pruned_loss=0.07675, over 19662.00 frames. ], batch size: 55, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:09:12,678 INFO [train.py:928] (0/4) Computing validation loss +2023-04-01 23:09:23,576 INFO [train.py:937] (0/4) Epoch 13, validation: loss=0.176, simple_loss=0.2772, pruned_loss=0.03738, over 944034.00 frames. +2023-04-01 23:09:23,577 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18321MB +2023-04-01 23:09:35,400 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 23:10:14,566 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.222e+02 6.740e+02 8.452e+02 3.268e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-01 23:10:23,827 INFO [train.py:903] (0/4) Epoch 13, batch 50, loss[loss=0.2159, simple_loss=0.2924, pruned_loss=0.06971, over 19610.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3027, pruned_loss=0.07846, over 867891.07 frames. 
], batch size: 61, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:10:39,647 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-82000.pt +2023-04-01 23:10:46,542 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:10:59,200 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 23:11:20,791 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:20,993 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:23,307 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:25,080 INFO [train.py:903] (0/4) Epoch 13, batch 100, loss[loss=0.2489, simple_loss=0.3178, pruned_loss=0.08996, over 19762.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3048, pruned_loss=0.07926, over 1519491.93 frames. ], batch size: 54, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:11:36,612 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 23:11:52,558 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:55,595 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:11,458 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:16,818 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.805e+02 6.218e+02 7.513e+02 1.266e+03, threshold=1.244e+03, percent-clipped=0.0 +2023-04-01 23:12:25,782 INFO [train.py:903] (0/4) Epoch 13, batch 150, loss[loss=0.2024, simple_loss=0.279, pruned_loss=0.06285, over 19591.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3022, pruned_loss=0.07723, over 2037519.95 frames. ], batch size: 52, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:10,404 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0353, 2.7833, 2.0849, 2.5320, 0.9061, 2.6522, 2.6186, 2.6557], + device='cuda:0'), covar=tensor([0.1269, 0.1407, 0.1977, 0.0902, 0.3426, 0.1073, 0.1063, 0.1435], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0370, 0.0444, 0.0316, 0.0380, 0.0374, 0.0367, 0.0399], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:13:23,620 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 23:13:24,765 INFO [train.py:903] (0/4) Epoch 13, batch 200, loss[loss=0.219, simple_loss=0.2985, pruned_loss=0.06969, over 19533.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3038, pruned_loss=0.07756, over 2440191.41 frames. 
], batch size: 54, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:40,467 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:13:42,569 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5445, 2.2519, 2.1296, 2.7920, 2.6166, 2.1981, 2.0700, 2.6630], + device='cuda:0'), covar=tensor([0.0830, 0.1560, 0.1299, 0.0879, 0.1089, 0.0467, 0.1183, 0.0559], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0348, 0.0292, 0.0238, 0.0292, 0.0240, 0.0278, 0.0240], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:14:04,568 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9353, 1.9233, 1.7181, 1.4594, 1.3642, 1.4949, 0.3541, 0.7024], + device='cuda:0'), covar=tensor([0.0634, 0.0578, 0.0374, 0.0641, 0.1247, 0.0730, 0.1068, 0.1097], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0332, 0.0333, 0.0359, 0.0431, 0.0354, 0.0314, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:14:14,395 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.002e+02 5.972e+02 7.403e+02 2.257e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-01 23:14:26,897 INFO [train.py:903] (0/4) Epoch 13, batch 250, loss[loss=0.1883, simple_loss=0.2592, pruned_loss=0.05865, over 19312.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3036, pruned_loss=0.07756, over 2750535.92 frames. ], batch size: 44, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:25,416 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 23:15:26,892 INFO [train.py:903] (0/4) Epoch 13, batch 300, loss[loss=0.2081, simple_loss=0.2792, pruned_loss=0.06851, over 18235.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3029, pruned_loss=0.07712, over 2985816.75 frames. ], batch size: 40, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:18,766 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.753e+02 6.815e+02 9.164e+02 1.837e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 23:16:28,129 INFO [train.py:903] (0/4) Epoch 13, batch 350, loss[loss=0.2417, simple_loss=0.3126, pruned_loss=0.08538, over 19587.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3024, pruned_loss=0.07696, over 3167534.45 frames. ], batch size: 61, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:30,469 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 23:17:28,525 INFO [train.py:903] (0/4) Epoch 13, batch 400, loss[loss=0.2161, simple_loss=0.3025, pruned_loss=0.06489, over 19614.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.302, pruned_loss=0.07684, over 3313157.18 frames. 
], batch size: 57, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:17:47,110 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:04,816 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:10,378 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0030, 3.6263, 2.5667, 3.2409, 0.9858, 3.4812, 3.4114, 3.5108], + device='cuda:0'), covar=tensor([0.0788, 0.1147, 0.1830, 0.0862, 0.3614, 0.0812, 0.0884, 0.1066], + device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0370, 0.0442, 0.0318, 0.0378, 0.0371, 0.0365, 0.0398], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:18:21,965 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.325e+02 6.166e+02 7.720e+02 2.046e+03, threshold=1.233e+03, percent-clipped=4.0 +2023-04-01 23:18:28,196 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6344, 1.3164, 1.3261, 2.1675, 1.8358, 1.9927, 2.1367, 1.9285], + device='cuda:0'), covar=tensor([0.0860, 0.1064, 0.1133, 0.0829, 0.0841, 0.0754, 0.0845, 0.0633], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0226, 0.0226, 0.0245, 0.0234, 0.0211, 0.0193, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 23:18:31,252 INFO [train.py:903] (0/4) Epoch 13, batch 450, loss[loss=0.2072, simple_loss=0.276, pruned_loss=0.06922, over 19742.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3017, pruned_loss=0.07633, over 3432375.08 frames. ], batch size: 46, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:18:53,559 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:04,667 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 23:19:04,702 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 23:19:09,580 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:23,716 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82428.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:25,766 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8764, 1.2916, 1.4024, 1.6326, 3.3935, 1.0616, 2.5156, 3.7394], + device='cuda:0'), covar=tensor([0.0414, 0.2663, 0.2807, 0.1795, 0.0714, 0.2467, 0.1090, 0.0273], + device='cuda:0'), in_proj_covar=tensor([0.0362, 0.0344, 0.0356, 0.0324, 0.0350, 0.0331, 0.0340, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:19:33,820 INFO [train.py:903] (0/4) Epoch 13, batch 500, loss[loss=0.2465, simple_loss=0.3205, pruned_loss=0.08624, over 19672.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3005, pruned_loss=0.07538, over 3540059.22 frames. 
], batch size: 55, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:20:06,459 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:20:27,379 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.153e+02 6.569e+02 8.401e+02 1.477e+03, threshold=1.314e+03, percent-clipped=3.0 +2023-04-01 23:20:35,266 INFO [train.py:903] (0/4) Epoch 13, batch 550, loss[loss=0.1951, simple_loss=0.2667, pruned_loss=0.06178, over 19752.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3019, pruned_loss=0.07598, over 3606078.18 frames. ], batch size: 45, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:21:05,879 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82511.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:21:30,821 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:21:35,045 INFO [train.py:903] (0/4) Epoch 13, batch 600, loss[loss=0.228, simple_loss=0.311, pruned_loss=0.07252, over 19549.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3023, pruned_loss=0.07605, over 3664532.25 frames. ], batch size: 56, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:22:17,360 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 23:22:20,658 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6444, 4.1860, 2.8553, 3.6959, 0.8830, 4.0430, 4.0151, 4.0827], + device='cuda:0'), covar=tensor([0.0571, 0.1026, 0.1746, 0.0782, 0.3980, 0.0691, 0.0800, 0.0857], + device='cuda:0'), in_proj_covar=tensor([0.0442, 0.0372, 0.0442, 0.0319, 0.0381, 0.0374, 0.0367, 0.0401], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:22:28,772 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.313e+02 6.751e+02 8.249e+02 1.619e+03, threshold=1.350e+03, percent-clipped=3.0 +2023-04-01 23:22:36,729 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.74 vs. limit=5.0 +2023-04-01 23:22:36,936 INFO [train.py:903] (0/4) Epoch 13, batch 650, loss[loss=0.2297, simple_loss=0.3094, pruned_loss=0.07501, over 19736.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3036, pruned_loss=0.07681, over 3706810.11 frames. ], batch size: 63, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:22:38,442 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:23:40,973 INFO [train.py:903] (0/4) Epoch 13, batch 700, loss[loss=0.2698, simple_loss=0.3381, pruned_loss=0.1008, over 19591.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3016, pruned_loss=0.07592, over 3733671.29 frames. 
], batch size: 57, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:24:21,261 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3224, 2.1452, 1.6181, 1.3913, 2.0046, 1.2251, 1.2135, 1.8269], + device='cuda:0'), covar=tensor([0.0877, 0.0638, 0.0943, 0.0716, 0.0406, 0.1086, 0.0662, 0.0396], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0307, 0.0329, 0.0248, 0.0242, 0.0323, 0.0289, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:24:36,357 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.358e+02 6.603e+02 8.553e+02 2.977e+03, threshold=1.321e+03, percent-clipped=4.0 +2023-04-01 23:24:44,575 INFO [train.py:903] (0/4) Epoch 13, batch 750, loss[loss=0.2709, simple_loss=0.3364, pruned_loss=0.1027, over 18297.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3024, pruned_loss=0.0765, over 3743761.53 frames. ], batch size: 84, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:10,154 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:28,756 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:33,509 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6981, 1.4886, 1.4976, 1.4285, 3.2612, 1.0439, 2.3385, 3.7044], + device='cuda:0'), covar=tensor([0.0424, 0.2473, 0.2588, 0.1873, 0.0670, 0.2503, 0.1236, 0.0222], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0344, 0.0356, 0.0324, 0.0353, 0.0332, 0.0342, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:25:47,341 INFO [train.py:903] (0/4) Epoch 13, batch 800, loss[loss=0.2725, simple_loss=0.3432, pruned_loss=0.1009, over 19706.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3031, pruned_loss=0.07694, over 3766919.09 frames. ], batch size: 59, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:58,387 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:26:01,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 23:26:20,720 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4338, 2.2662, 1.6668, 1.4441, 2.0757, 1.2657, 1.2746, 1.8984], + device='cuda:0'), covar=tensor([0.0965, 0.0630, 0.0993, 0.0754, 0.0442, 0.1137, 0.0742, 0.0449], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0307, 0.0327, 0.0247, 0.0240, 0.0321, 0.0287, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:26:39,178 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1005, 1.3228, 1.4788, 1.4250, 2.6954, 1.1003, 1.9982, 2.9933], + device='cuda:0'), covar=tensor([0.0491, 0.2525, 0.2531, 0.1642, 0.0759, 0.2162, 0.1115, 0.0331], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0346, 0.0358, 0.0326, 0.0354, 0.0333, 0.0344, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:26:41,539 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7409, 4.1734, 4.4270, 4.4398, 1.7215, 4.1788, 3.6520, 4.0880], + device='cuda:0'), covar=tensor([0.1377, 0.0750, 0.0577, 0.0587, 0.5075, 0.0634, 0.0579, 0.1051], + device='cuda:0'), in_proj_covar=tensor([0.0689, 0.0613, 0.0815, 0.0698, 0.0737, 0.0569, 0.0495, 0.0745], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-01 23:26:42,349 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.821e+02 5.442e+02 6.436e+02 7.821e+02 1.140e+03, threshold=1.287e+03, percent-clipped=0.0 +2023-04-01 23:26:50,570 INFO [train.py:903] (0/4) Epoch 13, batch 850, loss[loss=0.2212, simple_loss=0.2899, pruned_loss=0.07627, over 19305.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3045, pruned_loss=0.07776, over 3782679.06 frames. ], batch size: 44, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:26:53,185 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:26,991 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:38,332 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:44,763 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 23:27:52,714 INFO [train.py:903] (0/4) Epoch 13, batch 900, loss[loss=0.2931, simple_loss=0.3549, pruned_loss=0.1156, over 18919.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3045, pruned_loss=0.07785, over 3789420.96 frames. ], batch size: 74, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:28:19,289 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82855.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:28:47,653 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.906e+02 6.958e+02 9.103e+02 2.196e+03, threshold=1.392e+03, percent-clipped=5.0 +2023-04-01 23:28:59,852 INFO [train.py:903] (0/4) Epoch 13, batch 950, loss[loss=0.2444, simple_loss=0.3265, pruned_loss=0.08112, over 18738.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3058, pruned_loss=0.07845, over 3780032.44 frames. 
], batch size: 74, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:29:04,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 23:29:55,917 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:30:01,532 INFO [train.py:903] (0/4) Epoch 13, batch 1000, loss[loss=0.2584, simple_loss=0.3344, pruned_loss=0.09121, over 18825.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3046, pruned_loss=0.07768, over 3780933.19 frames. ], batch size: 74, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:30:14,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 23:30:44,807 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82970.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:30:48,284 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5408, 2.2233, 2.2257, 2.8567, 2.5495, 2.3730, 2.2938, 2.7728], + device='cuda:0'), covar=tensor([0.0840, 0.1649, 0.1277, 0.0904, 0.1181, 0.0432, 0.1086, 0.0571], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0350, 0.0296, 0.0238, 0.0296, 0.0241, 0.0280, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:30:48,368 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4818, 1.5385, 2.0748, 1.7130, 3.3296, 2.6442, 3.5900, 1.5980], + device='cuda:0'), covar=tensor([0.2172, 0.3806, 0.2384, 0.1689, 0.1315, 0.1735, 0.1434, 0.3563], + device='cuda:0'), in_proj_covar=tensor([0.0491, 0.0582, 0.0613, 0.0441, 0.0594, 0.0496, 0.0643, 0.0495], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:30:51,730 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5479, 1.0979, 1.3561, 1.1177, 2.2018, 0.9118, 2.0998, 2.4018], + device='cuda:0'), covar=tensor([0.0674, 0.2748, 0.2673, 0.1681, 0.0904, 0.2056, 0.0866, 0.0503], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0346, 0.0358, 0.0326, 0.0355, 0.0335, 0.0342, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:30:52,621 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 23:30:54,574 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.161e+02 6.395e+02 8.326e+02 2.115e+03, threshold=1.279e+03, percent-clipped=2.0 +2023-04-01 23:30:57,042 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82981.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:31:02,716 INFO [train.py:903] (0/4) Epoch 13, batch 1050, loss[loss=0.2238, simple_loss=0.2875, pruned_loss=0.08008, over 19764.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3053, pruned_loss=0.07831, over 3789460.67 frames. 
], batch size: 48, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:31:27,833 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1605, 2.0737, 1.7946, 1.7172, 1.5136, 1.6977, 0.3746, 1.0239], + device='cuda:0'), covar=tensor([0.0450, 0.0441, 0.0370, 0.0529, 0.1007, 0.0604, 0.1018, 0.0827], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0333, 0.0333, 0.0360, 0.0429, 0.0356, 0.0316, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:31:34,341 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 23:32:04,664 INFO [train.py:903] (0/4) Epoch 13, batch 1100, loss[loss=0.2225, simple_loss=0.2942, pruned_loss=0.07542, over 19487.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3046, pruned_loss=0.07746, over 3810299.60 frames. ], batch size: 49, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:32:19,489 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:32:30,388 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1822, 1.3862, 1.6078, 1.2290, 2.7093, 3.4944, 3.2732, 3.7493], + device='cuda:0'), covar=tensor([0.1596, 0.3296, 0.3136, 0.2191, 0.0528, 0.0188, 0.0202, 0.0212], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0300, 0.0329, 0.0253, 0.0221, 0.0163, 0.0207, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:32:57,833 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 5.014e+02 6.117e+02 7.878e+02 1.226e+03, threshold=1.223e+03, percent-clipped=0.0 +2023-04-01 23:32:58,253 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:08,489 INFO [train.py:903] (0/4) Epoch 13, batch 1150, loss[loss=0.2243, simple_loss=0.2917, pruned_loss=0.07843, over 19097.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3047, pruned_loss=0.07753, over 3809121.16 frames. ], batch size: 42, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:33:30,516 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:40,674 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1941, 1.8145, 1.4187, 1.2048, 1.6091, 1.0786, 1.1796, 1.6600], + device='cuda:0'), covar=tensor([0.0681, 0.0718, 0.1010, 0.0654, 0.0460, 0.1210, 0.0542, 0.0368], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0311, 0.0331, 0.0248, 0.0241, 0.0327, 0.0291, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:34:10,939 INFO [train.py:903] (0/4) Epoch 13, batch 1200, loss[loss=0.1868, simple_loss=0.266, pruned_loss=0.05386, over 19596.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3037, pruned_loss=0.07689, over 3819729.20 frames. ], batch size: 52, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:34:40,325 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-01 23:35:06,465 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.101e+02 7.395e+02 1.032e+03 1.939e+03, threshold=1.479e+03, percent-clipped=13.0 +2023-04-01 23:35:07,336 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-01 23:35:12,376 INFO [train.py:903] (0/4) Epoch 13, batch 1250, loss[loss=0.2338, simple_loss=0.304, pruned_loss=0.08186, over 19141.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3035, pruned_loss=0.07685, over 3810176.15 frames. ], batch size: 42, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:35:48,373 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83214.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:36:02,476 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83226.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:36:13,263 INFO [train.py:903] (0/4) Epoch 13, batch 1300, loss[loss=0.2379, simple_loss=0.314, pruned_loss=0.08086, over 19544.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3027, pruned_loss=0.07625, over 3808658.92 frames. ], batch size: 54, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:36:33,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83251.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:37:05,206 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5357, 2.2524, 2.2244, 2.8357, 1.9379, 2.9456, 2.7417, 2.7432], + device='cuda:0'), covar=tensor([0.0628, 0.0728, 0.0810, 0.0789, 0.0918, 0.0567, 0.0801, 0.0526], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0222, 0.0222, 0.0243, 0.0233, 0.0210, 0.0193, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 23:37:08,171 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 5.042e+02 6.195e+02 7.677e+02 1.204e+03, threshold=1.239e+03, percent-clipped=0.0 +2023-04-01 23:37:17,151 INFO [train.py:903] (0/4) Epoch 13, batch 1350, loss[loss=0.2328, simple_loss=0.2916, pruned_loss=0.08695, over 19733.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3033, pruned_loss=0.07681, over 3811265.08 frames. ], batch size: 46, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:37:37,997 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:37:38,117 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:37:39,612 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.09 vs. 
limit=5.0 +2023-04-01 23:37:42,732 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4333, 1.3442, 1.2948, 1.6931, 1.3690, 1.7152, 1.7636, 1.5769], + device='cuda:0'), covar=tensor([0.0928, 0.1019, 0.1120, 0.0804, 0.0865, 0.0747, 0.0816, 0.0699], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0222, 0.0222, 0.0243, 0.0232, 0.0210, 0.0193, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-01 23:38:06,338 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:09,935 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:21,070 INFO [train.py:903] (0/4) Epoch 13, batch 1400, loss[loss=0.2348, simple_loss=0.3106, pruned_loss=0.07943, over 19669.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3036, pruned_loss=0.07681, over 3823593.96 frames. ], batch size: 55, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:38:38,125 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 23:39:16,728 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.533e+02 6.494e+02 7.603e+02 1.656e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 23:39:20,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 23:39:22,625 INFO [train.py:903] (0/4) Epoch 13, batch 1450, loss[loss=0.2077, simple_loss=0.2768, pruned_loss=0.0693, over 19395.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3056, pruned_loss=0.0781, over 3815622.28 frames. ], batch size: 47, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:24,405 INFO [train.py:903] (0/4) Epoch 13, batch 1500, loss[loss=0.216, simple_loss=0.3063, pruned_loss=0.06288, over 19699.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3047, pruned_loss=0.07727, over 3810488.89 frames. ], batch size: 59, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:29,216 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:40:44,776 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 23:41:19,826 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.552e+02 5.496e+02 6.437e+02 7.955e+02 2.023e+03, threshold=1.287e+03, percent-clipped=5.0 +2023-04-01 23:41:26,536 INFO [train.py:903] (0/4) Epoch 13, batch 1550, loss[loss=0.2024, simple_loss=0.2856, pruned_loss=0.05956, over 19534.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3038, pruned_loss=0.07678, over 3814978.64 frames. ], batch size: 56, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:42:30,076 INFO [train.py:903] (0/4) Epoch 13, batch 1600, loss[loss=0.2403, simple_loss=0.3171, pruned_loss=0.08173, over 19550.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3041, pruned_loss=0.07691, over 3811584.62 frames. ], batch size: 56, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:42:53,317 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 23:42:55,756 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:43:25,328 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 5.445e+02 6.386e+02 7.908e+02 1.256e+03, threshold=1.277e+03, percent-clipped=0.0 +2023-04-01 23:43:31,085 INFO [train.py:903] (0/4) Epoch 13, batch 1650, loss[loss=0.2136, simple_loss=0.281, pruned_loss=0.07312, over 19422.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3041, pruned_loss=0.0772, over 3831732.40 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:43:44,217 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3004, 1.5204, 1.9368, 1.6513, 3.1420, 2.5889, 3.4084, 1.5968], + device='cuda:0'), covar=tensor([0.2298, 0.3833, 0.2341, 0.1682, 0.1390, 0.1783, 0.1546, 0.3411], + device='cuda:0'), in_proj_covar=tensor([0.0495, 0.0584, 0.0618, 0.0440, 0.0598, 0.0495, 0.0649, 0.0501], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 23:44:33,584 INFO [train.py:903] (0/4) Epoch 13, batch 1700, loss[loss=0.1925, simple_loss=0.2651, pruned_loss=0.05994, over 18642.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3036, pruned_loss=0.07726, over 3837474.46 frames. ], batch size: 41, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:44:45,676 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:15,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 23:45:19,397 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:27,804 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.386e+02 6.785e+02 9.131e+02 1.645e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 23:45:33,657 INFO [train.py:903] (0/4) Epoch 13, batch 1750, loss[loss=0.1999, simple_loss=0.2698, pruned_loss=0.06494, over 19731.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3019, pruned_loss=0.07639, over 3838854.03 frames. ], batch size: 45, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:45:48,921 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83696.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:19,378 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:37,929 INFO [train.py:903] (0/4) Epoch 13, batch 1800, loss[loss=0.2578, simple_loss=0.3269, pruned_loss=0.09432, over 19667.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3029, pruned_loss=0.07659, over 3835038.94 frames. 
], batch size: 60, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:47:01,559 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8444, 0.8968, 0.8269, 0.7217, 0.7188, 0.7661, 0.0808, 0.2782], + device='cuda:0'), covar=tensor([0.0365, 0.0377, 0.0242, 0.0324, 0.0692, 0.0347, 0.0774, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0334, 0.0333, 0.0358, 0.0426, 0.0356, 0.0316, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:47:08,530 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:29,808 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:32,958 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.542e+02 6.939e+02 8.095e+02 2.139e+03, threshold=1.388e+03, percent-clipped=3.0 +2023-04-01 23:47:35,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 23:47:39,805 INFO [train.py:903] (0/4) Epoch 13, batch 1850, loss[loss=0.2325, simple_loss=0.3136, pruned_loss=0.07575, over 19672.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07714, over 3822749.91 frames. ], batch size: 59, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:47:42,529 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2490, 1.3954, 1.7775, 1.3920, 2.7050, 3.3599, 3.1715, 3.5111], + device='cuda:0'), covar=tensor([0.1545, 0.3336, 0.2984, 0.2175, 0.0596, 0.0231, 0.0210, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0299, 0.0327, 0.0250, 0.0218, 0.0161, 0.0206, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:48:11,504 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 23:48:17,624 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0763, 1.3517, 1.8372, 1.3822, 3.0124, 4.6718, 4.5492, 5.0195], + device='cuda:0'), covar=tensor([0.1657, 0.3440, 0.3070, 0.2073, 0.0533, 0.0156, 0.0164, 0.0121], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0299, 0.0328, 0.0252, 0.0219, 0.0162, 0.0206, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-01 23:48:38,904 INFO [train.py:903] (0/4) Epoch 13, batch 1900, loss[loss=0.241, simple_loss=0.3058, pruned_loss=0.08808, over 19722.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3037, pruned_loss=0.07712, over 3830325.84 frames. ], batch size: 51, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:55,303 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 23:49:00,751 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 23:49:23,812 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-01 23:49:32,948 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.453e+02 6.640e+02 7.751e+02 1.927e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-01 23:49:38,629 INFO [train.py:903] (0/4) Epoch 13, batch 1950, loss[loss=0.2422, simple_loss=0.3119, pruned_loss=0.08624, over 19832.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3025, pruned_loss=0.07625, over 3834771.46 frames. ], batch size: 52, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:50:31,098 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:50:40,980 INFO [train.py:903] (0/4) Epoch 13, batch 2000, loss[loss=0.2148, simple_loss=0.2983, pruned_loss=0.06564, over 19427.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3039, pruned_loss=0.0771, over 3831024.15 frames. ], batch size: 66, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:02,440 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:04,038 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-01 23:51:17,802 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:36,104 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.295e+02 5.067e+02 6.527e+02 8.467e+02 1.955e+03, threshold=1.305e+03, percent-clipped=7.0 +2023-04-01 23:51:38,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 23:51:42,654 INFO [train.py:903] (0/4) Epoch 13, batch 2050, loss[loss=0.2176, simple_loss=0.3006, pruned_loss=0.06732, over 19666.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3032, pruned_loss=0.07708, over 3820085.20 frames. ], batch size: 58, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:56,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 23:51:57,787 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 23:51:59,003 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-84000.pt +2023-04-01 23:52:03,629 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:21,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 23:52:22,833 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:44,638 INFO [train.py:903] (0/4) Epoch 13, batch 2100, loss[loss=0.2384, simple_loss=0.3155, pruned_loss=0.08069, over 19536.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3028, pruned_loss=0.0773, over 3823306.12 frames. 
], batch size: 64, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:52:52,266 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:58,248 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7930, 2.3009, 2.3094, 2.8211, 2.7431, 2.4015, 2.2366, 2.8781], + device='cuda:0'), covar=tensor([0.0735, 0.1519, 0.1254, 0.0952, 0.1152, 0.0439, 0.1061, 0.0511], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0345, 0.0292, 0.0237, 0.0293, 0.0242, 0.0278, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-01 23:53:14,853 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 23:53:28,758 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 23:53:36,214 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 23:53:39,575 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.545e+02 6.946e+02 9.457e+02 3.064e+03, threshold=1.389e+03, percent-clipped=12.0 +2023-04-01 23:53:45,272 INFO [train.py:903] (0/4) Epoch 13, batch 2150, loss[loss=0.252, simple_loss=0.3222, pruned_loss=0.09086, over 19582.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3034, pruned_loss=0.07731, over 3827549.70 frames. ], batch size: 52, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:54:23,367 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:31,335 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:49,422 INFO [train.py:903] (0/4) Epoch 13, batch 2200, loss[loss=0.2783, simple_loss=0.3503, pruned_loss=0.1031, over 19790.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3032, pruned_loss=0.07677, over 3832373.44 frames. ], batch size: 56, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:55:44,487 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.936e+02 7.647e+02 9.699e+02 2.302e+03, threshold=1.529e+03, percent-clipped=8.0 +2023-04-01 23:55:50,238 INFO [train.py:903] (0/4) Epoch 13, batch 2250, loss[loss=0.2149, simple_loss=0.3045, pruned_loss=0.06269, over 19671.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3036, pruned_loss=0.07721, over 3827620.97 frames. ], batch size: 58, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:09,019 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4062, 1.5983, 2.1676, 1.6853, 2.9254, 2.3086, 2.9633, 1.5773], + device='cuda:0'), covar=tensor([0.2375, 0.4019, 0.2244, 0.1897, 0.1612, 0.2200, 0.2012, 0.3786], + device='cuda:0'), in_proj_covar=tensor([0.0497, 0.0589, 0.0624, 0.0444, 0.0603, 0.0499, 0.0651, 0.0505], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-01 23:56:18,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. 
limit=2.0 +2023-04-01 23:56:28,831 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:56:51,972 INFO [train.py:903] (0/4) Epoch 13, batch 2300, loss[loss=0.196, simple_loss=0.2697, pruned_loss=0.06111, over 19383.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3034, pruned_loss=0.07681, over 3828838.17 frames. ], batch size: 47, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:53,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84237.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:05,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 23:57:15,052 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:47,143 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 5.192e+02 6.483e+02 8.696e+02 2.103e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 23:57:52,877 INFO [train.py:903] (0/4) Epoch 13, batch 2350, loss[loss=0.2143, simple_loss=0.2967, pruned_loss=0.0659, over 18158.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3025, pruned_loss=0.07619, over 3838668.69 frames. ], batch size: 83, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:57:53,516 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 23:58:25,978 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:58:37,176 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 23:58:54,356 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 23:58:57,891 INFO [train.py:903] (0/4) Epoch 13, batch 2400, loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09423, over 19522.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3028, pruned_loss=0.07643, over 3838913.62 frames. ], batch size: 54, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:59:11,464 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:20,647 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:54,315 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.138e+02 6.932e+02 8.383e+02 1.660e+03, threshold=1.386e+03, percent-clipped=4.0 +2023-04-01 23:59:59,973 INFO [train.py:903] (0/4) Epoch 13, batch 2450, loss[loss=0.2487, simple_loss=0.3254, pruned_loss=0.08607, over 19849.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3034, pruned_loss=0.07688, over 3850609.87 frames. ], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:00:16,771 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.14 vs. limit=5.0 +2023-04-02 00:00:51,567 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84426.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:03,733 INFO [train.py:903] (0/4) Epoch 13, batch 2500, loss[loss=0.2507, simple_loss=0.3329, pruned_loss=0.08423, over 19672.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3037, pruned_loss=0.07716, over 3835422.39 frames. 
], batch size: 58, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:01:33,211 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:38,000 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:00,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 5.326e+02 7.100e+02 9.098e+02 1.657e+03, threshold=1.420e+03, percent-clipped=3.0 +2023-04-02 00:02:06,422 INFO [train.py:903] (0/4) Epoch 13, batch 2550, loss[loss=0.2501, simple_loss=0.3115, pruned_loss=0.09432, over 19608.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3033, pruned_loss=0.07722, over 3834797.27 frames. ], batch size: 50, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:02:16,039 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84493.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:48,818 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4300, 2.0228, 2.0504, 2.6776, 2.2861, 2.1556, 2.0075, 2.4202], + device='cuda:0'), covar=tensor([0.0869, 0.1673, 0.1318, 0.0834, 0.1290, 0.0470, 0.1163, 0.0639], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0346, 0.0292, 0.0236, 0.0293, 0.0240, 0.0278, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:02:48,828 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84518.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:04,532 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 00:03:10,388 INFO [train.py:903] (0/4) Epoch 13, batch 2600, loss[loss=0.2479, simple_loss=0.3257, pruned_loss=0.08502, over 19601.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3032, pruned_loss=0.07703, over 3826004.37 frames. ], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:03:21,083 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:42,738 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:46,342 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0802, 1.6506, 1.6606, 1.8888, 1.7112, 1.8028, 1.6304, 1.8790], + device='cuda:0'), covar=tensor([0.0837, 0.1360, 0.1261, 0.0835, 0.1202, 0.0484, 0.1163, 0.0654], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0346, 0.0291, 0.0237, 0.0293, 0.0240, 0.0278, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:03:59,322 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:09,373 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.097e+02 6.359e+02 8.045e+02 2.004e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 00:04:15,154 INFO [train.py:903] (0/4) Epoch 13, batch 2650, loss[loss=0.2051, simple_loss=0.2818, pruned_loss=0.0642, over 19591.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3034, pruned_loss=0.07698, over 3818798.85 frames. 
], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:04:30,349 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:34,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 00:04:35,152 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4117, 1.0270, 1.3317, 1.5837, 2.7224, 1.0772, 2.2881, 3.2653], + device='cuda:0'), covar=tensor([0.0639, 0.3502, 0.3132, 0.1978, 0.1217, 0.2774, 0.1323, 0.0436], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0344, 0.0356, 0.0324, 0.0346, 0.0332, 0.0342, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:05:17,487 INFO [train.py:903] (0/4) Epoch 13, batch 2700, loss[loss=0.1823, simple_loss=0.2694, pruned_loss=0.04759, over 19676.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3051, pruned_loss=0.07772, over 3825106.03 frames. ], batch size: 53, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:05:36,217 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:08,620 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:14,005 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.394e+02 6.395e+02 8.456e+02 1.799e+03, threshold=1.279e+03, percent-clipped=4.0 +2023-04-02 00:06:16,790 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:21,022 INFO [train.py:903] (0/4) Epoch 13, batch 2750, loss[loss=0.197, simple_loss=0.2696, pruned_loss=0.06221, over 19282.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3045, pruned_loss=0.07822, over 3835739.35 frames. ], batch size: 44, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:06:37,479 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:37,744 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4508, 2.1609, 1.5704, 1.5254, 1.9990, 1.1980, 1.3859, 1.8354], + device='cuda:0'), covar=tensor([0.0814, 0.0690, 0.1053, 0.0688, 0.0534, 0.1187, 0.0601, 0.0454], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0305, 0.0327, 0.0252, 0.0240, 0.0320, 0.0289, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:06:49,112 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:56,188 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:01,951 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:24,547 INFO [train.py:903] (0/4) Epoch 13, batch 2800, loss[loss=0.1711, simple_loss=0.2501, pruned_loss=0.046, over 17791.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3036, pruned_loss=0.07731, over 3838524.25 frames. 
], batch size: 39, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:07:34,100 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:52,563 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3247, 2.9709, 2.1493, 2.7788, 0.7657, 2.9066, 2.8943, 2.9827], + device='cuda:0'), covar=tensor([0.1050, 0.1431, 0.2201, 0.0972, 0.3967, 0.1052, 0.1029, 0.1345], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0368, 0.0443, 0.0319, 0.0380, 0.0374, 0.0368, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:08:22,665 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.423e+02 6.884e+02 8.957e+02 1.568e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-02 00:08:29,876 INFO [train.py:903] (0/4) Epoch 13, batch 2850, loss[loss=0.2596, simple_loss=0.3305, pruned_loss=0.09438, over 19535.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.303, pruned_loss=0.07684, over 3842118.60 frames. ], batch size: 54, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:08:31,518 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2290, 2.0960, 1.8508, 1.6448, 1.5841, 1.6989, 0.4407, 1.0131], + device='cuda:0'), covar=tensor([0.0418, 0.0437, 0.0324, 0.0577, 0.0972, 0.0610, 0.1003, 0.0814], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0357, 0.0429, 0.0356, 0.0315, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 00:09:04,276 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:08,027 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7456, 1.4711, 1.5029, 2.2257, 1.8627, 1.9363, 2.0957, 1.8436], + device='cuda:0'), covar=tensor([0.0815, 0.1009, 0.1089, 0.0771, 0.0823, 0.0786, 0.0878, 0.0644], + device='cuda:0'), in_proj_covar=tensor([0.0207, 0.0223, 0.0223, 0.0243, 0.0230, 0.0209, 0.0193, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 00:09:11,184 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84818.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:26,597 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:29,145 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9564, 1.9571, 2.2481, 2.6299, 1.8184, 2.4693, 2.4514, 2.0711], + device='cuda:0'), covar=tensor([0.3619, 0.3315, 0.1478, 0.2004, 0.3635, 0.1659, 0.3572, 0.2701], + device='cuda:0'), in_proj_covar=tensor([0.0814, 0.0844, 0.0663, 0.0899, 0.0796, 0.0725, 0.0800, 0.0723], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 00:09:33,381 INFO [train.py:903] (0/4) Epoch 13, batch 2900, loss[loss=0.2376, simple_loss=0.3108, pruned_loss=0.08219, over 19666.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3019, pruned_loss=0.07604, over 3855075.38 frames. ], batch size: 60, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:33,417 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 00:09:58,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:10:31,984 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.833e+02 5.191e+02 6.710e+02 8.572e+02 2.238e+03, threshold=1.342e+03, percent-clipped=4.0 +2023-04-02 00:10:38,036 INFO [train.py:903] (0/4) Epoch 13, batch 2950, loss[loss=0.1984, simple_loss=0.2814, pruned_loss=0.05767, over 19599.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3009, pruned_loss=0.07543, over 3842524.91 frames. ], batch size: 52, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:10:40,474 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:39,621 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:45,156 INFO [train.py:903] (0/4) Epoch 13, batch 3000, loss[loss=0.2488, simple_loss=0.3225, pruned_loss=0.08757, over 19664.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3027, pruned_loss=0.07669, over 3821721.23 frames. ], batch size: 60, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:11:45,157 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 00:12:00,840 INFO [train.py:937] (0/4) Epoch 13, validation: loss=0.1754, simple_loss=0.276, pruned_loss=0.03742, over 944034.00 frames. +2023-04-02 00:12:00,842 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18321MB +2023-04-02 00:12:05,792 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 00:12:29,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84957.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:43,877 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:44,017 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:59,475 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.901e+02 6.331e+02 8.447e+02 1.208e+03, threshold=1.266e+03, percent-clipped=0.0 +2023-04-02 00:13:05,457 INFO [train.py:903] (0/4) Epoch 13, batch 3050, loss[loss=0.2439, simple_loss=0.3199, pruned_loss=0.08397, over 19650.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3024, pruned_loss=0.07661, over 3817326.53 frames. ], batch size: 58, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:13:17,125 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84994.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:18,145 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84995.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:26,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:14:09,068 INFO [train.py:903] (0/4) Epoch 13, batch 3100, loss[loss=0.2525, simple_loss=0.3239, pruned_loss=0.09059, over 19751.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3039, pruned_loss=0.07804, over 3790062.79 frames. ], batch size: 63, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:14:19,296 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-02 00:14:50,680 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:05,349 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.118e+02 6.727e+02 8.323e+02 1.616e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-02 00:15:11,218 INFO [train.py:903] (0/4) Epoch 13, batch 3150, loss[loss=0.2416, simple_loss=0.3308, pruned_loss=0.0762, over 19665.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.303, pruned_loss=0.07686, over 3808863.24 frames. ], batch size: 55, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:15:20,631 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:37,411 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 00:15:41,902 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85110.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:00,130 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:09,356 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:12,580 INFO [train.py:903] (0/4) Epoch 13, batch 3200, loss[loss=0.2279, simple_loss=0.3179, pruned_loss=0.06893, over 19660.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.303, pruned_loss=0.07696, over 3812018.68 frames. ], batch size: 60, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:16:46,231 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:17:10,847 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.817e+02 5.513e+02 6.647e+02 8.266e+02 1.326e+03, threshold=1.329e+03, percent-clipped=0.0 +2023-04-02 00:17:16,618 INFO [train.py:903] (0/4) Epoch 13, batch 3250, loss[loss=0.2108, simple_loss=0.2924, pruned_loss=0.0646, over 19778.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3021, pruned_loss=0.07632, over 3816109.11 frames. ], batch size: 56, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:17:54,816 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.96 vs. limit=5.0 +2023-04-02 00:18:20,664 INFO [train.py:903] (0/4) Epoch 13, batch 3300, loss[loss=0.2322, simple_loss=0.3188, pruned_loss=0.07274, over 19576.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3025, pruned_loss=0.07656, over 3819885.71 frames. ], batch size: 61, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:18:21,898 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 00:18:47,636 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:18:57,446 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2255, 5.5937, 3.0659, 4.9219, 1.2921, 5.5300, 5.5214, 5.6939], + device='cuda:0'), covar=tensor([0.0411, 0.0953, 0.1821, 0.0637, 0.3884, 0.0582, 0.0655, 0.0823], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0373, 0.0448, 0.0322, 0.0387, 0.0380, 0.0371, 0.0403], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:19:12,393 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:17,832 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 5.269e+02 6.447e+02 8.165e+02 1.494e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 00:19:19,488 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:22,466 INFO [train.py:903] (0/4) Epoch 13, batch 3350, loss[loss=0.2493, simple_loss=0.3245, pruned_loss=0.08705, over 19445.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3027, pruned_loss=0.07683, over 3819077.21 frames. ], batch size: 64, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:19:26,586 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 00:19:57,319 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:20:24,674 INFO [train.py:903] (0/4) Epoch 13, batch 3400, loss[loss=0.1883, simple_loss=0.2596, pruned_loss=0.05851, over 19707.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3031, pruned_loss=0.07722, over 3802058.51 frames. ], batch size: 45, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:20:42,480 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9935, 2.6407, 1.8640, 1.9537, 2.4474, 1.7770, 1.6131, 2.0168], + device='cuda:0'), covar=tensor([0.0812, 0.0627, 0.0732, 0.0587, 0.0388, 0.0854, 0.0599, 0.0508], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0301, 0.0323, 0.0247, 0.0236, 0.0317, 0.0283, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:20:42,554 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3263, 2.3713, 2.5840, 3.1799, 2.4755, 3.1115, 2.7677, 2.3370], + device='cuda:0'), covar=tensor([0.3256, 0.2943, 0.1300, 0.1623, 0.3168, 0.1306, 0.2866, 0.2327], + device='cuda:0'), in_proj_covar=tensor([0.0806, 0.0835, 0.0658, 0.0890, 0.0793, 0.0718, 0.0794, 0.0718], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 00:21:03,052 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:22,545 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.371e+02 6.826e+02 8.500e+02 2.504e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-02 00:21:27,161 INFO [train.py:903] (0/4) Epoch 13, batch 3450, loss[loss=0.2003, simple_loss=0.2745, pruned_loss=0.06306, over 19576.00 frames. 
], tot_loss[loss=0.228, simple_loss=0.3026, pruned_loss=0.07671, over 3813506.46 frames. ], batch size: 52, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:21:30,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 00:21:34,246 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85391.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:47,629 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6056, 1.4217, 1.4071, 1.9178, 1.6422, 1.9968, 1.9633, 1.7690], + device='cuda:0'), covar=tensor([0.0819, 0.1032, 0.1088, 0.0946, 0.0874, 0.0699, 0.0902, 0.0689], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0225, 0.0225, 0.0245, 0.0231, 0.0212, 0.0193, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 00:22:19,348 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:22:28,803 INFO [train.py:903] (0/4) Epoch 13, batch 3500, loss[loss=0.181, simple_loss=0.2516, pruned_loss=0.05515, over 19323.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3032, pruned_loss=0.07709, over 3811262.28 frames. ], batch size: 44, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:22:42,072 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:10,405 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:19,016 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 00:23:20,658 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:27,192 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.339e+02 6.675e+02 8.042e+02 1.644e+03, threshold=1.335e+03, percent-clipped=3.0 +2023-04-02 00:23:31,543 INFO [train.py:903] (0/4) Epoch 13, batch 3550, loss[loss=0.2077, simple_loss=0.2717, pruned_loss=0.07181, over 18611.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3028, pruned_loss=0.07686, over 3807555.15 frames. ], batch size: 41, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:23:50,596 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85501.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:31,217 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:34,224 INFO [train.py:903] (0/4) Epoch 13, batch 3600, loss[loss=0.2119, simple_loss=0.2774, pruned_loss=0.07324, over 19779.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3027, pruned_loss=0.07702, over 3799411.16 frames. 
], batch size: 46, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:24:50,355 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:03,579 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85558.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:33,198 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.969e+02 7.367e+02 9.197e+02 1.857e+03, threshold=1.473e+03, percent-clipped=2.0 +2023-04-02 00:25:36,788 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:38,625 INFO [train.py:903] (0/4) Epoch 13, batch 3650, loss[loss=0.2781, simple_loss=0.3439, pruned_loss=0.1061, over 19282.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.304, pruned_loss=0.07753, over 3795042.43 frames. ], batch size: 66, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:25:46,137 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:26:41,857 INFO [train.py:903] (0/4) Epoch 13, batch 3700, loss[loss=0.2273, simple_loss=0.2929, pruned_loss=0.08083, over 18719.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3042, pruned_loss=0.07777, over 3799840.96 frames. ], batch size: 41, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:26:51,220 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:41,799 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.337e+02 6.385e+02 1.025e+03 1.989e+03, threshold=1.277e+03, percent-clipped=4.0 +2023-04-02 00:27:44,649 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:46,553 INFO [train.py:903] (0/4) Epoch 13, batch 3750, loss[loss=0.2853, simple_loss=0.3538, pruned_loss=0.1083, over 19613.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3037, pruned_loss=0.07744, over 3789295.14 frames. ], batch size: 57, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:28:14,925 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:28:48,148 INFO [train.py:903] (0/4) Epoch 13, batch 3800, loss[loss=0.1791, simple_loss=0.2531, pruned_loss=0.0525, over 19730.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.304, pruned_loss=0.07762, over 3792592.13 frames. ], batch size: 46, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:13,288 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0578, 5.3942, 2.8611, 4.6920, 1.1403, 5.4908, 5.3758, 5.5367], + device='cuda:0'), covar=tensor([0.0403, 0.0802, 0.1897, 0.0664, 0.3850, 0.0501, 0.0650, 0.0779], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0375, 0.0449, 0.0323, 0.0388, 0.0383, 0.0373, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:29:20,841 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-02 00:29:44,888 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 5.587e+02 7.205e+02 9.194e+02 2.888e+03, threshold=1.441e+03, percent-clipped=8.0 +2023-04-02 00:29:50,626 INFO [train.py:903] (0/4) Epoch 13, batch 3850, loss[loss=0.2512, simple_loss=0.3263, pruned_loss=0.08805, over 19141.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3052, pruned_loss=0.07847, over 3803268.62 frames. ], batch size: 69, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:56,219 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:29:56,549 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8411, 1.9098, 2.1698, 2.6307, 1.8537, 2.5061, 2.3288, 2.0159], + device='cuda:0'), covar=tensor([0.3724, 0.3247, 0.1495, 0.1802, 0.3431, 0.1542, 0.3746, 0.2854], + device='cuda:0'), in_proj_covar=tensor([0.0812, 0.0846, 0.0661, 0.0899, 0.0797, 0.0723, 0.0800, 0.0722], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 00:30:53,504 INFO [train.py:903] (0/4) Epoch 13, batch 3900, loss[loss=0.2221, simple_loss=0.3097, pruned_loss=0.06722, over 19600.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.304, pruned_loss=0.07742, over 3802409.76 frames. ], batch size: 57, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:31:00,094 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:06,801 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85845.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:10,512 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:30,218 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:40,433 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:51,444 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.450e+02 6.738e+02 8.252e+02 2.044e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-02 00:31:57,035 INFO [train.py:903] (0/4) Epoch 13, batch 3950, loss[loss=0.2066, simple_loss=0.269, pruned_loss=0.07206, over 19769.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3034, pruned_loss=0.07719, over 3817703.59 frames. ], batch size: 47, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:32:00,536 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-02 00:32:04,154 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85892.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:05,530 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:20,776 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:51,825 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2917, 3.7136, 3.8656, 3.8726, 1.5367, 3.6495, 3.2437, 3.5538], + device='cuda:0'), covar=tensor([0.1494, 0.1076, 0.0633, 0.0689, 0.5119, 0.0900, 0.0628, 0.1086], + device='cuda:0'), in_proj_covar=tensor([0.0695, 0.0619, 0.0825, 0.0702, 0.0747, 0.0574, 0.0496, 0.0760], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 00:32:59,797 INFO [train.py:903] (0/4) Epoch 13, batch 4000, loss[loss=0.2513, simple_loss=0.3281, pruned_loss=0.08724, over 19302.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3034, pruned_loss=0.07693, over 3805946.38 frames. ], batch size: 66, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:33:14,761 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85948.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:33:30,751 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85960.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:46,158 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 00:33:48,581 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:56,481 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.280e+02 6.258e+02 8.022e+02 1.377e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 00:34:02,168 INFO [train.py:903] (0/4) Epoch 13, batch 4050, loss[loss=0.2024, simple_loss=0.2776, pruned_loss=0.06366, over 19712.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3039, pruned_loss=0.07746, over 3810610.28 frames. 
], batch size: 46, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:34:03,493 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:20,259 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-86000.pt +2023-04-02 00:34:30,787 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,102 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,294 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1720, 2.2301, 2.3569, 3.1481, 2.2567, 3.0923, 2.6117, 2.2178], + device='cuda:0'), covar=tensor([0.3831, 0.3535, 0.1589, 0.2044, 0.3960, 0.1622, 0.3723, 0.2804], + device='cuda:0'), in_proj_covar=tensor([0.0813, 0.0845, 0.0660, 0.0899, 0.0798, 0.0725, 0.0800, 0.0723], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 00:35:05,744 INFO [train.py:903] (0/4) Epoch 13, batch 4100, loss[loss=0.2624, simple_loss=0.3307, pruned_loss=0.09707, over 17695.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3038, pruned_loss=0.07746, over 3810106.25 frames. ], batch size: 101, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:35:07,489 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3550, 1.8941, 1.9893, 2.9135, 2.0987, 2.6979, 2.6398, 2.3155], + device='cuda:0'), covar=tensor([0.0699, 0.0909, 0.0947, 0.0795, 0.0857, 0.0684, 0.0879, 0.0632], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0224, 0.0225, 0.0243, 0.0232, 0.0210, 0.0191, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 00:35:41,511 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 00:35:48,384 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86070.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:04,014 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.943e+02 6.094e+02 7.890e+02 1.986e+03, threshold=1.219e+03, percent-clipped=5.0 +2023-04-02 00:36:08,782 INFO [train.py:903] (0/4) Epoch 13, batch 4150, loss[loss=0.1848, simple_loss=0.2605, pruned_loss=0.05452, over 19761.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3029, pruned_loss=0.07701, over 3810949.78 frames. ], batch size: 47, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:36:11,029 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:24,547 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-02 00:36:28,536 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:37:11,325 INFO [train.py:903] (0/4) Epoch 13, batch 4200, loss[loss=0.1699, simple_loss=0.2459, pruned_loss=0.04691, over 19283.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3029, pruned_loss=0.07675, over 3818195.16 frames. ], batch size: 44, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:37:14,988 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 00:37:41,978 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:08,160 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.506e+02 6.651e+02 9.015e+02 1.898e+03, threshold=1.330e+03, percent-clipped=8.0 +2023-04-02 00:38:12,740 INFO [train.py:903] (0/4) Epoch 13, batch 4250, loss[loss=0.2119, simple_loss=0.2749, pruned_loss=0.0745, over 19722.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3028, pruned_loss=0.07663, over 3835291.54 frames. ], batch size: 46, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:38:13,158 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:31,135 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 00:38:43,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 00:38:52,336 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86216.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:15,638 INFO [train.py:903] (0/4) Epoch 13, batch 4300, loss[loss=0.2453, simple_loss=0.3225, pruned_loss=0.08404, over 19448.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3019, pruned_loss=0.07585, over 3846501.61 frames. ], batch size: 70, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:39:17,000 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86237.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:17,306 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5303, 2.3870, 1.8257, 1.5802, 2.1876, 1.4059, 1.4341, 1.9261], + device='cuda:0'), covar=tensor([0.0914, 0.0623, 0.0882, 0.0708, 0.0438, 0.1061, 0.0579, 0.0406], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0304, 0.0327, 0.0251, 0.0237, 0.0320, 0.0287, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:39:22,955 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86241.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:50,451 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:51,561 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86264.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:13,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.647e+02 5.244e+02 6.314e+02 8.271e+02 2.210e+03, threshold=1.263e+03, percent-clipped=7.0 +2023-04-02 00:40:15,192 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 00:40:19,673 INFO [train.py:903] (0/4) Epoch 13, batch 4350, loss[loss=0.2489, simple_loss=0.3139, pruned_loss=0.09191, over 19597.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3018, pruned_loss=0.07615, over 3841875.85 frames. 
], batch size: 52, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:40:22,529 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:27,935 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86292.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:41:00,086 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86319.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:22,565 INFO [train.py:903] (0/4) Epoch 13, batch 4400, loss[loss=0.2364, simple_loss=0.3098, pruned_loss=0.0815, over 19668.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3014, pruned_loss=0.07599, over 3827521.83 frames. ], batch size: 58, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:41:42,034 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:49,162 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:50,097 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 00:41:58,578 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:00,813 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 00:42:20,371 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.626e+02 5.303e+02 6.614e+02 7.903e+02 1.976e+03, threshold=1.323e+03, percent-clipped=7.0 +2023-04-02 00:42:21,972 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:25,157 INFO [train.py:903] (0/4) Epoch 13, batch 4450, loss[loss=0.2282, simple_loss=0.3083, pruned_loss=0.07401, over 19610.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3004, pruned_loss=0.07555, over 3813697.72 frames. ], batch size: 57, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:42:52,178 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86407.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:43:00,988 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:21,599 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:25,407 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:27,352 INFO [train.py:903] (0/4) Epoch 13, batch 4500, loss[loss=0.236, simple_loss=0.3121, pruned_loss=0.07995, over 19533.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3007, pruned_loss=0.07538, over 3832986.42 frames. 
], batch size: 54, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:43:52,035 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7295, 1.7304, 1.5966, 1.3806, 1.3267, 1.4072, 0.2030, 0.6842], + device='cuda:0'), covar=tensor([0.0452, 0.0449, 0.0281, 0.0437, 0.0859, 0.0525, 0.0906, 0.0811], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0337, 0.0334, 0.0360, 0.0433, 0.0360, 0.0316, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 00:43:55,654 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:01,502 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:21,173 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:24,468 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.430e+02 6.459e+02 7.870e+02 1.871e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-02 00:44:29,963 INFO [train.py:903] (0/4) Epoch 13, batch 4550, loss[loss=0.2449, simple_loss=0.3243, pruned_loss=0.08279, over 18730.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3011, pruned_loss=0.07567, over 3823493.31 frames. ], batch size: 74, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:44:39,219 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 00:45:02,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 00:45:07,127 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3837, 4.0351, 2.5080, 3.5624, 1.1139, 3.7484, 3.7553, 3.8750], + device='cuda:0'), covar=tensor([0.0663, 0.1025, 0.2086, 0.0829, 0.3765, 0.0813, 0.0826, 0.1026], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0374, 0.0450, 0.0323, 0.0388, 0.0382, 0.0373, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:45:16,171 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 00:45:24,953 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86529.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:33,594 INFO [train.py:903] (0/4) Epoch 13, batch 4600, loss[loss=0.1915, simple_loss=0.257, pruned_loss=0.06305, over 19762.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3016, pruned_loss=0.07605, over 3821812.54 frames. 
], batch size: 47, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:45:45,502 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:50,023 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6362, 1.4019, 1.2757, 2.0909, 1.5866, 2.0387, 2.0466, 1.7170], + device='cuda:0'), covar=tensor([0.0808, 0.0993, 0.1136, 0.0899, 0.0942, 0.0747, 0.0894, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0225, 0.0224, 0.0245, 0.0231, 0.0211, 0.0192, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 00:46:31,573 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.452e+02 6.634e+02 8.020e+02 1.693e+03, threshold=1.327e+03, percent-clipped=1.0 +2023-04-02 00:46:35,925 INFO [train.py:903] (0/4) Epoch 13, batch 4650, loss[loss=0.2745, simple_loss=0.3324, pruned_loss=0.1082, over 13321.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3016, pruned_loss=0.07547, over 3826592.35 frames. ], batch size: 135, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:46:51,339 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:52,259 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 00:46:57,295 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 00:47:03,875 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 00:47:03,989 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:04,257 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,012 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,322 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 00:47:38,173 INFO [train.py:903] (0/4) Epoch 13, batch 4700, loss[loss=0.2364, simple_loss=0.3133, pruned_loss=0.07975, over 19581.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3017, pruned_loss=0.07561, over 3823247.97 frames. ], batch size: 67, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:47:53,756 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 00:48:04,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 00:48:06,008 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1761, 2.0418, 1.8462, 1.6757, 1.4956, 1.6667, 0.3837, 1.0830], + device='cuda:0'), covar=tensor([0.0441, 0.0454, 0.0350, 0.0575, 0.0986, 0.0601, 0.0976, 0.0754], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0335, 0.0332, 0.0357, 0.0432, 0.0357, 0.0314, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 00:48:13,273 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86663.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:35,960 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.083e+02 5.953e+02 7.054e+02 1.649e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-02 00:48:42,060 INFO [train.py:903] (0/4) Epoch 13, batch 4750, loss[loss=0.2413, simple_loss=0.3173, pruned_loss=0.08268, over 18846.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3015, pruned_loss=0.07515, over 3820128.23 frames. ], batch size: 74, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:48:45,709 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86688.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:48,026 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86690.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:48:58,159 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7045, 1.3314, 1.4021, 1.4951, 3.2379, 1.0906, 2.2105, 3.5898], + device='cuda:0'), covar=tensor([0.0459, 0.2677, 0.2922, 0.1848, 0.0730, 0.2512, 0.1378, 0.0278], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0345, 0.0356, 0.0325, 0.0352, 0.0332, 0.0346, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:49:07,677 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 00:49:10,036 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 00:49:15,251 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 00:49:18,489 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:27,240 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:31,060 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 00:49:43,045 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:43,760 INFO [train.py:903] (0/4) Epoch 13, batch 4800, loss[loss=0.2911, simple_loss=0.3502, pruned_loss=0.116, over 19273.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3023, pruned_loss=0.07609, over 3809447.56 frames. 
], batch size: 66, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:50:12,879 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:41,009 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.270e+02 6.552e+02 8.488e+02 1.715e+03, threshold=1.310e+03, percent-clipped=7.0 +2023-04-02 00:50:45,087 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:45,826 INFO [train.py:903] (0/4) Epoch 13, batch 4850, loss[loss=0.2183, simple_loss=0.2812, pruned_loss=0.07771, over 19742.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3022, pruned_loss=0.07618, over 3791319.37 frames. ], batch size: 46, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:51:01,584 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.3096, 5.6701, 3.1402, 4.8318, 1.1260, 5.6133, 5.6861, 5.7313], + device='cuda:0'), covar=tensor([0.0339, 0.0835, 0.1650, 0.0644, 0.4046, 0.0507, 0.0556, 0.0835], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0369, 0.0444, 0.0321, 0.0382, 0.0379, 0.0368, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 00:51:06,432 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:06,703 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:09,592 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 00:51:14,191 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:18,076 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:31,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 00:51:37,264 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 00:51:37,288 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 00:51:38,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:45,325 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86832.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:47,403 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 00:51:49,797 INFO [train.py:903] (0/4) Epoch 13, batch 4900, loss[loss=0.2295, simple_loss=0.3101, pruned_loss=0.07448, over 19779.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.301, pruned_loss=0.0758, over 3794398.64 frames. ], batch size: 56, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:52:07,083 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 00:52:11,972 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-02 00:52:46,044 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 5.393e+02 6.845e+02 8.725e+02 1.892e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 00:52:50,737 INFO [train.py:903] (0/4) Epoch 13, batch 4950, loss[loss=0.2177, simple_loss=0.2878, pruned_loss=0.07379, over 19772.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3014, pruned_loss=0.07571, over 3813040.33 frames. ], batch size: 47, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:53:05,765 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86896.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:08,078 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 00:53:31,755 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:32,645 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 00:53:37,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:44,587 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0595, 5.0757, 5.9163, 5.8683, 1.9539, 5.4859, 4.7379, 5.5294], + device='cuda:0'), covar=tensor([0.1425, 0.0639, 0.0457, 0.0483, 0.5387, 0.0520, 0.0544, 0.0952], + device='cuda:0'), in_proj_covar=tensor([0.0710, 0.0626, 0.0837, 0.0713, 0.0753, 0.0584, 0.0506, 0.0770], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 00:53:56,223 INFO [train.py:903] (0/4) Epoch 13, batch 5000, loss[loss=0.2296, simple_loss=0.292, pruned_loss=0.08357, over 16487.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3013, pruned_loss=0.07519, over 3818270.54 frames. ], batch size: 36, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:54:03,375 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 00:54:03,510 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:14,004 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 00:54:49,307 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:52,320 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 5.650e+02 6.679e+02 8.615e+02 1.943e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 00:54:56,911 INFO [train.py:903] (0/4) Epoch 13, batch 5050, loss[loss=0.2125, simple_loss=0.2805, pruned_loss=0.07219, over 19356.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3006, pruned_loss=0.07461, over 3831686.26 frames. ], batch size: 47, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:55:19,920 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87004.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:55:32,409 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-02 00:55:41,731 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87020.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:56:01,009 INFO [train.py:903] (0/4) Epoch 13, batch 5100, loss[loss=0.2399, simple_loss=0.3191, pruned_loss=0.08033, over 18333.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3007, pruned_loss=0.07478, over 3817979.23 frames. ], batch size: 83, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:56:07,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 00:56:11,374 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 00:56:18,115 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 00:56:28,896 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:56:58,517 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.089e+02 6.519e+02 8.713e+02 1.677e+03, threshold=1.304e+03, percent-clipped=2.0 +2023-04-02 00:57:03,281 INFO [train.py:903] (0/4) Epoch 13, batch 5150, loss[loss=0.2109, simple_loss=0.2798, pruned_loss=0.07105, over 19379.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3013, pruned_loss=0.07525, over 3817493.73 frames. ], batch size: 47, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:57:15,161 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 00:57:48,511 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 00:58:07,742 INFO [train.py:903] (0/4) Epoch 13, batch 5200, loss[loss=0.2327, simple_loss=0.2993, pruned_loss=0.08306, over 19578.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3016, pruned_loss=0.07574, over 3818902.12 frames. ], batch size: 52, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:58:18,415 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 00:58:54,661 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87173.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:58:57,947 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87176.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:01,501 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:04,669 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 00:59:05,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.198e+02 5.751e+02 6.794e+02 9.141e+02 2.147e+03, threshold=1.359e+03, percent-clipped=10.0 +2023-04-02 00:59:10,441 INFO [train.py:903] (0/4) Epoch 13, batch 5250, loss[loss=0.2939, simple_loss=0.3478, pruned_loss=0.12, over 12760.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3017, pruned_loss=0.0755, over 3813124.99 frames. 
], batch size: 135, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 00:59:25,790 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87198.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:31,679 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:55,611 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 01:00:13,789 INFO [train.py:903] (0/4) Epoch 13, batch 5300, loss[loss=0.2352, simple_loss=0.3043, pruned_loss=0.08301, over 19745.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3013, pruned_loss=0.07536, over 3824573.23 frames. ], batch size: 51, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:00:18,347 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87240.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:27,869 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87248.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:28,783 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 01:00:51,912 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 01:01:04,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2257, 1.2774, 1.7536, 1.2633, 2.8389, 3.7173, 3.5044, 3.8804], + device='cuda:0'), covar=tensor([0.1598, 0.3581, 0.3165, 0.2197, 0.0495, 0.0183, 0.0193, 0.0208], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0302, 0.0332, 0.0252, 0.0223, 0.0164, 0.0209, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 01:01:10,966 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.140e+02 5.034e+02 6.226e+02 7.840e+02 1.929e+03, threshold=1.245e+03, percent-clipped=3.0 +2023-04-02 01:01:15,805 INFO [train.py:903] (0/4) Epoch 13, batch 5350, loss[loss=0.2039, simple_loss=0.2921, pruned_loss=0.05785, over 19528.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3015, pruned_loss=0.07528, over 3830204.34 frames. ], batch size: 54, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:01:24,090 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:01:49,518 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 01:01:51,120 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:21,295 INFO [train.py:903] (0/4) Epoch 13, batch 5400, loss[loss=0.2779, simple_loss=0.3411, pruned_loss=0.1074, over 19666.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3014, pruned_loss=0.07566, over 3837596.51 frames. 
], batch size: 60, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 01:02:24,062 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:44,267 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87355.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:55,485 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87364.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:03:00,737 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.92 vs. limit=5.0 +2023-04-02 01:03:13,269 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5551, 4.1689, 2.4865, 3.6970, 0.8833, 3.9556, 3.9690, 4.0027], + device='cuda:0'), covar=tensor([0.0642, 0.0935, 0.2144, 0.0749, 0.4076, 0.0713, 0.0767, 0.1048], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0373, 0.0451, 0.0323, 0.0385, 0.0384, 0.0373, 0.0410], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:03:16,663 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87380.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:03:19,988 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 4.952e+02 6.479e+02 7.900e+02 1.578e+03, threshold=1.296e+03, percent-clipped=3.0 +2023-04-02 01:03:23,439 INFO [train.py:903] (0/4) Epoch 13, batch 5450, loss[loss=0.2105, simple_loss=0.2984, pruned_loss=0.06136, over 19534.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3008, pruned_loss=0.07514, over 3847394.11 frames. ], batch size: 54, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:10,192 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 01:04:25,745 INFO [train.py:903] (0/4) Epoch 13, batch 5500, loss[loss=0.1824, simple_loss=0.2591, pruned_loss=0.05286, over 19325.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3018, pruned_loss=0.07547, over 3844247.43 frames. ], batch size: 44, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:47,836 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 01:05:21,394 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:05:21,469 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87479.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:05:25,843 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 5.041e+02 6.443e+02 8.590e+02 2.643e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 01:05:29,326 INFO [train.py:903] (0/4) Epoch 13, batch 5550, loss[loss=0.2006, simple_loss=0.2727, pruned_loss=0.06423, over 19757.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3025, pruned_loss=0.07587, over 3847562.76 frames. 
], batch size: 46, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:05:31,735 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8456, 4.2718, 4.5456, 4.5255, 1.7484, 4.2466, 3.6385, 4.2012], + device='cuda:0'), covar=tensor([0.1428, 0.0872, 0.0558, 0.0624, 0.5445, 0.0813, 0.0682, 0.1090], + device='cuda:0'), in_proj_covar=tensor([0.0709, 0.0630, 0.0837, 0.0716, 0.0752, 0.0582, 0.0504, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 01:05:33,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 01:05:58,112 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4437, 0.9336, 1.1110, 1.4564, 2.7300, 1.0518, 2.1287, 3.2692], + device='cuda:0'), covar=tensor([0.0658, 0.3934, 0.3828, 0.2302, 0.1358, 0.3053, 0.1579, 0.0477], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0345, 0.0356, 0.0324, 0.0352, 0.0333, 0.0343, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:06:12,062 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8193, 1.4198, 1.6205, 1.5082, 3.3987, 1.0269, 2.3162, 3.7926], + device='cuda:0'), covar=tensor([0.0407, 0.2513, 0.2586, 0.1867, 0.0658, 0.2527, 0.1312, 0.0209], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0345, 0.0355, 0.0324, 0.0352, 0.0332, 0.0343, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:06:16,481 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:23,053 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 01:06:33,781 INFO [train.py:903] (0/4) Epoch 13, batch 5600, loss[loss=0.2417, simple_loss=0.3082, pruned_loss=0.08755, over 19344.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3025, pruned_loss=0.0758, over 3834455.03 frames. ], batch size: 66, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:06:47,615 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:50,187 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-02 01:07:17,509 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87572.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:32,977 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.023e+02 6.222e+02 8.051e+02 1.328e+03, threshold=1.244e+03, percent-clipped=2.0 +2023-04-02 01:07:36,350 INFO [train.py:903] (0/4) Epoch 13, batch 5650, loss[loss=0.1984, simple_loss=0.2714, pruned_loss=0.06276, over 19360.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3024, pruned_loss=0.07608, over 3839276.79 frames. ], batch size: 47, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:07:38,473 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. 
limit=2.0 +2023-04-02 01:07:39,053 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8353, 1.2089, 0.9736, 0.9208, 1.0584, 0.8923, 0.8982, 1.1702], + device='cuda:0'), covar=tensor([0.0581, 0.0735, 0.0930, 0.0586, 0.0492, 0.1053, 0.0483, 0.0404], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0305, 0.0330, 0.0251, 0.0241, 0.0324, 0.0292, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:07:43,440 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:56,265 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:01,807 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:06,352 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:23,588 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:24,450 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 01:08:38,517 INFO [train.py:903] (0/4) Epoch 13, batch 5700, loss[loss=0.2256, simple_loss=0.3027, pruned_loss=0.07424, over 19869.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3031, pruned_loss=0.07635, over 3838031.51 frames. ], batch size: 52, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:08:38,942 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:53,309 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-02 01:09:36,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 4.997e+02 6.690e+02 8.841e+02 1.921e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 01:09:40,130 INFO [train.py:903] (0/4) Epoch 13, batch 5750, loss[loss=0.2536, simple_loss=0.3261, pruned_loss=0.09049, over 19595.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3029, pruned_loss=0.07698, over 3834319.21 frames. ], batch size: 57, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:09:41,288 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 01:09:49,517 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 01:09:55,218 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 01:10:07,744 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:27,823 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87724.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:37,291 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4593, 2.2562, 1.6949, 1.4550, 2.1523, 1.1797, 1.2888, 1.9755], + device='cuda:0'), covar=tensor([0.0967, 0.0767, 0.0978, 0.0754, 0.0453, 0.1237, 0.0702, 0.0405], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0307, 0.0332, 0.0252, 0.0241, 0.0326, 0.0294, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:10:41,787 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87735.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:10:42,502 INFO [train.py:903] (0/4) Epoch 13, batch 5800, loss[loss=0.2788, simple_loss=0.3525, pruned_loss=0.1026, over 18102.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07718, over 3818917.78 frames. ], batch size: 83, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:10:46,844 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 01:11:07,375 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-04-02 01:11:13,107 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87760.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:11:27,850 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.3209, 5.2084, 6.0980, 6.0760, 1.7597, 5.7875, 4.9489, 5.7577], + device='cuda:0'), covar=tensor([0.1405, 0.0594, 0.0463, 0.0469, 0.5638, 0.0432, 0.0490, 0.0928], + device='cuda:0'), in_proj_covar=tensor([0.0703, 0.0625, 0.0836, 0.0714, 0.0745, 0.0581, 0.0502, 0.0765], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 01:11:43,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.041e+02 6.242e+02 7.928e+02 1.581e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 01:11:46,962 INFO [train.py:903] (0/4) Epoch 13, batch 5850, loss[loss=0.2151, simple_loss=0.2832, pruned_loss=0.07344, over 19853.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3016, pruned_loss=0.07624, over 3819086.69 frames. ], batch size: 52, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:18,351 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1843, 1.1713, 1.1337, 1.2794, 1.0927, 1.3202, 1.3631, 1.2002], + device='cuda:0'), covar=tensor([0.0834, 0.0874, 0.1011, 0.0673, 0.0792, 0.0745, 0.0764, 0.0716], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0223, 0.0224, 0.0242, 0.0229, 0.0211, 0.0191, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 01:12:33,158 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87823.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:12:47,993 INFO [train.py:903] (0/4) Epoch 13, batch 5900, loss[loss=0.2261, simple_loss=0.2924, pruned_loss=0.07986, over 19684.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3028, pruned_loss=0.07714, over 3813996.89 frames. 
], batch size: 53, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:49,020 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 01:12:51,584 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:03,839 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4151, 1.2615, 1.0899, 1.2940, 1.0645, 1.1616, 1.0718, 1.2440], + device='cuda:0'), covar=tensor([0.0885, 0.0926, 0.1341, 0.0814, 0.0997, 0.0597, 0.1241, 0.0794], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0353, 0.0296, 0.0239, 0.0295, 0.0243, 0.0284, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:13:10,522 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 01:13:27,506 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:46,999 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.447e+02 6.662e+02 8.353e+02 2.279e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 01:13:50,406 INFO [train.py:903] (0/4) Epoch 13, batch 5950, loss[loss=0.2869, simple_loss=0.3461, pruned_loss=0.1138, over 19679.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3027, pruned_loss=0.07704, over 3809876.59 frames. ], batch size: 60, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:14:30,753 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7056, 1.5078, 1.4579, 2.1680, 1.7481, 2.0095, 2.0234, 1.8076], + device='cuda:0'), covar=tensor([0.0782, 0.0900, 0.1015, 0.0725, 0.0782, 0.0668, 0.0817, 0.0628], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0223, 0.0224, 0.0241, 0.0227, 0.0210, 0.0191, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 01:14:53,067 INFO [train.py:903] (0/4) Epoch 13, batch 6000, loss[loss=0.216, simple_loss=0.2979, pruned_loss=0.06703, over 19522.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3023, pruned_loss=0.07687, over 3807785.91 frames. ], batch size: 54, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:14:53,068 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 01:15:06,409 INFO [train.py:937] (0/4) Epoch 13, validation: loss=0.175, simple_loss=0.2755, pruned_loss=0.03726, over 944034.00 frames. 
+2023-04-02 01:15:06,409 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18321MB +2023-04-02 01:15:09,180 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87938.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:19,417 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:25,217 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:35,018 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8376, 1.7041, 1.6015, 1.9072, 1.7309, 1.6331, 1.6109, 1.8101], + device='cuda:0'), covar=tensor([0.0836, 0.1325, 0.1263, 0.0853, 0.1132, 0.0515, 0.1180, 0.0650], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0352, 0.0296, 0.0240, 0.0296, 0.0243, 0.0285, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:15:41,077 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:45,484 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,016 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,762 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.142e+02 6.649e+02 8.424e+02 2.027e+03, threshold=1.330e+03, percent-clipped=9.0 +2023-04-02 01:16:08,327 INFO [train.py:903] (0/4) Epoch 13, batch 6050, loss[loss=0.2244, simple_loss=0.3017, pruned_loss=0.0736, over 19400.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3023, pruned_loss=0.07686, over 3803340.25 frames. ], batch size: 48, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:16:12,361 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:26,863 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-88000.pt +2023-04-02 01:17:12,303 INFO [train.py:903] (0/4) Epoch 13, batch 6100, loss[loss=0.2561, simple_loss=0.3254, pruned_loss=0.09341, over 19776.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3012, pruned_loss=0.07636, over 3819336.85 frames. 
], batch size: 54, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:17:42,722 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:48,664 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:55,244 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2232, 1.3042, 1.5078, 1.3933, 2.1603, 1.9648, 2.2007, 0.6854], + device='cuda:0'), covar=tensor([0.2207, 0.3755, 0.2339, 0.1827, 0.1386, 0.1928, 0.1317, 0.3987], + device='cuda:0'), in_proj_covar=tensor([0.0499, 0.0589, 0.0629, 0.0447, 0.0600, 0.0496, 0.0647, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:18:09,952 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:10,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.311e+02 6.775e+02 8.712e+02 1.953e+03, threshold=1.355e+03, percent-clipped=3.0 +2023-04-02 01:18:14,178 INFO [train.py:903] (0/4) Epoch 13, batch 6150, loss[loss=0.27, simple_loss=0.335, pruned_loss=0.1026, over 18741.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3033, pruned_loss=0.07756, over 3805766.02 frames. ], batch size: 74, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:18:24,908 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:38,384 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 01:18:54,515 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88118.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:56,799 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:19:15,204 INFO [train.py:903] (0/4) Epoch 13, batch 6200, loss[loss=0.3075, simple_loss=0.3538, pruned_loss=0.1307, over 13212.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3043, pruned_loss=0.07855, over 3785571.70 frames. ], batch size: 136, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:19:29,261 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9429, 4.3541, 4.6461, 4.6195, 1.7381, 4.2798, 3.7290, 4.3148], + device='cuda:0'), covar=tensor([0.1418, 0.0673, 0.0576, 0.0578, 0.5339, 0.0717, 0.0612, 0.1112], + device='cuda:0'), in_proj_covar=tensor([0.0696, 0.0622, 0.0826, 0.0705, 0.0739, 0.0574, 0.0496, 0.0758], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 01:19:32,520 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2948, 3.7761, 3.9033, 3.9039, 1.5674, 3.6868, 3.2135, 3.6043], + device='cuda:0'), covar=tensor([0.1470, 0.0806, 0.0669, 0.0668, 0.5172, 0.0847, 0.0672, 0.1187], + device='cuda:0'), in_proj_covar=tensor([0.0696, 0.0622, 0.0825, 0.0705, 0.0739, 0.0574, 0.0495, 0.0757], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 01:19:49,133 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. 
limit=2.0 +2023-04-02 01:20:13,334 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.409e+02 5.304e+02 6.722e+02 8.627e+02 2.252e+03, threshold=1.344e+03, percent-clipped=3.0 +2023-04-02 01:20:16,890 INFO [train.py:903] (0/4) Epoch 13, batch 6250, loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06041, over 19748.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3044, pruned_loss=0.07873, over 3771122.95 frames. ], batch size: 51, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:20:29,078 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88194.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:20:44,745 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 01:20:58,225 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:20,740 INFO [train.py:903] (0/4) Epoch 13, batch 6300, loss[loss=0.2021, simple_loss=0.2716, pruned_loss=0.06629, over 19389.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3044, pruned_loss=0.0782, over 3794436.76 frames. ], batch size: 47, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:21:23,572 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:32,847 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.10 vs. limit=5.0 +2023-04-02 01:21:52,933 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:22:18,337 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.052e+02 6.199e+02 7.712e+02 1.989e+03, threshold=1.240e+03, percent-clipped=5.0 +2023-04-02 01:22:21,909 INFO [train.py:903] (0/4) Epoch 13, batch 6350, loss[loss=0.2402, simple_loss=0.307, pruned_loss=0.08674, over 19470.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3033, pruned_loss=0.07727, over 3815806.04 frames. ], batch size: 49, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:22:32,660 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6239, 4.1742, 2.6257, 3.7717, 1.1780, 3.8756, 3.9865, 4.0648], + device='cuda:0'), covar=tensor([0.0573, 0.1032, 0.2095, 0.0753, 0.3907, 0.0814, 0.0815, 0.1115], + device='cuda:0'), in_proj_covar=tensor([0.0444, 0.0374, 0.0451, 0.0321, 0.0386, 0.0380, 0.0373, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:22:50,064 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88309.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:23:02,580 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:08,247 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:24,604 INFO [train.py:903] (0/4) Epoch 13, batch 6400, loss[loss=0.2107, simple_loss=0.2888, pruned_loss=0.06632, over 19583.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.303, pruned_loss=0.07677, over 3814692.58 frames. 
], batch size: 52, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:23:27,262 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:31,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:38,151 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,464 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,527 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3549, 1.4061, 1.6382, 1.5578, 2.4370, 2.1243, 2.5146, 0.8577], + device='cuda:0'), covar=tensor([0.2116, 0.3757, 0.2231, 0.1701, 0.1364, 0.1877, 0.1244, 0.3867], + device='cuda:0'), in_proj_covar=tensor([0.0499, 0.0592, 0.0629, 0.0448, 0.0603, 0.0499, 0.0645, 0.0505], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 01:24:16,128 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8951, 1.9925, 2.2846, 2.7205, 1.8594, 2.6217, 2.4216, 2.0523], + device='cuda:0'), covar=tensor([0.3969, 0.3450, 0.1571, 0.1973, 0.3666, 0.1600, 0.3851, 0.2900], + device='cuda:0'), in_proj_covar=tensor([0.0817, 0.0848, 0.0662, 0.0901, 0.0798, 0.0733, 0.0800, 0.0726], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:24:18,740 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 01:24:22,198 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 5.639e+02 6.967e+02 8.699e+02 1.659e+03, threshold=1.393e+03, percent-clipped=7.0 +2023-04-02 01:24:25,785 INFO [train.py:903] (0/4) Epoch 13, batch 6450, loss[loss=0.2262, simple_loss=0.3177, pruned_loss=0.06732, over 19678.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3029, pruned_loss=0.07679, over 3806368.89 frames. ], batch size: 60, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:05,097 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 01:25:18,090 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7646, 1.8677, 2.0821, 2.3488, 1.6019, 2.2601, 2.2448, 1.9010], + device='cuda:0'), covar=tensor([0.3915, 0.3343, 0.1753, 0.1933, 0.3601, 0.1717, 0.4135, 0.3024], + device='cuda:0'), in_proj_covar=tensor([0.0813, 0.0845, 0.0661, 0.0898, 0.0796, 0.0729, 0.0797, 0.0722], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:25:28,933 INFO [train.py:903] (0/4) Epoch 13, batch 6500, loss[loss=0.1844, simple_loss=0.2577, pruned_loss=0.05562, over 19135.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3037, pruned_loss=0.07686, over 3810957.76 frames. ], batch size: 42, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:30,993 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 01:26:00,474 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:26:26,788 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.190e+02 6.145e+02 7.751e+02 1.614e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 01:26:30,119 INFO [train.py:903] (0/4) Epoch 13, batch 6550, loss[loss=0.2651, simple_loss=0.3176, pruned_loss=0.1063, over 19744.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3029, pruned_loss=0.07689, over 3806854.49 frames. ], batch size: 47, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:27:11,705 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1623, 1.8123, 1.4868, 1.2058, 1.6513, 1.1885, 1.2074, 1.6742], + device='cuda:0'), covar=tensor([0.0725, 0.0768, 0.0982, 0.0697, 0.0454, 0.1102, 0.0532, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0304, 0.0326, 0.0249, 0.0239, 0.0321, 0.0293, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:27:31,540 INFO [train.py:903] (0/4) Epoch 13, batch 6600, loss[loss=0.2416, simple_loss=0.3191, pruned_loss=0.08203, over 19472.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3035, pruned_loss=0.07671, over 3808707.94 frames. ], batch size: 64, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:28:21,373 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:28:28,438 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 5.175e+02 6.632e+02 9.239e+02 1.861e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-04-02 01:28:29,320 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 01:28:32,111 INFO [train.py:903] (0/4) Epoch 13, batch 6650, loss[loss=0.1962, simple_loss=0.2689, pruned_loss=0.06174, over 19329.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07721, over 3804656.39 frames. ], batch size: 44, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:28:42,994 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5322, 2.3783, 1.7777, 1.3435, 2.2209, 1.2342, 1.3241, 2.0148], + device='cuda:0'), covar=tensor([0.1072, 0.0632, 0.0964, 0.0824, 0.0483, 0.1219, 0.0750, 0.0432], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0307, 0.0330, 0.0252, 0.0240, 0.0326, 0.0297, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:29:33,913 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6075, 1.6666, 1.9604, 1.7632, 3.1283, 2.5074, 3.2919, 1.8149], + device='cuda:0'), covar=tensor([0.2184, 0.3922, 0.2374, 0.1736, 0.1371, 0.1878, 0.1401, 0.3456], + device='cuda:0'), in_proj_covar=tensor([0.0502, 0.0592, 0.0629, 0.0447, 0.0604, 0.0497, 0.0648, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:29:34,644 INFO [train.py:903] (0/4) Epoch 13, batch 6700, loss[loss=0.2063, simple_loss=0.2918, pruned_loss=0.06041, over 17457.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3036, pruned_loss=0.07698, over 3805277.44 frames. 
], batch size: 102, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:55,527 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88653.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:29:55,691 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5721, 1.4431, 1.3786, 1.8862, 1.5208, 2.0153, 1.9459, 1.8019], + device='cuda:0'), covar=tensor([0.0820, 0.0916, 0.1030, 0.0844, 0.0846, 0.0650, 0.0800, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0223, 0.0222, 0.0242, 0.0227, 0.0209, 0.0192, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 01:30:29,681 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 5.222e+02 6.682e+02 8.627e+02 1.913e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-02 01:30:33,139 INFO [train.py:903] (0/4) Epoch 13, batch 6750, loss[loss=0.1821, simple_loss=0.2656, pruned_loss=0.04932, over 19488.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3032, pruned_loss=0.07691, over 3804172.50 frames. ], batch size: 49, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:31:31,219 INFO [train.py:903] (0/4) Epoch 13, batch 6800, loss[loss=0.2368, simple_loss=0.3195, pruned_loss=0.077, over 19342.00 frames. ], tot_loss[loss=0.228, simple_loss=0.303, pruned_loss=0.07655, over 3798708.81 frames. ], batch size: 70, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:32:01,944 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-13.pt +2023-04-02 01:32:18,413 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 01:32:18,907 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 01:32:21,845 INFO [train.py:903] (0/4) Epoch 14, batch 0, loss[loss=0.3079, simple_loss=0.3756, pruned_loss=0.1201, over 19586.00 frames. ], tot_loss[loss=0.3079, simple_loss=0.3756, pruned_loss=0.1201, over 19586.00 frames. ], batch size: 61, lr: 6.05e-03, grad_scale: 8.0 +2023-04-02 01:32:21,846 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 01:32:33,656 INFO [train.py:937] (0/4) Epoch 14, validation: loss=0.1763, simple_loss=0.2772, pruned_loss=0.03773, over 944034.00 frames. +2023-04-02 01:32:33,660 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 01:32:41,835 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88768.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:32:49,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 01:32:59,173 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.998e+02 6.377e+02 7.989e+02 1.719e+03, threshold=1.275e+03, percent-clipped=2.0 +2023-04-02 01:33:10,437 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 01:33:40,052 INFO [train.py:903] (0/4) Epoch 14, batch 50, loss[loss=0.1951, simple_loss=0.2799, pruned_loss=0.05516, over 19683.00 frames. ], tot_loss[loss=0.221, simple_loss=0.298, pruned_loss=0.072, over 862967.96 frames. 
], batch size: 53, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:01,734 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:15,373 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 01:34:34,180 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:40,591 INFO [train.py:903] (0/4) Epoch 14, batch 100, loss[loss=0.2077, simple_loss=0.2884, pruned_loss=0.0635, over 19708.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3007, pruned_loss=0.07393, over 1524846.09 frames. ], batch size: 53, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:51,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 01:35:02,787 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.271e+02 6.621e+02 8.700e+02 2.391e+03, threshold=1.324e+03, percent-clipped=10.0 +2023-04-02 01:35:41,065 INFO [train.py:903] (0/4) Epoch 14, batch 150, loss[loss=0.1663, simple_loss=0.2469, pruned_loss=0.04285, over 19412.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2994, pruned_loss=0.07353, over 2049630.45 frames. ], batch size: 48, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:35:53,873 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2632, 1.1529, 1.2411, 1.3456, 1.0275, 1.3309, 1.3268, 1.2576], + device='cuda:0'), covar=tensor([0.0850, 0.1056, 0.1071, 0.0691, 0.0836, 0.0853, 0.0856, 0.0775], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0223, 0.0225, 0.0244, 0.0228, 0.0210, 0.0193, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 01:36:00,295 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-02 01:36:29,581 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0143, 1.4144, 1.8775, 1.5582, 2.9721, 4.3099, 4.2703, 4.7918], + device='cuda:0'), covar=tensor([0.1748, 0.3460, 0.3114, 0.2010, 0.0546, 0.0186, 0.0182, 0.0144], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0301, 0.0331, 0.0251, 0.0220, 0.0165, 0.0207, 0.0217], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 01:36:38,764 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 01:36:39,942 INFO [train.py:903] (0/4) Epoch 14, batch 200, loss[loss=0.2337, simple_loss=0.3156, pruned_loss=0.07593, over 19805.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.0741, over 2442609.13 frames. ], batch size: 56, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:03,948 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 5.170e+02 6.545e+02 8.695e+02 1.666e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 01:37:41,107 INFO [train.py:903] (0/4) Epoch 14, batch 250, loss[loss=0.1846, simple_loss=0.2765, pruned_loss=0.04639, over 19667.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3003, pruned_loss=0.07441, over 2737992.99 frames. 
], batch size: 55, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:53,626 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89024.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:17,611 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:38:22,447 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89049.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:43,490 INFO [train.py:903] (0/4) Epoch 14, batch 300, loss[loss=0.2156, simple_loss=0.3074, pruned_loss=0.0619, over 19598.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2999, pruned_loss=0.07391, over 2981558.45 frames. ], batch size: 57, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:38:52,772 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7500, 4.2492, 4.4321, 4.4143, 1.5042, 4.1380, 3.6056, 4.1103], + device='cuda:0'), covar=tensor([0.1427, 0.0729, 0.0576, 0.0615, 0.5519, 0.0746, 0.0649, 0.1095], + device='cuda:0'), in_proj_covar=tensor([0.0707, 0.0630, 0.0836, 0.0715, 0.0753, 0.0583, 0.0504, 0.0764], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 01:39:05,434 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.830e+02 5.923e+02 7.544e+02 2.111e+03, threshold=1.185e+03, percent-clipped=1.0 +2023-04-02 01:39:45,147 INFO [train.py:903] (0/4) Epoch 14, batch 350, loss[loss=0.292, simple_loss=0.3477, pruned_loss=0.1182, over 13378.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3004, pruned_loss=0.07389, over 3169929.50 frames. ], batch size: 136, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:39:47,483 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 01:40:29,897 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2057, 2.2215, 2.4204, 3.1960, 2.1847, 3.0162, 2.6659, 2.1767], + device='cuda:0'), covar=tensor([0.3892, 0.3621, 0.1579, 0.2122, 0.3990, 0.1713, 0.3685, 0.2832], + device='cuda:0'), in_proj_covar=tensor([0.0824, 0.0859, 0.0666, 0.0908, 0.0807, 0.0738, 0.0811, 0.0728], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:40:46,839 INFO [train.py:903] (0/4) Epoch 14, batch 400, loss[loss=0.2608, simple_loss=0.3274, pruned_loss=0.09709, over 19670.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.301, pruned_loss=0.07439, over 3313624.44 frames. 
], batch size: 60, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:41:11,936 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.893e+02 5.942e+02 7.582e+02 1.529e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-02 01:41:16,770 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:41:36,314 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3857, 2.1291, 2.0699, 2.4972, 2.2833, 2.1417, 2.0233, 2.5806], + device='cuda:0'), covar=tensor([0.0872, 0.1550, 0.1324, 0.0914, 0.1261, 0.0473, 0.1147, 0.0583], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0353, 0.0297, 0.0240, 0.0298, 0.0243, 0.0285, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:41:47,623 INFO [train.py:903] (0/4) Epoch 14, batch 450, loss[loss=0.2242, simple_loss=0.3024, pruned_loss=0.07295, over 19478.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3007, pruned_loss=0.07375, over 3431116.92 frames. ], batch size: 49, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:42:19,958 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 01:42:20,928 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 01:42:51,726 INFO [train.py:903] (0/4) Epoch 14, batch 500, loss[loss=0.236, simple_loss=0.3085, pruned_loss=0.08172, over 19494.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3012, pruned_loss=0.07454, over 3525901.39 frames. ], batch size: 49, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:43:04,783 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7282, 1.5057, 1.3585, 1.6122, 1.4770, 1.3617, 1.3336, 1.5693], + device='cuda:0'), covar=tensor([0.1042, 0.1327, 0.1560, 0.1001, 0.1223, 0.0729, 0.1479, 0.0834], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0350, 0.0295, 0.0239, 0.0295, 0.0241, 0.0285, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:43:13,298 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.086e+02 6.501e+02 8.394e+02 1.936e+03, threshold=1.300e+03, percent-clipped=6.0 +2023-04-02 01:43:51,231 INFO [train.py:903] (0/4) Epoch 14, batch 550, loss[loss=0.2364, simple_loss=0.3128, pruned_loss=0.08005, over 19662.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3001, pruned_loss=0.07455, over 3599090.50 frames. ], batch size: 60, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:44:05,920 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 01:44:50,891 INFO [train.py:903] (0/4) Epoch 14, batch 600, loss[loss=0.2613, simple_loss=0.3329, pruned_loss=0.09488, over 19701.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2993, pruned_loss=0.07412, over 3639423.30 frames. 
], batch size: 59, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:45:14,768 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.437e+02 6.463e+02 8.394e+02 1.645e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 01:45:23,617 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:27,402 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:32,883 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 01:45:52,142 INFO [train.py:903] (0/4) Epoch 14, batch 650, loss[loss=0.2201, simple_loss=0.2995, pruned_loss=0.07035, over 19726.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2998, pruned_loss=0.07407, over 3695225.61 frames. ], batch size: 51, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:45:54,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9534, 5.2895, 2.7640, 4.8207, 0.9903, 5.3211, 5.3211, 5.4952], + device='cuda:0'), covar=tensor([0.0426, 0.0929, 0.2140, 0.0674, 0.4413, 0.0576, 0.0703, 0.0782], + device='cuda:0'), in_proj_covar=tensor([0.0452, 0.0379, 0.0456, 0.0332, 0.0394, 0.0386, 0.0383, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:46:06,882 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 01:46:29,617 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89443.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:46:45,791 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89457.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:46:53,908 INFO [train.py:903] (0/4) Epoch 14, batch 700, loss[loss=0.2305, simple_loss=0.3082, pruned_loss=0.07638, over 19751.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07384, over 3733615.72 frames. ], batch size: 54, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:47:21,078 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.092e+02 5.132e+02 6.716e+02 8.076e+02 1.293e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-02 01:47:44,491 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:47:59,821 INFO [train.py:903] (0/4) Epoch 14, batch 750, loss[loss=0.2692, simple_loss=0.3288, pruned_loss=0.1048, over 18285.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2988, pruned_loss=0.07365, over 3754554.21 frames. ], batch size: 83, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:48:14,781 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:19,166 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:49:01,550 INFO [train.py:903] (0/4) Epoch 14, batch 800, loss[loss=0.2775, simple_loss=0.349, pruned_loss=0.103, over 19330.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2994, pruned_loss=0.07417, over 3770767.65 frames. ], batch size: 70, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:49:16,468 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 01:49:24,433 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.343e+02 6.523e+02 8.164e+02 1.688e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 01:49:50,617 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5913, 1.6880, 1.8252, 2.0130, 1.3669, 1.8520, 1.9285, 1.6917], + device='cuda:0'), covar=tensor([0.3580, 0.2980, 0.1614, 0.1848, 0.3286, 0.1628, 0.3953, 0.2815], + device='cuda:0'), in_proj_covar=tensor([0.0813, 0.0851, 0.0662, 0.0896, 0.0798, 0.0730, 0.0803, 0.0724], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 01:50:01,793 INFO [train.py:903] (0/4) Epoch 14, batch 850, loss[loss=0.2494, simple_loss=0.3261, pruned_loss=0.08638, over 18847.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3015, pruned_loss=0.07537, over 3777591.70 frames. ], batch size: 74, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:50:42,876 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:50:44,471 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 01:50:52,347 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1651, 1.2613, 1.6715, 1.0317, 2.5632, 3.3268, 3.1056, 3.5606], + device='cuda:0'), covar=tensor([0.1598, 0.3550, 0.3175, 0.2262, 0.0502, 0.0191, 0.0226, 0.0244], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0302, 0.0333, 0.0253, 0.0221, 0.0166, 0.0206, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 01:50:55,564 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 01:51:03,553 INFO [train.py:903] (0/4) Epoch 14, batch 900, loss[loss=0.2087, simple_loss=0.2802, pruned_loss=0.06859, over 19482.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3006, pruned_loss=0.07481, over 3795246.23 frames. ], batch size: 49, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:51:29,667 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 4.983e+02 6.131e+02 7.467e+02 1.791e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 01:51:46,402 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:52:07,440 INFO [train.py:903] (0/4) Epoch 14, batch 950, loss[loss=0.2902, simple_loss=0.3646, pruned_loss=0.1079, over 19608.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.0745, over 3812977.18 frames. ], batch size: 57, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:52:07,503 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 01:52:33,950 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:52:47,338 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-02 01:53:06,405 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:10,804 INFO [train.py:903] (0/4) Epoch 14, batch 1000, loss[loss=0.2385, simple_loss=0.3175, pruned_loss=0.07974, over 19337.00 frames. 
], tot_loss[loss=0.2266, simple_loss=0.3022, pruned_loss=0.07546, over 3799486.05 frames. ], batch size: 66, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:53:19,417 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6601, 1.5451, 1.5147, 2.0744, 1.6992, 1.9301, 2.0819, 1.7873], + device='cuda:0'), covar=tensor([0.0728, 0.0836, 0.0962, 0.0744, 0.0831, 0.0687, 0.0746, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0222, 0.0223, 0.0242, 0.0229, 0.0209, 0.0192, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 01:53:34,646 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.300e+02 6.720e+02 8.420e+02 1.635e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-02 01:53:36,373 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:38,545 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:59,141 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89801.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:54:02,274 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 01:54:13,787 INFO [train.py:903] (0/4) Epoch 14, batch 1050, loss[loss=0.2614, simple_loss=0.3269, pruned_loss=0.09795, over 17573.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3026, pruned_loss=0.07575, over 3801561.66 frames. ], batch size: 101, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:54:21,209 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6783, 3.3454, 2.5037, 3.0358, 1.4623, 3.2034, 3.1465, 3.2540], + device='cuda:0'), covar=tensor([0.0908, 0.1069, 0.2012, 0.0852, 0.2986, 0.0872, 0.0912, 0.1257], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0372, 0.0447, 0.0325, 0.0390, 0.0381, 0.0373, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 01:54:46,491 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 01:55:02,247 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:16,496 INFO [train.py:903] (0/4) Epoch 14, batch 1100, loss[loss=0.2184, simple_loss=0.2802, pruned_loss=0.07829, over 19028.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3014, pruned_loss=0.07525, over 3806638.55 frames. 
], batch size: 42, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:55:24,734 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,015 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89883.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,842 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 5.450e+02 6.738e+02 9.006e+02 2.173e+03, threshold=1.348e+03, percent-clipped=6.0 +2023-04-02 01:56:05,591 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:05,661 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:19,138 INFO [train.py:903] (0/4) Epoch 14, batch 1150, loss[loss=0.2008, simple_loss=0.282, pruned_loss=0.05976, over 19461.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3018, pruned_loss=0.07555, over 3813092.95 frames. ], batch size: 49, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:56:22,861 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89916.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:56:39,102 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:25,203 INFO [train.py:903] (0/4) Epoch 14, batch 1200, loss[loss=0.2267, simple_loss=0.3035, pruned_loss=0.07495, over 19747.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3028, pruned_loss=0.07614, over 3801976.56 frames. ], batch size: 51, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:57:49,396 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.889e+02 5.798e+02 7.146e+02 1.027e+03, threshold=1.160e+03, percent-clipped=0.0 +2023-04-02 01:57:51,958 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:55,100 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 01:58:10,200 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-90000.pt +2023-04-02 01:58:28,857 INFO [train.py:903] (0/4) Epoch 14, batch 1250, loss[loss=0.1912, simple_loss=0.2757, pruned_loss=0.05331, over 19768.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3027, pruned_loss=0.07603, over 3810441.57 frames. ], batch size: 54, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:04,891 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:59:31,268 INFO [train.py:903] (0/4) Epoch 14, batch 1300, loss[loss=0.2363, simple_loss=0.3125, pruned_loss=0.08008, over 19656.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3024, pruned_loss=0.07613, over 3809877.77 frames. 
], batch size: 58, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:36,504 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7719, 1.7479, 1.5446, 1.3163, 1.3465, 1.4327, 0.1913, 0.6475], + device='cuda:0'), covar=tensor([0.0499, 0.0491, 0.0309, 0.0486, 0.1000, 0.0569, 0.0909, 0.0818], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0337, 0.0335, 0.0364, 0.0438, 0.0361, 0.0319, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 01:59:57,908 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 5.410e+02 6.769e+02 7.813e+02 2.183e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-02 02:00:02,793 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0763, 1.7652, 1.3553, 1.0965, 1.6152, 1.0058, 1.1714, 1.6383], + device='cuda:0'), covar=tensor([0.0716, 0.0676, 0.0976, 0.0679, 0.0417, 0.1162, 0.0567, 0.0374], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0307, 0.0327, 0.0250, 0.0238, 0.0326, 0.0295, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:00:25,735 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:00:33,598 INFO [train.py:903] (0/4) Epoch 14, batch 1350, loss[loss=0.2246, simple_loss=0.3056, pruned_loss=0.07178, over 19514.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3021, pruned_loss=0.07584, over 3813974.03 frames. ], batch size: 56, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:00:58,138 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:16,473 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:24,049 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 02:01:28,058 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:29,272 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:35,841 INFO [train.py:903] (0/4) Epoch 14, batch 1400, loss[loss=0.187, simple_loss=0.2609, pruned_loss=0.05649, over 19383.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3023, pruned_loss=0.07561, over 3817195.20 frames. ], batch size: 47, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:01:46,540 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90172.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:01:59,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:02:00,110 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.450e+02 7.234e+02 1.006e+03 2.886e+03, threshold=1.447e+03, percent-clipped=11.0 +2023-04-02 02:02:16,740 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90197.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:02:34,949 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 02:02:38,346 INFO [train.py:903] (0/4) Epoch 14, batch 1450, loss[loss=0.242, simple_loss=0.3054, pruned_loss=0.08927, over 19747.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3006, pruned_loss=0.07485, over 3814818.41 frames. ], batch size: 51, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:02:53,333 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90227.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:12,031 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:22,173 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:38,190 INFO [train.py:903] (0/4) Epoch 14, batch 1500, loss[loss=0.2038, simple_loss=0.2721, pruned_loss=0.06775, over 19754.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2993, pruned_loss=0.07446, over 3830032.41 frames. ], batch size: 47, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:03:42,138 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:04:03,122 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.072e+02 6.110e+02 7.921e+02 2.000e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 02:04:09,139 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2515, 3.7439, 3.8678, 3.8572, 1.5821, 3.6171, 3.1516, 3.5910], + device='cuda:0'), covar=tensor([0.1405, 0.0932, 0.0582, 0.0672, 0.4868, 0.0818, 0.0648, 0.1052], + device='cuda:0'), in_proj_covar=tensor([0.0700, 0.0624, 0.0827, 0.0710, 0.0746, 0.0576, 0.0499, 0.0764], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:04:23,047 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0701, 3.5935, 1.9827, 2.1810, 3.2088, 1.7839, 1.2730, 2.0680], + device='cuda:0'), covar=tensor([0.1274, 0.0433, 0.1008, 0.0782, 0.0469, 0.1154, 0.1046, 0.0720], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0308, 0.0327, 0.0251, 0.0238, 0.0327, 0.0298, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:04:39,097 INFO [train.py:903] (0/4) Epoch 14, batch 1550, loss[loss=0.237, simple_loss=0.3172, pruned_loss=0.0784, over 19478.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3002, pruned_loss=0.07455, over 3845936.63 frames. ], batch size: 64, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:16,362 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:05:42,760 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9058, 2.5861, 1.7429, 1.8197, 2.3749, 1.7384, 1.5032, 1.9958], + device='cuda:0'), covar=tensor([0.0917, 0.0580, 0.0755, 0.0647, 0.0432, 0.0863, 0.0732, 0.0543], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0308, 0.0327, 0.0250, 0.0239, 0.0327, 0.0297, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:05:44,735 INFO [train.py:903] (0/4) Epoch 14, batch 1600, loss[loss=0.2621, simple_loss=0.3322, pruned_loss=0.09605, over 19560.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3007, pruned_loss=0.07494, over 3819950.79 frames. 
], batch size: 61, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:51,752 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:02,642 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 02:06:08,372 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.288e+02 6.387e+02 7.705e+02 1.564e+03, threshold=1.277e+03, percent-clipped=2.0 +2023-04-02 02:06:46,534 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:47,284 INFO [train.py:903] (0/4) Epoch 14, batch 1650, loss[loss=0.2005, simple_loss=0.2651, pruned_loss=0.06796, over 19743.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3001, pruned_loss=0.07475, over 3813896.82 frames. ], batch size: 46, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:14,984 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:07:47,914 INFO [train.py:903] (0/4) Epoch 14, batch 1700, loss[loss=0.1908, simple_loss=0.2702, pruned_loss=0.0557, over 19456.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3009, pruned_loss=0.07492, over 3826540.70 frames. ], batch size: 49, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:50,645 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3057, 2.3902, 2.4597, 3.1632, 2.2497, 2.9272, 2.6711, 2.4034], + device='cuda:0'), covar=tensor([0.3597, 0.3246, 0.1568, 0.2079, 0.3660, 0.1754, 0.3795, 0.2714], + device='cuda:0'), in_proj_covar=tensor([0.0817, 0.0852, 0.0665, 0.0893, 0.0797, 0.0737, 0.0800, 0.0729], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 02:08:03,304 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:13,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 5.354e+02 6.441e+02 8.114e+02 1.316e+03, threshold=1.288e+03, percent-clipped=1.0 +2023-04-02 02:08:19,178 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 02:08:23,204 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:25,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 02:08:49,500 INFO [train.py:903] (0/4) Epoch 14, batch 1750, loss[loss=0.2426, simple_loss=0.3246, pruned_loss=0.08028, over 19353.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3005, pruned_loss=0.07418, over 3842857.81 frames. ], batch size: 66, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:08:49,818 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:09:54,005 INFO [train.py:903] (0/4) Epoch 14, batch 1800, loss[loss=0.2687, simple_loss=0.3438, pruned_loss=0.09684, over 19480.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3002, pruned_loss=0.07385, over 3842069.97 frames. ], batch size: 64, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:10:17,889 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.96 vs. 
limit=5.0 +2023-04-02 02:10:18,178 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.749e+02 5.076e+02 6.157e+02 8.007e+02 1.318e+03, threshold=1.231e+03, percent-clipped=1.0 +2023-04-02 02:10:29,854 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:34,930 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:40,545 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-02 02:10:47,953 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:48,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 02:10:55,664 INFO [train.py:903] (0/4) Epoch 14, batch 1850, loss[loss=0.2694, simple_loss=0.3372, pruned_loss=0.1008, over 19715.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2997, pruned_loss=0.07406, over 3831678.68 frames. ], batch size: 63, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:11:06,609 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 02:11:07,313 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:11:24,438 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 02:11:30,931 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0103, 3.6155, 2.5646, 3.2585, 0.7879, 3.4912, 3.4070, 3.4908], + device='cuda:0'), covar=tensor([0.0723, 0.1070, 0.1830, 0.0892, 0.4062, 0.0816, 0.0921, 0.1071], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0374, 0.0451, 0.0326, 0.0391, 0.0384, 0.0379, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:11:38,451 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-02 02:11:58,527 INFO [train.py:903] (0/4) Epoch 14, batch 1900, loss[loss=0.1928, simple_loss=0.2777, pruned_loss=0.05394, over 19479.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2993, pruned_loss=0.07352, over 3830669.93 frames. ], batch size: 49, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:12:07,987 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5638, 4.0833, 4.2414, 4.2354, 1.4699, 3.9533, 3.4817, 3.9636], + device='cuda:0'), covar=tensor([0.1529, 0.0754, 0.0580, 0.0646, 0.5451, 0.0807, 0.0632, 0.1086], + device='cuda:0'), in_proj_covar=tensor([0.0713, 0.0635, 0.0844, 0.0730, 0.0758, 0.0590, 0.0508, 0.0778], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:12:12,350 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-02 02:12:14,023 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5662, 2.4030, 1.8004, 1.6535, 2.2167, 1.4272, 1.4259, 1.9812], + device='cuda:0'), covar=tensor([0.0918, 0.0651, 0.0897, 0.0671, 0.0453, 0.1142, 0.0680, 0.0435], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0309, 0.0329, 0.0252, 0.0241, 0.0328, 0.0297, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:12:15,968 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 02:12:18,349 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8903, 4.3292, 4.5949, 4.5928, 1.6845, 4.3075, 3.7942, 4.2683], + device='cuda:0'), covar=tensor([0.1476, 0.0814, 0.0513, 0.0580, 0.5280, 0.0712, 0.0596, 0.1040], + device='cuda:0'), in_proj_covar=tensor([0.0713, 0.0634, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0777], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:12:24,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.065e+02 6.349e+02 7.558e+02 1.663e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-02 02:12:29,237 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2090, 3.7922, 3.9036, 3.8738, 1.4369, 3.6750, 3.2590, 3.6029], + device='cuda:0'), covar=tensor([0.1612, 0.0768, 0.0642, 0.0693, 0.5423, 0.0837, 0.0655, 0.1204], + device='cuda:0'), in_proj_covar=tensor([0.0713, 0.0633, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0778], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:12:43,823 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 02:12:48,705 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6183, 1.3868, 1.4742, 2.0485, 1.5943, 1.8752, 2.0397, 1.7362], + device='cuda:0'), covar=tensor([0.0866, 0.1010, 0.1070, 0.0870, 0.0886, 0.0773, 0.0838, 0.0724], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0222, 0.0223, 0.0243, 0.0230, 0.0209, 0.0192, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 02:12:54,521 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:12:59,211 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:13:00,230 INFO [train.py:903] (0/4) Epoch 14, batch 1950, loss[loss=0.2289, simple_loss=0.3067, pruned_loss=0.07554, over 19695.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3006, pruned_loss=0.07413, over 3833466.48 frames. 
], batch size: 53, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:13:40,096 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2282, 1.3273, 1.2223, 1.0139, 1.0735, 1.0976, 0.0442, 0.3927], + device='cuda:0'), covar=tensor([0.0541, 0.0510, 0.0324, 0.0424, 0.1036, 0.0472, 0.0958, 0.0873], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0336, 0.0331, 0.0359, 0.0434, 0.0358, 0.0315, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 02:14:03,108 INFO [train.py:903] (0/4) Epoch 14, batch 2000, loss[loss=0.2678, simple_loss=0.3334, pruned_loss=0.1011, over 19527.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3011, pruned_loss=0.07448, over 3843564.75 frames. ], batch size: 56, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:14:22,438 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4311, 1.3093, 1.4728, 1.3926, 2.9869, 0.8947, 2.3099, 3.3464], + device='cuda:0'), covar=tensor([0.0458, 0.2644, 0.2637, 0.1782, 0.0712, 0.2546, 0.1065, 0.0279], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0347, 0.0360, 0.0329, 0.0351, 0.0335, 0.0346, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:14:27,795 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.207e+02 5.999e+02 7.179e+02 1.269e+03, threshold=1.200e+03, percent-clipped=0.0 +2023-04-02 02:14:59,995 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 02:15:05,781 INFO [train.py:903] (0/4) Epoch 14, batch 2050, loss[loss=0.2161, simple_loss=0.2891, pruned_loss=0.07149, over 19470.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07379, over 3838793.39 frames. ], batch size: 49, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:15:14,225 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90821.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:18,967 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 02:15:20,147 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 02:15:23,959 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90828.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:42,039 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 02:16:01,413 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:07,355 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:08,093 INFO [train.py:903] (0/4) Epoch 14, batch 2100, loss[loss=0.2059, simple_loss=0.2855, pruned_loss=0.06317, over 19616.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3005, pruned_loss=0.07389, over 3836437.58 frames. 
], batch size: 50, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:16:12,820 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:32,691 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.435e+02 5.132e+02 6.276e+02 7.348e+02 1.970e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 02:16:37,024 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 02:16:38,554 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:46,370 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0071, 1.7843, 1.7160, 2.1725, 1.9183, 1.7678, 1.8505, 2.0152], + device='cuda:0'), covar=tensor([0.0890, 0.1455, 0.1250, 0.0836, 0.1098, 0.0476, 0.1054, 0.0651], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0354, 0.0296, 0.0240, 0.0295, 0.0240, 0.0288, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:16:59,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 02:17:09,967 INFO [train.py:903] (0/4) Epoch 14, batch 2150, loss[loss=0.1861, simple_loss=0.2636, pruned_loss=0.05427, over 19402.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3001, pruned_loss=0.07398, over 3830356.73 frames. ], batch size: 48, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:17:38,816 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:17:53,046 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 02:18:10,987 INFO [train.py:903] (0/4) Epoch 14, batch 2200, loss[loss=0.1822, simple_loss=0.2554, pruned_loss=0.05445, over 19818.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3001, pruned_loss=0.07445, over 3832226.16 frames. ], batch size: 48, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:18:12,455 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:18,949 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3576, 1.4395, 2.0042, 1.7259, 3.2251, 4.8208, 4.7226, 5.1210], + device='cuda:0'), covar=tensor([0.1569, 0.3390, 0.2898, 0.1864, 0.0469, 0.0172, 0.0138, 0.0152], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0302, 0.0332, 0.0254, 0.0225, 0.0166, 0.0206, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 02:18:23,309 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:35,015 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. 
limit=5.0 +2023-04-02 02:18:35,325 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 5.132e+02 5.971e+02 7.684e+02 1.888e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-02 02:18:43,488 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90990.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:19:13,355 INFO [train.py:903] (0/4) Epoch 14, batch 2250, loss[loss=0.2102, simple_loss=0.2796, pruned_loss=0.07043, over 19426.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2999, pruned_loss=0.07429, over 3820776.29 frames. ], batch size: 48, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:14,199 INFO [train.py:903] (0/4) Epoch 14, batch 2300, loss[loss=0.2386, simple_loss=0.3173, pruned_loss=0.07991, over 19674.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3006, pruned_loss=0.07519, over 3793024.99 frames. ], batch size: 55, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:26,833 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 02:20:38,186 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.417e+02 6.829e+02 8.730e+02 1.535e+03, threshold=1.366e+03, percent-clipped=12.0 +2023-04-02 02:20:38,633 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:20:47,630 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9101, 1.7886, 1.7519, 2.0441, 1.9398, 1.8036, 1.7122, 2.0066], + device='cuda:0'), covar=tensor([0.0964, 0.1468, 0.1288, 0.0945, 0.1123, 0.0477, 0.1154, 0.0605], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0293, 0.0239, 0.0285, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:21:11,049 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:21:16,451 INFO [train.py:903] (0/4) Epoch 14, batch 2350, loss[loss=0.2181, simple_loss=0.2823, pruned_loss=0.07697, over 19379.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3003, pruned_loss=0.07462, over 3808145.49 frames. ], batch size: 47, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:21:58,620 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 02:22:13,121 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 02:22:14,983 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.84 vs. limit=5.0 +2023-04-02 02:22:17,784 INFO [train.py:903] (0/4) Epoch 14, batch 2400, loss[loss=0.1968, simple_loss=0.2776, pruned_loss=0.05802, over 19495.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.301, pruned_loss=0.07485, over 3811367.40 frames. 
], batch size: 49, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:22:42,189 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.466e+02 6.599e+02 8.174e+02 1.804e+03, threshold=1.320e+03, percent-clipped=5.0 +2023-04-02 02:22:52,555 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9854, 4.3805, 4.6672, 4.6356, 1.6227, 4.3117, 3.7914, 4.3414], + device='cuda:0'), covar=tensor([0.1261, 0.0770, 0.0505, 0.0507, 0.5313, 0.0630, 0.0588, 0.1022], + device='cuda:0'), in_proj_covar=tensor([0.0708, 0.0636, 0.0839, 0.0724, 0.0756, 0.0586, 0.0504, 0.0777], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:22:52,716 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:15,504 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91211.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:19,601 INFO [train.py:903] (0/4) Epoch 14, batch 2450, loss[loss=0.2323, simple_loss=0.3126, pruned_loss=0.07599, over 19609.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3019, pruned_loss=0.07554, over 3808037.54 frames. ], batch size: 57, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:23:23,082 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:37,774 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91229.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:08,229 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:19,340 INFO [train.py:903] (0/4) Epoch 14, batch 2500, loss[loss=0.2072, simple_loss=0.2795, pruned_loss=0.06749, over 19380.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3014, pruned_loss=0.07544, over 3811320.53 frames. ], batch size: 48, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:24:32,865 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2949, 1.2763, 1.2457, 1.3807, 1.0779, 1.3441, 1.3987, 1.3575], + device='cuda:0'), covar=tensor([0.0875, 0.0908, 0.1050, 0.0640, 0.0790, 0.0798, 0.0783, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0241, 0.0228, 0.0209, 0.0191, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 02:24:35,619 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. 
limit=2.0 +2023-04-02 02:24:39,580 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7292, 3.4084, 2.6578, 3.0472, 1.4089, 3.2000, 3.2156, 3.2798], + device='cuda:0'), covar=tensor([0.0980, 0.1247, 0.1852, 0.0961, 0.3252, 0.0976, 0.0940, 0.1252], + device='cuda:0'), in_proj_covar=tensor([0.0452, 0.0380, 0.0454, 0.0330, 0.0392, 0.0386, 0.0378, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:24:42,799 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.074e+02 5.867e+02 6.968e+02 8.618e+02 1.617e+03, threshold=1.394e+03, percent-clipped=2.0 +2023-04-02 02:24:45,406 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9380, 1.2221, 1.6607, 0.8170, 2.3003, 3.0233, 2.7196, 3.2157], + device='cuda:0'), covar=tensor([0.1683, 0.3626, 0.3134, 0.2466, 0.0549, 0.0193, 0.0246, 0.0236], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0253, 0.0223, 0.0166, 0.0206, 0.0217], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 02:24:58,179 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2323, 1.9476, 1.9183, 2.3908, 2.4360, 1.8970, 1.9449, 2.3468], + device='cuda:0'), covar=tensor([0.1112, 0.1934, 0.1787, 0.1115, 0.1373, 0.0899, 0.1599, 0.0859], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0352, 0.0294, 0.0240, 0.0294, 0.0240, 0.0288, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:25:06,895 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91303.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:25:19,881 INFO [train.py:903] (0/4) Epoch 14, batch 2550, loss[loss=0.208, simple_loss=0.2908, pruned_loss=0.06256, over 19527.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3011, pruned_loss=0.07492, over 3805006.78 frames. ], batch size: 54, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:25:33,187 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:26:12,380 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 02:26:19,133 INFO [train.py:903] (0/4) Epoch 14, batch 2600, loss[loss=0.1831, simple_loss=0.2582, pruned_loss=0.05394, over 18641.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3003, pruned_loss=0.07454, over 3816631.25 frames. ], batch size: 41, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:26:44,916 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.557e+02 7.203e+02 9.059e+02 1.995e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-02 02:27:21,520 INFO [train.py:903] (0/4) Epoch 14, batch 2650, loss[loss=0.246, simple_loss=0.3147, pruned_loss=0.08863, over 19695.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3013, pruned_loss=0.07524, over 3819512.51 frames. ], batch size: 58, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:27:41,950 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 02:28:11,767 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5925, 1.2725, 1.4510, 1.3036, 2.2209, 0.9887, 2.0156, 2.4284], + device='cuda:0'), covar=tensor([0.0636, 0.2385, 0.2552, 0.1463, 0.0820, 0.1953, 0.0974, 0.0447], + device='cuda:0'), in_proj_covar=tensor([0.0371, 0.0346, 0.0363, 0.0327, 0.0355, 0.0338, 0.0347, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:28:18,977 INFO [train.py:903] (0/4) Epoch 14, batch 2700, loss[loss=0.2281, simple_loss=0.3123, pruned_loss=0.07193, over 19589.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3018, pruned_loss=0.07562, over 3831164.68 frames. ], batch size: 61, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:28:43,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.145e+02 6.524e+02 8.606e+02 2.089e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 02:29:13,059 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.32 vs. limit=5.0 +2023-04-02 02:29:20,022 INFO [train.py:903] (0/4) Epoch 14, batch 2750, loss[loss=0.1859, simple_loss=0.2617, pruned_loss=0.05503, over 19419.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3006, pruned_loss=0.07507, over 3815159.89 frames. ], batch size: 48, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:29:44,615 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 02:30:14,297 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91560.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:30:18,370 INFO [train.py:903] (0/4) Epoch 14, batch 2800, loss[loss=0.2347, simple_loss=0.311, pruned_loss=0.07917, over 17237.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3015, pruned_loss=0.07547, over 3804989.50 frames. ], batch size: 101, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:30:41,195 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:30:44,142 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 4.991e+02 6.176e+02 8.428e+02 2.269e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 02:31:09,979 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:31:19,256 INFO [train.py:903] (0/4) Epoch 14, batch 2850, loss[loss=0.2415, simple_loss=0.3225, pruned_loss=0.08023, over 17544.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3008, pruned_loss=0.0752, over 3783197.48 frames. ], batch size: 101, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:31:40,067 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.13 vs. limit=5.0 +2023-04-02 02:31:49,776 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 02:31:58,526 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91647.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:32:20,012 INFO [train.py:903] (0/4) Epoch 14, batch 2900, loss[loss=0.2482, simple_loss=0.3289, pruned_loss=0.08381, over 19496.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3004, pruned_loss=0.07505, over 3790924.00 frames. ], batch size: 64, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:32:20,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 02:32:44,081 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.659e+02 5.126e+02 6.310e+02 7.919e+02 2.445e+03, threshold=1.262e+03, percent-clipped=4.0 +2023-04-02 02:33:05,190 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5032, 1.6466, 2.1109, 1.7626, 3.3468, 2.8503, 3.7104, 1.6954], + device='cuda:0'), covar=tensor([0.2228, 0.3669, 0.2376, 0.1676, 0.1237, 0.1685, 0.1263, 0.3405], + device='cuda:0'), in_proj_covar=tensor([0.0500, 0.0587, 0.0633, 0.0446, 0.0597, 0.0500, 0.0644, 0.0504], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 02:33:19,572 INFO [train.py:903] (0/4) Epoch 14, batch 2950, loss[loss=0.2406, simple_loss=0.3171, pruned_loss=0.08204, over 19769.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3001, pruned_loss=0.07469, over 3797081.32 frames. ], batch size: 56, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:06,808 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-02 02:34:17,831 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91762.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:34:19,623 INFO [train.py:903] (0/4) Epoch 14, batch 3000, loss[loss=0.2181, simple_loss=0.2988, pruned_loss=0.0687, over 19668.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3006, pruned_loss=0.0749, over 3801705.93 frames. ], batch size: 58, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:19,623 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 02:34:36,638 INFO [train.py:937] (0/4) Epoch 14, validation: loss=0.1742, simple_loss=0.2751, pruned_loss=0.03671, over 944034.00 frames. +2023-04-02 02:34:36,639 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 02:34:42,026 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 02:35:02,710 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.261e+02 6.370e+02 8.625e+02 1.479e+03, threshold=1.274e+03, percent-clipped=4.0 +2023-04-02 02:35:37,796 INFO [train.py:903] (0/4) Epoch 14, batch 3050, loss[loss=0.1869, simple_loss=0.2649, pruned_loss=0.05446, over 19360.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3007, pruned_loss=0.07487, over 3807684.76 frames. ], batch size: 47, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:36:37,008 INFO [train.py:903] (0/4) Epoch 14, batch 3100, loss[loss=0.2578, simple_loss=0.3252, pruned_loss=0.09519, over 17310.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3004, pruned_loss=0.07463, over 3819358.64 frames. 
], batch size: 101, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:37:02,384 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.361e+02 6.622e+02 8.860e+02 2.580e+03, threshold=1.324e+03, percent-clipped=11.0 +2023-04-02 02:37:05,897 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2849, 3.8154, 2.2166, 2.4567, 3.4355, 2.0793, 1.6662, 2.2211], + device='cuda:0'), covar=tensor([0.1109, 0.0464, 0.0905, 0.0659, 0.0396, 0.1011, 0.0861, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0306, 0.0325, 0.0249, 0.0238, 0.0325, 0.0293, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:37:24,385 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:37:31,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:37:37,887 INFO [train.py:903] (0/4) Epoch 14, batch 3150, loss[loss=0.2057, simple_loss=0.2757, pruned_loss=0.06786, over 18261.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3001, pruned_loss=0.07496, over 3819096.43 frames. ], batch size: 40, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:04,271 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 02:38:10,518 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3346, 1.3698, 1.7389, 1.5140, 2.5451, 2.2213, 2.7339, 0.9971], + device='cuda:0'), covar=tensor([0.2442, 0.4179, 0.2627, 0.1916, 0.1622, 0.2098, 0.1535, 0.4283], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0591, 0.0637, 0.0451, 0.0602, 0.0504, 0.0651, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 02:38:37,363 INFO [train.py:903] (0/4) Epoch 14, batch 3200, loss[loss=0.2423, simple_loss=0.3228, pruned_loss=0.08087, over 19409.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3007, pruned_loss=0.07493, over 3814853.59 frames. ], batch size: 70, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:48,663 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:02,848 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.150e+02 6.206e+02 7.874e+02 1.849e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 02:39:20,951 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91999.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:39:21,911 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-92000.pt +2023-04-02 02:39:39,710 INFO [train.py:903] (0/4) Epoch 14, batch 3250, loss[loss=0.2195, simple_loss=0.3049, pruned_loss=0.06707, over 19606.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3008, pruned_loss=0.07495, over 3812260.34 frames. 
], batch size: 57, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:39:44,780 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:45,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92019.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:40:15,651 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:40:39,291 INFO [train.py:903] (0/4) Epoch 14, batch 3300, loss[loss=0.2612, simple_loss=0.3344, pruned_loss=0.094, over 19566.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3009, pruned_loss=0.07535, over 3804948.43 frames. ], batch size: 61, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:40:44,821 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 02:40:52,239 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 02:41:05,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.974e+02 6.176e+02 7.406e+02 2.018e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-02 02:41:41,642 INFO [train.py:903] (0/4) Epoch 14, batch 3350, loss[loss=0.2371, simple_loss=0.314, pruned_loss=0.08006, over 18647.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3004, pruned_loss=0.07478, over 3819512.20 frames. ], batch size: 74, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:42:40,612 INFO [train.py:903] (0/4) Epoch 14, batch 3400, loss[loss=0.1869, simple_loss=0.2658, pruned_loss=0.05397, over 19780.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3008, pruned_loss=0.0747, over 3821029.61 frames. ], batch size: 48, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:42:46,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0982, 5.1499, 5.9691, 5.9241, 1.9885, 5.5289, 4.7515, 5.5258], + device='cuda:0'), covar=tensor([0.1509, 0.0847, 0.0458, 0.0546, 0.5463, 0.0567, 0.0597, 0.1118], + device='cuda:0'), in_proj_covar=tensor([0.0717, 0.0647, 0.0857, 0.0738, 0.0763, 0.0594, 0.0517, 0.0782], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 02:43:05,849 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.934e+02 6.017e+02 7.496e+02 1.650e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-02 02:43:42,226 INFO [train.py:903] (0/4) Epoch 14, batch 3450, loss[loss=0.2364, simple_loss=0.3136, pruned_loss=0.0796, over 19610.00 frames. ], tot_loss[loss=0.225, simple_loss=0.301, pruned_loss=0.07452, over 3822893.12 frames. ], batch size: 57, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:43:44,674 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 02:44:14,161 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 02:44:28,719 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:44:40,673 INFO [train.py:903] (0/4) Epoch 14, batch 3500, loss[loss=0.1941, simple_loss=0.2876, pruned_loss=0.05028, over 19610.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.07445, over 3823831.01 frames. 
], batch size: 57, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:44:42,973 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4011, 2.1490, 1.5861, 1.4183, 2.0160, 1.1941, 1.2226, 1.8512], + device='cuda:0'), covar=tensor([0.0977, 0.0709, 0.0947, 0.0779, 0.0483, 0.1164, 0.0758, 0.0444], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0311, 0.0331, 0.0254, 0.0241, 0.0331, 0.0299, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:44:43,936 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8617, 4.5024, 3.2320, 3.9641, 2.0432, 4.1977, 4.2813, 4.3396], + device='cuda:0'), covar=tensor([0.0553, 0.0976, 0.1728, 0.0754, 0.2813, 0.0676, 0.0785, 0.1045], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0377, 0.0451, 0.0327, 0.0390, 0.0389, 0.0376, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:44:45,988 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1695, 1.8488, 1.9404, 2.6897, 2.0700, 2.5187, 2.6748, 2.3536], + device='cuda:0'), covar=tensor([0.0686, 0.0832, 0.0891, 0.0819, 0.0817, 0.0622, 0.0747, 0.0544], + device='cuda:0'), in_proj_covar=tensor([0.0207, 0.0221, 0.0221, 0.0240, 0.0225, 0.0206, 0.0189, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 02:44:54,880 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92275.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:45:05,729 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.239e+02 6.377e+02 7.871e+02 1.606e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-02 02:45:24,223 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92300.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:45:34,190 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3993, 2.1945, 1.6625, 1.5792, 2.0018, 1.3010, 1.3011, 1.8546], + device='cuda:0'), covar=tensor([0.1034, 0.0690, 0.1000, 0.0736, 0.0529, 0.1168, 0.0677, 0.0439], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0312, 0.0332, 0.0254, 0.0242, 0.0332, 0.0300, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:45:41,102 INFO [train.py:903] (0/4) Epoch 14, batch 3550, loss[loss=0.2021, simple_loss=0.2849, pruned_loss=0.05968, over 19465.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3012, pruned_loss=0.07515, over 3804004.91 frames. ], batch size: 64, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:45:44,515 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:15,218 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92343.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:46:39,901 INFO [train.py:903] (0/4) Epoch 14, batch 3600, loss[loss=0.2053, simple_loss=0.2797, pruned_loss=0.06546, over 19625.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2998, pruned_loss=0.07438, over 3814391.06 frames. 
], batch size: 50, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:46:44,983 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92368.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:55,930 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9479, 3.5806, 2.2212, 3.2747, 0.8638, 3.4175, 3.3607, 3.5078], + device='cuda:0'), covar=tensor([0.0839, 0.1167, 0.2298, 0.0812, 0.3984, 0.0854, 0.0834, 0.1072], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0378, 0.0453, 0.0329, 0.0392, 0.0391, 0.0378, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:47:04,638 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.209e+02 6.321e+02 7.842e+02 1.520e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 02:47:40,755 INFO [train.py:903] (0/4) Epoch 14, batch 3650, loss[loss=0.204, simple_loss=0.2899, pruned_loss=0.05908, over 19788.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07436, over 3817444.75 frames. ], batch size: 56, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:48:02,178 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92432.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:48:34,072 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92458.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:48:40,400 INFO [train.py:903] (0/4) Epoch 14, batch 3700, loss[loss=0.2048, simple_loss=0.2914, pruned_loss=0.05912, over 19648.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3, pruned_loss=0.0739, over 3822252.85 frames. ], batch size: 58, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:48:51,192 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-02 02:49:05,824 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.888e+02 6.023e+02 8.004e+02 1.682e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 02:49:10,927 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1281, 1.9793, 2.0678, 2.4095, 2.0555, 2.2603, 2.2067, 2.1861], + device='cuda:0'), covar=tensor([0.0615, 0.0685, 0.0724, 0.0732, 0.0756, 0.0580, 0.0842, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0222, 0.0222, 0.0242, 0.0226, 0.0207, 0.0191, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 02:49:11,217 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.10 vs. limit=2.0 +2023-04-02 02:49:13,132 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9183, 1.6471, 1.8730, 1.8826, 4.4259, 1.0523, 2.4862, 4.8821], + device='cuda:0'), covar=tensor([0.0360, 0.2576, 0.2585, 0.1692, 0.0732, 0.2666, 0.1370, 0.0149], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0345, 0.0362, 0.0326, 0.0356, 0.0336, 0.0345, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:49:41,647 INFO [train.py:903] (0/4) Epoch 14, batch 3750, loss[loss=0.2154, simple_loss=0.294, pruned_loss=0.06837, over 19677.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2989, pruned_loss=0.07358, over 3833791.51 frames. 
], batch size: 58, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:50:42,055 INFO [train.py:903] (0/4) Epoch 14, batch 3800, loss[loss=0.2336, simple_loss=0.3088, pruned_loss=0.0792, over 19730.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2997, pruned_loss=0.07384, over 3826590.63 frames. ], batch size: 63, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:06,368 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.992e+02 6.384e+02 8.353e+02 1.667e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 02:51:10,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 02:51:40,635 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 02:51:42,072 INFO [train.py:903] (0/4) Epoch 14, batch 3850, loss[loss=0.2367, simple_loss=0.3138, pruned_loss=0.0798, over 19663.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.07339, over 3823295.85 frames. ], batch size: 58, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:54,735 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92624.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:19,012 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:26,123 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:43,617 INFO [train.py:903] (0/4) Epoch 14, batch 3900, loss[loss=0.2345, simple_loss=0.3145, pruned_loss=0.07727, over 19649.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2992, pruned_loss=0.0731, over 3829479.79 frames. ], batch size: 58, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:10,396 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.910e+02 5.705e+02 7.277e+02 9.203e+02 2.913e+03, threshold=1.455e+03, percent-clipped=9.0 +2023-04-02 02:53:13,153 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92688.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:43,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:44,354 INFO [train.py:903] (0/4) Epoch 14, batch 3950, loss[loss=0.2466, simple_loss=0.3147, pruned_loss=0.08928, over 19790.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.299, pruned_loss=0.07327, over 3829514.84 frames. ], batch size: 56, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:44,792 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92714.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:53:48,548 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 02:54:12,965 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:54:14,267 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92739.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:54:45,458 INFO [train.py:903] (0/4) Epoch 14, batch 4000, loss[loss=0.2387, simple_loss=0.3188, pruned_loss=0.07934, over 18088.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2994, pruned_loss=0.07324, over 3827917.33 frames. 
], batch size: 83, lr: 5.91e-03, grad_scale: 8.0 +2023-04-02 02:55:11,213 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.983e+02 6.436e+02 8.255e+02 1.908e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 02:55:33,053 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 02:55:45,554 INFO [train.py:903] (0/4) Epoch 14, batch 4050, loss[loss=0.1939, simple_loss=0.2895, pruned_loss=0.04915, over 19660.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2993, pruned_loss=0.07335, over 3837556.21 frames. ], batch size: 58, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:55:52,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7897, 1.5352, 1.5525, 1.7725, 1.6082, 1.5542, 1.4798, 1.7514], + device='cuda:0'), covar=tensor([0.0780, 0.1226, 0.1069, 0.0846, 0.1057, 0.0465, 0.1102, 0.0570], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0349, 0.0294, 0.0241, 0.0297, 0.0243, 0.0287, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 02:56:28,992 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:56:45,589 INFO [train.py:903] (0/4) Epoch 14, batch 4100, loss[loss=0.2168, simple_loss=0.2985, pruned_loss=0.06759, over 19443.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2994, pruned_loss=0.07347, over 3833888.44 frames. ], batch size: 64, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:12,584 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 02:57:13,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 5.414e+02 6.604e+02 8.302e+02 1.654e+03, threshold=1.321e+03, percent-clipped=7.0 +2023-04-02 02:57:21,830 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 02:57:45,523 INFO [train.py:903] (0/4) Epoch 14, batch 4150, loss[loss=0.2098, simple_loss=0.2787, pruned_loss=0.07043, over 19618.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.0735, over 3815125.98 frames. ], batch size: 50, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:50,222 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:20,028 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3002, 1.3262, 1.5630, 1.5025, 2.2493, 2.0812, 2.2907, 0.7689], + device='cuda:0'), covar=tensor([0.2259, 0.4011, 0.2407, 0.1803, 0.1432, 0.1999, 0.1328, 0.4128], + device='cuda:0'), in_proj_covar=tensor([0.0499, 0.0588, 0.0633, 0.0446, 0.0599, 0.0500, 0.0645, 0.0503], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 02:58:28,832 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92949.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:48,455 INFO [train.py:903] (0/4) Epoch 14, batch 4200, loss[loss=0.2021, simple_loss=0.2818, pruned_loss=0.06121, over 19782.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2987, pruned_loss=0.07313, over 3820913.07 frames. ], batch size: 49, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:58:51,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 02:59:15,443 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.424e+02 5.036e+02 6.286e+02 7.807e+02 1.684e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-02 02:59:15,610 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:22,676 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:48,128 INFO [train.py:903] (0/4) Epoch 14, batch 4250, loss[loss=0.2149, simple_loss=0.2941, pruned_loss=0.06786, over 19083.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3007, pruned_loss=0.07388, over 3832774.29 frames. ], batch size: 69, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 03:00:03,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 03:00:05,139 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:15,041 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 03:00:37,776 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:47,386 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-02 03:00:48,957 INFO [train.py:903] (0/4) Epoch 14, batch 4300, loss[loss=0.2402, simple_loss=0.3186, pruned_loss=0.08091, over 19554.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3008, pruned_loss=0.07419, over 3827365.77 frames. ], batch size: 56, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:01:12,678 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:18,339 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.279e+02 5.152e+02 6.308e+02 8.497e+02 2.668e+03, threshold=1.262e+03, percent-clipped=7.0 +2023-04-02 03:01:18,720 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7758, 1.4482, 1.3429, 1.6664, 1.5121, 1.3771, 1.2256, 1.6203], + device='cuda:0'), covar=tensor([0.1021, 0.1331, 0.1559, 0.1043, 0.1199, 0.0740, 0.1541, 0.0809], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0349, 0.0294, 0.0241, 0.0295, 0.0244, 0.0287, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:01:36,136 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:42,511 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 03:01:50,403 INFO [train.py:903] (0/4) Epoch 14, batch 4350, loss[loss=0.1832, simple_loss=0.2669, pruned_loss=0.04974, over 19589.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2996, pruned_loss=0.07347, over 3828717.84 frames. 
], batch size: 52, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:01:53,885 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4784, 2.1786, 1.5953, 1.5571, 2.0570, 1.3256, 1.2588, 1.8057], + device='cuda:0'), covar=tensor([0.0966, 0.0705, 0.1005, 0.0693, 0.0510, 0.1145, 0.0728, 0.0486], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0306, 0.0326, 0.0249, 0.0239, 0.0327, 0.0292, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:02:14,533 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:02:52,476 INFO [train.py:903] (0/4) Epoch 14, batch 4400, loss[loss=0.2205, simple_loss=0.2794, pruned_loss=0.0808, over 19763.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2987, pruned_loss=0.07308, over 3833600.68 frames. ], batch size: 47, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:03:15,285 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 03:03:18,717 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.134e+02 6.154e+02 7.916e+02 2.681e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 03:03:25,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 03:03:26,649 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:31,844 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93197.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:52,930 INFO [train.py:903] (0/4) Epoch 14, batch 4450, loss[loss=0.2035, simple_loss=0.2772, pruned_loss=0.06485, over 19748.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2981, pruned_loss=0.07274, over 3841019.88 frames. ], batch size: 51, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:03:54,412 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0121, 1.6759, 1.5882, 2.0745, 1.7623, 1.6788, 1.5935, 1.8666], + device='cuda:0'), covar=tensor([0.0950, 0.1496, 0.1435, 0.1039, 0.1324, 0.0535, 0.1280, 0.0739], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0349, 0.0296, 0.0242, 0.0297, 0.0244, 0.0288, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:04:49,431 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93261.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:04:52,743 INFO [train.py:903] (0/4) Epoch 14, batch 4500, loss[loss=0.2444, simple_loss=0.319, pruned_loss=0.08494, over 19278.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2988, pruned_loss=0.07335, over 3827187.48 frames. 
], batch size: 66, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:05:21,647 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 5.314e+02 6.344e+02 7.896e+02 1.749e+03, threshold=1.269e+03, percent-clipped=5.0 +2023-04-02 03:05:28,481 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:45,652 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93308.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:52,290 INFO [train.py:903] (0/4) Epoch 14, batch 4550, loss[loss=0.2023, simple_loss=0.2725, pruned_loss=0.06605, over 18643.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2985, pruned_loss=0.07289, over 3838576.08 frames. ], batch size: 41, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:06:06,152 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 03:06:21,893 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:27,652 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 03:06:46,899 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:55,968 INFO [train.py:903] (0/4) Epoch 14, batch 4600, loss[loss=0.2057, simple_loss=0.2788, pruned_loss=0.06632, over 19386.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2977, pruned_loss=0.07288, over 3837436.37 frames. ], batch size: 48, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:07:02,072 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9713, 1.8827, 1.6025, 1.5117, 1.3413, 1.4936, 0.3374, 0.8891], + device='cuda:0'), covar=tensor([0.0493, 0.0542, 0.0416, 0.0606, 0.1130, 0.0764, 0.1029, 0.0870], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0339, 0.0333, 0.0363, 0.0436, 0.0363, 0.0316, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:07:05,014 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:09,509 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:13,249 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3396, 1.3276, 1.5445, 1.5384, 2.2406, 2.0740, 2.3000, 0.7916], + device='cuda:0'), covar=tensor([0.2208, 0.3894, 0.2422, 0.1749, 0.1408, 0.1877, 0.1359, 0.3852], + device='cuda:0'), in_proj_covar=tensor([0.0498, 0.0587, 0.0633, 0.0448, 0.0599, 0.0499, 0.0643, 0.0504], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:07:17,806 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:22,006 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.339e+02 6.388e+02 8.227e+02 2.509e+03, threshold=1.278e+03, percent-clipped=2.0 +2023-04-02 03:07:35,566 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:49,408 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, 
warmup_end=2666.7, batch_count=93408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:56,015 INFO [train.py:903] (0/4) Epoch 14, batch 4650, loss[loss=0.2142, simple_loss=0.2902, pruned_loss=0.06913, over 19596.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07382, over 3823991.53 frames. ], batch size: 52, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:07:56,405 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9106, 1.8250, 1.6756, 2.1407, 1.9563, 1.7212, 1.7380, 1.8783], + device='cuda:0'), covar=tensor([0.1074, 0.1594, 0.1390, 0.0996, 0.1255, 0.0561, 0.1235, 0.0746], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0350, 0.0297, 0.0245, 0.0298, 0.0245, 0.0289, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:08:12,040 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 03:08:23,193 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 03:08:43,061 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:44,250 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93453.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:45,374 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9791, 1.9690, 1.8499, 1.7648, 1.6373, 1.8483, 0.9680, 1.3957], + device='cuda:0'), covar=tensor([0.0369, 0.0471, 0.0286, 0.0435, 0.0663, 0.0584, 0.0746, 0.0653], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0337, 0.0332, 0.0362, 0.0435, 0.0362, 0.0314, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:08:56,301 INFO [train.py:903] (0/4) Epoch 14, batch 4700, loss[loss=0.2336, simple_loss=0.3104, pruned_loss=0.07842, over 19603.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2989, pruned_loss=0.07358, over 3827753.27 frames. ], batch size: 57, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:09:11,849 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:13,361 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:20,329 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 03:09:25,717 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 5.199e+02 6.337e+02 7.857e+02 1.524e+03, threshold=1.267e+03, percent-clipped=2.0 +2023-04-02 03:09:26,087 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,133 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,932 INFO [train.py:903] (0/4) Epoch 14, batch 4750, loss[loss=0.2191, simple_loss=0.2985, pruned_loss=0.0699, over 19609.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2984, pruned_loss=0.07333, over 3822216.75 frames. 
], batch size: 61, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:55,740 INFO [train.py:903] (0/4) Epoch 14, batch 4800, loss[loss=0.2647, simple_loss=0.338, pruned_loss=0.09567, over 19688.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3006, pruned_loss=0.07463, over 3827898.00 frames. ], batch size: 59, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:56,108 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:22,943 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.541e+02 6.642e+02 8.296e+02 2.320e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 03:11:23,356 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7076, 1.7244, 1.5489, 1.3862, 1.3480, 1.4525, 0.2050, 0.6565], + device='cuda:0'), covar=tensor([0.0483, 0.0491, 0.0316, 0.0515, 0.1027, 0.0563, 0.0955, 0.0875], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0338, 0.0334, 0.0363, 0.0438, 0.0363, 0.0314, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:11:25,690 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:29,129 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:57,049 INFO [train.py:903] (0/4) Epoch 14, batch 4850, loss[loss=0.2236, simple_loss=0.2984, pruned_loss=0.0744, over 19567.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3, pruned_loss=0.07424, over 3828397.20 frames. ], batch size: 52, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:17,832 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:23,247 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 03:12:42,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 03:12:47,279 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 03:12:48,586 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 03:12:48,922 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93657.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:56,532 INFO [train.py:903] (0/4) Epoch 14, batch 4900, loss[loss=0.2312, simple_loss=0.3093, pruned_loss=0.07652, over 19541.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3008, pruned_loss=0.07454, over 3813627.49 frames. ], batch size: 56, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:56,971 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:57,699 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 03:13:18,094 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 03:13:22,700 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.1991, 3.8707, 2.8734, 3.4428, 1.0734, 3.6443, 3.5788, 3.7558], + device='cuda:0'), covar=tensor([0.0811, 0.0993, 0.1638, 0.0806, 0.3791, 0.0869, 0.0924, 0.1244], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0377, 0.0451, 0.0326, 0.0389, 0.0387, 0.0377, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:13:25,555 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.842e+02 5.818e+02 7.123e+02 1.786e+03, threshold=1.164e+03, percent-clipped=2.0 +2023-04-02 03:13:28,157 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93689.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:49,653 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:55,992 INFO [train.py:903] (0/4) Epoch 14, batch 4950, loss[loss=0.2253, simple_loss=0.3019, pruned_loss=0.07435, over 19618.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2999, pruned_loss=0.07388, over 3813452.00 frames. ], batch size: 50, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:14:16,294 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 03:14:21,093 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:32,430 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:36,699 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 03:14:58,214 INFO [train.py:903] (0/4) Epoch 14, batch 5000, loss[loss=0.2595, simple_loss=0.3223, pruned_loss=0.09834, over 19605.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3, pruned_loss=0.07422, over 3805651.82 frames. ], batch size: 50, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:15:04,133 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93768.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:05,345 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:07,263 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 03:15:14,498 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1260, 1.8779, 1.6991, 2.1658, 2.0872, 1.7859, 1.7032, 2.0385], + device='cuda:0'), covar=tensor([0.0913, 0.1451, 0.1359, 0.0899, 0.1187, 0.0518, 0.1249, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0350, 0.0296, 0.0243, 0.0298, 0.0245, 0.0290, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:15:17,362 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 03:15:25,226 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 5.347e+02 6.962e+02 9.103e+02 2.417e+03, threshold=1.392e+03, percent-clipped=9.0 +2023-04-02 03:15:26,625 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93788.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:33,765 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:59,315 INFO [train.py:903] (0/4) Epoch 14, batch 5050, loss[loss=0.1929, simple_loss=0.2706, pruned_loss=0.05763, over 19748.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3002, pruned_loss=0.07456, over 3810399.32 frames. ], batch size: 47, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:16:35,021 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 03:16:40,853 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:16:59,044 INFO [train.py:903] (0/4) Epoch 14, batch 5100, loss[loss=0.2125, simple_loss=0.2873, pruned_loss=0.06888, over 19453.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2998, pruned_loss=0.07425, over 3814076.62 frames. ], batch size: 49, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:17:09,068 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:17:09,890 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 03:17:13,173 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 03:17:16,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 03:17:26,258 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 4.904e+02 5.934e+02 7.645e+02 1.361e+03, threshold=1.187e+03, percent-clipped=0.0 +2023-04-02 03:17:47,167 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0160, 5.4465, 2.8160, 4.7458, 1.2535, 5.5563, 5.4145, 5.5702], + device='cuda:0'), covar=tensor([0.0411, 0.0768, 0.1989, 0.0618, 0.3598, 0.0500, 0.0713, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0452, 0.0380, 0.0454, 0.0329, 0.0392, 0.0389, 0.0378, 0.0411], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:17:56,876 INFO [train.py:903] (0/4) Epoch 14, batch 5150, loss[loss=0.2057, simple_loss=0.2799, pruned_loss=0.06576, over 19733.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3009, pruned_loss=0.07487, over 3808979.78 frames. ], batch size: 51, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:18:09,225 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 03:18:43,202 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 03:18:58,008 INFO [train.py:903] (0/4) Epoch 14, batch 5200, loss[loss=0.258, simple_loss=0.3112, pruned_loss=0.1024, over 19759.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3, pruned_loss=0.07405, over 3816453.41 frames. 
], batch size: 45, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:19:13,837 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 03:19:16,649 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2118, 1.2167, 1.3989, 1.3786, 1.6560, 1.7257, 1.7043, 0.4462], + device='cuda:0'), covar=tensor([0.2477, 0.4149, 0.2536, 0.1992, 0.1666, 0.2254, 0.1399, 0.4450], + device='cuda:0'), in_proj_covar=tensor([0.0501, 0.0587, 0.0634, 0.0448, 0.0600, 0.0500, 0.0645, 0.0505], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:19:25,471 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 5.268e+02 6.485e+02 8.631e+02 2.638e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 03:19:39,250 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:19:40,158 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-94000.pt +2023-04-02 03:19:57,306 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 03:19:59,391 INFO [train.py:903] (0/4) Epoch 14, batch 5250, loss[loss=0.2703, simple_loss=0.3497, pruned_loss=0.09539, over 19678.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2995, pruned_loss=0.07395, over 3824600.80 frames. ], batch size: 60, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:20:05,577 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6143, 1.6253, 1.7791, 1.8275, 4.1444, 1.1960, 2.4028, 4.4560], + device='cuda:0'), covar=tensor([0.0382, 0.2613, 0.2695, 0.1736, 0.0721, 0.2611, 0.1522, 0.0190], + device='cuda:0'), in_proj_covar=tensor([0.0372, 0.0345, 0.0364, 0.0324, 0.0351, 0.0334, 0.0343, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:20:43,656 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9043, 1.9915, 2.2342, 2.7117, 1.9654, 2.6185, 2.4057, 2.0139], + device='cuda:0'), covar=tensor([0.3784, 0.3300, 0.1570, 0.1979, 0.3682, 0.1653, 0.3845, 0.2833], + device='cuda:0'), in_proj_covar=tensor([0.0821, 0.0858, 0.0666, 0.0895, 0.0808, 0.0742, 0.0803, 0.0730], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 03:20:47,424 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 03:20:59,247 INFO [train.py:903] (0/4) Epoch 14, batch 5300, loss[loss=0.1938, simple_loss=0.2727, pruned_loss=0.05748, over 19787.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3001, pruned_loss=0.07471, over 3822247.05 frames. ], batch size: 48, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:21:16,464 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 03:21:27,961 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.389e+02 5.368e+02 7.020e+02 9.283e+02 2.840e+03, threshold=1.404e+03, percent-clipped=4.0 +2023-04-02 03:21:45,478 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 03:21:59,007 INFO [train.py:903] (0/4) Epoch 14, batch 5350, loss[loss=0.2276, simple_loss=0.3069, pruned_loss=0.07419, over 19670.00 frames. 
], tot_loss[loss=0.2251, simple_loss=0.3005, pruned_loss=0.0748, over 3826860.45 frames. ], batch size: 58, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:22:23,137 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:22:34,755 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 03:23:01,402 INFO [train.py:903] (0/4) Epoch 14, batch 5400, loss[loss=0.2224, simple_loss=0.3058, pruned_loss=0.06954, over 19526.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3001, pruned_loss=0.07476, over 3810654.07 frames. ], batch size: 56, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:23:29,188 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.537e+02 7.248e+02 8.700e+02 2.021e+03, threshold=1.450e+03, percent-clipped=3.0 +2023-04-02 03:24:03,211 INFO [train.py:903] (0/4) Epoch 14, batch 5450, loss[loss=0.1897, simple_loss=0.2596, pruned_loss=0.05991, over 19721.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3005, pruned_loss=0.0748, over 3818189.89 frames. ], batch size: 45, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:24:33,951 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94241.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:24:43,529 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:25:02,862 INFO [train.py:903] (0/4) Epoch 14, batch 5500, loss[loss=0.2961, simple_loss=0.352, pruned_loss=0.1201, over 13533.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3003, pruned_loss=0.07436, over 3812937.77 frames. ], batch size: 136, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:25:24,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 03:25:30,870 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.805e+02 5.794e+02 7.462e+02 1.465e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 03:25:44,162 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4065, 2.2585, 1.7174, 1.5775, 2.1053, 1.4263, 1.3215, 1.9047], + device='cuda:0'), covar=tensor([0.0897, 0.0685, 0.0962, 0.0731, 0.0447, 0.1048, 0.0650, 0.0389], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0304, 0.0327, 0.0249, 0.0238, 0.0325, 0.0292, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:25:51,157 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-02 03:26:01,459 INFO [train.py:903] (0/4) Epoch 14, batch 5550, loss[loss=0.2114, simple_loss=0.2992, pruned_loss=0.06174, over 19666.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2998, pruned_loss=0.07448, over 3824280.53 frames. ], batch size: 59, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:26:08,356 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 03:26:37,722 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:26:57,886 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-02 03:27:01,434 INFO [train.py:903] (0/4) Epoch 14, batch 5600, loss[loss=0.1858, simple_loss=0.2678, pruned_loss=0.0519, over 19623.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.299, pruned_loss=0.07387, over 3826696.45 frames. ], batch size: 50, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:27:04,764 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:27:30,034 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.188e+02 6.005e+02 7.911e+02 1.925e+03, threshold=1.201e+03, percent-clipped=8.0 +2023-04-02 03:28:03,376 INFO [train.py:903] (0/4) Epoch 14, batch 5650, loss[loss=0.2549, simple_loss=0.3185, pruned_loss=0.09567, over 13608.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2987, pruned_loss=0.07363, over 3829031.40 frames. ], batch size: 136, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:28:40,691 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 03:28:48,908 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6008, 1.5545, 1.5852, 1.9244, 3.1856, 1.3791, 2.2052, 3.6474], + device='cuda:0'), covar=tensor([0.0496, 0.2397, 0.2536, 0.1396, 0.0669, 0.2098, 0.1282, 0.0224], + device='cuda:0'), in_proj_covar=tensor([0.0373, 0.0346, 0.0365, 0.0325, 0.0352, 0.0335, 0.0345, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:28:49,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 03:28:55,926 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:29:02,022 INFO [train.py:903] (0/4) Epoch 14, batch 5700, loss[loss=0.2675, simple_loss=0.3259, pruned_loss=0.1046, over 19606.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2999, pruned_loss=0.07453, over 3822812.24 frames. ], batch size: 50, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:29:10,686 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4361, 1.4711, 1.7937, 1.6732, 2.7808, 2.4045, 2.8530, 1.2043], + device='cuda:0'), covar=tensor([0.2302, 0.3923, 0.2470, 0.1798, 0.1410, 0.1866, 0.1458, 0.3931], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0592, 0.0642, 0.0451, 0.0604, 0.0508, 0.0651, 0.0510], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 03:29:23,216 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 03:29:29,829 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.949e+02 6.008e+02 7.817e+02 2.884e+03, threshold=1.202e+03, percent-clipped=11.0 +2023-04-02 03:29:50,170 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94503.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:29:58,116 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8556, 1.4321, 1.5525, 1.7080, 3.3930, 1.0389, 2.2583, 3.7103], + device='cuda:0'), covar=tensor([0.0440, 0.2683, 0.2715, 0.1657, 0.0632, 0.2712, 0.1421, 0.0265], + device='cuda:0'), in_proj_covar=tensor([0.0375, 0.0347, 0.0367, 0.0326, 0.0354, 0.0337, 0.0346, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:30:02,384 INFO [train.py:903] (0/4) Epoch 14, batch 5750, loss[loss=0.2786, simple_loss=0.3424, pruned_loss=0.1074, over 19697.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2994, pruned_loss=0.07405, over 3821417.72 frames. ], batch size: 63, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:30:04,719 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 03:30:11,529 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 03:30:17,749 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 03:30:21,289 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:30:32,322 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6850, 1.4820, 1.3899, 2.1028, 1.6476, 2.0096, 2.0090, 1.7486], + device='cuda:0'), covar=tensor([0.0841, 0.0936, 0.1101, 0.0811, 0.0868, 0.0705, 0.0898, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0223, 0.0241, 0.0226, 0.0208, 0.0191, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 03:30:45,585 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 03:31:04,686 INFO [train.py:903] (0/4) Epoch 14, batch 5800, loss[loss=0.2222, simple_loss=0.2942, pruned_loss=0.07512, over 19734.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2994, pruned_loss=0.07389, over 3821075.68 frames. ], batch size: 51, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:31:30,513 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94585.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:31:32,533 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.430e+02 7.155e+02 9.192e+02 1.752e+03, threshold=1.431e+03, percent-clipped=10.0 +2023-04-02 03:32:06,961 INFO [train.py:903] (0/4) Epoch 14, batch 5850, loss[loss=0.218, simple_loss=0.2996, pruned_loss=0.06822, over 19671.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2985, pruned_loss=0.0734, over 3822767.47 frames. ], batch size: 58, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:33:06,741 INFO [train.py:903] (0/4) Epoch 14, batch 5900, loss[loss=0.2069, simple_loss=0.2762, pruned_loss=0.06875, over 19715.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2991, pruned_loss=0.07409, over 3823926.44 frames. 
], batch size: 45, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:33:07,937 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 03:33:27,827 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 03:33:33,162 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.122e+02 5.971e+02 8.409e+02 2.018e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-02 03:33:50,045 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94700.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:34:01,307 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94710.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:05,815 INFO [train.py:903] (0/4) Epoch 14, batch 5950, loss[loss=0.2037, simple_loss=0.2785, pruned_loss=0.06447, over 19064.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2994, pruned_loss=0.07422, over 3814622.52 frames. ], batch size: 42, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:34:06,212 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94714.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:37,244 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94739.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:38,743 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.33 vs. limit=5.0 +2023-04-02 03:35:04,630 INFO [train.py:903] (0/4) Epoch 14, batch 6000, loss[loss=0.2457, simple_loss=0.3145, pruned_loss=0.08847, over 19539.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3, pruned_loss=0.07491, over 3817494.12 frames. ], batch size: 56, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:35:04,631 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 03:35:17,169 INFO [train.py:937] (0/4) Epoch 14, validation: loss=0.1744, simple_loss=0.2748, pruned_loss=0.03705, over 944034.00 frames. +2023-04-02 03:35:17,170 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 03:35:25,778 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8670, 1.7419, 1.5050, 1.9485, 1.7277, 1.6438, 1.4508, 1.8426], + device='cuda:0'), covar=tensor([0.0960, 0.1268, 0.1408, 0.0889, 0.1175, 0.0512, 0.1342, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0354, 0.0299, 0.0245, 0.0300, 0.0245, 0.0291, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:35:47,198 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.018e+02 6.191e+02 7.483e+02 1.325e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-02 03:36:17,852 INFO [train.py:903] (0/4) Epoch 14, batch 6050, loss[loss=0.2233, simple_loss=0.2956, pruned_loss=0.07547, over 19844.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2998, pruned_loss=0.07497, over 3817840.41 frames. 
], batch size: 52, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:36:33,144 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94825.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:36:46,534 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1146, 2.7872, 2.2015, 2.1410, 2.0092, 2.3951, 0.8192, 1.9583], + device='cuda:0'), covar=tensor([0.0563, 0.0513, 0.0591, 0.0921, 0.1000, 0.0950, 0.1187, 0.0989], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0340, 0.0335, 0.0366, 0.0436, 0.0364, 0.0316, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:37:20,902 INFO [train.py:903] (0/4) Epoch 14, batch 6100, loss[loss=0.2561, simple_loss=0.3309, pruned_loss=0.09063, over 17441.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2999, pruned_loss=0.07476, over 3820256.63 frames. ], batch size: 101, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:37:48,987 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.260e+02 6.294e+02 8.137e+02 1.551e+03, threshold=1.259e+03, percent-clipped=3.0 +2023-04-02 03:38:10,927 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5823, 1.4102, 1.4354, 1.8388, 1.4377, 1.9780, 1.9081, 1.6184], + device='cuda:0'), covar=tensor([0.0805, 0.0933, 0.1037, 0.0825, 0.0900, 0.0666, 0.0796, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0223, 0.0243, 0.0227, 0.0210, 0.0193, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 03:38:21,674 INFO [train.py:903] (0/4) Epoch 14, batch 6150, loss[loss=0.2467, simple_loss=0.3153, pruned_loss=0.08904, over 19741.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2986, pruned_loss=0.07379, over 3825766.30 frames. ], batch size: 51, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:38:48,803 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 03:39:13,126 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94956.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:39:21,882 INFO [train.py:903] (0/4) Epoch 14, batch 6200, loss[loss=0.2134, simple_loss=0.2968, pruned_loss=0.06504, over 18215.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2983, pruned_loss=0.07313, over 3832347.72 frames. ], batch size: 83, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:39:43,629 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94981.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:39:48,242 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 03:39:51,883 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 5.470e+02 6.385e+02 8.085e+02 2.296e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 03:40:22,496 INFO [train.py:903] (0/4) Epoch 14, batch 6250, loss[loss=0.2317, simple_loss=0.3102, pruned_loss=0.07662, over 19622.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2979, pruned_loss=0.07283, over 3838337.19 frames. ], batch size: 57, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:40:55,024 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 03:41:24,163 INFO [train.py:903] (0/4) Epoch 14, batch 6300, loss[loss=0.2205, simple_loss=0.3061, pruned_loss=0.06746, over 19594.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2982, pruned_loss=0.07309, over 3840110.04 frames. ], batch size: 61, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:41:44,520 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:41:51,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.537e+02 5.238e+02 6.215e+02 7.195e+02 1.642e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 03:42:15,045 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95106.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:42:24,132 INFO [train.py:903] (0/4) Epoch 14, batch 6350, loss[loss=0.1859, simple_loss=0.2646, pruned_loss=0.05359, over 19383.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.0738, over 3831124.59 frames. ], batch size: 47, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:42:34,667 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95123.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:43:23,517 INFO [train.py:903] (0/4) Epoch 14, batch 6400, loss[loss=0.2501, simple_loss=0.3236, pruned_loss=0.08829, over 19508.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3002, pruned_loss=0.07438, over 3825714.57 frames. ], batch size: 64, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:43:40,120 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0538, 3.5848, 1.9175, 2.1471, 3.1004, 1.6497, 1.3221, 2.2005], + device='cuda:0'), covar=tensor([0.1296, 0.0472, 0.1041, 0.0774, 0.0480, 0.1168, 0.0964, 0.0684], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0305, 0.0328, 0.0248, 0.0240, 0.0327, 0.0291, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:43:52,823 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 5.689e+02 7.116e+02 8.755e+02 2.889e+03, threshold=1.423e+03, percent-clipped=3.0 +2023-04-02 03:44:23,629 INFO [train.py:903] (0/4) Epoch 14, batch 6450, loss[loss=0.2434, simple_loss=0.3174, pruned_loss=0.08465, over 18822.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3001, pruned_loss=0.07469, over 3821926.55 frames. ], batch size: 74, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:09,467 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 03:45:25,845 INFO [train.py:903] (0/4) Epoch 14, batch 6500, loss[loss=0.2331, simple_loss=0.3152, pruned_loss=0.07551, over 19694.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3008, pruned_loss=0.07487, over 3816825.66 frames. ], batch size: 60, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:32,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 03:45:32,718 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8325, 1.9081, 2.0878, 2.3588, 1.6499, 2.2665, 2.2544, 1.9986], + device='cuda:0'), covar=tensor([0.3513, 0.3025, 0.1631, 0.1867, 0.3337, 0.1618, 0.3883, 0.2819], + device='cuda:0'), in_proj_covar=tensor([0.0829, 0.0867, 0.0669, 0.0906, 0.0816, 0.0753, 0.0814, 0.0739], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 03:45:43,516 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:52,731 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:54,574 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.245e+02 6.559e+02 8.783e+02 2.152e+03, threshold=1.312e+03, percent-clipped=6.0 +2023-04-02 03:46:27,867 INFO [train.py:903] (0/4) Epoch 14, batch 6550, loss[loss=0.2059, simple_loss=0.2943, pruned_loss=0.05877, over 19619.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3001, pruned_loss=0.07411, over 3815258.26 frames. ], batch size: 57, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:47:20,347 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95357.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:47:28,129 INFO [train.py:903] (0/4) Epoch 14, batch 6600, loss[loss=0.2182, simple_loss=0.2954, pruned_loss=0.07044, over 19510.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2988, pruned_loss=0.07315, over 3829128.23 frames. ], batch size: 54, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:47:57,394 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.401e+02 5.166e+02 6.061e+02 7.266e+02 1.890e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 03:48:28,428 INFO [train.py:903] (0/4) Epoch 14, batch 6650, loss[loss=0.302, simple_loss=0.3691, pruned_loss=0.1174, over 19303.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.299, pruned_loss=0.07364, over 3819235.33 frames. ], batch size: 66, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:48:35,365 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 03:49:29,376 INFO [train.py:903] (0/4) Epoch 14, batch 6700, loss[loss=0.2339, simple_loss=0.3032, pruned_loss=0.08224, over 13459.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2999, pruned_loss=0.07436, over 3813873.21 frames. ], batch size: 137, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:33,795 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95467.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:49:57,461 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.267e+02 5.901e+02 8.158e+02 1.902e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-02 03:50:01,573 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 03:50:25,738 INFO [train.py:903] (0/4) Epoch 14, batch 6750, loss[loss=0.2292, simple_loss=0.3001, pruned_loss=0.07913, over 19662.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3002, pruned_loss=0.07474, over 3812863.96 frames. 
], batch size: 53, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:01,630 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2273, 1.3941, 1.8285, 0.8966, 2.3578, 3.0743, 2.7827, 3.2938], + device='cuda:0'), covar=tensor([0.1430, 0.3255, 0.2610, 0.2276, 0.0501, 0.0192, 0.0238, 0.0215], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0303, 0.0329, 0.0253, 0.0223, 0.0167, 0.0207, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:51:21,227 INFO [train.py:903] (0/4) Epoch 14, batch 6800, loss[loss=0.2523, simple_loss=0.3331, pruned_loss=0.08573, over 19479.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3015, pruned_loss=0.07533, over 3813738.92 frames. ], batch size: 64, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:41,546 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95582.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:51:46,734 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.198e+02 6.166e+02 8.008e+02 1.508e+03, threshold=1.233e+03, percent-clipped=6.0 +2023-04-02 03:51:51,245 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-14.pt +2023-04-02 03:52:07,410 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 03:52:07,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 03:52:10,209 INFO [train.py:903] (0/4) Epoch 15, batch 0, loss[loss=0.2446, simple_loss=0.3207, pruned_loss=0.08429, over 18759.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3207, pruned_loss=0.08429, over 18759.00 frames. ], batch size: 74, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:52:10,209 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 03:52:21,738 INFO [train.py:937] (0/4) Epoch 15, validation: loss=0.1744, simple_loss=0.2751, pruned_loss=0.03681, over 944034.00 frames. +2023-04-02 03:52:21,739 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 03:52:26,536 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2992, 1.4728, 1.8186, 1.5999, 2.6766, 2.1905, 2.8090, 1.1449], + device='cuda:0'), covar=tensor([0.2478, 0.4112, 0.2543, 0.1885, 0.1589, 0.2074, 0.1577, 0.4190], + device='cuda:0'), in_proj_covar=tensor([0.0503, 0.0592, 0.0642, 0.0452, 0.0602, 0.0507, 0.0650, 0.0510], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 03:52:33,141 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 03:52:58,933 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:08,213 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:14,610 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:22,142 INFO [train.py:903] (0/4) Epoch 15, batch 50, loss[loss=0.2149, simple_loss=0.2829, pruned_loss=0.07347, over 19393.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3024, pruned_loss=0.07516, over 870129.00 frames. 
], batch size: 48, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:53:57,894 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5110, 1.2467, 1.4801, 1.2451, 2.1240, 0.9864, 2.0693, 2.4352], + device='cuda:0'), covar=tensor([0.0698, 0.2564, 0.2461, 0.1569, 0.0953, 0.1975, 0.0904, 0.0447], + device='cuda:0'), in_proj_covar=tensor([0.0374, 0.0347, 0.0367, 0.0327, 0.0357, 0.0336, 0.0344, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:53:58,793 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 03:54:07,437 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 03:54:20,259 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 5.472e+02 6.461e+02 8.212e+02 1.912e+03, threshold=1.292e+03, percent-clipped=7.0 +2023-04-02 03:54:26,852 INFO [train.py:903] (0/4) Epoch 15, batch 100, loss[loss=0.1869, simple_loss=0.2577, pruned_loss=0.05802, over 19748.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2982, pruned_loss=0.07316, over 1518597.88 frames. ], batch size: 46, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:54:37,471 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 03:54:37,590 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95701.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:22,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:28,253 INFO [train.py:903] (0/4) Epoch 15, batch 150, loss[loss=0.2096, simple_loss=0.2926, pruned_loss=0.06325, over 19543.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2979, pruned_loss=0.07288, over 2037877.21 frames. ], batch size: 56, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:55:32,135 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:56:23,974 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 5.351e+02 6.322e+02 7.452e+02 1.833e+03, threshold=1.264e+03, percent-clipped=1.0 +2023-04-02 03:56:27,309 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 03:56:28,492 INFO [train.py:903] (0/4) Epoch 15, batch 200, loss[loss=0.2102, simple_loss=0.2976, pruned_loss=0.06142, over 19419.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2967, pruned_loss=0.07196, over 2435627.11 frames. 
], batch size: 70, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:56:45,596 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4992, 2.1761, 2.1989, 2.7019, 2.4901, 2.3229, 1.9936, 2.7218], + device='cuda:0'), covar=tensor([0.0903, 0.1746, 0.1373, 0.0958, 0.1260, 0.0454, 0.1367, 0.0592], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0357, 0.0301, 0.0247, 0.0301, 0.0247, 0.0291, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:56:51,096 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0830, 1.2283, 1.4440, 1.4092, 2.6872, 1.0793, 2.1271, 3.0111], + device='cuda:0'), covar=tensor([0.0571, 0.2708, 0.2700, 0.1692, 0.0773, 0.2264, 0.1072, 0.0333], + device='cuda:0'), in_proj_covar=tensor([0.0374, 0.0346, 0.0369, 0.0328, 0.0354, 0.0337, 0.0344, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 03:56:59,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:57:15,624 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95830.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:57:24,872 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95838.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:29,640 INFO [train.py:903] (0/4) Epoch 15, batch 250, loss[loss=0.2215, simple_loss=0.2854, pruned_loss=0.07883, over 19328.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.297, pruned_loss=0.07223, over 2742416.84 frames. ], batch size: 44, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:57:56,091 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95863.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:57:58,213 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:58:24,514 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.277e+02 6.948e+02 9.039e+02 3.101e+03, threshold=1.390e+03, percent-clipped=9.0 +2023-04-02 03:58:28,338 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2829, 1.3045, 1.5391, 1.4400, 2.2830, 2.0007, 2.2881, 0.8136], + device='cuda:0'), covar=tensor([0.2264, 0.3903, 0.2373, 0.1809, 0.1340, 0.1963, 0.1328, 0.3960], + device='cuda:0'), in_proj_covar=tensor([0.0498, 0.0589, 0.0636, 0.0449, 0.0599, 0.0503, 0.0644, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 03:58:30,109 INFO [train.py:903] (0/4) Epoch 15, batch 300, loss[loss=0.2352, simple_loss=0.3178, pruned_loss=0.07628, over 19663.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2987, pruned_loss=0.07357, over 2975016.84 frames. ], batch size: 58, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:32,840 INFO [train.py:903] (0/4) Epoch 15, batch 350, loss[loss=0.2313, simple_loss=0.3101, pruned_loss=0.07624, over 19466.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2977, pruned_loss=0.07331, over 3164679.19 frames. ], batch size: 64, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:33,871 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 03:59:57,911 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5483, 4.0332, 4.2027, 4.1817, 1.7394, 3.9249, 3.4467, 3.9176], + device='cuda:0'), covar=tensor([0.1473, 0.1036, 0.0574, 0.0659, 0.5245, 0.0911, 0.0637, 0.1076], + device='cuda:0'), in_proj_covar=tensor([0.0719, 0.0646, 0.0856, 0.0740, 0.0762, 0.0600, 0.0516, 0.0787], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 04:00:17,404 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:28,195 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.018e+02 5.906e+02 6.897e+02 1.495e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-04-02 04:00:32,777 INFO [train.py:903] (0/4) Epoch 15, batch 400, loss[loss=0.2399, simple_loss=0.3188, pruned_loss=0.0805, over 19669.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2993, pruned_loss=0.07433, over 3311753.18 frames. ], batch size: 55, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 04:00:34,350 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:39,871 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95998.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:42,036 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-96000.pt +2023-04-02 04:00:44,427 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:04,731 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:16,006 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:33,842 INFO [train.py:903] (0/4) Epoch 15, batch 450, loss[loss=0.2326, simple_loss=0.3037, pruned_loss=0.08078, over 13338.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2985, pruned_loss=0.07364, over 3429887.04 frames. ], batch size: 135, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:07,802 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 04:02:07,849 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 04:02:12,861 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:31,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.837e+02 5.995e+02 7.453e+02 1.580e+03, threshold=1.199e+03, percent-clipped=6.0 +2023-04-02 04:02:36,671 INFO [train.py:903] (0/4) Epoch 15, batch 500, loss[loss=0.233, simple_loss=0.311, pruned_loss=0.07753, over 19527.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2975, pruned_loss=0.0731, over 3522286.65 frames. 
], batch size: 54, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:39,300 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:43,774 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:44,798 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:03:38,908 INFO [train.py:903] (0/4) Epoch 15, batch 550, loss[loss=0.277, simple_loss=0.3369, pruned_loss=0.1085, over 19362.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2997, pruned_loss=0.07442, over 3585489.37 frames. ], batch size: 66, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:04:12,240 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 04:04:18,069 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96174.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:04:29,236 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:04:31,635 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-02 04:04:35,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.342e+02 6.491e+02 8.104e+02 1.503e+03, threshold=1.298e+03, percent-clipped=3.0 +2023-04-02 04:04:40,051 INFO [train.py:903] (0/4) Epoch 15, batch 600, loss[loss=0.216, simple_loss=0.302, pruned_loss=0.065, over 19770.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2997, pruned_loss=0.07441, over 3648266.23 frames. ], batch size: 56, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:04:53,090 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5698, 1.1827, 1.4089, 1.2424, 2.2273, 0.9961, 1.9777, 2.4832], + device='cuda:0'), covar=tensor([0.0621, 0.2633, 0.2629, 0.1556, 0.0868, 0.1925, 0.0964, 0.0440], + device='cuda:0'), in_proj_covar=tensor([0.0371, 0.0346, 0.0366, 0.0328, 0.0352, 0.0335, 0.0343, 0.0373], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:05:00,801 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:05:20,405 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 04:05:43,304 INFO [train.py:903] (0/4) Epoch 15, batch 650, loss[loss=0.2331, simple_loss=0.309, pruned_loss=0.07861, over 19538.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3004, pruned_loss=0.07421, over 3684893.00 frames. ], batch size: 64, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:06:41,549 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.105e+02 6.385e+02 8.770e+02 1.706e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 04:06:42,955 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96289.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:06:46,038 INFO [train.py:903] (0/4) Epoch 15, batch 700, loss[loss=0.218, simple_loss=0.3016, pruned_loss=0.06722, over 19385.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.07295, over 3716475.55 frames. 
], batch size: 70, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:26,437 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,381 INFO [train.py:903] (0/4) Epoch 15, batch 750, loss[loss=0.2127, simple_loss=0.2968, pruned_loss=0.0643, over 19728.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2987, pruned_loss=0.07281, over 3747181.99 frames. ], batch size: 63, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:47,558 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,709 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:57,775 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:28,492 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:44,429 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.276e+02 6.207e+02 7.536e+02 1.572e+03, threshold=1.241e+03, percent-clipped=2.0 +2023-04-02 04:08:47,393 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-02 04:08:49,803 INFO [train.py:903] (0/4) Epoch 15, batch 800, loss[loss=0.1976, simple_loss=0.2825, pruned_loss=0.05633, over 19668.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2986, pruned_loss=0.07308, over 3773235.63 frames. ], batch size: 58, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:08:53,730 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7236, 1.4573, 1.5021, 2.1029, 1.5644, 1.9228, 1.9422, 1.7241], + device='cuda:0'), covar=tensor([0.0808, 0.0945, 0.1027, 0.0811, 0.0910, 0.0787, 0.0886, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0243, 0.0228, 0.0210, 0.0191, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 04:09:04,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 04:09:09,582 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:09:17,553 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8277, 2.0808, 2.3612, 2.2968, 3.1437, 3.6114, 3.5534, 3.8803], + device='cuda:0'), covar=tensor([0.1251, 0.2575, 0.2321, 0.1618, 0.0874, 0.0332, 0.0164, 0.0204], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0303, 0.0330, 0.0253, 0.0225, 0.0167, 0.0206, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:09:50,620 INFO [train.py:903] (0/4) Epoch 15, batch 850, loss[loss=0.201, simple_loss=0.2773, pruned_loss=0.06237, over 19411.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2987, pruned_loss=0.0734, over 3772566.21 frames. 
], batch size: 48, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:51,930 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:09,368 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:41,144 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 04:10:47,719 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.238e+02 6.465e+02 7.879e+02 1.664e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 04:10:52,491 INFO [train.py:903] (0/4) Epoch 15, batch 900, loss[loss=0.1989, simple_loss=0.2716, pruned_loss=0.06306, over 19378.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2985, pruned_loss=0.07347, over 3768924.67 frames. ], batch size: 47, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:36,464 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:11:55,050 INFO [train.py:903] (0/4) Epoch 15, batch 950, loss[loss=0.1934, simple_loss=0.2762, pruned_loss=0.05526, over 19671.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2982, pruned_loss=0.07283, over 3794066.54 frames. ], batch size: 53, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:56,235 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 04:11:59,021 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96545.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:12:06,745 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7613, 1.8594, 2.0159, 2.3497, 1.7261, 2.2887, 2.1792, 1.9026], + device='cuda:0'), covar=tensor([0.3593, 0.3128, 0.1682, 0.1817, 0.3261, 0.1637, 0.3805, 0.2892], + device='cuda:0'), in_proj_covar=tensor([0.0826, 0.0864, 0.0668, 0.0898, 0.0814, 0.0748, 0.0802, 0.0731], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 04:12:14,112 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:30,398 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96570.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:12:41,763 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:52,447 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.225e+02 6.143e+02 7.839e+02 1.754e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-02 04:12:57,222 INFO [train.py:903] (0/4) Epoch 15, batch 1000, loss[loss=0.1824, simple_loss=0.2574, pruned_loss=0.05369, over 19302.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2988, pruned_loss=0.07321, over 3798047.91 frames. ], batch size: 44, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:13,515 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:13:51,325 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 04:13:59,278 INFO [train.py:903] (0/4) Epoch 15, batch 1050, loss[loss=0.2042, simple_loss=0.2667, pruned_loss=0.07087, over 19720.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2989, pruned_loss=0.07326, over 3792788.99 frames. ], batch size: 45, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:59,656 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:31,326 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 04:14:35,039 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4025, 1.6601, 2.0687, 1.6730, 3.1371, 2.5975, 3.2115, 1.5550], + device='cuda:0'), covar=tensor([0.2318, 0.3778, 0.2304, 0.1789, 0.1514, 0.1938, 0.1758, 0.3678], + device='cuda:0'), in_proj_covar=tensor([0.0501, 0.0589, 0.0640, 0.0449, 0.0601, 0.0505, 0.0643, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:14:53,737 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:57,023 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.831e+02 5.170e+02 6.505e+02 8.376e+02 1.590e+03, threshold=1.301e+03, percent-clipped=4.0 +2023-04-02 04:15:01,350 INFO [train.py:903] (0/4) Epoch 15, batch 1100, loss[loss=0.2553, simple_loss=0.3298, pruned_loss=0.09033, over 19536.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2995, pruned_loss=0.07374, over 3799356.71 frames. ], batch size: 56, lr: 5.60e-03, grad_scale: 4.0 +2023-04-02 04:15:28,113 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:15:58,779 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:02,837 INFO [train.py:903] (0/4) Epoch 15, batch 1150, loss[loss=0.1981, simple_loss=0.2699, pruned_loss=0.06314, over 19733.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2979, pruned_loss=0.07258, over 3806470.34 frames. ], batch size: 47, lr: 5.59e-03, grad_scale: 4.0 +2023-04-02 04:16:15,357 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:01,609 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.122e+02 6.460e+02 8.216e+02 1.619e+03, threshold=1.292e+03, percent-clipped=5.0 +2023-04-02 04:17:06,203 INFO [train.py:903] (0/4) Epoch 15, batch 1200, loss[loss=0.1733, simple_loss=0.2552, pruned_loss=0.04567, over 19037.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.07216, over 3818694.13 frames. ], batch size: 42, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:17:08,719 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:17,132 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:32,624 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:39,079 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-02 04:18:03,059 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:18:07,992 INFO [train.py:903] (0/4) Epoch 15, batch 1250, loss[loss=0.2659, simple_loss=0.3318, pruned_loss=0.1, over 18202.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.07346, over 3803793.32 frames. ], batch size: 83, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:18:38,268 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:05,686 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 5.442e+02 6.897e+02 8.528e+02 1.967e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 04:19:09,069 INFO [train.py:903] (0/4) Epoch 15, batch 1300, loss[loss=0.2713, simple_loss=0.3318, pruned_loss=0.1054, over 13866.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07405, over 3814556.29 frames. ], batch size: 135, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:19:17,636 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:48,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:20:12,478 INFO [train.py:903] (0/4) Epoch 15, batch 1350, loss[loss=0.1866, simple_loss=0.2562, pruned_loss=0.05849, over 16083.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2995, pruned_loss=0.07378, over 3809959.65 frames. ], batch size: 35, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:11,449 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.520e+02 5.515e+02 7.272e+02 1.782e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-04-02 04:21:15,868 INFO [train.py:903] (0/4) Epoch 15, batch 1400, loss[loss=0.2209, simple_loss=0.3072, pruned_loss=0.0673, over 19620.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2992, pruned_loss=0.0737, over 3805330.98 frames. ], batch size: 57, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:49,669 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:22:15,030 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 04:22:19,077 INFO [train.py:903] (0/4) Epoch 15, batch 1450, loss[loss=0.2306, simple_loss=0.315, pruned_loss=0.07305, over 19660.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07302, over 3815927.72 frames. ], batch size: 58, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:22:20,281 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 04:22:38,930 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:09,820 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:11,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2765, 2.2769, 2.4209, 3.1816, 2.1883, 3.0580, 2.8367, 2.2983], + device='cuda:0'), covar=tensor([0.3923, 0.3746, 0.1590, 0.2155, 0.4181, 0.1786, 0.3699, 0.2870], + device='cuda:0'), in_proj_covar=tensor([0.0833, 0.0871, 0.0673, 0.0905, 0.0817, 0.0750, 0.0808, 0.0737], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 04:23:18,581 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.198e+02 6.417e+02 9.294e+02 1.968e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 04:23:21,917 INFO [train.py:903] (0/4) Epoch 15, batch 1500, loss[loss=0.1912, simple_loss=0.28, pruned_loss=0.05119, over 19756.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2987, pruned_loss=0.0733, over 3797749.84 frames. ], batch size: 54, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:24:01,252 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:20,464 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:24,794 INFO [train.py:903] (0/4) Epoch 15, batch 1550, loss[loss=0.2203, simple_loss=0.3054, pruned_loss=0.0676, over 19773.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2986, pruned_loss=0.07283, over 3807985.96 frames. ], batch size: 56, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:24:25,253 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6889, 1.8147, 1.9411, 2.0671, 1.5249, 1.9635, 2.0916, 1.8783], + device='cuda:0'), covar=tensor([0.3437, 0.2774, 0.1566, 0.1700, 0.3125, 0.1592, 0.3812, 0.2654], + device='cuda:0'), in_proj_covar=tensor([0.0835, 0.0871, 0.0674, 0.0903, 0.0817, 0.0752, 0.0806, 0.0738], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 04:24:31,871 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:25:22,554 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.223e+02 6.490e+02 8.468e+02 1.572e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-02 04:25:26,805 INFO [train.py:903] (0/4) Epoch 15, batch 1600, loss[loss=0.2119, simple_loss=0.2981, pruned_loss=0.06289, over 19292.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2983, pruned_loss=0.07259, over 3809541.93 frames. ], batch size: 66, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:25:51,578 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 04:26:20,094 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 04:26:28,830 INFO [train.py:903] (0/4) Epoch 15, batch 1650, loss[loss=0.2881, simple_loss=0.3507, pruned_loss=0.1127, over 17417.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2976, pruned_loss=0.0727, over 3814984.05 frames. 
], batch size: 101, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:26:42,629 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:03,865 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5833, 1.2321, 1.1857, 1.4433, 1.1800, 1.3584, 1.2093, 1.4073], + device='cuda:0'), covar=tensor([0.1008, 0.1251, 0.1481, 0.0990, 0.1130, 0.0579, 0.1407, 0.0794], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0349, 0.0296, 0.0242, 0.0293, 0.0244, 0.0287, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:27:14,835 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:27,488 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.574e+02 5.168e+02 6.563e+02 8.938e+02 1.305e+03, threshold=1.313e+03, percent-clipped=1.0 +2023-04-02 04:27:30,913 INFO [train.py:903] (0/4) Epoch 15, batch 1700, loss[loss=0.2198, simple_loss=0.2956, pruned_loss=0.07202, over 19653.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2978, pruned_loss=0.07281, over 3824184.68 frames. ], batch size: 55, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:27:49,503 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 04:28:02,153 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3307, 3.0211, 2.2981, 2.7558, 0.7418, 2.9958, 2.8420, 2.9382], + device='cuda:0'), covar=tensor([0.1040, 0.1399, 0.1954, 0.1046, 0.3894, 0.0989, 0.0991, 0.1269], + device='cuda:0'), in_proj_covar=tensor([0.0457, 0.0384, 0.0458, 0.0326, 0.0389, 0.0391, 0.0382, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:28:11,284 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 04:28:32,699 INFO [train.py:903] (0/4) Epoch 15, batch 1750, loss[loss=0.2543, simple_loss=0.3184, pruned_loss=0.09509, over 13430.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2969, pruned_loss=0.07268, over 3828338.48 frames. ], batch size: 137, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:28:55,667 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:28:57,837 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:01,593 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:30,674 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.096e+02 6.393e+02 8.037e+02 1.428e+03, threshold=1.279e+03, percent-clipped=5.0 +2023-04-02 04:29:33,929 INFO [train.py:903] (0/4) Epoch 15, batch 1800, loss[loss=0.2233, simple_loss=0.3065, pruned_loss=0.07007, over 19594.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2971, pruned_loss=0.07279, over 3821725.49 frames. 
], batch size: 61, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:29:34,223 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:30:03,912 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1991, 1.2867, 1.2249, 1.0699, 1.0862, 1.1360, 0.0949, 0.4135], + device='cuda:0'), covar=tensor([0.0581, 0.0560, 0.0356, 0.0467, 0.1144, 0.0478, 0.1010, 0.0888], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0340, 0.0337, 0.0366, 0.0438, 0.0365, 0.0319, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:30:32,249 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 04:30:36,842 INFO [train.py:903] (0/4) Epoch 15, batch 1850, loss[loss=0.1935, simple_loss=0.2718, pruned_loss=0.05759, over 19769.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2972, pruned_loss=0.07257, over 3830471.95 frames. ], batch size: 48, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:10,814 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 04:31:21,325 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:31:25,820 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8743, 4.2864, 4.5922, 4.6001, 1.4729, 4.2919, 3.7083, 4.2720], + device='cuda:0'), covar=tensor([0.1547, 0.0803, 0.0624, 0.0591, 0.5899, 0.0879, 0.0641, 0.1101], + device='cuda:0'), in_proj_covar=tensor([0.0728, 0.0655, 0.0865, 0.0744, 0.0773, 0.0609, 0.0523, 0.0797], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 04:31:35,889 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.535e+02 5.674e+02 7.772e+02 1.771e+03, threshold=1.135e+03, percent-clipped=3.0 +2023-04-02 04:31:39,245 INFO [train.py:903] (0/4) Epoch 15, batch 1900, loss[loss=0.2343, simple_loss=0.3123, pruned_loss=0.07817, over 19283.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2978, pruned_loss=0.07287, over 3825304.81 frames. ], batch size: 66, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:47,722 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8343, 3.2389, 3.2841, 3.3210, 1.2283, 3.1597, 2.7135, 3.0386], + device='cuda:0'), covar=tensor([0.1483, 0.0956, 0.0803, 0.0816, 0.5039, 0.0927, 0.0833, 0.1274], + device='cuda:0'), in_proj_covar=tensor([0.0725, 0.0654, 0.0863, 0.0743, 0.0771, 0.0607, 0.0522, 0.0794], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 04:31:58,018 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 04:32:00,969 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97509.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:02,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 04:32:28,310 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-02 04:32:32,290 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:40,694 INFO [train.py:903] (0/4) Epoch 15, batch 1950, loss[loss=0.1983, simple_loss=0.2817, pruned_loss=0.05745, over 19464.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07299, over 3819847.15 frames. ], batch size: 49, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:33:19,888 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:33:39,667 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 4.980e+02 6.456e+02 8.636e+02 2.349e+03, threshold=1.291e+03, percent-clipped=8.0 +2023-04-02 04:33:43,308 INFO [train.py:903] (0/4) Epoch 15, batch 2000, loss[loss=0.2308, simple_loss=0.3114, pruned_loss=0.07512, over 19602.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2988, pruned_loss=0.07306, over 3820674.90 frames. ], batch size: 61, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:34:06,549 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-02 04:34:20,870 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:34:35,245 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 04:34:42,316 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 04:34:47,029 INFO [train.py:903] (0/4) Epoch 15, batch 2050, loss[loss=0.2006, simple_loss=0.2848, pruned_loss=0.0582, over 19743.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2984, pruned_loss=0.07231, over 3830437.59 frames. ], batch size: 51, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:01,879 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 04:35:03,038 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 04:35:23,967 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 04:35:47,019 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.998e+02 6.604e+02 7.928e+02 1.987e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 04:35:50,611 INFO [train.py:903] (0/4) Epoch 15, batch 2100, loss[loss=0.1996, simple_loss=0.2905, pruned_loss=0.05431, over 19524.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2982, pruned_loss=0.0723, over 3839203.28 frames. 
], batch size: 56, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:50,941 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97692.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:35:56,743 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5092, 4.0572, 4.2145, 4.2097, 1.5667, 3.9613, 3.4928, 3.9172], + device='cuda:0'), covar=tensor([0.1551, 0.0919, 0.0669, 0.0658, 0.5692, 0.0898, 0.0666, 0.1134], + device='cuda:0'), in_proj_covar=tensor([0.0732, 0.0658, 0.0867, 0.0747, 0.0777, 0.0612, 0.0524, 0.0798], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 04:36:04,922 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:11,599 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:20,730 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 04:36:41,964 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:43,973 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 04:36:45,265 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:46,642 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:49,876 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9941, 1.1976, 1.4714, 0.6233, 2.0155, 2.4190, 2.1069, 2.5794], + device='cuda:0'), covar=tensor([0.1522, 0.3658, 0.3200, 0.2547, 0.0585, 0.0252, 0.0350, 0.0309], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0305, 0.0331, 0.0253, 0.0225, 0.0169, 0.0208, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:36:51,864 INFO [train.py:903] (0/4) Epoch 15, batch 2150, loss[loss=0.2426, simple_loss=0.3178, pruned_loss=0.0837, over 18708.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.299, pruned_loss=0.07268, over 3837709.26 frames. ], batch size: 74, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:37:12,246 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:37:49,742 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.862e+02 6.186e+02 7.185e+02 1.323e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 04:37:54,005 INFO [train.py:903] (0/4) Epoch 15, batch 2200, loss[loss=0.1834, simple_loss=0.2578, pruned_loss=0.05444, over 19410.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2985, pruned_loss=0.07238, over 3830079.59 frames. 
], batch size: 48, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:37:55,569 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1725, 2.0196, 1.7914, 1.6548, 1.5723, 1.7545, 0.3485, 0.9856], + device='cuda:0'), covar=tensor([0.0465, 0.0486, 0.0383, 0.0610, 0.0901, 0.0650, 0.1004, 0.0829], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0340, 0.0336, 0.0365, 0.0438, 0.0364, 0.0318, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:38:28,065 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:33,738 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:57,679 INFO [train.py:903] (0/4) Epoch 15, batch 2250, loss[loss=0.2591, simple_loss=0.3268, pruned_loss=0.09567, over 19547.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2985, pruned_loss=0.0728, over 3819224.82 frames. ], batch size: 54, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:39:09,291 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:39:56,866 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.977e+02 6.292e+02 8.077e+02 1.831e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 04:40:00,323 INFO [train.py:903] (0/4) Epoch 15, batch 2300, loss[loss=0.2263, simple_loss=0.3107, pruned_loss=0.07093, over 19523.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2984, pruned_loss=0.07218, over 3819778.68 frames. ], batch size: 56, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:40:12,674 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 04:40:16,283 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:22,276 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2754, 1.1792, 1.2512, 1.3599, 1.0976, 1.3958, 1.3062, 1.3282], + device='cuda:0'), covar=tensor([0.0922, 0.1034, 0.1081, 0.0697, 0.0864, 0.0829, 0.0896, 0.0765], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0223, 0.0242, 0.0228, 0.0208, 0.0191, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 04:40:29,875 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:54,965 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2982, 3.0001, 2.3015, 2.6871, 0.7555, 2.9460, 2.8298, 2.9427], + device='cuda:0'), covar=tensor([0.1100, 0.1352, 0.2013, 0.1023, 0.3990, 0.1000, 0.1081, 0.1320], + device='cuda:0'), in_proj_covar=tensor([0.0462, 0.0381, 0.0461, 0.0325, 0.0392, 0.0393, 0.0382, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:41:01,540 INFO [train.py:903] (0/4) Epoch 15, batch 2350, loss[loss=0.2273, simple_loss=0.305, pruned_loss=0.07481, over 19701.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2981, pruned_loss=0.07237, over 3821746.10 frames. 
], batch size: 59, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:41:44,926 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 04:41:58,283 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.416e+02 6.244e+02 7.823e+02 1.587e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-04-02 04:41:58,399 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 04:42:02,746 INFO [train.py:903] (0/4) Epoch 15, batch 2400, loss[loss=0.2079, simple_loss=0.2906, pruned_loss=0.06263, over 19525.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2986, pruned_loss=0.07261, over 3815768.23 frames. ], batch size: 56, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:42:04,439 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:12,201 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-98000.pt +2023-04-02 04:42:29,190 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9341, 4.4998, 2.6315, 4.0222, 1.1370, 4.3459, 4.3044, 4.3887], + device='cuda:0'), covar=tensor([0.0542, 0.0901, 0.1995, 0.0695, 0.3682, 0.0690, 0.0763, 0.1129], + device='cuda:0'), in_proj_covar=tensor([0.0459, 0.0379, 0.0457, 0.0323, 0.0388, 0.0391, 0.0379, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:42:36,216 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:53,753 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:58,104 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98036.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:43:05,605 INFO [train.py:903] (0/4) Epoch 15, batch 2450, loss[loss=0.2704, simple_loss=0.3344, pruned_loss=0.1032, over 19575.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2982, pruned_loss=0.07245, over 3823533.02 frames. ], batch size: 61, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:43:32,178 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1654, 3.7222, 2.2367, 2.3179, 3.3310, 2.1518, 1.2979, 2.2266], + device='cuda:0'), covar=tensor([0.1257, 0.0529, 0.0955, 0.0780, 0.0550, 0.1027, 0.1006, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0306, 0.0322, 0.0248, 0.0239, 0.0327, 0.0290, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:43:47,599 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98075.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:43:54,239 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:05,272 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.305e+02 6.204e+02 8.091e+02 1.870e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 04:44:09,743 INFO [train.py:903] (0/4) Epoch 15, batch 2500, loss[loss=0.1784, simple_loss=0.2635, pruned_loss=0.04663, over 19841.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2972, pruned_loss=0.07173, over 3821430.01 frames. 
], batch size: 52, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:44:19,636 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98100.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:25,566 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98105.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:27,995 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:29,491 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 04:44:51,253 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98124.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:00,624 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:12,189 INFO [train.py:903] (0/4) Epoch 15, batch 2550, loss[loss=0.2241, simple_loss=0.3114, pruned_loss=0.06836, over 19790.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2968, pruned_loss=0.07136, over 3836116.53 frames. ], batch size: 56, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:45:23,230 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98151.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:45:46,897 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3708, 1.4682, 1.7768, 1.6239, 2.6572, 2.2534, 2.7467, 1.0771], + device='cuda:0'), covar=tensor([0.2341, 0.4008, 0.2541, 0.1797, 0.1425, 0.2076, 0.1477, 0.4047], + device='cuda:0'), in_proj_covar=tensor([0.0504, 0.0593, 0.0646, 0.0450, 0.0602, 0.0507, 0.0646, 0.0507], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:46:07,105 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 04:46:10,537 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 5.247e+02 6.339e+02 8.638e+02 2.352e+03, threshold=1.268e+03, percent-clipped=5.0 +2023-04-02 04:46:14,051 INFO [train.py:903] (0/4) Epoch 15, batch 2600, loss[loss=0.2218, simple_loss=0.3094, pruned_loss=0.0671, over 19684.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2977, pruned_loss=0.07157, over 3839020.12 frames. ], batch size: 59, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:46:24,898 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0398, 4.9894, 5.8436, 5.7801, 1.8350, 5.4558, 4.7000, 5.4586], + device='cuda:0'), covar=tensor([0.1535, 0.0830, 0.0523, 0.0582, 0.5868, 0.0622, 0.0578, 0.1122], + device='cuda:0'), in_proj_covar=tensor([0.0729, 0.0660, 0.0867, 0.0746, 0.0777, 0.0614, 0.0525, 0.0799], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 04:47:18,038 INFO [train.py:903] (0/4) Epoch 15, batch 2650, loss[loss=0.2011, simple_loss=0.2669, pruned_loss=0.06763, over 19726.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07194, over 3828585.11 frames. ], batch size: 46, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:28,824 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:47:39,902 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 04:48:17,337 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:48:18,022 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.990e+02 6.118e+02 7.575e+02 1.335e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 04:48:21,604 INFO [train.py:903] (0/4) Epoch 15, batch 2700, loss[loss=0.2388, simple_loss=0.3207, pruned_loss=0.07844, over 19752.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.297, pruned_loss=0.07135, over 3840104.13 frames. ], batch size: 63, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:48:47,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:49:24,166 INFO [train.py:903] (0/4) Epoch 15, batch 2750, loss[loss=0.2767, simple_loss=0.3348, pruned_loss=0.1093, over 13597.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2978, pruned_loss=0.07233, over 3818647.64 frames. ], batch size: 136, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:49:45,380 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0230, 1.7969, 1.5599, 2.1812, 1.8056, 1.7642, 1.6145, 1.9592], + device='cuda:0'), covar=tensor([0.0957, 0.1490, 0.1531, 0.0944, 0.1397, 0.0551, 0.1298, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0348, 0.0295, 0.0243, 0.0293, 0.0244, 0.0288, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:49:54,711 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:50:02,026 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1202, 2.7963, 1.9095, 2.0689, 1.8499, 2.3044, 0.7635, 1.9826], + device='cuda:0'), covar=tensor([0.0554, 0.0529, 0.0687, 0.0973, 0.1095, 0.1028, 0.1306, 0.0963], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0342, 0.0340, 0.0368, 0.0443, 0.0369, 0.0320, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:50:23,807 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.173e+02 6.107e+02 7.991e+02 1.431e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-04-02 04:50:27,276 INFO [train.py:903] (0/4) Epoch 15, batch 2800, loss[loss=0.2248, simple_loss=0.3096, pruned_loss=0.07003, over 18134.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2977, pruned_loss=0.07223, over 3831047.32 frames. 
], batch size: 83, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:50:28,753 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3511, 3.0751, 2.3107, 2.7996, 0.6313, 2.9530, 2.8919, 2.9666], + device='cuda:0'), covar=tensor([0.1028, 0.1341, 0.1908, 0.1015, 0.3904, 0.1058, 0.1012, 0.1210], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0386, 0.0463, 0.0329, 0.0392, 0.0397, 0.0387, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:50:47,397 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9803, 5.2994, 3.0065, 4.6758, 1.3450, 5.3200, 5.2723, 5.4938], + device='cuda:0'), covar=tensor([0.0409, 0.0939, 0.1900, 0.0712, 0.3595, 0.0578, 0.0703, 0.0878], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0386, 0.0463, 0.0329, 0.0392, 0.0397, 0.0387, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:50:48,826 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98407.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:13,923 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:51:18,584 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98432.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:51:30,822 INFO [train.py:903] (0/4) Epoch 15, batch 2850, loss[loss=0.2145, simple_loss=0.2955, pruned_loss=0.0668, over 19775.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2981, pruned_loss=0.07219, over 3836631.07 frames. ], batch size: 56, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:52:03,911 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:52:30,661 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 5.164e+02 6.295e+02 8.927e+02 2.262e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 04:52:34,177 INFO [train.py:903] (0/4) Epoch 15, batch 2900, loss[loss=0.2096, simple_loss=0.2975, pruned_loss=0.06088, over 19283.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.0714, over 3850880.21 frames. ], batch size: 66, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:52:35,474 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 04:52:53,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0273, 1.8748, 1.5347, 2.0583, 1.7842, 1.6920, 1.6214, 1.8831], + device='cuda:0'), covar=tensor([0.0966, 0.1351, 0.1568, 0.0966, 0.1371, 0.0587, 0.1334, 0.0815], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0352, 0.0297, 0.0245, 0.0295, 0.0246, 0.0290, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 04:53:36,713 INFO [train.py:903] (0/4) Epoch 15, batch 2950, loss[loss=0.1895, simple_loss=0.2602, pruned_loss=0.05935, over 19403.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2978, pruned_loss=0.07179, over 3853997.87 frames. 
], batch size: 47, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:00,567 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:29,226 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:35,383 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.043e+02 6.244e+02 8.249e+02 2.456e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 04:54:38,824 INFO [train.py:903] (0/4) Epoch 15, batch 3000, loss[loss=0.2223, simple_loss=0.3065, pruned_loss=0.06907, over 19707.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2972, pruned_loss=0.07177, over 3849636.37 frames. ], batch size: 59, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:38,825 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 04:54:51,337 INFO [train.py:937] (0/4) Epoch 15, validation: loss=0.1735, simple_loss=0.2738, pruned_loss=0.0366, over 944034.00 frames. +2023-04-02 04:54:51,338 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 04:54:51,791 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1328, 2.1578, 2.3673, 2.9157, 2.1816, 2.7316, 2.5177, 2.1737], + device='cuda:0'), covar=tensor([0.3621, 0.3171, 0.1447, 0.1850, 0.3373, 0.1551, 0.3522, 0.2603], + device='cuda:0'), in_proj_covar=tensor([0.0828, 0.0873, 0.0669, 0.0899, 0.0813, 0.0746, 0.0806, 0.0735], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 04:54:53,552 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 04:55:28,720 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:55:52,976 INFO [train.py:903] (0/4) Epoch 15, batch 3050, loss[loss=0.2183, simple_loss=0.2898, pruned_loss=0.07345, over 19768.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07228, over 3851162.48 frames. ], batch size: 47, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:55:57,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:56:51,965 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.607e+02 7.054e+02 9.171e+02 2.046e+03, threshold=1.411e+03, percent-clipped=6.0 +2023-04-02 04:56:54,317 INFO [train.py:903] (0/4) Epoch 15, batch 3100, loss[loss=0.1964, simple_loss=0.2789, pruned_loss=0.057, over 19765.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2968, pruned_loss=0.07217, over 3844818.93 frames. ], batch size: 46, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:57:58,294 INFO [train.py:903] (0/4) Epoch 15, batch 3150, loss[loss=0.2198, simple_loss=0.3043, pruned_loss=0.06772, over 19459.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2967, pruned_loss=0.0722, over 3848153.02 frames. ], batch size: 64, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:58:26,344 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 04:58:34,636 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:58:58,674 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.220e+02 6.366e+02 8.908e+02 1.802e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-02 04:59:01,102 INFO [train.py:903] (0/4) Epoch 15, batch 3200, loss[loss=0.1936, simple_loss=0.2655, pruned_loss=0.06088, over 19747.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2964, pruned_loss=0.07206, over 3838727.68 frames. ], batch size: 46, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:59:37,257 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0013, 1.4921, 1.6222, 1.4930, 2.8677, 4.4942, 4.3509, 4.8188], + device='cuda:0'), covar=tensor([0.1799, 0.3454, 0.3327, 0.2069, 0.0621, 0.0187, 0.0160, 0.0157], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0303, 0.0330, 0.0251, 0.0223, 0.0168, 0.0206, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 04:59:59,409 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:02,403 INFO [train.py:903] (0/4) Epoch 15, batch 3250, loss[loss=0.1932, simple_loss=0.2697, pruned_loss=0.05832, over 19751.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2964, pruned_loss=0.07184, over 3830172.35 frames. ], batch size: 47, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:00:24,181 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9396, 4.4903, 2.7553, 3.8216, 0.8671, 4.3533, 4.2514, 4.3202], + device='cuda:0'), covar=tensor([0.0537, 0.0938, 0.1853, 0.0776, 0.4115, 0.0615, 0.0800, 0.0851], + device='cuda:0'), in_proj_covar=tensor([0.0458, 0.0380, 0.0453, 0.0322, 0.0387, 0.0392, 0.0380, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:00:29,958 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:56,820 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:59,973 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 4.885e+02 6.090e+02 7.322e+02 1.846e+03, threshold=1.218e+03, percent-clipped=3.0 +2023-04-02 05:01:02,397 INFO [train.py:903] (0/4) Epoch 15, batch 3300, loss[loss=0.2915, simple_loss=0.3518, pruned_loss=0.1156, over 14001.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2975, pruned_loss=0.07272, over 3811689.19 frames. ], batch size: 138, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:01:08,217 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 05:01:20,963 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:25,246 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98908.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:30,597 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:02:07,327 INFO [train.py:903] (0/4) Epoch 15, batch 3350, loss[loss=0.2065, simple_loss=0.2792, pruned_loss=0.06691, over 19766.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2965, pruned_loss=0.07183, over 3824186.99 frames. ], batch size: 48, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:02:26,973 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9346, 1.1571, 1.5434, 0.5561, 2.0796, 2.4186, 2.0622, 2.5642], + device='cuda:0'), covar=tensor([0.1551, 0.3710, 0.3137, 0.2619, 0.0571, 0.0265, 0.0372, 0.0320], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0306, 0.0333, 0.0254, 0.0225, 0.0169, 0.0208, 0.0224], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:02:38,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5606, 4.1684, 2.6208, 3.6571, 0.8265, 4.0639, 3.9229, 4.0381], + device='cuda:0'), covar=tensor([0.0658, 0.0969, 0.1990, 0.0828, 0.4283, 0.0692, 0.0838, 0.1040], + device='cuda:0'), in_proj_covar=tensor([0.0461, 0.0381, 0.0455, 0.0325, 0.0391, 0.0394, 0.0383, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:03:06,864 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98989.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:03:07,573 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.628e+02 7.317e+02 9.748e+02 2.071e+03, threshold=1.463e+03, percent-clipped=8.0 +2023-04-02 05:03:09,827 INFO [train.py:903] (0/4) Epoch 15, batch 3400, loss[loss=0.2216, simple_loss=0.3022, pruned_loss=0.07052, over 19687.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2975, pruned_loss=0.07248, over 3817800.84 frames. 
], batch size: 59, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:03:11,396 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1749, 1.1192, 1.1392, 1.2433, 0.9922, 1.3473, 1.3114, 1.2218], + device='cuda:0'), covar=tensor([0.0883, 0.0988, 0.1061, 0.0740, 0.0893, 0.0771, 0.0837, 0.0749], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0222, 0.0222, 0.0241, 0.0228, 0.0208, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 05:03:44,041 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:03:48,621 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3396, 1.4151, 1.8979, 1.7088, 3.0602, 4.8025, 4.6954, 5.0532], + device='cuda:0'), covar=tensor([0.1524, 0.3477, 0.3000, 0.1899, 0.0492, 0.0137, 0.0143, 0.0135], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0305, 0.0331, 0.0253, 0.0225, 0.0168, 0.0207, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:04:10,820 INFO [train.py:903] (0/4) Epoch 15, batch 3450, loss[loss=0.2691, simple_loss=0.3227, pruned_loss=0.1078, over 19726.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2981, pruned_loss=0.07285, over 3824977.79 frames. ], batch size: 51, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:04:14,067 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 05:04:45,815 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:05:11,145 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.946e+02 5.886e+02 7.201e+02 1.354e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-02 05:05:12,326 INFO [train.py:903] (0/4) Epoch 15, batch 3500, loss[loss=0.237, simple_loss=0.3174, pruned_loss=0.07825, over 18118.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2988, pruned_loss=0.0736, over 3819511.55 frames. ], batch size: 83, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:15,628 INFO [train.py:903] (0/4) Epoch 15, batch 3550, loss[loss=0.2287, simple_loss=0.3075, pruned_loss=0.07498, over 19312.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2993, pruned_loss=0.07345, over 3817595.45 frames. ], batch size: 66, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:18,511 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:48,295 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:07:18,044 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 4.734e+02 6.110e+02 7.809e+02 1.736e+03, threshold=1.222e+03, percent-clipped=8.0 +2023-04-02 05:07:19,078 INFO [train.py:903] (0/4) Epoch 15, batch 3600, loss[loss=0.2693, simple_loss=0.3321, pruned_loss=0.1032, over 19504.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2988, pruned_loss=0.07313, over 3819143.06 frames. ], batch size: 64, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:07:57,243 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. 
limit=5.0 +2023-04-02 05:08:20,326 INFO [train.py:903] (0/4) Epoch 15, batch 3650, loss[loss=0.1907, simple_loss=0.2662, pruned_loss=0.05755, over 19578.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2997, pruned_loss=0.07375, over 3821401.08 frames. ], batch size: 52, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:26,417 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:32,039 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:37,707 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:03,827 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:15,205 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:20,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.562e+02 6.527e+02 8.007e+02 1.277e+03, threshold=1.305e+03, percent-clipped=3.0 +2023-04-02 05:09:21,900 INFO [train.py:903] (0/4) Epoch 15, batch 3700, loss[loss=0.1944, simple_loss=0.2855, pruned_loss=0.05167, over 19669.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.07349, over 3822328.26 frames. ], batch size: 55, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:09:32,862 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:13,725 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99333.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:14,277 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 05:10:24,819 INFO [train.py:903] (0/4) Epoch 15, batch 3750, loss[loss=0.2223, simple_loss=0.2961, pruned_loss=0.07423, over 19610.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2991, pruned_loss=0.0735, over 3814763.40 frames. ], batch size: 50, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:10:25,119 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:56,172 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99367.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:01,969 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:26,382 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.684e+02 5.640e+02 6.861e+02 8.808e+02 1.909e+03, threshold=1.372e+03, percent-clipped=3.0 +2023-04-02 05:11:28,553 INFO [train.py:903] (0/4) Epoch 15, batch 3800, loss[loss=0.1963, simple_loss=0.2717, pruned_loss=0.06046, over 19330.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2999, pruned_loss=0.07417, over 3802730.73 frames. ], batch size: 44, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:11:53,186 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:58,878 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-02 05:12:26,312 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:30,515 INFO [train.py:903] (0/4) Epoch 15, batch 3850, loss[loss=0.2198, simple_loss=0.2879, pruned_loss=0.0759, over 19475.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3, pruned_loss=0.07408, over 3798954.86 frames. ], batch size: 49, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:12:37,721 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:13:25,880 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99486.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:13:32,482 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 4.923e+02 6.054e+02 7.410e+02 1.518e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 05:13:32,500 INFO [train.py:903] (0/4) Epoch 15, batch 3900, loss[loss=0.2059, simple_loss=0.2898, pruned_loss=0.06101, over 19661.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2987, pruned_loss=0.07316, over 3805130.34 frames. ], batch size: 55, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:18,211 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:14:33,486 INFO [train.py:903] (0/4) Epoch 15, batch 3950, loss[loss=0.1888, simple_loss=0.2621, pruned_loss=0.05775, over 19748.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2988, pruned_loss=0.07326, over 3812112.29 frames. ], batch size: 47, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:41,181 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 05:15:00,513 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. 
limit=5.0 +2023-04-02 05:15:02,362 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2843, 1.3869, 1.7069, 1.4885, 2.3706, 1.9839, 2.4512, 1.0961], + device='cuda:0'), covar=tensor([0.2293, 0.3909, 0.2233, 0.1814, 0.1449, 0.2071, 0.1405, 0.3846], + device='cuda:0'), in_proj_covar=tensor([0.0506, 0.0601, 0.0649, 0.0453, 0.0606, 0.0506, 0.0648, 0.0512], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:15:24,187 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8897, 2.6125, 2.5310, 3.0460, 2.7920, 2.5316, 2.2477, 2.8837], + device='cuda:0'), covar=tensor([0.0813, 0.1453, 0.1200, 0.0929, 0.1190, 0.0439, 0.1198, 0.0544], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0355, 0.0298, 0.0245, 0.0299, 0.0249, 0.0294, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:15:28,356 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99586.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:35,970 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:36,879 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.329e+02 6.331e+02 8.679e+02 1.427e+03, threshold=1.266e+03, percent-clipped=6.0 +2023-04-02 05:15:36,897 INFO [train.py:903] (0/4) Epoch 15, batch 4000, loss[loss=0.2293, simple_loss=0.2923, pruned_loss=0.08311, over 19764.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2994, pruned_loss=0.07384, over 3817331.82 frames. ], batch size: 47, lr: 5.51e-03, grad_scale: 8.0 +2023-04-02 05:15:54,646 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:14,189 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:20,937 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:23,035 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 05:16:23,148 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:36,049 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8749, 4.1750, 4.5177, 4.5112, 1.9641, 4.2137, 3.7901, 4.1840], + device='cuda:0'), covar=tensor([0.1418, 0.1395, 0.0502, 0.0562, 0.5123, 0.0774, 0.0601, 0.1029], + device='cuda:0'), in_proj_covar=tensor([0.0723, 0.0658, 0.0857, 0.0735, 0.0769, 0.0607, 0.0518, 0.0793], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 05:16:38,161 INFO [train.py:903] (0/4) Epoch 15, batch 4050, loss[loss=0.2305, simple_loss=0.3092, pruned_loss=0.07595, over 19729.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2991, pruned_loss=0.07314, over 3810254.69 frames. 
], batch size: 63, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:16:45,318 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:50,833 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:32,701 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:39,407 INFO [train.py:903] (0/4) Epoch 15, batch 4100, loss[loss=0.2239, simple_loss=0.2868, pruned_loss=0.08047, over 16848.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2983, pruned_loss=0.07285, over 3811232.29 frames. ], batch size: 37, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:17:40,554 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.802e+02 5.566e+02 7.429e+02 9.161e+02 2.166e+03, threshold=1.486e+03, percent-clipped=8.0 +2023-04-02 05:17:46,974 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:54,963 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:58,060 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:14,937 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 05:18:27,022 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:43,272 INFO [train.py:903] (0/4) Epoch 15, batch 4150, loss[loss=0.2291, simple_loss=0.3126, pruned_loss=0.07285, over 18513.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2985, pruned_loss=0.0727, over 3799587.75 frames. ], batch size: 84, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:18:47,045 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:31,728 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:35,422 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99784.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:45,183 INFO [train.py:903] (0/4) Epoch 15, batch 4200, loss[loss=0.2981, simple_loss=0.3445, pruned_loss=0.1259, over 13192.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.298, pruned_loss=0.07249, over 3801532.56 frames. ], batch size: 137, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:19:47,442 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.549e+02 6.772e+02 8.720e+02 1.402e+03, threshold=1.354e+03, percent-clipped=0.0 +2023-04-02 05:19:50,948 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 05:19:57,176 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:06,466 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:29,572 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3212, 1.3162, 1.5558, 1.4859, 2.2408, 2.0009, 2.2106, 0.7788], + device='cuda:0'), covar=tensor([0.2311, 0.4123, 0.2417, 0.1837, 0.1444, 0.2036, 0.1432, 0.4185], + device='cuda:0'), in_proj_covar=tensor([0.0504, 0.0596, 0.0646, 0.0452, 0.0603, 0.0504, 0.0645, 0.0511], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:20:31,690 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:20:47,368 INFO [train.py:903] (0/4) Epoch 15, batch 4250, loss[loss=0.2429, simple_loss=0.3191, pruned_loss=0.08334, over 19654.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2984, pruned_loss=0.07243, over 3815734.89 frames. ], batch size: 55, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:03,978 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 05:21:15,094 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 05:21:30,020 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8682, 1.9092, 2.1581, 2.5679, 1.7515, 2.4046, 2.2262, 1.9780], + device='cuda:0'), covar=tensor([0.3951, 0.3515, 0.1763, 0.1944, 0.3741, 0.1756, 0.4540, 0.3124], + device='cuda:0'), in_proj_covar=tensor([0.0831, 0.0876, 0.0670, 0.0903, 0.0817, 0.0751, 0.0807, 0.0739], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 05:21:37,938 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5086, 1.4342, 1.4088, 1.8231, 1.5631, 1.6720, 1.7862, 1.6344], + device='cuda:0'), covar=tensor([0.0834, 0.0920, 0.1009, 0.0640, 0.0722, 0.0767, 0.0820, 0.0661], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0219, 0.0239, 0.0226, 0.0207, 0.0188, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 05:21:38,410 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 05:21:47,822 INFO [train.py:903] (0/4) Epoch 15, batch 4300, loss[loss=0.2263, simple_loss=0.3117, pruned_loss=0.07047, over 19516.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2978, pruned_loss=0.07235, over 3811833.38 frames. ], batch size: 54, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:48,962 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.517e+02 6.494e+02 8.260e+02 1.741e+03, threshold=1.299e+03, percent-clipped=4.0 +2023-04-02 05:21:53,839 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:35,378 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99930.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:41,691 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-02 05:22:48,543 INFO [train.py:903] (0/4) Epoch 15, batch 4350, loss[loss=0.18, simple_loss=0.2545, pruned_loss=0.05271, over 18729.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2981, pruned_loss=0.07243, over 3824035.37 frames. ], batch size: 41, lr: 5.50e-03, grad_scale: 4.0 +2023-04-02 05:22:52,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99945.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:23:00,233 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:12,148 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3587, 3.1047, 2.0658, 2.8008, 0.9478, 2.9740, 2.8671, 2.9798], + device='cuda:0'), covar=tensor([0.0975, 0.1159, 0.2083, 0.0917, 0.3445, 0.0999, 0.1033, 0.1203], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0386, 0.0462, 0.0328, 0.0396, 0.0396, 0.0389, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:23:16,023 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99962.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:45,416 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:51,932 INFO [train.py:903] (0/4) Epoch 15, batch 4400, loss[loss=0.1798, simple_loss=0.2586, pruned_loss=0.05046, over 19741.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2968, pruned_loss=0.07179, over 3834907.48 frames. ], batch size: 47, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:23:53,158 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.844e+02 5.787e+02 7.470e+02 1.170e+03, threshold=1.157e+03, percent-clipped=0.0 +2023-04-02 05:24:04,174 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-100000.pt +2023-04-02 05:24:06,755 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:23,740 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 05:24:29,805 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6896, 4.0884, 4.3714, 4.3929, 1.9218, 4.0688, 3.6458, 4.0710], + device='cuda:0'), covar=tensor([0.1472, 0.1211, 0.0543, 0.0580, 0.4995, 0.0793, 0.0613, 0.1032], + device='cuda:0'), in_proj_covar=tensor([0.0732, 0.0665, 0.0870, 0.0744, 0.0775, 0.0613, 0.0524, 0.0802], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 05:24:31,862 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 05:24:34,572 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100025.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:24:35,747 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:57,064 INFO [train.py:903] (0/4) Epoch 15, batch 4450, loss[loss=0.1984, simple_loss=0.2752, pruned_loss=0.06078, over 19478.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2954, pruned_loss=0.07121, over 3831512.85 frames. 
], batch size: 49, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:24:57,236 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:00,894 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:14,927 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:25,306 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:47,664 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:59,167 INFO [train.py:903] (0/4) Epoch 15, batch 4500, loss[loss=0.2471, simple_loss=0.3232, pruned_loss=0.08555, over 19663.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2965, pruned_loss=0.0718, over 3820871.90 frames. ], batch size: 60, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:26:00,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.166e+02 6.659e+02 8.670e+02 1.796e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 05:26:59,885 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 05:27:01,361 INFO [train.py:903] (0/4) Epoch 15, batch 4550, loss[loss=0.239, simple_loss=0.3322, pruned_loss=0.07293, over 19321.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2963, pruned_loss=0.07122, over 3829051.96 frames. ], batch size: 66, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:27:10,395 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 05:27:16,310 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:21,847 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:38,124 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 05:27:47,888 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:28:04,927 INFO [train.py:903] (0/4) Epoch 15, batch 4600, loss[loss=0.2324, simple_loss=0.3115, pruned_loss=0.07666, over 19141.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2963, pruned_loss=0.0713, over 3833173.35 frames. 
], batch size: 69, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:28:06,060 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.707e+02 5.654e+02 7.541e+02 1.184e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-02 05:28:19,023 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100201.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:28:26,342 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1030, 1.2052, 1.4761, 1.3742, 2.7058, 1.0967, 2.0404, 3.0018], + device='cuda:0'), covar=tensor([0.0548, 0.2750, 0.2796, 0.1794, 0.0751, 0.2348, 0.1170, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0376, 0.0347, 0.0367, 0.0329, 0.0356, 0.0337, 0.0347, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:28:36,836 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4682, 1.4922, 1.8889, 1.7278, 3.2947, 2.7522, 3.5754, 1.7090], + device='cuda:0'), covar=tensor([0.2416, 0.4218, 0.2628, 0.1938, 0.1369, 0.1826, 0.1391, 0.3595], + device='cuda:0'), in_proj_covar=tensor([0.0504, 0.0596, 0.0648, 0.0452, 0.0603, 0.0505, 0.0641, 0.0511], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:28:47,106 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100226.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:29:08,987 INFO [train.py:903] (0/4) Epoch 15, batch 4650, loss[loss=0.2037, simple_loss=0.2829, pruned_loss=0.06222, over 19470.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2964, pruned_loss=0.07135, over 3830338.15 frames. ], batch size: 49, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:29:20,287 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 05:29:25,498 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 05:29:25,774 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:29:35,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 05:30:11,078 INFO [train.py:903] (0/4) Epoch 15, batch 4700, loss[loss=0.2207, simple_loss=0.2963, pruned_loss=0.07253, over 19777.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2975, pruned_loss=0.07174, over 3818492.57 frames. ], batch size: 54, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:30:12,228 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.601e+02 6.328e+02 8.331e+02 3.311e+03, threshold=1.266e+03, percent-clipped=13.0 +2023-04-02 05:30:22,181 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:33,248 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 05:30:50,889 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:55,446 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:13,905 INFO [train.py:903] (0/4) Epoch 15, batch 4750, loss[loss=0.2246, simple_loss=0.3024, pruned_loss=0.07346, over 19617.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2978, pruned_loss=0.07156, over 3824760.48 frames. ], batch size: 61, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:31:21,199 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:49,672 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100369.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:32:16,842 INFO [train.py:903] (0/4) Epoch 15, batch 4800, loss[loss=0.243, simple_loss=0.3228, pruned_loss=0.08166, over 19539.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2972, pruned_loss=0.07133, over 3835207.81 frames. ], batch size: 56, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:32:18,029 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.713e+02 6.387e+02 8.455e+02 2.006e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 05:32:43,225 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2884, 2.0155, 2.1042, 2.8580, 2.1647, 2.6773, 2.7621, 2.5030], + device='cuda:0'), covar=tensor([0.0699, 0.0850, 0.0907, 0.0784, 0.0784, 0.0675, 0.0726, 0.0556], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0221, 0.0220, 0.0239, 0.0226, 0.0208, 0.0187, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 05:32:44,441 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:32:52,646 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:17,175 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:21,571 INFO [train.py:903] (0/4) Epoch 15, batch 4850, loss[loss=0.2344, simple_loss=0.3114, pruned_loss=0.07875, over 19538.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2961, pruned_loss=0.07115, over 3826257.01 frames. ], batch size: 56, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:33:48,721 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 05:34:10,983 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 05:34:14,843 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100484.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:34:16,742 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 05:34:17,778 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 05:34:24,795 INFO [train.py:903] (0/4) Epoch 15, batch 4900, loss[loss=0.2449, simple_loss=0.3152, pruned_loss=0.08731, over 12910.00 frames. 
], tot_loss[loss=0.2206, simple_loss=0.2972, pruned_loss=0.07205, over 3802596.07 frames. ], batch size: 135, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:34:25,929 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.042e+02 5.846e+02 7.925e+02 1.600e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-02 05:34:25,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 05:34:46,442 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 05:35:27,066 INFO [train.py:903] (0/4) Epoch 15, batch 4950, loss[loss=0.2018, simple_loss=0.279, pruned_loss=0.06225, over 19482.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2973, pruned_loss=0.07167, over 3790818.76 frames. ], batch size: 49, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:35:45,778 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 05:36:04,528 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 05:36:05,452 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4897, 2.2791, 1.6757, 1.5945, 2.1440, 1.3852, 1.3189, 1.8958], + device='cuda:0'), covar=tensor([0.1126, 0.0775, 0.0973, 0.0742, 0.0469, 0.1146, 0.0767, 0.0434], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0305, 0.0323, 0.0250, 0.0237, 0.0325, 0.0292, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:36:09,666 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 05:36:29,672 INFO [train.py:903] (0/4) Epoch 15, batch 5000, loss[loss=0.214, simple_loss=0.2794, pruned_loss=0.07427, over 19800.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.07194, over 3810538.23 frames. ], batch size: 47, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:36:31,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 5.199e+02 6.623e+02 8.418e+02 1.165e+03, threshold=1.325e+03, percent-clipped=0.0 +2023-04-02 05:36:40,682 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 05:36:40,823 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:36:52,300 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 05:36:54,960 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7175, 1.7379, 1.5755, 1.3530, 1.3052, 1.3862, 0.2301, 0.6193], + device='cuda:0'), covar=tensor([0.0550, 0.0521, 0.0345, 0.0544, 0.1075, 0.0611, 0.0994, 0.0885], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0342, 0.0341, 0.0370, 0.0444, 0.0371, 0.0321, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:37:25,100 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8407, 1.1826, 1.5468, 0.5400, 2.0227, 2.4353, 2.0896, 2.5723], + device='cuda:0'), covar=tensor([0.1618, 0.3621, 0.3159, 0.2612, 0.0574, 0.0257, 0.0368, 0.0337], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0304, 0.0334, 0.0252, 0.0226, 0.0170, 0.0208, 0.0224], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:37:33,612 INFO [train.py:903] (0/4) Epoch 15, batch 5050, loss[loss=0.2216, simple_loss=0.3062, pruned_loss=0.06848, over 19107.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2981, pruned_loss=0.07226, over 3805593.75 frames. ], batch size: 69, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:38:09,678 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 05:38:37,496 INFO [train.py:903] (0/4) Epoch 15, batch 5100, loss[loss=0.2227, simple_loss=0.3061, pruned_loss=0.06967, over 19493.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2981, pruned_loss=0.07231, over 3808980.87 frames. ], batch size: 64, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:38:39,893 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 5.134e+02 6.124e+02 7.830e+02 1.941e+03, threshold=1.225e+03, percent-clipped=4.0 +2023-04-02 05:38:49,382 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 05:38:51,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 05:38:57,362 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 05:39:05,812 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:39:37,087 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100740.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:39:38,888 INFO [train.py:903] (0/4) Epoch 15, batch 5150, loss[loss=0.2363, simple_loss=0.3131, pruned_loss=0.07978, over 19730.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2962, pruned_loss=0.07166, over 3810891.82 frames. ], batch size: 51, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:39:47,030 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100748.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:39:51,337 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 05:40:07,666 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:40:09,039 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100765.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:40:20,529 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5461, 1.4723, 1.4506, 1.8278, 1.4258, 1.8217, 1.7492, 1.6609], + device='cuda:0'), covar=tensor([0.0799, 0.0909, 0.0991, 0.0680, 0.0773, 0.0699, 0.0790, 0.0657], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0221, 0.0221, 0.0241, 0.0227, 0.0209, 0.0189, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 05:40:28,224 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 05:40:38,849 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1275, 3.5466, 2.0315, 2.2791, 3.1251, 1.8460, 1.4962, 2.1125], + device='cuda:0'), covar=tensor([0.1387, 0.0519, 0.1043, 0.0745, 0.0476, 0.1173, 0.0956, 0.0696], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0305, 0.0325, 0.0251, 0.0237, 0.0326, 0.0294, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:40:41,950 INFO [train.py:903] (0/4) Epoch 15, batch 5200, loss[loss=0.2746, simple_loss=0.3283, pruned_loss=0.1105, over 13488.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2975, pruned_loss=0.07216, over 3809290.56 frames. ], batch size: 136, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:40:44,524 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 5.325e+02 6.515e+02 8.501e+02 1.618e+03, threshold=1.303e+03, percent-clipped=5.0 +2023-04-02 05:40:58,648 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 05:41:43,060 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 05:41:47,607 INFO [train.py:903] (0/4) Epoch 15, batch 5250, loss[loss=0.2412, simple_loss=0.315, pruned_loss=0.08373, over 18391.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2976, pruned_loss=0.07209, over 3816791.00 frames. ], batch size: 84, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:08,359 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4524, 2.2604, 2.0428, 1.8540, 1.7620, 1.8747, 0.6810, 1.1971], + device='cuda:0'), covar=tensor([0.0419, 0.0431, 0.0353, 0.0610, 0.0889, 0.0683, 0.0993, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0340, 0.0338, 0.0368, 0.0438, 0.0367, 0.0320, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:42:33,416 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100879.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:42:50,409 INFO [train.py:903] (0/4) Epoch 15, batch 5300, loss[loss=0.227, simple_loss=0.3082, pruned_loss=0.07294, over 18783.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.0729, over 3814399.77 frames. 
], batch size: 74, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:52,727 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.131e+02 6.120e+02 8.353e+02 1.768e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 05:43:08,007 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 05:43:52,931 INFO [train.py:903] (0/4) Epoch 15, batch 5350, loss[loss=0.242, simple_loss=0.3161, pruned_loss=0.084, over 19515.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2986, pruned_loss=0.07292, over 3818752.63 frames. ], batch size: 54, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:29,363 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 05:44:30,867 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:44:56,006 INFO [train.py:903] (0/4) Epoch 15, batch 5400, loss[loss=0.2189, simple_loss=0.2907, pruned_loss=0.07362, over 19461.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.299, pruned_loss=0.07337, over 3802933.53 frames. ], batch size: 49, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:58,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.355e+02 6.832e+02 8.412e+02 2.240e+03, threshold=1.366e+03, percent-clipped=7.0 +2023-04-02 05:45:00,855 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:45:55,909 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2246, 2.0705, 1.8662, 1.6760, 1.5751, 1.6916, 0.4741, 1.1169], + device='cuda:0'), covar=tensor([0.0518, 0.0528, 0.0417, 0.0649, 0.1040, 0.0747, 0.1172, 0.0853], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0342, 0.0338, 0.0367, 0.0441, 0.0369, 0.0321, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:46:00,097 INFO [train.py:903] (0/4) Epoch 15, batch 5450, loss[loss=0.1993, simple_loss=0.2872, pruned_loss=0.05564, over 19666.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2989, pruned_loss=0.07363, over 3803359.09 frames. ], batch size: 55, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:47:04,590 INFO [train.py:903] (0/4) Epoch 15, batch 5500, loss[loss=0.1848, simple_loss=0.2649, pruned_loss=0.0523, over 19770.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2994, pruned_loss=0.07355, over 3809415.48 frames. ], batch size: 48, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:47:04,778 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101092.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:47:06,825 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 5.715e+02 6.860e+02 8.623e+02 1.531e+03, threshold=1.372e+03, percent-clipped=1.0 +2023-04-02 05:47:27,323 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-02 05:47:45,322 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:47:58,078 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:48:05,680 INFO [train.py:903] (0/4) Epoch 15, batch 5550, loss[loss=0.2718, simple_loss=0.3449, pruned_loss=0.09939, over 19663.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2994, pruned_loss=0.07353, over 3818385.68 frames. ], batch size: 59, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:48:12,815 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 05:48:28,830 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:48:38,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-02 05:49:04,234 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 05:49:08,923 INFO [train.py:903] (0/4) Epoch 15, batch 5600, loss[loss=0.2555, simple_loss=0.3289, pruned_loss=0.09108, over 19589.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2991, pruned_loss=0.07323, over 3822518.71 frames. ], batch size: 61, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:49:11,017 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 5.272e+02 6.555e+02 8.017e+02 1.573e+03, threshold=1.311e+03, percent-clipped=2.0 +2023-04-02 05:49:28,229 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4353, 1.2220, 1.4547, 1.4729, 2.9133, 1.0772, 2.1863, 3.4406], + device='cuda:0'), covar=tensor([0.0647, 0.3266, 0.3150, 0.2090, 0.0954, 0.2796, 0.1603, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0382, 0.0350, 0.0368, 0.0334, 0.0359, 0.0339, 0.0352, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:49:28,275 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101207.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:49:52,607 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9867, 1.0245, 1.3232, 1.2631, 2.3366, 0.9637, 2.0870, 2.7696], + device='cuda:0'), covar=tensor([0.0767, 0.3693, 0.3343, 0.2199, 0.1283, 0.2766, 0.1305, 0.0521], + device='cuda:0'), in_proj_covar=tensor([0.0381, 0.0350, 0.0368, 0.0333, 0.0358, 0.0339, 0.0351, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:50:11,404 INFO [train.py:903] (0/4) Epoch 15, batch 5650, loss[loss=0.2102, simple_loss=0.2859, pruned_loss=0.06724, over 19680.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2988, pruned_loss=0.07303, over 3822794.07 frames. ], batch size: 53, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:50:59,686 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-02 05:51:01,872 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8014, 1.5426, 1.9457, 1.6424, 4.2339, 1.1890, 2.3697, 4.6531], + device='cuda:0'), covar=tensor([0.0403, 0.2834, 0.2567, 0.1987, 0.0776, 0.2573, 0.1441, 0.0183], + device='cuda:0'), in_proj_covar=tensor([0.0381, 0.0352, 0.0369, 0.0334, 0.0360, 0.0340, 0.0352, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:51:14,090 INFO [train.py:903] (0/4) Epoch 15, batch 5700, loss[loss=0.2143, simple_loss=0.2924, pruned_loss=0.06813, over 19330.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2993, pruned_loss=0.07351, over 3821046.48 frames. ], batch size: 66, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:51:17,702 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.651e+02 6.610e+02 8.419e+02 1.957e+03, threshold=1.322e+03, percent-clipped=7.0 +2023-04-02 05:52:17,923 INFO [train.py:903] (0/4) Epoch 15, batch 5750, loss[loss=0.2157, simple_loss=0.2953, pruned_loss=0.06802, over 19426.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2994, pruned_loss=0.07326, over 3820282.89 frames. ], batch size: 70, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:52:20,199 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 05:52:28,370 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 05:52:33,016 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 05:53:21,260 INFO [train.py:903] (0/4) Epoch 15, batch 5800, loss[loss=0.2049, simple_loss=0.2776, pruned_loss=0.06609, over 19617.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2993, pruned_loss=0.07319, over 3819552.83 frames. ], batch size: 50, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:53:23,496 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.554e+02 5.132e+02 6.075e+02 7.663e+02 2.298e+03, threshold=1.215e+03, percent-clipped=3.0 +2023-04-02 05:54:09,484 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0401, 1.8604, 1.7593, 2.1725, 1.9615, 1.8264, 1.7881, 2.0523], + device='cuda:0'), covar=tensor([0.0930, 0.1497, 0.1371, 0.0980, 0.1234, 0.0510, 0.1227, 0.0662], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0351, 0.0295, 0.0244, 0.0297, 0.0243, 0.0291, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:54:24,297 INFO [train.py:903] (0/4) Epoch 15, batch 5850, loss[loss=0.2226, simple_loss=0.3007, pruned_loss=0.07225, over 19719.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2992, pruned_loss=0.07347, over 3819937.62 frames. ], batch size: 63, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:54:52,512 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101463.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:54:58,854 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:55:15,851 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-04-02 05:55:24,183 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101488.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:55:28,447 INFO [train.py:903] (0/4) Epoch 15, batch 5900, loss[loss=0.1896, simple_loss=0.2619, pruned_loss=0.05869, over 19767.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2974, pruned_loss=0.07254, over 3829454.46 frames. ], batch size: 48, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:55:30,756 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.163e+02 6.557e+02 7.983e+02 1.612e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-02 05:55:30,817 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 05:55:51,844 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 05:56:03,645 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0446, 3.6890, 2.4137, 3.3245, 0.8897, 3.5540, 3.4906, 3.5553], + device='cuda:0'), covar=tensor([0.0751, 0.1120, 0.2005, 0.0843, 0.3726, 0.0738, 0.0868, 0.1072], + device='cuda:0'), in_proj_covar=tensor([0.0462, 0.0382, 0.0458, 0.0327, 0.0390, 0.0393, 0.0385, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:56:30,791 INFO [train.py:903] (0/4) Epoch 15, batch 5950, loss[loss=0.2215, simple_loss=0.2918, pruned_loss=0.0756, over 19476.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.298, pruned_loss=0.07323, over 3813939.71 frames. ], batch size: 49, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:56:31,089 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6413, 1.5503, 1.9454, 2.0115, 4.1324, 1.3336, 2.4332, 4.3677], + device='cuda:0'), covar=tensor([0.0406, 0.2843, 0.2480, 0.1703, 0.0749, 0.2466, 0.1493, 0.0233], + device='cuda:0'), in_proj_covar=tensor([0.0380, 0.0350, 0.0368, 0.0333, 0.0360, 0.0339, 0.0352, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:57:17,244 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9423, 1.1052, 1.4824, 0.5342, 2.0654, 2.4497, 2.1402, 2.6036], + device='cuda:0'), covar=tensor([0.1590, 0.3942, 0.3369, 0.2606, 0.0601, 0.0263, 0.0367, 0.0334], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0305, 0.0334, 0.0252, 0.0225, 0.0169, 0.0208, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:57:24,037 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:57:34,464 INFO [train.py:903] (0/4) Epoch 15, batch 6000, loss[loss=0.2403, simple_loss=0.319, pruned_loss=0.08083, over 19624.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2968, pruned_loss=0.0721, over 3828354.83 frames. ], batch size: 57, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:34,465 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 05:57:47,190 INFO [train.py:937] (0/4) Epoch 15, validation: loss=0.1729, simple_loss=0.2735, pruned_loss=0.0362, over 944034.00 frames. 
+2023-04-02 05:57:47,192 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 05:57:49,633 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.208e+02 6.128e+02 8.316e+02 1.573e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 05:57:54,415 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0854, 1.8344, 1.7109, 2.0836, 1.8252, 1.8449, 1.6635, 2.0652], + device='cuda:0'), covar=tensor([0.0898, 0.1346, 0.1367, 0.0963, 0.1245, 0.0482, 0.1302, 0.0655], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0348, 0.0293, 0.0243, 0.0294, 0.0242, 0.0287, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 05:58:30,053 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4564, 1.6241, 2.0459, 1.8235, 3.1469, 2.7391, 3.5909, 1.6353], + device='cuda:0'), covar=tensor([0.2215, 0.3776, 0.2416, 0.1630, 0.1483, 0.1804, 0.1440, 0.3669], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0604, 0.0650, 0.0454, 0.0605, 0.0509, 0.0648, 0.0513], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 05:58:49,961 INFO [train.py:903] (0/4) Epoch 15, batch 6050, loss[loss=0.2152, simple_loss=0.2986, pruned_loss=0.06585, over 19539.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07289, over 3812663.73 frames. ], batch size: 56, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:52,013 INFO [train.py:903] (0/4) Epoch 15, batch 6100, loss[loss=0.2186, simple_loss=0.3073, pruned_loss=0.06496, over 19681.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2989, pruned_loss=0.07316, over 3815329.79 frames. ], batch size: 58, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:55,081 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.053e+02 5.047e+02 6.326e+02 7.867e+02 1.574e+03, threshold=1.265e+03, percent-clipped=9.0 +2023-04-02 06:00:05,572 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:00:56,523 INFO [train.py:903] (0/4) Epoch 15, batch 6150, loss[loss=0.2318, simple_loss=0.3138, pruned_loss=0.07488, over 19698.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2993, pruned_loss=0.07347, over 3814913.68 frames. ], batch size: 59, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 06:01:23,863 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 06:01:59,359 INFO [train.py:903] (0/4) Epoch 15, batch 6200, loss[loss=0.2335, simple_loss=0.3259, pruned_loss=0.07058, over 19665.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2989, pruned_loss=0.07306, over 3819162.22 frames. 
], batch size: 60, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:02:01,550 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.959e+02 6.303e+02 7.991e+02 1.526e+03, threshold=1.261e+03, percent-clipped=1.0 +2023-04-02 06:02:07,378 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8877, 4.9789, 5.7054, 5.7013, 1.9908, 5.3766, 4.5598, 5.3455], + device='cuda:0'), covar=tensor([0.1441, 0.0883, 0.0525, 0.0487, 0.5475, 0.0656, 0.0566, 0.1025], + device='cuda:0'), in_proj_covar=tensor([0.0737, 0.0672, 0.0880, 0.0754, 0.0785, 0.0625, 0.0528, 0.0813], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 06:02:59,272 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:02,066 INFO [train.py:903] (0/4) Epoch 15, batch 6250, loss[loss=0.2143, simple_loss=0.2851, pruned_loss=0.07179, over 19386.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2984, pruned_loss=0.07214, over 3828921.37 frames. ], batch size: 47, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:03:19,659 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 06:03:30,484 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 06:03:30,844 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:44,783 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3432, 2.3589, 2.1321, 2.6854, 2.5700, 2.1505, 2.1113, 2.6689], + device='cuda:0'), covar=tensor([0.0932, 0.1556, 0.1322, 0.1011, 0.1208, 0.0510, 0.1194, 0.0595], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0350, 0.0296, 0.0244, 0.0297, 0.0244, 0.0290, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:03:53,398 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.44 vs. limit=5.0 +2023-04-02 06:04:04,515 INFO [train.py:903] (0/4) Epoch 15, batch 6300, loss[loss=0.2013, simple_loss=0.2765, pruned_loss=0.06308, over 19732.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.298, pruned_loss=0.07213, over 3817193.32 frames. ], batch size: 51, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:04:07,998 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.483e+02 5.666e+02 6.616e+02 7.934e+02 1.912e+03, threshold=1.323e+03, percent-clipped=2.0 +2023-04-02 06:05:01,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5574, 2.1865, 2.2470, 2.6625, 2.4731, 2.3424, 2.2263, 2.8977], + device='cuda:0'), covar=tensor([0.0882, 0.1687, 0.1285, 0.0999, 0.1317, 0.0488, 0.1157, 0.0518], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0351, 0.0296, 0.0245, 0.0298, 0.0245, 0.0291, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:05:03,699 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101938.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:05:08,249 INFO [train.py:903] (0/4) Epoch 15, batch 6350, loss[loss=0.1876, simple_loss=0.2533, pruned_loss=0.06097, over 19753.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2978, pruned_loss=0.07208, over 3827005.83 frames. 
], batch size: 46, lr: 5.45e-03, grad_scale: 2.0 +2023-04-02 06:06:11,825 INFO [train.py:903] (0/4) Epoch 15, batch 6400, loss[loss=0.2121, simple_loss=0.2943, pruned_loss=0.06499, over 19741.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2972, pruned_loss=0.07155, over 3834872.64 frames. ], batch size: 51, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:06:16,593 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 4.874e+02 5.936e+02 7.490e+02 2.019e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 06:06:22,637 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-102000.pt +2023-04-02 06:06:30,739 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:07:10,213 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3563, 3.9114, 2.4766, 3.4231, 0.9466, 3.7638, 3.6656, 3.8010], + device='cuda:0'), covar=tensor([0.0677, 0.1106, 0.2157, 0.0955, 0.4059, 0.0875, 0.1058, 0.1298], + device='cuda:0'), in_proj_covar=tensor([0.0467, 0.0383, 0.0459, 0.0330, 0.0391, 0.0395, 0.0387, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:07:17,097 INFO [train.py:903] (0/4) Epoch 15, batch 6450, loss[loss=0.1953, simple_loss=0.2808, pruned_loss=0.0549, over 19762.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2974, pruned_loss=0.07151, over 3836380.34 frames. ], batch size: 54, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:07:23,142 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102046.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:07:28,324 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5183, 3.1639, 2.6030, 2.4947, 2.6218, 2.7818, 0.9446, 2.3091], + device='cuda:0'), covar=tensor([0.0549, 0.0466, 0.0536, 0.0888, 0.0767, 0.0905, 0.1216, 0.0885], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0340, 0.0338, 0.0370, 0.0441, 0.0370, 0.0322, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:08:03,626 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 06:08:20,708 INFO [train.py:903] (0/4) Epoch 15, batch 6500, loss[loss=0.1771, simple_loss=0.2574, pruned_loss=0.04835, over 19733.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2971, pruned_loss=0.07121, over 3837073.43 frames. ], batch size: 46, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:08:25,528 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.860e+02 6.100e+02 7.866e+02 2.286e+03, threshold=1.220e+03, percent-clipped=9.0 +2023-04-02 06:08:26,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 06:09:23,526 INFO [train.py:903] (0/4) Epoch 15, batch 6550, loss[loss=0.1925, simple_loss=0.2761, pruned_loss=0.05445, over 19736.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2965, pruned_loss=0.07096, over 3820223.51 frames. 
], batch size: 51, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:09:47,181 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:10:26,473 INFO [train.py:903] (0/4) Epoch 15, batch 6600, loss[loss=0.2742, simple_loss=0.3355, pruned_loss=0.1064, over 18084.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2976, pruned_loss=0.07156, over 3829832.53 frames. ], batch size: 83, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:10:31,178 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 4.937e+02 6.611e+02 8.175e+02 1.787e+03, threshold=1.322e+03, percent-clipped=6.0 +2023-04-02 06:11:01,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1435, 1.3557, 1.8506, 1.6078, 3.0211, 4.4130, 4.2703, 4.8370], + device='cuda:0'), covar=tensor([0.1714, 0.3732, 0.3236, 0.2110, 0.0568, 0.0195, 0.0175, 0.0139], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0308, 0.0336, 0.0256, 0.0227, 0.0171, 0.0210, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:11:29,859 INFO [train.py:903] (0/4) Epoch 15, batch 6650, loss[loss=0.1947, simple_loss=0.2678, pruned_loss=0.06084, over 19753.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2974, pruned_loss=0.07151, over 3828536.13 frames. ], batch size: 46, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:11:36,978 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2766, 1.4115, 1.8629, 1.5227, 2.7211, 2.2274, 2.7822, 1.2048], + device='cuda:0'), covar=tensor([0.2437, 0.3957, 0.2306, 0.1873, 0.1375, 0.1970, 0.1476, 0.3949], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0602, 0.0654, 0.0455, 0.0606, 0.0512, 0.0651, 0.0514], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:12:20,476 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102282.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:12:33,728 INFO [train.py:903] (0/4) Epoch 15, batch 6700, loss[loss=0.184, simple_loss=0.2569, pruned_loss=0.05557, over 19726.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2967, pruned_loss=0.0713, over 3828985.05 frames. ], batch size: 45, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:12:38,442 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 5.015e+02 6.423e+02 7.841e+02 1.581e+03, threshold=1.285e+03, percent-clipped=1.0 +2023-04-02 06:13:32,251 INFO [train.py:903] (0/4) Epoch 15, batch 6750, loss[loss=0.2206, simple_loss=0.296, pruned_loss=0.07258, over 19692.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2961, pruned_loss=0.07129, over 3827126.61 frames. ], batch size: 53, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:13:40,325 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:14:30,400 INFO [train.py:903] (0/4) Epoch 15, batch 6800, loss[loss=0.1801, simple_loss=0.2567, pruned_loss=0.05181, over 19771.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2949, pruned_loss=0.07054, over 3838821.27 frames. 
], batch size: 47, lr: 5.44e-03, grad_scale: 8.0 +2023-04-02 06:14:35,327 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.944e+02 6.022e+02 7.665e+02 3.022e+03, threshold=1.204e+03, percent-clipped=5.0 +2023-04-02 06:14:36,978 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102397.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:14:57,420 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:15:00,313 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-15.pt +2023-04-02 06:15:15,669 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 06:15:16,141 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 06:15:19,105 INFO [train.py:903] (0/4) Epoch 16, batch 0, loss[loss=0.2627, simple_loss=0.3192, pruned_loss=0.1031, over 19483.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3192, pruned_loss=0.1031, over 19483.00 frames. ], batch size: 64, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:15:19,106 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 06:15:29,716 INFO [train.py:937] (0/4) Epoch 16, validation: loss=0.1737, simple_loss=0.2745, pruned_loss=0.03646, over 944034.00 frames. +2023-04-02 06:15:29,717 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 06:15:45,601 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 06:15:58,413 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:01,078 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.57 vs. limit=5.0 +2023-04-02 06:16:24,722 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:33,055 INFO [train.py:903] (0/4) Epoch 16, batch 50, loss[loss=0.2097, simple_loss=0.2975, pruned_loss=0.06088, over 19615.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2941, pruned_loss=0.06989, over 854416.40 frames. ], batch size: 57, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:04,309 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.865e+02 6.426e+02 8.395e+02 1.744e+03, threshold=1.285e+03, percent-clipped=5.0 +2023-04-02 06:17:08,787 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 06:17:33,677 INFO [train.py:903] (0/4) Epoch 16, batch 100, loss[loss=0.1814, simple_loss=0.2544, pruned_loss=0.05422, over 19730.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2976, pruned_loss=0.0726, over 1514132.03 frames. ], batch size: 45, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:47,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 06:18:34,758 INFO [train.py:903] (0/4) Epoch 16, batch 150, loss[loss=0.1903, simple_loss=0.2667, pruned_loss=0.05698, over 19322.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2971, pruned_loss=0.07234, over 2026322.43 frames. 
], batch size: 44, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:18:46,806 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8539, 1.9092, 2.1862, 2.4815, 1.7670, 2.3681, 2.2932, 2.0291], + device='cuda:0'), covar=tensor([0.3884, 0.3487, 0.1699, 0.1831, 0.3558, 0.1708, 0.4286, 0.3007], + device='cuda:0'), in_proj_covar=tensor([0.0835, 0.0884, 0.0679, 0.0905, 0.0824, 0.0758, 0.0813, 0.0744], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 06:18:54,616 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4574, 1.6666, 2.1340, 1.7755, 3.0496, 2.4679, 3.2842, 1.5714], + device='cuda:0'), covar=tensor([0.2658, 0.4317, 0.2758, 0.2061, 0.1859, 0.2348, 0.1873, 0.4242], + device='cuda:0'), in_proj_covar=tensor([0.0507, 0.0602, 0.0655, 0.0457, 0.0606, 0.0514, 0.0654, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 06:19:07,432 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.578e+02 6.638e+02 8.298e+02 1.665e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 06:19:36,764 INFO [train.py:903] (0/4) Epoch 16, batch 200, loss[loss=0.1941, simple_loss=0.2697, pruned_loss=0.05928, over 19768.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2959, pruned_loss=0.07173, over 2409174.45 frames. ], batch size: 47, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:38,833 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 06:20:18,470 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102653.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:20:37,454 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5262, 1.3004, 1.3895, 2.0099, 1.6505, 1.8503, 1.9831, 1.6855], + device='cuda:0'), covar=tensor([0.0903, 0.1131, 0.1076, 0.0872, 0.0885, 0.0819, 0.0829, 0.0743], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0223, 0.0243, 0.0226, 0.0210, 0.0190, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 06:20:38,296 INFO [train.py:903] (0/4) Epoch 16, batch 250, loss[loss=0.2358, simple_loss=0.3096, pruned_loss=0.08099, over 19746.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2969, pruned_loss=0.07217, over 2727076.29 frames. ], batch size: 63, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:20:50,334 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102678.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:21:12,172 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 5.293e+02 5.976e+02 7.347e+02 1.638e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 06:21:43,451 INFO [train.py:903] (0/4) Epoch 16, batch 300, loss[loss=0.1965, simple_loss=0.2701, pruned_loss=0.06146, over 19315.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2949, pruned_loss=0.07077, over 2976671.27 frames. 
], batch size: 44, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:21:43,868 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102720.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:06,896 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9435, 1.3265, 1.0446, 0.9298, 1.1699, 0.9339, 0.9061, 1.2331], + device='cuda:0'), covar=tensor([0.0539, 0.0723, 0.1104, 0.0688, 0.0542, 0.1234, 0.0580, 0.0452], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0310, 0.0330, 0.0255, 0.0243, 0.0329, 0.0295, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:22:13,583 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:44,073 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0167, 1.2670, 1.6420, 1.1987, 2.6295, 3.4364, 3.1720, 3.6013], + device='cuda:0'), covar=tensor([0.1828, 0.3772, 0.3398, 0.2349, 0.0567, 0.0184, 0.0230, 0.0230], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0305, 0.0334, 0.0255, 0.0228, 0.0170, 0.0210, 0.0226], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:22:44,946 INFO [train.py:903] (0/4) Epoch 16, batch 350, loss[loss=0.2373, simple_loss=0.3148, pruned_loss=0.07988, over 18744.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2951, pruned_loss=0.07075, over 3163043.82 frames. ], batch size: 74, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:22:50,853 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 06:23:16,122 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.254e+02 6.186e+02 7.503e+02 2.205e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 06:23:36,188 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2318, 1.3073, 1.2400, 1.0303, 1.0848, 1.1205, 0.0322, 0.3657], + device='cuda:0'), covar=tensor([0.0548, 0.0524, 0.0325, 0.0459, 0.1076, 0.0469, 0.1045, 0.0924], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0342, 0.0338, 0.0371, 0.0442, 0.0371, 0.0323, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:23:47,561 INFO [train.py:903] (0/4) Epoch 16, batch 400, loss[loss=0.208, simple_loss=0.2901, pruned_loss=0.06298, over 19684.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07072, over 3321927.23 frames. ], batch size: 53, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:24:49,390 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102869.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:24:50,277 INFO [train.py:903] (0/4) Epoch 16, batch 450, loss[loss=0.2295, simple_loss=0.3095, pruned_loss=0.07476, over 19587.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2956, pruned_loss=0.07064, over 3446895.95 frames. ], batch size: 61, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:25:22,280 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.837e+02 5.955e+02 8.003e+02 1.401e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 06:25:24,661 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-02 06:25:25,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 06:25:33,021 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:25:51,996 INFO [train.py:903] (0/4) Epoch 16, batch 500, loss[loss=0.2288, simple_loss=0.3134, pruned_loss=0.07211, over 19390.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2966, pruned_loss=0.07081, over 3512612.31 frames. ], batch size: 70, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:26:37,556 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9963, 2.0821, 2.2943, 2.7454, 1.9430, 2.6897, 2.4553, 2.1050], + device='cuda:0'), covar=tensor([0.4002, 0.3478, 0.1717, 0.2022, 0.3666, 0.1737, 0.4101, 0.3045], + device='cuda:0'), in_proj_covar=tensor([0.0837, 0.0886, 0.0679, 0.0906, 0.0824, 0.0758, 0.0814, 0.0743], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 06:26:54,172 INFO [train.py:903] (0/4) Epoch 16, batch 550, loss[loss=0.183, simple_loss=0.2564, pruned_loss=0.05479, over 19750.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2959, pruned_loss=0.0708, over 3590808.32 frames. ], batch size: 45, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:27:00,546 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-02 06:27:18,352 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2514, 1.5170, 1.9746, 1.7324, 3.0222, 4.5705, 4.4415, 4.9281], + device='cuda:0'), covar=tensor([0.1666, 0.3486, 0.3104, 0.2040, 0.0554, 0.0182, 0.0154, 0.0154], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0306, 0.0335, 0.0256, 0.0228, 0.0171, 0.0211, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:27:24,957 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.789e+02 5.227e+02 6.443e+02 7.702e+02 1.436e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 06:27:54,425 INFO [train.py:903] (0/4) Epoch 16, batch 600, loss[loss=0.2246, simple_loss=0.306, pruned_loss=0.07163, over 19698.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2956, pruned_loss=0.07065, over 3629282.73 frames. ], batch size: 59, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:28:37,057 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 06:28:55,608 INFO [train.py:903] (0/4) Epoch 16, batch 650, loss[loss=0.1943, simple_loss=0.2724, pruned_loss=0.05813, over 19476.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07056, over 3680375.30 frames. ], batch size: 49, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:29:28,781 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.715e+02 6.008e+02 7.942e+02 1.451e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-04-02 06:29:53,874 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103116.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:29:58,298 INFO [train.py:903] (0/4) Epoch 16, batch 700, loss[loss=0.2525, simple_loss=0.3235, pruned_loss=0.09071, over 19681.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2962, pruned_loss=0.07081, over 3719234.68 frames. 
], batch size: 59, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:30:34,540 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-02 06:30:35,103 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103150.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:31:00,294 INFO [train.py:903] (0/4) Epoch 16, batch 750, loss[loss=0.2352, simple_loss=0.3025, pruned_loss=0.08392, over 19621.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2972, pruned_loss=0.07158, over 3752559.90 frames. ], batch size: 50, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:31:33,687 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.236e+02 6.262e+02 8.063e+02 1.865e+03, threshold=1.252e+03, percent-clipped=5.0 +2023-04-02 06:31:55,084 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:03,181 INFO [train.py:903] (0/4) Epoch 16, batch 800, loss[loss=0.2728, simple_loss=0.3355, pruned_loss=0.105, over 13200.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.298, pruned_loss=0.07195, over 3750638.39 frames. ], batch size: 135, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:32:18,116 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 06:32:26,451 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9642, 1.3493, 1.1203, 0.9637, 1.2669, 0.9444, 0.9859, 1.2694], + device='cuda:0'), covar=tensor([0.0661, 0.0740, 0.0785, 0.0667, 0.0429, 0.0962, 0.0525, 0.0367], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0309, 0.0330, 0.0254, 0.0242, 0.0330, 0.0292, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:32:38,486 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103249.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:52,092 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7378, 4.1968, 4.4071, 4.3897, 1.6954, 4.1592, 3.5707, 4.1000], + device='cuda:0'), covar=tensor([0.1532, 0.0844, 0.0656, 0.0644, 0.5669, 0.0704, 0.0653, 0.1157], + device='cuda:0'), in_proj_covar=tensor([0.0730, 0.0666, 0.0869, 0.0746, 0.0772, 0.0615, 0.0519, 0.0802], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 06:33:04,745 INFO [train.py:903] (0/4) Epoch 16, batch 850, loss[loss=0.2337, simple_loss=0.3114, pruned_loss=0.07799, over 19336.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2981, pruned_loss=0.07215, over 3758463.14 frames. 
], batch size: 66, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:33:38,413 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.865e+02 6.263e+02 7.829e+02 1.710e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 06:33:40,906 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9786, 5.0776, 5.8122, 5.7484, 2.0578, 5.4479, 4.6906, 5.4398], + device='cuda:0'), covar=tensor([0.1507, 0.0765, 0.0534, 0.0607, 0.5514, 0.0584, 0.0535, 0.1131], + device='cuda:0'), in_proj_covar=tensor([0.0730, 0.0665, 0.0870, 0.0747, 0.0770, 0.0616, 0.0520, 0.0802], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 06:33:44,516 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5808, 1.7096, 2.0882, 1.8358, 3.2877, 3.0448, 3.5530, 1.5763], + device='cuda:0'), covar=tensor([0.2254, 0.3968, 0.2439, 0.1782, 0.1472, 0.1640, 0.1622, 0.3847], + device='cuda:0'), in_proj_covar=tensor([0.0505, 0.0600, 0.0654, 0.0455, 0.0609, 0.0512, 0.0650, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:33:57,929 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 06:34:06,717 INFO [train.py:903] (0/4) Epoch 16, batch 900, loss[loss=0.1916, simple_loss=0.2657, pruned_loss=0.05878, over 19747.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2979, pruned_loss=0.07182, over 3779991.32 frames. ], batch size: 45, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:34:17,372 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:01,509 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:06,308 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2661, 2.8560, 2.2226, 2.1036, 2.0927, 2.3844, 1.1343, 2.1006], + device='cuda:0'), covar=tensor([0.0544, 0.0457, 0.0609, 0.0890, 0.0876, 0.0844, 0.1073, 0.0838], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0338, 0.0336, 0.0364, 0.0437, 0.0367, 0.0320, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:35:08,184 INFO [train.py:903] (0/4) Epoch 16, batch 950, loss[loss=0.2586, simple_loss=0.3291, pruned_loss=0.09404, over 19377.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2968, pruned_loss=0.07097, over 3799222.58 frames. ], batch size: 70, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:35:13,576 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 06:35:19,098 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0220, 3.6481, 2.3892, 3.2895, 1.0643, 3.4706, 3.4102, 3.4848], + device='cuda:0'), covar=tensor([0.0761, 0.1053, 0.2027, 0.0831, 0.3649, 0.0863, 0.0932, 0.1218], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0384, 0.0460, 0.0329, 0.0391, 0.0398, 0.0392, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:35:27,482 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103384.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:35:40,826 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.367e+02 6.234e+02 7.823e+02 2.113e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-02 06:36:12,131 INFO [train.py:903] (0/4) Epoch 16, batch 1000, loss[loss=0.2306, simple_loss=0.3223, pruned_loss=0.06945, over 17452.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2978, pruned_loss=0.0716, over 3791396.42 frames. ], batch size: 101, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:03,100 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103460.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:07,730 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 06:37:14,587 INFO [train.py:903] (0/4) Epoch 16, batch 1050, loss[loss=0.2153, simple_loss=0.2986, pruned_loss=0.06601, over 19688.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.298, pruned_loss=0.07207, over 3804571.27 frames. ], batch size: 59, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:43,688 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:45,894 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.497e+02 6.450e+02 8.583e+02 2.663e+03, threshold=1.290e+03, percent-clipped=6.0 +2023-04-02 06:37:49,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 06:38:15,957 INFO [train.py:903] (0/4) Epoch 16, batch 1100, loss[loss=0.228, simple_loss=0.3245, pruned_loss=0.06573, over 19611.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07193, over 3815561.39 frames. ], batch size: 57, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:38:21,830 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3419, 3.8450, 3.9337, 3.9200, 1.5921, 3.7430, 3.2440, 3.6506], + device='cuda:0'), covar=tensor([0.1490, 0.0807, 0.0618, 0.0673, 0.5269, 0.0784, 0.0688, 0.1122], + device='cuda:0'), in_proj_covar=tensor([0.0741, 0.0676, 0.0879, 0.0754, 0.0780, 0.0623, 0.0524, 0.0810], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 06:39:18,053 INFO [train.py:903] (0/4) Epoch 16, batch 1150, loss[loss=0.2315, simple_loss=0.308, pruned_loss=0.07745, over 19536.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2963, pruned_loss=0.07158, over 3813346.68 frames. 
], batch size: 56, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:39:24,703 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103575.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:37,371 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:50,620 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.106e+02 6.190e+02 8.719e+02 1.567e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 06:39:58,866 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5094, 1.5271, 1.8512, 1.6999, 2.6482, 2.2581, 2.7202, 1.2871], + device='cuda:0'), covar=tensor([0.2096, 0.3746, 0.2274, 0.1716, 0.1341, 0.1903, 0.1337, 0.3806], + device='cuda:0'), in_proj_covar=tensor([0.0501, 0.0599, 0.0653, 0.0453, 0.0604, 0.0509, 0.0647, 0.0514], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:40:06,771 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:06,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:21,156 INFO [train.py:903] (0/4) Epoch 16, batch 1200, loss[loss=0.2542, simple_loss=0.3181, pruned_loss=0.09509, over 13139.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2952, pruned_loss=0.0707, over 3807898.78 frames. ], batch size: 136, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:40:21,605 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:52,908 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:54,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 06:41:24,586 INFO [train.py:903] (0/4) Epoch 16, batch 1250, loss[loss=0.2584, simple_loss=0.3217, pruned_loss=0.09759, over 13636.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2972, pruned_loss=0.07169, over 3799620.21 frames. ], batch size: 138, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:41:56,591 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.406e+02 4.955e+02 6.144e+02 7.729e+02 1.641e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 06:42:20,612 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9028, 1.1085, 1.5178, 0.6170, 2.0385, 2.3867, 2.1217, 2.5064], + device='cuda:0'), covar=tensor([0.1613, 0.3710, 0.3167, 0.2585, 0.0591, 0.0319, 0.0352, 0.0366], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0307, 0.0336, 0.0257, 0.0228, 0.0172, 0.0211, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 06:42:24,683 INFO [train.py:903] (0/4) Epoch 16, batch 1300, loss[loss=0.2746, simple_loss=0.3313, pruned_loss=0.109, over 19767.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2964, pruned_loss=0.07152, over 3798569.08 frames. 
], batch size: 54, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:42:35,155 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103728.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:42:53,029 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6935, 1.8485, 2.0974, 2.4257, 1.6823, 2.2058, 2.2202, 1.9870], + device='cuda:0'), covar=tensor([0.3985, 0.3416, 0.1639, 0.1837, 0.3645, 0.1746, 0.4110, 0.2939], + device='cuda:0'), in_proj_covar=tensor([0.0838, 0.0885, 0.0682, 0.0910, 0.0826, 0.0763, 0.0811, 0.0744], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 06:43:12,106 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3538, 3.9556, 2.5316, 3.4366, 0.7948, 3.8781, 3.7213, 3.8180], + device='cuda:0'), covar=tensor([0.0678, 0.1025, 0.2015, 0.0909, 0.4047, 0.0667, 0.0867, 0.1024], + device='cuda:0'), in_proj_covar=tensor([0.0473, 0.0386, 0.0462, 0.0334, 0.0395, 0.0400, 0.0395, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:43:12,195 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:26,061 INFO [train.py:903] (0/4) Epoch 16, batch 1350, loss[loss=0.2549, simple_loss=0.3243, pruned_loss=0.09274, over 19723.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2963, pruned_loss=0.07197, over 3807097.81 frames. ], batch size: 63, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:43:43,812 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:47,409 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.13 vs. limit=5.0 +2023-04-02 06:43:59,438 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 5.214e+02 6.633e+02 9.392e+02 2.118e+03, threshold=1.327e+03, percent-clipped=8.0 +2023-04-02 06:44:30,117 INFO [train.py:903] (0/4) Epoch 16, batch 1400, loss[loss=0.2136, simple_loss=0.297, pruned_loss=0.06515, over 19296.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2963, pruned_loss=0.07145, over 3797372.48 frames. 
], batch size: 66, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:44:43,931 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103831.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:44:57,958 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103843.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:45:14,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103856.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:20,224 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5257, 1.3935, 1.1770, 1.5237, 1.2922, 1.3210, 1.2292, 1.4107], + device='cuda:0'), covar=tensor([0.1019, 0.1009, 0.1436, 0.0779, 0.1021, 0.0612, 0.1273, 0.0774], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0354, 0.0298, 0.0246, 0.0301, 0.0247, 0.0293, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:45:26,689 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:32,154 INFO [train.py:903] (0/4) Epoch 16, batch 1450, loss[loss=0.1946, simple_loss=0.2721, pruned_loss=0.05852, over 19741.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2974, pruned_loss=0.07233, over 3804238.76 frames. ], batch size: 51, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:45:32,180 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 06:45:56,430 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:00,966 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103894.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:03,877 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.072e+02 4.634e+02 5.962e+02 7.073e+02 1.523e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-02 06:46:33,192 INFO [train.py:903] (0/4) Epoch 16, batch 1500, loss[loss=0.2362, simple_loss=0.2976, pruned_loss=0.08742, over 19073.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.296, pruned_loss=0.07154, over 3814316.09 frames. ], batch size: 42, lr: 5.23e-03, grad_scale: 16.0 +2023-04-02 06:47:06,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3164, 1.2558, 1.3024, 1.3012, 1.0782, 1.4028, 1.4662, 1.3258], + device='cuda:0'), covar=tensor([0.0867, 0.0959, 0.1026, 0.0705, 0.0788, 0.0803, 0.0759, 0.0744], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0222, 0.0224, 0.0244, 0.0226, 0.0210, 0.0192, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 06:47:35,228 INFO [train.py:903] (0/4) Epoch 16, batch 1550, loss[loss=0.229, simple_loss=0.3085, pruned_loss=0.07477, over 19350.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.296, pruned_loss=0.07164, over 3810132.07 frames. 
], batch size: 70, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:47:58,591 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8176, 1.8382, 1.5034, 2.1182, 1.8391, 1.6176, 1.6701, 1.8991], + device='cuda:0'), covar=tensor([0.1089, 0.1480, 0.1507, 0.0908, 0.1232, 0.0605, 0.1333, 0.0725], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0355, 0.0299, 0.0247, 0.0302, 0.0247, 0.0295, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:48:09,262 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.500e+02 6.727e+02 8.636e+02 1.580e+03, threshold=1.345e+03, percent-clipped=8.0 +2023-04-02 06:48:12,943 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-104000.pt +2023-04-02 06:48:24,281 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:32,603 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:39,342 INFO [train.py:903] (0/4) Epoch 16, batch 1600, loss[loss=0.2279, simple_loss=0.3085, pruned_loss=0.07362, over 19326.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2955, pruned_loss=0.07097, over 3808033.59 frames. ], batch size: 66, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:48:51,274 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8072, 3.4530, 1.9222, 2.0365, 3.0139, 1.6614, 1.1966, 2.1474], + device='cuda:0'), covar=tensor([0.1504, 0.0553, 0.1054, 0.0836, 0.0524, 0.1244, 0.1122, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0310, 0.0330, 0.0255, 0.0244, 0.0333, 0.0296, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:49:05,515 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 06:49:27,357 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8594, 1.9799, 2.1721, 2.4829, 1.8405, 2.4088, 2.2411, 2.0183], + device='cuda:0'), covar=tensor([0.3766, 0.3131, 0.1710, 0.2037, 0.3383, 0.1727, 0.4091, 0.2875], + device='cuda:0'), in_proj_covar=tensor([0.0840, 0.0886, 0.0683, 0.0912, 0.0825, 0.0762, 0.0815, 0.0746], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 06:49:38,461 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:49:41,608 INFO [train.py:903] (0/4) Epoch 16, batch 1650, loss[loss=0.2134, simple_loss=0.2955, pruned_loss=0.06563, over 19566.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2963, pruned_loss=0.07137, over 3809538.58 frames. 
], batch size: 52, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:14,808 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.025e+02 5.150e+02 6.179e+02 7.587e+02 1.568e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 06:50:18,554 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104099.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:50:21,710 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:50:37,162 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9330, 1.5121, 1.7531, 1.6602, 4.4573, 1.1747, 2.4610, 4.7510], + device='cuda:0'), covar=tensor([0.0414, 0.2856, 0.2709, 0.1942, 0.0752, 0.2595, 0.1461, 0.0182], + device='cuda:0'), in_proj_covar=tensor([0.0382, 0.0351, 0.0370, 0.0335, 0.0358, 0.0341, 0.0352, 0.0376], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:50:43,583 INFO [train.py:903] (0/4) Epoch 16, batch 1700, loss[loss=0.2528, simple_loss=0.3302, pruned_loss=0.08773, over 17237.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2967, pruned_loss=0.07136, over 3821117.50 frames. ], batch size: 101, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:48,592 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104124.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:50:51,804 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104127.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:51:25,772 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 06:51:29,472 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1126, 2.0118, 1.8218, 2.2359, 2.0577, 1.9046, 1.7118, 2.1428], + device='cuda:0'), covar=tensor([0.0940, 0.1470, 0.1244, 0.0978, 0.1174, 0.0477, 0.1294, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0356, 0.0300, 0.0249, 0.0303, 0.0249, 0.0295, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:51:44,972 INFO [train.py:903] (0/4) Epoch 16, batch 1750, loss[loss=0.2305, simple_loss=0.3027, pruned_loss=0.07915, over 19574.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2975, pruned_loss=0.07209, over 3821281.85 frames. ], batch size: 52, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:52:19,029 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.884e+02 5.867e+02 6.930e+02 2.034e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 06:52:44,500 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:52:48,726 INFO [train.py:903] (0/4) Epoch 16, batch 1800, loss[loss=0.2601, simple_loss=0.3298, pruned_loss=0.0952, over 19374.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2977, pruned_loss=0.07177, over 3828001.18 frames. ], batch size: 70, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:53:09,912 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:11,578 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-02 06:53:14,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:46,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 06:53:49,825 INFO [train.py:903] (0/4) Epoch 16, batch 1850, loss[loss=0.2254, simple_loss=0.314, pruned_loss=0.06841, over 19525.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2973, pruned_loss=0.07163, over 3832215.07 frames. ], batch size: 54, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:54:21,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 06:54:22,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.248e+02 6.749e+02 7.716e+02 1.558e+03, threshold=1.350e+03, percent-clipped=5.0 +2023-04-02 06:54:26,609 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 06:54:46,589 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5849, 1.3540, 1.5448, 1.5560, 3.1454, 1.1327, 2.2693, 3.5441], + device='cuda:0'), covar=tensor([0.0444, 0.2629, 0.2726, 0.1735, 0.0711, 0.2406, 0.1249, 0.0245], + device='cuda:0'), in_proj_covar=tensor([0.0381, 0.0349, 0.0367, 0.0334, 0.0357, 0.0339, 0.0351, 0.0374], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 06:54:51,925 INFO [train.py:903] (0/4) Epoch 16, batch 1900, loss[loss=0.266, simple_loss=0.3369, pruned_loss=0.09756, over 19743.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2977, pruned_loss=0.0722, over 3799303.68 frames. ], batch size: 63, lr: 5.22e-03, grad_scale: 4.0 +2023-04-02 06:55:09,077 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 06:55:15,645 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 06:55:31,877 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:34,121 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:39,870 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 06:55:41,199 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:53,979 INFO [train.py:903] (0/4) Epoch 16, batch 1950, loss[loss=0.2119, simple_loss=0.2975, pruned_loss=0.06311, over 19510.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2986, pruned_loss=0.07277, over 3814057.83 frames. ], batch size: 64, lr: 5.21e-03, grad_scale: 4.0 +2023-04-02 06:56:30,730 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.688e+02 6.377e+02 8.120e+02 1.703e+03, threshold=1.275e+03, percent-clipped=4.0 +2023-04-02 06:56:46,822 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104411.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:56:58,261 INFO [train.py:903] (0/4) Epoch 16, batch 2000, loss[loss=0.1959, simple_loss=0.2772, pruned_loss=0.05724, over 19850.00 frames. 
], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.07195, over 3812564.01 frames. ], batch size: 52, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:57:57,340 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 06:57:57,610 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:00,766 INFO [train.py:903] (0/4) Epoch 16, batch 2050, loss[loss=0.169, simple_loss=0.2396, pruned_loss=0.04918, over 19309.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.07149, over 3814309.35 frames. ], batch size: 44, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:58:04,661 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:05,701 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:09,152 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 06:58:14,089 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 06:58:15,353 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 06:58:35,725 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.885e+02 6.080e+02 8.555e+02 1.693e+03, threshold=1.216e+03, percent-clipped=6.0 +2023-04-02 06:58:36,103 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:36,135 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:39,795 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 06:58:40,126 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 06:59:02,733 INFO [train.py:903] (0/4) Epoch 16, batch 2100, loss[loss=0.2278, simple_loss=0.3061, pruned_loss=0.07481, over 19689.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2968, pruned_loss=0.07168, over 3830462.05 frames. ], batch size: 59, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:59:06,389 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:09,923 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:33,807 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 06:59:55,524 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-02 06:59:56,919 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8639, 4.9221, 5.6510, 5.6293, 2.1763, 5.2768, 4.5043, 5.2558], + device='cuda:0'), covar=tensor([0.1401, 0.0906, 0.0531, 0.0565, 0.5164, 0.0653, 0.0550, 0.1105], + device='cuda:0'), in_proj_covar=tensor([0.0726, 0.0667, 0.0868, 0.0748, 0.0771, 0.0618, 0.0515, 0.0799], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 07:00:04,689 INFO [train.py:903] (0/4) Epoch 16, batch 2150, loss[loss=0.2962, simple_loss=0.3549, pruned_loss=0.1187, over 19668.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2963, pruned_loss=0.07161, over 3831950.53 frames. ], batch size: 58, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:00:39,764 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.547e+02 6.907e+02 8.298e+02 2.194e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-02 07:00:49,284 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0356, 1.7450, 1.8716, 1.7209, 4.5030, 1.0514, 2.4452, 4.8729], + device='cuda:0'), covar=tensor([0.0371, 0.2602, 0.2618, 0.1862, 0.0723, 0.2609, 0.1408, 0.0177], + device='cuda:0'), in_proj_covar=tensor([0.0384, 0.0351, 0.0369, 0.0335, 0.0360, 0.0340, 0.0354, 0.0376], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:00:53,773 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:01:08,291 INFO [train.py:903] (0/4) Epoch 16, batch 2200, loss[loss=0.1872, simple_loss=0.2583, pruned_loss=0.05804, over 19773.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.296, pruned_loss=0.07136, over 3839945.23 frames. ], batch size: 48, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:01:26,328 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:01:41,695 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6286, 2.3884, 1.7809, 1.4614, 2.1690, 1.2320, 1.3512, 1.9268], + device='cuda:0'), covar=tensor([0.0990, 0.0726, 0.1053, 0.0835, 0.0561, 0.1335, 0.0786, 0.0467], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0309, 0.0328, 0.0255, 0.0244, 0.0332, 0.0291, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:02:12,257 INFO [train.py:903] (0/4) Epoch 16, batch 2250, loss[loss=0.2229, simple_loss=0.2864, pruned_loss=0.07974, over 19768.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2959, pruned_loss=0.07121, over 3828892.43 frames. ], batch size: 48, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:02:46,756 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.921e+02 5.898e+02 6.952e+02 1.646e+03, threshold=1.180e+03, percent-clipped=1.0 +2023-04-02 07:03:15,187 INFO [train.py:903] (0/4) Epoch 16, batch 2300, loss[loss=0.1828, simple_loss=0.2615, pruned_loss=0.0521, over 19050.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2952, pruned_loss=0.07047, over 3826525.43 frames. 
], batch size: 42, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:03:19,214 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:27,409 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:29,350 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 07:03:51,587 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104748.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:59,908 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:17,946 INFO [train.py:903] (0/4) Epoch 16, batch 2350, loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06392, over 19444.00 frames. ], tot_loss[loss=0.217, simple_loss=0.294, pruned_loss=0.06996, over 3822352.31 frames. ], batch size: 64, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:04:34,887 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:53,849 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.048e+02 6.738e+02 8.844e+02 1.580e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 07:05:00,895 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 07:05:05,754 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:05:18,375 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 07:05:22,501 INFO [train.py:903] (0/4) Epoch 16, batch 2400, loss[loss=0.2244, simple_loss=0.301, pruned_loss=0.0739, over 19661.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.07005, over 3831701.61 frames. ], batch size: 53, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:20,843 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 07:06:24,571 INFO [train.py:903] (0/4) Epoch 16, batch 2450, loss[loss=0.2413, simple_loss=0.3203, pruned_loss=0.08115, over 19677.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07086, over 3817765.89 frames. ], batch size: 60, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:07:00,038 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.877e+02 7.599e+02 9.302e+02 2.010e+03, threshold=1.520e+03, percent-clipped=8.0 +2023-04-02 07:07:13,328 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5768, 1.1972, 1.4015, 1.3504, 2.2110, 0.9950, 2.0277, 2.4261], + device='cuda:0'), covar=tensor([0.0684, 0.2729, 0.2693, 0.1524, 0.0908, 0.2004, 0.0992, 0.0461], + device='cuda:0'), in_proj_covar=tensor([0.0385, 0.0354, 0.0371, 0.0336, 0.0361, 0.0342, 0.0358, 0.0379], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:07:27,189 INFO [train.py:903] (0/4) Epoch 16, batch 2500, loss[loss=0.2226, simple_loss=0.3044, pruned_loss=0.0704, over 19765.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2957, pruned_loss=0.07121, over 3813602.01 frames. 
], batch size: 63, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:07:30,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9496, 3.4818, 1.8224, 2.1693, 3.0881, 1.6887, 1.3592, 2.2042], + device='cuda:0'), covar=tensor([0.1447, 0.0593, 0.1080, 0.0751, 0.0488, 0.1158, 0.1078, 0.0641], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0314, 0.0330, 0.0257, 0.0244, 0.0333, 0.0295, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:07:39,237 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 07:07:48,630 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 07:08:26,459 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8374, 4.4381, 2.7121, 3.8653, 0.9972, 4.1897, 4.2100, 4.2580], + device='cuda:0'), covar=tensor([0.0590, 0.0907, 0.1836, 0.0770, 0.3923, 0.0656, 0.0779, 0.1092], + device='cuda:0'), in_proj_covar=tensor([0.0469, 0.0384, 0.0459, 0.0329, 0.0390, 0.0396, 0.0391, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:08:29,683 INFO [train.py:903] (0/4) Epoch 16, batch 2550, loss[loss=0.1822, simple_loss=0.2592, pruned_loss=0.05265, over 19403.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.07147, over 3821907.04 frames. ], batch size: 48, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:09:05,174 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 5.239e+02 6.384e+02 8.143e+02 1.987e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 07:09:24,257 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-02 07:09:24,637 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 07:09:32,157 INFO [train.py:903] (0/4) Epoch 16, batch 2600, loss[loss=0.258, simple_loss=0.3369, pruned_loss=0.08956, over 19449.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.0715, over 3820403.19 frames. ], batch size: 70, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:03,892 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3690, 2.1927, 2.1435, 2.6145, 2.4244, 2.1019, 2.0387, 2.4032], + device='cuda:0'), covar=tensor([0.0890, 0.1494, 0.1209, 0.0833, 0.1168, 0.0510, 0.1186, 0.0611], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0352, 0.0298, 0.0247, 0.0300, 0.0249, 0.0294, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:10:21,330 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4508, 2.3501, 1.7136, 1.5516, 2.1898, 1.3201, 1.2981, 1.8603], + device='cuda:0'), covar=tensor([0.1146, 0.0689, 0.0963, 0.0826, 0.0466, 0.1169, 0.0777, 0.0515], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0313, 0.0329, 0.0258, 0.0243, 0.0333, 0.0294, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:10:35,304 INFO [train.py:903] (0/4) Epoch 16, batch 2650, loss[loss=0.2381, simple_loss=0.3187, pruned_loss=0.07874, over 18754.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.296, pruned_loss=0.07081, over 3813833.91 frames. 
], batch size: 74, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:54,808 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 07:11:09,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 5.317e+02 6.316e+02 8.001e+02 1.365e+03, threshold=1.263e+03, percent-clipped=2.0 +2023-04-02 07:11:36,912 INFO [train.py:903] (0/4) Epoch 16, batch 2700, loss[loss=0.2171, simple_loss=0.303, pruned_loss=0.06566, over 19363.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2974, pruned_loss=0.07102, over 3824997.50 frames. ], batch size: 66, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:12:39,773 INFO [train.py:903] (0/4) Epoch 16, batch 2750, loss[loss=0.2289, simple_loss=0.3063, pruned_loss=0.07578, over 19674.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2974, pruned_loss=0.07125, over 3815802.17 frames. ], batch size: 60, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:13:15,057 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 5.170e+02 6.445e+02 8.543e+02 2.677e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 07:13:41,785 INFO [train.py:903] (0/4) Epoch 16, batch 2800, loss[loss=0.2438, simple_loss=0.3199, pruned_loss=0.08385, over 19684.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2978, pruned_loss=0.07184, over 3818854.25 frames. ], batch size: 60, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:14:44,325 INFO [train.py:903] (0/4) Epoch 16, batch 2850, loss[loss=0.2318, simple_loss=0.3125, pruned_loss=0.07552, over 18900.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2985, pruned_loss=0.07222, over 3827769.11 frames. ], batch size: 74, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:18,937 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.908e+02 5.066e+02 6.361e+02 8.222e+02 2.548e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 07:15:46,243 INFO [train.py:903] (0/4) Epoch 16, batch 2900, loss[loss=0.2342, simple_loss=0.3033, pruned_loss=0.08252, over 19855.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2989, pruned_loss=0.07262, over 3832273.08 frames. ], batch size: 52, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:46,285 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 07:16:48,798 INFO [train.py:903] (0/4) Epoch 16, batch 2950, loss[loss=0.2113, simple_loss=0.2846, pruned_loss=0.06902, over 19329.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2983, pruned_loss=0.07201, over 3825114.18 frames. 
], batch size: 44, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:23,864 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.947e+02 6.195e+02 7.724e+02 2.015e+03, threshold=1.239e+03, percent-clipped=3.0 +2023-04-02 07:17:24,283 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8832, 1.6090, 1.5041, 1.9252, 1.6872, 1.6832, 1.5079, 1.8122], + device='cuda:0'), covar=tensor([0.1025, 0.1479, 0.1489, 0.0958, 0.1244, 0.0533, 0.1384, 0.0706], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0353, 0.0299, 0.0247, 0.0299, 0.0250, 0.0295, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:17:33,595 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:17:50,878 INFO [train.py:903] (0/4) Epoch 16, batch 3000, loss[loss=0.2073, simple_loss=0.2922, pruned_loss=0.06123, over 19803.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2983, pruned_loss=0.07164, over 3843935.61 frames. ], batch size: 56, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:50,878 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 07:18:04,146 INFO [train.py:937] (0/4) Epoch 16, validation: loss=0.1725, simple_loss=0.273, pruned_loss=0.03604, over 944034.00 frames. +2023-04-02 07:18:04,147 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 07:18:07,769 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 07:19:07,110 INFO [train.py:903] (0/4) Epoch 16, batch 3050, loss[loss=0.1806, simple_loss=0.26, pruned_loss=0.0506, over 19793.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2967, pruned_loss=0.07059, over 3852427.51 frames. ], batch size: 48, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:19:41,628 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.577e+02 4.913e+02 6.190e+02 8.953e+02 2.496e+03, threshold=1.238e+03, percent-clipped=7.0 +2023-04-02 07:19:49,924 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:11,581 INFO [train.py:903] (0/4) Epoch 16, batch 3100, loss[loss=0.2182, simple_loss=0.3045, pruned_loss=0.06594, over 19357.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.06997, over 3851247.72 frames. ], batch size: 70, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:20:11,860 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105520.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:40,008 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1063, 1.2614, 1.7145, 1.3518, 2.8155, 3.7209, 3.4926, 3.9794], + device='cuda:0'), covar=tensor([0.1752, 0.3797, 0.3313, 0.2272, 0.0550, 0.0175, 0.0200, 0.0207], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0307, 0.0337, 0.0255, 0.0230, 0.0174, 0.0210, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:21:13,220 INFO [train.py:903] (0/4) Epoch 16, batch 3150, loss[loss=0.2709, simple_loss=0.3433, pruned_loss=0.0992, over 17310.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2955, pruned_loss=0.06985, over 3854842.82 frames. 
], batch size: 101, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:21:41,321 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 07:21:46,677 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.014e+02 6.069e+02 7.545e+02 2.509e+03, threshold=1.214e+03, percent-clipped=2.0 +2023-04-02 07:22:12,912 INFO [train.py:903] (0/4) Epoch 16, batch 3200, loss[loss=0.2014, simple_loss=0.2855, pruned_loss=0.05867, over 19856.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2966, pruned_loss=0.07079, over 3854799.78 frames. ], batch size: 52, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:15,478 INFO [train.py:903] (0/4) Epoch 16, batch 3250, loss[loss=0.2127, simple_loss=0.2877, pruned_loss=0.06885, over 19186.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2959, pruned_loss=0.07041, over 3846979.28 frames. ], batch size: 69, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:50,142 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 5.141e+02 5.939e+02 7.859e+02 1.653e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-02 07:24:19,063 INFO [train.py:903] (0/4) Epoch 16, batch 3300, loss[loss=0.2555, simple_loss=0.3236, pruned_loss=0.09373, over 19579.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2956, pruned_loss=0.07052, over 3830157.63 frames. ], batch size: 52, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:24:19,297 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9801, 4.5434, 2.6634, 4.0258, 0.8638, 4.4542, 4.3827, 4.4983], + device='cuda:0'), covar=tensor([0.0518, 0.0885, 0.1804, 0.0630, 0.3760, 0.0516, 0.0704, 0.0884], + device='cuda:0'), in_proj_covar=tensor([0.0471, 0.0387, 0.0463, 0.0328, 0.0393, 0.0398, 0.0394, 0.0428], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:24:22,584 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 07:24:48,441 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4237, 2.1224, 1.8967, 2.7599, 2.1386, 2.4802, 2.4888, 2.4333], + device='cuda:0'), covar=tensor([0.0708, 0.0876, 0.0942, 0.0848, 0.0875, 0.0732, 0.0868, 0.0602], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0221, 0.0240, 0.0224, 0.0207, 0.0189, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 07:24:55,795 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:25:21,449 INFO [train.py:903] (0/4) Epoch 16, batch 3350, loss[loss=0.1629, simple_loss=0.2379, pruned_loss=0.0439, over 19757.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2942, pruned_loss=0.06945, over 3837702.40 frames. ], batch size: 46, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:25:31,893 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-02 07:25:57,717 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.066e+02 6.242e+02 8.095e+02 2.652e+03, threshold=1.248e+03, percent-clipped=5.0 +2023-04-02 07:26:12,306 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:20,486 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105817.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:23,649 INFO [train.py:903] (0/4) Epoch 16, batch 3400, loss[loss=0.2306, simple_loss=0.3089, pruned_loss=0.0761, over 19048.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2952, pruned_loss=0.07002, over 3826045.66 frames. ], batch size: 69, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:26:29,258 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-02 07:26:59,994 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:18,390 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:19,628 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:26,111 INFO [train.py:903] (0/4) Epoch 16, batch 3450, loss[loss=0.1983, simple_loss=0.2703, pruned_loss=0.06314, over 16047.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2951, pruned_loss=0.06984, over 3828401.04 frames. ], batch size: 35, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:29,346 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 07:28:00,500 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.355e+02 6.157e+02 7.690e+02 1.854e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 07:28:29,093 INFO [train.py:903] (0/4) Epoch 16, batch 3500, loss[loss=0.2412, simple_loss=0.3178, pruned_loss=0.08227, over 19783.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2952, pruned_loss=0.07003, over 3831045.95 frames. ], batch size: 54, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:29:23,569 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:29:31,025 INFO [train.py:903] (0/4) Epoch 16, batch 3550, loss[loss=0.2453, simple_loss=0.3175, pruned_loss=0.08651, over 19660.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2957, pruned_loss=0.07075, over 3841198.83 frames. ], batch size: 60, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:29:42,155 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:30:06,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 5.018e+02 6.219e+02 7.272e+02 1.453e+03, threshold=1.244e+03, percent-clipped=3.0 +2023-04-02 07:30:10,061 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-106000.pt +2023-04-02 07:30:34,290 INFO [train.py:903] (0/4) Epoch 16, batch 3600, loss[loss=0.2295, simple_loss=0.3089, pruned_loss=0.07506, over 17484.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2952, pruned_loss=0.07043, over 3826293.61 frames. 
], batch size: 101, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:30:46,588 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-02 07:30:47,285 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1132, 1.1730, 1.2875, 1.2402, 1.5348, 1.5788, 1.5082, 0.5494], + device='cuda:0'), covar=tensor([0.2129, 0.3614, 0.2277, 0.1717, 0.1355, 0.1995, 0.1150, 0.3915], + device='cuda:0'), in_proj_covar=tensor([0.0507, 0.0608, 0.0659, 0.0461, 0.0604, 0.0510, 0.0648, 0.0520], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:31:37,380 INFO [train.py:903] (0/4) Epoch 16, batch 3650, loss[loss=0.175, simple_loss=0.2526, pruned_loss=0.04871, over 19375.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2954, pruned_loss=0.07062, over 3814615.15 frames. ], batch size: 47, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:09,751 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:32:13,044 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.371e+02 6.801e+02 8.277e+02 1.518e+03, threshold=1.360e+03, percent-clipped=5.0 +2023-04-02 07:32:30,556 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9209, 2.0281, 2.1604, 2.5424, 1.8853, 2.4550, 2.2111, 2.0155], + device='cuda:0'), covar=tensor([0.3965, 0.3364, 0.1760, 0.2189, 0.3801, 0.1862, 0.4343, 0.2955], + device='cuda:0'), in_proj_covar=tensor([0.0839, 0.0889, 0.0679, 0.0905, 0.0825, 0.0759, 0.0809, 0.0743], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 07:32:42,528 INFO [train.py:903] (0/4) Epoch 16, batch 3700, loss[loss=0.1865, simple_loss=0.2672, pruned_loss=0.05296, over 19398.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.294, pruned_loss=0.06955, over 3829476.63 frames. ], batch size: 48, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:44,087 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:32:52,258 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7301, 1.8279, 2.0589, 2.3833, 1.7138, 2.2535, 2.1733, 1.9172], + device='cuda:0'), covar=tensor([0.3879, 0.3504, 0.1663, 0.1902, 0.3529, 0.1758, 0.4060, 0.3062], + device='cuda:0'), in_proj_covar=tensor([0.0838, 0.0887, 0.0678, 0.0903, 0.0822, 0.0756, 0.0808, 0.0742], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 07:33:13,534 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:23,569 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:34,045 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:44,481 INFO [train.py:903] (0/4) Epoch 16, batch 3750, loss[loss=0.2449, simple_loss=0.3173, pruned_loss=0.08623, over 18199.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.293, pruned_loss=0.06901, over 3834852.21 frames. 
], batch size: 83, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:34:00,238 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-02 07:34:15,822 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8232, 1.3969, 1.5767, 1.7195, 3.3920, 1.2045, 2.3159, 3.7824], + device='cuda:0'), covar=tensor([0.0448, 0.2705, 0.2675, 0.1699, 0.0655, 0.2463, 0.1380, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0388, 0.0352, 0.0371, 0.0337, 0.0359, 0.0340, 0.0354, 0.0377], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 07:34:19,089 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.705e+02 5.517e+02 6.973e+02 1.532e+03, threshold=1.103e+03, percent-clipped=3.0 +2023-04-02 07:34:45,448 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:34:46,185 INFO [train.py:903] (0/4) Epoch 16, batch 3800, loss[loss=0.2301, simple_loss=0.2918, pruned_loss=0.08419, over 19398.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.293, pruned_loss=0.06945, over 3819978.86 frames. ], batch size: 48, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:06,437 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106235.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:18,156 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 07:35:18,458 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106244.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:36,738 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106260.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,039 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106269.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,820 INFO [train.py:903] (0/4) Epoch 16, batch 3850, loss[loss=0.2133, simple_loss=0.296, pruned_loss=0.06532, over 19652.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2935, pruned_loss=0.06985, over 3826535.65 frames. ], batch size: 53, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:56,804 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:09,573 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:23,153 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.267e+02 6.364e+02 7.734e+02 1.610e+03, threshold=1.273e+03, percent-clipped=5.0 +2023-04-02 07:36:50,364 INFO [train.py:903] (0/4) Epoch 16, batch 3900, loss[loss=0.2383, simple_loss=0.3086, pruned_loss=0.08399, over 18223.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2941, pruned_loss=0.07024, over 3832529.63 frames. ], batch size: 84, lr: 5.17e-03, grad_scale: 16.0 +2023-04-02 07:37:32,141 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106354.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:37:53,350 INFO [train.py:903] (0/4) Epoch 16, batch 3950, loss[loss=0.2343, simple_loss=0.3009, pruned_loss=0.08383, over 19745.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2954, pruned_loss=0.07071, over 3823600.95 frames. 
], batch size: 51, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:37:58,036 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 07:38:28,357 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.931e+02 6.019e+02 7.839e+02 1.392e+03, threshold=1.204e+03, percent-clipped=3.0 +2023-04-02 07:38:54,928 INFO [train.py:903] (0/4) Epoch 16, batch 4000, loss[loss=0.2268, simple_loss=0.3064, pruned_loss=0.07366, over 19521.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2954, pruned_loss=0.07027, over 3837686.86 frames. ], batch size: 54, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:03,764 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-04-02 07:39:19,315 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:39:23,114 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2108, 1.2750, 1.2050, 1.0312, 1.0927, 1.0814, 0.0683, 0.3879], + device='cuda:0'), covar=tensor([0.0590, 0.0565, 0.0345, 0.0485, 0.1070, 0.0530, 0.1082, 0.0926], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0345, 0.0343, 0.0371, 0.0445, 0.0375, 0.0323, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:39:45,487 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 07:39:56,761 INFO [train.py:903] (0/4) Epoch 16, batch 4050, loss[loss=0.2266, simple_loss=0.3041, pruned_loss=0.07456, over 19313.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2951, pruned_loss=0.07008, over 3831423.02 frames. ], batch size: 66, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:59,360 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:40:22,946 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5002, 3.0925, 2.2678, 2.4691, 2.4231, 2.6717, 1.0718, 2.2908], + device='cuda:0'), covar=tensor([0.0567, 0.0533, 0.0636, 0.0954, 0.0862, 0.0994, 0.1148, 0.0847], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0346, 0.0345, 0.0372, 0.0447, 0.0378, 0.0325, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:40:34,186 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.509e+02 4.912e+02 6.033e+02 8.007e+02 1.551e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-02 07:40:59,901 INFO [train.py:903] (0/4) Epoch 16, batch 4100, loss[loss=0.2068, simple_loss=0.2931, pruned_loss=0.06018, over 19549.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.294, pruned_loss=0.06968, over 3822406.58 frames. ], batch size: 56, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:41:07,082 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106525.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:15,865 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:34,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 07:41:36,425 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106550.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:41,938 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106554.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:45,517 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:42:02,445 INFO [train.py:903] (0/4) Epoch 16, batch 4150, loss[loss=0.221, simple_loss=0.3066, pruned_loss=0.06768, over 19559.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2945, pruned_loss=0.07037, over 3811201.16 frames. ], batch size: 61, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:42:20,385 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4011, 1.4850, 1.6198, 1.6245, 2.1990, 2.1766, 2.3081, 0.8313], + device='cuda:0'), covar=tensor([0.2115, 0.3822, 0.2462, 0.1648, 0.1438, 0.1846, 0.1242, 0.4067], + device='cuda:0'), in_proj_covar=tensor([0.0506, 0.0606, 0.0658, 0.0458, 0.0602, 0.0506, 0.0647, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:42:35,027 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1261, 2.0003, 1.7686, 1.6374, 1.5116, 1.6463, 0.4548, 1.0460], + device='cuda:0'), covar=tensor([0.0567, 0.0570, 0.0414, 0.0731, 0.0994, 0.0914, 0.1091, 0.0915], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0346, 0.0343, 0.0371, 0.0446, 0.0375, 0.0324, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 07:42:36,771 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.926e+02 6.073e+02 7.216e+02 1.422e+03, threshold=1.215e+03, percent-clipped=1.0 +2023-04-02 07:43:03,952 INFO [train.py:903] (0/4) Epoch 16, batch 4200, loss[loss=0.2037, simple_loss=0.2922, pruned_loss=0.05763, over 18057.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2959, pruned_loss=0.07083, over 3823072.97 frames. ], batch size: 83, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:43:11,042 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 07:43:15,841 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:05,135 INFO [train.py:903] (0/4) Epoch 16, batch 4250, loss[loss=0.241, simple_loss=0.3086, pruned_loss=0.08672, over 19569.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2961, pruned_loss=0.07091, over 3812144.41 frames. ], batch size: 61, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:44:20,446 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 07:44:32,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-02 07:44:41,212 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:42,276 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.502e+02 4.922e+02 6.114e+02 7.451e+02 1.808e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 07:45:08,519 INFO [train.py:903] (0/4) Epoch 16, batch 4300, loss[loss=0.2414, simple_loss=0.3217, pruned_loss=0.0806, over 19742.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2964, pruned_loss=0.07136, over 3822928.00 frames. ], batch size: 63, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:45:23,513 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=5.05 vs. limit=5.0 +2023-04-02 07:45:38,967 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:45:58,070 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 07:46:11,647 INFO [train.py:903] (0/4) Epoch 16, batch 4350, loss[loss=0.2404, simple_loss=0.3128, pruned_loss=0.08403, over 19559.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2966, pruned_loss=0.07136, over 3817206.32 frames. ], batch size: 61, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:46:46,924 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.102e+02 6.153e+02 8.101e+02 2.041e+03, threshold=1.231e+03, percent-clipped=8.0 +2023-04-02 07:46:48,701 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 07:47:03,099 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:04,401 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 07:47:06,320 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:09,593 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:14,208 INFO [train.py:903] (0/4) Epoch 16, batch 4400, loss[loss=0.2277, simple_loss=0.305, pruned_loss=0.07521, over 18810.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07086, over 3816584.97 frames. ], batch size: 74, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:47:33,138 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106835.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:36,416 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 07:47:46,711 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 07:48:17,289 INFO [train.py:903] (0/4) Epoch 16, batch 4450, loss[loss=0.217, simple_loss=0.2972, pruned_loss=0.06837, over 19788.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2951, pruned_loss=0.07044, over 3807801.21 frames. 
], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:48:53,920 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.966e+02 6.259e+02 8.420e+02 1.632e+03, threshold=1.252e+03, percent-clipped=6.0 +2023-04-02 07:49:18,991 INFO [train.py:903] (0/4) Epoch 16, batch 4500, loss[loss=0.2342, simple_loss=0.3131, pruned_loss=0.07769, over 17744.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2955, pruned_loss=0.07036, over 3817490.33 frames. ], batch size: 101, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:49:34,353 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106931.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:50:23,572 INFO [train.py:903] (0/4) Epoch 16, batch 4550, loss[loss=0.1757, simple_loss=0.2534, pruned_loss=0.04904, over 19610.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07058, over 3812749.80 frames. ], batch size: 50, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:50:31,659 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 07:50:44,431 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-02 07:50:54,389 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 07:50:59,933 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 4.883e+02 5.860e+02 7.136e+02 1.225e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-02 07:51:02,615 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:51:27,843 INFO [train.py:903] (0/4) Epoch 16, batch 4600, loss[loss=0.1978, simple_loss=0.2702, pruned_loss=0.06275, over 19181.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07116, over 3797758.74 frames. ], batch size: 42, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:51:34,890 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,051 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,793 INFO [train.py:903] (0/4) Epoch 16, batch 4650, loss[loss=0.1982, simple_loss=0.2839, pruned_loss=0.05621, over 19460.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07127, over 3812725.76 frames. ], batch size: 64, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:52:47,200 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 07:52:59,800 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 07:53:01,348 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:53:07,461 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.220e+02 5.459e+02 6.581e+02 8.910e+02 1.601e+03, threshold=1.316e+03, percent-clipped=6.0 +2023-04-02 07:53:31,767 INFO [train.py:903] (0/4) Epoch 16, batch 4700, loss[loss=0.2172, simple_loss=0.2986, pruned_loss=0.06791, over 19683.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2971, pruned_loss=0.07128, over 3817967.25 frames. 
], batch size: 53, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:53:55,910 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 07:54:12,255 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-02 07:54:20,564 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107158.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 07:54:36,806 INFO [train.py:903] (0/4) Epoch 16, batch 4750, loss[loss=0.1645, simple_loss=0.2455, pruned_loss=0.04178, over 19400.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.296, pruned_loss=0.07047, over 3824791.11 frames. ], batch size: 48, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:54:37,106 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107170.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:54:58,932 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:11,905 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.545e+02 6.621e+02 8.650e+02 1.971e+03, threshold=1.324e+03, percent-clipped=6.0 +2023-04-02 07:55:29,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:40,210 INFO [train.py:903] (0/4) Epoch 16, batch 4800, loss[loss=0.2101, simple_loss=0.2875, pruned_loss=0.06633, over 19859.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.07067, over 3832317.57 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 8.0 +2023-04-02 07:55:50,298 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 07:56:41,878 INFO [train.py:903] (0/4) Epoch 16, batch 4850, loss[loss=0.2207, simple_loss=0.3014, pruned_loss=0.07003, over 19521.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2959, pruned_loss=0.07049, over 3837455.76 frames. ], batch size: 64, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:07,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 07:57:19,837 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.930e+02 6.295e+02 8.478e+02 1.665e+03, threshold=1.259e+03, percent-clipped=1.0 +2023-04-02 07:57:27,481 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 07:57:32,764 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 07:57:32,791 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 07:57:43,096 INFO [train.py:903] (0/4) Epoch 16, batch 4900, loss[loss=0.2204, simple_loss=0.3049, pruned_loss=0.06799, over 18227.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2963, pruned_loss=0.07085, over 3833290.84 frames. ], batch size: 84, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:43,121 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 07:57:43,929 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.04 vs. limit=5.0 +2023-04-02 07:58:04,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 07:58:46,428 INFO [train.py:903] (0/4) Epoch 16, batch 4950, loss[loss=0.2056, simple_loss=0.2916, pruned_loss=0.05975, over 19587.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.07113, over 3822728.35 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:04,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 07:59:22,737 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 5.680e+02 6.678e+02 8.404e+02 2.020e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 07:59:27,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 07:59:48,956 INFO [train.py:903] (0/4) Epoch 16, batch 5000, loss[loss=0.2673, simple_loss=0.3376, pruned_loss=0.09855, over 18055.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2963, pruned_loss=0.07102, over 3811615.79 frames. ], batch size: 83, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:58,942 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 08:00:09,000 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 08:00:50,457 INFO [train.py:903] (0/4) Epoch 16, batch 5050, loss[loss=0.1705, simple_loss=0.2484, pruned_loss=0.04628, over 19724.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2967, pruned_loss=0.07104, over 3817786.19 frames. ], batch size: 46, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:01:27,874 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.470e+02 6.454e+02 8.047e+02 2.188e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 08:01:27,916 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 08:01:30,381 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107502.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:01:44,961 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:01:51,915 INFO [train.py:903] (0/4) Epoch 16, batch 5100, loss[loss=0.2283, simple_loss=0.3084, pruned_loss=0.07412, over 19569.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2974, pruned_loss=0.07153, over 3824371.57 frames. ], batch size: 61, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:02:02,147 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:02:04,287 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 08:02:08,815 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 08:02:13,300 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 08:02:54,727 INFO [train.py:903] (0/4) Epoch 16, batch 5150, loss[loss=0.1993, simple_loss=0.276, pruned_loss=0.06131, over 19845.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2968, pruned_loss=0.071, over 3835501.27 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:03:08,977 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 08:03:31,799 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.461e+02 4.950e+02 6.087e+02 7.766e+02 1.818e+03, threshold=1.217e+03, percent-clipped=6.0 +2023-04-02 08:03:43,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:03:54,420 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107617.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:03:57,185 INFO [train.py:903] (0/4) Epoch 16, batch 5200, loss[loss=0.2191, simple_loss=0.2952, pruned_loss=0.0715, over 19667.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07059, over 3835479.69 frames. ], batch size: 58, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:04:08,672 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:04:09,526 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 08:04:55,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 08:04:59,579 INFO [train.py:903] (0/4) Epoch 16, batch 5250, loss[loss=0.2268, simple_loss=0.3074, pruned_loss=0.07312, over 18758.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2963, pruned_loss=0.0709, over 3838401.47 frames. ], batch size: 74, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:05:07,710 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:05:36,443 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.464e+02 6.488e+02 8.647e+02 1.622e+03, threshold=1.298e+03, percent-clipped=8.0 +2023-04-02 08:05:42,433 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7243, 1.5948, 1.4825, 2.1457, 1.6361, 2.0963, 2.1551, 1.8488], + device='cuda:0'), covar=tensor([0.0755, 0.0858, 0.1000, 0.0778, 0.0839, 0.0676, 0.0766, 0.0642], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0221, 0.0222, 0.0245, 0.0225, 0.0207, 0.0189, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 08:06:00,481 INFO [train.py:903] (0/4) Epoch 16, batch 5300, loss[loss=0.2196, simple_loss=0.2972, pruned_loss=0.07107, over 19736.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.295, pruned_loss=0.06971, over 3844923.99 frames. ], batch size: 51, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:06:19,308 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 08:06:28,019 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2177, 5.5647, 3.1420, 4.8790, 1.1928, 5.6775, 5.5959, 5.8096], + device='cuda:0'), covar=tensor([0.0359, 0.0826, 0.1826, 0.0642, 0.3932, 0.0482, 0.0649, 0.0707], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0385, 0.0463, 0.0328, 0.0391, 0.0399, 0.0398, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:06:28,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4120, 1.6092, 2.0081, 1.6919, 3.4319, 2.7020, 3.6135, 1.8085], + device='cuda:0'), covar=tensor([0.2456, 0.4185, 0.2628, 0.1889, 0.1362, 0.1924, 0.1450, 0.3592], + device='cuda:0'), in_proj_covar=tensor([0.0507, 0.0609, 0.0659, 0.0461, 0.0607, 0.0508, 0.0649, 0.0520], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:06:45,108 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:07:03,282 INFO [train.py:903] (0/4) Epoch 16, batch 5350, loss[loss=0.2379, simple_loss=0.3196, pruned_loss=0.07813, over 19759.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2962, pruned_loss=0.06997, over 3850104.02 frames. ], batch size: 63, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:07:37,239 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 08:07:39,703 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3106, 3.0266, 2.1469, 2.7867, 0.7900, 2.9627, 2.9112, 2.9798], + device='cuda:0'), covar=tensor([0.1166, 0.1346, 0.2007, 0.0894, 0.3639, 0.0983, 0.1066, 0.1314], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0384, 0.0463, 0.0328, 0.0391, 0.0399, 0.0399, 0.0428], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:07:40,608 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.506e+02 5.884e+02 6.805e+02 1.610e+03, threshold=1.177e+03, percent-clipped=2.0 +2023-04-02 08:08:06,498 INFO [train.py:903] (0/4) Epoch 16, batch 5400, loss[loss=0.2332, simple_loss=0.3098, pruned_loss=0.07832, over 19624.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2958, pruned_loss=0.07015, over 3851657.43 frames. ], batch size: 57, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:08,379 INFO [train.py:903] (0/4) Epoch 16, batch 5450, loss[loss=0.2096, simple_loss=0.2953, pruned_loss=0.06193, over 19667.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2958, pruned_loss=0.07021, over 3852099.18 frames. 
], batch size: 60, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:10,565 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107872.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:11,922 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107873.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:09:26,836 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:44,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107898.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:09:46,448 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.026e+02 5.228e+02 6.592e+02 8.752e+02 1.860e+03, threshold=1.318e+03, percent-clipped=11.0 +2023-04-02 08:09:58,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:10:10,018 INFO [train.py:903] (0/4) Epoch 16, batch 5500, loss[loss=0.2239, simple_loss=0.2933, pruned_loss=0.07727, over 19414.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2978, pruned_loss=0.07168, over 3837741.20 frames. ], batch size: 48, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:10:12,869 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2381, 2.2064, 2.5465, 3.0808, 2.1059, 2.8861, 2.6470, 2.2087], + device='cuda:0'), covar=tensor([0.4212, 0.4210, 0.1716, 0.2487, 0.4624, 0.2104, 0.4399, 0.3334], + device='cuda:0'), in_proj_covar=tensor([0.0846, 0.0892, 0.0682, 0.0905, 0.0826, 0.0763, 0.0812, 0.0746], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:10:27,622 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8232, 3.2793, 3.3513, 3.3450, 1.3992, 3.2084, 2.8321, 3.1256], + device='cuda:0'), covar=tensor([0.1576, 0.0901, 0.0734, 0.0828, 0.4934, 0.0808, 0.0757, 0.1197], + device='cuda:0'), in_proj_covar=tensor([0.0736, 0.0678, 0.0879, 0.0761, 0.0784, 0.0630, 0.0524, 0.0817], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 08:10:34,949 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 08:10:43,660 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0445, 1.7200, 1.6782, 2.0196, 1.8357, 1.7772, 1.5981, 1.9384], + device='cuda:0'), covar=tensor([0.1018, 0.1603, 0.1560, 0.0997, 0.1314, 0.0533, 0.1380, 0.0743], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0244, 0.0299, 0.0248, 0.0292, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:11:00,097 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-02 08:11:06,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6581, 1.2228, 1.2879, 1.5174, 1.1456, 1.4367, 1.2182, 1.4784], + device='cuda:0'), covar=tensor([0.1052, 0.1229, 0.1490, 0.0929, 0.1234, 0.0561, 0.1463, 0.0763], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0353, 0.0301, 0.0244, 0.0298, 0.0248, 0.0291, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:11:13,110 INFO [train.py:903] (0/4) Epoch 16, batch 5550, loss[loss=0.2394, simple_loss=0.3084, pruned_loss=0.08522, over 19013.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2974, pruned_loss=0.07139, over 3822747.77 frames. ], batch size: 75, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:11:18,631 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 08:11:34,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:11:50,037 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.933e+02 6.287e+02 7.608e+02 2.106e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-02 08:11:50,201 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-108000.pt +2023-04-02 08:12:10,685 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 08:12:17,775 INFO [train.py:903] (0/4) Epoch 16, batch 5600, loss[loss=0.1971, simple_loss=0.2862, pruned_loss=0.05401, over 19654.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2975, pruned_loss=0.07148, over 3828580.98 frames. ], batch size: 55, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:12:19,189 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:12:48,000 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9383, 4.3611, 4.6737, 4.6434, 1.6129, 4.3721, 3.7444, 4.3342], + device='cuda:0'), covar=tensor([0.1504, 0.0734, 0.0569, 0.0647, 0.5795, 0.0683, 0.0676, 0.1126], + device='cuda:0'), in_proj_covar=tensor([0.0741, 0.0680, 0.0883, 0.0763, 0.0789, 0.0634, 0.0527, 0.0821], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 08:12:49,340 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3062, 1.4950, 1.8051, 1.5835, 2.9347, 2.2545, 3.0338, 1.2792], + device='cuda:0'), covar=tensor([0.2383, 0.3979, 0.2461, 0.1803, 0.1297, 0.1966, 0.1331, 0.3979], + device='cuda:0'), in_proj_covar=tensor([0.0510, 0.0608, 0.0661, 0.0460, 0.0606, 0.0509, 0.0647, 0.0519], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:13:17,742 INFO [train.py:903] (0/4) Epoch 16, batch 5650, loss[loss=0.2649, simple_loss=0.3282, pruned_loss=0.1008, over 19704.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2975, pruned_loss=0.07122, over 3823524.52 frames. 
], batch size: 63, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:13:34,293 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3134, 3.0058, 2.0903, 2.6853, 0.8461, 2.9286, 2.8393, 2.9412], + device='cuda:0'), covar=tensor([0.1108, 0.1425, 0.2315, 0.1084, 0.4084, 0.1084, 0.1215, 0.1414], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0387, 0.0464, 0.0329, 0.0394, 0.0402, 0.0401, 0.0429], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:13:55,624 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108099.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:13:56,645 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.688e+02 6.420e+02 8.034e+02 1.662e+03, threshold=1.284e+03, percent-clipped=4.0 +2023-04-02 08:14:03,619 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 08:14:21,000 INFO [train.py:903] (0/4) Epoch 16, batch 5700, loss[loss=0.2329, simple_loss=0.315, pruned_loss=0.07536, over 18761.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2975, pruned_loss=0.07169, over 3811830.47 frames. ], batch size: 74, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:14:41,782 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:15:22,677 INFO [train.py:903] (0/4) Epoch 16, batch 5750, loss[loss=0.1849, simple_loss=0.2637, pruned_loss=0.05306, over 19772.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2974, pruned_loss=0.07132, over 3823066.70 frames. ], batch size: 46, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:15:22,708 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 08:15:33,017 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 08:15:36,710 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 08:15:39,369 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1328, 1.8038, 1.4351, 1.1137, 1.6065, 1.0774, 1.1433, 1.6486], + device='cuda:0'), covar=tensor([0.0728, 0.0756, 0.0944, 0.0737, 0.0473, 0.1254, 0.0571, 0.0341], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0308, 0.0328, 0.0254, 0.0240, 0.0329, 0.0290, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:16:00,581 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.860e+02 6.382e+02 7.922e+02 1.732e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-02 08:16:18,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108214.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:16:26,740 INFO [train.py:903] (0/4) Epoch 16, batch 5800, loss[loss=0.2339, simple_loss=0.3061, pruned_loss=0.08083, over 19484.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2975, pruned_loss=0.07147, over 3806715.58 frames. 
], batch size: 64, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:16:54,058 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:25,828 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:27,716 INFO [train.py:903] (0/4) Epoch 16, batch 5850, loss[loss=0.2493, simple_loss=0.3232, pruned_loss=0.08769, over 19561.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2977, pruned_loss=0.07175, over 3802411.57 frames. ], batch size: 61, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:05,292 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.738e+02 5.463e+02 6.888e+02 8.802e+02 1.964e+03, threshold=1.378e+03, percent-clipped=5.0 +2023-04-02 08:18:28,340 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 08:18:29,520 INFO [train.py:903] (0/4) Epoch 16, batch 5900, loss[loss=0.1961, simple_loss=0.2904, pruned_loss=0.05092, over 19794.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.298, pruned_loss=0.07182, over 3803372.65 frames. ], batch size: 56, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:52,011 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 08:18:59,398 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:19:30,982 INFO [train.py:903] (0/4) Epoch 16, batch 5950, loss[loss=0.1807, simple_loss=0.2645, pruned_loss=0.04848, over 19836.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2975, pruned_loss=0.0715, over 3800457.20 frames. ], batch size: 52, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:19:59,876 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:09,618 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 5.248e+02 6.439e+02 7.965e+02 2.252e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-02 08:20:30,472 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:35,475 INFO [train.py:903] (0/4) Epoch 16, batch 6000, loss[loss=0.2849, simple_loss=0.3467, pruned_loss=0.1115, over 18244.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.07111, over 3808621.28 frames. ], batch size: 83, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:20:35,476 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 08:20:47,900 INFO [train.py:937] (0/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2723, pruned_loss=0.03545, over 944034.00 frames. +2023-04-02 08:20:47,901 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 08:21:06,653 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9616, 3.4129, 2.0316, 2.0337, 3.0808, 1.7109, 1.5127, 2.1988], + device='cuda:0'), covar=tensor([0.1368, 0.0587, 0.1027, 0.0827, 0.0502, 0.1250, 0.0914, 0.0669], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0308, 0.0329, 0.0254, 0.0241, 0.0330, 0.0290, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:21:11,132 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-02 08:21:25,601 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:21:33,880 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-02 08:21:51,714 INFO [train.py:903] (0/4) Epoch 16, batch 6050, loss[loss=0.1913, simple_loss=0.2798, pruned_loss=0.05144, over 19650.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2971, pruned_loss=0.07145, over 3820022.29 frames. ], batch size: 55, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:21:52,152 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108470.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:02,504 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8611, 1.3479, 1.1010, 0.9809, 1.1938, 0.9928, 0.9307, 1.2181], + device='cuda:0'), covar=tensor([0.0576, 0.0864, 0.1033, 0.0701, 0.0525, 0.1218, 0.0571, 0.0439], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0307, 0.0330, 0.0254, 0.0242, 0.0329, 0.0290, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:22:18,245 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 08:22:22,121 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108495.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:28,586 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.272e+02 6.402e+02 8.353e+02 1.883e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 08:22:53,807 INFO [train.py:903] (0/4) Epoch 16, batch 6100, loss[loss=0.1908, simple_loss=0.2628, pruned_loss=0.05941, over 19112.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2969, pruned_loss=0.07134, over 3821739.68 frames. ], batch size: 42, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:23:56,091 INFO [train.py:903] (0/4) Epoch 16, batch 6150, loss[loss=0.2856, simple_loss=0.3522, pruned_loss=0.1095, over 19596.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2967, pruned_loss=0.07125, over 3831552.01 frames. ], batch size: 61, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:24:26,267 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 08:24:28,829 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:24:35,758 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.845e+02 6.012e+02 7.583e+02 1.796e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 08:24:58,796 INFO [train.py:903] (0/4) Epoch 16, batch 6200, loss[loss=0.2641, simple_loss=0.3419, pruned_loss=0.09313, over 18112.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2965, pruned_loss=0.07135, over 3824633.46 frames. ], batch size: 83, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:02,198 INFO [train.py:903] (0/4) Epoch 16, batch 6250, loss[loss=0.2235, simple_loss=0.302, pruned_loss=0.07249, over 19553.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2968, pruned_loss=0.07147, over 3823552.95 frames. 
], batch size: 56, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:22,913 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108687.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:26:27,145 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 08:26:29,276 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2310, 2.2393, 2.5236, 3.1126, 2.2212, 2.8977, 2.6796, 2.2936], + device='cuda:0'), covar=tensor([0.3930, 0.3868, 0.1630, 0.2294, 0.4221, 0.1936, 0.3792, 0.2898], + device='cuda:0'), in_proj_covar=tensor([0.0847, 0.0891, 0.0679, 0.0906, 0.0826, 0.0764, 0.0813, 0.0745], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:26:34,479 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 08:26:40,079 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 4.966e+02 6.026e+02 7.805e+02 1.726e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 08:27:04,690 INFO [train.py:903] (0/4) Epoch 16, batch 6300, loss[loss=0.1846, simple_loss=0.2619, pruned_loss=0.05369, over 19770.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.297, pruned_loss=0.07131, over 3838833.29 frames. ], batch size: 47, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:27:07,178 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5455, 4.0387, 4.2044, 4.2161, 1.6768, 3.9671, 3.3974, 3.8850], + device='cuda:0'), covar=tensor([0.1573, 0.0933, 0.0649, 0.0687, 0.5335, 0.0911, 0.0670, 0.1231], + device='cuda:0'), in_proj_covar=tensor([0.0741, 0.0684, 0.0886, 0.0768, 0.0789, 0.0638, 0.0532, 0.0820], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 08:27:10,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9232, 4.4772, 2.6906, 4.0092, 1.0296, 4.2951, 4.2291, 4.3539], + device='cuda:0'), covar=tensor([0.0543, 0.0971, 0.1995, 0.0768, 0.3947, 0.0711, 0.0856, 0.0993], + device='cuda:0'), in_proj_covar=tensor([0.0466, 0.0381, 0.0461, 0.0329, 0.0388, 0.0398, 0.0397, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:28:06,338 INFO [train.py:903] (0/4) Epoch 16, batch 6350, loss[loss=0.2072, simple_loss=0.2817, pruned_loss=0.06638, over 19755.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2972, pruned_loss=0.07125, over 3846188.77 frames. ], batch size: 45, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:28:38,916 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:47,231 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.125e+02 6.267e+02 8.166e+02 1.468e+03, threshold=1.253e+03, percent-clipped=8.0 +2023-04-02 08:28:48,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:54,181 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. 
limit=2.0 +2023-04-02 08:28:56,939 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:29:01,718 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0456, 3.2831, 1.9570, 1.9699, 2.8488, 1.6712, 1.4150, 2.2221], + device='cuda:0'), covar=tensor([0.1336, 0.0549, 0.1094, 0.0840, 0.0569, 0.1132, 0.0976, 0.0616], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0306, 0.0330, 0.0254, 0.0242, 0.0329, 0.0289, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:29:09,787 INFO [train.py:903] (0/4) Epoch 16, batch 6400, loss[loss=0.1962, simple_loss=0.2604, pruned_loss=0.06599, over 19181.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2954, pruned_loss=0.07027, over 3848249.56 frames. ], batch size: 42, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:29:23,218 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-02 08:30:12,289 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8440, 1.9067, 2.0008, 1.8015, 4.3306, 1.1213, 2.5848, 4.7499], + device='cuda:0'), covar=tensor([0.0474, 0.2382, 0.2440, 0.1782, 0.0807, 0.2598, 0.1339, 0.0196], + device='cuda:0'), in_proj_covar=tensor([0.0384, 0.0352, 0.0373, 0.0332, 0.0358, 0.0339, 0.0358, 0.0375], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:30:14,208 INFO [train.py:903] (0/4) Epoch 16, batch 6450, loss[loss=0.2132, simple_loss=0.2856, pruned_loss=0.0704, over 19612.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2956, pruned_loss=0.07103, over 3820762.94 frames. ], batch size: 50, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:30:52,173 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.109e+02 6.250e+02 7.655e+02 1.750e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-02 08:31:00,312 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 08:31:04,028 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:31:18,889 INFO [train.py:903] (0/4) Epoch 16, batch 6500, loss[loss=0.2105, simple_loss=0.2893, pruned_loss=0.06585, over 19667.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2942, pruned_loss=0.07016, over 3817259.21 frames. ], batch size: 53, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:31:24,480 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 08:31:40,947 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:32:20,063 INFO [train.py:903] (0/4) Epoch 16, batch 6550, loss[loss=0.2215, simple_loss=0.3035, pruned_loss=0.06973, over 19596.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2948, pruned_loss=0.07053, over 3812619.83 frames. 
], batch size: 61, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:32:53,527 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5992, 1.7068, 1.9350, 2.0923, 1.4761, 1.8952, 2.0010, 1.8237], + device='cuda:0'), covar=tensor([0.3987, 0.3304, 0.1689, 0.2017, 0.3463, 0.1887, 0.4335, 0.2970], + device='cuda:0'), in_proj_covar=tensor([0.0849, 0.0892, 0.0682, 0.0905, 0.0829, 0.0767, 0.0815, 0.0746], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:32:58,929 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.624e+02 5.967e+02 6.821e+02 1.232e+03, threshold=1.193e+03, percent-clipped=0.0 +2023-04-02 08:33:21,265 INFO [train.py:903] (0/4) Epoch 16, batch 6600, loss[loss=0.2551, simple_loss=0.3312, pruned_loss=0.08955, over 19780.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2956, pruned_loss=0.07072, over 3812890.46 frames. ], batch size: 56, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:33:35,340 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 08:33:52,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8268, 1.9570, 2.1545, 2.5346, 1.7687, 2.3857, 2.2907, 1.9400], + device='cuda:0'), covar=tensor([0.4068, 0.3572, 0.1761, 0.2240, 0.3940, 0.1958, 0.4055, 0.3145], + device='cuda:0'), in_proj_covar=tensor([0.0847, 0.0892, 0.0682, 0.0905, 0.0828, 0.0766, 0.0815, 0.0747], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:34:03,956 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:05,054 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109055.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:34:08,530 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:22,850 INFO [train.py:903] (0/4) Epoch 16, batch 6650, loss[loss=0.2169, simple_loss=0.3012, pruned_loss=0.06633, over 19597.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2961, pruned_loss=0.07111, over 3829507.71 frames. ], batch size: 61, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:34:41,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109083.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:35:01,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.420e+02 6.464e+02 8.289e+02 2.034e+03, threshold=1.293e+03, percent-clipped=5.0 +2023-04-02 08:35:27,577 INFO [train.py:903] (0/4) Epoch 16, batch 6700, loss[loss=0.23, simple_loss=0.3057, pruned_loss=0.07716, over 19596.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2956, pruned_loss=0.07051, over 3832745.70 frames. 
], batch size: 52, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:35:32,578 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6204, 1.4916, 1.4613, 2.1030, 1.5989, 2.0385, 2.0563, 1.7492], + device='cuda:0'), covar=tensor([0.0844, 0.0950, 0.1101, 0.0819, 0.0923, 0.0680, 0.0829, 0.0682], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0224, 0.0243, 0.0225, 0.0206, 0.0188, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 08:36:06,060 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:19,905 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:25,879 INFO [train.py:903] (0/4) Epoch 16, batch 6750, loss[loss=0.2799, simple_loss=0.3492, pruned_loss=0.1053, over 19680.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2944, pruned_loss=0.06976, over 3829335.73 frames. ], batch size: 60, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:50,437 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:03,548 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 4.897e+02 5.778e+02 7.062e+02 1.289e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-02 08:37:15,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:24,185 INFO [train.py:903] (0/4) Epoch 16, batch 6800, loss[loss=0.193, simple_loss=0.2846, pruned_loss=0.05073, over 19539.00 frames. ], tot_loss[loss=0.218, simple_loss=0.295, pruned_loss=0.07051, over 3813663.07 frames. ], batch size: 56, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:37:45,699 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-02 08:37:53,863 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-16.pt +2023-04-02 08:38:10,021 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 08:38:11,131 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 08:38:13,448 INFO [train.py:903] (0/4) Epoch 17, batch 0, loss[loss=0.2226, simple_loss=0.3032, pruned_loss=0.07098, over 19683.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3032, pruned_loss=0.07098, over 19683.00 frames. ], batch size: 58, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:38:13,449 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 08:38:24,215 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0017, 1.0347, 1.1997, 1.1405, 1.6043, 1.4863, 1.6585, 0.6652], + device='cuda:0'), covar=tensor([0.1690, 0.2947, 0.1872, 0.1356, 0.1073, 0.1528, 0.0995, 0.3086], + device='cuda:0'), in_proj_covar=tensor([0.0509, 0.0612, 0.0664, 0.0460, 0.0606, 0.0512, 0.0650, 0.0520], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:38:26,006 INFO [train.py:937] (0/4) Epoch 17, validation: loss=0.1721, simple_loss=0.2728, pruned_loss=0.03571, over 944034.00 frames. 
+2023-04-02 08:38:26,007 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 08:38:39,459 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 08:38:51,276 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:29,166 INFO [train.py:903] (0/4) Epoch 17, batch 50, loss[loss=0.2161, simple_loss=0.2988, pruned_loss=0.0667, over 19127.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07113, over 868811.72 frames. ], batch size: 69, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:39:32,717 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 5.319e+02 6.338e+02 7.961e+02 1.981e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-02 08:39:40,225 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4152, 1.5022, 1.7746, 1.5921, 2.5454, 2.1632, 2.6507, 1.2032], + device='cuda:0'), covar=tensor([0.2331, 0.4053, 0.2456, 0.1852, 0.1462, 0.2085, 0.1430, 0.4027], + device='cuda:0'), in_proj_covar=tensor([0.0508, 0.0611, 0.0663, 0.0461, 0.0604, 0.0511, 0.0651, 0.0521], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:39:43,536 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:45,693 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:59,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 08:40:13,464 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:14,438 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:28,820 INFO [train.py:903] (0/4) Epoch 17, batch 100, loss[loss=0.2158, simple_loss=0.2907, pruned_loss=0.07047, over 19677.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2939, pruned_loss=0.06899, over 1543504.69 frames. ], batch size: 53, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:40:36,778 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 08:41:16,045 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6652, 1.5461, 1.5646, 2.1234, 1.7217, 1.8865, 2.0039, 1.8341], + device='cuda:0'), covar=tensor([0.0863, 0.1016, 0.1097, 0.0852, 0.0907, 0.0840, 0.0969, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0222, 0.0225, 0.0245, 0.0226, 0.0207, 0.0190, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 08:41:29,305 INFO [train.py:903] (0/4) Epoch 17, batch 150, loss[loss=0.2406, simple_loss=0.3127, pruned_loss=0.08431, over 19619.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2961, pruned_loss=0.07127, over 2036275.29 frames. 
], batch size: 57, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:41:30,561 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109399.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:41:32,675 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.062e+02 6.078e+02 8.331e+02 1.364e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 08:41:46,300 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3015, 1.5314, 1.7164, 1.5994, 2.9472, 1.2280, 2.3042, 3.2503], + device='cuda:0'), covar=tensor([0.0495, 0.2489, 0.2453, 0.1710, 0.0624, 0.2335, 0.1334, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0384, 0.0350, 0.0373, 0.0333, 0.0359, 0.0339, 0.0359, 0.0377], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:42:22,336 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 08:42:29,336 INFO [train.py:903] (0/4) Epoch 17, batch 200, loss[loss=0.1852, simple_loss=0.2687, pruned_loss=0.05086, over 19846.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2959, pruned_loss=0.0713, over 2418455.44 frames. ], batch size: 52, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:32,822 INFO [train.py:903] (0/4) Epoch 17, batch 250, loss[loss=0.1988, simple_loss=0.2833, pruned_loss=0.05711, over 19667.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2953, pruned_loss=0.07091, over 2740051.96 frames. ], batch size: 55, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:36,237 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.403e+02 5.064e+02 6.047e+02 7.271e+02 1.663e+03, threshold=1.209e+03, percent-clipped=2.0 +2023-04-02 08:43:46,913 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-02 08:43:52,410 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109514.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:44:04,228 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:16,829 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3290, 1.3617, 1.5246, 1.4628, 2.1921, 1.9733, 2.2809, 0.8873], + device='cuda:0'), covar=tensor([0.2279, 0.3976, 0.2481, 0.1939, 0.1435, 0.2025, 0.1318, 0.4098], + device='cuda:0'), in_proj_covar=tensor([0.0504, 0.0608, 0.0660, 0.0459, 0.0603, 0.0509, 0.0647, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:44:35,508 INFO [train.py:903] (0/4) Epoch 17, batch 300, loss[loss=0.2028, simple_loss=0.2875, pruned_loss=0.05903, over 19601.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2948, pruned_loss=0.07048, over 2990383.26 frames. 
], batch size: 57, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:44:37,141 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,067 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:56,598 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7664, 1.6040, 1.5934, 2.3400, 1.8172, 2.0772, 2.2054, 1.8657], + device='cuda:0'), covar=tensor([0.0803, 0.0959, 0.1038, 0.0778, 0.0822, 0.0735, 0.0822, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0244, 0.0226, 0.0207, 0.0189, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 08:45:36,075 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109597.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:45:36,918 INFO [train.py:903] (0/4) Epoch 17, batch 350, loss[loss=0.2228, simple_loss=0.3057, pruned_loss=0.06993, over 19655.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.0706, over 3187685.55 frames. ], batch size: 58, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:45:38,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:45:40,565 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.783e+02 5.858e+02 7.548e+02 1.929e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 08:46:38,846 INFO [train.py:903] (0/4) Epoch 17, batch 400, loss[loss=0.2367, simple_loss=0.3146, pruned_loss=0.07937, over 19325.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2964, pruned_loss=0.07109, over 3327161.15 frames. ], batch size: 66, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:46:49,030 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:09,941 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:16,744 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:17,198 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-02 08:47:20,119 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:24,872 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1543, 3.5590, 3.7244, 3.7555, 1.6342, 3.5135, 3.0978, 3.4475], + device='cuda:0'), covar=tensor([0.1529, 0.1708, 0.0748, 0.0791, 0.5497, 0.1168, 0.0740, 0.1286], + device='cuda:0'), in_proj_covar=tensor([0.0732, 0.0676, 0.0877, 0.0761, 0.0785, 0.0625, 0.0526, 0.0807], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 08:47:40,321 INFO [train.py:903] (0/4) Epoch 17, batch 450, loss[loss=0.2112, simple_loss=0.2955, pruned_loss=0.06349, over 19373.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2956, pruned_loss=0.07064, over 3446908.27 frames. 
], batch size: 70, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:47:44,685 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 5.202e+02 6.539e+02 8.058e+02 1.631e+03, threshold=1.308e+03, percent-clipped=6.0 +2023-04-02 08:48:12,290 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 08:48:12,739 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7164, 1.8174, 1.9901, 2.3655, 1.6877, 2.1920, 2.0909, 1.8437], + device='cuda:0'), covar=tensor([0.3949, 0.3618, 0.1879, 0.2144, 0.3744, 0.2022, 0.4339, 0.3329], + device='cuda:0'), in_proj_covar=tensor([0.0851, 0.0896, 0.0681, 0.0905, 0.0832, 0.0768, 0.0815, 0.0748], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:48:13,421 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 08:48:44,999 INFO [train.py:903] (0/4) Epoch 17, batch 500, loss[loss=0.2024, simple_loss=0.2877, pruned_loss=0.05852, over 19527.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2967, pruned_loss=0.07129, over 3544289.99 frames. ], batch size: 54, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:48:48,151 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 08:48:51,183 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0199, 1.3018, 1.7893, 1.2135, 2.6676, 3.5899, 3.3138, 3.8276], + device='cuda:0'), covar=tensor([0.1760, 0.3635, 0.3064, 0.2395, 0.0530, 0.0179, 0.0197, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0309, 0.0337, 0.0257, 0.0231, 0.0175, 0.0208, 0.0233], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:49:01,157 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-02 08:49:11,992 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109770.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:49:12,970 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:33,451 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 08:49:44,274 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:44,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109795.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:49:47,261 INFO [train.py:903] (0/4) Epoch 17, batch 550, loss[loss=0.2707, simple_loss=0.3402, pruned_loss=0.1006, over 19661.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2967, pruned_loss=0.07099, over 3610648.47 frames. ], batch size: 55, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:49:50,684 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.960e+02 5.345e+02 7.483e+02 9.617e+02 2.288e+03, threshold=1.497e+03, percent-clipped=10.0 +2023-04-02 08:50:48,355 INFO [train.py:903] (0/4) Epoch 17, batch 600, loss[loss=0.2153, simple_loss=0.2953, pruned_loss=0.06763, over 19658.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.07071, over 3667260.13 frames. 
], batch size: 60, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:27,270 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 08:51:49,456 INFO [train.py:903] (0/4) Epoch 17, batch 650, loss[loss=0.207, simple_loss=0.2757, pruned_loss=0.0691, over 19400.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07075, over 3696192.27 frames. ], batch size: 48, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:53,012 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.642e+02 6.698e+02 8.791e+02 1.815e+03, threshold=1.340e+03, percent-clipped=2.0 +2023-04-02 08:52:21,837 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1502, 1.3361, 1.6057, 1.4178, 2.7755, 0.9999, 2.1555, 3.0031], + device='cuda:0'), covar=tensor([0.0530, 0.2534, 0.2486, 0.1634, 0.0698, 0.2260, 0.1093, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0386, 0.0351, 0.0371, 0.0333, 0.0359, 0.0340, 0.0356, 0.0378], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 08:52:27,876 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:43,910 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109941.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:51,842 INFO [train.py:903] (0/4) Epoch 17, batch 700, loss[loss=0.2466, simple_loss=0.3192, pruned_loss=0.08695, over 19694.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2957, pruned_loss=0.07048, over 3722349.47 frames. ], batch size: 59, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:52:58,938 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109952.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:53:06,798 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8101, 1.9312, 2.1466, 1.9820, 3.0952, 2.6747, 3.3527, 1.9408], + device='cuda:0'), covar=tensor([0.1986, 0.3439, 0.2287, 0.1621, 0.1414, 0.1790, 0.1440, 0.3308], + device='cuda:0'), in_proj_covar=tensor([0.0506, 0.0611, 0.0663, 0.0458, 0.0603, 0.0510, 0.0647, 0.0516], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:53:57,830 INFO [train.py:903] (0/4) Epoch 17, batch 750, loss[loss=0.1747, simple_loss=0.2486, pruned_loss=0.05037, over 19746.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2942, pruned_loss=0.06951, over 3730685.73 frames. 
], batch size: 45, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:54:00,304 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-110000.pt +2023-04-02 08:54:02,553 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.697e+02 5.712e+02 7.217e+02 1.165e+03, threshold=1.142e+03, percent-clipped=0.0 +2023-04-02 08:54:25,895 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:33,285 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,175 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110035.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,311 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1941, 2.1002, 1.9072, 1.7701, 1.6132, 1.7700, 0.7650, 1.1815], + device='cuda:0'), covar=tensor([0.0538, 0.0495, 0.0384, 0.0595, 0.0861, 0.0700, 0.0943, 0.0805], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0349, 0.0347, 0.0371, 0.0448, 0.0378, 0.0326, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 08:55:00,572 INFO [train.py:903] (0/4) Epoch 17, batch 800, loss[loss=0.1918, simple_loss=0.2646, pruned_loss=0.05954, over 19371.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.294, pruned_loss=0.0692, over 3753487.45 frames. ], batch size: 47, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:55:04,524 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:05,675 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:10,549 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:14,915 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:55:17,657 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:35,595 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:54,362 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7779, 1.8768, 2.0297, 2.4076, 1.7066, 2.2522, 2.1502, 1.8876], + device='cuda:0'), covar=tensor([0.3878, 0.3422, 0.1757, 0.2039, 0.3578, 0.1847, 0.4434, 0.3152], + device='cuda:0'), in_proj_covar=tensor([0.0848, 0.0897, 0.0680, 0.0903, 0.0830, 0.0764, 0.0811, 0.0746], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 08:56:03,238 INFO [train.py:903] (0/4) Epoch 17, batch 850, loss[loss=0.2147, simple_loss=0.2957, pruned_loss=0.0669, over 19672.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2947, pruned_loss=0.06932, over 3778529.37 frames. 
], batch size: 60, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:56:06,202 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.213e+02 6.563e+02 8.363e+02 2.159e+03, threshold=1.313e+03, percent-clipped=10.0 +2023-04-02 08:56:42,144 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:47,683 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:51,222 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:56,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 08:57:04,363 INFO [train.py:903] (0/4) Epoch 17, batch 900, loss[loss=0.1891, simple_loss=0.2648, pruned_loss=0.05674, over 19482.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2954, pruned_loss=0.06962, over 3793634.55 frames. ], batch size: 49, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:05,929 INFO [train.py:903] (0/4) Epoch 17, batch 950, loss[loss=0.2386, simple_loss=0.3039, pruned_loss=0.08667, over 19632.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2949, pruned_loss=0.06946, over 3799242.74 frames. ], batch size: 50, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:08,516 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 08:58:09,598 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.867e+02 6.255e+02 8.229e+02 2.250e+03, threshold=1.251e+03, percent-clipped=5.0 +2023-04-02 08:59:08,986 INFO [train.py:903] (0/4) Epoch 17, batch 1000, loss[loss=0.2049, simple_loss=0.2763, pruned_loss=0.06675, over 19761.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2958, pruned_loss=0.07003, over 3806091.14 frames. ], batch size: 47, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 08:59:44,521 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5550, 2.0338, 2.0952, 2.7427, 2.1635, 2.6303, 2.5515, 2.4989], + device='cuda:0'), covar=tensor([0.0606, 0.0836, 0.0865, 0.0859, 0.0837, 0.0659, 0.0789, 0.0563], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0244, 0.0226, 0.0207, 0.0187, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 09:00:05,679 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 09:00:11,636 INFO [train.py:903] (0/4) Epoch 17, batch 1050, loss[loss=0.1876, simple_loss=0.2674, pruned_loss=0.05391, over 19727.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.295, pruned_loss=0.06967, over 3811977.65 frames. 
], batch size: 51, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:15,422 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:16,233 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 4.918e+02 6.228e+02 8.050e+02 1.872e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 09:00:28,167 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:40,504 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:44,970 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 09:01:00,140 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:01,449 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:13,840 INFO [train.py:903] (0/4) Epoch 17, batch 1100, loss[loss=0.1895, simple_loss=0.2629, pruned_loss=0.0581, over 19764.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2943, pruned_loss=0.06906, over 3817501.53 frames. ], batch size: 47, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:01:36,065 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3780, 1.1001, 1.4583, 1.4409, 2.9223, 1.0862, 2.1728, 3.2862], + device='cuda:0'), covar=tensor([0.0488, 0.2914, 0.2856, 0.1774, 0.0743, 0.2435, 0.1224, 0.0297], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0355, 0.0375, 0.0335, 0.0360, 0.0342, 0.0357, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:01:54,278 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:09,863 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:16,588 INFO [train.py:903] (0/4) Epoch 17, batch 1150, loss[loss=0.1918, simple_loss=0.27, pruned_loss=0.0568, over 19720.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2931, pruned_loss=0.068, over 3823146.06 frames. ], batch size: 51, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:02:21,341 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.729e+02 5.705e+02 7.310e+02 1.426e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-02 09:02:28,343 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:43,175 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:20,437 INFO [train.py:903] (0/4) Epoch 17, batch 1200, loss[loss=0.2766, simple_loss=0.3371, pruned_loss=0.108, over 19654.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2943, pruned_loss=0.06882, over 3819455.65 frames. ], batch size: 60, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 09:03:22,075 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.13 vs. 
limit=5.0 +2023-04-02 09:03:49,893 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:53,237 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 09:03:55,648 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:19,551 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:23,816 INFO [train.py:903] (0/4) Epoch 17, batch 1250, loss[loss=0.2503, simple_loss=0.3102, pruned_loss=0.09518, over 13652.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2945, pruned_loss=0.06928, over 3805377.77 frames. ], batch size: 136, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:04:28,266 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.334e+02 6.614e+02 7.905e+02 1.343e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-02 09:04:48,466 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9904, 3.5627, 2.4376, 3.1983, 0.7663, 3.4868, 3.4295, 3.5159], + device='cuda:0'), covar=tensor([0.0884, 0.1355, 0.2182, 0.1015, 0.4318, 0.0854, 0.1032, 0.1373], + device='cuda:0'), in_proj_covar=tensor([0.0471, 0.0385, 0.0465, 0.0331, 0.0389, 0.0402, 0.0400, 0.0429], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:04:50,935 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110521.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:05:25,870 INFO [train.py:903] (0/4) Epoch 17, batch 1300, loss[loss=0.1757, simple_loss=0.2587, pruned_loss=0.04633, over 19756.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2944, pruned_loss=0.06946, over 3799929.44 frames. ], batch size: 51, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:02,879 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-04-02 09:06:14,189 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:20,254 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:26,785 INFO [train.py:903] (0/4) Epoch 17, batch 1350, loss[loss=0.2702, simple_loss=0.3394, pruned_loss=0.1005, over 17279.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2945, pruned_loss=0.06986, over 3795477.63 frames. 
], batch size: 101, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:31,260 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 4.458e+02 6.078e+02 8.226e+02 1.667e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 09:06:34,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2939, 1.4113, 1.8357, 1.5196, 2.7150, 2.1135, 2.8194, 1.2744], + device='cuda:0'), covar=tensor([0.2684, 0.4559, 0.2786, 0.2129, 0.1600, 0.2358, 0.1598, 0.4390], + device='cuda:0'), in_proj_covar=tensor([0.0515, 0.0621, 0.0675, 0.0467, 0.0612, 0.0518, 0.0658, 0.0525], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 09:07:07,131 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:26,706 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:28,045 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0903, 1.1823, 1.5978, 1.1131, 2.7500, 3.6288, 3.3992, 3.8348], + device='cuda:0'), covar=tensor([0.1727, 0.3913, 0.3453, 0.2496, 0.0572, 0.0180, 0.0201, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0308, 0.0337, 0.0258, 0.0232, 0.0175, 0.0209, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:07:29,984 INFO [train.py:903] (0/4) Epoch 17, batch 1400, loss[loss=0.2298, simple_loss=0.304, pruned_loss=0.07781, over 19675.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2947, pruned_loss=0.06988, over 3794907.86 frames. ], batch size: 60, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:07:51,399 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110665.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:08,767 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:32,849 INFO [train.py:903] (0/4) Epoch 17, batch 1450, loss[loss=0.2518, simple_loss=0.3216, pruned_loss=0.09102, over 19542.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2963, pruned_loss=0.07065, over 3806640.97 frames. ], batch size: 56, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:08:32,915 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 09:08:38,606 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.047e+02 5.721e+02 7.117e+02 1.861e+03, threshold=1.144e+03, percent-clipped=3.0 +2023-04-02 09:09:35,390 INFO [train.py:903] (0/4) Epoch 17, batch 1500, loss[loss=0.2117, simple_loss=0.2898, pruned_loss=0.06678, over 19584.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2953, pruned_loss=0.07004, over 3795666.44 frames. 
], batch size: 52, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:09:38,219 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:42,846 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:50,101 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:11,713 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:14,276 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110777.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:16,434 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2445, 3.7680, 3.8899, 3.8971, 1.4748, 3.6763, 3.1948, 3.6106], + device='cuda:0'), covar=tensor([0.1601, 0.0984, 0.0662, 0.0732, 0.5642, 0.0966, 0.0731, 0.1172], + device='cuda:0'), in_proj_covar=tensor([0.0742, 0.0686, 0.0885, 0.0771, 0.0792, 0.0638, 0.0534, 0.0818], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 09:10:17,719 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110780.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:27,706 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 09:10:35,425 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:38,203 INFO [train.py:903] (0/4) Epoch 17, batch 1550, loss[loss=0.2258, simple_loss=0.3079, pruned_loss=0.07185, over 19623.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.294, pruned_loss=0.06882, over 3807415.26 frames. ], batch size: 57, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:10:43,464 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:44,892 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.421e+02 5.256e+02 6.667e+02 1.625e+03, threshold=1.051e+03, percent-clipped=2.0 +2023-04-02 09:11:34,882 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:11:42,178 INFO [train.py:903] (0/4) Epoch 17, batch 1600, loss[loss=0.2005, simple_loss=0.2855, pruned_loss=0.05777, over 19696.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2944, pruned_loss=0.06927, over 3811404.83 frames. ], batch size: 59, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:11:42,572 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:05,493 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-02 09:12:05,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6118, 1.3921, 1.3371, 1.9293, 1.5444, 1.8880, 1.8570, 1.6631], + device='cuda:0'), covar=tensor([0.0786, 0.0961, 0.1071, 0.0744, 0.0784, 0.0666, 0.0784, 0.0669], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0222, 0.0244, 0.0226, 0.0208, 0.0188, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 09:12:07,163 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:12,585 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:29,552 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:45,458 INFO [train.py:903] (0/4) Epoch 17, batch 1650, loss[loss=0.184, simple_loss=0.2532, pruned_loss=0.05737, over 19376.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.06927, over 3815958.71 frames. ], batch size: 47, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:12:51,300 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 5.600e+02 6.791e+02 9.379e+02 3.114e+03, threshold=1.358e+03, percent-clipped=15.0 +2023-04-02 09:13:47,402 INFO [train.py:903] (0/4) Epoch 17, batch 1700, loss[loss=0.1875, simple_loss=0.2715, pruned_loss=0.05169, over 19469.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2941, pruned_loss=0.06904, over 3829301.47 frames. ], batch size: 49, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:19,106 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:24,090 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110976.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:29,659 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 09:14:49,199 INFO [train.py:903] (0/4) Epoch 17, batch 1750, loss[loss=0.2302, simple_loss=0.3088, pruned_loss=0.07578, over 18794.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2945, pruned_loss=0.06976, over 3819548.96 frames. ], batch size: 74, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:55,345 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.870e+02 5.792e+02 7.151e+02 1.845e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 09:15:14,921 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:37,652 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111036.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:43,361 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111041.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:51,879 INFO [train.py:903] (0/4) Epoch 17, batch 1800, loss[loss=0.2227, simple_loss=0.3114, pruned_loss=0.06699, over 19673.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2942, pruned_loss=0.06932, over 3826117.54 frames. 
], batch size: 58, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:15:56,905 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:10,622 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:28,040 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:44,373 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:51,937 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 09:16:57,623 INFO [train.py:903] (0/4) Epoch 17, batch 1850, loss[loss=0.2385, simple_loss=0.3115, pruned_loss=0.08277, over 19348.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.294, pruned_loss=0.06916, over 3829657.42 frames. ], batch size: 70, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:16:57,833 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:17:03,773 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.241e+02 6.549e+02 7.790e+02 1.838e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-02 09:17:29,438 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 09:18:00,981 INFO [train.py:903] (0/4) Epoch 17, batch 1900, loss[loss=0.2405, simple_loss=0.322, pruned_loss=0.07951, over 19749.00 frames. ], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.069, over 3831458.51 frames. ], batch size: 63, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:18:01,482 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5048, 1.6136, 1.8592, 1.7165, 2.6889, 2.3044, 2.7793, 1.2796], + device='cuda:0'), covar=tensor([0.2259, 0.4018, 0.2548, 0.1866, 0.1459, 0.2007, 0.1395, 0.4014], + device='cuda:0'), in_proj_covar=tensor([0.0508, 0.0615, 0.0671, 0.0463, 0.0609, 0.0515, 0.0653, 0.0523], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:18:17,324 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 09:18:24,356 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 09:18:34,689 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6092, 2.3575, 1.7164, 1.5523, 2.2128, 1.2843, 1.5041, 2.0214], + device='cuda:0'), covar=tensor([0.1049, 0.0695, 0.0940, 0.0775, 0.0429, 0.1176, 0.0684, 0.0429], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0310, 0.0331, 0.0257, 0.0242, 0.0329, 0.0292, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:18:41,270 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 09:18:49,751 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 09:19:02,952 INFO [train.py:903] (0/4) Epoch 17, batch 1950, loss[loss=0.194, simple_loss=0.2722, pruned_loss=0.05786, over 19860.00 frames. 
], tot_loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06875, over 3836446.57 frames. ], batch size: 52, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:19:08,706 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 5.118e+02 6.155e+02 7.161e+02 1.490e+03, threshold=1.231e+03, percent-clipped=2.0 +2023-04-02 09:19:19,582 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5562, 1.1287, 1.3250, 1.1346, 2.2157, 0.9281, 1.9596, 2.3928], + device='cuda:0'), covar=tensor([0.0718, 0.2760, 0.2851, 0.1718, 0.0851, 0.2173, 0.1075, 0.0502], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0352, 0.0374, 0.0335, 0.0361, 0.0342, 0.0358, 0.0382], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:19:23,914 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:33,807 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0667, 3.5058, 2.0445, 2.0051, 3.0855, 1.7444, 1.5311, 2.2266], + device='cuda:0'), covar=tensor([0.1439, 0.0712, 0.0972, 0.0880, 0.0535, 0.1196, 0.0977, 0.0671], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0310, 0.0332, 0.0257, 0.0243, 0.0331, 0.0293, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:19:44,075 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111230.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:54,145 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 09:19:59,242 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:20:05,432 INFO [train.py:903] (0/4) Epoch 17, batch 2000, loss[loss=0.2194, simple_loss=0.3007, pruned_loss=0.06907, over 19616.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.293, pruned_loss=0.06856, over 3838028.23 frames. ], batch size: 57, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:04,231 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 09:21:07,735 INFO [train.py:903] (0/4) Epoch 17, batch 2050, loss[loss=0.1976, simple_loss=0.2885, pruned_loss=0.05333, over 19522.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2943, pruned_loss=0.06931, over 3811622.10 frames. ], batch size: 54, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:14,880 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.413e+02 6.604e+02 7.706e+02 1.674e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 09:21:22,823 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 09:21:24,037 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-02 09:21:29,059 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8897, 1.6377, 1.5238, 1.9451, 1.6976, 1.6276, 1.5426, 1.7664], + device='cuda:0'), covar=tensor([0.1005, 0.1406, 0.1451, 0.0908, 0.1214, 0.0559, 0.1316, 0.0747], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0356, 0.0302, 0.0245, 0.0299, 0.0249, 0.0297, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:21:34,575 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:21:43,892 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 09:21:46,188 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7793, 1.8266, 2.0226, 2.3127, 1.8019, 2.2264, 2.0953, 1.9143], + device='cuda:0'), covar=tensor([0.3317, 0.2828, 0.1364, 0.1584, 0.2880, 0.1374, 0.3293, 0.2421], + device='cuda:0'), in_proj_covar=tensor([0.0854, 0.0905, 0.0683, 0.0910, 0.0834, 0.0769, 0.0817, 0.0752], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 09:22:05,010 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:06,090 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:09,334 INFO [train.py:903] (0/4) Epoch 17, batch 2100, loss[loss=0.2162, simple_loss=0.2994, pruned_loss=0.06654, over 19656.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.06994, over 3789149.94 frames. ], batch size: 55, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:22:34,028 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:38,049 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 09:23:01,371 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 09:23:10,554 INFO [train.py:903] (0/4) Epoch 17, batch 2150, loss[loss=0.1923, simple_loss=0.2707, pruned_loss=0.05695, over 19832.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2952, pruned_loss=0.06965, over 3795391.28 frames. 
], batch size: 52, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:23:16,487 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.943e+02 5.975e+02 7.359e+02 1.553e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 09:23:25,735 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5301, 1.0189, 1.2506, 1.2235, 2.2184, 0.9428, 1.8978, 2.4750], + device='cuda:0'), covar=tensor([0.0487, 0.2357, 0.2388, 0.1476, 0.0679, 0.1915, 0.0831, 0.0319], + device='cuda:0'), in_proj_covar=tensor([0.0391, 0.0354, 0.0376, 0.0337, 0.0363, 0.0343, 0.0359, 0.0383], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:23:44,967 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0232, 1.2335, 1.7427, 1.1957, 2.6409, 3.5229, 3.2252, 3.7371], + device='cuda:0'), covar=tensor([0.1733, 0.3776, 0.3195, 0.2378, 0.0535, 0.0175, 0.0229, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0307, 0.0337, 0.0256, 0.0230, 0.0175, 0.0209, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:23:56,923 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111435.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:11,378 INFO [train.py:903] (0/4) Epoch 17, batch 2200, loss[loss=0.2207, simple_loss=0.3015, pruned_loss=0.06999, over 19698.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2952, pruned_loss=0.06947, over 3803445.94 frames. ], batch size: 59, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:24:38,963 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:48,168 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9871, 2.5435, 2.5086, 2.8389, 2.6793, 2.5310, 2.3536, 2.9941], + device='cuda:0'), covar=tensor([0.0735, 0.1477, 0.1212, 0.1032, 0.1229, 0.0451, 0.1172, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0357, 0.0303, 0.0246, 0.0300, 0.0250, 0.0298, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:24:58,472 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:09,792 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:15,255 INFO [train.py:903] (0/4) Epoch 17, batch 2250, loss[loss=0.2091, simple_loss=0.2927, pruned_loss=0.06271, over 19531.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2949, pruned_loss=0.06932, over 3810868.93 frames. 
], batch size: 56, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:25:22,029 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.257e+02 6.827e+02 8.687e+02 2.303e+03, threshold=1.365e+03, percent-clipped=8.0 +2023-04-02 09:25:29,370 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9101, 2.6167, 2.4842, 2.8817, 2.6490, 2.5525, 2.4094, 2.9525], + device='cuda:0'), covar=tensor([0.0811, 0.1460, 0.1311, 0.0999, 0.1299, 0.0461, 0.1174, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0357, 0.0303, 0.0246, 0.0300, 0.0250, 0.0298, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:26:16,855 INFO [train.py:903] (0/4) Epoch 17, batch 2300, loss[loss=0.1948, simple_loss=0.274, pruned_loss=0.05778, over 19844.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2934, pruned_loss=0.0689, over 3814519.36 frames. ], batch size: 52, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:26:27,094 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 09:27:05,554 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:18,432 INFO [train.py:903] (0/4) Epoch 17, batch 2350, loss[loss=0.2273, simple_loss=0.3051, pruned_loss=0.07477, over 18257.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.0696, over 3812540.40 frames. ], batch size: 83, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:27:22,400 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111601.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:24,236 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.149e+02 5.946e+02 7.803e+02 1.982e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-02 09:27:54,454 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:58,784 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 09:28:14,862 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 09:28:19,177 INFO [train.py:903] (0/4) Epoch 17, batch 2400, loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.0602, over 18299.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2949, pruned_loss=0.07004, over 3819496.12 frames. ], batch size: 84, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:29:15,420 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:23,213 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-02 09:29:24,901 INFO [train.py:903] (0/4) Epoch 17, batch 2450, loss[loss=0.1872, simple_loss=0.2711, pruned_loss=0.05162, over 19616.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2951, pruned_loss=0.07042, over 3798809.31 frames. 
], batch size: 50, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:29:29,897 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:32,592 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.976e+02 6.292e+02 8.323e+02 1.636e+03, threshold=1.258e+03, percent-clipped=0.0 +2023-04-02 09:29:39,350 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 09:29:47,054 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111716.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:30:27,985 INFO [train.py:903] (0/4) Epoch 17, batch 2500, loss[loss=0.2139, simple_loss=0.2933, pruned_loss=0.06724, over 19407.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2951, pruned_loss=0.07018, over 3811942.12 frames. ], batch size: 70, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:31,105 INFO [train.py:903] (0/4) Epoch 17, batch 2550, loss[loss=0.2107, simple_loss=0.2782, pruned_loss=0.0716, over 19770.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.294, pruned_loss=0.06961, over 3822559.09 frames. ], batch size: 46, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:38,617 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.268e+02 6.331e+02 7.722e+02 1.710e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-02 09:31:44,767 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111809.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:32:13,241 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:32:28,446 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 09:32:34,315 INFO [train.py:903] (0/4) Epoch 17, batch 2600, loss[loss=0.1865, simple_loss=0.2583, pruned_loss=0.05732, over 18577.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2945, pruned_loss=0.07016, over 3821075.96 frames. ], batch size: 41, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:38,520 INFO [train.py:903] (0/4) Epoch 17, batch 2650, loss[loss=0.2224, simple_loss=0.2911, pruned_loss=0.07687, over 19786.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2943, pruned_loss=0.06973, over 3822276.85 frames. ], batch size: 47, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:46,341 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 5.132e+02 6.430e+02 7.842e+02 1.964e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 09:33:58,637 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 09:34:38,351 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:41,295 INFO [train.py:903] (0/4) Epoch 17, batch 2700, loss[loss=0.1743, simple_loss=0.2608, pruned_loss=0.04387, over 19617.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2941, pruned_loss=0.06941, over 3830293.56 frames. 
], batch size: 50, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:34:54,247 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:26,064 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:43,875 INFO [train.py:903] (0/4) Epoch 17, batch 2750, loss[loss=0.1699, simple_loss=0.2485, pruned_loss=0.0456, over 18744.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2947, pruned_loss=0.06965, over 3830898.23 frames. ], batch size: 41, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:35:46,295 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-112000.pt +2023-04-02 09:35:52,117 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.760e+02 5.468e+02 6.814e+02 8.726e+02 1.544e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-02 09:36:45,031 INFO [train.py:903] (0/4) Epoch 17, batch 2800, loss[loss=0.2328, simple_loss=0.3114, pruned_loss=0.07709, over 19391.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2956, pruned_loss=0.07046, over 3814731.61 frames. ], batch size: 70, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:29,646 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:37:48,060 INFO [train.py:903] (0/4) Epoch 17, batch 2850, loss[loss=0.2006, simple_loss=0.2725, pruned_loss=0.06433, over 19729.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07062, over 3798887.83 frames. ], batch size: 46, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:54,816 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 5.207e+02 6.661e+02 8.674e+02 1.797e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 09:37:59,863 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6556, 1.7054, 1.6044, 1.3761, 1.3411, 1.4120, 0.2403, 0.6772], + device='cuda:0'), covar=tensor([0.0574, 0.0575, 0.0366, 0.0579, 0.1164, 0.0731, 0.1170, 0.0984], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0349, 0.0347, 0.0373, 0.0449, 0.0379, 0.0323, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:38:46,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 09:38:49,622 INFO [train.py:903] (0/4) Epoch 17, batch 2900, loss[loss=0.202, simple_loss=0.2845, pruned_loss=0.05975, over 19518.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.07064, over 3800216.77 frames. ], batch size: 54, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:38:56,283 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112153.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:39:47,788 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 09:39:51,807 INFO [train.py:903] (0/4) Epoch 17, batch 2950, loss[loss=0.1988, simple_loss=0.2819, pruned_loss=0.05783, over 19669.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2972, pruned_loss=0.07144, over 3802525.14 frames. 
], batch size: 55, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:39:55,960 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:39:58,767 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 4.879e+02 6.137e+02 7.850e+02 1.399e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-04-02 09:40:07,402 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.1501, 5.1050, 5.9404, 5.9385, 2.0544, 5.5944, 4.8538, 5.4886], + device='cuda:0'), covar=tensor([0.1412, 0.0868, 0.0519, 0.0524, 0.5386, 0.0556, 0.0551, 0.1085], + device='cuda:0'), in_proj_covar=tensor([0.0737, 0.0677, 0.0880, 0.0766, 0.0785, 0.0636, 0.0527, 0.0809], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 09:40:27,815 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:40:54,420 INFO [train.py:903] (0/4) Epoch 17, batch 3000, loss[loss=0.1987, simple_loss=0.2848, pruned_loss=0.05628, over 19655.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2956, pruned_loss=0.07011, over 3815429.35 frames. ], batch size: 55, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:40:54,421 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 09:41:09,007 INFO [train.py:937] (0/4) Epoch 17, validation: loss=0.1717, simple_loss=0.272, pruned_loss=0.03576, over 944034.00 frames. +2023-04-02 09:41:09,008 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 09:41:13,731 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 09:41:25,581 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0809, 1.0738, 1.6143, 1.3426, 2.5591, 3.5762, 3.3758, 3.9556], + device='cuda:0'), covar=tensor([0.1890, 0.5035, 0.4230, 0.2384, 0.0701, 0.0219, 0.0255, 0.0266], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0307, 0.0336, 0.0256, 0.0229, 0.0175, 0.0208, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:41:33,205 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112268.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:41:46,300 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 09:42:09,756 INFO [train.py:903] (0/4) Epoch 17, batch 3050, loss[loss=0.204, simple_loss=0.278, pruned_loss=0.06506, over 19746.00 frames. ], tot_loss[loss=0.217, simple_loss=0.295, pruned_loss=0.06952, over 3834156.21 frames. ], batch size: 47, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:42:16,489 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.197e+02 6.217e+02 9.038e+02 1.667e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-02 09:43:10,131 INFO [train.py:903] (0/4) Epoch 17, batch 3100, loss[loss=0.1747, simple_loss=0.2521, pruned_loss=0.04862, over 19341.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2967, pruned_loss=0.07073, over 3825198.36 frames. 
], batch size: 44, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:43:14,676 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0028, 1.0345, 1.4904, 1.6372, 2.5297, 4.3622, 4.2630, 4.9364], + device='cuda:0'), covar=tensor([0.2137, 0.5516, 0.4798, 0.2430, 0.0848, 0.0242, 0.0208, 0.0168], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0307, 0.0336, 0.0256, 0.0229, 0.0175, 0.0208, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 09:44:14,542 INFO [train.py:903] (0/4) Epoch 17, batch 3150, loss[loss=0.1969, simple_loss=0.2733, pruned_loss=0.06024, over 19731.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2961, pruned_loss=0.07056, over 3836755.09 frames. ], batch size: 46, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:21,812 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.016e+02 6.190e+02 7.660e+02 1.883e+03, threshold=1.238e+03, percent-clipped=9.0 +2023-04-02 09:44:25,539 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112407.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:44:42,679 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 09:44:51,275 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:45:17,099 INFO [train.py:903] (0/4) Epoch 17, batch 3200, loss[loss=0.2205, simple_loss=0.2833, pruned_loss=0.07885, over 19764.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07069, over 3829347.97 frames. ], batch size: 45, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:19,039 INFO [train.py:903] (0/4) Epoch 17, batch 3250, loss[loss=0.1726, simple_loss=0.2482, pruned_loss=0.04847, over 19771.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07127, over 3819631.24 frames. ], batch size: 47, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:26,172 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.958e+02 6.274e+02 7.840e+02 2.025e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 09:46:51,212 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112524.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:47:13,515 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:47:18,737 INFO [train.py:903] (0/4) Epoch 17, batch 3300, loss[loss=0.2266, simple_loss=0.2905, pruned_loss=0.08136, over 19422.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2972, pruned_loss=0.07139, over 3822180.14 frames. ], batch size: 48, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:47:20,334 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112549.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:47:25,352 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 09:48:00,404 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:20,009 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:24,042 INFO [train.py:903] (0/4) Epoch 17, batch 3350, loss[loss=0.2071, simple_loss=0.2849, pruned_loss=0.06463, over 19582.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2962, pruned_loss=0.07059, over 3832088.37 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:48:31,317 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.436e+02 6.846e+02 8.612e+02 1.565e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 09:49:09,900 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:49:19,914 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8861, 4.4373, 2.5531, 3.9542, 0.9794, 4.2963, 4.2540, 4.3845], + device='cuda:0'), covar=tensor([0.0582, 0.1055, 0.2157, 0.0774, 0.4051, 0.0721, 0.0850, 0.1109], + device='cuda:0'), in_proj_covar=tensor([0.0473, 0.0389, 0.0472, 0.0331, 0.0390, 0.0407, 0.0402, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:49:24,124 INFO [train.py:903] (0/4) Epoch 17, batch 3400, loss[loss=0.1796, simple_loss=0.2675, pruned_loss=0.04586, over 19600.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2958, pruned_loss=0.07037, over 3840834.46 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:49:57,932 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.44 vs. limit=5.0 +2023-04-02 09:50:25,917 INFO [train.py:903] (0/4) Epoch 17, batch 3450, loss[loss=0.245, simple_loss=0.3254, pruned_loss=0.08228, over 19649.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2954, pruned_loss=0.07017, over 3822463.14 frames. ], batch size: 58, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:28,244 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 09:50:33,005 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.932e+02 6.092e+02 9.481e+02 2.200e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-02 09:51:03,686 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1408, 3.6918, 2.1722, 1.9521, 3.3367, 1.8526, 1.3788, 2.2889], + device='cuda:0'), covar=tensor([0.1338, 0.0472, 0.0935, 0.0878, 0.0556, 0.1177, 0.1014, 0.0627], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0310, 0.0331, 0.0257, 0.0244, 0.0332, 0.0294, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:51:27,357 INFO [train.py:903] (0/4) Epoch 17, batch 3500, loss[loss=0.2004, simple_loss=0.2699, pruned_loss=0.06543, over 18676.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2955, pruned_loss=0.07028, over 3833877.75 frames. 
], batch size: 41, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:51:32,006 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:51:49,934 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5105, 1.7281, 2.0569, 1.7609, 3.2007, 2.6487, 3.5081, 1.6563], + device='cuda:0'), covar=tensor([0.2310, 0.3946, 0.2534, 0.1794, 0.1448, 0.1942, 0.1531, 0.3771], + device='cuda:0'), in_proj_covar=tensor([0.0508, 0.0610, 0.0667, 0.0463, 0.0605, 0.0514, 0.0646, 0.0521], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 09:52:31,111 INFO [train.py:903] (0/4) Epoch 17, batch 3550, loss[loss=0.2127, simple_loss=0.2853, pruned_loss=0.07002, over 19581.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2949, pruned_loss=0.06983, over 3836143.88 frames. ], batch size: 52, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:52:32,817 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:38,378 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.759e+02 5.980e+02 7.566e+02 1.638e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 09:53:03,288 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:53:09,420 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 09:53:33,310 INFO [train.py:903] (0/4) Epoch 17, batch 3600, loss[loss=0.1942, simple_loss=0.2732, pruned_loss=0.05756, over 19778.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2951, pruned_loss=0.06988, over 3824703.48 frames. ], batch size: 54, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:53:55,557 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112866.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:53:56,512 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0091, 3.6039, 2.5331, 3.2199, 0.7525, 3.5615, 3.4479, 3.5462], + device='cuda:0'), covar=tensor([0.0756, 0.1072, 0.1954, 0.0954, 0.4179, 0.0857, 0.0973, 0.1216], + device='cuda:0'), in_proj_covar=tensor([0.0475, 0.0388, 0.0470, 0.0332, 0.0389, 0.0406, 0.0402, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:53:57,977 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8506, 1.3452, 1.0762, 0.9653, 1.1486, 1.0323, 0.8839, 1.2273], + device='cuda:0'), covar=tensor([0.0650, 0.0746, 0.1111, 0.0688, 0.0564, 0.1297, 0.0643, 0.0438], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0312, 0.0333, 0.0258, 0.0245, 0.0334, 0.0296, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:54:35,707 INFO [train.py:903] (0/4) Epoch 17, batch 3650, loss[loss=0.2949, simple_loss=0.3545, pruned_loss=0.1177, over 13541.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2952, pruned_loss=0.07015, over 3807695.43 frames. 
], batch size: 136, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:54:43,570 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.951e+02 4.960e+02 5.826e+02 7.647e+02 1.614e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 09:55:09,846 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112924.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,600 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,743 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:38,877 INFO [train.py:903] (0/4) Epoch 17, batch 3700, loss[loss=0.208, simple_loss=0.2735, pruned_loss=0.07126, over 19050.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2949, pruned_loss=0.07049, over 3814341.44 frames. ], batch size: 42, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:17,465 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:56:42,237 INFO [train.py:903] (0/4) Epoch 17, batch 3750, loss[loss=0.1809, simple_loss=0.2533, pruned_loss=0.05422, over 18668.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2942, pruned_loss=0.06974, over 3828600.56 frames. ], batch size: 41, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:49,245 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.723e+02 6.001e+02 7.947e+02 1.345e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-02 09:57:32,661 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113039.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:57:42,422 INFO [train.py:903] (0/4) Epoch 17, batch 3800, loss[loss=0.2069, simple_loss=0.2931, pruned_loss=0.06033, over 19557.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2945, pruned_loss=0.06995, over 3824728.44 frames. ], batch size: 56, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:57:49,559 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:14,153 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 09:58:39,380 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:43,325 INFO [train.py:903] (0/4) Epoch 17, batch 3850, loss[loss=0.2293, simple_loss=0.3107, pruned_loss=0.07398, over 19609.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06966, over 3830155.79 frames. 
], batch size: 57, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:58:51,555 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.627e+02 5.316e+02 6.326e+02 9.097e+02 1.552e+03, threshold=1.265e+03, percent-clipped=8.0 +2023-04-02 09:59:08,793 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3522, 2.9962, 2.3170, 2.7378, 0.9620, 2.9651, 2.9100, 3.0025], + device='cuda:0'), covar=tensor([0.1043, 0.1426, 0.1954, 0.1114, 0.3658, 0.1096, 0.1108, 0.1378], + device='cuda:0'), in_proj_covar=tensor([0.0475, 0.0387, 0.0470, 0.0331, 0.0389, 0.0408, 0.0403, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 09:59:14,496 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113122.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:28,704 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6526, 1.4799, 1.4799, 2.3246, 1.8824, 1.8711, 1.9704, 1.7792], + device='cuda:0'), covar=tensor([0.0841, 0.0956, 0.1057, 0.0694, 0.0745, 0.0788, 0.0848, 0.0710], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0242, 0.0226, 0.0208, 0.0188, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 09:59:35,638 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2118, 2.2250, 2.3723, 3.0306, 2.1381, 2.7653, 2.5476, 2.1952], + device='cuda:0'), covar=tensor([0.4135, 0.4020, 0.1884, 0.2477, 0.4495, 0.2170, 0.4259, 0.3308], + device='cuda:0'), in_proj_covar=tensor([0.0857, 0.0908, 0.0688, 0.0913, 0.0838, 0.0774, 0.0817, 0.0754], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 09:59:44,563 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113147.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:45,352 INFO [train.py:903] (0/4) Epoch 17, batch 3900, loss[loss=0.2067, simple_loss=0.2837, pruned_loss=0.06483, over 19486.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.294, pruned_loss=0.06928, over 3824233.21 frames. ], batch size: 49, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:17,085 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-02 10:00:33,600 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.92 vs. limit=5.0 +2023-04-02 10:00:48,752 INFO [train.py:903] (0/4) Epoch 17, batch 3950, loss[loss=0.2055, simple_loss=0.2891, pruned_loss=0.06097, over 19782.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.06926, over 3816429.52 frames. ], batch size: 56, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:56,114 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-02 10:00:57,243 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.545e+02 5.288e+02 6.585e+02 1.560e+03, threshold=1.058e+03, percent-clipped=1.0 +2023-04-02 10:01:05,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0742, 1.7503, 1.9629, 2.0196, 4.5394, 1.2724, 2.7585, 4.9252], + device='cuda:0'), covar=tensor([0.0409, 0.2561, 0.2602, 0.1721, 0.0720, 0.2475, 0.1208, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0352, 0.0375, 0.0333, 0.0360, 0.0342, 0.0358, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:01:51,454 INFO [train.py:903] (0/4) Epoch 17, batch 4000, loss[loss=0.1861, simple_loss=0.2573, pruned_loss=0.05747, over 19733.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2937, pruned_loss=0.06939, over 3802370.33 frames. ], batch size: 46, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:01:56,547 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:00,264 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2267, 2.2621, 2.4452, 2.9086, 2.2052, 2.7602, 2.6000, 2.2625], + device='cuda:0'), covar=tensor([0.3822, 0.3349, 0.1639, 0.2145, 0.3621, 0.1817, 0.4015, 0.2980], + device='cuda:0'), in_proj_covar=tensor([0.0860, 0.0910, 0.0691, 0.0916, 0.0839, 0.0775, 0.0818, 0.0757], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 10:02:35,212 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:39,806 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 10:02:49,612 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:52,763 INFO [train.py:903] (0/4) Epoch 17, batch 4050, loss[loss=0.2301, simple_loss=0.305, pruned_loss=0.07758, over 19797.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2943, pruned_loss=0.06915, over 3809977.31 frames. ], batch size: 56, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:00,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 4.703e+02 5.716e+02 7.594e+02 1.568e+03, threshold=1.143e+03, percent-clipped=5.0 +2023-04-02 10:03:08,190 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:21,566 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:38,666 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:54,659 INFO [train.py:903] (0/4) Epoch 17, batch 4100, loss[loss=0.1969, simple_loss=0.269, pruned_loss=0.06244, over 19795.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2958, pruned_loss=0.07054, over 3785296.24 frames. 
], batch size: 48, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:57,614 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:28,652 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:31,489 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 10:04:56,296 INFO [train.py:903] (0/4) Epoch 17, batch 4150, loss[loss=0.2075, simple_loss=0.2926, pruned_loss=0.0612, over 19598.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.07009, over 3794290.39 frames. ], batch size: 61, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:04:56,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:05:03,828 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 5.375e+02 6.520e+02 8.152e+02 2.133e+03, threshold=1.304e+03, percent-clipped=6.0 +2023-04-02 10:05:57,675 INFO [train.py:903] (0/4) Epoch 17, batch 4200, loss[loss=0.2502, simple_loss=0.3338, pruned_loss=0.08334, over 19670.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2949, pruned_loss=0.07005, over 3794476.24 frames. ], batch size: 59, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:06:02,360 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 10:06:15,150 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0977, 1.7719, 2.0710, 1.7226, 4.6324, 1.0944, 2.5663, 4.9945], + device='cuda:0'), covar=tensor([0.0417, 0.2560, 0.2393, 0.1797, 0.0660, 0.2576, 0.1345, 0.0152], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0352, 0.0373, 0.0333, 0.0361, 0.0342, 0.0359, 0.0379], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:06:59,519 INFO [train.py:903] (0/4) Epoch 17, batch 4250, loss[loss=0.1818, simple_loss=0.2605, pruned_loss=0.05151, over 19400.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2932, pruned_loss=0.06921, over 3794690.01 frames. ], batch size: 47, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:07:06,456 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.808e+02 5.898e+02 7.585e+02 1.571e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 10:07:13,469 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 10:07:24,947 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 10:08:01,305 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7157, 1.7075, 1.6208, 1.3907, 1.3078, 1.4156, 0.2595, 0.6602], + device='cuda:0'), covar=tensor([0.0540, 0.0529, 0.0328, 0.0556, 0.1071, 0.0578, 0.1021, 0.0944], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0346, 0.0345, 0.0374, 0.0445, 0.0376, 0.0323, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 10:08:02,055 INFO [train.py:903] (0/4) Epoch 17, batch 4300, loss[loss=0.2168, simple_loss=0.297, pruned_loss=0.06828, over 19670.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2936, pruned_loss=0.06936, over 3802992.45 frames. 
], batch size: 55, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:08:55,437 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 10:09:02,226 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113596.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:09:04,353 INFO [train.py:903] (0/4) Epoch 17, batch 4350, loss[loss=0.2247, simple_loss=0.2936, pruned_loss=0.07792, over 19753.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2926, pruned_loss=0.0684, over 3819820.64 frames. ], batch size: 51, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:09:12,444 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 4.847e+02 6.118e+02 7.738e+02 1.753e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 10:09:45,306 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 10:09:49,669 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:07,375 INFO [train.py:903] (0/4) Epoch 17, batch 4400, loss[loss=0.1955, simple_loss=0.2823, pruned_loss=0.05433, over 19788.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2927, pruned_loss=0.06821, over 3830357.66 frames. ], batch size: 56, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:10:14,892 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113654.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:26,277 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:33,149 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 10:10:43,126 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 10:10:45,268 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113679.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:11:07,787 INFO [train.py:903] (0/4) Epoch 17, batch 4450, loss[loss=0.2181, simple_loss=0.2899, pruned_loss=0.07312, over 19600.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2929, pruned_loss=0.06897, over 3831348.41 frames. ], batch size: 52, lr: 4.84e-03, grad_scale: 16.0 +2023-04-02 10:11:14,458 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.100e+02 6.811e+02 8.906e+02 1.680e+03, threshold=1.362e+03, percent-clipped=7.0 +2023-04-02 10:11:22,970 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113711.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:11:27,803 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-02 10:11:51,806 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 10:12:07,717 INFO [train.py:903] (0/4) Epoch 17, batch 4500, loss[loss=0.2192, simple_loss=0.3046, pruned_loss=0.06687, over 19617.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2935, pruned_loss=0.07005, over 3816237.57 frames. 
], batch size: 57, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:12:41,931 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1172, 2.0356, 1.9028, 1.7346, 1.5201, 1.7126, 0.5504, 1.0966], + device='cuda:0'), covar=tensor([0.0528, 0.0531, 0.0332, 0.0635, 0.1030, 0.0712, 0.1127, 0.0869], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0344, 0.0344, 0.0372, 0.0444, 0.0375, 0.0322, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 10:12:59,656 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 10:13:06,569 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 10:13:09,291 INFO [train.py:903] (0/4) Epoch 17, batch 4550, loss[loss=0.1832, simple_loss=0.2619, pruned_loss=0.05223, over 19475.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.07001, over 3818250.65 frames. ], batch size: 49, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:13:19,360 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 10:13:20,448 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 5.090e+02 6.214e+02 7.749e+02 1.433e+03, threshold=1.243e+03, percent-clipped=2.0 +2023-04-02 10:13:42,454 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 10:14:12,738 INFO [train.py:903] (0/4) Epoch 17, batch 4600, loss[loss=0.2176, simple_loss=0.3054, pruned_loss=0.06486, over 19625.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2942, pruned_loss=0.06952, over 3824422.20 frames. ], batch size: 57, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:14,385 INFO [train.py:903] (0/4) Epoch 17, batch 4650, loss[loss=0.2099, simple_loss=0.2978, pruned_loss=0.061, over 19680.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2941, pruned_loss=0.06943, over 3836965.04 frames. ], batch size: 53, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:23,609 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.276e+02 6.482e+02 7.907e+02 1.823e+03, threshold=1.296e+03, percent-clipped=2.0 +2023-04-02 10:15:31,994 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 10:15:44,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 10:16:16,816 INFO [train.py:903] (0/4) Epoch 17, batch 4700, loss[loss=0.1911, simple_loss=0.2654, pruned_loss=0.0584, over 18657.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.293, pruned_loss=0.06894, over 3848036.53 frames. ], batch size: 41, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:16:42,988 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:16:43,756 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 10:16:56,533 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:12,176 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:19,855 INFO [train.py:903] (0/4) Epoch 17, batch 4750, loss[loss=0.2131, simple_loss=0.302, pruned_loss=0.06205, over 19645.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2931, pruned_loss=0.06873, over 3820583.76 frames. ], batch size: 58, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:17:22,233 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-114000.pt +2023-04-02 10:17:32,729 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.836e+02 6.122e+02 7.624e+02 1.576e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 10:17:35,054 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:47,627 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5627, 4.1231, 2.6292, 3.6136, 0.8967, 3.9629, 3.9107, 3.9852], + device='cuda:0'), covar=tensor([0.0652, 0.1045, 0.1958, 0.0890, 0.4175, 0.0779, 0.0942, 0.1061], + device='cuda:0'), in_proj_covar=tensor([0.0476, 0.0389, 0.0476, 0.0335, 0.0396, 0.0410, 0.0408, 0.0435], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:18:08,355 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7377, 1.2476, 1.4353, 1.4020, 3.3033, 1.0453, 2.3359, 3.6367], + device='cuda:0'), covar=tensor([0.0468, 0.2816, 0.2896, 0.1968, 0.0749, 0.2660, 0.1307, 0.0282], + device='cuda:0'), in_proj_covar=tensor([0.0388, 0.0352, 0.0373, 0.0334, 0.0361, 0.0342, 0.0359, 0.0378], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:18:24,455 INFO [train.py:903] (0/4) Epoch 17, batch 4800, loss[loss=0.2261, simple_loss=0.2988, pruned_loss=0.07671, over 19669.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2936, pruned_loss=0.06896, over 3815669.29 frames. ], batch size: 55, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:22,101 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:19:25,638 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2898, 3.8170, 3.9139, 3.8952, 1.5831, 3.7029, 3.2429, 3.6628], + device='cuda:0'), covar=tensor([0.1621, 0.0923, 0.0643, 0.0777, 0.5493, 0.1000, 0.0719, 0.1139], + device='cuda:0'), in_proj_covar=tensor([0.0739, 0.0691, 0.0887, 0.0772, 0.0791, 0.0643, 0.0536, 0.0814], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 10:19:26,563 INFO [train.py:903] (0/4) Epoch 17, batch 4850, loss[loss=0.2054, simple_loss=0.2892, pruned_loss=0.06074, over 19769.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2932, pruned_loss=0.06869, over 3820149.87 frames. 
], batch size: 56, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:35,426 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.130e+02 6.675e+02 8.728e+02 1.864e+03, threshold=1.335e+03, percent-clipped=11.0 +2023-04-02 10:19:52,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 10:19:57,417 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:08,907 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8020, 3.2868, 3.3164, 3.3312, 1.3984, 3.1903, 2.7854, 3.0622], + device='cuda:0'), covar=tensor([0.1694, 0.0928, 0.0798, 0.0852, 0.5155, 0.0976, 0.0782, 0.1289], + device='cuda:0'), in_proj_covar=tensor([0.0742, 0.0694, 0.0892, 0.0777, 0.0798, 0.0646, 0.0539, 0.0819], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 10:20:14,653 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 10:20:19,126 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 10:20:20,346 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 10:20:25,205 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:28,429 INFO [train.py:903] (0/4) Epoch 17, batch 4900, loss[loss=0.2411, simple_loss=0.3305, pruned_loss=0.07587, over 18170.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2935, pruned_loss=0.06846, over 3831797.40 frames. ], batch size: 83, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:20:28,493 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 10:20:45,331 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9027, 1.9437, 2.1503, 2.4818, 1.7846, 2.3452, 2.2122, 1.9359], + device='cuda:0'), covar=tensor([0.4137, 0.3732, 0.1884, 0.2389, 0.3990, 0.2033, 0.4551, 0.3316], + device='cuda:0'), in_proj_covar=tensor([0.0856, 0.0905, 0.0687, 0.0913, 0.0836, 0.0773, 0.0817, 0.0755], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 10:20:48,144 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 10:21:29,558 INFO [train.py:903] (0/4) Epoch 17, batch 4950, loss[loss=0.1898, simple_loss=0.2628, pruned_loss=0.05835, over 19786.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2935, pruned_loss=0.0685, over 3829775.09 frames. ], batch size: 48, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:21:41,951 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.073e+02 6.090e+02 7.599e+02 1.461e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-02 10:21:48,501 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 10:22:09,556 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 10:22:31,813 INFO [train.py:903] (0/4) Epoch 17, batch 5000, loss[loss=0.1849, simple_loss=0.2633, pruned_loss=0.05326, over 19806.00 frames. 
], tot_loss[loss=0.2167, simple_loss=0.2944, pruned_loss=0.06948, over 3816044.94 frames. ], batch size: 49, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:22:39,599 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 10:22:50,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 10:23:32,991 INFO [train.py:903] (0/4) Epoch 17, batch 5050, loss[loss=0.214, simple_loss=0.3003, pruned_loss=0.06389, over 19609.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2941, pruned_loss=0.06947, over 3814735.37 frames. ], batch size: 57, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:23:42,364 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.068e+02 6.244e+02 7.899e+02 1.430e+03, threshold=1.249e+03, percent-clipped=5.0 +2023-04-02 10:24:10,992 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 10:24:34,964 INFO [train.py:903] (0/4) Epoch 17, batch 5100, loss[loss=0.2153, simple_loss=0.3007, pruned_loss=0.06495, over 19604.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2944, pruned_loss=0.06938, over 3816937.87 frames. ], batch size: 61, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:24:37,659 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:24:44,365 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 10:24:46,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 10:24:52,437 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 10:25:10,350 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:14,739 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:19,724 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1412, 3.7050, 2.1916, 2.1562, 3.3495, 1.9030, 1.4076, 2.2912], + device='cuda:0'), covar=tensor([0.1383, 0.0490, 0.0977, 0.0882, 0.0514, 0.1207, 0.1048, 0.0656], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0309, 0.0329, 0.0256, 0.0245, 0.0328, 0.0292, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:25:36,282 INFO [train.py:903] (0/4) Epoch 17, batch 5150, loss[loss=0.1964, simple_loss=0.2867, pruned_loss=0.05304, over 19777.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2947, pruned_loss=0.06928, over 3815302.65 frames. ], batch size: 56, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:25:46,417 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114404.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:49,639 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.288e+02 6.652e+02 7.839e+02 1.735e+03, threshold=1.330e+03, percent-clipped=3.0 +2023-04-02 10:25:50,827 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 10:26:24,429 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 10:26:41,812 INFO [train.py:903] (0/4) Epoch 17, batch 5200, loss[loss=0.2453, simple_loss=0.3198, pruned_loss=0.08536, over 19687.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06882, over 3834787.03 frames. ], batch size: 59, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:26:47,161 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 10:26:54,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 10:27:33,107 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114489.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:27:35,795 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6922, 1.6888, 1.6227, 1.3825, 1.3429, 1.3879, 0.2822, 0.7011], + device='cuda:0'), covar=tensor([0.0594, 0.0583, 0.0345, 0.0591, 0.1054, 0.0684, 0.1136, 0.0984], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0346, 0.0345, 0.0374, 0.0448, 0.0379, 0.0325, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 10:27:37,829 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 10:27:43,619 INFO [train.py:903] (0/4) Epoch 17, batch 5250, loss[loss=0.1985, simple_loss=0.2807, pruned_loss=0.05811, over 19700.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2942, pruned_loss=0.06869, over 3841059.82 frames. ], batch size: 53, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:27:53,063 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 4.802e+02 5.852e+02 7.465e+02 1.395e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 10:28:03,704 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7808, 2.6171, 2.2653, 2.6773, 2.6198, 2.2945, 2.1786, 2.8107], + device='cuda:0'), covar=tensor([0.0818, 0.1334, 0.1218, 0.0964, 0.1147, 0.0452, 0.1158, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0352, 0.0299, 0.0243, 0.0297, 0.0246, 0.0293, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:28:26,797 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 10:28:44,586 INFO [train.py:903] (0/4) Epoch 17, batch 5300, loss[loss=0.1947, simple_loss=0.2628, pruned_loss=0.06327, over 19388.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2934, pruned_loss=0.06844, over 3840984.33 frames. ], batch size: 47, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:28:59,045 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 10:29:44,122 INFO [train.py:903] (0/4) Epoch 17, batch 5350, loss[loss=0.2079, simple_loss=0.2918, pruned_loss=0.06206, over 19598.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.06924, over 3836146.97 frames. 
], batch size: 57, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:29:51,245 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:29:54,892 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 5.171e+02 6.688e+02 9.089e+02 2.274e+03, threshold=1.338e+03, percent-clipped=9.0 +2023-04-02 10:30:19,030 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 10:30:46,233 INFO [train.py:903] (0/4) Epoch 17, batch 5400, loss[loss=0.1876, simple_loss=0.2691, pruned_loss=0.05308, over 19395.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2946, pruned_loss=0.06933, over 3827736.99 frames. ], batch size: 48, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:00,989 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8667, 4.4174, 2.8250, 3.7616, 1.0749, 4.3522, 4.2180, 4.3818], + device='cuda:0'), covar=tensor([0.0514, 0.0931, 0.1936, 0.0862, 0.3867, 0.0620, 0.0857, 0.1019], + device='cuda:0'), in_proj_covar=tensor([0.0474, 0.0390, 0.0474, 0.0336, 0.0394, 0.0410, 0.0408, 0.0435], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:31:32,773 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 10:31:47,161 INFO [train.py:903] (0/4) Epoch 17, batch 5450, loss[loss=0.2227, simple_loss=0.2876, pruned_loss=0.07888, over 19754.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2941, pruned_loss=0.06912, over 3828696.26 frames. ], batch size: 46, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:56,194 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.503e+02 5.761e+02 7.243e+02 1.420e+03, threshold=1.152e+03, percent-clipped=1.0 +2023-04-02 10:32:31,001 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:32:47,133 INFO [train.py:903] (0/4) Epoch 17, batch 5500, loss[loss=0.2028, simple_loss=0.2868, pruned_loss=0.05942, over 19776.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2951, pruned_loss=0.06954, over 3812889.49 frames. ], batch size: 56, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:32:48,638 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8170, 1.5927, 1.6406, 1.6191, 3.3578, 1.2052, 2.5318, 3.7915], + device='cuda:0'), covar=tensor([0.0441, 0.2411, 0.2518, 0.1740, 0.0671, 0.2376, 0.1102, 0.0215], + device='cuda:0'), in_proj_covar=tensor([0.0389, 0.0353, 0.0374, 0.0336, 0.0364, 0.0343, 0.0360, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:33:11,651 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 10:33:42,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 10:33:46,748 INFO [train.py:903] (0/4) Epoch 17, batch 5550, loss[loss=0.2083, simple_loss=0.2887, pruned_loss=0.06391, over 19687.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2944, pruned_loss=0.06891, over 3819474.37 frames. ], batch size: 53, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:33:54,779 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 10:33:55,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.950e+02 6.230e+02 7.289e+02 1.704e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 10:34:41,617 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 10:34:48,920 INFO [train.py:903] (0/4) Epoch 17, batch 5600, loss[loss=0.1744, simple_loss=0.2504, pruned_loss=0.04915, over 19100.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2945, pruned_loss=0.06912, over 3815638.47 frames. ], batch size: 42, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:03,675 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:32,407 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:39,355 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114889.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:50,182 INFO [train.py:903] (0/4) Epoch 17, batch 5650, loss[loss=0.1649, simple_loss=0.2418, pruned_loss=0.04403, over 15179.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2936, pruned_loss=0.069, over 3798682.62 frames. ], batch size: 33, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:59,362 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.160e+02 6.498e+02 8.575e+02 1.504e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-02 10:36:36,133 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 10:36:51,136 INFO [train.py:903] (0/4) Epoch 17, batch 5700, loss[loss=0.2504, simple_loss=0.3203, pruned_loss=0.09025, over 19322.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2951, pruned_loss=0.06979, over 3799393.88 frames. ], batch size: 66, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:43,544 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8170, 1.9268, 2.1281, 2.4432, 1.7665, 2.3499, 2.1643, 1.9575], + device='cuda:0'), covar=tensor([0.4175, 0.3766, 0.1824, 0.2330, 0.3994, 0.1990, 0.4613, 0.3215], + device='cuda:0'), in_proj_covar=tensor([0.0860, 0.0912, 0.0690, 0.0919, 0.0838, 0.0777, 0.0815, 0.0761], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 10:37:44,798 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7724, 1.7776, 1.6101, 1.3794, 1.4889, 1.4409, 0.2033, 0.6495], + device='cuda:0'), covar=tensor([0.0490, 0.0539, 0.0352, 0.0549, 0.0972, 0.0646, 0.1027, 0.0951], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0346, 0.0344, 0.0375, 0.0447, 0.0380, 0.0325, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 10:37:50,252 INFO [train.py:903] (0/4) Epoch 17, batch 5750, loss[loss=0.1902, simple_loss=0.2764, pruned_loss=0.05197, over 19529.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2947, pruned_loss=0.06951, over 3801833.85 frames. ], batch size: 56, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:50,264 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 10:37:57,221 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-02 10:37:59,478 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 5.221e+02 6.429e+02 7.572e+02 1.818e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 10:38:04,571 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 10:38:50,579 INFO [train.py:903] (0/4) Epoch 17, batch 5800, loss[loss=0.2313, simple_loss=0.3119, pruned_loss=0.07537, over 19676.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2948, pruned_loss=0.06974, over 3809052.10 frames. ], batch size: 59, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:39:27,224 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:39:52,202 INFO [train.py:903] (0/4) Epoch 17, batch 5850, loss[loss=0.195, simple_loss=0.2811, pruned_loss=0.05446, over 19668.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2945, pruned_loss=0.06941, over 3810967.64 frames. ], batch size: 58, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:39:59,333 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:40:01,415 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 4.663e+02 6.050e+02 7.882e+02 1.454e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 10:40:51,602 INFO [train.py:903] (0/4) Epoch 17, batch 5900, loss[loss=0.215, simple_loss=0.2934, pruned_loss=0.06826, over 17412.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2941, pruned_loss=0.06923, over 3817385.77 frames. ], batch size: 101, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:40:55,183 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 10:41:03,358 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3346, 3.8617, 2.6239, 3.4902, 1.1999, 3.8491, 3.7447, 3.9261], + device='cuda:0'), covar=tensor([0.0684, 0.1155, 0.1889, 0.0813, 0.3542, 0.0689, 0.0828, 0.0969], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0387, 0.0469, 0.0334, 0.0388, 0.0406, 0.0403, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:41:13,994 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 10:41:45,856 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:41:51,143 INFO [train.py:903] (0/4) Epoch 17, batch 5950, loss[loss=0.2971, simple_loss=0.3534, pruned_loss=0.1204, over 13440.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2945, pruned_loss=0.06968, over 3805169.85 frames. ], batch size: 136, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:00,462 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.938e+02 6.318e+02 8.201e+02 2.090e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:42:35,074 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115233.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:42:51,731 INFO [train.py:903] (0/4) Epoch 17, batch 6000, loss[loss=0.2208, simple_loss=0.286, pruned_loss=0.07781, over 19756.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2939, pruned_loss=0.06913, over 3798466.29 frames. 
], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:51,732 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 10:43:04,252 INFO [train.py:937] (0/4) Epoch 17, validation: loss=0.1707, simple_loss=0.2712, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 10:43:04,253 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 10:44:04,142 INFO [train.py:903] (0/4) Epoch 17, batch 6050, loss[loss=0.1839, simple_loss=0.2516, pruned_loss=0.05809, over 19086.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2937, pruned_loss=0.06899, over 3792500.58 frames. ], batch size: 42, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:44:15,952 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 5.156e+02 6.136e+02 7.598e+02 1.906e+03, threshold=1.227e+03, percent-clipped=4.0 +2023-04-02 10:45:06,507 INFO [train.py:903] (0/4) Epoch 17, batch 6100, loss[loss=0.2128, simple_loss=0.3007, pruned_loss=0.06242, over 19544.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2925, pruned_loss=0.06839, over 3804999.87 frames. ], batch size: 56, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:45:06,872 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115348.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:27,762 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 10:45:36,308 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9398, 1.6653, 1.5175, 1.8931, 1.5407, 1.5982, 1.4643, 1.8360], + device='cuda:0'), covar=tensor([0.1022, 0.1420, 0.1546, 0.1011, 0.1409, 0.0600, 0.1412, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0353, 0.0302, 0.0245, 0.0297, 0.0247, 0.0295, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:45:54,951 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:56,132 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:46:06,535 INFO [train.py:903] (0/4) Epoch 17, batch 6150, loss[loss=0.1923, simple_loss=0.2853, pruned_loss=0.04971, over 19546.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2934, pruned_loss=0.06864, over 3807719.46 frames. ], batch size: 54, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:46:06,952 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5909, 2.3142, 1.7965, 1.4475, 2.1165, 1.3927, 1.4257, 1.9405], + device='cuda:0'), covar=tensor([0.0990, 0.0736, 0.0995, 0.0861, 0.0553, 0.1262, 0.0721, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0307, 0.0327, 0.0255, 0.0243, 0.0324, 0.0288, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:46:15,597 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.209e+02 6.440e+02 8.380e+02 1.538e+03, threshold=1.288e+03, percent-clipped=5.0 +2023-04-02 10:46:33,777 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 10:46:35,170 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8102, 3.2669, 3.3241, 3.3183, 1.3059, 3.2228, 2.8214, 3.0970], + device='cuda:0'), covar=tensor([0.1707, 0.1002, 0.0851, 0.0940, 0.5426, 0.0904, 0.0770, 0.1256], + device='cuda:0'), in_proj_covar=tensor([0.0745, 0.0695, 0.0896, 0.0779, 0.0799, 0.0648, 0.0537, 0.0822], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 10:47:07,331 INFO [train.py:903] (0/4) Epoch 17, batch 6200, loss[loss=0.1975, simple_loss=0.2669, pruned_loss=0.06406, over 19336.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2932, pruned_loss=0.0688, over 3813124.27 frames. ], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:47:07,474 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:08,887 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:39,842 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:48:07,509 INFO [train.py:903] (0/4) Epoch 17, batch 6250, loss[loss=0.1791, simple_loss=0.2714, pruned_loss=0.04345, over 19769.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2931, pruned_loss=0.06859, over 3795209.99 frames. ], batch size: 56, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:48:16,568 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.690e+02 5.769e+02 7.890e+02 2.007e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-02 10:48:37,588 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 10:49:09,163 INFO [train.py:903] (0/4) Epoch 17, batch 6300, loss[loss=0.2107, simple_loss=0.2937, pruned_loss=0.06385, over 19654.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2942, pruned_loss=0.06903, over 3805059.69 frames. ], batch size: 60, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:49:27,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115563.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:12,480 INFO [train.py:903] (0/4) Epoch 17, batch 6350, loss[loss=0.2157, simple_loss=0.2881, pruned_loss=0.07161, over 19595.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.293, pruned_loss=0.0682, over 3798972.76 frames. ], batch size: 50, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:50:19,286 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 10:50:19,959 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:21,925 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.077e+02 8.091e+02 1.466e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 10:50:50,636 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:51:13,881 INFO [train.py:903] (0/4) Epoch 17, batch 6400, loss[loss=0.2532, simple_loss=0.3315, pruned_loss=0.08745, over 19489.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2936, pruned_loss=0.06802, over 3810483.86 frames. 
], batch size: 64, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:15,082 INFO [train.py:903] (0/4) Epoch 17, batch 6450, loss[loss=0.2222, simple_loss=0.3008, pruned_loss=0.07185, over 19616.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2939, pruned_loss=0.06782, over 3827811.88 frames. ], batch size: 50, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:25,105 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.829e+02 5.862e+02 7.962e+02 1.327e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 10:52:41,080 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1270, 1.2638, 1.6679, 1.0069, 2.3655, 3.0994, 2.7752, 3.2560], + device='cuda:0'), covar=tensor([0.1597, 0.3709, 0.3209, 0.2501, 0.0586, 0.0191, 0.0266, 0.0293], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0313, 0.0341, 0.0260, 0.0235, 0.0179, 0.0211, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 10:52:58,106 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115732.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:52:59,222 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:53:01,407 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 10:53:16,380 INFO [train.py:903] (0/4) Epoch 17, batch 6500, loss[loss=0.1986, simple_loss=0.2753, pruned_loss=0.06094, over 19738.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2926, pruned_loss=0.06758, over 3834846.66 frames. ], batch size: 51, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:53:24,002 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 10:54:18,483 INFO [train.py:903] (0/4) Epoch 17, batch 6550, loss[loss=0.2677, simple_loss=0.3336, pruned_loss=0.1009, over 19649.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2927, pruned_loss=0.06801, over 3819907.20 frames. ], batch size: 55, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:54:28,764 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.150e+02 6.522e+02 8.804e+02 2.234e+03, threshold=1.304e+03, percent-clipped=7.0 +2023-04-02 10:54:43,121 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:15,220 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115844.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:18,694 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115847.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:19,547 INFO [train.py:903] (0/4) Epoch 17, batch 6600, loss[loss=0.1899, simple_loss=0.2625, pruned_loss=0.05863, over 19762.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2917, pruned_loss=0.06737, over 3828956.91 frames. 
], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:55:19,934 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:56:10,716 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7572, 1.5376, 1.5952, 2.0995, 1.6749, 2.0163, 2.1556, 1.8369], + device='cuda:0'), covar=tensor([0.0809, 0.0928, 0.1023, 0.0830, 0.0864, 0.0736, 0.0776, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0244, 0.0226, 0.0211, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 10:56:19,801 INFO [train.py:903] (0/4) Epoch 17, batch 6650, loss[loss=0.1685, simple_loss=0.2478, pruned_loss=0.0446, over 19366.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2926, pruned_loss=0.06816, over 3837768.15 frames. ], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:56:27,599 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 10:56:30,889 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.833e+02 5.946e+02 8.225e+02 1.682e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-02 10:56:34,712 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115910.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 10:56:43,242 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1993, 1.8253, 1.5147, 1.2438, 1.6555, 1.2069, 1.1800, 1.5998], + device='cuda:0'), covar=tensor([0.0781, 0.0792, 0.0965, 0.0773, 0.0469, 0.1180, 0.0619, 0.0390], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0304, 0.0323, 0.0253, 0.0240, 0.0322, 0.0284, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:57:21,959 INFO [train.py:903] (0/4) Epoch 17, batch 6700, loss[loss=0.1956, simple_loss=0.2641, pruned_loss=0.06349, over 19726.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2925, pruned_loss=0.06837, over 3826107.57 frames. ], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:07,683 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9240, 1.7628, 1.5165, 1.8756, 1.7981, 1.4787, 1.4541, 1.7504], + device='cuda:0'), covar=tensor([0.1112, 0.1680, 0.1727, 0.1088, 0.1462, 0.0804, 0.1631, 0.0904], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0354, 0.0304, 0.0246, 0.0298, 0.0246, 0.0294, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:58:20,387 INFO [train.py:903] (0/4) Epoch 17, batch 6750, loss[loss=0.2684, simple_loss=0.3316, pruned_loss=0.1027, over 13280.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2938, pruned_loss=0.06917, over 3823312.34 frames. 
], batch size: 136, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:22,771 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-116000.pt +2023-04-02 10:58:31,536 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.304e+02 6.320e+02 7.514e+02 1.971e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:58:44,259 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7976, 1.4673, 1.4232, 1.7840, 1.3782, 1.5903, 1.4594, 1.6879], + device='cuda:0'), covar=tensor([0.1056, 0.1380, 0.1448, 0.0954, 0.1294, 0.0552, 0.1306, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0351, 0.0301, 0.0244, 0.0295, 0.0244, 0.0292, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 10:58:59,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:59:17,212 INFO [train.py:903] (0/4) Epoch 17, batch 6800, loss[loss=0.2163, simple_loss=0.302, pruned_loss=0.06531, over 19537.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2953, pruned_loss=0.07035, over 3805766.14 frames. ], batch size: 56, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:59:42,987 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4805, 1.4653, 1.7027, 1.5880, 2.3586, 2.0078, 2.3521, 1.4624], + device='cuda:0'), covar=tensor([0.1863, 0.3202, 0.2016, 0.1522, 0.1218, 0.1671, 0.1158, 0.3491], + device='cuda:0'), in_proj_covar=tensor([0.0511, 0.0610, 0.0668, 0.0460, 0.0609, 0.0512, 0.0647, 0.0521], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 10:59:48,083 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-17.pt +2023-04-02 11:00:03,644 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 11:00:04,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 11:00:07,138 INFO [train.py:903] (0/4) Epoch 18, batch 0, loss[loss=0.2006, simple_loss=0.2889, pruned_loss=0.05619, over 19535.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2889, pruned_loss=0.05619, over 19535.00 frames. ], batch size: 54, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:00:07,139 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 11:00:18,782 INFO [train.py:937] (0/4) Epoch 18, validation: loss=0.1712, simple_loss=0.2722, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 11:00:18,782 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 11:00:32,358 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 11:00:34,739 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.6935, 5.1999, 3.2400, 4.5145, 1.3241, 5.0838, 5.1045, 5.2039], + device='cuda:0'), covar=tensor([0.0361, 0.0687, 0.1645, 0.0628, 0.3787, 0.0519, 0.0718, 0.1009], + device='cuda:0'), in_proj_covar=tensor([0.0472, 0.0389, 0.0470, 0.0331, 0.0394, 0.0408, 0.0402, 0.0432], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:00:51,639 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:52,826 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:55,571 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 4.972e+02 6.494e+02 8.085e+02 1.604e+03, threshold=1.299e+03, percent-clipped=1.0 +2023-04-02 11:01:15,439 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:18,718 INFO [train.py:903] (0/4) Epoch 18, batch 50, loss[loss=0.1668, simple_loss=0.2378, pruned_loss=0.04792, over 15941.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2915, pruned_loss=0.06922, over 859248.24 frames. ], batch size: 35, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:01:21,404 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:23,481 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:30,152 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:52,471 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 11:01:59,224 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5572, 1.2501, 1.4948, 1.2467, 2.2185, 1.0012, 2.0748, 2.4316], + device='cuda:0'), covar=tensor([0.0705, 0.2741, 0.2626, 0.1678, 0.0837, 0.2068, 0.1001, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0386, 0.0354, 0.0370, 0.0339, 0.0359, 0.0343, 0.0357, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:02:21,175 INFO [train.py:903] (0/4) Epoch 18, batch 100, loss[loss=0.2128, simple_loss=0.3014, pruned_loss=0.06206, over 19752.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.294, pruned_loss=0.06888, over 1519747.53 frames. ], batch size: 63, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:02:32,224 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-02 11:02:41,859 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5974, 4.1998, 2.6671, 3.7069, 0.7982, 4.0578, 4.0093, 4.0916], + device='cuda:0'), covar=tensor([0.0600, 0.0933, 0.1902, 0.0772, 0.4175, 0.0647, 0.0834, 0.0920], + device='cuda:0'), in_proj_covar=tensor([0.0472, 0.0390, 0.0472, 0.0332, 0.0395, 0.0408, 0.0403, 0.0433], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:02:58,312 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 4.838e+02 6.090e+02 7.458e+02 2.009e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 11:03:21,607 INFO [train.py:903] (0/4) Epoch 18, batch 150, loss[loss=0.2455, simple_loss=0.3283, pruned_loss=0.08133, over 19453.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2934, pruned_loss=0.06783, over 2037301.20 frames. ], batch size: 64, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:03:56,074 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116254.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:04:20,457 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 11:04:21,608 INFO [train.py:903] (0/4) Epoch 18, batch 200, loss[loss=0.1676, simple_loss=0.2525, pruned_loss=0.04134, over 19766.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2923, pruned_loss=0.06778, over 2442348.46 frames. ], batch size: 48, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:01,397 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 4.750e+02 5.613e+02 7.153e+02 1.890e+03, threshold=1.123e+03, percent-clipped=2.0 +2023-04-02 11:05:24,085 INFO [train.py:903] (0/4) Epoch 18, batch 250, loss[loss=0.2309, simple_loss=0.3221, pruned_loss=0.06981, over 19348.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2936, pruned_loss=0.06871, over 2744642.37 frames. ], batch size: 70, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:43,806 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116341.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:05:57,162 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-04-02 11:06:05,938 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8459, 1.1498, 1.5028, 0.6130, 2.0788, 2.3681, 2.0464, 2.4700], + device='cuda:0'), covar=tensor([0.1719, 0.3851, 0.3329, 0.2805, 0.0640, 0.0308, 0.0386, 0.0384], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0312, 0.0340, 0.0258, 0.0232, 0.0176, 0.0210, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 11:06:18,041 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116369.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:06:19,021 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2465, 1.7859, 2.0777, 2.9767, 2.0917, 2.3976, 2.4193, 2.3974], + device='cuda:0'), covar=tensor([0.0799, 0.1094, 0.0988, 0.0775, 0.0921, 0.0872, 0.0960, 0.0710], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0240, 0.0225, 0.0209, 0.0186, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 11:06:25,429 INFO [train.py:903] (0/4) Epoch 18, batch 300, loss[loss=0.253, simple_loss=0.3298, pruned_loss=0.08812, over 19624.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.0693, over 2995698.32 frames. ], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:06:25,572 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:07:03,942 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 5.241e+02 6.705e+02 8.261e+02 1.478e+03, threshold=1.341e+03, percent-clipped=3.0 +2023-04-02 11:07:28,469 INFO [train.py:903] (0/4) Epoch 18, batch 350, loss[loss=0.252, simple_loss=0.3252, pruned_loss=0.08944, over 19310.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2931, pruned_loss=0.06891, over 3193474.09 frames. ], batch size: 66, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:07:33,277 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 11:07:44,270 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 11:08:16,131 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:19,403 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:29,490 INFO [train.py:903] (0/4) Epoch 18, batch 400, loss[loss=0.2116, simple_loss=0.2822, pruned_loss=0.07049, over 19626.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2934, pruned_loss=0.06926, over 3325346.06 frames. 
], batch size: 50, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:08:31,842 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:47,814 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116491.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:08,776 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.874e+02 5.859e+02 7.069e+02 1.370e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:09:27,678 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:31,057 INFO [train.py:903] (0/4) Epoch 18, batch 450, loss[loss=0.1992, simple_loss=0.2766, pruned_loss=0.06095, over 19599.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.294, pruned_loss=0.06928, over 3426617.74 frames. ], batch size: 50, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:06,938 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 11:10:08,082 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 11:10:36,073 INFO [train.py:903] (0/4) Epoch 18, batch 500, loss[loss=0.1777, simple_loss=0.2594, pruned_loss=0.04797, over 19752.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2929, pruned_loss=0.06896, over 3505726.55 frames. ], batch size: 51, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:43,256 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:10:57,197 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116593.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:11:13,492 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.090e+02 6.291e+02 8.243e+02 1.843e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 11:11:38,118 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116625.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:11:38,806 INFO [train.py:903] (0/4) Epoch 18, batch 550, loss[loss=0.2266, simple_loss=0.2944, pruned_loss=0.07943, over 19436.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06849, over 3568979.65 frames. ], batch size: 48, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:11:51,096 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:12:07,775 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116650.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:12:07,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2629, 1.3205, 1.6622, 1.4704, 2.2041, 1.9164, 2.1883, 0.9722], + device='cuda:0'), covar=tensor([0.2495, 0.4265, 0.2597, 0.2023, 0.1587, 0.2333, 0.1492, 0.4308], + device='cuda:0'), in_proj_covar=tensor([0.0512, 0.0612, 0.0671, 0.0463, 0.0611, 0.0516, 0.0648, 0.0522], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 11:12:40,996 INFO [train.py:903] (0/4) Epoch 18, batch 600, loss[loss=0.2215, simple_loss=0.3025, pruned_loss=0.07023, over 19669.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2938, pruned_loss=0.06869, over 3635928.76 frames. 
], batch size: 55, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:12:51,630 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116685.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:18,872 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.880e+02 6.214e+02 8.095e+02 1.532e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 11:13:21,141 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 11:13:28,752 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9565, 1.8502, 1.6187, 2.0631, 1.7641, 1.8076, 1.6123, 1.9353], + device='cuda:0'), covar=tensor([0.1078, 0.1420, 0.1475, 0.0938, 0.1378, 0.0544, 0.1352, 0.0712], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0356, 0.0302, 0.0247, 0.0299, 0.0247, 0.0296, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:13:37,834 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116722.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:42,022 INFO [train.py:903] (0/4) Epoch 18, batch 650, loss[loss=0.2869, simple_loss=0.358, pruned_loss=0.1079, over 17401.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2941, pruned_loss=0.06902, over 3683748.88 frames. ], batch size: 101, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:13:55,017 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5568, 1.0768, 1.3103, 1.2710, 2.2025, 0.9324, 1.9556, 2.4312], + device='cuda:0'), covar=tensor([0.0704, 0.2822, 0.2892, 0.1643, 0.0828, 0.2150, 0.1153, 0.0482], + device='cuda:0'), in_proj_covar=tensor([0.0390, 0.0354, 0.0374, 0.0340, 0.0362, 0.0345, 0.0359, 0.0384], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:14:08,712 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:34,415 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. limit=5.0 +2023-04-02 11:14:38,772 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:43,011 INFO [train.py:903] (0/4) Epoch 18, batch 700, loss[loss=0.2064, simple_loss=0.289, pruned_loss=0.06186, over 19710.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2943, pruned_loss=0.069, over 3713864.60 frames. ], batch size: 59, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:15:15,694 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:23,661 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.148e+02 4.855e+02 5.777e+02 7.000e+02 1.472e+03, threshold=1.155e+03, percent-clipped=1.0 +2023-04-02 11:15:24,929 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:27,849 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. 
limit=2.0 +2023-04-02 11:15:39,750 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:47,509 INFO [train.py:903] (0/4) Epoch 18, batch 750, loss[loss=0.1913, simple_loss=0.2676, pruned_loss=0.05745, over 19735.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2929, pruned_loss=0.06808, over 3751780.96 frames. ], batch size: 46, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:16:03,692 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:16,536 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:33,983 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:39,876 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:50,618 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116874.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:52,573 INFO [train.py:903] (0/4) Epoch 18, batch 800, loss[loss=0.2109, simple_loss=0.2972, pruned_loss=0.06226, over 19766.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2938, pruned_loss=0.06858, over 3760059.88 frames. ], batch size: 56, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:17:06,586 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 11:17:31,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2582, 5.5780, 3.0677, 5.0028, 1.4494, 5.6876, 5.6170, 5.8230], + device='cuda:0'), covar=tensor([0.0362, 0.0984, 0.1943, 0.0656, 0.3726, 0.0540, 0.0708, 0.0821], + device='cuda:0'), in_proj_covar=tensor([0.0472, 0.0388, 0.0470, 0.0331, 0.0390, 0.0407, 0.0401, 0.0431], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:17:32,327 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.513e+02 6.326e+02 7.669e+02 1.889e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-02 11:17:51,775 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:17:55,027 INFO [train.py:903] (0/4) Epoch 18, batch 850, loss[loss=0.2228, simple_loss=0.3116, pruned_loss=0.067, over 19685.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2946, pruned_loss=0.06921, over 3753629.63 frames. ], batch size: 58, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:18:11,531 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2263, 1.3508, 1.7500, 1.1008, 2.5524, 3.3267, 3.0071, 3.5121], + device='cuda:0'), covar=tensor([0.1542, 0.3691, 0.3170, 0.2493, 0.0559, 0.0225, 0.0229, 0.0257], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0312, 0.0339, 0.0256, 0.0231, 0.0177, 0.0209, 0.0236], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 11:18:48,152 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 11:18:56,388 INFO [train.py:903] (0/4) Epoch 18, batch 900, loss[loss=0.2325, simple_loss=0.315, pruned_loss=0.07507, over 19739.00 frames. 
], tot_loss[loss=0.2165, simple_loss=0.2943, pruned_loss=0.06938, over 3758148.33 frames. ], batch size: 63, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:19:01,313 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116980.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:03,989 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2289, 1.4046, 1.9100, 1.6769, 3.0713, 4.7570, 4.6771, 5.1094], + device='cuda:0'), covar=tensor([0.1634, 0.3585, 0.3043, 0.2019, 0.0542, 0.0147, 0.0134, 0.0131], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0311, 0.0339, 0.0256, 0.0231, 0.0176, 0.0209, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 11:19:04,001 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:38,589 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 4.671e+02 5.637e+02 7.276e+02 1.422e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 11:20:00,613 INFO [train.py:903] (0/4) Epoch 18, batch 950, loss[loss=0.2437, simple_loss=0.3158, pruned_loss=0.08584, over 13542.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2937, pruned_loss=0.06858, over 3774597.63 frames. ], batch size: 136, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:20:02,912 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 11:20:38,595 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:20:51,115 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:03,206 INFO [train.py:903] (0/4) Epoch 18, batch 1000, loss[loss=0.2501, simple_loss=0.3243, pruned_loss=0.08796, over 19599.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06839, over 3794403.49 frames. ], batch size: 57, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:21:11,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:27,305 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:43,056 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 4.999e+02 6.181e+02 7.829e+02 2.221e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 11:21:56,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 11:22:07,203 INFO [train.py:903] (0/4) Epoch 18, batch 1050, loss[loss=0.2284, simple_loss=0.303, pruned_loss=0.07688, over 19577.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2937, pruned_loss=0.06845, over 3806429.57 frames. ], batch size: 52, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:22:40,243 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 11:22:55,009 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117164.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:09,072 INFO [train.py:903] (0/4) Epoch 18, batch 1100, loss[loss=0.2005, simple_loss=0.2816, pruned_loss=0.05963, over 19621.00 frames. 
], tot_loss[loss=0.2149, simple_loss=0.2934, pruned_loss=0.06822, over 3813081.12 frames. ], batch size: 50, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:23:11,061 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-02 11:23:13,162 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:15,431 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117181.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:44,494 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117204.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:49,512 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.961e+02 5.150e+02 6.225e+02 7.900e+02 1.283e+03, threshold=1.245e+03, percent-clipped=2.0 +2023-04-02 11:24:11,116 INFO [train.py:903] (0/4) Epoch 18, batch 1150, loss[loss=0.1926, simple_loss=0.2595, pruned_loss=0.06284, over 19774.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2924, pruned_loss=0.06753, over 3829267.44 frames. ], batch size: 48, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:24:27,252 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:28,265 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117239.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:58,336 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:06,699 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 11:25:14,031 INFO [train.py:903] (0/4) Epoch 18, batch 1200, loss[loss=0.1585, simple_loss=0.2399, pruned_loss=0.03862, over 19281.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2929, pruned_loss=0.06786, over 3841102.83 frames. ], batch size: 44, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:25:18,947 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:31,186 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.2998, 5.2411, 6.1190, 6.0762, 1.8127, 5.7887, 4.8554, 5.7665], + device='cuda:0'), covar=tensor([0.1443, 0.0703, 0.0476, 0.0498, 0.6296, 0.0622, 0.0584, 0.0990], + device='cuda:0'), in_proj_covar=tensor([0.0748, 0.0698, 0.0909, 0.0787, 0.0804, 0.0657, 0.0546, 0.0836], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 11:25:36,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1862, 1.8116, 1.4584, 1.1702, 1.6242, 1.1303, 1.1900, 1.7312], + device='cuda:0'), covar=tensor([0.0692, 0.0763, 0.1019, 0.0759, 0.0529, 0.1295, 0.0581, 0.0363], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0312, 0.0331, 0.0259, 0.0243, 0.0332, 0.0290, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:25:50,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-02 11:25:54,167 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.775e+02 5.862e+02 7.562e+02 1.280e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:26:18,174 INFO [train.py:903] (0/4) Epoch 18, batch 1250, loss[loss=0.1697, simple_loss=0.2451, pruned_loss=0.04712, over 19714.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2926, pruned_loss=0.06813, over 3815043.79 frames. ], batch size: 46, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:26:43,736 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:49,476 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117351.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:27:20,895 INFO [train.py:903] (0/4) Epoch 18, batch 1300, loss[loss=0.2113, simple_loss=0.2917, pruned_loss=0.06545, over 19671.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.06749, over 3817645.39 frames. ], batch size: 58, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:27:21,310 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:28:01,377 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 5.418e+02 6.594e+02 8.357e+02 1.516e+03, threshold=1.319e+03, percent-clipped=5.0 +2023-04-02 11:28:22,255 INFO [train.py:903] (0/4) Epoch 18, batch 1350, loss[loss=0.1683, simple_loss=0.2534, pruned_loss=0.04161, over 19805.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2908, pruned_loss=0.06711, over 3817861.84 frames. ], batch size: 48, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:28:36,242 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:07,724 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:24,520 INFO [train.py:903] (0/4) Epoch 18, batch 1400, loss[loss=0.2072, simple_loss=0.2856, pruned_loss=0.06441, over 19582.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2919, pruned_loss=0.06778, over 3816224.22 frames. ], batch size: 52, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:30:04,549 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.042e+02 5.443e+02 6.741e+02 8.791e+02 2.167e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 11:30:28,256 INFO [train.py:903] (0/4) Epoch 18, batch 1450, loss[loss=0.1968, simple_loss=0.2838, pruned_loss=0.05487, over 19652.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2919, pruned_loss=0.06752, over 3823987.26 frames. ], batch size: 58, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:30:29,452 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 11:30:40,096 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117535.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:11,204 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117560.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:30,928 INFO [train.py:903] (0/4) Epoch 18, batch 1500, loss[loss=0.2312, simple_loss=0.2973, pruned_loss=0.08251, over 19579.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2936, pruned_loss=0.06859, over 3822842.93 frames. 
], batch size: 52, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:31:39,041 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:45,169 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3168, 1.3224, 1.7754, 1.2223, 2.6349, 3.5974, 3.2809, 3.7760], + device='cuda:0'), covar=tensor([0.1515, 0.3693, 0.3095, 0.2318, 0.0556, 0.0190, 0.0206, 0.0227], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0309, 0.0339, 0.0257, 0.0231, 0.0178, 0.0209, 0.0236], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 11:32:11,895 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.657e+02 6.062e+02 7.787e+02 1.498e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 11:32:32,136 INFO [train.py:903] (0/4) Epoch 18, batch 1550, loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.05757, over 19349.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2935, pruned_loss=0.0684, over 3830576.06 frames. ], batch size: 47, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:33:03,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6403, 1.4218, 1.4547, 1.9406, 1.4551, 1.8477, 1.8861, 1.6475], + device='cuda:0'), covar=tensor([0.0814, 0.0954, 0.1042, 0.0754, 0.0861, 0.0715, 0.0855, 0.0703], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0222, 0.0240, 0.0226, 0.0209, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 11:33:34,531 INFO [train.py:903] (0/4) Epoch 18, batch 1600, loss[loss=0.195, simple_loss=0.2682, pruned_loss=0.06093, over 19468.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2928, pruned_loss=0.06822, over 3833124.68 frames. ], batch size: 49, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:33:54,943 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:01,918 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 11:34:03,320 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:15,618 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.964e+02 5.954e+02 7.051e+02 1.393e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 11:34:37,833 INFO [train.py:903] (0/4) Epoch 18, batch 1650, loss[loss=0.2581, simple_loss=0.3219, pruned_loss=0.09712, over 19828.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2924, pruned_loss=0.06833, over 3831107.90 frames. 
], batch size: 52, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:34:50,758 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0103, 2.0404, 2.2562, 2.6520, 1.8836, 2.4602, 2.3145, 2.1077], + device='cuda:0'), covar=tensor([0.4018, 0.3797, 0.1723, 0.2200, 0.3981, 0.1976, 0.4355, 0.3125], + device='cuda:0'), in_proj_covar=tensor([0.0858, 0.0912, 0.0689, 0.0911, 0.0838, 0.0775, 0.0818, 0.0756], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 11:35:36,411 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5670, 1.3866, 1.3950, 2.0873, 1.4558, 1.7468, 1.8560, 1.6345], + device='cuda:0'), covar=tensor([0.0842, 0.0986, 0.1065, 0.0760, 0.0912, 0.0787, 0.0899, 0.0712], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0220, 0.0222, 0.0240, 0.0225, 0.0208, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 11:35:39,488 INFO [train.py:903] (0/4) Epoch 18, batch 1700, loss[loss=0.2268, simple_loss=0.3103, pruned_loss=0.07168, over 19622.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2931, pruned_loss=0.06835, over 3834884.90 frames. ], batch size: 57, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:35:55,765 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5847, 1.1359, 1.4438, 1.2073, 2.2475, 0.9956, 2.0463, 2.4410], + device='cuda:0'), covar=tensor([0.0727, 0.2854, 0.2711, 0.1750, 0.0863, 0.2074, 0.0983, 0.0528], + device='cuda:0'), in_proj_covar=tensor([0.0394, 0.0356, 0.0376, 0.0341, 0.0367, 0.0345, 0.0363, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:36:16,616 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:36:19,946 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.954e+02 5.098e+02 6.258e+02 7.253e+02 1.524e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 11:36:21,131 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 11:36:40,233 INFO [train.py:903] (0/4) Epoch 18, batch 1750, loss[loss=0.2175, simple_loss=0.3046, pruned_loss=0.06523, over 19686.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2939, pruned_loss=0.06918, over 3801613.60 frames. ], batch size: 53, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:37:43,032 INFO [train.py:903] (0/4) Epoch 18, batch 1800, loss[loss=0.2152, simple_loss=0.2916, pruned_loss=0.06935, over 19751.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06844, over 3813466.78 frames. ], batch size: 54, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:38:23,380 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.803e+02 6.041e+02 7.952e+02 1.877e+03, threshold=1.208e+03, percent-clipped=3.0 +2023-04-02 11:38:42,058 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 11:38:45,252 INFO [train.py:903] (0/4) Epoch 18, batch 1850, loss[loss=0.1968, simple_loss=0.2876, pruned_loss=0.05299, over 19304.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.293, pruned_loss=0.0684, over 3819431.43 frames. 
], batch size: 70, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:18,908 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 11:39:19,282 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117954.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:39:47,632 INFO [train.py:903] (0/4) Epoch 18, batch 1900, loss[loss=0.1833, simple_loss=0.2758, pruned_loss=0.04544, over 19587.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06795, over 3826859.96 frames. ], batch size: 57, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:51,607 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:40:03,248 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 11:40:07,853 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 11:40:15,686 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-118000.pt +2023-04-02 11:40:27,936 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.120e+02 6.309e+02 7.973e+02 1.539e+03, threshold=1.262e+03, percent-clipped=6.0 +2023-04-02 11:40:34,575 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 11:40:39,489 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-02 11:40:45,014 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6556, 2.3965, 1.8289, 1.6463, 2.2335, 1.5370, 1.3547, 1.9325], + device='cuda:0'), covar=tensor([0.1057, 0.0739, 0.1004, 0.0815, 0.0490, 0.1137, 0.0793, 0.0504], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0314, 0.0335, 0.0263, 0.0246, 0.0335, 0.0293, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:40:48,203 INFO [train.py:903] (0/4) Epoch 18, batch 1950, loss[loss=0.244, simple_loss=0.315, pruned_loss=0.08649, over 19324.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2938, pruned_loss=0.06883, over 3818287.95 frames. ], batch size: 66, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:41:28,980 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:33,522 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:49,955 INFO [train.py:903] (0/4) Epoch 18, batch 2000, loss[loss=0.1836, simple_loss=0.2637, pruned_loss=0.05176, over 19760.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06876, over 3815880.20 frames. ], batch size: 46, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:42:05,375 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:42:31,795 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 5.367e+02 6.632e+02 8.072e+02 1.503e+03, threshold=1.326e+03, percent-clipped=5.0 +2023-04-02 11:42:42,569 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-02 11:42:46,431 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 11:42:54,014 INFO [train.py:903] (0/4) Epoch 18, batch 2050, loss[loss=0.2477, simple_loss=0.3171, pruned_loss=0.0892, over 13528.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2926, pruned_loss=0.06812, over 3812459.36 frames. ], batch size: 136, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:42:58,526 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0886, 5.4033, 3.2079, 4.7681, 1.1020, 5.5396, 5.3781, 5.5687], + device='cuda:0'), covar=tensor([0.0370, 0.0945, 0.1811, 0.0671, 0.4181, 0.0487, 0.0734, 0.0888], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0396, 0.0481, 0.0339, 0.0400, 0.0415, 0.0411, 0.0442], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:43:06,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 11:43:07,430 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 11:43:26,112 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 11:43:55,358 INFO [train.py:903] (0/4) Epoch 18, batch 2100, loss[loss=0.2171, simple_loss=0.3055, pruned_loss=0.06437, over 19091.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2934, pruned_loss=0.06847, over 3819841.75 frames. ], batch size: 69, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:44:07,037 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:44:21,271 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 11:44:29,248 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 11:44:36,110 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.186e+02 6.689e+02 8.493e+02 1.656e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 11:44:44,896 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 11:44:56,300 INFO [train.py:903] (0/4) Epoch 18, batch 2150, loss[loss=0.2175, simple_loss=0.2993, pruned_loss=0.06788, over 19610.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2923, pruned_loss=0.06783, over 3827420.05 frames. ], batch size: 57, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:45:57,805 INFO [train.py:903] (0/4) Epoch 18, batch 2200, loss[loss=0.2281, simple_loss=0.3134, pruned_loss=0.07141, over 19741.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2929, pruned_loss=0.06814, over 3836358.81 frames. 
], batch size: 63, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:46:13,129 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:37,320 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:39,293 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 5.366e+02 7.280e+02 9.450e+02 2.114e+03, threshold=1.456e+03, percent-clipped=6.0 +2023-04-02 11:47:01,197 INFO [train.py:903] (0/4) Epoch 18, batch 2250, loss[loss=0.2096, simple_loss=0.2937, pruned_loss=0.06275, over 19572.00 frames. ], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.06902, over 3822115.67 frames. ], batch size: 52, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:47:21,583 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9186, 2.7506, 2.0901, 2.1321, 1.8750, 2.4128, 1.1027, 2.0448], + device='cuda:0'), covar=tensor([0.0576, 0.0582, 0.0669, 0.1020, 0.1090, 0.0968, 0.1218, 0.0897], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0347, 0.0346, 0.0373, 0.0448, 0.0380, 0.0327, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 11:47:27,273 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:47:54,144 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 11:48:02,754 INFO [train.py:903] (0/4) Epoch 18, batch 2300, loss[loss=0.1835, simple_loss=0.2648, pruned_loss=0.0511, over 19752.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2947, pruned_loss=0.06909, over 3824270.17 frames. ], batch size: 51, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:48:15,073 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 11:48:34,716 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:48:44,940 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.253e+02 6.243e+02 7.561e+02 1.558e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 11:49:05,628 INFO [train.py:903] (0/4) Epoch 18, batch 2350, loss[loss=0.2035, simple_loss=0.2874, pruned_loss=0.05983, over 19667.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2941, pruned_loss=0.06834, over 3822596.68 frames. ], batch size: 59, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:49:12,760 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1376, 1.3232, 1.6922, 1.1684, 2.5400, 3.3838, 3.0478, 3.5659], + device='cuda:0'), covar=tensor([0.1610, 0.3647, 0.3188, 0.2446, 0.0543, 0.0194, 0.0232, 0.0244], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0311, 0.0341, 0.0259, 0.0233, 0.0179, 0.0211, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 11:49:46,252 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 11:50:02,111 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-02 11:50:06,554 INFO [train.py:903] (0/4) Epoch 18, batch 2400, loss[loss=0.2069, simple_loss=0.293, pruned_loss=0.06038, over 19368.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2931, pruned_loss=0.06802, over 3833852.80 frames. ], batch size: 70, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:50:06,903 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3749, 2.2635, 2.1998, 2.5210, 2.2742, 2.0052, 2.1963, 2.3626], + device='cuda:0'), covar=tensor([0.0776, 0.1220, 0.1060, 0.0779, 0.1071, 0.0455, 0.0970, 0.0542], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0354, 0.0303, 0.0250, 0.0300, 0.0248, 0.0297, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:50:48,686 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.821e+02 5.593e+02 7.730e+02 1.750e+03, threshold=1.119e+03, percent-clipped=3.0 +2023-04-02 11:50:58,486 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118517.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:10,680 INFO [train.py:903] (0/4) Epoch 18, batch 2450, loss[loss=0.2607, simple_loss=0.3288, pruned_loss=0.09629, over 13552.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2932, pruned_loss=0.06824, over 3811473.42 frames. ], batch size: 135, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:51:16,268 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118530.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:52:12,594 INFO [train.py:903] (0/4) Epoch 18, batch 2500, loss[loss=0.2616, simple_loss=0.3305, pruned_loss=0.09632, over 18979.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.294, pruned_loss=0.06882, over 3792194.57 frames. ], batch size: 75, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:52:53,545 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 5.081e+02 6.040e+02 7.266e+02 1.380e+03, threshold=1.208e+03, percent-clipped=1.0 +2023-04-02 11:53:13,676 INFO [train.py:903] (0/4) Epoch 18, batch 2550, loss[loss=0.2735, simple_loss=0.3387, pruned_loss=0.1041, over 13162.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2942, pruned_loss=0.06895, over 3810612.74 frames. ], batch size: 136, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:53:19,800 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118631.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:37,329 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-02 11:53:37,917 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:42,634 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:45,542 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:06,487 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 11:54:15,893 INFO [train.py:903] (0/4) Epoch 18, batch 2600, loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06247, over 19729.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.292, pruned_loss=0.06778, over 3826728.75 frames. 
], batch size: 51, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:54:34,614 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:56,095 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.742e+02 5.749e+02 7.114e+02 1.367e+03, threshold=1.150e+03, percent-clipped=3.0 +2023-04-02 11:55:16,574 INFO [train.py:903] (0/4) Epoch 18, batch 2650, loss[loss=0.2126, simple_loss=0.277, pruned_loss=0.07408, over 19081.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2922, pruned_loss=0.06802, over 3829202.29 frames. ], batch size: 42, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:55:33,739 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 11:55:42,225 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:06,112 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:15,292 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:18,246 INFO [train.py:903] (0/4) Epoch 18, batch 2700, loss[loss=0.1958, simple_loss=0.2776, pruned_loss=0.05697, over 19597.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2927, pruned_loss=0.06817, over 3827023.36 frames. ], batch size: 52, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:56:44,770 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118798.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:49,042 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. limit=5.0 +2023-04-02 11:56:55,783 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:59,036 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.095e+02 6.154e+02 8.195e+02 1.746e+03, threshold=1.231e+03, percent-clipped=5.0 +2023-04-02 11:57:04,011 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9955, 5.3680, 3.2680, 4.7253, 0.8144, 5.4863, 5.3765, 5.4735], + device='cuda:0'), covar=tensor([0.0385, 0.0803, 0.1590, 0.0652, 0.4399, 0.0494, 0.0703, 0.0894], + device='cuda:0'), in_proj_covar=tensor([0.0483, 0.0394, 0.0480, 0.0341, 0.0402, 0.0419, 0.0413, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:57:06,542 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0244, 2.2096, 1.5742, 2.1257, 2.3542, 1.4957, 1.6742, 1.9519], + device='cuda:0'), covar=tensor([0.1245, 0.1650, 0.1984, 0.1259, 0.1406, 0.1146, 0.1868, 0.1046], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0353, 0.0299, 0.0248, 0.0298, 0.0246, 0.0295, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:57:20,475 INFO [train.py:903] (0/4) Epoch 18, batch 2750, loss[loss=0.1841, simple_loss=0.2538, pruned_loss=0.0572, over 19719.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2919, pruned_loss=0.06776, over 3830977.54 frames. 
], batch size: 46, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:15,357 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4361, 1.4389, 1.7466, 1.6453, 2.5505, 2.2210, 2.6687, 1.1210], + device='cuda:0'), covar=tensor([0.2573, 0.4531, 0.2781, 0.2057, 0.1656, 0.2288, 0.1612, 0.4518], + device='cuda:0'), in_proj_covar=tensor([0.0519, 0.0622, 0.0679, 0.0468, 0.0615, 0.0521, 0.0655, 0.0530], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 11:58:23,106 INFO [train.py:903] (0/4) Epoch 18, batch 2800, loss[loss=0.2238, simple_loss=0.3046, pruned_loss=0.07147, over 19676.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2917, pruned_loss=0.06782, over 3829345.09 frames. ], batch size: 58, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:41,697 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2482, 2.1062, 1.9783, 1.8298, 1.5776, 1.8219, 0.6521, 1.1730], + device='cuda:0'), covar=tensor([0.0555, 0.0592, 0.0432, 0.0705, 0.1245, 0.0828, 0.1204, 0.0958], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0348, 0.0347, 0.0374, 0.0448, 0.0380, 0.0329, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 11:58:44,011 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8276, 1.5202, 1.7139, 1.6941, 4.3901, 1.1656, 2.6050, 4.7232], + device='cuda:0'), covar=tensor([0.0455, 0.2744, 0.2856, 0.1984, 0.0693, 0.2591, 0.1399, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0399, 0.0357, 0.0378, 0.0342, 0.0365, 0.0345, 0.0366, 0.0389], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 11:58:54,266 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:59:03,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.804e+02 6.148e+02 8.494e+02 1.418e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 11:59:24,485 INFO [train.py:903] (0/4) Epoch 18, batch 2850, loss[loss=0.2269, simple_loss=0.307, pruned_loss=0.07342, over 19778.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2921, pruned_loss=0.06821, over 3811337.07 frames. ], batch size: 56, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:59:24,913 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118926.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:22,862 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 12:00:26,012 INFO [train.py:903] (0/4) Epoch 18, batch 2900, loss[loss=0.2018, simple_loss=0.2663, pruned_loss=0.06867, over 19764.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2923, pruned_loss=0.06803, over 3827665.34 frames. 
], batch size: 47, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:00:27,551 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6643, 1.6903, 1.5813, 1.3764, 1.2939, 1.4285, 0.2829, 0.6650], + device='cuda:0'), covar=tensor([0.0586, 0.0560, 0.0372, 0.0555, 0.1050, 0.0697, 0.1201, 0.0968], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0349, 0.0348, 0.0375, 0.0449, 0.0381, 0.0330, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:00:46,045 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:54,343 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-02 12:00:54,927 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3285, 1.3716, 1.6268, 1.5998, 2.4678, 2.0829, 2.6483, 1.0955], + device='cuda:0'), covar=tensor([0.2676, 0.4514, 0.2801, 0.2122, 0.1707, 0.2436, 0.1626, 0.4740], + device='cuda:0'), in_proj_covar=tensor([0.0520, 0.0625, 0.0682, 0.0469, 0.0618, 0.0524, 0.0659, 0.0533], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 12:00:56,050 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:58,541 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 12:01:05,215 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 5.127e+02 5.908e+02 8.371e+02 2.467e+03, threshold=1.182e+03, percent-clipped=10.0 +2023-04-02 12:01:21,042 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:25,113 INFO [train.py:903] (0/4) Epoch 18, batch 2950, loss[loss=0.2475, simple_loss=0.3126, pruned_loss=0.09116, over 19591.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2932, pruned_loss=0.06849, over 3819581.56 frames. ], batch size: 52, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:01:26,618 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:50,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:08,427 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:24,519 INFO [train.py:903] (0/4) Epoch 18, batch 3000, loss[loss=0.1928, simple_loss=0.2828, pruned_loss=0.05143, over 19794.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2932, pruned_loss=0.06857, over 3829159.00 frames. ], batch size: 56, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:02:24,520 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 12:02:37,007 INFO [train.py:937] (0/4) Epoch 18, validation: loss=0.1707, simple_loss=0.2711, pruned_loss=0.03521, over 944034.00 frames. 
+2023-04-02 12:02:37,008 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 12:02:37,363 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5657, 1.1633, 1.3566, 1.2397, 2.1978, 0.9719, 2.0163, 2.4244], + device='cuda:0'), covar=tensor([0.0680, 0.2810, 0.2836, 0.1671, 0.0860, 0.2103, 0.1053, 0.0502], + device='cuda:0'), in_proj_covar=tensor([0.0398, 0.0359, 0.0379, 0.0343, 0.0366, 0.0347, 0.0367, 0.0390], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:02:40,530 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 12:02:50,778 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:57,654 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:58,916 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0029, 1.9534, 1.8016, 1.6087, 1.3046, 1.5676, 0.5775, 0.9684], + device='cuda:0'), covar=tensor([0.0830, 0.0728, 0.0506, 0.0855, 0.1515, 0.1074, 0.1351, 0.1243], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0348, 0.0349, 0.0374, 0.0449, 0.0381, 0.0329, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:03:17,168 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,950 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.504e+02 6.538e+02 8.295e+02 4.074e+03, threshold=1.308e+03, percent-clipped=8.0 +2023-04-02 12:03:20,402 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119111.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:37,845 INFO [train.py:903] (0/4) Epoch 18, batch 3050, loss[loss=0.2038, simple_loss=0.2893, pruned_loss=0.05917, over 19670.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2928, pruned_loss=0.06804, over 3837556.28 frames. ], batch size: 58, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:04:37,425 INFO [train.py:903] (0/4) Epoch 18, batch 3100, loss[loss=0.1923, simple_loss=0.2697, pruned_loss=0.05745, over 17744.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.294, pruned_loss=0.06873, over 3815797.69 frames. ], batch size: 39, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:18,222 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.828e+02 5.979e+02 7.400e+02 1.693e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 12:05:39,401 INFO [train.py:903] (0/4) Epoch 18, batch 3150, loss[loss=0.199, simple_loss=0.2797, pruned_loss=0.05916, over 19698.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2939, pruned_loss=0.06884, over 3813515.74 frames. 
], batch size: 53, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:57,686 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5693, 1.7019, 2.0840, 1.9415, 3.5090, 2.9395, 3.8945, 1.7496], + device='cuda:0'), covar=tensor([0.2376, 0.4143, 0.2675, 0.1722, 0.1252, 0.1812, 0.1257, 0.3863], + device='cuda:0'), in_proj_covar=tensor([0.0517, 0.0619, 0.0676, 0.0466, 0.0611, 0.0517, 0.0652, 0.0528], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:06:06,960 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 12:06:35,346 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1984, 1.1664, 1.2072, 1.3154, 1.0677, 1.2896, 1.3583, 1.2244], + device='cuda:0'), covar=tensor([0.0929, 0.1029, 0.1088, 0.0705, 0.0847, 0.0874, 0.0804, 0.0830], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0245, 0.0228, 0.0211, 0.0190, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 12:06:39,658 INFO [train.py:903] (0/4) Epoch 18, batch 3200, loss[loss=0.2067, simple_loss=0.284, pruned_loss=0.06471, over 12995.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2944, pruned_loss=0.06924, over 3811569.17 frames. ], batch size: 136, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:06:45,759 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6287, 2.6019, 2.4249, 2.7074, 2.5382, 2.3267, 2.2292, 2.7312], + device='cuda:0'), covar=tensor([0.0888, 0.1408, 0.1289, 0.1010, 0.1418, 0.0498, 0.1267, 0.0585], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0355, 0.0303, 0.0250, 0.0301, 0.0248, 0.0298, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:07:18,203 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.710e+02 5.964e+02 8.137e+02 2.705e+03, threshold=1.193e+03, percent-clipped=4.0 +2023-04-02 12:07:39,013 INFO [train.py:903] (0/4) Epoch 18, batch 3250, loss[loss=0.1943, simple_loss=0.271, pruned_loss=0.05882, over 19377.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2938, pruned_loss=0.06888, over 3812715.13 frames. ], batch size: 48, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:24,454 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:08:37,660 INFO [train.py:903] (0/4) Epoch 18, batch 3300, loss[loss=0.2185, simple_loss=0.3033, pruned_loss=0.06691, over 18658.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2928, pruned_loss=0.06853, over 3813311.95 frames. ], batch size: 74, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:42,298 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 12:08:53,579 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:09:16,517 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.2249, 5.6941, 3.0744, 4.8855, 1.4058, 5.7693, 5.5945, 5.7893], + device='cuda:0'), covar=tensor([0.0346, 0.0809, 0.1701, 0.0696, 0.3536, 0.0466, 0.0744, 0.0863], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0393, 0.0478, 0.0340, 0.0397, 0.0415, 0.0410, 0.0442], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:09:17,496 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.369e+02 6.623e+02 8.148e+02 1.799e+03, threshold=1.325e+03, percent-clipped=5.0 +2023-04-02 12:09:37,823 INFO [train.py:903] (0/4) Epoch 18, batch 3350, loss[loss=0.2014, simple_loss=0.2721, pruned_loss=0.06535, over 19770.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2926, pruned_loss=0.06836, over 3806914.71 frames. ], batch size: 48, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:09:50,645 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:11,908 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:37,463 INFO [train.py:903] (0/4) Epoch 18, batch 3400, loss[loss=0.2058, simple_loss=0.2855, pruned_loss=0.06309, over 19686.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2932, pruned_loss=0.06862, over 3808603.97 frames. ], batch size: 60, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:18,512 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 4.894e+02 5.911e+02 7.983e+02 1.559e+03, threshold=1.182e+03, percent-clipped=2.0 +2023-04-02 12:11:37,556 INFO [train.py:903] (0/4) Epoch 18, batch 3450, loss[loss=0.2031, simple_loss=0.2752, pruned_loss=0.06549, over 19756.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2935, pruned_loss=0.06862, over 3799522.71 frames. ], batch size: 48, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:43,125 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 12:11:45,595 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:09,581 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119552.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:31,768 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119570.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:38,213 INFO [train.py:903] (0/4) Epoch 18, batch 3500, loss[loss=0.2415, simple_loss=0.3299, pruned_loss=0.07651, over 19655.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2936, pruned_loss=0.06816, over 3808217.84 frames. ], batch size: 60, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:13:20,028 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.884e+02 6.516e+02 7.989e+02 1.670e+03, threshold=1.303e+03, percent-clipped=4.0 +2023-04-02 12:13:39,060 INFO [train.py:903] (0/4) Epoch 18, batch 3550, loss[loss=0.2562, simple_loss=0.3275, pruned_loss=0.09245, over 19138.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2922, pruned_loss=0.06758, over 3813459.44 frames. 
], batch size: 69, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:14:15,094 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 12:14:39,534 INFO [train.py:903] (0/4) Epoch 18, batch 3600, loss[loss=0.1889, simple_loss=0.269, pruned_loss=0.05435, over 19353.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2923, pruned_loss=0.06777, over 3801931.22 frames. ], batch size: 47, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:14:47,953 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:15:20,526 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.936e+02 6.005e+02 7.350e+02 2.220e+03, threshold=1.201e+03, percent-clipped=3.0 +2023-04-02 12:15:39,373 INFO [train.py:903] (0/4) Epoch 18, batch 3650, loss[loss=0.2231, simple_loss=0.288, pruned_loss=0.07913, over 19795.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2914, pruned_loss=0.06728, over 3803584.37 frames. ], batch size: 47, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:40,016 INFO [train.py:903] (0/4) Epoch 18, batch 3700, loss[loss=0.2104, simple_loss=0.2799, pruned_loss=0.07047, over 19316.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2922, pruned_loss=0.06788, over 3795521.90 frames. ], batch size: 44, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:58,974 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 12:17:19,657 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:21,528 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 5.017e+02 5.995e+02 7.636e+02 1.424e+03, threshold=1.199e+03, percent-clipped=3.0 +2023-04-02 12:17:32,236 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9273, 1.2152, 1.4823, 0.6761, 2.0146, 2.3797, 2.1053, 2.5971], + device='cuda:0'), covar=tensor([0.1685, 0.3697, 0.3439, 0.2738, 0.0622, 0.0322, 0.0357, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0309, 0.0340, 0.0258, 0.0233, 0.0178, 0.0211, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 12:17:40,480 INFO [train.py:903] (0/4) Epoch 18, batch 3750, loss[loss=0.2342, simple_loss=0.3114, pruned_loss=0.07848, over 19646.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2934, pruned_loss=0.06845, over 3791104.22 frames. 
], batch size: 55, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:17:40,907 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119826.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:48,788 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:58,558 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8869, 1.9860, 1.8560, 2.8467, 1.9053, 2.5833, 2.0416, 1.6043], + device='cuda:0'), covar=tensor([0.4802, 0.4160, 0.2644, 0.2736, 0.4401, 0.2291, 0.5776, 0.4952], + device='cuda:0'), in_proj_covar=tensor([0.0870, 0.0926, 0.0696, 0.0921, 0.0850, 0.0789, 0.0827, 0.0766], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 12:18:10,599 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:20,874 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5203, 4.1114, 4.2441, 4.2582, 1.7345, 4.0078, 3.4873, 3.9472], + device='cuda:0'), covar=tensor([0.1656, 0.0802, 0.0567, 0.0672, 0.5715, 0.0894, 0.0685, 0.1095], + device='cuda:0'), in_proj_covar=tensor([0.0757, 0.0696, 0.0905, 0.0790, 0.0806, 0.0656, 0.0546, 0.0839], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 12:18:33,192 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3800, 1.4348, 1.7238, 1.6251, 2.2642, 2.0457, 2.3259, 1.1080], + device='cuda:0'), covar=tensor([0.2310, 0.3949, 0.2413, 0.1797, 0.1457, 0.2091, 0.1346, 0.4063], + device='cuda:0'), in_proj_covar=tensor([0.0518, 0.0618, 0.0678, 0.0467, 0.0611, 0.0519, 0.0654, 0.0530], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:18:39,735 INFO [train.py:903] (0/4) Epoch 18, batch 3800, loss[loss=0.2403, simple_loss=0.3259, pruned_loss=0.07737, over 18818.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2926, pruned_loss=0.06777, over 3790915.27 frames. ], batch size: 74, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:18:39,891 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:19:12,059 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 12:19:22,040 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.499e+02 6.857e+02 8.596e+02 2.059e+03, threshold=1.371e+03, percent-clipped=8.0 +2023-04-02 12:19:41,466 INFO [train.py:903] (0/4) Epoch 18, batch 3850, loss[loss=0.1863, simple_loss=0.2712, pruned_loss=0.05067, over 19676.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2917, pruned_loss=0.0669, over 3813470.81 frames. ], batch size: 58, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:20:40,740 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 12:20:43,324 INFO [train.py:903] (0/4) Epoch 18, batch 3900, loss[loss=0.2092, simple_loss=0.287, pruned_loss=0.0657, over 19524.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2908, pruned_loss=0.06634, over 3800332.49 frames. 
], batch size: 56, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:02,815 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:14,032 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-120000.pt +2023-04-02 12:21:26,390 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.199e+02 4.757e+02 5.722e+02 7.251e+02 1.425e+03, threshold=1.144e+03, percent-clipped=2.0 +2023-04-02 12:21:45,630 INFO [train.py:903] (0/4) Epoch 18, batch 3950, loss[loss=0.19, simple_loss=0.2606, pruned_loss=0.05968, over 19723.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2916, pruned_loss=0.06676, over 3800472.14 frames. ], batch size: 46, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:46,900 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=120027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:49,998 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 12:22:05,628 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0311, 5.4730, 3.1358, 4.7858, 0.9884, 5.6389, 5.4231, 5.6421], + device='cuda:0'), covar=tensor([0.0356, 0.0755, 0.1714, 0.0776, 0.4151, 0.0471, 0.0718, 0.0855], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0392, 0.0479, 0.0341, 0.0396, 0.0415, 0.0408, 0.0442], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:22:47,957 INFO [train.py:903] (0/4) Epoch 18, batch 4000, loss[loss=0.2256, simple_loss=0.3103, pruned_loss=0.07051, over 19672.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2914, pruned_loss=0.06669, over 3807147.50 frames. ], batch size: 60, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:23:29,303 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 4.630e+02 5.764e+02 7.444e+02 1.534e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-02 12:23:36,024 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 12:23:49,504 INFO [train.py:903] (0/4) Epoch 18, batch 4050, loss[loss=0.1783, simple_loss=0.2621, pruned_loss=0.04722, over 19668.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2914, pruned_loss=0.06679, over 3805101.01 frames. ], batch size: 53, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:24:08,092 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:24:47,334 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2353, 1.1670, 1.1529, 1.3591, 1.0771, 1.2989, 1.2970, 1.2605], + device='cuda:0'), covar=tensor([0.0924, 0.1032, 0.1139, 0.0688, 0.0873, 0.0830, 0.0837, 0.0796], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0244, 0.0227, 0.0208, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 12:24:49,139 INFO [train.py:903] (0/4) Epoch 18, batch 4100, loss[loss=0.2358, simple_loss=0.311, pruned_loss=0.08031, over 17961.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2924, pruned_loss=0.06777, over 3810127.78 frames. 
], batch size: 83, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:25:24,882 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 12:25:29,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.087e+02 6.190e+02 7.903e+02 1.294e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 12:25:48,079 INFO [train.py:903] (0/4) Epoch 18, batch 4150, loss[loss=0.2299, simple_loss=0.3124, pruned_loss=0.07373, over 19250.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2923, pruned_loss=0.06759, over 3814175.96 frames. ], batch size: 66, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:14,748 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:46,342 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:50,729 INFO [train.py:903] (0/4) Epoch 18, batch 4200, loss[loss=0.218, simple_loss=0.2988, pruned_loss=0.06856, over 18207.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2923, pruned_loss=0.06744, over 3803384.14 frames. ], batch size: 83, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:57,166 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 12:27:06,674 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0738, 1.9815, 1.8140, 1.5935, 1.4143, 1.6042, 0.5226, 1.0537], + device='cuda:0'), covar=tensor([0.0540, 0.0569, 0.0439, 0.0720, 0.1161, 0.0892, 0.1209, 0.0977], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0345, 0.0346, 0.0372, 0.0448, 0.0379, 0.0325, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:27:30,830 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 4.968e+02 6.385e+02 7.954e+02 1.571e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 12:27:51,828 INFO [train.py:903] (0/4) Epoch 18, batch 4250, loss[loss=0.2117, simple_loss=0.287, pruned_loss=0.06816, over 19635.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2923, pruned_loss=0.06746, over 3802232.07 frames. ], batch size: 50, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:28:09,653 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 12:28:18,896 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 12:28:19,572 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.56 vs. limit=5.0 +2023-04-02 12:28:51,544 INFO [train.py:903] (0/4) Epoch 18, batch 4300, loss[loss=0.26, simple_loss=0.3287, pruned_loss=0.09564, over 19665.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2933, pruned_loss=0.06832, over 3813853.69 frames. ], batch size: 53, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:19,840 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:34,125 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.084e+02 5.969e+02 7.631e+02 1.294e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 12:29:46,151 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-02 12:29:49,854 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120423.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:52,876 INFO [train.py:903] (0/4) Epoch 18, batch 4350, loss[loss=0.1816, simple_loss=0.2556, pruned_loss=0.05379, over 19722.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2914, pruned_loss=0.06715, over 3826596.77 frames. ], batch size: 46, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:56,626 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0118, 1.1650, 1.5905, 1.0901, 2.4210, 3.2802, 3.0226, 3.5403], + device='cuda:0'), covar=tensor([0.1870, 0.4101, 0.3499, 0.2556, 0.0651, 0.0206, 0.0265, 0.0275], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0311, 0.0339, 0.0258, 0.0234, 0.0179, 0.0211, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 12:30:20,690 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7478, 1.6508, 1.9418, 2.0227, 4.1131, 1.3751, 2.7763, 4.5363], + device='cuda:0'), covar=tensor([0.0440, 0.2837, 0.2689, 0.1786, 0.0762, 0.2639, 0.1302, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0394, 0.0354, 0.0374, 0.0338, 0.0363, 0.0343, 0.0364, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:30:55,182 INFO [train.py:903] (0/4) Epoch 18, batch 4400, loss[loss=0.2471, simple_loss=0.3144, pruned_loss=0.08991, over 19350.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.06754, over 3792211.62 frames. ], batch size: 70, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:31:19,628 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 12:31:28,329 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 12:31:35,103 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.092e+02 6.306e+02 8.223e+02 1.474e+03, threshold=1.261e+03, percent-clipped=7.0 +2023-04-02 12:31:55,629 INFO [train.py:903] (0/4) Epoch 18, batch 4450, loss[loss=0.1851, simple_loss=0.2584, pruned_loss=0.05597, over 15699.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2915, pruned_loss=0.06779, over 3788989.64 frames. ], batch size: 34, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:32:55,857 INFO [train.py:903] (0/4) Epoch 18, batch 4500, loss[loss=0.2298, simple_loss=0.3213, pruned_loss=0.06915, over 19674.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2914, pruned_loss=0.0673, over 3799205.62 frames. ], batch size: 60, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:33:37,597 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.202e+02 6.117e+02 7.756e+02 2.072e+03, threshold=1.223e+03, percent-clipped=4.0 +2023-04-02 12:33:56,192 INFO [train.py:903] (0/4) Epoch 18, batch 4550, loss[loss=0.2217, simple_loss=0.3117, pruned_loss=0.06582, over 19602.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2926, pruned_loss=0.06776, over 3803110.87 frames. ], batch size: 61, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:34:05,861 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 12:34:29,490 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 12:34:56,267 INFO [train.py:903] (0/4) Epoch 18, batch 4600, loss[loss=0.1971, simple_loss=0.2706, pruned_loss=0.06183, over 19789.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06746, over 3812576.67 frames. ], batch size: 49, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:35:17,287 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=120694.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:35:27,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3757, 1.2955, 1.3185, 1.6934, 1.3268, 1.6481, 1.6840, 1.4593], + device='cuda:0'), covar=tensor([0.0865, 0.0989, 0.1049, 0.0718, 0.0880, 0.0763, 0.0822, 0.0744], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 12:35:35,196 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 4.865e+02 6.169e+02 7.995e+02 1.948e+03, threshold=1.234e+03, percent-clipped=9.0 +2023-04-02 12:35:55,081 INFO [train.py:903] (0/4) Epoch 18, batch 4650, loss[loss=0.199, simple_loss=0.2744, pruned_loss=0.06186, over 19720.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2922, pruned_loss=0.06754, over 3828450.73 frames. ], batch size: 45, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:36:14,447 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 12:36:23,040 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:36:24,605 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 12:36:55,850 INFO [train.py:903] (0/4) Epoch 18, batch 4700, loss[loss=0.2151, simple_loss=0.2984, pruned_loss=0.06589, over 19659.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2915, pruned_loss=0.06711, over 3826119.69 frames. ], batch size: 58, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:37:18,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 12:37:37,345 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.224e+02 6.544e+02 8.077e+02 2.112e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 12:37:51,204 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6420, 1.4936, 1.5038, 2.0074, 1.5502, 1.8375, 1.9229, 1.7081], + device='cuda:0'), covar=tensor([0.0809, 0.0932, 0.1037, 0.0763, 0.0863, 0.0743, 0.0816, 0.0717], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0223, 0.0225, 0.0244, 0.0227, 0.0209, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 12:37:54,545 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 12:37:56,117 INFO [train.py:903] (0/4) Epoch 18, batch 4750, loss[loss=0.222, simple_loss=0.2907, pruned_loss=0.07659, over 19777.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2915, pruned_loss=0.06741, over 3823697.37 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:38:57,285 INFO [train.py:903] (0/4) Epoch 18, batch 4800, loss[loss=0.259, simple_loss=0.3248, pruned_loss=0.09659, over 18810.00 frames. 
], tot_loss[loss=0.2139, simple_loss=0.2917, pruned_loss=0.06799, over 3805748.93 frames. ], batch size: 74, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:39:38,286 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.063e+02 6.517e+02 8.294e+02 1.429e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-04-02 12:39:57,795 INFO [train.py:903] (0/4) Epoch 18, batch 4850, loss[loss=0.2019, simple_loss=0.2924, pruned_loss=0.05574, over 19533.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2915, pruned_loss=0.06733, over 3802187.89 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:40:19,477 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 12:40:38,938 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 12:40:44,414 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 12:40:45,486 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 12:40:55,312 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 12:40:57,465 INFO [train.py:903] (0/4) Epoch 18, batch 4900, loss[loss=0.1807, simple_loss=0.2631, pruned_loss=0.0492, over 19840.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2931, pruned_loss=0.0684, over 3807575.58 frames. ], batch size: 52, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:41:15,403 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 12:41:38,441 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.463e+02 5.379e+02 6.693e+02 8.152e+02 1.326e+03, threshold=1.339e+03, percent-clipped=1.0 +2023-04-02 12:41:50,618 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:41:56,822 INFO [train.py:903] (0/4) Epoch 18, batch 4950, loss[loss=0.1885, simple_loss=0.2667, pruned_loss=0.05514, over 19417.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06794, over 3821429.66 frames. ], batch size: 48, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:42:10,889 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121038.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:42:13,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 12:42:36,736 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 12:42:55,214 INFO [train.py:903] (0/4) Epoch 18, batch 5000, loss[loss=0.2401, simple_loss=0.3136, pruned_loss=0.08329, over 19589.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2925, pruned_loss=0.06797, over 3824463.38 frames. ], batch size: 61, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:43:05,407 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-02 12:43:11,791 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8995, 1.6355, 1.7209, 2.6286, 1.8064, 2.2335, 2.2783, 1.9582], + device='cuda:0'), covar=tensor([0.0881, 0.1015, 0.1097, 0.0858, 0.1010, 0.0764, 0.0948, 0.0747], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0244, 0.0229, 0.0210, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 12:43:16,148 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 12:43:36,120 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.049e+02 6.044e+02 8.198e+02 1.739e+03, threshold=1.209e+03, percent-clipped=8.0 +2023-04-02 12:43:55,697 INFO [train.py:903] (0/4) Epoch 18, batch 5050, loss[loss=0.1851, simple_loss=0.2768, pruned_loss=0.04673, over 19779.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2931, pruned_loss=0.06774, over 3829847.46 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:44:27,536 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:44:29,513 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 12:44:55,825 INFO [train.py:903] (0/4) Epoch 18, batch 5100, loss[loss=0.176, simple_loss=0.2582, pruned_loss=0.04693, over 19614.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2931, pruned_loss=0.0683, over 3819291.34 frames. ], batch size: 50, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:45:03,061 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:45:05,058 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 12:45:08,125 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 12:45:12,784 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 12:45:37,241 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 4.928e+02 6.063e+02 8.354e+02 2.244e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 12:45:54,392 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9240, 1.3540, 1.0611, 1.0210, 1.1673, 0.9918, 1.0680, 1.2437], + device='cuda:0'), covar=tensor([0.0592, 0.0850, 0.1109, 0.0751, 0.0595, 0.1354, 0.0549, 0.0494], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0313, 0.0326, 0.0259, 0.0247, 0.0332, 0.0292, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:45:56,107 INFO [train.py:903] (0/4) Epoch 18, batch 5150, loss[loss=0.2557, simple_loss=0.3297, pruned_loss=0.09087, over 19663.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2942, pruned_loss=0.06922, over 3811755.08 frames. ], batch size: 55, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:46:05,296 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 12:46:40,463 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 12:46:55,814 INFO [train.py:903] (0/4) Epoch 18, batch 5200, loss[loss=0.2005, simple_loss=0.2706, pruned_loss=0.0652, over 19041.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2933, pruned_loss=0.06857, over 3826616.21 frames. ], batch size: 42, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:03,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4001, 1.1655, 1.3999, 1.6012, 2.9831, 1.1601, 2.2289, 3.2799], + device='cuda:0'), covar=tensor([0.0510, 0.2932, 0.2942, 0.1692, 0.0707, 0.2405, 0.1327, 0.0324], + device='cuda:0'), in_proj_covar=tensor([0.0394, 0.0355, 0.0376, 0.0341, 0.0365, 0.0347, 0.0366, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:47:08,979 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 12:47:36,015 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.110e+02 6.332e+02 8.392e+02 3.036e+03, threshold=1.266e+03, percent-clipped=7.0 +2023-04-02 12:47:39,706 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0711, 1.3172, 1.6283, 1.0731, 2.5124, 3.3836, 3.0956, 3.6188], + device='cuda:0'), covar=tensor([0.1677, 0.3614, 0.3287, 0.2472, 0.0568, 0.0171, 0.0223, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0312, 0.0343, 0.0259, 0.0235, 0.0179, 0.0212, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 12:47:49,050 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 12:47:50,668 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3565, 1.4380, 1.7129, 1.5854, 2.5332, 2.1605, 2.6849, 1.0921], + device='cuda:0'), covar=tensor([0.2246, 0.3960, 0.2449, 0.1790, 0.1441, 0.2018, 0.1398, 0.4014], + device='cuda:0'), in_proj_covar=tensor([0.0517, 0.0621, 0.0680, 0.0466, 0.0612, 0.0519, 0.0653, 0.0531], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:47:54,556 INFO [train.py:903] (0/4) Epoch 18, batch 5250, loss[loss=0.2272, simple_loss=0.3124, pruned_loss=0.07103, over 19654.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2944, pruned_loss=0.06915, over 3814142.69 frames. ], batch size: 53, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:59,197 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121329.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:48:22,542 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4839, 1.5558, 1.7364, 1.7559, 2.6508, 2.3917, 2.7155, 1.1240], + device='cuda:0'), covar=tensor([0.2308, 0.4224, 0.2635, 0.1763, 0.1386, 0.1898, 0.1393, 0.4150], + device='cuda:0'), in_proj_covar=tensor([0.0517, 0.0620, 0.0680, 0.0466, 0.0611, 0.0518, 0.0653, 0.0531], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:48:40,804 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:48:54,019 INFO [train.py:903] (0/4) Epoch 18, batch 5300, loss[loss=0.194, simple_loss=0.2653, pruned_loss=0.0613, over 19740.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2941, pruned_loss=0.06912, over 3817407.05 frames. 
], batch size: 46, lr: 4.56e-03, grad_scale: 4.0 +2023-04-02 12:49:10,547 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 12:49:14,149 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5980, 1.6976, 1.9026, 2.0273, 1.5275, 1.9299, 1.9626, 1.7765], + device='cuda:0'), covar=tensor([0.3925, 0.3468, 0.1819, 0.2123, 0.3620, 0.1964, 0.4569, 0.3128], + device='cuda:0'), in_proj_covar=tensor([0.0870, 0.0928, 0.0695, 0.0923, 0.0851, 0.0788, 0.0826, 0.0764], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 12:49:33,462 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:35,383 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.404e+02 6.878e+02 8.269e+02 2.518e+03, threshold=1.376e+03, percent-clipped=11.0 +2023-04-02 12:49:52,480 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:53,395 INFO [train.py:903] (0/4) Epoch 18, batch 5350, loss[loss=0.21, simple_loss=0.2994, pruned_loss=0.06033, over 18838.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2946, pruned_loss=0.06869, over 3816923.41 frames. ], batch size: 74, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:03,586 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:50:26,219 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 12:50:53,959 INFO [train.py:903] (0/4) Epoch 18, batch 5400, loss[loss=0.2078, simple_loss=0.2914, pruned_loss=0.06216, over 19622.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2946, pruned_loss=0.06909, over 3813978.64 frames. ], batch size: 57, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:59,577 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121480.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:35,379 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 5.292e+02 6.238e+02 7.901e+02 1.766e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 12:51:54,669 INFO [train.py:903] (0/4) Epoch 18, batch 5450, loss[loss=0.2656, simple_loss=0.3383, pruned_loss=0.0964, over 19564.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2937, pruned_loss=0.06862, over 3823214.52 frames. ], batch size: 61, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:51:54,831 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:56,510 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:52:54,551 INFO [train.py:903] (0/4) Epoch 18, batch 5500, loss[loss=0.2332, simple_loss=0.3158, pruned_loss=0.07526, over 18734.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2934, pruned_loss=0.06836, over 3834371.67 frames. 
], batch size: 74, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:53:09,252 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0168, 1.8838, 1.6726, 2.0734, 1.8525, 1.7530, 1.6386, 1.9040], + device='cuda:0'), covar=tensor([0.1058, 0.1507, 0.1390, 0.1016, 0.1278, 0.0550, 0.1357, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0249, 0.0299, 0.0247, 0.0297, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:53:19,888 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 12:53:37,492 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.754e+02 5.891e+02 7.661e+02 1.861e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-02 12:53:55,068 INFO [train.py:903] (0/4) Epoch 18, batch 5550, loss[loss=0.1911, simple_loss=0.2805, pruned_loss=0.05083, over 19670.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2934, pruned_loss=0.06853, over 3835154.57 frames. ], batch size: 53, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:54:03,885 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 12:54:13,226 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121641.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:54:52,422 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 12:54:52,536 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121673.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:54:55,722 INFO [train.py:903] (0/4) Epoch 18, batch 5600, loss[loss=0.2292, simple_loss=0.3091, pruned_loss=0.07467, over 19365.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2932, pruned_loss=0.06856, over 3833889.44 frames. ], batch size: 66, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:55:15,344 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.3837, 5.7218, 3.5891, 5.0573, 1.9196, 5.8702, 5.8552, 5.9218], + device='cuda:0'), covar=tensor([0.0365, 0.0822, 0.1781, 0.0807, 0.3547, 0.0513, 0.0687, 0.0934], + device='cuda:0'), in_proj_covar=tensor([0.0484, 0.0391, 0.0479, 0.0340, 0.0392, 0.0418, 0.0408, 0.0439], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 12:55:37,555 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.280e+02 4.851e+02 5.729e+02 6.791e+02 1.695e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-04-02 12:55:56,731 INFO [train.py:903] (0/4) Epoch 18, batch 5650, loss[loss=0.2567, simple_loss=0.3305, pruned_loss=0.09148, over 19508.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2935, pruned_loss=0.06846, over 3826308.06 frames. ], batch size: 64, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:56:08,931 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:37,750 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121761.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:44,588 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-02 12:56:48,209 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:50,041 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.95 vs. limit=5.0 +2023-04-02 12:56:57,050 INFO [train.py:903] (0/4) Epoch 18, batch 5700, loss[loss=0.2122, simple_loss=0.2915, pruned_loss=0.06646, over 19787.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2929, pruned_loss=0.0682, over 3824096.84 frames. ], batch size: 56, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:10,997 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121788.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:57:39,340 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.821e+02 5.859e+02 7.608e+02 1.521e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-02 12:57:57,043 INFO [train.py:903] (0/4) Epoch 18, batch 5750, loss[loss=0.2237, simple_loss=0.3026, pruned_loss=0.07243, over 19648.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2932, pruned_loss=0.06831, over 3821843.32 frames. ], batch size: 58, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:58,074 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 12:58:05,923 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 12:58:11,392 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 12:58:42,346 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0160, 2.0061, 1.8296, 1.6767, 1.5993, 1.7026, 0.4921, 1.0033], + device='cuda:0'), covar=tensor([0.0483, 0.0507, 0.0361, 0.0555, 0.0952, 0.0641, 0.1047, 0.0879], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0347, 0.0349, 0.0375, 0.0451, 0.0379, 0.0329, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 12:58:43,856 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 12:58:57,512 INFO [train.py:903] (0/4) Epoch 18, batch 5800, loss[loss=0.2082, simple_loss=0.2802, pruned_loss=0.06805, over 19361.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06834, over 3831433.89 frames. ], batch size: 47, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:59:08,067 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:24,587 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:39,870 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.174e+02 6.386e+02 8.157e+02 2.937e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-02 12:59:54,653 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:59,639 INFO [train.py:903] (0/4) Epoch 18, batch 5850, loss[loss=0.1742, simple_loss=0.2559, pruned_loss=0.04632, over 19796.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2925, pruned_loss=0.06759, over 3820728.73 frames. 
], batch size: 49, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:00:05,813 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5279, 2.3853, 1.7701, 1.6556, 2.2090, 1.3776, 1.4607, 1.9338], + device='cuda:0'), covar=tensor([0.1059, 0.0747, 0.1018, 0.0789, 0.0530, 0.1235, 0.0738, 0.0486], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0313, 0.0327, 0.0257, 0.0245, 0.0331, 0.0291, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:00:51,267 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121968.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:01:00,886 INFO [train.py:903] (0/4) Epoch 18, batch 5900, loss[loss=0.1767, simple_loss=0.2601, pruned_loss=0.04668, over 19731.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2918, pruned_loss=0.06717, over 3820581.13 frames. ], batch size: 47, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:01:02,082 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 13:01:23,287 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 13:01:29,125 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-122000.pt +2023-04-02 13:01:39,543 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0311, 1.8333, 1.9233, 1.6748, 4.5318, 0.9346, 2.6083, 5.0325], + device='cuda:0'), covar=tensor([0.0356, 0.2482, 0.2579, 0.1934, 0.0700, 0.2842, 0.1362, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0390, 0.0352, 0.0373, 0.0338, 0.0364, 0.0346, 0.0363, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:01:43,749 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.950e+02 6.415e+02 8.144e+02 2.513e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-02 13:02:01,671 INFO [train.py:903] (0/4) Epoch 18, batch 5950, loss[loss=0.1739, simple_loss=0.2618, pruned_loss=0.04299, over 19760.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2916, pruned_loss=0.06714, over 3807653.44 frames. ], batch size: 54, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:02:24,075 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122044.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:02:42,870 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 13:02:53,760 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122069.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:03:01,813 INFO [train.py:903] (0/4) Epoch 18, batch 6000, loss[loss=0.3102, simple_loss=0.3572, pruned_loss=0.1316, over 12851.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2913, pruned_loss=0.06704, over 3809554.35 frames. ], batch size: 135, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:03:01,813 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 13:03:14,295 INFO [train.py:937] (0/4) Epoch 18, validation: loss=0.1702, simple_loss=0.2706, pruned_loss=0.03489, over 944034.00 frames. 
+2023-04-02 13:03:14,296 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 13:03:22,595 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8806, 1.2441, 1.6024, 0.5750, 1.9646, 2.4722, 2.1012, 2.5778], + device='cuda:0'), covar=tensor([0.1651, 0.3820, 0.3241, 0.2686, 0.0613, 0.0262, 0.0350, 0.0349], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0313, 0.0343, 0.0259, 0.0235, 0.0180, 0.0212, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 13:03:57,709 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.792e+02 5.055e+02 6.106e+02 7.549e+02 1.634e+03, threshold=1.221e+03, percent-clipped=4.0 +2023-04-02 13:04:15,901 INFO [train.py:903] (0/4) Epoch 18, batch 6050, loss[loss=0.2061, simple_loss=0.2939, pruned_loss=0.05919, over 19766.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06688, over 3821374.85 frames. ], batch size: 54, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:04:27,115 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:04:33,657 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:04,389 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:18,140 INFO [train.py:903] (0/4) Epoch 18, batch 6100, loss[loss=0.2211, simple_loss=0.2809, pruned_loss=0.08064, over 19768.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2912, pruned_loss=0.06694, over 3831474.01 frames. ], batch size: 47, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:05:59,975 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.309e+02 6.592e+02 8.010e+02 1.726e+03, threshold=1.318e+03, percent-clipped=1.0 +2023-04-02 13:06:03,471 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:06:18,827 INFO [train.py:903] (0/4) Epoch 18, batch 6150, loss[loss=0.1896, simple_loss=0.2696, pruned_loss=0.05476, over 19727.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06687, over 3835258.82 frames. ], batch size: 51, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:06:46,724 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 13:07:20,188 INFO [train.py:903] (0/4) Epoch 18, batch 6200, loss[loss=0.2172, simple_loss=0.2991, pruned_loss=0.06765, over 19787.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2915, pruned_loss=0.06706, over 3832845.74 frames. ], batch size: 56, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:04,096 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.549e+02 5.662e+02 6.707e+02 1.660e+03, threshold=1.132e+03, percent-clipped=3.0 +2023-04-02 13:08:05,408 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:17,460 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 13:08:21,490 INFO [train.py:903] (0/4) Epoch 18, batch 6250, loss[loss=0.2204, simple_loss=0.3044, pruned_loss=0.06821, over 19658.00 frames. 
], tot_loss[loss=0.2125, simple_loss=0.2913, pruned_loss=0.06686, over 3831466.18 frames. ], batch size: 55, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:35,709 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:53,452 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 13:09:23,686 INFO [train.py:903] (0/4) Epoch 18, batch 6300, loss[loss=0.229, simple_loss=0.3044, pruned_loss=0.0768, over 19628.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2916, pruned_loss=0.06698, over 3822556.92 frames. ], batch size: 50, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:06,342 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.826e+02 5.839e+02 7.420e+02 1.796e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-02 13:10:25,333 INFO [train.py:903] (0/4) Epoch 18, batch 6350, loss[loss=0.2541, simple_loss=0.3272, pruned_loss=0.09047, over 19749.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2929, pruned_loss=0.06796, over 3794381.34 frames. ], batch size: 51, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:26,809 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:25,783 INFO [train.py:903] (0/4) Epoch 18, batch 6400, loss[loss=0.1966, simple_loss=0.281, pruned_loss=0.05613, over 18055.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2944, pruned_loss=0.06902, over 3796719.39 frames. ], batch size: 83, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:11:29,340 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:38,259 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:12:08,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.572e+02 5.352e+02 7.001e+02 2.095e+03, threshold=1.070e+03, percent-clipped=5.0 +2023-04-02 13:12:27,158 INFO [train.py:903] (0/4) Epoch 18, batch 6450, loss[loss=0.2208, simple_loss=0.3152, pruned_loss=0.06322, over 19539.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2941, pruned_loss=0.06851, over 3794664.39 frames. ], batch size: 54, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:13:05,515 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:13:09,889 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 13:13:27,311 INFO [train.py:903] (0/4) Epoch 18, batch 6500, loss[loss=0.1976, simple_loss=0.2761, pruned_loss=0.05953, over 19817.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2933, pruned_loss=0.06783, over 3801570.56 frames. ], batch size: 49, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:13:32,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 13:13:50,437 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:14:10,392 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.824e+02 5.508e+02 6.898e+02 1.222e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 13:14:28,809 INFO [train.py:903] (0/4) Epoch 18, batch 6550, loss[loss=0.2501, simple_loss=0.3244, pruned_loss=0.08795, over 17727.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2934, pruned_loss=0.06775, over 3819939.40 frames. ], batch size: 101, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:24,485 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:29,442 INFO [train.py:903] (0/4) Epoch 18, batch 6600, loss[loss=0.2118, simple_loss=0.2985, pruned_loss=0.06257, over 17207.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2936, pruned_loss=0.06815, over 3814207.81 frames. ], batch size: 101, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:35,553 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:37,943 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:08,806 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:11,859 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.976e+02 6.114e+02 8.137e+02 1.508e+03, threshold=1.223e+03, percent-clipped=10.0 +2023-04-02 13:16:29,837 INFO [train.py:903] (0/4) Epoch 18, batch 6650, loss[loss=0.2481, simple_loss=0.3258, pruned_loss=0.08522, over 19660.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2928, pruned_loss=0.06774, over 3801370.96 frames. ], batch size: 58, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:30,798 INFO [train.py:903] (0/4) Epoch 18, batch 6700, loss[loss=0.176, simple_loss=0.2535, pruned_loss=0.0493, over 19789.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2919, pruned_loss=0.06749, over 3802740.17 frames. ], batch size: 49, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:55,725 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:12,046 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.791e+02 5.844e+02 7.603e+02 1.856e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 13:18:28,394 INFO [train.py:903] (0/4) Epoch 18, batch 6750, loss[loss=0.2206, simple_loss=0.3109, pruned_loss=0.06517, over 19536.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2926, pruned_loss=0.0681, over 3818614.34 frames. 
], batch size: 54, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:18:33,056 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:55,631 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:24,218 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:25,055 INFO [train.py:903] (0/4) Epoch 18, batch 6800, loss[loss=0.1878, simple_loss=0.2746, pruned_loss=0.05048, over 19481.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2917, pruned_loss=0.06782, over 3813658.95 frames. ], batch size: 64, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:19:40,829 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:54,378 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-18.pt +2023-04-02 13:20:09,289 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 13:20:09,734 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 13:20:12,595 INFO [train.py:903] (0/4) Epoch 19, batch 0, loss[loss=0.2173, simple_loss=0.2985, pruned_loss=0.06807, over 19398.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2985, pruned_loss=0.06807, over 19398.00 frames. ], batch size: 66, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:20:12,596 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 13:20:24,046 INFO [train.py:937] (0/4) Epoch 19, validation: loss=0.171, simple_loss=0.2713, pruned_loss=0.03533, over 944034.00 frames. +2023-04-02 13:20:24,047 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 13:20:32,696 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 4.987e+02 6.075e+02 7.792e+02 1.350e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 13:20:38,460 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 13:20:53,676 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122928.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:13,140 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 13:21:13,966 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:24,643 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:25,464 INFO [train.py:903] (0/4) Epoch 19, batch 50, loss[loss=0.1905, simple_loss=0.286, pruned_loss=0.04752, over 19092.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2933, pruned_loss=0.06727, over 880240.17 frames. ], batch size: 69, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:21:51,203 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-02 13:22:04,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-02 13:22:16,579 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3951, 2.1227, 1.5687, 1.4401, 1.9679, 1.2565, 1.3920, 1.8321], + device='cuda:0'), covar=tensor([0.1028, 0.0793, 0.1084, 0.0806, 0.0528, 0.1307, 0.0637, 0.0451], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0314, 0.0331, 0.0259, 0.0247, 0.0334, 0.0291, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:22:27,934 INFO [train.py:903] (0/4) Epoch 19, batch 100, loss[loss=0.1899, simple_loss=0.2781, pruned_loss=0.05085, over 19679.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.289, pruned_loss=0.06582, over 1542959.43 frames. ], batch size: 53, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:35,845 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.975e+02 5.926e+02 8.151e+02 1.966e+03, threshold=1.185e+03, percent-clipped=7.0 +2023-04-02 13:22:37,373 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9439, 2.7501, 2.2166, 2.0713, 2.0440, 2.3534, 0.9218, 2.0114], + device='cuda:0'), covar=tensor([0.0621, 0.0523, 0.0600, 0.0980, 0.0895, 0.0997, 0.1279, 0.0885], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0347, 0.0347, 0.0374, 0.0450, 0.0380, 0.0328, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 13:22:40,149 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 13:23:25,695 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:23:26,591 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6248, 4.1967, 2.7467, 3.6887, 0.9216, 4.0967, 4.0327, 4.0830], + device='cuda:0'), covar=tensor([0.0623, 0.0987, 0.1865, 0.0862, 0.4186, 0.0702, 0.0784, 0.1121], + device='cuda:0'), in_proj_covar=tensor([0.0485, 0.0395, 0.0483, 0.0342, 0.0397, 0.0421, 0.0410, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:23:27,453 INFO [train.py:903] (0/4) Epoch 19, batch 150, loss[loss=0.2117, simple_loss=0.2917, pruned_loss=0.06579, over 19601.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2914, pruned_loss=0.06756, over 2054391.72 frames. ], batch size: 52, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:23:55,509 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123077.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:24:27,281 INFO [train.py:903] (0/4) Epoch 19, batch 200, loss[loss=0.1973, simple_loss=0.2697, pruned_loss=0.06241, over 19767.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2909, pruned_loss=0.06695, over 2452073.56 frames. ], batch size: 47, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:24:29,571 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-02 13:24:35,482 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.223e+02 5.868e+02 7.012e+02 1.944e+03, threshold=1.174e+03, percent-clipped=7.0 +2023-04-02 13:24:36,959 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1089, 1.2483, 1.5479, 1.4532, 2.7004, 1.0542, 2.1499, 3.0607], + device='cuda:0'), covar=tensor([0.0575, 0.2854, 0.2725, 0.1731, 0.0769, 0.2442, 0.1173, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0396, 0.0356, 0.0378, 0.0341, 0.0367, 0.0349, 0.0369, 0.0386], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:24:58,726 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9599, 3.5955, 2.5940, 3.2682, 1.0296, 3.5480, 3.4280, 3.5189], + device='cuda:0'), covar=tensor([0.0868, 0.1173, 0.1887, 0.0910, 0.3957, 0.0848, 0.0962, 0.1216], + device='cuda:0'), in_proj_covar=tensor([0.0485, 0.0396, 0.0483, 0.0342, 0.0396, 0.0420, 0.0410, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:25:27,083 INFO [train.py:903] (0/4) Epoch 19, batch 250, loss[loss=0.2528, simple_loss=0.3299, pruned_loss=0.08787, over 19747.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06853, over 2754599.66 frames. ], batch size: 63, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:25,119 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:26:28,234 INFO [train.py:903] (0/4) Epoch 19, batch 300, loss[loss=0.222, simple_loss=0.3028, pruned_loss=0.07061, over 19674.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06812, over 2998095.45 frames. ], batch size: 60, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:37,167 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.226e+02 7.852e+02 1.722e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-02 13:26:55,589 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:04,591 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:27,986 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-02 13:27:29,488 INFO [train.py:903] (0/4) Epoch 19, batch 350, loss[loss=0.2467, simple_loss=0.3157, pruned_loss=0.08881, over 13808.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2904, pruned_loss=0.06693, over 3188695.33 frames. ], batch size: 136, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:27:35,243 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 13:27:46,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7550, 1.2614, 1.5411, 1.7552, 3.3235, 1.1835, 2.4386, 3.7454], + device='cuda:0'), covar=tensor([0.0484, 0.2990, 0.2915, 0.1727, 0.0771, 0.2666, 0.1310, 0.0249], + device='cuda:0'), in_proj_covar=tensor([0.0397, 0.0357, 0.0380, 0.0342, 0.0367, 0.0350, 0.0370, 0.0388], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:28:10,784 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6835, 1.4279, 1.4379, 2.2867, 1.7396, 1.6813, 1.9357, 1.7070], + device='cuda:0'), covar=tensor([0.0939, 0.1184, 0.1181, 0.0773, 0.0909, 0.1005, 0.0972, 0.0861], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0219, 0.0224, 0.0241, 0.0226, 0.0210, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 13:28:30,168 INFO [train.py:903] (0/4) Epoch 19, batch 400, loss[loss=0.1824, simple_loss=0.26, pruned_loss=0.05243, over 19366.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2899, pruned_loss=0.06634, over 3335767.80 frames. ], batch size: 47, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:28:37,933 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.151e+02 6.306e+02 7.944e+02 1.366e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-02 13:29:04,090 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123332.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:24,919 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:30,457 INFO [train.py:903] (0/4) Epoch 19, batch 450, loss[loss=0.2376, simple_loss=0.312, pruned_loss=0.08162, over 19777.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2886, pruned_loss=0.06556, over 3449748.86 frames. ], batch size: 56, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:30:04,818 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 13:30:05,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 13:30:30,934 INFO [train.py:903] (0/4) Epoch 19, batch 500, loss[loss=0.2138, simple_loss=0.2856, pruned_loss=0.07098, over 19391.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2902, pruned_loss=0.06686, over 3533692.06 frames. 
], batch size: 48, lr: 4.40e-03, grad_scale: 16.0 +2023-04-02 13:30:39,752 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.352e+02 7.180e+02 8.361e+02 2.088e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-02 13:31:21,136 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:31:22,325 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3445, 1.4759, 1.7090, 1.6932, 2.9825, 1.3679, 2.3843, 3.3339], + device='cuda:0'), covar=tensor([0.0503, 0.2562, 0.2526, 0.1715, 0.0675, 0.2235, 0.1202, 0.0284], + device='cuda:0'), in_proj_covar=tensor([0.0400, 0.0359, 0.0380, 0.0344, 0.0369, 0.0350, 0.0371, 0.0389], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:31:30,113 INFO [train.py:903] (0/4) Epoch 19, batch 550, loss[loss=0.187, simple_loss=0.2755, pruned_loss=0.04924, over 19782.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2903, pruned_loss=0.06696, over 3594901.99 frames. ], batch size: 56, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:30,276 INFO [train.py:903] (0/4) Epoch 19, batch 600, loss[loss=0.2518, simple_loss=0.3272, pruned_loss=0.0882, over 19479.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2897, pruned_loss=0.06652, over 3653640.86 frames. ], batch size: 64, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:39,899 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.960e+02 5.982e+02 8.370e+02 1.865e+03, threshold=1.196e+03, percent-clipped=4.0 +2023-04-02 13:32:49,074 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0061, 4.5240, 2.5435, 3.8998, 0.8146, 4.4345, 4.3467, 4.4584], + device='cuda:0'), covar=tensor([0.0532, 0.0932, 0.2113, 0.0874, 0.4295, 0.0639, 0.0854, 0.0951], + device='cuda:0'), in_proj_covar=tensor([0.0480, 0.0396, 0.0480, 0.0339, 0.0394, 0.0418, 0.0407, 0.0443], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:33:14,013 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 13:33:30,844 INFO [train.py:903] (0/4) Epoch 19, batch 650, loss[loss=0.2049, simple_loss=0.2779, pruned_loss=0.06598, over 16319.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.289, pruned_loss=0.06614, over 3699380.60 frames. ], batch size: 36, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:07,507 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4449, 2.1848, 1.6142, 1.5491, 2.1100, 1.3166, 1.3274, 1.8977], + device='cuda:0'), covar=tensor([0.1189, 0.0821, 0.1022, 0.0813, 0.0503, 0.1262, 0.0792, 0.0472], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0316, 0.0335, 0.0261, 0.0248, 0.0337, 0.0293, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:34:30,416 INFO [train.py:903] (0/4) Epoch 19, batch 700, loss[loss=0.2275, simple_loss=0.3104, pruned_loss=0.07227, over 18818.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2909, pruned_loss=0.06735, over 3725845.91 frames. 
], batch size: 74, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:31,910 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:34:41,234 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.460e+02 6.000e+02 7.674e+02 1.249e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 13:35:02,771 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:35:31,845 INFO [train.py:903] (0/4) Epoch 19, batch 750, loss[loss=0.2474, simple_loss=0.3279, pruned_loss=0.08342, over 19361.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2917, pruned_loss=0.06786, over 3724444.23 frames. ], batch size: 70, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:35:59,275 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:36:34,036 INFO [train.py:903] (0/4) Epoch 19, batch 800, loss[loss=0.2088, simple_loss=0.2894, pruned_loss=0.06403, over 19799.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2923, pruned_loss=0.06794, over 3736848.81 frames. ], batch size: 56, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:36:37,839 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0111, 1.9562, 1.7022, 2.1059, 1.9762, 1.7819, 1.6751, 1.9863], + device='cuda:0'), covar=tensor([0.1156, 0.1567, 0.1519, 0.1106, 0.1309, 0.0590, 0.1437, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0352, 0.0303, 0.0249, 0.0298, 0.0248, 0.0297, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:36:43,873 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 4.695e+02 6.255e+02 7.449e+02 1.390e+03, threshold=1.251e+03, percent-clipped=2.0 +2023-04-02 13:36:47,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 13:37:21,007 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0834, 3.3392, 1.9590, 2.1344, 3.0704, 1.7190, 1.5128, 2.2269], + device='cuda:0'), covar=tensor([0.1276, 0.0653, 0.1039, 0.0740, 0.0500, 0.1162, 0.0917, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0313, 0.0332, 0.0259, 0.0245, 0.0335, 0.0291, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:37:34,010 INFO [train.py:903] (0/4) Epoch 19, batch 850, loss[loss=0.1738, simple_loss=0.2535, pruned_loss=0.04707, over 19761.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2935, pruned_loss=0.06878, over 3760889.31 frames. ], batch size: 45, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:17,250 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:18,592 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:23,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 13:38:34,411 INFO [train.py:903] (0/4) Epoch 19, batch 900, loss[loss=0.1812, simple_loss=0.27, pruned_loss=0.04621, over 19595.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2932, pruned_loss=0.06835, over 3771465.79 frames. 
], batch size: 52, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:44,953 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.405e+02 4.993e+02 6.523e+02 8.112e+02 2.572e+03, threshold=1.305e+03, percent-clipped=9.0 +2023-04-02 13:39:35,384 INFO [train.py:903] (0/4) Epoch 19, batch 950, loss[loss=0.2177, simple_loss=0.2963, pruned_loss=0.0696, over 19603.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2939, pruned_loss=0.06873, over 3780717.93 frames. ], batch size: 57, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:39:35,398 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 13:39:42,755 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 13:39:55,781 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 13:40:18,686 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:35,887 INFO [train.py:903] (0/4) Epoch 19, batch 1000, loss[loss=0.2147, simple_loss=0.2961, pruned_loss=0.0667, over 19370.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2924, pruned_loss=0.068, over 3794105.86 frames. ], batch size: 70, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:40:37,252 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:44,459 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 5.533e+02 6.903e+02 9.244e+02 2.435e+03, threshold=1.381e+03, percent-clipped=8.0 +2023-04-02 13:41:23,925 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 13:41:33,875 INFO [train.py:903] (0/4) Epoch 19, batch 1050, loss[loss=0.2257, simple_loss=0.2943, pruned_loss=0.07855, over 19668.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2938, pruned_loss=0.06891, over 3808974.43 frames. ], batch size: 55, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:42:03,304 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 13:42:17,131 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9840, 5.0874, 5.8532, 5.8496, 2.0706, 5.5052, 4.6691, 5.4750], + device='cuda:0'), covar=tensor([0.1762, 0.0781, 0.0584, 0.0628, 0.5845, 0.0732, 0.0589, 0.1223], + device='cuda:0'), in_proj_covar=tensor([0.0758, 0.0704, 0.0912, 0.0800, 0.0811, 0.0661, 0.0551, 0.0838], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 13:42:30,090 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-124000.pt +2023-04-02 13:42:36,015 INFO [train.py:903] (0/4) Epoch 19, batch 1100, loss[loss=0.2438, simple_loss=0.3125, pruned_loss=0.08751, over 19759.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2938, pruned_loss=0.06897, over 3803223.34 frames. 
], batch size: 54, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:42:45,327 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.737e+02 5.703e+02 7.685e+02 1.238e+03, threshold=1.141e+03, percent-clipped=0.0 +2023-04-02 13:43:16,758 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3043, 1.3836, 1.5839, 1.6105, 1.2029, 1.5891, 1.6206, 1.4893], + device='cuda:0'), covar=tensor([0.3299, 0.2873, 0.1535, 0.1775, 0.3092, 0.1595, 0.3923, 0.2663], + device='cuda:0'), in_proj_covar=tensor([0.0871, 0.0931, 0.0697, 0.0918, 0.0854, 0.0789, 0.0824, 0.0764], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 13:43:28,345 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:36,792 INFO [train.py:903] (0/4) Epoch 19, batch 1150, loss[loss=0.1668, simple_loss=0.2465, pruned_loss=0.04362, over 18983.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.293, pruned_loss=0.06865, over 3800115.28 frames. ], batch size: 42, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:43:37,089 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:59,149 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:08,936 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:37,476 INFO [train.py:903] (0/4) Epoch 19, batch 1200, loss[loss=0.1977, simple_loss=0.2719, pruned_loss=0.06175, over 19367.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2928, pruned_loss=0.06858, over 3805638.65 frames. ], batch size: 47, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:44:48,188 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.076e+02 6.121e+02 8.217e+02 1.455e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-02 13:44:50,874 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9719, 1.7261, 1.6812, 1.9828, 1.7263, 1.6988, 1.5450, 1.9230], + device='cuda:0'), covar=tensor([0.1031, 0.1432, 0.1363, 0.0995, 0.1320, 0.0555, 0.1396, 0.0682], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0350, 0.0302, 0.0247, 0.0297, 0.0245, 0.0296, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:45:01,825 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4080, 1.4307, 1.7441, 1.6726, 2.9434, 2.2908, 3.0471, 1.2838], + device='cuda:0'), covar=tensor([0.2304, 0.4223, 0.2647, 0.1847, 0.1307, 0.2102, 0.1346, 0.4101], + device='cuda:0'), in_proj_covar=tensor([0.0515, 0.0621, 0.0682, 0.0469, 0.0613, 0.0519, 0.0653, 0.0530], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 13:45:09,226 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 13:45:38,204 INFO [train.py:903] (0/4) Epoch 19, batch 1250, loss[loss=0.223, simple_loss=0.2961, pruned_loss=0.07498, over 19359.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06792, over 3821978.14 frames. 
], batch size: 70, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:45:47,139 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:16,847 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:38,209 INFO [train.py:903] (0/4) Epoch 19, batch 1300, loss[loss=0.2128, simple_loss=0.2898, pruned_loss=0.06789, over 19113.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06794, over 3826179.27 frames. ], batch size: 42, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:46:38,534 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2254, 1.1856, 1.6159, 1.6376, 2.7334, 4.4302, 4.3254, 5.0214], + device='cuda:0'), covar=tensor([0.1796, 0.5050, 0.4522, 0.2311, 0.0753, 0.0228, 0.0205, 0.0199], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0312, 0.0343, 0.0260, 0.0237, 0.0179, 0.0212, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 13:46:47,487 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1731, 3.3789, 3.6604, 3.6630, 1.9939, 3.4080, 3.1164, 3.4533], + device='cuda:0'), covar=tensor([0.1387, 0.2855, 0.0697, 0.0747, 0.4479, 0.1326, 0.0622, 0.1049], + device='cuda:0'), in_proj_covar=tensor([0.0757, 0.0705, 0.0909, 0.0795, 0.0808, 0.0660, 0.0550, 0.0838], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 13:46:48,381 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.504e+02 4.873e+02 5.866e+02 7.986e+02 1.872e+03, threshold=1.173e+03, percent-clipped=5.0 +2023-04-02 13:47:15,086 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:47:37,770 INFO [train.py:903] (0/4) Epoch 19, batch 1350, loss[loss=0.1848, simple_loss=0.2601, pruned_loss=0.05477, over 19754.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06751, over 3837476.75 frames. ], batch size: 47, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:22,154 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:48:39,028 INFO [train.py:903] (0/4) Epoch 19, batch 1400, loss[loss=0.2419, simple_loss=0.3182, pruned_loss=0.08283, over 19587.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2921, pruned_loss=0.06739, over 3828181.94 frames. 
], batch size: 52, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:48,825 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.508e+02 6.829e+02 9.566e+02 2.163e+03, threshold=1.366e+03, percent-clipped=9.0 +2023-04-02 13:48:58,161 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4962, 2.1895, 1.6882, 1.5603, 2.0575, 1.3850, 1.3500, 1.8357], + device='cuda:0'), covar=tensor([0.1082, 0.0806, 0.1003, 0.0764, 0.0588, 0.1210, 0.0757, 0.0515], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0314, 0.0334, 0.0259, 0.0246, 0.0336, 0.0292, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:49:00,431 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9346, 2.0311, 2.1791, 2.7736, 2.0410, 2.5983, 2.2554, 2.0368], + device='cuda:0'), covar=tensor([0.3936, 0.3531, 0.1792, 0.1964, 0.3699, 0.1773, 0.4369, 0.3072], + device='cuda:0'), in_proj_covar=tensor([0.0873, 0.0934, 0.0700, 0.0920, 0.0856, 0.0793, 0.0827, 0.0768], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 13:49:28,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8184, 4.3636, 2.7788, 3.9074, 0.9004, 4.2438, 4.1508, 4.2727], + device='cuda:0'), covar=tensor([0.0518, 0.1003, 0.1884, 0.0810, 0.4115, 0.0655, 0.0822, 0.0971], + device='cuda:0'), in_proj_covar=tensor([0.0489, 0.0395, 0.0484, 0.0340, 0.0397, 0.0423, 0.0411, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:49:32,999 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:49:38,316 INFO [train.py:903] (0/4) Epoch 19, batch 1450, loss[loss=0.1855, simple_loss=0.2659, pruned_loss=0.05253, over 19402.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2917, pruned_loss=0.06727, over 3834128.89 frames. ], batch size: 48, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:49:40,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 13:49:53,664 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124366.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:50:32,826 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:50:40,521 INFO [train.py:903] (0/4) Epoch 19, batch 1500, loss[loss=0.2129, simple_loss=0.2971, pruned_loss=0.06432, over 17344.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2915, pruned_loss=0.06675, over 3830073.95 frames. ], batch size: 101, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:50:50,227 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.822e+02 6.235e+02 8.378e+02 1.519e+03, threshold=1.247e+03, percent-clipped=2.0 +2023-04-02 13:50:58,616 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. 
limit=2.0 +2023-04-02 13:51:04,420 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:35,967 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:39,950 INFO [train.py:903] (0/4) Epoch 19, batch 1550, loss[loss=0.2388, simple_loss=0.3116, pruned_loss=0.08298, over 19757.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2915, pruned_loss=0.06704, over 3821588.08 frames. ], batch size: 54, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:40,759 INFO [train.py:903] (0/4) Epoch 19, batch 1600, loss[loss=0.2119, simple_loss=0.2934, pruned_loss=0.06518, over 19675.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2922, pruned_loss=0.0674, over 3815147.71 frames. ], batch size: 60, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:51,838 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 4.800e+02 6.281e+02 8.115e+02 1.566e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-02 13:52:52,207 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:06,488 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 13:53:23,364 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:40,466 INFO [train.py:903] (0/4) Epoch 19, batch 1650, loss[loss=0.2159, simple_loss=0.2981, pruned_loss=0.06686, over 19330.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2925, pruned_loss=0.06784, over 3814619.97 frames. ], batch size: 66, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:43,096 INFO [train.py:903] (0/4) Epoch 19, batch 1700, loss[loss=0.2001, simple_loss=0.2739, pruned_loss=0.06311, over 19470.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06699, over 3815984.69 frames. ], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:44,650 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:54:53,185 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.721e+02 4.942e+02 5.736e+02 7.226e+02 1.444e+03, threshold=1.147e+03, percent-clipped=3.0 +2023-04-02 13:55:14,619 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:18,825 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:22,856 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 13:55:42,973 INFO [train.py:903] (0/4) Epoch 19, batch 1750, loss[loss=0.2112, simple_loss=0.2785, pruned_loss=0.07198, over 19465.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2922, pruned_loss=0.06734, over 3817510.80 frames. 
], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:55:45,637 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:16,371 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:20,687 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9893, 0.9528, 0.9769, 1.0367, 0.8844, 1.0089, 1.0171, 1.0049], + device='cuda:0'), covar=tensor([0.0703, 0.0771, 0.0851, 0.0559, 0.0693, 0.0740, 0.0748, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0225, 0.0243, 0.0227, 0.0211, 0.0189, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 13:56:44,205 INFO [train.py:903] (0/4) Epoch 19, batch 1800, loss[loss=0.1782, simple_loss=0.2621, pruned_loss=0.04714, over 19485.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2924, pruned_loss=0.06699, over 3826437.49 frames. ], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:56:51,808 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124710.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:56:54,991 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.111e+02 6.286e+02 7.731e+02 1.656e+03, threshold=1.257e+03, percent-clipped=2.0 +2023-04-02 13:57:38,758 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 13:57:39,005 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:57:40,094 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7770, 1.4679, 1.6268, 1.6610, 4.3146, 1.1956, 2.3311, 4.5414], + device='cuda:0'), covar=tensor([0.0472, 0.2962, 0.3095, 0.2097, 0.0743, 0.2737, 0.1642, 0.0230], + device='cuda:0'), in_proj_covar=tensor([0.0398, 0.0357, 0.0377, 0.0342, 0.0366, 0.0349, 0.0369, 0.0388], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 13:57:44,336 INFO [train.py:903] (0/4) Epoch 19, batch 1850, loss[loss=0.2271, simple_loss=0.3084, pruned_loss=0.07295, over 19688.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.293, pruned_loss=0.06737, over 3813373.67 frames. ], batch size: 60, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:03,265 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:17,480 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 13:58:33,192 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:33,420 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:34,542 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:46,479 INFO [train.py:903] (0/4) Epoch 19, batch 1900, loss[loss=0.2034, simple_loss=0.2909, pruned_loss=0.05794, over 19336.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.292, pruned_loss=0.06656, over 3824398.10 frames. 
], batch size: 66, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:56,687 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 4.862e+02 5.969e+02 7.822e+02 1.490e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 13:59:01,914 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 13:59:05,761 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:07,790 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 13:59:11,246 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124825.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:59:26,838 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:32,012 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 13:59:46,594 INFO [train.py:903] (0/4) Epoch 19, batch 1950, loss[loss=0.2023, simple_loss=0.2959, pruned_loss=0.05429, over 19756.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2929, pruned_loss=0.06693, over 3826793.87 frames. ], batch size: 63, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:47,546 INFO [train.py:903] (0/4) Epoch 19, batch 2000, loss[loss=0.2638, simple_loss=0.3306, pruned_loss=0.09848, over 13021.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2918, pruned_loss=0.0667, over 3825962.26 frames. ], batch size: 136, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:54,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:00:58,634 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.950e+02 6.179e+02 7.428e+02 1.573e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 14:01:07,242 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 14:01:45,077 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 14:01:48,491 INFO [train.py:903] (0/4) Epoch 19, batch 2050, loss[loss=0.2357, simple_loss=0.3227, pruned_loss=0.07428, over 19568.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2929, pruned_loss=0.06751, over 3814362.85 frames. ], batch size: 61, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:06,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 14:02:07,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 14:02:25,215 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 14:02:44,766 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:50,765 INFO [train.py:903] (0/4) Epoch 19, batch 2100, loss[loss=0.2053, simple_loss=0.2917, pruned_loss=0.05943, over 19534.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2925, pruned_loss=0.06736, over 3799055.85 frames. 
], batch size: 54, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:52,325 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:52,530 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 14:03:00,992 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.657e+02 4.808e+02 5.766e+02 7.881e+02 2.968e+03, threshold=1.153e+03, percent-clipped=4.0 +2023-04-02 14:03:15,323 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:15,652 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0115, 2.0429, 2.2893, 2.8108, 2.0009, 2.6111, 2.3288, 2.1091], + device='cuda:0'), covar=tensor([0.4216, 0.3973, 0.1833, 0.2202, 0.4144, 0.2047, 0.4770, 0.3257], + device='cuda:0'), in_proj_covar=tensor([0.0869, 0.0927, 0.0696, 0.0912, 0.0853, 0.0786, 0.0824, 0.0763], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 14:03:18,743 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 14:03:19,093 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9718, 1.7872, 1.6289, 2.0615, 1.7513, 1.7113, 1.6038, 1.9151], + device='cuda:0'), covar=tensor([0.0982, 0.1294, 0.1437, 0.0822, 0.1209, 0.0570, 0.1343, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0356, 0.0307, 0.0250, 0.0299, 0.0249, 0.0300, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:03:22,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125030.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:30,110 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4919, 1.6733, 2.1063, 1.9169, 3.2841, 2.5312, 3.4908, 1.6935], + device='cuda:0'), covar=tensor([0.2559, 0.4337, 0.2889, 0.1900, 0.1520, 0.2237, 0.1589, 0.4038], + device='cuda:0'), in_proj_covar=tensor([0.0515, 0.0624, 0.0684, 0.0467, 0.0614, 0.0518, 0.0653, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:03:39,537 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 14:03:49,806 INFO [train.py:903] (0/4) Epoch 19, batch 2150, loss[loss=0.2774, simple_loss=0.3386, pruned_loss=0.1081, over 13040.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2926, pruned_loss=0.06736, over 3788372.21 frames. ], batch size: 136, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:22,837 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125081.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:04:50,320 INFO [train.py:903] (0/4) Epoch 19, batch 2200, loss[loss=0.2102, simple_loss=0.2945, pruned_loss=0.06291, over 18015.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2915, pruned_loss=0.067, over 3795384.96 frames. 
], batch size: 83, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:53,006 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125106.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:05:01,336 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.808e+02 4.810e+02 5.592e+02 6.977e+02 1.826e+03, threshold=1.118e+03, percent-clipped=4.0 +2023-04-02 14:05:04,001 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125115.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:33,638 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:40,923 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 14:05:41,554 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:51,114 INFO [train.py:903] (0/4) Epoch 19, batch 2250, loss[loss=0.2062, simple_loss=0.2913, pruned_loss=0.06054, over 19485.00 frames. ], tot_loss[loss=0.213, simple_loss=0.292, pruned_loss=0.06697, over 3800437.89 frames. ], batch size: 49, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:05:52,659 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8935, 1.7544, 1.5380, 2.0245, 1.6833, 1.6369, 1.5869, 1.7671], + device='cuda:0'), covar=tensor([0.1013, 0.1452, 0.1486, 0.0914, 0.1288, 0.0565, 0.1328, 0.0759], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0356, 0.0307, 0.0249, 0.0299, 0.0249, 0.0300, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:06:05,776 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:25,680 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:35,049 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:52,195 INFO [train.py:903] (0/4) Epoch 19, batch 2300, loss[loss=0.2584, simple_loss=0.3309, pruned_loss=0.09291, over 17300.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2916, pruned_loss=0.06666, over 3815378.94 frames. ], batch size: 101, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:07:04,347 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.756e+02 5.218e+02 6.176e+02 8.185e+02 2.110e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 14:07:06,714 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 14:07:52,829 INFO [train.py:903] (0/4) Epoch 19, batch 2350, loss[loss=0.1807, simple_loss=0.2609, pruned_loss=0.05026, over 19371.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06644, over 3810877.87 frames. ], batch size: 47, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:08:33,595 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 14:08:45,060 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125297.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:08:49,235 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-02 14:08:52,752 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2212, 1.2977, 1.4327, 1.5433, 1.2176, 1.5093, 1.4900, 1.3592], + device='cuda:0'), covar=tensor([0.2663, 0.2193, 0.1307, 0.1470, 0.2390, 0.1345, 0.3111, 0.2267], + device='cuda:0'), in_proj_covar=tensor([0.0874, 0.0934, 0.0700, 0.0921, 0.0859, 0.0789, 0.0831, 0.0768], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 14:08:53,418 INFO [train.py:903] (0/4) Epoch 19, batch 2400, loss[loss=0.2973, simple_loss=0.356, pruned_loss=0.1194, over 13851.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2923, pruned_loss=0.06734, over 3797107.54 frames. ], batch size: 137, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:08:54,899 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8452, 3.3206, 3.3461, 3.3650, 1.4211, 3.2241, 2.8236, 3.1061], + device='cuda:0'), covar=tensor([0.1635, 0.0877, 0.0804, 0.0910, 0.5076, 0.0950, 0.0802, 0.1349], + device='cuda:0'), in_proj_covar=tensor([0.0765, 0.0710, 0.0916, 0.0806, 0.0816, 0.0666, 0.0554, 0.0849], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 14:09:05,368 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.876e+02 4.925e+02 6.150e+02 7.515e+02 1.529e+03, threshold=1.230e+03, percent-clipped=3.0 +2023-04-02 14:09:45,494 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 14:09:53,248 INFO [train.py:903] (0/4) Epoch 19, batch 2450, loss[loss=0.1936, simple_loss=0.2696, pruned_loss=0.05882, over 19401.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06625, over 3802711.01 frames. ], batch size: 48, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:10:14,728 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125371.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:17,759 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125374.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:41,851 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,299 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,329 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:53,819 INFO [train.py:903] (0/4) Epoch 19, batch 2500, loss[loss=0.2179, simple_loss=0.3004, pruned_loss=0.06764, over 19796.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2911, pruned_loss=0.067, over 3797807.13 frames. 
], batch size: 63, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:05,671 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.898e+02 6.460e+02 8.264e+02 2.020e+03, threshold=1.292e+03, percent-clipped=4.0 +2023-04-02 14:11:13,967 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:35,802 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:54,216 INFO [train.py:903] (0/4) Epoch 19, batch 2550, loss[loss=0.2114, simple_loss=0.2961, pruned_loss=0.06332, over 19577.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.291, pruned_loss=0.06691, over 3814847.90 frames. ], batch size: 61, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:56,653 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3288, 3.0666, 2.1588, 2.7550, 0.8557, 2.9427, 2.8592, 2.9717], + device='cuda:0'), covar=tensor([0.1158, 0.1446, 0.2376, 0.1090, 0.3888, 0.1059, 0.1194, 0.1673], + device='cuda:0'), in_proj_covar=tensor([0.0486, 0.0396, 0.0480, 0.0337, 0.0394, 0.0420, 0.0411, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:12:38,052 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:12:47,108 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 14:12:53,797 INFO [train.py:903] (0/4) Epoch 19, batch 2600, loss[loss=0.2307, simple_loss=0.3126, pruned_loss=0.07442, over 19440.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2907, pruned_loss=0.0666, over 3824886.48 frames. ], batch size: 64, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:13:05,879 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 4.815e+02 5.841e+02 7.665e+02 1.339e+03, threshold=1.168e+03, percent-clipped=2.0 +2023-04-02 14:13:21,432 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1050, 1.3713, 1.7371, 1.2187, 2.5956, 3.5794, 3.2977, 3.7831], + device='cuda:0'), covar=tensor([0.1600, 0.3620, 0.3201, 0.2398, 0.0583, 0.0180, 0.0208, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0311, 0.0343, 0.0260, 0.0237, 0.0179, 0.0212, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 14:13:48,578 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.50 vs. limit=5.0 +2023-04-02 14:13:52,883 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125553.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:13:53,662 INFO [train.py:903] (0/4) Epoch 19, batch 2650, loss[loss=0.2108, simple_loss=0.2911, pruned_loss=0.06525, over 19575.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2921, pruned_loss=0.0677, over 3834606.93 frames. ], batch size: 52, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:15,462 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 14:14:23,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125578.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:54,493 INFO [train.py:903] (0/4) Epoch 19, batch 2700, loss[loss=0.224, simple_loss=0.3051, pruned_loss=0.07144, over 19527.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2902, pruned_loss=0.06664, over 3833363.74 frames. ], batch size: 56, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:55,974 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:07,179 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.817e+02 5.964e+02 7.468e+02 1.608e+03, threshold=1.193e+03, percent-clipped=5.0 +2023-04-02 14:15:15,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:31,466 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8138, 0.8921, 0.8600, 0.7465, 0.7287, 0.7504, 0.0704, 0.2371], + device='cuda:0'), covar=tensor([0.0463, 0.0444, 0.0297, 0.0419, 0.0737, 0.0487, 0.1039, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0349, 0.0351, 0.0374, 0.0451, 0.0385, 0.0331, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:15:35,557 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0792, 1.5874, 2.1721, 1.6009, 4.5949, 0.9461, 2.4504, 4.9125], + device='cuda:0'), covar=tensor([0.0382, 0.2641, 0.2385, 0.1976, 0.0663, 0.2741, 0.1424, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0397, 0.0358, 0.0376, 0.0342, 0.0365, 0.0349, 0.0368, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:15:56,194 INFO [train.py:903] (0/4) Epoch 19, batch 2750, loss[loss=0.216, simple_loss=0.2873, pruned_loss=0.0723, over 19773.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2911, pruned_loss=0.06715, over 3824027.89 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:16:05,817 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3849, 1.5262, 1.8938, 1.6667, 2.5977, 2.1351, 2.6766, 1.2619], + device='cuda:0'), covar=tensor([0.2574, 0.4238, 0.2495, 0.2049, 0.1571, 0.2269, 0.1589, 0.4305], + device='cuda:0'), in_proj_covar=tensor([0.0518, 0.0625, 0.0688, 0.0470, 0.0614, 0.0521, 0.0655, 0.0534], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:16:55,539 INFO [train.py:903] (0/4) Epoch 19, batch 2800, loss[loss=0.2055, simple_loss=0.2839, pruned_loss=0.06359, over 19660.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2908, pruned_loss=0.06736, over 3823520.41 frames. 
], batch size: 53, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:17:08,445 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.188e+02 6.338e+02 7.813e+02 1.733e+03, threshold=1.268e+03, percent-clipped=8.0 +2023-04-02 14:17:12,864 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:37,004 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:39,791 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 14:17:42,815 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:55,254 INFO [train.py:903] (0/4) Epoch 19, batch 2850, loss[loss=0.1743, simple_loss=0.2543, pruned_loss=0.04713, over 19393.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2902, pruned_loss=0.06702, over 3832504.53 frames. ], batch size: 48, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:25,424 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1925, 1.3379, 1.8459, 1.3704, 2.8326, 3.7018, 3.4629, 3.9070], + device='cuda:0'), covar=tensor([0.1646, 0.3712, 0.3105, 0.2345, 0.0599, 0.0238, 0.0201, 0.0270], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0312, 0.0343, 0.0260, 0.0238, 0.0179, 0.0212, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 14:18:30,662 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:18:56,092 INFO [train.py:903] (0/4) Epoch 19, batch 2900, loss[loss=0.1789, simple_loss=0.2593, pruned_loss=0.0492, over 19590.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2912, pruned_loss=0.06714, over 3836827.00 frames. ], batch size: 50, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:56,101 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 14:19:01,432 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3033, 3.8952, 2.6405, 3.5067, 1.0944, 3.8534, 3.7626, 3.7977], + device='cuda:0'), covar=tensor([0.0759, 0.1083, 0.2013, 0.0846, 0.3721, 0.0715, 0.0862, 0.1186], + device='cuda:0'), in_proj_covar=tensor([0.0487, 0.0398, 0.0481, 0.0338, 0.0395, 0.0419, 0.0410, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:19:09,044 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.625e+02 5.511e+02 7.350e+02 1.619e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 14:19:31,527 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:55,773 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:56,475 INFO [train.py:903] (0/4) Epoch 19, batch 2950, loss[loss=0.2018, simple_loss=0.2906, pruned_loss=0.05656, over 19263.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2908, pruned_loss=0.06659, over 3841334.58 frames. 
], batch size: 70, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:05,702 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:35,710 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:50,774 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:57,388 INFO [train.py:903] (0/4) Epoch 19, batch 3000, loss[loss=0.181, simple_loss=0.257, pruned_loss=0.05243, over 19391.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2921, pruned_loss=0.06758, over 3810125.85 frames. ], batch size: 48, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:57,389 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 14:21:10,743 INFO [train.py:937] (0/4) Epoch 19, validation: loss=0.1696, simple_loss=0.2702, pruned_loss=0.03451, over 944034.00 frames. +2023-04-02 14:21:10,745 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 14:21:10,803 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 14:21:24,049 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 4.999e+02 6.816e+02 8.693e+02 1.814e+03, threshold=1.363e+03, percent-clipped=12.0 +2023-04-02 14:22:11,566 INFO [train.py:903] (0/4) Epoch 19, batch 3050, loss[loss=0.2433, simple_loss=0.3191, pruned_loss=0.08375, over 14021.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2916, pruned_loss=0.06729, over 3808739.66 frames. ], batch size: 136, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:22:24,990 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:23:08,041 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-126000.pt +2023-04-02 14:23:13,568 INFO [train.py:903] (0/4) Epoch 19, batch 3100, loss[loss=0.2096, simple_loss=0.2894, pruned_loss=0.06491, over 19738.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06643, over 3818748.31 frames. ], batch size: 63, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:23:14,309 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 14:23:26,817 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.651e+02 5.591e+02 6.916e+02 1.279e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-02 14:23:28,003 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:13,741 INFO [train.py:903] (0/4) Epoch 19, batch 3150, loss[loss=0.1639, simple_loss=0.2428, pruned_loss=0.04256, over 19048.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2896, pruned_loss=0.06567, over 3826822.05 frames. ], batch size: 42, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:24:40,336 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 14:24:45,984 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:48,473 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-02 14:24:53,523 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:55,901 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:14,240 INFO [train.py:903] (0/4) Epoch 19, batch 3200, loss[loss=0.1956, simple_loss=0.2749, pruned_loss=0.05813, over 19483.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06559, over 3817991.22 frames. ], batch size: 49, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:25:21,274 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:26,792 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126114.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:27,594 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 4.817e+02 6.226e+02 7.515e+02 1.545e+03, threshold=1.245e+03, percent-clipped=7.0 +2023-04-02 14:25:51,008 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:15,153 INFO [train.py:903] (0/4) Epoch 19, batch 3250, loss[loss=0.2288, simple_loss=0.3109, pruned_loss=0.07334, over 19659.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06602, over 3816363.48 frames. ], batch size: 60, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:26:15,589 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:32,405 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0059, 1.6228, 1.8505, 2.4272, 1.9307, 2.2904, 2.3417, 2.0580], + device='cuda:0'), covar=tensor([0.0795, 0.0925, 0.0966, 0.0930, 0.0850, 0.0712, 0.0827, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0227, 0.0245, 0.0227, 0.0212, 0.0189, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 14:26:46,058 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:14,808 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:16,669 INFO [train.py:903] (0/4) Epoch 19, batch 3300, loss[loss=0.2274, simple_loss=0.3044, pruned_loss=0.07519, over 19767.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06539, over 3832678.21 frames. ], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:27:20,149 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 14:27:30,250 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.439e+02 5.162e+02 6.410e+02 7.971e+02 2.422e+03, threshold=1.282e+03, percent-clipped=4.0 +2023-04-02 14:27:36,966 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3768, 3.9809, 2.7315, 3.5268, 0.9580, 3.8911, 3.7609, 3.9025], + device='cuda:0'), covar=tensor([0.0702, 0.1029, 0.1843, 0.0911, 0.4000, 0.0705, 0.0955, 0.1113], + device='cuda:0'), in_proj_covar=tensor([0.0488, 0.0394, 0.0479, 0.0337, 0.0394, 0.0420, 0.0410, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:28:17,436 INFO [train.py:903] (0/4) Epoch 19, batch 3350, loss[loss=0.2566, simple_loss=0.3176, pruned_loss=0.09778, over 13210.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2901, pruned_loss=0.06586, over 3837607.64 frames. ], batch size: 136, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:29:08,277 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0568, 2.8333, 2.1522, 2.0486, 1.8458, 2.3738, 0.9417, 2.0250], + device='cuda:0'), covar=tensor([0.0676, 0.0598, 0.0695, 0.1182, 0.1128, 0.1044, 0.1368, 0.1035], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0350, 0.0352, 0.0377, 0.0453, 0.0385, 0.0332, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:29:17,957 INFO [train.py:903] (0/4) Epoch 19, batch 3400, loss[loss=0.2068, simple_loss=0.2929, pruned_loss=0.06038, over 19676.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.06578, over 3835799.20 frames. ], batch size: 58, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:29:25,173 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3436, 1.4719, 1.8198, 1.6013, 2.6006, 2.2364, 2.7575, 1.0503], + device='cuda:0'), covar=tensor([0.2462, 0.4178, 0.2563, 0.1959, 0.1627, 0.2112, 0.1432, 0.4437], + device='cuda:0'), in_proj_covar=tensor([0.0519, 0.0627, 0.0689, 0.0474, 0.0617, 0.0521, 0.0656, 0.0536], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:29:31,306 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.180e+02 6.014e+02 8.021e+02 1.733e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 14:29:57,167 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:18,214 INFO [train.py:903] (0/4) Epoch 19, batch 3450, loss[loss=0.239, simple_loss=0.3117, pruned_loss=0.08316, over 19789.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06641, over 3840794.92 frames. ], batch size: 56, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:30:22,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 14:30:27,158 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:28,561 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126361.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:13,294 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:20,718 INFO [train.py:903] (0/4) Epoch 19, batch 3500, loss[loss=0.2206, simple_loss=0.3068, pruned_loss=0.06719, over 19686.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.0662, over 3836906.61 frames. ], batch size: 60, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:31:34,959 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.908e+02 6.053e+02 7.325e+02 1.346e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 14:32:21,699 INFO [train.py:903] (0/4) Epoch 19, batch 3550, loss[loss=0.2038, simple_loss=0.2867, pruned_loss=0.06049, over 19503.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2898, pruned_loss=0.06604, over 3827155.83 frames. ], batch size: 64, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:32:23,009 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2983, 1.5374, 1.8971, 1.3669, 2.6384, 3.3416, 3.1712, 3.4959], + device='cuda:0'), covar=tensor([0.1634, 0.3466, 0.3041, 0.2416, 0.0700, 0.0284, 0.0217, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0313, 0.0344, 0.0260, 0.0238, 0.0181, 0.0214, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 14:32:26,660 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:38,970 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:48,119 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126475.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:57,946 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126483.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:33:21,942 INFO [train.py:903] (0/4) Epoch 19, batch 3600, loss[loss=0.2459, simple_loss=0.3338, pruned_loss=0.07899, over 19457.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06628, over 3811865.42 frames. ], batch size: 70, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:33:37,205 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.926e+02 5.826e+02 7.456e+02 2.258e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 14:34:04,292 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2114, 1.8508, 1.5175, 1.2516, 1.6699, 1.2349, 1.2453, 1.7333], + device='cuda:0'), covar=tensor([0.0669, 0.0803, 0.1062, 0.0785, 0.0528, 0.1237, 0.0562, 0.0381], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0311, 0.0332, 0.0260, 0.0243, 0.0333, 0.0289, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:34:23,092 INFO [train.py:903] (0/4) Epoch 19, batch 3650, loss[loss=0.2155, simple_loss=0.2881, pruned_loss=0.07146, over 19591.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2904, pruned_loss=0.06646, over 3811194.47 frames. 
], batch size: 52, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:24,555 INFO [train.py:903] (0/4) Epoch 19, batch 3700, loss[loss=0.1899, simple_loss=0.2611, pruned_loss=0.0594, over 19767.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2907, pruned_loss=0.06705, over 3812482.22 frames. ], batch size: 48, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:38,476 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.326e+02 6.409e+02 8.349e+02 1.648e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-02 14:36:23,996 INFO [train.py:903] (0/4) Epoch 19, batch 3750, loss[loss=0.2156, simple_loss=0.2995, pruned_loss=0.06587, over 19789.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2919, pruned_loss=0.0678, over 3811305.57 frames. ], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:36:26,350 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2875, 1.2993, 1.4634, 1.3970, 1.9457, 1.7489, 1.9448, 1.0364], + device='cuda:0'), covar=tensor([0.1728, 0.3083, 0.1915, 0.1418, 0.1114, 0.1663, 0.1037, 0.3567], + device='cuda:0'), in_proj_covar=tensor([0.0518, 0.0627, 0.0690, 0.0473, 0.0615, 0.0521, 0.0656, 0.0535], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 14:37:25,131 INFO [train.py:903] (0/4) Epoch 19, batch 3800, loss[loss=0.1856, simple_loss=0.275, pruned_loss=0.04809, over 19667.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2916, pruned_loss=0.06748, over 3801843.64 frames. ], batch size: 55, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:40,984 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.757e+02 5.693e+02 7.302e+02 1.543e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-02 14:37:57,319 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 14:37:58,864 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126731.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:12,086 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:24,575 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9088, 4.4591, 2.6484, 3.8493, 0.7745, 4.3359, 4.2615, 4.3438], + device='cuda:0'), covar=tensor([0.0612, 0.1003, 0.2000, 0.0826, 0.4380, 0.0725, 0.0896, 0.1046], + device='cuda:0'), in_proj_covar=tensor([0.0491, 0.0397, 0.0483, 0.0340, 0.0399, 0.0421, 0.0411, 0.0448], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:38:26,704 INFO [train.py:903] (0/4) Epoch 19, batch 3850, loss[loss=0.2164, simple_loss=0.3016, pruned_loss=0.06564, over 19530.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2922, pruned_loss=0.06743, over 3816260.38 frames. ], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:38:30,264 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126756.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:28,456 INFO [train.py:903] (0/4) Epoch 19, batch 3900, loss[loss=0.2525, simple_loss=0.3263, pruned_loss=0.08932, over 19783.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2914, pruned_loss=0.06724, over 3816593.40 frames. 
], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:39:34,823 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-04-02 14:39:37,770 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:42,911 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.082e+02 6.454e+02 7.734e+02 3.345e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-02 14:40:04,537 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126834.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:29,133 INFO [train.py:903] (0/4) Epoch 19, batch 3950, loss[loss=0.2277, simple_loss=0.3056, pruned_loss=0.07491, over 19523.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2916, pruned_loss=0.06712, over 3802839.51 frames. ], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:40:33,507 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126857.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:35,252 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 14:41:14,050 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-02 14:41:29,518 INFO [train.py:903] (0/4) Epoch 19, batch 4000, loss[loss=0.2386, simple_loss=0.3127, pruned_loss=0.08228, over 19735.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2914, pruned_loss=0.06761, over 3802800.87 frames. ], batch size: 63, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:41:43,556 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.579e+02 4.980e+02 6.258e+02 9.023e+02 1.716e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-02 14:41:57,623 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:42:16,841 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 14:42:30,358 INFO [train.py:903] (0/4) Epoch 19, batch 4050, loss[loss=0.2196, simple_loss=0.2874, pruned_loss=0.07592, over 19409.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2924, pruned_loss=0.06808, over 3791520.48 frames. ], batch size: 48, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:42:53,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.79 vs. limit=5.0 +2023-04-02 14:43:30,606 INFO [train.py:903] (0/4) Epoch 19, batch 4100, loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.06012, over 19676.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2927, pruned_loss=0.06782, over 3804495.85 frames. ], batch size: 53, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:45,831 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.699e+02 5.681e+02 7.096e+02 1.300e+03, threshold=1.136e+03, percent-clipped=1.0 +2023-04-02 14:44:06,972 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 14:44:31,635 INFO [train.py:903] (0/4) Epoch 19, batch 4150, loss[loss=0.2151, simple_loss=0.2986, pruned_loss=0.06582, over 19773.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2926, pruned_loss=0.06775, over 3799020.78 frames. 
], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:44:44,492 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 14:45:04,800 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0505, 5.0509, 5.9153, 5.9227, 2.0291, 5.5725, 4.7269, 5.5380], + device='cuda:0'), covar=tensor([0.1555, 0.0975, 0.0532, 0.0555, 0.6192, 0.0697, 0.0581, 0.1109], + device='cuda:0'), in_proj_covar=tensor([0.0770, 0.0716, 0.0921, 0.0808, 0.0821, 0.0675, 0.0556, 0.0858], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 14:45:32,546 INFO [train.py:903] (0/4) Epoch 19, batch 4200, loss[loss=0.2478, simple_loss=0.332, pruned_loss=0.0818, over 17644.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2919, pruned_loss=0.06712, over 3804018.04 frames. ], batch size: 102, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:35,888 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 14:45:42,373 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 14:45:43,054 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127113.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:45:46,907 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 4.915e+02 5.762e+02 6.825e+02 1.362e+03, threshold=1.152e+03, percent-clipped=8.0 +2023-04-02 14:46:01,602 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8136, 5.2662, 3.1023, 4.6241, 1.0186, 5.3226, 5.1752, 5.3584], + device='cuda:0'), covar=tensor([0.0393, 0.0783, 0.1834, 0.0671, 0.4074, 0.0520, 0.0724, 0.0927], + device='cuda:0'), in_proj_covar=tensor([0.0488, 0.0394, 0.0482, 0.0337, 0.0397, 0.0419, 0.0410, 0.0445], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:46:14,862 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:46:32,718 INFO [train.py:903] (0/4) Epoch 19, batch 4250, loss[loss=0.2252, simple_loss=0.3019, pruned_loss=0.07425, over 19601.00 frames. ], tot_loss[loss=0.214, simple_loss=0.293, pruned_loss=0.06751, over 3816359.22 frames. ], batch size: 52, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:46:50,062 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 14:47:01,523 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 14:47:03,640 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:09,520 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:20,953 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 14:47:34,699 INFO [train.py:903] (0/4) Epoch 19, batch 4300, loss[loss=0.1854, simple_loss=0.2747, pruned_loss=0.04804, over 19707.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2919, pruned_loss=0.06671, over 3819270.09 frames. 
], batch size: 59, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:47:40,383 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127208.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:50,084 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.800e+02 4.893e+02 5.914e+02 7.996e+02 1.682e+03, threshold=1.183e+03, percent-clipped=7.0 +2023-04-02 14:48:17,091 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 14:48:28,064 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 14:48:35,537 INFO [train.py:903] (0/4) Epoch 19, batch 4350, loss[loss=0.1866, simple_loss=0.2648, pruned_loss=0.05418, over 19397.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2923, pruned_loss=0.06719, over 3815326.42 frames. ], batch size: 48, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:48:51,139 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:23,180 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:35,822 INFO [train.py:903] (0/4) Epoch 19, batch 4400, loss[loss=0.2135, simple_loss=0.2973, pruned_loss=0.06481, over 19494.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2924, pruned_loss=0.06758, over 3804097.33 frames. ], batch size: 64, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:49:45,411 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5627, 1.1782, 1.3350, 1.2537, 2.2142, 0.9927, 1.9721, 2.5150], + device='cuda:0'), covar=tensor([0.0688, 0.2685, 0.2793, 0.1674, 0.0897, 0.2180, 0.1079, 0.0423], + device='cuda:0'), in_proj_covar=tensor([0.0394, 0.0357, 0.0376, 0.0342, 0.0366, 0.0348, 0.0368, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:49:49,568 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.239e+02 6.175e+02 6.853e+02 1.670e+03, threshold=1.235e+03, percent-clipped=2.0 +2023-04-02 14:50:02,112 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 14:50:12,009 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 14:50:22,516 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4840, 1.7053, 2.0183, 1.7466, 3.1635, 2.5385, 3.4602, 1.5521], + device='cuda:0'), covar=tensor([0.2431, 0.4044, 0.2507, 0.1887, 0.1528, 0.2074, 0.1540, 0.4085], + device='cuda:0'), in_proj_covar=tensor([0.0520, 0.0626, 0.0691, 0.0472, 0.0616, 0.0518, 0.0659, 0.0536], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 14:50:36,323 INFO [train.py:903] (0/4) Epoch 19, batch 4450, loss[loss=0.1849, simple_loss=0.2712, pruned_loss=0.04927, over 19844.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2919, pruned_loss=0.0673, over 3803107.19 frames. ], batch size: 52, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:50:46,010 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 14:51:38,005 INFO [train.py:903] (0/4) Epoch 19, batch 4500, loss[loss=0.2132, simple_loss=0.2964, pruned_loss=0.06503, over 19574.00 frames. 
], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.0665, over 3810060.30 frames. ], batch size: 52, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:52,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.116e+02 6.133e+02 7.767e+02 1.446e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-02 14:51:57,310 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1068, 1.1795, 1.4018, 1.3468, 2.7234, 1.0830, 2.1727, 3.0567], + device='cuda:0'), covar=tensor([0.0573, 0.2863, 0.2916, 0.1904, 0.0751, 0.2447, 0.1195, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0393, 0.0357, 0.0376, 0.0341, 0.0367, 0.0347, 0.0368, 0.0385], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:52:28,114 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127445.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:52:39,719 INFO [train.py:903] (0/4) Epoch 19, batch 4550, loss[loss=0.1609, simple_loss=0.2405, pruned_loss=0.04068, over 19771.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2906, pruned_loss=0.0665, over 3810268.32 frames. ], batch size: 47, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:52:48,347 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 14:53:11,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 14:53:40,574 INFO [train.py:903] (0/4) Epoch 19, batch 4600, loss[loss=0.1968, simple_loss=0.2784, pruned_loss=0.05759, over 19754.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2904, pruned_loss=0.06661, over 3802661.44 frames. ], batch size: 54, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:53:48,015 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 14:53:52,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8647, 1.7418, 1.7744, 2.2044, 1.9291, 1.9931, 2.0175, 1.9678], + device='cuda:0'), covar=tensor([0.0687, 0.0768, 0.0826, 0.0618, 0.0805, 0.0700, 0.0800, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0246, 0.0230, 0.0213, 0.0190, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 14:53:54,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.018e+02 6.286e+02 8.427e+02 2.189e+03, threshold=1.257e+03, percent-clipped=8.0 +2023-04-02 14:54:32,443 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:34,745 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:39,912 INFO [train.py:903] (0/4) Epoch 19, batch 4650, loss[loss=0.249, simple_loss=0.3252, pruned_loss=0.08645, over 19731.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2912, pruned_loss=0.06689, over 3802160.47 frames. ], batch size: 63, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:54:56,755 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-04-02 14:55:02,163 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2571, 2.3922, 2.5653, 3.0550, 2.2235, 2.9219, 2.5636, 2.2372], + device='cuda:0'), covar=tensor([0.4355, 0.3724, 0.1875, 0.2368, 0.4308, 0.2080, 0.4733, 0.3435], + device='cuda:0'), in_proj_covar=tensor([0.0879, 0.0937, 0.0702, 0.0924, 0.0861, 0.0796, 0.0831, 0.0768], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 14:55:05,404 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:07,390 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 14:55:40,717 INFO [train.py:903] (0/4) Epoch 19, batch 4700, loss[loss=0.2011, simple_loss=0.2832, pruned_loss=0.05951, over 19660.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2896, pruned_loss=0.06615, over 3802878.69 frames. ], batch size: 58, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:55:41,052 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3780, 1.0441, 1.2484, 2.1072, 1.5055, 1.3781, 1.6078, 1.3464], + device='cuda:0'), covar=tensor([0.1209, 0.1765, 0.1403, 0.1019, 0.1170, 0.1398, 0.1328, 0.1108], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 14:55:50,499 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:55,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 5.036e+02 6.203e+02 8.078e+02 1.735e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-02 14:56:02,615 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 14:56:41,689 INFO [train.py:903] (0/4) Epoch 19, batch 4750, loss[loss=0.1909, simple_loss=0.282, pruned_loss=0.04989, over 19588.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.29, pruned_loss=0.06637, over 3817570.62 frames. ], batch size: 57, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:13,610 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:57:33,554 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5913, 1.0843, 1.3353, 1.3403, 2.2344, 1.0439, 2.0843, 2.4149], + device='cuda:0'), covar=tensor([0.0710, 0.2816, 0.2808, 0.1563, 0.0843, 0.2069, 0.1030, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0395, 0.0359, 0.0377, 0.0341, 0.0367, 0.0347, 0.0369, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:57:41,809 INFO [train.py:903] (0/4) Epoch 19, batch 4800, loss[loss=0.2114, simple_loss=0.2722, pruned_loss=0.07526, over 19300.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2899, pruned_loss=0.06664, over 3808736.36 frames. 
], batch size: 44, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:55,391 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.990e+02 6.329e+02 8.030e+02 1.437e+03, threshold=1.266e+03, percent-clipped=1.0 +2023-04-02 14:58:07,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127726.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:58:40,155 INFO [train.py:903] (0/4) Epoch 19, batch 4850, loss[loss=0.1924, simple_loss=0.2662, pruned_loss=0.05932, over 18339.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06699, over 3815464.44 frames. ], batch size: 40, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:04,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 14:59:22,981 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127789.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:59:25,083 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 14:59:30,832 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 14:59:30,852 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 14:59:32,266 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:59:35,945 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0142, 1.9190, 1.6378, 2.1576, 1.8452, 1.8050, 1.6588, 1.8831], + device='cuda:0'), covar=tensor([0.1029, 0.1312, 0.1480, 0.0891, 0.1237, 0.0519, 0.1308, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0351, 0.0305, 0.0248, 0.0296, 0.0246, 0.0296, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 14:59:40,801 INFO [train.py:903] (0/4) Epoch 19, batch 4900, loss[loss=0.2264, simple_loss=0.3089, pruned_loss=0.07201, over 19318.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2906, pruned_loss=0.06612, over 3821117.15 frames. ], batch size: 66, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:40,839 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 14:59:55,910 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.848e+02 5.865e+02 7.992e+02 2.664e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-02 15:00:01,796 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 15:00:41,586 INFO [train.py:903] (0/4) Epoch 19, batch 4950, loss[loss=0.1954, simple_loss=0.2721, pruned_loss=0.05932, over 19432.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2908, pruned_loss=0.06617, over 3821523.12 frames. ], batch size: 48, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:00:58,818 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:00:59,650 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 15:01:22,184 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-02 15:01:23,565 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1445, 1.2514, 1.6801, 1.1551, 2.4033, 3.3817, 3.0907, 3.5742], + device='cuda:0'), covar=tensor([0.1729, 0.3812, 0.3301, 0.2442, 0.0603, 0.0177, 0.0236, 0.0272], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0315, 0.0344, 0.0260, 0.0237, 0.0181, 0.0213, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 15:01:24,516 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:26,348 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127891.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:41,518 INFO [train.py:903] (0/4) Epoch 19, batch 5000, loss[loss=0.212, simple_loss=0.2916, pruned_loss=0.06619, over 19717.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2914, pruned_loss=0.06668, over 3808917.87 frames. ], batch size: 59, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:01:41,893 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127904.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:01:51,192 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 15:01:55,669 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.675e+02 5.614e+02 6.818e+02 2.294e+03, threshold=1.123e+03, percent-clipped=3.0 +2023-04-02 15:02:03,230 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 15:02:04,133 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 15:02:22,572 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 15:02:41,853 INFO [train.py:903] (0/4) Epoch 19, batch 5050, loss[loss=0.2418, simple_loss=0.3228, pruned_loss=0.08038, over 19594.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2918, pruned_loss=0.06717, over 3809159.71 frames. ], batch size: 61, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:16,213 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:18,095 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 15:03:36,860 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-128000.pt +2023-04-02 15:03:43,138 INFO [train.py:903] (0/4) Epoch 19, batch 5100, loss[loss=0.1941, simple_loss=0.2848, pruned_loss=0.05172, over 19672.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.291, pruned_loss=0.06671, over 3814895.50 frames. ], batch size: 59, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:45,772 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,019 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,355 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 15:03:56,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-02 15:03:58,283 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.139e+02 4.818e+02 5.706e+02 8.227e+02 1.561e+03, threshold=1.141e+03, percent-clipped=7.0 +2023-04-02 15:04:00,657 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 15:04:04,158 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 15:04:07,804 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128024.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:04:43,093 INFO [train.py:903] (0/4) Epoch 19, batch 5150, loss[loss=0.2407, simple_loss=0.3116, pruned_loss=0.08487, over 19382.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2903, pruned_loss=0.06649, over 3810676.78 frames. ], batch size: 70, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:04:51,937 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0127, 1.9425, 1.8141, 1.5896, 1.5562, 1.6363, 0.4082, 0.9746], + device='cuda:0'), covar=tensor([0.0519, 0.0561, 0.0368, 0.0631, 0.1103, 0.0775, 0.1219, 0.0940], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0346, 0.0348, 0.0372, 0.0447, 0.0380, 0.0328, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 15:04:57,229 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:05:31,571 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:05:44,863 INFO [train.py:903] (0/4) Epoch 19, batch 5200, loss[loss=0.2152, simple_loss=0.2985, pruned_loss=0.06601, over 19606.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06632, over 3811819.82 frames. ], batch size: 57, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:05:59,012 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.969e+02 4.920e+02 6.208e+02 7.921e+02 1.726e+03, threshold=1.242e+03, percent-clipped=7.0 +2023-04-02 15:05:59,070 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 15:06:16,658 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 15:06:28,650 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128139.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:29,667 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1191, 1.2492, 1.4891, 1.3470, 2.7034, 1.0064, 2.0893, 3.0560], + device='cuda:0'), covar=tensor([0.0590, 0.2819, 0.2715, 0.1851, 0.0795, 0.2487, 0.1252, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0397, 0.0358, 0.0378, 0.0344, 0.0369, 0.0349, 0.0371, 0.0390], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:06:30,653 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128141.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:41,119 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 15:06:45,758 INFO [train.py:903] (0/4) Epoch 19, batch 5250, loss[loss=0.1777, simple_loss=0.2598, pruned_loss=0.04777, over 19405.00 frames. 
], tot_loss[loss=0.2119, simple_loss=0.2909, pruned_loss=0.06643, over 3819611.23 frames. ], batch size: 48, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:06:53,696 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128160.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:07:23,148 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128185.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:07:45,451 INFO [train.py:903] (0/4) Epoch 19, batch 5300, loss[loss=0.2075, simple_loss=0.2931, pruned_loss=0.06098, over 19523.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2917, pruned_loss=0.06722, over 3827041.56 frames. ], batch size: 54, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:07:54,714 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:07:59,668 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.001e+02 6.088e+02 7.600e+02 1.403e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 15:08:00,872 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 15:08:21,801 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:34,560 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4523, 1.4240, 1.7590, 1.6592, 2.5974, 2.3339, 2.7484, 1.0469], + device='cuda:0'), covar=tensor([0.2395, 0.4275, 0.2570, 0.1885, 0.1475, 0.1935, 0.1393, 0.4425], + device='cuda:0'), in_proj_covar=tensor([0.0522, 0.0629, 0.0692, 0.0475, 0.0615, 0.0521, 0.0660, 0.0536], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:08:46,735 INFO [train.py:903] (0/4) Epoch 19, batch 5350, loss[loss=0.2454, simple_loss=0.3188, pruned_loss=0.08603, over 18096.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.06675, over 3818621.74 frames. ], batch size: 83, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:08:50,303 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0998, 1.4178, 1.8565, 1.5191, 2.8301, 4.5148, 4.4468, 5.0090], + device='cuda:0'), covar=tensor([0.1810, 0.3768, 0.3436, 0.2311, 0.0690, 0.0221, 0.0185, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0260, 0.0238, 0.0181, 0.0213, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 15:08:50,315 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:57,115 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:20,022 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 15:09:26,895 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:47,392 INFO [train.py:903] (0/4) Epoch 19, batch 5400, loss[loss=0.1931, simple_loss=0.2695, pruned_loss=0.05837, over 19587.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2924, pruned_loss=0.06767, over 3792834.14 frames. 
], batch size: 52, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:10:00,905 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0312, 3.6564, 2.5056, 3.2857, 0.8969, 3.5607, 3.4774, 3.6197], + device='cuda:0'), covar=tensor([0.0771, 0.1074, 0.1969, 0.0912, 0.3875, 0.0782, 0.0861, 0.1098], + device='cuda:0'), in_proj_covar=tensor([0.0489, 0.0394, 0.0482, 0.0340, 0.0396, 0.0420, 0.0410, 0.0446], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:10:01,756 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 4.631e+02 5.571e+02 7.152e+02 1.493e+03, threshold=1.114e+03, percent-clipped=2.0 +2023-04-02 15:10:15,370 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:29,858 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:42,687 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:48,543 INFO [train.py:903] (0/4) Epoch 19, batch 5450, loss[loss=0.2714, simple_loss=0.3291, pruned_loss=0.1069, over 13022.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2923, pruned_loss=0.06762, over 3794302.70 frames. ], batch size: 135, lr: 4.31e-03, grad_scale: 16.0 +2023-04-02 15:11:39,578 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128395.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:11:50,174 INFO [train.py:903] (0/4) Epoch 19, batch 5500, loss[loss=0.2195, simple_loss=0.2955, pruned_loss=0.07179, over 19707.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2928, pruned_loss=0.06764, over 3797858.95 frames. ], batch size: 59, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:06,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.166e+02 6.121e+02 7.872e+02 1.632e+03, threshold=1.224e+03, percent-clipped=5.0 +2023-04-02 15:12:10,622 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:12,705 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 15:12:16,375 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0715, 1.4251, 1.8277, 1.6037, 2.9522, 4.4761, 4.3726, 4.9489], + device='cuda:0'), covar=tensor([0.1735, 0.3793, 0.3392, 0.2220, 0.0660, 0.0187, 0.0178, 0.0177], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0260, 0.0238, 0.0181, 0.0214, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 15:12:23,819 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:50,265 INFO [train.py:903] (0/4) Epoch 19, batch 5550, loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06797, over 19395.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2924, pruned_loss=0.06747, over 3797381.45 frames. ], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:56,475 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 15:13:44,849 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 15:13:51,308 INFO [train.py:903] (0/4) Epoch 19, batch 5600, loss[loss=0.2794, simple_loss=0.3391, pruned_loss=0.1098, over 13384.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2921, pruned_loss=0.06732, over 3807376.74 frames. ], batch size: 136, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:14:01,884 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128512.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:07,037 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.843e+02 5.877e+02 7.578e+02 1.194e+03, threshold=1.175e+03, percent-clipped=0.0 +2023-04-02 15:14:32,655 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:52,040 INFO [train.py:903] (0/4) Epoch 19, batch 5650, loss[loss=0.2016, simple_loss=0.2683, pruned_loss=0.06752, over 18574.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06624, over 3818749.17 frames. ], batch size: 41, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:27,808 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:37,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 15:15:53,141 INFO [train.py:903] (0/4) Epoch 19, batch 5700, loss[loss=0.1971, simple_loss=0.2683, pruned_loss=0.06298, over 17281.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06606, over 3835882.44 frames. ], batch size: 38, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:54,805 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:58,262 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:08,028 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.156e+02 6.108e+02 7.232e+02 1.309e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 15:16:24,868 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:53,492 INFO [train.py:903] (0/4) Epoch 19, batch 5750, loss[loss=0.2174, simple_loss=0.3029, pruned_loss=0.06601, over 19700.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2896, pruned_loss=0.0655, over 3844228.43 frames. ], batch size: 63, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:16:55,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 15:17:05,272 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 15:17:09,593 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 15:17:26,653 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. 
limit=2.0 +2023-04-02 15:17:27,291 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:17:42,109 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2267, 2.3262, 2.4870, 2.9929, 2.1890, 2.9214, 2.5477, 2.2871], + device='cuda:0'), covar=tensor([0.4176, 0.3874, 0.1744, 0.2573, 0.4431, 0.2124, 0.4605, 0.3279], + device='cuda:0'), in_proj_covar=tensor([0.0874, 0.0935, 0.0700, 0.0923, 0.0859, 0.0794, 0.0833, 0.0767], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:17:55,080 INFO [train.py:903] (0/4) Epoch 19, batch 5800, loss[loss=0.2384, simple_loss=0.3151, pruned_loss=0.08084, over 19689.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.29, pruned_loss=0.06623, over 3843522.72 frames. ], batch size: 60, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:18:10,461 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 4.671e+02 6.414e+02 7.787e+02 1.302e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-02 15:18:24,061 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5173, 2.1643, 1.7246, 1.2397, 2.1202, 1.2359, 1.3207, 1.9330], + device='cuda:0'), covar=tensor([0.0821, 0.0601, 0.0763, 0.0788, 0.0441, 0.1012, 0.0634, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0309, 0.0327, 0.0259, 0.0242, 0.0332, 0.0289, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:18:55,625 INFO [train.py:903] (0/4) Epoch 19, batch 5850, loss[loss=0.2086, simple_loss=0.2915, pruned_loss=0.06284, over 19545.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2897, pruned_loss=0.06598, over 3843251.17 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:19:20,629 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:48,422 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:50,609 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:55,907 INFO [train.py:903] (0/4) Epoch 19, batch 5900, loss[loss=0.2107, simple_loss=0.2826, pruned_loss=0.0694, over 19619.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2901, pruned_loss=0.06647, over 3842118.53 frames. ], batch size: 50, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:02,588 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 15:20:11,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.778e+02 5.849e+02 7.721e+02 1.320e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 15:20:23,109 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 15:20:56,176 INFO [train.py:903] (0/4) Epoch 19, batch 5950, loss[loss=0.2036, simple_loss=0.288, pruned_loss=0.05961, over 19547.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2897, pruned_loss=0.06643, over 3833046.59 frames. ], batch size: 56, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:57,899 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 15:21:41,415 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:21:57,512 INFO [train.py:903] (0/4) Epoch 19, batch 6000, loss[loss=0.2067, simple_loss=0.2964, pruned_loss=0.05851, over 19709.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2897, pruned_loss=0.06625, over 3840940.99 frames. ], batch size: 59, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:57,513 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 15:22:07,208 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8938, 1.5830, 1.4887, 1.7912, 1.4793, 1.6131, 1.4049, 1.7161], + device='cuda:0'), covar=tensor([0.1048, 0.1229, 0.1537, 0.1096, 0.1445, 0.0562, 0.1668, 0.0726], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0355, 0.0308, 0.0249, 0.0298, 0.0248, 0.0300, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:22:12,603 INFO [train.py:937] (0/4) Epoch 19, validation: loss=0.1702, simple_loss=0.2702, pruned_loss=0.03514, over 944034.00 frames. +2023-04-02 15:22:12,604 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 15:22:17,263 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128908.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:22:27,998 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.136e+02 6.485e+02 9.043e+02 2.174e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 15:23:13,564 INFO [train.py:903] (0/4) Epoch 19, batch 6050, loss[loss=0.2373, simple_loss=0.3204, pruned_loss=0.07705, over 19743.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.291, pruned_loss=0.06699, over 3841467.32 frames. ], batch size: 63, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:23:33,155 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7808, 1.9083, 2.1372, 2.4034, 1.7708, 2.2669, 2.1353, 1.9358], + device='cuda:0'), covar=tensor([0.3971, 0.3561, 0.1763, 0.2111, 0.3693, 0.2000, 0.4647, 0.3242], + device='cuda:0'), in_proj_covar=tensor([0.0875, 0.0934, 0.0699, 0.0923, 0.0859, 0.0793, 0.0831, 0.0766], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:24:14,689 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3591, 1.4782, 1.8002, 1.6013, 2.9965, 2.4999, 3.3082, 1.6222], + device='cuda:0'), covar=tensor([0.2520, 0.4426, 0.2800, 0.2062, 0.1662, 0.2111, 0.1546, 0.4134], + device='cuda:0'), in_proj_covar=tensor([0.0523, 0.0632, 0.0695, 0.0476, 0.0620, 0.0524, 0.0664, 0.0540], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:24:15,354 INFO [train.py:903] (0/4) Epoch 19, batch 6100, loss[loss=0.2312, simple_loss=0.3158, pruned_loss=0.07337, over 19464.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2912, pruned_loss=0.06684, over 3842626.47 frames. 
], batch size: 64, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:30,770 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.994e+02 6.076e+02 7.380e+02 1.472e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 15:25:14,909 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8078, 5.2753, 2.9711, 4.6247, 1.0206, 5.3064, 5.2053, 5.3652], + device='cuda:0'), covar=tensor([0.0405, 0.0823, 0.1966, 0.0719, 0.4161, 0.0519, 0.0761, 0.1070], + device='cuda:0'), in_proj_covar=tensor([0.0493, 0.0397, 0.0486, 0.0343, 0.0399, 0.0422, 0.0413, 0.0450], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:25:15,123 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129053.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:25:15,830 INFO [train.py:903] (0/4) Epoch 19, batch 6150, loss[loss=0.2018, simple_loss=0.2944, pruned_loss=0.05457, over 18802.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2916, pruned_loss=0.06714, over 3843264.72 frames. ], batch size: 74, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:25:37,314 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 15:25:44,284 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 15:25:44,629 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:26:16,321 INFO [train.py:903] (0/4) Epoch 19, batch 6200, loss[loss=0.2093, simple_loss=0.284, pruned_loss=0.06726, over 19865.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.06619, over 3849561.42 frames. ], batch size: 52, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:26:32,068 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.839e+02 4.815e+02 6.250e+02 7.621e+02 1.523e+03, threshold=1.250e+03, percent-clipped=7.0 +2023-04-02 15:26:53,197 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8214, 1.9364, 2.1999, 2.4333, 1.7518, 2.3372, 2.1984, 1.9935], + device='cuda:0'), covar=tensor([0.4174, 0.3567, 0.1750, 0.2145, 0.3753, 0.1863, 0.4531, 0.3187], + device='cuda:0'), in_proj_covar=tensor([0.0874, 0.0934, 0.0701, 0.0922, 0.0858, 0.0793, 0.0829, 0.0766], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:27:04,010 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:07,652 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:17,155 INFO [train.py:903] (0/4) Epoch 19, batch 6250, loss[loss=0.2429, simple_loss=0.328, pruned_loss=0.07888, over 17161.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2905, pruned_loss=0.06611, over 3834550.66 frames. ], batch size: 101, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:27:38,409 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:47,710 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 15:28:17,840 INFO [train.py:903] (0/4) Epoch 19, batch 6300, loss[loss=0.2365, simple_loss=0.3137, pruned_loss=0.07964, over 19498.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2913, pruned_loss=0.0667, over 3826664.69 frames. ], batch size: 64, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:28:33,725 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 5.181e+02 6.373e+02 8.503e+02 1.874e+03, threshold=1.275e+03, percent-clipped=7.0 +2023-04-02 15:29:17,490 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129252.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:29:19,480 INFO [train.py:903] (0/4) Epoch 19, batch 6350, loss[loss=0.211, simple_loss=0.2937, pruned_loss=0.06414, over 19670.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2909, pruned_loss=0.06637, over 3823119.51 frames. ], batch size: 55, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:29:25,240 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:30:20,709 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 15:30:21,272 INFO [train.py:903] (0/4) Epoch 19, batch 6400, loss[loss=0.1707, simple_loss=0.2513, pruned_loss=0.04504, over 19314.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06676, over 3823746.52 frames. ], batch size: 44, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:30:36,857 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.331e+02 4.991e+02 6.008e+02 7.727e+02 1.608e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-02 15:30:58,495 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3303, 2.0279, 1.5430, 1.4368, 1.8255, 1.1986, 1.2940, 1.7817], + device='cuda:0'), covar=tensor([0.0894, 0.0807, 0.1064, 0.0742, 0.0510, 0.1229, 0.0649, 0.0437], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0310, 0.0328, 0.0259, 0.0241, 0.0331, 0.0288, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:31:22,261 INFO [train.py:903] (0/4) Epoch 19, batch 6450, loss[loss=0.1913, simple_loss=0.2781, pruned_loss=0.05222, over 19536.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2907, pruned_loss=0.06603, over 3823423.45 frames. ], batch size: 64, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:31:38,193 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129367.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:31:48,959 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8882, 1.3403, 1.0568, 1.0344, 1.1493, 1.0251, 1.0443, 1.2196], + device='cuda:0'), covar=tensor([0.0573, 0.0824, 0.1104, 0.0690, 0.0541, 0.1234, 0.0534, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0310, 0.0329, 0.0260, 0.0242, 0.0332, 0.0289, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:32:06,808 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-02 15:32:07,095 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0612, 1.7077, 1.9987, 1.9709, 4.5184, 1.3641, 2.7285, 4.9915], + device='cuda:0'), covar=tensor([0.0387, 0.2670, 0.2564, 0.1761, 0.0733, 0.2504, 0.1258, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0396, 0.0358, 0.0377, 0.0340, 0.0367, 0.0348, 0.0370, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:32:22,359 INFO [train.py:903] (0/4) Epoch 19, batch 6500, loss[loss=0.1993, simple_loss=0.2774, pruned_loss=0.06057, over 19380.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2908, pruned_loss=0.06593, over 3815926.28 frames. ], batch size: 48, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:32:29,740 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 15:32:38,768 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.340e+02 6.897e+02 8.888e+02 1.987e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 15:32:47,856 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6021, 1.1986, 1.3427, 1.2036, 2.2405, 1.0439, 2.0705, 2.5502], + device='cuda:0'), covar=tensor([0.0678, 0.2683, 0.2874, 0.1685, 0.0907, 0.2106, 0.0955, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0396, 0.0357, 0.0377, 0.0340, 0.0367, 0.0348, 0.0370, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:32:56,832 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129431.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:33:24,206 INFO [train.py:903] (0/4) Epoch 19, batch 6550, loss[loss=0.2085, simple_loss=0.2926, pruned_loss=0.06222, over 19590.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2909, pruned_loss=0.06565, over 3833083.74 frames. ], batch size: 61, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:24,937 INFO [train.py:903] (0/4) Epoch 19, batch 6600, loss[loss=0.2048, simple_loss=0.294, pruned_loss=0.05784, over 19694.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2916, pruned_loss=0.06616, over 3835388.30 frames. ], batch size: 59, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:37,588 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:34:40,453 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.539e+02 5.847e+02 6.807e+02 8.552e+02 1.538e+03, threshold=1.361e+03, percent-clipped=4.0 +2023-04-02 15:35:03,129 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.90 vs. 
limit=5.0 +2023-04-02 15:35:03,893 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5923, 1.4744, 1.4664, 1.9924, 1.5380, 2.0033, 1.9121, 1.6477], + device='cuda:0'), covar=tensor([0.0823, 0.0934, 0.1022, 0.0774, 0.0902, 0.0667, 0.0831, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0245, 0.0227, 0.0209, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 15:35:07,235 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:35:25,979 INFO [train.py:903] (0/4) Epoch 19, batch 6650, loss[loss=0.1809, simple_loss=0.2578, pruned_loss=0.05202, over 19751.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2905, pruned_loss=0.06613, over 3831987.15 frames. ], batch size: 46, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:35:32,231 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4721, 2.4456, 2.6977, 3.3123, 2.4653, 3.0758, 2.8973, 2.5558], + device='cuda:0'), covar=tensor([0.3852, 0.3561, 0.1632, 0.2280, 0.3959, 0.1943, 0.4175, 0.2858], + device='cuda:0'), in_proj_covar=tensor([0.0868, 0.0927, 0.0697, 0.0917, 0.0854, 0.0787, 0.0826, 0.0761], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:36:26,350 INFO [train.py:903] (0/4) Epoch 19, batch 6700, loss[loss=0.218, simple_loss=0.3042, pruned_loss=0.06589, over 19745.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2905, pruned_loss=0.06652, over 3825347.92 frames. ], batch size: 63, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:36:42,862 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 4.923e+02 5.649e+02 7.598e+02 1.428e+03, threshold=1.130e+03, percent-clipped=1.0 +2023-04-02 15:36:51,037 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129623.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:36:58,742 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:37:03,803 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 15:37:04,553 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6487, 1.7859, 1.7070, 2.4899, 1.8151, 2.3854, 1.8084, 1.3967], + device='cuda:0'), covar=tensor([0.5307, 0.4421, 0.3068, 0.2807, 0.4295, 0.2244, 0.6722, 0.5690], + device='cuda:0'), in_proj_covar=tensor([0.0869, 0.0929, 0.0697, 0.0918, 0.0855, 0.0788, 0.0826, 0.0762], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:37:18,815 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129648.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:37:25,090 INFO [train.py:903] (0/4) Epoch 19, batch 6750, loss[loss=0.1772, simple_loss=0.2615, pruned_loss=0.04645, over 19487.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2894, pruned_loss=0.06579, over 3843708.97 frames. ], batch size: 49, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:20,255 INFO [train.py:903] (0/4) Epoch 19, batch 6800, loss[loss=0.1933, simple_loss=0.259, pruned_loss=0.06381, over 19787.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2884, pruned_loss=0.06581, over 3841359.26 frames. 
], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:34,410 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.884e+02 6.226e+02 8.201e+02 1.689e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-02 15:38:49,821 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-19.pt +2023-04-02 15:39:05,072 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 15:39:06,153 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 15:39:08,368 INFO [train.py:903] (0/4) Epoch 20, batch 0, loss[loss=0.2249, simple_loss=0.2982, pruned_loss=0.07581, over 19728.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2982, pruned_loss=0.07581, over 19728.00 frames. ], batch size: 51, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:39:08,369 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 15:39:19,745 INFO [train.py:937] (0/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2703, pruned_loss=0.03432, over 944034.00 frames. +2023-04-02 15:39:19,745 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 15:39:31,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 15:40:12,660 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129775.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:40:20,206 INFO [train.py:903] (0/4) Epoch 20, batch 50, loss[loss=0.2011, simple_loss=0.2889, pruned_loss=0.05666, over 19693.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2901, pruned_loss=0.06588, over 875020.00 frames. ], batch size: 53, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:40:51,307 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:40:54,560 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 15:41:03,011 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.543e+02 6.891e+02 8.835e+02 1.770e+03, threshold=1.378e+03, percent-clipped=8.0 +2023-04-02 15:41:05,559 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8173, 1.5550, 1.5594, 1.4688, 3.3828, 0.9734, 2.4458, 3.8771], + device='cuda:0'), covar=tensor([0.0434, 0.2430, 0.2625, 0.1829, 0.0659, 0.2641, 0.1207, 0.0203], + device='cuda:0'), in_proj_covar=tensor([0.0394, 0.0355, 0.0375, 0.0337, 0.0365, 0.0345, 0.0369, 0.0388], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:41:20,212 INFO [train.py:903] (0/4) Epoch 20, batch 100, loss[loss=0.2305, simple_loss=0.3044, pruned_loss=0.07823, over 19520.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06625, over 1546789.83 frames. ], batch size: 64, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:41:31,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-02 15:42:14,137 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2766, 1.2958, 1.7986, 1.2881, 2.7414, 3.6389, 3.3761, 3.8834], + device='cuda:0'), covar=tensor([0.1635, 0.3898, 0.3248, 0.2355, 0.0588, 0.0180, 0.0226, 0.0264], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0316, 0.0346, 0.0261, 0.0237, 0.0181, 0.0213, 0.0247], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 15:42:21,224 INFO [train.py:903] (0/4) Epoch 20, batch 150, loss[loss=0.218, simple_loss=0.3043, pruned_loss=0.06582, over 19528.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2918, pruned_loss=0.06685, over 2049930.86 frames. ], batch size: 54, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:42:30,167 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129890.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:43:03,459 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.799e+02 5.935e+02 7.467e+02 3.197e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 15:43:21,677 INFO [train.py:903] (0/4) Epoch 20, batch 200, loss[loss=0.1948, simple_loss=0.2754, pruned_loss=0.05707, over 19688.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2931, pruned_loss=0.06727, over 2444943.77 frames. ], batch size: 53, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:43:22,848 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 15:43:36,798 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 15:44:13,724 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:44:23,539 INFO [train.py:903] (0/4) Epoch 20, batch 250, loss[loss=0.1973, simple_loss=0.2825, pruned_loss=0.056, over 19672.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2917, pruned_loss=0.06657, over 2747680.59 frames. ], batch size: 58, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:44:45,556 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-130000.pt +2023-04-02 15:45:06,935 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.298e+02 6.354e+02 8.098e+02 1.543e+03, threshold=1.271e+03, percent-clipped=7.0 +2023-04-02 15:45:08,335 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:10,543 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:25,487 INFO [train.py:903] (0/4) Epoch 20, batch 300, loss[loss=0.2237, simple_loss=0.3, pruned_loss=0.07368, over 19670.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2914, pruned_loss=0.06699, over 2996452.71 frames. 
], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:45:57,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5278, 4.0954, 4.2559, 4.2650, 1.7817, 4.0263, 3.4846, 3.9692], + device='cuda:0'), covar=tensor([0.1641, 0.0913, 0.0638, 0.0669, 0.5582, 0.0886, 0.0674, 0.1137], + device='cuda:0'), in_proj_covar=tensor([0.0766, 0.0719, 0.0923, 0.0805, 0.0817, 0.0679, 0.0555, 0.0856], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 15:46:25,935 INFO [train.py:903] (0/4) Epoch 20, batch 350, loss[loss=0.2261, simple_loss=0.3216, pruned_loss=0.06529, over 19675.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2923, pruned_loss=0.06709, over 3186080.19 frames. ], batch size: 58, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:35,001 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:46:35,300 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:46:38,868 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3769, 1.2937, 1.5018, 1.4752, 2.9263, 1.2079, 2.3434, 3.3980], + device='cuda:0'), covar=tensor([0.0554, 0.2796, 0.2914, 0.1921, 0.0799, 0.2464, 0.1253, 0.0284], + device='cuda:0'), in_proj_covar=tensor([0.0399, 0.0359, 0.0378, 0.0341, 0.0369, 0.0349, 0.0373, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:46:50,450 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3869, 2.0831, 2.1073, 2.8628, 1.7827, 2.5288, 2.3900, 2.3252], + device='cuda:0'), covar=tensor([0.0716, 0.0831, 0.0922, 0.0769, 0.0971, 0.0708, 0.0925, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0207, 0.0219, 0.0224, 0.0242, 0.0225, 0.0209, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 15:47:08,653 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.230e+02 6.397e+02 7.792e+02 1.393e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-02 15:47:26,635 INFO [train.py:903] (0/4) Epoch 20, batch 400, loss[loss=0.2079, simple_loss=0.2921, pruned_loss=0.06185, over 17346.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2909, pruned_loss=0.0665, over 3328327.02 frames. ], batch size: 101, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:47:41,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.41 vs. limit=5.0 +2023-04-02 15:47:42,916 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130146.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:47:53,120 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:48:14,976 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130171.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:48:27,090 INFO [train.py:903] (0/4) Epoch 20, batch 450, loss[loss=0.2239, simple_loss=0.3076, pruned_loss=0.07008, over 19722.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2918, pruned_loss=0.06681, over 3432284.71 frames. 
], batch size: 59, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:48:41,193 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2057, 2.1161, 1.8683, 1.6906, 1.7325, 1.7469, 0.6991, 1.1120], + device='cuda:0'), covar=tensor([0.0538, 0.0569, 0.0460, 0.0685, 0.1051, 0.0773, 0.1102, 0.0921], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0350, 0.0353, 0.0376, 0.0451, 0.0383, 0.0333, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 15:49:03,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 15:49:04,562 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 15:49:09,165 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.003e+02 6.443e+02 8.043e+02 1.786e+03, threshold=1.289e+03, percent-clipped=5.0 +2023-04-02 15:49:18,399 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5841, 1.6190, 1.7904, 1.6945, 2.4029, 2.1378, 2.4118, 1.4549], + device='cuda:0'), covar=tensor([0.1773, 0.3123, 0.2055, 0.1508, 0.1217, 0.1681, 0.1135, 0.3442], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0632, 0.0697, 0.0475, 0.0618, 0.0526, 0.0661, 0.0541], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 15:49:27,173 INFO [train.py:903] (0/4) Epoch 20, batch 500, loss[loss=0.2124, simple_loss=0.2896, pruned_loss=0.0676, over 19356.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06675, over 3516366.45 frames. ], batch size: 48, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:50,923 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-02 15:50:10,052 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:50:27,989 INFO [train.py:903] (0/4) Epoch 20, batch 550, loss[loss=0.18, simple_loss=0.2527, pruned_loss=0.05363, over 19759.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2901, pruned_loss=0.06598, over 3586998.49 frames. ], batch size: 45, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:11,221 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.062e+02 6.327e+02 8.479e+02 2.088e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 15:51:28,462 INFO [train.py:903] (0/4) Epoch 20, batch 600, loss[loss=0.206, simple_loss=0.2955, pruned_loss=0.05825, over 19703.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2905, pruned_loss=0.06602, over 3648113.65 frames. ], batch size: 59, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:44,867 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:06,337 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:08,686 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:15,668 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-02 15:52:16,043 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130370.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:30,047 INFO [train.py:903] (0/4) Epoch 20, batch 650, loss[loss=0.2262, simple_loss=0.3027, pruned_loss=0.07482, over 19657.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2905, pruned_loss=0.06619, over 3701058.03 frames. ], batch size: 53, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:52:40,964 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9283, 1.3319, 1.0492, 0.9552, 1.1342, 0.9711, 0.8930, 1.2294], + device='cuda:0'), covar=tensor([0.0578, 0.0724, 0.0990, 0.0652, 0.0518, 0.1198, 0.0575, 0.0455], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0313, 0.0330, 0.0261, 0.0244, 0.0336, 0.0290, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 15:52:56,026 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:53:13,825 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.222e+02 6.307e+02 8.250e+02 2.391e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 15:53:31,357 INFO [train.py:903] (0/4) Epoch 20, batch 700, loss[loss=0.2336, simple_loss=0.3101, pruned_loss=0.07852, over 19679.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2903, pruned_loss=0.0658, over 3733445.31 frames. ], batch size: 53, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:53:55,089 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:02,068 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:28,618 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:30,948 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:35,655 INFO [train.py:903] (0/4) Epoch 20, batch 750, loss[loss=0.2363, simple_loss=0.313, pruned_loss=0.0798, over 13054.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.0661, over 3745699.00 frames. ], batch size: 135, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:54:58,168 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 15:55:19,214 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.790e+02 6.042e+02 7.311e+02 1.890e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 15:55:28,430 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:37,192 INFO [train.py:903] (0/4) Epoch 20, batch 800, loss[loss=0.1993, simple_loss=0.2867, pruned_loss=0.05592, over 19676.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2909, pruned_loss=0.06598, over 3755591.05 frames. ], batch size: 60, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:55:53,517 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 15:55:58,722 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:59,953 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3900, 1.3212, 1.7506, 1.4113, 2.7551, 3.7343, 3.4526, 3.9374], + device='cuda:0'), covar=tensor([0.1501, 0.3625, 0.3227, 0.2284, 0.0540, 0.0186, 0.0206, 0.0237], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0316, 0.0347, 0.0262, 0.0238, 0.0182, 0.0213, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 15:56:21,742 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130566.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:27,625 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3121, 1.3578, 1.4771, 1.4395, 1.7487, 1.7874, 1.7490, 0.6319], + device='cuda:0'), covar=tensor([0.2225, 0.4008, 0.2588, 0.1843, 0.1567, 0.2155, 0.1383, 0.4478], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0633, 0.0696, 0.0476, 0.0616, 0.0526, 0.0659, 0.0540], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 15:56:40,733 INFO [train.py:903] (0/4) Epoch 20, batch 850, loss[loss=0.2227, simple_loss=0.2982, pruned_loss=0.07362, over 19769.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2895, pruned_loss=0.06528, over 3769479.87 frames. ], batch size: 54, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:56:41,903 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:57:07,797 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7479, 1.5855, 1.6733, 2.2568, 1.7434, 2.0188, 1.9989, 1.8128], + device='cuda:0'), covar=tensor([0.0779, 0.0904, 0.0949, 0.0664, 0.0779, 0.0711, 0.0831, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0226, 0.0243, 0.0227, 0.0209, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 15:57:25,283 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.920e+02 5.760e+02 7.852e+02 1.760e+03, threshold=1.152e+03, percent-clipped=6.0 +2023-04-02 15:57:33,264 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 15:57:40,813 INFO [train.py:903] (0/4) Epoch 20, batch 900, loss[loss=0.2017, simple_loss=0.2874, pruned_loss=0.05799, over 19541.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.06617, over 3773867.98 frames. ], batch size: 56, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:44,421 INFO [train.py:903] (0/4) Epoch 20, batch 950, loss[loss=0.2403, simple_loss=0.321, pruned_loss=0.07978, over 19514.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06629, over 3773990.51 frames. ], batch size: 64, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:47,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 15:59:28,713 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.928e+02 5.917e+02 7.294e+02 1.421e+03, threshold=1.183e+03, percent-clipped=3.0 +2023-04-02 15:59:46,666 INFO [train.py:903] (0/4) Epoch 20, batch 1000, loss[loss=0.2412, simple_loss=0.3184, pruned_loss=0.08199, over 19751.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2912, pruned_loss=0.06653, over 3791385.69 frames. ], batch size: 54, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:59:48,240 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:59:50,473 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:03,661 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:16,552 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 16:00:18,718 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:20,899 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:38,540 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 16:00:48,485 INFO [train.py:903] (0/4) Epoch 20, batch 1050, loss[loss=0.1965, simple_loss=0.2682, pruned_loss=0.06234, over 19621.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06714, over 3800851.98 frames. ], batch size: 50, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:01:02,832 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:10,718 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:20,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 16:01:33,077 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.731e+02 5.562e+02 6.742e+02 8.268e+02 2.102e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-04-02 16:01:49,838 INFO [train.py:903] (0/4) Epoch 20, batch 1100, loss[loss=0.2075, simple_loss=0.2737, pruned_loss=0.07071, over 19753.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.06655, over 3813094.59 frames. ], batch size: 46, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:27,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:02:36,684 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-02 16:02:47,563 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-02 16:02:52,364 INFO [train.py:903] (0/4) Epoch 20, batch 1150, loss[loss=0.2044, simple_loss=0.2909, pruned_loss=0.05897, over 19531.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06693, over 3794992.82 frames. 
], batch size: 64, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:03:26,689 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:27,633 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:33,808 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130915.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:39,055 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 5.063e+02 6.056e+02 7.993e+02 1.743e+03, threshold=1.211e+03, percent-clipped=5.0 +2023-04-02 16:03:50,403 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:55,681 INFO [train.py:903] (0/4) Epoch 20, batch 1200, loss[loss=0.2203, simple_loss=0.2984, pruned_loss=0.07112, over 19515.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.29, pruned_loss=0.06623, over 3812121.54 frames. ], batch size: 64, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:04:23,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 16:04:56,111 INFO [train.py:903] (0/4) Epoch 20, batch 1250, loss[loss=0.1766, simple_loss=0.2543, pruned_loss=0.04942, over 18591.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2904, pruned_loss=0.06644, over 3818964.47 frames. ], batch size: 41, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:05:42,761 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.121e+02 6.297e+02 7.673e+02 2.016e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 16:05:50,218 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:05:52,242 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0242, 1.1788, 1.6690, 1.1756, 2.6153, 3.6477, 3.3714, 3.8497], + device='cuda:0'), covar=tensor([0.1766, 0.3968, 0.3406, 0.2477, 0.0629, 0.0198, 0.0202, 0.0251], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0317, 0.0348, 0.0262, 0.0238, 0.0183, 0.0213, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 16:05:58,785 INFO [train.py:903] (0/4) Epoch 20, batch 1300, loss[loss=0.1999, simple_loss=0.2889, pruned_loss=0.05541, over 19605.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2901, pruned_loss=0.06597, over 3832936.30 frames. ], batch size: 57, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:06:12,053 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:06:32,908 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8772, 4.3913, 4.6268, 4.6434, 1.7474, 4.3039, 3.7544, 4.2974], + device='cuda:0'), covar=tensor([0.1754, 0.0767, 0.0618, 0.0699, 0.5978, 0.0928, 0.0677, 0.1238], + device='cuda:0'), in_proj_covar=tensor([0.0758, 0.0716, 0.0917, 0.0799, 0.0815, 0.0675, 0.0551, 0.0856], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 16:06:59,462 INFO [train.py:903] (0/4) Epoch 20, batch 1350, loss[loss=0.2386, simple_loss=0.309, pruned_loss=0.0841, over 14097.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2908, pruned_loss=0.06653, over 3816672.34 frames. 
], batch size: 136, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:07:19,072 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 16:07:43,051 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131117.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:07:44,787 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.090e+02 6.517e+02 8.267e+02 2.193e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-02 16:08:02,290 INFO [train.py:903] (0/4) Epoch 20, batch 1400, loss[loss=0.233, simple_loss=0.3048, pruned_loss=0.08063, over 19483.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06687, over 3812392.74 frames. ], batch size: 49, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:08:15,121 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4936, 1.4034, 1.4265, 1.9758, 1.5787, 1.8298, 1.8550, 1.5352], + device='cuda:0'), covar=tensor([0.0844, 0.0923, 0.0995, 0.0712, 0.0803, 0.0721, 0.0796, 0.0735], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0243, 0.0226, 0.0209, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 16:08:15,148 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:42,679 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:50,665 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:03,825 INFO [train.py:903] (0/4) Epoch 20, batch 1450, loss[loss=0.18, simple_loss=0.2546, pruned_loss=0.05269, over 19764.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2912, pruned_loss=0.06708, over 3816243.37 frames. ], batch size: 47, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:09:06,058 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 16:09:14,520 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:16,743 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:21,563 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131196.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:50,840 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.028e+02 6.181e+02 7.641e+02 1.699e+03, threshold=1.236e+03, percent-clipped=6.0 +2023-04-02 16:10:06,715 INFO [train.py:903] (0/4) Epoch 20, batch 1500, loss[loss=0.2178, simple_loss=0.289, pruned_loss=0.07331, over 19563.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2916, pruned_loss=0.06702, over 3824634.45 frames. 
], batch size: 61, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:10:21,846 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7925, 4.3621, 2.7714, 3.8311, 0.8331, 4.3077, 4.1754, 4.3076], + device='cuda:0'), covar=tensor([0.0570, 0.0833, 0.1793, 0.0765, 0.3935, 0.0616, 0.0810, 0.1013], + device='cuda:0'), in_proj_covar=tensor([0.0494, 0.0397, 0.0485, 0.0345, 0.0400, 0.0423, 0.0416, 0.0447], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:10:49,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.44 vs. limit=5.0 +2023-04-02 16:11:07,013 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:07,713 INFO [train.py:903] (0/4) Epoch 20, batch 1550, loss[loss=0.2227, simple_loss=0.282, pruned_loss=0.08166, over 19750.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2925, pruned_loss=0.06747, over 3814092.96 frames. ], batch size: 47, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:11:29,222 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:38,375 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:53,753 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 5.087e+02 6.243e+02 7.473e+02 1.350e+03, threshold=1.249e+03, percent-clipped=1.0 +2023-04-02 16:11:58,674 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:12:07,309 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5841, 1.1737, 1.4149, 1.3987, 2.2455, 1.0892, 2.2311, 2.4702], + device='cuda:0'), covar=tensor([0.0663, 0.2697, 0.2743, 0.1492, 0.0831, 0.1945, 0.0894, 0.0465], + device='cuda:0'), in_proj_covar=tensor([0.0399, 0.0357, 0.0377, 0.0339, 0.0368, 0.0348, 0.0371, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:12:10,214 INFO [train.py:903] (0/4) Epoch 20, batch 1600, loss[loss=0.1792, simple_loss=0.27, pruned_loss=0.04421, over 19695.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2925, pruned_loss=0.06741, over 3812865.18 frames. ], batch size: 53, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:12:36,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 16:13:12,817 INFO [train.py:903] (0/4) Epoch 20, batch 1650, loss[loss=0.1894, simple_loss=0.2805, pruned_loss=0.04919, over 18203.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2936, pruned_loss=0.06804, over 3790095.45 frames. ], batch size: 83, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:13:59,208 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.697e+02 5.218e+02 6.304e+02 8.075e+02 1.501e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 16:14:08,577 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:15,058 INFO [train.py:903] (0/4) Epoch 20, batch 1700, loss[loss=0.2008, simple_loss=0.2869, pruned_loss=0.05739, over 19768.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.293, pruned_loss=0.06774, over 3802747.21 frames. 
], batch size: 56, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:14:17,619 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:55,853 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 16:15:16,171 INFO [train.py:903] (0/4) Epoch 20, batch 1750, loss[loss=0.1534, simple_loss=0.2373, pruned_loss=0.03477, over 19298.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.292, pruned_loss=0.06723, over 3807382.28 frames. ], batch size: 44, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:15:52,691 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131510.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:16:02,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.359e+02 6.412e+02 7.691e+02 1.507e+03, threshold=1.282e+03, percent-clipped=3.0 +2023-04-02 16:16:18,693 INFO [train.py:903] (0/4) Epoch 20, batch 1800, loss[loss=0.189, simple_loss=0.2726, pruned_loss=0.0527, over 19749.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2913, pruned_loss=0.06654, over 3824093.57 frames. ], batch size: 51, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:16:24,470 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131536.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:17:16,070 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 16:17:22,062 INFO [train.py:903] (0/4) Epoch 20, batch 1850, loss[loss=0.2172, simple_loss=0.2946, pruned_loss=0.06995, over 19483.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06687, over 3809689.95 frames. ], batch size: 49, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:17:33,547 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2006, 1.0150, 1.4541, 1.2609, 2.2503, 3.1962, 2.9237, 3.5730], + device='cuda:0'), covar=tensor([0.1826, 0.5248, 0.4597, 0.2524, 0.0780, 0.0266, 0.0323, 0.0279], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0317, 0.0348, 0.0263, 0.0238, 0.0183, 0.0214, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 16:17:54,263 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 16:18:09,336 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4743, 1.6002, 2.2071, 1.8359, 3.0418, 2.4545, 3.2116, 1.5720], + device='cuda:0'), covar=tensor([0.2636, 0.4604, 0.2686, 0.1998, 0.1631, 0.2389, 0.1953, 0.4427], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0631, 0.0694, 0.0474, 0.0612, 0.0526, 0.0658, 0.0539], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:18:09,977 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.826e+02 6.634e+02 9.045e+02 2.049e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-02 16:18:25,162 INFO [train.py:903] (0/4) Epoch 20, batch 1900, loss[loss=0.2253, simple_loss=0.3026, pruned_loss=0.07396, over 17291.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2921, pruned_loss=0.0669, over 3809779.77 frames. ], batch size: 101, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:18:40,086 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-02 16:18:45,471 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 16:18:47,918 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:19:10,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 16:19:25,446 INFO [train.py:903] (0/4) Epoch 20, batch 1950, loss[loss=0.1946, simple_loss=0.2835, pruned_loss=0.05285, over 19756.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2922, pruned_loss=0.06703, over 3814409.51 frames. ], batch size: 54, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:19:49,104 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8437, 1.3492, 1.0275, 0.9395, 1.1550, 0.9457, 0.9653, 1.2118], + device='cuda:0'), covar=tensor([0.0676, 0.0766, 0.1137, 0.0746, 0.0565, 0.1337, 0.0594, 0.0508], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0316, 0.0335, 0.0263, 0.0248, 0.0337, 0.0293, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:20:03,185 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0475, 2.5132, 1.8118, 1.7977, 2.3532, 1.7417, 1.6792, 2.1136], + device='cuda:0'), covar=tensor([0.1035, 0.0855, 0.0798, 0.0732, 0.0485, 0.0897, 0.0710, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0315, 0.0335, 0.0263, 0.0247, 0.0336, 0.0292, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:20:13,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.129e+02 6.435e+02 8.344e+02 2.370e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 16:20:28,634 INFO [train.py:903] (0/4) Epoch 20, batch 2000, loss[loss=0.2213, simple_loss=0.3038, pruned_loss=0.06944, over 19279.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06721, over 3802896.09 frames. ], batch size: 66, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:18,583 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:26,462 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 16:21:28,767 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131778.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:33,340 INFO [train.py:903] (0/4) Epoch 20, batch 2050, loss[loss=0.1933, simple_loss=0.2815, pruned_loss=0.05257, over 19682.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2918, pruned_loss=0.06701, over 3790358.29 frames. ], batch size: 59, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:45,609 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 16:21:46,767 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 16:22:07,705 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 16:22:22,550 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 4.827e+02 6.005e+02 7.777e+02 1.829e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 16:22:25,301 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3028, 1.2731, 1.2837, 1.3630, 1.0279, 1.3803, 1.2980, 1.3746], + device='cuda:0'), covar=tensor([0.0939, 0.1012, 0.1091, 0.0695, 0.0927, 0.0861, 0.0894, 0.0799], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0243, 0.0228, 0.0211, 0.0187, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 16:22:35,848 INFO [train.py:903] (0/4) Epoch 20, batch 2100, loss[loss=0.2257, simple_loss=0.3069, pruned_loss=0.07221, over 19481.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06742, over 3791383.83 frames. ], batch size: 64, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:02,414 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 16:23:02,523 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131854.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:26,816 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 16:23:29,473 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:37,612 INFO [train.py:903] (0/4) Epoch 20, batch 2150, loss[loss=0.2216, simple_loss=0.2945, pruned_loss=0.07438, over 19486.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2901, pruned_loss=0.06633, over 3804035.46 frames. ], batch size: 49, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:42,649 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:51,930 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:09,638 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:16,678 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1995, 1.2787, 1.2317, 1.0210, 1.0680, 1.0372, 0.1098, 0.3591], + device='cuda:0'), covar=tensor([0.0727, 0.0678, 0.0432, 0.0628, 0.1273, 0.0679, 0.1319, 0.1091], + device='cuda:0'), in_proj_covar=tensor([0.0359, 0.0351, 0.0356, 0.0382, 0.0455, 0.0386, 0.0335, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:24:26,289 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.118e+02 6.039e+02 8.265e+02 1.505e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 16:24:39,695 INFO [train.py:903] (0/4) Epoch 20, batch 2200, loss[loss=0.2641, simple_loss=0.3348, pruned_loss=0.09668, over 19684.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.06721, over 3793193.94 frames. 
], batch size: 59, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:24:40,118 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:17,293 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1593, 1.9889, 1.7662, 2.1176, 1.9476, 1.9054, 1.6329, 2.0553], + device='cuda:0'), covar=tensor([0.1014, 0.1461, 0.1490, 0.1078, 0.1370, 0.0523, 0.1436, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0356, 0.0309, 0.0250, 0.0300, 0.0249, 0.0305, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:25:26,135 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:42,868 INFO [train.py:903] (0/4) Epoch 20, batch 2250, loss[loss=0.1959, simple_loss=0.2747, pruned_loss=0.0586, over 19612.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2917, pruned_loss=0.06759, over 3806851.48 frames. ], batch size: 50, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:04,599 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-132000.pt +2023-04-02 16:26:31,962 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.368e+02 4.978e+02 6.272e+02 7.965e+02 1.499e+03, threshold=1.254e+03, percent-clipped=2.0 +2023-04-02 16:26:44,539 INFO [train.py:903] (0/4) Epoch 20, batch 2300, loss[loss=0.2481, simple_loss=0.3216, pruned_loss=0.08725, over 19644.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06797, over 3808670.94 frames. ], batch size: 58, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:58,039 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 16:27:05,368 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132049.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:34,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:47,305 INFO [train.py:903] (0/4) Epoch 20, batch 2350, loss[loss=0.1874, simple_loss=0.2678, pruned_loss=0.05348, over 19851.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2936, pruned_loss=0.06828, over 3802446.58 frames. ], batch size: 52, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:28:26,794 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 16:28:35,983 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 4.795e+02 5.776e+02 7.778e+02 1.972e+03, threshold=1.155e+03, percent-clipped=8.0 +2023-04-02 16:28:42,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 16:28:49,774 INFO [train.py:903] (0/4) Epoch 20, batch 2400, loss[loss=0.1874, simple_loss=0.2774, pruned_loss=0.04868, over 19841.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06724, over 3814614.88 frames. 
], batch size: 52, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:29:03,424 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:11,458 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132149.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:32,881 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:41,666 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:51,981 INFO [train.py:903] (0/4) Epoch 20, batch 2450, loss[loss=0.1971, simple_loss=0.2677, pruned_loss=0.06329, over 19754.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2907, pruned_loss=0.06635, over 3819431.58 frames. ], batch size: 46, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:30:07,511 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.49 vs. limit=5.0 +2023-04-02 16:30:38,830 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:41,020 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.995e+02 6.263e+02 8.063e+02 1.363e+03, threshold=1.253e+03, percent-clipped=5.0 +2023-04-02 16:30:47,013 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132225.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:54,280 INFO [train.py:903] (0/4) Epoch 20, batch 2500, loss[loss=0.2169, simple_loss=0.2945, pruned_loss=0.06961, over 19500.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2902, pruned_loss=0.06637, over 3819977.35 frames. ], batch size: 64, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:31:11,671 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-02 16:31:15,973 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:31:56,368 INFO [train.py:903] (0/4) Epoch 20, batch 2550, loss[loss=0.2224, simple_loss=0.2937, pruned_loss=0.07552, over 19609.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2902, pruned_loss=0.06659, over 3822083.25 frames. ], batch size: 50, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:32:45,610 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.513e+02 4.985e+02 5.787e+02 8.066e+02 1.995e+03, threshold=1.157e+03, percent-clipped=4.0 +2023-04-02 16:32:52,674 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 16:32:58,506 INFO [train.py:903] (0/4) Epoch 20, batch 2600, loss[loss=0.216, simple_loss=0.2995, pruned_loss=0.06618, over 18159.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2905, pruned_loss=0.06657, over 3818381.36 frames. 
], batch size: 83, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:33:02,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:33:39,103 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6210, 1.3985, 1.4387, 2.1351, 1.6235, 1.9161, 1.9381, 1.6618], + device='cuda:0'), covar=tensor([0.0851, 0.1004, 0.1092, 0.0758, 0.0897, 0.0758, 0.0909, 0.0721], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0242, 0.0228, 0.0210, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 16:34:01,399 INFO [train.py:903] (0/4) Epoch 20, batch 2650, loss[loss=0.2391, simple_loss=0.3235, pruned_loss=0.0773, over 18285.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06618, over 3835346.65 frames. ], batch size: 83, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:34:15,314 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:23,137 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 16:34:44,187 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132416.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:50,449 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.865e+02 6.149e+02 7.368e+02 1.585e+03, threshold=1.230e+03, percent-clipped=4.0 +2023-04-02 16:35:04,061 INFO [train.py:903] (0/4) Epoch 20, batch 2700, loss[loss=0.2259, simple_loss=0.3039, pruned_loss=0.07395, over 19596.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.291, pruned_loss=0.06627, over 3839604.94 frames. ], batch size: 52, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:35:14,882 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7416, 1.7106, 1.6275, 1.3929, 1.2921, 1.4659, 0.2194, 0.6889], + device='cuda:0'), covar=tensor([0.0560, 0.0560, 0.0349, 0.0561, 0.1108, 0.0653, 0.1165, 0.0947], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0351, 0.0354, 0.0381, 0.0454, 0.0383, 0.0333, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:36:06,699 INFO [train.py:903] (0/4) Epoch 20, batch 2750, loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.05617, over 19658.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.06623, over 3821091.94 frames. ], batch size: 58, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:36:39,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132508.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:36:55,659 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.186e+02 6.180e+02 7.968e+02 1.505e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-02 16:37:07,814 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:37:08,659 INFO [train.py:903] (0/4) Epoch 20, batch 2800, loss[loss=0.272, simple_loss=0.3335, pruned_loss=0.1052, over 13019.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.0669, over 3810962.25 frames. 
], batch size: 135, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:37:27,250 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2051, 2.2153, 2.5376, 3.1428, 2.2658, 2.9274, 2.6330, 2.2373], + device='cuda:0'), covar=tensor([0.4223, 0.4162, 0.1785, 0.2261, 0.4323, 0.2031, 0.4245, 0.3305], + device='cuda:0'), in_proj_covar=tensor([0.0883, 0.0942, 0.0704, 0.0926, 0.0862, 0.0797, 0.0833, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 16:38:13,003 INFO [train.py:903] (0/4) Epoch 20, batch 2850, loss[loss=0.2445, simple_loss=0.3228, pruned_loss=0.0831, over 19702.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06683, over 3814902.12 frames. ], batch size: 59, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:38:22,397 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:38:26,809 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0685, 2.2120, 1.6730, 2.1361, 2.2757, 1.6436, 1.6786, 1.9820], + device='cuda:0'), covar=tensor([0.1165, 0.1563, 0.1851, 0.1323, 0.1384, 0.0959, 0.1786, 0.0995], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0356, 0.0311, 0.0251, 0.0300, 0.0249, 0.0305, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:38:52,959 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:38:54,131 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3444, 2.4421, 2.1631, 2.5768, 2.4355, 2.1009, 2.0378, 2.4636], + device='cuda:0'), covar=tensor([0.0963, 0.1403, 0.1302, 0.0983, 0.1208, 0.0513, 0.1289, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0355, 0.0309, 0.0249, 0.0299, 0.0248, 0.0304, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:39:01,716 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 4.861e+02 5.815e+02 7.642e+02 3.357e+03, threshold=1.163e+03, percent-clipped=7.0 +2023-04-02 16:39:14,638 INFO [train.py:903] (0/4) Epoch 20, batch 2900, loss[loss=0.1954, simple_loss=0.2782, pruned_loss=0.05629, over 19488.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.0665, over 3802962.07 frames. ], batch size: 64, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:39:15,642 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 16:40:13,482 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132678.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:40:18,618 INFO [train.py:903] (0/4) Epoch 20, batch 2950, loss[loss=0.1711, simple_loss=0.2648, pruned_loss=0.03867, over 19530.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06544, over 3811112.67 frames. ], batch size: 56, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:40:21,815 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-02 16:41:09,140 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.627e+02 5.679e+02 7.371e+02 2.153e+03, threshold=1.136e+03, percent-clipped=3.0 +2023-04-02 16:41:20,813 INFO [train.py:903] (0/4) Epoch 20, batch 3000, loss[loss=0.2371, simple_loss=0.3139, pruned_loss=0.08014, over 19552.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2884, pruned_loss=0.06473, over 3820485.62 frames. ], batch size: 61, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:20,813 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 16:41:34,266 INFO [train.py:937] (0/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2697, pruned_loss=0.03462, over 944034.00 frames. +2023-04-02 16:41:34,268 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 16:41:40,268 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 16:42:10,887 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9051, 4.3157, 4.6469, 4.6407, 1.7625, 4.3496, 3.7948, 4.3516], + device='cuda:0'), covar=tensor([0.1674, 0.0876, 0.0618, 0.0632, 0.6157, 0.0949, 0.0634, 0.1186], + device='cuda:0'), in_proj_covar=tensor([0.0765, 0.0723, 0.0919, 0.0806, 0.0819, 0.0682, 0.0555, 0.0859], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 16:42:12,911 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:13,059 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:15,794 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 16:42:35,229 INFO [train.py:903] (0/4) Epoch 20, batch 3050, loss[loss=0.2014, simple_loss=0.2845, pruned_loss=0.05912, over 19531.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06542, over 3812407.24 frames. ], batch size: 54, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:42:41,476 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:43,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132789.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:13,147 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:24,233 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 4.785e+02 6.187e+02 7.720e+02 1.879e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-02 16:43:28,313 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9033, 2.0024, 2.1836, 2.6260, 1.8918, 2.4398, 2.2473, 1.9719], + device='cuda:0'), covar=tensor([0.4191, 0.3946, 0.1892, 0.2273, 0.4117, 0.2108, 0.4681, 0.3441], + device='cuda:0'), in_proj_covar=tensor([0.0886, 0.0948, 0.0708, 0.0931, 0.0866, 0.0800, 0.0837, 0.0776], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 16:43:37,016 INFO [train.py:903] (0/4) Epoch 20, batch 3100, loss[loss=0.236, simple_loss=0.3179, pruned_loss=0.07708, over 17260.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2895, pruned_loss=0.06569, over 3807499.36 frames. 
], batch size: 100, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:43:39,637 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3248, 2.1534, 2.0356, 1.9010, 1.6785, 1.8457, 0.6043, 1.2881], + device='cuda:0'), covar=tensor([0.0529, 0.0555, 0.0445, 0.0775, 0.1024, 0.0846, 0.1227, 0.0964], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0352, 0.0357, 0.0383, 0.0457, 0.0386, 0.0334, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:44:33,776 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8871, 4.3256, 4.6147, 4.6535, 1.6980, 4.3480, 3.7684, 4.3404], + device='cuda:0'), covar=tensor([0.1726, 0.0864, 0.0636, 0.0688, 0.6155, 0.0915, 0.0667, 0.1230], + device='cuda:0'), in_proj_covar=tensor([0.0766, 0.0722, 0.0921, 0.0807, 0.0821, 0.0682, 0.0556, 0.0859], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 16:44:40,221 INFO [train.py:903] (0/4) Epoch 20, batch 3150, loss[loss=0.2886, simple_loss=0.3525, pruned_loss=0.1124, over 13059.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2896, pruned_loss=0.0658, over 3812850.32 frames. ], batch size: 136, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:45:07,813 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 16:45:11,639 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5546, 1.6260, 1.9062, 1.7898, 2.6453, 2.3110, 2.7534, 1.3971], + device='cuda:0'), covar=tensor([0.2258, 0.3804, 0.2483, 0.1789, 0.1391, 0.2011, 0.1395, 0.4027], + device='cuda:0'), in_proj_covar=tensor([0.0524, 0.0630, 0.0694, 0.0476, 0.0612, 0.0523, 0.0656, 0.0540], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:45:29,824 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.025e+02 5.951e+02 7.011e+02 1.371e+03, threshold=1.190e+03, percent-clipped=2.0 +2023-04-02 16:45:42,509 INFO [train.py:903] (0/4) Epoch 20, batch 3200, loss[loss=0.1857, simple_loss=0.266, pruned_loss=0.05272, over 19583.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2891, pruned_loss=0.06575, over 3829035.25 frames. ], batch size: 52, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:46:13,797 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7392, 1.7220, 1.6998, 1.4633, 1.3583, 1.5000, 0.2287, 0.7558], + device='cuda:0'), covar=tensor([0.0664, 0.0606, 0.0366, 0.0600, 0.1211, 0.0676, 0.1188, 0.1008], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0352, 0.0358, 0.0383, 0.0458, 0.0386, 0.0334, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 16:46:14,351 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 16:46:46,021 INFO [train.py:903] (0/4) Epoch 20, batch 3250, loss[loss=0.2403, simple_loss=0.3091, pruned_loss=0.08572, over 19849.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2892, pruned_loss=0.06576, over 3832399.53 frames. 
], batch size: 52, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:47:37,650 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.383e+02 4.867e+02 6.333e+02 8.818e+02 1.782e+03, threshold=1.267e+03, percent-clipped=7.0 +2023-04-02 16:47:37,813 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:47:49,136 INFO [train.py:903] (0/4) Epoch 20, batch 3300, loss[loss=0.2112, simple_loss=0.2955, pruned_loss=0.0634, over 18869.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2913, pruned_loss=0.067, over 3831149.95 frames. ], batch size: 74, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:47:57,159 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 16:48:03,452 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4934, 1.3025, 1.8282, 1.3325, 2.6200, 3.5902, 3.2981, 3.8179], + device='cuda:0'), covar=tensor([0.1455, 0.3734, 0.3131, 0.2248, 0.0562, 0.0187, 0.0212, 0.0272], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0263, 0.0240, 0.0184, 0.0215, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 16:48:54,792 INFO [train.py:903] (0/4) Epoch 20, batch 3350, loss[loss=0.2035, simple_loss=0.2922, pruned_loss=0.0574, over 19678.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2902, pruned_loss=0.06648, over 3822335.78 frames. ], batch size: 53, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:49:27,411 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:49:45,520 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.956e+02 6.093e+02 7.175e+02 1.819e+03, threshold=1.219e+03, percent-clipped=1.0 +2023-04-02 16:49:57,526 INFO [train.py:903] (0/4) Epoch 20, batch 3400, loss[loss=0.2046, simple_loss=0.2921, pruned_loss=0.05854, over 19700.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06598, over 3818294.78 frames. ], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:50:06,193 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133137.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:51:02,179 INFO [train.py:903] (0/4) Epoch 20, batch 3450, loss[loss=0.2039, simple_loss=0.2953, pruned_loss=0.05626, over 19670.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2917, pruned_loss=0.06657, over 3825508.80 frames. ], batch size: 58, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:51:08,047 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 16:51:41,614 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4198, 2.0718, 1.5591, 1.2918, 1.9840, 1.1963, 1.3018, 1.8880], + device='cuda:0'), covar=tensor([0.1090, 0.0766, 0.1136, 0.0926, 0.0519, 0.1365, 0.0800, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0313, 0.0333, 0.0259, 0.0245, 0.0335, 0.0291, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:51:52,696 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 4.746e+02 5.634e+02 7.504e+02 1.582e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 16:51:54,138 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133223.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:52:04,226 INFO [train.py:903] (0/4) Epoch 20, batch 3500, loss[loss=0.2131, simple_loss=0.2999, pruned_loss=0.06318, over 19680.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.292, pruned_loss=0.06714, over 3827187.26 frames. ], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:08,045 INFO [train.py:903] (0/4) Epoch 20, batch 3550, loss[loss=0.2339, simple_loss=0.3055, pruned_loss=0.08115, over 19704.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2921, pruned_loss=0.06781, over 3822070.56 frames. ], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:15,827 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 16:53:52,740 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 16:53:58,721 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 5.017e+02 5.933e+02 7.980e+02 2.795e+03, threshold=1.187e+03, percent-clipped=11.0 +2023-04-02 16:54:10,371 INFO [train.py:903] (0/4) Epoch 20, batch 3600, loss[loss=0.2163, simple_loss=0.3007, pruned_loss=0.06589, over 19837.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2923, pruned_loss=0.0677, over 3819565.72 frames. ], batch size: 52, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:54:48,395 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0158, 2.0963, 2.3490, 2.7159, 2.0049, 2.5989, 2.3704, 2.1288], + device='cuda:0'), covar=tensor([0.4279, 0.4031, 0.1802, 0.2420, 0.4202, 0.2134, 0.4761, 0.3316], + device='cuda:0'), in_proj_covar=tensor([0.0883, 0.0945, 0.0705, 0.0928, 0.0864, 0.0799, 0.0834, 0.0771], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 16:55:15,902 INFO [train.py:903] (0/4) Epoch 20, batch 3650, loss[loss=0.2698, simple_loss=0.3537, pruned_loss=0.09294, over 19281.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.06747, over 3808794.45 frames. 
], batch size: 70, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:28,068 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133392.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 16:55:29,466 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:30,464 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:32,920 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8795, 1.7989, 1.4706, 1.8180, 1.6995, 1.4666, 1.4565, 1.7420], + device='cuda:0'), covar=tensor([0.1138, 0.1492, 0.1743, 0.1234, 0.1457, 0.0943, 0.1717, 0.0934], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0350, 0.0302, 0.0245, 0.0294, 0.0244, 0.0298, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 16:56:02,498 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133418.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:06,667 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 5.113e+02 6.456e+02 7.889e+02 1.610e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 16:56:18,443 INFO [train.py:903] (0/4) Epoch 20, batch 3700, loss[loss=0.1775, simple_loss=0.2598, pruned_loss=0.04763, over 19624.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.292, pruned_loss=0.06748, over 3805575.36 frames. ], batch size: 50, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:02,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 16:57:19,791 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:23,016 INFO [train.py:903] (0/4) Epoch 20, batch 3750, loss[loss=0.2352, simple_loss=0.3116, pruned_loss=0.07936, over 19330.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.293, pruned_loss=0.06829, over 3802492.56 frames. ], batch size: 66, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:34,881 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:39,326 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6336, 1.7348, 1.9778, 2.0203, 1.5039, 1.8857, 2.0110, 1.8788], + device='cuda:0'), covar=tensor([0.3859, 0.3151, 0.1704, 0.2016, 0.3326, 0.1979, 0.4588, 0.3048], + device='cuda:0'), in_proj_covar=tensor([0.0881, 0.0942, 0.0703, 0.0923, 0.0861, 0.0794, 0.0832, 0.0768], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 16:57:50,915 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:58:13,508 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.077e+02 5.935e+02 8.206e+02 1.595e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 16:58:24,813 INFO [train.py:903] (0/4) Epoch 20, batch 3800, loss[loss=0.2786, simple_loss=0.3411, pruned_loss=0.1081, over 19665.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2935, pruned_loss=0.0683, over 3805620.00 frames. 
], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:58:58,170 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 16:59:30,203 INFO [train.py:903] (0/4) Epoch 20, batch 3850, loss[loss=0.1778, simple_loss=0.264, pruned_loss=0.04576, over 19778.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2923, pruned_loss=0.06774, over 3809916.34 frames. ], batch size: 48, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:59:31,738 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8777, 0.8295, 0.8489, 0.9042, 0.7922, 0.9476, 0.9620, 0.9037], + device='cuda:0'), covar=tensor([0.0687, 0.0750, 0.0800, 0.0587, 0.0764, 0.0671, 0.0687, 0.0600], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0221, 0.0224, 0.0242, 0.0227, 0.0210, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 16:59:48,047 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-02 17:00:20,972 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.079e+02 6.219e+02 7.261e+02 1.808e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-02 17:00:32,688 INFO [train.py:903] (0/4) Epoch 20, batch 3900, loss[loss=0.2469, simple_loss=0.3241, pruned_loss=0.08486, over 18038.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2918, pruned_loss=0.0671, over 3816135.50 frames. ], batch size: 83, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:01:21,113 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133670.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:01:37,403 INFO [train.py:903] (0/4) Epoch 20, batch 3950, loss[loss=0.2014, simple_loss=0.2868, pruned_loss=0.05803, over 19647.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2918, pruned_loss=0.06711, over 3798132.89 frames. ], batch size: 55, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:01:42,253 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 17:01:50,353 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-02 17:02:26,843 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.167e+02 6.244e+02 7.613e+02 1.189e+03, threshold=1.249e+03, percent-clipped=0.0 +2023-04-02 17:02:38,784 INFO [train.py:903] (0/4) Epoch 20, batch 4000, loss[loss=0.2147, simple_loss=0.2959, pruned_loss=0.06677, over 19415.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2904, pruned_loss=0.06608, over 3814055.83 frames. ], batch size: 70, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:02:43,766 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133736.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:02:46,990 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:02:53,757 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 17:03:26,442 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 17:03:41,991 INFO [train.py:903] (0/4) Epoch 20, batch 4050, loss[loss=0.2391, simple_loss=0.3104, pruned_loss=0.08391, over 19533.00 frames. 
], tot_loss[loss=0.2112, simple_loss=0.2901, pruned_loss=0.06613, over 3804347.62 frames. ], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:30,941 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.971e+02 6.413e+02 8.150e+02 1.897e+03, threshold=1.283e+03, percent-clipped=7.0 +2023-04-02 17:04:42,288 INFO [train.py:903] (0/4) Epoch 20, batch 4100, loss[loss=0.2175, simple_loss=0.3001, pruned_loss=0.06743, over 19522.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2913, pruned_loss=0.06696, over 3798444.84 frames. ], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:47,156 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:04:58,532 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0737, 1.7536, 1.8413, 2.7739, 2.0285, 2.3652, 2.5012, 2.0918], + device='cuda:0'), covar=tensor([0.0828, 0.0934, 0.0989, 0.0807, 0.0853, 0.0727, 0.0792, 0.0699], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0219, 0.0223, 0.0240, 0.0225, 0.0208, 0.0186, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 17:05:06,743 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133851.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:05:09,020 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:18,894 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 17:05:35,459 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9505, 2.0494, 2.2736, 2.7544, 2.0848, 2.5949, 2.3469, 2.0733], + device='cuda:0'), covar=tensor([0.4249, 0.3843, 0.1837, 0.2159, 0.3891, 0.1982, 0.4433, 0.3209], + device='cuda:0'), in_proj_covar=tensor([0.0887, 0.0948, 0.0707, 0.0931, 0.0868, 0.0801, 0.0836, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 17:05:45,978 INFO [train.py:903] (0/4) Epoch 20, batch 4150, loss[loss=0.1801, simple_loss=0.2547, pruned_loss=0.05278, over 19723.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2906, pruned_loss=0.0668, over 3803036.83 frames. ], batch size: 46, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:00,788 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-02 17:06:19,805 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5325, 1.6140, 1.9671, 1.7244, 3.0907, 2.6064, 3.5756, 1.5359], + device='cuda:0'), covar=tensor([0.2329, 0.3997, 0.2578, 0.1788, 0.1455, 0.1998, 0.1442, 0.4041], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0633, 0.0698, 0.0477, 0.0613, 0.0524, 0.0658, 0.0541], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 17:06:35,678 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.109e+02 4.757e+02 5.877e+02 6.653e+02 1.329e+03, threshold=1.175e+03, percent-clipped=1.0 +2023-04-02 17:06:47,879 INFO [train.py:903] (0/4) Epoch 20, batch 4200, loss[loss=0.1978, simple_loss=0.2747, pruned_loss=0.0604, over 19605.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2898, pruned_loss=0.06619, over 3810433.91 frames. 
], batch size: 50, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:51,427 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 17:07:12,220 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:07:30,706 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3659, 3.9733, 2.6760, 3.4903, 0.7582, 3.9279, 3.8048, 3.9054], + device='cuda:0'), covar=tensor([0.0728, 0.1023, 0.1982, 0.0875, 0.4136, 0.0757, 0.0872, 0.1109], + device='cuda:0'), in_proj_covar=tensor([0.0492, 0.0398, 0.0483, 0.0342, 0.0399, 0.0423, 0.0415, 0.0449], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:07:31,264 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 17:07:50,933 INFO [train.py:903] (0/4) Epoch 20, batch 4250, loss[loss=0.2322, simple_loss=0.3106, pruned_loss=0.07693, over 19622.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06571, over 3824633.90 frames. ], batch size: 57, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:08:08,173 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 17:08:12,876 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-134000.pt +2023-04-02 17:08:20,523 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 17:08:32,199 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:33,454 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:36,367 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 17:08:41,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.008e+02 5.790e+02 6.980e+02 1.679e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-02 17:08:54,645 INFO [train.py:903] (0/4) Epoch 20, batch 4300, loss[loss=0.197, simple_loss=0.2652, pruned_loss=0.06442, over 19355.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2889, pruned_loss=0.06549, over 3833588.44 frames. ], batch size: 47, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:09:27,709 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 17:09:50,646 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 17:09:56,296 INFO [train.py:903] (0/4) Epoch 20, batch 4350, loss[loss=0.2058, simple_loss=0.2917, pruned_loss=0.05993, over 19678.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06527, over 3833773.99 frames. 
], batch size: 53, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:10:30,261 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134107.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:10:32,489 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:10:48,201 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 5.216e+02 6.315e+02 8.361e+02 2.012e+03, threshold=1.263e+03, percent-clipped=10.0 +2023-04-02 17:10:57,734 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:00,848 INFO [train.py:903] (0/4) Epoch 20, batch 4400, loss[loss=0.2241, simple_loss=0.3025, pruned_loss=0.07284, over 19792.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2892, pruned_loss=0.06561, over 3821881.74 frames. ], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:11:01,273 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134132.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:11:03,606 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:18,430 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-02 17:11:29,061 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 17:11:37,426 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 17:12:05,041 INFO [train.py:903] (0/4) Epoch 20, batch 4450, loss[loss=0.2355, simple_loss=0.3017, pruned_loss=0.08469, over 13514.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06572, over 3804112.73 frames. ], batch size: 135, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:12:36,932 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134207.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:12:56,171 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.942e+02 5.989e+02 7.537e+02 1.405e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-02 17:13:08,112 INFO [train.py:903] (0/4) Epoch 20, batch 4500, loss[loss=0.272, simple_loss=0.3359, pruned_loss=0.104, over 13992.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.06585, over 3810262.37 frames. ], batch size: 136, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:13:08,517 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134232.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:14:10,220 INFO [train.py:903] (0/4) Epoch 20, batch 4550, loss[loss=0.2066, simple_loss=0.2789, pruned_loss=0.06718, over 19057.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2905, pruned_loss=0.06668, over 3815087.75 frames. ], batch size: 42, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:14:19,190 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 17:14:42,913 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 17:15:01,018 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:04,301 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.805e+02 4.924e+02 5.886e+02 8.898e+02 2.816e+03, threshold=1.177e+03, percent-clipped=9.0 +2023-04-02 17:15:09,041 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:10,239 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9683, 2.1248, 1.7299, 2.0684, 2.1088, 1.6984, 1.6890, 1.8687], + device='cuda:0'), covar=tensor([0.0973, 0.1212, 0.1388, 0.0891, 0.1044, 0.0837, 0.1410, 0.0796], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0355, 0.0309, 0.0250, 0.0299, 0.0248, 0.0304, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:15:14,419 INFO [train.py:903] (0/4) Epoch 20, batch 4600, loss[loss=0.2245, simple_loss=0.3034, pruned_loss=0.07281, over 19617.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2911, pruned_loss=0.06701, over 3803593.72 frames. ], batch size: 61, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:15:47,425 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:15,891 INFO [train.py:903] (0/4) Epoch 20, batch 4650, loss[loss=0.2184, simple_loss=0.3005, pruned_loss=0.06818, over 19618.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2918, pruned_loss=0.0671, over 3801792.13 frames. ], batch size: 57, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:16:20,718 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134385.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:32,931 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 17:16:44,589 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 17:16:52,767 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134410.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:17:09,009 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 4.575e+02 5.939e+02 8.134e+02 1.295e+03, threshold=1.188e+03, percent-clipped=3.0 +2023-04-02 17:17:14,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.41 vs. limit=5.0 +2023-04-02 17:17:19,254 INFO [train.py:903] (0/4) Epoch 20, batch 4700, loss[loss=0.1913, simple_loss=0.2688, pruned_loss=0.05692, over 19587.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2902, pruned_loss=0.06618, over 3801380.79 frames. ], batch size: 52, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:17:23,222 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5967, 2.3136, 1.6963, 1.5757, 2.1280, 1.3522, 1.3706, 1.8969], + device='cuda:0'), covar=tensor([0.1096, 0.0828, 0.0965, 0.0780, 0.0511, 0.1202, 0.0760, 0.0494], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0310, 0.0331, 0.0256, 0.0244, 0.0334, 0.0286, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:17:41,459 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 17:18:11,952 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:18:20,723 INFO [train.py:903] (0/4) Epoch 20, batch 4750, loss[loss=0.2223, simple_loss=0.2949, pruned_loss=0.07481, over 19470.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2906, pruned_loss=0.06648, over 3805494.38 frames. ], batch size: 49, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:18:58,065 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 17:19:09,037 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 17:19:14,949 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.001e+02 5.955e+02 7.090e+02 1.974e+03, threshold=1.191e+03, percent-clipped=7.0 +2023-04-02 17:19:15,401 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3261, 1.3347, 1.4827, 1.5096, 1.7285, 1.8515, 1.7375, 0.5484], + device='cuda:0'), covar=tensor([0.2315, 0.4271, 0.2555, 0.1905, 0.1688, 0.2253, 0.1483, 0.4747], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0635, 0.0697, 0.0477, 0.0615, 0.0523, 0.0658, 0.0543], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 17:19:25,462 INFO [train.py:903] (0/4) Epoch 20, batch 4800, loss[loss=0.1947, simple_loss=0.2747, pruned_loss=0.05733, over 19750.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2894, pruned_loss=0.0657, over 3820838.33 frames. ], batch size: 54, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:20:15,904 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6598, 1.6194, 1.5545, 2.1244, 1.6182, 2.0134, 1.8516, 1.6812], + device='cuda:0'), covar=tensor([0.0810, 0.0864, 0.0957, 0.0686, 0.0847, 0.0686, 0.0896, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0220, 0.0222, 0.0241, 0.0224, 0.0208, 0.0185, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 17:20:26,805 INFO [train.py:903] (0/4) Epoch 20, batch 4850, loss[loss=0.2018, simple_loss=0.2727, pruned_loss=0.06545, over 19419.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2894, pruned_loss=0.06554, over 3817892.45 frames. ], batch size: 48, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:20:50,078 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 17:21:13,058 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 17:21:18,750 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 17:21:18,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 17:21:21,958 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.787e+02 5.710e+02 7.760e+02 1.554e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-02 17:21:31,384 INFO [train.py:903] (0/4) Epoch 20, batch 4900, loss[loss=0.1961, simple_loss=0.2684, pruned_loss=0.06189, over 19805.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06524, over 3815444.62 frames. 
], batch size: 48, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:21:31,400 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 17:21:51,449 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 17:21:52,995 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3001, 2.3126, 2.5445, 3.0997, 2.2891, 3.0041, 2.5538, 2.3391], + device='cuda:0'), covar=tensor([0.4002, 0.3946, 0.1794, 0.2515, 0.4327, 0.2031, 0.4478, 0.3132], + device='cuda:0'), in_proj_covar=tensor([0.0883, 0.0947, 0.0707, 0.0926, 0.0866, 0.0801, 0.0834, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 17:22:14,358 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134666.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:21,026 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:33,361 INFO [train.py:903] (0/4) Epoch 20, batch 4950, loss[loss=0.2618, simple_loss=0.3299, pruned_loss=0.09689, over 19524.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2918, pruned_loss=0.06736, over 3798982.22 frames. ], batch size: 56, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:22:49,195 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 17:23:15,465 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 17:23:28,332 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 4.876e+02 5.783e+02 7.285e+02 1.244e+03, threshold=1.157e+03, percent-clipped=2.0 +2023-04-02 17:23:34,564 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:23:37,532 INFO [train.py:903] (0/4) Epoch 20, batch 5000, loss[loss=0.2528, simple_loss=0.317, pruned_loss=0.09435, over 13195.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.0669, over 3806653.38 frames. ], batch size: 137, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:23:45,504 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 17:23:56,617 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 17:24:06,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:37,268 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3262, 1.7539, 2.0019, 2.0608, 3.9088, 1.3712, 2.7019, 4.1414], + device='cuda:0'), covar=tensor([0.0521, 0.2726, 0.2643, 0.1735, 0.0803, 0.2633, 0.1523, 0.0246], + device='cuda:0'), in_proj_covar=tensor([0.0400, 0.0361, 0.0381, 0.0342, 0.0371, 0.0346, 0.0371, 0.0396], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:24:37,308 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134781.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:38,191 INFO [train.py:903] (0/4) Epoch 20, batch 5050, loss[loss=0.218, simple_loss=0.3002, pruned_loss=0.06796, over 17254.00 frames. 
], tot_loss[loss=0.2143, simple_loss=0.2931, pruned_loss=0.06779, over 3808492.66 frames. ], batch size: 101, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:24:44,175 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134786.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:56,567 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3053, 1.2649, 1.4735, 1.4621, 1.8432, 1.7772, 1.7718, 0.6095], + device='cuda:0'), covar=tensor([0.2637, 0.4609, 0.2802, 0.2175, 0.1672, 0.2594, 0.1544, 0.5001], + device='cuda:0'), in_proj_covar=tensor([0.0523, 0.0631, 0.0693, 0.0476, 0.0612, 0.0521, 0.0654, 0.0540], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 17:25:13,075 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 17:25:31,564 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.915e+02 6.341e+02 8.226e+02 2.739e+03, threshold=1.268e+03, percent-clipped=9.0 +2023-04-02 17:25:41,326 INFO [train.py:903] (0/4) Epoch 20, batch 5100, loss[loss=0.2574, simple_loss=0.3297, pruned_loss=0.09259, over 19404.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2925, pruned_loss=0.06745, over 3790563.44 frames. ], batch size: 70, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:25:42,885 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6717, 1.7951, 1.7933, 2.5943, 1.8009, 2.3820, 1.8950, 1.5613], + device='cuda:0'), covar=tensor([0.4751, 0.4366, 0.2757, 0.2589, 0.4306, 0.2245, 0.5981, 0.4987], + device='cuda:0'), in_proj_covar=tensor([0.0882, 0.0946, 0.0707, 0.0926, 0.0865, 0.0799, 0.0834, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 17:25:50,461 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 17:25:53,835 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 17:25:58,181 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 17:26:26,106 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3387, 3.0668, 2.4039, 2.8285, 0.7196, 3.0307, 2.8903, 2.9845], + device='cuda:0'), covar=tensor([0.1076, 0.1373, 0.1960, 0.1033, 0.4083, 0.0942, 0.1073, 0.1449], + device='cuda:0'), in_proj_covar=tensor([0.0490, 0.0399, 0.0486, 0.0341, 0.0398, 0.0422, 0.0415, 0.0450], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:26:41,255 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134880.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:26:43,294 INFO [train.py:903] (0/4) Epoch 20, batch 5150, loss[loss=0.1904, simple_loss=0.2593, pruned_loss=0.06076, over 19741.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06669, over 3807183.65 frames. ], batch size: 46, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:26:55,951 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 17:27:07,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 17:27:32,541 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 17:27:37,090 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.618e+02 4.964e+02 6.321e+02 8.056e+02 1.479e+03, threshold=1.264e+03, percent-clipped=3.0 +2023-04-02 17:27:46,172 INFO [train.py:903] (0/4) Epoch 20, batch 5200, loss[loss=0.2373, simple_loss=0.3134, pruned_loss=0.08062, over 19666.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2915, pruned_loss=0.06644, over 3812548.70 frames. ], batch size: 60, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:28:00,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 17:28:33,739 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-02 17:28:46,967 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 17:28:49,186 INFO [train.py:903] (0/4) Epoch 20, batch 5250, loss[loss=0.1932, simple_loss=0.2669, pruned_loss=0.05978, over 19403.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2917, pruned_loss=0.06646, over 3804643.90 frames. ], batch size: 48, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:42,796 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.845e+02 5.899e+02 8.450e+02 1.811e+03, threshold=1.180e+03, percent-clipped=4.0 +2023-04-02 17:29:51,999 INFO [train.py:903] (0/4) Epoch 20, batch 5300, loss[loss=0.1756, simple_loss=0.2489, pruned_loss=0.05121, over 19764.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2921, pruned_loss=0.06656, over 3805146.61 frames. ], batch size: 45, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:59,059 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135037.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:04,893 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:11,576 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 17:30:24,893 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4694, 2.5186, 2.6482, 3.1525, 2.5940, 3.0282, 2.7264, 2.5260], + device='cuda:0'), covar=tensor([0.3243, 0.2893, 0.1422, 0.1971, 0.3161, 0.1545, 0.3314, 0.2358], + device='cuda:0'), in_proj_covar=tensor([0.0885, 0.0947, 0.0708, 0.0928, 0.0867, 0.0800, 0.0837, 0.0773], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 17:30:30,726 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:36,496 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:54,695 INFO [train.py:903] (0/4) Epoch 20, batch 5350, loss[loss=0.2538, simple_loss=0.3247, pruned_loss=0.09142, over 19297.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2901, pruned_loss=0.06567, over 3813560.87 frames. ], batch size: 66, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:29,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 17:31:45,352 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9283, 1.7574, 1.5527, 1.9525, 1.6911, 1.6691, 1.5853, 1.8208], + device='cuda:0'), covar=tensor([0.1087, 0.1400, 0.1490, 0.0957, 0.1331, 0.0569, 0.1363, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0350, 0.0304, 0.0247, 0.0296, 0.0247, 0.0302, 0.0250], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:31:48,520 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.248e+02 6.375e+02 8.534e+02 1.946e+03, threshold=1.275e+03, percent-clipped=9.0 +2023-04-02 17:31:57,760 INFO [train.py:903] (0/4) Epoch 20, batch 5400, loss[loss=0.168, simple_loss=0.2471, pruned_loss=0.04443, over 19487.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2897, pruned_loss=0.06552, over 3822787.58 frames. ], batch size: 49, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:00,878 INFO [train.py:903] (0/4) Epoch 20, batch 5450, loss[loss=0.2288, simple_loss=0.2948, pruned_loss=0.0814, over 19584.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.0654, over 3829809.95 frames. ], batch size: 52, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:09,002 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135189.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:36,826 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2871, 1.3381, 1.7603, 1.3486, 2.7579, 3.5793, 3.3410, 3.7550], + device='cuda:0'), covar=tensor([0.1599, 0.3591, 0.3141, 0.2337, 0.0675, 0.0221, 0.0200, 0.0276], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0316, 0.0344, 0.0262, 0.0237, 0.0183, 0.0213, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 17:33:52,673 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135224.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:54,891 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.255e+02 6.010e+02 7.558e+02 1.824e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 17:34:03,095 INFO [train.py:903] (0/4) Epoch 20, batch 5500, loss[loss=0.2445, simple_loss=0.3197, pruned_loss=0.08466, over 19672.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06585, over 3833790.16 frames. ], batch size: 58, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:34:29,182 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 17:35:05,523 INFO [train.py:903] (0/4) Epoch 20, batch 5550, loss[loss=0.2211, simple_loss=0.2847, pruned_loss=0.07875, over 19780.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2895, pruned_loss=0.06567, over 3836837.91 frames. ], batch size: 48, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:35:13,923 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 17:35:47,243 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8315, 4.4236, 2.8033, 3.9065, 0.7405, 4.3299, 4.2005, 4.3328], + device='cuda:0'), covar=tensor([0.0562, 0.0896, 0.1781, 0.0854, 0.4389, 0.0623, 0.0863, 0.0987], + device='cuda:0'), in_proj_covar=tensor([0.0492, 0.0401, 0.0488, 0.0344, 0.0401, 0.0424, 0.0419, 0.0454], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:36:00,656 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.884e+02 6.418e+02 8.039e+02 2.322e+03, threshold=1.284e+03, percent-clipped=8.0 +2023-04-02 17:36:03,999 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 17:36:07,361 INFO [train.py:903] (0/4) Epoch 20, batch 5600, loss[loss=0.2147, simple_loss=0.2812, pruned_loss=0.07411, over 19370.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2898, pruned_loss=0.06582, over 3837853.27 frames. ], batch size: 47, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:36:18,135 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135339.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:36:29,866 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4896, 1.5455, 1.8384, 1.7135, 2.7063, 2.3426, 2.8914, 1.3169], + device='cuda:0'), covar=tensor([0.2319, 0.4168, 0.2630, 0.1858, 0.1357, 0.1982, 0.1229, 0.4193], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0633, 0.0697, 0.0477, 0.0616, 0.0523, 0.0657, 0.0543], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 17:37:11,251 INFO [train.py:903] (0/4) Epoch 20, batch 5650, loss[loss=0.1987, simple_loss=0.2687, pruned_loss=0.0643, over 19010.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2892, pruned_loss=0.06541, over 3828172.45 frames. ], batch size: 42, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:01,349 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 17:38:05,709 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.892e+02 5.590e+02 6.975e+02 1.698e+03, threshold=1.118e+03, percent-clipped=3.0 +2023-04-02 17:38:12,540 INFO [train.py:903] (0/4) Epoch 20, batch 5700, loss[loss=0.1941, simple_loss=0.2668, pruned_loss=0.06071, over 19303.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2893, pruned_loss=0.06553, over 3835411.19 frames. 
], batch size: 44, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:47,470 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6734, 1.4020, 1.5032, 2.1986, 1.5954, 1.9509, 2.0012, 1.7136], + device='cuda:0'), covar=tensor([0.0950, 0.1133, 0.1070, 0.0810, 0.0990, 0.0822, 0.1019, 0.0823], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0222, 0.0225, 0.0243, 0.0227, 0.0211, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 17:38:48,671 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9008, 3.3409, 1.9059, 1.6405, 3.0349, 1.5078, 1.2835, 2.3057], + device='cuda:0'), covar=tensor([0.1336, 0.0509, 0.0952, 0.1046, 0.0480, 0.1294, 0.1052, 0.0627], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0311, 0.0331, 0.0257, 0.0244, 0.0333, 0.0289, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 17:39:14,106 INFO [train.py:903] (0/4) Epoch 20, batch 5750, loss[loss=0.2576, simple_loss=0.3347, pruned_loss=0.09023, over 19665.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.06661, over 3825072.67 frames. ], batch size: 60, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:17,235 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 17:39:25,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 17:39:31,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 17:40:10,278 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 5.062e+02 5.931e+02 7.729e+02 1.708e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-02 17:40:18,214 INFO [train.py:903] (0/4) Epoch 20, batch 5800, loss[loss=0.2135, simple_loss=0.2975, pruned_loss=0.06473, over 19342.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.0662, over 3819455.61 frames. ], batch size: 66, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:40:19,513 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:40:21,363 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 17:40:56,842 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 17:41:20,579 INFO [train.py:903] (0/4) Epoch 20, batch 5850, loss[loss=0.2435, simple_loss=0.318, pruned_loss=0.08447, over 18139.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2922, pruned_loss=0.06654, over 3825731.26 frames. ], batch size: 83, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:41:35,346 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:03,224 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-04-02 17:42:08,892 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:15,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.765e+02 5.164e+02 6.346e+02 8.557e+02 2.066e+03, threshold=1.269e+03, percent-clipped=9.0 +2023-04-02 17:42:23,021 INFO [train.py:903] (0/4) Epoch 20, batch 5900, loss[loss=0.2004, simple_loss=0.2843, pruned_loss=0.05826, over 19539.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2911, pruned_loss=0.06605, over 3832748.97 frames. ], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:42:25,417 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 17:42:42,980 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:43,044 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:46,308 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 17:43:19,396 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6827, 1.4924, 1.5626, 1.9160, 1.5015, 1.8019, 1.7866, 1.6529], + device='cuda:0'), covar=tensor([0.0750, 0.0907, 0.0931, 0.0594, 0.0752, 0.0738, 0.0801, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0244, 0.0227, 0.0212, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 17:43:24,699 INFO [train.py:903] (0/4) Epoch 20, batch 5950, loss[loss=0.2079, simple_loss=0.2898, pruned_loss=0.06294, over 19776.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2908, pruned_loss=0.06653, over 3824438.88 frames. ], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:00,309 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2622, 2.2888, 2.5359, 2.9954, 2.2199, 2.8613, 2.6379, 2.2934], + device='cuda:0'), covar=tensor([0.4109, 0.3893, 0.1719, 0.2544, 0.4340, 0.2063, 0.4370, 0.3217], + device='cuda:0'), in_proj_covar=tensor([0.0881, 0.0945, 0.0703, 0.0925, 0.0863, 0.0794, 0.0831, 0.0770], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 17:44:19,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.126e+02 6.708e+02 1.004e+03 2.382e+03, threshold=1.342e+03, percent-clipped=11.0 +2023-04-02 17:44:19,670 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 17:44:27,256 INFO [train.py:903] (0/4) Epoch 20, batch 6000, loss[loss=0.1948, simple_loss=0.2789, pruned_loss=0.05533, over 19526.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2898, pruned_loss=0.06577, over 3830224.13 frames. ], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:27,257 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 17:44:39,928 INFO [train.py:937] (0/4) Epoch 20, validation: loss=0.1697, simple_loss=0.2697, pruned_loss=0.0349, over 944034.00 frames. 
+2023-04-02 17:44:39,929 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 17:45:41,594 INFO [train.py:903] (0/4) Epoch 20, batch 6050, loss[loss=0.2146, simple_loss=0.3084, pruned_loss=0.0604, over 19368.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2908, pruned_loss=0.06633, over 3824394.83 frames. ], batch size: 70, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:46:36,913 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 4.873e+02 5.806e+02 7.519e+02 1.887e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-04-02 17:46:43,929 INFO [train.py:903] (0/4) Epoch 20, batch 6100, loss[loss=0.2071, simple_loss=0.2922, pruned_loss=0.06101, over 19559.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2912, pruned_loss=0.06684, over 3817792.94 frames. ], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:47:46,920 INFO [train.py:903] (0/4) Epoch 20, batch 6150, loss[loss=0.2234, simple_loss=0.2838, pruned_loss=0.08155, over 19717.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2917, pruned_loss=0.06752, over 3806650.31 frames. ], batch size: 51, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:48:15,302 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135904.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:16,099 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 17:48:41,891 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.701e+02 6.211e+02 7.118e+02 1.417e+03, threshold=1.242e+03, percent-clipped=3.0 +2023-04-02 17:48:45,706 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:49,668 INFO [train.py:903] (0/4) Epoch 20, batch 6200, loss[loss=0.2057, simple_loss=0.2905, pruned_loss=0.06041, over 19524.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06701, over 3819308.64 frames. ], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:49:18,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4107, 1.2787, 1.2926, 1.8304, 1.4164, 1.5608, 1.6957, 1.4000], + device='cuda:0'), covar=tensor([0.0942, 0.1032, 0.1139, 0.0658, 0.0872, 0.0841, 0.0883, 0.0791], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0243, 0.0226, 0.0211, 0.0186, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 17:49:52,112 INFO [train.py:903] (0/4) Epoch 20, batch 6250, loss[loss=0.2499, simple_loss=0.3224, pruned_loss=0.08871, over 18072.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.292, pruned_loss=0.06691, over 3822297.24 frames. ], batch size: 83, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:04,667 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:14,613 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-136000.pt +2023-04-02 17:50:15,875 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:24,777 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 17:50:30,680 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136012.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:48,348 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 5.185e+02 6.758e+02 8.208e+02 2.074e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-02 17:50:55,342 INFO [train.py:903] (0/4) Epoch 20, batch 6300, loss[loss=0.1772, simple_loss=0.2592, pruned_loss=0.04758, over 19765.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2923, pruned_loss=0.06701, over 3820046.98 frames. ], batch size: 48, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:51:26,707 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7157, 4.2442, 4.4478, 4.4428, 1.7623, 4.1568, 3.6347, 4.1412], + device='cuda:0'), covar=tensor([0.1714, 0.0856, 0.0614, 0.0724, 0.5857, 0.0915, 0.0760, 0.1164], + device='cuda:0'), in_proj_covar=tensor([0.0770, 0.0724, 0.0927, 0.0810, 0.0820, 0.0685, 0.0557, 0.0856], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 17:51:58,448 INFO [train.py:903] (0/4) Epoch 20, batch 6350, loss[loss=0.1773, simple_loss=0.255, pruned_loss=0.04982, over 19619.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2914, pruned_loss=0.066, over 3833267.40 frames. ], batch size: 50, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:52:29,665 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:52:53,613 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.045e+02 5.932e+02 7.156e+02 1.987e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 17:53:01,256 INFO [train.py:903] (0/4) Epoch 20, batch 6400, loss[loss=0.2805, simple_loss=0.3447, pruned_loss=0.1082, over 13443.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2925, pruned_loss=0.06687, over 3829167.77 frames. ], batch size: 136, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:53:36,330 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:54:02,560 INFO [train.py:903] (0/4) Epoch 20, batch 6450, loss[loss=0.2559, simple_loss=0.32, pruned_loss=0.09588, over 19087.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2927, pruned_loss=0.06673, over 3836768.68 frames. ], batch size: 69, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:54:51,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 17:54:59,003 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.012e+02 6.099e+02 8.089e+02 3.011e+03, threshold=1.220e+03, percent-clipped=7.0 +2023-04-02 17:55:07,145 INFO [train.py:903] (0/4) Epoch 20, batch 6500, loss[loss=0.1922, simple_loss=0.2805, pruned_loss=0.05192, over 19672.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2904, pruned_loss=0.06526, over 3847421.03 frames. ], batch size: 53, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:55:12,966 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 17:55:44,587 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 17:56:09,970 INFO [train.py:903] (0/4) Epoch 20, batch 6550, loss[loss=0.2222, simple_loss=0.3052, pruned_loss=0.06964, over 19689.00 frames. 
], tot_loss[loss=0.2104, simple_loss=0.2903, pruned_loss=0.06525, over 3832348.91 frames. ], batch size: 59, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:06,361 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.319e+02 6.594e+02 8.030e+02 1.579e+03, threshold=1.319e+03, percent-clipped=2.0 +2023-04-02 17:57:14,431 INFO [train.py:903] (0/4) Epoch 20, batch 6600, loss[loss=0.245, simple_loss=0.33, pruned_loss=0.07997, over 19304.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2899, pruned_loss=0.06516, over 3829235.55 frames. ], batch size: 66, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:28,427 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:41,115 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:45,647 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136356.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:53,909 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:16,828 INFO [train.py:903] (0/4) Epoch 20, batch 6650, loss[loss=0.2549, simple_loss=0.3272, pruned_loss=0.09134, over 19648.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2904, pruned_loss=0.06558, over 3824799.76 frames. ], batch size: 58, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:58:26,135 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:56,601 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 17:59:14,408 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.763e+02 5.879e+02 7.861e+02 2.647e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 17:59:22,069 INFO [train.py:903] (0/4) Epoch 20, batch 6700, loss[loss=0.1821, simple_loss=0.2625, pruned_loss=0.05088, over 19361.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2901, pruned_loss=0.06573, over 3825143.77 frames. ], batch size: 47, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:59:55,242 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:08,586 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136471.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:20,886 INFO [train.py:903] (0/4) Epoch 20, batch 6750, loss[loss=0.2156, simple_loss=0.2999, pruned_loss=0.06562, over 19597.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2893, pruned_loss=0.06582, over 3826942.09 frames. ], batch size: 61, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:00:45,714 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:01:11,793 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 5.547e+02 6.932e+02 9.039e+02 1.788e+03, threshold=1.386e+03, percent-clipped=9.0 +2023-04-02 18:01:19,069 INFO [train.py:903] (0/4) Epoch 20, batch 6800, loss[loss=0.2557, simple_loss=0.3332, pruned_loss=0.08908, over 19317.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2897, pruned_loss=0.06629, over 3808722.95 frames. 
], batch size: 66, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:01:48,959 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-20.pt +2023-04-02 18:02:04,538 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 18:02:04,990 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 18:02:07,961 INFO [train.py:903] (0/4) Epoch 21, batch 0, loss[loss=0.2156, simple_loss=0.2909, pruned_loss=0.07016, over 19734.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2909, pruned_loss=0.07016, over 19734.00 frames. ], batch size: 45, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:02:07,961 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 18:02:18,721 INFO [train.py:937] (0/4) Epoch 21, validation: loss=0.1691, simple_loss=0.2696, pruned_loss=0.03427, over 944034.00 frames. +2023-04-02 18:02:18,722 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18677MB +2023-04-02 18:02:30,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 18:03:20,706 INFO [train.py:903] (0/4) Epoch 21, batch 50, loss[loss=0.2389, simple_loss=0.3158, pruned_loss=0.08103, over 19655.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2883, pruned_loss=0.06347, over 865964.31 frames. ], batch size: 55, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:03:33,306 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136619.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:03:43,288 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.947e+02 6.173e+02 6.953e+02 1.295e+03, threshold=1.235e+03, percent-clipped=0.0 +2023-04-02 18:03:54,604 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 18:04:02,463 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.00 vs. limit=5.0 +2023-04-02 18:04:22,856 INFO [train.py:903] (0/4) Epoch 21, batch 100, loss[loss=0.1831, simple_loss=0.2682, pruned_loss=0.04901, over 19662.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.06449, over 1522623.57 frames. ], batch size: 53, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:04:25,205 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:04:34,151 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 18:05:09,044 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136697.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:25,009 INFO [train.py:903] (0/4) Epoch 21, batch 150, loss[loss=0.2158, simple_loss=0.2882, pruned_loss=0.07173, over 19698.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2879, pruned_loss=0.06499, over 2037311.72 frames. 
], batch size: 53, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:05:31,955 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:45,589 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 5.031e+02 5.954e+02 7.665e+02 1.668e+03, threshold=1.191e+03, percent-clipped=3.0 +2023-04-02 18:05:45,976 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136727.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:02,666 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136740.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:03,843 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7791, 1.5757, 1.5306, 2.3208, 1.8909, 2.1263, 2.1197, 1.7693], + device='cuda:0'), covar=tensor([0.0826, 0.0933, 0.1003, 0.0694, 0.0764, 0.0720, 0.0881, 0.0703], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0225, 0.0242, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 18:06:18,602 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:25,311 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 18:06:27,565 INFO [train.py:903] (0/4) Epoch 21, batch 200, loss[loss=0.243, simple_loss=0.3207, pruned_loss=0.08264, over 18342.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2885, pruned_loss=0.0649, over 2441580.84 frames. ], batch size: 84, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:06:29,103 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6286, 1.5078, 1.5296, 2.0085, 1.5587, 1.8702, 1.7926, 1.6069], + device='cuda:0'), covar=tensor([0.0832, 0.0997, 0.0981, 0.0702, 0.0822, 0.0750, 0.0877, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0243, 0.0226, 0.0213, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 18:06:35,855 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7806, 4.3339, 2.7929, 3.8436, 0.8658, 4.2324, 4.1301, 4.3453], + device='cuda:0'), covar=tensor([0.0579, 0.1055, 0.1954, 0.0839, 0.4313, 0.0730, 0.0947, 0.1026], + device='cuda:0'), in_proj_covar=tensor([0.0496, 0.0403, 0.0491, 0.0343, 0.0403, 0.0426, 0.0422, 0.0458], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:06:43,862 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5790, 1.3689, 1.3951, 2.1731, 1.7679, 1.9076, 1.8285, 1.5326], + device='cuda:0'), covar=tensor([0.0897, 0.1074, 0.1077, 0.0736, 0.0797, 0.0759, 0.0927, 0.0789], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0243, 0.0226, 0.0213, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 18:07:12,680 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 18:07:29,898 INFO [train.py:903] (0/4) Epoch 21, batch 250, loss[loss=0.2208, simple_loss=0.3035, pruned_loss=0.06904, over 18138.00 frames. 
], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06555, over 2752423.09 frames. ], batch size: 84, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:32,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:47,462 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:52,059 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.815e+02 6.209e+02 8.068e+02 1.278e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-04-02 18:08:31,137 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4961, 2.3067, 1.8240, 1.4585, 2.1633, 1.4263, 1.3862, 2.0568], + device='cuda:0'), covar=tensor([0.0997, 0.0804, 0.0955, 0.0903, 0.0481, 0.1217, 0.0773, 0.0471], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0314, 0.0338, 0.0260, 0.0247, 0.0336, 0.0292, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:08:33,037 INFO [train.py:903] (0/4) Epoch 21, batch 300, loss[loss=0.218, simple_loss=0.3065, pruned_loss=0.06472, over 19660.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.29, pruned_loss=0.06554, over 2986279.27 frames. ], batch size: 55, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:08:53,704 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:23,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136900.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:36,406 INFO [train.py:903] (0/4) Epoch 21, batch 350, loss[loss=0.2335, simple_loss=0.3151, pruned_loss=0.07601, over 19618.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06571, over 3175133.29 frames. ], batch size: 61, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:09:38,661 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 18:09:56,978 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.835e+02 5.943e+02 7.281e+02 1.741e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-02 18:10:39,349 INFO [train.py:903] (0/4) Epoch 21, batch 400, loss[loss=0.1924, simple_loss=0.2647, pruned_loss=0.06003, over 19737.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.291, pruned_loss=0.06633, over 3318465.11 frames. ], batch size: 46, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:11:02,227 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-02 18:11:35,373 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:11:41,217 INFO [train.py:903] (0/4) Epoch 21, batch 450, loss[loss=0.189, simple_loss=0.2666, pruned_loss=0.05572, over 19408.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06573, over 3429743.78 frames. 
], batch size: 48, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:11:59,603 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6680, 3.0215, 3.1849, 3.2612, 1.3506, 2.9841, 2.6526, 2.7546], + device='cuda:0'), covar=tensor([0.2800, 0.1751, 0.1432, 0.1719, 0.7304, 0.2351, 0.1550, 0.2572], + device='cuda:0'), in_proj_covar=tensor([0.0767, 0.0719, 0.0928, 0.0817, 0.0819, 0.0687, 0.0560, 0.0860], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 18:12:00,846 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4391, 1.5014, 1.7831, 1.6970, 2.6732, 2.3084, 2.8299, 1.3115], + device='cuda:0'), covar=tensor([0.2430, 0.4142, 0.2657, 0.1903, 0.1606, 0.2045, 0.1526, 0.4132], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0634, 0.0700, 0.0479, 0.0618, 0.0525, 0.0657, 0.0540], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 18:12:03,759 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.613e+02 5.973e+02 7.527e+02 1.521e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-02 18:12:12,950 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 18:12:14,061 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 18:12:43,473 INFO [train.py:903] (0/4) Epoch 21, batch 500, loss[loss=0.2254, simple_loss=0.2865, pruned_loss=0.08219, over 19765.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2884, pruned_loss=0.06499, over 3528121.66 frames. ], batch size: 45, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:53,144 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:25,140 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:43,701 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.2386, 5.2708, 6.0985, 6.1214, 2.1717, 5.7422, 4.9297, 5.7548], + device='cuda:0'), covar=tensor([0.1569, 0.0694, 0.0456, 0.0502, 0.5510, 0.0686, 0.0553, 0.0968], + device='cuda:0'), in_proj_covar=tensor([0.0769, 0.0720, 0.0929, 0.0817, 0.0820, 0.0688, 0.0560, 0.0860], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 18:13:45,883 INFO [train.py:903] (0/4) Epoch 21, batch 550, loss[loss=0.2026, simple_loss=0.2821, pruned_loss=0.06154, over 19581.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2872, pruned_loss=0.06432, over 3609433.90 frames. 
], batch size: 52, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:13:59,942 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:14:09,847 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.365e+02 6.823e+02 8.347e+02 2.113e+03, threshold=1.365e+03, percent-clipped=7.0 +2023-04-02 18:14:33,015 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5795, 1.1317, 1.4147, 1.1772, 2.2069, 0.9790, 2.1031, 2.4464], + device='cuda:0'), covar=tensor([0.0704, 0.2856, 0.2765, 0.1837, 0.0889, 0.2204, 0.1058, 0.0501], + device='cuda:0'), in_proj_covar=tensor([0.0404, 0.0361, 0.0382, 0.0344, 0.0372, 0.0347, 0.0373, 0.0398], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:14:48,996 INFO [train.py:903] (0/4) Epoch 21, batch 600, loss[loss=0.2232, simple_loss=0.2968, pruned_loss=0.07483, over 19743.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2882, pruned_loss=0.0644, over 3666779.36 frames. ], batch size: 51, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:15:00,534 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:15:29,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 18:15:47,666 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0854, 1.7699, 1.8257, 2.8272, 1.9893, 2.2680, 2.2824, 2.0699], + device='cuda:0'), covar=tensor([0.0890, 0.1116, 0.1107, 0.0856, 0.0996, 0.0916, 0.1019, 0.0808], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0241, 0.0224, 0.0210, 0.0186, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 18:15:53,982 INFO [train.py:903] (0/4) Epoch 21, batch 650, loss[loss=0.2057, simple_loss=0.2764, pruned_loss=0.06749, over 19365.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2892, pruned_loss=0.06511, over 3697931.10 frames. ], batch size: 47, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:16:16,601 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 4.775e+02 5.968e+02 8.002e+02 1.696e+03, threshold=1.194e+03, percent-clipped=7.0 +2023-04-02 18:16:56,145 INFO [train.py:903] (0/4) Epoch 21, batch 700, loss[loss=0.201, simple_loss=0.2877, pruned_loss=0.0571, over 19528.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2908, pruned_loss=0.06588, over 3726388.75 frames. ], batch size: 56, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:17:03,358 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2667, 1.8692, 1.8444, 2.1993, 1.9131, 1.8869, 1.7971, 2.2170], + device='cuda:0'), covar=tensor([0.0904, 0.1472, 0.1333, 0.0926, 0.1213, 0.0519, 0.1306, 0.0606], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0354, 0.0309, 0.0249, 0.0298, 0.0251, 0.0307, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:17:26,116 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:18:00,857 INFO [train.py:903] (0/4) Epoch 21, batch 750, loss[loss=0.196, simple_loss=0.2822, pruned_loss=0.05491, over 19670.00 frames. 
], tot_loss[loss=0.2091, simple_loss=0.289, pruned_loss=0.06465, over 3764586.02 frames. ], batch size: 53, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:18:18,164 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7502, 1.8457, 2.0798, 2.3339, 1.7408, 2.2814, 2.1373, 1.9571], + device='cuda:0'), covar=tensor([0.4024, 0.3541, 0.1796, 0.2153, 0.3672, 0.1871, 0.4633, 0.3149], + device='cuda:0'), in_proj_covar=tensor([0.0888, 0.0950, 0.0709, 0.0927, 0.0870, 0.0800, 0.0835, 0.0775], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 18:18:22,899 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 4.851e+02 6.236e+02 7.648e+02 2.101e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-04-02 18:18:42,017 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:19:01,638 INFO [train.py:903] (0/4) Epoch 21, batch 800, loss[loss=0.2279, simple_loss=0.3103, pruned_loss=0.07277, over 19613.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.0655, over 3741175.51 frames. ], batch size: 57, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:19:07,857 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 18:19:22,466 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:19:53,492 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:20:05,538 INFO [train.py:903] (0/4) Epoch 21, batch 850, loss[loss=0.2083, simple_loss=0.2904, pruned_loss=0.06312, over 19608.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.0654, over 3767709.99 frames. ], batch size: 61, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:20:27,118 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.583e+02 6.647e+02 8.818e+02 2.027e+03, threshold=1.329e+03, percent-clipped=5.0 +2023-04-02 18:20:48,462 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 18:21:06,665 INFO [train.py:903] (0/4) Epoch 21, batch 900, loss[loss=0.2304, simple_loss=0.3077, pruned_loss=0.07661, over 19537.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2908, pruned_loss=0.06614, over 3777557.41 frames. ], batch size: 56, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:21:59,587 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 18:22:08,625 INFO [train.py:903] (0/4) Epoch 21, batch 950, loss[loss=0.2276, simple_loss=0.3032, pruned_loss=0.07601, over 19595.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2914, pruned_loss=0.06636, over 3788044.97 frames. ], batch size: 52, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:22:31,546 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 5.256e+02 6.264e+02 7.895e+02 1.664e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 18:22:45,756 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:10,897 INFO [train.py:903] (0/4) Epoch 21, batch 1000, loss[loss=0.1879, simple_loss=0.2628, pruned_loss=0.05648, over 19740.00 frames. 
], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06627, over 3794194.01 frames. ], batch size: 45, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:23:15,602 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:55,417 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:23:55,871 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 18:24:14,004 INFO [train.py:903] (0/4) Epoch 21, batch 1050, loss[loss=0.2524, simple_loss=0.3185, pruned_loss=0.0931, over 13052.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06642, over 3804807.02 frames. ], batch size: 136, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:24:35,334 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.318e+02 6.486e+02 8.521e+02 3.216e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 18:24:36,546 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 18:25:17,802 INFO [train.py:903] (0/4) Epoch 21, batch 1100, loss[loss=0.2275, simple_loss=0.307, pruned_loss=0.074, over 19369.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.0666, over 3805246.84 frames. ], batch size: 66, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:26:19,703 INFO [train.py:903] (0/4) Epoch 21, batch 1150, loss[loss=0.1859, simple_loss=0.2785, pruned_loss=0.04667, over 19781.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2909, pruned_loss=0.06595, over 3818994.74 frames. ], batch size: 56, lr: 3.95e-03, grad_scale: 4.0 +2023-04-02 18:26:43,796 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.127e+02 6.263e+02 7.580e+02 1.245e+03, threshold=1.253e+03, percent-clipped=0.0 +2023-04-02 18:27:22,172 INFO [train.py:903] (0/4) Epoch 21, batch 1200, loss[loss=0.2056, simple_loss=0.2799, pruned_loss=0.06562, over 19754.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.29, pruned_loss=0.06544, over 3828252.19 frames. ], batch size: 51, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:27:46,791 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 18:28:10,900 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 18:28:25,533 INFO [train.py:903] (0/4) Epoch 21, batch 1250, loss[loss=0.215, simple_loss=0.2997, pruned_loss=0.06512, over 17561.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2892, pruned_loss=0.06499, over 3822645.35 frames. ], batch size: 101, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:28:48,825 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 5.084e+02 5.991e+02 7.123e+02 1.423e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-02 18:29:27,971 INFO [train.py:903] (0/4) Epoch 21, batch 1300, loss[loss=0.2019, simple_loss=0.2706, pruned_loss=0.06655, over 19771.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2895, pruned_loss=0.06556, over 3819913.24 frames. 
], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:29:41,249 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7787, 1.6427, 1.6904, 2.2865, 1.7457, 2.1008, 1.9998, 1.7972], + device='cuda:0'), covar=tensor([0.0838, 0.0945, 0.0984, 0.0751, 0.0869, 0.0743, 0.0919, 0.0736], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0225, 0.0227, 0.0245, 0.0227, 0.0214, 0.0190, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 18:30:19,739 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=5.28 vs. limit=5.0 +2023-04-02 18:30:30,622 INFO [train.py:903] (0/4) Epoch 21, batch 1350, loss[loss=0.1999, simple_loss=0.2785, pruned_loss=0.06071, over 19677.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2892, pruned_loss=0.06544, over 3821554.31 frames. ], batch size: 53, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:30:30,911 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:34,446 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:54,796 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.098e+02 6.340e+02 8.452e+02 2.491e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-04-02 18:31:33,355 INFO [train.py:903] (0/4) Epoch 21, batch 1400, loss[loss=0.1986, simple_loss=0.2827, pruned_loss=0.05729, over 19763.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06546, over 3827032.08 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:08,989 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6082, 1.5275, 1.5650, 1.9959, 1.5760, 1.8754, 1.9024, 1.6645], + device='cuda:0'), covar=tensor([0.0858, 0.0939, 0.0990, 0.0749, 0.0823, 0.0749, 0.0824, 0.0729], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0224, 0.0227, 0.0244, 0.0227, 0.0213, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 18:32:23,939 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-138000.pt +2023-04-02 18:32:28,139 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 18:32:37,232 INFO [train.py:903] (0/4) Epoch 21, batch 1450, loss[loss=0.2049, simple_loss=0.2896, pruned_loss=0.06008, over 19511.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2901, pruned_loss=0.06557, over 3821802.96 frames. ], batch size: 56, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:41,596 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-02 18:32:45,903 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5089, 1.4797, 1.4884, 1.8479, 1.4600, 1.7074, 1.7095, 1.5479], + device='cuda:0'), covar=tensor([0.0883, 0.0958, 0.1027, 0.0719, 0.0848, 0.0819, 0.0868, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0224, 0.0228, 0.0244, 0.0227, 0.0213, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 18:33:01,289 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.676e+02 5.543e+02 6.978e+02 2.034e+03, threshold=1.109e+03, percent-clipped=2.0 +2023-04-02 18:33:39,045 INFO [train.py:903] (0/4) Epoch 21, batch 1500, loss[loss=0.2094, simple_loss=0.2918, pruned_loss=0.06346, over 19485.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06509, over 3828170.08 frames. ], batch size: 64, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:34:17,988 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8919, 1.2004, 1.6466, 0.5842, 1.9799, 2.4074, 2.1042, 2.5570], + device='cuda:0'), covar=tensor([0.1601, 0.3737, 0.3046, 0.2638, 0.0614, 0.0298, 0.0347, 0.0365], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0320, 0.0349, 0.0264, 0.0241, 0.0185, 0.0215, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 18:34:42,049 INFO [train.py:903] (0/4) Epoch 21, batch 1550, loss[loss=0.2213, simple_loss=0.293, pruned_loss=0.07476, over 19687.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2891, pruned_loss=0.06534, over 3833907.02 frames. ], batch size: 53, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:34:43,703 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9936, 2.1125, 2.3242, 2.7011, 2.0559, 2.6711, 2.3665, 2.1090], + device='cuda:0'), covar=tensor([0.4321, 0.3881, 0.1865, 0.2388, 0.4170, 0.2027, 0.4717, 0.3363], + device='cuda:0'), in_proj_covar=tensor([0.0888, 0.0952, 0.0711, 0.0927, 0.0869, 0.0800, 0.0833, 0.0777], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 18:34:56,473 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9120, 1.6631, 1.8855, 1.4227, 4.4274, 1.0874, 2.5032, 4.8716], + device='cuda:0'), covar=tensor([0.0438, 0.2680, 0.2702, 0.2185, 0.0733, 0.2752, 0.1498, 0.0166], + device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0365, 0.0385, 0.0346, 0.0374, 0.0349, 0.0377, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:35:05,478 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 5.280e+02 6.228e+02 7.726e+02 2.313e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 18:35:38,370 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9977, 5.0587, 5.8099, 5.8207, 2.1671, 5.5359, 4.6627, 5.4479], + device='cuda:0'), covar=tensor([0.1625, 0.0845, 0.0535, 0.0612, 0.5762, 0.0719, 0.0578, 0.1226], + device='cuda:0'), in_proj_covar=tensor([0.0770, 0.0721, 0.0929, 0.0816, 0.0817, 0.0687, 0.0561, 0.0862], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 18:35:44,862 INFO [train.py:903] (0/4) Epoch 21, batch 1600, loss[loss=0.2063, simple_loss=0.2719, pruned_loss=0.07029, over 19719.00 frames. 
], tot_loss[loss=0.2094, simple_loss=0.2889, pruned_loss=0.06495, over 3823579.14 frames. ], batch size: 46, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:07,164 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 18:36:48,268 INFO [train.py:903] (0/4) Epoch 21, batch 1650, loss[loss=0.2399, simple_loss=0.3125, pruned_loss=0.0837, over 13180.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06485, over 3795242.71 frames. ], batch size: 136, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:50,165 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 18:37:12,972 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.591e+02 5.832e+02 7.172e+02 1.632e+03, threshold=1.166e+03, percent-clipped=1.0 +2023-04-02 18:37:43,137 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:46,572 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:49,815 INFO [train.py:903] (0/4) Epoch 21, batch 1700, loss[loss=0.2173, simple_loss=0.2937, pruned_loss=0.0705, over 19655.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2897, pruned_loss=0.06558, over 3800920.01 frames. ], batch size: 60, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:38:24,230 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:38:28,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 18:38:53,713 INFO [train.py:903] (0/4) Epoch 21, batch 1750, loss[loss=0.209, simple_loss=0.2909, pruned_loss=0.06351, over 19661.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.0651, over 3822386.09 frames. ], batch size: 58, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:39:16,057 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 4.819e+02 5.989e+02 8.047e+02 2.111e+03, threshold=1.198e+03, percent-clipped=8.0 +2023-04-02 18:39:30,928 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:39:55,290 INFO [train.py:903] (0/4) Epoch 21, batch 1800, loss[loss=0.2176, simple_loss=0.3036, pruned_loss=0.06579, over 19697.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.0645, over 3824165.00 frames. ], batch size: 59, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:40:05,943 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:10,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:54,328 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 18:40:57,827 INFO [train.py:903] (0/4) Epoch 21, batch 1850, loss[loss=0.1962, simple_loss=0.2706, pruned_loss=0.06096, over 19399.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2891, pruned_loss=0.06461, over 3821179.96 frames. 
], batch size: 48, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:41:22,804 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 4.784e+02 5.677e+02 7.800e+02 1.333e+03, threshold=1.135e+03, percent-clipped=2.0 +2023-04-02 18:41:34,213 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 18:42:01,833 INFO [train.py:903] (0/4) Epoch 21, batch 1900, loss[loss=0.232, simple_loss=0.3126, pruned_loss=0.07568, over 17468.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2912, pruned_loss=0.06613, over 3792434.69 frames. ], batch size: 101, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:42:18,834 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 18:42:19,482 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:42:23,577 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 18:42:48,358 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 18:43:04,463 INFO [train.py:903] (0/4) Epoch 21, batch 1950, loss[loss=0.2119, simple_loss=0.3064, pruned_loss=0.0587, over 19532.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2907, pruned_loss=0.06585, over 3806004.90 frames. ], batch size: 54, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:43:27,758 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 4.783e+02 6.007e+02 7.405e+02 3.008e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 18:44:06,915 INFO [train.py:903] (0/4) Epoch 21, batch 2000, loss[loss=0.2116, simple_loss=0.303, pruned_loss=0.06013, over 18110.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2906, pruned_loss=0.06567, over 3801331.24 frames. ], batch size: 83, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:44:14,587 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 18:45:03,885 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 18:45:08,463 INFO [train.py:903] (0/4) Epoch 21, batch 2050, loss[loss=0.1665, simple_loss=0.2469, pruned_loss=0.04304, over 19625.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2897, pruned_loss=0.0648, over 3808021.79 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:45:22,125 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 18:45:23,301 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 18:45:28,161 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138625.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:32,734 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:33,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.887e+02 4.818e+02 6.151e+02 8.119e+02 1.355e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 18:45:38,227 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:45,212 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 18:46:00,056 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:03,534 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:12,220 INFO [train.py:903] (0/4) Epoch 21, batch 2100, loss[loss=0.2052, simple_loss=0.2833, pruned_loss=0.06353, over 19610.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2894, pruned_loss=0.06487, over 3799805.07 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:46:13,822 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9765, 1.3421, 1.0578, 0.9659, 1.1474, 0.9549, 1.0087, 1.2632], + device='cuda:0'), covar=tensor([0.0561, 0.0845, 0.1138, 0.0762, 0.0568, 0.1327, 0.0544, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0313, 0.0334, 0.0260, 0.0245, 0.0336, 0.0290, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:46:39,348 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 18:46:41,754 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:00,945 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138699.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:02,059 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 18:47:06,941 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:14,541 INFO [train.py:903] (0/4) Epoch 21, batch 2150, loss[loss=0.2234, simple_loss=0.302, pruned_loss=0.0724, over 19614.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.29, pruned_loss=0.06548, over 3806054.42 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:47:21,563 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:37,629 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.033e+02 5.895e+02 7.227e+02 1.459e+03, threshold=1.179e+03, percent-clipped=3.0 +2023-04-02 18:48:00,969 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:48:03,228 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7708, 3.2387, 3.2949, 3.3215, 1.3675, 3.1626, 2.7529, 3.0407], + device='cuda:0'), covar=tensor([0.1859, 0.1272, 0.0875, 0.0960, 0.5439, 0.1207, 0.0921, 0.1433], + device='cuda:0'), in_proj_covar=tensor([0.0781, 0.0735, 0.0941, 0.0824, 0.0829, 0.0696, 0.0570, 0.0870], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 18:48:18,134 INFO [train.py:903] (0/4) Epoch 21, batch 2200, loss[loss=0.2457, simple_loss=0.3251, pruned_loss=0.08321, over 19479.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2904, pruned_loss=0.06576, over 3804389.64 frames. 
], batch size: 64, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:48:39,537 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4321, 2.1901, 2.2115, 2.4852, 2.2435, 2.0284, 2.1433, 2.4392], + device='cuda:0'), covar=tensor([0.0769, 0.1335, 0.1077, 0.0826, 0.1118, 0.0473, 0.1124, 0.0526], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0358, 0.0311, 0.0250, 0.0301, 0.0251, 0.0309, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 18:49:08,075 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:49:20,496 INFO [train.py:903] (0/4) Epoch 21, batch 2250, loss[loss=0.2205, simple_loss=0.3184, pruned_loss=0.06136, over 19676.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2914, pruned_loss=0.06567, over 3808698.18 frames. ], batch size: 60, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:41,356 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0522, 1.3230, 1.7864, 1.2627, 2.7397, 3.7452, 3.4925, 3.9204], + device='cuda:0'), covar=tensor([0.1673, 0.3676, 0.3144, 0.2374, 0.0614, 0.0179, 0.0192, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0321, 0.0350, 0.0264, 0.0241, 0.0185, 0.0216, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 18:49:44,480 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.352e+02 5.230e+02 6.732e+02 8.271e+02 2.316e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-04-02 18:50:12,140 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6450, 4.0134, 4.4943, 4.5175, 1.8491, 4.2096, 3.5366, 3.8726], + device='cuda:0'), covar=tensor([0.2287, 0.1506, 0.0935, 0.1152, 0.7143, 0.1793, 0.1201, 0.1940], + device='cuda:0'), in_proj_covar=tensor([0.0780, 0.0733, 0.0942, 0.0823, 0.0826, 0.0694, 0.0570, 0.0870], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 18:50:23,832 INFO [train.py:903] (0/4) Epoch 21, batch 2300, loss[loss=0.1971, simple_loss=0.2747, pruned_loss=0.05976, over 19627.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2905, pruned_loss=0.06597, over 3809612.48 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:50:38,385 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 18:51:27,178 INFO [train.py:903] (0/4) Epoch 21, batch 2350, loss[loss=0.2061, simple_loss=0.2964, pruned_loss=0.05793, over 19684.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2906, pruned_loss=0.06595, over 3807153.33 frames. ], batch size: 59, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:51:48,927 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:51:50,723 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.807e+02 6.646e+02 8.268e+02 1.737e+03, threshold=1.329e+03, percent-clipped=3.0 +2023-04-02 18:52:08,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 18:52:25,718 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-02 18:52:31,431 INFO [train.py:903] (0/4) Epoch 21, batch 2400, loss[loss=0.2433, simple_loss=0.3213, pruned_loss=0.08261, over 19621.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2912, pruned_loss=0.06628, over 3804246.41 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:26,790 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139003.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:34,426 INFO [train.py:903] (0/4) Epoch 21, batch 2450, loss[loss=0.2383, simple_loss=0.3193, pruned_loss=0.07859, over 19097.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2905, pruned_loss=0.06594, over 3799628.11 frames. ], batch size: 69, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:58,371 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:59,945 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.027e+02 6.383e+02 8.090e+02 1.476e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 18:54:17,224 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:22,823 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:31,523 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139055.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:37,040 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:39,177 INFO [train.py:903] (0/4) Epoch 21, batch 2500, loss[loss=0.2437, simple_loss=0.3072, pruned_loss=0.09006, over 19724.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2901, pruned_loss=0.06619, over 3805577.38 frames. ], batch size: 51, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:55:05,336 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:55:20,465 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-04-02 18:55:34,451 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.71 vs. limit=5.0 +2023-04-02 18:55:42,984 INFO [train.py:903] (0/4) Epoch 21, batch 2550, loss[loss=0.2153, simple_loss=0.2963, pruned_loss=0.06708, over 19320.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.289, pruned_loss=0.06535, over 3801524.69 frames. 
], batch size: 66, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:55:53,912 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7157, 1.7120, 1.5935, 1.3432, 1.2775, 1.3837, 0.3418, 0.6842], + device='cuda:0'), covar=tensor([0.0647, 0.0659, 0.0417, 0.0645, 0.1324, 0.0758, 0.1263, 0.1096], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0352, 0.0357, 0.0381, 0.0458, 0.0386, 0.0333, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 18:56:06,421 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 4.926e+02 6.170e+02 7.459e+02 2.294e+03, threshold=1.234e+03, percent-clipped=3.0 +2023-04-02 18:56:14,904 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2457, 1.3202, 1.7101, 1.2973, 2.6908, 3.7057, 3.4167, 3.9262], + device='cuda:0'), covar=tensor([0.1667, 0.3831, 0.3482, 0.2539, 0.0617, 0.0220, 0.0212, 0.0259], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0322, 0.0351, 0.0264, 0.0242, 0.0186, 0.0217, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 18:56:35,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 18:56:43,471 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:56:45,330 INFO [train.py:903] (0/4) Epoch 21, batch 2600, loss[loss=0.2286, simple_loss=0.3067, pruned_loss=0.07525, over 19502.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2895, pruned_loss=0.06585, over 3801937.35 frames. ], batch size: 64, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:49,270 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:02,136 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:47,164 INFO [train.py:903] (0/4) Epoch 21, batch 2650, loss[loss=0.2124, simple_loss=0.2868, pruned_loss=0.06897, over 19857.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2902, pruned_loss=0.0661, over 3803277.69 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:58:08,412 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 18:58:13,098 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 4.962e+02 5.879e+02 7.846e+02 2.263e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 18:58:45,336 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:58:49,505 INFO [train.py:903] (0/4) Epoch 21, batch 2700, loss[loss=0.2134, simple_loss=0.2895, pruned_loss=0.06865, over 19860.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2904, pruned_loss=0.06592, over 3796246.25 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 18:59:04,241 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139271.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:26,353 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-02 18:59:33,178 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139294.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:49,897 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:52,995 INFO [train.py:903] (0/4) Epoch 21, batch 2750, loss[loss=0.1952, simple_loss=0.2657, pruned_loss=0.06238, over 19336.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2906, pruned_loss=0.06606, over 3807801.76 frames. ], batch size: 44, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 19:00:18,050 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.705e+02 5.796e+02 7.407e+02 1.811e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-02 19:00:55,535 INFO [train.py:903] (0/4) Epoch 21, batch 2800, loss[loss=0.205, simple_loss=0.289, pruned_loss=0.06047, over 19796.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2905, pruned_loss=0.06566, over 3821840.09 frames. ], batch size: 56, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:01:08,569 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 19:01:27,962 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139386.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:01:45,346 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 19:01:58,454 INFO [train.py:903] (0/4) Epoch 21, batch 2850, loss[loss=0.2029, simple_loss=0.2776, pruned_loss=0.06416, over 19746.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.06534, over 3810153.86 frames. ], batch size: 47, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:02:03,716 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:09,510 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139419.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:23,960 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.945e+02 5.282e+02 6.184e+02 7.725e+02 1.552e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 19:02:24,350 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139430.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:35,364 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:42,178 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139444.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:54,851 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:57,840 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 19:03:00,105 INFO [train.py:903] (0/4) Epoch 21, batch 2900, loss[loss=0.216, simple_loss=0.2961, pruned_loss=0.06794, over 19383.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2901, pruned_loss=0.06528, over 3809016.60 frames. 
], batch size: 70, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:03:31,305 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7656, 1.2956, 1.5635, 1.6493, 3.2218, 1.2683, 2.4940, 3.6583], + device='cuda:0'), covar=tensor([0.0549, 0.2978, 0.2839, 0.1894, 0.0796, 0.2583, 0.1274, 0.0293], + device='cuda:0'), in_proj_covar=tensor([0.0406, 0.0361, 0.0380, 0.0342, 0.0368, 0.0345, 0.0372, 0.0398], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:04:04,518 INFO [train.py:903] (0/4) Epoch 21, batch 2950, loss[loss=0.2009, simple_loss=0.2876, pruned_loss=0.05713, over 19669.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.288, pruned_loss=0.06439, over 3823620.01 frames. ], batch size: 53, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:28,835 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.684e+02 5.947e+02 7.442e+02 1.403e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-02 19:05:01,584 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1464, 1.2605, 1.6556, 1.1969, 2.6519, 3.5125, 3.2315, 3.6311], + device='cuda:0'), covar=tensor([0.1663, 0.3788, 0.3380, 0.2420, 0.0587, 0.0192, 0.0205, 0.0266], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0320, 0.0350, 0.0264, 0.0242, 0.0186, 0.0216, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 19:05:06,911 INFO [train.py:903] (0/4) Epoch 21, batch 3000, loss[loss=0.2071, simple_loss=0.2917, pruned_loss=0.06123, over 19701.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06493, over 3838509.52 frames. ], batch size: 59, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:05:06,912 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 19:05:14,028 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8040, 3.5311, 2.6889, 3.2725, 0.8576, 3.5290, 3.2698, 3.5878], + device='cuda:0'), covar=tensor([0.0694, 0.0727, 0.1779, 0.0755, 0.3788, 0.0711, 0.0722, 0.0862], + device='cuda:0'), in_proj_covar=tensor([0.0495, 0.0407, 0.0488, 0.0342, 0.0398, 0.0424, 0.0419, 0.0455], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:05:20,609 INFO [train.py:937] (0/4) Epoch 21, validation: loss=0.1693, simple_loss=0.2693, pruned_loss=0.03465, over 944034.00 frames. +2023-04-02 19:05:20,610 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 19:05:24,369 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 19:05:57,511 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:05,905 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-02 19:06:12,376 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,515 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:23,830 INFO [train.py:903] (0/4) Epoch 21, batch 3050, loss[loss=0.2276, simple_loss=0.3042, pruned_loss=0.07554, over 19527.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2885, pruned_loss=0.06522, over 3825143.67 frames. 
], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:06:35,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 19:06:48,012 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.171e+02 6.119e+02 7.965e+02 1.649e+03, threshold=1.224e+03, percent-clipped=3.0 +2023-04-02 19:06:57,089 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139638.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:03,665 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:14,485 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:24,505 INFO [train.py:903] (0/4) Epoch 21, batch 3100, loss[loss=0.1761, simple_loss=0.2608, pruned_loss=0.0457, over 19726.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2874, pruned_loss=0.06501, over 3821965.36 frames. ], batch size: 51, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:07:34,147 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139667.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:43,943 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8184, 1.3466, 1.4856, 1.7711, 3.3799, 1.2285, 2.3705, 3.8478], + device='cuda:0'), covar=tensor([0.0475, 0.2895, 0.2954, 0.1757, 0.0731, 0.2535, 0.1430, 0.0233], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0363, 0.0381, 0.0342, 0.0369, 0.0346, 0.0373, 0.0399], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:08:25,945 INFO [train.py:903] (0/4) Epoch 21, batch 3150, loss[loss=0.2573, simple_loss=0.3212, pruned_loss=0.09665, over 13922.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2875, pruned_loss=0.06525, over 3812861.22 frames. ], batch size: 136, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:08:32,303 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:51,319 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.084e+02 6.643e+02 9.166e+02 2.493e+03, threshold=1.329e+03, percent-clipped=12.0 +2023-04-02 19:08:52,525 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 19:09:19,205 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139753.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:09:28,041 INFO [train.py:903] (0/4) Epoch 21, batch 3200, loss[loss=0.206, simple_loss=0.2891, pruned_loss=0.06152, over 19525.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2885, pruned_loss=0.06564, over 3818134.00 frames. 
], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:09:36,911 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:01,984 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:06,558 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3869, 2.1497, 1.6350, 1.4313, 1.9872, 1.3060, 1.4403, 1.9027], + device='cuda:0'), covar=tensor([0.1042, 0.0759, 0.1085, 0.0835, 0.0520, 0.1252, 0.0686, 0.0446], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0309, 0.0332, 0.0258, 0.0241, 0.0332, 0.0286, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:10:27,823 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:30,933 INFO [train.py:903] (0/4) Epoch 21, batch 3250, loss[loss=0.225, simple_loss=0.3038, pruned_loss=0.07306, over 19587.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.289, pruned_loss=0.06547, over 3830180.81 frames. ], batch size: 61, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:10:46,073 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139822.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:48,403 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:48,533 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9933, 1.9106, 1.8086, 1.6674, 1.4540, 1.6190, 0.4966, 0.8850], + device='cuda:0'), covar=tensor([0.0597, 0.0638, 0.0425, 0.0673, 0.1227, 0.0825, 0.1252, 0.1073], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0351, 0.0355, 0.0379, 0.0456, 0.0384, 0.0331, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 19:10:55,284 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.059e+02 5.030e+02 6.558e+02 8.674e+02 2.471e+03, threshold=1.312e+03, percent-clipped=9.0 +2023-04-02 19:11:32,327 INFO [train.py:903] (0/4) Epoch 21, batch 3300, loss[loss=0.209, simple_loss=0.2937, pruned_loss=0.06222, over 19674.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.289, pruned_loss=0.06542, over 3825295.63 frames. ], batch size: 53, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:11:37,041 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 19:11:39,024 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 19:11:43,190 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:12:34,959 INFO [train.py:903] (0/4) Epoch 21, batch 3350, loss[loss=0.1929, simple_loss=0.2673, pruned_loss=0.0592, over 15559.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2883, pruned_loss=0.06521, over 3815768.69 frames. 
], batch size: 34, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:13:00,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.734e+02 5.660e+02 7.256e+02 1.171e+03, threshold=1.132e+03, percent-clipped=0.0 +2023-04-02 19:13:04,626 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139933.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:15,672 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-02 19:13:17,284 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:37,533 INFO [train.py:903] (0/4) Epoch 21, batch 3400, loss[loss=0.2284, simple_loss=0.3089, pruned_loss=0.07397, over 18074.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2879, pruned_loss=0.06511, over 3815226.63 frames. ], batch size: 83, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:13:52,373 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:22,124 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:27,648 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-140000.pt +2023-04-02 19:14:41,302 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140009.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:42,108 INFO [train.py:903] (0/4) Epoch 21, batch 3450, loss[loss=0.2081, simple_loss=0.2902, pruned_loss=0.06296, over 19314.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06589, over 3821509.92 frames. ], batch size: 66, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:14:43,268 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 19:14:56,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:57,031 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-04-02 19:14:58,850 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8921, 1.2252, 0.9921, 0.8957, 1.0925, 0.8793, 0.9309, 1.1715], + device='cuda:0'), covar=tensor([0.0548, 0.0805, 0.1058, 0.0696, 0.0519, 0.1255, 0.0519, 0.0457], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0312, 0.0335, 0.0260, 0.0244, 0.0334, 0.0287, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:15:06,286 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.068e+02 6.577e+02 8.644e+02 2.362e+03, threshold=1.315e+03, percent-clipped=9.0 +2023-04-02 19:15:11,058 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140034.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:27,194 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:28,349 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:41,562 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:42,380 INFO [train.py:903] (0/4) Epoch 21, batch 3500, loss[loss=0.2226, simple_loss=0.3017, pruned_loss=0.0718, over 17337.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2909, pruned_loss=0.06645, over 3831897.11 frames. ], batch size: 101, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:45,071 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-02 19:16:45,566 INFO [train.py:903] (0/4) Epoch 21, batch 3550, loss[loss=0.2276, simple_loss=0.2958, pruned_loss=0.07977, over 19489.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06644, over 3834592.82 frames. ], batch size: 64, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:59,737 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:08,439 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:10,317 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.095e+02 6.549e+02 8.089e+02 2.006e+03, threshold=1.310e+03, percent-clipped=2.0 +2023-04-02 19:17:11,692 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140131.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:36,893 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140151.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:47,020 INFO [train.py:903] (0/4) Epoch 21, batch 3600, loss[loss=0.2245, simple_loss=0.3082, pruned_loss=0.07041, over 19529.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.29, pruned_loss=0.0662, over 3842487.61 frames. ], batch size: 56, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:17:56,425 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140166.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:58,696 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:18:51,572 INFO [train.py:903] (0/4) Epoch 21, batch 3650, loss[loss=0.2218, simple_loss=0.3049, pruned_loss=0.06936, over 19773.00 frames. 
], tot_loss[loss=0.2108, simple_loss=0.2898, pruned_loss=0.06594, over 3838764.93 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:18:54,130 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:15,568 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.123e+02 6.079e+02 7.474e+02 1.635e+03, threshold=1.216e+03, percent-clipped=1.0 +2023-04-02 19:19:36,491 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140246.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:54,182 INFO [train.py:903] (0/4) Epoch 21, batch 3700, loss[loss=0.1821, simple_loss=0.2649, pruned_loss=0.04967, over 19794.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2902, pruned_loss=0.06585, over 3845975.76 frames. ], batch size: 47, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:20:01,678 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140266.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:15,857 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:20,499 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:22,748 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:49,158 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140304.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:55,390 INFO [train.py:903] (0/4) Epoch 21, batch 3750, loss[loss=0.1862, simple_loss=0.2644, pruned_loss=0.05404, over 19830.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06631, over 3817072.41 frames. ], batch size: 52, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:21:02,636 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:13,149 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 19:21:17,849 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:20,319 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140329.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:22,188 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.554e+02 6.128e+02 7.118e+02 1.255e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-02 19:21:31,154 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-04-02 19:21:33,844 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:44,250 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4803, 2.4012, 2.1950, 2.5937, 2.2841, 2.2673, 2.2303, 2.4552], + device='cuda:0'), covar=tensor([0.1049, 0.1634, 0.1386, 0.1090, 0.1401, 0.0498, 0.1259, 0.0668], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0360, 0.0312, 0.0251, 0.0301, 0.0252, 0.0310, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:21:57,847 INFO [train.py:903] (0/4) Epoch 21, batch 3800, loss[loss=0.2267, simple_loss=0.307, pruned_loss=0.07318, over 19402.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2901, pruned_loss=0.06571, over 3833859.97 frames. ], batch size: 66, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:22:30,450 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 19:22:54,714 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:23:01,180 INFO [train.py:903] (0/4) Epoch 21, batch 3850, loss[loss=0.1982, simple_loss=0.2796, pruned_loss=0.05842, over 17318.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.0655, over 3826230.00 frames. ], batch size: 38, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:23:19,346 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-02 19:23:25,867 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 5.035e+02 6.153e+02 7.922e+02 1.662e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-04-02 19:24:03,191 INFO [train.py:903] (0/4) Epoch 21, batch 3900, loss[loss=0.1784, simple_loss=0.2564, pruned_loss=0.05017, over 19755.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06556, over 3826879.30 frames. ], batch size: 46, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:24:09,090 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:17,273 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:55,703 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140502.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:01,336 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140507.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:04,333 INFO [train.py:903] (0/4) Epoch 21, batch 3950, loss[loss=0.199, simple_loss=0.2901, pruned_loss=0.05394, over 19482.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06547, over 3821968.66 frames. ], batch size: 64, lr: 3.91e-03, grad_scale: 4.0 +2023-04-02 19:25:10,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-02 19:25:20,283 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140522.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,803 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:31,859 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.763e+02 5.824e+02 7.544e+02 1.413e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 19:25:40,198 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:42,497 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:51,816 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:07,097 INFO [train.py:903] (0/4) Epoch 21, batch 4000, loss[loss=0.2083, simple_loss=0.2892, pruned_loss=0.06377, over 19592.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06548, over 3824267.66 frames. ], batch size: 57, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:26:09,718 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140562.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:13,081 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:14,609 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-02 19:26:33,167 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:36,731 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:41,305 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:55,163 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 19:27:09,309 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:11,209 INFO [train.py:903] (0/4) Epoch 21, batch 4050, loss[loss=0.2318, simple_loss=0.3112, pruned_loss=0.07616, over 19583.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2898, pruned_loss=0.06502, over 3823691.44 frames. ], batch size: 61, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:27:25,062 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:36,533 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 5.165e+02 6.710e+02 8.332e+02 1.443e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-02 19:28:13,809 INFO [train.py:903] (0/4) Epoch 21, batch 4100, loss[loss=0.284, simple_loss=0.3385, pruned_loss=0.1147, over 13410.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06503, over 3833618.42 frames. 
], batch size: 135, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:28:33,319 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.81 vs. limit=5.0 +2023-04-02 19:28:52,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 19:29:15,741 INFO [train.py:903] (0/4) Epoch 21, batch 4150, loss[loss=0.2165, simple_loss=0.2982, pruned_loss=0.06744, over 19654.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2895, pruned_loss=0.06474, over 3829728.35 frames. ], batch size: 60, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:29:42,680 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.318e+02 6.390e+02 7.957e+02 1.686e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 19:29:50,042 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:29:55,881 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140741.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:05,092 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:17,540 INFO [train.py:903] (0/4) Epoch 21, batch 4200, loss[loss=0.2323, simple_loss=0.3101, pruned_loss=0.07725, over 19507.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2894, pruned_loss=0.065, over 3826251.28 frames. ], batch size: 64, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:30:24,373 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 19:30:41,732 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7605, 2.5820, 2.5533, 2.8248, 2.5077, 2.3316, 2.2206, 2.7155], + device='cuda:0'), covar=tensor([0.0888, 0.1488, 0.1221, 0.0895, 0.1247, 0.0489, 0.1317, 0.0574], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0360, 0.0311, 0.0250, 0.0301, 0.0252, 0.0310, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:31:10,596 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6275, 1.3607, 1.5494, 1.4435, 3.2298, 1.0245, 2.5076, 3.5961], + device='cuda:0'), covar=tensor([0.0483, 0.2649, 0.2788, 0.1954, 0.0701, 0.2635, 0.1097, 0.0267], + device='cuda:0'), in_proj_covar=tensor([0.0405, 0.0363, 0.0383, 0.0343, 0.0370, 0.0347, 0.0374, 0.0398], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:31:21,343 INFO [train.py:903] (0/4) Epoch 21, batch 4250, loss[loss=0.2214, simple_loss=0.3029, pruned_loss=0.06998, over 19447.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2893, pruned_loss=0.06493, over 3839724.20 frames. ], batch size: 64, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:31:25,367 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-04-02 19:31:39,325 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7117, 4.2519, 4.4761, 4.4838, 1.6549, 4.1822, 3.6710, 4.1772], + device='cuda:0'), covar=tensor([0.1794, 0.0819, 0.0637, 0.0696, 0.6196, 0.1020, 0.0679, 0.1188], + device='cuda:0'), in_proj_covar=tensor([0.0783, 0.0733, 0.0947, 0.0830, 0.0829, 0.0702, 0.0571, 0.0876], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 19:31:40,145 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 19:31:42,786 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:31:47,367 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 5.291e+02 6.401e+02 8.566e+02 1.506e+03, threshold=1.280e+03, percent-clipped=6.0 +2023-04-02 19:31:50,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 19:31:53,755 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:01,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:12,614 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:24,303 INFO [train.py:903] (0/4) Epoch 21, batch 4300, loss[loss=0.204, simple_loss=0.2828, pruned_loss=0.06264, over 19522.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2888, pruned_loss=0.06473, over 3831156.88 frames. ], batch size: 54, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:32:25,851 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:29,397 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:33,861 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:37,166 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:33:18,642 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 19:33:24,433 INFO [train.py:903] (0/4) Epoch 21, batch 4350, loss[loss=0.1958, simple_loss=0.2858, pruned_loss=0.05287, over 19676.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06472, over 3831735.00 frames. ], batch size: 60, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:33:51,508 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.814e+02 5.846e+02 6.905e+02 1.613e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 19:33:56,408 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:25,325 INFO [train.py:903] (0/4) Epoch 21, batch 4400, loss[loss=0.2087, simple_loss=0.2941, pruned_loss=0.06166, over 18265.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2891, pruned_loss=0.06535, over 3825491.66 frames. 
], batch size: 84, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:34:29,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3747, 3.9756, 2.6387, 3.5293, 1.1401, 3.9258, 3.8579, 3.8966], + device='cuda:0'), covar=tensor([0.0632, 0.1001, 0.1838, 0.0809, 0.3500, 0.0666, 0.0781, 0.1217], + device='cuda:0'), in_proj_covar=tensor([0.0494, 0.0405, 0.0485, 0.0343, 0.0395, 0.0425, 0.0418, 0.0453], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:34:32,107 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:33,344 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140966.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:53,850 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 19:34:58,756 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:02,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 19:35:05,662 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:27,476 INFO [train.py:903] (0/4) Epoch 21, batch 4450, loss[loss=0.1839, simple_loss=0.2611, pruned_loss=0.05335, over 19616.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2889, pruned_loss=0.06548, over 3825590.34 frames. ], batch size: 50, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:35:37,881 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141017.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:54,628 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.226e+02 6.450e+02 8.222e+02 2.218e+03, threshold=1.290e+03, percent-clipped=9.0 +2023-04-02 19:36:06,100 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 19:36:32,489 INFO [train.py:903] (0/4) Epoch 21, batch 4500, loss[loss=0.1843, simple_loss=0.2794, pruned_loss=0.04463, over 19613.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2891, pruned_loss=0.06552, over 3824987.51 frames. ], batch size: 57, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:01,368 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141085.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:34,668 INFO [train.py:903] (0/4) Epoch 21, batch 4550, loss[loss=0.197, simple_loss=0.282, pruned_loss=0.05595, over 18097.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2898, pruned_loss=0.06565, over 3825913.55 frames. ], batch size: 83, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:46,075 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 19:37:46,482 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:59,727 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.292e+02 5.246e+02 6.195e+02 7.478e+02 1.454e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-04-02 19:38:10,838 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 19:38:18,670 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:28,863 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0107, 3.6290, 2.3217, 3.2650, 0.8347, 3.5503, 3.5120, 3.5343], + device='cuda:0'), covar=tensor([0.0837, 0.1210, 0.2192, 0.0947, 0.3970, 0.0810, 0.0981, 0.1227], + device='cuda:0'), in_proj_covar=tensor([0.0499, 0.0408, 0.0489, 0.0345, 0.0397, 0.0428, 0.0420, 0.0455], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:38:35,234 INFO [train.py:903] (0/4) Epoch 21, batch 4600, loss[loss=0.2246, simple_loss=0.2953, pruned_loss=0.0769, over 19696.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06619, over 3821098.80 frames. ], batch size: 60, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:38:36,709 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:48,706 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:25,208 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141200.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:36,153 INFO [train.py:903] (0/4) Epoch 21, batch 4650, loss[loss=0.2139, simple_loss=0.299, pruned_loss=0.0644, over 19769.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2903, pruned_loss=0.06583, over 3823168.27 frames. ], batch size: 54, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:39:53,019 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141222.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:56,094 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 19:40:02,331 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 4.513e+02 5.853e+02 7.712e+02 1.984e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-02 19:40:05,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 19:40:12,928 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0191, 3.6619, 2.5283, 3.3055, 1.0997, 3.5945, 3.5123, 3.6378], + device='cuda:0'), covar=tensor([0.0780, 0.1070, 0.2024, 0.0976, 0.3712, 0.0754, 0.0932, 0.1137], + device='cuda:0'), in_proj_covar=tensor([0.0495, 0.0406, 0.0486, 0.0343, 0.0395, 0.0424, 0.0419, 0.0451], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:40:15,513 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:18,306 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-02 19:40:21,237 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:37,899 INFO [train.py:903] (0/4) Epoch 21, batch 4700, loss[loss=0.1875, simple_loss=0.261, pruned_loss=0.05698, over 19781.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2899, pruned_loss=0.06559, over 3823764.45 frames. 
], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:40:45,391 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 19:40:47,257 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:00,529 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 19:41:00,630 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:08,822 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:38,490 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141309.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:39,482 INFO [train.py:903] (0/4) Epoch 21, batch 4750, loss[loss=0.2253, simple_loss=0.3126, pruned_loss=0.06895, over 19513.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2902, pruned_loss=0.06559, over 3823593.09 frames. ], batch size: 64, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:41:59,001 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6957, 2.4472, 2.4207, 2.6694, 2.4658, 2.3056, 2.2734, 2.6167], + device='cuda:0'), covar=tensor([0.0905, 0.1532, 0.1199, 0.0968, 0.1264, 0.0488, 0.1208, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0360, 0.0312, 0.0251, 0.0301, 0.0253, 0.0309, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:42:03,070 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.546e+02 5.235e+02 6.308e+02 8.195e+02 2.468e+03, threshold=1.262e+03, percent-clipped=8.0 +2023-04-02 19:42:24,995 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:42:30,013 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 19:42:39,822 INFO [train.py:903] (0/4) Epoch 21, batch 4800, loss[loss=0.1918, simple_loss=0.2677, pruned_loss=0.05799, over 19666.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2906, pruned_loss=0.06611, over 3822757.61 frames. ], batch size: 53, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:43:23,134 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:43:41,794 INFO [train.py:903] (0/4) Epoch 21, batch 4850, loss[loss=0.2269, simple_loss=0.3026, pruned_loss=0.07564, over 18141.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06613, over 3828306.04 frames. ], batch size: 83, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:01,418 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:07,725 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-02 19:44:08,842 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.192e+02 6.534e+02 8.985e+02 2.500e+03, threshold=1.307e+03, percent-clipped=12.0 +2023-04-02 19:44:13,622 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141435.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:44:28,284 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 19:44:33,280 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 19:44:34,448 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 19:44:39,583 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:44,215 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:45,061 INFO [train.py:903] (0/4) Epoch 21, batch 4900, loss[loss=0.2186, simple_loss=0.3034, pruned_loss=0.06691, over 19459.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2902, pruned_loss=0.06575, over 3831993.97 frames. ], batch size: 64, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:47,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 19:45:05,776 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 19:45:10,378 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141481.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:18,453 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2139, 2.1389, 1.9799, 1.8020, 1.6680, 1.8139, 0.5882, 1.1582], + device='cuda:0'), covar=tensor([0.0606, 0.0531, 0.0399, 0.0685, 0.1079, 0.0755, 0.1216, 0.0986], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0350, 0.0353, 0.0378, 0.0457, 0.0383, 0.0332, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 19:45:40,778 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141505.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:46,097 INFO [train.py:903] (0/4) Epoch 21, batch 4950, loss[loss=0.2355, simple_loss=0.3166, pruned_loss=0.07723, over 19508.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06595, over 3804253.82 frames. ], batch size: 64, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:03,691 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 19:46:10,479 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.116e+02 6.031e+02 7.721e+02 1.403e+03, threshold=1.206e+03, percent-clipped=1.0 +2023-04-02 19:46:26,548 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141542.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:46:28,465 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 19:46:46,926 INFO [train.py:903] (0/4) Epoch 21, batch 5000, loss[loss=0.2002, simple_loss=0.2939, pruned_loss=0.05329, over 19603.00 frames. 
], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06549, over 3808731.95 frames. ], batch size: 57, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:53,584 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 19:46:55,134 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141567.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:47:08,259 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 19:47:48,192 INFO [train.py:903] (0/4) Epoch 21, batch 5050, loss[loss=0.2132, simple_loss=0.3026, pruned_loss=0.06187, over 19766.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2908, pruned_loss=0.0657, over 3811540.22 frames. ], batch size: 54, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:48:00,894 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2150, 1.2668, 1.6788, 1.2076, 2.6199, 3.5056, 3.2683, 3.6815], + device='cuda:0'), covar=tensor([0.1652, 0.3845, 0.3419, 0.2486, 0.0627, 0.0192, 0.0211, 0.0267], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0321, 0.0349, 0.0264, 0.0241, 0.0184, 0.0215, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 19:48:03,155 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:16,453 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.325e+02 4.754e+02 5.832e+02 6.826e+02 1.423e+03, threshold=1.166e+03, percent-clipped=2.0 +2023-04-02 19:48:25,907 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 19:48:39,224 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:53,405 INFO [train.py:903] (0/4) Epoch 21, batch 5100, loss[loss=0.2022, simple_loss=0.2908, pruned_loss=0.05675, over 19517.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2901, pruned_loss=0.06497, over 3818635.46 frames. ], batch size: 56, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:49:04,603 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 19:49:07,002 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 19:49:11,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 19:49:11,955 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141675.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:17,701 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:23,377 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3032, 3.8360, 3.9442, 3.9385, 1.5961, 3.7789, 3.2575, 3.6980], + device='cuda:0'), covar=tensor([0.1659, 0.0889, 0.0626, 0.0713, 0.5759, 0.0883, 0.0715, 0.1088], + device='cuda:0'), in_proj_covar=tensor([0.0789, 0.0740, 0.0955, 0.0835, 0.0837, 0.0706, 0.0576, 0.0881], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 19:49:30,089 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:50,662 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141705.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:56,348 INFO [train.py:903] (0/4) Epoch 21, batch 5150, loss[loss=0.2298, simple_loss=0.3019, pruned_loss=0.07882, over 18792.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2907, pruned_loss=0.06576, over 3805113.61 frames. ], batch size: 74, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:50:09,410 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 19:50:20,923 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 4.996e+02 6.424e+02 8.131e+02 1.633e+03, threshold=1.285e+03, percent-clipped=6.0 +2023-04-02 19:50:46,536 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 19:50:56,442 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 19:50:58,074 INFO [train.py:903] (0/4) Epoch 21, batch 5200, loss[loss=0.2383, simple_loss=0.3126, pruned_loss=0.08197, over 19672.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2905, pruned_loss=0.06548, over 3806029.91 frames. ], batch size: 58, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:51:14,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 19:51:22,176 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141779.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:51:51,405 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141803.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:54,942 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:58,264 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 19:51:59,455 INFO [train.py:903] (0/4) Epoch 21, batch 5250, loss[loss=0.1836, simple_loss=0.2553, pruned_loss=0.05592, over 19790.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06455, over 3828403.28 frames. 
], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:52:28,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.884e+02 6.185e+02 7.574e+02 1.457e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 19:53:02,552 INFO [train.py:903] (0/4) Epoch 21, batch 5300, loss[loss=0.1957, simple_loss=0.2677, pruned_loss=0.06187, over 19408.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.289, pruned_loss=0.06527, over 3817099.80 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:53:23,461 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:53:24,187 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 19:53:44,797 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141894.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 19:53:50,270 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9671, 1.7314, 1.6109, 1.9662, 1.6297, 1.7385, 1.5916, 1.8785], + device='cuda:0'), covar=tensor([0.1067, 0.1383, 0.1507, 0.0989, 0.1366, 0.0556, 0.1412, 0.0721], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0358, 0.0311, 0.0251, 0.0299, 0.0252, 0.0310, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 19:53:55,954 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:06,903 INFO [train.py:903] (0/4) Epoch 21, batch 5350, loss[loss=0.237, simple_loss=0.322, pruned_loss=0.076, over 19596.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2892, pruned_loss=0.06528, over 3824588.48 frames. ], batch size: 61, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:54:16,560 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:32,497 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.997e+02 5.942e+02 7.628e+02 1.099e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-02 19:54:35,137 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141934.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:43,732 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 19:54:51,430 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:55:07,484 INFO [train.py:903] (0/4) Epoch 21, batch 5400, loss[loss=0.1834, simple_loss=0.2615, pruned_loss=0.05267, over 19767.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06586, over 3832836.22 frames. 
], batch size: 48, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:55:14,487 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4986, 3.7137, 4.0166, 4.0146, 2.3310, 3.7540, 3.4522, 3.8203], + device='cuda:0'), covar=tensor([0.1374, 0.3002, 0.0650, 0.0717, 0.4488, 0.1374, 0.0572, 0.1023], + device='cuda:0'), in_proj_covar=tensor([0.0781, 0.0735, 0.0946, 0.0827, 0.0828, 0.0701, 0.0569, 0.0871], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 19:55:57,949 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-142000.pt +2023-04-02 19:56:10,561 INFO [train.py:903] (0/4) Epoch 21, batch 5450, loss[loss=0.188, simple_loss=0.2725, pruned_loss=0.05177, over 19828.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06593, over 3817658.97 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:39,062 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8827, 1.3422, 1.6440, 0.5621, 1.9339, 2.4391, 2.1127, 2.5914], + device='cuda:0'), covar=tensor([0.1660, 0.3670, 0.3246, 0.2843, 0.0658, 0.0287, 0.0353, 0.0363], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0321, 0.0349, 0.0263, 0.0242, 0.0184, 0.0215, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 19:56:39,855 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.354e+02 6.392e+02 8.231e+02 1.329e+03, threshold=1.278e+03, percent-clipped=1.0 +2023-04-02 19:57:14,049 INFO [train.py:903] (0/4) Epoch 21, batch 5500, loss[loss=0.2169, simple_loss=0.2978, pruned_loss=0.06804, over 19520.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2901, pruned_loss=0.06628, over 3815549.94 frames. ], batch size: 56, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:57:18,001 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:57:42,006 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 19:57:48,309 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:58:15,211 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 19:58:17,740 INFO [train.py:903] (0/4) Epoch 21, batch 5550, loss[loss=0.2576, simple_loss=0.3166, pruned_loss=0.09928, over 13049.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06652, over 3811910.96 frames. ], batch size: 136, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:58:26,200 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 19:58:27,680 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7774, 4.1954, 4.4488, 4.4674, 1.7509, 4.2061, 3.6432, 4.1792], + device='cuda:0'), covar=tensor([0.1576, 0.1016, 0.0590, 0.0632, 0.5935, 0.0916, 0.0676, 0.1105], + device='cuda:0'), in_proj_covar=tensor([0.0781, 0.0736, 0.0944, 0.0826, 0.0827, 0.0702, 0.0570, 0.0874], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 19:58:34,914 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5219, 1.5837, 1.7523, 1.7097, 2.5982, 2.2959, 2.6328, 1.2009], + device='cuda:0'), covar=tensor([0.2368, 0.4186, 0.2577, 0.1820, 0.1330, 0.2009, 0.1297, 0.4239], + device='cuda:0'), in_proj_covar=tensor([0.0531, 0.0639, 0.0709, 0.0483, 0.0617, 0.0531, 0.0660, 0.0547], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 19:58:43,636 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.904e+02 5.778e+02 7.569e+02 2.193e+03, threshold=1.156e+03, percent-clipped=4.0 +2023-04-02 19:59:07,890 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142150.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 19:59:15,792 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 19:59:19,356 INFO [train.py:903] (0/4) Epoch 21, batch 5600, loss[loss=0.1851, simple_loss=0.276, pruned_loss=0.0471, over 19674.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2901, pruned_loss=0.06604, over 3825530.33 frames. ], batch size: 55, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:59:36,298 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:59:37,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142175.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:59:58,342 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3484, 1.2364, 1.2911, 1.3415, 1.0661, 1.3961, 1.4254, 1.3582], + device='cuda:0'), covar=tensor([0.0848, 0.0991, 0.1041, 0.0683, 0.0850, 0.0886, 0.0845, 0.0775], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0240, 0.0224, 0.0211, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 20:00:08,574 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142199.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:00:20,402 INFO [train.py:903] (0/4) Epoch 21, batch 5650, loss[loss=0.1843, simple_loss=0.2641, pruned_loss=0.05224, over 19389.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2902, pruned_loss=0.06595, over 3820691.37 frames. 
], batch size: 47, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:00:49,291 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 4.896e+02 5.835e+02 7.915e+02 1.772e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-02 20:01:05,167 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5391, 2.4641, 2.3708, 2.6503, 2.4092, 2.2003, 1.9563, 2.6311], + device='cuda:0'), covar=tensor([0.0961, 0.1504, 0.1294, 0.0957, 0.1275, 0.0519, 0.1412, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0356, 0.0311, 0.0249, 0.0298, 0.0251, 0.0309, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:01:10,294 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 20:01:22,640 INFO [train.py:903] (0/4) Epoch 21, batch 5700, loss[loss=0.2223, simple_loss=0.3025, pruned_loss=0.0711, over 19776.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2901, pruned_loss=0.06588, over 3838168.11 frames. ], batch size: 56, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:01:45,761 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:01:59,755 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:02:09,014 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3780, 0.9855, 1.1778, 2.2309, 1.5897, 1.3312, 1.8156, 1.2534], + device='cuda:0'), covar=tensor([0.1169, 0.1746, 0.1385, 0.0850, 0.1061, 0.1429, 0.1162, 0.1123], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0239, 0.0224, 0.0210, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 20:02:26,062 INFO [train.py:903] (0/4) Epoch 21, batch 5750, loss[loss=0.2125, simple_loss=0.2941, pruned_loss=0.06548, over 19784.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06597, over 3819639.29 frames. ], batch size: 56, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:02:28,353 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 20:02:36,343 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 20:02:41,007 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 20:02:51,480 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.170e+02 6.700e+02 8.460e+02 1.665e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-02 20:03:26,602 INFO [train.py:903] (0/4) Epoch 21, batch 5800, loss[loss=0.2171, simple_loss=0.2808, pruned_loss=0.07667, over 19741.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2891, pruned_loss=0.06557, over 3826721.97 frames. 
], batch size: 45, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:08,885 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:22,739 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:28,053 INFO [train.py:903] (0/4) Epoch 21, batch 5850, loss[loss=0.209, simple_loss=0.2905, pruned_loss=0.06371, over 19666.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2897, pruned_loss=0.06541, over 3841233.23 frames. ], batch size: 58, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:57,526 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.411e+02 4.867e+02 6.014e+02 7.504e+02 1.855e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 20:05:31,500 INFO [train.py:903] (0/4) Epoch 21, batch 5900, loss[loss=0.2046, simple_loss=0.2928, pruned_loss=0.05817, over 17289.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2896, pruned_loss=0.0652, over 3836774.25 frames. ], batch size: 101, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:05:35,059 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 20:05:58,770 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 20:06:36,834 INFO [train.py:903] (0/4) Epoch 21, batch 5950, loss[loss=0.2121, simple_loss=0.2756, pruned_loss=0.0743, over 19090.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2895, pruned_loss=0.06501, over 3842088.00 frames. ], batch size: 42, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:02,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.752e+02 5.691e+02 7.550e+02 2.003e+03, threshold=1.138e+03, percent-clipped=5.0 +2023-04-02 20:07:37,103 INFO [train.py:903] (0/4) Epoch 21, batch 6000, loss[loss=0.2529, simple_loss=0.3166, pruned_loss=0.09457, over 14074.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2905, pruned_loss=0.06581, over 3831000.11 frames. ], batch size: 136, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:37,104 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 20:07:50,393 INFO [train.py:937] (0/4) Epoch 21, validation: loss=0.1692, simple_loss=0.2693, pruned_loss=0.03459, over 944034.00 frames. +2023-04-02 20:07:50,394 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 20:08:26,799 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:08:52,466 INFO [train.py:903] (0/4) Epoch 21, batch 6050, loss[loss=0.2077, simple_loss=0.2887, pruned_loss=0.06331, over 18698.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2909, pruned_loss=0.06614, over 3830798.64 frames. ], batch size: 74, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:18,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.812e+02 5.741e+02 7.692e+02 1.541e+03, threshold=1.148e+03, percent-clipped=3.0 +2023-04-02 20:09:41,449 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:09:53,724 INFO [train.py:903] (0/4) Epoch 21, batch 6100, loss[loss=0.1877, simple_loss=0.2698, pruned_loss=0.05282, over 19660.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2907, pruned_loss=0.06611, over 3820009.67 frames. 
], batch size: 53, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:55,343 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:11,615 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142674.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:17,587 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 20:10:28,115 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:55,895 INFO [train.py:903] (0/4) Epoch 21, batch 6150, loss[loss=0.2186, simple_loss=0.2965, pruned_loss=0.07036, over 19532.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2914, pruned_loss=0.06635, over 3823415.30 frames. ], batch size: 54, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:11:24,998 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.643e+02 5.511e+02 6.918e+02 9.659e+02 2.206e+03, threshold=1.384e+03, percent-clipped=13.0 +2023-04-02 20:11:26,176 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 20:11:36,759 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:11:59,801 INFO [train.py:903] (0/4) Epoch 21, batch 6200, loss[loss=0.1737, simple_loss=0.2622, pruned_loss=0.04259, over 19848.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2922, pruned_loss=0.06666, over 3794329.16 frames. ], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:12:15,917 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5158, 1.6484, 2.0920, 1.7763, 3.1445, 2.5449, 3.5553, 1.6232], + device='cuda:0'), covar=tensor([0.2468, 0.4242, 0.2634, 0.1892, 0.1478, 0.2141, 0.1492, 0.4109], + device='cuda:0'), in_proj_covar=tensor([0.0530, 0.0643, 0.0710, 0.0485, 0.0619, 0.0532, 0.0664, 0.0548], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 20:13:02,732 INFO [train.py:903] (0/4) Epoch 21, batch 6250, loss[loss=0.2106, simple_loss=0.2881, pruned_loss=0.06655, over 13499.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2917, pruned_loss=0.06641, over 3785116.77 frames. ], batch size: 136, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:13:28,469 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 5.305e+02 6.117e+02 7.859e+02 2.157e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 20:13:30,732 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 20:13:46,848 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2136, 1.2590, 1.7810, 1.0624, 2.3563, 3.0951, 2.8221, 3.2841], + device='cuda:0'), covar=tensor([0.1522, 0.3678, 0.3072, 0.2325, 0.0577, 0.0229, 0.0257, 0.0296], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0263, 0.0240, 0.0184, 0.0214, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 20:14:05,176 INFO [train.py:903] (0/4) Epoch 21, batch 6300, loss[loss=0.2274, simple_loss=0.3045, pruned_loss=0.07509, over 19083.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2899, pruned_loss=0.06562, over 3795450.17 frames. 
], batch size: 69, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:14:47,699 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1908, 2.0968, 1.9160, 1.7172, 1.6652, 1.7547, 0.5139, 1.0583], + device='cuda:0'), covar=tensor([0.0580, 0.0607, 0.0442, 0.0751, 0.1160, 0.0839, 0.1301, 0.1072], + device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0355, 0.0354, 0.0382, 0.0459, 0.0386, 0.0335, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 20:15:07,132 INFO [train.py:903] (0/4) Epoch 21, batch 6350, loss[loss=0.251, simple_loss=0.324, pruned_loss=0.08899, over 19757.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2892, pruned_loss=0.06514, over 3799174.45 frames. ], batch size: 63, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:36,285 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.232e+02 4.679e+02 5.550e+02 7.220e+02 1.923e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-02 20:15:39,968 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:16:11,261 INFO [train.py:903] (0/4) Epoch 21, batch 6400, loss[loss=0.2312, simple_loss=0.3074, pruned_loss=0.07748, over 13194.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.289, pruned_loss=0.06468, over 3805687.09 frames. ], batch size: 136, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:14,730 INFO [train.py:903] (0/4) Epoch 21, batch 6450, loss[loss=0.1907, simple_loss=0.2728, pruned_loss=0.05428, over 19746.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2887, pruned_loss=0.06419, over 3810117.50 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:40,515 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.995e+02 6.270e+02 8.275e+02 2.312e+03, threshold=1.254e+03, percent-clipped=6.0 +2023-04-02 20:18:01,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 20:18:04,895 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:18:16,201 INFO [train.py:903] (0/4) Epoch 21, batch 6500, loss[loss=0.2014, simple_loss=0.2705, pruned_loss=0.06616, over 19763.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06422, over 3798204.37 frames. ], batch size: 47, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:18:23,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 20:18:24,238 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-02 20:18:48,992 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143086.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:19:16,584 INFO [train.py:903] (0/4) Epoch 21, batch 6550, loss[loss=0.1883, simple_loss=0.2706, pruned_loss=0.05295, over 19564.00 frames. ], tot_loss[loss=0.209, simple_loss=0.289, pruned_loss=0.06451, over 3802500.86 frames. 
], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:19:44,580 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 5.073e+02 6.169e+02 7.633e+02 2.146e+03, threshold=1.234e+03, percent-clipped=4.0 +2023-04-02 20:20:01,334 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6938, 2.3254, 2.2378, 2.5627, 2.3316, 2.1783, 2.0337, 2.5454], + device='cuda:0'), covar=tensor([0.0870, 0.1594, 0.1364, 0.1015, 0.1374, 0.0531, 0.1392, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0356, 0.0308, 0.0248, 0.0299, 0.0251, 0.0310, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:20:19,906 INFO [train.py:903] (0/4) Epoch 21, batch 6600, loss[loss=0.1986, simple_loss=0.2715, pruned_loss=0.06281, over 19472.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2884, pruned_loss=0.06404, over 3821166.68 frames. ], batch size: 49, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:10,256 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:21:20,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9803, 1.9685, 1.9071, 1.7272, 1.5856, 1.7883, 1.2026, 1.3880], + device='cuda:0'), covar=tensor([0.0680, 0.0725, 0.0491, 0.0801, 0.1221, 0.1029, 0.1265, 0.1057], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0359, 0.0360, 0.0385, 0.0465, 0.0390, 0.0338, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 20:21:22,421 INFO [train.py:903] (0/4) Epoch 21, batch 6650, loss[loss=0.1971, simple_loss=0.2625, pruned_loss=0.06586, over 19142.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2875, pruned_loss=0.06382, over 3821960.94 frames. ], batch size: 42, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:47,868 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.940e+02 5.672e+02 7.065e+02 1.538e+03, threshold=1.134e+03, percent-clipped=2.0 +2023-04-02 20:22:23,655 INFO [train.py:903] (0/4) Epoch 21, batch 6700, loss[loss=0.1901, simple_loss=0.2627, pruned_loss=0.05869, over 19400.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.0635, over 3824585.34 frames. ], batch size: 47, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:22:52,030 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-02 20:23:19,526 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:23,650 INFO [train.py:903] (0/4) Epoch 21, batch 6750, loss[loss=0.1766, simple_loss=0.263, pruned_loss=0.04514, over 19848.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2865, pruned_loss=0.0633, over 3821601.45 frames. ], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:48,075 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143331.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:48,849 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.995e+02 6.197e+02 7.772e+02 2.067e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-02 20:24:20,248 INFO [train.py:903] (0/4) Epoch 21, batch 6800, loss[loss=0.2096, simple_loss=0.2968, pruned_loss=0.06113, over 19688.00 frames. 
], tot_loss[loss=0.2066, simple_loss=0.2862, pruned_loss=0.06352, over 3831605.10 frames. ], batch size: 60, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:24:50,784 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-21.pt +2023-04-02 20:25:06,360 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 20:25:07,469 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 20:25:09,779 INFO [train.py:903] (0/4) Epoch 22, batch 0, loss[loss=0.2129, simple_loss=0.297, pruned_loss=0.06437, over 19291.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.297, pruned_loss=0.06437, over 19291.00 frames. ], batch size: 66, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:25:09,780 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 20:25:18,164 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4047, 1.3529, 1.4099, 1.7995, 1.4658, 1.5941, 1.5515, 1.5439], + device='cuda:0'), covar=tensor([0.0764, 0.0942, 0.0903, 0.0609, 0.0948, 0.0868, 0.1013, 0.0703], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 20:25:20,457 INFO [train.py:937] (0/4) Epoch 22, validation: loss=0.1683, simple_loss=0.2691, pruned_loss=0.03373, over 944034.00 frames. +2023-04-02 20:25:20,458 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 20:25:31,910 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 20:25:55,279 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143418.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:26:14,247 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.908e+02 5.891e+02 8.006e+02 1.582e+03, threshold=1.178e+03, percent-clipped=4.0 +2023-04-02 20:26:21,017 INFO [train.py:903] (0/4) Epoch 22, batch 50, loss[loss=0.2153, simple_loss=0.2958, pruned_loss=0.06742, over 19332.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2954, pruned_loss=0.06932, over 851964.27 frames. ], batch size: 70, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:26:24,464 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1193, 1.9127, 1.7670, 2.0477, 1.7781, 1.8236, 1.6501, 2.0197], + device='cuda:0'), covar=tensor([0.0943, 0.1381, 0.1501, 0.1047, 0.1300, 0.0541, 0.1481, 0.0667], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0353, 0.0308, 0.0246, 0.0296, 0.0248, 0.0307, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:26:42,120 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:26:53,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 20:27:13,715 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143482.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:27:19,995 INFO [train.py:903] (0/4) Epoch 22, batch 100, loss[loss=0.221, simple_loss=0.3063, pruned_loss=0.0678, over 19748.00 frames. 
], tot_loss[loss=0.2125, simple_loss=0.2917, pruned_loss=0.06669, over 1499904.66 frames. ], batch size: 63, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:27:23,803 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143491.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:27:31,530 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 20:27:56,513 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5760, 1.5302, 1.4734, 1.9647, 1.6264, 1.8145, 1.9500, 1.6564], + device='cuda:0'), covar=tensor([0.0838, 0.0898, 0.1021, 0.0693, 0.0801, 0.0762, 0.0767, 0.0691], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0239, 0.0225, 0.0212, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 20:28:12,239 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.227e+02 6.391e+02 8.671e+02 1.540e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 20:28:19,047 INFO [train.py:903] (0/4) Epoch 22, batch 150, loss[loss=0.2454, simple_loss=0.3152, pruned_loss=0.08784, over 17434.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2913, pruned_loss=0.06688, over 2017076.76 frames. ], batch size: 101, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:28:30,206 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0259, 3.6867, 2.5018, 3.3121, 1.0894, 3.6284, 3.5461, 3.5301], + device='cuda:0'), covar=tensor([0.0768, 0.1129, 0.2058, 0.0978, 0.3886, 0.0858, 0.0999, 0.1329], + device='cuda:0'), in_proj_covar=tensor([0.0499, 0.0405, 0.0490, 0.0343, 0.0401, 0.0427, 0.0421, 0.0457], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:29:18,917 INFO [train.py:903] (0/4) Epoch 22, batch 200, loss[loss=0.2435, simple_loss=0.3181, pruned_loss=0.08443, over 18080.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06688, over 2420575.89 frames. ], batch size: 83, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:29:18,963 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 20:30:12,489 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.534e+02 5.144e+02 6.083e+02 7.742e+02 1.350e+03, threshold=1.217e+03, percent-clipped=1.0 +2023-04-02 20:30:20,919 INFO [train.py:903] (0/4) Epoch 22, batch 250, loss[loss=0.2331, simple_loss=0.3096, pruned_loss=0.07829, over 18405.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2891, pruned_loss=0.06559, over 2737814.75 frames. ], batch size: 84, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:31:20,919 INFO [train.py:903] (0/4) Epoch 22, batch 300, loss[loss=0.2281, simple_loss=0.3095, pruned_loss=0.07333, over 19448.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2896, pruned_loss=0.0652, over 2975530.79 frames. ], batch size: 70, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:31:56,667 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 20:32:15,079 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.028e+02 5.065e+02 6.247e+02 8.237e+02 1.383e+03, threshold=1.249e+03, percent-clipped=3.0 +2023-04-02 20:32:22,209 INFO [train.py:903] (0/4) Epoch 22, batch 350, loss[loss=0.2053, simple_loss=0.2757, pruned_loss=0.06746, over 19633.00 frames. 
], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.0645, over 3172729.67 frames. ], batch size: 50, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:29,137 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 20:32:51,084 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143762.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:32:52,726 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 20:33:20,944 INFO [train.py:903] (0/4) Epoch 22, batch 400, loss[loss=0.1821, simple_loss=0.2579, pruned_loss=0.05315, over 19768.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2882, pruned_loss=0.06495, over 3308671.90 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:15,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.215e+02 6.557e+02 8.093e+02 2.351e+03, threshold=1.311e+03, percent-clipped=8.0 +2023-04-02 20:34:17,778 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143835.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:34:20,915 INFO [train.py:903] (0/4) Epoch 22, batch 450, loss[loss=0.2126, simple_loss=0.2903, pruned_loss=0.06742, over 19669.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2888, pruned_loss=0.06515, over 3429897.33 frames. ], batch size: 58, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:57,883 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 20:34:58,983 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 20:35:08,553 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143877.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:35:22,927 INFO [train.py:903] (0/4) Epoch 22, batch 500, loss[loss=0.2172, simple_loss=0.3023, pruned_loss=0.06603, over 19563.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2888, pruned_loss=0.06534, over 3523889.19 frames. ], batch size: 61, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:17,497 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 5.123e+02 6.359e+02 8.434e+02 1.804e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 20:36:23,288 INFO [train.py:903] (0/4) Epoch 22, batch 550, loss[loss=0.2519, simple_loss=0.3219, pruned_loss=0.09097, over 18322.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2898, pruned_loss=0.06577, over 3591892.18 frames. ], batch size: 83, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:37,249 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143950.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:37:00,075 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5589, 1.3816, 1.4261, 2.0432, 1.5949, 1.8449, 1.9587, 1.6355], + device='cuda:0'), covar=tensor([0.0947, 0.1055, 0.1105, 0.0772, 0.0878, 0.0805, 0.0837, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0223, 0.0227, 0.0240, 0.0226, 0.0213, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 20:37:23,305 INFO [train.py:903] (0/4) Epoch 22, batch 600, loss[loss=0.1936, simple_loss=0.284, pruned_loss=0.05161, over 19659.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2897, pruned_loss=0.0653, over 3651143.59 frames. 
], batch size: 55, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:37:36,828 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-144000.pt +2023-04-02 20:38:02,248 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9889, 3.6526, 2.4619, 3.1883, 0.9829, 3.5680, 3.4132, 3.5427], + device='cuda:0'), covar=tensor([0.0828, 0.1013, 0.2118, 0.0976, 0.3900, 0.0787, 0.1105, 0.1215], + device='cuda:0'), in_proj_covar=tensor([0.0500, 0.0408, 0.0492, 0.0345, 0.0402, 0.0430, 0.0426, 0.0459], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:38:06,649 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 20:38:17,798 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 4.914e+02 6.190e+02 8.004e+02 1.732e+03, threshold=1.238e+03, percent-clipped=3.0 +2023-04-02 20:38:23,572 INFO [train.py:903] (0/4) Epoch 22, batch 650, loss[loss=0.2024, simple_loss=0.2901, pruned_loss=0.05734, over 19614.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2891, pruned_loss=0.06484, over 3703067.62 frames. ], batch size: 57, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:39:26,363 INFO [train.py:903] (0/4) Epoch 22, batch 700, loss[loss=0.1908, simple_loss=0.2679, pruned_loss=0.05685, over 19796.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06362, over 3741066.11 frames. ], batch size: 49, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:40:19,658 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.796e+02 6.107e+02 7.975e+02 1.533e+03, threshold=1.221e+03, percent-clipped=5.0 +2023-04-02 20:40:20,113 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144133.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:40:21,147 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9159, 1.3478, 1.6732, 0.6300, 2.0409, 2.4700, 2.1282, 2.6324], + device='cuda:0'), covar=tensor([0.1521, 0.3568, 0.3083, 0.2613, 0.0610, 0.0251, 0.0342, 0.0360], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0321, 0.0350, 0.0265, 0.0243, 0.0185, 0.0215, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 20:40:26,351 INFO [train.py:903] (0/4) Epoch 22, batch 750, loss[loss=0.1978, simple_loss=0.2737, pruned_loss=0.06092, over 19629.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06409, over 3765107.67 frames. ], batch size: 50, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:40:49,207 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144158.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:41:21,371 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 20:41:26,342 INFO [train.py:903] (0/4) Epoch 22, batch 800, loss[loss=0.207, simple_loss=0.2925, pruned_loss=0.0607, over 18211.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2882, pruned_loss=0.06375, over 3790235.57 frames. ], batch size: 83, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:41:44,760 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 20:41:47,230 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144206.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:42:19,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144231.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:42:20,959 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.021e+02 6.351e+02 8.019e+02 1.751e+03, threshold=1.270e+03, percent-clipped=5.0 +2023-04-02 20:42:26,670 INFO [train.py:903] (0/4) Epoch 22, batch 850, loss[loss=0.1951, simple_loss=0.2838, pruned_loss=0.05317, over 18810.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06381, over 3786487.30 frames. ], batch size: 74, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:43:19,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 20:43:26,414 INFO [train.py:903] (0/4) Epoch 22, batch 900, loss[loss=0.1626, simple_loss=0.2416, pruned_loss=0.04184, over 17813.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06406, over 3782503.85 frames. ], batch size: 39, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:43:42,705 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0466, 5.4545, 3.1106, 4.8283, 0.9508, 5.6962, 5.4420, 5.6757], + device='cuda:0'), covar=tensor([0.0406, 0.0893, 0.1799, 0.0780, 0.4406, 0.0483, 0.0763, 0.0941], + device='cuda:0'), in_proj_covar=tensor([0.0497, 0.0408, 0.0488, 0.0343, 0.0400, 0.0426, 0.0421, 0.0457], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:44:05,037 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6394, 2.4394, 2.3008, 2.6501, 2.4576, 2.1949, 2.2704, 2.4403], + device='cuda:0'), covar=tensor([0.0919, 0.1567, 0.1349, 0.1057, 0.1329, 0.0501, 0.1260, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0357, 0.0312, 0.0250, 0.0300, 0.0249, 0.0308, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:44:21,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.404e+02 5.111e+02 6.361e+02 7.451e+02 1.172e+03, threshold=1.272e+03, percent-clipped=0.0 +2023-04-02 20:44:26,104 INFO [train.py:903] (0/4) Epoch 22, batch 950, loss[loss=0.2344, simple_loss=0.3159, pruned_loss=0.0764, over 19329.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2892, pruned_loss=0.0643, over 3794079.87 frames. ], batch size: 70, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:44:30,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 20:45:27,344 INFO [train.py:903] (0/4) Epoch 22, batch 1000, loss[loss=0.2045, simple_loss=0.2922, pruned_loss=0.05844, over 19463.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2885, pruned_loss=0.06429, over 3799138.99 frames. ], batch size: 64, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:17,109 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=144429.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:46:17,960 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 20:46:22,209 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 5.215e+02 6.539e+02 8.059e+02 1.779e+03, threshold=1.308e+03, percent-clipped=4.0 +2023-04-02 20:46:26,887 INFO [train.py:903] (0/4) Epoch 22, batch 1050, loss[loss=0.2133, simple_loss=0.2928, pruned_loss=0.06691, over 19673.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2876, pruned_loss=0.06413, over 3804716.38 frames. ], batch size: 55, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:53,388 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0355, 5.0528, 5.8411, 5.8524, 1.7361, 5.4516, 4.6893, 5.4616], + device='cuda:0'), covar=tensor([0.1681, 0.0862, 0.0578, 0.0595, 0.6796, 0.0777, 0.0619, 0.1166], + device='cuda:0'), in_proj_covar=tensor([0.0771, 0.0731, 0.0933, 0.0821, 0.0823, 0.0695, 0.0564, 0.0865], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 20:47:00,685 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 20:47:07,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5918, 4.1813, 2.6393, 3.7035, 0.8569, 4.1293, 4.0213, 4.1128], + device='cuda:0'), covar=tensor([0.0643, 0.0954, 0.2015, 0.0856, 0.4307, 0.0638, 0.0955, 0.1211], + device='cuda:0'), in_proj_covar=tensor([0.0496, 0.0407, 0.0486, 0.0342, 0.0400, 0.0427, 0.0421, 0.0457], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:47:26,633 INFO [train.py:903] (0/4) Epoch 22, batch 1100, loss[loss=0.1938, simple_loss=0.2764, pruned_loss=0.05562, over 17979.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2892, pruned_loss=0.06457, over 3808312.11 frames. ], batch size: 83, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:48:21,826 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.380e+02 5.103e+02 6.169e+02 7.547e+02 2.403e+03, threshold=1.234e+03, percent-clipped=2.0 +2023-04-02 20:48:27,964 INFO [train.py:903] (0/4) Epoch 22, batch 1150, loss[loss=0.1844, simple_loss=0.2645, pruned_loss=0.05219, over 19730.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2884, pruned_loss=0.06432, over 3821150.77 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:49:28,349 INFO [train.py:903] (0/4) Epoch 22, batch 1200, loss[loss=0.2519, simple_loss=0.33, pruned_loss=0.08693, over 19665.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.289, pruned_loss=0.0643, over 3815645.23 frames. ], batch size: 59, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:49:59,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 20:50:23,752 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.877e+02 6.112e+02 7.869e+02 2.071e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 20:50:27,117 INFO [train.py:903] (0/4) Epoch 22, batch 1250, loss[loss=0.2587, simple_loss=0.3291, pruned_loss=0.0942, over 19749.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2894, pruned_loss=0.0646, over 3816243.12 frames. 
], batch size: 63, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:50:33,355 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0631, 1.5700, 1.6311, 1.9096, 1.5020, 1.7098, 1.5345, 1.8733], + device='cuda:0'), covar=tensor([0.0927, 0.1287, 0.1355, 0.0812, 0.1232, 0.0536, 0.1404, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0357, 0.0312, 0.0249, 0.0299, 0.0250, 0.0308, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:51:26,211 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0357, 1.2204, 1.6723, 1.2243, 2.6302, 3.6292, 3.4023, 3.8780], + device='cuda:0'), covar=tensor([0.1768, 0.4015, 0.3484, 0.2541, 0.0616, 0.0181, 0.0209, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0320, 0.0349, 0.0263, 0.0241, 0.0184, 0.0214, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 20:51:28,173 INFO [train.py:903] (0/4) Epoch 22, batch 1300, loss[loss=0.2237, simple_loss=0.3024, pruned_loss=0.07253, over 18734.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06413, over 3827850.95 frames. ], batch size: 74, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:51:33,945 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7464, 1.5898, 1.7439, 1.6533, 4.2981, 1.0900, 2.5312, 4.6147], + device='cuda:0'), covar=tensor([0.0440, 0.2694, 0.2918, 0.1998, 0.0750, 0.2768, 0.1482, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0404, 0.0361, 0.0381, 0.0343, 0.0370, 0.0347, 0.0374, 0.0398], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:51:46,040 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-02 20:51:59,492 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 20:52:26,776 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.679e+02 5.951e+02 8.140e+02 2.957e+03, threshold=1.190e+03, percent-clipped=7.0 +2023-04-02 20:52:30,257 INFO [train.py:903] (0/4) Epoch 22, batch 1350, loss[loss=0.2219, simple_loss=0.2827, pruned_loss=0.08058, over 19484.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06477, over 3814219.33 frames. ], batch size: 49, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:39,372 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7762, 1.7660, 1.6691, 1.4417, 1.4350, 1.4704, 0.2915, 0.6798], + device='cuda:0'), covar=tensor([0.0586, 0.0580, 0.0384, 0.0516, 0.1081, 0.0651, 0.1165, 0.0987], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0355, 0.0357, 0.0381, 0.0457, 0.0385, 0.0334, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 20:53:12,534 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=144773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:53:20,643 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 20:53:31,338 INFO [train.py:903] (0/4) Epoch 22, batch 1400, loss[loss=0.2362, simple_loss=0.3078, pruned_loss=0.08232, over 19658.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2885, pruned_loss=0.0645, over 3803308.84 frames. 
], batch size: 60, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:54:22,356 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-02 20:54:28,427 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.383e+02 4.808e+02 5.945e+02 7.380e+02 1.517e+03, threshold=1.189e+03, percent-clipped=2.0 +2023-04-02 20:54:29,515 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 20:54:31,676 INFO [train.py:903] (0/4) Epoch 22, batch 1450, loss[loss=0.2268, simple_loss=0.3033, pruned_loss=0.07514, over 19730.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2876, pruned_loss=0.06405, over 3821098.42 frames. ], batch size: 63, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:55:23,555 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 20:55:30,804 INFO [train.py:903] (0/4) Epoch 22, batch 1500, loss[loss=0.2316, simple_loss=0.3063, pruned_loss=0.07844, over 17474.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2878, pruned_loss=0.06426, over 3806902.80 frames. ], batch size: 101, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:55:31,145 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:56:27,869 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.957e+02 5.987e+02 8.036e+02 1.770e+03, threshold=1.197e+03, percent-clipped=5.0 +2023-04-02 20:56:31,399 INFO [train.py:903] (0/4) Epoch 22, batch 1550, loss[loss=0.2453, simple_loss=0.3212, pruned_loss=0.08466, over 19332.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2885, pruned_loss=0.06491, over 3805285.75 frames. ], batch size: 66, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:57:30,408 INFO [train.py:903] (0/4) Epoch 22, batch 1600, loss[loss=0.2263, simple_loss=0.3045, pruned_loss=0.07407, over 18273.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2881, pruned_loss=0.06449, over 3804353.27 frames. ], batch size: 84, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:57:48,910 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1354, 1.2897, 1.4824, 1.4059, 2.7619, 1.0789, 2.2690, 3.1167], + device='cuda:0'), covar=tensor([0.0557, 0.2659, 0.2877, 0.1784, 0.0744, 0.2360, 0.1132, 0.0315], + device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0365, 0.0388, 0.0349, 0.0375, 0.0351, 0.0378, 0.0404], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 20:57:50,812 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 20:58:02,378 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:58:27,678 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.884e+02 5.870e+02 7.908e+02 1.403e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-02 20:58:31,145 INFO [train.py:903] (0/4) Epoch 22, batch 1650, loss[loss=0.2259, simple_loss=0.3151, pruned_loss=0.06831, over 18740.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2885, pruned_loss=0.0646, over 3811494.91 frames. 
], batch size: 74, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:58:39,506 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:27,008 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:31,360 INFO [train.py:903] (0/4) Epoch 22, batch 1700, loss[loss=0.1673, simple_loss=0.2424, pruned_loss=0.04612, over 19773.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2883, pruned_loss=0.06477, over 3809622.91 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:59:52,393 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.83 vs. limit=5.0 +2023-04-02 21:00:08,581 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 21:00:28,005 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 4.845e+02 6.228e+02 7.810e+02 2.223e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 21:00:33,039 INFO [train.py:903] (0/4) Epoch 22, batch 1750, loss[loss=0.2148, simple_loss=0.2778, pruned_loss=0.07586, over 19291.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06508, over 3812831.76 frames. ], batch size: 44, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:00:40,308 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145144.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:09,158 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145169.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:31,687 INFO [train.py:903] (0/4) Epoch 22, batch 1800, loss[loss=0.2117, simple_loss=0.2932, pruned_loss=0.06511, over 19385.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2889, pruned_loss=0.06502, over 3818735.52 frames. ], batch size: 70, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:01:55,238 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1816, 1.3919, 1.7986, 1.7505, 2.9906, 4.5141, 4.4105, 4.9452], + device='cuda:0'), covar=tensor([0.1717, 0.3848, 0.3479, 0.2211, 0.0619, 0.0210, 0.0166, 0.0179], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 21:02:27,946 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.845e+02 5.086e+02 5.993e+02 7.804e+02 1.410e+03, threshold=1.199e+03, percent-clipped=2.0 +2023-04-02 21:02:27,977 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 21:02:31,524 INFO [train.py:903] (0/4) Epoch 22, batch 1850, loss[loss=0.2587, simple_loss=0.324, pruned_loss=0.09674, over 12937.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06554, over 3825238.45 frames. ], batch size: 137, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:04,109 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 21:03:30,820 INFO [train.py:903] (0/4) Epoch 22, batch 1900, loss[loss=0.2327, simple_loss=0.3137, pruned_loss=0.07586, over 19410.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.29, pruned_loss=0.06553, over 3820439.06 frames. 
], batch size: 70, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:44,232 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6780, 4.2120, 4.4804, 4.4704, 1.6872, 4.1897, 3.6042, 4.1871], + device='cuda:0'), covar=tensor([0.1687, 0.0899, 0.0618, 0.0629, 0.6226, 0.0914, 0.0693, 0.1117], + device='cuda:0'), in_proj_covar=tensor([0.0775, 0.0737, 0.0934, 0.0825, 0.0825, 0.0698, 0.0561, 0.0872], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 21:03:48,269 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 21:03:52,773 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 21:04:15,285 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 21:04:26,521 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.830e+02 5.286e+02 6.049e+02 6.874e+02 1.450e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 21:04:30,791 INFO [train.py:903] (0/4) Epoch 22, batch 1950, loss[loss=0.2296, simple_loss=0.3051, pruned_loss=0.07708, over 18752.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2901, pruned_loss=0.0657, over 3805975.21 frames. ], batch size: 74, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:04:44,257 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8139, 1.5591, 1.4549, 1.7931, 1.4913, 1.5951, 1.4741, 1.7212], + device='cuda:0'), covar=tensor([0.1081, 0.1375, 0.1520, 0.0988, 0.1296, 0.0575, 0.1424, 0.0765], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0356, 0.0312, 0.0249, 0.0300, 0.0250, 0.0309, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:04:55,830 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:31,553 INFO [train.py:903] (0/4) Epoch 22, batch 2000, loss[loss=0.2005, simple_loss=0.2726, pruned_loss=0.06422, over 19374.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2893, pruned_loss=0.06535, over 3810135.21 frames. ], batch size: 48, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:05:32,826 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:32,966 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:19,669 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:27,594 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 4.801e+02 6.060e+02 7.916e+02 1.266e+03, threshold=1.212e+03, percent-clipped=1.0 +2023-04-02 21:06:27,628 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-02 21:06:30,125 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5449, 1.3119, 1.8590, 1.4847, 2.7007, 3.8534, 3.5579, 4.0600], + device='cuda:0'), covar=tensor([0.1423, 0.3773, 0.3138, 0.2203, 0.0604, 0.0166, 0.0193, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 21:06:30,885 INFO [train.py:903] (0/4) Epoch 22, batch 2050, loss[loss=0.1975, simple_loss=0.2716, pruned_loss=0.06173, over 19401.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.289, pruned_loss=0.06484, over 3814330.15 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:06:46,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 21:06:46,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 21:07:06,386 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 21:07:13,309 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:07:30,908 INFO [train.py:903] (0/4) Epoch 22, batch 2100, loss[loss=0.2173, simple_loss=0.2974, pruned_loss=0.06863, over 19535.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2899, pruned_loss=0.0651, over 3825605.46 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:07:51,369 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:08:01,438 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 21:08:22,540 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 21:08:27,087 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 4.926e+02 6.113e+02 7.931e+02 1.598e+03, threshold=1.223e+03, percent-clipped=5.0 +2023-04-02 21:08:30,642 INFO [train.py:903] (0/4) Epoch 22, batch 2150, loss[loss=0.2493, simple_loss=0.3194, pruned_loss=0.08961, over 13022.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2895, pruned_loss=0.06484, over 3816667.03 frames. ], batch size: 135, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:08:38,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:09:32,290 INFO [train.py:903] (0/4) Epoch 22, batch 2200, loss[loss=0.1873, simple_loss=0.2602, pruned_loss=0.05721, over 19758.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2905, pruned_loss=0.06574, over 3824369.72 frames. 
], batch size: 46, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:10:00,376 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145612.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:10:08,473 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6941, 1.5535, 1.5349, 2.1807, 1.7317, 2.0684, 2.1241, 1.8363], + device='cuda:0'), covar=tensor([0.0848, 0.0893, 0.1029, 0.0706, 0.0804, 0.0661, 0.0795, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0221, 0.0226, 0.0240, 0.0225, 0.0212, 0.0186, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 21:10:29,863 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.056e+02 6.179e+02 8.064e+02 2.249e+03, threshold=1.236e+03, percent-clipped=3.0 +2023-04-02 21:10:32,067 INFO [train.py:903] (0/4) Epoch 22, batch 2250, loss[loss=0.2292, simple_loss=0.3114, pruned_loss=0.0735, over 19518.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2893, pruned_loss=0.06459, over 3830453.52 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:11:01,341 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:11:33,806 INFO [train.py:903] (0/4) Epoch 22, batch 2300, loss[loss=0.206, simple_loss=0.2901, pruned_loss=0.0609, over 19757.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2895, pruned_loss=0.06476, over 3809060.88 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:11:45,981 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 21:12:22,729 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:27,059 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:30,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.899e+02 6.109e+02 7.402e+02 2.135e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 21:12:32,754 INFO [train.py:903] (0/4) Epoch 22, batch 2350, loss[loss=0.2423, simple_loss=0.3171, pruned_loss=0.08376, over 19574.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06481, over 3792304.76 frames. ], batch size: 61, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:12:54,257 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:56,382 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5994, 4.1530, 2.8433, 3.6603, 0.8993, 4.1224, 4.0133, 4.1366], + device='cuda:0'), covar=tensor([0.0680, 0.1126, 0.1668, 0.0858, 0.4198, 0.0674, 0.0934, 0.0973], + device='cuda:0'), in_proj_covar=tensor([0.0502, 0.0410, 0.0493, 0.0345, 0.0403, 0.0432, 0.0426, 0.0460], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:13:01,050 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:07,028 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-02 21:13:14,114 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 21:13:31,578 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 21:13:31,980 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:35,674 INFO [train.py:903] (0/4) Epoch 22, batch 2400, loss[loss=0.2629, simple_loss=0.3365, pruned_loss=0.09463, over 19664.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.06476, over 3806211.20 frames. ], batch size: 53, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:13:48,776 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:20,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:34,018 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.826e+02 5.747e+02 7.009e+02 1.532e+03, threshold=1.149e+03, percent-clipped=5.0 +2023-04-02 21:14:35,665 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7696, 1.8850, 2.1014, 2.3324, 1.7174, 2.2267, 2.0858, 1.9296], + device='cuda:0'), covar=tensor([0.4134, 0.3870, 0.1899, 0.2290, 0.3913, 0.2130, 0.5084, 0.3458], + device='cuda:0'), in_proj_covar=tensor([0.0893, 0.0957, 0.0714, 0.0929, 0.0873, 0.0811, 0.0840, 0.0780], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 21:14:36,518 INFO [train.py:903] (0/4) Epoch 22, batch 2450, loss[loss=0.2134, simple_loss=0.2979, pruned_loss=0.06443, over 19472.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2884, pruned_loss=0.06444, over 3816316.19 frames. ], batch size: 64, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:14:37,985 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145839.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:14:49,280 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:52,763 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7404, 1.3217, 1.5458, 1.5093, 3.3304, 1.0590, 2.3464, 3.8084], + device='cuda:0'), covar=tensor([0.0512, 0.2924, 0.2930, 0.1962, 0.0741, 0.2703, 0.1395, 0.0225], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0366, 0.0387, 0.0350, 0.0375, 0.0350, 0.0380, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:15:28,950 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3942, 1.4352, 1.7047, 1.6065, 2.4813, 2.1358, 2.6601, 1.0610], + device='cuda:0'), covar=tensor([0.2509, 0.4391, 0.2756, 0.1948, 0.1454, 0.2186, 0.1331, 0.4643], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0643, 0.0712, 0.0481, 0.0618, 0.0529, 0.0663, 0.0549], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 21:15:37,464 INFO [train.py:903] (0/4) Epoch 22, batch 2500, loss[loss=0.2199, simple_loss=0.3104, pruned_loss=0.06469, over 19656.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06405, over 3824565.66 frames. 
], batch size: 60, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:25,710 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6254, 4.0063, 4.2340, 4.2363, 1.6605, 3.9881, 3.5079, 3.9823], + device='cuda:0'), covar=tensor([0.1718, 0.1028, 0.0694, 0.0756, 0.6049, 0.1149, 0.0724, 0.1233], + device='cuda:0'), in_proj_covar=tensor([0.0781, 0.0741, 0.0941, 0.0828, 0.0830, 0.0704, 0.0565, 0.0878], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 21:16:34,429 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.066e+02 4.836e+02 5.791e+02 7.519e+02 1.267e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 21:16:36,601 INFO [train.py:903] (0/4) Epoch 22, batch 2550, loss[loss=0.2241, simple_loss=0.3149, pruned_loss=0.06663, over 19519.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2887, pruned_loss=0.064, over 3836536.12 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:40,550 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-02 21:16:59,313 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145956.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:17:15,003 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6818, 4.2555, 2.7712, 3.7926, 0.9863, 4.2262, 4.1210, 4.2087], + device='cuda:0'), covar=tensor([0.0570, 0.0897, 0.1911, 0.0860, 0.3922, 0.0646, 0.0815, 0.0978], + device='cuda:0'), in_proj_covar=tensor([0.0501, 0.0407, 0.0489, 0.0342, 0.0399, 0.0427, 0.0422, 0.0456], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:17:33,993 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 21:17:38,147 INFO [train.py:903] (0/4) Epoch 22, batch 2600, loss[loss=0.2192, simple_loss=0.2953, pruned_loss=0.07158, over 19481.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2886, pruned_loss=0.06406, over 3827654.46 frames. ], batch size: 64, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:17:52,879 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-146000.pt +2023-04-02 21:17:59,427 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:18:28,305 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8293, 1.3382, 1.0833, 0.9651, 1.1783, 1.0094, 0.8492, 1.2130], + device='cuda:0'), covar=tensor([0.0660, 0.0817, 0.1148, 0.0727, 0.0561, 0.1283, 0.0669, 0.0512], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0315, 0.0338, 0.0264, 0.0248, 0.0336, 0.0290, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:18:38,079 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.022e+02 6.231e+02 7.783e+02 1.698e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 21:18:40,365 INFO [train.py:903] (0/4) Epoch 22, batch 2650, loss[loss=0.2024, simple_loss=0.2899, pruned_loss=0.05746, over 17981.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.288, pruned_loss=0.06408, over 3830291.73 frames. 
], batch size: 83, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:19:00,427 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 21:19:21,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146071.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:19:35,523 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 21:19:41,307 INFO [train.py:903] (0/4) Epoch 22, batch 2700, loss[loss=0.2257, simple_loss=0.3031, pruned_loss=0.07417, over 19684.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.06445, over 3821210.44 frames. ], batch size: 53, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:20:00,899 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:20,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:32,009 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:39,332 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.787e+02 6.288e+02 8.148e+02 2.582e+03, threshold=1.258e+03, percent-clipped=4.0 +2023-04-02 21:20:41,745 INFO [train.py:903] (0/4) Epoch 22, batch 2750, loss[loss=0.1736, simple_loss=0.2654, pruned_loss=0.04088, over 19683.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06428, over 3821244.38 frames. ], batch size: 53, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:21:05,842 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. limit=5.0 +2023-04-02 21:21:08,980 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0517, 1.1875, 1.6008, 1.0644, 2.3183, 3.0021, 2.8218, 3.3469], + device='cuda:0'), covar=tensor([0.1790, 0.4735, 0.4404, 0.2541, 0.0660, 0.0276, 0.0306, 0.0336], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0320, 0.0352, 0.0265, 0.0242, 0.0186, 0.0215, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 21:21:18,163 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:21:37,421 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146183.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:21:43,817 INFO [train.py:903] (0/4) Epoch 22, batch 2800, loss[loss=0.2432, simple_loss=0.317, pruned_loss=0.08474, over 13308.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2889, pruned_loss=0.06401, over 3807697.77 frames. ], batch size: 136, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:21:47,751 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.04 vs. limit=5.0 +2023-04-02 21:22:42,896 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 4.555e+02 5.863e+02 7.335e+02 1.249e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-02 21:22:45,118 INFO [train.py:903] (0/4) Epoch 22, batch 2850, loss[loss=0.1846, simple_loss=0.2629, pruned_loss=0.05322, over 18181.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2877, pruned_loss=0.06337, over 3814783.74 frames. 
], batch size: 40, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:42,916 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 21:23:45,159 INFO [train.py:903] (0/4) Epoch 22, batch 2900, loss[loss=0.2367, simple_loss=0.3085, pruned_loss=0.08252, over 19529.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06353, over 3818971.07 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:51,888 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6540, 1.4054, 1.4509, 2.0771, 1.6613, 1.9854, 2.0830, 1.7338], + device='cuda:0'), covar=tensor([0.0876, 0.1046, 0.1086, 0.0830, 0.0947, 0.0830, 0.0867, 0.0739], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0239, 0.0225, 0.0210, 0.0186, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 21:23:57,291 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146298.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:24:33,183 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:24:43,684 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 4.789e+02 5.854e+02 7.393e+02 1.532e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-04-02 21:24:45,861 INFO [train.py:903] (0/4) Epoch 22, batch 2950, loss[loss=0.2139, simple_loss=0.2997, pruned_loss=0.064, over 19657.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2874, pruned_loss=0.06341, over 3830746.18 frames. ], batch size: 55, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:25:04,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:32,537 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:33,962 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 21:25:46,771 INFO [train.py:903] (0/4) Epoch 22, batch 3000, loss[loss=0.2031, simple_loss=0.2874, pruned_loss=0.05942, over 19680.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06358, over 3825449.57 frames. ], batch size: 60, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:25:46,772 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 21:25:59,186 INFO [train.py:937] (0/4) Epoch 22, validation: loss=0.1687, simple_loss=0.2687, pruned_loss=0.0344, over 944034.00 frames. +2023-04-02 21:25:59,186 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 21:26:02,608 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 21:26:15,295 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:26:58,610 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 5.066e+02 6.686e+02 8.533e+02 1.871e+03, threshold=1.337e+03, percent-clipped=6.0 +2023-04-02 21:26:59,734 INFO [train.py:903] (0/4) Epoch 22, batch 3050, loss[loss=0.178, simple_loss=0.2608, pruned_loss=0.04761, over 19592.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2879, pruned_loss=0.06377, over 3824607.44 frames. 
], batch size: 52, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:00,901 INFO [train.py:903] (0/4) Epoch 22, batch 3100, loss[loss=0.1884, simple_loss=0.28, pruned_loss=0.04844, over 18069.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2875, pruned_loss=0.06331, over 3836125.30 frames. ], batch size: 83, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:15,879 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2577, 1.5299, 2.0315, 1.7144, 3.2267, 4.8014, 4.6465, 5.1736], + device='cuda:0'), covar=tensor([0.1669, 0.3645, 0.3117, 0.2157, 0.0549, 0.0199, 0.0156, 0.0191], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0215, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 21:28:27,613 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146511.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:28:38,294 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4822, 1.3469, 1.3174, 1.8328, 1.4132, 1.6852, 1.7237, 1.5172], + device='cuda:0'), covar=tensor([0.0799, 0.0910, 0.1015, 0.0604, 0.0775, 0.0723, 0.0771, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0239, 0.0225, 0.0209, 0.0186, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 21:28:59,179 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.077e+02 6.322e+02 8.082e+02 1.628e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 21:29:00,360 INFO [train.py:903] (0/4) Epoch 22, batch 3150, loss[loss=0.1848, simple_loss=0.2666, pruned_loss=0.05149, over 19845.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06445, over 3825820.36 frames. ], batch size: 52, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:29:19,882 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146554.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:29:29,190 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 21:29:30,739 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8647, 2.7357, 2.1375, 2.1343, 1.9620, 2.4237, 1.1190, 1.9724], + device='cuda:0'), covar=tensor([0.0684, 0.0626, 0.0695, 0.1084, 0.1065, 0.1024, 0.1370, 0.1062], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0357, 0.0359, 0.0384, 0.0460, 0.0388, 0.0338, 0.0343], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 21:29:47,823 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8751, 1.9682, 2.2032, 2.5973, 1.9561, 2.5410, 2.2338, 2.0833], + device='cuda:0'), covar=tensor([0.4432, 0.3750, 0.2002, 0.2250, 0.3975, 0.1974, 0.5005, 0.3365], + device='cuda:0'), in_proj_covar=tensor([0.0897, 0.0961, 0.0717, 0.0930, 0.0878, 0.0814, 0.0844, 0.0781], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 21:29:51,107 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146579.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:30:00,733 INFO [train.py:903] (0/4) Epoch 22, batch 3200, loss[loss=0.2222, simple_loss=0.301, pruned_loss=0.07168, over 19526.00 frames. 
], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06417, over 3830737.46 frames. ], batch size: 54, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:30:28,099 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:30:47,445 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:01,671 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 4.719e+02 5.878e+02 7.469e+02 1.229e+03, threshold=1.176e+03, percent-clipped=0.0 +2023-04-02 21:31:02,527 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 21:31:02,818 INFO [train.py:903] (0/4) Epoch 22, batch 3250, loss[loss=0.2114, simple_loss=0.2972, pruned_loss=0.06278, over 19531.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2873, pruned_loss=0.06387, over 3843201.63 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:31:10,856 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:32:03,196 INFO [train.py:903] (0/4) Epoch 22, batch 3300, loss[loss=0.1861, simple_loss=0.2607, pruned_loss=0.05575, over 19307.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06437, over 3823304.61 frames. ], batch size: 44, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:32:08,318 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 21:32:09,862 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 21:32:59,758 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.268e+02 6.638e+02 8.509e+02 1.642e+03, threshold=1.328e+03, percent-clipped=7.0 +2023-04-02 21:33:00,760 INFO [train.py:903] (0/4) Epoch 22, batch 3350, loss[loss=0.2291, simple_loss=0.3039, pruned_loss=0.07711, over 18665.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06475, over 3825747.83 frames. ], batch size: 74, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:00,056 INFO [train.py:903] (0/4) Epoch 22, batch 3400, loss[loss=0.2022, simple_loss=0.28, pruned_loss=0.06223, over 19755.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2882, pruned_loss=0.06479, over 3811992.25 frames. ], batch size: 46, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:59,708 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 5.013e+02 6.159e+02 8.066e+02 2.491e+03, threshold=1.232e+03, percent-clipped=4.0 +2023-04-02 21:35:00,913 INFO [train.py:903] (0/4) Epoch 22, batch 3450, loss[loss=0.2334, simple_loss=0.3111, pruned_loss=0.07785, over 19730.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2882, pruned_loss=0.06465, over 3818861.59 frames. ], batch size: 63, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:35:04,227 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 21:35:28,806 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146862.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:35:54,044 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146882.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:36:01,446 INFO [train.py:903] (0/4) Epoch 22, batch 3500, loss[loss=0.1816, simple_loss=0.2657, pruned_loss=0.04871, over 19846.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2876, pruned_loss=0.06418, over 3815604.78 frames. ], batch size: 52, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:36:23,424 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:37:00,107 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.850e+02 4.615e+02 6.325e+02 8.235e+02 2.059e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 21:37:01,338 INFO [train.py:903] (0/4) Epoch 22, batch 3550, loss[loss=0.1704, simple_loss=0.2467, pruned_loss=0.04708, over 19731.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.06396, over 3823585.34 frames. ], batch size: 46, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:37:18,263 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:37:40,836 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.14 vs. limit=5.0 +2023-04-02 21:38:02,340 INFO [train.py:903] (0/4) Epoch 22, batch 3600, loss[loss=0.195, simple_loss=0.2746, pruned_loss=0.05771, over 19631.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.288, pruned_loss=0.06428, over 3798824.04 frames. ], batch size: 50, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:38:02,532 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:39:01,661 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.391e+02 4.948e+02 6.297e+02 8.667e+02 2.605e+03, threshold=1.259e+03, percent-clipped=8.0 +2023-04-02 21:39:02,705 INFO [train.py:903] (0/4) Epoch 22, batch 3650, loss[loss=0.2584, simple_loss=0.3261, pruned_loss=0.0954, over 19614.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06465, over 3802493.14 frames. ], batch size: 57, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:39:39,070 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:03,858 INFO [train.py:903] (0/4) Epoch 22, batch 3700, loss[loss=0.2916, simple_loss=0.3503, pruned_loss=0.1164, over 13341.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06545, over 3796502.04 frames. 
], batch size: 135, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:40:21,179 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:58,545 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5753, 4.1824, 2.6258, 3.6587, 0.9944, 4.1306, 4.0176, 4.1027], + device='cuda:0'), covar=tensor([0.0636, 0.0997, 0.2014, 0.0839, 0.3976, 0.0724, 0.0928, 0.1252], + device='cuda:0'), in_proj_covar=tensor([0.0503, 0.0410, 0.0492, 0.0343, 0.0401, 0.0431, 0.0425, 0.0459], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:41:02,894 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.816e+02 5.960e+02 7.144e+02 1.653e+03, threshold=1.192e+03, percent-clipped=4.0 +2023-04-02 21:41:04,071 INFO [train.py:903] (0/4) Epoch 22, batch 3750, loss[loss=0.1982, simple_loss=0.2655, pruned_loss=0.06539, over 19755.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2901, pruned_loss=0.06578, over 3809248.56 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:41:21,574 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4889, 1.4944, 1.7669, 1.7047, 2.6977, 2.3027, 2.8826, 1.3261], + device='cuda:0'), covar=tensor([0.2445, 0.4305, 0.2655, 0.1918, 0.1435, 0.2071, 0.1392, 0.4195], + device='cuda:0'), in_proj_covar=tensor([0.0536, 0.0643, 0.0714, 0.0482, 0.0618, 0.0531, 0.0663, 0.0549], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 21:42:04,510 INFO [train.py:903] (0/4) Epoch 22, batch 3800, loss[loss=0.186, simple_loss=0.2613, pruned_loss=0.05537, over 19765.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2892, pruned_loss=0.06514, over 3820261.70 frames. ], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:42:26,697 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147206.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:42:38,889 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 21:43:02,579 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.867e+02 5.077e+02 5.970e+02 7.548e+02 1.289e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 21:43:03,538 INFO [train.py:903] (0/4) Epoch 22, batch 3850, loss[loss=0.2051, simple_loss=0.2851, pruned_loss=0.0625, over 19678.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06511, over 3815976.38 frames. ], batch size: 53, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:43:09,367 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0219, 1.9606, 1.5093, 1.9630, 1.8472, 1.5072, 1.4284, 1.8264], + device='cuda:0'), covar=tensor([0.1188, 0.1612, 0.1851, 0.1217, 0.1571, 0.0832, 0.1868, 0.0927], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0354, 0.0309, 0.0249, 0.0301, 0.0251, 0.0308, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:43:31,162 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147259.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:05,365 INFO [train.py:903] (0/4) Epoch 22, batch 3900, loss[loss=0.2168, simple_loss=0.2977, pruned_loss=0.06801, over 19747.00 frames. 
], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06522, over 3816113.63 frames. ], batch size: 54, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:44:28,436 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-02 21:44:41,083 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8856, 1.7936, 2.0373, 1.6621, 4.4131, 1.1029, 2.5408, 4.8303], + device='cuda:0'), covar=tensor([0.0407, 0.2658, 0.2669, 0.2035, 0.0754, 0.2646, 0.1489, 0.0158], + device='cuda:0'), in_proj_covar=tensor([0.0406, 0.0364, 0.0385, 0.0348, 0.0372, 0.0347, 0.0379, 0.0402], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:44:46,192 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:49,772 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:55,095 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:04,849 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 4.890e+02 6.799e+02 8.609e+02 1.784e+03, threshold=1.360e+03, percent-clipped=9.0 +2023-04-02 21:45:05,887 INFO [train.py:903] (0/4) Epoch 22, batch 3950, loss[loss=0.1794, simple_loss=0.2569, pruned_loss=0.05094, over 16459.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2881, pruned_loss=0.06467, over 3810167.25 frames. ], batch size: 36, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:45:08,145 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 21:45:18,206 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:30,127 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:00,798 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:04,829 INFO [train.py:903] (0/4) Epoch 22, batch 4000, loss[loss=0.2056, simple_loss=0.2831, pruned_loss=0.06405, over 19621.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2872, pruned_loss=0.0641, over 3827208.94 frames. ], batch size: 50, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:46:50,141 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:52,234 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 21:46:54,077 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 21:47:03,821 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.354e+02 4.920e+02 5.925e+02 7.748e+02 1.160e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-04-02 21:47:05,864 INFO [train.py:903] (0/4) Epoch 22, batch 4050, loss[loss=0.2085, simple_loss=0.2798, pruned_loss=0.06855, over 19484.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06418, over 3829945.69 frames. 
], batch size: 49, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:47:24,647 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:47:45,016 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 21:48:07,528 INFO [train.py:903] (0/4) Epoch 22, batch 4100, loss[loss=0.2709, simple_loss=0.3297, pruned_loss=0.106, over 19585.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2882, pruned_loss=0.06498, over 3813879.32 frames. ], batch size: 61, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:48:44,884 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 21:49:07,919 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.254e+02 6.349e+02 7.493e+02 1.711e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-04-02 21:49:09,115 INFO [train.py:903] (0/4) Epoch 22, batch 4150, loss[loss=0.2198, simple_loss=0.3046, pruned_loss=0.06746, over 19733.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2879, pruned_loss=0.06494, over 3816424.83 frames. ], batch size: 63, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:49:56,959 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:09,104 INFO [train.py:903] (0/4) Epoch 22, batch 4200, loss[loss=0.2247, simple_loss=0.3051, pruned_loss=0.07216, over 19687.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2867, pruned_loss=0.06424, over 3817162.33 frames. ], batch size: 59, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:50:13,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 21:50:26,564 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:27,413 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147603.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:09,770 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.933e+02 5.671e+02 7.351e+02 2.024e+03, threshold=1.134e+03, percent-clipped=5.0 +2023-04-02 21:51:10,932 INFO [train.py:903] (0/4) Epoch 22, batch 4250, loss[loss=0.2014, simple_loss=0.2875, pruned_loss=0.05763, over 19775.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2861, pruned_loss=0.06404, over 3815995.38 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:51:25,901 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 21:51:38,183 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 21:51:51,755 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:57,369 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:52:11,224 INFO [train.py:903] (0/4) Epoch 22, batch 4300, loss[loss=0.1871, simple_loss=0.2748, pruned_loss=0.04971, over 19682.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2847, pruned_loss=0.06332, over 3825563.88 frames. 
], batch size: 58, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:52:47,204 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:53:02,952 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 21:53:11,621 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 5.055e+02 6.219e+02 8.115e+02 2.735e+03, threshold=1.244e+03, percent-clipped=11.0 +2023-04-02 21:53:11,639 INFO [train.py:903] (0/4) Epoch 22, batch 4350, loss[loss=0.2259, simple_loss=0.3013, pruned_loss=0.07528, over 19672.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2851, pruned_loss=0.06349, over 3846619.75 frames. ], batch size: 55, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:53:48,686 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:11,180 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:12,000 INFO [train.py:903] (0/4) Epoch 22, batch 4400, loss[loss=0.2769, simple_loss=0.3433, pruned_loss=0.1053, over 12685.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2856, pruned_loss=0.06336, over 3837920.19 frames. ], batch size: 136, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:54:20,676 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:37,134 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 21:54:46,704 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 21:55:06,292 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:12,509 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.137e+02 6.374e+02 7.743e+02 1.534e+03, threshold=1.275e+03, percent-clipped=3.0 +2023-04-02 21:55:12,527 INFO [train.py:903] (0/4) Epoch 22, batch 4450, loss[loss=0.2174, simple_loss=0.3008, pruned_loss=0.06696, over 19517.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2864, pruned_loss=0.06367, over 3835867.67 frames. ], batch size: 64, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:55:57,419 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:57,452 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1339, 1.2887, 1.5361, 1.3225, 2.7582, 1.0064, 2.0831, 3.1084], + device='cuda:0'), covar=tensor([0.0638, 0.2883, 0.2872, 0.1996, 0.0797, 0.2589, 0.1396, 0.0340], + device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0348, 0.0373, 0.0349, 0.0382, 0.0404], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:56:08,753 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:56:12,927 INFO [train.py:903] (0/4) Epoch 22, batch 4500, loss[loss=0.217, simple_loss=0.2987, pruned_loss=0.06766, over 19788.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2881, pruned_loss=0.06459, over 3830422.75 frames. 
], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:56:41,878 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147911.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:03,777 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:15,408 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 4.535e+02 5.624e+02 7.235e+02 1.683e+03, threshold=1.125e+03, percent-clipped=3.0 +2023-04-02 21:57:15,427 INFO [train.py:903] (0/4) Epoch 22, batch 4550, loss[loss=0.2018, simple_loss=0.2947, pruned_loss=0.05451, over 19774.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.06457, over 3840464.09 frames. ], batch size: 54, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:57:23,458 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 21:57:46,092 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 21:57:58,525 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:15,826 INFO [train.py:903] (0/4) Epoch 22, batch 4600, loss[loss=0.2547, simple_loss=0.3268, pruned_loss=0.09125, over 19487.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06463, over 3841638.08 frames. ], batch size: 64, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:58:28,667 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:29,490 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-148000.pt +2023-04-02 21:58:56,656 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:04,092 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 21:59:12,790 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6266, 1.3678, 1.5908, 1.4684, 3.2411, 1.0945, 2.4235, 3.6233], + device='cuda:0'), covar=tensor([0.0522, 0.2672, 0.2775, 0.1864, 0.0717, 0.2478, 0.1163, 0.0257], + device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0347, 0.0374, 0.0348, 0.0381, 0.0403], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 21:59:16,086 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.857e+02 6.032e+02 8.227e+02 1.754e+03, threshold=1.206e+03, percent-clipped=4.0 +2023-04-02 21:59:16,108 INFO [train.py:903] (0/4) Epoch 22, batch 4650, loss[loss=0.2379, simple_loss=0.3115, pruned_loss=0.08213, over 13632.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.06452, over 3842029.40 frames. ], batch size: 135, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 21:59:22,644 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:32,295 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 21:59:43,962 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-02 21:59:53,347 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:00:16,030 INFO [train.py:903] (0/4) Epoch 22, batch 4700, loss[loss=0.1899, simple_loss=0.2696, pruned_loss=0.05511, over 19829.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.064, over 3840038.48 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:00:39,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 22:01:15,694 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:17,614 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.448e+02 6.322e+02 7.572e+02 1.580e+03, threshold=1.264e+03, percent-clipped=4.0 +2023-04-02 22:01:17,634 INFO [train.py:903] (0/4) Epoch 22, batch 4750, loss[loss=0.2177, simple_loss=0.3021, pruned_loss=0.06666, over 19362.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2884, pruned_loss=0.06443, over 3822968.00 frames. ], batch size: 70, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:01:20,378 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:34,514 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 22:01:50,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:52,769 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:05,545 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148177.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:06,208 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 22:02:18,760 INFO [train.py:903] (0/4) Epoch 22, batch 4800, loss[loss=0.2221, simple_loss=0.2995, pruned_loss=0.0724, over 19304.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2882, pruned_loss=0.06407, over 3823438.89 frames. ], batch size: 66, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:02:23,650 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:56,872 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:03:18,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.799e+02 5.785e+02 7.279e+02 1.291e+03, threshold=1.157e+03, percent-clipped=1.0 +2023-04-02 22:03:19,005 INFO [train.py:903] (0/4) Epoch 22, batch 4850, loss[loss=0.2262, simple_loss=0.3059, pruned_loss=0.0732, over 19584.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2878, pruned_loss=0.06424, over 3831935.98 frames. ], batch size: 61, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:03:44,130 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-02 22:04:01,907 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148273.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:02,912 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 22:04:08,108 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 22:04:09,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 22:04:18,711 INFO [train.py:903] (0/4) Epoch 22, batch 4900, loss[loss=0.2171, simple_loss=0.2831, pruned_loss=0.07551, over 19603.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2886, pruned_loss=0.06492, over 3814525.24 frames. ], batch size: 50, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:04:18,727 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 22:04:24,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:39,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 22:04:40,586 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148305.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:14,451 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:19,520 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.396e+02 6.572e+02 8.433e+02 1.736e+03, threshold=1.314e+03, percent-clipped=6.0 +2023-04-02 22:05:19,538 INFO [train.py:903] (0/4) Epoch 22, batch 4950, loss[loss=0.1995, simple_loss=0.2851, pruned_loss=0.05695, over 17428.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2894, pruned_loss=0.06525, over 3804102.54 frames. ], batch size: 101, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:05:26,429 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-02 22:05:35,928 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 22:06:01,175 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 22:06:20,895 INFO [train.py:903] (0/4) Epoch 22, batch 5000, loss[loss=0.2249, simple_loss=0.3029, pruned_loss=0.07342, over 19589.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2879, pruned_loss=0.06438, over 3820179.45 frames. ], batch size: 61, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:06:21,260 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:25,704 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:29,627 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 22:06:40,548 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 22:06:55,431 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:19,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.597e+02 4.828e+02 6.337e+02 7.923e+02 1.456e+03, threshold=1.267e+03, percent-clipped=1.0 +2023-04-02 22:07:19,262 INFO [train.py:903] (0/4) Epoch 22, batch 5050, loss[loss=0.2419, simple_loss=0.3176, pruned_loss=0.08313, over 19462.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06477, over 3812776.86 frames. ], batch size: 64, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:07:30,037 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148447.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:46,919 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3533, 3.0640, 2.2439, 2.7554, 0.9096, 3.0408, 2.9155, 3.0102], + device='cuda:0'), covar=tensor([0.1126, 0.1371, 0.2045, 0.1125, 0.3755, 0.0962, 0.1167, 0.1322], + device='cuda:0'), in_proj_covar=tensor([0.0502, 0.0409, 0.0492, 0.0344, 0.0398, 0.0432, 0.0424, 0.0458], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:07:54,473 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 22:08:19,394 INFO [train.py:903] (0/4) Epoch 22, batch 5100, loss[loss=0.2255, simple_loss=0.3078, pruned_loss=0.07155, over 19581.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.0647, over 3819429.11 frames. ], batch size: 61, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:08:21,057 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 22:08:30,458 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 22:08:33,789 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 22:08:39,193 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 22:08:53,279 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148516.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:19,535 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.468e+02 6.941e+02 9.893e+02 2.948e+03, threshold=1.388e+03, percent-clipped=12.0 +2023-04-02 22:09:19,553 INFO [train.py:903] (0/4) Epoch 22, batch 5150, loss[loss=0.1721, simple_loss=0.2579, pruned_loss=0.04313, over 19671.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2898, pruned_loss=0.06503, over 3815440.30 frames. ], batch size: 53, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:09:31,376 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 22:09:32,953 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:40,842 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-02 22:10:02,797 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:05,888 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:10:20,789 INFO [train.py:903] (0/4) Epoch 22, batch 5200, loss[loss=0.1751, simple_loss=0.2619, pruned_loss=0.04414, over 19861.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06468, over 3812700.07 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:10:23,562 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:33,163 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 22:10:53,687 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:58,724 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0 +2023-04-02 22:11:17,526 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 22:11:21,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.558e+02 5.776e+02 7.251e+02 2.001e+03, threshold=1.155e+03, percent-clipped=2.0 +2023-04-02 22:11:21,018 INFO [train.py:903] (0/4) Epoch 22, batch 5250, loss[loss=0.1987, simple_loss=0.2748, pruned_loss=0.06128, over 19564.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.0648, over 3795550.67 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:11:27,758 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1350, 1.3893, 1.4883, 1.5148, 2.7691, 1.2176, 2.1464, 3.0935], + device='cuda:0'), covar=tensor([0.0529, 0.2500, 0.2840, 0.1693, 0.0702, 0.2198, 0.1231, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0367, 0.0387, 0.0348, 0.0375, 0.0351, 0.0384, 0.0405], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:11:27,866 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:33,739 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:35,097 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3223, 1.4416, 1.9895, 1.4047, 2.7774, 3.7514, 3.4790, 3.9130], + device='cuda:0'), covar=tensor([0.1578, 0.3606, 0.2983, 0.2344, 0.0555, 0.0168, 0.0186, 0.0233], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0326, 0.0357, 0.0268, 0.0248, 0.0190, 0.0218, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 22:11:58,275 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:58,383 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:12:20,268 INFO [train.py:903] (0/4) Epoch 22, batch 5300, loss[loss=0.2059, simple_loss=0.3006, pruned_loss=0.05559, over 19681.00 frames. 
], tot_loss[loss=0.2098, simple_loss=0.2898, pruned_loss=0.06485, over 3808249.96 frames. ], batch size: 55, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:12:39,136 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 22:13:17,781 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:22,173 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.679e+02 5.309e+02 6.457e+02 8.011e+02 2.116e+03, threshold=1.291e+03, percent-clipped=5.0 +2023-04-02 22:13:22,191 INFO [train.py:903] (0/4) Epoch 22, batch 5350, loss[loss=0.1981, simple_loss=0.2779, pruned_loss=0.05918, over 19496.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2909, pruned_loss=0.06547, over 3791030.31 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:13:47,456 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9469, 2.0010, 2.3200, 2.6329, 1.9576, 2.4807, 2.3655, 2.0819], + device='cuda:0'), covar=tensor([0.4248, 0.4051, 0.1934, 0.2307, 0.4058, 0.2163, 0.4627, 0.3450], + device='cuda:0'), in_proj_covar=tensor([0.0897, 0.0961, 0.0716, 0.0929, 0.0877, 0.0814, 0.0840, 0.0780], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 22:13:50,697 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0444, 1.7871, 2.1874, 2.1026, 4.4592, 1.6318, 2.9276, 4.9405], + device='cuda:0'), covar=tensor([0.0449, 0.2893, 0.2456, 0.1863, 0.0732, 0.2402, 0.1254, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0368, 0.0387, 0.0347, 0.0375, 0.0350, 0.0383, 0.0405], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:13:53,897 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:55,486 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 22:14:03,701 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1843, 1.4834, 1.6406, 1.4112, 2.8306, 1.0372, 2.2786, 3.2255], + device='cuda:0'), covar=tensor([0.0453, 0.2304, 0.2353, 0.1723, 0.0645, 0.2284, 0.1027, 0.0230], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0369, 0.0388, 0.0348, 0.0376, 0.0351, 0.0384, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:14:24,407 INFO [train.py:903] (0/4) Epoch 22, batch 5400, loss[loss=0.2078, simple_loss=0.2936, pruned_loss=0.06104, over 19289.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2898, pruned_loss=0.065, over 3789610.71 frames. ], batch size: 66, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:14:28,071 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:15:24,091 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 4.882e+02 5.931e+02 8.184e+02 2.288e+03, threshold=1.186e+03, percent-clipped=7.0 +2023-04-02 22:15:24,109 INFO [train.py:903] (0/4) Epoch 22, batch 5450, loss[loss=0.2044, simple_loss=0.2872, pruned_loss=0.06079, over 19661.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2897, pruned_loss=0.06509, over 3800090.11 frames. 
], batch size: 58, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:15:50,186 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:24,044 INFO [train.py:903] (0/4) Epoch 22, batch 5500, loss[loss=0.2051, simple_loss=0.2936, pruned_loss=0.05831, over 19545.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2907, pruned_loss=0.06577, over 3795966.78 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:16:47,559 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:49,405 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 22:16:50,917 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9356, 1.2028, 1.4703, 0.5596, 2.0222, 2.4199, 2.1509, 2.5835], + device='cuda:0'), covar=tensor([0.1654, 0.3873, 0.3581, 0.2895, 0.0629, 0.0285, 0.0342, 0.0363], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0324, 0.0356, 0.0266, 0.0247, 0.0189, 0.0216, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 22:17:25,271 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.956e+02 6.031e+02 8.410e+02 1.981e+03, threshold=1.206e+03, percent-clipped=11.0 +2023-04-02 22:17:25,290 INFO [train.py:903] (0/4) Epoch 22, batch 5550, loss[loss=0.1705, simple_loss=0.247, pruned_loss=0.04701, over 17280.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06437, over 3800210.93 frames. ], batch size: 38, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:17:33,836 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 22:17:57,713 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5542, 1.2218, 1.3704, 1.1573, 2.1949, 0.9817, 2.1012, 2.5075], + device='cuda:0'), covar=tensor([0.0772, 0.2798, 0.2999, 0.1893, 0.0873, 0.2261, 0.1059, 0.0467], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0367, 0.0386, 0.0347, 0.0375, 0.0351, 0.0382, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:18:10,822 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:18:21,581 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 22:18:27,007 INFO [train.py:903] (0/4) Epoch 22, batch 5600, loss[loss=0.1709, simple_loss=0.252, pruned_loss=0.04489, over 19615.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2893, pruned_loss=0.06456, over 3813113.04 frames. ], batch size: 50, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:18:56,760 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149013.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:06,015 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:10,575 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-04-02 22:19:27,601 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 5.096e+02 6.059e+02 8.103e+02 1.757e+03, threshold=1.212e+03, percent-clipped=10.0 +2023-04-02 22:19:27,619 INFO [train.py:903] (0/4) Epoch 22, batch 5650, loss[loss=0.1974, simple_loss=0.2786, pruned_loss=0.0581, over 19592.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2889, pruned_loss=0.06399, over 3824839.92 frames. ], batch size: 50, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:19:30,322 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2676, 2.9917, 2.3491, 2.2879, 2.2354, 2.6144, 1.2194, 2.1296], + device='cuda:0'), covar=tensor([0.0624, 0.0601, 0.0706, 0.1172, 0.1076, 0.1036, 0.1382, 0.1073], + device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0357, 0.0359, 0.0382, 0.0462, 0.0390, 0.0337, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 22:19:35,857 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:15,158 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 22:20:16,290 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:28,281 INFO [train.py:903] (0/4) Epoch 22, batch 5700, loss[loss=0.2011, simple_loss=0.2735, pruned_loss=0.06433, over 19366.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2871, pruned_loss=0.06307, over 3836151.37 frames. ], batch size: 47, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:17,800 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:21:29,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.618e+02 6.077e+02 7.635e+02 1.470e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-04-02 22:21:29,614 INFO [train.py:903] (0/4) Epoch 22, batch 5750, loss[loss=0.1905, simple_loss=0.2642, pruned_loss=0.05836, over 19775.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2883, pruned_loss=0.06314, over 3848412.68 frames. ], batch size: 47, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:30,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 22:21:39,631 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 22:21:46,332 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 22:21:59,230 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:03,954 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0454, 2.0942, 2.3682, 2.7032, 2.0018, 2.6027, 2.3720, 2.1687], + device='cuda:0'), covar=tensor([0.4234, 0.3837, 0.1977, 0.2404, 0.4190, 0.2085, 0.5006, 0.3419], + device='cuda:0'), in_proj_covar=tensor([0.0897, 0.0962, 0.0718, 0.0933, 0.0880, 0.0814, 0.0841, 0.0781], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 22:22:29,799 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:30,643 INFO [train.py:903] (0/4) Epoch 22, batch 5800, loss[loss=0.2715, simple_loss=0.3269, pruned_loss=0.1081, over 13724.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2874, pruned_loss=0.0632, over 3850700.44 frames. ], batch size: 135, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:22:37,196 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:23,057 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149231.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:30,410 INFO [train.py:903] (0/4) Epoch 22, batch 5850, loss[loss=0.2153, simple_loss=0.2995, pruned_loss=0.06556, over 19601.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2884, pruned_loss=0.06393, over 3856241.58 frames. ], batch size: 61, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:23:31,585 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.174e+02 6.346e+02 7.936e+02 1.645e+03, threshold=1.269e+03, percent-clipped=7.0 +2023-04-02 22:23:39,918 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4464, 1.2886, 1.2291, 1.4628, 1.2410, 1.2577, 1.2248, 1.3688], + device='cuda:0'), covar=tensor([0.0846, 0.1131, 0.1173, 0.0830, 0.1041, 0.0494, 0.1151, 0.0673], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0354, 0.0312, 0.0250, 0.0300, 0.0250, 0.0309, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:23:52,890 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:24:19,376 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6655, 1.5218, 1.5648, 2.0934, 1.5886, 1.9772, 2.1038, 1.7440], + device='cuda:0'), covar=tensor([0.0882, 0.0957, 0.1001, 0.0801, 0.0900, 0.0763, 0.0785, 0.0696], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0223, 0.0226, 0.0241, 0.0229, 0.0213, 0.0187, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 22:24:30,285 INFO [train.py:903] (0/4) Epoch 22, batch 5900, loss[loss=0.1859, simple_loss=0.2596, pruned_loss=0.05607, over 19477.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06472, over 3840691.18 frames. ], batch size: 49, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:24:35,575 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-02 22:24:56,388 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 22:25:31,743 INFO [train.py:903] (0/4) Epoch 22, batch 5950, loss[loss=0.1947, simple_loss=0.2665, pruned_loss=0.06142, over 19617.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.289, pruned_loss=0.06484, over 3824734.15 frames. ], batch size: 50, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:25:32,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.792e+02 4.957e+02 5.985e+02 7.132e+02 1.534e+03, threshold=1.197e+03, percent-clipped=1.0 +2023-04-02 22:26:26,169 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 22:26:28,453 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:26:33,222 INFO [train.py:903] (0/4) Epoch 22, batch 6000, loss[loss=0.1919, simple_loss=0.2728, pruned_loss=0.05547, over 19496.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06455, over 3831133.68 frames. ], batch size: 49, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:26:33,223 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 22:26:40,161 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4683, 1.4825, 1.5033, 1.8444, 1.4859, 1.6312, 1.6326, 1.6323], + device='cuda:0'), covar=tensor([0.0916, 0.0986, 0.1008, 0.0643, 0.0946, 0.0900, 0.0975, 0.0714], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0223, 0.0226, 0.0241, 0.0229, 0.0214, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 22:26:43,625 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2040, 3.7851, 3.8501, 3.8621, 1.7284, 3.6039, 3.2938, 3.5890], + device='cuda:0'), covar=tensor([0.1810, 0.0690, 0.0686, 0.0708, 0.6094, 0.0945, 0.0719, 0.1230], + device='cuda:0'), in_proj_covar=tensor([0.0783, 0.0743, 0.0949, 0.0833, 0.0835, 0.0714, 0.0566, 0.0883], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:26:46,899 INFO [train.py:937] (0/4) Epoch 22, validation: loss=0.1681, simple_loss=0.2682, pruned_loss=0.03398, over 944034.00 frames. +2023-04-02 22:26:46,901 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 22:27:13,614 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:27:48,665 INFO [train.py:903] (0/4) Epoch 22, batch 6050, loss[loss=0.1584, simple_loss=0.2452, pruned_loss=0.03578, over 19387.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06389, over 3841651.74 frames. ], batch size: 48, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:27:49,810 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.880e+02 5.766e+02 7.280e+02 1.810e+03, threshold=1.153e+03, percent-clipped=3.0 +2023-04-02 22:27:52,759 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. 
limit=5.0 +2023-04-02 22:28:02,640 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:07,331 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3850, 3.6592, 2.1723, 2.2388, 3.3296, 1.9944, 1.6955, 2.3460], + device='cuda:0'), covar=tensor([0.1303, 0.0650, 0.1101, 0.0893, 0.0523, 0.1199, 0.0971, 0.0762], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0266, 0.0248, 0.0338, 0.0291, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:28:32,559 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:50,322 INFO [train.py:903] (0/4) Epoch 22, batch 6100, loss[loss=0.2008, simple_loss=0.2868, pruned_loss=0.05745, over 18732.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06332, over 3838383.29 frames. ], batch size: 74, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:28:59,759 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8033, 1.7507, 1.7202, 1.6153, 1.5086, 1.6299, 0.7954, 1.2334], + device='cuda:0'), covar=tensor([0.0635, 0.0659, 0.0390, 0.0630, 0.0983, 0.0865, 0.1261, 0.0918], + device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0355, 0.0358, 0.0381, 0.0462, 0.0389, 0.0336, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 22:29:02,457 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-02 22:29:42,756 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:29:48,971 INFO [train.py:903] (0/4) Epoch 22, batch 6150, loss[loss=0.2178, simple_loss=0.2811, pruned_loss=0.07728, over 19358.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.287, pruned_loss=0.06371, over 3833799.32 frames. 
], batch size: 44, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:49,331 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4369, 1.3985, 1.3423, 1.8339, 1.3448, 1.5936, 1.7042, 1.5142], + device='cuda:0'), covar=tensor([0.0943, 0.0982, 0.1099, 0.0648, 0.0889, 0.0813, 0.0810, 0.0776], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0224, 0.0240, 0.0227, 0.0212, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 22:29:50,016 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.761e+02 6.063e+02 7.648e+02 1.908e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 22:29:50,445 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2078, 2.0394, 1.7941, 2.1774, 1.9031, 1.8822, 1.7966, 2.0711], + device='cuda:0'), covar=tensor([0.0976, 0.1430, 0.1555, 0.1027, 0.1408, 0.0569, 0.1404, 0.0730], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0354, 0.0312, 0.0250, 0.0301, 0.0250, 0.0309, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:30:15,388 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0484, 5.1019, 5.8638, 5.8588, 2.0309, 5.5683, 4.6453, 5.5020], + device='cuda:0'), covar=tensor([0.1642, 0.0874, 0.0567, 0.0576, 0.6040, 0.0768, 0.0626, 0.1196], + device='cuda:0'), in_proj_covar=tensor([0.0779, 0.0740, 0.0943, 0.0827, 0.0831, 0.0708, 0.0563, 0.0877], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:30:19,811 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 22:30:29,462 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-02 22:30:43,268 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:30:49,418 INFO [train.py:903] (0/4) Epoch 22, batch 6200, loss[loss=0.221, simple_loss=0.3056, pruned_loss=0.06821, over 19473.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2871, pruned_loss=0.06364, over 3840102.63 frames. ], batch size: 64, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:31:51,302 INFO [train.py:903] (0/4) Epoch 22, batch 6250, loss[loss=0.1712, simple_loss=0.2607, pruned_loss=0.04086, over 19773.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2864, pruned_loss=0.06318, over 3842702.41 frames. ], batch size: 54, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:31:52,391 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.068e+02 6.178e+02 8.268e+02 1.694e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 22:32:21,865 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 22:32:52,473 INFO [train.py:903] (0/4) Epoch 22, batch 6300, loss[loss=0.2166, simple_loss=0.298, pruned_loss=0.06757, over 19368.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2867, pruned_loss=0.06319, over 3840153.47 frames. 
], batch size: 70, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:32:59,392 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6731, 1.6053, 1.5015, 2.1095, 1.5280, 1.9773, 2.0156, 1.7661], + device='cuda:0'), covar=tensor([0.0846, 0.0918, 0.1028, 0.0719, 0.0875, 0.0720, 0.0831, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0220, 0.0222, 0.0238, 0.0226, 0.0211, 0.0186, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 22:33:51,829 INFO [train.py:903] (0/4) Epoch 22, batch 6350, loss[loss=0.2352, simple_loss=0.3194, pruned_loss=0.07548, over 19297.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2856, pruned_loss=0.06268, over 3844086.04 frames. ], batch size: 66, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:33:52,934 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.551e+02 6.531e+02 8.044e+02 1.579e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-02 22:34:23,929 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149764.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:34:52,496 INFO [train.py:903] (0/4) Epoch 22, batch 6400, loss[loss=0.2414, simple_loss=0.3152, pruned_loss=0.08377, over 19765.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2872, pruned_loss=0.06321, over 3846638.89 frames. ], batch size: 63, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:54,292 INFO [train.py:903] (0/4) Epoch 22, batch 6450, loss[loss=0.1891, simple_loss=0.264, pruned_loss=0.0571, over 19400.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2877, pruned_loss=0.06361, over 3836205.18 frames. ], batch size: 48, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:55,267 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.788e+02 5.699e+02 7.070e+02 1.580e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-02 22:36:39,134 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 22:36:40,526 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:36:54,748 INFO [train.py:903] (0/4) Epoch 22, batch 6500, loss[loss=0.2196, simple_loss=0.3137, pruned_loss=0.06268, over 19619.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.287, pruned_loss=0.06309, over 3849543.75 frames. ], batch size: 57, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:00,216 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 22:37:25,352 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5640, 4.0812, 4.2588, 4.2426, 1.7065, 3.9795, 3.4683, 3.9813], + device='cuda:0'), covar=tensor([0.1683, 0.0798, 0.0622, 0.0726, 0.5679, 0.0930, 0.0714, 0.1140], + device='cuda:0'), in_proj_covar=tensor([0.0784, 0.0743, 0.0950, 0.0827, 0.0835, 0.0710, 0.0565, 0.0879], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:37:41,908 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:37:55,100 INFO [train.py:903] (0/4) Epoch 22, batch 6550, loss[loss=0.2284, simple_loss=0.3098, pruned_loss=0.07343, over 19683.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2882, pruned_loss=0.06378, over 3834249.59 frames. 
], batch size: 59, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:56,255 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 4.654e+02 5.933e+02 7.304e+02 1.667e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 22:38:45,109 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8284, 1.5775, 1.4662, 1.7767, 1.5024, 1.5564, 1.5093, 1.6791], + device='cuda:0'), covar=tensor([0.1069, 0.1348, 0.1558, 0.0996, 0.1266, 0.0594, 0.1419, 0.0796], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0354, 0.0312, 0.0250, 0.0302, 0.0251, 0.0308, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:38:55,835 INFO [train.py:903] (0/4) Epoch 22, batch 6600, loss[loss=0.1849, simple_loss=0.2764, pruned_loss=0.04666, over 19680.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2894, pruned_loss=0.06436, over 3834782.96 frames. ], batch size: 53, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:38:59,608 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:39:11,411 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-150000.pt +2023-04-02 22:39:59,893 INFO [train.py:903] (0/4) Epoch 22, batch 6650, loss[loss=0.2121, simple_loss=0.2759, pruned_loss=0.07414, over 19790.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2883, pruned_loss=0.0638, over 3832097.34 frames. ], batch size: 48, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:40:01,053 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 4.673e+02 5.867e+02 7.414e+02 1.313e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 22:40:04,720 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:40:59,360 INFO [train.py:903] (0/4) Epoch 22, batch 6700, loss[loss=0.2009, simple_loss=0.2875, pruned_loss=0.05719, over 19732.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06381, over 3838914.60 frames. ], batch size: 63, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:10,387 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:41:10,938 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 22:41:23,633 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150108.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:41:57,256 INFO [train.py:903] (0/4) Epoch 22, batch 6750, loss[loss=0.2002, simple_loss=0.2833, pruned_loss=0.05856, over 19672.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06395, over 3839903.54 frames. 
], batch size: 58, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:58,371 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 4.879e+02 6.504e+02 7.654e+02 1.720e+03, threshold=1.301e+03, percent-clipped=5.0 +2023-04-02 22:42:17,374 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9589, 1.8301, 1.5055, 1.8849, 1.7504, 1.4370, 1.4785, 1.7832], + device='cuda:0'), covar=tensor([0.1129, 0.1534, 0.1774, 0.1195, 0.1489, 0.0838, 0.1808, 0.0903], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0352, 0.0312, 0.0249, 0.0301, 0.0250, 0.0308, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:42:53,133 INFO [train.py:903] (0/4) Epoch 22, batch 6800, loss[loss=0.1897, simple_loss=0.2732, pruned_loss=0.05313, over 19664.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.287, pruned_loss=0.0634, over 3829368.83 frames. ], batch size: 55, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:43:23,023 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-22.pt +2023-04-02 22:43:39,240 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 22:43:39,707 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 22:43:42,623 INFO [train.py:903] (0/4) Epoch 23, batch 0, loss[loss=0.2004, simple_loss=0.2664, pruned_loss=0.0672, over 19036.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2664, pruned_loss=0.0672, over 19036.00 frames. ], batch size: 42, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:43:42,624 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 22:43:54,251 INFO [train.py:937] (0/4) Epoch 23, validation: loss=0.1688, simple_loss=0.2693, pruned_loss=0.03418, over 944034.00 frames. +2023-04-02 22:43:54,251 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 22:43:54,671 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3755, 1.4612, 1.6641, 1.5991, 2.4891, 2.0992, 2.5388, 1.0544], + device='cuda:0'), covar=tensor([0.2560, 0.4337, 0.2589, 0.1941, 0.1469, 0.2278, 0.1427, 0.4486], + device='cuda:0'), in_proj_covar=tensor([0.0536, 0.0643, 0.0714, 0.0483, 0.0620, 0.0531, 0.0662, 0.0548], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 22:44:03,545 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150223.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:44:06,626 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 22:44:21,434 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.848e+02 5.561e+02 7.527e+02 1.735e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-04-02 22:44:31,829 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:44:55,831 INFO [train.py:903] (0/4) Epoch 23, batch 50, loss[loss=0.2282, simple_loss=0.3083, pruned_loss=0.07408, over 19468.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2864, pruned_loss=0.06261, over 870087.41 frames. 
], batch size: 64, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:45:03,001 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:27,179 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:30,280 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 22:45:35,174 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:57,947 INFO [train.py:903] (0/4) Epoch 23, batch 100, loss[loss=0.2309, simple_loss=0.3081, pruned_loss=0.07688, over 13167.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2853, pruned_loss=0.06231, over 1526992.47 frames. ], batch size: 135, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:46:06,475 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:46:07,258 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 22:46:26,505 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.910e+02 5.630e+02 7.676e+02 1.557e+03, threshold=1.126e+03, percent-clipped=7.0 +2023-04-02 22:46:59,546 INFO [train.py:903] (0/4) Epoch 23, batch 150, loss[loss=0.1885, simple_loss=0.2776, pruned_loss=0.04975, over 19525.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2854, pruned_loss=0.06248, over 2043211.72 frames. ], batch size: 54, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:47:22,245 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9018, 3.3207, 3.6404, 3.6413, 1.7458, 3.4383, 2.9752, 3.1700], + device='cuda:0'), covar=tensor([0.2744, 0.2229, 0.1196, 0.1658, 0.7334, 0.2266, 0.1407, 0.2161], + device='cuda:0'), in_proj_covar=tensor([0.0789, 0.0747, 0.0955, 0.0836, 0.0839, 0.0715, 0.0569, 0.0882], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:47:23,460 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6725, 1.5654, 1.5341, 2.1405, 1.5980, 2.0196, 2.0728, 1.7784], + device='cuda:0'), covar=tensor([0.0877, 0.0960, 0.1034, 0.0794, 0.0919, 0.0752, 0.0866, 0.0718], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0222, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 22:47:59,877 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 22:48:00,265 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2763, 1.9944, 2.0970, 2.7745, 2.0371, 2.4063, 2.5151, 2.2812], + device='cuda:0'), covar=tensor([0.0749, 0.0909, 0.0889, 0.0827, 0.0835, 0.0778, 0.0924, 0.0648], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0222, 0.0239, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 22:48:01,004 INFO [train.py:903] (0/4) Epoch 23, batch 200, loss[loss=0.1764, simple_loss=0.2688, pruned_loss=0.04198, over 19763.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2863, pruned_loss=0.06377, over 2448264.03 frames. 
], batch size: 54, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:48:01,405 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5987, 1.5505, 1.5177, 1.9752, 1.5768, 1.8606, 1.9619, 1.7494], + device='cuda:0'), covar=tensor([0.0824, 0.0911, 0.0991, 0.0755, 0.0800, 0.0752, 0.0781, 0.0672], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0222, 0.0239, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 22:48:26,084 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 22:48:30,853 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 5.350e+02 6.653e+02 9.712e+02 2.771e+03, threshold=1.331e+03, percent-clipped=16.0 +2023-04-02 22:48:33,196 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150441.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:48:41,458 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4040, 2.1704, 1.6266, 1.3795, 2.0201, 1.2959, 1.4395, 1.8414], + device='cuda:0'), covar=tensor([0.1081, 0.0844, 0.1101, 0.0854, 0.0543, 0.1324, 0.0679, 0.0524], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0317, 0.0339, 0.0267, 0.0247, 0.0336, 0.0291, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:49:02,670 INFO [train.py:903] (0/4) Epoch 23, batch 250, loss[loss=0.2032, simple_loss=0.2887, pruned_loss=0.05888, over 18247.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.287, pruned_loss=0.06457, over 2754167.31 frames. ], batch size: 83, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:49:20,036 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150479.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:49:36,975 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8691, 4.3280, 4.6218, 4.6168, 1.7854, 4.3250, 3.7943, 4.3143], + device='cuda:0'), covar=tensor([0.1709, 0.0989, 0.0655, 0.0685, 0.6101, 0.0978, 0.0696, 0.1184], + device='cuda:0'), in_proj_covar=tensor([0.0784, 0.0744, 0.0951, 0.0832, 0.0836, 0.0712, 0.0566, 0.0878], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:49:49,516 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7905, 4.3051, 2.8700, 3.7512, 0.8841, 4.2596, 4.1452, 4.3372], + device='cuda:0'), covar=tensor([0.0679, 0.1202, 0.1915, 0.0936, 0.4296, 0.0724, 0.0927, 0.1307], + device='cuda:0'), in_proj_covar=tensor([0.0504, 0.0412, 0.0497, 0.0345, 0.0400, 0.0433, 0.0425, 0.0460], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:49:49,699 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150504.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:50:05,860 INFO [train.py:903] (0/4) Epoch 23, batch 300, loss[loss=0.19, simple_loss=0.2758, pruned_loss=0.0521, over 17464.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2852, pruned_loss=0.06292, over 3004967.00 frames. 
], batch size: 101, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:50:34,492 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.024e+02 5.928e+02 7.198e+02 2.066e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 22:50:54,914 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150555.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:50:56,018 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:50:59,515 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2428, 1.3103, 1.2851, 1.0654, 1.0834, 1.1149, 0.0710, 0.3800], + device='cuda:0'), covar=tensor([0.0676, 0.0632, 0.0404, 0.0541, 0.1236, 0.0623, 0.1253, 0.1085], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0354, 0.0358, 0.0380, 0.0458, 0.0387, 0.0334, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 22:51:07,066 INFO [train.py:903] (0/4) Epoch 23, batch 350, loss[loss=0.1977, simple_loss=0.2834, pruned_loss=0.05602, over 19643.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2859, pruned_loss=0.06344, over 3183956.38 frames. ], batch size: 53, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:51:11,934 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:51:24,685 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8113, 4.2912, 4.5093, 4.5065, 1.7912, 4.1941, 3.7407, 4.2438], + device='cuda:0'), covar=tensor([0.1583, 0.0804, 0.0607, 0.0648, 0.5696, 0.0838, 0.0645, 0.1093], + device='cuda:0'), in_proj_covar=tensor([0.0787, 0.0745, 0.0954, 0.0832, 0.0838, 0.0713, 0.0568, 0.0880], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 22:51:53,186 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2948, 2.4090, 2.5230, 2.9712, 2.4566, 2.8772, 2.5453, 2.3979], + device='cuda:0'), covar=tensor([0.3615, 0.3116, 0.1567, 0.1965, 0.3335, 0.1670, 0.3819, 0.2609], + device='cuda:0'), in_proj_covar=tensor([0.0903, 0.0968, 0.0719, 0.0932, 0.0882, 0.0819, 0.0844, 0.0785], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 22:52:09,969 INFO [train.py:903] (0/4) Epoch 23, batch 400, loss[loss=0.2031, simple_loss=0.2649, pruned_loss=0.07066, over 19761.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06336, over 3324608.70 frames. 
], batch size: 47, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:52:18,810 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3490, 2.1028, 1.6901, 1.4595, 1.9192, 1.4023, 1.3912, 1.8561], + device='cuda:0'), covar=tensor([0.0939, 0.0776, 0.0983, 0.0830, 0.0563, 0.1247, 0.0649, 0.0458], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0316, 0.0339, 0.0266, 0.0246, 0.0336, 0.0290, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 22:52:36,586 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:52:40,894 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.962e+02 5.093e+02 6.486e+02 8.008e+02 1.724e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 22:53:11,936 INFO [train.py:903] (0/4) Epoch 23, batch 450, loss[loss=0.165, simple_loss=0.2398, pruned_loss=0.04507, over 19025.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06347, over 3429677.83 frames. ], batch size: 42, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:53:46,065 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 22:53:46,092 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 22:54:10,481 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5760, 1.6306, 1.9272, 1.7819, 2.7549, 2.3724, 2.8639, 1.4050], + device='cuda:0'), covar=tensor([0.2479, 0.4262, 0.2610, 0.1924, 0.1445, 0.2093, 0.1387, 0.4338], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0644, 0.0713, 0.0485, 0.0617, 0.0532, 0.0662, 0.0548], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 22:54:15,729 INFO [train.py:903] (0/4) Epoch 23, batch 500, loss[loss=0.1836, simple_loss=0.2731, pruned_loss=0.04703, over 19839.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.286, pruned_loss=0.06289, over 3529970.50 frames. ], batch size: 52, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:54:45,188 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 5.191e+02 6.635e+02 8.528e+02 2.142e+03, threshold=1.327e+03, percent-clipped=5.0 +2023-04-02 22:54:56,598 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 22:54:57,358 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:54:58,595 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:55:17,411 INFO [train.py:903] (0/4) Epoch 23, batch 550, loss[loss=0.2359, simple_loss=0.3136, pruned_loss=0.07914, over 19486.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06335, over 3612525.53 frames. ], batch size: 64, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:56:14,501 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:18,591 INFO [train.py:903] (0/4) Epoch 23, batch 600, loss[loss=0.1797, simple_loss=0.255, pruned_loss=0.05215, over 19378.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06402, over 3658175.88 frames. 
], batch size: 47, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:56:45,478 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150837.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:48,650 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+02 5.000e+02 5.859e+02 6.998e+02 1.831e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-02 22:56:59,201 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 22:57:21,232 INFO [train.py:903] (0/4) Epoch 23, batch 650, loss[loss=0.1996, simple_loss=0.2851, pruned_loss=0.05703, over 19670.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2872, pruned_loss=0.06375, over 3699010.73 frames. ], batch size: 53, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:02,420 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150899.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:23,646 INFO [train.py:903] (0/4) Epoch 23, batch 700, loss[loss=0.1902, simple_loss=0.2714, pruned_loss=0.05453, over 19681.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2864, pruned_loss=0.06327, over 3731026.97 frames. ], batch size: 53, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:26,208 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:52,734 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.019e+02 5.794e+02 7.164e+02 1.349e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 22:59:26,033 INFO [train.py:903] (0/4) Epoch 23, batch 750, loss[loss=0.1597, simple_loss=0.2374, pruned_loss=0.04096, over 19756.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2863, pruned_loss=0.06326, over 3758917.47 frames. ], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:59:36,730 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0283, 2.0633, 2.3455, 2.7492, 2.0066, 2.6246, 2.4424, 2.1386], + device='cuda:0'), covar=tensor([0.4154, 0.3852, 0.1849, 0.2346, 0.4008, 0.1999, 0.4703, 0.3342], + device='cuda:0'), in_proj_covar=tensor([0.0902, 0.0968, 0.0718, 0.0932, 0.0884, 0.0819, 0.0845, 0.0784], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 23:00:06,239 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9682, 4.1048, 4.6922, 4.7661, 1.8287, 4.4423, 3.7248, 4.1245], + device='cuda:0'), covar=tensor([0.1970, 0.1349, 0.0897, 0.0946, 0.7109, 0.1595, 0.1092, 0.1822], + device='cuda:0'), in_proj_covar=tensor([0.0789, 0.0752, 0.0958, 0.0836, 0.0844, 0.0715, 0.0570, 0.0889], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 23:00:17,543 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:26,933 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:28,666 INFO [train.py:903] (0/4) Epoch 23, batch 800, loss[loss=0.2699, simple_loss=0.3392, pruned_loss=0.1003, over 19665.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06346, over 3786611.76 frames. 
], batch size: 60, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:46,706 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 23:00:48,191 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:57,135 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.089e+02 6.161e+02 7.478e+02 1.780e+03, threshold=1.232e+03, percent-clipped=6.0 +2023-04-02 23:01:29,767 INFO [train.py:903] (0/4) Epoch 23, batch 850, loss[loss=0.2066, simple_loss=0.295, pruned_loss=0.05909, over 19687.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2886, pruned_loss=0.06378, over 3806402.56 frames. ], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:01:49,507 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1641, 2.0737, 2.0260, 2.3409, 2.0960, 1.8946, 1.9870, 2.1918], + device='cuda:0'), covar=tensor([0.0903, 0.1239, 0.1226, 0.0777, 0.1066, 0.0519, 0.1143, 0.0619], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0357, 0.0316, 0.0252, 0.0306, 0.0254, 0.0312, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:02:04,867 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:02:25,394 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 23:02:31,810 INFO [train.py:903] (0/4) Epoch 23, batch 900, loss[loss=0.2035, simple_loss=0.2726, pruned_loss=0.06717, over 16008.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06379, over 3811441.38 frames. ], batch size: 35, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:02,019 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 5.070e+02 6.543e+02 8.443e+02 1.332e+03, threshold=1.309e+03, percent-clipped=3.0 +2023-04-02 23:03:32,678 INFO [train.py:903] (0/4) Epoch 23, batch 950, loss[loss=0.2238, simple_loss=0.3017, pruned_loss=0.07295, over 19609.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.0654, over 3812974.98 frames. ], batch size: 57, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:39,562 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 23:04:17,796 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:27,592 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:35,177 INFO [train.py:903] (0/4) Epoch 23, batch 1000, loss[loss=0.2255, simple_loss=0.3053, pruned_loss=0.07283, over 19770.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2899, pruned_loss=0.06509, over 3815514.53 frames. ], batch size: 56, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:04,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.146e+02 6.401e+02 7.951e+02 1.702e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 23:05:32,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 23:05:33,601 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:05:37,849 INFO [train.py:903] (0/4) Epoch 23, batch 1050, loss[loss=0.2056, simple_loss=0.2816, pruned_loss=0.06481, over 19749.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2898, pruned_loss=0.06508, over 3828226.29 frames. ], batch size: 51, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:42,793 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151270.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:12,791 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 23:06:14,343 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:21,346 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151300.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:40,104 INFO [train.py:903] (0/4) Epoch 23, batch 1100, loss[loss=0.1931, simple_loss=0.263, pruned_loss=0.06156, over 19736.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2894, pruned_loss=0.06473, over 3835681.29 frames. ], batch size: 45, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:06:40,432 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151316.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:57,240 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6300, 1.7821, 2.1768, 1.9467, 3.2839, 2.6826, 3.5808, 1.7663], + device='cuda:0'), covar=tensor([0.2510, 0.4251, 0.2737, 0.1892, 0.1560, 0.2091, 0.1594, 0.4114], + device='cuda:0'), in_proj_covar=tensor([0.0536, 0.0644, 0.0714, 0.0486, 0.0618, 0.0529, 0.0661, 0.0550], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 23:07:09,142 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 5.070e+02 6.152e+02 7.617e+02 1.362e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-04-02 23:07:40,862 INFO [train.py:903] (0/4) Epoch 23, batch 1150, loss[loss=0.2051, simple_loss=0.2713, pruned_loss=0.06947, over 19744.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06407, over 3828871.78 frames. ], batch size: 47, lr: 3.60e-03, grad_scale: 4.0 +2023-04-02 23:07:52,344 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9693, 1.3543, 1.1168, 0.9545, 1.2539, 0.8973, 1.0248, 1.2282], + device='cuda:0'), covar=tensor([0.0593, 0.0639, 0.0676, 0.0624, 0.0432, 0.0985, 0.0465, 0.0400], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0266, 0.0247, 0.0338, 0.0292, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:07:55,725 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151377.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:08:43,939 INFO [train.py:903] (0/4) Epoch 23, batch 1200, loss[loss=0.2818, simple_loss=0.3403, pruned_loss=0.1116, over 13559.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06418, over 3818392.49 frames. 
], batch size: 136, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:09:10,455 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6216, 1.2200, 1.2681, 1.5051, 1.1254, 1.3688, 1.2403, 1.4571], + device='cuda:0'), covar=tensor([0.1016, 0.1206, 0.1452, 0.0906, 0.1232, 0.0611, 0.1426, 0.0754], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0355, 0.0314, 0.0251, 0.0303, 0.0252, 0.0309, 0.0256], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:09:14,770 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.896e+02 6.001e+02 7.643e+02 1.247e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 23:09:18,087 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 23:09:45,194 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:09:46,024 INFO [train.py:903] (0/4) Epoch 23, batch 1250, loss[loss=0.2063, simple_loss=0.2928, pruned_loss=0.05986, over 18375.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.0646, over 3823902.35 frames. ], batch size: 84, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:16,513 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:10:27,795 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 23:10:46,698 INFO [train.py:903] (0/4) Epoch 23, batch 1300, loss[loss=0.2554, simple_loss=0.3244, pruned_loss=0.09317, over 19649.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.289, pruned_loss=0.06484, over 3816125.00 frames. ], batch size: 55, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:55,127 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3130, 1.3347, 1.4877, 1.4505, 1.7688, 1.8211, 1.8195, 0.6260], + device='cuda:0'), covar=tensor([0.2484, 0.4265, 0.2664, 0.1983, 0.1648, 0.2290, 0.1423, 0.4673], + device='cuda:0'), in_proj_covar=tensor([0.0539, 0.0648, 0.0718, 0.0488, 0.0621, 0.0533, 0.0666, 0.0554], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 23:10:55,327 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 23:11:16,028 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 5.118e+02 6.046e+02 8.032e+02 1.744e+03, threshold=1.209e+03, percent-clipped=5.0 +2023-04-02 23:11:22,857 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:11:25,567 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4310, 1.4323, 1.7008, 1.6255, 2.3624, 2.1104, 2.4137, 1.0108], + device='cuda:0'), covar=tensor([0.2555, 0.4397, 0.2710, 0.1930, 0.1587, 0.2204, 0.1655, 0.4636], + device='cuda:0'), in_proj_covar=tensor([0.0540, 0.0650, 0.0719, 0.0490, 0.0622, 0.0534, 0.0668, 0.0555], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 23:11:46,626 INFO [train.py:903] (0/4) Epoch 23, batch 1350, loss[loss=0.2137, simple_loss=0.3011, pruned_loss=0.06311, over 19592.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2885, pruned_loss=0.06465, over 3819930.27 frames. 
], batch size: 57, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:12:48,237 INFO [train.py:903] (0/4) Epoch 23, batch 1400, loss[loss=0.21, simple_loss=0.298, pruned_loss=0.06095, over 19699.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2889, pruned_loss=0.06471, over 3820326.14 frames. ], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:08,471 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:17,803 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 5.043e+02 6.240e+02 8.237e+02 1.280e+03, threshold=1.248e+03, percent-clipped=3.0 +2023-04-02 23:13:21,398 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:38,316 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151658.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:40,474 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151660.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:41,838 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:48,339 INFO [train.py:903] (0/4) Epoch 23, batch 1450, loss[loss=0.2263, simple_loss=0.3055, pruned_loss=0.07352, over 19418.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06427, over 3825662.00 frames. ], batch size: 70, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:48,372 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 23:14:36,938 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:14:49,057 INFO [train.py:903] (0/4) Epoch 23, batch 1500, loss[loss=0.1671, simple_loss=0.2497, pruned_loss=0.04227, over 19765.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06386, over 3826109.41 frames. ], batch size: 47, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:18,487 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.905e+02 6.054e+02 7.299e+02 2.065e+03, threshold=1.211e+03, percent-clipped=4.0 +2023-04-02 23:15:40,356 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151759.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:15:47,614 INFO [train.py:903] (0/4) Epoch 23, batch 1550, loss[loss=0.2113, simple_loss=0.2946, pruned_loss=0.06395, over 19477.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2885, pruned_loss=0.06436, over 3819166.93 frames. ], batch size: 64, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:59,948 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:16:50,043 INFO [train.py:903] (0/4) Epoch 23, batch 1600, loss[loss=0.2062, simple_loss=0.2852, pruned_loss=0.06359, over 18232.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06415, over 3805415.56 frames. ], batch size: 83, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:16:53,576 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:17:10,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-02 23:17:20,214 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.863e+02 5.887e+02 6.951e+02 2.426e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-04-02 23:17:50,202 INFO [train.py:903] (0/4) Epoch 23, batch 1650, loss[loss=0.2008, simple_loss=0.2868, pruned_loss=0.05741, over 19604.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06441, over 3818953.03 frames. ], batch size: 57, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:18:39,713 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2258, 1.3323, 1.7673, 1.3544, 2.7825, 3.8009, 3.5580, 4.0582], + device='cuda:0'), covar=tensor([0.1608, 0.3729, 0.3262, 0.2396, 0.0586, 0.0183, 0.0198, 0.0225], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0323, 0.0355, 0.0265, 0.0244, 0.0189, 0.0217, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 23:18:51,777 INFO [train.py:903] (0/4) Epoch 23, batch 1700, loss[loss=0.1876, simple_loss=0.2652, pruned_loss=0.05502, over 19427.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06397, over 3808768.32 frames. ], batch size: 48, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:18:53,366 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:21,472 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.018e+02 6.073e+02 7.613e+02 1.748e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 23:19:23,180 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:26,202 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 23:19:52,372 INFO [train.py:903] (0/4) Epoch 23, batch 1750, loss[loss=0.1904, simple_loss=0.2738, pruned_loss=0.05351, over 19728.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2879, pruned_loss=0.06377, over 3818442.82 frames. ], batch size: 51, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:20:33,144 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-152000.pt +2023-04-02 23:20:53,712 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:20:54,414 INFO [train.py:903] (0/4) Epoch 23, batch 1800, loss[loss=0.1914, simple_loss=0.2812, pruned_loss=0.05078, over 17329.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2857, pruned_loss=0.06277, over 3813887.19 frames. 
], batch size: 101, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:21:13,130 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:24,598 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152040.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:26,673 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 5.200e+02 6.601e+02 8.823e+02 1.720e+03, threshold=1.320e+03, percent-clipped=12.0 +2023-04-02 23:21:36,352 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:40,797 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0915, 1.3995, 1.6905, 1.1522, 2.6187, 3.3650, 3.0509, 3.5531], + device='cuda:0'), covar=tensor([0.1703, 0.3682, 0.3364, 0.2514, 0.0552, 0.0183, 0.0239, 0.0259], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0324, 0.0355, 0.0265, 0.0245, 0.0188, 0.0217, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 23:21:44,275 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:48,439 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 23:21:55,072 INFO [train.py:903] (0/4) Epoch 23, batch 1850, loss[loss=0.2126, simple_loss=0.2966, pruned_loss=0.0643, over 18120.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06298, over 3820143.81 frames. ], batch size: 83, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:22:26,622 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 23:22:54,485 INFO [train.py:903] (0/4) Epoch 23, batch 1900, loss[loss=0.2186, simple_loss=0.2785, pruned_loss=0.07936, over 19773.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06465, over 3819651.07 frames. ], batch size: 47, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:23:09,860 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 23:23:16,371 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 23:23:26,804 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 4.994e+02 5.968e+02 7.712e+02 2.482e+03, threshold=1.194e+03, percent-clipped=3.0 +2023-04-02 23:23:36,562 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5074, 1.5955, 1.7913, 1.7600, 2.6850, 2.4075, 2.8471, 1.1523], + device='cuda:0'), covar=tensor([0.2473, 0.4345, 0.2750, 0.1890, 0.1534, 0.2045, 0.1436, 0.4474], + device='cuda:0'), in_proj_covar=tensor([0.0534, 0.0643, 0.0715, 0.0486, 0.0618, 0.0530, 0.0663, 0.0549], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 23:23:41,404 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-02 23:23:52,764 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:55,363 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:56,144 INFO [train.py:903] (0/4) Epoch 23, batch 1950, loss[loss=0.1862, simple_loss=0.2773, pruned_loss=0.04757, over 19723.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.287, pruned_loss=0.06365, over 3814573.22 frames. ], batch size: 63, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:24:58,529 INFO [train.py:903] (0/4) Epoch 23, batch 2000, loss[loss=0.2263, simple_loss=0.3133, pruned_loss=0.06965, over 19593.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06393, over 3820139.97 frames. ], batch size: 57, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:25:06,473 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-02 23:25:28,733 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.661e+02 5.613e+02 7.041e+02 1.127e+03, threshold=1.123e+03, percent-clipped=0.0 +2023-04-02 23:25:30,268 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:25:53,680 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 23:25:58,367 INFO [train.py:903] (0/4) Epoch 23, batch 2050, loss[loss=0.2002, simple_loss=0.288, pruned_loss=0.05626, over 19807.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2879, pruned_loss=0.06363, over 3813974.73 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:26:13,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 23:26:13,467 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:14,198 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 23:26:29,846 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:36,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 23:26:58,781 INFO [train.py:903] (0/4) Epoch 23, batch 2100, loss[loss=0.1916, simple_loss=0.2784, pruned_loss=0.05238, over 19669.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06336, over 3818004.49 frames. ], batch size: 53, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:27:27,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 23:27:31,218 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.808e+02 5.705e+02 7.050e+02 1.568e+03, threshold=1.141e+03, percent-clipped=6.0 +2023-04-02 23:27:48,004 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 23:27:59,774 INFO [train.py:903] (0/4) Epoch 23, batch 2150, loss[loss=0.229, simple_loss=0.3052, pruned_loss=0.07641, over 19669.00 frames. ], tot_loss[loss=0.208, simple_loss=0.288, pruned_loss=0.06395, over 3792044.62 frames. 
], batch size: 53, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:28:24,386 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2186, 1.9718, 1.8638, 2.1427, 1.9257, 1.8705, 1.8120, 2.0543], + device='cuda:0'), covar=tensor([0.0972, 0.1431, 0.1375, 0.1047, 0.1297, 0.0553, 0.1386, 0.0673], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0355, 0.0314, 0.0253, 0.0304, 0.0252, 0.0310, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:28:35,465 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 23:29:00,654 INFO [train.py:903] (0/4) Epoch 23, batch 2200, loss[loss=0.2396, simple_loss=0.3191, pruned_loss=0.08008, over 19661.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2873, pruned_loss=0.06357, over 3806891.42 frames. ], batch size: 55, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:29:07,429 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:31,743 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.084e+02 6.029e+02 8.181e+02 1.825e+03, threshold=1.206e+03, percent-clipped=10.0 +2023-04-02 23:29:37,615 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:58,449 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:30:01,458 INFO [train.py:903] (0/4) Epoch 23, batch 2250, loss[loss=0.2172, simple_loss=0.2981, pruned_loss=0.06815, over 19859.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2875, pruned_loss=0.06365, over 3809732.94 frames. ], batch size: 52, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:31:01,764 INFO [train.py:903] (0/4) Epoch 23, batch 2300, loss[loss=0.2275, simple_loss=0.3063, pruned_loss=0.07436, over 19472.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.06325, over 3816529.67 frames. ], batch size: 64, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:31:17,224 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 23:31:25,329 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:31:36,142 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.927e+02 5.902e+02 7.617e+02 2.113e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 23:31:55,711 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152559.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:04,337 INFO [train.py:903] (0/4) Epoch 23, batch 2350, loss[loss=0.1966, simple_loss=0.2846, pruned_loss=0.05433, over 19481.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06249, over 3829570.37 frames. ], batch size: 64, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:32:30,240 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:39,671 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 23:32:44,139 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-02 23:32:45,492 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:54,402 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:03,099 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 23:33:05,157 INFO [train.py:903] (0/4) Epoch 23, batch 2400, loss[loss=0.3627, simple_loss=0.4017, pruned_loss=0.1619, over 13800.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.286, pruned_loss=0.06276, over 3819436.15 frames. ], batch size: 136, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:33:28,191 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:38,293 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 5.347e+02 6.485e+02 7.646e+02 1.871e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 23:34:06,660 INFO [train.py:903] (0/4) Epoch 23, batch 2450, loss[loss=0.1895, simple_loss=0.2687, pruned_loss=0.05514, over 17327.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2859, pruned_loss=0.06289, over 3823415.94 frames. ], batch size: 38, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:34:37,228 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6624, 1.6174, 1.6346, 1.4376, 3.2219, 1.1469, 2.4396, 3.6882], + device='cuda:0'), covar=tensor([0.0465, 0.2555, 0.2633, 0.1918, 0.0662, 0.2523, 0.1286, 0.0224], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0369, 0.0390, 0.0350, 0.0377, 0.0355, 0.0385, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:34:51,410 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:06,552 INFO [train.py:903] (0/4) Epoch 23, batch 2500, loss[loss=0.1637, simple_loss=0.2374, pruned_loss=0.04502, over 19732.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2862, pruned_loss=0.06327, over 3814415.94 frames. 
], batch size: 45, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:35:12,378 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0188, 1.7214, 1.5920, 1.8780, 1.6378, 1.6927, 1.5229, 1.8687], + device='cuda:0'), covar=tensor([0.1082, 0.1439, 0.1610, 0.1094, 0.1408, 0.0574, 0.1525, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0355, 0.0313, 0.0252, 0.0302, 0.0250, 0.0309, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:35:29,547 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9930, 3.6624, 2.6170, 3.2019, 0.8693, 3.5936, 3.4254, 3.5686], + device='cuda:0'), covar=tensor([0.0897, 0.1186, 0.2058, 0.1038, 0.4258, 0.0829, 0.1011, 0.1528], + device='cuda:0'), in_proj_covar=tensor([0.0515, 0.0416, 0.0502, 0.0351, 0.0404, 0.0440, 0.0431, 0.0464], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:35:40,605 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.968e+02 5.949e+02 7.714e+02 2.745e+03, threshold=1.190e+03, percent-clipped=5.0 +2023-04-02 23:35:42,075 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152744.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:48,939 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:36:08,245 INFO [train.py:903] (0/4) Epoch 23, batch 2550, loss[loss=0.1912, simple_loss=0.2672, pruned_loss=0.05762, over 19365.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2855, pruned_loss=0.06248, over 3810493.39 frames. ], batch size: 47, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:36:28,459 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1501, 1.7407, 1.3693, 1.1890, 1.6008, 1.1506, 1.1635, 1.5508], + device='cuda:0'), covar=tensor([0.0826, 0.0747, 0.1000, 0.0784, 0.0539, 0.1179, 0.0610, 0.0438], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0314, 0.0334, 0.0265, 0.0246, 0.0336, 0.0288, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:36:57,882 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:37:01,071 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 23:37:08,446 INFO [train.py:903] (0/4) Epoch 23, batch 2600, loss[loss=0.1867, simple_loss=0.2735, pruned_loss=0.04994, over 19554.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.0631, over 3796402.38 frames. 
], batch size: 61, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:37:37,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9568, 0.9521, 1.0521, 1.0618, 1.3025, 1.2807, 1.2623, 0.5630], + device='cuda:0'), covar=tensor([0.1762, 0.3119, 0.1920, 0.1481, 0.1212, 0.1698, 0.1100, 0.4008], + device='cuda:0'), in_proj_covar=tensor([0.0536, 0.0647, 0.0718, 0.0487, 0.0620, 0.0533, 0.0664, 0.0552], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 23:37:40,425 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 4.724e+02 5.510e+02 7.301e+02 1.657e+03, threshold=1.102e+03, percent-clipped=4.0 +2023-04-02 23:38:08,554 INFO [train.py:903] (0/4) Epoch 23, batch 2650, loss[loss=0.2797, simple_loss=0.3379, pruned_loss=0.1107, over 12957.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.0633, over 3798848.56 frames. ], batch size: 136, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:38:27,765 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 23:39:08,680 INFO [train.py:903] (0/4) Epoch 23, batch 2700, loss[loss=0.2336, simple_loss=0.3084, pruned_loss=0.07943, over 19517.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2857, pruned_loss=0.06279, over 3808293.71 frames. ], batch size: 56, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:39:16,566 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:42,350 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.895e+02 5.730e+02 7.672e+02 1.465e+03, threshold=1.146e+03, percent-clipped=5.0 +2023-04-02 23:39:43,655 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:51,723 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:00,053 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:09,443 INFO [train.py:903] (0/4) Epoch 23, batch 2750, loss[loss=0.2261, simple_loss=0.3139, pruned_loss=0.06913, over 19768.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2862, pruned_loss=0.06303, over 3800572.25 frames. 
], batch size: 54, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:40:23,468 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1037, 1.2638, 1.5018, 1.3309, 2.7556, 1.0628, 2.1099, 3.0923], + device='cuda:0'), covar=tensor([0.0610, 0.2826, 0.2935, 0.1869, 0.0739, 0.2428, 0.1296, 0.0315], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0370, 0.0390, 0.0350, 0.0375, 0.0354, 0.0383, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:40:29,164 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7721, 1.5344, 1.6384, 2.3497, 1.6397, 2.0874, 2.0376, 1.8597], + device='cuda:0'), covar=tensor([0.0836, 0.0996, 0.1002, 0.0738, 0.0896, 0.0776, 0.0889, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0238, 0.0227, 0.0211, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 23:40:31,170 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:57,914 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:10,187 INFO [train.py:903] (0/4) Epoch 23, batch 2800, loss[loss=0.2157, simple_loss=0.2906, pruned_loss=0.07042, over 19675.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2859, pruned_loss=0.06294, over 3808086.26 frames. ], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:41:27,917 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:42,353 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.940e+02 6.140e+02 7.865e+02 1.529e+03, threshold=1.228e+03, percent-clipped=3.0 +2023-04-02 23:41:54,223 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 23:42:02,476 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:10,881 INFO [train.py:903] (0/4) Epoch 23, batch 2850, loss[loss=0.2096, simple_loss=0.298, pruned_loss=0.06062, over 19609.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06297, over 3816137.80 frames. 
], batch size: 50, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:42:11,224 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:33,222 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2881, 3.8188, 3.9346, 3.9231, 1.6925, 3.7337, 3.2816, 3.6824], + device='cuda:0'), covar=tensor([0.1770, 0.1051, 0.0664, 0.0795, 0.5632, 0.1026, 0.0721, 0.1177], + device='cuda:0'), in_proj_covar=tensor([0.0786, 0.0750, 0.0957, 0.0840, 0.0844, 0.0718, 0.0570, 0.0892], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 23:42:36,382 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:51,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0674, 2.6380, 2.8010, 3.3568, 2.5682, 3.0656, 2.9735, 2.9220], + device='cuda:0'), covar=tensor([0.0535, 0.0685, 0.0669, 0.0599, 0.0716, 0.0603, 0.0777, 0.0495], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0239, 0.0228, 0.0212, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-02 23:43:09,824 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 23:43:11,002 INFO [train.py:903] (0/4) Epoch 23, batch 2900, loss[loss=0.2284, simple_loss=0.3047, pruned_loss=0.076, over 19758.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2866, pruned_loss=0.0634, over 3794982.32 frames. ], batch size: 63, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:43:12,400 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7687, 1.3309, 1.5604, 1.5374, 3.3847, 1.2499, 2.3853, 3.7893], + device='cuda:0'), covar=tensor([0.0528, 0.2764, 0.2846, 0.1890, 0.0685, 0.2466, 0.1324, 0.0238], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0368, 0.0390, 0.0350, 0.0374, 0.0354, 0.0383, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:43:34,519 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:45,165 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.545e+02 5.074e+02 6.117e+02 7.609e+02 1.538e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 23:44:10,299 INFO [train.py:903] (0/4) Epoch 23, batch 2950, loss[loss=0.2524, simple_loss=0.3226, pruned_loss=0.09111, over 19786.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.287, pruned_loss=0.06379, over 3801221.60 frames. 
], batch size: 56, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:44:23,197 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6384, 4.1184, 4.2794, 4.2568, 1.5917, 4.0524, 3.5314, 4.0128], + device='cuda:0'), covar=tensor([0.1545, 0.0836, 0.0591, 0.0706, 0.5928, 0.0893, 0.0686, 0.1018], + device='cuda:0'), in_proj_covar=tensor([0.0780, 0.0743, 0.0950, 0.0833, 0.0838, 0.0711, 0.0566, 0.0885], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-02 23:44:25,524 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,204 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,244 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:45:09,887 INFO [train.py:903] (0/4) Epoch 23, batch 3000, loss[loss=0.205, simple_loss=0.2846, pruned_loss=0.06268, over 19679.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2879, pruned_loss=0.06423, over 3802267.83 frames. ], batch size: 59, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:45:09,888 INFO [train.py:928] (0/4) Computing validation loss +2023-04-02 23:45:23,394 INFO [train.py:937] (0/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2685, pruned_loss=0.03441, over 944034.00 frames. +2023-04-02 23:45:23,395 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-02 23:45:26,708 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 23:45:27,896 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9295, 4.5015, 2.6139, 3.8925, 0.9134, 4.4582, 4.3079, 4.4288], + device='cuda:0'), covar=tensor([0.0561, 0.0879, 0.2098, 0.0866, 0.4135, 0.0590, 0.0904, 0.1042], + device='cuda:0'), in_proj_covar=tensor([0.0511, 0.0414, 0.0498, 0.0349, 0.0402, 0.0437, 0.0429, 0.0463], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:45:40,632 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 23:45:55,393 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1376, 2.1931, 2.4516, 2.9812, 2.2363, 2.8086, 2.4839, 2.1121], + device='cuda:0'), covar=tensor([0.4600, 0.4224, 0.1983, 0.2739, 0.4515, 0.2356, 0.5225, 0.3589], + device='cuda:0'), in_proj_covar=tensor([0.0908, 0.0975, 0.0721, 0.0934, 0.0885, 0.0822, 0.0848, 0.0787], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 23:45:57,202 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.963e+02 5.132e+02 6.544e+02 7.997e+02 1.730e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 23:46:23,492 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-02 23:46:24,013 INFO [train.py:903] (0/4) Epoch 23, batch 3050, loss[loss=0.1778, simple_loss=0.2528, pruned_loss=0.05139, over 19401.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2878, pruned_loss=0.06408, over 3796795.45 frames. 
], batch size: 47, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:46:26,459 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8202, 4.4060, 2.8226, 3.8447, 0.9551, 4.3452, 4.2161, 4.3031], + device='cuda:0'), covar=tensor([0.0567, 0.0824, 0.1767, 0.0783, 0.3983, 0.0618, 0.0843, 0.1182], + device='cuda:0'), in_proj_covar=tensor([0.0511, 0.0414, 0.0497, 0.0349, 0.0401, 0.0437, 0.0430, 0.0463], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:47:00,333 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153296.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:05,917 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4462, 1.5392, 1.8694, 1.7732, 2.7815, 2.3904, 3.0033, 1.2883], + device='cuda:0'), covar=tensor([0.2540, 0.4384, 0.2665, 0.1826, 0.1534, 0.2128, 0.1505, 0.4468], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0644, 0.0715, 0.0486, 0.0617, 0.0531, 0.0662, 0.0551], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 23:47:25,736 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:26,469 INFO [train.py:903] (0/4) Epoch 23, batch 3100, loss[loss=0.2151, simple_loss=0.3032, pruned_loss=0.06348, over 19318.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06399, over 3807104.53 frames. ], batch size: 70, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:33,570 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:54,325 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:59,280 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 4.897e+02 6.414e+02 9.491e+02 6.432e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 23:48:01,113 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-02 23:48:03,099 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:48:13,731 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2504, 2.0488, 1.8459, 2.1983, 2.1859, 1.7087, 1.6899, 2.1878], + device='cuda:0'), covar=tensor([0.1166, 0.1848, 0.1823, 0.1296, 0.1510, 0.0944, 0.1935, 0.0917], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0357, 0.0314, 0.0254, 0.0305, 0.0252, 0.0310, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:48:25,988 INFO [train.py:903] (0/4) Epoch 23, batch 3150, loss[loss=0.164, simple_loss=0.2468, pruned_loss=0.04064, over 19735.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06294, over 3824899.71 frames. ], batch size: 51, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:48:54,115 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 23:48:57,789 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3273, 1.4547, 1.6174, 1.6390, 2.9680, 1.2032, 2.4807, 3.2811], + device='cuda:0'), covar=tensor([0.0570, 0.2826, 0.2845, 0.1776, 0.0690, 0.2492, 0.1255, 0.0331], + device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0370, 0.0392, 0.0352, 0.0376, 0.0354, 0.0385, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:49:03,481 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5892, 1.7699, 2.0815, 1.9082, 3.2538, 2.7665, 3.7812, 1.7499], + device='cuda:0'), covar=tensor([0.2436, 0.4274, 0.2754, 0.1825, 0.1461, 0.2004, 0.1360, 0.4180], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0645, 0.0716, 0.0486, 0.0617, 0.0532, 0.0662, 0.0552], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-02 23:49:03,699 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.46 vs. limit=5.0 +2023-04-02 23:49:26,056 INFO [train.py:903] (0/4) Epoch 23, batch 3200, loss[loss=0.1947, simple_loss=0.2781, pruned_loss=0.0556, over 19672.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2865, pruned_loss=0.06296, over 3826020.44 frames. ], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:49:54,849 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:00,122 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.043e+02 6.093e+02 8.078e+02 1.420e+03, threshold=1.219e+03, percent-clipped=2.0 +2023-04-02 23:50:17,427 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:26,644 INFO [train.py:903] (0/4) Epoch 23, batch 3250, loss[loss=0.2487, simple_loss=0.3248, pruned_loss=0.08632, over 19407.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.06343, over 3826506.00 frames. ], batch size: 70, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:50:43,125 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:48,912 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153484.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:51:05,693 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 23:51:27,761 INFO [train.py:903] (0/4) Epoch 23, batch 3300, loss[loss=0.1922, simple_loss=0.2799, pruned_loss=0.05227, over 19617.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2877, pruned_loss=0.06364, over 3829999.25 frames. ], batch size: 57, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:51:34,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 23:52:00,748 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.081e+02 6.216e+02 8.009e+02 2.047e+03, threshold=1.243e+03, percent-clipped=5.0 +2023-04-02 23:52:26,359 INFO [train.py:903] (0/4) Epoch 23, batch 3350, loss[loss=0.2197, simple_loss=0.2931, pruned_loss=0.07316, over 19494.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06393, over 3830731.08 frames. 
], batch size: 49, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:52:56,022 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5786, 2.2621, 1.6124, 1.5912, 2.0626, 1.3388, 1.4465, 1.9028], + device='cuda:0'), covar=tensor([0.1095, 0.0750, 0.1180, 0.0798, 0.0609, 0.1315, 0.0736, 0.0539], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0319, 0.0344, 0.0269, 0.0252, 0.0343, 0.0295, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:53:00,302 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:26,238 INFO [train.py:903] (0/4) Epoch 23, batch 3400, loss[loss=0.1972, simple_loss=0.2762, pruned_loss=0.0591, over 19668.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06458, over 3815667.51 frames. ], batch size: 55, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:53:37,516 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.46 vs. limit=2.0 +2023-04-02 23:53:47,128 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4360, 2.1040, 2.0940, 3.1337, 2.1486, 2.6398, 2.4681, 2.5402], + device='cuda:0'), covar=tensor([0.0687, 0.0800, 0.0859, 0.0654, 0.0800, 0.0668, 0.0850, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0219, 0.0223, 0.0237, 0.0225, 0.0211, 0.0186, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-02 23:53:56,978 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153640.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:58,246 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3221, 1.5446, 1.9843, 1.3807, 3.0896, 4.7311, 4.6067, 5.1321], + device='cuda:0'), covar=tensor([0.1646, 0.3631, 0.3266, 0.2395, 0.0581, 0.0185, 0.0171, 0.0202], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0266, 0.0245, 0.0189, 0.0218, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-02 23:54:01,368 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.295e+02 6.743e+02 8.549e+02 2.424e+03, threshold=1.349e+03, percent-clipped=5.0 +2023-04-02 23:54:17,531 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0012, 1.7378, 1.6782, 2.0847, 1.7327, 1.7465, 1.5442, 1.9007], + device='cuda:0'), covar=tensor([0.1111, 0.1611, 0.1574, 0.1017, 0.1394, 0.0588, 0.1547, 0.0806], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0360, 0.0317, 0.0256, 0.0308, 0.0254, 0.0313, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:54:28,040 INFO [train.py:903] (0/4) Epoch 23, batch 3450, loss[loss=0.2153, simple_loss=0.2988, pruned_loss=0.06588, over 19511.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06422, over 3809805.64 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:54:31,533 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 23:54:52,913 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0345, 2.1135, 2.3673, 2.7895, 2.0619, 2.6053, 2.4620, 2.1875], + device='cuda:0'), covar=tensor([0.4415, 0.4219, 0.1978, 0.2580, 0.4325, 0.2258, 0.4873, 0.3451], + device='cuda:0'), in_proj_covar=tensor([0.0907, 0.0972, 0.0719, 0.0934, 0.0882, 0.0819, 0.0847, 0.0788], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-02 23:55:30,145 INFO [train.py:903] (0/4) Epoch 23, batch 3500, loss[loss=0.2545, simple_loss=0.3233, pruned_loss=0.09284, over 18193.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2873, pruned_loss=0.06354, over 3810487.92 frames. ], batch size: 83, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:02,449 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.938e+02 5.821e+02 7.521e+02 2.332e+03, threshold=1.164e+03, percent-clipped=1.0 +2023-04-02 23:56:17,935 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:56:30,178 INFO [train.py:903] (0/4) Epoch 23, batch 3550, loss[loss=0.1955, simple_loss=0.2801, pruned_loss=0.05548, over 19767.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2877, pruned_loss=0.06381, over 3802608.43 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:50,046 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:57:30,137 INFO [train.py:903] (0/4) Epoch 23, batch 3600, loss[loss=0.2203, simple_loss=0.2942, pruned_loss=0.07317, over 19353.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2869, pruned_loss=0.06307, over 3807244.08 frames. ], batch size: 70, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:57:47,205 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 23:58:05,061 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 5.155e+02 6.351e+02 8.015e+02 2.586e+03, threshold=1.270e+03, percent-clipped=6.0 +2023-04-02 23:58:12,348 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:58:30,879 INFO [train.py:903] (0/4) Epoch 23, batch 3650, loss[loss=0.2454, simple_loss=0.3255, pruned_loss=0.08262, over 19466.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2866, pruned_loss=0.06304, over 3807941.45 frames. ], batch size: 64, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:42,969 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:08,904 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:31,666 INFO [train.py:903] (0/4) Epoch 23, batch 3700, loss[loss=0.1835, simple_loss=0.2547, pruned_loss=0.05612, over 19753.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2863, pruned_loss=0.06292, over 3818112.85 frames. 
], batch size: 46, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:59:57,424 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0120, 0.9107, 1.3441, 1.2425, 2.4635, 1.0284, 2.1976, 2.8021], + device='cuda:0'), covar=tensor([0.0804, 0.3957, 0.3314, 0.2249, 0.1203, 0.2823, 0.1331, 0.0523], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0369, 0.0391, 0.0352, 0.0375, 0.0352, 0.0385, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-02 23:59:57,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 00:00:00,727 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153941.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:00:04,690 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.617e+02 5.510e+02 6.874e+02 2.344e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-03 00:00:31,961 INFO [train.py:903] (0/4) Epoch 23, batch 3750, loss[loss=0.1786, simple_loss=0.2599, pruned_loss=0.04868, over 19792.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2871, pruned_loss=0.0633, over 3801886.30 frames. ], batch size: 47, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:13,574 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-154000.pt +2023-04-03 00:01:24,685 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 00:01:27,882 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:01:33,255 INFO [train.py:903] (0/4) Epoch 23, batch 3800, loss[loss=0.2097, simple_loss=0.2841, pruned_loss=0.06761, over 19673.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2862, pruned_loss=0.06283, over 3801970.15 frames. ], batch size: 53, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:59,820 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154036.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:02:05,110 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 00:02:08,273 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 4.923e+02 6.103e+02 7.526e+02 2.694e+03, threshold=1.221e+03, percent-clipped=9.0 +2023-04-03 00:02:33,013 INFO [train.py:903] (0/4) Epoch 23, batch 3850, loss[loss=0.2641, simple_loss=0.3327, pruned_loss=0.0977, over 18771.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2867, pruned_loss=0.06314, over 3807907.23 frames. ], batch size: 74, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:02:35,275 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154067.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:03:01,621 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-03 00:03:09,332 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:18,567 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:35,952 INFO [train.py:903] (0/4) Epoch 23, batch 3900, loss[loss=0.2172, simple_loss=0.296, pruned_loss=0.06923, over 19499.00 frames. 
], tot_loss[loss=0.2058, simple_loss=0.2864, pruned_loss=0.06265, over 3819693.58 frames. ], batch size: 64, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:09,484 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 4.608e+02 5.656e+02 7.392e+02 1.919e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-04-03 00:04:22,810 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:04:37,505 INFO [train.py:903] (0/4) Epoch 23, batch 3950, loss[loss=0.1923, simple_loss=0.2668, pruned_loss=0.05891, over 19627.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.06328, over 3822555.97 frames. ], batch size: 50, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:44,239 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 00:04:52,288 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:05:37,012 INFO [train.py:903] (0/4) Epoch 23, batch 4000, loss[loss=0.1786, simple_loss=0.2584, pruned_loss=0.04942, over 19404.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2878, pruned_loss=0.06369, over 3815891.87 frames. ], batch size: 47, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:06:06,076 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:06:12,359 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.130e+02 6.145e+02 8.525e+02 2.203e+03, threshold=1.229e+03, percent-clipped=9.0 +2023-04-03 00:06:27,014 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 00:06:37,054 INFO [train.py:903] (0/4) Epoch 23, batch 4050, loss[loss=0.2265, simple_loss=0.307, pruned_loss=0.07301, over 17400.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06303, over 3825975.24 frames. ], batch size: 101, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:07:01,630 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154285.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:17,517 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5019, 1.0939, 1.3665, 1.2011, 2.0880, 1.0208, 2.1144, 2.4575], + device='cuda:0'), covar=tensor([0.0932, 0.3196, 0.3040, 0.1906, 0.1154, 0.2254, 0.1186, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0371, 0.0394, 0.0355, 0.0377, 0.0354, 0.0387, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 00:07:20,311 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 00:07:23,111 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:29,627 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:37,604 INFO [train.py:903] (0/4) Epoch 23, batch 4100, loss[loss=0.239, simple_loss=0.3179, pruned_loss=0.08007, over 19631.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2871, pruned_loss=0.06346, over 3833769.85 frames. 
], batch size: 57, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:08:11,156 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.105e+02 5.940e+02 7.682e+02 1.555e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-03 00:08:13,567 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 00:08:16,177 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 00:08:25,562 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154355.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:08:39,412 INFO [train.py:903] (0/4) Epoch 23, batch 4150, loss[loss=0.2186, simple_loss=0.2999, pruned_loss=0.06862, over 19686.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2864, pruned_loss=0.06314, over 3814555.99 frames. ], batch size: 60, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:22,099 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:09:33,964 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154411.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:09:39,114 INFO [train.py:903] (0/4) Epoch 23, batch 4200, loss[loss=0.2116, simple_loss=0.2917, pruned_loss=0.06572, over 19325.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.06327, over 3814885.01 frames. ], batch size: 70, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:41,433 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 00:10:07,054 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:14,760 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.103e+02 4.744e+02 5.863e+02 7.378e+02 1.705e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-03 00:10:17,212 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154446.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:40,149 INFO [train.py:903] (0/4) Epoch 23, batch 4250, loss[loss=0.1843, simple_loss=0.272, pruned_loss=0.04827, over 19753.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2861, pruned_loss=0.06291, over 3812220.11 frames. ], batch size: 51, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:10:54,220 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 00:11:05,308 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 00:11:12,043 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:11:40,260 INFO [train.py:903] (0/4) Epoch 23, batch 4300, loss[loss=0.207, simple_loss=0.2877, pruned_loss=0.06316, over 19682.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.286, pruned_loss=0.06333, over 3813084.01 frames. 
], batch size: 60, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:11:53,648 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154526.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:12:02,711 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0865, 5.0910, 5.8385, 5.8704, 2.0816, 5.5896, 4.7290, 5.5153], + device='cuda:0'), covar=tensor([0.1624, 0.0832, 0.0555, 0.0608, 0.6009, 0.0801, 0.0621, 0.1107], + device='cuda:0'), in_proj_covar=tensor([0.0784, 0.0745, 0.0950, 0.0835, 0.0835, 0.0712, 0.0568, 0.0883], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 00:12:13,328 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.652e+02 5.888e+02 7.584e+02 1.931e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 00:12:24,496 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154553.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:33,752 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 00:12:35,188 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:41,455 INFO [train.py:903] (0/4) Epoch 23, batch 4350, loss[loss=0.2082, simple_loss=0.2959, pruned_loss=0.06023, over 19520.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.286, pruned_loss=0.06324, over 3823068.52 frames. ], batch size: 54, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:01,183 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:13:40,254 INFO [train.py:903] (0/4) Epoch 23, batch 4400, loss[loss=0.2304, simple_loss=0.3138, pruned_loss=0.07355, over 19676.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2871, pruned_loss=0.06364, over 3822153.66 frames. ], batch size: 55, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:46,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2097, 1.3030, 1.7306, 1.4307, 2.7202, 3.7611, 3.4660, 4.0212], + device='cuda:0'), covar=tensor([0.1733, 0.3942, 0.3467, 0.2402, 0.0634, 0.0200, 0.0223, 0.0252], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0322, 0.0351, 0.0263, 0.0243, 0.0187, 0.0215, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 00:14:04,361 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 00:14:14,043 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.197e+02 6.555e+02 7.915e+02 1.480e+03, threshold=1.311e+03, percent-clipped=6.0 +2023-04-03 00:14:15,161 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 00:14:16,465 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:18,497 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154648.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:23,872 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:27,560 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154656.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:38,570 INFO [train.py:903] (0/4) Epoch 23, batch 4450, loss[loss=0.2608, simple_loss=0.3296, pruned_loss=0.09598, over 19570.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06343, over 3826652.57 frames. ], batch size: 61, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:58,401 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154681.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:18,468 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:19,363 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154699.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:15:38,748 INFO [train.py:903] (0/4) Epoch 23, batch 4500, loss[loss=0.2504, simple_loss=0.3251, pruned_loss=0.08783, over 17507.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2885, pruned_loss=0.0645, over 3821959.84 frames. ], batch size: 101, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:15:59,640 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7406, 1.5472, 1.5387, 2.2267, 1.5345, 2.0879, 2.0210, 1.8672], + device='cuda:0'), covar=tensor([0.0810, 0.0955, 0.1008, 0.0722, 0.0911, 0.0732, 0.0843, 0.0659], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0221, 0.0226, 0.0240, 0.0227, 0.0212, 0.0187, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-03 00:16:01,943 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2813, 1.3601, 1.6127, 1.5021, 2.2521, 1.9840, 2.3227, 0.9403], + device='cuda:0'), covar=tensor([0.2772, 0.4652, 0.2947, 0.2152, 0.1576, 0.2453, 0.1510, 0.4850], + device='cuda:0'), in_proj_covar=tensor([0.0539, 0.0647, 0.0720, 0.0488, 0.0620, 0.0534, 0.0660, 0.0554], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 00:16:06,361 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154738.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:13,910 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.922e+02 6.448e+02 7.735e+02 1.395e+03, threshold=1.290e+03, percent-clipped=1.0 +2023-04-03 00:16:37,119 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:41,139 INFO [train.py:903] (0/4) Epoch 23, batch 4550, loss[loss=0.1958, simple_loss=0.2791, pruned_loss=0.05626, over 19774.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2881, pruned_loss=0.06443, over 3820327.17 frames. 
], batch size: 54, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:43,802 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:48,238 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 00:17:00,064 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154782.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:17:11,893 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 00:17:31,839 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154807.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:34,145 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154809.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:17:39,823 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154814.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:17:41,844 INFO [train.py:903] (0/4) Epoch 23, batch 4600, loss[loss=0.2031, simple_loss=0.2863, pruned_loss=0.05994, over 19696.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2885, pruned_loss=0.06449, over 3814342.12 frames. ], batch size: 59, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:17:43,457 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154817.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:02,716 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154834.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:04,525 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:13,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:17,274 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.757e+02 5.456e+02 7.137e+02 2.039e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-04-03 00:18:41,886 INFO [train.py:903] (0/4) Epoch 23, batch 4650, loss[loss=0.2074, simple_loss=0.2909, pruned_loss=0.06189, over 19722.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2875, pruned_loss=0.06371, over 3806874.94 frames. ], batch size: 63, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:18:57,533 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 00:19:09,934 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 00:19:42,546 INFO [train.py:903] (0/4) Epoch 23, batch 4700, loss[loss=0.2285, simple_loss=0.3063, pruned_loss=0.07539, over 18388.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06343, over 3794080.62 frames. ], batch size: 84, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:20:04,430 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 00:20:17,977 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.550e+02 5.511e+02 7.065e+02 1.410e+03, threshold=1.102e+03, percent-clipped=2.0 +2023-04-03 00:20:25,028 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154951.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:28,237 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:37,193 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1323, 1.3400, 1.5574, 1.4346, 2.7235, 1.0970, 2.3007, 3.0947], + device='cuda:0'), covar=tensor([0.0569, 0.2804, 0.2695, 0.1767, 0.0747, 0.2339, 0.1073, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0367, 0.0390, 0.0350, 0.0374, 0.0351, 0.0384, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 00:20:44,143 INFO [train.py:903] (0/4) Epoch 23, batch 4750, loss[loss=0.1842, simple_loss=0.266, pruned_loss=0.05121, over 19466.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06311, over 3809423.41 frames. ], batch size: 49, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:21:00,237 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:11,339 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8944, 1.1350, 1.4458, 0.6387, 1.8937, 2.1918, 1.9597, 2.3298], + device='cuda:0'), covar=tensor([0.1623, 0.3751, 0.3258, 0.2853, 0.0793, 0.0385, 0.0394, 0.0440], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0324, 0.0354, 0.0264, 0.0246, 0.0189, 0.0217, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 00:21:12,322 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:17,039 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:45,322 INFO [train.py:903] (0/4) Epoch 23, batch 4800, loss[loss=0.1994, simple_loss=0.2832, pruned_loss=0.05778, over 19337.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.0636, over 3798697.62 frames. ], batch size: 66, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:21:49,034 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:54,235 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:18,855 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:19,564 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.324e+02 6.216e+02 7.674e+02 2.163e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-03 00:22:26,085 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:44,538 INFO [train.py:903] (0/4) Epoch 23, batch 4850, loss[loss=0.228, simple_loss=0.3144, pruned_loss=0.07083, over 19390.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2874, pruned_loss=0.06359, over 3791784.02 frames. ], batch size: 70, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:22:49,269 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155070.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:23:03,350 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:09,627 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 00:23:11,841 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:21,471 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155095.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:23:28,250 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1578, 1.8567, 2.0578, 2.8716, 1.9830, 2.4288, 2.4558, 2.2357], + device='cuda:0'), covar=tensor([0.0773, 0.0891, 0.0911, 0.0726, 0.0864, 0.0761, 0.0894, 0.0624], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0241, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 00:23:29,054 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 00:23:32,628 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:34,406 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 00:23:34,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 00:23:44,625 INFO [train.py:903] (0/4) Epoch 23, batch 4900, loss[loss=0.1988, simple_loss=0.2877, pruned_loss=0.05496, over 18852.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.06449, over 3792269.59 frames. ], batch size: 74, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:23:44,639 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 00:24:04,394 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 00:24:20,234 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 5.163e+02 5.938e+02 7.647e+02 1.407e+03, threshold=1.188e+03, percent-clipped=5.0 +2023-04-03 00:24:46,212 INFO [train.py:903] (0/4) Epoch 23, batch 4950, loss[loss=0.2024, simple_loss=0.2901, pruned_loss=0.05736, over 19638.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2888, pruned_loss=0.06442, over 3789670.27 frames. ], batch size: 55, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:01,074 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 00:25:21,570 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155197.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:22,303 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-03 00:25:34,809 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:44,533 INFO [train.py:903] (0/4) Epoch 23, batch 5000, loss[loss=0.1824, simple_loss=0.2563, pruned_loss=0.0543, over 19743.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2881, pruned_loss=0.06455, over 3790227.23 frames. ], batch size: 46, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:52,527 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 00:26:02,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:26:03,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 00:26:19,060 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.751e+02 5.889e+02 7.363e+02 1.722e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 00:26:43,568 INFO [train.py:903] (0/4) Epoch 23, batch 5050, loss[loss=0.2217, simple_loss=0.3024, pruned_loss=0.07053, over 19761.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2881, pruned_loss=0.06455, over 3794200.23 frames. ], batch size: 54, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:17,599 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 00:27:42,609 INFO [train.py:903] (0/4) Epoch 23, batch 5100, loss[loss=0.2236, simple_loss=0.3043, pruned_loss=0.07143, over 19737.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06467, over 3789938.38 frames. ], batch size: 63, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:53,109 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 00:27:56,475 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 00:28:01,511 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 00:28:10,591 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:11,044 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.61 vs. limit=5.0 +2023-04-03 00:28:18,268 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.084e+02 6.467e+02 7.878e+02 1.414e+03, threshold=1.293e+03, percent-clipped=6.0 +2023-04-03 00:28:36,963 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:43,672 INFO [train.py:903] (0/4) Epoch 23, batch 5150, loss[loss=0.2303, simple_loss=0.3067, pruned_loss=0.07696, over 19762.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2887, pruned_loss=0.06458, over 3804029.86 frames. ], batch size: 51, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:28:56,737 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 00:29:08,611 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:29:30,423 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-03 00:29:45,071 INFO [train.py:903] (0/4) Epoch 23, batch 5200, loss[loss=0.2463, simple_loss=0.3232, pruned_loss=0.08471, over 19657.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06436, over 3809949.03 frames. ], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:29:58,666 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 00:30:02,277 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:19,768 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 5.305e+02 6.432e+02 7.969e+02 2.733e+03, threshold=1.286e+03, percent-clipped=6.0 +2023-04-03 00:30:30,719 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:30,789 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:34,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 00:30:41,477 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 00:30:44,906 INFO [train.py:903] (0/4) Epoch 23, batch 5250, loss[loss=0.1753, simple_loss=0.2486, pruned_loss=0.05097, over 19793.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2874, pruned_loss=0.06393, over 3822595.38 frames. ], batch size: 48, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:30:55,505 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155475.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:59,135 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:31:45,245 INFO [train.py:903] (0/4) Epoch 23, batch 5300, loss[loss=0.2377, simple_loss=0.3165, pruned_loss=0.07947, over 19723.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2874, pruned_loss=0.06403, over 3810833.90 frames. ], batch size: 63, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:32:03,696 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 00:32:21,415 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 4.703e+02 5.856e+02 7.687e+02 1.612e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 00:32:22,928 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:32:46,436 INFO [train.py:903] (0/4) Epoch 23, batch 5350, loss[loss=0.2125, simple_loss=0.2927, pruned_loss=0.06618, over 19350.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2868, pruned_loss=0.06329, over 3807721.63 frames. ], batch size: 66, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:18,084 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 00:33:46,942 INFO [train.py:903] (0/4) Epoch 23, batch 5400, loss[loss=0.2445, simple_loss=0.317, pruned_loss=0.08599, over 19654.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.06325, over 3818870.31 frames. 
], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:56,238 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155623.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:34:21,898 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.892e+02 4.747e+02 5.806e+02 7.220e+02 1.360e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 00:34:48,075 INFO [train.py:903] (0/4) Epoch 23, batch 5450, loss[loss=0.2377, simple_loss=0.3133, pruned_loss=0.08102, over 19491.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06339, over 3816152.41 frames. ], batch size: 64, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:34:58,337 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155675.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:39,944 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:47,576 INFO [train.py:903] (0/4) Epoch 23, batch 5500, loss[loss=0.2487, simple_loss=0.3252, pruned_loss=0.08604, over 19566.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2884, pruned_loss=0.06425, over 3817116.94 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:09,966 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:36:13,479 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 00:36:24,173 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.057e+02 6.298e+02 8.158e+02 1.659e+03, threshold=1.260e+03, percent-clipped=6.0 +2023-04-03 00:36:25,533 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8045, 3.4686, 2.5252, 3.1687, 0.9598, 3.4424, 3.3380, 3.4076], + device='cuda:0'), covar=tensor([0.0807, 0.1125, 0.1829, 0.0934, 0.3740, 0.0774, 0.0976, 0.1135], + device='cuda:0'), in_proj_covar=tensor([0.0518, 0.0418, 0.0502, 0.0352, 0.0405, 0.0442, 0.0433, 0.0467], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 00:36:46,709 INFO [train.py:903] (0/4) Epoch 23, batch 5550, loss[loss=0.1938, simple_loss=0.2655, pruned_loss=0.06099, over 19055.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.06454, over 3822408.88 frames. ], batch size: 42, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:56,203 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 00:37:30,487 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:30,623 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:42,233 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 00:37:48,755 INFO [train.py:903] (0/4) Epoch 23, batch 5600, loss[loss=0.1957, simple_loss=0.2809, pruned_loss=0.05526, over 19612.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06416, over 3820001.76 frames. 
], batch size: 57, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:37:52,342 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155819.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:02,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:23,384 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 5.365e+02 7.033e+02 8.601e+02 1.530e+03, threshold=1.407e+03, percent-clipped=6.0 +2023-04-03 00:38:29,809 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3473, 1.9569, 1.5394, 1.0943, 1.9497, 1.0528, 1.2086, 1.8971], + device='cuda:0'), covar=tensor([0.1037, 0.0796, 0.1060, 0.1115, 0.0571, 0.1452, 0.0827, 0.0444], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0314, 0.0337, 0.0265, 0.0247, 0.0339, 0.0290, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 00:38:48,661 INFO [train.py:903] (0/4) Epoch 23, batch 5650, loss[loss=0.2198, simple_loss=0.2981, pruned_loss=0.07076, over 19666.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2873, pruned_loss=0.06437, over 3821396.45 frames. ], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:39:12,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-03 00:39:33,337 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 00:39:47,798 INFO [train.py:903] (0/4) Epoch 23, batch 5700, loss[loss=0.2411, simple_loss=0.3144, pruned_loss=0.0839, over 19571.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.286, pruned_loss=0.06347, over 3829748.76 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:04,254 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 00:40:10,823 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:24,776 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.906e+02 6.184e+02 7.924e+02 2.131e+03, threshold=1.237e+03, percent-clipped=2.0 +2023-04-03 00:40:47,843 INFO [train.py:903] (0/4) Epoch 23, batch 5750, loss[loss=0.1759, simple_loss=0.2632, pruned_loss=0.04424, over 19588.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2864, pruned_loss=0.06344, over 3827825.69 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:49,188 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155967.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:51,093 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 00:40:58,779 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 00:41:04,139 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-03 00:41:29,692 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-156000.pt +2023-04-03 00:41:48,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4555, 1.5620, 1.8088, 1.7018, 2.4079, 2.2028, 2.5646, 1.0881], + device='cuda:0'), covar=tensor([0.2510, 0.4253, 0.2660, 0.1968, 0.1584, 0.2249, 0.1456, 0.4498], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0645, 0.0718, 0.0488, 0.0620, 0.0534, 0.0657, 0.0551], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 00:41:50,978 INFO [train.py:903] (0/4) Epoch 23, batch 5800, loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06963, over 19614.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06359, over 3839970.33 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:41:54,502 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:42:19,973 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9539, 1.9351, 1.7964, 1.5590, 1.5142, 1.5664, 0.3378, 0.8655], + device='cuda:0'), covar=tensor([0.0676, 0.0646, 0.0478, 0.0784, 0.1267, 0.0934, 0.1434, 0.1182], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0357, 0.0363, 0.0387, 0.0466, 0.0393, 0.0339, 0.0345], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 00:42:25,081 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 4.982e+02 6.301e+02 7.857e+02 1.493e+03, threshold=1.260e+03, percent-clipped=3.0 +2023-04-03 00:42:50,183 INFO [train.py:903] (0/4) Epoch 23, batch 5850, loss[loss=0.1937, simple_loss=0.2702, pruned_loss=0.05862, over 19587.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2873, pruned_loss=0.06354, over 3839624.31 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:42:52,029 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. limit=5.0 +2023-04-03 00:43:08,238 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:14,839 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156088.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:35,999 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0996, 1.3290, 1.6893, 1.2946, 2.7735, 3.6845, 3.4293, 4.0100], + device='cuda:0'), covar=tensor([0.1792, 0.4059, 0.3653, 0.2578, 0.0660, 0.0209, 0.0250, 0.0264], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0326, 0.0355, 0.0265, 0.0247, 0.0191, 0.0218, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 00:43:48,541 INFO [train.py:903] (0/4) Epoch 23, batch 5900, loss[loss=0.2376, simple_loss=0.3128, pruned_loss=0.0812, over 19690.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06356, over 3833472.01 frames. ], batch size: 53, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:43:52,961 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-03 00:44:10,841 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:14,056 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 00:44:24,575 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.043e+02 6.365e+02 8.179e+02 2.050e+03, threshold=1.273e+03, percent-clipped=8.0 +2023-04-03 00:44:25,633 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:48,118 INFO [train.py:903] (0/4) Epoch 23, batch 5950, loss[loss=0.2255, simple_loss=0.2998, pruned_loss=0.07563, over 19111.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2885, pruned_loss=0.06453, over 3819271.04 frames. ], batch size: 69, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:44:50,267 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 00:44:55,112 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7640, 1.8594, 1.9369, 2.5018, 1.8737, 2.4039, 2.0927, 1.6915], + device='cuda:0'), covar=tensor([0.4243, 0.3938, 0.2413, 0.2610, 0.3884, 0.2144, 0.5408, 0.4536], + device='cuda:0'), in_proj_covar=tensor([0.0906, 0.0976, 0.0723, 0.0935, 0.0889, 0.0824, 0.0846, 0.0788], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 00:45:19,170 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:30,628 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 00:45:33,831 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 00:45:34,640 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6393, 1.5027, 1.4806, 2.0564, 1.5025, 1.9388, 1.9439, 1.7426], + device='cuda:0'), covar=tensor([0.0819, 0.0945, 0.1035, 0.0749, 0.0952, 0.0746, 0.0841, 0.0703], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0225, 0.0241, 0.0226, 0.0212, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 00:45:47,971 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:48,678 INFO [train.py:903] (0/4) Epoch 23, batch 6000, loss[loss=0.1756, simple_loss=0.2536, pruned_loss=0.04882, over 19754.00 frames. ], tot_loss[loss=0.208, simple_loss=0.288, pruned_loss=0.06399, over 3830444.87 frames. ], batch size: 47, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:45:48,679 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 00:46:01,143 INFO [train.py:937] (0/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2684, pruned_loss=0.03439, over 944034.00 frames. 
+2023-04-03 00:46:01,143 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 00:46:23,279 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:46:30,901 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7241, 1.5384, 1.5552, 2.1851, 1.5134, 2.0677, 2.0189, 1.8881], + device='cuda:0'), covar=tensor([0.0818, 0.0918, 0.1019, 0.0765, 0.0915, 0.0734, 0.0826, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0221, 0.0225, 0.0240, 0.0226, 0.0212, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 00:46:37,246 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.873e+02 6.527e+02 8.069e+02 1.468e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-03 00:46:55,676 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156261.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:47:00,357 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4001, 1.3807, 1.5717, 1.5527, 1.7188, 1.9525, 1.8339, 0.5188], + device='cuda:0'), covar=tensor([0.2395, 0.4256, 0.2677, 0.1904, 0.1703, 0.2158, 0.1414, 0.4853], + device='cuda:0'), in_proj_covar=tensor([0.0540, 0.0652, 0.0725, 0.0492, 0.0626, 0.0538, 0.0661, 0.0556], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 00:47:01,914 INFO [train.py:903] (0/4) Epoch 23, batch 6050, loss[loss=0.1905, simple_loss=0.2774, pruned_loss=0.05179, over 19800.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2873, pruned_loss=0.0641, over 3812753.83 frames. ], batch size: 56, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:47:23,879 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 00:47:53,130 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:02,112 INFO [train.py:903] (0/4) Epoch 23, batch 6100, loss[loss=0.2838, simple_loss=0.3384, pruned_loss=0.1146, over 13673.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2865, pruned_loss=0.06384, over 3807805.60 frames. ], batch size: 136, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:48:28,001 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:37,367 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.768e+02 6.249e+02 8.138e+02 1.749e+03, threshold=1.250e+03, percent-clipped=2.0 +2023-04-03 00:48:58,826 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:01,847 INFO [train.py:903] (0/4) Epoch 23, batch 6150, loss[loss=0.328, simple_loss=0.3839, pruned_loss=0.1361, over 19460.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2879, pruned_loss=0.0649, over 3797670.60 frames. ], batch size: 64, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:49:31,008 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:31,820 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 00:49:58,228 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6854, 1.5826, 1.5548, 1.9644, 1.5089, 1.9496, 1.9239, 1.7544], + device='cuda:0'), covar=tensor([0.0798, 0.0909, 0.0972, 0.0731, 0.0870, 0.0714, 0.0817, 0.0687], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0225, 0.0240, 0.0227, 0.0212, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-03 00:50:00,606 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156415.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:01,347 INFO [train.py:903] (0/4) Epoch 23, batch 6200, loss[loss=0.1851, simple_loss=0.2761, pruned_loss=0.04704, over 19776.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2875, pruned_loss=0.0641, over 3798544.29 frames. ], batch size: 54, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:50:22,375 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156432.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:38,864 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.815e+02 5.704e+02 6.895e+02 2.552e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-03 00:51:02,807 INFO [train.py:903] (0/4) Epoch 23, batch 6250, loss[loss=0.2417, simple_loss=0.3213, pruned_loss=0.08102, over 19599.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2872, pruned_loss=0.06368, over 3797223.59 frames. ], batch size: 61, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:51:32,679 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 00:52:02,820 INFO [train.py:903] (0/4) Epoch 23, batch 6300, loss[loss=0.2372, simple_loss=0.3179, pruned_loss=0.07823, over 19657.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06331, over 3819412.44 frames. ], batch size: 55, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:52:04,422 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:34,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156542.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:39,263 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.353e+02 6.743e+02 8.019e+02 1.408e+03, threshold=1.349e+03, percent-clipped=4.0 +2023-04-03 00:52:40,687 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:03,411 INFO [train.py:903] (0/4) Epoch 23, batch 6350, loss[loss=0.2017, simple_loss=0.2973, pruned_loss=0.05308, over 19545.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.0629, over 3814303.63 frames. 
], batch size: 56, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:53:17,247 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:28,411 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5630, 1.6775, 2.1438, 1.8679, 3.2465, 2.7142, 3.4936, 1.7342], + device='cuda:0'), covar=tensor([0.2544, 0.4486, 0.2761, 0.1939, 0.1491, 0.2093, 0.1537, 0.4186], + device='cuda:0'), in_proj_covar=tensor([0.0539, 0.0651, 0.0723, 0.0492, 0.0624, 0.0536, 0.0660, 0.0554], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 00:54:02,672 INFO [train.py:903] (0/4) Epoch 23, batch 6400, loss[loss=0.2039, simple_loss=0.2951, pruned_loss=0.05632, over 19608.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06428, over 3791700.80 frames. ], batch size: 57, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:54:39,391 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 4.834e+02 5.927e+02 7.987e+02 2.615e+03, threshold=1.185e+03, percent-clipped=4.0 +2023-04-03 00:54:46,368 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:04,069 INFO [train.py:903] (0/4) Epoch 23, batch 6450, loss[loss=0.1814, simple_loss=0.2641, pruned_loss=0.04937, over 19728.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2888, pruned_loss=0.06422, over 3789325.06 frames. ], batch size: 51, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:55:35,960 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:47,926 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 00:56:04,457 INFO [train.py:903] (0/4) Epoch 23, batch 6500, loss[loss=0.2567, simple_loss=0.307, pruned_loss=0.1032, over 19785.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06418, over 3778824.53 frames. ], batch size: 49, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:56:10,981 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 00:56:39,931 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.077e+02 6.090e+02 8.057e+02 1.603e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-03 00:57:04,759 INFO [train.py:903] (0/4) Epoch 23, batch 6550, loss[loss=0.216, simple_loss=0.2928, pruned_loss=0.06956, over 19625.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2884, pruned_loss=0.06425, over 3776477.69 frames. ], batch size: 50, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:57:06,351 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:06,419 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:50,626 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:04,881 INFO [train.py:903] (0/4) Epoch 23, batch 6600, loss[loss=0.176, simple_loss=0.2504, pruned_loss=0.0508, over 19387.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2884, pruned_loss=0.06418, over 3789562.96 frames. 
], batch size: 48, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:58:20,340 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:41,922 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.163e+02 6.336e+02 8.010e+02 1.885e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 00:59:05,157 INFO [train.py:903] (0/4) Epoch 23, batch 6650, loss[loss=0.2052, simple_loss=0.2868, pruned_loss=0.06178, over 19293.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06419, over 3788273.39 frames. ], batch size: 66, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:59:51,159 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156903.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:59:59,079 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3699, 3.8748, 3.9851, 3.9729, 1.6215, 3.7924, 3.2743, 3.7260], + device='cuda:0'), covar=tensor([0.1651, 0.0901, 0.0700, 0.0797, 0.5693, 0.0905, 0.0770, 0.1171], + device='cuda:0'), in_proj_covar=tensor([0.0786, 0.0747, 0.0956, 0.0832, 0.0836, 0.0721, 0.0571, 0.0882], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 01:00:07,300 INFO [train.py:903] (0/4) Epoch 23, batch 6700, loss[loss=0.2834, simple_loss=0.3389, pruned_loss=0.114, over 13577.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06332, over 3792451.09 frames. ], batch size: 135, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:00:41,730 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.603e+02 5.252e+02 6.535e+02 7.903e+02 1.565e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-04-03 01:00:45,394 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:04,448 INFO [train.py:903] (0/4) Epoch 23, batch 6750, loss[loss=0.2041, simple_loss=0.2809, pruned_loss=0.0637, over 19740.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2888, pruned_loss=0.06412, over 3801315.98 frames. ], batch size: 46, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:01:13,586 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:00,907 INFO [train.py:903] (0/4) Epoch 23, batch 6800, loss[loss=0.1938, simple_loss=0.2727, pruned_loss=0.05744, over 19671.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2885, pruned_loss=0.06394, over 3795605.29 frames. ], batch size: 53, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:02:09,484 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:14,927 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:30,223 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-23.pt +2023-04-03 01:02:44,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 01:02:45,298 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. 
Duration: 26.6166875 +2023-04-03 01:02:48,304 INFO [train.py:903] (0/4) Epoch 24, batch 0, loss[loss=0.1649, simple_loss=0.2425, pruned_loss=0.04364, over 19040.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2425, pruned_loss=0.04364, over 19040.00 frames. ], batch size: 42, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:02:48,304 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 01:02:59,926 INFO [train.py:937] (0/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2685, pruned_loss=0.03408, over 944034.00 frames. +2023-04-03 01:02:59,927 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 01:03:03,177 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.212e+02 6.445e+02 8.399e+02 3.393e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-03 01:03:05,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:03:12,276 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 01:03:57,980 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0248, 2.1615, 2.4759, 2.2987, 3.7086, 2.9660, 3.8958, 2.2125], + device='cuda:0'), covar=tensor([0.2259, 0.3768, 0.2427, 0.1673, 0.1313, 0.1905, 0.1347, 0.3457], + device='cuda:0'), in_proj_covar=tensor([0.0537, 0.0646, 0.0718, 0.0489, 0.0618, 0.0533, 0.0658, 0.0551], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 01:04:00,822 INFO [train.py:903] (0/4) Epoch 24, batch 50, loss[loss=0.1886, simple_loss=0.281, pruned_loss=0.04809, over 19653.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2851, pruned_loss=0.06257, over 858695.71 frames. ], batch size: 60, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:04:20,652 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:04:32,482 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 01:05:01,205 INFO [train.py:903] (0/4) Epoch 24, batch 100, loss[loss=0.2231, simple_loss=0.3042, pruned_loss=0.07103, over 19533.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2859, pruned_loss=0.06355, over 1515448.40 frames. ], batch size: 56, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:05:03,493 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 5.500e+02 6.534e+02 8.918e+02 1.825e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 01:05:11,334 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 01:05:19,869 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157160.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:06:02,079 INFO [train.py:903] (0/4) Epoch 24, batch 150, loss[loss=0.2225, simple_loss=0.308, pruned_loss=0.06852, over 19608.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2852, pruned_loss=0.06269, over 2037421.31 frames. ], batch size: 57, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:06:42,420 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:01,366 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-03 01:07:02,476 INFO [train.py:903] (0/4) Epoch 24, batch 200, loss[loss=0.2125, simple_loss=0.3008, pruned_loss=0.0621, over 19784.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2864, pruned_loss=0.06291, over 2431977.05 frames. ], batch size: 56, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:07:04,623 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 4.992e+02 5.973e+02 7.088e+02 2.080e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 01:07:05,909 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:08:03,058 INFO [train.py:903] (0/4) Epoch 24, batch 250, loss[loss=0.1788, simple_loss=0.26, pruned_loss=0.0488, over 19432.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2851, pruned_loss=0.06276, over 2746783.95 frames. ], batch size: 48, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:08:10,680 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-03 01:09:03,278 INFO [train.py:903] (0/4) Epoch 24, batch 300, loss[loss=0.2388, simple_loss=0.3002, pruned_loss=0.08871, over 19470.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2854, pruned_loss=0.06289, over 2989761.28 frames. ], batch size: 49, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:09:06,234 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.405e+02 6.557e+02 9.024e+02 1.464e+03, threshold=1.311e+03, percent-clipped=9.0 +2023-04-03 01:09:25,587 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157362.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:36,369 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:39,461 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:05,065 INFO [train.py:903] (0/4) Epoch 24, batch 350, loss[loss=0.2772, simple_loss=0.3356, pruned_loss=0.1094, over 13261.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2862, pruned_loss=0.06353, over 3175550.15 frames. ], batch size: 137, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:10:10,685 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 01:10:12,244 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:27,188 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8494, 1.9044, 2.2144, 2.4211, 1.8233, 2.3125, 2.1897, 2.0195], + device='cuda:0'), covar=tensor([0.4268, 0.4076, 0.1960, 0.2499, 0.4256, 0.2286, 0.4996, 0.3500], + device='cuda:0'), in_proj_covar=tensor([0.0906, 0.0975, 0.0722, 0.0932, 0.0887, 0.0823, 0.0846, 0.0787], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 01:10:39,433 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157422.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:11:05,163 INFO [train.py:903] (0/4) Epoch 24, batch 400, loss[loss=0.2322, simple_loss=0.305, pruned_loss=0.0797, over 18715.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2872, pruned_loss=0.06408, over 3327415.46 frames. 
], batch size: 74, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:11:07,648 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 4.825e+02 6.674e+02 8.153e+02 1.427e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-03 01:11:52,582 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157482.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:11:58,232 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:05,698 INFO [train.py:903] (0/4) Epoch 24, batch 450, loss[loss=0.25, simple_loss=0.317, pruned_loss=0.09148, over 19270.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2892, pruned_loss=0.06487, over 3431921.42 frames. ], batch size: 66, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:12:11,725 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8486, 1.8024, 1.9210, 1.6371, 3.4837, 1.2260, 2.6224, 3.8731], + device='cuda:0'), covar=tensor([0.0480, 0.2349, 0.2410, 0.1886, 0.0655, 0.2472, 0.1164, 0.0229], + device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0371, 0.0391, 0.0352, 0.0375, 0.0352, 0.0386, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:12:20,134 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:22,668 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3551, 1.9494, 2.0261, 1.6782, 3.9646, 1.3710, 2.8065, 4.1324], + device='cuda:0'), covar=tensor([0.0530, 0.2440, 0.2605, 0.2145, 0.0771, 0.2624, 0.1555, 0.0259], + device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0371, 0.0391, 0.0352, 0.0375, 0.0352, 0.0385, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:12:23,944 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:40,658 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 01:12:40,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 01:12:44,467 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157524.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:13:08,943 INFO [train.py:903] (0/4) Epoch 24, batch 500, loss[loss=0.1882, simple_loss=0.268, pruned_loss=0.05417, over 19386.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06462, over 3524706.19 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:13:12,136 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.203e+02 6.096e+02 8.720e+02 1.456e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 01:14:02,838 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:12,207 INFO [train.py:903] (0/4) Epoch 24, batch 550, loss[loss=0.2228, simple_loss=0.2911, pruned_loss=0.07723, over 19619.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06391, over 3585630.44 frames. 
], batch size: 50, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:14:21,008 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8419, 4.4110, 2.7860, 3.8269, 1.0344, 4.3303, 4.2522, 4.3550], + device='cuda:0'), covar=tensor([0.0598, 0.1016, 0.1900, 0.0847, 0.3998, 0.0674, 0.0864, 0.1004], + device='cuda:0'), in_proj_covar=tensor([0.0515, 0.0415, 0.0499, 0.0349, 0.0403, 0.0439, 0.0433, 0.0469], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:14:41,093 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157618.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:42,110 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,043 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,768 INFO [train.py:903] (0/4) Epoch 24, batch 600, loss[loss=0.202, simple_loss=0.2783, pruned_loss=0.0628, over 19576.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.0639, over 3630435.83 frames. ], batch size: 52, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:15:15,911 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.646e+02 5.574e+02 6.767e+02 1.170e+03, threshold=1.115e+03, percent-clipped=0.0 +2023-04-03 01:15:53,025 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 01:16:14,759 INFO [train.py:903] (0/4) Epoch 24, batch 650, loss[loss=0.169, simple_loss=0.2434, pruned_loss=0.04726, over 19364.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2887, pruned_loss=0.0644, over 3672320.26 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:16:45,908 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157718.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:07,759 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4752, 1.5375, 1.7860, 1.7163, 2.8003, 2.2826, 2.8933, 1.2847], + device='cuda:0'), covar=tensor([0.2724, 0.4590, 0.3003, 0.2124, 0.1492, 0.2347, 0.1496, 0.4625], + device='cuda:0'), in_proj_covar=tensor([0.0542, 0.0651, 0.0725, 0.0494, 0.0623, 0.0537, 0.0664, 0.0555], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 01:17:14,675 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157743.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:15,389 INFO [train.py:903] (0/4) Epoch 24, batch 700, loss[loss=0.2291, simple_loss=0.3045, pruned_loss=0.07681, over 18147.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06388, over 3703014.88 frames. 
], batch size: 83, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:17:15,552 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:20,745 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.232e+02 6.869e+02 8.195e+02 1.483e+03, threshold=1.374e+03, percent-clipped=7.0 +2023-04-03 01:17:44,055 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157766.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:17:46,345 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:18:19,077 INFO [train.py:903] (0/4) Epoch 24, batch 750, loss[loss=0.2006, simple_loss=0.2698, pruned_loss=0.06573, over 19794.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2883, pruned_loss=0.06413, over 3724010.93 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:06,769 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:19,815 INFO [train.py:903] (0/4) Epoch 24, batch 800, loss[loss=0.1869, simple_loss=0.2602, pruned_loss=0.05685, over 19629.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2885, pruned_loss=0.06428, over 3738370.33 frames. ], batch size: 50, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:22,466 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4766, 2.3365, 2.1175, 2.6617, 2.3544, 2.1132, 1.8920, 2.4688], + device='cuda:0'), covar=tensor([0.1002, 0.1642, 0.1509, 0.1051, 0.1375, 0.0565, 0.1549, 0.0717], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0356, 0.0315, 0.0254, 0.0305, 0.0255, 0.0314, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:19:23,271 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 5.074e+02 6.358e+02 8.526e+02 1.766e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-03 01:19:30,236 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 01:19:31,726 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2352, 1.9821, 1.8169, 2.1057, 1.8744, 1.8429, 1.6550, 2.0720], + device='cuda:0'), covar=tensor([0.0963, 0.1410, 0.1428, 0.0927, 0.1379, 0.0564, 0.1521, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0357, 0.0315, 0.0254, 0.0305, 0.0255, 0.0314, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:19:37,379 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157859.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:48,481 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157868.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:19:52,843 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:59,147 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157875.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:05,647 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157881.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:20:20,136 INFO [train.py:903] (0/4) Epoch 24, batch 850, loss[loss=0.188, simple_loss=0.2695, pruned_loss=0.05324, over 19618.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2888, pruned_loss=0.06419, over 3745198.45 frames. ], batch size: 50, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:20:27,388 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157900.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:05,393 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:08,626 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 01:21:21,347 INFO [train.py:903] (0/4) Epoch 24, batch 900, loss[loss=0.2125, simple_loss=0.2985, pruned_loss=0.06322, over 19681.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2882, pruned_loss=0.06372, over 3766265.56 frames. ], batch size: 60, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:21:25,793 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.800e+02 5.808e+02 7.910e+02 1.683e+03, threshold=1.162e+03, percent-clipped=5.0 +2023-04-03 01:21:31,620 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5471, 3.0119, 3.1661, 3.2146, 1.3252, 2.9887, 2.6519, 2.7353], + device='cuda:0'), covar=tensor([0.3086, 0.2094, 0.1553, 0.1920, 0.7851, 0.2390, 0.1503, 0.2857], + device='cuda:0'), in_proj_covar=tensor([0.0789, 0.0756, 0.0965, 0.0841, 0.0844, 0.0727, 0.0579, 0.0887], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 01:22:10,104 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157983.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:22:22,960 INFO [train.py:903] (0/4) Epoch 24, batch 950, loss[loss=0.2122, simple_loss=0.2999, pruned_loss=0.0622, over 18117.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2878, pruned_loss=0.06331, over 3793276.21 frames. ], batch size: 83, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:22:22,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 01:22:31,627 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-158000.pt +2023-04-03 01:22:38,855 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6163, 4.2251, 2.8219, 3.7155, 0.7894, 4.2261, 4.0669, 4.1514], + device='cuda:0'), covar=tensor([0.0579, 0.0941, 0.1687, 0.0816, 0.4086, 0.0607, 0.0868, 0.0986], + device='cuda:0'), in_proj_covar=tensor([0.0512, 0.0413, 0.0496, 0.0347, 0.0401, 0.0438, 0.0432, 0.0466], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:23:26,858 INFO [train.py:903] (0/4) Epoch 24, batch 1000, loss[loss=0.227, simple_loss=0.3059, pruned_loss=0.07409, over 19681.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.288, pruned_loss=0.06342, over 3800325.05 frames. ], batch size: 60, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:23:28,196 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:23:31,291 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.075e+02 5.979e+02 8.043e+02 1.884e+03, threshold=1.196e+03, percent-clipped=5.0 +2023-04-03 01:23:34,133 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7063, 1.8070, 1.8914, 2.4488, 1.8712, 2.3311, 2.0113, 1.6172], + device='cuda:0'), covar=tensor([0.4985, 0.4598, 0.2744, 0.2951, 0.4407, 0.2488, 0.6315, 0.5087], + device='cuda:0'), in_proj_covar=tensor([0.0909, 0.0980, 0.0723, 0.0935, 0.0888, 0.0825, 0.0845, 0.0787], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 01:24:17,479 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 01:24:22,236 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:27,537 INFO [train.py:903] (0/4) Epoch 24, batch 1050, loss[loss=0.1792, simple_loss=0.2684, pruned_loss=0.04495, over 19847.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06343, over 3814203.62 frames. ], batch size: 52, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:24:50,219 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158114.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:52,247 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158115.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:56,173 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 01:25:18,954 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5236, 1.5959, 1.9835, 1.7598, 2.7992, 2.3778, 2.9687, 1.3528], + device='cuda:0'), covar=tensor([0.2389, 0.4249, 0.2587, 0.1882, 0.1421, 0.2066, 0.1357, 0.4215], + device='cuda:0'), in_proj_covar=tensor([0.0538, 0.0648, 0.0720, 0.0492, 0.0619, 0.0534, 0.0660, 0.0553], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 01:25:20,095 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158137.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:25:23,232 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,325 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:27,541 INFO [train.py:903] (0/4) Epoch 24, batch 1100, loss[loss=0.1965, simple_loss=0.2813, pruned_loss=0.05585, over 19615.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.06338, over 3820129.36 frames. ], batch size: 57, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:25:31,913 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 5.122e+02 6.777e+02 7.992e+02 2.032e+03, threshold=1.355e+03, percent-clipped=5.0 +2023-04-03 01:25:50,614 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158162.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:26:28,898 INFO [train.py:903] (0/4) Epoch 24, batch 1150, loss[loss=0.2047, simple_loss=0.2788, pruned_loss=0.06533, over 19621.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.06306, over 3822133.73 frames. ], batch size: 50, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:26:56,843 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:27:25,336 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158239.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:27:31,330 INFO [train.py:903] (0/4) Epoch 24, batch 1200, loss[loss=0.2658, simple_loss=0.3296, pruned_loss=0.101, over 13246.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06299, over 3825851.85 frames. ], batch size: 136, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:27:37,766 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.926e+02 5.852e+02 7.782e+02 1.430e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 01:27:56,352 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158264.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:28:02,790 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 01:28:21,971 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9678, 2.0299, 2.2742, 2.5163, 1.9220, 2.4275, 2.2842, 2.1299], + device='cuda:0'), covar=tensor([0.4174, 0.3777, 0.1868, 0.2519, 0.4113, 0.2200, 0.4742, 0.3180], + device='cuda:0'), in_proj_covar=tensor([0.0908, 0.0981, 0.0725, 0.0939, 0.0890, 0.0826, 0.0846, 0.0788], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 01:28:34,708 INFO [train.py:903] (0/4) Epoch 24, batch 1250, loss[loss=0.1923, simple_loss=0.2869, pruned_loss=0.04882, over 19782.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2853, pruned_loss=0.0624, over 3834887.94 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:28:43,298 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:14,178 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:20,306 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:35,676 INFO [train.py:903] (0/4) Epoch 24, batch 1300, loss[loss=0.204, simple_loss=0.2744, pruned_loss=0.06679, over 19763.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2848, pruned_loss=0.06194, over 3839446.41 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:29:40,388 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 5.283e+02 7.241e+02 8.945e+02 2.355e+03, threshold=1.448e+03, percent-clipped=9.0 +2023-04-03 01:30:36,953 INFO [train.py:903] (0/4) Epoch 24, batch 1350, loss[loss=0.2176, simple_loss=0.2965, pruned_loss=0.06934, over 19569.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2855, pruned_loss=0.06238, over 3841089.66 frames. ], batch size: 61, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:30:47,118 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:31:38,835 INFO [train.py:903] (0/4) Epoch 24, batch 1400, loss[loss=0.1851, simple_loss=0.2646, pruned_loss=0.0528, over 19869.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2855, pruned_loss=0.0626, over 3846845.19 frames. 
], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:31:43,420 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.106e+02 6.656e+02 8.188e+02 2.197e+03, threshold=1.331e+03, percent-clipped=4.0 +2023-04-03 01:32:03,687 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1807, 1.1842, 1.4473, 1.2819, 2.2642, 3.1886, 2.8748, 3.5103], + device='cuda:0'), covar=tensor([0.1811, 0.5213, 0.4987, 0.2660, 0.0845, 0.0299, 0.0417, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0322, 0.0354, 0.0263, 0.0244, 0.0190, 0.0215, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 01:32:16,281 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2548, 1.9554, 1.5708, 1.3366, 1.8430, 1.2344, 1.1590, 1.7480], + device='cuda:0'), covar=tensor([0.0994, 0.0859, 0.1070, 0.0874, 0.0573, 0.1325, 0.0753, 0.0496], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0270, 0.0249, 0.0340, 0.0292, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:32:27,022 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:32:39,488 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4029, 2.0605, 1.5669, 1.3983, 1.9302, 1.2266, 1.2084, 1.8073], + device='cuda:0'), covar=tensor([0.1030, 0.0850, 0.1136, 0.0899, 0.0577, 0.1328, 0.0821, 0.0511], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0317, 0.0339, 0.0270, 0.0248, 0.0340, 0.0291, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:32:40,271 INFO [train.py:903] (0/4) Epoch 24, batch 1450, loss[loss=0.2089, simple_loss=0.3002, pruned_loss=0.05881, over 19297.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.286, pruned_loss=0.06315, over 3844077.67 frames. ], batch size: 70, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:32:43,457 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 01:33:26,517 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7724, 4.2491, 4.4926, 4.4902, 1.6569, 4.1824, 3.6861, 4.2288], + device='cuda:0'), covar=tensor([0.1734, 0.0861, 0.0634, 0.0715, 0.6184, 0.0893, 0.0688, 0.1201], + device='cuda:0'), in_proj_covar=tensor([0.0790, 0.0756, 0.0961, 0.0839, 0.0844, 0.0727, 0.0578, 0.0888], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 01:33:41,524 INFO [train.py:903] (0/4) Epoch 24, batch 1500, loss[loss=0.2055, simple_loss=0.2901, pruned_loss=0.06043, over 19671.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2856, pruned_loss=0.063, over 3850204.28 frames. ], batch size: 55, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:33:46,124 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.633e+02 4.870e+02 5.968e+02 7.477e+02 1.869e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-03 01:34:33,871 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:34:42,444 INFO [train.py:903] (0/4) Epoch 24, batch 1550, loss[loss=0.1855, simple_loss=0.2727, pruned_loss=0.04918, over 19840.00 frames. 
], tot_loss[loss=0.2057, simple_loss=0.2855, pruned_loss=0.06293, over 3839280.72 frames. ], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:34:48,557 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:04,342 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:46,041 INFO [train.py:903] (0/4) Epoch 24, batch 1600, loss[loss=0.2078, simple_loss=0.2991, pruned_loss=0.05824, over 19344.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2856, pruned_loss=0.06269, over 3832827.44 frames. ], batch size: 66, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:35:51,808 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.721e+02 6.000e+02 7.264e+02 1.836e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 01:36:11,982 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1038, 2.1820, 2.3445, 2.2032, 3.1329, 2.6604, 3.2375, 2.1349], + device='cuda:0'), covar=tensor([0.1960, 0.3177, 0.2202, 0.1587, 0.1305, 0.1833, 0.1361, 0.3603], + device='cuda:0'), in_proj_covar=tensor([0.0540, 0.0650, 0.0721, 0.0492, 0.0621, 0.0537, 0.0663, 0.0557], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 01:36:12,716 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 01:36:28,018 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5160, 1.6207, 1.8276, 1.8568, 1.4378, 1.7754, 1.8710, 1.7090], + device='cuda:0'), covar=tensor([0.3851, 0.3129, 0.1726, 0.2067, 0.3321, 0.1899, 0.4458, 0.3070], + device='cuda:0'), in_proj_covar=tensor([0.0911, 0.0981, 0.0724, 0.0937, 0.0890, 0.0827, 0.0845, 0.0789], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 01:36:50,302 INFO [train.py:903] (0/4) Epoch 24, batch 1650, loss[loss=0.1845, simple_loss=0.2665, pruned_loss=0.05126, over 19848.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2861, pruned_loss=0.06293, over 3824806.09 frames. ], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:52,882 INFO [train.py:903] (0/4) Epoch 24, batch 1700, loss[loss=0.182, simple_loss=0.2635, pruned_loss=0.05023, over 18653.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2848, pruned_loss=0.06218, over 3832685.23 frames. ], batch size: 41, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:55,314 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:37:57,371 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.745e+02 5.793e+02 7.168e+02 1.456e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 01:38:00,945 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3245, 3.0652, 2.2868, 2.7374, 0.8972, 3.0080, 2.9041, 3.0013], + device='cuda:0'), covar=tensor([0.0975, 0.1292, 0.1928, 0.1088, 0.3538, 0.1038, 0.1125, 0.1318], + device='cuda:0'), in_proj_covar=tensor([0.0512, 0.0414, 0.0497, 0.0346, 0.0403, 0.0438, 0.0430, 0.0463], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:38:35,593 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-03 01:38:54,538 INFO [train.py:903] (0/4) Epoch 24, batch 1750, loss[loss=0.1459, simple_loss=0.2297, pruned_loss=0.03107, over 19364.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2857, pruned_loss=0.06273, over 3828894.94 frames. ], batch size: 47, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:38:56,109 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:03,105 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:57,555 INFO [train.py:903] (0/4) Epoch 24, batch 1800, loss[loss=0.2432, simple_loss=0.3193, pruned_loss=0.08351, over 19759.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2863, pruned_loss=0.06285, over 3836356.94 frames. ], batch size: 63, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:40:02,406 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.690e+02 5.979e+02 7.282e+02 2.087e+03, threshold=1.196e+03, percent-clipped=3.0 +2023-04-03 01:40:08,475 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158853.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:40:10,958 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158855.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:19,324 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:40,502 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:56,888 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 01:40:58,011 INFO [train.py:903] (0/4) Epoch 24, batch 1850, loss[loss=0.2201, simple_loss=0.2987, pruned_loss=0.07072, over 19536.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06311, over 3831627.66 frames. ], batch size: 64, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:41:32,409 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 01:41:49,530 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8401, 4.1519, 4.7006, 4.7502, 1.6527, 4.3945, 3.7418, 4.0782], + device='cuda:0'), covar=tensor([0.2351, 0.1466, 0.0926, 0.1154, 0.8099, 0.1856, 0.1231, 0.1975], + device='cuda:0'), in_proj_covar=tensor([0.0788, 0.0757, 0.0959, 0.0838, 0.0841, 0.0727, 0.0576, 0.0886], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 01:42:00,399 INFO [train.py:903] (0/4) Epoch 24, batch 1900, loss[loss=0.1946, simple_loss=0.2887, pruned_loss=0.05023, over 19667.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2863, pruned_loss=0.06294, over 3833540.40 frames. ], batch size: 58, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:42:04,924 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 4.936e+02 5.873e+02 7.509e+02 2.125e+03, threshold=1.175e+03, percent-clipped=8.0 +2023-04-03 01:42:18,812 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 01:42:24,122 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 01:42:47,555 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 01:43:00,390 INFO [train.py:903] (0/4) Epoch 24, batch 1950, loss[loss=0.2413, simple_loss=0.3086, pruned_loss=0.08701, over 13511.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2874, pruned_loss=0.06374, over 3816413.43 frames. ], batch size: 135, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:03,842 INFO [train.py:903] (0/4) Epoch 24, batch 2000, loss[loss=0.1911, simple_loss=0.2773, pruned_loss=0.05245, over 19525.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2869, pruned_loss=0.06369, over 3819694.62 frames. ], batch size: 54, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:08,591 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.106e+02 6.605e+02 9.481e+02 1.726e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-03 01:44:57,594 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:44:59,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 01:45:06,694 INFO [train.py:903] (0/4) Epoch 24, batch 2050, loss[loss=0.2269, simple_loss=0.313, pruned_loss=0.07039, over 19659.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2875, pruned_loss=0.06373, over 3816027.80 frames. ], batch size: 58, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:45:19,165 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 01:45:20,293 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 01:45:35,009 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:45:41,066 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 01:46:02,768 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159139.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:06,486 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:08,201 INFO [train.py:903] (0/4) Epoch 24, batch 2100, loss[loss=0.2418, simple_loss=0.3141, pruned_loss=0.0847, over 19686.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2874, pruned_loss=0.06345, over 3815487.59 frames. ], batch size: 60, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:46:10,355 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159145.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:13,635 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.527e+02 4.786e+02 5.922e+02 8.131e+02 1.881e+03, threshold=1.184e+03, percent-clipped=4.0 +2023-04-03 01:46:37,332 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 01:46:49,747 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:58,696 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-03 01:47:00,963 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1214, 1.1943, 1.4050, 1.5441, 2.7198, 1.2403, 2.2041, 3.1163], + device='cuda:0'), covar=tensor([0.0617, 0.3008, 0.3144, 0.1705, 0.0738, 0.2312, 0.1289, 0.0311], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0354, 0.0378, 0.0355, 0.0389, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:47:10,234 INFO [train.py:903] (0/4) Epoch 24, batch 2150, loss[loss=0.1677, simple_loss=0.2385, pruned_loss=0.04846, over 19736.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06362, over 3823084.92 frames. ], batch size: 45, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:47:13,851 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159197.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:48:12,832 INFO [train.py:903] (0/4) Epoch 24, batch 2200, loss[loss=0.2475, simple_loss=0.3307, pruned_loss=0.08218, over 19529.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06318, over 3832850.47 frames. ], batch size: 54, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:48:18,018 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.881e+02 6.157e+02 7.813e+02 2.191e+03, threshold=1.231e+03, percent-clipped=6.0 +2023-04-03 01:48:25,263 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:33,230 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:36,865 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1368, 2.0489, 1.8369, 1.7389, 1.3925, 1.5656, 0.6511, 1.0941], + device='cuda:0'), covar=tensor([0.0851, 0.0777, 0.0574, 0.1002, 0.1594, 0.1304, 0.1514, 0.1276], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0357, 0.0360, 0.0385, 0.0462, 0.0390, 0.0338, 0.0342], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 01:49:14,284 INFO [train.py:903] (0/4) Epoch 24, batch 2250, loss[loss=0.2494, simple_loss=0.3164, pruned_loss=0.09122, over 13354.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06393, over 3811302.30 frames. ], batch size: 135, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:49:31,837 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:49:37,375 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159312.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:50:16,687 INFO [train.py:903] (0/4) Epoch 24, batch 2300, loss[loss=0.2013, simple_loss=0.2891, pruned_loss=0.05672, over 19692.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2887, pruned_loss=0.06393, over 3815541.56 frames. ], batch size: 60, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:50:21,057 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.864e+02 6.208e+02 8.672e+02 1.812e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 01:50:31,299 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-03 01:51:19,173 INFO [train.py:903] (0/4) Epoch 24, batch 2350, loss[loss=0.2622, simple_loss=0.3302, pruned_loss=0.09705, over 13737.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2881, pruned_loss=0.06381, over 3817565.84 frames. ], batch size: 136, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:00,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 01:52:04,796 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:52:17,051 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 01:52:19,432 INFO [train.py:903] (0/4) Epoch 24, batch 2400, loss[loss=0.2002, simple_loss=0.2774, pruned_loss=0.06147, over 19720.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2869, pruned_loss=0.06313, over 3826143.35 frames. ], batch size: 51, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:21,048 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3023, 1.8844, 1.8962, 2.1429, 1.8371, 1.8801, 1.7294, 2.1118], + device='cuda:0'), covar=tensor([0.0898, 0.1484, 0.1329, 0.1030, 0.1343, 0.0537, 0.1440, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0358, 0.0313, 0.0255, 0.0304, 0.0253, 0.0315, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 01:52:25,061 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.946e+02 5.943e+02 8.368e+02 2.189e+03, threshold=1.189e+03, percent-clipped=6.0 +2023-04-03 01:53:21,819 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0251, 1.2836, 1.7129, 1.1383, 2.4238, 3.2944, 2.9916, 3.5104], + device='cuda:0'), covar=tensor([0.1774, 0.3898, 0.3415, 0.2726, 0.0707, 0.0231, 0.0253, 0.0301], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0325, 0.0355, 0.0265, 0.0246, 0.0190, 0.0216, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 01:53:22,623 INFO [train.py:903] (0/4) Epoch 24, batch 2450, loss[loss=0.2042, simple_loss=0.288, pruned_loss=0.06021, over 19746.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2857, pruned_loss=0.06259, over 3822071.91 frames. ], batch size: 63, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:53:42,077 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159510.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:47,923 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 01:53:49,981 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:55,500 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:13,538 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:21,689 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:24,657 INFO [train.py:903] (0/4) Epoch 24, batch 2500, loss[loss=0.191, simple_loss=0.2692, pruned_loss=0.05639, over 19721.00 frames. 
], tot_loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.06267, over 3817150.68 frames. ], batch size: 51, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:54:27,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:29,311 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.941e+02 6.110e+02 7.649e+02 1.406e+03, threshold=1.222e+03, percent-clipped=1.0 +2023-04-03 01:54:55,452 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159568.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:54:56,911 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.46 vs. limit=2.0 +2023-04-03 01:55:25,694 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159593.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:55:26,488 INFO [train.py:903] (0/4) Epoch 24, batch 2550, loss[loss=0.2085, simple_loss=0.2959, pruned_loss=0.06059, over 19752.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06275, over 3827172.33 frames. ], batch size: 54, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:19,777 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:20,610 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 01:56:29,397 INFO [train.py:903] (0/4) Epoch 24, batch 2600, loss[loss=0.2164, simple_loss=0.3, pruned_loss=0.06642, over 19588.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.286, pruned_loss=0.06248, over 3828051.65 frames. ], batch size: 52, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:34,952 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.779e+02 5.928e+02 8.262e+02 1.528e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-03 01:56:36,540 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159649.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:39,953 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:57:31,606 INFO [train.py:903] (0/4) Epoch 24, batch 2650, loss[loss=0.2122, simple_loss=0.291, pruned_loss=0.06672, over 19772.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2854, pruned_loss=0.06198, over 3808265.45 frames. ], batch size: 54, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:57:34,981 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2612, 1.3049, 1.7538, 1.2947, 2.6093, 3.5311, 3.2381, 3.7694], + device='cuda:0'), covar=tensor([0.1654, 0.3846, 0.3371, 0.2580, 0.0642, 0.0199, 0.0232, 0.0276], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0324, 0.0356, 0.0264, 0.0246, 0.0190, 0.0216, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 01:57:43,910 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 01:58:21,001 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6579, 1.7926, 2.2276, 2.1696, 3.1342, 2.6585, 3.3350, 1.7611], + device='cuda:0'), covar=tensor([0.2695, 0.4575, 0.2999, 0.1983, 0.1784, 0.2337, 0.1890, 0.4443], + device='cuda:0'), in_proj_covar=tensor([0.0544, 0.0656, 0.0727, 0.0495, 0.0625, 0.0537, 0.0667, 0.0560], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 01:58:34,707 INFO [train.py:903] (0/4) Epoch 24, batch 2700, loss[loss=0.2036, simple_loss=0.288, pruned_loss=0.05964, over 19768.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06221, over 3824710.20 frames. ], batch size: 54, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:58:39,053 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 5.237e+02 6.508e+02 8.466e+02 2.382e+03, threshold=1.302e+03, percent-clipped=8.0 +2023-04-03 01:59:03,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:59:36,000 INFO [train.py:903] (0/4) Epoch 24, batch 2750, loss[loss=0.1936, simple_loss=0.2704, pruned_loss=0.05837, over 19611.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2868, pruned_loss=0.06274, over 3826061.13 frames. ], batch size: 50, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:59:46,796 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:19,499 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:40,385 INFO [train.py:903] (0/4) Epoch 24, batch 2800, loss[loss=0.2117, simple_loss=0.2977, pruned_loss=0.06287, over 19795.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.286, pruned_loss=0.06212, over 3814915.99 frames. ], batch size: 56, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:00:45,932 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.444e+02 4.661e+02 5.641e+02 7.181e+02 2.352e+03, threshold=1.128e+03, percent-clipped=2.0 +2023-04-03 02:01:10,957 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-03 02:01:16,147 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:40,841 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:42,761 INFO [train.py:903] (0/4) Epoch 24, batch 2850, loss[loss=0.2079, simple_loss=0.2933, pruned_loss=0.06126, over 19616.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.06261, over 3812191.01 frames. ], batch size: 57, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:10,626 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:25,874 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159928.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:39,634 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 02:02:45,323 INFO [train.py:903] (0/4) Epoch 24, batch 2900, loss[loss=0.2362, simple_loss=0.3136, pruned_loss=0.07939, over 19297.00 frames. 
], tot_loss[loss=0.2064, simple_loss=0.2867, pruned_loss=0.06306, over 3819558.29 frames. ], batch size: 66, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:51,071 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.874e+02 6.501e+02 8.672e+02 1.518e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-03 02:03:45,493 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:03:46,486 INFO [train.py:903] (0/4) Epoch 24, batch 2950, loss[loss=0.2442, simple_loss=0.3294, pruned_loss=0.07947, over 19765.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06387, over 3818052.80 frames. ], batch size: 63, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:03:54,507 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-160000.pt +2023-04-03 02:04:24,681 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:04:45,828 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 02:04:49,461 INFO [train.py:903] (0/4) Epoch 24, batch 3000, loss[loss=0.2088, simple_loss=0.2824, pruned_loss=0.06759, over 19588.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2878, pruned_loss=0.06389, over 3809799.19 frames. ], batch size: 52, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:49,462 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 02:05:02,000 INFO [train.py:937] (0/4) Epoch 24, validation: loss=0.1679, simple_loss=0.268, pruned_loss=0.03397, over 944034.00 frames. +2023-04-03 02:05:02,001 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 02:05:08,004 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:05:08,850 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 4.966e+02 6.275e+02 7.790e+02 1.988e+03, threshold=1.255e+03, percent-clipped=5.0 +2023-04-03 02:06:04,587 INFO [train.py:903] (0/4) Epoch 24, batch 3050, loss[loss=0.2339, simple_loss=0.3103, pruned_loss=0.07874, over 18113.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2873, pruned_loss=0.06337, over 3812515.05 frames. ], batch size: 83, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:06:23,573 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:06:37,826 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 02:07:06,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9690, 1.9177, 1.8095, 1.6383, 1.4921, 1.5964, 0.3826, 0.9514], + device='cuda:0'), covar=tensor([0.0605, 0.0599, 0.0421, 0.0665, 0.1235, 0.0803, 0.1304, 0.1075], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0356, 0.0361, 0.0383, 0.0461, 0.0391, 0.0338, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 02:07:08,747 INFO [train.py:903] (0/4) Epoch 24, batch 3100, loss[loss=0.2509, simple_loss=0.3261, pruned_loss=0.08787, over 13308.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2864, pruned_loss=0.0625, over 3808413.16 frames. 
], batch size: 137, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:07:14,571 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 4.859e+02 5.894e+02 7.109e+02 1.682e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 02:07:40,543 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2219, 3.8140, 3.9202, 3.9199, 1.6202, 3.7336, 3.2349, 3.6688], + device='cuda:0'), covar=tensor([0.1886, 0.0971, 0.0747, 0.0866, 0.5927, 0.1029, 0.0785, 0.1201], + device='cuda:0'), in_proj_covar=tensor([0.0793, 0.0757, 0.0963, 0.0844, 0.0847, 0.0733, 0.0576, 0.0895], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 02:08:10,532 INFO [train.py:903] (0/4) Epoch 24, batch 3150, loss[loss=0.2284, simple_loss=0.2982, pruned_loss=0.07931, over 19606.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.285, pruned_loss=0.06188, over 3814296.93 frames. ], batch size: 50, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:08:36,006 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 02:08:39,732 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160217.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:09:14,344 INFO [train.py:903] (0/4) Epoch 24, batch 3200, loss[loss=0.1979, simple_loss=0.2843, pruned_loss=0.05576, over 17366.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06211, over 3823938.17 frames. ], batch size: 101, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:09:20,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.134e+02 6.599e+02 8.700e+02 2.161e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-03 02:09:33,578 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9969, 1.3601, 1.7500, 1.1371, 2.5283, 3.5671, 3.2149, 3.7098], + device='cuda:0'), covar=tensor([0.1766, 0.3849, 0.3401, 0.2618, 0.0693, 0.0202, 0.0230, 0.0301], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0326, 0.0356, 0.0265, 0.0246, 0.0190, 0.0217, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 02:09:49,686 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:10:17,062 INFO [train.py:903] (0/4) Epoch 24, batch 3250, loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05932, over 19730.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2866, pruned_loss=0.06264, over 3824444.43 frames. ], batch size: 51, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:10:56,742 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160325.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:11:04,747 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160332.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:11:21,816 INFO [train.py:903] (0/4) Epoch 24, batch 3300, loss[loss=0.2441, simple_loss=0.3223, pruned_loss=0.08294, over 19683.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.287, pruned_loss=0.06298, over 3826789.99 frames. ], batch size: 58, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:11:24,384 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 02:11:27,804 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.973e+02 5.834e+02 7.675e+02 1.997e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-03 02:11:46,496 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160364.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:15,589 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:18,158 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160389.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:23,804 INFO [train.py:903] (0/4) Epoch 24, batch 3350, loss[loss=0.215, simple_loss=0.305, pruned_loss=0.06255, over 19679.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.287, pruned_loss=0.06305, over 3824835.28 frames. ], batch size: 60, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:12:27,298 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 02:12:39,926 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-03 02:13:26,812 INFO [train.py:903] (0/4) Epoch 24, batch 3400, loss[loss=0.1723, simple_loss=0.2639, pruned_loss=0.04036, over 19658.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2866, pruned_loss=0.06267, over 3824509.14 frames. ], batch size: 55, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:13:32,538 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.133e+02 6.647e+02 9.203e+02 1.938e+03, threshold=1.329e+03, percent-clipped=8.0 +2023-04-03 02:13:48,736 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4018, 1.4926, 1.8072, 1.6467, 2.4087, 2.0648, 2.5272, 1.1402], + device='cuda:0'), covar=tensor([0.2630, 0.4409, 0.2680, 0.2035, 0.1651, 0.2347, 0.1505, 0.4589], + device='cuda:0'), in_proj_covar=tensor([0.0543, 0.0654, 0.0728, 0.0494, 0.0622, 0.0537, 0.0665, 0.0558], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 02:14:28,014 INFO [train.py:903] (0/4) Epoch 24, batch 3450, loss[loss=0.1918, simple_loss=0.264, pruned_loss=0.05983, over 19764.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.06236, over 3828392.64 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:14:31,633 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 02:15:29,541 INFO [train.py:903] (0/4) Epoch 24, batch 3500, loss[loss=0.2021, simple_loss=0.2872, pruned_loss=0.05846, over 19537.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2848, pruned_loss=0.06148, over 3831952.84 frames. 
], batch size: 54, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:15:38,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.685e+02 5.887e+02 7.904e+02 2.662e+03, threshold=1.177e+03, percent-clipped=4.0 +2023-04-03 02:15:42,059 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3473, 2.0672, 1.6085, 1.3930, 1.8399, 1.2823, 1.2667, 1.8603], + device='cuda:0'), covar=tensor([0.0969, 0.0761, 0.1059, 0.0804, 0.0535, 0.1299, 0.0697, 0.0427], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0266, 0.0246, 0.0339, 0.0289, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:15:50,495 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 02:16:26,846 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:33,573 INFO [train.py:903] (0/4) Epoch 24, batch 3550, loss[loss=0.2048, simple_loss=0.2945, pruned_loss=0.05753, over 18371.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06176, over 3820924.68 frames. ], batch size: 84, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:16:50,470 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3302, 3.8420, 3.9843, 3.9933, 1.6876, 3.8099, 3.3490, 3.7251], + device='cuda:0'), covar=tensor([0.1649, 0.0861, 0.0701, 0.0745, 0.5493, 0.0946, 0.0707, 0.1181], + device='cuda:0'), in_proj_covar=tensor([0.0793, 0.0757, 0.0963, 0.0842, 0.0846, 0.0733, 0.0576, 0.0895], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 02:16:57,349 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160613.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:58,911 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 02:17:20,635 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 02:17:36,544 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:37,259 INFO [train.py:903] (0/4) Epoch 24, batch 3600, loss[loss=0.1978, simple_loss=0.2758, pruned_loss=0.05987, over 19468.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06224, over 3818203.34 frames. 
], batch size: 49, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:17:44,410 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 4.811e+02 5.669e+02 7.209e+02 1.690e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 02:18:07,930 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160668.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:18:08,795 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160669.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:18:29,979 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4135, 1.5009, 1.8295, 1.6638, 2.6067, 2.2894, 2.7530, 1.3054], + device='cuda:0'), covar=tensor([0.2665, 0.4452, 0.2820, 0.2061, 0.1628, 0.2191, 0.1562, 0.4456], + device='cuda:0'), in_proj_covar=tensor([0.0541, 0.0652, 0.0725, 0.0493, 0.0621, 0.0535, 0.0663, 0.0556], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 02:18:40,044 INFO [train.py:903] (0/4) Epoch 24, batch 3650, loss[loss=0.2533, simple_loss=0.3368, pruned_loss=0.08484, over 19338.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2844, pruned_loss=0.06184, over 3813934.78 frames. ], batch size: 66, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:18:48,125 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8254, 4.2203, 4.4563, 4.5157, 1.6179, 4.2191, 3.6482, 4.1724], + device='cuda:0'), covar=tensor([0.1614, 0.0967, 0.0644, 0.0688, 0.6209, 0.0904, 0.0731, 0.1158], + device='cuda:0'), in_proj_covar=tensor([0.0794, 0.0757, 0.0962, 0.0842, 0.0847, 0.0733, 0.0577, 0.0894], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 02:19:40,897 INFO [train.py:903] (0/4) Epoch 24, batch 3700, loss[loss=0.1961, simple_loss=0.2803, pruned_loss=0.05599, over 19630.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2857, pruned_loss=0.0632, over 3807126.96 frames. ], batch size: 57, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:49,425 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.616e+02 6.347e+02 7.690e+02 1.972e+03, threshold=1.269e+03, percent-clipped=6.0 +2023-04-03 02:20:30,317 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160784.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:20:44,492 INFO [train.py:903] (0/4) Epoch 24, batch 3750, loss[loss=0.2165, simple_loss=0.3033, pruned_loss=0.06486, over 19681.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2857, pruned_loss=0.06301, over 3815151.17 frames. ], batch size: 59, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:45,652 INFO [train.py:903] (0/4) Epoch 24, batch 3800, loss[loss=0.2598, simple_loss=0.3255, pruned_loss=0.09705, over 13226.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2866, pruned_loss=0.06339, over 3798500.83 frames. ], batch size: 136, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:53,458 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.186e+02 4.635e+02 5.250e+02 7.046e+02 1.734e+03, threshold=1.050e+03, percent-clipped=4.0 +2023-04-03 02:22:18,584 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-03 02:22:22,488 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0673, 1.8680, 1.6858, 2.0824, 1.8053, 1.7740, 1.7020, 1.9691], + device='cuda:0'), covar=tensor([0.1048, 0.1432, 0.1507, 0.1052, 0.1284, 0.0544, 0.1445, 0.0725], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0356, 0.0313, 0.0255, 0.0305, 0.0254, 0.0313, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:22:41,752 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0989, 1.2528, 1.6713, 1.2135, 2.5494, 3.5015, 3.2297, 3.7050], + device='cuda:0'), covar=tensor([0.1743, 0.3963, 0.3432, 0.2616, 0.0622, 0.0211, 0.0219, 0.0243], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0325, 0.0355, 0.0264, 0.0245, 0.0190, 0.0216, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 02:22:47,087 INFO [train.py:903] (0/4) Epoch 24, batch 3850, loss[loss=0.1701, simple_loss=0.2531, pruned_loss=0.04353, over 19773.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2866, pruned_loss=0.06355, over 3803898.68 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:18,517 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160919.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:23:48,689 INFO [train.py:903] (0/4) Epoch 24, batch 3900, loss[loss=0.2471, simple_loss=0.3112, pruned_loss=0.09156, over 13396.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2863, pruned_loss=0.06295, over 3797452.44 frames. ], batch size: 135, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:58,397 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 5.134e+02 6.381e+02 7.692e+02 1.884e+03, threshold=1.276e+03, percent-clipped=12.0 +2023-04-03 02:24:38,062 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:24:53,489 INFO [train.py:903] (0/4) Epoch 24, batch 3950, loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.04747, over 19676.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06235, over 3799893.41 frames. ], batch size: 59, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:24:57,060 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 02:25:51,208 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161040.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:25:55,364 INFO [train.py:903] (0/4) Epoch 24, batch 4000, loss[loss=0.1679, simple_loss=0.2515, pruned_loss=0.0421, over 19360.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.285, pruned_loss=0.06223, over 3805628.86 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:26:00,829 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.22 vs. 
limit=5.0 +2023-04-03 02:26:03,419 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.651e+02 5.074e+02 6.327e+02 7.723e+02 1.762e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-03 02:26:08,607 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8616, 1.6913, 1.4954, 1.8976, 1.6242, 1.5810, 1.5592, 1.7585], + device='cuda:0'), covar=tensor([0.1168, 0.1500, 0.1719, 0.0996, 0.1379, 0.0667, 0.1545, 0.0856], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0357, 0.0313, 0.0255, 0.0305, 0.0254, 0.0314, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:26:21,921 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161065.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:26:41,780 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 02:26:57,071 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-03 02:26:57,367 INFO [train.py:903] (0/4) Epoch 24, batch 4050, loss[loss=0.2599, simple_loss=0.3295, pruned_loss=0.09512, over 19564.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.06265, over 3811782.76 frames. ], batch size: 61, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:57,729 INFO [train.py:903] (0/4) Epoch 24, batch 4100, loss[loss=0.1883, simple_loss=0.2685, pruned_loss=0.05402, over 19483.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.0636, over 3804923.39 frames. ], batch size: 49, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:58,485 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 02:28:06,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.915e+02 6.129e+02 7.795e+02 1.333e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 02:28:31,030 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 02:28:49,598 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-03 02:29:00,662 INFO [train.py:903] (0/4) Epoch 24, batch 4150, loss[loss=0.2221, simple_loss=0.2989, pruned_loss=0.07262, over 19536.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.288, pruned_loss=0.06361, over 3798511.47 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:29:50,549 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:01,374 INFO [train.py:903] (0/4) Epoch 24, batch 4200, loss[loss=0.1698, simple_loss=0.2429, pruned_loss=0.04839, over 19718.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06319, over 3814459.97 frames. ], batch size: 46, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:30:02,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-03 02:30:08,523 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:09,255 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.684e+02 6.110e+02 7.845e+02 2.290e+03, threshold=1.222e+03, percent-clipped=7.0 +2023-04-03 02:30:24,244 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:03,257 INFO [train.py:903] (0/4) Epoch 24, batch 4250, loss[loss=0.2139, simple_loss=0.2983, pruned_loss=0.06477, over 19742.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2859, pruned_loss=0.06242, over 3826577.54 frames. ], batch size: 63, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:31:17,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 02:31:28,236 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 02:31:44,199 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:47,940 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 02:32:04,737 INFO [train.py:903] (0/4) Epoch 24, batch 4300, loss[loss=0.2098, simple_loss=0.292, pruned_loss=0.06383, over 19665.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06166, over 3825272.95 frames. ], batch size: 55, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:32:09,652 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3134, 3.8087, 3.9236, 3.9437, 1.6089, 3.7617, 3.2439, 3.6585], + device='cuda:0'), covar=tensor([0.1755, 0.0992, 0.0669, 0.0816, 0.5760, 0.1027, 0.0773, 0.1161], + device='cuda:0'), in_proj_covar=tensor([0.0794, 0.0760, 0.0961, 0.0842, 0.0844, 0.0728, 0.0575, 0.0893], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 02:32:12,545 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.585e+02 5.768e+02 7.257e+02 2.214e+03, threshold=1.154e+03, percent-clipped=5.0 +2023-04-03 02:32:36,957 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6885, 2.6378, 2.3603, 2.6775, 2.6033, 2.3234, 2.1785, 2.7218], + device='cuda:0'), covar=tensor([0.0921, 0.1395, 0.1320, 0.1017, 0.1230, 0.0489, 0.1376, 0.0619], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0358, 0.0314, 0.0256, 0.0306, 0.0254, 0.0315, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:32:47,562 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:47,633 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.0337, 2.8482, 2.2881, 2.2293, 2.0759, 2.5953, 1.0316, 2.1040], + device='cuda:0'), covar=tensor([0.0704, 0.0676, 0.0726, 0.1105, 0.1102, 0.1024, 0.1522, 0.1082], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0360, 0.0364, 0.0387, 0.0465, 0.0393, 0.0341, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 02:32:57,530 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from 
training. Duration: 33.038875 +2023-04-03 02:33:06,255 INFO [train.py:903] (0/4) Epoch 24, batch 4350, loss[loss=0.1817, simple_loss=0.2728, pruned_loss=0.04528, over 19766.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.284, pruned_loss=0.06135, over 3831090.17 frames. ], batch size: 54, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:33:19,475 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1677, 1.9213, 2.0775, 2.9492, 1.9545, 2.4279, 2.5853, 2.2920], + device='cuda:0'), covar=tensor([0.0787, 0.0884, 0.0908, 0.0665, 0.0899, 0.0734, 0.0805, 0.0620], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0238, 0.0224, 0.0212, 0.0188, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-03 02:33:37,810 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:33:38,146 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.26 vs. limit=5.0 +2023-04-03 02:34:07,007 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:08,990 INFO [train.py:903] (0/4) Epoch 24, batch 4400, loss[loss=0.166, simple_loss=0.2449, pruned_loss=0.04351, over 19374.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2848, pruned_loss=0.06177, over 3844648.39 frames. ], batch size: 47, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:34:15,586 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.009e+02 6.093e+02 7.233e+02 1.222e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 02:34:31,964 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 02:34:41,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 02:34:50,943 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:35:10,538 INFO [train.py:903] (0/4) Epoch 24, batch 4450, loss[loss=0.2939, simple_loss=0.3513, pruned_loss=0.1183, over 13206.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2853, pruned_loss=0.06263, over 3833338.48 frames. ], batch size: 137, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:35:14,645 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 02:35:49,116 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0029, 1.9459, 1.8679, 1.6194, 1.6194, 1.6189, 0.3735, 0.9237], + device='cuda:0'), covar=tensor([0.0621, 0.0650, 0.0426, 0.0725, 0.1212, 0.0854, 0.1419, 0.1117], + device='cuda:0'), in_proj_covar=tensor([0.0359, 0.0359, 0.0361, 0.0385, 0.0463, 0.0391, 0.0339, 0.0345], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 02:36:10,897 INFO [train.py:903] (0/4) Epoch 24, batch 4500, loss[loss=0.2223, simple_loss=0.2991, pruned_loss=0.07278, over 19606.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2862, pruned_loss=0.06344, over 3832479.34 frames. 
], batch size: 57, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:17,773 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.067e+02 5.206e+02 6.185e+02 8.214e+02 2.130e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 02:36:48,170 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8853, 1.7139, 1.4828, 1.8702, 1.5788, 1.5743, 1.5432, 1.7655], + device='cuda:0'), covar=tensor([0.1246, 0.1532, 0.1835, 0.1234, 0.1516, 0.0702, 0.1646, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0358, 0.0315, 0.0258, 0.0307, 0.0255, 0.0317, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:36:53,344 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:36:57,977 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5689, 2.2823, 1.7719, 1.5336, 2.0488, 1.4143, 1.3309, 1.9862], + device='cuda:0'), covar=tensor([0.1158, 0.0849, 0.1088, 0.0961, 0.0602, 0.1447, 0.0819, 0.0511], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0317, 0.0338, 0.0267, 0.0248, 0.0341, 0.0292, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:37:10,263 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:12,187 INFO [train.py:903] (0/4) Epoch 24, batch 4550, loss[loss=0.2093, simple_loss=0.2858, pruned_loss=0.06638, over 19590.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2862, pruned_loss=0.06318, over 3833612.08 frames. ], batch size: 52, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:37:20,135 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 02:37:34,973 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5761, 1.6062, 1.8389, 1.8277, 2.6592, 2.4203, 2.8188, 1.1789], + device='cuda:0'), covar=tensor([0.2454, 0.4303, 0.2684, 0.1872, 0.1579, 0.2036, 0.1508, 0.4504], + device='cuda:0'), in_proj_covar=tensor([0.0546, 0.0656, 0.0732, 0.0497, 0.0629, 0.0540, 0.0667, 0.0560], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 02:37:44,726 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 02:38:01,460 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:38:15,061 INFO [train.py:903] (0/4) Epoch 24, batch 4600, loss[loss=0.1724, simple_loss=0.2538, pruned_loss=0.04548, over 19748.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06306, over 3828992.01 frames. 
], batch size: 45, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:38:21,970 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.894e+02 5.872e+02 7.852e+02 1.807e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 02:38:33,827 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:15,601 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:16,434 INFO [train.py:903] (0/4) Epoch 24, batch 4650, loss[loss=0.2078, simple_loss=0.2989, pruned_loss=0.05837, over 19326.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2857, pruned_loss=0.06299, over 3828014.41 frames. ], batch size: 66, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:39:22,575 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:23,675 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1485, 1.2921, 1.8079, 1.3360, 2.8075, 3.7026, 3.4567, 3.9518], + device='cuda:0'), covar=tensor([0.1616, 0.3764, 0.3158, 0.2396, 0.0557, 0.0193, 0.0196, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0326, 0.0356, 0.0266, 0.0246, 0.0191, 0.0217, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 02:39:33,562 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 02:39:33,878 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:44,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 02:39:53,487 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:57,777 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3371, 2.0531, 1.6454, 1.3840, 1.8910, 1.3396, 1.2355, 1.8090], + device='cuda:0'), covar=tensor([0.0893, 0.0796, 0.1203, 0.0905, 0.0568, 0.1363, 0.0749, 0.0479], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0315, 0.0338, 0.0267, 0.0247, 0.0340, 0.0291, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:40:19,300 INFO [train.py:903] (0/4) Epoch 24, batch 4700, loss[loss=0.1932, simple_loss=0.2691, pruned_loss=0.05861, over 19758.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2863, pruned_loss=0.06304, over 3822364.87 frames. ], batch size: 51, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:40:26,425 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 5.147e+02 6.134e+02 7.606e+02 1.537e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-03 02:40:39,899 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 02:40:43,331 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:41:20,593 INFO [train.py:903] (0/4) Epoch 24, batch 4750, loss[loss=0.2199, simple_loss=0.3007, pruned_loss=0.06957, over 19538.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06296, over 3823039.21 frames. 
], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:41:57,340 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:09,924 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:23,972 INFO [train.py:903] (0/4) Epoch 24, batch 4800, loss[loss=0.1996, simple_loss=0.2785, pruned_loss=0.06038, over 19656.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2859, pruned_loss=0.06288, over 3826928.84 frames. ], batch size: 53, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:42:31,545 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.225e+02 6.101e+02 7.305e+02 1.695e+03, threshold=1.220e+03, percent-clipped=2.0 +2023-04-03 02:43:04,826 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:43:21,612 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0456, 1.7197, 1.8974, 1.6390, 4.5274, 1.1130, 2.5589, 4.9886], + device='cuda:0'), covar=tensor([0.0476, 0.2659, 0.2750, 0.2041, 0.0736, 0.2663, 0.1529, 0.0152], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0368, 0.0390, 0.0350, 0.0373, 0.0352, 0.0387, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:43:25,943 INFO [train.py:903] (0/4) Epoch 24, batch 4850, loss[loss=0.2109, simple_loss=0.289, pruned_loss=0.06637, over 19691.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.286, pruned_loss=0.06328, over 3823852.31 frames. ], batch size: 53, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:43:50,704 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 02:44:11,980 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 02:44:16,558 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 02:44:17,700 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 02:44:19,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161937.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:27,632 INFO [train.py:903] (0/4) Epoch 24, batch 4900, loss[loss=0.2119, simple_loss=0.2996, pruned_loss=0.06206, over 19289.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2873, pruned_loss=0.06407, over 3818616.94 frames. ], batch size: 66, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:44:27,659 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 02:44:33,806 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:34,692 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 5.435e+02 6.496e+02 8.047e+02 2.666e+03, threshold=1.299e+03, percent-clipped=6.0 +2023-04-03 02:44:47,022 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-03 02:44:53,262 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:54,340 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7191, 4.2640, 4.4857, 4.4784, 1.7731, 4.2254, 3.7003, 4.1886], + device='cuda:0'), covar=tensor([0.1831, 0.0967, 0.0629, 0.0715, 0.6025, 0.1050, 0.0676, 0.1177], + device='cuda:0'), in_proj_covar=tensor([0.0794, 0.0759, 0.0966, 0.0848, 0.0847, 0.0731, 0.0578, 0.0894], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 02:45:05,687 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:23,760 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:28,097 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4938, 1.2945, 1.2916, 1.5169, 1.2774, 1.3094, 1.2499, 1.4238], + device='cuda:0'), covar=tensor([0.0852, 0.1131, 0.1078, 0.0695, 0.1007, 0.0473, 0.1212, 0.0587], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0357, 0.0311, 0.0255, 0.0305, 0.0253, 0.0314, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:45:28,866 INFO [train.py:903] (0/4) Epoch 24, batch 4950, loss[loss=0.2689, simple_loss=0.3266, pruned_loss=0.1056, over 13276.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06353, over 3816586.29 frames. ], batch size: 137, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:45:36,805 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-162000.pt +2023-04-03 02:45:48,655 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 02:46:12,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 02:46:33,469 INFO [train.py:903] (0/4) Epoch 24, batch 5000, loss[loss=0.2108, simple_loss=0.3014, pruned_loss=0.06009, over 19535.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2858, pruned_loss=0.06281, over 3831986.96 frames. ], batch size: 54, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:46:41,315 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.589e+02 5.769e+02 7.322e+02 1.477e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-03 02:46:44,817 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 02:46:56,731 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 02:47:35,227 INFO [train.py:903] (0/4) Epoch 24, batch 5050, loss[loss=0.2019, simple_loss=0.2922, pruned_loss=0.05581, over 19772.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06335, over 3822755.99 frames. 
], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:47:36,638 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:47:40,129 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9910, 1.5591, 1.8669, 1.7708, 4.4690, 1.3433, 2.5923, 4.8835], + device='cuda:0'), covar=tensor([0.0394, 0.2734, 0.2778, 0.1905, 0.0684, 0.2377, 0.1453, 0.0160], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0368, 0.0388, 0.0350, 0.0372, 0.0352, 0.0386, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:48:14,933 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 02:48:25,483 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:37,725 INFO [train.py:903] (0/4) Epoch 24, batch 5100, loss[loss=0.1698, simple_loss=0.2595, pruned_loss=0.03998, over 19398.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.06251, over 3823883.19 frames. ], batch size: 48, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:48:44,637 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.540e+02 5.777e+02 7.463e+02 1.637e+03, threshold=1.155e+03, percent-clipped=6.0 +2023-04-03 02:48:51,569 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 02:48:55,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 02:48:55,562 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:58,697 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 02:49:18,933 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:39,588 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162193.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:40,373 INFO [train.py:903] (0/4) Epoch 24, batch 5150, loss[loss=0.2099, simple_loss=0.2987, pruned_loss=0.06055, over 19727.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2854, pruned_loss=0.06242, over 3810137.29 frames. ], batch size: 63, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:49:57,182 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 02:50:11,316 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:31,572 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 02:50:43,109 INFO [train.py:903] (0/4) Epoch 24, batch 5200, loss[loss=0.2061, simple_loss=0.2977, pruned_loss=0.05722, over 19686.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2856, pruned_loss=0.06234, over 3798925.77 frames. 
], batch size: 59, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:50:50,121 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:51,036 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.774e+02 5.644e+02 7.681e+02 1.514e+03, threshold=1.129e+03, percent-clipped=4.0 +2023-04-03 02:50:59,707 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 02:51:44,065 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 02:51:44,346 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:51:46,282 INFO [train.py:903] (0/4) Epoch 24, batch 5250, loss[loss=0.1885, simple_loss=0.274, pruned_loss=0.05154, over 19730.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06261, over 3804167.47 frames. ], batch size: 51, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:14,368 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1872, 1.4268, 1.6926, 1.3410, 2.7987, 1.1168, 2.2261, 3.1688], + device='cuda:0'), covar=tensor([0.0575, 0.2622, 0.2543, 0.1841, 0.0712, 0.2315, 0.1147, 0.0288], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0368, 0.0389, 0.0350, 0.0373, 0.0353, 0.0386, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:52:50,082 INFO [train.py:903] (0/4) Epoch 24, batch 5300, loss[loss=0.2209, simple_loss=0.3158, pruned_loss=0.06299, over 19103.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.0625, over 3784817.43 frames. ], batch size: 69, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:57,094 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.862e+02 5.825e+02 7.901e+02 2.284e+03, threshold=1.165e+03, percent-clipped=8.0 +2023-04-03 02:53:08,418 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 02:53:49,890 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-03 02:53:51,566 INFO [train.py:903] (0/4) Epoch 24, batch 5350, loss[loss=0.1784, simple_loss=0.2563, pruned_loss=0.05028, over 19755.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2869, pruned_loss=0.06293, over 3781588.47 frames. ], batch size: 48, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:54:28,280 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 02:54:47,996 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:54:54,720 INFO [train.py:903] (0/4) Epoch 24, batch 5400, loss[loss=0.2071, simple_loss=0.2872, pruned_loss=0.06355, over 19604.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.287, pruned_loss=0.06271, over 3789099.73 frames. 
], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:55:02,634 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.752e+02 4.703e+02 6.237e+02 7.666e+02 1.372e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 02:55:16,617 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6150, 1.1953, 1.2759, 1.4754, 1.0907, 1.3694, 1.2698, 1.4770], + device='cuda:0'), covar=tensor([0.1161, 0.1280, 0.1640, 0.1151, 0.1392, 0.0642, 0.1550, 0.0883], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0359, 0.0312, 0.0256, 0.0305, 0.0255, 0.0315, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:55:56,870 INFO [train.py:903] (0/4) Epoch 24, batch 5450, loss[loss=0.1956, simple_loss=0.2851, pruned_loss=0.053, over 19628.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2869, pruned_loss=0.06261, over 3783044.59 frames. ], batch size: 57, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:56:40,384 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:00,250 INFO [train.py:903] (0/4) Epoch 24, batch 5500, loss[loss=0.2172, simple_loss=0.3029, pruned_loss=0.06571, over 19518.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06206, over 3795118.41 frames. ], batch size: 56, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:57:05,406 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:07,330 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.077e+02 6.464e+02 7.861e+02 1.317e+03, threshold=1.293e+03, percent-clipped=1.0 +2023-04-03 02:57:12,064 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:24,996 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 02:57:36,134 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:01,088 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:02,007 INFO [train.py:903] (0/4) Epoch 24, batch 5550, loss[loss=0.2535, simple_loss=0.3245, pruned_loss=0.09121, over 18127.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06209, over 3809985.93 frames. ], batch size: 83, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:58:08,905 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 02:58:25,391 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-03 02:58:29,652 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7494, 2.5361, 2.2781, 2.6579, 2.3330, 2.3159, 2.1478, 2.7594], + device='cuda:0'), covar=tensor([0.0979, 0.1579, 0.1494, 0.1088, 0.1491, 0.0529, 0.1482, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0359, 0.0314, 0.0257, 0.0305, 0.0255, 0.0316, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 02:58:59,596 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-03 02:59:02,952 INFO [train.py:903] (0/4) Epoch 24, batch 5600, loss[loss=0.2524, simple_loss=0.3228, pruned_loss=0.09104, over 19679.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2868, pruned_loss=0.0632, over 3804969.05 frames. ], batch size: 59, lr: 3.40e-03, grad_scale: 16.0 +2023-04-03 02:59:12,271 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 5.034e+02 5.949e+02 8.933e+02 2.100e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 02:59:52,043 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8992, 1.6341, 1.5292, 1.9536, 1.5345, 1.6639, 1.5940, 1.7589], + device='cuda:0'), covar=tensor([0.1091, 0.1485, 0.1597, 0.1103, 0.1478, 0.0593, 0.1406, 0.0820], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0359, 0.0313, 0.0257, 0.0305, 0.0255, 0.0315, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:00:07,503 INFO [train.py:903] (0/4) Epoch 24, batch 5650, loss[loss=0.1652, simple_loss=0.2531, pruned_loss=0.0386, over 19825.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.06269, over 3803291.74 frames. ], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:00:24,947 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:00:55,015 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 03:01:09,541 INFO [train.py:903] (0/4) Epoch 24, batch 5700, loss[loss=0.1943, simple_loss=0.284, pruned_loss=0.05232, over 19616.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2876, pruned_loss=0.0633, over 3803069.58 frames. ], batch size: 57, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:01:17,479 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 4.704e+02 5.740e+02 7.100e+02 1.656e+03, threshold=1.148e+03, percent-clipped=4.0 +2023-04-03 03:01:20,488 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 03:02:11,497 INFO [train.py:903] (0/4) Epoch 24, batch 5750, loss[loss=0.2887, simple_loss=0.3487, pruned_loss=0.1143, over 14015.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.286, pruned_loss=0.06262, over 3804765.58 frames. ], batch size: 136, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:02:13,882 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 03:02:22,211 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 03:02:28,833 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 03:02:31,393 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:00,387 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=5.11 vs. limit=5.0 +2023-04-03 03:03:03,442 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:13,922 INFO [train.py:903] (0/4) Epoch 24, batch 5800, loss[loss=0.1902, simple_loss=0.2766, pruned_loss=0.05188, over 19600.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2852, pruned_loss=0.06167, over 3819647.34 frames. 
], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:03:15,410 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162845.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:22,908 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 4.857e+02 6.549e+02 8.249e+02 1.553e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-03 03:03:51,948 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:04:02,697 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 03:04:16,911 INFO [train.py:903] (0/4) Epoch 24, batch 5850, loss[loss=0.1827, simple_loss=0.2748, pruned_loss=0.04525, over 19658.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06189, over 3811469.87 frames. ], batch size: 55, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:04:28,656 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4413, 2.1044, 1.5460, 1.4001, 1.9755, 1.2960, 1.3105, 1.8655], + device='cuda:0'), covar=tensor([0.1049, 0.0810, 0.1211, 0.0914, 0.0554, 0.1345, 0.0792, 0.0484], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0313, 0.0336, 0.0266, 0.0246, 0.0338, 0.0290, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:04:39,290 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1419, 1.9615, 1.8145, 2.2015, 1.9104, 1.8243, 1.7452, 2.0917], + device='cuda:0'), covar=tensor([0.1028, 0.1478, 0.1518, 0.1050, 0.1371, 0.0568, 0.1429, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0357, 0.0313, 0.0256, 0.0304, 0.0254, 0.0315, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:05:20,442 INFO [train.py:903] (0/4) Epoch 24, batch 5900, loss[loss=0.162, simple_loss=0.238, pruned_loss=0.04303, over 16053.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06173, over 3810729.79 frames. ], batch size: 35, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:05:26,353 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 03:05:28,683 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 4.550e+02 5.410e+02 6.827e+02 1.573e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 03:05:37,955 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1407, 5.5746, 3.0312, 4.8854, 1.0897, 5.7197, 5.5382, 5.6885], + device='cuda:0'), covar=tensor([0.0339, 0.0783, 0.1925, 0.0707, 0.4022, 0.0493, 0.0759, 0.0981], + device='cuda:0'), in_proj_covar=tensor([0.0512, 0.0418, 0.0502, 0.0352, 0.0401, 0.0441, 0.0435, 0.0467], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:05:43,745 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:05:44,422 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 03:06:15,204 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162988.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:16,454 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:21,816 INFO [train.py:903] (0/4) Epoch 24, batch 5950, loss[loss=0.1789, simple_loss=0.2609, pruned_loss=0.04842, over 19853.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06179, over 3814077.50 frames. ], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,000 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6024, 1.5499, 1.5285, 2.0599, 1.5700, 1.8904, 1.8496, 1.6451], + device='cuda:0'), covar=tensor([0.0849, 0.0928, 0.0998, 0.0732, 0.0859, 0.0741, 0.0876, 0.0715], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0239, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 03:07:22,795 INFO [train.py:903] (0/4) Epoch 24, batch 6000, loss[loss=0.2168, simple_loss=0.3152, pruned_loss=0.0592, over 19681.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.286, pruned_loss=0.06209, over 3822399.26 frames. ], batch size: 58, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,796 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 03:07:35,178 INFO [train.py:937] (0/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2679, pruned_loss=0.03436, over 944034.00 frames. +2023-04-03 03:07:35,179 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 03:07:43,477 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.276e+02 6.488e+02 8.005e+02 1.643e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-03 03:08:35,913 INFO [train.py:903] (0/4) Epoch 24, batch 6050, loss[loss=0.204, simple_loss=0.2923, pruned_loss=0.05782, over 19593.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2869, pruned_loss=0.06249, over 3824333.00 frames. ], batch size: 61, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:08:53,220 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:09:37,874 INFO [train.py:903] (0/4) Epoch 24, batch 6100, loss[loss=0.1915, simple_loss=0.2638, pruned_loss=0.05959, over 19461.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2862, pruned_loss=0.06199, over 3834149.50 frames. ], batch size: 49, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:09:45,776 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 5.048e+02 5.892e+02 7.607e+02 1.565e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 03:10:14,677 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 03:10:33,896 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163189.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:10:40,175 INFO [train.py:903] (0/4) Epoch 24, batch 6150, loss[loss=0.2408, simple_loss=0.3153, pruned_loss=0.08322, over 18365.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2867, pruned_loss=0.06226, over 3839261.05 frames. ], batch size: 83, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:10,597 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 03:11:43,666 INFO [train.py:903] (0/4) Epoch 24, batch 6200, loss[loss=0.2611, simple_loss=0.3279, pruned_loss=0.09714, over 13775.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2858, pruned_loss=0.06203, over 3821654.46 frames. ], batch size: 137, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:44,084 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:11:51,416 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.538e+02 5.825e+02 7.342e+02 1.276e+03, threshold=1.165e+03, percent-clipped=3.0 +2023-04-03 03:12:00,532 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163258.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:14,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163269.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:44,000 INFO [train.py:903] (0/4) Epoch 24, batch 6250, loss[loss=0.2335, simple_loss=0.3079, pruned_loss=0.07953, over 13411.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2863, pruned_loss=0.06248, over 3801753.87 frames. ], batch size: 136, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:12:56,463 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:13:11,282 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 03:13:32,263 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0628, 5.4933, 3.0309, 4.6613, 1.1173, 5.5918, 5.4426, 5.6234], + device='cuda:0'), covar=tensor([0.0409, 0.0854, 0.1973, 0.0849, 0.4352, 0.0541, 0.0834, 0.0812], + device='cuda:0'), in_proj_covar=tensor([0.0511, 0.0418, 0.0501, 0.0351, 0.0401, 0.0442, 0.0436, 0.0466], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:13:45,520 INFO [train.py:903] (0/4) Epoch 24, batch 6300, loss[loss=0.2061, simple_loss=0.2931, pruned_loss=0.05953, over 19783.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2852, pruned_loss=0.06214, over 3794699.67 frames. ], batch size: 56, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:13:54,810 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.008e+02 6.887e+02 8.761e+02 2.377e+03, threshold=1.377e+03, percent-clipped=3.0 +2023-04-03 03:14:09,404 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.38 vs. limit=5.0 +2023-04-03 03:14:48,465 INFO [train.py:903] (0/4) Epoch 24, batch 6350, loss[loss=0.2545, simple_loss=0.3244, pruned_loss=0.09229, over 18762.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2857, pruned_loss=0.06251, over 3803483.62 frames. 
], batch size: 74, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:03,592 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.2431, 5.1395, 6.0294, 6.0510, 1.9876, 5.6307, 4.7902, 5.7196], + device='cuda:0'), covar=tensor([0.1692, 0.0794, 0.0586, 0.0595, 0.6462, 0.0735, 0.0609, 0.1181], + device='cuda:0'), in_proj_covar=tensor([0.0805, 0.0764, 0.0973, 0.0849, 0.0852, 0.0737, 0.0581, 0.0901], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 03:15:50,634 INFO [train.py:903] (0/4) Epoch 24, batch 6400, loss[loss=0.27, simple_loss=0.3325, pruned_loss=0.1038, over 13083.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2851, pruned_loss=0.06268, over 3790686.26 frames. ], batch size: 136, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:59,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 4.869e+02 5.854e+02 7.378e+02 1.563e+03, threshold=1.171e+03, percent-clipped=2.0 +2023-04-03 03:16:00,310 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:16:52,285 INFO [train.py:903] (0/4) Epoch 24, batch 6450, loss[loss=0.1994, simple_loss=0.2861, pruned_loss=0.05632, over 19761.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2854, pruned_loss=0.06253, over 3792167.86 frames. ], batch size: 56, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:16:59,802 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.41 vs. limit=5.0 +2023-04-03 03:17:34,394 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 03:17:37,158 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2460, 3.6355, 2.1863, 2.1267, 3.2118, 1.9716, 1.5204, 2.4610], + device='cuda:0'), covar=tensor([0.1305, 0.0652, 0.1107, 0.0917, 0.0613, 0.1228, 0.1018, 0.0659], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0315, 0.0336, 0.0267, 0.0247, 0.0341, 0.0290, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:17:53,147 INFO [train.py:903] (0/4) Epoch 24, batch 6500, loss[loss=0.2436, simple_loss=0.3175, pruned_loss=0.08482, over 19345.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2841, pruned_loss=0.06191, over 3801110.68 frames. ], batch size: 70, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:56,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 03:18:01,367 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.063e+02 4.704e+02 6.024e+02 8.376e+02 1.457e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 03:18:13,945 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163560.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:15,832 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-03 03:18:22,395 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:33,117 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3422, 2.2442, 2.0816, 1.8906, 1.7391, 1.8872, 0.6255, 1.3218], + device='cuda:0'), covar=tensor([0.0663, 0.0638, 0.0510, 0.0926, 0.1209, 0.1005, 0.1417, 0.1097], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0357, 0.0361, 0.0384, 0.0461, 0.0391, 0.0338, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 03:18:44,284 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:55,197 INFO [train.py:903] (0/4) Epoch 24, batch 6550, loss[loss=0.1951, simple_loss=0.2755, pruned_loss=0.05735, over 19745.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2846, pruned_loss=0.06202, over 3810160.43 frames. ], batch size: 51, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:19:05,566 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163602.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:19:38,249 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7185, 1.7211, 1.6920, 1.4277, 1.3924, 1.4055, 0.3314, 0.7164], + device='cuda:0'), covar=tensor([0.0650, 0.0641, 0.0383, 0.0697, 0.1244, 0.0810, 0.1317, 0.1112], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0357, 0.0361, 0.0384, 0.0462, 0.0391, 0.0339, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 03:19:57,491 INFO [train.py:903] (0/4) Epoch 24, batch 6600, loss[loss=0.2176, simple_loss=0.2986, pruned_loss=0.06835, over 19772.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06228, over 3815215.91 frames. ], batch size: 54, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:20:05,514 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.011e+02 6.018e+02 7.633e+02 1.393e+03, threshold=1.204e+03, percent-clipped=8.0 +2023-04-03 03:20:57,660 INFO [train.py:903] (0/4) Epoch 24, batch 6650, loss[loss=0.1685, simple_loss=0.2507, pruned_loss=0.04318, over 19486.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2848, pruned_loss=0.06224, over 3825566.36 frames. ], batch size: 49, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:21:22,433 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3716, 1.4855, 1.7784, 1.7227, 2.9840, 4.5344, 4.3390, 5.1407], + device='cuda:0'), covar=tensor([0.1761, 0.4756, 0.4439, 0.2350, 0.0671, 0.0235, 0.0219, 0.0213], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0326, 0.0356, 0.0266, 0.0245, 0.0190, 0.0216, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 03:21:25,781 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163717.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:21:58,179 INFO [train.py:903] (0/4) Epoch 24, batch 6700, loss[loss=0.1868, simple_loss=0.2804, pruned_loss=0.04653, over 19676.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2861, pruned_loss=0.0629, over 3817060.10 frames. 
], batch size: 59, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:22:08,769 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.744e+02 6.010e+02 8.153e+02 1.593e+03, threshold=1.202e+03, percent-clipped=6.0 +2023-04-03 03:22:45,666 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4048, 2.1263, 1.6338, 1.4615, 1.9577, 1.3542, 1.3355, 1.7771], + device='cuda:0'), covar=tensor([0.0987, 0.0838, 0.1081, 0.0843, 0.0575, 0.1257, 0.0704, 0.0507], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0318, 0.0340, 0.0270, 0.0251, 0.0344, 0.0293, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:22:57,589 INFO [train.py:903] (0/4) Epoch 24, batch 6750, loss[loss=0.2449, simple_loss=0.3211, pruned_loss=0.08436, over 13650.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06334, over 3809634.63 frames. ], batch size: 137, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:23:30,574 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:23:38,381 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1071, 1.2597, 1.7549, 1.2822, 2.7650, 3.6609, 3.3635, 3.8941], + device='cuda:0'), covar=tensor([0.1769, 0.3947, 0.3385, 0.2508, 0.0600, 0.0207, 0.0213, 0.0271], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0326, 0.0356, 0.0266, 0.0246, 0.0190, 0.0216, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 03:23:53,912 INFO [train.py:903] (0/4) Epoch 24, batch 6800, loss[loss=0.1805, simple_loss=0.2527, pruned_loss=0.05417, over 19358.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2863, pruned_loss=0.06301, over 3802862.34 frames. ], batch size: 47, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:23:58,806 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:24:03,010 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.890e+02 5.869e+02 7.347e+02 2.478e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 03:24:24,224 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-24.pt +2023-04-03 03:24:39,503 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 03:24:40,522 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 03:24:42,797 INFO [train.py:903] (0/4) Epoch 25, batch 0, loss[loss=0.2091, simple_loss=0.2861, pruned_loss=0.06607, over 19592.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2861, pruned_loss=0.06607, over 19592.00 frames. ], batch size: 50, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:24:42,798 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 03:24:54,386 INFO [train.py:937] (0/4) Epoch 25, validation: loss=0.1672, simple_loss=0.2675, pruned_loss=0.03346, over 944034.00 frames. +2023-04-03 03:24:54,387 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 03:25:06,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-03 03:25:40,910 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1253, 1.1915, 1.4440, 1.3955, 2.7144, 1.1302, 2.1869, 3.0762], + device='cuda:0'), covar=tensor([0.0555, 0.2967, 0.2919, 0.1845, 0.0755, 0.2332, 0.1225, 0.0326], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0368, 0.0391, 0.0347, 0.0374, 0.0352, 0.0386, 0.0405], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:25:57,056 INFO [train.py:903] (0/4) Epoch 25, batch 50, loss[loss=0.2005, simple_loss=0.2768, pruned_loss=0.0621, over 19738.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2838, pruned_loss=0.05948, over 874197.55 frames. ], batch size: 47, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:26:35,506 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.575e+02 5.589e+02 7.102e+02 2.434e+03, threshold=1.118e+03, percent-clipped=5.0 +2023-04-03 03:26:36,710 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 03:27:00,593 INFO [train.py:903] (0/4) Epoch 25, batch 100, loss[loss=0.1933, simple_loss=0.2805, pruned_loss=0.05301, over 19682.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.06016, over 1537986.87 frames. ], batch size: 55, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:27:03,083 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163973.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:15,423 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 03:27:33,554 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:36,405 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-164000.pt +2023-04-03 03:28:05,187 INFO [train.py:903] (0/4) Epoch 25, batch 150, loss[loss=0.2203, simple_loss=0.2982, pruned_loss=0.0712, over 19606.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05987, over 2060321.41 frames. ], batch size: 61, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:28:42,973 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 5.520e+02 6.265e+02 7.455e+02 1.438e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-03 03:29:06,837 INFO [train.py:903] (0/4) Epoch 25, batch 200, loss[loss=0.248, simple_loss=0.3127, pruned_loss=0.09165, over 13811.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2853, pruned_loss=0.06127, over 2450766.21 frames. ], batch size: 136, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:29:10,500 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 03:29:53,425 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:30:10,589 INFO [train.py:903] (0/4) Epoch 25, batch 250, loss[loss=0.2188, simple_loss=0.3049, pruned_loss=0.06633, over 19315.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2853, pruned_loss=0.06138, over 2758831.46 frames. ], batch size: 66, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:30:41,660 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-03 03:30:48,736 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 4.875e+02 6.095e+02 7.386e+02 2.001e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 03:31:07,751 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 03:31:13,886 INFO [train.py:903] (0/4) Epoch 25, batch 300, loss[loss=0.1757, simple_loss=0.2528, pruned_loss=0.04926, over 19388.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2856, pruned_loss=0.06175, over 2992797.58 frames. ], batch size: 48, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:31:16,545 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:32:18,078 INFO [train.py:903] (0/4) Epoch 25, batch 350, loss[loss=0.1785, simple_loss=0.2552, pruned_loss=0.05091, over 19130.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.06135, over 3177646.30 frames. ], batch size: 42, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:32:25,190 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 03:32:55,000 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.912e+02 5.440e+02 6.552e+02 9.332e+02 1.789e+03, threshold=1.310e+03, percent-clipped=12.0 +2023-04-03 03:33:20,720 INFO [train.py:903] (0/4) Epoch 25, batch 400, loss[loss=0.1752, simple_loss=0.2495, pruned_loss=0.05041, over 19757.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06181, over 3325712.74 frames. ], batch size: 45, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:33:53,581 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3760, 2.2856, 2.2355, 2.5312, 2.2517, 2.0389, 2.1796, 2.3704], + device='cuda:0'), covar=tensor([0.0785, 0.1237, 0.1093, 0.0755, 0.1066, 0.0489, 0.1114, 0.0553], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0359, 0.0314, 0.0256, 0.0306, 0.0254, 0.0316, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:34:24,838 INFO [train.py:903] (0/4) Epoch 25, batch 450, loss[loss=0.1666, simple_loss=0.2428, pruned_loss=0.04515, over 19050.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2862, pruned_loss=0.06226, over 3441559.33 frames. ], batch size: 42, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:59,262 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 03:35:00,479 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 03:35:02,820 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 4.778e+02 5.577e+02 7.081e+02 1.680e+03, threshold=1.115e+03, percent-clipped=3.0 +2023-04-03 03:35:27,607 INFO [train.py:903] (0/4) Epoch 25, batch 500, loss[loss=0.2095, simple_loss=0.3002, pruned_loss=0.0594, over 18803.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2861, pruned_loss=0.0618, over 3534067.89 frames. ], batch size: 74, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:35:35,587 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:36:31,238 INFO [train.py:903] (0/4) Epoch 25, batch 550, loss[loss=0.1623, simple_loss=0.2433, pruned_loss=0.0407, over 19773.00 frames. 
], tot_loss[loss=0.2046, simple_loss=0.2858, pruned_loss=0.06171, over 3602080.81 frames. ], batch size: 45, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:36:49,752 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2584, 2.0097, 1.5605, 1.3232, 1.8273, 1.2131, 1.2496, 1.7531], + device='cuda:0'), covar=tensor([0.1014, 0.0808, 0.1081, 0.0854, 0.0585, 0.1366, 0.0717, 0.0488], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0339, 0.0268, 0.0250, 0.0342, 0.0292, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:37:09,261 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.163e+02 6.218e+02 7.641e+02 1.675e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 03:37:09,447 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:37:34,084 INFO [train.py:903] (0/4) Epoch 25, batch 600, loss[loss=0.1622, simple_loss=0.2399, pruned_loss=0.0423, over 19730.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2864, pruned_loss=0.06202, over 3644240.93 frames. ], batch size: 46, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:38:16,672 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 03:38:30,633 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:38:36,206 INFO [train.py:903] (0/4) Epoch 25, batch 650, loss[loss=0.1882, simple_loss=0.2697, pruned_loss=0.05333, over 19772.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06172, over 3700356.39 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:08,495 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6718, 1.5698, 1.5578, 2.2580, 1.6173, 1.8982, 1.9583, 1.7150], + device='cuda:0'), covar=tensor([0.0838, 0.0970, 0.1026, 0.0695, 0.0856, 0.0792, 0.0911, 0.0717], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0221, 0.0226, 0.0237, 0.0225, 0.0211, 0.0188, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:0') +2023-04-03 03:39:15,258 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.538e+02 5.913e+02 7.820e+02 1.600e+03, threshold=1.183e+03, percent-clipped=2.0 +2023-04-03 03:39:19,274 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0457, 1.7161, 2.0082, 1.6603, 4.5773, 1.3412, 2.8179, 5.0383], + device='cuda:0'), covar=tensor([0.0449, 0.2671, 0.2606, 0.2032, 0.0726, 0.2506, 0.1285, 0.0159], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0369, 0.0393, 0.0350, 0.0376, 0.0354, 0.0388, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:39:33,671 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:39:40,283 INFO [train.py:903] (0/4) Epoch 25, batch 700, loss[loss=0.199, simple_loss=0.2723, pruned_loss=0.06287, over 19487.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06243, over 3729932.62 frames. 
], batch size: 49, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:58,596 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:11,617 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:44,576 INFO [train.py:903] (0/4) Epoch 25, batch 750, loss[loss=0.2002, simple_loss=0.2858, pruned_loss=0.05726, over 18742.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06255, over 3748590.94 frames. ], batch size: 74, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:40:48,672 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4237, 1.5151, 1.8923, 1.7459, 2.7255, 2.3196, 2.9292, 1.2683], + device='cuda:0'), covar=tensor([0.2491, 0.4258, 0.2692, 0.1870, 0.1510, 0.2090, 0.1402, 0.4432], + device='cuda:0'), in_proj_covar=tensor([0.0545, 0.0660, 0.0734, 0.0497, 0.0633, 0.0542, 0.0669, 0.0561], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 03:40:57,977 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:41:21,491 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.157e+02 6.753e+02 8.219e+02 1.587e+03, threshold=1.351e+03, percent-clipped=10.0 +2023-04-03 03:41:28,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8648, 1.3043, 1.0480, 0.9535, 1.1504, 0.9692, 0.8793, 1.1970], + device='cuda:0'), covar=tensor([0.0618, 0.0850, 0.1091, 0.0771, 0.0580, 0.1303, 0.0629, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0340, 0.0269, 0.0251, 0.0345, 0.0293, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:41:48,143 INFO [train.py:903] (0/4) Epoch 25, batch 800, loss[loss=0.2109, simple_loss=0.2947, pruned_loss=0.06362, over 18184.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2863, pruned_loss=0.06274, over 3773209.44 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:41:52,830 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164676.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:42:03,217 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 03:42:50,193 INFO [train.py:903] (0/4) Epoch 25, batch 850, loss[loss=0.2078, simple_loss=0.2813, pruned_loss=0.06717, over 19571.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2869, pruned_loss=0.06298, over 3798596.80 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:42:50,358 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:29,203 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 4.928e+02 6.003e+02 7.929e+02 1.446e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 03:43:43,425 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164764.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:44,372 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-04-03 03:43:52,632 INFO [train.py:903] (0/4) Epoch 25, batch 900, loss[loss=0.2153, simple_loss=0.298, pruned_loss=0.06629, over 18220.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.06261, over 3807219.43 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:56,069 INFO [train.py:903] (0/4) Epoch 25, batch 950, loss[loss=0.2202, simple_loss=0.3037, pruned_loss=0.06836, over 17371.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06255, over 3809807.24 frames. ], batch size: 101, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:57,277 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 03:44:57,675 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:15,717 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:28,776 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:32,815 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.681e+02 5.489e+02 6.922e+02 1.500e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-04-03 03:45:43,195 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 03:46:00,730 INFO [train.py:903] (0/4) Epoch 25, batch 1000, loss[loss=0.183, simple_loss=0.2585, pruned_loss=0.05375, over 19731.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06188, over 3828146.59 frames. ], batch size: 45, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:46:19,869 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:53,157 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:55,239 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 03:47:03,437 INFO [train.py:903] (0/4) Epoch 25, batch 1050, loss[loss=0.2072, simple_loss=0.2887, pruned_loss=0.06283, over 19541.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06158, over 3834585.69 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:47:13,153 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:27,131 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:36,157 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 03:47:41,876 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.678e+02 5.688e+02 6.996e+02 1.189e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 03:47:45,538 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5983, 1.2352, 1.4210, 1.4489, 3.1890, 1.1008, 2.4077, 3.6492], + device='cuda:0'), covar=tensor([0.0493, 0.2871, 0.3071, 0.1948, 0.0739, 0.2615, 0.1335, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0369, 0.0394, 0.0349, 0.0377, 0.0353, 0.0388, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:48:06,293 INFO [train.py:903] (0/4) Epoch 25, batch 1100, loss[loss=0.1714, simple_loss=0.2486, pruned_loss=0.04707, over 19774.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2849, pruned_loss=0.06193, over 3822082.55 frames. ], batch size: 46, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:07,178 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165020.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:09,312 INFO [train.py:903] (0/4) Epoch 25, batch 1150, loss[loss=0.2076, simple_loss=0.2895, pruned_loss=0.06279, over 19689.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06226, over 3828964.59 frames. ], batch size: 59, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:40,387 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:48,275 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.101e+02 6.369e+02 8.453e+02 1.568e+03, threshold=1.274e+03, percent-clipped=10.0 +2023-04-03 03:49:52,159 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:14,743 INFO [train.py:903] (0/4) Epoch 25, batch 1200, loss[loss=0.1887, simple_loss=0.2781, pruned_loss=0.04968, over 19534.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2858, pruned_loss=0.06217, over 3828753.46 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:50:41,038 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:46,453 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 03:51:01,332 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:14,650 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:18,446 INFO [train.py:903] (0/4) Epoch 25, batch 1250, loss[loss=0.2737, simple_loss=0.3474, pruned_loss=0.09999, over 19693.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2859, pruned_loss=0.06244, over 3821646.60 frames. ], batch size: 59, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:51:34,100 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165135.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:41,182 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. 
limit=2.0 +2023-04-03 03:51:41,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7959, 4.2865, 4.5045, 4.5170, 1.7972, 4.2713, 3.7131, 4.2247], + device='cuda:0'), covar=tensor([0.1610, 0.0859, 0.0583, 0.0656, 0.5942, 0.0959, 0.0675, 0.1092], + device='cuda:0'), in_proj_covar=tensor([0.0802, 0.0766, 0.0974, 0.0850, 0.0856, 0.0736, 0.0579, 0.0903], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 03:51:41,845 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:49,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2565, 1.5131, 2.1003, 1.6601, 3.0667, 4.8701, 4.7290, 5.2567], + device='cuda:0'), covar=tensor([0.1647, 0.3580, 0.2993, 0.2250, 0.0614, 0.0188, 0.0149, 0.0178], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0329, 0.0359, 0.0267, 0.0248, 0.0191, 0.0218, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 03:51:57,604 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 4.844e+02 6.322e+02 8.270e+02 1.695e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-03 03:52:21,099 INFO [train.py:903] (0/4) Epoch 25, batch 1300, loss[loss=0.2779, simple_loss=0.3368, pruned_loss=0.1095, over 13324.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2855, pruned_loss=0.0627, over 3814423.01 frames. ], batch size: 137, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:23,837 INFO [train.py:903] (0/4) Epoch 25, batch 1350, loss[loss=0.2055, simple_loss=0.2991, pruned_loss=0.05593, over 19677.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2858, pruned_loss=0.06263, over 3816171.67 frames. ], batch size: 59, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:26,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:54:04,603 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.919e+02 6.162e+02 7.502e+02 1.875e+03, threshold=1.232e+03, percent-clipped=3.0 +2023-04-03 03:54:29,442 INFO [train.py:903] (0/4) Epoch 25, batch 1400, loss[loss=0.1867, simple_loss=0.2648, pruned_loss=0.05432, over 19386.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2847, pruned_loss=0.06204, over 3813012.03 frames. 
], batch size: 48, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:54:51,001 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6687, 1.2202, 1.5031, 1.4330, 3.2510, 1.2730, 2.4185, 3.6705], + device='cuda:0'), covar=tensor([0.0524, 0.2933, 0.2897, 0.1973, 0.0696, 0.2411, 0.1308, 0.0248], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0367, 0.0391, 0.0348, 0.0373, 0.0350, 0.0386, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:54:53,477 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0897, 1.9942, 1.9271, 1.7859, 1.5938, 1.8007, 0.6153, 1.1237], + device='cuda:0'), covar=tensor([0.0595, 0.0645, 0.0437, 0.0702, 0.1092, 0.0808, 0.1382, 0.1064], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0359, 0.0363, 0.0388, 0.0465, 0.0394, 0.0341, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 03:55:05,163 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:18,713 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:19,119 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:55:32,140 INFO [train.py:903] (0/4) Epoch 25, batch 1450, loss[loss=0.287, simple_loss=0.3551, pruned_loss=0.1095, over 19667.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2849, pruned_loss=0.06203, over 3833431.58 frames. ], batch size: 59, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:55:33,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 03:55:36,984 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:48,866 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:56:10,666 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.584e+02 6.100e+02 8.063e+02 1.924e+03, threshold=1.220e+03, percent-clipped=4.0 +2023-04-03 03:56:33,837 INFO [train.py:903] (0/4) Epoch 25, batch 1500, loss[loss=0.2214, simple_loss=0.3043, pruned_loss=0.06926, over 19606.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2852, pruned_loss=0.06263, over 3837382.11 frames. ], batch size: 61, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:56:58,480 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165391.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:30,072 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:36,920 INFO [train.py:903] (0/4) Epoch 25, batch 1550, loss[loss=0.1901, simple_loss=0.277, pruned_loss=0.0516, over 19442.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2853, pruned_loss=0.06238, over 3832316.97 frames. 
], batch size: 70, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:58:06,753 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5630, 2.1532, 1.6686, 1.5570, 2.0480, 1.4099, 1.4546, 1.9151], + device='cuda:0'), covar=tensor([0.1006, 0.0815, 0.1088, 0.0803, 0.0597, 0.1267, 0.0739, 0.0539], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0268, 0.0249, 0.0343, 0.0293, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 03:58:17,718 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.229e+02 4.957e+02 6.075e+02 6.924e+02 1.069e+03, threshold=1.215e+03, percent-clipped=0.0 +2023-04-03 03:58:37,746 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 03:58:42,876 INFO [train.py:903] (0/4) Epoch 25, batch 1600, loss[loss=0.1825, simple_loss=0.2573, pruned_loss=0.05381, over 19059.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2846, pruned_loss=0.06176, over 3826042.38 frames. ], batch size: 42, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:58:51,581 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:58:59,595 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:06,518 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 03:59:21,416 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:44,271 INFO [train.py:903] (0/4) Epoch 25, batch 1650, loss[loss=0.1879, simple_loss=0.2565, pruned_loss=0.05962, over 19771.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2854, pruned_loss=0.06214, over 3816672.11 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:00:23,731 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.584e+02 6.289e+02 7.621e+02 1.672e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-03 04:00:44,037 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:00:47,362 INFO [train.py:903] (0/4) Epoch 25, batch 1700, loss[loss=0.1865, simple_loss=0.2774, pruned_loss=0.0478, over 19531.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2858, pruned_loss=0.062, over 3823046.82 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:22,679 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:01:29,488 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 04:01:49,445 INFO [train.py:903] (0/4) Epoch 25, batch 1750, loss[loss=0.1746, simple_loss=0.253, pruned_loss=0.04817, over 19839.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06144, over 3831770.57 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:53,781 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. 
limit=5.0 +2023-04-03 04:02:29,221 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.689e+02 5.610e+02 7.383e+02 2.270e+03, threshold=1.122e+03, percent-clipped=4.0 +2023-04-03 04:02:53,399 INFO [train.py:903] (0/4) Epoch 25, batch 1800, loss[loss=0.1871, simple_loss=0.2752, pruned_loss=0.04953, over 19616.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2838, pruned_loss=0.06119, over 3826833.42 frames. ], batch size: 50, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:03:51,790 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 04:03:56,454 INFO [train.py:903] (0/4) Epoch 25, batch 1850, loss[loss=0.181, simple_loss=0.258, pruned_loss=0.052, over 19762.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2842, pruned_loss=0.06152, over 3832791.84 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:04:28,947 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 04:04:37,050 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 4.826e+02 5.776e+02 6.973e+02 2.376e+03, threshold=1.155e+03, percent-clipped=4.0 +2023-04-03 04:04:56,249 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:05:00,660 INFO [train.py:903] (0/4) Epoch 25, batch 1900, loss[loss=0.2002, simple_loss=0.2857, pruned_loss=0.05735, over 19535.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2853, pruned_loss=0.06172, over 3843818.71 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:05:16,888 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 04:05:21,705 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 04:05:35,620 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6330, 2.0326, 1.6126, 1.5924, 1.9613, 1.4656, 1.5157, 1.8677], + device='cuda:0'), covar=tensor([0.0899, 0.0741, 0.0846, 0.0740, 0.0498, 0.1043, 0.0640, 0.0473], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0318, 0.0339, 0.0267, 0.0249, 0.0342, 0.0293, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:05:48,080 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 04:05:48,787 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 04:06:02,683 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165821.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:06:03,530 INFO [train.py:903] (0/4) Epoch 25, batch 1950, loss[loss=0.1931, simple_loss=0.2805, pruned_loss=0.05285, over 19473.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2861, pruned_loss=0.06175, over 3832782.39 frames. 
], batch size: 49, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:06:44,206 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.671e+02 6.009e+02 7.545e+02 1.239e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 04:06:48,275 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165856.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:06:52,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 04:07:08,064 INFO [train.py:903] (0/4) Epoch 25, batch 2000, loss[loss=0.1947, simple_loss=0.2814, pruned_loss=0.05395, over 19669.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06122, over 3827769.66 frames. ], batch size: 55, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:07:20,308 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:00,825 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:07,766 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 04:08:12,462 INFO [train.py:903] (0/4) Epoch 25, batch 2050, loss[loss=0.1849, simple_loss=0.2733, pruned_loss=0.04826, over 19484.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06043, over 3839214.32 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:08:27,905 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 04:08:27,942 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 04:08:48,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 04:08:51,223 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 5.221e+02 6.289e+02 7.653e+02 1.251e+03, threshold=1.258e+03, percent-clipped=2.0 +2023-04-03 04:09:15,826 INFO [train.py:903] (0/4) Epoch 25, batch 2100, loss[loss=0.2357, simple_loss=0.3036, pruned_loss=0.08394, over 13287.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06088, over 3827890.95 frames. ], batch size: 136, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:09:44,597 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 04:09:51,704 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-166000.pt +2023-04-03 04:10:08,522 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 04:10:19,025 INFO [train.py:903] (0/4) Epoch 25, batch 2150, loss[loss=0.3077, simple_loss=0.3556, pruned_loss=0.1299, over 13578.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06192, over 3822926.10 frames. 
], batch size: 136, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:10:27,567 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:10:58,800 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.055e+02 6.614e+02 9.413e+02 1.694e+03, threshold=1.323e+03, percent-clipped=9.0 +2023-04-03 04:11:10,561 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8032, 1.9168, 2.1376, 2.3319, 1.8491, 2.2571, 2.1839, 1.9494], + device='cuda:0'), covar=tensor([0.3990, 0.3675, 0.1917, 0.2335, 0.3788, 0.2035, 0.4727, 0.3312], + device='cuda:0'), in_proj_covar=tensor([0.0917, 0.0991, 0.0729, 0.0937, 0.0895, 0.0829, 0.0854, 0.0793], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 04:11:21,696 INFO [train.py:903] (0/4) Epoch 25, batch 2200, loss[loss=0.2115, simple_loss=0.3015, pruned_loss=0.06071, over 19584.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2854, pruned_loss=0.06208, over 3817293.79 frames. ], batch size: 61, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:11:22,008 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166072.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:11:33,673 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8362, 3.3285, 3.3358, 3.3412, 1.3006, 3.2221, 2.7958, 3.1386], + device='cuda:0'), covar=tensor([0.1770, 0.1006, 0.0808, 0.0963, 0.5655, 0.1134, 0.0841, 0.1256], + device='cuda:0'), in_proj_covar=tensor([0.0796, 0.0758, 0.0968, 0.0843, 0.0840, 0.0730, 0.0573, 0.0892], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 04:11:38,417 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5854, 4.7296, 5.2713, 5.3110, 2.3527, 4.9790, 4.3284, 4.9949], + device='cuda:0'), covar=tensor([0.1662, 0.1504, 0.0580, 0.0630, 0.5508, 0.0863, 0.0612, 0.1056], + device='cuda:0'), in_proj_covar=tensor([0.0796, 0.0758, 0.0968, 0.0843, 0.0841, 0.0730, 0.0573, 0.0892], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 04:12:13,547 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166112.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:12:26,901 INFO [train.py:903] (0/4) Epoch 25, batch 2250, loss[loss=0.2234, simple_loss=0.3088, pruned_loss=0.06904, over 18198.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2847, pruned_loss=0.06159, over 3815916.23 frames. ], batch size: 83, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:13:04,906 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.777e+02 5.716e+02 7.657e+02 1.924e+03, threshold=1.143e+03, percent-clipped=2.0 +2023-04-03 04:13:21,657 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:21,809 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:31,175 INFO [train.py:903] (0/4) Epoch 25, batch 2300, loss[loss=0.2677, simple_loss=0.3312, pruned_loss=0.1021, over 13156.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.0623, over 3808727.49 frames. 
], batch size: 137, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:13:42,681 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 04:14:33,997 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5617, 2.6006, 2.2087, 2.7133, 2.6130, 2.2488, 2.0614, 2.6641], + device='cuda:0'), covar=tensor([0.0987, 0.1468, 0.1444, 0.0991, 0.1246, 0.0513, 0.1427, 0.0614], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0353, 0.0311, 0.0253, 0.0300, 0.0251, 0.0312, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:14:34,777 INFO [train.py:903] (0/4) Epoch 25, batch 2350, loss[loss=0.1752, simple_loss=0.2656, pruned_loss=0.04238, over 19668.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2856, pruned_loss=0.0622, over 3805042.49 frames. ], batch size: 53, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:14:41,258 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166227.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:09,852 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5522, 1.4472, 1.4301, 1.8548, 1.3747, 1.6369, 1.6636, 1.5905], + device='cuda:0'), covar=tensor([0.0785, 0.0867, 0.0923, 0.0582, 0.0900, 0.0772, 0.0885, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0226, 0.0239, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 04:15:14,901 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.104e+02 6.317e+02 8.219e+02 1.547e+03, threshold=1.263e+03, percent-clipped=3.0 +2023-04-03 04:15:17,155 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 04:15:33,842 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0677, 1.9918, 1.7704, 2.1851, 2.0036, 1.8224, 1.7222, 1.9812], + device='cuda:0'), covar=tensor([0.1027, 0.1409, 0.1398, 0.0923, 0.1242, 0.0542, 0.1440, 0.0683], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0354, 0.0311, 0.0253, 0.0300, 0.0252, 0.0312, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:15:34,630 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 04:15:38,221 INFO [train.py:903] (0/4) Epoch 25, batch 2400, loss[loss=0.2092, simple_loss=0.2908, pruned_loss=0.0638, over 19740.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06176, over 3826782.85 frames. 
], batch size: 63, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:15:48,887 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166280.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:55,262 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166284.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:08,740 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2519, 1.2048, 1.1924, 1.3667, 1.0223, 1.3981, 1.3476, 1.3043], + device='cuda:0'), covar=tensor([0.0909, 0.1002, 0.1082, 0.0696, 0.0863, 0.0808, 0.0797, 0.0784], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0238, 0.0226, 0.0212, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 04:16:25,813 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:41,763 INFO [train.py:903] (0/4) Epoch 25, batch 2450, loss[loss=0.1746, simple_loss=0.2449, pruned_loss=0.05211, over 19287.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.06184, over 3829941.20 frames. ], batch size: 44, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:16:46,705 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:17:20,232 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.023e+02 4.903e+02 5.916e+02 7.490e+02 1.353e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-04-03 04:17:44,924 INFO [train.py:903] (0/4) Epoch 25, batch 2500, loss[loss=0.1792, simple_loss=0.2519, pruned_loss=0.05331, over 19352.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06205, over 3828268.73 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:18:41,258 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166416.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:18:48,040 INFO [train.py:903] (0/4) Epoch 25, batch 2550, loss[loss=0.2455, simple_loss=0.3114, pruned_loss=0.08983, over 12844.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06321, over 3813350.53 frames. ], batch size: 135, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:18:49,968 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-04-03 04:19:28,549 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 5.112e+02 6.516e+02 8.109e+02 2.174e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-03 04:19:46,463 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 04:19:53,274 INFO [train.py:903] (0/4) Epoch 25, batch 2600, loss[loss=0.1986, simple_loss=0.286, pruned_loss=0.05564, over 19684.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06246, over 3834700.47 frames. 
], batch size: 59, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:20:08,593 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166483.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:29,561 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:40,153 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166508.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:41,192 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166509.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:58,304 INFO [train.py:903] (0/4) Epoch 25, batch 2650, loss[loss=0.1744, simple_loss=0.2659, pruned_loss=0.04148, over 19522.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2856, pruned_loss=0.06199, over 3830185.21 frames. ], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:21:05,449 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:10,230 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166531.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:21:17,603 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166536.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:19,732 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 04:21:32,613 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:38,180 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.612e+02 5.770e+02 6.756e+02 1.610e+03, threshold=1.154e+03, percent-clipped=1.0 +2023-04-03 04:21:48,104 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:57,434 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5832, 1.6707, 1.9134, 1.8615, 2.8104, 2.2818, 2.9451, 1.5950], + device='cuda:0'), covar=tensor([0.2484, 0.4164, 0.2706, 0.1882, 0.1500, 0.2318, 0.1528, 0.4308], + device='cuda:0'), in_proj_covar=tensor([0.0544, 0.0657, 0.0731, 0.0495, 0.0625, 0.0541, 0.0666, 0.0562], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 04:22:03,046 INFO [train.py:903] (0/4) Epoch 25, batch 2700, loss[loss=0.1877, simple_loss=0.2757, pruned_loss=0.04986, over 19610.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.06146, over 3830921.31 frames. ], batch size: 50, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:22:10,233 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7769, 1.6304, 1.5604, 1.7861, 1.5709, 1.5195, 1.5005, 1.6969], + device='cuda:0'), covar=tensor([0.0878, 0.1254, 0.1205, 0.0883, 0.1141, 0.0543, 0.1338, 0.0676], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0355, 0.0311, 0.0253, 0.0302, 0.0253, 0.0314, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:22:24,473 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. 
limit=2.0 +2023-04-03 04:22:42,846 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1081, 2.8471, 2.2492, 2.2658, 1.9836, 2.4749, 0.9119, 2.0300], + device='cuda:0'), covar=tensor([0.0706, 0.0691, 0.0722, 0.1219, 0.1317, 0.1195, 0.1615, 0.1223], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0360, 0.0363, 0.0388, 0.0467, 0.0393, 0.0341, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 04:22:47,693 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 04:23:06,176 INFO [train.py:903] (0/4) Epoch 25, batch 2750, loss[loss=0.2316, simple_loss=0.3161, pruned_loss=0.07352, over 17581.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2865, pruned_loss=0.06221, over 3819516.00 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:23:08,967 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:23:45,192 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.014e+02 4.918e+02 6.109e+02 7.546e+02 1.552e+03, threshold=1.222e+03, percent-clipped=5.0 +2023-04-03 04:23:46,641 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.8564, 5.3233, 3.2062, 4.5945, 1.3037, 5.4749, 5.2796, 5.4584], + device='cuda:0'), covar=tensor([0.0458, 0.0882, 0.1749, 0.0754, 0.3667, 0.0528, 0.0772, 0.1045], + device='cuda:0'), in_proj_covar=tensor([0.0520, 0.0421, 0.0507, 0.0355, 0.0404, 0.0447, 0.0440, 0.0472], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:24:04,554 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:24:05,267 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-03 04:24:07,634 INFO [train.py:903] (0/4) Epoch 25, batch 2800, loss[loss=0.2097, simple_loss=0.2882, pruned_loss=0.06556, over 19517.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2863, pruned_loss=0.06198, over 3828192.25 frames. ], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:25:04,291 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.1835, 5.2178, 5.9964, 6.0240, 2.0162, 5.6788, 4.6902, 5.6328], + device='cuda:0'), covar=tensor([0.1667, 0.0759, 0.0594, 0.0590, 0.6178, 0.0781, 0.0616, 0.1161], + device='cuda:0'), in_proj_covar=tensor([0.0801, 0.0764, 0.0973, 0.0852, 0.0850, 0.0738, 0.0578, 0.0902], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 04:25:11,690 INFO [train.py:903] (0/4) Epoch 25, batch 2850, loss[loss=0.2223, simple_loss=0.3041, pruned_loss=0.07027, over 18288.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.286, pruned_loss=0.06206, over 3820894.36 frames. ], batch size: 83, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:25:50,390 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.563e+02 5.950e+02 8.060e+02 1.987e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 04:25:50,650 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:11,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-03 04:26:14,215 INFO [train.py:903] (0/4) Epoch 25, batch 2900, loss[loss=0.2142, simple_loss=0.289, pruned_loss=0.06977, over 18417.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.287, pruned_loss=0.06271, over 3826306.52 frames. ], batch size: 84, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:26:25,423 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 04:26:27,330 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166782.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:29,658 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:33,239 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166787.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:26:43,675 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5363, 1.6794, 1.9917, 1.8572, 3.0494, 2.7220, 3.3683, 1.6048], + device='cuda:0'), covar=tensor([0.2698, 0.4438, 0.2945, 0.1963, 0.1713, 0.2158, 0.1777, 0.4432], + device='cuda:0'), in_proj_covar=tensor([0.0547, 0.0660, 0.0735, 0.0497, 0.0629, 0.0542, 0.0669, 0.0564], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 04:27:05,052 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166812.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:27:07,294 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2761, 2.1913, 2.1275, 1.9416, 1.7848, 1.9716, 0.7568, 1.3292], + device='cuda:0'), covar=tensor([0.0630, 0.0641, 0.0434, 0.0719, 0.1130, 0.0890, 0.1361, 0.1031], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0360, 0.0363, 0.0386, 0.0465, 0.0392, 0.0341, 0.0345], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 04:27:17,225 INFO [train.py:903] (0/4) Epoch 25, batch 2950, loss[loss=0.1921, simple_loss=0.2798, pruned_loss=0.05222, over 19689.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2869, pruned_loss=0.06296, over 3807849.07 frames. ], batch size: 59, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:27:44,142 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:27:56,482 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.674e+02 5.957e+02 7.713e+02 2.101e+03, threshold=1.191e+03, percent-clipped=6.0 +2023-04-03 04:28:12,816 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 04:28:18,965 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:28:21,108 INFO [train.py:903] (0/4) Epoch 25, batch 3000, loss[loss=0.2213, simple_loss=0.3029, pruned_loss=0.0699, over 17476.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2859, pruned_loss=0.06232, over 3808957.27 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:28:21,108 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 04:28:33,781 INFO [train.py:937] (0/4) Epoch 25, validation: loss=0.1677, simple_loss=0.2674, pruned_loss=0.034, over 944034.00 frames. 
+2023-04-03 04:28:33,784 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 04:28:35,117 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 04:28:44,970 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:01,375 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166893.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:17,266 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166905.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:38,482 INFO [train.py:903] (0/4) Epoch 25, batch 3050, loss[loss=0.2168, simple_loss=0.2961, pruned_loss=0.06871, over 19586.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2869, pruned_loss=0.0626, over 3821681.12 frames. ], batch size: 61, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:09,924 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:17,437 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 4.942e+02 6.233e+02 8.295e+02 1.859e+03, threshold=1.247e+03, percent-clipped=9.0 +2023-04-03 04:30:23,328 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:41,892 INFO [train.py:903] (0/4) Epoch 25, batch 3100, loss[loss=0.2235, simple_loss=0.2988, pruned_loss=0.07413, over 19787.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2873, pruned_loss=0.06276, over 3838720.53 frames. ], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:58,956 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:27,826 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:44,389 INFO [train.py:903] (0/4) Epoch 25, batch 3150, loss[loss=0.2096, simple_loss=0.2962, pruned_loss=0.0615, over 19708.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2874, pruned_loss=0.0633, over 3834349.71 frames. ], batch size: 59, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:32:07,978 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167040.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:08,795 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 04:32:24,694 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.039e+02 6.011e+02 8.459e+02 2.094e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-03 04:32:39,350 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167065.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:48,322 INFO [train.py:903] (0/4) Epoch 25, batch 3200, loss[loss=0.1941, simple_loss=0.2806, pruned_loss=0.05383, over 19673.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2867, pruned_loss=0.06249, over 3832412.89 frames. 
], batch size: 60, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:33:20,600 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:47,237 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:52,597 INFO [train.py:903] (0/4) Epoch 25, batch 3250, loss[loss=0.1952, simple_loss=0.2813, pruned_loss=0.05449, over 19539.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2857, pruned_loss=0.06207, over 3825783.99 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:33:52,893 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:57,483 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167126.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:34:33,306 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 4.897e+02 6.248e+02 7.764e+02 1.427e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 04:34:56,638 INFO [train.py:903] (0/4) Epoch 25, batch 3300, loss[loss=0.1869, simple_loss=0.2769, pruned_loss=0.04845, over 19653.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.0618, over 3822749.48 frames. ], batch size: 55, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:34:56,680 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 04:35:01,576 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4677, 1.3334, 1.5342, 1.5595, 3.0599, 1.2793, 2.4757, 3.4570], + device='cuda:0'), covar=tensor([0.0546, 0.2765, 0.2745, 0.1770, 0.0700, 0.2293, 0.1158, 0.0268], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0372, 0.0393, 0.0349, 0.0376, 0.0353, 0.0390, 0.0410], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:35:01,647 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3360, 2.0273, 1.6400, 1.3438, 1.8484, 1.3170, 1.3116, 1.8398], + device='cuda:0'), covar=tensor([0.0930, 0.0772, 0.1027, 0.0834, 0.0564, 0.1287, 0.0631, 0.0437], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0340, 0.0267, 0.0249, 0.0344, 0.0292, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:35:30,194 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9309, 1.2472, 1.6484, 0.6008, 2.1089, 2.4758, 2.1577, 2.5970], + device='cuda:0'), covar=tensor([0.1588, 0.3639, 0.3261, 0.2660, 0.0594, 0.0290, 0.0355, 0.0393], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0329, 0.0360, 0.0268, 0.0250, 0.0192, 0.0219, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 04:35:47,894 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:50,094 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:59,034 INFO [train.py:903] (0/4) Epoch 25, batch 3350, loss[loss=0.2099, simple_loss=0.2867, pruned_loss=0.06659, over 19848.00 frames. 
], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06198, over 3832988.57 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:36:21,814 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:24,229 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167241.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:25,466 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167242.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:41,239 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.821e+02 5.770e+02 7.565e+02 1.496e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-04-03 04:36:53,507 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167264.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:56,875 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167267.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:02,319 INFO [train.py:903] (0/4) Epoch 25, batch 3400, loss[loss=0.2282, simple_loss=0.3105, pruned_loss=0.07291, over 19526.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2854, pruned_loss=0.06204, over 3831784.93 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:37:14,438 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9443, 1.2566, 1.6059, 0.6060, 2.1181, 2.4872, 2.1748, 2.6233], + device='cuda:0'), covar=tensor([0.1630, 0.3778, 0.3367, 0.2825, 0.0606, 0.0277, 0.0354, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0328, 0.0359, 0.0268, 0.0250, 0.0192, 0.0219, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 04:37:26,094 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:28,365 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:38:08,142 INFO [train.py:903] (0/4) Epoch 25, batch 3450, loss[loss=0.2002, simple_loss=0.2779, pruned_loss=0.0612, over 19475.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2857, pruned_loss=0.06199, over 3821273.79 frames. ], batch size: 49, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:38:10,563 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 04:38:32,278 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5633, 1.3276, 1.4288, 2.2105, 1.6255, 1.8830, 1.8453, 1.6603], + device='cuda:0'), covar=tensor([0.0953, 0.1183, 0.1149, 0.0789, 0.0935, 0.0925, 0.0997, 0.0846], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0224, 0.0227, 0.0240, 0.0227, 0.0215, 0.0189, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 04:38:49,780 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.608e+02 5.631e+02 6.989e+02 1.333e+03, threshold=1.126e+03, percent-clipped=1.0 +2023-04-03 04:39:12,369 INFO [train.py:903] (0/4) Epoch 25, batch 3500, loss[loss=0.2338, simple_loss=0.3062, pruned_loss=0.0807, over 19651.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2861, pruned_loss=0.06226, over 3809349.44 frames. 
], batch size: 55, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:39:53,211 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-04-03 04:39:55,192 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:07,539 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1162, 2.0965, 2.3189, 2.2238, 2.9946, 2.6568, 3.0779, 2.0044], + device='cuda:0'), covar=tensor([0.1819, 0.3129, 0.2066, 0.1537, 0.1149, 0.1708, 0.1151, 0.3455], + device='cuda:0'), in_proj_covar=tensor([0.0544, 0.0656, 0.0730, 0.0493, 0.0623, 0.0537, 0.0662, 0.0560], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 04:40:15,422 INFO [train.py:903] (0/4) Epoch 25, batch 3550, loss[loss=0.1825, simple_loss=0.2594, pruned_loss=0.05279, over 19366.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2855, pruned_loss=0.06221, over 3792712.70 frames. ], batch size: 48, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:40:38,488 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3137, 3.8513, 3.9536, 3.9646, 1.6498, 3.7839, 3.3257, 3.7175], + device='cuda:0'), covar=tensor([0.1779, 0.0931, 0.0712, 0.0815, 0.5643, 0.0928, 0.0727, 0.1200], + device='cuda:0'), in_proj_covar=tensor([0.0801, 0.0766, 0.0968, 0.0851, 0.0846, 0.0737, 0.0579, 0.0900], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 04:40:51,379 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167450.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:55,633 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.129e+02 6.252e+02 7.981e+02 1.792e+03, threshold=1.250e+03, percent-clipped=6.0 +2023-04-03 04:41:06,569 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:11,115 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167466.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:13,878 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:17,992 INFO [train.py:903] (0/4) Epoch 25, batch 3600, loss[loss=0.2696, simple_loss=0.3571, pruned_loss=0.091, over 19521.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06132, over 3801119.84 frames. ], batch size: 64, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:41:44,827 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:50,856 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167497.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:16,930 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-03 04:42:20,841 INFO [train.py:903] (0/4) Epoch 25, batch 3650, loss[loss=0.1644, simple_loss=0.2419, pruned_loss=0.04352, over 18721.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06158, over 3808901.15 frames. 
], batch size: 41, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:42:21,285 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:39,952 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.33 vs. limit=5.0 +2023-04-03 04:43:00,370 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 5.264e+02 6.466e+02 8.043e+02 1.528e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-03 04:43:24,179 INFO [train.py:903] (0/4) Epoch 25, batch 3700, loss[loss=0.2475, simple_loss=0.3252, pruned_loss=0.08486, over 19739.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2849, pruned_loss=0.06167, over 3817879.77 frames. ], batch size: 63, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:43:31,615 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:43:36,268 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:44:28,023 INFO [train.py:903] (0/4) Epoch 25, batch 3750, loss[loss=0.189, simple_loss=0.2741, pruned_loss=0.05198, over 19587.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06124, over 3834149.53 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:45:08,828 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.966e+02 5.285e+02 6.593e+02 8.150e+02 1.742e+03, threshold=1.319e+03, percent-clipped=4.0 +2023-04-03 04:45:19,857 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:45:31,292 INFO [train.py:903] (0/4) Epoch 25, batch 3800, loss[loss=0.225, simple_loss=0.3054, pruned_loss=0.07226, over 19673.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06129, over 3825298.65 frames. ], batch size: 60, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:45:50,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:46:01,954 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 04:46:19,189 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-03 04:46:35,343 INFO [train.py:903] (0/4) Epoch 25, batch 3850, loss[loss=0.2777, simple_loss=0.3477, pruned_loss=0.1038, over 19349.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06126, over 3821639.22 frames. ], batch size: 70, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:46:53,991 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167737.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:47:16,485 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.991e+02 5.959e+02 7.597e+02 1.650e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 04:47:24,053 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.45 vs. limit=5.0 +2023-04-03 04:47:39,476 INFO [train.py:903] (0/4) Epoch 25, batch 3900, loss[loss=0.2054, simple_loss=0.2949, pruned_loss=0.05796, over 19787.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.06188, over 3812057.40 frames. 
], batch size: 56, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:47:55,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167785.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:47:59,106 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8326, 1.4177, 1.7165, 1.4578, 3.4052, 1.1193, 2.6531, 3.8471], + device='cuda:0'), covar=tensor([0.0485, 0.2972, 0.2764, 0.2029, 0.0685, 0.2597, 0.1120, 0.0229], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0374, 0.0396, 0.0350, 0.0377, 0.0353, 0.0390, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 04:48:06,191 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167794.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:43,212 INFO [train.py:903] (0/4) Epoch 25, batch 3950, loss[loss=0.2081, simple_loss=0.2907, pruned_loss=0.0627, over 19473.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.06189, over 3817069.68 frames. ], batch size: 64, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:48:45,756 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 04:48:56,960 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:01,666 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:25,131 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 4.848e+02 5.890e+02 7.638e+02 1.655e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 04:49:29,083 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:34,592 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:46,053 INFO [train.py:903] (0/4) Epoch 25, batch 4000, loss[loss=0.2014, simple_loss=0.2894, pruned_loss=0.05669, over 18233.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06192, over 3797650.99 frames. ], batch size: 83, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:50:32,144 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 04:50:33,730 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:50:50,808 INFO [train.py:903] (0/4) Epoch 25, batch 4050, loss[loss=0.1833, simple_loss=0.2539, pruned_loss=0.05637, over 19105.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06216, over 3794303.35 frames. 
], batch size: 42, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:50:59,278 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6173, 1.7298, 2.0392, 1.7636, 2.6308, 3.0170, 2.8786, 3.1842], + device='cuda:0'), covar=tensor([0.1425, 0.3125, 0.2806, 0.2452, 0.1181, 0.0316, 0.0253, 0.0399], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0328, 0.0359, 0.0269, 0.0250, 0.0192, 0.0218, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 04:51:32,272 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.057e+02 6.529e+02 8.113e+02 1.821e+03, threshold=1.306e+03, percent-clipped=7.0 +2023-04-03 04:51:52,934 INFO [train.py:903] (0/4) Epoch 25, batch 4100, loss[loss=0.1961, simple_loss=0.2739, pruned_loss=0.05913, over 19753.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2861, pruned_loss=0.06282, over 3784756.80 frames. ], batch size: 47, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:52:27,684 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-168000.pt +2023-04-03 04:52:29,047 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 04:52:56,839 INFO [train.py:903] (0/4) Epoch 25, batch 4150, loss[loss=0.2225, simple_loss=0.3037, pruned_loss=0.07064, over 19544.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06188, over 3801087.45 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:53:03,561 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8850, 4.3319, 4.6455, 4.6310, 1.8479, 4.3579, 3.7321, 4.3286], + device='cuda:0'), covar=tensor([0.1789, 0.1019, 0.0632, 0.0792, 0.6378, 0.1071, 0.0778, 0.1251], + device='cuda:0'), in_proj_covar=tensor([0.0810, 0.0778, 0.0982, 0.0862, 0.0861, 0.0747, 0.0587, 0.0914], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 04:53:36,015 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168053.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:53:39,909 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 5.393e+02 6.582e+02 8.080e+02 1.683e+03, threshold=1.316e+03, percent-clipped=2.0 +2023-04-03 04:53:59,619 INFO [train.py:903] (0/4) Epoch 25, batch 4200, loss[loss=0.2399, simple_loss=0.326, pruned_loss=0.07684, over 19780.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2857, pruned_loss=0.06195, over 3810461.06 frames. ], batch size: 54, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:54:01,993 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 04:54:10,944 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168081.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:55:03,249 INFO [train.py:903] (0/4) Epoch 25, batch 4250, loss[loss=0.1928, simple_loss=0.2819, pruned_loss=0.05178, over 19474.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2858, pruned_loss=0.06182, over 3820754.59 frames. 
], batch size: 64, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:55:13,202 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:55:17,973 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 04:55:29,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 04:55:46,758 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.961e+02 6.439e+02 7.740e+02 2.119e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-03 04:55:59,617 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:07,296 INFO [train.py:903] (0/4) Epoch 25, batch 4300, loss[loss=0.2024, simple_loss=0.277, pruned_loss=0.06394, over 19764.00 frames. ], tot_loss[loss=0.205, simple_loss=0.286, pruned_loss=0.06197, over 3820688.95 frames. ], batch size: 48, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:56:30,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:37,434 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168196.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:00,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 04:57:10,762 INFO [train.py:903] (0/4) Epoch 25, batch 4350, loss[loss=0.2097, simple_loss=0.2725, pruned_loss=0.07351, over 19754.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.285, pruned_loss=0.06187, over 3820425.42 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:57:38,635 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:47,841 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168251.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:53,358 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.774e+02 5.735e+02 7.211e+02 1.236e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 04:58:13,423 INFO [train.py:903] (0/4) Epoch 25, batch 4400, loss[loss=0.1953, simple_loss=0.2785, pruned_loss=0.05604, over 19615.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2856, pruned_loss=0.0617, over 3824276.41 frames. ], batch size: 50, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:58:20,092 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.46 vs. limit=5.0 +2023-04-03 04:58:40,147 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 04:58:50,386 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 04:59:16,257 INFO [train.py:903] (0/4) Epoch 25, batch 4450, loss[loss=0.229, simple_loss=0.3081, pruned_loss=0.07492, over 19679.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2855, pruned_loss=0.06142, over 3826800.33 frames. 
], batch size: 59, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:59:59,623 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.609e+02 5.804e+02 7.720e+02 1.927e+03, threshold=1.161e+03, percent-clipped=7.0 +2023-04-03 05:00:20,131 INFO [train.py:903] (0/4) Epoch 25, batch 4500, loss[loss=0.1998, simple_loss=0.2763, pruned_loss=0.06167, over 19607.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.285, pruned_loss=0.06145, over 3829965.45 frames. ], batch size: 50, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:00:23,958 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7363, 2.6100, 2.2682, 2.1290, 1.9571, 2.3772, 1.1468, 1.9575], + device='cuda:0'), covar=tensor([0.0714, 0.0656, 0.0563, 0.1001, 0.1029, 0.1063, 0.1322, 0.1028], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0363, 0.0362, 0.0389, 0.0466, 0.0396, 0.0344, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 05:00:52,070 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:01:23,385 INFO [train.py:903] (0/4) Epoch 25, batch 4550, loss[loss=0.1735, simple_loss=0.2502, pruned_loss=0.04833, over 19737.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06148, over 3828630.77 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:01:34,636 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 05:01:59,987 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 05:02:02,774 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:08,833 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 4.803e+02 5.692e+02 6.651e+02 1.392e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 05:02:27,732 INFO [train.py:903] (0/4) Epoch 25, batch 4600, loss[loss=0.1855, simple_loss=0.266, pruned_loss=0.05252, over 19681.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.06155, over 3809908.03 frames. ], batch size: 53, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:02:35,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168477.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:04,758 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168500.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:20,621 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:31,424 INFO [train.py:903] (0/4) Epoch 25, batch 4650, loss[loss=0.1728, simple_loss=0.2473, pruned_loss=0.04913, over 19758.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2848, pruned_loss=0.06142, over 3792449.69 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:03:35,315 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:50,743 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 05:04:02,095 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 05:04:16,048 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.060e+02 6.015e+02 7.632e+02 1.453e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 05:04:34,774 INFO [train.py:903] (0/4) Epoch 25, batch 4700, loss[loss=0.1524, simple_loss=0.2345, pruned_loss=0.03517, over 19749.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06239, over 3774835.78 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:04:57,660 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 05:05:04,401 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:05:15,549 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 05:05:38,974 INFO [train.py:903] (0/4) Epoch 25, batch 4750, loss[loss=0.2411, simple_loss=0.3174, pruned_loss=0.08236, over 19328.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2857, pruned_loss=0.06266, over 3791880.74 frames. ], batch size: 66, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:06:16,689 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.26 vs. limit=5.0 +2023-04-03 05:06:22,552 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.029e+02 6.051e+02 7.124e+02 1.309e+03, threshold=1.210e+03, percent-clipped=1.0 +2023-04-03 05:06:40,987 INFO [train.py:903] (0/4) Epoch 25, batch 4800, loss[loss=0.1979, simple_loss=0.269, pruned_loss=0.06337, over 19751.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2868, pruned_loss=0.06302, over 3797180.43 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:07:29,213 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168710.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:07:43,819 INFO [train.py:903] (0/4) Epoch 25, batch 4850, loss[loss=0.1544, simple_loss=0.2324, pruned_loss=0.03822, over 19099.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06269, over 3802464.30 frames. ], batch size: 42, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:08:09,441 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 05:08:29,299 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.536e+02 6.758e+02 9.275e+02 1.787e+03, threshold=1.352e+03, percent-clipped=12.0 +2023-04-03 05:08:30,524 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 05:08:36,430 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 05:08:36,465 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 05:08:43,844 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:08:47,084 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 05:08:48,226 INFO [train.py:903] (0/4) Epoch 25, batch 4900, loss[loss=0.207, simple_loss=0.2907, pruned_loss=0.06164, over 19759.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.06229, over 3822335.12 frames. 
], batch size: 54, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:08:49,804 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3968, 1.4082, 1.6645, 1.6145, 2.1014, 2.0200, 2.2491, 0.8188], + device='cuda:0'), covar=tensor([0.2554, 0.4430, 0.2755, 0.2008, 0.1736, 0.2309, 0.1566, 0.5049], + device='cuda:0'), in_proj_covar=tensor([0.0548, 0.0666, 0.0738, 0.0500, 0.0630, 0.0543, 0.0670, 0.0567], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 05:09:06,568 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 05:09:15,992 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:09:39,555 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0934, 1.1799, 1.6028, 1.2860, 2.6282, 3.5623, 3.2635, 3.8309], + device='cuda:0'), covar=tensor([0.1885, 0.4247, 0.3780, 0.2675, 0.0656, 0.0204, 0.0270, 0.0296], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0327, 0.0357, 0.0268, 0.0248, 0.0191, 0.0218, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 05:09:52,877 INFO [train.py:903] (0/4) Epoch 25, batch 4950, loss[loss=0.2453, simple_loss=0.3103, pruned_loss=0.09011, over 13308.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06253, over 3798727.54 frames. ], batch size: 135, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:10:04,477 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 05:10:30,146 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 05:10:36,919 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.693e+02 5.645e+02 7.417e+02 1.662e+03, threshold=1.129e+03, percent-clipped=1.0 +2023-04-03 05:10:55,836 INFO [train.py:903] (0/4) Epoch 25, batch 5000, loss[loss=0.184, simple_loss=0.2757, pruned_loss=0.04617, over 19648.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.06149, over 3816526.30 frames. ], batch size: 55, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:11:02,557 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 05:11:13,727 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 05:11:58,385 INFO [train.py:903] (0/4) Epoch 25, batch 5050, loss[loss=0.2233, simple_loss=0.2938, pruned_loss=0.0764, over 19779.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.0611, over 3827739.24 frames. ], batch size: 56, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:12:33,964 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 05:12:41,881 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.713e+02 5.534e+02 7.099e+02 1.364e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-04-03 05:12:55,393 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168966.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:02,136 INFO [train.py:903] (0/4) Epoch 25, batch 5100, loss[loss=0.1682, simple_loss=0.2463, pruned_loss=0.04506, over 19399.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06098, over 3817095.40 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:13:11,271 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 05:13:14,765 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 05:13:19,272 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168985.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:20,158 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 05:13:21,484 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168987.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:13:23,824 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6005, 4.0933, 4.2627, 4.2759, 1.6519, 4.0727, 3.4911, 3.9742], + device='cuda:0'), covar=tensor([0.1620, 0.0886, 0.0619, 0.0691, 0.5928, 0.0880, 0.0715, 0.1188], + device='cuda:0'), in_proj_covar=tensor([0.0805, 0.0775, 0.0978, 0.0861, 0.0852, 0.0741, 0.0583, 0.0912], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 05:13:26,009 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:45,573 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-03 05:14:05,076 INFO [train.py:903] (0/4) Epoch 25, batch 5150, loss[loss=0.1953, simple_loss=0.2777, pruned_loss=0.05642, over 19661.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2852, pruned_loss=0.06142, over 3818229.73 frames. ], batch size: 55, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:14:09,680 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0536, 3.7180, 2.5985, 3.3491, 0.9510, 3.6655, 3.5295, 3.6124], + device='cuda:0'), covar=tensor([0.0814, 0.1137, 0.1861, 0.0877, 0.3670, 0.0792, 0.1041, 0.1173], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0427, 0.0512, 0.0358, 0.0412, 0.0454, 0.0447, 0.0477], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:14:12,536 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 05:14:16,410 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 05:14:48,109 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.295e+02 6.704e+02 8.101e+02 2.101e+03, threshold=1.341e+03, percent-clipped=6.0 +2023-04-03 05:14:52,555 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:15:08,252 INFO [train.py:903] (0/4) Epoch 25, batch 5200, loss[loss=0.1632, simple_loss=0.2481, pruned_loss=0.03916, over 19722.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2857, pruned_loss=0.06156, over 3830534.69 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:15:23,489 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-03 05:15:29,718 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6926, 1.8047, 2.0547, 2.0813, 1.6027, 2.0255, 2.0546, 1.8690], + device='cuda:0'), covar=tensor([0.4216, 0.3690, 0.1995, 0.2393, 0.4026, 0.2264, 0.5152, 0.3469], + device='cuda:0'), in_proj_covar=tensor([0.0923, 0.0996, 0.0733, 0.0945, 0.0900, 0.0835, 0.0853, 0.0799], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 05:15:44,161 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169100.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:08,701 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 05:16:13,025 INFO [train.py:903] (0/4) Epoch 25, batch 5250, loss[loss=0.2188, simple_loss=0.298, pruned_loss=0.06981, over 13042.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2866, pruned_loss=0.06193, over 3825026.48 frames. ], batch size: 136, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:16:26,986 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0962, 1.9862, 1.8098, 1.7027, 1.5182, 1.6446, 0.4564, 1.0829], + device='cuda:0'), covar=tensor([0.0653, 0.0693, 0.0514, 0.0792, 0.1183, 0.1069, 0.1416, 0.1118], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0363, 0.0364, 0.0391, 0.0467, 0.0399, 0.0344, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 05:16:27,869 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:56,983 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.211e+02 5.791e+02 7.204e+02 1.532e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 05:17:05,412 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 05:17:16,182 INFO [train.py:903] (0/4) Epoch 25, batch 5300, loss[loss=0.2062, simple_loss=0.292, pruned_loss=0.0602, over 19473.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2867, pruned_loss=0.06203, over 3808996.81 frames. ], batch size: 64, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:17:22,363 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:17:24,887 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8003, 1.4424, 1.6444, 1.5714, 3.4019, 1.2580, 2.5098, 3.8221], + device='cuda:0'), covar=tensor([0.0561, 0.2965, 0.2922, 0.1911, 0.0718, 0.2451, 0.1291, 0.0254], + device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0372, 0.0394, 0.0351, 0.0377, 0.0353, 0.0391, 0.0411], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:17:34,475 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-03 05:17:49,978 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9976, 1.9148, 1.7796, 1.6096, 1.5367, 1.6048, 0.4888, 0.8536], + device='cuda:0'), covar=tensor([0.0646, 0.0643, 0.0487, 0.0691, 0.1208, 0.0850, 0.1303, 0.1099], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0363, 0.0366, 0.0392, 0.0468, 0.0400, 0.0345, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 05:18:18,923 INFO [train.py:903] (0/4) Epoch 25, batch 5350, loss[loss=0.2224, simple_loss=0.2925, pruned_loss=0.07612, over 19490.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06178, over 3809255.85 frames. ], batch size: 49, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:18:21,487 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6091, 1.5198, 1.5724, 1.8514, 1.3943, 1.7646, 1.7745, 1.6506], + device='cuda:0'), covar=tensor([0.0836, 0.0930, 0.0957, 0.0661, 0.0859, 0.0771, 0.0857, 0.0691], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0239, 0.0227, 0.0214, 0.0189, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 05:18:56,390 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 05:19:04,345 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 4.992e+02 6.506e+02 8.048e+02 1.510e+03, threshold=1.301e+03, percent-clipped=6.0 +2023-04-03 05:19:24,344 INFO [train.py:903] (0/4) Epoch 25, batch 5400, loss[loss=0.1852, simple_loss=0.2607, pruned_loss=0.05485, over 19356.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2848, pruned_loss=0.06137, over 3817612.03 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:20:27,248 INFO [train.py:903] (0/4) Epoch 25, batch 5450, loss[loss=0.2025, simple_loss=0.2884, pruned_loss=0.05827, over 19566.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2842, pruned_loss=0.06104, over 3817671.23 frames. ], batch size: 52, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:20:36,563 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:20:39,940 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169331.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:21:11,192 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.08 vs. limit=5.0 +2023-04-03 05:21:11,465 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.467e+02 5.289e+02 7.172e+02 1.661e+03, threshold=1.058e+03, percent-clipped=2.0 +2023-04-03 05:21:29,438 INFO [train.py:903] (0/4) Epoch 25, batch 5500, loss[loss=0.1845, simple_loss=0.2718, pruned_loss=0.04857, over 19534.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2845, pruned_loss=0.0614, over 3806291.07 frames. ], batch size: 54, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:21:57,709 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 05:22:03,495 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 05:22:33,002 INFO [train.py:903] (0/4) Epoch 25, batch 5550, loss[loss=0.1761, simple_loss=0.254, pruned_loss=0.04909, over 19779.00 frames. 
], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06176, over 3815183.48 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:22:43,779 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 05:23:00,564 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:00,789 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:04,030 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169446.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:23:17,173 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 5.192e+02 6.082e+02 7.576e+02 1.216e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-03 05:23:33,243 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 05:23:36,942 INFO [train.py:903] (0/4) Epoch 25, batch 5600, loss[loss=0.1986, simple_loss=0.2883, pruned_loss=0.05447, over 17398.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06137, over 3808842.32 frames. ], batch size: 101, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:23:44,276 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:08,196 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.00 vs. limit=5.0 +2023-04-03 05:24:38,800 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:39,764 INFO [train.py:903] (0/4) Epoch 25, batch 5650, loss[loss=0.1731, simple_loss=0.2508, pruned_loss=0.04766, over 19360.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2832, pruned_loss=0.06118, over 3819352.81 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:25:24,920 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.961e+02 6.158e+02 8.288e+02 1.627e+03, threshold=1.232e+03, percent-clipped=5.0 +2023-04-03 05:25:27,520 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169559.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:25:30,437 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 05:25:43,204 INFO [train.py:903] (0/4) Epoch 25, batch 5700, loss[loss=0.2136, simple_loss=0.2961, pruned_loss=0.06556, over 19711.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2834, pruned_loss=0.06108, over 3818525.01 frames. ], batch size: 63, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:11,331 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:26:47,655 INFO [train.py:903] (0/4) Epoch 25, batch 5750, loss[loss=0.1833, simple_loss=0.2738, pruned_loss=0.04643, over 19655.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2829, pruned_loss=0.06074, over 3821555.73 frames. ], batch size: 53, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:48,813 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 05:26:59,216 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-03 05:27:04,018 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 05:27:06,664 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:10,213 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:32,989 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.765e+02 5.968e+02 8.362e+02 1.575e+03, threshold=1.194e+03, percent-clipped=5.0 +2023-04-03 05:27:52,469 INFO [train.py:903] (0/4) Epoch 25, batch 5800, loss[loss=0.2306, simple_loss=0.3144, pruned_loss=0.07337, over 18133.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2835, pruned_loss=0.06102, over 3816346.54 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:28:27,979 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169700.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:28:30,416 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169702.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:28:43,033 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.1636, 5.1938, 6.0417, 6.0433, 2.0622, 5.7047, 4.8097, 5.6619], + device='cuda:0'), covar=tensor([0.1734, 0.0725, 0.0502, 0.0585, 0.6248, 0.0814, 0.0611, 0.1087], + device='cuda:0'), in_proj_covar=tensor([0.0805, 0.0771, 0.0978, 0.0859, 0.0851, 0.0743, 0.0583, 0.0911], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 05:28:55,630 INFO [train.py:903] (0/4) Epoch 25, batch 5850, loss[loss=0.1839, simple_loss=0.2739, pruned_loss=0.04695, over 19678.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.0612, over 3801169.27 frames. ], batch size: 53, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:29:00,320 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:02,807 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169727.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:29:02,861 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5468, 1.7668, 2.1677, 1.9815, 3.1663, 2.4644, 3.2056, 1.7213], + device='cuda:0'), covar=tensor([0.2846, 0.4930, 0.3080, 0.2127, 0.1629, 0.2652, 0.1966, 0.4775], + device='cuda:0'), in_proj_covar=tensor([0.0547, 0.0663, 0.0736, 0.0501, 0.0629, 0.0542, 0.0667, 0.0564], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 05:29:12,335 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 05:29:28,402 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169747.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:41,205 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.003e+02 6.037e+02 8.413e+02 1.989e+03, threshold=1.207e+03, percent-clipped=6.0 +2023-04-03 05:29:51,726 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:56,994 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. 
limit=2.0 +2023-04-03 05:29:59,829 INFO [train.py:903] (0/4) Epoch 25, batch 5900, loss[loss=0.2563, simple_loss=0.3245, pruned_loss=0.0941, over 13172.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06133, over 3806701.47 frames. ], batch size: 136, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:30:04,518 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 05:30:27,877 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 05:30:41,004 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:30:56,622 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:04,208 INFO [train.py:903] (0/4) Epoch 25, batch 5950, loss[loss=0.2062, simple_loss=0.2888, pruned_loss=0.06175, over 19681.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2852, pruned_loss=0.06193, over 3785097.79 frames. ], batch size: 59, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:31:28,374 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169840.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:39,964 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:49,615 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.837e+02 6.128e+02 7.395e+02 1.765e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-03 05:32:09,283 INFO [train.py:903] (0/4) Epoch 25, batch 6000, loss[loss=0.2269, simple_loss=0.2987, pruned_loss=0.07758, over 18041.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0616, over 3801074.28 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:32:09,284 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 05:32:21,935 INFO [train.py:937] (0/4) Epoch 25, validation: loss=0.1675, simple_loss=0.2674, pruned_loss=0.03383, over 944034.00 frames. 
+2023-04-03 05:32:21,936 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 05:32:24,540 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:30,418 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.3081, 5.2632, 6.1776, 6.1945, 2.4296, 5.8089, 4.9163, 5.8500], + device='cuda:0'), covar=tensor([0.1626, 0.0681, 0.0517, 0.0551, 0.5628, 0.0642, 0.0621, 0.0930], + device='cuda:0'), in_proj_covar=tensor([0.0800, 0.0767, 0.0974, 0.0854, 0.0848, 0.0738, 0.0581, 0.0905], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 05:32:35,051 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3647, 1.3879, 2.1101, 1.7505, 3.0510, 4.8444, 4.6250, 5.1723], + device='cuda:0'), covar=tensor([0.1592, 0.3942, 0.3187, 0.2192, 0.0633, 0.0190, 0.0184, 0.0212], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0327, 0.0356, 0.0267, 0.0248, 0.0192, 0.0217, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 05:32:48,340 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:19,610 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5309, 1.5504, 1.7819, 1.7114, 2.7049, 2.3869, 2.8511, 1.2048], + device='cuda:0'), covar=tensor([0.2429, 0.4296, 0.2662, 0.1991, 0.1493, 0.1993, 0.1347, 0.4552], + device='cuda:0'), in_proj_covar=tensor([0.0549, 0.0666, 0.0739, 0.0503, 0.0632, 0.0544, 0.0670, 0.0567], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 05:33:20,526 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:26,959 INFO [train.py:903] (0/4) Epoch 25, batch 6050, loss[loss=0.2139, simple_loss=0.3024, pruned_loss=0.06268, over 19781.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2843, pruned_loss=0.0616, over 3814731.80 frames. ], batch size: 56, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:33:57,316 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-03 05:34:12,793 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.971e+02 6.200e+02 7.955e+02 1.563e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 05:34:30,155 INFO [train.py:903] (0/4) Epoch 25, batch 6100, loss[loss=0.2265, simple_loss=0.3139, pruned_loss=0.06957, over 19657.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06174, over 3807111.81 frames. ], batch size: 55, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:39,019 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. 
limit=2.0 +2023-04-03 05:34:44,076 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:34:44,436 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0457, 2.1202, 2.3969, 2.6362, 2.0639, 2.5617, 2.4592, 2.1959], + device='cuda:0'), covar=tensor([0.4353, 0.4092, 0.1944, 0.2598, 0.4284, 0.2227, 0.4884, 0.3465], + device='cuda:0'), in_proj_covar=tensor([0.0921, 0.0997, 0.0733, 0.0944, 0.0900, 0.0833, 0.0855, 0.0797], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 05:35:05,921 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-170000.pt +2023-04-03 05:35:35,276 INFO [train.py:903] (0/4) Epoch 25, batch 6150, loss[loss=0.2204, simple_loss=0.2877, pruned_loss=0.07655, over 19615.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0617, over 3801622.90 frames. ], batch size: 50, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:35:57,194 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.31 vs. limit=5.0 +2023-04-03 05:36:07,476 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 05:36:22,377 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 4.967e+02 5.743e+02 7.363e+02 2.013e+03, threshold=1.149e+03, percent-clipped=2.0 +2023-04-03 05:36:28,743 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:36:40,357 INFO [train.py:903] (0/4) Epoch 25, batch 6200, loss[loss=0.2372, simple_loss=0.3065, pruned_loss=0.08391, over 13350.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2857, pruned_loss=0.06244, over 3792928.28 frames. ], batch size: 137, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:37:04,390 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:13,796 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:26,457 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:43,645 INFO [train.py:903] (0/4) Epoch 25, batch 6250, loss[loss=0.2035, simple_loss=0.2873, pruned_loss=0.05988, over 19324.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2855, pruned_loss=0.06236, over 3807193.34 frames. ], batch size: 66, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:38:16,084 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 05:38:16,216 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170148.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:38:29,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.199e+02 6.170e+02 7.836e+02 1.706e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 05:38:47,601 INFO [train.py:903] (0/4) Epoch 25, batch 6300, loss[loss=0.1894, simple_loss=0.2754, pruned_loss=0.0517, over 19852.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2847, pruned_loss=0.06219, over 3790836.65 frames. 
], batch size: 52, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:39:13,781 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.4335, 4.0825, 2.6837, 3.6217, 1.0835, 4.0501, 3.8849, 3.9376], + device='cuda:0'), covar=tensor([0.0628, 0.0987, 0.1854, 0.0802, 0.3618, 0.0686, 0.0857, 0.1066], + device='cuda:0'), in_proj_covar=tensor([0.0516, 0.0421, 0.0504, 0.0353, 0.0406, 0.0445, 0.0442, 0.0471], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:39:32,208 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170206.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:39:51,012 INFO [train.py:903] (0/4) Epoch 25, batch 6350, loss[loss=0.1916, simple_loss=0.2626, pruned_loss=0.06031, over 19398.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2838, pruned_loss=0.06162, over 3794608.95 frames. ], batch size: 48, lr: 3.25e-03, grad_scale: 4.0 +2023-04-03 05:39:53,853 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:36,692 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.417e+02 5.475e+02 7.140e+02 1.571e+03, threshold=1.095e+03, percent-clipped=3.0 +2023-04-03 05:40:40,362 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3937, 3.1328, 2.3537, 2.8402, 0.8045, 3.1080, 2.9480, 3.0098], + device='cuda:0'), covar=tensor([0.0970, 0.1231, 0.1866, 0.0977, 0.3602, 0.0869, 0.1076, 0.1409], + device='cuda:0'), in_proj_covar=tensor([0.0514, 0.0418, 0.0501, 0.0351, 0.0403, 0.0442, 0.0439, 0.0469], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:40:42,874 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1107, 1.2902, 1.4306, 1.5183, 2.7584, 1.1041, 2.2379, 3.1392], + device='cuda:0'), covar=tensor([0.0650, 0.2836, 0.3015, 0.1755, 0.0800, 0.2400, 0.1244, 0.0307], + device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0371, 0.0392, 0.0351, 0.0377, 0.0352, 0.0391, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:40:42,919 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:54,618 INFO [train.py:903] (0/4) Epoch 25, batch 6400, loss[loss=0.2092, simple_loss=0.2869, pruned_loss=0.06573, over 19591.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2833, pruned_loss=0.06162, over 3794526.27 frames. ], batch size: 52, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:41:22,300 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-03 05:41:59,017 INFO [train.py:903] (0/4) Epoch 25, batch 6450, loss[loss=0.2272, simple_loss=0.3093, pruned_loss=0.07253, over 18836.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2836, pruned_loss=0.06125, over 3791048.31 frames. 
], batch size: 74, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:42:38,962 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8492, 4.0558, 4.4060, 4.4087, 2.5713, 4.1345, 3.7531, 4.1688], + device='cuda:0'), covar=tensor([0.1433, 0.2574, 0.0652, 0.0723, 0.4823, 0.1238, 0.0613, 0.1081], + device='cuda:0'), in_proj_covar=tensor([0.0804, 0.0771, 0.0976, 0.0855, 0.0849, 0.0738, 0.0581, 0.0909], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 05:42:40,327 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:42:44,565 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.688e+02 6.468e+02 8.318e+02 2.178e+03, threshold=1.294e+03, percent-clipped=13.0 +2023-04-03 05:42:45,737 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 05:43:03,216 INFO [train.py:903] (0/4) Epoch 25, batch 6500, loss[loss=0.2179, simple_loss=0.298, pruned_loss=0.06884, over 13763.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2833, pruned_loss=0.06128, over 3797298.30 frames. ], batch size: 136, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:43:07,955 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 05:43:12,745 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:43:48,121 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170407.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:44:06,261 INFO [train.py:903] (0/4) Epoch 25, batch 6550, loss[loss=0.1605, simple_loss=0.2448, pruned_loss=0.03811, over 19601.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2831, pruned_loss=0.06154, over 3790740.40 frames. ], batch size: 50, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:44:52,752 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.881e+02 6.158e+02 7.799e+02 1.457e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-03 05:44:57,801 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:09,732 INFO [train.py:903] (0/4) Epoch 25, batch 6600, loss[loss=0.1722, simple_loss=0.2505, pruned_loss=0.047, over 19767.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2831, pruned_loss=0.06143, over 3797719.36 frames. 
], batch size: 47, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:45:19,112 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:20,460 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:28,732 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:51,405 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:09,044 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:13,014 INFO [train.py:903] (0/4) Epoch 25, batch 6650, loss[loss=0.1975, simple_loss=0.2834, pruned_loss=0.05583, over 19668.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.283, pruned_loss=0.06118, over 3804341.37 frames. ], batch size: 55, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:46:13,349 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:41,267 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170544.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:58,413 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.827e+02 5.978e+02 7.974e+02 2.215e+03, threshold=1.196e+03, percent-clipped=6.0 +2023-04-03 05:46:58,837 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3375, 1.3902, 1.8830, 1.4746, 2.7536, 3.7656, 3.4280, 3.9693], + device='cuda:0'), covar=tensor([0.1523, 0.3806, 0.3145, 0.2360, 0.0613, 0.0185, 0.0213, 0.0229], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0329, 0.0359, 0.0268, 0.0250, 0.0192, 0.0218, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 05:47:16,983 INFO [train.py:903] (0/4) Epoch 25, batch 6700, loss[loss=0.1656, simple_loss=0.248, pruned_loss=0.04161, over 19368.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2834, pruned_loss=0.06126, over 3809689.53 frames. ], batch size: 47, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:47:36,655 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:48:05,686 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9655, 1.5905, 1.7613, 1.7729, 3.6208, 1.2358, 2.6490, 4.0771], + device='cuda:0'), covar=tensor([0.0469, 0.2643, 0.2680, 0.1814, 0.0655, 0.2498, 0.1203, 0.0190], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0373, 0.0393, 0.0352, 0.0378, 0.0353, 0.0391, 0.0411], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:48:17,850 INFO [train.py:903] (0/4) Epoch 25, batch 6750, loss[loss=0.212, simple_loss=0.297, pruned_loss=0.06351, over 19677.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2834, pruned_loss=0.06111, over 3811343.49 frames. ], batch size: 60, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:48:52,178 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.55 vs. 
limit=5.0 +2023-04-03 05:48:59,809 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.526e+02 5.801e+02 6.899e+02 1.670e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 05:49:15,797 INFO [train.py:903] (0/4) Epoch 25, batch 6800, loss[loss=0.2312, simple_loss=0.315, pruned_loss=0.07368, over 19587.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06134, over 3815347.26 frames. ], batch size: 61, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:49:44,711 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7527, 2.1472, 1.6506, 1.6518, 2.0111, 1.6070, 1.6100, 2.0017], + device='cuda:0'), covar=tensor([0.0867, 0.0696, 0.0914, 0.0763, 0.0519, 0.1061, 0.0618, 0.0492], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0316, 0.0335, 0.0268, 0.0247, 0.0340, 0.0292, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:49:46,897 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-25.pt +2023-04-03 05:50:03,525 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 05:50:05,063 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 05:50:07,355 INFO [train.py:903] (0/4) Epoch 26, batch 0, loss[loss=0.2542, simple_loss=0.3225, pruned_loss=0.09296, over 19621.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3225, pruned_loss=0.09296, over 19621.00 frames. ], batch size: 57, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:50:07,356 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 05:50:19,307 INFO [train.py:937] (0/4) Epoch 26, validation: loss=0.1673, simple_loss=0.2675, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 05:50:19,308 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 05:50:26,507 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9716, 1.6888, 1.6415, 1.9757, 1.6237, 1.7820, 1.7194, 1.8442], + device='cuda:0'), covar=tensor([0.1082, 0.1522, 0.1524, 0.1045, 0.1413, 0.0554, 0.1408, 0.0770], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0355, 0.0314, 0.0253, 0.0303, 0.0254, 0.0314, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:50:32,196 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 05:51:20,620 INFO [train.py:903] (0/4) Epoch 26, batch 50, loss[loss=0.1894, simple_loss=0.2709, pruned_loss=0.05397, over 19688.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.0602, over 866569.54 frames. ], batch size: 53, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:51:30,191 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.268e+02 6.216e+02 7.845e+02 1.668e+03, threshold=1.243e+03, percent-clipped=9.0 +2023-04-03 05:51:54,932 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170778.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:51:55,735 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 05:52:22,097 INFO [train.py:903] (0/4) Epoch 26, batch 100, loss[loss=0.1901, simple_loss=0.2708, pruned_loss=0.05473, over 19679.00 frames. 
], tot_loss[loss=0.2061, simple_loss=0.2869, pruned_loss=0.06267, over 1526461.16 frames. ], batch size: 53, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:52:26,016 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:52:32,332 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 05:52:50,716 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:53:12,612 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 05:53:24,743 INFO [train.py:903] (0/4) Epoch 26, batch 150, loss[loss=0.1922, simple_loss=0.2805, pruned_loss=0.0519, over 17363.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2849, pruned_loss=0.06164, over 2030344.78 frames. ], batch size: 101, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:53:36,325 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 4.908e+02 6.470e+02 7.917e+02 1.560e+03, threshold=1.294e+03, percent-clipped=6.0 +2023-04-03 05:54:20,332 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5315, 2.3758, 2.1248, 2.6717, 2.2046, 2.2636, 2.0870, 2.4656], + device='cuda:0'), covar=tensor([0.0956, 0.1685, 0.1455, 0.0944, 0.1487, 0.0519, 0.1410, 0.0700], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0352, 0.0312, 0.0252, 0.0302, 0.0253, 0.0312, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:54:25,500 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 05:54:26,680 INFO [train.py:903] (0/4) Epoch 26, batch 200, loss[loss=0.1952, simple_loss=0.2729, pruned_loss=0.05876, over 19728.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2841, pruned_loss=0.06082, over 2442011.62 frames. ], batch size: 51, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:05,036 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170931.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:14,462 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:23,820 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170946.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:30,414 INFO [train.py:903] (0/4) Epoch 26, batch 250, loss[loss=0.1897, simple_loss=0.2807, pruned_loss=0.04934, over 19580.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06127, over 2742086.02 frames. ], batch size: 61, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:31,162 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. 
limit=2.0 +2023-04-03 05:55:39,622 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8732, 1.3490, 1.0888, 0.9643, 1.1770, 1.0031, 1.0066, 1.2485], + device='cuda:0'), covar=tensor([0.0664, 0.0931, 0.1162, 0.0789, 0.0585, 0.1330, 0.0630, 0.0532], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0336, 0.0270, 0.0249, 0.0342, 0.0294, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 05:55:42,608 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.848e+02 5.950e+02 8.014e+02 1.769e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 05:56:35,081 INFO [train.py:903] (0/4) Epoch 26, batch 300, loss[loss=0.2025, simple_loss=0.2763, pruned_loss=0.0643, over 19475.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06127, over 2992199.18 frames. ], batch size: 49, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:34,219 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171046.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:57:34,575 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-03 05:57:38,577 INFO [train.py:903] (0/4) Epoch 26, batch 350, loss[loss=0.1804, simple_loss=0.2553, pruned_loss=0.05275, over 19726.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2829, pruned_loss=0.06051, over 3178473.00 frames. ], batch size: 46, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:45,659 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:57:49,060 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 4.764e+02 6.020e+02 7.720e+02 1.602e+03, threshold=1.204e+03, percent-clipped=4.0 +2023-04-03 05:58:42,177 INFO [train.py:903] (0/4) Epoch 26, batch 400, loss[loss=0.2219, simple_loss=0.303, pruned_loss=0.07041, over 19512.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06051, over 3325888.12 frames. ], batch size: 64, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:58:55,361 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171110.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:59:43,982 INFO [train.py:903] (0/4) Epoch 26, batch 450, loss[loss=0.2556, simple_loss=0.3227, pruned_loss=0.09423, over 13941.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06063, over 3424445.37 frames. ], batch size: 136, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:59:56,339 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.217e+02 6.577e+02 9.059e+02 2.566e+03, threshold=1.315e+03, percent-clipped=7.0 +2023-04-03 06:00:19,587 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 06:00:20,832 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 06:00:30,441 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2109, 2.0690, 2.0266, 2.2860, 2.1056, 1.9138, 1.9597, 2.1637], + device='cuda:0'), covar=tensor([0.0829, 0.1173, 0.1079, 0.0757, 0.1021, 0.0514, 0.1137, 0.0585], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0354, 0.0314, 0.0254, 0.0304, 0.0255, 0.0315, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:00:38,881 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:00:47,846 INFO [train.py:903] (0/4) Epoch 26, batch 500, loss[loss=0.1926, simple_loss=0.2724, pruned_loss=0.05646, over 19578.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2852, pruned_loss=0.06129, over 3520304.95 frames. ], batch size: 52, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:01:05,573 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:11,740 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:52,229 INFO [train.py:903] (0/4) Epoch 26, batch 550, loss[loss=0.2732, simple_loss=0.3377, pruned_loss=0.1044, over 19628.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2844, pruned_loss=0.06087, over 3591868.68 frames. ], batch size: 50, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:03,092 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.798e+02 6.471e+02 7.842e+02 1.459e+03, threshold=1.294e+03, percent-clipped=3.0 +2023-04-03 06:02:12,067 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 06:02:44,067 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171290.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:02:55,673 INFO [train.py:903] (0/4) Epoch 26, batch 600, loss[loss=0.1983, simple_loss=0.2866, pruned_loss=0.05496, over 19559.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2858, pruned_loss=0.06176, over 3620306.95 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:58,477 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:31,084 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:36,397 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 06:03:57,549 INFO [train.py:903] (0/4) Epoch 26, batch 650, loss[loss=0.1884, simple_loss=0.264, pruned_loss=0.05641, over 19407.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2862, pruned_loss=0.06214, over 3673902.79 frames. ], batch size: 48, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:04:09,344 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 4.759e+02 5.860e+02 7.918e+02 1.260e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-03 06:04:15,214 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:05:01,840 INFO [train.py:903] (0/4) Epoch 26, batch 700, loss[loss=0.1709, simple_loss=0.2526, pruned_loss=0.04462, over 19750.00 frames. 
], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06185, over 3712118.69 frames. ], batch size: 47, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:05:09,250 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171405.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:08,106 INFO [train.py:903] (0/4) Epoch 26, batch 750, loss[loss=0.194, simple_loss=0.2722, pruned_loss=0.05791, over 19608.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.0614, over 3742022.81 frames. ], batch size: 50, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:06:11,857 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4406, 1.4227, 1.5983, 1.5182, 3.0402, 1.2486, 2.2806, 3.4336], + device='cuda:0'), covar=tensor([0.0522, 0.2639, 0.2741, 0.1858, 0.0657, 0.2343, 0.1325, 0.0272], + device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0374, 0.0394, 0.0354, 0.0379, 0.0354, 0.0392, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:06:12,967 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171454.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:18,656 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.123e+02 6.550e+02 8.686e+02 2.549e+03, threshold=1.310e+03, percent-clipped=11.0 +2023-04-03 06:07:12,531 INFO [train.py:903] (0/4) Epoch 26, batch 800, loss[loss=0.1916, simple_loss=0.2834, pruned_loss=0.04987, over 19348.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2851, pruned_loss=0.06118, over 3773414.92 frames. ], batch size: 66, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:07:25,369 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 06:07:32,306 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:14,608 INFO [train.py:903] (0/4) Epoch 26, batch 850, loss[loss=0.2112, simple_loss=0.2957, pruned_loss=0.06341, over 19606.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.285, pruned_loss=0.06132, over 3780689.50 frames. ], batch size: 57, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:08:24,827 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:25,749 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.689e+02 5.699e+02 7.261e+02 1.636e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-03 06:08:40,937 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:09:05,302 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 06:09:18,365 INFO [train.py:903] (0/4) Epoch 26, batch 900, loss[loss=0.2036, simple_loss=0.2912, pruned_loss=0.05802, over 19595.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.0612, over 3788190.27 frames. ], batch size: 61, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:17,059 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.86 vs. limit=5.0 +2023-04-03 06:10:22,197 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 06:10:23,374 INFO [train.py:903] (0/4) Epoch 26, batch 950, loss[loss=0.1766, simple_loss=0.2646, pruned_loss=0.04425, over 19620.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06041, over 3786199.32 frames. ], batch size: 57, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:34,905 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.213e+02 6.211e+02 8.451e+02 1.981e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 06:10:37,562 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171661.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:10:45,884 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1665, 1.7856, 1.4730, 1.0942, 1.6512, 1.1228, 1.1412, 1.6595], + device='cuda:0'), covar=tensor([0.0770, 0.0738, 0.0918, 0.0993, 0.0504, 0.1325, 0.0644, 0.0385], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0337, 0.0270, 0.0250, 0.0343, 0.0295, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:10:51,827 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171673.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:00,243 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8103, 1.8983, 1.4486, 1.8527, 1.7661, 1.4663, 1.4208, 1.7017], + device='cuda:0'), covar=tensor([0.1250, 0.1462, 0.1866, 0.1224, 0.1484, 0.0971, 0.1956, 0.1029], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0354, 0.0314, 0.0254, 0.0305, 0.0254, 0.0315, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:11:09,420 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171686.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:27,636 INFO [train.py:903] (0/4) Epoch 26, batch 1000, loss[loss=0.1859, simple_loss=0.2655, pruned_loss=0.05319, over 19712.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06105, over 3788118.15 frames. ], batch size: 45, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:11:36,038 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171707.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:12:19,822 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 06:12:31,841 INFO [train.py:903] (0/4) Epoch 26, batch 1050, loss[loss=0.2353, simple_loss=0.3134, pruned_loss=0.07857, over 19352.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06064, over 3803074.60 frames. ], batch size: 70, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:12:42,470 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.465e+02 6.383e+02 7.807e+02 1.569e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-03 06:12:50,005 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-03 06:13:01,865 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 06:13:34,571 INFO [train.py:903] (0/4) Epoch 26, batch 1100, loss[loss=0.1957, simple_loss=0.2854, pruned_loss=0.05306, over 19721.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.0611, over 3799795.29 frames. 
], batch size: 63, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:05,400 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:09,102 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171825.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:40,976 INFO [train.py:903] (0/4) Epoch 26, batch 1150, loss[loss=0.2463, simple_loss=0.315, pruned_loss=0.08874, over 13135.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2847, pruned_loss=0.06108, over 3797484.86 frames. ], batch size: 136, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:41,359 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171850.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:54,267 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.298e+02 6.784e+02 8.276e+02 1.649e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-03 06:14:55,493 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:15:46,135 INFO [train.py:903] (0/4) Epoch 26, batch 1200, loss[loss=0.2111, simple_loss=0.2974, pruned_loss=0.06239, over 19552.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06165, over 3794713.48 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:12,768 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 06:16:21,033 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:29,733 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 06:16:48,474 INFO [train.py:903] (0/4) Epoch 26, batch 1250, loss[loss=0.2343, simple_loss=0.2931, pruned_loss=0.08772, over 19307.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06238, over 3799316.87 frames. ], batch size: 44, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:53,692 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:54,754 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171955.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:58,907 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.182e+02 6.196e+02 7.754e+02 1.405e+03, threshold=1.239e+03, percent-clipped=1.0 +2023-04-03 06:17:21,563 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:17:51,870 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-172000.pt +2023-04-03 06:17:52,780 INFO [train.py:903] (0/4) Epoch 26, batch 1300, loss[loss=0.1638, simple_loss=0.2473, pruned_loss=0.04016, over 19472.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2856, pruned_loss=0.06206, over 3823277.58 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:56,742 INFO [train.py:903] (0/4) Epoch 26, batch 1350, loss[loss=0.1929, simple_loss=0.2717, pruned_loss=0.05708, over 19758.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06178, over 3826404.77 frames. 
], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:59,475 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0056, 4.5474, 2.7374, 3.9386, 0.9408, 4.5523, 4.3629, 4.4705], + device='cuda:0'), covar=tensor([0.0506, 0.0914, 0.2007, 0.0857, 0.4047, 0.0615, 0.0939, 0.1080], + device='cuda:0'), in_proj_covar=tensor([0.0521, 0.0423, 0.0510, 0.0357, 0.0409, 0.0449, 0.0444, 0.0475], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:19:09,298 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-03 06:19:09,474 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.487e+02 6.725e+02 8.152e+02 1.378e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-03 06:19:16,131 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-03 06:19:25,563 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8692, 1.5981, 1.4810, 1.7816, 1.5337, 1.5361, 1.4373, 1.7096], + device='cuda:0'), covar=tensor([0.1099, 0.1332, 0.1553, 0.1129, 0.1358, 0.0629, 0.1562, 0.0828], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0357, 0.0318, 0.0256, 0.0308, 0.0256, 0.0318, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:19:33,987 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:01,390 INFO [train.py:903] (0/4) Epoch 26, batch 1400, loss[loss=0.1914, simple_loss=0.2713, pruned_loss=0.05573, over 19842.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2832, pruned_loss=0.06104, over 3831512.67 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:20:07,036 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:47,879 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172136.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:21:05,095 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 06:21:06,114 INFO [train.py:903] (0/4) Epoch 26, batch 1450, loss[loss=0.1795, simple_loss=0.2494, pruned_loss=0.05477, over 19779.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2832, pruned_loss=0.06102, over 3831648.75 frames. ], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:21:16,561 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 4.778e+02 6.066e+02 7.310e+02 2.231e+03, threshold=1.213e+03, percent-clipped=2.0 +2023-04-03 06:21:22,788 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4561, 1.6184, 2.0573, 1.7553, 3.1932, 2.5971, 3.4986, 1.5955], + device='cuda:0'), covar=tensor([0.2768, 0.4638, 0.2919, 0.2084, 0.1621, 0.2228, 0.1562, 0.4612], + device='cuda:0'), in_proj_covar=tensor([0.0548, 0.0662, 0.0740, 0.0500, 0.0630, 0.0541, 0.0667, 0.0565], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 06:22:10,096 INFO [train.py:903] (0/4) Epoch 26, batch 1500, loss[loss=0.1799, simple_loss=0.2749, pruned_loss=0.04249, over 19098.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2826, pruned_loss=0.06052, over 3832409.38 frames. 
], batch size: 69, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:22:33,565 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9175, 5.3545, 3.0609, 4.6902, 0.9698, 5.5706, 5.3386, 5.5680], + device='cuda:0'), covar=tensor([0.0368, 0.0818, 0.1926, 0.0726, 0.4268, 0.0506, 0.0775, 0.0955], + device='cuda:0'), in_proj_covar=tensor([0.0522, 0.0425, 0.0513, 0.0358, 0.0410, 0.0452, 0.0446, 0.0476], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:22:51,379 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172231.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:14,777 INFO [train.py:903] (0/4) Epoch 26, batch 1550, loss[loss=0.1696, simple_loss=0.2508, pruned_loss=0.04419, over 19774.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.06016, over 3838230.43 frames. ], batch size: 45, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:23:23,243 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:26,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.787e+02 4.585e+02 5.725e+02 7.044e+02 1.122e+03, threshold=1.145e+03, percent-clipped=0.0 +2023-04-03 06:23:59,061 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3516, 1.7186, 1.8754, 1.9876, 2.9114, 1.7255, 2.8300, 3.2646], + device='cuda:0'), covar=tensor([0.0728, 0.2941, 0.2809, 0.1864, 0.0932, 0.2341, 0.1967, 0.0494], + device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0374, 0.0395, 0.0354, 0.0380, 0.0354, 0.0393, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:24:17,316 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:24:18,407 INFO [train.py:903] (0/4) Epoch 26, batch 1600, loss[loss=0.1816, simple_loss=0.2585, pruned_loss=0.0524, over 19786.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2825, pruned_loss=0.06058, over 3822916.52 frames. ], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:24:30,668 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:24:44,999 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 06:24:49,165 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5902, 2.3422, 1.7657, 1.6156, 2.1396, 1.4649, 1.4204, 2.0910], + device='cuda:0'), covar=tensor([0.1232, 0.1013, 0.1133, 0.0996, 0.0659, 0.1355, 0.0846, 0.0540], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0320, 0.0340, 0.0272, 0.0251, 0.0345, 0.0294, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:25:22,890 INFO [train.py:903] (0/4) Epoch 26, batch 1650, loss[loss=0.2429, simple_loss=0.324, pruned_loss=0.08089, over 18199.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2826, pruned_loss=0.06038, over 3824868.83 frames. 
], batch size: 83, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:25:32,972 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 4.806e+02 6.116e+02 7.815e+02 1.931e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 06:26:25,096 INFO [train.py:903] (0/4) Epoch 26, batch 1700, loss[loss=0.1943, simple_loss=0.2723, pruned_loss=0.05811, over 19844.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2823, pruned_loss=0.06032, over 3828912.58 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:26:27,915 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 06:26:43,100 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172414.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:27:07,063 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 06:27:27,217 INFO [train.py:903] (0/4) Epoch 26, batch 1750, loss[loss=0.1836, simple_loss=0.255, pruned_loss=0.05608, over 19311.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2819, pruned_loss=0.06034, over 3810329.84 frames. ], batch size: 44, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:27:39,761 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.883e+02 5.717e+02 7.034e+02 1.807e+03, threshold=1.143e+03, percent-clipped=3.0 +2023-04-03 06:27:52,717 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0860, 2.2393, 1.6013, 2.2292, 2.2002, 1.6428, 1.6345, 2.0690], + device='cuda:0'), covar=tensor([0.1232, 0.1677, 0.1993, 0.1268, 0.1490, 0.1070, 0.2018, 0.1056], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0254, 0.0307, 0.0254, 0.0316, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:28:06,463 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:28:09,152 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8884, 1.2586, 1.6037, 0.5341, 1.9081, 2.4817, 2.1510, 2.6252], + device='cuda:0'), covar=tensor([0.1654, 0.3832, 0.3393, 0.2917, 0.0671, 0.0289, 0.0350, 0.0381], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0270, 0.0252, 0.0193, 0.0219, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 06:28:25,522 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8009, 4.3444, 2.6829, 3.8460, 0.8274, 4.3597, 4.2289, 4.2984], + device='cuda:0'), covar=tensor([0.0547, 0.0945, 0.2015, 0.0842, 0.4229, 0.0619, 0.0862, 0.1201], + device='cuda:0'), in_proj_covar=tensor([0.0518, 0.0420, 0.0507, 0.0355, 0.0406, 0.0447, 0.0441, 0.0472], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:28:32,022 INFO [train.py:903] (0/4) Epoch 26, batch 1800, loss[loss=0.2156, simple_loss=0.2941, pruned_loss=0.06855, over 19656.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2823, pruned_loss=0.06079, over 3793200.24 frames. ], batch size: 55, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:31,751 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 06:29:36,210 INFO [train.py:903] (0/4) Epoch 26, batch 1850, loss[loss=0.2331, simple_loss=0.3096, pruned_loss=0.07831, over 19479.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.283, pruned_loss=0.06107, over 3783801.99 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:46,974 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.880e+02 5.794e+02 7.257e+02 1.575e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 06:30:11,356 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 06:30:20,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2565, 3.9144, 2.6635, 3.5102, 1.1738, 3.8256, 3.7497, 3.7950], + device='cuda:0'), covar=tensor([0.0761, 0.0960, 0.1918, 0.0844, 0.3534, 0.0693, 0.0952, 0.1262], + device='cuda:0'), in_proj_covar=tensor([0.0521, 0.0423, 0.0511, 0.0357, 0.0407, 0.0450, 0.0443, 0.0475], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:30:34,098 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:30:39,960 INFO [train.py:903] (0/4) Epoch 26, batch 1900, loss[loss=0.1943, simple_loss=0.2762, pruned_loss=0.05626, over 19840.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2832, pruned_loss=0.0609, over 3788627.18 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:30:59,214 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 06:31:01,764 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:05,015 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 06:31:23,049 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0675, 5.1353, 5.9335, 5.9558, 2.1511, 5.5489, 4.6808, 5.5858], + device='cuda:0'), covar=tensor([0.1958, 0.0860, 0.0640, 0.0637, 0.6298, 0.0814, 0.0682, 0.1375], + device='cuda:0'), in_proj_covar=tensor([0.0806, 0.0774, 0.0978, 0.0859, 0.0852, 0.0745, 0.0584, 0.0906], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 06:31:30,003 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 06:31:36,059 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:44,207 INFO [train.py:903] (0/4) Epoch 26, batch 1950, loss[loss=0.1849, simple_loss=0.2546, pruned_loss=0.05756, over 19725.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2834, pruned_loss=0.06096, over 3793192.02 frames. 
], batch size: 46, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:31:57,689 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.105e+02 6.247e+02 7.487e+02 1.872e+03, threshold=1.249e+03, percent-clipped=7.0 +2023-04-03 06:32:12,279 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172670.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:42,538 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172695.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:48,412 INFO [train.py:903] (0/4) Epoch 26, batch 2000, loss[loss=0.2291, simple_loss=0.3081, pruned_loss=0.075, over 19338.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2846, pruned_loss=0.06164, over 3792448.52 frames. ], batch size: 70, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:14,447 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 06:33:38,926 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9381, 4.2576, 4.6157, 4.6133, 2.1878, 4.3117, 3.7656, 4.3524], + device='cuda:0'), covar=tensor([0.1618, 0.1436, 0.0582, 0.0670, 0.5507, 0.1018, 0.0669, 0.1009], + device='cuda:0'), in_proj_covar=tensor([0.0803, 0.0774, 0.0978, 0.0859, 0.0852, 0.0744, 0.0584, 0.0905], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 06:33:46,789 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 06:33:51,654 INFO [train.py:903] (0/4) Epoch 26, batch 2050, loss[loss=0.2097, simple_loss=0.2911, pruned_loss=0.06416, over 19777.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06182, over 3793961.39 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:55,236 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172752.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:34:03,941 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.217e+02 6.186e+02 8.512e+02 2.102e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 06:34:06,457 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 06:34:07,786 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 06:34:27,745 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 06:34:54,383 INFO [train.py:903] (0/4) Epoch 26, batch 2100, loss[loss=0.1796, simple_loss=0.2662, pruned_loss=0.04652, over 19466.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2848, pruned_loss=0.06149, over 3807657.56 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:25,181 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 06:35:47,342 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 06:35:56,681 INFO [train.py:903] (0/4) Epoch 26, batch 2150, loss[loss=0.2015, simple_loss=0.2755, pruned_loss=0.06376, over 19408.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2853, pruned_loss=0.06148, over 3825388.91 frames. 
], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:58,271 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172851.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:02,873 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0468, 1.7085, 1.9991, 2.1201, 4.5543, 1.4901, 2.7407, 4.9841], + device='cuda:0'), covar=tensor([0.0484, 0.2791, 0.2681, 0.1726, 0.0748, 0.2264, 0.1320, 0.0163], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0371, 0.0392, 0.0351, 0.0378, 0.0352, 0.0389, 0.0410], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:36:10,068 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.604e+02 4.749e+02 6.121e+02 8.086e+02 1.727e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 06:36:14,769 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:21,373 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-03 06:36:31,091 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:59,980 INFO [train.py:903] (0/4) Epoch 26, batch 2200, loss[loss=0.2113, simple_loss=0.2941, pruned_loss=0.06424, over 19682.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06118, over 3826407.10 frames. ], batch size: 53, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:38:04,010 INFO [train.py:903] (0/4) Epoch 26, batch 2250, loss[loss=0.1608, simple_loss=0.2384, pruned_loss=0.04157, over 19770.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06074, over 3830495.35 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:38:16,968 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172960.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:38:17,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.806e+02 5.924e+02 7.729e+02 1.368e+03, threshold=1.185e+03, percent-clipped=2.0 +2023-04-03 06:38:51,867 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:39:07,993 INFO [train.py:903] (0/4) Epoch 26, batch 2300, loss[loss=0.2318, simple_loss=0.3081, pruned_loss=0.07779, over 13368.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06063, over 3817052.54 frames. ], batch size: 135, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:39:13,393 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 06:39:19,730 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 06:40:11,060 INFO [train.py:903] (0/4) Epoch 26, batch 2350, loss[loss=0.1783, simple_loss=0.2694, pruned_loss=0.04365, over 19446.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2842, pruned_loss=0.06087, over 3819638.58 frames. 
], batch size: 70, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:40:25,948 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.175e+02 6.518e+02 7.971e+02 2.695e+03, threshold=1.304e+03, percent-clipped=8.0 +2023-04-03 06:40:43,725 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173075.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:40:54,421 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 06:41:08,275 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:10,528 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 06:41:14,038 INFO [train.py:903] (0/4) Epoch 26, batch 2400, loss[loss=0.2787, simple_loss=0.3407, pruned_loss=0.1084, over 13163.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06114, over 3818620.61 frames. ], batch size: 135, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:41:18,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:48,367 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0805, 1.2565, 1.7846, 1.1335, 2.5265, 3.5207, 3.2130, 3.7183], + device='cuda:0'), covar=tensor([0.1749, 0.4068, 0.3370, 0.2718, 0.0703, 0.0193, 0.0222, 0.0283], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0330, 0.0362, 0.0271, 0.0252, 0.0193, 0.0220, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 06:42:19,069 INFO [train.py:903] (0/4) Epoch 26, batch 2450, loss[loss=0.2131, simple_loss=0.2904, pruned_loss=0.06783, over 19493.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2859, pruned_loss=0.06213, over 3783832.29 frames. ], batch size: 49, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:42:32,913 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.972e+02 5.973e+02 7.732e+02 1.743e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 06:42:41,751 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173168.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:22,795 INFO [train.py:903] (0/4) Epoch 26, batch 2500, loss[loss=0.1855, simple_loss=0.275, pruned_loss=0.04801, over 19770.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06179, over 3804505.57 frames. 
], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:43:24,460 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2998, 2.3948, 2.6871, 2.9750, 2.2972, 2.8274, 2.7199, 2.5020], + device='cuda:0'), covar=tensor([0.4281, 0.4200, 0.1838, 0.2735, 0.4656, 0.2368, 0.4630, 0.3229], + device='cuda:0'), in_proj_covar=tensor([0.0927, 0.1004, 0.0736, 0.0948, 0.0906, 0.0842, 0.0859, 0.0804], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 06:43:31,263 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:36,090 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173211.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:44:25,914 INFO [train.py:903] (0/4) Epoch 26, batch 2550, loss[loss=0.1654, simple_loss=0.2472, pruned_loss=0.04179, over 19370.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06114, over 3808341.24 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:44:40,216 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1777, 3.2396, 1.8617, 1.9231, 2.9452, 1.5593, 1.6155, 2.3594], + device='cuda:0'), covar=tensor([0.1302, 0.0712, 0.1150, 0.0857, 0.0578, 0.1346, 0.0947, 0.0706], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0317, 0.0337, 0.0269, 0.0248, 0.0341, 0.0291, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:44:40,985 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.733e+02 4.982e+02 5.984e+02 8.594e+02 2.255e+03, threshold=1.197e+03, percent-clipped=6.0 +2023-04-03 06:45:23,044 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 06:45:30,051 INFO [train.py:903] (0/4) Epoch 26, batch 2600, loss[loss=0.183, simple_loss=0.2735, pruned_loss=0.04622, over 19716.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2842, pruned_loss=0.06053, over 3823907.31 frames. ], batch size: 59, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:45:59,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173322.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,380 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,477 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4933, 2.1410, 2.1825, 3.1473, 2.0699, 2.5951, 2.5021, 2.3687], + device='cuda:0'), covar=tensor([0.0762, 0.0928, 0.0915, 0.0731, 0.0924, 0.0762, 0.0921, 0.0665], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0226, 0.0228, 0.0242, 0.0228, 0.0214, 0.0190, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 06:46:10,051 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:35,579 INFO [train.py:903] (0/4) Epoch 26, batch 2650, loss[loss=0.178, simple_loss=0.2587, pruned_loss=0.04864, over 19323.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06014, over 3829898.76 frames. 
], batch size: 44, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:46:41,857 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3444, 2.0488, 1.6264, 1.3901, 1.8612, 1.3058, 1.2900, 1.8217], + device='cuda:0'), covar=tensor([0.0951, 0.0787, 0.1134, 0.0864, 0.0606, 0.1309, 0.0728, 0.0492], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0316, 0.0336, 0.0269, 0.0248, 0.0340, 0.0290, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:46:43,033 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173356.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:45,484 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:49,596 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.033e+02 5.962e+02 7.363e+02 1.395e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 06:46:55,577 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 06:47:17,766 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:47:39,266 INFO [train.py:903] (0/4) Epoch 26, batch 2700, loss[loss=0.1657, simple_loss=0.2464, pruned_loss=0.04248, over 16533.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06058, over 3811753.82 frames. ], batch size: 36, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:47:59,336 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8004, 3.3080, 3.3364, 3.3552, 1.3878, 3.2157, 2.8055, 3.1206], + device='cuda:0'), covar=tensor([0.1805, 0.1037, 0.0834, 0.0969, 0.5636, 0.1088, 0.0873, 0.1302], + device='cuda:0'), in_proj_covar=tensor([0.0806, 0.0773, 0.0978, 0.0861, 0.0856, 0.0742, 0.0584, 0.0909], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 06:48:32,626 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7009, 1.7881, 2.0595, 1.9526, 1.4954, 1.9373, 2.0357, 1.9206], + device='cuda:0'), covar=tensor([0.4291, 0.3784, 0.2012, 0.2459, 0.3961, 0.2232, 0.5213, 0.3408], + device='cuda:0'), in_proj_covar=tensor([0.0928, 0.1004, 0.0736, 0.0949, 0.0908, 0.0841, 0.0859, 0.0805], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 06:48:39,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.46 vs. limit=2.0 +2023-04-03 06:48:41,207 INFO [train.py:903] (0/4) Epoch 26, batch 2750, loss[loss=0.2034, simple_loss=0.2902, pruned_loss=0.05823, over 19533.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2841, pruned_loss=0.06048, over 3828009.63 frames. 
], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:54,808 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.847e+02 4.981e+02 6.163e+02 7.639e+02 1.916e+03, threshold=1.233e+03, percent-clipped=5.0 +2023-04-03 06:49:03,504 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:34,150 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:44,183 INFO [train.py:903] (0/4) Epoch 26, batch 2800, loss[loss=0.2183, simple_loss=0.2999, pruned_loss=0.06837, over 18699.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2849, pruned_loss=0.0611, over 3833711.85 frames. ], batch size: 74, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:49:52,943 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4951, 1.6373, 1.6581, 1.8993, 1.5498, 1.8641, 1.7636, 1.5315], + device='cuda:0'), covar=tensor([0.4719, 0.4207, 0.2865, 0.2894, 0.4211, 0.2573, 0.6395, 0.5013], + device='cuda:0'), in_proj_covar=tensor([0.0930, 0.1005, 0.0738, 0.0949, 0.0910, 0.0842, 0.0861, 0.0806], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 06:49:56,406 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173509.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:49:56,869 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-03 06:50:00,276 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:50:48,004 INFO [train.py:903] (0/4) Epoch 26, batch 2850, loss[loss=0.2046, simple_loss=0.2789, pruned_loss=0.06516, over 19795.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06179, over 3836502.10 frames. ], batch size: 48, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:01,769 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.954e+02 6.202e+02 8.183e+02 1.932e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 06:51:23,167 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:51:51,086 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 06:51:52,292 INFO [train.py:903] (0/4) Epoch 26, batch 2900, loss[loss=0.2395, simple_loss=0.308, pruned_loss=0.08552, over 17427.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06133, over 3831761.64 frames. 
], batch size: 101, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:57,182 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173603.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:07,978 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0174, 2.0832, 2.4700, 2.5420, 1.9255, 2.5129, 2.4856, 2.2704], + device='cuda:0'), covar=tensor([0.4237, 0.4014, 0.1832, 0.2425, 0.4236, 0.2218, 0.4671, 0.3238], + device='cuda:0'), in_proj_covar=tensor([0.0928, 0.1003, 0.0736, 0.0946, 0.0907, 0.0840, 0.0859, 0.0805], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 06:52:27,641 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:33,440 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:56,764 INFO [train.py:903] (0/4) Epoch 26, batch 2950, loss[loss=0.2263, simple_loss=0.3109, pruned_loss=0.07084, over 18165.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2846, pruned_loss=0.06181, over 3820487.75 frames. ], batch size: 83, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:10,748 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.287e+02 6.719e+02 8.706e+02 2.181e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-03 06:53:23,890 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173671.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:53:59,613 INFO [train.py:903] (0/4) Epoch 26, batch 3000, loss[loss=0.2191, simple_loss=0.2992, pruned_loss=0.06949, over 19651.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06193, over 3819555.64 frames. ], batch size: 58, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:59,614 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 06:54:12,259 INFO [train.py:937] (0/4) Epoch 26, validation: loss=0.1681, simple_loss=0.2675, pruned_loss=0.03435, over 944034.00 frames. +2023-04-03 06:54:12,260 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 06:54:17,263 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 06:55:16,504 INFO [train.py:903] (0/4) Epoch 26, batch 3050, loss[loss=0.1838, simple_loss=0.2717, pruned_loss=0.04802, over 19662.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2849, pruned_loss=0.06119, over 3825213.83 frames. 
], batch size: 53, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:55:25,140 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0530, 3.6938, 2.5373, 3.3300, 1.0685, 3.7332, 3.5615, 3.6013], + device='cuda:0'), covar=tensor([0.0719, 0.1141, 0.2018, 0.0978, 0.3699, 0.0711, 0.0887, 0.1116], + device='cuda:0'), in_proj_covar=tensor([0.0522, 0.0424, 0.0510, 0.0357, 0.0408, 0.0450, 0.0447, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 06:55:30,856 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 4.825e+02 5.950e+02 7.456e+02 1.374e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 06:56:02,523 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173786.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:56:20,180 INFO [train.py:903] (0/4) Epoch 26, batch 3100, loss[loss=0.1819, simple_loss=0.2732, pruned_loss=0.04536, over 19668.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.06204, over 3787015.55 frames. ], batch size: 55, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:23,189 INFO [train.py:903] (0/4) Epoch 26, batch 3150, loss[loss=0.2427, simple_loss=0.3139, pruned_loss=0.08573, over 13625.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.06159, over 3796026.91 frames. ], batch size: 136, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:26,868 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173853.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 06:57:37,129 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.081e+02 6.233e+02 7.406e+02 2.417e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 06:57:49,125 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 06:58:05,121 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173883.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:58:25,323 INFO [train.py:903] (0/4) Epoch 26, batch 3200, loss[loss=0.2376, simple_loss=0.3138, pruned_loss=0.08074, over 19632.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06164, over 3811005.69 frames. ], batch size: 58, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:58:35,857 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173908.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:59:25,875 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8819, 3.9783, 4.4169, 4.4312, 2.6718, 4.1149, 3.8251, 4.1813], + device='cuda:0'), covar=tensor([0.1276, 0.3443, 0.0609, 0.0673, 0.4476, 0.1249, 0.0591, 0.1031], + device='cuda:0'), in_proj_covar=tensor([0.0804, 0.0771, 0.0974, 0.0856, 0.0851, 0.0742, 0.0579, 0.0902], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 06:59:27,979 INFO [train.py:903] (0/4) Epoch 26, batch 3250, loss[loss=0.1953, simple_loss=0.279, pruned_loss=0.05578, over 19659.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.06271, over 3809048.66 frames. 
], batch size: 53, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:59:42,864 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.716e+02 4.798e+02 6.185e+02 8.155e+02 2.789e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-03 06:59:52,460 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173968.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:59:55,195 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.42 vs. limit=5.0 +2023-04-03 07:00:01,772 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:00:31,435 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-174000.pt +2023-04-03 07:00:32,335 INFO [train.py:903] (0/4) Epoch 26, batch 3300, loss[loss=0.1984, simple_loss=0.2789, pruned_loss=0.05896, over 19497.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2861, pruned_loss=0.06281, over 3799765.09 frames. ], batch size: 49, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 07:00:35,706 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 07:01:08,025 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-04-03 07:01:09,845 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2555, 2.2766, 1.7396, 2.3467, 2.3813, 1.7670, 1.8351, 2.1167], + device='cuda:0'), covar=tensor([0.1145, 0.1630, 0.1832, 0.1190, 0.1360, 0.0917, 0.1837, 0.1004], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0358, 0.0316, 0.0256, 0.0306, 0.0255, 0.0319, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:01:13,136 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174033.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:17,498 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:25,956 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:35,836 INFO [train.py:903] (0/4) Epoch 26, batch 3350, loss[loss=0.1871, simple_loss=0.2773, pruned_loss=0.04844, over 19652.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06253, over 3788592.64 frames. 
], batch size: 58, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:01:39,736 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0080, 1.5783, 1.8499, 1.6831, 4.4249, 1.1444, 2.7262, 4.8005], + device='cuda:0'), covar=tensor([0.0521, 0.3198, 0.3011, 0.2200, 0.0844, 0.2982, 0.1464, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0374, 0.0393, 0.0352, 0.0378, 0.0354, 0.0392, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:01:49,566 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.999e+02 5.843e+02 7.881e+02 1.777e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 07:01:56,794 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:01,246 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4924, 1.4879, 1.4576, 1.7981, 1.2806, 1.7100, 1.6669, 1.6462], + device='cuda:0'), covar=tensor([0.0869, 0.0919, 0.0961, 0.0647, 0.0847, 0.0752, 0.0826, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0224, 0.0227, 0.0241, 0.0226, 0.0213, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 07:02:25,281 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 07:02:28,230 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:39,922 INFO [train.py:903] (0/4) Epoch 26, batch 3400, loss[loss=0.1916, simple_loss=0.2821, pruned_loss=0.05053, over 17283.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2858, pruned_loss=0.06192, over 3791612.72 frames. ], batch size: 101, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:02:49,790 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2516, 2.0543, 1.9174, 2.1962, 1.8747, 1.9127, 1.7227, 2.1526], + device='cuda:0'), covar=tensor([0.0940, 0.1359, 0.1382, 0.0950, 0.1382, 0.0529, 0.1525, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0357, 0.0316, 0.0255, 0.0305, 0.0255, 0.0318, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:03:41,121 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 07:03:42,261 INFO [train.py:903] (0/4) Epoch 26, batch 3450, loss[loss=0.1874, simple_loss=0.2767, pruned_loss=0.04906, over 19769.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06153, over 3786554.88 frames. ], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:51,574 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-03 07:03:53,304 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:03:57,818 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.666e+02 5.900e+02 7.449e+02 1.550e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-03 07:04:08,803 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 07:04:39,824 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-04-03 07:04:47,526 INFO [train.py:903] (0/4) Epoch 26, batch 3500, loss[loss=0.1739, simple_loss=0.2476, pruned_loss=0.0501, over 19366.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06186, over 3806463.07 frames. ], batch size: 47, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:05:18,015 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174224.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 07:05:49,673 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174249.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 07:05:50,378 INFO [train.py:903] (0/4) Epoch 26, batch 3550, loss[loss=0.1748, simple_loss=0.2507, pruned_loss=0.04942, over 19318.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2846, pruned_loss=0.0618, over 3824267.03 frames. ], batch size: 44, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:05:55,405 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5818, 1.6912, 2.0782, 1.8427, 3.0513, 2.6092, 3.3358, 1.6415], + device='cuda:0'), covar=tensor([0.2625, 0.4499, 0.2781, 0.2023, 0.1704, 0.2215, 0.1714, 0.4630], + device='cuda:0'), in_proj_covar=tensor([0.0546, 0.0665, 0.0741, 0.0500, 0.0629, 0.0539, 0.0665, 0.0568], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:06:03,179 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.676e+02 6.063e+02 7.972e+02 1.969e+03, threshold=1.213e+03, percent-clipped=7.0 +2023-04-03 07:06:53,367 INFO [train.py:903] (0/4) Epoch 26, batch 3600, loss[loss=0.189, simple_loss=0.2738, pruned_loss=0.05207, over 19691.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.062, over 3818286.41 frames. ], batch size: 59, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:54,846 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:05,339 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:54,083 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:57,121 INFO [train.py:903] (0/4) Epoch 26, batch 3650, loss[loss=0.2262, simple_loss=0.3099, pruned_loss=0.07122, over 19537.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2857, pruned_loss=0.06241, over 3820450.38 frames. ], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:08:12,055 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.064e+02 6.896e+02 8.623e+02 2.807e+03, threshold=1.379e+03, percent-clipped=9.0 +2023-04-03 07:08:26,843 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:32,673 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:35,128 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:48,509 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-03 07:09:02,443 INFO [train.py:903] (0/4) Epoch 26, batch 3700, loss[loss=0.1761, simple_loss=0.2469, pruned_loss=0.05266, over 19146.00 frames. 
], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06191, over 3812969.61 frames. ], batch size: 42, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:09:10,643 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:31,742 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9154, 1.2035, 1.4892, 0.6008, 1.9216, 2.4344, 2.1429, 2.5673], + device='cuda:0'), covar=tensor([0.1709, 0.4131, 0.3669, 0.2992, 0.0687, 0.0291, 0.0345, 0.0405], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0328, 0.0361, 0.0269, 0.0251, 0.0193, 0.0218, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 07:09:34,226 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3200, 2.1780, 2.0442, 1.9296, 1.6857, 1.9014, 0.5937, 1.3226], + device='cuda:0'), covar=tensor([0.0684, 0.0661, 0.0530, 0.0882, 0.1220, 0.0914, 0.1462, 0.1121], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0361, 0.0366, 0.0390, 0.0467, 0.0396, 0.0345, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:10:07,613 INFO [train.py:903] (0/4) Epoch 26, batch 3750, loss[loss=0.23, simple_loss=0.3118, pruned_loss=0.07415, over 17500.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06139, over 3815068.11 frames. ], batch size: 101, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:10:13,056 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 07:10:20,544 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.722e+02 4.995e+02 5.757e+02 7.069e+02 1.518e+03, threshold=1.151e+03, percent-clipped=1.0 +2023-04-03 07:11:01,675 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:04,057 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:05,280 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5094, 2.2366, 1.6613, 1.5543, 2.0573, 1.3766, 1.4359, 1.9391], + device='cuda:0'), covar=tensor([0.1087, 0.0755, 0.1108, 0.0864, 0.0578, 0.1280, 0.0737, 0.0496], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0338, 0.0271, 0.0248, 0.0343, 0.0293, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:11:10,966 INFO [train.py:903] (0/4) Epoch 26, batch 3800, loss[loss=0.1755, simple_loss=0.2629, pruned_loss=0.04406, over 19579.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.06064, over 3827580.23 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:11:12,328 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:42,960 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 07:12:14,070 INFO [train.py:903] (0/4) Epoch 26, batch 3850, loss[loss=0.2376, simple_loss=0.3152, pruned_loss=0.08004, over 19772.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06162, over 3816033.90 frames. 
], batch size: 54, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:12:27,533 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 5.118e+02 6.382e+02 8.415e+02 1.605e+03, threshold=1.276e+03, percent-clipped=5.0 +2023-04-03 07:13:15,204 INFO [train.py:903] (0/4) Epoch 26, batch 3900, loss[loss=0.1805, simple_loss=0.2593, pruned_loss=0.05084, over 19790.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06173, over 3816036.37 frames. ], batch size: 48, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:13:16,175 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 07:13:36,329 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:13,851 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:20,758 INFO [train.py:903] (0/4) Epoch 26, batch 3950, loss[loss=0.1952, simple_loss=0.2705, pruned_loss=0.05993, over 15235.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2855, pruned_loss=0.06205, over 3816297.37 frames. ], batch size: 33, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:14:24,350 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 07:14:24,476 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:34,025 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.927e+02 6.085e+02 7.650e+02 1.385e+03, threshold=1.217e+03, percent-clipped=3.0 +2023-04-03 07:15:17,656 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8714, 4.5153, 3.3891, 4.0188, 2.1830, 4.4098, 4.3602, 4.4680], + device='cuda:0'), covar=tensor([0.0446, 0.0854, 0.1618, 0.0732, 0.2635, 0.0613, 0.0811, 0.1057], + device='cuda:0'), in_proj_covar=tensor([0.0523, 0.0423, 0.0509, 0.0356, 0.0410, 0.0449, 0.0446, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:15:24,372 INFO [train.py:903] (0/4) Epoch 26, batch 4000, loss[loss=0.2413, simple_loss=0.3121, pruned_loss=0.08524, over 13267.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06157, over 3805813.38 frames. ], batch size: 135, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:15:53,888 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:13,013 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 07:16:24,944 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:26,987 INFO [train.py:903] (0/4) Epoch 26, batch 4050, loss[loss=0.219, simple_loss=0.2849, pruned_loss=0.07656, over 19726.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06123, over 3809202.12 frames. 
], batch size: 51, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:16:27,145 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:27,466 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:35,717 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1760, 1.8089, 1.4586, 1.2611, 1.6505, 1.2411, 1.1675, 1.6185], + device='cuda:0'), covar=tensor([0.0837, 0.0789, 0.1107, 0.0807, 0.0548, 0.1245, 0.0661, 0.0468], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0320, 0.0339, 0.0272, 0.0250, 0.0346, 0.0293, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:16:40,296 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:41,108 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 4.947e+02 6.251e+02 7.600e+02 1.203e+03, threshold=1.250e+03, percent-clipped=0.0 +2023-04-03 07:16:52,058 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:59,242 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174773.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:01,654 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174775.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:32,202 INFO [train.py:903] (0/4) Epoch 26, batch 4100, loss[loss=0.2367, simple_loss=0.3254, pruned_loss=0.074, over 19283.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06106, over 3817993.73 frames. ], batch size: 66, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:07,632 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 07:18:22,237 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6526, 4.2560, 2.7774, 3.7517, 0.9555, 4.2354, 4.0896, 4.1584], + device='cuda:0'), covar=tensor([0.0611, 0.1070, 0.1848, 0.0903, 0.4050, 0.0669, 0.0931, 0.1065], + device='cuda:0'), in_proj_covar=tensor([0.0521, 0.0421, 0.0506, 0.0354, 0.0409, 0.0447, 0.0444, 0.0471], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:18:38,181 INFO [train.py:903] (0/4) Epoch 26, batch 4150, loss[loss=0.1992, simple_loss=0.2877, pruned_loss=0.05536, over 19611.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2838, pruned_loss=0.06081, over 3813364.54 frames. 
], batch size: 57, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:53,312 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.046e+02 6.484e+02 8.542e+02 2.236e+03, threshold=1.297e+03, percent-clipped=8.0 +2023-04-03 07:18:57,232 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:05,608 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:11,134 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3615, 3.1281, 2.4016, 2.5156, 2.2596, 2.8023, 0.9227, 2.2630], + device='cuda:0'), covar=tensor([0.0738, 0.0606, 0.0753, 0.1152, 0.1109, 0.1075, 0.1628, 0.1024], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0362, 0.0366, 0.0389, 0.0469, 0.0396, 0.0345, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:19:38,475 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:41,597 INFO [train.py:903] (0/4) Epoch 26, batch 4200, loss[loss=0.1875, simple_loss=0.2643, pruned_loss=0.0554, over 19634.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2851, pruned_loss=0.06161, over 3803478.52 frames. ], batch size: 50, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:19:42,845 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 07:20:14,702 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1779, 1.8081, 1.4419, 1.2689, 1.6062, 1.2743, 1.1752, 1.6798], + device='cuda:0'), covar=tensor([0.0851, 0.0922, 0.1197, 0.0845, 0.0611, 0.1347, 0.0667, 0.0459], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0320, 0.0339, 0.0272, 0.0250, 0.0345, 0.0293, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:20:36,867 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6801, 1.6022, 1.6621, 2.3362, 1.6166, 2.0750, 2.0667, 1.7801], + device='cuda:0'), covar=tensor([0.0863, 0.0917, 0.0971, 0.0661, 0.0914, 0.0733, 0.0829, 0.0695], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 07:20:44,728 INFO [train.py:903] (0/4) Epoch 26, batch 4250, loss[loss=0.2257, simple_loss=0.3164, pruned_loss=0.06749, over 18323.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.0618, over 3790923.69 frames. ], batch size: 83, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:20:55,179 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 07:21:01,855 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.804e+02 5.687e+02 7.207e+02 1.586e+03, threshold=1.137e+03, percent-clipped=5.0 +2023-04-03 07:21:07,841 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 07:21:16,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-03 07:21:30,496 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6599, 1.5221, 1.5866, 2.0190, 1.5006, 1.8602, 1.8702, 1.7184], + device='cuda:0'), covar=tensor([0.0877, 0.1000, 0.1029, 0.0704, 0.0893, 0.0819, 0.0881, 0.0716], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0213, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 07:21:49,596 INFO [train.py:903] (0/4) Epoch 26, batch 4300, loss[loss=0.1935, simple_loss=0.2854, pruned_loss=0.05083, over 19538.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2856, pruned_loss=0.06224, over 3800340.88 frames. ], batch size: 54, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:22:11,532 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:20,970 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:38,902 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 07:22:41,547 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:52,832 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:53,613 INFO [train.py:903] (0/4) Epoch 26, batch 4350, loss[loss=0.1951, simple_loss=0.2842, pruned_loss=0.053, over 19760.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2851, pruned_loss=0.06197, over 3805883.82 frames. ], batch size: 63, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:23:08,529 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 4.980e+02 6.586e+02 7.974e+02 2.340e+03, threshold=1.317e+03, percent-clipped=8.0 +2023-04-03 07:23:14,390 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:23:33,070 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9538, 1.8089, 1.6676, 1.9593, 1.6445, 1.7306, 1.5906, 1.8949], + device='cuda:0'), covar=tensor([0.1100, 0.1555, 0.1570, 0.1032, 0.1593, 0.0584, 0.1621, 0.0824], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0355, 0.0314, 0.0254, 0.0304, 0.0255, 0.0316, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:23:56,993 INFO [train.py:903] (0/4) Epoch 26, batch 4400, loss[loss=0.1883, simple_loss=0.2683, pruned_loss=0.05416, over 19424.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2849, pruned_loss=0.06192, over 3811277.11 frames. ], batch size: 48, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:24:17,222 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 07:24:23,152 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:26,283 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 07:24:41,395 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9823, 3.6271, 2.6489, 3.2287, 1.0784, 3.6189, 3.4956, 3.5239], + device='cuda:0'), covar=tensor([0.0896, 0.1134, 0.1908, 0.0972, 0.3904, 0.0816, 0.1053, 0.1352], + device='cuda:0'), in_proj_covar=tensor([0.0524, 0.0423, 0.0509, 0.0356, 0.0409, 0.0449, 0.0447, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:24:55,276 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:59,549 INFO [train.py:903] (0/4) Epoch 26, batch 4450, loss[loss=0.2371, simple_loss=0.3115, pruned_loss=0.08135, over 17018.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2839, pruned_loss=0.06158, over 3830162.49 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:25:08,185 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:25:14,965 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.830e+02 6.413e+02 8.619e+02 2.132e+03, threshold=1.283e+03, percent-clipped=8.0 +2023-04-03 07:25:41,728 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175182.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:02,897 INFO [train.py:903] (0/4) Epoch 26, batch 4500, loss[loss=0.2003, simple_loss=0.2837, pruned_loss=0.05847, over 19673.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06099, over 3834526.44 frames. ], batch size: 53, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:26:06,553 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1567, 3.3953, 1.9249, 2.0908, 2.9987, 1.7415, 1.6464, 2.2980], + device='cuda:0'), covar=tensor([0.1336, 0.0678, 0.1214, 0.0949, 0.0588, 0.1363, 0.1022, 0.0677], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0319, 0.0338, 0.0271, 0.0249, 0.0344, 0.0292, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:26:36,000 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:38,394 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9700, 1.7419, 1.9176, 2.7146, 1.8188, 2.1193, 2.3468, 1.9806], + device='cuda:0'), covar=tensor([0.0893, 0.0989, 0.1011, 0.0785, 0.0993, 0.0819, 0.0912, 0.0712], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0225, 0.0229, 0.0242, 0.0227, 0.0214, 0.0190, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 07:27:08,209 INFO [train.py:903] (0/4) Epoch 26, batch 4550, loss[loss=0.194, simple_loss=0.2746, pruned_loss=0.0567, over 19745.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06101, over 3830484.06 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:27:15,063 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-03 07:27:23,517 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.834e+02 6.204e+02 7.660e+02 2.009e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-03 07:27:28,773 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3316, 1.3857, 1.5613, 1.5037, 1.6974, 1.8429, 1.7395, 0.5519], + device='cuda:0'), covar=tensor([0.2552, 0.4349, 0.2699, 0.2016, 0.1710, 0.2372, 0.1519, 0.5035], + device='cuda:0'), in_proj_covar=tensor([0.0545, 0.0661, 0.0738, 0.0500, 0.0626, 0.0540, 0.0662, 0.0564], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:27:40,231 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 07:28:13,298 INFO [train.py:903] (0/4) Epoch 26, batch 4600, loss[loss=0.1699, simple_loss=0.2518, pruned_loss=0.04399, over 19770.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.0616, over 3837263.58 frames. ], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:16,038 INFO [train.py:903] (0/4) Epoch 26, batch 4650, loss[loss=0.1539, simple_loss=0.2339, pruned_loss=0.03699, over 18161.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06071, over 3835099.12 frames. ], batch size: 40, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:29,900 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 4.879e+02 5.869e+02 7.462e+02 1.692e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 07:29:31,128 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 07:29:44,485 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 07:30:00,505 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5025, 1.0086, 1.2328, 1.1790, 1.9903, 1.1040, 2.0862, 2.3391], + device='cuda:0'), covar=tensor([0.0975, 0.3660, 0.3487, 0.2120, 0.1437, 0.2457, 0.1253, 0.0705], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0372, 0.0392, 0.0350, 0.0380, 0.0355, 0.0391, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:30:03,994 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:17,713 INFO [train.py:903] (0/4) Epoch 26, batch 4700, loss[loss=0.2, simple_loss=0.2679, pruned_loss=0.06601, over 19022.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2836, pruned_loss=0.06091, over 3836226.39 frames. ], batch size: 42, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:30:35,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:36,042 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. 
limit=5.0 +2023-04-03 07:30:40,250 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0825, 1.9324, 1.7870, 1.6886, 1.4814, 1.6482, 0.4319, 1.0484], + device='cuda:0'), covar=tensor([0.0664, 0.0671, 0.0560, 0.0911, 0.1300, 0.0960, 0.1472, 0.1138], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0362, 0.0367, 0.0389, 0.0468, 0.0395, 0.0344, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:30:41,026 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 07:30:43,655 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8309, 1.5961, 1.6543, 2.3622, 1.7284, 2.0489, 2.1450, 1.8322], + device='cuda:0'), covar=tensor([0.0836, 0.0958, 0.1009, 0.0715, 0.0870, 0.0735, 0.0835, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0240, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 07:31:06,994 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:31:18,754 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8671, 1.5814, 1.4677, 1.7778, 1.5378, 1.6009, 1.4462, 1.7151], + device='cuda:0'), covar=tensor([0.1165, 0.1370, 0.1673, 0.1070, 0.1328, 0.0608, 0.1598, 0.0843], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0255, 0.0303, 0.0255, 0.0317, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:31:21,516 INFO [train.py:903] (0/4) Epoch 26, batch 4750, loss[loss=0.1782, simple_loss=0.271, pruned_loss=0.04273, over 19659.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.06112, over 3836436.83 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:31:37,185 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.945e+02 6.030e+02 7.655e+02 2.128e+03, threshold=1.206e+03, percent-clipped=6.0 +2023-04-03 07:31:38,753 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175463.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:22,896 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:23,792 INFO [train.py:903] (0/4) Epoch 26, batch 4800, loss[loss=0.167, simple_loss=0.2547, pruned_loss=0.03968, over 19490.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.06079, over 3842159.02 frames. ], batch size: 49, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:32:25,130 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:29,430 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. 
limit=2.0 +2023-04-03 07:32:43,081 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1892, 1.2266, 1.6812, 1.2739, 2.6342, 3.5211, 3.2933, 3.8143], + device='cuda:0'), covar=tensor([0.1684, 0.4188, 0.3528, 0.2659, 0.0620, 0.0207, 0.0218, 0.0277], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0329, 0.0363, 0.0270, 0.0253, 0.0195, 0.0219, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 07:33:28,507 INFO [train.py:903] (0/4) Epoch 26, batch 4850, loss[loss=0.1829, simple_loss=0.2768, pruned_loss=0.04449, over 17340.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06068, over 3834038.47 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:33:42,548 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.553e+02 5.462e+02 6.461e+02 1.537e+03, threshold=1.092e+03, percent-clipped=2.0 +2023-04-03 07:33:49,690 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 07:33:51,026 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:34:12,016 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 07:34:17,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 07:34:17,978 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 07:34:19,653 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5221, 1.5902, 1.8111, 1.7439, 2.4851, 2.1980, 2.5749, 1.1118], + device='cuda:0'), covar=tensor([0.2499, 0.4310, 0.2696, 0.2015, 0.1599, 0.2296, 0.1497, 0.4712], + device='cuda:0'), in_proj_covar=tensor([0.0547, 0.0662, 0.0741, 0.0503, 0.0629, 0.0542, 0.0664, 0.0565], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:34:27,155 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 07:34:30,720 INFO [train.py:903] (0/4) Epoch 26, batch 4900, loss[loss=0.2402, simple_loss=0.3164, pruned_loss=0.08202, over 19483.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2838, pruned_loss=0.06084, over 3835874.28 frames. ], batch size: 64, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:34:46,804 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 07:34:50,742 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:35:31,954 INFO [train.py:903] (0/4) Epoch 26, batch 4950, loss[loss=0.1921, simple_loss=0.2764, pruned_loss=0.0539, over 19667.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06161, over 3820656.04 frames. ], batch size: 58, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:35:49,941 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.878e+02 6.108e+02 7.489e+02 1.803e+03, threshold=1.222e+03, percent-clipped=10.0 +2023-04-03 07:35:49,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-03 07:36:13,146 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 07:36:15,546 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175684.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:36:36,321 INFO [train.py:903] (0/4) Epoch 26, batch 5000, loss[loss=0.1811, simple_loss=0.2594, pruned_loss=0.05142, over 19390.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06098, over 3828294.68 frames. ], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:36:46,075 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 07:36:56,352 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 07:37:10,898 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175728.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:16,377 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175732.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:33,259 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. limit=2.0 +2023-04-03 07:37:39,576 INFO [train.py:903] (0/4) Epoch 26, batch 5050, loss[loss=0.1895, simple_loss=0.2676, pruned_loss=0.05565, over 19414.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.285, pruned_loss=0.06123, over 3820452.60 frames. ], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:37:46,789 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:53,681 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.916e+02 5.717e+02 6.994e+02 1.273e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-04-03 07:38:14,304 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 07:38:37,155 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-03 07:38:42,564 INFO [train.py:903] (0/4) Epoch 26, batch 5100, loss[loss=0.2191, simple_loss=0.3024, pruned_loss=0.06789, over 19664.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2843, pruned_loss=0.06091, over 3822764.67 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:38:44,171 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1311, 2.1999, 2.3419, 2.6062, 2.1982, 2.5689, 2.3942, 2.2769], + device='cuda:0'), covar=tensor([0.3311, 0.2990, 0.1623, 0.1916, 0.3146, 0.1727, 0.3560, 0.2585], + device='cuda:0'), in_proj_covar=tensor([0.0928, 0.1004, 0.0737, 0.0949, 0.0907, 0.0843, 0.0857, 0.0803], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 07:38:49,488 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-03 07:38:49,788 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2013, 1.8564, 1.7462, 2.1363, 1.7930, 1.7970, 1.6948, 2.0342], + device='cuda:0'), covar=tensor([0.0991, 0.1432, 0.1573, 0.0967, 0.1393, 0.0586, 0.1528, 0.0744], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0360, 0.0318, 0.0257, 0.0307, 0.0259, 0.0321, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:38:52,906 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 07:38:57,449 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 07:39:37,086 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:40,234 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-03 07:39:41,974 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175847.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:45,160 INFO [train.py:903] (0/4) Epoch 26, batch 5150, loss[loss=0.1673, simple_loss=0.2483, pruned_loss=0.04317, over 19751.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06111, over 3802305.82 frames. ], batch size: 45, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:39:55,536 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 07:39:58,799 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-03 07:40:01,368 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.003e+02 6.332e+02 8.398e+02 1.509e+03, threshold=1.266e+03, percent-clipped=8.0 +2023-04-03 07:40:14,035 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:15,238 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:30,102 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 07:40:47,054 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:49,293 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:50,195 INFO [train.py:903] (0/4) Epoch 26, batch 5200, loss[loss=0.1827, simple_loss=0.271, pruned_loss=0.04717, over 19666.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06106, over 3802863.34 frames. ], batch size: 60, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:41:02,280 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-03 07:41:12,232 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8642, 1.8322, 1.6846, 1.6342, 1.4718, 1.5702, 0.5216, 1.0338], + device='cuda:0'), covar=tensor([0.0576, 0.0569, 0.0416, 0.0569, 0.1012, 0.0778, 0.1247, 0.0972], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0361, 0.0366, 0.0389, 0.0469, 0.0396, 0.0344, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 07:41:41,503 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:41:45,617 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 07:41:53,690 INFO [train.py:903] (0/4) Epoch 26, batch 5250, loss[loss=0.1696, simple_loss=0.2484, pruned_loss=0.04544, over 19756.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06144, over 3806650.93 frames. ], batch size: 45, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:42:03,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:07,499 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.675e+02 5.849e+02 7.641e+02 1.436e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 07:42:08,948 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:11,236 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175965.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:55,056 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-176000.pt +2023-04-03 07:42:56,005 INFO [train.py:903] (0/4) Epoch 26, batch 5300, loss[loss=0.1836, simple_loss=0.272, pruned_loss=0.04764, over 19771.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2852, pruned_loss=0.06226, over 3789385.95 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:43:08,844 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 07:43:57,535 INFO [train.py:903] (0/4) Epoch 26, batch 5350, loss[loss=0.1919, simple_loss=0.2775, pruned_loss=0.05311, over 19648.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.285, pruned_loss=0.06225, over 3803557.23 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:44:14,894 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.809e+02 5.990e+02 7.353e+02 1.688e+03, threshold=1.198e+03, percent-clipped=9.0 +2023-04-03 07:44:27,819 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176072.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:44:30,103 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 07:45:03,466 INFO [train.py:903] (0/4) Epoch 26, batch 5400, loss[loss=0.2327, simple_loss=0.3246, pruned_loss=0.07045, over 19662.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.0616, over 3815463.86 frames. 
], batch size: 58, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:45:07,635 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:36,745 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:37,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:08,008 INFO [train.py:903] (0/4) Epoch 26, batch 5450, loss[loss=0.2245, simple_loss=0.304, pruned_loss=0.0725, over 18232.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2842, pruned_loss=0.06151, over 3820420.57 frames. ], batch size: 83, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:46:10,710 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:23,251 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.815e+02 4.882e+02 5.855e+02 6.912e+02 1.680e+03, threshold=1.171e+03, percent-clipped=1.0 +2023-04-03 07:46:55,088 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:11,437 INFO [train.py:903] (0/4) Epoch 26, batch 5500, loss[loss=0.2104, simple_loss=0.2901, pruned_loss=0.06537, over 19675.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2835, pruned_loss=0.06121, over 3815030.37 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:47:28,806 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:30,665 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 07:47:48,427 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6494, 1.2249, 1.2734, 1.4848, 1.0839, 1.4169, 1.2774, 1.4494], + device='cuda:0'), covar=tensor([0.1175, 0.1279, 0.1753, 0.1117, 0.1469, 0.0673, 0.1587, 0.0907], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0355, 0.0315, 0.0255, 0.0304, 0.0255, 0.0317, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 07:48:02,343 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:06,791 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:14,675 INFO [train.py:903] (0/4) Epoch 26, batch 5550, loss[loss=0.2409, simple_loss=0.3156, pruned_loss=0.08312, over 19372.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2845, pruned_loss=0.06159, over 3802227.28 frames. ], batch size: 70, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:48:17,146 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 07:48:29,034 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:32,240 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.802e+02 5.930e+02 7.241e+02 1.738e+03, threshold=1.186e+03, percent-clipped=4.0 +2023-04-03 07:48:37,816 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-04-03 07:48:47,655 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:08,002 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 07:49:18,895 INFO [train.py:903] (0/4) Epoch 26, batch 5600, loss[loss=0.1955, simple_loss=0.28, pruned_loss=0.0555, over 19622.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2848, pruned_loss=0.0614, over 3808838.96 frames. ], batch size: 57, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:49:28,225 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176307.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:48,578 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-03 07:50:23,079 INFO [train.py:903] (0/4) Epoch 26, batch 5650, loss[loss=0.1555, simple_loss=0.2405, pruned_loss=0.03521, over 19330.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2851, pruned_loss=0.06158, over 3814912.22 frames. ], batch size: 44, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:50:26,193 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 07:50:32,680 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:50:39,208 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.381e+02 5.619e+02 7.137e+02 2.187e+03, threshold=1.124e+03, percent-clipped=4.0 +2023-04-03 07:51:02,586 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 07:51:24,935 INFO [train.py:903] (0/4) Epoch 26, batch 5700, loss[loss=0.1675, simple_loss=0.2485, pruned_loss=0.04318, over 19400.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2838, pruned_loss=0.06088, over 3824659.12 frames. ], batch size: 48, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:51:52,435 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:19,141 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:22,346 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 07:52:26,806 INFO [train.py:903] (0/4) Epoch 26, batch 5750, loss[loss=0.1804, simple_loss=0.264, pruned_loss=0.0484, over 19845.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06037, over 3839557.57 frames. ], batch size: 52, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:52:30,358 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 07:52:33,945 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-03 07:52:44,006 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 5.112e+02 6.171e+02 7.862e+02 1.795e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 07:52:49,987 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:09,500 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:28,519 INFO [train.py:903] (0/4) Epoch 26, batch 5800, loss[loss=0.2101, simple_loss=0.3036, pruned_loss=0.05828, over 19775.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06082, over 3832072.29 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:32,079 INFO [train.py:903] (0/4) Epoch 26, batch 5850, loss[loss=0.1941, simple_loss=0.2665, pruned_loss=0.06089, over 19755.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2834, pruned_loss=0.06098, over 3826435.57 frames. ], batch size: 46, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:48,251 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.827e+02 6.114e+02 8.553e+02 2.097e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 07:55:30,012 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 07:55:33,225 INFO [train.py:903] (0/4) Epoch 26, batch 5900, loss[loss=0.2106, simple_loss=0.2944, pruned_loss=0.06343, over 19662.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2849, pruned_loss=0.06166, over 3832834.76 frames. ], batch size: 60, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:55:37,889 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:50,029 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:51,762 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 07:55:55,443 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:22,737 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:27,347 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:35,014 INFO [train.py:903] (0/4) Epoch 26, batch 5950, loss[loss=0.1718, simple_loss=0.2559, pruned_loss=0.04384, over 19492.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2857, pruned_loss=0.06203, over 3842277.24 frames. 
], batch size: 49, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:56:51,412 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.421e+02 5.079e+02 6.315e+02 7.489e+02 1.732e+03, threshold=1.263e+03, percent-clipped=4.0 +2023-04-03 07:57:12,940 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:18,394 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:38,452 INFO [train.py:903] (0/4) Epoch 26, batch 6000, loss[loss=0.2091, simple_loss=0.2967, pruned_loss=0.06072, over 19563.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06157, over 3843936.32 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:57:38,453 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 07:57:51,361 INFO [train.py:937] (0/4) Epoch 26, validation: loss=0.1675, simple_loss=0.2672, pruned_loss=0.03393, over 944034.00 frames. +2023-04-03 07:57:51,362 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 07:57:55,493 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176703.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:16,475 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176719.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:36,003 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:54,246 INFO [train.py:903] (0/4) Epoch 26, batch 6050, loss[loss=0.2094, simple_loss=0.2933, pruned_loss=0.06273, over 19786.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2863, pruned_loss=0.06184, over 3831507.07 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:58:58,144 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0424, 1.3768, 1.7467, 0.9034, 2.3954, 3.0979, 2.8340, 3.2890], + device='cuda:0'), covar=tensor([0.1626, 0.3651, 0.3173, 0.2652, 0.0590, 0.0232, 0.0257, 0.0340], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0327, 0.0361, 0.0269, 0.0253, 0.0193, 0.0218, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 07:59:11,767 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 5.094e+02 6.239e+02 7.368e+02 1.384e+03, threshold=1.248e+03, percent-clipped=1.0 +2023-04-03 07:59:58,067 INFO [train.py:903] (0/4) Epoch 26, batch 6100, loss[loss=0.2349, simple_loss=0.3065, pruned_loss=0.08167, over 13685.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06217, over 3815580.54 frames. 
], batch size: 135, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:00:17,907 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8342, 4.4358, 2.8935, 3.8810, 1.1056, 4.3639, 4.2717, 4.3808], + device='cuda:0'), covar=tensor([0.0546, 0.0874, 0.1839, 0.0869, 0.3951, 0.0653, 0.0925, 0.1066], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0423, 0.0511, 0.0357, 0.0410, 0.0450, 0.0446, 0.0475], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 08:00:33,088 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:00:43,833 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 08:01:00,302 INFO [train.py:903] (0/4) Epoch 26, batch 6150, loss[loss=0.1862, simple_loss=0.2575, pruned_loss=0.0574, over 19777.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.0618, over 3821545.30 frames. ], batch size: 45, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:01:18,091 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 4.899e+02 5.851e+02 7.446e+02 2.190e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 08:01:21,509 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 08:01:25,331 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:01:25,547 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4401, 2.4978, 2.7418, 3.2060, 2.4571, 3.1651, 2.7173, 2.5327], + device='cuda:0'), covar=tensor([0.4139, 0.4136, 0.1791, 0.2497, 0.4442, 0.2045, 0.4567, 0.3190], + device='cuda:0'), in_proj_covar=tensor([0.0929, 0.1007, 0.0739, 0.0952, 0.0908, 0.0845, 0.0860, 0.0804], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 08:02:01,461 INFO [train.py:903] (0/4) Epoch 26, batch 6200, loss[loss=0.2038, simple_loss=0.2852, pruned_loss=0.06126, over 18804.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06121, over 3832709.14 frames. ], batch size: 74, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:02:13,190 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5497, 1.4445, 1.4768, 1.9337, 1.4748, 1.7229, 1.8166, 1.6222], + device='cuda:0'), covar=tensor([0.0913, 0.0968, 0.1009, 0.0715, 0.0903, 0.0811, 0.0886, 0.0734], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0226, 0.0229, 0.0241, 0.0227, 0.0213, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:02:54,365 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:02,384 INFO [train.py:903] (0/4) Epoch 26, batch 6250, loss[loss=0.202, simple_loss=0.2922, pruned_loss=0.05596, over 19800.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.06136, over 3827015.78 frames. 
], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:03:20,788 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.905e+02 4.899e+02 6.025e+02 7.517e+02 2.005e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 08:03:29,817 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 08:03:33,681 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:47,527 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:51,227 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:52,865 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-03 08:04:03,474 INFO [train.py:903] (0/4) Epoch 26, batch 6300, loss[loss=0.2234, simple_loss=0.3041, pruned_loss=0.07132, over 19517.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2855, pruned_loss=0.06163, over 3833080.43 frames. ], batch size: 64, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:04:03,951 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:23,637 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177015.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:37,551 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177027.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:05:06,334 INFO [train.py:903] (0/4) Epoch 26, batch 6350, loss[loss=0.1967, simple_loss=0.2657, pruned_loss=0.0639, over 19768.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06135, over 3839077.62 frames. ], batch size: 45, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:05:26,137 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.448e+02 5.061e+02 6.104e+02 7.524e+02 1.291e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 08:06:11,980 INFO [train.py:903] (0/4) Epoch 26, batch 6400, loss[loss=0.1892, simple_loss=0.273, pruned_loss=0.05266, over 19758.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.06171, over 3836314.93 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:06:14,694 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:19,569 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 08:06:45,640 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:48,189 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:05,538 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:14,947 INFO [train.py:903] (0/4) Epoch 26, batch 6450, loss[loss=0.188, simple_loss=0.2771, pruned_loss=0.04941, over 19764.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.0612, over 3826302.74 frames. 
], batch size: 54, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:07:32,375 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 08:07:33,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 4.651e+02 5.846e+02 7.696e+02 2.286e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-03 08:07:56,667 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 08:08:15,638 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177199.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:16,457 INFO [train.py:903] (0/4) Epoch 26, batch 6500, loss[loss=0.1708, simple_loss=0.2487, pruned_loss=0.04643, over 19735.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2851, pruned_loss=0.06156, over 3832194.71 frames. ], batch size: 45, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:08:17,675 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 08:08:38,036 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:49,246 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:09:20,909 INFO [train.py:903] (0/4) Epoch 26, batch 6550, loss[loss=0.1498, simple_loss=0.2354, pruned_loss=0.03215, over 19805.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06122, over 3820294.58 frames. ], batch size: 48, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:09:28,550 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6190, 1.4424, 1.4866, 2.1525, 1.6573, 1.9470, 1.9686, 1.6827], + device='cuda:0'), covar=tensor([0.0872, 0.0978, 0.1029, 0.0745, 0.0849, 0.0753, 0.0844, 0.0724], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0224, 0.0227, 0.0239, 0.0225, 0.0212, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:09:39,906 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.796e+02 6.270e+02 7.966e+02 1.683e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 08:10:25,212 INFO [train.py:903] (0/4) Epoch 26, batch 6600, loss[loss=0.2417, simple_loss=0.3176, pruned_loss=0.08288, over 17352.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06107, over 3822620.18 frames. ], batch size: 101, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:02,497 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:27,598 INFO [train.py:903] (0/4) Epoch 26, batch 6650, loss[loss=0.2153, simple_loss=0.303, pruned_loss=0.0638, over 18261.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06127, over 3818553.99 frames. 
], batch size: 84, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:37,270 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:47,337 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.339e+02 5.694e+02 7.782e+02 1.307e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 08:12:11,036 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:28,526 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:30,607 INFO [train.py:903] (0/4) Epoch 26, batch 6700, loss[loss=0.1894, simple_loss=0.2722, pruned_loss=0.05327, over 19607.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2842, pruned_loss=0.06097, over 3819067.52 frames. ], batch size: 57, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:12:49,321 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5428, 1.5812, 1.7721, 1.7693, 2.4456, 2.2422, 2.5855, 1.0061], + device='cuda:0'), covar=tensor([0.2571, 0.4488, 0.2977, 0.2012, 0.1652, 0.2297, 0.1558, 0.4995], + device='cuda:0'), in_proj_covar=tensor([0.0549, 0.0665, 0.0743, 0.0502, 0.0630, 0.0543, 0.0664, 0.0567], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:13:01,352 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177423.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:02,833 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 08:13:31,556 INFO [train.py:903] (0/4) Epoch 26, batch 6750, loss[loss=0.1722, simple_loss=0.2494, pruned_loss=0.0475, over 19354.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2837, pruned_loss=0.06102, over 3819331.14 frames. ], batch size: 47, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:48,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.721e+02 5.880e+02 7.244e+02 1.873e+03, threshold=1.176e+03, percent-clipped=5.0 +2023-04-03 08:13:55,574 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177471.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:57,752 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177473.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:14:07,406 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 08:14:28,156 INFO [train.py:903] (0/4) Epoch 26, batch 6800, loss[loss=0.1883, simple_loss=0.2597, pruned_loss=0.05842, over 19720.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06077, over 3831496.39 frames. ], batch size: 46, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:14:58,603 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-26.pt +2023-04-03 08:15:14,315 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 08:15:15,395 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 08:15:18,385 INFO [train.py:903] (0/4) Epoch 27, batch 0, loss[loss=0.2081, simple_loss=0.2867, pruned_loss=0.06473, over 19453.00 frames. 
], tot_loss[loss=0.2081, simple_loss=0.2867, pruned_loss=0.06473, over 19453.00 frames. ], batch size: 49, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:15:18,385 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 08:15:30,268 INFO [train.py:937] (0/4) Epoch 27, validation: loss=0.1666, simple_loss=0.2668, pruned_loss=0.03317, over 944034.00 frames. +2023-04-03 08:15:30,269 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 08:15:35,641 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2230, 1.3192, 1.2570, 1.0831, 1.1312, 1.1047, 0.1241, 0.3957], + device='cuda:0'), covar=tensor([0.0734, 0.0751, 0.0473, 0.0625, 0.1386, 0.0716, 0.1481, 0.1201], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0360, 0.0364, 0.0387, 0.0468, 0.0392, 0.0342, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:15:42,910 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 08:16:15,181 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.975e+02 6.244e+02 7.696e+02 2.158e+03, threshold=1.249e+03, percent-clipped=8.0 +2023-04-03 08:16:24,582 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177571.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:33,682 INFO [train.py:903] (0/4) Epoch 27, batch 50, loss[loss=0.2211, simple_loss=0.2999, pruned_loss=0.07116, over 19658.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2831, pruned_loss=0.06013, over 867336.33 frames. ], batch size: 58, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:16:43,125 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:43,179 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:46,393 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:06,248 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 08:17:15,809 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:35,977 INFO [train.py:903] (0/4) Epoch 27, batch 100, loss[loss=0.207, simple_loss=0.3009, pruned_loss=0.05651, over 19671.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2839, pruned_loss=0.06066, over 1532178.45 frames. ], batch size: 55, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:17:47,465 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-03 08:17:47,819 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2087, 1.2601, 1.6325, 1.2066, 2.6892, 3.6377, 3.3872, 3.8753], + device='cuda:0'), covar=tensor([0.1664, 0.3921, 0.3522, 0.2681, 0.0664, 0.0218, 0.0221, 0.0278], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0328, 0.0361, 0.0269, 0.0251, 0.0194, 0.0218, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 08:18:23,247 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.210e+02 6.703e+02 8.227e+02 2.617e+03, threshold=1.341e+03, percent-clipped=11.0 +2023-04-03 08:18:39,594 INFO [train.py:903] (0/4) Epoch 27, batch 150, loss[loss=0.1667, simple_loss=0.2396, pruned_loss=0.04688, over 19748.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.286, pruned_loss=0.06132, over 2046877.57 frames. ], batch size: 47, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:18:44,576 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:19:40,066 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 08:19:42,203 INFO [train.py:903] (0/4) Epoch 27, batch 200, loss[loss=0.2283, simple_loss=0.3094, pruned_loss=0.07361, over 19442.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06138, over 2429729.64 frames. ], batch size: 70, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:20:29,446 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 4.383e+02 5.368e+02 7.234e+02 1.640e+03, threshold=1.074e+03, percent-clipped=1.0 +2023-04-03 08:20:46,585 INFO [train.py:903] (0/4) Epoch 27, batch 250, loss[loss=0.2421, simple_loss=0.325, pruned_loss=0.07961, over 19786.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06101, over 2748567.35 frames. ], batch size: 56, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:21:12,211 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:32,136 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:43,032 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.39 vs. limit=5.0 +2023-04-03 08:21:50,875 INFO [train.py:903] (0/4) Epoch 27, batch 300, loss[loss=0.163, simple_loss=0.2465, pruned_loss=0.03976, over 19859.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2827, pruned_loss=0.0602, over 3001390.23 frames. 
], batch size: 52, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:22:08,514 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:10,821 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177844.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:36,373 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.100e+02 5.015e+02 6.251e+02 7.839e+02 1.329e+03, threshold=1.250e+03, percent-clipped=8.0 +2023-04-03 08:22:40,002 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177867.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:42,353 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:52,701 INFO [train.py:903] (0/4) Epoch 27, batch 350, loss[loss=0.1852, simple_loss=0.2733, pruned_loss=0.04854, over 19524.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2821, pruned_loss=0.06003, over 3175295.98 frames. ], batch size: 54, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:23:00,638 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 08:23:40,476 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177915.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:23:47,932 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2424, 2.1330, 1.9942, 1.8870, 1.6558, 1.8570, 0.8085, 1.3550], + device='cuda:0'), covar=tensor([0.0633, 0.0665, 0.0538, 0.0868, 0.1238, 0.1049, 0.1424, 0.1112], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0390, 0.0472, 0.0395, 0.0346, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:23:56,872 INFO [train.py:903] (0/4) Epoch 27, batch 400, loss[loss=0.1698, simple_loss=0.2577, pruned_loss=0.04089, over 19835.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2815, pruned_loss=0.05984, over 3307665.80 frames. ], batch size: 52, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:24:43,706 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.795e+02 5.555e+02 6.674e+02 1.146e+03, threshold=1.111e+03, percent-clipped=0.0 +2023-04-03 08:24:58,338 INFO [train.py:903] (0/4) Epoch 27, batch 450, loss[loss=0.2363, simple_loss=0.3187, pruned_loss=0.07695, over 19506.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2825, pruned_loss=0.06032, over 3429360.31 frames. ], batch size: 64, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:25:26,935 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-178000.pt +2023-04-03 08:25:39,726 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 08:25:40,956 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 08:25:42,688 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2228, 2.1433, 2.0233, 1.8807, 1.6340, 1.8264, 0.7574, 1.3730], + device='cuda:0'), covar=tensor([0.0722, 0.0688, 0.0549, 0.0941, 0.1252, 0.1008, 0.1426, 0.1116], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0363, 0.0366, 0.0390, 0.0472, 0.0394, 0.0346, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:25:59,675 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178026.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:02,530 INFO [train.py:903] (0/4) Epoch 27, batch 500, loss[loss=0.2144, simple_loss=0.2961, pruned_loss=0.06633, over 19670.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06033, over 3529006.71 frames. ], batch size: 59, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:26:06,490 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:48,564 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 5.151e+02 6.325e+02 8.134e+02 1.856e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-03 08:27:07,126 INFO [train.py:903] (0/4) Epoch 27, batch 550, loss[loss=0.227, simple_loss=0.3036, pruned_loss=0.07519, over 19667.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06056, over 3598743.22 frames. ], batch size: 60, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:27:11,686 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-03 08:28:09,862 INFO [train.py:903] (0/4) Epoch 27, batch 600, loss[loss=0.2186, simple_loss=0.2976, pruned_loss=0.06987, over 18262.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06041, over 3640211.00 frames. ], batch size: 84, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:28:25,326 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:26,321 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:41,231 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6468, 1.4942, 1.4967, 2.1841, 1.5565, 1.9731, 1.9323, 1.7256], + device='cuda:0'), covar=tensor([0.0852, 0.0955, 0.1043, 0.0756, 0.0911, 0.0742, 0.0861, 0.0703], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0223, 0.0227, 0.0238, 0.0225, 0.0212, 0.0186, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:28:48,731 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:54,526 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 08:28:55,499 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 5.259e+02 6.338e+02 7.640e+02 1.730e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-03 08:29:03,142 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-03 08:29:11,516 INFO [train.py:903] (0/4) Epoch 27, batch 650, loss[loss=0.1855, simple_loss=0.2733, pruned_loss=0.04884, over 19537.00 frames. 
], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06132, over 3680905.34 frames. ], batch size: 56, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:29:40,643 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7842, 1.4565, 1.6324, 1.6449, 3.3335, 1.2374, 2.5055, 3.8639], + device='cuda:0'), covar=tensor([0.0426, 0.2764, 0.2857, 0.1771, 0.0659, 0.2497, 0.1232, 0.0196], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0375, 0.0393, 0.0352, 0.0380, 0.0357, 0.0391, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 08:30:11,962 INFO [train.py:903] (0/4) Epoch 27, batch 700, loss[loss=0.1846, simple_loss=0.2644, pruned_loss=0.05238, over 19396.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2838, pruned_loss=0.06086, over 3716868.43 frames. ], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:30:16,999 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6539, 1.4750, 1.4971, 2.1565, 1.5473, 1.8639, 1.9454, 1.6577], + device='cuda:0'), covar=tensor([0.0856, 0.0962, 0.1014, 0.0694, 0.0885, 0.0759, 0.0812, 0.0727], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0227, 0.0238, 0.0225, 0.0212, 0.0187, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:30:17,024 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0606, 1.9488, 1.7438, 2.0536, 1.8129, 1.7355, 1.7424, 1.9417], + device='cuda:0'), covar=tensor([0.1092, 0.1371, 0.1520, 0.1068, 0.1457, 0.0607, 0.1400, 0.0794], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0361, 0.0320, 0.0258, 0.0309, 0.0259, 0.0323, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 08:30:49,335 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:30:58,533 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.694e+02 6.359e+02 8.615e+02 1.569e+03, threshold=1.272e+03, percent-clipped=7.0 +2023-04-03 08:31:11,144 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:16,254 INFO [train.py:903] (0/4) Epoch 27, batch 750, loss[loss=0.2151, simple_loss=0.289, pruned_loss=0.07055, over 19688.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.06076, over 3736888.49 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:31:26,151 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:56,679 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:32:17,785 INFO [train.py:903] (0/4) Epoch 27, batch 800, loss[loss=0.2022, simple_loss=0.2882, pruned_loss=0.05812, over 19632.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.06074, over 3751901.75 frames. ], batch size: 61, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:32:31,924 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 08:32:58,401 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0822, 5.1248, 5.9224, 5.9560, 2.0692, 5.6044, 4.8321, 5.5886], + device='cuda:0'), covar=tensor([0.1674, 0.0951, 0.0586, 0.0658, 0.6361, 0.0856, 0.0631, 0.1210], + device='cuda:0'), in_proj_covar=tensor([0.0820, 0.0784, 0.0989, 0.0871, 0.0863, 0.0754, 0.0582, 0.0917], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 08:33:04,022 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 4.845e+02 6.131e+02 7.065e+02 2.334e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 08:33:20,260 INFO [train.py:903] (0/4) Epoch 27, batch 850, loss[loss=0.1655, simple_loss=0.2486, pruned_loss=0.04117, over 19410.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2855, pruned_loss=0.06104, over 3780919.19 frames. ], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:33:45,242 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:33:57,573 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5971, 4.2092, 2.6660, 3.7177, 0.9460, 4.2072, 4.0222, 4.1152], + device='cuda:0'), covar=tensor([0.0659, 0.0995, 0.2003, 0.0817, 0.4179, 0.0610, 0.0910, 0.1234], + device='cuda:0'), in_proj_covar=tensor([0.0528, 0.0427, 0.0515, 0.0359, 0.0410, 0.0452, 0.0447, 0.0476], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 08:34:12,703 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 08:34:16,570 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:22,839 INFO [train.py:903] (0/4) Epoch 27, batch 900, loss[loss=0.2636, simple_loss=0.3285, pruned_loss=0.09936, over 19687.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2858, pruned_loss=0.0612, over 3784092.45 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:10,869 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.108e+02 4.416e+02 5.422e+02 6.593e+02 1.258e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-04-03 08:35:16,994 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178470.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:35:28,152 INFO [train.py:903] (0/4) Epoch 27, batch 950, loss[loss=0.1799, simple_loss=0.2594, pruned_loss=0.05015, over 19395.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2844, pruned_loss=0.06063, over 3786256.21 frames. ], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:30,672 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 08:36:11,756 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:32,190 INFO [train.py:903] (0/4) Epoch 27, batch 1000, loss[loss=0.236, simple_loss=0.3155, pruned_loss=0.07828, over 18831.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2849, pruned_loss=0.06083, over 3801144.05 frames. 
], batch size: 74, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:36:34,996 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:45,450 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:01,969 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 08:37:06,188 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178555.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:19,296 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.971e+02 6.338e+02 8.822e+02 2.004e+03, threshold=1.268e+03, percent-clipped=12.0 +2023-04-03 08:37:25,279 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 08:37:35,828 INFO [train.py:903] (0/4) Epoch 27, batch 1050, loss[loss=0.1951, simple_loss=0.2739, pruned_loss=0.05819, over 19834.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2852, pruned_loss=0.06116, over 3811150.71 frames. ], batch size: 49, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:38:09,355 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 08:38:38,428 INFO [train.py:903] (0/4) Epoch 27, batch 1100, loss[loss=0.172, simple_loss=0.2528, pruned_loss=0.04562, over 19761.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.285, pruned_loss=0.06102, over 3811352.14 frames. ], batch size: 46, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:38:49,150 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178637.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:39:27,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.758e+02 5.833e+02 7.524e+02 1.653e+03, threshold=1.167e+03, percent-clipped=2.0 +2023-04-03 08:39:40,766 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.38 vs. limit=5.0 +2023-04-03 08:39:42,267 INFO [train.py:903] (0/4) Epoch 27, batch 1150, loss[loss=0.2187, simple_loss=0.3004, pruned_loss=0.06852, over 18742.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2856, pruned_loss=0.06196, over 3790219.58 frames. ], batch size: 74, lr: 3.05e-03, grad_scale: 4.0 +2023-04-03 08:39:42,649 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3354, 1.2769, 1.6121, 1.1006, 2.4267, 3.2823, 3.0094, 3.5380], + device='cuda:0'), covar=tensor([0.1582, 0.4111, 0.3675, 0.2835, 0.0684, 0.0223, 0.0254, 0.0304], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0330, 0.0363, 0.0272, 0.0253, 0.0196, 0.0219, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 08:40:25,272 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178712.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:40:28,931 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178715.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:40:47,635 INFO [train.py:903] (0/4) Epoch 27, batch 1200, loss[loss=0.2189, simple_loss=0.3003, pruned_loss=0.06872, over 19361.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2858, pruned_loss=0.06233, over 3784787.98 frames. 
], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:41:18,700 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 08:41:27,872 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 08:41:31,205 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4132, 1.4908, 1.6239, 1.5657, 1.8354, 1.9258, 1.8470, 0.7126], + device='cuda:0'), covar=tensor([0.2296, 0.4040, 0.2579, 0.1881, 0.1565, 0.2225, 0.1484, 0.4463], + device='cuda:0'), in_proj_covar=tensor([0.0550, 0.0665, 0.0744, 0.0502, 0.0631, 0.0542, 0.0664, 0.0567], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:41:38,086 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.626e+02 4.854e+02 6.177e+02 8.249e+02 1.593e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-03 08:41:53,223 INFO [train.py:903] (0/4) Epoch 27, batch 1250, loss[loss=0.1939, simple_loss=0.2834, pruned_loss=0.05222, over 17504.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06183, over 3797393.11 frames. ], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:42:05,375 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:39,171 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:56,587 INFO [train.py:903] (0/4) Epoch 27, batch 1300, loss[loss=0.1952, simple_loss=0.275, pruned_loss=0.0577, over 19681.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06162, over 3811942.60 frames. ], batch size: 53, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:43:44,695 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.004e+02 6.015e+02 7.494e+02 1.649e+03, threshold=1.203e+03, percent-clipped=2.0 +2023-04-03 08:43:48,088 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-03 08:43:58,885 INFO [train.py:903] (0/4) Epoch 27, batch 1350, loss[loss=0.1964, simple_loss=0.2698, pruned_loss=0.06149, over 18699.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.284, pruned_loss=0.06137, over 3818422.97 frames. ], batch size: 41, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:44:41,867 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178912.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:45:03,470 INFO [train.py:903] (0/4) Epoch 27, batch 1400, loss[loss=0.1908, simple_loss=0.2783, pruned_loss=0.05164, over 18140.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06152, over 3820840.38 frames. 
], batch size: 83, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:45:05,018 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:45:09,947 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4258, 1.4350, 1.6559, 1.6046, 2.1572, 2.0880, 2.2498, 0.9736], + device='cuda:0'), covar=tensor([0.2549, 0.4588, 0.2842, 0.2055, 0.1688, 0.2277, 0.1572, 0.4899], + device='cuda:0'), in_proj_covar=tensor([0.0551, 0.0666, 0.0745, 0.0503, 0.0631, 0.0542, 0.0666, 0.0568], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:45:42,599 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 08:45:50,938 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.777e+02 6.195e+02 8.137e+02 1.607e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-03 08:46:03,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 08:46:04,756 INFO [train.py:903] (0/4) Epoch 27, batch 1450, loss[loss=0.29, simple_loss=0.3447, pruned_loss=0.1176, over 13623.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2847, pruned_loss=0.06122, over 3827089.88 frames. ], batch size: 136, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:46:09,309 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:47:07,119 INFO [train.py:903] (0/4) Epoch 27, batch 1500, loss[loss=0.2041, simple_loss=0.2949, pruned_loss=0.0567, over 19665.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06125, over 3802398.11 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:47:42,378 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:47:45,632 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179059.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:47:54,377 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.273e+02 4.749e+02 5.669e+02 7.445e+02 1.551e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 08:48:08,110 INFO [train.py:903] (0/4) Epoch 27, batch 1550, loss[loss=0.2019, simple_loss=0.2852, pruned_loss=0.05932, over 19765.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2849, pruned_loss=0.06165, over 3807622.37 frames. ], batch size: 54, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:48:32,723 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:12,281 INFO [train.py:903] (0/4) Epoch 27, batch 1600, loss[loss=0.1925, simple_loss=0.2706, pruned_loss=0.05719, over 19475.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.06235, over 3805207.38 frames. ], batch size: 49, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:49:18,211 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:37,301 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-03 08:49:59,058 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.994e+02 6.186e+02 7.700e+02 1.707e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 08:50:06,414 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179171.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:09,940 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179174.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:50:10,199 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 08:50:12,628 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-03 08:50:14,096 INFO [train.py:903] (0/4) Epoch 27, batch 1650, loss[loss=0.1771, simple_loss=0.2517, pruned_loss=0.05123, over 19366.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06233, over 3821910.41 frames. ], batch size: 47, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:50:23,701 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179185.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:43,608 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 08:50:55,281 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179210.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:56,537 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2601, 2.2960, 2.6298, 3.0353, 2.2579, 2.8233, 2.6700, 2.4642], + device='cuda:0'), covar=tensor([0.4373, 0.4460, 0.1864, 0.2664, 0.4814, 0.2379, 0.4754, 0.3364], + device='cuda:0'), in_proj_covar=tensor([0.0926, 0.1003, 0.0737, 0.0948, 0.0904, 0.0845, 0.0855, 0.0801], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 08:51:17,038 INFO [train.py:903] (0/4) Epoch 27, batch 1700, loss[loss=0.1799, simple_loss=0.2773, pruned_loss=0.04129, over 19652.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06187, over 3810905.95 frames. ], batch size: 58, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:51:41,286 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:52,230 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179256.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:51:58,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 08:52:04,051 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 5.162e+02 6.407e+02 7.875e+02 2.392e+03, threshold=1.281e+03, percent-clipped=6.0 +2023-04-03 08:52:18,186 INFO [train.py:903] (0/4) Epoch 27, batch 1750, loss[loss=0.2073, simple_loss=0.2917, pruned_loss=0.06143, over 19762.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2851, pruned_loss=0.06177, over 3830945.97 frames. 
], batch size: 63, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:52:34,544 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0285, 5.1628, 5.9227, 5.9616, 2.2524, 5.5989, 4.7941, 5.5919], + device='cuda:0'), covar=tensor([0.1836, 0.0805, 0.0564, 0.0635, 0.5923, 0.0824, 0.0605, 0.1091], + device='cuda:0'), in_proj_covar=tensor([0.0803, 0.0772, 0.0976, 0.0858, 0.0850, 0.0744, 0.0571, 0.0900], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 08:52:46,631 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 08:53:05,951 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 08:53:21,040 INFO [train.py:903] (0/4) Epoch 27, batch 1800, loss[loss=0.1613, simple_loss=0.2365, pruned_loss=0.04305, over 19751.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2848, pruned_loss=0.06124, over 3828117.22 frames. ], batch size: 45, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:51,567 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:08,222 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 4.924e+02 6.082e+02 7.444e+02 1.664e+03, threshold=1.216e+03, percent-clipped=4.0 +2023-04-03 08:54:15,279 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179371.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:54:21,562 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 08:54:23,167 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:23,936 INFO [train.py:903] (0/4) Epoch 27, batch 1850, loss[loss=0.1972, simple_loss=0.2822, pruned_loss=0.05612, over 19346.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2861, pruned_loss=0.06192, over 3813921.67 frames. ], batch size: 66, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:54:25,204 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:45,779 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6182, 1.5306, 1.5102, 2.0491, 1.5087, 1.8097, 1.8681, 1.6476], + device='cuda:0'), covar=tensor([0.0892, 0.0932, 0.1024, 0.0773, 0.0923, 0.0811, 0.0903, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0225, 0.0228, 0.0240, 0.0227, 0.0214, 0.0189, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:55:00,568 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-03 08:55:19,879 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5833, 1.6984, 2.0904, 1.8736, 3.1546, 2.7358, 3.5939, 1.6762], + device='cuda:0'), covar=tensor([0.2550, 0.4456, 0.2824, 0.1999, 0.1544, 0.2078, 0.1417, 0.4449], + device='cuda:0'), in_proj_covar=tensor([0.0552, 0.0665, 0.0746, 0.0503, 0.0632, 0.0543, 0.0667, 0.0569], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 08:55:25,703 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5952, 1.5164, 1.5003, 2.0301, 1.5322, 1.8351, 1.9106, 1.6006], + device='cuda:0'), covar=tensor([0.0838, 0.0901, 0.1003, 0.0729, 0.0914, 0.0782, 0.0881, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0226, 0.0228, 0.0240, 0.0227, 0.0215, 0.0189, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 08:55:25,766 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179427.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:26,554 INFO [train.py:903] (0/4) Epoch 27, batch 1900, loss[loss=0.2031, simple_loss=0.2885, pruned_loss=0.05887, over 18711.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2859, pruned_loss=0.06205, over 3818876.77 frames. ], batch size: 74, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:55:29,237 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179430.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:55:45,921 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 08:55:49,563 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 08:55:56,463 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:56:00,898 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179455.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:56:09,162 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179462.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:56:14,506 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.488e+02 5.214e+02 6.120e+02 7.486e+02 1.853e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-03 08:56:15,780 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 08:56:28,540 INFO [train.py:903] (0/4) Epoch 27, batch 1950, loss[loss=0.2311, simple_loss=0.3126, pruned_loss=0.07486, over 19437.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06185, over 3830931.51 frames. ], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:02,023 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:57:31,829 INFO [train.py:903] (0/4) Epoch 27, batch 2000, loss[loss=0.2122, simple_loss=0.2953, pruned_loss=0.06453, over 19751.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.06152, over 3821256.77 frames. 
], batch size: 63, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:32,239 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179528.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:57:55,541 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0182, 1.8348, 1.6432, 1.9192, 1.6529, 1.6896, 1.5888, 1.8644], + device='cuda:0'), covar=tensor([0.1128, 0.1495, 0.1612, 0.1219, 0.1524, 0.0631, 0.1594, 0.0790], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0359, 0.0317, 0.0256, 0.0306, 0.0256, 0.0320, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 08:58:19,143 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.376e+02 6.611e+02 8.547e+02 2.231e+03, threshold=1.322e+03, percent-clipped=11.0 +2023-04-03 08:58:33,824 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 08:58:34,818 INFO [train.py:903] (0/4) Epoch 27, batch 2050, loss[loss=0.1914, simple_loss=0.2684, pruned_loss=0.05717, over 19488.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.0617, over 3821398.52 frames. ], batch size: 49, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:58:53,485 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 08:58:54,679 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 08:59:14,233 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 08:59:37,294 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179627.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:59:37,899 INFO [train.py:903] (0/4) Epoch 27, batch 2100, loss[loss=0.221, simple_loss=0.3055, pruned_loss=0.06828, over 19765.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06128, over 3814942.30 frames. ], batch size: 54, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:07,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 09:00:07,922 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179652.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:00:10,134 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179654.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:00:25,320 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.609e+02 5.792e+02 6.874e+02 1.495e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 09:00:28,896 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 09:00:39,306 INFO [train.py:903] (0/4) Epoch 27, batch 2150, loss[loss=0.2519, simple_loss=0.3212, pruned_loss=0.0913, over 12853.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2838, pruned_loss=0.06103, over 3810678.32 frames. ], batch size: 137, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:01:37,657 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:01:43,433 INFO [train.py:903] (0/4) Epoch 27, batch 2200, loss[loss=0.1917, simple_loss=0.2628, pruned_loss=0.06031, over 19309.00 frames. 
], tot_loss[loss=0.2025, simple_loss=0.2833, pruned_loss=0.06083, over 3817557.72 frames. ], batch size: 44, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:02:30,599 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.069e+02 5.235e+02 6.025e+02 7.394e+02 1.358e+03, threshold=1.205e+03, percent-clipped=1.0 +2023-04-03 09:02:46,359 INFO [train.py:903] (0/4) Epoch 27, batch 2250, loss[loss=0.265, simple_loss=0.3311, pruned_loss=0.09942, over 19398.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06179, over 3824204.98 frames. ], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:03:21,827 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179806.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:03:23,156 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2771, 2.8895, 2.4233, 2.3928, 2.0743, 2.5691, 0.9227, 2.1243], + device='cuda:0'), covar=tensor([0.0669, 0.0657, 0.0669, 0.1162, 0.1182, 0.1176, 0.1549, 0.1158], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0391, 0.0470, 0.0397, 0.0346, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 09:03:33,047 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:03:50,156 INFO [train.py:903] (0/4) Epoch 27, batch 2300, loss[loss=0.2065, simple_loss=0.299, pruned_loss=0.05697, over 19588.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.06113, over 3820268.70 frames. ], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:04:02,173 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179838.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:04:03,041 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 09:04:38,194 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.173e+02 6.408e+02 8.011e+02 2.024e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 09:04:43,397 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-03 09:04:51,778 INFO [train.py:903] (0/4) Epoch 27, batch 2350, loss[loss=0.1731, simple_loss=0.2496, pruned_loss=0.0483, over 19337.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.06082, over 3825064.25 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:05:19,155 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 09:05:34,688 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 09:05:44,060 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6947, 1.5885, 1.6292, 2.2472, 1.5842, 2.0309, 1.9922, 1.7930], + device='cuda:0'), covar=tensor([0.0850, 0.0890, 0.0987, 0.0738, 0.0887, 0.0732, 0.0889, 0.0697], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0223, 0.0227, 0.0239, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:05:45,206 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179921.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:05:48,585 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179924.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:05:50,588 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 09:05:53,778 INFO [train.py:903] (0/4) Epoch 27, batch 2400, loss[loss=0.2983, simple_loss=0.3554, pruned_loss=0.1206, over 12820.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.061, over 3826851.51 frames. ], batch size: 137, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:06:11,803 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179942.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:06:23,115 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5228, 1.4336, 1.4534, 1.8183, 1.3057, 1.6940, 1.6431, 1.5308], + device='cuda:0'), covar=tensor([0.0839, 0.0936, 0.1033, 0.0656, 0.0851, 0.0788, 0.0878, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0223, 0.0227, 0.0239, 0.0225, 0.0212, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:06:41,355 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 4.979e+02 6.417e+02 8.310e+02 2.182e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-03 09:06:57,078 INFO [train.py:903] (0/4) Epoch 27, batch 2450, loss[loss=0.1801, simple_loss=0.2583, pruned_loss=0.05099, over 19336.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06132, over 3824470.26 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:07:21,328 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:07:23,502 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-180000.pt +2023-04-03 09:08:00,990 INFO [train.py:903] (0/4) Epoch 27, batch 2500, loss[loss=0.2128, simple_loss=0.2963, pruned_loss=0.06466, over 19759.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2854, pruned_loss=0.0613, over 3832813.24 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:08:19,368 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:08:48,293 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 4.714e+02 5.802e+02 6.784e+02 1.958e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 09:09:02,324 INFO [train.py:903] (0/4) Epoch 27, batch 2550, loss[loss=0.1642, simple_loss=0.2447, pruned_loss=0.04182, over 19488.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06173, over 3837633.09 frames. 
], batch size: 49, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:09:06,506 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 09:09:11,739 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7050, 1.5188, 1.5934, 2.2255, 1.7004, 1.9186, 1.9804, 1.6922], + device='cuda:0'), covar=tensor([0.0830, 0.0969, 0.1019, 0.0679, 0.0804, 0.0778, 0.0871, 0.0741], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0224, 0.0227, 0.0239, 0.0225, 0.0212, 0.0188, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:09:13,013 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5821, 1.7850, 2.1191, 1.9116, 3.0778, 2.6973, 3.3427, 1.6068], + device='cuda:0'), covar=tensor([0.2611, 0.4373, 0.2853, 0.2045, 0.1654, 0.2143, 0.1673, 0.4587], + device='cuda:0'), in_proj_covar=tensor([0.0549, 0.0663, 0.0744, 0.0501, 0.0629, 0.0541, 0.0664, 0.0567], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 09:09:22,060 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:47,086 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180113.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:54,385 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180119.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:58,723 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 09:10:04,108 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 09:10:04,536 INFO [train.py:903] (0/4) Epoch 27, batch 2600, loss[loss=0.1967, simple_loss=0.2764, pruned_loss=0.05853, over 19737.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06114, over 3849379.16 frames. ], batch size: 51, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:10:42,738 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:10:52,191 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.258e+02 6.240e+02 7.647e+02 1.495e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-03 09:11:00,081 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 09:11:07,732 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180177.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:11:08,389 INFO [train.py:903] (0/4) Epoch 27, batch 2650, loss[loss=0.1704, simple_loss=0.246, pruned_loss=0.04738, over 19784.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.283, pruned_loss=0.06075, over 3839498.67 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:11:29,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 09:11:38,056 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180202.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:11:50,372 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5454, 2.3931, 2.2455, 2.7040, 2.2259, 2.1596, 2.0317, 2.4818], + device='cuda:0'), covar=tensor([0.0968, 0.1624, 0.1322, 0.0910, 0.1424, 0.0524, 0.1415, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0359, 0.0317, 0.0257, 0.0307, 0.0256, 0.0321, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:12:04,700 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:12:11,768 INFO [train.py:903] (0/4) Epoch 27, batch 2700, loss[loss=0.2327, simple_loss=0.3136, pruned_loss=0.07594, over 19777.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.0603, over 3846147.96 frames. ], batch size: 56, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:13:00,986 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.233e+02 6.139e+02 8.955e+02 2.009e+03, threshold=1.228e+03, percent-clipped=9.0 +2023-04-03 09:13:01,290 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5055, 1.3503, 1.7602, 1.5002, 2.7421, 3.7440, 3.4900, 4.0485], + device='cuda:0'), covar=tensor([0.1501, 0.3983, 0.3536, 0.2354, 0.0652, 0.0219, 0.0231, 0.0283], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0269, 0.0253, 0.0196, 0.0219, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 09:13:02,155 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:13:07,997 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:13:13,279 INFO [train.py:903] (0/4) Epoch 27, batch 2750, loss[loss=0.1816, simple_loss=0.2761, pruned_loss=0.04352, over 18683.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06086, over 3822350.71 frames. ], batch size: 74, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:13:22,763 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:14:15,243 INFO [train.py:903] (0/4) Epoch 27, batch 2800, loss[loss=0.2059, simple_loss=0.2957, pruned_loss=0.05806, over 18043.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2846, pruned_loss=0.06088, over 3811658.48 frames. ], batch size: 83, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:14:49,773 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:04,816 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.838e+02 6.554e+02 8.151e+02 1.805e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-03 09:15:07,630 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:11,405 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. 
limit=2.0 +2023-04-03 09:15:18,465 INFO [train.py:903] (0/4) Epoch 27, batch 2850, loss[loss=0.2421, simple_loss=0.3237, pruned_loss=0.08027, over 19610.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06083, over 3809798.03 frames. ], batch size: 57, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:15:26,263 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180383.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:15:33,056 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:40,137 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:48,639 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180401.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:16:20,688 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 09:16:22,940 INFO [train.py:903] (0/4) Epoch 27, batch 2900, loss[loss=0.1966, simple_loss=0.2717, pruned_loss=0.06073, over 19453.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06098, over 3814413.87 frames. ], batch size: 49, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:13,071 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.922e+02 6.355e+02 8.136e+02 1.738e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-04-03 09:17:13,372 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3077, 3.8020, 3.9140, 3.9504, 1.6278, 3.7388, 3.2666, 3.6609], + device='cuda:0'), covar=tensor([0.1745, 0.1098, 0.0710, 0.0787, 0.5964, 0.0987, 0.0768, 0.1235], + device='cuda:0'), in_proj_covar=tensor([0.0813, 0.0777, 0.0986, 0.0865, 0.0860, 0.0748, 0.0580, 0.0915], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 09:17:13,474 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5019, 1.4396, 1.4651, 1.7228, 1.3533, 1.6671, 1.6944, 1.5574], + device='cuda:0'), covar=tensor([0.0882, 0.0932, 0.1028, 0.0726, 0.0831, 0.0782, 0.0803, 0.0740], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0225, 0.0229, 0.0240, 0.0225, 0.0213, 0.0189, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:17:16,930 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6575, 1.5034, 1.6421, 2.1179, 1.5073, 1.8333, 1.9307, 1.6904], + device='cuda:0'), covar=tensor([0.0855, 0.0969, 0.0980, 0.0735, 0.0889, 0.0813, 0.0866, 0.0735], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0225, 0.0229, 0.0240, 0.0226, 0.0213, 0.0189, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:17:26,071 INFO [train.py:903] (0/4) Epoch 27, batch 2950, loss[loss=0.2249, simple_loss=0.3014, pruned_loss=0.07422, over 19603.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.0607, over 3801442.40 frames. 
], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:56,660 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:17:56,713 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4873, 2.2113, 1.7147, 1.4982, 1.9858, 1.5107, 1.3828, 1.9259], + device='cuda:0'), covar=tensor([0.0966, 0.0819, 0.1071, 0.0909, 0.0557, 0.1195, 0.0763, 0.0462], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0322, 0.0338, 0.0274, 0.0251, 0.0345, 0.0293, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:18:27,183 INFO [train.py:903] (0/4) Epoch 27, batch 3000, loss[loss=0.1876, simple_loss=0.2634, pruned_loss=0.05589, over 19358.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.06117, over 3788469.13 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:18:27,184 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 09:18:39,752 INFO [train.py:937] (0/4) Epoch 27, validation: loss=0.1667, simple_loss=0.2664, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 09:18:39,753 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 09:18:41,272 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:43,412 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 09:19:11,482 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:20,241 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2636, 1.3398, 1.8069, 1.4329, 2.7610, 3.7950, 3.4592, 3.9750], + device='cuda:0'), covar=tensor([0.1605, 0.4004, 0.3413, 0.2502, 0.0643, 0.0198, 0.0218, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0331, 0.0364, 0.0270, 0.0255, 0.0196, 0.0219, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 09:19:26,238 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.17 vs. limit=5.0 +2023-04-03 09:19:29,304 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:30,278 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.587e+02 5.827e+02 7.595e+02 1.750e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 09:19:41,904 INFO [train.py:903] (0/4) Epoch 27, batch 3050, loss[loss=0.2225, simple_loss=0.301, pruned_loss=0.07206, over 19669.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06065, over 3804308.78 frames. ], batch size: 58, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:19:54,601 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:20:44,775 INFO [train.py:903] (0/4) Epoch 27, batch 3100, loss[loss=0.1853, simple_loss=0.2691, pruned_loss=0.05076, over 19728.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06068, over 3800051.50 frames. 
], batch size: 51, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:20:59,808 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180639.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:21:20,334 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180657.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:29,218 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180664.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:21:33,516 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.719e+02 5.890e+02 7.652e+02 1.677e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 09:21:46,248 INFO [train.py:903] (0/4) Epoch 27, batch 3150, loss[loss=0.2058, simple_loss=0.2897, pruned_loss=0.06099, over 19755.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06074, over 3807992.55 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:21:52,953 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:52,994 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:11,317 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:15,894 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 09:22:49,864 INFO [train.py:903] (0/4) Epoch 27, batch 3200, loss[loss=0.1909, simple_loss=0.2827, pruned_loss=0.04955, over 19591.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.0606, over 3815762.41 frames. ], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:16,671 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:27,991 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180759.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:39,759 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.255e+02 6.624e+02 9.042e+02 3.460e+03, threshold=1.325e+03, percent-clipped=12.0 +2023-04-03 09:23:51,466 INFO [train.py:903] (0/4) Epoch 27, batch 3250, loss[loss=0.1816, simple_loss=0.257, pruned_loss=0.05308, over 19627.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06066, over 3816202.17 frames. 
], batch size: 50, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:59,260 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7282, 4.2358, 4.4657, 4.4721, 1.7703, 4.1895, 3.6339, 4.1816], + device='cuda:0'), covar=tensor([0.1926, 0.0973, 0.0618, 0.0762, 0.6264, 0.0991, 0.0783, 0.1163], + device='cuda:0'), in_proj_covar=tensor([0.0819, 0.0781, 0.0993, 0.0872, 0.0863, 0.0752, 0.0583, 0.0918], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 09:23:59,387 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:36,417 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180813.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:54,208 INFO [train.py:903] (0/4) Epoch 27, batch 3300, loss[loss=0.2804, simple_loss=0.3416, pruned_loss=0.1096, over 19536.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2827, pruned_loss=0.06045, over 3826155.99 frames. ], batch size: 56, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:25:00,022 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 09:25:13,334 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:25:43,903 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.181e+02 6.566e+02 8.708e+02 1.878e+03, threshold=1.313e+03, percent-clipped=7.0 +2023-04-03 09:25:56,104 INFO [train.py:903] (0/4) Epoch 27, batch 3350, loss[loss=0.1784, simple_loss=0.258, pruned_loss=0.04941, over 18609.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06051, over 3817100.16 frames. ], batch size: 41, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:26:18,970 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-03 09:27:00,204 INFO [train.py:903] (0/4) Epoch 27, batch 3400, loss[loss=0.2455, simple_loss=0.3242, pruned_loss=0.08342, over 18209.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.06084, over 3818597.38 frames. ], batch size: 83, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:27:05,026 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180932.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:11,994 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:43,939 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:50,447 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 4.823e+02 5.841e+02 8.287e+02 1.627e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 09:28:02,295 INFO [train.py:903] (0/4) Epoch 27, batch 3450, loss[loss=0.2187, simple_loss=0.3029, pruned_loss=0.06724, over 19358.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2835, pruned_loss=0.06107, over 3811685.13 frames. ], batch size: 70, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:28:06,948 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 09:29:04,451 INFO [train.py:903] (0/4) Epoch 27, batch 3500, loss[loss=0.2267, simple_loss=0.3098, pruned_loss=0.07178, over 19541.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06123, over 3815424.92 frames. ], batch size: 56, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:29:09,326 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4063, 1.4881, 2.0394, 1.6534, 3.2259, 4.2947, 4.1012, 4.6940], + device='cuda:0'), covar=tensor([0.1685, 0.3907, 0.3453, 0.2453, 0.0644, 0.0298, 0.0206, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0330, 0.0363, 0.0268, 0.0253, 0.0195, 0.0217, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 09:29:28,519 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181047.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:29:45,289 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6991, 1.6644, 2.0181, 1.6414, 4.2424, 1.3156, 2.7877, 4.6043], + device='cuda:0'), covar=tensor([0.0430, 0.2898, 0.2526, 0.2098, 0.0713, 0.2602, 0.1343, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0376, 0.0394, 0.0351, 0.0381, 0.0355, 0.0391, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:29:53,845 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.177e+02 6.114e+02 8.226e+02 1.424e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 09:29:55,468 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181069.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:06,529 INFO [train.py:903] (0/4) Epoch 27, batch 3550, loss[loss=0.2255, simple_loss=0.3159, pruned_loss=0.06759, over 19595.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06063, over 3816229.11 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:30:27,206 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:27,435 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:31:09,199 INFO [train.py:903] (0/4) Epoch 27, batch 3600, loss[loss=0.2341, simple_loss=0.311, pruned_loss=0.07856, over 19652.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06052, over 3829821.24 frames. ], batch size: 58, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:31:30,383 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-03 09:32:00,753 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 4.881e+02 5.766e+02 7.270e+02 1.755e+03, threshold=1.153e+03, percent-clipped=5.0 +2023-04-03 09:32:12,504 INFO [train.py:903] (0/4) Epoch 27, batch 3650, loss[loss=0.1717, simple_loss=0.2569, pruned_loss=0.04329, over 19849.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2843, pruned_loss=0.06054, over 3815971.45 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:32:24,257 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:32:52,745 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:32:54,499 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. 
limit=5.0 +2023-04-03 09:33:15,867 INFO [train.py:903] (0/4) Epoch 27, batch 3700, loss[loss=0.2268, simple_loss=0.2998, pruned_loss=0.07691, over 19323.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.286, pruned_loss=0.06169, over 3788549.49 frames. ], batch size: 66, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:33:19,931 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-03 09:33:46,734 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 09:34:06,272 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.357e+02 6.375e+02 8.389e+02 2.807e+03, threshold=1.275e+03, percent-clipped=10.0 +2023-04-03 09:34:17,471 INFO [train.py:903] (0/4) Epoch 27, batch 3750, loss[loss=0.154, simple_loss=0.2374, pruned_loss=0.03531, over 19779.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.06168, over 3793439.10 frames. ], batch size: 48, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:48,303 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:34:49,693 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:04,203 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181315.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:20,118 INFO [train.py:903] (0/4) Epoch 27, batch 3800, loss[loss=0.1809, simple_loss=0.2589, pruned_loss=0.05141, over 19739.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.06163, over 3807940.70 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:35:20,512 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181328.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:48,544 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 09:36:10,635 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.827e+02 5.403e+02 6.773e+02 8.761e+02 1.536e+03, threshold=1.355e+03, percent-clipped=8.0 +2023-04-03 09:36:22,055 INFO [train.py:903] (0/4) Epoch 27, batch 3850, loss[loss=0.2134, simple_loss=0.2919, pruned_loss=0.06748, over 19762.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.06131, over 3825173.89 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:37:25,666 INFO [train.py:903] (0/4) Epoch 27, batch 3900, loss[loss=0.1665, simple_loss=0.2508, pruned_loss=0.04107, over 19482.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2851, pruned_loss=0.061, over 3819973.43 frames. ], batch size: 49, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:37:49,513 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. 
limit=5.0 +2023-04-03 09:37:52,692 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9879, 2.1057, 2.3088, 2.6811, 2.0639, 2.6133, 2.2921, 2.1289], + device='cuda:0'), covar=tensor([0.4455, 0.4313, 0.2050, 0.2522, 0.4425, 0.2283, 0.5090, 0.3519], + device='cuda:0'), in_proj_covar=tensor([0.0931, 0.1008, 0.0739, 0.0949, 0.0907, 0.0848, 0.0858, 0.0805], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 09:38:13,064 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181465.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:38:17,127 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.394e+02 5.123e+02 6.226e+02 8.139e+02 1.802e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-03 09:38:28,445 INFO [train.py:903] (0/4) Epoch 27, batch 3950, loss[loss=0.2368, simple_loss=0.3193, pruned_loss=0.07709, over 17462.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2846, pruned_loss=0.06062, over 3813737.84 frames. ], batch size: 101, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:30,603 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 09:38:43,818 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181490.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:39:30,606 INFO [train.py:903] (0/4) Epoch 27, batch 4000, loss[loss=0.1903, simple_loss=0.2693, pruned_loss=0.05565, over 19728.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2846, pruned_loss=0.0608, over 3809563.46 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:39:55,701 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:08,402 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:14,677 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 09:40:21,509 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.331e+02 6.990e+02 9.796e+02 2.756e+03, threshold=1.398e+03, percent-clipped=11.0 +2023-04-03 09:40:32,615 INFO [train.py:903] (0/4) Epoch 27, batch 4050, loss[loss=0.1726, simple_loss=0.2528, pruned_loss=0.0462, over 19605.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2841, pruned_loss=0.06056, over 3810132.12 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:40:38,777 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:41:35,043 INFO [train.py:903] (0/4) Epoch 27, batch 4100, loss[loss=0.22, simple_loss=0.3037, pruned_loss=0.06814, over 19527.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2848, pruned_loss=0.06113, over 3811782.54 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:42:07,717 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-03 09:42:13,759 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:42:26,972 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 5.142e+02 6.045e+02 7.647e+02 1.406e+03, threshold=1.209e+03, percent-clipped=1.0 +2023-04-03 09:42:35,660 INFO [train.py:903] (0/4) Epoch 27, batch 4150, loss[loss=0.1798, simple_loss=0.2596, pruned_loss=0.04994, over 19388.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06084, over 3817954.63 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:38,706 INFO [train.py:903] (0/4) Epoch 27, batch 4200, loss[loss=0.2234, simple_loss=0.298, pruned_loss=0.07438, over 13327.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.06106, over 3814949.31 frames. ], batch size: 136, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:42,089 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 09:44:19,619 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0901, 2.0868, 1.8256, 2.1683, 1.9383, 1.8627, 1.7459, 2.0335], + device='cuda:0'), covar=tensor([0.1115, 0.1476, 0.1570, 0.1104, 0.1435, 0.0576, 0.1592, 0.0784], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0360, 0.0319, 0.0258, 0.0309, 0.0257, 0.0322, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:44:30,941 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.083e+02 6.482e+02 7.948e+02 1.533e+03, threshold=1.296e+03, percent-clipped=4.0 +2023-04-03 09:44:36,093 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9568, 4.4959, 2.7831, 3.9176, 0.7837, 4.4941, 4.3300, 4.4508], + device='cuda:0'), covar=tensor([0.0548, 0.0969, 0.1996, 0.0888, 0.4263, 0.0646, 0.0911, 0.1158], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0426, 0.0510, 0.0357, 0.0406, 0.0452, 0.0447, 0.0476], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:44:36,260 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:44:40,471 INFO [train.py:903] (0/4) Epoch 27, batch 4250, loss[loss=0.2098, simple_loss=0.279, pruned_loss=0.07035, over 19109.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2846, pruned_loss=0.06101, over 3820858.69 frames. ], batch size: 42, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:44:55,302 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 09:45:08,537 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 09:45:43,727 INFO [train.py:903] (0/4) Epoch 27, batch 4300, loss[loss=0.2099, simple_loss=0.2893, pruned_loss=0.06526, over 19715.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.06105, over 3815714.87 frames. 
], batch size: 63, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:45:44,232 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4106, 1.4373, 1.5540, 1.5577, 1.8211, 1.8972, 1.7872, 0.5880], + device='cuda:0'), covar=tensor([0.2414, 0.4272, 0.2586, 0.1993, 0.1626, 0.2339, 0.1431, 0.5069], + device='cuda:0'), in_proj_covar=tensor([0.0552, 0.0667, 0.0749, 0.0505, 0.0633, 0.0545, 0.0668, 0.0570], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 09:46:36,877 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 4.917e+02 6.450e+02 8.224e+02 1.543e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-04-03 09:46:40,227 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 09:46:47,396 INFO [train.py:903] (0/4) Epoch 27, batch 4350, loss[loss=0.2044, simple_loss=0.2876, pruned_loss=0.06058, over 19699.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.0614, over 3808810.29 frames. ], batch size: 60, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:47:05,497 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:47:21,788 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5708, 1.1441, 1.3948, 1.3051, 2.2007, 1.1292, 2.1506, 2.5237], + device='cuda:0'), covar=tensor([0.0732, 0.3093, 0.3053, 0.1787, 0.0955, 0.2132, 0.1075, 0.0490], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0376, 0.0394, 0.0352, 0.0381, 0.0354, 0.0392, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:47:33,518 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 09:47:38,038 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6468, 1.7855, 2.1381, 1.9402, 3.2477, 2.9286, 3.7071, 1.6797], + device='cuda:0'), covar=tensor([0.2413, 0.4323, 0.2719, 0.1850, 0.1519, 0.1932, 0.1403, 0.4180], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0671, 0.0754, 0.0507, 0.0638, 0.0548, 0.0671, 0.0573], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 09:47:49,883 INFO [train.py:903] (0/4) Epoch 27, batch 4400, loss[loss=0.1531, simple_loss=0.2287, pruned_loss=0.03879, over 19749.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.0614, over 3817703.18 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:15,001 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 09:48:24,195 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 09:48:42,706 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.593e+02 4.919e+02 6.507e+02 9.105e+02 1.976e+03, threshold=1.301e+03, percent-clipped=10.0 +2023-04-03 09:48:52,975 INFO [train.py:903] (0/4) Epoch 27, batch 4450, loss[loss=0.2015, simple_loss=0.2861, pruned_loss=0.0584, over 19784.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2852, pruned_loss=0.06247, over 3817658.73 frames. 
], batch size: 56, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:49:15,964 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:20,366 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-182000.pt +2023-04-03 09:49:30,785 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:39,962 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6331, 1.7463, 1.9819, 1.9590, 1.5145, 1.9438, 1.9659, 1.8670], + device='cuda:0'), covar=tensor([0.4230, 0.3782, 0.2127, 0.2392, 0.3898, 0.2272, 0.5302, 0.3541], + device='cuda:0'), in_proj_covar=tensor([0.0932, 0.1009, 0.0740, 0.0951, 0.0909, 0.0850, 0.0858, 0.0807], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 09:49:49,913 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-03 09:49:56,879 INFO [train.py:903] (0/4) Epoch 27, batch 4500, loss[loss=0.1861, simple_loss=0.2741, pruned_loss=0.04906, over 19665.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.286, pruned_loss=0.06264, over 3822937.07 frames. ], batch size: 53, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:49:59,836 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:31,958 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:38,352 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-03 09:50:49,963 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.059e+02 6.218e+02 7.785e+02 2.105e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 09:51:00,203 INFO [train.py:903] (0/4) Epoch 27, batch 4550, loss[loss=0.1771, simple_loss=0.2478, pruned_loss=0.05318, over 19738.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2846, pruned_loss=0.06195, over 3827191.12 frames. ], batch size: 46, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:51:09,780 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 09:51:24,544 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0267, 1.4699, 1.8080, 1.7930, 4.5477, 1.2086, 2.8001, 5.0117], + device='cuda:0'), covar=tensor([0.0486, 0.2915, 0.2875, 0.1983, 0.0764, 0.2674, 0.1344, 0.0149], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0378, 0.0396, 0.0353, 0.0384, 0.0357, 0.0394, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:51:32,201 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 09:52:02,921 INFO [train.py:903] (0/4) Epoch 27, batch 4600, loss[loss=0.1736, simple_loss=0.2575, pruned_loss=0.04486, over 19851.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2841, pruned_loss=0.06143, over 3822908.03 frames. 
], batch size: 52, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:52:54,753 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 4.830e+02 5.724e+02 7.323e+02 1.391e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 09:53:05,189 INFO [train.py:903] (0/4) Epoch 27, batch 4650, loss[loss=0.1982, simple_loss=0.286, pruned_loss=0.05521, over 19515.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2838, pruned_loss=0.06079, over 3825023.00 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:53:22,614 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 09:53:34,163 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 09:54:07,711 INFO [train.py:903] (0/4) Epoch 27, batch 4700, loss[loss=0.1999, simple_loss=0.2783, pruned_loss=0.06073, over 19356.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.06015, over 3819236.08 frames. ], batch size: 47, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:54:23,689 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2435, 2.0447, 2.0631, 1.9210, 1.5907, 1.8940, 0.6410, 1.3200], + device='cuda:0'), covar=tensor([0.0658, 0.0746, 0.0504, 0.0931, 0.1304, 0.0935, 0.1510, 0.1145], + device='cuda:0'), in_proj_covar=tensor([0.0365, 0.0363, 0.0369, 0.0391, 0.0469, 0.0395, 0.0347, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 09:54:30,872 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 09:54:51,531 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:54:59,128 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.994e+02 4.730e+02 5.895e+02 7.660e+02 1.174e+03, threshold=1.179e+03, percent-clipped=2.0 +2023-04-03 09:55:06,179 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8752, 4.0256, 4.4000, 4.4019, 2.7908, 4.1023, 3.7896, 4.1694], + device='cuda:0'), covar=tensor([0.1403, 0.2668, 0.0618, 0.0707, 0.4263, 0.1255, 0.0589, 0.1015], + device='cuda:0'), in_proj_covar=tensor([0.0817, 0.0779, 0.0986, 0.0864, 0.0858, 0.0750, 0.0584, 0.0915], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 09:55:10,403 INFO [train.py:903] (0/4) Epoch 27, batch 4750, loss[loss=0.2065, simple_loss=0.2902, pruned_loss=0.06139, over 19530.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2843, pruned_loss=0.06068, over 3819021.94 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:55:22,528 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182288.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:55:35,987 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:12,231 INFO [train.py:903] (0/4) Epoch 27, batch 4800, loss[loss=0.1904, simple_loss=0.2852, pruned_loss=0.04782, over 19648.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.0596, over 3829519.16 frames. 
], batch size: 55, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:56:26,917 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182340.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:57:01,655 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5547, 1.5118, 1.4571, 2.0063, 1.4166, 1.7827, 1.8525, 1.6247], + device='cuda:0'), covar=tensor([0.0872, 0.0944, 0.1069, 0.0731, 0.0934, 0.0802, 0.0860, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0224, 0.0227, 0.0240, 0.0226, 0.0212, 0.0187, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 09:57:03,562 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.862e+02 5.780e+02 7.243e+02 1.108e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-03 09:57:13,600 INFO [train.py:903] (0/4) Epoch 27, batch 4850, loss[loss=0.1783, simple_loss=0.2755, pruned_loss=0.04055, over 19666.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06037, over 3832954.92 frames. ], batch size: 55, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:57:36,904 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 09:57:50,863 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8416, 1.5600, 1.4826, 1.7888, 1.5336, 1.5981, 1.4901, 1.7133], + device='cuda:0'), covar=tensor([0.1108, 0.1319, 0.1533, 0.0992, 0.1301, 0.0586, 0.1512, 0.0794], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0257, 0.0307, 0.0255, 0.0320, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 09:57:58,344 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 09:58:03,903 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 09:58:03,929 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 09:58:13,248 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 09:58:14,419 INFO [train.py:903] (0/4) Epoch 27, batch 4900, loss[loss=0.2032, simple_loss=0.2828, pruned_loss=0.06187, over 19541.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2839, pruned_loss=0.06062, over 3822101.86 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:58:34,909 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 09:58:50,310 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182455.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:59:07,219 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.786e+02 5.871e+02 7.388e+02 1.622e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 09:59:18,873 INFO [train.py:903] (0/4) Epoch 27, batch 4950, loss[loss=0.2058, simple_loss=0.2939, pruned_loss=0.05884, over 19312.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.05998, over 3831013.78 frames. ], batch size: 66, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:59:36,556 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-03 10:00:00,867 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 10:00:21,846 INFO [train.py:903] (0/4) Epoch 27, batch 5000, loss[loss=0.1865, simple_loss=0.2749, pruned_loss=0.04905, over 19664.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2828, pruned_loss=0.05963, over 3844707.68 frames. ], batch size: 55, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:00:27,411 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182532.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:00:32,564 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 10:00:44,441 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 10:01:15,165 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.878e+02 5.976e+02 7.448e+02 1.686e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 10:01:25,326 INFO [train.py:903] (0/4) Epoch 27, batch 5050, loss[loss=0.1993, simple_loss=0.2836, pruned_loss=0.05747, over 19539.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2827, pruned_loss=0.05956, over 3845750.41 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:02,775 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 10:02:27,489 INFO [train.py:903] (0/4) Epoch 27, batch 5100, loss[loss=0.1664, simple_loss=0.2572, pruned_loss=0.03778, over 19827.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05963, over 3849504.49 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:37,798 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 10:02:41,999 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 10:02:46,615 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 10:02:46,760 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:03:14,678 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-03 10:03:16,996 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 10:03:19,757 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.110e+02 6.408e+02 8.268e+02 2.195e+03, threshold=1.282e+03, percent-clipped=9.0 +2023-04-03 10:03:30,281 INFO [train.py:903] (0/4) Epoch 27, batch 5150, loss[loss=0.2484, simple_loss=0.319, pruned_loss=0.08886, over 17514.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2813, pruned_loss=0.05933, over 3847718.71 frames. ], batch size: 101, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:03:44,257 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 10:03:45,806 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7195, 1.7316, 1.9626, 1.8846, 2.8165, 2.3678, 2.8887, 1.6112], + device='cuda:0'), covar=tensor([0.2350, 0.4090, 0.2723, 0.1954, 0.1442, 0.2178, 0.1448, 0.4315], + device='cuda:0'), in_proj_covar=tensor([0.0553, 0.0668, 0.0753, 0.0507, 0.0634, 0.0546, 0.0670, 0.0572], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 10:04:12,421 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182711.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:21,110 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:04:25,033 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.21 vs. limit=5.0 +2023-04-03 10:04:30,866 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.05 vs. limit=5.0 +2023-04-03 10:04:34,470 INFO [train.py:903] (0/4) Epoch 27, batch 5200, loss[loss=0.2086, simple_loss=0.3008, pruned_loss=0.05825, over 19529.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2823, pruned_loss=0.05958, over 3851139.71 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:04:45,062 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:50,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 10:05:06,003 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9289, 0.8873, 0.9118, 1.0246, 0.8019, 0.9883, 0.9541, 0.9536], + device='cuda:0'), covar=tensor([0.0721, 0.0742, 0.0815, 0.0512, 0.0980, 0.0657, 0.0827, 0.0620], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0225, 0.0229, 0.0241, 0.0226, 0.0214, 0.0188, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 10:05:11,577 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:05:28,502 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.726e+02 5.796e+02 7.282e+02 1.371e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-03 10:05:35,310 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 10:05:37,679 INFO [train.py:903] (0/4) Epoch 27, batch 5250, loss[loss=0.1607, simple_loss=0.2479, pruned_loss=0.03679, over 19605.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.0593, over 3852949.06 frames. 
], batch size: 50, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:05:43,493 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.5993, 4.7541, 5.3391, 5.3213, 2.3906, 4.9778, 4.3724, 5.0441], + device='cuda:0'), covar=tensor([0.1621, 0.1611, 0.0549, 0.0637, 0.5683, 0.0997, 0.0583, 0.1088], + device='cuda:0'), in_proj_covar=tensor([0.0812, 0.0779, 0.0984, 0.0867, 0.0855, 0.0748, 0.0582, 0.0913], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 10:06:10,113 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3697, 4.0051, 2.6387, 3.5164, 0.8077, 3.9470, 3.8136, 3.9188], + device='cuda:0'), covar=tensor([0.0725, 0.1176, 0.1986, 0.0926, 0.4059, 0.0800, 0.1034, 0.1207], + device='cuda:0'), in_proj_covar=tensor([0.0530, 0.0427, 0.0515, 0.0356, 0.0410, 0.0456, 0.0451, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:06:31,765 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6435, 2.3690, 1.7955, 1.5829, 2.1339, 1.4186, 1.4839, 2.0337], + device='cuda:0'), covar=tensor([0.1120, 0.0842, 0.1087, 0.0975, 0.0639, 0.1409, 0.0813, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0320, 0.0337, 0.0273, 0.0251, 0.0345, 0.0292, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:06:39,519 INFO [train.py:903] (0/4) Epoch 27, batch 5300, loss[loss=0.1802, simple_loss=0.2663, pruned_loss=0.04704, over 19848.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2817, pruned_loss=0.05906, over 3833777.39 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:06:57,461 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 10:07:07,883 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6075, 1.4871, 1.5305, 2.1474, 1.5845, 1.8658, 1.9782, 1.7133], + device='cuda:0'), covar=tensor([0.0885, 0.0933, 0.1013, 0.0703, 0.0863, 0.0772, 0.0807, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0224, 0.0228, 0.0240, 0.0224, 0.0213, 0.0187, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 10:07:34,150 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.440e+02 4.985e+02 5.848e+02 7.587e+02 2.195e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 10:07:36,641 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:39,954 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:42,166 INFO [train.py:903] (0/4) Epoch 27, batch 5350, loss[loss=0.1923, simple_loss=0.2718, pruned_loss=0.05637, over 19601.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05897, over 3838945.87 frames. 
], batch size: 50, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:07:44,853 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:08:15,011 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6158, 1.7286, 1.9375, 1.8678, 2.7111, 2.3203, 2.8410, 1.3885], + device='cuda:0'), covar=tensor([0.2373, 0.4120, 0.2641, 0.1847, 0.1487, 0.2143, 0.1361, 0.4294], + device='cuda:0'), in_proj_covar=tensor([0.0554, 0.0669, 0.0754, 0.0506, 0.0636, 0.0546, 0.0669, 0.0573], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 10:08:16,926 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 10:08:46,442 INFO [train.py:903] (0/4) Epoch 27, batch 5400, loss[loss=0.2126, simple_loss=0.2868, pruned_loss=0.06916, over 19581.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.05936, over 3838276.44 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:08:59,335 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2676, 1.2557, 1.2799, 1.3789, 1.0133, 1.3519, 1.3208, 1.3277], + device='cuda:0'), covar=tensor([0.0931, 0.0993, 0.1042, 0.0682, 0.0892, 0.0875, 0.0892, 0.0784], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0239, 0.0224, 0.0212, 0.0186, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 10:09:41,329 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.624e+02 4.932e+02 6.009e+02 7.341e+02 1.388e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 10:09:49,115 INFO [train.py:903] (0/4) Epoch 27, batch 5450, loss[loss=0.1749, simple_loss=0.2566, pruned_loss=0.04657, over 19740.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05915, over 3837601.17 frames. ], batch size: 51, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:10:04,470 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:35,621 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183014.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:41,299 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2723, 1.2534, 1.3068, 1.3962, 1.0662, 1.3185, 1.3692, 1.3202], + device='cuda:0'), covar=tensor([0.0926, 0.0975, 0.1069, 0.0670, 0.0859, 0.0900, 0.0863, 0.0821], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0225, 0.0213, 0.0187, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 10:10:50,992 INFO [train.py:903] (0/4) Epoch 27, batch 5500, loss[loss=0.239, simple_loss=0.3175, pruned_loss=0.08019, over 18444.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05908, over 3830125.65 frames. ], batch size: 83, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:04,312 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183039.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:11:13,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 10:11:45,359 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.032e+02 6.184e+02 7.491e+02 1.557e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 10:11:52,119 INFO [train.py:903] (0/4) Epoch 27, batch 5550, loss[loss=0.22, simple_loss=0.2986, pruned_loss=0.07069, over 19667.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05968, over 3828142.03 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:57,917 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 10:12:15,122 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-03 10:12:46,687 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 10:12:56,193 INFO [train.py:903] (0/4) Epoch 27, batch 5600, loss[loss=0.2198, simple_loss=0.2892, pruned_loss=0.0752, over 19780.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.281, pruned_loss=0.0594, over 3823710.67 frames. ], batch size: 46, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:13:51,540 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.790e+02 6.069e+02 7.863e+02 1.689e+03, threshold=1.214e+03, percent-clipped=3.0 +2023-04-03 10:13:59,318 INFO [train.py:903] (0/4) Epoch 27, batch 5650, loss[loss=0.2155, simple_loss=0.2997, pruned_loss=0.0657, over 19388.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2814, pruned_loss=0.05967, over 3824087.43 frames. ], batch size: 70, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:14:44,839 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 10:14:49,504 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:14:56,368 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:14:58,844 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0154, 3.6690, 2.3356, 3.2279, 0.7440, 3.6377, 3.5027, 3.6051], + device='cuda:0'), covar=tensor([0.0732, 0.1049, 0.2118, 0.0977, 0.4057, 0.0772, 0.0948, 0.1190], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0423, 0.0511, 0.0355, 0.0406, 0.0450, 0.0447, 0.0475], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:15:01,003 INFO [train.py:903] (0/4) Epoch 27, batch 5700, loss[loss=0.1744, simple_loss=0.2553, pruned_loss=0.04673, over 19406.00 frames. ], tot_loss[loss=0.2, simple_loss=0.281, pruned_loss=0.0595, over 3830581.43 frames. ], batch size: 48, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:15:25,118 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:54,498 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.103e+02 5.954e+02 7.593e+02 1.308e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-03 10:15:54,885 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:59,208 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 10:16:01,600 INFO [train.py:903] (0/4) Epoch 27, batch 5750, loss[loss=0.2153, simple_loss=0.2971, pruned_loss=0.06676, over 19582.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05943, over 3832515.05 frames. ], batch size: 61, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:16:08,302 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 10:16:13,727 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 10:16:16,169 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6291, 4.2492, 2.8159, 3.8004, 0.9777, 4.2738, 4.0772, 4.1995], + device='cuda:0'), covar=tensor([0.0660, 0.1051, 0.1787, 0.0815, 0.3913, 0.0616, 0.0880, 0.1069], + device='cuda:0'), in_proj_covar=tensor([0.0524, 0.0422, 0.0510, 0.0354, 0.0406, 0.0449, 0.0446, 0.0474], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:16:50,192 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5946, 1.5416, 2.1467, 1.8624, 3.1026, 4.7798, 4.6250, 5.1312], + device='cuda:0'), covar=tensor([0.1474, 0.3709, 0.3116, 0.2109, 0.0631, 0.0208, 0.0161, 0.0166], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0332, 0.0363, 0.0271, 0.0255, 0.0196, 0.0219, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:17:05,411 INFO [train.py:903] (0/4) Epoch 27, batch 5800, loss[loss=0.2002, simple_loss=0.28, pruned_loss=0.06024, over 19619.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05936, over 3848766.51 frames. ], batch size: 50, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:17:11,564 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183333.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:19,544 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:59,580 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.364e+02 7.027e+02 9.052e+02 1.891e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-03 10:18:07,742 INFO [train.py:903] (0/4) Epoch 27, batch 5850, loss[loss=0.1825, simple_loss=0.2723, pruned_loss=0.04632, over 19531.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05994, over 3843447.82 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:18:29,912 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9638, 1.1907, 1.5365, 1.0844, 2.1302, 2.9398, 2.7380, 3.3042], + device='cuda:0'), covar=tensor([0.2024, 0.5246, 0.4678, 0.2877, 0.0872, 0.0332, 0.0338, 0.0392], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0271, 0.0254, 0.0196, 0.0219, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:19:07,134 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:19:08,994 INFO [train.py:903] (0/4) Epoch 27, batch 5900, loss[loss=0.1996, simple_loss=0.2837, pruned_loss=0.05777, over 19687.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.0598, over 3836527.98 frames. 
], batch size: 59, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:19:09,051 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 10:19:32,075 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 10:20:03,305 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 4.868e+02 5.663e+02 7.388e+02 1.454e+03, threshold=1.133e+03, percent-clipped=1.0 +2023-04-03 10:20:10,247 INFO [train.py:903] (0/4) Epoch 27, batch 5950, loss[loss=0.2105, simple_loss=0.2993, pruned_loss=0.06087, over 19608.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2832, pruned_loss=0.06035, over 3828613.24 frames. ], batch size: 57, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:20:40,735 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8989, 4.5583, 3.3808, 3.8110, 1.9392, 4.5213, 4.3512, 4.4869], + device='cuda:0'), covar=tensor([0.0501, 0.0848, 0.1750, 0.0895, 0.3028, 0.0641, 0.0954, 0.1334], + device='cuda:0'), in_proj_covar=tensor([0.0528, 0.0427, 0.0515, 0.0357, 0.0410, 0.0454, 0.0450, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:20:58,222 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0636, 1.3758, 1.7984, 1.2271, 2.7899, 3.6158, 3.3274, 3.8037], + device='cuda:0'), covar=tensor([0.1653, 0.3705, 0.3269, 0.2572, 0.0546, 0.0186, 0.0200, 0.0257], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0271, 0.0254, 0.0196, 0.0219, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:21:12,623 INFO [train.py:903] (0/4) Epoch 27, batch 6000, loss[loss=0.1902, simple_loss=0.2723, pruned_loss=0.05401, over 19594.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06051, over 3814030.74 frames. ], batch size: 52, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,624 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 10:21:25,594 INFO [train.py:937] (0/4) Epoch 27, validation: loss=0.1675, simple_loss=0.2669, pruned_loss=0.03401, over 944034.00 frames. +2023-04-03 10:21:25,595 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 10:22:09,199 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1767, 1.3344, 1.7393, 1.3865, 2.8771, 3.7089, 3.4286, 3.9007], + device='cuda:0'), covar=tensor([0.1702, 0.3867, 0.3457, 0.2445, 0.0573, 0.0191, 0.0208, 0.0272], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0332, 0.0363, 0.0271, 0.0254, 0.0196, 0.0219, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:22:22,440 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.376e+02 6.105e+02 7.773e+02 1.848e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 10:22:28,428 INFO [train.py:903] (0/4) Epoch 27, batch 6050, loss[loss=0.2098, simple_loss=0.287, pruned_loss=0.06629, over 19726.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06072, over 3815990.22 frames. 
], batch size: 63, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:22:43,106 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183589.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:50,962 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:14,126 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:20,843 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183620.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:31,579 INFO [train.py:903] (0/4) Epoch 27, batch 6100, loss[loss=0.2026, simple_loss=0.2901, pruned_loss=0.05751, over 19661.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06118, over 3809077.72 frames. ], batch size: 55, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:23:37,150 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:24:27,769 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.311e+02 5.176e+02 6.035e+02 7.763e+02 1.396e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-03 10:24:33,563 INFO [train.py:903] (0/4) Epoch 27, batch 6150, loss[loss=0.1759, simple_loss=0.2501, pruned_loss=0.05083, over 19287.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06099, over 3805304.33 frames. ], batch size: 44, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:25:01,302 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 10:25:35,001 INFO [train.py:903] (0/4) Epoch 27, batch 6200, loss[loss=0.2475, simple_loss=0.3225, pruned_loss=0.08619, over 19672.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06173, over 3791215.02 frames. ], batch size: 59, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:26:22,203 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. limit=2.0 +2023-04-03 10:26:27,301 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183770.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:26:31,934 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.839e+02 5.826e+02 7.637e+02 1.855e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 10:26:37,744 INFO [train.py:903] (0/4) Epoch 27, batch 6250, loss[loss=0.1809, simple_loss=0.2714, pruned_loss=0.04524, over 19790.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2861, pruned_loss=0.06208, over 3790355.40 frames. ], batch size: 56, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:27:08,104 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 10:27:40,819 INFO [train.py:903] (0/4) Epoch 27, batch 6300, loss[loss=0.1913, simple_loss=0.2684, pruned_loss=0.05713, over 19323.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2867, pruned_loss=0.0621, over 3804587.63 frames. 
], batch size: 44, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:20,269 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:28:36,676 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.928e+02 5.882e+02 7.199e+02 1.705e+03, threshold=1.176e+03, percent-clipped=2.0 +2023-04-03 10:28:43,614 INFO [train.py:903] (0/4) Epoch 27, batch 6350, loss[loss=0.2401, simple_loss=0.3084, pruned_loss=0.08587, over 19646.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2857, pruned_loss=0.06147, over 3804679.03 frames. ], batch size: 58, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:51,872 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:29:43,984 INFO [train.py:903] (0/4) Epoch 27, batch 6400, loss[loss=0.2247, simple_loss=0.3029, pruned_loss=0.07319, over 18837.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2851, pruned_loss=0.06127, over 3807978.22 frames. ], batch size: 74, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:30:39,972 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.757e+02 6.035e+02 7.575e+02 1.901e+03, threshold=1.207e+03, percent-clipped=7.0 +2023-04-03 10:30:43,678 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:30:45,010 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1826, 1.3423, 1.6876, 1.4002, 2.6797, 3.7547, 3.4338, 3.8966], + device='cuda:0'), covar=tensor([0.1735, 0.4043, 0.3717, 0.2648, 0.0697, 0.0210, 0.0220, 0.0306], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0270, 0.0253, 0.0195, 0.0219, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:30:45,873 INFO [train.py:903] (0/4) Epoch 27, batch 6450, loss[loss=0.2154, simple_loss=0.2973, pruned_loss=0.06676, over 19665.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2851, pruned_loss=0.06127, over 3801610.28 frames. ], batch size: 60, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:14,613 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-184000.pt +2023-04-03 10:31:28,486 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 10:31:42,559 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3563, 1.9417, 1.5193, 1.3555, 1.8070, 1.2590, 1.3153, 1.8240], + device='cuda:0'), covar=tensor([0.0907, 0.0818, 0.1062, 0.0887, 0.0581, 0.1320, 0.0695, 0.0463], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0320, 0.0337, 0.0273, 0.0251, 0.0344, 0.0293, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:31:50,539 INFO [train.py:903] (0/4) Epoch 27, batch 6500, loss[loss=0.1972, simple_loss=0.2836, pruned_loss=0.05545, over 19332.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06052, over 3805794.41 frames. ], batch size: 70, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:52,978 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-03 10:32:04,467 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1577, 1.2754, 1.5237, 1.5075, 2.7371, 1.2421, 2.2699, 3.1038], + device='cuda:0'), covar=tensor([0.0593, 0.2943, 0.3010, 0.1818, 0.0727, 0.2306, 0.1258, 0.0342], + device='cuda:0'), in_proj_covar=tensor([0.0424, 0.0378, 0.0397, 0.0354, 0.0385, 0.0357, 0.0395, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:32:11,849 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 10:32:40,201 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:32:46,404 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.542e+02 5.565e+02 7.286e+02 1.442e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-04-03 10:32:53,094 INFO [train.py:903] (0/4) Epoch 27, batch 6550, loss[loss=0.1728, simple_loss=0.2545, pruned_loss=0.04553, over 19363.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06017, over 3821600.03 frames. ], batch size: 47, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:33:08,666 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:33:24,621 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5266, 1.5685, 1.7998, 1.7223, 2.4400, 2.1856, 2.5809, 1.2078], + device='cuda:0'), covar=tensor([0.2426, 0.4266, 0.2701, 0.1980, 0.1614, 0.2192, 0.1480, 0.4740], + device='cuda:0'), in_proj_covar=tensor([0.0552, 0.0669, 0.0751, 0.0507, 0.0636, 0.0544, 0.0666, 0.0571], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 10:33:51,900 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4551, 1.4752, 1.8056, 1.6781, 3.1142, 4.4185, 4.1807, 4.7578], + device='cuda:0'), covar=tensor([0.1577, 0.3899, 0.3639, 0.2459, 0.0704, 0.0206, 0.0206, 0.0241], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0332, 0.0363, 0.0271, 0.0253, 0.0195, 0.0220, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:33:55,011 INFO [train.py:903] (0/4) Epoch 27, batch 6600, loss[loss=0.2068, simple_loss=0.291, pruned_loss=0.06128, over 19662.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06022, over 3809352.21 frames. 
], batch size: 55, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:34:05,975 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0232, 2.1176, 2.3753, 2.6061, 2.0155, 2.5060, 2.2788, 2.1400], + device='cuda:0'), covar=tensor([0.4493, 0.4597, 0.2061, 0.2735, 0.4599, 0.2401, 0.5474, 0.3620], + device='cuda:0'), in_proj_covar=tensor([0.0933, 0.1009, 0.0740, 0.0947, 0.0909, 0.0846, 0.0859, 0.0803], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 10:34:11,860 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:43,132 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:46,840 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 10:34:46,856 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 10:34:50,799 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.027e+02 4.833e+02 5.798e+02 7.178e+02 1.551e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 10:34:58,096 INFO [train.py:903] (0/4) Epoch 27, batch 6650, loss[loss=0.2081, simple_loss=0.281, pruned_loss=0.06761, over 19754.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05963, over 3792205.65 frames. ], batch size: 54, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:35:30,807 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184204.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:35:59,893 INFO [train.py:903] (0/4) Epoch 27, batch 6700, loss[loss=0.1738, simple_loss=0.2555, pruned_loss=0.04604, over 19416.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.06005, over 3797938.72 frames. ], batch size: 48, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:36:52,206 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.122e+02 6.084e+02 8.596e+02 2.606e+03, threshold=1.217e+03, percent-clipped=9.0 +2023-04-03 10:36:57,985 INFO [train.py:903] (0/4) Epoch 27, batch 6750, loss[loss=0.202, simple_loss=0.2887, pruned_loss=0.05764, over 19324.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.0596, over 3808010.53 frames. ], batch size: 66, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:37:31,840 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3612, 3.0921, 2.2781, 2.7541, 0.9345, 3.0565, 2.9193, 3.0499], + device='cuda:0'), covar=tensor([0.1079, 0.1231, 0.2046, 0.1086, 0.3657, 0.1011, 0.1208, 0.1320], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0425, 0.0516, 0.0357, 0.0407, 0.0455, 0.0451, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:37:44,327 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:37:54,077 INFO [train.py:903] (0/4) Epoch 27, batch 6800, loss[loss=0.226, simple_loss=0.2994, pruned_loss=0.07629, over 19543.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06085, over 3796810.62 frames. 
], batch size: 54, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:38:15,897 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:38:24,982 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-27.pt +2023-04-03 10:38:39,920 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 10:38:40,383 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 10:38:43,611 INFO [train.py:903] (0/4) Epoch 28, batch 0, loss[loss=0.2302, simple_loss=0.2995, pruned_loss=0.0805, over 19835.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2995, pruned_loss=0.0805, over 19835.00 frames. ], batch size: 52, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:38:43,611 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 10:38:54,476 INFO [train.py:937] (0/4) Epoch 28, validation: loss=0.1665, simple_loss=0.2666, pruned_loss=0.03316, over 944034.00 frames. +2023-04-03 10:38:54,477 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 10:39:08,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 10:39:14,442 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:15,175 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.715e+02 5.190e+02 6.304e+02 8.212e+02 1.288e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-03 10:39:21,653 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:57,733 INFO [train.py:903] (0/4) Epoch 28, batch 50, loss[loss=0.1791, simple_loss=0.2683, pruned_loss=0.04495, over 19674.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.285, pruned_loss=0.05902, over 856898.83 frames. ], batch size: 58, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:40:04,834 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:40:25,827 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6641, 1.7877, 2.0181, 1.8714, 3.0022, 2.5583, 3.2618, 1.6646], + device='cuda:0'), covar=tensor([0.2587, 0.4418, 0.2925, 0.2044, 0.1722, 0.2282, 0.1668, 0.4669], + device='cuda:0'), in_proj_covar=tensor([0.0553, 0.0668, 0.0751, 0.0506, 0.0636, 0.0544, 0.0668, 0.0571], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 10:40:32,255 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 10:40:58,080 INFO [train.py:903] (0/4) Epoch 28, batch 100, loss[loss=0.2028, simple_loss=0.2905, pruned_loss=0.05754, over 18641.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2835, pruned_loss=0.05894, over 1527832.01 frames. ], batch size: 74, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:41:08,314 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-03 10:41:18,601 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 4.572e+02 5.776e+02 7.316e+02 1.195e+03, threshold=1.155e+03, percent-clipped=0.0 +2023-04-03 10:41:56,017 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:41:58,109 INFO [train.py:903] (0/4) Epoch 28, batch 150, loss[loss=0.1666, simple_loss=0.2453, pruned_loss=0.04396, over 19376.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2849, pruned_loss=0.06031, over 2026633.38 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:42:24,575 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:42:57,425 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 10:42:58,585 INFO [train.py:903] (0/4) Epoch 28, batch 200, loss[loss=0.2132, simple_loss=0.3005, pruned_loss=0.06298, over 19326.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2822, pruned_loss=0.05885, over 2428469.46 frames. ], batch size: 66, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:43:19,482 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 4.969e+02 6.258e+02 7.516e+02 2.266e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 10:43:22,307 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184575.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:24,676 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9166, 1.7128, 1.6118, 1.9195, 1.6212, 1.6111, 1.6030, 1.8178], + device='cuda:0'), covar=tensor([0.1144, 0.1513, 0.1571, 0.1004, 0.1384, 0.0654, 0.1550, 0.0861], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0360, 0.0320, 0.0258, 0.0307, 0.0258, 0.0321, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:43:51,902 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:59,857 INFO [train.py:903] (0/4) Epoch 28, batch 250, loss[loss=0.2538, simple_loss=0.3095, pruned_loss=0.09912, over 19746.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05915, over 2749354.62 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:01,673 INFO [train.py:903] (0/4) Epoch 28, batch 300, loss[loss=0.234, simple_loss=0.3114, pruned_loss=0.07833, over 19416.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2828, pruned_loss=0.05913, over 2978788.98 frames. ], batch size: 70, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:22,244 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.936e+02 6.396e+02 8.049e+02 1.564e+03, threshold=1.279e+03, percent-clipped=7.0 +2023-04-03 10:45:29,100 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-04-03 10:45:40,681 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8497, 1.1396, 1.4755, 0.6097, 2.0564, 2.4397, 2.2003, 2.6326], + device='cuda:0'), covar=tensor([0.1768, 0.4184, 0.3803, 0.3064, 0.0678, 0.0306, 0.0364, 0.0401], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0333, 0.0364, 0.0272, 0.0255, 0.0196, 0.0220, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:46:02,781 INFO [train.py:903] (0/4) Epoch 28, batch 350, loss[loss=0.1869, simple_loss=0.2745, pruned_loss=0.04959, over 19789.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05986, over 3177799.25 frames. ], batch size: 56, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:46:06,328 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:46:11,143 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2393, 1.4154, 2.0355, 1.6360, 3.1516, 4.6311, 4.5217, 5.0500], + device='cuda:0'), covar=tensor([0.1725, 0.4033, 0.3446, 0.2479, 0.0651, 0.0209, 0.0172, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0334, 0.0364, 0.0272, 0.0255, 0.0196, 0.0220, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 10:46:20,173 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184721.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:47:04,871 INFO [train.py:903] (0/4) Epoch 28, batch 400, loss[loss=0.2158, simple_loss=0.2997, pruned_loss=0.06595, over 19799.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.284, pruned_loss=0.06045, over 3306032.37 frames. ], batch size: 56, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:47:07,992 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 10:47:24,994 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.910e+02 5.909e+02 7.518e+02 1.907e+03, threshold=1.182e+03, percent-clipped=3.0 +2023-04-03 10:47:33,301 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6602, 1.4712, 1.4550, 1.5692, 3.2136, 1.2498, 2.4829, 3.7426], + device='cuda:0'), covar=tensor([0.0455, 0.2788, 0.3025, 0.1881, 0.0663, 0.2462, 0.1249, 0.0211], + device='cuda:0'), in_proj_covar=tensor([0.0426, 0.0380, 0.0398, 0.0354, 0.0385, 0.0359, 0.0397, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:47:38,193 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:47:54,338 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9626, 1.9103, 1.6539, 2.0020, 1.7465, 1.6618, 1.7027, 1.8650], + device='cuda:0'), covar=tensor([0.1233, 0.1521, 0.1661, 0.1130, 0.1473, 0.0701, 0.1542, 0.0868], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0359, 0.0318, 0.0258, 0.0306, 0.0257, 0.0321, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:48:05,124 INFO [train.py:903] (0/4) Epoch 28, batch 450, loss[loss=0.233, simple_loss=0.3148, pruned_loss=0.07561, over 19749.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06058, over 3426538.26 frames. 
], batch size: 63, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:48:07,816 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184808.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:20,508 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9156, 2.9380, 2.6957, 2.8686, 2.6586, 2.4895, 2.4533, 2.9726], + device='cuda:0'), covar=tensor([0.0870, 0.1324, 0.1214, 0.1085, 0.1315, 0.0483, 0.1296, 0.0588], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0358, 0.0318, 0.0257, 0.0306, 0.0256, 0.0320, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:48:32,957 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8649, 1.5048, 1.6093, 1.7013, 3.4534, 1.2964, 2.5537, 3.9423], + device='cuda:0'), covar=tensor([0.0470, 0.2759, 0.2862, 0.1809, 0.0661, 0.2527, 0.1295, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0424, 0.0379, 0.0398, 0.0353, 0.0384, 0.0358, 0.0396, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:48:38,474 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 10:48:39,492 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 10:48:43,479 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:56,804 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:49:06,863 INFO [train.py:903] (0/4) Epoch 28, batch 500, loss[loss=0.197, simple_loss=0.2764, pruned_loss=0.05879, over 19741.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2824, pruned_loss=0.05996, over 3527426.94 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:49:28,419 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.109e+02 6.404e+02 8.256e+02 1.456e+03, threshold=1.281e+03, percent-clipped=5.0 +2023-04-03 10:49:33,528 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4184, 1.4229, 1.5993, 1.5692, 1.7436, 1.9092, 1.8071, 0.5281], + device='cuda:0'), covar=tensor([0.2344, 0.4266, 0.2665, 0.1903, 0.1663, 0.2226, 0.1433, 0.4999], + device='cuda:0'), in_proj_covar=tensor([0.0551, 0.0666, 0.0750, 0.0505, 0.0635, 0.0543, 0.0666, 0.0569], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 10:49:41,486 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:50:02,657 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1148, 2.0636, 1.9279, 1.7271, 1.6072, 1.6801, 0.6507, 1.0781], + device='cuda:0'), covar=tensor([0.0721, 0.0668, 0.0512, 0.0864, 0.1305, 0.1002, 0.1394, 0.1130], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0365, 0.0371, 0.0395, 0.0472, 0.0397, 0.0347, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 10:50:09,150 INFO [train.py:903] (0/4) Epoch 28, batch 550, loss[loss=0.1767, simple_loss=0.2531, pruned_loss=0.05011, over 19758.00 frames. 
], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.06031, over 3577063.19 frames. ], batch size: 48, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:08,835 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-03 10:51:11,428 INFO [train.py:903] (0/4) Epoch 28, batch 600, loss[loss=0.2213, simple_loss=0.2855, pruned_loss=0.07855, over 19779.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.06079, over 3616634.08 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:12,923 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184957.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:19,923 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:31,268 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.007e+02 6.276e+02 8.222e+02 1.849e+03, threshold=1.255e+03, percent-clipped=3.0 +2023-04-03 10:51:50,617 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 10:52:14,355 INFO [train.py:903] (0/4) Epoch 28, batch 650, loss[loss=0.2024, simple_loss=0.2868, pruned_loss=0.05898, over 17955.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06046, over 3675644.63 frames. ], batch size: 83, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:16,120 INFO [train.py:903] (0/4) Epoch 28, batch 700, loss[loss=0.2031, simple_loss=0.2827, pruned_loss=0.0618, over 19569.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06047, over 3697544.46 frames. ], batch size: 52, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:38,016 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 4.589e+02 5.550e+02 7.126e+02 1.317e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-03 10:53:45,140 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:01,386 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:19,786 INFO [train.py:903] (0/4) Epoch 28, batch 750, loss[loss=0.2518, simple_loss=0.3223, pruned_loss=0.09069, over 19538.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2838, pruned_loss=0.06105, over 3728504.63 frames. ], batch size: 64, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:54:34,023 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:10,725 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185147.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:20,945 INFO [train.py:903] (0/4) Epoch 28, batch 800, loss[loss=0.1764, simple_loss=0.2555, pruned_loss=0.04862, over 19479.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.0614, over 3749375.38 frames. ], batch size: 49, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:55:30,809 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-03 10:55:34,897 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 10:55:41,840 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.493e+02 6.558e+02 7.858e+02 2.224e+03, threshold=1.312e+03, percent-clipped=8.0 +2023-04-03 10:56:22,057 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8253, 4.3980, 2.8369, 3.8809, 0.9508, 4.4315, 4.2037, 4.3266], + device='cuda:0'), covar=tensor([0.0637, 0.0993, 0.1972, 0.0888, 0.4028, 0.0612, 0.1007, 0.1236], + device='cuda:0'), in_proj_covar=tensor([0.0527, 0.0426, 0.0517, 0.0359, 0.0409, 0.0456, 0.0451, 0.0480], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:56:24,245 INFO [train.py:903] (0/4) Epoch 28, batch 850, loss[loss=0.1841, simple_loss=0.2587, pruned_loss=0.05474, over 19733.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06026, over 3772259.52 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:56:39,480 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:56:51,612 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185228.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:11,559 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:15,600 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 10:57:24,911 INFO [train.py:903] (0/4) Epoch 28, batch 900, loss[loss=0.1927, simple_loss=0.2702, pruned_loss=0.05765, over 19744.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06037, over 3794963.60 frames. ], batch size: 45, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:57:37,060 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 10:57:47,702 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 4.798e+02 5.781e+02 7.336e+02 1.381e+03, threshold=1.156e+03, percent-clipped=1.0 +2023-04-03 10:58:21,541 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:58:28,121 INFO [train.py:903] (0/4) Epoch 28, batch 950, loss[loss=0.1897, simple_loss=0.2826, pruned_loss=0.04847, over 19682.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05957, over 3810759.86 frames. ], batch size: 59, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:58:29,324 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 10:59:11,068 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2924, 1.9948, 1.5251, 1.3408, 1.8245, 1.2558, 1.1518, 1.7949], + device='cuda:0'), covar=tensor([0.0941, 0.0743, 0.1073, 0.0820, 0.0514, 0.1266, 0.0732, 0.0447], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0317, 0.0335, 0.0271, 0.0249, 0.0341, 0.0290, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:59:14,453 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185343.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:59:32,239 INFO [train.py:903] (0/4) Epoch 28, batch 1000, loss[loss=0.2024, simple_loss=0.2718, pruned_loss=0.06649, over 19764.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2817, pruned_loss=0.05939, over 3804264.63 frames. ], batch size: 45, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:59:49,404 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8222, 1.3847, 1.5565, 1.6631, 3.4445, 1.2969, 2.4744, 3.8958], + device='cuda:0'), covar=tensor([0.0520, 0.2884, 0.2956, 0.1907, 0.0645, 0.2501, 0.1425, 0.0224], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0376, 0.0395, 0.0350, 0.0381, 0.0356, 0.0393, 0.0414], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 10:59:53,723 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 4.891e+02 5.853e+02 7.878e+02 2.572e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-03 10:59:57,745 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 11:00:23,372 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 11:00:34,020 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1238, 2.8592, 2.2307, 2.3549, 2.0920, 2.4725, 1.0997, 2.1102], + device='cuda:0'), covar=tensor([0.0782, 0.0675, 0.0785, 0.1185, 0.1132, 0.1175, 0.1517, 0.1151], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0363, 0.0369, 0.0391, 0.0470, 0.0394, 0.0346, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 11:00:34,738 INFO [train.py:903] (0/4) Epoch 28, batch 1050, loss[loss=0.2057, simple_loss=0.2905, pruned_loss=0.06046, over 19658.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06006, over 3795217.97 frames. ], batch size: 55, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:00:47,040 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:00:53,748 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:01:02,617 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 11:01:14,704 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9789, 4.3828, 4.7347, 4.7079, 1.8278, 4.4165, 3.7577, 4.4380], + device='cuda:0'), covar=tensor([0.1742, 0.0995, 0.0608, 0.0757, 0.6108, 0.0946, 0.0752, 0.1190], + device='cuda:0'), in_proj_covar=tensor([0.0824, 0.0785, 0.0998, 0.0874, 0.0867, 0.0762, 0.0588, 0.0926], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 11:01:35,153 INFO [train.py:903] (0/4) Epoch 28, batch 1100, loss[loss=0.1813, simple_loss=0.2623, pruned_loss=0.05011, over 19779.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2853, pruned_loss=0.06126, over 3803712.43 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:01:57,231 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.947e+02 6.329e+02 8.288e+02 1.903e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-03 11:02:12,996 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:18,720 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185491.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:31,455 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7712, 1.2446, 1.4844, 1.4254, 3.3560, 1.1716, 2.3273, 3.7782], + device='cuda:0'), covar=tensor([0.0526, 0.3051, 0.2982, 0.2074, 0.0685, 0.2618, 0.1525, 0.0255], + device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0377, 0.0396, 0.0351, 0.0381, 0.0357, 0.0393, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:02:35,892 INFO [train.py:903] (0/4) Epoch 28, batch 1150, loss[loss=0.1688, simple_loss=0.2461, pruned_loss=0.04575, over 19743.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2861, pruned_loss=0.06173, over 3792858.28 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:03:15,696 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:03:28,122 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-03 11:03:40,791 INFO [train.py:903] (0/4) Epoch 28, batch 1200, loss[loss=0.2051, simple_loss=0.2795, pruned_loss=0.06538, over 19620.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.284, pruned_loss=0.06048, over 3808416.75 frames. ], batch size: 50, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:04:01,632 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.383e+02 6.799e+02 8.653e+02 1.626e+03, threshold=1.360e+03, percent-clipped=3.0 +2023-04-03 11:04:11,766 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 11:04:34,004 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:04:41,627 INFO [train.py:903] (0/4) Epoch 28, batch 1250, loss[loss=0.1933, simple_loss=0.2678, pruned_loss=0.05933, over 19399.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06007, over 3824870.18 frames. 
], batch size: 48, lr: 2.94e-03, grad_scale: 16.0 +2023-04-03 11:04:42,003 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185606.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:03,710 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:44,892 INFO [train.py:903] (0/4) Epoch 28, batch 1300, loss[loss=0.2345, simple_loss=0.3122, pruned_loss=0.07843, over 17999.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06047, over 3811238.24 frames. ], batch size: 83, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:05:46,446 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6910, 1.4201, 1.6029, 1.6100, 3.2892, 1.2705, 2.5762, 3.6845], + device='cuda:0'), covar=tensor([0.0460, 0.2623, 0.2728, 0.1806, 0.0641, 0.2315, 0.1083, 0.0240], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0376, 0.0396, 0.0351, 0.0381, 0.0357, 0.0392, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:06:04,600 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185672.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:06,497 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 4.542e+02 5.530e+02 7.592e+02 1.164e+03, threshold=1.106e+03, percent-clipped=0.0 +2023-04-03 11:06:35,852 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185697.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:39,502 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0493, 3.2174, 1.9527, 2.0374, 2.9532, 1.7242, 1.6553, 2.2970], + device='cuda:0'), covar=tensor([0.1335, 0.0687, 0.1091, 0.0864, 0.0508, 0.1268, 0.0935, 0.0664], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0320, 0.0339, 0.0273, 0.0250, 0.0346, 0.0292, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:06:46,286 INFO [train.py:903] (0/4) Epoch 28, batch 1350, loss[loss=0.2076, simple_loss=0.2789, pruned_loss=0.06816, over 19391.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.06005, over 3808484.81 frames. ], batch size: 48, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:07:48,956 INFO [train.py:903] (0/4) Epoch 28, batch 1400, loss[loss=0.2228, simple_loss=0.3034, pruned_loss=0.07116, over 19052.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06012, over 3812904.08 frames. 
], batch size: 69, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:08:11,859 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.444e+02 4.899e+02 6.106e+02 7.699e+02 1.518e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 11:08:15,718 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185777.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:35,095 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:44,851 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3495, 3.8325, 3.9654, 3.9665, 1.5519, 3.7749, 3.2753, 3.7487], + device='cuda:0'), covar=tensor([0.1721, 0.0914, 0.0685, 0.0791, 0.6073, 0.1037, 0.0786, 0.1145], + device='cuda:0'), in_proj_covar=tensor([0.0826, 0.0788, 0.1001, 0.0875, 0.0869, 0.0763, 0.0591, 0.0928], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 11:08:49,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 11:08:51,548 INFO [train.py:903] (0/4) Epoch 28, batch 1450, loss[loss=0.2052, simple_loss=0.2949, pruned_loss=0.05772, over 19666.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2832, pruned_loss=0.06015, over 3813167.35 frames. ], batch size: 55, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:09:05,591 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185817.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:09:06,840 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:20,227 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:54,217 INFO [train.py:903] (0/4) Epoch 28, batch 1500, loss[loss=0.2151, simple_loss=0.3124, pruned_loss=0.05889, over 19777.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06081, over 3801159.42 frames. ], batch size: 56, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:10:01,968 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:15,979 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 5.017e+02 6.227e+02 8.666e+02 1.816e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-03 11:10:33,538 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185887.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:56,447 INFO [train.py:903] (0/4) Epoch 28, batch 1550, loss[loss=0.2001, simple_loss=0.2896, pruned_loss=0.05529, over 19800.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.0609, over 3813844.92 frames. ], batch size: 56, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:11:45,477 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:11:58,295 INFO [train.py:903] (0/4) Epoch 28, batch 1600, loss[loss=0.229, simple_loss=0.3157, pruned_loss=0.07112, over 19790.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2842, pruned_loss=0.06051, over 3825639.14 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:12:20,847 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-03 11:12:23,177 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.773e+02 5.899e+02 6.974e+02 1.687e+03, threshold=1.180e+03, percent-clipped=2.0 +2023-04-03 11:12:28,398 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-03 11:12:54,129 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-186000.pt +2023-04-03 11:13:03,463 INFO [train.py:903] (0/4) Epoch 28, batch 1650, loss[loss=0.2658, simple_loss=0.3352, pruned_loss=0.09818, over 19112.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.06011, over 3839962.85 frames. ], batch size: 69, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:13:07,279 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5358, 2.4026, 2.2754, 2.6507, 2.2185, 2.1235, 1.9612, 2.5418], + device='cuda:0'), covar=tensor([0.0991, 0.1629, 0.1410, 0.1003, 0.1504, 0.0551, 0.1601, 0.0688], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0362, 0.0320, 0.0259, 0.0309, 0.0259, 0.0323, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 11:13:26,163 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7866, 4.3649, 2.8110, 3.8068, 1.0726, 4.3690, 4.1904, 4.3322], + device='cuda:0'), covar=tensor([0.0556, 0.0936, 0.1927, 0.0859, 0.3909, 0.0595, 0.0896, 0.1176], + device='cuda:0'), in_proj_covar=tensor([0.0526, 0.0427, 0.0515, 0.0360, 0.0410, 0.0457, 0.0451, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:14:07,004 INFO [train.py:903] (0/4) Epoch 28, batch 1700, loss[loss=0.1899, simple_loss=0.2719, pruned_loss=0.05396, over 19672.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2842, pruned_loss=0.06016, over 3832061.71 frames. ], batch size: 53, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:14:29,613 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.751e+02 5.670e+02 7.170e+02 1.723e+03, threshold=1.134e+03, percent-clipped=7.0 +2023-04-03 11:14:44,414 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 11:15:08,848 INFO [train.py:903] (0/4) Epoch 28, batch 1750, loss[loss=0.2105, simple_loss=0.2878, pruned_loss=0.06664, over 19589.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2854, pruned_loss=0.06064, over 3830020.83 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:15:24,172 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:15:28,462 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:16:11,527 INFO [train.py:903] (0/4) Epoch 28, batch 1800, loss[loss=0.1882, simple_loss=0.2782, pruned_loss=0.04913, over 19888.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2857, pruned_loss=0.06111, over 3843205.53 frames. 
], batch size: 55, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:16:18,360 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186161.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:16:36,634 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.184e+02 6.048e+02 8.514e+02 1.613e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 11:16:38,124 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186176.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:16:42,852 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1438, 1.4069, 1.8739, 1.4803, 3.0007, 4.5334, 4.4508, 4.9735], + device='cuda:0'), covar=tensor([0.1798, 0.3932, 0.3557, 0.2557, 0.0732, 0.0244, 0.0174, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0333, 0.0366, 0.0273, 0.0257, 0.0197, 0.0220, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 11:17:02,769 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 11:17:08,586 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 11:17:09,009 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186201.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:15,219 INFO [train.py:903] (0/4) Epoch 28, batch 1850, loss[loss=0.1686, simple_loss=0.246, pruned_loss=0.04559, over 19710.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2852, pruned_loss=0.06107, over 3841738.26 frames. ], batch size: 45, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:17:39,951 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:46,436 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 11:17:51,544 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186236.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:18:17,542 INFO [train.py:903] (0/4) Epoch 28, batch 1900, loss[loss=0.2281, simple_loss=0.3107, pruned_loss=0.0728, over 19726.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2857, pruned_loss=0.06093, over 3858866.47 frames. ], batch size: 63, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:18:33,597 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 11:18:38,425 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 11:18:39,538 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.824e+02 4.995e+02 5.845e+02 7.128e+02 1.193e+03, threshold=1.169e+03, percent-clipped=0.0 +2023-04-03 11:18:40,999 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186276.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:18:42,897 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:19:02,976 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 11:19:19,221 INFO [train.py:903] (0/4) Epoch 28, batch 1950, loss[loss=0.1884, simple_loss=0.2834, pruned_loss=0.04665, over 17430.00 frames. 
], tot_loss[loss=0.2033, simple_loss=0.285, pruned_loss=0.0608, over 3832072.64 frames. ], batch size: 101, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:20,362 INFO [train.py:903] (0/4) Epoch 28, batch 2000, loss[loss=0.2231, simple_loss=0.3045, pruned_loss=0.0708, over 19543.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2855, pruned_loss=0.06114, over 3823208.84 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:32,046 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3833, 3.1419, 2.4740, 2.3950, 2.4733, 2.6990, 1.1910, 2.2411], + device='cuda:0'), covar=tensor([0.0759, 0.0590, 0.0762, 0.1196, 0.1009, 0.1087, 0.1467, 0.1135], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0365, 0.0370, 0.0392, 0.0470, 0.0396, 0.0346, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 11:20:33,486 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 11:20:45,109 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.060e+02 6.550e+02 8.587e+02 3.446e+03, threshold=1.310e+03, percent-clipped=8.0 +2023-04-03 11:21:10,085 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3225, 1.3205, 1.5004, 1.4748, 1.7895, 1.8313, 1.8410, 0.6010], + device='cuda:0'), covar=tensor([0.2638, 0.4618, 0.2851, 0.2113, 0.1698, 0.2482, 0.1476, 0.5246], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0671, 0.0757, 0.0509, 0.0638, 0.0546, 0.0671, 0.0574], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 11:21:19,993 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 11:21:24,401 INFO [train.py:903] (0/4) Epoch 28, batch 2050, loss[loss=0.2071, simple_loss=0.2954, pruned_loss=0.05937, over 19097.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2849, pruned_loss=0.06088, over 3824240.97 frames. ], batch size: 69, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:21:40,397 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 11:21:42,542 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 11:22:02,202 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 11:22:04,678 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:26,600 INFO [train.py:903] (0/4) Epoch 28, batch 2100, loss[loss=0.1664, simple_loss=0.2471, pruned_loss=0.04279, over 19729.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2852, pruned_loss=0.06101, over 3826649.08 frames. ], batch size: 51, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:22:35,115 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:49,632 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.089e+02 6.089e+02 7.390e+02 1.324e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 11:22:58,700 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-03 11:23:11,820 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:13,750 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:20,667 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 11:23:28,666 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.9746, 5.4236, 2.9294, 4.6544, 1.1412, 5.6166, 5.3380, 5.5914], + device='cuda:0'), covar=tensor([0.0341, 0.0805, 0.1976, 0.0774, 0.4081, 0.0483, 0.0859, 0.0960], + device='cuda:0'), in_proj_covar=tensor([0.0528, 0.0430, 0.0516, 0.0360, 0.0410, 0.0458, 0.0452, 0.0481], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:23:29,662 INFO [train.py:903] (0/4) Epoch 28, batch 2150, loss[loss=0.2045, simple_loss=0.2912, pruned_loss=0.05891, over 19517.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2853, pruned_loss=0.06111, over 3822251.10 frames. ], batch size: 64, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:23:42,668 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:45,790 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186520.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:02,574 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186532.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:24:06,044 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:30,567 INFO [train.py:903] (0/4) Epoch 28, batch 2200, loss[loss=0.1732, simple_loss=0.2533, pruned_loss=0.04651, over 19764.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2852, pruned_loss=0.06141, over 3820489.35 frames. ], batch size: 48, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:24:32,075 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186557.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:32,188 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186557.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:24:55,776 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 4.864e+02 5.746e+02 7.619e+02 1.717e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-04-03 11:24:58,630 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:09,374 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:32,792 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:34,933 INFO [train.py:903] (0/4) Epoch 28, batch 2250, loss[loss=0.1586, simple_loss=0.2394, pruned_loss=0.03896, over 19759.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2841, pruned_loss=0.06092, over 3799340.51 frames. 
], batch size: 46, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:25:54,472 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186621.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:09,358 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 11:26:12,596 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186635.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:39,385 INFO [train.py:903] (0/4) Epoch 28, batch 2300, loss[loss=0.1648, simple_loss=0.2505, pruned_loss=0.03961, over 19862.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2838, pruned_loss=0.06084, over 3790627.34 frames. ], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:26:55,478 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 11:27:02,271 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 4.896e+02 5.750e+02 7.152e+02 2.246e+03, threshold=1.150e+03, percent-clipped=6.0 +2023-04-03 11:27:42,236 INFO [train.py:903] (0/4) Epoch 28, batch 2350, loss[loss=0.219, simple_loss=0.3063, pruned_loss=0.06579, over 19324.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.0607, over 3792838.72 frames. ], batch size: 66, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:28:07,862 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:20,484 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:26,081 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 11:28:42,185 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 11:28:44,646 INFO [train.py:903] (0/4) Epoch 28, batch 2400, loss[loss=0.2398, simple_loss=0.3198, pruned_loss=0.07993, over 19263.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05995, over 3808765.09 frames. ], batch size: 66, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:29:08,813 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 4.439e+02 5.729e+02 7.466e+02 1.887e+03, threshold=1.146e+03, percent-clipped=9.0 +2023-04-03 11:29:20,254 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:29:47,722 INFO [train.py:903] (0/4) Epoch 28, batch 2450, loss[loss=0.1769, simple_loss=0.2642, pruned_loss=0.04477, over 19603.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05945, over 3808209.87 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:01,663 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. 
limit=2.0 +2023-04-03 11:30:02,270 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2889, 2.0522, 1.8712, 2.1415, 1.9357, 1.8665, 1.7360, 2.1069], + device='cuda:0'), covar=tensor([0.1027, 0.1508, 0.1479, 0.1095, 0.1385, 0.0574, 0.1596, 0.0730], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0362, 0.0321, 0.0259, 0.0308, 0.0258, 0.0323, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 11:30:22,250 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:26,673 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:50,780 INFO [train.py:903] (0/4) Epoch 28, batch 2500, loss[loss=0.1793, simple_loss=0.2536, pruned_loss=0.05244, over 19715.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2809, pruned_loss=0.0594, over 3808532.83 frames. ], batch size: 45, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:54,459 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:54,580 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:15,070 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.341e+02 4.860e+02 5.866e+02 7.110e+02 2.029e+03, threshold=1.173e+03, percent-clipped=7.0 +2023-04-03 11:31:19,818 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186879.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:35,069 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186891.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:45,046 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:48,252 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:54,935 INFO [train.py:903] (0/4) Epoch 28, batch 2550, loss[loss=0.1706, simple_loss=0.2459, pruned_loss=0.04763, over 19292.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2818, pruned_loss=0.05998, over 3813259.24 frames. ], batch size: 44, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:32:06,784 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186916.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,496 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,643 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:47,690 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186948.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:50,867 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 11:32:52,300 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:56,643 INFO [train.py:903] (0/4) Epoch 28, batch 2600, loss[loss=0.1714, simple_loss=0.2647, pruned_loss=0.03905, over 19789.00 frames. 
], tot_loss[loss=0.2008, simple_loss=0.2817, pruned_loss=0.05995, over 3816106.95 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:33:06,496 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5502, 1.5625, 1.7110, 1.7856, 2.2318, 2.2132, 2.3051, 0.9399], + device='cuda:0'), covar=tensor([0.2558, 0.4582, 0.2924, 0.1921, 0.1658, 0.2253, 0.1514, 0.4987], + device='cuda:0'), in_proj_covar=tensor([0.0553, 0.0669, 0.0755, 0.0507, 0.0633, 0.0545, 0.0667, 0.0571], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 11:33:20,687 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.127e+02 5.898e+02 7.788e+02 1.720e+03, threshold=1.180e+03, percent-clipped=7.0 +2023-04-03 11:33:43,069 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186992.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:45,238 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:59,859 INFO [train.py:903] (0/4) Epoch 28, batch 2650, loss[loss=0.2424, simple_loss=0.3315, pruned_loss=0.07662, over 19738.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2827, pruned_loss=0.06046, over 3808957.23 frames. ], batch size: 63, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:34:12,821 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:14,013 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:22,437 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 11:34:42,879 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4191, 2.4388, 2.5715, 2.9738, 2.5126, 2.9279, 2.6182, 2.5311], + device='cuda:0'), covar=tensor([0.3489, 0.3353, 0.1619, 0.2009, 0.3241, 0.1782, 0.3685, 0.2586], + device='cuda:0'), in_proj_covar=tensor([0.0941, 0.1020, 0.0747, 0.0955, 0.0917, 0.0858, 0.0867, 0.0814], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 11:34:46,251 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187043.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:47,535 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:02,636 INFO [train.py:903] (0/4) Epoch 28, batch 2700, loss[loss=0.1649, simple_loss=0.2417, pruned_loss=0.0441, over 19373.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06073, over 3808471.88 frames. 
], batch size: 47, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:35:02,883 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:13,333 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:22,254 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187071.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:26,784 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 4.685e+02 5.845e+02 7.450e+02 1.306e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 11:36:06,491 INFO [train.py:903] (0/4) Epoch 28, batch 2750, loss[loss=0.2137, simple_loss=0.2986, pruned_loss=0.06438, over 19508.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06003, over 3799536.00 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:36:10,442 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5566, 2.2600, 1.7522, 1.5406, 2.0561, 1.4560, 1.4071, 2.0465], + device='cuda:0'), covar=tensor([0.1140, 0.0942, 0.1106, 0.0971, 0.0643, 0.1349, 0.0861, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0320, 0.0341, 0.0273, 0.0252, 0.0345, 0.0293, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:36:13,210 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 11:36:15,452 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 11:36:18,804 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6924, 1.5751, 1.6001, 2.2916, 1.6205, 2.0262, 1.9572, 1.8049], + device='cuda:0'), covar=tensor([0.0879, 0.0919, 0.0997, 0.0722, 0.0913, 0.0770, 0.0874, 0.0697], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0225, 0.0213, 0.0187, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 11:36:43,428 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2815, 1.3555, 1.7889, 1.4612, 2.8885, 3.7669, 3.4209, 3.9530], + device='cuda:0'), covar=tensor([0.1554, 0.3702, 0.3271, 0.2308, 0.0552, 0.0170, 0.0209, 0.0270], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0333, 0.0365, 0.0272, 0.0256, 0.0197, 0.0220, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 11:36:59,815 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 11:37:06,427 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:08,436 INFO [train.py:903] (0/4) Epoch 28, batch 2800, loss[loss=0.1759, simple_loss=0.2571, pruned_loss=0.04733, over 19862.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.05964, over 3813014.22 frames. 
], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:31,623 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.065e+02 4.906e+02 5.640e+02 7.444e+02 1.445e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-04-03 11:37:36,918 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:46,991 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187186.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:03,783 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3580, 2.3861, 2.2797, 2.5521, 2.3423, 1.9997, 2.1744, 2.3613], + device='cuda:0'), covar=tensor([0.0866, 0.1125, 0.1036, 0.0762, 0.1027, 0.0501, 0.1200, 0.0602], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0360, 0.0319, 0.0257, 0.0305, 0.0257, 0.0322, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:38:05,764 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187202.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:10,176 INFO [train.py:903] (0/4) Epoch 28, batch 2850, loss[loss=0.2467, simple_loss=0.3145, pruned_loss=0.08946, over 19349.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.06001, over 3806738.46 frames. ], batch size: 66, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:38:13,786 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187208.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:44,920 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187233.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:05,529 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3460, 1.0379, 1.2998, 1.9619, 1.4971, 1.2795, 1.4495, 1.2256], + device='cuda:0'), covar=tensor([0.1173, 0.1723, 0.1306, 0.0830, 0.1183, 0.1489, 0.1313, 0.1146], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0226, 0.0213, 0.0187, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 11:39:05,587 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187250.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:11,670 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 11:39:12,690 INFO [train.py:903] (0/4) Epoch 28, batch 2900, loss[loss=0.2263, simple_loss=0.3174, pruned_loss=0.06758, over 19668.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.0596, over 3804502.01 frames. ], batch size: 60, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:39:13,294 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-03 11:39:29,225 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8067, 1.9178, 1.9071, 2.6783, 2.0190, 2.5796, 1.9827, 1.5914], + device='cuda:0'), covar=tensor([0.5044, 0.4871, 0.2979, 0.3205, 0.4892, 0.2628, 0.6623, 0.5470], + device='cuda:0'), in_proj_covar=tensor([0.0938, 0.1016, 0.0744, 0.0953, 0.0914, 0.0857, 0.0864, 0.0812], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 11:39:32,576 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:33,466 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:35,532 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.012e+02 6.172e+02 7.442e+02 2.226e+03, threshold=1.234e+03, percent-clipped=8.0 +2023-04-03 11:39:35,902 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:02,569 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:06,077 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187300.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:13,525 INFO [train.py:903] (0/4) Epoch 28, batch 2950, loss[loss=0.185, simple_loss=0.2779, pruned_loss=0.0461, over 19527.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2841, pruned_loss=0.06082, over 3805017.10 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:40:25,515 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4345, 1.8189, 1.4685, 1.4037, 1.7139, 1.3131, 1.3784, 1.6979], + device='cuda:0'), covar=tensor([0.0859, 0.0858, 0.0811, 0.0782, 0.0532, 0.1093, 0.0655, 0.0500], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0320, 0.0342, 0.0273, 0.0253, 0.0346, 0.0294, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:40:26,515 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187317.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:28,967 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:35,742 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:59,956 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187344.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:13,838 INFO [train.py:903] (0/4) Epoch 28, batch 3000, loss[loss=0.1865, simple_loss=0.2724, pruned_loss=0.05026, over 19661.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06116, over 3803195.87 frames. ], batch size: 53, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:41:13,838 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 11:41:26,714 INFO [train.py:937] (0/4) Epoch 28, validation: loss=0.1673, simple_loss=0.2667, pruned_loss=0.03394, over 944034.00 frames. 
+2023-04-03 11:41:26,715 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 11:41:29,148 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 11:41:49,227 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.693e+02 4.898e+02 6.411e+02 7.995e+02 1.373e+03, threshold=1.282e+03, percent-clipped=5.0 +2023-04-03 11:42:05,094 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:06,472 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:08,757 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:18,233 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-03 11:42:21,164 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:27,787 INFO [train.py:903] (0/4) Epoch 28, batch 3050, loss[loss=0.1785, simple_loss=0.2633, pruned_loss=0.0468, over 19740.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2849, pruned_loss=0.06106, over 3783270.86 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:13,158 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:29,480 INFO [train.py:903] (0/4) Epoch 28, batch 3100, loss[loss=0.2147, simple_loss=0.2935, pruned_loss=0.06796, over 19533.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.06059, over 3781604.95 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:43,911 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:54,449 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.792e+02 5.797e+02 7.675e+02 1.223e+03, threshold=1.159e+03, percent-clipped=0.0 +2023-04-03 11:44:27,043 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187502.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:32,170 INFO [train.py:903] (0/4) Epoch 28, batch 3150, loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07386, over 17049.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2823, pruned_loss=0.05982, over 3787430.34 frames. ], batch size: 100, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:44:44,911 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:54,656 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 11:45:01,635 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:05,052 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6851, 4.2907, 2.8302, 3.7695, 1.0065, 4.2506, 4.0638, 4.2030], + device='cuda:0'), covar=tensor([0.0631, 0.0957, 0.1884, 0.0850, 0.3956, 0.0707, 0.0939, 0.1172], + device='cuda:0'), in_proj_covar=tensor([0.0534, 0.0435, 0.0521, 0.0364, 0.0414, 0.0462, 0.0456, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:45:10,634 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:11,885 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2738, 1.2184, 1.2248, 1.3463, 0.9655, 1.3749, 1.2624, 1.2869], + device='cuda:0'), covar=tensor([0.0920, 0.1002, 0.1109, 0.0677, 0.0940, 0.0904, 0.0883, 0.0810], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0226, 0.0214, 0.0188, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 11:45:15,208 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:35,289 INFO [train.py:903] (0/4) Epoch 28, batch 3200, loss[loss=0.2143, simple_loss=0.298, pruned_loss=0.06534, over 19603.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06011, over 3790567.49 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:45:45,083 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5462, 1.6308, 1.9560, 1.7943, 2.7516, 2.1579, 2.9461, 1.5902], + device='cuda:0'), covar=tensor([0.2711, 0.4460, 0.2797, 0.2169, 0.1576, 0.2509, 0.1473, 0.4604], + device='cuda:0'), in_proj_covar=tensor([0.0553, 0.0670, 0.0754, 0.0507, 0.0635, 0.0545, 0.0669, 0.0572], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 11:45:55,551 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:57,382 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.914e+02 6.228e+02 8.001e+02 2.182e+03, threshold=1.246e+03, percent-clipped=10.0 +2023-04-03 11:46:27,891 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187598.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:46:36,797 INFO [train.py:903] (0/4) Epoch 28, batch 3250, loss[loss=0.1759, simple_loss=0.2645, pruned_loss=0.04368, over 19515.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.05998, over 3790550.29 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:46:42,134 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-03 11:47:23,915 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187644.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:47:37,446 INFO [train.py:903] (0/4) Epoch 28, batch 3300, loss[loss=0.2145, simple_loss=0.2851, pruned_loss=0.07198, over 19613.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2826, pruned_loss=0.06008, over 3800252.03 frames. 
], batch size: 50, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:47:37,470 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 11:47:54,272 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:48:01,982 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.770e+02 6.093e+02 7.830e+02 1.620e+03, threshold=1.219e+03, percent-clipped=4.0 +2023-04-03 11:48:07,213 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-03 11:48:09,499 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:48:40,836 INFO [train.py:903] (0/4) Epoch 28, batch 3350, loss[loss=0.1737, simple_loss=0.2572, pruned_loss=0.04509, over 19481.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06052, over 3810197.94 frames. ], batch size: 49, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:49:14,753 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:49:42,978 INFO [train.py:903] (0/4) Epoch 28, batch 3400, loss[loss=0.1717, simple_loss=0.2492, pruned_loss=0.04704, over 19364.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06033, over 3826172.80 frames. ], batch size: 47, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:49:45,544 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:01,974 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:07,735 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.801e+02 5.865e+02 7.575e+02 1.695e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-03 11:50:16,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:32,875 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:46,601 INFO [train.py:903] (0/4) Epoch 28, batch 3450, loss[loss=0.1838, simple_loss=0.258, pruned_loss=0.05481, over 19035.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2848, pruned_loss=0.06088, over 3836873.73 frames. ], batch size: 42, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:50:47,820 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 11:51:41,440 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:51:49,295 INFO [train.py:903] (0/4) Epoch 28, batch 3500, loss[loss=0.1886, simple_loss=0.2656, pruned_loss=0.05586, over 19656.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06025, over 3833164.42 frames. 
], batch size: 53, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:52:11,978 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:14,011 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.602e+02 4.936e+02 6.004e+02 7.154e+02 1.224e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 11:52:20,872 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:26,776 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:50,775 INFO [train.py:903] (0/4) Epoch 28, batch 3550, loss[loss=0.2232, simple_loss=0.3028, pruned_loss=0.07179, over 18767.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.06012, over 3835117.11 frames. ], batch size: 74, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:53:37,511 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9076, 1.8629, 1.8121, 1.6634, 1.4847, 1.6197, 0.5242, 0.9469], + device='cuda:0'), covar=tensor([0.0731, 0.0719, 0.0478, 0.0783, 0.1268, 0.0859, 0.1418, 0.1159], + device='cuda:0'), in_proj_covar=tensor([0.0367, 0.0366, 0.0372, 0.0393, 0.0472, 0.0397, 0.0346, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 11:53:52,901 INFO [train.py:903] (0/4) Epoch 28, batch 3600, loss[loss=0.1772, simple_loss=0.2586, pruned_loss=0.04783, over 19752.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2846, pruned_loss=0.06083, over 3829763.11 frames. ], batch size: 51, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:54:03,439 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1378, 1.7984, 1.4453, 1.2302, 1.5889, 1.1942, 1.1421, 1.6477], + device='cuda:0'), covar=tensor([0.0972, 0.0849, 0.1240, 0.0919, 0.0640, 0.1442, 0.0740, 0.0475], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0320, 0.0344, 0.0273, 0.0253, 0.0348, 0.0294, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:54:17,595 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.904e+02 5.870e+02 7.567e+02 1.667e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 11:54:34,389 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:42,580 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:46,912 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-188000.pt +2023-04-03 11:54:48,307 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:56,052 INFO [train.py:903] (0/4) Epoch 28, batch 3650, loss[loss=0.2126, simple_loss=0.3036, pruned_loss=0.06079, over 19671.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06073, over 3829475.99 frames. 
], batch size: 59, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:55:51,316 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8246, 1.3526, 1.0690, 0.9814, 1.1454, 1.0208, 0.8976, 1.2092], + device='cuda:0'), covar=tensor([0.0742, 0.0925, 0.1239, 0.0879, 0.0643, 0.1448, 0.0736, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0322, 0.0345, 0.0274, 0.0254, 0.0350, 0.0296, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 11:55:57,939 INFO [train.py:903] (0/4) Epoch 28, batch 3700, loss[loss=0.1805, simple_loss=0.2624, pruned_loss=0.04933, over 19722.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06132, over 3797744.76 frames. ], batch size: 51, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:56:23,997 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.013e+02 6.013e+02 7.793e+02 2.143e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 11:56:59,793 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:57:00,598 INFO [train.py:903] (0/4) Epoch 28, batch 3750, loss[loss=0.2036, simple_loss=0.2864, pruned_loss=0.06041, over 13470.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06076, over 3804027.25 frames. ], batch size: 136, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:57:32,219 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188130.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:58:03,786 INFO [train.py:903] (0/4) Epoch 28, batch 3800, loss[loss=0.2049, simple_loss=0.2873, pruned_loss=0.06126, over 17222.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.05991, over 3820532.63 frames. ], batch size: 101, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:58:30,498 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.177e+02 6.017e+02 8.386e+02 1.721e+03, threshold=1.203e+03, percent-clipped=7.0 +2023-04-03 11:58:33,991 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 11:59:06,276 INFO [train.py:903] (0/4) Epoch 28, batch 3850, loss[loss=0.1961, simple_loss=0.2863, pruned_loss=0.05289, over 19680.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05971, over 3829078.96 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:59:56,106 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:05,189 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188252.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:09,170 INFO [train.py:903] (0/4) Epoch 28, batch 3900, loss[loss=0.2153, simple_loss=0.2911, pruned_loss=0.06973, over 19587.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2835, pruned_loss=0.05987, over 3829764.29 frames. 
], batch size: 52, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:00:09,596 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:26,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188270.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:34,027 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.053e+02 6.264e+02 8.116e+02 1.975e+03, threshold=1.253e+03, percent-clipped=4.0 +2023-04-03 12:00:34,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:39,133 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188281.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:04,048 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:04,150 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5345, 2.6467, 2.2934, 2.7585, 2.4596, 2.2186, 2.2095, 2.5542], + device='cuda:0'), covar=tensor([0.0992, 0.1430, 0.1403, 0.0957, 0.1312, 0.0556, 0.1475, 0.0672], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0358, 0.0318, 0.0256, 0.0305, 0.0256, 0.0321, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:01:09,683 INFO [train.py:903] (0/4) Epoch 28, batch 3950, loss[loss=0.1984, simple_loss=0.2906, pruned_loss=0.05314, over 19275.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2833, pruned_loss=0.05964, over 3828691.03 frames. ], batch size: 66, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:01:15,495 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 12:02:12,176 INFO [train.py:903] (0/4) Epoch 28, batch 4000, loss[loss=0.2678, simple_loss=0.3428, pruned_loss=0.09639, over 19311.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2835, pruned_loss=0.05951, over 3832896.13 frames. ], batch size: 66, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:02:38,281 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.378e+02 4.980e+02 6.272e+02 8.090e+02 1.579e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 12:02:59,863 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 12:03:14,916 INFO [train.py:903] (0/4) Epoch 28, batch 4050, loss[loss=0.2675, simple_loss=0.3437, pruned_loss=0.09564, over 13432.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2833, pruned_loss=0.05965, over 3832102.45 frames. ], batch size: 136, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:17,051 INFO [train.py:903] (0/4) Epoch 28, batch 4100, loss[loss=0.2237, simple_loss=0.3014, pruned_loss=0.07299, over 19772.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2844, pruned_loss=0.06036, over 3823482.08 frames. 
], batch size: 56, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:43,424 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.116e+02 6.170e+02 8.694e+02 1.686e+03, threshold=1.234e+03, percent-clipped=5.0 +2023-04-03 12:04:48,571 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7295, 1.7651, 1.6225, 1.4258, 1.3755, 1.4269, 0.3336, 0.7289], + device='cuda:0'), covar=tensor([0.0629, 0.0645, 0.0479, 0.0759, 0.1296, 0.0832, 0.1346, 0.1172], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0365, 0.0372, 0.0394, 0.0474, 0.0398, 0.0346, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 12:04:53,769 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 12:05:11,254 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:05:19,164 INFO [train.py:903] (0/4) Epoch 28, batch 4150, loss[loss=0.1986, simple_loss=0.2883, pruned_loss=0.05445, over 19304.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2838, pruned_loss=0.05999, over 3810848.69 frames. ], batch size: 70, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:05:56,442 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2461, 1.2426, 1.6470, 1.0159, 2.3482, 3.0658, 2.7398, 3.2397], + device='cuda:0'), covar=tensor([0.1577, 0.4140, 0.3590, 0.2875, 0.0673, 0.0233, 0.0274, 0.0346], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0333, 0.0367, 0.0273, 0.0257, 0.0198, 0.0222, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 12:06:21,499 INFO [train.py:903] (0/4) Epoch 28, batch 4200, loss[loss=0.2209, simple_loss=0.3095, pruned_loss=0.06615, over 19671.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2847, pruned_loss=0.06065, over 3803678.04 frames. ], batch size: 55, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:06:24,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 12:06:46,553 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 4.775e+02 5.885e+02 7.540e+02 1.202e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-03 12:07:22,476 INFO [train.py:903] (0/4) Epoch 28, batch 4250, loss[loss=0.2038, simple_loss=0.284, pruned_loss=0.0618, over 19777.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06078, over 3794366.71 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:07:33,737 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 12:07:44,072 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-03 12:07:52,218 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2346, 1.3681, 1.9988, 1.7516, 3.0870, 4.5361, 4.3664, 4.9444], + device='cuda:0'), covar=tensor([0.1822, 0.4129, 0.3437, 0.2368, 0.0667, 0.0232, 0.0198, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0333, 0.0367, 0.0273, 0.0257, 0.0198, 0.0222, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 12:08:10,309 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:08:24,256 INFO [train.py:903] (0/4) Epoch 28, batch 4300, loss[loss=0.2021, simple_loss=0.2874, pruned_loss=0.05846, over 19746.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2839, pruned_loss=0.06029, over 3811131.45 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:08:51,750 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.988e+02 4.883e+02 5.889e+02 8.258e+02 1.553e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-03 12:09:15,652 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 12:09:26,300 INFO [train.py:903] (0/4) Epoch 28, batch 4350, loss[loss=0.2102, simple_loss=0.2975, pruned_loss=0.06151, over 19316.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05996, over 3819861.80 frames. ], batch size: 66, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:09:32,306 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 12:09:44,346 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-03 12:10:13,823 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1099, 2.0305, 1.8052, 2.1239, 1.8521, 1.8131, 1.7377, 1.9774], + device='cuda:0'), covar=tensor([0.0979, 0.1284, 0.1445, 0.0966, 0.1301, 0.0546, 0.1513, 0.0720], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0357, 0.0317, 0.0255, 0.0302, 0.0254, 0.0319, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:10:25,207 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0807, 5.5171, 3.1575, 4.8457, 1.1737, 5.7219, 5.5085, 5.7332], + device='cuda:0'), covar=tensor([0.0368, 0.0819, 0.1721, 0.0741, 0.3911, 0.0482, 0.0744, 0.0837], + device='cuda:0'), in_proj_covar=tensor([0.0527, 0.0430, 0.0514, 0.0361, 0.0410, 0.0456, 0.0452, 0.0482], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:10:30,424 INFO [train.py:903] (0/4) Epoch 28, batch 4400, loss[loss=0.2093, simple_loss=0.2991, pruned_loss=0.05976, over 19272.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05976, over 3828592.01 frames. ], batch size: 66, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:10:35,431 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:10:52,235 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. 
Duration: 25.285 +2023-04-03 12:10:57,774 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.797e+02 5.749e+02 7.341e+02 1.454e+03, threshold=1.150e+03, percent-clipped=2.0 +2023-04-03 12:11:02,383 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 12:11:31,835 INFO [train.py:903] (0/4) Epoch 28, batch 4450, loss[loss=0.2217, simple_loss=0.3038, pruned_loss=0.06979, over 19730.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05978, over 3830735.31 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:01,898 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.56 vs. limit=5.0 +2023-04-03 12:12:19,630 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:12:36,103 INFO [train.py:903] (0/4) Epoch 28, batch 4500, loss[loss=0.2142, simple_loss=0.2964, pruned_loss=0.06604, over 18213.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05907, over 3828191.81 frames. ], batch size: 83, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:04,408 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.536e+02 5.549e+02 7.562e+02 1.666e+03, threshold=1.110e+03, percent-clipped=5.0 +2023-04-03 12:13:38,230 INFO [train.py:903] (0/4) Epoch 28, batch 4550, loss[loss=0.1866, simple_loss=0.2816, pruned_loss=0.04585, over 19607.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.0597, over 3822574.28 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:44,770 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6890, 1.3406, 1.6484, 1.5193, 3.2839, 1.2695, 2.5946, 3.6971], + device='cuda:0'), covar=tensor([0.0488, 0.2844, 0.2775, 0.1923, 0.0645, 0.2470, 0.1156, 0.0252], + device='cuda:0'), in_proj_covar=tensor([0.0425, 0.0380, 0.0398, 0.0353, 0.0383, 0.0359, 0.0398, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:13:46,952 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 12:14:12,245 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 12:14:29,479 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:31,821 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:39,521 INFO [train.py:903] (0/4) Epoch 28, batch 4600, loss[loss=0.1948, simple_loss=0.2813, pruned_loss=0.05416, over 19765.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05985, over 3827540.45 frames. 
], batch size: 63, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:14:43,188 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:15:07,935 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.624e+02 5.779e+02 7.629e+02 1.899e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 12:15:08,228 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8689, 4.4718, 2.6929, 3.8950, 1.1144, 4.4025, 4.3126, 4.3439], + device='cuda:0'), covar=tensor([0.0584, 0.0917, 0.1905, 0.0833, 0.3796, 0.0679, 0.0882, 0.1052], + device='cuda:0'), in_proj_covar=tensor([0.0525, 0.0428, 0.0512, 0.0359, 0.0407, 0.0454, 0.0450, 0.0480], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:15:13,472 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.46 vs. limit=5.0 +2023-04-03 12:15:43,037 INFO [train.py:903] (0/4) Epoch 28, batch 4650, loss[loss=0.2067, simple_loss=0.2921, pruned_loss=0.0607, over 19531.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05988, over 3821305.18 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:15:56,020 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:01,295 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 12:16:12,658 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 12:16:25,660 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,726 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,937 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6507, 1.7398, 2.0465, 2.0354, 1.5437, 2.0028, 2.0188, 1.9007], + device='cuda:0'), covar=tensor([0.4374, 0.3958, 0.2056, 0.2406, 0.4158, 0.2278, 0.5349, 0.3578], + device='cuda:0'), in_proj_covar=tensor([0.0937, 0.1014, 0.0741, 0.0951, 0.0913, 0.0854, 0.0859, 0.0806], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:16:44,450 INFO [train.py:903] (0/4) Epoch 28, batch 4700, loss[loss=0.2341, simple_loss=0.307, pruned_loss=0.08063, over 19644.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.0598, over 3824559.73 frames. ], batch size: 60, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:17:07,720 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 12:17:10,870 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.848e+02 5.842e+02 7.352e+02 2.015e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-03 12:17:17,594 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:36,158 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:46,810 INFO [train.py:903] (0/4) Epoch 28, batch 4750, loss[loss=0.1799, simple_loss=0.2729, pruned_loss=0.04347, over 19612.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05957, over 3825152.19 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:18:47,829 INFO [train.py:903] (0/4) Epoch 28, batch 4800, loss[loss=0.2899, simple_loss=0.3456, pruned_loss=0.1171, over 12852.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.05979, over 3818802.68 frames. ], batch size: 136, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:19:16,004 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 5.029e+02 6.156e+02 7.557e+02 1.439e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-03 12:19:43,398 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 12:19:50,819 INFO [train.py:903] (0/4) Epoch 28, batch 4850, loss[loss=0.2367, simple_loss=0.304, pruned_loss=0.08467, over 13231.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2811, pruned_loss=0.05934, over 3808752.65 frames. ], batch size: 135, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:20:01,538 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:12,590 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 12:20:33,008 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:33,799 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 12:20:39,539 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 12:20:40,693 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 12:20:51,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 12:20:53,319 INFO [train.py:903] (0/4) Epoch 28, batch 4900, loss[loss=0.1591, simple_loss=0.2457, pruned_loss=0.0362, over 19838.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2806, pruned_loss=0.05903, over 3808186.12 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:21:05,674 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189266.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:10,810 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-03 12:21:19,662 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.742e+02 6.083e+02 7.635e+02 1.565e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-04-03 12:21:35,164 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:38,173 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189293.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:53,523 INFO [train.py:903] (0/4) Epoch 28, batch 4950, loss[loss=0.2387, simple_loss=0.3156, pruned_loss=0.08087, over 18812.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2828, pruned_loss=0.0602, over 3815711.22 frames. ], batch size: 74, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:22:10,605 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 12:22:34,627 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 12:22:41,944 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5125, 1.5570, 1.8433, 1.7377, 2.4325, 2.2393, 2.5814, 0.9050], + device='cuda:0'), covar=tensor([0.2629, 0.4640, 0.2911, 0.2133, 0.1636, 0.2332, 0.1499, 0.5339], + device='cuda:0'), in_proj_covar=tensor([0.0556, 0.0673, 0.0758, 0.0508, 0.0637, 0.0548, 0.0673, 0.0575], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:22:56,070 INFO [train.py:903] (0/4) Epoch 28, batch 5000, loss[loss=0.1591, simple_loss=0.2431, pruned_loss=0.03761, over 19616.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.281, pruned_loss=0.05944, over 3828684.31 frames. ], batch size: 50, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:23:04,873 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 12:23:08,782 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189365.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:15,753 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 12:23:24,845 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.003e+02 6.019e+02 8.012e+02 1.544e+03, threshold=1.204e+03, percent-clipped=7.0 +2023-04-03 12:23:42,149 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:59,724 INFO [train.py:903] (0/4) Epoch 28, batch 5050, loss[loss=0.1584, simple_loss=0.2348, pruned_loss=0.04102, over 19733.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2811, pruned_loss=0.05924, over 3835533.00 frames. ], batch size: 45, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:24:00,199 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:02,597 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:24,219 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:33,187 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 12:24:44,062 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189441.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:25:01,213 INFO [train.py:903] (0/4) Epoch 28, batch 5100, loss[loss=0.2094, simple_loss=0.2976, pruned_loss=0.06058, over 19466.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2814, pruned_loss=0.05896, over 3844187.76 frames. ], batch size: 64, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:25:10,106 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 12:25:12,426 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 12:25:19,115 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 12:25:27,902 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.191e+02 6.540e+02 8.236e+02 1.634e+03, threshold=1.308e+03, percent-clipped=9.0 +2023-04-03 12:26:01,392 INFO [train.py:903] (0/4) Epoch 28, batch 5150, loss[loss=0.1896, simple_loss=0.28, pruned_loss=0.04964, over 19511.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2815, pruned_loss=0.059, over 3838285.51 frames. ], batch size: 64, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:26:02,864 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:11,415 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 12:26:18,129 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:34,989 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1423, 2.0274, 1.9327, 1.7871, 1.6087, 1.7154, 0.5659, 1.0877], + device='cuda:0'), covar=tensor([0.0643, 0.0663, 0.0524, 0.0869, 0.1221, 0.0925, 0.1476, 0.1170], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0364, 0.0369, 0.0392, 0.0471, 0.0397, 0.0346, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 12:26:44,870 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:46,842 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 12:27:02,661 INFO [train.py:903] (0/4) Epoch 28, batch 5200, loss[loss=0.2245, simple_loss=0.3057, pruned_loss=0.07171, over 19681.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05939, over 3821886.61 frames. 
], batch size: 59, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:27:03,038 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189556.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:27:04,112 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3829, 3.1178, 2.3149, 2.8001, 0.7391, 3.1083, 2.9639, 3.0536], + device='cuda:0'), covar=tensor([0.1051, 0.1330, 0.2058, 0.1096, 0.3921, 0.0992, 0.1156, 0.1387], + device='cuda:0'), in_proj_covar=tensor([0.0529, 0.0429, 0.0517, 0.0360, 0.0410, 0.0456, 0.0451, 0.0483], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:27:17,229 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 12:27:30,436 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 4.850e+02 5.941e+02 7.550e+02 1.552e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-03 12:28:01,626 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 12:28:04,870 INFO [train.py:903] (0/4) Epoch 28, batch 5250, loss[loss=0.2446, simple_loss=0.3159, pruned_loss=0.08666, over 19725.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2818, pruned_loss=0.05933, over 3823566.54 frames. ], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:28:09,610 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:04,829 INFO [train.py:903] (0/4) Epoch 28, batch 5300, loss[loss=0.1742, simple_loss=0.2554, pruned_loss=0.04646, over 19381.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05977, over 3826023.98 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:29:08,428 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189658.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:12,954 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:14,979 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189664.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:21,477 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 12:29:31,784 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.811e+02 6.188e+02 7.440e+02 1.460e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-03 12:29:44,186 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:46,156 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189689.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:05,519 INFO [train.py:903] (0/4) Epoch 28, batch 5350, loss[loss=0.1919, simple_loss=0.2861, pruned_loss=0.04888, over 17151.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06024, over 3816153.84 frames. 
], batch size: 101, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:30:09,128 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:17,203 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8618, 2.0021, 2.2539, 2.4296, 1.9032, 2.3580, 2.2153, 2.0730], + device='cuda:0'), covar=tensor([0.4141, 0.3789, 0.1859, 0.2479, 0.3949, 0.2172, 0.4776, 0.3240], + device='cuda:0'), in_proj_covar=tensor([0.0940, 0.1018, 0.0745, 0.0954, 0.0916, 0.0858, 0.0863, 0.0812], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:30:17,966 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4419, 2.8280, 2.9874, 3.0125, 1.3622, 2.8421, 2.4695, 2.5988], + device='cuda:0'), covar=tensor([0.2949, 0.2415, 0.1502, 0.2019, 0.7715, 0.2705, 0.1574, 0.2484], + device='cuda:0'), in_proj_covar=tensor([0.0814, 0.0782, 0.0991, 0.0870, 0.0861, 0.0756, 0.0583, 0.0922], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 12:30:25,502 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:29,868 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:38,363 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 12:30:54,233 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:04,004 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0377, 3.6971, 2.4962, 3.2822, 0.7846, 3.6735, 3.5342, 3.6265], + device='cuda:0'), covar=tensor([0.0793, 0.1024, 0.2036, 0.0978, 0.3956, 0.0781, 0.1016, 0.1228], + device='cuda:0'), in_proj_covar=tensor([0.0530, 0.0430, 0.0518, 0.0361, 0.0411, 0.0457, 0.0452, 0.0483], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:31:04,996 INFO [train.py:903] (0/4) Epoch 28, batch 5400, loss[loss=0.1812, simple_loss=0.2727, pruned_loss=0.04487, over 19605.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.0604, over 3820004.68 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:31:15,157 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:33,364 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 4.839e+02 5.999e+02 7.745e+02 2.007e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 12:31:44,951 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:56,145 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:07,667 INFO [train.py:903] (0/4) Epoch 28, batch 5450, loss[loss=0.2062, simple_loss=0.2927, pruned_loss=0.05981, over 19739.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2839, pruned_loss=0.06061, over 3803633.78 frames. 
], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:32:15,159 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189812.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:27,696 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:30,083 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189824.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:45,824 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:08,051 INFO [train.py:903] (0/4) Epoch 28, batch 5500, loss[loss=0.1573, simple_loss=0.2368, pruned_loss=0.03886, over 19742.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06042, over 3809346.05 frames. ], batch size: 45, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:33:17,135 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:30,499 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 12:33:35,035 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 4.755e+02 5.876e+02 8.325e+02 1.733e+03, threshold=1.175e+03, percent-clipped=4.0 +2023-04-03 12:34:07,021 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7929, 1.9178, 2.1456, 2.2272, 1.6737, 2.1405, 2.0987, 1.9733], + device='cuda:0'), covar=tensor([0.4237, 0.3670, 0.1994, 0.2509, 0.4091, 0.2286, 0.5283, 0.3566], + device='cuda:0'), in_proj_covar=tensor([0.0940, 0.1017, 0.0745, 0.0954, 0.0917, 0.0858, 0.0862, 0.0811], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:34:10,042 INFO [train.py:903] (0/4) Epoch 28, batch 5550, loss[loss=0.1567, simple_loss=0.2411, pruned_loss=0.03612, over 19798.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06095, over 3819012.58 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:34:17,019 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 12:35:06,754 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 12:35:11,463 INFO [train.py:903] (0/4) Epoch 28, batch 5600, loss[loss=0.2136, simple_loss=0.2992, pruned_loss=0.06396, over 19621.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2831, pruned_loss=0.05996, over 3832652.31 frames. 
], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:35:40,028 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.597e+02 5.831e+02 7.652e+02 2.230e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 12:35:40,321 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:35:44,628 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:06,871 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-190000.pt +2023-04-03 12:36:11,923 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190002.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:17,374 INFO [train.py:903] (0/4) Epoch 28, batch 5650, loss[loss=0.2039, simple_loss=0.2901, pruned_loss=0.05881, over 19743.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2826, pruned_loss=0.05944, over 3833018.77 frames. ], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:36:17,791 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:04,788 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 12:37:18,832 INFO [train.py:903] (0/4) Epoch 28, batch 5700, loss[loss=0.2177, simple_loss=0.3032, pruned_loss=0.06607, over 19504.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05974, over 3823424.39 frames. ], batch size: 64, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:37:26,614 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-04-03 12:37:27,064 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5072, 3.7264, 4.0322, 4.0510, 2.3320, 3.7703, 3.4491, 3.8217], + device='cuda:0'), covar=tensor([0.1569, 0.2885, 0.0690, 0.0803, 0.4811, 0.1521, 0.0668, 0.1094], + device='cuda:0'), in_proj_covar=tensor([0.0822, 0.0788, 0.0999, 0.0879, 0.0869, 0.0765, 0.0589, 0.0928], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 12:37:31,550 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190066.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:45,569 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.283e+02 6.430e+02 7.892e+02 1.365e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-04-03 12:37:49,415 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190080.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:54,212 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2839, 2.3813, 2.4977, 3.0898, 2.4935, 3.0077, 2.4843, 2.2809], + device='cuda:0'), covar=tensor([0.4309, 0.3901, 0.2051, 0.2481, 0.3977, 0.2153, 0.5007, 0.3530], + device='cuda:0'), in_proj_covar=tensor([0.0940, 0.1018, 0.0745, 0.0955, 0.0917, 0.0858, 0.0862, 0.0811], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:38:02,430 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,051 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,824 INFO [train.py:903] (0/4) Epoch 28, batch 5750, loss[loss=0.177, simple_loss=0.2595, pruned_loss=0.04727, over 19027.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05961, over 3816413.16 frames. ], batch size: 42, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:38:24,226 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 12:38:32,488 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 12:38:35,207 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:37,078 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 12:38:45,177 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4469, 1.5395, 1.8927, 1.6946, 2.5806, 2.2224, 2.7745, 1.0794], + device='cuda:0'), covar=tensor([0.2557, 0.4353, 0.2773, 0.2038, 0.1505, 0.2242, 0.1400, 0.4930], + device='cuda:0'), in_proj_covar=tensor([0.0558, 0.0675, 0.0760, 0.0512, 0.0640, 0.0551, 0.0674, 0.0579], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:39:23,781 INFO [train.py:903] (0/4) Epoch 28, batch 5800, loss[loss=0.1877, simple_loss=0.2649, pruned_loss=0.05528, over 19481.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05979, over 3825547.30 frames. 
], batch size: 49, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:39:25,231 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190157.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:39:41,042 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190169.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:39:52,006 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.681e+02 5.738e+02 7.022e+02 1.341e+03, threshold=1.148e+03, percent-clipped=1.0 +2023-04-03 12:39:56,698 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:01,278 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0812, 5.5078, 3.2412, 4.8426, 1.4257, 5.7180, 5.5361, 5.7079], + device='cuda:0'), covar=tensor([0.0374, 0.0931, 0.1802, 0.0770, 0.3751, 0.0516, 0.0798, 0.1045], + device='cuda:0'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0411, 0.0458, 0.0451, 0.0484], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:40:26,166 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190205.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:27,628 INFO [train.py:903] (0/4) Epoch 28, batch 5850, loss[loss=0.175, simple_loss=0.2498, pruned_loss=0.05005, over 19742.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2835, pruned_loss=0.05975, over 3821835.05 frames. ], batch size: 47, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:02,540 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:29,993 INFO [train.py:903] (0/4) Epoch 28, batch 5900, loss[loss=0.2088, simple_loss=0.2935, pruned_loss=0.06211, over 19385.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2831, pruned_loss=0.05938, over 3828594.77 frames. ], batch size: 70, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:32,305 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 12:41:32,753 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0182, 1.9528, 1.9126, 1.7036, 1.6455, 1.6777, 0.4992, 0.9136], + device='cuda:0'), covar=tensor([0.0673, 0.0670, 0.0445, 0.0757, 0.1210, 0.0830, 0.1418, 0.1195], + device='cuda:0'), in_proj_covar=tensor([0.0367, 0.0366, 0.0367, 0.0393, 0.0471, 0.0398, 0.0346, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 12:41:33,912 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190259.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:54,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 12:41:56,488 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.583e+02 5.674e+02 7.520e+02 1.844e+03, threshold=1.135e+03, percent-clipped=9.0 +2023-04-03 12:42:24,248 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. 
limit=2.0 +2023-04-03 12:42:31,979 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8433, 1.9514, 2.1851, 2.3305, 1.7894, 2.2331, 2.1676, 2.0635], + device='cuda:0'), covar=tensor([0.4131, 0.3853, 0.2044, 0.2406, 0.4012, 0.2306, 0.5128, 0.3475], + device='cuda:0'), in_proj_covar=tensor([0.0938, 0.1017, 0.0746, 0.0955, 0.0916, 0.0857, 0.0862, 0.0810], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:42:32,653 INFO [train.py:903] (0/4) Epoch 28, batch 5950, loss[loss=0.2234, simple_loss=0.3024, pruned_loss=0.07224, over 18730.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2825, pruned_loss=0.05927, over 3842259.75 frames. ], batch size: 74, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:43:07,998 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190334.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:43:13,912 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6264, 1.5104, 1.6251, 1.6313, 3.1874, 1.1721, 2.4580, 3.7049], + device='cuda:0'), covar=tensor([0.0495, 0.2669, 0.2829, 0.1788, 0.0688, 0.2524, 0.1305, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0425, 0.0379, 0.0399, 0.0354, 0.0383, 0.0359, 0.0399, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:43:34,404 INFO [train.py:903] (0/4) Epoch 28, batch 6000, loss[loss=0.1956, simple_loss=0.2853, pruned_loss=0.05302, over 19534.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05894, over 3824820.67 frames. ], batch size: 56, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:43:34,404 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 12:43:48,500 INFO [train.py:937] (0/4) Epoch 28, validation: loss=0.1668, simple_loss=0.2663, pruned_loss=0.03368, over 944034.00 frames. +2023-04-03 12:43:48,501 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 12:44:08,734 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:15,152 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 4.800e+02 5.909e+02 8.119e+02 1.607e+03, threshold=1.182e+03, percent-clipped=4.0 +2023-04-03 12:44:41,353 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:50,436 INFO [train.py:903] (0/4) Epoch 28, batch 6050, loss[loss=0.2895, simple_loss=0.3517, pruned_loss=0.1137, over 18603.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.05933, over 3821870.68 frames. 
], batch size: 84, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:04,557 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:11,941 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1333, 5.5967, 2.9027, 4.8824, 1.3330, 5.8376, 5.5603, 5.7521], + device='cuda:0'), covar=tensor([0.0382, 0.0802, 0.1992, 0.0802, 0.3611, 0.0463, 0.0770, 0.0914], + device='cuda:0'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0411, 0.0458, 0.0450, 0.0483], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:45:28,529 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190437.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:50,732 INFO [train.py:903] (0/4) Epoch 28, batch 6100, loss[loss=0.1851, simple_loss=0.2558, pruned_loss=0.05723, over 19764.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2818, pruned_loss=0.05883, over 3833359.26 frames. ], batch size: 47, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:57,913 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190461.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:58,976 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:17,944 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 4.836e+02 5.950e+02 7.277e+02 1.439e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 12:46:21,259 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. limit=5.0 +2023-04-03 12:46:28,780 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:46,946 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190501.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:46:53,523 INFO [train.py:903] (0/4) Epoch 28, batch 6150, loss[loss=0.1802, simple_loss=0.2545, pruned_loss=0.05294, over 19770.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2817, pruned_loss=0.05909, over 3832239.97 frames. ], batch size: 46, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:47:02,041 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:47:23,080 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 12:47:56,358 INFO [train.py:903] (0/4) Epoch 28, batch 6200, loss[loss=0.2379, simple_loss=0.3168, pruned_loss=0.07949, over 19061.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05905, over 3821871.25 frames. ], batch size: 69, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:48:23,170 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.897e+02 5.800e+02 7.277e+02 1.783e+03, threshold=1.160e+03, percent-clipped=5.0 +2023-04-03 12:48:59,514 INFO [train.py:903] (0/4) Epoch 28, batch 6250, loss[loss=0.224, simple_loss=0.302, pruned_loss=0.073, over 17549.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2814, pruned_loss=0.05866, over 3814422.76 frames. 
], batch size: 101, lr: 2.90e-03, grad_scale: 16.0 +2023-04-03 12:49:03,304 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190609.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:49:11,045 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190616.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:49:26,251 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:27,618 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190628.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:31,959 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 12:49:58,804 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 12:50:00,489 INFO [train.py:903] (0/4) Epoch 28, batch 6300, loss[loss=0.1881, simple_loss=0.2753, pruned_loss=0.05048, over 19650.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05884, over 3823742.01 frames. ], batch size: 55, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:50:28,870 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:50:29,806 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.803e+02 6.276e+02 7.910e+02 2.564e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 12:50:47,619 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:51:04,091 INFO [train.py:903] (0/4) Epoch 28, batch 6350, loss[loss=0.2119, simple_loss=0.3019, pruned_loss=0.061, over 19526.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2807, pruned_loss=0.05823, over 3819297.94 frames. ], batch size: 56, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:06,519 INFO [train.py:903] (0/4) Epoch 28, batch 6400, loss[loss=0.2081, simple_loss=0.2984, pruned_loss=0.05892, over 19531.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2805, pruned_loss=0.05787, over 3828454.93 frames. ], batch size: 54, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:13,578 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:52:36,099 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.755e+02 6.032e+02 8.092e+02 1.400e+03, threshold=1.206e+03, percent-clipped=2.0 +2023-04-03 12:52:52,684 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:53:08,682 INFO [train.py:903] (0/4) Epoch 28, batch 6450, loss[loss=0.1832, simple_loss=0.2735, pruned_loss=0.0465, over 19653.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2804, pruned_loss=0.05804, over 3828085.94 frames. ], batch size: 55, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:53:09,393 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 12:53:55,217 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 12:54:10,281 INFO [train.py:903] (0/4) Epoch 28, batch 6500, loss[loss=0.1986, simple_loss=0.2852, pruned_loss=0.05601, over 17380.00 frames. 
], tot_loss[loss=0.1986, simple_loss=0.2808, pruned_loss=0.05821, over 3822605.27 frames. ], batch size: 101, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:54:17,926 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 12:54:32,041 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190872.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:54:36,560 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:36,683 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8373, 1.9195, 2.0740, 2.3281, 1.9071, 2.2698, 2.1091, 2.0071], + device='cuda:0'), covar=tensor([0.3542, 0.3134, 0.1760, 0.2036, 0.3285, 0.1827, 0.3859, 0.2731], + device='cuda:0'), in_proj_covar=tensor([0.0939, 0.1018, 0.0746, 0.0954, 0.0915, 0.0857, 0.0861, 0.0809], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 12:54:40,960 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.758e+02 5.991e+02 7.680e+02 1.999e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-03 12:54:47,189 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:49,298 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4559, 2.3785, 2.1944, 2.6245, 2.3471, 2.0194, 1.9808, 2.4491], + device='cuda:0'), covar=tensor([0.1004, 0.1623, 0.1443, 0.0988, 0.1336, 0.0583, 0.1564, 0.0687], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0358, 0.0316, 0.0256, 0.0304, 0.0253, 0.0319, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:55:01,700 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:55:13,652 INFO [train.py:903] (0/4) Epoch 28, batch 6550, loss[loss=0.1938, simple_loss=0.2693, pruned_loss=0.05916, over 19562.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05896, over 3817992.43 frames. ], batch size: 52, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:55:18,344 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:55:45,215 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0122, 3.6774, 2.6401, 3.2023, 0.6932, 3.6336, 3.4811, 3.5884], + device='cuda:0'), covar=tensor([0.0811, 0.1124, 0.1919, 0.1059, 0.4193, 0.0808, 0.1028, 0.1254], + device='cuda:0'), in_proj_covar=tensor([0.0534, 0.0433, 0.0521, 0.0362, 0.0411, 0.0459, 0.0452, 0.0485], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 12:56:14,193 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190953.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:56:17,101 INFO [train.py:903] (0/4) Epoch 28, batch 6600, loss[loss=0.2154, simple_loss=0.3005, pruned_loss=0.0651, over 19519.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.05908, over 3832716.07 frames. 
], batch size: 56, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:56:35,867 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190971.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:48,224 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.870e+02 6.050e+02 8.150e+02 2.542e+03, threshold=1.210e+03, percent-clipped=13.0 +2023-04-03 12:56:50,203 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 12:56:53,412 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2712, 1.3458, 1.9338, 1.7062, 2.8047, 4.6013, 4.4427, 5.1804], + device='cuda:0'), covar=tensor([0.1940, 0.5216, 0.4516, 0.2599, 0.0828, 0.0227, 0.0233, 0.0220], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0335, 0.0367, 0.0273, 0.0257, 0.0198, 0.0222, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 12:57:19,914 INFO [train.py:903] (0/4) Epoch 28, batch 6650, loss[loss=0.2339, simple_loss=0.3189, pruned_loss=0.07449, over 19540.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.0593, over 3837826.00 frames. ], batch size: 56, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:57:54,087 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. limit=5.0 +2023-04-03 12:57:59,368 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:14,533 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:22,325 INFO [train.py:903] (0/4) Epoch 28, batch 6700, loss[loss=0.1689, simple_loss=0.2476, pruned_loss=0.04506, over 19749.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2819, pruned_loss=0.05914, over 3828178.91 frames. ], batch size: 46, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:58:38,427 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191068.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:58:45,532 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191074.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:52,989 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.245e+02 6.198e+02 8.145e+02 2.088e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 12:58:58,999 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:59,066 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:59:21,821 INFO [train.py:903] (0/4) Epoch 28, batch 6750, loss[loss=0.175, simple_loss=0.2609, pruned_loss=0.04454, over 19668.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2832, pruned_loss=0.05978, over 3805924.69 frames. 
], batch size: 53, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:59:51,524 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:11,310 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:13,715 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:17,983 INFO [train.py:903] (0/4) Epoch 28, batch 6800, loss[loss=0.1961, simple_loss=0.2794, pruned_loss=0.05637, over 19536.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2838, pruned_loss=0.06011, over 3816233.78 frames. ], batch size: 54, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 13:00:19,490 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:45,263 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.099e+02 6.292e+02 8.518e+02 1.501e+03, threshold=1.258e+03, percent-clipped=3.0 +2023-04-03 13:00:48,674 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-28.pt +2023-04-03 13:01:05,957 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 13:01:06,445 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 13:01:08,828 INFO [train.py:903] (0/4) Epoch 29, batch 0, loss[loss=0.2003, simple_loss=0.2883, pruned_loss=0.05617, over 18823.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2883, pruned_loss=0.05617, over 18823.00 frames. ], batch size: 74, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:01:08,829 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 13:01:20,496 INFO [train.py:937] (0/4) Epoch 29, validation: loss=0.1669, simple_loss=0.2669, pruned_loss=0.03339, over 944034.00 frames. +2023-04-03 13:01:20,497 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 13:01:31,763 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 13:01:43,687 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191203.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:01:46,195 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6549, 1.7459, 2.0120, 1.9767, 1.5393, 1.9562, 2.0154, 1.8810], + device='cuda:0'), covar=tensor([0.4020, 0.3599, 0.2026, 0.2367, 0.3737, 0.2223, 0.5053, 0.3505], + device='cuda:0'), in_proj_covar=tensor([0.0946, 0.1023, 0.0750, 0.0960, 0.0921, 0.0863, 0.0868, 0.0815], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 13:02:19,669 INFO [train.py:903] (0/4) Epoch 29, batch 50, loss[loss=0.202, simple_loss=0.2879, pruned_loss=0.05809, over 19742.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2811, pruned_loss=0.05866, over 872862.15 frames. ], batch size: 63, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:02:55,005 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-03 13:03:15,130 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.210e+02 6.215e+02 8.429e+02 1.754e+03, threshold=1.243e+03, percent-clipped=6.0 +2023-04-03 13:03:18,590 INFO [train.py:903] (0/4) Epoch 29, batch 100, loss[loss=0.1833, simple_loss=0.2713, pruned_loss=0.04765, over 19540.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2822, pruned_loss=0.0588, over 1526969.76 frames. ], batch size: 56, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:03:33,067 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 13:04:07,329 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191324.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:04:19,667 INFO [train.py:903] (0/4) Epoch 29, batch 150, loss[loss=0.1764, simple_loss=0.2502, pruned_loss=0.05131, over 19301.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2833, pruned_loss=0.05964, over 2013027.66 frames. ], batch size: 44, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:04:29,084 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191342.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:04:36,951 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191349.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:04:59,838 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:15,368 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.272e+02 6.303e+02 7.742e+02 1.475e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-03 13:05:15,449 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 13:05:18,068 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:18,900 INFO [train.py:903] (0/4) Epoch 29, batch 200, loss[loss=0.2178, simple_loss=0.2946, pruned_loss=0.07046, over 19795.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2836, pruned_loss=0.05994, over 2425215.77 frames. ], batch size: 56, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:05:37,012 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3029, 2.9324, 2.2858, 2.3180, 2.2971, 2.5119, 0.8850, 2.1395], + device='cuda:0'), covar=tensor([0.0709, 0.0625, 0.0744, 0.1228, 0.1022, 0.1191, 0.1610, 0.1147], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0366, 0.0367, 0.0393, 0.0471, 0.0398, 0.0346, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:05:48,138 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:13,703 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191430.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:17,621 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:20,059 INFO [train.py:903] (0/4) Epoch 29, batch 250, loss[loss=0.1883, simple_loss=0.2725, pruned_loss=0.05204, over 19664.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2816, pruned_loss=0.05901, over 2748616.44 frames. 
], batch size: 53, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:06:41,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-03 13:07:16,621 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.311e+02 6.503e+02 8.385e+02 2.446e+03, threshold=1.301e+03, percent-clipped=7.0 +2023-04-03 13:07:20,125 INFO [train.py:903] (0/4) Epoch 29, batch 300, loss[loss=0.1947, simple_loss=0.2739, pruned_loss=0.05776, over 19833.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2819, pruned_loss=0.05939, over 2979696.08 frames. ], batch size: 52, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:33,147 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:19,686 INFO [train.py:903] (0/4) Epoch 29, batch 350, loss[loss=0.1992, simple_loss=0.2842, pruned_loss=0.05711, over 19335.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.0597, over 3170914.21 frames. ], batch size: 66, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:08:27,197 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 13:08:33,067 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191545.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:35,154 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:09:16,624 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.772e+02 5.697e+02 6.938e+02 1.378e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 13:09:19,962 INFO [train.py:903] (0/4) Epoch 29, batch 400, loss[loss=0.2231, simple_loss=0.3019, pruned_loss=0.07212, over 19657.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2809, pruned_loss=0.05896, over 3322485.48 frames. ], batch size: 58, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:09:50,409 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191609.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:11,890 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-03 13:10:19,945 INFO [train.py:903] (0/4) Epoch 29, batch 450, loss[loss=0.235, simple_loss=0.3042, pruned_loss=0.08287, over 13449.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2817, pruned_loss=0.05911, over 3434051.01 frames. ], batch size: 135, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:10:55,256 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:57,246 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 13:10:58,386 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 13:11:17,385 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.702e+02 5.973e+02 7.083e+02 1.398e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 13:11:20,347 INFO [train.py:903] (0/4) Epoch 29, batch 500, loss[loss=0.1817, simple_loss=0.2593, pruned_loss=0.0521, over 19785.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2813, pruned_loss=0.05891, over 3518217.97 frames. 
], batch size: 45, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:12:13,366 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:12:21,066 INFO [train.py:903] (0/4) Epoch 29, batch 550, loss[loss=0.2079, simple_loss=0.2914, pruned_loss=0.06215, over 18220.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2816, pruned_loss=0.05947, over 3581040.59 frames. ], batch size: 83, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:12:45,860 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2717, 2.0061, 1.9369, 2.1795, 1.7816, 1.8891, 1.8086, 2.1956], + device='cuda:0'), covar=tensor([0.1067, 0.1536, 0.1503, 0.1097, 0.1549, 0.0578, 0.1563, 0.0713], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0362, 0.0320, 0.0259, 0.0307, 0.0256, 0.0323, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 13:13:04,063 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8793, 1.1925, 1.5389, 0.6472, 1.8558, 2.2385, 1.9701, 2.3331], + device='cuda:0'), covar=tensor([0.1577, 0.3629, 0.3144, 0.2846, 0.0851, 0.0346, 0.0371, 0.0443], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0334, 0.0366, 0.0272, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 13:13:04,169 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2699, 1.3050, 1.4462, 1.4091, 1.6527, 1.7366, 1.6479, 0.6429], + device='cuda:0'), covar=tensor([0.2293, 0.3890, 0.2448, 0.1860, 0.1543, 0.2155, 0.1432, 0.4968], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0672, 0.0759, 0.0509, 0.0635, 0.0546, 0.0671, 0.0575], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 13:13:06,427 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4467, 1.4989, 1.8737, 1.7514, 2.7986, 2.3295, 2.8950, 1.4504], + device='cuda:0'), covar=tensor([0.2599, 0.4426, 0.2761, 0.1921, 0.1372, 0.2174, 0.1400, 0.4341], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0672, 0.0759, 0.0509, 0.0635, 0.0546, 0.0671, 0.0575], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 13:13:19,228 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.419e+02 6.471e+02 8.968e+02 1.855e+03, threshold=1.294e+03, percent-clipped=10.0 +2023-04-03 13:13:22,383 INFO [train.py:903] (0/4) Epoch 29, batch 600, loss[loss=0.1732, simple_loss=0.2562, pruned_loss=0.04511, over 19752.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05938, over 3631455.58 frames. ], batch size: 51, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:41,790 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:02,329 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 13:14:13,249 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191826.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:22,136 INFO [train.py:903] (0/4) Epoch 29, batch 650, loss[loss=0.1674, simple_loss=0.2482, pruned_loss=0.04333, over 19760.00 frames. 
], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06058, over 3655996.87 frames. ], batch size: 47, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:14:31,527 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:42,693 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-03 13:15:01,021 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:18,381 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.883e+02 6.286e+02 7.850e+02 2.365e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-03 13:15:21,601 INFO [train.py:903] (0/4) Epoch 29, batch 700, loss[loss=0.1974, simple_loss=0.2742, pruned_loss=0.06031, over 19492.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06048, over 3702764.79 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:15:29,636 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191890.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:03,817 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191918.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:23,392 INFO [train.py:903] (0/4) Epoch 29, batch 750, loss[loss=0.2083, simple_loss=0.2925, pruned_loss=0.06211, over 19546.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05944, over 3721460.19 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:16:34,311 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:17:21,355 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.754e+02 5.591e+02 7.301e+02 1.189e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-03 13:17:23,701 INFO [train.py:903] (0/4) Epoch 29, batch 800, loss[loss=0.2078, simple_loss=0.2919, pruned_loss=0.06183, over 19729.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2814, pruned_loss=0.059, over 3749317.86 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:17:24,007 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3774, 3.8577, 3.9771, 3.9827, 1.6141, 3.8097, 3.2877, 3.7413], + device='cuda:0'), covar=tensor([0.1664, 0.0882, 0.0705, 0.0849, 0.6020, 0.0972, 0.0777, 0.1200], + device='cuda:0'), in_proj_covar=tensor([0.0821, 0.0786, 0.0994, 0.0877, 0.0864, 0.0762, 0.0588, 0.0927], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 13:17:36,666 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 13:17:42,712 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-192000.pt +2023-04-03 13:18:16,913 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3258, 2.1386, 2.0815, 2.1813, 1.9153, 1.9373, 1.8602, 2.1940], + device='cuda:0'), covar=tensor([0.0951, 0.1391, 0.1347, 0.1096, 0.1405, 0.0558, 0.1503, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0362, 0.0321, 0.0260, 0.0307, 0.0257, 0.0323, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 13:18:26,430 INFO [train.py:903] (0/4) Epoch 29, batch 850, loss[loss=0.1794, simple_loss=0.2618, pruned_loss=0.04853, over 19485.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2807, pruned_loss=0.05851, over 3763863.88 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:17,573 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 13:19:23,135 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 4.771e+02 5.573e+02 7.809e+02 2.111e+03, threshold=1.115e+03, percent-clipped=7.0 +2023-04-03 13:19:25,393 INFO [train.py:903] (0/4) Epoch 29, batch 900, loss[loss=0.1781, simple_loss=0.2655, pruned_loss=0.04541, over 19537.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2814, pruned_loss=0.05895, over 3780780.47 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:43,249 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:07,630 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-03 13:20:09,363 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192120.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:12,946 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192123.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:26,583 INFO [train.py:903] (0/4) Epoch 29, batch 950, loss[loss=0.1869, simple_loss=0.2741, pruned_loss=0.04982, over 19777.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2809, pruned_loss=0.05865, over 3781050.28 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:20:30,184 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 13:21:13,667 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:21:24,651 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.272e+02 6.130e+02 7.632e+02 1.213e+03, threshold=1.226e+03, percent-clipped=2.0 +2023-04-03 13:21:27,729 INFO [train.py:903] (0/4) Epoch 29, batch 1000, loss[loss=0.1573, simple_loss=0.2443, pruned_loss=0.03512, over 19402.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2808, pruned_loss=0.05858, over 3792073.79 frames. ], batch size: 48, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:23,176 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 13:22:27,654 INFO [train.py:903] (0/4) Epoch 29, batch 1050, loss[loss=0.1964, simple_loss=0.2865, pruned_loss=0.05322, over 19657.00 frames. 
], tot_loss[loss=0.1986, simple_loss=0.2802, pruned_loss=0.05852, over 3796295.25 frames. ], batch size: 58, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:31,117 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192237.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:34,317 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:23:02,357 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 13:23:25,659 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 4.755e+02 5.709e+02 7.263e+02 1.610e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 13:23:28,075 INFO [train.py:903] (0/4) Epoch 29, batch 1100, loss[loss=0.1808, simple_loss=0.2625, pruned_loss=0.04959, over 19591.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2806, pruned_loss=0.05871, over 3793710.27 frames. ], batch size: 52, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:29,310 INFO [train.py:903] (0/4) Epoch 29, batch 1150, loss[loss=0.1828, simple_loss=0.2711, pruned_loss=0.04726, over 19687.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2807, pruned_loss=0.05859, over 3804476.52 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:53,834 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5786, 1.5425, 1.8954, 1.8280, 2.7352, 2.4404, 2.9183, 1.4433], + device='cuda:0'), covar=tensor([0.2490, 0.4485, 0.2808, 0.1924, 0.1568, 0.2070, 0.1447, 0.4508], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0672, 0.0760, 0.0511, 0.0635, 0.0545, 0.0670, 0.0574], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:25:27,745 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 4.839e+02 5.803e+02 7.271e+02 1.538e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 13:25:30,850 INFO [train.py:903] (0/4) Epoch 29, batch 1200, loss[loss=0.1972, simple_loss=0.2825, pruned_loss=0.05598, over 19690.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2811, pruned_loss=0.05907, over 3802958.49 frames. ], batch size: 60, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:01,109 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:01,911 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 13:26:10,325 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:30,220 INFO [train.py:903] (0/4) Epoch 29, batch 1250, loss[loss=0.1824, simple_loss=0.2726, pruned_loss=0.04609, over 18203.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2809, pruned_loss=0.05892, over 3794555.45 frames. 
], batch size: 83, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:50,089 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192451.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:07,149 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:28,110 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.043e+02 5.075e+02 6.202e+02 7.494e+02 1.680e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 13:27:30,144 INFO [train.py:903] (0/4) Epoch 29, batch 1300, loss[loss=0.1875, simple_loss=0.2815, pruned_loss=0.04676, over 19512.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2818, pruned_loss=0.05916, over 3793275.64 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:28:10,143 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:28:14,833 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2301, 2.0941, 1.9529, 1.8727, 1.6310, 1.7372, 0.5581, 1.1780], + device='cuda:0'), covar=tensor([0.0655, 0.0691, 0.0549, 0.0947, 0.1264, 0.1086, 0.1529, 0.1220], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0365, 0.0367, 0.0391, 0.0471, 0.0398, 0.0345, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:28:15,767 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3470, 3.9808, 2.5490, 3.5211, 1.0229, 3.9287, 3.8120, 3.8859], + device='cuda:0'), covar=tensor([0.0724, 0.1031, 0.2096, 0.0921, 0.3935, 0.0747, 0.1014, 0.1194], + device='cuda:0'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0410, 0.0457, 0.0452, 0.0482], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:28:30,908 INFO [train.py:903] (0/4) Epoch 29, batch 1350, loss[loss=0.1705, simple_loss=0.2528, pruned_loss=0.04407, over 19631.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.0597, over 3802828.02 frames. ], batch size: 50, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:28:45,937 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6356, 1.3259, 1.4504, 2.2449, 1.6015, 1.7386, 1.7960, 1.5849], + device='cuda:0'), covar=tensor([0.0884, 0.1115, 0.1073, 0.0695, 0.0909, 0.0885, 0.0909, 0.0814], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0222, 0.0229, 0.0239, 0.0226, 0.0214, 0.0187, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 13:29:25,922 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192579.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:27,901 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:28,924 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.533e+02 7.774e+02 1.028e+03 2.542e+03, threshold=1.555e+03, percent-clipped=13.0 +2023-04-03 13:29:30,284 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:32,193 INFO [train.py:903] (0/4) Epoch 29, batch 1400, loss[loss=0.1998, simple_loss=0.276, pruned_loss=0.06187, over 19402.00 frames. 
], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05992, over 3809732.65 frames. ], batch size: 48, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:30:30,380 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:32,433 INFO [train.py:903] (0/4) Epoch 29, batch 1450, loss[loss=0.1871, simple_loss=0.2704, pruned_loss=0.05194, over 19578.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.05937, over 3820049.80 frames. ], batch size: 61, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:30:32,467 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 13:30:32,707 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:46,938 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2269, 1.4113, 1.9666, 1.7467, 3.0772, 4.5419, 4.3991, 4.9830], + device='cuda:0'), covar=tensor([0.1753, 0.4041, 0.3599, 0.2388, 0.0645, 0.0214, 0.0183, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0335, 0.0368, 0.0274, 0.0257, 0.0198, 0.0221, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 13:31:29,875 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.834e+02 5.734e+02 7.377e+02 1.406e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 13:31:32,942 INFO [train.py:903] (0/4) Epoch 29, batch 1500, loss[loss=0.181, simple_loss=0.264, pruned_loss=0.04902, over 19595.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05927, over 3829834.43 frames. ], batch size: 50, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:31:41,373 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192691.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:47,423 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192696.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:50,540 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:59,313 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7837, 1.6713, 1.6535, 2.3311, 1.7049, 1.9741, 2.0620, 1.8178], + device='cuda:0'), covar=tensor([0.0815, 0.0897, 0.0981, 0.0708, 0.0875, 0.0821, 0.0867, 0.0708], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0221, 0.0228, 0.0239, 0.0225, 0.0214, 0.0187, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 13:32:32,342 INFO [train.py:903] (0/4) Epoch 29, batch 1550, loss[loss=0.2319, simple_loss=0.3067, pruned_loss=0.07857, over 18875.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05964, over 3835116.02 frames. 
], batch size: 74, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:32:56,574 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:57,533 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:05,619 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:31,827 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 5.130e+02 6.287e+02 7.695e+02 1.691e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-03 13:33:34,856 INFO [train.py:903] (0/4) Epoch 29, batch 1600, loss[loss=0.2026, simple_loss=0.2677, pruned_loss=0.06874, over 15933.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05935, over 3824285.98 frames. ], batch size: 35, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:33:47,553 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:58,469 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 13:34:34,143 INFO [train.py:903] (0/4) Epoch 29, batch 1650, loss[loss=0.1933, simple_loss=0.2898, pruned_loss=0.04842, over 19540.00 frames. ], tot_loss[loss=0.201, simple_loss=0.283, pruned_loss=0.05955, over 3836313.72 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:34:35,736 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:34:51,830 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192849.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:35:03,992 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:15,832 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:23,804 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:30,385 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 4.944e+02 5.872e+02 7.824e+02 1.796e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 13:35:32,723 INFO [train.py:903] (0/4) Epoch 29, batch 1700, loss[loss=0.2444, simple_loss=0.3151, pruned_loss=0.08686, over 19618.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.05984, over 3838858.79 frames. 
], batch size: 57, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:35:38,434 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:39,569 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1918, 2.0855, 1.9197, 1.8207, 1.6301, 1.7570, 0.6955, 1.1784], + device='cuda:0'), covar=tensor([0.0722, 0.0675, 0.0546, 0.0953, 0.1172, 0.1066, 0.1472, 0.1223], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0364, 0.0368, 0.0393, 0.0472, 0.0398, 0.0347, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:36:04,834 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:08,224 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:13,393 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 13:36:24,706 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.0616, 5.1519, 5.8641, 5.9089, 2.2230, 5.5567, 4.6880, 5.5388], + device='cuda:0'), covar=tensor([0.1727, 0.0812, 0.0545, 0.0599, 0.6004, 0.0868, 0.0613, 0.1070], + device='cuda:0'), in_proj_covar=tensor([0.0824, 0.0788, 0.0997, 0.0877, 0.0869, 0.0765, 0.0588, 0.0930], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 13:36:32,300 INFO [train.py:903] (0/4) Epoch 29, batch 1750, loss[loss=0.1881, simple_loss=0.2792, pruned_loss=0.04853, over 17300.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.05969, over 3839344.51 frames. ], batch size: 101, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:36:55,768 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:57,976 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:24,841 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:25,776 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:27,191 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:31,714 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 4.741e+02 6.117e+02 7.797e+02 1.758e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 13:37:34,999 INFO [train.py:903] (0/4) Epoch 29, batch 1800, loss[loss=0.182, simple_loss=0.2741, pruned_loss=0.04494, over 18855.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.283, pruned_loss=0.06012, over 3808841.26 frames. 
], batch size: 74, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:37:58,077 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.4971, 2.5002, 2.0991, 2.4707, 2.0744, 1.9550, 1.9038, 2.2893], + device='cuda:0'), covar=tensor([0.1131, 0.1716, 0.1757, 0.1297, 0.1821, 0.0775, 0.1881, 0.0879], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0362, 0.0321, 0.0259, 0.0309, 0.0258, 0.0324, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 13:38:08,538 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 13:38:29,367 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 13:38:33,986 INFO [train.py:903] (0/4) Epoch 29, batch 1850, loss[loss=0.1549, simple_loss=0.24, pruned_loss=0.03495, over 19768.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2825, pruned_loss=0.06002, over 3800156.06 frames. ], batch size: 47, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:34,336 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.9458, 1.4524, 1.7960, 1.5458, 4.4580, 1.0674, 2.5754, 4.8422], + device='cuda:0'), covar=tensor([0.0478, 0.2973, 0.2900, 0.2103, 0.0732, 0.2874, 0.1616, 0.0172], + device='cuda:0'), in_proj_covar=tensor([0.0426, 0.0380, 0.0401, 0.0355, 0.0384, 0.0361, 0.0400, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:38:35,288 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:38:35,537 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7113, 1.3690, 1.3550, 1.5797, 1.2240, 1.4299, 1.2490, 1.5288], + device='cuda:0'), covar=tensor([0.1081, 0.1008, 0.1563, 0.1035, 0.1248, 0.0595, 0.1624, 0.0825], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0360, 0.0320, 0.0258, 0.0307, 0.0257, 0.0323, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 13:39:04,665 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193059.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:05,537 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 13:39:32,452 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.498e+02 4.794e+02 5.817e+02 7.567e+02 2.266e+03, threshold=1.163e+03, percent-clipped=4.0 +2023-04-03 13:39:34,755 INFO [train.py:903] (0/4) Epoch 29, batch 1900, loss[loss=0.2247, simple_loss=0.3065, pruned_loss=0.07146, over 18130.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.0595, over 3795548.81 frames. ], batch size: 83, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:39:46,009 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:49,035 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 13:39:50,320 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:55,322 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 13:40:18,666 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 13:40:25,416 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:33,523 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:35,430 INFO [train.py:903] (0/4) Epoch 29, batch 1950, loss[loss=0.268, simple_loss=0.3254, pruned_loss=0.1052, over 13579.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2827, pruned_loss=0.06046, over 3769738.81 frames. ], batch size: 136, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:40:56,480 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:56,520 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:04,215 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:15,390 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:34,349 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. limit=2.0 +2023-04-03 13:41:34,716 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.104e+02 4.788e+02 5.811e+02 7.006e+02 1.429e+03, threshold=1.162e+03, percent-clipped=3.0 +2023-04-03 13:41:37,847 INFO [train.py:903] (0/4) Epoch 29, batch 2000, loss[loss=0.2043, simple_loss=0.2888, pruned_loss=0.05986, over 19610.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05972, over 3782870.30 frames. ], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:41:46,012 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:48,017 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193193.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:42:10,515 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:23,961 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-03 13:42:31,353 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 13:42:31,583 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193229.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:37,085 INFO [train.py:903] (0/4) Epoch 29, batch 2050, loss[loss=0.2023, simple_loss=0.2846, pruned_loss=0.05997, over 18189.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.06077, over 3783327.60 frames. ], batch size: 83, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:42:50,738 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 13:42:50,767 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 13:43:13,504 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 13:43:34,585 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.912e+02 6.403e+02 8.614e+02 2.001e+03, threshold=1.281e+03, percent-clipped=8.0 +2023-04-03 13:43:36,908 INFO [train.py:903] (0/4) Epoch 29, batch 2100, loss[loss=0.2161, simple_loss=0.304, pruned_loss=0.06406, over 19318.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2833, pruned_loss=0.06089, over 3796853.31 frames. ], batch size: 66, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:43:59,250 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:44:07,572 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 13:44:07,888 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193308.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:44:25,613 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 13:44:26,804 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.9708, 2.7958, 2.3243, 2.3636, 1.9618, 2.4795, 1.1124, 2.0263], + device='cuda:0'), covar=tensor([0.0689, 0.0663, 0.0764, 0.1116, 0.1288, 0.1139, 0.1533, 0.1139], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0366, 0.0370, 0.0393, 0.0475, 0.0399, 0.0348, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:44:32,084 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1740, 1.7774, 1.9240, 2.7934, 1.9522, 2.1802, 2.3309, 2.0772], + device='cuda:0'), covar=tensor([0.0793, 0.0937, 0.1014, 0.0741, 0.0855, 0.0802, 0.0869, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0229, 0.0240, 0.0226, 0.0214, 0.0188, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 13:44:37,257 INFO [train.py:903] (0/4) Epoch 29, batch 2150, loss[loss=0.1973, simple_loss=0.2878, pruned_loss=0.05344, over 19602.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2838, pruned_loss=0.06087, over 3792743.46 frames. ], batch size: 57, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:44:56,837 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193349.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:25,810 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:35,170 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.242e+02 6.391e+02 8.913e+02 2.261e+03, threshold=1.278e+03, percent-clipped=9.0 +2023-04-03 13:45:37,408 INFO [train.py:903] (0/4) Epoch 29, batch 2200, loss[loss=0.2048, simple_loss=0.2904, pruned_loss=0.05958, over 19621.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06151, over 3797976.25 frames. 
], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:45:58,782 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5845, 1.6758, 1.9087, 1.8625, 1.4857, 1.8462, 1.9126, 1.7647], + device='cuda:0'), covar=tensor([0.3806, 0.3445, 0.1854, 0.2192, 0.3594, 0.2067, 0.4678, 0.3247], + device='cuda:0'), in_proj_covar=tensor([0.0946, 0.1026, 0.0749, 0.0959, 0.0925, 0.0862, 0.0867, 0.0814], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 13:46:00,710 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:04,393 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:36,021 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:39,101 INFO [train.py:903] (0/4) Epoch 29, batch 2250, loss[loss=0.1999, simple_loss=0.2848, pruned_loss=0.05748, over 19668.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06122, over 3794545.90 frames. ], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:40,757 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2842, 2.2258, 2.0799, 2.0173, 1.7769, 1.9756, 0.8947, 1.3454], + device='cuda:0'), covar=tensor([0.0620, 0.0651, 0.0524, 0.0855, 0.1181, 0.0978, 0.1357, 0.1087], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0366, 0.0370, 0.0394, 0.0474, 0.0400, 0.0348, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 13:46:45,266 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:07,483 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7585, 1.6333, 1.6631, 2.1758, 1.5768, 1.8309, 1.8820, 1.8280], + device='cuda:0'), covar=tensor([0.0790, 0.0857, 0.0973, 0.0720, 0.0886, 0.0806, 0.0954, 0.0686], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0229, 0.0239, 0.0226, 0.0214, 0.0188, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 13:47:20,664 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:25,802 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193472.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:47:36,426 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 4.884e+02 5.894e+02 7.303e+02 1.661e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 13:47:38,735 INFO [train.py:903] (0/4) Epoch 29, batch 2300, loss[loss=0.1779, simple_loss=0.2715, pruned_loss=0.04215, over 19665.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06033, over 3802525.81 frames. ], batch size: 53, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:47:50,440 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:53,665 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-03 13:48:20,984 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193518.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:48:39,108 INFO [train.py:903] (0/4) Epoch 29, batch 2350, loss[loss=0.2193, simple_loss=0.2915, pruned_loss=0.07352, over 19722.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.06072, over 3805480.97 frames. ], batch size: 51, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:16,352 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193564.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:49:20,464 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 13:49:27,312 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:49:36,485 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 13:49:37,594 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.546e+02 5.901e+02 7.828e+02 1.716e+03, threshold=1.180e+03, percent-clipped=11.0 +2023-04-03 13:49:40,761 INFO [train.py:903] (0/4) Epoch 29, batch 2400, loss[loss=0.2294, simple_loss=0.3167, pruned_loss=0.07101, over 18874.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.284, pruned_loss=0.06052, over 3810673.03 frames. ], batch size: 74, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:47,435 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193589.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:50:39,568 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:50:41,249 INFO [train.py:903] (0/4) Epoch 29, batch 2450, loss[loss=0.2207, simple_loss=0.3124, pruned_loss=0.06449, over 17335.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2849, pruned_loss=0.06107, over 3808428.09 frames. ], batch size: 101, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:50:54,653 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:39,133 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.882e+02 5.957e+02 8.043e+02 1.393e+03, threshold=1.191e+03, percent-clipped=5.0 +2023-04-03 13:51:41,314 INFO [train.py:903] (0/4) Epoch 29, batch 2500, loss[loss=0.2232, simple_loss=0.3001, pruned_loss=0.07312, over 19664.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2846, pruned_loss=0.06069, over 3816595.88 frames. ], batch size: 58, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:51:45,865 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193688.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:47,045 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1586, 1.7955, 1.4435, 1.1713, 1.6100, 1.1567, 1.1613, 1.7028], + device='cuda:0'), covar=tensor([0.0867, 0.0843, 0.1015, 0.0888, 0.0600, 0.1306, 0.0666, 0.0450], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0319, 0.0341, 0.0273, 0.0253, 0.0346, 0.0293, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:51:54,119 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. 
limit=2.0 +2023-04-03 13:52:17,908 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 13:52:40,715 INFO [train.py:903] (0/4) Epoch 29, batch 2550, loss[loss=0.2278, simple_loss=0.305, pruned_loss=0.07523, over 19772.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06125, over 3818976.08 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:53:06,576 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 13:53:15,302 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:30,067 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:35,072 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 13:53:38,229 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.99 vs. limit=5.0 +2023-04-03 13:53:41,002 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 5.186e+02 6.141e+02 8.140e+02 1.584e+03, threshold=1.228e+03, percent-clipped=10.0 +2023-04-03 13:53:41,159 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:42,121 INFO [train.py:903] (0/4) Epoch 29, batch 2600, loss[loss=0.1962, simple_loss=0.2882, pruned_loss=0.0521, over 19692.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06053, over 3821279.42 frames. ], batch size: 59, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:02,311 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193799.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:54:22,055 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193816.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:54:44,182 INFO [train.py:903] (0/4) Epoch 29, batch 2650, loss[loss=0.2436, simple_loss=0.3145, pruned_loss=0.08638, over 19426.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.0598, over 3832515.92 frames. 
], batch size: 70, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:44,584 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0920, 3.2884, 1.8752, 1.9792, 2.9284, 1.7258, 1.4518, 2.2358], + device='cuda:0'), covar=tensor([0.1363, 0.0801, 0.1238, 0.0936, 0.0641, 0.1378, 0.1087, 0.0746], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0317, 0.0339, 0.0271, 0.0251, 0.0344, 0.0290, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:54:47,932 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.6345, 1.4205, 1.5825, 1.5715, 3.2207, 1.1999, 2.4260, 3.6892], + device='cuda:0'), covar=tensor([0.0462, 0.2777, 0.2833, 0.1847, 0.0629, 0.2559, 0.1372, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0378, 0.0398, 0.0353, 0.0381, 0.0358, 0.0395, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:54:53,439 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.8013, 3.2990, 3.3394, 3.3604, 1.4223, 3.2214, 2.7456, 3.1196], + device='cuda:0'), covar=tensor([0.1877, 0.1122, 0.0838, 0.0987, 0.5651, 0.1176, 0.0961, 0.1301], + device='cuda:0'), in_proj_covar=tensor([0.0825, 0.0792, 0.1002, 0.0883, 0.0872, 0.0766, 0.0592, 0.0929], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 13:54:59,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 13:55:18,879 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-03 13:55:43,540 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 4.600e+02 5.538e+02 7.187e+02 1.315e+03, threshold=1.108e+03, percent-clipped=1.0 +2023-04-03 13:55:44,728 INFO [train.py:903] (0/4) Epoch 29, batch 2700, loss[loss=0.2314, simple_loss=0.3116, pruned_loss=0.07561, over 19746.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.05962, over 3845450.50 frames. ], batch size: 63, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:55:48,445 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6796, 1.3011, 1.3101, 1.5578, 1.1309, 1.4107, 1.2980, 1.5040], + device='cuda:0'), covar=tensor([0.1220, 0.1177, 0.1798, 0.1087, 0.1395, 0.0715, 0.1813, 0.0930], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0361, 0.0321, 0.0258, 0.0308, 0.0258, 0.0323, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 13:56:00,565 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:05,901 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193902.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:42,182 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193931.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:56:45,202 INFO [train.py:903] (0/4) Epoch 29, batch 2750, loss[loss=0.2419, simple_loss=0.3132, pruned_loss=0.08525, over 13362.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2824, pruned_loss=0.05937, over 3840387.45 frames. 
], batch size: 135, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:56:58,030 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:27,657 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.3104, 5.8276, 3.2607, 5.0400, 1.4031, 5.9220, 5.7696, 5.9120], + device='cuda:0'), covar=tensor([0.0338, 0.0725, 0.1716, 0.0690, 0.3689, 0.0465, 0.0757, 0.0754], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0432, 0.0523, 0.0359, 0.0414, 0.0460, 0.0455, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:57:27,853 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193969.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:36,663 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:44,120 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.680e+02 5.858e+02 7.532e+02 1.982e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-03 13:57:45,310 INFO [train.py:903] (0/4) Epoch 29, batch 2800, loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.0577, over 19534.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06015, over 3820832.44 frames. ], batch size: 64, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:06,194 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-194000.pt +2023-04-03 13:58:26,734 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:58:48,029 INFO [train.py:903] (0/4) Epoch 29, batch 2850, loss[loss=0.1905, simple_loss=0.2606, pruned_loss=0.06018, over 18638.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05983, over 3818186.27 frames. ], batch size: 41, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:58,385 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:01,780 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1347, 1.7399, 1.4367, 1.1315, 1.6854, 1.1356, 1.1157, 1.7268], + device='cuda:0'), covar=tensor([0.0889, 0.0829, 0.1069, 0.1002, 0.0553, 0.1353, 0.0742, 0.0425], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0319, 0.0342, 0.0273, 0.0253, 0.0345, 0.0292, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 13:59:45,132 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 13:59:47,408 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.652e+02 5.661e+02 7.187e+02 2.087e+03, threshold=1.132e+03, percent-clipped=4.0 +2023-04-03 13:59:48,552 INFO [train.py:903] (0/4) Epoch 29, batch 2900, loss[loss=0.1942, simple_loss=0.2656, pruned_loss=0.06134, over 19794.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06006, over 3822154.98 frames. 
], batch size: 48, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:59:55,297 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:57,600 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:00:47,116 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1976, 1.8721, 1.8530, 2.0284, 1.7290, 1.8461, 1.7231, 2.0748], + device='cuda:0'), covar=tensor([0.1019, 0.1483, 0.1479, 0.1140, 0.1535, 0.0596, 0.1506, 0.0706], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0362, 0.0322, 0.0260, 0.0310, 0.0260, 0.0325, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 14:00:47,882 INFO [train.py:903] (0/4) Epoch 29, batch 2950, loss[loss=0.1656, simple_loss=0.2439, pruned_loss=0.04367, over 19736.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06043, over 3826204.39 frames. ], batch size: 45, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:13,341 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:31,697 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-03 14:01:42,908 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:46,909 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 4.745e+02 5.839e+02 7.587e+02 1.918e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-04-03 14:01:48,092 INFO [train.py:903] (0/4) Epoch 29, batch 3000, loss[loss=0.2074, simple_loss=0.2898, pruned_loss=0.06251, over 19580.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2832, pruned_loss=0.06061, over 3813477.33 frames. ], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:48,093 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 14:02:01,080 INFO [train.py:937] (0/4) Epoch 29, validation: loss=0.1668, simple_loss=0.2661, pruned_loss=0.03375, over 944034.00 frames. +2023-04-03 14:02:01,081 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 14:02:02,331 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 14:02:05,028 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194187.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:02:36,837 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194212.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:03:01,745 INFO [train.py:903] (0/4) Epoch 29, batch 3050, loss[loss=0.1993, simple_loss=0.2868, pruned_loss=0.05587, over 19700.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.06009, over 3827865.89 frames. 
], batch size: 60, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:03:17,754 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194246.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:04:02,884 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 5.546e+02 6.449e+02 7.988e+02 2.570e+03, threshold=1.290e+03, percent-clipped=8.0 +2023-04-03 14:04:03,995 INFO [train.py:903] (0/4) Epoch 29, batch 3100, loss[loss=0.1915, simple_loss=0.278, pruned_loss=0.05249, over 19675.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2816, pruned_loss=0.05956, over 3825786.52 frames. ], batch size: 53, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:04,139 INFO [train.py:903] (0/4) Epoch 29, batch 3150, loss[loss=0.1705, simple_loss=0.251, pruned_loss=0.04497, over 19770.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2805, pruned_loss=0.05868, over 3833099.29 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:06,647 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:19,913 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:27,475 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 14:05:36,256 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:49,252 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194371.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:51,710 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:59,641 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194380.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:06:02,721 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.902e+02 5.790e+02 7.151e+02 3.080e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-03 14:06:03,883 INFO [train.py:903] (0/4) Epoch 29, batch 3200, loss[loss=0.2127, simple_loss=0.2943, pruned_loss=0.06555, over 19641.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2814, pruned_loss=0.05915, over 3825364.18 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:06:28,873 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.23 vs. limit=5.0 +2023-04-03 14:07:04,909 INFO [train.py:903] (0/4) Epoch 29, batch 3250, loss[loss=0.2059, simple_loss=0.2827, pruned_loss=0.06455, over 19695.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05923, over 3823795.68 frames. ], batch size: 53, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:05,051 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194434.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:08:04,291 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.916e+02 6.594e+02 9.076e+02 2.390e+03, threshold=1.319e+03, percent-clipped=9.0 +2023-04-03 14:08:05,470 INFO [train.py:903] (0/4) Epoch 29, batch 3300, loss[loss=0.2005, simple_loss=0.287, pruned_loss=0.05703, over 19651.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.282, pruned_loss=0.05949, over 3837752.29 frames. 
], batch size: 53, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:08:09,432 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 14:09:07,375 INFO [train.py:903] (0/4) Epoch 29, batch 3350, loss[loss=0.2537, simple_loss=0.3233, pruned_loss=0.09209, over 19772.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2829, pruned_loss=0.05977, over 3844697.02 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:09:24,567 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:10:06,126 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.166e+02 6.518e+02 8.363e+02 1.379e+03, threshold=1.304e+03, percent-clipped=1.0 +2023-04-03 14:10:07,290 INFO [train.py:903] (0/4) Epoch 29, batch 3400, loss[loss=0.2062, simple_loss=0.2861, pruned_loss=0.06309, over 18302.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05934, over 3846091.64 frames. ], batch size: 83, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:10:48,227 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194617.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:03,208 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6150, 1.7544, 2.1622, 1.9446, 3.3400, 2.6388, 3.5547, 1.9205], + device='cuda:0'), covar=tensor([0.2769, 0.4624, 0.3009, 0.2053, 0.1458, 0.2388, 0.1624, 0.4387], + device='cuda:0'), in_proj_covar=tensor([0.0559, 0.0676, 0.0765, 0.0513, 0.0638, 0.0550, 0.0674, 0.0578], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:11:07,042 INFO [train.py:903] (0/4) Epoch 29, batch 3450, loss[loss=0.2121, simple_loss=0.2961, pruned_loss=0.06404, over 18776.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.05915, over 3841732.51 frames. ], batch size: 74, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:11:09,266 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 14:11:17,038 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:29,980 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:01,640 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194680.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:05,585 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.627e+02 5.712e+02 7.363e+02 1.612e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 14:12:06,660 INFO [train.py:903] (0/4) Epoch 29, batch 3500, loss[loss=0.2897, simple_loss=0.3446, pruned_loss=0.1174, over 13541.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05943, over 3821317.60 frames. 
], batch size: 135, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:12:43,629 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194715.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:54,391 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194724.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:13:06,876 INFO [train.py:903] (0/4) Epoch 29, batch 3550, loss[loss=0.1977, simple_loss=0.2788, pruned_loss=0.05832, over 19530.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.282, pruned_loss=0.05983, over 3812947.40 frames. ], batch size: 54, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:13:49,785 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.48 vs. limit=5.0 +2023-04-03 14:14:05,432 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1600, 5.5754, 3.2094, 4.8577, 1.2235, 5.8074, 5.5185, 5.7743], + device='cuda:0'), covar=tensor([0.0407, 0.0811, 0.1779, 0.0738, 0.4054, 0.0489, 0.0782, 0.1050], + device='cuda:0'), in_proj_covar=tensor([0.0529, 0.0426, 0.0516, 0.0355, 0.0409, 0.0456, 0.0451, 0.0482], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:14:06,360 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.806e+02 5.826e+02 6.989e+02 1.690e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 14:14:07,578 INFO [train.py:903] (0/4) Epoch 29, batch 3600, loss[loss=0.2066, simple_loss=0.2897, pruned_loss=0.06178, over 19509.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05952, over 3807217.81 frames. ], batch size: 64, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:20,139 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:14:33,736 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194805.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,770 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,812 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:06,261 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 14:15:06,688 INFO [train.py:903] (0/4) Epoch 29, batch 3650, loss[loss=0.2002, simple_loss=0.2812, pruned_loss=0.05959, over 19839.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.0595, over 3805302.24 frames. ], batch size: 52, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:15:12,772 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194839.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:15:45,388 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:16:05,568 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.663e+02 5.135e+02 6.261e+02 7.520e+02 2.080e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 14:16:06,488 INFO [train.py:903] (0/4) Epoch 29, batch 3700, loss[loss=0.2762, simple_loss=0.3458, pruned_loss=0.1033, over 19613.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05992, over 3814202.66 frames. 
], batch size: 61, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:17:08,166 INFO [train.py:903] (0/4) Epoch 29, batch 3750, loss[loss=0.1864, simple_loss=0.2748, pruned_loss=0.04899, over 19791.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06033, over 3807146.63 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:06,323 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 4.741e+02 6.225e+02 7.602e+02 2.236e+03, threshold=1.245e+03, percent-clipped=10.0 +2023-04-03 14:18:07,443 INFO [train.py:903] (0/4) Epoch 29, batch 3800, loss[loss=0.1899, simple_loss=0.2854, pruned_loss=0.04726, over 19533.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05978, over 3817436.07 frames. ], batch size: 54, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:21,201 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:18:34,661 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 14:19:01,235 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195029.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:06,531 INFO [train.py:903] (0/4) Epoch 29, batch 3850, loss[loss=0.207, simple_loss=0.2882, pruned_loss=0.0629, over 19623.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.0597, over 3814435.60 frames. ], batch size: 57, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:19:27,234 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195051.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:41,487 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195062.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:19:44,000 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2492, 2.2646, 2.5398, 2.9599, 2.3480, 2.8359, 2.4691, 2.2621], + device='cuda:0'), covar=tensor([0.4470, 0.4409, 0.2036, 0.2704, 0.4655, 0.2365, 0.5119, 0.3660], + device='cuda:0'), in_proj_covar=tensor([0.0951, 0.1030, 0.0754, 0.0964, 0.0928, 0.0866, 0.0869, 0.0818], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:19:56,985 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195076.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:06,403 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.743e+02 5.822e+02 7.676e+02 1.767e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-04-03 14:20:06,426 INFO [train.py:903] (0/4) Epoch 29, batch 3900, loss[loss=0.1754, simple_loss=0.2461, pruned_loss=0.05235, over 19303.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.281, pruned_loss=0.05922, over 3815356.62 frames. 
], batch size: 44, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:20:08,963 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:13,187 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:20,910 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195095.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:20:23,858 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5753, 4.1153, 4.2805, 4.2703, 1.6926, 4.0453, 3.4744, 4.0515], + device='cuda:0'), covar=tensor([0.1729, 0.0840, 0.0643, 0.0751, 0.6075, 0.0926, 0.0811, 0.1141], + device='cuda:0'), in_proj_covar=tensor([0.0826, 0.0794, 0.1004, 0.0880, 0.0871, 0.0769, 0.0594, 0.0935], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 14:20:39,809 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:39,848 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:49,224 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-03 14:20:50,174 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195120.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:21:07,592 INFO [train.py:903] (0/4) Epoch 29, batch 3950, loss[loss=0.197, simple_loss=0.2841, pruned_loss=0.05497, over 19673.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.05915, over 3810095.65 frames. ], batch size: 55, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:21:12,680 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 14:21:43,413 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4184, 1.3043, 1.9435, 1.5719, 2.9936, 4.4943, 4.4345, 4.9330], + device='cuda:0'), covar=tensor([0.1631, 0.4341, 0.3640, 0.2596, 0.0666, 0.0220, 0.0178, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0273, 0.0258, 0.0198, 0.0221, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 14:22:08,284 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.632e+02 5.828e+02 7.591e+02 1.503e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 14:22:08,302 INFO [train.py:903] (0/4) Epoch 29, batch 4000, loss[loss=0.1579, simple_loss=0.2394, pruned_loss=0.0382, over 19306.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2814, pruned_loss=0.05922, over 3815850.50 frames. ], batch size: 44, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:22:38,710 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:22:52,712 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 14:23:07,865 INFO [train.py:903] (0/4) Epoch 29, batch 4050, loss[loss=0.2083, simple_loss=0.2796, pruned_loss=0.06846, over 19381.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06032, over 3810821.54 frames. 
], batch size: 48, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:23:19,843 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:23:24,432 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 14:24:08,388 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.197e+02 5.118e+02 6.147e+02 8.377e+02 1.783e+03, threshold=1.229e+03, percent-clipped=5.0 +2023-04-03 14:24:08,406 INFO [train.py:903] (0/4) Epoch 29, batch 4100, loss[loss=0.2106, simple_loss=0.2926, pruned_loss=0.06432, over 19656.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2823, pruned_loss=0.06013, over 3823297.25 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:41,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 14:24:56,207 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:08,841 INFO [train.py:903] (0/4) Epoch 29, batch 4150, loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.06545, over 17507.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06029, over 3815516.87 frames. ], batch size: 101, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:25:49,120 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:56,296 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:10,052 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 4.925e+02 6.547e+02 8.865e+02 3.200e+03, threshold=1.309e+03, percent-clipped=9.0 +2023-04-03 14:26:10,071 INFO [train.py:903] (0/4) Epoch 29, batch 4200, loss[loss=0.1975, simple_loss=0.285, pruned_loss=0.05499, over 19305.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2812, pruned_loss=0.05969, over 3818557.27 frames. ], batch size: 70, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:26:13,438 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 14:26:19,408 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:35,643 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195406.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:26:40,089 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:07,654 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:08,632 INFO [train.py:903] (0/4) Epoch 29, batch 4250, loss[loss=0.2146, simple_loss=0.297, pruned_loss=0.06613, over 19653.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2819, pruned_loss=0.06043, over 3818709.23 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:27:22,567 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 14:27:32,635 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-03 14:28:08,329 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 4.660e+02 5.852e+02 7.750e+02 1.648e+03, threshold=1.170e+03, percent-clipped=3.0 +2023-04-03 14:28:08,348 INFO [train.py:903] (0/4) Epoch 29, batch 4300, loss[loss=0.178, simple_loss=0.2527, pruned_loss=0.05165, over 19735.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2824, pruned_loss=0.06071, over 3815522.36 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:28:14,094 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195488.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:28:54,525 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195521.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:29:01,487 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 14:29:09,903 INFO [train.py:903] (0/4) Epoch 29, batch 4350, loss[loss=0.194, simple_loss=0.265, pruned_loss=0.0615, over 19776.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2822, pruned_loss=0.06003, over 3820274.82 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:29:28,070 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:29:59,932 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.5825, 1.1528, 1.3797, 1.3695, 2.2578, 1.1626, 2.1216, 2.5363], + device='cuda:0'), covar=tensor([0.0686, 0.2967, 0.3041, 0.1591, 0.0839, 0.2006, 0.1122, 0.0451], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0386, 0.0361, 0.0400, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:30:05,591 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:10,386 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 4.994e+02 6.103e+02 7.630e+02 1.747e+03, threshold=1.221e+03, percent-clipped=1.0 +2023-04-03 14:30:10,405 INFO [train.py:903] (0/4) Epoch 29, batch 4400, loss[loss=0.2071, simple_loss=0.3079, pruned_loss=0.05311, over 19671.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.06063, over 3806977.33 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:30:29,902 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 14:30:35,612 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:38,554 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 14:31:09,647 INFO [train.py:903] (0/4) Epoch 29, batch 4450, loss[loss=0.1575, simple_loss=0.2397, pruned_loss=0.03764, over 16094.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06045, over 3808408.81 frames. ], batch size: 35, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:32:07,397 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-03 14:32:08,763 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.083e+02 5.988e+02 7.824e+02 1.664e+03, threshold=1.198e+03, percent-clipped=3.0 +2023-04-03 14:32:08,782 INFO [train.py:903] (0/4) Epoch 29, batch 4500, loss[loss=0.2248, simple_loss=0.3013, pruned_loss=0.07413, over 19468.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05939, over 3813633.53 frames. ], batch size: 64, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:10,065 INFO [train.py:903] (0/4) Epoch 29, batch 4550, loss[loss=0.2306, simple_loss=0.3191, pruned_loss=0.07107, over 19776.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2819, pruned_loss=0.05971, over 3813140.84 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:15,610 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 14:33:21,552 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:33,980 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:39,345 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 14:33:51,816 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195769.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:59,658 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 14:34:01,508 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195777.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:34:08,708 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.047e+02 5.905e+02 8.200e+02 1.793e+03, threshold=1.181e+03, percent-clipped=7.0 +2023-04-03 14:34:08,726 INFO [train.py:903] (0/4) Epoch 29, batch 4600, loss[loss=0.1998, simple_loss=0.2898, pruned_loss=0.05487, over 19603.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06031, over 3797355.97 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:34:31,284 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195802.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:34:33,593 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:05,153 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:10,614 INFO [train.py:903] (0/4) Epoch 29, batch 4650, loss[loss=0.1608, simple_loss=0.247, pruned_loss=0.03728, over 19413.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.05976, over 3803617.92 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:35:22,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 14:35:33,354 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 14:35:54,034 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:36:00,411 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7616, 4.2452, 4.4719, 4.4489, 1.7801, 4.2190, 3.6812, 4.2306], + device='cuda:0'), covar=tensor([0.1708, 0.1017, 0.0617, 0.0752, 0.6092, 0.1030, 0.0711, 0.1098], + device='cuda:0'), in_proj_covar=tensor([0.0829, 0.0799, 0.1009, 0.0884, 0.0877, 0.0775, 0.0595, 0.0941], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 14:36:10,280 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.073e+02 5.115e+02 6.253e+02 8.443e+02 2.371e+03, threshold=1.251e+03, percent-clipped=7.0 +2023-04-03 14:36:10,298 INFO [train.py:903] (0/4) Epoch 29, batch 4700, loss[loss=0.2031, simple_loss=0.2938, pruned_loss=0.0562, over 19534.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2825, pruned_loss=0.05979, over 3810385.20 frames. ], batch size: 61, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:36:29,172 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 14:37:02,884 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4502, 1.4969, 1.8312, 1.7312, 2.7404, 2.2784, 2.9509, 1.2962], + device='cuda:0'), covar=tensor([0.2653, 0.4566, 0.2921, 0.2031, 0.1566, 0.2357, 0.1483, 0.4819], + device='cuda:0'), in_proj_covar=tensor([0.0560, 0.0679, 0.0768, 0.0515, 0.0641, 0.0553, 0.0676, 0.0580], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:37:11,420 INFO [train.py:903] (0/4) Epoch 29, batch 4750, loss[loss=0.2024, simple_loss=0.2929, pruned_loss=0.05596, over 18895.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2822, pruned_loss=0.05952, over 3804441.87 frames. ], batch size: 74, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:37:25,294 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 14:38:03,277 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 14:38:08,658 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4840, 2.2168, 1.7107, 1.6131, 2.0420, 1.4053, 1.3868, 1.9315], + device='cuda:0'), covar=tensor([0.1138, 0.0841, 0.1114, 0.0852, 0.0682, 0.1340, 0.0831, 0.0518], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0322, 0.0344, 0.0274, 0.0255, 0.0348, 0.0292, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:38:11,673 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.980e+02 6.350e+02 8.555e+02 2.103e+03, threshold=1.270e+03, percent-clipped=3.0 +2023-04-03 14:38:11,691 INFO [train.py:903] (0/4) Epoch 29, batch 4800, loss[loss=0.1729, simple_loss=0.265, pruned_loss=0.04046, over 19679.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2816, pruned_loss=0.0596, over 3808616.21 frames. ], batch size: 53, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:38:20,092 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.75 vs. 
limit=5.0 +2023-04-03 14:38:31,634 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-196000.pt +2023-04-03 14:38:56,380 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4806, 1.5278, 1.8184, 1.7318, 2.5341, 2.2429, 2.6812, 1.2103], + device='cuda:0'), covar=tensor([0.2620, 0.4601, 0.2914, 0.2033, 0.1699, 0.2406, 0.1602, 0.4913], + device='cuda:0'), in_proj_covar=tensor([0.0562, 0.0681, 0.0770, 0.0517, 0.0643, 0.0555, 0.0678, 0.0583], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:39:14,504 INFO [train.py:903] (0/4) Epoch 29, batch 4850, loss[loss=0.1755, simple_loss=0.2551, pruned_loss=0.04793, over 19585.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2817, pruned_loss=0.05959, over 3822278.33 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:37,046 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 14:39:55,383 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 14:40:01,746 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 14:40:02,677 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 14:40:11,420 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 14:40:13,822 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 4.641e+02 5.767e+02 7.663e+02 1.512e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-03 14:40:13,840 INFO [train.py:903] (0/4) Epoch 29, batch 4900, loss[loss=0.193, simple_loss=0.2727, pruned_loss=0.05665, over 19773.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.05996, over 3812266.85 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:40:22,183 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2051, 1.2665, 1.2901, 1.1058, 1.0689, 1.1245, 0.1529, 0.4056], + device='cuda:0'), covar=tensor([0.0859, 0.0764, 0.0515, 0.0702, 0.1580, 0.0770, 0.1599, 0.1358], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0394, 0.0475, 0.0400, 0.0347, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 14:40:32,814 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 14:41:03,531 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=196125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:14,068 INFO [train.py:903] (0/4) Epoch 29, batch 4950, loss[loss=0.156, simple_loss=0.2427, pruned_loss=0.03459, over 19384.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2826, pruned_loss=0.05982, over 3821310.92 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:41:14,818 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-04-03 14:41:16,764 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9861, 1.4809, 1.6493, 1.5936, 3.6067, 1.0955, 2.6324, 4.0688], + device='cuda:0'), covar=tensor([0.0459, 0.2784, 0.2818, 0.1968, 0.0626, 0.2661, 0.1197, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0384, 0.0404, 0.0358, 0.0387, 0.0361, 0.0402, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:41:31,282 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 14:41:33,812 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=196150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:54,207 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 14:41:58,580 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8071, 1.5105, 1.8983, 1.8042, 4.3552, 1.3875, 2.6222, 4.6991], + device='cuda:0'), covar=tensor([0.0498, 0.2961, 0.2768, 0.1962, 0.0732, 0.2650, 0.1604, 0.0173], + device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0385, 0.0404, 0.0358, 0.0387, 0.0362, 0.0403, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:42:14,324 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.427e+02 4.864e+02 6.197e+02 7.725e+02 1.739e+03, threshold=1.239e+03, percent-clipped=10.0 +2023-04-03 14:42:14,342 INFO [train.py:903] (0/4) Epoch 29, batch 5000, loss[loss=0.2536, simple_loss=0.3233, pruned_loss=0.09193, over 19524.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2819, pruned_loss=0.05931, over 3822832.98 frames. ], batch size: 64, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:42:14,684 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0929, 1.3167, 1.7589, 1.2660, 2.6472, 3.4814, 3.1800, 3.6700], + device='cuda:0'), covar=tensor([0.1799, 0.4048, 0.3599, 0.2856, 0.0686, 0.0241, 0.0242, 0.0306], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0258, 0.0199, 0.0221, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 14:42:16,062 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. limit=5.0 +2023-04-03 14:42:23,520 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 14:42:35,057 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 14:42:54,571 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-03 14:43:14,846 INFO [train.py:903] (0/4) Epoch 29, batch 5050, loss[loss=0.1857, simple_loss=0.2795, pruned_loss=0.04597, over 19368.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2818, pruned_loss=0.05882, over 3828858.22 frames. ], batch size: 70, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:43:49,704 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 14:44:15,609 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 4.774e+02 5.782e+02 7.176e+02 1.455e+03, threshold=1.156e+03, percent-clipped=5.0 +2023-04-03 14:44:15,627 INFO [train.py:903] (0/4) Epoch 29, batch 5100, loss[loss=0.1699, simple_loss=0.2556, pruned_loss=0.04215, over 19834.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2806, pruned_loss=0.05829, over 3823761.66 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:44:24,665 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 14:44:27,971 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 14:44:32,504 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 14:45:16,433 INFO [train.py:903] (0/4) Epoch 29, batch 5150, loss[loss=0.1765, simple_loss=0.2542, pruned_loss=0.04939, over 19313.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2814, pruned_loss=0.0589, over 3814804.99 frames. ], batch size: 44, lr: 2.81e-03, grad_scale: 4.0 +2023-04-03 14:45:17,917 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.2447, 2.8851, 2.3323, 2.4071, 2.1851, 2.6590, 1.0168, 2.0709], + device='cuda:0'), covar=tensor([0.0698, 0.0686, 0.0723, 0.1154, 0.1149, 0.1091, 0.1560, 0.1247], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0473, 0.0398, 0.0347, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 14:45:23,039 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:45:26,214 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 14:45:59,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 14:46:10,236 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:46:15,837 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8047, 1.9138, 2.1473, 2.2586, 1.7386, 2.2048, 2.0983, 1.9364], + device='cuda:0'), covar=tensor([0.4264, 0.3715, 0.2022, 0.2421, 0.3998, 0.2263, 0.5291, 0.3605], + device='cuda:0'), in_proj_covar=tensor([0.0945, 0.1023, 0.0748, 0.0958, 0.0923, 0.0862, 0.0863, 0.0812], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:46:17,533 INFO [train.py:903] (0/4) Epoch 29, batch 5200, loss[loss=0.2233, simple_loss=0.3021, pruned_loss=0.07225, over 18213.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2808, pruned_loss=0.05877, over 3808809.50 frames. ], batch size: 83, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:46:18,467 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 5.187e+02 6.337e+02 8.295e+02 1.863e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 14:46:28,779 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 14:47:12,668 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-03 14:47:19,214 INFO [train.py:903] (0/4) Epoch 29, batch 5250, loss[loss=0.2114, simple_loss=0.2905, pruned_loss=0.06614, over 19724.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2818, pruned_loss=0.05922, over 3805682.67 frames. ], batch size: 51, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:47:43,296 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8484, 1.9274, 2.2598, 2.3135, 1.8260, 2.3184, 2.2275, 2.0213], + device='cuda:0'), covar=tensor([0.4234, 0.4055, 0.1999, 0.2532, 0.4027, 0.2263, 0.5182, 0.3715], + device='cuda:0'), in_proj_covar=tensor([0.0949, 0.1026, 0.0751, 0.0961, 0.0926, 0.0864, 0.0866, 0.0815], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:48:07,776 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2498, 2.2790, 2.5993, 2.8973, 2.2387, 2.7876, 2.5915, 2.3008], + device='cuda:0'), covar=tensor([0.4389, 0.4249, 0.1979, 0.2794, 0.4496, 0.2386, 0.4961, 0.3543], + device='cuda:0'), in_proj_covar=tensor([0.0948, 0.1025, 0.0750, 0.0960, 0.0925, 0.0864, 0.0865, 0.0814], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 14:48:20,445 INFO [train.py:903] (0/4) Epoch 29, batch 5300, loss[loss=0.2518, simple_loss=0.3274, pruned_loss=0.08811, over 19685.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2823, pruned_loss=0.0592, over 3813510.25 frames. ], batch size: 58, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:48:21,521 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.792e+02 5.709e+02 7.130e+02 1.478e+03, threshold=1.142e+03, percent-clipped=2.0 +2023-04-03 14:48:35,852 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 14:49:20,266 INFO [train.py:903] (0/4) Epoch 29, batch 5350, loss[loss=0.1933, simple_loss=0.2719, pruned_loss=0.05732, over 19858.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05903, over 3826433.68 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:49:51,997 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 14:50:09,929 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.8398, 4.3726, 2.8680, 3.8571, 1.0258, 4.3902, 4.2554, 4.3800], + device='cuda:0'), covar=tensor([0.0624, 0.1101, 0.1856, 0.0891, 0.4131, 0.0630, 0.0906, 0.1089], + device='cuda:0'), in_proj_covar=tensor([0.0534, 0.0432, 0.0518, 0.0357, 0.0409, 0.0457, 0.0452, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:50:21,626 INFO [train.py:903] (0/4) Epoch 29, batch 5400, loss[loss=0.1926, simple_loss=0.2885, pruned_loss=0.0484, over 19680.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.281, pruned_loss=0.05824, over 3828071.23 frames. 
], batch size: 59, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:50:22,760 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.834e+02 5.946e+02 7.840e+02 1.782e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-03 14:50:36,245 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:50:59,765 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3342, 2.0779, 1.6614, 1.4354, 1.8380, 1.3725, 1.3350, 1.8182], + device='cuda:0'), covar=tensor([0.0946, 0.0829, 0.1149, 0.0870, 0.0602, 0.1358, 0.0751, 0.0506], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0324, 0.0346, 0.0277, 0.0255, 0.0350, 0.0294, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:51:22,494 INFO [train.py:903] (0/4) Epoch 29, batch 5450, loss[loss=0.2325, simple_loss=0.3127, pruned_loss=0.07617, over 19736.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2799, pruned_loss=0.05787, over 3839517.80 frames. ], batch size: 63, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:51:34,881 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0575, 4.4838, 4.8386, 4.8392, 1.8174, 4.5264, 3.9014, 4.5704], + device='cuda:0'), covar=tensor([0.1825, 0.0841, 0.0658, 0.0667, 0.6435, 0.0971, 0.0709, 0.1134], + device='cuda:0'), in_proj_covar=tensor([0.0829, 0.0802, 0.1013, 0.0886, 0.0879, 0.0774, 0.0596, 0.0938], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 14:51:37,092 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7501, 1.7599, 1.7241, 1.5012, 1.5421, 1.4902, 0.3269, 0.7205], + device='cuda:0'), covar=tensor([0.0737, 0.0667, 0.0442, 0.0704, 0.1211, 0.0820, 0.1353, 0.1219], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0365, 0.0370, 0.0394, 0.0474, 0.0400, 0.0348, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 14:51:41,424 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.0370, 5.5254, 2.9177, 4.7708, 1.0405, 5.6733, 5.4532, 5.5610], + device='cuda:0'), covar=tensor([0.0408, 0.0783, 0.1954, 0.0725, 0.3985, 0.0472, 0.0813, 0.1080], + device='cuda:0'), in_proj_covar=tensor([0.0537, 0.0435, 0.0522, 0.0360, 0.0413, 0.0460, 0.0456, 0.0491], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 14:52:21,536 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:52:22,558 INFO [train.py:903] (0/4) Epoch 29, batch 5500, loss[loss=0.1871, simple_loss=0.275, pruned_loss=0.04962, over 17437.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05954, over 3825867.01 frames. ], batch size: 101, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:52:23,692 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 4.767e+02 5.823e+02 7.066e+02 1.237e+03, threshold=1.165e+03, percent-clipped=1.0 +2023-04-03 14:52:46,212 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 14:53:08,296 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:53:22,918 INFO [train.py:903] (0/4) Epoch 29, batch 5550, loss[loss=0.216, simple_loss=0.3043, pruned_loss=0.06384, over 18160.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.282, pruned_loss=0.0592, over 3834587.06 frames. ], batch size: 83, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:53:30,049 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 14:53:40,091 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-03 14:54:18,749 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 14:54:22,913 INFO [train.py:903] (0/4) Epoch 29, batch 5600, loss[loss=0.1978, simple_loss=0.2854, pruned_loss=0.0551, over 19533.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.0593, over 3821802.94 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:54:24,076 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.937e+02 5.977e+02 7.468e+02 1.263e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 14:54:39,832 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:54:40,983 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:55:24,428 INFO [train.py:903] (0/4) Epoch 29, batch 5650, loss[loss=0.1699, simple_loss=0.2449, pruned_loss=0.04741, over 19753.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2809, pruned_loss=0.05866, over 3819993.35 frames. ], batch size: 47, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:55:28,124 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,093 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,963 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 14:56:24,244 INFO [train.py:903] (0/4) Epoch 29, batch 5700, loss[loss=0.1914, simple_loss=0.2871, pruned_loss=0.04779, over 19666.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05848, over 3814961.56 frames. ], batch size: 55, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:56:25,387 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.159e+02 6.169e+02 7.777e+02 1.148e+03, threshold=1.234e+03, percent-clipped=0.0 +2023-04-03 14:56:41,796 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:46,521 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 14:57:24,049 INFO [train.py:903] (0/4) Epoch 29, batch 5750, loss[loss=0.2232, simple_loss=0.2987, pruned_loss=0.07383, over 18077.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2828, pruned_loss=0.05943, over 3819547.49 frames. ], batch size: 83, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:57:25,246 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 14:57:30,929 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:57:31,936 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 14:57:38,028 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 14:58:24,228 INFO [train.py:903] (0/4) Epoch 29, batch 5800, loss[loss=0.2297, simple_loss=0.3083, pruned_loss=0.07561, over 19300.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2829, pruned_loss=0.05958, over 3817733.32 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:58:25,407 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.987e+02 6.141e+02 8.368e+02 1.662e+03, threshold=1.228e+03, percent-clipped=5.0 +2023-04-03 14:59:24,885 INFO [train.py:903] (0/4) Epoch 29, batch 5850, loss[loss=0.1639, simple_loss=0.2429, pruned_loss=0.04245, over 19370.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05937, over 3825724.68 frames. ], batch size: 48, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 14:59:27,415 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:59:49,575 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197054.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:59:50,544 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:19,099 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197079.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:24,408 INFO [train.py:903] (0/4) Epoch 29, batch 5900, loss[loss=0.2275, simple_loss=0.2993, pruned_loss=0.0778, over 13613.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05953, over 3812462.56 frames. ], batch size: 136, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:00:25,177 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-04-03 15:00:25,528 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.889e+02 5.905e+02 7.501e+02 2.168e+03, threshold=1.181e+03, percent-clipped=6.0 +2023-04-03 15:00:26,682 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 15:00:35,838 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:46,566 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 15:00:47,958 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.1718, 5.5770, 3.0366, 4.8579, 1.1135, 5.7917, 5.6600, 5.7834], + device='cuda:0'), covar=tensor([0.0390, 0.0865, 0.1917, 0.0694, 0.4071, 0.0521, 0.0755, 0.1162], + device='cuda:0'), in_proj_covar=tensor([0.0534, 0.0433, 0.0520, 0.0358, 0.0409, 0.0459, 0.0454, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:01:06,458 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:24,666 INFO [train.py:903] (0/4) Epoch 29, batch 5950, loss[loss=0.2083, simple_loss=0.2975, pruned_loss=0.05954, over 18064.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2832, pruned_loss=0.06031, over 3808040.70 frames. ], batch size: 83, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:01:32,755 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:59,523 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4726, 1.2968, 1.6168, 1.6771, 3.0687, 1.4149, 2.4102, 3.4557], + device='cuda:0'), covar=tensor([0.0541, 0.3087, 0.2905, 0.1830, 0.0738, 0.2239, 0.1201, 0.0267], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0383, 0.0402, 0.0357, 0.0385, 0.0360, 0.0400, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:02:18,285 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2127, 1.2655, 1.2409, 1.0559, 1.0692, 1.0815, 0.0969, 0.3420], + device='cuda:0'), covar=tensor([0.0802, 0.0703, 0.0507, 0.0650, 0.1303, 0.0735, 0.1536, 0.1237], + device='cuda:0'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0471, 0.0400, 0.0347, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 15:02:24,688 INFO [train.py:903] (0/4) Epoch 29, batch 6000, loss[loss=0.1926, simple_loss=0.2813, pruned_loss=0.052, over 19664.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2822, pruned_loss=0.05986, over 3822999.83 frames. ], batch size: 58, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:02:24,689 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 15:02:43,151 INFO [train.py:937] (0/4) Epoch 29, validation: loss=0.167, simple_loss=0.2662, pruned_loss=0.03392, over 944034.00 frames. +2023-04-03 15:02:43,152 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 15:02:44,361 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 4.704e+02 5.833e+02 7.372e+02 1.660e+03, threshold=1.167e+03, percent-clipped=5.0 +2023-04-03 15:02:48,714 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.06 vs. limit=5.0 +2023-04-03 15:03:23,050 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:03:44,117 INFO [train.py:903] (0/4) Epoch 29, batch 6050, loss[loss=0.1953, simple_loss=0.2831, pruned_loss=0.05375, over 18144.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2816, pruned_loss=0.05943, over 3823760.82 frames. 
], batch size: 83, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:03:55,194 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:11,936 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:45,073 INFO [train.py:903] (0/4) Epoch 29, batch 6100, loss[loss=0.1936, simple_loss=0.278, pruned_loss=0.05462, over 18066.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05958, over 3813204.82 frames. ], batch size: 83, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:04:46,195 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 5.045e+02 6.568e+02 8.070e+02 1.422e+03, threshold=1.314e+03, percent-clipped=5.0 +2023-04-03 15:05:17,438 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:42,450 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:45,474 INFO [train.py:903] (0/4) Epoch 29, batch 6150, loss[loss=0.1924, simple_loss=0.2761, pruned_loss=0.05436, over 19572.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05992, over 3816737.87 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:05:48,290 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:13,308 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 15:06:14,644 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:18,107 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0644, 1.0890, 1.2101, 1.1807, 1.4041, 1.4480, 1.4005, 0.6408], + device='cuda:0'), covar=tensor([0.1881, 0.3416, 0.2026, 0.1592, 0.1312, 0.1866, 0.1175, 0.4344], + device='cuda:0'), in_proj_covar=tensor([0.0561, 0.0679, 0.0770, 0.0517, 0.0644, 0.0550, 0.0677, 0.0581], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 15:06:28,788 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3098, 1.9696, 1.5533, 1.4498, 1.8377, 1.3298, 1.4071, 1.7848], + device='cuda:0'), covar=tensor([0.1008, 0.0887, 0.1106, 0.0855, 0.0584, 0.1267, 0.0661, 0.0473], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0323, 0.0346, 0.0277, 0.0255, 0.0349, 0.0294, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:06:46,118 INFO [train.py:903] (0/4) Epoch 29, batch 6200, loss[loss=0.2248, simple_loss=0.3021, pruned_loss=0.07375, over 19493.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.0615, over 3787015.04 frames. 
], batch size: 64, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:06:47,111 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.150e+02 5.975e+02 7.505e+02 2.002e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-03 15:06:49,795 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7832, 1.7145, 1.3533, 1.7204, 1.6109, 1.4234, 1.4717, 1.6049], + device='cuda:0'), covar=tensor([0.1322, 0.1522, 0.2011, 0.1378, 0.1524, 0.1033, 0.1962, 0.1091], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0359, 0.0319, 0.0258, 0.0308, 0.0259, 0.0322, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 15:07:45,763 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197433.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:07:46,599 INFO [train.py:903] (0/4) Epoch 29, batch 6250, loss[loss=0.2233, simple_loss=0.3109, pruned_loss=0.06783, over 19544.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.06119, over 3797314.27 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:08:18,022 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 15:08:44,657 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.38 vs. limit=5.0 +2023-04-03 15:08:48,229 INFO [train.py:903] (0/4) Epoch 29, batch 6300, loss[loss=0.1777, simple_loss=0.2524, pruned_loss=0.05151, over 19755.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06117, over 3793077.12 frames. ], batch size: 47, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:08:50,598 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.620e+02 5.575e+02 6.491e+02 1.508e+03, threshold=1.115e+03, percent-clipped=2.0 +2023-04-03 15:09:21,986 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:49,201 INFO [train.py:903] (0/4) Epoch 29, batch 6350, loss[loss=0.2079, simple_loss=0.2944, pruned_loss=0.06074, over 19308.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2848, pruned_loss=0.06129, over 3800495.23 frames. ], batch size: 66, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:09:49,506 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197534.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:52,872 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:23,553 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197563.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:49,097 INFO [train.py:903] (0/4) Epoch 29, batch 6400, loss[loss=0.2095, simple_loss=0.295, pruned_loss=0.06198, over 19386.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.06069, over 3809740.91 frames. 
], batch size: 70, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:10:52,221 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.287e+02 5.708e+02 7.339e+02 1.464e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-04-03 15:10:53,753 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:59,369 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2262, 1.2866, 1.2702, 1.0575, 1.0889, 1.1275, 0.1092, 0.4252], + device='cuda:0'), covar=tensor([0.0774, 0.0729, 0.0488, 0.0663, 0.1415, 0.0726, 0.1504, 0.1259], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0364, 0.0369, 0.0394, 0.0472, 0.0400, 0.0347, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 15:11:23,628 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197612.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:25,916 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:29,838 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4096, 1.3655, 1.3945, 1.8375, 1.4249, 1.5988, 1.5758, 1.4702], + device='cuda:0'), covar=tensor([0.0908, 0.0966, 0.1085, 0.0695, 0.0868, 0.0842, 0.0867, 0.0790], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0225, 0.0230, 0.0243, 0.0228, 0.0215, 0.0189, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 15:11:49,897 INFO [train.py:903] (0/4) Epoch 29, batch 6450, loss[loss=0.2009, simple_loss=0.2893, pruned_loss=0.05625, over 19746.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06081, over 3801571.89 frames. ], batch size: 63, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:11:56,894 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:12:27,630 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.7961, 4.3766, 2.6972, 3.8403, 1.2522, 4.3631, 4.2488, 4.3695], + device='cuda:0'), covar=tensor([0.0582, 0.0987, 0.1914, 0.0839, 0.3697, 0.0597, 0.0918, 0.1013], + device='cuda:0'), in_proj_covar=tensor([0.0532, 0.0431, 0.0519, 0.0356, 0.0409, 0.0458, 0.0451, 0.0485], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:12:37,432 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 15:12:37,699 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.0125, 4.4506, 4.7708, 4.7628, 1.6876, 4.4460, 3.8262, 4.4918], + device='cuda:0'), covar=tensor([0.1831, 0.0932, 0.0594, 0.0690, 0.6852, 0.1071, 0.0744, 0.1183], + device='cuda:0'), in_proj_covar=tensor([0.0824, 0.0793, 0.1001, 0.0876, 0.0868, 0.0766, 0.0590, 0.0932], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 15:12:49,913 INFO [train.py:903] (0/4) Epoch 29, batch 6500, loss[loss=0.2109, simple_loss=0.2943, pruned_loss=0.0637, over 19770.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2839, pruned_loss=0.0603, over 3803799.19 frames. 
], batch size: 56, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:12:52,103 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 4.970e+02 6.097e+02 8.194e+02 1.467e+03, threshold=1.219e+03, percent-clipped=8.0 +2023-04-03 15:12:58,637 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 15:13:50,290 INFO [train.py:903] (0/4) Epoch 29, batch 6550, loss[loss=0.224, simple_loss=0.3146, pruned_loss=0.06677, over 19667.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05982, over 3815505.91 frames. ], batch size: 55, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:14:24,643 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.9562, 1.2027, 1.5519, 0.6181, 1.8791, 2.3986, 2.0865, 2.5340], + device='cuda:0'), covar=tensor([0.1628, 0.4051, 0.3632, 0.3166, 0.0786, 0.0338, 0.0382, 0.0438], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0275, 0.0256, 0.0199, 0.0221, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 15:14:42,371 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197777.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:14:50,633 INFO [train.py:903] (0/4) Epoch 29, batch 6600, loss[loss=0.178, simple_loss=0.268, pruned_loss=0.044, over 19662.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2839, pruned_loss=0.06042, over 3801837.29 frames. ], batch size: 55, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:14:54,068 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.758e+02 5.997e+02 7.159e+02 2.116e+03, threshold=1.199e+03, percent-clipped=4.0 +2023-04-03 15:15:51,179 INFO [train.py:903] (0/4) Epoch 29, batch 6650, loss[loss=0.2047, simple_loss=0.291, pruned_loss=0.05918, over 19601.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2822, pruned_loss=0.05951, over 3813723.62 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:45,534 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:16:52,896 INFO [train.py:903] (0/4) Epoch 29, batch 6700, loss[loss=0.2083, simple_loss=0.2952, pruned_loss=0.06069, over 18762.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2816, pruned_loss=0.05915, over 3813597.74 frames. ], batch size: 74, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:56,372 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.093e+02 5.944e+02 7.634e+02 1.783e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-03 15:17:02,443 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197892.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:17:19,980 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197907.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:23,570 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:50,663 INFO [train.py:903] (0/4) Epoch 29, batch 6750, loss[loss=0.1983, simple_loss=0.2792, pruned_loss=0.05866, over 19762.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2813, pruned_loss=0.0587, over 3815494.96 frames. 
], batch size: 54, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:18:45,950 INFO [train.py:903] (0/4) Epoch 29, batch 6800, loss[loss=0.238, simple_loss=0.3137, pruned_loss=0.08118, over 19402.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05859, over 3813259.75 frames. ], batch size: 70, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:18:49,169 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.543e+02 4.853e+02 6.089e+02 7.661e+02 1.249e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 15:18:56,375 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:19:03,965 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-198000.pt +2023-04-03 15:19:17,309 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-29.pt +2023-04-03 15:19:33,599 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 15:19:34,669 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 15:19:37,004 INFO [train.py:903] (0/4) Epoch 30, batch 0, loss[loss=0.2249, simple_loss=0.3002, pruned_loss=0.0748, over 19771.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3002, pruned_loss=0.0748, over 19771.00 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:19:37,004 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 15:19:49,312 INFO [train.py:937] (0/4) Epoch 30, validation: loss=0.167, simple_loss=0.2667, pruned_loss=0.03362, over 944034.00 frames. +2023-04-03 15:19:49,313 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 15:20:02,508 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198022.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:20:03,317 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 15:20:25,396 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 15:20:51,425 INFO [train.py:903] (0/4) Epoch 30, batch 50, loss[loss=0.229, simple_loss=0.3054, pruned_loss=0.07632, over 17614.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2761, pruned_loss=0.05579, over 872107.73 frames. ], batch size: 101, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:21:20,382 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 4.959e+02 5.838e+02 7.711e+02 1.808e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 15:21:26,250 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 15:21:36,610 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198099.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:21:52,097 INFO [train.py:903] (0/4) Epoch 30, batch 100, loss[loss=0.1592, simple_loss=0.234, pruned_loss=0.04222, over 19741.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2775, pruned_loss=0.05627, over 1545461.15 frames. ], batch size: 45, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:22:04,498 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-03 15:22:37,742 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198148.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:22:54,433 INFO [train.py:903] (0/4) Epoch 30, batch 150, loss[loss=0.1766, simple_loss=0.2654, pruned_loss=0.0439, over 19672.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2801, pruned_loss=0.05747, over 2047784.52 frames. ], batch size: 60, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:07,463 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198173.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:23:25,369 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 4.863e+02 5.858e+02 7.546e+02 1.579e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-03 15:23:52,163 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 15:23:55,535 INFO [train.py:903] (0/4) Epoch 30, batch 200, loss[loss=0.2071, simple_loss=0.2887, pruned_loss=0.06275, over 19640.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2784, pruned_loss=0.05684, over 2453991.56 frames. ], batch size: 53, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:58,278 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5472, 1.6390, 1.8638, 1.8355, 2.7768, 2.3637, 2.9232, 1.3988], + device='cuda:0'), covar=tensor([0.2634, 0.4509, 0.2962, 0.1956, 0.1540, 0.2301, 0.1554, 0.4659], + device='cuda:0'), in_proj_covar=tensor([0.0561, 0.0681, 0.0770, 0.0516, 0.0642, 0.0552, 0.0676, 0.0579], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 15:24:42,391 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:47,668 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:57,539 INFO [train.py:903] (0/4) Epoch 30, batch 250, loss[loss=0.2272, simple_loss=0.3003, pruned_loss=0.07709, over 19470.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2792, pruned_loss=0.05787, over 2759369.44 frames. ], batch size: 49, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:25:13,978 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:19,546 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198278.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:29,212 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.793e+02 5.720e+02 7.294e+02 1.524e+03, threshold=1.144e+03, percent-clipped=6.0 +2023-04-03 15:25:49,137 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:26:01,281 INFO [train.py:903] (0/4) Epoch 30, batch 300, loss[loss=0.1592, simple_loss=0.2396, pruned_loss=0.03939, over 19332.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2789, pruned_loss=0.0579, over 3004329.50 frames. ], batch size: 44, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:03,946 INFO [train.py:903] (0/4) Epoch 30, batch 350, loss[loss=0.1795, simple_loss=0.2688, pruned_loss=0.04515, over 19863.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2792, pruned_loss=0.05873, over 3170505.65 frames. 
], batch size: 52, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:06,264 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 15:27:12,103 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:27:33,409 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.876e+02 6.987e+02 8.997e+02 2.429e+03, threshold=1.397e+03, percent-clipped=9.0 +2023-04-03 15:27:48,237 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5972, 1.4318, 2.0364, 1.7728, 3.1165, 4.4695, 4.3289, 4.8586], + device='cuda:0'), covar=tensor([0.1563, 0.4094, 0.3557, 0.2394, 0.0680, 0.0242, 0.0193, 0.0260], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0331, 0.0364, 0.0272, 0.0254, 0.0198, 0.0219, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 15:28:04,751 INFO [train.py:903] (0/4) Epoch 30, batch 400, loss[loss=0.1762, simple_loss=0.2611, pruned_loss=0.04566, over 19778.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2812, pruned_loss=0.05957, over 3315189.04 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:28:08,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([5.7090, 5.2824, 3.1311, 4.6619, 1.4578, 5.3080, 5.1562, 5.2584], + device='cuda:0'), covar=tensor([0.0400, 0.0727, 0.1820, 0.0635, 0.3548, 0.0550, 0.0810, 0.0918], + device='cuda:0'), in_proj_covar=tensor([0.0535, 0.0435, 0.0523, 0.0359, 0.0412, 0.0460, 0.0454, 0.0488], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:28:44,693 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:28:56,374 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.4188, 4.0284, 2.6079, 3.5882, 0.8729, 4.0411, 3.8822, 3.9667], + device='cuda:0'), covar=tensor([0.0650, 0.1025, 0.2058, 0.0860, 0.3976, 0.0698, 0.0981, 0.1182], + device='cuda:0'), in_proj_covar=tensor([0.0533, 0.0434, 0.0522, 0.0358, 0.0411, 0.0459, 0.0453, 0.0487], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:29:06,540 INFO [train.py:903] (0/4) Epoch 30, batch 450, loss[loss=0.2052, simple_loss=0.2831, pruned_loss=0.06359, over 19769.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2805, pruned_loss=0.059, over 3424628.58 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:29:38,786 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.102e+02 6.085e+02 7.779e+02 1.785e+03, threshold=1.217e+03, percent-clipped=4.0 +2023-04-03 15:29:40,854 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 15:29:42,036 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 15:30:08,498 INFO [train.py:903] (0/4) Epoch 30, batch 500, loss[loss=0.1856, simple_loss=0.271, pruned_loss=0.05004, over 19774.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2816, pruned_loss=0.05939, over 3521856.30 frames. 
], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:30:15,668 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:06,286 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:11,502 INFO [train.py:903] (0/4) Epoch 30, batch 550, loss[loss=0.1754, simple_loss=0.2593, pruned_loss=0.04572, over 19749.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.281, pruned_loss=0.05922, over 3599555.86 frames. ], batch size: 51, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:31:40,924 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.928e+02 6.417e+02 8.667e+02 2.852e+03, threshold=1.283e+03, percent-clipped=10.0 +2023-04-03 15:32:13,288 INFO [train.py:903] (0/4) Epoch 30, batch 600, loss[loss=0.2101, simple_loss=0.2965, pruned_loss=0.06184, over 19769.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2799, pruned_loss=0.0584, over 3652335.21 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:32:28,418 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198625.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:38,384 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:52,871 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 15:33:01,369 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198650.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:33:14,980 INFO [train.py:903] (0/4) Epoch 30, batch 650, loss[loss=0.2111, simple_loss=0.295, pruned_loss=0.06357, over 19524.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2793, pruned_loss=0.05779, over 3678609.31 frames. ], batch size: 56, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:33:46,592 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.000e+02 5.780e+02 7.067e+02 2.104e+03, threshold=1.156e+03, percent-clipped=2.0 +2023-04-03 15:34:16,543 INFO [train.py:903] (0/4) Epoch 30, batch 700, loss[loss=0.1804, simple_loss=0.2677, pruned_loss=0.04654, over 18251.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2801, pruned_loss=0.05823, over 3711478.10 frames. ], batch size: 84, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:18,689 INFO [train.py:903] (0/4) Epoch 30, batch 750, loss[loss=0.2246, simple_loss=0.2868, pruned_loss=0.08118, over 19777.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2808, pruned_loss=0.05858, over 3741736.86 frames. 
], batch size: 45, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:29,180 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3228, 1.4238, 1.5733, 1.4976, 1.7428, 1.8157, 1.8177, 0.7196], + device='cuda:0'), covar=tensor([0.2543, 0.4344, 0.2633, 0.2013, 0.1715, 0.2423, 0.1532, 0.4862], + device='cuda:0'), in_proj_covar=tensor([0.0559, 0.0679, 0.0766, 0.0515, 0.0640, 0.0551, 0.0674, 0.0578], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 15:35:51,148 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.908e+02 5.891e+02 7.920e+02 1.978e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 15:36:04,801 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 15:36:05,385 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1676, 2.1823, 2.5408, 2.8421, 2.1590, 2.6675, 2.5059, 2.3025], + device='cuda:0'), covar=tensor([0.4408, 0.4474, 0.2065, 0.2944, 0.4896, 0.2650, 0.5126, 0.3599], + device='cuda:0'), in_proj_covar=tensor([0.0946, 0.1029, 0.0750, 0.0960, 0.0926, 0.0866, 0.0866, 0.0815], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 15:36:23,528 INFO [train.py:903] (0/4) Epoch 30, batch 800, loss[loss=0.2072, simple_loss=0.2862, pruned_loss=0.06406, over 19790.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2812, pruned_loss=0.05837, over 3772742.10 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:36:26,289 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:36:39,843 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 15:36:56,797 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198839.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:25,153 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:26,192 INFO [train.py:903] (0/4) Epoch 30, batch 850, loss[loss=0.2004, simple_loss=0.2881, pruned_loss=0.05641, over 18883.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2817, pruned_loss=0.05875, over 3793097.01 frames. 
], batch size: 74, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:37:35,492 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198870.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:55,560 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 4.755e+02 5.781e+02 7.125e+02 1.514e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 15:37:59,378 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.6657, 2.6246, 2.3261, 2.7110, 2.4258, 2.2320, 2.0867, 2.4842], + device='cuda:0'), covar=tensor([0.0978, 0.1453, 0.1392, 0.1070, 0.1388, 0.0551, 0.1455, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0360, 0.0321, 0.0259, 0.0311, 0.0260, 0.0324, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 15:38:13,706 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:38:20,330 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 15:38:26,181 INFO [train.py:903] (0/4) Epoch 30, batch 900, loss[loss=0.1868, simple_loss=0.2737, pruned_loss=0.04997, over 19696.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2804, pruned_loss=0.05794, over 3816051.52 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:07,730 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:27,822 INFO [train.py:903] (0/4) Epoch 30, batch 950, loss[loss=0.1904, simple_loss=0.2827, pruned_loss=0.04907, over 19764.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05787, over 3820819.08 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:32,359 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 15:39:46,217 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:48,040 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:59,668 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.052e+02 6.305e+02 7.870e+02 1.588e+03, threshold=1.261e+03, percent-clipped=4.0 +2023-04-03 15:40:30,256 INFO [train.py:903] (0/4) Epoch 30, batch 1000, loss[loss=0.1954, simple_loss=0.2747, pruned_loss=0.0581, over 19745.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2803, pruned_loss=0.05767, over 3827067.28 frames. ], batch size: 51, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:40:46,862 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 15:41:23,982 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 15:41:33,182 INFO [train.py:903] (0/4) Epoch 30, batch 1050, loss[loss=0.1912, simple_loss=0.2818, pruned_loss=0.05028, over 19327.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2797, pruned_loss=0.05785, over 3832974.96 frames. 
], batch size: 66, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:42:03,435 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.270e+02 4.631e+02 5.961e+02 7.864e+02 1.953e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 15:42:04,592 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 15:42:07,364 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0583, 1.2081, 1.4774, 0.6231, 1.9376, 2.4832, 2.1532, 2.5899], + device='cuda:0'), covar=tensor([0.1488, 0.3943, 0.3690, 0.2991, 0.0713, 0.0290, 0.0354, 0.0390], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0333, 0.0365, 0.0273, 0.0256, 0.0199, 0.0220, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 15:42:10,992 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:42:35,182 INFO [train.py:903] (0/4) Epoch 30, batch 1100, loss[loss=0.1952, simple_loss=0.2849, pruned_loss=0.05281, over 19612.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2802, pruned_loss=0.05793, over 3835497.46 frames. ], batch size: 57, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:43:38,415 INFO [train.py:903] (0/4) Epoch 30, batch 1150, loss[loss=0.1803, simple_loss=0.2599, pruned_loss=0.05036, over 19373.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2793, pruned_loss=0.05769, over 3835161.21 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:03,851 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.9060, 1.3963, 1.6240, 1.4640, 3.5328, 1.1311, 2.4999, 3.9724], + device='cuda:0'), covar=tensor([0.0481, 0.2982, 0.2781, 0.2024, 0.0631, 0.2689, 0.1403, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0426, 0.0381, 0.0400, 0.0355, 0.0385, 0.0359, 0.0401, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:44:10,294 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.110e+02 5.915e+02 7.639e+02 1.372e+03, threshold=1.183e+03, percent-clipped=6.0 +2023-04-03 15:44:18,764 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:44:41,424 INFO [train.py:903] (0/4) Epoch 30, batch 1200, loss[loss=0.1981, simple_loss=0.284, pruned_loss=0.05607, over 19666.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2806, pruned_loss=0.05825, over 3847549.88 frames. ], batch size: 60, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:43,951 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:06,864 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:14,390 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 15:45:22,181 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:38,781 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:44,964 INFO [train.py:903] (0/4) Epoch 30, batch 1250, loss[loss=0.2132, simple_loss=0.2967, pruned_loss=0.06484, over 19746.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2808, pruned_loss=0.05804, over 3843948.83 frames. ], batch size: 63, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:46:14,639 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.004e+02 5.991e+02 7.632e+02 1.398e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-03 15:46:17,138 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:46:46,512 INFO [train.py:903] (0/4) Epoch 30, batch 1300, loss[loss=0.1751, simple_loss=0.2565, pruned_loss=0.04678, over 19606.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2808, pruned_loss=0.05811, over 3845229.27 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:47:07,591 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:22,765 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6082, 2.3373, 1.8384, 1.5834, 2.1589, 1.5186, 1.4254, 2.0383], + device='cuda:0'), covar=tensor([0.1235, 0.0870, 0.1132, 0.1012, 0.0660, 0.1334, 0.0873, 0.0577], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0320, 0.0344, 0.0275, 0.0253, 0.0348, 0.0290, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:47:32,054 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:46,062 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199360.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:47,889 INFO [train.py:903] (0/4) Epoch 30, batch 1350, loss[loss=0.2087, simple_loss=0.3004, pruned_loss=0.05856, over 19085.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2799, pruned_loss=0.05791, over 3839793.06 frames. 
], batch size: 69, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:03,226 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:13,520 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2389, 2.0544, 1.8703, 2.2054, 1.9018, 1.8284, 1.7198, 2.0879], + device='cuda:0'), covar=tensor([0.1015, 0.1415, 0.1491, 0.1168, 0.1484, 0.0595, 0.1594, 0.0743], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0360, 0.0321, 0.0259, 0.0310, 0.0260, 0.0323, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 15:48:20,885 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 4.672e+02 5.739e+02 7.239e+02 1.592e+03, threshold=1.148e+03, percent-clipped=7.0 +2023-04-03 15:48:40,982 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199404.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:50,926 INFO [train.py:903] (0/4) Epoch 30, batch 1400, loss[loss=0.198, simple_loss=0.2872, pruned_loss=0.05434, over 19677.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2801, pruned_loss=0.05788, over 3823658.17 frames. ], batch size: 60, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:58,114 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199418.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:49:55,030 INFO [train.py:903] (0/4) Epoch 30, batch 1450, loss[loss=0.1958, simple_loss=0.2791, pruned_loss=0.05629, over 19676.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2795, pruned_loss=0.05787, over 3820438.83 frames. ], batch size: 58, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:49:56,195 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 15:50:11,579 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199476.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:50:25,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.040e+02 6.121e+02 7.927e+02 1.972e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 15:50:56,161 INFO [train.py:903] (0/4) Epoch 30, batch 1500, loss[loss=0.1854, simple_loss=0.2749, pruned_loss=0.04792, over 19648.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2798, pruned_loss=0.05814, over 3816021.92 frames. 
], batch size: 55, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:50:56,473 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199512.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:51:28,282 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:51:46,243 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6889, 1.7076, 1.8569, 1.5387, 4.2440, 1.2121, 2.8212, 4.5309], + device='cuda:0'), covar=tensor([0.0553, 0.2845, 0.2860, 0.2262, 0.0815, 0.2868, 0.1479, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0388, 0.0362, 0.0403, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 15:51:47,398 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199553.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:51:58,248 INFO [train.py:903] (0/4) Epoch 30, batch 1550, loss[loss=0.2109, simple_loss=0.2931, pruned_loss=0.06434, over 19657.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2794, pruned_loss=0.05781, over 3826858.24 frames. ], batch size: 55, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:52:28,222 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:52:31,301 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.717e+02 5.804e+02 6.903e+02 1.639e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 15:52:45,778 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1862, 2.8318, 2.2427, 2.3477, 2.0455, 2.4727, 1.1970, 2.0365], + device='cuda:0'), covar=tensor([0.0732, 0.0680, 0.0739, 0.1227, 0.1170, 0.1182, 0.1539, 0.1204], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0395, 0.0472, 0.0398, 0.0347, 0.0348], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 15:52:59,336 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:01,355 INFO [train.py:903] (0/4) Epoch 30, batch 1600, loss[loss=0.1738, simple_loss=0.2519, pruned_loss=0.04791, over 19803.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2797, pruned_loss=0.05813, over 3824324.93 frames. ], batch size: 49, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:53:06,435 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:26,404 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 15:53:37,680 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:53,145 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:01,260 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199660.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:03,854 INFO [train.py:903] (0/4) Epoch 30, batch 1650, loss[loss=0.2191, simple_loss=0.3067, pruned_loss=0.06574, over 18847.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2798, pruned_loss=0.05813, over 3827715.06 frames. 
], batch size: 74, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:54:32,751 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199685.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:35,822 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 4.724e+02 5.858e+02 7.730e+02 3.208e+03, threshold=1.172e+03, percent-clipped=5.0 +2023-04-03 15:55:06,080 INFO [train.py:903] (0/4) Epoch 30, batch 1700, loss[loss=0.1741, simple_loss=0.2522, pruned_loss=0.04797, over 19366.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2795, pruned_loss=0.05761, over 3819352.24 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:55:24,025 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6804, 1.6778, 1.6139, 1.4445, 1.3360, 1.4156, 0.3305, 0.7149], + device='cuda:0'), covar=tensor([0.0730, 0.0704, 0.0519, 0.0728, 0.1387, 0.0837, 0.1420, 0.1269], + device='cuda:0'), in_proj_covar=tensor([0.0371, 0.0366, 0.0373, 0.0398, 0.0476, 0.0402, 0.0350, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 15:55:40,547 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 15:55:47,583 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 15:56:09,058 INFO [train.py:903] (0/4) Epoch 30, batch 1750, loss[loss=0.208, simple_loss=0.2789, pruned_loss=0.06859, over 19775.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2804, pruned_loss=0.0582, over 3824623.98 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:56:09,219 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199762.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:56:42,934 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.893e+02 6.270e+02 7.375e+02 1.627e+03, threshold=1.254e+03, percent-clipped=1.0 +2023-04-03 15:57:00,379 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.2394, 4.3147, 4.8086, 4.8246, 2.7390, 4.5060, 4.0286, 4.5826], + device='cuda:0'), covar=tensor([0.1516, 0.3121, 0.0633, 0.0695, 0.4853, 0.1216, 0.0651, 0.1052], + device='cuda:0'), in_proj_covar=tensor([0.0829, 0.0803, 0.1015, 0.0891, 0.0877, 0.0776, 0.0600, 0.0943], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 15:57:11,412 INFO [train.py:903] (0/4) Epoch 30, batch 1800, loss[loss=0.1997, simple_loss=0.2838, pruned_loss=0.05781, over 19451.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2799, pruned_loss=0.05826, over 3808683.04 frames. 
], batch size: 70, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:57:21,128 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199820.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:02,048 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3152, 1.2958, 1.8566, 1.5434, 2.9677, 4.5721, 4.4414, 5.0475], + device='cuda:0'), covar=tensor([0.1850, 0.4401, 0.3735, 0.2574, 0.0716, 0.0228, 0.0209, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0335, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 15:58:06,485 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199856.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:58:08,434 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 15:58:13,037 INFO [train.py:903] (0/4) Epoch 30, batch 1850, loss[loss=0.2362, simple_loss=0.3069, pruned_loss=0.08271, over 19601.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2798, pruned_loss=0.05806, over 3817486.75 frames. ], batch size: 61, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:58:17,124 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 15:58:30,832 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 15:58:32,555 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199877.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:46,820 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.043e+02 6.338e+02 8.325e+02 2.069e+03, threshold=1.268e+03, percent-clipped=7.0 +2023-04-03 15:58:46,868 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 15:58:57,062 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199897.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:59:12,917 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:15,995 INFO [train.py:903] (0/4) Epoch 30, batch 1900, loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07113, over 19588.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2809, pruned_loss=0.05896, over 3809785.28 frames. ], batch size: 57, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:59:21,954 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.85 vs. limit=5.0 +2023-04-03 15:59:33,274 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 15:59:33,545 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199927.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:38,037 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 15:59:42,738 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:43,900 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199935.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:00:03,959 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 16:00:17,079 INFO [train.py:903] (0/4) Epoch 30, batch 1950, loss[loss=0.2135, simple_loss=0.2905, pruned_loss=0.06824, over 19747.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2806, pruned_loss=0.05877, over 3827638.14 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 16:00:28,492 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199971.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:00:51,089 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 5.211e+02 6.525e+02 7.685e+02 1.771e+03, threshold=1.305e+03, percent-clipped=2.0 +2023-04-03 16:01:05,405 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-200000.pt +2023-04-03 16:01:21,200 INFO [train.py:903] (0/4) Epoch 30, batch 2000, loss[loss=0.1797, simple_loss=0.2592, pruned_loss=0.05007, over 19620.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2808, pruned_loss=0.05884, over 3828056.15 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:01:21,565 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200012.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:02:21,494 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 16:02:23,756 INFO [train.py:903] (0/4) Epoch 30, batch 2050, loss[loss=0.1963, simple_loss=0.2741, pruned_loss=0.05926, over 19747.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2812, pruned_loss=0.05902, over 3831160.84 frames. ], batch size: 51, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:02:43,238 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 16:02:43,273 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 16:02:57,798 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 4.986e+02 6.252e+02 8.206e+02 1.738e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 16:03:03,565 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 16:03:21,109 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3554, 1.4220, 1.5205, 1.5187, 1.7171, 1.8160, 1.7825, 0.5660], + device='cuda:0'), covar=tensor([0.2670, 0.4468, 0.2910, 0.2107, 0.1824, 0.2586, 0.1638, 0.5408], + device='cuda:0'), in_proj_covar=tensor([0.0559, 0.0679, 0.0768, 0.0516, 0.0640, 0.0550, 0.0673, 0.0580], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 16:03:26,468 INFO [train.py:903] (0/4) Epoch 30, batch 2100, loss[loss=0.2132, simple_loss=0.2815, pruned_loss=0.07246, over 13386.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2814, pruned_loss=0.05938, over 3809446.76 frames. 
], batch size: 137, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:03:32,776 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4820, 2.1009, 1.5691, 1.4137, 1.9124, 1.1999, 1.4767, 1.8982], + device='cuda:0'), covar=tensor([0.1034, 0.0799, 0.1199, 0.0908, 0.0675, 0.1485, 0.0729, 0.0507], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0318, 0.0343, 0.0275, 0.0252, 0.0347, 0.0289, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:03:52,976 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200133.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:03:57,170 WARNING [train.py:1073] (0/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 16:04:18,934 WARNING [train.py:1073] (0/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 16:04:24,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:04:29,061 INFO [train.py:903] (0/4) Epoch 30, batch 2150, loss[loss=0.2519, simple_loss=0.3188, pruned_loss=0.09252, over 13002.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.05932, over 3809361.14 frames. ], batch size: 136, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:05:02,783 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.819e+02 5.957e+02 8.119e+02 2.108e+03, threshold=1.191e+03, percent-clipped=2.0 +2023-04-03 16:05:05,506 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:10,684 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200195.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:05:18,887 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2991, 1.9759, 1.5909, 1.3739, 1.8153, 1.2725, 1.3695, 1.8022], + device='cuda:0'), covar=tensor([0.0945, 0.0856, 0.1167, 0.0930, 0.0696, 0.1353, 0.0651, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0343, 0.0275, 0.0252, 0.0346, 0.0289, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:05:30,915 INFO [train.py:903] (0/4) Epoch 30, batch 2200, loss[loss=0.2026, simple_loss=0.2923, pruned_loss=0.05646, over 19788.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05972, over 3811871.14 frames. 
], batch size: 54, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:05:37,225 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:50,090 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200227.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:06:11,241 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.5075, 1.4880, 1.6258, 1.5267, 3.1106, 1.2367, 2.4522, 3.5500], + device='cuda:0'), covar=tensor([0.0482, 0.2708, 0.2893, 0.1855, 0.0648, 0.2460, 0.1220, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0385, 0.0406, 0.0358, 0.0389, 0.0363, 0.0405, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:06:19,640 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1311, 1.8454, 1.9874, 2.7415, 1.9784, 2.2116, 2.2344, 2.0826], + device='cuda:0'), covar=tensor([0.0793, 0.0876, 0.0925, 0.0716, 0.0901, 0.0782, 0.0933, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0224, 0.0229, 0.0242, 0.0228, 0.0216, 0.0189, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 16:06:20,833 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200252.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:33,037 INFO [train.py:903] (0/4) Epoch 30, batch 2250, loss[loss=0.1825, simple_loss=0.2638, pruned_loss=0.05061, over 19860.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2829, pruned_loss=0.05991, over 3807230.28 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:06:41,166 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:44,364 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:08,305 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.298e+02 6.530e+02 8.599e+02 1.543e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-03 16:07:12,065 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200293.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:07:17,586 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:35,304 INFO [train.py:903] (0/4) Epoch 30, batch 2300, loss[loss=0.2146, simple_loss=0.2987, pruned_loss=0.06522, over 19291.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06013, over 3812799.06 frames. ], batch size: 66, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:07:51,139 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 16:08:37,753 INFO [train.py:903] (0/4) Epoch 30, batch 2350, loss[loss=0.1757, simple_loss=0.2564, pruned_loss=0.04752, over 19796.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05987, over 3816309.69 frames. 
], batch size: 49, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:09:07,650 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:09:12,873 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.379e+02 4.836e+02 5.833e+02 7.163e+02 1.475e+03, threshold=1.167e+03, percent-clipped=1.0 +2023-04-03 16:09:21,112 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 16:09:38,177 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 16:09:39,769 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8016, 1.7048, 1.6899, 2.4203, 1.9718, 2.0989, 2.0900, 1.8908], + device='cuda:0'), covar=tensor([0.0770, 0.0843, 0.0933, 0.0619, 0.0773, 0.0691, 0.0812, 0.0642], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0227, 0.0215, 0.0188, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 16:09:40,547 INFO [train.py:903] (0/4) Epoch 30, batch 2400, loss[loss=0.241, simple_loss=0.3151, pruned_loss=0.08347, over 17400.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.05965, over 3824367.60 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:10:09,850 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-04-03 16:10:43,362 INFO [train.py:903] (0/4) Epoch 30, batch 2450, loss[loss=0.2149, simple_loss=0.2996, pruned_loss=0.06514, over 19399.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05939, over 3824879.47 frames. ], batch size: 70, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:11:19,106 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.121e+02 5.946e+02 7.814e+02 2.121e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-03 16:11:46,457 INFO [train.py:903] (0/4) Epoch 30, batch 2500, loss[loss=0.2122, simple_loss=0.2902, pruned_loss=0.06714, over 18260.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2819, pruned_loss=0.05841, over 3837051.94 frames. ], batch size: 84, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:12:20,432 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200539.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:12:48,790 INFO [train.py:903] (0/4) Epoch 30, batch 2550, loss[loss=0.1615, simple_loss=0.2452, pruned_loss=0.0389, over 19123.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2817, pruned_loss=0.05843, over 3830576.42 frames. ], batch size: 42, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:13:23,054 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 4.923e+02 6.163e+02 7.973e+02 2.573e+03, threshold=1.233e+03, percent-clipped=12.0 +2023-04-03 16:13:47,194 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 16:13:51,623 INFO [train.py:903] (0/4) Epoch 30, batch 2600, loss[loss=0.1922, simple_loss=0.2826, pruned_loss=0.05088, over 18791.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2823, pruned_loss=0.05864, over 3836716.69 frames. 
], batch size: 74, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:09,391 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([0.8892, 1.3659, 1.1150, 1.0369, 1.1870, 1.0441, 0.9228, 1.2388], + device='cuda:0'), covar=tensor([0.0748, 0.0991, 0.1229, 0.0901, 0.0693, 0.1475, 0.0712, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0318, 0.0342, 0.0274, 0.0251, 0.0346, 0.0289, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:14:28,338 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:29,742 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:44,503 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200654.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:14:54,171 INFO [train.py:903] (0/4) Epoch 30, batch 2650, loss[loss=0.2565, simple_loss=0.3177, pruned_loss=0.09761, over 13263.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2813, pruned_loss=0.05853, over 3820649.54 frames. ], batch size: 136, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:55,575 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200663.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:00,342 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200667.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:16,869 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 16:15:28,264 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 4.881e+02 6.412e+02 8.116e+02 1.737e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 16:15:52,122 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2002, 1.3003, 1.2131, 1.0657, 1.0309, 1.0797, 0.1873, 0.3750], + device='cuda:0'), covar=tensor([0.0849, 0.0868, 0.0573, 0.0742, 0.1670, 0.0969, 0.1618, 0.1523], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0364, 0.0371, 0.0395, 0.0475, 0.0401, 0.0348, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:15:55,154 INFO [train.py:903] (0/4) Epoch 30, batch 2700, loss[loss=0.1906, simple_loss=0.2694, pruned_loss=0.05587, over 19487.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2804, pruned_loss=0.05828, over 3825806.49 frames. ], batch size: 49, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:16:51,764 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:16:59,614 INFO [train.py:903] (0/4) Epoch 30, batch 2750, loss[loss=0.2005, simple_loss=0.287, pruned_loss=0.05696, over 19684.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2798, pruned_loss=0.05774, over 3818786.33 frames. 
], batch size: 59, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:17:22,995 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.9401, 1.8966, 1.8537, 1.6693, 1.4737, 1.5880, 0.5401, 0.9378], + device='cuda:0'), covar=tensor([0.0681, 0.0677, 0.0455, 0.0793, 0.1332, 0.0879, 0.1373, 0.1155], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0363, 0.0370, 0.0394, 0.0474, 0.0400, 0.0347, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:17:34,086 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.266e+02 4.594e+02 5.767e+02 6.844e+02 1.269e+03, threshold=1.153e+03, percent-clipped=0.0 +2023-04-03 16:18:02,570 INFO [train.py:903] (0/4) Epoch 30, batch 2800, loss[loss=0.1913, simple_loss=0.2845, pruned_loss=0.04907, over 19658.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2809, pruned_loss=0.0584, over 3799620.43 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:05,173 INFO [train.py:903] (0/4) Epoch 30, batch 2850, loss[loss=0.1837, simple_loss=0.2634, pruned_loss=0.05199, over 19580.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2799, pruned_loss=0.05775, over 3814007.58 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:26,053 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-03 16:19:34,827 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4329, 1.4485, 1.6712, 1.6397, 3.0610, 1.2039, 2.4336, 3.4557], + device='cuda:0'), covar=tensor([0.0498, 0.2829, 0.2797, 0.1783, 0.0654, 0.2452, 0.1280, 0.0268], + device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0384, 0.0405, 0.0356, 0.0388, 0.0363, 0.0404, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:19:39,284 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.865e+02 5.683e+02 7.765e+02 1.857e+03, threshold=1.137e+03, percent-clipped=6.0 +2023-04-03 16:20:04,653 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200910.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:20:06,528 INFO [train.py:903] (0/4) Epoch 30, batch 2900, loss[loss=0.2035, simple_loss=0.2923, pruned_loss=0.05733, over 19333.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2793, pruned_loss=0.05735, over 3822582.25 frames. ], batch size: 70, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:20:07,764 WARNING [train.py:1073] (0/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 16:20:36,315 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200935.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:21:08,767 INFO [train.py:903] (0/4) Epoch 30, batch 2950, loss[loss=0.1993, simple_loss=0.2745, pruned_loss=0.06204, over 19460.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.281, pruned_loss=0.05838, over 3818708.56 frames. 
], batch size: 49, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:21:14,435 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.3662, 5.8310, 3.8589, 5.1092, 1.7569, 5.9972, 5.7842, 6.0724], + device='cuda:0'), covar=tensor([0.0338, 0.0802, 0.1486, 0.0761, 0.3656, 0.0473, 0.0777, 0.0902], + device='cuda:0'), in_proj_covar=tensor([0.0537, 0.0434, 0.0522, 0.0361, 0.0411, 0.0461, 0.0454, 0.0491], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:21:36,039 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.6204, 4.2390, 2.9111, 3.7311, 1.4307, 4.2101, 4.0450, 4.1943], + device='cuda:0'), covar=tensor([0.0665, 0.0923, 0.1877, 0.0860, 0.3494, 0.0681, 0.0992, 0.1242], + device='cuda:0'), in_proj_covar=tensor([0.0538, 0.0435, 0.0522, 0.0361, 0.0412, 0.0461, 0.0454, 0.0491], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:21:44,100 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.844e+02 6.179e+02 7.399e+02 1.416e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-03 16:22:05,886 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=4.95 vs. limit=5.0 +2023-04-03 16:22:06,373 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:11,917 INFO [train.py:903] (0/4) Epoch 30, batch 3000, loss[loss=0.2064, simple_loss=0.2909, pruned_loss=0.0609, over 18812.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2811, pruned_loss=0.0587, over 3813620.97 frames. ], batch size: 74, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:22:11,917 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 16:22:26,185 INFO [train.py:937] (0/4) Epoch 30, validation: loss=0.1666, simple_loss=0.266, pruned_loss=0.03357, over 944034.00 frames. +2023-04-03 16:22:26,186 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 16:22:26,650 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201012.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:32,380 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 16:22:57,393 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:23:27,833 INFO [train.py:903] (0/4) Epoch 30, batch 3050, loss[loss=0.1988, simple_loss=0.2737, pruned_loss=0.06199, over 19752.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2821, pruned_loss=0.05923, over 3809623.78 frames. ], batch size: 47, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:24:01,432 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:24:03,232 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.896e+02 5.871e+02 7.483e+02 2.064e+03, threshold=1.174e+03, percent-clipped=5.0 +2023-04-03 16:24:32,128 INFO [train.py:903] (0/4) Epoch 30, batch 3100, loss[loss=0.1833, simple_loss=0.2633, pruned_loss=0.05164, over 19473.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2819, pruned_loss=0.05886, over 3814234.05 frames. 
], batch size: 49, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:24:43,634 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:25:33,510 INFO [train.py:903] (0/4) Epoch 30, batch 3150, loss[loss=0.1979, simple_loss=0.2881, pruned_loss=0.0539, over 19360.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2816, pruned_loss=0.05872, over 3810997.60 frames. ], batch size: 66, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:25:55,006 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.3325, 0.9464, 1.3649, 1.3458, 2.7208, 1.0973, 2.3930, 3.1857], + device='cuda:0'), covar=tensor([0.0759, 0.4078, 0.3404, 0.2382, 0.1178, 0.2981, 0.1419, 0.0450], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0385, 0.0404, 0.0356, 0.0388, 0.0362, 0.0405, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:26:02,514 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 16:26:10,329 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 4.625e+02 5.752e+02 7.402e+02 1.953e+03, threshold=1.150e+03, percent-clipped=5.0 +2023-04-03 16:26:35,927 INFO [train.py:903] (0/4) Epoch 30, batch 3200, loss[loss=0.1742, simple_loss=0.255, pruned_loss=0.04673, over 19755.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.281, pruned_loss=0.05814, over 3813566.96 frames. ], batch size: 47, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:40,087 INFO [train.py:903] (0/4) Epoch 30, batch 3250, loss[loss=0.2124, simple_loss=0.2966, pruned_loss=0.06408, over 19523.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2814, pruned_loss=0.05808, over 3823998.58 frames. ], batch size: 54, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:51,501 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:14,884 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.466e+02 4.729e+02 5.856e+02 7.119e+02 1.424e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 16:28:16,382 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:38,652 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4654, 1.5642, 1.8362, 1.7463, 2.7358, 2.2686, 2.8806, 1.3872], + device='cuda:0'), covar=tensor([0.2583, 0.4492, 0.2974, 0.2058, 0.1499, 0.2271, 0.1470, 0.4668], + device='cuda:0'), in_proj_covar=tensor([0.0555, 0.0678, 0.0765, 0.0514, 0.0636, 0.0548, 0.0671, 0.0580], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:28:42,757 INFO [train.py:903] (0/4) Epoch 30, batch 3300, loss[loss=0.2117, simple_loss=0.2923, pruned_loss=0.06551, over 17576.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2814, pruned_loss=0.05826, over 3814906.49 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:28:49,701 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 16:29:19,893 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2571, 1.1697, 1.1788, 1.5708, 1.3681, 1.3669, 1.3795, 1.2720], + device='cuda:0'), covar=tensor([0.0718, 0.0790, 0.0896, 0.0568, 0.0883, 0.0757, 0.0816, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0239, 0.0226, 0.0215, 0.0186, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 16:29:44,727 INFO [train.py:903] (0/4) Epoch 30, batch 3350, loss[loss=0.1919, simple_loss=0.2707, pruned_loss=0.05649, over 19725.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2817, pruned_loss=0.05873, over 3810889.96 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:30:05,895 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:21,793 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.184e+02 4.772e+02 5.759e+02 7.012e+02 1.305e+03, threshold=1.152e+03, percent-clipped=2.0 +2023-04-03 16:30:29,192 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.2197, 2.1220, 2.0121, 1.8428, 1.7392, 1.8637, 0.6812, 1.2057], + device='cuda:0'), covar=tensor([0.0690, 0.0694, 0.0556, 0.0980, 0.1315, 0.0981, 0.1531, 0.1291], + device='cuda:0'), in_proj_covar=tensor([0.0369, 0.0366, 0.0371, 0.0395, 0.0474, 0.0401, 0.0349, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:30:37,339 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:48,356 INFO [train.py:903] (0/4) Epoch 30, batch 3400, loss[loss=0.222, simple_loss=0.304, pruned_loss=0.07001, over 19581.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05911, over 3795587.18 frames. ], batch size: 61, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:30:53,485 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.3132, 1.4007, 1.9985, 1.5552, 3.0997, 4.6645, 4.5429, 5.1281], + device='cuda:0'), covar=tensor([0.1693, 0.4048, 0.3438, 0.2499, 0.0657, 0.0233, 0.0169, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0334, 0.0368, 0.0275, 0.0257, 0.0200, 0.0222, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 16:31:15,990 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:31:51,802 INFO [train.py:903] (0/4) Epoch 30, batch 3450, loss[loss=0.1743, simple_loss=0.2471, pruned_loss=0.05077, over 18205.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2819, pruned_loss=0.05868, over 3801291.50 frames. ], batch size: 40, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:57,383 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 16:32:27,319 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.269e+02 6.567e+02 8.372e+02 2.121e+03, threshold=1.313e+03, percent-clipped=9.0 +2023-04-03 16:32:55,648 INFO [train.py:903] (0/4) Epoch 30, batch 3500, loss[loss=0.1944, simple_loss=0.2794, pruned_loss=0.0547, over 19543.00 frames. 
], tot_loss[loss=0.2012, simple_loss=0.2834, pruned_loss=0.05947, over 3800380.95 frames. ], batch size: 56, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:33:41,235 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:33:58,201 INFO [train.py:903] (0/4) Epoch 30, batch 3550, loss[loss=0.2087, simple_loss=0.2957, pruned_loss=0.06081, over 19542.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.05973, over 3793512.14 frames. ], batch size: 56, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:34:21,221 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.51 vs. limit=5.0 +2023-04-03 16:34:35,157 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.645e+02 6.006e+02 7.208e+02 1.141e+03, threshold=1.201e+03, percent-clipped=0.0 +2023-04-03 16:34:43,644 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.8563, 1.9476, 2.2766, 2.4151, 1.8535, 2.3267, 2.2342, 2.0596], + device='cuda:0'), covar=tensor([0.4265, 0.4061, 0.2121, 0.2503, 0.4231, 0.2374, 0.5316, 0.3611], + device='cuda:0'), in_proj_covar=tensor([0.0952, 0.1036, 0.0754, 0.0963, 0.0929, 0.0870, 0.0869, 0.0820], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 16:35:01,951 INFO [train.py:903] (0/4) Epoch 30, batch 3600, loss[loss=0.1949, simple_loss=0.2824, pruned_loss=0.05376, over 19486.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05933, over 3797600.17 frames. ], batch size: 64, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:35:06,951 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201615.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:35:22,560 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.7388, 1.5388, 1.6209, 1.4174, 3.2247, 1.1943, 2.5773, 3.7900], + device='cuda:0'), covar=tensor([0.0615, 0.2989, 0.3128, 0.2385, 0.0870, 0.2826, 0.1378, 0.0272], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0386, 0.0405, 0.0358, 0.0390, 0.0364, 0.0404, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:35:32,740 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:04,976 INFO [train.py:903] (0/4) Epoch 30, batch 3650, loss[loss=0.2434, simple_loss=0.3306, pruned_loss=0.07809, over 19685.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05983, over 3789151.22 frames. ], batch size: 60, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:36:05,277 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:27,511 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201679.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:42,040 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 4.844e+02 5.974e+02 7.556e+02 1.962e+03, threshold=1.195e+03, percent-clipped=4.0 +2023-04-03 16:37:09,559 INFO [train.py:903] (0/4) Epoch 30, batch 3700, loss[loss=0.2136, simple_loss=0.2926, pruned_loss=0.06729, over 17281.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2829, pruned_loss=0.05986, over 3781310.03 frames. 
], batch size: 101, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:37:23,482 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201724.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:31,395 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201730.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:44,548 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 16:37:57,759 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201751.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:38:11,312 INFO [train.py:903] (0/4) Epoch 30, batch 3750, loss[loss=0.1623, simple_loss=0.2377, pruned_loss=0.04341, over 19762.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2813, pruned_loss=0.05896, over 3793043.19 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:38:25,360 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.0030, 1.2936, 1.5552, 0.6288, 2.0346, 2.4343, 2.1296, 2.5894], + device='cuda:0'), covar=tensor([0.1658, 0.4000, 0.3737, 0.3090, 0.0708, 0.0331, 0.0393, 0.0445], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0336, 0.0369, 0.0276, 0.0259, 0.0201, 0.0223, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 16:38:47,453 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.921e+02 6.534e+02 8.381e+02 2.079e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 16:39:04,358 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:14,005 INFO [train.py:903] (0/4) Epoch 30, batch 3800, loss[loss=0.2415, simple_loss=0.3231, pruned_loss=0.0799, over 19523.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.281, pruned_loss=0.05877, over 3805157.17 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:39:35,650 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:41,272 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 16:39:43,446 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 16:40:15,946 INFO [train.py:903] (0/4) Epoch 30, batch 3850, loss[loss=0.21, simple_loss=0.2947, pruned_loss=0.06266, over 19520.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2811, pruned_loss=0.05873, over 3808928.40 frames. ], batch size: 64, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:40:51,663 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.977e+02 6.719e+02 8.916e+02 2.147e+03, threshold=1.344e+03, percent-clipped=8.0 +2023-04-03 16:41:18,289 INFO [train.py:903] (0/4) Epoch 30, batch 3900, loss[loss=0.2006, simple_loss=0.2929, pruned_loss=0.05418, over 18851.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05847, over 3818095.06 frames. ], batch size: 74, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,923 INFO [train.py:903] (0/4) Epoch 30, batch 3950, loss[loss=0.1916, simple_loss=0.2816, pruned_loss=0.05079, over 19349.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2813, pruned_loss=0.05901, over 3809500.49 frames. 
], batch size: 66, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,941 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 16:42:51,071 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:42:56,913 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 4.814e+02 5.725e+02 7.208e+02 1.816e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 16:43:08,291 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-202000.pt +2023-04-03 16:43:11,140 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0058, 1.9457, 1.8124, 1.6715, 1.6773, 1.6468, 0.4372, 0.8853], + device='cuda:0'), covar=tensor([0.0649, 0.0668, 0.0473, 0.0776, 0.1156, 0.0842, 0.1365, 0.1209], + device='cuda:0'), in_proj_covar=tensor([0.0369, 0.0366, 0.0371, 0.0394, 0.0473, 0.0400, 0.0348, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:43:16,585 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:18,097 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:19,102 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:22,778 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:24,459 INFO [train.py:903] (0/4) Epoch 30, batch 4000, loss[loss=0.191, simple_loss=0.2724, pruned_loss=0.05487, over 19581.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2815, pruned_loss=0.05941, over 3807185.64 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:43:38,597 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:49,619 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202032.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:44:08,184 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 16:44:17,512 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.7296, 1.7516, 1.7295, 1.5432, 1.4440, 1.5197, 0.4577, 0.7162], + device='cuda:0'), covar=tensor([0.0663, 0.0635, 0.0408, 0.0617, 0.1201, 0.0742, 0.1266, 0.1146], + device='cuda:0'), in_proj_covar=tensor([0.0370, 0.0368, 0.0372, 0.0396, 0.0474, 0.0402, 0.0350, 0.0352], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:44:27,670 INFO [train.py:903] (0/4) Epoch 30, batch 4050, loss[loss=0.1782, simple_loss=0.2698, pruned_loss=0.04331, over 18640.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2812, pruned_loss=0.05882, over 3805389.26 frames. 
], batch size: 74, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:44:34,768 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:02,985 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.674e+02 6.115e+02 7.260e+02 2.667e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 16:45:30,397 INFO [train.py:903] (0/4) Epoch 30, batch 4100, loss[loss=0.1937, simple_loss=0.2626, pruned_loss=0.0624, over 19739.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2815, pruned_loss=0.05875, over 3808840.13 frames. ], batch size: 51, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:45:40,874 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:03,029 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202138.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:04,977 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 16:46:32,907 INFO [train.py:903] (0/4) Epoch 30, batch 4150, loss[loss=0.1905, simple_loss=0.2671, pruned_loss=0.05692, over 19864.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2816, pruned_loss=0.05902, over 3808220.93 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:46:59,278 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202183.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:47:08,858 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.823e+02 5.653e+02 7.013e+02 1.196e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-03 16:47:34,529 INFO [train.py:903] (0/4) Epoch 30, batch 4200, loss[loss=0.1819, simple_loss=0.2515, pruned_loss=0.05614, over 19755.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2811, pruned_loss=0.05885, over 3810851.88 frames. ], batch size: 46, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:47:37,801 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 16:48:22,549 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([4.3951, 4.0133, 2.6489, 3.5331, 1.0932, 4.0112, 3.8250, 3.9054], + device='cuda:0'), covar=tensor([0.0676, 0.0916, 0.1954, 0.0935, 0.3781, 0.0643, 0.0990, 0.1183], + device='cuda:0'), in_proj_covar=tensor([0.0536, 0.0435, 0.0519, 0.0360, 0.0412, 0.0460, 0.0454, 0.0490], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:48:35,838 INFO [train.py:903] (0/4) Epoch 30, batch 4250, loss[loss=0.2168, simple_loss=0.2942, pruned_loss=0.06967, over 19672.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06023, over 3807846.23 frames. ], batch size: 55, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:48:52,011 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 16:49:03,275 WARNING [train.py:1073] (0/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 16:49:12,500 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.334e+02 6.771e+02 9.277e+02 2.113e+03, threshold=1.354e+03, percent-clipped=7.0 +2023-04-03 16:49:26,578 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-04-03 16:49:38,448 INFO [train.py:903] (0/4) Epoch 30, batch 4300, loss[loss=0.191, simple_loss=0.2788, pruned_loss=0.05162, over 19763.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05986, over 3819784.87 frames. ], batch size: 54, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:49:41,191 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. limit=5.0 +2023-04-03 16:50:27,842 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1200, 2.0319, 1.9135, 1.8190, 1.5568, 1.7266, 0.6288, 1.1917], + device='cuda:0'), covar=tensor([0.0700, 0.0735, 0.0533, 0.0871, 0.1342, 0.1108, 0.1517, 0.1220], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0366, 0.0371, 0.0393, 0.0472, 0.0400, 0.0347, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 16:50:28,701 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:50:34,354 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 16:50:41,236 INFO [train.py:903] (0/4) Epoch 30, batch 4350, loss[loss=0.2202, simple_loss=0.3056, pruned_loss=0.06736, over 19706.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05941, over 3819530.13 frames. ], batch size: 59, lr: 2.72e-03, grad_scale: 4.0 +2023-04-03 16:50:59,658 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:18,784 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.712e+02 5.570e+02 7.585e+02 1.545e+03, threshold=1.114e+03, percent-clipped=3.0 +2023-04-03 16:51:21,618 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:31,108 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:43,048 INFO [train.py:903] (0/4) Epoch 30, batch 4400, loss[loss=0.1885, simple_loss=0.2666, pruned_loss=0.0552, over 19418.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2821, pruned_loss=0.05926, over 3820364.86 frames. ], batch size: 48, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:51:52,579 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:08,197 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 16:52:17,015 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 16:52:17,359 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:34,058 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.45 vs. limit=5.0 +2023-04-03 16:52:46,168 INFO [train.py:903] (0/4) Epoch 30, batch 4450, loss[loss=0.2239, simple_loss=0.3019, pruned_loss=0.07295, over 18067.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05932, over 3805606.99 frames. 
], batch size: 83, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:52:48,916 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:52,444 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:53:23,823 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 4.882e+02 5.921e+02 7.342e+02 1.295e+03, threshold=1.184e+03, percent-clipped=2.0 +2023-04-03 16:53:38,941 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([6.3069, 5.7634, 3.1657, 5.0478, 1.4301, 5.8975, 5.8033, 5.8941], + device='cuda:0'), covar=tensor([0.0362, 0.0944, 0.1975, 0.0774, 0.3835, 0.0546, 0.0841, 0.1043], + device='cuda:0'), in_proj_covar=tensor([0.0541, 0.0439, 0.0524, 0.0364, 0.0416, 0.0464, 0.0458, 0.0495], + device='cuda:0'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 16:53:48,736 INFO [train.py:903] (0/4) Epoch 30, batch 4500, loss[loss=0.2259, simple_loss=0.3095, pruned_loss=0.07116, over 19325.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2819, pruned_loss=0.05888, over 3823662.73 frames. ], batch size: 66, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:54:50,685 INFO [train.py:903] (0/4) Epoch 30, batch 4550, loss[loss=0.227, simple_loss=0.3059, pruned_loss=0.07409, over 18212.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05952, over 3817511.63 frames. ], batch size: 84, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:55:01,220 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 16:55:25,937 WARNING [train.py:1073] (0/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 16:55:27,104 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 5.006e+02 6.254e+02 8.173e+02 1.576e+03, threshold=1.251e+03, percent-clipped=3.0 +2023-04-03 16:55:52,915 INFO [train.py:903] (0/4) Epoch 30, batch 4600, loss[loss=0.2734, simple_loss=0.3396, pruned_loss=0.1036, over 13090.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05988, over 3821064.79 frames. ], batch size: 136, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:56:54,631 INFO [train.py:903] (0/4) Epoch 30, batch 4650, loss[loss=0.211, simple_loss=0.2918, pruned_loss=0.06516, over 19730.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2822, pruned_loss=0.05954, over 3825258.29 frames. ], batch size: 63, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:57:12,740 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 16:57:23,009 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 16:57:31,779 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.029e+02 5.947e+02 7.618e+02 1.686e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-03 16:57:36,926 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.1262, 1.2592, 1.6598, 1.2039, 2.6535, 3.5206, 3.2241, 3.7234], + device='cuda:0'), covar=tensor([0.1737, 0.4057, 0.3761, 0.2732, 0.0658, 0.0182, 0.0226, 0.0309], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0336, 0.0369, 0.0275, 0.0258, 0.0200, 0.0221, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 16:57:55,569 INFO [train.py:903] (0/4) Epoch 30, batch 4700, loss[loss=0.173, simple_loss=0.2494, pruned_loss=0.04829, over 19728.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2822, pruned_loss=0.05984, over 3824753.29 frames. ], batch size: 46, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:58:10,497 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:18,367 WARNING [train.py:1073] (0/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 16:58:41,330 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:59,207 INFO [train.py:903] (0/4) Epoch 30, batch 4750, loss[loss=0.1968, simple_loss=0.2906, pruned_loss=0.05149, over 19554.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05971, over 3819778.85 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:59:35,969 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.055e+02 5.959e+02 7.636e+02 1.705e+03, threshold=1.192e+03, percent-clipped=6.0 +2023-04-03 17:00:00,024 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.7352, 3.2442, 3.2806, 3.2752, 1.4513, 3.1120, 2.7521, 3.0720], + device='cuda:0'), covar=tensor([0.1933, 0.1319, 0.0889, 0.1038, 0.5655, 0.1287, 0.0895, 0.1396], + device='cuda:0'), in_proj_covar=tensor([0.0823, 0.0801, 0.1008, 0.0886, 0.0871, 0.0774, 0.0599, 0.0941], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-03 17:00:01,983 INFO [train.py:903] (0/4) Epoch 30, batch 4800, loss[loss=0.2132, simple_loss=0.2983, pruned_loss=0.06406, over 19525.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.05962, over 3818887.46 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:00:19,400 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2686, 1.4849, 1.9071, 1.7726, 3.0213, 4.5454, 4.4944, 5.0698], + device='cuda:0'), covar=tensor([0.1737, 0.3983, 0.3643, 0.2430, 0.0700, 0.0232, 0.0187, 0.0266], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0337, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-04-03 17:00:25,692 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-04-03 17:00:39,731 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.1369, 1.7718, 1.7349, 1.9574, 1.6957, 1.8112, 1.6212, 1.9836], + device='cuda:0'), covar=tensor([0.1066, 0.1443, 0.1595, 0.1244, 0.1437, 0.0588, 0.1591, 0.0760], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0361, 0.0323, 0.0262, 0.0311, 0.0261, 0.0326, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 17:00:47,813 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202848.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:01:03,241 INFO [train.py:903] (0/4) Epoch 30, batch 4850, loss[loss=0.1843, simple_loss=0.2679, pruned_loss=0.05032, over 19483.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.05966, over 3820077.19 frames. ], batch size: 64, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:01:27,942 WARNING [train.py:1073] (0/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 17:01:41,382 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.041e+02 6.050e+02 7.335e+02 2.031e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:01:47,272 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 17:01:53,244 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 17:01:55,294 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 17:02:03,499 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 17:02:05,916 INFO [train.py:903] (0/4) Epoch 30, batch 4900, loss[loss=0.246, simple_loss=0.3154, pruned_loss=0.08824, over 19562.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2831, pruned_loss=0.05981, over 3825334.65 frames. ], batch size: 61, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:02:25,017 WARNING [train.py:1073] (0/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 17:02:49,242 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 17:03:07,819 INFO [train.py:903] (0/4) Epoch 30, batch 4950, loss[loss=0.258, simple_loss=0.3231, pruned_loss=0.0965, over 19383.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06032, over 3830939.14 frames. ], batch size: 70, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:03:22,390 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 17:03:43,837 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.771e+02 6.104e+02 8.250e+02 2.222e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 17:03:46,184 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 17:04:09,795 INFO [train.py:903] (0/4) Epoch 30, batch 5000, loss[loss=0.1743, simple_loss=0.2466, pruned_loss=0.05101, over 19761.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2823, pruned_loss=0.05951, over 3834232.52 frames. 
], batch size: 46, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:04:15,808 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.8541, 1.3102, 1.4899, 1.6529, 3.3805, 1.1911, 2.5494, 3.8353], + device='cuda:0'), covar=tensor([0.0594, 0.3245, 0.3178, 0.1975, 0.0877, 0.2815, 0.1405, 0.0288], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0385, 0.0404, 0.0358, 0.0389, 0.0363, 0.0403, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 17:04:16,558 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 17:04:28,735 WARNING [train.py:1073] (0/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 17:05:02,503 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:09,013 INFO [train.py:903] (0/4) Epoch 30, batch 5050, loss[loss=0.1719, simple_loss=0.2527, pruned_loss=0.04556, over 19748.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2815, pruned_loss=0.05886, over 3840231.09 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:05:45,269 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 17:05:46,420 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.511e+02 5.411e+02 6.959e+02 2.884e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 17:05:59,223 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:06:10,209 INFO [train.py:903] (0/4) Epoch 30, batch 5100, loss[loss=0.2406, simple_loss=0.3135, pruned_loss=0.0838, over 13285.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2818, pruned_loss=0.05873, over 3829954.11 frames. ], batch size: 136, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:06:23,809 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 17:06:27,361 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 17:06:30,785 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 17:07:12,617 INFO [train.py:903] (0/4) Epoch 30, batch 5150, loss[loss=0.1951, simple_loss=0.2833, pruned_loss=0.05346, over 19494.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2814, pruned_loss=0.05811, over 3838791.96 frames. ], batch size: 64, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:07:27,927 WARNING [train.py:1073] (0/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 17:07:49,458 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.115e+02 6.435e+02 8.085e+02 2.061e+03, threshold=1.287e+03, percent-clipped=7.0 +2023-04-03 17:07:49,619 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203192.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:08:01,706 WARNING [train.py:1073] (0/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 17:08:14,990 INFO [train.py:903] (0/4) Epoch 30, batch 5200, loss[loss=0.2052, simple_loss=0.2802, pruned_loss=0.06511, over 19767.00 frames. 
], tot_loss[loss=0.1993, simple_loss=0.282, pruned_loss=0.05828, over 3834663.79 frames. ], batch size: 47, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:08:30,115 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 17:09:14,165 WARNING [train.py:1073] (0/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 17:09:16,394 INFO [train.py:903] (0/4) Epoch 30, batch 5250, loss[loss=0.2124, simple_loss=0.3081, pruned_loss=0.05839, over 19662.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2821, pruned_loss=0.05849, over 3840136.14 frames. ], batch size: 60, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:09:53,736 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.978e+02 4.929e+02 6.276e+02 8.119e+02 2.486e+03, threshold=1.255e+03, percent-clipped=4.0 +2023-04-03 17:10:12,243 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203307.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:10:17,557 INFO [train.py:903] (0/4) Epoch 30, batch 5300, loss[loss=0.2016, simple_loss=0.2932, pruned_loss=0.05495, over 19721.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2833, pruned_loss=0.05951, over 3825440.03 frames. ], batch size: 51, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:10:36,836 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 17:10:45,012 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0273, 2.0991, 2.4380, 2.5938, 1.9942, 2.5420, 2.4372, 2.2245], + device='cuda:0'), covar=tensor([0.4435, 0.4140, 0.1928, 0.2634, 0.4341, 0.2384, 0.5048, 0.3475], + device='cuda:0'), in_proj_covar=tensor([0.0955, 0.1038, 0.0755, 0.0965, 0.0933, 0.0871, 0.0870, 0.0819], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 17:11:18,766 INFO [train.py:903] (0/4) Epoch 30, batch 5350, loss[loss=0.2075, simple_loss=0.2936, pruned_loss=0.0607, over 19616.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2835, pruned_loss=0.05956, over 3833045.16 frames. ], batch size: 57, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:11:53,885 WARNING [train.py:1073] (0/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 17:11:56,049 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 5.524e+02 6.450e+02 8.325e+02 1.910e+03, threshold=1.290e+03, percent-clipped=5.0 +2023-04-03 17:12:05,343 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:12:20,978 INFO [train.py:903] (0/4) Epoch 30, batch 5400, loss[loss=0.2405, simple_loss=0.3162, pruned_loss=0.08242, over 19534.00 frames. ], tot_loss[loss=0.202, simple_loss=0.284, pruned_loss=0.06002, over 3820058.75 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:02,933 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203447.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:20,920 INFO [train.py:903] (0/4) Epoch 30, batch 5450, loss[loss=0.2169, simple_loss=0.3017, pruned_loss=0.06604, over 19763.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2844, pruned_loss=0.06044, over 3798857.13 frames. 
], batch size: 51, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:30,385 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4992, 1.5674, 1.8339, 1.7732, 2.7615, 2.3231, 2.9728, 1.3612], + device='cuda:0'), covar=tensor([0.2663, 0.4576, 0.2931, 0.2020, 0.1523, 0.2282, 0.1460, 0.4852], + device='cuda:0'), in_proj_covar=tensor([0.0563, 0.0683, 0.0770, 0.0519, 0.0640, 0.0554, 0.0674, 0.0584], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 17:13:50,566 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:59,461 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.910e+02 6.531e+02 8.117e+02 1.984e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-03 17:14:10,083 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.4689, 1.3851, 1.5914, 1.3924, 3.0961, 1.1108, 2.3585, 3.4991], + device='cuda:0'), covar=tensor([0.0531, 0.2852, 0.2918, 0.1964, 0.0700, 0.2562, 0.1279, 0.0256], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0385, 0.0404, 0.0358, 0.0389, 0.0363, 0.0403, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 17:14:23,409 INFO [train.py:903] (0/4) Epoch 30, batch 5500, loss[loss=0.188, simple_loss=0.278, pruned_loss=0.04903, over 19783.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2827, pruned_loss=0.05934, over 3795541.38 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:14:26,909 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:14:48,291 WARNING [train.py:1073] (0/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 17:15:24,340 INFO [train.py:903] (0/4) Epoch 30, batch 5550, loss[loss=0.2316, simple_loss=0.3091, pruned_loss=0.07705, over 18147.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2813, pruned_loss=0.05883, over 3810232.83 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:15:24,675 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203562.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:15:25,788 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203563.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:15:31,874 WARNING [train.py:1073] (0/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 17:15:56,752 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203588.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:16:01,701 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 4.952e+02 6.311e+02 7.819e+02 2.120e+03, threshold=1.262e+03, percent-clipped=2.0 +2023-04-03 17:16:09,340 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 17:16:22,138 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 17:16:27,768 INFO [train.py:903] (0/4) Epoch 30, batch 5600, loss[loss=0.2357, simple_loss=0.3055, pruned_loss=0.08299, over 13742.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05993, over 3796603.12 frames. 
], batch size: 136, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:16:29,344 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.2481, 1.2157, 1.2551, 1.3790, 1.0943, 1.3396, 1.2751, 1.3140], + device='cuda:0'), covar=tensor([0.0918, 0.0931, 0.1030, 0.0624, 0.0830, 0.0852, 0.0819, 0.0782], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0222, 0.0227, 0.0238, 0.0226, 0.0215, 0.0187, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 17:17:29,533 INFO [train.py:903] (0/4) Epoch 30, batch 5650, loss[loss=0.1824, simple_loss=0.2614, pruned_loss=0.05174, over 19787.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05938, over 3803639.62 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:18:06,339 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.913e+02 5.869e+02 7.597e+02 1.607e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 17:18:18,654 WARNING [train.py:1073] (0/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 17:18:29,974 INFO [train.py:903] (0/4) Epoch 30, batch 5700, loss[loss=0.2251, simple_loss=0.2926, pruned_loss=0.07878, over 19462.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05929, over 3812060.69 frames. ], batch size: 49, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:31,946 INFO [train.py:903] (0/4) Epoch 30, batch 5750, loss[loss=0.2169, simple_loss=0.2971, pruned_loss=0.0683, over 19114.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05958, over 3818445.51 frames. ], batch size: 69, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:35,360 WARNING [train.py:1073] (0/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 17:19:35,648 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:44,335 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:45,202 WARNING [train.py:1073] (0/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 17:19:51,012 WARNING [train.py:1073] (0/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 17:20:00,638 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:09,170 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.021e+02 5.985e+02 7.897e+02 1.857e+03, threshold=1.197e+03, percent-clipped=7.0 +2023-04-03 17:20:15,313 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:35,334 INFO [train.py:903] (0/4) Epoch 30, batch 5800, loss[loss=0.2436, simple_loss=0.3228, pruned_loss=0.08223, over 19684.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.05991, over 3814682.66 frames. 
], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:20:43,781 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:56,427 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:13,735 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:30,585 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.5652, 2.1941, 1.6325, 1.5798, 1.9913, 1.3053, 1.5160, 1.9094], + device='cuda:0'), covar=tensor([0.1087, 0.0851, 0.1104, 0.0871, 0.0607, 0.1362, 0.0727, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0323, 0.0346, 0.0279, 0.0256, 0.0350, 0.0292, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 17:21:37,906 INFO [train.py:903] (0/4) Epoch 30, batch 5850, loss[loss=0.2249, simple_loss=0.3085, pruned_loss=0.0707, over 19659.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.05998, over 3807837.05 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:21:41,678 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:22:15,806 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.919e+02 5.824e+02 7.977e+02 1.569e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 17:22:20,468 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0083, 1.9498, 1.6975, 2.0428, 1.8465, 1.7322, 1.6109, 1.9285], + device='cuda:0'), covar=tensor([0.1168, 0.1437, 0.1686, 0.1105, 0.1379, 0.0614, 0.1686, 0.0853], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0363, 0.0324, 0.0261, 0.0312, 0.0261, 0.0327, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-04-03 17:22:39,481 INFO [train.py:903] (0/4) Epoch 30, batch 5900, loss[loss=0.172, simple_loss=0.2598, pruned_loss=0.04207, over 19668.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2828, pruned_loss=0.05924, over 3821149.58 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:22:42,676 WARNING [train.py:1073] (0/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 17:22:45,444 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=2.54 vs. limit=5.0 +2023-04-03 17:22:47,235 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203918.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:23:06,097 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 17:23:08,132 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 17:23:19,584 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:23:21,446 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 17:23:40,675 INFO [train.py:903] (0/4) Epoch 30, batch 5950, loss[loss=0.2017, simple_loss=0.2893, pruned_loss=0.05708, over 19589.00 frames. 
], tot_loss[loss=0.2013, simple_loss=0.2834, pruned_loss=0.05963, over 3829804.17 frames. ], batch size: 61, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:18,148 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.092e+02 6.183e+02 7.281e+02 1.501e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-03 17:24:28,252 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/checkpoint-204000.pt +2023-04-03 17:24:45,143 INFO [train.py:903] (0/4) Epoch 30, batch 6000, loss[loss=0.2127, simple_loss=0.2975, pruned_loss=0.06394, over 17351.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2827, pruned_loss=0.05911, over 3812607.32 frames. ], batch size: 101, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:45,144 INFO [train.py:928] (0/4) Computing validation loss +2023-04-03 17:24:54,740 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4881, 1.5356, 1.8355, 1.8573, 1.3966, 1.7772, 1.7675, 1.6420], + device='cuda:0'), covar=tensor([0.3936, 0.4292, 0.1894, 0.2544, 0.4383, 0.2385, 0.4573, 0.3370], + device='cuda:0'), in_proj_covar=tensor([0.0955, 0.1039, 0.0755, 0.0968, 0.0934, 0.0872, 0.0868, 0.0820], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 17:24:58,730 INFO [train.py:937] (0/4) Epoch 30, validation: loss=0.167, simple_loss=0.2658, pruned_loss=0.03407, over 944034.00 frames. +2023-04-03 17:24:58,732 INFO [train.py:938] (0/4) Maximum memory allocated so far is 18803MB +2023-04-03 17:25:11,720 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.0577, 3.3204, 2.0615, 1.6066, 3.1201, 1.5407, 1.6433, 2.3812], + device='cuda:0'), covar=tensor([0.1117, 0.0503, 0.0841, 0.0879, 0.0416, 0.1133, 0.0782, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0324, 0.0347, 0.0279, 0.0257, 0.0351, 0.0294, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-03 17:25:21,218 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-03 17:26:02,074 INFO [train.py:903] (0/4) Epoch 30, batch 6050, loss[loss=0.1883, simple_loss=0.2821, pruned_loss=0.04724, over 19666.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.282, pruned_loss=0.05853, over 3820175.19 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:26:17,195 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-03 17:26:38,220 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 5.054e+02 6.352e+02 7.854e+02 1.582e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-03 17:26:42,111 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:26:57,988 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 17:27:00,739 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:04,103 INFO [train.py:903] (0/4) Epoch 30, batch 6100, loss[loss=0.2409, simple_loss=0.3132, pruned_loss=0.08434, over 19618.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2831, pruned_loss=0.05946, over 3818509.64 frames. 
], batch size: 57, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:27:22,493 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:04,388 INFO [train.py:903] (0/4) Epoch 30, batch 6150, loss[loss=0.1716, simple_loss=0.2555, pruned_loss=0.04389, over 19118.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2841, pruned_loss=0.06024, over 3812607.14 frames. ], batch size: 42, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:28:29,853 INFO [zipformer.py:1188] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:34,972 WARNING [train.py:1073] (0/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 17:28:42,858 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.061e+02 6.485e+02 7.945e+02 1.594e+03, threshold=1.297e+03, percent-clipped=2.0 +2023-04-03 17:28:52,784 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204200.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:03,790 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:07,246 INFO [train.py:903] (0/4) Epoch 30, batch 6200, loss[loss=0.1588, simple_loss=0.2396, pruned_loss=0.03904, over 19772.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2833, pruned_loss=0.05958, over 3820228.95 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:29:24,464 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:25,548 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:45,904 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:30:09,328 INFO [train.py:903] (0/4) Epoch 30, batch 6250, loss[loss=0.2136, simple_loss=0.2923, pruned_loss=0.06745, over 19121.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.05966, over 3817723.36 frames. ], batch size: 69, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:30:10,639 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204262.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:30:38,588 WARNING [train.py:1073] (0/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 17:30:46,054 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.004e+02 5.938e+02 7.424e+02 1.100e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-03 17:31:10,305 INFO [train.py:903] (0/4) Epoch 30, batch 6300, loss[loss=0.2081, simple_loss=0.2816, pruned_loss=0.06728, over 19859.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2826, pruned_loss=0.05909, over 3820009.07 frames. 
], batch size: 52, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:31:12,722 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.4860, 1.4471, 1.4824, 1.8416, 1.3115, 1.6186, 1.5815, 1.5804], + device='cuda:0'), covar=tensor([0.0928, 0.0965, 0.1047, 0.0647, 0.0860, 0.0866, 0.0897, 0.0763], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0221, 0.0228, 0.0238, 0.0226, 0.0215, 0.0187, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:0') +2023-04-03 17:31:24,171 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:32:11,313 INFO [train.py:903] (0/4) Epoch 30, batch 6350, loss[loss=0.1996, simple_loss=0.2869, pruned_loss=0.05618, over 19707.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2824, pruned_loss=0.05922, over 3813078.36 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:32:30,530 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204377.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:32:50,298 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.728e+02 6.049e+02 7.442e+02 1.533e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:33:12,659 INFO [train.py:903] (0/4) Epoch 30, batch 6400, loss[loss=0.1842, simple_loss=0.2684, pruned_loss=0.05001, over 19748.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2832, pruned_loss=0.05955, over 3812173.32 frames. ], batch size: 51, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:33:45,675 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:13,521 INFO [train.py:903] (0/4) Epoch 30, batch 6450, loss[loss=0.2117, simple_loss=0.2979, pruned_loss=0.06271, over 19665.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.283, pruned_loss=0.05961, over 3812500.88 frames. ], batch size: 55, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:34:35,926 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:51,302 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 4.981e+02 6.276e+02 8.010e+02 2.155e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 17:34:54,951 WARNING [train.py:1073] (0/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 17:34:59,449 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:08,012 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:11,893 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-03 17:35:16,836 INFO [train.py:903] (0/4) Epoch 30, batch 6500, loss[loss=0.2101, simple_loss=0.2991, pruned_loss=0.0605, over 19698.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2829, pruned_loss=0.05974, over 3820588.58 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:35:19,123 WARNING [train.py:1073] (0/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-03 17:35:30,896 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204524.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:31,791 INFO [zipformer.py:1188] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:09,128 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:17,963 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 17:36:18,056 INFO [train.py:903] (0/4) Epoch 30, batch 6550, loss[loss=0.231, simple_loss=0.3193, pruned_loss=0.0713, over 19552.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05971, over 3821735.51 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:36:21,768 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([2.3102, 2.1354, 1.9696, 1.8493, 1.6086, 1.8156, 0.9201, 1.2540], + device='cuda:0'), covar=tensor([0.0704, 0.0767, 0.0613, 0.1065, 0.1428, 0.1170, 0.1402, 0.1257], + device='cuda:0'), in_proj_covar=tensor([0.0368, 0.0367, 0.0371, 0.0396, 0.0475, 0.0400, 0.0350, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 17:36:39,390 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([3.1933, 2.8940, 2.2905, 2.2515, 2.0730, 2.6209, 1.1226, 2.0783], + device='cuda:0'), covar=tensor([0.0692, 0.0648, 0.0747, 0.1248, 0.1155, 0.1111, 0.1417, 0.1160], + device='cuda:0'), in_proj_covar=tensor([0.0367, 0.0367, 0.0371, 0.0395, 0.0474, 0.0400, 0.0349, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-03 17:36:40,570 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:56,735 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 17:36:56,887 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.288e+02 6.916e+02 9.470e+02 2.608e+03, threshold=1.383e+03, percent-clipped=11.0 +2023-04-03 17:37:11,416 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:37:20,114 INFO [train.py:903] (0/4) Epoch 30, batch 6600, loss[loss=0.182, simple_loss=0.2644, pruned_loss=0.04975, over 19603.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.0598, over 3825812.54 frames. ], batch size: 52, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:37:46,903 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204633.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:37:54,578 INFO [zipformer.py:1188] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204640.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:38:17,108 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204658.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:38:21,421 INFO [train.py:903] (0/4) Epoch 30, batch 6650, loss[loss=0.2293, simple_loss=0.3094, pruned_loss=0.07456, over 19722.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.06, over 3819669.79 frames. 
], batch size: 63, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:38:33,351 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 17:38:59,519 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 4.923e+02 6.497e+02 8.594e+02 3.232e+03, threshold=1.299e+03, percent-clipped=5.0 +2023-04-03 17:39:24,423 INFO [train.py:903] (0/4) Epoch 30, batch 6700, loss[loss=0.2118, simple_loss=0.2912, pruned_loss=0.06615, over 19541.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.05976, over 3818842.80 frames. ], batch size: 54, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:21,767 INFO [train.py:903] (0/4) Epoch 30, batch 6750, loss[loss=0.2123, simple_loss=0.2786, pruned_loss=0.07298, over 19297.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2824, pruned_loss=0.06005, over 3821451.34 frames. ], batch size: 44, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:31,887 INFO [zipformer.py:2441] (0/4) attn_weights_entropy = tensor([1.6385, 1.7560, 1.9958, 1.8657, 2.6923, 2.3478, 2.8272, 1.5101], + device='cuda:0'), covar=tensor([0.2433, 0.4124, 0.2626, 0.1948, 0.1604, 0.2275, 0.1509, 0.4473], + device='cuda:0'), in_proj_covar=tensor([0.0563, 0.0685, 0.0771, 0.0518, 0.0640, 0.0554, 0.0674, 0.0584], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-04-03 17:40:34,745 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 17:40:58,541 INFO [optim.py:369] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.849e+02 6.262e+02 9.905e+02 2.863e+03, threshold=1.252e+03, percent-clipped=11.0 +2023-04-03 17:41:17,955 INFO [zipformer.py:1188] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:19,750 INFO [train.py:903] (0/4) Epoch 30, batch 6800, loss[loss=0.2134, simple_loss=0.2954, pruned_loss=0.06564, over 17354.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2825, pruned_loss=0.05976, over 3826462.74 frames. ], batch size: 101, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:41:45,179 INFO [zipformer.py:1188] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:50,448 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_streaming/exp/v2/epoch-30.pt +2023-04-03 17:41:51,270 INFO [train.py:1171] (0/4) Done! 
diff --git a/log/log-train-2023-03-31-18-51-54-1 b/log/log-train-2023-03-31-18-51-54-1 new file mode 100644 index 0000000000000000000000000000000000000000..5dc76ac05c745b417cf1256aafcb4b754307085b --- /dev/null +++ b/log/log-train-2023-03-31-18-51-54-1 @@ -0,0 +1,25623 @@ +2023-03-31 18:51:54,786 INFO [train.py:975] (1/4) Training started +2023-03-31 18:51:54,787 INFO [train.py:985] (1/4) Device: cuda:1 +2023-03-31 18:51:54,826 INFO [train.py:994] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r2n03', 'IP address': '10.1.2.3'}, 'world_size': 4, 'master_port': 54321, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 10, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-31 18:51:54,826 INFO [train.py:996] (1/4) About to create model +2023-03-31 18:51:55,680 INFO [zipformer.py:405] (1/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-03-31 18:51:55,693 INFO [train.py:1000] (1/4) Number of model parameters: 20697573 +2023-03-31 18:52:03,003 INFO [train.py:1019] (1/4) Using DDP +2023-03-31 18:52:03,650 INFO [asr_datamodule.py:429] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts, combined with their reverberated versions +2023-03-31 18:52:03,691 INFO [asr_datamodule.py:224] (1/4) Enable MUSAN +2023-03-31 18:52:03,691 INFO [asr_datamodule.py:225] (1/4) About to get Musan cuts +2023-03-31 18:52:05,879 INFO [asr_datamodule.py:249] (1/4) Enable SpecAugment +2023-03-31 18:52:05,879 INFO [asr_datamodule.py:250] (1/4) Time warp factor: 80 +2023-03-31 18:52:05,879 INFO [asr_datamodule.py:260] (1/4) Num frame mask: 10 +2023-03-31 18:52:05,879 INFO [asr_datamodule.py:273] (1/4) About to create train dataset +2023-03-31 18:52:05,879 INFO [asr_datamodule.py:300] (1/4) Using DynamicBucketingSampler. +2023-03-31 18:52:08,162 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:08,613 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:08,889 INFO [asr_datamodule.py:315] (1/4) About to create train dataloader +2023-03-31 18:52:08,890 INFO [asr_datamodule.py:440] (1/4) About to get dev-clean cuts +2023-03-31 18:52:08,925 INFO [asr_datamodule.py:447] (1/4) About to get dev-other cuts +2023-03-31 18:52:08,952 INFO [asr_datamodule.py:346] (1/4) About to create dev dataset +2023-03-31 18:52:09,395 INFO [asr_datamodule.py:363] (1/4) About to create dev dataloader +2023-03-31 18:52:24,008 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:24,461 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:36,261 INFO [train.py:903] (1/4) Epoch 1, batch 0, loss[loss=7.307, simple_loss=6.612, pruned_loss=6.939, over 19592.00 frames. ], tot_loss[loss=7.307, simple_loss=6.612, pruned_loss=6.939, over 19592.00 frames. ], batch size: 57, lr: 2.50e-02, grad_scale: 2.0 +2023-03-31 18:52:36,262 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 18:52:49,136 INFO [train.py:937] (1/4) Epoch 1, validation: loss=6.888, simple_loss=6.229, pruned_loss=6.575, over 944034.00 frames. +2023-03-31 18:52:49,137 INFO [train.py:938] (1/4) Maximum memory allocated so far is 10937MB +2023-03-31 18:53:02,994 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 18:53:58,984 INFO [train.py:903] (1/4) Epoch 1, batch 50, loss[loss=1.39, simple_loss=1.231, pruned_loss=1.422, over 18824.00 frames. ], tot_loss[loss=2.167, simple_loss=1.957, pruned_loss=2.01, over 863774.73 frames. ], batch size: 74, lr: 2.75e-02, grad_scale: 0.125 +2023-03-31 18:53:59,782 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=108.63 vs. limit=5.0 +2023-03-31 18:54:00,563 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:54:36,583 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:54:41,871 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-03-31 18:55:04,257 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=7.36 vs. limit=2.0 +2023-03-31 18:55:11,344 INFO [train.py:903] (1/4) Epoch 1, batch 100, loss[loss=1.151, simple_loss=0.9889, pruned_loss=1.285, over 19535.00 frames. ], tot_loss[loss=1.626, simple_loss=1.447, pruned_loss=1.612, over 1527276.03 frames. ], batch size: 56, lr: 3.00e-02, grad_scale: 0.25 +2023-03-31 18:55:11,608 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 18:55:17,831 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.479e+01 1.678e+02 3.237e+02 1.260e+03 8.630e+04, threshold=6.475e+02, percent-clipped=0.0 +2023-03-31 18:55:25,911 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 18:56:18,847 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.88 vs. limit=2.0 +2023-03-31 18:56:20,070 INFO [train.py:903] (1/4) Epoch 1, batch 150, loss[loss=0.9524, simple_loss=0.8089, pruned_loss=1.037, over 19758.00 frames. ], tot_loss[loss=1.377, simple_loss=1.21, pruned_loss=1.412, over 2042012.87 frames. ], batch size: 51, lr: 3.25e-02, grad_scale: 0.25 +2023-03-31 18:57:32,423 INFO [train.py:903] (1/4) Epoch 1, batch 200, loss[loss=1.027, simple_loss=0.8696, pruned_loss=1.054, over 19791.00 frames. ], tot_loss[loss=1.235, simple_loss=1.075, pruned_loss=1.274, over 2448634.22 frames. ], batch size: 56, lr: 3.50e-02, grad_scale: 0.5 +2023-03-31 18:57:32,458 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 18:57:39,435 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.067e+01 1.186e+02 1.653e+02 2.090e+02 5.158e+02, threshold=3.307e+02, percent-clipped=0.0 +2023-03-31 18:58:43,201 INFO [train.py:903] (1/4) Epoch 1, batch 250, loss[loss=0.9682, simple_loss=0.8141, pruned_loss=0.9637, over 19578.00 frames. ], tot_loss[loss=1.155, simple_loss=0.9981, pruned_loss=1.182, over 2744782.19 frames. ], batch size: 52, lr: 3.75e-02, grad_scale: 0.5 +2023-03-31 18:59:06,197 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.80 vs. limit=5.0 +2023-03-31 18:59:48,220 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.19 vs. limit=5.0 +2023-03-31 18:59:51,885 INFO [train.py:903] (1/4) Epoch 1, batch 300, loss[loss=0.9232, simple_loss=0.7764, pruned_loss=0.8717, over 18180.00 frames. ], tot_loss[loss=1.096, simple_loss=0.9406, pruned_loss=1.108, over 2987007.86 frames. ], batch size: 83, lr: 4.00e-02, grad_scale: 1.0 +2023-03-31 18:59:56,684 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.717e+01 1.166e+02 1.521e+02 1.991e+02 3.277e+02, threshold=3.043e+02, percent-clipped=0.0 +2023-03-31 18:59:58,411 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=306.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:00:09,460 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=314.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:00:58,524 INFO [train.py:903] (1/4) Epoch 1, batch 350, loss[loss=0.93, simple_loss=0.7704, pruned_loss=0.8809, over 19654.00 frames. ], tot_loss[loss=1.055, simple_loss=0.8985, pruned_loss=1.052, over 3177969.89 frames. 
], batch size: 58, lr: 4.25e-02, grad_scale: 1.0 +2023-03-31 19:01:05,464 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 19:02:08,280 INFO [train.py:903] (1/4) Epoch 1, batch 400, loss[loss=0.8668, simple_loss=0.7112, pruned_loss=0.809, over 19087.00 frames. ], tot_loss[loss=1.025, simple_loss=0.867, pruned_loss=1.006, over 3316833.49 frames. ], batch size: 42, lr: 4.50e-02, grad_scale: 2.0 +2023-03-31 19:02:13,389 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.278e+02 1.546e+02 1.978e+02 5.474e+02, threshold=3.091e+02, percent-clipped=7.0 +2023-03-31 19:02:13,641 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=405.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:02:33,548 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=421.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:03:05,053 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=445.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:03:07,118 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=6.64 vs. limit=5.0 +2023-03-31 19:03:12,892 INFO [train.py:903] (1/4) Epoch 1, batch 450, loss[loss=0.9279, simple_loss=0.7633, pruned_loss=0.8273, over 19842.00 frames. ], tot_loss[loss=1, simple_loss=0.8411, pruned_loss=0.9638, over 3436282.94 frames. ], batch size: 52, lr: 4.75e-02, grad_scale: 2.0 +2023-03-31 19:03:17,852 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7085, 4.3057, 3.7241, 3.4803, 4.0774, 3.6254, 3.8994, 4.3769], + device='cuda:1'), covar=tensor([0.0201, 0.0141, 0.0239, 0.0172, 0.0110, 0.0894, 0.0162, 0.0117], + device='cuda:1'), in_proj_covar=tensor([0.0014, 0.0015, 0.0015, 0.0015, 0.0015, 0.0015, 0.0015, 0.0014], + device='cuda:1'), out_proj_covar=tensor([9.5444e-06, 9.7004e-06, 9.7803e-06, 9.4442e-06, 9.9968e-06, 9.4626e-06, + 9.9635e-06, 9.9348e-06], device='cuda:1') +2023-03-31 19:03:49,664 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 19:03:51,777 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 19:04:16,584 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=6.17 vs. limit=5.0 +2023-03-31 19:04:19,393 INFO [train.py:903] (1/4) Epoch 1, batch 500, loss[loss=0.8732, simple_loss=0.7238, pruned_loss=0.7377, over 19624.00 frames. ], tot_loss[loss=0.9809, simple_loss=0.8218, pruned_loss=0.9232, over 3520212.27 frames. ], batch size: 50, lr: 4.99e-02, grad_scale: 2.0 +2023-03-31 19:04:25,204 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.386e+02 1.860e+02 2.529e+02 4.736e+02, threshold=3.719e+02, percent-clipped=12.0 +2023-03-31 19:05:27,842 INFO [train.py:903] (1/4) Epoch 1, batch 550, loss[loss=0.9376, simple_loss=0.7807, pruned_loss=0.7601, over 18718.00 frames. ], tot_loss[loss=0.9629, simple_loss=0.8051, pruned_loss=0.8826, over 3565533.85 frames. ], batch size: 74, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:05:28,393 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.64 vs. 
limit=2.0 +2023-03-31 19:05:40,551 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=560.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:03,868 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=580.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:06:12,419 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=586.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:32,555 INFO [train.py:903] (1/4) Epoch 1, batch 600, loss[loss=0.8076, simple_loss=0.6809, pruned_loss=0.6181, over 19853.00 frames. ], tot_loss[loss=0.9411, simple_loss=0.787, pruned_loss=0.8366, over 3628105.98 frames. ], batch size: 52, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:06:36,892 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 2.910e+02 4.086e+02 6.136e+02 1.097e+03, threshold=8.173e+02, percent-clipped=60.0 +2023-03-31 19:06:40,831 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=608.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:07:11,500 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 19:07:38,082 INFO [train.py:903] (1/4) Epoch 1, batch 650, loss[loss=0.8372, simple_loss=0.7112, pruned_loss=0.6153, over 19542.00 frames. ], tot_loss[loss=0.9129, simple_loss=0.7648, pruned_loss=0.7862, over 3675516.72 frames. ], batch size: 56, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:07:47,306 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=658.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:08:13,404 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=677.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:41,834 INFO [train.py:903] (1/4) Epoch 1, batch 700, loss[loss=0.7309, simple_loss=0.6267, pruned_loss=0.5152, over 19480.00 frames. ], tot_loss[loss=0.8871, simple_loss=0.7452, pruned_loss=0.7398, over 3709486.30 frames. ], batch size: 49, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:08:43,407 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=702.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:46,605 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.173e+02 6.580e+02 8.914e+02 3.039e+03, threshold=1.316e+03, percent-clipped=29.0 +2023-03-31 19:08:59,142 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2137, 1.4595, 2.4414, 1.7821, 2.5404, 2.2759, 2.4021, 2.3107], + device='cuda:1'), covar=tensor([0.2391, 0.5703, 0.2943, 0.4250, 0.1630, 0.2413, 0.3088, 0.2743], + device='cuda:1'), in_proj_covar=tensor([0.0036, 0.0044, 0.0037, 0.0041, 0.0037, 0.0039, 0.0043, 0.0037], + device='cuda:1'), out_proj_covar=tensor([2.2808e-05, 3.1594e-05, 2.2624e-05, 2.8300e-05, 2.3633e-05, 2.2850e-05, + 2.7749e-05, 2.4723e-05], device='cuda:1') +2023-03-31 19:09:43,577 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=749.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:09:45,318 INFO [train.py:903] (1/4) Epoch 1, batch 750, loss[loss=0.8168, simple_loss=0.6967, pruned_loss=0.5727, over 18162.00 frames. ], tot_loss[loss=0.8608, simple_loss=0.7257, pruned_loss=0.6953, over 3724401.58 frames. 
], batch size: 83, lr: 4.97e-02, grad_scale: 2.0 +2023-03-31 19:10:14,484 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=773.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:10:48,972 INFO [train.py:903] (1/4) Epoch 1, batch 800, loss[loss=0.7225, simple_loss=0.6226, pruned_loss=0.4879, over 19592.00 frames. ], tot_loss[loss=0.8352, simple_loss=0.7067, pruned_loss=0.6539, over 3745627.59 frames. ], batch size: 61, lr: 4.97e-02, grad_scale: 4.0 +2023-03-31 19:10:53,094 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.827e+02 7.991e+02 1.030e+03 2.888e+03, threshold=1.598e+03, percent-clipped=14.0 +2023-03-31 19:11:01,626 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 19:11:03,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.78 vs. limit=5.0 +2023-03-31 19:11:08,447 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=816.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:11:40,358 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=841.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:11:52,164 INFO [train.py:903] (1/4) Epoch 1, batch 850, loss[loss=0.7578, simple_loss=0.6557, pruned_loss=0.5003, over 19616.00 frames. ], tot_loss[loss=0.8125, simple_loss=0.6901, pruned_loss=0.6178, over 3748691.42 frames. ], batch size: 61, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:06,984 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9724, 0.7910, 1.0493, 1.1723, 1.8002, 1.8163, 1.2395, 1.4915], + device='cuda:1'), covar=tensor([0.9893, 2.0870, 2.0057, 1.5953, 0.6676, 1.9937, 1.8155, 1.2857], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0115, 0.0118, 0.0100, 0.0072, 0.0129, 0.0101, 0.0096], + device='cuda:1'), out_proj_covar=tensor([5.8551e-05, 7.7465e-05, 7.7369e-05, 5.9372e-05, 4.2425e-05, 8.5179e-05, + 6.3534e-05, 5.8211e-05], device='cuda:1') +2023-03-31 19:12:10,465 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=864.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:12:12,314 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=865.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:12:43,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 19:12:46,975 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0647, 1.4073, 1.1270, 2.0397, 1.1530, 2.0910, 2.0768, 1.4011], + device='cuda:1'), covar=tensor([0.4878, 1.1753, 1.3744, 0.4644, 1.7246, 0.4836, 0.5788, 0.9036], + device='cuda:1'), in_proj_covar=tensor([0.0036, 0.0052, 0.0063, 0.0038, 0.0068, 0.0041, 0.0042, 0.0044], + device='cuda:1'), out_proj_covar=tensor([2.2538e-05, 3.6620e-05, 4.5423e-05, 2.4333e-05, 5.0861e-05, 2.4064e-05, + 2.5554e-05, 3.0093e-05], device='cuda:1') +2023-03-31 19:12:54,612 INFO [train.py:903] (1/4) Epoch 1, batch 900, loss[loss=0.6634, simple_loss=0.5795, pruned_loss=0.4242, over 19853.00 frames. ], tot_loss[loss=0.7905, simple_loss=0.6738, pruned_loss=0.5845, over 3760553.25 frames. 
], batch size: 52, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:59,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 6.072e+02 7.456e+02 9.579e+02 1.181e+04, threshold=1.491e+03, percent-clipped=3.0 +2023-03-31 19:13:21,570 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=924.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:13:30,380 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=930.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:53,705 INFO [train.py:903] (1/4) Epoch 1, batch 950, loss[loss=0.6765, simple_loss=0.5929, pruned_loss=0.4253, over 19543.00 frames. ], tot_loss[loss=0.7675, simple_loss=0.6569, pruned_loss=0.5522, over 3786940.23 frames. ], batch size: 54, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:13:53,727 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 19:13:56,045 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=952.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:14:06,053 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.92 vs. limit=2.0 +2023-03-31 19:14:21,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.92 vs. limit=5.0 +2023-03-31 19:14:51,782 INFO [train.py:903] (1/4) Epoch 1, batch 1000, loss[loss=0.6989, simple_loss=0.6128, pruned_loss=0.4351, over 18209.00 frames. ], tot_loss[loss=0.7478, simple_loss=0.6424, pruned_loss=0.5248, over 3802889.71 frames. ], batch size: 83, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:14:56,983 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 5.980e+02 7.509e+02 1.052e+03 2.029e+03, threshold=1.502e+03, percent-clipped=4.0 +2023-03-31 19:15:25,851 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1029.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:15:38,851 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1039.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:41,719 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 19:15:45,035 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1045.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:15:52,670 INFO [train.py:903] (1/4) Epoch 1, batch 1050, loss[loss=0.6844, simple_loss=0.6015, pruned_loss=0.4206, over 19537.00 frames. ], tot_loss[loss=0.729, simple_loss=0.629, pruned_loss=0.4991, over 3813165.64 frames. ], batch size: 54, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:15:56,724 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1054.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:12,550 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1067.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:20,717 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 19:16:53,095 INFO [train.py:903] (1/4) Epoch 1, batch 1100, loss[loss=0.7346, simple_loss=0.6295, pruned_loss=0.4675, over 19486.00 frames. ], tot_loss[loss=0.713, simple_loss=0.6173, pruned_loss=0.4779, over 3810250.38 frames. 
], batch size: 64, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:16:57,404 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.036e+02 7.117e+02 8.563e+02 1.068e+03 2.368e+03, threshold=1.713e+03, percent-clipped=4.0 +2023-03-31 19:17:14,947 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1120.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:17:44,294 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1145.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:17:51,774 INFO [train.py:903] (1/4) Epoch 1, batch 1150, loss[loss=0.5991, simple_loss=0.5247, pruned_loss=0.3647, over 19726.00 frames. ], tot_loss[loss=0.6964, simple_loss=0.6054, pruned_loss=0.4572, over 3803545.10 frames. ], batch size: 47, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:18:12,988 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1171.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:18:47,615 INFO [train.py:903] (1/4) Epoch 1, batch 1200, loss[loss=0.6781, simple_loss=0.5987, pruned_loss=0.4046, over 19330.00 frames. ], tot_loss[loss=0.6872, simple_loss=0.5986, pruned_loss=0.4436, over 3802456.67 frames. ], batch size: 70, lr: 4.93e-02, grad_scale: 8.0 +2023-03-31 19:18:52,224 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 7.433e+02 9.314e+02 1.239e+03 3.000e+03, threshold=1.863e+03, percent-clipped=16.0 +2023-03-31 19:18:56,103 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1209.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:19:16,424 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 19:19:42,376 INFO [train.py:903] (1/4) Epoch 1, batch 1250, loss[loss=0.6207, simple_loss=0.5452, pruned_loss=0.371, over 19583.00 frames. ], tot_loss[loss=0.674, simple_loss=0.5899, pruned_loss=0.4268, over 3813708.36 frames. ], batch size: 52, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:32,299 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1295.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:20:39,095 INFO [train.py:903] (1/4) Epoch 1, batch 1300, loss[loss=0.5945, simple_loss=0.5288, pruned_loss=0.3467, over 19425.00 frames. ], tot_loss[loss=0.6613, simple_loss=0.5811, pruned_loss=0.4118, over 3822283.43 frames. 
], batch size: 48, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:39,513 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1301.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:20:41,405 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4125, 0.8917, 0.9701, 0.9627, 1.1289, 1.4159, 1.2797, 1.2666], + device='cuda:1'), covar=tensor([0.5466, 1.0360, 1.0409, 0.7195, 0.7256, 0.9843, 0.9189, 0.6223], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0159, 0.0164, 0.0129, 0.0147, 0.0168, 0.0148, 0.0129], + device='cuda:1'), out_proj_covar=tensor([8.3574e-05, 1.0621e-04, 1.1064e-04, 8.3685e-05, 9.4480e-05, 1.1128e-04, + 9.9269e-05, 8.3402e-05], device='cuda:1') +2023-03-31 19:20:43,725 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+02 7.699e+02 1.048e+03 1.379e+03 4.741e+03, threshold=2.097e+03, percent-clipped=13.0 +2023-03-31 19:21:00,020 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1320.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:03,702 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1323.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:04,828 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1324.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:06,822 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1326.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:33,464 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1348.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:35,925 INFO [train.py:903] (1/4) Epoch 1, batch 1350, loss[loss=0.5545, simple_loss=0.4991, pruned_loss=0.3164, over 19755.00 frames. ], tot_loss[loss=0.6498, simple_loss=0.573, pruned_loss=0.3986, over 3827585.89 frames. ], batch size: 46, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:31,171 INFO [train.py:903] (1/4) Epoch 1, batch 1400, loss[loss=0.6623, simple_loss=0.5891, pruned_loss=0.3826, over 17334.00 frames. ], tot_loss[loss=0.6395, simple_loss=0.5663, pruned_loss=0.3863, over 3821449.43 frames. ], batch size: 101, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:35,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.620e+02 9.515e+02 1.230e+03 4.278e+03, threshold=1.903e+03, percent-clipped=3.0 +2023-03-31 19:23:04,500 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0148, 1.6877, 1.5409, 2.2630, 2.8006, 2.1618, 2.6960, 2.4339], + device='cuda:1'), covar=tensor([0.0774, 0.3930, 0.6025, 0.2283, 0.1106, 0.5041, 0.1202, 0.1613], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0109, 0.0143, 0.0092, 0.0108, 0.0166, 0.0102, 0.0087], + device='cuda:1'), out_proj_covar=tensor([4.7813e-05, 7.5708e-05, 9.8948e-05, 6.4597e-05, 6.5640e-05, 1.0900e-04, + 6.4754e-05, 5.9698e-05], device='cuda:1') +2023-03-31 19:23:24,304 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 19:23:25,222 INFO [train.py:903] (1/4) Epoch 1, batch 1450, loss[loss=0.6741, simple_loss=0.597, pruned_loss=0.39, over 19332.00 frames. ], tot_loss[loss=0.6331, simple_loss=0.5622, pruned_loss=0.3779, over 3810254.48 frames. 
], batch size: 70, lr: 4.90e-02, grad_scale: 8.0 +2023-03-31 19:23:26,461 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1452.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:24:18,031 INFO [train.py:903] (1/4) Epoch 1, batch 1500, loss[loss=0.5461, simple_loss=0.4957, pruned_loss=0.3048, over 19420.00 frames. ], tot_loss[loss=0.6252, simple_loss=0.5571, pruned_loss=0.3687, over 3810163.72 frames. ], batch size: 48, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:24:23,064 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+02 9.104e+02 1.060e+03 1.370e+03 5.981e+03, threshold=2.119e+03, percent-clipped=12.0 +2023-03-31 19:24:35,376 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1515.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:24:39,510 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4290, 1.5968, 1.5091, 2.3667, 3.0200, 1.6879, 2.8665, 2.8632], + device='cuda:1'), covar=tensor([0.0760, 0.3989, 0.5445, 0.2031, 0.1069, 0.6120, 0.1106, 0.1119], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0121, 0.0152, 0.0102, 0.0117, 0.0185, 0.0112, 0.0094], + device='cuda:1'), out_proj_covar=tensor([4.9373e-05, 8.3634e-05, 1.0634e-04, 7.2056e-05, 7.0243e-05, 1.2044e-04, + 7.2726e-05, 6.3695e-05], device='cuda:1') +2023-03-31 19:25:14,433 INFO [train.py:903] (1/4) Epoch 1, batch 1550, loss[loss=0.6676, simple_loss=0.5955, pruned_loss=0.3797, over 19739.00 frames. ], tot_loss[loss=0.6167, simple_loss=0.5518, pruned_loss=0.3595, over 3801034.09 frames. ], batch size: 63, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:25:28,743 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 19:25:43,582 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1580.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:25:47,370 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1584.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:05,888 INFO [train.py:903] (1/4) Epoch 1, batch 1600, loss[loss=0.5073, simple_loss=0.4801, pruned_loss=0.2674, over 19730.00 frames. ], tot_loss[loss=0.607, simple_loss=0.5455, pruned_loss=0.3498, over 3811866.58 frames. ], batch size: 51, lr: 4.88e-02, grad_scale: 8.0 +2023-03-31 19:26:10,815 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.811e+02 9.198e+02 1.152e+03 1.497e+03 2.578e+03, threshold=2.303e+03, percent-clipped=3.0 +2023-03-31 19:26:11,244 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1605.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:25,445 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 19:26:35,448 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1629.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:36,323 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1630.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:57,905 INFO [train.py:903] (1/4) Epoch 1, batch 1650, loss[loss=0.4481, simple_loss=0.4279, pruned_loss=0.2335, over 19729.00 frames. ], tot_loss[loss=0.6003, simple_loss=0.5413, pruned_loss=0.3427, over 3810853.67 frames. 
], batch size: 45, lr: 4.87e-02, grad_scale: 8.0 +2023-03-31 19:27:31,042 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4424, 1.4281, 1.4141, 2.7005, 3.1729, 1.9710, 2.8652, 2.9171], + device='cuda:1'), covar=tensor([0.0396, 0.2893, 0.5012, 0.1298, 0.0469, 0.3330, 0.0607, 0.0710], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0130, 0.0164, 0.0115, 0.0120, 0.0201, 0.0116, 0.0100], + device='cuda:1'), out_proj_covar=tensor([4.8306e-05, 9.0777e-05, 1.1418e-04, 8.1079e-05, 7.2026e-05, 1.3056e-04, + 7.4271e-05, 6.7406e-05], device='cuda:1') +2023-03-31 19:27:40,175 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9988, 1.2595, 2.1222, 1.3895, 2.1346, 2.5744, 2.5065, 2.6047], + device='cuda:1'), covar=tensor([0.3332, 0.5230, 0.2262, 0.6011, 0.2276, 0.0783, 0.1135, 0.1129], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0123, 0.0102, 0.0142, 0.0102, 0.0072, 0.0089, 0.0074], + device='cuda:1'), out_proj_covar=tensor([8.5116e-05, 8.1632e-05, 6.4274e-05, 9.2415e-05, 6.4919e-05, 3.9783e-05, + 5.0504e-05, 4.0153e-05], device='cuda:1') +2023-03-31 19:27:52,350 INFO [train.py:903] (1/4) Epoch 1, batch 1700, loss[loss=0.543, simple_loss=0.5064, pruned_loss=0.2912, over 19786.00 frames. ], tot_loss[loss=0.5906, simple_loss=0.5347, pruned_loss=0.334, over 3820011.67 frames. ], batch size: 56, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:27:56,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.719e+02 9.402e+02 1.223e+03 1.535e+03 2.582e+03, threshold=2.447e+03, percent-clipped=3.0 +2023-03-31 19:28:26,633 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 19:28:29,038 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6691, 1.4474, 1.3243, 2.1126, 1.7555, 2.3074, 1.9913, 1.2015], + device='cuda:1'), covar=tensor([0.3219, 0.3605, 0.5419, 0.1833, 0.4355, 0.1423, 0.2660, 0.4714], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0068, 0.0096, 0.0078, 0.0115, 0.0053, 0.0068, 0.0080], + device='cuda:1'), out_proj_covar=tensor([4.5606e-05, 4.6950e-05, 6.7610e-05, 5.6521e-05, 7.9562e-05, 3.1947e-05, + 5.1156e-05, 5.6833e-05], device='cuda:1') +2023-03-31 19:28:46,841 INFO [train.py:903] (1/4) Epoch 1, batch 1750, loss[loss=0.6009, simple_loss=0.5462, pruned_loss=0.3309, over 18244.00 frames. ], tot_loss[loss=0.5832, simple_loss=0.5307, pruned_loss=0.3266, over 3817193.99 frames. ], batch size: 83, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:29:37,333 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1796.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:29:43,445 INFO [train.py:903] (1/4) Epoch 1, batch 1800, loss[loss=0.4744, simple_loss=0.4537, pruned_loss=0.2471, over 19319.00 frames. ], tot_loss[loss=0.5782, simple_loss=0.5272, pruned_loss=0.3218, over 3799184.18 frames. ], batch size: 44, lr: 4.85e-02, grad_scale: 8.0 +2023-03-31 19:29:47,613 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.927e+02 9.266e+02 1.209e+03 1.539e+03 2.564e+03, threshold=2.418e+03, percent-clipped=2.0 +2023-03-31 19:30:36,184 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 19:30:40,566 INFO [train.py:903] (1/4) Epoch 1, batch 1850, loss[loss=0.6044, simple_loss=0.544, pruned_loss=0.3346, over 12719.00 frames. ], tot_loss[loss=0.5697, simple_loss=0.5223, pruned_loss=0.3143, over 3803666.65 frames. 
], batch size: 135, lr: 4.84e-02, grad_scale: 8.0 +2023-03-31 19:30:45,855 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1856.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:06,928 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1875.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:11,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 19:31:20,232 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1886.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:22,094 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1888.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:36,186 INFO [train.py:903] (1/4) Epoch 1, batch 1900, loss[loss=0.5169, simple_loss=0.4854, pruned_loss=0.2744, over 19405.00 frames. ], tot_loss[loss=0.5643, simple_loss=0.5196, pruned_loss=0.3091, over 3813448.61 frames. ], batch size: 48, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:31:40,288 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 9.078e+02 1.104e+03 1.499e+03 2.754e+03, threshold=2.207e+03, percent-clipped=2.0 +2023-03-31 19:31:47,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1911.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:47,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1911.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:52,298 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 19:31:56,237 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 19:32:06,347 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:32:19,668 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 19:32:31,502 INFO [train.py:903] (1/4) Epoch 1, batch 1950, loss[loss=0.5425, simple_loss=0.5183, pruned_loss=0.2832, over 19367.00 frames. ], tot_loss[loss=0.5623, simple_loss=0.5184, pruned_loss=0.3068, over 3790921.09 frames. ], batch size: 70, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:32:57,189 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1973.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:33:13,493 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-31 19:33:29,173 INFO [train.py:903] (1/4) Epoch 1, batch 2000, loss[loss=0.4398, simple_loss=0.4299, pruned_loss=0.2249, over 19758.00 frames. ], tot_loss[loss=0.555, simple_loss=0.5143, pruned_loss=0.3008, over 3792896.18 frames. ], batch size: 47, lr: 4.82e-02, grad_scale: 8.0 +2023-03-31 19:33:33,532 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.917e+02 1.007e+03 1.260e+03 1.703e+03 3.255e+03, threshold=2.521e+03, percent-clipped=11.0 +2023-03-31 19:34:19,000 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2043.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:34:25,213 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-03-31 19:34:27,589 INFO [train.py:903] (1/4) Epoch 1, batch 2050, loss[loss=0.5522, simple_loss=0.524, pruned_loss=0.2902, over 19604.00 frames. ], tot_loss[loss=0.5484, simple_loss=0.511, pruned_loss=0.2952, over 3797751.54 frames. ], batch size: 57, lr: 4.81e-02, grad_scale: 16.0 +2023-03-31 19:34:43,686 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 19:34:43,723 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 19:35:06,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 19:35:12,662 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2088.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:35:27,374 INFO [train.py:903] (1/4) Epoch 1, batch 2100, loss[loss=0.5711, simple_loss=0.531, pruned_loss=0.3056, over 18636.00 frames. ], tot_loss[loss=0.5406, simple_loss=0.5071, pruned_loss=0.2888, over 3804675.74 frames. ], batch size: 74, lr: 4.80e-02, grad_scale: 16.0 +2023-03-31 19:35:30,829 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.5241, 4.8430, 2.3555, 4.9175, 1.8370, 5.7358, 5.1072, 5.2773], + device='cuda:1'), covar=tensor([0.0342, 0.0682, 0.2341, 0.0344, 0.2408, 0.0216, 0.0472, 0.0390], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0177, 0.0179, 0.0122, 0.0195, 0.0108, 0.0123, 0.0112], + device='cuda:1'), out_proj_covar=tensor([1.1510e-04, 1.3543e-04, 1.2023e-04, 8.8821e-05, 1.3606e-04, 7.8351e-05, + 8.7946e-05, 8.3719e-05], device='cuda:1') +2023-03-31 19:35:31,674 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+02 9.211e+02 1.091e+03 1.524e+03 2.851e+03, threshold=2.182e+03, percent-clipped=6.0 +2023-03-31 19:35:56,698 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 19:36:17,092 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 19:36:24,972 INFO [train.py:903] (1/4) Epoch 1, batch 2150, loss[loss=0.4753, simple_loss=0.4737, pruned_loss=0.2385, over 19669.00 frames. ], tot_loss[loss=0.531, simple_loss=0.5013, pruned_loss=0.2817, over 3815778.78 frames. ], batch size: 58, lr: 4.79e-02, grad_scale: 16.0 +2023-03-31 19:36:45,539 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2167.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:37:13,894 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2192.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:37:24,837 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:37:25,806 INFO [train.py:903] (1/4) Epoch 1, batch 2200, loss[loss=0.5041, simple_loss=0.4962, pruned_loss=0.256, over 19665.00 frames. ], tot_loss[loss=0.526, simple_loss=0.4993, pruned_loss=0.2774, over 3818616.67 frames. 
], batch size: 59, lr: 4.78e-02, grad_scale: 16.0 +2023-03-31 19:37:31,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.347e+02 9.332e+02 1.145e+03 1.435e+03 3.303e+03, threshold=2.290e+03, percent-clipped=7.0 +2023-03-31 19:37:49,477 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2219.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:37:55,065 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9732, 4.2114, 2.6159, 4.2865, 1.7849, 5.0783, 4.5984, 4.6067], + device='cuda:1'), covar=tensor([0.0394, 0.0849, 0.2427, 0.0491, 0.2592, 0.0303, 0.0527, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0186, 0.0191, 0.0130, 0.0203, 0.0116, 0.0128, 0.0117], + device='cuda:1'), out_proj_covar=tensor([1.2508e-04, 1.4385e-04, 1.2808e-04, 9.5759e-05, 1.4200e-04, 8.4706e-05, + 9.1586e-05, 8.7291e-05], device='cuda:1') +2023-03-31 19:38:03,614 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:16,705 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4583, 1.3205, 1.1032, 1.6203, 1.4337, 1.7238, 1.6284, 1.4912], + device='cuda:1'), covar=tensor([0.2296, 0.3138, 0.5152, 0.2586, 0.6390, 0.1928, 0.3499, 0.2691], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0160, 0.0214, 0.0155, 0.0249, 0.0159, 0.0186, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-31 19:38:27,459 INFO [train.py:903] (1/4) Epoch 1, batch 2250, loss[loss=0.518, simple_loss=0.4944, pruned_loss=0.2707, over 19864.00 frames. ], tot_loss[loss=0.5174, simple_loss=0.4939, pruned_loss=0.2713, over 3820476.44 frames. ], batch size: 52, lr: 4.77e-02, grad_scale: 16.0 +2023-03-31 19:39:11,079 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4602, 1.7084, 1.8435, 2.1472, 3.0286, 1.2272, 2.4028, 3.1739], + device='cuda:1'), covar=tensor([0.0317, 0.2551, 0.3370, 0.2197, 0.0513, 0.4283, 0.1075, 0.0570], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0178, 0.0201, 0.0186, 0.0142, 0.0267, 0.0165, 0.0141], + device='cuda:1'), out_proj_covar=tensor([7.0953e-05, 1.2776e-04, 1.4438e-04, 1.4056e-04, 9.5749e-05, 1.7669e-04, + 1.2525e-04, 1.0042e-04], device='cuda:1') +2023-03-31 19:39:24,146 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2299.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:39:25,913 INFO [train.py:903] (1/4) Epoch 1, batch 2300, loss[loss=0.4353, simple_loss=0.4397, pruned_loss=0.2155, over 19731.00 frames. ], tot_loss[loss=0.5119, simple_loss=0.4909, pruned_loss=0.2671, over 3823204.60 frames. ], batch size: 51, lr: 4.77e-02, grad_scale: 8.0 +2023-03-31 19:39:31,315 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+02 9.458e+02 1.205e+03 1.557e+03 3.326e+03, threshold=2.410e+03, percent-clipped=10.0 +2023-03-31 19:39:39,125 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-03-31 19:39:41,688 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2315.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:39:53,919 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2324.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:04,979 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2334.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:17,267 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2344.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:20,640 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2347.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:24,828 INFO [train.py:903] (1/4) Epoch 1, batch 2350, loss[loss=0.4883, simple_loss=0.4893, pruned_loss=0.2437, over 19782.00 frames. ], tot_loss[loss=0.5095, simple_loss=0.4902, pruned_loss=0.2649, over 3831592.03 frames. ], batch size: 56, lr: 4.76e-02, grad_scale: 8.0 +2023-03-31 19:40:33,243 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2358.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:48,369 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2369.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:57,588 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.89 vs. limit=2.0 +2023-03-31 19:41:07,208 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 19:41:23,351 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 19:41:26,485 INFO [train.py:903] (1/4) Epoch 1, batch 2400, loss[loss=0.5022, simple_loss=0.5081, pruned_loss=0.2482, over 19647.00 frames. ], tot_loss[loss=0.506, simple_loss=0.4886, pruned_loss=0.2621, over 3830833.45 frames. ], batch size: 60, lr: 4.75e-02, grad_scale: 8.0 +2023-03-31 19:41:33,165 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+02 9.458e+02 1.226e+03 1.613e+03 2.603e+03, threshold=2.451e+03, percent-clipped=4.0 +2023-03-31 19:42:26,147 INFO [train.py:903] (1/4) Epoch 1, batch 2450, loss[loss=0.4677, simple_loss=0.4536, pruned_loss=0.2409, over 19488.00 frames. ], tot_loss[loss=0.5023, simple_loss=0.4868, pruned_loss=0.2592, over 3828572.27 frames. ], batch size: 49, lr: 4.74e-02, grad_scale: 8.0 +2023-03-31 19:43:24,714 INFO [train.py:903] (1/4) Epoch 1, batch 2500, loss[loss=0.5837, simple_loss=0.5339, pruned_loss=0.3167, over 19466.00 frames. ], tot_loss[loss=0.5005, simple_loss=0.4854, pruned_loss=0.258, over 3821894.69 frames. ], batch size: 64, lr: 4.73e-02, grad_scale: 8.0 +2023-03-31 19:43:30,995 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.049e+02 1.082e+03 1.390e+03 1.742e+03 4.873e+03, threshold=2.779e+03, percent-clipped=5.0 +2023-03-31 19:44:22,064 INFO [train.py:903] (1/4) Epoch 1, batch 2550, loss[loss=0.4602, simple_loss=0.4571, pruned_loss=0.2317, over 19588.00 frames. ], tot_loss[loss=0.4982, simple_loss=0.4843, pruned_loss=0.2563, over 3821050.90 frames. 
], batch size: 52, lr: 4.72e-02, grad_scale: 8.0 +2023-03-31 19:44:47,134 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2571.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:08,998 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2590.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:14,139 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 19:45:15,574 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2596.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:21,670 INFO [train.py:903] (1/4) Epoch 1, batch 2600, loss[loss=0.4827, simple_loss=0.4733, pruned_loss=0.246, over 18785.00 frames. ], tot_loss[loss=0.4903, simple_loss=0.4793, pruned_loss=0.2507, over 3817858.70 frames. ], batch size: 74, lr: 4.71e-02, grad_scale: 8.0 +2023-03-31 19:45:24,454 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2603.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:28,253 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+02 9.154e+02 1.259e+03 1.710e+03 2.682e+03, threshold=2.519e+03, percent-clipped=0.0 +2023-03-31 19:45:39,542 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2615.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:55,210 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2628.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:46:22,934 INFO [train.py:903] (1/4) Epoch 1, batch 2650, loss[loss=0.4487, simple_loss=0.4473, pruned_loss=0.225, over 19608.00 frames. ], tot_loss[loss=0.4866, simple_loss=0.4771, pruned_loss=0.2481, over 3810995.90 frames. ], batch size: 50, lr: 4.70e-02, grad_scale: 8.0 +2023-03-31 19:46:39,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 19:47:23,181 INFO [train.py:903] (1/4) Epoch 1, batch 2700, loss[loss=0.4836, simple_loss=0.4844, pruned_loss=0.2414, over 17184.00 frames. ], tot_loss[loss=0.4811, simple_loss=0.4742, pruned_loss=0.2441, over 3812808.31 frames. ], batch size: 101, lr: 4.69e-02, grad_scale: 8.0 +2023-03-31 19:47:24,582 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2702.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:47:25,671 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2703.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:47:29,726 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+02 8.490e+02 1.133e+03 1.436e+03 3.154e+03, threshold=2.267e+03, percent-clipped=3.0 +2023-03-31 19:48:24,803 INFO [train.py:903] (1/4) Epoch 1, batch 2750, loss[loss=0.4358, simple_loss=0.4557, pruned_loss=0.2079, over 19523.00 frames. ], tot_loss[loss=0.4767, simple_loss=0.4715, pruned_loss=0.241, over 3813883.48 frames. ], batch size: 54, lr: 4.68e-02, grad_scale: 8.0 +2023-03-31 19:49:25,709 INFO [train.py:903] (1/4) Epoch 1, batch 2800, loss[loss=0.4808, simple_loss=0.4809, pruned_loss=0.2403, over 17225.00 frames. ], tot_loss[loss=0.4747, simple_loss=0.4707, pruned_loss=0.2394, over 3803816.22 frames. 
], batch size: 101, lr: 4.67e-02, grad_scale: 8.0 +2023-03-31 19:49:31,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 1.002e+03 1.265e+03 1.511e+03 4.462e+03, threshold=2.529e+03, percent-clipped=7.0 +2023-03-31 19:49:45,690 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2817.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:50:15,412 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2842.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:50:26,209 INFO [train.py:903] (1/4) Epoch 1, batch 2850, loss[loss=0.4826, simple_loss=0.4758, pruned_loss=0.2446, over 19757.00 frames. ], tot_loss[loss=0.4733, simple_loss=0.4696, pruned_loss=0.2385, over 3813363.01 frames. ], batch size: 54, lr: 4.66e-02, grad_scale: 8.0 +2023-03-31 19:51:22,261 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 19:51:25,262 INFO [train.py:903] (1/4) Epoch 1, batch 2900, loss[loss=0.4438, simple_loss=0.4527, pruned_loss=0.2174, over 19591.00 frames. ], tot_loss[loss=0.4695, simple_loss=0.4671, pruned_loss=0.2359, over 3819617.16 frames. ], batch size: 57, lr: 4.65e-02, grad_scale: 8.0 +2023-03-31 19:51:30,473 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.237e+02 1.045e+03 1.349e+03 1.754e+03 3.463e+03, threshold=2.699e+03, percent-clipped=4.0 +2023-03-31 19:52:12,180 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2987, 1.0528, 1.1257, 1.4045, 1.4411, 1.6557, 1.5136, 1.4849], + device='cuda:1'), covar=tensor([0.1326, 0.2684, 0.2301, 0.1559, 0.2831, 0.0953, 0.1393, 0.1534], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0143, 0.0158, 0.0130, 0.0187, 0.0118, 0.0132, 0.0121], + device='cuda:1'), out_proj_covar=tensor([7.8904e-05, 1.0216e-04, 1.0859e-04, 9.3535e-05, 1.3275e-04, 8.3586e-05, + 9.1483e-05, 8.6552e-05], device='cuda:1') +2023-03-31 19:52:25,021 INFO [train.py:903] (1/4) Epoch 1, batch 2950, loss[loss=0.4407, simple_loss=0.4566, pruned_loss=0.2124, over 19661.00 frames. ], tot_loss[loss=0.4675, simple_loss=0.466, pruned_loss=0.2345, over 3811471.93 frames. ], batch size: 55, lr: 4.64e-02, grad_scale: 8.0 +2023-03-31 19:52:30,813 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2955.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:52:32,690 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.85 vs. limit=2.0 +2023-03-31 19:53:26,125 INFO [train.py:903] (1/4) Epoch 1, batch 3000, loss[loss=0.4436, simple_loss=0.4581, pruned_loss=0.2145, over 19450.00 frames. ], tot_loss[loss=0.4631, simple_loss=0.4633, pruned_loss=0.2315, over 3820259.06 frames. ], batch size: 64, lr: 4.63e-02, grad_scale: 8.0 +2023-03-31 19:53:26,126 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 19:53:37,095 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2079, 1.1985, 1.2532, 1.1677, 1.1447, 0.7766, 0.5395, 1.2007], + device='cuda:1'), covar=tensor([0.1550, 0.0771, 0.0671, 0.1136, 0.1257, 0.1364, 0.2354, 0.1184], + device='cuda:1'), in_proj_covar=tensor([0.0200, 0.0127, 0.0119, 0.0151, 0.0129, 0.0170, 0.0197, 0.0173], + device='cuda:1'), out_proj_covar=tensor([1.5072e-04, 9.8014e-05, 9.2597e-05, 1.1889e-04, 1.0297e-04, 1.3044e-04, + 1.4498e-04, 1.3205e-04], device='cuda:1') +2023-03-31 19:53:38,705 INFO [train.py:937] (1/4) Epoch 1, validation: loss=0.3995, simple_loss=0.4801, pruned_loss=0.1594, over 944034.00 frames. 
+2023-03-31 19:53:38,706 INFO [train.py:938] (1/4) Maximum memory allocated so far is 16499MB +2023-03-31 19:53:43,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 19:53:45,669 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 9.060e+02 1.151e+03 1.550e+03 2.691e+03, threshold=2.303e+03, percent-clipped=0.0 +2023-03-31 19:54:23,279 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3037.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:35,524 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3047.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:40,055 INFO [train.py:903] (1/4) Epoch 1, batch 3050, loss[loss=0.4647, simple_loss=0.4713, pruned_loss=0.2291, over 19778.00 frames. ], tot_loss[loss=0.4598, simple_loss=0.4608, pruned_loss=0.2294, over 3818699.47 frames. ], batch size: 56, lr: 4.62e-02, grad_scale: 8.0 +2023-03-31 19:55:07,327 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3073.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:55:36,291 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3098.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:55:37,609 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-31 19:55:39,201 INFO [train.py:903] (1/4) Epoch 1, batch 3100, loss[loss=0.4919, simple_loss=0.48, pruned_loss=0.2519, over 19672.00 frames. ], tot_loss[loss=0.4607, simple_loss=0.4613, pruned_loss=0.2301, over 3825696.79 frames. ], batch size: 60, lr: 4.61e-02, grad_scale: 8.0 +2023-03-31 19:55:45,832 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+02 1.021e+03 1.362e+03 1.815e+03 5.785e+03, threshold=2.723e+03, percent-clipped=14.0 +2023-03-31 19:56:41,856 INFO [train.py:903] (1/4) Epoch 1, batch 3150, loss[loss=0.4158, simple_loss=0.4248, pruned_loss=0.2034, over 19802.00 frames. ], tot_loss[loss=0.4608, simple_loss=0.4621, pruned_loss=0.2297, over 3828550.79 frames. ], batch size: 49, lr: 4.60e-02, grad_scale: 8.0 +2023-03-31 19:56:54,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3162.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:08,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 19:57:24,055 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3186.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:26,957 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-31 19:57:36,131 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.88 vs. limit=2.0 +2023-03-31 19:57:40,821 INFO [train.py:903] (1/4) Epoch 1, batch 3200, loss[loss=0.4424, simple_loss=0.4609, pruned_loss=0.212, over 19574.00 frames. ], tot_loss[loss=0.4587, simple_loss=0.461, pruned_loss=0.2282, over 3844104.19 frames. 
], batch size: 57, lr: 4.59e-02, grad_scale: 8.0 +2023-03-31 19:57:45,653 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6291, 1.2717, 1.4082, 1.0521, 2.2069, 2.2286, 2.0157, 2.0588], + device='cuda:1'), covar=tensor([0.1281, 0.1813, 0.1273, 0.2224, 0.0525, 0.0162, 0.0291, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0215, 0.0203, 0.0259, 0.0181, 0.0103, 0.0129, 0.0107], + device='cuda:1'), out_proj_covar=tensor([1.7718e-04, 1.4783e-04, 1.4082e-04, 1.7845e-04, 1.4670e-04, 6.7671e-05, + 9.2051e-05, 7.5712e-05], device='cuda:1') +2023-03-31 19:57:46,447 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+02 9.158e+02 1.127e+03 1.418e+03 2.574e+03, threshold=2.253e+03, percent-clipped=0.0 +2023-03-31 19:58:41,766 INFO [train.py:903] (1/4) Epoch 1, batch 3250, loss[loss=0.4658, simple_loss=0.4723, pruned_loss=0.2296, over 19305.00 frames. ], tot_loss[loss=0.4585, simple_loss=0.4611, pruned_loss=0.2279, over 3842612.81 frames. ], batch size: 66, lr: 4.58e-02, grad_scale: 8.0 +2023-03-31 19:59:13,240 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6255, 1.4060, 1.6593, 1.3346, 1.1061, 1.4225, 0.6466, 1.0014], + device='cuda:1'), covar=tensor([0.0644, 0.0545, 0.0418, 0.0624, 0.0928, 0.0858, 0.1344, 0.1298], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0086, 0.0092, 0.0115, 0.0122, 0.0120, 0.0146, 0.0140], + device='cuda:1'), out_proj_covar=tensor([6.9507e-05, 6.1082e-05, 7.0318e-05, 8.8108e-05, 9.1278e-05, 8.9758e-05, + 1.0871e-04, 1.0688e-04], device='cuda:1') +2023-03-31 19:59:40,379 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3299.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:42,431 INFO [train.py:903] (1/4) Epoch 1, batch 3300, loss[loss=0.4492, simple_loss=0.4451, pruned_loss=0.2266, over 19581.00 frames. ], tot_loss[loss=0.4539, simple_loss=0.458, pruned_loss=0.2249, over 3837231.61 frames. ], batch size: 52, lr: 4.57e-02, grad_scale: 8.0 +2023-03-31 19:59:42,835 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3301.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:48,772 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+02 9.991e+02 1.183e+03 1.562e+03 4.237e+03, threshold=2.366e+03, percent-clipped=7.0 +2023-03-31 19:59:48,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 19:59:49,095 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3306.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:00:35,188 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9358, 1.2821, 2.2660, 1.6083, 2.8121, 4.2201, 3.2661, 2.6013], + device='cuda:1'), covar=tensor([0.2436, 0.1571, 0.1882, 0.2181, 0.1073, 0.0244, 0.0938, 0.1382], + device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0144, 0.0166, 0.0194, 0.0172, 0.0095, 0.0164, 0.0171], + device='cuda:1'), out_proj_covar=tensor([1.2361e-04, 9.9629e-05, 1.1801e-04, 1.3407e-04, 1.1563e-04, 6.7293e-05, + 1.0771e-04, 1.1344e-04], device='cuda:1') +2023-03-31 20:00:43,738 INFO [train.py:903] (1/4) Epoch 1, batch 3350, loss[loss=0.4168, simple_loss=0.4346, pruned_loss=0.1995, over 19483.00 frames. ], tot_loss[loss=0.4505, simple_loss=0.4554, pruned_loss=0.2228, over 3838058.11 frames. 
], batch size: 49, lr: 4.56e-02, grad_scale: 8.0 +2023-03-31 20:01:22,255 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3381.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:01:46,098 INFO [train.py:903] (1/4) Epoch 1, batch 3400, loss[loss=0.4803, simple_loss=0.4743, pruned_loss=0.2432, over 19613.00 frames. ], tot_loss[loss=0.4511, simple_loss=0.4555, pruned_loss=0.2234, over 3836945.33 frames. ], batch size: 57, lr: 4.55e-02, grad_scale: 8.0 +2023-03-31 20:01:52,899 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.525e+02 9.967e+02 1.253e+03 1.611e+03 4.007e+03, threshold=2.507e+03, percent-clipped=3.0 +2023-03-31 20:02:02,098 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3414.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:07,714 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3418.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:02:38,872 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3443.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:48,478 INFO [train.py:903] (1/4) Epoch 1, batch 3450, loss[loss=0.4269, simple_loss=0.4443, pruned_loss=0.2048, over 19448.00 frames. ], tot_loss[loss=0.4526, simple_loss=0.4565, pruned_loss=0.2243, over 3810457.99 frames. ], batch size: 64, lr: 4.54e-02, grad_scale: 8.0 +2023-03-31 20:02:50,744 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 20:03:25,943 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2249, 1.0979, 0.9747, 1.3957, 1.2862, 1.2689, 1.2689, 1.2672], + device='cuda:1'), covar=tensor([0.1141, 0.1588, 0.1858, 0.1391, 0.1777, 0.1793, 0.2019, 0.1283], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0255, 0.0259, 0.0280, 0.0363, 0.0246, 0.0323, 0.0225], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 20:03:43,985 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:03:50,180 INFO [train.py:903] (1/4) Epoch 1, batch 3500, loss[loss=0.454, simple_loss=0.4719, pruned_loss=0.218, over 19555.00 frames. ], tot_loss[loss=0.4509, simple_loss=0.4558, pruned_loss=0.223, over 3825907.44 frames. ], batch size: 56, lr: 4.53e-02, grad_scale: 8.0 +2023-03-31 20:03:56,687 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+02 9.703e+02 1.213e+03 1.703e+03 9.610e+03, threshold=2.427e+03, percent-clipped=9.0 +2023-03-31 20:04:52,056 INFO [train.py:903] (1/4) Epoch 1, batch 3550, loss[loss=0.4379, simple_loss=0.4517, pruned_loss=0.212, over 19787.00 frames. ], tot_loss[loss=0.4465, simple_loss=0.4529, pruned_loss=0.22, over 3829761.75 frames. ], batch size: 56, lr: 4.51e-02, grad_scale: 8.0 +2023-03-31 20:05:00,241 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3557.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:05:07,891 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3564.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:05:30,991 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3582.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:05:44,033 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.07 vs. 
limit=5.0 +2023-03-31 20:05:53,927 INFO [train.py:903] (1/4) Epoch 1, batch 3600, loss[loss=0.3539, simple_loss=0.3813, pruned_loss=0.1632, over 16449.00 frames. ], tot_loss[loss=0.4589, simple_loss=0.4604, pruned_loss=0.2287, over 3818352.54 frames. ], batch size: 36, lr: 4.50e-02, grad_scale: 8.0 +2023-03-31 20:06:00,964 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+02 9.459e+02 1.417e+03 1.964e+03 2.103e+04, threshold=2.834e+03, percent-clipped=17.0 +2023-03-31 20:06:55,025 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3650.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:06:55,790 INFO [train.py:903] (1/4) Epoch 1, batch 3650, loss[loss=0.4618, simple_loss=0.4376, pruned_loss=0.243, over 19763.00 frames. ], tot_loss[loss=0.4552, simple_loss=0.4585, pruned_loss=0.2259, over 3822099.68 frames. ], batch size: 46, lr: 4.49e-02, grad_scale: 8.0 +2023-03-31 20:07:18,743 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3670.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:49,493 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3694.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:07:50,793 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3695.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:56,859 INFO [train.py:903] (1/4) Epoch 1, batch 3700, loss[loss=0.4446, simple_loss=0.4404, pruned_loss=0.2245, over 19099.00 frames. ], tot_loss[loss=0.4619, simple_loss=0.4632, pruned_loss=0.2303, over 3813277.64 frames. ], batch size: 42, lr: 4.48e-02, grad_scale: 8.0 +2023-03-31 20:08:05,842 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.255e+02 1.022e+03 1.666e+03 2.666e+03 1.441e+04, threshold=3.331e+03, percent-clipped=22.0 +2023-03-31 20:09:01,163 INFO [train.py:903] (1/4) Epoch 1, batch 3750, loss[loss=0.4832, simple_loss=0.4924, pruned_loss=0.237, over 19663.00 frames. ], tot_loss[loss=0.4562, simple_loss=0.4594, pruned_loss=0.2264, over 3818948.61 frames. 
], batch size: 60, lr: 4.47e-02, grad_scale: 8.0 +2023-03-31 20:09:02,739 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3752.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:09:19,385 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3765.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:09:34,305 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3777.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:09:37,704 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9413, 1.0756, 0.9195, 0.9422, 1.0197, 0.7316, 0.2774, 1.2271], + device='cuda:1'), covar=tensor([0.1006, 0.0641, 0.0932, 0.1098, 0.0911, 0.1472, 0.1968, 0.0936], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0153, 0.0155, 0.0193, 0.0145, 0.0214, 0.0233, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 20:09:51,927 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7686, 1.2041, 1.5542, 1.0056, 2.5370, 3.1192, 2.7500, 2.6548], + device='cuda:1'), covar=tensor([0.1854, 0.2843, 0.2059, 0.2894, 0.0635, 0.0189, 0.0283, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0253, 0.0254, 0.0285, 0.0199, 0.0130, 0.0151, 0.0129], + device='cuda:1'), out_proj_covar=tensor([2.0324e-04, 1.8362e-04, 1.8715e-04, 2.0750e-04, 1.7186e-04, 8.9677e-05, + 1.1534e-04, 9.8026e-05], device='cuda:1') +2023-03-31 20:09:54,755 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.02 vs. limit=5.0 +2023-03-31 20:10:05,359 INFO [train.py:903] (1/4) Epoch 1, batch 3800, loss[loss=0.3939, simple_loss=0.4054, pruned_loss=0.1912, over 19373.00 frames. ], tot_loss[loss=0.4495, simple_loss=0.4552, pruned_loss=0.2219, over 3819876.54 frames. ], batch size: 47, lr: 4.46e-02, grad_scale: 8.0 +2023-03-31 20:10:12,575 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.844e+02 1.035e+03 1.394e+03 1.973e+03 4.112e+03, threshold=2.788e+03, percent-clipped=1.0 +2023-03-31 20:10:41,855 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-03-31 20:11:08,740 INFO [train.py:903] (1/4) Epoch 1, batch 3850, loss[loss=0.4954, simple_loss=0.4751, pruned_loss=0.2578, over 13554.00 frames. ], tot_loss[loss=0.4494, simple_loss=0.4551, pruned_loss=0.2218, over 3801379.37 frames. ], batch size: 136, lr: 4.45e-02, grad_scale: 8.0 +2023-03-31 20:12:11,133 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9160, 1.0017, 2.1149, 1.4536, 2.4281, 3.3048, 3.0733, 2.0063], + device='cuda:1'), covar=tensor([0.2522, 0.2251, 0.1996, 0.2103, 0.1312, 0.0559, 0.1134, 0.1809], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0205, 0.0208, 0.0239, 0.0217, 0.0148, 0.0213, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-31 20:12:13,108 INFO [train.py:903] (1/4) Epoch 1, batch 3900, loss[loss=0.4498, simple_loss=0.4566, pruned_loss=0.2215, over 19593.00 frames. ], tot_loss[loss=0.4481, simple_loss=0.4541, pruned_loss=0.221, over 3792573.00 frames. 
], batch size: 57, lr: 4.44e-02, grad_scale: 8.0 +2023-03-31 20:12:22,003 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.917e+02 1.152e+03 1.441e+03 1.935e+03 3.736e+03, threshold=2.883e+03, percent-clipped=2.0 +2023-03-31 20:12:23,348 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3908.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:13:08,696 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:13:18,187 INFO [train.py:903] (1/4) Epoch 1, batch 3950, loss[loss=0.4322, simple_loss=0.457, pruned_loss=0.2037, over 18077.00 frames. ], tot_loss[loss=0.4488, simple_loss=0.4547, pruned_loss=0.2215, over 3810997.76 frames. ], batch size: 83, lr: 4.43e-02, grad_scale: 8.0 +2023-03-31 20:13:23,982 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 20:13:24,176 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6535, 4.2361, 2.2096, 3.7958, 1.4131, 4.3091, 3.8693, 3.9636], + device='cuda:1'), covar=tensor([0.0401, 0.0851, 0.2409, 0.0583, 0.3049, 0.0676, 0.0482, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0203, 0.0219, 0.0251, 0.0197, 0.0265, 0.0198, 0.0156, 0.0162], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:1') +2023-03-31 20:14:23,991 INFO [train.py:903] (1/4) Epoch 1, batch 4000, loss[loss=0.3436, simple_loss=0.3692, pruned_loss=0.1591, over 19700.00 frames. ], tot_loss[loss=0.4437, simple_loss=0.4514, pruned_loss=0.218, over 3815695.04 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 8.0 +2023-03-31 20:14:30,939 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.942e+02 1.053e+03 1.358e+03 1.948e+03 9.883e+03, threshold=2.717e+03, percent-clipped=12.0 +2023-03-31 20:14:49,349 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4021.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:14:52,454 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4023.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:14:54,228 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.23 vs. limit=5.0 +2023-03-31 20:15:11,861 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4038.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:15:12,778 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 20:15:22,680 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4046.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:15:28,264 INFO [train.py:903] (1/4) Epoch 1, batch 4050, loss[loss=0.4485, simple_loss=0.4485, pruned_loss=0.2243, over 19845.00 frames. ], tot_loss[loss=0.4427, simple_loss=0.4503, pruned_loss=0.2175, over 3809615.81 frames. 
], batch size: 52, lr: 4.41e-02, grad_scale: 8.0 +2023-03-31 20:15:40,280 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2430, 1.2100, 0.9462, 1.0533, 0.8335, 1.0357, 0.2163, 0.5934], + device='cuda:1'), covar=tensor([0.0666, 0.0722, 0.0555, 0.0758, 0.1224, 0.0959, 0.1978, 0.1666], + device='cuda:1'), in_proj_covar=tensor([0.0113, 0.0117, 0.0125, 0.0146, 0.0170, 0.0159, 0.0188, 0.0182], + device='cuda:1'), out_proj_covar=tensor([9.9255e-05, 9.3466e-05, 9.9753e-05, 1.1115e-04, 1.3213e-04, 1.2078e-04, + 1.4041e-04, 1.4460e-04], device='cuda:1') +2023-03-31 20:16:32,903 INFO [train.py:903] (1/4) Epoch 1, batch 4100, loss[loss=0.5205, simple_loss=0.5065, pruned_loss=0.2672, over 16985.00 frames. ], tot_loss[loss=0.4416, simple_loss=0.4495, pruned_loss=0.2169, over 3805313.16 frames. ], batch size: 101, lr: 4.40e-02, grad_scale: 8.0 +2023-03-31 20:16:41,799 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.649e+02 1.198e+03 1.458e+03 1.833e+03 3.490e+03, threshold=2.915e+03, percent-clipped=3.0 +2023-03-31 20:17:08,497 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 20:17:38,860 INFO [train.py:903] (1/4) Epoch 1, batch 4150, loss[loss=0.4902, simple_loss=0.4799, pruned_loss=0.2503, over 13634.00 frames. ], tot_loss[loss=0.4391, simple_loss=0.448, pruned_loss=0.2152, over 3789411.88 frames. ], batch size: 136, lr: 4.39e-02, grad_scale: 8.0 +2023-03-31 20:17:41,677 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4153.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:17:49,785 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4159.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:18:31,271 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-03-31 20:18:44,276 INFO [train.py:903] (1/4) Epoch 1, batch 4200, loss[loss=0.3237, simple_loss=0.3629, pruned_loss=0.1422, over 19740.00 frames. ], tot_loss[loss=0.4347, simple_loss=0.4452, pruned_loss=0.2121, over 3803331.30 frames. ], batch size: 46, lr: 4.38e-02, grad_scale: 8.0 +2023-03-31 20:18:46,642 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 20:18:51,463 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.919e+02 1.098e+03 1.489e+03 3.268e+03, threshold=2.196e+03, percent-clipped=3.0 +2023-03-31 20:19:47,734 INFO [train.py:903] (1/4) Epoch 1, batch 4250, loss[loss=0.4104, simple_loss=0.4301, pruned_loss=0.1954, over 19671.00 frames. ], tot_loss[loss=0.4349, simple_loss=0.4455, pruned_loss=0.2122, over 3814747.85 frames. ], batch size: 58, lr: 4.36e-02, grad_scale: 8.0 +2023-03-31 20:19:57,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-31 20:20:02,063 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 20:20:15,219 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-03-31 20:20:25,098 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4279.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:20:36,582 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:20:52,965 INFO [train.py:903] (1/4) Epoch 1, batch 4300, loss[loss=0.4446, simple_loss=0.4543, pruned_loss=0.2175, over 19670.00 frames. ], tot_loss[loss=0.4339, simple_loss=0.4446, pruned_loss=0.2116, over 3820725.35 frames. ], batch size: 60, lr: 4.35e-02, grad_scale: 8.0 +2023-03-31 20:20:57,860 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4304.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:21:00,201 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4306.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:21:02,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.021e+02 1.171e+03 1.478e+03 2.100e+03 3.660e+03, threshold=2.957e+03, percent-clipped=20.0 +2023-03-31 20:21:46,486 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 20:21:59,419 INFO [train.py:903] (1/4) Epoch 1, batch 4350, loss[loss=0.3566, simple_loss=0.3741, pruned_loss=0.1696, over 19755.00 frames. ], tot_loss[loss=0.4293, simple_loss=0.4416, pruned_loss=0.2085, over 3819141.90 frames. ], batch size: 46, lr: 4.34e-02, grad_scale: 8.0 +2023-03-31 20:22:08,240 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9706, 2.0312, 1.3950, 2.0002, 1.6757, 1.0389, 1.1149, 1.3655], + device='cuda:1'), covar=tensor([0.1147, 0.0825, 0.1059, 0.1018, 0.1209, 0.1370, 0.2485, 0.1474], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0090, 0.0119, 0.0130, 0.0136, 0.0079, 0.0136, 0.0113], + device='cuda:1'), out_proj_covar=tensor([7.3150e-05, 6.2377e-05, 7.7721e-05, 8.6248e-05, 8.5480e-05, 4.8552e-05, + 9.9747e-05, 7.6768e-05], device='cuda:1') +2023-03-31 20:23:03,132 INFO [train.py:903] (1/4) Epoch 1, batch 4400, loss[loss=0.4334, simple_loss=0.4404, pruned_loss=0.2132, over 19511.00 frames. ], tot_loss[loss=0.429, simple_loss=0.4418, pruned_loss=0.2081, over 3827576.28 frames. ], batch size: 54, lr: 4.33e-02, grad_scale: 8.0 +2023-03-31 20:23:05,853 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4403.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:23:11,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.454e+02 9.391e+02 1.114e+03 1.514e+03 3.216e+03, threshold=2.228e+03, percent-clipped=1.0 +2023-03-31 20:23:14,559 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4409.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:23:29,261 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 20:23:39,400 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 20:23:46,884 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4434.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:24:06,851 INFO [train.py:903] (1/4) Epoch 1, batch 4450, loss[loss=0.4172, simple_loss=0.4367, pruned_loss=0.1989, over 19668.00 frames. ], tot_loss[loss=0.4295, simple_loss=0.4426, pruned_loss=0.2082, over 3826043.59 frames. 
], batch size: 55, lr: 4.32e-02, grad_scale: 8.0 +2023-03-31 20:25:09,797 INFO [train.py:903] (1/4) Epoch 1, batch 4500, loss[loss=0.3992, simple_loss=0.4329, pruned_loss=0.1827, over 18068.00 frames. ], tot_loss[loss=0.4264, simple_loss=0.4402, pruned_loss=0.2063, over 3822719.15 frames. ], batch size: 84, lr: 4.31e-02, grad_scale: 8.0 +2023-03-31 20:25:12,514 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4503.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:25:18,130 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.500e+02 1.049e+03 1.357e+03 1.620e+03 3.962e+03, threshold=2.713e+03, percent-clipped=8.0 +2023-03-31 20:26:14,039 INFO [train.py:903] (1/4) Epoch 1, batch 4550, loss[loss=0.5119, simple_loss=0.4995, pruned_loss=0.2621, over 13797.00 frames. ], tot_loss[loss=0.4243, simple_loss=0.439, pruned_loss=0.2048, over 3805142.19 frames. ], batch size: 135, lr: 4.30e-02, grad_scale: 8.0 +2023-03-31 20:26:18,724 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6466, 1.3281, 1.3300, 2.4763, 3.2058, 1.5295, 2.2505, 3.2461], + device='cuda:1'), covar=tensor([0.0373, 0.2847, 0.3077, 0.1356, 0.0438, 0.2288, 0.1066, 0.0429], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0265, 0.0256, 0.0254, 0.0191, 0.0310, 0.0227, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 20:26:24,210 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 20:26:47,295 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 20:26:58,296 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3100, 1.4014, 1.0846, 1.4932, 1.3120, 1.5146, 1.2512, 1.4399], + device='cuda:1'), covar=tensor([0.1374, 0.2269, 0.2132, 0.1352, 0.2367, 0.0969, 0.1962, 0.1016], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0283, 0.0252, 0.0213, 0.0278, 0.0206, 0.0231, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 20:27:16,034 INFO [train.py:903] (1/4) Epoch 1, batch 4600, loss[loss=0.4438, simple_loss=0.4645, pruned_loss=0.2116, over 19752.00 frames. ], tot_loss[loss=0.4251, simple_loss=0.4394, pruned_loss=0.2054, over 3804425.14 frames. 
], batch size: 63, lr: 4.29e-02, grad_scale: 4.0 +2023-03-31 20:27:24,059 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.210e+02 9.691e+02 1.279e+03 1.723e+03 8.130e+03, threshold=2.557e+03, percent-clipped=7.0 +2023-03-31 20:27:36,614 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4618.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:27:41,329 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7191, 1.2298, 1.1174, 1.4701, 1.3736, 1.5869, 1.5129, 1.6207], + device='cuda:1'), covar=tensor([0.0799, 0.1643, 0.1632, 0.1488, 0.1898, 0.1602, 0.1935, 0.0936], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0296, 0.0286, 0.0311, 0.0392, 0.0279, 0.0350, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 20:28:17,147 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4650.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:18,170 INFO [train.py:903] (1/4) Epoch 1, batch 4650, loss[loss=0.3356, simple_loss=0.3666, pruned_loss=0.1524, over 19802.00 frames. ], tot_loss[loss=0.423, simple_loss=0.4382, pruned_loss=0.2039, over 3794985.35 frames. ], batch size: 47, lr: 4.28e-02, grad_scale: 4.0 +2023-03-31 20:28:27,882 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4659.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:34,604 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 20:28:44,784 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 20:28:58,620 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4684.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:29:18,955 INFO [train.py:903] (1/4) Epoch 1, batch 4700, loss[loss=0.4113, simple_loss=0.4231, pruned_loss=0.1998, over 19783.00 frames. ], tot_loss[loss=0.422, simple_loss=0.4376, pruned_loss=0.2032, over 3794812.40 frames. ], batch size: 47, lr: 4.27e-02, grad_scale: 4.0 +2023-03-31 20:29:28,009 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+02 9.658e+02 1.202e+03 1.526e+03 2.859e+03, threshold=2.405e+03, percent-clipped=1.0 +2023-03-31 20:29:39,197 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 20:30:21,814 INFO [train.py:903] (1/4) Epoch 1, batch 4750, loss[loss=0.4236, simple_loss=0.4454, pruned_loss=0.2009, over 18855.00 frames. ], tot_loss[loss=0.4215, simple_loss=0.4374, pruned_loss=0.2028, over 3790455.65 frames. ], batch size: 74, lr: 4.26e-02, grad_scale: 4.0 +2023-03-31 20:30:39,338 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4765.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:31:23,846 INFO [train.py:903] (1/4) Epoch 1, batch 4800, loss[loss=0.321, simple_loss=0.3623, pruned_loss=0.1399, over 19412.00 frames. ], tot_loss[loss=0.4199, simple_loss=0.436, pruned_loss=0.2019, over 3801507.49 frames. ], batch size: 48, lr: 4.25e-02, grad_scale: 8.0 +2023-03-31 20:31:32,976 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.504e+02 1.038e+03 1.224e+03 1.522e+03 3.175e+03, threshold=2.447e+03, percent-clipped=5.0 +2023-03-31 20:32:25,500 INFO [train.py:903] (1/4) Epoch 1, batch 4850, loss[loss=0.3662, simple_loss=0.3847, pruned_loss=0.1739, over 19796.00 frames. 
], tot_loss[loss=0.419, simple_loss=0.4347, pruned_loss=0.2016, over 3791559.55 frames. ], batch size: 48, lr: 4.24e-02, grad_scale: 8.0 +2023-03-31 20:32:46,766 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 20:32:54,335 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4874.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:06,425 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-03-31 20:33:12,764 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 20:33:12,785 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 20:33:23,800 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-03-31 20:33:25,326 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4899.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:27,240 INFO [train.py:903] (1/4) Epoch 1, batch 4900, loss[loss=0.4146, simple_loss=0.4418, pruned_loss=0.1937, over 18055.00 frames. ], tot_loss[loss=0.4194, simple_loss=0.4355, pruned_loss=0.2016, over 3777807.24 frames. ], batch size: 83, lr: 4.23e-02, grad_scale: 8.0 +2023-03-31 20:33:37,034 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.319e+02 9.845e+02 1.167e+03 1.485e+03 2.856e+03, threshold=2.333e+03, percent-clipped=2.0 +2023-03-31 20:33:44,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 20:34:20,318 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2607, 1.5325, 2.0314, 2.0926, 2.6961, 4.3158, 3.8467, 4.3012], + device='cuda:1'), covar=tensor([0.1706, 0.2914, 0.2222, 0.2044, 0.0630, 0.0145, 0.0163, 0.0146], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0283, 0.0296, 0.0308, 0.0199, 0.0129, 0.0167, 0.0125], + device='cuda:1'), out_proj_covar=tensor([2.4239e-04, 2.2402e-04, 2.3288e-04, 2.4574e-04, 1.8619e-04, 9.9757e-05, + 1.3597e-04, 1.0763e-04], device='cuda:1') +2023-03-31 20:34:29,499 INFO [train.py:903] (1/4) Epoch 1, batch 4950, loss[loss=0.6014, simple_loss=0.5492, pruned_loss=0.3268, over 17292.00 frames. ], tot_loss[loss=0.4182, simple_loss=0.4347, pruned_loss=0.2009, over 3784414.98 frames. ], batch size: 101, lr: 4.21e-02, grad_scale: 8.0 +2023-03-31 20:34:40,566 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 20:35:04,989 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6713, 1.4334, 1.6290, 1.2211, 2.7743, 3.6233, 3.1903, 3.6600], + device='cuda:1'), covar=tensor([0.1842, 0.2733, 0.2383, 0.2650, 0.0539, 0.0170, 0.0259, 0.0206], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0276, 0.0292, 0.0303, 0.0198, 0.0128, 0.0169, 0.0127], + device='cuda:1'), out_proj_covar=tensor([2.3959e-04, 2.1881e-04, 2.3047e-04, 2.4149e-04, 1.8403e-04, 9.7999e-05, + 1.3699e-04, 1.0993e-04], device='cuda:1') +2023-03-31 20:35:05,870 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 20:35:31,801 INFO [train.py:903] (1/4) Epoch 1, batch 5000, loss[loss=0.4384, simple_loss=0.4587, pruned_loss=0.209, over 19671.00 frames. 
], tot_loss[loss=0.4187, simple_loss=0.4354, pruned_loss=0.201, over 3781073.92 frames. ], batch size: 60, lr: 4.20e-02, grad_scale: 8.0 +2023-03-31 20:35:35,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 20:35:40,124 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.692e+02 8.720e+02 1.063e+03 1.451e+03 3.452e+03, threshold=2.125e+03, percent-clipped=4.0 +2023-03-31 20:35:46,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 20:35:56,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5021.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:27,167 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5046.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:33,277 INFO [train.py:903] (1/4) Epoch 1, batch 5050, loss[loss=0.3968, simple_loss=0.4283, pruned_loss=0.1827, over 19308.00 frames. ], tot_loss[loss=0.4134, simple_loss=0.4317, pruned_loss=0.1976, over 3784420.70 frames. ], batch size: 66, lr: 4.19e-02, grad_scale: 8.0 +2023-03-31 20:37:02,514 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 20:37:09,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-03-31 20:37:34,521 INFO [train.py:903] (1/4) Epoch 1, batch 5100, loss[loss=0.517, simple_loss=0.4967, pruned_loss=0.2687, over 13467.00 frames. ], tot_loss[loss=0.4127, simple_loss=0.4312, pruned_loss=0.1971, over 3785906.82 frames. ], batch size: 136, lr: 4.18e-02, grad_scale: 8.0 +2023-03-31 20:37:39,760 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 20:37:43,121 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+02 1.065e+03 1.254e+03 1.490e+03 3.647e+03, threshold=2.509e+03, percent-clipped=6.0 +2023-03-31 20:37:43,157 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 20:37:47,692 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 20:38:36,973 INFO [train.py:903] (1/4) Epoch 1, batch 5150, loss[loss=0.4607, simple_loss=0.4564, pruned_loss=0.2325, over 13440.00 frames. ], tot_loss[loss=0.4105, simple_loss=0.4293, pruned_loss=0.1958, over 3791462.67 frames. ], batch size: 135, lr: 4.17e-02, grad_scale: 8.0 +2023-03-31 20:38:46,441 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 20:39:20,323 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 20:39:37,966 INFO [train.py:903] (1/4) Epoch 1, batch 5200, loss[loss=0.3731, simple_loss=0.3936, pruned_loss=0.1763, over 19344.00 frames. ], tot_loss[loss=0.4128, simple_loss=0.4308, pruned_loss=0.1974, over 3799894.63 frames. ], batch size: 47, lr: 4.16e-02, grad_scale: 8.0 +2023-03-31 20:39:45,881 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+02 1.028e+03 1.252e+03 1.630e+03 4.880e+03, threshold=2.504e+03, percent-clipped=1.0 +2023-03-31 20:39:50,384 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-03-31 20:40:32,315 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 20:40:39,294 INFO [train.py:903] (1/4) Epoch 1, batch 5250, loss[loss=0.3615, simple_loss=0.3927, pruned_loss=0.1652, over 19768.00 frames. ], tot_loss[loss=0.4119, simple_loss=0.4303, pruned_loss=0.1968, over 3802107.43 frames. ], batch size: 49, lr: 4.15e-02, grad_scale: 8.0 +2023-03-31 20:40:52,182 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.44 vs. limit=5.0 +2023-03-31 20:41:39,595 INFO [train.py:903] (1/4) Epoch 1, batch 5300, loss[loss=0.3769, simple_loss=0.3867, pruned_loss=0.1835, over 19696.00 frames. ], tot_loss[loss=0.4135, simple_loss=0.4309, pruned_loss=0.198, over 3786681.54 frames. ], batch size: 45, lr: 4.14e-02, grad_scale: 8.0 +2023-03-31 20:41:48,685 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.962e+02 9.394e+02 1.191e+03 1.647e+03 4.206e+03, threshold=2.383e+03, percent-clipped=5.0 +2023-03-31 20:41:53,358 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 20:42:41,823 INFO [train.py:903] (1/4) Epoch 1, batch 5350, loss[loss=0.3582, simple_loss=0.3813, pruned_loss=0.1675, over 19065.00 frames. ], tot_loss[loss=0.4128, simple_loss=0.4304, pruned_loss=0.1976, over 3794358.51 frames. ], batch size: 42, lr: 4.13e-02, grad_scale: 8.0 +2023-03-31 20:43:13,704 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 20:43:43,593 INFO [train.py:903] (1/4) Epoch 1, batch 5400, loss[loss=0.3244, simple_loss=0.3705, pruned_loss=0.1391, over 19384.00 frames. ], tot_loss[loss=0.4089, simple_loss=0.4278, pruned_loss=0.195, over 3807349.29 frames. ], batch size: 48, lr: 4.12e-02, grad_scale: 8.0 +2023-03-31 20:43:51,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.073e+02 9.364e+02 1.084e+03 1.611e+03 4.795e+03, threshold=2.168e+03, percent-clipped=7.0 +2023-03-31 20:44:44,635 INFO [train.py:903] (1/4) Epoch 1, batch 5450, loss[loss=0.3966, simple_loss=0.4035, pruned_loss=0.1949, over 19409.00 frames. ], tot_loss[loss=0.4073, simple_loss=0.4269, pruned_loss=0.1938, over 3813598.36 frames. ], batch size: 48, lr: 4.11e-02, grad_scale: 8.0 +2023-03-31 20:45:05,113 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-31 20:45:46,527 INFO [train.py:903] (1/4) Epoch 1, batch 5500, loss[loss=0.4794, simple_loss=0.4829, pruned_loss=0.238, over 19763.00 frames. ], tot_loss[loss=0.4075, simple_loss=0.4273, pruned_loss=0.1939, over 3804334.01 frames. ], batch size: 56, lr: 4.10e-02, grad_scale: 8.0 +2023-03-31 20:45:54,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+02 9.504e+02 1.107e+03 1.412e+03 4.004e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 20:46:08,673 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 20:46:46,623 INFO [train.py:903] (1/4) Epoch 1, batch 5550, loss[loss=0.334, simple_loss=0.3665, pruned_loss=0.1507, over 19727.00 frames. ], tot_loss[loss=0.4062, simple_loss=0.4262, pruned_loss=0.1932, over 3810644.35 frames. ], batch size: 46, lr: 4.09e-02, grad_scale: 8.0 +2023-03-31 20:46:53,465 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-03-31 20:46:54,449 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.78 vs. limit=5.0 +2023-03-31 20:47:35,046 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0942, 1.2886, 0.8923, 0.9682, 1.1164, 0.8650, 0.3908, 1.3420], + device='cuda:1'), covar=tensor([0.0898, 0.0527, 0.1108, 0.0811, 0.0696, 0.1519, 0.1547, 0.0714], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0150, 0.0185, 0.0222, 0.0157, 0.0246, 0.0249, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 20:47:42,845 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 20:47:47,483 INFO [train.py:903] (1/4) Epoch 1, batch 5600, loss[loss=0.401, simple_loss=0.4304, pruned_loss=0.1859, over 19765.00 frames. ], tot_loss[loss=0.4055, simple_loss=0.4258, pruned_loss=0.1926, over 3817057.57 frames. ], batch size: 56, lr: 4.08e-02, grad_scale: 8.0 +2023-03-31 20:47:56,550 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.862e+02 1.009e+03 1.185e+03 1.400e+03 2.216e+03, threshold=2.370e+03, percent-clipped=2.0 +2023-03-31 20:48:48,825 INFO [train.py:903] (1/4) Epoch 1, batch 5650, loss[loss=0.4417, simple_loss=0.4612, pruned_loss=0.2111, over 19732.00 frames. ], tot_loss[loss=0.4092, simple_loss=0.4283, pruned_loss=0.1951, over 3802967.29 frames. ], batch size: 63, lr: 4.07e-02, grad_scale: 8.0 +2023-03-31 20:49:09,682 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5668.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:49:32,788 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 20:49:49,715 INFO [train.py:903] (1/4) Epoch 1, batch 5700, loss[loss=0.4574, simple_loss=0.4678, pruned_loss=0.2235, over 18802.00 frames. ], tot_loss[loss=0.4101, simple_loss=0.429, pruned_loss=0.1956, over 3796701.70 frames. ], batch size: 74, lr: 4.06e-02, grad_scale: 8.0 +2023-03-31 20:49:57,492 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.193e+02 1.084e+03 1.385e+03 1.754e+03 4.325e+03, threshold=2.770e+03, percent-clipped=14.0 +2023-03-31 20:50:51,576 INFO [train.py:903] (1/4) Epoch 1, batch 5750, loss[loss=0.4221, simple_loss=0.4428, pruned_loss=0.2007, over 18181.00 frames. ], tot_loss[loss=0.4067, simple_loss=0.4266, pruned_loss=0.1934, over 3807550.26 frames. ], batch size: 83, lr: 4.05e-02, grad_scale: 8.0 +2023-03-31 20:50:51,595 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 20:50:59,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 20:51:05,161 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-03-31 20:51:18,556 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5773.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:51:21,937 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8724, 4.2761, 5.5800, 5.1874, 1.7560, 4.9035, 4.6047, 4.6523], + device='cuda:1'), covar=tensor([0.0204, 0.0420, 0.0273, 0.0172, 0.2847, 0.0173, 0.0246, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0226, 0.0276, 0.0190, 0.0370, 0.0139, 0.0203, 0.0290], + device='cuda:1'), out_proj_covar=tensor([1.2034e-04, 1.4622e-04, 1.7960e-04, 1.1123e-04, 2.0182e-04, 9.0813e-05, + 1.2534e-04, 1.6646e-04], device='cuda:1') +2023-03-31 20:51:52,608 INFO [train.py:903] (1/4) Epoch 1, batch 5800, loss[loss=0.3823, simple_loss=0.4157, pruned_loss=0.1744, over 19659.00 frames. ], tot_loss[loss=0.404, simple_loss=0.4248, pruned_loss=0.1916, over 3822824.57 frames. ], batch size: 58, lr: 4.04e-02, grad_scale: 8.0 +2023-03-31 20:51:59,111 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8654, 1.4310, 1.7138, 2.2353, 1.5936, 2.2075, 2.7789, 2.2904], + device='cuda:1'), covar=tensor([0.0840, 0.2132, 0.1978, 0.1666, 0.2732, 0.1662, 0.1879, 0.1209], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0298, 0.0305, 0.0315, 0.0391, 0.0272, 0.0351, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 20:52:02,181 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.310e+02 8.969e+02 1.169e+03 1.352e+03 2.735e+03, threshold=2.337e+03, percent-clipped=0.0 +2023-03-31 20:52:53,453 INFO [train.py:903] (1/4) Epoch 1, batch 5850, loss[loss=0.3267, simple_loss=0.3634, pruned_loss=0.145, over 19763.00 frames. ], tot_loss[loss=0.4024, simple_loss=0.4235, pruned_loss=0.1907, over 3818144.03 frames. ], batch size: 48, lr: 4.03e-02, grad_scale: 8.0 +2023-03-31 20:53:23,806 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5876.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:53:55,080 INFO [train.py:903] (1/4) Epoch 1, batch 5900, loss[loss=0.3432, simple_loss=0.3874, pruned_loss=0.1495, over 19592.00 frames. ], tot_loss[loss=0.3988, simple_loss=0.4209, pruned_loss=0.1883, over 3809432.95 frames. ], batch size: 52, lr: 4.02e-02, grad_scale: 8.0 +2023-03-31 20:53:58,584 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 20:54:03,105 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.633e+02 8.668e+02 1.127e+03 1.397e+03 3.736e+03, threshold=2.255e+03, percent-clipped=4.0 +2023-03-31 20:54:21,183 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 20:54:45,250 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.6615, 5.0726, 3.3979, 4.6162, 1.5987, 5.4741, 4.9160, 5.3490], + device='cuda:1'), covar=tensor([0.0554, 0.1197, 0.1942, 0.0589, 0.3608, 0.0673, 0.0567, 0.0447], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0244, 0.0280, 0.0226, 0.0294, 0.0228, 0.0177, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 20:54:55,343 INFO [train.py:903] (1/4) Epoch 1, batch 5950, loss[loss=0.4766, simple_loss=0.4696, pruned_loss=0.2418, over 13987.00 frames. 
], tot_loss[loss=0.3991, simple_loss=0.4215, pruned_loss=0.1883, over 3817026.85 frames. ], batch size: 136, lr: 4.01e-02, grad_scale: 8.0 +2023-03-31 20:55:47,584 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 20:55:57,493 INFO [train.py:903] (1/4) Epoch 1, batch 6000, loss[loss=0.4279, simple_loss=0.4502, pruned_loss=0.2028, over 18816.00 frames. ], tot_loss[loss=0.3978, simple_loss=0.4202, pruned_loss=0.1878, over 3808889.93 frames. ], batch size: 74, lr: 4.00e-02, grad_scale: 8.0 +2023-03-31 20:55:57,493 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 20:56:10,586 INFO [train.py:937] (1/4) Epoch 1, validation: loss=0.2784, simple_loss=0.3626, pruned_loss=0.09714, over 944034.00 frames. +2023-03-31 20:56:10,587 INFO [train.py:938] (1/4) Maximum memory allocated so far is 17726MB +2023-03-31 20:56:19,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.849e+02 9.012e+02 1.240e+03 1.620e+03 2.952e+03, threshold=2.480e+03, percent-clipped=5.0 +2023-03-31 20:56:19,913 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:56:24,072 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6012.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:57:10,741 INFO [train.py:903] (1/4) Epoch 1, batch 6050, loss[loss=0.3891, simple_loss=0.4131, pruned_loss=0.1825, over 19621.00 frames. ], tot_loss[loss=0.3993, simple_loss=0.4212, pruned_loss=0.1887, over 3816629.04 frames. ], batch size: 50, lr: 3.99e-02, grad_scale: 8.0 +2023-03-31 20:57:23,117 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6543, 4.1827, 2.3483, 3.5510, 1.3260, 4.1281, 3.6835, 3.8392], + device='cuda:1'), covar=tensor([0.0641, 0.1282, 0.2511, 0.0725, 0.3878, 0.0809, 0.0696, 0.0681], + device='cuda:1'), in_proj_covar=tensor([0.0240, 0.0243, 0.0272, 0.0226, 0.0297, 0.0229, 0.0173, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 20:57:24,343 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:57:27,990 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0146, 1.8816, 1.3591, 1.5331, 1.3306, 1.5774, 0.1971, 0.9696], + device='cuda:1'), covar=tensor([0.0610, 0.0538, 0.0375, 0.0584, 0.1088, 0.0708, 0.1636, 0.1269], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0166, 0.0162, 0.0207, 0.0236, 0.0211, 0.0229, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 20:58:12,899 INFO [train.py:903] (1/4) Epoch 1, batch 6100, loss[loss=0.4256, simple_loss=0.4349, pruned_loss=0.2081, over 19693.00 frames. ], tot_loss[loss=0.3982, simple_loss=0.4207, pruned_loss=0.1879, over 3814552.33 frames. 
], batch size: 53, lr: 3.98e-02, grad_scale: 8.0 +2023-03-31 20:58:20,960 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+02 9.509e+02 1.169e+03 1.489e+03 2.977e+03, threshold=2.338e+03, percent-clipped=4.0 +2023-03-31 20:58:32,472 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6117.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:58:44,729 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6127.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:58:47,275 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-31 20:59:13,351 INFO [train.py:903] (1/4) Epoch 1, batch 6150, loss[loss=0.3969, simple_loss=0.422, pruned_loss=0.1859, over 18176.00 frames. ], tot_loss[loss=0.398, simple_loss=0.4207, pruned_loss=0.1876, over 3816146.70 frames. ], batch size: 83, lr: 3.97e-02, grad_scale: 8.0 +2023-03-31 20:59:15,696 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6153.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:59:42,658 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 20:59:50,712 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4243, 2.0191, 1.6889, 1.6724, 1.7482, 0.8333, 0.8137, 1.7524], + device='cuda:1'), covar=tensor([0.1040, 0.0444, 0.1049, 0.0631, 0.0867, 0.1546, 0.1361, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0150, 0.0215, 0.0230, 0.0160, 0.0251, 0.0258, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 20:59:58,475 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6188.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:02,020 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1482, 1.5352, 1.3920, 2.2838, 1.4949, 2.2467, 1.9202, 2.1652], + device='cuda:1'), covar=tensor([0.0794, 0.1589, 0.1735, 0.1266, 0.2285, 0.1200, 0.1910, 0.0922], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0306, 0.0300, 0.0323, 0.0391, 0.0278, 0.0343, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:00:13,785 INFO [train.py:903] (1/4) Epoch 1, batch 6200, loss[loss=0.4141, simple_loss=0.4381, pruned_loss=0.195, over 17364.00 frames. ], tot_loss[loss=0.3975, simple_loss=0.4206, pruned_loss=0.1872, over 3830012.50 frames. 
], batch size: 101, lr: 3.96e-02, grad_scale: 8.0 +2023-03-31 21:00:22,726 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 9.585e+02 1.181e+03 1.511e+03 2.920e+03, threshold=2.362e+03, percent-clipped=2.0 +2023-03-31 21:00:31,011 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6721, 1.0367, 1.4006, 0.7976, 2.6167, 2.7594, 2.4716, 2.5485], + device='cuda:1'), covar=tensor([0.1688, 0.3158, 0.2730, 0.2816, 0.0441, 0.0175, 0.0318, 0.0318], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0287, 0.0316, 0.0304, 0.0202, 0.0117, 0.0180, 0.0128], + device='cuda:1'), out_proj_covar=tensor([2.5590e-04, 2.4325e-04, 2.6154e-04, 2.5763e-04, 1.9470e-04, 9.8021e-05, + 1.5082e-04, 1.1786e-04], device='cuda:1') +2023-03-31 21:00:38,390 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:39,677 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:49,036 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6229.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:52,525 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:01:16,023 INFO [train.py:903] (1/4) Epoch 1, batch 6250, loss[loss=0.403, simple_loss=0.4275, pruned_loss=0.1893, over 19664.00 frames. ], tot_loss[loss=0.3965, simple_loss=0.4201, pruned_loss=0.1865, over 3824266.94 frames. ], batch size: 53, lr: 3.95e-02, grad_scale: 8.0 +2023-03-31 21:01:46,721 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 21:01:56,423 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6284.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:02:18,553 INFO [train.py:903] (1/4) Epoch 1, batch 6300, loss[loss=0.362, simple_loss=0.3946, pruned_loss=0.1647, over 19579.00 frames. ], tot_loss[loss=0.3942, simple_loss=0.4188, pruned_loss=0.1848, over 3825732.04 frames. ], batch size: 52, lr: 3.94e-02, grad_scale: 8.0 +2023-03-31 21:02:26,555 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.984e+02 8.812e+02 1.125e+03 1.363e+03 2.149e+03, threshold=2.249e+03, percent-clipped=0.0 +2023-03-31 21:02:41,813 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3271, 2.0315, 2.0520, 2.9162, 1.9235, 2.5777, 1.9735, 1.9000], + device='cuda:1'), covar=tensor([0.0721, 0.0695, 0.0505, 0.0420, 0.0918, 0.0418, 0.1311, 0.0815], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0179, 0.0194, 0.0246, 0.0245, 0.0139, 0.0273, 0.0205], + device='cuda:1'), out_proj_covar=tensor([1.4865e-04, 1.3274e-04, 1.2952e-04, 1.6485e-04, 1.5998e-04, 9.6092e-05, + 1.9448e-04, 1.4389e-04], device='cuda:1') +2023-03-31 21:02:58,278 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6335.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:17,634 INFO [train.py:903] (1/4) Epoch 1, batch 6350, loss[loss=0.3566, simple_loss=0.4043, pruned_loss=0.1545, over 19770.00 frames. ], tot_loss[loss=0.3982, simple_loss=0.4214, pruned_loss=0.1875, over 3833720.64 frames. 
], batch size: 54, lr: 3.93e-02, grad_scale: 8.0 +2023-03-31 21:03:18,875 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6352.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:58,098 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6383.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:04:18,897 INFO [train.py:903] (1/4) Epoch 1, batch 6400, loss[loss=0.4027, simple_loss=0.4367, pruned_loss=0.1844, over 19383.00 frames. ], tot_loss[loss=0.3978, simple_loss=0.4217, pruned_loss=0.1869, over 3829366.40 frames. ], batch size: 70, lr: 3.92e-02, grad_scale: 8.0 +2023-03-31 21:04:24,482 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6405.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:04:27,871 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.880e+02 9.359e+02 1.206e+03 1.547e+03 5.333e+03, threshold=2.412e+03, percent-clipped=7.0 +2023-03-31 21:04:28,285 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6408.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:05:19,056 INFO [train.py:903] (1/4) Epoch 1, batch 6450, loss[loss=0.4214, simple_loss=0.4245, pruned_loss=0.2092, over 19755.00 frames. ], tot_loss[loss=0.3989, simple_loss=0.4225, pruned_loss=0.1877, over 3807886.86 frames. ], batch size: 47, lr: 3.91e-02, grad_scale: 8.0 +2023-03-31 21:05:39,886 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:05:55,625 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6480.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:01,919 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6485.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:05,060 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 21:06:05,477 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6488.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:07,528 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7543, 2.1216, 2.2942, 2.6960, 4.1396, 1.3238, 2.1934, 4.0787], + device='cuda:1'), covar=tensor([0.0244, 0.2389, 0.2498, 0.1643, 0.0298, 0.2382, 0.1172, 0.0358], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0287, 0.0272, 0.0267, 0.0219, 0.0309, 0.0239, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:06:15,203 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6497.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:21,207 INFO [train.py:903] (1/4) Epoch 1, batch 6500, loss[loss=0.3395, simple_loss=0.3806, pruned_loss=0.1492, over 19589.00 frames. ], tot_loss[loss=0.3973, simple_loss=0.4211, pruned_loss=0.1867, over 3809625.36 frames. ], batch size: 52, lr: 3.90e-02, grad_scale: 8.0 +2023-03-31 21:06:25,586 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-03-31 21:06:28,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+02 9.704e+02 1.201e+03 1.443e+03 2.205e+03, threshold=2.402e+03, percent-clipped=0.0 +2023-03-31 21:06:35,119 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6513.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:43,872 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6520.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:58,405 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:59,669 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6055, 4.2023, 5.2740, 4.9516, 2.0194, 4.5474, 4.2659, 4.5910], + device='cuda:1'), covar=tensor([0.0228, 0.0478, 0.0355, 0.0185, 0.2898, 0.0205, 0.0335, 0.0730], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0251, 0.0304, 0.0219, 0.0394, 0.0150, 0.0228, 0.0326], + device='cuda:1'), out_proj_covar=tensor([1.3459e-04, 1.5947e-04, 1.9683e-04, 1.2751e-04, 2.1279e-04, 9.5569e-05, + 1.3716e-04, 1.8500e-04], device='cuda:1') +2023-03-31 21:07:21,792 INFO [train.py:903] (1/4) Epoch 1, batch 6550, loss[loss=0.4543, simple_loss=0.4655, pruned_loss=0.2215, over 19610.00 frames. ], tot_loss[loss=0.3957, simple_loss=0.4197, pruned_loss=0.1858, over 3786893.96 frames. ], batch size: 57, lr: 3.89e-02, grad_scale: 8.0 +2023-03-31 21:07:37,340 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3301, 1.9829, 1.5371, 1.5371, 1.6632, 0.8793, 0.7923, 1.7908], + device='cuda:1'), covar=tensor([0.1316, 0.0497, 0.1285, 0.0761, 0.0975, 0.1727, 0.1559, 0.0866], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0157, 0.0232, 0.0238, 0.0165, 0.0269, 0.0257, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:07:39,358 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6565.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:48,412 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6573.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:10,395 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6591.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:22,316 INFO [train.py:903] (1/4) Epoch 1, batch 6600, loss[loss=0.3492, simple_loss=0.3894, pruned_loss=0.1545, over 19848.00 frames. ], tot_loss[loss=0.3912, simple_loss=0.4165, pruned_loss=0.1829, over 3800825.91 frames. 
], batch size: 52, lr: 3.89e-02, grad_scale: 16.0 +2023-03-31 21:08:31,046 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.089e+02 8.736e+02 1.082e+03 1.231e+03 3.386e+03, threshold=2.164e+03, percent-clipped=2.0 +2023-03-31 21:08:36,141 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6612.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:40,707 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6616.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:54,950 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6677, 4.2976, 2.3659, 3.9267, 1.4295, 4.1500, 3.8180, 4.2186], + device='cuda:1'), covar=tensor([0.0536, 0.1078, 0.1829, 0.0617, 0.2983, 0.0728, 0.0619, 0.0491], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0243, 0.0276, 0.0241, 0.0292, 0.0236, 0.0183, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 21:08:56,073 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6628.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:19,946 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:20,018 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:24,251 INFO [train.py:903] (1/4) Epoch 1, batch 6650, loss[loss=0.3427, simple_loss=0.3841, pruned_loss=0.1506, over 19844.00 frames. ], tot_loss[loss=0.3906, simple_loss=0.4162, pruned_loss=0.1825, over 3806677.44 frames. ], batch size: 52, lr: 3.88e-02, grad_scale: 4.0 +2023-03-31 21:09:59,890 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6680.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:08,149 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1190, 1.4498, 1.6081, 2.1353, 2.9633, 1.4286, 2.0298, 2.7619], + device='cuda:1'), covar=tensor([0.0314, 0.2295, 0.2291, 0.1315, 0.0282, 0.2118, 0.1102, 0.0431], + device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0291, 0.0274, 0.0266, 0.0220, 0.0316, 0.0246, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:10:09,254 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6688.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:23,709 INFO [train.py:903] (1/4) Epoch 1, batch 6700, loss[loss=0.4039, simple_loss=0.4298, pruned_loss=0.189, over 19404.00 frames. ], tot_loss[loss=0.3941, simple_loss=0.4188, pruned_loss=0.1847, over 3801708.77 frames. 
], batch size: 70, lr: 3.87e-02, grad_scale: 4.0 +2023-03-31 21:10:32,322 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5293, 1.6012, 1.8578, 2.2392, 3.2068, 1.5009, 2.1925, 3.2067], + device='cuda:1'), covar=tensor([0.0328, 0.2486, 0.2454, 0.1605, 0.0364, 0.2313, 0.1220, 0.0369], + device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0286, 0.0272, 0.0264, 0.0219, 0.0311, 0.0242, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:10:34,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7756, 3.4536, 2.0290, 3.1129, 1.4592, 3.3118, 3.0572, 3.2647], + device='cuda:1'), covar=tensor([0.0726, 0.1249, 0.2545, 0.0843, 0.3552, 0.1041, 0.0732, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0245, 0.0280, 0.0241, 0.0299, 0.0235, 0.0183, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 21:10:35,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+02 8.354e+02 1.101e+03 1.693e+03 1.016e+04, threshold=2.202e+03, percent-clipped=16.0 +2023-03-31 21:10:51,582 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6723.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:14,953 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6743.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:20,754 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6748.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:23,798 INFO [train.py:903] (1/4) Epoch 1, batch 6750, loss[loss=0.4772, simple_loss=0.4667, pruned_loss=0.2438, over 13426.00 frames. ], tot_loss[loss=0.3919, simple_loss=0.4177, pruned_loss=0.1831, over 3811036.49 frames. ], batch size: 137, lr: 3.86e-02, grad_scale: 4.0 +2023-03-31 21:11:38,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0383, 1.0762, 1.7169, 1.2194, 2.4406, 2.3622, 2.6052, 1.3066], + device='cuda:1'), covar=tensor([0.1697, 0.2039, 0.1459, 0.1753, 0.0941, 0.0870, 0.1014, 0.1842], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0299, 0.0287, 0.0301, 0.0325, 0.0269, 0.0370, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:11:51,635 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:18,951 INFO [train.py:903] (1/4) Epoch 1, batch 6800, loss[loss=0.377, simple_loss=0.4084, pruned_loss=0.1728, over 19772.00 frames. ], tot_loss[loss=0.3933, simple_loss=0.418, pruned_loss=0.1843, over 3793435.54 frames. ], batch size: 54, lr: 3.85e-02, grad_scale: 8.0 +2023-03-31 21:12:19,295 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6801.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:28,857 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.487e+02 9.125e+02 1.072e+03 1.412e+03 3.162e+03, threshold=2.143e+03, percent-clipped=4.0 +2023-03-31 21:12:43,734 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6824.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:03,380 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. 
Duration: 27.47775 +2023-03-31 21:13:04,526 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 21:13:06,751 INFO [train.py:903] (1/4) Epoch 2, batch 0, loss[loss=0.397, simple_loss=0.42, pruned_loss=0.187, over 19687.00 frames. ], tot_loss[loss=0.397, simple_loss=0.42, pruned_loss=0.187, over 19687.00 frames. ], batch size: 53, lr: 3.77e-02, grad_scale: 8.0 +2023-03-31 21:13:06,752 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 21:13:18,415 INFO [train.py:937] (1/4) Epoch 2, validation: loss=0.2802, simple_loss=0.3637, pruned_loss=0.09835, over 944034.00 frames. +2023-03-31 21:13:18,416 INFO [train.py:938] (1/4) Maximum memory allocated so far is 17726MB +2023-03-31 21:13:18,576 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6829.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:28,813 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 21:14:06,569 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6868.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:20,895 INFO [train.py:903] (1/4) Epoch 2, batch 50, loss[loss=0.3684, simple_loss=0.3877, pruned_loss=0.1745, over 19757.00 frames. ], tot_loss[loss=0.3884, simple_loss=0.4136, pruned_loss=0.1816, over 859170.51 frames. ], batch size: 47, lr: 3.76e-02, grad_scale: 8.0 +2023-03-31 21:14:37,429 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6893.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:41,561 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6896.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:14:49,461 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6903.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:54,389 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 21:14:57,929 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.488e+02 9.076e+02 1.150e+03 1.515e+03 2.802e+03, threshold=2.301e+03, percent-clipped=3.0 +2023-03-31 21:15:19,042 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6927.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,147 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,721 INFO [train.py:903] (1/4) Epoch 2, batch 100, loss[loss=0.469, simple_loss=0.4743, pruned_loss=0.2318, over 18221.00 frames. ], tot_loss[loss=0.3898, simple_loss=0.4152, pruned_loss=0.1822, over 1522420.68 frames. ], batch size: 83, lr: 3.75e-02, grad_scale: 8.0 +2023-03-31 21:15:29,257 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6936.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:32,200 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-03-31 21:15:33,671 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6939.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,418 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,486 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:43,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.03 vs. limit=5.0 +2023-03-31 21:16:01,530 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6961.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:10,910 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6969.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:23,040 INFO [train.py:903] (1/4) Epoch 2, batch 150, loss[loss=0.2712, simple_loss=0.3276, pruned_loss=0.1075, over 19803.00 frames. ], tot_loss[loss=0.3895, simple_loss=0.4151, pruned_loss=0.182, over 2036124.77 frames. ], batch size: 48, lr: 3.74e-02, grad_scale: 4.0 +2023-03-31 21:16:38,557 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6991.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:49,257 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6999.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:53,972 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0167, 1.9408, 1.8307, 1.5315, 1.5455, 1.4794, 0.1804, 1.1357], + device='cuda:1'), covar=tensor([0.0646, 0.0520, 0.0262, 0.0527, 0.0857, 0.0651, 0.1344, 0.1049], + device='cuda:1'), in_proj_covar=tensor([0.0183, 0.0186, 0.0180, 0.0221, 0.0259, 0.0233, 0.0234, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:17:03,027 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.870e+02 9.855e+02 1.288e+03 4.108e+03, threshold=1.971e+03, percent-clipped=4.0 +2023-03-31 21:17:19,695 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7024.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:24,042 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 21:17:25,199 INFO [train.py:903] (1/4) Epoch 2, batch 200, loss[loss=0.3784, simple_loss=0.4095, pruned_loss=0.1737, over 19611.00 frames. ], tot_loss[loss=0.3818, simple_loss=0.4096, pruned_loss=0.177, over 2450044.05 frames. ], batch size: 57, lr: 3.73e-02, grad_scale: 4.0 +2023-03-31 21:18:11,617 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7066.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:18:29,136 INFO [train.py:903] (1/4) Epoch 2, batch 250, loss[loss=0.3674, simple_loss=0.3867, pruned_loss=0.174, over 19782.00 frames. ], tot_loss[loss=0.3825, simple_loss=0.4104, pruned_loss=0.1773, over 2762459.85 frames. 
], batch size: 49, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:19:03,855 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:19:09,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 7.747e+02 9.740e+02 1.157e+03 2.695e+03, threshold=1.948e+03, percent-clipped=1.0 +2023-03-31 21:19:17,176 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-31 21:19:33,822 INFO [train.py:903] (1/4) Epoch 2, batch 300, loss[loss=0.4056, simple_loss=0.4348, pruned_loss=0.1882, over 19498.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4076, pruned_loss=0.174, over 2993667.54 frames. ], batch size: 64, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:20:35,435 INFO [train.py:903] (1/4) Epoch 2, batch 350, loss[loss=0.3296, simple_loss=0.381, pruned_loss=0.1391, over 19561.00 frames. ], tot_loss[loss=0.383, simple_loss=0.4112, pruned_loss=0.1774, over 3179411.35 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 4.0 +2023-03-31 21:20:37,904 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7181.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:20:39,817 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 21:20:55,913 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7195.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:01,711 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:15,795 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 9.781e+02 1.245e+03 1.512e+03 3.081e+03, threshold=2.489e+03, percent-clipped=8.0 +2023-03-31 21:21:26,537 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:32,341 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7225.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:37,136 INFO [train.py:903] (1/4) Epoch 2, batch 400, loss[loss=0.4526, simple_loss=0.4662, pruned_loss=0.2195, over 19366.00 frames. ], tot_loss[loss=0.3835, simple_loss=0.4114, pruned_loss=0.1778, over 3331808.28 frames. ], batch size: 66, lr: 3.70e-02, grad_scale: 8.0 +2023-03-31 21:21:51,114 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7240.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:22:30,121 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7271.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:22:40,123 INFO [train.py:903] (1/4) Epoch 2, batch 450, loss[loss=0.3663, simple_loss=0.3975, pruned_loss=0.1676, over 19750.00 frames. ], tot_loss[loss=0.384, simple_loss=0.412, pruned_loss=0.1779, over 3438970.04 frames. ], batch size: 51, lr: 3.69e-02, grad_scale: 8.0 +2023-03-31 21:22:58,508 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:23:14,126 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 21:23:15,288 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-03-31 21:23:19,468 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.877e+02 8.763e+02 1.192e+03 1.491e+03 2.950e+03, threshold=2.384e+03, percent-clipped=4.0 +2023-03-31 21:23:43,126 INFO [train.py:903] (1/4) Epoch 2, batch 500, loss[loss=0.3749, simple_loss=0.4086, pruned_loss=0.1706, over 18702.00 frames. ], tot_loss[loss=0.3845, simple_loss=0.4123, pruned_loss=0.1783, over 3523912.78 frames. ], batch size: 74, lr: 3.68e-02, grad_scale: 8.0 +2023-03-31 21:24:14,633 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7355.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:24:23,957 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7362.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:45,278 INFO [train.py:903] (1/4) Epoch 2, batch 550, loss[loss=0.3609, simple_loss=0.385, pruned_loss=0.1684, over 19732.00 frames. ], tot_loss[loss=0.3849, simple_loss=0.4124, pruned_loss=0.1787, over 3591293.67 frames. ], batch size: 45, lr: 3.67e-02, grad_scale: 8.0 +2023-03-31 21:24:53,919 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7386.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:55,088 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7387.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:24,046 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7410.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:25:26,117 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+02 9.206e+02 1.127e+03 1.377e+03 2.659e+03, threshold=2.254e+03, percent-clipped=2.0 +2023-03-31 21:25:43,337 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:47,649 INFO [train.py:903] (1/4) Epoch 2, batch 600, loss[loss=0.4151, simple_loss=0.438, pruned_loss=0.1961, over 19186.00 frames. ], tot_loss[loss=0.3835, simple_loss=0.4115, pruned_loss=0.1777, over 3654752.63 frames. ], batch size: 69, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:09,116 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 21:26:29,910 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 21:26:36,178 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1282, 1.1689, 0.9814, 1.0926, 0.9488, 1.1644, 0.0360, 0.5413], + device='cuda:1'), covar=tensor([0.0473, 0.0597, 0.0307, 0.0348, 0.0984, 0.0501, 0.1126, 0.1030], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0198, 0.0191, 0.0226, 0.0267, 0.0240, 0.0241, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:26:41,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6768, 1.2769, 1.2218, 1.8089, 1.2705, 1.6448, 1.8388, 1.8022], + device='cuda:1'), covar=tensor([0.1076, 0.1611, 0.1908, 0.1371, 0.2008, 0.1336, 0.1579, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0306, 0.0308, 0.0330, 0.0383, 0.0277, 0.0342, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:26:50,577 INFO [train.py:903] (1/4) Epoch 2, batch 650, loss[loss=0.4052, simple_loss=0.4363, pruned_loss=0.187, over 19796.00 frames. 
], tot_loss[loss=0.3835, simple_loss=0.4115, pruned_loss=0.1777, over 3679748.10 frames. ], batch size: 56, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:59,956 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1332, 4.5120, 5.7850, 5.5594, 1.7237, 5.2051, 4.6713, 5.1483], + device='cuda:1'), covar=tensor([0.0190, 0.0398, 0.0303, 0.0157, 0.2905, 0.0178, 0.0250, 0.0637], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0253, 0.0321, 0.0217, 0.0392, 0.0153, 0.0228, 0.0330], + device='cuda:1'), out_proj_covar=tensor([1.3334e-04, 1.5696e-04, 2.0104e-04, 1.2492e-04, 2.1041e-04, 9.8423e-05, + 1.3335e-04, 1.8358e-04], device='cuda:1') +2023-03-31 21:27:30,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 8.519e+02 1.041e+03 1.431e+03 3.840e+03, threshold=2.082e+03, percent-clipped=3.0 +2023-03-31 21:27:49,382 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7525.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:27:49,567 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7525.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:27:53,956 INFO [train.py:903] (1/4) Epoch 2, batch 700, loss[loss=0.3293, simple_loss=0.3681, pruned_loss=0.1453, over 19737.00 frames. ], tot_loss[loss=0.3852, simple_loss=0.4126, pruned_loss=0.1789, over 3702764.88 frames. ], batch size: 46, lr: 3.65e-02, grad_scale: 8.0 +2023-03-31 21:28:56,978 INFO [train.py:903] (1/4) Epoch 2, batch 750, loss[loss=0.3852, simple_loss=0.4218, pruned_loss=0.1743, over 19580.00 frames. ], tot_loss[loss=0.3866, simple_loss=0.414, pruned_loss=0.1796, over 3739940.09 frames. ], batch size: 61, lr: 3.64e-02, grad_scale: 8.0 +2023-03-31 21:29:35,962 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.266e+02 8.676e+02 1.032e+03 1.220e+03 3.020e+03, threshold=2.064e+03, percent-clipped=5.0 +2023-03-31 21:29:36,423 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7611.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:29:58,614 INFO [train.py:903] (1/4) Epoch 2, batch 800, loss[loss=0.3843, simple_loss=0.4138, pruned_loss=0.1774, over 17376.00 frames. ], tot_loss[loss=0.3839, simple_loss=0.4123, pruned_loss=0.1777, over 3766331.01 frames. ], batch size: 101, lr: 3.63e-02, grad_scale: 8.0 +2023-03-31 21:30:07,144 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7636.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:30:09,184 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7637.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:12,802 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7640.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,239 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,376 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:16,154 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-03-31 21:30:41,310 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1317, 2.7131, 2.1839, 2.5273, 1.8648, 1.8390, 0.3748, 1.9774], + device='cuda:1'), covar=tensor([0.0564, 0.0354, 0.0325, 0.0445, 0.0889, 0.0764, 0.1460, 0.1026], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0195, 0.0189, 0.0232, 0.0272, 0.0233, 0.0243, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:30:46,815 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7667.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:31:01,249 INFO [train.py:903] (1/4) Epoch 2, batch 850, loss[loss=0.4121, simple_loss=0.4417, pruned_loss=0.1913, over 19754.00 frames. ], tot_loss[loss=0.3808, simple_loss=0.4099, pruned_loss=0.1758, over 3778021.43 frames. ], batch size: 63, lr: 3.62e-02, grad_scale: 8.0 +2023-03-31 21:31:36,340 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0505, 1.4710, 1.4305, 1.9958, 1.4862, 2.0701, 2.1022, 2.0702], + device='cuda:1'), covar=tensor([0.0890, 0.1585, 0.1683, 0.1362, 0.2087, 0.1082, 0.1563, 0.0882], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0304, 0.0303, 0.0328, 0.0377, 0.0271, 0.0337, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:31:41,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+02 9.016e+02 1.057e+03 1.450e+03 5.160e+03, threshold=2.114e+03, percent-clipped=6.0 +2023-03-31 21:31:56,577 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 21:32:02,495 INFO [train.py:903] (1/4) Epoch 2, batch 900, loss[loss=0.3985, simple_loss=0.4196, pruned_loss=0.1887, over 19530.00 frames. ], tot_loss[loss=0.3811, simple_loss=0.4097, pruned_loss=0.1762, over 3773559.66 frames. ], batch size: 54, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:32:32,753 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7752.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:43,824 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:53,073 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7769.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:33:06,312 INFO [train.py:903] (1/4) Epoch 2, batch 950, loss[loss=0.3766, simple_loss=0.4118, pruned_loss=0.1707, over 19576.00 frames. ], tot_loss[loss=0.3817, simple_loss=0.4104, pruned_loss=0.1765, over 3779026.22 frames. ], batch size: 61, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:33:09,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7781.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:33:10,928 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-03-31 21:33:40,006 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7806.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:33:46,539 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.660e+02 8.698e+02 1.089e+03 1.494e+03 2.916e+03, threshold=2.178e+03, percent-clipped=6.0 +2023-03-31 21:33:58,936 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7820.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:34:09,629 INFO [train.py:903] (1/4) Epoch 2, batch 1000, loss[loss=0.4056, simple_loss=0.4324, pruned_loss=0.1895, over 18807.00 frames. ], tot_loss[loss=0.3814, simple_loss=0.4106, pruned_loss=0.1761, over 3787738.71 frames. ], batch size: 74, lr: 3.60e-02, grad_scale: 4.0 +2023-03-31 21:34:29,513 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0628, 1.9787, 1.7409, 2.9948, 1.8847, 3.4046, 2.3088, 1.8903], + device='cuda:1'), covar=tensor([0.0996, 0.0769, 0.0576, 0.0483, 0.1092, 0.0206, 0.1086, 0.0820], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0231, 0.0251, 0.0323, 0.0323, 0.0172, 0.0339, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:34:58,586 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 21:35:04,952 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 21:35:11,833 INFO [train.py:903] (1/4) Epoch 2, batch 1050, loss[loss=0.3544, simple_loss=0.3914, pruned_loss=0.1587, over 19611.00 frames. ], tot_loss[loss=0.3777, simple_loss=0.4081, pruned_loss=0.1737, over 3802405.76 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 4.0 +2023-03-31 21:35:18,877 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7884.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:33,750 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7896.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:46,815 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 21:35:53,415 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+02 8.878e+02 9.952e+02 1.228e+03 3.126e+03, threshold=1.990e+03, percent-clipped=5.0 +2023-03-31 21:36:05,095 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3648, 0.9899, 1.2835, 0.6782, 2.9002, 2.8624, 2.5734, 2.9531], + device='cuda:1'), covar=tensor([0.1680, 0.3064, 0.3058, 0.2579, 0.0317, 0.0149, 0.0290, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0282, 0.0324, 0.0295, 0.0197, 0.0110, 0.0186, 0.0115], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 21:36:05,151 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7921.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:36:14,161 INFO [train.py:903] (1/4) Epoch 2, batch 1100, loss[loss=0.4675, simple_loss=0.4713, pruned_loss=0.2319, over 19330.00 frames. ], tot_loss[loss=0.3784, simple_loss=0.4084, pruned_loss=0.1742, over 3814339.60 frames. ], batch size: 70, lr: 3.58e-02, grad_scale: 4.0 +2023-03-31 21:37:16,835 INFO [train.py:903] (1/4) Epoch 2, batch 1150, loss[loss=0.4496, simple_loss=0.4386, pruned_loss=0.2303, over 19368.00 frames. 
], tot_loss[loss=0.3785, simple_loss=0.408, pruned_loss=0.1745, over 3806675.63 frames. ], batch size: 47, lr: 3.57e-02, grad_scale: 4.0 +2023-03-31 21:37:20,372 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5269, 1.1438, 1.5031, 0.7134, 2.7967, 2.7807, 2.5838, 2.8626], + device='cuda:1'), covar=tensor([0.1428, 0.2767, 0.2537, 0.2506, 0.0309, 0.0181, 0.0302, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0282, 0.0324, 0.0295, 0.0197, 0.0109, 0.0185, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 21:37:26,687 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7986.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:54,521 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:58,747 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+02 8.681e+02 1.035e+03 1.273e+03 2.854e+03, threshold=2.070e+03, percent-clipped=5.0 +2023-03-31 21:38:06,515 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.31 vs. limit=5.0 +2023-03-31 21:38:21,517 INFO [train.py:903] (1/4) Epoch 2, batch 1200, loss[loss=0.3329, simple_loss=0.3739, pruned_loss=0.146, over 19749.00 frames. ], tot_loss[loss=0.3775, simple_loss=0.4075, pruned_loss=0.1738, over 3811733.89 frames. ], batch size: 51, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:38:26,445 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8033.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:38:52,436 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 21:39:22,658 INFO [train.py:903] (1/4) Epoch 2, batch 1250, loss[loss=0.4358, simple_loss=0.4527, pruned_loss=0.2094, over 18767.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4072, pruned_loss=0.1742, over 3806576.31 frames. ], batch size: 74, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:39:50,803 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8101.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:39:57,189 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:40:05,139 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.392e+02 1.041e+03 1.254e+03 3.427e+03, threshold=2.083e+03, percent-clipped=3.0 +2023-03-31 21:40:25,457 INFO [train.py:903] (1/4) Epoch 2, batch 1300, loss[loss=0.3497, simple_loss=0.3796, pruned_loss=0.16, over 19731.00 frames. ], tot_loss[loss=0.3758, simple_loss=0.4057, pruned_loss=0.1729, over 3809534.46 frames. ], batch size: 51, lr: 3.55e-02, grad_scale: 8.0 +2023-03-31 21:40:39,826 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8140.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:09,967 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8164.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:11,407 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8165.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:28,090 INFO [train.py:903] (1/4) Epoch 2, batch 1350, loss[loss=0.3724, simple_loss=0.4215, pruned_loss=0.1616, over 19779.00 frames. 
], tot_loss[loss=0.3752, simple_loss=0.4048, pruned_loss=0.1728, over 3809627.88 frames. ], batch size: 56, lr: 3.54e-02, grad_scale: 8.0 +2023-03-31 21:41:32,933 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0270, 1.5255, 1.4335, 2.0694, 1.4645, 1.8688, 1.8917, 1.7321], + device='cuda:1'), covar=tensor([0.0792, 0.1252, 0.1539, 0.1241, 0.1999, 0.1230, 0.1428, 0.1011], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0292, 0.0303, 0.0320, 0.0370, 0.0270, 0.0335, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:42:08,818 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.809e+02 9.088e+02 1.109e+03 1.527e+03 2.312e+03, threshold=2.218e+03, percent-clipped=6.0 +2023-03-31 21:42:19,637 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:42:30,881 INFO [train.py:903] (1/4) Epoch 2, batch 1400, loss[loss=0.3367, simple_loss=0.3791, pruned_loss=0.1471, over 19856.00 frames. ], tot_loss[loss=0.3767, simple_loss=0.4066, pruned_loss=0.1733, over 3811853.04 frames. ], batch size: 52, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:14,084 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-31 21:43:32,694 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 21:43:33,842 INFO [train.py:903] (1/4) Epoch 2, batch 1450, loss[loss=0.3526, simple_loss=0.3963, pruned_loss=0.1544, over 19658.00 frames. ], tot_loss[loss=0.3768, simple_loss=0.4069, pruned_loss=0.1734, over 3798489.62 frames. ], batch size: 58, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:34,141 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8279.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:43:38,615 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8283.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:44:15,429 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+02 8.724e+02 1.078e+03 1.353e+03 2.729e+03, threshold=2.156e+03, percent-clipped=3.0 +2023-03-31 21:44:26,444 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0740, 1.1983, 0.9097, 1.0520, 1.1341, 0.8110, 0.4153, 1.2699], + device='cuda:1'), covar=tensor([0.0707, 0.0649, 0.1146, 0.0561, 0.0639, 0.1348, 0.1086, 0.0648], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0178, 0.0265, 0.0241, 0.0177, 0.0287, 0.0259, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:44:35,645 INFO [train.py:903] (1/4) Epoch 2, batch 1500, loss[loss=0.4115, simple_loss=0.4321, pruned_loss=0.1954, over 19702.00 frames. ], tot_loss[loss=0.3772, simple_loss=0.4069, pruned_loss=0.1738, over 3801736.62 frames. ], batch size: 59, lr: 3.52e-02, grad_scale: 8.0 +2023-03-31 21:45:10,891 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8357.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:45:38,212 INFO [train.py:903] (1/4) Epoch 2, batch 1550, loss[loss=0.3638, simple_loss=0.4043, pruned_loss=0.1617, over 19530.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4058, pruned_loss=0.1725, over 3800761.49 frames. 
], batch size: 56, lr: 3.51e-02, grad_scale: 8.0 +2023-03-31 21:45:42,362 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8382.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:45:51,885 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-31 21:46:19,242 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.712e+02 9.525e+02 1.175e+03 1.582e+03 3.285e+03, threshold=2.351e+03, percent-clipped=5.0 +2023-03-31 21:46:41,075 INFO [train.py:903] (1/4) Epoch 2, batch 1600, loss[loss=0.3122, simple_loss=0.3605, pruned_loss=0.132, over 19392.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.4057, pruned_loss=0.1723, over 3794063.67 frames. ], batch size: 47, lr: 3.50e-02, grad_scale: 8.0 +2023-03-31 21:47:02,066 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 21:47:39,687 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:47:42,841 INFO [train.py:903] (1/4) Epoch 2, batch 1650, loss[loss=0.343, simple_loss=0.404, pruned_loss=0.141, over 19665.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.4066, pruned_loss=0.1724, over 3793339.66 frames. ], batch size: 58, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:07,776 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3322, 2.4088, 2.4164, 2.3561, 2.3727, 2.2394, 0.3733, 1.9302], + device='cuda:1'), covar=tensor([0.0460, 0.0568, 0.0312, 0.0487, 0.0628, 0.0630, 0.1366, 0.0887], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0205, 0.0195, 0.0231, 0.0269, 0.0240, 0.0245, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 21:48:10,108 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:48:23,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.971e+02 8.791e+02 1.048e+03 1.403e+03 4.696e+03, threshold=2.096e+03, percent-clipped=2.0 +2023-03-31 21:48:44,116 INFO [train.py:903] (1/4) Epoch 2, batch 1700, loss[loss=0.3561, simple_loss=0.376, pruned_loss=0.1681, over 19336.00 frames. ], tot_loss[loss=0.3781, simple_loss=0.4079, pruned_loss=0.1741, over 3804652.89 frames. ], batch size: 44, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:52,071 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8535.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,164 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8560.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,914 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 21:49:46,288 INFO [train.py:903] (1/4) Epoch 2, batch 1750, loss[loss=0.3256, simple_loss=0.3754, pruned_loss=0.1379, over 19612.00 frames. ], tot_loss[loss=0.3771, simple_loss=0.4074, pruned_loss=0.1734, over 3802149.58 frames. 
], batch size: 50, lr: 3.48e-02, grad_scale: 8.0 +2023-03-31 21:50:27,274 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.711e+02 8.589e+02 1.062e+03 1.367e+03 2.706e+03, threshold=2.124e+03, percent-clipped=6.0 +2023-03-31 21:50:47,001 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8627.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:50:48,941 INFO [train.py:903] (1/4) Epoch 2, batch 1800, loss[loss=0.4182, simple_loss=0.4426, pruned_loss=0.1969, over 19779.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.406, pruned_loss=0.1721, over 3792541.63 frames. ], batch size: 54, lr: 3.47e-02, grad_scale: 8.0 +2023-03-31 21:51:10,780 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-31 21:51:48,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 21:51:52,354 INFO [train.py:903] (1/4) Epoch 2, batch 1850, loss[loss=0.4483, simple_loss=0.45, pruned_loss=0.2233, over 13581.00 frames. ], tot_loss[loss=0.3736, simple_loss=0.4049, pruned_loss=0.1712, over 3786374.72 frames. ], batch size: 136, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:52:26,738 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 21:52:32,412 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.683e+02 8.545e+02 1.022e+03 1.402e+03 2.945e+03, threshold=2.044e+03, percent-clipped=4.0 +2023-03-31 21:52:53,440 INFO [train.py:903] (1/4) Epoch 2, batch 1900, loss[loss=0.3881, simple_loss=0.407, pruned_loss=0.1846, over 19760.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4041, pruned_loss=0.1705, over 3791633.54 frames. ], batch size: 51, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:53:10,049 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:13,037 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 21:53:19,069 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 21:53:30,857 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4717, 1.1212, 1.4101, 0.8312, 2.6824, 2.8003, 2.5954, 2.8558], + device='cuda:1'), covar=tensor([0.1464, 0.2743, 0.2776, 0.2184, 0.0346, 0.0151, 0.0296, 0.0196], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0285, 0.0331, 0.0294, 0.0200, 0.0109, 0.0192, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 21:53:44,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 21:53:56,617 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8778.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:57,531 INFO [train.py:903] (1/4) Epoch 2, batch 1950, loss[loss=0.3547, simple_loss=0.3938, pruned_loss=0.1578, over 19716.00 frames. ], tot_loss[loss=0.3709, simple_loss=0.4034, pruned_loss=0.1692, over 3809296.13 frames. 
], batch size: 59, lr: 3.45e-02, grad_scale: 8.0 +2023-03-31 21:54:08,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3208, 1.6209, 1.3079, 2.0948, 1.8581, 2.0930, 2.0414, 2.0324], + device='cuda:1'), covar=tensor([0.0751, 0.1466, 0.1765, 0.1343, 0.1681, 0.1207, 0.1790, 0.0924], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0301, 0.0306, 0.0329, 0.0372, 0.0274, 0.0345, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:54:18,053 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.15 vs. limit=2.0 +2023-03-31 21:54:38,566 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.007e+02 8.441e+02 1.013e+03 1.280e+03 2.038e+03, threshold=2.026e+03, percent-clipped=0.0 +2023-03-31 21:54:43,184 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-31 21:55:00,983 INFO [train.py:903] (1/4) Epoch 2, batch 2000, loss[loss=0.3942, simple_loss=0.4273, pruned_loss=0.1806, over 19526.00 frames. ], tot_loss[loss=0.3707, simple_loss=0.4029, pruned_loss=0.1693, over 3791638.69 frames. ], batch size: 56, lr: 3.44e-02, grad_scale: 8.0 +2023-03-31 21:55:08,315 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1527, 1.4842, 1.7031, 2.0927, 2.0762, 2.3496, 2.8546, 1.8661], + device='cuda:1'), covar=tensor([0.1083, 0.2209, 0.1711, 0.1594, 0.1945, 0.1397, 0.1490, 0.1411], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0298, 0.0301, 0.0328, 0.0372, 0.0272, 0.0340, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 21:56:00,565 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 21:56:01,579 INFO [train.py:903] (1/4) Epoch 2, batch 2050, loss[loss=0.4851, simple_loss=0.4702, pruned_loss=0.25, over 19782.00 frames. ], tot_loss[loss=0.3727, simple_loss=0.4041, pruned_loss=0.1706, over 3802855.69 frames. ], batch size: 56, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:56:19,575 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 21:56:20,763 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 21:56:41,673 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 21:56:44,109 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.978e+02 1.029e+03 1.194e+03 1.427e+03 4.040e+03, threshold=2.389e+03, percent-clipped=7.0 +2023-03-31 21:57:05,503 INFO [train.py:903] (1/4) Epoch 2, batch 2100, loss[loss=0.3512, simple_loss=0.3905, pruned_loss=0.156, over 19574.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4041, pruned_loss=0.1705, over 3809731.09 frames. ], batch size: 61, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:57:25,723 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8945.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:57:36,780 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 21:57:53,987 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. 
limit=2.0 +2023-03-31 21:57:59,426 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 21:58:07,608 INFO [train.py:903] (1/4) Epoch 2, batch 2150, loss[loss=0.3142, simple_loss=0.348, pruned_loss=0.1401, over 19790.00 frames. ], tot_loss[loss=0.3699, simple_loss=0.4023, pruned_loss=0.1687, over 3814506.00 frames. ], batch size: 48, lr: 3.42e-02, grad_scale: 8.0 +2023-03-31 21:58:32,397 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8998.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:39,151 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7972, 1.4040, 1.2713, 1.8230, 1.4768, 1.6551, 1.3536, 1.5826], + device='cuda:1'), covar=tensor([0.0804, 0.1843, 0.1468, 0.1111, 0.1699, 0.0687, 0.1252, 0.0729], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0382, 0.0288, 0.0262, 0.0329, 0.0262, 0.0282, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 21:58:40,128 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9004.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:49,978 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+02 7.392e+02 9.171e+02 1.181e+03 2.165e+03, threshold=1.834e+03, percent-clipped=0.0 +2023-03-31 21:59:04,384 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9023.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:11,991 INFO [train.py:903] (1/4) Epoch 2, batch 2200, loss[loss=0.3821, simple_loss=0.4208, pruned_loss=0.1716, over 19684.00 frames. ], tot_loss[loss=0.3707, simple_loss=0.4036, pruned_loss=0.1689, over 3816127.70 frames. ], batch size: 55, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 21:59:14,443 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9031.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:00:13,975 INFO [train.py:903] (1/4) Epoch 2, batch 2250, loss[loss=0.4086, simple_loss=0.4421, pruned_loss=0.1876, over 19442.00 frames. ], tot_loss[loss=0.3701, simple_loss=0.4033, pruned_loss=0.1684, over 3819723.04 frames. ], batch size: 70, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 22:00:39,598 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2524, 2.2308, 1.8821, 2.7085, 2.0310, 2.6224, 3.0468, 2.3853], + device='cuda:1'), covar=tensor([0.0621, 0.1285, 0.1549, 0.1366, 0.1742, 0.1200, 0.1267, 0.0902], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0300, 0.0297, 0.0328, 0.0367, 0.0271, 0.0335, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 22:00:56,138 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.993e+02 8.727e+02 9.943e+02 1.293e+03 2.077e+03, threshold=1.989e+03, percent-clipped=4.0 +2023-03-31 22:01:08,140 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9122.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:01:10,659 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.40 vs. limit=5.0 +2023-03-31 22:01:17,886 INFO [train.py:903] (1/4) Epoch 2, batch 2300, loss[loss=0.362, simple_loss=0.404, pruned_loss=0.16, over 19462.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.4037, pruned_loss=0.1685, over 3820461.75 frames. 
], batch size: 64, lr: 3.40e-02, grad_scale: 8.0 +2023-03-31 22:01:31,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-03-31 22:02:19,095 INFO [train.py:903] (1/4) Epoch 2, batch 2350, loss[loss=0.3862, simple_loss=0.4218, pruned_loss=0.1753, over 19512.00 frames. ], tot_loss[loss=0.3702, simple_loss=0.4034, pruned_loss=0.1684, over 3827423.88 frames. ], batch size: 64, lr: 3.39e-02, grad_scale: 8.0 +2023-03-31 22:03:00,734 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.988e+02 9.117e+02 1.090e+03 1.432e+03 2.529e+03, threshold=2.180e+03, percent-clipped=5.0 +2023-03-31 22:03:00,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 22:03:18,257 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 22:03:22,887 INFO [train.py:903] (1/4) Epoch 2, batch 2400, loss[loss=0.3796, simple_loss=0.4185, pruned_loss=0.1703, over 19681.00 frames. ], tot_loss[loss=0.3678, simple_loss=0.4015, pruned_loss=0.167, over 3834403.61 frames. ], batch size: 59, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:03:32,282 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9237.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:24,300 INFO [train.py:903] (1/4) Epoch 2, batch 2450, loss[loss=0.3914, simple_loss=0.4251, pruned_loss=0.1788, over 18150.00 frames. ], tot_loss[loss=0.3692, simple_loss=0.4024, pruned_loss=0.168, over 3838686.08 frames. ], batch size: 83, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:04:38,156 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9289.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:50,698 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-31 22:05:06,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 8.791e+02 1.096e+03 1.484e+03 3.289e+03, threshold=2.192e+03, percent-clipped=7.0 +2023-03-31 22:05:13,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.5299, 0.8221, 0.6010, 0.6753, 0.7390, 0.5032, 0.2130, 0.8156], + device='cuda:1'), covar=tensor([0.0589, 0.0359, 0.0835, 0.0423, 0.0466, 0.1016, 0.0890, 0.0474], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0184, 0.0280, 0.0250, 0.0191, 0.0293, 0.0263, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-31 22:05:27,994 INFO [train.py:903] (1/4) Epoch 2, batch 2500, loss[loss=0.3596, simple_loss=0.4001, pruned_loss=0.1596, over 19666.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.4021, pruned_loss=0.168, over 3832258.80 frames. ], batch size: 60, lr: 3.37e-02, grad_scale: 8.0 +2023-03-31 22:05:52,442 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9348.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:25,078 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9375.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:29,684 INFO [train.py:903] (1/4) Epoch 2, batch 2550, loss[loss=0.3275, simple_loss=0.3624, pruned_loss=0.1463, over 19486.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4018, pruned_loss=0.1674, over 3831397.46 frames. 
], batch size: 49, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:01,788 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9404.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:07:11,556 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+02 8.134e+02 9.492e+02 1.275e+03 2.544e+03, threshold=1.898e+03, percent-clipped=3.0 +2023-03-31 22:07:26,384 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 22:07:33,270 INFO [train.py:903] (1/4) Epoch 2, batch 2600, loss[loss=0.3555, simple_loss=0.3998, pruned_loss=0.1556, over 19580.00 frames. ], tot_loss[loss=0.3678, simple_loss=0.4016, pruned_loss=0.167, over 3836683.86 frames. ], batch size: 61, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:36,914 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:14,519 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9463.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:34,690 INFO [train.py:903] (1/4) Epoch 2, batch 2650, loss[loss=0.3614, simple_loss=0.4024, pruned_loss=0.1602, over 19781.00 frames. ], tot_loss[loss=0.367, simple_loss=0.401, pruned_loss=0.1665, over 3842174.43 frames. ], batch size: 56, lr: 3.35e-02, grad_scale: 8.0 +2023-03-31 22:08:49,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9490.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:52,740 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9493.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:53,507 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 22:09:16,684 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.258e+02 8.887e+02 1.023e+03 1.383e+03 3.476e+03, threshold=2.047e+03, percent-clipped=7.0 +2023-03-31 22:09:23,987 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9518.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:09:36,965 INFO [train.py:903] (1/4) Epoch 2, batch 2700, loss[loss=0.4118, simple_loss=0.4404, pruned_loss=0.1916, over 19787.00 frames. ], tot_loss[loss=0.3689, simple_loss=0.4023, pruned_loss=0.1678, over 3817902.08 frames. ], batch size: 56, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:09:58,953 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. limit=5.0 +2023-03-31 22:10:39,553 INFO [train.py:903] (1/4) Epoch 2, batch 2750, loss[loss=0.3351, simple_loss=0.381, pruned_loss=0.1446, over 19785.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4015, pruned_loss=0.1673, over 3808852.03 frames. ], batch size: 54, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:11:08,792 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9602.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:11:20,990 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.046e+02 9.047e+02 1.065e+03 1.297e+03 2.590e+03, threshold=2.130e+03, percent-clipped=3.0 +2023-03-31 22:11:43,324 INFO [train.py:903] (1/4) Epoch 2, batch 2800, loss[loss=0.4096, simple_loss=0.4456, pruned_loss=0.1868, over 19605.00 frames. ], tot_loss[loss=0.3682, simple_loss=0.4025, pruned_loss=0.1669, over 3821285.38 frames. 
], batch size: 57, lr: 3.33e-02, grad_scale: 8.0 +2023-03-31 22:12:20,933 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9660.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:41,335 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9676.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:44,607 INFO [train.py:903] (1/4) Epoch 2, batch 2850, loss[loss=0.2846, simple_loss=0.3453, pruned_loss=0.1119, over 19761.00 frames. ], tot_loss[loss=0.3679, simple_loss=0.4024, pruned_loss=0.1667, over 3813492.07 frames. ], batch size: 51, lr: 3.32e-02, grad_scale: 8.0 +2023-03-31 22:12:51,798 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9685.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:26,587 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.408e+02 8.898e+02 1.125e+03 1.384e+03 2.599e+03, threshold=2.251e+03, percent-clipped=6.0 +2023-03-31 22:13:35,203 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:43,464 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6175, 3.8306, 4.1231, 3.9599, 1.5035, 3.6177, 3.3273, 3.5603], + device='cuda:1'), covar=tensor([0.0364, 0.0450, 0.0386, 0.0328, 0.2885, 0.0205, 0.0400, 0.1005], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0281, 0.0375, 0.0263, 0.0421, 0.0183, 0.0260, 0.0379], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 22:13:45,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 22:13:46,840 INFO [train.py:903] (1/4) Epoch 2, batch 2900, loss[loss=0.3228, simple_loss=0.3507, pruned_loss=0.1475, over 19718.00 frames. ], tot_loss[loss=0.366, simple_loss=0.4008, pruned_loss=0.1656, over 3816697.75 frames. ], batch size: 46, lr: 3.31e-02, grad_scale: 16.0 +2023-03-31 22:14:06,881 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9744.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:09,003 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9746.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:23,218 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9020, 1.4905, 1.2039, 1.6710, 1.4345, 1.9120, 2.0751, 1.8706], + device='cuda:1'), covar=tensor([0.0915, 0.1417, 0.1830, 0.1662, 0.2118, 0.1066, 0.1330, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0307, 0.0303, 0.0332, 0.0361, 0.0273, 0.0331, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 22:14:27,818 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:39,344 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:45,602 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:49,535 INFO [train.py:903] (1/4) Epoch 2, batch 2950, loss[loss=0.4028, simple_loss=0.4153, pruned_loss=0.1952, over 19391.00 frames. ], tot_loss[loss=0.3644, simple_loss=0.3994, pruned_loss=0.1647, over 3830385.43 frames. 
], batch size: 48, lr: 3.31e-02, grad_scale: 8.0 +2023-03-31 22:14:52,367 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-31 22:15:31,703 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+02 8.475e+02 1.131e+03 1.413e+03 3.215e+03, threshold=2.262e+03, percent-clipped=4.0 +2023-03-31 22:15:53,078 INFO [train.py:903] (1/4) Epoch 2, batch 3000, loss[loss=0.3281, simple_loss=0.3638, pruned_loss=0.1462, over 19769.00 frames. ], tot_loss[loss=0.3633, simple_loss=0.3988, pruned_loss=0.1639, over 3827110.36 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 4.0 +2023-03-31 22:15:53,078 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 22:16:06,234 INFO [train.py:937] (1/4) Epoch 2, validation: loss=0.2513, simple_loss=0.3423, pruned_loss=0.08019, over 944034.00 frames. +2023-03-31 22:16:06,236 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-03-31 22:16:12,116 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 22:16:54,068 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-31 22:17:08,086 INFO [train.py:903] (1/4) Epoch 2, batch 3050, loss[loss=0.3549, simple_loss=0.3804, pruned_loss=0.1646, over 19770.00 frames. ], tot_loss[loss=0.3615, simple_loss=0.3971, pruned_loss=0.1629, over 3843416.39 frames. ], batch size: 47, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:17:22,798 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9891.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:17:50,737 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 8.806e+02 1.158e+03 1.402e+03 3.282e+03, threshold=2.315e+03, percent-clipped=2.0 +2023-03-31 22:18:10,040 INFO [train.py:903] (1/4) Epoch 2, batch 3100, loss[loss=0.3503, simple_loss=0.3875, pruned_loss=0.1565, over 19853.00 frames. ], tot_loss[loss=0.3651, simple_loss=0.3993, pruned_loss=0.1654, over 3819621.03 frames. ], batch size: 52, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:18:29,700 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9946.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:19:11,240 INFO [train.py:903] (1/4) Epoch 2, batch 3150, loss[loss=0.4158, simple_loss=0.4329, pruned_loss=0.1993, over 19356.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.3981, pruned_loss=0.1646, over 3830043.30 frames. ], batch size: 70, lr: 3.28e-02, grad_scale: 4.0 +2023-03-31 22:19:42,639 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 22:19:56,512 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 7.345e+02 9.404e+02 1.247e+03 3.615e+03, threshold=1.881e+03, percent-clipped=3.0 +2023-03-31 22:20:03,835 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10020.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:15,207 INFO [train.py:903] (1/4) Epoch 2, batch 3200, loss[loss=0.3589, simple_loss=0.4061, pruned_loss=0.1559, over 19600.00 frames. ], tot_loss[loss=0.3629, simple_loss=0.3977, pruned_loss=0.1641, over 3829689.91 frames. 
], batch size: 57, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:20:50,873 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10057.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:55,701 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:21:19,932 INFO [train.py:903] (1/4) Epoch 2, batch 3250, loss[loss=0.5004, simple_loss=0.4978, pruned_loss=0.2516, over 19507.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.3959, pruned_loss=0.1621, over 3846718.88 frames. ], batch size: 64, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:21:21,254 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10080.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:21:50,580 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:02,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+02 8.486e+02 1.037e+03 1.291e+03 3.604e+03, threshold=2.074e+03, percent-clipped=6.0 +2023-03-31 22:22:20,718 INFO [train.py:903] (1/4) Epoch 2, batch 3300, loss[loss=0.3392, simple_loss=0.3839, pruned_loss=0.1473, over 19520.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.3979, pruned_loss=0.1635, over 3843812.94 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:22:25,202 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 22:22:27,873 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10135.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:42,693 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10147.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:14,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10172.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:18,679 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-31 22:23:22,721 INFO [train.py:903] (1/4) Epoch 2, batch 3350, loss[loss=0.3886, simple_loss=0.4179, pruned_loss=0.1796, over 19770.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.3969, pruned_loss=0.1631, over 3833640.25 frames. ], batch size: 56, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:24:07,628 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.361e+02 9.909e+02 1.198e+03 2.844e+03, threshold=1.982e+03, percent-clipped=3.0 +2023-03-31 22:24:14,942 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:24:25,934 INFO [train.py:903] (1/4) Epoch 2, batch 3400, loss[loss=0.355, simple_loss=0.3949, pruned_loss=0.1576, over 19671.00 frames. ], tot_loss[loss=0.3595, simple_loss=0.3957, pruned_loss=0.1617, over 3832080.79 frames. 
], batch size: 55, lr: 3.25e-02, grad_scale: 8.0 +2023-03-31 22:24:47,983 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1209, 1.1199, 1.5174, 1.2367, 1.9576, 1.9085, 2.0824, 0.5590], + device='cuda:1'), covar=tensor([0.1212, 0.1697, 0.0996, 0.1222, 0.0630, 0.0801, 0.0662, 0.1490], + device='cuda:1'), in_proj_covar=tensor([0.0355, 0.0375, 0.0347, 0.0355, 0.0410, 0.0332, 0.0475, 0.0360], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:25:29,363 INFO [train.py:903] (1/4) Epoch 2, batch 3450, loss[loss=0.3068, simple_loss=0.3607, pruned_loss=0.1264, over 19733.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.3967, pruned_loss=0.1634, over 3822717.92 frames. ], batch size: 51, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:25:32,672 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 22:26:13,338 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+02 9.409e+02 1.166e+03 1.453e+03 2.796e+03, threshold=2.333e+03, percent-clipped=9.0 +2023-03-31 22:26:17,028 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10317.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:26:31,595 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-31 22:26:31,777 INFO [train.py:903] (1/4) Epoch 2, batch 3500, loss[loss=0.2757, simple_loss=0.3274, pruned_loss=0.112, over 19748.00 frames. ], tot_loss[loss=0.3599, simple_loss=0.3954, pruned_loss=0.1622, over 3827593.27 frames. ], batch size: 45, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:26:47,019 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10342.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:27:33,762 INFO [train.py:903] (1/4) Epoch 2, batch 3550, loss[loss=0.3623, simple_loss=0.3978, pruned_loss=0.1633, over 18141.00 frames. ], tot_loss[loss=0.3625, simple_loss=0.3974, pruned_loss=0.1638, over 3823961.02 frames. ], batch size: 83, lr: 3.23e-02, grad_scale: 4.0 +2023-03-31 22:27:49,487 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10391.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:04,141 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10401.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:20,153 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+02 7.776e+02 1.013e+03 1.369e+03 3.978e+03, threshold=2.027e+03, percent-clipped=2.0 +2023-03-31 22:28:21,549 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10416.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:30,587 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10424.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:28:35,010 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10427.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:37,136 INFO [train.py:903] (1/4) Epoch 2, batch 3600, loss[loss=0.3571, simple_loss=0.3895, pruned_loss=0.1624, over 19837.00 frames. ], tot_loss[loss=0.3619, simple_loss=0.3971, pruned_loss=0.1634, over 3818951.71 frames. ], batch size: 52, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:26,943 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-03-31 22:29:32,580 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10472.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:37,345 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:41,347 INFO [train.py:903] (1/4) Epoch 2, batch 3650, loss[loss=0.3031, simple_loss=0.3506, pruned_loss=0.1278, over 19390.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.3962, pruned_loss=0.162, over 3828222.94 frames. ], batch size: 48, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:30:09,140 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:11,812 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 22:30:26,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+02 8.454e+02 1.072e+03 1.396e+03 2.688e+03, threshold=2.143e+03, percent-clipped=6.0 +2023-03-31 22:30:27,515 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10516.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:45,121 INFO [train.py:903] (1/4) Epoch 2, batch 3700, loss[loss=0.336, simple_loss=0.3764, pruned_loss=0.1478, over 19622.00 frames. ], tot_loss[loss=0.3615, simple_loss=0.397, pruned_loss=0.163, over 3828861.92 frames. ], batch size: 50, lr: 3.21e-02, grad_scale: 8.0 +2023-03-31 22:30:57,263 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10539.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:30:59,693 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6774, 1.6099, 1.6547, 2.2623, 1.5834, 2.1135, 1.8637, 1.4926], + device='cuda:1'), covar=tensor([0.0844, 0.0705, 0.0433, 0.0374, 0.0773, 0.0269, 0.0883, 0.0724], + device='cuda:1'), in_proj_covar=tensor([0.0346, 0.0314, 0.0328, 0.0423, 0.0394, 0.0231, 0.0427, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:31:47,573 INFO [train.py:903] (1/4) Epoch 2, batch 3750, loss[loss=0.3208, simple_loss=0.3796, pruned_loss=0.131, over 19798.00 frames. ], tot_loss[loss=0.3578, simple_loss=0.3948, pruned_loss=0.1604, over 3836045.75 frames. ], batch size: 56, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:32:33,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+02 8.986e+02 1.057e+03 1.326e+03 2.585e+03, threshold=2.114e+03, percent-clipped=3.0 +2023-03-31 22:32:50,174 INFO [train.py:903] (1/4) Epoch 2, batch 3800, loss[loss=0.4934, simple_loss=0.4847, pruned_loss=0.2511, over 19470.00 frames. ], tot_loss[loss=0.3593, simple_loss=0.396, pruned_loss=0.1613, over 3825111.17 frames. ], batch size: 64, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:33:09,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2313, 0.9061, 0.9975, 1.4706, 0.9994, 1.1556, 1.3977, 1.1068], + device='cuda:1'), covar=tensor([0.0964, 0.1782, 0.1510, 0.0962, 0.1309, 0.1318, 0.1051, 0.1147], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0311, 0.0302, 0.0325, 0.0357, 0.0277, 0.0324, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 22:33:23,955 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-03-31 22:33:51,757 INFO [train.py:903] (1/4) Epoch 2, batch 3850, loss[loss=0.4155, simple_loss=0.4421, pruned_loss=0.1945, over 19299.00 frames. ], tot_loss[loss=0.3595, simple_loss=0.3964, pruned_loss=0.1613, over 3829732.06 frames. ], batch size: 66, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:34:37,544 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.106e+02 8.604e+02 1.077e+03 1.442e+03 2.985e+03, threshold=2.155e+03, percent-clipped=6.0 +2023-03-31 22:34:40,397 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.04 vs. limit=5.0 +2023-03-31 22:34:41,605 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-31 22:34:55,297 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-31 22:34:56,855 INFO [train.py:903] (1/4) Epoch 2, batch 3900, loss[loss=0.4025, simple_loss=0.4387, pruned_loss=0.1831, over 19478.00 frames. ], tot_loss[loss=0.3607, simple_loss=0.3977, pruned_loss=0.1618, over 3820087.10 frames. ], batch size: 64, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:35:36,279 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10762.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:49,462 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:50,828 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10772.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:58,742 INFO [train.py:903] (1/4) Epoch 2, batch 3950, loss[loss=0.3548, simple_loss=0.3977, pruned_loss=0.156, over 19598.00 frames. ], tot_loss[loss=0.3586, simple_loss=0.3963, pruned_loss=0.1605, over 3831702.78 frames. ], batch size: 61, lr: 3.18e-02, grad_scale: 8.0 +2023-03-31 22:36:04,546 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 22:36:06,222 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3366, 1.2947, 2.0230, 1.5638, 3.0920, 3.3928, 3.5171, 1.2855], + device='cuda:1'), covar=tensor([0.1249, 0.1919, 0.1189, 0.1180, 0.0883, 0.0659, 0.1040, 0.2051], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0379, 0.0351, 0.0349, 0.0409, 0.0330, 0.0482, 0.0368], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:36:14,864 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. 
limit=2.0 +2023-03-31 22:36:15,292 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10792.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:16,546 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7482, 1.5790, 1.6905, 1.7239, 2.9004, 4.4269, 4.4062, 4.6610], + device='cuda:1'), covar=tensor([0.1486, 0.2606, 0.2754, 0.1985, 0.0480, 0.0095, 0.0136, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0279, 0.0324, 0.0282, 0.0196, 0.0105, 0.0188, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 22:36:18,920 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10795.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:36:21,056 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10797.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:44,828 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.316e+02 7.972e+02 1.008e+03 1.220e+03 2.629e+03, threshold=2.016e+03, percent-clipped=1.0 +2023-03-31 22:36:46,286 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10816.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:51,323 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10820.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:37:01,759 INFO [train.py:903] (1/4) Epoch 2, batch 4000, loss[loss=0.3406, simple_loss=0.3804, pruned_loss=0.1504, over 19841.00 frames. ], tot_loss[loss=0.3572, simple_loss=0.395, pruned_loss=0.1596, over 3809135.73 frames. ], batch size: 52, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:37:49,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 22:37:59,127 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6355, 1.6348, 1.5946, 2.0459, 3.2012, 1.4354, 2.1148, 3.1815], + device='cuda:1'), covar=tensor([0.0309, 0.2393, 0.2367, 0.1542, 0.0413, 0.2014, 0.1083, 0.0435], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0296, 0.0278, 0.0274, 0.0255, 0.0309, 0.0248, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 22:38:04,673 INFO [train.py:903] (1/4) Epoch 2, batch 4050, loss[loss=0.3669, simple_loss=0.4012, pruned_loss=0.1663, over 13555.00 frames. ], tot_loss[loss=0.3574, simple_loss=0.3948, pruned_loss=0.1599, over 3796100.62 frames. ], batch size: 136, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:38:15,732 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10886.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:38:20,803 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-31 22:38:35,400 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10903.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:38:49,942 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.194e+02 9.186e+02 1.101e+03 1.338e+03 4.215e+03, threshold=2.201e+03, percent-clipped=7.0 +2023-03-31 22:39:10,611 INFO [train.py:903] (1/4) Epoch 2, batch 4100, loss[loss=0.3485, simple_loss=0.3917, pruned_loss=0.1527, over 19694.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3932, pruned_loss=0.1588, over 3785573.21 frames. 
], batch size: 59, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:39:13,300 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10931.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:39:39,786 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.6106, 0.9037, 0.6696, 0.7374, 0.8109, 0.5385, 0.3043, 0.7823], + device='cuda:1'), covar=tensor([0.0522, 0.0345, 0.0732, 0.0419, 0.0367, 0.0847, 0.0692, 0.0403], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0198, 0.0284, 0.0249, 0.0196, 0.0291, 0.0270, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-31 22:39:45,325 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 22:39:58,418 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9031, 1.4775, 1.6253, 1.6037, 2.7277, 4.5198, 4.4578, 4.8423], + device='cuda:1'), covar=tensor([0.1364, 0.2663, 0.2783, 0.1997, 0.0528, 0.0091, 0.0116, 0.0068], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0279, 0.0329, 0.0285, 0.0195, 0.0106, 0.0188, 0.0108], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 22:40:13,127 INFO [train.py:903] (1/4) Epoch 2, batch 4150, loss[loss=0.3466, simple_loss=0.3979, pruned_loss=0.1476, over 19305.00 frames. ], tot_loss[loss=0.3546, simple_loss=0.3928, pruned_loss=0.1582, over 3792538.21 frames. ], batch size: 66, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:40:59,340 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 7.731e+02 1.007e+03 1.258e+03 2.097e+03, threshold=2.015e+03, percent-clipped=0.0 +2023-03-31 22:41:07,856 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7993, 2.0419, 2.2825, 2.3668, 3.6215, 1.9059, 2.7055, 3.3566], + device='cuda:1'), covar=tensor([0.0392, 0.1959, 0.1690, 0.1205, 0.0306, 0.1650, 0.1060, 0.0395], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0304, 0.0288, 0.0279, 0.0254, 0.0318, 0.0259, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:41:15,495 INFO [train.py:903] (1/4) Epoch 2, batch 4200, loss[loss=0.344, simple_loss=0.3798, pruned_loss=0.1541, over 19847.00 frames. ], tot_loss[loss=0.3548, simple_loss=0.3931, pruned_loss=0.1582, over 3794251.63 frames. ], batch size: 52, lr: 3.15e-02, grad_scale: 8.0 +2023-03-31 22:41:18,919 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 22:41:43,744 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-31 22:42:18,002 INFO [train.py:903] (1/4) Epoch 2, batch 4250, loss[loss=0.3991, simple_loss=0.4076, pruned_loss=0.1953, over 19833.00 frames. ], tot_loss[loss=0.3557, simple_loss=0.3931, pruned_loss=0.1591, over 3801006.64 frames. 
], batch size: 52, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:42:26,299 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1511, 1.9446, 1.8250, 2.7943, 2.0295, 2.9496, 2.3236, 1.7487], + device='cuda:1'), covar=tensor([0.0888, 0.0743, 0.0511, 0.0478, 0.0907, 0.0222, 0.0937, 0.0813], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0322, 0.0342, 0.0446, 0.0410, 0.0242, 0.0445, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:42:35,133 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 22:42:45,399 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 22:42:52,693 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:03,987 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.611e+02 8.485e+02 1.107e+03 1.406e+03 3.284e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 22:43:21,882 INFO [train.py:903] (1/4) Epoch 2, batch 4300, loss[loss=0.2916, simple_loss=0.3438, pruned_loss=0.1197, over 19377.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.3922, pruned_loss=0.1582, over 3799439.36 frames. ], batch size: 48, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:43:30,408 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11136.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:37,635 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11142.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:09,025 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11167.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:16,407 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 22:44:23,250 INFO [train.py:903] (1/4) Epoch 2, batch 4350, loss[loss=0.3548, simple_loss=0.3997, pruned_loss=0.1549, over 18859.00 frames. ], tot_loss[loss=0.3527, simple_loss=0.391, pruned_loss=0.1572, over 3804982.94 frames. ], batch size: 74, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:44:32,889 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11187.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:39,332 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11192.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:04,735 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11212.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:09,091 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.029e+02 7.841e+02 9.559e+02 1.160e+03 2.939e+03, threshold=1.912e+03, percent-clipped=2.0 +2023-03-31 22:45:15,625 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:24,371 INFO [train.py:903] (1/4) Epoch 2, batch 4400, loss[loss=0.3854, simple_loss=0.4217, pruned_loss=0.1746, over 19662.00 frames. ], tot_loss[loss=0.351, simple_loss=0.3898, pruned_loss=0.1561, over 3817225.24 frames. 
], batch size: 59, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:45:48,406 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11247.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:45:50,511 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 22:45:54,217 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11251.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:46:00,715 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 22:46:21,482 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-31 22:46:27,505 INFO [train.py:903] (1/4) Epoch 2, batch 4450, loss[loss=0.3571, simple_loss=0.4001, pruned_loss=0.157, over 19525.00 frames. ], tot_loss[loss=0.3497, simple_loss=0.3887, pruned_loss=0.1553, over 3820060.47 frames. ], batch size: 54, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:47:14,189 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.820e+02 1.081e+03 1.372e+03 2.333e+03, threshold=2.162e+03, percent-clipped=5.0 +2023-03-31 22:47:31,602 INFO [train.py:903] (1/4) Epoch 2, batch 4500, loss[loss=0.4342, simple_loss=0.4441, pruned_loss=0.2121, over 13520.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3895, pruned_loss=0.1565, over 3795329.65 frames. ], batch size: 136, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:48:00,561 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7009, 1.5142, 1.4305, 2.0136, 1.7996, 1.5719, 1.6431, 1.8105], + device='cuda:1'), covar=tensor([0.0846, 0.1888, 0.1339, 0.0998, 0.1188, 0.0673, 0.0991, 0.0605], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0376, 0.0286, 0.0258, 0.0311, 0.0260, 0.0275, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 22:48:12,960 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11362.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:48:34,556 INFO [train.py:903] (1/4) Epoch 2, batch 4550, loss[loss=0.266, simple_loss=0.3231, pruned_loss=0.1045, over 19757.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.389, pruned_loss=0.1558, over 3795513.28 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 8.0 +2023-03-31 22:48:37,411 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9456, 1.5422, 1.8847, 1.7908, 2.8902, 4.0614, 4.3557, 4.8692], + device='cuda:1'), covar=tensor([0.1429, 0.2733, 0.2585, 0.1948, 0.0472, 0.0205, 0.0145, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0277, 0.0324, 0.0281, 0.0191, 0.0106, 0.0189, 0.0109], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 22:48:45,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 22:49:08,222 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-03-31 22:49:21,603 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.316e+02 7.788e+02 1.003e+03 1.220e+03 2.356e+03, threshold=2.005e+03, percent-clipped=1.0 +2023-03-31 22:49:31,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7715, 4.5824, 5.3998, 5.2917, 1.9340, 4.9079, 4.3901, 4.8039], + device='cuda:1'), covar=tensor([0.0311, 0.0452, 0.0284, 0.0181, 0.2903, 0.0151, 0.0256, 0.0678], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0293, 0.0408, 0.0294, 0.0436, 0.0189, 0.0268, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 22:49:37,101 INFO [train.py:903] (1/4) Epoch 2, batch 4600, loss[loss=0.3535, simple_loss=0.4003, pruned_loss=0.1533, over 19599.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.387, pruned_loss=0.1539, over 3815373.49 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:37,505 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11477.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:50:39,644 INFO [train.py:903] (1/4) Epoch 2, batch 4650, loss[loss=0.3221, simple_loss=0.3569, pruned_loss=0.1437, over 19069.00 frames. ], tot_loss[loss=0.3499, simple_loss=0.3889, pruned_loss=0.1554, over 3812299.31 frames. ], batch size: 42, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:59,949 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 22:51:02,638 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8053, 1.8429, 1.6812, 2.3014, 1.8591, 2.6587, 2.6045, 2.1859], + device='cuda:1'), covar=tensor([0.0699, 0.1299, 0.1435, 0.1460, 0.1699, 0.0933, 0.1365, 0.0873], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0310, 0.0296, 0.0325, 0.0352, 0.0269, 0.0326, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-31 22:51:09,695 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11502.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:10,450 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 22:51:15,583 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11507.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:26,519 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.039e+02 8.212e+02 1.126e+03 1.409e+03 2.689e+03, threshold=2.252e+03, percent-clipped=6.0 +2023-03-31 22:51:42,948 INFO [train.py:903] (1/4) Epoch 2, batch 4700, loss[loss=0.3999, simple_loss=0.4256, pruned_loss=0.1871, over 19598.00 frames. ], tot_loss[loss=0.3496, simple_loss=0.3892, pruned_loss=0.155, over 3821494.88 frames. ], batch size: 61, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:51:48,018 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:52,267 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11536.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:05,041 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-03-31 22:52:08,750 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11550.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:46,155 INFO [train.py:903] (1/4) Epoch 2, batch 4750, loss[loss=0.3083, simple_loss=0.3526, pruned_loss=0.132, over 19388.00 frames. ], tot_loss[loss=0.35, simple_loss=0.3895, pruned_loss=0.1552, over 3815706.33 frames. ], batch size: 48, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:53:23,995 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-31 22:53:32,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+02 7.433e+02 1.012e+03 1.332e+03 3.283e+03, threshold=2.025e+03, percent-clipped=2.0 +2023-03-31 22:53:35,149 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11618.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:53:36,118 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8775, 3.5584, 2.2497, 3.1688, 1.3150, 3.3864, 3.1306, 3.2629], + device='cuda:1'), covar=tensor([0.0671, 0.1040, 0.1883, 0.0733, 0.3067, 0.0848, 0.0705, 0.0822], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0281, 0.0311, 0.0256, 0.0320, 0.0279, 0.0220, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 22:53:45,440 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0927, 1.5052, 1.6381, 2.3129, 1.9953, 2.5298, 2.8830, 2.2987], + device='cuda:1'), covar=tensor([0.0449, 0.1274, 0.1304, 0.1249, 0.1432, 0.0902, 0.1167, 0.0746], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0306, 0.0292, 0.0321, 0.0345, 0.0264, 0.0320, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-31 22:53:47,416 INFO [train.py:903] (1/4) Epoch 2, batch 4800, loss[loss=0.3652, simple_loss=0.4053, pruned_loss=0.1626, over 19657.00 frames. ], tot_loss[loss=0.3501, simple_loss=0.3895, pruned_loss=0.1553, over 3814818.85 frames. ], batch size: 60, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:53:53,497 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-31 22:54:04,647 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11643.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:54:15,083 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11651.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:19,416 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:49,928 INFO [train.py:903] (1/4) Epoch 2, batch 4850, loss[loss=0.3838, simple_loss=0.42, pruned_loss=0.1738, over 19743.00 frames. ], tot_loss[loss=0.3527, simple_loss=0.3914, pruned_loss=0.157, over 3807579.89 frames. ], batch size: 63, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:55:14,996 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 22:55:34,589 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-03-31 22:55:36,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.585e+02 1.008e+03 1.288e+03 2.592e+03, threshold=2.016e+03, percent-clipped=4.0 +2023-03-31 22:55:38,523 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:55:40,653 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 22:55:41,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 22:55:42,123 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9530, 1.3321, 1.2011, 1.8220, 1.4877, 1.9278, 2.2121, 1.6833], + device='cuda:1'), covar=tensor([0.0778, 0.1482, 0.1602, 0.1434, 0.1588, 0.1108, 0.1284, 0.0918], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0310, 0.0293, 0.0321, 0.0350, 0.0270, 0.0321, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-31 22:55:51,139 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-03-31 22:55:53,220 INFO [train.py:903] (1/4) Epoch 2, batch 4900, loss[loss=0.3419, simple_loss=0.3792, pruned_loss=0.1523, over 19471.00 frames. ], tot_loss[loss=0.3509, simple_loss=0.3904, pruned_loss=0.1557, over 3820926.81 frames. ], batch size: 49, lr: 3.07e-02, grad_scale: 8.0 +2023-03-31 22:56:13,257 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 22:56:25,972 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-31 22:56:55,832 INFO [train.py:903] (1/4) Epoch 2, batch 4950, loss[loss=0.307, simple_loss=0.3435, pruned_loss=0.1353, over 19338.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3907, pruned_loss=0.1561, over 3829793.76 frames. ], batch size: 44, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:57:12,065 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 22:57:37,558 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 22:57:41,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+02 9.257e+02 1.136e+03 1.412e+03 3.441e+03, threshold=2.272e+03, percent-clipped=4.0 +2023-03-31 22:57:57,804 INFO [train.py:903] (1/4) Epoch 2, batch 5000, loss[loss=0.3266, simple_loss=0.3658, pruned_loss=0.1438, over 19332.00 frames. ], tot_loss[loss=0.353, simple_loss=0.3919, pruned_loss=0.1571, over 3815097.49 frames. ], batch size: 44, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:58:04,641 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:58:07,425 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 22:58:16,656 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 22:58:42,531 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11864.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:00,547 INFO [train.py:903] (1/4) Epoch 2, batch 5050, loss[loss=0.3117, simple_loss=0.3502, pruned_loss=0.1366, over 19699.00 frames. 
], tot_loss[loss=0.3534, simple_loss=0.3915, pruned_loss=0.1576, over 3820056.85 frames. ], batch size: 45, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 22:59:19,025 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11894.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:36,730 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:38,657 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 22:59:48,010 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 8.836e+02 1.119e+03 1.460e+03 3.605e+03, threshold=2.237e+03, percent-clipped=7.0 +2023-03-31 22:59:48,451 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2683, 2.0514, 1.4647, 1.8064, 1.4472, 1.5628, 0.2429, 1.2010], + device='cuda:1'), covar=tensor([0.0292, 0.0271, 0.0271, 0.0224, 0.0611, 0.0420, 0.0697, 0.0514], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0230, 0.0230, 0.0244, 0.0307, 0.0263, 0.0260, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 23:00:03,340 INFO [train.py:903] (1/4) Epoch 2, batch 5100, loss[loss=0.3284, simple_loss=0.3847, pruned_loss=0.1361, over 19487.00 frames. ], tot_loss[loss=0.3514, simple_loss=0.3903, pruned_loss=0.1563, over 3821190.68 frames. ], batch size: 64, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 23:00:08,038 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11932.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:16,232 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11938.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:16,322 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5122, 1.0620, 1.0889, 1.6585, 1.3143, 1.6834, 1.8309, 1.3422], + device='cuda:1'), covar=tensor([0.0863, 0.1501, 0.1613, 0.1143, 0.1304, 0.0921, 0.1053, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0306, 0.0294, 0.0325, 0.0343, 0.0263, 0.0315, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-31 23:00:19,213 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 23:00:21,237 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-31 23:00:22,858 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 23:00:26,277 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-03-31 23:00:43,217 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1312, 4.7665, 5.7839, 5.5397, 1.8463, 5.1459, 4.6305, 5.1666], + device='cuda:1'), covar=tensor([0.0341, 0.0399, 0.0312, 0.0203, 0.2818, 0.0124, 0.0312, 0.0639], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0297, 0.0398, 0.0295, 0.0436, 0.0197, 0.0276, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 23:00:50,278 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11966.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:07,394 INFO [train.py:903] (1/4) Epoch 2, batch 5150, loss[loss=0.3428, simple_loss=0.3906, pruned_loss=0.1475, over 19788.00 frames. ], tot_loss[loss=0.3514, simple_loss=0.3903, pruned_loss=0.1563, over 3818986.52 frames. ], batch size: 56, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:01:20,935 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:01:29,904 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11997.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:36,653 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12002.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:44,883 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:54,415 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 8.254e+02 9.989e+02 1.287e+03 2.673e+03, threshold=1.998e+03, percent-clipped=4.0 +2023-03-31 23:01:56,786 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:02:10,382 INFO [train.py:903] (1/4) Epoch 2, batch 5200, loss[loss=0.3272, simple_loss=0.3712, pruned_loss=0.1416, over 19591.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3904, pruned_loss=0.156, over 3819171.04 frames. ], batch size: 52, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:02:16,670 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2516, 4.8921, 6.0270, 5.6268, 2.0111, 5.5482, 4.8619, 5.3113], + device='cuda:1'), covar=tensor([0.0323, 0.0424, 0.0302, 0.0231, 0.3135, 0.0103, 0.0302, 0.0799], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0299, 0.0405, 0.0301, 0.0438, 0.0200, 0.0280, 0.0411], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 23:02:25,414 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 23:02:52,147 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:11,210 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 23:03:13,777 INFO [train.py:903] (1/4) Epoch 2, batch 5250, loss[loss=0.2866, simple_loss=0.33, pruned_loss=0.1216, over 19317.00 frames. ], tot_loss[loss=0.3502, simple_loss=0.3895, pruned_loss=0.1555, over 3821012.84 frames. 
], batch size: 44, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:03:32,756 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12094.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:55,634 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12112.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:59,693 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.115e+02 8.666e+02 1.057e+03 1.421e+03 4.195e+03, threshold=2.115e+03, percent-clipped=5.0 +2023-03-31 23:04:14,787 INFO [train.py:903] (1/4) Epoch 2, batch 5300, loss[loss=0.3109, simple_loss=0.3681, pruned_loss=0.1269, over 19539.00 frames. ], tot_loss[loss=0.3511, simple_loss=0.3902, pruned_loss=0.156, over 3809327.69 frames. ], batch size: 56, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:04:35,580 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 23:04:58,629 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-31 23:05:00,389 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0179, 5.3797, 2.7802, 4.8208, 0.9301, 5.4256, 5.2249, 5.4666], + device='cuda:1'), covar=tensor([0.0386, 0.0867, 0.1777, 0.0520, 0.3743, 0.0642, 0.0361, 0.0428], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0291, 0.0320, 0.0270, 0.0339, 0.0289, 0.0230, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 23:05:13,792 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12176.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:15,925 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12178.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:16,933 INFO [train.py:903] (1/4) Epoch 2, batch 5350, loss[loss=0.3654, simple_loss=0.4025, pruned_loss=0.1642, over 19113.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3903, pruned_loss=0.1566, over 3817147.78 frames. ], batch size: 69, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:05:53,523 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 23:05:53,656 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12208.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:06:03,422 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 9.122e+02 1.169e+03 1.506e+03 3.477e+03, threshold=2.338e+03, percent-clipped=13.0 +2023-03-31 23:06:20,779 INFO [train.py:903] (1/4) Epoch 2, batch 5400, loss[loss=0.2986, simple_loss=0.3593, pruned_loss=0.1189, over 19658.00 frames. ], tot_loss[loss=0.3523, simple_loss=0.3907, pruned_loss=0.1569, over 3815950.30 frames. ], batch size: 53, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:07:06,441 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12265.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:23,451 INFO [train.py:903] (1/4) Epoch 2, batch 5450, loss[loss=0.355, simple_loss=0.4005, pruned_loss=0.1547, over 19459.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3905, pruned_loss=0.1563, over 3815707.18 frames. 
], batch size: 64, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:07:27,096 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12282.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:36,440 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12290.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:39,770 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:01,552 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12310.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:08,445 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 7.539e+02 9.464e+02 1.137e+03 1.898e+03, threshold=1.893e+03, percent-clipped=0.0 +2023-03-31 23:08:17,872 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12323.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:23,992 INFO [train.py:903] (1/4) Epoch 2, batch 5500, loss[loss=0.3339, simple_loss=0.3779, pruned_loss=0.1449, over 19592.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3909, pruned_loss=0.1562, over 3826392.77 frames. ], batch size: 52, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:08:38,828 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2888, 1.2496, 2.2330, 1.5283, 2.9255, 3.0929, 3.4715, 1.5629], + device='cuda:1'), covar=tensor([0.1345, 0.2164, 0.1191, 0.1249, 0.0972, 0.0829, 0.1190, 0.1965], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0403, 0.0369, 0.0366, 0.0437, 0.0354, 0.0514, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:08:47,596 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12346.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:49,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 23:09:14,242 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12368.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:17,007 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.78 vs. limit=5.0 +2023-03-31 23:09:28,525 INFO [train.py:903] (1/4) Epoch 2, batch 5550, loss[loss=0.3483, simple_loss=0.3927, pruned_loss=0.152, over 19668.00 frames. ], tot_loss[loss=0.3513, simple_loss=0.3906, pruned_loss=0.1559, over 3807306.86 frames. ], batch size: 60, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:09:36,184 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-03-31 23:09:46,135 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12393.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:50,681 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12397.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:05,526 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9750, 4.1536, 4.5615, 4.4506, 1.5220, 3.9676, 3.8092, 4.0349], + device='cuda:1'), covar=tensor([0.0404, 0.0420, 0.0317, 0.0215, 0.2921, 0.0190, 0.0292, 0.0794], + device='cuda:1'), in_proj_covar=tensor([0.0313, 0.0300, 0.0404, 0.0303, 0.0447, 0.0198, 0.0281, 0.0410], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-31 23:10:14,304 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+02 8.577e+02 1.018e+03 1.198e+03 2.956e+03, threshold=2.037e+03, percent-clipped=3.0 +2023-03-31 23:10:25,907 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 23:10:27,073 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:31,331 INFO [train.py:903] (1/4) Epoch 2, batch 5600, loss[loss=0.3757, simple_loss=0.4153, pruned_loss=0.168, over 19091.00 frames. ], tot_loss[loss=0.3507, simple_loss=0.3903, pruned_loss=0.1555, over 3823708.83 frames. ], batch size: 69, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:10:35,294 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:41,859 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12438.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:06,543 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12457.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:12,018 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12461.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:16,296 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12464.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:33,642 INFO [train.py:903] (1/4) Epoch 2, batch 5650, loss[loss=0.309, simple_loss=0.3636, pruned_loss=0.1272, over 19692.00 frames. ], tot_loss[loss=0.3495, simple_loss=0.3891, pruned_loss=0.1549, over 3817243.65 frames. ], batch size: 53, lr: 2.99e-02, grad_scale: 8.0 +2023-03-31 23:12:19,586 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.393e+02 8.152e+02 1.040e+03 1.340e+03 2.595e+03, threshold=2.080e+03, percent-clipped=4.0 +2023-03-31 23:12:21,925 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-03-31 23:12:32,531 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0316, 2.0549, 2.0948, 2.8534, 4.4217, 1.8014, 2.2418, 4.4116], + device='cuda:1'), covar=tensor([0.0172, 0.1909, 0.1830, 0.1047, 0.0292, 0.1626, 0.0964, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0295, 0.0276, 0.0263, 0.0256, 0.0304, 0.0253, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:12:35,558 INFO [train.py:903] (1/4) Epoch 2, batch 5700, loss[loss=0.3586, simple_loss=0.4062, pruned_loss=0.1555, over 19656.00 frames. ], tot_loss[loss=0.3521, simple_loss=0.3907, pruned_loss=0.1568, over 3828561.37 frames. ], batch size: 55, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:12:42,951 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2683, 1.0404, 1.0007, 1.4237, 1.0522, 1.2950, 1.2926, 1.2665], + device='cuda:1'), covar=tensor([0.0845, 0.1327, 0.1438, 0.0889, 0.1198, 0.1018, 0.1117, 0.0900], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0301, 0.0290, 0.0322, 0.0345, 0.0272, 0.0312, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-31 23:13:02,395 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:06,799 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12553.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:32,315 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12574.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:37,679 INFO [train.py:903] (1/4) Epoch 2, batch 5750, loss[loss=0.318, simple_loss=0.3629, pruned_loss=0.1365, over 19754.00 frames. ], tot_loss[loss=0.3499, simple_loss=0.3896, pruned_loss=0.1551, over 3834124.61 frames. ], batch size: 51, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:38,102 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12579.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:42,211 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 23:13:50,271 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 23:13:56,868 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 23:14:10,454 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:14:23,857 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+02 8.083e+02 9.516e+02 1.322e+03 3.330e+03, threshold=1.903e+03, percent-clipped=5.0 +2023-03-31 23:14:40,961 INFO [train.py:903] (1/4) Epoch 2, batch 5800, loss[loss=0.3164, simple_loss=0.36, pruned_loss=0.1364, over 19390.00 frames. ], tot_loss[loss=0.3504, simple_loss=0.39, pruned_loss=0.1554, over 3838139.48 frames. 
], batch size: 48, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:14:58,358 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9394, 2.0069, 1.9747, 2.3993, 2.2266, 2.0663, 1.7329, 2.2372], + device='cuda:1'), covar=tensor([0.0663, 0.1335, 0.0908, 0.0676, 0.0947, 0.0456, 0.0875, 0.0459], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0376, 0.0281, 0.0252, 0.0319, 0.0260, 0.0277, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:15:10,978 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:42,684 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12678.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:43,508 INFO [train.py:903] (1/4) Epoch 2, batch 5850, loss[loss=0.3973, simple_loss=0.4278, pruned_loss=0.1833, over 19594.00 frames. ], tot_loss[loss=0.349, simple_loss=0.389, pruned_loss=0.1545, over 3831931.13 frames. ], batch size: 61, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:46,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12681.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:18,363 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12706.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:30,352 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 7.980e+02 9.827e+02 1.217e+03 2.781e+03, threshold=1.965e+03, percent-clipped=6.0 +2023-03-31 23:16:31,914 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:46,098 INFO [train.py:903] (1/4) Epoch 2, batch 5900, loss[loss=0.3491, simple_loss=0.3993, pruned_loss=0.1495, over 19498.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3895, pruned_loss=0.155, over 3826518.49 frames. ], batch size: 64, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:16:49,455 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 23:17:03,736 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:17:11,481 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 23:17:49,031 INFO [train.py:903] (1/4) Epoch 2, batch 5950, loss[loss=0.3769, simple_loss=0.407, pruned_loss=0.1734, over 17504.00 frames. ], tot_loss[loss=0.3495, simple_loss=0.3895, pruned_loss=0.1547, over 3820425.72 frames. 
], batch size: 101, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:17:52,908 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12781.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:25,412 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12808.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:26,825 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12809.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:34,471 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+02 9.090e+02 1.139e+03 1.452e+03 3.383e+03, threshold=2.279e+03, percent-clipped=8.0 +2023-03-31 23:18:51,784 INFO [train.py:903] (1/4) Epoch 2, batch 6000, loss[loss=0.3455, simple_loss=0.3975, pruned_loss=0.1468, over 19664.00 frames. ], tot_loss[loss=0.3488, simple_loss=0.3893, pruned_loss=0.1542, over 3819598.77 frames. ], batch size: 58, lr: 2.95e-02, grad_scale: 8.0 +2023-03-31 23:18:51,784 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 23:19:06,008 INFO [train.py:937] (1/4) Epoch 2, validation: loss=0.246, simple_loss=0.337, pruned_loss=0.07745, over 944034.00 frames. +2023-03-31 23:19:06,010 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-03-31 23:19:13,340 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:19:43,052 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0099, 1.0412, 1.4043, 1.1046, 1.8663, 1.8041, 1.9400, 0.6292], + device='cuda:1'), covar=tensor([0.1486, 0.2123, 0.1158, 0.1493, 0.0796, 0.1033, 0.0825, 0.1876], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0401, 0.0360, 0.0361, 0.0436, 0.0347, 0.0507, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:20:08,251 INFO [train.py:903] (1/4) Epoch 2, batch 6050, loss[loss=0.3162, simple_loss=0.3681, pruned_loss=0.1322, over 19571.00 frames. ], tot_loss[loss=0.3508, simple_loss=0.3904, pruned_loss=0.1556, over 3825782.21 frames. ], batch size: 52, lr: 2.95e-02, grad_scale: 4.0 +2023-03-31 23:20:56,514 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+02 7.840e+02 9.937e+02 1.323e+03 8.220e+03, threshold=1.987e+03, percent-clipped=9.0 +2023-03-31 23:21:03,484 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12923.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:21:10,258 INFO [train.py:903] (1/4) Epoch 2, batch 6100, loss[loss=0.331, simple_loss=0.3796, pruned_loss=0.1412, over 19666.00 frames. ], tot_loss[loss=0.3489, simple_loss=0.3889, pruned_loss=0.1544, over 3832313.08 frames. ], batch size: 60, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:11,867 INFO [train.py:903] (1/4) Epoch 2, batch 6150, loss[loss=0.3034, simple_loss=0.3603, pruned_loss=0.1233, over 19834.00 frames. ], tot_loss[loss=0.349, simple_loss=0.3888, pruned_loss=0.1546, over 3823884.57 frames. 
], batch size: 52, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:28,848 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5259, 1.1237, 1.1061, 1.7532, 1.2695, 1.6242, 1.5782, 1.3927], + device='cuda:1'), covar=tensor([0.0861, 0.1395, 0.1559, 0.0995, 0.1282, 0.0969, 0.1290, 0.1009], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0305, 0.0294, 0.0327, 0.0345, 0.0274, 0.0311, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-31 23:22:40,948 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2897, 1.2026, 1.8691, 1.5131, 2.4605, 2.5864, 2.7187, 1.1878], + device='cuda:1'), covar=tensor([0.1404, 0.2159, 0.1289, 0.1288, 0.1013, 0.0931, 0.1269, 0.2111], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0400, 0.0368, 0.0367, 0.0442, 0.0353, 0.0519, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:22:42,915 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 23:23:00,772 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+02 7.749e+02 1.029e+03 1.287e+03 3.235e+03, threshold=2.059e+03, percent-clipped=7.0 +2023-03-31 23:23:13,348 INFO [train.py:903] (1/4) Epoch 2, batch 6200, loss[loss=0.3469, simple_loss=0.3867, pruned_loss=0.1535, over 19772.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.3876, pruned_loss=0.1536, over 3830166.36 frames. ], batch size: 54, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:15,393 INFO [train.py:903] (1/4) Epoch 2, batch 6250, loss[loss=0.4393, simple_loss=0.441, pruned_loss=0.2187, over 13248.00 frames. ], tot_loss[loss=0.3463, simple_loss=0.3866, pruned_loss=0.153, over 3826204.49 frames. ], batch size: 136, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:47,225 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 23:25:04,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.358e+02 8.552e+02 1.023e+03 1.333e+03 3.705e+03, threshold=2.046e+03, percent-clipped=2.0 +2023-03-31 23:25:13,038 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13125.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:25:17,708 INFO [train.py:903] (1/4) Epoch 2, batch 6300, loss[loss=0.3248, simple_loss=0.3831, pruned_loss=0.1333, over 19356.00 frames. ], tot_loss[loss=0.3466, simple_loss=0.3866, pruned_loss=0.1533, over 3821485.59 frames. ], batch size: 66, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:19,875 INFO [train.py:903] (1/4) Epoch 2, batch 6350, loss[loss=0.2451, simple_loss=0.3021, pruned_loss=0.09405, over 19712.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3843, pruned_loss=0.1513, over 3807868.44 frames. 
], batch size: 45, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:20,309 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13179.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:26:49,675 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13204.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:27:06,689 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.673e+02 8.539e+02 1.071e+03 1.407e+03 4.202e+03, threshold=2.141e+03, percent-clipped=6.0 +2023-03-31 23:27:19,358 INFO [train.py:903] (1/4) Epoch 2, batch 6400, loss[loss=0.4104, simple_loss=0.4279, pruned_loss=0.1964, over 18732.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.3843, pruned_loss=0.1512, over 3815702.97 frames. ], batch size: 74, lr: 2.92e-02, grad_scale: 8.0 +2023-03-31 23:27:33,749 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13240.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:28:22,168 INFO [train.py:903] (1/4) Epoch 2, batch 6450, loss[loss=0.3485, simple_loss=0.3869, pruned_loss=0.1551, over 19489.00 frames. ], tot_loss[loss=0.342, simple_loss=0.383, pruned_loss=0.1505, over 3823126.46 frames. ], batch size: 64, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:09,605 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 23:29:10,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+02 7.874e+02 9.907e+02 1.203e+03 2.411e+03, threshold=1.981e+03, percent-clipped=4.0 +2023-03-31 23:29:24,102 INFO [train.py:903] (1/4) Epoch 2, batch 6500, loss[loss=0.3001, simple_loss=0.3568, pruned_loss=0.1217, over 19831.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.3843, pruned_loss=0.151, over 3830142.87 frames. ], batch size: 52, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:30,893 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 23:29:41,462 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7703, 1.0888, 1.4072, 1.0463, 2.5292, 3.3891, 3.1653, 3.4654], + device='cuda:1'), covar=tensor([0.1218, 0.2685, 0.2745, 0.2138, 0.0447, 0.0105, 0.0218, 0.0113], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0273, 0.0319, 0.0274, 0.0186, 0.0108, 0.0191, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-31 23:30:26,693 INFO [train.py:903] (1/4) Epoch 2, batch 6550, loss[loss=0.2915, simple_loss=0.344, pruned_loss=0.1195, over 19615.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3829, pruned_loss=0.1497, over 3819299.02 frames. ], batch size: 50, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:31:14,493 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+02 7.924e+02 9.507e+02 1.218e+03 2.525e+03, threshold=1.901e+03, percent-clipped=3.0 +2023-03-31 23:31:27,086 INFO [train.py:903] (1/4) Epoch 2, batch 6600, loss[loss=0.4018, simple_loss=0.4279, pruned_loss=0.1879, over 19522.00 frames. ], tot_loss[loss=0.3426, simple_loss=0.3838, pruned_loss=0.1507, over 3831277.92 frames. ], batch size: 54, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:32:15,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-31 23:32:20,370 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. 
limit=5.0 +2023-03-31 23:32:29,104 INFO [train.py:903] (1/4) Epoch 2, batch 6650, loss[loss=0.3893, simple_loss=0.4181, pruned_loss=0.1803, over 19788.00 frames. ], tot_loss[loss=0.345, simple_loss=0.3854, pruned_loss=0.1523, over 3819750.69 frames. ], batch size: 56, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:32:38,046 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1574, 1.1536, 1.9052, 1.3194, 2.4991, 2.2645, 2.6783, 1.1316], + device='cuda:1'), covar=tensor([0.1468, 0.2303, 0.1257, 0.1316, 0.0827, 0.0975, 0.0963, 0.1917], + device='cuda:1'), in_proj_covar=tensor([0.0382, 0.0415, 0.0375, 0.0369, 0.0440, 0.0359, 0.0525, 0.0391], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:32:50,443 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:17,438 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 9.096e+02 1.181e+03 1.471e+03 3.411e+03, threshold=2.361e+03, percent-clipped=10.0 +2023-03-31 23:33:21,166 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13521.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:29,660 INFO [train.py:903] (1/4) Epoch 2, batch 6700, loss[loss=0.3413, simple_loss=0.3871, pruned_loss=0.1478, over 19516.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3846, pruned_loss=0.1516, over 3831368.93 frames. ], batch size: 64, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:34:27,765 INFO [train.py:903] (1/4) Epoch 2, batch 6750, loss[loss=0.3902, simple_loss=0.4171, pruned_loss=0.1817, over 19597.00 frames. ], tot_loss[loss=0.3447, simple_loss=0.3855, pruned_loss=0.152, over 3826433.34 frames. ], batch size: 61, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:34:33,435 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7641, 1.7062, 2.0097, 2.5482, 4.3106, 1.3798, 2.4671, 4.2390], + device='cuda:1'), covar=tensor([0.0230, 0.2471, 0.2177, 0.1432, 0.0361, 0.2202, 0.0962, 0.0332], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0303, 0.0284, 0.0274, 0.0275, 0.0320, 0.0263, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:35:12,680 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.052e+02 8.174e+02 9.996e+02 1.293e+03 2.664e+03, threshold=1.999e+03, percent-clipped=1.0 +2023-03-31 23:35:25,232 INFO [train.py:903] (1/4) Epoch 2, batch 6800, loss[loss=0.3436, simple_loss=0.398, pruned_loss=0.1446, over 19523.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3873, pruned_loss=0.1533, over 3810698.81 frames. ], batch size: 56, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:36:10,334 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 23:36:11,469 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 23:36:13,723 INFO [train.py:903] (1/4) Epoch 3, batch 0, loss[loss=0.3454, simple_loss=0.3681, pruned_loss=0.1613, over 19750.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3681, pruned_loss=0.1613, over 19750.00 frames. 
], batch size: 46, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:36:13,724 INFO [train.py:928] (1/4) Computing validation loss +2023-03-31 23:36:24,488 INFO [train.py:937] (1/4) Epoch 3, validation: loss=0.241, simple_loss=0.3346, pruned_loss=0.07374, over 944034.00 frames. +2023-03-31 23:36:24,489 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-03-31 23:36:37,424 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 23:37:25,523 INFO [train.py:903] (1/4) Epoch 3, batch 50, loss[loss=0.2776, simple_loss=0.3319, pruned_loss=0.1117, over 19769.00 frames. ], tot_loss[loss=0.3369, simple_loss=0.3794, pruned_loss=0.1472, over 851938.49 frames. ], batch size: 47, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:37:38,309 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 7.787e+02 9.326e+02 1.115e+03 3.182e+03, threshold=1.865e+03, percent-clipped=5.0 +2023-03-31 23:37:49,194 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-31 23:37:58,967 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 23:38:23,959 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13755.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:38:25,858 INFO [train.py:903] (1/4) Epoch 3, batch 100, loss[loss=0.3243, simple_loss=0.3608, pruned_loss=0.1438, over 19777.00 frames. ], tot_loss[loss=0.3369, simple_loss=0.3807, pruned_loss=0.1465, over 1504901.87 frames. ], batch size: 49, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:38:35,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 23:39:27,419 INFO [train.py:903] (1/4) Epoch 3, batch 150, loss[loss=0.2804, simple_loss=0.3342, pruned_loss=0.1133, over 19377.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3844, pruned_loss=0.1496, over 2021395.17 frames. ], batch size: 48, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:39:30,164 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4548, 1.0652, 1.3230, 1.4073, 2.1930, 1.2920, 1.9103, 2.1445], + device='cuda:1'), covar=tensor([0.0418, 0.2347, 0.2045, 0.1277, 0.0534, 0.1368, 0.0705, 0.0574], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0303, 0.0287, 0.0275, 0.0273, 0.0318, 0.0265, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:39:40,067 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+02 7.521e+02 1.009e+03 1.351e+03 3.530e+03, threshold=2.018e+03, percent-clipped=10.0 +2023-03-31 23:40:28,867 INFO [train.py:903] (1/4) Epoch 3, batch 200, loss[loss=0.3951, simple_loss=0.4178, pruned_loss=0.1862, over 19667.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.3824, pruned_loss=0.1477, over 2430099.76 frames. ], batch size: 58, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:40:28,910 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 23:41:28,987 INFO [train.py:903] (1/4) Epoch 3, batch 250, loss[loss=0.2986, simple_loss=0.3544, pruned_loss=0.1214, over 19683.00 frames. ], tot_loss[loss=0.3431, simple_loss=0.385, pruned_loss=0.1506, over 2737408.59 frames. 
], batch size: 53, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:41:44,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+02 8.729e+02 1.056e+03 1.304e+03 3.760e+03, threshold=2.113e+03, percent-clipped=6.0 +2023-03-31 23:41:49,459 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-31 23:42:33,073 INFO [train.py:903] (1/4) Epoch 3, batch 300, loss[loss=0.3195, simple_loss=0.3554, pruned_loss=0.1418, over 19785.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3828, pruned_loss=0.1488, over 2985583.95 frames. ], batch size: 47, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:43:34,497 INFO [train.py:903] (1/4) Epoch 3, batch 350, loss[loss=0.3078, simple_loss=0.3582, pruned_loss=0.1287, over 19670.00 frames. ], tot_loss[loss=0.3388, simple_loss=0.3812, pruned_loss=0.1482, over 3170047.03 frames. ], batch size: 53, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:43:40,034 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:43:46,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 7.628e+02 9.853e+02 1.217e+03 3.369e+03, threshold=1.971e+03, percent-clipped=3.0 +2023-03-31 23:44:34,932 INFO [train.py:903] (1/4) Epoch 3, batch 400, loss[loss=0.3771, simple_loss=0.4193, pruned_loss=0.1675, over 19525.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3815, pruned_loss=0.1484, over 3323361.82 frames. ], batch size: 56, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:45:17,155 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14090.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:22,985 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2317, 1.2421, 1.8648, 1.4080, 2.5629, 2.3651, 2.9305, 1.2163], + device='cuda:1'), covar=tensor([0.1177, 0.1781, 0.1002, 0.1042, 0.0824, 0.0835, 0.0974, 0.1733], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0417, 0.0385, 0.0374, 0.0455, 0.0366, 0.0536, 0.0393], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:45:27,294 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14099.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:36,305 INFO [train.py:903] (1/4) Epoch 3, batch 450, loss[loss=0.3416, simple_loss=0.3846, pruned_loss=0.1493, over 19348.00 frames. ], tot_loss[loss=0.3368, simple_loss=0.3801, pruned_loss=0.1467, over 3438705.05 frames. ], batch size: 66, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:45:52,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+02 8.213e+02 1.015e+03 1.206e+03 3.609e+03, threshold=2.029e+03, percent-clipped=6.0 +2023-03-31 23:45:55,489 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.84 vs. 
limit=5.0 +2023-03-31 23:45:55,957 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14121.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:59,453 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0259, 1.0639, 1.8308, 1.3924, 2.4512, 2.0279, 2.6486, 1.0108], + device='cuda:1'), covar=tensor([0.1537, 0.2308, 0.1146, 0.1258, 0.0904, 0.1112, 0.1063, 0.2062], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0420, 0.0385, 0.0370, 0.0455, 0.0368, 0.0533, 0.0391], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:46:10,162 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 23:46:11,152 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 23:46:14,946 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14138.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:46:38,894 INFO [train.py:903] (1/4) Epoch 3, batch 500, loss[loss=0.3308, simple_loss=0.3658, pruned_loss=0.1479, over 19674.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3815, pruned_loss=0.1484, over 3524044.84 frames. ], batch size: 53, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:47:38,989 INFO [train.py:903] (1/4) Epoch 3, batch 550, loss[loss=0.3097, simple_loss=0.3533, pruned_loss=0.133, over 19341.00 frames. ], tot_loss[loss=0.339, simple_loss=0.3815, pruned_loss=0.1482, over 3594694.30 frames. ], batch size: 47, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:47:47,487 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14214.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:47:51,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+02 8.063e+02 9.949e+02 1.307e+03 2.222e+03, threshold=1.990e+03, percent-clipped=3.0 +2023-03-31 23:48:38,812 INFO [train.py:903] (1/4) Epoch 3, batch 600, loss[loss=0.3169, simple_loss=0.365, pruned_loss=0.1344, over 19735.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3816, pruned_loss=0.1479, over 3643562.24 frames. ], batch size: 51, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:16,712 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 23:49:39,181 INFO [train.py:903] (1/4) Epoch 3, batch 650, loss[loss=0.339, simple_loss=0.3814, pruned_loss=0.1483, over 19780.00 frames. ], tot_loss[loss=0.3379, simple_loss=0.3812, pruned_loss=0.1473, over 3686016.46 frames. ], batch size: 54, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:48,981 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.91 vs. 
limit=5.0 +2023-03-31 23:49:54,623 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.103e+02 8.257e+02 1.098e+03 1.322e+03 3.191e+03, threshold=2.197e+03, percent-clipped=10.0 +2023-03-31 23:50:04,329 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9423, 1.4263, 1.3656, 1.8465, 1.6435, 1.6024, 1.5070, 1.6622], + device='cuda:1'), covar=tensor([0.0645, 0.1345, 0.1172, 0.0703, 0.0901, 0.0502, 0.0814, 0.0636], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0380, 0.0290, 0.0255, 0.0321, 0.0271, 0.0278, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-31 23:50:41,498 INFO [train.py:903] (1/4) Epoch 3, batch 700, loss[loss=0.3101, simple_loss=0.3556, pruned_loss=0.1323, over 19721.00 frames. ], tot_loss[loss=0.3369, simple_loss=0.3801, pruned_loss=0.1468, over 3720611.88 frames. ], batch size: 51, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:43,784 INFO [train.py:903] (1/4) Epoch 3, batch 750, loss[loss=0.295, simple_loss=0.3421, pruned_loss=0.1239, over 19467.00 frames. ], tot_loss[loss=0.3365, simple_loss=0.3801, pruned_loss=0.1465, over 3735242.79 frames. ], batch size: 49, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:56,450 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.798e+02 9.551e+02 1.191e+03 2.807e+03, threshold=1.910e+03, percent-clipped=6.0 +2023-03-31 23:52:11,652 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14431.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:14,915 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14434.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:44,747 INFO [train.py:903] (1/4) Epoch 3, batch 800, loss[loss=0.3176, simple_loss=0.3644, pruned_loss=0.1354, over 19607.00 frames. ], tot_loss[loss=0.3365, simple_loss=0.3803, pruned_loss=0.1464, over 3754242.35 frames. ], batch size: 50, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:52:53,415 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-31 23:52:53,873 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14465.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:54,933 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-03-31 23:52:59,786 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14470.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:04,268 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.7696, 1.2694, 0.9794, 0.8527, 1.1828, 0.8195, 0.2312, 1.0989], + device='cuda:1'), covar=tensor([0.0634, 0.0553, 0.0984, 0.0591, 0.0452, 0.1172, 0.1019, 0.0519], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0219, 0.0310, 0.0254, 0.0204, 0.0316, 0.0271, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-31 23:53:15,249 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14482.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:53:30,978 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14495.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:42,697 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14505.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:44,740 INFO [train.py:903] (1/4) Epoch 3, batch 850, loss[loss=0.4422, simple_loss=0.4436, pruned_loss=0.2204, over 12994.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3819, pruned_loss=0.1484, over 3756727.42 frames. ], batch size: 136, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:53:58,374 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.292e+02 8.633e+02 1.105e+03 1.534e+03 3.114e+03, threshold=2.210e+03, percent-clipped=11.0 +2023-03-31 23:54:11,047 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1524, 1.2338, 1.3716, 1.9830, 2.6469, 1.8429, 1.9766, 2.6298], + device='cuda:1'), covar=tensor([0.0463, 0.2818, 0.2328, 0.1311, 0.0661, 0.1639, 0.1068, 0.0629], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0302, 0.0290, 0.0277, 0.0270, 0.0319, 0.0260, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:54:32,152 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 23:54:32,565 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7572, 1.8262, 1.3286, 1.3851, 1.2650, 1.3859, 0.1991, 0.6977], + device='cuda:1'), covar=tensor([0.0223, 0.0212, 0.0154, 0.0162, 0.0483, 0.0230, 0.0427, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0238, 0.0239, 0.0251, 0.0311, 0.0265, 0.0255, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-31 23:54:35,854 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:54:45,564 INFO [train.py:903] (1/4) Epoch 3, batch 900, loss[loss=0.3627, simple_loss=0.3978, pruned_loss=0.1638, over 13821.00 frames. ], tot_loss[loss=0.3396, simple_loss=0.3826, pruned_loss=0.1482, over 3763109.74 frames. 
], batch size: 136, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:55:15,227 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14580.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:55:33,859 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14597.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:55:46,731 INFO [train.py:903] (1/4) Epoch 3, batch 950, loss[loss=0.3312, simple_loss=0.3874, pruned_loss=0.1375, over 19516.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3816, pruned_loss=0.1466, over 3784060.85 frames. ], batch size: 64, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:55:46,741 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 23:56:00,994 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+02 7.381e+02 9.246e+02 1.263e+03 4.500e+03, threshold=1.849e+03, percent-clipped=5.0 +2023-03-31 23:56:46,894 INFO [train.py:903] (1/4) Epoch 3, batch 1000, loss[loss=0.3293, simple_loss=0.3857, pruned_loss=0.1365, over 19516.00 frames. ], tot_loss[loss=0.3363, simple_loss=0.3805, pruned_loss=0.146, over 3795784.32 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:57:10,651 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9301, 1.8503, 1.8079, 2.6833, 1.7538, 2.5799, 2.3473, 1.8398], + device='cuda:1'), covar=tensor([0.0790, 0.0643, 0.0422, 0.0337, 0.0730, 0.0205, 0.0649, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0415, 0.0390, 0.0397, 0.0517, 0.0468, 0.0301, 0.0489, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-31 23:57:38,691 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 23:57:47,677 INFO [train.py:903] (1/4) Epoch 3, batch 1050, loss[loss=0.3786, simple_loss=0.4227, pruned_loss=0.1673, over 19372.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.3797, pruned_loss=0.1454, over 3805689.55 frames. ], batch size: 66, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:01,062 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.305e+02 8.951e+02 1.118e+03 2.421e+03, threshold=1.790e+03, percent-clipped=2.0 +2023-03-31 23:58:17,596 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 23:58:48,367 INFO [train.py:903] (1/4) Epoch 3, batch 1100, loss[loss=0.2907, simple_loss=0.3335, pruned_loss=0.124, over 19745.00 frames. ], tot_loss[loss=0.3351, simple_loss=0.3793, pruned_loss=0.1455, over 3791513.54 frames. ], batch size: 45, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:49,809 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14758.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:10,910 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14775.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:11,733 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-03-31 23:59:47,759 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14805.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:49,456 INFO [train.py:903] (1/4) Epoch 3, batch 1150, loss[loss=0.3836, simple_loss=0.4126, pruned_loss=0.1774, over 17274.00 frames. 
], tot_loss[loss=0.3351, simple_loss=0.3793, pruned_loss=0.1454, over 3785701.18 frames. ], batch size: 101, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:59:50,857 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2096, 2.8857, 1.7252, 2.6711, 0.9699, 2.7815, 2.5963, 2.7400], + device='cuda:1'), covar=tensor([0.0986, 0.1700, 0.2254, 0.0948, 0.3899, 0.1209, 0.0858, 0.1112], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0286, 0.0318, 0.0260, 0.0338, 0.0284, 0.0233, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 00:00:03,474 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-01 00:00:03,884 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+02 7.465e+02 1.021e+03 1.238e+03 3.548e+03, threshold=2.043e+03, percent-clipped=7.0 +2023-04-01 00:00:16,791 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:23,624 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:40,912 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:45,704 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:00:49,934 INFO [train.py:903] (1/4) Epoch 3, batch 1200, loss[loss=0.2988, simple_loss=0.3439, pruned_loss=0.1269, over 19361.00 frames. ], tot_loss[loss=0.3337, simple_loss=0.3784, pruned_loss=0.1446, over 3798740.41 frames. ], batch size: 47, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:00:55,788 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:15,108 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14878.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:01:17,848 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 00:01:31,260 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14890.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:51,491 INFO [train.py:903] (1/4) Epoch 3, batch 1250, loss[loss=0.2622, simple_loss=0.3134, pruned_loss=0.1055, over 19757.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3764, pruned_loss=0.1432, over 3811993.77 frames. ], batch size: 47, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:02:05,938 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+02 7.701e+02 1.002e+03 1.250e+03 2.941e+03, threshold=2.004e+03, percent-clipped=3.0 +2023-04-01 00:02:26,242 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.06 vs. limit=5.0 +2023-04-01 00:02:53,118 INFO [train.py:903] (1/4) Epoch 3, batch 1300, loss[loss=0.3276, simple_loss=0.3775, pruned_loss=0.1389, over 19378.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.3752, pruned_loss=0.1422, over 3820195.41 frames. 
], batch size: 70, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:02,025 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:03:54,923 INFO [train.py:903] (1/4) Epoch 3, batch 1350, loss[loss=0.3226, simple_loss=0.373, pruned_loss=0.1361, over 19664.00 frames. ], tot_loss[loss=0.3323, simple_loss=0.3767, pruned_loss=0.144, over 3817698.31 frames. ], batch size: 58, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:04:10,656 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+02 8.351e+02 9.883e+02 1.225e+03 3.360e+03, threshold=1.977e+03, percent-clipped=2.0 +2023-04-01 00:04:57,762 INFO [train.py:903] (1/4) Epoch 3, batch 1400, loss[loss=0.3939, simple_loss=0.4199, pruned_loss=0.184, over 19585.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.376, pruned_loss=0.1432, over 3829194.48 frames. ], batch size: 61, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:05:53,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15102.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:05:59,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 00:06:00,262 INFO [train.py:903] (1/4) Epoch 3, batch 1450, loss[loss=0.3139, simple_loss=0.3702, pruned_loss=0.1288, over 19528.00 frames. ], tot_loss[loss=0.3304, simple_loss=0.3758, pruned_loss=0.1425, over 3834532.92 frames. ], batch size: 54, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:06:13,780 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+02 7.923e+02 9.351e+02 1.150e+03 2.880e+03, threshold=1.870e+03, percent-clipped=3.0 +2023-04-01 00:06:49,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:07:01,532 INFO [train.py:903] (1/4) Epoch 3, batch 1500, loss[loss=0.3339, simple_loss=0.3888, pruned_loss=0.1395, over 18942.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3751, pruned_loss=0.1422, over 3832009.79 frames. ], batch size: 74, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:07:20,416 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15171.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:07:26,939 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6993, 4.1773, 2.4182, 3.8103, 1.5009, 3.9184, 3.7927, 4.0190], + device='cuda:1'), covar=tensor([0.0469, 0.1043, 0.1959, 0.0649, 0.3580, 0.0815, 0.0624, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0310, 0.0293, 0.0326, 0.0273, 0.0352, 0.0291, 0.0242, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 00:07:44,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-01 00:08:03,684 INFO [train.py:903] (1/4) Epoch 3, batch 1550, loss[loss=0.2849, simple_loss=0.3447, pruned_loss=0.1125, over 19596.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3761, pruned_loss=0.1425, over 3833591.87 frames. 
], batch size: 52, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:08:12,780 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:18,377 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:20,276 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.825e+02 9.432e+02 1.149e+03 3.008e+03, threshold=1.886e+03, percent-clipped=3.0 +2023-04-01 00:08:21,906 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:52,246 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:09:08,811 INFO [train.py:903] (1/4) Epoch 3, batch 1600, loss[loss=0.3683, simple_loss=0.4177, pruned_loss=0.1595, over 18768.00 frames. ], tot_loss[loss=0.3304, simple_loss=0.3761, pruned_loss=0.1423, over 3835043.00 frames. ], batch size: 74, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:09:32,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 00:10:10,174 INFO [train.py:903] (1/4) Epoch 3, batch 1650, loss[loss=0.3935, simple_loss=0.4184, pruned_loss=0.1843, over 18835.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3759, pruned_loss=0.1424, over 3827688.63 frames. ], batch size: 74, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:10:24,984 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+02 8.096e+02 9.310e+02 1.117e+03 2.889e+03, threshold=1.862e+03, percent-clipped=6.0 +2023-04-01 00:10:43,139 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6086, 1.6095, 1.4429, 2.1996, 1.5284, 1.9698, 1.8169, 1.1897], + device='cuda:1'), covar=tensor([0.1271, 0.1061, 0.0885, 0.0517, 0.1017, 0.0382, 0.1407, 0.1305], + device='cuda:1'), in_proj_covar=tensor([0.0430, 0.0404, 0.0411, 0.0544, 0.0480, 0.0314, 0.0505, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:11:11,888 INFO [train.py:903] (1/4) Epoch 3, batch 1700, loss[loss=0.3324, simple_loss=0.3772, pruned_loss=0.1438, over 19633.00 frames. ], tot_loss[loss=0.3311, simple_loss=0.3766, pruned_loss=0.1429, over 3822721.07 frames. ], batch size: 61, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:11:50,341 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 00:12:13,187 INFO [train.py:903] (1/4) Epoch 3, batch 1750, loss[loss=0.3039, simple_loss=0.3479, pruned_loss=0.1299, over 19398.00 frames. ], tot_loss[loss=0.3302, simple_loss=0.3758, pruned_loss=0.1423, over 3825187.76 frames. ], batch size: 48, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:12:26,539 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. limit=5.0 +2023-04-01 00:12:30,188 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+02 8.359e+02 1.065e+03 1.276e+03 4.198e+03, threshold=2.129e+03, percent-clipped=6.0 +2023-04-01 00:13:17,301 INFO [train.py:903] (1/4) Epoch 3, batch 1800, loss[loss=0.3141, simple_loss=0.3596, pruned_loss=0.1343, over 19861.00 frames. ], tot_loss[loss=0.3301, simple_loss=0.3759, pruned_loss=0.1422, over 3834180.37 frames. 
], batch size: 52, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:13:37,270 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15473.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:07,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:14,379 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 00:14:17,659 INFO [train.py:903] (1/4) Epoch 3, batch 1850, loss[loss=0.3001, simple_loss=0.3538, pruned_loss=0.1232, over 19733.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3749, pruned_loss=0.1413, over 3828128.11 frames. ], batch size: 51, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:14:32,117 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+02 7.448e+02 9.407e+02 1.169e+03 2.273e+03, threshold=1.881e+03, percent-clipped=1.0 +2023-04-01 00:14:50,378 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 00:15:18,051 INFO [train.py:903] (1/4) Epoch 3, batch 1900, loss[loss=0.3159, simple_loss=0.3506, pruned_loss=0.1406, over 19311.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3749, pruned_loss=0.1412, over 3824482.57 frames. ], batch size: 44, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:15:18,208 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:15:22,371 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 00:15:36,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 00:15:42,573 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 00:16:04,587 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 00:16:19,662 INFO [train.py:903] (1/4) Epoch 3, batch 1950, loss[loss=0.3107, simple_loss=0.3456, pruned_loss=0.1378, over 19737.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.375, pruned_loss=0.141, over 3821348.58 frames. ], batch size: 48, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:16:20,354 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 00:16:36,885 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+02 7.789e+02 9.617e+02 1.296e+03 2.448e+03, threshold=1.923e+03, percent-clipped=3.0 +2023-04-01 00:17:10,323 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 00:17:22,855 INFO [train.py:903] (1/4) Epoch 3, batch 2000, loss[loss=0.3615, simple_loss=0.3982, pruned_loss=0.1623, over 18120.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3743, pruned_loss=0.1403, over 3831245.00 frames. ], batch size: 83, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:17:41,420 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:18:20,124 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-01 00:18:21,670 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6790, 1.4307, 1.2713, 1.7643, 1.4933, 1.5796, 1.5022, 1.7593], + device='cuda:1'), covar=tensor([0.0990, 0.1806, 0.1578, 0.0889, 0.1262, 0.0579, 0.0993, 0.0679], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0388, 0.0289, 0.0257, 0.0324, 0.0263, 0.0276, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:18:23,586 INFO [train.py:903] (1/4) Epoch 3, batch 2050, loss[loss=0.3757, simple_loss=0.4182, pruned_loss=0.1666, over 19110.00 frames. ], tot_loss[loss=0.3293, simple_loss=0.3757, pruned_loss=0.1415, over 3828269.98 frames. ], batch size: 69, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:18:38,231 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+02 7.410e+02 9.269e+02 1.172e+03 2.915e+03, threshold=1.854e+03, percent-clipped=8.0 +2023-04-01 00:18:38,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 00:18:39,623 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 00:18:57,731 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9533, 4.0645, 4.5110, 4.4610, 1.5328, 4.1066, 3.7047, 4.0337], + device='cuda:1'), covar=tensor([0.0487, 0.0495, 0.0422, 0.0289, 0.3267, 0.0230, 0.0362, 0.0870], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0335, 0.0460, 0.0346, 0.0474, 0.0238, 0.0305, 0.0439], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 00:18:59,542 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 00:19:25,007 INFO [train.py:903] (1/4) Epoch 3, batch 2100, loss[loss=0.3193, simple_loss=0.3734, pruned_loss=0.1326, over 19382.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3747, pruned_loss=0.1409, over 3819442.19 frames. ], batch size: 70, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:19:42,757 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0836, 1.9843, 1.8905, 3.2020, 2.3488, 3.6515, 3.1291, 1.8200], + device='cuda:1'), covar=tensor([0.1043, 0.0800, 0.0483, 0.0515, 0.0874, 0.0185, 0.0610, 0.0740], + device='cuda:1'), in_proj_covar=tensor([0.0440, 0.0409, 0.0418, 0.0563, 0.0493, 0.0328, 0.0520, 0.0420], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:19:52,237 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 00:20:13,817 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 00:20:25,853 INFO [train.py:903] (1/4) Epoch 3, batch 2150, loss[loss=0.3667, simple_loss=0.4134, pruned_loss=0.16, over 18152.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3739, pruned_loss=0.1401, over 3824485.06 frames. 
], batch size: 83, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:20:42,350 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 7.356e+02 9.022e+02 1.284e+03 2.686e+03, threshold=1.804e+03, percent-clipped=4.0 +2023-04-01 00:21:28,850 INFO [train.py:903] (1/4) Epoch 3, batch 2200, loss[loss=0.3022, simple_loss=0.3606, pruned_loss=0.1219, over 19752.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3739, pruned_loss=0.1406, over 3840141.38 frames. ], batch size: 63, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:23,678 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:22:30,393 INFO [train.py:903] (1/4) Epoch 3, batch 2250, loss[loss=0.2643, simple_loss=0.3244, pruned_loss=0.1022, over 19790.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3735, pruned_loss=0.1402, over 3853202.84 frames. ], batch size: 49, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:33,905 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1693, 1.0338, 1.3063, 1.2491, 1.9623, 1.0798, 1.5436, 1.9104], + device='cuda:1'), covar=tensor([0.0465, 0.2196, 0.1930, 0.1261, 0.0602, 0.1624, 0.0995, 0.0569], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0300, 0.0295, 0.0278, 0.0280, 0.0322, 0.0266, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 00:22:44,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+02 7.012e+02 9.226e+02 1.146e+03 2.721e+03, threshold=1.845e+03, percent-clipped=4.0 +2023-04-01 00:22:55,678 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:26,930 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:31,837 INFO [train.py:903] (1/4) Epoch 3, batch 2300, loss[loss=0.4015, simple_loss=0.4273, pruned_loss=0.1878, over 19596.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3735, pruned_loss=0.1399, over 3852004.60 frames. ], batch size: 57, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:23:44,547 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 00:24:33,426 INFO [train.py:903] (1/4) Epoch 3, batch 2350, loss[loss=0.3722, simple_loss=0.4087, pruned_loss=0.1678, over 19528.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3748, pruned_loss=0.1413, over 3826141.89 frames. 
], batch size: 56, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:24:48,804 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 7.519e+02 9.138e+02 1.115e+03 3.205e+03, threshold=1.828e+03, percent-clipped=8.0 +2023-04-01 00:24:49,967 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:25:08,174 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6368, 1.5907, 1.7936, 1.5720, 2.7697, 4.1386, 4.2287, 4.5375], + device='cuda:1'), covar=tensor([0.1451, 0.2478, 0.2719, 0.1784, 0.0501, 0.0126, 0.0137, 0.0083], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0281, 0.0330, 0.0276, 0.0196, 0.0106, 0.0197, 0.0115], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 00:25:15,413 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 00:25:31,127 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 00:25:34,316 INFO [train.py:903] (1/4) Epoch 3, batch 2400, loss[loss=0.3832, simple_loss=0.4151, pruned_loss=0.1757, over 19593.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3756, pruned_loss=0.1419, over 3829538.60 frames. ], batch size: 57, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:33,067 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:26:35,417 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8221, 0.8758, 1.4224, 1.2446, 2.6541, 3.3708, 3.3416, 3.7508], + device='cuda:1'), covar=tensor([0.1413, 0.3964, 0.3883, 0.2315, 0.0510, 0.0171, 0.0259, 0.0126], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0278, 0.0325, 0.0271, 0.0194, 0.0102, 0.0194, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 00:26:37,322 INFO [train.py:903] (1/4) Epoch 3, batch 2450, loss[loss=0.2454, simple_loss=0.3073, pruned_loss=0.0917, over 19746.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3744, pruned_loss=0.141, over 3841817.54 frames. ], batch size: 46, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:51,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+02 8.374e+02 9.822e+02 1.305e+03 3.634e+03, threshold=1.964e+03, percent-clipped=9.0 +2023-04-01 00:26:58,746 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6405, 4.1530, 2.3386, 3.7266, 1.1975, 4.0474, 3.7757, 3.9777], + device='cuda:1'), covar=tensor([0.0508, 0.1166, 0.1793, 0.0582, 0.3375, 0.0644, 0.0549, 0.0605], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0291, 0.0319, 0.0258, 0.0335, 0.0283, 0.0237, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 00:27:38,194 INFO [train.py:903] (1/4) Epoch 3, batch 2500, loss[loss=0.4078, simple_loss=0.4351, pruned_loss=0.1902, over 19387.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3742, pruned_loss=0.1404, over 3845912.33 frames. 
], batch size: 70, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:27:50,910 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:28:07,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-01 00:28:40,050 INFO [train.py:903] (1/4) Epoch 3, batch 2550, loss[loss=0.3163, simple_loss=0.3743, pruned_loss=0.1291, over 19658.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3739, pruned_loss=0.1408, over 3848091.29 frames. ], batch size: 53, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:28:56,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+02 7.641e+02 9.209e+02 1.283e+03 2.881e+03, threshold=1.842e+03, percent-clipped=1.0 +2023-04-01 00:29:06,035 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2187, 1.1855, 1.8107, 1.3537, 2.3544, 2.3201, 2.5643, 0.8384], + device='cuda:1'), covar=tensor([0.1376, 0.2231, 0.1150, 0.1245, 0.0877, 0.0978, 0.1044, 0.2146], + device='cuda:1'), in_proj_covar=tensor([0.0405, 0.0439, 0.0407, 0.0385, 0.0471, 0.0394, 0.0564, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:29:09,277 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6578, 3.9261, 4.2570, 4.1272, 1.4082, 3.8081, 3.5090, 3.7527], + device='cuda:1'), covar=tensor([0.0682, 0.0588, 0.0475, 0.0361, 0.3571, 0.0286, 0.0444, 0.1060], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0335, 0.0454, 0.0344, 0.0470, 0.0241, 0.0297, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 00:29:28,549 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:29:36,513 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 00:29:44,017 INFO [train.py:903] (1/4) Epoch 3, batch 2600, loss[loss=0.3781, simple_loss=0.4151, pruned_loss=0.1706, over 19589.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3738, pruned_loss=0.141, over 3837532.41 frames. ], batch size: 61, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:30:07,653 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 00:30:46,335 INFO [train.py:903] (1/4) Epoch 3, batch 2650, loss[loss=0.3297, simple_loss=0.3801, pruned_loss=0.1396, over 19784.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3719, pruned_loss=0.1389, over 3848407.34 frames. ], batch size: 56, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:00,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+02 7.570e+02 8.863e+02 1.283e+03 4.568e+03, threshold=1.773e+03, percent-clipped=9.0 +2023-04-01 00:31:06,931 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 00:31:29,695 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 00:31:44,569 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 00:31:47,142 INFO [train.py:903] (1/4) Epoch 3, batch 2700, loss[loss=0.3083, simple_loss=0.3436, pruned_loss=0.1365, over 19747.00 frames. 
], tot_loss[loss=0.3253, simple_loss=0.3722, pruned_loss=0.1392, over 3832131.03 frames. ], batch size: 46, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:51,946 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:31:54,875 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:26,182 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16389.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:47,343 INFO [train.py:903] (1/4) Epoch 3, batch 2750, loss[loss=0.3091, simple_loss=0.355, pruned_loss=0.1316, over 19580.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3729, pruned_loss=0.14, over 3827579.19 frames. ], batch size: 52, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:33:01,693 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.986e+02 7.672e+02 9.838e+02 1.209e+03 2.463e+03, threshold=1.968e+03, percent-clipped=5.0 +2023-04-01 00:33:07,380 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3024, 3.6774, 3.9698, 4.0565, 1.5218, 3.6871, 3.4288, 3.2836], + device='cuda:1'), covar=tensor([0.1127, 0.0801, 0.0727, 0.0685, 0.4097, 0.0534, 0.0563, 0.1490], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0332, 0.0463, 0.0346, 0.0476, 0.0242, 0.0299, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 00:33:07,472 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6593, 1.1726, 1.3892, 1.1204, 2.7075, 3.3290, 3.1124, 3.5715], + device='cuda:1'), covar=tensor([0.1131, 0.2611, 0.2681, 0.1869, 0.0407, 0.0106, 0.0209, 0.0081], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0277, 0.0324, 0.0269, 0.0195, 0.0104, 0.0194, 0.0109], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 00:33:35,972 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:33:46,965 INFO [train.py:903] (1/4) Epoch 3, batch 2800, loss[loss=0.3267, simple_loss=0.3646, pruned_loss=0.1444, over 19420.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3731, pruned_loss=0.1397, over 3822615.84 frames. ], batch size: 48, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:13,547 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:34:48,737 INFO [train.py:903] (1/4) Epoch 3, batch 2850, loss[loss=0.3162, simple_loss=0.3745, pruned_loss=0.129, over 19689.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3744, pruned_loss=0.1402, over 3827921.04 frames. ], batch size: 59, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:54,288 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16511.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:35:03,282 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+02 7.862e+02 1.093e+03 1.433e+03 3.382e+03, threshold=2.185e+03, percent-clipped=3.0 +2023-04-01 00:35:47,689 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 00:35:49,902 INFO [train.py:903] (1/4) Epoch 3, batch 2900, loss[loss=0.4034, simple_loss=0.4312, pruned_loss=0.1877, over 19107.00 frames. 
], tot_loss[loss=0.3298, simple_loss=0.3758, pruned_loss=0.1419, over 3826501.24 frames. ], batch size: 69, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:35:57,104 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16562.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:36:44,347 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-01 00:36:51,759 INFO [train.py:903] (1/4) Epoch 3, batch 2950, loss[loss=0.2873, simple_loss=0.3412, pruned_loss=0.1167, over 19385.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3741, pruned_loss=0.1405, over 3829283.69 frames. ], batch size: 48, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:02,809 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16616.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:07,332 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 7.271e+02 9.405e+02 1.170e+03 2.853e+03, threshold=1.881e+03, percent-clipped=4.0 +2023-04-01 00:37:15,841 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:33,959 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:52,195 INFO [train.py:903] (1/4) Epoch 3, batch 3000, loss[loss=0.3392, simple_loss=0.3805, pruned_loss=0.1489, over 19771.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3739, pruned_loss=0.1402, over 3839817.43 frames. ], batch size: 54, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:52,195 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 00:38:05,259 INFO [train.py:937] (1/4) Epoch 3, validation: loss=0.231, simple_loss=0.3246, pruned_loss=0.06867, over 944034.00 frames. +2023-04-01 00:38:05,260 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 00:38:08,696 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 00:39:07,682 INFO [train.py:903] (1/4) Epoch 3, batch 3050, loss[loss=0.2986, simple_loss=0.3408, pruned_loss=0.1282, over 19783.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3733, pruned_loss=0.1396, over 3831696.85 frames. ], batch size: 48, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:39:22,588 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+02 7.860e+02 1.014e+03 1.267e+03 1.851e+03, threshold=2.027e+03, percent-clipped=0.0 +2023-04-01 00:39:38,693 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16733.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:39:40,011 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:08,118 INFO [train.py:903] (1/4) Epoch 3, batch 3100, loss[loss=0.3024, simple_loss=0.355, pruned_loss=0.1249, over 19490.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3739, pruned_loss=0.1401, over 3816871.23 frames. ], batch size: 49, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:40:11,518 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:08,250 INFO [train.py:903] (1/4) Epoch 3, batch 3150, loss[loss=0.2687, simple_loss=0.3209, pruned_loss=0.1083, over 18631.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3743, pruned_loss=0.1405, over 3819184.53 frames. 
], batch size: 41, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:41:10,839 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2341, 1.2254, 1.6091, 1.1068, 2.6100, 3.3164, 3.2958, 3.5833], + device='cuda:1'), covar=tensor([0.1385, 0.2503, 0.2478, 0.1910, 0.0436, 0.0106, 0.0201, 0.0082], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0279, 0.0329, 0.0270, 0.0195, 0.0106, 0.0201, 0.0112], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 00:41:21,861 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16818.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:22,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.096e+02 7.784e+02 9.820e+02 1.270e+03 2.923e+03, threshold=1.964e+03, percent-clipped=2.0 +2023-04-01 00:41:31,804 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 00:41:51,889 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:57,390 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:07,316 INFO [train.py:903] (1/4) Epoch 3, batch 3200, loss[loss=0.3348, simple_loss=0.3826, pruned_loss=0.1435, over 18839.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3758, pruned_loss=0.1416, over 3822176.05 frames. ], batch size: 74, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:42:39,246 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16882.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:43,594 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16886.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:43:08,573 INFO [train.py:903] (1/4) Epoch 3, batch 3250, loss[loss=0.3933, simple_loss=0.4288, pruned_loss=0.1789, over 19326.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3753, pruned_loss=0.1411, over 3830570.41 frames. ], batch size: 66, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:43:08,971 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:43:24,381 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+02 8.507e+02 1.021e+03 1.288e+03 2.328e+03, threshold=2.042e+03, percent-clipped=1.0 +2023-04-01 00:43:29,328 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:44:06,547 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9867, 1.8577, 1.6804, 2.8155, 1.8435, 2.9180, 2.4595, 1.9097], + device='cuda:1'), covar=tensor([0.0971, 0.0792, 0.0492, 0.0390, 0.0860, 0.0227, 0.0774, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0450, 0.0429, 0.0428, 0.0567, 0.0506, 0.0340, 0.0527, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:44:09,581 INFO [train.py:903] (1/4) Epoch 3, batch 3300, loss[loss=0.3274, simple_loss=0.3816, pruned_loss=0.1366, over 18687.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3743, pruned_loss=0.1403, over 3843963.95 frames. 
], batch size: 74, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:44:16,139 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 00:45:10,104 INFO [train.py:903] (1/4) Epoch 3, batch 3350, loss[loss=0.2926, simple_loss=0.3563, pruned_loss=0.1145, over 19680.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.373, pruned_loss=0.1394, over 3833387.96 frames. ], batch size: 58, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:45:24,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+02 7.847e+02 9.419e+02 1.175e+03 3.710e+03, threshold=1.884e+03, percent-clipped=5.0 +2023-04-01 00:45:31,276 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7691, 4.1065, 4.4380, 4.3308, 1.6384, 3.8687, 3.5832, 3.8688], + device='cuda:1'), covar=tensor([0.0599, 0.0555, 0.0498, 0.0381, 0.3461, 0.0346, 0.0428, 0.1095], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0345, 0.0485, 0.0368, 0.0494, 0.0252, 0.0314, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 00:45:42,946 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0479, 1.0200, 1.3742, 1.1764, 1.7743, 1.6978, 1.8817, 0.5465], + device='cuda:1'), covar=tensor([0.1434, 0.2305, 0.1185, 0.1265, 0.0936, 0.1192, 0.0978, 0.2112], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0447, 0.0409, 0.0386, 0.0492, 0.0391, 0.0570, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:46:10,036 INFO [train.py:903] (1/4) Epoch 3, batch 3400, loss[loss=0.2793, simple_loss=0.3493, pruned_loss=0.1047, over 19511.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3739, pruned_loss=0.14, over 3840821.81 frames. ], batch size: 56, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:08,469 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:47:12,305 INFO [train.py:903] (1/4) Epoch 3, batch 3450, loss[loss=0.3142, simple_loss=0.3547, pruned_loss=0.1369, over 19392.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3731, pruned_loss=0.1388, over 3847346.36 frames. ], batch size: 48, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:14,347 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 00:47:28,181 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 8.544e+02 1.014e+03 1.278e+03 1.988e+03, threshold=2.028e+03, percent-clipped=3.0 +2023-04-01 00:47:39,736 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:48:12,438 INFO [train.py:903] (1/4) Epoch 3, batch 3500, loss[loss=0.3393, simple_loss=0.4067, pruned_loss=0.136, over 19625.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3753, pruned_loss=0.1406, over 3836259.29 frames. ], batch size: 57, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:48:18,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 00:49:12,365 INFO [train.py:903] (1/4) Epoch 3, batch 3550, loss[loss=0.285, simple_loss=0.3381, pruned_loss=0.1159, over 19587.00 frames. ], tot_loss[loss=0.3273, simple_loss=0.3745, pruned_loss=0.1401, over 3831472.99 frames. 
], batch size: 52, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,640 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:49:26,839 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.377e+02 8.429e+02 1.068e+03 1.302e+03 2.755e+03, threshold=2.137e+03, percent-clipped=4.0 +2023-04-01 00:49:40,878 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17230.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:50:11,406 INFO [train.py:903] (1/4) Epoch 3, batch 3600, loss[loss=0.2731, simple_loss=0.3265, pruned_loss=0.1099, over 19781.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.3737, pruned_loss=0.1389, over 3831276.81 frames. ], batch size: 47, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:50:24,837 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:51:11,855 INFO [train.py:903] (1/4) Epoch 3, batch 3650, loss[loss=0.3515, simple_loss=0.3899, pruned_loss=0.1565, over 13099.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3746, pruned_loss=0.1396, over 3817887.14 frames. ], batch size: 136, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:51:27,523 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.083e+02 7.894e+02 9.345e+02 1.119e+03 1.949e+03, threshold=1.869e+03, percent-clipped=0.0 +2023-04-01 00:51:32,199 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1143, 1.6675, 2.0343, 2.7270, 2.0395, 2.4097, 2.0275, 2.8234], + device='cuda:1'), covar=tensor([0.0796, 0.1647, 0.1093, 0.0688, 0.1082, 0.0412, 0.0891, 0.0490], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0371, 0.0281, 0.0248, 0.0313, 0.0262, 0.0274, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 00:51:33,450 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5556, 2.2424, 1.6259, 1.8505, 2.1080, 1.1345, 1.1876, 1.6716], + device='cuda:1'), covar=tensor([0.0824, 0.0472, 0.0992, 0.0496, 0.0452, 0.1148, 0.0848, 0.0511], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0233, 0.0316, 0.0265, 0.0229, 0.0310, 0.0280, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 00:51:57,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17345.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:52:12,709 INFO [train.py:903] (1/4) Epoch 3, batch 3700, loss[loss=0.3643, simple_loss=0.401, pruned_loss=0.1638, over 19768.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.3741, pruned_loss=0.1387, over 3833890.19 frames. ], batch size: 63, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:52:39,382 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:52:42,829 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:53:05,565 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17401.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:53:12,837 INFO [train.py:903] (1/4) Epoch 3, batch 3750, loss[loss=0.3484, simple_loss=0.3944, pruned_loss=0.1512, over 19619.00 frames. 
], tot_loss[loss=0.3256, simple_loss=0.3737, pruned_loss=0.1387, over 3836510.10 frames. ], batch size: 57, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:53:27,566 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 9.192e+02 1.060e+03 1.489e+03 3.397e+03, threshold=2.120e+03, percent-clipped=7.0 +2023-04-01 00:54:12,635 INFO [train.py:903] (1/4) Epoch 3, batch 3800, loss[loss=0.3216, simple_loss=0.3723, pruned_loss=0.1355, over 19768.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3737, pruned_loss=0.1385, over 3829525.14 frames. ], batch size: 54, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:54:45,638 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 00:55:11,732 INFO [train.py:903] (1/4) Epoch 3, batch 3850, loss[loss=0.3694, simple_loss=0.4068, pruned_loss=0.166, over 19532.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3744, pruned_loss=0.1394, over 3824411.09 frames. ], batch size: 54, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:55:28,331 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+02 7.944e+02 9.720e+02 1.209e+03 3.103e+03, threshold=1.944e+03, percent-clipped=2.0 +2023-04-01 00:56:04,068 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17551.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:56:12,746 INFO [train.py:903] (1/4) Epoch 3, batch 3900, loss[loss=0.2707, simple_loss=0.3213, pruned_loss=0.11, over 19713.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3742, pruned_loss=0.1394, over 3831613.10 frames. ], batch size: 46, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:05,249 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17601.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:57:11,566 INFO [train.py:903] (1/4) Epoch 3, batch 3950, loss[loss=0.3232, simple_loss=0.3761, pruned_loss=0.1352, over 19659.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3746, pruned_loss=0.1393, over 3829105.26 frames. ], batch size: 60, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:14,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0634, 1.1387, 1.7545, 1.3157, 2.4492, 2.2606, 2.6499, 0.9552], + device='cuda:1'), covar=tensor([0.1453, 0.2244, 0.1214, 0.1262, 0.0811, 0.0989, 0.0910, 0.2075], + device='cuda:1'), in_proj_covar=tensor([0.0415, 0.0456, 0.0412, 0.0388, 0.0490, 0.0402, 0.0575, 0.0411], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:57:18,154 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 00:57:27,262 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+02 7.211e+02 9.089e+02 1.148e+03 2.193e+03, threshold=1.818e+03, percent-clipped=2.0 +2023-04-01 00:57:27,665 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4433, 2.4575, 1.5637, 1.6902, 1.9921, 1.2049, 0.8356, 1.8351], + device='cuda:1'), covar=tensor([0.0925, 0.0368, 0.1112, 0.0501, 0.0502, 0.1242, 0.1009, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0235, 0.0314, 0.0262, 0.0224, 0.0312, 0.0279, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 00:57:34,436 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17626.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:57:51,678 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:57:59,696 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2413, 2.1830, 1.9403, 3.6477, 2.3014, 4.0800, 3.3572, 1.8565], + device='cuda:1'), covar=tensor([0.1213, 0.0921, 0.0514, 0.0488, 0.1127, 0.0189, 0.0797, 0.0829], + device='cuda:1'), in_proj_covar=tensor([0.0477, 0.0444, 0.0447, 0.0587, 0.0524, 0.0358, 0.0544, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 00:58:12,125 INFO [train.py:903] (1/4) Epoch 3, batch 4000, loss[loss=0.3701, simple_loss=0.4109, pruned_loss=0.1647, over 19582.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3741, pruned_loss=0.1389, over 3816190.23 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:58:20,399 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:22,658 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:32,269 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-01 00:58:59,486 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 00:59:11,470 INFO [train.py:903] (1/4) Epoch 3, batch 4050, loss[loss=0.3851, simple_loss=0.4092, pruned_loss=0.1805, over 13376.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3735, pruned_loss=0.1386, over 3797479.79 frames. ], batch size: 136, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:59:14,520 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. 
limit=5.0 +2023-04-01 00:59:25,529 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:28,911 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+02 7.398e+02 9.459e+02 1.250e+03 4.446e+03, threshold=1.892e+03, percent-clipped=10.0 +2023-04-01 00:59:32,671 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:57,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17745.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:00:01,022 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17748.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:00:12,217 INFO [train.py:903] (1/4) Epoch 3, batch 4100, loss[loss=0.2539, simple_loss=0.306, pruned_loss=0.1009, over 19734.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3735, pruned_loss=0.1397, over 3791766.87 frames. ], batch size: 46, lr: 2.43e-02, grad_scale: 4.0 +2023-04-01 01:00:38,305 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8217, 1.6730, 1.6706, 2.4820, 1.8138, 2.7119, 2.7996, 2.6248], + device='cuda:1'), covar=tensor([0.0622, 0.1159, 0.1319, 0.1297, 0.1366, 0.0854, 0.1114, 0.0651], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0278, 0.0270, 0.0311, 0.0314, 0.0260, 0.0282, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 01:00:48,457 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 01:01:11,675 INFO [train.py:903] (1/4) Epoch 3, batch 4150, loss[loss=0.3126, simple_loss=0.3686, pruned_loss=0.1283, over 19668.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3712, pruned_loss=0.1379, over 3800539.91 frames. ], batch size: 60, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:01:28,529 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+02 7.784e+02 9.706e+02 1.186e+03 3.618e+03, threshold=1.941e+03, percent-clipped=3.0 +2023-04-01 01:01:45,058 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 01:01:49,964 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:02:10,744 INFO [train.py:903] (1/4) Epoch 3, batch 4200, loss[loss=0.3174, simple_loss=0.3679, pruned_loss=0.1334, over 19680.00 frames. ], tot_loss[loss=0.3233, simple_loss=0.3715, pruned_loss=0.1376, over 3800614.57 frames. ], batch size: 53, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:02:14,267 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17860.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:02:14,912 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 01:03:09,267 INFO [train.py:903] (1/4) Epoch 3, batch 4250, loss[loss=0.3379, simple_loss=0.3762, pruned_loss=0.1498, over 19581.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3728, pruned_loss=0.1387, over 3816675.12 frames. 
], batch size: 52, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:03:26,775 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.782e+02 8.334e+02 9.808e+02 1.259e+03 2.577e+03, threshold=1.962e+03, percent-clipped=5.0 +2023-04-01 01:03:26,829 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 01:03:28,251 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:03:37,987 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 01:03:57,614 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:08,379 INFO [train.py:903] (1/4) Epoch 3, batch 4300, loss[loss=0.3007, simple_loss=0.3504, pruned_loss=0.1255, over 19586.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3728, pruned_loss=0.139, over 3814024.67 frames. ], batch size: 52, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:04:35,031 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:57,242 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3297, 2.2509, 1.5496, 1.5574, 2.0294, 1.0293, 0.9437, 1.5577], + device='cuda:1'), covar=tensor([0.0957, 0.0541, 0.1156, 0.0741, 0.0660, 0.1429, 0.0917, 0.0606], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0239, 0.0320, 0.0259, 0.0218, 0.0315, 0.0277, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:05:06,032 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 01:05:10,744 INFO [train.py:903] (1/4) Epoch 3, batch 4350, loss[loss=0.2652, simple_loss=0.338, pruned_loss=0.09626, over 19851.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.371, pruned_loss=0.1377, over 3820523.86 frames. ], batch size: 52, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:05:27,029 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+02 7.530e+02 9.485e+02 1.282e+03 2.824e+03, threshold=1.897e+03, percent-clipped=4.0 +2023-04-01 01:06:09,595 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1919, 2.1279, 1.9575, 2.8803, 2.0638, 2.9061, 2.7498, 1.9845], + device='cuda:1'), covar=tensor([0.0986, 0.0756, 0.0478, 0.0501, 0.0884, 0.0248, 0.0768, 0.0719], + device='cuda:1'), in_proj_covar=tensor([0.0473, 0.0445, 0.0445, 0.0593, 0.0519, 0.0361, 0.0542, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:06:11,119 INFO [train.py:903] (1/4) Epoch 3, batch 4400, loss[loss=0.3101, simple_loss=0.3533, pruned_loss=0.1335, over 19715.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3702, pruned_loss=0.1374, over 3833128.88 frames. ], batch size: 51, lr: 2.41e-02, grad_scale: 8.0 +2023-04-01 01:06:17,186 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:32,864 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 01:06:43,601 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-01 01:06:53,791 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18092.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:57,450 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:10,934 INFO [train.py:903] (1/4) Epoch 3, batch 4450, loss[loss=0.3226, simple_loss=0.3853, pruned_loss=0.13, over 19308.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3696, pruned_loss=0.1363, over 3823308.11 frames. ], batch size: 66, lr: 2.40e-02, grad_scale: 8.0 +2023-04-01 01:07:21,267 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18116.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:07:26,614 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:28,236 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.891e+02 7.625e+02 9.248e+02 1.171e+03 2.408e+03, threshold=1.850e+03, percent-clipped=4.0 +2023-04-01 01:07:53,374 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18141.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:08:11,163 INFO [train.py:903] (1/4) Epoch 3, batch 4500, loss[loss=0.3333, simple_loss=0.3852, pruned_loss=0.1407, over 19668.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3687, pruned_loss=0.1351, over 3826236.28 frames. ], batch size: 58, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:08:37,483 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:54,078 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18192.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:55,158 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:11,759 INFO [train.py:903] (1/4) Epoch 3, batch 4550, loss[loss=0.3203, simple_loss=0.3738, pruned_loss=0.1334, over 19691.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3667, pruned_loss=0.1343, over 3835990.33 frames. ], batch size: 53, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:09:12,129 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:19,321 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 01:09:29,444 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.087e+02 7.616e+02 9.596e+02 1.201e+03 2.125e+03, threshold=1.919e+03, percent-clipped=4.0 +2023-04-01 01:09:42,206 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 01:10:11,981 INFO [train.py:903] (1/4) Epoch 3, batch 4600, loss[loss=0.348, simple_loss=0.3909, pruned_loss=0.1526, over 19511.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3675, pruned_loss=0.1351, over 3826693.93 frames. 
], batch size: 64, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:08,586 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0068, 2.0682, 1.4576, 1.5093, 1.3896, 1.6796, 0.1966, 0.8098], + device='cuda:1'), covar=tensor([0.0233, 0.0197, 0.0154, 0.0180, 0.0453, 0.0211, 0.0437, 0.0373], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0251, 0.0245, 0.0266, 0.0321, 0.0257, 0.0253, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 01:11:11,633 INFO [train.py:903] (1/4) Epoch 3, batch 4650, loss[loss=0.3597, simple_loss=0.4048, pruned_loss=0.1573, over 19698.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3684, pruned_loss=0.1356, over 3822338.29 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:27,784 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 01:11:28,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+02 7.851e+02 9.796e+02 1.308e+03 3.825e+03, threshold=1.959e+03, percent-clipped=6.0 +2023-04-01 01:11:29,087 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:11:38,744 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 01:11:54,698 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2566, 2.1698, 1.8646, 3.3360, 2.3436, 3.6158, 3.0624, 2.0022], + device='cuda:1'), covar=tensor([0.1164, 0.0881, 0.0547, 0.0528, 0.1075, 0.0211, 0.0774, 0.0772], + device='cuda:1'), in_proj_covar=tensor([0.0476, 0.0451, 0.0449, 0.0593, 0.0524, 0.0356, 0.0549, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:12:10,876 INFO [train.py:903] (1/4) Epoch 3, batch 4700, loss[loss=0.3266, simple_loss=0.3538, pruned_loss=0.1498, over 19394.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3698, pruned_loss=0.1369, over 3805498.43 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:12:33,352 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 01:13:07,535 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0013, 1.9951, 1.7359, 2.9463, 1.9666, 2.8720, 2.4406, 1.6512], + device='cuda:1'), covar=tensor([0.1127, 0.0902, 0.0542, 0.0503, 0.1026, 0.0270, 0.0918, 0.0857], + device='cuda:1'), in_proj_covar=tensor([0.0480, 0.0450, 0.0449, 0.0599, 0.0525, 0.0361, 0.0552, 0.0449], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:13:13,550 INFO [train.py:903] (1/4) Epoch 3, batch 4750, loss[loss=0.2979, simple_loss=0.3392, pruned_loss=0.1283, over 19307.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3692, pruned_loss=0.1368, over 3817463.61 frames. 
], batch size: 44, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:13:31,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.455e+02 7.810e+02 9.309e+02 1.222e+03 2.382e+03, threshold=1.862e+03, percent-clipped=4.0 +2023-04-01 01:13:44,338 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18433.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:13:48,805 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:15,102 INFO [train.py:903] (1/4) Epoch 3, batch 4800, loss[loss=0.2551, simple_loss=0.3279, pruned_loss=0.09111, over 19661.00 frames. ], tot_loss[loss=0.3209, simple_loss=0.3688, pruned_loss=0.1365, over 3823189.01 frames. ], batch size: 53, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:14:16,620 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:19,800 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8441, 3.4597, 2.1212, 3.1092, 1.1775, 3.1931, 3.0793, 3.2346], + device='cuda:1'), covar=tensor([0.0761, 0.1318, 0.2230, 0.0945, 0.3773, 0.1026, 0.0817, 0.1106], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0288, 0.0339, 0.0278, 0.0344, 0.0291, 0.0246, 0.0286], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 01:14:22,344 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18463.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:53,517 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18488.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:16,092 INFO [train.py:903] (1/4) Epoch 3, batch 4850, loss[loss=0.3042, simple_loss=0.3417, pruned_loss=0.1334, over 19753.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3663, pruned_loss=0.1345, over 3822005.57 frames. ], batch size: 46, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:15:17,542 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:34,581 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+02 7.295e+02 9.130e+02 1.063e+03 1.681e+03, threshold=1.826e+03, percent-clipped=0.0 +2023-04-01 01:15:38,187 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 01:15:40,626 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18526.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:15:52,381 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:53,573 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18537.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:16:00,126 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-01 01:16:02,865 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0209, 2.0348, 1.6697, 2.9488, 2.1698, 3.0847, 2.7237, 1.8727], + device='cuda:1'), covar=tensor([0.1037, 0.0750, 0.0534, 0.0477, 0.0868, 0.0244, 0.0775, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0458, 0.0460, 0.0610, 0.0538, 0.0372, 0.0558, 0.0460], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:16:04,601 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 01:16:05,767 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 01:16:14,590 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 01:16:15,641 INFO [train.py:903] (1/4) Epoch 3, batch 4900, loss[loss=0.2788, simple_loss=0.3347, pruned_loss=0.1114, over 19350.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3665, pruned_loss=0.1344, over 3829259.31 frames. ], batch size: 47, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:16:34,639 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 01:17:14,799 INFO [train.py:903] (1/4) Epoch 3, batch 4950, loss[loss=0.3491, simple_loss=0.3925, pruned_loss=0.1528, over 19727.00 frames. ], tot_loss[loss=0.3204, simple_loss=0.3688, pruned_loss=0.1361, over 3830079.65 frames. ], batch size: 63, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:17:26,350 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2426, 2.2260, 1.5414, 1.4141, 1.9684, 1.0120, 0.7876, 1.5145], + device='cuda:1'), covar=tensor([0.0800, 0.0400, 0.0961, 0.0581, 0.0416, 0.1220, 0.0879, 0.0493], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0240, 0.0314, 0.0260, 0.0220, 0.0314, 0.0284, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:17:30,516 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 01:17:34,976 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.825e+02 8.688e+02 1.059e+03 1.337e+03 3.400e+03, threshold=2.119e+03, percent-clipped=10.0 +2023-04-01 01:17:52,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 01:18:09,817 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18651.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:10,916 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:17,089 INFO [train.py:903] (1/4) Epoch 3, batch 5000, loss[loss=0.336, simple_loss=0.3719, pruned_loss=0.15, over 19280.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.368, pruned_loss=0.1354, over 3825891.67 frames. ], batch size: 44, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:18:21,592 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 01:18:32,451 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 01:18:59,268 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:00,347 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18693.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:16,429 INFO [train.py:903] (1/4) Epoch 3, batch 5050, loss[loss=0.3156, simple_loss=0.3776, pruned_loss=0.1268, over 19542.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3704, pruned_loss=0.1376, over 3808973.92 frames. ], batch size: 56, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:19:29,205 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:35,295 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.108e+02 7.776e+02 1.028e+03 1.229e+03 3.550e+03, threshold=2.057e+03, percent-clipped=2.0 +2023-04-01 01:19:44,363 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9358, 1.9395, 2.2631, 2.1971, 2.9743, 3.5000, 3.6066, 3.7080], + device='cuda:1'), covar=tensor([0.1048, 0.1858, 0.1889, 0.1254, 0.0508, 0.0192, 0.0154, 0.0109], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0279, 0.0322, 0.0266, 0.0199, 0.0108, 0.0201, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:19:48,428 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 01:20:16,939 INFO [train.py:903] (1/4) Epoch 3, batch 5100, loss[loss=0.3208, simple_loss=0.3708, pruned_loss=0.1354, over 19668.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.372, pruned_loss=0.1376, over 3806251.23 frames. ], batch size: 55, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:20:24,654 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 01:20:27,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 01:20:32,464 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 01:21:19,959 INFO [train.py:903] (1/4) Epoch 3, batch 5150, loss[loss=0.3049, simple_loss=0.3665, pruned_loss=0.1216, over 19685.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.371, pruned_loss=0.1361, over 3808815.10 frames. ], batch size: 53, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:21:31,908 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 01:21:40,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.924e+02 8.047e+02 1.080e+03 1.932e+03, threshold=1.609e+03, percent-clipped=0.0 +2023-04-01 01:22:03,173 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1318, 1.1104, 1.7013, 1.2437, 2.5125, 2.5057, 2.7954, 1.0139], + device='cuda:1'), covar=tensor([0.1802, 0.2648, 0.1517, 0.1658, 0.1118, 0.1084, 0.1139, 0.2380], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0463, 0.0430, 0.0390, 0.0506, 0.0412, 0.0585, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:22:07,001 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 01:22:14,771 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18852.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:22:20,356 INFO [train.py:903] (1/4) Epoch 3, batch 5200, loss[loss=0.285, simple_loss=0.3375, pruned_loss=0.1162, over 19468.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3703, pruned_loss=0.1353, over 3826525.50 frames. ], batch size: 49, lr: 2.36e-02, grad_scale: 8.0 +2023-04-01 01:22:37,382 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 01:22:37,525 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18870.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:23:21,224 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 01:23:23,224 INFO [train.py:903] (1/4) Epoch 3, batch 5250, loss[loss=0.4379, simple_loss=0.4431, pruned_loss=0.2163, over 13131.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3694, pruned_loss=0.1352, over 3817167.93 frames. ], batch size: 135, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:23:23,680 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:24,855 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:42,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+02 7.388e+02 9.793e+02 1.246e+03 4.620e+03, threshold=1.959e+03, percent-clipped=9.0 +2023-04-01 01:23:53,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:54,590 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:12,780 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:22,195 INFO [train.py:903] (1/4) Epoch 3, batch 5300, loss[loss=0.3319, simple_loss=0.3821, pruned_loss=0.1409, over 19470.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3701, pruned_loss=0.1355, over 3813590.11 frames. ], batch size: 64, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:24:35,252 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:39,530 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 01:24:56,456 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18985.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:25:06,616 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5923, 1.3979, 2.0856, 1.6085, 2.8171, 2.5042, 2.9147, 1.7442], + device='cuda:1'), covar=tensor([0.1049, 0.1950, 0.0969, 0.1033, 0.0655, 0.0783, 0.0720, 0.1494], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0466, 0.0428, 0.0392, 0.0507, 0.0408, 0.0581, 0.0418], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:25:22,315 INFO [train.py:903] (1/4) Epoch 3, batch 5350, loss[loss=0.3019, simple_loss=0.3393, pruned_loss=0.1322, over 19761.00 frames. 
], tot_loss[loss=0.3209, simple_loss=0.3701, pruned_loss=0.1358, over 3817825.11 frames. ], batch size: 48, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:25:42,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+02 7.837e+02 1.011e+03 1.314e+03 3.062e+03, threshold=2.023e+03, percent-clipped=6.0 +2023-04-01 01:25:51,174 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2415, 1.0239, 1.3226, 0.4295, 2.4614, 2.3941, 2.0745, 2.4360], + device='cuda:1'), covar=tensor([0.1161, 0.2782, 0.2800, 0.2167, 0.0351, 0.0167, 0.0354, 0.0182], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0281, 0.0328, 0.0266, 0.0198, 0.0109, 0.0201, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:25:55,447 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 01:25:56,694 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19036.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:26:20,407 INFO [train.py:903] (1/4) Epoch 3, batch 5400, loss[loss=0.3077, simple_loss=0.357, pruned_loss=0.1292, over 19471.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3706, pruned_loss=0.1367, over 3816707.46 frames. ], batch size: 49, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:27:22,187 INFO [train.py:903] (1/4) Epoch 3, batch 5450, loss[loss=0.3071, simple_loss=0.3671, pruned_loss=0.1235, over 19334.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3702, pruned_loss=0.1358, over 3830544.01 frames. ], batch size: 66, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:27:41,223 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+02 7.941e+02 9.480e+02 1.159e+03 2.761e+03, threshold=1.896e+03, percent-clipped=1.0 +2023-04-01 01:28:10,077 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19147.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:28:14,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:28:21,131 INFO [train.py:903] (1/4) Epoch 3, batch 5500, loss[loss=0.3724, simple_loss=0.4072, pruned_loss=0.1688, over 19542.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3698, pruned_loss=0.1362, over 3831350.35 frames. ], batch size: 54, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:28:45,303 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 01:29:20,073 INFO [train.py:903] (1/4) Epoch 3, batch 5550, loss[loss=0.3035, simple_loss=0.3526, pruned_loss=0.1272, over 19730.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3699, pruned_loss=0.137, over 3816973.27 frames. ], batch size: 51, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:29:27,432 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 01:29:37,979 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5241, 0.9331, 1.5545, 0.9807, 2.7597, 3.4537, 3.4154, 3.8226], + device='cuda:1'), covar=tensor([0.1315, 0.3017, 0.2749, 0.2101, 0.0404, 0.0119, 0.0190, 0.0089], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0281, 0.0326, 0.0266, 0.0197, 0.0108, 0.0202, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:29:41,412 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:29:42,151 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 7.947e+02 9.800e+02 1.342e+03 2.993e+03, threshold=1.960e+03, percent-clipped=6.0 +2023-04-01 01:30:02,049 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19241.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:30:09,698 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:30:15,624 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 01:30:17,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1431, 1.7109, 1.9561, 2.3575, 4.6717, 1.6196, 2.6240, 4.5512], + device='cuda:1'), covar=tensor([0.0205, 0.2160, 0.2050, 0.1310, 0.0303, 0.1843, 0.0944, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0307, 0.0300, 0.0277, 0.0289, 0.0314, 0.0272, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:30:21,461 INFO [train.py:903] (1/4) Epoch 3, batch 5600, loss[loss=0.3167, simple_loss=0.3831, pruned_loss=0.1252, over 19675.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3691, pruned_loss=0.1358, over 3830020.52 frames. ], batch size: 55, lr: 2.34e-02, grad_scale: 8.0 +2023-04-01 01:30:33,698 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19266.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:31:03,988 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:31:22,079 INFO [train.py:903] (1/4) Epoch 3, batch 5650, loss[loss=0.3707, simple_loss=0.4068, pruned_loss=0.1673, over 19668.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3695, pruned_loss=0.1364, over 3821731.03 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:31:41,038 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 7.403e+02 9.102e+02 1.185e+03 3.385e+03, threshold=1.820e+03, percent-clipped=3.0 +2023-04-01 01:32:09,525 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 01:32:21,083 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9076, 1.7074, 1.3574, 1.8608, 1.5405, 1.6603, 1.4595, 1.7233], + device='cuda:1'), covar=tensor([0.0745, 0.1311, 0.1218, 0.0826, 0.1073, 0.0527, 0.0903, 0.0639], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0379, 0.0283, 0.0251, 0.0313, 0.0263, 0.0269, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:32:21,815 INFO [train.py:903] (1/4) Epoch 3, batch 5700, loss[loss=0.3055, simple_loss=0.3614, pruned_loss=0.1248, over 19630.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3699, pruned_loss=0.1364, over 3826687.58 frames. ], batch size: 50, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:32:50,542 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9492, 1.9194, 1.7415, 2.8955, 1.9048, 2.8117, 2.4781, 1.7413], + device='cuda:1'), covar=tensor([0.1127, 0.0863, 0.0561, 0.0441, 0.1048, 0.0280, 0.0893, 0.0856], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0471, 0.0470, 0.0628, 0.0547, 0.0387, 0.0563, 0.0461], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:33:22,521 INFO [train.py:903] (1/4) Epoch 3, batch 5750, loss[loss=0.3111, simple_loss=0.3702, pruned_loss=0.126, over 19647.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3689, pruned_loss=0.1353, over 3821615.41 frames. ], batch size: 58, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:33:22,866 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:22,936 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:24,192 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3907, 2.1277, 1.9759, 3.5255, 2.2325, 3.9103, 3.3910, 1.9134], + device='cuda:1'), covar=tensor([0.1251, 0.0943, 0.0544, 0.0565, 0.1177, 0.0234, 0.0785, 0.0852], + device='cuda:1'), in_proj_covar=tensor([0.0491, 0.0465, 0.0464, 0.0622, 0.0540, 0.0383, 0.0553, 0.0455], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:33:24,900 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 01:33:30,118 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:34,052 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 01:33:39,377 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 01:33:43,561 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.525e+02 9.538e+02 1.231e+03 3.556e+03, threshold=1.908e+03, percent-clipped=6.0 +2023-04-01 01:33:53,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:34:22,888 INFO [train.py:903] (1/4) Epoch 3, batch 5800, loss[loss=0.3195, simple_loss=0.3818, pruned_loss=0.1287, over 19662.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3709, pruned_loss=0.1369, over 3815220.24 frames. 
], batch size: 55, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:34:24,441 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3794, 1.1026, 1.5128, 0.9287, 2.4800, 3.1970, 2.8333, 3.4280], + device='cuda:1'), covar=tensor([0.1323, 0.2616, 0.2599, 0.2012, 0.0432, 0.0101, 0.0253, 0.0106], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0276, 0.0319, 0.0266, 0.0196, 0.0107, 0.0199, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:35:03,993 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19491.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:35:09,079 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-04-01 01:35:23,137 INFO [train.py:903] (1/4) Epoch 3, batch 5850, loss[loss=0.3475, simple_loss=0.3978, pruned_loss=0.1486, over 19684.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3712, pruned_loss=0.1367, over 3816951.91 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:37,152 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9138, 1.8844, 1.6912, 2.7076, 1.9397, 2.6868, 2.3758, 1.7880], + device='cuda:1'), covar=tensor([0.1106, 0.0864, 0.0539, 0.0514, 0.1006, 0.0315, 0.0958, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0500, 0.0473, 0.0470, 0.0632, 0.0550, 0.0390, 0.0571, 0.0462], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:35:43,378 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+02 8.354e+02 1.035e+03 1.319e+03 5.609e+03, threshold=2.070e+03, percent-clipped=8.0 +2023-04-01 01:36:18,436 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 01:36:23,587 INFO [train.py:903] (1/4) Epoch 3, batch 5900, loss[loss=0.3516, simple_loss=0.3924, pruned_loss=0.1554, over 19299.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3703, pruned_loss=0.1361, over 3808297.63 frames. ], batch size: 66, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:36:26,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 01:36:46,202 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 01:37:21,778 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19606.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:37:22,518 INFO [train.py:903] (1/4) Epoch 3, batch 5950, loss[loss=0.3518, simple_loss=0.394, pruned_loss=0.1548, over 19779.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3678, pruned_loss=0.1348, over 3800499.49 frames. ], batch size: 56, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:37:45,644 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.894e+02 9.030e+02 1.163e+03 3.004e+03, threshold=1.806e+03, percent-clipped=5.0 +2023-04-01 01:37:59,453 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:38:24,425 INFO [train.py:903] (1/4) Epoch 3, batch 6000, loss[loss=0.3764, simple_loss=0.3951, pruned_loss=0.1788, over 13546.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3686, pruned_loss=0.135, over 3796403.60 frames. 
], batch size: 136, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:38:24,425 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 01:38:37,333 INFO [train.py:937] (1/4) Epoch 3, validation: loss=0.2218, simple_loss=0.3182, pruned_loss=0.06273, over 944034.00 frames. +2023-04-01 01:38:37,334 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 01:38:45,541 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:15,734 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:37,220 INFO [train.py:903] (1/4) Epoch 3, batch 6050, loss[loss=0.3716, simple_loss=0.3933, pruned_loss=0.1749, over 19467.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3668, pruned_loss=0.1339, over 3806574.91 frames. ], batch size: 49, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:39:59,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+02 7.768e+02 9.451e+02 1.306e+03 2.772e+03, threshold=1.890e+03, percent-clipped=6.0 +2023-04-01 01:40:37,795 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:40:38,850 INFO [train.py:903] (1/4) Epoch 3, batch 6100, loss[loss=0.2602, simple_loss=0.3215, pruned_loss=0.09942, over 19693.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.3672, pruned_loss=0.1342, over 3799437.36 frames. ], batch size: 53, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:41:19,574 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 01:41:38,885 INFO [train.py:903] (1/4) Epoch 3, batch 6150, loss[loss=0.349, simple_loss=0.3762, pruned_loss=0.1609, over 19808.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3696, pruned_loss=0.1363, over 3805097.39 frames. ], batch size: 48, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:01,434 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+02 8.602e+02 1.123e+03 1.510e+03 2.312e+03, threshold=2.246e+03, percent-clipped=7.0 +2023-04-01 01:42:04,016 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6421, 1.2830, 1.2401, 1.7108, 1.4525, 1.4755, 1.3507, 1.5891], + device='cuda:1'), covar=tensor([0.0782, 0.1357, 0.1263, 0.0750, 0.1028, 0.0509, 0.0897, 0.0607], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0388, 0.0286, 0.0251, 0.0319, 0.0262, 0.0274, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:42:04,686 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 01:42:38,569 INFO [train.py:903] (1/4) Epoch 3, batch 6200, loss[loss=0.3055, simple_loss=0.3674, pruned_loss=0.1218, over 19670.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3703, pruned_loss=0.1367, over 3802274.72 frames. 
], batch size: 58, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:46,183 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19862.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:42:57,072 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:43:15,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19887.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:43:39,797 INFO [train.py:903] (1/4) Epoch 3, batch 6250, loss[loss=0.362, simple_loss=0.4019, pruned_loss=0.161, over 19121.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3708, pruned_loss=0.1366, over 3806044.56 frames. ], batch size: 69, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:01,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+02 7.100e+02 9.208e+02 1.133e+03 3.490e+03, threshold=1.842e+03, percent-clipped=3.0 +2023-04-01 01:44:09,641 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 01:44:40,501 INFO [train.py:903] (1/4) Epoch 3, batch 6300, loss[loss=0.3089, simple_loss=0.3688, pruned_loss=0.1245, over 19573.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3706, pruned_loss=0.1359, over 3805583.14 frames. ], batch size: 67, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:57,893 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.09 vs. limit=5.0 +2023-04-01 01:45:02,069 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2934, 1.0984, 2.1075, 1.6056, 3.1313, 2.9715, 3.5058, 1.5150], + device='cuda:1'), covar=tensor([0.1521, 0.2582, 0.1396, 0.1234, 0.1034, 0.1057, 0.1175, 0.2340], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0473, 0.0437, 0.0401, 0.0523, 0.0423, 0.0595, 0.0427], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 01:45:07,282 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19980.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:45:10,846 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 01:45:42,100 INFO [train.py:903] (1/4) Epoch 3, batch 6350, loss[loss=0.334, simple_loss=0.387, pruned_loss=0.1405, over 19659.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3689, pruned_loss=0.1348, over 3800256.60 frames. ], batch size: 55, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:46:03,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.424e+02 9.293e+02 1.158e+03 2.968e+03, threshold=1.859e+03, percent-clipped=5.0 +2023-04-01 01:46:20,207 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20038.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:46:20,731 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 01:46:42,148 INFO [train.py:903] (1/4) Epoch 3, batch 6400, loss[loss=0.265, simple_loss=0.3185, pruned_loss=0.1058, over 19732.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3701, pruned_loss=0.1358, over 3792065.53 frames. 
], batch size: 51, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:29,294 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:47:32,635 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7770, 1.3873, 1.7377, 1.6962, 2.8549, 4.6054, 4.6021, 5.0323], + device='cuda:1'), covar=tensor([0.1245, 0.2605, 0.2633, 0.1703, 0.0419, 0.0128, 0.0119, 0.0068], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0275, 0.0321, 0.0264, 0.0192, 0.0110, 0.0200, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:47:43,490 INFO [train.py:903] (1/4) Epoch 3, batch 6450, loss[loss=0.3599, simple_loss=0.4003, pruned_loss=0.1598, over 19660.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3686, pruned_loss=0.1347, over 3796892.39 frames. ], batch size: 58, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:43,788 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:05,562 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.028e+02 7.833e+02 9.380e+02 1.145e+03 2.427e+03, threshold=1.876e+03, percent-clipped=3.0 +2023-04-01 01:48:08,342 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:12,611 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:29,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 01:48:39,345 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:44,547 INFO [train.py:903] (1/4) Epoch 3, batch 6500, loss[loss=0.3153, simple_loss=0.3707, pruned_loss=0.1299, over 19521.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3673, pruned_loss=0.1337, over 3811262.77 frames. ], batch size: 54, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:48:52,292 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 01:49:45,351 INFO [train.py:903] (1/4) Epoch 3, batch 6550, loss[loss=0.3771, simple_loss=0.4063, pruned_loss=0.1739, over 19539.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.368, pruned_loss=0.1343, over 3799813.10 frames. ], batch size: 56, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:50:03,598 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:50:07,448 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.085e+02 9.596e+02 1.224e+03 2.377e+03, threshold=1.919e+03, percent-clipped=5.0 +2023-04-01 01:50:46,685 INFO [train.py:903] (1/4) Epoch 3, batch 6600, loss[loss=0.2926, simple_loss=0.3549, pruned_loss=0.1152, over 19782.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3675, pruned_loss=0.1336, over 3816019.91 frames. 
], batch size: 54, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:51:20,946 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2963, 0.7195, 1.1023, 1.2724, 1.8751, 0.9924, 1.7485, 1.8633], + device='cuda:1'), covar=tensor([0.0820, 0.3523, 0.3020, 0.1857, 0.1126, 0.2267, 0.1240, 0.0976], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0302, 0.0299, 0.0277, 0.0288, 0.0310, 0.0273, 0.0284], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:51:47,898 INFO [train.py:903] (1/4) Epoch 3, batch 6650, loss[loss=0.3146, simple_loss=0.3688, pruned_loss=0.1301, over 19541.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.365, pruned_loss=0.1318, over 3813180.30 frames. ], batch size: 56, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:51:57,028 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4848, 2.3594, 1.4694, 1.6473, 2.1292, 1.0897, 1.0644, 1.6735], + device='cuda:1'), covar=tensor([0.0856, 0.0412, 0.1149, 0.0541, 0.0370, 0.1254, 0.0756, 0.0461], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0245, 0.0314, 0.0251, 0.0212, 0.0314, 0.0279, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 01:52:10,191 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+02 7.672e+02 8.992e+02 1.144e+03 3.524e+03, threshold=1.798e+03, percent-clipped=4.0 +2023-04-01 01:52:42,295 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:52:49,535 INFO [train.py:903] (1/4) Epoch 3, batch 6700, loss[loss=0.2663, simple_loss=0.3281, pruned_loss=0.1022, over 19484.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3666, pruned_loss=0.1332, over 3785346.37 frames. ], batch size: 49, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:52:50,835 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5874, 1.8732, 1.8975, 2.4231, 2.2148, 2.6102, 2.7987, 2.3325], + device='cuda:1'), covar=tensor([0.0701, 0.1066, 0.1159, 0.1222, 0.1215, 0.0722, 0.1101, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0281, 0.0267, 0.0305, 0.0312, 0.0246, 0.0275, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 01:53:11,819 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:17,972 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:44,487 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:46,032 INFO [train.py:903] (1/4) Epoch 3, batch 6750, loss[loss=0.3128, simple_loss=0.3666, pruned_loss=0.1295, over 18134.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3676, pruned_loss=0.1343, over 3791271.53 frames. 
], batch size: 83, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:54:05,351 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+02 7.667e+02 1.002e+03 1.269e+03 2.908e+03, threshold=2.004e+03, percent-clipped=6.0 +2023-04-01 01:54:17,387 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:34,785 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:41,604 INFO [train.py:903] (1/4) Epoch 3, batch 6800, loss[loss=0.2922, simple_loss=0.349, pruned_loss=0.1177, over 19837.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3662, pruned_loss=0.1332, over 3796617.56 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:55:00,654 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:55:25,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 01:55:26,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 01:55:29,043 INFO [train.py:903] (1/4) Epoch 4, batch 0, loss[loss=0.3371, simple_loss=0.3824, pruned_loss=0.1459, over 19697.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.3824, pruned_loss=0.1459, over 19697.00 frames. ], batch size: 53, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:55:29,043 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 01:55:40,524 INFO [train.py:937] (1/4) Epoch 4, validation: loss=0.2245, simple_loss=0.3205, pruned_loss=0.06426, over 944034.00 frames. +2023-04-01 01:55:40,525 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 01:55:51,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9558, 2.7501, 1.9747, 2.1204, 1.8850, 2.0723, 0.6621, 2.0526], + device='cuda:1'), covar=tensor([0.0197, 0.0205, 0.0208, 0.0261, 0.0396, 0.0321, 0.0462, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0256, 0.0255, 0.0280, 0.0334, 0.0274, 0.0258, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 01:55:53,633 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 01:55:55,144 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20497.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:56:07,943 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.90 vs. limit=5.0 +2023-04-01 01:56:27,869 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.855e+02 8.790e+02 1.166e+03 2.960e+03, threshold=1.758e+03, percent-clipped=3.0 +2023-04-01 01:56:41,007 INFO [train.py:903] (1/4) Epoch 4, batch 50, loss[loss=0.3208, simple_loss=0.3801, pruned_loss=0.1308, over 19573.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3654, pruned_loss=0.1335, over 863796.41 frames. 
], batch size: 61, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:01,516 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3977, 1.4835, 1.5846, 1.2962, 2.6004, 3.3691, 3.3549, 3.6962], + device='cuda:1'), covar=tensor([0.1236, 0.2204, 0.2344, 0.1653, 0.0391, 0.0125, 0.0177, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0274, 0.0316, 0.0261, 0.0187, 0.0104, 0.0193, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:57:14,493 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 01:57:15,795 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:16,001 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:36,910 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5012, 3.9049, 4.0935, 4.0511, 1.3733, 3.6705, 3.3936, 3.6873], + device='cuda:1'), covar=tensor([0.0740, 0.0508, 0.0455, 0.0351, 0.3587, 0.0290, 0.0412, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0409, 0.0370, 0.0492, 0.0388, 0.0506, 0.0271, 0.0329, 0.0475], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 01:57:40,114 INFO [train.py:903] (1/4) Epoch 4, batch 100, loss[loss=0.3177, simple_loss=0.3736, pruned_loss=0.1309, over 17241.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3673, pruned_loss=0.1338, over 1524315.59 frames. ], batch size: 101, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:46,143 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20589.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:52,769 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 01:58:05,917 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:58:16,534 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3848, 1.0534, 1.6859, 1.2870, 2.8482, 3.5296, 3.5167, 3.8691], + device='cuda:1'), covar=tensor([0.1372, 0.2857, 0.2523, 0.1790, 0.0395, 0.0105, 0.0218, 0.0094], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0275, 0.0318, 0.0262, 0.0189, 0.0106, 0.0196, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 01:58:29,319 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 7.763e+02 9.275e+02 1.175e+03 2.763e+03, threshold=1.855e+03, percent-clipped=7.0 +2023-04-01 01:58:41,723 INFO [train.py:903] (1/4) Epoch 4, batch 150, loss[loss=0.2725, simple_loss=0.342, pruned_loss=0.1015, over 19779.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3618, pruned_loss=0.1298, over 2032942.88 frames. ], batch size: 56, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:35,932 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20681.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:59:40,168 INFO [train.py:903] (1/4) Epoch 4, batch 200, loss[loss=0.3113, simple_loss=0.3669, pruned_loss=0.1278, over 19466.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3626, pruned_loss=0.1294, over 2445663.68 frames. 
], batch size: 64, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:41,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 02:00:28,681 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 7.097e+02 9.184e+02 1.257e+03 2.857e+03, threshold=1.837e+03, percent-clipped=5.0 +2023-04-01 02:00:39,435 INFO [train.py:903] (1/4) Epoch 4, batch 250, loss[loss=0.3084, simple_loss=0.3691, pruned_loss=0.1239, over 19671.00 frames. ], tot_loss[loss=0.3139, simple_loss=0.3645, pruned_loss=0.1317, over 2742675.71 frames. ], batch size: 58, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:00:41,857 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3086, 2.1546, 2.1628, 3.2074, 2.5415, 3.3696, 3.0671, 2.0353], + device='cuda:1'), covar=tensor([0.1170, 0.0901, 0.0477, 0.0540, 0.0882, 0.0230, 0.0666, 0.0797], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0483, 0.0479, 0.0647, 0.0551, 0.0401, 0.0579, 0.0482], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:00:57,790 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:02,409 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:32,721 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:34,829 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:35,025 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9728, 1.3640, 1.0736, 1.0357, 1.2635, 0.7810, 0.6560, 1.2278], + device='cuda:1'), covar=tensor([0.0507, 0.0426, 0.0626, 0.0407, 0.0326, 0.0822, 0.0557, 0.0264], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0247, 0.0309, 0.0248, 0.0214, 0.0309, 0.0277, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:01:41,111 INFO [train.py:903] (1/4) Epoch 4, batch 300, loss[loss=0.2924, simple_loss=0.3395, pruned_loss=0.1227, over 19803.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.364, pruned_loss=0.1315, over 2969314.80 frames. ], batch size: 48, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:02:25,241 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:29,141 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 7.559e+02 9.012e+02 1.206e+03 2.235e+03, threshold=1.802e+03, percent-clipped=6.0 +2023-04-01 02:02:30,503 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1898, 1.5118, 1.7906, 2.6391, 1.8559, 2.4453, 2.6927, 2.5087], + device='cuda:1'), covar=tensor([0.0751, 0.1132, 0.1091, 0.0996, 0.1203, 0.0714, 0.0966, 0.0612], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0275, 0.0267, 0.0298, 0.0308, 0.0249, 0.0268, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 02:02:40,181 INFO [train.py:903] (1/4) Epoch 4, batch 350, loss[loss=0.3321, simple_loss=0.3842, pruned_loss=0.14, over 19658.00 frames. 
], tot_loss[loss=0.3134, simple_loss=0.364, pruned_loss=0.1314, over 3169033.71 frames. ], batch size: 60, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:02:45,647 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 02:02:52,693 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:54,956 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20847.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:59,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5491, 3.8267, 4.0663, 4.0806, 1.4186, 3.6795, 3.2988, 3.6666], + device='cuda:1'), covar=tensor([0.0782, 0.0509, 0.0492, 0.0393, 0.3576, 0.0303, 0.0466, 0.1023], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0375, 0.0508, 0.0391, 0.0514, 0.0278, 0.0333, 0.0484], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 02:03:18,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20865.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:23,900 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:40,719 INFO [train.py:903] (1/4) Epoch 4, batch 400, loss[loss=0.3608, simple_loss=0.4001, pruned_loss=0.1607, over 19473.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3639, pruned_loss=0.1311, over 3321743.21 frames. ], batch size: 64, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:03:52,356 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:18,388 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5556, 1.1769, 1.3846, 1.8776, 3.0981, 1.2970, 2.0752, 3.0727], + device='cuda:1'), covar=tensor([0.0302, 0.2275, 0.2103, 0.1139, 0.0455, 0.1738, 0.0963, 0.0383], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0304, 0.0303, 0.0277, 0.0295, 0.0313, 0.0279, 0.0281], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:04:27,878 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.529e+02 9.870e+02 1.265e+03 2.610e+03, threshold=1.974e+03, percent-clipped=3.0 +2023-04-01 02:04:39,088 INFO [train.py:903] (1/4) Epoch 4, batch 450, loss[loss=0.3194, simple_loss=0.3661, pruned_loss=0.1363, over 19666.00 frames. ], tot_loss[loss=0.311, simple_loss=0.3627, pruned_loss=0.1297, over 3443276.43 frames. ], batch size: 53, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:04:41,827 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:44,742 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 02:04:58,386 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:12,736 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-01 02:05:13,096 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:13,939 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 02:05:40,644 INFO [train.py:903] (1/4) Epoch 4, batch 500, loss[loss=0.2607, simple_loss=0.3295, pruned_loss=0.096, over 19740.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3622, pruned_loss=0.1288, over 3531338.77 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:06:27,513 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.944e+02 7.370e+02 9.144e+02 1.191e+03 3.185e+03, threshold=1.829e+03, percent-clipped=4.0 +2023-04-01 02:06:40,256 INFO [train.py:903] (1/4) Epoch 4, batch 550, loss[loss=0.3487, simple_loss=0.3895, pruned_loss=0.154, over 19539.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.3622, pruned_loss=0.1294, over 3588139.52 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:07:12,079 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2060, 1.1819, 1.8582, 1.4235, 2.7139, 2.2621, 2.8973, 1.0391], + device='cuda:1'), covar=tensor([0.1527, 0.2531, 0.1325, 0.1325, 0.0910, 0.1181, 0.1092, 0.2419], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0471, 0.0437, 0.0396, 0.0512, 0.0413, 0.0596, 0.0421], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:07:17,092 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:23,277 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:37,586 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4966, 4.1275, 2.2993, 3.7010, 1.4357, 3.7898, 3.6510, 3.9368], + device='cuda:1'), covar=tensor([0.0506, 0.0961, 0.2140, 0.0622, 0.3276, 0.0805, 0.0697, 0.0707], + device='cuda:1'), in_proj_covar=tensor([0.0321, 0.0295, 0.0338, 0.0273, 0.0339, 0.0292, 0.0251, 0.0288], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 02:07:39,553 INFO [train.py:903] (1/4) Epoch 4, batch 600, loss[loss=0.3055, simple_loss=0.3648, pruned_loss=0.1231, over 19666.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3631, pruned_loss=0.13, over 3639744.34 frames. ], batch size: 53, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:15,024 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8362, 1.3500, 1.2949, 2.0215, 1.5451, 2.0288, 2.1205, 1.9367], + device='cuda:1'), covar=tensor([0.0812, 0.1231, 0.1336, 0.1081, 0.1155, 0.0756, 0.1024, 0.0724], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0272, 0.0267, 0.0296, 0.0300, 0.0249, 0.0263, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 02:08:19,255 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-01 02:08:23,045 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21121.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:27,137 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.199e+02 8.023e+02 1.001e+03 1.305e+03 2.804e+03, threshold=2.003e+03, percent-clipped=3.0 +2023-04-01 02:08:39,443 INFO [train.py:903] (1/4) Epoch 4, batch 650, loss[loss=0.2815, simple_loss=0.3489, pruned_loss=0.107, over 18130.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.3609, pruned_loss=0.1283, over 3699564.08 frames. ], batch size: 84, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:40,616 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:52,182 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:00,259 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:16,963 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3939, 1.1121, 1.3563, 1.8208, 3.0171, 1.2034, 2.0781, 3.1625], + device='cuda:1'), covar=tensor([0.0376, 0.2451, 0.2158, 0.1322, 0.0485, 0.2101, 0.1074, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0303, 0.0299, 0.0277, 0.0294, 0.0318, 0.0282, 0.0284], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:09:29,085 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21176.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:38,634 INFO [train.py:903] (1/4) Epoch 4, batch 700, loss[loss=0.2961, simple_loss=0.3453, pruned_loss=0.1235, over 19780.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.3603, pruned_loss=0.1276, over 3722213.30 frames. ], batch size: 47, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:26,834 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 7.118e+02 8.929e+02 1.162e+03 2.438e+03, threshold=1.786e+03, percent-clipped=3.0 +2023-04-01 02:10:40,518 INFO [train.py:903] (1/4) Epoch 4, batch 750, loss[loss=0.2666, simple_loss=0.3146, pruned_loss=0.1093, over 19737.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.3601, pruned_loss=0.1275, over 3757347.84 frames. 
], batch size: 47, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:44,231 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7129, 1.3700, 1.1354, 1.4807, 1.3930, 1.4530, 1.2301, 1.5577], + device='cuda:1'), covar=tensor([0.0849, 0.1401, 0.1499, 0.0896, 0.1123, 0.0571, 0.0997, 0.0696], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0374, 0.0279, 0.0247, 0.0313, 0.0259, 0.0262, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:10:48,865 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1483, 1.0523, 1.4954, 1.2419, 1.7894, 1.7674, 1.9350, 0.4626], + device='cuda:1'), covar=tensor([0.1563, 0.2539, 0.1265, 0.1361, 0.0945, 0.1262, 0.0957, 0.2331], + device='cuda:1'), in_proj_covar=tensor([0.0424, 0.0476, 0.0438, 0.0396, 0.0517, 0.0414, 0.0597, 0.0425], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:11:40,042 INFO [train.py:903] (1/4) Epoch 4, batch 800, loss[loss=0.2922, simple_loss=0.3524, pruned_loss=0.116, over 19660.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3597, pruned_loss=0.1271, over 3760622.74 frames. ], batch size: 53, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:11:56,116 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 02:12:25,406 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:30,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.379e+02 8.113e+02 1.001e+03 1.207e+03 2.017e+03, threshold=2.002e+03, percent-clipped=2.0 +2023-04-01 02:12:35,446 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:37,644 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1361, 5.3892, 3.0084, 4.7645, 1.2703, 5.2155, 5.2388, 5.5131], + device='cuda:1'), covar=tensor([0.0353, 0.1040, 0.1707, 0.0611, 0.3898, 0.0621, 0.0526, 0.0541], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0291, 0.0339, 0.0267, 0.0339, 0.0292, 0.0248, 0.0285], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 02:12:41,604 INFO [train.py:903] (1/4) Epoch 4, batch 850, loss[loss=0.2598, simple_loss=0.3203, pruned_loss=0.09969, over 19780.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3583, pruned_loss=0.1258, over 3776017.40 frames. ], batch size: 46, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:12:54,280 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:54,387 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:13:30,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 02:13:39,981 INFO [train.py:903] (1/4) Epoch 4, batch 900, loss[loss=0.3667, simple_loss=0.3967, pruned_loss=0.1684, over 13107.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3591, pruned_loss=0.1263, over 3775275.10 frames. 
], batch size: 138, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:14,506 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:14:28,709 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+02 7.656e+02 9.192e+02 1.106e+03 2.022e+03, threshold=1.838e+03, percent-clipped=1.0 +2023-04-01 02:14:40,651 INFO [train.py:903] (1/4) Epoch 4, batch 950, loss[loss=0.2813, simple_loss=0.3541, pruned_loss=0.1042, over 19705.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3598, pruned_loss=0.1267, over 3781089.90 frames. ], batch size: 59, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:43,011 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 02:15:35,419 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:15:40,656 INFO [train.py:903] (1/4) Epoch 4, batch 1000, loss[loss=0.3392, simple_loss=0.3782, pruned_loss=0.1501, over 19588.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3598, pruned_loss=0.1269, over 3788145.08 frames. ], batch size: 52, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:10,654 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8466, 4.3331, 2.4044, 3.8963, 1.1631, 3.8086, 3.8607, 4.0444], + device='cuda:1'), covar=tensor([0.0458, 0.1093, 0.1997, 0.0671, 0.4056, 0.0892, 0.0702, 0.0832], + device='cuda:1'), in_proj_covar=tensor([0.0326, 0.0298, 0.0345, 0.0275, 0.0347, 0.0299, 0.0257, 0.0290], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 02:16:29,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.436e+02 9.242e+02 1.292e+03 2.692e+03, threshold=1.848e+03, percent-clipped=7.0 +2023-04-01 02:16:33,307 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 02:16:33,595 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:16:40,202 INFO [train.py:903] (1/4) Epoch 4, batch 1050, loss[loss=0.2983, simple_loss=0.3531, pruned_loss=0.1217, over 19603.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3604, pruned_loss=0.127, over 3812027.89 frames. ], batch size: 52, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:41,963 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 02:17:14,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 02:17:40,182 INFO [train.py:903] (1/4) Epoch 4, batch 1100, loss[loss=0.2738, simple_loss=0.3382, pruned_loss=0.1048, over 19779.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3598, pruned_loss=0.1271, over 3805588.89 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:48,981 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-01 02:17:52,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:10,462 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1416, 0.9777, 1.2680, 0.8693, 2.2886, 2.9485, 2.8803, 3.2113], + device='cuda:1'), covar=tensor([0.1733, 0.4142, 0.4108, 0.2420, 0.0570, 0.0232, 0.0324, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0278, 0.0319, 0.0261, 0.0190, 0.0109, 0.0198, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:18:15,012 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:30,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+02 7.921e+02 9.622e+02 1.275e+03 2.981e+03, threshold=1.924e+03, percent-clipped=6.0 +2023-04-01 02:18:36,426 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21631.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:43,698 INFO [train.py:903] (1/4) Epoch 4, batch 1150, loss[loss=0.3015, simple_loss=0.3618, pruned_loss=0.1206, over 18763.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3623, pruned_loss=0.1283, over 3807504.44 frames. ], batch size: 74, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:30,211 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 02:19:30,749 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:45,550 INFO [train.py:903] (1/4) Epoch 4, batch 1200, loss[loss=0.285, simple_loss=0.3501, pruned_loss=0.1099, over 19353.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3605, pruned_loss=0.1268, over 3795926.03 frames. ], batch size: 66, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:51,427 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:20:14,901 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 02:20:17,275 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1890, 0.9771, 1.3971, 1.1085, 2.4173, 3.4022, 3.2213, 3.6923], + device='cuda:1'), covar=tensor([0.1682, 0.3926, 0.3725, 0.2250, 0.0547, 0.0134, 0.0293, 0.0101], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0279, 0.0323, 0.0262, 0.0193, 0.0108, 0.0201, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:20:35,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+02 7.515e+02 9.038e+02 1.111e+03 2.454e+03, threshold=1.808e+03, percent-clipped=2.0 +2023-04-01 02:20:45,434 INFO [train.py:903] (1/4) Epoch 4, batch 1250, loss[loss=0.3548, simple_loss=0.4005, pruned_loss=0.1546, over 19671.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3608, pruned_loss=0.1271, over 3810920.16 frames. ], batch size: 58, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:39,800 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:44,769 INFO [train.py:903] (1/4) Epoch 4, batch 1300, loss[loss=0.2508, simple_loss=0.3091, pruned_loss=0.09627, over 19779.00 frames. 
], tot_loss[loss=0.3087, simple_loss=0.362, pruned_loss=0.1277, over 3808961.52 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:45,186 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:49,611 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:08,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 02:22:10,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:15,769 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:34,650 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 7.410e+02 9.034e+02 1.121e+03 1.849e+03, threshold=1.807e+03, percent-clipped=1.0 +2023-04-01 02:22:45,117 INFO [train.py:903] (1/4) Epoch 4, batch 1350, loss[loss=0.3451, simple_loss=0.3898, pruned_loss=0.1502, over 19652.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3603, pruned_loss=0.1267, over 3819611.65 frames. ], batch size: 55, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:23:04,109 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:23,939 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:33,958 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:45,406 INFO [train.py:903] (1/4) Epoch 4, batch 1400, loss[loss=0.3878, simple_loss=0.4197, pruned_loss=0.1779, over 19133.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3593, pruned_loss=0.1267, over 3820459.34 frames. ], batch size: 69, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:24:35,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.358e+02 7.364e+02 9.517e+02 1.310e+03 2.254e+03, threshold=1.903e+03, percent-clipped=6.0 +2023-04-01 02:24:42,745 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 02:24:46,194 INFO [train.py:903] (1/4) Epoch 4, batch 1450, loss[loss=0.2563, simple_loss=0.3118, pruned_loss=0.1004, over 19733.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3589, pruned_loss=0.1265, over 3816271.78 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:24:48,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-01 02:24:54,260 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9986, 1.1632, 1.1907, 1.9001, 1.6285, 1.9452, 2.2953, 1.7150], + device='cuda:1'), covar=tensor([0.0713, 0.1219, 0.1432, 0.1238, 0.1198, 0.0834, 0.0900, 0.0832], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0269, 0.0267, 0.0302, 0.0303, 0.0245, 0.0265, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 02:25:13,215 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:34,253 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:45,922 INFO [train.py:903] (1/4) Epoch 4, batch 1500, loss[loss=0.3497, simple_loss=0.3967, pruned_loss=0.1514, over 19125.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3599, pruned_loss=0.1276, over 3823226.46 frames. ], batch size: 69, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:49,637 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:26:36,547 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+02 6.805e+02 9.241e+02 1.170e+03 2.581e+03, threshold=1.848e+03, percent-clipped=2.0 +2023-04-01 02:26:47,125 INFO [train.py:903] (1/4) Epoch 4, batch 1550, loss[loss=0.3019, simple_loss=0.3647, pruned_loss=0.1195, over 19617.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.358, pruned_loss=0.1261, over 3822710.46 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:01,643 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:08,632 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-01 02:27:19,321 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:30,468 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:32,771 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:49,224 INFO [train.py:903] (1/4) Epoch 4, batch 1600, loss[loss=0.2579, simple_loss=0.3195, pruned_loss=0.09819, over 19611.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3582, pruned_loss=0.1266, over 3829663.80 frames. ], batch size: 50, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:50,752 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22086.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:55,259 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:10,563 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 02:28:37,641 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:39,601 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+02 8.565e+02 1.081e+03 1.346e+03 3.673e+03, threshold=2.162e+03, percent-clipped=6.0 +2023-04-01 02:28:44,589 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5899, 1.2290, 1.3219, 1.8592, 1.5436, 1.7115, 2.0449, 1.6274], + device='cuda:1'), covar=tensor([0.0789, 0.1180, 0.1220, 0.1000, 0.1039, 0.0801, 0.0812, 0.0718], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0269, 0.0265, 0.0301, 0.0300, 0.0245, 0.0265, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 02:28:48,843 INFO [train.py:903] (1/4) Epoch 4, batch 1650, loss[loss=0.3224, simple_loss=0.3782, pruned_loss=0.1333, over 19673.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3584, pruned_loss=0.1264, over 3827250.67 frames. ], batch size: 60, lr: 2.05e-02, grad_scale: 4.0 +2023-04-01 02:29:04,258 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22148.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:29:12,053 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9064, 2.3225, 2.2453, 2.9662, 2.7076, 2.5120, 2.5410, 2.9862], + device='cuda:1'), covar=tensor([0.0690, 0.1647, 0.1177, 0.0710, 0.0993, 0.0413, 0.0775, 0.0459], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0368, 0.0280, 0.0245, 0.0301, 0.0251, 0.0260, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:29:18,652 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3019, 1.0720, 1.5088, 1.1148, 2.6752, 3.5302, 3.4097, 3.7436], + device='cuda:1'), covar=tensor([0.1320, 0.2815, 0.2730, 0.1924, 0.0387, 0.0111, 0.0200, 0.0102], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0278, 0.0322, 0.0261, 0.0193, 0.0111, 0.0200, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:29:49,248 INFO [train.py:903] (1/4) Epoch 4, batch 1700, loss[loss=0.3227, simple_loss=0.3743, pruned_loss=0.1355, over 19278.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3588, pruned_loss=0.1264, over 3828331.83 frames. ], batch size: 66, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:00,383 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22194.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:23,322 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22212.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:25,807 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6544, 1.7970, 1.7821, 2.2439, 4.3072, 1.2300, 2.3385, 4.1463], + device='cuda:1'), covar=tensor([0.0226, 0.1951, 0.1996, 0.1191, 0.0331, 0.1939, 0.1092, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0298, 0.0299, 0.0272, 0.0284, 0.0311, 0.0270, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:30:27,862 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 02:30:40,332 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.920e+02 6.276e+02 7.753e+02 9.050e+02 1.909e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 02:30:48,105 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22233.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:49,927 INFO [train.py:903] (1/4) Epoch 4, batch 1750, loss[loss=0.2958, simple_loss=0.3376, pruned_loss=0.127, over 19852.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3599, pruned_loss=0.1267, over 3815582.66 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:57,753 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:15,616 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22255.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:50,782 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4792, 2.4243, 1.4573, 1.8084, 2.0931, 1.2023, 1.1729, 1.8827], + device='cuda:1'), covar=tensor([0.0924, 0.0402, 0.0884, 0.0494, 0.0403, 0.0928, 0.0751, 0.0385], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0242, 0.0309, 0.0245, 0.0212, 0.0306, 0.0278, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 02:31:52,438 INFO [train.py:903] (1/4) Epoch 4, batch 1800, loss[loss=0.3218, simple_loss=0.3689, pruned_loss=0.1373, over 19652.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3598, pruned_loss=0.1266, over 3829422.64 frames. ], batch size: 53, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:32:43,154 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+02 7.313e+02 8.961e+02 1.128e+03 3.443e+03, threshold=1.792e+03, percent-clipped=8.0 +2023-04-01 02:32:43,415 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:44,439 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:46,329 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 02:32:48,652 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22332.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:52,003 INFO [train.py:903] (1/4) Epoch 4, batch 1850, loss[loss=0.257, simple_loss=0.3131, pruned_loss=0.1005, over 19737.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3592, pruned_loss=0.1264, over 3830033.85 frames. ], batch size: 45, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:33:00,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. 
limit=2.0 +2023-04-01 02:33:04,821 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:13,957 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:15,100 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0827, 1.1683, 1.4604, 0.4993, 2.3218, 2.4203, 2.0987, 2.5600], + device='cuda:1'), covar=tensor([0.1191, 0.2725, 0.2606, 0.2192, 0.0378, 0.0164, 0.0365, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0278, 0.0322, 0.0259, 0.0191, 0.0109, 0.0199, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:33:24,813 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 02:33:36,225 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:37,407 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2273, 1.2045, 2.0366, 1.5154, 3.1789, 2.8345, 3.4496, 1.2886], + device='cuda:1'), covar=tensor([0.1735, 0.2952, 0.1538, 0.1339, 0.1079, 0.1160, 0.1353, 0.2656], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0471, 0.0441, 0.0395, 0.0514, 0.0410, 0.0592, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:33:39,600 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0288, 1.0353, 1.3347, 1.1058, 1.5987, 1.5588, 1.6875, 0.5275], + device='cuda:1'), covar=tensor([0.1500, 0.2448, 0.1269, 0.1265, 0.0930, 0.1215, 0.0898, 0.2169], + device='cuda:1'), in_proj_covar=tensor([0.0425, 0.0470, 0.0440, 0.0394, 0.0513, 0.0409, 0.0590, 0.0421], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:33:51,847 INFO [train.py:903] (1/4) Epoch 4, batch 1900, loss[loss=0.3385, simple_loss=0.3912, pruned_loss=0.1429, over 19788.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3583, pruned_loss=0.1256, over 3840243.56 frames. ], batch size: 56, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:34:09,607 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 02:34:14,802 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 02:34:15,074 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:34:36,874 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-01 02:34:39,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 02:34:42,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.044e+02 7.549e+02 9.520e+02 1.192e+03 3.384e+03, threshold=1.904e+03, percent-clipped=5.0 +2023-04-01 02:34:51,945 INFO [train.py:903] (1/4) Epoch 4, batch 1950, loss[loss=0.2625, simple_loss=0.3139, pruned_loss=0.1056, over 19747.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3569, pruned_loss=0.1253, over 3830997.78 frames. 
], batch size: 46, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:34:56,976 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.20 vs. limit=5.0 +2023-04-01 02:35:08,568 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:35:53,485 INFO [train.py:903] (1/4) Epoch 4, batch 2000, loss[loss=0.3741, simple_loss=0.4043, pruned_loss=0.1719, over 12982.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.358, pruned_loss=0.1257, over 3823846.38 frames. ], batch size: 136, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:01,813 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22492.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:06,598 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:37,353 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:45,511 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+02 6.735e+02 8.799e+02 1.102e+03 2.294e+03, threshold=1.760e+03, percent-clipped=1.0 +2023-04-01 02:36:46,719 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 02:36:54,492 INFO [train.py:903] (1/4) Epoch 4, batch 2050, loss[loss=0.4219, simple_loss=0.4537, pruned_loss=0.1951, over 19344.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3577, pruned_loss=0.1251, over 3803486.53 frames. ], batch size: 66, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:58,157 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:04,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 02:37:06,032 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 02:37:27,447 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 02:37:40,490 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 02:37:45,363 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22577.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:52,332 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:54,190 INFO [train.py:903] (1/4) Epoch 4, batch 2100, loss[loss=0.2864, simple_loss=0.3454, pruned_loss=0.1137, over 19666.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3582, pruned_loss=0.1254, over 3817417.84 frames. ], batch size: 53, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:38:10,699 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:21,642 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-01 02:38:22,048 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:23,350 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:42,451 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 02:38:44,807 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.772e+02 6.954e+02 8.861e+02 1.126e+03 2.028e+03, threshold=1.772e+03, percent-clipped=3.0 +2023-04-01 02:38:52,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5602, 1.9299, 1.9701, 2.2994, 1.8014, 2.6730, 2.5280, 2.6919], + device='cuda:1'), covar=tensor([0.0632, 0.0900, 0.0989, 0.1040, 0.1099, 0.0603, 0.0913, 0.0493], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0264, 0.0263, 0.0298, 0.0298, 0.0246, 0.0262, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 02:38:53,754 INFO [train.py:903] (1/4) Epoch 4, batch 2150, loss[loss=0.3033, simple_loss=0.3603, pruned_loss=0.1232, over 19342.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3588, pruned_loss=0.1258, over 3821948.33 frames. ], batch size: 66, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:39:17,733 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22653.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:22,203 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22657.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:56,340 INFO [train.py:903] (1/4) Epoch 4, batch 2200, loss[loss=0.3194, simple_loss=0.3812, pruned_loss=0.1288, over 19290.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3594, pruned_loss=0.1264, over 3792780.96 frames. ], batch size: 66, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:40:05,420 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:18,048 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:30,198 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:46,719 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1823, 0.9555, 0.9831, 1.4526, 1.2555, 1.3520, 1.4949, 1.1956], + device='cuda:1'), covar=tensor([0.0697, 0.0906, 0.0960, 0.0672, 0.0747, 0.0675, 0.0705, 0.0614], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0264, 0.0264, 0.0298, 0.0296, 0.0248, 0.0263, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 02:40:47,464 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+02 6.866e+02 8.417e+02 1.098e+03 2.160e+03, threshold=1.683e+03, percent-clipped=4.0 +2023-04-01 02:40:49,019 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:56,701 INFO [train.py:903] (1/4) Epoch 4, batch 2250, loss[loss=0.2831, simple_loss=0.3516, pruned_loss=0.1073, over 19564.00 frames. ], tot_loss[loss=0.3074, simple_loss=0.3604, pruned_loss=0.1272, over 3791224.42 frames. 
], batch size: 61, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:41:10,274 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:41:57,019 INFO [train.py:903] (1/4) Epoch 4, batch 2300, loss[loss=0.3084, simple_loss=0.3618, pruned_loss=0.1275, over 19592.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3597, pruned_loss=0.1262, over 3804934.06 frames. ], batch size: 52, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:42:09,275 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 02:42:31,170 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22812.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:42:47,875 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+02 7.223e+02 9.387e+02 1.200e+03 1.860e+03, threshold=1.877e+03, percent-clipped=8.0 +2023-04-01 02:42:56,885 INFO [train.py:903] (1/4) Epoch 4, batch 2350, loss[loss=0.2913, simple_loss=0.3377, pruned_loss=0.1224, over 19715.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3596, pruned_loss=0.1267, over 3806734.61 frames. ], batch size: 45, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:43:06,745 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:30,995 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:32,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:38,744 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 02:43:54,379 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 02:43:57,664 INFO [train.py:903] (1/4) Epoch 4, batch 2400, loss[loss=0.2977, simple_loss=0.3515, pruned_loss=0.1219, over 19681.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3603, pruned_loss=0.1271, over 3803410.23 frames. ], batch size: 53, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:44:03,297 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:27,690 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:49,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+02 7.425e+02 9.466e+02 1.182e+03 3.064e+03, threshold=1.893e+03, percent-clipped=2.0 +2023-04-01 02:44:56,186 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 02:44:58,858 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:59,530 INFO [train.py:903] (1/4) Epoch 4, batch 2450, loss[loss=0.3978, simple_loss=0.415, pruned_loss=0.1903, over 13911.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3605, pruned_loss=0.1272, over 3795895.85 frames. 
], batch size: 137, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:45:14,076 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:31,525 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1038, 1.1648, 1.4225, 1.2381, 1.7954, 1.7418, 1.9328, 0.5352], + device='cuda:1'), covar=tensor([0.1409, 0.2338, 0.1306, 0.1179, 0.0857, 0.1141, 0.0815, 0.2189], + device='cuda:1'), in_proj_covar=tensor([0.0425, 0.0477, 0.0449, 0.0398, 0.0516, 0.0416, 0.0599, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 02:45:40,908 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:44,372 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:57,747 INFO [train.py:903] (1/4) Epoch 4, batch 2500, loss[loss=0.3038, simple_loss=0.3567, pruned_loss=0.1254, over 19663.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3619, pruned_loss=0.1282, over 3809682.93 frames. ], batch size: 53, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:46:09,322 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22995.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:16,262 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:35,134 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3482, 1.1614, 1.4212, 1.1883, 2.6491, 3.3820, 3.3036, 3.6894], + device='cuda:1'), covar=tensor([0.1366, 0.2793, 0.2893, 0.2015, 0.0458, 0.0150, 0.0192, 0.0102], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0282, 0.0323, 0.0260, 0.0194, 0.0109, 0.0200, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:46:48,542 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 7.308e+02 8.619e+02 1.083e+03 2.930e+03, threshold=1.724e+03, percent-clipped=3.0 +2023-04-01 02:46:57,735 INFO [train.py:903] (1/4) Epoch 4, batch 2550, loss[loss=0.3312, simple_loss=0.3852, pruned_loss=0.1386, over 19167.00 frames. ], tot_loss[loss=0.3074, simple_loss=0.3607, pruned_loss=0.127, over 3801071.99 frames. ], batch size: 69, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:47:47,719 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6594, 1.4216, 1.9486, 1.7282, 2.8875, 4.5525, 4.4987, 4.9483], + device='cuda:1'), covar=tensor([0.1287, 0.2598, 0.2472, 0.1769, 0.0443, 0.0091, 0.0124, 0.0061], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0279, 0.0319, 0.0261, 0.0193, 0.0107, 0.0200, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 02:47:49,710 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 02:47:57,382 INFO [train.py:903] (1/4) Epoch 4, batch 2600, loss[loss=0.3734, simple_loss=0.4045, pruned_loss=0.1712, over 12720.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3596, pruned_loss=0.1265, over 3807996.36 frames. 
], batch size: 136, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:48:33,813 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23116.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:36,237 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:46,620 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:48,301 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+02 6.946e+02 8.555e+02 1.074e+03 2.756e+03, threshold=1.711e+03, percent-clipped=6.0 +2023-04-01 02:48:58,589 INFO [train.py:903] (1/4) Epoch 4, batch 2650, loss[loss=0.2799, simple_loss=0.3499, pruned_loss=0.105, over 19702.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3593, pruned_loss=0.1265, over 3797446.14 frames. ], batch size: 59, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:49:08,268 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:17,081 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 02:49:22,930 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:58,046 INFO [train.py:903] (1/4) Epoch 4, batch 2700, loss[loss=0.3071, simple_loss=0.3593, pruned_loss=0.1275, over 17524.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3578, pruned_loss=0.1251, over 3802509.33 frames. ], batch size: 101, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:00,483 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23187.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:50:48,019 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.628e+02 7.002e+02 8.947e+02 1.091e+03 2.361e+03, threshold=1.789e+03, percent-clipped=7.0 +2023-04-01 02:50:57,145 INFO [train.py:903] (1/4) Epoch 4, batch 2750, loss[loss=0.2357, simple_loss=0.3004, pruned_loss=0.08547, over 19776.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.357, pruned_loss=0.125, over 3803203.66 frames. ], batch size: 47, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:57,417 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:11,540 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:41,024 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:56,445 INFO [train.py:903] (1/4) Epoch 4, batch 2800, loss[loss=0.3462, simple_loss=0.3988, pruned_loss=0.1468, over 19614.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3591, pruned_loss=0.1259, over 3798787.10 frames. ], batch size: 57, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:52:17,019 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:52:31,864 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. 
limit=2.0 +2023-04-01 02:52:45,199 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+02 7.909e+02 1.044e+03 1.347e+03 2.323e+03, threshold=2.087e+03, percent-clipped=7.0 +2023-04-01 02:52:56,802 INFO [train.py:903] (1/4) Epoch 4, batch 2850, loss[loss=0.3547, simple_loss=0.3958, pruned_loss=0.1568, over 19678.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3587, pruned_loss=0.1255, over 3802863.06 frames. ], batch size: 53, lr: 1.99e-02, grad_scale: 8.0 +2023-04-01 02:52:57,426 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.14 vs. limit=5.0 +2023-04-01 02:53:41,811 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23372.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:53:56,270 INFO [train.py:903] (1/4) Epoch 4, batch 2900, loss[loss=0.2873, simple_loss=0.3487, pruned_loss=0.1129, over 19672.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3573, pruned_loss=0.1246, over 3805925.29 frames. ], batch size: 53, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:53:56,290 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 02:54:10,043 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:54:44,138 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 02:54:45,629 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 7.852e+02 1.023e+03 1.284e+03 2.319e+03, threshold=2.047e+03, percent-clipped=2.0 +2023-04-01 02:54:53,633 INFO [train.py:903] (1/4) Epoch 4, batch 2950, loss[loss=0.3302, simple_loss=0.3864, pruned_loss=0.137, over 19516.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3577, pruned_loss=0.125, over 3813601.06 frames. ], batch size: 64, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:03,494 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 02:55:15,417 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 02:55:21,784 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1506, 2.6861, 2.1136, 2.1236, 1.8850, 2.3435, 0.7604, 2.0692], + device='cuda:1'), covar=tensor([0.0301, 0.0213, 0.0201, 0.0324, 0.0448, 0.0353, 0.0506, 0.0433], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0261, 0.0261, 0.0288, 0.0350, 0.0276, 0.0266, 0.0283], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 02:55:35,111 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:55:52,223 INFO [train.py:903] (1/4) Epoch 4, batch 3000, loss[loss=0.3131, simple_loss=0.3566, pruned_loss=0.1348, over 19844.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3571, pruned_loss=0.124, over 3821486.26 frames. ], batch size: 52, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:52,223 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 02:56:05,134 INFO [train.py:937] (1/4) Epoch 4, validation: loss=0.2145, simple_loss=0.3118, pruned_loss=0.05862, over 944034.00 frames. +2023-04-01 02:56:05,135 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 02:56:09,790 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 02:56:32,401 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,155 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23527.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+02 6.234e+02 7.977e+02 1.046e+03 2.333e+03, threshold=1.595e+03, percent-clipped=2.0 +2023-04-01 02:57:05,034 INFO [train.py:903] (1/4) Epoch 4, batch 3050, loss[loss=0.2866, simple_loss=0.3475, pruned_loss=0.1128, over 19620.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3578, pruned_loss=0.1245, over 3819668.45 frames. ], batch size: 50, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:57:26,951 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23552.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:28,441 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 02:57:33,681 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:53,409 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.92 vs. limit=5.0 +2023-04-01 02:57:58,299 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:04,150 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:06,131 INFO [train.py:903] (1/4) Epoch 4, batch 3100, loss[loss=0.2497, simple_loss=0.3097, pruned_loss=0.09491, over 19751.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3579, pruned_loss=0.1246, over 3801279.16 frames. ], batch size: 45, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:58:06,480 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:13,285 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:55,942 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.915e+02 8.546e+02 1.092e+03 2.878e+03, threshold=1.709e+03, percent-clipped=7.0 +2023-04-01 02:59:03,920 INFO [train.py:903] (1/4) Epoch 4, batch 3150, loss[loss=0.35, simple_loss=0.3949, pruned_loss=0.1526, over 18438.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3586, pruned_loss=0.1253, over 3803190.89 frames. ], batch size: 84, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:59:28,008 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 03:00:02,534 INFO [train.py:903] (1/4) Epoch 4, batch 3200, loss[loss=0.3138, simple_loss=0.3679, pruned_loss=0.1298, over 19504.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3595, pruned_loss=0.1264, over 3804308.66 frames. 
], batch size: 64, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:00:13,168 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23694.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:29,436 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:53,663 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 7.379e+02 9.197e+02 1.143e+03 1.957e+03, threshold=1.839e+03, percent-clipped=5.0 +2023-04-01 03:01:02,115 INFO [train.py:903] (1/4) Epoch 4, batch 3250, loss[loss=0.2918, simple_loss=0.3567, pruned_loss=0.1135, over 19658.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3584, pruned_loss=0.1252, over 3807372.65 frames. ], batch size: 55, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:01,903 INFO [train.py:903] (1/4) Epoch 4, batch 3300, loss[loss=0.2988, simple_loss=0.3507, pruned_loss=0.1234, over 19585.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3588, pruned_loss=0.1257, over 3809317.85 frames. ], batch size: 52, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:04,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 03:02:29,554 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0332, 1.0138, 1.4531, 1.1468, 1.7727, 1.6868, 1.9699, 0.6329], + device='cuda:1'), covar=tensor([0.1314, 0.2213, 0.1139, 0.1129, 0.0787, 0.1019, 0.0782, 0.1981], + device='cuda:1'), in_proj_covar=tensor([0.0433, 0.0485, 0.0452, 0.0404, 0.0523, 0.0428, 0.0609, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:02:54,104 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 7.772e+02 9.614e+02 1.210e+03 2.492e+03, threshold=1.923e+03, percent-clipped=5.0 +2023-04-01 03:03:02,118 INFO [train.py:903] (1/4) Epoch 4, batch 3350, loss[loss=0.2466, simple_loss=0.3025, pruned_loss=0.09535, over 19799.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.359, pruned_loss=0.1257, over 3805672.52 frames. ], batch size: 48, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:03:09,292 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:19,201 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:40,413 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:59,993 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2321, 2.9519, 1.9562, 2.8022, 1.0331, 2.7863, 2.6822, 2.7351], + device='cuda:1'), covar=tensor([0.0927, 0.1246, 0.2021, 0.0877, 0.3555, 0.1086, 0.0857, 0.1250], + device='cuda:1'), in_proj_covar=tensor([0.0329, 0.0295, 0.0345, 0.0279, 0.0345, 0.0295, 0.0258, 0.0294], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:04:01,842 INFO [train.py:903] (1/4) Epoch 4, batch 3400, loss[loss=0.3259, simple_loss=0.3821, pruned_loss=0.1349, over 19344.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3571, pruned_loss=0.1244, over 3814094.99 frames. 
], batch size: 70, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:04:53,666 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.081e+02 7.350e+02 9.318e+02 1.202e+03 2.145e+03, threshold=1.864e+03, percent-clipped=3.0 +2023-04-01 03:05:01,726 INFO [train.py:903] (1/4) Epoch 4, batch 3450, loss[loss=0.2649, simple_loss=0.3248, pruned_loss=0.1025, over 16045.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3584, pruned_loss=0.1254, over 3796541.76 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:05:01,750 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 03:05:16,206 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-01 03:05:22,649 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:36,134 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:39,402 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:50,599 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:04,980 INFO [train.py:903] (1/4) Epoch 4, batch 3500, loss[loss=0.2975, simple_loss=0.3496, pruned_loss=0.1226, over 19398.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3594, pruned_loss=0.126, over 3807929.05 frames. ], batch size: 48, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:06:07,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:14,718 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8534, 1.4893, 1.3545, 1.9668, 1.3896, 2.0576, 2.1804, 1.8577], + device='cuda:1'), covar=tensor([0.0742, 0.1039, 0.1246, 0.1020, 0.1144, 0.0735, 0.0952, 0.0650], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0264, 0.0260, 0.0293, 0.0296, 0.0249, 0.0256, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 03:06:58,144 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+02 7.196e+02 8.612e+02 1.120e+03 2.630e+03, threshold=1.722e+03, percent-clipped=5.0 +2023-04-01 03:07:06,280 INFO [train.py:903] (1/4) Epoch 4, batch 3550, loss[loss=0.2514, simple_loss=0.3226, pruned_loss=0.09011, over 19845.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.358, pruned_loss=0.1253, over 3822159.89 frames. ], batch size: 52, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:07:28,433 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:08:05,263 INFO [train.py:903] (1/4) Epoch 4, batch 3600, loss[loss=0.3296, simple_loss=0.3887, pruned_loss=0.1353, over 18879.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.358, pruned_loss=0.1257, over 3819027.33 frames. 
], batch size: 74, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:08:56,914 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+02 7.148e+02 8.733e+02 1.077e+03 2.339e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 03:09:04,848 INFO [train.py:903] (1/4) Epoch 4, batch 3650, loss[loss=0.2814, simple_loss=0.3301, pruned_loss=0.1164, over 19052.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3564, pruned_loss=0.1243, over 3824480.01 frames. ], batch size: 42, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:10:05,501 INFO [train.py:903] (1/4) Epoch 4, batch 3700, loss[loss=0.3285, simple_loss=0.3776, pruned_loss=0.1397, over 19590.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3563, pruned_loss=0.1238, over 3842258.37 frames. ], batch size: 61, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:10:48,438 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:10:58,949 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.207e+02 9.640e+02 1.134e+03 2.323e+03, threshold=1.928e+03, percent-clipped=6.0 +2023-04-01 03:11:07,279 INFO [train.py:903] (1/4) Epoch 4, batch 3750, loss[loss=0.3307, simple_loss=0.3825, pruned_loss=0.1394, over 19728.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3576, pruned_loss=0.1246, over 3832197.68 frames. ], batch size: 63, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:11:20,290 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:12:07,331 INFO [train.py:903] (1/4) Epoch 4, batch 3800, loss[loss=0.2511, simple_loss=0.3169, pruned_loss=0.09268, over 19804.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3575, pruned_loss=0.1247, over 3815228.02 frames. ], batch size: 48, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:12:38,479 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 03:13:00,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.239e+02 7.507e+02 9.076e+02 1.248e+03 3.254e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-04-01 03:13:07,202 INFO [train.py:903] (1/4) Epoch 4, batch 3850, loss[loss=0.2678, simple_loss=0.3197, pruned_loss=0.108, over 19755.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3576, pruned_loss=0.1243, over 3817424.32 frames. ], batch size: 45, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:13:38,775 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6795, 3.0899, 3.1513, 3.1420, 1.1892, 2.8359, 2.6236, 2.8317], + device='cuda:1'), covar=tensor([0.0977, 0.0646, 0.0734, 0.0611, 0.3603, 0.0488, 0.0608, 0.1389], + device='cuda:1'), in_proj_covar=tensor([0.0445, 0.0392, 0.0532, 0.0423, 0.0535, 0.0307, 0.0347, 0.0511], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 03:14:06,612 INFO [train.py:903] (1/4) Epoch 4, batch 3900, loss[loss=0.2631, simple_loss=0.3169, pruned_loss=0.1046, over 19406.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3568, pruned_loss=0.1238, over 3814376.07 frames. ], batch size: 48, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:14:25,893 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:14:28,916 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. 
limit=5.0 +2023-04-01 03:14:51,401 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 03:15:00,804 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+02 8.238e+02 9.729e+02 1.230e+03 4.971e+03, threshold=1.946e+03, percent-clipped=9.0 +2023-04-01 03:15:09,420 INFO [train.py:903] (1/4) Epoch 4, batch 3950, loss[loss=0.3158, simple_loss=0.381, pruned_loss=0.1253, over 19688.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.356, pruned_loss=0.1234, over 3815334.52 frames. ], batch size: 59, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:15:17,176 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 03:15:37,402 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.48 vs. limit=5.0 +2023-04-01 03:16:10,710 INFO [train.py:903] (1/4) Epoch 4, batch 4000, loss[loss=0.3106, simple_loss=0.366, pruned_loss=0.1276, over 19770.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3557, pruned_loss=0.1232, over 3816740.60 frames. ], batch size: 54, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:16:42,943 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1766, 1.1054, 1.5259, 0.8609, 2.4328, 2.8911, 2.6801, 3.0622], + device='cuda:1'), covar=tensor([0.1340, 0.3043, 0.2904, 0.1984, 0.0420, 0.0133, 0.0291, 0.0139], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0284, 0.0327, 0.0259, 0.0193, 0.0108, 0.0206, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 03:16:45,808 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24514.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:16:58,866 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 03:17:03,420 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.288e+02 6.850e+02 8.525e+02 1.041e+03 2.187e+03, threshold=1.705e+03, percent-clipped=1.0 +2023-04-01 03:17:09,917 INFO [train.py:903] (1/4) Epoch 4, batch 4050, loss[loss=0.3218, simple_loss=0.3737, pruned_loss=0.1349, over 19574.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3547, pruned_loss=0.1225, over 3827252.70 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:17:47,506 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24565.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:02,830 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 03:18:10,047 INFO [train.py:903] (1/4) Epoch 4, batch 4100, loss[loss=0.2591, simple_loss=0.3248, pruned_loss=0.09671, over 19622.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3541, pruned_loss=0.1219, over 3834943.85 frames. 
], batch size: 50, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:18:12,612 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:19,369 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8724, 1.2923, 0.9690, 1.0267, 1.1575, 0.8371, 0.6464, 1.2169], + device='cuda:1'), covar=tensor([0.0425, 0.0458, 0.0798, 0.0357, 0.0407, 0.0873, 0.0581, 0.0348], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0249, 0.0313, 0.0239, 0.0218, 0.0306, 0.0278, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:18:49,633 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 03:19:04,403 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+02 6.813e+02 8.989e+02 1.047e+03 2.179e+03, threshold=1.798e+03, percent-clipped=2.0 +2023-04-01 03:19:12,696 INFO [train.py:903] (1/4) Epoch 4, batch 4150, loss[loss=0.3121, simple_loss=0.3684, pruned_loss=0.1279, over 19412.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3547, pruned_loss=0.1225, over 3825261.68 frames. ], batch size: 70, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:12,643 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0819, 2.0080, 2.3078, 2.8026, 2.1151, 2.8836, 2.7192, 2.9197], + device='cuda:1'), covar=tensor([0.0511, 0.0948, 0.0872, 0.0982, 0.1065, 0.0594, 0.0983, 0.0455], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0264, 0.0255, 0.0294, 0.0296, 0.0243, 0.0251, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 03:20:13,431 INFO [train.py:903] (1/4) Epoch 4, batch 4200, loss[loss=0.2457, simple_loss=0.3211, pruned_loss=0.08517, over 19595.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3547, pruned_loss=0.1222, over 3824500.62 frames. ], batch size: 52, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:19,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 03:21:05,880 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 7.220e+02 8.850e+02 1.090e+03 2.101e+03, threshold=1.770e+03, percent-clipped=3.0 +2023-04-01 03:21:12,813 INFO [train.py:903] (1/4) Epoch 4, batch 4250, loss[loss=0.3775, simple_loss=0.3989, pruned_loss=0.178, over 13450.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3548, pruned_loss=0.1224, over 3828460.99 frames. ], batch size: 136, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:21:29,767 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 03:21:41,557 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 03:21:45,258 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5941, 1.1622, 1.4641, 1.7637, 2.9011, 1.1177, 2.1774, 3.1556], + device='cuda:1'), covar=tensor([0.0472, 0.2904, 0.2580, 0.1466, 0.0735, 0.2319, 0.1191, 0.0483], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0309, 0.0303, 0.0279, 0.0298, 0.0315, 0.0283, 0.0288], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:21:56,461 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:10,841 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24783.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:12,642 INFO [train.py:903] (1/4) Epoch 4, batch 4300, loss[loss=0.3026, simple_loss=0.3673, pruned_loss=0.119, over 19480.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3556, pruned_loss=0.1232, over 3821928.35 frames. ], batch size: 64, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:22:26,972 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:23:06,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.009e+02 7.093e+02 8.869e+02 1.163e+03 2.104e+03, threshold=1.774e+03, percent-clipped=1.0 +2023-04-01 03:23:08,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 03:23:14,529 INFO [train.py:903] (1/4) Epoch 4, batch 4350, loss[loss=0.3306, simple_loss=0.3809, pruned_loss=0.1402, over 19130.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3558, pruned_loss=0.1231, over 3831911.24 frames. ], batch size: 69, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:23:52,214 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:23:58,049 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1379, 1.2046, 1.8618, 1.2917, 2.4533, 2.0654, 2.6672, 1.0385], + device='cuda:1'), covar=tensor([0.1832, 0.2894, 0.1529, 0.1692, 0.1247, 0.1462, 0.1462, 0.2708], + device='cuda:1'), in_proj_covar=tensor([0.0437, 0.0485, 0.0458, 0.0409, 0.0532, 0.0427, 0.0608, 0.0433], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:24:15,080 INFO [train.py:903] (1/4) Epoch 4, batch 4400, loss[loss=0.2893, simple_loss=0.3583, pruned_loss=0.1101, over 18764.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3544, pruned_loss=0.1222, over 3828880.67 frames. ], batch size: 74, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:24:40,905 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 03:24:44,308 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:50,766 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-01 03:25:09,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.331e+02 8.078e+02 9.889e+02 1.280e+03 3.768e+03, threshold=1.978e+03, percent-clipped=10.0 +2023-04-01 03:25:12,015 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:25:16,496 INFO [train.py:903] (1/4) Epoch 4, batch 4450, loss[loss=0.2829, simple_loss=0.3418, pruned_loss=0.112, over 19615.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3562, pruned_loss=0.124, over 3817008.43 frames. ], batch size: 50, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:25:49,039 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2862, 2.2997, 2.0206, 3.4660, 2.2891, 3.6163, 3.4601, 2.2434], + device='cuda:1'), covar=tensor([0.1610, 0.1284, 0.0688, 0.0811, 0.1626, 0.0391, 0.0949, 0.1019], + device='cuda:1'), in_proj_covar=tensor([0.0569, 0.0550, 0.0515, 0.0714, 0.0611, 0.0461, 0.0620, 0.0531], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:26:17,087 INFO [train.py:903] (1/4) Epoch 4, batch 4500, loss[loss=0.327, simple_loss=0.3725, pruned_loss=0.1407, over 19675.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3554, pruned_loss=0.1236, over 3801740.25 frames. ], batch size: 53, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:49,247 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1744, 1.2367, 1.5801, 1.2973, 2.2516, 2.0180, 2.4016, 0.8467], + device='cuda:1'), covar=tensor([0.1552, 0.2510, 0.1406, 0.1294, 0.0920, 0.1196, 0.0971, 0.2364], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0477, 0.0452, 0.0397, 0.0521, 0.0418, 0.0596, 0.0430], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:26:52,463 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:04,620 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:11,028 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.968e+02 6.473e+02 7.865e+02 1.057e+03 2.211e+03, threshold=1.573e+03, percent-clipped=1.0 +2023-04-01 03:27:16,564 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2818, 1.2419, 1.2901, 1.6523, 2.8086, 1.2269, 1.9283, 2.8793], + device='cuda:1'), covar=tensor([0.0410, 0.2647, 0.2605, 0.1477, 0.0615, 0.2282, 0.1227, 0.0475], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0313, 0.0312, 0.0284, 0.0305, 0.0321, 0.0287, 0.0299], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:27:18,545 INFO [train.py:903] (1/4) Epoch 4, batch 4550, loss[loss=0.2969, simple_loss=0.3648, pruned_loss=0.1145, over 18841.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3557, pruned_loss=0.1235, over 3811978.18 frames. ], batch size: 74, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:27:27,123 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-01 03:27:31,937 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:50,584 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 03:28:18,971 INFO [train.py:903] (1/4) Epoch 4, batch 4600, loss[loss=0.3498, simple_loss=0.3934, pruned_loss=0.1531, over 19623.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3549, pruned_loss=0.1227, over 3816298.03 frames. ], batch size: 57, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:29:10,645 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:29:12,893 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+02 7.418e+02 9.211e+02 1.176e+03 2.853e+03, threshold=1.842e+03, percent-clipped=7.0 +2023-04-01 03:29:20,333 INFO [train.py:903] (1/4) Epoch 4, batch 4650, loss[loss=0.4104, simple_loss=0.4184, pruned_loss=0.2012, over 13126.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3541, pruned_loss=0.1222, over 3810823.70 frames. ], batch size: 137, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:29:37,186 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 03:29:47,537 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 03:30:19,351 INFO [train.py:903] (1/4) Epoch 4, batch 4700, loss[loss=0.2539, simple_loss=0.3127, pruned_loss=0.09749, over 19746.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3536, pruned_loss=0.1221, over 3817745.62 frames. ], batch size: 46, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:30:42,833 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 03:30:50,629 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:13,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+02 7.587e+02 9.394e+02 1.259e+03 3.233e+03, threshold=1.879e+03, percent-clipped=11.0 +2023-04-01 03:31:21,429 INFO [train.py:903] (1/4) Epoch 4, batch 4750, loss[loss=0.292, simple_loss=0.3488, pruned_loss=0.1176, over 19767.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3539, pruned_loss=0.1224, over 3833835.04 frames. ], batch size: 56, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:31:30,698 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25242.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:31,680 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6320, 4.2007, 2.7194, 3.7179, 1.1560, 3.8170, 3.7836, 3.9932], + device='cuda:1'), covar=tensor([0.0533, 0.1014, 0.1738, 0.0711, 0.3793, 0.0966, 0.0635, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0326, 0.0289, 0.0342, 0.0272, 0.0340, 0.0298, 0.0265, 0.0295], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:32:16,214 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:21,434 INFO [train.py:903] (1/4) Epoch 4, batch 4800, loss[loss=0.3504, simple_loss=0.3875, pruned_loss=0.1566, over 13583.00 frames. 
], tot_loss[loss=0.2997, simple_loss=0.3544, pruned_loss=0.1225, over 3834179.74 frames. ], batch size: 136, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:32:41,277 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:44,325 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:09,547 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:11,886 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:13,790 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+02 7.095e+02 8.715e+02 1.261e+03 2.828e+03, threshold=1.743e+03, percent-clipped=4.0 +2023-04-01 03:33:21,567 INFO [train.py:903] (1/4) Epoch 4, batch 4850, loss[loss=0.302, simple_loss=0.3573, pruned_loss=0.1233, over 19531.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3538, pruned_loss=0.1221, over 3840080.49 frames. ], batch size: 54, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:33:22,981 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7596, 1.8675, 1.7856, 2.8911, 2.4523, 2.3030, 2.4124, 2.5372], + device='cuda:1'), covar=tensor([0.0641, 0.1518, 0.1229, 0.0687, 0.1033, 0.0416, 0.0687, 0.0524], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0366, 0.0281, 0.0243, 0.0304, 0.0255, 0.0268, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:33:45,814 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 03:33:48,285 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:34:03,171 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 03:34:04,841 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 03:34:11,328 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 03:34:12,494 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 03:34:21,580 INFO [train.py:903] (1/4) Epoch 4, batch 4900, loss[loss=0.3037, simple_loss=0.3657, pruned_loss=0.1208, over 18749.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3545, pruned_loss=0.1223, over 3824542.24 frames. ], batch size: 74, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:34:21,606 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 03:34:23,046 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1740, 1.5998, 2.4727, 2.8579, 2.5000, 2.6645, 2.6841, 2.8941], + device='cuda:1'), covar=tensor([0.0504, 0.1752, 0.0901, 0.0753, 0.1070, 0.0368, 0.0655, 0.0450], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0362, 0.0276, 0.0242, 0.0301, 0.0252, 0.0267, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:34:31,773 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9645, 1.2474, 1.4477, 1.5146, 2.5763, 1.1768, 1.7966, 2.6360], + device='cuda:1'), covar=tensor([0.0447, 0.2313, 0.2212, 0.1390, 0.0590, 0.1824, 0.0983, 0.0474], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0302, 0.0299, 0.0277, 0.0292, 0.0312, 0.0278, 0.0287], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:34:42,596 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 03:35:16,119 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.171e+02 7.094e+02 8.660e+02 1.069e+03 1.655e+03, threshold=1.732e+03, percent-clipped=0.0 +2023-04-01 03:35:23,610 INFO [train.py:903] (1/4) Epoch 4, batch 4950, loss[loss=0.2748, simple_loss=0.3418, pruned_loss=0.1039, over 19699.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3531, pruned_loss=0.1211, over 3823065.86 frames. ], batch size: 53, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:35:37,194 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25446.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:35:41,238 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 03:36:04,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 03:36:07,331 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-04-01 03:36:08,203 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:10,077 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5355, 4.1160, 2.5327, 3.7010, 1.0629, 3.6111, 3.6012, 3.8972], + device='cuda:1'), covar=tensor([0.0629, 0.1481, 0.1899, 0.0699, 0.4156, 0.0910, 0.0721, 0.0759], + device='cuda:1'), in_proj_covar=tensor([0.0334, 0.0296, 0.0355, 0.0277, 0.0352, 0.0301, 0.0267, 0.0298], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:36:14,162 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.84 vs. limit=5.0 +2023-04-01 03:36:24,197 INFO [train.py:903] (1/4) Epoch 4, batch 5000, loss[loss=0.2948, simple_loss=0.3563, pruned_loss=0.1167, over 19482.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3531, pruned_loss=0.121, over 3830304.62 frames. ], batch size: 64, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:36:33,150 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-01 03:36:40,147 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:44,425 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 03:37:10,982 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25523.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:37:17,568 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.823e+02 8.824e+02 1.078e+03 2.588e+03, threshold=1.765e+03, percent-clipped=9.0 +2023-04-01 03:37:24,345 INFO [train.py:903] (1/4) Epoch 4, batch 5050, loss[loss=0.328, simple_loss=0.3785, pruned_loss=0.1388, over 17278.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3534, pruned_loss=0.121, over 3827708.06 frames. ], batch size: 101, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:37:36,450 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.42 vs. limit=5.0 +2023-04-01 03:38:02,118 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 03:38:21,770 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:38:25,842 INFO [train.py:903] (1/4) Epoch 4, batch 5100, loss[loss=0.2771, simple_loss=0.3276, pruned_loss=0.1133, over 19733.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3522, pruned_loss=0.1201, over 3831029.64 frames. ], batch size: 45, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:36,809 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 03:38:40,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 03:38:44,293 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 03:38:52,397 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:39:19,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.903e+02 6.354e+02 8.688e+02 1.168e+03 2.387e+03, threshold=1.738e+03, percent-clipped=4.0 +2023-04-01 03:39:27,222 INFO [train.py:903] (1/4) Epoch 4, batch 5150, loss[loss=0.3569, simple_loss=0.3932, pruned_loss=0.1603, over 13613.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3548, pruned_loss=0.1216, over 3828512.63 frames. ], batch size: 136, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:39:29,728 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7463, 4.2194, 2.7204, 3.7871, 1.0082, 4.0812, 3.9266, 4.2582], + device='cuda:1'), covar=tensor([0.0521, 0.0947, 0.1769, 0.0695, 0.3845, 0.0773, 0.0654, 0.0712], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0296, 0.0357, 0.0281, 0.0349, 0.0304, 0.0269, 0.0302], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:39:39,301 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 03:39:46,304 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2112, 1.3033, 0.9414, 0.9194, 1.0018, 1.0890, 0.0231, 0.3848], + device='cuda:1'), covar=tensor([0.0262, 0.0250, 0.0183, 0.0205, 0.0516, 0.0193, 0.0474, 0.0403], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0276, 0.0267, 0.0297, 0.0356, 0.0285, 0.0277, 0.0282], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:39:58,757 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2626, 1.3116, 1.8974, 1.4471, 3.0463, 2.6576, 3.4250, 1.4437], + device='cuda:1'), covar=tensor([0.1947, 0.3102, 0.1707, 0.1528, 0.1338, 0.1481, 0.1537, 0.2752], + device='cuda:1'), in_proj_covar=tensor([0.0447, 0.0495, 0.0461, 0.0409, 0.0534, 0.0440, 0.0614, 0.0434], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:40:09,899 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7742, 1.3759, 1.5449, 1.9016, 3.1558, 1.2357, 2.2481, 3.4098], + device='cuda:1'), covar=tensor([0.0355, 0.2734, 0.2534, 0.1519, 0.0627, 0.2425, 0.1207, 0.0386], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0310, 0.0309, 0.0281, 0.0305, 0.0319, 0.0287, 0.0292], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:40:12,884 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 03:40:28,111 INFO [train.py:903] (1/4) Epoch 4, batch 5200, loss[loss=0.2406, simple_loss=0.3043, pruned_loss=0.08843, over 17370.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3534, pruned_loss=0.1212, over 3827310.52 frames. ], batch size: 38, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:40:38,655 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4392, 1.3691, 1.4947, 1.8221, 2.9692, 1.1846, 1.8666, 3.0188], + device='cuda:1'), covar=tensor([0.0376, 0.2513, 0.2328, 0.1418, 0.0566, 0.2273, 0.1239, 0.0444], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0307, 0.0307, 0.0280, 0.0303, 0.0317, 0.0284, 0.0291], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:40:42,802 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 03:41:21,050 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:41:21,774 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 7.304e+02 9.145e+02 1.165e+03 2.884e+03, threshold=1.829e+03, percent-clipped=6.0 +2023-04-01 03:41:25,397 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 03:41:28,709 INFO [train.py:903] (1/4) Epoch 4, batch 5250, loss[loss=0.3319, simple_loss=0.3793, pruned_loss=0.1422, over 19473.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3534, pruned_loss=0.1209, over 3816587.74 frames. 
], batch size: 49, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:41:51,369 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:42:30,031 INFO [train.py:903] (1/4) Epoch 4, batch 5300, loss[loss=0.3459, simple_loss=0.3774, pruned_loss=0.1572, over 19629.00 frames. ], tot_loss[loss=0.2952, simple_loss=0.3513, pruned_loss=0.1196, over 3825975.50 frames. ], batch size: 50, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:42:35,924 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25790.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:42:48,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 03:42:49,426 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9748, 1.5869, 1.4518, 1.9889, 1.8259, 1.8682, 1.7007, 1.8857], + device='cuda:1'), covar=tensor([0.0780, 0.1406, 0.1246, 0.0827, 0.1014, 0.0419, 0.0778, 0.0553], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0371, 0.0285, 0.0245, 0.0311, 0.0257, 0.0274, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:43:23,327 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 7.857e+02 9.756e+02 1.201e+03 3.803e+03, threshold=1.951e+03, percent-clipped=8.0 +2023-04-01 03:43:28,060 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0104, 2.0345, 1.8711, 3.0331, 2.0651, 3.0661, 2.7836, 1.8436], + device='cuda:1'), covar=tensor([0.1657, 0.1316, 0.0733, 0.0774, 0.1546, 0.0423, 0.1155, 0.1216], + device='cuda:1'), in_proj_covar=tensor([0.0575, 0.0561, 0.0524, 0.0721, 0.0624, 0.0479, 0.0630, 0.0537], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:43:31,864 INFO [train.py:903] (1/4) Epoch 4, batch 5350, loss[loss=0.3311, simple_loss=0.3837, pruned_loss=0.1392, over 18829.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3517, pruned_loss=0.1198, over 3811498.70 frames. ], batch size: 74, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:43:45,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 03:44:04,161 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 03:44:13,281 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3392, 1.2246, 1.8734, 1.4996, 2.9534, 2.3918, 3.0631, 1.3574], + device='cuda:1'), covar=tensor([0.1871, 0.3127, 0.1763, 0.1519, 0.1179, 0.1469, 0.1478, 0.2719], + device='cuda:1'), in_proj_covar=tensor([0.0446, 0.0494, 0.0464, 0.0406, 0.0538, 0.0443, 0.0620, 0.0438], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:44:21,895 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25876.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:44:32,189 INFO [train.py:903] (1/4) Epoch 4, batch 5400, loss[loss=0.2317, simple_loss=0.3052, pruned_loss=0.07915, over 19718.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3529, pruned_loss=0.1204, over 3809885.83 frames. 
], batch size: 51, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:56,685 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:45:16,356 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-04-01 03:45:26,555 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.180e+02 6.939e+02 8.636e+02 1.056e+03 2.577e+03, threshold=1.727e+03, percent-clipped=2.0 +2023-04-01 03:45:33,337 INFO [train.py:903] (1/4) Epoch 4, batch 5450, loss[loss=0.3198, simple_loss=0.3596, pruned_loss=0.14, over 19401.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3529, pruned_loss=0.1203, over 3808966.38 frames. ], batch size: 48, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:46:34,625 INFO [train.py:903] (1/4) Epoch 4, batch 5500, loss[loss=0.2624, simple_loss=0.3124, pruned_loss=0.1062, over 15181.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3535, pruned_loss=0.1209, over 3796956.25 frames. ], batch size: 33, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:46:42,600 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4705, 1.1752, 1.1837, 1.7969, 1.4739, 1.6543, 1.8784, 1.3315], + device='cuda:1'), covar=tensor([0.0857, 0.1040, 0.1070, 0.0760, 0.0822, 0.0725, 0.0717, 0.0775], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0257, 0.0248, 0.0285, 0.0286, 0.0240, 0.0248, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 03:46:58,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 03:47:31,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.568e+02 9.013e+02 1.115e+03 1.816e+03, threshold=1.803e+03, percent-clipped=1.0 +2023-04-01 03:47:37,482 INFO [train.py:903] (1/4) Epoch 4, batch 5550, loss[loss=0.2639, simple_loss=0.3254, pruned_loss=0.1012, over 19841.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3525, pruned_loss=0.1201, over 3816307.32 frames. ], batch size: 52, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:47:45,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 03:47:48,990 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0836, 3.5386, 3.6771, 3.7035, 1.3440, 3.4005, 3.0380, 3.2241], + device='cuda:1'), covar=tensor([0.1130, 0.0709, 0.0654, 0.0592, 0.3811, 0.0480, 0.0586, 0.1292], + device='cuda:1'), in_proj_covar=tensor([0.0463, 0.0413, 0.0548, 0.0445, 0.0540, 0.0320, 0.0362, 0.0517], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 03:48:08,130 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0787, 1.8647, 1.3788, 1.3023, 1.7234, 1.1313, 0.9129, 1.7011], + device='cuda:1'), covar=tensor([0.0603, 0.0488, 0.0880, 0.0432, 0.0375, 0.0911, 0.0606, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0258, 0.0315, 0.0243, 0.0218, 0.0312, 0.0282, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:48:33,398 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-01 03:48:38,940 INFO [train.py:903] (1/4) Epoch 4, batch 5600, loss[loss=0.2707, simple_loss=0.3276, pruned_loss=0.1068, over 19060.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3528, pruned_loss=0.1199, over 3832672.08 frames. ], batch size: 42, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:48:41,419 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9322, 1.5479, 2.0976, 1.7685, 2.7906, 4.7432, 4.6354, 4.9008], + device='cuda:1'), covar=tensor([0.1062, 0.2472, 0.2339, 0.1560, 0.0423, 0.0082, 0.0112, 0.0070], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0279, 0.0317, 0.0259, 0.0196, 0.0116, 0.0203, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-04-01 03:49:22,515 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:49:34,176 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+02 7.669e+02 9.359e+02 1.114e+03 3.409e+03, threshold=1.872e+03, percent-clipped=3.0 +2023-04-01 03:49:40,064 INFO [train.py:903] (1/4) Epoch 4, batch 5650, loss[loss=0.3163, simple_loss=0.3721, pruned_loss=0.1303, over 18161.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3529, pruned_loss=0.12, over 3831344.04 frames. ], batch size: 84, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:12,574 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26161.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:50:25,381 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 03:50:38,345 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9048, 1.8610, 1.7967, 2.8435, 1.9932, 2.7596, 2.6269, 1.7822], + device='cuda:1'), covar=tensor([0.1677, 0.1197, 0.0719, 0.0654, 0.1349, 0.0433, 0.1102, 0.1158], + device='cuda:1'), in_proj_covar=tensor([0.0577, 0.0556, 0.0517, 0.0711, 0.0622, 0.0475, 0.0629, 0.0536], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:50:40,063 INFO [train.py:903] (1/4) Epoch 4, batch 5700, loss[loss=0.3192, simple_loss=0.3667, pruned_loss=0.1358, over 19752.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3528, pruned_loss=0.1203, over 3817626.30 frames. ], batch size: 54, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:42,193 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:50:42,303 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26186.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:23,014 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26220.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:35,339 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.235e+02 7.760e+02 9.507e+02 1.157e+03 2.773e+03, threshold=1.901e+03, percent-clipped=5.0 +2023-04-01 03:51:39,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 03:51:40,952 INFO [train.py:903] (1/4) Epoch 4, batch 5750, loss[loss=0.2785, simple_loss=0.3429, pruned_loss=0.1071, over 19569.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3534, pruned_loss=0.1208, over 3813783.23 frames. 
], batch size: 52, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:51:48,678 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 03:51:52,101 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9569, 4.9112, 5.7672, 5.7302, 1.7595, 5.3595, 4.7950, 5.2067], + device='cuda:1'), covar=tensor([0.0692, 0.0504, 0.0355, 0.0277, 0.3619, 0.0197, 0.0353, 0.0765], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0410, 0.0536, 0.0437, 0.0529, 0.0315, 0.0357, 0.0508], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 03:51:52,967 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 03:52:10,485 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2430, 3.6302, 3.7870, 3.7536, 1.3646, 3.3521, 3.0831, 3.3959], + device='cuda:1'), covar=tensor([0.0919, 0.0617, 0.0513, 0.0461, 0.3836, 0.0480, 0.0557, 0.1049], + device='cuda:1'), in_proj_covar=tensor([0.0461, 0.0414, 0.0539, 0.0441, 0.0537, 0.0318, 0.0359, 0.0514], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 03:52:20,348 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:52:40,302 INFO [train.py:903] (1/4) Epoch 4, batch 5800, loss[loss=0.2843, simple_loss=0.3408, pruned_loss=0.1139, over 19867.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3552, pruned_loss=0.122, over 3817460.88 frames. ], batch size: 52, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:53:35,866 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8942, 1.8162, 1.6804, 2.3648, 4.3446, 1.1868, 2.2398, 4.3692], + device='cuda:1'), covar=tensor([0.0229, 0.2275, 0.2398, 0.1264, 0.0423, 0.2231, 0.1228, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0310, 0.0307, 0.0283, 0.0304, 0.0313, 0.0287, 0.0293], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:53:36,644 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.787e+02 8.542e+02 1.114e+03 2.576e+03, threshold=1.708e+03, percent-clipped=4.0 +2023-04-01 03:53:39,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8708, 1.9586, 1.8418, 2.9002, 2.0120, 2.9957, 2.6742, 1.7887], + device='cuda:1'), covar=tensor([0.1782, 0.1309, 0.0733, 0.0783, 0.1532, 0.0438, 0.1331, 0.1232], + device='cuda:1'), in_proj_covar=tensor([0.0578, 0.0556, 0.0519, 0.0722, 0.0618, 0.0479, 0.0627, 0.0538], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:53:41,253 INFO [train.py:903] (1/4) Epoch 4, batch 5850, loss[loss=0.2692, simple_loss=0.3425, pruned_loss=0.09796, over 19668.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3535, pruned_loss=0.1213, over 3825573.05 frames. 
], batch size: 55, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:53:41,609 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26335.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:54:30,690 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4398, 4.0247, 2.3793, 3.6154, 1.1414, 3.6704, 3.6566, 3.9015], + device='cuda:1'), covar=tensor([0.0664, 0.1012, 0.2111, 0.0745, 0.3743, 0.0907, 0.0739, 0.0849], + device='cuda:1'), in_proj_covar=tensor([0.0335, 0.0298, 0.0352, 0.0278, 0.0343, 0.0294, 0.0265, 0.0291], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 03:54:40,807 INFO [train.py:903] (1/4) Epoch 4, batch 5900, loss[loss=0.3527, simple_loss=0.3987, pruned_loss=0.1534, over 19588.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3546, pruned_loss=0.1219, over 3826971.40 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:54:41,869 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 03:55:03,874 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 03:55:37,697 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+02 6.673e+02 8.574e+02 1.115e+03 3.080e+03, threshold=1.715e+03, percent-clipped=4.0 +2023-04-01 03:55:39,778 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:55:43,005 INFO [train.py:903] (1/4) Epoch 4, batch 5950, loss[loss=0.2996, simple_loss=0.369, pruned_loss=0.1151, over 19659.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3546, pruned_loss=0.1221, over 3813379.26 frames. ], batch size: 58, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:56:17,507 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:56:43,845 INFO [train.py:903] (1/4) Epoch 4, batch 6000, loss[loss=0.3184, simple_loss=0.3808, pruned_loss=0.128, over 19669.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3533, pruned_loss=0.1207, over 3820738.65 frames. ], batch size: 58, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:56:43,846 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 03:56:57,349 INFO [train.py:937] (1/4) Epoch 4, validation: loss=0.2103, simple_loss=0.3081, pruned_loss=0.05622, over 944034.00 frames. 
+2023-04-01 03:56:57,350 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 03:56:58,915 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7033, 1.2549, 1.5577, 2.0547, 3.2535, 1.0357, 2.0561, 3.1830], + device='cuda:1'), covar=tensor([0.0275, 0.2482, 0.2212, 0.1174, 0.0487, 0.2328, 0.1212, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0315, 0.0307, 0.0284, 0.0307, 0.0317, 0.0287, 0.0294], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:57:41,682 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0378, 1.7927, 1.6686, 2.0167, 1.8544, 1.9678, 1.6855, 1.9312], + device='cuda:1'), covar=tensor([0.0757, 0.1291, 0.1150, 0.0809, 0.0968, 0.0444, 0.1000, 0.0594], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0359, 0.0278, 0.0241, 0.0301, 0.0252, 0.0268, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 03:57:51,992 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:57:52,984 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+02 7.085e+02 8.722e+02 1.145e+03 2.334e+03, threshold=1.744e+03, percent-clipped=4.0 +2023-04-01 03:57:57,456 INFO [train.py:903] (1/4) Epoch 4, batch 6050, loss[loss=0.317, simple_loss=0.3718, pruned_loss=0.1311, over 19763.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.353, pruned_loss=0.1206, over 3827525.80 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:58:49,946 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:58:57,302 INFO [train.py:903] (1/4) Epoch 4, batch 6100, loss[loss=0.2834, simple_loss=0.3434, pruned_loss=0.1117, over 19598.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3524, pruned_loss=0.1202, over 3830233.39 frames. ], batch size: 57, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:59:04,729 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26591.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:59:29,869 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:59:30,170 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4961, 1.5280, 1.3441, 2.0362, 1.3776, 1.8619, 1.7529, 1.1542], + device='cuda:1'), covar=tensor([0.2135, 0.1732, 0.1377, 0.0940, 0.1802, 0.0773, 0.2343, 0.2330], + device='cuda:1'), in_proj_covar=tensor([0.0586, 0.0570, 0.0534, 0.0732, 0.0628, 0.0491, 0.0635, 0.0543], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 03:59:34,627 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26616.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:59:51,597 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 6.768e+02 7.963e+02 1.032e+03 2.370e+03, threshold=1.593e+03, percent-clipped=5.0 +2023-04-01 03:59:56,247 INFO [train.py:903] (1/4) Epoch 4, batch 6150, loss[loss=0.2499, simple_loss=0.3103, pruned_loss=0.09479, over 19423.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3512, pruned_loss=0.1195, over 3830748.39 frames. 
], batch size: 48, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:00:08,578 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:00:23,801 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 04:00:54,795 INFO [train.py:903] (1/4) Epoch 4, batch 6200, loss[loss=0.3043, simple_loss=0.3656, pruned_loss=0.1215, over 19675.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.352, pruned_loss=0.1207, over 3829807.80 frames. ], batch size: 59, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:01:39,826 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1683, 2.7319, 1.7761, 1.9711, 2.0178, 2.2156, 0.4348, 2.1577], + device='cuda:1'), covar=tensor([0.0262, 0.0254, 0.0311, 0.0431, 0.0475, 0.0364, 0.0609, 0.0407], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0274, 0.0268, 0.0298, 0.0353, 0.0281, 0.0273, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:01:45,243 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26727.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:01:51,166 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+02 7.340e+02 8.907e+02 1.200e+03 2.630e+03, threshold=1.781e+03, percent-clipped=5.0 +2023-04-01 04:01:55,634 INFO [train.py:903] (1/4) Epoch 4, batch 6250, loss[loss=0.3023, simple_loss=0.3562, pruned_loss=0.1242, over 19580.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3522, pruned_loss=0.1209, over 3827097.89 frames. ], batch size: 52, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:02:24,771 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 04:02:44,564 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:02:55,314 INFO [train.py:903] (1/4) Epoch 4, batch 6300, loss[loss=0.2959, simple_loss=0.3404, pruned_loss=0.1257, over 19749.00 frames. ], tot_loss[loss=0.2949, simple_loss=0.3508, pruned_loss=0.1195, over 3831557.94 frames. ], batch size: 47, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:03:49,878 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.898e+02 7.473e+02 9.358e+02 1.215e+03 3.413e+03, threshold=1.872e+03, percent-clipped=4.0 +2023-04-01 04:03:54,555 INFO [train.py:903] (1/4) Epoch 4, batch 6350, loss[loss=0.2972, simple_loss=0.3459, pruned_loss=0.1242, over 19389.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.351, pruned_loss=0.1201, over 3821179.31 frames. ], batch size: 48, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:03:54,982 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:25,999 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:55,431 INFO [train.py:903] (1/4) Epoch 4, batch 6400, loss[loss=0.288, simple_loss=0.3571, pruned_loss=0.1095, over 19653.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3496, pruned_loss=0.1188, over 3821763.80 frames. 
], batch size: 58, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:05:05,491 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26891.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:16,518 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:28,815 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6394, 1.7257, 1.6763, 2.4917, 1.7235, 2.2349, 2.2400, 1.6109], + device='cuda:1'), covar=tensor([0.1660, 0.1276, 0.0783, 0.0701, 0.1398, 0.0564, 0.1352, 0.1276], + device='cuda:1'), in_proj_covar=tensor([0.0584, 0.0566, 0.0531, 0.0726, 0.0627, 0.0485, 0.0637, 0.0537], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:05:33,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2414, 1.1801, 1.9672, 1.4269, 2.8925, 2.5133, 3.1880, 1.2834], + device='cuda:1'), covar=tensor([0.1883, 0.3071, 0.1570, 0.1533, 0.1310, 0.1369, 0.1458, 0.2852], + device='cuda:1'), in_proj_covar=tensor([0.0445, 0.0495, 0.0459, 0.0413, 0.0538, 0.0435, 0.0611, 0.0435], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:05:41,179 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26922.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:05:45,916 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:52,840 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+02 6.991e+02 8.678e+02 1.023e+03 2.915e+03, threshold=1.736e+03, percent-clipped=2.0 +2023-04-01 04:05:58,077 INFO [train.py:903] (1/4) Epoch 4, batch 6450, loss[loss=0.3029, simple_loss=0.3516, pruned_loss=0.1271, over 19772.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.3498, pruned_loss=0.1184, over 3824542.92 frames. ], batch size: 56, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:06:05,252 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3168, 2.1271, 1.4938, 1.5530, 1.8918, 1.0150, 1.2243, 1.8279], + device='cuda:1'), covar=tensor([0.0650, 0.0397, 0.0750, 0.0375, 0.0382, 0.0977, 0.0557, 0.0265], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0256, 0.0315, 0.0237, 0.0214, 0.0308, 0.0284, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:06:33,660 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7193, 3.9846, 4.2945, 4.2955, 1.4753, 3.8808, 3.5025, 3.8338], + device='cuda:1'), covar=tensor([0.0856, 0.0666, 0.0545, 0.0404, 0.4060, 0.0361, 0.0492, 0.1023], + device='cuda:1'), in_proj_covar=tensor([0.0475, 0.0420, 0.0565, 0.0446, 0.0549, 0.0322, 0.0369, 0.0528], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 04:06:36,883 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:39,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-01 04:06:56,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:58,600 INFO [train.py:903] (1/4) Epoch 4, batch 6500, loss[loss=0.2558, simple_loss=0.3099, pruned_loss=0.1009, over 19165.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.3498, pruned_loss=0.1185, over 3821820.51 frames. ], batch size: 42, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:07:03,003 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 04:07:25,326 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27008.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:07:53,581 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+02 7.867e+02 9.982e+02 1.245e+03 2.621e+03, threshold=1.996e+03, percent-clipped=6.0 +2023-04-01 04:07:57,611 INFO [train.py:903] (1/4) Epoch 4, batch 6550, loss[loss=0.2866, simple_loss=0.3285, pruned_loss=0.1223, over 19759.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3504, pruned_loss=0.1198, over 3833307.84 frames. ], batch size: 45, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:08:54,149 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4520, 1.3130, 1.8716, 1.6002, 2.5059, 2.0838, 2.6094, 1.4519], + device='cuda:1'), covar=tensor([0.1415, 0.2330, 0.1156, 0.1188, 0.0850, 0.1167, 0.0968, 0.1948], + device='cuda:1'), in_proj_covar=tensor([0.0439, 0.0486, 0.0453, 0.0404, 0.0532, 0.0432, 0.0602, 0.0427], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:08:57,146 INFO [train.py:903] (1/4) Epoch 4, batch 6600, loss[loss=0.2527, simple_loss=0.3217, pruned_loss=0.09185, over 19599.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3512, pruned_loss=0.1202, over 3826156.54 frames. ], batch size: 52, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:09:53,358 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+02 7.635e+02 9.605e+02 1.185e+03 2.942e+03, threshold=1.921e+03, percent-clipped=6.0 +2023-04-01 04:09:58,811 INFO [train.py:903] (1/4) Epoch 4, batch 6650, loss[loss=0.2601, simple_loss=0.3178, pruned_loss=0.1012, over 19398.00 frames. ], tot_loss[loss=0.2937, simple_loss=0.3494, pruned_loss=0.119, over 3828692.96 frames. 
], batch size: 48, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:10:13,160 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:35,295 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:42,762 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:51,989 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7085, 4.2027, 2.3956, 3.8191, 0.9278, 3.9231, 3.8631, 4.0283], + device='cuda:1'), covar=tensor([0.0597, 0.1202, 0.2373, 0.0784, 0.4903, 0.0954, 0.0866, 0.1138], + device='cuda:1'), in_proj_covar=tensor([0.0343, 0.0306, 0.0355, 0.0282, 0.0349, 0.0297, 0.0267, 0.0302], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:10:58,823 INFO [train.py:903] (1/4) Epoch 4, batch 6700, loss[loss=0.2389, simple_loss=0.2991, pruned_loss=0.08939, over 19774.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3489, pruned_loss=0.1188, over 3834862.19 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:11:52,644 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.097e+02 7.210e+02 9.176e+02 1.266e+03 4.477e+03, threshold=1.835e+03, percent-clipped=7.0 +2023-04-01 04:11:57,026 INFO [train.py:903] (1/4) Epoch 4, batch 6750, loss[loss=0.3001, simple_loss=0.3585, pruned_loss=0.1208, over 19543.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.3494, pruned_loss=0.1184, over 3833743.80 frames. ], batch size: 56, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:12:30,348 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-01 04:12:31,960 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27266.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:12:52,952 INFO [train.py:903] (1/4) Epoch 4, batch 6800, loss[loss=0.3078, simple_loss=0.3679, pruned_loss=0.1239, over 19529.00 frames. ], tot_loss[loss=0.2943, simple_loss=0.3503, pruned_loss=0.1192, over 3832696.85 frames. ], batch size: 56, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:13:37,202 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 04:13:37,650 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 04:13:40,504 INFO [train.py:903] (1/4) Epoch 5, batch 0, loss[loss=0.3154, simple_loss=0.3692, pruned_loss=0.1308, over 19693.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3692, pruned_loss=0.1308, over 19693.00 frames. ], batch size: 60, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:13:40,504 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 04:13:52,268 INFO [train.py:937] (1/4) Epoch 5, validation: loss=0.2121, simple_loss=0.3102, pruned_loss=0.05704, over 944034.00 frames. 
+2023-04-01 04:13:52,268 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 04:13:52,406 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:13:55,638 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9600, 5.3028, 2.8228, 4.6499, 1.4931, 5.2751, 5.0847, 5.4132], + device='cuda:1'), covar=tensor([0.0413, 0.0920, 0.1903, 0.0575, 0.3675, 0.0653, 0.0587, 0.0686], + device='cuda:1'), in_proj_covar=tensor([0.0336, 0.0303, 0.0352, 0.0280, 0.0347, 0.0293, 0.0263, 0.0297], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:14:04,628 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 04:14:16,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.218e+02 7.861e+02 9.859e+02 1.236e+03 2.711e+03, threshold=1.972e+03, percent-clipped=3.0 +2023-04-01 04:14:37,962 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0746, 1.9310, 1.9906, 2.2795, 4.5347, 1.3158, 2.5202, 4.6052], + device='cuda:1'), covar=tensor([0.0242, 0.2182, 0.2071, 0.1382, 0.0425, 0.2191, 0.1028, 0.0287], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0305, 0.0305, 0.0282, 0.0296, 0.0311, 0.0278, 0.0291], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:14:52,470 INFO [train.py:903] (1/4) Epoch 5, batch 50, loss[loss=0.295, simple_loss=0.3596, pruned_loss=0.1152, over 17449.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3546, pruned_loss=0.1206, over 858507.72 frames. ], batch size: 101, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:14:55,322 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 04:15:15,084 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27381.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:15:22,372 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 04:15:26,080 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 04:15:26,410 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2954, 1.3249, 1.6951, 2.4851, 1.8491, 2.2557, 2.5420, 2.4081], + device='cuda:1'), covar=tensor([0.0808, 0.1349, 0.1261, 0.1183, 0.1227, 0.0875, 0.1127, 0.0747], + device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0254, 0.0250, 0.0288, 0.0280, 0.0233, 0.0247, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 04:15:53,858 INFO [train.py:903] (1/4) Epoch 5, batch 100, loss[loss=0.2503, simple_loss=0.3199, pruned_loss=0.09039, over 19602.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3533, pruned_loss=0.1191, over 1522542.99 frames. ], batch size: 52, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:16:05,290 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-01 04:16:11,150 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:16:15,198 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.953e+02 8.679e+02 1.081e+03 2.199e+03, threshold=1.736e+03, percent-clipped=1.0 +2023-04-01 04:16:53,730 INFO [train.py:903] (1/4) Epoch 5, batch 150, loss[loss=0.275, simple_loss=0.3368, pruned_loss=0.1065, over 19607.00 frames. ], tot_loss[loss=0.2945, simple_loss=0.3513, pruned_loss=0.1188, over 2010904.50 frames. ], batch size: 50, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:52,391 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:17:54,460 INFO [train.py:903] (1/4) Epoch 5, batch 200, loss[loss=0.2959, simple_loss=0.3624, pruned_loss=0.1147, over 19596.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3503, pruned_loss=0.1176, over 2419797.87 frames. ], batch size: 57, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:54,474 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 04:18:19,387 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+02 6.870e+02 8.382e+02 1.064e+03 2.606e+03, threshold=1.676e+03, percent-clipped=3.0 +2023-04-01 04:18:40,888 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4004, 1.4365, 1.7023, 2.3560, 1.8724, 2.1830, 2.5777, 2.5405], + device='cuda:1'), covar=tensor([0.0681, 0.1086, 0.1052, 0.1100, 0.1073, 0.0760, 0.0890, 0.0630], + device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0254, 0.0250, 0.0289, 0.0283, 0.0234, 0.0245, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 04:18:43,129 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2555, 2.0931, 1.5780, 1.4127, 1.9089, 1.0782, 1.0663, 1.5615], + device='cuda:1'), covar=tensor([0.0615, 0.0464, 0.0889, 0.0469, 0.0333, 0.0968, 0.0588, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0260, 0.0322, 0.0241, 0.0221, 0.0312, 0.0283, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:18:56,502 INFO [train.py:903] (1/4) Epoch 5, batch 250, loss[loss=0.3085, simple_loss=0.3603, pruned_loss=0.1283, over 19828.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.35, pruned_loss=0.1176, over 2721180.90 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:19:51,630 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 04:19:58,938 INFO [train.py:903] (1/4) Epoch 5, batch 300, loss[loss=0.2919, simple_loss=0.3478, pruned_loss=0.118, over 19799.00 frames. ], tot_loss[loss=0.293, simple_loss=0.3503, pruned_loss=0.1179, over 2973810.20 frames. 
], batch size: 56, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:20:15,324 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:20:22,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+02 6.581e+02 8.607e+02 1.103e+03 1.922e+03, threshold=1.721e+03, percent-clipped=6.0 +2023-04-01 04:20:28,906 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27637.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:21:01,019 INFO [train.py:903] (1/4) Epoch 5, batch 350, loss[loss=0.2842, simple_loss=0.3405, pruned_loss=0.114, over 19667.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3509, pruned_loss=0.1187, over 3162457.82 frames. ], batch size: 53, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:21:01,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27662.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:21:07,182 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 04:21:26,433 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:21:27,596 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2470, 1.2813, 1.9314, 1.3560, 2.5789, 2.3785, 2.6869, 0.9778], + device='cuda:1'), covar=tensor([0.1749, 0.2900, 0.1457, 0.1446, 0.1113, 0.1202, 0.1198, 0.2659], + device='cuda:1'), in_proj_covar=tensor([0.0448, 0.0493, 0.0466, 0.0414, 0.0541, 0.0439, 0.0615, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:21:58,722 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:22:02,932 INFO [train.py:903] (1/4) Epoch 5, batch 400, loss[loss=0.3034, simple_loss=0.3728, pruned_loss=0.117, over 19540.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3498, pruned_loss=0.1176, over 3320229.46 frames. ], batch size: 56, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:22:08,108 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.87 vs. limit=5.0 +2023-04-01 04:22:27,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.948e+02 7.305e+02 9.017e+02 1.065e+03 1.815e+03, threshold=1.803e+03, percent-clipped=3.0 +2023-04-01 04:22:28,314 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27732.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:22:58,635 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2667, 1.1461, 1.3580, 1.0969, 2.6627, 3.3801, 3.3273, 3.6966], + device='cuda:1'), covar=tensor([0.1729, 0.4081, 0.4207, 0.2403, 0.0505, 0.0203, 0.0282, 0.0125], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0281, 0.0318, 0.0258, 0.0197, 0.0115, 0.0206, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:23:04,208 INFO [train.py:903] (1/4) Epoch 5, batch 450, loss[loss=0.2766, simple_loss=0.336, pruned_loss=0.1086, over 19583.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3489, pruned_loss=0.1167, over 3445098.33 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:23:45,988 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-01 04:23:47,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 04:24:07,111 INFO [train.py:903] (1/4) Epoch 5, batch 500, loss[loss=0.3351, simple_loss=0.3892, pruned_loss=0.1404, over 19605.00 frames. ], tot_loss[loss=0.2906, simple_loss=0.3486, pruned_loss=0.1163, over 3537152.68 frames. ], batch size: 57, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:24:31,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+02 6.150e+02 8.318e+02 1.057e+03 1.987e+03, threshold=1.664e+03, percent-clipped=1.0 +2023-04-01 04:24:49,523 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3034, 1.2612, 1.5936, 0.9893, 2.5862, 3.2125, 3.0579, 3.4085], + device='cuda:1'), covar=tensor([0.1323, 0.2775, 0.2606, 0.1992, 0.0377, 0.0139, 0.0204, 0.0116], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0279, 0.0315, 0.0255, 0.0194, 0.0114, 0.0203, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:25:11,308 INFO [train.py:903] (1/4) Epoch 5, batch 550, loss[loss=0.3276, simple_loss=0.3794, pruned_loss=0.138, over 19552.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3485, pruned_loss=0.1163, over 3603848.63 frames. ], batch size: 61, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:25:35,631 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:07,925 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:14,567 INFO [train.py:903] (1/4) Epoch 5, batch 600, loss[loss=0.2775, simple_loss=0.3262, pruned_loss=0.1145, over 19736.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3479, pruned_loss=0.116, over 3648616.85 frames. ], batch size: 45, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:26:38,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.595e+02 8.388e+02 1.023e+03 2.578e+03, threshold=1.678e+03, percent-clipped=3.0 +2023-04-01 04:26:48,522 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-04-01 04:27:02,819 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 04:27:17,951 INFO [train.py:903] (1/4) Epoch 5, batch 650, loss[loss=0.2689, simple_loss=0.3334, pruned_loss=0.1022, over 19676.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3475, pruned_loss=0.1159, over 3677190.22 frames. ], batch size: 53, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:20,085 INFO [train.py:903] (1/4) Epoch 5, batch 700, loss[loss=0.3719, simple_loss=0.4103, pruned_loss=0.1667, over 18810.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.348, pruned_loss=0.1162, over 3711893.85 frames. 
], batch size: 74, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:47,055 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 7.486e+02 9.333e+02 1.140e+03 2.488e+03, threshold=1.867e+03, percent-clipped=5.0 +2023-04-01 04:28:48,612 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8712, 4.2136, 4.5650, 4.4925, 1.3534, 4.1292, 3.6166, 4.1400], + device='cuda:1'), covar=tensor([0.0925, 0.0533, 0.0454, 0.0396, 0.4191, 0.0308, 0.0515, 0.0969], + device='cuda:1'), in_proj_covar=tensor([0.0477, 0.0426, 0.0567, 0.0450, 0.0552, 0.0323, 0.0367, 0.0525], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 04:29:25,954 INFO [train.py:903] (1/4) Epoch 5, batch 750, loss[loss=0.2999, simple_loss=0.3574, pruned_loss=0.1212, over 19775.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3474, pruned_loss=0.1158, over 3734520.67 frames. ], batch size: 54, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:29:43,607 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28076.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:30:27,876 INFO [train.py:903] (1/4) Epoch 5, batch 800, loss[loss=0.281, simple_loss=0.3437, pruned_loss=0.1092, over 19689.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3467, pruned_loss=0.1149, over 3757854.80 frames. ], batch size: 59, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:30:48,156 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 04:30:52,980 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.190e+02 6.427e+02 8.567e+02 1.032e+03 2.729e+03, threshold=1.713e+03, percent-clipped=3.0 +2023-04-01 04:31:32,197 INFO [train.py:903] (1/4) Epoch 5, batch 850, loss[loss=0.295, simple_loss=0.3568, pruned_loss=0.1166, over 19611.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3472, pruned_loss=0.1148, over 3777197.79 frames. ], batch size: 57, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:32:08,722 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28191.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:32:29,187 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 04:32:33,621 INFO [train.py:903] (1/4) Epoch 5, batch 900, loss[loss=0.285, simple_loss=0.3468, pruned_loss=0.1116, over 17302.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3465, pruned_loss=0.1149, over 3790717.22 frames. ], batch size: 101, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:32:59,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+02 6.938e+02 8.196e+02 1.125e+03 2.658e+03, threshold=1.639e+03, percent-clipped=4.0 +2023-04-01 04:33:17,414 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:33:22,451 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.17 vs. limit=5.0 +2023-04-01 04:33:36,683 INFO [train.py:903] (1/4) Epoch 5, batch 950, loss[loss=0.3249, simple_loss=0.3745, pruned_loss=0.1376, over 19536.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3474, pruned_loss=0.1153, over 3793106.16 frames. ], batch size: 54, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:33:42,370 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 04:33:49,427 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28273.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:34:28,459 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:34:36,191 INFO [train.py:903] (1/4) Epoch 5, batch 1000, loss[loss=0.2592, simple_loss=0.3084, pruned_loss=0.105, over 19372.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3477, pruned_loss=0.1156, over 3806577.74 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:34:45,493 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1097, 1.8769, 1.3901, 1.1998, 1.7067, 0.9272, 0.8312, 1.5346], + device='cuda:1'), covar=tensor([0.0549, 0.0432, 0.0881, 0.0499, 0.0379, 0.0997, 0.0582, 0.0331], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0259, 0.0311, 0.0236, 0.0218, 0.0308, 0.0277, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:34:59,300 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 7.143e+02 8.888e+02 1.138e+03 2.880e+03, threshold=1.778e+03, percent-clipped=9.0 +2023-04-01 04:35:30,267 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 04:35:36,615 INFO [train.py:903] (1/4) Epoch 5, batch 1050, loss[loss=0.2849, simple_loss=0.3377, pruned_loss=0.116, over 19723.00 frames. ], tot_loss[loss=0.2904, simple_loss=0.348, pruned_loss=0.1164, over 3812819.55 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:35:44,075 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2583, 1.3183, 1.2210, 1.0071, 0.9857, 1.2213, 0.0259, 0.4736], + device='cuda:1'), covar=tensor([0.0262, 0.0237, 0.0159, 0.0200, 0.0551, 0.0183, 0.0404, 0.0389], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0273, 0.0269, 0.0294, 0.0355, 0.0279, 0.0267, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:36:09,492 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 04:36:36,346 INFO [train.py:903] (1/4) Epoch 5, batch 1100, loss[loss=0.3699, simple_loss=0.4151, pruned_loss=0.1624, over 19512.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3488, pruned_loss=0.1169, over 3814180.45 frames. ], batch size: 64, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:01,574 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+02 6.917e+02 8.996e+02 1.190e+03 3.192e+03, threshold=1.799e+03, percent-clipped=6.0 +2023-04-01 04:37:19,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28447.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:37:37,664 INFO [train.py:903] (1/4) Epoch 5, batch 1150, loss[loss=0.3378, simple_loss=0.3755, pruned_loss=0.15, over 13435.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.349, pruned_loss=0.1169, over 3804322.14 frames. 
], batch size: 137, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:50,298 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28472.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:38:11,028 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9895, 1.8781, 1.9457, 2.0983, 4.4295, 1.1855, 2.4475, 4.6577], + device='cuda:1'), covar=tensor([0.0228, 0.2196, 0.2083, 0.1256, 0.0440, 0.2262, 0.1100, 0.0228], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0305, 0.0306, 0.0283, 0.0299, 0.0313, 0.0282, 0.0296], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:38:29,147 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:36,075 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:38,083 INFO [train.py:903] (1/4) Epoch 5, batch 1200, loss[loss=0.268, simple_loss=0.3424, pruned_loss=0.09681, over 19322.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3488, pruned_loss=0.1165, over 3818610.91 frames. ], batch size: 70, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:39:01,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.666e+02 7.999e+02 1.012e+03 1.920e+03, threshold=1.600e+03, percent-clipped=0.0 +2023-04-01 04:39:12,213 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 04:39:37,127 INFO [train.py:903] (1/4) Epoch 5, batch 1250, loss[loss=0.2571, simple_loss=0.32, pruned_loss=0.09706, over 19391.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3487, pruned_loss=0.1166, over 3803631.64 frames. ], batch size: 47, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:13,035 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:40:37,857 INFO [train.py:903] (1/4) Epoch 5, batch 1300, loss[loss=0.2649, simple_loss=0.3375, pruned_loss=0.09617, over 19668.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3478, pruned_loss=0.1156, over 3820466.84 frames. ], batch size: 58, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:43,785 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28617.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:41:03,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 7.072e+02 8.596e+02 1.219e+03 1.879e+03, threshold=1.719e+03, percent-clipped=8.0 +2023-04-01 04:41:23,362 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:41:37,882 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4487, 1.3005, 1.3222, 1.7310, 3.0049, 1.2577, 2.0346, 3.2204], + device='cuda:1'), covar=tensor([0.0333, 0.2386, 0.2537, 0.1379, 0.0524, 0.2033, 0.1144, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0306, 0.0307, 0.0284, 0.0298, 0.0313, 0.0281, 0.0295], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:41:39,854 INFO [train.py:903] (1/4) Epoch 5, batch 1350, loss[loss=0.3382, simple_loss=0.3836, pruned_loss=0.1464, over 19514.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3464, pruned_loss=0.1145, over 3831258.31 frames. 
], batch size: 54, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:42:14,813 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2854, 2.3213, 2.0687, 3.4263, 2.2452, 3.2893, 2.9532, 2.0154], + device='cuda:1'), covar=tensor([0.1792, 0.1342, 0.0726, 0.0819, 0.1692, 0.0517, 0.1218, 0.1221], + device='cuda:1'), in_proj_covar=tensor([0.0600, 0.0591, 0.0540, 0.0745, 0.0642, 0.0508, 0.0653, 0.0557], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:42:34,081 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:42:37,528 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3273, 2.2477, 1.5494, 1.5421, 2.0549, 1.0367, 1.2007, 1.7233], + device='cuda:1'), covar=tensor([0.0718, 0.0433, 0.0819, 0.0508, 0.0382, 0.0964, 0.0602, 0.0375], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0258, 0.0309, 0.0238, 0.0219, 0.0308, 0.0277, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:42:40,551 INFO [train.py:903] (1/4) Epoch 5, batch 1400, loss[loss=0.303, simple_loss=0.3628, pruned_loss=0.1216, over 19663.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3463, pruned_loss=0.1142, over 3829248.32 frames. ], batch size: 55, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:04,141 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 6.792e+02 9.169e+02 1.129e+03 1.829e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-04-01 04:43:04,462 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28732.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:43:40,834 INFO [train.py:903] (1/4) Epoch 5, batch 1450, loss[loss=0.2693, simple_loss=0.3401, pruned_loss=0.09927, over 19666.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3469, pruned_loss=0.1145, over 3836984.35 frames. ], batch size: 59, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:43,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 04:43:43,512 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28764.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:44:37,599 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2132, 1.6179, 1.7366, 2.6002, 1.7416, 2.3610, 2.6017, 2.2270], + device='cuda:1'), covar=tensor([0.0726, 0.1052, 0.1101, 0.1005, 0.1208, 0.0743, 0.0932, 0.0738], + device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0261, 0.0253, 0.0289, 0.0286, 0.0241, 0.0247, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 04:44:41,294 INFO [train.py:903] (1/4) Epoch 5, batch 1500, loss[loss=0.2876, simple_loss=0.3244, pruned_loss=0.1254, over 19034.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3456, pruned_loss=0.1138, over 3847760.21 frames. 
], batch size: 42, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:45:06,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.912e+02 8.562e+02 1.022e+03 2.509e+03, threshold=1.712e+03, percent-clipped=1.0 +2023-04-01 04:45:24,298 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:31,721 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:42,352 INFO [train.py:903] (1/4) Epoch 5, batch 1550, loss[loss=0.2877, simple_loss=0.3574, pruned_loss=0.109, over 19665.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3479, pruned_loss=0.115, over 3843760.28 frames. ], batch size: 60, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:45:56,766 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4437, 1.2578, 1.7762, 1.2420, 2.8291, 3.5981, 3.5311, 3.7696], + device='cuda:1'), covar=tensor([0.1333, 0.2767, 0.2527, 0.1832, 0.0398, 0.0118, 0.0178, 0.0120], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0282, 0.0320, 0.0257, 0.0199, 0.0117, 0.0206, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:46:02,322 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28879.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:46:04,681 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6230, 4.1252, 4.2819, 4.2226, 1.3979, 3.9255, 3.5462, 3.8233], + device='cuda:1'), covar=tensor([0.1029, 0.0598, 0.0579, 0.0508, 0.3809, 0.0357, 0.0515, 0.1194], + device='cuda:1'), in_proj_covar=tensor([0.0504, 0.0451, 0.0600, 0.0479, 0.0572, 0.0342, 0.0387, 0.0553], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 04:46:41,466 INFO [train.py:903] (1/4) Epoch 5, batch 1600, loss[loss=0.2603, simple_loss=0.3039, pruned_loss=0.1084, over 19734.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3483, pruned_loss=0.1154, over 3845247.23 frames. ], batch size: 47, lr: 1.67e-02, grad_scale: 8.0 +2023-04-01 04:47:04,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+02 6.984e+02 8.420e+02 1.102e+03 2.946e+03, threshold=1.684e+03, percent-clipped=4.0 +2023-04-01 04:47:04,569 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 04:47:28,988 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 04:47:41,581 INFO [train.py:903] (1/4) Epoch 5, batch 1650, loss[loss=0.2384, simple_loss=0.3007, pruned_loss=0.08805, over 19776.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3477, pruned_loss=0.1148, over 3839559.95 frames. 
], batch size: 45, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:47:41,971 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:43,165 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28963.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:49,859 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:00,800 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1192, 1.2643, 1.9569, 1.3799, 2.6712, 2.1868, 2.8165, 1.0692], + device='cuda:1'), covar=tensor([0.1854, 0.3015, 0.1487, 0.1553, 0.1201, 0.1456, 0.1346, 0.2657], + device='cuda:1'), in_proj_covar=tensor([0.0435, 0.0498, 0.0469, 0.0402, 0.0541, 0.0437, 0.0620, 0.0432], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:48:12,488 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:14,414 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28988.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:48:30,715 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9449, 1.4089, 1.5206, 2.0732, 1.6877, 1.6673, 1.8295, 1.8512], + device='cuda:1'), covar=tensor([0.0800, 0.1656, 0.1291, 0.0851, 0.1147, 0.0520, 0.0819, 0.0677], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0354, 0.0277, 0.0238, 0.0304, 0.0248, 0.0270, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:48:42,351 INFO [train.py:903] (1/4) Epoch 5, batch 1700, loss[loss=0.3554, simple_loss=0.3899, pruned_loss=0.1604, over 18167.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3479, pruned_loss=0.1153, over 3833082.23 frames. ], batch size: 83, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:48:43,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29013.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:48:51,931 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:52,063 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:04,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1811, 1.4859, 1.6443, 2.1107, 1.6698, 1.9076, 1.8965, 2.0265], + device='cuda:1'), covar=tensor([0.0738, 0.1698, 0.1291, 0.0854, 0.1249, 0.0430, 0.0924, 0.0576], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0360, 0.0281, 0.0239, 0.0306, 0.0249, 0.0274, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:49:08,763 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+02 6.699e+02 8.227e+02 1.083e+03 2.721e+03, threshold=1.645e+03, percent-clipped=4.0 +2023-04-01 04:49:21,047 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 04:49:22,490 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:42,474 INFO [train.py:903] (1/4) Epoch 5, batch 1750, loss[loss=0.3077, simple_loss=0.3638, pruned_loss=0.1258, over 19536.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3468, pruned_loss=0.1145, over 3829874.83 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:43,926 INFO [train.py:903] (1/4) Epoch 5, batch 1800, loss[loss=0.3417, simple_loss=0.3904, pruned_loss=0.1465, over 17398.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3469, pruned_loss=0.1144, over 3819222.44 frames. ], batch size: 101, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:54,482 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5087, 1.2487, 1.3464, 1.7350, 2.9777, 0.9793, 2.0622, 3.1326], + device='cuda:1'), covar=tensor([0.0347, 0.2532, 0.2510, 0.1445, 0.0567, 0.2491, 0.1173, 0.0422], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0307, 0.0310, 0.0287, 0.0301, 0.0314, 0.0281, 0.0300], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:51:07,717 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.740e+02 6.050e+02 7.849e+02 1.011e+03 2.328e+03, threshold=1.570e+03, percent-clipped=7.0 +2023-04-01 04:51:27,789 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 04:51:39,751 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 04:51:44,192 INFO [train.py:903] (1/4) Epoch 5, batch 1850, loss[loss=0.2672, simple_loss=0.3251, pruned_loss=0.1047, over 19748.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3451, pruned_loss=0.1131, over 3820821.99 frames. ], batch size: 46, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:17,655 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 04:52:43,727 INFO [train.py:903] (1/4) Epoch 5, batch 1900, loss[loss=0.2529, simple_loss=0.316, pruned_loss=0.09496, over 19618.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3456, pruned_loss=0.1136, over 3825564.53 frames. ], batch size: 50, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:53,173 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29219.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:52:58,362 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:01,718 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:03,249 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 04:53:07,800 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-01 04:53:11,315 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+02 6.988e+02 9.073e+02 1.156e+03 1.890e+03, threshold=1.815e+03, percent-clipped=5.0 +2023-04-01 04:53:12,554 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6539, 1.3516, 1.2533, 1.7152, 1.4273, 1.5515, 1.2952, 1.5985], + device='cuda:1'), covar=tensor([0.0803, 0.1246, 0.1303, 0.0690, 0.0949, 0.0465, 0.0954, 0.0671], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0368, 0.0287, 0.0239, 0.0305, 0.0247, 0.0274, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:53:23,913 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:28,888 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 04:53:30,419 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:35,748 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:44,960 INFO [train.py:903] (1/4) Epoch 5, batch 1950, loss[loss=0.2852, simple_loss=0.3502, pruned_loss=0.1101, over 19515.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3464, pruned_loss=0.1142, over 3824482.57 frames. ], batch size: 64, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:54:46,875 INFO [train.py:903] (1/4) Epoch 5, batch 2000, loss[loss=0.2338, simple_loss=0.293, pruned_loss=0.08728, over 19725.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3461, pruned_loss=0.1138, over 3834566.55 frames. ], batch size: 46, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:10,279 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.142e+02 7.102e+02 9.141e+02 1.135e+03 3.050e+03, threshold=1.828e+03, percent-clipped=2.0 +2023-04-01 04:55:16,876 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:21,655 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 04:55:36,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3650, 1.1904, 1.6893, 1.1420, 2.7896, 3.3812, 3.2777, 3.6307], + device='cuda:1'), covar=tensor([0.1280, 0.2865, 0.2644, 0.1915, 0.0388, 0.0131, 0.0183, 0.0115], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0280, 0.0314, 0.0256, 0.0196, 0.0115, 0.0204, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 04:55:43,159 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 04:55:46,565 INFO [train.py:903] (1/4) Epoch 5, batch 2050, loss[loss=0.2817, simple_loss=0.3454, pruned_loss=0.1089, over 19625.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3467, pruned_loss=0.114, over 3816962.25 frames. ], batch size: 57, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:49,013 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:59,868 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. 
Duration: 0.95 +2023-04-01 04:56:00,828 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 04:56:23,069 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 04:56:37,156 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3950, 1.5359, 2.0722, 1.5028, 2.9429, 2.9010, 3.3930, 1.3411], + device='cuda:1'), covar=tensor([0.1716, 0.2840, 0.1634, 0.1418, 0.1323, 0.1224, 0.1401, 0.2762], + device='cuda:1'), in_proj_covar=tensor([0.0440, 0.0502, 0.0472, 0.0405, 0.0538, 0.0434, 0.0620, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 04:56:45,827 INFO [train.py:903] (1/4) Epoch 5, batch 2100, loss[loss=0.2436, simple_loss=0.3123, pruned_loss=0.0875, over 19758.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3463, pruned_loss=0.1143, over 3799425.16 frames. ], batch size: 51, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:57:07,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 04:57:12,304 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 7.145e+02 9.347e+02 1.270e+03 4.921e+03, threshold=1.869e+03, percent-clipped=10.0 +2023-04-01 04:57:13,527 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 04:57:20,943 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1343, 2.1227, 1.6162, 1.2105, 1.9387, 0.8447, 0.9226, 1.7657], + device='cuda:1'), covar=tensor([0.0672, 0.0364, 0.0685, 0.0560, 0.0278, 0.1050, 0.0595, 0.0252], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0260, 0.0306, 0.0236, 0.0212, 0.0306, 0.0280, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 04:57:34,264 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 04:57:46,682 INFO [train.py:903] (1/4) Epoch 5, batch 2150, loss[loss=0.2545, simple_loss=0.3195, pruned_loss=0.09468, over 19741.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3476, pruned_loss=0.1152, over 3804339.81 frames. ], batch size: 51, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:58:08,975 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29479.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:58:48,386 INFO [train.py:903] (1/4) Epoch 5, batch 2200, loss[loss=0.283, simple_loss=0.3322, pruned_loss=0.1169, over 19811.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3479, pruned_loss=0.116, over 3810981.61 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:11,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+02 6.760e+02 8.285e+02 1.117e+03 1.782e+03, threshold=1.657e+03, percent-clipped=0.0 +2023-04-01 04:59:47,483 INFO [train.py:903] (1/4) Epoch 5, batch 2250, loss[loss=0.2944, simple_loss=0.3588, pruned_loss=0.1151, over 19688.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.3482, pruned_loss=0.1158, over 3822122.10 frames. 
], batch size: 60, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:55,512 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:28,206 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:31,572 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6466, 1.1515, 1.2017, 1.8542, 1.4371, 1.8777, 1.9929, 1.5965], + device='cuda:1'), covar=tensor([0.0832, 0.1149, 0.1163, 0.1049, 0.1012, 0.0740, 0.0970, 0.0802], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0253, 0.0245, 0.0283, 0.0277, 0.0230, 0.0240, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:00:32,406 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:48,023 INFO [train.py:903] (1/4) Epoch 5, batch 2300, loss[loss=0.2453, simple_loss=0.3187, pruned_loss=0.08594, over 19675.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3483, pruned_loss=0.1158, over 3813478.50 frames. ], batch size: 60, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 05:00:56,341 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29619.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:01:03,671 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 05:01:15,151 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.694e+02 6.380e+02 7.654e+02 9.572e+02 1.859e+03, threshold=1.531e+03, percent-clipped=2.0 +2023-04-01 05:01:48,834 INFO [train.py:903] (1/4) Epoch 5, batch 2350, loss[loss=0.2873, simple_loss=0.3531, pruned_loss=0.1108, over 17387.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3465, pruned_loss=0.114, over 3827715.62 frames. ], batch size: 101, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:11,372 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29680.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:02:28,857 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 05:02:45,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 05:02:49,425 INFO [train.py:903] (1/4) Epoch 5, batch 2400, loss[loss=0.2738, simple_loss=0.3429, pruned_loss=0.1023, over 19741.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3463, pruned_loss=0.1138, over 3828292.31 frames. ], batch size: 63, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:50,615 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:12,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.376e+02 6.843e+02 7.889e+02 1.103e+03 3.246e+03, threshold=1.578e+03, percent-clipped=5.0 +2023-04-01 05:03:15,322 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:46,786 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:48,491 INFO [train.py:903] (1/4) Epoch 5, batch 2450, loss[loss=0.3223, simple_loss=0.3578, pruned_loss=0.1434, over 19373.00 frames. 
], tot_loss[loss=0.2865, simple_loss=0.3458, pruned_loss=0.1136, over 3839501.91 frames. ], batch size: 47, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:48,603 INFO [train.py:903] (1/4) Epoch 5, batch 2500, loss[loss=0.2088, simple_loss=0.2767, pruned_loss=0.07041, over 19793.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.345, pruned_loss=0.1137, over 3838057.88 frames. ], batch size: 48, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:57,742 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:05:14,590 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 6.790e+02 8.500e+02 1.086e+03 2.138e+03, threshold=1.700e+03, percent-clipped=3.0 +2023-04-01 05:05:48,274 INFO [train.py:903] (1/4) Epoch 5, batch 2550, loss[loss=0.3296, simple_loss=0.384, pruned_loss=0.1376, over 18009.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3454, pruned_loss=0.114, over 3811281.60 frames. ], batch size: 83, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:40,399 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 05:06:48,587 INFO [train.py:903] (1/4) Epoch 5, batch 2600, loss[loss=0.2647, simple_loss=0.3264, pruned_loss=0.1015, over 19621.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3469, pruned_loss=0.1155, over 3794034.89 frames. ], batch size: 50, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:50,692 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:07:05,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9177, 1.8343, 1.9169, 2.9363, 1.8737, 2.7580, 2.6507, 1.8719], + device='cuda:1'), covar=tensor([0.1848, 0.1494, 0.0780, 0.0781, 0.1693, 0.0558, 0.1345, 0.1352], + device='cuda:1'), in_proj_covar=tensor([0.0607, 0.0597, 0.0541, 0.0755, 0.0646, 0.0516, 0.0654, 0.0563], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 05:07:13,514 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.882e+02 9.175e+02 1.239e+03 1.828e+03, threshold=1.835e+03, percent-clipped=5.0 +2023-04-01 05:07:50,211 INFO [train.py:903] (1/4) Epoch 5, batch 2650, loss[loss=0.2665, simple_loss=0.3375, pruned_loss=0.09771, over 19751.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3462, pruned_loss=0.1148, over 3802353.59 frames. ], batch size: 54, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:07:58,249 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:08,349 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 05:08:29,864 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:48,857 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9846, 1.8568, 1.3606, 1.2241, 1.7326, 0.9310, 0.9484, 1.6699], + device='cuda:1'), covar=tensor([0.0641, 0.0417, 0.0787, 0.0459, 0.0305, 0.0941, 0.0565, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0266, 0.0313, 0.0237, 0.0221, 0.0310, 0.0288, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:08:50,700 INFO [train.py:903] (1/4) Epoch 5, batch 2700, loss[loss=0.2702, simple_loss=0.3323, pruned_loss=0.1041, over 19659.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3455, pruned_loss=0.1143, over 3794218.12 frames. ], batch size: 53, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:09:04,702 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:10,326 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:17,304 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.255e+02 6.674e+02 8.375e+02 9.964e+02 1.932e+03, threshold=1.675e+03, percent-clipped=1.0 +2023-04-01 05:09:31,506 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1550, 2.1085, 1.9930, 3.2136, 2.2633, 3.1744, 2.8532, 1.9549], + device='cuda:1'), covar=tensor([0.1972, 0.1520, 0.0828, 0.0920, 0.1822, 0.0583, 0.1462, 0.1387], + device='cuda:1'), in_proj_covar=tensor([0.0604, 0.0596, 0.0543, 0.0754, 0.0651, 0.0518, 0.0662, 0.0564], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 05:09:49,948 INFO [train.py:903] (1/4) Epoch 5, batch 2750, loss[loss=0.2695, simple_loss=0.329, pruned_loss=0.105, over 19394.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3452, pruned_loss=0.1139, over 3804493.16 frames. ], batch size: 48, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:10:10,883 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9725, 4.4120, 4.6710, 4.5895, 1.5700, 4.2663, 3.8076, 4.2014], + device='cuda:1'), covar=tensor([0.0879, 0.0504, 0.0422, 0.0369, 0.4058, 0.0323, 0.0439, 0.0968], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0448, 0.0598, 0.0487, 0.0575, 0.0353, 0.0381, 0.0552], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 05:10:42,536 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4739, 1.2728, 1.3406, 1.6068, 3.0739, 1.0603, 2.0284, 2.9815], + device='cuda:1'), covar=tensor([0.0356, 0.2308, 0.2397, 0.1458, 0.0537, 0.2135, 0.1244, 0.0435], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0313, 0.0312, 0.0287, 0.0304, 0.0311, 0.0288, 0.0303], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:10:50,795 INFO [train.py:903] (1/4) Epoch 5, batch 2800, loss[loss=0.2826, simple_loss=0.3434, pruned_loss=0.1109, over 19529.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3462, pruned_loss=0.1143, over 3813633.43 frames. 
], batch size: 54, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:11:17,048 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.279e+02 7.041e+02 9.072e+02 1.191e+03 2.188e+03, threshold=1.814e+03, percent-clipped=6.0 +2023-04-01 05:11:22,780 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30139.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:27,357 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:51,983 INFO [train.py:903] (1/4) Epoch 5, batch 2850, loss[loss=0.2446, simple_loss=0.3103, pruned_loss=0.08949, over 19783.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3468, pruned_loss=0.1145, over 3813802.66 frames. ], batch size: 49, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:11:54,295 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30164.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:12:15,906 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8398, 1.4571, 1.4446, 2.0025, 1.5593, 2.0418, 2.1481, 2.0074], + device='cuda:1'), covar=tensor([0.0746, 0.1022, 0.1064, 0.0965, 0.1013, 0.0769, 0.0832, 0.0635], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0259, 0.0249, 0.0288, 0.0282, 0.0239, 0.0242, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:12:46,630 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2784, 2.1908, 1.6740, 1.4904, 1.9865, 1.1710, 1.1313, 1.8856], + device='cuda:1'), covar=tensor([0.0560, 0.0401, 0.0783, 0.0483, 0.0342, 0.0961, 0.0563, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0265, 0.0310, 0.0235, 0.0219, 0.0310, 0.0286, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:12:51,657 INFO [train.py:903] (1/4) Epoch 5, batch 2900, loss[loss=0.3045, simple_loss=0.3684, pruned_loss=0.1203, over 19577.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3475, pruned_loss=0.1148, over 3817162.37 frames. ], batch size: 61, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:12:51,674 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 05:13:09,285 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:13:20,142 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.023e+02 7.237e+02 9.091e+02 1.153e+03 2.755e+03, threshold=1.818e+03, percent-clipped=7.0 +2023-04-01 05:13:28,894 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5474, 1.2447, 1.2139, 2.0751, 1.6016, 1.7925, 2.1892, 1.8013], + device='cuda:1'), covar=tensor([0.0873, 0.1124, 0.1186, 0.0835, 0.1006, 0.0823, 0.0781, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0259, 0.0248, 0.0286, 0.0280, 0.0237, 0.0242, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:13:51,429 INFO [train.py:903] (1/4) Epoch 5, batch 2950, loss[loss=0.2413, simple_loss=0.2915, pruned_loss=0.09557, over 18969.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3476, pruned_loss=0.1149, over 3807402.84 frames. 
], batch size: 42, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:12,794 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:18,160 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30284.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:29,249 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9402, 1.6051, 1.6578, 2.2990, 1.8883, 2.1281, 2.2458, 2.0417], + device='cuda:1'), covar=tensor([0.0670, 0.0934, 0.0865, 0.0667, 0.0844, 0.0626, 0.0679, 0.0552], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0258, 0.0247, 0.0282, 0.0279, 0.0235, 0.0240, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:14:46,949 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30309.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:50,603 INFO [train.py:903] (1/4) Epoch 5, batch 3000, loss[loss=0.3269, simple_loss=0.3783, pruned_loss=0.1378, over 18077.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3486, pruned_loss=0.1158, over 3806522.43 frames. ], batch size: 83, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:50,603 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 05:15:03,126 INFO [train.py:937] (1/4) Epoch 5, validation: loss=0.2047, simple_loss=0.3034, pruned_loss=0.05296, over 944034.00 frames. +2023-04-01 05:15:03,127 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 05:15:05,714 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 05:15:08,541 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1497, 1.1952, 1.6838, 1.3680, 2.5733, 2.2016, 2.7471, 0.9618], + device='cuda:1'), covar=tensor([0.1793, 0.2982, 0.1643, 0.1464, 0.1178, 0.1423, 0.1288, 0.2763], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0507, 0.0478, 0.0408, 0.0550, 0.0439, 0.0626, 0.0441], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 05:15:33,552 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 7.155e+02 8.736e+02 1.085e+03 2.346e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 05:16:06,358 INFO [train.py:903] (1/4) Epoch 5, batch 3050, loss[loss=0.2684, simple_loss=0.3433, pruned_loss=0.09676, over 19643.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3469, pruned_loss=0.1143, over 3812191.99 frames. 
], batch size: 55, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:16:19,957 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3691, 2.1835, 1.8293, 1.8055, 1.5357, 1.8157, 0.4906, 1.2346], + device='cuda:1'), covar=tensor([0.0238, 0.0233, 0.0183, 0.0269, 0.0495, 0.0301, 0.0481, 0.0428], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0280, 0.0282, 0.0299, 0.0365, 0.0287, 0.0275, 0.0291], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 05:16:22,185 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9107, 1.7540, 1.7587, 2.0365, 4.2815, 0.9554, 2.2603, 4.2092], + device='cuda:1'), covar=tensor([0.0249, 0.2221, 0.2180, 0.1332, 0.0457, 0.2346, 0.1185, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0311, 0.0311, 0.0286, 0.0307, 0.0317, 0.0289, 0.0304], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:16:26,382 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:16:45,447 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30395.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:07,569 INFO [train.py:903] (1/4) Epoch 5, batch 3100, loss[loss=0.2781, simple_loss=0.3387, pruned_loss=0.1088, over 19734.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3467, pruned_loss=0.1142, over 3808600.73 frames. ], batch size: 51, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:17:17,149 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30420.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:33,697 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+02 6.847e+02 8.274e+02 1.001e+03 3.134e+03, threshold=1.655e+03, percent-clipped=2.0 +2023-04-01 05:17:41,071 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-04-01 05:18:06,710 INFO [train.py:903] (1/4) Epoch 5, batch 3150, loss[loss=0.3162, simple_loss=0.3674, pruned_loss=0.1325, over 18781.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3469, pruned_loss=0.1147, over 3818768.97 frames. ], batch size: 74, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:18:34,306 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 05:18:37,227 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30487.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:19:06,281 INFO [train.py:903] (1/4) Epoch 5, batch 3200, loss[loss=0.2923, simple_loss=0.3592, pruned_loss=0.1127, over 19691.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.346, pruned_loss=0.1138, over 3818907.67 frames. 
], batch size: 59, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:19:23,695 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4626, 1.3493, 1.4713, 1.7548, 3.1612, 1.0469, 2.0244, 3.2698], + device='cuda:1'), covar=tensor([0.0354, 0.2120, 0.2204, 0.1354, 0.0438, 0.2169, 0.1139, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0306, 0.0306, 0.0285, 0.0300, 0.0312, 0.0284, 0.0302], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:19:26,759 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9934, 4.3166, 4.6533, 4.6481, 1.6642, 4.2735, 3.8006, 4.2208], + device='cuda:1'), covar=tensor([0.0931, 0.0628, 0.0438, 0.0403, 0.3929, 0.0367, 0.0473, 0.0939], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0437, 0.0589, 0.0479, 0.0566, 0.0352, 0.0380, 0.0554], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 05:19:35,545 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 7.005e+02 8.588e+02 1.128e+03 3.335e+03, threshold=1.718e+03, percent-clipped=13.0 +2023-04-01 05:19:35,989 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30535.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:04,478 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:06,322 INFO [train.py:903] (1/4) Epoch 5, batch 3250, loss[loss=0.3064, simple_loss=0.3632, pruned_loss=0.1248, over 18193.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3449, pruned_loss=0.1128, over 3826838.22 frames. ], batch size: 83, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:20:19,560 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:55,340 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:21:08,677 INFO [train.py:903] (1/4) Epoch 5, batch 3300, loss[loss=0.416, simple_loss=0.4445, pruned_loss=0.1937, over 13845.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3461, pruned_loss=0.1134, over 3828759.11 frames. ], batch size: 136, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:21:16,483 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 05:21:35,516 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 6.699e+02 7.755e+02 9.198e+02 2.155e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 05:22:09,096 INFO [train.py:903] (1/4) Epoch 5, batch 3350, loss[loss=0.2753, simple_loss=0.3492, pruned_loss=0.1007, over 19687.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.345, pruned_loss=0.1126, over 3844121.09 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:22:21,636 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:22:38,249 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:09,773 INFO [train.py:903] (1/4) Epoch 5, batch 3400, loss[loss=0.2852, simple_loss=0.355, pruned_loss=0.1078, over 19474.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3451, pruned_loss=0.1124, over 3845809.46 frames. 
], batch size: 64, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:23:22,329 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:39,933 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+02 6.893e+02 8.724e+02 1.073e+03 2.213e+03, threshold=1.745e+03, percent-clipped=7.0 +2023-04-01 05:24:11,363 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 05:24:11,823 INFO [train.py:903] (1/4) Epoch 5, batch 3450, loss[loss=0.2581, simple_loss=0.3099, pruned_loss=0.1031, over 19705.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3459, pruned_loss=0.1126, over 3835743.78 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:24:15,170 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 05:25:13,747 INFO [train.py:903] (1/4) Epoch 5, batch 3500, loss[loss=0.3141, simple_loss=0.3751, pruned_loss=0.1266, over 19609.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3452, pruned_loss=0.1121, over 3832294.99 frames. ], batch size: 57, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:25:39,211 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 7.015e+02 8.195e+02 1.097e+03 2.546e+03, threshold=1.639e+03, percent-clipped=5.0 +2023-04-01 05:25:41,856 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:09,300 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30858.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:13,373 INFO [train.py:903] (1/4) Epoch 5, batch 3550, loss[loss=0.3158, simple_loss=0.3708, pruned_loss=0.1304, over 19320.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.344, pruned_loss=0.1119, over 3826918.97 frames. ], batch size: 66, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:26:38,722 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:13,893 INFO [train.py:903] (1/4) Epoch 5, batch 3600, loss[loss=0.2764, simple_loss=0.3281, pruned_loss=0.1123, over 19320.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3431, pruned_loss=0.1114, over 3828887.20 frames. 
], batch size: 44, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:27:24,193 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30921.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:43,239 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+02 6.769e+02 8.289e+02 1.066e+03 2.218e+03, threshold=1.658e+03, percent-clipped=4.0 +2023-04-01 05:27:51,229 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:52,184 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:57,956 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1917, 1.1589, 1.6157, 1.3917, 2.4485, 2.0641, 2.5623, 0.9344], + device='cuda:1'), covar=tensor([0.1867, 0.3137, 0.1743, 0.1511, 0.1068, 0.1493, 0.1175, 0.2875], + device='cuda:1'), in_proj_covar=tensor([0.0445, 0.0507, 0.0481, 0.0409, 0.0556, 0.0446, 0.0629, 0.0448], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 05:28:02,219 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:09,088 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9922, 5.3653, 2.8573, 4.7589, 1.1654, 5.1026, 5.2262, 5.3676], + device='cuda:1'), covar=tensor([0.0392, 0.0801, 0.1772, 0.0517, 0.3928, 0.0608, 0.0496, 0.0729], + device='cuda:1'), in_proj_covar=tensor([0.0348, 0.0308, 0.0363, 0.0286, 0.0355, 0.0301, 0.0284, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 05:28:13,483 INFO [train.py:903] (1/4) Epoch 5, batch 3650, loss[loss=0.252, simple_loss=0.3124, pruned_loss=0.09578, over 19786.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3425, pruned_loss=0.1112, over 3837517.97 frames. ], batch size: 48, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:28:20,082 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:45,408 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9891, 2.0659, 1.6094, 1.5276, 1.4634, 1.6238, 0.3065, 0.8777], + device='cuda:1'), covar=tensor([0.0277, 0.0258, 0.0175, 0.0235, 0.0511, 0.0273, 0.0498, 0.0447], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0284, 0.0287, 0.0309, 0.0376, 0.0298, 0.0280, 0.0293], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 05:29:02,306 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8916, 1.3307, 1.0097, 1.0305, 1.2570, 0.9223, 0.8977, 1.2208], + device='cuda:1'), covar=tensor([0.0511, 0.0627, 0.1004, 0.0459, 0.0415, 0.1027, 0.0504, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0274, 0.0317, 0.0243, 0.0225, 0.0314, 0.0286, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:29:14,331 INFO [train.py:903] (1/4) Epoch 5, batch 3700, loss[loss=0.3187, simple_loss=0.3614, pruned_loss=0.138, over 14015.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3431, pruned_loss=0.1118, over 3824155.65 frames. 
], batch size: 136, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:29:21,021 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:34,006 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31029.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:29:40,640 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+02 7.099e+02 8.962e+02 1.140e+03 3.223e+03, threshold=1.792e+03, percent-clipped=9.0 +2023-04-01 05:30:15,304 INFO [train.py:903] (1/4) Epoch 5, batch 3750, loss[loss=0.259, simple_loss=0.3315, pruned_loss=0.09323, over 19481.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3437, pruned_loss=0.1124, over 3824243.58 frames. ], batch size: 64, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:30:53,645 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:15,740 INFO [train.py:903] (1/4) Epoch 5, batch 3800, loss[loss=0.2781, simple_loss=0.3461, pruned_loss=0.1051, over 19487.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3438, pruned_loss=0.112, over 3830300.43 frames. ], batch size: 64, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:31:22,804 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:26,008 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1527, 1.5492, 1.7031, 2.0270, 1.8334, 1.8223, 1.5917, 1.9579], + device='cuda:1'), covar=tensor([0.0655, 0.1347, 0.1130, 0.0746, 0.1040, 0.0418, 0.0937, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0359, 0.0279, 0.0231, 0.0299, 0.0240, 0.0265, 0.0225], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:31:40,727 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:44,604 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 5.856e+02 8.467e+02 1.115e+03 2.554e+03, threshold=1.693e+03, percent-clipped=5.0 +2023-04-01 05:31:49,118 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 05:32:15,378 INFO [train.py:903] (1/4) Epoch 5, batch 3850, loss[loss=0.2675, simple_loss=0.3292, pruned_loss=0.1029, over 19707.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.343, pruned_loss=0.1113, over 3837415.77 frames. ], batch size: 59, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:32:37,689 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-01 05:33:19,167 INFO [train.py:903] (1/4) Epoch 5, batch 3900, loss[loss=0.318, simple_loss=0.3745, pruned_loss=0.1307, over 19343.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3428, pruned_loss=0.1112, over 3835802.06 frames. 
], batch size: 66, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:33:45,288 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.653e+02 6.709e+02 8.069e+02 1.055e+03 2.198e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 05:34:09,845 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3119, 2.0351, 1.5634, 1.3477, 1.8873, 1.0946, 1.1862, 1.6755], + device='cuda:1'), covar=tensor([0.0689, 0.0524, 0.0865, 0.0488, 0.0396, 0.0918, 0.0498, 0.0349], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0270, 0.0313, 0.0233, 0.0219, 0.0306, 0.0279, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:34:18,967 INFO [train.py:903] (1/4) Epoch 5, batch 3950, loss[loss=0.3412, simple_loss=0.385, pruned_loss=0.1487, over 19668.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3431, pruned_loss=0.1113, over 3841880.06 frames. ], batch size: 60, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:34:22,383 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:34:24,549 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 05:34:47,790 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:00,476 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31296.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:18,261 INFO [train.py:903] (1/4) Epoch 5, batch 4000, loss[loss=0.3015, simple_loss=0.3619, pruned_loss=0.1205, over 19525.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.344, pruned_loss=0.1129, over 3820360.50 frames. ], batch size: 54, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:35:28,433 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7264, 1.3891, 1.3581, 2.0486, 1.5469, 2.2170, 2.1004, 1.9287], + device='cuda:1'), covar=tensor([0.0765, 0.1045, 0.1132, 0.0870, 0.0919, 0.0591, 0.0834, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0230, 0.0252, 0.0246, 0.0280, 0.0273, 0.0233, 0.0231, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 05:35:29,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0553, 1.0059, 1.2715, 1.5700, 2.6058, 1.0939, 1.8466, 2.6531], + device='cuda:1'), covar=tensor([0.0389, 0.2484, 0.2385, 0.1304, 0.0605, 0.2031, 0.1078, 0.0472], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0307, 0.0307, 0.0279, 0.0294, 0.0306, 0.0283, 0.0295], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:35:48,957 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+02 7.061e+02 8.767e+02 1.081e+03 2.366e+03, threshold=1.753e+03, percent-clipped=7.0 +2023-04-01 05:36:06,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 05:36:11,766 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2983, 1.1931, 1.6375, 1.0564, 2.7092, 3.4254, 3.3489, 3.6680], + device='cuda:1'), covar=tensor([0.1351, 0.2944, 0.2739, 0.1979, 0.0408, 0.0123, 0.0184, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0280, 0.0317, 0.0249, 0.0197, 0.0120, 0.0205, 0.0136], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 05:36:18,326 INFO [train.py:903] (1/4) Epoch 5, batch 4050, loss[loss=0.2495, simple_loss=0.3272, pruned_loss=0.0859, over 19712.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3444, pruned_loss=0.1132, over 3821296.00 frames. ], batch size: 59, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:36:20,389 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 05:36:33,360 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31373.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:36:42,200 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:36:51,310 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:07,163 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:18,596 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:19,399 INFO [train.py:903] (1/4) Epoch 5, batch 4100, loss[loss=0.271, simple_loss=0.3498, pruned_loss=0.09612, over 19308.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3449, pruned_loss=0.1133, over 3814242.19 frames. ], batch size: 66, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:37:20,940 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:37,721 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:48,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.964e+02 7.397e+02 9.605e+02 3.908e+03, threshold=1.479e+03, percent-clipped=5.0 +2023-04-01 05:37:49,641 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 05:37:53,722 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 05:38:20,924 INFO [train.py:903] (1/4) Epoch 5, batch 4150, loss[loss=0.2785, simple_loss=0.3458, pruned_loss=0.1056, over 19602.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3437, pruned_loss=0.1125, over 3819451.56 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:38:50,999 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31488.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:39:16,822 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31509.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:39:20,063 INFO [train.py:903] (1/4) Epoch 5, batch 4200, loss[loss=0.2685, simple_loss=0.3362, pruned_loss=0.1004, over 18761.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3437, pruned_loss=0.112, over 3823562.34 frames. 
], batch size: 74, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:39:23,664 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 05:39:50,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.017e+02 6.856e+02 8.101e+02 1.045e+03 3.023e+03, threshold=1.620e+03, percent-clipped=6.0 +2023-04-01 05:40:19,233 INFO [train.py:903] (1/4) Epoch 5, batch 4250, loss[loss=0.2764, simple_loss=0.3482, pruned_loss=0.1023, over 19737.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3439, pruned_loss=0.1118, over 3832204.85 frames. ], batch size: 63, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:40:21,977 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6957, 1.2420, 1.2411, 1.8629, 1.4560, 2.0257, 2.0820, 1.9149], + device='cuda:1'), covar=tensor([0.0834, 0.1187, 0.1275, 0.1099, 0.1130, 0.0745, 0.1005, 0.0641], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0253, 0.0245, 0.0280, 0.0274, 0.0234, 0.0237, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:40:35,451 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 05:40:46,369 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 05:41:20,811 INFO [train.py:903] (1/4) Epoch 5, batch 4300, loss[loss=0.2616, simple_loss=0.324, pruned_loss=0.09956, over 19738.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3422, pruned_loss=0.1105, over 3843629.11 frames. ], batch size: 51, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:41:41,978 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4080, 1.0033, 1.0175, 1.7114, 1.3956, 1.3577, 1.6671, 1.3469], + device='cuda:1'), covar=tensor([0.0828, 0.1241, 0.1271, 0.0790, 0.0953, 0.0992, 0.0914, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0251, 0.0245, 0.0277, 0.0271, 0.0232, 0.0233, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 05:41:51,069 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:41:51,761 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 7.144e+02 8.425e+02 1.091e+03 2.021e+03, threshold=1.685e+03, percent-clipped=5.0 +2023-04-01 05:42:13,882 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 05:42:18,613 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:21,719 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:22,334 INFO [train.py:903] (1/4) Epoch 5, batch 4350, loss[loss=0.25, simple_loss=0.306, pruned_loss=0.09701, over 19111.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.343, pruned_loss=0.1112, over 3849576.28 frames. 
], batch size: 42, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:42:28,428 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:45,942 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:57,800 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:04,358 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3741, 1.2996, 1.4550, 1.5332, 2.8201, 0.9723, 1.9686, 3.1094], + device='cuda:1'), covar=tensor([0.0457, 0.2557, 0.2473, 0.1718, 0.0694, 0.2483, 0.1355, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0310, 0.0307, 0.0280, 0.0297, 0.0308, 0.0282, 0.0296], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:43:22,083 INFO [train.py:903] (1/4) Epoch 5, batch 4400, loss[loss=0.2578, simple_loss=0.3355, pruned_loss=0.0901, over 19526.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3434, pruned_loss=0.1113, over 3857111.31 frames. ], batch size: 56, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:43:34,003 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:44,919 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 05:43:53,319 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.183e+02 7.191e+02 8.879e+02 1.082e+03 1.961e+03, threshold=1.776e+03, percent-clipped=1.0 +2023-04-01 05:43:55,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 05:43:58,384 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.79 vs. limit=5.0 +2023-04-01 05:44:02,752 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31744.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:44:16,187 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0767, 5.0774, 5.9197, 5.7817, 1.5655, 5.4966, 4.6806, 5.3494], + device='cuda:1'), covar=tensor([0.0836, 0.0507, 0.0401, 0.0344, 0.4644, 0.0299, 0.0462, 0.0985], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0440, 0.0583, 0.0483, 0.0571, 0.0356, 0.0384, 0.0557], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 05:44:22,697 INFO [train.py:903] (1/4) Epoch 5, batch 4450, loss[loss=0.2432, simple_loss=0.3048, pruned_loss=0.0908, over 19776.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3429, pruned_loss=0.1108, over 3852436.21 frames. ], batch size: 48, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:44:30,906 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31769.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:44:32,868 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:16,682 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:20,581 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-01 05:45:23,177 INFO [train.py:903] (1/4) Epoch 5, batch 4500, loss[loss=0.3083, simple_loss=0.3683, pruned_loss=0.1241, over 19073.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.343, pruned_loss=0.1113, over 3840099.48 frames. ], batch size: 69, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:45:53,689 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.666e+02 8.776e+02 1.149e+03 2.550e+03, threshold=1.755e+03, percent-clipped=7.0 +2023-04-01 05:45:58,564 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:46:12,834 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:46:24,485 INFO [train.py:903] (1/4) Epoch 5, batch 4550, loss[loss=0.244, simple_loss=0.3074, pruned_loss=0.09034, over 19028.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3437, pruned_loss=0.1116, over 3833734.29 frames. ], batch size: 42, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:46:30,265 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 05:46:51,968 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 05:46:52,273 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:05,630 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31896.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:25,654 INFO [train.py:903] (1/4) Epoch 5, batch 4600, loss[loss=0.256, simple_loss=0.3139, pruned_loss=0.09907, over 19776.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3438, pruned_loss=0.1116, over 3833173.42 frames. ], batch size: 47, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:47:50,433 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:55,743 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 6.775e+02 7.918e+02 9.900e+02 3.222e+03, threshold=1.584e+03, percent-clipped=3.0 +2023-04-01 05:48:10,574 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8854, 1.4896, 1.4444, 2.1462, 1.8018, 2.1296, 2.3163, 1.9852], + device='cuda:1'), covar=tensor([0.0695, 0.1044, 0.1096, 0.0900, 0.0881, 0.0721, 0.0848, 0.0612], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0255, 0.0246, 0.0280, 0.0269, 0.0233, 0.0234, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 05:48:25,663 INFO [train.py:903] (1/4) Epoch 5, batch 4650, loss[loss=0.3255, simple_loss=0.3796, pruned_loss=0.1357, over 19684.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3434, pruned_loss=0.1109, over 3838725.67 frames. ], batch size: 53, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:48:32,723 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:48:32,788 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31968.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:48:41,062 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-04-01 05:48:52,751 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 05:49:25,340 INFO [train.py:903] (1/4) Epoch 5, batch 4700, loss[loss=0.2454, simple_loss=0.3095, pruned_loss=0.09061, over 19749.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3428, pruned_loss=0.1109, over 3835093.51 frames. ], batch size: 47, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:49:48,439 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 05:49:56,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.386e+02 6.540e+02 8.637e+02 1.061e+03 2.519e+03, threshold=1.727e+03, percent-clipped=5.0 +2023-04-01 05:50:27,324 INFO [train.py:903] (1/4) Epoch 5, batch 4750, loss[loss=0.2918, simple_loss=0.362, pruned_loss=0.1108, over 19467.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3435, pruned_loss=0.1112, over 3838462.10 frames. ], batch size: 64, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:50:32,824 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:50:34,215 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1982, 2.1510, 1.6271, 1.5718, 1.9729, 1.1762, 1.0958, 1.7288], + device='cuda:1'), covar=tensor([0.0687, 0.0414, 0.0796, 0.0452, 0.0288, 0.0926, 0.0549, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0264, 0.0312, 0.0234, 0.0215, 0.0309, 0.0281, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:51:28,012 INFO [train.py:903] (1/4) Epoch 5, batch 4800, loss[loss=0.2775, simple_loss=0.346, pruned_loss=0.1045, over 19742.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3439, pruned_loss=0.112, over 3832965.57 frames. ], batch size: 63, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:51:57,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+02 7.051e+02 8.763e+02 1.042e+03 3.094e+03, threshold=1.753e+03, percent-clipped=4.0 +2023-04-01 05:52:01,634 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3438, 1.3381, 1.8547, 1.5216, 3.1303, 4.4990, 4.5009, 4.9330], + device='cuda:1'), covar=tensor([0.1470, 0.2897, 0.2828, 0.1773, 0.0408, 0.0130, 0.0148, 0.0076], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0280, 0.0317, 0.0251, 0.0197, 0.0117, 0.0204, 0.0138], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 05:52:04,785 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32142.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:14,225 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:27,937 INFO [train.py:903] (1/4) Epoch 5, batch 4850, loss[loss=0.2964, simple_loss=0.3553, pruned_loss=0.1187, over 19533.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3439, pruned_loss=0.112, over 3832472.15 frames. 
], batch size: 64, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:52:34,137 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:51,451 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:54,112 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 05:52:56,539 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:53:10,262 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 05:53:12,907 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 05:53:17,713 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 05:53:18,683 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 05:53:26,876 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 05:53:27,947 INFO [train.py:903] (1/4) Epoch 5, batch 4900, loss[loss=0.288, simple_loss=0.3552, pruned_loss=0.1104, over 19491.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3433, pruned_loss=0.112, over 3820348.08 frames. ], batch size: 64, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:53:44,404 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32224.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:53:48,952 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 05:53:59,194 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.307e+02 6.585e+02 7.912e+02 1.039e+03 2.328e+03, threshold=1.582e+03, percent-clipped=1.0 +2023-04-01 05:54:02,587 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:13,257 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32249.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:54:29,205 INFO [train.py:903] (1/4) Epoch 5, batch 4950, loss[loss=0.2421, simple_loss=0.3148, pruned_loss=0.08473, over 19659.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3442, pruned_loss=0.1125, over 3817737.76 frames. ], batch size: 55, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:54:33,930 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:48,150 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32277.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:49,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 05:55:12,815 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 05:55:15,562 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:30,485 INFO [train.py:903] (1/4) Epoch 5, batch 5000, loss[loss=0.2775, simple_loss=0.3283, pruned_loss=0.1134, over 19357.00 frames. 
], tot_loss[loss=0.2846, simple_loss=0.3438, pruned_loss=0.1127, over 3830016.41 frames. ], batch size: 47, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:55:30,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:40,412 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 05:55:50,536 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 05:55:58,320 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+02 7.030e+02 8.789e+02 1.165e+03 2.289e+03, threshold=1.758e+03, percent-clipped=7.0 +2023-04-01 05:56:21,716 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:56:29,552 INFO [train.py:903] (1/4) Epoch 5, batch 5050, loss[loss=0.2393, simple_loss=0.3042, pruned_loss=0.08717, over 19322.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3443, pruned_loss=0.1129, over 3806394.18 frames. ], batch size: 44, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:57:05,108 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 05:57:07,774 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:57:29,764 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 05:57:30,257 INFO [train.py:903] (1/4) Epoch 5, batch 5100, loss[loss=0.2332, simple_loss=0.3008, pruned_loss=0.08276, over 19393.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3437, pruned_loss=0.1121, over 3815460.10 frames. ], batch size: 47, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:57:33,095 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6248, 1.8233, 2.4275, 2.5945, 2.4161, 2.2621, 2.5205, 2.7726], + device='cuda:1'), covar=tensor([0.0780, 0.1785, 0.1137, 0.0897, 0.1175, 0.0463, 0.0864, 0.0482], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0359, 0.0282, 0.0240, 0.0304, 0.0242, 0.0268, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:57:40,586 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 05:57:44,911 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 05:57:51,242 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 05:57:51,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:02,216 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 6.879e+02 8.267e+02 1.044e+03 2.791e+03, threshold=1.653e+03, percent-clipped=3.0 +2023-04-01 05:58:02,672 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:32,000 INFO [train.py:903] (1/4) Epoch 5, batch 5150, loss[loss=0.2678, simple_loss=0.3407, pruned_loss=0.09738, over 19666.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3412, pruned_loss=0.1098, over 3821797.67 frames. 
], batch size: 58, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:58:32,465 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32462.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:45,282 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 05:59:19,204 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 05:59:33,454 INFO [train.py:903] (1/4) Epoch 5, batch 5200, loss[loss=0.2849, simple_loss=0.3567, pruned_loss=0.1065, over 19641.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3416, pruned_loss=0.1102, over 3822159.71 frames. ], batch size: 60, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:59:42,703 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5557, 1.7314, 2.1502, 2.6196, 2.2544, 2.2518, 2.3923, 2.6332], + device='cuda:1'), covar=tensor([0.0731, 0.1906, 0.1238, 0.0839, 0.1221, 0.0423, 0.0823, 0.0496], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0365, 0.0283, 0.0238, 0.0305, 0.0239, 0.0268, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 05:59:43,923 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:59:45,961 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 06:00:02,761 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 6.175e+02 7.903e+02 1.065e+03 1.799e+03, threshold=1.581e+03, percent-clipped=1.0 +2023-04-01 06:00:15,191 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:26,231 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1446, 5.1053, 5.9989, 5.9112, 1.8870, 5.5756, 4.7388, 5.4550], + device='cuda:1'), covar=tensor([0.0833, 0.0478, 0.0433, 0.0362, 0.4194, 0.0268, 0.0414, 0.0967], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0447, 0.0589, 0.0492, 0.0571, 0.0359, 0.0388, 0.0559], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 06:00:26,391 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:29,357 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 06:00:32,701 INFO [train.py:903] (1/4) Epoch 5, batch 5250, loss[loss=0.3319, simple_loss=0.3871, pruned_loss=0.1384, over 17222.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3409, pruned_loss=0.1096, over 3823781.40 frames. 
], batch size: 101, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:00:37,577 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6924, 4.1493, 4.3370, 4.3307, 1.4468, 3.9835, 3.4893, 3.9635], + device='cuda:1'), covar=tensor([0.0841, 0.0495, 0.0466, 0.0404, 0.3979, 0.0344, 0.0506, 0.0952], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0448, 0.0590, 0.0493, 0.0573, 0.0360, 0.0389, 0.0559], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 06:00:55,260 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,157 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,913 INFO [train.py:903] (1/4) Epoch 5, batch 5300, loss[loss=0.2812, simple_loss=0.3472, pruned_loss=0.1076, over 19660.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3412, pruned_loss=0.1098, over 3824383.87 frames. ], batch size: 55, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:01:51,389 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 06:02:04,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:05,307 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 6.583e+02 8.041e+02 1.027e+03 2.106e+03, threshold=1.608e+03, percent-clipped=4.0 +2023-04-01 06:02:17,829 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32648.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:34,322 INFO [train.py:903] (1/4) Epoch 5, batch 5350, loss[loss=0.261, simple_loss=0.3312, pruned_loss=0.09542, over 19624.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.342, pruned_loss=0.1103, over 3836865.39 frames. ], batch size: 57, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:02:48,920 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:00,295 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:06,670 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 06:03:27,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 06:03:31,105 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:35,120 INFO [train.py:903] (1/4) Epoch 5, batch 5400, loss[loss=0.2517, simple_loss=0.3265, pruned_loss=0.08848, over 19685.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3426, pruned_loss=0.1109, over 3832724.13 frames. ], batch size: 53, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:04:03,208 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.192e+02 6.673e+02 8.431e+02 1.064e+03 2.658e+03, threshold=1.686e+03, percent-clipped=8.0 +2023-04-01 06:04:04,938 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-04-01 06:04:11,107 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3273, 2.4259, 1.7199, 1.3684, 2.2869, 1.1370, 1.1225, 1.7778], + device='cuda:1'), covar=tensor([0.0745, 0.0393, 0.0700, 0.0556, 0.0299, 0.0885, 0.0598, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0264, 0.0316, 0.0238, 0.0216, 0.0304, 0.0282, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 06:04:33,649 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7107, 4.0518, 4.3144, 4.3354, 1.5206, 3.9866, 3.5488, 3.8920], + device='cuda:1'), covar=tensor([0.0941, 0.0666, 0.0499, 0.0398, 0.4094, 0.0401, 0.0491, 0.1040], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0456, 0.0603, 0.0500, 0.0578, 0.0366, 0.0392, 0.0568], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 06:04:34,472 INFO [train.py:903] (1/4) Epoch 5, batch 5450, loss[loss=0.2608, simple_loss=0.3297, pruned_loss=0.09593, over 19668.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3411, pruned_loss=0.1102, over 3840596.33 frames. ], batch size: 55, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:05:33,890 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0155, 1.4920, 1.7786, 2.2851, 2.0324, 1.8831, 2.3250, 1.9418], + device='cuda:1'), covar=tensor([0.0879, 0.1433, 0.1135, 0.1054, 0.0999, 0.1151, 0.0926, 0.0854], + device='cuda:1'), in_proj_covar=tensor([0.0230, 0.0246, 0.0241, 0.0269, 0.0263, 0.0232, 0.0228, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 06:05:34,678 INFO [train.py:903] (1/4) Epoch 5, batch 5500, loss[loss=0.3308, simple_loss=0.3774, pruned_loss=0.1421, over 13342.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3415, pruned_loss=0.1103, over 3834985.15 frames. ], batch size: 136, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:05:56,690 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 06:06:05,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+02 6.283e+02 7.782e+02 1.000e+03 2.107e+03, threshold=1.556e+03, percent-clipped=4.0 +2023-04-01 06:06:34,471 INFO [train.py:903] (1/4) Epoch 5, batch 5550, loss[loss=0.2757, simple_loss=0.3451, pruned_loss=0.1032, over 19687.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.342, pruned_loss=0.1106, over 3835201.93 frames. ], batch size: 60, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:06:34,912 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1600, 1.9766, 1.4783, 1.4929, 1.3146, 1.6048, 0.2694, 0.9377], + device='cuda:1'), covar=tensor([0.0282, 0.0267, 0.0230, 0.0320, 0.0595, 0.0340, 0.0548, 0.0483], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0290, 0.0285, 0.0306, 0.0381, 0.0299, 0.0280, 0.0299], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:06:40,854 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 06:07:29,977 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-01 06:07:36,764 INFO [train.py:903] (1/4) Epoch 5, batch 5600, loss[loss=0.2737, simple_loss=0.3209, pruned_loss=0.1133, over 19356.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3427, pruned_loss=0.1111, over 3820559.53 frames. ], batch size: 47, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:08:06,605 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 7.188e+02 9.159e+02 1.163e+03 2.158e+03, threshold=1.832e+03, percent-clipped=9.0 +2023-04-01 06:08:23,137 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1330, 1.2633, 1.8868, 1.3427, 2.7869, 2.1774, 2.8805, 1.1163], + device='cuda:1'), covar=tensor([0.1741, 0.2884, 0.1509, 0.1424, 0.1071, 0.1388, 0.1164, 0.2671], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0500, 0.0479, 0.0403, 0.0550, 0.0444, 0.0622, 0.0439], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:08:38,295 INFO [train.py:903] (1/4) Epoch 5, batch 5650, loss[loss=0.2507, simple_loss=0.3268, pruned_loss=0.08731, over 19723.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3418, pruned_loss=0.1101, over 3815837.81 frames. ], batch size: 63, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:08:54,132 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-01 06:09:04,596 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:09:24,870 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 06:09:38,279 INFO [train.py:903] (1/4) Epoch 5, batch 5700, loss[loss=0.3061, simple_loss=0.3659, pruned_loss=0.1232, over 18272.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.342, pruned_loss=0.1106, over 3803363.30 frames. ], batch size: 84, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:09:42,492 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 06:10:09,258 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.491e+02 7.117e+02 8.783e+02 1.086e+03 2.576e+03, threshold=1.757e+03, percent-clipped=4.0 +2023-04-01 06:10:38,546 INFO [train.py:903] (1/4) Epoch 5, batch 5750, loss[loss=0.2943, simple_loss=0.3565, pruned_loss=0.1161, over 19729.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3431, pruned_loss=0.111, over 3797009.19 frames. ], batch size: 63, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:10:39,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 06:10:47,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 06:10:52,568 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 06:11:01,652 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1731, 1.2326, 1.6182, 1.3881, 2.1550, 2.0610, 2.3820, 0.8097], + device='cuda:1'), covar=tensor([0.1889, 0.3128, 0.1662, 0.1484, 0.1177, 0.1447, 0.1153, 0.2824], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0496, 0.0476, 0.0402, 0.0549, 0.0437, 0.0617, 0.0437], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:11:09,077 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33087.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:11:40,192 INFO [train.py:903] (1/4) Epoch 5, batch 5800, loss[loss=0.3001, simple_loss=0.3534, pruned_loss=0.1234, over 19501.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3413, pruned_loss=0.1099, over 3800127.53 frames. ], batch size: 49, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:08,911 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 7.070e+02 8.520e+02 1.129e+03 2.712e+03, threshold=1.704e+03, percent-clipped=8.0 +2023-04-01 06:12:26,073 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6918, 1.2974, 1.3939, 2.0519, 1.7068, 1.9980, 2.2510, 1.9296], + device='cuda:1'), covar=tensor([0.0772, 0.1056, 0.0969, 0.0819, 0.0874, 0.0689, 0.0748, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0249, 0.0245, 0.0274, 0.0267, 0.0232, 0.0232, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 06:12:40,679 INFO [train.py:903] (1/4) Epoch 5, batch 5850, loss[loss=0.2456, simple_loss=0.3156, pruned_loss=0.08784, over 19582.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3418, pruned_loss=0.1104, over 3804872.87 frames. ], batch size: 52, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:53,495 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:13:40,939 INFO [train.py:903] (1/4) Epoch 5, batch 5900, loss[loss=0.2513, simple_loss=0.3287, pruned_loss=0.08697, over 19520.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3409, pruned_loss=0.1103, over 3814802.52 frames. ], batch size: 56, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:13:43,338 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 06:14:04,498 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 06:14:11,989 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.345e+02 7.066e+02 8.762e+02 1.130e+03 2.300e+03, threshold=1.752e+03, percent-clipped=6.0 +2023-04-01 06:14:41,682 INFO [train.py:903] (1/4) Epoch 5, batch 5950, loss[loss=0.2733, simple_loss=0.3393, pruned_loss=0.1037, over 19654.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3412, pruned_loss=0.1102, over 3809379.95 frames. 
], batch size: 58, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:23,367 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1935, 2.1798, 2.0985, 3.3103, 2.3426, 3.6783, 3.0511, 1.9628], + device='cuda:1'), covar=tensor([0.2468, 0.1793, 0.0878, 0.1163, 0.2073, 0.0534, 0.1762, 0.1619], + device='cuda:1'), in_proj_covar=tensor([0.0646, 0.0634, 0.0560, 0.0797, 0.0677, 0.0558, 0.0697, 0.0594], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:15:43,947 INFO [train.py:903] (1/4) Epoch 5, batch 6000, loss[loss=0.2654, simple_loss=0.3238, pruned_loss=0.1035, over 19481.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3409, pruned_loss=0.1098, over 3818262.06 frames. ], batch size: 49, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:43,947 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 06:15:56,873 INFO [train.py:937] (1/4) Epoch 5, validation: loss=0.203, simple_loss=0.3017, pruned_loss=0.05213, over 944034.00 frames. +2023-04-01 06:15:56,874 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 06:16:18,147 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:16:28,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.050e+02 6.893e+02 8.503e+02 1.056e+03 1.945e+03, threshold=1.701e+03, percent-clipped=4.0 +2023-04-01 06:16:59,412 INFO [train.py:903] (1/4) Epoch 5, batch 6050, loss[loss=0.2811, simple_loss=0.3328, pruned_loss=0.1147, over 19629.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3403, pruned_loss=0.1096, over 3819465.06 frames. ], batch size: 50, lr: 1.56e-02, grad_scale: 16.0 +2023-04-01 06:17:34,354 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8751, 1.3839, 1.4150, 2.0809, 1.6409, 1.9698, 2.0484, 1.8068], + device='cuda:1'), covar=tensor([0.0714, 0.1110, 0.1113, 0.0892, 0.0978, 0.0778, 0.1015, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0243, 0.0240, 0.0269, 0.0258, 0.0227, 0.0225, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 06:18:00,559 INFO [train.py:903] (1/4) Epoch 5, batch 6100, loss[loss=0.2965, simple_loss=0.3655, pruned_loss=0.1137, over 19569.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3406, pruned_loss=0.1092, over 3830616.24 frames. ], batch size: 61, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:18:23,031 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33431.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:18:32,606 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.262e+02 7.464e+02 9.853e+02 2.581e+03, threshold=1.493e+03, percent-clipped=2.0 +2023-04-01 06:18:39,223 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:19:00,961 INFO [train.py:903] (1/4) Epoch 5, batch 6150, loss[loss=0.2994, simple_loss=0.3552, pruned_loss=0.1218, over 19382.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3417, pruned_loss=0.1098, over 3830604.80 frames. ], batch size: 70, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:19:29,657 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-01 06:19:35,772 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:01,183 INFO [train.py:903] (1/4) Epoch 5, batch 6200, loss[loss=0.2924, simple_loss=0.3568, pruned_loss=0.114, over 19605.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3417, pruned_loss=0.1103, over 3832349.79 frames. ], batch size: 61, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:20:08,945 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:09,240 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3584, 2.9468, 2.0076, 2.2488, 1.9854, 2.2463, 0.8526, 2.1745], + device='cuda:1'), covar=tensor([0.0234, 0.0253, 0.0313, 0.0373, 0.0546, 0.0452, 0.0574, 0.0450], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0278, 0.0281, 0.0297, 0.0372, 0.0290, 0.0271, 0.0290], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:20:34,185 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.213e+02 6.728e+02 8.677e+02 1.165e+03 2.777e+03, threshold=1.735e+03, percent-clipped=13.0 +2023-04-01 06:20:43,559 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:21:03,395 INFO [train.py:903] (1/4) Epoch 5, batch 6250, loss[loss=0.2319, simple_loss=0.2971, pruned_loss=0.08337, over 19731.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3418, pruned_loss=0.1101, over 3833940.58 frames. ], batch size: 46, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:21:31,242 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 06:22:03,933 INFO [train.py:903] (1/4) Epoch 5, batch 6300, loss[loss=0.2256, simple_loss=0.2903, pruned_loss=0.08047, over 19380.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3404, pruned_loss=0.1097, over 3831685.89 frames. ], batch size: 47, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:22:28,442 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:22:35,563 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 6.435e+02 8.030e+02 9.840e+02 2.632e+03, threshold=1.606e+03, percent-clipped=3.0 +2023-04-01 06:23:04,055 INFO [train.py:903] (1/4) Epoch 5, batch 6350, loss[loss=0.1983, simple_loss=0.2723, pruned_loss=0.0622, over 19764.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3398, pruned_loss=0.1091, over 3814568.43 frames. ], batch size: 47, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:23:45,924 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:23:50,552 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:05,037 INFO [train.py:903] (1/4) Epoch 5, batch 6400, loss[loss=0.2612, simple_loss=0.3163, pruned_loss=0.1031, over 19794.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3398, pruned_loss=0.1088, over 3813682.14 frames. 
], batch size: 48, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:24:20,305 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:37,365 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.704e+02 6.874e+02 8.420e+02 1.031e+03 3.616e+03, threshold=1.684e+03, percent-clipped=3.0 +2023-04-01 06:25:05,852 INFO [train.py:903] (1/4) Epoch 5, batch 6450, loss[loss=0.3145, simple_loss=0.3633, pruned_loss=0.1328, over 19580.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3407, pruned_loss=0.1095, over 3819192.51 frames. ], batch size: 52, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:25:18,529 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3577, 1.3298, 1.5449, 1.3932, 3.0092, 4.4501, 4.4186, 4.8449], + device='cuda:1'), covar=tensor([0.1465, 0.2878, 0.3072, 0.1849, 0.0413, 0.0122, 0.0136, 0.0065], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0282, 0.0316, 0.0250, 0.0198, 0.0116, 0.0203, 0.0143], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:25:49,027 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 06:25:54,052 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:06,349 INFO [train.py:903] (1/4) Epoch 5, batch 6500, loss[loss=0.2713, simple_loss=0.3329, pruned_loss=0.1049, over 17407.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3415, pruned_loss=0.11, over 3823825.52 frames. ], batch size: 101, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:26:12,165 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 06:26:24,735 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:32,483 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33834.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:36,708 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.680e+02 8.171e+02 1.132e+03 2.519e+03, threshold=1.634e+03, percent-clipped=6.0 +2023-04-01 06:27:07,199 INFO [train.py:903] (1/4) Epoch 5, batch 6550, loss[loss=0.336, simple_loss=0.3826, pruned_loss=0.1447, over 13213.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3419, pruned_loss=0.1101, over 3823479.85 frames. ], batch size: 135, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:27:38,528 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:27:59,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5115, 2.4858, 1.6492, 1.6417, 2.3970, 1.1842, 1.1328, 1.7551], + device='cuda:1'), covar=tensor([0.0737, 0.0440, 0.0954, 0.0493, 0.0347, 0.0991, 0.0635, 0.0411], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0269, 0.0312, 0.0237, 0.0222, 0.0303, 0.0285, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 06:28:06,986 INFO [train.py:903] (1/4) Epoch 5, batch 6600, loss[loss=0.2244, simple_loss=0.2879, pruned_loss=0.08046, over 19749.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.341, pruned_loss=0.1098, over 3826036.74 frames. 
], batch size: 46, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:28:08,496 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:40,230 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.407e+02 7.502e+02 9.137e+02 1.060e+03 2.817e+03, threshold=1.827e+03, percent-clipped=6.0 +2023-04-01 06:28:52,863 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33949.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:01,842 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:09,037 INFO [train.py:903] (1/4) Epoch 5, batch 6650, loss[loss=0.2604, simple_loss=0.3228, pruned_loss=0.09906, over 19730.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3404, pruned_loss=0.1096, over 3812011.43 frames. ], batch size: 51, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:10,060 INFO [train.py:903] (1/4) Epoch 5, batch 6700, loss[loss=0.2957, simple_loss=0.3592, pruned_loss=0.1162, over 19676.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3417, pruned_loss=0.1106, over 3811278.83 frames. ], batch size: 60, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:40,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 7.257e+02 9.131e+02 1.100e+03 2.314e+03, threshold=1.826e+03, percent-clipped=7.0 +2023-04-01 06:30:40,465 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:06,264 INFO [train.py:903] (1/4) Epoch 5, batch 6750, loss[loss=0.2089, simple_loss=0.2847, pruned_loss=0.06655, over 19476.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3423, pruned_loss=0.1112, over 3810106.99 frames. ], batch size: 49, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:31:06,529 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:22,403 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3538, 1.1924, 1.2689, 1.7100, 1.4085, 1.5611, 1.6449, 1.4313], + device='cuda:1'), covar=tensor([0.0861, 0.1071, 0.1146, 0.0895, 0.0938, 0.0821, 0.0908, 0.0751], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0251, 0.0249, 0.0278, 0.0268, 0.0233, 0.0231, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 06:32:02,682 INFO [train.py:903] (1/4) Epoch 5, batch 6800, loss[loss=0.3041, simple_loss=0.3447, pruned_loss=0.1318, over 19777.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3426, pruned_loss=0.1111, over 3803958.52 frames. ], batch size: 49, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:32:30,647 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.271e+02 6.317e+02 7.808e+02 9.230e+02 1.582e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-04-01 06:32:47,618 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 06:32:48,644 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 06:32:50,744 INFO [train.py:903] (1/4) Epoch 6, batch 0, loss[loss=0.3173, simple_loss=0.3712, pruned_loss=0.1318, over 19592.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3712, pruned_loss=0.1318, over 19592.00 frames. 
], batch size: 61, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:32:50,744 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 06:33:02,090 INFO [train.py:937] (1/4) Epoch 6, validation: loss=0.2022, simple_loss=0.3015, pruned_loss=0.05149, over 944034.00 frames. +2023-04-01 06:33:02,091 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 06:33:15,311 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 06:33:20,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:33:30,289 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:03,526 INFO [train.py:903] (1/4) Epoch 6, batch 50, loss[loss=0.2657, simple_loss=0.339, pruned_loss=0.09617, over 19486.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3458, pruned_loss=0.1111, over 857011.86 frames. ], batch size: 64, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:34:22,167 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34205.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:40,722 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 06:34:53,304 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34230.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:35:05,223 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.721e+02 5.756e+02 7.149e+02 1.025e+03 3.166e+03, threshold=1.430e+03, percent-clipped=7.0 +2023-04-01 06:35:06,296 INFO [train.py:903] (1/4) Epoch 6, batch 100, loss[loss=0.2567, simple_loss=0.3206, pruned_loss=0.09646, over 19599.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3394, pruned_loss=0.1067, over 1521808.10 frames. ], batch size: 52, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:35:18,598 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 06:35:23,498 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4034, 1.1442, 1.2016, 1.2036, 2.0755, 0.9424, 1.8547, 2.1914], + device='cuda:1'), covar=tensor([0.0558, 0.2327, 0.2303, 0.1394, 0.0704, 0.1889, 0.0860, 0.0516], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0311, 0.0313, 0.0286, 0.0306, 0.0316, 0.0283, 0.0301], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 06:35:26,672 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.2726, 3.9044, 2.4370, 3.5711, 0.9742, 3.5608, 3.5883, 3.7796], + device='cuda:1'), covar=tensor([0.0660, 0.0962, 0.1916, 0.0657, 0.3966, 0.0729, 0.0748, 0.0757], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0313, 0.0371, 0.0287, 0.0353, 0.0304, 0.0287, 0.0315], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:36:06,448 INFO [train.py:903] (1/4) Epoch 6, batch 150, loss[loss=0.271, simple_loss=0.3362, pruned_loss=0.1029, over 19697.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3398, pruned_loss=0.1082, over 2017729.07 frames. 
], batch size: 59, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:36:19,471 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:37:08,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.164e+02 6.478e+02 8.283e+02 9.993e+02 1.951e+03, threshold=1.657e+03, percent-clipped=7.0 +2023-04-01 06:37:08,922 INFO [train.py:903] (1/4) Epoch 6, batch 200, loss[loss=0.2563, simple_loss=0.3285, pruned_loss=0.09205, over 19667.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3362, pruned_loss=0.1055, over 2426769.22 frames. ], batch size: 55, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:37:08,940 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 06:37:44,178 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3128, 1.4676, 2.2352, 1.6608, 3.2310, 2.7291, 3.4456, 1.6357], + device='cuda:1'), covar=tensor([0.1907, 0.3082, 0.1716, 0.1464, 0.1216, 0.1436, 0.1472, 0.2831], + device='cuda:1'), in_proj_covar=tensor([0.0450, 0.0503, 0.0485, 0.0411, 0.0553, 0.0448, 0.0627, 0.0444], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:38:12,099 INFO [train.py:903] (1/4) Epoch 6, batch 250, loss[loss=0.2876, simple_loss=0.3541, pruned_loss=0.1106, over 19681.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3361, pruned_loss=0.1054, over 2742035.77 frames. ], batch size: 59, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:38:33,065 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:38,006 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:38,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.74 vs. limit=5.0 +2023-04-01 06:38:44,824 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9177, 3.5551, 2.3161, 3.2883, 1.0889, 3.1754, 3.2439, 3.3749], + device='cuda:1'), covar=tensor([0.0734, 0.1304, 0.2034, 0.0842, 0.3835, 0.1092, 0.0884, 0.0972], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0310, 0.0369, 0.0286, 0.0355, 0.0308, 0.0286, 0.0315], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:38:44,990 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:08,854 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:14,123 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 6.949e+02 8.663e+02 1.115e+03 2.860e+03, threshold=1.733e+03, percent-clipped=3.0 +2023-04-01 06:39:14,141 INFO [train.py:903] (1/4) Epoch 6, batch 300, loss[loss=0.3138, simple_loss=0.3545, pruned_loss=0.1366, over 13087.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3369, pruned_loss=0.1059, over 2969694.92 frames. 
], batch size: 135, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:39:15,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3179, 1.2904, 1.3960, 1.5575, 2.8440, 1.0337, 1.9761, 2.9823], + device='cuda:1'), covar=tensor([0.0389, 0.2489, 0.2478, 0.1526, 0.0644, 0.2407, 0.1202, 0.0443], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0316, 0.0320, 0.0288, 0.0311, 0.0318, 0.0289, 0.0307], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 06:40:17,202 INFO [train.py:903] (1/4) Epoch 6, batch 350, loss[loss=0.2258, simple_loss=0.2851, pruned_loss=0.08325, over 19716.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3365, pruned_loss=0.1059, over 3159717.33 frames. ], batch size: 46, lr: 1.43e-02, grad_scale: 4.0 +2023-04-01 06:40:22,949 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 06:40:37,841 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:40:55,506 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:41:18,578 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+02 6.755e+02 8.258e+02 9.979e+02 1.871e+03, threshold=1.652e+03, percent-clipped=1.0 +2023-04-01 06:41:18,597 INFO [train.py:903] (1/4) Epoch 6, batch 400, loss[loss=0.2378, simple_loss=0.3116, pruned_loss=0.08201, over 19481.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3387, pruned_loss=0.1075, over 3298169.31 frames. ], batch size: 49, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:20,647 INFO [train.py:903] (1/4) Epoch 6, batch 450, loss[loss=0.2671, simple_loss=0.3299, pruned_loss=0.1021, over 19766.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3384, pruned_loss=0.1075, over 3428216.93 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:48,995 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:42:54,482 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 06:42:55,448 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 06:43:01,475 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:23,824 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.432e+02 6.970e+02 8.269e+02 1.071e+03 2.551e+03, threshold=1.654e+03, percent-clipped=6.0 +2023-04-01 06:43:23,841 INFO [train.py:903] (1/4) Epoch 6, batch 500, loss[loss=0.2986, simple_loss=0.3649, pruned_loss=0.1161, over 19611.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3379, pruned_loss=0.1068, over 3528132.34 frames. 
], batch size: 57, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:43:24,075 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34640.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:29,887 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:03,467 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:27,115 INFO [train.py:903] (1/4) Epoch 6, batch 550, loss[loss=0.2956, simple_loss=0.356, pruned_loss=0.1176, over 19675.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3369, pruned_loss=0.1068, over 3592438.65 frames. ], batch size: 55, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:44:37,112 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:45:17,117 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34728.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:45:31,835 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.191e+02 8.093e+02 9.820e+02 1.880e+03, threshold=1.619e+03, percent-clipped=2.0 +2023-04-01 06:45:31,854 INFO [train.py:903] (1/4) Epoch 6, batch 600, loss[loss=0.224, simple_loss=0.2967, pruned_loss=0.07562, over 19484.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3372, pruned_loss=0.1065, over 3648813.71 frames. ], batch size: 49, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:13,298 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 06:46:15,092 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 06:46:20,223 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34777.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:46:35,482 INFO [train.py:903] (1/4) Epoch 6, batch 650, loss[loss=0.217, simple_loss=0.2868, pruned_loss=0.07361, over 19062.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3394, pruned_loss=0.1078, over 3678724.06 frames. ], batch size: 42, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:51,841 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:47:38,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 6.280e+02 8.587e+02 1.153e+03 3.497e+03, threshold=1.717e+03, percent-clipped=9.0 +2023-04-01 06:47:38,660 INFO [train.py:903] (1/4) Epoch 6, batch 700, loss[loss=0.2277, simple_loss=0.2968, pruned_loss=0.07935, over 19482.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3393, pruned_loss=0.1082, over 3716257.93 frames. ], batch size: 49, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:48:27,752 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:48:43,528 INFO [train.py:903] (1/4) Epoch 6, batch 750, loss[loss=0.2561, simple_loss=0.3291, pruned_loss=0.09148, over 19538.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3389, pruned_loss=0.1077, over 3731303.03 frames. 
], batch size: 56, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:49:00,050 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:49:06,790 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2884, 1.1483, 1.5758, 1.0657, 2.5250, 3.4107, 3.1588, 3.4778], + device='cuda:1'), covar=tensor([0.1483, 0.3082, 0.2770, 0.1985, 0.0453, 0.0188, 0.0205, 0.0137], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0281, 0.0311, 0.0246, 0.0198, 0.0118, 0.0201, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:49:45,087 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.784e+02 6.254e+02 7.861e+02 1.094e+03 2.828e+03, threshold=1.572e+03, percent-clipped=5.0 +2023-04-01 06:49:45,105 INFO [train.py:903] (1/4) Epoch 6, batch 800, loss[loss=0.2793, simple_loss=0.3422, pruned_loss=0.1083, over 19782.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3382, pruned_loss=0.1072, over 3768305.00 frames. ], batch size: 56, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:50:02,472 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 06:50:03,746 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:21,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 06:50:41,616 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:46,219 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:48,467 INFO [train.py:903] (1/4) Epoch 6, batch 850, loss[loss=0.2566, simple_loss=0.3112, pruned_loss=0.101, over 18675.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3381, pruned_loss=0.1073, over 3774191.04 frames. ], batch size: 41, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:51:32,728 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2096, 1.2238, 1.7223, 1.4775, 2.3252, 2.0696, 2.4994, 0.8664], + device='cuda:1'), covar=tensor([0.1860, 0.3110, 0.1654, 0.1425, 0.1169, 0.1513, 0.1285, 0.2842], + device='cuda:1'), in_proj_covar=tensor([0.0448, 0.0511, 0.0488, 0.0412, 0.0556, 0.0449, 0.0625, 0.0448], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:51:42,518 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 06:51:49,532 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 6.223e+02 7.935e+02 9.772e+02 2.166e+03, threshold=1.587e+03, percent-clipped=2.0 +2023-04-01 06:51:49,550 INFO [train.py:903] (1/4) Epoch 6, batch 900, loss[loss=0.3149, simple_loss=0.3797, pruned_loss=0.125, over 19574.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.338, pruned_loss=0.1074, over 3795659.78 frames. 
], batch size: 61, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:51:51,218 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3843, 1.0326, 1.4262, 1.2079, 2.5123, 3.3454, 3.2799, 3.6852], + device='cuda:1'), covar=tensor([0.1442, 0.3835, 0.3621, 0.1939, 0.0504, 0.0194, 0.0243, 0.0127], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0280, 0.0311, 0.0248, 0.0198, 0.0119, 0.0201, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:52:28,231 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:52:30,297 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35072.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:52:51,733 INFO [train.py:903] (1/4) Epoch 6, batch 950, loss[loss=0.2703, simple_loss=0.3276, pruned_loss=0.1065, over 19736.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3376, pruned_loss=0.1069, over 3811881.41 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:57,534 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 06:53:04,328 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:09,190 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:18,081 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:55,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 7.194e+02 8.589e+02 1.083e+03 2.096e+03, threshold=1.718e+03, percent-clipped=5.0 +2023-04-01 06:53:55,236 INFO [train.py:903] (1/4) Epoch 6, batch 1000, loss[loss=0.2946, simple_loss=0.3428, pruned_loss=0.1232, over 19463.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3376, pruned_loss=0.1068, over 3818344.30 frames. ], batch size: 49, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:54:48,376 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 06:54:52,231 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35187.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:54:55,176 INFO [train.py:903] (1/4) Epoch 6, batch 1050, loss[loss=0.3119, simple_loss=0.3632, pruned_loss=0.1303, over 17334.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3397, pruned_loss=0.1083, over 3817162.63 frames. ], batch size: 101, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:55:30,964 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 06:55:49,752 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 06:55:57,166 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.102e+02 7.066e+02 8.619e+02 1.238e+03 3.302e+03, threshold=1.724e+03, percent-clipped=8.0 +2023-04-01 06:55:57,185 INFO [train.py:903] (1/4) Epoch 6, batch 1100, loss[loss=0.3319, simple_loss=0.375, pruned_loss=0.1443, over 13313.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3393, pruned_loss=0.1083, over 3813077.03 frames. 
], batch size: 136, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:56:09,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9913, 1.9457, 1.6323, 1.3756, 1.4141, 1.6189, 0.2745, 0.7614], + device='cuda:1'), covar=tensor([0.0253, 0.0288, 0.0172, 0.0294, 0.0547, 0.0311, 0.0545, 0.0500], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0290, 0.0289, 0.0308, 0.0374, 0.0294, 0.0285, 0.0298], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 06:56:48,020 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.78 vs. limit=5.0 +2023-04-01 06:56:59,592 INFO [train.py:903] (1/4) Epoch 6, batch 1150, loss[loss=0.2932, simple_loss=0.3594, pruned_loss=0.1135, over 19681.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3397, pruned_loss=0.1083, over 3813845.91 frames. ], batch size: 59, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:57:45,412 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:04,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.124e+02 7.469e+02 9.050e+02 1.884e+03, threshold=1.494e+03, percent-clipped=1.0 +2023-04-01 06:58:04,215 INFO [train.py:903] (1/4) Epoch 6, batch 1200, loss[loss=0.288, simple_loss=0.3606, pruned_loss=0.1077, over 19786.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3401, pruned_loss=0.1084, over 3810100.20 frames. ], batch size: 56, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:58:17,322 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:23,058 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:27,680 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:32,172 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35363.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:58:36,592 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 06:58:55,127 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:56,267 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2503, 1.2730, 1.8219, 1.4437, 2.6599, 2.2850, 2.8140, 0.9393], + device='cuda:1'), covar=tensor([0.1798, 0.2997, 0.1545, 0.1496, 0.1032, 0.1376, 0.1071, 0.2812], + device='cuda:1'), in_proj_covar=tensor([0.0447, 0.0508, 0.0494, 0.0413, 0.0562, 0.0451, 0.0626, 0.0452], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 06:58:59,580 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:05,730 INFO [train.py:903] (1/4) Epoch 6, batch 1250, loss[loss=0.2941, simple_loss=0.3513, pruned_loss=0.1184, over 18701.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3393, pruned_loss=0.1078, over 3820550.83 frames. 
], batch size: 74, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:59:09,373 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:42,219 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6449, 1.7083, 1.4574, 2.4580, 1.7129, 2.2770, 1.8879, 1.2993], + device='cuda:1'), covar=tensor([0.2812, 0.2283, 0.1585, 0.1253, 0.2331, 0.0970, 0.2975, 0.2873], + device='cuda:1'), in_proj_covar=tensor([0.0655, 0.0640, 0.0570, 0.0793, 0.0677, 0.0565, 0.0697, 0.0605], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 07:00:08,109 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+02 6.685e+02 8.523e+02 1.077e+03 2.432e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-04-01 07:00:08,127 INFO [train.py:903] (1/4) Epoch 6, batch 1300, loss[loss=0.2844, simple_loss=0.347, pruned_loss=0.1109, over 19758.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3409, pruned_loss=0.1089, over 3802719.30 frames. ], batch size: 54, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 07:00:12,109 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35443.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:00:25,337 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0409, 3.7624, 2.1839, 2.4373, 3.3258, 1.8691, 1.1390, 1.9374], + device='cuda:1'), covar=tensor([0.1026, 0.0295, 0.0776, 0.0546, 0.0398, 0.0881, 0.0893, 0.0609], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0277, 0.0318, 0.0236, 0.0231, 0.0309, 0.0283, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:00:26,224 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35454.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:00:29,030 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-01 07:00:37,007 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0986, 1.0569, 1.0565, 1.3218, 1.0363, 1.3041, 1.2938, 1.2219], + device='cuda:1'), covar=tensor([0.0966, 0.1150, 0.1121, 0.0741, 0.0950, 0.0871, 0.0875, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0230, 0.0250, 0.0239, 0.0274, 0.0265, 0.0232, 0.0226, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 07:00:43,921 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35468.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:00:57,890 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2788, 5.5816, 2.7857, 4.9461, 1.2970, 5.5519, 5.4861, 5.6913], + device='cuda:1'), covar=tensor([0.0297, 0.0699, 0.1753, 0.0496, 0.3457, 0.0437, 0.0421, 0.0510], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0312, 0.0371, 0.0289, 0.0350, 0.0310, 0.0289, 0.0321], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 07:01:08,277 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 07:01:11,030 INFO [train.py:903] (1/4) Epoch 6, batch 1350, loss[loss=0.2705, simple_loss=0.3359, pruned_loss=0.1025, over 19402.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.341, pruned_loss=0.1087, over 3804283.14 frames. 
], batch size: 48, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:01:40,808 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3943, 1.5643, 2.0567, 1.6734, 2.8417, 4.6134, 4.5197, 4.8847], + device='cuda:1'), covar=tensor([0.1408, 0.2776, 0.2732, 0.1696, 0.0487, 0.0113, 0.0163, 0.0075], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0285, 0.0312, 0.0249, 0.0201, 0.0122, 0.0202, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 07:02:13,027 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+02 6.783e+02 8.495e+02 1.071e+03 2.340e+03, threshold=1.699e+03, percent-clipped=3.0 +2023-04-01 07:02:13,045 INFO [train.py:903] (1/4) Epoch 6, batch 1400, loss[loss=0.3267, simple_loss=0.3683, pruned_loss=0.1426, over 19833.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3397, pruned_loss=0.1079, over 3821484.85 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:02:22,733 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:02:47,168 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:03:13,361 INFO [train.py:903] (1/4) Epoch 6, batch 1450, loss[loss=0.3472, simple_loss=0.3926, pruned_loss=0.1509, over 19267.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3403, pruned_loss=0.1088, over 3825741.68 frames. ], batch size: 66, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:03:13,400 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 07:03:52,451 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3375, 1.4382, 1.4895, 1.5180, 2.8101, 1.0636, 1.9248, 2.9132], + device='cuda:1'), covar=tensor([0.0394, 0.2375, 0.2399, 0.1526, 0.0714, 0.2302, 0.1182, 0.0447], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0313, 0.0316, 0.0286, 0.0313, 0.0312, 0.0286, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:04:15,922 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+02 6.609e+02 8.520e+02 1.101e+03 2.891e+03, threshold=1.704e+03, percent-clipped=3.0 +2023-04-01 07:04:15,941 INFO [train.py:903] (1/4) Epoch 6, batch 1500, loss[loss=0.2888, simple_loss=0.3387, pruned_loss=0.1195, over 19829.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.34, pruned_loss=0.1086, over 3812842.26 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:17,132 INFO [train.py:903] (1/4) Epoch 6, batch 1550, loss[loss=0.311, simple_loss=0.3522, pruned_loss=0.1349, over 19486.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3404, pruned_loss=0.1092, over 3812008.23 frames. 
], batch size: 49, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:39,874 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35707.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:05:57,553 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6112, 1.3085, 1.3424, 2.0136, 1.5129, 2.0677, 2.0737, 1.9005], + device='cuda:1'), covar=tensor([0.0828, 0.1010, 0.1111, 0.0885, 0.1032, 0.0675, 0.0843, 0.0591], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0248, 0.0239, 0.0270, 0.0263, 0.0231, 0.0223, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 07:06:05,680 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8856, 1.3160, 1.0406, 1.0224, 1.2145, 0.8510, 0.8607, 1.2400], + device='cuda:1'), covar=tensor([0.0420, 0.0545, 0.0837, 0.0439, 0.0374, 0.0913, 0.0504, 0.0332], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0274, 0.0313, 0.0238, 0.0228, 0.0305, 0.0284, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:06:16,821 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:06:22,433 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 6.450e+02 9.026e+02 1.093e+03 2.835e+03, threshold=1.805e+03, percent-clipped=5.0 +2023-04-01 07:06:22,452 INFO [train.py:903] (1/4) Epoch 6, batch 1600, loss[loss=0.2228, simple_loss=0.2955, pruned_loss=0.07509, over 19845.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3388, pruned_loss=0.1078, over 3826990.97 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:06:44,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 07:06:53,853 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0573, 1.1352, 1.4246, 1.6005, 2.6714, 1.0166, 1.9283, 2.6779], + device='cuda:1'), covar=tensor([0.0403, 0.2475, 0.2374, 0.1312, 0.0599, 0.2043, 0.0969, 0.0471], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0310, 0.0313, 0.0286, 0.0308, 0.0309, 0.0283, 0.0302], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:07:23,499 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:07:24,376 INFO [train.py:903] (1/4) Epoch 6, batch 1650, loss[loss=0.3481, simple_loss=0.3959, pruned_loss=0.1502, over 18902.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3391, pruned_loss=0.1078, over 3830837.62 frames. ], batch size: 74, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:07:35,329 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0344, 2.0643, 1.9541, 3.1698, 1.9959, 3.3329, 2.8405, 1.8678], + device='cuda:1'), covar=tensor([0.2776, 0.2207, 0.1059, 0.1390, 0.2718, 0.0740, 0.2058, 0.1964], + device='cuda:1'), in_proj_covar=tensor([0.0659, 0.0649, 0.0575, 0.0807, 0.0686, 0.0568, 0.0701, 0.0612], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 07:07:50,596 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 07:07:51,784 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 07:08:05,691 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35822.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:08:09,298 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:13,893 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35829.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:27,427 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.226e+02 6.593e+02 7.720e+02 9.793e+02 2.227e+03, threshold=1.544e+03, percent-clipped=1.0 +2023-04-01 07:08:27,445 INFO [train.py:903] (1/4) Epoch 6, batch 1700, loss[loss=0.254, simple_loss=0.3209, pruned_loss=0.09354, over 19843.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3378, pruned_loss=0.1068, over 3836352.70 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:38,865 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:40,021 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:09:06,037 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 07:09:29,408 INFO [train.py:903] (1/4) Epoch 6, batch 1750, loss[loss=0.3245, simple_loss=0.3849, pruned_loss=0.1321, over 19699.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3374, pruned_loss=0.1062, over 3829208.31 frames. ], batch size: 58, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:09:31,965 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35892.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:10:33,848 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+02 6.695e+02 8.372e+02 1.116e+03 2.634e+03, threshold=1.674e+03, percent-clipped=7.0 +2023-04-01 07:10:33,866 INFO [train.py:903] (1/4) Epoch 6, batch 1800, loss[loss=0.3091, simple_loss=0.3798, pruned_loss=0.1192, over 19490.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3379, pruned_loss=0.1067, over 3820361.73 frames. ], batch size: 64, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:10:57,795 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1135, 1.2253, 1.5319, 0.9579, 2.5693, 2.8692, 2.6973, 3.0808], + device='cuda:1'), covar=tensor([0.1392, 0.2754, 0.2680, 0.1909, 0.0420, 0.0313, 0.0254, 0.0158], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0281, 0.0315, 0.0247, 0.0201, 0.0123, 0.0201, 0.0147], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 07:11:31,904 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 07:11:36,740 INFO [train.py:903] (1/4) Epoch 6, batch 1850, loss[loss=0.2918, simple_loss=0.3561, pruned_loss=0.1138, over 18889.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3387, pruned_loss=0.1076, over 3820382.31 frames. 
], batch size: 74, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:11:59,155 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36007.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:12:11,437 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 07:12:40,884 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.024e+02 7.330e+02 9.053e+02 1.086e+03 1.723e+03, threshold=1.811e+03, percent-clipped=2.0 +2023-04-01 07:12:40,902 INFO [train.py:903] (1/4) Epoch 6, batch 1900, loss[loss=0.1961, simple_loss=0.2716, pruned_loss=0.06024, over 19774.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3377, pruned_loss=0.1067, over 3831862.81 frames. ], batch size: 46, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:12:57,328 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 07:13:04,077 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 07:13:27,622 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 07:13:29,155 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36078.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:13:36,009 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5477, 1.2545, 1.3823, 1.5741, 3.1017, 1.0630, 1.9524, 3.2052], + device='cuda:1'), covar=tensor([0.0351, 0.2454, 0.2547, 0.1560, 0.0596, 0.2367, 0.1301, 0.0383], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0317, 0.0321, 0.0295, 0.0311, 0.0320, 0.0291, 0.0310], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:13:42,467 INFO [train.py:903] (1/4) Epoch 6, batch 1950, loss[loss=0.2725, simple_loss=0.3404, pruned_loss=0.1023, over 19438.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3382, pruned_loss=0.1066, over 3833781.13 frames. ], batch size: 70, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:14:00,154 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36103.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:14:05,015 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:35,254 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,138 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36133.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:45,004 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.894e+02 6.352e+02 7.868e+02 9.806e+02 1.510e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-04-01 07:14:45,021 INFO [train.py:903] (1/4) Epoch 6, batch 2000, loss[loss=0.3132, simple_loss=0.3615, pruned_loss=0.1324, over 13214.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3385, pruned_loss=0.1066, over 3818733.81 frames. ], batch size: 136, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:24,945 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:15:42,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-01 07:15:46,069 INFO [train.py:903] (1/4) Epoch 6, batch 2050, loss[loss=0.2096, simple_loss=0.2772, pruned_loss=0.07093, over 18638.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3388, pruned_loss=0.1074, over 3805086.10 frames. ], batch size: 41, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:57,096 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36199.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:16:00,513 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 07:16:03,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 07:16:22,950 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 07:16:23,186 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2213, 5.5153, 3.0399, 4.8265, 1.2662, 5.3755, 5.2719, 5.5620], + device='cuda:1'), covar=tensor([0.0379, 0.0923, 0.1880, 0.0609, 0.4032, 0.0601, 0.0543, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0321, 0.0377, 0.0296, 0.0360, 0.0317, 0.0297, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 07:16:37,667 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4135, 2.3661, 1.6117, 1.5781, 2.2249, 1.3149, 1.1465, 1.8515], + device='cuda:1'), covar=tensor([0.0779, 0.0419, 0.0807, 0.0510, 0.0312, 0.0917, 0.0661, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0276, 0.0310, 0.0240, 0.0228, 0.0309, 0.0287, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:16:47,772 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.690e+02 8.798e+02 1.173e+03 2.442e+03, threshold=1.760e+03, percent-clipped=12.0 +2023-04-01 07:16:47,790 INFO [train.py:903] (1/4) Epoch 6, batch 2100, loss[loss=0.2246, simple_loss=0.2967, pruned_loss=0.07621, over 19858.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.338, pruned_loss=0.1068, over 3798855.18 frames. ], batch size: 52, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:16:57,873 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:17,845 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36263.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:18,576 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 07:17:39,999 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 07:17:49,930 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:49,972 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:51,846 INFO [train.py:903] (1/4) Epoch 6, batch 2150, loss[loss=0.3227, simple_loss=0.3654, pruned_loss=0.14, over 19845.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3376, pruned_loss=0.1068, over 3792713.25 frames. 
], batch size: 52, lr: 1.40e-02, grad_scale: 16.0 +2023-04-01 07:18:48,679 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7453, 4.2210, 4.4512, 4.3969, 1.5095, 4.0191, 3.6396, 4.0623], + device='cuda:1'), covar=tensor([0.1017, 0.0470, 0.0364, 0.0387, 0.3736, 0.0436, 0.0450, 0.0777], + device='cuda:1'), in_proj_covar=tensor([0.0539, 0.0470, 0.0627, 0.0522, 0.0599, 0.0391, 0.0399, 0.0592], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 07:18:53,904 INFO [train.py:903] (1/4) Epoch 6, batch 2200, loss[loss=0.2162, simple_loss=0.2916, pruned_loss=0.07042, over 19618.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3375, pruned_loss=0.1075, over 3785209.43 frames. ], batch size: 50, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:18:55,064 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 6.300e+02 8.031e+02 1.073e+03 2.013e+03, threshold=1.606e+03, percent-clipped=1.0 +2023-04-01 07:19:06,902 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 07:19:54,195 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 07:19:57,016 INFO [train.py:903] (1/4) Epoch 6, batch 2250, loss[loss=0.262, simple_loss=0.316, pruned_loss=0.104, over 16006.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3382, pruned_loss=0.1075, over 3797392.30 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:58,357 INFO [train.py:903] (1/4) Epoch 6, batch 2300, loss[loss=0.2633, simple_loss=0.327, pruned_loss=0.09976, over 19476.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3359, pruned_loss=0.1053, over 3807173.18 frames. ], batch size: 49, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:59,562 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.689e+02 7.482e+02 9.822e+02 1.768e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-04-01 07:21:11,047 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5630, 4.0764, 2.4177, 3.6616, 1.0287, 3.5535, 3.7568, 3.8102], + device='cuda:1'), covar=tensor([0.0602, 0.1111, 0.2167, 0.0670, 0.3964, 0.0911, 0.0699, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0321, 0.0374, 0.0294, 0.0358, 0.0319, 0.0294, 0.0329], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 07:21:14,318 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 07:22:00,467 INFO [train.py:903] (1/4) Epoch 6, batch 2350, loss[loss=0.3212, simple_loss=0.372, pruned_loss=0.1352, over 18022.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3361, pruned_loss=0.1054, over 3817402.48 frames. ], batch size: 83, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:22:19,306 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:22:43,438 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 07:22:49,441 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:00,562 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-01 07:23:02,881 INFO [train.py:903] (1/4) Epoch 6, batch 2400, loss[loss=0.289, simple_loss=0.3553, pruned_loss=0.1113, over 17384.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3357, pruned_loss=0.1047, over 3821483.07 frames. ], batch size: 101, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:23:04,016 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.742e+02 5.696e+02 7.249e+02 9.157e+02 1.479e+03, threshold=1.450e+03, percent-clipped=0.0 +2023-04-01 07:23:07,400 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:08,778 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36544.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:26,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.49 vs. limit=5.0 +2023-04-01 07:23:39,016 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:06,992 INFO [train.py:903] (1/4) Epoch 6, batch 2450, loss[loss=0.3296, simple_loss=0.37, pruned_loss=0.1446, over 13150.00 frames. ], tot_loss[loss=0.2744, simple_loss=0.3375, pruned_loss=0.1057, over 3815412.89 frames. ], batch size: 136, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:24:46,721 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:51,017 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:56,997 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:07,900 INFO [train.py:903] (1/4) Epoch 6, batch 2500, loss[loss=0.2698, simple_loss=0.3408, pruned_loss=0.09942, over 19547.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3371, pruned_loss=0.1055, over 3805406.12 frames. ], batch size: 56, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:25:09,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.265e+02 8.006e+02 9.274e+02 1.564e+03, threshold=1.601e+03, percent-clipped=1.0 +2023-04-01 07:25:30,329 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:26:09,567 INFO [train.py:903] (1/4) Epoch 6, batch 2550, loss[loss=0.3398, simple_loss=0.3788, pruned_loss=0.1504, over 13241.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3359, pruned_loss=0.1049, over 3800848.90 frames. 
], batch size: 136, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:26:44,789 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36718.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:26:46,949 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8142, 1.0295, 1.3499, 0.5885, 2.1195, 2.1990, 1.9580, 2.2875], + device='cuda:1'), covar=tensor([0.1423, 0.2960, 0.2816, 0.2133, 0.0450, 0.0313, 0.0357, 0.0222], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0282, 0.0312, 0.0247, 0.0200, 0.0122, 0.0200, 0.0148], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 07:26:53,871 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9033, 5.3068, 2.8920, 4.7663, 1.5378, 4.9841, 5.1490, 5.3066], + device='cuda:1'), covar=tensor([0.0445, 0.0996, 0.1987, 0.0510, 0.3711, 0.0713, 0.0568, 0.0670], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0314, 0.0372, 0.0291, 0.0353, 0.0313, 0.0294, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 07:27:05,088 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 07:27:10,904 INFO [train.py:903] (1/4) Epoch 6, batch 2600, loss[loss=0.2865, simple_loss=0.3449, pruned_loss=0.114, over 19543.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3366, pruned_loss=0.1059, over 3811300.82 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:27:12,685 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.492e+02 8.253e+02 1.085e+03 2.742e+03, threshold=1.651e+03, percent-clipped=10.0 +2023-04-01 07:28:14,513 INFO [train.py:903] (1/4) Epoch 6, batch 2650, loss[loss=0.2518, simple_loss=0.3261, pruned_loss=0.0888, over 19535.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3359, pruned_loss=0.1051, over 3813499.48 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:28:37,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 07:28:55,452 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36823.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:29:16,467 INFO [train.py:903] (1/4) Epoch 6, batch 2700, loss[loss=0.3096, simple_loss=0.3586, pruned_loss=0.1303, over 19481.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3358, pruned_loss=0.1053, over 3822964.59 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:29:17,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.342e+02 7.427e+02 9.812e+02 2.890e+03, threshold=1.485e+03, percent-clipped=2.0 +2023-04-01 07:30:05,316 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 07:30:18,801 INFO [train.py:903] (1/4) Epoch 6, batch 2750, loss[loss=0.221, simple_loss=0.2901, pruned_loss=0.07597, over 19828.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3359, pruned_loss=0.1055, over 3832374.73 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:30:49,706 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-01 07:30:51,757 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36914.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:09,562 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:22,192 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:23,024 INFO [train.py:903] (1/4) Epoch 6, batch 2800, loss[loss=0.2677, simple_loss=0.3292, pruned_loss=0.1031, over 19849.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3349, pruned_loss=0.1043, over 3838408.07 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:31:24,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+02 7.168e+02 8.701e+02 1.243e+03 3.330e+03, threshold=1.740e+03, percent-clipped=17.0 +2023-04-01 07:31:47,718 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9663, 4.2742, 4.6341, 4.5657, 1.6629, 4.1765, 3.7006, 4.2503], + device='cuda:1'), covar=tensor([0.0884, 0.0631, 0.0468, 0.0384, 0.3955, 0.0378, 0.0537, 0.0875], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0477, 0.0649, 0.0532, 0.0609, 0.0399, 0.0413, 0.0606], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 07:31:56,922 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:00,392 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:06,250 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:08,492 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:23,712 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0837, 2.0365, 1.6463, 1.5212, 1.5269, 1.6722, 0.4184, 0.8759], + device='cuda:1'), covar=tensor([0.0250, 0.0253, 0.0186, 0.0263, 0.0538, 0.0286, 0.0501, 0.0465], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0298, 0.0296, 0.0310, 0.0385, 0.0309, 0.0286, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 07:32:26,410 INFO [train.py:903] (1/4) Epoch 6, batch 2850, loss[loss=0.2647, simple_loss=0.3433, pruned_loss=0.09304, over 19694.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3347, pruned_loss=0.1042, over 3834878.31 frames. ], batch size: 59, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:33:05,604 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:33:28,652 INFO [train.py:903] (1/4) Epoch 6, batch 2900, loss[loss=0.2809, simple_loss=0.3446, pruned_loss=0.1087, over 19671.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3343, pruned_loss=0.1039, over 3833429.69 frames. ], batch size: 60, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:33:28,666 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-01 07:33:29,872 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.105e+02 7.947e+02 1.025e+03 2.308e+03, threshold=1.589e+03, percent-clipped=2.0 +2023-04-01 07:33:55,107 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37062.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:34:20,473 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:24,058 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37085.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:28,759 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37089.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:29,562 INFO [train.py:903] (1/4) Epoch 6, batch 2950, loss[loss=0.2431, simple_loss=0.3027, pruned_loss=0.09174, over 19318.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3338, pruned_loss=0.1037, over 3835936.88 frames. ], batch size: 44, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,156 INFO [train.py:903] (1/4) Epoch 6, batch 3000, loss[loss=0.2458, simple_loss=0.3034, pruned_loss=0.09414, over 19428.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3332, pruned_loss=0.1033, over 3846311.35 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,157 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 07:35:43,639 INFO [train.py:937] (1/4) Epoch 6, validation: loss=0.1968, simple_loss=0.2962, pruned_loss=0.04867, over 944034.00 frames. +2023-04-01 07:35:43,640 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 07:35:44,844 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.497e+02 6.001e+02 7.289e+02 9.626e+02 1.809e+03, threshold=1.458e+03, percent-clipped=5.0 +2023-04-01 07:35:48,602 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 07:36:18,756 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37167.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:36:30,287 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37177.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:36:45,870 INFO [train.py:903] (1/4) Epoch 6, batch 3050, loss[loss=0.2809, simple_loss=0.3507, pruned_loss=0.1056, over 19668.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.335, pruned_loss=0.1047, over 3839336.13 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:48,502 INFO [train.py:903] (1/4) Epoch 6, batch 3100, loss[loss=0.2696, simple_loss=0.3351, pruned_loss=0.1021, over 19664.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.335, pruned_loss=0.1051, over 3825858.36 frames. 
], batch size: 53, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:49,788 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 6.699e+02 8.375e+02 1.038e+03 2.239e+03, threshold=1.675e+03, percent-clipped=7.0 +2023-04-01 07:38:28,940 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37273.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:41,380 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37282.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:38:46,546 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37286.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:50,729 INFO [train.py:903] (1/4) Epoch 6, batch 3150, loss[loss=0.2094, simple_loss=0.2796, pruned_loss=0.06966, over 19568.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3359, pruned_loss=0.106, over 3808173.31 frames. ], batch size: 52, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:38:57,015 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2237, 1.3127, 1.8601, 1.3952, 2.4919, 2.0873, 2.6931, 1.1232], + device='cuda:1'), covar=tensor([0.2083, 0.3536, 0.1882, 0.1691, 0.1364, 0.1755, 0.1408, 0.3019], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0507, 0.0493, 0.0407, 0.0559, 0.0445, 0.0627, 0.0451], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 07:39:13,498 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 07:39:27,512 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37320.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:49,088 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:51,047 INFO [train.py:903] (1/4) Epoch 6, batch 3200, loss[loss=0.246, simple_loss=0.3045, pruned_loss=0.09375, over 19422.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.337, pruned_loss=0.1065, over 3820315.72 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:39:52,144 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.326e+02 6.253e+02 8.171e+02 9.916e+02 1.975e+03, threshold=1.634e+03, percent-clipped=4.0 +2023-04-01 07:39:52,618 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:58,084 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37345.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:59,734 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-04-01 07:40:20,389 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,432 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,647 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:29,187 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37370.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:42,562 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:50,628 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:52,610 INFO [train.py:903] (1/4) Epoch 6, batch 3250, loss[loss=0.2204, simple_loss=0.2844, pruned_loss=0.07813, over 19766.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3372, pruned_loss=0.1066, over 3827407.64 frames. ], batch size: 45, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:30,771 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2694, 2.1191, 1.5254, 1.3621, 1.8790, 1.0919, 1.0702, 1.7251], + device='cuda:1'), covar=tensor([0.0630, 0.0399, 0.0836, 0.0498, 0.0349, 0.0922, 0.0572, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0276, 0.0309, 0.0241, 0.0221, 0.0310, 0.0280, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:41:45,872 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37433.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:41:48,044 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:41:53,523 INFO [train.py:903] (1/4) Epoch 6, batch 3300, loss[loss=0.247, simple_loss=0.3162, pruned_loss=0.08893, over 19572.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.338, pruned_loss=0.1065, over 3834858.63 frames. ], batch size: 52, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:57,418 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 6.108e+02 8.102e+02 1.002e+03 3.053e+03, threshold=1.620e+03, percent-clipped=3.0 +2023-04-01 07:42:01,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 07:42:17,188 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37458.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:42:44,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:42:56,298 INFO [train.py:903] (1/4) Epoch 6, batch 3350, loss[loss=0.2733, simple_loss=0.3284, pruned_loss=0.1091, over 19733.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3368, pruned_loss=0.1054, over 3831436.72 frames. ], batch size: 47, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:43:56,155 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37538.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:43:57,993 INFO [train.py:903] (1/4) Epoch 6, batch 3400, loss[loss=0.2775, simple_loss=0.3245, pruned_loss=0.1153, over 19765.00 frames. 
], tot_loss[loss=0.273, simple_loss=0.336, pruned_loss=0.1051, over 3822868.62 frames. ], batch size: 46, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:44:00,242 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.377e+02 8.364e+02 1.096e+03 2.128e+03, threshold=1.673e+03, percent-clipped=5.0 +2023-04-01 07:44:26,237 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37563.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:44:59,269 INFO [train.py:903] (1/4) Epoch 6, batch 3450, loss[loss=0.2851, simple_loss=0.3513, pruned_loss=0.1095, over 19093.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3345, pruned_loss=0.1035, over 3834725.46 frames. ], batch size: 69, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:45:07,183 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 07:45:24,714 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.49 vs. limit=5.0 +2023-04-01 07:45:39,234 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8204, 4.3516, 2.5146, 3.9421, 0.9507, 3.9908, 4.0796, 4.2430], + device='cuda:1'), covar=tensor([0.0516, 0.0963, 0.1996, 0.0617, 0.3899, 0.0811, 0.0720, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0324, 0.0384, 0.0294, 0.0360, 0.0319, 0.0297, 0.0332], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 07:45:49,547 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:02,064 INFO [train.py:903] (1/4) Epoch 6, batch 3500, loss[loss=0.2831, simple_loss=0.3518, pruned_loss=0.1072, over 19549.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3349, pruned_loss=0.1038, over 3830081.60 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:46:04,578 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.340e+02 8.060e+02 1.060e+03 3.220e+03, threshold=1.612e+03, percent-clipped=3.0 +2023-04-01 07:46:07,415 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:23,280 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7118, 1.3580, 1.2192, 1.8190, 1.5164, 1.8222, 1.8954, 1.5857], + device='cuda:1'), covar=tensor([0.0807, 0.1082, 0.1201, 0.0948, 0.0947, 0.0794, 0.0944, 0.0789], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0244, 0.0237, 0.0274, 0.0262, 0.0230, 0.0226, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-04-01 07:46:38,321 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:54,394 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0661, 1.7493, 1.9040, 2.0134, 4.4271, 0.9478, 2.3173, 4.5704], + device='cuda:1'), covar=tensor([0.0228, 0.2374, 0.2234, 0.1433, 0.0494, 0.2548, 0.1277, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0316, 0.0311, 0.0287, 0.0311, 0.0312, 0.0288, 0.0304], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:47:04,849 INFO [train.py:903] (1/4) Epoch 6, batch 3550, loss[loss=0.2918, simple_loss=0.358, pruned_loss=0.1128, over 19366.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3358, pruned_loss=0.1045, over 3825832.65 frames. ], batch size: 66, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:47:07,135 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37691.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:12,748 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8497, 1.2047, 1.4432, 1.8625, 3.3410, 1.1004, 2.0985, 3.3803], + device='cuda:1'), covar=tensor([0.0299, 0.2543, 0.2405, 0.1292, 0.0526, 0.2222, 0.1265, 0.0343], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0318, 0.0315, 0.0288, 0.0312, 0.0313, 0.0288, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:47:34,902 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37716.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:49,116 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:02,995 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:05,907 INFO [train.py:903] (1/4) Epoch 6, batch 3600, loss[loss=0.3084, simple_loss=0.3539, pruned_loss=0.1315, over 19751.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3356, pruned_loss=0.1044, over 3832702.06 frames. 
], batch size: 51, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:48:08,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.040e+02 8.572e+02 1.227e+03 4.209e+03, threshold=1.714e+03, percent-clipped=12.0 +2023-04-01 07:48:12,114 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:33,238 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:49:08,248 INFO [train.py:903] (1/4) Epoch 6, batch 3650, loss[loss=0.3571, simple_loss=0.3824, pruned_loss=0.1658, over 13711.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3357, pruned_loss=0.1049, over 3820609.25 frames. ], batch size: 136, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:10,246 INFO [train.py:903] (1/4) Epoch 6, batch 3700, loss[loss=0.2356, simple_loss=0.2991, pruned_loss=0.0861, over 19482.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3352, pruned_loss=0.1047, over 3820466.18 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:11,834 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:50:12,525 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 5.969e+02 7.272e+02 1.002e+03 1.787e+03, threshold=1.454e+03, percent-clipped=1.0 +2023-04-01 07:51:00,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6906, 1.7856, 1.9526, 2.6569, 2.4730, 2.1825, 2.1931, 2.4978], + device='cuda:1'), covar=tensor([0.0693, 0.1862, 0.1281, 0.0886, 0.1226, 0.0432, 0.0891, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0360, 0.0283, 0.0237, 0.0304, 0.0240, 0.0265, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 07:51:13,475 INFO [train.py:903] (1/4) Epoch 6, batch 3750, loss[loss=0.2999, simple_loss=0.3573, pruned_loss=0.1212, over 18129.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3351, pruned_loss=0.1044, over 3825163.84 frames. ], batch size: 83, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:16,179 INFO [train.py:903] (1/4) Epoch 6, batch 3800, loss[loss=0.2305, simple_loss=0.292, pruned_loss=0.0845, over 15049.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3336, pruned_loss=0.1037, over 3821933.37 frames. ], batch size: 33, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:18,420 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.762e+02 7.572e+02 9.178e+02 2.007e+03, threshold=1.514e+03, percent-clipped=4.0 +2023-04-01 07:52:47,800 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 07:53:17,772 INFO [train.py:903] (1/4) Epoch 6, batch 3850, loss[loss=0.2597, simple_loss=0.3215, pruned_loss=0.09898, over 19480.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3336, pruned_loss=0.1036, over 3808707.88 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:53:25,801 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37997.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:53:32,695 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:00,114 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. 
limit=2.0 +2023-04-01 07:54:03,030 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:20,061 INFO [train.py:903] (1/4) Epoch 6, batch 3900, loss[loss=0.2075, simple_loss=0.278, pruned_loss=0.06848, over 19769.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3355, pruned_loss=0.105, over 3806573.91 frames. ], batch size: 46, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:54:22,364 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.032e+02 6.773e+02 7.927e+02 9.711e+02 2.220e+03, threshold=1.585e+03, percent-clipped=5.0 +2023-04-01 07:54:32,143 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-04-01 07:55:24,002 INFO [train.py:903] (1/4) Epoch 6, batch 3950, loss[loss=0.2646, simple_loss=0.3176, pruned_loss=0.1058, over 19421.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3352, pruned_loss=0.1046, over 3816076.80 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:55:27,714 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 07:55:33,048 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:03,031 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:25,971 INFO [train.py:903] (1/4) Epoch 6, batch 4000, loss[loss=0.2821, simple_loss=0.3472, pruned_loss=0.1085, over 19706.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3345, pruned_loss=0.1038, over 3821640.68 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:56:28,273 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 6.005e+02 7.523e+02 9.814e+02 1.567e+03, threshold=1.505e+03, percent-clipped=0.0 +2023-04-01 07:57:10,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 07:57:25,997 INFO [train.py:903] (1/4) Epoch 6, batch 4050, loss[loss=0.3004, simple_loss=0.3606, pruned_loss=0.1201, over 19612.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3338, pruned_loss=0.1034, over 3814766.00 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:27,528 INFO [train.py:903] (1/4) Epoch 6, batch 4100, loss[loss=0.3566, simple_loss=0.3928, pruned_loss=0.1602, over 17259.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3334, pruned_loss=0.1031, over 3807409.62 frames. ], batch size: 101, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:30,599 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.486e+02 7.817e+02 9.790e+02 2.532e+03, threshold=1.563e+03, percent-clipped=8.0 +2023-04-01 07:59:04,831 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 07:59:30,597 INFO [train.py:903] (1/4) Epoch 6, batch 4150, loss[loss=0.2546, simple_loss=0.335, pruned_loss=0.08713, over 18741.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3337, pruned_loss=0.1031, over 3814161.30 frames. ], batch size: 74, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:34,879 INFO [train.py:903] (1/4) Epoch 6, batch 4200, loss[loss=0.2658, simple_loss=0.3351, pruned_loss=0.09824, over 18707.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3343, pruned_loss=0.1038, over 3813185.91 frames. 
], batch size: 74, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:36,323 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:00:37,310 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.104e+02 7.462e+02 9.650e+02 2.123e+03, threshold=1.492e+03, percent-clipped=5.0 +2023-04-01 08:00:37,727 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1132, 1.1214, 1.3233, 1.1779, 1.7638, 1.6365, 1.8071, 0.5172], + device='cuda:1'), covar=tensor([0.1644, 0.2744, 0.1473, 0.1382, 0.0960, 0.1497, 0.0927, 0.2640], + device='cuda:1'), in_proj_covar=tensor([0.0448, 0.0515, 0.0497, 0.0409, 0.0561, 0.0452, 0.0633, 0.0450], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:00:39,345 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 08:01:21,315 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:01:34,727 INFO [train.py:903] (1/4) Epoch 6, batch 4250, loss[loss=0.2666, simple_loss=0.3186, pruned_loss=0.1073, over 19738.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3345, pruned_loss=0.1039, over 3822563.71 frames. ], batch size: 51, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:01:48,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 08:02:01,562 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 08:02:34,846 INFO [train.py:903] (1/4) Epoch 6, batch 4300, loss[loss=0.2463, simple_loss=0.3147, pruned_loss=0.089, over 19676.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3359, pruned_loss=0.1048, over 3799480.96 frames. ], batch size: 55, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:02:37,124 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+02 6.582e+02 8.580e+02 1.078e+03 2.349e+03, threshold=1.716e+03, percent-clipped=8.0 +2023-04-01 08:02:56,015 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38456.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:02:57,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1887, 1.5920, 1.5709, 2.0009, 1.7653, 1.6785, 1.5560, 2.0592], + device='cuda:1'), covar=tensor([0.0906, 0.1891, 0.1647, 0.1093, 0.1592, 0.1015, 0.1431, 0.0827], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0355, 0.0281, 0.0236, 0.0299, 0.0243, 0.0266, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:03:06,290 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4302, 2.4140, 1.6822, 1.4595, 2.2085, 1.2148, 1.3025, 1.7891], + device='cuda:1'), covar=tensor([0.0746, 0.0453, 0.0869, 0.0624, 0.0417, 0.1044, 0.0666, 0.0406], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0285, 0.0320, 0.0246, 0.0229, 0.0319, 0.0289, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:03:27,653 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-01 08:03:35,913 INFO [train.py:903] (1/4) Epoch 6, batch 4350, loss[loss=0.2301, simple_loss=0.2925, pruned_loss=0.08385, over 19748.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3358, pruned_loss=0.1049, over 3821844.05 frames. ], batch size: 45, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:40,410 INFO [train.py:903] (1/4) Epoch 6, batch 4400, loss[loss=0.3235, simple_loss=0.3814, pruned_loss=0.1328, over 19084.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3361, pruned_loss=0.105, over 3815072.17 frames. ], batch size: 69, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:42,534 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+02 6.588e+02 8.122e+02 1.160e+03 2.348e+03, threshold=1.624e+03, percent-clipped=4.0 +2023-04-01 08:05:05,902 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 08:05:13,671 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 08:05:40,416 INFO [train.py:903] (1/4) Epoch 6, batch 4450, loss[loss=0.2848, simple_loss=0.3469, pruned_loss=0.1113, over 19699.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3358, pruned_loss=0.1049, over 3806492.69 frames. ], batch size: 59, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:07,953 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 08:06:42,157 INFO [train.py:903] (1/4) Epoch 6, batch 4500, loss[loss=0.2503, simple_loss=0.3214, pruned_loss=0.08961, over 19486.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3342, pruned_loss=0.1039, over 3819775.01 frames. ], batch size: 49, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:44,523 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 6.139e+02 7.598e+02 9.442e+02 2.713e+03, threshold=1.520e+03, percent-clipped=3.0 +2023-04-01 08:06:55,375 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 08:07:42,586 INFO [train.py:903] (1/4) Epoch 6, batch 4550, loss[loss=0.2205, simple_loss=0.2947, pruned_loss=0.07313, over 19473.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3341, pruned_loss=0.1035, over 3817545.46 frames. ], batch size: 49, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:07:52,243 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5141, 3.9389, 4.1920, 4.1687, 1.4006, 3.8441, 3.4970, 3.7845], + device='cuda:1'), covar=tensor([0.1007, 0.0604, 0.0496, 0.0433, 0.4092, 0.0435, 0.0473, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0561, 0.0475, 0.0663, 0.0534, 0.0614, 0.0403, 0.0413, 0.0606], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 08:07:53,984 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 08:08:10,858 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38712.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:17,330 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 08:08:22,021 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:23,445 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6969, 1.8498, 2.3602, 2.9919, 2.4537, 2.7174, 2.1985, 2.9992], + device='cuda:1'), covar=tensor([0.0653, 0.1593, 0.1135, 0.0713, 0.1167, 0.0354, 0.0959, 0.0427], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0354, 0.0282, 0.0233, 0.0298, 0.0239, 0.0267, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:08:41,577 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:45,375 INFO [train.py:903] (1/4) Epoch 6, batch 4600, loss[loss=0.2501, simple_loss=0.3153, pruned_loss=0.09239, over 19668.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3347, pruned_loss=0.1035, over 3823926.22 frames. ], batch size: 53, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:08:47,714 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 6.588e+02 8.068e+02 1.040e+03 1.807e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 08:09:02,393 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3311, 1.4709, 2.1122, 1.5967, 3.2669, 2.6349, 3.5600, 1.5970], + device='cuda:1'), covar=tensor([0.1851, 0.3301, 0.1777, 0.1420, 0.1274, 0.1471, 0.1483, 0.2840], + device='cuda:1'), in_proj_covar=tensor([0.0452, 0.0522, 0.0505, 0.0413, 0.0565, 0.0456, 0.0639, 0.0455], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:09:45,874 INFO [train.py:903] (1/4) Epoch 6, batch 4650, loss[loss=0.2531, simple_loss=0.3062, pruned_loss=0.1, over 19757.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.333, pruned_loss=0.1026, over 3821651.62 frames. ], batch size: 47, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:01,545 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 08:10:11,697 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 08:10:43,218 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:46,233 INFO [train.py:903] (1/4) Epoch 6, batch 4700, loss[loss=0.3668, simple_loss=0.3958, pruned_loss=0.1689, over 13568.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3332, pruned_loss=0.103, over 3814765.49 frames. ], batch size: 136, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:47,736 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:48,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.701e+02 8.528e+02 1.085e+03 2.106e+03, threshold=1.706e+03, percent-clipped=3.0 +2023-04-01 08:11:07,238 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 08:11:46,443 INFO [train.py:903] (1/4) Epoch 6, batch 4750, loss[loss=0.2831, simple_loss=0.3521, pruned_loss=0.107, over 19739.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3329, pruned_loss=0.1026, over 3814421.48 frames. 
], batch size: 63, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:47,942 INFO [train.py:903] (1/4) Epoch 6, batch 4800, loss[loss=0.2476, simple_loss=0.3048, pruned_loss=0.09517, over 19734.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3351, pruned_loss=0.1041, over 3821600.85 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:52,284 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 6.641e+02 7.688e+02 1.063e+03 2.770e+03, threshold=1.538e+03, percent-clipped=4.0 +2023-04-01 08:13:49,881 INFO [train.py:903] (1/4) Epoch 6, batch 4850, loss[loss=0.2584, simple_loss=0.3332, pruned_loss=0.09179, over 19611.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3339, pruned_loss=0.1033, over 3817736.60 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:14,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 08:14:33,511 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8954, 4.2948, 4.5940, 4.5456, 1.4509, 4.2320, 3.8152, 4.1990], + device='cuda:1'), covar=tensor([0.1118, 0.0612, 0.0493, 0.0422, 0.4509, 0.0400, 0.0523, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0566, 0.0480, 0.0664, 0.0543, 0.0618, 0.0403, 0.0414, 0.0607], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 08:14:36,521 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 08:14:42,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 08:14:42,313 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 08:14:51,528 INFO [train.py:903] (1/4) Epoch 6, batch 4900, loss[loss=0.3889, simple_loss=0.4233, pruned_loss=0.1772, over 13086.00 frames. ], tot_loss[loss=0.269, simple_loss=0.333, pruned_loss=0.1025, over 3829809.96 frames. ], batch size: 136, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:51,568 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 08:14:55,127 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 6.820e+02 8.455e+02 1.082e+03 3.554e+03, threshold=1.691e+03, percent-clipped=3.0 +2023-04-01 08:15:04,828 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.6765, 5.1346, 2.8390, 4.4423, 1.6095, 4.5370, 4.8032, 5.0591], + device='cuda:1'), covar=tensor([0.0333, 0.0682, 0.1740, 0.0551, 0.3389, 0.0689, 0.0605, 0.0663], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0316, 0.0378, 0.0289, 0.0354, 0.0315, 0.0296, 0.0329], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 08:15:12,443 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 08:15:15,321 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1803, 3.8275, 2.2639, 2.3202, 3.3469, 1.9565, 1.4120, 1.9869], + device='cuda:1'), covar=tensor([0.0910, 0.0332, 0.0779, 0.0544, 0.0419, 0.0879, 0.0824, 0.0633], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0286, 0.0315, 0.0239, 0.0228, 0.0313, 0.0287, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:15:52,286 INFO [train.py:903] (1/4) Epoch 6, batch 4950, loss[loss=0.2621, simple_loss=0.3281, pruned_loss=0.098, over 19481.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3324, pruned_loss=0.1024, over 3821574.59 frames. ], batch size: 49, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:15:56,133 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:11,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 08:16:27,918 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:34,736 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 08:16:54,838 INFO [train.py:903] (1/4) Epoch 6, batch 5000, loss[loss=0.2988, simple_loss=0.352, pruned_loss=0.1228, over 18087.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3323, pruned_loss=0.1027, over 3813948.47 frames. ], batch size: 83, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:16:59,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.473e+02 6.699e+02 7.802e+02 1.019e+03 2.317e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 08:17:05,608 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 08:17:10,518 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0313, 1.9956, 1.9662, 3.0735, 2.0403, 2.7366, 2.5760, 1.8847], + device='cuda:1'), covar=tensor([0.2315, 0.1834, 0.0987, 0.1042, 0.2166, 0.0797, 0.1860, 0.1739], + device='cuda:1'), in_proj_covar=tensor([0.0677, 0.0676, 0.0583, 0.0828, 0.0705, 0.0590, 0.0713, 0.0620], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:17:14,762 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 08:17:31,083 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4985, 1.8699, 2.0973, 2.6385, 2.5222, 2.3304, 2.1382, 2.7192], + device='cuda:1'), covar=tensor([0.0719, 0.1444, 0.1101, 0.0797, 0.1009, 0.0396, 0.0890, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0359, 0.0285, 0.0236, 0.0303, 0.0245, 0.0272, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:17:51,514 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:17:57,111 INFO [train.py:903] (1/4) Epoch 6, batch 5050, loss[loss=0.3249, simple_loss=0.3676, pruned_loss=0.1412, over 13476.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3321, pruned_loss=0.1024, over 3820727.62 frames. 
], batch size: 136, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:18:12,185 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39203.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:18:29,777 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 08:18:46,960 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 08:18:57,184 INFO [train.py:903] (1/4) Epoch 6, batch 5100, loss[loss=0.2637, simple_loss=0.325, pruned_loss=0.1012, over 19790.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3332, pruned_loss=0.103, over 3824222.73 frames. ], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:19:00,438 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.152e+02 6.811e+02 8.395e+02 1.062e+03 1.934e+03, threshold=1.679e+03, percent-clipped=3.0 +2023-04-01 08:19:05,477 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 08:19:08,940 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 08:19:13,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 08:19:31,957 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5723, 1.6300, 1.7503, 2.1521, 1.4242, 1.7403, 2.0505, 1.6305], + device='cuda:1'), covar=tensor([0.2411, 0.1920, 0.1058, 0.1037, 0.2049, 0.0982, 0.2130, 0.1882], + device='cuda:1'), in_proj_covar=tensor([0.0683, 0.0681, 0.0585, 0.0829, 0.0711, 0.0591, 0.0722, 0.0629], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:19:57,722 INFO [train.py:903] (1/4) Epoch 6, batch 5150, loss[loss=0.2281, simple_loss=0.3075, pruned_loss=0.07436, over 19782.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3338, pruned_loss=0.1034, over 3831857.65 frames. ], batch size: 54, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:20:08,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 08:20:11,736 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:13,880 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:43,360 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 08:21:00,827 INFO [train.py:903] (1/4) Epoch 6, batch 5200, loss[loss=0.2146, simple_loss=0.2762, pruned_loss=0.07653, over 19766.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.335, pruned_loss=0.1045, over 3819290.88 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:21:04,235 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.495e+02 6.191e+02 7.598e+02 1.014e+03 2.218e+03, threshold=1.520e+03, percent-clipped=2.0 +2023-04-01 08:21:14,273 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 08:21:36,148 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.95 vs. limit=5.0 +2023-04-01 08:21:57,369 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-01 08:22:03,016 INFO [train.py:903] (1/4) Epoch 6, batch 5250, loss[loss=0.2931, simple_loss=0.3522, pruned_loss=0.117, over 19663.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3355, pruned_loss=0.1048, over 3823999.46 frames. ], batch size: 53, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:22:29,345 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 08:23:05,650 INFO [train.py:903] (1/4) Epoch 6, batch 5300, loss[loss=0.2519, simple_loss=0.3264, pruned_loss=0.08872, over 19520.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3329, pruned_loss=0.1026, over 3830971.67 frames. ], batch size: 56, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:10,375 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.313e+02 6.384e+02 7.596e+02 9.799e+02 2.153e+03, threshold=1.519e+03, percent-clipped=7.0 +2023-04-01 08:23:22,423 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 08:23:56,520 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:24:06,724 INFO [train.py:903] (1/4) Epoch 6, batch 5350, loss[loss=0.2663, simple_loss=0.3274, pruned_loss=0.1026, over 19761.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3336, pruned_loss=0.1032, over 3825363.22 frames. ], batch size: 51, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:24:13,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8152, 4.3612, 2.6313, 3.9153, 1.0413, 4.0242, 4.1058, 4.2847], + device='cuda:1'), covar=tensor([0.0521, 0.0920, 0.1884, 0.0603, 0.3967, 0.0797, 0.0632, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0325, 0.0384, 0.0291, 0.0360, 0.0317, 0.0300, 0.0332], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 08:24:41,834 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 08:25:07,430 INFO [train.py:903] (1/4) Epoch 6, batch 5400, loss[loss=0.3172, simple_loss=0.3752, pruned_loss=0.1296, over 19142.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3342, pruned_loss=0.1035, over 3807934.90 frames. ], batch size: 69, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:25:15,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.754e+02 6.102e+02 7.285e+02 1.015e+03 2.320e+03, threshold=1.457e+03, percent-clipped=6.0 +2023-04-01 08:25:19,789 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39547.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:30,311 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:39,825 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 08:25:52,467 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 08:26:00,359 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:13,205 INFO [train.py:903] (1/4) Epoch 6, batch 5450, loss[loss=0.3129, simple_loss=0.3594, pruned_loss=0.1332, over 19533.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.333, pruned_loss=0.1026, over 3813329.94 frames. 
], batch size: 54, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:13,643 INFO [train.py:903] (1/4) Epoch 6, batch 5500, loss[loss=0.3252, simple_loss=0.3778, pruned_loss=0.1363, over 19647.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3357, pruned_loss=0.1047, over 3819240.65 frames. ], batch size: 58, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:18,180 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.238e+02 6.796e+02 8.775e+02 1.086e+03 2.226e+03, threshold=1.755e+03, percent-clipped=13.0 +2023-04-01 08:27:20,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:27:33,868 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39657.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:27:36,847 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 08:27:40,448 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39662.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:28:14,676 INFO [train.py:903] (1/4) Epoch 6, batch 5550, loss[loss=0.2868, simple_loss=0.3422, pruned_loss=0.1157, over 19629.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3353, pruned_loss=0.1043, over 3810202.68 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:28:21,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 08:28:43,665 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7216, 3.1270, 3.2049, 3.2087, 1.2125, 2.9954, 2.6494, 2.9315], + device='cuda:1'), covar=tensor([0.1246, 0.0710, 0.0731, 0.0674, 0.4085, 0.0583, 0.0721, 0.1239], + device='cuda:1'), in_proj_covar=tensor([0.0562, 0.0481, 0.0656, 0.0540, 0.0613, 0.0406, 0.0414, 0.0606], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 08:29:11,944 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 08:29:15,360 INFO [train.py:903] (1/4) Epoch 6, batch 5600, loss[loss=0.295, simple_loss=0.3586, pruned_loss=0.1157, over 19612.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.335, pruned_loss=0.1042, over 3824406.63 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:29:20,710 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 6.221e+02 7.656e+02 9.358e+02 1.388e+03, threshold=1.531e+03, percent-clipped=0.0 +2023-04-01 08:29:43,084 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:11,515 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:19,676 INFO [train.py:903] (1/4) Epoch 6, batch 5650, loss[loss=0.2215, simple_loss=0.285, pruned_loss=0.07897, over 19341.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3349, pruned_loss=0.1042, over 3824309.96 frames. 
], batch size: 47, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:30:47,697 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1753, 1.2127, 1.6823, 1.3601, 2.4716, 2.0804, 2.6244, 0.9156], + device='cuda:1'), covar=tensor([0.1922, 0.3340, 0.1821, 0.1601, 0.1180, 0.1610, 0.1234, 0.2978], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0519, 0.0506, 0.0409, 0.0565, 0.0449, 0.0630, 0.0454], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:31:01,127 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:08,824 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 08:31:13,797 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39833.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:21,777 INFO [train.py:903] (1/4) Epoch 6, batch 5700, loss[loss=0.2746, simple_loss=0.3358, pruned_loss=0.1068, over 19578.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.335, pruned_loss=0.1043, over 3809348.98 frames. ], batch size: 61, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:22,132 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0852, 1.3162, 1.3641, 1.3648, 2.6563, 0.9089, 1.8850, 2.6894], + device='cuda:1'), covar=tensor([0.0427, 0.2285, 0.2354, 0.1496, 0.0677, 0.2259, 0.1066, 0.0444], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0317, 0.0319, 0.0300, 0.0320, 0.0319, 0.0295, 0.0312], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:31:26,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.066e+02 6.807e+02 8.639e+02 1.032e+03 2.369e+03, threshold=1.728e+03, percent-clipped=2.0 +2023-04-01 08:31:57,155 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9530, 1.4011, 1.5683, 1.9951, 2.1039, 1.6326, 1.6137, 1.8070], + device='cuda:1'), covar=tensor([0.0854, 0.1685, 0.1329, 0.0915, 0.0991, 0.0538, 0.1069, 0.0682], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0354, 0.0282, 0.0237, 0.0301, 0.0243, 0.0273, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:32:12,192 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-01 08:32:17,452 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5396, 2.0757, 1.6028, 1.5799, 1.9605, 1.3000, 1.4178, 1.6050], + device='cuda:1'), covar=tensor([0.0553, 0.0412, 0.0599, 0.0445, 0.0339, 0.0722, 0.0491, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0275, 0.0311, 0.0236, 0.0222, 0.0307, 0.0283, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:32:21,376 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. limit=5.0 +2023-04-01 08:32:21,660 INFO [train.py:903] (1/4) Epoch 6, batch 5750, loss[loss=0.2838, simple_loss=0.3532, pruned_loss=0.1071, over 18788.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3346, pruned_loss=0.104, over 3808558.06 frames. 
], batch size: 74, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:32:23,981 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 08:32:30,895 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 08:32:35,613 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 08:32:52,593 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8450, 1.7659, 2.1540, 2.8152, 2.4328, 2.3641, 2.3703, 2.7398], + device='cuda:1'), covar=tensor([0.0715, 0.1779, 0.1173, 0.0978, 0.1255, 0.0412, 0.0920, 0.0487], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0355, 0.0283, 0.0238, 0.0303, 0.0243, 0.0273, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:32:58,446 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:23,258 INFO [train.py:903] (1/4) Epoch 6, batch 5800, loss[loss=0.2428, simple_loss=0.3145, pruned_loss=0.08557, over 19770.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3342, pruned_loss=0.1039, over 3800671.71 frames. ], batch size: 54, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:33:23,690 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:28,189 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:29,042 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+02 6.519e+02 7.840e+02 9.394e+02 2.455e+03, threshold=1.568e+03, percent-clipped=4.0 +2023-04-01 08:34:01,397 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9902, 1.0839, 1.5406, 0.5905, 2.1856, 2.4584, 2.0995, 2.5856], + device='cuda:1'), covar=tensor([0.1361, 0.3122, 0.2782, 0.2096, 0.0402, 0.0231, 0.0325, 0.0199], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0284, 0.0314, 0.0248, 0.0201, 0.0130, 0.0200, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 08:34:26,992 INFO [train.py:903] (1/4) Epoch 6, batch 5850, loss[loss=0.2583, simple_loss=0.332, pruned_loss=0.09232, over 19792.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3328, pruned_loss=0.103, over 3809056.18 frames. 
], batch size: 56, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:34:28,639 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2167, 2.1467, 1.7614, 1.7458, 1.5445, 1.7303, 0.3285, 1.0066], + device='cuda:1'), covar=tensor([0.0284, 0.0297, 0.0224, 0.0309, 0.0665, 0.0376, 0.0647, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0298, 0.0299, 0.0314, 0.0386, 0.0312, 0.0289, 0.0303], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 08:34:40,714 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40001.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:34:59,479 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:28,999 INFO [train.py:903] (1/4) Epoch 6, batch 5900, loss[loss=0.2674, simple_loss=0.3205, pruned_loss=0.1071, over 19708.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3333, pruned_loss=0.1035, over 3809605.16 frames. ], batch size: 45, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:35:31,444 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 08:35:31,819 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:33,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.177e+02 6.123e+02 7.695e+02 9.772e+02 2.844e+03, threshold=1.539e+03, percent-clipped=4.0 +2023-04-01 08:35:53,058 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 08:36:10,183 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 08:36:30,103 INFO [train.py:903] (1/4) Epoch 6, batch 5950, loss[loss=0.2699, simple_loss=0.3435, pruned_loss=0.09814, over 19665.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.332, pruned_loss=0.1024, over 3824213.97 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:03,209 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40116.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:37:18,121 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:37:30,831 INFO [train.py:903] (1/4) Epoch 6, batch 6000, loss[loss=0.2744, simple_loss=0.3414, pruned_loss=0.1037, over 19301.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3323, pruned_loss=0.1022, over 3839194.04 frames. ], batch size: 66, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:30,831 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 08:37:43,219 INFO [train.py:937] (1/4) Epoch 6, validation: loss=0.1955, simple_loss=0.2951, pruned_loss=0.04789, over 944034.00 frames. +2023-04-01 08:37:43,220 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 08:37:47,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 6.298e+02 7.514e+02 9.544e+02 1.960e+03, threshold=1.503e+03, percent-clipped=1.0 +2023-04-01 08:38:29,640 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:38:44,845 INFO [train.py:903] (1/4) Epoch 6, batch 6050, loss[loss=0.2948, simple_loss=0.346, pruned_loss=0.1218, over 13118.00 frames. 
], tot_loss[loss=0.2662, simple_loss=0.3306, pruned_loss=0.1009, over 3840977.66 frames. ], batch size: 136, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:38:53,728 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:14,913 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:24,383 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:43,576 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40236.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:48,074 INFO [train.py:903] (1/4) Epoch 6, batch 6100, loss[loss=0.2098, simple_loss=0.2818, pruned_loss=0.06893, over 19728.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3309, pruned_loss=0.1011, over 3822322.88 frames. ], batch size: 46, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:39:54,266 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 6.367e+02 7.728e+02 1.144e+03 2.582e+03, threshold=1.546e+03, percent-clipped=10.0 +2023-04-01 08:39:54,673 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:15,215 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-01 08:40:21,494 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9417, 4.9307, 5.8204, 5.7217, 1.7222, 5.4739, 4.6333, 5.3029], + device='cuda:1'), covar=tensor([0.1005, 0.0710, 0.0457, 0.0376, 0.4445, 0.0278, 0.0435, 0.1002], + device='cuda:1'), in_proj_covar=tensor([0.0569, 0.0490, 0.0667, 0.0550, 0.0622, 0.0415, 0.0424, 0.0611], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 08:40:51,266 INFO [train.py:903] (1/4) Epoch 6, batch 6150, loss[loss=0.2694, simple_loss=0.3305, pruned_loss=0.1041, over 19477.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3322, pruned_loss=0.1019, over 3825189.26 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:40:51,597 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40290.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:54,065 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:41:19,441 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 08:41:52,300 INFO [train.py:903] (1/4) Epoch 6, batch 6200, loss[loss=0.2784, simple_loss=0.3424, pruned_loss=0.1072, over 19767.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3327, pruned_loss=0.1021, over 3836024.32 frames. 
], batch size: 54, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:41:57,219 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 6.810e+02 8.218e+02 1.008e+03 2.334e+03, threshold=1.644e+03, percent-clipped=5.0 +2023-04-01 08:42:01,834 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:42:33,816 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40372.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:42:54,271 INFO [train.py:903] (1/4) Epoch 6, batch 6250, loss[loss=0.3408, simple_loss=0.3848, pruned_loss=0.1484, over 19486.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3325, pruned_loss=0.1023, over 3832112.82 frames. ], batch size: 64, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:43:04,601 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:43:14,690 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4756, 1.9570, 2.0555, 2.6738, 2.3584, 2.0928, 1.8874, 2.6739], + device='cuda:1'), covar=tensor([0.0747, 0.1501, 0.1132, 0.0735, 0.1058, 0.0453, 0.1013, 0.0469], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0362, 0.0287, 0.0239, 0.0306, 0.0248, 0.0277, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:43:27,274 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 08:43:58,136 INFO [train.py:903] (1/4) Epoch 6, batch 6300, loss[loss=0.2362, simple_loss=0.305, pruned_loss=0.08367, over 19572.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3317, pruned_loss=0.1017, over 3821355.88 frames. ], batch size: 52, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:44:03,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.389e+02 8.265e+02 1.061e+03 2.633e+03, threshold=1.653e+03, percent-clipped=7.0 +2023-04-01 08:44:46,061 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 08:44:52,526 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 08:45:00,749 INFO [train.py:903] (1/4) Epoch 6, batch 6350, loss[loss=0.2408, simple_loss=0.3185, pruned_loss=0.0815, over 19601.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3331, pruned_loss=0.1029, over 3835157.73 frames. ], batch size: 57, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:45:12,801 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:24,327 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:44,956 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40525.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:02,597 INFO [train.py:903] (1/4) Epoch 6, batch 6400, loss[loss=0.2713, simple_loss=0.3481, pruned_loss=0.09723, over 19271.00 frames. ], tot_loss[loss=0.268, simple_loss=0.332, pruned_loss=0.102, over 3845180.21 frames. 
], batch size: 66, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:46:07,241 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.528e+02 8.039e+02 1.077e+03 1.980e+03, threshold=1.608e+03, percent-clipped=3.0 +2023-04-01 08:46:12,175 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:23,186 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:44,134 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 08:46:44,883 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:52,950 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40580.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:47:04,378 INFO [train.py:903] (1/4) Epoch 6, batch 6450, loss[loss=0.2614, simple_loss=0.3225, pruned_loss=0.1002, over 19611.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3322, pruned_loss=0.1016, over 3840595.94 frames. ], batch size: 50, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:47:20,546 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 08:47:46,786 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9585, 1.0961, 1.3699, 0.5701, 2.0774, 2.1987, 1.9479, 2.3015], + device='cuda:1'), covar=tensor([0.1373, 0.2983, 0.2809, 0.2266, 0.0485, 0.0394, 0.0357, 0.0254], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0282, 0.0312, 0.0246, 0.0204, 0.0129, 0.0200, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 08:47:50,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 08:47:59,673 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:07,603 INFO [train.py:903] (1/4) Epoch 6, batch 6500, loss[loss=0.244, simple_loss=0.3033, pruned_loss=0.09234, over 19384.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3329, pruned_loss=0.1021, over 3819251.25 frames. ], batch size: 48, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:48:13,017 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 6.039e+02 7.833e+02 1.001e+03 2.233e+03, threshold=1.567e+03, percent-clipped=5.0 +2023-04-01 08:48:15,440 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 08:48:44,319 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:47,960 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:12,715 INFO [train.py:903] (1/4) Epoch 6, batch 6550, loss[loss=0.2705, simple_loss=0.3161, pruned_loss=0.1124, over 18603.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3332, pruned_loss=0.1025, over 3821470.46 frames. 
], batch size: 41, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:49:15,166 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:18,789 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:50:15,125 INFO [train.py:903] (1/4) Epoch 6, batch 6600, loss[loss=0.3896, simple_loss=0.4184, pruned_loss=0.1804, over 17339.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3339, pruned_loss=0.1029, over 3827651.26 frames. ], batch size: 101, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:50:19,763 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+02 6.030e+02 7.716e+02 9.913e+02 2.888e+03, threshold=1.543e+03, percent-clipped=3.0 +2023-04-01 08:50:23,601 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3475, 3.9090, 2.4664, 3.5385, 1.2404, 3.4692, 3.4811, 3.6218], + device='cuda:1'), covar=tensor([0.0661, 0.0997, 0.2009, 0.0766, 0.3521, 0.0904, 0.0824, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0372, 0.0327, 0.0382, 0.0297, 0.0361, 0.0316, 0.0307, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 08:50:26,126 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40749.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:51:17,591 INFO [train.py:903] (1/4) Epoch 6, batch 6650, loss[loss=0.2736, simple_loss=0.321, pruned_loss=0.1131, over 19776.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3342, pruned_loss=0.1032, over 3814407.86 frames. ], batch size: 47, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:51:40,494 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40807.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:52:19,368 INFO [train.py:903] (1/4) Epoch 6, batch 6700, loss[loss=0.215, simple_loss=0.2837, pruned_loss=0.07311, over 19781.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3334, pruned_loss=0.103, over 3827245.97 frames. ], batch size: 47, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:52:24,133 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.981e+02 6.315e+02 8.385e+02 9.952e+02 2.559e+03, threshold=1.677e+03, percent-clipped=5.0 +2023-04-01 08:52:39,409 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:10,596 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-01 08:53:13,429 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:19,358 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6263, 1.2493, 1.3519, 1.7387, 3.1777, 0.9653, 2.1070, 3.3698], + device='cuda:1'), covar=tensor([0.0388, 0.2569, 0.2626, 0.1416, 0.0661, 0.2436, 0.1285, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0310, 0.0315, 0.0292, 0.0315, 0.0311, 0.0290, 0.0310], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 08:53:21,368 INFO [train.py:903] (1/4) Epoch 6, batch 6750, loss[loss=0.2381, simple_loss=0.2978, pruned_loss=0.08924, over 19367.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3324, pruned_loss=0.1023, over 3817370.63 frames. 
], batch size: 47, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:53:43,437 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2071, 2.1594, 1.7133, 1.7010, 1.5746, 1.6538, 0.1923, 0.9598], + device='cuda:1'), covar=tensor([0.0315, 0.0292, 0.0240, 0.0354, 0.0654, 0.0393, 0.0666, 0.0533], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0304, 0.0300, 0.0322, 0.0394, 0.0318, 0.0292, 0.0308], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 08:53:53,186 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:04,516 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:17,517 INFO [train.py:903] (1/4) Epoch 6, batch 6800, loss[loss=0.3068, simple_loss=0.3502, pruned_loss=0.1317, over 13384.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3332, pruned_loss=0.103, over 3810475.37 frames. ], batch size: 136, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:54:23,019 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.879e+02 7.609e+02 1.019e+03 2.150e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 08:54:31,421 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:33,650 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:04,569 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 08:55:05,006 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 08:55:08,785 INFO [train.py:903] (1/4) Epoch 7, batch 0, loss[loss=0.3174, simple_loss=0.3633, pruned_loss=0.1357, over 19699.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3633, pruned_loss=0.1357, over 19699.00 frames. ], batch size: 53, lr: 1.24e-02, grad_scale: 8.0 +2023-04-01 08:55:08,786 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 08:55:20,407 INFO [train.py:937] (1/4) Epoch 7, validation: loss=0.1957, simple_loss=0.2957, pruned_loss=0.04779, over 944034.00 frames. +2023-04-01 08:55:20,408 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 08:55:21,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:30,897 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:32,922 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 08:56:02,422 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3244, 1.5061, 2.0617, 1.6517, 3.0596, 2.5802, 3.1711, 1.4159], + device='cuda:1'), covar=tensor([0.1905, 0.3242, 0.1882, 0.1517, 0.1478, 0.1635, 0.1628, 0.3082], + device='cuda:1'), in_proj_covar=tensor([0.0452, 0.0528, 0.0509, 0.0409, 0.0569, 0.0456, 0.0638, 0.0456], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 08:56:04,723 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:15,986 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:19,115 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41015.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:22,013 INFO [train.py:903] (1/4) Epoch 7, batch 50, loss[loss=0.2503, simple_loss=0.3224, pruned_loss=0.08909, over 19376.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3311, pruned_loss=0.09892, over 879878.00 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:56:36,152 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41030.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:51,483 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 08:56:51,946 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.786e+02 6.089e+02 7.435e+02 1.027e+03 3.072e+03, threshold=1.487e+03, percent-clipped=7.0 +2023-04-01 08:56:56,504 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 08:57:17,924 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:18,061 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:23,453 INFO [train.py:903] (1/4) Epoch 7, batch 100, loss[loss=0.365, simple_loss=0.396, pruned_loss=0.167, over 13818.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3311, pruned_loss=0.09998, over 1534307.46 frames. ], batch size: 135, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:57:34,768 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 08:57:46,214 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41088.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:24,482 INFO [train.py:903] (1/4) Epoch 7, batch 150, loss[loss=0.1994, simple_loss=0.2759, pruned_loss=0.06142, over 14749.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3288, pruned_loss=0.09899, over 2031629.28 frames. 
], batch size: 32, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:58:26,857 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:36,364 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:56,746 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.181e+02 6.219e+02 8.190e+02 1.094e+03 2.901e+03, threshold=1.638e+03, percent-clipped=4.0 +2023-04-01 08:59:22,643 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 08:59:23,716 INFO [train.py:903] (1/4) Epoch 7, batch 200, loss[loss=0.2802, simple_loss=0.344, pruned_loss=0.1081, over 19319.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3307, pruned_loss=0.1008, over 2433232.17 frames. ], batch size: 70, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:00:06,211 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3818, 0.9658, 1.2210, 1.1829, 1.9681, 0.7998, 1.8132, 2.0577], + device='cuda:1'), covar=tensor([0.0818, 0.2832, 0.2704, 0.1585, 0.1104, 0.2204, 0.1098, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0318, 0.0321, 0.0299, 0.0322, 0.0316, 0.0295, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:00:27,760 INFO [train.py:903] (1/4) Epoch 7, batch 250, loss[loss=0.2487, simple_loss=0.3167, pruned_loss=0.09031, over 19837.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3322, pruned_loss=0.1016, over 2755062.50 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:00:38,020 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:40,036 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:59,432 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.194e+02 6.866e+02 8.894e+02 1.080e+03 3.290e+03, threshold=1.779e+03, percent-clipped=6.0 +2023-04-01 09:01:06,563 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:23,879 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:27,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7253, 4.3080, 2.4666, 3.7830, 1.2913, 3.9090, 3.9059, 4.0360], + device='cuda:1'), covar=tensor([0.0553, 0.0973, 0.1852, 0.0760, 0.3516, 0.0728, 0.0682, 0.0985], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0330, 0.0376, 0.0296, 0.0358, 0.0311, 0.0300, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 09:01:30,757 INFO [train.py:903] (1/4) Epoch 7, batch 300, loss[loss=0.2372, simple_loss=0.3047, pruned_loss=0.08483, over 19582.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3295, pruned_loss=0.1002, over 3005745.78 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:31,485 INFO [train.py:903] (1/4) Epoch 7, batch 350, loss[loss=0.2193, simple_loss=0.2838, pruned_loss=0.07735, over 19473.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3314, pruned_loss=0.1008, over 3186394.97 frames. 
], batch size: 49, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:33,981 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 09:03:02,044 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:04,940 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.797e+02 7.460e+02 9.435e+02 2.818e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-04-01 09:03:22,750 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:32,953 INFO [train.py:903] (1/4) Epoch 7, batch 400, loss[loss=0.2972, simple_loss=0.3589, pruned_loss=0.1178, over 19481.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3297, pruned_loss=0.0994, over 3332707.29 frames. ], batch size: 64, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:03:43,461 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41377.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:54,211 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:07,919 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3338, 2.1970, 1.9576, 1.8347, 1.5510, 1.8750, 0.4083, 1.1482], + device='cuda:1'), covar=tensor([0.0274, 0.0292, 0.0211, 0.0276, 0.0608, 0.0357, 0.0608, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0305, 0.0301, 0.0323, 0.0392, 0.0320, 0.0290, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 09:04:20,760 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:23,219 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:34,710 INFO [train.py:903] (1/4) Epoch 7, batch 450, loss[loss=0.2657, simple_loss=0.3221, pruned_loss=0.1047, over 19744.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3298, pruned_loss=0.09983, over 3418137.86 frames. ], batch size: 51, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:05:02,681 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 09:05:03,838 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 09:05:06,099 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 5.824e+02 7.719e+02 9.807e+02 3.448e+03, threshold=1.544e+03, percent-clipped=7.0 +2023-04-01 09:05:31,583 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:05:37,399 INFO [train.py:903] (1/4) Epoch 7, batch 500, loss[loss=0.2332, simple_loss=0.2972, pruned_loss=0.08461, over 19362.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3316, pruned_loss=0.1009, over 3513693.20 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 16.0 +2023-04-01 09:05:44,885 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:06:38,625 INFO [train.py:903] (1/4) Epoch 7, batch 550, loss[loss=0.259, simple_loss=0.3298, pruned_loss=0.09407, over 19694.00 frames. 
], tot_loss[loss=0.2668, simple_loss=0.3313, pruned_loss=0.1011, over 3573133.04 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:06:43,705 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41522.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:07:09,850 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 6.032e+02 7.411e+02 9.275e+02 1.625e+03, threshold=1.482e+03, percent-clipped=1.0 +2023-04-01 09:07:37,911 INFO [train.py:903] (1/4) Epoch 7, batch 600, loss[loss=0.2445, simple_loss=0.3086, pruned_loss=0.09022, over 19816.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3323, pruned_loss=0.1019, over 3627029.55 frames. ], batch size: 48, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:07:44,495 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-01 09:07:50,844 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:13,424 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 09:08:16,202 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:39,311 INFO [train.py:903] (1/4) Epoch 7, batch 650, loss[loss=0.2685, simple_loss=0.3369, pruned_loss=0.1001, over 19460.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.331, pruned_loss=0.1007, over 3677798.97 frames. ], batch size: 64, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:08:45,289 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:00,389 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41633.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:13,511 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 5.719e+02 7.418e+02 1.065e+03 4.334e+03, threshold=1.484e+03, percent-clipped=7.0 +2023-04-01 09:09:14,982 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4763, 1.9914, 1.9892, 2.6730, 1.8202, 2.7661, 2.9226, 2.6382], + device='cuda:1'), covar=tensor([0.0701, 0.0927, 0.0989, 0.0904, 0.1065, 0.0629, 0.0819, 0.0577], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0234, 0.0234, 0.0263, 0.0255, 0.0218, 0.0218, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 09:09:28,654 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:40,760 INFO [train.py:903] (1/4) Epoch 7, batch 700, loss[loss=0.2885, simple_loss=0.3577, pruned_loss=0.1096, over 19098.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.331, pruned_loss=0.1003, over 3723612.88 frames. ], batch size: 69, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:09:48,541 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:10:02,987 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 09:10:45,012 INFO [train.py:903] (1/4) Epoch 7, batch 750, loss[loss=0.2623, simple_loss=0.3421, pruned_loss=0.09118, over 19272.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3299, pruned_loss=0.09953, over 3749822.20 frames. 
], batch size: 66, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:10:59,406 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:15,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.751e+02 6.886e+02 8.721e+02 1.519e+03, threshold=1.377e+03, percent-clipped=2.0 +2023-04-01 09:11:26,559 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2142, 2.1566, 2.3594, 3.4886, 2.2068, 3.4734, 3.2542, 2.1429], + device='cuda:1'), covar=tensor([0.2923, 0.2473, 0.1020, 0.1382, 0.2945, 0.0872, 0.1998, 0.1943], + device='cuda:1'), in_proj_covar=tensor([0.0700, 0.0694, 0.0596, 0.0840, 0.0711, 0.0606, 0.0731, 0.0641], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 09:11:29,953 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 09:11:31,818 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:46,751 INFO [train.py:903] (1/4) Epoch 7, batch 800, loss[loss=0.2845, simple_loss=0.3479, pruned_loss=0.1105, over 19769.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3297, pruned_loss=0.09907, over 3772214.93 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:11:56,200 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 09:11:58,775 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:27,398 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7674, 3.1848, 3.2373, 3.2808, 1.0992, 3.0677, 2.7253, 2.9371], + device='cuda:1'), covar=tensor([0.1207, 0.0792, 0.0716, 0.0635, 0.4279, 0.0635, 0.0691, 0.1268], + device='cuda:1'), in_proj_covar=tensor([0.0570, 0.0495, 0.0666, 0.0553, 0.0621, 0.0422, 0.0424, 0.0619], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 09:12:31,075 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:48,110 INFO [train.py:903] (1/4) Epoch 7, batch 850, loss[loss=0.248, simple_loss=0.3075, pruned_loss=0.09431, over 19765.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3293, pruned_loss=0.09938, over 3775714.12 frames. 
], batch size: 47, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:13:10,282 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:12,180 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8652, 4.2304, 4.4977, 4.5543, 1.4908, 4.1376, 3.6368, 4.0748], + device='cuda:1'), covar=tensor([0.1110, 0.0669, 0.0588, 0.0466, 0.4863, 0.0487, 0.0606, 0.1145], + device='cuda:1'), in_proj_covar=tensor([0.0571, 0.0493, 0.0665, 0.0551, 0.0622, 0.0424, 0.0425, 0.0618], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 09:13:23,186 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.000e+02 6.514e+02 8.083e+02 9.646e+02 1.896e+03, threshold=1.617e+03, percent-clipped=5.0 +2023-04-01 09:13:38,259 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 09:13:41,028 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:50,188 INFO [train.py:903] (1/4) Epoch 7, batch 900, loss[loss=0.2561, simple_loss=0.3239, pruned_loss=0.09418, over 19595.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3295, pruned_loss=0.09989, over 3792091.80 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:14:05,378 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4911, 4.0452, 2.2434, 3.6245, 0.9295, 3.6815, 3.7498, 3.8755], + device='cuda:1'), covar=tensor([0.0621, 0.1101, 0.2295, 0.0738, 0.4089, 0.0827, 0.0757, 0.1033], + device='cuda:1'), in_proj_covar=tensor([0.0379, 0.0332, 0.0383, 0.0300, 0.0357, 0.0313, 0.0307, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 09:14:51,584 INFO [train.py:903] (1/4) Epoch 7, batch 950, loss[loss=0.2825, simple_loss=0.3352, pruned_loss=0.1149, over 19757.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3291, pruned_loss=0.09923, over 3811253.63 frames. ], batch size: 47, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:14:55,108 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 09:15:01,406 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:15:26,240 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.318e+02 6.505e+02 7.519e+02 9.487e+02 1.757e+03, threshold=1.504e+03, percent-clipped=1.0 +2023-04-01 09:15:53,978 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41966.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:15:55,890 INFO [train.py:903] (1/4) Epoch 7, batch 1000, loss[loss=0.2724, simple_loss=0.3418, pruned_loss=0.1015, over 19462.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3308, pruned_loss=0.1003, over 3799998.54 frames. 
], batch size: 64, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:16:45,430 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7933, 1.5030, 1.3940, 1.8072, 1.7544, 1.6426, 1.6192, 1.8441], + device='cuda:1'), covar=tensor([0.0875, 0.1518, 0.1284, 0.0874, 0.1026, 0.0459, 0.0937, 0.0588], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0361, 0.0289, 0.0238, 0.0303, 0.0248, 0.0274, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:16:48,566 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 09:16:56,956 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:16:59,084 INFO [train.py:903] (1/4) Epoch 7, batch 1050, loss[loss=0.2943, simple_loss=0.3426, pruned_loss=0.123, over 19714.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3316, pruned_loss=0.1007, over 3814300.61 frames. ], batch size: 51, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:17:14,992 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6700, 1.3147, 1.3585, 1.8443, 1.4638, 1.9275, 1.7731, 1.6677], + device='cuda:1'), covar=tensor([0.0813, 0.1081, 0.1158, 0.0893, 0.0976, 0.0703, 0.0967, 0.0731], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0234, 0.0234, 0.0265, 0.0255, 0.0217, 0.0218, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 09:17:19,836 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 09:17:29,383 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 09:17:33,747 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+02 5.894e+02 7.129e+02 9.584e+02 2.561e+03, threshold=1.426e+03, percent-clipped=5.0 +2023-04-01 09:18:00,561 INFO [train.py:903] (1/4) Epoch 7, batch 1100, loss[loss=0.2717, simple_loss=0.3157, pruned_loss=0.1139, over 19760.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3296, pruned_loss=0.09971, over 3815834.47 frames. ], batch size: 47, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:03,370 INFO [train.py:903] (1/4) Epoch 7, batch 1150, loss[loss=0.2717, simple_loss=0.3413, pruned_loss=0.1011, over 19050.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3299, pruned_loss=0.1001, over 3794565.72 frames. ], batch size: 69, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:21,137 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:19:37,737 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.892e+02 7.369e+02 1.011e+03 1.805e+03, threshold=1.474e+03, percent-clipped=4.0 +2023-04-01 09:20:05,775 INFO [train.py:903] (1/4) Epoch 7, batch 1200, loss[loss=0.2814, simple_loss=0.3508, pruned_loss=0.106, over 19303.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3305, pruned_loss=0.1003, over 3793234.29 frames. ], batch size: 66, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:20:30,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 09:21:08,028 INFO [train.py:903] (1/4) Epoch 7, batch 1250, loss[loss=0.2351, simple_loss=0.32, pruned_loss=0.07515, over 19529.00 frames. 
], tot_loss[loss=0.2647, simple_loss=0.3295, pruned_loss=0.1, over 3799697.97 frames. ], batch size: 56, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:21:43,466 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 6.237e+02 7.824e+02 1.034e+03 2.254e+03, threshold=1.565e+03, percent-clipped=6.0 +2023-04-01 09:22:09,629 INFO [train.py:903] (1/4) Epoch 7, batch 1300, loss[loss=0.2909, simple_loss=0.3529, pruned_loss=0.1144, over 19526.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3304, pruned_loss=0.1003, over 3800033.44 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:22:09,809 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:23:02,499 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42310.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:23:12,314 INFO [train.py:903] (1/4) Epoch 7, batch 1350, loss[loss=0.3393, simple_loss=0.3802, pruned_loss=0.1492, over 13371.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3294, pruned_loss=0.09992, over 3807643.42 frames. ], batch size: 136, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:23:29,490 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6788, 2.0990, 1.7041, 1.6749, 1.9995, 1.4440, 1.4382, 1.6978], + device='cuda:1'), covar=tensor([0.0612, 0.0538, 0.0598, 0.0455, 0.0365, 0.0704, 0.0576, 0.0387], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0283, 0.0315, 0.0242, 0.0229, 0.0311, 0.0291, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:23:47,361 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.606e+02 6.578e+02 7.819e+02 1.024e+03 2.032e+03, threshold=1.564e+03, percent-clipped=3.0 +2023-04-01 09:24:07,204 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2432, 1.2714, 1.6464, 1.3878, 2.1252, 1.9601, 2.3437, 0.7228], + device='cuda:1'), covar=tensor([0.1850, 0.3367, 0.1867, 0.1551, 0.1230, 0.1602, 0.1157, 0.3074], + device='cuda:1'), in_proj_covar=tensor([0.0458, 0.0526, 0.0518, 0.0413, 0.0565, 0.0457, 0.0631, 0.0457], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 09:24:15,698 INFO [train.py:903] (1/4) Epoch 7, batch 1400, loss[loss=0.232, simple_loss=0.3054, pruned_loss=0.07933, over 19747.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3282, pruned_loss=0.09886, over 3805983.75 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:24:34,411 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:39,183 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42387.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:50,665 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:10,408 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:12,454 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 09:25:17,935 INFO [train.py:903] (1/4) Epoch 7, batch 1450, loss[loss=0.2182, simple_loss=0.2885, pruned_loss=0.07395, over 19402.00 frames. 
], tot_loss[loss=0.2613, simple_loss=0.3266, pruned_loss=0.09803, over 3803176.27 frames. ], batch size: 48, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:25:26,426 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42425.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:25:53,241 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.957e+02 5.854e+02 7.466e+02 9.791e+02 2.115e+03, threshold=1.493e+03, percent-clipped=4.0 +2023-04-01 09:26:19,590 INFO [train.py:903] (1/4) Epoch 7, batch 1500, loss[loss=0.2207, simple_loss=0.2944, pruned_loss=0.07348, over 19751.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3268, pruned_loss=0.09822, over 3805491.11 frames. ], batch size: 51, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:07,129 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-01 09:27:20,486 INFO [train.py:903] (1/4) Epoch 7, batch 1550, loss[loss=0.29, simple_loss=0.356, pruned_loss=0.112, over 19308.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.329, pruned_loss=0.09959, over 3802277.47 frames. ], batch size: 66, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:26,957 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9812, 1.6480, 1.4538, 1.9103, 1.8270, 1.7207, 1.4350, 1.7945], + device='cuda:1'), covar=tensor([0.0691, 0.1223, 0.1262, 0.0765, 0.0876, 0.0447, 0.1010, 0.0589], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0356, 0.0285, 0.0236, 0.0302, 0.0245, 0.0266, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:27:29,126 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:27:55,714 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 6.427e+02 8.071e+02 9.919e+02 2.182e+03, threshold=1.614e+03, percent-clipped=7.0 +2023-04-01 09:28:23,008 INFO [train.py:903] (1/4) Epoch 7, batch 1600, loss[loss=0.2253, simple_loss=0.2893, pruned_loss=0.08067, over 19327.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3288, pruned_loss=0.09945, over 3811394.72 frames. ], batch size: 44, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:28:41,307 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 09:29:04,602 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5367, 1.4826, 1.6541, 1.8007, 3.9685, 1.0171, 2.1735, 3.8508], + device='cuda:1'), covar=tensor([0.0316, 0.2570, 0.2605, 0.1649, 0.0637, 0.2614, 0.1379, 0.0387], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0319, 0.0326, 0.0294, 0.0323, 0.0316, 0.0296, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:29:24,654 INFO [train.py:903] (1/4) Epoch 7, batch 1650, loss[loss=0.2223, simple_loss=0.2996, pruned_loss=0.07254, over 19478.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3269, pruned_loss=0.0986, over 3814103.51 frames. ], batch size: 49, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:29:50,150 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:29:59,296 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. 
limit=2.0 +2023-04-01 09:29:59,742 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.059e+02 6.175e+02 7.945e+02 9.824e+02 2.630e+03, threshold=1.589e+03, percent-clipped=4.0 +2023-04-01 09:30:23,021 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:26,333 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:27,192 INFO [train.py:903] (1/4) Epoch 7, batch 1700, loss[loss=0.2791, simple_loss=0.3431, pruned_loss=0.1076, over 19493.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3271, pruned_loss=0.09832, over 3818271.52 frames. ], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:30:43,953 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42681.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:31:02,728 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 09:31:15,460 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42706.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 09:31:29,071 INFO [train.py:903] (1/4) Epoch 7, batch 1750, loss[loss=0.2688, simple_loss=0.3363, pruned_loss=0.1006, over 19562.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.326, pruned_loss=0.09746, over 3828816.52 frames. ], batch size: 61, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:31:59,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 09:31:59,542 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:32:05,277 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.682e+02 7.177e+02 9.179e+02 1.731e+03, threshold=1.435e+03, percent-clipped=1.0 +2023-04-01 09:32:33,267 INFO [train.py:903] (1/4) Epoch 7, batch 1800, loss[loss=0.2749, simple_loss=0.3466, pruned_loss=0.1016, over 19552.00 frames. ], tot_loss[loss=0.26, simple_loss=0.326, pruned_loss=0.09701, over 3825564.11 frames. ], batch size: 54, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:33:27,626 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 09:33:35,066 INFO [train.py:903] (1/4) Epoch 7, batch 1850, loss[loss=0.2702, simple_loss=0.3216, pruned_loss=0.1094, over 19348.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3264, pruned_loss=0.09734, over 3829102.58 frames. ], batch size: 47, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:04,793 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 09:34:09,013 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 6.596e+02 7.909e+02 1.066e+03 2.536e+03, threshold=1.582e+03, percent-clipped=10.0 +2023-04-01 09:34:22,466 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:37,123 INFO [train.py:903] (1/4) Epoch 7, batch 1900, loss[loss=0.2742, simple_loss=0.3436, pruned_loss=0.1024, over 19449.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3269, pruned_loss=0.09742, over 3820232.60 frames. 
], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:37,305 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:48,050 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:51,101 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 09:34:51,276 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.8338, 5.1507, 2.8513, 4.5320, 1.2746, 4.8482, 5.0276, 5.3010], + device='cuda:1'), covar=tensor([0.0441, 0.0952, 0.1950, 0.0648, 0.4017, 0.0666, 0.0653, 0.0795], + device='cuda:1'), in_proj_covar=tensor([0.0381, 0.0337, 0.0389, 0.0300, 0.0363, 0.0320, 0.0306, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 09:34:57,083 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 09:35:14,092 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:35:21,941 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 09:35:32,110 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-01 09:35:38,556 INFO [train.py:903] (1/4) Epoch 7, batch 1950, loss[loss=0.2204, simple_loss=0.285, pruned_loss=0.07789, over 19715.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3266, pruned_loss=0.09717, over 3825024.32 frames. ], batch size: 45, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:36:15,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.682e+02 8.244e+02 9.665e+02 2.689e+03, threshold=1.649e+03, percent-clipped=3.0 +2023-04-01 09:36:41,125 INFO [train.py:903] (1/4) Epoch 7, batch 2000, loss[loss=0.302, simple_loss=0.3617, pruned_loss=0.1211, over 19524.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3282, pruned_loss=0.09829, over 3833010.08 frames. ], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:00,296 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:35,126 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:36,083 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 09:37:43,660 INFO [train.py:903] (1/4) Epoch 7, batch 2050, loss[loss=0.2876, simple_loss=0.3493, pruned_loss=0.1129, over 19662.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3285, pruned_loss=0.09823, over 3834318.81 frames. ], batch size: 55, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:56,190 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 09:37:57,389 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-01 09:38:08,484 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6541, 1.2666, 1.4027, 1.8660, 1.4471, 1.9541, 2.0133, 1.6960], + device='cuda:1'), covar=tensor([0.0820, 0.1053, 0.1095, 0.1007, 0.0990, 0.0719, 0.0813, 0.0671], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0236, 0.0232, 0.0264, 0.0254, 0.0221, 0.0216, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 09:38:17,409 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.956e+02 6.149e+02 7.669e+02 9.586e+02 2.177e+03, threshold=1.534e+03, percent-clipped=1.0 +2023-04-01 09:38:19,526 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 09:38:46,695 INFO [train.py:903] (1/4) Epoch 7, batch 2100, loss[loss=0.3089, simple_loss=0.3587, pruned_loss=0.1295, over 13879.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.327, pruned_loss=0.09724, over 3837181.25 frames. ], batch size: 136, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:39:13,039 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 09:39:16,693 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5365, 1.1060, 1.2998, 1.2750, 2.1523, 0.9243, 1.8597, 2.2249], + device='cuda:1'), covar=tensor([0.0533, 0.2344, 0.2323, 0.1348, 0.0795, 0.1891, 0.0910, 0.0553], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0316, 0.0326, 0.0295, 0.0322, 0.0318, 0.0298, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:39:35,491 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 09:39:41,467 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:39:48,065 INFO [train.py:903] (1/4) Epoch 7, batch 2150, loss[loss=0.2807, simple_loss=0.3448, pruned_loss=0.1083, over 19515.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3271, pruned_loss=0.09733, over 3843806.63 frames. ], batch size: 64, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:39:57,658 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:12,363 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:23,149 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 6.527e+02 7.673e+02 9.951e+02 2.226e+03, threshold=1.535e+03, percent-clipped=3.0 +2023-04-01 09:40:40,238 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 09:40:49,524 INFO [train.py:903] (1/4) Epoch 7, batch 2200, loss[loss=0.2531, simple_loss=0.3243, pruned_loss=0.09089, over 17562.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3266, pruned_loss=0.09687, over 3833149.08 frames. 
], batch size: 101, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:16,779 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3556, 1.1724, 1.6830, 1.2310, 2.9197, 3.7121, 3.6026, 3.9352], + device='cuda:1'), covar=tensor([0.1380, 0.3056, 0.2801, 0.1817, 0.0391, 0.0141, 0.0183, 0.0129], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0284, 0.0314, 0.0247, 0.0203, 0.0134, 0.0204, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 09:41:53,418 INFO [train.py:903] (1/4) Epoch 7, batch 2250, loss[loss=0.2682, simple_loss=0.3392, pruned_loss=0.09865, over 18387.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3271, pruned_loss=0.09746, over 3825940.09 frames. ], batch size: 84, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:57,901 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:19,734 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:21,846 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43241.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:27,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+02 6.206e+02 7.577e+02 9.251e+02 2.641e+03, threshold=1.515e+03, percent-clipped=5.0 +2023-04-01 09:42:52,555 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:56,673 INFO [train.py:903] (1/4) Epoch 7, batch 2300, loss[loss=0.2352, simple_loss=0.3007, pruned_loss=0.08482, over 19803.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3272, pruned_loss=0.09745, over 3829691.93 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:43:10,466 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 09:43:50,078 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 09:43:59,263 INFO [train.py:903] (1/4) Epoch 7, batch 2350, loss[loss=0.2329, simple_loss=0.2949, pruned_loss=0.08542, over 18674.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.326, pruned_loss=0.09642, over 3831148.99 frames. ], batch size: 41, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:44:20,947 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:34,240 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 6.151e+02 7.487e+02 9.533e+02 1.563e+03, threshold=1.497e+03, percent-clipped=2.0 +2023-04-01 09:44:43,149 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-01 09:44:46,800 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43356.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:55,073 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8915, 4.3203, 4.6386, 4.5970, 1.6636, 4.2354, 3.7034, 4.2617], + device='cuda:1'), covar=tensor([0.1151, 0.0771, 0.0506, 0.0453, 0.4470, 0.0498, 0.0620, 0.1008], + device='cuda:1'), in_proj_covar=tensor([0.0579, 0.0503, 0.0673, 0.0556, 0.0633, 0.0424, 0.0430, 0.0627], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 09:44:59,570 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 09:45:00,595 INFO [train.py:903] (1/4) Epoch 7, batch 2400, loss[loss=0.3087, simple_loss=0.3696, pruned_loss=0.1239, over 19664.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3269, pruned_loss=0.09712, over 3830554.88 frames. ], batch size: 58, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:45:20,074 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:45:49,725 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:46:04,429 INFO [train.py:903] (1/4) Epoch 7, batch 2450, loss[loss=0.2767, simple_loss=0.3423, pruned_loss=0.1056, over 18806.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3265, pruned_loss=0.09694, over 3837572.75 frames. ], batch size: 74, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:46:38,155 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.802e+02 7.669e+02 8.855e+02 2.284e+03, threshold=1.534e+03, percent-clipped=5.0 +2023-04-01 09:46:43,553 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 09:47:02,416 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0156, 1.2504, 1.4197, 0.5707, 2.2586, 2.3996, 2.1483, 2.5765], + device='cuda:1'), covar=tensor([0.1343, 0.3016, 0.2973, 0.2307, 0.0417, 0.0233, 0.0343, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0286, 0.0316, 0.0250, 0.0205, 0.0135, 0.0205, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 09:47:06,559 INFO [train.py:903] (1/4) Epoch 7, batch 2500, loss[loss=0.2598, simple_loss=0.319, pruned_loss=0.1003, over 19774.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3266, pruned_loss=0.09694, over 3838641.32 frames. ], batch size: 47, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:47:38,455 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-01 09:48:09,514 INFO [train.py:903] (1/4) Epoch 7, batch 2550, loss[loss=0.2351, simple_loss=0.3145, pruned_loss=0.07783, over 19669.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3257, pruned_loss=0.09597, over 3834409.34 frames. 
], batch size: 55, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:48:25,706 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:48:44,536 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.143e+02 6.070e+02 7.288e+02 8.830e+02 1.707e+03, threshold=1.458e+03, percent-clipped=2.0 +2023-04-01 09:49:01,582 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0137, 1.1193, 1.2564, 1.2115, 2.6327, 0.8851, 1.7719, 2.7674], + device='cuda:1'), covar=tensor([0.0438, 0.2470, 0.2573, 0.1666, 0.0697, 0.2307, 0.1214, 0.0422], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0315, 0.0323, 0.0295, 0.0321, 0.0319, 0.0300, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:49:05,678 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 09:49:10,228 INFO [train.py:903] (1/4) Epoch 7, batch 2600, loss[loss=0.3258, simple_loss=0.3723, pruned_loss=0.1396, over 12989.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.326, pruned_loss=0.09625, over 3832317.85 frames. ], batch size: 135, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:49:41,330 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:49:59,872 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-04-01 09:50:06,453 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,269 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,985 INFO [train.py:903] (1/4) Epoch 7, batch 2650, loss[loss=0.275, simple_loss=0.3433, pruned_loss=0.1033, over 19667.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3266, pruned_loss=0.09669, over 3827084.89 frames. ], batch size: 58, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:50:28,063 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1849, 2.0770, 1.8042, 1.6805, 1.6178, 1.8282, 0.2594, 0.9773], + device='cuda:1'), covar=tensor([0.0280, 0.0292, 0.0215, 0.0344, 0.0559, 0.0375, 0.0638, 0.0512], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0310, 0.0303, 0.0329, 0.0396, 0.0322, 0.0291, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 09:50:35,409 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 09:50:38,368 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43637.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:49,494 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 6.243e+02 7.316e+02 9.618e+02 1.411e+03, threshold=1.463e+03, percent-clipped=0.0 +2023-04-01 09:51:16,493 INFO [train.py:903] (1/4) Epoch 7, batch 2700, loss[loss=0.2154, simple_loss=0.285, pruned_loss=0.07295, over 19732.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3277, pruned_loss=0.09755, over 3825160.74 frames. 
], batch size: 45, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:51:19,201 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6784, 1.5547, 1.3648, 1.9673, 1.5106, 2.1041, 2.1032, 2.0444], + device='cuda:1'), covar=tensor([0.0816, 0.0928, 0.1060, 0.0888, 0.0917, 0.0641, 0.0791, 0.0558], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0241, 0.0236, 0.0271, 0.0258, 0.0223, 0.0219, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 09:51:25,739 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5507, 1.5725, 1.3533, 1.8596, 1.4468, 2.0657, 1.9348, 1.8977], + device='cuda:1'), covar=tensor([0.0775, 0.0853, 0.0976, 0.0805, 0.0896, 0.0588, 0.0801, 0.0564], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0241, 0.0236, 0.0271, 0.0258, 0.0222, 0.0219, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 09:52:19,230 INFO [train.py:903] (1/4) Epoch 7, batch 2750, loss[loss=0.275, simple_loss=0.3398, pruned_loss=0.1051, over 19613.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3284, pruned_loss=0.09787, over 3823960.16 frames. ], batch size: 57, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:52:43,660 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3569, 1.4337, 1.5006, 1.5114, 2.9426, 0.8859, 1.9727, 3.1602], + device='cuda:1'), covar=tensor([0.0377, 0.2269, 0.2320, 0.1504, 0.0628, 0.2347, 0.1121, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0313, 0.0322, 0.0291, 0.0319, 0.0312, 0.0297, 0.0314], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:52:55,466 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.869e+02 8.299e+02 1.091e+03 2.331e+03, threshold=1.660e+03, percent-clipped=8.0 +2023-04-01 09:53:20,548 INFO [train.py:903] (1/4) Epoch 7, batch 2800, loss[loss=0.2201, simple_loss=0.2792, pruned_loss=0.08047, over 19733.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3275, pruned_loss=0.09723, over 3837511.08 frames. ], batch size: 46, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:53:35,923 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5276, 1.1486, 1.5438, 1.2976, 2.5910, 3.7579, 3.5781, 3.9979], + device='cuda:1'), covar=tensor([0.1265, 0.2939, 0.2721, 0.1916, 0.0502, 0.0148, 0.0176, 0.0118], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0283, 0.0311, 0.0245, 0.0200, 0.0132, 0.0203, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 09:54:07,570 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:54:22,953 INFO [train.py:903] (1/4) Epoch 7, batch 2850, loss[loss=0.3048, simple_loss=0.3626, pruned_loss=0.1236, over 19631.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3279, pruned_loss=0.09742, over 3844823.56 frames. ], batch size: 57, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:50,790 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. 
limit=2.0 +2023-04-01 09:54:59,106 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.316e+02 8.520e+02 1.005e+03 1.613e+03, threshold=1.704e+03, percent-clipped=0.0 +2023-04-01 09:55:25,670 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 09:55:26,751 INFO [train.py:903] (1/4) Epoch 7, batch 2900, loss[loss=0.2613, simple_loss=0.3236, pruned_loss=0.09956, over 17520.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3276, pruned_loss=0.0972, over 3840280.70 frames. ], batch size: 101, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:55:36,422 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:56:28,083 INFO [train.py:903] (1/4) Epoch 7, batch 2950, loss[loss=0.3107, simple_loss=0.3704, pruned_loss=0.1254, over 19457.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.329, pruned_loss=0.09764, over 3822583.91 frames. ], batch size: 64, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:56:39,862 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:57:04,993 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.923e+02 7.218e+02 9.278e+02 2.092e+03, threshold=1.444e+03, percent-clipped=1.0 +2023-04-01 09:57:30,516 INFO [train.py:903] (1/4) Epoch 7, batch 3000, loss[loss=0.2467, simple_loss=0.3168, pruned_loss=0.08826, over 19615.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3279, pruned_loss=0.09781, over 3817743.08 frames. ], batch size: 50, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:57:30,516 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 09:57:43,097 INFO [train.py:937] (1/4) Epoch 7, validation: loss=0.1917, simple_loss=0.2919, pruned_loss=0.04574, over 944034.00 frames. +2023-04-01 09:57:43,098 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 09:57:49,876 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 09:57:54,965 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:07,980 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9135, 3.4811, 1.7515, 2.2295, 2.9001, 1.6179, 1.1644, 1.8307], + device='cuda:1'), covar=tensor([0.1108, 0.0422, 0.0999, 0.0584, 0.0406, 0.0892, 0.0886, 0.0559], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0277, 0.0315, 0.0237, 0.0224, 0.0305, 0.0276, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 09:58:13,872 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:47,326 INFO [train.py:903] (1/4) Epoch 7, batch 3050, loss[loss=0.2419, simple_loss=0.317, pruned_loss=0.08333, over 19667.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.329, pruned_loss=0.09857, over 3800873.97 frames. 
], batch size: 53, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:59:24,083 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+02 5.923e+02 7.306e+02 8.819e+02 1.422e+03, threshold=1.461e+03, percent-clipped=0.0 +2023-04-01 09:59:50,240 INFO [train.py:903] (1/4) Epoch 7, batch 3100, loss[loss=0.2873, simple_loss=0.3534, pruned_loss=0.1106, over 17396.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3282, pruned_loss=0.09758, over 3813946.94 frames. ], batch size: 101, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:00:17,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2169, 1.2795, 1.8430, 1.4503, 2.7039, 2.2276, 2.8324, 1.1632], + device='cuda:1'), covar=tensor([0.1863, 0.3287, 0.1720, 0.1468, 0.1170, 0.1525, 0.1277, 0.2970], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0524, 0.0518, 0.0409, 0.0562, 0.0457, 0.0631, 0.0460], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 10:00:50,925 INFO [train.py:903] (1/4) Epoch 7, batch 3150, loss[loss=0.2838, simple_loss=0.3443, pruned_loss=0.1116, over 17120.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3281, pruned_loss=0.09763, over 3822599.85 frames. ], batch size: 101, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:01:18,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 10:01:26,078 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.470e+02 8.010e+02 1.018e+03 2.357e+03, threshold=1.602e+03, percent-clipped=4.0 +2023-04-01 10:01:28,517 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:01:51,332 INFO [train.py:903] (1/4) Epoch 7, batch 3200, loss[loss=0.2433, simple_loss=0.3103, pruned_loss=0.08814, over 19593.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3279, pruned_loss=0.09808, over 3816132.39 frames. ], batch size: 52, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:02:51,374 INFO [train.py:903] (1/4) Epoch 7, batch 3250, loss[loss=0.2762, simple_loss=0.3386, pruned_loss=0.1069, over 19675.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3268, pruned_loss=0.09737, over 3831075.28 frames. 
], batch size: 60, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:21,326 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4249, 1.1363, 1.5345, 1.0235, 2.4193, 3.2629, 3.0383, 3.4625], + device='cuda:1'), covar=tensor([0.1393, 0.3183, 0.2957, 0.2168, 0.0491, 0.0182, 0.0225, 0.0154], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0287, 0.0316, 0.0251, 0.0205, 0.0135, 0.0206, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:03:27,995 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 6.408e+02 8.261e+02 1.024e+03 1.757e+03, threshold=1.652e+03, percent-clipped=4.0 +2023-04-01 10:03:28,408 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:49,212 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:52,723 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8736, 1.9204, 1.9277, 2.9750, 1.8213, 2.6133, 2.4908, 1.8076], + device='cuda:1'), covar=tensor([0.2850, 0.2207, 0.1108, 0.1249, 0.2759, 0.0961, 0.2386, 0.2111], + device='cuda:1'), in_proj_covar=tensor([0.0697, 0.0697, 0.0597, 0.0844, 0.0720, 0.0611, 0.0735, 0.0641], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 10:03:53,351 INFO [train.py:903] (1/4) Epoch 7, batch 3300, loss[loss=0.2466, simple_loss=0.3178, pruned_loss=0.08768, over 19778.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.327, pruned_loss=0.09773, over 3829470.61 frames. ], batch size: 56, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:59,180 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:00,253 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 10:04:00,648 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44272.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:57,169 INFO [train.py:903] (1/4) Epoch 7, batch 3350, loss[loss=0.2554, simple_loss=0.3169, pruned_loss=0.09698, over 19046.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3271, pruned_loss=0.09763, over 3842808.51 frames. ], batch size: 42, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:05:00,808 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:05:32,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 6.124e+02 7.865e+02 1.014e+03 2.362e+03, threshold=1.573e+03, percent-clipped=3.0 +2023-04-01 10:05:41,284 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 10:05:58,468 INFO [train.py:903] (1/4) Epoch 7, batch 3400, loss[loss=0.2777, simple_loss=0.3341, pruned_loss=0.1107, over 19660.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3259, pruned_loss=0.09681, over 3849884.50 frames. 
], batch size: 53, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:06:20,860 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:01,359 INFO [train.py:903] (1/4) Epoch 7, batch 3450, loss[loss=0.2882, simple_loss=0.3471, pruned_loss=0.1146, over 19738.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3252, pruned_loss=0.09658, over 3850635.83 frames. ], batch size: 63, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:07:06,944 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 10:07:20,143 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7953, 3.1799, 3.2417, 3.2626, 1.1047, 3.0913, 2.7631, 2.9717], + device='cuda:1'), covar=tensor([0.1207, 0.0800, 0.0698, 0.0648, 0.4127, 0.0646, 0.0650, 0.1159], + device='cuda:1'), in_proj_covar=tensor([0.0577, 0.0507, 0.0681, 0.0557, 0.0631, 0.0427, 0.0433, 0.0627], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 10:07:25,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:39,986 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 5.624e+02 7.209e+02 9.228e+02 1.461e+03, threshold=1.442e+03, percent-clipped=0.0 +2023-04-01 10:08:03,659 INFO [train.py:903] (1/4) Epoch 7, batch 3500, loss[loss=0.2332, simple_loss=0.3168, pruned_loss=0.07481, over 19778.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3252, pruned_loss=0.09604, over 3851615.62 frames. ], batch size: 56, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:08:21,647 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8803, 4.9065, 5.6509, 5.6131, 1.6160, 5.3133, 4.6008, 5.1945], + device='cuda:1'), covar=tensor([0.1050, 0.0645, 0.0464, 0.0426, 0.4494, 0.0355, 0.0439, 0.0916], + device='cuda:1'), in_proj_covar=tensor([0.0576, 0.0501, 0.0679, 0.0556, 0.0630, 0.0425, 0.0434, 0.0626], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 10:09:07,867 INFO [train.py:903] (1/4) Epoch 7, batch 3550, loss[loss=0.2421, simple_loss=0.3079, pruned_loss=0.08817, over 19855.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3254, pruned_loss=0.09612, over 3850575.70 frames. 
], batch size: 52, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:10,436 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:10,463 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3034, 1.2562, 1.8551, 1.4713, 2.4734, 2.0188, 2.6098, 1.2330], + device='cuda:1'), covar=tensor([0.2067, 0.3485, 0.1849, 0.1726, 0.1280, 0.1799, 0.1365, 0.2968], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0532, 0.0522, 0.0415, 0.0571, 0.0463, 0.0639, 0.0462], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 10:09:40,628 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,740 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:43,709 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 5.736e+02 7.487e+02 9.426e+02 2.431e+03, threshold=1.497e+03, percent-clipped=5.0 +2023-04-01 10:09:57,365 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4739, 2.5502, 1.7283, 1.6042, 2.0713, 1.3220, 1.3144, 1.7646], + device='cuda:1'), covar=tensor([0.0846, 0.0486, 0.0917, 0.0595, 0.0455, 0.0996, 0.0679, 0.0465], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0280, 0.0317, 0.0242, 0.0224, 0.0314, 0.0288, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 10:10:10,024 INFO [train.py:903] (1/4) Epoch 7, batch 3600, loss[loss=0.2113, simple_loss=0.2836, pruned_loss=0.06946, over 19756.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3261, pruned_loss=0.09649, over 3853510.56 frames. ], batch size: 47, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:10:10,286 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:12,091 INFO [train.py:903] (1/4) Epoch 7, batch 3650, loss[loss=0.299, simple_loss=0.3457, pruned_loss=0.1262, over 13246.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3255, pruned_loss=0.09621, over 3846115.62 frames. ], batch size: 135, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:11:32,686 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7665, 2.1052, 2.0871, 1.7680, 4.1841, 1.1272, 2.4110, 4.5349], + device='cuda:1'), covar=tensor([0.0361, 0.2314, 0.2153, 0.1672, 0.0626, 0.2520, 0.1230, 0.0244], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0314, 0.0322, 0.0293, 0.0320, 0.0315, 0.0295, 0.0312], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 10:11:42,322 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:49,798 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 6.222e+02 7.769e+02 9.647e+02 2.431e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:12:05,237 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. 
limit=2.0 +2023-04-01 10:12:12,966 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:13,735 INFO [train.py:903] (1/4) Epoch 7, batch 3700, loss[loss=0.2407, simple_loss=0.3133, pruned_loss=0.08403, over 19779.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.327, pruned_loss=0.09719, over 3821598.73 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:12:45,589 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,193 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,968 INFO [train.py:903] (1/4) Epoch 7, batch 3750, loss[loss=0.2455, simple_loss=0.3073, pruned_loss=0.09182, over 19734.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3274, pruned_loss=0.09765, over 3818299.56 frames. ], batch size: 51, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:13:53,509 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+02 6.149e+02 7.382e+02 9.468e+02 1.650e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-04-01 10:14:18,148 INFO [train.py:903] (1/4) Epoch 7, batch 3800, loss[loss=0.2669, simple_loss=0.3121, pruned_loss=0.1108, over 19763.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3285, pruned_loss=0.09906, over 3797058.66 frames. ], batch size: 48, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:14:23,282 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 10:14:50,714 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 10:15:04,142 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 10:15:19,321 INFO [train.py:903] (1/4) Epoch 7, batch 3850, loss[loss=0.2918, simple_loss=0.358, pruned_loss=0.1128, over 19445.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3277, pruned_loss=0.09883, over 3800185.08 frames. ], batch size: 64, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:15:57,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 6.658e+02 8.285e+02 1.095e+03 3.075e+03, threshold=1.657e+03, percent-clipped=10.0 +2023-04-01 10:16:20,989 INFO [train.py:903] (1/4) Epoch 7, batch 3900, loss[loss=0.3232, simple_loss=0.3707, pruned_loss=0.1379, over 19321.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3276, pruned_loss=0.09859, over 3808328.51 frames. ], batch size: 66, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:16:48,352 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44889.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:16:56,963 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 10:17:15,631 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:24,389 INFO [train.py:903] (1/4) Epoch 7, batch 3950, loss[loss=0.2334, simple_loss=0.3134, pruned_loss=0.07676, over 19657.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3265, pruned_loss=0.09771, over 3815125.86 frames. ], batch size: 53, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:17:29,145 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 10:17:58,837 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5267, 2.4863, 1.7390, 1.6173, 2.1823, 1.2801, 1.2879, 1.7406], + device='cuda:1'), covar=tensor([0.0775, 0.0568, 0.0956, 0.0617, 0.0493, 0.1053, 0.0741, 0.0489], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0280, 0.0314, 0.0240, 0.0225, 0.0309, 0.0285, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 10:18:00,740 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+02 5.655e+02 7.377e+02 9.527e+02 2.304e+03, threshold=1.475e+03, percent-clipped=3.0 +2023-04-01 10:18:26,678 INFO [train.py:903] (1/4) Epoch 7, batch 4000, loss[loss=0.2549, simple_loss=0.3249, pruned_loss=0.09247, over 19667.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3256, pruned_loss=0.09697, over 3809394.26 frames. ], batch size: 53, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:12,015 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45004.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:19:15,224 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 10:19:27,779 INFO [train.py:903] (1/4) Epoch 7, batch 4050, loss[loss=0.2531, simple_loss=0.3263, pruned_loss=0.08995, over 19651.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3256, pruned_loss=0.0971, over 3788049.84 frames. ], batch size: 58, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:39,284 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:05,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 6.019e+02 6.941e+02 8.524e+02 1.564e+03, threshold=1.388e+03, percent-clipped=1.0 +2023-04-01 10:20:28,972 INFO [train.py:903] (1/4) Epoch 7, batch 4100, loss[loss=0.2286, simple_loss=0.3124, pruned_loss=0.07242, over 19679.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3259, pruned_loss=0.09685, over 3788096.87 frames. ], batch size: 59, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:20:36,277 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:47,565 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7589, 4.2891, 2.4409, 3.9123, 0.9870, 3.9630, 4.0717, 4.1985], + device='cuda:1'), covar=tensor([0.0649, 0.1107, 0.2209, 0.0677, 0.3941, 0.0789, 0.0733, 0.0932], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0334, 0.0397, 0.0297, 0.0361, 0.0324, 0.0309, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 10:21:05,605 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 10:21:07,155 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:30,604 INFO [train.py:903] (1/4) Epoch 7, batch 4150, loss[loss=0.2133, simple_loss=0.2892, pruned_loss=0.06868, over 19613.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3263, pruned_loss=0.0972, over 3799153.60 frames. 
], batch size: 50, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:07,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 6.430e+02 8.235e+02 1.004e+03 2.607e+03, threshold=1.647e+03, percent-clipped=6.0 +2023-04-01 10:22:32,972 INFO [train.py:903] (1/4) Epoch 7, batch 4200, loss[loss=0.2888, simple_loss=0.3533, pruned_loss=0.1122, over 19303.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.326, pruned_loss=0.09675, over 3801446.97 frames. ], batch size: 66, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:38,352 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 10:23:13,784 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.12 vs. limit=5.0 +2023-04-01 10:23:33,351 INFO [train.py:903] (1/4) Epoch 7, batch 4250, loss[loss=0.2483, simple_loss=0.3184, pruned_loss=0.08905, over 19776.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3268, pruned_loss=0.09745, over 3781540.04 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:23:49,642 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 10:24:01,342 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 10:24:12,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.934e+02 6.110e+02 8.094e+02 1.072e+03 2.309e+03, threshold=1.619e+03, percent-clipped=5.0 +2023-04-01 10:24:27,382 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:37,369 INFO [train.py:903] (1/4) Epoch 7, batch 4300, loss[loss=0.2584, simple_loss=0.33, pruned_loss=0.09342, over 19755.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.326, pruned_loss=0.09713, over 3790167.17 frames. ], batch size: 63, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:24:57,140 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45283.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:59,372 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45285.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:02,933 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 10:25:27,055 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:32,136 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 10:25:39,640 INFO [train.py:903] (1/4) Epoch 7, batch 4350, loss[loss=0.2492, simple_loss=0.3075, pruned_loss=0.0954, over 19756.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3283, pruned_loss=0.09826, over 3784683.15 frames. ], batch size: 47, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:26:16,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 6.479e+02 7.493e+02 1.002e+03 2.532e+03, threshold=1.499e+03, percent-clipped=5.0 +2023-04-01 10:26:39,970 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45366.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:26:42,860 INFO [train.py:903] (1/4) Epoch 7, batch 4400, loss[loss=0.2399, simple_loss=0.3141, pruned_loss=0.08291, over 19532.00 frames. 
], tot_loss[loss=0.2604, simple_loss=0.3269, pruned_loss=0.09699, over 3804140.95 frames. ], batch size: 56, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:27:06,062 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 10:27:14,913 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 10:27:36,669 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6729, 1.9168, 2.3372, 2.1181, 3.1498, 3.4622, 3.6153, 3.8936], + device='cuda:1'), covar=tensor([0.1327, 0.2356, 0.2092, 0.1572, 0.0736, 0.0473, 0.0181, 0.0146], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0284, 0.0312, 0.0247, 0.0203, 0.0133, 0.0202, 0.0162], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:27:36,744 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4213, 2.3599, 1.9116, 1.9268, 1.7657, 2.1059, 1.2021, 2.0009], + device='cuda:1'), covar=tensor([0.0281, 0.0322, 0.0274, 0.0410, 0.0506, 0.0485, 0.0529, 0.0417], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0310, 0.0299, 0.0331, 0.0405, 0.0325, 0.0287, 0.0310], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:27:44,672 INFO [train.py:903] (1/4) Epoch 7, batch 4450, loss[loss=0.2479, simple_loss=0.3232, pruned_loss=0.08631, over 19356.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3263, pruned_loss=0.09657, over 3806375.09 frames. ], batch size: 70, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:27:44,834 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:16,050 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:22,921 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.630e+02 6.151e+02 7.769e+02 9.586e+02 4.695e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:28:46,110 INFO [train.py:903] (1/4) Epoch 7, batch 4500, loss[loss=0.2422, simple_loss=0.2993, pruned_loss=0.09252, over 19735.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3261, pruned_loss=0.09688, over 3808284.27 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:29:48,541 INFO [train.py:903] (1/4) Epoch 7, batch 4550, loss[loss=0.2874, simple_loss=0.3525, pruned_loss=0.1111, over 19702.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.327, pruned_loss=0.0971, over 3827099.06 frames. ], batch size: 59, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:30:00,887 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 10:30:09,105 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:17,530 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 10:30:23,717 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 10:30:27,034 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.783e+02 6.251e+02 7.662e+02 9.726e+02 2.453e+03, threshold=1.532e+03, percent-clipped=6.0 +2023-04-01 10:30:38,163 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:42,535 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5658, 4.1315, 2.3774, 3.6244, 1.0937, 3.8168, 3.8393, 3.8365], + device='cuda:1'), covar=tensor([0.0606, 0.0962, 0.2063, 0.0644, 0.3647, 0.0734, 0.0660, 0.0952], + device='cuda:1'), in_proj_covar=tensor([0.0378, 0.0328, 0.0386, 0.0291, 0.0353, 0.0317, 0.0304, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 10:30:51,049 INFO [train.py:903] (1/4) Epoch 7, batch 4600, loss[loss=0.3537, simple_loss=0.3887, pruned_loss=0.1594, over 13319.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3282, pruned_loss=0.09804, over 3815576.41 frames. ], batch size: 136, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:21,662 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9340, 2.0484, 2.1455, 2.9418, 1.8432, 2.6429, 2.6615, 2.0594], + device='cuda:1'), covar=tensor([0.2908, 0.2355, 0.1159, 0.1406, 0.2832, 0.1089, 0.2297, 0.1974], + device='cuda:1'), in_proj_covar=tensor([0.0711, 0.0709, 0.0600, 0.0851, 0.0728, 0.0622, 0.0743, 0.0646], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 10:31:48,241 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-01 10:31:54,302 INFO [train.py:903] (1/4) Epoch 7, batch 4650, loss[loss=0.2309, simple_loss=0.2968, pruned_loss=0.08247, over 19464.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3276, pruned_loss=0.09757, over 3820340.99 frames. ], batch size: 49, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:57,360 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 10:32:11,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 10:32:24,511 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 10:32:33,275 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.859e+02 7.403e+02 8.847e+02 2.429e+03, threshold=1.481e+03, percent-clipped=2.0 +2023-04-01 10:32:55,951 INFO [train.py:903] (1/4) Epoch 7, batch 4700, loss[loss=0.2476, simple_loss=0.3068, pruned_loss=0.09415, over 19757.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3265, pruned_loss=0.09708, over 3816049.99 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:33:09,342 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 10:33:18,882 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 10:33:47,801 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45710.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:33:58,623 INFO [train.py:903] (1/4) Epoch 7, batch 4750, loss[loss=0.2977, simple_loss=0.3655, pruned_loss=0.1149, over 19116.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3268, pruned_loss=0.0972, over 3829137.43 frames. 
], batch size: 69, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:34:01,195 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:34:11,843 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1145, 2.1223, 1.6194, 1.6344, 1.3639, 1.5862, 0.4712, 0.9960], + device='cuda:1'), covar=tensor([0.0528, 0.0454, 0.0375, 0.0538, 0.0944, 0.0702, 0.0752, 0.0770], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0313, 0.0310, 0.0332, 0.0404, 0.0327, 0.0288, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:34:36,016 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 6.576e+02 8.008e+02 9.322e+02 2.457e+03, threshold=1.602e+03, percent-clipped=6.0 +2023-04-01 10:35:01,462 INFO [train.py:903] (1/4) Epoch 7, batch 4800, loss[loss=0.3598, simple_loss=0.3953, pruned_loss=0.1621, over 13702.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.327, pruned_loss=0.09715, over 3825639.46 frames. ], batch size: 135, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:35:14,194 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 10:35:26,476 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:29,632 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:30,830 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0599, 4.3719, 4.6905, 4.7152, 1.5636, 4.3464, 3.9414, 4.3037], + device='cuda:1'), covar=tensor([0.1051, 0.0678, 0.0513, 0.0418, 0.4578, 0.0420, 0.0469, 0.1013], + device='cuda:1'), in_proj_covar=tensor([0.0574, 0.0504, 0.0681, 0.0557, 0.0633, 0.0428, 0.0433, 0.0632], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 10:35:37,411 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5502, 1.3065, 1.3965, 1.8770, 1.2969, 1.6719, 1.7272, 1.5808], + device='cuda:1'), covar=tensor([0.0713, 0.0931, 0.0948, 0.0670, 0.0864, 0.0715, 0.0787, 0.0669], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0237, 0.0233, 0.0264, 0.0253, 0.0220, 0.0217, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 10:35:58,119 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,149 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:02,115 INFO [train.py:903] (1/4) Epoch 7, batch 4850, loss[loss=0.2568, simple_loss=0.3345, pruned_loss=0.08954, over 18662.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3278, pruned_loss=0.09823, over 3825328.58 frames. ], batch size: 74, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:36:10,586 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45825.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:36:23,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 10:36:25,129 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 10:36:27,414 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:40,692 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.989e+02 6.823e+02 8.718e+02 1.115e+03 2.265e+03, threshold=1.744e+03, percent-clipped=6.0 +2023-04-01 10:36:48,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 10:36:54,261 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 10:36:54,291 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 10:37:03,556 INFO [train.py:903] (1/4) Epoch 7, batch 4900, loss[loss=0.2295, simple_loss=0.3052, pruned_loss=0.07687, over 18693.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3274, pruned_loss=0.09799, over 3829462.74 frames. ], batch size: 74, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:37:04,793 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 10:37:14,476 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:37:26,127 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 10:38:05,106 INFO [train.py:903] (1/4) Epoch 7, batch 4950, loss[loss=0.2233, simple_loss=0.2972, pruned_loss=0.07476, over 19384.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3274, pruned_loss=0.09721, over 3832661.65 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:38:24,756 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 10:38:44,274 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+02 6.277e+02 7.738e+02 9.356e+02 2.304e+03, threshold=1.548e+03, percent-clipped=1.0 +2023-04-01 10:38:44,961 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.95 vs. limit=5.0 +2023-04-01 10:38:47,739 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 10:39:09,683 INFO [train.py:903] (1/4) Epoch 7, batch 5000, loss[loss=0.2361, simple_loss=0.2951, pruned_loss=0.08856, over 19760.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3274, pruned_loss=0.09758, over 3826389.18 frames. ], batch size: 46, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:39:18,622 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1858, 0.9395, 1.1562, 1.8150, 1.2578, 1.2613, 1.5003, 1.3171], + device='cuda:1'), covar=tensor([0.1098, 0.1693, 0.1328, 0.0826, 0.1038, 0.1336, 0.1098, 0.1038], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0235, 0.0234, 0.0264, 0.0252, 0.0217, 0.0217, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 10:39:20,567 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 10:39:30,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 10:40:12,336 INFO [train.py:903] (1/4) Epoch 7, batch 5050, loss[loss=0.2216, simple_loss=0.293, pruned_loss=0.07504, over 19593.00 frames. 
], tot_loss[loss=0.2617, simple_loss=0.3281, pruned_loss=0.09763, over 3834239.73 frames. ], batch size: 50, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:40:49,615 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 10:40:51,923 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 6.208e+02 7.894e+02 9.516e+02 2.052e+03, threshold=1.579e+03, percent-clipped=3.0 +2023-04-01 10:41:08,571 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46064.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:41:13,141 INFO [train.py:903] (1/4) Epoch 7, batch 5100, loss[loss=0.2623, simple_loss=0.335, pruned_loss=0.09474, over 19772.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3269, pruned_loss=0.09672, over 3850091.90 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:41:24,901 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 10:41:28,160 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 10:41:28,598 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46081.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:41:32,533 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 10:42:00,040 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46106.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:42:08,080 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3599, 1.2608, 1.6886, 1.5706, 3.2238, 4.4425, 4.3994, 4.8562], + device='cuda:1'), covar=tensor([0.1426, 0.3078, 0.2861, 0.1739, 0.0393, 0.0178, 0.0137, 0.0084], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0244, 0.0204, 0.0133, 0.0203, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:42:13,343 INFO [train.py:903] (1/4) Epoch 7, batch 5150, loss[loss=0.2217, simple_loss=0.2875, pruned_loss=0.07797, over 19701.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3271, pruned_loss=0.09688, over 3845123.35 frames. ], batch size: 45, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:42:27,452 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 10:42:37,818 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:42:53,642 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.315e+02 6.102e+02 7.228e+02 8.853e+02 1.806e+03, threshold=1.446e+03, percent-clipped=2.0 +2023-04-01 10:43:00,551 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 10:43:15,665 INFO [train.py:903] (1/4) Epoch 7, batch 5200, loss[loss=0.2759, simple_loss=0.3341, pruned_loss=0.1088, over 19390.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3259, pruned_loss=0.09624, over 3849103.93 frames. 
], batch size: 48, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:43:30,884 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46179.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:43:31,710 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 10:44:18,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 10:44:19,699 INFO [train.py:903] (1/4) Epoch 7, batch 5250, loss[loss=0.2669, simple_loss=0.3397, pruned_loss=0.097, over 18194.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3264, pruned_loss=0.09629, over 3842319.61 frames. ], batch size: 83, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:44:23,434 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:44:52,979 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1004, 1.6042, 1.6906, 2.0119, 2.0169, 1.8024, 1.8625, 1.9675], + device='cuda:1'), covar=tensor([0.0790, 0.1548, 0.1176, 0.0909, 0.1040, 0.0468, 0.0878, 0.0565], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0356, 0.0285, 0.0240, 0.0299, 0.0242, 0.0272, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 10:44:58,427 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 5.998e+02 7.245e+02 8.826e+02 1.462e+03, threshold=1.449e+03, percent-clipped=1.0 +2023-04-01 10:45:00,901 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:45:21,289 INFO [train.py:903] (1/4) Epoch 7, batch 5300, loss[loss=0.2586, simple_loss=0.3254, pruned_loss=0.09595, over 18996.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3263, pruned_loss=0.09653, over 3833978.91 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:45:34,735 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 10:45:39,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 10:45:42,162 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8272, 1.5458, 1.4979, 1.9744, 1.7230, 2.2264, 2.2541, 1.9144], + device='cuda:1'), covar=tensor([0.0752, 0.0928, 0.1025, 0.0922, 0.0881, 0.0622, 0.0838, 0.0616], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0234, 0.0234, 0.0263, 0.0253, 0.0219, 0.0216, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 10:45:53,397 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46294.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:46:22,053 INFO [train.py:903] (1/4) Epoch 7, batch 5350, loss[loss=0.2844, simple_loss=0.3497, pruned_loss=0.1096, over 19667.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3259, pruned_loss=0.09663, over 3840263.13 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:46:46,801 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:47:00,334 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 10:47:03,770 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.681e+02 7.157e+02 9.578e+02 3.754e+03, threshold=1.431e+03, percent-clipped=4.0 +2023-04-01 10:47:12,091 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7218, 1.7443, 1.4008, 1.3088, 1.2420, 1.4490, 0.1083, 0.5542], + device='cuda:1'), covar=tensor([0.0357, 0.0342, 0.0222, 0.0305, 0.0749, 0.0326, 0.0616, 0.0617], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0307, 0.0304, 0.0328, 0.0397, 0.0317, 0.0287, 0.0310], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 10:47:26,532 INFO [train.py:903] (1/4) Epoch 7, batch 5400, loss[loss=0.2588, simple_loss=0.3249, pruned_loss=0.09639, over 19478.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3255, pruned_loss=0.09608, over 3837575.80 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:47:37,299 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.91 vs. limit=5.0 +2023-04-01 10:48:29,979 INFO [train.py:903] (1/4) Epoch 7, batch 5450, loss[loss=0.225, simple_loss=0.3021, pruned_loss=0.07392, over 19406.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3272, pruned_loss=0.09777, over 3833875.76 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:48:35,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0005, 4.4827, 4.7475, 4.6701, 1.5656, 4.3430, 3.8552, 4.3792], + device='cuda:1'), covar=tensor([0.1174, 0.0667, 0.0486, 0.0453, 0.4708, 0.0407, 0.0479, 0.0975], + device='cuda:1'), in_proj_covar=tensor([0.0578, 0.0511, 0.0691, 0.0573, 0.0642, 0.0438, 0.0437, 0.0644], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 10:48:49,839 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:10,723 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 7.102e+02 8.361e+02 1.036e+03 2.875e+03, threshold=1.672e+03, percent-clipped=7.0 +2023-04-01 10:49:23,131 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46460.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:31,706 INFO [train.py:903] (1/4) Epoch 7, batch 5500, loss[loss=0.2188, simple_loss=0.2903, pruned_loss=0.07363, over 19752.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3274, pruned_loss=0.09761, over 3838540.24 frames. ], batch size: 47, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:49:58,195 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 10:50:20,852 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:50:33,176 INFO [train.py:903] (1/4) Epoch 7, batch 5550, loss[loss=0.2456, simple_loss=0.3195, pruned_loss=0.08582, over 19784.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.326, pruned_loss=0.09664, over 3838841.41 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:50:43,592 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 10:50:51,410 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:51:02,849 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 10:51:15,056 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 5.815e+02 7.044e+02 9.146e+02 3.032e+03, threshold=1.409e+03, percent-clipped=3.0 +2023-04-01 10:51:31,072 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 10:51:35,753 INFO [train.py:903] (1/4) Epoch 7, batch 5600, loss[loss=0.2707, simple_loss=0.3417, pruned_loss=0.09981, over 19471.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3243, pruned_loss=0.09515, over 3848351.34 frames. ], batch size: 64, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:06,682 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:32,951 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:38,683 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:40,322 INFO [train.py:903] (1/4) Epoch 7, batch 5650, loss[loss=0.2585, simple_loss=0.3311, pruned_loss=0.09291, over 19667.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3253, pruned_loss=0.09593, over 3852176.34 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:42,904 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:03,526 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46638.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:20,361 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.976e+02 7.674e+02 9.559e+02 1.706e+03, threshold=1.535e+03, percent-clipped=4.0 +2023-04-01 10:53:28,305 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 10:53:42,080 INFO [train.py:903] (1/4) Epoch 7, batch 5700, loss[loss=0.2767, simple_loss=0.349, pruned_loss=0.1022, over 19671.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3254, pruned_loss=0.09586, over 3845239.43 frames. ], batch size: 55, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:53:55,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4079, 2.2886, 1.6682, 1.4102, 2.0768, 1.1775, 1.2972, 1.6066], + device='cuda:1'), covar=tensor([0.0824, 0.0472, 0.0884, 0.0625, 0.0443, 0.1042, 0.0655, 0.0498], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0288, 0.0320, 0.0243, 0.0227, 0.0317, 0.0287, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 10:54:43,275 INFO [train.py:903] (1/4) Epoch 7, batch 5750, loss[loss=0.2632, simple_loss=0.3392, pruned_loss=0.09358, over 19370.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3254, pruned_loss=0.09567, over 3852563.31 frames. ], batch size: 70, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:54:45,352 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.83 vs. 
limit=5.0 +2023-04-01 10:54:45,641 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 10:54:55,169 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 10:54:59,797 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 10:55:25,495 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+02 6.148e+02 7.021e+02 8.191e+02 1.564e+03, threshold=1.404e+03, percent-clipped=1.0 +2023-04-01 10:55:28,123 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:55:43,512 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8975, 4.4883, 2.6767, 3.9412, 1.3496, 4.1245, 4.1387, 4.3275], + device='cuda:1'), covar=tensor([0.0501, 0.0948, 0.1946, 0.0673, 0.3737, 0.0627, 0.0718, 0.0702], + device='cuda:1'), in_proj_covar=tensor([0.0389, 0.0341, 0.0402, 0.0298, 0.0367, 0.0323, 0.0318, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 10:55:45,401 INFO [train.py:903] (1/4) Epoch 7, batch 5800, loss[loss=0.2257, simple_loss=0.2907, pruned_loss=0.08038, over 19063.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3247, pruned_loss=0.09493, over 3849691.04 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:56:49,191 INFO [train.py:903] (1/4) Epoch 7, batch 5850, loss[loss=0.1979, simple_loss=0.2734, pruned_loss=0.0612, over 19136.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3244, pruned_loss=0.09502, over 3844807.40 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:02,083 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:57:29,389 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.374e+02 9.303e+02 2.879e+03, threshold=1.475e+03, percent-clipped=6.0 +2023-04-01 10:57:51,529 INFO [train.py:903] (1/4) Epoch 7, batch 5900, loss[loss=0.199, simple_loss=0.2725, pruned_loss=0.06272, over 19429.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3247, pruned_loss=0.09523, over 3840492.24 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:57,281 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 10:58:16,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 10:58:52,945 INFO [train.py:903] (1/4) Epoch 7, batch 5950, loss[loss=0.207, simple_loss=0.2798, pruned_loss=0.06708, over 19383.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3252, pruned_loss=0.09543, over 3829509.50 frames. 
], batch size: 48, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:59:34,060 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.644e+02 7.805e+02 9.690e+02 1.794e+03, threshold=1.561e+03, percent-clipped=4.0 +2023-04-01 10:59:40,934 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:48,547 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:52,803 INFO [train.py:903] (1/4) Epoch 7, batch 6000, loss[loss=0.2822, simple_loss=0.338, pruned_loss=0.1133, over 17476.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.326, pruned_loss=0.09682, over 3829300.31 frames. ], batch size: 101, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 10:59:52,804 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 11:00:05,284 INFO [train.py:937] (1/4) Epoch 7, validation: loss=0.1903, simple_loss=0.2902, pruned_loss=0.04516, over 944034.00 frames. +2023-04-01 11:00:05,285 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 11:00:57,533 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47009.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:09,015 INFO [train.py:903] (1/4) Epoch 7, batch 6050, loss[loss=0.2677, simple_loss=0.3362, pruned_loss=0.09954, over 19653.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3265, pruned_loss=0.09671, over 3820078.92 frames. ], batch size: 58, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:01:30,886 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:42,123 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2263, 2.9200, 2.1033, 2.6787, 0.9989, 2.6800, 2.6358, 2.7785], + device='cuda:1'), covar=tensor([0.1085, 0.1364, 0.1993, 0.0921, 0.3402, 0.1063, 0.1067, 0.1268], + device='cuda:1'), in_proj_covar=tensor([0.0388, 0.0341, 0.0399, 0.0295, 0.0366, 0.0322, 0.0316, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 11:01:49,942 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.317e+02 6.018e+02 7.577e+02 1.005e+03 2.434e+03, threshold=1.515e+03, percent-clipped=7.0 +2023-04-01 11:01:57,659 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-01 11:02:04,379 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 11:02:13,566 INFO [train.py:903] (1/4) Epoch 7, batch 6100, loss[loss=0.2637, simple_loss=0.3298, pruned_loss=0.09876, over 19735.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3259, pruned_loss=0.0965, over 3817274.77 frames. 
], batch size: 63, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:02:18,752 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:26,737 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:34,929 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1440, 2.0868, 1.6426, 1.4735, 1.2960, 1.6308, 0.3662, 0.9841], + device='cuda:1'), covar=tensor([0.0294, 0.0307, 0.0252, 0.0400, 0.0768, 0.0420, 0.0643, 0.0598], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0312, 0.0309, 0.0329, 0.0405, 0.0323, 0.0290, 0.0315], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:03:09,039 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2492, 2.7896, 2.9105, 2.6172, 4.7684, 1.7383, 3.3439, 4.9242], + device='cuda:1'), covar=tensor([0.0269, 0.1891, 0.1852, 0.1497, 0.0539, 0.2356, 0.1014, 0.0227], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0319, 0.0327, 0.0301, 0.0331, 0.0322, 0.0300, 0.0319], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:03:15,748 INFO [train.py:903] (1/4) Epoch 7, batch 6150, loss[loss=0.2658, simple_loss=0.3298, pruned_loss=0.1009, over 17358.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3242, pruned_loss=0.09532, over 3806857.55 frames. ], batch size: 101, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:03:23,651 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 11:03:41,639 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 11:03:56,564 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.986e+02 7.602e+02 9.492e+02 2.168e+03, threshold=1.520e+03, percent-clipped=5.0 +2023-04-01 11:04:15,681 INFO [train.py:903] (1/4) Epoch 7, batch 6200, loss[loss=0.2331, simple_loss=0.2937, pruned_loss=0.08627, over 19776.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3253, pruned_loss=0.09633, over 3811555.26 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:04:20,356 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:05:17,520 INFO [train.py:903] (1/4) Epoch 7, batch 6250, loss[loss=0.2674, simple_loss=0.339, pruned_loss=0.09797, over 19673.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3257, pruned_loss=0.09653, over 3803721.06 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:05:26,814 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 11:05:47,057 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-01 11:05:57,378 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 6.379e+02 7.672e+02 9.726e+02 2.182e+03, threshold=1.534e+03, percent-clipped=2.0 +2023-04-01 11:06:08,316 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:06:15,466 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0420, 3.6779, 2.0851, 1.8636, 3.2180, 1.6219, 1.1027, 2.0176], + device='cuda:1'), covar=tensor([0.0699, 0.0263, 0.0633, 0.0559, 0.0291, 0.0794, 0.0756, 0.0385], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0285, 0.0317, 0.0241, 0.0226, 0.0314, 0.0288, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:06:19,225 INFO [train.py:903] (1/4) Epoch 7, batch 6300, loss[loss=0.2211, simple_loss=0.2897, pruned_loss=0.07627, over 19376.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3256, pruned_loss=0.09673, over 3814070.09 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:06:43,112 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:21,231 INFO [train.py:903] (1/4) Epoch 7, batch 6350, loss[loss=0.2347, simple_loss=0.3149, pruned_loss=0.0772, over 19580.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3249, pruned_loss=0.09624, over 3813228.22 frames. ], batch size: 61, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:07:33,187 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:41,331 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47335.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:49,423 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.97 vs. limit=5.0 +2023-04-01 11:08:02,955 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 5.885e+02 7.911e+02 1.072e+03 3.322e+03, threshold=1.582e+03, percent-clipped=7.0 +2023-04-01 11:08:05,650 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:13,920 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:22,746 INFO [train.py:903] (1/4) Epoch 7, batch 6400, loss[loss=0.2413, simple_loss=0.2952, pruned_loss=0.09364, over 19369.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3241, pruned_loss=0.09548, over 3819117.65 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:08:48,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.48 vs. limit=5.0 +2023-04-01 11:09:00,237 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:09:25,579 INFO [train.py:903] (1/4) Epoch 7, batch 6450, loss[loss=0.2317, simple_loss=0.3084, pruned_loss=0.07747, over 19611.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3245, pruned_loss=0.09533, over 3811277.27 frames. 
], batch size: 50, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:05,689 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 6.283e+02 7.588e+02 1.008e+03 1.535e+03, threshold=1.518e+03, percent-clipped=0.0 +2023-04-01 11:10:08,034 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 11:10:26,639 INFO [train.py:903] (1/4) Epoch 7, batch 6500, loss[loss=0.2471, simple_loss=0.3036, pruned_loss=0.0953, over 19741.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3246, pruned_loss=0.09496, over 3825013.80 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:29,891 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 11:11:27,822 INFO [train.py:903] (1/4) Epoch 7, batch 6550, loss[loss=0.2712, simple_loss=0.3417, pruned_loss=0.1003, over 19590.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3244, pruned_loss=0.09513, over 3829258.27 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:11:58,243 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:12:10,167 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+02 6.347e+02 8.071e+02 1.082e+03 2.174e+03, threshold=1.614e+03, percent-clipped=4.0 +2023-04-01 11:12:20,950 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3270, 1.2516, 1.7339, 1.2061, 2.8084, 3.8005, 3.5643, 3.9982], + device='cuda:1'), covar=tensor([0.1447, 0.3137, 0.2799, 0.2092, 0.0438, 0.0142, 0.0183, 0.0144], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0286, 0.0316, 0.0247, 0.0208, 0.0136, 0.0205, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:12:29,849 INFO [train.py:903] (1/4) Epoch 7, batch 6600, loss[loss=0.2089, simple_loss=0.2759, pruned_loss=0.07098, over 19762.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3239, pruned_loss=0.09528, over 3820925.45 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:12:30,268 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:13,381 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:31,696 INFO [train.py:903] (1/4) Epoch 7, batch 6650, loss[loss=0.2288, simple_loss=0.2949, pruned_loss=0.08129, over 19383.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3241, pruned_loss=0.09556, over 3815478.14 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:14:11,832 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 6.649e+02 7.986e+02 1.008e+03 1.623e+03, threshold=1.597e+03, percent-clipped=0.0 +2023-04-01 11:14:32,253 INFO [train.py:903] (1/4) Epoch 7, batch 6700, loss[loss=0.2402, simple_loss=0.3035, pruned_loss=0.08842, over 19753.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3234, pruned_loss=0.09511, over 3830876.71 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:30,803 INFO [train.py:903] (1/4) Epoch 7, batch 6750, loss[loss=0.3223, simple_loss=0.3796, pruned_loss=0.1325, over 19699.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3242, pruned_loss=0.09548, over 3836503.99 frames. 
], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:31,133 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:15:58,014 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:09,115 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.932e+02 5.831e+02 7.200e+02 8.445e+02 1.747e+03, threshold=1.440e+03, percent-clipped=3.0 +2023-04-01 11:16:15,178 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:28,686 INFO [train.py:903] (1/4) Epoch 7, batch 6800, loss[loss=0.2292, simple_loss=0.3, pruned_loss=0.07922, over 19740.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3251, pruned_loss=0.09618, over 3809356.08 frames. ], batch size: 51, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:16:34,644 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7358, 1.5514, 1.5642, 2.0663, 1.6084, 2.1420, 2.2330, 1.9565], + device='cuda:1'), covar=tensor([0.0826, 0.0983, 0.1081, 0.0927, 0.1015, 0.0646, 0.0825, 0.0634], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0234, 0.0233, 0.0263, 0.0248, 0.0218, 0.0211, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 11:16:47,733 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.26 vs. limit=5.0 +2023-04-01 11:17:14,699 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 11:17:15,198 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 11:17:18,389 INFO [train.py:903] (1/4) Epoch 8, batch 0, loss[loss=0.2448, simple_loss=0.3202, pruned_loss=0.08467, over 19375.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3202, pruned_loss=0.08467, over 19375.00 frames. ], batch size: 70, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:17:18,389 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 11:17:30,985 INFO [train.py:937] (1/4) Epoch 8, validation: loss=0.1916, simple_loss=0.2915, pruned_loss=0.0458, over 944034.00 frames. +2023-04-01 11:17:30,987 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 11:17:41,966 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 11:17:43,326 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:07,542 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47827.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:18:08,104 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 11:18:32,755 INFO [train.py:903] (1/4) Epoch 8, batch 50, loss[loss=0.2579, simple_loss=0.3279, pruned_loss=0.09401, over 19567.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3227, pruned_loss=0.09352, over 873366.38 frames. 
], batch size: 52, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:18:38,614 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 5.925e+02 7.453e+02 9.478e+02 2.348e+03, threshold=1.491e+03, percent-clipped=8.0 +2023-04-01 11:18:44,789 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:19:06,088 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 11:19:32,492 INFO [train.py:903] (1/4) Epoch 8, batch 100, loss[loss=0.3817, simple_loss=0.4061, pruned_loss=0.1787, over 13444.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3222, pruned_loss=0.09393, over 1519701.34 frames. ], batch size: 135, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:19:42,621 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 11:20:32,671 INFO [train.py:903] (1/4) Epoch 8, batch 150, loss[loss=0.2907, simple_loss=0.3415, pruned_loss=0.1199, over 13185.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3232, pruned_loss=0.09385, over 2034019.98 frames. ], batch size: 136, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:20:38,421 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.629e+02 6.208e+02 7.626e+02 9.440e+02 2.273e+03, threshold=1.525e+03, percent-clipped=3.0 +2023-04-01 11:21:07,748 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:21:29,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 11:21:32,696 INFO [train.py:903] (1/4) Epoch 8, batch 200, loss[loss=0.2623, simple_loss=0.333, pruned_loss=0.09573, over 19053.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3226, pruned_loss=0.09301, over 2442455.40 frames. ], batch size: 69, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:21:36,549 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:22:15,575 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9692, 1.1916, 1.4888, 0.5786, 2.1375, 2.4818, 2.0596, 2.5867], + device='cuda:1'), covar=tensor([0.1364, 0.3046, 0.2842, 0.2136, 0.0437, 0.0216, 0.0379, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0247, 0.0208, 0.0136, 0.0204, 0.0172], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:22:35,821 INFO [train.py:903] (1/4) Epoch 8, batch 250, loss[loss=0.2508, simple_loss=0.3251, pruned_loss=0.08827, over 19702.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3218, pruned_loss=0.09268, over 2747953.26 frames. 
], batch size: 59, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:22:42,341 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 6.073e+02 7.344e+02 8.973e+02 2.163e+03, threshold=1.469e+03, percent-clipped=4.0 +2023-04-01 11:23:30,336 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:31,705 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1913, 1.2925, 1.6977, 1.4223, 2.4765, 2.0567, 2.7117, 0.9378], + device='cuda:1'), covar=tensor([0.1941, 0.3294, 0.1860, 0.1546, 0.1268, 0.1707, 0.1227, 0.3226], + device='cuda:1'), in_proj_covar=tensor([0.0455, 0.0533, 0.0530, 0.0416, 0.0572, 0.0467, 0.0624, 0.0459], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 11:23:36,833 INFO [train.py:903] (1/4) Epoch 8, batch 300, loss[loss=0.1932, simple_loss=0.2668, pruned_loss=0.05979, over 19096.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3229, pruned_loss=0.09338, over 2984969.22 frames. ], batch size: 42, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:23:41,627 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:55,915 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:59,571 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2362, 1.4640, 2.0188, 1.5544, 2.8958, 2.5579, 3.1548, 1.2889], + device='cuda:1'), covar=tensor([0.2156, 0.3662, 0.2080, 0.1645, 0.1624, 0.1769, 0.1790, 0.3441], + device='cuda:1'), in_proj_covar=tensor([0.0458, 0.0536, 0.0532, 0.0418, 0.0575, 0.0468, 0.0629, 0.0461], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 11:24:14,333 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8252, 1.8459, 1.9092, 2.5022, 1.8969, 2.3880, 2.2494, 1.8238], + device='cuda:1'), covar=tensor([0.2153, 0.1812, 0.0857, 0.0912, 0.1778, 0.0703, 0.1513, 0.1457], + device='cuda:1'), in_proj_covar=tensor([0.0721, 0.0723, 0.0607, 0.0852, 0.0734, 0.0632, 0.0748, 0.0657], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 11:24:27,784 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:37,682 INFO [train.py:903] (1/4) Epoch 8, batch 350, loss[loss=0.2201, simple_loss=0.3042, pruned_loss=0.06797, over 19705.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3221, pruned_loss=0.09251, over 3176291.76 frames. ], batch size: 59, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:24:39,954 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 11:24:42,106 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:43,059 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.994e+02 6.192e+02 7.149e+02 9.949e+02 1.629e+03, threshold=1.430e+03, percent-clipped=6.0 +2023-04-01 11:25:08,559 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48171.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:25:22,164 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0697, 0.8389, 1.0609, 1.4982, 1.0570, 0.9455, 1.2787, 1.0106], + device='cuda:1'), covar=tensor([0.1145, 0.2059, 0.1448, 0.0762, 0.0960, 0.1599, 0.1061, 0.1250], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0235, 0.0232, 0.0262, 0.0246, 0.0217, 0.0211, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 11:25:33,647 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0402, 1.3292, 2.0171, 1.9757, 3.0622, 4.5865, 4.6062, 4.9844], + device='cuda:1'), covar=tensor([0.1120, 0.3286, 0.2737, 0.1591, 0.0461, 0.0148, 0.0131, 0.0086], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0284, 0.0313, 0.0244, 0.0207, 0.0136, 0.0202, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:25:37,719 INFO [train.py:903] (1/4) Epoch 8, batch 400, loss[loss=0.304, simple_loss=0.3674, pruned_loss=0.1203, over 19662.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3243, pruned_loss=0.09432, over 3327774.16 frames. ], batch size: 55, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:03,419 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:26:05,323 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 11:26:21,197 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48230.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:26:33,624 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7244, 1.3930, 1.3517, 1.9711, 1.5715, 1.9248, 2.0209, 1.6985], + device='cuda:1'), covar=tensor([0.0715, 0.0974, 0.1099, 0.0824, 0.0850, 0.0689, 0.0763, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0235, 0.0234, 0.0263, 0.0248, 0.0219, 0.0212, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 11:26:38,936 INFO [train.py:903] (1/4) Epoch 8, batch 450, loss[loss=0.3024, simple_loss=0.3602, pruned_loss=0.1223, over 19487.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3233, pruned_loss=0.09333, over 3449642.32 frames. ], batch size: 64, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:45,631 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.626e+02 7.034e+02 8.568e+02 1.629e+03, threshold=1.407e+03, percent-clipped=1.0 +2023-04-01 11:27:03,636 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:27:11,137 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 11:27:12,251 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 11:27:27,200 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48286.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:27:41,080 INFO [train.py:903] (1/4) Epoch 8, batch 500, loss[loss=0.2374, simple_loss=0.2986, pruned_loss=0.08814, over 19751.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3233, pruned_loss=0.09417, over 3525147.60 frames. ], batch size: 45, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:37,119 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:28:41,343 INFO [train.py:903] (1/4) Epoch 8, batch 550, loss[loss=0.2907, simple_loss=0.3531, pruned_loss=0.1141, over 19690.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3227, pruned_loss=0.09371, over 3602891.43 frames. ], batch size: 59, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:47,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.262e+02 6.228e+02 7.563e+02 9.337e+02 1.593e+03, threshold=1.513e+03, percent-clipped=3.0 +2023-04-01 11:28:47,302 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7843, 4.3127, 2.7057, 3.6796, 1.1277, 3.9770, 4.0403, 4.1458], + device='cuda:1'), covar=tensor([0.0544, 0.1009, 0.1833, 0.0786, 0.3752, 0.0738, 0.0706, 0.0825], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0344, 0.0398, 0.0302, 0.0366, 0.0325, 0.0318, 0.0352], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 11:29:23,945 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2317, 1.3357, 1.1878, 0.9699, 1.0140, 1.1309, 0.1126, 0.4133], + device='cuda:1'), covar=tensor([0.0346, 0.0363, 0.0218, 0.0313, 0.0811, 0.0288, 0.0652, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0313, 0.0309, 0.0328, 0.0401, 0.0321, 0.0289, 0.0312], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:29:42,860 INFO [train.py:903] (1/4) Epoch 8, batch 600, loss[loss=0.3092, simple_loss=0.3622, pruned_loss=0.1281, over 19342.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3215, pruned_loss=0.09285, over 3652975.11 frames. ], batch size: 66, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:30:05,023 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:26,077 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 11:30:31,029 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:39,410 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:44,669 INFO [train.py:903] (1/4) Epoch 8, batch 650, loss[loss=0.2534, simple_loss=0.3243, pruned_loss=0.09124, over 19747.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3223, pruned_loss=0.09328, over 3692346.09 frames. 
], batch size: 51, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:30:50,367 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 6.183e+02 7.491e+02 9.829e+02 2.830e+03, threshold=1.498e+03, percent-clipped=3.0 +2023-04-01 11:31:17,328 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48471.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:31:45,460 INFO [train.py:903] (1/4) Epoch 8, batch 700, loss[loss=0.2684, simple_loss=0.3453, pruned_loss=0.09569, over 19667.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.322, pruned_loss=0.09291, over 3724812.84 frames. ], batch size: 55, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:31:45,787 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:17,639 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:44,017 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48542.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:32:48,880 INFO [train.py:903] (1/4) Epoch 8, batch 750, loss[loss=0.2587, simple_loss=0.3291, pruned_loss=0.09419, over 19537.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3234, pruned_loss=0.09386, over 3752117.31 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:32:49,279 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:52,550 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48549.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:55,664 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.054e+02 7.636e+02 9.380e+02 1.990e+03, threshold=1.527e+03, percent-clipped=3.0 +2023-04-01 11:33:13,731 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48567.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:33:21,448 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48574.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:33:36,163 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4086, 1.5429, 1.9727, 1.8623, 3.0231, 3.9940, 3.9569, 4.4219], + device='cuda:1'), covar=tensor([0.1528, 0.3057, 0.2732, 0.1752, 0.0577, 0.0257, 0.0165, 0.0112], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0285, 0.0316, 0.0246, 0.0209, 0.0135, 0.0202, 0.0172], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:33:38,457 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6686, 1.9106, 2.1273, 2.6568, 2.3400, 2.3717, 2.0010, 2.6339], + device='cuda:1'), covar=tensor([0.0777, 0.1843, 0.1237, 0.0914, 0.1323, 0.0438, 0.1114, 0.0539], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0358, 0.0285, 0.0238, 0.0303, 0.0243, 0.0273, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:33:49,543 INFO [train.py:903] (1/4) Epoch 8, batch 800, loss[loss=0.287, simple_loss=0.3531, pruned_loss=0.1104, over 19522.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3231, pruned_loss=0.09354, over 3767333.55 frames. 
], batch size: 54, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:02,658 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 11:34:34,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2178, 5.4872, 2.9458, 4.7037, 1.2399, 5.3475, 5.3518, 5.5737], + device='cuda:1'), covar=tensor([0.0414, 0.0964, 0.1779, 0.0592, 0.3660, 0.0547, 0.0584, 0.0600], + device='cuda:1'), in_proj_covar=tensor([0.0394, 0.0344, 0.0401, 0.0302, 0.0366, 0.0328, 0.0318, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 11:34:51,086 INFO [train.py:903] (1/4) Epoch 8, batch 850, loss[loss=0.2726, simple_loss=0.3464, pruned_loss=0.09938, over 19509.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3236, pruned_loss=0.09336, over 3784065.46 frames. ], batch size: 64, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:57,942 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 5.922e+02 7.936e+02 9.993e+02 1.897e+03, threshold=1.587e+03, percent-clipped=5.0 +2023-04-01 11:35:39,775 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 11:35:39,888 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:35:43,310 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48689.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:35:50,881 INFO [train.py:903] (1/4) Epoch 8, batch 900, loss[loss=0.2587, simple_loss=0.3307, pruned_loss=0.09336, over 19770.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3239, pruned_loss=0.09358, over 3793163.58 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:35,744 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0261, 2.0194, 1.6856, 1.4788, 1.5045, 1.5662, 0.3051, 0.8989], + device='cuda:1'), covar=tensor([0.0322, 0.0338, 0.0222, 0.0357, 0.0694, 0.0412, 0.0634, 0.0585], + device='cuda:1'), in_proj_covar=tensor([0.0312, 0.0313, 0.0311, 0.0329, 0.0402, 0.0324, 0.0291, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:36:54,666 INFO [train.py:903] (1/4) Epoch 8, batch 950, loss[loss=0.2174, simple_loss=0.2933, pruned_loss=0.07075, over 19482.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3229, pruned_loss=0.0929, over 3812150.80 frames. ], batch size: 49, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:56,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 11:37:03,059 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.996e+02 5.931e+02 7.048e+02 8.289e+02 1.665e+03, threshold=1.410e+03, percent-clipped=1.0 +2023-04-01 11:37:11,347 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:42,377 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:57,226 INFO [train.py:903] (1/4) Epoch 8, batch 1000, loss[loss=0.2498, simple_loss=0.3229, pruned_loss=0.08838, over 19471.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3217, pruned_loss=0.09218, over 3804747.09 frames. 
], batch size: 64, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:38:03,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:08,263 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:40,036 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:50,187 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 11:38:51,464 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:59,276 INFO [train.py:903] (1/4) Epoch 8, batch 1050, loss[loss=0.2113, simple_loss=0.2795, pruned_loss=0.07151, over 19714.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3217, pruned_loss=0.09243, over 3812257.43 frames. ], batch size: 45, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:39:06,227 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.500e+02 6.561e+02 8.180e+02 1.521e+03, threshold=1.312e+03, percent-clipped=1.0 +2023-04-01 11:39:11,103 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:30,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 11:39:34,224 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:45,604 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0059, 4.9372, 5.7909, 5.7056, 1.7395, 5.4359, 4.5964, 5.3763], + device='cuda:1'), covar=tensor([0.1125, 0.0613, 0.0437, 0.0448, 0.4852, 0.0368, 0.0485, 0.0920], + device='cuda:1'), in_proj_covar=tensor([0.0584, 0.0514, 0.0691, 0.0581, 0.0643, 0.0439, 0.0442, 0.0642], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 11:39:45,753 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2688, 2.9896, 1.9030, 2.1190, 1.8483, 2.4534, 0.8166, 1.9636], + device='cuda:1'), covar=tensor([0.0321, 0.0334, 0.0420, 0.0649, 0.0722, 0.0523, 0.0748, 0.0655], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0312, 0.0309, 0.0329, 0.0401, 0.0320, 0.0289, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 11:39:58,991 INFO [train.py:903] (1/4) Epoch 8, batch 1100, loss[loss=0.2351, simple_loss=0.3137, pruned_loss=0.07827, over 19532.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.322, pruned_loss=0.09277, over 3823442.32 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:40:04,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48900.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:41:00,139 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48945.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:41:00,832 INFO [train.py:903] (1/4) Epoch 8, batch 1150, loss[loss=0.2699, simple_loss=0.3447, pruned_loss=0.09757, over 19687.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3221, pruned_loss=0.09216, over 3839316.61 frames. 
], batch size: 59, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:41:09,120 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.943e+02 6.952e+02 8.882e+02 1.618e+03, threshold=1.390e+03, percent-clipped=5.0 +2023-04-01 11:41:30,935 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:42:04,104 INFO [train.py:903] (1/4) Epoch 8, batch 1200, loss[loss=0.2236, simple_loss=0.2905, pruned_loss=0.07836, over 19785.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3228, pruned_loss=0.09273, over 3841145.54 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:42:32,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 11:43:05,008 INFO [train.py:903] (1/4) Epoch 8, batch 1250, loss[loss=0.2885, simple_loss=0.3552, pruned_loss=0.1109, over 19661.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3237, pruned_loss=0.0936, over 3827223.68 frames. ], batch size: 60, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:43:11,744 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.529e+02 6.811e+02 8.521e+02 1.008e+03 2.064e+03, threshold=1.704e+03, percent-clipped=4.0 +2023-04-01 11:43:17,968 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:43:39,590 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 11:43:50,271 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:44:06,041 INFO [train.py:903] (1/4) Epoch 8, batch 1300, loss[loss=0.2294, simple_loss=0.3044, pruned_loss=0.07717, over 19663.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.325, pruned_loss=0.09493, over 3820733.80 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:44:48,997 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:07,322 INFO [train.py:903] (1/4) Epoch 8, batch 1350, loss[loss=0.3032, simple_loss=0.357, pruned_loss=0.1247, over 13612.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3233, pruned_loss=0.09408, over 3826212.77 frames. ], batch size: 136, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:45:08,145 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-04-01 11:45:16,535 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.835e+02 7.092e+02 8.908e+02 2.388e+03, threshold=1.418e+03, percent-clipped=3.0 +2023-04-01 11:45:20,435 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:22,504 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:41,540 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8681, 3.5850, 1.7772, 2.1868, 2.9819, 1.7747, 1.2053, 1.8552], + device='cuda:1'), covar=tensor([0.1094, 0.0316, 0.0923, 0.0568, 0.0403, 0.0872, 0.0873, 0.0660], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0290, 0.0315, 0.0243, 0.0232, 0.0309, 0.0284, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:45:42,677 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0825, 1.9732, 2.2732, 1.6322, 4.5800, 0.9265, 2.5288, 4.7938], + device='cuda:1'), covar=tensor([0.0295, 0.2201, 0.2051, 0.1685, 0.0571, 0.2549, 0.1177, 0.0199], + device='cuda:1'), in_proj_covar=tensor([0.0313, 0.0317, 0.0325, 0.0298, 0.0325, 0.0319, 0.0297, 0.0321], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:45:52,053 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:54,119 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49183.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:46:10,832 INFO [train.py:903] (1/4) Epoch 8, batch 1400, loss[loss=0.2462, simple_loss=0.3181, pruned_loss=0.08715, over 19580.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3234, pruned_loss=0.09436, over 3825227.88 frames. ], batch size: 61, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:46:17,514 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49200.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:47:12,312 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 11:47:13,438 INFO [train.py:903] (1/4) Epoch 8, batch 1450, loss[loss=0.2203, simple_loss=0.2818, pruned_loss=0.0794, over 19765.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.323, pruned_loss=0.09396, over 3841104.18 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:47:19,909 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 6.215e+02 8.146e+02 9.729e+02 2.293e+03, threshold=1.629e+03, percent-clipped=2.0 +2023-04-01 11:47:22,582 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:14,510 INFO [train.py:903] (1/4) Epoch 8, batch 1500, loss[loss=0.2613, simple_loss=0.3346, pruned_loss=0.094, over 19556.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3219, pruned_loss=0.09317, over 3840826.31 frames. 
], batch size: 61, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:48:16,759 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49298.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:38,609 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49315.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:49:05,432 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5509, 1.8697, 2.1618, 2.5535, 2.2943, 1.8399, 1.7247, 2.3949], + device='cuda:1'), covar=tensor([0.0765, 0.1718, 0.1185, 0.0902, 0.1198, 0.0677, 0.1241, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0346, 0.0278, 0.0232, 0.0290, 0.0238, 0.0266, 0.0225], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:49:14,529 INFO [train.py:903] (1/4) Epoch 8, batch 1550, loss[loss=0.2571, simple_loss=0.3276, pruned_loss=0.09333, over 19671.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3233, pruned_loss=0.09436, over 3835751.98 frames. ], batch size: 53, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:49:23,152 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+02 6.392e+02 7.844e+02 9.464e+02 1.840e+03, threshold=1.569e+03, percent-clipped=1.0 +2023-04-01 11:49:42,518 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7867, 4.3590, 2.6044, 3.8461, 1.0324, 4.0205, 4.1147, 4.3041], + device='cuda:1'), covar=tensor([0.0586, 0.0964, 0.2059, 0.0701, 0.4088, 0.0770, 0.0719, 0.0833], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0341, 0.0399, 0.0298, 0.0368, 0.0326, 0.0318, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 11:50:17,462 INFO [train.py:903] (1/4) Epoch 8, batch 1600, loss[loss=0.2713, simple_loss=0.3396, pruned_loss=0.1015, over 19077.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3224, pruned_loss=0.09357, over 3841077.49 frames. ], batch size: 75, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:50:25,486 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6915, 4.0904, 4.4435, 4.3756, 1.7008, 4.1323, 3.5742, 4.0495], + device='cuda:1'), covar=tensor([0.1150, 0.0990, 0.0530, 0.0499, 0.4872, 0.0502, 0.0605, 0.0996], + device='cuda:1'), in_proj_covar=tensor([0.0600, 0.0533, 0.0717, 0.0598, 0.0667, 0.0458, 0.0454, 0.0663], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 11:50:38,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 11:51:12,153 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:51:20,005 INFO [train.py:903] (1/4) Epoch 8, batch 1650, loss[loss=0.2182, simple_loss=0.2924, pruned_loss=0.07199, over 19876.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3228, pruned_loss=0.09354, over 3844744.18 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:51:24,764 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49450.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:51:26,747 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 6.230e+02 7.478e+02 9.229e+02 3.510e+03, threshold=1.496e+03, percent-clipped=3.0 +2023-04-01 11:51:52,824 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 11:52:21,800 INFO [train.py:903] (1/4) Epoch 8, batch 1700, loss[loss=0.2916, simple_loss=0.3528, pruned_loss=0.1152, over 19744.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3209, pruned_loss=0.09159, over 3856779.05 frames. ], batch size: 63, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:02,520 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 11:53:04,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 11:53:23,394 INFO [train.py:903] (1/4) Epoch 8, batch 1750, loss[loss=0.2419, simple_loss=0.3139, pruned_loss=0.08494, over 19601.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3206, pruned_loss=0.09203, over 3830651.10 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:31,458 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.147e+02 7.390e+02 1.012e+03 1.809e+03, threshold=1.478e+03, percent-clipped=6.0 +2023-04-01 11:53:35,052 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49554.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:56,597 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:06,069 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:23,092 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 11:54:27,936 INFO [train.py:903] (1/4) Epoch 8, batch 1800, loss[loss=0.2646, simple_loss=0.3372, pruned_loss=0.09602, over 19488.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3205, pruned_loss=0.09202, over 3816801.83 frames. 
], batch size: 64, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:54:28,341 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:31,424 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:32,681 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5748, 1.6729, 1.7117, 1.7223, 4.1512, 1.0667, 2.3260, 4.2358], + device='cuda:1'), covar=tensor([0.0345, 0.2297, 0.2407, 0.1571, 0.0600, 0.2448, 0.1350, 0.0263], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0322, 0.0331, 0.0299, 0.0331, 0.0321, 0.0303, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:55:18,163 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.6377, 0.8746, 0.6856, 0.6421, 0.7852, 0.5865, 0.6429, 0.8251], + device='cuda:1'), covar=tensor([0.0299, 0.0369, 0.0518, 0.0308, 0.0291, 0.0614, 0.0308, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0288, 0.0313, 0.0241, 0.0230, 0.0311, 0.0285, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:55:25,414 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 11:55:30,998 INFO [train.py:903] (1/4) Epoch 8, batch 1850, loss[loss=0.2759, simple_loss=0.3409, pruned_loss=0.1055, over 17547.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3218, pruned_loss=0.09289, over 3815401.15 frames. ], batch size: 101, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:55:38,000 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.845e+02 7.519e+02 8.649e+02 2.522e+03, threshold=1.504e+03, percent-clipped=4.0 +2023-04-01 11:55:45,094 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3271, 2.2653, 1.5610, 1.4139, 2.1486, 1.2184, 1.1314, 1.6270], + device='cuda:1'), covar=tensor([0.0791, 0.0521, 0.0830, 0.0612, 0.0390, 0.0976, 0.0670, 0.0429], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0290, 0.0315, 0.0242, 0.0231, 0.0312, 0.0286, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 11:56:02,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 11:56:31,218 INFO [train.py:903] (1/4) Epoch 8, batch 1900, loss[loss=0.2497, simple_loss=0.3254, pruned_loss=0.08696, over 19663.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3224, pruned_loss=0.09337, over 3824640.90 frames. ], batch size: 53, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:56:48,641 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 11:56:52,370 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:56:54,371 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 11:57:18,977 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-01 11:57:33,007 INFO [train.py:903] (1/4) Epoch 8, batch 1950, loss[loss=0.2491, simple_loss=0.3044, pruned_loss=0.09687, over 19327.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3227, pruned_loss=0.09381, over 3824370.52 frames. ], batch size: 44, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:57:40,106 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.492e+02 5.402e+02 6.624e+02 8.916e+02 2.925e+03, threshold=1.325e+03, percent-clipped=4.0 +2023-04-01 11:58:20,353 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:20,512 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:32,991 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49794.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:58:35,919 INFO [train.py:903] (1/4) Epoch 8, batch 2000, loss[loss=0.2691, simple_loss=0.3426, pruned_loss=0.09776, over 19681.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.323, pruned_loss=0.0941, over 3805183.79 frames. ], batch size: 60, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:35,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 11:59:38,066 INFO [train.py:903] (1/4) Epoch 8, batch 2050, loss[loss=0.2523, simple_loss=0.3094, pruned_loss=0.09758, over 19399.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3225, pruned_loss=0.09393, over 3802769.22 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:45,935 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.411e+02 7.156e+02 9.098e+02 3.444e+03, threshold=1.431e+03, percent-clipped=9.0 +2023-04-01 11:59:51,700 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7694, 4.3173, 2.6179, 3.8457, 1.1373, 3.9821, 4.0041, 4.1419], + device='cuda:1'), covar=tensor([0.0538, 0.0950, 0.1967, 0.0699, 0.3746, 0.0745, 0.0763, 0.0804], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0340, 0.0402, 0.0298, 0.0363, 0.0328, 0.0314, 0.0356], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 11:59:53,833 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 11:59:55,105 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 12:00:17,459 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 12:00:29,260 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0985, 2.1687, 2.2027, 3.4999, 2.2689, 3.3206, 2.7984, 1.8842], + device='cuda:1'), covar=tensor([0.3225, 0.2571, 0.1177, 0.1317, 0.3146, 0.0981, 0.2677, 0.2352], + device='cuda:1'), in_proj_covar=tensor([0.0723, 0.0732, 0.0611, 0.0855, 0.0733, 0.0635, 0.0758, 0.0658], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:00:39,973 INFO [train.py:903] (1/4) Epoch 8, batch 2100, loss[loss=0.2705, simple_loss=0.3415, pruned_loss=0.09975, over 19707.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3217, pruned_loss=0.09311, over 3808850.51 frames. 
], batch size: 60, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:00:43,849 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:00:55,216 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49909.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:01:04,920 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6116, 4.1762, 2.8286, 3.8110, 0.9860, 3.9406, 3.8982, 4.0301], + device='cuda:1'), covar=tensor([0.0562, 0.1070, 0.1728, 0.0677, 0.4194, 0.0744, 0.0692, 0.0811], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0338, 0.0398, 0.0295, 0.0363, 0.0325, 0.0315, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 12:01:09,457 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 12:01:32,579 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 12:01:40,040 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7419, 1.7862, 1.8891, 2.4553, 1.5424, 2.1263, 2.2826, 1.8055], + device='cuda:1'), covar=tensor([0.2612, 0.2179, 0.1181, 0.1304, 0.2531, 0.1117, 0.2436, 0.2079], + device='cuda:1'), in_proj_covar=tensor([0.0724, 0.0734, 0.0612, 0.0859, 0.0731, 0.0636, 0.0760, 0.0658], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:01:41,831 INFO [train.py:903] (1/4) Epoch 8, batch 2150, loss[loss=0.2533, simple_loss=0.3247, pruned_loss=0.09095, over 18818.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3218, pruned_loss=0.09317, over 3800843.70 frames. ], batch size: 74, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:01:48,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 5.997e+02 7.086e+02 8.659e+02 2.224e+03, threshold=1.417e+03, percent-clipped=8.0 +2023-04-01 12:01:48,672 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3530, 2.1842, 1.6810, 1.6105, 1.4865, 1.7393, 0.5315, 1.2447], + device='cuda:1'), covar=tensor([0.0284, 0.0316, 0.0299, 0.0490, 0.0664, 0.0454, 0.0640, 0.0534], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0307, 0.0308, 0.0327, 0.0397, 0.0319, 0.0288, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:02:06,546 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2360, 1.3766, 1.7535, 1.4541, 2.7380, 2.3263, 2.8763, 1.1419], + device='cuda:1'), covar=tensor([0.1867, 0.3175, 0.1829, 0.1480, 0.1103, 0.1481, 0.1271, 0.2883], + device='cuda:1'), in_proj_covar=tensor([0.0454, 0.0530, 0.0526, 0.0411, 0.0567, 0.0461, 0.0628, 0.0459], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:02:12,071 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:40,685 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:43,585 INFO [train.py:903] (1/4) Epoch 8, batch 2200, loss[loss=0.2261, simple_loss=0.2974, pruned_loss=0.07739, over 19791.00 frames. 
], tot_loss[loss=0.2548, simple_loss=0.3224, pruned_loss=0.09357, over 3809445.31 frames. ], batch size: 48, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:48,609 INFO [train.py:903] (1/4) Epoch 8, batch 2250, loss[loss=0.2204, simple_loss=0.2977, pruned_loss=0.0715, over 19544.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3228, pruned_loss=0.09418, over 3807196.17 frames. ], batch size: 56, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:51,232 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3574, 3.9515, 2.4641, 3.6011, 1.0516, 3.6431, 3.7021, 3.7467], + device='cuda:1'), covar=tensor([0.0600, 0.0989, 0.1995, 0.0725, 0.3649, 0.0769, 0.0777, 0.1018], + device='cuda:1'), in_proj_covar=tensor([0.0390, 0.0338, 0.0398, 0.0294, 0.0360, 0.0323, 0.0312, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 12:03:55,469 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+02 6.238e+02 7.806e+02 1.014e+03 2.092e+03, threshold=1.561e+03, percent-clipped=8.0 +2023-04-01 12:03:58,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3190, 3.9139, 2.4776, 3.6033, 1.0039, 3.5901, 3.6720, 3.7373], + device='cuda:1'), covar=tensor([0.0695, 0.1197, 0.2050, 0.0697, 0.3861, 0.0799, 0.0706, 0.0982], + device='cuda:1'), in_proj_covar=tensor([0.0390, 0.0339, 0.0398, 0.0294, 0.0360, 0.0323, 0.0313, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 12:04:51,003 INFO [train.py:903] (1/4) Epoch 8, batch 2300, loss[loss=0.2429, simple_loss=0.3006, pruned_loss=0.09264, over 18999.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3227, pruned_loss=0.09392, over 3818258.83 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:03,035 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4672, 1.7586, 2.0413, 2.9490, 2.1907, 2.2156, 2.6321, 2.3735], + device='cuda:1'), covar=tensor([0.0722, 0.1032, 0.1013, 0.0886, 0.0954, 0.0943, 0.0925, 0.0737], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0232, 0.0229, 0.0258, 0.0245, 0.0217, 0.0208, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 12:05:04,996 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 12:05:11,317 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2925, 2.2008, 1.7732, 1.7374, 1.5497, 1.7664, 0.5575, 1.1234], + device='cuda:1'), covar=tensor([0.0299, 0.0307, 0.0278, 0.0386, 0.0727, 0.0425, 0.0654, 0.0587], + device='cuda:1'), in_proj_covar=tensor([0.0312, 0.0312, 0.0312, 0.0328, 0.0403, 0.0324, 0.0293, 0.0310], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:05:31,273 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:37,134 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2305, 1.2263, 1.6244, 1.3422, 2.7336, 3.6399, 3.4578, 3.8941], + device='cuda:1'), covar=tensor([0.1509, 0.3187, 0.2874, 0.1965, 0.0443, 0.0182, 0.0194, 0.0139], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0284, 0.0312, 0.0247, 0.0208, 0.0135, 0.0204, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:05:52,927 INFO [train.py:903] (1/4) Epoch 8, batch 2350, loss[loss=0.2536, simple_loss=0.3285, pruned_loss=0.08937, over 19597.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3214, pruned_loss=0.093, over 3826183.59 frames. ], batch size: 57, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:53,229 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:59,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.938e+02 7.771e+02 9.106e+02 1.869e+03, threshold=1.554e+03, percent-clipped=2.0 +2023-04-01 12:06:03,825 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:18,070 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50165.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:06:36,693 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:38,680 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 12:06:47,984 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50190.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:06:54,684 INFO [train.py:903] (1/4) Epoch 8, batch 2400, loss[loss=0.3306, simple_loss=0.3863, pruned_loss=0.1375, over 17223.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3214, pruned_loss=0.09268, over 3829310.35 frames. ], batch size: 101, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:06:54,695 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 12:07:20,927 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50215.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:07:54,636 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:07:58,090 INFO [train.py:903] (1/4) Epoch 8, batch 2450, loss[loss=0.2349, simple_loss=0.3176, pruned_loss=0.07615, over 18725.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3211, pruned_loss=0.09223, over 3829618.40 frames. 
], batch size: 74, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:08:05,246 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.846e+02 6.354e+02 7.339e+02 9.160e+02 2.255e+03, threshold=1.468e+03, percent-clipped=3.0 +2023-04-01 12:09:00,864 INFO [train.py:903] (1/4) Epoch 8, batch 2500, loss[loss=0.2442, simple_loss=0.3114, pruned_loss=0.08849, over 19590.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3211, pruned_loss=0.09219, over 3827424.43 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:03,028 INFO [train.py:903] (1/4) Epoch 8, batch 2550, loss[loss=0.2381, simple_loss=0.3081, pruned_loss=0.08406, over 19847.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3205, pruned_loss=0.09116, over 3823501.35 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:09,527 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.479e+02 6.803e+02 8.076e+02 1.672e+03, threshold=1.361e+03, percent-clipped=2.0 +2023-04-01 12:10:09,870 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8891, 0.8945, 1.1578, 0.5518, 1.6300, 1.7062, 1.5337, 1.8075], + device='cuda:1'), covar=tensor([0.1023, 0.2420, 0.2132, 0.1977, 0.0649, 0.0441, 0.0299, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0285, 0.0312, 0.0247, 0.0208, 0.0137, 0.0204, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:10:59,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 12:11:05,085 INFO [train.py:903] (1/4) Epoch 8, batch 2600, loss[loss=0.2569, simple_loss=0.3276, pruned_loss=0.09316, over 17446.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3214, pruned_loss=0.09191, over 3819985.40 frames. ], batch size: 101, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:09,317 INFO [train.py:903] (1/4) Epoch 8, batch 2650, loss[loss=0.3097, simple_loss=0.3639, pruned_loss=0.1277, over 18223.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.321, pruned_loss=0.09189, over 3826485.89 frames. ], batch size: 83, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:15,975 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.098e+02 6.836e+02 8.198e+02 1.046e+03 1.620e+03, threshold=1.640e+03, percent-clipped=8.0 +2023-04-01 12:12:27,572 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 12:12:52,968 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 12:13:04,667 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:11,329 INFO [train.py:903] (1/4) Epoch 8, batch 2700, loss[loss=0.3248, simple_loss=0.3784, pruned_loss=0.1356, over 19591.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3217, pruned_loss=0.0926, over 3817609.93 frames. ], batch size: 61, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:13:16,526 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50499.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:16,892 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-04-01 12:13:17,796 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9953, 2.0611, 2.1017, 2.8358, 1.8622, 2.6928, 2.6169, 1.9897], + device='cuda:1'), covar=tensor([0.2862, 0.2244, 0.1159, 0.1449, 0.2832, 0.1052, 0.2445, 0.2074], + device='cuda:1'), in_proj_covar=tensor([0.0723, 0.0729, 0.0611, 0.0851, 0.0731, 0.0635, 0.0759, 0.0655], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:13:46,639 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:15,291 INFO [train.py:903] (1/4) Epoch 8, batch 2750, loss[loss=0.2337, simple_loss=0.2923, pruned_loss=0.08757, over 19349.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3222, pruned_loss=0.09278, over 3813676.47 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:14:23,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 5.912e+02 7.181e+02 9.047e+02 1.864e+03, threshold=1.436e+03, percent-clipped=1.0 +2023-04-01 12:14:30,887 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50559.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:14:37,754 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50564.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:44,503 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7074, 1.7305, 1.8339, 2.5183, 1.5672, 2.2531, 2.1552, 1.7519], + device='cuda:1'), covar=tensor([0.2909, 0.2339, 0.1308, 0.1196, 0.2618, 0.1087, 0.2815, 0.2252], + device='cuda:1'), in_proj_covar=tensor([0.0731, 0.0739, 0.0618, 0.0861, 0.0742, 0.0644, 0.0769, 0.0663], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:15:10,438 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 12:15:18,011 INFO [train.py:903] (1/4) Epoch 8, batch 2800, loss[loss=0.2773, simple_loss=0.3256, pruned_loss=0.1145, over 19422.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3226, pruned_loss=0.093, over 3810555.23 frames. ], batch size: 48, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:15:29,723 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50605.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:16:21,989 INFO [train.py:903] (1/4) Epoch 8, batch 2850, loss[loss=0.3336, simple_loss=0.3751, pruned_loss=0.146, over 13315.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.323, pruned_loss=0.09308, over 3805420.33 frames. 
], batch size: 135, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:16:31,181 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 5.617e+02 7.073e+02 8.787e+02 1.544e+03, threshold=1.415e+03, percent-clipped=2.0 +2023-04-01 12:16:57,548 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50674.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:17:03,736 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9392, 1.9935, 2.0535, 2.9102, 1.9392, 2.6509, 2.5477, 1.9299], + device='cuda:1'), covar=tensor([0.3064, 0.2428, 0.1209, 0.1327, 0.2710, 0.1089, 0.2489, 0.2158], + device='cuda:1'), in_proj_covar=tensor([0.0724, 0.0731, 0.0611, 0.0846, 0.0729, 0.0633, 0.0752, 0.0652], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:17:25,975 INFO [train.py:903] (1/4) Epoch 8, batch 2900, loss[loss=0.3068, simple_loss=0.3688, pruned_loss=0.1224, over 19652.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3233, pruned_loss=0.09354, over 3805695.43 frames. ], batch size: 60, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:17:26,012 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 12:17:36,254 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 12:18:26,806 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-04-01 12:18:29,362 INFO [train.py:903] (1/4) Epoch 8, batch 2950, loss[loss=0.2459, simple_loss=0.3208, pruned_loss=0.08548, over 19530.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3245, pruned_loss=0.09455, over 3784009.72 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:18:37,517 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 6.074e+02 7.951e+02 1.027e+03 2.467e+03, threshold=1.590e+03, percent-clipped=7.0 +2023-04-01 12:19:20,726 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1109, 1.1909, 1.4838, 0.9919, 1.9282, 2.2265, 2.1325, 2.3640], + device='cuda:1'), covar=tensor([0.1286, 0.2624, 0.2343, 0.2111, 0.0885, 0.0570, 0.0287, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0286, 0.0315, 0.0249, 0.0208, 0.0137, 0.0207, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:19:31,422 INFO [train.py:903] (1/4) Epoch 8, batch 3000, loss[loss=0.2635, simple_loss=0.3272, pruned_loss=0.09993, over 19538.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.325, pruned_loss=0.09498, over 3773493.17 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:19:31,423 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 12:19:38,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8280, 3.4778, 2.3834, 3.4020, 0.7980, 3.2190, 3.1246, 3.5193], + device='cuda:1'), covar=tensor([0.0765, 0.0875, 0.2478, 0.0598, 0.4861, 0.0975, 0.0828, 0.0859], + device='cuda:1'), in_proj_covar=tensor([0.0394, 0.0343, 0.0405, 0.0296, 0.0365, 0.0329, 0.0319, 0.0358], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 12:19:44,057 INFO [train.py:937] (1/4) Epoch 8, validation: loss=0.1875, simple_loss=0.2879, pruned_loss=0.04358, over 944034.00 frames. 
+2023-04-01 12:19:44,058 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 12:19:46,423 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 12:19:49,174 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50800.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:20:21,742 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50826.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:20:45,722 INFO [train.py:903] (1/4) Epoch 8, batch 3050, loss[loss=0.2019, simple_loss=0.2757, pruned_loss=0.06408, over 19709.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3245, pruned_loss=0.09431, over 3782912.16 frames. ], batch size: 45, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:20:55,178 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 5.734e+02 7.199e+02 9.163e+02 1.650e+03, threshold=1.440e+03, percent-clipped=2.0 +2023-04-01 12:21:06,918 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:36,767 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:49,951 INFO [train.py:903] (1/4) Epoch 8, batch 3100, loss[loss=0.2254, simple_loss=0.293, pruned_loss=0.07897, over 19694.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3227, pruned_loss=0.09361, over 3780494.30 frames. ], batch size: 53, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:22:04,759 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:22:32,041 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50930.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:22:52,354 INFO [train.py:903] (1/4) Epoch 8, batch 3150, loss[loss=0.2346, simple_loss=0.3108, pruned_loss=0.07927, over 19794.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3227, pruned_loss=0.09341, over 3792641.32 frames. ], batch size: 56, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:23:00,490 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 5.915e+02 7.023e+02 8.955e+02 1.571e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-01 12:23:03,134 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50955.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:23:14,734 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:23:20,380 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 12:23:45,417 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7570, 2.6543, 2.0398, 2.0067, 1.7737, 2.2076, 1.1741, 1.9947], + device='cuda:1'), covar=tensor([0.0324, 0.0349, 0.0361, 0.0536, 0.0628, 0.0577, 0.0626, 0.0511], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0316, 0.0316, 0.0333, 0.0408, 0.0330, 0.0295, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:23:54,308 INFO [train.py:903] (1/4) Epoch 8, batch 3200, loss[loss=0.221, simple_loss=0.2908, pruned_loss=0.07562, over 19744.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3217, pruned_loss=0.09295, over 3807261.88 frames. 
], batch size: 45, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:24:10,783 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 12:24:30,138 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51023.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:24:57,557 INFO [train.py:903] (1/4) Epoch 8, batch 3250, loss[loss=0.2295, simple_loss=0.3099, pruned_loss=0.07454, over 19534.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3219, pruned_loss=0.09299, over 3805095.70 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:25:00,409 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8893, 1.9379, 1.9779, 2.8422, 2.0131, 2.6600, 2.3941, 1.9221], + device='cuda:1'), covar=tensor([0.2814, 0.2375, 0.1173, 0.1209, 0.2536, 0.1044, 0.2428, 0.2088], + device='cuda:1'), in_proj_covar=tensor([0.0727, 0.0738, 0.0615, 0.0856, 0.0731, 0.0641, 0.0758, 0.0660], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:25:05,782 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 6.087e+02 7.883e+02 9.942e+02 3.174e+03, threshold=1.577e+03, percent-clipped=7.0 +2023-04-01 12:25:10,903 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8730, 4.4318, 2.3970, 3.9210, 1.2538, 4.0936, 4.0575, 4.3018], + device='cuda:1'), covar=tensor([0.0565, 0.0970, 0.2164, 0.0643, 0.3724, 0.0708, 0.0778, 0.0879], + device='cuda:1'), in_proj_covar=tensor([0.0401, 0.0348, 0.0407, 0.0299, 0.0372, 0.0332, 0.0326, 0.0362], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 12:25:20,241 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.49 vs. limit=5.0 +2023-04-01 12:25:40,469 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:26:00,812 INFO [train.py:903] (1/4) Epoch 8, batch 3300, loss[loss=0.2464, simple_loss=0.3062, pruned_loss=0.09328, over 19613.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3218, pruned_loss=0.09298, over 3816036.71 frames. ], batch size: 50, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:26:01,340 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5168, 1.5957, 1.6844, 2.0894, 1.3441, 1.7413, 2.0017, 1.6110], + device='cuda:1'), covar=tensor([0.2778, 0.2265, 0.1240, 0.1277, 0.2524, 0.1220, 0.2634, 0.2169], + device='cuda:1'), in_proj_covar=tensor([0.0726, 0.0738, 0.0614, 0.0857, 0.0732, 0.0644, 0.0758, 0.0659], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:26:08,875 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-01 12:26:26,570 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3662, 1.4041, 1.8534, 1.5199, 2.7291, 2.2412, 2.7676, 1.2617], + device='cuda:1'), covar=tensor([0.1954, 0.3179, 0.1890, 0.1620, 0.1292, 0.1688, 0.1416, 0.3150], + device='cuda:1'), in_proj_covar=tensor([0.0462, 0.0537, 0.0535, 0.0416, 0.0573, 0.0469, 0.0631, 0.0465], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:26:44,960 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0378, 3.5815, 1.9612, 2.2175, 3.1771, 1.9007, 1.1808, 1.9872], + device='cuda:1'), covar=tensor([0.0890, 0.0416, 0.0793, 0.0600, 0.0343, 0.0855, 0.0849, 0.0593], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0292, 0.0312, 0.0239, 0.0230, 0.0311, 0.0280, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 12:27:02,452 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51144.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:27:04,792 INFO [train.py:903] (1/4) Epoch 8, batch 3350, loss[loss=0.2581, simple_loss=0.3318, pruned_loss=0.09225, over 19666.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3229, pruned_loss=0.0941, over 3809145.24 frames. ], batch size: 60, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:27:12,718 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.942e+02 7.377e+02 9.279e+02 2.136e+03, threshold=1.475e+03, percent-clipped=2.0 +2023-04-01 12:27:14,292 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1410, 1.2293, 1.5923, 1.3452, 2.1901, 1.9411, 2.2612, 0.6796], + device='cuda:1'), covar=tensor([0.2007, 0.3442, 0.1957, 0.1599, 0.1255, 0.1728, 0.1256, 0.3421], + device='cuda:1'), in_proj_covar=tensor([0.0464, 0.0541, 0.0537, 0.0418, 0.0574, 0.0470, 0.0633, 0.0467], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:27:34,025 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51170.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:28:00,622 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51190.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:28:07,180 INFO [train.py:903] (1/4) Epoch 8, batch 3400, loss[loss=0.2479, simple_loss=0.32, pruned_loss=0.0879, over 19666.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3239, pruned_loss=0.09438, over 3819898.06 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:28:26,744 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 12:29:10,668 INFO [train.py:903] (1/4) Epoch 8, batch 3450, loss[loss=0.2781, simple_loss=0.3465, pruned_loss=0.1049, over 19693.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3235, pruned_loss=0.09383, over 3822127.68 frames. ], batch size: 60, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:16,194 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-01 12:29:18,574 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+02 6.280e+02 7.477e+02 9.686e+02 1.820e+03, threshold=1.495e+03, percent-clipped=3.0 +2023-04-01 12:29:27,053 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:29:52,038 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:00,084 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51285.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:30:01,385 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2294, 1.6895, 1.7882, 2.1744, 1.9518, 1.9146, 1.7983, 2.1578], + device='cuda:1'), covar=tensor([0.0742, 0.1482, 0.1197, 0.0747, 0.1102, 0.0439, 0.0966, 0.0518], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0352, 0.0285, 0.0236, 0.0295, 0.0240, 0.0269, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 12:30:12,716 INFO [train.py:903] (1/4) Epoch 8, batch 3500, loss[loss=0.2674, simple_loss=0.3395, pruned_loss=0.09762, over 19615.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3233, pruned_loss=0.09363, over 3827975.48 frames. ], batch size: 57, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:30:25,552 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51304.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:29,262 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8586, 1.3391, 1.0545, 1.0066, 1.1911, 0.9547, 0.9311, 1.2411], + device='cuda:1'), covar=tensor([0.0487, 0.0681, 0.0936, 0.0515, 0.0417, 0.1091, 0.0514, 0.0384], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0292, 0.0315, 0.0242, 0.0230, 0.0309, 0.0283, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 12:30:30,171 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:31:19,055 INFO [train.py:903] (1/4) Epoch 8, batch 3550, loss[loss=0.2751, simple_loss=0.3421, pruned_loss=0.1041, over 19483.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3232, pruned_loss=0.09385, over 3809680.36 frames. ], batch size: 64, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:31:27,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.209e+02 6.396e+02 8.561e+02 1.899e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-01 12:31:30,122 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1831, 1.1590, 1.3619, 1.3030, 1.6824, 1.7592, 1.7134, 0.5054], + device='cuda:1'), covar=tensor([0.1996, 0.3376, 0.1988, 0.1593, 0.1254, 0.1794, 0.1209, 0.3325], + device='cuda:1'), in_proj_covar=tensor([0.0466, 0.0539, 0.0540, 0.0419, 0.0575, 0.0469, 0.0634, 0.0468], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:32:21,070 INFO [train.py:903] (1/4) Epoch 8, batch 3600, loss[loss=0.2557, simple_loss=0.3229, pruned_loss=0.09429, over 17356.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3228, pruned_loss=0.09391, over 3813878.78 frames. 
], batch size: 101, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:32:26,129 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51400.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:38,753 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:55,836 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:56,815 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:33:22,342 INFO [train.py:903] (1/4) Epoch 8, batch 3650, loss[loss=0.2619, simple_loss=0.3324, pruned_loss=0.09567, over 19090.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3229, pruned_loss=0.0936, over 3815197.01 frames. ], batch size: 69, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:33:31,510 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 6.415e+02 7.779e+02 9.918e+02 2.619e+03, threshold=1.556e+03, percent-clipped=14.0 +2023-04-01 12:34:24,454 INFO [train.py:903] (1/4) Epoch 8, batch 3700, loss[loss=0.2708, simple_loss=0.3428, pruned_loss=0.09936, over 19765.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3239, pruned_loss=0.0944, over 3822272.12 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:34:31,565 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2131, 2.0519, 1.5110, 1.4660, 1.9139, 1.1367, 1.1924, 1.7689], + device='cuda:1'), covar=tensor([0.0721, 0.0574, 0.0888, 0.0533, 0.0382, 0.1007, 0.0557, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0290, 0.0315, 0.0242, 0.0227, 0.0311, 0.0279, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 12:34:46,883 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:34:49,337 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51515.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:12,250 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51534.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:35:18,876 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51539.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:20,110 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51540.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:22,153 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51541.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:35:28,598 INFO [train.py:903] (1/4) Epoch 8, batch 3750, loss[loss=0.2436, simple_loss=0.3143, pruned_loss=0.08646, over 19602.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.324, pruned_loss=0.09423, over 3809588.46 frames. ], batch size: 61, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:35:36,643 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.985e+02 5.907e+02 7.282e+02 9.270e+02 2.268e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:35:51,385 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.39 vs. 
limit=5.0 +2023-04-01 12:35:52,290 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51566.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:36:30,191 INFO [train.py:903] (1/4) Epoch 8, batch 3800, loss[loss=0.241, simple_loss=0.3103, pruned_loss=0.08579, over 19695.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3241, pruned_loss=0.0944, over 3795166.92 frames. ], batch size: 59, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:02,476 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 12:37:31,362 INFO [train.py:903] (1/4) Epoch 8, batch 3850, loss[loss=0.291, simple_loss=0.3641, pruned_loss=0.109, over 19508.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3234, pruned_loss=0.09394, over 3794299.63 frames. ], batch size: 64, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:35,087 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51649.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:37:40,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.568e+02 6.155e+02 7.716e+02 1.023e+03 2.199e+03, threshold=1.543e+03, percent-clipped=8.0 +2023-04-01 12:38:13,424 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51679.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:38:33,776 INFO [train.py:903] (1/4) Epoch 8, batch 3900, loss[loss=0.2184, simple_loss=0.2974, pruned_loss=0.06971, over 19685.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3218, pruned_loss=0.09253, over 3813776.10 frames. ], batch size: 53, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:38:44,951 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51704.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:28,917 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 12:39:33,665 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51744.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:37,151 INFO [train.py:903] (1/4) Epoch 8, batch 3950, loss[loss=0.2343, simple_loss=0.3101, pruned_loss=0.07927, over 19301.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3213, pruned_loss=0.09246, over 3808211.47 frames. ], batch size: 66, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:39:41,707 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 12:39:45,221 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+02 5.815e+02 7.280e+02 9.203e+02 2.422e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:39:46,650 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:59,509 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51765.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:37,949 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:38,725 INFO [train.py:903] (1/4) Epoch 8, batch 4000, loss[loss=0.2582, simple_loss=0.3329, pruned_loss=0.09177, over 19498.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3213, pruned_loss=0.09213, over 3803152.72 frames. 
], batch size: 64, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:09,893 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:27,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 12:41:41,697 INFO [train.py:903] (1/4) Epoch 8, batch 4050, loss[loss=0.2968, simple_loss=0.3587, pruned_loss=0.1175, over 17477.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3219, pruned_loss=0.09255, over 3781484.36 frames. ], batch size: 101, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:50,759 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.576e+02 5.742e+02 7.614e+02 9.901e+02 2.045e+03, threshold=1.523e+03, percent-clipped=5.0 +2023-04-01 12:41:55,464 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:59,733 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51859.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:11,936 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51869.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:33,321 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 12:42:43,847 INFO [train.py:903] (1/4) Epoch 8, batch 4100, loss[loss=0.2622, simple_loss=0.329, pruned_loss=0.09769, over 19786.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.322, pruned_loss=0.09236, over 3800355.30 frames. ], batch size: 56, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:42:54,390 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 12:42:56,231 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:43:21,444 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 12:43:26,433 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51930.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:43:47,778 INFO [train.py:903] (1/4) Epoch 8, batch 4150, loss[loss=0.2269, simple_loss=0.2898, pruned_loss=0.082, over 19781.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3214, pruned_loss=0.09218, over 3795912.63 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:43:56,802 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.129e+02 6.343e+02 7.798e+02 9.790e+02 2.215e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 12:44:19,720 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:44:50,678 INFO [train.py:903] (1/4) Epoch 8, batch 4200, loss[loss=0.2597, simple_loss=0.327, pruned_loss=0.09618, over 19412.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3205, pruned_loss=0.09159, over 3801714.29 frames. ], batch size: 70, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:44:57,640 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-01 12:45:04,991 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3846, 2.3551, 2.4318, 3.2465, 2.6518, 3.2320, 2.9098, 2.3716], + device='cuda:1'), covar=tensor([0.2470, 0.1956, 0.0974, 0.1104, 0.2096, 0.0790, 0.1754, 0.1572], + device='cuda:1'), in_proj_covar=tensor([0.0727, 0.0737, 0.0616, 0.0860, 0.0734, 0.0643, 0.0759, 0.0662], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:45:53,322 INFO [train.py:903] (1/4) Epoch 8, batch 4250, loss[loss=0.2042, simple_loss=0.2723, pruned_loss=0.06802, over 18712.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3198, pruned_loss=0.09152, over 3802900.13 frames. ], batch size: 41, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:46:01,321 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 5.455e+02 6.472e+02 8.916e+02 2.597e+03, threshold=1.294e+03, percent-clipped=4.0 +2023-04-01 12:46:08,438 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:46:11,482 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 12:46:21,888 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 12:46:55,486 INFO [train.py:903] (1/4) Epoch 8, batch 4300, loss[loss=0.257, simple_loss=0.3226, pruned_loss=0.09571, over 19832.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3203, pruned_loss=0.09194, over 3782085.96 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:47:14,058 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52109.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:22,339 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:29,334 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6090, 1.6450, 1.7821, 2.2035, 1.4416, 1.9207, 2.0737, 1.7345], + device='cuda:1'), covar=tensor([0.2801, 0.2173, 0.1221, 0.1189, 0.2355, 0.1091, 0.2764, 0.2139], + device='cuda:1'), in_proj_covar=tensor([0.0728, 0.0739, 0.0616, 0.0863, 0.0737, 0.0648, 0.0761, 0.0664], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:47:34,043 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:50,782 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 12:47:53,227 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:00,440 INFO [train.py:903] (1/4) Epoch 8, batch 4350, loss[loss=0.2619, simple_loss=0.3135, pruned_loss=0.1051, over 19792.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3196, pruned_loss=0.09153, over 3780568.72 frames. 
], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:48:06,434 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:09,408 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.672e+02 7.291e+02 9.101e+02 1.997e+03, threshold=1.458e+03, percent-clipped=8.0 +2023-04-01 12:49:03,039 INFO [train.py:903] (1/4) Epoch 8, batch 4400, loss[loss=0.2926, simple_loss=0.349, pruned_loss=0.1181, over 17353.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3209, pruned_loss=0.09193, over 3778662.40 frames. ], batch size: 101, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:49:21,135 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52211.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:26,733 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 12:49:38,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 12:49:38,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:43,301 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:04,685 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 12:50:05,239 INFO [train.py:903] (1/4) Epoch 8, batch 4450, loss[loss=0.2299, simple_loss=0.3061, pruned_loss=0.07688, over 18815.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3212, pruned_loss=0.0918, over 3796657.10 frames. ], batch size: 74, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:50:13,310 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.880e+02 7.086e+02 8.839e+02 1.936e+03, threshold=1.417e+03, percent-clipped=3.0 +2023-04-01 12:50:13,728 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:51:06,386 INFO [train.py:903] (1/4) Epoch 8, batch 4500, loss[loss=0.2041, simple_loss=0.2713, pruned_loss=0.06847, over 19801.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3211, pruned_loss=0.09208, over 3797321.31 frames. ], batch size: 45, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:51:08,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 12:51:50,106 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:51:57,139 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3720, 1.7863, 1.8594, 2.5453, 1.9542, 2.4755, 2.4543, 2.2320], + device='cuda:1'), covar=tensor([0.0707, 0.0962, 0.1003, 0.1016, 0.1000, 0.0681, 0.0953, 0.0642], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0234, 0.0229, 0.0259, 0.0247, 0.0219, 0.0210, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 12:52:10,344 INFO [train.py:903] (1/4) Epoch 8, batch 4550, loss[loss=0.2499, simple_loss=0.3177, pruned_loss=0.09109, over 19528.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3199, pruned_loss=0.09145, over 3801277.41 frames. 
], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:52:18,695 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.922e+02 7.010e+02 8.869e+02 1.679e+03, threshold=1.402e+03, percent-clipped=2.0 +2023-04-01 12:52:18,728 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 12:52:28,212 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3657, 2.1519, 1.8192, 1.7743, 1.6475, 1.7070, 0.4501, 1.2222], + device='cuda:1'), covar=tensor([0.0325, 0.0353, 0.0314, 0.0447, 0.0666, 0.0496, 0.0790, 0.0614], + device='cuda:1'), in_proj_covar=tensor([0.0321, 0.0319, 0.0316, 0.0335, 0.0413, 0.0332, 0.0302, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 12:52:28,251 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2183, 1.2862, 1.6259, 1.4220, 2.6708, 2.3022, 2.8333, 1.1441], + device='cuda:1'), covar=tensor([0.2102, 0.3499, 0.2087, 0.1630, 0.1358, 0.1654, 0.1352, 0.3320], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0546, 0.0545, 0.0422, 0.0584, 0.0475, 0.0646, 0.0472], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:52:41,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 12:52:51,954 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.87 vs. limit=5.0 +2023-04-01 12:53:11,430 INFO [train.py:903] (1/4) Epoch 8, batch 4600, loss[loss=0.2255, simple_loss=0.306, pruned_loss=0.0725, over 19544.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3198, pruned_loss=0.09108, over 3811263.75 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:53:18,528 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:54:12,886 INFO [train.py:903] (1/4) Epoch 8, batch 4650, loss[loss=0.2199, simple_loss=0.2848, pruned_loss=0.07755, over 19783.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3203, pruned_loss=0.09109, over 3815616.62 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:54:21,260 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 5.664e+02 6.903e+02 8.285e+02 1.576e+03, threshold=1.381e+03, percent-clipped=2.0 +2023-04-01 12:54:30,512 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 12:54:42,674 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 12:54:56,903 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:15,231 INFO [train.py:903] (1/4) Epoch 8, batch 4700, loss[loss=0.2023, simple_loss=0.2859, pruned_loss=0.05941, over 19874.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3178, pruned_loss=0.08974, over 3823401.03 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:55:27,906 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52505.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:39,731 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-01 12:55:43,520 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:18,062 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 12:56:18,360 INFO [train.py:903] (1/4) Epoch 8, batch 4750, loss[loss=0.2904, simple_loss=0.3523, pruned_loss=0.1142, over 19756.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.319, pruned_loss=0.09083, over 3822288.81 frames. ], batch size: 54, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:56:29,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.555e+02 6.316e+02 7.348e+02 9.529e+02 1.491e+03, threshold=1.470e+03, percent-clipped=3.0 +2023-04-01 12:56:31,067 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:52,298 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52573.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:57:12,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0085, 1.3296, 0.9866, 0.9972, 1.1514, 0.9345, 0.8732, 1.2366], + device='cuda:1'), covar=tensor([0.0424, 0.0601, 0.0932, 0.0503, 0.0457, 0.0979, 0.0536, 0.0362], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0292, 0.0315, 0.0241, 0.0229, 0.0312, 0.0288, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 12:57:22,235 INFO [train.py:903] (1/4) Epoch 8, batch 4800, loss[loss=0.274, simple_loss=0.344, pruned_loss=0.102, over 17360.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3201, pruned_loss=0.09113, over 3823701.92 frames. ], batch size: 101, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:16,379 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 12:58:22,761 INFO [train.py:903] (1/4) Epoch 8, batch 4850, loss[loss=0.2526, simple_loss=0.3314, pruned_loss=0.08684, over 19097.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3202, pruned_loss=0.09119, over 3820011.20 frames. ], batch size: 69, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:32,081 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.021e+02 7.604e+02 9.872e+02 2.114e+03, threshold=1.521e+03, percent-clipped=8.0 +2023-04-01 12:58:46,867 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 12:58:52,939 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52670.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:58:58,261 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:58:59,836 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8124, 1.8830, 1.9490, 2.6694, 1.7256, 2.3668, 2.3743, 1.8919], + device='cuda:1'), covar=tensor([0.2964, 0.2453, 0.1273, 0.1430, 0.2727, 0.1133, 0.2645, 0.2155], + device='cuda:1'), in_proj_covar=tensor([0.0729, 0.0739, 0.0610, 0.0867, 0.0733, 0.0638, 0.0761, 0.0659], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 12:59:00,297 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. 
limit=2.0 +2023-04-01 12:59:08,337 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 12:59:14,086 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 12:59:14,111 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 12:59:23,224 INFO [train.py:903] (1/4) Epoch 8, batch 4900, loss[loss=0.2682, simple_loss=0.3196, pruned_loss=0.1084, over 19380.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3204, pruned_loss=0.09144, over 3813533.40 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:59:24,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 12:59:44,290 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 13:00:22,662 INFO [train.py:903] (1/4) Epoch 8, batch 4950, loss[loss=0.2577, simple_loss=0.3326, pruned_loss=0.09136, over 19775.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3207, pruned_loss=0.09165, over 3819649.95 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 13:00:35,712 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 6.374e+02 8.178e+02 1.048e+03 2.702e+03, threshold=1.636e+03, percent-clipped=11.0 +2023-04-01 13:00:40,357 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 13:00:53,296 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:00:56,991 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:04,909 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 13:01:17,600 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:26,606 INFO [train.py:903] (1/4) Epoch 8, batch 5000, loss[loss=0.2239, simple_loss=0.3006, pruned_loss=0.07362, over 19691.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3206, pruned_loss=0.09183, over 3813333.52 frames. ], batch size: 53, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:01:29,394 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:35,984 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 13:01:47,277 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 13:02:28,523 INFO [train.py:903] (1/4) Epoch 8, batch 5050, loss[loss=0.2003, simple_loss=0.2767, pruned_loss=0.06191, over 17760.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3196, pruned_loss=0.09106, over 3807153.88 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:02:39,042 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 5.648e+02 7.062e+02 8.811e+02 1.795e+03, threshold=1.412e+03, percent-clipped=2.0 +2023-04-01 13:03:04,976 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-01 13:03:22,581 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0098, 0.9554, 1.3679, 0.6426, 2.2940, 2.4589, 2.2283, 2.7213], + device='cuda:1'), covar=tensor([0.1564, 0.4449, 0.3945, 0.2292, 0.0465, 0.0260, 0.0422, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0287, 0.0316, 0.0249, 0.0207, 0.0140, 0.0205, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 13:03:30,507 INFO [train.py:903] (1/4) Epoch 8, batch 5100, loss[loss=0.2378, simple_loss=0.3188, pruned_loss=0.07841, over 19740.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3209, pruned_loss=0.09182, over 3800671.46 frames. ], batch size: 63, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:03:41,015 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 13:03:45,417 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 13:03:50,849 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 13:03:59,150 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52917.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:04:06,195 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2552, 1.2865, 1.6521, 1.4321, 2.2708, 2.0933, 2.4500, 0.8148], + device='cuda:1'), covar=tensor([0.2022, 0.3451, 0.2078, 0.1647, 0.1437, 0.1698, 0.1388, 0.3462], + device='cuda:1'), in_proj_covar=tensor([0.0462, 0.0532, 0.0538, 0.0412, 0.0574, 0.0468, 0.0631, 0.0466], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 13:04:09,597 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:23,604 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5642, 1.6123, 1.6895, 2.1276, 1.3048, 1.9297, 2.0062, 1.6251], + device='cuda:1'), covar=tensor([0.2802, 0.2346, 0.1291, 0.1330, 0.2615, 0.1119, 0.2828, 0.2266], + device='cuda:1'), in_proj_covar=tensor([0.0723, 0.0739, 0.0609, 0.0864, 0.0733, 0.0639, 0.0761, 0.0655], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 13:04:32,211 INFO [train.py:903] (1/4) Epoch 8, batch 5150, loss[loss=0.2207, simple_loss=0.2879, pruned_loss=0.07679, over 19745.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3209, pruned_loss=0.09149, over 3811711.95 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:04:41,017 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:45,911 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.391e+02 6.060e+02 7.983e+02 1.041e+03 2.368e+03, threshold=1.597e+03, percent-clipped=6.0 +2023-04-01 13:04:47,183 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:05:19,618 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:05:37,038 INFO [train.py:903] (1/4) Epoch 8, batch 5200, loss[loss=0.2632, simple_loss=0.3445, pruned_loss=0.09098, over 17961.00 frames. 
], tot_loss[loss=0.2518, simple_loss=0.3209, pruned_loss=0.09133, over 3813422.30 frames. ], batch size: 83, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:05:51,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 13:06:21,624 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53032.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:06:36,725 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 13:06:38,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:06:39,062 INFO [train.py:903] (1/4) Epoch 8, batch 5250, loss[loss=0.3123, simple_loss=0.3635, pruned_loss=0.1306, over 13652.00 frames. ], tot_loss[loss=0.251, simple_loss=0.32, pruned_loss=0.09098, over 3815430.69 frames. ], batch size: 136, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:06:49,012 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.973e+02 7.081e+02 8.822e+02 3.028e+03, threshold=1.416e+03, percent-clipped=2.0 +2023-04-01 13:07:08,287 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:13,776 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:39,272 INFO [train.py:903] (1/4) Epoch 8, batch 5300, loss[loss=0.2538, simple_loss=0.317, pruned_loss=0.09529, over 19546.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3183, pruned_loss=0.09043, over 3815899.21 frames. ], batch size: 54, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:07:57,431 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 13:08:03,083 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:08:41,250 INFO [train.py:903] (1/4) Epoch 8, batch 5350, loss[loss=0.2643, simple_loss=0.3357, pruned_loss=0.09648, over 19624.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3185, pruned_loss=0.09017, over 3827214.37 frames. ], batch size: 57, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:08:52,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 6.176e+02 7.478e+02 9.376e+02 1.338e+03, threshold=1.496e+03, percent-clipped=0.0 +2023-04-01 13:09:14,972 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-01 13:09:18,561 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 13:09:44,008 INFO [train.py:903] (1/4) Epoch 8, batch 5400, loss[loss=0.296, simple_loss=0.359, pruned_loss=0.1165, over 17098.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3187, pruned_loss=0.08988, over 3819738.59 frames. 
], batch size: 101, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:15,109 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9544, 1.4877, 1.6756, 2.4533, 1.9433, 2.0877, 2.3769, 1.9786], + device='cuda:1'), covar=tensor([0.0905, 0.1237, 0.1091, 0.0988, 0.0987, 0.0812, 0.0948, 0.0769], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0232, 0.0228, 0.0258, 0.0245, 0.0217, 0.0208, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 13:10:24,924 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:10:47,201 INFO [train.py:903] (1/4) Epoch 8, batch 5450, loss[loss=0.2216, simple_loss=0.2871, pruned_loss=0.07806, over 19387.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3193, pruned_loss=0.09091, over 3802396.13 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:53,232 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3549, 1.4392, 2.2817, 1.7286, 3.2277, 2.4557, 3.1855, 1.6909], + device='cuda:1'), covar=tensor([0.2085, 0.3671, 0.1909, 0.1613, 0.1397, 0.1824, 0.1832, 0.3070], + device='cuda:1'), in_proj_covar=tensor([0.0468, 0.0539, 0.0545, 0.0417, 0.0577, 0.0473, 0.0637, 0.0470], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 13:10:57,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.768e+02 7.150e+02 9.218e+02 2.127e+03, threshold=1.430e+03, percent-clipped=3.0 +2023-04-01 13:11:02,296 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:11:40,430 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53288.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:11:48,929 INFO [train.py:903] (1/4) Epoch 8, batch 5500, loss[loss=0.2547, simple_loss=0.3107, pruned_loss=0.09933, over 19748.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3204, pruned_loss=0.09178, over 3805870.97 frames. ], batch size: 46, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:11,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53313.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:12:16,565 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 13:12:23,269 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:34,907 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-01 13:12:35,593 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53333.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:50,182 INFO [train.py:903] (1/4) Epoch 8, batch 5550, loss[loss=0.2196, simple_loss=0.2965, pruned_loss=0.07131, over 19599.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3179, pruned_loss=0.09008, over 3818578.46 frames. 
], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:56,429 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4558, 2.2510, 1.8737, 1.7530, 1.6619, 1.8154, 0.4628, 1.1987], + device='cuda:1'), covar=tensor([0.0321, 0.0377, 0.0315, 0.0486, 0.0739, 0.0530, 0.0767, 0.0625], + device='cuda:1'), in_proj_covar=tensor([0.0320, 0.0319, 0.0316, 0.0331, 0.0411, 0.0331, 0.0296, 0.0312], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 13:13:00,594 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 13:13:03,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 6.394e+02 7.821e+02 9.803e+02 2.197e+03, threshold=1.564e+03, percent-clipped=2.0 +2023-04-01 13:13:40,338 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:13:50,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 13:13:53,923 INFO [train.py:903] (1/4) Epoch 8, batch 5600, loss[loss=0.2087, simple_loss=0.2866, pruned_loss=0.06545, over 19741.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3174, pruned_loss=0.08928, over 3821018.13 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:14:21,365 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:14:57,497 INFO [train.py:903] (1/4) Epoch 8, batch 5650, loss[loss=0.2595, simple_loss=0.3349, pruned_loss=0.09201, over 18166.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3182, pruned_loss=0.08976, over 3823845.20 frames. ], batch size: 83, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:15:07,835 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.837e+02 5.961e+02 7.306e+02 9.270e+02 2.985e+03, threshold=1.461e+03, percent-clipped=1.0 +2023-04-01 13:15:45,791 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 13:15:46,224 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:15:58,760 INFO [train.py:903] (1/4) Epoch 8, batch 5700, loss[loss=0.2443, simple_loss=0.3235, pruned_loss=0.08257, over 19662.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3181, pruned_loss=0.08934, over 3828686.88 frames. ], batch size: 55, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:16:15,382 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:44,894 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:59,703 INFO [train.py:903] (1/4) Epoch 8, batch 5750, loss[loss=0.2025, simple_loss=0.2849, pruned_loss=0.06002, over 19692.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3171, pruned_loss=0.08913, over 3809638.20 frames. ], batch size: 53, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:17:00,947 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 13:17:10,146 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-01 13:17:10,455 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 13:17:11,664 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 5.699e+02 6.647e+02 8.184e+02 1.829e+03, threshold=1.329e+03, percent-clipped=1.0 +2023-04-01 13:17:15,818 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 13:18:02,105 INFO [train.py:903] (1/4) Epoch 8, batch 5800, loss[loss=0.2434, simple_loss=0.3249, pruned_loss=0.0809, over 19688.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3181, pruned_loss=0.08997, over 3818457.31 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:18:12,770 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:18:56,128 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-01 13:19:04,521 INFO [train.py:903] (1/4) Epoch 8, batch 5850, loss[loss=0.2493, simple_loss=0.3233, pruned_loss=0.0876, over 19790.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3174, pruned_loss=0.08937, over 3818661.80 frames. ], batch size: 56, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:19:15,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.371e+02 7.746e+02 9.220e+02 2.993e+03, threshold=1.549e+03, percent-clipped=10.0 +2023-04-01 13:19:27,608 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:42,004 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:05,078 INFO [train.py:903] (1/4) Epoch 8, batch 5900, loss[loss=0.253, simple_loss=0.3185, pruned_loss=0.09378, over 19742.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.317, pruned_loss=0.08945, over 3827493.01 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:20:09,570 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 13:20:30,254 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 13:20:30,494 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8924, 4.2066, 4.5564, 4.4957, 2.0051, 4.2002, 3.7218, 4.1902], + device='cuda:1'), covar=tensor([0.1220, 0.1103, 0.0534, 0.0524, 0.4381, 0.0550, 0.0543, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0611, 0.0534, 0.0727, 0.0606, 0.0669, 0.0469, 0.0456, 0.0663], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 13:20:32,883 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:47,011 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53729.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:21:06,832 INFO [train.py:903] (1/4) Epoch 8, batch 5950, loss[loss=0.2292, simple_loss=0.3072, pruned_loss=0.07562, over 19745.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3181, pruned_loss=0.09026, over 3812315.36 frames. 
], batch size: 63, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:21:19,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 5.886e+02 7.195e+02 1.025e+03 2.007e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-04-01 13:21:50,794 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:21:52,059 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3365, 1.3409, 1.8884, 1.5010, 3.1275, 2.5346, 3.2571, 1.3153], + device='cuda:1'), covar=tensor([0.2203, 0.3596, 0.2244, 0.1766, 0.1432, 0.1742, 0.1574, 0.3432], + device='cuda:1'), in_proj_covar=tensor([0.0469, 0.0543, 0.0549, 0.0418, 0.0579, 0.0478, 0.0641, 0.0476], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 13:22:00,062 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:04,689 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:09,974 INFO [train.py:903] (1/4) Epoch 8, batch 6000, loss[loss=0.2068, simple_loss=0.2781, pruned_loss=0.06769, over 19363.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3188, pruned_loss=0.09071, over 3819485.86 frames. ], batch size: 44, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:22:09,975 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 13:22:18,966 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8288, 3.3955, 2.6555, 3.3843, 0.8755, 3.2565, 3.3366, 3.4363], + device='cuda:1'), covar=tensor([0.0788, 0.0927, 0.1873, 0.0729, 0.4143, 0.1107, 0.0747, 0.0889], + device='cuda:1'), in_proj_covar=tensor([0.0406, 0.0347, 0.0409, 0.0303, 0.0369, 0.0336, 0.0329, 0.0364], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 13:22:22,632 INFO [train.py:937] (1/4) Epoch 8, validation: loss=0.1864, simple_loss=0.2865, pruned_loss=0.04314, over 944034.00 frames. +2023-04-01 13:22:22,633 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 13:22:48,334 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:54,263 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.44 vs. limit=5.0 +2023-04-01 13:23:24,848 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53844.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:26,617 INFO [train.py:903] (1/4) Epoch 8, batch 6050, loss[loss=0.2489, simple_loss=0.324, pruned_loss=0.08685, over 19652.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3194, pruned_loss=0.09084, over 3820298.95 frames. 
], batch size: 55, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:23:39,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.583e+02 7.013e+02 9.917e+02 2.418e+03, threshold=1.403e+03, percent-clipped=8.0 +2023-04-01 13:23:47,675 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2248, 1.5229, 2.0719, 1.5977, 3.2107, 4.9009, 4.6871, 5.0792], + device='cuda:1'), covar=tensor([0.1503, 0.2955, 0.2685, 0.1750, 0.0381, 0.0111, 0.0130, 0.0079], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0286, 0.0316, 0.0246, 0.0208, 0.0141, 0.0206, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 13:24:30,145 INFO [train.py:903] (1/4) Epoch 8, batch 6100, loss[loss=0.283, simple_loss=0.3465, pruned_loss=0.1097, over 19434.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3176, pruned_loss=0.08951, over 3833180.86 frames. ], batch size: 70, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:24:37,005 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8850, 4.4362, 2.6879, 3.8090, 1.0400, 4.0348, 4.0785, 4.2784], + device='cuda:1'), covar=tensor([0.0567, 0.1007, 0.1930, 0.0790, 0.3999, 0.0750, 0.0826, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0347, 0.0410, 0.0307, 0.0372, 0.0335, 0.0329, 0.0365], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 13:24:52,630 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2208, 1.2528, 1.5717, 1.4000, 2.2426, 1.9585, 2.2877, 0.7496], + device='cuda:1'), covar=tensor([0.2033, 0.3548, 0.2040, 0.1645, 0.1316, 0.1752, 0.1300, 0.3402], + device='cuda:1'), in_proj_covar=tensor([0.0465, 0.0540, 0.0548, 0.0416, 0.0576, 0.0472, 0.0636, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 13:25:31,563 INFO [train.py:903] (1/4) Epoch 8, batch 6150, loss[loss=0.2715, simple_loss=0.3404, pruned_loss=0.1013, over 19526.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.318, pruned_loss=0.0893, over 3827064.58 frames. ], batch size: 54, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:25:42,184 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.923e+02 5.703e+02 7.303e+02 8.849e+02 1.874e+03, threshold=1.461e+03, percent-clipped=4.0 +2023-04-01 13:25:56,461 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 13:26:07,892 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:33,372 INFO [train.py:903] (1/4) Epoch 8, batch 6200, loss[loss=0.2205, simple_loss=0.2928, pruned_loss=0.07414, over 19738.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3181, pruned_loss=0.08953, over 3818463.52 frames. ], batch size: 46, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:26:37,440 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:25,494 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54037.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:37,612 INFO [train.py:903] (1/4) Epoch 8, batch 6250, loss[loss=0.2783, simple_loss=0.3485, pruned_loss=0.104, over 18072.00 frames. 
], tot_loss[loss=0.2484, simple_loss=0.318, pruned_loss=0.08943, over 3819187.42 frames. ], batch size: 83, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:27:40,337 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:49,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.888e+02 5.723e+02 7.068e+02 9.572e+02 2.133e+03, threshold=1.414e+03, percent-clipped=6.0 +2023-04-01 13:27:58,068 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:04,583 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 13:28:10,762 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:40,882 INFO [train.py:903] (1/4) Epoch 8, batch 6300, loss[loss=0.234, simple_loss=0.3144, pruned_loss=0.07677, over 19582.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3193, pruned_loss=0.08994, over 3818574.29 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:28:45,535 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:16,204 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:41,755 INFO [train.py:903] (1/4) Epoch 8, batch 6350, loss[loss=0.2138, simple_loss=0.2837, pruned_loss=0.07201, over 19385.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3204, pruned_loss=0.09052, over 3820221.99 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:29:52,049 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.055e+02 7.520e+02 8.667e+02 2.456e+03, threshold=1.504e+03, percent-clipped=3.0 +2023-04-01 13:30:43,419 INFO [train.py:903] (1/4) Epoch 8, batch 6400, loss[loss=0.2797, simple_loss=0.3509, pruned_loss=0.1043, over 19320.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.319, pruned_loss=0.09013, over 3808949.84 frames. ], batch size: 66, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:45,991 INFO [train.py:903] (1/4) Epoch 8, batch 6450, loss[loss=0.2351, simple_loss=0.3136, pruned_loss=0.07833, over 19696.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3187, pruned_loss=0.08954, over 3823171.93 frames. ], batch size: 59, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:58,338 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 5.571e+02 6.761e+02 8.500e+02 1.702e+03, threshold=1.352e+03, percent-clipped=3.0 +2023-04-01 13:32:21,619 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-01 13:32:30,246 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 13:32:48,752 INFO [train.py:903] (1/4) Epoch 8, batch 6500, loss[loss=0.2204, simple_loss=0.2877, pruned_loss=0.07652, over 19784.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3203, pruned_loss=0.0904, over 3822392.27 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:32:54,426 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 13:33:34,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8909, 4.4099, 2.5275, 3.8089, 1.0043, 4.1569, 4.1283, 4.3306], + device='cuda:1'), covar=tensor([0.0540, 0.0955, 0.2068, 0.0675, 0.4092, 0.0752, 0.0730, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0345, 0.0411, 0.0303, 0.0371, 0.0334, 0.0328, 0.0362], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 13:33:50,182 INFO [train.py:903] (1/4) Epoch 8, batch 6550, loss[loss=0.2867, simple_loss=0.3505, pruned_loss=0.1115, over 18824.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.32, pruned_loss=0.09051, over 3814447.03 frames. ], batch size: 74, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:34:00,555 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+02 6.329e+02 8.151e+02 1.084e+03 2.341e+03, threshold=1.630e+03, percent-clipped=12.0 +2023-04-01 13:34:51,146 INFO [train.py:903] (1/4) Epoch 8, batch 6600, loss[loss=0.2729, simple_loss=0.3382, pruned_loss=0.1038, over 19671.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3211, pruned_loss=0.09107, over 3807119.16 frames. ], batch size: 60, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:35:53,829 INFO [train.py:903] (1/4) Epoch 8, batch 6650, loss[loss=0.2649, simple_loss=0.3381, pruned_loss=0.09585, over 19706.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3213, pruned_loss=0.0915, over 3800237.22 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:35:56,425 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3454, 3.7398, 3.8845, 3.8952, 1.5394, 3.5409, 3.2124, 3.5948], + device='cuda:1'), covar=tensor([0.1300, 0.0756, 0.0650, 0.0655, 0.4441, 0.0680, 0.0687, 0.1122], + device='cuda:1'), in_proj_covar=tensor([0.0611, 0.0532, 0.0726, 0.0605, 0.0659, 0.0470, 0.0455, 0.0663], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 13:36:04,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.746e+02 5.865e+02 7.808e+02 1.010e+03 1.907e+03, threshold=1.562e+03, percent-clipped=2.0 +2023-04-01 13:36:19,259 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3293, 3.0170, 2.3385, 2.2960, 1.8552, 2.4617, 0.7147, 2.0537], + device='cuda:1'), covar=tensor([0.0395, 0.0335, 0.0399, 0.0679, 0.0832, 0.0686, 0.0876, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0325, 0.0321, 0.0317, 0.0332, 0.0413, 0.0338, 0.0296, 0.0315], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 13:36:55,567 INFO [train.py:903] (1/4) Epoch 8, batch 6700, loss[loss=0.2155, simple_loss=0.2886, pruned_loss=0.07119, over 19592.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3202, pruned_loss=0.09129, over 3805467.38 frames. ], batch size: 52, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:37:52,319 INFO [train.py:903] (1/4) Epoch 8, batch 6750, loss[loss=0.2513, simple_loss=0.333, pruned_loss=0.08473, over 19513.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3202, pruned_loss=0.09148, over 3794144.70 frames. 
], batch size: 64, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:38:03,630 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.992e+02 6.380e+02 7.224e+02 9.353e+02 2.017e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 13:38:50,507 INFO [train.py:903] (1/4) Epoch 8, batch 6800, loss[loss=0.2582, simple_loss=0.328, pruned_loss=0.09417, over 17187.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3194, pruned_loss=0.09137, over 3791421.84 frames. ], batch size: 101, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:39:34,403 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 13:39:34,850 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 13:39:38,462 INFO [train.py:903] (1/4) Epoch 9, batch 0, loss[loss=0.2631, simple_loss=0.3346, pruned_loss=0.09575, over 19682.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3346, pruned_loss=0.09575, over 19682.00 frames. ], batch size: 58, lr: 9.56e-03, grad_scale: 8.0 +2023-04-01 13:39:38,462 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 13:39:49,508 INFO [train.py:937] (1/4) Epoch 9, validation: loss=0.1866, simple_loss=0.2872, pruned_loss=0.04294, over 944034.00 frames. +2023-04-01 13:39:49,509 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 13:40:03,797 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 13:40:28,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.879e+02 5.500e+02 7.208e+02 8.930e+02 1.459e+03, threshold=1.442e+03, percent-clipped=1.0 +2023-04-01 13:40:51,455 INFO [train.py:903] (1/4) Epoch 9, batch 50, loss[loss=0.2379, simple_loss=0.3037, pruned_loss=0.08599, over 19832.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3159, pruned_loss=0.08874, over 870420.51 frames. ], batch size: 52, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:41:02,951 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54682.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:41:26,449 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 13:41:53,037 INFO [train.py:903] (1/4) Epoch 9, batch 100, loss[loss=0.2322, simple_loss=0.2894, pruned_loss=0.08752, over 18655.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.314, pruned_loss=0.08766, over 1527815.80 frames. ], batch size: 41, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:42:05,359 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 13:42:31,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.564e+02 7.190e+02 9.001e+02 2.877e+03, threshold=1.438e+03, percent-clipped=2.0 +2023-04-01 13:42:53,294 INFO [train.py:903] (1/4) Epoch 9, batch 150, loss[loss=0.3229, simple_loss=0.3751, pruned_loss=0.1354, over 19282.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.316, pruned_loss=0.08852, over 2050723.76 frames. ], batch size: 66, lr: 9.54e-03, grad_scale: 16.0 +2023-04-01 13:43:53,897 INFO [train.py:903] (1/4) Epoch 9, batch 200, loss[loss=0.2931, simple_loss=0.3573, pruned_loss=0.1144, over 19593.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3169, pruned_loss=0.08874, over 2450984.75 frames. 
], batch size: 61, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:43:56,330 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 13:44:03,609 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8205, 1.7684, 1.6872, 2.0185, 1.9734, 1.6943, 1.7406, 1.8687], + device='cuda:1'), covar=tensor([0.0711, 0.1135, 0.0955, 0.0593, 0.0772, 0.0409, 0.0812, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0355, 0.0290, 0.0239, 0.0297, 0.0244, 0.0269, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 13:44:36,452 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.883e+02 7.738e+02 9.204e+02 1.688e+03, threshold=1.548e+03, percent-clipped=2.0 +2023-04-01 13:44:47,200 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9010, 1.6711, 1.8374, 1.7003, 4.3558, 1.0474, 2.4778, 4.5587], + device='cuda:1'), covar=tensor([0.0313, 0.2440, 0.2430, 0.1795, 0.0624, 0.2513, 0.1237, 0.0240], + device='cuda:1'), in_proj_covar=tensor([0.0332, 0.0330, 0.0339, 0.0309, 0.0339, 0.0323, 0.0313, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 13:44:57,168 INFO [train.py:903] (1/4) Epoch 9, batch 250, loss[loss=0.2622, simple_loss=0.3356, pruned_loss=0.09437, over 19659.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.318, pruned_loss=0.08911, over 2761494.45 frames. ], batch size: 55, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:45:06,558 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:45:57,849 INFO [train.py:903] (1/4) Epoch 9, batch 300, loss[loss=0.2394, simple_loss=0.3207, pruned_loss=0.079, over 19693.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3185, pruned_loss=0.08997, over 2999747.61 frames. ], batch size: 59, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:46:39,713 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 5.660e+02 7.032e+02 9.457e+02 2.087e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-04-01 13:47:01,338 INFO [train.py:903] (1/4) Epoch 9, batch 350, loss[loss=0.2227, simple_loss=0.2923, pruned_loss=0.07652, over 19774.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3183, pruned_loss=0.08989, over 3180076.98 frames. ], batch size: 47, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:47:07,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:48:03,063 INFO [train.py:903] (1/4) Epoch 9, batch 400, loss[loss=0.2169, simple_loss=0.3026, pruned_loss=0.06566, over 19661.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3189, pruned_loss=0.09031, over 3311386.04 frames. ], batch size: 58, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:48:06,560 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55026.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:48:38,431 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-04-01 13:48:44,523 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.195e+02 5.719e+02 7.898e+02 1.018e+03 2.327e+03, threshold=1.580e+03, percent-clipped=4.0 +2023-04-01 13:49:04,314 INFO [train.py:903] (1/4) Epoch 9, batch 450, loss[loss=0.261, simple_loss=0.3379, pruned_loss=0.09204, over 19616.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3187, pruned_loss=0.09005, over 3433701.51 frames. ], batch size: 57, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:49:42,318 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 13:49:43,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 13:50:07,765 INFO [train.py:903] (1/4) Epoch 9, batch 500, loss[loss=0.3412, simple_loss=0.3818, pruned_loss=0.1503, over 13660.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3179, pruned_loss=0.08961, over 3526656.37 frames. ], batch size: 136, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:50:30,920 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55141.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:50:47,944 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 6.183e+02 7.261e+02 8.870e+02 1.589e+03, threshold=1.452e+03, percent-clipped=1.0 +2023-04-01 13:51:01,602 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:51:10,561 INFO [train.py:903] (1/4) Epoch 9, batch 550, loss[loss=0.2193, simple_loss=0.2929, pruned_loss=0.07284, over 19627.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3192, pruned_loss=0.09059, over 3591449.68 frames. ], batch size: 50, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:14,553 INFO [train.py:903] (1/4) Epoch 9, batch 600, loss[loss=0.2758, simple_loss=0.3325, pruned_loss=0.1095, over 19468.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3193, pruned_loss=0.09026, over 3656351.99 frames. ], batch size: 49, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:15,818 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:21,746 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:46,694 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2817, 2.9294, 2.0967, 2.7000, 0.7712, 2.8444, 2.7634, 2.8557], + device='cuda:1'), covar=tensor([0.1114, 0.1462, 0.2190, 0.0996, 0.4059, 0.1122, 0.1049, 0.1319], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0347, 0.0415, 0.0306, 0.0373, 0.0342, 0.0331, 0.0366], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 13:52:55,191 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.838e+02 6.783e+02 8.586e+02 3.812e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 13:52:58,747 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 13:53:16,399 INFO [train.py:903] (1/4) Epoch 9, batch 650, loss[loss=0.2491, simple_loss=0.3243, pruned_loss=0.08698, over 19662.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3195, pruned_loss=0.09029, over 3686702.67 frames. 
], batch size: 60, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:19,295 INFO [train.py:903] (1/4) Epoch 9, batch 700, loss[loss=0.2573, simple_loss=0.3182, pruned_loss=0.09823, over 19860.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3196, pruned_loss=0.0903, over 3722887.95 frames. ], batch size: 52, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:41,865 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55339.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:55:02,469 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.553e+02 6.808e+02 9.255e+02 2.546e+03, threshold=1.362e+03, percent-clipped=4.0 +2023-04-01 13:55:22,970 INFO [train.py:903] (1/4) Epoch 9, batch 750, loss[loss=0.2703, simple_loss=0.3407, pruned_loss=0.1, over 19580.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3194, pruned_loss=0.08985, over 3747936.43 frames. ], batch size: 61, lr: 9.49e-03, grad_scale: 4.0 +2023-04-01 13:55:53,593 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55397.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:08,129 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0634, 5.4054, 2.8572, 4.7746, 1.3237, 5.2744, 5.2957, 5.5067], + device='cuda:1'), covar=tensor([0.0413, 0.1049, 0.1872, 0.0623, 0.3831, 0.0559, 0.0584, 0.0888], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0347, 0.0414, 0.0306, 0.0372, 0.0339, 0.0331, 0.0366], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 13:56:25,354 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 13:56:25,919 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55422.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:26,679 INFO [train.py:903] (1/4) Epoch 9, batch 800, loss[loss=0.2491, simple_loss=0.3314, pruned_loss=0.08343, over 19102.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3199, pruned_loss=0.08987, over 3765683.07 frames. ], batch size: 69, lr: 9.49e-03, grad_scale: 8.0 +2023-04-01 13:56:41,967 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:57:07,881 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.631e+02 7.179e+02 9.586e+02 1.610e+03, threshold=1.436e+03, percent-clipped=4.0 +2023-04-01 13:57:28,767 INFO [train.py:903] (1/4) Epoch 9, batch 850, loss[loss=0.301, simple_loss=0.3525, pruned_loss=0.1248, over 13447.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3187, pruned_loss=0.08931, over 3776847.63 frames. ], batch size: 136, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:57:46,893 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 13:58:09,665 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 13:58:14,385 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:58:22,720 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 13:58:29,590 INFO [train.py:903] (1/4) Epoch 9, batch 900, loss[loss=0.2535, simple_loss=0.3146, pruned_loss=0.09619, over 19763.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3194, pruned_loss=0.08982, over 3791600.08 frames. 
], batch size: 48, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:12,469 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.962e+02 5.974e+02 7.143e+02 8.825e+02 2.413e+03, threshold=1.429e+03, percent-clipped=6.0 +2023-04-01 13:59:32,016 INFO [train.py:903] (1/4) Epoch 9, batch 950, loss[loss=0.2171, simple_loss=0.2915, pruned_loss=0.07137, over 19762.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3183, pruned_loss=0.089, over 3810935.42 frames. ], batch size: 47, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:32,175 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:59:37,628 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 13:59:49,679 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2019, 1.0109, 1.4649, 1.2370, 2.4586, 3.4958, 3.2664, 3.7992], + device='cuda:1'), covar=tensor([0.1840, 0.4719, 0.4104, 0.2312, 0.0659, 0.0254, 0.0314, 0.0204], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0286, 0.0317, 0.0250, 0.0209, 0.0142, 0.0206, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:00:01,045 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:32,397 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:35,414 INFO [train.py:903] (1/4) Epoch 9, batch 1000, loss[loss=0.2406, simple_loss=0.309, pruned_loss=0.0861, over 19683.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3183, pruned_loss=0.08917, over 3808744.53 frames. ], batch size: 58, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:00:37,993 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:44,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 14:01:18,077 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.465e+02 5.596e+02 6.834e+02 8.838e+02 1.578e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 14:01:29,787 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 14:01:39,287 INFO [train.py:903] (1/4) Epoch 9, batch 1050, loss[loss=0.2876, simple_loss=0.3545, pruned_loss=0.1104, over 19687.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.318, pruned_loss=0.0891, over 3802456.40 frames. ], batch size: 58, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:01:57,382 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:02:02,634 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 14:02:10,954 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-01 14:02:35,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2378, 3.7473, 3.8765, 3.8376, 1.4255, 3.5847, 3.1358, 3.5553], + device='cuda:1'), covar=tensor([0.1270, 0.0725, 0.0540, 0.0581, 0.4768, 0.0628, 0.0685, 0.1028], + device='cuda:1'), in_proj_covar=tensor([0.0617, 0.0544, 0.0733, 0.0616, 0.0674, 0.0481, 0.0465, 0.0674], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 14:02:42,619 INFO [train.py:903] (1/4) Epoch 9, batch 1100, loss[loss=0.2652, simple_loss=0.3322, pruned_loss=0.09911, over 19378.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3182, pruned_loss=0.08888, over 3809247.32 frames. ], batch size: 70, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:02:48,791 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8876, 1.9979, 2.0776, 2.8193, 1.8288, 2.5029, 2.4530, 2.0216], + device='cuda:1'), covar=tensor([0.3363, 0.2704, 0.1286, 0.1591, 0.3204, 0.1288, 0.2877, 0.2254], + device='cuda:1'), in_proj_covar=tensor([0.0747, 0.0755, 0.0626, 0.0875, 0.0750, 0.0658, 0.0772, 0.0677], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:03:25,907 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 6.007e+02 7.412e+02 9.315e+02 2.515e+03, threshold=1.482e+03, percent-clipped=6.0 +2023-04-01 14:03:36,850 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55766.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:03:45,595 INFO [train.py:903] (1/4) Epoch 9, batch 1150, loss[loss=0.2165, simple_loss=0.2973, pruned_loss=0.06783, over 19678.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3179, pruned_loss=0.08907, over 3804788.08 frames. ], batch size: 53, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:04:50,175 INFO [train.py:903] (1/4) Epoch 9, batch 1200, loss[loss=0.2044, simple_loss=0.2779, pruned_loss=0.0654, over 19720.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3175, pruned_loss=0.08926, over 3815981.30 frames. ], batch size: 46, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:05:18,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 14:05:31,642 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.284e+02 5.926e+02 7.645e+02 1.010e+03 3.329e+03, threshold=1.529e+03, percent-clipped=6.0 +2023-04-01 14:05:44,738 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:05:44,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1598, 2.0614, 1.7382, 1.5942, 1.5347, 1.7196, 0.2704, 0.8879], + device='cuda:1'), covar=tensor([0.0327, 0.0359, 0.0255, 0.0433, 0.0745, 0.0423, 0.0746, 0.0662], + device='cuda:1'), in_proj_covar=tensor([0.0314, 0.0315, 0.0311, 0.0327, 0.0403, 0.0330, 0.0290, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:05:53,923 INFO [train.py:903] (1/4) Epoch 9, batch 1250, loss[loss=0.2451, simple_loss=0.3204, pruned_loss=0.08489, over 19300.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.318, pruned_loss=0.08949, over 3808428.83 frames. 
], batch size: 66, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:06:03,173 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:09,655 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 14:06:35,374 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:36,769 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.23 vs. limit=5.0 +2023-04-01 14:06:55,976 INFO [train.py:903] (1/4) Epoch 9, batch 1300, loss[loss=0.2316, simple_loss=0.2974, pruned_loss=0.08288, over 19579.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.317, pruned_loss=0.08875, over 3819500.36 frames. ], batch size: 52, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:07:23,894 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:39,624 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.793e+02 6.909e+02 8.321e+02 2.022e+03, threshold=1.382e+03, percent-clipped=2.0 +2023-04-01 14:07:54,328 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:58,594 INFO [train.py:903] (1/4) Epoch 9, batch 1350, loss[loss=0.2224, simple_loss=0.3015, pruned_loss=0.07169, over 19587.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3174, pruned_loss=0.08899, over 3814743.53 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:08:43,438 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 14:09:02,608 INFO [train.py:903] (1/4) Epoch 9, batch 1400, loss[loss=0.234, simple_loss=0.3146, pruned_loss=0.07666, over 19662.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3179, pruned_loss=0.08926, over 3821924.76 frames. ], batch size: 55, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:09:19,733 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6415, 1.6897, 1.4195, 1.2710, 1.0781, 1.2741, 0.2705, 0.5319], + device='cuda:1'), covar=tensor([0.0543, 0.0526, 0.0335, 0.0474, 0.1118, 0.0554, 0.0774, 0.0906], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0316, 0.0314, 0.0329, 0.0405, 0.0333, 0.0291, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:09:37,353 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0671, 1.2283, 1.3260, 1.3677, 2.6404, 0.9404, 1.7817, 2.7826], + device='cuda:1'), covar=tensor([0.0449, 0.2458, 0.2629, 0.1574, 0.0731, 0.2391, 0.1274, 0.0417], + device='cuda:1'), in_proj_covar=tensor([0.0327, 0.0326, 0.0337, 0.0307, 0.0333, 0.0325, 0.0314, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:09:47,082 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.536e+02 7.314e+02 8.995e+02 2.483e+03, threshold=1.463e+03, percent-clipped=9.0 +2023-04-01 14:10:07,183 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 14:10:08,163 INFO [train.py:903] (1/4) Epoch 9, batch 1450, loss[loss=0.3054, simple_loss=0.3682, pruned_loss=0.1213, over 19412.00 frames. 
], tot_loss[loss=0.2495, simple_loss=0.319, pruned_loss=0.09, over 3811682.56 frames. ], batch size: 70, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:10:27,134 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8555, 4.2721, 4.4980, 4.4807, 1.4667, 4.0773, 3.6938, 4.1469], + device='cuda:1'), covar=tensor([0.1211, 0.0573, 0.0497, 0.0518, 0.5133, 0.0470, 0.0613, 0.0989], + device='cuda:1'), in_proj_covar=tensor([0.0616, 0.0543, 0.0740, 0.0621, 0.0682, 0.0486, 0.0467, 0.0677], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 14:10:55,232 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:11:11,145 INFO [train.py:903] (1/4) Epoch 9, batch 1500, loss[loss=0.2491, simple_loss=0.3192, pruned_loss=0.08948, over 19742.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3198, pruned_loss=0.0905, over 3824139.52 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:11:12,165 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 14:11:39,374 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3880, 1.4371, 2.2816, 1.6510, 2.8049, 2.3096, 2.9444, 1.5315], + device='cuda:1'), covar=tensor([0.2233, 0.3693, 0.1965, 0.1672, 0.1531, 0.1870, 0.1827, 0.3273], + device='cuda:1'), in_proj_covar=tensor([0.0474, 0.0545, 0.0555, 0.0422, 0.0573, 0.0473, 0.0637, 0.0472], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:11:40,562 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0130, 0.9374, 1.0543, 0.9878, 1.6858, 0.7507, 1.4974, 1.7957], + device='cuda:1'), covar=tensor([0.0525, 0.2063, 0.2055, 0.1260, 0.0790, 0.1671, 0.0810, 0.0436], + device='cuda:1'), in_proj_covar=tensor([0.0329, 0.0328, 0.0338, 0.0308, 0.0333, 0.0326, 0.0316, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:11:52,401 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.776e+02 6.008e+02 7.164e+02 9.066e+02 2.093e+03, threshold=1.433e+03, percent-clipped=3.0 +2023-04-01 14:11:54,314 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 14:12:01,103 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:12:12,300 INFO [train.py:903] (1/4) Epoch 9, batch 1550, loss[loss=0.2132, simple_loss=0.2905, pruned_loss=0.068, over 19565.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3207, pruned_loss=0.0912, over 3821741.02 frames. 
], batch size: 61, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:12:19,309 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0520, 5.4177, 2.7174, 4.7060, 1.1010, 5.2737, 5.3064, 5.5164], + device='cuda:1'), covar=tensor([0.0432, 0.0952, 0.2081, 0.0690, 0.4195, 0.0598, 0.0657, 0.0841], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0344, 0.0413, 0.0306, 0.0374, 0.0341, 0.0334, 0.0367], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 14:12:59,841 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:15,315 INFO [train.py:903] (1/4) Epoch 9, batch 1600, loss[loss=0.2257, simple_loss=0.2968, pruned_loss=0.07734, over 19858.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3194, pruned_loss=0.09001, over 3831524.92 frames. ], batch size: 52, lr: 9.42e-03, grad_scale: 8.0 +2023-04-01 14:13:18,970 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:41,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 14:13:59,883 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.781e+02 6.769e+02 8.617e+02 2.222e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-01 14:14:16,067 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2273, 1.2898, 1.1655, 1.0111, 1.0754, 1.1095, 0.1042, 0.4201], + device='cuda:1'), covar=tensor([0.0413, 0.0424, 0.0242, 0.0322, 0.0812, 0.0350, 0.0678, 0.0647], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0315, 0.0315, 0.0331, 0.0406, 0.0334, 0.0293, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:14:19,018 INFO [train.py:903] (1/4) Epoch 9, batch 1650, loss[loss=0.2082, simple_loss=0.2681, pruned_loss=0.07411, over 19777.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3198, pruned_loss=0.09003, over 3834392.93 frames. ], batch size: 46, lr: 9.42e-03, grad_scale: 4.0 +2023-04-01 14:14:21,984 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9041, 2.0335, 2.0732, 2.8437, 1.9456, 2.5659, 2.4783, 1.9021], + device='cuda:1'), covar=tensor([0.3113, 0.2505, 0.1280, 0.1346, 0.2877, 0.1213, 0.2781, 0.2315], + device='cuda:1'), in_proj_covar=tensor([0.0743, 0.0751, 0.0624, 0.0864, 0.0747, 0.0661, 0.0764, 0.0673], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:15:22,544 INFO [train.py:903] (1/4) Epoch 9, batch 1700, loss[loss=0.2272, simple_loss=0.31, pruned_loss=0.0722, over 19400.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3207, pruned_loss=0.09075, over 3825558.25 frames. ], batch size: 70, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:15:25,162 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:16:02,358 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 14:16:05,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.619e+02 6.768e+02 8.904e+02 2.101e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-04-01 14:16:24,636 INFO [train.py:903] (1/4) Epoch 9, batch 1750, loss[loss=0.2499, simple_loss=0.3232, pruned_loss=0.08826, over 19635.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3202, pruned_loss=0.09076, over 3808759.96 frames. ], batch size: 57, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:17:26,692 INFO [train.py:903] (1/4) Epoch 9, batch 1800, loss[loss=0.2219, simple_loss=0.3007, pruned_loss=0.0715, over 19832.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3204, pruned_loss=0.09119, over 3793962.79 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:09,953 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.839e+02 7.002e+02 8.564e+02 1.629e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 14:18:25,587 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 14:18:29,983 INFO [train.py:903] (1/4) Epoch 9, batch 1850, loss[loss=0.3003, simple_loss=0.3556, pruned_loss=0.1225, over 19581.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3206, pruned_loss=0.09108, over 3799672.76 frames. ], batch size: 61, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:36,522 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 14:18:40,562 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:02,323 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 14:19:06,824 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9197, 4.3248, 4.6113, 4.6313, 1.7889, 4.3422, 3.8436, 4.2495], + device='cuda:1'), covar=tensor([0.1090, 0.0652, 0.0515, 0.0446, 0.4182, 0.0477, 0.0528, 0.0972], + device='cuda:1'), in_proj_covar=tensor([0.0620, 0.0543, 0.0742, 0.0619, 0.0686, 0.0485, 0.0463, 0.0684], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 14:19:11,098 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:12,018 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:33,000 INFO [train.py:903] (1/4) Epoch 9, batch 1900, loss[loss=0.2395, simple_loss=0.2954, pruned_loss=0.09185, over 19192.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3192, pruned_loss=0.09002, over 3807966.36 frames. ], batch size: 42, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:19:48,395 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 14:19:54,892 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-01 14:19:55,176 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56541.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:20:16,637 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.665e+02 6.760e+02 8.403e+02 1.758e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-04-01 14:20:18,933 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 14:20:35,969 INFO [train.py:903] (1/4) Epoch 9, batch 1950, loss[loss=0.2453, simple_loss=0.3159, pruned_loss=0.08742, over 19611.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3169, pruned_loss=0.08889, over 3811043.01 frames. ], batch size: 50, lr: 9.39e-03, grad_scale: 4.0 +2023-04-01 14:20:46,780 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:19,019 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:38,528 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:39,356 INFO [train.py:903] (1/4) Epoch 9, batch 2000, loss[loss=0.2778, simple_loss=0.3444, pruned_loss=0.1055, over 19680.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.317, pruned_loss=0.08868, over 3808680.36 frames. ], batch size: 58, lr: 9.39e-03, grad_scale: 8.0 +2023-04-01 14:22:22,876 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.506e+02 7.081e+02 9.090e+02 3.144e+03, threshold=1.416e+03, percent-clipped=7.0 +2023-04-01 14:22:33,281 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9985, 2.0167, 2.1233, 2.8690, 2.0540, 2.7182, 2.5203, 2.0253], + device='cuda:1'), covar=tensor([0.3029, 0.2559, 0.1237, 0.1494, 0.2844, 0.1173, 0.2533, 0.2177], + device='cuda:1'), in_proj_covar=tensor([0.0745, 0.0749, 0.0620, 0.0859, 0.0742, 0.0659, 0.0763, 0.0668], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:22:36,101 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 14:22:42,513 INFO [train.py:903] (1/4) Epoch 9, batch 2050, loss[loss=0.2715, simple_loss=0.3409, pruned_loss=0.1011, over 19533.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3181, pruned_loss=0.08978, over 3805636.01 frames. ], batch size: 56, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:22:56,328 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 14:22:57,511 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 14:23:17,614 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-01 14:23:19,221 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 14:23:44,269 INFO [train.py:903] (1/4) Epoch 9, batch 2100, loss[loss=0.2504, simple_loss=0.3271, pruned_loss=0.08689, over 18128.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3181, pruned_loss=0.08944, over 3816872.74 frames. 
], batch size: 83, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:23:49,639 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 14:23:56,216 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4440, 2.5016, 2.5999, 3.5134, 2.4434, 3.5415, 3.1731, 2.5345], + device='cuda:1'), covar=tensor([0.3245, 0.2708, 0.1169, 0.1567, 0.3185, 0.1150, 0.2561, 0.2060], + device='cuda:1'), in_proj_covar=tensor([0.0754, 0.0758, 0.0628, 0.0871, 0.0749, 0.0668, 0.0775, 0.0678], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:24:12,117 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 14:24:29,329 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 5.573e+02 6.906e+02 8.990e+02 1.566e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-01 14:24:35,327 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 14:24:48,312 INFO [train.py:903] (1/4) Epoch 9, batch 2150, loss[loss=0.2512, simple_loss=0.329, pruned_loss=0.0867, over 19783.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3179, pruned_loss=0.08875, over 3816791.22 frames. ], batch size: 63, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:25:11,566 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1323, 1.8276, 1.6985, 2.1516, 1.9280, 1.8263, 1.6737, 2.0697], + device='cuda:1'), covar=tensor([0.0840, 0.1448, 0.1302, 0.0864, 0.1190, 0.0499, 0.1142, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0348, 0.0285, 0.0236, 0.0296, 0.0241, 0.0269, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:25:46,482 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6220, 1.8729, 2.2309, 2.0519, 3.1413, 3.6696, 3.6185, 3.8666], + device='cuda:1'), covar=tensor([0.1371, 0.2640, 0.2419, 0.1620, 0.0708, 0.0144, 0.0207, 0.0200], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0289, 0.0319, 0.0248, 0.0208, 0.0142, 0.0204, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:25:48,792 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56821.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:25:50,901 INFO [train.py:903] (1/4) Epoch 9, batch 2200, loss[loss=0.2073, simple_loss=0.2724, pruned_loss=0.07115, over 19717.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3166, pruned_loss=0.08844, over 3818912.42 frames. ], batch size: 46, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:26:36,146 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 5.854e+02 7.300e+02 9.690e+02 2.298e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 14:26:57,251 INFO [train.py:903] (1/4) Epoch 9, batch 2250, loss[loss=0.2376, simple_loss=0.3127, pruned_loss=0.08128, over 19609.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3163, pruned_loss=0.08767, over 3823365.63 frames. 
], batch size: 57, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:27:03,680 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:27:08,852 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-01 14:27:11,763 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56885.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:27:34,021 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:00,647 INFO [train.py:903] (1/4) Epoch 9, batch 2300, loss[loss=0.2329, simple_loss=0.2992, pruned_loss=0.0833, over 19728.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3148, pruned_loss=0.08685, over 3824396.43 frames. ], batch size: 51, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:28:05,834 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:13,792 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 14:28:24,092 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2561, 1.2967, 1.6295, 1.4237, 2.6393, 2.2319, 2.9151, 1.1670], + device='cuda:1'), covar=tensor([0.2149, 0.3567, 0.2266, 0.1742, 0.1345, 0.1746, 0.1443, 0.3315], + device='cuda:1'), in_proj_covar=tensor([0.0466, 0.0537, 0.0553, 0.0417, 0.0572, 0.0470, 0.0633, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:28:46,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.809e+02 7.207e+02 9.233e+02 1.673e+03, threshold=1.441e+03, percent-clipped=4.0 +2023-04-01 14:29:05,091 INFO [train.py:903] (1/4) Epoch 9, batch 2350, loss[loss=0.253, simple_loss=0.3127, pruned_loss=0.09666, over 19472.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.315, pruned_loss=0.08695, over 3815507.86 frames. ], batch size: 49, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:29:40,044 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57000.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:29:46,431 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 14:29:53,646 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:29:55,788 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:30:01,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 14:30:07,031 INFO [train.py:903] (1/4) Epoch 9, batch 2400, loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.06659, over 19677.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3152, pruned_loss=0.08731, over 3815510.99 frames. 
], batch size: 53, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:30:31,225 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6737, 1.8265, 2.2419, 2.7555, 2.4100, 2.0825, 2.2021, 2.8567], + device='cuda:1'), covar=tensor([0.0718, 0.1628, 0.1109, 0.0853, 0.1075, 0.0482, 0.0951, 0.0449], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0349, 0.0290, 0.0239, 0.0297, 0.0243, 0.0270, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:30:51,626 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.434e+02 5.803e+02 7.402e+02 8.988e+02 1.700e+03, threshold=1.480e+03, percent-clipped=2.0 +2023-04-01 14:31:11,535 INFO [train.py:903] (1/4) Epoch 9, batch 2450, loss[loss=0.219, simple_loss=0.2807, pruned_loss=0.07868, over 19759.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3157, pruned_loss=0.08694, over 3824335.68 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:31:34,284 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2763, 2.9659, 2.2353, 2.7541, 0.7853, 2.7918, 2.7505, 2.8453], + device='cuda:1'), covar=tensor([0.1180, 0.1534, 0.2010, 0.0959, 0.3932, 0.1135, 0.1009, 0.1252], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0348, 0.0413, 0.0304, 0.0371, 0.0339, 0.0332, 0.0366], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 14:32:15,742 INFO [train.py:903] (1/4) Epoch 9, batch 2500, loss[loss=0.2511, simple_loss=0.3277, pruned_loss=0.08729, over 19618.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3152, pruned_loss=0.0869, over 3833661.93 frames. ], batch size: 57, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:33:00,911 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.325e+02 6.954e+02 9.918e+02 1.981e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 14:33:09,586 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57165.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:33:19,772 INFO [train.py:903] (1/4) Epoch 9, batch 2550, loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06582, over 18257.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3141, pruned_loss=0.08629, over 3836634.93 frames. ], batch size: 83, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:33:59,381 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57204.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:13,726 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:15,575 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 14:34:22,473 INFO [train.py:903] (1/4) Epoch 9, batch 2600, loss[loss=0.2708, simple_loss=0.3437, pruned_loss=0.09895, over 19508.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3147, pruned_loss=0.0864, over 3825350.67 frames. 
], batch size: 56, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:05,270 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57256.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:35:07,158 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.816e+02 6.932e+02 7.774e+02 1.592e+03, threshold=1.386e+03, percent-clipped=3.0 +2023-04-01 14:35:12,160 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:22,411 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57270.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:25,689 INFO [train.py:903] (1/4) Epoch 9, batch 2650, loss[loss=0.3496, simple_loss=0.3941, pruned_loss=0.1526, over 13157.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3134, pruned_loss=0.08568, over 3827486.85 frames. ], batch size: 136, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:36,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:37,721 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57281.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:35:45,907 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 14:36:31,183 INFO [train.py:903] (1/4) Epoch 9, batch 2700, loss[loss=0.2766, simple_loss=0.3482, pruned_loss=0.1025, over 19614.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.315, pruned_loss=0.08671, over 3825028.68 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:12,076 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:13,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-01 14:37:14,482 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:15,519 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.587e+02 5.592e+02 7.209e+02 8.892e+02 3.755e+03, threshold=1.442e+03, percent-clipped=7.0 +2023-04-01 14:37:28,473 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9685, 1.8058, 2.0606, 1.9772, 4.5222, 1.1484, 2.5546, 4.7731], + device='cuda:1'), covar=tensor([0.0265, 0.2160, 0.2189, 0.1457, 0.0535, 0.2332, 0.1136, 0.0176], + device='cuda:1'), in_proj_covar=tensor([0.0333, 0.0324, 0.0337, 0.0302, 0.0333, 0.0321, 0.0308, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:37:30,697 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3137, 2.2470, 1.8934, 1.7262, 1.6567, 1.8067, 0.4209, 1.0694], + device='cuda:1'), covar=tensor([0.0344, 0.0314, 0.0268, 0.0407, 0.0667, 0.0442, 0.0693, 0.0618], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0316, 0.0314, 0.0328, 0.0412, 0.0332, 0.0294, 0.0314], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:37:33,719 INFO [train.py:903] (1/4) Epoch 9, batch 2750, loss[loss=0.2223, simple_loss=0.3054, pruned_loss=0.06956, over 19793.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3165, pruned_loss=0.0879, over 3804658.59 frames. 
], batch size: 56, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:49,387 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:30,590 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:38,517 INFO [train.py:903] (1/4) Epoch 9, batch 2800, loss[loss=0.2407, simple_loss=0.315, pruned_loss=0.08323, over 19311.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3173, pruned_loss=0.08783, over 3800043.89 frames. ], batch size: 66, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:23,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.525e+02 6.599e+02 8.446e+02 1.316e+03, threshold=1.320e+03, percent-clipped=0.0 +2023-04-01 14:39:38,423 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:41,722 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:42,554 INFO [train.py:903] (1/4) Epoch 9, batch 2850, loss[loss=0.2208, simple_loss=0.3034, pruned_loss=0.06911, over 17138.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3176, pruned_loss=0.0884, over 3792935.85 frames. ], batch size: 101, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:51,241 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:00,530 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8632, 1.3261, 0.9551, 0.9916, 1.1556, 0.9538, 0.9054, 1.2231], + device='cuda:1'), covar=tensor([0.0498, 0.0673, 0.0985, 0.0553, 0.0472, 0.1105, 0.0519, 0.0407], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0289, 0.0318, 0.0239, 0.0231, 0.0315, 0.0287, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:40:33,729 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:41,907 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 14:40:46,553 INFO [train.py:903] (1/4) Epoch 9, batch 2900, loss[loss=0.2387, simple_loss=0.3215, pruned_loss=0.0779, over 19622.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3174, pruned_loss=0.08866, over 3783727.40 frames. 
], batch size: 57, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:41:03,523 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:16,949 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:30,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.787e+02 5.829e+02 7.475e+02 8.920e+02 2.516e+03, threshold=1.495e+03, percent-clipped=6.0 +2023-04-01 14:41:32,226 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57559.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:34,839 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57561.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:49,398 INFO [train.py:903] (1/4) Epoch 9, batch 2950, loss[loss=0.2205, simple_loss=0.3098, pruned_loss=0.06564, over 19587.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3164, pruned_loss=0.0875, over 3788021.44 frames. ], batch size: 57, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:41:53,377 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 14:42:32,119 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:42:53,540 INFO [train.py:903] (1/4) Epoch 9, batch 3000, loss[loss=0.2548, simple_loss=0.3238, pruned_loss=0.09292, over 19791.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3169, pruned_loss=0.08825, over 3812962.18 frames. ], batch size: 56, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:53,541 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 14:43:06,203 INFO [train.py:937] (1/4) Epoch 9, validation: loss=0.1831, simple_loss=0.2838, pruned_loss=0.04122, over 944034.00 frames. +2023-04-01 14:43:06,204 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 14:43:08,508 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 14:43:28,402 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:43:50,442 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.061e+02 7.914e+02 9.800e+02 2.087e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-04-01 14:43:56,568 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:43:57,062 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.99 vs. limit=5.0 +2023-04-01 14:44:00,186 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:00,214 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7578, 1.7800, 1.5445, 1.4192, 1.3561, 1.3941, 0.2454, 0.5582], + device='cuda:1'), covar=tensor([0.0339, 0.0342, 0.0228, 0.0338, 0.0744, 0.0387, 0.0662, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0322, 0.0318, 0.0335, 0.0412, 0.0338, 0.0296, 0.0319], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:44:08,768 INFO [train.py:903] (1/4) Epoch 9, batch 3050, loss[loss=0.2904, simple_loss=0.3515, pruned_loss=0.1147, over 19327.00 frames. 
], tot_loss[loss=0.2476, simple_loss=0.3176, pruned_loss=0.08879, over 3810884.29 frames. ], batch size: 66, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:44:10,220 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:33,016 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4677, 3.1106, 2.1669, 2.2941, 2.3191, 2.5061, 0.9608, 2.1461], + device='cuda:1'), covar=tensor([0.0449, 0.0397, 0.0495, 0.0655, 0.0708, 0.0778, 0.0852, 0.0781], + device='cuda:1'), in_proj_covar=tensor([0.0317, 0.0322, 0.0318, 0.0335, 0.0411, 0.0338, 0.0296, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:45:09,137 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:11,163 INFO [train.py:903] (1/4) Epoch 9, batch 3100, loss[loss=0.267, simple_loss=0.3307, pruned_loss=0.1017, over 18736.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3168, pruned_loss=0.08808, over 3826283.87 frames. ], batch size: 74, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:45:16,014 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:19,259 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,435 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,537 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:49,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:54,416 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2872, 2.3738, 2.4758, 3.1676, 2.1872, 3.1394, 2.9522, 2.3546], + device='cuda:1'), covar=tensor([0.3078, 0.2681, 0.1146, 0.1803, 0.3221, 0.1225, 0.2604, 0.2121], + device='cuda:1'), in_proj_covar=tensor([0.0749, 0.0759, 0.0625, 0.0874, 0.0747, 0.0659, 0.0772, 0.0677], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:45:54,923 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.860e+02 5.906e+02 7.408e+02 1.029e+03 2.368e+03, threshold=1.482e+03, percent-clipped=3.0 +2023-04-01 14:45:57,422 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:46:06,785 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2376, 2.8849, 2.2934, 2.1068, 1.9030, 2.3010, 0.7609, 2.0038], + device='cuda:1'), covar=tensor([0.0387, 0.0378, 0.0399, 0.0666, 0.0812, 0.0657, 0.0889, 0.0700], + device='cuda:1'), in_proj_covar=tensor([0.0315, 0.0320, 0.0316, 0.0334, 0.0409, 0.0336, 0.0294, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:46:14,163 INFO [train.py:903] (1/4) Epoch 9, batch 3150, loss[loss=0.2029, simple_loss=0.2784, pruned_loss=0.06363, over 19312.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3171, pruned_loss=0.08782, over 3829012.93 frames. 
], batch size: 44, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:46:42,107 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 14:47:14,703 INFO [train.py:903] (1/4) Epoch 9, batch 3200, loss[loss=0.2356, simple_loss=0.3122, pruned_loss=0.07946, over 19752.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3189, pruned_loss=0.08907, over 3819173.30 frames. ], batch size: 54, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:47:16,009 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57824.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:55,926 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:56,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.791e+02 7.100e+02 9.261e+02 4.038e+03, threshold=1.420e+03, percent-clipped=7.0 +2023-04-01 14:48:14,924 INFO [train.py:903] (1/4) Epoch 9, batch 3250, loss[loss=0.2193, simple_loss=0.2933, pruned_loss=0.07265, over 19707.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3185, pruned_loss=0.08904, over 3810477.15 frames. ], batch size: 51, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:48:18,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:12,236 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57919.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:16,424 INFO [train.py:903] (1/4) Epoch 9, batch 3300, loss[loss=0.2361, simple_loss=0.3105, pruned_loss=0.08087, over 18006.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3188, pruned_loss=0.08906, over 3816177.45 frames. ], batch size: 83, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:49:23,985 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 14:49:26,386 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57930.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:36,426 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:43,145 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:56,565 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:59,469 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.470e+02 6.783e+02 8.234e+02 1.579e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 14:50:17,158 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:17,941 INFO [train.py:903] (1/4) Epoch 9, batch 3350, loss[loss=0.2471, simple_loss=0.3159, pruned_loss=0.08918, over 19788.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3182, pruned_loss=0.08871, over 3820904.22 frames. 
], batch size: 56, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:50:22,883 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:33,023 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1746, 2.1608, 2.2071, 3.1142, 2.1944, 3.1104, 2.6173, 2.0845], + device='cuda:1'), covar=tensor([0.3307, 0.2952, 0.1343, 0.1825, 0.3364, 0.1226, 0.2970, 0.2300], + device='cuda:1'), in_proj_covar=tensor([0.0748, 0.0762, 0.0628, 0.0871, 0.0749, 0.0660, 0.0771, 0.0679], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:50:45,335 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6757, 4.8029, 5.4701, 5.4156, 2.0598, 5.0541, 4.3520, 5.0309], + device='cuda:1'), covar=tensor([0.1151, 0.0837, 0.0473, 0.0453, 0.4721, 0.0460, 0.0596, 0.0947], + device='cuda:1'), in_proj_covar=tensor([0.0624, 0.0550, 0.0747, 0.0619, 0.0686, 0.0490, 0.0468, 0.0683], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 14:50:54,297 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58002.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:51:20,070 INFO [train.py:903] (1/4) Epoch 9, batch 3400, loss[loss=0.2045, simple_loss=0.2748, pruned_loss=0.06707, over 18190.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3175, pruned_loss=0.08836, over 3831624.14 frames. ], batch size: 40, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:51:56,100 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7060, 1.6121, 1.3786, 1.7079, 1.6332, 1.0624, 1.0471, 1.5081], + device='cuda:1'), covar=tensor([0.0946, 0.1492, 0.1537, 0.0937, 0.1354, 0.1402, 0.1607, 0.0963], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0353, 0.0291, 0.0240, 0.0300, 0.0241, 0.0272, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:52:02,957 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.309e+02 6.592e+02 8.930e+02 1.711e+03, threshold=1.318e+03, percent-clipped=4.0 +2023-04-01 14:52:20,816 INFO [train.py:903] (1/4) Epoch 9, batch 3450, loss[loss=0.2711, simple_loss=0.3303, pruned_loss=0.106, over 19528.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3169, pruned_loss=0.08824, over 3816063.16 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:52:25,119 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 14:52:48,527 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:10,922 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.92 vs. limit=5.0 +2023-04-01 14:53:12,077 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 14:53:22,376 INFO [train.py:903] (1/4) Epoch 9, batch 3500, loss[loss=0.2288, simple_loss=0.289, pruned_loss=0.08435, over 19751.00 frames. ], tot_loss[loss=0.246, simple_loss=0.316, pruned_loss=0.08799, over 3817314.59 frames. 
], batch size: 46, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:53:33,963 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:03,544 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:05,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.015e+02 7.407e+02 9.414e+02 2.837e+03, threshold=1.481e+03, percent-clipped=3.0 +2023-04-01 14:54:06,978 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58159.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:10,374 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:22,219 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0324, 5.0606, 5.7777, 5.7703, 1.9796, 5.4417, 4.7345, 5.3860], + device='cuda:1'), covar=tensor([0.1178, 0.0687, 0.0478, 0.0452, 0.4647, 0.0415, 0.0454, 0.0964], + device='cuda:1'), in_proj_covar=tensor([0.0627, 0.0554, 0.0752, 0.0622, 0.0692, 0.0495, 0.0469, 0.0691], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 14:54:24,280 INFO [train.py:903] (1/4) Epoch 9, batch 3550, loss[loss=0.2532, simple_loss=0.327, pruned_loss=0.08968, over 18156.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3163, pruned_loss=0.0881, over 3825334.27 frames. ], batch size: 83, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:54:50,587 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:09,179 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:20,433 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:24,267 INFO [train.py:903] (1/4) Epoch 9, batch 3600, loss[loss=0.2905, simple_loss=0.3545, pruned_loss=0.1133, over 19491.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3166, pruned_loss=0.08837, over 3835850.98 frames. 
], batch size: 64, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:55:30,320 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:54,914 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0910, 2.0539, 1.7679, 1.5776, 1.5283, 1.6162, 0.2736, 0.8130], + device='cuda:1'), covar=tensor([0.0305, 0.0339, 0.0233, 0.0350, 0.0712, 0.0425, 0.0671, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0317, 0.0321, 0.0317, 0.0334, 0.0407, 0.0333, 0.0297, 0.0315], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 14:56:01,039 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:06,161 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.487e+02 6.500e+02 7.997e+02 2.924e+03, threshold=1.300e+03, percent-clipped=2.0 +2023-04-01 14:56:17,831 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9552, 2.0118, 2.1709, 2.8307, 1.8524, 2.8445, 2.5978, 1.9688], + device='cuda:1'), covar=tensor([0.3149, 0.2656, 0.1232, 0.1512, 0.3130, 0.1104, 0.2749, 0.2318], + device='cuda:1'), in_proj_covar=tensor([0.0744, 0.0757, 0.0627, 0.0866, 0.0750, 0.0663, 0.0767, 0.0679], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 14:56:23,762 INFO [train.py:903] (1/4) Epoch 9, batch 3650, loss[loss=0.2385, simple_loss=0.3128, pruned_loss=0.08211, over 18841.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3169, pruned_loss=0.08873, over 3827779.88 frames. ], batch size: 75, lr: 9.26e-03, grad_scale: 16.0 +2023-04-01 14:57:24,454 INFO [train.py:903] (1/4) Epoch 9, batch 3700, loss[loss=0.1842, simple_loss=0.2556, pruned_loss=0.0564, over 19745.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3156, pruned_loss=0.08783, over 3825958.60 frames. ], batch size: 47, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:58:07,749 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 6.060e+02 7.736e+02 9.843e+02 2.060e+03, threshold=1.547e+03, percent-clipped=9.0 +2023-04-01 14:58:23,939 INFO [train.py:903] (1/4) Epoch 9, batch 3750, loss[loss=0.2305, simple_loss=0.2975, pruned_loss=0.08172, over 19688.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.316, pruned_loss=0.08804, over 3833303.67 frames. ], batch size: 53, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:58:28,588 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7485, 1.5340, 1.4041, 2.1645, 1.6613, 2.1016, 2.2339, 1.8950], + device='cuda:1'), covar=tensor([0.0781, 0.0915, 0.1040, 0.0855, 0.0902, 0.0681, 0.0777, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0233, 0.0228, 0.0258, 0.0243, 0.0218, 0.0204, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 14:58:37,437 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.13 vs. 
limit=5.0 +2023-04-01 14:59:09,814 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.2884, 3.8463, 2.6728, 3.4497, 1.2119, 3.7084, 3.6703, 3.7114], + device='cuda:1'), covar=tensor([0.0655, 0.1021, 0.1747, 0.0797, 0.3347, 0.0772, 0.0762, 0.0997], + device='cuda:1'), in_proj_covar=tensor([0.0410, 0.0349, 0.0418, 0.0314, 0.0368, 0.0344, 0.0337, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 14:59:24,637 INFO [train.py:903] (1/4) Epoch 9, batch 3800, loss[loss=0.2427, simple_loss=0.318, pruned_loss=0.08375, over 18693.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3156, pruned_loss=0.08771, over 3820440.50 frames. ], batch size: 74, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 14:59:38,865 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6200, 1.9775, 2.0878, 2.8883, 2.4807, 2.2778, 2.3576, 2.7698], + device='cuda:1'), covar=tensor([0.0813, 0.1816, 0.1368, 0.0910, 0.1225, 0.0459, 0.0936, 0.0516], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0353, 0.0291, 0.0240, 0.0299, 0.0241, 0.0272, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 14:59:54,353 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 15:00:08,584 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 5.230e+02 6.601e+02 8.580e+02 1.875e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-04-01 15:00:17,603 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58466.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:00:24,959 INFO [train.py:903] (1/4) Epoch 9, batch 3850, loss[loss=0.2736, simple_loss=0.3349, pruned_loss=0.1062, over 13888.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3153, pruned_loss=0.08695, over 3815658.37 frames. ], batch size: 136, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:00:27,780 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3759, 1.4326, 1.7359, 1.5378, 2.4363, 2.0950, 2.5240, 1.2248], + device='cuda:1'), covar=tensor([0.1915, 0.3302, 0.1954, 0.1581, 0.1243, 0.1731, 0.1224, 0.3076], + device='cuda:1'), in_proj_covar=tensor([0.0471, 0.0550, 0.0564, 0.0423, 0.0581, 0.0476, 0.0638, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 15:00:46,674 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:01,848 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:06,098 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:25,307 INFO [train.py:903] (1/4) Epoch 9, batch 3900, loss[loss=0.2925, simple_loss=0.3588, pruned_loss=0.1131, over 19751.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3165, pruned_loss=0.0877, over 3824750.37 frames. 
], batch size: 63, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:02:06,362 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5173, 1.3708, 1.9839, 1.6187, 3.0155, 4.6487, 4.6347, 5.0205], + device='cuda:1'), covar=tensor([0.1381, 0.3271, 0.2727, 0.1865, 0.0445, 0.0143, 0.0132, 0.0098], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0285, 0.0316, 0.0245, 0.0208, 0.0144, 0.0203, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 15:02:08,186 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 6.109e+02 7.308e+02 8.933e+02 2.200e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 15:02:26,012 INFO [train.py:903] (1/4) Epoch 9, batch 3950, loss[loss=0.1904, simple_loss=0.2641, pruned_loss=0.05836, over 19752.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3155, pruned_loss=0.08733, over 3826284.14 frames. ], batch size: 45, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:02:28,973 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 15:03:21,996 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58618.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:25,501 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:28,125 INFO [train.py:903] (1/4) Epoch 9, batch 4000, loss[loss=0.2522, simple_loss=0.3068, pruned_loss=0.09886, over 19589.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3149, pruned_loss=0.08663, over 3816520.91 frames. ], batch size: 52, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:04:10,730 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.392e+02 5.642e+02 6.845e+02 8.578e+02 2.087e+03, threshold=1.369e+03, percent-clipped=3.0 +2023-04-01 15:04:10,788 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 15:04:27,006 INFO [train.py:903] (1/4) Epoch 9, batch 4050, loss[loss=0.2141, simple_loss=0.2975, pruned_loss=0.06533, over 19774.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3142, pruned_loss=0.0864, over 3833584.43 frames. ], batch size: 54, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:04:46,279 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9178, 1.6763, 2.0476, 1.8712, 4.4613, 1.1482, 2.3736, 4.6427], + device='cuda:1'), covar=tensor([0.0307, 0.2327, 0.2298, 0.1573, 0.0619, 0.2437, 0.1277, 0.0196], + device='cuda:1'), in_proj_covar=tensor([0.0334, 0.0325, 0.0336, 0.0306, 0.0338, 0.0322, 0.0309, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:04:59,268 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:05:28,158 INFO [train.py:903] (1/4) Epoch 9, batch 4100, loss[loss=0.2904, simple_loss=0.3617, pruned_loss=0.1096, over 19785.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3145, pruned_loss=0.0867, over 3840203.51 frames. ], batch size: 56, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:03,196 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-01 15:06:11,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.572e+02 6.953e+02 8.904e+02 1.888e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 15:06:28,251 INFO [train.py:903] (1/4) Epoch 9, batch 4150, loss[loss=0.3085, simple_loss=0.3657, pruned_loss=0.1256, over 13432.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3156, pruned_loss=0.08755, over 3827098.79 frames. ], batch size: 136, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:54,425 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58793.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:07:30,066 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7351, 1.5238, 1.4072, 2.1005, 1.7230, 1.9468, 2.0696, 1.8288], + device='cuda:1'), covar=tensor([0.0773, 0.0978, 0.1112, 0.0797, 0.0863, 0.0749, 0.0822, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0231, 0.0227, 0.0255, 0.0242, 0.0214, 0.0201, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 15:07:30,859 INFO [train.py:903] (1/4) Epoch 9, batch 4200, loss[loss=0.2012, simple_loss=0.2768, pruned_loss=0.06284, over 19411.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3149, pruned_loss=0.08674, over 3838499.23 frames. ], batch size: 47, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:07:34,200 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 15:07:44,062 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 15:08:14,668 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.798e+02 5.991e+02 7.229e+02 9.145e+02 1.621e+03, threshold=1.446e+03, percent-clipped=3.0 +2023-04-01 15:08:32,532 INFO [train.py:903] (1/4) Epoch 9, batch 4250, loss[loss=0.2672, simple_loss=0.3318, pruned_loss=0.1013, over 19654.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3132, pruned_loss=0.08528, over 3846120.47 frames. ], batch size: 60, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:08:34,223 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:37,507 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:48,676 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 15:08:59,992 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 15:09:03,829 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:08,232 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:32,789 INFO [train.py:903] (1/4) Epoch 9, batch 4300, loss[loss=0.1982, simple_loss=0.2691, pruned_loss=0.06359, over 19778.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3142, pruned_loss=0.08567, over 3849486.86 frames. 
], batch size: 48, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:09:34,166 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:10:05,294 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4465, 2.3526, 1.6348, 1.5950, 2.2500, 1.2691, 1.1271, 1.7861], + device='cuda:1'), covar=tensor([0.0883, 0.0572, 0.0953, 0.0596, 0.0357, 0.1049, 0.0830, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0296, 0.0322, 0.0242, 0.0229, 0.0318, 0.0288, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:10:17,294 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.584e+02 7.321e+02 9.114e+02 2.155e+03, threshold=1.464e+03, percent-clipped=5.0 +2023-04-01 15:10:24,490 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1704, 1.1103, 1.1325, 1.4006, 1.1308, 1.2685, 1.3259, 1.2394], + device='cuda:1'), covar=tensor([0.0791, 0.0904, 0.0982, 0.0603, 0.0723, 0.0714, 0.0694, 0.0654], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0229, 0.0224, 0.0254, 0.0240, 0.0212, 0.0201, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 15:10:26,452 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 15:10:33,821 INFO [train.py:903] (1/4) Epoch 9, batch 4350, loss[loss=0.2233, simple_loss=0.3006, pruned_loss=0.07295, over 19736.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.314, pruned_loss=0.0856, over 3840508.19 frames. ], batch size: 51, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:34,343 INFO [train.py:903] (1/4) Epoch 9, batch 4400, loss[loss=0.2459, simple_loss=0.3123, pruned_loss=0.08975, over 18239.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3154, pruned_loss=0.08711, over 3829120.59 frames. ], batch size: 83, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:58,695 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 15:11:58,796 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:12:08,641 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 15:12:18,355 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.626e+02 6.682e+02 1.014e+03 2.125e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-01 15:12:35,840 INFO [train.py:903] (1/4) Epoch 9, batch 4450, loss[loss=0.2337, simple_loss=0.2982, pruned_loss=0.08461, over 16129.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3149, pruned_loss=0.08684, over 3834578.54 frames. ], batch size: 35, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:36,621 INFO [train.py:903] (1/4) Epoch 9, batch 4500, loss[loss=0.24, simple_loss=0.2986, pruned_loss=0.09068, over 19726.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3141, pruned_loss=0.08701, over 3825624.76 frames. 
], batch size: 45, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:54,291 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:20,653 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59158.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:21,306 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 15:14:21,456 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.665e+02 5.638e+02 7.209e+02 9.049e+02 2.430e+03, threshold=1.442e+03, percent-clipped=5.0 +2023-04-01 15:14:38,162 INFO [train.py:903] (1/4) Epoch 9, batch 4550, loss[loss=0.2419, simple_loss=0.3113, pruned_loss=0.08623, over 19621.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3134, pruned_loss=0.08593, over 3829626.24 frames. ], batch size: 50, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:14:46,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 15:14:49,760 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59182.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:15:11,272 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 15:15:39,255 INFO [train.py:903] (1/4) Epoch 9, batch 4600, loss[loss=0.2215, simple_loss=0.2961, pruned_loss=0.07346, over 19772.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3147, pruned_loss=0.08657, over 3834614.83 frames. ], batch size: 48, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:15,344 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:24,167 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 5.555e+02 6.855e+02 8.371e+02 1.742e+03, threshold=1.371e+03, percent-clipped=2.0 +2023-04-01 15:16:35,123 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:40,781 INFO [train.py:903] (1/4) Epoch 9, batch 4650, loss[loss=0.2531, simple_loss=0.32, pruned_loss=0.09309, over 19607.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.315, pruned_loss=0.08628, over 3827343.41 frames. ], batch size: 50, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:51,378 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 15:16:56,380 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 15:17:05,559 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.55 vs. limit=5.0 +2023-04-01 15:17:09,147 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 15:17:41,933 INFO [train.py:903] (1/4) Epoch 9, batch 4700, loss[loss=0.2319, simple_loss=0.3043, pruned_loss=0.07974, over 19765.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3139, pruned_loss=0.08549, over 3840240.59 frames. ], batch size: 54, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:03,974 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-01 15:18:25,859 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.586e+02 6.716e+02 8.168e+02 1.499e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-01 15:18:41,883 INFO [train.py:903] (1/4) Epoch 9, batch 4750, loss[loss=0.2215, simple_loss=0.2933, pruned_loss=0.07489, over 19757.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3143, pruned_loss=0.08566, over 3838737.42 frames. ], batch size: 51, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:51,377 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3934, 2.2455, 1.8018, 1.6871, 1.6060, 1.7787, 0.4127, 1.3167], + device='cuda:1'), covar=tensor([0.0373, 0.0368, 0.0343, 0.0552, 0.0814, 0.0601, 0.0840, 0.0690], + device='cuda:1'), in_proj_covar=tensor([0.0325, 0.0322, 0.0323, 0.0339, 0.0413, 0.0340, 0.0300, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 15:18:54,742 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:32,457 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:43,261 INFO [train.py:903] (1/4) Epoch 9, batch 4800, loss[loss=0.2604, simple_loss=0.3214, pruned_loss=0.09964, over 19751.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3143, pruned_loss=0.08602, over 3840229.08 frames. ], batch size: 63, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:19:51,183 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9066, 4.3117, 4.6059, 4.5875, 1.7336, 4.2557, 3.7268, 4.2829], + device='cuda:1'), covar=tensor([0.1211, 0.0660, 0.0525, 0.0508, 0.4718, 0.0483, 0.0564, 0.1019], + device='cuda:1'), in_proj_covar=tensor([0.0624, 0.0557, 0.0746, 0.0626, 0.0690, 0.0504, 0.0460, 0.0692], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 15:20:03,379 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59439.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:20:13,002 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.50 vs. limit=5.0 +2023-04-01 15:20:27,560 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.762e+02 6.893e+02 8.818e+02 1.836e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-01 15:20:43,942 INFO [train.py:903] (1/4) Epoch 9, batch 4850, loss[loss=0.2063, simple_loss=0.2941, pruned_loss=0.05922, over 19631.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3138, pruned_loss=0.0859, over 3832380.65 frames. ], batch size: 57, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:08,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 15:21:27,157 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:29,003 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 15:21:35,286 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 15:21:36,461 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-01 15:21:42,201 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:45,373 INFO [train.py:903] (1/4) Epoch 9, batch 4900, loss[loss=0.2529, simple_loss=0.3261, pruned_loss=0.08984, over 19622.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3126, pruned_loss=0.08498, over 3836741.47 frames. ], batch size: 50, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:46,551 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 15:21:49,055 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59526.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:56,968 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:22:05,289 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 15:22:29,458 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 5.274e+02 6.528e+02 8.023e+02 2.606e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-01 15:22:45,479 INFO [train.py:903] (1/4) Epoch 9, batch 4950, loss[loss=0.3137, simple_loss=0.3693, pruned_loss=0.129, over 18843.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3139, pruned_loss=0.08626, over 3813162.81 frames. ], batch size: 74, lr: 9.15e-03, grad_scale: 8.0 +2023-04-01 15:22:48,367 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 15:23:01,071 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 15:23:24,587 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 15:23:46,155 INFO [train.py:903] (1/4) Epoch 9, batch 5000, loss[loss=0.227, simple_loss=0.287, pruned_loss=0.08347, over 19764.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.314, pruned_loss=0.0863, over 3804515.53 frames. ], batch size: 46, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:23:53,577 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 15:24:04,778 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 15:24:06,384 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:08,534 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:30,504 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.818e+02 6.890e+02 9.185e+02 2.943e+03, threshold=1.378e+03, percent-clipped=3.0 +2023-04-01 15:24:35,434 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:46,099 INFO [train.py:903] (1/4) Epoch 9, batch 5050, loss[loss=0.2693, simple_loss=0.3428, pruned_loss=0.09792, over 19792.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3149, pruned_loss=0.08671, over 3790431.33 frames. 
], batch size: 56, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:24:47,599 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1160, 1.1964, 1.7631, 1.2221, 2.7793, 3.7347, 3.4524, 3.9050], + device='cuda:1'), covar=tensor([0.1508, 0.3265, 0.2829, 0.1944, 0.0415, 0.0134, 0.0200, 0.0141], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0289, 0.0321, 0.0246, 0.0212, 0.0147, 0.0205, 0.0185], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 15:25:21,702 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 15:25:47,319 INFO [train.py:903] (1/4) Epoch 9, batch 5100, loss[loss=0.1974, simple_loss=0.2617, pruned_loss=0.06654, over 19728.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3128, pruned_loss=0.08552, over 3809317.27 frames. ], batch size: 46, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:25:56,495 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 15:25:59,769 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 15:26:05,154 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 15:26:33,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 5.962e+02 7.316e+02 9.170e+02 1.645e+03, threshold=1.463e+03, percent-clipped=5.0 +2023-04-01 15:26:47,515 INFO [train.py:903] (1/4) Epoch 9, batch 5150, loss[loss=0.3427, simple_loss=0.3712, pruned_loss=0.1571, over 13601.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3128, pruned_loss=0.08574, over 3794085.77 frames. ], batch size: 135, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:26:58,278 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 15:27:32,211 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 15:27:49,842 INFO [train.py:903] (1/4) Epoch 9, batch 5200, loss[loss=0.2687, simple_loss=0.3468, pruned_loss=0.09525, over 19812.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3147, pruned_loss=0.08706, over 3792008.17 frames. ], batch size: 56, lr: 9.14e-03, grad_scale: 8.0 +2023-04-01 15:27:59,709 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 15:28:34,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 5.545e+02 6.726e+02 8.723e+02 1.623e+03, threshold=1.345e+03, percent-clipped=2.0 +2023-04-01 15:28:39,329 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59864.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:41,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 15:28:41,869 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:50,361 INFO [train.py:903] (1/4) Epoch 9, batch 5250, loss[loss=0.2319, simple_loss=0.3158, pruned_loss=0.07397, over 19623.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3149, pruned_loss=0.08722, over 3801782.13 frames. 
], batch size: 57, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:29:19,305 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:31,328 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,795 INFO [train.py:903] (1/4) Epoch 9, batch 5300, loss[loss=0.265, simple_loss=0.3361, pruned_loss=0.09691, over 19648.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3151, pruned_loss=0.08717, over 3802961.70 frames. ], batch size: 60, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:30:04,495 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 15:30:36,748 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.525e+02 7.204e+02 8.995e+02 2.228e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-01 15:30:50,971 INFO [train.py:903] (1/4) Epoch 9, batch 5350, loss[loss=0.2475, simple_loss=0.319, pruned_loss=0.08795, over 18144.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3161, pruned_loss=0.08778, over 3814431.50 frames. ], batch size: 83, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:30:58,316 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:31:24,016 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 15:31:52,157 INFO [train.py:903] (1/4) Epoch 9, batch 5400, loss[loss=0.2735, simple_loss=0.3419, pruned_loss=0.1025, over 19544.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3164, pruned_loss=0.0879, over 3829958.04 frames. ], batch size: 56, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:32:34,980 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:36,999 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 5.587e+02 7.112e+02 9.365e+02 1.948e+03, threshold=1.422e+03, percent-clipped=3.0 +2023-04-01 15:32:49,986 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:53,203 INFO [train.py:903] (1/4) Epoch 9, batch 5450, loss[loss=0.2177, simple_loss=0.2812, pruned_loss=0.07713, over 15815.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3148, pruned_loss=0.08708, over 3828513.04 frames. ], batch size: 35, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:33:06,621 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60083.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:21,813 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60096.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:55,444 INFO [train.py:903] (1/4) Epoch 9, batch 5500, loss[loss=0.2591, simple_loss=0.3331, pruned_loss=0.09252, over 19856.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.315, pruned_loss=0.08735, over 3820830.33 frames. ], batch size: 52, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:34:17,039 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 15:34:40,129 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.485e+02 6.858e+02 8.862e+02 1.983e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-04-01 15:34:56,050 INFO [train.py:903] (1/4) Epoch 9, batch 5550, loss[loss=0.2385, simple_loss=0.3068, pruned_loss=0.08513, over 19617.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.314, pruned_loss=0.08658, over 3823811.41 frames. ], batch size: 50, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:35:01,729 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 15:35:42,127 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:48,964 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60216.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:49,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 15:35:51,216 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9225, 1.6662, 1.7368, 2.1057, 1.8320, 1.8644, 1.7447, 1.9467], + device='cuda:1'), covar=tensor([0.1059, 0.1719, 0.1367, 0.0866, 0.1274, 0.0480, 0.1137, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0351, 0.0290, 0.0240, 0.0298, 0.0241, 0.0275, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:35:57,509 INFO [train.py:903] (1/4) Epoch 9, batch 5600, loss[loss=0.2409, simple_loss=0.3155, pruned_loss=0.08314, over 19664.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3138, pruned_loss=0.08687, over 3818789.99 frames. ], batch size: 55, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:36:12,978 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:31,698 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:36,797 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. limit=5.0 +2023-04-01 15:36:41,623 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.287e+02 5.903e+02 7.092e+02 9.595e+02 1.671e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 15:36:42,034 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:58,114 INFO [train.py:903] (1/4) Epoch 9, batch 5650, loss[loss=0.2128, simple_loss=0.2962, pruned_loss=0.06467, over 19665.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3141, pruned_loss=0.08701, over 3821201.18 frames. ], batch size: 53, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:37:25,224 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5267, 1.6192, 1.7919, 2.1128, 1.4335, 1.8633, 1.9876, 1.6718], + device='cuda:1'), covar=tensor([0.3252, 0.2658, 0.1366, 0.1438, 0.2787, 0.1350, 0.3337, 0.2532], + device='cuda:1'), in_proj_covar=tensor([0.0755, 0.0764, 0.0628, 0.0871, 0.0748, 0.0668, 0.0775, 0.0686], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 15:37:45,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 15:38:00,339 INFO [train.py:903] (1/4) Epoch 9, batch 5700, loss[loss=0.2333, simple_loss=0.3047, pruned_loss=0.08088, over 19576.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3132, pruned_loss=0.08624, over 3829997.40 frames. ], batch size: 52, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:38:02,936 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:45,033 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.631e+02 6.670e+02 7.834e+02 2.342e+03, threshold=1.334e+03, percent-clipped=2.0 +2023-04-01 15:38:51,870 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8624, 1.5570, 1.4786, 2.0716, 1.6322, 2.1813, 2.1164, 1.9990], + device='cuda:1'), covar=tensor([0.0682, 0.0889, 0.0926, 0.0750, 0.0793, 0.0591, 0.0731, 0.0561], + device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0232, 0.0230, 0.0256, 0.0245, 0.0217, 0.0202, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 15:38:53,003 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:59,668 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 15:39:00,760 INFO [train.py:903] (1/4) Epoch 9, batch 5750, loss[loss=0.2537, simple_loss=0.3168, pruned_loss=0.09528, over 19486.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3148, pruned_loss=0.08688, over 3818851.63 frames. ], batch size: 49, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:39:07,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 15:39:12,813 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 15:39:36,134 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:39:50,840 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:00,911 INFO [train.py:903] (1/4) Epoch 9, batch 5800, loss[loss=0.2496, simple_loss=0.3125, pruned_loss=0.09332, over 19854.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3146, pruned_loss=0.08666, over 3823804.61 frames. ], batch size: 52, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:40:06,306 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:22,426 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:45,417 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.333e+02 6.303e+02 7.771e+02 9.979e+02 2.257e+03, threshold=1.554e+03, percent-clipped=12.0 +2023-04-01 15:41:01,367 INFO [train.py:903] (1/4) Epoch 9, batch 5850, loss[loss=0.2748, simple_loss=0.3463, pruned_loss=0.1017, over 19326.00 frames. ], tot_loss[loss=0.247, simple_loss=0.317, pruned_loss=0.08851, over 3824911.97 frames. 
], batch size: 66, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:41:56,059 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:02,434 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 15:42:03,583 INFO [train.py:903] (1/4) Epoch 9, batch 5900, loss[loss=0.1948, simple_loss=0.271, pruned_loss=0.0593, over 19403.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3162, pruned_loss=0.08789, over 3834223.94 frames. ], batch size: 48, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:42:10,681 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:22,848 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 15:42:25,420 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60542.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:42,871 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:46,398 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-01 15:42:47,839 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.460e+02 7.799e+02 9.723e+02 2.713e+03, threshold=1.560e+03, percent-clipped=1.0 +2023-04-01 15:42:48,020 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:58,283 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2439, 1.9968, 1.5525, 1.2252, 1.8999, 1.1274, 1.1515, 1.6895], + device='cuda:1'), covar=tensor([0.0792, 0.0668, 0.0910, 0.0647, 0.0409, 0.1090, 0.0673, 0.0405], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0299, 0.0327, 0.0246, 0.0233, 0.0324, 0.0294, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:43:02,773 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3547, 1.1302, 1.0867, 1.2949, 1.1133, 1.2708, 1.1122, 1.2694], + device='cuda:1'), covar=tensor([0.1013, 0.1231, 0.1329, 0.0801, 0.1056, 0.0521, 0.1140, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0349, 0.0288, 0.0239, 0.0296, 0.0239, 0.0274, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:43:03,516 INFO [train.py:903] (1/4) Epoch 9, batch 5950, loss[loss=0.2667, simple_loss=0.3356, pruned_loss=0.09888, over 18098.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3164, pruned_loss=0.08803, over 3844057.77 frames. 
], batch size: 83, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:43:13,171 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:28,346 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.2176, 3.8115, 2.6971, 3.4481, 0.9974, 3.4527, 3.5786, 3.7532], + device='cuda:1'), covar=tensor([0.0746, 0.1038, 0.1900, 0.0786, 0.3902, 0.0974, 0.0803, 0.0964], + device='cuda:1'), in_proj_covar=tensor([0.0406, 0.0346, 0.0415, 0.0305, 0.0368, 0.0341, 0.0332, 0.0369], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 15:43:45,276 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:03,702 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:04,423 INFO [train.py:903] (1/4) Epoch 9, batch 6000, loss[loss=0.2327, simple_loss=0.3162, pruned_loss=0.07464, over 19783.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3152, pruned_loss=0.08674, over 3847637.24 frames. ], batch size: 56, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:44:04,423 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 15:44:16,868 INFO [train.py:937] (1/4) Epoch 9, validation: loss=0.1828, simple_loss=0.2835, pruned_loss=0.04105, over 944034.00 frames. +2023-04-01 15:44:16,869 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 15:44:28,522 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3711, 1.2284, 1.4427, 1.5228, 2.9176, 1.0307, 2.2835, 3.1787], + device='cuda:1'), covar=tensor([0.0460, 0.2558, 0.2515, 0.1611, 0.0696, 0.2304, 0.1039, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0327, 0.0336, 0.0309, 0.0334, 0.0323, 0.0315, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:44:47,014 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:45:00,686 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0206, 1.6967, 1.6065, 2.1280, 1.8857, 1.8975, 1.7184, 2.0085], + device='cuda:1'), covar=tensor([0.0913, 0.1621, 0.1363, 0.0840, 0.1138, 0.0450, 0.1143, 0.0601], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0351, 0.0290, 0.0239, 0.0297, 0.0241, 0.0274, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:45:02,308 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.785e+02 7.329e+02 9.590e+02 1.552e+03, threshold=1.466e+03, percent-clipped=0.0 +2023-04-01 15:45:05,837 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7874, 3.1883, 3.2793, 3.2945, 1.2092, 3.1268, 2.7861, 2.9983], + device='cuda:1'), covar=tensor([0.1305, 0.0894, 0.0774, 0.0744, 0.4726, 0.0704, 0.0705, 0.1251], + device='cuda:1'), in_proj_covar=tensor([0.0631, 0.0567, 0.0755, 0.0633, 0.0695, 0.0500, 0.0466, 0.0702], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 15:45:17,223 INFO [train.py:903] (1/4) Epoch 9, batch 6050, loss[loss=0.2714, simple_loss=0.3412, pruned_loss=0.1008, over 19759.00 frames. 
], tot_loss[loss=0.2438, simple_loss=0.3147, pruned_loss=0.08648, over 3850261.56 frames. ], batch size: 63, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:45:19,798 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:46:18,288 INFO [train.py:903] (1/4) Epoch 9, batch 6100, loss[loss=0.23, simple_loss=0.2963, pruned_loss=0.08183, over 19780.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3146, pruned_loss=0.0865, over 3837459.81 frames. ], batch size: 48, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:46:34,575 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:03,004 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.339e+02 6.367e+02 8.531e+02 1.806e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-01 15:47:18,987 INFO [train.py:903] (1/4) Epoch 9, batch 6150, loss[loss=0.2493, simple_loss=0.3116, pruned_loss=0.09345, over 16559.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3157, pruned_loss=0.08715, over 3841876.01 frames. ], batch size: 36, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:47:19,408 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:23,770 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0808, 2.0529, 1.6678, 1.5381, 1.3243, 1.5341, 0.4254, 0.8869], + device='cuda:1'), covar=tensor([0.0351, 0.0364, 0.0297, 0.0400, 0.0768, 0.0501, 0.0684, 0.0615], + device='cuda:1'), in_proj_covar=tensor([0.0322, 0.0317, 0.0318, 0.0333, 0.0413, 0.0337, 0.0295, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 15:47:33,341 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:44,098 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 15:47:48,615 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:48,645 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:03,546 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:04,568 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60811.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:18,466 INFO [train.py:903] (1/4) Epoch 9, batch 6200, loss[loss=0.2941, simple_loss=0.3473, pruned_loss=0.1204, over 13181.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3158, pruned_loss=0.0872, over 3838253.82 frames. 
], batch size: 136, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:48:18,859 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:34,245 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:49:03,645 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.706e+02 5.965e+02 7.614e+02 9.330e+02 2.107e+03, threshold=1.523e+03, percent-clipped=6.0 +2023-04-01 15:49:06,425 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0681, 1.6997, 1.6956, 1.9927, 1.8001, 1.8012, 1.6276, 2.0106], + device='cuda:1'), covar=tensor([0.0798, 0.1522, 0.1315, 0.0839, 0.1120, 0.0474, 0.1125, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0351, 0.0288, 0.0237, 0.0295, 0.0240, 0.0271, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 15:49:19,605 INFO [train.py:903] (1/4) Epoch 9, batch 6250, loss[loss=0.2417, simple_loss=0.3027, pruned_loss=0.09041, over 16939.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3153, pruned_loss=0.08704, over 3836296.06 frames. ], batch size: 37, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:49:49,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 15:50:20,310 INFO [train.py:903] (1/4) Epoch 9, batch 6300, loss[loss=0.2128, simple_loss=0.2974, pruned_loss=0.06413, over 19655.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3155, pruned_loss=0.08665, over 3840077.08 frames. ], batch size: 58, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:50:31,337 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:01,572 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60956.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:05,847 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.456e+02 6.999e+02 8.896e+02 1.665e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 15:51:21,557 INFO [train.py:903] (1/4) Epoch 9, batch 6350, loss[loss=0.2176, simple_loss=0.2963, pruned_loss=0.06948, over 19605.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3147, pruned_loss=0.08604, over 3836840.07 frames. ], batch size: 57, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:22,396 INFO [train.py:903] (1/4) Epoch 9, batch 6400, loss[loss=0.2312, simple_loss=0.3088, pruned_loss=0.07683, over 19659.00 frames. ], tot_loss[loss=0.243, simple_loss=0.314, pruned_loss=0.08596, over 3844504.76 frames. 
], batch size: 53, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:25,890 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5893, 2.9614, 3.0370, 3.0413, 1.3530, 2.8187, 2.5397, 2.7978], + device='cuda:1'), covar=tensor([0.1347, 0.1258, 0.0717, 0.0770, 0.4349, 0.0882, 0.0693, 0.1187], + device='cuda:1'), in_proj_covar=tensor([0.0628, 0.0562, 0.0754, 0.0627, 0.0692, 0.0499, 0.0464, 0.0697], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 15:52:39,073 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2891, 1.3948, 1.6182, 1.5048, 2.3261, 2.1268, 2.2806, 0.7992], + device='cuda:1'), covar=tensor([0.2051, 0.3567, 0.2115, 0.1678, 0.1229, 0.1712, 0.1279, 0.3528], + device='cuda:1'), in_proj_covar=tensor([0.0469, 0.0549, 0.0565, 0.0422, 0.0583, 0.0471, 0.0631, 0.0475], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 15:52:48,100 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 15:53:05,775 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 15:53:07,179 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 6.196e+02 7.384e+02 8.672e+02 1.804e+03, threshold=1.477e+03, percent-clipped=4.0 +2023-04-01 15:53:22,489 INFO [train.py:903] (1/4) Epoch 9, batch 6450, loss[loss=0.2865, simple_loss=0.3474, pruned_loss=0.1128, over 19660.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3155, pruned_loss=0.08684, over 3835662.35 frames. ], batch size: 60, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:53:30,374 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:31,426 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:54:05,092 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 15:54:22,077 INFO [train.py:903] (1/4) Epoch 9, batch 6500, loss[loss=0.233, simple_loss=0.3102, pruned_loss=0.07795, over 19787.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3159, pruned_loss=0.08761, over 3848484.81 frames. ], batch size: 56, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:54:27,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 15:55:06,066 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.706e+02 8.302e+02 1.012e+03 2.679e+03, threshold=1.660e+03, percent-clipped=7.0 +2023-04-01 15:55:21,994 INFO [train.py:903] (1/4) Epoch 9, batch 6550, loss[loss=0.2994, simple_loss=0.3713, pruned_loss=0.1138, over 19578.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3155, pruned_loss=0.08735, over 3844246.46 frames. ], batch size: 61, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:55:50,217 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:56:24,270 INFO [train.py:903] (1/4) Epoch 9, batch 6600, loss[loss=0.2069, simple_loss=0.2803, pruned_loss=0.06677, over 19395.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.315, pruned_loss=0.08724, over 3836927.54 frames. 
], batch size: 48, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:09,244 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 5.654e+02 6.784e+02 8.392e+02 1.741e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 15:57:24,976 INFO [train.py:903] (1/4) Epoch 9, batch 6650, loss[loss=0.2652, simple_loss=0.3395, pruned_loss=0.0955, over 19536.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3158, pruned_loss=0.08716, over 3838952.23 frames. ], batch size: 54, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:58:25,574 INFO [train.py:903] (1/4) Epoch 9, batch 6700, loss[loss=0.2815, simple_loss=0.346, pruned_loss=0.1085, over 19508.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3144, pruned_loss=0.08633, over 3826919.42 frames. ], batch size: 64, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:08,505 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.683e+02 7.501e+02 9.618e+02 2.603e+03, threshold=1.500e+03, percent-clipped=7.0 +2023-04-01 15:59:23,018 INFO [train.py:903] (1/4) Epoch 9, batch 6750, loss[loss=0.2567, simple_loss=0.3361, pruned_loss=0.08865, over 19604.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3149, pruned_loss=0.08685, over 3823268.53 frames. ], batch size: 57, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:18,969 INFO [train.py:903] (1/4) Epoch 9, batch 6800, loss[loss=0.2525, simple_loss=0.3299, pruned_loss=0.08755, over 19607.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.315, pruned_loss=0.08666, over 3820518.25 frames. ], batch size: 57, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:19,095 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:02,913 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 16:01:03,886 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 16:01:06,621 INFO [train.py:903] (1/4) Epoch 10, batch 0, loss[loss=0.2375, simple_loss=0.3102, pruned_loss=0.08246, over 19570.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3102, pruned_loss=0.08246, over 19570.00 frames. ], batch size: 52, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:01:06,622 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 16:01:17,499 INFO [train.py:937] (1/4) Epoch 10, validation: loss=0.1825, simple_loss=0.2836, pruned_loss=0.04072, over 944034.00 frames. +2023-04-01 16:01:17,500 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 16:01:17,929 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:27,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.689e+02 6.760e+02 8.116e+02 1.440e+03, threshold=1.352e+03, percent-clipped=0.0 +2023-04-01 16:01:29,691 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 16:01:48,111 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:50,677 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.96 vs. limit=5.0 +2023-04-01 16:02:17,430 INFO [train.py:903] (1/4) Epoch 10, batch 50, loss[loss=0.2499, simple_loss=0.3298, pruned_loss=0.08497, over 19538.00 frames. 
], tot_loss[loss=0.2503, simple_loss=0.3208, pruned_loss=0.08991, over 861903.62 frames. ], batch size: 56, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:02:50,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 16:02:55,443 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8290, 1.3378, 1.0559, 0.8746, 1.1628, 0.8789, 0.8948, 1.2167], + device='cuda:1'), covar=tensor([0.0510, 0.0682, 0.0991, 0.0584, 0.0460, 0.1151, 0.0543, 0.0399], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0300, 0.0324, 0.0245, 0.0234, 0.0326, 0.0289, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:03:03,261 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:03:18,484 INFO [train.py:903] (1/4) Epoch 10, batch 100, loss[loss=0.2346, simple_loss=0.3143, pruned_loss=0.07746, over 18936.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3238, pruned_loss=0.09262, over 1506611.73 frames. ], batch size: 74, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:03:25,125 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 16:03:29,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.266e+02 7.755e+02 9.384e+02 2.029e+03, threshold=1.551e+03, percent-clipped=6.0 +2023-04-01 16:04:19,613 INFO [train.py:903] (1/4) Epoch 10, batch 150, loss[loss=0.218, simple_loss=0.2945, pruned_loss=0.07072, over 19622.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.32, pruned_loss=0.08977, over 2009327.33 frames. ], batch size: 50, lr: 8.56e-03, grad_scale: 16.0 +2023-04-01 16:05:12,419 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 16:05:20,114 INFO [train.py:903] (1/4) Epoch 10, batch 200, loss[loss=0.2497, simple_loss=0.3238, pruned_loss=0.08776, over 19546.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3172, pruned_loss=0.08784, over 2403810.55 frames. ], batch size: 56, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:05:32,340 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.325e+02 6.934e+02 9.117e+02 1.602e+03, threshold=1.387e+03, percent-clipped=3.0 +2023-04-01 16:05:34,722 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9831, 1.7665, 1.5640, 2.1274, 1.9408, 1.8322, 1.6839, 1.8827], + device='cuda:1'), covar=tensor([0.0885, 0.1435, 0.1357, 0.0904, 0.1083, 0.0457, 0.1051, 0.0616], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0354, 0.0289, 0.0238, 0.0297, 0.0243, 0.0274, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:05:35,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-01 16:05:45,492 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0146, 5.1340, 5.9012, 5.8572, 1.9880, 5.5262, 4.6968, 5.4568], + device='cuda:1'), covar=tensor([0.1302, 0.0600, 0.0417, 0.0438, 0.4986, 0.0401, 0.0530, 0.1003], + device='cuda:1'), in_proj_covar=tensor([0.0632, 0.0563, 0.0751, 0.0633, 0.0694, 0.0506, 0.0465, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 16:06:20,989 INFO [train.py:903] (1/4) Epoch 10, batch 250, loss[loss=0.2502, simple_loss=0.3244, pruned_loss=0.088, over 19680.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3136, pruned_loss=0.08573, over 2727842.28 frames. ], batch size: 60, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:07:19,627 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:07:20,498 INFO [train.py:903] (1/4) Epoch 10, batch 300, loss[loss=0.2666, simple_loss=0.3377, pruned_loss=0.09776, over 19782.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3149, pruned_loss=0.08664, over 2971985.68 frames. ], batch size: 56, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:07:32,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.563e+02 6.785e+02 8.281e+02 1.821e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 16:07:33,012 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:12,847 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61794.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:20,594 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 16:08:21,878 INFO [train.py:903] (1/4) Epoch 10, batch 350, loss[loss=0.1957, simple_loss=0.2667, pruned_loss=0.06238, over 19306.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3153, pruned_loss=0.08716, over 3161159.80 frames. ], batch size: 44, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:08:44,831 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61819.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:09:23,130 INFO [train.py:903] (1/4) Epoch 10, batch 400, loss[loss=0.2827, simple_loss=0.3417, pruned_loss=0.1119, over 13589.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3166, pruned_loss=0.08778, over 3297592.11 frames. ], batch size: 137, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:09:36,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+02 5.438e+02 6.846e+02 8.745e+02 2.106e+03, threshold=1.369e+03, percent-clipped=7.0 +2023-04-01 16:10:26,723 INFO [train.py:903] (1/4) Epoch 10, batch 450, loss[loss=0.2899, simple_loss=0.3516, pruned_loss=0.1141, over 19294.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3161, pruned_loss=0.08774, over 3421229.01 frames. ], batch size: 66, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:10:29,713 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 16:10:50,948 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 16:10:50,978 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 16:11:05,169 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:11:19,229 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8821, 4.3541, 4.6086, 4.5647, 1.5845, 4.3038, 3.7466, 4.2541], + device='cuda:1'), covar=tensor([0.1232, 0.0707, 0.0461, 0.0532, 0.5142, 0.0583, 0.0564, 0.0943], + device='cuda:1'), in_proj_covar=tensor([0.0624, 0.0559, 0.0743, 0.0631, 0.0687, 0.0500, 0.0460, 0.0690], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 16:11:27,992 INFO [train.py:903] (1/4) Epoch 10, batch 500, loss[loss=0.2556, simple_loss=0.3332, pruned_loss=0.08903, over 19461.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3146, pruned_loss=0.08619, over 3508363.67 frames. ], batch size: 49, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:11:30,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 16:11:39,882 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.473e+02 6.472e+02 7.882e+02 1.512e+03, threshold=1.294e+03, percent-clipped=2.0 +2023-04-01 16:12:30,839 INFO [train.py:903] (1/4) Epoch 10, batch 550, loss[loss=0.2276, simple_loss=0.2924, pruned_loss=0.08139, over 19343.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3128, pruned_loss=0.0851, over 3592109.64 frames. ], batch size: 44, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:32,125 INFO [train.py:903] (1/4) Epoch 10, batch 600, loss[loss=0.3382, simple_loss=0.3735, pruned_loss=0.1515, over 12885.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3135, pruned_loss=0.08551, over 3640389.63 frames. ], batch size: 135, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:42,201 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 16:13:46,038 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.499e+02 5.366e+02 7.096e+02 8.541e+02 1.663e+03, threshold=1.419e+03, percent-clipped=2.0 +2023-04-01 16:14:11,915 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 16:14:26,432 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:14:35,324 INFO [train.py:903] (1/4) Epoch 10, batch 650, loss[loss=0.2397, simple_loss=0.3253, pruned_loss=0.07707, over 19674.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3149, pruned_loss=0.08651, over 3679514.42 frames. ], batch size: 55, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:14:40,022 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62105.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:24,625 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:39,373 INFO [train.py:903] (1/4) Epoch 10, batch 700, loss[loss=0.2329, simple_loss=0.3014, pruned_loss=0.08217, over 19477.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3135, pruned_loss=0.08591, over 3708722.16 frames. 
], batch size: 49, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:15:51,155 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.410e+02 5.986e+02 7.007e+02 9.230e+02 2.462e+03, threshold=1.401e+03, percent-clipped=6.0 +2023-04-01 16:15:51,631 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1926, 1.3134, 1.7656, 1.5503, 2.5031, 1.9125, 2.4331, 1.0694], + device='cuda:1'), covar=tensor([0.2276, 0.3916, 0.2078, 0.1743, 0.1384, 0.2121, 0.1647, 0.3613], + device='cuda:1'), in_proj_covar=tensor([0.0476, 0.0557, 0.0575, 0.0426, 0.0585, 0.0480, 0.0639, 0.0475], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:16:41,935 INFO [train.py:903] (1/4) Epoch 10, batch 750, loss[loss=0.2609, simple_loss=0.3334, pruned_loss=0.09416, over 19343.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3133, pruned_loss=0.08563, over 3741649.78 frames. ], batch size: 70, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:16:51,035 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:04,548 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:11,177 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8017, 1.5088, 1.7675, 1.7521, 4.2521, 0.9019, 2.2132, 4.5048], + device='cuda:1'), covar=tensor([0.0342, 0.2554, 0.2716, 0.1679, 0.0705, 0.2688, 0.1496, 0.0238], + device='cuda:1'), in_proj_covar=tensor([0.0338, 0.0326, 0.0337, 0.0311, 0.0338, 0.0322, 0.0317, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:17:35,231 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.81 vs. limit=5.0 +2023-04-01 16:17:42,578 INFO [train.py:903] (1/4) Epoch 10, batch 800, loss[loss=0.2624, simple_loss=0.3256, pruned_loss=0.09962, over 19478.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.314, pruned_loss=0.08585, over 3766113.46 frames. ], batch size: 49, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:17:54,854 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.237e+02 6.930e+02 8.487e+02 1.526e+03, threshold=1.386e+03, percent-clipped=2.0 +2023-04-01 16:17:58,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 16:18:14,965 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:32,368 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:43,719 INFO [train.py:903] (1/4) Epoch 10, batch 850, loss[loss=0.2361, simple_loss=0.3121, pruned_loss=0.08007, over 19666.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3147, pruned_loss=0.08624, over 3772253.61 frames. ], batch size: 53, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:37,721 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 16:19:45,734 INFO [train.py:903] (1/4) Epoch 10, batch 900, loss[loss=0.2749, simple_loss=0.3371, pruned_loss=0.1063, over 19710.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3146, pruned_loss=0.08635, over 3777740.42 frames. 
], batch size: 63, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:59,162 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 6.023e+02 7.076e+02 9.770e+02 2.916e+03, threshold=1.415e+03, percent-clipped=7.0 +2023-04-01 16:20:16,796 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1288, 2.1849, 2.2460, 3.1874, 2.1676, 3.1757, 2.6466, 1.9285], + device='cuda:1'), covar=tensor([0.3475, 0.2899, 0.1400, 0.1683, 0.3327, 0.1241, 0.3123, 0.2666], + device='cuda:1'), in_proj_covar=tensor([0.0762, 0.0769, 0.0629, 0.0879, 0.0759, 0.0674, 0.0774, 0.0687], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:20:35,854 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:20:48,986 INFO [train.py:903] (1/4) Epoch 10, batch 950, loss[loss=0.2491, simple_loss=0.3228, pruned_loss=0.08765, over 17218.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3147, pruned_loss=0.08672, over 3789319.70 frames. ], batch size: 101, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:20:50,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 16:20:58,654 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:21:50,240 INFO [train.py:903] (1/4) Epoch 10, batch 1000, loss[loss=0.2575, simple_loss=0.3256, pruned_loss=0.09464, over 19347.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3151, pruned_loss=0.08681, over 3797791.90 frames. ], batch size: 66, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:01,606 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 5.469e+02 6.659e+02 8.311e+02 1.987e+03, threshold=1.332e+03, percent-clipped=4.0 +2023-04-01 16:22:07,646 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:21,435 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:31,379 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62484.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:38,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:43,042 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 16:22:51,162 INFO [train.py:903] (1/4) Epoch 10, batch 1050, loss[loss=0.2382, simple_loss=0.3146, pruned_loss=0.08092, over 19349.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3139, pruned_loss=0.0857, over 3806501.11 frames. ], batch size: 66, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:51,512 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62501.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:23:23,752 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 16:23:54,502 INFO [train.py:903] (1/4) Epoch 10, batch 1100, loss[loss=0.2844, simple_loss=0.3567, pruned_loss=0.1061, over 19767.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3142, pruned_loss=0.08597, over 3809161.86 frames. 
], batch size: 56, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:24:07,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.569e+02 6.927e+02 9.101e+02 1.941e+03, threshold=1.385e+03, percent-clipped=3.0 +2023-04-01 16:24:10,208 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62563.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:54,297 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:56,396 INFO [train.py:903] (1/4) Epoch 10, batch 1150, loss[loss=0.2308, simple_loss=0.2854, pruned_loss=0.08813, over 19741.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.3147, pruned_loss=0.08593, over 3813981.41 frames. ], batch size: 46, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:24:59,688 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:15,637 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2789, 2.0869, 1.6526, 1.3787, 1.9414, 1.2032, 1.1536, 1.7383], + device='cuda:1'), covar=tensor([0.0694, 0.0531, 0.0858, 0.0617, 0.0351, 0.1102, 0.0558, 0.0373], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0296, 0.0321, 0.0239, 0.0231, 0.0325, 0.0285, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:25:37,686 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:47,589 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 16:25:53,819 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:58,136 INFO [train.py:903] (1/4) Epoch 10, batch 1200, loss[loss=0.297, simple_loss=0.3584, pruned_loss=0.1178, over 19361.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3139, pruned_loss=0.08552, over 3819723.90 frames. ], batch size: 66, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:26:09,506 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.581e+02 6.902e+02 9.588e+02 2.703e+03, threshold=1.380e+03, percent-clipped=8.0 +2023-04-01 16:26:24,936 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:32,549 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 16:26:39,718 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62684.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:50,277 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. 
limit=5.0 +2023-04-01 16:26:52,246 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1348, 1.8185, 1.5925, 2.1124, 1.9434, 1.7466, 1.7351, 1.9605], + device='cuda:1'), covar=tensor([0.0869, 0.1512, 0.1395, 0.0937, 0.1210, 0.0538, 0.1120, 0.0721], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0355, 0.0288, 0.0241, 0.0297, 0.0244, 0.0273, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:26:53,300 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8934, 1.7083, 1.4764, 2.0629, 1.8191, 1.6200, 1.6068, 1.8204], + device='cuda:1'), covar=tensor([0.0904, 0.1526, 0.1359, 0.0875, 0.1132, 0.0529, 0.1078, 0.0700], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0355, 0.0288, 0.0241, 0.0297, 0.0244, 0.0273, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:26:59,719 INFO [train.py:903] (1/4) Epoch 10, batch 1250, loss[loss=0.2512, simple_loss=0.3254, pruned_loss=0.08852, over 19443.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.315, pruned_loss=0.08626, over 3813286.68 frames. ], batch size: 64, lr: 8.49e-03, grad_scale: 4.0 +2023-04-01 16:27:08,182 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7242, 1.4740, 1.3806, 2.2127, 1.7941, 1.9891, 2.1712, 1.8331], + device='cuda:1'), covar=tensor([0.0800, 0.0995, 0.1117, 0.0770, 0.0762, 0.0762, 0.0806, 0.0707], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0229, 0.0227, 0.0255, 0.0239, 0.0215, 0.0199, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 16:28:00,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:01,682 INFO [train.py:903] (1/4) Epoch 10, batch 1300, loss[loss=0.2055, simple_loss=0.2759, pruned_loss=0.06754, over 19769.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3138, pruned_loss=0.08566, over 3831606.16 frames. ], batch size: 46, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:28:05,045 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:16,587 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.064e+02 6.596e+02 8.862e+02 1.920e+03, threshold=1.319e+03, percent-clipped=1.0 +2023-04-01 16:28:51,852 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:29:05,059 INFO [train.py:903] (1/4) Epoch 10, batch 1350, loss[loss=0.2733, simple_loss=0.3394, pruned_loss=0.1037, over 19675.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3141, pruned_loss=0.08578, over 3835469.39 frames. ], batch size: 58, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:29:58,563 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 16:30:07,948 INFO [train.py:903] (1/4) Epoch 10, batch 1400, loss[loss=0.2902, simple_loss=0.3502, pruned_loss=0.1151, over 17238.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3144, pruned_loss=0.08591, over 3817580.66 frames. 
], batch size: 101, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:30:13,139 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62855.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:20,948 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.591e+02 6.749e+02 8.217e+02 1.554e+03, threshold=1.350e+03, percent-clipped=4.0 +2023-04-01 16:30:28,002 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:44,897 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:31:07,150 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 16:31:09,395 INFO [train.py:903] (1/4) Epoch 10, batch 1450, loss[loss=0.2228, simple_loss=0.3054, pruned_loss=0.07006, over 19687.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3132, pruned_loss=0.08523, over 3812844.07 frames. ], batch size: 59, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:31:11,268 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.99 vs. limit=5.0 +2023-04-01 16:31:16,536 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:07,379 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:11,880 INFO [train.py:903] (1/4) Epoch 10, batch 1500, loss[loss=0.2252, simple_loss=0.3059, pruned_loss=0.0723, over 19347.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3125, pruned_loss=0.08499, over 3818437.44 frames. ], batch size: 70, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:32:27,777 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 5.675e+02 6.883e+02 8.252e+02 2.690e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-01 16:33:00,313 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6887, 1.9021, 2.2719, 1.9560, 2.9599, 3.3305, 3.2774, 3.5169], + device='cuda:1'), covar=tensor([0.1272, 0.2449, 0.2176, 0.1674, 0.0768, 0.0288, 0.0171, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0293, 0.0323, 0.0249, 0.0213, 0.0149, 0.0206, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 16:33:17,111 INFO [train.py:903] (1/4) Epoch 10, batch 1550, loss[loss=0.2933, simple_loss=0.3596, pruned_loss=0.1135, over 19323.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3124, pruned_loss=0.08483, over 3814660.88 frames. 
], batch size: 66, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:33:23,635 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:42,967 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:49,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:53,330 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:20,243 INFO [train.py:903] (1/4) Epoch 10, batch 1600, loss[loss=0.2816, simple_loss=0.3409, pruned_loss=0.1111, over 19593.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3117, pruned_loss=0.08438, over 3803945.84 frames. ], batch size: 57, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:34:31,099 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8068, 1.8985, 1.9723, 2.4911, 1.6809, 2.2331, 2.2575, 1.9955], + device='cuda:1'), covar=tensor([0.3082, 0.2492, 0.1300, 0.1465, 0.2822, 0.1328, 0.2898, 0.2208], + device='cuda:1'), in_proj_covar=tensor([0.0763, 0.0772, 0.0630, 0.0875, 0.0753, 0.0674, 0.0771, 0.0685], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:34:33,007 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.609e+02 5.367e+02 6.713e+02 8.573e+02 1.582e+03, threshold=1.343e+03, percent-clipped=2.0 +2023-04-01 16:34:33,387 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:42,288 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 16:35:21,330 INFO [train.py:903] (1/4) Epoch 10, batch 1650, loss[loss=0.2567, simple_loss=0.3337, pruned_loss=0.08984, over 17089.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.313, pruned_loss=0.08519, over 3797709.02 frames. ], batch size: 101, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:35:32,994 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 16:35:51,935 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:05,475 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:14,686 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:21,892 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:23,862 INFO [train.py:903] (1/4) Epoch 10, batch 1700, loss[loss=0.2004, simple_loss=0.2711, pruned_loss=0.06485, over 19746.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3128, pruned_loss=0.08497, over 3808746.68 frames. ], batch size: 45, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:36:38,454 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.819e+02 7.178e+02 9.040e+02 2.117e+03, threshold=1.436e+03, percent-clipped=7.0 +2023-04-01 16:37:04,077 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 16:37:28,653 INFO [train.py:903] (1/4) Epoch 10, batch 1750, loss[loss=0.2207, simple_loss=0.28, pruned_loss=0.08069, over 19084.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3132, pruned_loss=0.08536, over 3818553.18 frames. ], batch size: 42, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:37:36,123 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2355, 1.3033, 1.1529, 0.9803, 1.0418, 1.0280, 0.0443, 0.3735], + device='cuda:1'), covar=tensor([0.0414, 0.0409, 0.0244, 0.0356, 0.0783, 0.0359, 0.0737, 0.0677], + device='cuda:1'), in_proj_covar=tensor([0.0326, 0.0322, 0.0326, 0.0338, 0.0413, 0.0338, 0.0303, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 16:38:14,933 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,879 INFO [train.py:903] (1/4) Epoch 10, batch 1800, loss[loss=0.2492, simple_loss=0.3294, pruned_loss=0.08453, over 19686.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3123, pruned_loss=0.08476, over 3824952.17 frames. ], batch size: 59, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:44,559 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.870e+02 7.659e+02 9.293e+02 2.596e+03, threshold=1.532e+03, percent-clipped=8.0 +2023-04-01 16:39:03,322 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:16,053 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7074, 1.4932, 1.6854, 1.8414, 4.1300, 1.1104, 2.1000, 4.3872], + device='cuda:1'), covar=tensor([0.0417, 0.2600, 0.2777, 0.1732, 0.0751, 0.2579, 0.1643, 0.0262], + device='cuda:1'), in_proj_covar=tensor([0.0343, 0.0327, 0.0341, 0.0315, 0.0340, 0.0325, 0.0322, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:39:30,783 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 16:39:32,958 INFO [train.py:903] (1/4) Epoch 10, batch 1850, loss[loss=0.1981, simple_loss=0.2699, pruned_loss=0.06312, over 19769.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3115, pruned_loss=0.0842, over 3820030.05 frames. ], batch size: 47, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:39:35,786 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:54,126 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:10,163 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 16:40:26,813 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:35,815 INFO [train.py:903] (1/4) Epoch 10, batch 1900, loss[loss=0.267, simple_loss=0.3426, pruned_loss=0.09577, over 19340.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3097, pruned_loss=0.08272, over 3828255.73 frames. 
], batch size: 70, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:40:45,541 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 16:40:52,653 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 5.562e+02 7.038e+02 8.618e+02 1.834e+03, threshold=1.408e+03, percent-clipped=3.0 +2023-04-01 16:40:55,973 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 16:41:01,866 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 16:41:24,920 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 16:41:37,241 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:41:40,426 INFO [train.py:903] (1/4) Epoch 10, batch 1950, loss[loss=0.2171, simple_loss=0.2806, pruned_loss=0.07682, over 19311.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3104, pruned_loss=0.08341, over 3826823.43 frames. ], batch size: 44, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:41:58,525 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6970, 3.9127, 4.2623, 4.2212, 2.3680, 3.8766, 3.6663, 3.9925], + device='cuda:1'), covar=tensor([0.1065, 0.1997, 0.0524, 0.0545, 0.3887, 0.0891, 0.0525, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0642, 0.0583, 0.0763, 0.0644, 0.0712, 0.0516, 0.0478, 0.0708], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 16:42:10,539 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:27,978 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:31,429 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:44,769 INFO [train.py:903] (1/4) Epoch 10, batch 2000, loss[loss=0.2993, simple_loss=0.3551, pruned_loss=0.1218, over 19424.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3125, pruned_loss=0.08468, over 3796409.93 frames. ], batch size: 70, lr: 8.44e-03, grad_scale: 8.0 +2023-04-01 16:43:00,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.731e+02 5.221e+02 6.641e+02 8.776e+02 2.044e+03, threshold=1.328e+03, percent-clipped=3.0 +2023-04-01 16:43:43,946 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 16:43:47,387 INFO [train.py:903] (1/4) Epoch 10, batch 2050, loss[loss=0.2692, simple_loss=0.3364, pruned_loss=0.101, over 13624.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3125, pruned_loss=0.08453, over 3798148.45 frames. 
], batch size: 136, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:43:53,649 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:03,084 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6533, 1.4559, 1.6413, 1.6661, 3.1843, 1.1623, 2.3106, 3.5098], + device='cuda:1'), covar=tensor([0.0371, 0.2477, 0.2505, 0.1698, 0.0672, 0.2515, 0.1314, 0.0291], + device='cuda:1'), in_proj_covar=tensor([0.0343, 0.0328, 0.0344, 0.0313, 0.0342, 0.0328, 0.0323, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:44:04,853 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 16:44:06,009 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 16:44:26,383 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:28,443 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 16:44:50,592 INFO [train.py:903] (1/4) Epoch 10, batch 2100, loss[loss=0.2421, simple_loss=0.3056, pruned_loss=0.08928, over 19742.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3122, pruned_loss=0.08456, over 3810142.72 frames. ], batch size: 46, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:45:06,392 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.645e+02 7.348e+02 9.688e+02 2.351e+03, threshold=1.470e+03, percent-clipped=4.0 +2023-04-01 16:45:22,180 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1898, 1.2731, 1.8414, 1.5719, 3.0790, 4.5254, 4.4909, 4.9798], + device='cuda:1'), covar=tensor([0.1650, 0.3283, 0.2988, 0.1904, 0.0461, 0.0150, 0.0150, 0.0100], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0249, 0.0212, 0.0151, 0.0205, 0.0188], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 16:45:26,359 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 16:45:32,499 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:33,931 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4137, 2.9758, 2.1206, 2.2453, 2.0902, 2.4789, 0.7448, 2.0942], + device='cuda:1'), covar=tensor([0.0401, 0.0394, 0.0527, 0.0699, 0.0784, 0.0715, 0.0992, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0330, 0.0328, 0.0329, 0.0343, 0.0420, 0.0342, 0.0306, 0.0323], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 16:45:37,323 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:46,595 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 16:45:54,843 INFO [train.py:903] (1/4) Epoch 10, batch 2150, loss[loss=0.2609, simple_loss=0.3336, pruned_loss=0.09405, over 19535.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3128, pruned_loss=0.085, over 3812480.02 frames. 
], batch size: 54, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:46:00,142 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63604.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:46:58,770 INFO [train.py:903] (1/4) Epoch 10, batch 2200, loss[loss=0.2158, simple_loss=0.2954, pruned_loss=0.06808, over 19701.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3132, pruned_loss=0.08545, over 3808634.27 frames. ], batch size: 59, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:47:13,922 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.200e+02 5.616e+02 6.875e+02 8.680e+02 1.983e+03, threshold=1.375e+03, percent-clipped=4.0 +2023-04-01 16:48:00,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:48:03,390 INFO [train.py:903] (1/4) Epoch 10, batch 2250, loss[loss=0.2898, simple_loss=0.355, pruned_loss=0.1123, over 19628.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.314, pruned_loss=0.08624, over 3809460.41 frames. ], batch size: 57, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:49:04,928 INFO [train.py:903] (1/4) Epoch 10, batch 2300, loss[loss=0.2441, simple_loss=0.324, pruned_loss=0.08208, over 17980.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3135, pruned_loss=0.08598, over 3814970.45 frames. ], batch size: 83, lr: 8.42e-03, grad_scale: 4.0 +2023-04-01 16:49:18,697 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 16:49:23,044 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.837e+02 6.965e+02 8.642e+02 2.205e+03, threshold=1.393e+03, percent-clipped=3.0 +2023-04-01 16:49:45,520 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63782.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:49:47,900 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:09,268 INFO [train.py:903] (1/4) Epoch 10, batch 2350, loss[loss=0.2448, simple_loss=0.3197, pruned_loss=0.08495, over 19759.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3142, pruned_loss=0.08596, over 3808322.90 frames. ], batch size: 54, lr: 8.41e-03, grad_scale: 4.0 +2023-04-01 16:50:44,299 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:50,893 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 16:51:10,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 16:51:11,956 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 16:51:13,625 INFO [train.py:903] (1/4) Epoch 10, batch 2400, loss[loss=0.2975, simple_loss=0.3594, pruned_loss=0.1178, over 19064.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3162, pruned_loss=0.08704, over 3786298.90 frames. 
], batch size: 69, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:51:29,381 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:51:30,159 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.763e+02 5.915e+02 7.022e+02 9.244e+02 2.907e+03, threshold=1.404e+03, percent-clipped=10.0 +2023-04-01 16:51:55,379 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6917, 1.7838, 1.8894, 2.4631, 1.6869, 2.2509, 2.1532, 1.8350], + device='cuda:1'), covar=tensor([0.3254, 0.2632, 0.1358, 0.1495, 0.2783, 0.1286, 0.3206, 0.2468], + device='cuda:1'), in_proj_covar=tensor([0.0772, 0.0780, 0.0638, 0.0882, 0.0763, 0.0683, 0.0781, 0.0695], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:52:03,536 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-01 16:52:12,815 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:15,946 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:17,962 INFO [train.py:903] (1/4) Epoch 10, batch 2450, loss[loss=0.2485, simple_loss=0.3306, pruned_loss=0.08321, over 19677.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3159, pruned_loss=0.08662, over 3792335.72 frames. ], batch size: 60, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:52:55,530 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:59,294 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:16,595 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:19,961 INFO [train.py:903] (1/4) Epoch 10, batch 2500, loss[loss=0.2093, simple_loss=0.2878, pruned_loss=0.06537, over 19747.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3142, pruned_loss=0.08548, over 3801674.70 frames. ], batch size: 51, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:53:24,570 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:35,920 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.566e+02 5.261e+02 6.826e+02 9.233e+02 1.687e+03, threshold=1.365e+03, percent-clipped=6.0 +2023-04-01 16:53:56,277 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:54:00,242 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 16:54:23,067 INFO [train.py:903] (1/4) Epoch 10, batch 2550, loss[loss=0.2366, simple_loss=0.2947, pruned_loss=0.08924, over 19750.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3133, pruned_loss=0.08526, over 3790700.32 frames. 
], batch size: 46, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:54:23,594 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2187, 2.2180, 2.2840, 3.2956, 2.1331, 3.0886, 2.6876, 2.1431], + device='cuda:1'), covar=tensor([0.3668, 0.3250, 0.1456, 0.1734, 0.3745, 0.1362, 0.3371, 0.2631], + device='cuda:1'), in_proj_covar=tensor([0.0770, 0.0778, 0.0636, 0.0876, 0.0760, 0.0679, 0.0777, 0.0692], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:54:39,877 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8056, 1.1786, 1.4426, 1.5587, 3.2719, 1.0326, 2.3129, 3.5616], + device='cuda:1'), covar=tensor([0.0372, 0.2724, 0.2720, 0.1744, 0.0725, 0.2457, 0.1228, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0343, 0.0331, 0.0348, 0.0316, 0.0342, 0.0329, 0.0325, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:54:44,604 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4399, 1.4309, 1.6033, 1.5227, 2.3402, 2.0056, 2.3132, 1.2634], + device='cuda:1'), covar=tensor([0.1652, 0.3018, 0.1810, 0.1430, 0.1009, 0.1476, 0.1072, 0.2917], + device='cuda:1'), in_proj_covar=tensor([0.0483, 0.0563, 0.0582, 0.0428, 0.0584, 0.0481, 0.0647, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 16:54:56,132 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9496, 1.9790, 1.6972, 2.1462, 1.9743, 1.7731, 1.6590, 1.9620], + device='cuda:1'), covar=tensor([0.0851, 0.1249, 0.1296, 0.0835, 0.1081, 0.0499, 0.1125, 0.0626], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0347, 0.0285, 0.0236, 0.0293, 0.0241, 0.0271, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:55:13,524 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:15,683 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 16:55:20,789 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:26,447 INFO [train.py:903] (1/4) Epoch 10, batch 2600, loss[loss=0.2573, simple_loss=0.3206, pruned_loss=0.09698, over 19763.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3125, pruned_loss=0.08459, over 3785097.93 frames. 
], batch size: 63, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:41,485 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:42,246 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.625e+02 5.188e+02 6.411e+02 7.864e+02 1.888e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 16:55:48,731 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8143, 1.4644, 1.3862, 2.0480, 1.6445, 2.0356, 2.0908, 1.7737], + device='cuda:1'), covar=tensor([0.0792, 0.1037, 0.1031, 0.0880, 0.0964, 0.0684, 0.0874, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0228, 0.0226, 0.0256, 0.0241, 0.0214, 0.0201, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 16:56:27,599 INFO [train.py:903] (1/4) Epoch 10, batch 2650, loss[loss=0.2714, simple_loss=0.3351, pruned_loss=0.1039, over 18801.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3138, pruned_loss=0.08586, over 3779477.82 frames. ], batch size: 74, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:56:44,902 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 16:56:45,322 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 16:57:23,755 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8693, 4.4020, 2.6998, 3.9343, 0.9445, 4.0849, 4.1086, 4.3242], + device='cuda:1'), covar=tensor([0.0537, 0.0883, 0.1879, 0.0689, 0.3872, 0.0767, 0.0661, 0.0823], + device='cuda:1'), in_proj_covar=tensor([0.0411, 0.0351, 0.0417, 0.0311, 0.0370, 0.0345, 0.0341, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 16:57:29,262 INFO [train.py:903] (1/4) Epoch 10, batch 2700, loss[loss=0.1999, simple_loss=0.276, pruned_loss=0.06188, over 19611.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3133, pruned_loss=0.08521, over 3792925.86 frames. ], batch size: 50, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:57:32,857 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64153.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:35,332 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:45,503 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.891e+02 6.747e+02 8.779e+02 3.257e+03, threshold=1.349e+03, percent-clipped=11.0 +2023-04-01 16:57:56,919 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:04,115 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64178.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:07,364 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:33,079 INFO [train.py:903] (1/4) Epoch 10, batch 2750, loss[loss=0.2141, simple_loss=0.28, pruned_loss=0.0741, over 19785.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3145, pruned_loss=0.08593, over 3791348.30 frames. 
], batch size: 47, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:58:41,444 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,110 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,899 INFO [train.py:903] (1/4) Epoch 10, batch 2800, loss[loss=0.2461, simple_loss=0.3208, pruned_loss=0.08574, over 19523.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3139, pruned_loss=0.08578, over 3794508.36 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 16:59:42,949 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8372, 2.0659, 2.1763, 2.1779, 3.5441, 1.7298, 2.8176, 3.5381], + device='cuda:1'), covar=tensor([0.0395, 0.1915, 0.1962, 0.1370, 0.0599, 0.2011, 0.1642, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0327, 0.0342, 0.0313, 0.0339, 0.0326, 0.0319, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 16:59:45,208 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5437, 4.0926, 2.6985, 3.6752, 1.0544, 3.8076, 3.8256, 3.8911], + device='cuda:1'), covar=tensor([0.0580, 0.1005, 0.1808, 0.0672, 0.3670, 0.0751, 0.0765, 0.0927], + device='cuda:1'), in_proj_covar=tensor([0.0409, 0.0353, 0.0415, 0.0309, 0.0368, 0.0343, 0.0341, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 16:59:52,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.243e+02 6.695e+02 7.923e+02 2.040e+03, threshold=1.339e+03, percent-clipped=2.0 +2023-04-01 17:00:10,662 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:21,813 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9928, 1.2406, 1.6513, 0.8388, 2.3410, 2.9910, 2.6961, 3.1563], + device='cuda:1'), covar=tensor([0.1630, 0.3294, 0.2971, 0.2236, 0.0493, 0.0235, 0.0255, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0291, 0.0320, 0.0249, 0.0211, 0.0149, 0.0203, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:00:21,826 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:40,324 INFO [train.py:903] (1/4) Epoch 10, batch 2850, loss[loss=0.2001, simple_loss=0.2804, pruned_loss=0.05989, over 19861.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3122, pruned_loss=0.08484, over 3795561.77 frames. 
], batch size: 52, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:00:41,955 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:03,188 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:06,424 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:12,465 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:35,615 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:39,751 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 17:01:43,209 INFO [train.py:903] (1/4) Epoch 10, batch 2900, loss[loss=0.2482, simple_loss=0.3246, pruned_loss=0.08596, over 19663.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3125, pruned_loss=0.08484, over 3807167.46 frames. ], batch size: 58, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:01:58,180 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:58,968 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 6.015e+02 7.349e+02 1.031e+03 2.008e+03, threshold=1.470e+03, percent-clipped=12.0 +2023-04-01 17:02:26,622 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:28,058 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2852, 1.9743, 1.5421, 1.2253, 1.9046, 1.1082, 1.1918, 1.6748], + device='cuda:1'), covar=tensor([0.0819, 0.0691, 0.0912, 0.0703, 0.0430, 0.1148, 0.0639, 0.0408], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0296, 0.0323, 0.0242, 0.0231, 0.0322, 0.0288, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:02:36,424 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64393.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:44,659 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5701, 1.3004, 1.1723, 1.4469, 1.2177, 1.3557, 1.1563, 1.3718], + device='cuda:1'), covar=tensor([0.0880, 0.1058, 0.1408, 0.0840, 0.1031, 0.0573, 0.1191, 0.0730], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0349, 0.0288, 0.0237, 0.0296, 0.0243, 0.0273, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:02:46,686 INFO [train.py:903] (1/4) Epoch 10, batch 2950, loss[loss=0.2465, simple_loss=0.3298, pruned_loss=0.08159, over 19019.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3137, pruned_loss=0.08577, over 3805369.39 frames. ], batch size: 69, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,250 INFO [train.py:903] (1/4) Epoch 10, batch 3000, loss[loss=0.2073, simple_loss=0.2965, pruned_loss=0.0591, over 19777.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3139, pruned_loss=0.08607, over 3809453.60 frames. 
], batch size: 56, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,250 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 17:04:00,866 INFO [train.py:937] (1/4) Epoch 10, validation: loss=0.1811, simple_loss=0.2816, pruned_loss=0.04036, over 944034.00 frames. +2023-04-01 17:04:00,867 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 17:04:01,547 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 17:04:04,327 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 17:04:18,119 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 5.533e+02 6.661e+02 8.174e+02 1.809e+03, threshold=1.332e+03, percent-clipped=2.0 +2023-04-01 17:04:26,738 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 17:04:41,353 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64483.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:03,230 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:04,025 INFO [train.py:903] (1/4) Epoch 10, batch 3050, loss[loss=0.2331, simple_loss=0.3139, pruned_loss=0.07616, over 19534.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3136, pruned_loss=0.08563, over 3820105.52 frames. ], batch size: 56, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:05:48,594 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7354, 1.4807, 1.4727, 1.9906, 1.7406, 2.0656, 2.0220, 1.8700], + device='cuda:1'), covar=tensor([0.0840, 0.1004, 0.1037, 0.0919, 0.0864, 0.0698, 0.0899, 0.0695], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0253, 0.0239, 0.0216, 0.0202, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 17:05:57,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9086, 1.6599, 1.6073, 2.1406, 1.8943, 2.1366, 2.0981, 1.9391], + device='cuda:1'), covar=tensor([0.0678, 0.0811, 0.0853, 0.0738, 0.0758, 0.0613, 0.0805, 0.0578], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0254, 0.0240, 0.0216, 0.0202, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 17:05:57,692 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:06,595 INFO [train.py:903] (1/4) Epoch 10, batch 3100, loss[loss=0.2024, simple_loss=0.2744, pruned_loss=0.06518, over 19817.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3136, pruned_loss=0.08564, over 3817676.95 frames. 
], batch size: 47, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:06:22,857 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.961e+02 7.216e+02 8.690e+02 2.208e+03, threshold=1.443e+03, percent-clipped=3.0 +2023-04-01 17:06:27,950 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:40,848 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64578.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:01,270 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:10,478 INFO [train.py:903] (1/4) Epoch 10, batch 3150, loss[loss=0.2195, simple_loss=0.3034, pruned_loss=0.06783, over 18791.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3112, pruned_loss=0.08401, over 3826368.85 frames. ], batch size: 74, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:07:12,974 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:17,461 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:21,730 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9517, 4.4193, 4.6640, 4.6431, 1.5321, 4.2915, 3.7840, 4.3340], + device='cuda:1'), covar=tensor([0.1196, 0.0576, 0.0446, 0.0469, 0.4929, 0.0533, 0.0539, 0.0908], + device='cuda:1'), in_proj_covar=tensor([0.0645, 0.0581, 0.0768, 0.0645, 0.0713, 0.0518, 0.0476, 0.0712], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 17:07:31,624 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 17:07:39,764 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 17:08:10,683 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:12,597 INFO [train.py:903] (1/4) Epoch 10, batch 3200, loss[loss=0.1973, simple_loss=0.2711, pruned_loss=0.06172, over 19405.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3112, pruned_loss=0.08378, over 3835453.70 frames. ], batch size: 48, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:08:30,175 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.483e+02 6.754e+02 8.204e+02 1.644e+03, threshold=1.351e+03, percent-clipped=2.0 +2023-04-01 17:08:32,806 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:43,804 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:16,013 INFO [train.py:903] (1/4) Epoch 10, batch 3250, loss[loss=0.2169, simple_loss=0.291, pruned_loss=0.07138, over 19488.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3112, pruned_loss=0.08367, over 3822338.17 frames. 
], batch size: 49, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:09:24,035 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64707.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:26,663 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:42,846 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:20,326 INFO [train.py:903] (1/4) Epoch 10, batch 3300, loss[loss=0.2567, simple_loss=0.3296, pruned_loss=0.09188, over 19525.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3124, pruned_loss=0.08415, over 3826064.52 frames. ], batch size: 54, lr: 8.35e-03, grad_scale: 8.0 +2023-04-01 17:10:26,789 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:28,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 17:10:35,550 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:37,425 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.384e+02 6.605e+02 8.281e+02 2.311e+03, threshold=1.321e+03, percent-clipped=9.0 +2023-04-01 17:10:43,935 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:57,704 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:23,411 INFO [train.py:903] (1/4) Epoch 10, batch 3350, loss[loss=0.2772, simple_loss=0.3488, pruned_loss=0.1028, over 19541.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3107, pruned_loss=0.08296, over 3834408.12 frames. ], batch size: 56, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:11:49,526 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:55,093 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:12:04,573 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4933, 1.3685, 1.3055, 1.9169, 1.5225, 1.8397, 1.8383, 1.7315], + device='cuda:1'), covar=tensor([0.0857, 0.1000, 0.1069, 0.0813, 0.0881, 0.0753, 0.0856, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0227, 0.0225, 0.0253, 0.0239, 0.0215, 0.0201, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 17:12:24,009 INFO [train.py:903] (1/4) Epoch 10, batch 3400, loss[loss=0.2284, simple_loss=0.2946, pruned_loss=0.08109, over 15255.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3127, pruned_loss=0.08437, over 3821918.64 frames. ], batch size: 33, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:12:42,252 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.893e+02 5.724e+02 7.342e+02 8.665e+02 1.913e+03, threshold=1.468e+03, percent-clipped=6.0 +2023-04-01 17:13:27,825 INFO [train.py:903] (1/4) Epoch 10, batch 3450, loss[loss=0.252, simple_loss=0.324, pruned_loss=0.09001, over 19745.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3128, pruned_loss=0.08432, over 3826973.16 frames. 
], batch size: 51, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:13:35,091 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 17:13:36,885 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 17:14:20,594 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:30,654 INFO [train.py:903] (1/4) Epoch 10, batch 3500, loss[loss=0.2288, simple_loss=0.3032, pruned_loss=0.07725, over 19828.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3131, pruned_loss=0.08473, over 3831502.21 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:14:31,773 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:48,656 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 5.616e+02 6.822e+02 8.996e+02 1.764e+03, threshold=1.364e+03, percent-clipped=1.0 +2023-04-01 17:14:49,112 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:50,321 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2872, 1.4218, 1.7762, 1.4895, 2.4972, 2.1977, 2.7798, 1.1010], + device='cuda:1'), covar=tensor([0.2108, 0.3635, 0.2244, 0.1604, 0.1433, 0.1765, 0.1350, 0.3470], + device='cuda:1'), in_proj_covar=tensor([0.0485, 0.0569, 0.0588, 0.0430, 0.0589, 0.0486, 0.0648, 0.0486], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:15:05,373 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-01 17:15:19,929 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:25,301 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:34,047 INFO [train.py:903] (1/4) Epoch 10, batch 3550, loss[loss=0.239, simple_loss=0.2958, pruned_loss=0.09116, over 19751.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3115, pruned_loss=0.08369, over 3836550.44 frames. ], batch size: 47, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:15:44,360 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:13,757 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8661, 1.5294, 1.7145, 1.8341, 4.2111, 1.0171, 2.2730, 4.4070], + device='cuda:1'), covar=tensor([0.0357, 0.2690, 0.2730, 0.1806, 0.0769, 0.2744, 0.1517, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0330, 0.0347, 0.0318, 0.0341, 0.0329, 0.0321, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:16:34,666 INFO [train.py:903] (1/4) Epoch 10, batch 3600, loss[loss=0.2327, simple_loss=0.3018, pruned_loss=0.08184, over 19472.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3105, pruned_loss=0.08298, over 3844232.70 frames. 
], batch size: 49, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:16:50,081 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9965, 1.4837, 1.6246, 2.5354, 1.7203, 2.3305, 2.5698, 2.3174], + device='cuda:1'), covar=tensor([0.0858, 0.1071, 0.1051, 0.0927, 0.1059, 0.0768, 0.0891, 0.0674], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0251, 0.0240, 0.0215, 0.0200, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 17:16:52,000 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.791e+02 6.813e+02 8.568e+02 1.743e+03, threshold=1.363e+03, percent-clipped=4.0 +2023-04-01 17:16:52,169 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:53,525 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5680, 1.3395, 1.4778, 1.5080, 3.1187, 1.0393, 2.2471, 3.4708], + device='cuda:1'), covar=tensor([0.0416, 0.2356, 0.2489, 0.1664, 0.0747, 0.2340, 0.1168, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0330, 0.0347, 0.0316, 0.0341, 0.0327, 0.0321, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:16:53,570 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:02,292 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:08,111 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:35,954 INFO [train.py:903] (1/4) Epoch 10, batch 3650, loss[loss=0.2818, simple_loss=0.3556, pruned_loss=0.1041, over 17484.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3104, pruned_loss=0.08287, over 3849568.94 frames. 
], batch size: 101, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:17:38,638 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:42,760 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:44,238 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5196, 1.6888, 2.0881, 1.7927, 3.0427, 2.7286, 3.5438, 1.4487], + device='cuda:1'), covar=tensor([0.2136, 0.3708, 0.2273, 0.1706, 0.1586, 0.1751, 0.1686, 0.3607], + device='cuda:1'), in_proj_covar=tensor([0.0486, 0.0570, 0.0590, 0.0432, 0.0592, 0.0486, 0.0650, 0.0488], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:17:51,899 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:05,473 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:21,258 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7597, 1.5332, 1.4047, 1.7717, 1.5009, 1.5561, 1.3385, 1.6885], + device='cuda:1'), covar=tensor([0.0954, 0.1288, 0.1313, 0.0901, 0.1183, 0.0515, 0.1175, 0.0637], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0348, 0.0287, 0.0237, 0.0295, 0.0241, 0.0274, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:18:26,952 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 17:18:36,262 INFO [train.py:903] (1/4) Epoch 10, batch 3700, loss[loss=0.2391, simple_loss=0.3208, pruned_loss=0.07869, over 19676.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3109, pruned_loss=0.08358, over 3837951.00 frames. ], batch size: 60, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:18:47,859 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-01 17:18:53,963 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 5.629e+02 7.088e+02 8.754e+02 1.818e+03, threshold=1.418e+03, percent-clipped=5.0 +2023-04-01 17:19:05,500 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-04-01 17:19:12,161 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:21,096 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7986, 4.3926, 2.5351, 3.9082, 1.0785, 4.1278, 4.1696, 4.2560], + device='cuda:1'), covar=tensor([0.0542, 0.0937, 0.1903, 0.0708, 0.3681, 0.0676, 0.0724, 0.0854], + device='cuda:1'), in_proj_covar=tensor([0.0419, 0.0359, 0.0420, 0.0312, 0.0373, 0.0352, 0.0348, 0.0376], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 17:19:33,111 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8679, 4.2566, 4.5718, 4.5147, 1.5568, 4.2586, 3.6999, 4.2124], + device='cuda:1'), covar=tensor([0.1229, 0.0713, 0.0533, 0.0519, 0.5108, 0.0543, 0.0603, 0.1070], + device='cuda:1'), in_proj_covar=tensor([0.0636, 0.0575, 0.0765, 0.0647, 0.0704, 0.0520, 0.0473, 0.0705], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 17:19:33,232 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65198.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:37,048 INFO [train.py:903] (1/4) Epoch 10, batch 3750, loss[loss=0.2003, simple_loss=0.2675, pruned_loss=0.06653, over 19131.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3115, pruned_loss=0.08407, over 3826331.94 frames. ], batch size: 42, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:02,486 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:03,643 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:10,521 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:36,852 INFO [train.py:903] (1/4) Epoch 10, batch 3800, loss[loss=0.2101, simple_loss=0.2787, pruned_loss=0.07075, over 19757.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3111, pruned_loss=0.08392, over 3820712.34 frames. ], batch size: 46, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:53,996 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 5.620e+02 6.620e+02 8.005e+02 1.692e+03, threshold=1.324e+03, percent-clipped=4.0 +2023-04-01 17:21:06,342 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 17:21:22,121 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4049, 3.1216, 2.2004, 2.2004, 2.0216, 2.4396, 0.8599, 2.0846], + device='cuda:1'), covar=tensor([0.0427, 0.0389, 0.0479, 0.0788, 0.0799, 0.0872, 0.1002, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0325, 0.0325, 0.0322, 0.0343, 0.0416, 0.0341, 0.0303, 0.0319], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:21:37,758 INFO [train.py:903] (1/4) Epoch 10, batch 3850, loss[loss=0.2168, simple_loss=0.2875, pruned_loss=0.07302, over 19609.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.312, pruned_loss=0.08429, over 3810205.85 frames. 
], batch size: 52, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:21:40,425 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:03,660 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:21,616 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:33,979 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:37,970 INFO [train.py:903] (1/4) Epoch 10, batch 3900, loss[loss=0.2486, simple_loss=0.3193, pruned_loss=0.08895, over 18700.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3121, pruned_loss=0.08427, over 3802095.46 frames. ], batch size: 74, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:22:55,821 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 5.653e+02 7.180e+02 9.093e+02 1.633e+03, threshold=1.436e+03, percent-clipped=2.0 +2023-04-01 17:22:57,717 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.02 vs. limit=5.0 +2023-04-01 17:23:14,882 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65381.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:40,377 INFO [train.py:903] (1/4) Epoch 10, batch 3950, loss[loss=0.2497, simple_loss=0.3192, pruned_loss=0.09015, over 19382.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3118, pruned_loss=0.08372, over 3809338.37 frames. ], batch size: 70, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:23:40,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 17:23:46,512 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:58,951 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:23,308 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:41,317 INFO [train.py:903] (1/4) Epoch 10, batch 4000, loss[loss=0.2322, simple_loss=0.3032, pruned_loss=0.08057, over 19480.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3105, pruned_loss=0.08256, over 3825728.71 frames. ], batch size: 49, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:24:44,090 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65453.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:54,616 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65461.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:58,711 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.016e+02 5.448e+02 7.265e+02 9.508e+02 1.942e+03, threshold=1.453e+03, percent-clipped=2.0 +2023-04-01 17:25:14,565 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:23,065 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 17:25:23,367 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:41,796 INFO [train.py:903] (1/4) Epoch 10, batch 4050, loss[loss=0.301, simple_loss=0.3504, pruned_loss=0.1258, over 13120.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3112, pruned_loss=0.08317, over 3809956.88 frames. ], batch size: 136, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:25:45,307 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:52,229 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4095, 1.8602, 1.4352, 1.4185, 1.8022, 1.2102, 1.3180, 1.4672], + device='cuda:1'), covar=tensor([0.0668, 0.0549, 0.0711, 0.0519, 0.0373, 0.0877, 0.0486, 0.0393], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0298, 0.0326, 0.0243, 0.0233, 0.0320, 0.0291, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:25:53,331 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:19,253 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:40,654 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65550.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:26:41,421 INFO [train.py:903] (1/4) Epoch 10, batch 4100, loss[loss=0.3357, simple_loss=0.3795, pruned_loss=0.146, over 13736.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3119, pruned_loss=0.08402, over 3810123.63 frames. ], batch size: 136, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:26:59,038 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.925e+02 7.032e+02 8.463e+02 2.911e+03, threshold=1.406e+03, percent-clipped=6.0 +2023-04-01 17:27:11,836 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 17:27:42,541 INFO [train.py:903] (1/4) Epoch 10, batch 4150, loss[loss=0.2556, simple_loss=0.3295, pruned_loss=0.09084, over 17484.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3115, pruned_loss=0.08382, over 3805644.66 frames. 
], batch size: 101, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:03,940 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8316, 1.3965, 1.4090, 1.7497, 1.5799, 1.5573, 1.4118, 1.6599], + device='cuda:1'), covar=tensor([0.0880, 0.1385, 0.1422, 0.0811, 0.1075, 0.0530, 0.1197, 0.0686], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0352, 0.0292, 0.0240, 0.0298, 0.0245, 0.0277, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:28:10,369 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6998, 2.6821, 1.9827, 1.9799, 1.8090, 2.2463, 1.0362, 1.9409], + device='cuda:1'), covar=tensor([0.0491, 0.0430, 0.0471, 0.0669, 0.0776, 0.0779, 0.0872, 0.0690], + device='cuda:1'), in_proj_covar=tensor([0.0332, 0.0333, 0.0332, 0.0352, 0.0427, 0.0350, 0.0309, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:28:39,191 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:28:43,358 INFO [train.py:903] (1/4) Epoch 10, batch 4200, loss[loss=0.2468, simple_loss=0.3124, pruned_loss=0.09061, over 19687.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.311, pruned_loss=0.08323, over 3817288.21 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:43,401 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 17:28:59,413 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.075e+02 5.851e+02 6.725e+02 9.221e+02 2.199e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-01 17:29:02,678 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9632, 1.6022, 1.6305, 2.0493, 1.9189, 1.8253, 1.6487, 1.9674], + device='cuda:1'), covar=tensor([0.0890, 0.1587, 0.1307, 0.0886, 0.1122, 0.0468, 0.1122, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0349, 0.0289, 0.0237, 0.0294, 0.0243, 0.0274, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:29:42,453 INFO [train.py:903] (1/4) Epoch 10, batch 4250, loss[loss=0.2513, simple_loss=0.3269, pruned_loss=0.08787, over 18708.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3115, pruned_loss=0.08374, over 3817953.41 frames. ], batch size: 74, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:29:54,079 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:29:55,747 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 17:29:56,212 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9468, 1.9664, 2.0718, 2.7234, 1.8642, 2.5793, 2.4611, 2.0286], + device='cuda:1'), covar=tensor([0.3269, 0.2840, 0.1366, 0.1531, 0.3023, 0.1294, 0.2997, 0.2354], + device='cuda:1'), in_proj_covar=tensor([0.0769, 0.0787, 0.0638, 0.0880, 0.0767, 0.0690, 0.0780, 0.0697], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:30:06,597 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 17:30:22,811 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:30:43,320 INFO [train.py:903] (1/4) Epoch 10, batch 4300, loss[loss=0.3157, simple_loss=0.3636, pruned_loss=0.1339, over 12916.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3125, pruned_loss=0.08453, over 3826020.62 frames. ], batch size: 136, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:30:56,175 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:00,063 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 5.358e+02 7.223e+02 8.854e+02 2.636e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 17:31:27,975 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:35,310 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 17:31:43,144 INFO [train.py:903] (1/4) Epoch 10, batch 4350, loss[loss=0.2388, simple_loss=0.317, pruned_loss=0.08033, over 19299.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3144, pruned_loss=0.08548, over 3816462.21 frames. ], batch size: 70, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:31:58,612 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:32:03,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 17:32:44,686 INFO [train.py:903] (1/4) Epoch 10, batch 4400, loss[loss=0.2218, simple_loss=0.2943, pruned_loss=0.07461, over 19727.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3134, pruned_loss=0.08508, over 3822120.16 frames. ], batch size: 51, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:32:50,628 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65856.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:33:00,106 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.748e+02 7.554e+02 9.760e+02 1.805e+03, threshold=1.511e+03, percent-clipped=4.0 +2023-04-01 17:33:10,941 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 17:33:19,850 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 17:33:21,078 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4440, 0.9988, 1.2924, 1.1741, 1.9236, 0.8360, 2.0795, 2.1258], + device='cuda:1'), covar=tensor([0.0893, 0.3338, 0.3012, 0.1856, 0.1307, 0.2395, 0.0965, 0.0759], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0332, 0.0350, 0.0318, 0.0342, 0.0328, 0.0323, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:33:28,210 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 17:33:36,075 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65894.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:33:44,056 INFO [train.py:903] (1/4) Epoch 10, batch 4450, loss[loss=0.2517, simple_loss=0.3285, pruned_loss=0.08743, over 19159.00 frames. 
], tot_loss[loss=0.2415, simple_loss=0.3129, pruned_loss=0.08511, over 3826712.41 frames. ], batch size: 69, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:34:12,379 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 17:34:33,958 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4494, 1.2233, 1.2680, 1.3946, 2.2163, 1.0456, 1.8138, 2.3772], + device='cuda:1'), covar=tensor([0.0476, 0.2072, 0.2159, 0.1351, 0.0612, 0.1958, 0.1413, 0.0428], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0334, 0.0353, 0.0320, 0.0344, 0.0332, 0.0325, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:34:45,019 INFO [train.py:903] (1/4) Epoch 10, batch 4500, loss[loss=0.2493, simple_loss=0.3291, pruned_loss=0.08476, over 19317.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3136, pruned_loss=0.08512, over 3822479.47 frames. ], batch size: 66, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:35:01,306 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.376e+02 6.626e+02 8.325e+02 1.832e+03, threshold=1.325e+03, percent-clipped=3.0 +2023-04-01 17:35:47,221 INFO [train.py:903] (1/4) Epoch 10, batch 4550, loss[loss=0.2708, simple_loss=0.341, pruned_loss=0.1002, over 19744.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3141, pruned_loss=0.08512, over 3834109.50 frames. ], batch size: 63, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:35:56,020 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 17:35:56,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66009.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:36:02,809 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3573, 1.4076, 1.9057, 1.5454, 3.4522, 2.8749, 3.8628, 1.4510], + device='cuda:1'), covar=tensor([0.1961, 0.3508, 0.2179, 0.1479, 0.1148, 0.1485, 0.1100, 0.3174], + device='cuda:1'), in_proj_covar=tensor([0.0479, 0.0558, 0.0582, 0.0428, 0.0584, 0.0483, 0.0641, 0.0481], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:36:06,997 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:18,445 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 17:36:36,237 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:45,869 INFO [train.py:903] (1/4) Epoch 10, batch 4600, loss[loss=0.2771, simple_loss=0.3503, pruned_loss=0.102, over 19783.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3141, pruned_loss=0.0855, over 3831656.65 frames. ], batch size: 56, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:37:00,874 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 5.689e+02 6.858e+02 9.462e+02 1.667e+03, threshold=1.372e+03, percent-clipped=8.0 +2023-04-01 17:37:43,789 INFO [train.py:903] (1/4) Epoch 10, batch 4650, loss[loss=0.2536, simple_loss=0.3203, pruned_loss=0.09349, over 12986.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.315, pruned_loss=0.08597, over 3840459.82 frames. 
], batch size: 136, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:38:00,793 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 17:38:10,776 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 17:38:44,404 INFO [train.py:903] (1/4) Epoch 10, batch 4700, loss[loss=0.2392, simple_loss=0.3125, pruned_loss=0.08292, over 19734.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3144, pruned_loss=0.08578, over 3835037.73 frames. ], batch size: 63, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:38:57,788 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:39:00,970 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.007e+02 6.063e+02 7.892e+02 1.039e+03 2.104e+03, threshold=1.578e+03, percent-clipped=6.0 +2023-04-01 17:39:05,606 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 17:39:17,520 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66178.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:39:17,930 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.13 vs. limit=5.0 +2023-04-01 17:39:24,684 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 17:39:43,296 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66200.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:39:44,101 INFO [train.py:903] (1/4) Epoch 10, batch 4750, loss[loss=0.2106, simple_loss=0.279, pruned_loss=0.07107, over 19754.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.314, pruned_loss=0.08562, over 3843911.57 frames. ], batch size: 46, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:40:33,538 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8796, 1.1566, 1.4096, 0.6210, 2.0863, 2.4847, 2.1935, 2.5551], + device='cuda:1'), covar=tensor([0.1496, 0.3290, 0.2966, 0.2202, 0.0470, 0.0230, 0.0314, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0293, 0.0316, 0.0248, 0.0211, 0.0151, 0.0203, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:40:43,461 INFO [train.py:903] (1/4) Epoch 10, batch 4800, loss[loss=0.2341, simple_loss=0.2991, pruned_loss=0.08461, over 19408.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3132, pruned_loss=0.08535, over 3851117.01 frames. ], batch size: 48, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:41:01,216 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.914e+02 7.080e+02 8.206e+02 1.527e+03, threshold=1.416e+03, percent-clipped=0.0 +2023-04-01 17:41:01,686 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66265.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:41:15,546 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:41:31,327 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66290.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:41:44,760 INFO [train.py:903] (1/4) Epoch 10, batch 4850, loss[loss=0.2354, simple_loss=0.3146, pruned_loss=0.07814, over 19716.00 frames. 
], tot_loss[loss=0.2411, simple_loss=0.3125, pruned_loss=0.08486, over 3832079.31 frames. ], batch size: 63, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:41:58,682 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.82 vs. limit=5.0 +2023-04-01 17:42:01,807 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66315.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:42:10,351 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 17:42:30,018 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 17:42:35,763 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 17:42:35,785 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 17:42:45,284 INFO [train.py:903] (1/4) Epoch 10, batch 4900, loss[loss=0.2668, simple_loss=0.3369, pruned_loss=0.09834, over 18473.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3127, pruned_loss=0.08478, over 3815613.04 frames. ], batch size: 84, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:42:45,298 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 17:43:01,822 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.269e+02 9.008e+02 2.184e+03, threshold=1.454e+03, percent-clipped=11.0 +2023-04-01 17:43:03,045 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 17:43:14,216 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2158, 2.0264, 1.7423, 1.6465, 1.4519, 1.6973, 0.4030, 1.1029], + device='cuda:1'), covar=tensor([0.0363, 0.0404, 0.0317, 0.0467, 0.0844, 0.0525, 0.0820, 0.0649], + device='cuda:1'), in_proj_covar=tensor([0.0328, 0.0326, 0.0325, 0.0345, 0.0415, 0.0345, 0.0301, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:43:33,665 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 17:43:45,939 INFO [train.py:903] (1/4) Epoch 10, batch 4950, loss[loss=0.1848, simple_loss=0.2682, pruned_loss=0.05066, over 19736.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3131, pruned_loss=0.08458, over 3824772.18 frames. ], batch size: 51, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:02,376 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-01 17:44:16,778 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0583, 5.4163, 2.9862, 4.6939, 1.1639, 5.2312, 5.2888, 5.5065], + device='cuda:1'), covar=tensor([0.0391, 0.0876, 0.1766, 0.0654, 0.3776, 0.0562, 0.0626, 0.0810], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0360, 0.0425, 0.0316, 0.0375, 0.0354, 0.0349, 0.0383], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 17:44:19,200 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4593, 2.2367, 1.7934, 1.8062, 1.7788, 1.8746, 0.7130, 1.3084], + device='cuda:1'), covar=tensor([0.0342, 0.0424, 0.0351, 0.0485, 0.0736, 0.0582, 0.0753, 0.0667], + device='cuda:1'), in_proj_covar=tensor([0.0328, 0.0323, 0.0324, 0.0344, 0.0412, 0.0343, 0.0300, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 17:44:26,190 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 17:44:44,953 INFO [train.py:903] (1/4) Epoch 10, batch 5000, loss[loss=0.2837, simple_loss=0.3604, pruned_loss=0.1035, over 19730.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3136, pruned_loss=0.08495, over 3833935.57 frames. ], batch size: 63, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:56,395 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 17:45:01,937 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.419e+02 6.464e+02 8.305e+02 1.628e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 17:45:02,443 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3661, 1.6388, 2.0681, 1.5866, 3.3200, 2.6520, 3.7017, 1.4915], + device='cuda:1'), covar=tensor([0.2097, 0.3451, 0.2076, 0.1606, 0.1369, 0.1711, 0.1334, 0.3369], + device='cuda:1'), in_proj_covar=tensor([0.0479, 0.0566, 0.0585, 0.0430, 0.0586, 0.0487, 0.0643, 0.0484], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:45:06,363 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 17:45:45,181 INFO [train.py:903] (1/4) Epoch 10, batch 5050, loss[loss=0.2419, simple_loss=0.3142, pruned_loss=0.08482, over 19728.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3132, pruned_loss=0.08472, over 3843286.87 frames. ], batch size: 63, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:45:51,099 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:46:10,885 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66522.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:46:16,357 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8319, 1.3231, 1.0280, 0.9674, 1.1659, 0.9746, 0.7846, 1.2184], + device='cuda:1'), covar=tensor([0.0561, 0.0693, 0.0976, 0.0496, 0.0445, 0.1034, 0.0570, 0.0366], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0298, 0.0325, 0.0243, 0.0234, 0.0319, 0.0289, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:46:19,332 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-01 17:46:31,364 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66539.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:46:44,532 INFO [train.py:903] (1/4) Epoch 10, batch 5100, loss[loss=0.2479, simple_loss=0.3243, pruned_loss=0.08574, over 19530.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3133, pruned_loss=0.08519, over 3837342.92 frames. ], batch size: 56, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:46:56,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 17:46:57,213 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 17:46:58,791 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2781, 2.2814, 2.4090, 3.5047, 2.2387, 3.2968, 2.9885, 2.3699], + device='cuda:1'), covar=tensor([0.3467, 0.3093, 0.1365, 0.1537, 0.3466, 0.1275, 0.2779, 0.2447], + device='cuda:1'), in_proj_covar=tensor([0.0774, 0.0785, 0.0640, 0.0885, 0.0764, 0.0690, 0.0774, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 17:46:59,502 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 17:47:01,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.864e+02 7.142e+02 9.030e+02 2.803e+03, threshold=1.428e+03, percent-clipped=6.0 +2023-04-01 17:47:04,003 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 17:47:08,998 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66571.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:39,575 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66596.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:47:45,734 INFO [train.py:903] (1/4) Epoch 10, batch 5150, loss[loss=0.2448, simple_loss=0.3, pruned_loss=0.09479, over 19753.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3122, pruned_loss=0.08518, over 3832868.28 frames. ], batch size: 46, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:47:56,914 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 17:48:09,055 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:10,475 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:29,316 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66637.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:48:33,116 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 17:48:45,641 INFO [train.py:903] (1/4) Epoch 10, batch 5200, loss[loss=0.2895, simple_loss=0.341, pruned_loss=0.119, over 19302.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3123, pruned_loss=0.08478, over 3838986.37 frames. ], batch size: 66, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:49:00,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-01 17:49:02,887 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 5.688e+02 7.032e+02 8.430e+02 1.656e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-04-01 17:49:44,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 17:49:46,893 INFO [train.py:903] (1/4) Epoch 10, batch 5250, loss[loss=0.2092, simple_loss=0.2904, pruned_loss=0.06401, over 19609.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3118, pruned_loss=0.08462, over 3819254.63 frames. ], batch size: 61, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:49:59,399 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 17:50:28,121 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:50:44,987 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66749.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:50:47,099 INFO [train.py:903] (1/4) Epoch 10, batch 5300, loss[loss=0.1993, simple_loss=0.2703, pruned_loss=0.06412, over 16905.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3108, pruned_loss=0.08398, over 3804633.61 frames. ], batch size: 37, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:51:03,221 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 17:51:04,359 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.985e+02 7.858e+02 1.069e+03 1.957e+03, threshold=1.572e+03, percent-clipped=7.0 +2023-04-01 17:51:47,694 INFO [train.py:903] (1/4) Epoch 10, batch 5350, loss[loss=0.2364, simple_loss=0.3136, pruned_loss=0.07955, over 19703.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.312, pruned_loss=0.08451, over 3812056.44 frames. ], batch size: 59, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:52:18,694 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1138, 1.2459, 1.4475, 1.3232, 2.6668, 0.7859, 1.9563, 2.8985], + device='cuda:1'), covar=tensor([0.0447, 0.2602, 0.2556, 0.1620, 0.0715, 0.2481, 0.1105, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0333, 0.0349, 0.0313, 0.0341, 0.0330, 0.0319, 0.0342], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:52:19,367 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 17:52:46,931 INFO [train.py:903] (1/4) Epoch 10, batch 5400, loss[loss=0.2307, simple_loss=0.3112, pruned_loss=0.07508, over 19680.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3119, pruned_loss=0.08416, over 3818755.96 frames. 
], batch size: 58, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:05,689 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.633e+02 6.871e+02 8.512e+02 1.525e+03, threshold=1.374e+03, percent-clipped=0.0 +2023-04-01 17:53:18,085 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:53:26,395 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66883.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:53:37,766 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66893.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:53:47,836 INFO [train.py:903] (1/4) Epoch 10, batch 5450, loss[loss=0.2717, simple_loss=0.3335, pruned_loss=0.105, over 13648.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3115, pruned_loss=0.08437, over 3798665.45 frames. ], batch size: 136, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:49,463 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:54:08,222 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66918.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:54:21,771 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 17:54:47,620 INFO [train.py:903] (1/4) Epoch 10, batch 5500, loss[loss=0.2414, simple_loss=0.3105, pruned_loss=0.08613, over 19842.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3113, pruned_loss=0.0843, over 3820046.07 frames. ], batch size: 52, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:06,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.851e+02 7.428e+02 9.674e+02 2.066e+03, threshold=1.486e+03, percent-clipped=6.0 +2023-04-01 17:55:10,698 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 17:55:16,777 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:36,714 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:44,732 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66998.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:55:48,005 INFO [train.py:903] (1/4) Epoch 10, batch 5550, loss[loss=0.1996, simple_loss=0.2704, pruned_loss=0.06439, over 19606.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3113, pruned_loss=0.08407, over 3832421.73 frames. ], batch size: 50, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:54,300 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 17:56:05,776 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:56:11,216 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 17:56:42,308 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 17:56:47,795 INFO [train.py:903] (1/4) Epoch 10, batch 5600, loss[loss=0.2739, simple_loss=0.3378, pruned_loss=0.105, over 19531.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3126, pruned_loss=0.0849, over 3821109.11 frames. 
], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:06,481 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 5.918e+02 7.804e+02 1.015e+03 2.269e+03, threshold=1.561e+03, percent-clipped=7.0 +2023-04-01 17:57:38,374 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67093.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:57:47,807 INFO [train.py:903] (1/4) Epoch 10, batch 5650, loss[loss=0.2363, simple_loss=0.3034, pruned_loss=0.0846, over 19401.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3138, pruned_loss=0.08584, over 3829117.03 frames. ], batch size: 48, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:48,106 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:58:33,177 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 17:58:47,422 INFO [train.py:903] (1/4) Epoch 10, batch 5700, loss[loss=0.2525, simple_loss=0.3245, pruned_loss=0.09021, over 17637.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3141, pruned_loss=0.08584, over 3829339.12 frames. ], batch size: 101, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:05,253 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 6.145e+02 7.592e+02 1.064e+03 2.520e+03, threshold=1.518e+03, percent-clipped=7.0 +2023-04-01 17:59:32,594 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5181, 2.2316, 2.2208, 2.4840, 2.3838, 1.8986, 2.0020, 2.4530], + device='cuda:1'), covar=tensor([0.0940, 0.1778, 0.1444, 0.1050, 0.1442, 0.0726, 0.1349, 0.0789], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0350, 0.0288, 0.0237, 0.0292, 0.0241, 0.0275, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 17:59:47,163 INFO [train.py:903] (1/4) Epoch 10, batch 5750, loss[loss=0.251, simple_loss=0.3315, pruned_loss=0.08531, over 19629.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3131, pruned_loss=0.08522, over 3838241.66 frames. ], batch size: 57, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:48,371 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 17:59:57,174 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67208.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:59:57,967 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 18:00:02,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 18:00:47,882 INFO [train.py:903] (1/4) Epoch 10, batch 5800, loss[loss=0.2293, simple_loss=0.315, pruned_loss=0.07174, over 19618.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3139, pruned_loss=0.08514, over 3843680.42 frames. 
], batch size: 57, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 18:00:51,514 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67254.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:01:06,014 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.662e+02 5.628e+02 6.923e+02 8.923e+02 2.275e+03, threshold=1.385e+03, percent-clipped=6.0 +2023-04-01 18:01:20,796 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67279.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:01:47,312 INFO [train.py:903] (1/4) Epoch 10, batch 5850, loss[loss=0.2528, simple_loss=0.3287, pruned_loss=0.08849, over 19480.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3133, pruned_loss=0.0848, over 3847134.89 frames. ], batch size: 64, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:07,169 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9341, 4.4925, 2.7656, 4.0216, 1.0480, 4.1912, 4.2979, 4.3471], + device='cuda:1'), covar=tensor([0.0476, 0.0837, 0.1691, 0.0599, 0.3628, 0.0652, 0.0640, 0.0754], + device='cuda:1'), in_proj_covar=tensor([0.0419, 0.0356, 0.0420, 0.0308, 0.0372, 0.0352, 0.0345, 0.0376], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 18:02:09,356 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:02:48,429 INFO [train.py:903] (1/4) Epoch 10, batch 5900, loss[loss=0.1875, simple_loss=0.2678, pruned_loss=0.05357, over 19793.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3141, pruned_loss=0.0851, over 3836699.04 frames. ], batch size: 49, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:52,959 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 18:03:05,150 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.688e+02 6.588e+02 8.588e+02 1.646e+03, threshold=1.318e+03, percent-clipped=2.0 +2023-04-01 18:03:11,561 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 18:03:47,173 INFO [train.py:903] (1/4) Epoch 10, batch 5950, loss[loss=0.2372, simple_loss=0.3143, pruned_loss=0.0801, over 17346.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3129, pruned_loss=0.08387, over 3824400.67 frames. ], batch size: 101, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:04:26,265 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:38,305 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67445.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:45,816 INFO [train.py:903] (1/4) Epoch 10, batch 6000, loss[loss=0.2467, simple_loss=0.3277, pruned_loss=0.08283, over 19725.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3142, pruned_loss=0.08492, over 3828689.38 frames. ], batch size: 63, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:04:45,816 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 18:04:58,242 INFO [train.py:937] (1/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2805, pruned_loss=0.03952, over 944034.00 frames. 
+2023-04-01 18:04:58,243 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 18:05:07,593 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4749, 2.4151, 1.7765, 1.4575, 2.3011, 1.2959, 1.2044, 1.7720], + device='cuda:1'), covar=tensor([0.0885, 0.0609, 0.0913, 0.0741, 0.0400, 0.1096, 0.0778, 0.0446], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0300, 0.0328, 0.0248, 0.0236, 0.0319, 0.0291, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:05:15,092 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67464.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:05:17,958 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.414e+02 6.867e+02 8.657e+02 1.897e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 18:05:44,666 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67489.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:05:59,570 INFO [train.py:903] (1/4) Epoch 10, batch 6050, loss[loss=0.2279, simple_loss=0.3103, pruned_loss=0.07275, over 17118.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3157, pruned_loss=0.08608, over 3815862.28 frames. ], batch size: 101, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:06:35,124 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:06:59,147 INFO [train.py:903] (1/4) Epoch 10, batch 6100, loss[loss=0.2397, simple_loss=0.3116, pruned_loss=0.08392, over 19667.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3157, pruned_loss=0.08608, over 3818966.25 frames. ], batch size: 53, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:07:10,580 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:07:19,955 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.925e+02 6.739e+02 8.410e+02 2.337e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-01 18:07:23,153 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-01 18:07:59,354 INFO [train.py:903] (1/4) Epoch 10, batch 6150, loss[loss=0.283, simple_loss=0.339, pruned_loss=0.1135, over 19386.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3145, pruned_loss=0.0854, over 3819421.12 frames. ], batch size: 70, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:08:10,099 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.31 vs. limit=5.0 +2023-04-01 18:08:28,205 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 18:08:59,751 INFO [train.py:903] (1/4) Epoch 10, batch 6200, loss[loss=0.2825, simple_loss=0.3489, pruned_loss=0.1081, over 18188.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3135, pruned_loss=0.08462, over 3813740.74 frames. 
], batch size: 83, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:09:20,095 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.059e+02 6.776e+02 9.553e+02 2.024e+03, threshold=1.355e+03, percent-clipped=7.0 +2023-04-01 18:09:46,813 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:56,514 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:59,567 INFO [train.py:903] (1/4) Epoch 10, batch 6250, loss[loss=0.2715, simple_loss=0.3404, pruned_loss=0.1013, over 19099.00 frames. ], tot_loss[loss=0.242, simple_loss=0.314, pruned_loss=0.08501, over 3811453.47 frames. ], batch size: 69, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:10:09,915 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67710.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:15,805 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67715.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:30,840 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 18:10:59,324 INFO [train.py:903] (1/4) Epoch 10, batch 6300, loss[loss=0.2441, simple_loss=0.3132, pruned_loss=0.08754, over 19693.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3139, pruned_loss=0.08547, over 3817054.88 frames. ], batch size: 60, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:11:19,321 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 5.371e+02 6.798e+02 8.698e+02 1.915e+03, threshold=1.360e+03, percent-clipped=7.0 +2023-04-01 18:11:49,380 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 18:11:58,677 INFO [train.py:903] (1/4) Epoch 10, batch 6350, loss[loss=0.2471, simple_loss=0.3185, pruned_loss=0.08792, over 19372.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3152, pruned_loss=0.08623, over 3812886.90 frames. ], batch size: 47, lr: 8.16e-03, grad_scale: 4.0 +2023-04-01 18:12:16,848 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67816.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:47,280 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:58,574 INFO [train.py:903] (1/4) Epoch 10, batch 6400, loss[loss=0.2728, simple_loss=0.3405, pruned_loss=0.1025, over 19505.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3138, pruned_loss=0.0853, over 3810040.39 frames. ], batch size: 64, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:13:18,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.283e+02 6.601e+02 9.298e+02 1.582e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-01 18:13:26,990 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:13:59,327 INFO [train.py:903] (1/4) Epoch 10, batch 6450, loss[loss=0.2707, simple_loss=0.3305, pruned_loss=0.1054, over 19486.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3133, pruned_loss=0.08497, over 3808181.35 frames. 
], batch size: 64, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:14:09,732 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4013, 2.3271, 1.7424, 1.4692, 2.1705, 1.3154, 1.2898, 1.9291], + device='cuda:1'), covar=tensor([0.0951, 0.0663, 0.0950, 0.0737, 0.0411, 0.1055, 0.0666, 0.0400], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0298, 0.0328, 0.0246, 0.0234, 0.0317, 0.0286, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:14:42,669 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 18:14:59,316 INFO [train.py:903] (1/4) Epoch 10, batch 6500, loss[loss=0.2203, simple_loss=0.2862, pruned_loss=0.07718, over 19754.00 frames. ], tot_loss[loss=0.241, simple_loss=0.313, pruned_loss=0.08451, over 3820165.91 frames. ], batch size: 46, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:15:05,075 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 18:15:19,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.752e+02 5.758e+02 7.165e+02 9.309e+02 2.290e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 18:15:27,696 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4251, 1.4729, 2.0396, 1.7171, 3.4175, 2.8747, 3.6107, 1.5789], + device='cuda:1'), covar=tensor([0.2117, 0.3826, 0.2235, 0.1597, 0.1239, 0.1614, 0.1381, 0.3296], + device='cuda:1'), in_proj_covar=tensor([0.0482, 0.0568, 0.0587, 0.0432, 0.0589, 0.0488, 0.0646, 0.0487], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:15:27,740 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2236, 2.3197, 2.5068, 3.4118, 2.2392, 3.4185, 2.9682, 2.2776], + device='cuda:1'), covar=tensor([0.3569, 0.2986, 0.1290, 0.1664, 0.3504, 0.1242, 0.3062, 0.2512], + device='cuda:1'), in_proj_covar=tensor([0.0770, 0.0786, 0.0638, 0.0881, 0.0765, 0.0689, 0.0776, 0.0692], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:15:47,549 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:15:57,573 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:16:01,761 INFO [train.py:903] (1/4) Epoch 10, batch 6550, loss[loss=0.2481, simple_loss=0.322, pruned_loss=0.0871, over 19361.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3122, pruned_loss=0.08391, over 3816765.79 frames. ], batch size: 66, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:16:51,306 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:02,072 INFO [train.py:903] (1/4) Epoch 10, batch 6600, loss[loss=0.2059, simple_loss=0.2795, pruned_loss=0.06622, over 19736.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3121, pruned_loss=0.08358, over 3820245.89 frames. 
], batch size: 51, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:17:05,663 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68054.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:23,331 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.670e+02 5.913e+02 7.324e+02 8.709e+02 1.479e+03, threshold=1.465e+03, percent-clipped=1.0 +2023-04-01 18:17:49,969 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2032, 2.1218, 1.8179, 1.6714, 1.7515, 1.7691, 0.3964, 1.0104], + device='cuda:1'), covar=tensor([0.0399, 0.0384, 0.0283, 0.0463, 0.0747, 0.0503, 0.0824, 0.0734], + device='cuda:1'), in_proj_covar=tensor([0.0326, 0.0323, 0.0324, 0.0343, 0.0416, 0.0339, 0.0302, 0.0316], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:18:02,155 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2564, 1.3090, 1.2044, 1.0972, 1.0777, 1.0672, 0.0165, 0.3068], + device='cuda:1'), covar=tensor([0.0389, 0.0396, 0.0256, 0.0301, 0.0792, 0.0377, 0.0754, 0.0692], + device='cuda:1'), in_proj_covar=tensor([0.0327, 0.0324, 0.0325, 0.0344, 0.0417, 0.0341, 0.0303, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:18:02,821 INFO [train.py:903] (1/4) Epoch 10, batch 6650, loss[loss=0.2722, simple_loss=0.3264, pruned_loss=0.109, over 19759.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3125, pruned_loss=0.08408, over 3820260.38 frames. ], batch size: 54, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:18:07,714 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68105.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:18:24,118 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-01 18:19:02,659 INFO [train.py:903] (1/4) Epoch 10, batch 6700, loss[loss=0.1768, simple_loss=0.2563, pruned_loss=0.04862, over 19352.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3131, pruned_loss=0.08444, over 3821242.21 frames. ], batch size: 47, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:19:09,865 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:19:21,305 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-01 18:19:22,649 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.762e+02 5.919e+02 7.224e+02 9.287e+02 2.274e+03, threshold=1.445e+03, percent-clipped=4.0 +2023-04-01 18:19:24,182 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:19:28,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0055, 1.9069, 1.6770, 1.5020, 1.4500, 1.5295, 0.1784, 0.8201], + device='cuda:1'), covar=tensor([0.0400, 0.0439, 0.0290, 0.0443, 0.0835, 0.0543, 0.0882, 0.0761], + device='cuda:1'), in_proj_covar=tensor([0.0326, 0.0323, 0.0325, 0.0346, 0.0418, 0.0341, 0.0304, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:19:35,687 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-04-01 18:20:00,033 INFO [train.py:903] (1/4) Epoch 10, batch 6750, loss[loss=0.2394, simple_loss=0.3185, pruned_loss=0.08016, over 19549.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3129, pruned_loss=0.08451, over 3820738.98 frames. ], batch size: 56, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:20:16,563 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8917, 1.9982, 2.1162, 2.8294, 1.9408, 2.6969, 2.4380, 1.9974], + device='cuda:1'), covar=tensor([0.3689, 0.3029, 0.1373, 0.1837, 0.3349, 0.1404, 0.3231, 0.2573], + device='cuda:1'), in_proj_covar=tensor([0.0775, 0.0788, 0.0641, 0.0881, 0.0767, 0.0692, 0.0775, 0.0697], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:20:51,822 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:56,747 INFO [train.py:903] (1/4) Epoch 10, batch 6800, loss[loss=0.2077, simple_loss=0.2946, pruned_loss=0.06042, over 19611.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3125, pruned_loss=0.08424, over 3808752.57 frames. ], batch size: 57, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:21:15,043 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.210e+02 6.150e+02 7.610e+02 9.082e+02 1.904e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 18:21:18,279 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:21:40,721 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 18:21:41,163 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 18:21:44,654 INFO [train.py:903] (1/4) Epoch 11, batch 0, loss[loss=0.2256, simple_loss=0.3086, pruned_loss=0.07134, over 17218.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3086, pruned_loss=0.07134, over 17218.00 frames. ], batch size: 101, lr: 7.77e-03, grad_scale: 8.0 +2023-04-01 18:21:44,654 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 18:21:56,763 INFO [train.py:937] (1/4) Epoch 11, validation: loss=0.181, simple_loss=0.2818, pruned_loss=0.04012, over 944034.00 frames. +2023-04-01 18:21:56,763 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 18:22:09,361 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 18:22:57,935 INFO [train.py:903] (1/4) Epoch 11, batch 50, loss[loss=0.228, simple_loss=0.294, pruned_loss=0.08102, over 18636.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3125, pruned_loss=0.08369, over 863862.14 frames. 
], batch size: 41, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:22:58,158 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0976, 5.4209, 2.9553, 4.7892, 1.0884, 5.4289, 5.4209, 5.6178], + device='cuda:1'), covar=tensor([0.0394, 0.0895, 0.1795, 0.0615, 0.3918, 0.0512, 0.0533, 0.0816], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0358, 0.0425, 0.0313, 0.0375, 0.0355, 0.0348, 0.0379], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 18:23:15,260 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:23:23,486 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9841, 1.8590, 2.0246, 1.6369, 4.4358, 0.9588, 2.3646, 4.7270], + device='cuda:1'), covar=tensor([0.0361, 0.2467, 0.2404, 0.1840, 0.0701, 0.2710, 0.1378, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0348, 0.0333, 0.0349, 0.0313, 0.0344, 0.0328, 0.0324, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:23:35,570 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 18:23:46,894 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 5.747e+02 7.027e+02 9.557e+02 1.564e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 18:24:00,203 INFO [train.py:903] (1/4) Epoch 11, batch 100, loss[loss=0.2532, simple_loss=0.325, pruned_loss=0.09073, over 17367.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3076, pruned_loss=0.08097, over 1530020.68 frames. ], batch size: 101, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:24:13,686 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 18:24:42,554 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:47,941 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:57,058 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68425.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:01,040 INFO [train.py:903] (1/4) Epoch 11, batch 150, loss[loss=0.2426, simple_loss=0.2965, pruned_loss=0.09432, over 19743.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3125, pruned_loss=0.08515, over 2032099.87 frames. 
], batch size: 45, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:25:11,925 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:25,314 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68449.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:25:26,562 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68450.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:36,615 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:47,784 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.281e+02 6.719e+02 8.986e+02 1.619e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-01 18:26:00,680 INFO [train.py:903] (1/4) Epoch 11, batch 200, loss[loss=0.203, simple_loss=0.2777, pruned_loss=0.06413, over 19388.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3112, pruned_loss=0.08435, over 2435673.36 frames. ], batch size: 48, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:26:02,038 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 18:27:03,586 INFO [train.py:903] (1/4) Epoch 11, batch 250, loss[loss=0.3064, simple_loss=0.3591, pruned_loss=0.1269, over 13167.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3096, pruned_loss=0.08309, over 2749832.44 frames. ], batch size: 136, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:27:46,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68564.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:27:50,296 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2796, 1.1535, 1.6751, 1.2581, 2.7896, 3.7048, 3.4606, 3.9365], + device='cuda:1'), covar=tensor([0.1523, 0.3566, 0.2970, 0.2086, 0.0442, 0.0156, 0.0189, 0.0155], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0295, 0.0321, 0.0251, 0.0212, 0.0154, 0.0204, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:27:51,040 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.512e+02 6.613e+02 8.406e+02 1.798e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 18:28:07,068 INFO [train.py:903] (1/4) Epoch 11, batch 300, loss[loss=0.2922, simple_loss=0.351, pruned_loss=0.1167, over 13471.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.309, pruned_loss=0.08318, over 2990931.51 frames. ], batch size: 137, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:28:27,740 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:28:30,603 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-01 18:29:09,810 INFO [train.py:903] (1/4) Epoch 11, batch 350, loss[loss=0.243, simple_loss=0.3217, pruned_loss=0.08214, over 19729.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3091, pruned_loss=0.08282, over 3175133.30 frames. ], batch size: 63, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:29:16,666 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 18:29:40,809 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4192, 2.1910, 1.8632, 1.7469, 1.4280, 1.5826, 0.5340, 1.2190], + device='cuda:1'), covar=tensor([0.0356, 0.0409, 0.0359, 0.0565, 0.0862, 0.0685, 0.0841, 0.0744], + device='cuda:1'), in_proj_covar=tensor([0.0329, 0.0326, 0.0327, 0.0349, 0.0420, 0.0347, 0.0305, 0.0321], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:29:57,402 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.409e+02 6.235e+02 7.842e+02 1.948e+03, threshold=1.247e+03, percent-clipped=7.0 +2023-04-01 18:30:09,871 INFO [train.py:903] (1/4) Epoch 11, batch 400, loss[loss=0.2051, simple_loss=0.2749, pruned_loss=0.06766, over 19333.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3104, pruned_loss=0.08355, over 3322446.99 frames. ], batch size: 44, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:30:10,265 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2216, 1.9510, 1.5318, 1.3458, 1.7626, 1.1075, 1.3077, 1.6011], + device='cuda:1'), covar=tensor([0.0804, 0.0687, 0.0973, 0.0632, 0.0524, 0.1105, 0.0554, 0.0400], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0294, 0.0326, 0.0244, 0.0233, 0.0317, 0.0286, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:30:40,453 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:30:54,263 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:11,162 INFO [train.py:903] (1/4) Epoch 11, batch 450, loss[loss=0.2227, simple_loss=0.2903, pruned_loss=0.07759, over 19756.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.31, pruned_loss=0.08283, over 3449944.95 frames. ], batch size: 46, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:31:25,476 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68739.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:49,551 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 18:31:50,648 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 18:31:51,656 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:59,522 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.180e+02 6.491e+02 8.751e+02 1.660e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-01 18:32:13,197 INFO [train.py:903] (1/4) Epoch 11, batch 500, loss[loss=0.1728, simple_loss=0.2499, pruned_loss=0.04783, over 19749.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3099, pruned_loss=0.08268, over 3537187.26 frames. 
], batch size: 45, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:32:31,375 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3709, 1.5088, 1.8329, 1.6367, 3.0918, 2.4556, 3.2909, 1.5118], + device='cuda:1'), covar=tensor([0.2176, 0.3667, 0.2335, 0.1705, 0.1399, 0.1800, 0.1617, 0.3509], + device='cuda:1'), in_proj_covar=tensor([0.0479, 0.0565, 0.0588, 0.0432, 0.0585, 0.0485, 0.0644, 0.0485], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:33:02,552 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-01 18:33:05,583 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68820.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:33:11,256 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5612, 1.1787, 1.4045, 1.1455, 2.2080, 0.9033, 2.0207, 2.3476], + device='cuda:1'), covar=tensor([0.0677, 0.2567, 0.2428, 0.1615, 0.0910, 0.2127, 0.0890, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0336, 0.0351, 0.0319, 0.0347, 0.0335, 0.0331, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:33:17,981 INFO [train.py:903] (1/4) Epoch 11, batch 550, loss[loss=0.1803, simple_loss=0.2551, pruned_loss=0.05277, over 19721.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3084, pruned_loss=0.08162, over 3616123.41 frames. ], batch size: 45, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:36,721 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68845.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:34:06,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.206e+02 6.523e+02 8.559e+02 1.532e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 18:34:18,030 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:34:21,114 INFO [train.py:903] (1/4) Epoch 11, batch 600, loss[loss=0.2192, simple_loss=0.2949, pruned_loss=0.0718, over 19846.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3077, pruned_loss=0.08121, over 3666328.74 frames. ], batch size: 52, lr: 7.73e-03, grad_scale: 8.0 +2023-04-01 18:35:07,407 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 18:35:23,576 INFO [train.py:903] (1/4) Epoch 11, batch 650, loss[loss=0.2496, simple_loss=0.3203, pruned_loss=0.08945, over 17213.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3089, pruned_loss=0.08179, over 3698779.89 frames. ], batch size: 101, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:35:36,883 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:36:14,702 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 5.561e+02 7.157e+02 8.945e+02 2.089e+03, threshold=1.431e+03, percent-clipped=8.0 +2023-04-01 18:36:26,501 INFO [train.py:903] (1/4) Epoch 11, batch 700, loss[loss=0.2355, simple_loss=0.3095, pruned_loss=0.08076, over 19581.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3093, pruned_loss=0.08184, over 3720689.12 frames. 
], batch size: 52, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:36:46,877 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4005, 1.6344, 2.2443, 1.8492, 2.9490, 2.4473, 3.1366, 1.4799], + device='cuda:1'), covar=tensor([0.2364, 0.3884, 0.2318, 0.1740, 0.1683, 0.2023, 0.2008, 0.3617], + device='cuda:1'), in_proj_covar=tensor([0.0481, 0.0566, 0.0585, 0.0432, 0.0580, 0.0486, 0.0641, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:37:00,609 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7348, 1.8801, 1.6190, 2.8428, 1.8572, 2.5836, 1.9380, 1.4097], + device='cuda:1'), covar=tensor([0.4180, 0.3430, 0.2231, 0.2213, 0.3934, 0.1731, 0.4668, 0.4062], + device='cuda:1'), in_proj_covar=tensor([0.0781, 0.0795, 0.0642, 0.0889, 0.0776, 0.0698, 0.0783, 0.0703], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:37:30,468 INFO [train.py:903] (1/4) Epoch 11, batch 750, loss[loss=0.2455, simple_loss=0.3138, pruned_loss=0.08857, over 19681.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3087, pruned_loss=0.08148, over 3743355.18 frames. ], batch size: 53, lr: 7.72e-03, grad_scale: 4.0 +2023-04-01 18:37:44,705 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8919, 4.9016, 5.6717, 5.6450, 2.1201, 5.3142, 4.5113, 5.2284], + device='cuda:1'), covar=tensor([0.1173, 0.0789, 0.0477, 0.0447, 0.4815, 0.0513, 0.0512, 0.1004], + device='cuda:1'), in_proj_covar=tensor([0.0659, 0.0596, 0.0782, 0.0667, 0.0720, 0.0542, 0.0482, 0.0726], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 18:37:52,521 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69047.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:01,922 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:09,126 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:20,853 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.289e+02 6.423e+02 8.063e+02 1.861e+03, threshold=1.285e+03, percent-clipped=2.0 +2023-04-01 18:38:33,459 INFO [train.py:903] (1/4) Epoch 11, batch 800, loss[loss=0.243, simple_loss=0.3169, pruned_loss=0.08455, over 19695.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3104, pruned_loss=0.08224, over 3757231.99 frames. ], batch size: 60, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:38:35,044 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7555, 4.3072, 2.4708, 3.8279, 1.1878, 4.1463, 4.0659, 4.1746], + device='cuda:1'), covar=tensor([0.0597, 0.1105, 0.2084, 0.0714, 0.3740, 0.0723, 0.0742, 0.0928], + device='cuda:1'), in_proj_covar=tensor([0.0424, 0.0362, 0.0430, 0.0316, 0.0373, 0.0361, 0.0349, 0.0382], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 18:38:40,023 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 18:38:49,985 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 18:39:34,228 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5446, 4.0820, 2.5231, 3.6492, 0.9404, 3.8655, 3.8398, 3.9564], + device='cuda:1'), covar=tensor([0.0588, 0.0993, 0.1958, 0.0732, 0.3918, 0.0779, 0.0760, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0362, 0.0429, 0.0315, 0.0373, 0.0361, 0.0349, 0.0382], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 18:39:35,201 INFO [train.py:903] (1/4) Epoch 11, batch 850, loss[loss=0.2493, simple_loss=0.3271, pruned_loss=0.08574, over 19754.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3091, pruned_loss=0.08173, over 3781662.22 frames. ], batch size: 63, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:39:39,344 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:39:41,071 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 18:40:04,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7844, 1.8069, 1.9079, 2.4475, 1.8253, 2.3534, 2.1549, 1.8413], + device='cuda:1'), covar=tensor([0.2783, 0.2287, 0.1134, 0.1261, 0.2414, 0.1053, 0.2382, 0.1921], + device='cuda:1'), in_proj_covar=tensor([0.0779, 0.0795, 0.0641, 0.0886, 0.0771, 0.0698, 0.0780, 0.0704], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 18:40:09,397 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7490, 4.0675, 4.4652, 4.5525, 1.6752, 4.2595, 3.7440, 3.8857], + device='cuda:1'), covar=tensor([0.1662, 0.1185, 0.0706, 0.0820, 0.5648, 0.1102, 0.0896, 0.1476], + device='cuda:1'), in_proj_covar=tensor([0.0651, 0.0589, 0.0772, 0.0660, 0.0711, 0.0531, 0.0476, 0.0716], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 18:40:12,592 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:18,314 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:26,035 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.808e+02 7.359e+02 9.451e+02 2.011e+03, threshold=1.472e+03, percent-clipped=12.0 +2023-04-01 18:40:32,120 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 18:40:38,097 INFO [train.py:903] (1/4) Epoch 11, batch 900, loss[loss=0.2132, simple_loss=0.2868, pruned_loss=0.06982, over 19729.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3102, pruned_loss=0.08265, over 3781804.70 frames. ], batch size: 51, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:40:39,676 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:48,350 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:41:28,503 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 18:41:42,668 INFO [train.py:903] (1/4) Epoch 11, batch 950, loss[loss=0.2448, simple_loss=0.315, pruned_loss=0.08727, over 19541.00 frames. 
], tot_loss[loss=0.2372, simple_loss=0.3096, pruned_loss=0.08236, over 3801448.83 frames. ], batch size: 56, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:41:49,497 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 18:42:01,240 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:42:11,247 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 18:42:24,842 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69263.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:42:33,740 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.204e+02 6.315e+02 7.601e+02 2.315e+03, threshold=1.263e+03, percent-clipped=1.0 +2023-04-01 18:42:47,232 INFO [train.py:903] (1/4) Epoch 11, batch 1000, loss[loss=0.2578, simple_loss=0.3233, pruned_loss=0.09616, over 19762.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3104, pruned_loss=0.08306, over 3808288.75 frames. ], batch size: 54, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:01,211 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:27,050 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:43,501 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 18:43:48,901 INFO [train.py:903] (1/4) Epoch 11, batch 1050, loss[loss=0.2874, simple_loss=0.3451, pruned_loss=0.1149, over 14070.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3104, pruned_loss=0.08263, over 3818820.47 frames. 
], batch size: 136, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:57,031 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,186 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7873, 4.3390, 2.5803, 3.8088, 1.0127, 4.0958, 4.0754, 4.1353], + device='cuda:1'), covar=tensor([0.0554, 0.0933, 0.1907, 0.0728, 0.3993, 0.0715, 0.0773, 0.0909], + device='cuda:1'), in_proj_covar=tensor([0.0423, 0.0361, 0.0428, 0.0312, 0.0375, 0.0361, 0.0350, 0.0383], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 18:43:59,276 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,411 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1114, 2.0717, 1.7739, 1.5469, 1.3441, 1.6054, 0.4090, 1.0809], + device='cuda:1'), covar=tensor([0.0604, 0.0606, 0.0472, 0.0821, 0.1276, 0.0858, 0.1110, 0.1001], + device='cuda:1'), in_proj_covar=tensor([0.0336, 0.0329, 0.0331, 0.0352, 0.0425, 0.0348, 0.0309, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 18:44:02,784 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3764, 2.1870, 1.5695, 1.4458, 2.1114, 1.2013, 1.3216, 1.7447], + device='cuda:1'), covar=tensor([0.0968, 0.0647, 0.0996, 0.0715, 0.0452, 0.1148, 0.0706, 0.0475], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0301, 0.0328, 0.0246, 0.0237, 0.0318, 0.0289, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:44:24,560 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 18:44:38,147 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.222e+02 6.475e+02 7.672e+02 1.315e+03, threshold=1.295e+03, percent-clipped=1.0 +2023-04-01 18:44:49,369 INFO [train.py:903] (1/4) Epoch 11, batch 1100, loss[loss=0.2306, simple_loss=0.3007, pruned_loss=0.0803, over 19480.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3105, pruned_loss=0.08294, over 3809629.14 frames. ], batch size: 49, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:44:57,869 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:24,098 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69405.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:32,712 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:39,607 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:53,089 INFO [train.py:903] (1/4) Epoch 11, batch 1150, loss[loss=0.2042, simple_loss=0.2948, pruned_loss=0.0568, over 19543.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3098, pruned_loss=0.08264, over 3800208.70 frames. ], batch size: 54, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:45:59,910 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. 
limit=2.0 +2023-04-01 18:46:11,275 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:46:41,981 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.885e+02 7.181e+02 8.495e+02 1.651e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-01 18:46:55,765 INFO [train.py:903] (1/4) Epoch 11, batch 1200, loss[loss=0.2798, simple_loss=0.3474, pruned_loss=0.1061, over 19587.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3097, pruned_loss=0.08267, over 3809477.25 frames. ], batch size: 61, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:46:59,310 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7010, 1.4412, 1.4974, 2.2671, 1.7592, 2.0409, 2.1653, 1.9054], + device='cuda:1'), covar=tensor([0.0737, 0.0962, 0.0969, 0.0747, 0.0812, 0.0670, 0.0765, 0.0635], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0227, 0.0224, 0.0250, 0.0238, 0.0213, 0.0199, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 18:47:27,288 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 18:47:48,985 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:53,392 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:58,998 INFO [train.py:903] (1/4) Epoch 11, batch 1250, loss[loss=0.2228, simple_loss=0.2902, pruned_loss=0.07766, over 19775.00 frames. ], tot_loss[loss=0.238, simple_loss=0.31, pruned_loss=0.08296, over 3804429.76 frames. ], batch size: 54, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:48:00,172 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:48:49,513 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.575e+02 6.899e+02 8.428e+02 1.860e+03, threshold=1.380e+03, percent-clipped=3.0 +2023-04-01 18:49:00,823 INFO [train.py:903] (1/4) Epoch 11, batch 1300, loss[loss=0.2337, simple_loss=0.3168, pruned_loss=0.07526, over 19343.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3107, pruned_loss=0.08317, over 3809141.11 frames. ], batch size: 70, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:49:10,185 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:49:38,652 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69607.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:50:04,325 INFO [train.py:903] (1/4) Epoch 11, batch 1350, loss[loss=0.2684, simple_loss=0.3443, pruned_loss=0.09628, over 18784.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3099, pruned_loss=0.08264, over 3805202.19 frames. 
], batch size: 74, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:50:13,536 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:19,142 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:26,795 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:54,484 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.709e+02 6.895e+02 8.476e+02 1.895e+03, threshold=1.379e+03, percent-clipped=5.0 +2023-04-01 18:51:08,229 INFO [train.py:903] (1/4) Epoch 11, batch 1400, loss[loss=0.2278, simple_loss=0.2998, pruned_loss=0.07788, over 19592.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3092, pruned_loss=0.08218, over 3807401.13 frames. ], batch size: 50, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:51:11,993 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69682.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:51:25,017 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69692.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:51:36,494 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69702.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:03,576 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:52:09,961 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 18:52:10,950 INFO [train.py:903] (1/4) Epoch 11, batch 1450, loss[loss=0.2441, simple_loss=0.3308, pruned_loss=0.07871, over 19584.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3105, pruned_loss=0.08264, over 3810432.99 frames. ], batch size: 61, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:52:12,377 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:36,767 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:44,800 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:01,501 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.344e+02 6.531e+02 8.357e+02 2.062e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 18:53:10,378 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:13,431 INFO [train.py:903] (1/4) Epoch 11, batch 1500, loss[loss=0.1822, simple_loss=0.2578, pruned_loss=0.05328, over 19709.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3102, pruned_loss=0.0826, over 3808568.61 frames. 
], batch size: 46, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:53:34,495 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5081, 1.3188, 1.3096, 1.9066, 1.4469, 1.9410, 1.8742, 1.6501], + device='cuda:1'), covar=tensor([0.0869, 0.0975, 0.1035, 0.0766, 0.0873, 0.0597, 0.0797, 0.0691], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0226, 0.0222, 0.0250, 0.0237, 0.0214, 0.0200, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 18:53:36,846 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:43,097 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:54:15,942 INFO [train.py:903] (1/4) Epoch 11, batch 1550, loss[loss=0.2378, simple_loss=0.3214, pruned_loss=0.07708, over 19532.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3096, pruned_loss=0.08182, over 3800210.21 frames. ], batch size: 54, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:54:38,367 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:07,632 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 5.814e+02 6.999e+02 8.951e+02 2.972e+03, threshold=1.400e+03, percent-clipped=5.0 +2023-04-01 18:55:09,171 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:20,439 INFO [train.py:903] (1/4) Epoch 11, batch 1600, loss[loss=0.1826, simple_loss=0.2611, pruned_loss=0.05208, over 19743.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3098, pruned_loss=0.08179, over 3806717.63 frames. ], batch size: 47, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:55:41,208 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:46,726 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 18:55:48,168 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:12,825 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69920.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:20,542 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:23,825 INFO [train.py:903] (1/4) Epoch 11, batch 1650, loss[loss=0.235, simple_loss=0.312, pruned_loss=0.07894, over 19775.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3105, pruned_loss=0.08217, over 3808277.15 frames. 
], batch size: 54, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:56:48,514 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9108, 4.3816, 2.7346, 3.9366, 0.8567, 4.2313, 4.2750, 4.2707], + device='cuda:1'), covar=tensor([0.0525, 0.0938, 0.1723, 0.0684, 0.4042, 0.0637, 0.0702, 0.0877], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0365, 0.0430, 0.0315, 0.0376, 0.0364, 0.0356, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 18:57:00,763 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69958.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:16,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 5.294e+02 6.504e+02 8.314e+02 1.576e+03, threshold=1.301e+03, percent-clipped=2.0 +2023-04-01 18:57:21,095 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 18:57:26,304 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69978.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:57:27,006 INFO [train.py:903] (1/4) Epoch 11, batch 1700, loss[loss=0.2305, simple_loss=0.3119, pruned_loss=0.07458, over 19565.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3092, pruned_loss=0.08156, over 3817249.15 frames. ], batch size: 56, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:57:32,069 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:59,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70003.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:58:03,166 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:09,523 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 18:58:29,317 INFO [train.py:903] (1/4) Epoch 11, batch 1750, loss[loss=0.2586, simple_loss=0.3281, pruned_loss=0.09458, over 19610.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3094, pruned_loss=0.08234, over 3820346.09 frames. ], batch size: 57, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:58:32,022 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:39,012 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70036.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:59:02,463 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70053.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:11,018 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 18:59:22,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 6.480e+02 8.127e+02 9.904e+02 1.581e+03, threshold=1.625e+03, percent-clipped=5.0 +2023-04-01 18:59:34,161 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:34,850 INFO [train.py:903] (1/4) Epoch 11, batch 1800, loss[loss=0.2632, simple_loss=0.3415, pruned_loss=0.09245, over 19693.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3086, pruned_loss=0.08191, over 3822125.53 frames. 
], batch size: 59, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 19:00:02,675 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:08,719 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-01 19:00:34,727 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:35,522 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 19:00:35,947 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:37,728 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:38,700 INFO [train.py:903] (1/4) Epoch 11, batch 1850, loss[loss=0.2551, simple_loss=0.317, pruned_loss=0.09661, over 13665.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3082, pruned_loss=0.0815, over 3821073.81 frames. ], batch size: 136, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:05,176 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70151.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:01:06,351 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:01:13,659 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 19:01:30,924 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.171e+02 6.551e+02 7.968e+02 1.780e+03, threshold=1.310e+03, percent-clipped=1.0 +2023-04-01 19:01:41,531 INFO [train.py:903] (1/4) Epoch 11, batch 1900, loss[loss=0.2106, simple_loss=0.2905, pruned_loss=0.06533, over 19781.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3086, pruned_loss=0.08139, over 3827669.31 frames. ], batch size: 56, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:58,102 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 19:02:04,767 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 19:02:28,868 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 19:02:42,956 INFO [train.py:903] (1/4) Epoch 11, batch 1950, loss[loss=0.2664, simple_loss=0.3393, pruned_loss=0.0968, over 19047.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3095, pruned_loss=0.08236, over 3821572.35 frames. ], batch size: 69, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:03:35,384 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.987e+02 6.114e+02 7.804e+02 3.131e+03, threshold=1.223e+03, percent-clipped=9.0 +2023-04-01 19:03:37,258 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-04-01 19:03:43,625 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2251, 3.7582, 2.3756, 2.2584, 3.3801, 1.8708, 1.4326, 2.1750], + device='cuda:1'), covar=tensor([0.0982, 0.0341, 0.0765, 0.0675, 0.0372, 0.0965, 0.0823, 0.0523], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0293, 0.0321, 0.0242, 0.0231, 0.0314, 0.0284, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:03:45,801 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:03:46,665 INFO [train.py:903] (1/4) Epoch 11, batch 2000, loss[loss=0.2265, simple_loss=0.2942, pruned_loss=0.07939, over 19081.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3086, pruned_loss=0.08113, over 3830217.20 frames. ], batch size: 42, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:04:48,750 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 19:04:51,060 INFO [train.py:903] (1/4) Epoch 11, batch 2050, loss[loss=0.251, simple_loss=0.331, pruned_loss=0.08553, over 19518.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3079, pruned_loss=0.08055, over 3821401.61 frames. ], batch size: 64, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:05:07,061 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 19:05:08,146 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 19:05:27,814 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 19:05:43,672 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.481e+02 7.126e+02 9.531e+02 2.002e+03, threshold=1.425e+03, percent-clipped=10.0 +2023-04-01 19:05:54,409 INFO [train.py:903] (1/4) Epoch 11, batch 2100, loss[loss=0.3308, simple_loss=0.3696, pruned_loss=0.146, over 13327.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3088, pruned_loss=0.0815, over 3807403.18 frames. ], batch size: 136, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:06:24,769 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 19:06:30,839 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70407.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:06:48,496 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 19:06:57,558 INFO [train.py:903] (1/4) Epoch 11, batch 2150, loss[loss=0.2249, simple_loss=0.3004, pruned_loss=0.07468, over 19580.00 frames. ], tot_loss[loss=0.236, simple_loss=0.309, pruned_loss=0.08156, over 3812430.23 frames. 
], batch size: 52, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:07:01,569 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70432.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:07:51,371 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.585e+02 6.733e+02 8.759e+02 1.859e+03, threshold=1.347e+03, percent-clipped=4.0 +2023-04-01 19:07:52,830 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:08:02,115 INFO [train.py:903] (1/4) Epoch 11, batch 2200, loss[loss=0.2586, simple_loss=0.3224, pruned_loss=0.09737, over 19632.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3099, pruned_loss=0.08213, over 3811397.30 frames. ], batch size: 50, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:09:06,387 INFO [train.py:903] (1/4) Epoch 11, batch 2250, loss[loss=0.2361, simple_loss=0.3129, pruned_loss=0.07966, over 17127.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.31, pruned_loss=0.08232, over 3802947.78 frames. ], batch size: 101, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:01,004 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.990e+02 6.839e+02 8.349e+02 1.575e+03, threshold=1.368e+03, percent-clipped=2.0 +2023-04-01 19:10:10,422 INFO [train.py:903] (1/4) Epoch 11, batch 2300, loss[loss=0.2155, simple_loss=0.2887, pruned_loss=0.07112, over 19662.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3099, pruned_loss=0.08244, over 3804771.20 frames. ], batch size: 53, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:19,777 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:10:23,136 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 19:11:05,023 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:11:13,191 INFO [train.py:903] (1/4) Epoch 11, batch 2350, loss[loss=0.2805, simple_loss=0.3413, pruned_loss=0.1098, over 17438.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3106, pruned_loss=0.0826, over 3798901.14 frames. ], batch size: 101, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:11:26,861 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5773, 1.6760, 1.8227, 2.1630, 1.5131, 1.9118, 2.0269, 1.7264], + device='cuda:1'), covar=tensor([0.3335, 0.2629, 0.1392, 0.1470, 0.2836, 0.1362, 0.3439, 0.2627], + device='cuda:1'), in_proj_covar=tensor([0.0788, 0.0799, 0.0644, 0.0887, 0.0776, 0.0694, 0.0779, 0.0706], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 19:11:56,882 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 19:12:06,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.270e+02 6.524e+02 8.351e+02 2.247e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 19:12:12,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 19:12:16,796 INFO [train.py:903] (1/4) Epoch 11, batch 2400, loss[loss=0.2282, simple_loss=0.3085, pruned_loss=0.07389, over 19665.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.31, pruned_loss=0.08237, over 3806098.81 frames. 
], batch size: 55, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:12:58,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1354, 1.2160, 1.7978, 1.1333, 2.5733, 3.5981, 3.2613, 3.7200], + device='cuda:1'), covar=tensor([0.1598, 0.3452, 0.2865, 0.2159, 0.0501, 0.0141, 0.0192, 0.0174], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0295, 0.0320, 0.0251, 0.0215, 0.0155, 0.0205, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:13:20,240 INFO [train.py:903] (1/4) Epoch 11, batch 2450, loss[loss=0.2194, simple_loss=0.2977, pruned_loss=0.0706, over 19586.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3098, pruned_loss=0.08221, over 3821737.81 frames. ], batch size: 52, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:13:32,435 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:13:44,523 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1060, 2.2941, 2.4638, 3.0068, 2.9341, 2.4546, 2.2090, 2.9369], + device='cuda:1'), covar=tensor([0.0614, 0.1541, 0.1204, 0.0926, 0.1023, 0.0424, 0.1119, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0347, 0.0287, 0.0238, 0.0296, 0.0242, 0.0276, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:14:14,278 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.717e+02 6.752e+02 8.721e+02 4.215e+03, threshold=1.350e+03, percent-clipped=8.0 +2023-04-01 19:14:25,717 INFO [train.py:903] (1/4) Epoch 11, batch 2500, loss[loss=0.2643, simple_loss=0.3339, pruned_loss=0.09739, over 19308.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.0812, over 3831745.96 frames. ], batch size: 66, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:27,565 INFO [train.py:903] (1/4) Epoch 11, batch 2550, loss[loss=0.2312, simple_loss=0.3128, pruned_loss=0.07484, over 19391.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3084, pruned_loss=0.08095, over 3848588.84 frames. ], batch size: 70, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:45,706 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:17,533 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:20,506 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.586e+02 5.428e+02 6.511e+02 7.969e+02 1.423e+03, threshold=1.302e+03, percent-clipped=4.0 +2023-04-01 19:16:26,480 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 19:16:30,147 INFO [train.py:903] (1/4) Epoch 11, batch 2600, loss[loss=0.2578, simple_loss=0.3306, pruned_loss=0.09255, over 19676.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3073, pruned_loss=0.08011, over 3851689.68 frames. ], batch size: 58, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:06,533 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 19:17:32,112 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-01 19:17:34,834 INFO [train.py:903] (1/4) Epoch 11, batch 2650, loss[loss=0.2528, simple_loss=0.3214, pruned_loss=0.09213, over 19539.00 frames. 
], tot_loss[loss=0.2349, simple_loss=0.3081, pruned_loss=0.08087, over 3837608.71 frames. ], batch size: 54, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:56,630 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 19:18:20,080 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1350, 2.1910, 2.2467, 3.2417, 2.2117, 3.1382, 2.6752, 2.0650], + device='cuda:1'), covar=tensor([0.3750, 0.3356, 0.1518, 0.1992, 0.3800, 0.1508, 0.3533, 0.2835], + device='cuda:1'), in_proj_covar=tensor([0.0792, 0.0804, 0.0650, 0.0894, 0.0778, 0.0698, 0.0787, 0.0709], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 19:18:28,055 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 5.322e+02 6.264e+02 9.085e+02 2.401e+03, threshold=1.253e+03, percent-clipped=10.0 +2023-04-01 19:18:39,383 INFO [train.py:903] (1/4) Epoch 11, batch 2700, loss[loss=0.1947, simple_loss=0.2601, pruned_loss=0.06464, over 19704.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3086, pruned_loss=0.08124, over 3825618.71 frames. ], batch size: 45, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:18:56,202 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70993.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:18,181 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:30,024 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:43,442 INFO [train.py:903] (1/4) Epoch 11, batch 2750, loss[loss=0.2465, simple_loss=0.3181, pruned_loss=0.08742, over 13890.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3087, pruned_loss=0.08102, over 3811558.67 frames. ], batch size: 135, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:20:08,222 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:20:37,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.301e+02 6.565e+02 7.951e+02 1.458e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 19:20:45,877 INFO [train.py:903] (1/4) Epoch 11, batch 2800, loss[loss=0.2375, simple_loss=0.3135, pruned_loss=0.08076, over 19331.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3104, pruned_loss=0.08197, over 3822482.16 frames. ], batch size: 66, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:21:18,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0507, 1.3252, 1.4827, 1.4584, 2.6217, 0.9482, 1.9151, 2.9579], + device='cuda:1'), covar=tensor([0.0522, 0.2585, 0.2528, 0.1570, 0.0738, 0.2405, 0.1179, 0.0331], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0336, 0.0351, 0.0318, 0.0342, 0.0332, 0.0327, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:21:51,269 INFO [train.py:903] (1/4) Epoch 11, batch 2850, loss[loss=0.2159, simple_loss=0.2917, pruned_loss=0.07011, over 19673.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3099, pruned_loss=0.08146, over 3817960.31 frames. 
], batch size: 53, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:22:41,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6654, 1.3816, 1.3302, 1.8700, 1.4581, 1.9108, 1.8806, 1.6629], + device='cuda:1'), covar=tensor([0.0760, 0.0945, 0.1034, 0.0797, 0.0872, 0.0663, 0.0855, 0.0650], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0226, 0.0222, 0.0249, 0.0236, 0.0213, 0.0196, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 19:22:45,077 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.466e+02 6.713e+02 8.640e+02 2.236e+03, threshold=1.343e+03, percent-clipped=7.0 +2023-04-01 19:22:49,017 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9935, 5.3585, 2.8410, 4.7294, 1.1876, 5.3494, 5.2685, 5.4938], + device='cuda:1'), covar=tensor([0.0402, 0.0807, 0.1834, 0.0648, 0.3916, 0.0583, 0.0627, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0432, 0.0367, 0.0440, 0.0322, 0.0383, 0.0369, 0.0357, 0.0393], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:22:54,329 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 19:22:55,473 INFO [train.py:903] (1/4) Epoch 11, batch 2900, loss[loss=0.2025, simple_loss=0.2876, pruned_loss=0.05873, over 19590.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.31, pruned_loss=0.08177, over 3797286.36 frames. ], batch size: 57, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:24:00,113 INFO [train.py:903] (1/4) Epoch 11, batch 2950, loss[loss=0.3307, simple_loss=0.3748, pruned_loss=0.1433, over 18336.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.309, pruned_loss=0.08124, over 3805564.37 frames. ], batch size: 83, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:24:25,803 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 19:24:53,527 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.471e+02 6.960e+02 8.532e+02 1.699e+03, threshold=1.392e+03, percent-clipped=7.0 +2023-04-01 19:25:02,842 INFO [train.py:903] (1/4) Epoch 11, batch 3000, loss[loss=0.2127, simple_loss=0.2958, pruned_loss=0.06484, over 19614.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3085, pruned_loss=0.08091, over 3816174.90 frames. ], batch size: 57, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:25:02,842 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 19:25:16,077 INFO [train.py:937] (1/4) Epoch 11, validation: loss=0.1785, simple_loss=0.2793, pruned_loss=0.0389, over 944034.00 frames. +2023-04-01 19:25:16,079 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 19:25:20,541 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 19:26:20,104 INFO [train.py:903] (1/4) Epoch 11, batch 3050, loss[loss=0.2461, simple_loss=0.3206, pruned_loss=0.08576, over 19761.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3089, pruned_loss=0.08115, over 3828494.09 frames. 
], batch size: 63, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:26:51,672 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:27:13,587 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.484e+02 7.005e+02 9.170e+02 2.820e+03, threshold=1.401e+03, percent-clipped=8.0 +2023-04-01 19:27:22,999 INFO [train.py:903] (1/4) Epoch 11, batch 3100, loss[loss=0.1994, simple_loss=0.2693, pruned_loss=0.06474, over 19768.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3091, pruned_loss=0.08121, over 3837255.83 frames. ], batch size: 46, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:27:27,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3421, 1.2648, 1.4030, 1.5438, 2.9045, 0.8899, 2.0812, 3.2734], + device='cuda:1'), covar=tensor([0.0460, 0.2654, 0.2746, 0.1648, 0.0752, 0.2646, 0.1201, 0.0297], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0337, 0.0352, 0.0318, 0.0347, 0.0336, 0.0330, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:27:36,322 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 19:27:39,069 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:28:24,231 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3747, 1.6214, 1.9566, 2.6799, 2.1230, 2.6211, 2.8283, 2.3557], + device='cuda:1'), covar=tensor([0.0645, 0.0931, 0.0888, 0.0864, 0.0855, 0.0624, 0.0761, 0.0570], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0226, 0.0223, 0.0250, 0.0236, 0.0214, 0.0196, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 19:28:26,275 INFO [train.py:903] (1/4) Epoch 11, batch 3150, loss[loss=0.2242, simple_loss=0.3037, pruned_loss=0.07231, over 17986.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3088, pruned_loss=0.08113, over 3839046.55 frames. ], batch size: 83, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:28:55,930 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 19:29:17,605 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:29:19,600 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.450e+02 6.508e+02 8.826e+02 1.534e+03, threshold=1.302e+03, percent-clipped=1.0 +2023-04-01 19:29:31,369 INFO [train.py:903] (1/4) Epoch 11, batch 3200, loss[loss=0.1654, simple_loss=0.2431, pruned_loss=0.04389, over 19047.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3075, pruned_loss=0.07997, over 3842902.78 frames. 
], batch size: 42, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:03,852 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6051, 1.2716, 1.2394, 1.4758, 1.1024, 1.3699, 1.2147, 1.4488], + device='cuda:1'), covar=tensor([0.0911, 0.1070, 0.1362, 0.0847, 0.1115, 0.0560, 0.1201, 0.0752], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0348, 0.0288, 0.0236, 0.0296, 0.0244, 0.0277, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:30:05,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:30:32,440 INFO [train.py:903] (1/4) Epoch 11, batch 3250, loss[loss=0.2845, simple_loss=0.3446, pruned_loss=0.1122, over 19770.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3093, pruned_loss=0.08139, over 3831059.70 frames. ], batch size: 54, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:26,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.701e+02 5.157e+02 6.415e+02 8.641e+02 1.397e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-04-01 19:31:36,062 INFO [train.py:903] (1/4) Epoch 11, batch 3300, loss[loss=0.225, simple_loss=0.3045, pruned_loss=0.07278, over 19661.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3097, pruned_loss=0.08182, over 3833603.36 frames. ], batch size: 55, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:41,720 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 19:32:39,937 INFO [train.py:903] (1/4) Epoch 11, batch 3350, loss[loss=0.3015, simple_loss=0.3605, pruned_loss=0.1213, over 18236.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.31, pruned_loss=0.08177, over 3825473.68 frames. ], batch size: 83, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:32:47,338 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:33:13,349 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2309, 1.9794, 1.5543, 1.3658, 1.8911, 1.1623, 1.2733, 1.7985], + device='cuda:1'), covar=tensor([0.0807, 0.0688, 0.1008, 0.0729, 0.0435, 0.1102, 0.0593, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0300, 0.0329, 0.0250, 0.0233, 0.0320, 0.0287, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:33:32,601 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 19:33:34,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.202e+02 6.815e+02 8.173e+02 2.322e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 19:33:44,446 INFO [train.py:903] (1/4) Epoch 11, batch 3400, loss[loss=0.2345, simple_loss=0.3179, pruned_loss=0.07559, over 19693.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3098, pruned_loss=0.08168, over 3828247.15 frames. ], batch size: 53, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:33:45,319 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. 
limit=5.0 +2023-04-01 19:34:08,997 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2236, 1.2537, 1.1955, 0.9338, 0.9100, 0.9791, 0.0786, 0.2892], + device='cuda:1'), covar=tensor([0.0624, 0.0606, 0.0362, 0.0437, 0.1159, 0.0555, 0.1031, 0.1019], + device='cuda:1'), in_proj_covar=tensor([0.0336, 0.0332, 0.0330, 0.0353, 0.0425, 0.0353, 0.0311, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:34:44,147 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71725.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:34:48,499 INFO [train.py:903] (1/4) Epoch 11, batch 3450, loss[loss=0.1844, simple_loss=0.2632, pruned_loss=0.0528, over 19351.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3102, pruned_loss=0.0823, over 3810367.17 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:52,164 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 19:35:15,972 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:33,066 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:35,393 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1976, 1.0688, 1.1110, 1.2834, 1.0567, 1.3616, 1.4098, 1.2176], + device='cuda:1'), covar=tensor([0.0865, 0.1025, 0.1074, 0.0744, 0.0791, 0.0743, 0.0767, 0.0742], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0221, 0.0247, 0.0233, 0.0211, 0.0194, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 19:35:42,108 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.649e+02 6.835e+02 8.667e+02 1.565e+03, threshold=1.367e+03, percent-clipped=4.0 +2023-04-01 19:35:52,285 INFO [train.py:903] (1/4) Epoch 11, batch 3500, loss[loss=0.2529, simple_loss=0.3374, pruned_loss=0.08419, over 19669.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3083, pruned_loss=0.08114, over 3831791.37 frames. ], batch size: 59, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:36:04,521 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:36:56,412 INFO [train.py:903] (1/4) Epoch 11, batch 3550, loss[loss=0.2469, simple_loss=0.3191, pruned_loss=0.08739, over 19557.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3074, pruned_loss=0.08062, over 3829780.21 frames. ], batch size: 61, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:37:12,229 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 19:37:49,024 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.129e+02 6.236e+02 8.069e+02 1.994e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-01 19:37:58,908 INFO [train.py:903] (1/4) Epoch 11, batch 3600, loss[loss=0.2366, simple_loss=0.2875, pruned_loss=0.09284, over 19331.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3071, pruned_loss=0.08049, over 3840934.62 frames. ], batch size: 44, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:38:47,074 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. 
limit=5.0 +2023-04-01 19:39:03,005 INFO [train.py:903] (1/4) Epoch 11, batch 3650, loss[loss=0.2354, simple_loss=0.3201, pruned_loss=0.07533, over 19510.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3068, pruned_loss=0.08023, over 3846949.53 frames. ], batch size: 64, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:13,505 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:39:55,948 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.422e+02 6.726e+02 7.997e+02 1.955e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-04-01 19:40:05,242 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:40:06,261 INFO [train.py:903] (1/4) Epoch 11, batch 3700, loss[loss=0.2015, simple_loss=0.2744, pruned_loss=0.06428, over 19407.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3074, pruned_loss=0.08059, over 3846801.28 frames. ], batch size: 48, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:40:10,654 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-01 19:40:39,343 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3167, 2.0575, 1.9871, 2.5045, 2.1688, 2.1107, 2.0716, 2.3824], + device='cuda:1'), covar=tensor([0.0877, 0.1621, 0.1297, 0.0923, 0.1287, 0.0471, 0.1045, 0.0605], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0355, 0.0292, 0.0239, 0.0299, 0.0247, 0.0278, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:41:11,920 INFO [train.py:903] (1/4) Epoch 11, batch 3750, loss[loss=0.1959, simple_loss=0.2741, pruned_loss=0.05882, over 19584.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3079, pruned_loss=0.08106, over 3822822.10 frames. ], batch size: 52, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:41:40,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3402, 2.4005, 2.6731, 3.2338, 2.2299, 3.1817, 2.8484, 2.4168], + device='cuda:1'), covar=tensor([0.3517, 0.3052, 0.1265, 0.1940, 0.3689, 0.1470, 0.3183, 0.2466], + device='cuda:1'), in_proj_covar=tensor([0.0788, 0.0804, 0.0647, 0.0892, 0.0778, 0.0705, 0.0782, 0.0706], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 19:42:06,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.141e+02 6.053e+02 7.322e+02 1.150e+03, threshold=1.211e+03, percent-clipped=0.0 +2023-04-01 19:42:17,090 INFO [train.py:903] (1/4) Epoch 11, batch 3800, loss[loss=0.2683, simple_loss=0.3381, pruned_loss=0.09924, over 17525.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3071, pruned_loss=0.08071, over 3818878.25 frames. ], batch size: 101, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:35,675 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:42:46,989 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 19:43:06,527 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.71 vs. limit=5.0 +2023-04-01 19:43:21,151 INFO [train.py:903] (1/4) Epoch 11, batch 3850, loss[loss=0.2268, simple_loss=0.3041, pruned_loss=0.07477, over 19655.00 frames. 
], tot_loss[loss=0.2337, simple_loss=0.3065, pruned_loss=0.08038, over 3821441.52 frames. ], batch size: 53, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:43:45,207 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:44:16,371 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 5.600e+02 6.929e+02 8.545e+02 2.074e+03, threshold=1.386e+03, percent-clipped=5.0 +2023-04-01 19:44:25,955 INFO [train.py:903] (1/4) Epoch 11, batch 3900, loss[loss=0.2361, simple_loss=0.3145, pruned_loss=0.07887, over 19484.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3074, pruned_loss=0.08101, over 3812235.63 frames. ], batch size: 64, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:30,660 INFO [train.py:903] (1/4) Epoch 11, batch 3950, loss[loss=0.2584, simple_loss=0.309, pruned_loss=0.1039, over 19716.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3058, pruned_loss=0.07993, over 3826035.36 frames. ], batch size: 46, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:31,934 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 19:46:23,815 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 5.760e+02 7.487e+02 9.416e+02 2.541e+03, threshold=1.497e+03, percent-clipped=9.0 +2023-04-01 19:46:34,092 INFO [train.py:903] (1/4) Epoch 11, batch 4000, loss[loss=0.2722, simple_loss=0.3413, pruned_loss=0.1015, over 19695.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3067, pruned_loss=0.08042, over 3837754.86 frames. ], batch size: 59, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:46:36,459 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72281.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:47:05,063 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8228, 1.7333, 1.5693, 1.4435, 1.3972, 1.4413, 0.3700, 0.7966], + device='cuda:1'), covar=tensor([0.0323, 0.0355, 0.0242, 0.0351, 0.0665, 0.0456, 0.0699, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0338, 0.0333, 0.0334, 0.0356, 0.0429, 0.0357, 0.0313, 0.0326], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:47:22,618 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 19:47:37,790 INFO [train.py:903] (1/4) Epoch 11, batch 4050, loss[loss=0.198, simple_loss=0.2736, pruned_loss=0.06119, over 19392.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3049, pruned_loss=0.07916, over 3844164.35 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:47:57,889 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:05,237 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:33,049 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.561e+02 4.924e+02 6.616e+02 8.581e+02 2.031e+03, threshold=1.323e+03, percent-clipped=3.0 +2023-04-01 19:48:37,036 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:43,655 INFO [train.py:903] (1/4) Epoch 11, batch 4100, loss[loss=0.2506, simple_loss=0.3213, pruned_loss=0.08997, over 19622.00 frames. 
], tot_loss[loss=0.2327, simple_loss=0.3058, pruned_loss=0.07975, over 3845271.93 frames. ], batch size: 50, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:49:05,249 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72396.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:49:18,929 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 19:49:26,315 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2965, 2.9521, 2.2284, 2.7446, 1.0309, 2.9349, 2.7884, 2.9293], + device='cuda:1'), covar=tensor([0.1104, 0.1439, 0.1966, 0.0915, 0.3471, 0.0986, 0.0982, 0.1326], + device='cuda:1'), in_proj_covar=tensor([0.0429, 0.0368, 0.0438, 0.0318, 0.0378, 0.0370, 0.0355, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:49:48,312 INFO [train.py:903] (1/4) Epoch 11, batch 4150, loss[loss=0.2073, simple_loss=0.2938, pruned_loss=0.06042, over 19785.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3067, pruned_loss=0.0799, over 3853356.61 frames. ], batch size: 56, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:50:42,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.902e+02 6.015e+02 8.099e+02 1.569e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-01 19:50:51,449 INFO [train.py:903] (1/4) Epoch 11, batch 4200, loss[loss=0.2268, simple_loss=0.3009, pruned_loss=0.0764, over 19623.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3077, pruned_loss=0.08028, over 3847113.17 frames. ], batch size: 50, lr: 7.54e-03, grad_scale: 16.0 +2023-04-01 19:50:57,218 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 19:51:07,239 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:51:38,227 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9609, 1.4727, 1.9121, 1.4849, 3.0014, 4.6878, 4.4851, 4.8894], + device='cuda:1'), covar=tensor([0.1702, 0.3301, 0.3016, 0.2075, 0.0528, 0.0136, 0.0149, 0.0115], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0295, 0.0323, 0.0252, 0.0215, 0.0155, 0.0207, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:51:56,568 INFO [train.py:903] (1/4) Epoch 11, batch 4250, loss[loss=0.2571, simple_loss=0.3176, pruned_loss=0.09835, over 19849.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3075, pruned_loss=0.08037, over 3851420.73 frames. ], batch size: 52, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:52:17,361 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 19:52:28,262 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 19:52:28,527 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9733, 1.1110, 1.2941, 1.1469, 2.4610, 0.8373, 1.8493, 2.7734], + device='cuda:1'), covar=tensor([0.0733, 0.3037, 0.3039, 0.2018, 0.1049, 0.2754, 0.1501, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0355, 0.0339, 0.0353, 0.0322, 0.0348, 0.0334, 0.0337, 0.0354], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:52:51,992 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.433e+02 6.415e+02 7.675e+02 1.468e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-01 19:53:01,657 INFO [train.py:903] (1/4) Epoch 11, batch 4300, loss[loss=0.208, simple_loss=0.2916, pruned_loss=0.06217, over 19389.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3072, pruned_loss=0.08002, over 3855917.61 frames. ], batch size: 48, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:53:36,395 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:00,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 19:54:04,825 INFO [train.py:903] (1/4) Epoch 11, batch 4350, loss[loss=0.2666, simple_loss=0.3361, pruned_loss=0.09853, over 19766.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3074, pruned_loss=0.08016, over 3843272.60 frames. ], batch size: 56, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:54:13,340 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1462, 3.8036, 2.3391, 2.1733, 3.3462, 1.8550, 1.3862, 2.1353], + device='cuda:1'), covar=tensor([0.1299, 0.0420, 0.0870, 0.0807, 0.0391, 0.1051, 0.1051, 0.0622], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0304, 0.0329, 0.0252, 0.0236, 0.0320, 0.0291, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:54:15,575 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3420, 1.4142, 2.1726, 1.7360, 2.9811, 2.5668, 3.0726, 1.5215], + device='cuda:1'), covar=tensor([0.2410, 0.4179, 0.2244, 0.1835, 0.1678, 0.2007, 0.1993, 0.3852], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0575, 0.0601, 0.0438, 0.0592, 0.0492, 0.0646, 0.0496], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 19:54:33,935 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2307, 1.3187, 1.9185, 1.6692, 2.8647, 4.7035, 4.6612, 4.8705], + device='cuda:1'), covar=tensor([0.1497, 0.3311, 0.2780, 0.1839, 0.0515, 0.0122, 0.0129, 0.0120], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0295, 0.0323, 0.0252, 0.0214, 0.0155, 0.0207, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:54:34,018 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:55,755 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0424, 1.2109, 1.6739, 0.9333, 2.4067, 3.3575, 3.0653, 3.5089], + device='cuda:1'), covar=tensor([0.1562, 0.3287, 0.3000, 0.2323, 0.0554, 0.0153, 0.0207, 0.0203], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0297, 0.0325, 0.0253, 0.0215, 0.0156, 0.0208, 0.0200], + device='cuda:1'), 
out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:54:58,892 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.206e+02 6.524e+02 8.372e+02 1.509e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-01 19:55:06,331 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:08,344 INFO [train.py:903] (1/4) Epoch 11, batch 4400, loss[loss=0.2151, simple_loss=0.2996, pruned_loss=0.0653, over 19346.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3058, pruned_loss=0.07939, over 3844640.95 frames. ], batch size: 66, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:55:20,134 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:30,983 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0134, 1.9766, 1.6876, 1.4976, 1.4301, 1.5384, 0.2874, 0.8703], + device='cuda:1'), covar=tensor([0.0418, 0.0396, 0.0288, 0.0514, 0.0835, 0.0583, 0.0880, 0.0740], + device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0331, 0.0330, 0.0357, 0.0427, 0.0354, 0.0311, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 19:55:37,557 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 19:55:47,757 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 19:56:05,992 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7471, 1.4381, 1.3384, 1.7166, 1.4681, 1.4894, 1.4226, 1.5905], + device='cuda:1'), covar=tensor([0.0965, 0.1380, 0.1455, 0.0961, 0.1205, 0.0540, 0.1211, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0352, 0.0289, 0.0237, 0.0294, 0.0244, 0.0275, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 19:56:12,262 INFO [train.py:903] (1/4) Epoch 11, batch 4450, loss[loss=0.2225, simple_loss=0.2876, pruned_loss=0.0787, over 19783.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3051, pruned_loss=0.07865, over 3839546.12 frames. ], batch size: 48, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:57:06,051 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.681e+02 5.500e+02 6.922e+02 8.863e+02 1.965e+03, threshold=1.384e+03, percent-clipped=10.0 +2023-04-01 19:57:14,262 INFO [train.py:903] (1/4) Epoch 11, batch 4500, loss[loss=0.2224, simple_loss=0.2996, pruned_loss=0.07263, over 19762.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3048, pruned_loss=0.07895, over 3843544.30 frames. ], batch size: 63, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:57:46,549 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:58:19,534 INFO [train.py:903] (1/4) Epoch 11, batch 4550, loss[loss=0.2586, simple_loss=0.3302, pruned_loss=0.09348, over 19564.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3055, pruned_loss=0.07937, over 3820259.61 frames. ], batch size: 61, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:58:28,668 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-01 19:58:53,286 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 19:59:02,249 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:59:11,462 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5868, 1.6482, 1.7755, 2.0885, 1.3762, 1.8795, 1.9374, 1.6992], + device='cuda:1'), covar=tensor([0.3321, 0.2937, 0.1523, 0.1552, 0.3055, 0.1460, 0.3705, 0.2717], + device='cuda:1'), in_proj_covar=tensor([0.0781, 0.0805, 0.0641, 0.0885, 0.0771, 0.0698, 0.0782, 0.0702], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 19:59:15,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.895e+02 7.318e+02 9.053e+02 1.617e+03, threshold=1.464e+03, percent-clipped=1.0 +2023-04-01 19:59:23,903 INFO [train.py:903] (1/4) Epoch 11, batch 4600, loss[loss=0.2574, simple_loss=0.3231, pruned_loss=0.09581, over 18761.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3061, pruned_loss=0.07964, over 3826321.61 frames. ], batch size: 74, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:59:34,629 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72887.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:59:59,198 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1443, 2.2986, 2.4285, 3.3786, 2.3277, 3.4243, 2.7593, 2.3830], + device='cuda:1'), covar=tensor([0.3859, 0.3371, 0.1429, 0.1771, 0.3543, 0.1285, 0.3464, 0.2506], + device='cuda:1'), in_proj_covar=tensor([0.0781, 0.0804, 0.0641, 0.0887, 0.0772, 0.0698, 0.0782, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:00:06,147 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:27,758 INFO [train.py:903] (1/4) Epoch 11, batch 4650, loss[loss=0.2668, simple_loss=0.3307, pruned_loss=0.1015, over 19666.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3067, pruned_loss=0.07982, over 3815978.98 frames. ], batch size: 53, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 20:00:41,032 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1675, 5.2344, 6.0827, 6.0483, 1.8051, 5.7360, 4.8444, 5.7260], + device='cuda:1'), covar=tensor([0.1319, 0.0667, 0.0486, 0.0511, 0.5491, 0.0410, 0.0498, 0.1014], + device='cuda:1'), in_proj_covar=tensor([0.0665, 0.0597, 0.0787, 0.0678, 0.0728, 0.0553, 0.0482, 0.0729], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 20:00:46,733 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 20:00:57,790 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 20:01:22,039 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.839e+02 5.234e+02 6.593e+02 8.122e+02 1.324e+03, threshold=1.319e+03, percent-clipped=0.0 +2023-04-01 20:01:30,180 INFO [train.py:903] (1/4) Epoch 11, batch 4700, loss[loss=0.2043, simple_loss=0.2741, pruned_loss=0.06726, over 19400.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3078, pruned_loss=0.08085, over 3802148.01 frames. 
], batch size: 48, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:01:34,988 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72982.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:01:55,717 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 20:02:34,869 INFO [train.py:903] (1/4) Epoch 11, batch 4750, loss[loss=0.2175, simple_loss=0.2939, pruned_loss=0.07054, over 19487.00 frames. ], tot_loss[loss=0.236, simple_loss=0.309, pruned_loss=0.08147, over 3808863.57 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:11,999 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:03:28,794 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.728e+02 6.958e+02 8.518e+02 2.013e+03, threshold=1.392e+03, percent-clipped=2.0 +2023-04-01 20:03:37,831 INFO [train.py:903] (1/4) Epoch 11, batch 4800, loss[loss=0.256, simple_loss=0.3317, pruned_loss=0.09015, over 18092.00 frames. ], tot_loss[loss=0.237, simple_loss=0.31, pruned_loss=0.082, over 3805525.81 frames. ], batch size: 83, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:38,473 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 20:03:44,170 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73084.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:04:18,432 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9728, 3.6437, 2.3601, 3.2593, 0.9793, 3.4642, 3.4139, 3.4426], + device='cuda:1'), covar=tensor([0.0786, 0.1074, 0.1995, 0.0754, 0.3653, 0.0761, 0.0865, 0.1168], + device='cuda:1'), in_proj_covar=tensor([0.0430, 0.0364, 0.0433, 0.0315, 0.0376, 0.0370, 0.0356, 0.0389], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:04:26,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5582, 1.6186, 2.3342, 1.8291, 3.1047, 2.6476, 3.2115, 1.6282], + device='cuda:1'), covar=tensor([0.2317, 0.4222, 0.2488, 0.1873, 0.1703, 0.2033, 0.1893, 0.3741], + device='cuda:1'), in_proj_covar=tensor([0.0490, 0.0577, 0.0601, 0.0438, 0.0591, 0.0493, 0.0645, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:04:40,997 INFO [train.py:903] (1/4) Epoch 11, batch 4850, loss[loss=0.2028, simple_loss=0.2819, pruned_loss=0.06183, over 19585.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3092, pruned_loss=0.0818, over 3803042.81 frames. ], batch size: 52, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:05:02,016 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 20:05:22,856 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 20:05:29,936 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 20:05:29,969 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-01 20:05:35,782 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.053e+02 5.592e+02 6.791e+02 8.044e+02 1.982e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 20:05:39,486 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 20:05:44,089 INFO [train.py:903] (1/4) Epoch 11, batch 4900, loss[loss=0.2639, simple_loss=0.3321, pruned_loss=0.09788, over 19390.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3094, pruned_loss=0.08156, over 3806893.45 frames. ], batch size: 70, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:05:57,064 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9904, 1.6556, 1.9153, 1.8655, 4.3893, 1.0794, 2.5664, 4.7202], + device='cuda:1'), covar=tensor([0.0388, 0.2624, 0.2594, 0.1762, 0.0730, 0.2676, 0.1235, 0.0209], + device='cuda:1'), in_proj_covar=tensor([0.0355, 0.0337, 0.0352, 0.0321, 0.0344, 0.0332, 0.0331, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:06:01,219 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 20:06:48,396 INFO [train.py:903] (1/4) Epoch 11, batch 4950, loss[loss=0.2067, simple_loss=0.2805, pruned_loss=0.06644, over 19565.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3091, pruned_loss=0.08142, over 3810293.98 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:00,959 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 20:07:21,264 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73256.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:07:24,681 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 20:07:42,672 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.726e+02 5.448e+02 7.218e+02 1.051e+03 2.092e+03, threshold=1.444e+03, percent-clipped=5.0 +2023-04-01 20:07:51,007 INFO [train.py:903] (1/4) Epoch 11, batch 5000, loss[loss=0.2048, simple_loss=0.2683, pruned_loss=0.07062, over 19388.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3088, pruned_loss=0.08142, over 3821995.75 frames. ], batch size: 47, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:55,895 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 20:08:08,611 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 20:08:13,759 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9847, 1.5738, 1.9887, 2.7346, 1.9473, 2.2631, 2.5739, 2.2542], + device='cuda:1'), covar=tensor([0.0871, 0.1051, 0.0986, 0.0903, 0.0956, 0.0764, 0.0910, 0.0626], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0224, 0.0223, 0.0250, 0.0235, 0.0213, 0.0194, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 20:08:32,066 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:08:50,651 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73326.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:08:54,847 INFO [train.py:903] (1/4) Epoch 11, batch 5050, loss[loss=0.1788, simple_loss=0.257, pruned_loss=0.05034, over 19325.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3081, pruned_loss=0.08095, over 3821211.77 frames. ], batch size: 44, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:09:28,973 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 20:09:47,572 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:09:48,289 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.337e+02 6.487e+02 8.820e+02 1.986e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-01 20:09:56,476 INFO [train.py:903] (1/4) Epoch 11, batch 5100, loss[loss=0.2409, simple_loss=0.3163, pruned_loss=0.08277, over 19309.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3076, pruned_loss=0.0812, over 3822192.79 frames. ], batch size: 66, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:10:04,903 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 20:10:10,258 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 20:10:13,763 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 20:10:22,130 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8889, 4.4531, 2.7375, 3.9421, 0.9799, 4.2367, 4.1730, 4.3505], + device='cuda:1'), covar=tensor([0.0540, 0.0843, 0.1782, 0.0621, 0.3836, 0.0657, 0.0707, 0.0808], + device='cuda:1'), in_proj_covar=tensor([0.0432, 0.0363, 0.0437, 0.0317, 0.0379, 0.0373, 0.0357, 0.0390], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:11:00,238 INFO [train.py:903] (1/4) Epoch 11, batch 5150, loss[loss=0.2244, simple_loss=0.3054, pruned_loss=0.07175, over 19673.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3082, pruned_loss=0.08142, over 3818617.84 frames. ], batch size: 60, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:11:09,658 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 20:11:16,047 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73441.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:11:35,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4595, 1.0707, 1.3141, 1.2981, 2.1188, 0.9833, 1.8337, 2.2861], + device='cuda:1'), covar=tensor([0.0686, 0.2644, 0.2575, 0.1458, 0.0890, 0.1971, 0.1039, 0.0516], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0334, 0.0348, 0.0320, 0.0344, 0.0331, 0.0328, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:11:43,059 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:11:50,316 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1508, 1.0064, 1.0854, 1.3304, 1.0322, 1.2048, 1.3194, 1.1896], + device='cuda:1'), covar=tensor([0.0899, 0.1086, 0.1117, 0.0690, 0.0917, 0.0856, 0.0831, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0226, 0.0225, 0.0250, 0.0237, 0.0214, 0.0194, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 20:11:54,731 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.282e+02 5.910e+02 7.077e+02 9.438e+02 2.777e+03, threshold=1.415e+03, percent-clipped=4.0 +2023-04-01 20:12:04,016 INFO [train.py:903] (1/4) Epoch 11, batch 5200, loss[loss=0.225, simple_loss=0.2938, pruned_loss=0.07806, over 19851.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3079, pruned_loss=0.08099, over 3817401.96 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:12:16,632 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 20:13:00,064 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 20:13:07,090 INFO [train.py:903] (1/4) Epoch 11, batch 5250, loss[loss=0.2193, simple_loss=0.3003, pruned_loss=0.06917, over 19533.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.308, pruned_loss=0.08127, over 3811090.40 frames. ], batch size: 56, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:13:16,323 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 20:13:40,014 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 20:14:02,587 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 5.338e+02 6.658e+02 8.757e+02 1.866e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-04-01 20:14:11,100 INFO [train.py:903] (1/4) Epoch 11, batch 5300, loss[loss=0.2458, simple_loss=0.3219, pruned_loss=0.08489, over 19663.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3071, pruned_loss=0.08087, over 3819937.79 frames. 
], batch size: 60, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:14:14,915 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7295, 1.8005, 2.0034, 2.4602, 1.6271, 2.2031, 2.2495, 1.9211], + device='cuda:1'), covar=tensor([0.3411, 0.2894, 0.1400, 0.1480, 0.3168, 0.1473, 0.3406, 0.2592], + device='cuda:1'), in_proj_covar=tensor([0.0788, 0.0808, 0.0646, 0.0889, 0.0777, 0.0702, 0.0786, 0.0710], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:14:26,093 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 20:14:29,974 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2669, 2.9467, 2.1314, 2.1529, 1.7771, 2.3311, 0.7757, 2.1388], + device='cuda:1'), covar=tensor([0.0444, 0.0420, 0.0565, 0.0756, 0.0888, 0.0806, 0.0965, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0334, 0.0326, 0.0329, 0.0348, 0.0424, 0.0349, 0.0304, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 20:14:43,174 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2881, 3.7651, 3.8613, 3.8910, 1.4307, 3.6647, 3.1857, 3.5687], + device='cuda:1'), covar=tensor([0.1417, 0.0820, 0.0659, 0.0658, 0.5054, 0.0688, 0.0700, 0.1173], + device='cuda:1'), in_proj_covar=tensor([0.0666, 0.0600, 0.0794, 0.0678, 0.0724, 0.0553, 0.0485, 0.0730], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 20:14:49,183 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:13,628 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73627.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:15,331 INFO [train.py:903] (1/4) Epoch 11, batch 5350, loss[loss=0.2223, simple_loss=0.2957, pruned_loss=0.07443, over 19865.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.08115, over 3808814.84 frames. ], batch size: 52, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:15:46,116 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:46,889 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 20:15:49,354 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73655.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:16:09,901 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.285e+02 6.573e+02 8.256e+02 1.333e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-04-01 20:16:19,918 INFO [train.py:903] (1/4) Epoch 11, batch 5400, loss[loss=0.2765, simple_loss=0.3439, pruned_loss=0.1046, over 19733.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3087, pruned_loss=0.0816, over 3802420.02 frames. 
], batch size: 63, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:16:41,358 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73697.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:17:12,958 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:17:20,595 INFO [train.py:903] (1/4) Epoch 11, batch 5450, loss[loss=0.2492, simple_loss=0.3232, pruned_loss=0.08765, over 19772.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3086, pruned_loss=0.08171, over 3815431.83 frames. ], batch size: 56, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:17:45,851 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:12,062 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:14,011 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.707e+02 7.323e+02 9.356e+02 2.024e+03, threshold=1.465e+03, percent-clipped=8.0 +2023-04-01 20:18:23,354 INFO [train.py:903] (1/4) Epoch 11, batch 5500, loss[loss=0.2122, simple_loss=0.2736, pruned_loss=0.07546, over 19756.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3095, pruned_loss=0.08236, over 3820381.50 frames. ], batch size: 47, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:18:50,424 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 20:19:25,479 INFO [train.py:903] (1/4) Epoch 11, batch 5550, loss[loss=0.2167, simple_loss=0.2978, pruned_loss=0.06774, over 19560.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3097, pruned_loss=0.08235, over 3832126.66 frames. ], batch size: 61, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:19:35,123 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 20:20:04,780 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2573, 1.3375, 1.7899, 1.4219, 2.7364, 3.4745, 3.3302, 3.7235], + device='cuda:1'), covar=tensor([0.1560, 0.3306, 0.2851, 0.2047, 0.0498, 0.0284, 0.0196, 0.0192], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0248, 0.0212, 0.0156, 0.0205, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 20:20:20,659 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.761e+02 6.752e+02 8.536e+02 1.570e+03, threshold=1.350e+03, percent-clipped=1.0 +2023-04-01 20:20:26,343 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 20:20:29,856 INFO [train.py:903] (1/4) Epoch 11, batch 5600, loss[loss=0.254, simple_loss=0.3314, pruned_loss=0.08834, over 19692.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.31, pruned_loss=0.08263, over 3827148.23 frames. 
], batch size: 53, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:24,330 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8807, 1.7907, 1.8688, 1.4781, 4.3230, 0.8714, 2.4397, 4.6986], + device='cuda:1'), covar=tensor([0.0330, 0.2410, 0.2509, 0.1995, 0.0701, 0.2837, 0.1384, 0.0189], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0338, 0.0349, 0.0322, 0.0347, 0.0331, 0.0332, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:21:34,094 INFO [train.py:903] (1/4) Epoch 11, batch 5650, loss[loss=0.1797, simple_loss=0.2473, pruned_loss=0.05602, over 19723.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.308, pruned_loss=0.08117, over 3830317.33 frames. ], batch size: 45, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:22:03,021 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:22:23,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 20:22:28,536 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.880e+02 7.006e+02 8.632e+02 1.687e+03, threshold=1.401e+03, percent-clipped=2.0 +2023-04-01 20:22:37,801 INFO [train.py:903] (1/4) Epoch 11, batch 5700, loss[loss=0.2421, simple_loss=0.311, pruned_loss=0.08659, over 19619.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3067, pruned_loss=0.08043, over 3844626.47 frames. ], batch size: 50, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:13,091 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3839, 1.4602, 1.7144, 1.6099, 2.6131, 2.2100, 2.6981, 1.0831], + device='cuda:1'), covar=tensor([0.2204, 0.3691, 0.2262, 0.1649, 0.1353, 0.1810, 0.1331, 0.3591], + device='cuda:1'), in_proj_covar=tensor([0.0492, 0.0574, 0.0599, 0.0436, 0.0594, 0.0492, 0.0645, 0.0494], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:23:38,892 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:23:42,027 INFO [train.py:903] (1/4) Epoch 11, batch 5750, loss[loss=0.2955, simple_loss=0.3462, pruned_loss=0.1224, over 12881.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3068, pruned_loss=0.08072, over 3825114.84 frames. ], batch size: 136, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:43,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 20:23:51,329 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 20:23:56,779 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 20:24:11,305 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74051.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:30,683 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74067.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:36,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.413e+02 6.547e+02 7.617e+02 1.298e+03, threshold=1.309e+03, percent-clipped=0.0 +2023-04-01 20:24:45,231 INFO [train.py:903] (1/4) Epoch 11, batch 5800, loss[loss=0.2322, simple_loss=0.303, pruned_loss=0.08071, over 19470.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3076, pruned_loss=0.08135, over 3835174.14 frames. ], batch size: 49, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:24:50,599 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:04,165 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:46,662 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1720, 2.7733, 2.1572, 2.1309, 1.8035, 2.2724, 0.8620, 2.0022], + device='cuda:1'), covar=tensor([0.0459, 0.0463, 0.0491, 0.0770, 0.0848, 0.0836, 0.0913, 0.0765], + device='cuda:1'), in_proj_covar=tensor([0.0331, 0.0325, 0.0327, 0.0346, 0.0421, 0.0344, 0.0305, 0.0318], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 20:25:48,478 INFO [train.py:903] (1/4) Epoch 11, batch 5850, loss[loss=0.2265, simple_loss=0.3019, pruned_loss=0.0755, over 19835.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3078, pruned_loss=0.08122, over 3830003.13 frames. ], batch size: 52, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:25:52,588 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 20:26:04,396 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 20:26:42,000 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.778e+02 5.734e+02 7.251e+02 9.100e+02 2.751e+03, threshold=1.450e+03, percent-clipped=7.0 +2023-04-01 20:26:51,315 INFO [train.py:903] (1/4) Epoch 11, batch 5900, loss[loss=0.2151, simple_loss=0.2957, pruned_loss=0.06723, over 19773.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3087, pruned_loss=0.08158, over 3832335.86 frames. ], batch size: 54, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:26:53,585 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 20:27:15,056 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 20:27:30,982 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:27:55,166 INFO [train.py:903] (1/4) Epoch 11, batch 5950, loss[loss=0.2174, simple_loss=0.2827, pruned_loss=0.07612, over 19783.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3074, pruned_loss=0.08078, over 3842216.63 frames. 
], batch size: 48, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:33,384 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74258.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:28:49,682 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 4.883e+02 6.160e+02 7.607e+02 1.521e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-01 20:28:59,182 INFO [train.py:903] (1/4) Epoch 11, batch 6000, loss[loss=0.1937, simple_loss=0.2655, pruned_loss=0.0609, over 19732.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3088, pruned_loss=0.0815, over 3845124.50 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:59,182 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 20:29:11,906 INFO [train.py:937] (1/4) Epoch 11, validation: loss=0.1778, simple_loss=0.2787, pruned_loss=0.03847, over 944034.00 frames. +2023-04-01 20:29:11,909 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 20:30:09,755 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74323.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:30:18,373 INFO [train.py:903] (1/4) Epoch 11, batch 6050, loss[loss=0.2115, simple_loss=0.295, pruned_loss=0.06398, over 19595.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3085, pruned_loss=0.08108, over 3829507.08 frames. ], batch size: 52, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:30:28,827 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7886, 4.2245, 4.5171, 4.4875, 1.6450, 4.2265, 3.6146, 4.1792], + device='cuda:1'), covar=tensor([0.1489, 0.0955, 0.0534, 0.0603, 0.5301, 0.0645, 0.0642, 0.1069], + device='cuda:1'), in_proj_covar=tensor([0.0672, 0.0600, 0.0800, 0.0682, 0.0725, 0.0557, 0.0485, 0.0732], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 20:30:42,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:31:12,598 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.290e+02 5.870e+02 7.295e+02 8.913e+02 1.728e+03, threshold=1.459e+03, percent-clipped=8.0 +2023-04-01 20:31:21,969 INFO [train.py:903] (1/4) Epoch 11, batch 6100, loss[loss=0.2313, simple_loss=0.3077, pruned_loss=0.07742, over 19768.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3085, pruned_loss=0.08098, over 3824583.49 frames. ], batch size: 56, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:31:56,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0649, 1.2609, 1.4635, 1.2784, 2.6128, 0.9047, 1.9059, 2.8831], + device='cuda:1'), covar=tensor([0.0523, 0.2476, 0.2479, 0.1677, 0.0759, 0.2386, 0.1217, 0.0362], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0334, 0.0347, 0.0317, 0.0344, 0.0330, 0.0332, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:32:22,429 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74426.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:32:25,901 INFO [train.py:903] (1/4) Epoch 11, batch 6150, loss[loss=0.1852, simple_loss=0.262, pruned_loss=0.05418, over 19649.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3079, pruned_loss=0.08026, over 3836776.55 frames. 
], batch size: 50, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:54,729 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 20:33:12,628 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:33:20,926 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.576e+02 6.382e+02 8.608e+02 2.367e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-01 20:33:29,312 INFO [train.py:903] (1/4) Epoch 11, batch 6200, loss[loss=0.2536, simple_loss=0.3286, pruned_loss=0.0893, over 19660.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3077, pruned_loss=0.08021, over 3834923.19 frames. ], batch size: 60, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:33:43,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:34:31,919 INFO [train.py:903] (1/4) Epoch 11, batch 6250, loss[loss=0.2485, simple_loss=0.3338, pruned_loss=0.08158, over 19555.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3089, pruned_loss=0.08092, over 3827548.96 frames. ], batch size: 56, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:34:39,623 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.79 vs. limit=5.0 +2023-04-01 20:34:48,638 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74541.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:35:02,986 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 20:35:09,829 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-01 20:35:10,724 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3536, 2.2004, 1.9055, 1.7331, 1.6867, 1.7844, 0.4114, 1.1501], + device='cuda:1'), covar=tensor([0.0403, 0.0415, 0.0340, 0.0548, 0.0875, 0.0618, 0.0903, 0.0786], + device='cuda:1'), in_proj_covar=tensor([0.0338, 0.0332, 0.0333, 0.0353, 0.0427, 0.0352, 0.0308, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 20:35:30,740 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 5.334e+02 6.699e+02 8.918e+02 1.691e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-01 20:35:38,658 INFO [train.py:903] (1/4) Epoch 11, batch 6300, loss[loss=0.2868, simple_loss=0.346, pruned_loss=0.1137, over 17502.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3076, pruned_loss=0.0802, over 3826709.76 frames. ], batch size: 101, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:36:06,829 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:21,235 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:40,491 INFO [train.py:903] (1/4) Epoch 11, batch 6350, loss[loss=0.2276, simple_loss=0.2918, pruned_loss=0.08172, over 19701.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3071, pruned_loss=0.07995, over 3823599.97 frames. 
], batch size: 45, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:16,964 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4994, 1.2954, 1.3364, 1.9378, 1.4798, 1.7205, 1.9036, 1.6131], + device='cuda:1'), covar=tensor([0.0859, 0.1059, 0.1090, 0.0821, 0.0922, 0.0740, 0.0786, 0.0695], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0225, 0.0223, 0.0246, 0.0236, 0.0212, 0.0194, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 20:37:36,488 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 5.622e+02 6.880e+02 9.256e+02 3.776e+03, threshold=1.376e+03, percent-clipped=5.0 +2023-04-01 20:37:44,831 INFO [train.py:903] (1/4) Epoch 11, batch 6400, loss[loss=0.2359, simple_loss=0.3163, pruned_loss=0.07779, over 19661.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3089, pruned_loss=0.08098, over 3804050.28 frames. ], batch size: 55, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:38:33,474 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:38:37,067 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1469, 1.1879, 1.3106, 1.2918, 1.6523, 1.6155, 1.5580, 0.5538], + device='cuda:1'), covar=tensor([0.1919, 0.3344, 0.1972, 0.1587, 0.1323, 0.1784, 0.1253, 0.3582], + device='cuda:1'), in_proj_covar=tensor([0.0485, 0.0569, 0.0596, 0.0435, 0.0589, 0.0486, 0.0640, 0.0488], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:38:44,787 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5125, 1.3450, 1.4512, 1.6140, 3.0383, 1.1549, 2.3268, 3.4259], + device='cuda:1'), covar=tensor([0.0416, 0.2588, 0.2675, 0.1679, 0.0693, 0.2400, 0.1208, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0338, 0.0351, 0.0320, 0.0348, 0.0335, 0.0337, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:38:46,860 INFO [train.py:903] (1/4) Epoch 11, batch 6450, loss[loss=0.1977, simple_loss=0.2708, pruned_loss=0.06229, over 19004.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3088, pruned_loss=0.08108, over 3814734.15 frames. ], batch size: 42, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:39:28,526 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 20:39:44,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 5.590e+02 7.165e+02 9.175e+02 2.194e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 20:39:53,342 INFO [train.py:903] (1/4) Epoch 11, batch 6500, loss[loss=0.2354, simple_loss=0.2979, pruned_loss=0.08647, over 19465.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3077, pruned_loss=0.08093, over 3816220.50 frames. ], batch size: 49, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:39:54,533 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 20:40:16,077 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:48,432 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:52,517 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.81 vs. limit=5.0 +2023-04-01 20:40:54,505 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8111, 1.9378, 2.0920, 2.6573, 1.7638, 2.3609, 2.3395, 1.9684], + device='cuda:1'), covar=tensor([0.3376, 0.2804, 0.1364, 0.1597, 0.3223, 0.1452, 0.3191, 0.2483], + device='cuda:1'), in_proj_covar=tensor([0.0787, 0.0810, 0.0641, 0.0890, 0.0778, 0.0702, 0.0782, 0.0705], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:40:56,363 INFO [train.py:903] (1/4) Epoch 11, batch 6550, loss[loss=0.2389, simple_loss=0.3176, pruned_loss=0.08014, over 19609.00 frames. ], tot_loss[loss=0.235, simple_loss=0.308, pruned_loss=0.081, over 3801570.44 frames. ], batch size: 61, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:41:52,115 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 5.036e+02 6.201e+02 8.020e+02 1.527e+03, threshold=1.240e+03, percent-clipped=1.0 +2023-04-01 20:41:59,122 INFO [train.py:903] (1/4) Epoch 11, batch 6600, loss[loss=0.2217, simple_loss=0.2988, pruned_loss=0.07235, over 19677.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3097, pruned_loss=0.08222, over 3795823.55 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:42:46,208 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3490, 1.4467, 1.7504, 1.6132, 2.7018, 2.2505, 2.7963, 1.1662], + device='cuda:1'), covar=tensor([0.2230, 0.3718, 0.2308, 0.1751, 0.1371, 0.1842, 0.1459, 0.3651], + device='cuda:1'), in_proj_covar=tensor([0.0496, 0.0577, 0.0604, 0.0441, 0.0597, 0.0493, 0.0651, 0.0495], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:43:01,234 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:43:03,143 INFO [train.py:903] (1/4) Epoch 11, batch 6650, loss[loss=0.3372, simple_loss=0.3817, pruned_loss=0.1464, over 13293.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3077, pruned_loss=0.08093, over 3806705.86 frames. 
], batch size: 136, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:43:32,770 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9422, 1.7564, 1.5898, 2.0534, 1.9304, 1.7712, 1.6526, 1.9556], + device='cuda:1'), covar=tensor([0.0928, 0.1482, 0.1318, 0.0836, 0.1093, 0.0491, 0.1126, 0.0662], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0352, 0.0291, 0.0239, 0.0296, 0.0242, 0.0279, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:43:40,227 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:00,890 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.506e+02 7.046e+02 9.043e+02 1.602e+03, threshold=1.409e+03, percent-clipped=2.0 +2023-04-01 20:44:01,337 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:07,814 INFO [train.py:903] (1/4) Epoch 11, batch 6700, loss[loss=0.2007, simple_loss=0.2774, pruned_loss=0.06199, over 19853.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3076, pruned_loss=0.08089, over 3813551.85 frames. ], batch size: 52, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:44:12,436 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74982.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:31,776 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74998.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:06,535 INFO [train.py:903] (1/4) Epoch 11, batch 6750, loss[loss=0.2389, simple_loss=0.32, pruned_loss=0.07885, over 19666.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.08125, over 3822469.61 frames. ], batch size: 58, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:45:30,092 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 20:45:55,351 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:56,174 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 5.649e+02 7.002e+02 8.709e+02 1.691e+03, threshold=1.400e+03, percent-clipped=2.0 +2023-04-01 20:46:04,211 INFO [train.py:903] (1/4) Epoch 11, batch 6800, loss[loss=0.2435, simple_loss=0.3237, pruned_loss=0.08164, over 19754.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3083, pruned_loss=0.08098, over 3833737.23 frames. ], batch size: 54, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:46:52,333 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 20:46:53,786 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 20:46:56,127 INFO [train.py:903] (1/4) Epoch 12, batch 0, loss[loss=0.259, simple_loss=0.3076, pruned_loss=0.1052, over 19754.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3076, pruned_loss=0.1052, over 19754.00 frames. 
], batch size: 46, lr: 7.10e-03, grad_scale: 8.0 +2023-04-01 20:46:56,128 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 20:47:06,643 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5869, 3.2801, 2.6680, 3.0217, 1.2601, 2.9903, 3.0119, 3.1006], + device='cuda:1'), covar=tensor([0.0863, 0.0814, 0.1619, 0.0821, 0.3106, 0.1114, 0.0823, 0.1220], + device='cuda:1'), in_proj_covar=tensor([0.0430, 0.0361, 0.0432, 0.0312, 0.0374, 0.0364, 0.0352, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-04-01 20:47:08,133 INFO [train.py:937] (1/4) Epoch 12, validation: loss=0.1777, simple_loss=0.2788, pruned_loss=0.03825, over 944034.00 frames. +2023-04-01 20:47:08,134 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 20:47:20,786 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 20:48:11,513 INFO [train.py:903] (1/4) Epoch 12, batch 50, loss[loss=0.1975, simple_loss=0.2679, pruned_loss=0.06353, over 19715.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3051, pruned_loss=0.07898, over 870432.97 frames. ], batch size: 46, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:48:20,847 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3850, 1.2088, 0.9842, 1.2445, 1.1102, 1.1649, 0.9843, 1.1791], + device='cuda:1'), covar=tensor([0.1134, 0.1224, 0.1764, 0.1072, 0.1315, 0.0974, 0.1657, 0.1046], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0352, 0.0291, 0.0239, 0.0296, 0.0243, 0.0279, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:48:29,477 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.251e+02 6.823e+02 1.011e+03 3.055e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-01 20:48:41,946 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 20:49:13,868 INFO [train.py:903] (1/4) Epoch 12, batch 100, loss[loss=0.231, simple_loss=0.3131, pruned_loss=0.07449, over 19301.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.309, pruned_loss=0.08127, over 1513017.50 frames. ], batch size: 70, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:49:22,066 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 20:50:02,907 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:16,771 INFO [train.py:903] (1/4) Epoch 12, batch 150, loss[loss=0.2305, simple_loss=0.3013, pruned_loss=0.07981, over 19584.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3066, pruned_loss=0.07909, over 2025783.63 frames. 
], batch size: 52, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:50:34,012 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:36,118 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.206e+02 6.373e+02 8.343e+02 1.576e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-01 20:50:56,161 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:15,326 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 20:51:18,693 INFO [train.py:903] (1/4) Epoch 12, batch 200, loss[loss=0.2296, simple_loss=0.3059, pruned_loss=0.07663, over 19594.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3058, pruned_loss=0.07885, over 2429406.21 frames. ], batch size: 61, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:51:42,634 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:45,339 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:46,573 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8244, 1.9223, 2.1090, 2.6438, 1.7848, 2.4406, 2.3355, 1.9488], + device='cuda:1'), covar=tensor([0.3640, 0.2924, 0.1443, 0.1652, 0.3326, 0.1470, 0.3478, 0.2689], + device='cuda:1'), in_proj_covar=tensor([0.0789, 0.0808, 0.0642, 0.0891, 0.0775, 0.0703, 0.0777, 0.0703], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:52:16,212 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:22,163 INFO [train.py:903] (1/4) Epoch 12, batch 250, loss[loss=0.2076, simple_loss=0.2907, pruned_loss=0.06227, over 18656.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3053, pruned_loss=0.07912, over 2748349.10 frames. ], batch size: 74, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:52:41,710 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.714e+02 6.835e+02 8.470e+02 1.829e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 20:52:56,669 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:53:25,608 INFO [train.py:903] (1/4) Epoch 12, batch 300, loss[loss=0.1989, simple_loss=0.2812, pruned_loss=0.05833, over 19766.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3057, pruned_loss=0.07865, over 3002157.74 frames. 
], batch size: 54, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:53:34,115 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8468, 1.4059, 1.4940, 1.6061, 3.3298, 0.9466, 2.2490, 3.7900], + device='cuda:1'), covar=tensor([0.0403, 0.2634, 0.2638, 0.1723, 0.0690, 0.2705, 0.1394, 0.0231], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0340, 0.0352, 0.0320, 0.0346, 0.0332, 0.0336, 0.0356], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 20:54:07,328 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:54:28,461 INFO [train.py:903] (1/4) Epoch 12, batch 350, loss[loss=0.2428, simple_loss=0.317, pruned_loss=0.08425, over 19491.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3075, pruned_loss=0.07991, over 3190840.48 frames. ], batch size: 64, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:31,954 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:54:45,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.463e+02 6.814e+02 8.627e+02 1.955e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-01 20:55:24,620 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 20:55:30,816 INFO [train.py:903] (1/4) Epoch 12, batch 400, loss[loss=0.2978, simple_loss=0.3509, pruned_loss=0.1224, over 13349.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3072, pruned_loss=0.07974, over 3322947.77 frames. ], batch size: 136, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:56:16,839 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4334, 1.5304, 1.8748, 1.6444, 3.0557, 2.5082, 3.2716, 1.5593], + device='cuda:1'), covar=tensor([0.2190, 0.3688, 0.2203, 0.1686, 0.1379, 0.1764, 0.1491, 0.3399], + device='cuda:1'), in_proj_covar=tensor([0.0492, 0.0577, 0.0604, 0.0438, 0.0598, 0.0493, 0.0646, 0.0492], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 20:56:32,366 INFO [train.py:903] (1/4) Epoch 12, batch 450, loss[loss=0.2529, simple_loss=0.3274, pruned_loss=0.08915, over 19665.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3072, pruned_loss=0.07982, over 3432249.80 frames. ], batch size: 58, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:56:51,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.172e+02 6.423e+02 7.976e+02 2.291e+03, threshold=1.285e+03, percent-clipped=4.0 +2023-04-01 20:57:09,451 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 20:57:10,692 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 20:57:13,224 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75590.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:57:17,040 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7832, 4.2137, 4.4169, 4.4096, 1.5617, 4.0850, 3.5753, 4.1004], + device='cuda:1'), covar=tensor([0.1302, 0.0803, 0.0597, 0.0584, 0.5294, 0.0721, 0.0643, 0.1100], + device='cuda:1'), in_proj_covar=tensor([0.0677, 0.0603, 0.0804, 0.0688, 0.0731, 0.0559, 0.0492, 0.0741], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 20:57:36,544 INFO [train.py:903] (1/4) Epoch 12, batch 500, loss[loss=0.1955, simple_loss=0.2861, pruned_loss=0.05248, over 19620.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3076, pruned_loss=0.08028, over 3519313.37 frames. ], batch size: 57, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:06,142 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:17,971 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:38,568 INFO [train.py:903] (1/4) Epoch 12, batch 550, loss[loss=0.2473, simple_loss=0.3158, pruned_loss=0.08939, over 13121.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3096, pruned_loss=0.08121, over 3587916.89 frames. ], batch size: 135, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:50,357 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:56,746 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.816e+02 6.939e+02 9.327e+02 2.224e+03, threshold=1.388e+03, percent-clipped=13.0 +2023-04-01 20:59:06,364 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5616, 1.3789, 1.3890, 1.8494, 1.6198, 1.8934, 1.9341, 1.7212], + device='cuda:1'), covar=tensor([0.0862, 0.0981, 0.1040, 0.0870, 0.0838, 0.0699, 0.0822, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0222, 0.0246, 0.0234, 0.0212, 0.0195, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 20:59:26,721 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8241, 4.3410, 4.6048, 4.5858, 1.5873, 4.2869, 3.7436, 4.2506], + device='cuda:1'), covar=tensor([0.1362, 0.0721, 0.0522, 0.0527, 0.5305, 0.0621, 0.0597, 0.1078], + device='cuda:1'), in_proj_covar=tensor([0.0675, 0.0599, 0.0799, 0.0686, 0.0725, 0.0558, 0.0490, 0.0741], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 20:59:26,878 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:38,240 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75705.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:41,417 INFO [train.py:903] (1/4) Epoch 12, batch 600, loss[loss=0.2344, simple_loss=0.2991, pruned_loss=0.08487, over 19759.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3091, pruned_loss=0.08052, over 3642722.59 frames. 
], batch size: 47, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:59:57,550 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:57,728 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.48 vs. limit=5.0 +2023-04-01 21:00:22,881 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 21:00:30,168 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:42,510 INFO [train.py:903] (1/4) Epoch 12, batch 650, loss[loss=0.1981, simple_loss=0.2834, pruned_loss=0.05638, over 19540.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3095, pruned_loss=0.0809, over 3661401.65 frames. ], batch size: 54, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 21:00:59,398 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2708, 1.3165, 1.7744, 1.5986, 2.4985, 1.9682, 2.4958, 1.1319], + device='cuda:1'), covar=tensor([0.2419, 0.4182, 0.2414, 0.1871, 0.1468, 0.2269, 0.1660, 0.3698], + device='cuda:1'), in_proj_covar=tensor([0.0495, 0.0582, 0.0607, 0.0438, 0.0598, 0.0495, 0.0648, 0.0492], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 21:01:01,243 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.224e+02 6.354e+02 7.929e+02 1.382e+03, threshold=1.271e+03, percent-clipped=0.0 +2023-04-01 21:01:31,198 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 21:01:45,565 INFO [train.py:903] (1/4) Epoch 12, batch 700, loss[loss=0.203, simple_loss=0.2782, pruned_loss=0.06396, over 19778.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3081, pruned_loss=0.08036, over 3695191.15 frames. ], batch size: 48, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:02:10,790 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4948, 1.6336, 1.7899, 1.7778, 3.9859, 1.1684, 2.4759, 4.0530], + device='cuda:1'), covar=tensor([0.0394, 0.2562, 0.2634, 0.1841, 0.0728, 0.2722, 0.1469, 0.0277], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0341, 0.0353, 0.0321, 0.0349, 0.0333, 0.0337, 0.0356], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:02:45,696 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6164, 1.3562, 1.4616, 1.5473, 3.1125, 1.0755, 2.2841, 3.4082], + device='cuda:1'), covar=tensor([0.0410, 0.2492, 0.2587, 0.1752, 0.0698, 0.2502, 0.1193, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0338, 0.0350, 0.0318, 0.0346, 0.0330, 0.0334, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:02:46,462 INFO [train.py:903] (1/4) Epoch 12, batch 750, loss[loss=0.2364, simple_loss=0.307, pruned_loss=0.08292, over 19862.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3064, pruned_loss=0.07921, over 3726787.66 frames. 
], batch size: 52, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:03:05,207 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 5.547e+02 6.814e+02 8.571e+02 2.504e+03, threshold=1.363e+03, percent-clipped=8.0 +2023-04-01 21:03:49,642 INFO [train.py:903] (1/4) Epoch 12, batch 800, loss[loss=0.246, simple_loss=0.3213, pruned_loss=0.08541, over 19308.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.307, pruned_loss=0.07989, over 3757542.86 frames. ], batch size: 66, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:07,045 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 21:04:50,833 INFO [train.py:903] (1/4) Epoch 12, batch 850, loss[loss=0.226, simple_loss=0.3039, pruned_loss=0.07401, over 19538.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3071, pruned_loss=0.0801, over 3773780.97 frames. ], batch size: 54, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:54,860 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75961.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:10,028 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.208e+02 6.361e+02 7.722e+02 1.579e+03, threshold=1.272e+03, percent-clipped=2.0 +2023-04-01 21:05:25,769 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75986.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:29,792 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1701, 1.1568, 1.6094, 0.8859, 2.3196, 2.9921, 2.6758, 3.1519], + device='cuda:1'), covar=tensor([0.1542, 0.3556, 0.3049, 0.2359, 0.0546, 0.0212, 0.0266, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0294, 0.0324, 0.0251, 0.0214, 0.0157, 0.0206, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 21:05:46,905 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 21:05:48,487 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:53,894 INFO [train.py:903] (1/4) Epoch 12, batch 900, loss[loss=0.222, simple_loss=0.2993, pruned_loss=0.07241, over 19534.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3062, pruned_loss=0.07892, over 3774457.97 frames. ], batch size: 54, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:06:13,739 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3124, 1.6958, 1.8815, 2.5471, 1.9397, 2.5263, 2.4546, 2.4656], + device='cuda:1'), covar=tensor([0.0721, 0.0970, 0.1007, 0.0957, 0.0998, 0.0630, 0.0890, 0.0586], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0224, 0.0222, 0.0244, 0.0235, 0.0212, 0.0195, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 21:06:20,385 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:06:57,938 INFO [train.py:903] (1/4) Epoch 12, batch 950, loss[loss=0.2731, simple_loss=0.3495, pruned_loss=0.09837, over 19353.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3062, pruned_loss=0.07886, over 3780726.45 frames. 
], batch size: 70, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:07:02,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 21:07:17,511 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 5.102e+02 6.493e+02 8.387e+02 1.985e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 21:07:59,850 INFO [train.py:903] (1/4) Epoch 12, batch 1000, loss[loss=0.2134, simple_loss=0.2882, pruned_loss=0.06925, over 19611.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3062, pruned_loss=0.07877, over 3789302.36 frames. ], batch size: 50, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:08:42,484 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1175, 3.5107, 1.9627, 2.1387, 3.2334, 1.6826, 1.3091, 2.0421], + device='cuda:1'), covar=tensor([0.1132, 0.0484, 0.0980, 0.0689, 0.0379, 0.1035, 0.0952, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0303, 0.0326, 0.0247, 0.0233, 0.0314, 0.0289, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:08:54,939 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 21:09:02,674 INFO [train.py:903] (1/4) Epoch 12, batch 1050, loss[loss=0.2259, simple_loss=0.3049, pruned_loss=0.0735, over 19680.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3068, pruned_loss=0.07938, over 3799503.43 frames. ], batch size: 55, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:09:20,681 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 5.178e+02 6.428e+02 8.624e+02 1.751e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-01 21:09:36,014 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 21:10:05,084 INFO [train.py:903] (1/4) Epoch 12, batch 1100, loss[loss=0.228, simple_loss=0.3026, pruned_loss=0.07666, over 19655.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3064, pruned_loss=0.0793, over 3804887.93 frames. ], batch size: 58, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:10:12,392 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8156, 1.4161, 1.4120, 1.7705, 1.5337, 1.5950, 1.4621, 1.6640], + device='cuda:1'), covar=tensor([0.0878, 0.1319, 0.1309, 0.0809, 0.1072, 0.0505, 0.1175, 0.0654], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0352, 0.0292, 0.0241, 0.0296, 0.0243, 0.0280, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:11:08,893 INFO [train.py:903] (1/4) Epoch 12, batch 1150, loss[loss=0.2632, simple_loss=0.3216, pruned_loss=0.1023, over 19747.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3072, pruned_loss=0.07967, over 3816870.97 frames. ], batch size: 51, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:11:27,475 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 5.432e+02 6.664e+02 8.568e+02 1.731e+03, threshold=1.333e+03, percent-clipped=3.0 +2023-04-01 21:11:44,833 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76287.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:12:10,717 INFO [train.py:903] (1/4) Epoch 12, batch 1200, loss[loss=0.2096, simple_loss=0.2868, pruned_loss=0.06618, over 19678.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3063, pruned_loss=0.07931, over 3818263.47 frames. 
], batch size: 60, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:12:15,533 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:12:45,755 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 21:12:52,904 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76341.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:13:14,063 INFO [train.py:903] (1/4) Epoch 12, batch 1250, loss[loss=0.2374, simple_loss=0.3115, pruned_loss=0.08169, over 19500.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3075, pruned_loss=0.08014, over 3816416.78 frames. ], batch size: 64, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:13:31,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 5.242e+02 6.279e+02 7.669e+02 1.575e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-01 21:14:15,615 INFO [train.py:903] (1/4) Epoch 12, batch 1300, loss[loss=0.2437, simple_loss=0.3198, pruned_loss=0.08378, over 19661.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3065, pruned_loss=0.07969, over 3825027.43 frames. ], batch size: 58, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:15:18,800 INFO [train.py:903] (1/4) Epoch 12, batch 1350, loss[loss=0.2331, simple_loss=0.3126, pruned_loss=0.0768, over 19175.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3054, pruned_loss=0.07901, over 3828492.95 frames. ], batch size: 69, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:15:37,068 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 5.247e+02 6.486e+02 7.910e+02 1.390e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 21:16:20,448 INFO [train.py:903] (1/4) Epoch 12, batch 1400, loss[loss=0.2739, simple_loss=0.3479, pruned_loss=0.0999, over 19483.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07888, over 3827752.19 frames. ], batch size: 64, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:16:50,873 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0273, 1.6931, 1.5085, 1.9315, 1.9655, 1.7616, 1.5002, 1.9106], + device='cuda:1'), covar=tensor([0.0857, 0.1535, 0.1448, 0.1002, 0.1077, 0.0505, 0.1316, 0.0660], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0354, 0.0292, 0.0242, 0.0298, 0.0245, 0.0279, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:17:01,834 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5626, 4.0987, 2.7642, 3.5905, 1.0882, 3.8887, 3.8979, 3.9409], + device='cuda:1'), covar=tensor([0.0631, 0.1006, 0.1775, 0.0799, 0.3733, 0.0845, 0.0780, 0.1084], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0367, 0.0442, 0.0319, 0.0380, 0.0374, 0.0358, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:17:23,464 INFO [train.py:903] (1/4) Epoch 12, batch 1450, loss[loss=0.2526, simple_loss=0.3253, pruned_loss=0.08995, over 19647.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3057, pruned_loss=0.07896, over 3826651.50 frames. ], batch size: 55, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:17:25,874 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-01 21:17:40,981 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 4.935e+02 6.510e+02 8.250e+02 1.774e+03, threshold=1.302e+03, percent-clipped=3.0 +2023-04-01 21:18:24,869 INFO [train.py:903] (1/4) Epoch 12, batch 1500, loss[loss=0.2078, simple_loss=0.2919, pruned_loss=0.06181, over 19680.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3057, pruned_loss=0.07924, over 3837659.37 frames. ], batch size: 53, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:18:53,780 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76631.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:19:24,220 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76656.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:19:27,188 INFO [train.py:903] (1/4) Epoch 12, batch 1550, loss[loss=0.2343, simple_loss=0.3049, pruned_loss=0.08189, over 19490.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3055, pruned_loss=0.07921, over 3833595.16 frames. ], batch size: 64, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:19:46,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 5.709e+02 6.905e+02 9.210e+02 1.884e+03, threshold=1.381e+03, percent-clipped=4.0 +2023-04-01 21:20:01,238 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76685.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:20:19,367 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 21:20:23,676 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-01 21:20:29,930 INFO [train.py:903] (1/4) Epoch 12, batch 1600, loss[loss=0.2193, simple_loss=0.3013, pruned_loss=0.06868, over 19674.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3062, pruned_loss=0.07916, over 3823477.81 frames. ], batch size: 58, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:20:53,997 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 21:21:16,436 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76746.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:21:23,393 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2169, 2.0318, 1.5223, 1.2686, 1.8770, 1.0516, 1.2167, 1.8420], + device='cuda:1'), covar=tensor([0.0886, 0.0619, 0.0935, 0.0742, 0.0427, 0.1164, 0.0656, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0300, 0.0321, 0.0243, 0.0231, 0.0316, 0.0289, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:21:29,665 INFO [train.py:903] (1/4) Epoch 12, batch 1650, loss[loss=0.2488, simple_loss=0.3283, pruned_loss=0.08462, over 19580.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3069, pruned_loss=0.07956, over 3827884.07 frames. ], batch size: 61, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:21:30,361 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. 
limit=2.0 +2023-04-01 21:21:46,992 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:21:49,936 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.672e+02 5.088e+02 6.360e+02 7.707e+02 1.579e+03, threshold=1.272e+03, percent-clipped=3.0 +2023-04-01 21:22:06,308 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6956, 1.7811, 1.6827, 2.7454, 1.7648, 2.5248, 1.9678, 1.3949], + device='cuda:1'), covar=tensor([0.4250, 0.3568, 0.2197, 0.2073, 0.3865, 0.1693, 0.4714, 0.4092], + device='cuda:1'), in_proj_covar=tensor([0.0791, 0.0816, 0.0646, 0.0889, 0.0781, 0.0706, 0.0779, 0.0709], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 21:22:22,654 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76800.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:22:33,393 INFO [train.py:903] (1/4) Epoch 12, batch 1700, loss[loss=0.268, simple_loss=0.33, pruned_loss=0.103, over 19780.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3068, pruned_loss=0.08008, over 3808245.06 frames. ], batch size: 54, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:22:59,166 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5874, 1.3937, 1.3984, 1.8671, 1.5500, 1.8883, 1.8963, 1.6884], + device='cuda:1'), covar=tensor([0.0744, 0.0861, 0.0966, 0.0795, 0.0856, 0.0654, 0.0790, 0.0605], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0246, 0.0236, 0.0213, 0.0195, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 21:23:10,306 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 21:23:16,984 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1028, 2.0003, 1.8275, 1.6670, 1.4662, 1.6412, 0.3265, 1.0142], + device='cuda:1'), covar=tensor([0.0465, 0.0464, 0.0328, 0.0606, 0.0962, 0.0631, 0.1011, 0.0798], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0329, 0.0332, 0.0356, 0.0427, 0.0354, 0.0309, 0.0322], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 21:23:33,278 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:23:34,183 INFO [train.py:903] (1/4) Epoch 12, batch 1750, loss[loss=0.1852, simple_loss=0.2563, pruned_loss=0.05706, over 19713.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3075, pruned_loss=0.08095, over 3791606.37 frames. ], batch size: 46, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:23:36,422 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 21:23:53,640 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.587e+02 7.065e+02 8.864e+02 2.096e+03, threshold=1.413e+03, percent-clipped=6.0 +2023-04-01 21:24:37,132 INFO [train.py:903] (1/4) Epoch 12, batch 1800, loss[loss=0.2321, simple_loss=0.2953, pruned_loss=0.08448, over 19782.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3061, pruned_loss=0.08001, over 3802848.90 frames. 
], batch size: 49, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:24:44,476 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0835, 3.6261, 2.1608, 2.2173, 3.3335, 1.8662, 1.3115, 2.2553], + device='cuda:1'), covar=tensor([0.1311, 0.0515, 0.0996, 0.0724, 0.0429, 0.1049, 0.1012, 0.0580], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0300, 0.0320, 0.0243, 0.0231, 0.0317, 0.0289, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:25:32,729 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 21:25:38,686 INFO [train.py:903] (1/4) Epoch 12, batch 1850, loss[loss=0.2336, simple_loss=0.3152, pruned_loss=0.07603, over 19603.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3063, pruned_loss=0.08018, over 3773203.06 frames. ], batch size: 57, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:25:45,874 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.06 vs. limit=5.0 +2023-04-01 21:25:59,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 5.640e+02 7.026e+02 8.457e+02 1.689e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 21:26:12,301 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 21:26:34,388 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77002.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:26:41,890 INFO [train.py:903] (1/4) Epoch 12, batch 1900, loss[loss=0.2049, simple_loss=0.28, pruned_loss=0.06488, over 19835.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3072, pruned_loss=0.08094, over 3774626.53 frames. ], batch size: 52, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:27:00,222 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 21:27:04,914 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 21:27:06,401 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:27:06,420 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:28,488 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 21:27:36,569 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77052.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:42,271 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77056.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:27:43,957 INFO [train.py:903] (1/4) Epoch 12, batch 1950, loss[loss=0.2195, simple_loss=0.284, pruned_loss=0.07747, over 19409.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3063, pruned_loss=0.08006, over 3793178.37 frames. 
], batch size: 48, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:28:03,223 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.700e+02 5.468e+02 7.092e+02 9.065e+02 1.810e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 21:28:11,819 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77081.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:28:45,470 INFO [train.py:903] (1/4) Epoch 12, batch 2000, loss[loss=0.2151, simple_loss=0.2953, pruned_loss=0.06746, over 19578.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3055, pruned_loss=0.07937, over 3802527.40 frames. ], batch size: 52, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:28:51,484 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77113.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:29:15,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6536, 2.2322, 2.0360, 2.5818, 2.5784, 2.1084, 1.8681, 2.5421], + device='cuda:1'), covar=tensor([0.0828, 0.1572, 0.1440, 0.0976, 0.1187, 0.0485, 0.1293, 0.0603], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0349, 0.0292, 0.0238, 0.0294, 0.0242, 0.0278, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:29:43,034 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 21:29:46,534 INFO [train.py:903] (1/4) Epoch 12, batch 2050, loss[loss=0.1989, simple_loss=0.2907, pruned_loss=0.05357, over 19542.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3048, pruned_loss=0.07864, over 3818264.33 frames. ], batch size: 54, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:30:02,213 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 21:30:03,450 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 21:30:06,829 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.225e+02 5.837e+02 7.308e+02 9.815e+02 2.165e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 21:30:26,558 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 21:30:40,591 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77201.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:30:49,627 INFO [train.py:903] (1/4) Epoch 12, batch 2100, loss[loss=0.2182, simple_loss=0.284, pruned_loss=0.07616, over 19731.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3062, pruned_loss=0.07926, over 3826363.73 frames. ], batch size: 46, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:31:10,328 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-01 21:31:12,359 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9596, 3.6061, 2.4468, 3.2686, 1.1490, 3.4434, 3.3494, 3.4625], + device='cuda:1'), covar=tensor([0.0843, 0.1284, 0.1974, 0.0938, 0.3606, 0.0889, 0.0987, 0.1160], + device='cuda:1'), in_proj_covar=tensor([0.0445, 0.0369, 0.0444, 0.0321, 0.0384, 0.0378, 0.0364, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:31:17,027 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:31:20,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 21:31:28,740 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1787, 1.1163, 1.1372, 1.3270, 1.0482, 1.3566, 1.2914, 1.2465], + device='cuda:1'), covar=tensor([0.0945, 0.1036, 0.1109, 0.0741, 0.0913, 0.0784, 0.0840, 0.0794], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0226, 0.0226, 0.0249, 0.0236, 0.0215, 0.0197, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 21:31:40,676 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 21:31:52,741 INFO [train.py:903] (1/4) Epoch 12, batch 2150, loss[loss=0.2709, simple_loss=0.3359, pruned_loss=0.103, over 19465.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3067, pruned_loss=0.07962, over 3822835.36 frames. ], batch size: 64, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:32:13,146 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.202e+02 5.327e+02 7.168e+02 9.347e+02 2.125e+03, threshold=1.434e+03, percent-clipped=4.0 +2023-04-01 21:32:33,277 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:39,473 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77295.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:51,916 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-01 21:32:53,896 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0231, 1.2171, 1.7616, 0.9028, 2.3911, 3.0675, 2.7953, 3.2232], + device='cuda:1'), covar=tensor([0.1553, 0.3479, 0.2830, 0.2279, 0.0451, 0.0183, 0.0246, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0295, 0.0324, 0.0250, 0.0215, 0.0158, 0.0205, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 21:32:55,907 INFO [train.py:903] (1/4) Epoch 12, batch 2200, loss[loss=0.2088, simple_loss=0.286, pruned_loss=0.06583, over 19743.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3058, pruned_loss=0.07908, over 3815446.64 frames. ], batch size: 51, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:33:05,519 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77316.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:33:25,223 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 21:33:57,353 INFO [train.py:903] (1/4) Epoch 12, batch 2250, loss[loss=0.2223, simple_loss=0.3065, pruned_loss=0.06903, over 19602.00 frames. 
], tot_loss[loss=0.2326, simple_loss=0.3068, pruned_loss=0.07915, over 3807427.23 frames. ], batch size: 57, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:34:08,704 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0990, 1.2988, 1.6383, 1.1776, 2.7245, 3.4799, 3.2007, 3.6744], + device='cuda:1'), covar=tensor([0.1555, 0.3326, 0.3088, 0.2141, 0.0477, 0.0154, 0.0215, 0.0202], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0294, 0.0325, 0.0250, 0.0215, 0.0158, 0.0206, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 21:34:18,143 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.298e+02 6.341e+02 7.997e+02 1.542e+03, threshold=1.268e+03, percent-clipped=2.0 +2023-04-01 21:34:27,405 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7595, 3.0690, 3.2621, 3.2570, 1.6385, 3.0078, 2.7813, 3.0284], + device='cuda:1'), covar=tensor([0.1347, 0.2323, 0.0599, 0.0676, 0.4022, 0.1177, 0.0601, 0.1016], + device='cuda:1'), in_proj_covar=tensor([0.0683, 0.0613, 0.0812, 0.0690, 0.0738, 0.0565, 0.0498, 0.0749], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 21:34:35,672 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 21:34:58,616 INFO [train.py:903] (1/4) Epoch 12, batch 2300, loss[loss=0.2505, simple_loss=0.3242, pruned_loss=0.08843, over 19685.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3065, pruned_loss=0.07913, over 3813737.46 frames. ], batch size: 59, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:35:11,953 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 21:35:59,772 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77457.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:36:00,826 INFO [train.py:903] (1/4) Epoch 12, batch 2350, loss[loss=0.2026, simple_loss=0.2739, pruned_loss=0.06565, over 19755.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3055, pruned_loss=0.07844, over 3830733.19 frames. ], batch size: 46, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:36:22,276 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.245e+02 6.457e+02 8.417e+02 4.507e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-01 21:36:41,243 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 21:36:59,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 21:37:02,537 INFO [train.py:903] (1/4) Epoch 12, batch 2400, loss[loss=0.2499, simple_loss=0.3184, pruned_loss=0.09069, over 19794.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3054, pruned_loss=0.07832, over 3819137.41 frames. 
], batch size: 56, lr: 6.99e-03, grad_scale: 8.0 +2023-04-01 21:37:17,475 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4017, 1.2082, 1.2007, 1.7481, 1.5021, 1.6081, 1.8433, 1.4128], + device='cuda:1'), covar=tensor([0.0836, 0.0982, 0.1090, 0.0729, 0.0748, 0.0722, 0.0694, 0.0744], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0224, 0.0224, 0.0248, 0.0234, 0.0214, 0.0196, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 21:37:36,493 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6558, 1.7187, 1.9213, 2.1879, 1.5319, 2.0116, 2.0972, 1.8197], + device='cuda:1'), covar=tensor([0.3294, 0.2735, 0.1426, 0.1479, 0.2835, 0.1324, 0.3492, 0.2530], + device='cuda:1'), in_proj_covar=tensor([0.0802, 0.0823, 0.0653, 0.0897, 0.0788, 0.0711, 0.0789, 0.0719], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 21:38:04,504 INFO [train.py:903] (1/4) Epoch 12, batch 2450, loss[loss=0.2309, simple_loss=0.2989, pruned_loss=0.08145, over 19833.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3045, pruned_loss=0.07785, over 3823711.70 frames. ], batch size: 52, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:38:21,457 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:21,528 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:22,403 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:24,484 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.530e+02 6.529e+02 8.263e+02 1.639e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 21:38:52,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77597.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:56,131 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8000, 4.3155, 2.7484, 3.9026, 1.3353, 4.1524, 4.1256, 4.2836], + device='cuda:1'), covar=tensor([0.0575, 0.0966, 0.1926, 0.0717, 0.3591, 0.0710, 0.0734, 0.0969], + device='cuda:1'), in_proj_covar=tensor([0.0440, 0.0365, 0.0440, 0.0319, 0.0380, 0.0373, 0.0360, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:39:05,891 INFO [train.py:903] (1/4) Epoch 12, batch 2500, loss[loss=0.2809, simple_loss=0.3458, pruned_loss=0.108, over 19406.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3049, pruned_loss=0.07805, over 3826195.98 frames. 
], batch size: 70, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:39:27,226 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6497, 1.3944, 1.4283, 2.0206, 1.6064, 1.9382, 1.9679, 1.8066], + device='cuda:1'), covar=tensor([0.0724, 0.0888, 0.0967, 0.0746, 0.0866, 0.0641, 0.0789, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0244, 0.0232, 0.0211, 0.0193, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 21:39:39,437 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,540 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,770 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7773, 1.5922, 1.4177, 1.8280, 1.6819, 1.3442, 1.4714, 1.6822], + device='cuda:1'), covar=tensor([0.1141, 0.1812, 0.1742, 0.1190, 0.1448, 0.1011, 0.1600, 0.0916], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0347, 0.0290, 0.0237, 0.0292, 0.0240, 0.0277, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:39:45,170 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:08,015 INFO [train.py:903] (1/4) Epoch 12, batch 2550, loss[loss=0.2431, simple_loss=0.3122, pruned_loss=0.08699, over 19489.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3054, pruned_loss=0.07836, over 3812580.24 frames. ], batch size: 64, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:40:30,542 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.340e+02 6.709e+02 8.508e+02 1.809e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-01 21:40:46,038 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:54,809 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:41:06,542 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 21:41:10,775 INFO [train.py:903] (1/4) Epoch 12, batch 2600, loss[loss=0.261, simple_loss=0.3312, pruned_loss=0.09542, over 19672.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3055, pruned_loss=0.07874, over 3815638.17 frames. ], batch size: 58, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:41:54,847 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 21:42:02,057 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:07,417 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:11,597 INFO [train.py:903] (1/4) Epoch 12, batch 2650, loss[loss=0.242, simple_loss=0.3217, pruned_loss=0.08115, over 18159.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3061, pruned_loss=0.07894, over 3811045.78 frames. 
], batch size: 83, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:42:32,180 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.775e+02 6.860e+02 8.613e+02 1.817e+03, threshold=1.372e+03, percent-clipped=5.0 +2023-04-01 21:42:33,293 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 21:43:06,944 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6084, 4.2189, 2.6515, 3.7982, 1.2836, 3.9022, 3.9580, 4.0165], + device='cuda:1'), covar=tensor([0.0623, 0.0978, 0.2011, 0.0689, 0.3672, 0.0785, 0.0763, 0.1079], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0374, 0.0450, 0.0326, 0.0386, 0.0379, 0.0367, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:43:12,308 INFO [train.py:903] (1/4) Epoch 12, batch 2700, loss[loss=0.2294, simple_loss=0.2941, pruned_loss=0.08238, over 19385.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3055, pruned_loss=0.07894, over 3813728.96 frames. ], batch size: 48, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:43:38,351 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:08,780 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77853.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:14,414 INFO [train.py:903] (1/4) Epoch 12, batch 2750, loss[loss=0.2115, simple_loss=0.2848, pruned_loss=0.06912, over 19807.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.306, pruned_loss=0.07923, over 3824501.08 frames. ], batch size: 49, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:44:36,445 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.785e+02 6.935e+02 8.560e+02 1.739e+03, threshold=1.387e+03, percent-clipped=4.0 +2023-04-01 21:45:15,196 INFO [train.py:903] (1/4) Epoch 12, batch 2800, loss[loss=0.1736, simple_loss=0.2554, pruned_loss=0.04591, over 19313.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3046, pruned_loss=0.07839, over 3829890.85 frames. ], batch size: 44, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:46:01,236 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:19,705 INFO [train.py:903] (1/4) Epoch 12, batch 2850, loss[loss=0.2383, simple_loss=0.3152, pruned_loss=0.0807, over 19372.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3051, pruned_loss=0.07862, over 3820199.65 frames. 
], batch size: 66, lr: 6.97e-03, grad_scale: 4.0 +2023-04-01 21:46:22,373 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6954, 1.3213, 1.4614, 1.6485, 3.2214, 0.9973, 2.2443, 3.5793], + device='cuda:1'), covar=tensor([0.0456, 0.2615, 0.2714, 0.1679, 0.0771, 0.2650, 0.1172, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0333, 0.0345, 0.0315, 0.0340, 0.0330, 0.0331, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:46:32,709 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:41,209 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.493e+02 5.663e+02 6.634e+02 8.773e+02 3.814e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-01 21:46:43,718 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:20,404 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:22,313 INFO [train.py:903] (1/4) Epoch 12, batch 2900, loss[loss=0.1924, simple_loss=0.2726, pruned_loss=0.05606, over 19478.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3042, pruned_loss=0.0779, over 3817598.55 frames. ], batch size: 49, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:47:22,357 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 21:47:25,113 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:39,742 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3159, 1.5043, 1.9357, 1.6230, 3.1917, 2.6688, 3.5073, 1.4655], + device='cuda:1'), covar=tensor([0.2078, 0.3548, 0.2179, 0.1625, 0.1268, 0.1677, 0.1419, 0.3398], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0578, 0.0608, 0.0437, 0.0594, 0.0489, 0.0644, 0.0495], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 21:47:52,624 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:57,903 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78035.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:02,341 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:25,027 INFO [train.py:903] (1/4) Epoch 12, batch 2950, loss[loss=0.2723, simple_loss=0.3419, pruned_loss=0.1013, over 19253.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3046, pruned_loss=0.07796, over 3821989.75 frames. ], batch size: 66, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:48:48,725 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.583e+02 6.866e+02 9.102e+02 1.641e+03, threshold=1.373e+03, percent-clipped=7.0 +2023-04-01 21:49:10,010 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:49:28,175 INFO [train.py:903] (1/4) Epoch 12, batch 3000, loss[loss=0.2616, simple_loss=0.3457, pruned_loss=0.08871, over 19664.00 frames. 
], tot_loss[loss=0.2308, simple_loss=0.305, pruned_loss=0.07832, over 3826602.52 frames. ], batch size: 58, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:49:28,175 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 21:49:40,670 INFO [train.py:937] (1/4) Epoch 12, validation: loss=0.1772, simple_loss=0.2779, pruned_loss=0.0383, over 944034.00 frames. +2023-04-01 21:49:40,671 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 21:49:45,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 21:50:38,086 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:50:42,290 INFO [train.py:903] (1/4) Epoch 12, batch 3050, loss[loss=0.1845, simple_loss=0.2647, pruned_loss=0.05221, over 19743.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3059, pruned_loss=0.07889, over 3833725.64 frames. ], batch size: 46, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:51:04,780 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 5.238e+02 6.566e+02 8.426e+02 1.854e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 21:51:20,725 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8836, 1.9529, 2.1031, 2.5784, 1.7025, 2.4298, 2.3292, 2.0211], + device='cuda:1'), covar=tensor([0.3328, 0.3100, 0.1415, 0.1722, 0.3349, 0.1528, 0.3426, 0.2618], + device='cuda:1'), in_proj_covar=tensor([0.0795, 0.0822, 0.0648, 0.0891, 0.0783, 0.0708, 0.0785, 0.0715], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 21:51:43,592 INFO [train.py:903] (1/4) Epoch 12, batch 3100, loss[loss=0.2138, simple_loss=0.2909, pruned_loss=0.06835, over 19581.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3063, pruned_loss=0.07953, over 3815555.84 frames. ], batch size: 52, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:14,949 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4874, 2.3315, 1.6592, 1.5640, 2.1889, 1.2618, 1.4253, 1.9331], + device='cuda:1'), covar=tensor([0.1007, 0.0681, 0.1057, 0.0731, 0.0438, 0.1179, 0.0672, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0301, 0.0323, 0.0244, 0.0234, 0.0322, 0.0286, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:52:18,451 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 21:52:36,405 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:52:45,572 INFO [train.py:903] (1/4) Epoch 12, batch 3150, loss[loss=0.2782, simple_loss=0.3479, pruned_loss=0.1043, over 19666.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3064, pruned_loss=0.07923, over 3831550.74 frames. ], batch size: 53, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:53,234 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.20 vs. limit=5.0 +2023-04-01 21:53:07,631 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.745e+02 5.622e+02 7.364e+02 8.683e+02 1.879e+03, threshold=1.473e+03, percent-clipped=3.0 +2023-04-01 21:53:15,174 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-01 21:53:32,886 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5452, 1.6921, 2.0968, 1.6930, 2.5594, 2.9289, 2.9202, 3.1285], + device='cuda:1'), covar=tensor([0.1318, 0.2711, 0.2333, 0.2047, 0.0868, 0.0382, 0.0200, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0298, 0.0326, 0.0252, 0.0216, 0.0159, 0.0205, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 21:53:47,084 INFO [train.py:903] (1/4) Epoch 12, batch 3200, loss[loss=0.1952, simple_loss=0.2666, pruned_loss=0.06187, over 19718.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3062, pruned_loss=0.07963, over 3828539.39 frames. ], batch size: 46, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:39,758 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:54:50,766 INFO [train.py:903] (1/4) Epoch 12, batch 3250, loss[loss=0.2207, simple_loss=0.2915, pruned_loss=0.07494, over 19467.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3058, pruned_loss=0.0793, over 3829758.46 frames. ], batch size: 49, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:11,650 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:13,652 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.227e+02 6.164e+02 7.465e+02 1.234e+03, threshold=1.233e+03, percent-clipped=0.0 +2023-04-01 21:55:54,296 INFO [train.py:903] (1/4) Epoch 12, batch 3300, loss[loss=0.2397, simple_loss=0.305, pruned_loss=0.08716, over 19844.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3069, pruned_loss=0.07985, over 3818748.69 frames. ], batch size: 52, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:57,162 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:57,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 21:56:26,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:56:55,619 INFO [train.py:903] (1/4) Epoch 12, batch 3350, loss[loss=0.2417, simple_loss=0.3145, pruned_loss=0.08449, over 19606.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3067, pruned_loss=0.07943, over 3814359.84 frames. 
], batch size: 61, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:57:18,019 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 5.251e+02 6.145e+02 6.896e+02 1.617e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-01 21:57:20,556 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:57:36,597 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2991, 2.0666, 1.9702, 2.4447, 2.0639, 1.7392, 1.7153, 2.2719], + device='cuda:1'), covar=tensor([0.0961, 0.1660, 0.1501, 0.0897, 0.1417, 0.0717, 0.1424, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0348, 0.0291, 0.0236, 0.0293, 0.0240, 0.0276, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 21:57:57,201 INFO [train.py:903] (1/4) Epoch 12, batch 3400, loss[loss=0.2199, simple_loss=0.2941, pruned_loss=0.07284, over 19743.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.306, pruned_loss=0.07868, over 3820475.33 frames. ], batch size: 51, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:00,755 INFO [train.py:903] (1/4) Epoch 12, batch 3450, loss[loss=0.2179, simple_loss=0.2887, pruned_loss=0.0735, over 19396.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3061, pruned_loss=0.07889, over 3828795.39 frames. ], batch size: 48, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:06,189 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 21:59:21,439 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78574.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:59:23,280 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.692e+02 7.472e+02 9.495e+02 2.057e+03, threshold=1.494e+03, percent-clipped=9.0 +2023-04-01 21:59:44,667 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:00:03,311 INFO [train.py:903] (1/4) Epoch 12, batch 3500, loss[loss=0.3044, simple_loss=0.3572, pruned_loss=0.1258, over 13044.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3071, pruned_loss=0.07953, over 3827784.49 frames. ], batch size: 135, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 22:00:34,886 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-01 22:00:55,214 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1538, 1.9333, 1.8003, 2.1601, 1.8204, 1.8672, 1.7584, 2.1097], + device='cuda:1'), covar=tensor([0.0818, 0.1298, 0.1254, 0.0850, 0.1211, 0.0457, 0.1144, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0349, 0.0293, 0.0239, 0.0294, 0.0241, 0.0278, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:01:05,329 INFO [train.py:903] (1/4) Epoch 12, batch 3550, loss[loss=0.2251, simple_loss=0.2955, pruned_loss=0.07731, over 19458.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3067, pruned_loss=0.0795, over 3817828.04 frames. 
], batch size: 49, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:01:26,754 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 5.737e+02 7.042e+02 1.025e+03 1.962e+03, threshold=1.408e+03, percent-clipped=6.0 +2023-04-01 22:02:07,311 INFO [train.py:903] (1/4) Epoch 12, batch 3600, loss[loss=0.2115, simple_loss=0.2959, pruned_loss=0.06357, over 19623.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.07904, over 3835847.88 frames. ], batch size: 57, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:02:07,819 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2766, 2.1692, 1.9610, 1.7087, 1.6428, 1.8307, 0.4641, 1.0615], + device='cuda:1'), covar=tensor([0.0496, 0.0466, 0.0359, 0.0601, 0.0942, 0.0665, 0.1002, 0.0856], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0336, 0.0334, 0.0360, 0.0432, 0.0359, 0.0317, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:02:08,941 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:03:09,176 INFO [train.py:903] (1/4) Epoch 12, batch 3650, loss[loss=0.3104, simple_loss=0.3606, pruned_loss=0.1301, over 13349.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.306, pruned_loss=0.07905, over 3836917.10 frames. ], batch size: 136, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:03:33,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.318e+02 6.562e+02 8.219e+02 2.478e+03, threshold=1.312e+03, percent-clipped=4.0 +2023-04-01 22:03:37,859 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2845, 1.3335, 1.5844, 1.4950, 2.1782, 2.0022, 2.2114, 0.7927], + device='cuda:1'), covar=tensor([0.2195, 0.3715, 0.2207, 0.1739, 0.1351, 0.1848, 0.1325, 0.3681], + device='cuda:1'), in_proj_covar=tensor([0.0488, 0.0577, 0.0606, 0.0437, 0.0592, 0.0488, 0.0643, 0.0492], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:04:14,202 INFO [train.py:903] (1/4) Epoch 12, batch 3700, loss[loss=0.2143, simple_loss=0.2966, pruned_loss=0.06603, over 19663.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.306, pruned_loss=0.07878, over 3836741.89 frames. ], batch size: 55, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:04:31,407 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:05:15,856 INFO [train.py:903] (1/4) Epoch 12, batch 3750, loss[loss=0.1827, simple_loss=0.2656, pruned_loss=0.0499, over 19746.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3059, pruned_loss=0.07909, over 3832863.10 frames. 
], batch size: 51, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:05:18,442 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3684, 3.9431, 2.5389, 3.5533, 0.9995, 3.7280, 3.7210, 3.8359], + device='cuda:1'), covar=tensor([0.0650, 0.1012, 0.2031, 0.0739, 0.3870, 0.0740, 0.0812, 0.0947], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0369, 0.0445, 0.0321, 0.0381, 0.0376, 0.0364, 0.0399], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:05:37,742 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.608e+02 5.233e+02 6.179e+02 8.242e+02 1.500e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-01 22:06:16,426 INFO [train.py:903] (1/4) Epoch 12, batch 3800, loss[loss=0.2767, simple_loss=0.354, pruned_loss=0.09975, over 19336.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3049, pruned_loss=0.07823, over 3843437.01 frames. ], batch size: 66, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:06:29,218 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:06:53,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 22:06:54,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:17,950 INFO [train.py:903] (1/4) Epoch 12, batch 3850, loss[loss=0.206, simple_loss=0.2772, pruned_loss=0.0674, over 19348.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3058, pruned_loss=0.07914, over 3823085.79 frames. ], batch size: 47, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:07:19,949 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 22:07:27,616 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 22:07:28,345 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:40,748 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.982e+02 6.977e+02 9.373e+02 2.137e+03, threshold=1.395e+03, percent-clipped=8.0 +2023-04-01 22:07:49,892 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:58,116 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:08:21,052 INFO [train.py:903] (1/4) Epoch 12, batch 3900, loss[loss=0.2339, simple_loss=0.3114, pruned_loss=0.07824, over 19608.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3055, pruned_loss=0.07852, over 3824457.24 frames. ], batch size: 57, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:08:51,055 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79033.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:09:22,296 INFO [train.py:903] (1/4) Epoch 12, batch 3950, loss[loss=0.2093, simple_loss=0.29, pruned_loss=0.06431, over 19596.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3047, pruned_loss=0.07792, over 3831057.52 frames. ], batch size: 57, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:09:29,052 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 22:09:40,885 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2283, 2.0240, 1.4462, 1.2943, 1.8473, 1.0539, 1.2407, 1.6617], + device='cuda:1'), covar=tensor([0.0891, 0.0657, 0.1086, 0.0730, 0.0437, 0.1257, 0.0663, 0.0403], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0300, 0.0326, 0.0244, 0.0235, 0.0318, 0.0287, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:09:43,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.132e+02 6.993e+02 8.356e+02 2.478e+03, threshold=1.399e+03, percent-clipped=5.0 +2023-04-01 22:10:04,420 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.44 vs. limit=5.0 +2023-04-01 22:10:18,481 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3708, 1.2029, 1.2099, 1.7510, 1.5307, 1.6003, 1.7525, 1.4262], + device='cuda:1'), covar=tensor([0.0951, 0.1053, 0.1182, 0.0753, 0.0823, 0.0770, 0.0799, 0.0773], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0225, 0.0223, 0.0247, 0.0236, 0.0211, 0.0195, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 22:10:22,896 INFO [train.py:903] (1/4) Epoch 12, batch 4000, loss[loss=0.29, simple_loss=0.3584, pruned_loss=0.1108, over 19575.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07886, over 3829065.82 frames. ], batch size: 61, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:13,132 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 22:11:24,603 INFO [train.py:903] (1/4) Epoch 12, batch 4050, loss[loss=0.2489, simple_loss=0.3205, pruned_loss=0.0886, over 19088.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3053, pruned_loss=0.07882, over 3829147.26 frames. ], batch size: 69, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:47,128 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.067e+02 6.190e+02 7.758e+02 2.001e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-01 22:12:07,821 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:12:26,661 INFO [train.py:903] (1/4) Epoch 12, batch 4100, loss[loss=0.2383, simple_loss=0.3073, pruned_loss=0.08468, over 19481.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.079, over 3827337.22 frames. ], batch size: 64, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:12:39,310 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79218.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:13:03,856 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 22:13:27,004 INFO [train.py:903] (1/4) Epoch 12, batch 4150, loss[loss=0.2919, simple_loss=0.3489, pruned_loss=0.1175, over 13230.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.079, over 3816107.97 frames. 
], batch size: 135, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:13:45,807 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3211, 2.2779, 2.4155, 3.5116, 2.3207, 3.1529, 2.9168, 2.2624], + device='cuda:1'), covar=tensor([0.3844, 0.3362, 0.1441, 0.1738, 0.3751, 0.1452, 0.3382, 0.2737], + device='cuda:1'), in_proj_covar=tensor([0.0801, 0.0826, 0.0654, 0.0897, 0.0788, 0.0714, 0.0793, 0.0715], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 22:13:49,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 5.552e+02 6.928e+02 9.304e+02 2.111e+03, threshold=1.386e+03, percent-clipped=6.0 +2023-04-01 22:14:06,867 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79289.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:23,433 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79303.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:14:28,763 INFO [train.py:903] (1/4) Epoch 12, batch 4200, loss[loss=0.2943, simple_loss=0.3473, pruned_loss=0.1206, over 14022.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3053, pruned_loss=0.07844, over 3819886.47 frames. ], batch size: 137, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:14:33,318 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 22:14:35,749 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79314.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:53,795 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:22,528 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:31,219 INFO [train.py:903] (1/4) Epoch 12, batch 4250, loss[loss=0.2615, simple_loss=0.3354, pruned_loss=0.09381, over 17816.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3054, pruned_loss=0.0786, over 3797523.76 frames. ], batch size: 83, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:15:43,246 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 22:15:52,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 4.925e+02 6.557e+02 8.003e+02 1.515e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-01 22:15:54,756 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 22:16:00,672 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:16:13,131 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79393.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:16:33,046 INFO [train.py:903] (1/4) Epoch 12, batch 4300, loss[loss=0.2581, simple_loss=0.3235, pruned_loss=0.09636, over 19577.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3064, pruned_loss=0.07885, over 3817236.95 frames. ], batch size: 61, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:14,372 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79442.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:17:24,324 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-01 22:17:29,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1053, 1.9808, 1.7834, 2.2172, 2.0302, 1.8148, 1.9839, 2.0000], + device='cuda:1'), covar=tensor([0.0825, 0.1372, 0.1231, 0.0849, 0.1123, 0.0451, 0.1038, 0.0615], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0352, 0.0292, 0.0240, 0.0297, 0.0242, 0.0281, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:17:32,601 INFO [train.py:903] (1/4) Epoch 12, batch 4350, loss[loss=0.1941, simple_loss=0.2699, pruned_loss=0.05908, over 19745.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3062, pruned_loss=0.0787, over 3805609.28 frames. ], batch size: 45, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:52,358 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5135, 4.0750, 4.2359, 4.1901, 1.5141, 3.9736, 3.4997, 3.9130], + device='cuda:1'), covar=tensor([0.1396, 0.0773, 0.0516, 0.0585, 0.5312, 0.0710, 0.0595, 0.1024], + device='cuda:1'), in_proj_covar=tensor([0.0682, 0.0618, 0.0812, 0.0691, 0.0740, 0.0567, 0.0496, 0.0749], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 22:17:54,382 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.544e+02 6.866e+02 8.754e+02 2.036e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 22:18:34,726 INFO [train.py:903] (1/4) Epoch 12, batch 4400, loss[loss=0.2087, simple_loss=0.2802, pruned_loss=0.06861, over 19776.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3047, pruned_loss=0.078, over 3816277.06 frames. ], batch size: 48, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:18:58,958 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 22:19:07,331 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 22:19:38,009 INFO [train.py:903] (1/4) Epoch 12, batch 4450, loss[loss=0.2233, simple_loss=0.3052, pruned_loss=0.07071, over 19531.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3057, pruned_loss=0.07886, over 3801591.39 frames. ], batch size: 56, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:20:00,036 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.129e+02 5.261e+02 6.909e+02 8.531e+02 1.990e+03, threshold=1.382e+03, percent-clipped=5.0 +2023-04-01 22:20:00,417 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9956, 1.2114, 1.5593, 0.7850, 2.3930, 3.0436, 2.7267, 3.2303], + device='cuda:1'), covar=tensor([0.1637, 0.3656, 0.3140, 0.2448, 0.0514, 0.0174, 0.0259, 0.0226], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0300, 0.0329, 0.0252, 0.0219, 0.0161, 0.0207, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:20:33,637 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:20:41,079 INFO [train.py:903] (1/4) Epoch 12, batch 4500, loss[loss=0.201, simple_loss=0.2813, pruned_loss=0.06031, over 19842.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07833, over 3805068.10 frames. 
], batch size: 52, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:20:50,886 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4106, 2.0667, 1.9383, 2.9793, 2.3230, 2.6391, 2.7347, 2.6322], + device='cuda:1'), covar=tensor([0.0709, 0.0806, 0.0947, 0.0790, 0.0790, 0.0624, 0.0780, 0.0543], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0223, 0.0222, 0.0245, 0.0234, 0.0211, 0.0193, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 22:21:29,751 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79647.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:21:43,340 INFO [train.py:903] (1/4) Epoch 12, batch 4550, loss[loss=0.2228, simple_loss=0.2848, pruned_loss=0.08035, over 19778.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3054, pruned_loss=0.07817, over 3819698.78 frames. ], batch size: 47, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:21:47,118 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0495, 1.2169, 1.5709, 0.6281, 2.0231, 2.4622, 2.1757, 2.6267], + device='cuda:1'), covar=tensor([0.1411, 0.3400, 0.2936, 0.2295, 0.0543, 0.0231, 0.0328, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0298, 0.0327, 0.0250, 0.0218, 0.0160, 0.0206, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:21:52,476 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 22:21:58,518 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5086, 1.1479, 1.2875, 1.2738, 2.1753, 0.9337, 2.0261, 2.3339], + device='cuda:1'), covar=tensor([0.0663, 0.2667, 0.2665, 0.1495, 0.0809, 0.2015, 0.0976, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0342, 0.0352, 0.0320, 0.0344, 0.0330, 0.0336, 0.0356], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:22:04,161 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:04,985 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.458e+02 6.277e+02 7.572e+02 1.495e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-01 22:22:15,964 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 22:22:29,768 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:33,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:44,776 INFO [train.py:903] (1/4) Epoch 12, batch 4600, loss[loss=0.2382, simple_loss=0.3174, pruned_loss=0.07954, over 19703.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.305, pruned_loss=0.0778, over 3832414.71 frames. 
], batch size: 53, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:22:47,085 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4656, 1.8346, 1.9555, 2.8862, 2.0926, 2.8834, 2.6586, 2.6686], + device='cuda:1'), covar=tensor([0.0654, 0.0880, 0.0902, 0.0814, 0.0885, 0.0548, 0.0834, 0.0547], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0225, 0.0223, 0.0247, 0.0236, 0.0212, 0.0194, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 22:22:58,547 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9457, 3.5714, 2.3734, 3.2723, 0.8456, 3.4142, 3.4103, 3.4990], + device='cuda:1'), covar=tensor([0.0827, 0.1259, 0.2022, 0.0779, 0.3750, 0.0932, 0.0842, 0.1177], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0372, 0.0443, 0.0317, 0.0381, 0.0377, 0.0367, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:23:04,285 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79723.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:07,643 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:21,779 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79737.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:23:46,649 INFO [train.py:903] (1/4) Epoch 12, batch 4650, loss[loss=0.2428, simple_loss=0.3154, pruned_loss=0.08512, over 19613.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3053, pruned_loss=0.07827, over 3828991.00 frames. ], batch size: 50, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:51,762 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79762.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:24:06,326 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 22:24:09,608 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 5.614e+02 7.010e+02 8.934e+02 1.991e+03, threshold=1.402e+03, percent-clipped=7.0 +2023-04-01 22:24:13,555 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1644, 1.9349, 1.9617, 2.3193, 2.1210, 1.8350, 1.8927, 2.1681], + device='cuda:1'), covar=tensor([0.0721, 0.1197, 0.1055, 0.0662, 0.0934, 0.0449, 0.0987, 0.0509], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0354, 0.0294, 0.0241, 0.0299, 0.0244, 0.0283, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:24:14,686 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:24:15,616 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 22:24:48,917 INFO [train.py:903] (1/4) Epoch 12, batch 4700, loss[loss=0.1913, simple_loss=0.2691, pruned_loss=0.05676, over 19721.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3062, pruned_loss=0.07913, over 3819190.12 frames. 
], batch size: 51, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:24:52,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:12,487 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 22:25:30,392 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:45,590 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79852.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:25:52,119 INFO [train.py:903] (1/4) Epoch 12, batch 4750, loss[loss=0.2222, simple_loss=0.303, pruned_loss=0.07072, over 19304.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3052, pruned_loss=0.07822, over 3834859.36 frames. ], batch size: 66, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:14,211 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.848e+02 5.173e+02 6.366e+02 7.623e+02 1.625e+03, threshold=1.273e+03, percent-clipped=2.0 +2023-04-01 22:26:20,953 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 22:26:54,253 INFO [train.py:903] (1/4) Epoch 12, batch 4800, loss[loss=0.2667, simple_loss=0.335, pruned_loss=0.09918, over 19614.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3055, pruned_loss=0.07807, over 3827262.93 frames. ], batch size: 57, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:27:16,980 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2348, 1.2771, 1.2581, 1.0629, 1.0663, 1.0966, 0.0450, 0.3340], + device='cuda:1'), covar=tensor([0.0440, 0.0458, 0.0273, 0.0354, 0.0891, 0.0473, 0.0845, 0.0798], + device='cuda:1'), in_proj_covar=tensor([0.0340, 0.0327, 0.0330, 0.0355, 0.0428, 0.0356, 0.0310, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:27:42,679 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:27:45,278 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9416, 1.9633, 2.2060, 1.9746, 3.1569, 2.5957, 3.2322, 2.2231], + device='cuda:1'), covar=tensor([0.1718, 0.2974, 0.1900, 0.1505, 0.1107, 0.1561, 0.1140, 0.2528], + device='cuda:1'), in_proj_covar=tensor([0.0494, 0.0586, 0.0612, 0.0441, 0.0596, 0.0497, 0.0648, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 22:27:56,278 INFO [train.py:903] (1/4) Epoch 12, batch 4850, loss[loss=0.2679, simple_loss=0.3387, pruned_loss=0.09854, over 19712.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3052, pruned_loss=0.07805, over 3829882.91 frames. ], batch size: 59, lr: 6.88e-03, grad_scale: 16.0 +2023-04-01 22:27:57,043 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.97 vs. limit=5.0 +2023-04-01 22:28:19,180 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 5.432e+02 6.685e+02 9.186e+02 1.976e+03, threshold=1.337e+03, percent-clipped=11.0 +2023-04-01 22:28:23,585 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 22:28:43,813 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-01 22:28:48,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 22:28:51,008 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 22:28:52,775 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-01 22:28:59,979 INFO [train.py:903] (1/4) Epoch 12, batch 4900, loss[loss=0.1996, simple_loss=0.2878, pruned_loss=0.05571, over 19628.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3042, pruned_loss=0.07778, over 3821949.48 frames. ], batch size: 57, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:29:02,313 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 22:29:13,787 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80018.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:29:14,555 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:29:21,284 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 22:29:43,697 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80043.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:30:03,747 INFO [train.py:903] (1/4) Epoch 12, batch 4950, loss[loss=0.2817, simple_loss=0.3468, pruned_loss=0.1083, over 18832.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3044, pruned_loss=0.07821, over 3804118.91 frames. ], batch size: 74, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:30:08,656 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:13,652 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:20,440 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 22:30:26,823 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.263e+02 6.788e+02 8.958e+02 2.034e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 22:30:44,596 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80091.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:45,327 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 22:30:52,462 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:04,649 INFO [train.py:903] (1/4) Epoch 12, batch 5000, loss[loss=0.2691, simple_loss=0.3389, pruned_loss=0.09959, over 19384.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3058, pruned_loss=0.07927, over 3803196.01 frames. ], batch size: 70, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:31:05,110 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80108.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:31:13,753 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-01 22:31:22,088 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:24,081 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:25,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 22:31:37,147 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80133.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:31:38,231 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:32:06,839 INFO [train.py:903] (1/4) Epoch 12, batch 5050, loss[loss=0.2131, simple_loss=0.2846, pruned_loss=0.07075, over 19855.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3053, pruned_loss=0.07899, over 3812362.90 frames. ], batch size: 52, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:32:30,894 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 5.736e+02 7.301e+02 9.465e+02 2.500e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 22:32:35,788 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6312, 1.4074, 1.4378, 2.0101, 1.5626, 1.9325, 1.8649, 1.6758], + device='cuda:1'), covar=tensor([0.0797, 0.0984, 0.1041, 0.0744, 0.0853, 0.0673, 0.0871, 0.0681], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0226, 0.0225, 0.0246, 0.0235, 0.0212, 0.0196, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 22:32:41,375 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 22:33:08,740 INFO [train.py:903] (1/4) Epoch 12, batch 5100, loss[loss=0.1933, simple_loss=0.2751, pruned_loss=0.05581, over 19743.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3061, pruned_loss=0.07947, over 3815487.37 frames. ], batch size: 51, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:33:21,056 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 22:33:23,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 22:33:26,512 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 22:33:47,554 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:34:11,775 INFO [train.py:903] (1/4) Epoch 12, batch 5150, loss[loss=0.2862, simple_loss=0.3487, pruned_loss=0.1119, over 18731.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3064, pruned_loss=0.07949, over 3822384.33 frames. ], batch size: 74, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:34:23,653 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 22:34:34,583 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.377e+02 5.205e+02 6.062e+02 7.794e+02 1.645e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-01 22:34:59,538 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 22:35:02,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4681, 1.5322, 1.8244, 1.6724, 2.7410, 2.3172, 2.9596, 1.2981], + device='cuda:1'), covar=tensor([0.2177, 0.3869, 0.2320, 0.1756, 0.1391, 0.1866, 0.1384, 0.3645], + device='cuda:1'), in_proj_covar=tensor([0.0492, 0.0583, 0.0609, 0.0440, 0.0595, 0.0497, 0.0647, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 22:35:05,765 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:13,722 INFO [train.py:903] (1/4) Epoch 12, batch 5200, loss[loss=0.2065, simple_loss=0.2926, pruned_loss=0.06019, over 19761.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3057, pruned_loss=0.07874, over 3828484.52 frames. ], batch size: 54, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:35:26,531 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:27,324 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 22:35:58,851 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:36:03,532 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9027, 4.2976, 4.6093, 4.5905, 1.4610, 4.2494, 3.6955, 4.3217], + device='cuda:1'), covar=tensor([0.1627, 0.0686, 0.0569, 0.0664, 0.6171, 0.0718, 0.0633, 0.1132], + device='cuda:1'), in_proj_covar=tensor([0.0682, 0.0615, 0.0812, 0.0691, 0.0731, 0.0562, 0.0490, 0.0736], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 22:36:13,577 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 22:36:16,614 INFO [train.py:903] (1/4) Epoch 12, batch 5250, loss[loss=0.2665, simple_loss=0.3343, pruned_loss=0.09936, over 19763.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3054, pruned_loss=0.07834, over 3820668.59 frames. ], batch size: 63, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:36:41,376 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.664e+02 5.603e+02 6.465e+02 8.351e+02 1.434e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 22:36:57,041 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80390.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:18,490 INFO [train.py:903] (1/4) Epoch 12, batch 5300, loss[loss=0.2226, simple_loss=0.3034, pruned_loss=0.07095, over 19687.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3035, pruned_loss=0.07679, over 3830729.66 frames. ], batch size: 59, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:37:22,934 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:28,730 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:42,147 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 22:38:23,299 INFO [train.py:903] (1/4) Epoch 12, batch 5350, loss[loss=0.1912, simple_loss=0.2669, pruned_loss=0.05771, over 19733.00 frames. 
], tot_loss[loss=0.2286, simple_loss=0.3035, pruned_loss=0.07681, over 3834599.00 frames. ], batch size: 51, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:38:44,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.085e+02 6.728e+02 8.660e+02 2.071e+03, threshold=1.346e+03, percent-clipped=4.0 +2023-04-01 22:38:50,312 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:38:59,005 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 22:39:08,327 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80495.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:23,543 INFO [train.py:903] (1/4) Epoch 12, batch 5400, loss[loss=0.2403, simple_loss=0.3124, pruned_loss=0.08411, over 17350.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3045, pruned_loss=0.07773, over 3815170.07 frames. ], batch size: 101, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:39:28,312 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80512.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:38,431 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:40:24,807 INFO [train.py:903] (1/4) Epoch 12, batch 5450, loss[loss=0.2189, simple_loss=0.3023, pruned_loss=0.06773, over 19780.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3059, pruned_loss=0.07867, over 3817627.39 frames. ], batch size: 56, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:40:49,073 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 5.165e+02 6.319e+02 8.444e+02 1.726e+03, threshold=1.264e+03, percent-clipped=5.0 +2023-04-01 22:41:26,465 INFO [train.py:903] (1/4) Epoch 12, batch 5500, loss[loss=0.1994, simple_loss=0.2811, pruned_loss=0.05884, over 19600.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3047, pruned_loss=0.07802, over 3821329.40 frames. ], batch size: 50, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:41:53,692 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 22:42:12,372 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:42:29,084 INFO [train.py:903] (1/4) Epoch 12, batch 5550, loss[loss=0.2327, simple_loss=0.3125, pruned_loss=0.07644, over 19673.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3042, pruned_loss=0.07786, over 3815369.60 frames. ], batch size: 59, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:42:38,027 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 22:42:51,885 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.910e+02 5.291e+02 6.725e+02 8.423e+02 1.958e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-01 22:42:53,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 22:43:28,262 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 22:43:31,868 INFO [train.py:903] (1/4) Epoch 12, batch 5600, loss[loss=0.2052, simple_loss=0.273, pruned_loss=0.06874, over 19711.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3044, pruned_loss=0.07814, over 3832713.18 frames. 
], batch size: 46, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:44:12,809 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6706, 2.0266, 2.3524, 2.8672, 2.2502, 2.2502, 2.1116, 2.7683], + device='cuda:1'), covar=tensor([0.0761, 0.1715, 0.1170, 0.0812, 0.1238, 0.0435, 0.1110, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0351, 0.0293, 0.0238, 0.0299, 0.0240, 0.0281, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:44:29,071 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:30,112 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:33,522 INFO [train.py:903] (1/4) Epoch 12, batch 5650, loss[loss=0.223, simple_loss=0.3047, pruned_loss=0.0706, over 19676.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3043, pruned_loss=0.0785, over 3816762.32 frames. ], batch size: 58, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:44:36,175 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:57,739 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.237e+02 6.303e+02 7.862e+02 2.175e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-01 22:45:15,155 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:45:20,703 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 22:45:35,188 INFO [train.py:903] (1/4) Epoch 12, batch 5700, loss[loss=0.2217, simple_loss=0.3054, pruned_loss=0.069, over 19678.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3044, pruned_loss=0.07861, over 3816330.68 frames. ], batch size: 53, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:45:57,689 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:35,804 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:38,764 INFO [train.py:903] (1/4) Epoch 12, batch 5750, loss[loss=0.1812, simple_loss=0.2571, pruned_loss=0.05259, over 19761.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3043, pruned_loss=0.07816, over 3830626.59 frames. ], batch size: 47, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:46:39,963 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 22:46:47,871 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 22:46:52,465 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 22:46:52,775 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:47:00,404 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.415e+02 6.686e+02 8.336e+02 1.819e+03, threshold=1.337e+03, percent-clipped=1.0 +2023-04-01 22:47:40,324 INFO [train.py:903] (1/4) Epoch 12, batch 5800, loss[loss=0.2064, simple_loss=0.2744, pruned_loss=0.06925, over 19429.00 frames. 
], tot_loss[loss=0.2295, simple_loss=0.3038, pruned_loss=0.0776, over 3831646.65 frames. ], batch size: 48, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:16,662 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 22:48:21,655 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:48:34,497 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8454, 2.0666, 1.7105, 3.1079, 2.3203, 3.1364, 2.0269, 1.4425], + device='cuda:1'), covar=tensor([0.5207, 0.4204, 0.2665, 0.2671, 0.4352, 0.1959, 0.5718, 0.4883], + device='cuda:1'), in_proj_covar=tensor([0.0803, 0.0829, 0.0654, 0.0891, 0.0793, 0.0721, 0.0789, 0.0718], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 22:48:41,888 INFO [train.py:903] (1/4) Epoch 12, batch 5850, loss[loss=0.2142, simple_loss=0.2983, pruned_loss=0.06508, over 19416.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3038, pruned_loss=0.07783, over 3829330.97 frames. ], batch size: 70, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:58,000 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80971.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:06,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.387e+02 6.409e+02 7.183e+02 1.679e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 22:49:10,746 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5568, 4.1289, 2.4895, 3.7057, 0.8701, 3.8554, 3.8887, 3.9673], + device='cuda:1'), covar=tensor([0.0691, 0.1158, 0.2159, 0.0779, 0.4456, 0.0826, 0.0828, 0.1183], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0372, 0.0442, 0.0320, 0.0379, 0.0375, 0.0366, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:49:18,788 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5796, 4.1277, 2.6353, 3.7135, 0.8912, 3.8616, 3.8535, 3.9607], + device='cuda:1'), covar=tensor([0.0633, 0.1152, 0.1963, 0.0773, 0.4245, 0.0813, 0.0846, 0.1178], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0372, 0.0442, 0.0320, 0.0379, 0.0375, 0.0366, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:49:43,641 INFO [train.py:903] (1/4) Epoch 12, batch 5900, loss[loss=0.2231, simple_loss=0.3152, pruned_loss=0.06557, over 18787.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3049, pruned_loss=0.07866, over 3823708.74 frames. ], batch size: 74, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:49:47,128 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-01 22:49:55,125 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5323, 2.4701, 1.7628, 1.6531, 2.2305, 1.4036, 1.3519, 1.9374], + device='cuda:1'), covar=tensor([0.0996, 0.0628, 0.0883, 0.0665, 0.0438, 0.1064, 0.0669, 0.0434], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0307, 0.0328, 0.0249, 0.0242, 0.0323, 0.0287, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:49:55,145 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:09,739 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 22:50:25,095 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:35,645 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8880, 1.5499, 1.4134, 1.8224, 1.5501, 1.6007, 1.5140, 1.6463], + device='cuda:1'), covar=tensor([0.0846, 0.1193, 0.1397, 0.0822, 0.1095, 0.0486, 0.1153, 0.0707], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0358, 0.0298, 0.0242, 0.0304, 0.0246, 0.0284, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:50:47,179 INFO [train.py:903] (1/4) Epoch 12, batch 5950, loss[loss=0.1979, simple_loss=0.2773, pruned_loss=0.05927, over 19688.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3048, pruned_loss=0.07853, over 3834965.43 frames. ], batch size: 53, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:10,058 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.377e+02 6.760e+02 8.757e+02 1.989e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-01 22:51:36,989 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81098.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:51:49,674 INFO [train.py:903] (1/4) Epoch 12, batch 6000, loss[loss=0.239, simple_loss=0.3133, pruned_loss=0.08238, over 18882.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3035, pruned_loss=0.0776, over 3831752.11 frames. ], batch size: 74, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:49,674 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 22:52:03,359 INFO [train.py:937] (1/4) Epoch 12, validation: loss=0.1765, simple_loss=0.2774, pruned_loss=0.03779, over 944034.00 frames. +2023-04-01 22:52:03,361 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 22:52:25,573 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:34,826 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:35,857 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:57,875 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:05,460 INFO [train.py:903] (1/4) Epoch 12, batch 6050, loss[loss=0.2319, simple_loss=0.3069, pruned_loss=0.07844, over 19651.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3031, pruned_loss=0.07785, over 3832494.50 frames. 
], batch size: 58, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:53:27,718 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.041e+02 6.677e+02 8.260e+02 1.738e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-01 22:53:52,488 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:57,464 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 22:54:05,934 INFO [train.py:903] (1/4) Epoch 12, batch 6100, loss[loss=0.2562, simple_loss=0.3296, pruned_loss=0.09138, over 19656.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3043, pruned_loss=0.0782, over 3830869.48 frames. ], batch size: 55, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:54:11,849 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:20,970 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:28,710 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:57,891 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:00,248 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:06,761 INFO [train.py:903] (1/4) Epoch 12, batch 6150, loss[loss=0.2292, simple_loss=0.3098, pruned_loss=0.07427, over 18116.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3032, pruned_loss=0.07776, over 3842807.25 frames. ], batch size: 83, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:55:15,126 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-01 22:55:33,027 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 5.279e+02 6.402e+02 8.020e+02 2.167e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-04-01 22:55:39,197 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 22:55:40,515 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5696, 4.1181, 2.5793, 3.6461, 1.0901, 3.9728, 3.9651, 3.9403], + device='cuda:1'), covar=tensor([0.0639, 0.1038, 0.2024, 0.0828, 0.3763, 0.0720, 0.0762, 0.1191], + device='cuda:1'), in_proj_covar=tensor([0.0441, 0.0370, 0.0440, 0.0318, 0.0377, 0.0374, 0.0362, 0.0394], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 22:56:12,045 INFO [train.py:903] (1/4) Epoch 12, batch 6200, loss[loss=0.2483, simple_loss=0.3133, pruned_loss=0.09164, over 19527.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.304, pruned_loss=0.07823, over 3822015.89 frames. ], batch size: 54, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:56:22,853 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81317.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:56:25,200 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:57:11,775 INFO [train.py:903] (1/4) Epoch 12, batch 6250, loss[loss=0.2733, simple_loss=0.3362, pruned_loss=0.1052, over 19668.00 frames. 
], tot_loss[loss=0.2301, simple_loss=0.3041, pruned_loss=0.078, over 3829196.50 frames. ], batch size: 59, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:57:23,878 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3271, 1.4030, 1.7104, 1.6280, 2.7568, 2.2695, 2.8844, 1.1709], + device='cuda:1'), covar=tensor([0.2373, 0.4072, 0.2526, 0.1835, 0.1470, 0.2036, 0.1499, 0.4033], + device='cuda:1'), in_proj_covar=tensor([0.0495, 0.0589, 0.0619, 0.0442, 0.0598, 0.0505, 0.0652, 0.0502], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 22:57:33,898 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.420e+02 5.051e+02 6.163e+02 7.297e+02 1.401e+03, threshold=1.233e+03, percent-clipped=3.0 +2023-04-01 22:57:42,927 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 22:57:43,141 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 22:58:13,176 INFO [train.py:903] (1/4) Epoch 12, batch 6300, loss[loss=0.2139, simple_loss=0.2777, pruned_loss=0.07507, over 16570.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3041, pruned_loss=0.07772, over 3840038.14 frames. ], batch size: 36, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:59:14,516 INFO [train.py:903] (1/4) Epoch 12, batch 6350, loss[loss=0.2563, simple_loss=0.3197, pruned_loss=0.09644, over 13265.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3042, pruned_loss=0.07807, over 3835636.30 frames. ], batch size: 137, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 22:59:28,528 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:38,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3949, 1.5006, 1.7595, 1.5901, 2.6326, 2.2289, 2.7023, 1.1696], + device='cuda:1'), covar=tensor([0.2145, 0.3701, 0.2278, 0.1794, 0.1313, 0.1874, 0.1320, 0.3568], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0582, 0.0613, 0.0437, 0.0591, 0.0497, 0.0645, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 22:59:39,338 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 5.412e+02 6.997e+02 8.497e+02 1.750e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-04-01 22:59:40,730 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:58,963 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 22:59:59,487 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81494.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:14,192 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:16,036 INFO [train.py:903] (1/4) Epoch 12, batch 6400, loss[loss=0.2366, simple_loss=0.3095, pruned_loss=0.08187, over 19469.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3051, pruned_loss=0.07871, over 3821518.68 frames. 
], batch size: 64, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:00:45,268 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:01:19,151 INFO [train.py:903] (1/4) Epoch 12, batch 6450, loss[loss=0.2182, simple_loss=0.295, pruned_loss=0.07066, over 19756.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3058, pruned_loss=0.07886, over 3819213.52 frames. ], batch size: 54, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:01:41,360 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 5.839e+02 6.972e+02 8.326e+02 2.886e+03, threshold=1.394e+03, percent-clipped=3.0 +2023-04-01 23:02:03,363 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81593.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:02:06,518 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 23:02:20,381 INFO [train.py:903] (1/4) Epoch 12, batch 6500, loss[loss=0.2877, simple_loss=0.3451, pruned_loss=0.1151, over 13936.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3047, pruned_loss=0.07795, over 3821886.25 frames. ], batch size: 136, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:02:27,391 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 23:03:22,584 INFO [train.py:903] (1/4) Epoch 12, batch 6550, loss[loss=0.2338, simple_loss=0.3083, pruned_loss=0.07967, over 19863.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3038, pruned_loss=0.07714, over 3816919.60 frames. ], batch size: 52, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:03:26,197 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:28,586 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:47,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 5.338e+02 6.617e+02 7.892e+02 1.534e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 23:04:00,911 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:04:08,934 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0642, 2.7966, 2.1638, 2.5270, 0.8621, 2.6843, 2.5751, 2.7078], + device='cuda:1'), covar=tensor([0.1224, 0.1332, 0.1986, 0.0998, 0.3429, 0.1027, 0.1147, 0.1326], + device='cuda:1'), in_proj_covar=tensor([0.0441, 0.0372, 0.0440, 0.0317, 0.0378, 0.0372, 0.0366, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:04:24,231 INFO [train.py:903] (1/4) Epoch 12, batch 6600, loss[loss=0.1825, simple_loss=0.2588, pruned_loss=0.05316, over 19734.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3043, pruned_loss=0.07754, over 3814408.18 frames. ], batch size: 46, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:04:56,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-04-01 23:05:15,796 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1410, 1.2106, 1.9009, 1.5822, 3.1188, 4.4040, 4.3073, 4.8031], + device='cuda:1'), covar=tensor([0.1694, 0.3733, 0.3047, 0.2047, 0.0503, 0.0190, 0.0161, 0.0124], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0299, 0.0328, 0.0252, 0.0220, 0.0163, 0.0206, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:05:26,106 INFO [train.py:903] (1/4) Epoch 12, batch 6650, loss[loss=0.2168, simple_loss=0.302, pruned_loss=0.06578, over 19477.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3048, pruned_loss=0.07749, over 3809521.74 frames. ], batch size: 64, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:46,860 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:05:47,589 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.145e+02 6.461e+02 8.134e+02 1.737e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-01 23:05:49,140 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:06:26,738 INFO [train.py:903] (1/4) Epoch 12, batch 6700, loss[loss=0.2643, simple_loss=0.3337, pruned_loss=0.09745, over 19740.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3055, pruned_loss=0.07811, over 3809573.80 frames. ], batch size: 63, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:07:17,011 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:26,538 INFO [train.py:903] (1/4) Epoch 12, batch 6750, loss[loss=0.2661, simple_loss=0.3398, pruned_loss=0.09623, over 19270.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3069, pruned_loss=0.07926, over 3816407.27 frames. ], batch size: 66, lr: 6.80e-03, grad_scale: 4.0 +2023-04-01 23:07:37,176 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2047, 3.6347, 2.2200, 2.2193, 3.2953, 1.8661, 1.6733, 2.2491], + device='cuda:1'), covar=tensor([0.1338, 0.0525, 0.0988, 0.0754, 0.0447, 0.1107, 0.0874, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0306, 0.0325, 0.0246, 0.0240, 0.0325, 0.0288, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:07:45,324 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:48,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8937, 1.5062, 1.4784, 1.7751, 1.6193, 1.6357, 1.4456, 1.7315], + device='cuda:1'), covar=tensor([0.0894, 0.1227, 0.1356, 0.0868, 0.1038, 0.0519, 0.1250, 0.0698], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0349, 0.0293, 0.0237, 0.0293, 0.0240, 0.0278, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:07:49,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+02 6.276e+02 7.333e+02 1.082e+03 2.540e+03, threshold=1.467e+03, percent-clipped=11.0 +2023-04-01 23:08:23,232 INFO [train.py:903] (1/4) Epoch 12, batch 6800, loss[loss=0.2507, simple_loss=0.3291, pruned_loss=0.08617, over 18810.00 frames. 
], tot_loss[loss=0.2312, simple_loss=0.3058, pruned_loss=0.07832, over 3825968.36 frames. ], batch size: 74, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:09:09,106 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 23:09:10,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 23:09:12,679 INFO [train.py:903] (1/4) Epoch 13, batch 0, loss[loss=0.2187, simple_loss=0.3003, pruned_loss=0.06854, over 19638.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.3003, pruned_loss=0.06854, over 19638.00 frames. ], batch size: 61, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:09:12,679 INFO [train.py:928] (1/4) Computing validation loss +2023-04-01 23:09:23,578 INFO [train.py:937] (1/4) Epoch 13, validation: loss=0.176, simple_loss=0.2772, pruned_loss=0.03738, over 944034.00 frames. +2023-04-01 23:09:23,579 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18649MB +2023-04-01 23:09:35,402 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 23:09:41,795 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6143, 2.2008, 2.1414, 2.6273, 2.4662, 2.0262, 1.7976, 2.6830], + device='cuda:1'), covar=tensor([0.0848, 0.1884, 0.1489, 0.0988, 0.1400, 0.0688, 0.1478, 0.0662], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0352, 0.0294, 0.0239, 0.0295, 0.0242, 0.0279, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:09:47,676 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.44 vs. limit=5.0 +2023-04-01 23:10:14,565 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.222e+02 6.740e+02 8.452e+02 3.268e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-01 23:10:17,105 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2944, 3.0082, 2.1547, 2.7310, 0.7927, 2.9003, 2.8211, 2.9120], + device='cuda:1'), covar=tensor([0.1143, 0.1490, 0.2189, 0.1096, 0.3856, 0.1078, 0.1090, 0.1450], + device='cuda:1'), in_proj_covar=tensor([0.0441, 0.0371, 0.0441, 0.0316, 0.0377, 0.0370, 0.0364, 0.0398], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:10:23,830 INFO [train.py:903] (1/4) Epoch 13, batch 50, loss[loss=0.2059, simple_loss=0.2761, pruned_loss=0.0678, over 19416.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3002, pruned_loss=0.07428, over 859330.05 frames. ], batch size: 48, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:10:46,563 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:10:58,445 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3800, 3.0251, 2.3363, 2.2841, 2.1597, 2.3507, 0.9865, 2.1820], + device='cuda:1'), covar=tensor([0.0543, 0.0432, 0.0525, 0.0913, 0.0908, 0.0948, 0.1050, 0.0832], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0330, 0.0330, 0.0357, 0.0428, 0.0356, 0.0312, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:10:59,203 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-01 23:11:20,796 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:21,013 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:23,300 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:25,072 INFO [train.py:903] (1/4) Epoch 13, batch 100, loss[loss=0.2277, simple_loss=0.3025, pruned_loss=0.07648, over 19096.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3002, pruned_loss=0.07405, over 1522794.91 frames. ], batch size: 69, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:11:36,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 23:11:52,552 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:55,566 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:11,460 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:16,819 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.805e+02 6.218e+02 7.513e+02 1.266e+03, threshold=1.244e+03, percent-clipped=0.0 +2023-04-01 23:12:25,778 INFO [train.py:903] (1/4) Epoch 13, batch 150, loss[loss=0.2505, simple_loss=0.3191, pruned_loss=0.09094, over 19623.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3045, pruned_loss=0.07638, over 2025701.06 frames. ], batch size: 57, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:10,427 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3453, 3.9392, 2.6426, 3.5281, 1.0996, 3.7487, 3.7702, 3.7926], + device='cuda:1'), covar=tensor([0.0695, 0.1074, 0.1905, 0.0781, 0.3804, 0.0772, 0.0804, 0.1104], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0370, 0.0444, 0.0316, 0.0380, 0.0374, 0.0367, 0.0399], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:13:23,612 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 23:13:24,756 INFO [train.py:903] (1/4) Epoch 13, batch 200, loss[loss=0.2114, simple_loss=0.3023, pruned_loss=0.06029, over 19775.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3054, pruned_loss=0.07696, over 2442785.17 frames. 
], batch size: 56, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:40,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:13:42,567 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0729, 1.7677, 1.6239, 2.0952, 1.9407, 1.7793, 1.7000, 1.9641], + device='cuda:1'), covar=tensor([0.0879, 0.1531, 0.1417, 0.0946, 0.1168, 0.0492, 0.1164, 0.0690], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0348, 0.0292, 0.0238, 0.0292, 0.0240, 0.0278, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:14:04,570 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1912, 3.7171, 2.3597, 2.2327, 3.3299, 2.0154, 1.4951, 2.2103], + device='cuda:1'), covar=tensor([0.1204, 0.0483, 0.0887, 0.0722, 0.0449, 0.0994, 0.0882, 0.0630], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0306, 0.0326, 0.0246, 0.0241, 0.0321, 0.0288, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:14:14,407 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.002e+02 5.972e+02 7.403e+02 2.257e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-01 23:14:26,895 INFO [train.py:903] (1/4) Epoch 13, batch 250, loss[loss=0.2356, simple_loss=0.3095, pruned_loss=0.08087, over 19601.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3039, pruned_loss=0.07674, over 2751552.45 frames. ], batch size: 57, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:26,891 INFO [train.py:903] (1/4) Epoch 13, batch 300, loss[loss=0.2079, simple_loss=0.2741, pruned_loss=0.07081, over 19733.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3035, pruned_loss=0.07686, over 2966225.66 frames. ], batch size: 46, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:18,765 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.753e+02 6.815e+02 9.164e+02 1.837e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 23:16:28,128 INFO [train.py:903] (1/4) Epoch 13, batch 350, loss[loss=0.2535, simple_loss=0.3284, pruned_loss=0.08933, over 19615.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3036, pruned_loss=0.07692, over 3174445.25 frames. ], batch size: 57, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:30,470 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 23:17:28,528 INFO [train.py:903] (1/4) Epoch 13, batch 400, loss[loss=0.2257, simple_loss=0.2989, pruned_loss=0.07622, over 19545.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3037, pruned_loss=0.0771, over 3324029.04 frames. 
], batch size: 54, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:17:47,112 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:04,893 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:10,379 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5631, 4.1418, 2.6504, 3.6496, 0.8706, 3.9457, 3.9215, 4.0621], + device='cuda:1'), covar=tensor([0.0643, 0.1035, 0.1822, 0.0779, 0.3928, 0.0708, 0.0775, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0440, 0.0370, 0.0442, 0.0318, 0.0378, 0.0371, 0.0365, 0.0398], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:18:21,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.325e+02 6.166e+02 7.720e+02 2.046e+03, threshold=1.233e+03, percent-clipped=4.0 +2023-04-01 23:18:28,267 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2360, 1.7880, 1.9373, 2.6583, 2.0679, 2.4776, 2.6591, 2.5353], + device='cuda:1'), covar=tensor([0.0755, 0.0931, 0.0977, 0.0899, 0.0911, 0.0741, 0.0839, 0.0573], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0226, 0.0226, 0.0245, 0.0234, 0.0211, 0.0193, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 23:18:31,268 INFO [train.py:903] (1/4) Epoch 13, batch 450, loss[loss=0.2453, simple_loss=0.323, pruned_loss=0.08378, over 19743.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3016, pruned_loss=0.07585, over 3432298.09 frames. ], batch size: 51, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:18:53,540 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:04,678 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 23:19:04,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 23:19:09,596 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:23,759 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82428.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:25,737 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1375, 1.0904, 1.4354, 1.1633, 2.3852, 3.1839, 2.9772, 3.5257], + device='cuda:1'), covar=tensor([0.1794, 0.4754, 0.4254, 0.2272, 0.0670, 0.0247, 0.0292, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0299, 0.0327, 0.0252, 0.0221, 0.0161, 0.0206, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:19:33,807 INFO [train.py:903] (1/4) Epoch 13, batch 500, loss[loss=0.2147, simple_loss=0.2779, pruned_loss=0.07572, over 19742.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3017, pruned_loss=0.07635, over 3520311.68 frames. 
], batch size: 47, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:20:06,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:20:27,378 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.153e+02 6.569e+02 8.401e+02 1.477e+03, threshold=1.314e+03, percent-clipped=3.0 +2023-04-01 23:20:35,263 INFO [train.py:903] (1/4) Epoch 13, batch 550, loss[loss=0.245, simple_loss=0.3235, pruned_loss=0.08328, over 19502.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.302, pruned_loss=0.07632, over 3590772.18 frames. ], batch size: 64, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:21:05,900 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82511.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:21:30,868 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:21:35,044 INFO [train.py:903] (1/4) Epoch 13, batch 600, loss[loss=0.1926, simple_loss=0.2686, pruned_loss=0.05825, over 19847.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.303, pruned_loss=0.07712, over 3635233.90 frames. ], batch size: 52, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:22:17,360 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 23:22:20,684 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9061, 4.4273, 2.8733, 3.8769, 1.0826, 4.3040, 4.2699, 4.3306], + device='cuda:1'), covar=tensor([0.0533, 0.0982, 0.1767, 0.0751, 0.3725, 0.0629, 0.0706, 0.0907], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0372, 0.0442, 0.0319, 0.0381, 0.0374, 0.0367, 0.0401], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:22:28,776 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.313e+02 6.751e+02 8.249e+02 1.619e+03, threshold=1.350e+03, percent-clipped=3.0 +2023-04-01 23:22:36,736 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 23:22:36,937 INFO [train.py:903] (1/4) Epoch 13, batch 650, loss[loss=0.2257, simple_loss=0.3009, pruned_loss=0.07529, over 19736.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3018, pruned_loss=0.07656, over 3683730.17 frames. ], batch size: 51, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:22:38,408 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:23:40,974 INFO [train.py:903] (1/4) Epoch 13, batch 700, loss[loss=0.2062, simple_loss=0.2761, pruned_loss=0.06815, over 19741.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3029, pruned_loss=0.07677, over 3706759.43 frames. 
], batch size: 51, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:24:21,263 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9793, 3.4095, 1.9730, 1.8817, 3.0118, 1.5729, 1.3471, 2.2120], + device='cuda:1'), covar=tensor([0.1252, 0.0499, 0.0952, 0.0851, 0.0411, 0.1142, 0.0873, 0.0586], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0307, 0.0329, 0.0248, 0.0242, 0.0323, 0.0289, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:24:36,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.358e+02 6.603e+02 8.553e+02 2.977e+03, threshold=1.321e+03, percent-clipped=4.0 +2023-04-01 23:24:44,577 INFO [train.py:903] (1/4) Epoch 13, batch 750, loss[loss=0.2773, simple_loss=0.3379, pruned_loss=0.1083, over 13357.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3041, pruned_loss=0.0776, over 3718742.48 frames. ], batch size: 138, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:10,146 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:28,783 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:47,330 INFO [train.py:903] (1/4) Epoch 13, batch 800, loss[loss=0.2403, simple_loss=0.3199, pruned_loss=0.0804, over 19533.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3044, pruned_loss=0.07743, over 3741918.89 frames. ], batch size: 54, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:58,375 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:26:01,431 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 23:26:42,349 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.821e+02 5.442e+02 6.436e+02 7.821e+02 1.140e+03, threshold=1.287e+03, percent-clipped=0.0 +2023-04-01 23:26:50,564 INFO [train.py:903] (1/4) Epoch 13, batch 850, loss[loss=0.2143, simple_loss=0.2993, pruned_loss=0.06461, over 19718.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3039, pruned_loss=0.07702, over 3769884.04 frames. ], batch size: 59, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:26:53,211 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:27,000 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:38,355 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:44,767 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 23:27:52,721 INFO [train.py:903] (1/4) Epoch 13, batch 900, loss[loss=0.275, simple_loss=0.3229, pruned_loss=0.1135, over 19758.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3037, pruned_loss=0.07735, over 3792663.55 frames. ], batch size: 46, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:27:59,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 23:28:09,491 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-01 23:28:19,295 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82855.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:28:47,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.906e+02 6.958e+02 9.103e+02 2.196e+03, threshold=1.392e+03, percent-clipped=5.0 +2023-04-01 23:28:59,864 INFO [train.py:903] (1/4) Epoch 13, batch 950, loss[loss=0.1982, simple_loss=0.2803, pruned_loss=0.058, over 19840.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3034, pruned_loss=0.07732, over 3808641.02 frames. ], batch size: 52, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:29:04,321 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 23:29:42,097 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8582, 3.9792, 4.3531, 4.3671, 2.6029, 4.0150, 3.7266, 4.0759], + device='cuda:1'), covar=tensor([0.1138, 0.2127, 0.0573, 0.0538, 0.3866, 0.0933, 0.0545, 0.0906], + device='cuda:1'), in_proj_covar=tensor([0.0707, 0.0623, 0.0837, 0.0714, 0.0754, 0.0581, 0.0506, 0.0761], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-01 23:29:54,815 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0320, 3.7072, 2.7911, 3.2357, 1.7832, 3.4549, 3.4288, 3.5715], + device='cuda:1'), covar=tensor([0.0813, 0.1043, 0.1983, 0.0931, 0.2782, 0.0897, 0.0938, 0.1302], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0371, 0.0444, 0.0321, 0.0382, 0.0376, 0.0369, 0.0401], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:29:55,944 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:30:01,534 INFO [train.py:903] (1/4) Epoch 13, batch 1000, loss[loss=0.1918, simple_loss=0.2686, pruned_loss=0.05748, over 19780.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3017, pruned_loss=0.0762, over 3802419.41 frames. ], batch size: 47, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:30:39,444 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 23:30:44,771 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:30:50,683 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3169, 2.0495, 2.0556, 2.7980, 2.3086, 2.2077, 2.2350, 2.6033], + device='cuda:1'), covar=tensor([0.0879, 0.1665, 0.1333, 0.0795, 0.1254, 0.0469, 0.1067, 0.0584], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0350, 0.0296, 0.0238, 0.0296, 0.0241, 0.0280, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:30:52,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-01 23:30:54,571 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.161e+02 6.395e+02 8.326e+02 2.115e+03, threshold=1.279e+03, percent-clipped=2.0 +2023-04-01 23:30:57,047 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82981.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:31:02,718 INFO [train.py:903] (1/4) Epoch 13, batch 1050, loss[loss=0.2264, simple_loss=0.3057, pruned_loss=0.07361, over 19608.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3032, pruned_loss=0.07715, over 3811262.38 frames. ], batch size: 61, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:31:25,460 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3276, 2.2079, 1.9187, 1.7893, 1.7417, 1.8136, 0.5417, 1.1182], + device='cuda:1'), covar=tensor([0.0470, 0.0444, 0.0348, 0.0555, 0.0853, 0.0629, 0.1011, 0.0844], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0333, 0.0334, 0.0360, 0.0429, 0.0356, 0.0316, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:31:34,337 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 23:32:04,667 INFO [train.py:903] (1/4) Epoch 13, batch 1100, loss[loss=0.304, simple_loss=0.3613, pruned_loss=0.1233, over 13913.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3035, pruned_loss=0.07729, over 3815092.94 frames. ], batch size: 136, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:32:19,542 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:32:57,834 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 5.014e+02 6.117e+02 7.878e+02 1.226e+03, threshold=1.223e+03, percent-clipped=0.0 +2023-04-01 23:32:58,284 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:08,489 INFO [train.py:903] (1/4) Epoch 13, batch 1150, loss[loss=0.2485, simple_loss=0.3202, pruned_loss=0.08845, over 19778.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3028, pruned_loss=0.07699, over 3825096.22 frames. ], batch size: 56, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:33:30,502 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:40,733 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2573, 1.3761, 1.6824, 1.4674, 2.5175, 2.1442, 2.6514, 1.0473], + device='cuda:1'), covar=tensor([0.2260, 0.3847, 0.2329, 0.1773, 0.1455, 0.1900, 0.1396, 0.3779], + device='cuda:1'), in_proj_covar=tensor([0.0494, 0.0583, 0.0618, 0.0442, 0.0598, 0.0498, 0.0646, 0.0499], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:34:10,933 INFO [train.py:903] (1/4) Epoch 13, batch 1200, loss[loss=0.2406, simple_loss=0.3129, pruned_loss=0.08414, over 18278.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3025, pruned_loss=0.07681, over 3824180.69 frames. ], batch size: 83, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:34:40,376 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-01 23:35:06,465 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.101e+02 7.395e+02 1.032e+03 1.939e+03, threshold=1.479e+03, percent-clipped=13.0 +2023-04-01 23:35:12,380 INFO [train.py:903] (1/4) Epoch 13, batch 1250, loss[loss=0.2146, simple_loss=0.2985, pruned_loss=0.06533, over 19527.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3019, pruned_loss=0.07672, over 3833631.43 frames. ], batch size: 54, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:35:19,062 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 23:35:48,382 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83214.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:36:02,574 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83226.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:36:13,266 INFO [train.py:903] (1/4) Epoch 13, batch 1300, loss[loss=0.2207, simple_loss=0.2979, pruned_loss=0.07179, over 19656.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3025, pruned_loss=0.07705, over 3836498.18 frames. ], batch size: 53, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:36:33,767 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83251.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:37:08,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 5.042e+02 6.195e+02 7.677e+02 1.204e+03, threshold=1.239e+03, percent-clipped=0.0 +2023-04-01 23:37:17,158 INFO [train.py:903] (1/4) Epoch 13, batch 1350, loss[loss=0.2823, simple_loss=0.3417, pruned_loss=0.1115, over 19739.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3023, pruned_loss=0.07702, over 3845181.95 frames. ], batch size: 63, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:37:37,997 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:37:38,131 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:07,354 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:09,913 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:21,071 INFO [train.py:903] (1/4) Epoch 13, batch 1400, loss[loss=0.2056, simple_loss=0.2795, pruned_loss=0.06587, over 19378.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3019, pruned_loss=0.07657, over 3845993.21 frames. ], batch size: 47, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:38:30,598 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7746, 4.4010, 2.6210, 3.8352, 1.2006, 4.1235, 4.1356, 4.2190], + device='cuda:1'), covar=tensor([0.0551, 0.0919, 0.1963, 0.0711, 0.3574, 0.0712, 0.0793, 0.0940], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0370, 0.0444, 0.0322, 0.0381, 0.0378, 0.0369, 0.0402], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:39:16,727 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.533e+02 6.494e+02 7.603e+02 1.656e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 23:39:20,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-01 23:39:22,624 INFO [train.py:903] (1/4) Epoch 13, batch 1450, loss[loss=0.2466, simple_loss=0.3019, pruned_loss=0.09563, over 19796.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3026, pruned_loss=0.07694, over 3847953.62 frames. ], batch size: 49, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:24,403 INFO [train.py:903] (1/4) Epoch 13, batch 1500, loss[loss=0.2078, simple_loss=0.2747, pruned_loss=0.07044, over 19360.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3025, pruned_loss=0.07675, over 3849919.43 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:29,249 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:41:19,836 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.552e+02 5.496e+02 6.437e+02 7.955e+02 2.023e+03, threshold=1.287e+03, percent-clipped=5.0 +2023-04-01 23:41:26,532 INFO [train.py:903] (1/4) Epoch 13, batch 1550, loss[loss=0.1964, simple_loss=0.2762, pruned_loss=0.05824, over 19406.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3025, pruned_loss=0.07668, over 3826207.91 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:41:32,716 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 23:42:30,072 INFO [train.py:903] (1/4) Epoch 13, batch 1600, loss[loss=0.198, simple_loss=0.2689, pruned_loss=0.06352, over 19764.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3028, pruned_loss=0.07689, over 3833672.46 frames. ], batch size: 47, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:42:52,458 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8206, 1.1971, 1.5696, 0.5518, 1.9786, 2.3970, 2.0921, 2.5552], + device='cuda:1'), covar=tensor([0.1599, 0.3370, 0.2889, 0.2417, 0.0556, 0.0263, 0.0320, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0298, 0.0325, 0.0251, 0.0219, 0.0161, 0.0205, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:42:53,310 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 23:42:55,751 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:43:25,327 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 5.445e+02 6.386e+02 7.908e+02 1.256e+03, threshold=1.277e+03, percent-clipped=0.0 +2023-04-01 23:43:31,085 INFO [train.py:903] (1/4) Epoch 13, batch 1650, loss[loss=0.2133, simple_loss=0.2932, pruned_loss=0.06667, over 19370.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3037, pruned_loss=0.07772, over 3829744.32 frames. ], batch size: 70, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:44:33,574 INFO [train.py:903] (1/4) Epoch 13, batch 1700, loss[loss=0.1953, simple_loss=0.2719, pruned_loss=0.05935, over 19315.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3043, pruned_loss=0.07813, over 3823794.33 frames. 
], batch size: 44, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:44:38,735 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4515, 1.6126, 1.7408, 1.8023, 4.0672, 1.0609, 2.4890, 4.3061], + device='cuda:1'), covar=tensor([0.0468, 0.2566, 0.2577, 0.1782, 0.0744, 0.2750, 0.1456, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0343, 0.0354, 0.0322, 0.0347, 0.0332, 0.0338, 0.0365], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:44:38,836 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1018, 3.1480, 1.9827, 1.9918, 2.8604, 1.7205, 1.4102, 2.1143], + device='cuda:1'), covar=tensor([0.1117, 0.0614, 0.0955, 0.0740, 0.0503, 0.1108, 0.0867, 0.0620], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0305, 0.0325, 0.0248, 0.0238, 0.0321, 0.0284, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:44:42,287 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0499, 1.2559, 1.6566, 1.3968, 2.8688, 3.8082, 3.5852, 4.0491], + device='cuda:1'), covar=tensor([0.1626, 0.3428, 0.3021, 0.2002, 0.0472, 0.0131, 0.0184, 0.0179], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0297, 0.0325, 0.0250, 0.0218, 0.0160, 0.0204, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:44:45,679 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:15,482 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 23:45:19,389 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:27,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.386e+02 6.785e+02 9.131e+02 1.645e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 23:45:33,647 INFO [train.py:903] (1/4) Epoch 13, batch 1750, loss[loss=0.2118, simple_loss=0.2843, pruned_loss=0.06961, over 19858.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3033, pruned_loss=0.07767, over 3816833.91 frames. ], batch size: 52, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:45:49,000 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83696.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:19,392 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:37,939 INFO [train.py:903] (1/4) Epoch 13, batch 1800, loss[loss=0.24, simple_loss=0.2959, pruned_loss=0.092, over 19390.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3029, pruned_loss=0.07703, over 3806712.54 frames. 
], batch size: 48, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:47:08,593 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:29,816 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:32,957 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.542e+02 6.939e+02 8.095e+02 2.139e+03, threshold=1.388e+03, percent-clipped=3.0 +2023-04-01 23:47:35,257 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 23:47:39,807 INFO [train.py:903] (1/4) Epoch 13, batch 1850, loss[loss=0.2306, simple_loss=0.3123, pruned_loss=0.07443, over 19552.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3039, pruned_loss=0.07727, over 3820874.76 frames. ], batch size: 61, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:11,512 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 23:48:38,894 INFO [train.py:903] (1/4) Epoch 13, batch 1900, loss[loss=0.2411, simple_loss=0.3153, pruned_loss=0.08344, over 18699.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3044, pruned_loss=0.07745, over 3819836.66 frames. ], batch size: 74, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:56,286 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 23:49:00,755 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 23:49:23,793 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 23:49:32,947 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.453e+02 6.640e+02 7.751e+02 1.927e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-01 23:49:36,680 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8242, 4.3600, 2.7502, 3.8486, 1.0354, 4.1616, 4.1930, 4.2358], + device='cuda:1'), covar=tensor([0.0543, 0.0937, 0.1817, 0.0720, 0.3687, 0.0718, 0.0691, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0439, 0.0367, 0.0438, 0.0316, 0.0375, 0.0374, 0.0364, 0.0396], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:49:38,631 INFO [train.py:903] (1/4) Epoch 13, batch 1950, loss[loss=0.2472, simple_loss=0.3135, pruned_loss=0.09046, over 19517.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3047, pruned_loss=0.07768, over 3816984.61 frames. ], batch size: 54, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:50:31,120 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:50:40,981 INFO [train.py:903] (1/4) Epoch 13, batch 2000, loss[loss=0.2077, simple_loss=0.2787, pruned_loss=0.06837, over 19724.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.304, pruned_loss=0.07715, over 3820050.09 frames. 
], batch size: 51, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:02,466 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:17,791 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:36,105 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.295e+02 5.067e+02 6.527e+02 8.467e+02 1.955e+03, threshold=1.305e+03, percent-clipped=7.0 +2023-04-01 23:51:38,291 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 23:51:38,745 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2359, 1.3055, 1.2597, 1.0717, 1.0927, 1.1654, 0.0385, 0.4096], + device='cuda:1'), covar=tensor([0.0584, 0.0537, 0.0343, 0.0438, 0.1064, 0.0455, 0.0980, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0333, 0.0333, 0.0356, 0.0423, 0.0356, 0.0313, 0.0323], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:51:42,654 INFO [train.py:903] (1/4) Epoch 13, batch 2050, loss[loss=0.2413, simple_loss=0.3152, pruned_loss=0.08369, over 18183.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3045, pruned_loss=0.07794, over 3819714.09 frames. ], batch size: 83, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:46,683 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2840, 1.2522, 1.6571, 1.3662, 2.8034, 3.7396, 3.5104, 3.9899], + device='cuda:1'), covar=tensor([0.1527, 0.3548, 0.3124, 0.2070, 0.0498, 0.0160, 0.0198, 0.0177], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0299, 0.0328, 0.0252, 0.0219, 0.0162, 0.0207, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:51:56,805 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 23:51:57,792 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 23:52:01,352 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6202, 1.4209, 1.4775, 2.1128, 1.6541, 1.8912, 2.0242, 1.7188], + device='cuda:1'), covar=tensor([0.0793, 0.0965, 0.1018, 0.0787, 0.0871, 0.0726, 0.0866, 0.0703], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0222, 0.0244, 0.0232, 0.0210, 0.0194, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-01 23:52:03,616 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:21,312 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 23:52:22,804 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:44,657 INFO [train.py:903] (1/4) Epoch 13, batch 2100, loss[loss=0.2464, simple_loss=0.3192, pruned_loss=0.08677, over 19784.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3051, pruned_loss=0.07818, over 3819107.82 frames. 
], batch size: 56, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:52:52,281 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:57,292 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7202, 1.8191, 2.0522, 2.3741, 1.6455, 2.1515, 2.2591, 1.9422], + device='cuda:1'), covar=tensor([0.3522, 0.2833, 0.1393, 0.1726, 0.3181, 0.1592, 0.3527, 0.2637], + device='cuda:1'), in_proj_covar=tensor([0.0812, 0.0841, 0.0659, 0.0897, 0.0797, 0.0726, 0.0800, 0.0722], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 23:53:14,858 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 23:53:36,228 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 23:53:39,575 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.545e+02 6.946e+02 9.457e+02 3.064e+03, threshold=1.389e+03, percent-clipped=12.0 +2023-04-01 23:53:45,268 INFO [train.py:903] (1/4) Epoch 13, batch 2150, loss[loss=0.2314, simple_loss=0.3088, pruned_loss=0.07705, over 19765.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3061, pruned_loss=0.0787, over 3804672.63 frames. ], batch size: 54, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:54:23,413 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:31,389 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:49,420 INFO [train.py:903] (1/4) Epoch 13, batch 2200, loss[loss=0.2854, simple_loss=0.3529, pruned_loss=0.109, over 19718.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3054, pruned_loss=0.07806, over 3809094.31 frames. ], batch size: 63, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:55:44,488 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.936e+02 7.647e+02 9.699e+02 2.302e+03, threshold=1.529e+03, percent-clipped=8.0 +2023-04-01 23:55:50,229 INFO [train.py:903] (1/4) Epoch 13, batch 2250, loss[loss=0.2304, simple_loss=0.3037, pruned_loss=0.07856, over 17497.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3061, pruned_loss=0.07846, over 3806499.07 frames. 
], batch size: 101, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:55:56,006 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5559, 1.1464, 1.3993, 1.1906, 2.2196, 0.9319, 1.9622, 2.3817], + device='cuda:1'), covar=tensor([0.0656, 0.2645, 0.2556, 0.1603, 0.0869, 0.2043, 0.0976, 0.0494], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0345, 0.0356, 0.0324, 0.0349, 0.0334, 0.0344, 0.0366], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:56:28,845 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:56:33,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8508, 2.0293, 2.3861, 2.1938, 3.1369, 3.7328, 3.6049, 3.9620], + device='cuda:1'), covar=tensor([0.1293, 0.2628, 0.2340, 0.1776, 0.0935, 0.0277, 0.0165, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0300, 0.0327, 0.0251, 0.0219, 0.0163, 0.0206, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:56:36,138 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1302, 5.5206, 3.1219, 4.8354, 1.1394, 5.5643, 5.4932, 5.6231], + device='cuda:1'), covar=tensor([0.0400, 0.0757, 0.1622, 0.0584, 0.3936, 0.0505, 0.0620, 0.0773], + device='cuda:1'), in_proj_covar=tensor([0.0441, 0.0369, 0.0445, 0.0318, 0.0380, 0.0374, 0.0368, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-01 23:56:51,981 INFO [train.py:903] (1/4) Epoch 13, batch 2300, loss[loss=0.2286, simple_loss=0.3068, pruned_loss=0.07519, over 19618.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3056, pruned_loss=0.0784, over 3797473.11 frames. ], batch size: 57, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:53,453 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84237.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:05,854 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 23:57:15,080 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:47,146 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 5.192e+02 6.483e+02 8.696e+02 2.103e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 23:57:52,183 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4267, 1.4186, 1.6962, 1.6092, 2.7267, 2.3642, 2.8391, 1.1612], + device='cuda:1'), covar=tensor([0.2257, 0.4094, 0.2581, 0.1817, 0.1434, 0.1869, 0.1423, 0.3892], + device='cuda:1'), in_proj_covar=tensor([0.0494, 0.0585, 0.0619, 0.0441, 0.0597, 0.0497, 0.0647, 0.0502], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-01 23:57:52,885 INFO [train.py:903] (1/4) Epoch 13, batch 2350, loss[loss=0.1972, simple_loss=0.2694, pruned_loss=0.06252, over 19487.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3044, pruned_loss=0.07748, over 3802088.96 frames. 
], batch size: 49, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:58:25,984 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:58:37,175 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 23:58:54,395 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 23:58:57,889 INFO [train.py:903] (1/4) Epoch 13, batch 2400, loss[loss=0.2508, simple_loss=0.3224, pruned_loss=0.0896, over 19342.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3039, pruned_loss=0.07716, over 3804512.02 frames. ], batch size: 66, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:59:11,480 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:20,714 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:54,315 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.138e+02 6.932e+02 8.383e+02 1.660e+03, threshold=1.386e+03, percent-clipped=4.0 +2023-04-01 23:59:54,739 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4434, 2.2486, 2.0602, 1.9865, 1.7072, 1.8127, 0.7390, 1.2267], + device='cuda:1'), covar=tensor([0.0417, 0.0440, 0.0325, 0.0517, 0.0825, 0.0690, 0.0911, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0344, 0.0338, 0.0338, 0.0361, 0.0432, 0.0359, 0.0318, 0.0329], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-01 23:59:59,974 INFO [train.py:903] (1/4) Epoch 13, batch 2450, loss[loss=0.2897, simple_loss=0.3632, pruned_loss=0.1081, over 18229.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3033, pruned_loss=0.07726, over 3810240.69 frames. ], batch size: 83, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:00:05,004 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7160, 4.0684, 4.3436, 4.3198, 1.9617, 4.0119, 3.6463, 4.0611], + device='cuda:1'), covar=tensor([0.1368, 0.1342, 0.0545, 0.0584, 0.4911, 0.0866, 0.0547, 0.0974], + device='cuda:1'), in_proj_covar=tensor([0.0696, 0.0624, 0.0830, 0.0707, 0.0750, 0.0574, 0.0499, 0.0758], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 00:00:51,591 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84426.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:03,750 INFO [train.py:903] (1/4) Epoch 13, batch 2500, loss[loss=0.2273, simple_loss=0.3202, pruned_loss=0.06721, over 19654.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3032, pruned_loss=0.07714, over 3812689.42 frames. 
], batch size: 59, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:01:33,210 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:38,066 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:00,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 5.326e+02 7.100e+02 9.098e+02 1.657e+03, threshold=1.420e+03, percent-clipped=3.0 +2023-04-02 00:02:06,433 INFO [train.py:903] (1/4) Epoch 13, batch 2550, loss[loss=0.2327, simple_loss=0.3157, pruned_loss=0.07484, over 19630.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3049, pruned_loss=0.07804, over 3808461.88 frames. ], batch size: 57, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:02:16,073 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84493.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:30,050 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2538, 1.1574, 1.1921, 1.4004, 1.1291, 1.3036, 1.3685, 1.2553], + device='cuda:1'), covar=tensor([0.0852, 0.1013, 0.1093, 0.0652, 0.0805, 0.0839, 0.0831, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0222, 0.0222, 0.0243, 0.0228, 0.0208, 0.0191, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 00:02:48,814 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84518.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:04,536 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 00:03:10,396 INFO [train.py:903] (1/4) Epoch 13, batch 2600, loss[loss=0.222, simple_loss=0.2909, pruned_loss=0.07661, over 19410.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3035, pruned_loss=0.07707, over 3816940.01 frames. ], batch size: 47, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:03:21,124 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:42,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:59,459 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:09,373 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.097e+02 6.359e+02 8.045e+02 2.004e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 00:04:13,180 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3123, 1.8814, 1.9842, 1.9761, 2.9708, 1.6937, 2.5544, 3.2066], + device='cuda:1'), covar=tensor([0.0563, 0.2126, 0.2181, 0.1601, 0.0721, 0.1996, 0.1787, 0.0485], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0344, 0.0355, 0.0324, 0.0347, 0.0332, 0.0342, 0.0366], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:04:15,154 INFO [train.py:903] (1/4) Epoch 13, batch 2650, loss[loss=0.1915, simple_loss=0.2681, pruned_loss=0.05741, over 18672.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.303, pruned_loss=0.07662, over 3808146.32 frames. 
], batch size: 41, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:04:30,342 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:34,944 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 00:05:17,498 INFO [train.py:903] (1/4) Epoch 13, batch 2700, loss[loss=0.179, simple_loss=0.26, pruned_loss=0.04898, over 19840.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3025, pruned_loss=0.07643, over 3803074.13 frames. ], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:05:36,196 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:08,572 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:14,002 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.394e+02 6.395e+02 8.456e+02 1.799e+03, threshold=1.279e+03, percent-clipped=4.0 +2023-04-02 00:06:16,785 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:21,012 INFO [train.py:903] (1/4) Epoch 13, batch 2750, loss[loss=0.2404, simple_loss=0.3137, pruned_loss=0.08355, over 19576.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3032, pruned_loss=0.07681, over 3791694.45 frames. ], batch size: 61, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:06:37,480 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:49,124 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:56,197 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:01,993 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:24,552 INFO [train.py:903] (1/4) Epoch 13, batch 2800, loss[loss=0.2646, simple_loss=0.3253, pruned_loss=0.102, over 13867.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3031, pruned_loss=0.077, over 3811318.81 frames. ], batch size: 135, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:07:34,084 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:08:22,668 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.423e+02 6.884e+02 8.957e+02 1.568e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-02 00:08:29,878 INFO [train.py:903] (1/4) Epoch 13, batch 2850, loss[loss=0.2351, simple_loss=0.3202, pruned_loss=0.07505, over 19651.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3042, pruned_loss=0.07726, over 3817291.05 frames. 
], batch size: 58, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:04,281 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:11,184 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84818.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:26,619 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:33,381 INFO [train.py:903] (1/4) Epoch 13, batch 2900, loss[loss=0.2336, simple_loss=0.3109, pruned_loss=0.07822, over 19613.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3043, pruned_loss=0.07689, over 3816690.50 frames. ], batch size: 57, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:33,412 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 00:09:38,690 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5136, 4.0349, 4.2326, 4.2206, 1.6744, 3.9680, 3.5128, 3.8923], + device='cuda:1'), covar=tensor([0.1434, 0.0875, 0.0609, 0.0605, 0.5056, 0.0698, 0.0601, 0.1110], + device='cuda:1'), in_proj_covar=tensor([0.0697, 0.0620, 0.0832, 0.0697, 0.0749, 0.0570, 0.0496, 0.0757], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 00:09:58,743 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:10:21,107 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 00:10:31,984 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.833e+02 5.191e+02 6.710e+02 8.572e+02 2.238e+03, threshold=1.342e+03, percent-clipped=4.0 +2023-04-02 00:10:38,026 INFO [train.py:903] (1/4) Epoch 13, batch 2950, loss[loss=0.1871, simple_loss=0.2593, pruned_loss=0.05744, over 19709.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3036, pruned_loss=0.07637, over 3826041.16 frames. ], batch size: 45, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:10:40,473 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:00,938 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3400, 2.1604, 1.9153, 1.6680, 1.4821, 1.7334, 0.3735, 1.2166], + device='cuda:1'), covar=tensor([0.0445, 0.0454, 0.0357, 0.0691, 0.0981, 0.0757, 0.1047, 0.0800], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0334, 0.0335, 0.0357, 0.0430, 0.0358, 0.0316, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 00:11:39,702 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:45,163 INFO [train.py:903] (1/4) Epoch 13, batch 3000, loss[loss=0.2146, simple_loss=0.2952, pruned_loss=0.06697, over 19777.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3021, pruned_loss=0.07527, over 3842542.14 frames. ], batch size: 54, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:11:45,163 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 00:12:00,827 INFO [train.py:937] (1/4) Epoch 13, validation: loss=0.1754, simple_loss=0.276, pruned_loss=0.03742, over 944034.00 frames. 
+2023-04-02 00:12:00,828 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 00:12:05,791 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 00:12:29,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 00:12:29,873 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84957.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:43,910 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:44,056 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:59,474 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.901e+02 6.331e+02 8.447e+02 1.208e+03, threshold=1.266e+03, percent-clipped=0.0 +2023-04-02 00:13:05,445 INFO [train.py:903] (1/4) Epoch 13, batch 3050, loss[loss=0.2262, simple_loss=0.3051, pruned_loss=0.07365, over 18077.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3018, pruned_loss=0.07512, over 3841541.89 frames. ], batch size: 83, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:13:17,137 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84994.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:18,168 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84995.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:26,452 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:14:09,067 INFO [train.py:903] (1/4) Epoch 13, batch 3100, loss[loss=0.2884, simple_loss=0.3445, pruned_loss=0.1161, over 19156.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3029, pruned_loss=0.07639, over 3840861.68 frames. ], batch size: 69, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:14:50,637 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:05,349 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.118e+02 6.727e+02 8.323e+02 1.616e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-02 00:15:11,227 INFO [train.py:903] (1/4) Epoch 13, batch 3150, loss[loss=0.2308, simple_loss=0.3115, pruned_loss=0.07502, over 19642.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.302, pruned_loss=0.07592, over 3819445.97 frames. ], batch size: 58, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:15:20,616 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:38,368 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 00:15:41,864 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85110.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:00,119 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:09,357 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:12,577 INFO [train.py:903] (1/4) Epoch 13, batch 3200, loss[loss=0.2178, simple_loss=0.3059, pruned_loss=0.06483, over 19537.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3024, pruned_loss=0.07644, over 3808714.85 frames. ], batch size: 56, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:16:46,249 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:17:10,847 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.817e+02 5.513e+02 6.647e+02 8.266e+02 1.326e+03, threshold=1.329e+03, percent-clipped=0.0 +2023-04-02 00:17:16,618 INFO [train.py:903] (1/4) Epoch 13, batch 3250, loss[loss=0.2329, simple_loss=0.3133, pruned_loss=0.07625, over 18419.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3022, pruned_loss=0.07639, over 3804554.71 frames. ], batch size: 84, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:18:20,663 INFO [train.py:903] (1/4) Epoch 13, batch 3300, loss[loss=0.2169, simple_loss=0.2904, pruned_loss=0.07172, over 19687.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3015, pruned_loss=0.07575, over 3806166.84 frames. ], batch size: 53, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:18:21,894 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 00:18:47,654 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:12,446 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:17,833 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 5.269e+02 6.447e+02 8.165e+02 1.494e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 00:19:19,464 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:22,455 INFO [train.py:903] (1/4) Epoch 13, batch 3350, loss[loss=0.2231, simple_loss=0.3074, pruned_loss=0.06938, over 19661.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3014, pruned_loss=0.07572, over 3802565.74 frames. ], batch size: 58, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:19:57,318 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:20:24,670 INFO [train.py:903] (1/4) Epoch 13, batch 3400, loss[loss=0.2147, simple_loss=0.2874, pruned_loss=0.07103, over 19586.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3011, pruned_loss=0.07549, over 3816165.85 frames. 
], batch size: 52, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:20:42,512 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1215, 3.3276, 2.0278, 2.1433, 2.9510, 1.8731, 1.6321, 2.0610], + device='cuda:1'), covar=tensor([0.1055, 0.0495, 0.0876, 0.0732, 0.0456, 0.1004, 0.0776, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0301, 0.0323, 0.0247, 0.0236, 0.0317, 0.0283, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:20:42,590 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2163, 2.2461, 2.3238, 3.1099, 2.2162, 3.0771, 2.5877, 2.1060], + device='cuda:1'), covar=tensor([0.3760, 0.3370, 0.1609, 0.2049, 0.3929, 0.1544, 0.3870, 0.2892], + device='cuda:1'), in_proj_covar=tensor([0.0806, 0.0835, 0.0658, 0.0890, 0.0793, 0.0718, 0.0794, 0.0718], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 00:21:03,104 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:22,545 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.371e+02 6.826e+02 8.500e+02 2.504e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-02 00:21:27,160 INFO [train.py:903] (1/4) Epoch 13, batch 3450, loss[loss=0.2439, simple_loss=0.3164, pruned_loss=0.08574, over 19777.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3025, pruned_loss=0.07607, over 3828909.83 frames. ], batch size: 56, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:21:30,637 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 00:21:34,245 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85391.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:47,609 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5765, 1.4442, 1.4196, 1.8296, 1.6046, 1.8943, 1.8150, 1.7046], + device='cuda:1'), covar=tensor([0.0765, 0.0936, 0.0994, 0.0832, 0.0800, 0.0662, 0.0877, 0.0667], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0225, 0.0225, 0.0245, 0.0231, 0.0212, 0.0193, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 00:22:19,367 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:22:28,797 INFO [train.py:903] (1/4) Epoch 13, batch 3500, loss[loss=0.1955, simple_loss=0.2777, pruned_loss=0.05664, over 19619.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3041, pruned_loss=0.07688, over 3827311.31 frames. ], batch size: 50, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:22:42,093 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:10,409 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:19,014 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-02 00:23:20,667 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:27,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.339e+02 6.675e+02 8.042e+02 1.644e+03, threshold=1.335e+03, percent-clipped=3.0 +2023-04-02 00:23:31,545 INFO [train.py:903] (1/4) Epoch 13, batch 3550, loss[loss=0.2509, simple_loss=0.3099, pruned_loss=0.09591, over 19785.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3027, pruned_loss=0.07599, over 3842950.03 frames. ], batch size: 48, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:23:50,557 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85501.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:31,228 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:34,229 INFO [train.py:903] (1/4) Epoch 13, batch 3600, loss[loss=0.2492, simple_loss=0.3213, pruned_loss=0.08861, over 19683.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3033, pruned_loss=0.07638, over 3841485.07 frames. ], batch size: 59, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:24:50,350 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:03,563 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85558.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:33,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.969e+02 7.367e+02 9.197e+02 1.857e+03, threshold=1.473e+03, percent-clipped=2.0 +2023-04-02 00:25:36,786 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:38,622 INFO [train.py:903] (1/4) Epoch 13, batch 3650, loss[loss=0.2201, simple_loss=0.3074, pruned_loss=0.06645, over 19610.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3026, pruned_loss=0.07623, over 3833057.57 frames. ], batch size: 57, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:25:46,148 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:26:41,870 INFO [train.py:903] (1/4) Epoch 13, batch 3700, loss[loss=0.2235, simple_loss=0.2898, pruned_loss=0.07856, over 19271.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3022, pruned_loss=0.07588, over 3849449.76 frames. ], batch size: 44, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:26:51,237 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:41,800 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.337e+02 6.385e+02 1.025e+03 1.989e+03, threshold=1.277e+03, percent-clipped=4.0 +2023-04-02 00:27:44,633 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:46,548 INFO [train.py:903] (1/4) Epoch 13, batch 3750, loss[loss=0.2452, simple_loss=0.3299, pruned_loss=0.08027, over 19659.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3023, pruned_loss=0.07562, over 3849511.33 frames. 
], batch size: 60, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:28:14,946 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:28:48,157 INFO [train.py:903] (1/4) Epoch 13, batch 3800, loss[loss=0.2142, simple_loss=0.2943, pruned_loss=0.06706, over 19666.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3025, pruned_loss=0.07589, over 3846811.78 frames. ], batch size: 58, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:14,351 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6560, 4.2031, 2.7163, 3.7194, 1.1485, 4.0482, 3.9787, 4.1126], + device='cuda:1'), covar=tensor([0.0609, 0.0875, 0.1950, 0.0760, 0.3739, 0.0706, 0.0849, 0.1026], + device='cuda:1'), in_proj_covar=tensor([0.0446, 0.0375, 0.0449, 0.0323, 0.0388, 0.0383, 0.0373, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:29:20,860 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 00:29:44,889 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 5.587e+02 7.205e+02 9.194e+02 2.888e+03, threshold=1.441e+03, percent-clipped=8.0 +2023-04-02 00:29:50,625 INFO [train.py:903] (1/4) Epoch 13, batch 3850, loss[loss=0.2275, simple_loss=0.3094, pruned_loss=0.07281, over 19698.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3018, pruned_loss=0.07532, over 3846257.72 frames. ], batch size: 59, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:56,228 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:29:56,576 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9040, 1.9853, 2.1725, 2.6052, 1.8771, 2.4744, 2.3144, 2.0375], + device='cuda:1'), covar=tensor([0.3518, 0.3168, 0.1442, 0.1781, 0.3330, 0.1506, 0.3534, 0.2616], + device='cuda:1'), in_proj_covar=tensor([0.0812, 0.0846, 0.0661, 0.0899, 0.0797, 0.0723, 0.0800, 0.0722], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 00:30:53,507 INFO [train.py:903] (1/4) Epoch 13, batch 3900, loss[loss=0.2431, simple_loss=0.3125, pruned_loss=0.08682, over 19740.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3021, pruned_loss=0.07537, over 3838383.86 frames. 
], batch size: 63, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:31:00,072 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:06,824 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85845.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:10,504 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:30,213 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:40,468 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:49,440 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3319, 3.0161, 2.2289, 2.7220, 0.9941, 2.9748, 2.8527, 2.9408], + device='cuda:1'), covar=tensor([0.1065, 0.1441, 0.2008, 0.1034, 0.3538, 0.0949, 0.1006, 0.1300], + device='cuda:1'), in_proj_covar=tensor([0.0446, 0.0372, 0.0449, 0.0323, 0.0388, 0.0381, 0.0373, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:31:51,443 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.450e+02 6.738e+02 8.252e+02 2.044e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-02 00:31:57,034 INFO [train.py:903] (1/4) Epoch 13, batch 3950, loss[loss=0.2973, simple_loss=0.3531, pruned_loss=0.1207, over 17487.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.304, pruned_loss=0.07672, over 3829774.18 frames. ], batch size: 101, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:32:00,540 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 00:32:04,165 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85892.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:05,566 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:20,802 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:40,635 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 00:32:59,798 INFO [train.py:903] (1/4) Epoch 13, batch 4000, loss[loss=0.1785, simple_loss=0.2536, pruned_loss=0.0517, over 19731.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3033, pruned_loss=0.07676, over 3814577.59 frames. ], batch size: 45, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:33:14,755 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85948.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:33:30,816 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85960.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:46,142 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-02 00:33:48,603 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:56,481 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.280e+02 6.258e+02 8.022e+02 1.377e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 00:34:02,168 INFO [train.py:903] (1/4) Epoch 13, batch 4050, loss[loss=0.256, simple_loss=0.336, pruned_loss=0.08801, over 19582.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3037, pruned_loss=0.07682, over 3824314.16 frames. ], batch size: 61, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:34:03,494 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:06,944 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6883, 1.2354, 1.7057, 1.2676, 2.6162, 3.4814, 3.2419, 3.6783], + device='cuda:1'), covar=tensor([0.1249, 0.3579, 0.3178, 0.2192, 0.0533, 0.0193, 0.0212, 0.0212], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0300, 0.0331, 0.0251, 0.0219, 0.0163, 0.0206, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 00:34:30,787 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,116 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,324 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8704, 1.9455, 2.1508, 2.4932, 1.6835, 2.3771, 2.3061, 2.1084], + device='cuda:1'), covar=tensor([0.3624, 0.3388, 0.1635, 0.2102, 0.3693, 0.1787, 0.3969, 0.2788], + device='cuda:1'), in_proj_covar=tensor([0.0813, 0.0845, 0.0660, 0.0899, 0.0798, 0.0725, 0.0800, 0.0723], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 00:35:05,743 INFO [train.py:903] (1/4) Epoch 13, batch 4100, loss[loss=0.1957, simple_loss=0.2717, pruned_loss=0.0599, over 19477.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3032, pruned_loss=0.07702, over 3816636.92 frames. ], batch size: 49, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:35:07,466 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5811, 1.2933, 1.2998, 2.0322, 1.5798, 1.9345, 1.9731, 1.6102], + device='cuda:1'), covar=tensor([0.0794, 0.1008, 0.1108, 0.0761, 0.0857, 0.0674, 0.0810, 0.0712], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0224, 0.0225, 0.0243, 0.0232, 0.0210, 0.0191, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 00:35:41,500 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 00:35:48,369 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86070.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:04,017 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.943e+02 6.094e+02 7.890e+02 1.986e+03, threshold=1.219e+03, percent-clipped=5.0 +2023-04-02 00:36:08,782 INFO [train.py:903] (1/4) Epoch 13, batch 4150, loss[loss=0.1932, simple_loss=0.2751, pruned_loss=0.05566, over 19417.00 frames. 
], tot_loss[loss=0.226, simple_loss=0.3012, pruned_loss=0.07533, over 3831855.10 frames. ], batch size: 48, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:36:11,047 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:24,550 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 00:36:28,552 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:37:11,334 INFO [train.py:903] (1/4) Epoch 13, batch 4200, loss[loss=0.2009, simple_loss=0.281, pruned_loss=0.06047, over 19750.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3009, pruned_loss=0.07526, over 3830281.39 frames. ], batch size: 51, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:37:14,932 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 00:37:42,922 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:08,166 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.506e+02 6.651e+02 9.015e+02 1.898e+03, threshold=1.330e+03, percent-clipped=8.0 +2023-04-02 00:38:12,743 INFO [train.py:903] (1/4) Epoch 13, batch 4250, loss[loss=0.3048, simple_loss=0.3569, pruned_loss=0.1263, over 18061.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3016, pruned_loss=0.07596, over 3820433.85 frames. ], batch size: 83, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:38:13,162 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:31,027 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 00:38:43,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 00:38:52,364 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86216.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:15,648 INFO [train.py:903] (1/4) Epoch 13, batch 4300, loss[loss=0.3294, simple_loss=0.376, pruned_loss=0.1414, over 13193.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3024, pruned_loss=0.07658, over 3811624.66 frames. 
], batch size: 135, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:39:17,000 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86237.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:17,303 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6049, 2.5263, 1.8724, 1.6768, 2.3215, 1.5355, 1.5711, 2.0411], + device='cuda:1'), covar=tensor([0.1065, 0.0602, 0.0954, 0.0760, 0.0432, 0.1065, 0.0603, 0.0432], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0304, 0.0327, 0.0251, 0.0237, 0.0320, 0.0287, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:39:22,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86241.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:50,480 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:51,570 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86264.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:13,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.647e+02 5.244e+02 6.314e+02 8.271e+02 2.210e+03, threshold=1.263e+03, percent-clipped=7.0 +2023-04-02 00:40:15,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 00:40:19,672 INFO [train.py:903] (1/4) Epoch 13, batch 4350, loss[loss=0.2612, simple_loss=0.3442, pruned_loss=0.08912, over 19609.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3014, pruned_loss=0.07605, over 3828011.29 frames. ], batch size: 61, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:40:22,476 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:27,958 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86292.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:41:00,080 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86319.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:22,570 INFO [train.py:903] (1/4) Epoch 13, batch 4400, loss[loss=0.3222, simple_loss=0.3702, pruned_loss=0.1372, over 17321.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3017, pruned_loss=0.0762, over 3805673.88 frames. ], batch size: 101, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:41:42,060 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:49,347 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:50,086 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 00:41:58,576 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:00,827 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 00:42:20,364 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.626e+02 5.303e+02 6.614e+02 7.903e+02 1.976e+03, threshold=1.323e+03, percent-clipped=7.0 +2023-04-02 00:42:21,956 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:25,157 INFO [train.py:903] (1/4) Epoch 13, batch 4450, loss[loss=0.2312, simple_loss=0.2978, pruned_loss=0.08227, over 19604.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3022, pruned_loss=0.07651, over 3817983.18 frames. ], batch size: 50, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:42:52,213 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86407.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:43:00,999 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:21,647 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:25,451 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:27,361 INFO [train.py:903] (1/4) Epoch 13, batch 4500, loss[loss=0.1857, simple_loss=0.2609, pruned_loss=0.05521, over 19334.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3014, pruned_loss=0.0761, over 3813009.71 frames. ], batch size: 47, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:43:52,077 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7889, 1.7707, 1.6547, 1.4052, 1.3705, 1.4154, 0.2246, 0.7078], + device='cuda:1'), covar=tensor([0.0421, 0.0437, 0.0261, 0.0412, 0.0847, 0.0459, 0.0811, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0343, 0.0337, 0.0334, 0.0360, 0.0433, 0.0360, 0.0316, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 00:43:55,635 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:01,557 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:21,244 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:24,463 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.430e+02 6.459e+02 7.870e+02 1.871e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-02 00:44:29,968 INFO [train.py:903] (1/4) Epoch 13, batch 4550, loss[loss=0.2718, simple_loss=0.3413, pruned_loss=0.1011, over 19467.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3018, pruned_loss=0.07646, over 3798672.18 frames. ], batch size: 64, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:44:39,230 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 00:45:02,116 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 00:45:07,113 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5131, 4.0831, 2.5100, 3.6109, 0.9933, 3.8786, 3.8947, 4.0373], + device='cuda:1'), covar=tensor([0.0701, 0.1163, 0.2223, 0.0819, 0.4215, 0.0855, 0.0777, 0.1234], + device='cuda:1'), in_proj_covar=tensor([0.0448, 0.0374, 0.0450, 0.0323, 0.0388, 0.0382, 0.0373, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:45:16,170 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 00:45:25,007 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86529.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:33,588 INFO [train.py:903] (1/4) Epoch 13, batch 4600, loss[loss=0.2498, simple_loss=0.3199, pruned_loss=0.0898, over 19715.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3017, pruned_loss=0.07636, over 3790326.25 frames. ], batch size: 63, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:45:45,517 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:50,082 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8542, 1.5530, 1.2856, 1.6531, 1.6310, 1.4344, 1.3216, 1.6446], + device='cuda:1'), covar=tensor([0.0995, 0.1460, 0.1757, 0.1080, 0.1297, 0.0941, 0.1593, 0.0913], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0346, 0.0292, 0.0240, 0.0292, 0.0239, 0.0281, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:46:31,574 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.452e+02 6.634e+02 8.020e+02 1.693e+03, threshold=1.327e+03, percent-clipped=1.0 +2023-04-02 00:46:35,925 INFO [train.py:903] (1/4) Epoch 13, batch 4650, loss[loss=0.2577, simple_loss=0.3265, pruned_loss=0.09441, over 19756.00 frames. ], tot_loss[loss=0.226, simple_loss=0.301, pruned_loss=0.07545, over 3815193.18 frames. ], batch size: 54, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:46:51,396 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:52,260 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 00:46:57,295 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 00:47:03,884 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 00:47:04,001 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:04,266 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,054 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,329 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 00:47:38,166 INFO [train.py:903] (1/4) Epoch 13, batch 4700, loss[loss=0.1787, simple_loss=0.2684, pruned_loss=0.0445, over 19787.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3002, pruned_loss=0.07477, over 3821127.32 frames. 
], batch size: 56, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:47:53,761 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 00:48:04,337 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 00:48:05,982 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7682, 2.6555, 2.1241, 2.0385, 1.8880, 2.3412, 1.0270, 2.0336], + device='cuda:1'), covar=tensor([0.0416, 0.0458, 0.0505, 0.0731, 0.0796, 0.0686, 0.0948, 0.0709], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0335, 0.0332, 0.0357, 0.0432, 0.0357, 0.0314, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 00:48:13,311 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86663.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:35,954 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.083e+02 5.953e+02 7.054e+02 1.649e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-02 00:48:42,062 INFO [train.py:903] (1/4) Epoch 13, batch 4750, loss[loss=0.2204, simple_loss=0.3053, pruned_loss=0.06777, over 18836.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3004, pruned_loss=0.07462, over 3831203.42 frames. ], batch size: 74, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:48:45,761 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86688.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:48,063 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86690.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:48:58,169 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9684, 1.6155, 1.7959, 1.6336, 4.4038, 0.9421, 2.4613, 4.7064], + device='cuda:1'), covar=tensor([0.0409, 0.2577, 0.2661, 0.1929, 0.0795, 0.2738, 0.1384, 0.0206], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0345, 0.0356, 0.0325, 0.0352, 0.0332, 0.0346, 0.0365], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:49:07,679 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 00:49:10,081 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 00:49:15,252 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 00:49:18,525 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:27,255 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:31,061 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 00:49:43,088 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:43,756 INFO [train.py:903] (1/4) Epoch 13, batch 4800, loss[loss=0.2133, simple_loss=0.2802, pruned_loss=0.07322, over 17408.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3015, pruned_loss=0.07564, over 3824164.54 frames. 
], batch size: 38, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:50:12,865 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9060, 1.6099, 1.4876, 1.8603, 1.7779, 1.6326, 1.4849, 1.7487], + device='cuda:1'), covar=tensor([0.0910, 0.1417, 0.1381, 0.0960, 0.1065, 0.0510, 0.1224, 0.0705], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0348, 0.0293, 0.0241, 0.0294, 0.0240, 0.0282, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:50:12,873 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:41,010 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.270e+02 6.552e+02 8.488e+02 1.715e+03, threshold=1.310e+03, percent-clipped=7.0 +2023-04-02 00:50:45,010 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:45,815 INFO [train.py:903] (1/4) Epoch 13, batch 4850, loss[loss=0.27, simple_loss=0.3301, pruned_loss=0.1049, over 13460.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3015, pruned_loss=0.07567, over 3825860.80 frames. ], batch size: 136, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:51:06,440 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:06,710 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:08,593 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 00:51:14,209 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:18,131 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:28,829 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 00:51:31,701 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 00:51:37,261 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 00:51:37,288 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 00:51:38,813 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:45,264 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86832.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:47,470 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 00:51:49,795 INFO [train.py:903] (1/4) Epoch 13, batch 4900, loss[loss=0.2189, simple_loss=0.299, pruned_loss=0.0694, over 19855.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3015, pruned_loss=0.07539, over 3831488.45 frames. 
], batch size: 52, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:51:59,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2338, 2.0696, 1.9069, 2.6670, 1.8651, 2.4477, 2.5413, 2.4608], + device='cuda:1'), covar=tensor([0.0709, 0.0808, 0.0961, 0.0886, 0.0967, 0.0709, 0.0831, 0.0563], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0224, 0.0224, 0.0244, 0.0230, 0.0211, 0.0192, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 00:52:07,093 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 00:52:46,046 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 5.393e+02 6.845e+02 8.725e+02 1.892e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 00:52:50,738 INFO [train.py:903] (1/4) Epoch 13, batch 4950, loss[loss=0.2598, simple_loss=0.3355, pruned_loss=0.09207, over 19672.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3024, pruned_loss=0.07583, over 3835549.05 frames. ], batch size: 60, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:53:05,802 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86896.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:08,080 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 00:53:31,777 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:32,652 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 00:53:37,655 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:56,231 INFO [train.py:903] (1/4) Epoch 13, batch 5000, loss[loss=0.3288, simple_loss=0.3714, pruned_loss=0.1431, over 13781.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3006, pruned_loss=0.0752, over 3826493.19 frames. ], batch size: 136, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:54:03,395 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 00:54:03,562 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:14,010 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 00:54:49,281 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:52,319 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 5.650e+02 6.679e+02 8.615e+02 1.943e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 00:54:56,905 INFO [train.py:903] (1/4) Epoch 13, batch 5050, loss[loss=0.2205, simple_loss=0.3069, pruned_loss=0.06706, over 19735.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3009, pruned_loss=0.07554, over 3801333.10 frames. ], batch size: 63, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:55:19,939 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87004.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:55:32,447 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-02 00:55:38,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3597, 1.0796, 1.0560, 1.2741, 1.0475, 1.1876, 1.0800, 1.2263], + device='cuda:1'), covar=tensor([0.0983, 0.1223, 0.1368, 0.0836, 0.1124, 0.0569, 0.1269, 0.0784], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0349, 0.0292, 0.0239, 0.0295, 0.0241, 0.0281, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 00:55:41,826 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87020.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:56:01,002 INFO [train.py:903] (1/4) Epoch 13, batch 5100, loss[loss=0.2394, simple_loss=0.3022, pruned_loss=0.08832, over 19607.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3015, pruned_loss=0.076, over 3803435.20 frames. ], batch size: 50, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:56:07,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 00:56:11,369 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 00:56:18,085 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 00:56:28,876 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:56:58,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.089e+02 6.519e+02 8.713e+02 1.677e+03, threshold=1.304e+03, percent-clipped=2.0 +2023-04-02 00:57:03,280 INFO [train.py:903] (1/4) Epoch 13, batch 5150, loss[loss=0.2424, simple_loss=0.3185, pruned_loss=0.08311, over 18704.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3018, pruned_loss=0.07602, over 3799191.91 frames. ], batch size: 74, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:57:15,153 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 00:57:46,754 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5910, 1.6478, 1.8483, 1.9835, 1.3783, 1.8112, 1.9817, 1.7584], + device='cuda:1'), covar=tensor([0.3541, 0.2954, 0.1524, 0.1790, 0.3039, 0.1662, 0.3935, 0.2787], + device='cuda:1'), in_proj_covar=tensor([0.0815, 0.0844, 0.0661, 0.0894, 0.0795, 0.0728, 0.0796, 0.0728], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 00:57:48,508 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 00:58:07,739 INFO [train.py:903] (1/4) Epoch 13, batch 5200, loss[loss=0.2184, simple_loss=0.2856, pruned_loss=0.07562, over 19757.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3028, pruned_loss=0.07668, over 3805551.32 frames. ], batch size: 47, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:58:18,410 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-02 00:58:36,927 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2974, 3.8063, 3.9272, 3.9305, 1.5755, 3.7178, 3.2929, 3.6451], + device='cuda:1'), covar=tensor([0.1503, 0.0722, 0.0631, 0.0662, 0.5094, 0.0715, 0.0653, 0.1101], + device='cuda:1'), in_proj_covar=tensor([0.0705, 0.0622, 0.0829, 0.0704, 0.0748, 0.0578, 0.0499, 0.0768], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 00:58:54,674 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87173.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:58:57,969 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87176.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:01,460 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:04,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 00:59:05,687 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.198e+02 5.751e+02 6.794e+02 9.141e+02 2.147e+03, threshold=1.359e+03, percent-clipped=10.0 +2023-04-02 00:59:10,442 INFO [train.py:903] (1/4) Epoch 13, batch 5250, loss[loss=0.2143, simple_loss=0.2947, pruned_loss=0.06696, over 19522.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.302, pruned_loss=0.07562, over 3820771.51 frames. ], batch size: 56, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 00:59:25,754 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87198.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:30,575 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6312, 1.7201, 1.8742, 2.0686, 1.4972, 1.9313, 2.0290, 1.8064], + device='cuda:1'), covar=tensor([0.3529, 0.2743, 0.1580, 0.1667, 0.2916, 0.1553, 0.3821, 0.2729], + device='cuda:1'), in_proj_covar=tensor([0.0814, 0.0843, 0.0661, 0.0896, 0.0797, 0.0728, 0.0796, 0.0726], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 00:59:31,678 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:39,975 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-02 01:00:08,225 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5694, 4.0455, 4.2036, 4.1928, 1.6616, 3.9677, 3.4811, 3.9377], + device='cuda:1'), covar=tensor([0.1392, 0.0735, 0.0594, 0.0654, 0.5296, 0.0723, 0.0652, 0.1066], + device='cuda:1'), in_proj_covar=tensor([0.0718, 0.0634, 0.0845, 0.0720, 0.0763, 0.0590, 0.0509, 0.0781], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 01:00:13,783 INFO [train.py:903] (1/4) Epoch 13, batch 5300, loss[loss=0.2573, simple_loss=0.3269, pruned_loss=0.09387, over 19664.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3008, pruned_loss=0.0749, over 3813611.88 frames. 
], batch size: 58, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:00:18,341 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87240.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:27,910 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87248.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:28,740 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 01:01:10,978 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.140e+02 5.034e+02 6.226e+02 7.840e+02 1.929e+03, threshold=1.245e+03, percent-clipped=3.0 +2023-04-02 01:01:15,816 INFO [train.py:903] (1/4) Epoch 13, batch 5350, loss[loss=0.2466, simple_loss=0.3158, pruned_loss=0.08874, over 19679.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3013, pruned_loss=0.07531, over 3809471.39 frames. ], batch size: 60, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:01:24,133 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:01:49,521 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 01:01:51,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:08,434 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8319, 1.9569, 2.3192, 1.9702, 2.9426, 3.2004, 3.2156, 3.4696], + device='cuda:1'), covar=tensor([0.1243, 0.2720, 0.2415, 0.1819, 0.0794, 0.0374, 0.0182, 0.0264], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0303, 0.0333, 0.0253, 0.0224, 0.0164, 0.0209, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:02:21,303 INFO [train.py:903] (1/4) Epoch 13, batch 5400, loss[loss=0.2497, simple_loss=0.3214, pruned_loss=0.089, over 19453.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3009, pruned_loss=0.07551, over 3797821.33 frames. ], batch size: 64, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 01:02:24,127 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:44,281 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87355.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:52,486 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-02 01:02:55,503 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87364.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:03:01,391 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0380, 2.7719, 2.0404, 2.4980, 0.8070, 2.6811, 2.6572, 2.6511], + device='cuda:1'), covar=tensor([0.1296, 0.1442, 0.2087, 0.1029, 0.3611, 0.1043, 0.1026, 0.1506], + device='cuda:1'), in_proj_covar=tensor([0.0447, 0.0372, 0.0450, 0.0322, 0.0384, 0.0383, 0.0372, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:03:16,675 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87380.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:03:19,990 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 4.952e+02 6.479e+02 7.900e+02 1.578e+03, threshold=1.296e+03, percent-clipped=3.0 +2023-04-02 01:03:23,439 INFO [train.py:903] (1/4) Epoch 13, batch 5450, loss[loss=0.1802, simple_loss=0.2646, pruned_loss=0.04793, over 19852.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3002, pruned_loss=0.07546, over 3795907.61 frames. ], batch size: 52, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:25,756 INFO [train.py:903] (1/4) Epoch 13, batch 5500, loss[loss=0.2456, simple_loss=0.3206, pruned_loss=0.08532, over 19669.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.301, pruned_loss=0.07599, over 3801982.10 frames. ], batch size: 58, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:47,876 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 01:05:21,409 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:05:21,482 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87479.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:05:25,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 5.041e+02 6.443e+02 8.590e+02 2.643e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 01:05:29,325 INFO [train.py:903] (1/4) Epoch 13, batch 5550, loss[loss=0.2518, simple_loss=0.3284, pruned_loss=0.08762, over 19614.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3004, pruned_loss=0.07561, over 3802772.97 frames. ], batch size: 57, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:05:33,840 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 01:06:16,491 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:23,083 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 01:06:33,799 INFO [train.py:903] (1/4) Epoch 13, batch 5600, loss[loss=0.1996, simple_loss=0.2662, pruned_loss=0.06644, over 18661.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3006, pruned_loss=0.07562, over 3805260.76 frames. 
], batch size: 41, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:06:47,647 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:17,518 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87572.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:32,977 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.023e+02 6.222e+02 8.051e+02 1.328e+03, threshold=1.244e+03, percent-clipped=2.0 +2023-04-02 01:07:36,359 INFO [train.py:903] (1/4) Epoch 13, batch 5650, loss[loss=0.2364, simple_loss=0.3134, pruned_loss=0.07971, over 19563.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3004, pruned_loss=0.07531, over 3815079.23 frames. ], batch size: 61, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:07:43,445 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:56,303 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:01,785 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:06,361 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:23,538 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:24,450 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 01:08:38,517 INFO [train.py:903] (1/4) Epoch 13, batch 5700, loss[loss=0.1996, simple_loss=0.2787, pruned_loss=0.06025, over 19765.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3006, pruned_loss=0.075, over 3819464.42 frames. ], batch size: 54, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:08:39,009 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:09:36,755 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 4.997e+02 6.690e+02 8.841e+02 1.921e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 01:09:40,136 INFO [train.py:903] (1/4) Epoch 13, batch 5750, loss[loss=0.2283, simple_loss=0.2994, pruned_loss=0.07861, over 19504.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3016, pruned_loss=0.07583, over 3818292.91 frames. ], batch size: 49, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:09:41,277 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 01:09:49,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 01:09:55,217 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 01:10:07,765 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:27,831 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87724.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:41,827 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87735.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:10:42,503 INFO [train.py:903] (1/4) Epoch 13, batch 5800, loss[loss=0.3317, simple_loss=0.3761, pruned_loss=0.1437, over 13137.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3016, pruned_loss=0.0758, over 3813038.08 frames. ], batch size: 136, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:10:58,848 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2214, 2.0174, 1.8201, 1.7032, 1.5638, 1.6778, 0.4261, 1.0540], + device='cuda:1'), covar=tensor([0.0387, 0.0434, 0.0322, 0.0485, 0.0858, 0.0580, 0.0908, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0341, 0.0337, 0.0364, 0.0439, 0.0363, 0.0321, 0.0332], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:11:13,084 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87760.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:11:43,258 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.041e+02 6.242e+02 7.928e+02 1.581e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 01:11:46,975 INFO [train.py:903] (1/4) Epoch 13, batch 5850, loss[loss=0.2414, simple_loss=0.3163, pruned_loss=0.08324, over 19606.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.302, pruned_loss=0.07595, over 3833777.23 frames. ], batch size: 57, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:33,211 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87823.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:12:47,993 INFO [train.py:903] (1/4) Epoch 13, batch 5900, loss[loss=0.2242, simple_loss=0.2967, pruned_loss=0.07587, over 19739.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3026, pruned_loss=0.07627, over 3840930.52 frames. ], batch size: 51, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:49,016 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 01:12:51,614 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:10,535 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 01:13:27,496 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:47,011 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.447e+02 6.662e+02 8.353e+02 2.279e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 01:13:50,401 INFO [train.py:903] (1/4) Epoch 13, batch 5950, loss[loss=0.2743, simple_loss=0.339, pruned_loss=0.1049, over 18050.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3028, pruned_loss=0.07666, over 3828478.57 frames. 
], batch size: 83, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:13:57,836 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8059, 1.9070, 2.1170, 2.6337, 1.8541, 2.5039, 2.2848, 1.9062], + device='cuda:1'), covar=tensor([0.3760, 0.3150, 0.1554, 0.1833, 0.3476, 0.1563, 0.3840, 0.2921], + device='cuda:1'), in_proj_covar=tensor([0.0817, 0.0849, 0.0660, 0.0897, 0.0798, 0.0729, 0.0798, 0.0725], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 01:14:53,070 INFO [train.py:903] (1/4) Epoch 13, batch 6000, loss[loss=0.2412, simple_loss=0.317, pruned_loss=0.08276, over 19798.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3026, pruned_loss=0.07642, over 3829338.31 frames. ], batch size: 56, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:14:53,071 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 01:15:06,412 INFO [train.py:937] (1/4) Epoch 13, validation: loss=0.175, simple_loss=0.2755, pruned_loss=0.03726, over 944034.00 frames. +2023-04-02 01:15:06,415 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 01:15:09,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87938.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:17,514 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3094, 1.2949, 1.4303, 1.4381, 1.7694, 1.8606, 1.7420, 0.5098], + device='cuda:1'), covar=tensor([0.2158, 0.3947, 0.2439, 0.1808, 0.1468, 0.2073, 0.1313, 0.4204], + device='cuda:1'), in_proj_covar=tensor([0.0500, 0.0591, 0.0632, 0.0447, 0.0603, 0.0498, 0.0649, 0.0509], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 01:15:19,414 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:25,216 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:36,225 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0744, 1.3859, 1.7422, 1.2532, 2.8966, 3.8901, 3.5963, 4.0691], + device='cuda:1'), covar=tensor([0.1572, 0.3343, 0.3002, 0.2068, 0.0463, 0.0134, 0.0177, 0.0172], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0299, 0.0330, 0.0251, 0.0221, 0.0162, 0.0207, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:15:37,486 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2752, 1.6031, 2.0176, 1.5470, 3.1391, 4.8996, 4.7355, 5.0726], + device='cuda:1'), covar=tensor([0.1536, 0.3204, 0.2908, 0.1988, 0.0505, 0.0153, 0.0142, 0.0155], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0299, 0.0330, 0.0251, 0.0220, 0.0162, 0.0207, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:15:41,058 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:45,511 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:48,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-04-02 01:16:04,014 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,764 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.142e+02 6.649e+02 8.424e+02 2.027e+03, threshold=1.330e+03, percent-clipped=9.0 +2023-04-02 01:16:08,330 INFO [train.py:903] (1/4) Epoch 13, batch 6050, loss[loss=0.2509, simple_loss=0.3254, pruned_loss=0.08813, over 19338.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3027, pruned_loss=0.07667, over 3830317.69 frames. ], batch size: 66, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:16:12,356 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:34,842 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8380, 1.6469, 1.9961, 1.7491, 4.3459, 1.0374, 2.5474, 4.7510], + device='cuda:1'), covar=tensor([0.0375, 0.2542, 0.2353, 0.1873, 0.0727, 0.2763, 0.1374, 0.0166], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0345, 0.0356, 0.0325, 0.0352, 0.0332, 0.0343, 0.0367], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:17:12,302 INFO [train.py:903] (1/4) Epoch 13, batch 6100, loss[loss=0.2132, simple_loss=0.2846, pruned_loss=0.07092, over 19045.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3034, pruned_loss=0.07747, over 3821160.52 frames. ], batch size: 42, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:17:42,742 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:49,523 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:09,962 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:10,749 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.311e+02 6.775e+02 8.712e+02 1.953e+03, threshold=1.355e+03, percent-clipped=3.0 +2023-04-02 01:18:14,178 INFO [train.py:903] (1/4) Epoch 13, batch 6150, loss[loss=0.2258, simple_loss=0.2981, pruned_loss=0.07674, over 19666.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3031, pruned_loss=0.07725, over 3823147.82 frames. ], batch size: 53, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:18:18,117 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8590, 1.5150, 1.8511, 1.7263, 4.3453, 0.9795, 2.2684, 4.6117], + device='cuda:1'), covar=tensor([0.0384, 0.2738, 0.2845, 0.1918, 0.0757, 0.2729, 0.1498, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0345, 0.0357, 0.0324, 0.0352, 0.0331, 0.0343, 0.0367], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:18:24,944 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:38,419 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 01:18:54,520 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88118.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:56,866 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:19:15,200 INFO [train.py:903] (1/4) Epoch 13, batch 6200, loss[loss=0.2303, simple_loss=0.306, pruned_loss=0.07734, over 19676.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3029, pruned_loss=0.07667, over 3839636.97 frames. ], batch size: 53, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:19:31,807 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 01:20:13,326 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.409e+02 5.304e+02 6.722e+02 8.627e+02 2.252e+03, threshold=1.344e+03, percent-clipped=3.0 +2023-04-02 01:20:16,888 INFO [train.py:903] (1/4) Epoch 13, batch 6250, loss[loss=0.2339, simple_loss=0.3065, pruned_loss=0.08068, over 18720.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3027, pruned_loss=0.07651, over 3820771.54 frames. ], batch size: 74, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:20:29,076 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88194.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:20:44,732 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 01:20:58,217 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:20,745 INFO [train.py:903] (1/4) Epoch 13, batch 6300, loss[loss=0.215, simple_loss=0.2946, pruned_loss=0.06773, over 19664.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3033, pruned_loss=0.07674, over 3826023.61 frames. ], batch size: 55, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:21:23,571 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:32,847 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.45 vs. limit=5.0 +2023-04-02 01:21:52,901 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:22:18,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.052e+02 6.199e+02 7.712e+02 1.989e+03, threshold=1.240e+03, percent-clipped=5.0 +2023-04-02 01:22:21,896 INFO [train.py:903] (1/4) Epoch 13, batch 6350, loss[loss=0.2187, simple_loss=0.2861, pruned_loss=0.07559, over 19751.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.303, pruned_loss=0.07667, over 3835895.10 frames. 
], batch size: 46, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:22:32,648 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2360, 5.5573, 2.8571, 4.8522, 1.4476, 5.5261, 5.5766, 5.7277], + device='cuda:1'), covar=tensor([0.0348, 0.0885, 0.1986, 0.0649, 0.3707, 0.0560, 0.0611, 0.0939], + device='cuda:1'), in_proj_covar=tensor([0.0444, 0.0374, 0.0451, 0.0321, 0.0386, 0.0380, 0.0373, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:22:50,128 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88309.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:23:02,584 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:08,241 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:24,615 INFO [train.py:903] (1/4) Epoch 13, batch 6400, loss[loss=0.2297, simple_loss=0.3004, pruned_loss=0.07949, over 19763.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3027, pruned_loss=0.07608, over 3828487.18 frames. ], batch size: 54, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:23:27,236 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:31,697 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:38,213 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,506 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,567 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3767, 1.3838, 1.5997, 1.5713, 2.2060, 2.0731, 2.2556, 0.7490], + device='cuda:1'), covar=tensor([0.2092, 0.3794, 0.2289, 0.1734, 0.1417, 0.1922, 0.1251, 0.3996], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0592, 0.0629, 0.0448, 0.0603, 0.0499, 0.0645, 0.0505], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:24:16,116 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9134, 2.0409, 2.1761, 2.7091, 1.8589, 2.5844, 2.3074, 2.0339], + device='cuda:1'), covar=tensor([0.3845, 0.3140, 0.1591, 0.1905, 0.3588, 0.1579, 0.3873, 0.2799], + device='cuda:1'), in_proj_covar=tensor([0.0817, 0.0848, 0.0662, 0.0901, 0.0798, 0.0733, 0.0800, 0.0726], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 01:24:18,746 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-02 01:24:22,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 5.639e+02 6.967e+02 8.699e+02 1.659e+03, threshold=1.393e+03, percent-clipped=7.0 +2023-04-02 01:24:25,784 INFO [train.py:903] (1/4) Epoch 13, batch 6450, loss[loss=0.2191, simple_loss=0.2999, pruned_loss=0.06917, over 19787.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3028, pruned_loss=0.07637, over 3818445.72 frames. 
], batch size: 56, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:05,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 01:25:19,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7061, 1.8400, 1.9817, 2.3283, 1.6467, 2.2582, 2.1510, 1.8549], + device='cuda:1'), covar=tensor([0.3745, 0.3154, 0.1675, 0.1808, 0.3330, 0.1571, 0.3972, 0.2886], + device='cuda:1'), in_proj_covar=tensor([0.0813, 0.0845, 0.0661, 0.0898, 0.0796, 0.0729, 0.0797, 0.0722], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 01:25:28,931 INFO [train.py:903] (1/4) Epoch 13, batch 6500, loss[loss=0.2315, simple_loss=0.3117, pruned_loss=0.07566, over 19733.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3024, pruned_loss=0.07605, over 3825567.98 frames. ], batch size: 63, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:30,980 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 01:26:00,488 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:26:26,785 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.190e+02 6.145e+02 7.751e+02 1.614e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 01:26:30,115 INFO [train.py:903] (1/4) Epoch 13, batch 6550, loss[loss=0.2743, simple_loss=0.3245, pruned_loss=0.1121, over 19113.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3017, pruned_loss=0.07614, over 3830034.30 frames. ], batch size: 42, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:27:11,760 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1279, 1.7958, 1.4562, 1.2326, 1.6542, 1.1471, 1.0882, 1.6482], + device='cuda:1'), covar=tensor([0.0716, 0.0745, 0.0940, 0.0656, 0.0455, 0.1085, 0.0571, 0.0360], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0304, 0.0326, 0.0249, 0.0239, 0.0321, 0.0293, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:27:31,536 INFO [train.py:903] (1/4) Epoch 13, batch 6600, loss[loss=0.2674, simple_loss=0.3295, pruned_loss=0.1026, over 19528.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3015, pruned_loss=0.07584, over 3829925.77 frames. ], batch size: 56, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:28:21,382 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:28:28,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 5.175e+02 6.632e+02 9.239e+02 1.861e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-04-02 01:28:32,108 INFO [train.py:903] (1/4) Epoch 13, batch 6650, loss[loss=0.1941, simple_loss=0.2658, pruned_loss=0.06116, over 19737.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3026, pruned_loss=0.07662, over 3830728.89 frames. ], batch size: 46, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:34,640 INFO [train.py:903] (1/4) Epoch 13, batch 6700, loss[loss=0.2261, simple_loss=0.3032, pruned_loss=0.07456, over 19539.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3035, pruned_loss=0.07706, over 3825503.74 frames. 
], batch size: 56, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:55,526 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88653.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:30:29,681 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 5.222e+02 6.682e+02 8.627e+02 1.913e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-02 01:30:33,147 INFO [train.py:903] (1/4) Epoch 13, batch 6750, loss[loss=0.1952, simple_loss=0.2648, pruned_loss=0.06279, over 19730.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3026, pruned_loss=0.07607, over 3841142.82 frames. ], batch size: 47, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:31:28,207 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9000, 2.6772, 1.7903, 1.9670, 1.6328, 1.9891, 0.9787, 1.7812], + device='cuda:1'), covar=tensor([0.0824, 0.0721, 0.0752, 0.1257, 0.1362, 0.1370, 0.1300, 0.1192], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0337, 0.0331, 0.0358, 0.0431, 0.0357, 0.0316, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:31:31,217 INFO [train.py:903] (1/4) Epoch 13, batch 6800, loss[loss=0.2309, simple_loss=0.3065, pruned_loss=0.07764, over 19756.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3012, pruned_loss=0.07532, over 3837798.71 frames. ], batch size: 54, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:32:17,356 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 01:32:18,469 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 01:32:21,842 INFO [train.py:903] (1/4) Epoch 14, batch 0, loss[loss=0.3343, simple_loss=0.3877, pruned_loss=0.1404, over 19677.00 frames. ], tot_loss[loss=0.3343, simple_loss=0.3877, pruned_loss=0.1404, over 19677.00 frames. ], batch size: 60, lr: 6.05e-03, grad_scale: 8.0 +2023-04-02 01:32:21,842 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 01:32:33,653 INFO [train.py:937] (1/4) Epoch 14, validation: loss=0.1763, simple_loss=0.2772, pruned_loss=0.03773, over 944034.00 frames. +2023-04-02 01:32:33,654 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 01:32:41,851 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88768.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:32:49,788 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 01:32:59,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.998e+02 6.377e+02 7.989e+02 1.719e+03, threshold=1.275e+03, percent-clipped=2.0 +2023-04-02 01:33:10,456 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 01:33:40,050 INFO [train.py:903] (1/4) Epoch 14, batch 50, loss[loss=0.1969, simple_loss=0.2845, pruned_loss=0.05464, over 19574.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2968, pruned_loss=0.07171, over 871526.77 frames. ], batch size: 52, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:01,775 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:15,362 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-02 01:34:34,177 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:40,589 INFO [train.py:903] (1/4) Epoch 14, batch 100, loss[loss=0.2156, simple_loss=0.2954, pruned_loss=0.06794, over 19667.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3012, pruned_loss=0.07589, over 1517255.67 frames. ], batch size: 60, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:51,064 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 01:35:02,778 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.271e+02 6.621e+02 8.700e+02 2.391e+03, threshold=1.324e+03, percent-clipped=10.0 +2023-04-02 01:35:41,058 INFO [train.py:903] (1/4) Epoch 14, batch 150, loss[loss=0.2177, simple_loss=0.2977, pruned_loss=0.06891, over 19740.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3021, pruned_loss=0.07693, over 2018696.70 frames. ], batch size: 63, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:35:53,911 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1461, 1.8825, 1.9801, 2.7963, 1.9596, 2.5410, 2.4743, 2.4701], + device='cuda:1'), covar=tensor([0.0742, 0.0845, 0.0959, 0.0827, 0.0860, 0.0668, 0.0918, 0.0555], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0223, 0.0225, 0.0244, 0.0228, 0.0210, 0.0193, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 01:36:00,288 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-02 01:36:29,646 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0276, 1.3025, 1.7388, 0.6088, 2.1086, 2.4804, 2.1636, 2.5308], + device='cuda:1'), covar=tensor([0.1386, 0.3244, 0.2650, 0.2298, 0.0489, 0.0247, 0.0342, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0301, 0.0331, 0.0251, 0.0220, 0.0165, 0.0207, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 01:36:38,770 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 01:36:39,936 INFO [train.py:903] (1/4) Epoch 14, batch 200, loss[loss=0.2126, simple_loss=0.2986, pruned_loss=0.06336, over 19570.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3024, pruned_loss=0.07638, over 2418987.98 frames. ], batch size: 61, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:03,950 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 5.170e+02 6.545e+02 8.695e+02 1.666e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 01:37:37,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1744, 3.7528, 3.8681, 3.8327, 1.4536, 3.6174, 3.1392, 3.5686], + device='cuda:1'), covar=tensor([0.1650, 0.0966, 0.0672, 0.0770, 0.5595, 0.0853, 0.0726, 0.1268], + device='cuda:1'), in_proj_covar=tensor([0.0707, 0.0631, 0.0835, 0.0715, 0.0755, 0.0583, 0.0503, 0.0765], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 01:37:41,109 INFO [train.py:903] (1/4) Epoch 14, batch 250, loss[loss=0.2031, simple_loss=0.2846, pruned_loss=0.06082, over 19689.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3012, pruned_loss=0.07565, over 2732164.66 frames. 
], batch size: 53, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:53,625 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89024.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:17,641 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:38:22,442 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89049.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:43,483 INFO [train.py:903] (1/4) Epoch 14, batch 300, loss[loss=0.2025, simple_loss=0.2781, pruned_loss=0.06341, over 19611.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3019, pruned_loss=0.07561, over 2960855.87 frames. ], batch size: 50, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:38:52,818 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1071, 5.1151, 5.9825, 5.8932, 2.0889, 5.5641, 4.6130, 5.5668], + device='cuda:1'), covar=tensor([0.1369, 0.0677, 0.0495, 0.0558, 0.5128, 0.0600, 0.0607, 0.1074], + device='cuda:1'), in_proj_covar=tensor([0.0707, 0.0630, 0.0836, 0.0715, 0.0753, 0.0583, 0.0504, 0.0764], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 01:39:05,428 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.830e+02 5.923e+02 7.544e+02 2.111e+03, threshold=1.185e+03, percent-clipped=1.0 +2023-04-02 01:39:45,141 INFO [train.py:903] (1/4) Epoch 14, batch 350, loss[loss=0.2459, simple_loss=0.3196, pruned_loss=0.08611, over 19264.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3014, pruned_loss=0.07557, over 3153031.94 frames. ], batch size: 66, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:39:47,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 01:40:29,861 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8377, 1.8708, 1.9973, 2.4694, 1.8554, 2.3475, 2.1749, 1.9212], + device='cuda:1'), covar=tensor([0.3263, 0.2753, 0.1346, 0.1561, 0.2824, 0.1313, 0.3070, 0.2283], + device='cuda:1'), in_proj_covar=tensor([0.0824, 0.0859, 0.0666, 0.0908, 0.0807, 0.0738, 0.0811, 0.0728], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 01:40:46,818 INFO [train.py:903] (1/4) Epoch 14, batch 400, loss[loss=0.1857, simple_loss=0.2512, pruned_loss=0.0601, over 19744.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.07538, over 3297962.65 frames. 
], batch size: 46, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:41:11,936 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.893e+02 5.942e+02 7.582e+02 1.529e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-02 01:41:16,774 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:41:36,346 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1215, 1.8563, 1.7278, 2.0325, 1.7865, 1.8730, 1.7854, 2.1138], + device='cuda:1'), covar=tensor([0.0886, 0.1510, 0.1450, 0.1002, 0.1382, 0.0488, 0.1206, 0.0623], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0353, 0.0297, 0.0240, 0.0298, 0.0243, 0.0285, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:41:47,614 INFO [train.py:903] (1/4) Epoch 14, batch 450, loss[loss=0.1859, simple_loss=0.2612, pruned_loss=0.05533, over 19738.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3013, pruned_loss=0.0753, over 3422310.84 frames. ], batch size: 47, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:42:19,961 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 01:42:20,947 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 01:42:51,695 INFO [train.py:903] (1/4) Epoch 14, batch 500, loss[loss=0.2504, simple_loss=0.317, pruned_loss=0.09187, over 19543.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.07534, over 3501569.75 frames. ], batch size: 56, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:43:04,791 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6642, 1.7242, 1.3673, 1.7701, 1.7902, 1.3905, 1.4136, 1.6325], + device='cuda:1'), covar=tensor([0.1159, 0.1383, 0.1666, 0.1027, 0.1153, 0.0760, 0.1501, 0.0865], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0350, 0.0295, 0.0239, 0.0295, 0.0241, 0.0285, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:43:13,299 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.086e+02 6.501e+02 8.394e+02 1.936e+03, threshold=1.300e+03, percent-clipped=6.0 +2023-04-02 01:43:51,222 INFO [train.py:903] (1/4) Epoch 14, batch 550, loss[loss=0.1956, simple_loss=0.2712, pruned_loss=0.05997, over 19469.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.301, pruned_loss=0.07526, over 3570637.99 frames. ], batch size: 49, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:44:05,920 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 01:44:50,881 INFO [train.py:903] (1/4) Epoch 14, batch 600, loss[loss=0.2453, simple_loss=0.3218, pruned_loss=0.08443, over 18691.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3007, pruned_loss=0.07498, over 3639262.51 frames. 
], batch size: 74, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:45:14,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.437e+02 6.463e+02 8.394e+02 1.645e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 01:45:23,619 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:27,423 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:32,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 01:45:52,140 INFO [train.py:903] (1/4) Epoch 14, batch 650, loss[loss=0.2305, simple_loss=0.308, pruned_loss=0.07655, over 19781.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2997, pruned_loss=0.07445, over 3684945.05 frames. ], batch size: 54, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:46:29,612 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89443.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:46:45,753 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89457.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:46:53,905 INFO [train.py:903] (1/4) Epoch 14, batch 700, loss[loss=0.2387, simple_loss=0.3123, pruned_loss=0.08249, over 19496.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3, pruned_loss=0.07495, over 3722445.43 frames. ], batch size: 64, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:47:21,067 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.092e+02 5.132e+02 6.716e+02 8.076e+02 1.293e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-02 01:47:44,531 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:47:59,820 INFO [train.py:903] (1/4) Epoch 14, batch 750, loss[loss=0.2227, simple_loss=0.3072, pruned_loss=0.06909, over 19663.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2995, pruned_loss=0.07457, over 3747475.84 frames. ], batch size: 55, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:48:14,759 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:19,167 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:30,280 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5227, 1.2363, 1.1732, 1.4089, 1.1230, 1.3157, 1.1705, 1.3689], + device='cuda:1'), covar=tensor([0.1101, 0.1235, 0.1518, 0.0974, 0.1212, 0.0586, 0.1412, 0.0785], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0294, 0.0241, 0.0284, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:48:48,362 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 01:49:01,549 INFO [train.py:903] (1/4) Epoch 14, batch 800, loss[loss=0.2309, simple_loss=0.303, pruned_loss=0.07943, over 19681.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3001, pruned_loss=0.07484, over 3763838.50 frames. ], batch size: 53, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:49:16,438 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 01:49:24,440 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.343e+02 6.523e+02 8.164e+02 1.688e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 01:50:01,790 INFO [train.py:903] (1/4) Epoch 14, batch 850, loss[loss=0.1918, simple_loss=0.2668, pruned_loss=0.05842, over 19476.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3002, pruned_loss=0.0747, over 3774665.56 frames. ], batch size: 49, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:50:38,092 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1731, 1.9843, 1.5589, 1.1688, 1.8088, 1.1210, 1.1237, 1.7740], + device='cuda:1'), covar=tensor([0.0949, 0.0745, 0.1079, 0.0963, 0.0535, 0.1295, 0.0748, 0.0440], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0307, 0.0328, 0.0250, 0.0240, 0.0326, 0.0297, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:50:42,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:50:55,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 01:51:03,552 INFO [train.py:903] (1/4) Epoch 14, batch 900, loss[loss=0.2259, simple_loss=0.3002, pruned_loss=0.07577, over 19572.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3003, pruned_loss=0.07478, over 3782097.58 frames. ], batch size: 61, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:51:29,677 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 4.983e+02 6.131e+02 7.467e+02 1.791e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 01:51:46,421 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:52:07,432 INFO [train.py:903] (1/4) Epoch 14, batch 950, loss[loss=0.2028, simple_loss=0.2837, pruned_loss=0.06094, over 19738.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3005, pruned_loss=0.07511, over 3764926.37 frames. ], batch size: 51, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:52:07,464 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 01:52:33,958 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:06,417 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:10,800 INFO [train.py:903] (1/4) Epoch 14, batch 1000, loss[loss=0.2233, simple_loss=0.3063, pruned_loss=0.07017, over 19544.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3008, pruned_loss=0.07555, over 3766031.78 frames. 
], batch size: 56, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:53:34,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.300e+02 6.720e+02 8.420e+02 1.635e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-02 01:53:36,340 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:38,545 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:59,171 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89801.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:54:02,260 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 01:54:13,790 INFO [train.py:903] (1/4) Epoch 14, batch 1050, loss[loss=0.2426, simple_loss=0.319, pruned_loss=0.08305, over 17991.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3014, pruned_loss=0.07572, over 3786867.96 frames. ], batch size: 83, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:54:46,483 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 01:55:02,232 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:16,495 INFO [train.py:903] (1/4) Epoch 14, batch 1100, loss[loss=0.2183, simple_loss=0.2941, pruned_loss=0.07127, over 19541.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3017, pruned_loss=0.07584, over 3803792.26 frames. ], batch size: 56, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:55:24,731 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,001 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89883.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,839 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 5.450e+02 6.738e+02 9.006e+02 2.173e+03, threshold=1.348e+03, percent-clipped=6.0 +2023-04-02 01:56:05,596 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:05,668 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:19,136 INFO [train.py:903] (1/4) Epoch 14, batch 1150, loss[loss=0.2382, simple_loss=0.321, pruned_loss=0.07772, over 19574.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3016, pruned_loss=0.07577, over 3814786.83 frames. 
], batch size: 61, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:56:22,882 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89916.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:56:39,062 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:54,410 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.1865, 3.7870, 2.5207, 3.3927, 1.1987, 3.6155, 3.5929, 3.6594], + device='cuda:1'), covar=tensor([0.0864, 0.1115, 0.2185, 0.0844, 0.3957, 0.0865, 0.0949, 0.1256], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0371, 0.0446, 0.0324, 0.0390, 0.0381, 0.0374, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 01:57:25,200 INFO [train.py:903] (1/4) Epoch 14, batch 1200, loss[loss=0.2365, simple_loss=0.3266, pruned_loss=0.07325, over 19339.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3017, pruned_loss=0.07556, over 3814496.94 frames. ], batch size: 66, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:57:49,394 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.889e+02 5.798e+02 7.146e+02 1.027e+03, threshold=1.160e+03, percent-clipped=0.0 +2023-04-02 01:57:51,931 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:55,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 01:58:28,849 INFO [train.py:903] (1/4) Epoch 14, batch 1250, loss[loss=0.22, simple_loss=0.2955, pruned_loss=0.07221, over 17419.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3036, pruned_loss=0.07694, over 3793059.70 frames. ], batch size: 101, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:04,877 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:59:31,266 INFO [train.py:903] (1/4) Epoch 14, batch 1300, loss[loss=0.2311, simple_loss=0.2969, pruned_loss=0.08267, over 19619.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3036, pruned_loss=0.07723, over 3798047.79 frames. ], batch size: 50, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:57,906 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 5.410e+02 6.769e+02 7.813e+02 2.183e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-02 02:00:25,742 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:00:33,598 INFO [train.py:903] (1/4) Epoch 14, batch 1350, loss[loss=0.2364, simple_loss=0.3166, pruned_loss=0.07809, over 19594.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3033, pruned_loss=0.07683, over 3802511.10 frames. ], batch size: 61, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:00:44,611 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 02:00:58,161 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:16,437 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:16,840 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. 
limit=2.0 +2023-04-02 02:01:22,827 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 02:01:28,085 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:29,296 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:35,838 INFO [train.py:903] (1/4) Epoch 14, batch 1400, loss[loss=0.1953, simple_loss=0.2761, pruned_loss=0.05721, over 19499.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3046, pruned_loss=0.07761, over 3800585.52 frames. ], batch size: 49, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:01:39,491 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6334, 1.6000, 1.2419, 1.5247, 1.5898, 1.0488, 0.9826, 1.4737], + device='cuda:1'), covar=tensor([0.1093, 0.1352, 0.1706, 0.1106, 0.1359, 0.1414, 0.1916, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0356, 0.0297, 0.0243, 0.0298, 0.0242, 0.0288, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:01:46,545 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90172.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:01:59,344 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:02:00,107 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.450e+02 7.234e+02 1.006e+03 2.886e+03, threshold=1.447e+03, percent-clipped=11.0 +2023-04-02 02:02:17,603 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90197.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:02:34,936 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 02:02:38,344 INFO [train.py:903] (1/4) Epoch 14, batch 1450, loss[loss=0.2566, simple_loss=0.3296, pruned_loss=0.09184, over 17398.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3041, pruned_loss=0.07711, over 3816977.32 frames. ], batch size: 101, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:02:53,328 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90227.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:12,051 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:22,147 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:38,187 INFO [train.py:903] (1/4) Epoch 14, batch 1500, loss[loss=0.2696, simple_loss=0.3338, pruned_loss=0.1027, over 19664.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3037, pruned_loss=0.07674, over 3824777.55 frames. 
], batch size: 60, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:03:42,138 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:04:03,121 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.072e+02 6.110e+02 7.921e+02 2.000e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 02:04:09,166 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2779, 3.8075, 3.8854, 3.8892, 1.5929, 3.6656, 3.1779, 3.6074], + device='cuda:1'), covar=tensor([0.1376, 0.0813, 0.0570, 0.0631, 0.4723, 0.0768, 0.0658, 0.1024], + device='cuda:1'), in_proj_covar=tensor([0.0700, 0.0624, 0.0827, 0.0710, 0.0746, 0.0576, 0.0499, 0.0764], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:04:23,042 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4983, 2.4172, 1.7129, 1.5193, 2.2076, 1.3633, 1.2174, 1.9418], + device='cuda:1'), covar=tensor([0.0992, 0.0600, 0.0937, 0.0772, 0.0455, 0.1160, 0.0792, 0.0473], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0308, 0.0327, 0.0251, 0.0238, 0.0327, 0.0298, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:04:39,089 INFO [train.py:903] (1/4) Epoch 14, batch 1550, loss[loss=0.2303, simple_loss=0.3112, pruned_loss=0.07471, over 19745.00 frames. ], tot_loss[loss=0.228, simple_loss=0.303, pruned_loss=0.07655, over 3805180.75 frames. ], batch size: 63, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:16,351 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:05:42,813 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8349, 1.3340, 1.0772, 0.9576, 1.2101, 0.9723, 0.9967, 1.2732], + device='cuda:1'), covar=tensor([0.0563, 0.0731, 0.1017, 0.0653, 0.0447, 0.1179, 0.0513, 0.0406], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0308, 0.0327, 0.0250, 0.0239, 0.0327, 0.0297, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:05:44,733 INFO [train.py:903] (1/4) Epoch 14, batch 1600, loss[loss=0.2017, simple_loss=0.2702, pruned_loss=0.06663, over 19751.00 frames. ], tot_loss[loss=0.228, simple_loss=0.303, pruned_loss=0.07647, over 3798015.07 frames. ], batch size: 47, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:51,698 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:02,629 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 02:06:08,370 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.288e+02 6.387e+02 7.705e+02 1.564e+03, threshold=1.277e+03, percent-clipped=2.0 +2023-04-02 02:06:46,512 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:47,278 INFO [train.py:903] (1/4) Epoch 14, batch 1650, loss[loss=0.2604, simple_loss=0.3348, pruned_loss=0.09297, over 19746.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3034, pruned_loss=0.07643, over 3804965.22 frames. 
], batch size: 63, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:15,894 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:07:47,910 INFO [train.py:903] (1/4) Epoch 14, batch 1700, loss[loss=0.2134, simple_loss=0.2889, pruned_loss=0.0689, over 19682.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3025, pruned_loss=0.07583, over 3810970.00 frames. ], batch size: 53, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:50,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2028, 2.2662, 2.3978, 3.1354, 2.2991, 2.9173, 2.6846, 2.2368], + device='cuda:1'), covar=tensor([0.3888, 0.3305, 0.1609, 0.2010, 0.3602, 0.1698, 0.3649, 0.2878], + device='cuda:1'), in_proj_covar=tensor([0.0817, 0.0852, 0.0665, 0.0893, 0.0797, 0.0737, 0.0800, 0.0729], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 02:08:03,287 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:13,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 5.354e+02 6.441e+02 8.114e+02 1.316e+03, threshold=1.288e+03, percent-clipped=1.0 +2023-04-02 02:08:19,178 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 02:08:23,205 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:25,327 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 02:08:49,500 INFO [train.py:903] (1/4) Epoch 14, batch 1750, loss[loss=0.2528, simple_loss=0.3336, pruned_loss=0.08602, over 19266.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3018, pruned_loss=0.07531, over 3810963.46 frames. ], batch size: 66, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:08:49,789 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:09:54,000 INFO [train.py:903] (1/4) Epoch 14, batch 1800, loss[loss=0.2173, simple_loss=0.3043, pruned_loss=0.06512, over 19688.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3015, pruned_loss=0.07473, over 3802995.36 frames. ], batch size: 59, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:10:17,898 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.06 vs. limit=5.0 +2023-04-02 02:10:18,177 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.749e+02 5.076e+02 6.157e+02 8.007e+02 1.318e+03, threshold=1.231e+03, percent-clipped=1.0 +2023-04-02 02:10:29,852 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:34,872 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:47,912 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:48,789 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 02:10:55,672 INFO [train.py:903] (1/4) Epoch 14, batch 1850, loss[loss=0.3079, simple_loss=0.3641, pruned_loss=0.1258, over 17620.00 frames. 
], tot_loss[loss=0.2256, simple_loss=0.3015, pruned_loss=0.07485, over 3792441.58 frames. ], batch size: 101, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:11:07,281 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:11:24,423 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 02:11:58,537 INFO [train.py:903] (1/4) Epoch 14, batch 1900, loss[loss=0.1767, simple_loss=0.2559, pruned_loss=0.04873, over 19316.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3017, pruned_loss=0.07508, over 3789361.01 frames. ], batch size: 44, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:12:07,977 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2333, 3.7556, 3.8761, 3.8874, 1.5064, 3.6459, 3.1969, 3.5982], + device='cuda:1'), covar=tensor([0.1551, 0.0878, 0.0592, 0.0670, 0.5163, 0.0813, 0.0705, 0.1089], + device='cuda:1'), in_proj_covar=tensor([0.0713, 0.0635, 0.0844, 0.0730, 0.0758, 0.0590, 0.0508, 0.0778], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:12:12,349 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 02:12:13,967 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4718, 2.3040, 1.6110, 1.5414, 2.1259, 1.2588, 1.2543, 1.8491], + device='cuda:1'), covar=tensor([0.0971, 0.0669, 0.0987, 0.0710, 0.0470, 0.1201, 0.0728, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0309, 0.0329, 0.0252, 0.0241, 0.0328, 0.0297, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:12:17,057 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 02:12:18,378 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7367, 4.2224, 4.4398, 4.4241, 1.6513, 4.1855, 3.6406, 4.1219], + device='cuda:1'), covar=tensor([0.1448, 0.0780, 0.0511, 0.0576, 0.5214, 0.0655, 0.0612, 0.1043], + device='cuda:1'), in_proj_covar=tensor([0.0713, 0.0634, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0777], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:12:24,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.065e+02 6.349e+02 7.558e+02 1.663e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-02 02:12:29,235 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0950, 5.1419, 5.9414, 5.8939, 2.1207, 5.5805, 4.7601, 5.5531], + device='cuda:1'), covar=tensor([0.1423, 0.0682, 0.0457, 0.0468, 0.5191, 0.0624, 0.0511, 0.1002], + device='cuda:1'), in_proj_covar=tensor([0.0713, 0.0633, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0778], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:12:43,821 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-02 02:12:48,698 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4084, 1.2763, 1.3368, 1.6896, 1.3716, 1.6160, 1.7220, 1.5262], + device='cuda:1'), covar=tensor([0.0873, 0.0989, 0.1070, 0.0811, 0.0840, 0.0774, 0.0781, 0.0732], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0222, 0.0223, 0.0243, 0.0230, 0.0209, 0.0192, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 02:12:54,512 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:12:59,237 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:13:00,240 INFO [train.py:903] (1/4) Epoch 14, batch 1950, loss[loss=0.1654, simple_loss=0.2464, pruned_loss=0.04222, over 19751.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3008, pruned_loss=0.07423, over 3812812.23 frames. ], batch size: 47, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:13:40,006 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9568, 1.9584, 1.7955, 1.6939, 1.6021, 1.7963, 0.8529, 1.3852], + device='cuda:1'), covar=tensor([0.0403, 0.0470, 0.0290, 0.0488, 0.0715, 0.0536, 0.0918, 0.0672], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0336, 0.0331, 0.0359, 0.0434, 0.0358, 0.0315, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 02:14:03,110 INFO [train.py:903] (1/4) Epoch 14, batch 2000, loss[loss=0.18, simple_loss=0.266, pruned_loss=0.04693, over 19769.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2996, pruned_loss=0.07394, over 3809594.40 frames. ], batch size: 49, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:14:22,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5745, 1.0963, 1.3702, 1.2045, 2.2078, 0.9731, 1.9400, 2.4198], + device='cuda:1'), covar=tensor([0.0631, 0.2681, 0.2614, 0.1622, 0.0825, 0.2045, 0.1038, 0.0450], + device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0347, 0.0360, 0.0329, 0.0351, 0.0335, 0.0346, 0.0368], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:14:27,794 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.207e+02 5.999e+02 7.179e+02 1.269e+03, threshold=1.200e+03, percent-clipped=0.0 +2023-04-02 02:14:59,970 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 02:15:05,783 INFO [train.py:903] (1/4) Epoch 14, batch 2050, loss[loss=0.2633, simple_loss=0.3281, pruned_loss=0.09925, over 12550.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3003, pruned_loss=0.07424, over 3812862.09 frames. ], batch size: 136, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:15:14,224 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90821.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:18,958 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 02:15:20,135 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-02 02:15:24,012 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90828.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:43,046 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 02:16:01,448 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:07,374 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:08,090 INFO [train.py:903] (1/4) Epoch 14, batch 2100, loss[loss=0.1934, simple_loss=0.2678, pruned_loss=0.05952, over 19749.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3011, pruned_loss=0.07505, over 3814518.90 frames. ], batch size: 47, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:16:12,799 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:32,689 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.435e+02 5.132e+02 6.276e+02 7.348e+02 1.970e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 02:16:37,030 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 02:16:38,585 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:46,388 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5838, 2.2697, 2.3493, 2.9158, 2.5378, 2.3611, 2.1807, 2.7892], + device='cuda:1'), covar=tensor([0.0849, 0.1587, 0.1197, 0.0845, 0.1197, 0.0437, 0.1135, 0.0543], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0354, 0.0296, 0.0240, 0.0295, 0.0240, 0.0288, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:16:58,534 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 02:17:09,966 INFO [train.py:903] (1/4) Epoch 14, batch 2150, loss[loss=0.2318, simple_loss=0.3096, pruned_loss=0.07698, over 19704.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3005, pruned_loss=0.07477, over 3825996.43 frames. ], batch size: 63, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:17:38,836 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:17:53,046 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-02 02:18:10,991 INFO [train.py:903] (1/4) Epoch 14, batch 2200, loss[loss=0.1762, simple_loss=0.2578, pruned_loss=0.04736, over 19467.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.07444, over 3838384.52 frames. 
], batch size: 49, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:18:12,477 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:18,922 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2672, 1.2358, 1.6266, 0.9943, 2.2314, 3.0353, 2.7491, 3.2445], + device='cuda:1'), covar=tensor([0.1432, 0.3562, 0.3117, 0.2350, 0.0576, 0.0208, 0.0246, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0302, 0.0332, 0.0254, 0.0225, 0.0166, 0.0206, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 02:18:23,353 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:35,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-02 02:18:35,323 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 5.132e+02 5.971e+02 7.684e+02 1.888e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-02 02:18:43,490 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90990.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:19:13,351 INFO [train.py:903] (1/4) Epoch 14, batch 2250, loss[loss=0.3188, simple_loss=0.3587, pruned_loss=0.1394, over 13641.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07436, over 3830068.74 frames. ], batch size: 136, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:14,195 INFO [train.py:903] (1/4) Epoch 14, batch 2300, loss[loss=0.1886, simple_loss=0.2694, pruned_loss=0.05389, over 19858.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3005, pruned_loss=0.07529, over 3834378.08 frames. ], batch size: 52, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:26,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 02:20:38,185 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.417e+02 6.829e+02 8.730e+02 1.535e+03, threshold=1.366e+03, percent-clipped=12.0 +2023-04-02 02:20:38,659 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:20:47,645 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0181, 1.7175, 1.6902, 1.9907, 1.8594, 1.7988, 1.6099, 2.0181], + device='cuda:1'), covar=tensor([0.0927, 0.1432, 0.1331, 0.0897, 0.1118, 0.0480, 0.1210, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0293, 0.0239, 0.0285, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:21:11,038 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:21:16,445 INFO [train.py:903] (1/4) Epoch 14, batch 2350, loss[loss=0.2035, simple_loss=0.2713, pruned_loss=0.0678, over 19790.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3012, pruned_loss=0.07551, over 3830079.37 frames. ], batch size: 47, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:21:57,694 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 02:22:13,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-02 02:22:14,983 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.69 vs. limit=5.0 +2023-04-02 02:22:17,783 INFO [train.py:903] (1/4) Epoch 14, batch 2400, loss[loss=0.1941, simple_loss=0.2829, pruned_loss=0.05268, over 19665.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.07534, over 3809198.04 frames. ], batch size: 53, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:22:42,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.466e+02 6.599e+02 8.174e+02 1.804e+03, threshold=1.320e+03, percent-clipped=5.0 +2023-04-02 02:22:52,549 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9518, 4.9605, 5.6819, 5.6522, 1.8329, 5.2994, 4.5387, 5.2547], + device='cuda:1'), covar=tensor([0.1310, 0.0853, 0.0539, 0.0544, 0.5719, 0.0646, 0.0596, 0.1290], + device='cuda:1'), in_proj_covar=tensor([0.0708, 0.0636, 0.0839, 0.0724, 0.0756, 0.0586, 0.0504, 0.0777], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:22:52,702 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:15,500 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91211.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:19,600 INFO [train.py:903] (1/4) Epoch 14, batch 2450, loss[loss=0.1905, simple_loss=0.2651, pruned_loss=0.05792, over 18173.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.0753, over 3821975.52 frames. ], batch size: 40, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:23:23,106 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:37,735 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91229.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:08,197 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:19,329 INFO [train.py:903] (1/4) Epoch 14, batch 2500, loss[loss=0.2013, simple_loss=0.2851, pruned_loss=0.05881, over 19679.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3001, pruned_loss=0.07442, over 3815869.66 frames. ], batch size: 53, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:24:32,853 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5116, 1.4330, 1.3496, 1.8679, 1.4726, 1.7561, 1.9238, 1.6639], + device='cuda:1'), covar=tensor([0.0870, 0.0947, 0.1097, 0.0789, 0.0841, 0.0767, 0.0814, 0.0696], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0241, 0.0228, 0.0209, 0.0191, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 02:24:35,618 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. 
limit=2.0 +2023-04-02 02:24:39,626 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3325, 3.9105, 2.5343, 3.5500, 1.0241, 3.8024, 3.7294, 3.8510], + device='cuda:1'), covar=tensor([0.0752, 0.1173, 0.2131, 0.0885, 0.4163, 0.0733, 0.0812, 0.1041], + device='cuda:1'), in_proj_covar=tensor([0.0452, 0.0380, 0.0454, 0.0330, 0.0392, 0.0386, 0.0378, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:24:42,799 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.074e+02 5.867e+02 6.968e+02 8.618e+02 1.617e+03, threshold=1.394e+03, percent-clipped=2.0 +2023-04-02 02:24:45,446 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2320, 1.5211, 2.0116, 1.6788, 3.0878, 4.8063, 4.6801, 5.2275], + device='cuda:1'), covar=tensor([0.1588, 0.3403, 0.2944, 0.1920, 0.0503, 0.0156, 0.0143, 0.0126], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0253, 0.0223, 0.0166, 0.0206, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 02:24:58,191 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2280, 2.0541, 1.9159, 2.3803, 2.6316, 1.8970, 1.8104, 2.3889], + device='cuda:1'), covar=tensor([0.1112, 0.1844, 0.1780, 0.1106, 0.1306, 0.0870, 0.1651, 0.0837], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0352, 0.0294, 0.0240, 0.0294, 0.0240, 0.0288, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:25:06,900 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91303.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:25:19,879 INFO [train.py:903] (1/4) Epoch 14, batch 2550, loss[loss=0.2795, simple_loss=0.3423, pruned_loss=0.1084, over 13164.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2993, pruned_loss=0.07391, over 3825881.81 frames. ], batch size: 136, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:25:33,155 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:26:12,376 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 02:26:19,133 INFO [train.py:903] (1/4) Epoch 14, batch 2600, loss[loss=0.182, simple_loss=0.2554, pruned_loss=0.05432, over 19760.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2988, pruned_loss=0.0737, over 3817609.03 frames. ], batch size: 46, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:26:44,915 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.557e+02 7.203e+02 9.059e+02 1.995e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-02 02:27:21,521 INFO [train.py:903] (1/4) Epoch 14, batch 2650, loss[loss=0.2554, simple_loss=0.3286, pruned_loss=0.09107, over 19312.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2991, pruned_loss=0.07424, over 3819504.33 frames. ], batch size: 66, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:27:41,110 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 02:28:11,797 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7646, 1.3953, 1.6054, 1.5449, 3.2907, 1.1277, 2.1682, 3.7410], + device='cuda:1'), covar=tensor([0.0445, 0.2537, 0.2714, 0.1708, 0.0707, 0.2401, 0.1415, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0346, 0.0363, 0.0327, 0.0355, 0.0338, 0.0347, 0.0369], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:28:18,967 INFO [train.py:903] (1/4) Epoch 14, batch 2700, loss[loss=0.1883, simple_loss=0.2634, pruned_loss=0.05659, over 19379.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3005, pruned_loss=0.07516, over 3820915.65 frames. ], batch size: 47, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:28:43,685 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.145e+02 6.524e+02 8.606e+02 2.089e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 02:29:13,059 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.41 vs. limit=5.0 +2023-04-02 02:29:20,022 INFO [train.py:903] (1/4) Epoch 14, batch 2750, loss[loss=0.1856, simple_loss=0.2756, pruned_loss=0.04785, over 19528.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3008, pruned_loss=0.07501, over 3831413.16 frames. ], batch size: 54, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:29:44,615 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-02 02:30:14,288 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91560.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:30:18,387 INFO [train.py:903] (1/4) Epoch 14, batch 2800, loss[loss=0.2509, simple_loss=0.3221, pruned_loss=0.08985, over 19531.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3005, pruned_loss=0.07489, over 3834328.63 frames. ], batch size: 54, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:30:41,211 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:30:44,142 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 4.991e+02 6.176e+02 8.428e+02 2.269e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 02:31:10,021 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:31:19,255 INFO [train.py:903] (1/4) Epoch 14, batch 2850, loss[loss=0.2336, simple_loss=0.3127, pruned_loss=0.07723, over 19527.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3023, pruned_loss=0.07583, over 3822534.08 frames. ], batch size: 56, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:31:58,529 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91647.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:32:20,011 INFO [train.py:903] (1/4) Epoch 14, batch 2900, loss[loss=0.1953, simple_loss=0.2768, pruned_loss=0.05688, over 19748.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3005, pruned_loss=0.07462, over 3822867.38 frames. ], batch size: 51, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:32:20,834 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 02:32:44,072 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.659e+02 5.126e+02 6.310e+02 7.919e+02 2.445e+03, threshold=1.262e+03, percent-clipped=4.0 +2023-04-02 02:33:15,675 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 02:33:19,571 INFO [train.py:903] (1/4) Epoch 14, batch 2950, loss[loss=0.1947, simple_loss=0.2728, pruned_loss=0.05829, over 19758.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.07439, over 3817837.77 frames. ], batch size: 51, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:17,799 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91762.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:34:19,630 INFO [train.py:903] (1/4) Epoch 14, batch 3000, loss[loss=0.2457, simple_loss=0.3136, pruned_loss=0.08886, over 19659.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3017, pruned_loss=0.07513, over 3826770.68 frames. ], batch size: 55, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:19,631 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 02:34:36,637 INFO [train.py:937] (1/4) Epoch 14, validation: loss=0.1742, simple_loss=0.2751, pruned_loss=0.03671, over 944034.00 frames. +2023-04-02 02:34:36,639 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 02:34:42,023 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 02:35:02,708 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.261e+02 6.370e+02 8.625e+02 1.479e+03, threshold=1.274e+03, percent-clipped=4.0 +2023-04-02 02:35:37,796 INFO [train.py:903] (1/4) Epoch 14, batch 3050, loss[loss=0.1992, simple_loss=0.2793, pruned_loss=0.05955, over 19579.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3015, pruned_loss=0.07494, over 3826890.55 frames. ], batch size: 52, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:36:37,007 INFO [train.py:903] (1/4) Epoch 14, batch 3100, loss[loss=0.2598, simple_loss=0.3269, pruned_loss=0.09633, over 19702.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3011, pruned_loss=0.0749, over 3814894.68 frames. ], batch size: 59, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:37:02,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.361e+02 6.622e+02 8.860e+02 2.580e+03, threshold=1.324e+03, percent-clipped=11.0 +2023-04-02 02:37:05,889 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1047, 1.8574, 1.4554, 1.2529, 1.5812, 1.1696, 1.2153, 1.6207], + device='cuda:1'), covar=tensor([0.0763, 0.0747, 0.1023, 0.0685, 0.0495, 0.1207, 0.0585, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0306, 0.0325, 0.0249, 0.0238, 0.0325, 0.0293, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:37:24,387 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:37:31,772 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:37:37,887 INFO [train.py:903] (1/4) Epoch 14, batch 3150, loss[loss=0.2333, simple_loss=0.3057, pruned_loss=0.08039, over 19684.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3013, pruned_loss=0.07531, over 3811304.39 frames. 
], batch size: 59, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:04,255 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 02:38:10,545 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4302, 1.4743, 1.7928, 1.6012, 2.5488, 2.2880, 2.6783, 1.1568], + device='cuda:1'), covar=tensor([0.2399, 0.4086, 0.2462, 0.1886, 0.1498, 0.2042, 0.1478, 0.4043], + device='cuda:1'), in_proj_covar=tensor([0.0505, 0.0591, 0.0637, 0.0451, 0.0602, 0.0504, 0.0651, 0.0507], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 02:38:37,357 INFO [train.py:903] (1/4) Epoch 14, batch 3200, loss[loss=0.2711, simple_loss=0.3408, pruned_loss=0.1007, over 17569.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3011, pruned_loss=0.07513, over 3812476.18 frames. ], batch size: 101, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:48,692 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:02,848 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.150e+02 6.206e+02 7.874e+02 1.849e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 02:39:20,927 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91999.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:39:39,710 INFO [train.py:903] (1/4) Epoch 14, batch 3250, loss[loss=0.2138, simple_loss=0.2932, pruned_loss=0.06723, over 19854.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3006, pruned_loss=0.07462, over 3814056.45 frames. ], batch size: 52, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:39:44,757 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:45,894 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92019.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:40:15,619 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:40:39,288 INFO [train.py:903] (1/4) Epoch 14, batch 3300, loss[loss=0.2395, simple_loss=0.3142, pruned_loss=0.08243, over 18792.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2993, pruned_loss=0.07372, over 3823266.80 frames. ], batch size: 74, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:40:44,816 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 02:40:52,239 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 02:41:04,997 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.974e+02 6.176e+02 7.406e+02 2.018e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-02 02:41:41,635 INFO [train.py:903] (1/4) Epoch 14, batch 3350, loss[loss=0.215, simple_loss=0.2962, pruned_loss=0.06689, over 19683.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2997, pruned_loss=0.0739, over 3820098.67 frames. ], batch size: 53, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:42:40,611 INFO [train.py:903] (1/4) Epoch 14, batch 3400, loss[loss=0.2278, simple_loss=0.309, pruned_loss=0.07325, over 19733.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2995, pruned_loss=0.0742, over 3819520.78 frames. 
], batch size: 63, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:42:46,780 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.1949, 4.2720, 4.7779, 4.7641, 2.7404, 4.4053, 4.0171, 4.4838], + device='cuda:1'), covar=tensor([0.1207, 0.2920, 0.0524, 0.0556, 0.4060, 0.0879, 0.0561, 0.0986], + device='cuda:1'), in_proj_covar=tensor([0.0717, 0.0647, 0.0857, 0.0738, 0.0763, 0.0594, 0.0517, 0.0782], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 02:43:05,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.934e+02 6.017e+02 7.496e+02 1.650e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-02 02:43:42,226 INFO [train.py:903] (1/4) Epoch 14, batch 3450, loss[loss=0.2229, simple_loss=0.2842, pruned_loss=0.08086, over 19759.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2999, pruned_loss=0.0745, over 3792335.17 frames. ], batch size: 45, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:43:44,677 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 02:44:14,167 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 02:44:28,715 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:44:40,674 INFO [train.py:903] (1/4) Epoch 14, batch 3500, loss[loss=0.2006, simple_loss=0.28, pruned_loss=0.06055, over 19597.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3005, pruned_loss=0.07516, over 3801570.60 frames. ], batch size: 52, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:44:42,973 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0227, 3.4770, 2.0001, 2.0527, 3.0909, 1.6558, 1.4490, 2.0894], + device='cuda:1'), covar=tensor([0.1198, 0.0460, 0.0954, 0.0774, 0.0453, 0.1093, 0.0847, 0.0661], + device='cuda:1'), in_proj_covar=tensor([0.0300, 0.0311, 0.0331, 0.0254, 0.0241, 0.0331, 0.0299, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:44:43,976 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2031, 5.5365, 3.1418, 4.8138, 1.2654, 5.5618, 5.5573, 5.6844], + device='cuda:1'), covar=tensor([0.0360, 0.0887, 0.1842, 0.0688, 0.3966, 0.0591, 0.0655, 0.0720], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0377, 0.0451, 0.0327, 0.0390, 0.0389, 0.0376, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:44:46,017 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3537, 2.0382, 2.2259, 3.0834, 2.3272, 2.6410, 2.7633, 2.5157], + device='cuda:1'), covar=tensor([0.0627, 0.0818, 0.0787, 0.0681, 0.0746, 0.0658, 0.0801, 0.0541], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0221, 0.0240, 0.0225, 0.0206, 0.0189, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 02:44:54,864 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92275.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:45:05,726 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.239e+02 6.377e+02 7.871e+02 1.606e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-02 02:45:24,250 INFO 
[zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92300.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:45:34,195 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5520, 2.3180, 1.6887, 1.6325, 2.1594, 1.2784, 1.3999, 1.9151], + device='cuda:1'), covar=tensor([0.1016, 0.0643, 0.0944, 0.0692, 0.0584, 0.1164, 0.0701, 0.0478], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0312, 0.0332, 0.0254, 0.0242, 0.0332, 0.0300, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:45:41,110 INFO [train.py:903] (1/4) Epoch 14, batch 3550, loss[loss=0.2206, simple_loss=0.2993, pruned_loss=0.07101, over 19477.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2997, pruned_loss=0.07472, over 3805087.35 frames. ], batch size: 64, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:45:44,521 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:15,216 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92343.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:46:39,900 INFO [train.py:903] (1/4) Epoch 14, batch 3600, loss[loss=0.1948, simple_loss=0.2841, pruned_loss=0.05272, over 19780.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3009, pruned_loss=0.07505, over 3801449.57 frames. ], batch size: 56, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:46:44,958 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92368.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:55,932 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9728, 3.6084, 2.3129, 3.2804, 1.0504, 3.4675, 3.3863, 3.5079], + device='cuda:1'), covar=tensor([0.0789, 0.1118, 0.2124, 0.0813, 0.3602, 0.0851, 0.0791, 0.1066], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0378, 0.0453, 0.0329, 0.0392, 0.0391, 0.0378, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:47:04,638 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.209e+02 6.321e+02 7.842e+02 1.520e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 02:47:40,754 INFO [train.py:903] (1/4) Epoch 14, batch 3650, loss[loss=0.1943, simple_loss=0.2738, pruned_loss=0.0574, over 19737.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3005, pruned_loss=0.07429, over 3819898.01 frames. ], batch size: 51, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:48:02,178 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92432.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:48:34,077 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92458.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:48:40,399 INFO [train.py:903] (1/4) Epoch 14, batch 3700, loss[loss=0.2427, simple_loss=0.3218, pruned_loss=0.08175, over 19789.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.301, pruned_loss=0.0748, over 3812735.77 frames. ], batch size: 56, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:48:51,194 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-04-02 02:49:05,824 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.888e+02 6.023e+02 8.004e+02 1.682e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 02:49:10,965 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7645, 1.5991, 1.5465, 2.3066, 1.6921, 2.0302, 2.1072, 1.8612], + device='cuda:1'), covar=tensor([0.0776, 0.0910, 0.1029, 0.0757, 0.0859, 0.0702, 0.0867, 0.0650], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0222, 0.0222, 0.0242, 0.0226, 0.0207, 0.0191, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 02:49:11,217 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 02:49:13,121 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8495, 1.5949, 1.8160, 1.5723, 4.3002, 1.0206, 2.3412, 4.6906], + device='cuda:1'), covar=tensor([0.0340, 0.2524, 0.2688, 0.1947, 0.0749, 0.2658, 0.1490, 0.0166], + device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0345, 0.0362, 0.0326, 0.0356, 0.0336, 0.0345, 0.0369], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:49:41,646 INFO [train.py:903] (1/4) Epoch 14, batch 3750, loss[loss=0.2149, simple_loss=0.2937, pruned_loss=0.06804, over 19784.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3004, pruned_loss=0.07439, over 3808300.72 frames. ], batch size: 63, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:50:42,053 INFO [train.py:903] (1/4) Epoch 14, batch 3800, loss[loss=0.1902, simple_loss=0.2775, pruned_loss=0.05149, over 19597.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2994, pruned_loss=0.07368, over 3822600.07 frames. ], batch size: 52, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:06,367 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.992e+02 6.384e+02 8.353e+02 1.667e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 02:51:11,002 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 02:51:40,637 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 02:51:42,072 INFO [train.py:903] (1/4) Epoch 14, batch 3850, loss[loss=0.2159, simple_loss=0.282, pruned_loss=0.07495, over 19762.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.299, pruned_loss=0.07367, over 3818639.72 frames. ], batch size: 48, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:54,736 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92624.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:19,031 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:26,085 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:43,617 INFO [train.py:903] (1/4) Epoch 14, batch 3900, loss[loss=0.1909, simple_loss=0.2638, pruned_loss=0.05904, over 19382.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3002, pruned_loss=0.07467, over 3804821.18 frames. 
], batch size: 47, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:10,396 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.910e+02 5.705e+02 7.277e+02 9.203e+02 2.913e+03, threshold=1.455e+03, percent-clipped=9.0 +2023-04-02 02:53:13,202 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92688.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:43,655 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:44,354 INFO [train.py:903] (1/4) Epoch 14, batch 3950, loss[loss=0.2114, simple_loss=0.2956, pruned_loss=0.06366, over 19719.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.07451, over 3799023.09 frames. ], batch size: 63, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:44,787 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92714.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:53:48,548 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 02:54:12,979 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:54:14,262 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92739.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:54:45,453 INFO [train.py:903] (1/4) Epoch 14, batch 4000, loss[loss=0.1884, simple_loss=0.2729, pruned_loss=0.05192, over 19673.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3002, pruned_loss=0.07402, over 3803325.13 frames. ], batch size: 53, lr: 5.91e-03, grad_scale: 8.0 +2023-04-02 02:55:11,213 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.983e+02 6.436e+02 8.255e+02 1.908e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 02:55:33,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 02:55:45,562 INFO [train.py:903] (1/4) Epoch 14, batch 4050, loss[loss=0.1775, simple_loss=0.2595, pruned_loss=0.0477, over 19735.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.299, pruned_loss=0.07326, over 3823544.78 frames. ], batch size: 51, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:55:52,887 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0907, 1.7028, 1.5857, 1.9348, 1.6602, 1.7329, 1.4801, 1.9691], + device='cuda:1'), covar=tensor([0.0910, 0.1381, 0.1449, 0.1027, 0.1325, 0.0512, 0.1367, 0.0691], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0349, 0.0294, 0.0241, 0.0297, 0.0243, 0.0287, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 02:56:28,995 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:56:45,576 INFO [train.py:903] (1/4) Epoch 14, batch 4100, loss[loss=0.2441, simple_loss=0.3175, pruned_loss=0.08537, over 19589.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.298, pruned_loss=0.07281, over 3828120.83 frames. ], batch size: 61, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:12,587 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-04-02 02:57:13,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 5.414e+02 6.604e+02 8.302e+02 1.654e+03, threshold=1.321e+03, percent-clipped=7.0 +2023-04-02 02:57:21,834 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 02:57:45,521 INFO [train.py:903] (1/4) Epoch 14, batch 4150, loss[loss=0.2191, simple_loss=0.3021, pruned_loss=0.06804, over 19357.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2998, pruned_loss=0.07378, over 3830065.76 frames. ], batch size: 66, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:51,032 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:19,981 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4850, 1.5526, 1.8695, 1.6753, 2.8362, 2.3256, 2.8984, 1.3320], + device='cuda:1'), covar=tensor([0.2182, 0.3650, 0.2244, 0.1727, 0.1354, 0.1868, 0.1335, 0.3710], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0588, 0.0633, 0.0446, 0.0599, 0.0500, 0.0645, 0.0503], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 02:58:28,821 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92949.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:48,464 INFO [train.py:903] (1/4) Epoch 14, batch 4200, loss[loss=0.2194, simple_loss=0.3112, pruned_loss=0.06384, over 19691.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2994, pruned_loss=0.07355, over 3818074.14 frames. ], batch size: 59, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:58:51,708 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 02:59:15,442 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.424e+02 5.036e+02 6.286e+02 7.807e+02 1.684e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-02 02:59:15,614 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:22,673 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:48,127 INFO [train.py:903] (1/4) Epoch 14, batch 4250, loss[loss=0.1911, simple_loss=0.2731, pruned_loss=0.05454, over 19402.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2997, pruned_loss=0.07425, over 3817760.81 frames. ], batch size: 48, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 03:00:03,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 03:00:05,133 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:15,028 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 03:00:37,782 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:47,386 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.98 vs. limit=5.0 +2023-04-02 03:00:48,956 INFO [train.py:903] (1/4) Epoch 14, batch 4300, loss[loss=0.2013, simple_loss=0.2717, pruned_loss=0.06539, over 19759.00 frames. 
], tot_loss[loss=0.2226, simple_loss=0.2983, pruned_loss=0.07339, over 3833207.74 frames. ], batch size: 47, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:01:12,680 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:18,339 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.279e+02 5.152e+02 6.308e+02 8.497e+02 2.668e+03, threshold=1.262e+03, percent-clipped=7.0 +2023-04-02 03:01:18,720 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7440, 1.4847, 1.2744, 1.6225, 1.5095, 1.3087, 1.1592, 1.5523], + device='cuda:1'), covar=tensor([0.1048, 0.1305, 0.1662, 0.1041, 0.1247, 0.0779, 0.1646, 0.0836], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0349, 0.0294, 0.0241, 0.0295, 0.0244, 0.0287, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:01:36,162 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:42,508 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 03:01:50,402 INFO [train.py:903] (1/4) Epoch 14, batch 4350, loss[loss=0.1868, simple_loss=0.269, pruned_loss=0.05229, over 19743.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2977, pruned_loss=0.07307, over 3824050.40 frames. ], batch size: 51, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:02:14,541 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:02:52,476 INFO [train.py:903] (1/4) Epoch 14, batch 4400, loss[loss=0.2471, simple_loss=0.3104, pruned_loss=0.09185, over 19713.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2991, pruned_loss=0.07409, over 3820935.31 frames. ], batch size: 51, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:03:15,301 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 03:03:18,717 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.134e+02 6.154e+02 7.916e+02 2.681e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 03:03:25,269 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 03:03:26,640 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:31,888 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93197.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:47,756 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3767, 2.2035, 2.0149, 1.8985, 1.6769, 1.9037, 0.4909, 1.3458], + device='cuda:1'), covar=tensor([0.0441, 0.0482, 0.0380, 0.0646, 0.0966, 0.0745, 0.1048, 0.0795], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0337, 0.0333, 0.0362, 0.0437, 0.0362, 0.0315, 0.0323], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:03:52,930 INFO [train.py:903] (1/4) Epoch 14, batch 4450, loss[loss=0.2382, simple_loss=0.3096, pruned_loss=0.08338, over 19842.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2985, pruned_loss=0.07362, over 3823525.94 frames. 
], batch size: 52, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:04:49,432 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93261.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:04:52,743 INFO [train.py:903] (1/4) Epoch 14, batch 4500, loss[loss=0.1775, simple_loss=0.2609, pruned_loss=0.04701, over 19397.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2984, pruned_loss=0.07365, over 3827490.75 frames. ], batch size: 48, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:05:21,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 5.314e+02 6.344e+02 7.896e+02 1.749e+03, threshold=1.269e+03, percent-clipped=5.0 +2023-04-02 03:05:25,895 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 03:05:28,487 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:45,694 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93308.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:52,289 INFO [train.py:903] (1/4) Epoch 14, batch 4550, loss[loss=0.2557, simple_loss=0.3287, pruned_loss=0.09136, over 19509.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.298, pruned_loss=0.07316, over 3819140.50 frames. ], batch size: 64, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:06:06,151 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 03:06:21,890 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:27,650 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 03:06:39,765 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.43 vs. limit=5.0 +2023-04-02 03:06:46,896 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:55,967 INFO [train.py:903] (1/4) Epoch 14, batch 4600, loss[loss=0.2069, simple_loss=0.2934, pruned_loss=0.06021, over 19793.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2975, pruned_loss=0.07279, over 3821435.65 frames. ], batch size: 56, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:07:05,026 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:09,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:17,775 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:22,006 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.339e+02 6.388e+02 8.227e+02 2.509e+03, threshold=1.278e+03, percent-clipped=2.0 +2023-04-02 03:07:35,566 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:49,443 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:56,013 INFO [train.py:903] (1/4) Epoch 14, batch 4650, loss[loss=0.2931, simple_loss=0.3543, pruned_loss=0.1159, over 13553.00 frames. 
], tot_loss[loss=0.2219, simple_loss=0.2978, pruned_loss=0.07298, over 3813779.92 frames. ], batch size: 136, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:07:56,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 03:08:12,038 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 03:08:23,193 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 03:08:43,050 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:44,284 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93453.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:45,671 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.75 vs. limit=5.0 +2023-04-02 03:08:56,300 INFO [train.py:903] (1/4) Epoch 14, batch 4700, loss[loss=0.1988, simple_loss=0.2817, pruned_loss=0.05795, over 19598.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2982, pruned_loss=0.07295, over 3825979.98 frames. ], batch size: 52, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:09:11,832 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:13,376 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:20,327 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 03:09:25,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 5.199e+02 6.337e+02 7.857e+02 1.524e+03, threshold=1.267e+03, percent-clipped=2.0 +2023-04-02 03:09:26,075 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,156 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,940 INFO [train.py:903] (1/4) Epoch 14, batch 4750, loss[loss=0.1788, simple_loss=0.2542, pruned_loss=0.05168, over 19328.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2979, pruned_loss=0.07304, over 3825410.55 frames. ], batch size: 44, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:55,740 INFO [train.py:903] (1/4) Epoch 14, batch 4800, loss[loss=0.2293, simple_loss=0.3069, pruned_loss=0.07585, over 19384.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2986, pruned_loss=0.07344, over 3825692.28 frames. 
], batch size: 70, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:56,149 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:18,610 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6838, 1.8746, 2.2407, 1.9512, 2.9632, 3.2656, 3.2349, 3.4916], + device='cuda:1'), covar=tensor([0.1420, 0.2897, 0.2439, 0.1971, 0.0901, 0.0379, 0.0190, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0254, 0.0222, 0.0164, 0.0207, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:11:22,945 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.541e+02 6.642e+02 8.296e+02 2.320e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 03:11:25,723 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:29,076 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:39,593 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 03:11:43,676 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1680, 5.2138, 6.1189, 6.0139, 1.9138, 5.7499, 5.0110, 5.7046], + device='cuda:1'), covar=tensor([0.1512, 0.0650, 0.0480, 0.0531, 0.5896, 0.0490, 0.0470, 0.1037], + device='cuda:1'), in_proj_covar=tensor([0.0710, 0.0644, 0.0851, 0.0729, 0.0757, 0.0594, 0.0510, 0.0776], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 03:11:57,047 INFO [train.py:903] (1/4) Epoch 14, batch 4850, loss[loss=0.2435, simple_loss=0.3074, pruned_loss=0.0898, over 19842.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2997, pruned_loss=0.07409, over 3818370.98 frames. ], batch size: 52, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:17,835 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:23,245 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 03:12:42,754 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 03:12:47,284 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 03:12:48,583 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 03:12:48,962 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93657.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:56,528 INFO [train.py:903] (1/4) Epoch 14, batch 4900, loss[loss=0.2406, simple_loss=0.3163, pruned_loss=0.08246, over 19602.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3002, pruned_loss=0.07426, over 3802695.94 frames. ], batch size: 57, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:56,938 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:57,707 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-02 03:13:18,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 03:13:25,555 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.842e+02 5.818e+02 7.123e+02 1.786e+03, threshold=1.164e+03, percent-clipped=2.0 +2023-04-02 03:13:26,357 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 03:13:26,360 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 03:13:28,151 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93689.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:33,914 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7131, 1.8349, 1.6480, 2.7201, 1.9802, 2.5372, 1.8996, 1.5499], + device='cuda:1'), covar=tensor([0.4667, 0.4150, 0.2479, 0.2514, 0.4068, 0.1939, 0.5382, 0.4388], + device='cuda:1'), in_proj_covar=tensor([0.0824, 0.0861, 0.0669, 0.0901, 0.0810, 0.0745, 0.0806, 0.0733], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 03:13:49,664 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:55,991 INFO [train.py:903] (1/4) Epoch 14, batch 4950, loss[loss=0.2377, simple_loss=0.3134, pruned_loss=0.08099, over 19749.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2997, pruned_loss=0.07355, over 3808116.33 frames. ], batch size: 63, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:14:16,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 03:14:21,090 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:32,410 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:36,689 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 03:14:58,213 INFO [train.py:903] (1/4) Epoch 14, batch 5000, loss[loss=0.253, simple_loss=0.3339, pruned_loss=0.08609, over 18599.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2995, pruned_loss=0.07358, over 3824084.33 frames. ], batch size: 74, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:15:04,082 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93768.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:05,337 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:07,254 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 03:15:17,358 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 03:15:18,126 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.98 vs. 
limit=5.0 +2023-04-02 03:15:25,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 5.347e+02 6.962e+02 9.103e+02 2.417e+03, threshold=1.392e+03, percent-clipped=9.0 +2023-04-02 03:15:26,646 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93788.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:33,775 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:56,382 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3072, 1.9207, 1.5009, 1.3704, 1.8456, 1.2508, 1.3372, 1.7269], + device='cuda:1'), covar=tensor([0.0726, 0.0626, 0.0765, 0.0622, 0.0399, 0.0920, 0.0486, 0.0358], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0303, 0.0324, 0.0246, 0.0236, 0.0323, 0.0288, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:15:59,315 INFO [train.py:903] (1/4) Epoch 14, batch 5050, loss[loss=0.2362, simple_loss=0.3135, pruned_loss=0.07949, over 19732.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2983, pruned_loss=0.07274, over 3831835.33 frames. ], batch size: 63, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:16:35,021 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 03:16:40,863 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:16:59,046 INFO [train.py:903] (1/4) Epoch 14, batch 5100, loss[loss=0.2021, simple_loss=0.2738, pruned_loss=0.06525, over 15571.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2979, pruned_loss=0.07267, over 3824840.22 frames. ], batch size: 34, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:17:09,141 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:17:09,903 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 03:17:13,175 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 03:17:16,699 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 03:17:26,263 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 4.904e+02 5.934e+02 7.645e+02 1.361e+03, threshold=1.187e+03, percent-clipped=0.0 +2023-04-02 03:17:56,876 INFO [train.py:903] (1/4) Epoch 14, batch 5150, loss[loss=0.2222, simple_loss=0.3018, pruned_loss=0.07125, over 18311.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2989, pruned_loss=0.07331, over 3820349.33 frames. ], batch size: 84, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:18:09,225 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 03:18:43,207 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 03:18:58,008 INFO [train.py:903] (1/4) Epoch 14, batch 5200, loss[loss=0.291, simple_loss=0.3493, pruned_loss=0.1164, over 18797.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2986, pruned_loss=0.07311, over 3828905.46 frames. 
], batch size: 74, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:19:13,843 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 03:19:25,470 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 5.268e+02 6.485e+02 8.631e+02 2.638e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 03:19:39,236 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:19:57,305 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 03:19:59,390 INFO [train.py:903] (1/4) Epoch 14, batch 5250, loss[loss=0.2381, simple_loss=0.318, pruned_loss=0.07913, over 19850.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2979, pruned_loss=0.07251, over 3824014.10 frames. ], batch size: 52, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:20:59,247 INFO [train.py:903] (1/4) Epoch 14, batch 5300, loss[loss=0.2328, simple_loss=0.3118, pruned_loss=0.07695, over 19598.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2971, pruned_loss=0.07234, over 3822526.35 frames. ], batch size: 61, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:21:16,449 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 03:21:27,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.389e+02 5.368e+02 7.020e+02 9.283e+02 2.840e+03, threshold=1.404e+03, percent-clipped=4.0 +2023-04-02 03:21:58,997 INFO [train.py:903] (1/4) Epoch 14, batch 5350, loss[loss=0.1817, simple_loss=0.2707, pruned_loss=0.04639, over 19765.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2976, pruned_loss=0.07282, over 3816174.51 frames. ], batch size: 54, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:22:23,143 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:22:34,750 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 03:23:01,401 INFO [train.py:903] (1/4) Epoch 14, batch 5400, loss[loss=0.1844, simple_loss=0.255, pruned_loss=0.05693, over 19736.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2984, pruned_loss=0.07288, over 3809818.33 frames. ], batch size: 45, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:23:29,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.537e+02 7.248e+02 8.700e+02 2.021e+03, threshold=1.450e+03, percent-clipped=3.0 +2023-04-02 03:24:03,213 INFO [train.py:903] (1/4) Epoch 14, batch 5450, loss[loss=0.2062, simple_loss=0.2829, pruned_loss=0.06478, over 19846.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.07291, over 3812335.63 frames. 
], batch size: 52, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:24:33,948 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94241.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:24:43,539 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:24:54,139 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2136, 1.1930, 1.4393, 1.3133, 2.3226, 2.0089, 2.4527, 1.0876], + device='cuda:1'), covar=tensor([0.2432, 0.4210, 0.2541, 0.2062, 0.1562, 0.2104, 0.1460, 0.3948], + device='cuda:1'), in_proj_covar=tensor([0.0502, 0.0591, 0.0639, 0.0450, 0.0604, 0.0505, 0.0649, 0.0510], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:25:02,870 INFO [train.py:903] (1/4) Epoch 14, batch 5500, loss[loss=0.2589, simple_loss=0.3281, pruned_loss=0.0949, over 17548.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2994, pruned_loss=0.07318, over 3812054.87 frames. ], batch size: 101, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:25:24,846 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 03:25:30,870 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.805e+02 5.794e+02 7.462e+02 1.465e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 03:25:46,411 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2094, 2.9276, 2.0126, 2.1228, 1.9505, 2.3843, 0.7510, 2.0718], + device='cuda:1'), covar=tensor([0.0579, 0.0517, 0.0668, 0.1034, 0.1011, 0.0895, 0.1204, 0.0900], + device='cuda:1'), in_proj_covar=tensor([0.0344, 0.0342, 0.0336, 0.0363, 0.0437, 0.0363, 0.0316, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:26:01,458 INFO [train.py:903] (1/4) Epoch 14, batch 5550, loss[loss=0.2326, simple_loss=0.31, pruned_loss=0.07757, over 18692.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3, pruned_loss=0.07386, over 3800093.06 frames. ], batch size: 74, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:26:08,355 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 03:26:23,616 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.14 vs. limit=5.0 +2023-04-02 03:26:37,729 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:26:53,084 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 03:26:57,912 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 03:27:01,443 INFO [train.py:903] (1/4) Epoch 14, batch 5600, loss[loss=0.2088, simple_loss=0.2892, pruned_loss=0.06413, over 19667.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3003, pruned_loss=0.07409, over 3809276.93 frames. 
], batch size: 58, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:27:05,647 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:27:30,022 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.188e+02 6.005e+02 7.911e+02 1.925e+03, threshold=1.201e+03, percent-clipped=8.0 +2023-04-02 03:28:03,375 INFO [train.py:903] (1/4) Epoch 14, batch 5650, loss[loss=0.2259, simple_loss=0.3088, pruned_loss=0.07152, over 19723.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3003, pruned_loss=0.07403, over 3824732.49 frames. ], batch size: 63, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:28:49,715 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 03:28:55,923 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:29:02,016 INFO [train.py:903] (1/4) Epoch 14, batch 5700, loss[loss=0.2317, simple_loss=0.315, pruned_loss=0.07424, over 18744.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3012, pruned_loss=0.07495, over 3831051.53 frames. ], batch size: 74, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:29:29,828 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.949e+02 6.008e+02 7.817e+02 2.884e+03, threshold=1.202e+03, percent-clipped=11.0 +2023-04-02 03:29:50,189 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94503.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:30:02,383 INFO [train.py:903] (1/4) Epoch 14, batch 5750, loss[loss=0.2026, simple_loss=0.2809, pruned_loss=0.06217, over 19723.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3004, pruned_loss=0.07436, over 3839061.40 frames. ], batch size: 51, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:30:04,717 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 03:30:09,071 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-02 03:30:11,537 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 03:30:17,736 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 03:30:21,288 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:31:04,683 INFO [train.py:903] (1/4) Epoch 14, batch 5800, loss[loss=0.2166, simple_loss=0.2958, pruned_loss=0.06867, over 19661.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3003, pruned_loss=0.07423, over 3826780.43 frames. ], batch size: 55, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:31:30,529 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94585.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:31:32,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.430e+02 7.155e+02 9.192e+02 1.752e+03, threshold=1.431e+03, percent-clipped=10.0 +2023-04-02 03:31:35,425 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 03:31:38,986 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-04-02 03:32:06,961 INFO [train.py:903] (1/4) Epoch 14, batch 5850, loss[loss=0.2542, simple_loss=0.3345, pruned_loss=0.08695, over 19704.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3009, pruned_loss=0.0746, over 3829580.06 frames. ], batch size: 59, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:33:06,738 INFO [train.py:903] (1/4) Epoch 14, batch 5900, loss[loss=0.2478, simple_loss=0.3218, pruned_loss=0.0869, over 19696.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.302, pruned_loss=0.07515, over 3807593.94 frames. ], batch size: 59, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:33:07,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 03:33:27,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 03:33:33,163 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.122e+02 5.971e+02 8.409e+02 2.018e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-02 03:33:43,156 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2421, 2.0681, 2.0139, 2.4789, 2.3528, 1.8927, 1.8627, 2.4353], + device='cuda:1'), covar=tensor([0.1023, 0.1785, 0.1453, 0.0950, 0.1369, 0.0654, 0.1460, 0.0681], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0356, 0.0300, 0.0246, 0.0301, 0.0246, 0.0292, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:33:50,048 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94700.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:34:01,315 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94710.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:05,812 INFO [train.py:903] (1/4) Epoch 14, batch 5950, loss[loss=0.1958, simple_loss=0.2742, pruned_loss=0.05869, over 19734.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3015, pruned_loss=0.07486, over 3824189.50 frames. ], batch size: 46, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:34:06,225 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94714.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:37,319 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94739.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:35:04,629 INFO [train.py:903] (1/4) Epoch 14, batch 6000, loss[loss=0.2892, simple_loss=0.3529, pruned_loss=0.1127, over 18229.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3021, pruned_loss=0.0751, over 3830036.79 frames. ], batch size: 84, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:35:04,629 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 03:35:17,168 INFO [train.py:937] (1/4) Epoch 14, validation: loss=0.1744, simple_loss=0.2748, pruned_loss=0.03705, over 944034.00 frames. +2023-04-02 03:35:17,169 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 03:35:47,201 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.018e+02 6.191e+02 7.483e+02 1.325e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-02 03:36:17,853 INFO [train.py:903] (1/4) Epoch 14, batch 6050, loss[loss=0.2517, simple_loss=0.323, pruned_loss=0.09017, over 19506.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3003, pruned_loss=0.07415, over 3841594.31 frames. 
], batch size: 64, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:36:33,154 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94825.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:37:20,914 INFO [train.py:903] (1/4) Epoch 14, batch 6100, loss[loss=0.1968, simple_loss=0.267, pruned_loss=0.06328, over 19751.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.07451, over 3836327.13 frames. ], batch size: 46, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:37:48,990 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.260e+02 6.294e+02 8.137e+02 1.551e+03, threshold=1.259e+03, percent-clipped=3.0 +2023-04-02 03:38:21,673 INFO [train.py:903] (1/4) Epoch 14, batch 6150, loss[loss=0.2605, simple_loss=0.326, pruned_loss=0.09748, over 17382.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3001, pruned_loss=0.07443, over 3829246.66 frames. ], batch size: 101, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:38:48,820 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 03:39:13,109 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94956.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:39:21,882 INFO [train.py:903] (1/4) Epoch 14, batch 6200, loss[loss=0.1804, simple_loss=0.2588, pruned_loss=0.05097, over 19394.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3003, pruned_loss=0.07451, over 3823989.79 frames. ], batch size: 48, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:39:44,503 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94981.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:39:51,883 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 5.470e+02 6.385e+02 8.085e+02 2.296e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 03:40:19,513 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8539, 1.1173, 1.5624, 0.4878, 2.0308, 2.4464, 2.0822, 2.5796], + device='cuda:1'), covar=tensor([0.1618, 0.3755, 0.3139, 0.2601, 0.0557, 0.0258, 0.0353, 0.0308], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0303, 0.0330, 0.0253, 0.0224, 0.0167, 0.0208, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:40:22,504 INFO [train.py:903] (1/4) Epoch 14, batch 6250, loss[loss=0.2252, simple_loss=0.3106, pruned_loss=0.06991, over 18862.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3, pruned_loss=0.07401, over 3828714.82 frames. ], batch size: 74, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:40:31,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 03:40:55,024 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 03:41:24,163 INFO [train.py:903] (1/4) Epoch 14, batch 6300, loss[loss=0.2038, simple_loss=0.283, pruned_loss=0.06225, over 19380.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3006, pruned_loss=0.07441, over 3826422.24 frames. 
], batch size: 48, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:41:44,490 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:41:51,896 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.537e+02 5.238e+02 6.215e+02 7.195e+02 1.642e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 03:42:15,045 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95106.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:42:24,131 INFO [train.py:903] (1/4) Epoch 14, batch 6350, loss[loss=0.1768, simple_loss=0.2556, pruned_loss=0.04897, over 19747.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3001, pruned_loss=0.07452, over 3829779.00 frames. ], batch size: 45, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:42:34,692 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95123.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:42:50,475 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1271, 2.2813, 2.3788, 3.0982, 2.2629, 2.9686, 2.6110, 2.1616], + device='cuda:1'), covar=tensor([0.3947, 0.3477, 0.1577, 0.2171, 0.3955, 0.1745, 0.3708, 0.2857], + device='cuda:1'), in_proj_covar=tensor([0.0826, 0.0864, 0.0667, 0.0903, 0.0811, 0.0752, 0.0809, 0.0735], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 03:43:23,501 INFO [train.py:903] (1/4) Epoch 14, batch 6400, loss[loss=0.2056, simple_loss=0.2865, pruned_loss=0.06238, over 19858.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3015, pruned_loss=0.07543, over 3829969.78 frames. ], batch size: 52, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:43:31,393 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8906, 0.7972, 0.8508, 1.0371, 0.8584, 0.9323, 1.0013, 0.8730], + device='cuda:1'), covar=tensor([0.0715, 0.0819, 0.0831, 0.0563, 0.0746, 0.0686, 0.0737, 0.0646], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0222, 0.0243, 0.0228, 0.0211, 0.0194, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 03:43:52,814 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 5.689e+02 7.116e+02 8.755e+02 2.889e+03, threshold=1.423e+03, percent-clipped=3.0 +2023-04-02 03:44:23,637 INFO [train.py:903] (1/4) Epoch 14, batch 6450, loss[loss=0.2051, simple_loss=0.2976, pruned_loss=0.05635, over 19687.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3015, pruned_loss=0.07521, over 3818357.80 frames. ], batch size: 60, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:09,466 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 03:45:25,844 INFO [train.py:903] (1/4) Epoch 14, batch 6500, loss[loss=0.183, simple_loss=0.2714, pruned_loss=0.04729, over 19673.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2995, pruned_loss=0.07374, over 3822232.38 frames. ], batch size: 53, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:32,076 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-02 03:45:32,332 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 03:45:43,522 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:52,733 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:54,573 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.245e+02 6.559e+02 8.783e+02 2.152e+03, threshold=1.312e+03, percent-clipped=6.0 +2023-04-02 03:46:27,867 INFO [train.py:903] (1/4) Epoch 14, batch 6550, loss[loss=0.2107, simple_loss=0.2877, pruned_loss=0.06687, over 19393.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2999, pruned_loss=0.07386, over 3808924.98 frames. ], batch size: 47, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:47:20,329 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95357.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:47:24,832 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1279, 1.4403, 1.8738, 1.6265, 2.9287, 4.3280, 4.3142, 4.8626], + device='cuda:1'), covar=tensor([0.1679, 0.3539, 0.2995, 0.1969, 0.0557, 0.0210, 0.0164, 0.0131], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0304, 0.0330, 0.0253, 0.0225, 0.0167, 0.0207, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:47:28,131 INFO [train.py:903] (1/4) Epoch 14, batch 6600, loss[loss=0.2373, simple_loss=0.3223, pruned_loss=0.07615, over 19509.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2999, pruned_loss=0.07322, over 3822643.23 frames. ], batch size: 64, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:47:57,394 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.401e+02 5.166e+02 6.061e+02 7.266e+02 1.890e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 03:48:28,437 INFO [train.py:903] (1/4) Epoch 14, batch 6650, loss[loss=0.2449, simple_loss=0.3234, pruned_loss=0.08323, over 19757.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3003, pruned_loss=0.07363, over 3821094.01 frames. ], batch size: 54, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:29,378 INFO [train.py:903] (1/4) Epoch 14, batch 6700, loss[loss=0.1678, simple_loss=0.2463, pruned_loss=0.04467, over 19743.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2992, pruned_loss=0.07377, over 3813311.33 frames. 
], batch size: 47, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:33,785 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95467.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:49:53,206 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2314, 1.3880, 1.8473, 1.2912, 2.6608, 3.7552, 3.5429, 3.9575], + device='cuda:1'), covar=tensor([0.1500, 0.3362, 0.2696, 0.2086, 0.0519, 0.0159, 0.0176, 0.0180], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0302, 0.0328, 0.0252, 0.0223, 0.0167, 0.0207, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:49:55,591 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3894, 1.4999, 1.8454, 1.7044, 2.7313, 2.4024, 2.8243, 1.3270], + device='cuda:1'), covar=tensor([0.2268, 0.3941, 0.2390, 0.1704, 0.1451, 0.1805, 0.1482, 0.3707], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0589, 0.0639, 0.0449, 0.0598, 0.0504, 0.0646, 0.0508], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:49:57,461 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.267e+02 5.901e+02 8.158e+02 1.902e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-02 03:50:25,036 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6383, 2.5622, 2.4612, 2.9736, 2.8599, 2.4403, 2.3237, 2.9046], + device='cuda:1'), covar=tensor([0.0752, 0.1268, 0.1034, 0.0799, 0.0970, 0.0395, 0.1006, 0.0473], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0353, 0.0297, 0.0245, 0.0298, 0.0245, 0.0288, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:50:25,738 INFO [train.py:903] (1/4) Epoch 14, batch 6750, loss[loss=0.1999, simple_loss=0.2733, pruned_loss=0.06322, over 19482.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2998, pruned_loss=0.0745, over 3798672.61 frames. ], batch size: 49, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:21,226 INFO [train.py:903] (1/4) Epoch 14, batch 6800, loss[loss=0.2246, simple_loss=0.3026, pruned_loss=0.0733, over 18667.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3008, pruned_loss=0.07539, over 3808891.38 frames. ], batch size: 74, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:41,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95582.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:51:46,734 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.198e+02 6.166e+02 8.008e+02 1.508e+03, threshold=1.233e+03, percent-clipped=6.0 +2023-04-02 03:52:06,365 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 03:52:07,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 03:52:10,204 INFO [train.py:903] (1/4) Epoch 15, batch 0, loss[loss=0.2369, simple_loss=0.3082, pruned_loss=0.08283, over 19584.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3082, pruned_loss=0.08283, over 19584.00 frames. ], batch size: 52, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:52:10,204 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 03:52:21,744 INFO [train.py:937] (1/4) Epoch 15, validation: loss=0.1744, simple_loss=0.2751, pruned_loss=0.03681, over 944034.00 frames. 
+2023-04-02 03:52:21,744 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 03:52:26,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3945, 1.4538, 1.7619, 1.6596, 2.6826, 2.2880, 2.8095, 1.0163], + device='cuda:1'), covar=tensor([0.2331, 0.4075, 0.2578, 0.1820, 0.1425, 0.1963, 0.1475, 0.4187], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0592, 0.0642, 0.0452, 0.0602, 0.0507, 0.0650, 0.0510], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 03:52:33,134 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 03:52:58,948 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:08,225 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:14,626 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:22,136 INFO [train.py:903] (1/4) Epoch 15, batch 50, loss[loss=0.2313, simple_loss=0.2943, pruned_loss=0.08419, over 19749.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3028, pruned_loss=0.07383, over 881537.54 frames. ], batch size: 47, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:53:57,869 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5360, 1.1640, 1.3691, 1.2755, 2.1283, 0.9809, 1.9279, 2.4582], + device='cuda:1'), covar=tensor([0.0646, 0.2657, 0.2628, 0.1512, 0.0896, 0.1965, 0.1007, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0374, 0.0347, 0.0367, 0.0327, 0.0357, 0.0336, 0.0344, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:53:58,791 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 03:54:07,437 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 03:54:20,258 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 5.472e+02 6.461e+02 8.212e+02 1.912e+03, threshold=1.292e+03, percent-clipped=7.0 +2023-04-02 03:54:26,851 INFO [train.py:903] (1/4) Epoch 15, batch 100, loss[loss=0.1881, simple_loss=0.2595, pruned_loss=0.0584, over 19752.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2992, pruned_loss=0.07311, over 1543074.68 frames. ], batch size: 46, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:54:37,473 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 03:54:37,589 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95701.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:22,804 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:28,252 INFO [train.py:903] (1/4) Epoch 15, batch 150, loss[loss=0.2247, simple_loss=0.3065, pruned_loss=0.07146, over 17194.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2991, pruned_loss=0.07288, over 2050728.00 frames. 
], batch size: 101, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:55:32,093 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:56:23,968 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 5.351e+02 6.322e+02 7.452e+02 1.833e+03, threshold=1.264e+03, percent-clipped=1.0 +2023-04-02 03:56:27,304 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 03:56:28,490 INFO [train.py:903] (1/4) Epoch 15, batch 200, loss[loss=0.174, simple_loss=0.2453, pruned_loss=0.0514, over 19101.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2997, pruned_loss=0.07349, over 2441490.99 frames. ], batch size: 42, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:56:45,610 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4566, 2.2090, 2.1448, 2.5855, 2.4715, 2.1984, 1.8488, 2.6220], + device='cuda:1'), covar=tensor([0.0912, 0.1638, 0.1405, 0.1065, 0.1293, 0.0490, 0.1386, 0.0606], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0357, 0.0301, 0.0247, 0.0301, 0.0247, 0.0291, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:56:51,099 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5636, 1.2010, 1.4227, 1.4997, 2.1877, 1.1606, 1.9962, 2.4872], + device='cuda:1'), covar=tensor([0.0676, 0.2538, 0.2529, 0.1268, 0.0870, 0.1750, 0.0986, 0.0435], + device='cuda:1'), in_proj_covar=tensor([0.0374, 0.0346, 0.0369, 0.0328, 0.0354, 0.0337, 0.0344, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 03:56:59,688 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:57:15,604 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:24,888 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95838.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:29,639 INFO [train.py:903] (1/4) Epoch 15, batch 250, loss[loss=0.2407, simple_loss=0.308, pruned_loss=0.08674, over 19657.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3003, pruned_loss=0.07374, over 2753227.61 frames. 
], batch size: 60, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:57:56,109 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95863.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:58,216 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:58:24,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.277e+02 6.948e+02 9.039e+02 3.101e+03, threshold=1.390e+03, percent-clipped=9.0 +2023-04-02 03:58:28,478 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5116, 1.5221, 1.7756, 1.6873, 2.5579, 2.2889, 2.5613, 1.2631], + device='cuda:1'), covar=tensor([0.2055, 0.3844, 0.2288, 0.1660, 0.1312, 0.1724, 0.1331, 0.3521], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0589, 0.0636, 0.0449, 0.0599, 0.0503, 0.0644, 0.0507], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 03:58:30,109 INFO [train.py:903] (1/4) Epoch 15, batch 300, loss[loss=0.1935, simple_loss=0.2671, pruned_loss=0.05989, over 16037.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.074, over 2988748.15 frames. ], batch size: 35, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:32,839 INFO [train.py:903] (1/4) Epoch 15, batch 350, loss[loss=0.1882, simple_loss=0.2752, pruned_loss=0.05057, over 19844.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07383, over 3165834.78 frames. ], batch size: 52, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:33,868 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 03:59:58,802 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7933, 4.8712, 5.5846, 5.5470, 2.1224, 5.2046, 4.4657, 5.1858], + device='cuda:1'), covar=tensor([0.1408, 0.1043, 0.0521, 0.0556, 0.5416, 0.0689, 0.0567, 0.1150], + device='cuda:1'), in_proj_covar=tensor([0.0719, 0.0646, 0.0856, 0.0740, 0.0762, 0.0600, 0.0516, 0.0787], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:00:17,462 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:28,193 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.018e+02 5.906e+02 6.897e+02 1.495e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-04-02 04:00:32,776 INFO [train.py:903] (1/4) Epoch 15, batch 400, loss[loss=0.2233, simple_loss=0.2997, pruned_loss=0.07343, over 19578.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2999, pruned_loss=0.07427, over 3320809.67 frames. 
], batch size: 52, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 04:00:34,350 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:39,881 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95998.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:44,452 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:04,791 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:12,818 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3514, 1.3484, 1.4415, 1.4873, 1.7744, 1.8971, 1.7467, 0.4449], + device='cuda:1'), covar=tensor([0.2250, 0.3887, 0.2478, 0.1911, 0.1565, 0.2157, 0.1361, 0.4193], + device='cuda:1'), in_proj_covar=tensor([0.0502, 0.0590, 0.0643, 0.0453, 0.0605, 0.0508, 0.0648, 0.0510], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:01:16,058 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:33,841 INFO [train.py:903] (1/4) Epoch 15, batch 450, loss[loss=0.2467, simple_loss=0.3152, pruned_loss=0.08906, over 13509.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2993, pruned_loss=0.07376, over 3438283.54 frames. ], batch size: 136, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:07,790 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 04:02:07,820 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 04:02:12,880 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:31,246 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.837e+02 5.995e+02 7.453e+02 1.580e+03, threshold=1.199e+03, percent-clipped=6.0 +2023-04-02 04:02:36,669 INFO [train.py:903] (1/4) Epoch 15, batch 500, loss[loss=0.2201, simple_loss=0.293, pruned_loss=0.07359, over 19451.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2993, pruned_loss=0.07378, over 3531510.65 frames. ], batch size: 49, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:39,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:43,796 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:44,805 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:03:38,906 INFO [train.py:903] (1/4) Epoch 15, batch 550, loss[loss=0.2835, simple_loss=0.3463, pruned_loss=0.1104, over 19286.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3001, pruned_loss=0.07405, over 3604247.15 frames. 
], batch size: 66, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:04:18,072 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96174.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:04:29,221 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:04:35,629 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.342e+02 6.491e+02 8.104e+02 1.503e+03, threshold=1.298e+03, percent-clipped=3.0 +2023-04-02 04:04:40,051 INFO [train.py:903] (1/4) Epoch 15, batch 600, loss[loss=0.2543, simple_loss=0.3391, pruned_loss=0.08471, over 19669.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2995, pruned_loss=0.07354, over 3655719.16 frames. ], batch size: 55, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:05:00,805 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:05:20,412 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 04:05:35,506 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9963, 2.1162, 2.3023, 2.7434, 2.0473, 2.6306, 2.5013, 2.0608], + device='cuda:1'), covar=tensor([0.3755, 0.3140, 0.1496, 0.2083, 0.3523, 0.1682, 0.3668, 0.2827], + device='cuda:1'), in_proj_covar=tensor([0.0828, 0.0867, 0.0667, 0.0899, 0.0814, 0.0747, 0.0806, 0.0731], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 04:05:43,298 INFO [train.py:903] (1/4) Epoch 15, batch 650, loss[loss=0.2179, simple_loss=0.3006, pruned_loss=0.06755, over 19289.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2991, pruned_loss=0.07369, over 3687358.98 frames. ], batch size: 66, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:06:41,549 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.105e+02 6.385e+02 8.770e+02 1.706e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 04:06:42,914 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96289.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:06:46,045 INFO [train.py:903] (1/4) Epoch 15, batch 700, loss[loss=0.2118, simple_loss=0.2903, pruned_loss=0.06662, over 19662.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2988, pruned_loss=0.07267, over 3720085.16 frames. ], batch size: 53, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:12,184 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-02 04:07:19,714 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7366, 1.8157, 2.0280, 2.3256, 1.6282, 2.1960, 2.1821, 1.8693], + device='cuda:1'), covar=tensor([0.3807, 0.3441, 0.1637, 0.1980, 0.3610, 0.1799, 0.4087, 0.3064], + device='cuda:1'), in_proj_covar=tensor([0.0830, 0.0868, 0.0668, 0.0902, 0.0816, 0.0749, 0.0807, 0.0732], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 04:07:26,410 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,390 INFO [train.py:903] (1/4) Epoch 15, batch 750, loss[loss=0.1995, simple_loss=0.2704, pruned_loss=0.06433, over 19745.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.301, pruned_loss=0.07385, over 3734229.52 frames. 
], batch size: 47, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:47,554 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,699 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:57,753 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:29,486 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:44,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.276e+02 6.207e+02 7.536e+02 1.572e+03, threshold=1.241e+03, percent-clipped=2.0 +2023-04-02 04:08:49,809 INFO [train.py:903] (1/4) Epoch 15, batch 800, loss[loss=0.2273, simple_loss=0.3057, pruned_loss=0.07442, over 19661.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3009, pruned_loss=0.07376, over 3755585.26 frames. ], batch size: 55, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:08:53,677 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6392, 1.3358, 1.4623, 1.4394, 3.2232, 0.9623, 2.2594, 3.5874], + device='cuda:1'), covar=tensor([0.0423, 0.2659, 0.2784, 0.1784, 0.0736, 0.2528, 0.1226, 0.0247], + device='cuda:1'), in_proj_covar=tensor([0.0374, 0.0347, 0.0367, 0.0327, 0.0353, 0.0334, 0.0344, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:09:04,710 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 04:09:09,567 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:09:50,620 INFO [train.py:903] (1/4) Epoch 15, batch 850, loss[loss=0.1937, simple_loss=0.2588, pruned_loss=0.06435, over 19741.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3006, pruned_loss=0.07405, over 3775918.60 frames. ], batch size: 46, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:51,925 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:10,272 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:41,148 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 04:10:47,719 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.238e+02 6.465e+02 7.879e+02 1.664e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 04:10:52,486 INFO [train.py:903] (1/4) Epoch 15, batch 900, loss[loss=0.2016, simple_loss=0.2882, pruned_loss=0.05752, over 19746.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3012, pruned_loss=0.07451, over 3791485.20 frames. 
], batch size: 54, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:20,266 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2487, 1.3068, 1.4527, 1.4068, 1.7514, 1.7679, 1.7580, 0.5710], + device='cuda:1'), covar=tensor([0.2289, 0.3872, 0.2423, 0.1809, 0.1498, 0.2182, 0.1366, 0.4068], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0586, 0.0637, 0.0448, 0.0598, 0.0503, 0.0641, 0.0505], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:11:36,486 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:11:55,050 INFO [train.py:903] (1/4) Epoch 15, batch 950, loss[loss=0.2342, simple_loss=0.3157, pruned_loss=0.07638, over 19681.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3012, pruned_loss=0.0744, over 3801514.16 frames. ], batch size: 63, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:56,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 04:11:59,030 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96545.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:12:14,131 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:30,393 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96570.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:12:41,739 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:52,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.225e+02 6.143e+02 7.839e+02 1.754e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-02 04:12:53,871 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2659, 3.7246, 3.8797, 3.8765, 1.4392, 3.6538, 3.2122, 3.5754], + device='cuda:1'), covar=tensor([0.1592, 0.0917, 0.0677, 0.0715, 0.5730, 0.0862, 0.0696, 0.1148], + device='cuda:1'), in_proj_covar=tensor([0.0732, 0.0659, 0.0873, 0.0745, 0.0777, 0.0609, 0.0523, 0.0802], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:12:57,219 INFO [train.py:903] (1/4) Epoch 15, batch 1000, loss[loss=0.1771, simple_loss=0.2586, pruned_loss=0.04777, over 19729.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3005, pruned_loss=0.07379, over 3824376.28 frames. ], batch size: 47, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:13,543 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:13:51,308 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 04:13:59,278 INFO [train.py:903] (1/4) Epoch 15, batch 1050, loss[loss=0.179, simple_loss=0.2532, pruned_loss=0.05237, over 19754.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2986, pruned_loss=0.07263, over 3838885.66 frames. ], batch size: 47, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:59,635 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:31,315 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-02 04:14:53,730 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:57,023 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.831e+02 5.170e+02 6.505e+02 8.376e+02 1.590e+03, threshold=1.301e+03, percent-clipped=4.0 +2023-04-02 04:15:01,344 INFO [train.py:903] (1/4) Epoch 15, batch 1100, loss[loss=0.2404, simple_loss=0.3158, pruned_loss=0.08247, over 19749.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2981, pruned_loss=0.07274, over 3833803.56 frames. ], batch size: 63, lr: 5.60e-03, grad_scale: 4.0 +2023-04-02 04:15:14,750 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1194, 3.4191, 1.9125, 2.1677, 3.1050, 1.8434, 1.5150, 2.1763], + device='cuda:1'), covar=tensor([0.1226, 0.0534, 0.1024, 0.0704, 0.0482, 0.1089, 0.0919, 0.0699], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0305, 0.0325, 0.0248, 0.0238, 0.0328, 0.0291, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:15:28,053 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:15:58,781 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:02,838 INFO [train.py:903] (1/4) Epoch 15, batch 1150, loss[loss=0.2467, simple_loss=0.3211, pruned_loss=0.08617, over 18140.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2972, pruned_loss=0.07202, over 3830010.61 frames. ], batch size: 83, lr: 5.59e-03, grad_scale: 4.0 +2023-04-02 04:16:15,362 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:01,609 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.122e+02 6.460e+02 8.216e+02 1.619e+03, threshold=1.292e+03, percent-clipped=5.0 +2023-04-02 04:17:06,202 INFO [train.py:903] (1/4) Epoch 15, batch 1200, loss[loss=0.1886, simple_loss=0.2676, pruned_loss=0.05478, over 19789.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2984, pruned_loss=0.07284, over 3827035.34 frames. ], batch size: 49, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:17:08,690 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:17,127 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:32,588 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:39,085 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 04:18:03,059 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:18:07,999 INFO [train.py:903] (1/4) Epoch 15, batch 1250, loss[loss=0.2166, simple_loss=0.2824, pruned_loss=0.07537, over 19618.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2988, pruned_loss=0.07254, over 3836618.68 frames. 
], batch size: 50, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:18:38,294 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:05,685 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 5.442e+02 6.897e+02 8.528e+02 1.967e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 04:19:09,075 INFO [train.py:903] (1/4) Epoch 15, batch 1300, loss[loss=0.2584, simple_loss=0.3262, pruned_loss=0.09525, over 19321.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2997, pruned_loss=0.07338, over 3834673.52 frames. ], batch size: 66, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:19:17,614 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:48,615 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:20:12,477 INFO [train.py:903] (1/4) Epoch 15, batch 1350, loss[loss=0.2255, simple_loss=0.3085, pruned_loss=0.07125, over 19313.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2996, pruned_loss=0.07354, over 3839243.43 frames. ], batch size: 66, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:11,447 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.520e+02 5.515e+02 7.272e+02 1.782e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-04-02 04:21:15,870 INFO [train.py:903] (1/4) Epoch 15, batch 1400, loss[loss=0.2572, simple_loss=0.3256, pruned_loss=0.09438, over 13040.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2983, pruned_loss=0.0727, over 3828444.39 frames. ], batch size: 137, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:49,679 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:22:19,067 INFO [train.py:903] (1/4) Epoch 15, batch 1450, loss[loss=0.2384, simple_loss=0.3232, pruned_loss=0.07681, over 19675.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2989, pruned_loss=0.07297, over 3820371.13 frames. ], batch size: 55, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:22:20,263 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 04:22:38,917 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:09,811 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:18,570 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.198e+02 6.417e+02 9.294e+02 1.968e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 04:23:21,918 INFO [train.py:903] (1/4) Epoch 15, batch 1500, loss[loss=0.2336, simple_loss=0.3138, pruned_loss=0.0767, over 19681.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2977, pruned_loss=0.07247, over 3832923.42 frames. ], batch size: 59, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:23:30,901 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-02 04:24:01,256 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:20,465 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:24,793 INFO [train.py:903] (1/4) Epoch 15, batch 1550, loss[loss=0.1937, simple_loss=0.2702, pruned_loss=0.05858, over 19290.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2987, pruned_loss=0.07316, over 3819109.78 frames. ], batch size: 44, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:24:31,842 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:25:22,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.223e+02 6.490e+02 8.468e+02 1.572e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-02 04:25:26,786 INFO [train.py:903] (1/4) Epoch 15, batch 1600, loss[loss=0.2236, simple_loss=0.3032, pruned_loss=0.07199, over 17363.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2994, pruned_loss=0.07328, over 3821448.23 frames. ], batch size: 101, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:25:51,568 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 04:26:20,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 04:26:28,830 INFO [train.py:903] (1/4) Epoch 15, batch 1650, loss[loss=0.226, simple_loss=0.317, pruned_loss=0.06751, over 19660.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2993, pruned_loss=0.07337, over 3828699.88 frames. ], batch size: 55, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:26:42,598 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:03,853 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7646, 2.1556, 2.0032, 2.7082, 2.5079, 2.3063, 2.1396, 2.6570], + device='cuda:1'), covar=tensor([0.0774, 0.1668, 0.1416, 0.0913, 0.1153, 0.0469, 0.1215, 0.0576], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0349, 0.0296, 0.0242, 0.0293, 0.0244, 0.0287, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:27:14,810 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:27,487 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.574e+02 5.168e+02 6.563e+02 8.938e+02 1.305e+03, threshold=1.313e+03, percent-clipped=1.0 +2023-04-02 04:27:30,909 INFO [train.py:903] (1/4) Epoch 15, batch 1700, loss[loss=0.2371, simple_loss=0.3068, pruned_loss=0.08369, over 19772.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2996, pruned_loss=0.07321, over 3835869.76 frames. ], batch size: 54, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:27:49,503 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-04-02 04:28:02,160 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3546, 3.9334, 2.6968, 3.4991, 0.8491, 3.8497, 3.7115, 3.9013], + device='cuda:1'), covar=tensor([0.0722, 0.1185, 0.1928, 0.0863, 0.4146, 0.0735, 0.0766, 0.0863], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0384, 0.0458, 0.0326, 0.0389, 0.0391, 0.0382, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:28:11,292 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 04:28:32,695 INFO [train.py:903] (1/4) Epoch 15, batch 1750, loss[loss=0.1915, simple_loss=0.2623, pruned_loss=0.06034, over 19776.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2985, pruned_loss=0.07271, over 3829691.47 frames. ], batch size: 47, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:28:55,651 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:28:57,846 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:01,596 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:30,673 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.096e+02 6.393e+02 8.037e+02 1.428e+03, threshold=1.279e+03, percent-clipped=5.0 +2023-04-02 04:29:33,918 INFO [train.py:903] (1/4) Epoch 15, batch 1800, loss[loss=0.2398, simple_loss=0.3183, pruned_loss=0.08071, over 18204.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2987, pruned_loss=0.07293, over 3821553.05 frames. ], batch size: 83, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:29:34,237 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:30:03,917 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3751, 2.9950, 2.2921, 2.3387, 2.1975, 2.5913, 1.0693, 2.1430], + device='cuda:1'), covar=tensor([0.0531, 0.0504, 0.0570, 0.0906, 0.0935, 0.0892, 0.1153, 0.0896], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0340, 0.0337, 0.0366, 0.0438, 0.0365, 0.0319, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:30:32,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 04:30:36,841 INFO [train.py:903] (1/4) Epoch 15, batch 1850, loss[loss=0.2547, simple_loss=0.3199, pruned_loss=0.09479, over 19721.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2968, pruned_loss=0.07178, over 3816195.16 frames. ], batch size: 63, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:10,807 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-02 04:31:21,349 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:31:25,848 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9168, 4.3658, 4.6697, 4.6547, 1.6822, 4.3668, 3.8600, 4.3539], + device='cuda:1'), covar=tensor([0.1538, 0.0708, 0.0563, 0.0591, 0.5537, 0.0693, 0.0609, 0.1073], + device='cuda:1'), in_proj_covar=tensor([0.0728, 0.0655, 0.0865, 0.0744, 0.0773, 0.0609, 0.0523, 0.0797], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:31:35,889 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.535e+02 5.674e+02 7.772e+02 1.771e+03, threshold=1.135e+03, percent-clipped=3.0 +2023-04-02 04:31:39,234 INFO [train.py:903] (1/4) Epoch 15, batch 1900, loss[loss=0.1982, simple_loss=0.2778, pruned_loss=0.05933, over 19725.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2978, pruned_loss=0.07249, over 3806603.80 frames. ], batch size: 51, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:47,728 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5551, 4.0667, 4.2186, 4.2376, 1.6360, 3.9939, 3.4544, 3.9449], + device='cuda:1'), covar=tensor([0.1424, 0.0727, 0.0598, 0.0614, 0.5007, 0.0715, 0.0642, 0.1024], + device='cuda:1'), in_proj_covar=tensor([0.0725, 0.0654, 0.0863, 0.0743, 0.0771, 0.0607, 0.0522, 0.0794], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:31:58,013 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 04:32:00,939 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97509.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:02,867 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 04:32:28,310 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 04:32:32,289 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:40,693 INFO [train.py:903] (1/4) Epoch 15, batch 1950, loss[loss=0.2269, simple_loss=0.2911, pruned_loss=0.08138, over 19621.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2984, pruned_loss=0.07297, over 3807044.37 frames. ], batch size: 50, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:33:19,888 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:33:39,666 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 4.980e+02 6.456e+02 8.636e+02 2.349e+03, threshold=1.291e+03, percent-clipped=8.0 +2023-04-02 04:33:43,294 INFO [train.py:903] (1/4) Epoch 15, batch 2000, loss[loss=0.1852, simple_loss=0.2591, pruned_loss=0.05563, over 19755.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2974, pruned_loss=0.07243, over 3802209.92 frames. ], batch size: 46, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:34:06,540 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. 
limit=2.0 +2023-04-02 04:34:20,870 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:34:35,244 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 04:34:42,264 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 04:34:47,027 INFO [train.py:903] (1/4) Epoch 15, batch 2050, loss[loss=0.2211, simple_loss=0.3058, pruned_loss=0.06817, over 19596.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2971, pruned_loss=0.07215, over 3807939.10 frames. ], batch size: 61, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:01,879 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 04:35:03,049 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 04:35:23,949 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 04:35:47,018 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.998e+02 6.604e+02 7.928e+02 1.987e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 04:35:50,609 INFO [train.py:903] (1/4) Epoch 15, batch 2100, loss[loss=0.2216, simple_loss=0.3017, pruned_loss=0.07076, over 19498.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2966, pruned_loss=0.07171, over 3810901.61 frames. ], batch size: 64, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:50,923 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97692.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:35:56,735 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5214, 4.0201, 4.1847, 4.2032, 1.5741, 3.9460, 3.4894, 3.8778], + device='cuda:1'), covar=tensor([0.1465, 0.0925, 0.0641, 0.0604, 0.5689, 0.0935, 0.0645, 0.1132], + device='cuda:1'), in_proj_covar=tensor([0.0732, 0.0658, 0.0867, 0.0747, 0.0777, 0.0612, 0.0524, 0.0798], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:36:04,914 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:11,603 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:20,739 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 04:36:41,966 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:43,971 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-02 04:36:45,261 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:46,649 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:49,900 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1074, 1.3080, 1.7183, 1.1166, 2.5485, 3.3662, 3.1064, 3.6013], + device='cuda:1'), covar=tensor([0.1576, 0.3463, 0.2932, 0.2231, 0.0505, 0.0166, 0.0212, 0.0214], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0305, 0.0331, 0.0253, 0.0225, 0.0169, 0.0208, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:36:51,863 INFO [train.py:903] (1/4) Epoch 15, batch 2150, loss[loss=0.2156, simple_loss=0.2944, pruned_loss=0.06841, over 19854.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07084, over 3822885.75 frames. ], batch size: 52, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:37:12,262 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:37:49,741 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.862e+02 6.186e+02 7.185e+02 1.323e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 04:37:54,000 INFO [train.py:903] (1/4) Epoch 15, batch 2200, loss[loss=0.1949, simple_loss=0.2739, pruned_loss=0.05795, over 19722.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07069, over 3831343.84 frames. ], batch size: 51, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:37:55,550 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1050, 2.7837, 2.0125, 2.0649, 1.8237, 2.4296, 0.9180, 1.9289], + device='cuda:1'), covar=tensor([0.0494, 0.0456, 0.0571, 0.0893, 0.0933, 0.0840, 0.1064, 0.0877], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0340, 0.0336, 0.0365, 0.0438, 0.0364, 0.0318, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:38:26,774 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7255, 1.3340, 1.5359, 1.3784, 3.2116, 0.9509, 2.2860, 3.6688], + device='cuda:1'), covar=tensor([0.0406, 0.2742, 0.2730, 0.1943, 0.0737, 0.2678, 0.1310, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0375, 0.0350, 0.0366, 0.0329, 0.0356, 0.0338, 0.0350, 0.0375], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:38:28,035 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:33,740 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:57,678 INFO [train.py:903] (1/4) Epoch 15, batch 2250, loss[loss=0.2435, simple_loss=0.3199, pruned_loss=0.0835, over 19673.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2961, pruned_loss=0.07146, over 3824107.76 frames. 
], batch size: 59, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:39:09,185 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:39:56,865 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.977e+02 6.292e+02 8.077e+02 1.831e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 04:40:00,318 INFO [train.py:903] (1/4) Epoch 15, batch 2300, loss[loss=0.238, simple_loss=0.3097, pruned_loss=0.08312, over 19699.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2956, pruned_loss=0.07128, over 3818957.80 frames. ], batch size: 59, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:40:12,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 04:40:16,279 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:29,876 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:37,935 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8705, 1.9116, 2.1119, 2.5228, 1.9119, 2.4445, 2.2559, 2.0520], + device='cuda:1'), covar=tensor([0.3540, 0.2993, 0.1464, 0.1828, 0.3232, 0.1443, 0.3548, 0.2482], + device='cuda:1'), in_proj_covar=tensor([0.0834, 0.0871, 0.0671, 0.0903, 0.0817, 0.0750, 0.0806, 0.0735], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 04:41:01,537 INFO [train.py:903] (1/4) Epoch 15, batch 2350, loss[loss=0.2088, simple_loss=0.2974, pruned_loss=0.06007, over 19763.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2973, pruned_loss=0.07176, over 3821132.10 frames. ], batch size: 54, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:41:44,937 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 04:41:58,281 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.416e+02 6.244e+02 7.823e+02 1.587e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-04-02 04:41:58,343 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 04:42:02,750 INFO [train.py:903] (1/4) Epoch 15, batch 2400, loss[loss=0.1718, simple_loss=0.2507, pruned_loss=0.04649, over 19762.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2968, pruned_loss=0.0716, over 3819364.10 frames. ], batch size: 47, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:42:04,391 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:36,274 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:53,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:58,101 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98036.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:43:05,611 INFO [train.py:903] (1/4) Epoch 15, batch 2450, loss[loss=0.1831, simple_loss=0.262, pruned_loss=0.05212, over 19443.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.296, pruned_loss=0.07106, over 3822201.12 frames. 
], batch size: 48, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:43:47,635 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98075.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:43:54,226 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:05,272 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.305e+02 6.204e+02 8.091e+02 1.870e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 04:44:09,724 INFO [train.py:903] (1/4) Epoch 15, batch 2500, loss[loss=0.2567, simple_loss=0.3278, pruned_loss=0.09285, over 19675.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2964, pruned_loss=0.07101, over 3815479.75 frames. ], batch size: 59, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:44:19,640 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98100.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:25,570 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98105.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:27,999 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:46,010 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 04:44:51,235 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98124.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:00,593 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:12,196 INFO [train.py:903] (1/4) Epoch 15, batch 2550, loss[loss=0.2408, simple_loss=0.3217, pruned_loss=0.07993, over 19751.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2974, pruned_loss=0.07158, over 3813342.50 frames. ], batch size: 51, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:45:12,519 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4681, 3.9594, 4.1569, 4.1338, 1.7480, 3.8807, 3.3788, 3.8378], + device='cuda:1'), covar=tensor([0.1673, 0.1028, 0.0673, 0.0727, 0.5165, 0.0911, 0.0734, 0.1191], + device='cuda:1'), in_proj_covar=tensor([0.0724, 0.0654, 0.0863, 0.0742, 0.0770, 0.0609, 0.0521, 0.0788], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:45:23,242 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98151.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:45:52,140 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9262, 5.2894, 2.9925, 4.6140, 1.3308, 5.2699, 5.1904, 5.3731], + device='cuda:1'), covar=tensor([0.0358, 0.0867, 0.2016, 0.0720, 0.3892, 0.0683, 0.0781, 0.0953], + device='cuda:1'), in_proj_covar=tensor([0.0462, 0.0382, 0.0461, 0.0326, 0.0392, 0.0395, 0.0383, 0.0419], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:46:07,120 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-02 04:46:10,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 5.247e+02 6.339e+02 8.638e+02 2.352e+03, threshold=1.268e+03, percent-clipped=5.0 +2023-04-02 04:46:14,038 INFO [train.py:903] (1/4) Epoch 15, batch 2600, loss[loss=0.1889, simple_loss=0.2721, pruned_loss=0.05288, over 19809.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2965, pruned_loss=0.07123, over 3811902.69 frames. ], batch size: 49, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:18,022 INFO [train.py:903] (1/4) Epoch 15, batch 2650, loss[loss=0.2485, simple_loss=0.3302, pruned_loss=0.08342, over 19543.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2974, pruned_loss=0.07169, over 3817596.60 frames. ], batch size: 56, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:28,827 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:47:39,871 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 04:48:17,340 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:48:18,027 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.990e+02 6.118e+02 7.575e+02 1.335e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 04:48:21,600 INFO [train.py:903] (1/4) Epoch 15, batch 2700, loss[loss=0.2128, simple_loss=0.2927, pruned_loss=0.06642, over 19603.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.07194, over 3815032.47 frames. ], batch size: 57, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:48:29,611 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7689, 2.2534, 2.2491, 2.9111, 2.6243, 2.5165, 2.0007, 3.1030], + device='cuda:1'), covar=tensor([0.0789, 0.1635, 0.1285, 0.0906, 0.1278, 0.0421, 0.1308, 0.0490], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0351, 0.0297, 0.0244, 0.0295, 0.0245, 0.0290, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:48:47,474 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:49:24,161 INFO [train.py:903] (1/4) Epoch 15, batch 2750, loss[loss=0.1912, simple_loss=0.2789, pruned_loss=0.05175, over 19660.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2974, pruned_loss=0.07209, over 3814615.14 frames. 
], batch size: 53, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:49:46,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8724, 1.4165, 1.5488, 1.7252, 3.4068, 1.1887, 2.4354, 3.8203], + device='cuda:1'), covar=tensor([0.0402, 0.2825, 0.2862, 0.1738, 0.0709, 0.2433, 0.1219, 0.0249], + device='cuda:1'), in_proj_covar=tensor([0.0375, 0.0350, 0.0368, 0.0329, 0.0357, 0.0339, 0.0347, 0.0375], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:49:54,681 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:50:23,802 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.173e+02 6.107e+02 7.991e+02 1.431e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-04-02 04:50:27,282 INFO [train.py:903] (1/4) Epoch 15, batch 2800, loss[loss=0.2262, simple_loss=0.3123, pruned_loss=0.07007, over 19685.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.07158, over 3822349.63 frames. ], batch size: 59, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:50:48,801 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98407.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:08,186 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9256, 2.0152, 2.2742, 2.6793, 1.9151, 2.6011, 2.4553, 2.0601], + device='cuda:1'), covar=tensor([0.3937, 0.3479, 0.1559, 0.2014, 0.3731, 0.1724, 0.3947, 0.3034], + device='cuda:1'), in_proj_covar=tensor([0.0829, 0.0875, 0.0670, 0.0902, 0.0816, 0.0748, 0.0807, 0.0737], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 04:51:13,925 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:51:18,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98432.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:30,816 INFO [train.py:903] (1/4) Epoch 15, batch 2850, loss[loss=0.2102, simple_loss=0.2891, pruned_loss=0.06566, over 19730.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2957, pruned_loss=0.07126, over 3816869.70 frames. ], batch size: 51, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:51:44,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 04:52:03,918 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:52:30,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 5.164e+02 6.295e+02 8.927e+02 2.262e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 04:52:34,167 INFO [train.py:903] (1/4) Epoch 15, batch 2900, loss[loss=0.23, simple_loss=0.3048, pruned_loss=0.07764, over 17394.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2966, pruned_loss=0.07193, over 3801512.31 frames. ], batch size: 101, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:52:35,432 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 04:52:52,023 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0517, 1.8695, 1.6271, 2.2154, 1.9151, 1.8214, 1.7285, 2.0172], + device='cuda:1'), covar=tensor([0.1005, 0.1480, 0.1446, 0.0871, 0.1269, 0.0506, 0.1296, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0352, 0.0297, 0.0245, 0.0295, 0.0246, 0.0290, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 04:53:36,705 INFO [train.py:903] (1/4) Epoch 15, batch 2950, loss[loss=0.2546, simple_loss=0.3175, pruned_loss=0.09589, over 19671.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2972, pruned_loss=0.07248, over 3813085.58 frames. ], batch size: 58, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:53:47,983 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2917, 2.1516, 1.8838, 1.7916, 1.6300, 1.8776, 0.4931, 1.1402], + device='cuda:1'), covar=tensor([0.0500, 0.0484, 0.0388, 0.0669, 0.0977, 0.0651, 0.1075, 0.0862], + device='cuda:1'), in_proj_covar=tensor([0.0344, 0.0342, 0.0340, 0.0369, 0.0442, 0.0368, 0.0320, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:54:00,568 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:29,176 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:35,382 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.043e+02 6.244e+02 8.249e+02 2.456e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 04:54:38,823 INFO [train.py:903] (1/4) Epoch 15, batch 3000, loss[loss=0.1872, simple_loss=0.2644, pruned_loss=0.05503, over 19332.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2979, pruned_loss=0.07335, over 3797184.85 frames. ], batch size: 47, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:38,824 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 04:54:51,336 INFO [train.py:937] (1/4) Epoch 15, validation: loss=0.1735, simple_loss=0.2738, pruned_loss=0.0366, over 944034.00 frames. +2023-04-02 04:54:51,337 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 04:54:53,545 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 04:55:28,729 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:55:52,975 INFO [train.py:903] (1/4) Epoch 15, batch 3050, loss[loss=0.2047, simple_loss=0.2883, pruned_loss=0.06051, over 18076.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2977, pruned_loss=0.07301, over 3811254.61 frames. ], batch size: 83, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:55:57,893 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:56:51,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.607e+02 7.054e+02 9.171e+02 2.046e+03, threshold=1.411e+03, percent-clipped=6.0 +2023-04-02 04:56:54,314 INFO [train.py:903] (1/4) Epoch 15, batch 3100, loss[loss=0.264, simple_loss=0.3432, pruned_loss=0.09241, over 19752.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2978, pruned_loss=0.07279, over 3815367.01 frames. 
], batch size: 63, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:57:58,288 INFO [train.py:903] (1/4) Epoch 15, batch 3150, loss[loss=0.2076, simple_loss=0.2976, pruned_loss=0.05883, over 19673.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07293, over 3819939.63 frames. ], batch size: 59, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:58:26,300 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 04:58:31,122 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0644, 4.4610, 4.7872, 4.7850, 1.7988, 4.5064, 4.0050, 4.4813], + device='cuda:1'), covar=tensor([0.1486, 0.0711, 0.0535, 0.0558, 0.5329, 0.0638, 0.0581, 0.1019], + device='cuda:1'), in_proj_covar=tensor([0.0725, 0.0656, 0.0860, 0.0744, 0.0774, 0.0609, 0.0517, 0.0795], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 04:58:34,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:58:58,670 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.220e+02 6.366e+02 8.908e+02 1.802e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-02 04:59:01,103 INFO [train.py:903] (1/4) Epoch 15, batch 3200, loss[loss=0.2569, simple_loss=0.323, pruned_loss=0.0954, over 19534.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2965, pruned_loss=0.07199, over 3831828.53 frames. ], batch size: 64, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:59:33,978 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0066, 1.2959, 1.5664, 1.1795, 2.6199, 3.8054, 3.5146, 3.9561], + device='cuda:1'), covar=tensor([0.1766, 0.3579, 0.3297, 0.2295, 0.0613, 0.0165, 0.0195, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0303, 0.0330, 0.0251, 0.0223, 0.0168, 0.0206, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 04:59:59,407 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:02,401 INFO [train.py:903] (1/4) Epoch 15, batch 3250, loss[loss=0.2417, simple_loss=0.3305, pruned_loss=0.07645, over 18159.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.07216, over 3827285.64 frames. ], batch size: 83, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:00:29,951 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:56,791 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:59,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 4.885e+02 6.090e+02 7.322e+02 1.846e+03, threshold=1.218e+03, percent-clipped=3.0 +2023-04-02 05:01:02,394 INFO [train.py:903] (1/4) Epoch 15, batch 3300, loss[loss=0.1756, simple_loss=0.252, pruned_loss=0.04963, over 19362.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2977, pruned_loss=0.0725, over 3813186.76 frames. ], batch size: 47, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:01:08,196 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 05:01:20,961 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:25,246 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98908.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:30,541 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:41,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5882, 1.6893, 1.8625, 2.0006, 1.4434, 1.8611, 1.9606, 1.7597], + device='cuda:1'), covar=tensor([0.3898, 0.3126, 0.1642, 0.1910, 0.3364, 0.1758, 0.4372, 0.2960], + device='cuda:1'), in_proj_covar=tensor([0.0831, 0.0873, 0.0669, 0.0903, 0.0815, 0.0748, 0.0807, 0.0735], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 05:02:07,324 INFO [train.py:903] (1/4) Epoch 15, batch 3350, loss[loss=0.2194, simple_loss=0.3024, pruned_loss=0.0682, over 19597.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2982, pruned_loss=0.07263, over 3799148.95 frames. ], batch size: 57, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:02:09,397 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 05:03:06,853 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98989.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:03:07,561 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.628e+02 7.317e+02 9.748e+02 2.071e+03, threshold=1.463e+03, percent-clipped=8.0 +2023-04-02 05:03:09,815 INFO [train.py:903] (1/4) Epoch 15, batch 3400, loss[loss=0.211, simple_loss=0.2949, pruned_loss=0.06351, over 19595.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2976, pruned_loss=0.07246, over 3817805.71 frames. ], batch size: 57, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:03:44,058 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:04:10,160 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5253, 2.2732, 1.5958, 1.6852, 2.0798, 1.3395, 1.4649, 1.8437], + device='cuda:1'), covar=tensor([0.0944, 0.0648, 0.0979, 0.0573, 0.0509, 0.1093, 0.0642, 0.0514], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0307, 0.0324, 0.0251, 0.0239, 0.0327, 0.0293, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:04:10,819 INFO [train.py:903] (1/4) Epoch 15, batch 3450, loss[loss=0.2653, simple_loss=0.3312, pruned_loss=0.09974, over 19395.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2975, pruned_loss=0.07227, over 3818908.75 frames. ], batch size: 70, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:04:14,058 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 05:04:34,759 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2482, 1.1395, 1.2069, 1.3884, 1.1001, 1.3436, 1.3407, 1.2916], + device='cuda:1'), covar=tensor([0.0917, 0.1061, 0.1099, 0.0672, 0.0846, 0.0831, 0.0873, 0.0755], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0225, 0.0207, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:04:45,794 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:05:11,143 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.946e+02 5.886e+02 7.201e+02 1.354e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-02 05:05:12,313 INFO [train.py:903] (1/4) Epoch 15, batch 3500, loss[loss=0.2109, simple_loss=0.2872, pruned_loss=0.06731, over 19455.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2966, pruned_loss=0.0717, over 3831630.59 frames. ], batch size: 49, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:15,625 INFO [train.py:903] (1/4) Epoch 15, batch 3550, loss[loss=0.1931, simple_loss=0.2764, pruned_loss=0.05494, over 19565.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2966, pruned_loss=0.07224, over 3819255.55 frames. ], batch size: 52, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:18,419 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:48,304 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:53,014 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6231, 1.4207, 1.4078, 2.0434, 1.6108, 1.9206, 1.9813, 1.6660], + device='cuda:1'), covar=tensor([0.0818, 0.0949, 0.1023, 0.0779, 0.0830, 0.0712, 0.0832, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0226, 0.0208, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:07:18,042 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 4.734e+02 6.110e+02 7.809e+02 1.736e+03, threshold=1.222e+03, percent-clipped=8.0 +2023-04-02 05:07:19,077 INFO [train.py:903] (1/4) Epoch 15, batch 3600, loss[loss=0.2188, simple_loss=0.2871, pruned_loss=0.07532, over 19636.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.0722, over 3833959.07 frames. ], batch size: 50, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:20,334 INFO [train.py:903] (1/4) Epoch 15, batch 3650, loss[loss=0.2126, simple_loss=0.2962, pruned_loss=0.0645, over 18902.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2978, pruned_loss=0.07257, over 3826165.43 frames. 
], batch size: 74, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:26,432 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:32,040 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:37,706 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:03,806 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:15,281 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:20,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.562e+02 6.527e+02 8.007e+02 1.277e+03, threshold=1.305e+03, percent-clipped=3.0 +2023-04-02 05:09:21,908 INFO [train.py:903] (1/4) Epoch 15, batch 3700, loss[loss=0.2709, simple_loss=0.3391, pruned_loss=0.1014, over 19661.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2992, pruned_loss=0.07341, over 3819606.53 frames. ], batch size: 60, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:09:32,857 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:13,724 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99333.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:24,822 INFO [train.py:903] (1/4) Epoch 15, batch 3750, loss[loss=0.2138, simple_loss=0.3004, pruned_loss=0.06356, over 19491.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.299, pruned_loss=0.07317, over 3820629.45 frames. ], batch size: 64, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:10:25,120 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:40,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6296, 2.0624, 2.2692, 2.7443, 2.4581, 2.3200, 2.0275, 2.6875], + device='cuda:1'), covar=tensor([0.0899, 0.1833, 0.1294, 0.1030, 0.1333, 0.0487, 0.1257, 0.0621], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0354, 0.0297, 0.0244, 0.0298, 0.0249, 0.0292, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:10:56,150 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99367.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:01,984 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:26,374 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.684e+02 5.640e+02 6.861e+02 8.808e+02 1.909e+03, threshold=1.372e+03, percent-clipped=3.0 +2023-04-02 05:11:28,560 INFO [train.py:903] (1/4) Epoch 15, batch 3800, loss[loss=0.217, simple_loss=0.2908, pruned_loss=0.0716, over 19569.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07389, over 3807643.56 frames. 
], batch size: 52, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:11:41,815 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3471, 2.1444, 1.9166, 1.8990, 1.6317, 1.8003, 0.4950, 1.2406], + device='cuda:1'), covar=tensor([0.0467, 0.0524, 0.0451, 0.0690, 0.1022, 0.0814, 0.1185, 0.0900], + device='cuda:1'), in_proj_covar=tensor([0.0346, 0.0346, 0.0341, 0.0373, 0.0444, 0.0369, 0.0322, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:11:53,180 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:58,869 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 05:12:26,312 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:30,510 INFO [train.py:903] (1/4) Epoch 15, batch 3850, loss[loss=0.2233, simple_loss=0.3108, pruned_loss=0.06787, over 19614.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2991, pruned_loss=0.07354, over 3801078.37 frames. ], batch size: 57, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:12:37,667 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:44,235 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-02 05:13:25,909 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99486.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:13:32,483 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 4.923e+02 6.054e+02 7.410e+02 1.518e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 05:13:32,501 INFO [train.py:903] (1/4) Epoch 15, batch 3900, loss[loss=0.1861, simple_loss=0.2603, pruned_loss=0.05593, over 19853.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2982, pruned_loss=0.07293, over 3800265.64 frames. ], batch size: 52, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:18,206 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:14:33,478 INFO [train.py:903] (1/4) Epoch 15, batch 3950, loss[loss=0.23, simple_loss=0.3077, pruned_loss=0.07612, over 19776.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2976, pruned_loss=0.07261, over 3815213.45 frames. ], batch size: 56, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:41,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 05:15:28,304 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99586.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:35,962 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:36,880 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.329e+02 6.331e+02 8.679e+02 1.427e+03, threshold=1.266e+03, percent-clipped=6.0 +2023-04-02 05:15:36,899 INFO [train.py:903] (1/4) Epoch 15, batch 4000, loss[loss=0.2646, simple_loss=0.3317, pruned_loss=0.09874, over 19088.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2991, pruned_loss=0.07336, over 3808089.18 frames. 
], batch size: 69, lr: 5.51e-03, grad_scale: 8.0 +2023-04-02 05:15:54,644 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:14,225 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:20,951 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:23,025 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 05:16:23,146 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:30,447 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0234, 3.1657, 1.8523, 1.8842, 2.7802, 1.5632, 1.4332, 1.9871], + device='cuda:1'), covar=tensor([0.1219, 0.0599, 0.0955, 0.0889, 0.0543, 0.1208, 0.0894, 0.0660], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0305, 0.0321, 0.0250, 0.0239, 0.0325, 0.0290, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:16:38,158 INFO [train.py:903] (1/4) Epoch 15, batch 4050, loss[loss=0.2257, simple_loss=0.305, pruned_loss=0.07319, over 19541.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2984, pruned_loss=0.07269, over 3813663.07 frames. ], batch size: 56, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:16:45,324 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:50,828 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:32,691 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:39,405 INFO [train.py:903] (1/4) Epoch 15, batch 4100, loss[loss=0.2636, simple_loss=0.3377, pruned_loss=0.09477, over 19602.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2971, pruned_loss=0.07222, over 3820075.11 frames. ], batch size: 57, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:17:40,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.802e+02 5.566e+02 7.429e+02 9.161e+02 2.166e+03, threshold=1.486e+03, percent-clipped=8.0 +2023-04-02 05:17:46,992 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:54,944 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:58,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:14,908 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 05:18:25,800 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5515, 4.0829, 4.2679, 4.2543, 1.5232, 3.9943, 3.4684, 3.9549], + device='cuda:1'), covar=tensor([0.1493, 0.0778, 0.0588, 0.0617, 0.5547, 0.0828, 0.0667, 0.1081], + device='cuda:1'), in_proj_covar=tensor([0.0725, 0.0657, 0.0860, 0.0736, 0.0771, 0.0609, 0.0519, 0.0797], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 05:18:27,081 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:43,271 INFO [train.py:903] (1/4) Epoch 15, batch 4150, loss[loss=0.1948, simple_loss=0.2776, pruned_loss=0.05598, over 19604.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2969, pruned_loss=0.07142, over 3822839.01 frames. ], batch size: 50, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:18:47,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:11,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0515, 1.1945, 1.7040, 1.0187, 2.4212, 3.0256, 2.7422, 3.2151], + device='cuda:1'), covar=tensor([0.1612, 0.3661, 0.3038, 0.2328, 0.0518, 0.0187, 0.0265, 0.0270], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0304, 0.0332, 0.0252, 0.0226, 0.0169, 0.0207, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:19:13,809 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 05:19:31,718 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:35,410 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99784.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:45,181 INFO [train.py:903] (1/4) Epoch 15, batch 4200, loss[loss=0.1763, simple_loss=0.2529, pruned_loss=0.04987, over 18644.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2966, pruned_loss=0.07139, over 3826202.59 frames. ], batch size: 41, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:19:47,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.549e+02 6.772e+02 8.720e+02 1.402e+03, threshold=1.354e+03, percent-clipped=0.0 +2023-04-02 05:19:50,955 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 05:19:57,147 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:06,407 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:32,496 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:20:47,373 INFO [train.py:903] (1/4) Epoch 15, batch 4250, loss[loss=0.3191, simple_loss=0.3796, pruned_loss=0.1293, over 19623.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2957, pruned_loss=0.07107, over 3836732.47 frames. ], batch size: 61, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:03,965 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-02 05:21:15,068 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 05:21:47,822 INFO [train.py:903] (1/4) Epoch 15, batch 4300, loss[loss=0.2401, simple_loss=0.3114, pruned_loss=0.08441, over 19661.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2966, pruned_loss=0.07132, over 3844971.83 frames. ], batch size: 55, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:48,970 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.517e+02 6.494e+02 8.260e+02 1.741e+03, threshold=1.299e+03, percent-clipped=4.0 +2023-04-02 05:21:53,749 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:35,370 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99930.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:41,699 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 05:22:48,542 INFO [train.py:903] (1/4) Epoch 15, batch 4350, loss[loss=0.2118, simple_loss=0.2943, pruned_loss=0.06463, over 19795.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07226, over 3824639.06 frames. ], batch size: 56, lr: 5.50e-03, grad_scale: 4.0 +2023-04-02 05:22:52,432 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99945.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:23:01,033 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:15,990 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99962.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:45,440 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:51,927 INFO [train.py:903] (1/4) Epoch 15, batch 4400, loss[loss=0.218, simple_loss=0.2978, pruned_loss=0.06904, over 19687.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2961, pruned_loss=0.07133, over 3836607.88 frames. ], batch size: 59, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:23:53,157 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.844e+02 5.787e+02 7.470e+02 1.170e+03, threshold=1.157e+03, percent-clipped=0.0 +2023-04-02 05:23:56,390 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.1794, 1.3015, 1.6187, 1.8074, 3.5452, 1.3719, 2.8197, 3.8014], + device='cuda:1'), covar=tensor([0.0626, 0.3488, 0.3273, 0.2218, 0.1046, 0.2971, 0.1484, 0.0429], + device='cuda:1'), in_proj_covar=tensor([0.0379, 0.0349, 0.0370, 0.0329, 0.0356, 0.0339, 0.0350, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:24:06,750 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:23,724 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 05:24:31,846 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 05:24:34,564 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100025.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:24:35,770 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:47,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.92 vs. limit=5.0 +2023-04-02 05:24:57,046 INFO [train.py:903] (1/4) Epoch 15, batch 4450, loss[loss=0.248, simple_loss=0.3273, pruned_loss=0.08437, over 19299.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2966, pruned_loss=0.07168, over 3808560.98 frames. ], batch size: 66, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:24:57,237 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:00,853 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:07,801 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7162, 1.3638, 1.5688, 1.5270, 3.2482, 1.0694, 2.2457, 3.6325], + device='cuda:1'), covar=tensor([0.0438, 0.2683, 0.2687, 0.1774, 0.0710, 0.2603, 0.1316, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0379, 0.0350, 0.0369, 0.0330, 0.0357, 0.0339, 0.0350, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:25:14,926 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:25,300 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:47,697 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:59,166 INFO [train.py:903] (1/4) Epoch 15, batch 4500, loss[loss=0.2333, simple_loss=0.318, pruned_loss=0.07436, over 19618.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.07152, over 3812509.38 frames. ], batch size: 57, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:25:59,584 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4248, 1.4659, 1.7378, 1.6248, 2.5596, 2.1670, 2.5601, 1.1080], + device='cuda:1'), covar=tensor([0.2387, 0.4257, 0.2496, 0.1899, 0.1597, 0.2178, 0.1653, 0.4221], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0600, 0.0651, 0.0456, 0.0606, 0.0508, 0.0646, 0.0515], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:26:00,176 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.166e+02 6.659e+02 8.670e+02 1.796e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 05:27:01,359 INFO [train.py:903] (1/4) Epoch 15, batch 4550, loss[loss=0.1892, simple_loss=0.2747, pruned_loss=0.05184, over 19438.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2965, pruned_loss=0.07104, over 3815925.87 frames. ], batch size: 48, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:27:10,388 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-02 05:27:16,340 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:21,791 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:38,129 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 05:27:47,833 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:28:04,926 INFO [train.py:903] (1/4) Epoch 15, batch 4600, loss[loss=0.2127, simple_loss=0.2903, pruned_loss=0.06753, over 19658.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2959, pruned_loss=0.07048, over 3826516.79 frames. ], batch size: 53, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:28:06,058 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.707e+02 5.654e+02 7.541e+02 1.184e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-02 05:28:19,063 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100201.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:28:30,974 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1209, 5.2056, 6.0228, 5.9970, 1.8987, 5.6877, 4.7764, 5.6911], + device='cuda:1'), covar=tensor([0.1389, 0.0625, 0.0427, 0.0460, 0.5683, 0.0544, 0.0549, 0.0827], + device='cuda:1'), in_proj_covar=tensor([0.0728, 0.0664, 0.0865, 0.0744, 0.0771, 0.0612, 0.0522, 0.0799], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 05:28:47,089 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100226.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:29:08,985 INFO [train.py:903] (1/4) Epoch 15, batch 4650, loss[loss=0.2132, simple_loss=0.2842, pruned_loss=0.07108, over 19842.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2961, pruned_loss=0.07063, over 3832931.05 frames. ], batch size: 52, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:29:25,484 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 05:29:25,778 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:29:35,849 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 05:30:11,077 INFO [train.py:903] (1/4) Epoch 15, batch 4700, loss[loss=0.1772, simple_loss=0.2472, pruned_loss=0.05362, over 19741.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2953, pruned_loss=0.07058, over 3835876.28 frames. ], batch size: 46, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:30:12,227 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.601e+02 6.328e+02 8.331e+02 3.311e+03, threshold=1.266e+03, percent-clipped=13.0 +2023-04-02 05:30:22,215 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:33,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 05:30:50,868 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:55,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:13,902 INFO [train.py:903] (1/4) Epoch 15, batch 4750, loss[loss=0.2271, simple_loss=0.3006, pruned_loss=0.07681, over 19770.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2945, pruned_loss=0.06978, over 3837171.04 frames. ], batch size: 54, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:31:21,218 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:49,662 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100369.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:32:16,840 INFO [train.py:903] (1/4) Epoch 15, batch 4800, loss[loss=0.2001, simple_loss=0.2703, pruned_loss=0.06495, over 19707.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2947, pruned_loss=0.07005, over 3841060.04 frames. ], batch size: 46, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:32:18,029 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.713e+02 6.387e+02 8.455e+02 2.006e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 05:32:44,427 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:32:52,660 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:17,165 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:21,577 INFO [train.py:903] (1/4) Epoch 15, batch 4850, loss[loss=0.1824, simple_loss=0.2596, pruned_loss=0.05263, over 19375.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2962, pruned_loss=0.07108, over 3840217.87 frames. ], batch size: 47, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:33:48,708 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 05:34:10,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 05:34:14,880 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100484.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:34:15,715 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 05:34:17,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 05:34:24,793 INFO [train.py:903] (1/4) Epoch 15, batch 4900, loss[loss=0.1606, simple_loss=0.2395, pruned_loss=0.04086, over 19329.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2954, pruned_loss=0.07108, over 3831161.74 frames. ], batch size: 44, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:34:25,927 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.042e+02 5.846e+02 7.925e+02 1.600e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-02 05:34:25,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-02 05:34:46,436 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 05:35:19,182 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5926, 2.3593, 2.2980, 2.7712, 2.3652, 2.3655, 2.1278, 2.6625], + device='cuda:1'), covar=tensor([0.0872, 0.1565, 0.1222, 0.0904, 0.1303, 0.0439, 0.1204, 0.0587], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0355, 0.0297, 0.0242, 0.0298, 0.0245, 0.0292, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:35:27,073 INFO [train.py:903] (1/4) Epoch 15, batch 4950, loss[loss=0.1769, simple_loss=0.2565, pruned_loss=0.04861, over 19722.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2945, pruned_loss=0.07035, over 3832683.22 frames. ], batch size: 46, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:35:45,772 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 05:36:05,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2047, 5.1277, 6.1067, 6.0598, 2.0177, 5.7120, 4.9163, 5.7390], + device='cuda:1'), covar=tensor([0.1450, 0.0706, 0.0517, 0.0514, 0.5664, 0.0483, 0.0534, 0.1040], + device='cuda:1'), in_proj_covar=tensor([0.0735, 0.0669, 0.0873, 0.0750, 0.0778, 0.0619, 0.0528, 0.0808], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 05:36:09,664 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 05:36:29,678 INFO [train.py:903] (1/4) Epoch 15, batch 5000, loss[loss=0.213, simple_loss=0.2925, pruned_loss=0.06678, over 19605.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2956, pruned_loss=0.07114, over 3830226.24 frames. ], batch size: 50, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:36:31,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 5.199e+02 6.623e+02 8.418e+02 1.165e+03, threshold=1.325e+03, percent-clipped=0.0 +2023-04-02 05:36:40,510 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 05:36:40,821 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:36:52,295 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 05:37:33,609 INFO [train.py:903] (1/4) Epoch 15, batch 5050, loss[loss=0.2241, simple_loss=0.315, pruned_loss=0.06661, over 19762.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2944, pruned_loss=0.07053, over 3837527.85 frames. ], batch size: 56, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:38:09,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 05:38:37,492 INFO [train.py:903] (1/4) Epoch 15, batch 5100, loss[loss=0.2298, simple_loss=0.3088, pruned_loss=0.0754, over 19787.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.295, pruned_loss=0.07079, over 3823847.87 frames. 
], batch size: 56, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:38:39,901 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 5.134e+02 6.124e+02 7.830e+02 1.941e+03, threshold=1.225e+03, percent-clipped=4.0 +2023-04-02 05:38:49,377 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 05:38:51,815 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 05:38:57,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 05:38:57,698 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1211, 1.1990, 1.7393, 1.4528, 2.9249, 4.4969, 4.3763, 4.8593], + device='cuda:1'), covar=tensor([0.1713, 0.3870, 0.3386, 0.2184, 0.0585, 0.0191, 0.0178, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0302, 0.0332, 0.0251, 0.0225, 0.0169, 0.0206, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:39:05,807 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:39:37,104 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100740.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:39:38,888 INFO [train.py:903] (1/4) Epoch 15, batch 5150, loss[loss=0.2281, simple_loss=0.3167, pruned_loss=0.06974, over 18780.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2957, pruned_loss=0.07094, over 3818831.35 frames. ], batch size: 74, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:39:39,296 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9849, 2.6996, 2.1176, 2.0631, 1.6955, 2.2513, 0.9927, 1.9741], + device='cuda:1'), covar=tensor([0.0619, 0.0563, 0.0569, 0.0929, 0.1131, 0.0994, 0.1160, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0342, 0.0340, 0.0369, 0.0442, 0.0370, 0.0322, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:39:47,018 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100748.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:39:51,353 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 05:40:07,665 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:40:09,096 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100765.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:40:28,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 05:40:41,942 INFO [train.py:903] (1/4) Epoch 15, batch 5200, loss[loss=0.2142, simple_loss=0.2857, pruned_loss=0.07135, over 19605.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2959, pruned_loss=0.07128, over 3822896.34 frames. ], batch size: 50, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:40:44,524 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 5.325e+02 6.515e+02 8.501e+02 1.618e+03, threshold=1.303e+03, percent-clipped=5.0 +2023-04-02 05:40:58,645 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-02 05:41:43,051 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 05:41:47,605 INFO [train.py:903] (1/4) Epoch 15, batch 5250, loss[loss=0.2101, simple_loss=0.282, pruned_loss=0.06911, over 19451.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2975, pruned_loss=0.07211, over 3813257.48 frames. ], batch size: 49, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:20,902 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7902, 1.9287, 2.1314, 2.6085, 1.8963, 2.4892, 2.2186, 1.9392], + device='cuda:1'), covar=tensor([0.3821, 0.3422, 0.1694, 0.1940, 0.3610, 0.1628, 0.4183, 0.3048], + device='cuda:1'), in_proj_covar=tensor([0.0835, 0.0877, 0.0675, 0.0906, 0.0817, 0.0752, 0.0812, 0.0739], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 05:42:25,538 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1404, 1.2125, 1.7069, 1.4012, 2.7180, 3.6836, 3.4693, 4.0128], + device='cuda:1'), covar=tensor([0.1746, 0.3797, 0.3225, 0.2292, 0.0566, 0.0212, 0.0204, 0.0197], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0304, 0.0333, 0.0252, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:42:33,438 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100879.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:42:50,408 INFO [train.py:903] (1/4) Epoch 15, batch 5300, loss[loss=0.2015, simple_loss=0.2804, pruned_loss=0.06136, over 19592.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2966, pruned_loss=0.07157, over 3826355.81 frames. ], batch size: 52, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:52,714 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.131e+02 6.120e+02 8.353e+02 1.768e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 05:43:08,018 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 05:43:52,929 INFO [train.py:903] (1/4) Epoch 15, batch 5350, loss[loss=0.2097, simple_loss=0.2986, pruned_loss=0.06039, over 19670.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.296, pruned_loss=0.07119, over 3821482.32 frames. ], batch size: 55, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:29,379 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 05:44:30,857 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:44:50,766 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6925, 1.5844, 1.4474, 2.3618, 1.8359, 2.1251, 2.0455, 1.7741], + device='cuda:1'), covar=tensor([0.0815, 0.0915, 0.1045, 0.0702, 0.0776, 0.0681, 0.0848, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0221, 0.0242, 0.0226, 0.0209, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:44:56,005 INFO [train.py:903] (1/4) Epoch 15, batch 5400, loss[loss=0.2055, simple_loss=0.2924, pruned_loss=0.05934, over 18256.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.07199, over 3810282.20 frames. 
], batch size: 83, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:58,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.355e+02 6.832e+02 8.412e+02 2.240e+03, threshold=1.366e+03, percent-clipped=7.0 +2023-04-02 05:45:01,674 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:46:00,085 INFO [train.py:903] (1/4) Epoch 15, batch 5450, loss[loss=0.2359, simple_loss=0.3119, pruned_loss=0.07997, over 19682.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2977, pruned_loss=0.07206, over 3813359.38 frames. ], batch size: 60, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:46:08,863 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9868, 3.5943, 2.6811, 3.2611, 0.8639, 3.4759, 3.3759, 3.5026], + device='cuda:1'), covar=tensor([0.0853, 0.1217, 0.1901, 0.0922, 0.4208, 0.0834, 0.0934, 0.1221], + device='cuda:1'), in_proj_covar=tensor([0.0462, 0.0379, 0.0458, 0.0326, 0.0390, 0.0393, 0.0383, 0.0420], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:46:32,261 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4112, 2.1057, 1.5054, 1.4533, 1.9394, 1.0558, 1.2460, 1.8312], + device='cuda:1'), covar=tensor([0.1033, 0.0772, 0.1044, 0.0810, 0.0540, 0.1372, 0.0769, 0.0431], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0307, 0.0326, 0.0252, 0.0239, 0.0330, 0.0294, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:47:04,589 INFO [train.py:903] (1/4) Epoch 15, batch 5500, loss[loss=0.2205, simple_loss=0.3072, pruned_loss=0.06692, over 19700.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2964, pruned_loss=0.07149, over 3816489.77 frames. ], batch size: 59, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:47:04,772 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101092.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:47:05,797 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5731, 4.1336, 2.6438, 3.6209, 0.7829, 3.9835, 3.8826, 4.0478], + device='cuda:1'), covar=tensor([0.0662, 0.1076, 0.2077, 0.0871, 0.4444, 0.0739, 0.0893, 0.1046], + device='cuda:1'), in_proj_covar=tensor([0.0466, 0.0381, 0.0462, 0.0329, 0.0393, 0.0395, 0.0386, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:47:06,824 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 5.715e+02 6.860e+02 8.623e+02 1.531e+03, threshold=1.372e+03, percent-clipped=1.0 +2023-04-02 05:47:08,470 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7445, 1.6425, 1.5146, 2.1868, 1.6242, 2.0705, 2.0891, 1.8578], + device='cuda:1'), covar=tensor([0.0778, 0.0906, 0.1017, 0.0801, 0.0904, 0.0707, 0.0869, 0.0669], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0243, 0.0227, 0.0209, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:47:27,319 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-02 05:47:30,839 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8410, 1.7254, 1.4695, 1.8642, 1.7013, 1.3799, 1.4274, 1.7533], + device='cuda:1'), covar=tensor([0.1114, 0.1562, 0.1681, 0.1139, 0.1441, 0.0788, 0.1640, 0.0838], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0354, 0.0298, 0.0246, 0.0299, 0.0246, 0.0294, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:47:45,301 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:47:58,086 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:48:05,674 INFO [train.py:903] (1/4) Epoch 15, batch 5550, loss[loss=0.2436, simple_loss=0.3235, pruned_loss=0.08187, over 19676.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2969, pruned_loss=0.07188, over 3816216.51 frames. ], batch size: 53, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:48:12,790 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 05:48:24,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0782, 1.9738, 1.6937, 2.8098, 2.0438, 2.5980, 2.4759, 2.1499], + device='cuda:1'), covar=tensor([0.0761, 0.0830, 0.1056, 0.0828, 0.0838, 0.0663, 0.0883, 0.0667], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0222, 0.0243, 0.0227, 0.0209, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:48:28,803 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:49:04,235 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 05:49:08,929 INFO [train.py:903] (1/4) Epoch 15, batch 5600, loss[loss=0.2393, simple_loss=0.3176, pruned_loss=0.08053, over 19431.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2968, pruned_loss=0.07163, over 3825033.77 frames. ], batch size: 64, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:49:11,012 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 5.272e+02 6.555e+02 8.017e+02 1.573e+03, threshold=1.311e+03, percent-clipped=2.0 +2023-04-02 05:49:28,262 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101207.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:50:11,398 INFO [train.py:903] (1/4) Epoch 15, batch 5650, loss[loss=0.2848, simple_loss=0.3337, pruned_loss=0.118, over 13367.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2968, pruned_loss=0.07173, over 3823069.63 frames. ], batch size: 137, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:51:00,539 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 05:51:14,089 INFO [train.py:903] (1/4) Epoch 15, batch 5700, loss[loss=0.2212, simple_loss=0.3049, pruned_loss=0.06875, over 19649.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07229, over 3842373.71 frames. 
], batch size: 58, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:51:17,701 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.651e+02 6.610e+02 8.419e+02 1.957e+03, threshold=1.322e+03, percent-clipped=7.0 +2023-04-02 05:51:48,345 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8277, 1.6276, 1.5658, 1.9659, 1.6280, 1.6782, 1.5881, 1.8433], + device='cuda:1'), covar=tensor([0.1007, 0.1429, 0.1379, 0.0943, 0.1296, 0.0516, 0.1255, 0.0680], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0351, 0.0296, 0.0244, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:52:17,921 INFO [train.py:903] (1/4) Epoch 15, batch 5750, loss[loss=0.2003, simple_loss=0.286, pruned_loss=0.05733, over 18694.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2965, pruned_loss=0.07156, over 3852579.95 frames. ], batch size: 74, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:52:20,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 05:52:28,373 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 05:52:32,998 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 05:52:51,640 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2996, 1.2246, 1.5304, 1.1202, 2.4137, 3.3385, 3.0492, 3.5241], + device='cuda:1'), covar=tensor([0.1489, 0.3642, 0.3305, 0.2340, 0.0588, 0.0171, 0.0230, 0.0236], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0304, 0.0334, 0.0253, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:53:10,170 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5414, 1.3733, 1.5917, 1.5605, 3.0867, 1.0721, 2.2726, 3.4468], + device='cuda:1'), covar=tensor([0.0444, 0.2665, 0.2617, 0.1826, 0.0682, 0.2573, 0.1341, 0.0277], + device='cuda:1'), in_proj_covar=tensor([0.0382, 0.0353, 0.0370, 0.0336, 0.0362, 0.0341, 0.0354, 0.0374], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:53:21,267 INFO [train.py:903] (1/4) Epoch 15, batch 5800, loss[loss=0.1928, simple_loss=0.2755, pruned_loss=0.05503, over 19586.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.07146, over 3846084.49 frames. ], batch size: 52, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:53:23,495 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.554e+02 5.132e+02 6.075e+02 7.663e+02 2.298e+03, threshold=1.215e+03, percent-clipped=3.0 +2023-04-02 05:54:24,295 INFO [train.py:903] (1/4) Epoch 15, batch 5850, loss[loss=0.2076, simple_loss=0.2778, pruned_loss=0.06871, over 19779.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2964, pruned_loss=0.07135, over 3849795.49 frames. 
], batch size: 47, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:54:37,742 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0533, 2.0803, 2.2283, 2.6991, 2.0099, 2.6631, 2.2892, 2.0366], + device='cuda:1'), covar=tensor([0.4045, 0.3640, 0.1760, 0.2194, 0.3895, 0.1762, 0.4442, 0.3141], + device='cuda:1'), in_proj_covar=tensor([0.0836, 0.0882, 0.0679, 0.0910, 0.0823, 0.0756, 0.0815, 0.0744], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 05:54:52,493 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101463.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:54:58,858 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:55:24,159 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101488.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:55:28,446 INFO [train.py:903] (1/4) Epoch 15, batch 5900, loss[loss=0.2294, simple_loss=0.307, pruned_loss=0.0759, over 18875.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2973, pruned_loss=0.07171, over 3844510.51 frames. ], batch size: 74, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:55:30,755 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.163e+02 6.557e+02 7.983e+02 1.612e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-02 05:55:30,812 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 05:55:51,849 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 05:56:03,826 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9145, 1.5393, 1.4953, 1.8025, 1.5086, 1.6424, 1.4517, 1.8316], + device='cuda:1'), covar=tensor([0.0958, 0.1290, 0.1453, 0.0959, 0.1292, 0.0539, 0.1421, 0.0692], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0352, 0.0296, 0.0245, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 05:56:20,785 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4168, 1.4588, 1.6198, 1.5666, 2.2949, 2.0294, 2.2868, 0.9927], + device='cuda:1'), covar=tensor([0.2210, 0.4045, 0.2471, 0.1810, 0.1411, 0.2059, 0.1357, 0.4049], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0608, 0.0654, 0.0457, 0.0609, 0.0513, 0.0650, 0.0516], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:56:30,793 INFO [train.py:903] (1/4) Epoch 15, batch 5950, loss[loss=0.2201, simple_loss=0.2956, pruned_loss=0.0723, over 19575.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2965, pruned_loss=0.07125, over 3819219.22 frames. ], batch size: 52, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:24,024 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:57:34,460 INFO [train.py:903] (1/4) Epoch 15, batch 6000, loss[loss=0.2273, simple_loss=0.3109, pruned_loss=0.07181, over 19741.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2975, pruned_loss=0.07186, over 3828935.39 frames. 
], batch size: 63, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:34,460 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 05:57:47,185 INFO [train.py:937] (1/4) Epoch 15, validation: loss=0.1729, simple_loss=0.2735, pruned_loss=0.0362, over 944034.00 frames. +2023-04-02 05:57:47,186 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 05:57:49,631 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.208e+02 6.128e+02 8.316e+02 1.573e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 05:58:26,438 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3079, 2.0601, 2.0481, 2.8329, 2.0675, 2.6338, 2.3886, 2.4042], + device='cuda:1'), covar=tensor([0.0743, 0.0839, 0.0922, 0.0850, 0.0897, 0.0709, 0.0959, 0.0593], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0221, 0.0241, 0.0227, 0.0207, 0.0187, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 05:58:49,959 INFO [train.py:903] (1/4) Epoch 15, batch 6050, loss[loss=0.1823, simple_loss=0.2532, pruned_loss=0.05569, over 18987.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2973, pruned_loss=0.07207, over 3815023.00 frames. ], batch size: 42, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:17,804 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4536, 1.4497, 1.8083, 1.4386, 2.3418, 2.6806, 2.5538, 2.8225], + device='cuda:1'), covar=tensor([0.1325, 0.3079, 0.2554, 0.2277, 0.1047, 0.0352, 0.0259, 0.0338], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0305, 0.0334, 0.0253, 0.0225, 0.0169, 0.0208, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 05:59:52,010 INFO [train.py:903] (1/4) Epoch 15, batch 6100, loss[loss=0.2743, simple_loss=0.3412, pruned_loss=0.1037, over 18121.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.297, pruned_loss=0.07174, over 3817493.30 frames. ], batch size: 84, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:55,079 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.053e+02 5.047e+02 6.326e+02 7.867e+02 1.574e+03, threshold=1.265e+03, percent-clipped=9.0 +2023-04-02 06:00:05,531 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:00:56,520 INFO [train.py:903] (1/4) Epoch 15, batch 6150, loss[loss=0.1802, simple_loss=0.2587, pruned_loss=0.05088, over 19782.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2966, pruned_loss=0.07151, over 3822347.77 frames. ], batch size: 45, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 06:01:23,273 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. limit=5.0 +2023-04-02 06:01:23,845 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 06:01:59,351 INFO [train.py:903] (1/4) Epoch 15, batch 6200, loss[loss=0.2171, simple_loss=0.2971, pruned_loss=0.06851, over 19596.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2966, pruned_loss=0.07177, over 3826958.53 frames. 
], batch size: 52, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:02:01,561 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.959e+02 6.303e+02 7.991e+02 1.526e+03, threshold=1.261e+03, percent-clipped=1.0 +2023-04-02 06:02:59,281 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:02,065 INFO [train.py:903] (1/4) Epoch 15, batch 6250, loss[loss=0.2075, simple_loss=0.2763, pruned_loss=0.06929, over 19186.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2967, pruned_loss=0.07183, over 3821257.63 frames. ], batch size: 42, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:03:30,467 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 06:03:30,831 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:04:04,525 INFO [train.py:903] (1/4) Epoch 15, batch 6300, loss[loss=0.2069, simple_loss=0.2847, pruned_loss=0.06456, over 19855.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2963, pruned_loss=0.0712, over 3832795.65 frames. ], batch size: 52, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:04:07,996 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.483e+02 5.666e+02 6.616e+02 7.934e+02 1.912e+03, threshold=1.323e+03, percent-clipped=2.0 +2023-04-02 06:05:03,699 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101938.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:05:08,246 INFO [train.py:903] (1/4) Epoch 15, batch 6350, loss[loss=0.2344, simple_loss=0.3162, pruned_loss=0.07627, over 19509.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2964, pruned_loss=0.07111, over 3828651.75 frames. ], batch size: 64, lr: 5.45e-03, grad_scale: 2.0 +2023-04-02 06:05:17,916 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0381, 2.0066, 1.8035, 1.6313, 1.5929, 1.6543, 0.4501, 0.8864], + device='cuda:1'), covar=tensor([0.0482, 0.0481, 0.0318, 0.0544, 0.0978, 0.0619, 0.1037, 0.0918], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0341, 0.0338, 0.0370, 0.0442, 0.0370, 0.0323, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:05:55,737 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2072, 1.2788, 1.2171, 1.0250, 1.0746, 1.0989, 0.0707, 0.3537], + device='cuda:1'), covar=tensor([0.0568, 0.0545, 0.0330, 0.0471, 0.1049, 0.0477, 0.1066, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0340, 0.0338, 0.0370, 0.0442, 0.0370, 0.0323, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:06:11,821 INFO [train.py:903] (1/4) Epoch 15, batch 6400, loss[loss=0.2409, simple_loss=0.3181, pruned_loss=0.08184, over 17467.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.298, pruned_loss=0.07209, over 3817220.86 frames. 
], batch size: 101, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:06:16,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 4.874e+02 5.936e+02 7.490e+02 2.019e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 06:06:30,770 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:07:05,512 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8319, 1.6312, 1.8503, 1.8365, 4.3150, 1.1305, 2.4647, 4.6899], + device='cuda:1'), covar=tensor([0.0381, 0.2788, 0.2701, 0.1776, 0.0771, 0.2671, 0.1391, 0.0218], + device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0348, 0.0366, 0.0331, 0.0357, 0.0337, 0.0349, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:07:17,094 INFO [train.py:903] (1/4) Epoch 15, batch 6450, loss[loss=0.2272, simple_loss=0.3094, pruned_loss=0.07251, over 19853.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2978, pruned_loss=0.07199, over 3812908.16 frames. ], batch size: 52, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:07:23,152 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102046.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:08:03,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 06:08:08,946 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 06:08:20,704 INFO [train.py:903] (1/4) Epoch 15, batch 6500, loss[loss=0.2019, simple_loss=0.2719, pruned_loss=0.06599, over 19331.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.07196, over 3819894.18 frames. ], batch size: 44, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:08:25,529 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.860e+02 6.100e+02 7.866e+02 2.286e+03, threshold=1.220e+03, percent-clipped=9.0 +2023-04-02 06:08:26,642 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 06:08:41,072 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-02 06:09:23,516 INFO [train.py:903] (1/4) Epoch 15, batch 6550, loss[loss=0.2426, simple_loss=0.3146, pruned_loss=0.08536, over 19473.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.07196, over 3815803.84 frames. ], batch size: 64, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:09:47,187 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:10:26,465 INFO [train.py:903] (1/4) Epoch 15, batch 6600, loss[loss=0.2087, simple_loss=0.2771, pruned_loss=0.07018, over 19676.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2972, pruned_loss=0.07182, over 3823688.51 frames. 
], batch size: 53, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:10:31,177 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 4.937e+02 6.611e+02 8.175e+02 1.787e+03, threshold=1.322e+03, percent-clipped=6.0 +2023-04-02 06:11:10,228 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4288, 1.4785, 1.9986, 1.6113, 3.3090, 2.6415, 3.5291, 1.6252], + device='cuda:1'), covar=tensor([0.2378, 0.4151, 0.2553, 0.1807, 0.1386, 0.1927, 0.1539, 0.3780], + device='cuda:1'), in_proj_covar=tensor([0.0507, 0.0605, 0.0655, 0.0457, 0.0608, 0.0514, 0.0653, 0.0515], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:11:29,855 INFO [train.py:903] (1/4) Epoch 15, batch 6650, loss[loss=0.2299, simple_loss=0.3075, pruned_loss=0.07612, over 17575.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2955, pruned_loss=0.07071, over 3833414.77 frames. ], batch size: 101, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:11:39,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2018, 1.8385, 1.4348, 1.2165, 1.6214, 1.1987, 1.1285, 1.6638], + device='cuda:1'), covar=tensor([0.0708, 0.0691, 0.1015, 0.0726, 0.0464, 0.1156, 0.0598, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0309, 0.0329, 0.0254, 0.0241, 0.0329, 0.0293, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:12:01,500 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 06:12:20,466 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102282.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:12:21,725 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4170, 1.4611, 1.7909, 1.5664, 2.7345, 2.2747, 2.8610, 1.2463], + device='cuda:1'), covar=tensor([0.2385, 0.4085, 0.2437, 0.1888, 0.1424, 0.1977, 0.1396, 0.3974], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0603, 0.0654, 0.0456, 0.0607, 0.0512, 0.0650, 0.0514], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:12:33,719 INFO [train.py:903] (1/4) Epoch 15, batch 6700, loss[loss=0.2023, simple_loss=0.2705, pruned_loss=0.06709, over 19358.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2956, pruned_loss=0.07097, over 3834837.86 frames. ], batch size: 47, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:12:38,442 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 5.015e+02 6.423e+02 7.841e+02 1.581e+03, threshold=1.285e+03, percent-clipped=1.0 +2023-04-02 06:13:32,250 INFO [train.py:903] (1/4) Epoch 15, batch 6750, loss[loss=0.1876, simple_loss=0.2643, pruned_loss=0.05543, over 18648.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2955, pruned_loss=0.07034, over 3833652.30 frames. ], batch size: 41, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:13:40,334 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:13:42,065 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-02 06:13:49,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-04-02 06:13:57,400 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3264, 1.3378, 1.5370, 1.4783, 2.3310, 2.0993, 2.3192, 0.8829], + device='cuda:1'), covar=tensor([0.2114, 0.3757, 0.2363, 0.1700, 0.1332, 0.1867, 0.1283, 0.3788], + device='cuda:1'), in_proj_covar=tensor([0.0504, 0.0601, 0.0651, 0.0454, 0.0606, 0.0511, 0.0649, 0.0514], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:14:17,262 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9196, 2.0267, 2.2514, 2.6447, 1.9260, 2.4973, 2.3650, 2.1006], + device='cuda:1'), covar=tensor([0.3909, 0.3478, 0.1602, 0.1982, 0.3695, 0.1754, 0.4131, 0.2886], + device='cuda:1'), in_proj_covar=tensor([0.0832, 0.0879, 0.0675, 0.0903, 0.0819, 0.0754, 0.0810, 0.0739], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 06:14:28,435 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2035, 2.0033, 2.0127, 2.9990, 2.2006, 2.6103, 2.5519, 2.5201], + device='cuda:1'), covar=tensor([0.0816, 0.0919, 0.1000, 0.0755, 0.0866, 0.0745, 0.0928, 0.0604], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0224, 0.0244, 0.0228, 0.0210, 0.0190, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 06:14:30,399 INFO [train.py:903] (1/4) Epoch 15, batch 6800, loss[loss=0.1985, simple_loss=0.2706, pruned_loss=0.0632, over 19391.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2971, pruned_loss=0.07154, over 3827656.66 frames. ], batch size: 48, lr: 5.44e-03, grad_scale: 8.0 +2023-04-02 06:14:35,326 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.944e+02 6.022e+02 7.665e+02 3.022e+03, threshold=1.204e+03, percent-clipped=5.0 +2023-04-02 06:14:36,986 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102397.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:14:57,417 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:15:15,616 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 06:15:16,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 06:15:19,104 INFO [train.py:903] (1/4) Epoch 16, batch 0, loss[loss=0.2719, simple_loss=0.3319, pruned_loss=0.106, over 13218.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3319, pruned_loss=0.106, over 13218.00 frames. ], batch size: 136, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:15:19,104 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 06:15:29,714 INFO [train.py:937] (1/4) Epoch 16, validation: loss=0.1737, simple_loss=0.2745, pruned_loss=0.03646, over 944034.00 frames. +2023-04-02 06:15:29,715 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 06:15:45,604 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 06:15:58,368 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:24,717 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:33,054 INFO [train.py:903] (1/4) Epoch 16, batch 50, loss[loss=0.2048, simple_loss=0.2824, pruned_loss=0.06358, over 19770.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2888, pruned_loss=0.06884, over 870884.00 frames. ], batch size: 54, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:04,309 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.865e+02 6.426e+02 8.395e+02 1.744e+03, threshold=1.285e+03, percent-clipped=5.0 +2023-04-02 06:17:08,775 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 06:17:33,677 INFO [train.py:903] (1/4) Epoch 16, batch 100, loss[loss=0.1942, simple_loss=0.276, pruned_loss=0.05625, over 19576.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2904, pruned_loss=0.06908, over 1529852.74 frames. ], batch size: 52, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:47,707 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 06:18:34,758 INFO [train.py:903] (1/4) Epoch 16, batch 150, loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05945, over 19837.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2937, pruned_loss=0.07067, over 2047842.55 frames. ], batch size: 52, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:07,433 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.578e+02 6.638e+02 8.298e+02 1.665e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 06:19:36,772 INFO [train.py:903] (1/4) Epoch 16, batch 200, loss[loss=0.2466, simple_loss=0.3178, pruned_loss=0.08768, over 19676.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.297, pruned_loss=0.07199, over 2446815.40 frames. ], batch size: 60, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:38,846 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 06:20:18,503 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102653.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:20:38,298 INFO [train.py:903] (1/4) Epoch 16, batch 250, loss[loss=0.2153, simple_loss=0.2761, pruned_loss=0.07722, over 19752.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2969, pruned_loss=0.07197, over 2756083.33 frames. ], batch size: 45, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:20:50,385 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102678.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:21:12,172 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 5.293e+02 5.976e+02 7.347e+02 1.638e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 06:21:43,451 INFO [train.py:903] (1/4) Epoch 16, batch 300, loss[loss=0.1932, simple_loss=0.2616, pruned_loss=0.06239, over 19312.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.07223, over 2990763.79 frames. 
], batch size: 44, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:21:43,849 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102720.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:13,592 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:44,947 INFO [train.py:903] (1/4) Epoch 16, batch 350, loss[loss=0.2305, simple_loss=0.3071, pruned_loss=0.0769, over 19677.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2969, pruned_loss=0.07214, over 3175432.07 frames. ], batch size: 60, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:22:50,836 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 06:23:16,118 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.254e+02 6.186e+02 7.503e+02 2.205e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 06:23:47,556 INFO [train.py:903] (1/4) Epoch 16, batch 400, loss[loss=0.1921, simple_loss=0.2603, pruned_loss=0.06197, over 19749.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2979, pruned_loss=0.07235, over 3324594.30 frames. ], batch size: 46, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:24:49,412 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102869.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:24:50,281 INFO [train.py:903] (1/4) Epoch 16, batch 450, loss[loss=0.1701, simple_loss=0.249, pruned_loss=0.04564, over 19393.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2975, pruned_loss=0.07188, over 3440838.88 frames. ], batch size: 48, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:25:22,281 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.837e+02 5.955e+02 8.003e+02 1.401e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 06:25:24,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 06:25:25,879 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 06:25:32,995 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:25:52,008 INFO [train.py:903] (1/4) Epoch 16, batch 500, loss[loss=0.1554, simple_loss=0.2376, pruned_loss=0.03663, over 19390.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.0712, over 3535605.14 frames. ], batch size: 47, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:26:37,590 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3842, 2.4630, 2.5554, 3.2712, 2.3759, 3.2491, 2.7258, 2.4204], + device='cuda:1'), covar=tensor([0.4151, 0.3652, 0.1704, 0.2097, 0.3997, 0.1706, 0.4060, 0.2875], + device='cuda:1'), in_proj_covar=tensor([0.0837, 0.0886, 0.0679, 0.0906, 0.0824, 0.0758, 0.0814, 0.0743], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 06:26:54,173 INFO [train.py:903] (1/4) Epoch 16, batch 550, loss[loss=0.2072, simple_loss=0.2917, pruned_loss=0.06141, over 19579.00 frames. ], tot_loss[loss=0.22, simple_loss=0.297, pruned_loss=0.07153, over 3599039.92 frames. ], batch size: 52, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:27:00,535 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.43 vs. 
limit=5.0 +2023-04-02 06:27:18,359 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9092, 1.2519, 1.5348, 0.6318, 2.0311, 2.4492, 2.1501, 2.5955], + device='cuda:1'), covar=tensor([0.1557, 0.3513, 0.3139, 0.2506, 0.0548, 0.0272, 0.0343, 0.0331], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0306, 0.0335, 0.0256, 0.0228, 0.0171, 0.0211, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:27:24,958 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.789e+02 5.227e+02 6.443e+02 7.702e+02 1.436e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 06:27:54,425 INFO [train.py:903] (1/4) Epoch 16, batch 600, loss[loss=0.2376, simple_loss=0.3025, pruned_loss=0.08632, over 19583.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2966, pruned_loss=0.07127, over 3647536.12 frames. ], batch size: 52, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:28:37,067 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 06:28:55,609 INFO [train.py:903] (1/4) Epoch 16, batch 650, loss[loss=0.1946, simple_loss=0.2783, pruned_loss=0.05545, over 19542.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07129, over 3685268.79 frames. ], batch size: 56, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:29:28,782 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.715e+02 6.008e+02 7.942e+02 1.451e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-04-02 06:29:53,906 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103116.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:29:58,298 INFO [train.py:903] (1/4) Epoch 16, batch 700, loss[loss=0.1784, simple_loss=0.2567, pruned_loss=0.05006, over 19754.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2967, pruned_loss=0.0714, over 3722127.32 frames. ], batch size: 46, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:30:34,540 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 06:30:35,138 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103150.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:31:00,302 INFO [train.py:903] (1/4) Epoch 16, batch 750, loss[loss=0.2738, simple_loss=0.3398, pruned_loss=0.1039, over 19606.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2977, pruned_loss=0.07238, over 3737046.05 frames. ], batch size: 61, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:31:33,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.236e+02 6.262e+02 8.063e+02 1.865e+03, threshold=1.252e+03, percent-clipped=5.0 +2023-04-02 06:31:55,090 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:03,182 INFO [train.py:903] (1/4) Epoch 16, batch 800, loss[loss=0.245, simple_loss=0.3182, pruned_loss=0.08588, over 18003.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2982, pruned_loss=0.07269, over 3744505.68 frames. ], batch size: 83, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:32:18,120 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 06:32:26,524 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1126, 1.7836, 1.4600, 1.1710, 1.6145, 1.2174, 1.0850, 1.6489], + device='cuda:1'), covar=tensor([0.0780, 0.0812, 0.1023, 0.0763, 0.0524, 0.1185, 0.0636, 0.0391], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0309, 0.0330, 0.0254, 0.0242, 0.0330, 0.0292, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:32:38,488 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103249.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:52,100 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0859, 5.1148, 5.9450, 5.9181, 1.8844, 5.6137, 4.6076, 5.5529], + device='cuda:1'), covar=tensor([0.1528, 0.0801, 0.0566, 0.0540, 0.5939, 0.0612, 0.0572, 0.1212], + device='cuda:1'), in_proj_covar=tensor([0.0730, 0.0666, 0.0869, 0.0746, 0.0772, 0.0615, 0.0519, 0.0802], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 06:33:04,736 INFO [train.py:903] (1/4) Epoch 16, batch 850, loss[loss=0.2144, simple_loss=0.2872, pruned_loss=0.07079, over 19472.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2962, pruned_loss=0.07141, over 3768176.84 frames. ], batch size: 49, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:33:38,414 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.865e+02 6.263e+02 7.829e+02 1.710e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 06:33:40,861 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7889, 4.0025, 4.3806, 4.3724, 2.6636, 4.0979, 3.7633, 4.1537], + device='cuda:1'), covar=tensor([0.1237, 0.2542, 0.0549, 0.0602, 0.3893, 0.0895, 0.0511, 0.0888], + device='cuda:1'), in_proj_covar=tensor([0.0730, 0.0665, 0.0870, 0.0747, 0.0770, 0.0616, 0.0520, 0.0802], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 06:33:44,513 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4042, 1.5479, 1.5727, 1.9061, 1.4751, 1.6774, 1.7609, 1.4066], + device='cuda:1'), covar=tensor([0.4454, 0.3853, 0.2473, 0.2479, 0.3727, 0.2174, 0.5398, 0.4400], + device='cuda:1'), in_proj_covar=tensor([0.0841, 0.0891, 0.0684, 0.0911, 0.0828, 0.0764, 0.0814, 0.0747], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 06:33:57,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 06:34:06,719 INFO [train.py:903] (1/4) Epoch 16, batch 900, loss[loss=0.1833, simple_loss=0.2693, pruned_loss=0.04863, over 19609.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.07154, over 3783082.35 frames. 
], batch size: 50, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:34:17,425 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:01,541 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:06,280 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7578, 2.6185, 1.9869, 1.8967, 1.8622, 2.1437, 1.2583, 1.9524], + device='cuda:1'), covar=tensor([0.0575, 0.0537, 0.0635, 0.0951, 0.0943, 0.0910, 0.1021, 0.0837], + device='cuda:1'), in_proj_covar=tensor([0.0346, 0.0338, 0.0336, 0.0364, 0.0437, 0.0367, 0.0320, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:35:08,179 INFO [train.py:903] (1/4) Epoch 16, batch 950, loss[loss=0.2125, simple_loss=0.2984, pruned_loss=0.06333, over 19538.00 frames. ], tot_loss[loss=0.219, simple_loss=0.296, pruned_loss=0.07103, over 3808300.05 frames. ], batch size: 56, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:35:13,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 06:35:19,093 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2362, 5.6318, 2.8743, 4.8766, 1.1389, 5.5887, 5.5376, 5.7012], + device='cuda:1'), covar=tensor([0.0377, 0.0789, 0.2034, 0.0674, 0.3997, 0.0558, 0.0735, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0384, 0.0460, 0.0329, 0.0391, 0.0398, 0.0392, 0.0427], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:35:27,489 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103384.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:35:40,817 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.367e+02 6.234e+02 7.823e+02 2.113e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-02 06:36:12,129 INFO [train.py:903] (1/4) Epoch 16, batch 1000, loss[loss=0.2795, simple_loss=0.3542, pruned_loss=0.1024, over 19723.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.07157, over 3814661.21 frames. ], batch size: 63, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:03,132 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103460.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:07,731 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 06:37:14,579 INFO [train.py:903] (1/4) Epoch 16, batch 1050, loss[loss=0.2141, simple_loss=0.2831, pruned_loss=0.07255, over 19609.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2964, pruned_loss=0.07203, over 3830727.94 frames. ], batch size: 50, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:43,713 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:45,897 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.497e+02 6.450e+02 8.583e+02 2.663e+03, threshold=1.290e+03, percent-clipped=6.0 +2023-04-02 06:37:49,187 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-02 06:38:15,970 INFO [train.py:903] (1/4) Epoch 16, batch 1100, loss[loss=0.2543, simple_loss=0.3387, pruned_loss=0.08496, over 19608.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.298, pruned_loss=0.0726, over 3818908.33 frames. ], batch size: 61, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:38:21,841 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7855, 4.2833, 4.4814, 4.4667, 1.7055, 4.2008, 3.6680, 4.1627], + device='cuda:1'), covar=tensor([0.1476, 0.0782, 0.0552, 0.0606, 0.5493, 0.0812, 0.0601, 0.1071], + device='cuda:1'), in_proj_covar=tensor([0.0741, 0.0676, 0.0879, 0.0754, 0.0780, 0.0623, 0.0524, 0.0810], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 06:39:18,053 INFO [train.py:903] (1/4) Epoch 16, batch 1150, loss[loss=0.2175, simple_loss=0.2911, pruned_loss=0.07196, over 19687.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2977, pruned_loss=0.07209, over 3817463.03 frames. ], batch size: 53, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:39:24,748 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103575.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:37,382 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:50,620 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.106e+02 6.190e+02 8.719e+02 1.567e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 06:39:58,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4852, 1.5285, 2.0102, 1.6770, 3.0578, 2.5714, 3.3614, 1.4731], + device='cuda:1'), covar=tensor([0.2287, 0.4000, 0.2525, 0.1851, 0.1513, 0.1976, 0.1538, 0.4012], + device='cuda:1'), in_proj_covar=tensor([0.0501, 0.0599, 0.0653, 0.0453, 0.0604, 0.0509, 0.0647, 0.0514], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:40:06,805 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:06,846 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:21,166 INFO [train.py:903] (1/4) Epoch 16, batch 1200, loss[loss=0.1849, simple_loss=0.2642, pruned_loss=0.05278, over 19402.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2968, pruned_loss=0.07149, over 3827177.65 frames. ], batch size: 48, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:40:21,584 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:52,908 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:54,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 06:41:24,587 INFO [train.py:903] (1/4) Epoch 16, batch 1250, loss[loss=0.2166, simple_loss=0.2941, pruned_loss=0.06957, over 19739.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2975, pruned_loss=0.07168, over 3843113.24 frames. 
], batch size: 51, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:41:56,584 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.406e+02 4.955e+02 6.144e+02 7.729e+02 1.641e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 06:42:20,680 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4371, 1.4649, 2.1754, 1.8189, 3.1110, 4.7465, 4.6885, 5.2202], + device='cuda:1'), covar=tensor([0.1505, 0.3432, 0.2804, 0.1897, 0.0527, 0.0200, 0.0150, 0.0155], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0307, 0.0336, 0.0257, 0.0228, 0.0172, 0.0211, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:42:24,690 INFO [train.py:903] (1/4) Epoch 16, batch 1300, loss[loss=0.229, simple_loss=0.3087, pruned_loss=0.07469, over 18059.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.07159, over 3841057.39 frames. ], batch size: 83, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:42:35,158 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103728.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:42:53,056 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8273, 1.9611, 2.2257, 2.6513, 1.8753, 2.4244, 2.3752, 2.0670], + device='cuda:1'), covar=tensor([0.3893, 0.3377, 0.1633, 0.1928, 0.3712, 0.1781, 0.3907, 0.2890], + device='cuda:1'), in_proj_covar=tensor([0.0838, 0.0885, 0.0682, 0.0910, 0.0826, 0.0763, 0.0811, 0.0744], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 06:43:12,120 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3652, 3.9660, 2.5938, 3.4712, 0.8649, 3.8700, 3.7573, 3.8157], + device='cuda:1'), covar=tensor([0.0675, 0.1091, 0.1911, 0.0911, 0.3891, 0.0718, 0.0845, 0.1075], + device='cuda:1'), in_proj_covar=tensor([0.0473, 0.0386, 0.0462, 0.0334, 0.0395, 0.0400, 0.0395, 0.0431], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:43:12,210 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:26,084 INFO [train.py:903] (1/4) Epoch 16, batch 1350, loss[loss=0.2014, simple_loss=0.2778, pruned_loss=0.06248, over 19590.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2968, pruned_loss=0.07158, over 3840775.24 frames. ], batch size: 52, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:43:43,807 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:47,409 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.01 vs. limit=5.0 +2023-04-02 06:43:59,428 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 5.214e+02 6.633e+02 9.392e+02 2.118e+03, threshold=1.327e+03, percent-clipped=8.0 +2023-04-02 06:44:30,120 INFO [train.py:903] (1/4) Epoch 16, batch 1400, loss[loss=0.2201, simple_loss=0.2984, pruned_loss=0.07087, over 19381.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2965, pruned_loss=0.07166, over 3825555.37 frames. 
], batch size: 70, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:44:43,946 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103831.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:44:57,984 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103843.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:45:14,492 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103856.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:20,193 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6945, 1.4790, 1.4022, 1.7501, 1.4804, 1.4592, 1.3888, 1.6461], + device='cuda:1'), covar=tensor([0.0957, 0.1308, 0.1358, 0.0894, 0.1189, 0.0554, 0.1296, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0354, 0.0298, 0.0246, 0.0301, 0.0247, 0.0293, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:45:26,666 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:32,154 INFO [train.py:903] (1/4) Epoch 16, batch 1450, loss[loss=0.2146, simple_loss=0.2958, pruned_loss=0.0667, over 18788.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2965, pruned_loss=0.07175, over 3830331.82 frames. ], batch size: 74, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:45:32,196 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 06:45:42,907 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3754, 3.9907, 2.6400, 3.6579, 1.0638, 3.8043, 3.8175, 3.8782], + device='cuda:1'), covar=tensor([0.0618, 0.1052, 0.1989, 0.0757, 0.3781, 0.0773, 0.0851, 0.0942], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0385, 0.0461, 0.0331, 0.0392, 0.0398, 0.0393, 0.0430], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:45:56,453 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:00,956 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103894.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:03,876 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.072e+02 4.634e+02 5.962e+02 7.073e+02 1.523e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-02 06:46:33,192 INFO [train.py:903] (1/4) Epoch 16, batch 1500, loss[loss=0.2748, simple_loss=0.342, pruned_loss=0.1038, over 19776.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2974, pruned_loss=0.07204, over 3823632.41 frames. ], batch size: 56, lr: 5.23e-03, grad_scale: 16.0 +2023-04-02 06:47:20,908 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 06:47:35,232 INFO [train.py:903] (1/4) Epoch 16, batch 1550, loss[loss=0.2275, simple_loss=0.3068, pruned_loss=0.07414, over 19522.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2964, pruned_loss=0.07122, over 3836461.14 frames. ], batch size: 54, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:48:09,256 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.500e+02 6.727e+02 8.636e+02 1.580e+03, threshold=1.345e+03, percent-clipped=8.0 +2023-04-02 06:48:16,801 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-02 06:48:24,314 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:32,669 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:39,357 INFO [train.py:903] (1/4) Epoch 16, batch 1600, loss[loss=0.1911, simple_loss=0.2684, pruned_loss=0.05691, over 19756.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.295, pruned_loss=0.07018, over 3837580.84 frames. ], batch size: 51, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:49:05,526 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 06:49:10,531 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9951, 1.1702, 1.4783, 0.5850, 2.0312, 2.4314, 2.1075, 2.5696], + device='cuda:1'), covar=tensor([0.1544, 0.3720, 0.3248, 0.2615, 0.0561, 0.0255, 0.0343, 0.0338], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0307, 0.0335, 0.0257, 0.0228, 0.0172, 0.0209, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 06:49:38,437 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:49:41,614 INFO [train.py:903] (1/4) Epoch 16, batch 1650, loss[loss=0.2013, simple_loss=0.2718, pruned_loss=0.06538, over 19759.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2966, pruned_loss=0.071, over 3838235.55 frames. ], batch size: 46, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:14,808 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.025e+02 5.150e+02 6.179e+02 7.587e+02 1.568e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 06:50:18,560 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104099.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:50:21,715 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:50:37,185 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5677, 1.1360, 1.3875, 1.1838, 2.2059, 0.9742, 2.1028, 2.4186], + device='cuda:1'), covar=tensor([0.0708, 0.2716, 0.2693, 0.1695, 0.0895, 0.2139, 0.0972, 0.0474], + device='cuda:1'), in_proj_covar=tensor([0.0382, 0.0351, 0.0370, 0.0335, 0.0358, 0.0341, 0.0352, 0.0376], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:50:43,582 INFO [train.py:903] (1/4) Epoch 16, batch 1700, loss[loss=0.2527, simple_loss=0.3341, pruned_loss=0.08563, over 19492.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2957, pruned_loss=0.07052, over 3851268.59 frames. ], batch size: 64, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:48,577 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104124.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:50:51,812 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104127.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:51:25,773 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-02 06:51:29,501 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9446, 1.8316, 1.6271, 2.1636, 1.9319, 1.8359, 1.7477, 2.0336], + device='cuda:1'), covar=tensor([0.1000, 0.1607, 0.1377, 0.0919, 0.1261, 0.0507, 0.1248, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0356, 0.0300, 0.0249, 0.0303, 0.0249, 0.0295, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:51:44,976 INFO [train.py:903] (1/4) Epoch 16, batch 1750, loss[loss=0.1983, simple_loss=0.2826, pruned_loss=0.05693, over 19655.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2968, pruned_loss=0.07101, over 3848205.08 frames. ], batch size: 53, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:52:19,031 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.884e+02 5.867e+02 6.930e+02 2.034e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 06:52:44,495 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:52:48,734 INFO [train.py:903] (1/4) Epoch 16, batch 1800, loss[loss=0.1966, simple_loss=0.2683, pruned_loss=0.06248, over 19740.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2975, pruned_loss=0.0714, over 3834691.82 frames. ], batch size: 47, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:53:09,914 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:11,571 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 06:53:14,849 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:46,191 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 06:53:49,827 INFO [train.py:903] (1/4) Epoch 16, batch 1850, loss[loss=0.1954, simple_loss=0.268, pruned_loss=0.06134, over 19383.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2966, pruned_loss=0.07138, over 3826133.71 frames. ], batch size: 47, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:54:21,796 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 06:54:22,914 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.248e+02 6.749e+02 7.716e+02 1.558e+03, threshold=1.350e+03, percent-clipped=5.0 +2023-04-02 06:54:26,627 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 06:54:46,616 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1447, 1.2653, 1.6104, 1.3809, 2.7480, 1.1505, 2.1034, 2.9856], + device='cuda:1'), covar=tensor([0.0534, 0.2612, 0.2434, 0.1681, 0.0722, 0.2136, 0.1113, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0381, 0.0349, 0.0367, 0.0334, 0.0357, 0.0339, 0.0351, 0.0374], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 06:54:51,925 INFO [train.py:903] (1/4) Epoch 16, batch 1900, loss[loss=0.2066, simple_loss=0.2983, pruned_loss=0.05743, over 19773.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.07121, over 3828293.11 frames. 
], batch size: 54, lr: 5.22e-03, grad_scale: 4.0 +2023-04-02 06:55:09,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 06:55:15,645 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 06:55:31,887 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:34,127 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:39,871 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 06:55:41,199 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:53,983 INFO [train.py:903] (1/4) Epoch 16, batch 1950, loss[loss=0.2352, simple_loss=0.3117, pruned_loss=0.07934, over 19600.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2964, pruned_loss=0.07095, over 3830148.45 frames. ], batch size: 57, lr: 5.21e-03, grad_scale: 4.0 +2023-04-02 06:56:30,731 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.688e+02 6.377e+02 8.120e+02 1.703e+03, threshold=1.275e+03, percent-clipped=4.0 +2023-04-02 06:56:46,835 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104411.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:56:58,267 INFO [train.py:903] (1/4) Epoch 16, batch 2000, loss[loss=0.2089, simple_loss=0.2957, pruned_loss=0.06109, over 19593.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2956, pruned_loss=0.07063, over 3813962.84 frames. ], batch size: 61, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:57:57,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 06:57:57,641 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:00,766 INFO [train.py:903] (1/4) Epoch 16, batch 2050, loss[loss=0.249, simple_loss=0.3136, pruned_loss=0.0922, over 13589.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2963, pruned_loss=0.07089, over 3810883.42 frames. ], batch size: 136, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:58:04,673 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:05,641 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:09,153 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 06:58:14,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 06:58:15,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-02 06:58:35,726 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.885e+02 6.080e+02 8.555e+02 1.693e+03, threshold=1.216e+03, percent-clipped=6.0 +2023-04-02 06:58:36,108 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:36,139 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:39,798 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 06:58:40,119 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 06:59:02,741 INFO [train.py:903] (1/4) Epoch 16, batch 2100, loss[loss=0.2647, simple_loss=0.3208, pruned_loss=0.1043, over 13239.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2968, pruned_loss=0.07141, over 3796068.47 frames. ], batch size: 135, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:59:06,336 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:09,922 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:33,812 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 06:59:55,546 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 07:00:04,693 INFO [train.py:903] (1/4) Epoch 16, batch 2150, loss[loss=0.2644, simple_loss=0.3331, pruned_loss=0.09782, over 17320.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2978, pruned_loss=0.07179, over 3799515.79 frames. ], batch size: 101, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:00:13,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4776, 1.3465, 1.4086, 1.8052, 1.4046, 1.7182, 1.7791, 1.5917], + device='cuda:1'), covar=tensor([0.0913, 0.1033, 0.1074, 0.0701, 0.0822, 0.0771, 0.0832, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0222, 0.0223, 0.0243, 0.0225, 0.0210, 0.0190, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 07:00:27,137 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9275, 4.4738, 2.6303, 3.8644, 0.7878, 4.4109, 4.3248, 4.4210], + device='cuda:1'), covar=tensor([0.0575, 0.1041, 0.1993, 0.0742, 0.4076, 0.0601, 0.0756, 0.0890], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0383, 0.0462, 0.0331, 0.0391, 0.0398, 0.0393, 0.0426], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:00:27,628 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 07:00:39,765 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.547e+02 6.907e+02 8.298e+02 2.194e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-02 07:00:53,794 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:01:08,301 INFO [train.py:903] (1/4) Epoch 16, batch 2200, loss[loss=0.2314, simple_loss=0.3085, pruned_loss=0.0772, over 19762.00 frames. 
], tot_loss[loss=0.2212, simple_loss=0.2981, pruned_loss=0.07213, over 3808673.91 frames. ], batch size: 56, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:01:11,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9916, 2.0792, 2.2363, 2.7431, 1.9791, 2.5119, 2.4241, 2.0702], + device='cuda:1'), covar=tensor([0.3975, 0.3564, 0.1689, 0.2151, 0.3817, 0.1933, 0.3844, 0.2986], + device='cuda:1'), in_proj_covar=tensor([0.0842, 0.0888, 0.0680, 0.0907, 0.0822, 0.0760, 0.0810, 0.0743], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 07:01:12,378 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1530, 2.0172, 1.8632, 1.7262, 1.5128, 1.6798, 0.3954, 0.9695], + device='cuda:1'), covar=tensor([0.0460, 0.0524, 0.0402, 0.0674, 0.1087, 0.0782, 0.1166, 0.0944], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0345, 0.0342, 0.0371, 0.0443, 0.0373, 0.0325, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:01:26,397 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:02:12,264 INFO [train.py:903] (1/4) Epoch 16, batch 2250, loss[loss=0.2475, simple_loss=0.3161, pruned_loss=0.08939, over 18734.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.298, pruned_loss=0.07173, over 3823448.40 frames. ], batch size: 74, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:02:46,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.921e+02 5.898e+02 6.952e+02 1.646e+03, threshold=1.180e+03, percent-clipped=1.0 +2023-04-02 07:03:04,277 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2410, 1.1364, 1.2023, 1.3901, 1.1133, 1.2908, 1.3296, 1.2417], + device='cuda:1'), covar=tensor([0.0874, 0.1029, 0.1067, 0.0640, 0.0810, 0.0879, 0.0826, 0.0815], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0222, 0.0241, 0.0225, 0.0209, 0.0190, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 07:03:15,202 INFO [train.py:903] (1/4) Epoch 16, batch 2300, loss[loss=0.2322, simple_loss=0.2996, pruned_loss=0.0824, over 19474.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2986, pruned_loss=0.07219, over 3824283.49 frames. ], batch size: 49, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:03:19,228 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:20,851 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 07:03:27,440 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:29,362 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-02 07:03:51,604 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104748.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:59,864 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:12,348 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2904, 3.7378, 3.8516, 3.8669, 1.5483, 3.6038, 3.2079, 3.5786], + device='cuda:1'), covar=tensor([0.1503, 0.1034, 0.0666, 0.0647, 0.5402, 0.0951, 0.0646, 0.1204], + device='cuda:1'), in_proj_covar=tensor([0.0726, 0.0666, 0.0869, 0.0746, 0.0768, 0.0618, 0.0515, 0.0798], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 07:04:17,954 INFO [train.py:903] (1/4) Epoch 16, batch 2350, loss[loss=0.1652, simple_loss=0.243, pruned_loss=0.04369, over 19734.00 frames. ], tot_loss[loss=0.22, simple_loss=0.297, pruned_loss=0.07147, over 3820394.28 frames. ], batch size: 46, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:04:34,915 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:53,849 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.048e+02 6.738e+02 8.844e+02 1.580e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 07:05:00,896 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 07:05:05,761 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:05:18,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 07:05:22,503 INFO [train.py:903] (1/4) Epoch 16, batch 2400, loss[loss=0.2407, simple_loss=0.3081, pruned_loss=0.08668, over 19654.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.297, pruned_loss=0.07145, over 3814611.76 frames. ], batch size: 53, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:24,577 INFO [train.py:903] (1/4) Epoch 16, batch 2450, loss[loss=0.2199, simple_loss=0.3075, pruned_loss=0.06609, over 19667.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2988, pruned_loss=0.07238, over 3807296.50 frames. ], batch size: 55, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:07:00,037 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.877e+02 7.599e+02 9.302e+02 2.010e+03, threshold=1.520e+03, percent-clipped=8.0 +2023-04-02 07:07:27,191 INFO [train.py:903] (1/4) Epoch 16, batch 2500, loss[loss=0.1821, simple_loss=0.2599, pruned_loss=0.05215, over 19738.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2991, pruned_loss=0.07263, over 3808593.90 frames. ], batch size: 45, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:07:39,241 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. 
limit=2.0 +2023-04-02 07:08:26,454 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3361, 3.0273, 2.2007, 2.7618, 0.7687, 2.8981, 2.8395, 2.9282], + device='cuda:1'), covar=tensor([0.1053, 0.1273, 0.2098, 0.1015, 0.4069, 0.1014, 0.1176, 0.1303], + device='cuda:1'), in_proj_covar=tensor([0.0469, 0.0384, 0.0459, 0.0329, 0.0390, 0.0396, 0.0391, 0.0424], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:08:29,688 INFO [train.py:903] (1/4) Epoch 16, batch 2550, loss[loss=0.2246, simple_loss=0.3108, pruned_loss=0.06923, over 19666.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2974, pruned_loss=0.07165, over 3819385.71 frames. ], batch size: 59, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:09:05,175 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 5.239e+02 6.384e+02 8.143e+02 1.987e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 07:09:24,251 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-02 07:09:24,641 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 07:09:32,158 INFO [train.py:903] (1/4) Epoch 16, batch 2600, loss[loss=0.2375, simple_loss=0.3148, pruned_loss=0.08008, over 19696.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2968, pruned_loss=0.07123, over 3818462.06 frames. ], batch size: 59, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:03,947 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8648, 2.3911, 2.4823, 2.8939, 2.6095, 2.5482, 2.3056, 2.8984], + device='cuda:1'), covar=tensor([0.0763, 0.1510, 0.1100, 0.0912, 0.1231, 0.0447, 0.1169, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0352, 0.0298, 0.0247, 0.0300, 0.0249, 0.0294, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:10:21,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2327, 2.0849, 1.6815, 1.3410, 1.9083, 1.3131, 1.1678, 1.7742], + device='cuda:1'), covar=tensor([0.0876, 0.0744, 0.0942, 0.0825, 0.0458, 0.1177, 0.0693, 0.0388], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0313, 0.0329, 0.0258, 0.0243, 0.0333, 0.0294, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:10:35,320 INFO [train.py:903] (1/4) Epoch 16, batch 2650, loss[loss=0.2131, simple_loss=0.2977, pruned_loss=0.06428, over 19680.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2963, pruned_loss=0.07111, over 3830133.76 frames. ], batch size: 53, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:54,817 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 07:11:09,898 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 5.317e+02 6.316e+02 8.001e+02 1.365e+03, threshold=1.263e+03, percent-clipped=2.0 +2023-04-02 07:11:36,915 INFO [train.py:903] (1/4) Epoch 16, batch 2700, loss[loss=0.246, simple_loss=0.3148, pruned_loss=0.08863, over 18291.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2971, pruned_loss=0.07226, over 3817780.88 frames. ], batch size: 83, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:12:39,773 INFO [train.py:903] (1/4) Epoch 16, batch 2750, loss[loss=0.2196, simple_loss=0.282, pruned_loss=0.07855, over 19758.00 frames. 
], tot_loss[loss=0.2211, simple_loss=0.2973, pruned_loss=0.07246, over 3826813.74 frames. ], batch size: 45, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:13:15,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 5.170e+02 6.445e+02 8.543e+02 2.677e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 07:13:41,800 INFO [train.py:903] (1/4) Epoch 16, batch 2800, loss[loss=0.2282, simple_loss=0.2993, pruned_loss=0.07859, over 19587.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2961, pruned_loss=0.07164, over 3832736.87 frames. ], batch size: 52, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:14:44,336 INFO [train.py:903] (1/4) Epoch 16, batch 2850, loss[loss=0.2315, simple_loss=0.3066, pruned_loss=0.0782, over 18842.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2951, pruned_loss=0.0713, over 3827067.73 frames. ], batch size: 74, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:18,939 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.908e+02 5.066e+02 6.361e+02 8.222e+02 2.548e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 07:15:46,241 INFO [train.py:903] (1/4) Epoch 16, batch 2900, loss[loss=0.2099, simple_loss=0.2965, pruned_loss=0.06162, over 19483.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2966, pruned_loss=0.07183, over 3819283.81 frames. ], batch size: 64, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:46,268 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 07:16:48,796 INFO [train.py:903] (1/4) Epoch 16, batch 2950, loss[loss=0.2491, simple_loss=0.3284, pruned_loss=0.08488, over 19527.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2962, pruned_loss=0.07117, over 3818074.14 frames. ], batch size: 54, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:23,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.947e+02 6.195e+02 7.724e+02 2.015e+03, threshold=1.239e+03, percent-clipped=3.0 +2023-04-02 07:17:24,309 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1320, 1.8790, 1.7473, 2.1412, 1.9745, 1.8803, 1.6902, 2.0829], + device='cuda:1'), covar=tensor([0.0982, 0.1644, 0.1399, 0.1005, 0.1312, 0.0512, 0.1383, 0.0644], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0353, 0.0299, 0.0247, 0.0299, 0.0250, 0.0295, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:17:33,573 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:17:50,879 INFO [train.py:903] (1/4) Epoch 16, batch 3000, loss[loss=0.2751, simple_loss=0.3398, pruned_loss=0.1052, over 19571.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2968, pruned_loss=0.07163, over 3807358.92 frames. ], batch size: 61, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:50,879 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 07:18:04,140 INFO [train.py:937] (1/4) Epoch 16, validation: loss=0.1725, simple_loss=0.273, pruned_loss=0.03604, over 944034.00 frames. +2023-04-02 07:18:04,141 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 07:18:07,769 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 07:19:07,121 INFO [train.py:903] (1/4) Epoch 16, batch 3050, loss[loss=0.1825, simple_loss=0.2512, pruned_loss=0.05696, over 18714.00 frames. 
], tot_loss[loss=0.2202, simple_loss=0.2969, pruned_loss=0.07172, over 3794294.07 frames. ], batch size: 41, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:19:41,628 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.577e+02 4.913e+02 6.190e+02 8.953e+02 2.496e+03, threshold=1.238e+03, percent-clipped=7.0 +2023-04-02 07:19:49,971 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:11,579 INFO [train.py:903] (1/4) Epoch 16, batch 3100, loss[loss=0.1893, simple_loss=0.2627, pruned_loss=0.05791, over 19770.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.296, pruned_loss=0.0709, over 3799995.64 frames. ], batch size: 46, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:20:11,899 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105520.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:40,054 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0971, 1.6611, 2.2141, 1.5846, 3.0593, 4.6439, 4.6570, 5.0501], + device='cuda:1'), covar=tensor([0.1718, 0.3328, 0.2863, 0.2091, 0.0559, 0.0179, 0.0160, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0307, 0.0337, 0.0255, 0.0230, 0.0174, 0.0210, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:21:13,221 INFO [train.py:903] (1/4) Epoch 16, batch 3150, loss[loss=0.1849, simple_loss=0.2657, pruned_loss=0.05203, over 19463.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.07023, over 3812831.70 frames. ], batch size: 49, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:21:41,341 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 07:21:46,677 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.014e+02 6.069e+02 7.545e+02 2.509e+03, threshold=1.214e+03, percent-clipped=2.0 +2023-04-02 07:22:12,922 INFO [train.py:903] (1/4) Epoch 16, batch 3200, loss[loss=0.2179, simple_loss=0.2861, pruned_loss=0.07485, over 19104.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.296, pruned_loss=0.0714, over 3794327.14 frames. ], batch size: 42, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:15,485 INFO [train.py:903] (1/4) Epoch 16, batch 3250, loss[loss=0.1834, simple_loss=0.2633, pruned_loss=0.05173, over 19391.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2956, pruned_loss=0.07075, over 3796008.79 frames. ], batch size: 48, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:50,144 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 5.141e+02 5.939e+02 7.859e+02 1.653e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-02 07:24:19,069 INFO [train.py:903] (1/4) Epoch 16, batch 3300, loss[loss=0.2602, simple_loss=0.3288, pruned_loss=0.09576, over 19059.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2962, pruned_loss=0.07124, over 3782308.83 frames. 
], batch size: 69, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:24:19,302 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3679, 3.9794, 2.5421, 3.5669, 0.9845, 3.9194, 3.8212, 3.9207], + device='cuda:1'), covar=tensor([0.0727, 0.0947, 0.2004, 0.0771, 0.3775, 0.0672, 0.0795, 0.1162], + device='cuda:1'), in_proj_covar=tensor([0.0471, 0.0387, 0.0463, 0.0328, 0.0393, 0.0398, 0.0394, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:24:22,592 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 07:24:48,424 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2926, 1.1662, 1.1576, 1.4837, 1.2521, 1.3852, 1.4166, 1.2838], + device='cuda:1'), covar=tensor([0.0610, 0.0748, 0.0773, 0.0577, 0.0802, 0.0614, 0.0784, 0.0578], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0220, 0.0221, 0.0240, 0.0224, 0.0207, 0.0189, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 07:24:55,822 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:25:21,450 INFO [train.py:903] (1/4) Epoch 16, batch 3350, loss[loss=0.2225, simple_loss=0.3027, pruned_loss=0.07114, over 19654.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.07063, over 3795437.02 frames. ], batch size: 58, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:25:31,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 07:25:57,718 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.066e+02 6.242e+02 8.095e+02 2.652e+03, threshold=1.248e+03, percent-clipped=5.0 +2023-04-02 07:26:12,284 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:20,504 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105817.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:23,657 INFO [train.py:903] (1/4) Epoch 16, batch 3400, loss[loss=0.244, simple_loss=0.3237, pruned_loss=0.08211, over 18251.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2971, pruned_loss=0.07128, over 3786545.95 frames. ], batch size: 83, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:26:29,233 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-02 07:27:00,008 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:18,403 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:19,669 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:26,112 INFO [train.py:903] (1/4) Epoch 16, batch 3450, loss[loss=0.1989, simple_loss=0.2794, pruned_loss=0.05923, over 19834.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2982, pruned_loss=0.07191, over 3788802.84 frames. ], batch size: 52, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:29,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 07:28:00,500 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.355e+02 6.157e+02 7.690e+02 1.854e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 07:28:29,097 INFO [train.py:903] (1/4) Epoch 16, batch 3500, loss[loss=0.2034, simple_loss=0.2763, pruned_loss=0.0653, over 19777.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2977, pruned_loss=0.07155, over 3798944.42 frames. ], batch size: 48, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:29:23,565 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:29:31,025 INFO [train.py:903] (1/4) Epoch 16, batch 3550, loss[loss=0.1646, simple_loss=0.242, pruned_loss=0.04358, over 19697.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2987, pruned_loss=0.07226, over 3776598.39 frames. ], batch size: 45, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:29:42,161 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:30:05,948 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2945, 1.3819, 1.5540, 1.5128, 2.3326, 2.0433, 2.4473, 0.9436], + device='cuda:1'), covar=tensor([0.2409, 0.4175, 0.2581, 0.1931, 0.1554, 0.2154, 0.1476, 0.4388], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0609, 0.0662, 0.0462, 0.0606, 0.0512, 0.0649, 0.0521], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:30:06,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 5.018e+02 6.219e+02 7.272e+02 1.453e+03, threshold=1.244e+03, percent-clipped=3.0 +2023-04-02 07:30:34,296 INFO [train.py:903] (1/4) Epoch 16, batch 3600, loss[loss=0.1793, simple_loss=0.2535, pruned_loss=0.05261, over 19770.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2985, pruned_loss=0.07188, over 3797660.95 frames. ], batch size: 45, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:31:37,379 INFO [train.py:903] (1/4) Epoch 16, batch 3650, loss[loss=0.2176, simple_loss=0.304, pruned_loss=0.06562, over 19795.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2976, pruned_loss=0.07132, over 3803545.90 frames. ], batch size: 56, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:09,753 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:32:13,040 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.371e+02 6.801e+02 8.277e+02 1.518e+03, threshold=1.360e+03, percent-clipped=5.0 +2023-04-02 07:32:42,531 INFO [train.py:903] (1/4) Epoch 16, batch 3700, loss[loss=0.2206, simple_loss=0.2996, pruned_loss=0.07083, over 19760.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.298, pruned_loss=0.07173, over 3802761.98 frames. 
], batch size: 63, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:44,093 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:13,577 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:23,621 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:34,046 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:44,490 INFO [train.py:903] (1/4) Epoch 16, batch 3750, loss[loss=0.241, simple_loss=0.3306, pruned_loss=0.07575, over 19756.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.297, pruned_loss=0.07084, over 3808162.95 frames. ], batch size: 63, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:34:19,089 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.705e+02 5.517e+02 6.973e+02 1.532e+03, threshold=1.103e+03, percent-clipped=3.0 +2023-04-02 07:34:24,801 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 07:34:45,484 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:34:46,193 INFO [train.py:903] (1/4) Epoch 16, batch 3800, loss[loss=0.2082, simple_loss=0.2824, pruned_loss=0.06707, over 19624.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2966, pruned_loss=0.07066, over 3815065.65 frames. ], batch size: 50, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:06,414 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106235.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:18,148 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 07:35:18,445 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106244.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:36,721 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106260.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,043 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106269.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,820 INFO [train.py:903] (1/4) Epoch 16, batch 3850, loss[loss=0.1928, simple_loss=0.2772, pruned_loss=0.0542, over 19389.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2959, pruned_loss=0.07006, over 3816986.69 frames. ], batch size: 48, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:56,814 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:09,579 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:23,144 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.267e+02 6.364e+02 7.734e+02 1.610e+03, threshold=1.273e+03, percent-clipped=5.0 +2023-04-02 07:36:50,356 INFO [train.py:903] (1/4) Epoch 16, batch 3900, loss[loss=0.2484, simple_loss=0.3279, pruned_loss=0.08449, over 19601.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2959, pruned_loss=0.06961, over 3830057.63 frames. 
], batch size: 57, lr: 5.17e-03, grad_scale: 16.0 +2023-04-02 07:37:32,175 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106354.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:37:53,351 INFO [train.py:903] (1/4) Epoch 16, batch 3950, loss[loss=0.1842, simple_loss=0.2612, pruned_loss=0.05359, over 19763.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2953, pruned_loss=0.06958, over 3821851.28 frames. ], batch size: 46, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:37:58,036 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 07:38:28,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.931e+02 6.019e+02 7.839e+02 1.392e+03, threshold=1.204e+03, percent-clipped=3.0 +2023-04-02 07:38:54,934 INFO [train.py:903] (1/4) Epoch 16, batch 4000, loss[loss=0.2286, simple_loss=0.3129, pruned_loss=0.07218, over 18020.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2944, pruned_loss=0.06924, over 3826190.36 frames. ], batch size: 83, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:03,764 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 07:39:19,323 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:39:23,134 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3865, 3.0994, 2.2673, 2.3767, 2.4090, 2.5857, 0.8220, 2.2109], + device='cuda:1'), covar=tensor([0.0582, 0.0547, 0.0620, 0.0979, 0.0839, 0.0975, 0.1312, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0345, 0.0343, 0.0371, 0.0445, 0.0375, 0.0323, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:39:45,507 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 07:39:56,761 INFO [train.py:903] (1/4) Epoch 16, batch 4050, loss[loss=0.1839, simple_loss=0.2667, pruned_loss=0.05052, over 19462.00 frames. ], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.06899, over 3824058.04 frames. ], batch size: 49, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:59,431 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:40:22,932 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.9086, 2.7011, 2.1197, 2.1458, 1.8796, 2.2783, 1.0390, 1.9837], + device='cuda:1'), covar=tensor([0.0549, 0.0559, 0.0575, 0.0930, 0.1014, 0.1098, 0.1187, 0.0857], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0346, 0.0345, 0.0372, 0.0447, 0.0378, 0.0325, 0.0332], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:40:34,188 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.509e+02 4.912e+02 6.033e+02 8.007e+02 1.551e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-02 07:40:59,899 INFO [train.py:903] (1/4) Epoch 16, batch 4100, loss[loss=0.2009, simple_loss=0.288, pruned_loss=0.05692, over 19689.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2942, pruned_loss=0.0691, over 3822798.35 frames. 
], batch size: 59, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:41:07,079 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106525.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:15,885 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:34,839 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 07:41:36,425 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106550.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:41,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106554.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:45,528 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:42:02,445 INFO [train.py:903] (1/4) Epoch 16, batch 4150, loss[loss=0.2323, simple_loss=0.3106, pruned_loss=0.07694, over 19670.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2936, pruned_loss=0.06898, over 3835953.01 frames. ], batch size: 58, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:42:20,407 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3804, 1.4338, 1.8033, 1.6420, 2.7371, 2.1747, 2.8627, 1.3682], + device='cuda:1'), covar=tensor([0.2328, 0.4050, 0.2481, 0.1751, 0.1386, 0.2012, 0.1346, 0.3843], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0606, 0.0658, 0.0458, 0.0602, 0.0506, 0.0647, 0.0517], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:42:35,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0845, 1.9791, 1.7244, 1.5848, 1.4745, 1.6253, 0.4776, 1.0356], + device='cuda:1'), covar=tensor([0.0556, 0.0589, 0.0436, 0.0757, 0.1095, 0.0942, 0.1117, 0.0932], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0346, 0.0343, 0.0371, 0.0446, 0.0375, 0.0324, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:42:36,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.926e+02 6.073e+02 7.216e+02 1.422e+03, threshold=1.215e+03, percent-clipped=1.0 +2023-04-02 07:43:03,954 INFO [train.py:903] (1/4) Epoch 16, batch 4200, loss[loss=0.1776, simple_loss=0.2693, pruned_loss=0.04292, over 19656.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2939, pruned_loss=0.06888, over 3835809.13 frames. ], batch size: 60, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:43:11,039 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 07:43:15,841 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:05,134 INFO [train.py:903] (1/4) Epoch 16, batch 4250, loss[loss=0.1899, simple_loss=0.2698, pruned_loss=0.05501, over 19487.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.06996, over 3816506.59 frames. ], batch size: 49, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:44:20,457 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 07:44:32,738 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-02 07:44:41,264 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:42,277 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.502e+02 4.922e+02 6.114e+02 7.451e+02 1.808e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 07:45:08,519 INFO [train.py:903] (1/4) Epoch 16, batch 4300, loss[loss=0.1911, simple_loss=0.264, pruned_loss=0.05907, over 19705.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.295, pruned_loss=0.06976, over 3812884.95 frames. ], batch size: 46, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:45:38,991 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:45:58,078 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 07:45:58,462 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9231, 3.4208, 2.0427, 2.1748, 3.0809, 1.6722, 1.3516, 2.0962], + device='cuda:1'), covar=tensor([0.1347, 0.0561, 0.1012, 0.0737, 0.0515, 0.1180, 0.0974, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0311, 0.0329, 0.0254, 0.0244, 0.0330, 0.0291, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:46:11,653 INFO [train.py:903] (1/4) Epoch 16, batch 4350, loss[loss=0.2047, simple_loss=0.2812, pruned_loss=0.06413, over 19590.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2951, pruned_loss=0.06978, over 3811845.83 frames. ], batch size: 52, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:46:46,926 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.102e+02 6.153e+02 8.101e+02 2.041e+03, threshold=1.231e+03, percent-clipped=8.0 +2023-04-02 07:47:03,114 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:06,283 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:09,625 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:14,210 INFO [train.py:903] (1/4) Epoch 16, batch 4400, loss[loss=0.2188, simple_loss=0.3019, pruned_loss=0.06783, over 19475.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2936, pruned_loss=0.06931, over 3808844.69 frames. ], batch size: 64, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:47:31,039 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1801, 1.0676, 1.1315, 1.4475, 1.0288, 1.2309, 1.3479, 1.2147], + device='cuda:1'), covar=tensor([0.1051, 0.1277, 0.1276, 0.0685, 0.0945, 0.1045, 0.0925, 0.0909], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0246, 0.0227, 0.0209, 0.0192, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 07:47:33,133 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106835.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:36,412 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 07:47:46,708 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 07:47:52,783 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6624, 2.2881, 2.3665, 2.8473, 2.5801, 2.3408, 2.1899, 2.7541], + device='cuda:1'), covar=tensor([0.0906, 0.1630, 0.1300, 0.0900, 0.1195, 0.0466, 0.1232, 0.0520], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0351, 0.0298, 0.0242, 0.0296, 0.0245, 0.0290, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:48:17,291 INFO [train.py:903] (1/4) Epoch 16, batch 4450, loss[loss=0.2268, simple_loss=0.3021, pruned_loss=0.07579, over 19788.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2946, pruned_loss=0.06971, over 3810179.29 frames. ], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:48:53,519 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.84 vs. limit=5.0 +2023-04-02 07:48:53,922 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.966e+02 6.259e+02 8.420e+02 1.632e+03, threshold=1.252e+03, percent-clipped=6.0 +2023-04-02 07:49:18,988 INFO [train.py:903] (1/4) Epoch 16, batch 4500, loss[loss=0.2136, simple_loss=0.3001, pruned_loss=0.06355, over 18725.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2936, pruned_loss=0.0691, over 3818868.40 frames. ], batch size: 74, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:49:34,394 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106931.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:50:23,584 INFO [train.py:903] (1/4) Epoch 16, batch 4550, loss[loss=0.1809, simple_loss=0.2719, pruned_loss=0.04491, over 19675.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2927, pruned_loss=0.06891, over 3821370.71 frames. ], batch size: 53, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:50:31,747 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 07:50:54,360 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 07:50:59,932 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 4.883e+02 5.860e+02 7.136e+02 1.225e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-02 07:51:02,586 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:51:27,842 INFO [train.py:903] (1/4) Epoch 16, batch 4600, loss[loss=0.2082, simple_loss=0.2926, pruned_loss=0.06185, over 19582.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2934, pruned_loss=0.0697, over 3809082.11 frames. 
], batch size: 52, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:51:34,896 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:51:42,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3930, 4.0159, 2.6311, 3.6244, 1.0768, 3.8583, 3.8381, 3.9171], + device='cuda:1'), covar=tensor([0.0638, 0.1017, 0.1938, 0.0817, 0.3810, 0.0791, 0.0828, 0.0994], + device='cuda:1'), in_proj_covar=tensor([0.0471, 0.0386, 0.0466, 0.0329, 0.0393, 0.0398, 0.0396, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 07:52:06,104 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3716, 1.4584, 1.7805, 1.6179, 2.6730, 2.2138, 2.7272, 1.0532], + device='cuda:1'), covar=tensor([0.2307, 0.3955, 0.2486, 0.1814, 0.1337, 0.1942, 0.1305, 0.4054], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0607, 0.0660, 0.0459, 0.0605, 0.0509, 0.0648, 0.0520], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:52:17,745 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2567, 1.3564, 1.7874, 1.4430, 2.7235, 3.6931, 3.4296, 3.9130], + device='cuda:1'), covar=tensor([0.1664, 0.3559, 0.3158, 0.2239, 0.0642, 0.0191, 0.0200, 0.0232], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0306, 0.0335, 0.0255, 0.0230, 0.0174, 0.0210, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:52:25,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3065, 1.3595, 1.6815, 1.5408, 2.1441, 1.8758, 2.1387, 1.0277], + device='cuda:1'), covar=tensor([0.2366, 0.4004, 0.2339, 0.1884, 0.1533, 0.2293, 0.1508, 0.4157], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0604, 0.0658, 0.0457, 0.0602, 0.0507, 0.0646, 0.0518], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 07:52:29,058 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,811 INFO [train.py:903] (1/4) Epoch 16, batch 4650, loss[loss=0.283, simple_loss=0.3386, pruned_loss=0.1137, over 13438.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2937, pruned_loss=0.07006, over 3820580.89 frames. ], batch size: 136, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:52:47,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 07:52:59,803 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 07:53:01,388 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:53:07,464 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.220e+02 5.459e+02 6.581e+02 8.910e+02 1.601e+03, threshold=1.316e+03, percent-clipped=6.0 +2023-04-02 07:53:31,764 INFO [train.py:903] (1/4) Epoch 16, batch 4700, loss[loss=0.2325, simple_loss=0.3078, pruned_loss=0.07855, over 19854.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2947, pruned_loss=0.07047, over 3816317.58 frames. 
], batch size: 52, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:53:55,933 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 07:54:12,246 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 07:54:20,559 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107158.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 07:54:36,808 INFO [train.py:903] (1/4) Epoch 16, batch 4750, loss[loss=0.2027, simple_loss=0.2908, pruned_loss=0.05731, over 19646.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2962, pruned_loss=0.07112, over 3821176.81 frames. ], batch size: 55, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:54:37,125 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107170.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:54:58,973 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:11,904 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.545e+02 6.621e+02 8.650e+02 1.971e+03, threshold=1.324e+03, percent-clipped=6.0 +2023-04-02 07:55:29,391 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:40,217 INFO [train.py:903] (1/4) Epoch 16, batch 4800, loss[loss=0.2108, simple_loss=0.2973, pruned_loss=0.0621, over 19542.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2958, pruned_loss=0.0706, over 3833966.97 frames. ], batch size: 56, lr: 5.14e-03, grad_scale: 8.0 +2023-04-02 07:55:50,298 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 07:56:41,879 INFO [train.py:903] (1/4) Epoch 16, batch 4850, loss[loss=0.2657, simple_loss=0.3334, pruned_loss=0.09897, over 13040.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.0711, over 3823595.05 frames. ], batch size: 136, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:07,068 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 07:57:19,837 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.930e+02 6.295e+02 8.478e+02 1.665e+03, threshold=1.259e+03, percent-clipped=1.0 +2023-04-02 07:57:27,485 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 07:57:32,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 07:57:32,801 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 07:57:43,099 INFO [train.py:903] (1/4) Epoch 16, batch 4900, loss[loss=0.2052, simple_loss=0.2957, pruned_loss=0.05732, over 19698.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07112, over 3834193.82 frames. ], batch size: 59, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:43,112 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 07:57:43,918 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.11 vs. limit=5.0 +2023-04-02 07:58:04,195 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 07:58:46,426 INFO [train.py:903] (1/4) Epoch 16, batch 4950, loss[loss=0.2458, simple_loss=0.3223, pruned_loss=0.08469, over 19326.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2965, pruned_loss=0.07122, over 3831373.16 frames. ], batch size: 66, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:04,257 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 07:59:22,735 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 5.680e+02 6.678e+02 8.404e+02 2.020e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 07:59:27,613 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 07:59:48,958 INFO [train.py:903] (1/4) Epoch 16, batch 5000, loss[loss=0.2319, simple_loss=0.3084, pruned_loss=0.07774, over 19085.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07121, over 3835928.00 frames. ], batch size: 69, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:58,933 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 08:00:08,997 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 08:00:50,459 INFO [train.py:903] (1/4) Epoch 16, batch 5050, loss[loss=0.2255, simple_loss=0.3008, pruned_loss=0.0751, over 18097.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07086, over 3845258.33 frames. ], batch size: 83, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:01:27,875 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.470e+02 6.454e+02 8.047e+02 2.188e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 08:01:27,918 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 08:01:30,416 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107502.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:01:44,956 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:01:51,918 INFO [train.py:903] (1/4) Epoch 16, batch 5100, loss[loss=0.2685, simple_loss=0.331, pruned_loss=0.103, over 13597.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.296, pruned_loss=0.0711, over 3819276.66 frames. ], batch size: 136, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:02:02,170 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:02:04,295 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 08:02:08,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 08:02:13,298 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 08:02:54,731 INFO [train.py:903] (1/4) Epoch 16, batch 5150, loss[loss=0.2003, simple_loss=0.2904, pruned_loss=0.05514, over 19664.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2953, pruned_loss=0.07046, over 3820138.57 frames. ], batch size: 55, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:03:08,979 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 08:03:31,791 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.461e+02 4.950e+02 6.087e+02 7.766e+02 1.818e+03, threshold=1.217e+03, percent-clipped=6.0 +2023-04-02 08:03:43,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:03:54,404 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107617.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:03:57,186 INFO [train.py:903] (1/4) Epoch 16, batch 5200, loss[loss=0.2071, simple_loss=0.2771, pruned_loss=0.06852, over 19726.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2956, pruned_loss=0.07089, over 3802192.45 frames. ], batch size: 46, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:04:08,703 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:04:09,526 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 08:04:55,315 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 08:04:59,581 INFO [train.py:903] (1/4) Epoch 16, batch 5250, loss[loss=0.1953, simple_loss=0.269, pruned_loss=0.06076, over 19769.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2951, pruned_loss=0.07087, over 3799301.54 frames. ], batch size: 47, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:05:07,708 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:05:36,450 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.464e+02 6.488e+02 8.647e+02 1.622e+03, threshold=1.298e+03, percent-clipped=8.0 +2023-04-02 08:05:42,454 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6516, 1.5487, 1.4868, 2.2065, 1.7111, 1.9676, 2.0791, 1.8097], + device='cuda:1'), covar=tensor([0.0812, 0.0914, 0.1025, 0.0712, 0.0790, 0.0707, 0.0753, 0.0658], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0222, 0.0245, 0.0225, 0.0207, 0.0189, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 08:06:00,480 INFO [train.py:903] (1/4) Epoch 16, batch 5300, loss[loss=0.1901, simple_loss=0.2632, pruned_loss=0.05847, over 17293.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2952, pruned_loss=0.0711, over 3803253.43 frames. ], batch size: 38, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:06:19,332 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 08:06:28,034 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3498, 3.0591, 2.2620, 2.8184, 0.7706, 2.9475, 2.8807, 2.9668], + device='cuda:1'), covar=tensor([0.1096, 0.1266, 0.2062, 0.0986, 0.3957, 0.1017, 0.1085, 0.1293], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0385, 0.0463, 0.0328, 0.0391, 0.0399, 0.0398, 0.0427], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:06:28,315 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3488, 1.3522, 1.3840, 1.4622, 1.7912, 1.8728, 1.6629, 0.5985], + device='cuda:1'), covar=tensor([0.2285, 0.4208, 0.2614, 0.1869, 0.1550, 0.2107, 0.1457, 0.4271], + device='cuda:1'), in_proj_covar=tensor([0.0507, 0.0609, 0.0659, 0.0461, 0.0607, 0.0508, 0.0649, 0.0520], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:06:45,139 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:07:03,282 INFO [train.py:903] (1/4) Epoch 16, batch 5350, loss[loss=0.247, simple_loss=0.3267, pruned_loss=0.08366, over 19670.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2957, pruned_loss=0.07132, over 3811656.40 frames. ], batch size: 60, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:07:37,241 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 08:07:39,720 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3603, 3.9301, 2.7640, 3.6303, 0.7419, 3.8805, 3.7966, 3.9236], + device='cuda:1'), covar=tensor([0.0667, 0.1029, 0.1714, 0.0674, 0.4168, 0.0682, 0.0830, 0.0930], + device='cuda:1'), in_proj_covar=tensor([0.0470, 0.0384, 0.0463, 0.0328, 0.0391, 0.0399, 0.0399, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:07:40,607 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.506e+02 5.884e+02 6.805e+02 1.610e+03, threshold=1.177e+03, percent-clipped=2.0 +2023-04-02 08:08:06,503 INFO [train.py:903] (1/4) Epoch 16, batch 5400, loss[loss=0.2401, simple_loss=0.3107, pruned_loss=0.08472, over 19782.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2963, pruned_loss=0.07138, over 3823117.61 frames. ], batch size: 54, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:08:56,404 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 08:09:02,902 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4321, 1.6292, 1.8804, 1.6757, 3.2087, 2.6815, 3.4841, 1.6683], + device='cuda:1'), covar=tensor([0.2446, 0.4067, 0.2708, 0.1953, 0.1576, 0.1919, 0.1570, 0.3824], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0609, 0.0661, 0.0462, 0.0608, 0.0511, 0.0649, 0.0519], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:09:08,381 INFO [train.py:903] (1/4) Epoch 16, batch 5450, loss[loss=0.2256, simple_loss=0.3031, pruned_loss=0.07402, over 19681.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2971, pruned_loss=0.07142, over 3817135.01 frames. 
], batch size: 53, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:10,571 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107872.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:11,938 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107873.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:09:26,833 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:44,570 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107898.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:09:46,448 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.026e+02 5.228e+02 6.592e+02 8.752e+02 1.860e+03, threshold=1.318e+03, percent-clipped=11.0 +2023-04-02 08:09:58,386 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:10:10,022 INFO [train.py:903] (1/4) Epoch 16, batch 5500, loss[loss=0.2481, simple_loss=0.3239, pruned_loss=0.0861, over 19299.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2971, pruned_loss=0.07133, over 3826448.90 frames. ], batch size: 66, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:10:12,820 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0049, 2.0542, 2.2282, 2.6796, 2.0956, 2.5751, 2.3376, 2.0995], + device='cuda:1'), covar=tensor([0.3427, 0.2918, 0.1364, 0.1700, 0.3018, 0.1434, 0.3227, 0.2390], + device='cuda:1'), in_proj_covar=tensor([0.0846, 0.0892, 0.0682, 0.0905, 0.0826, 0.0763, 0.0812, 0.0746], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:10:28,510 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9086, 4.3198, 4.6185, 4.5856, 1.6286, 4.3141, 3.7660, 4.3039], + device='cuda:1'), covar=tensor([0.1422, 0.0861, 0.0589, 0.0602, 0.5516, 0.0711, 0.0647, 0.1086], + device='cuda:1'), in_proj_covar=tensor([0.0736, 0.0678, 0.0879, 0.0761, 0.0784, 0.0630, 0.0524, 0.0817], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 08:10:34,926 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 08:10:43,671 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2714, 1.7932, 1.8907, 2.1180, 1.8863, 1.8733, 1.7051, 2.1160], + device='cuda:1'), covar=tensor([0.0899, 0.1623, 0.1338, 0.1008, 0.1355, 0.0529, 0.1349, 0.0658], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0244, 0.0299, 0.0248, 0.0292, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:11:00,088 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 08:11:13,100 INFO [train.py:903] (1/4) Epoch 16, batch 5550, loss[loss=0.2159, simple_loss=0.3004, pruned_loss=0.06573, over 19527.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2965, pruned_loss=0.07132, over 3802761.70 frames. ], batch size: 54, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:11:18,615 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 08:11:34,450 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:11:50,032 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.933e+02 6.287e+02 7.608e+02 2.106e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-02 08:12:10,704 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 08:12:17,773 INFO [train.py:903] (1/4) Epoch 16, batch 5600, loss[loss=0.1633, simple_loss=0.2429, pruned_loss=0.04192, over 19740.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2967, pruned_loss=0.07169, over 3793915.20 frames. ], batch size: 45, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:12:19,204 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:12:44,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7406, 1.5479, 1.6027, 2.3782, 1.6635, 1.9645, 2.1608, 1.8471], + device='cuda:1'), covar=tensor([0.0777, 0.0959, 0.0998, 0.0755, 0.0884, 0.0785, 0.0774, 0.0663], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0223, 0.0245, 0.0227, 0.0207, 0.0188, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 08:13:17,754 INFO [train.py:903] (1/4) Epoch 16, batch 5650, loss[loss=0.2529, simple_loss=0.3198, pruned_loss=0.09306, over 19539.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2963, pruned_loss=0.07154, over 3814480.61 frames. ], batch size: 54, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:13:55,641 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108099.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:13:56,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.688e+02 6.420e+02 8.034e+02 1.662e+03, threshold=1.284e+03, percent-clipped=4.0 +2023-04-02 08:14:03,618 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 08:14:04,428 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. limit=5.0 +2023-04-02 08:14:20,999 INFO [train.py:903] (1/4) Epoch 16, batch 5700, loss[loss=0.2554, simple_loss=0.3286, pruned_loss=0.09111, over 18070.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2951, pruned_loss=0.07079, over 3809486.34 frames. ], batch size: 83, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:14:41,758 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:15:22,678 INFO [train.py:903] (1/4) Epoch 16, batch 5750, loss[loss=0.1876, simple_loss=0.267, pruned_loss=0.05413, over 19412.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2951, pruned_loss=0.0707, over 3808986.09 frames. ], batch size: 48, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:15:22,702 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 08:15:33,048 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 08:15:36,703 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 08:16:00,583 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.860e+02 6.382e+02 7.922e+02 1.732e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-02 08:16:19,198 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108214.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:16:26,739 INFO [train.py:903] (1/4) Epoch 16, batch 5800, loss[loss=0.2499, simple_loss=0.3206, pruned_loss=0.08958, over 19291.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2955, pruned_loss=0.07089, over 3813051.80 frames. ], batch size: 66, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:16:54,050 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:25,795 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:27,717 INFO [train.py:903] (1/4) Epoch 16, batch 5850, loss[loss=0.2365, simple_loss=0.3144, pruned_loss=0.07933, over 19778.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2949, pruned_loss=0.07058, over 3818882.62 frames. ], batch size: 56, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:17:53,820 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 08:18:05,296 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.738e+02 5.463e+02 6.888e+02 8.802e+02 1.964e+03, threshold=1.378e+03, percent-clipped=5.0 +2023-04-02 08:18:12,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5325, 2.9579, 3.0049, 3.0025, 1.2836, 2.8919, 2.5245, 2.8035], + device='cuda:1'), covar=tensor([0.1464, 0.0775, 0.0728, 0.0818, 0.4646, 0.0770, 0.0704, 0.1213], + device='cuda:1'), in_proj_covar=tensor([0.0730, 0.0673, 0.0877, 0.0758, 0.0777, 0.0630, 0.0523, 0.0812], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 08:18:24,146 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 08:18:28,344 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 08:18:29,521 INFO [train.py:903] (1/4) Epoch 16, batch 5900, loss[loss=0.2017, simple_loss=0.2753, pruned_loss=0.06409, over 19422.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2943, pruned_loss=0.07029, over 3809400.63 frames. ], batch size: 48, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:52,005 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 08:18:59,402 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:19:00,959 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-02 08:19:30,990 INFO [train.py:903] (1/4) Epoch 16, batch 5950, loss[loss=0.2422, simple_loss=0.317, pruned_loss=0.08372, over 13577.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2944, pruned_loss=0.06988, over 3811594.83 frames. 
], batch size: 135, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:19:59,911 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:09,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 5.248e+02 6.439e+02 7.965e+02 2.252e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-02 08:20:30,453 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:35,482 INFO [train.py:903] (1/4) Epoch 16, batch 6000, loss[loss=0.204, simple_loss=0.2755, pruned_loss=0.06621, over 19759.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2946, pruned_loss=0.07024, over 3818487.01 frames. ], batch size: 45, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:20:35,482 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 08:20:47,901 INFO [train.py:937] (1/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2723, pruned_loss=0.03545, over 944034.00 frames. +2023-04-02 08:20:47,902 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 08:21:18,664 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0433, 5.4330, 2.8389, 4.7360, 0.9379, 5.4484, 5.3591, 5.6381], + device='cuda:1'), covar=tensor([0.0390, 0.0770, 0.1915, 0.0622, 0.4034, 0.0559, 0.0762, 0.0872], + device='cuda:1'), in_proj_covar=tensor([0.0469, 0.0385, 0.0463, 0.0330, 0.0391, 0.0402, 0.0399, 0.0426], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:21:25,597 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:21:51,718 INFO [train.py:903] (1/4) Epoch 16, batch 6050, loss[loss=0.2094, simple_loss=0.2909, pruned_loss=0.06391, over 19770.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2951, pruned_loss=0.07023, over 3819232.12 frames. ], batch size: 54, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:21:52,082 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108470.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:22,230 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108495.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:28,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.272e+02 6.402e+02 8.353e+02 1.883e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 08:22:53,805 INFO [train.py:903] (1/4) Epoch 16, batch 6100, loss[loss=0.2419, simple_loss=0.3148, pruned_loss=0.08453, over 19768.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06964, over 3820823.38 frames. 
], batch size: 56, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:23:41,951 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9534, 1.2147, 1.5497, 0.6577, 2.2073, 2.4712, 2.2128, 2.6412], + device='cuda:1'), covar=tensor([0.1554, 0.3611, 0.3194, 0.2566, 0.0540, 0.0262, 0.0332, 0.0324], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0305, 0.0334, 0.0254, 0.0230, 0.0173, 0.0206, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:23:52,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7538, 1.8219, 2.0632, 2.3390, 1.6401, 2.1885, 2.1563, 1.9258], + device='cuda:1'), covar=tensor([0.3948, 0.3618, 0.1797, 0.2092, 0.3620, 0.1886, 0.4384, 0.3173], + device='cuda:1'), in_proj_covar=tensor([0.0850, 0.0894, 0.0682, 0.0906, 0.0828, 0.0768, 0.0814, 0.0748], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:23:56,079 INFO [train.py:903] (1/4) Epoch 16, batch 6150, loss[loss=0.2315, simple_loss=0.3123, pruned_loss=0.0753, over 19535.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2945, pruned_loss=0.07002, over 3820060.60 frames. ], batch size: 56, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:24:26,221 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 08:24:28,857 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:24:35,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.845e+02 6.012e+02 7.583e+02 1.796e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 08:24:58,796 INFO [train.py:903] (1/4) Epoch 16, batch 6200, loss[loss=0.2492, simple_loss=0.3236, pruned_loss=0.08737, over 18090.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2947, pruned_loss=0.06993, over 3829612.68 frames. ], batch size: 83, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:02,209 INFO [train.py:903] (1/4) Epoch 16, batch 6250, loss[loss=0.2222, simple_loss=0.3023, pruned_loss=0.07109, over 19691.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2951, pruned_loss=0.0697, over 3834041.82 frames. ], batch size: 60, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:22,928 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108687.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:26:27,135 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 08:26:29,318 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3289, 2.3260, 2.6297, 3.2631, 2.2592, 3.0304, 2.8290, 2.4609], + device='cuda:1'), covar=tensor([0.4007, 0.3808, 0.1684, 0.2380, 0.4270, 0.1856, 0.3667, 0.2888], + device='cuda:1'), in_proj_covar=tensor([0.0847, 0.0891, 0.0679, 0.0906, 0.0826, 0.0764, 0.0813, 0.0745], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:26:34,487 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 08:26:40,079 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 4.966e+02 6.026e+02 7.805e+02 1.726e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 08:27:04,698 INFO [train.py:903] (1/4) Epoch 16, batch 6300, loss[loss=0.1929, simple_loss=0.2703, pruned_loss=0.05776, over 19619.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.07008, over 3816016.29 frames. ], batch size: 50, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:27:07,181 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9065, 4.3391, 4.5865, 4.5865, 1.6222, 4.3422, 3.6593, 4.2572], + device='cuda:1'), covar=tensor([0.1483, 0.0913, 0.0629, 0.0615, 0.5936, 0.0825, 0.0666, 0.1175], + device='cuda:1'), in_proj_covar=tensor([0.0741, 0.0684, 0.0886, 0.0768, 0.0789, 0.0638, 0.0532, 0.0820], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 08:27:10,265 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5369, 4.1297, 2.5919, 3.6817, 0.7403, 3.9161, 3.8828, 3.9747], + device='cuda:1'), covar=tensor([0.0665, 0.0934, 0.2037, 0.0799, 0.4433, 0.0777, 0.0937, 0.1088], + device='cuda:1'), in_proj_covar=tensor([0.0466, 0.0381, 0.0461, 0.0329, 0.0388, 0.0398, 0.0397, 0.0424], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:28:06,342 INFO [train.py:903] (1/4) Epoch 16, batch 6350, loss[loss=0.2199, simple_loss=0.2991, pruned_loss=0.07032, over 18839.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2954, pruned_loss=0.07025, over 3801672.79 frames. ], batch size: 74, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:28:38,938 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:47,226 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.125e+02 6.267e+02 8.166e+02 1.468e+03, threshold=1.253e+03, percent-clipped=8.0 +2023-04-02 08:28:48,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:54,168 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-02 08:28:56,965 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:29:01,782 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9808, 1.3445, 1.0745, 1.0311, 1.1739, 1.0411, 0.9575, 1.2243], + device='cuda:1'), covar=tensor([0.0537, 0.0774, 0.1018, 0.0674, 0.0516, 0.1240, 0.0572, 0.0468], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0306, 0.0330, 0.0254, 0.0242, 0.0329, 0.0289, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:29:09,799 INFO [train.py:903] (1/4) Epoch 16, batch 6400, loss[loss=0.2002, simple_loss=0.2689, pruned_loss=0.06579, over 19785.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2954, pruned_loss=0.07001, over 3808162.71 frames. ], batch size: 48, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:29:23,228 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. 
limit=2.0 +2023-04-02 08:30:12,311 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5707, 1.1546, 1.3412, 1.2912, 2.2077, 0.9804, 2.0247, 2.4454], + device='cuda:1'), covar=tensor([0.0643, 0.2691, 0.2794, 0.1511, 0.0858, 0.1997, 0.1005, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0352, 0.0373, 0.0332, 0.0358, 0.0339, 0.0358, 0.0375], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:30:14,212 INFO [train.py:903] (1/4) Epoch 16, batch 6450, loss[loss=0.2556, simple_loss=0.3302, pruned_loss=0.0905, over 19715.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2966, pruned_loss=0.07082, over 3807653.68 frames. ], batch size: 63, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:30:52,179 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.109e+02 6.250e+02 7.655e+02 1.750e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-02 08:31:00,316 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 08:31:04,028 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:31:18,890 INFO [train.py:903] (1/4) Epoch 16, batch 6500, loss[loss=0.1814, simple_loss=0.2671, pruned_loss=0.04787, over 19410.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2957, pruned_loss=0.07006, over 3819547.15 frames. ], batch size: 48, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:31:24,486 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 08:31:40,938 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:32:20,051 INFO [train.py:903] (1/4) Epoch 16, batch 6550, loss[loss=0.2018, simple_loss=0.2896, pruned_loss=0.05696, over 19391.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.296, pruned_loss=0.06955, over 3821299.88 frames. ], batch size: 70, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:32:53,564 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9419, 2.0399, 2.2299, 2.7586, 1.9292, 2.5491, 2.4036, 2.0687], + device='cuda:1'), covar=tensor([0.3944, 0.3666, 0.1731, 0.2119, 0.3838, 0.1834, 0.3918, 0.3025], + device='cuda:1'), in_proj_covar=tensor([0.0849, 0.0892, 0.0682, 0.0905, 0.0829, 0.0767, 0.0815, 0.0746], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:32:58,931 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.624e+02 5.967e+02 6.821e+02 1.232e+03, threshold=1.193e+03, percent-clipped=0.0 +2023-04-02 08:33:21,266 INFO [train.py:903] (1/4) Epoch 16, batch 6600, loss[loss=0.2165, simple_loss=0.2957, pruned_loss=0.0687, over 19676.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2952, pruned_loss=0.06951, over 3830861.79 frames. ], batch size: 53, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:33:35,339 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. 
limit=2.0 +2023-04-02 08:33:52,875 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8356, 1.9432, 2.1921, 2.5132, 1.7532, 2.4024, 2.3419, 2.0484], + device='cuda:1'), covar=tensor([0.4042, 0.3688, 0.1687, 0.2057, 0.3691, 0.1855, 0.4145, 0.3094], + device='cuda:1'), in_proj_covar=tensor([0.0847, 0.0892, 0.0682, 0.0905, 0.0828, 0.0766, 0.0815, 0.0747], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:34:03,940 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:05,035 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109055.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:34:08,552 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:22,843 INFO [train.py:903] (1/4) Epoch 16, batch 6650, loss[loss=0.209, simple_loss=0.2924, pruned_loss=0.06277, over 19701.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2954, pruned_loss=0.07019, over 3799559.73 frames. ], batch size: 63, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:34:41,480 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109083.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:35:01,848 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.420e+02 6.464e+02 8.289e+02 2.034e+03, threshold=1.293e+03, percent-clipped=5.0 +2023-04-02 08:35:27,580 INFO [train.py:903] (1/4) Epoch 16, batch 6700, loss[loss=0.2568, simple_loss=0.3312, pruned_loss=0.09123, over 19530.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2958, pruned_loss=0.07027, over 3801508.99 frames. ], batch size: 54, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:35:32,610 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8600, 1.7226, 1.7145, 2.2927, 1.7540, 2.1751, 2.1857, 1.9741], + device='cuda:1'), covar=tensor([0.0720, 0.0857, 0.0936, 0.0698, 0.0818, 0.0673, 0.0801, 0.0593], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0220, 0.0224, 0.0243, 0.0225, 0.0206, 0.0188, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 08:36:06,060 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:19,840 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:25,886 INFO [train.py:903] (1/4) Epoch 16, batch 6750, loss[loss=0.241, simple_loss=0.3153, pruned_loss=0.08335, over 17442.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2955, pruned_loss=0.06989, over 3800987.66 frames. 
], batch size: 101, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:50,456 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:03,549 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 4.897e+02 5.778e+02 7.062e+02 1.289e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-02 08:37:15,740 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:24,175 INFO [train.py:903] (1/4) Epoch 16, batch 6800, loss[loss=0.2436, simple_loss=0.3141, pruned_loss=0.08651, over 19651.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.06997, over 3807624.17 frames. ], batch size: 58, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:37:45,699 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-04-02 08:38:09,402 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 08:38:09,837 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 08:38:13,472 INFO [train.py:903] (1/4) Epoch 17, batch 0, loss[loss=0.2199, simple_loss=0.3108, pruned_loss=0.06449, over 19660.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3108, pruned_loss=0.06449, over 19660.00 frames. ], batch size: 55, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:38:13,473 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 08:38:24,297 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3524, 1.4079, 1.6580, 1.5492, 2.2272, 2.0449, 2.3042, 0.8950], + device='cuda:1'), covar=tensor([0.2421, 0.4305, 0.2734, 0.1850, 0.1534, 0.2221, 0.1382, 0.4378], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0612, 0.0664, 0.0460, 0.0606, 0.0512, 0.0650, 0.0520], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:38:26,005 INFO [train.py:937] (1/4) Epoch 17, validation: loss=0.1721, simple_loss=0.2728, pruned_loss=0.03571, over 944034.00 frames. +2023-04-02 08:38:26,006 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 08:38:39,466 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 08:38:51,285 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:29,171 INFO [train.py:903] (1/4) Epoch 17, batch 50, loss[loss=0.199, simple_loss=0.2721, pruned_loss=0.06296, over 19062.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2975, pruned_loss=0.07174, over 866563.93 frames. 
], batch size: 42, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:39:32,720 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 5.319e+02 6.338e+02 7.961e+02 1.981e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-02 08:39:40,271 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4010, 1.5523, 2.0832, 1.6790, 3.2698, 2.5348, 3.5436, 1.7285], + device='cuda:1'), covar=tensor([0.2425, 0.4250, 0.2578, 0.1858, 0.1426, 0.2073, 0.1561, 0.3850], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0611, 0.0663, 0.0461, 0.0604, 0.0511, 0.0651, 0.0521], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:39:43,519 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:45,711 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:59,852 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 08:40:13,459 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:14,426 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:28,821 INFO [train.py:903] (1/4) Epoch 17, batch 100, loss[loss=0.2218, simple_loss=0.3088, pruned_loss=0.06745, over 19487.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2956, pruned_loss=0.07094, over 1514802.86 frames. ], batch size: 64, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:40:36,784 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 08:41:16,025 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5844, 1.5608, 1.4937, 2.1704, 1.7965, 1.9440, 2.0441, 1.8139], + device='cuda:1'), covar=tensor([0.0825, 0.0917, 0.1091, 0.0771, 0.0803, 0.0744, 0.0857, 0.0668], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0222, 0.0225, 0.0245, 0.0226, 0.0207, 0.0190, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 08:41:29,301 INFO [train.py:903] (1/4) Epoch 17, batch 150, loss[loss=0.2106, simple_loss=0.2811, pruned_loss=0.07008, over 19396.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2941, pruned_loss=0.07003, over 2031858.37 frames. 
], batch size: 47, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:41:30,556 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109399.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:41:32,668 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.062e+02 6.078e+02 8.331e+02 1.364e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 08:41:46,344 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6413, 1.3903, 1.5808, 1.4366, 3.2170, 0.9986, 2.2217, 3.5906], + device='cuda:1'), covar=tensor([0.0454, 0.2609, 0.2702, 0.1822, 0.0651, 0.2585, 0.1388, 0.0262], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0350, 0.0373, 0.0333, 0.0359, 0.0339, 0.0359, 0.0377], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:42:22,303 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 08:42:29,334 INFO [train.py:903] (1/4) Epoch 17, batch 200, loss[loss=0.1872, simple_loss=0.2534, pruned_loss=0.06045, over 19745.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2956, pruned_loss=0.07112, over 2427074.41 frames. ], batch size: 45, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:27,601 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9775, 1.8507, 1.7925, 1.6145, 1.4905, 1.5897, 0.3442, 0.8310], + device='cuda:1'), covar=tensor([0.0510, 0.0545, 0.0348, 0.0553, 0.0962, 0.0684, 0.1078, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0342, 0.0343, 0.0340, 0.0365, 0.0439, 0.0371, 0.0320, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:43:32,097 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3649, 1.3474, 1.5693, 1.5145, 2.4061, 2.1144, 2.5518, 0.9045], + device='cuda:1'), covar=tensor([0.2227, 0.4032, 0.2488, 0.1884, 0.1500, 0.2009, 0.1409, 0.4222], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0606, 0.0658, 0.0457, 0.0601, 0.0507, 0.0645, 0.0515], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:43:32,829 INFO [train.py:903] (1/4) Epoch 17, batch 250, loss[loss=0.2267, simple_loss=0.3056, pruned_loss=0.0739, over 19624.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2943, pruned_loss=0.07002, over 2750112.49 frames. ], batch size: 57, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:36,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.403e+02 5.064e+02 6.047e+02 7.271e+02 1.663e+03, threshold=1.209e+03, percent-clipped=2.0 +2023-04-02 08:43:52,395 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109514.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:44:04,273 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:35,510 INFO [train.py:903] (1/4) Epoch 17, batch 300, loss[loss=0.2736, simple_loss=0.3363, pruned_loss=0.1054, over 13654.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2939, pruned_loss=0.06998, over 2959453.54 frames. 
], batch size: 136, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:44:37,124 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,070 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:56,641 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3245, 2.7370, 2.7612, 3.1852, 2.9355, 2.7261, 2.4472, 3.1534], + device='cuda:1'), covar=tensor([0.0612, 0.1302, 0.1069, 0.0719, 0.1027, 0.0416, 0.1135, 0.0465], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0353, 0.0300, 0.0244, 0.0296, 0.0249, 0.0294, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:45:36,099 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109597.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:45:36,923 INFO [train.py:903] (1/4) Epoch 17, batch 350, loss[loss=0.1918, simple_loss=0.2778, pruned_loss=0.05286, over 19862.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2925, pruned_loss=0.06887, over 3160822.74 frames. ], batch size: 52, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:45:38,106 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:45:40,567 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.783e+02 5.858e+02 7.548e+02 1.929e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 08:45:49,724 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-04-02 08:46:38,845 INFO [train.py:903] (1/4) Epoch 17, batch 400, loss[loss=0.1811, simple_loss=0.2633, pruned_loss=0.04944, over 19737.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2924, pruned_loss=0.06889, over 3309559.72 frames. ], batch size: 51, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:46:49,103 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:09,952 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:16,747 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:20,118 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:40,321 INFO [train.py:903] (1/4) Epoch 17, batch 450, loss[loss=0.2196, simple_loss=0.3045, pruned_loss=0.06731, over 19651.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2932, pruned_loss=0.06894, over 3427267.59 frames. ], batch size: 53, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:47:44,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 5.202e+02 6.539e+02 8.058e+02 1.631e+03, threshold=1.308e+03, percent-clipped=6.0 +2023-04-02 08:48:12,289 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-02 08:48:12,747 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8941, 2.0028, 2.2279, 2.6877, 1.9358, 2.4971, 2.3401, 2.0562], + device='cuda:1'), covar=tensor([0.4081, 0.3676, 0.1688, 0.2256, 0.4031, 0.2015, 0.4109, 0.3050], + device='cuda:1'), in_proj_covar=tensor([0.0851, 0.0896, 0.0681, 0.0905, 0.0832, 0.0768, 0.0815, 0.0748], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:48:13,422 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 08:48:44,997 INFO [train.py:903] (1/4) Epoch 17, batch 500, loss[loss=0.2004, simple_loss=0.287, pruned_loss=0.05687, over 19774.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2943, pruned_loss=0.07013, over 3525074.49 frames. ], batch size: 56, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:48:48,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 08:48:51,245 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2177, 1.2458, 1.7171, 1.1389, 2.5009, 3.3199, 3.0466, 3.5114], + device='cuda:1'), covar=tensor([0.1590, 0.3664, 0.3147, 0.2527, 0.0588, 0.0228, 0.0216, 0.0262], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0309, 0.0337, 0.0257, 0.0231, 0.0175, 0.0208, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:49:01,142 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-02 08:49:11,991 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109770.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:49:12,970 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:33,449 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 08:49:44,292 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:44,336 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109795.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:49:47,259 INFO [train.py:903] (1/4) Epoch 17, batch 550, loss[loss=0.2308, simple_loss=0.2965, pruned_loss=0.08249, over 19370.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2942, pruned_loss=0.07014, over 3593774.42 frames. ], batch size: 47, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:49:50,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.960e+02 5.345e+02 7.483e+02 9.617e+02 2.288e+03, threshold=1.497e+03, percent-clipped=10.0 +2023-04-02 08:50:48,347 INFO [train.py:903] (1/4) Epoch 17, batch 600, loss[loss=0.1754, simple_loss=0.2547, pruned_loss=0.04804, over 19745.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2941, pruned_loss=0.07015, over 3641712.25 frames. ], batch size: 47, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:27,268 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 08:51:49,465 INFO [train.py:903] (1/4) Epoch 17, batch 650, loss[loss=0.2644, simple_loss=0.3224, pruned_loss=0.1032, over 13574.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.295, pruned_loss=0.07074, over 3688105.87 frames. 
], batch size: 135, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:53,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.642e+02 6.698e+02 8.791e+02 1.815e+03, threshold=1.340e+03, percent-clipped=2.0 +2023-04-02 08:52:27,860 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:44,024 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109941.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:51,841 INFO [train.py:903] (1/4) Epoch 17, batch 700, loss[loss=0.2228, simple_loss=0.2864, pruned_loss=0.07956, over 19755.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2948, pruned_loss=0.07074, over 3711315.13 frames. ], batch size: 47, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:52:58,959 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109952.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:53:13,587 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2778, 5.2080, 6.0784, 6.0226, 2.2641, 5.7206, 4.8592, 5.6763], + device='cuda:1'), covar=tensor([0.1392, 0.0631, 0.0467, 0.0483, 0.5710, 0.0580, 0.0532, 0.1062], + device='cuda:1'), in_proj_covar=tensor([0.0738, 0.0678, 0.0880, 0.0764, 0.0788, 0.0629, 0.0527, 0.0810], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 08:53:57,850 INFO [train.py:903] (1/4) Epoch 17, batch 750, loss[loss=0.1985, simple_loss=0.2895, pruned_loss=0.0538, over 19680.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2953, pruned_loss=0.0706, over 3742252.53 frames. ], batch size: 59, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:54:02,550 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.697e+02 5.712e+02 7.217e+02 1.165e+03, threshold=1.142e+03, percent-clipped=0.0 +2023-04-02 08:54:25,901 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:33,276 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,193 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110035.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,330 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0947, 2.8151, 2.0567, 2.0502, 1.8891, 2.3542, 0.8762, 1.9412], + device='cuda:1'), covar=tensor([0.0621, 0.0578, 0.0744, 0.1104, 0.1068, 0.1045, 0.1228, 0.1007], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0349, 0.0347, 0.0371, 0.0448, 0.0378, 0.0326, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 08:55:00,581 INFO [train.py:903] (1/4) Epoch 17, batch 800, loss[loss=0.1767, simple_loss=0.2548, pruned_loss=0.0493, over 19392.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2949, pruned_loss=0.07048, over 3769981.01 frames. 
], batch size: 48, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:55:04,546 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:05,665 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:10,573 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:14,906 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:55:17,656 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:35,628 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:54,368 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9604, 2.0279, 2.2254, 2.7204, 1.9548, 2.5419, 2.3276, 1.9666], + device='cuda:1'), covar=tensor([0.4032, 0.3712, 0.1730, 0.2220, 0.4061, 0.1931, 0.4287, 0.3190], + device='cuda:1'), in_proj_covar=tensor([0.0848, 0.0897, 0.0680, 0.0903, 0.0830, 0.0764, 0.0811, 0.0746], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 08:56:03,225 INFO [train.py:903] (1/4) Epoch 17, batch 850, loss[loss=0.1692, simple_loss=0.2407, pruned_loss=0.04887, over 19291.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2953, pruned_loss=0.07051, over 3785811.30 frames. ], batch size: 44, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:56:06,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.213e+02 6.563e+02 8.363e+02 2.159e+03, threshold=1.313e+03, percent-clipped=10.0 +2023-04-02 08:56:42,128 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:47,702 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:51,195 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:56,474 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 08:57:04,359 INFO [train.py:903] (1/4) Epoch 17, batch 900, loss[loss=0.1803, simple_loss=0.2543, pruned_loss=0.05321, over 19721.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2949, pruned_loss=0.07064, over 3793872.48 frames. ], batch size: 46, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:57:44,481 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1353, 2.1221, 1.6816, 2.1737, 2.1473, 1.7136, 1.7010, 2.0977], + device='cuda:1'), covar=tensor([0.1095, 0.1620, 0.1738, 0.1118, 0.1372, 0.0950, 0.1689, 0.0913], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0356, 0.0304, 0.0245, 0.0299, 0.0250, 0.0297, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 08:58:05,928 INFO [train.py:903] (1/4) Epoch 17, batch 950, loss[loss=0.2309, simple_loss=0.3101, pruned_loss=0.07586, over 19345.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2972, pruned_loss=0.07185, over 3792219.71 frames. 
], batch size: 70, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:08,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 08:58:09,604 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.867e+02 6.255e+02 8.229e+02 2.250e+03, threshold=1.251e+03, percent-clipped=5.0 +2023-04-02 08:58:15,915 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5921, 1.4364, 1.3518, 2.0851, 1.5934, 1.9065, 1.9253, 1.6573], + device='cuda:1'), covar=tensor([0.0815, 0.0917, 0.1073, 0.0730, 0.0873, 0.0715, 0.0772, 0.0698], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0219, 0.0222, 0.0244, 0.0225, 0.0206, 0.0186, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 08:59:08,997 INFO [train.py:903] (1/4) Epoch 17, batch 1000, loss[loss=0.2064, simple_loss=0.2918, pruned_loss=0.06055, over 19693.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07059, over 3814593.99 frames. ], batch size: 59, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 08:59:44,523 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4112, 1.9283, 2.0171, 2.7944, 2.1236, 2.4545, 2.4877, 2.4495], + device='cuda:1'), covar=tensor([0.0660, 0.0835, 0.0890, 0.0774, 0.0811, 0.0691, 0.0803, 0.0561], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0244, 0.0226, 0.0207, 0.0187, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 09:00:05,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 09:00:11,647 INFO [train.py:903] (1/4) Epoch 17, batch 1050, loss[loss=0.2138, simple_loss=0.2966, pruned_loss=0.06552, over 19369.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2958, pruned_loss=0.07073, over 3816321.33 frames. ], batch size: 70, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:15,438 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:16,232 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 4.918e+02 6.228e+02 8.050e+02 1.872e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 09:00:28,170 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:40,483 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:44,971 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 09:01:00,216 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:01,475 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:13,838 INFO [train.py:903] (1/4) Epoch 17, batch 1100, loss[loss=0.2009, simple_loss=0.2903, pruned_loss=0.05572, over 19705.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2964, pruned_loss=0.07121, over 3810931.86 frames. 
], batch size: 59, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:01:36,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7496, 1.2187, 1.4982, 1.3458, 3.2857, 1.0530, 2.3692, 3.6914], + device='cuda:1'), covar=tensor([0.0460, 0.2878, 0.2891, 0.1966, 0.0726, 0.2542, 0.1257, 0.0258], + device='cuda:1'), in_proj_covar=tensor([0.0389, 0.0355, 0.0375, 0.0335, 0.0360, 0.0342, 0.0357, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:01:54,277 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:09,872 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:16,596 INFO [train.py:903] (1/4) Epoch 17, batch 1150, loss[loss=0.2202, simple_loss=0.3061, pruned_loss=0.06716, over 19651.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2943, pruned_loss=0.06936, over 3825382.46 frames. ], batch size: 55, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:02:21,350 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.729e+02 5.705e+02 7.310e+02 1.426e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-02 09:02:28,350 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:43,159 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:20,433 INFO [train.py:903] (1/4) Epoch 17, batch 1200, loss[loss=0.223, simple_loss=0.2983, pruned_loss=0.07385, over 19305.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2934, pruned_loss=0.06871, over 3822480.95 frames. ], batch size: 66, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 09:03:22,087 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.05 vs. limit=5.0 +2023-04-02 09:03:49,892 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:53,250 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 09:03:55,645 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:19,573 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:23,819 INFO [train.py:903] (1/4) Epoch 17, batch 1250, loss[loss=0.2214, simple_loss=0.3047, pruned_loss=0.06902, over 19321.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2947, pruned_loss=0.06942, over 3811189.08 frames. 
], batch size: 66, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:04:28,265 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.334e+02 6.614e+02 7.905e+02 1.343e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-02 09:04:48,487 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8276, 4.3731, 2.6547, 3.8444, 1.1225, 4.2368, 4.1836, 4.2746], + device='cuda:1'), covar=tensor([0.0578, 0.1037, 0.2053, 0.0856, 0.3879, 0.0637, 0.0795, 0.1065], + device='cuda:1'), in_proj_covar=tensor([0.0471, 0.0385, 0.0465, 0.0331, 0.0389, 0.0402, 0.0400, 0.0429], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:04:50,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110521.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:05:25,879 INFO [train.py:903] (1/4) Epoch 17, batch 1300, loss[loss=0.2168, simple_loss=0.2974, pruned_loss=0.06809, over 17369.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2947, pruned_loss=0.06949, over 3816374.21 frames. ], batch size: 101, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:02,876 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-02 09:06:14,229 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:20,236 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:26,784 INFO [train.py:903] (1/4) Epoch 17, batch 1350, loss[loss=0.1963, simple_loss=0.272, pruned_loss=0.06035, over 19720.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2946, pruned_loss=0.06951, over 3814627.65 frames. ], batch size: 51, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:31,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 4.458e+02 6.078e+02 8.226e+02 1.667e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 09:06:34,198 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3808, 1.4204, 1.8240, 1.6496, 2.6602, 2.1746, 2.7222, 1.3431], + device='cuda:1'), covar=tensor([0.2669, 0.4659, 0.2919, 0.2078, 0.1752, 0.2364, 0.1803, 0.4370], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0621, 0.0675, 0.0467, 0.0612, 0.0518, 0.0658, 0.0525], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 09:07:07,100 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:26,707 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:28,059 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3598, 1.3438, 1.8029, 1.3353, 2.7184, 3.6919, 3.4685, 3.8859], + device='cuda:1'), covar=tensor([0.1541, 0.3633, 0.3074, 0.2290, 0.0557, 0.0185, 0.0195, 0.0242], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0308, 0.0337, 0.0258, 0.0232, 0.0175, 0.0209, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:07:29,984 INFO [train.py:903] (1/4) Epoch 17, batch 1400, loss[loss=0.1992, simple_loss=0.2732, pruned_loss=0.06258, over 19841.00 frames. 
], tot_loss[loss=0.2159, simple_loss=0.2935, pruned_loss=0.06912, over 3818417.72 frames. ], batch size: 52, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:07:51,398 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110665.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:08,774 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:32,849 INFO [train.py:903] (1/4) Epoch 17, batch 1450, loss[loss=0.2054, simple_loss=0.2692, pruned_loss=0.07082, over 19772.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2932, pruned_loss=0.06933, over 3828179.74 frames. ], batch size: 46, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:08:32,898 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 09:08:38,609 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.047e+02 5.721e+02 7.117e+02 1.861e+03, threshold=1.144e+03, percent-clipped=3.0 +2023-04-02 09:09:35,379 INFO [train.py:903] (1/4) Epoch 17, batch 1500, loss[loss=0.1962, simple_loss=0.2732, pruned_loss=0.05963, over 19784.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2923, pruned_loss=0.06866, over 3833821.49 frames. ], batch size: 48, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:09:38,234 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:42,863 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:50,121 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:11,746 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:14,292 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110777.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:16,477 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2567, 3.7606, 3.8760, 3.8808, 1.5461, 3.6943, 3.2202, 3.6321], + device='cuda:1'), covar=tensor([0.1523, 0.0903, 0.0710, 0.0749, 0.5597, 0.0920, 0.0660, 0.1191], + device='cuda:1'), in_proj_covar=tensor([0.0742, 0.0686, 0.0885, 0.0771, 0.0792, 0.0638, 0.0534, 0.0818], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 09:10:17,711 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110780.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:27,703 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 09:10:35,429 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:38,194 INFO [train.py:903] (1/4) Epoch 17, batch 1550, loss[loss=0.2367, simple_loss=0.3151, pruned_loss=0.07912, over 19740.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2927, pruned_loss=0.0691, over 3825323.25 frames. 
], batch size: 63, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:10:43,467 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:44,892 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.421e+02 5.256e+02 6.667e+02 1.625e+03, threshold=1.051e+03, percent-clipped=2.0 +2023-04-02 09:11:34,852 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:11:42,180 INFO [train.py:903] (1/4) Epoch 17, batch 1600, loss[loss=0.2287, simple_loss=0.3047, pruned_loss=0.07637, over 18807.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2945, pruned_loss=0.06979, over 3803465.98 frames. ], batch size: 74, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:11:42,652 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:05,497 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 09:12:05,884 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5432, 1.3947, 1.3873, 1.7967, 1.4103, 1.6619, 1.6690, 1.5756], + device='cuda:1'), covar=tensor([0.0808, 0.0988, 0.1077, 0.0703, 0.0791, 0.0787, 0.0811, 0.0735], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0220, 0.0222, 0.0244, 0.0226, 0.0208, 0.0188, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 09:12:07,077 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:12,588 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:29,566 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:45,459 INFO [train.py:903] (1/4) Epoch 17, batch 1650, loss[loss=0.2325, simple_loss=0.3198, pruned_loss=0.07264, over 19678.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.295, pruned_loss=0.07026, over 3788473.74 frames. ], batch size: 59, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:12:51,287 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 5.600e+02 6.791e+02 9.379e+02 3.114e+03, threshold=1.358e+03, percent-clipped=15.0 +2023-04-02 09:13:30,788 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0461, 1.6074, 1.6306, 2.5360, 2.0342, 2.2592, 2.3984, 2.1206], + device='cuda:1'), covar=tensor([0.0805, 0.1028, 0.1078, 0.0911, 0.0853, 0.0783, 0.0881, 0.0680], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0224, 0.0245, 0.0227, 0.0209, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 09:13:47,398 INFO [train.py:903] (1/4) Epoch 17, batch 1700, loss[loss=0.2199, simple_loss=0.2963, pruned_loss=0.07176, over 19675.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2956, pruned_loss=0.07, over 3805663.25 frames. 
], batch size: 53, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:19,103 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:24,132 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110976.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:29,659 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 09:14:49,217 INFO [train.py:903] (1/4) Epoch 17, batch 1750, loss[loss=0.2414, simple_loss=0.3151, pruned_loss=0.08381, over 19377.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2963, pruned_loss=0.07055, over 3806614.39 frames. ], batch size: 66, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:55,347 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.870e+02 5.792e+02 7.151e+02 1.845e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 09:15:03,695 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7292, 1.5510, 1.5385, 2.0118, 1.5597, 1.9781, 2.0069, 1.7867], + device='cuda:1'), covar=tensor([0.0759, 0.0912, 0.0982, 0.0722, 0.0798, 0.0696, 0.0713, 0.0664], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0223, 0.0245, 0.0227, 0.0208, 0.0188, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 09:15:14,951 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:37,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111036.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:39,734 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9950, 5.0475, 5.8335, 5.7967, 2.1092, 5.4971, 4.6345, 5.4047], + device='cuda:1'), covar=tensor([0.1529, 0.0814, 0.0577, 0.0569, 0.5746, 0.0671, 0.0626, 0.1207], + device='cuda:1'), in_proj_covar=tensor([0.0738, 0.0679, 0.0883, 0.0766, 0.0788, 0.0638, 0.0530, 0.0809], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 09:15:43,367 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111041.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:51,900 INFO [train.py:903] (1/4) Epoch 17, batch 1800, loss[loss=0.1968, simple_loss=0.2778, pruned_loss=0.05791, over 19726.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2949, pruned_loss=0.06986, over 3817075.01 frames. ], batch size: 46, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:15:57,923 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:10,639 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:28,053 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:44,395 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:51,939 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-02 09:16:56,868 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0273, 2.0990, 2.2784, 2.7957, 2.0757, 2.6802, 2.3865, 2.1114], + device='cuda:1'), covar=tensor([0.3778, 0.3509, 0.1681, 0.2123, 0.3719, 0.1812, 0.3990, 0.2998], + device='cuda:1'), in_proj_covar=tensor([0.0853, 0.0902, 0.0682, 0.0909, 0.0833, 0.0770, 0.0812, 0.0752], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 09:16:57,618 INFO [train.py:903] (1/4) Epoch 17, batch 1850, loss[loss=0.233, simple_loss=0.3079, pruned_loss=0.07903, over 19355.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2949, pruned_loss=0.06992, over 3786589.02 frames. ], batch size: 70, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:16:57,849 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:17:03,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.241e+02 6.549e+02 7.790e+02 1.838e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-02 09:17:29,421 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 09:18:01,005 INFO [train.py:903] (1/4) Epoch 17, batch 1900, loss[loss=0.1788, simple_loss=0.2586, pruned_loss=0.04953, over 19766.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2941, pruned_loss=0.06979, over 3773738.94 frames. ], batch size: 47, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:18:17,330 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 09:18:24,352 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 09:18:34,785 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2430, 1.3586, 1.7891, 1.5289, 2.7281, 2.1423, 2.8361, 1.2003], + device='cuda:1'), covar=tensor([0.2493, 0.4220, 0.2538, 0.1926, 0.1417, 0.2136, 0.1371, 0.4121], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0613, 0.0670, 0.0462, 0.0608, 0.0513, 0.0651, 0.0521], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:18:49,754 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 09:19:02,971 INFO [train.py:903] (1/4) Epoch 17, batch 1950, loss[loss=0.2045, simple_loss=0.2889, pruned_loss=0.06004, over 19790.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2952, pruned_loss=0.07001, over 3783745.88 frames. ], batch size: 56, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:19:08,707 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 5.118e+02 6.155e+02 7.161e+02 1.490e+03, threshold=1.231e+03, percent-clipped=2.0 +2023-04-02 09:19:10,543 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-04-02 09:19:23,889 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:24,912 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3440, 1.4606, 1.8402, 1.8207, 2.9412, 3.9174, 3.8064, 4.4088], + device='cuda:1'), covar=tensor([0.1892, 0.4796, 0.4101, 0.2272, 0.0796, 0.0346, 0.0282, 0.0260], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0309, 0.0339, 0.0259, 0.0233, 0.0176, 0.0210, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 09:19:44,079 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111230.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:59,226 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:20:05,434 INFO [train.py:903] (1/4) Epoch 17, batch 2000, loss[loss=0.2403, simple_loss=0.3106, pruned_loss=0.08503, over 13436.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2958, pruned_loss=0.07025, over 3784233.00 frames. ], batch size: 135, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:04,238 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 09:21:07,755 INFO [train.py:903] (1/4) Epoch 17, batch 2050, loss[loss=0.2306, simple_loss=0.3062, pruned_loss=0.07749, over 13802.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2963, pruned_loss=0.07044, over 3791054.25 frames. ], batch size: 136, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:14,881 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.413e+02 6.604e+02 7.706e+02 1.674e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 09:21:22,801 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 09:21:24,048 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 09:21:34,576 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:21:43,893 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 09:22:05,020 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:06,113 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:09,339 INFO [train.py:903] (1/4) Epoch 17, batch 2100, loss[loss=0.1801, simple_loss=0.2625, pruned_loss=0.04882, over 19583.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2961, pruned_loss=0.07038, over 3797922.66 frames. ], batch size: 52, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:22:34,065 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:38,047 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 09:22:55,859 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3319, 2.1827, 1.9994, 1.8332, 1.5677, 1.8157, 0.4753, 1.2428], + device='cuda:1'), covar=tensor([0.0484, 0.0568, 0.0441, 0.0770, 0.1194, 0.0840, 0.1315, 0.0940], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0350, 0.0348, 0.0374, 0.0450, 0.0382, 0.0326, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:23:01,368 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 09:23:10,553 INFO [train.py:903] (1/4) Epoch 17, batch 2150, loss[loss=0.2159, simple_loss=0.3077, pruned_loss=0.06202, over 19673.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2945, pruned_loss=0.06921, over 3821679.79 frames. ], batch size: 59, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:23:15,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2933, 1.3499, 1.5771, 1.4848, 2.2537, 1.9907, 2.3327, 0.9266], + device='cuda:1'), covar=tensor([0.2372, 0.4253, 0.2589, 0.1892, 0.1476, 0.2149, 0.1352, 0.4258], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0615, 0.0671, 0.0463, 0.0611, 0.0514, 0.0653, 0.0522], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:23:16,490 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.943e+02 5.975e+02 7.359e+02 1.553e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 09:23:56,877 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111435.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:11,375 INFO [train.py:903] (1/4) Epoch 17, batch 2200, loss[loss=0.2265, simple_loss=0.2997, pruned_loss=0.07661, over 17623.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2956, pruned_loss=0.06997, over 3813876.21 frames. ], batch size: 101, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:24:38,950 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:58,506 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:09,823 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:15,240 INFO [train.py:903] (1/4) Epoch 17, batch 2250, loss[loss=0.1957, simple_loss=0.2848, pruned_loss=0.05328, over 18178.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2944, pruned_loss=0.06934, over 3805276.96 frames. ], batch size: 84, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:25:22,030 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.257e+02 6.827e+02 8.687e+02 2.303e+03, threshold=1.365e+03, percent-clipped=8.0 +2023-04-02 09:26:16,852 INFO [train.py:903] (1/4) Epoch 17, batch 2300, loss[loss=0.1735, simple_loss=0.2513, pruned_loss=0.0478, over 19372.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2946, pruned_loss=0.06911, over 3812078.13 frames. ], batch size: 47, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:26:27,082 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 09:26:38,553 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. 
limit=2.0 +2023-04-02 09:27:05,561 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:18,439 INFO [train.py:903] (1/4) Epoch 17, batch 2350, loss[loss=0.23, simple_loss=0.3126, pruned_loss=0.07369, over 19276.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2956, pruned_loss=0.06981, over 3805705.53 frames. ], batch size: 66, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:27:22,391 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111601.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:24,245 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.149e+02 5.946e+02 7.803e+02 1.982e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-02 09:27:54,520 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:58,768 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 09:28:06,321 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-02 09:28:14,864 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 09:28:19,175 INFO [train.py:903] (1/4) Epoch 17, batch 2400, loss[loss=0.2162, simple_loss=0.2857, pruned_loss=0.07332, over 19737.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2954, pruned_loss=0.06981, over 3815240.23 frames. ], batch size: 51, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:28:24,021 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0077, 2.1236, 2.3328, 2.7576, 1.9581, 2.6007, 2.4034, 2.1952], + device='cuda:1'), covar=tensor([0.4174, 0.3806, 0.1794, 0.2231, 0.4073, 0.1928, 0.4509, 0.3176], + device='cuda:1'), in_proj_covar=tensor([0.0854, 0.0902, 0.0683, 0.0909, 0.0833, 0.0768, 0.0816, 0.0752], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 09:29:15,392 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:24,915 INFO [train.py:903] (1/4) Epoch 17, batch 2450, loss[loss=0.2241, simple_loss=0.3083, pruned_loss=0.06994, over 19664.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2955, pruned_loss=0.06946, over 3817966.84 frames. ], batch size: 58, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:29:29,901 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:30,394 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-04-02 09:29:32,591 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.976e+02 6.292e+02 8.323e+02 1.636e+03, threshold=1.258e+03, percent-clipped=0.0 +2023-04-02 09:29:33,020 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0183, 1.7000, 1.5990, 1.9752, 1.7045, 1.7066, 1.5377, 1.8553], + device='cuda:1'), covar=tensor([0.0982, 0.1451, 0.1485, 0.1010, 0.1291, 0.0539, 0.1389, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0245, 0.0299, 0.0248, 0.0297, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:29:47,077 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111716.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:30:18,841 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9582, 1.9997, 2.1993, 2.6553, 1.9418, 2.5188, 2.2847, 2.0710], + device='cuda:1'), covar=tensor([0.3881, 0.3624, 0.1669, 0.2176, 0.3684, 0.1884, 0.4380, 0.2917], + device='cuda:1'), in_proj_covar=tensor([0.0857, 0.0907, 0.0687, 0.0914, 0.0836, 0.0773, 0.0818, 0.0753], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 09:30:27,993 INFO [train.py:903] (1/4) Epoch 17, batch 2500, loss[loss=0.2426, simple_loss=0.3094, pruned_loss=0.0879, over 13435.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2955, pruned_loss=0.06976, over 3819462.35 frames. ], batch size: 136, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:30:51,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 09:31:31,116 INFO [train.py:903] (1/4) Epoch 17, batch 2550, loss[loss=0.2497, simple_loss=0.3258, pruned_loss=0.08677, over 19508.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2955, pruned_loss=0.06966, over 3823663.59 frames. ], batch size: 64, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:36,577 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9519, 1.6889, 1.5177, 1.9858, 1.6210, 1.6684, 1.5362, 1.8178], + device='cuda:1'), covar=tensor([0.0951, 0.1359, 0.1435, 0.0924, 0.1233, 0.0527, 0.1289, 0.0714], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0355, 0.0304, 0.0246, 0.0301, 0.0250, 0.0299, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:31:38,617 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.268e+02 6.331e+02 7.722e+02 1.710e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-02 09:31:44,790 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111809.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:32:13,241 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:32:28,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 09:32:34,316 INFO [train.py:903] (1/4) Epoch 17, batch 2600, loss[loss=0.2227, simple_loss=0.3019, pruned_loss=0.07176, over 19663.00 frames. ], tot_loss[loss=0.217, simple_loss=0.295, pruned_loss=0.06955, over 3824891.83 frames. 
], batch size: 55, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:32:50,533 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1198, 5.5866, 2.9276, 4.7578, 0.9735, 5.5716, 5.5585, 5.7594], + device='cuda:1'), covar=tensor([0.0390, 0.0856, 0.1863, 0.0700, 0.3946, 0.0486, 0.0683, 0.0814], + device='cuda:1'), in_proj_covar=tensor([0.0481, 0.0392, 0.0475, 0.0337, 0.0395, 0.0410, 0.0410, 0.0438], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:33:07,602 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1143, 5.1061, 5.9560, 5.9452, 1.9468, 5.6105, 4.8278, 5.5639], + device='cuda:1'), covar=tensor([0.1489, 0.0710, 0.0511, 0.0544, 0.5810, 0.0659, 0.0525, 0.1065], + device='cuda:1'), in_proj_covar=tensor([0.0748, 0.0684, 0.0891, 0.0775, 0.0796, 0.0641, 0.0534, 0.0820], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 09:33:38,517 INFO [train.py:903] (1/4) Epoch 17, batch 2650, loss[loss=0.1949, simple_loss=0.2571, pruned_loss=0.06638, over 19757.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2941, pruned_loss=0.06942, over 3825672.10 frames. ], batch size: 46, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:46,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 5.132e+02 6.430e+02 7.842e+02 1.964e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 09:33:58,643 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 09:34:09,585 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4474, 1.8482, 2.0253, 1.9568, 3.1809, 1.5909, 2.7068, 3.3543], + device='cuda:1'), covar=tensor([0.0446, 0.2137, 0.2122, 0.1468, 0.0549, 0.2075, 0.1393, 0.0317], + device='cuda:1'), in_proj_covar=tensor([0.0386, 0.0351, 0.0372, 0.0334, 0.0360, 0.0341, 0.0358, 0.0378], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:34:38,390 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:41,295 INFO [train.py:903] (1/4) Epoch 17, batch 2700, loss[loss=0.1875, simple_loss=0.2601, pruned_loss=0.05744, over 19729.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2952, pruned_loss=0.07008, over 3828967.33 frames. ], batch size: 46, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:34:54,264 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:58,877 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7802, 1.5950, 1.3961, 1.7693, 1.5008, 1.5406, 1.3927, 1.6581], + device='cuda:1'), covar=tensor([0.1109, 0.1301, 0.1544, 0.1030, 0.1312, 0.0576, 0.1392, 0.0773], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0355, 0.0303, 0.0246, 0.0300, 0.0249, 0.0298, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:35:26,054 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:43,883 INFO [train.py:903] (1/4) Epoch 17, batch 2750, loss[loss=0.1857, simple_loss=0.2624, pruned_loss=0.05453, over 19737.00 frames. 
], tot_loss[loss=0.2191, simple_loss=0.2962, pruned_loss=0.07096, over 3815557.03 frames. ], batch size: 51, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:35:52,117 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.760e+02 5.468e+02 6.814e+02 8.726e+02 1.544e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-02 09:36:45,022 INFO [train.py:903] (1/4) Epoch 17, batch 2800, loss[loss=0.1838, simple_loss=0.2571, pruned_loss=0.05532, over 19297.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2968, pruned_loss=0.07149, over 3812285.38 frames. ], batch size: 44, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:26,092 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0753, 1.2387, 1.6673, 1.2019, 2.4622, 3.2826, 3.0355, 3.4936], + device='cuda:1'), covar=tensor([0.1734, 0.3720, 0.3134, 0.2398, 0.0614, 0.0227, 0.0228, 0.0278], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0307, 0.0337, 0.0256, 0.0229, 0.0174, 0.0208, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:37:29,672 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:37:48,059 INFO [train.py:903] (1/4) Epoch 17, batch 2850, loss[loss=0.2064, simple_loss=0.2822, pruned_loss=0.06528, over 19393.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2973, pruned_loss=0.07154, over 3810619.04 frames. ], batch size: 48, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:54,817 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 5.207e+02 6.661e+02 8.674e+02 1.797e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 09:37:57,423 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6878, 1.5011, 1.6386, 1.5986, 3.2860, 1.1852, 2.4749, 3.6003], + device='cuda:1'), covar=tensor([0.0448, 0.2598, 0.2756, 0.1865, 0.0635, 0.2523, 0.1354, 0.0278], + device='cuda:1'), in_proj_covar=tensor([0.0386, 0.0349, 0.0371, 0.0332, 0.0358, 0.0340, 0.0357, 0.0378], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:38:46,051 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 09:38:49,623 INFO [train.py:903] (1/4) Epoch 17, batch 2900, loss[loss=0.1762, simple_loss=0.2571, pruned_loss=0.04769, over 19603.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2959, pruned_loss=0.07077, over 3816036.82 frames. ], batch size: 50, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:38:56,302 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112153.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:39:51,804 INFO [train.py:903] (1/4) Epoch 17, batch 2950, loss[loss=0.2407, simple_loss=0.3103, pruned_loss=0.08558, over 19415.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2962, pruned_loss=0.07113, over 3820264.87 frames. 
], batch size: 70, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:39:55,987 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:39:58,770 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 4.879e+02 6.137e+02 7.850e+02 1.399e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-04-02 09:40:27,813 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:40:36,012 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4019, 1.5034, 1.7438, 1.6115, 2.6127, 2.2906, 2.8224, 1.2128], + device='cuda:1'), covar=tensor([0.2299, 0.4004, 0.2456, 0.1792, 0.1457, 0.1944, 0.1316, 0.3974], + device='cuda:1'), in_proj_covar=tensor([0.0507, 0.0610, 0.0664, 0.0461, 0.0603, 0.0513, 0.0649, 0.0522], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 09:40:54,421 INFO [train.py:903] (1/4) Epoch 17, batch 3000, loss[loss=0.1769, simple_loss=0.2648, pruned_loss=0.04446, over 19749.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2952, pruned_loss=0.0706, over 3830579.51 frames. ], batch size: 51, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:40:54,422 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 09:41:09,008 INFO [train.py:937] (1/4) Epoch 17, validation: loss=0.1717, simple_loss=0.272, pruned_loss=0.03576, over 944034.00 frames. +2023-04-02 09:41:09,010 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 09:41:13,758 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 09:41:33,188 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112268.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:42:07,701 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2220, 5.5281, 3.2692, 4.7369, 0.8381, 5.7467, 5.5298, 5.8069], + device='cuda:1'), covar=tensor([0.0329, 0.0824, 0.1660, 0.0719, 0.4328, 0.0440, 0.0654, 0.0744], + device='cuda:1'), in_proj_covar=tensor([0.0475, 0.0388, 0.0472, 0.0333, 0.0392, 0.0406, 0.0405, 0.0434], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:42:09,759 INFO [train.py:903] (1/4) Epoch 17, batch 3050, loss[loss=0.252, simple_loss=0.3292, pruned_loss=0.08736, over 19724.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2956, pruned_loss=0.07088, over 3817397.95 frames. ], batch size: 63, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:42:16,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.197e+02 6.217e+02 9.038e+02 1.667e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-02 09:43:10,132 INFO [train.py:903] (1/4) Epoch 17, batch 3100, loss[loss=0.1818, simple_loss=0.2604, pruned_loss=0.05158, over 16476.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2956, pruned_loss=0.0711, over 3822888.52 frames. ], batch size: 36, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:14,541 INFO [train.py:903] (1/4) Epoch 17, batch 3150, loss[loss=0.2633, simple_loss=0.3371, pruned_loss=0.09477, over 18780.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2962, pruned_loss=0.0715, over 3821384.81 frames. 
], batch size: 74, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:21,822 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.016e+02 6.190e+02 7.660e+02 1.883e+03, threshold=1.238e+03, percent-clipped=9.0 +2023-04-02 09:44:25,517 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112407.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:44:42,669 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 09:44:51,283 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:45:17,098 INFO [train.py:903] (1/4) Epoch 17, batch 3200, loss[loss=0.2696, simple_loss=0.338, pruned_loss=0.1006, over 19667.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2954, pruned_loss=0.07085, over 3808411.38 frames. ], batch size: 55, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:19,039 INFO [train.py:903] (1/4) Epoch 17, batch 3250, loss[loss=0.2063, simple_loss=0.2813, pruned_loss=0.06568, over 19378.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2959, pruned_loss=0.07082, over 3814198.69 frames. ], batch size: 48, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:26,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.958e+02 6.274e+02 7.840e+02 2.025e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 09:46:42,256 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4129, 1.3347, 1.3273, 1.7435, 1.3748, 1.6582, 1.6920, 1.5585], + device='cuda:1'), covar=tensor([0.0786, 0.0881, 0.1024, 0.0689, 0.0818, 0.0702, 0.0755, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0222, 0.0223, 0.0243, 0.0227, 0.0209, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 09:46:51,208 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112524.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:47:13,554 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:47:18,725 INFO [train.py:903] (1/4) Epoch 17, batch 3300, loss[loss=0.2382, simple_loss=0.3149, pruned_loss=0.0807, over 17539.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.296, pruned_loss=0.07134, over 3796720.51 frames. ], batch size: 101, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:47:20,384 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112549.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:47:25,359 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 09:48:00,388 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:20,961 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:24,058 INFO [train.py:903] (1/4) Epoch 17, batch 3350, loss[loss=0.2462, simple_loss=0.3185, pruned_loss=0.08695, over 19592.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2943, pruned_loss=0.07039, over 3802877.43 frames. 
], batch size: 61, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:48:31,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.436e+02 6.846e+02 8.612e+02 1.565e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 09:49:09,924 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:49:24,116 INFO [train.py:903] (1/4) Epoch 17, batch 3400, loss[loss=0.2556, simple_loss=0.3286, pruned_loss=0.0913, over 19758.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2946, pruned_loss=0.07052, over 3792201.00 frames. ], batch size: 63, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:25,917 INFO [train.py:903] (1/4) Epoch 17, batch 3450, loss[loss=0.2222, simple_loss=0.2921, pruned_loss=0.07613, over 19830.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2941, pruned_loss=0.06975, over 3813079.46 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:28,239 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 09:50:33,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.932e+02 6.092e+02 9.481e+02 2.200e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-02 09:50:45,720 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7733, 1.2949, 1.6246, 1.5132, 3.3284, 1.0368, 2.3911, 3.7797], + device='cuda:1'), covar=tensor([0.0486, 0.2840, 0.2748, 0.1880, 0.0724, 0.2669, 0.1291, 0.0222], + device='cuda:1'), in_proj_covar=tensor([0.0388, 0.0351, 0.0371, 0.0331, 0.0359, 0.0340, 0.0356, 0.0378], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:51:27,355 INFO [train.py:903] (1/4) Epoch 17, batch 3500, loss[loss=0.2208, simple_loss=0.3018, pruned_loss=0.06991, over 18704.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2953, pruned_loss=0.07058, over 3810503.70 frames. ], batch size: 74, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:51:32,019 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:31,117 INFO [train.py:903] (1/4) Epoch 17, batch 3550, loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.07226, over 19674.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.0703, over 3793791.10 frames. 
], batch size: 58, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:52:32,892 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:38,380 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.759e+02 5.980e+02 7.566e+02 1.638e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 09:53:03,320 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:53:10,140 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8854, 1.3465, 1.0630, 0.9420, 1.1709, 1.0011, 0.8964, 1.2639], + device='cuda:1'), covar=tensor([0.0627, 0.0769, 0.1148, 0.0695, 0.0523, 0.1235, 0.0621, 0.0440], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0311, 0.0333, 0.0258, 0.0245, 0.0333, 0.0296, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 09:53:33,312 INFO [train.py:903] (1/4) Epoch 17, batch 3600, loss[loss=0.2122, simple_loss=0.2992, pruned_loss=0.06259, over 19674.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2941, pruned_loss=0.06966, over 3817591.09 frames. ], batch size: 59, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:53:55,561 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112866.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:54:35,715 INFO [train.py:903] (1/4) Epoch 17, batch 3650, loss[loss=0.2028, simple_loss=0.2793, pruned_loss=0.06319, over 19604.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2946, pruned_loss=0.06951, over 3818795.46 frames. ], batch size: 50, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:54:43,571 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.951e+02 4.960e+02 5.826e+02 7.647e+02 1.614e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 09:55:09,846 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112924.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,601 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,734 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:38,877 INFO [train.py:903] (1/4) Epoch 17, batch 3700, loss[loss=0.1806, simple_loss=0.2625, pruned_loss=0.04933, over 19487.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2954, pruned_loss=0.06959, over 3825492.71 frames. ], batch size: 49, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:55:55,798 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1076, 1.3442, 1.7366, 1.4559, 2.7579, 3.7048, 3.4955, 3.9388], + device='cuda:1'), covar=tensor([0.1725, 0.3550, 0.3231, 0.2283, 0.0614, 0.0233, 0.0183, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0311, 0.0342, 0.0259, 0.0232, 0.0179, 0.0211, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 09:56:17,480 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:56:28,228 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-02 09:56:42,247 INFO [train.py:903] (1/4) Epoch 17, batch 3750, loss[loss=0.1706, simple_loss=0.2475, pruned_loss=0.04681, over 17314.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2948, pruned_loss=0.06937, over 3826874.35 frames. ], batch size: 38, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:49,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.723e+02 6.001e+02 7.947e+02 1.345e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-02 09:57:32,631 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113039.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:57:42,418 INFO [train.py:903] (1/4) Epoch 17, batch 3800, loss[loss=0.2342, simple_loss=0.3019, pruned_loss=0.0833, over 19745.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2955, pruned_loss=0.07002, over 3819459.70 frames. ], batch size: 51, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:57:49,549 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:14,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 09:58:39,370 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:43,325 INFO [train.py:903] (1/4) Epoch 17, batch 3850, loss[loss=0.1927, simple_loss=0.2716, pruned_loss=0.05694, over 19487.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.296, pruned_loss=0.0708, over 3805852.84 frames. ], batch size: 49, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:58:51,557 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.627e+02 5.316e+02 6.326e+02 9.097e+02 1.552e+03, threshold=1.265e+03, percent-clipped=8.0 +2023-04-02 09:59:14,526 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113122.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:29,828 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2740, 1.2608, 1.8587, 1.2519, 2.6540, 3.8015, 3.4808, 3.9089], + device='cuda:1'), covar=tensor([0.1521, 0.3666, 0.2955, 0.2373, 0.0591, 0.0163, 0.0184, 0.0210], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0309, 0.0338, 0.0257, 0.0230, 0.0176, 0.0209, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 09:59:44,563 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113147.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:45,346 INFO [train.py:903] (1/4) Epoch 17, batch 3900, loss[loss=0.1833, simple_loss=0.2634, pruned_loss=0.05155, over 19567.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2949, pruned_loss=0.06988, over 3808669.32 frames. 
], batch size: 52, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:59:56,874 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1977, 1.1492, 1.5693, 1.0772, 2.4415, 3.5213, 3.1918, 3.6463], + device='cuda:1'), covar=tensor([0.1677, 0.4033, 0.3590, 0.2630, 0.0642, 0.0184, 0.0209, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0310, 0.0339, 0.0258, 0.0230, 0.0177, 0.0210, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 10:00:48,760 INFO [train.py:903] (1/4) Epoch 17, batch 3950, loss[loss=0.2404, simple_loss=0.3143, pruned_loss=0.08332, over 19429.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2943, pruned_loss=0.06921, over 3802040.45 frames. ], batch size: 70, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:56,124 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 10:00:57,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.545e+02 5.288e+02 6.585e+02 1.560e+03, threshold=1.058e+03, percent-clipped=1.0 +2023-04-02 10:01:51,456 INFO [train.py:903] (1/4) Epoch 17, batch 4000, loss[loss=0.2137, simple_loss=0.2987, pruned_loss=0.06433, over 19526.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2956, pruned_loss=0.07023, over 3794558.80 frames. ], batch size: 54, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:01:56,567 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:11,594 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 10:02:35,244 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:39,812 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 10:02:44,786 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2056, 1.1020, 1.0945, 1.4448, 1.2548, 1.3789, 1.3809, 1.2195], + device='cuda:1'), covar=tensor([0.0653, 0.0755, 0.0807, 0.0591, 0.0773, 0.0626, 0.0747, 0.0600], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0241, 0.0226, 0.0207, 0.0187, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 10:02:49,658 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:52,759 INFO [train.py:903] (1/4) Epoch 17, batch 4050, loss[loss=0.193, simple_loss=0.2669, pruned_loss=0.05957, over 15083.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06966, over 3797576.25 frames. 
], batch size: 33, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:00,896 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 4.703e+02 5.716e+02 7.594e+02 1.568e+03, threshold=1.143e+03, percent-clipped=5.0 +2023-04-02 10:03:08,237 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:21,618 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:38,671 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:54,677 INFO [train.py:903] (1/4) Epoch 17, batch 4100, loss[loss=0.2155, simple_loss=0.3006, pruned_loss=0.06523, over 19671.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2949, pruned_loss=0.0701, over 3796029.40 frames. ], batch size: 53, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:57,604 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:27,858 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 10:04:28,699 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:31,497 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 10:04:56,296 INFO [train.py:903] (1/4) Epoch 17, batch 4150, loss[loss=0.2258, simple_loss=0.3089, pruned_loss=0.07134, over 19035.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2941, pruned_loss=0.06968, over 3814358.69 frames. ], batch size: 75, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:04:56,639 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:05:03,828 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 5.375e+02 6.520e+02 8.152e+02 2.133e+03, threshold=1.304e+03, percent-clipped=6.0 +2023-04-02 10:05:39,332 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1018, 1.2674, 1.6302, 1.3908, 2.7202, 1.1378, 2.2669, 3.0300], + device='cuda:1'), covar=tensor([0.0591, 0.2699, 0.2552, 0.1770, 0.0753, 0.2284, 0.1047, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0389, 0.0353, 0.0374, 0.0334, 0.0361, 0.0342, 0.0359, 0.0379], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:05:57,673 INFO [train.py:903] (1/4) Epoch 17, batch 4200, loss[loss=0.2258, simple_loss=0.3107, pruned_loss=0.07047, over 18377.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2944, pruned_loss=0.07003, over 3802454.79 frames. ], batch size: 84, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:06:02,341 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 10:06:54,131 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2068, 5.6280, 2.8684, 4.8461, 1.2641, 5.7591, 5.6692, 5.7741], + device='cuda:1'), covar=tensor([0.0415, 0.0784, 0.1916, 0.0675, 0.3586, 0.0540, 0.0681, 0.0796], + device='cuda:1'), in_proj_covar=tensor([0.0473, 0.0387, 0.0468, 0.0330, 0.0391, 0.0406, 0.0403, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:06:59,520 INFO [train.py:903] (1/4) Epoch 17, batch 4250, loss[loss=0.2058, simple_loss=0.2721, pruned_loss=0.06978, over 19087.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.294, pruned_loss=0.06987, over 3802634.12 frames. ], batch size: 42, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:07:06,465 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.808e+02 5.898e+02 7.585e+02 1.571e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 10:07:13,472 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 10:07:25,853 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 10:08:02,056 INFO [train.py:903] (1/4) Epoch 17, batch 4300, loss[loss=0.2216, simple_loss=0.3029, pruned_loss=0.07014, over 18380.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2939, pruned_loss=0.06965, over 3811476.29 frames. ], batch size: 84, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:08:55,439 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 10:09:02,221 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113596.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:09:04,353 INFO [train.py:903] (1/4) Epoch 17, batch 4350, loss[loss=0.2369, simple_loss=0.3108, pruned_loss=0.08153, over 19623.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2936, pruned_loss=0.06897, over 3824252.84 frames. ], batch size: 57, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:09:12,445 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 4.847e+02 6.118e+02 7.738e+02 1.753e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 10:09:31,777 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.04 vs. limit=5.0 +2023-04-02 10:09:49,719 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:07,376 INFO [train.py:903] (1/4) Epoch 17, batch 4400, loss[loss=0.1927, simple_loss=0.2736, pruned_loss=0.05586, over 19763.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2937, pruned_loss=0.06939, over 3815882.29 frames. ], batch size: 54, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:10:14,904 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113654.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:26,357 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:33,143 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 10:10:43,125 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 10:10:45,258 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113679.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:11:07,786 INFO [train.py:903] (1/4) Epoch 17, batch 4450, loss[loss=0.248, simple_loss=0.323, pruned_loss=0.08652, over 19480.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2941, pruned_loss=0.06948, over 3824407.00 frames. ], batch size: 64, lr: 4.84e-03, grad_scale: 16.0 +2023-04-02 10:11:14,459 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.100e+02 6.811e+02 8.906e+02 1.680e+03, threshold=1.362e+03, percent-clipped=7.0 +2023-04-02 10:11:23,001 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113711.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:12:07,715 INFO [train.py:903] (1/4) Epoch 17, batch 4500, loss[loss=0.2425, simple_loss=0.3185, pruned_loss=0.0832, over 19525.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2949, pruned_loss=0.07045, over 3810318.91 frames. ], batch size: 56, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:12:12,819 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1138, 1.9985, 1.8863, 1.7045, 1.4245, 1.7278, 0.6623, 1.0938], + device='cuda:1'), covar=tensor([0.0608, 0.0566, 0.0383, 0.0698, 0.1124, 0.0745, 0.1078, 0.0921], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0345, 0.0345, 0.0373, 0.0445, 0.0377, 0.0322, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:12:15,933 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7001, 4.1133, 4.3418, 4.3438, 2.0254, 4.0694, 3.6556, 4.0719], + device='cuda:1'), covar=tensor([0.1446, 0.1386, 0.0571, 0.0648, 0.5033, 0.0900, 0.0585, 0.0995], + device='cuda:1'), in_proj_covar=tensor([0.0745, 0.0691, 0.0896, 0.0778, 0.0799, 0.0648, 0.0539, 0.0822], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:13:09,293 INFO [train.py:903] (1/4) Epoch 17, batch 4550, loss[loss=0.242, simple_loss=0.3169, pruned_loss=0.08356, over 19312.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2951, pruned_loss=0.07049, over 3810285.99 frames. ], batch size: 66, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:13:19,358 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 10:13:20,452 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 5.090e+02 6.214e+02 7.749e+02 1.433e+03, threshold=1.243e+03, percent-clipped=2.0 +2023-04-02 10:13:42,463 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 10:14:12,730 INFO [train.py:903] (1/4) Epoch 17, batch 4600, loss[loss=0.2323, simple_loss=0.3102, pruned_loss=0.07719, over 19692.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07083, over 3800298.42 frames. ], batch size: 59, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:14,385 INFO [train.py:903] (1/4) Epoch 17, batch 4650, loss[loss=0.2446, simple_loss=0.3261, pruned_loss=0.08158, over 19535.00 frames. ], tot_loss[loss=0.219, simple_loss=0.296, pruned_loss=0.07097, over 3805122.97 frames. 
], batch size: 56, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:23,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.276e+02 6.482e+02 7.907e+02 1.823e+03, threshold=1.296e+03, percent-clipped=2.0 +2023-04-02 10:15:31,174 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1324, 1.4414, 1.9926, 1.5772, 3.0691, 4.6419, 4.4975, 5.0032], + device='cuda:1'), covar=tensor([0.1700, 0.3596, 0.3034, 0.2150, 0.0575, 0.0159, 0.0153, 0.0154], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0313, 0.0342, 0.0260, 0.0233, 0.0178, 0.0212, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 10:15:32,011 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 10:15:44,652 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 10:15:55,314 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4479, 1.5125, 1.9142, 1.5964, 2.9313, 2.5879, 3.3291, 1.4680], + device='cuda:1'), covar=tensor([0.2303, 0.4094, 0.2636, 0.1854, 0.1590, 0.1951, 0.1506, 0.4018], + device='cuda:1'), in_proj_covar=tensor([0.0516, 0.0616, 0.0670, 0.0463, 0.0607, 0.0515, 0.0650, 0.0525], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:16:16,809 INFO [train.py:903] (1/4) Epoch 17, batch 4700, loss[loss=0.2295, simple_loss=0.3073, pruned_loss=0.07586, over 19720.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2948, pruned_loss=0.07027, over 3808497.25 frames. ], batch size: 63, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:16:42,948 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:16:43,741 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 10:16:56,546 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:12,201 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:19,851 INFO [train.py:903] (1/4) Epoch 17, batch 4750, loss[loss=0.2372, simple_loss=0.316, pruned_loss=0.07915, over 19644.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2942, pruned_loss=0.0696, over 3818661.92 frames. 
], batch size: 60, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:17:32,731 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.836e+02 6.122e+02 7.624e+02 1.576e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 10:17:35,075 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:53,581 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6192, 1.4792, 1.4466, 1.9054, 1.6161, 1.8166, 1.8540, 1.6657], + device='cuda:1'), covar=tensor([0.0768, 0.0906, 0.0995, 0.0761, 0.0826, 0.0755, 0.0860, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0225, 0.0244, 0.0228, 0.0209, 0.0190, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 10:18:02,761 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2931, 2.1106, 1.9250, 1.8760, 1.5626, 1.7210, 0.6685, 1.2765], + device='cuda:1'), covar=tensor([0.0521, 0.0546, 0.0436, 0.0737, 0.1150, 0.0862, 0.1130, 0.0951], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0346, 0.0345, 0.0371, 0.0444, 0.0377, 0.0323, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:18:24,446 INFO [train.py:903] (1/4) Epoch 17, batch 4800, loss[loss=0.2274, simple_loss=0.3005, pruned_loss=0.07715, over 18311.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2949, pruned_loss=0.07033, over 3812974.64 frames. ], batch size: 83, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:22,128 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:19:25,654 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2936, 3.7729, 3.8814, 3.8991, 1.5370, 3.7064, 3.2398, 3.6115], + device='cuda:1'), covar=tensor([0.1607, 0.1053, 0.0697, 0.0739, 0.5411, 0.0925, 0.0706, 0.1219], + device='cuda:1'), in_proj_covar=tensor([0.0739, 0.0691, 0.0887, 0.0772, 0.0791, 0.0643, 0.0536, 0.0814], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:19:26,568 INFO [train.py:903] (1/4) Epoch 17, batch 4850, loss[loss=0.2459, simple_loss=0.3188, pruned_loss=0.08655, over 19783.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2942, pruned_loss=0.07005, over 3815366.79 frames. ], batch size: 56, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:35,426 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.130e+02 6.675e+02 8.728e+02 1.864e+03, threshold=1.335e+03, percent-clipped=11.0 +2023-04-02 10:19:52,822 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-02 10:19:57,405 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:08,928 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8208, 3.3035, 3.3517, 3.3473, 1.3386, 3.2049, 2.8220, 3.1209], + device='cuda:1'), covar=tensor([0.1648, 0.0921, 0.0781, 0.0846, 0.5325, 0.0871, 0.0781, 0.1241], + device='cuda:1'), in_proj_covar=tensor([0.0742, 0.0694, 0.0892, 0.0777, 0.0798, 0.0646, 0.0539, 0.0819], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:20:14,647 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 10:20:19,129 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 10:20:20,353 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 10:20:25,163 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:28,431 INFO [train.py:903] (1/4) Epoch 17, batch 4900, loss[loss=0.2534, simple_loss=0.3304, pruned_loss=0.08815, over 17238.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2955, pruned_loss=0.07062, over 3803424.46 frames. ], batch size: 101, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:20:28,494 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 10:20:45,337 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0215, 2.0594, 2.2436, 2.6472, 1.8885, 2.4676, 2.3252, 2.0997], + device='cuda:1'), covar=tensor([0.3888, 0.3409, 0.1712, 0.2183, 0.3814, 0.1879, 0.4158, 0.3037], + device='cuda:1'), in_proj_covar=tensor([0.0856, 0.0905, 0.0687, 0.0913, 0.0836, 0.0773, 0.0817, 0.0755], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 10:20:48,124 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 10:21:29,546 INFO [train.py:903] (1/4) Epoch 17, batch 4950, loss[loss=0.2312, simple_loss=0.3129, pruned_loss=0.07481, over 13506.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2955, pruned_loss=0.07018, over 3802622.77 frames. ], batch size: 136, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:21:41,951 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.073e+02 6.090e+02 7.599e+02 1.461e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-02 10:21:48,500 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 10:22:09,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 10:22:31,813 INFO [train.py:903] (1/4) Epoch 17, batch 5000, loss[loss=0.2303, simple_loss=0.3032, pruned_loss=0.07877, over 19663.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2965, pruned_loss=0.0709, over 3801590.79 frames. ], batch size: 55, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:22:39,601 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 10:22:50,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 10:23:32,992 INFO [train.py:903] (1/4) Epoch 17, batch 5050, loss[loss=0.1773, simple_loss=0.2541, pruned_loss=0.05025, over 19777.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.07011, over 3811149.52 frames. ], batch size: 47, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:23:42,367 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.068e+02 6.244e+02 7.899e+02 1.430e+03, threshold=1.249e+03, percent-clipped=5.0 +2023-04-02 10:24:10,958 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 10:24:34,969 INFO [train.py:903] (1/4) Epoch 17, batch 5100, loss[loss=0.1933, simple_loss=0.2795, pruned_loss=0.05358, over 19544.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2949, pruned_loss=0.0699, over 3806084.20 frames. ], batch size: 56, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:24:37,695 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:24:44,352 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 10:24:46,809 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 10:24:52,452 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 10:25:10,346 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:14,824 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:19,674 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5341, 2.1751, 1.6526, 1.4831, 2.1101, 1.3660, 1.4593, 1.8548], + device='cuda:1'), covar=tensor([0.0997, 0.0757, 0.0899, 0.0815, 0.0466, 0.1140, 0.0683, 0.0477], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0309, 0.0329, 0.0256, 0.0245, 0.0328, 0.0292, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:25:36,284 INFO [train.py:903] (1/4) Epoch 17, batch 5150, loss[loss=0.2285, simple_loss=0.3039, pruned_loss=0.07655, over 19569.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2961, pruned_loss=0.07052, over 3782537.02 frames. ], batch size: 52, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:25:46,456 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114404.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:49,640 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.288e+02 6.652e+02 7.839e+02 1.735e+03, threshold=1.330e+03, percent-clipped=3.0 +2023-04-02 10:25:50,836 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 10:26:24,427 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 10:26:41,819 INFO [train.py:903] (1/4) Epoch 17, batch 5200, loss[loss=0.2462, simple_loss=0.3271, pruned_loss=0.08267, over 19549.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2958, pruned_loss=0.07063, over 3794853.51 frames. 
], batch size: 56, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:26:47,159 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 10:26:54,845 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 10:27:33,116 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114489.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:27:35,863 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2398, 1.2975, 1.3001, 1.0611, 1.1134, 1.1489, 0.0642, 0.3979], + device='cuda:1'), covar=tensor([0.0643, 0.0584, 0.0381, 0.0503, 0.1224, 0.0544, 0.1096, 0.0971], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0346, 0.0345, 0.0374, 0.0448, 0.0379, 0.0325, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:27:37,842 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 10:27:43,618 INFO [train.py:903] (1/4) Epoch 17, batch 5250, loss[loss=0.1979, simple_loss=0.2813, pruned_loss=0.05723, over 19531.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2959, pruned_loss=0.07073, over 3783956.13 frames. ], batch size: 54, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:27:53,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 4.802e+02 5.852e+02 7.465e+02 1.395e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 10:28:03,714 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1212, 1.9562, 1.6828, 2.0794, 1.9128, 1.8036, 1.6032, 1.9948], + device='cuda:1'), covar=tensor([0.0955, 0.1325, 0.1397, 0.1012, 0.1284, 0.0518, 0.1344, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0352, 0.0299, 0.0243, 0.0297, 0.0246, 0.0293, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:28:26,787 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 10:28:44,596 INFO [train.py:903] (1/4) Epoch 17, batch 5300, loss[loss=0.2167, simple_loss=0.3016, pruned_loss=0.06587, over 19740.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2956, pruned_loss=0.07073, over 3781400.01 frames. ], batch size: 63, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:28:59,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 10:29:44,123 INFO [train.py:903] (1/4) Epoch 17, batch 5350, loss[loss=0.2129, simple_loss=0.2976, pruned_loss=0.06411, over 19271.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2937, pruned_loss=0.06957, over 3788057.59 frames. ], batch size: 66, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:29:51,244 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:29:54,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 5.171e+02 6.688e+02 9.089e+02 2.274e+03, threshold=1.338e+03, percent-clipped=9.0 +2023-04-02 10:30:19,031 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 10:30:46,234 INFO [train.py:903] (1/4) Epoch 17, batch 5400, loss[loss=0.1696, simple_loss=0.2502, pruned_loss=0.0445, over 19729.00 frames. 
], tot_loss[loss=0.2156, simple_loss=0.2932, pruned_loss=0.069, over 3790950.87 frames. ], batch size: 51, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:01,004 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9177, 4.4772, 2.8918, 3.8325, 0.8072, 4.4329, 4.3047, 4.4083], + device='cuda:1'), covar=tensor([0.0522, 0.0878, 0.1769, 0.0821, 0.4169, 0.0586, 0.0798, 0.1040], + device='cuda:1'), in_proj_covar=tensor([0.0474, 0.0390, 0.0474, 0.0336, 0.0394, 0.0410, 0.0408, 0.0435], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:31:47,161 INFO [train.py:903] (1/4) Epoch 17, batch 5450, loss[loss=0.2163, simple_loss=0.3005, pruned_loss=0.06606, over 19674.00 frames. ], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.06903, over 3788638.79 frames. ], batch size: 55, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:56,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.503e+02 5.761e+02 7.243e+02 1.420e+03, threshold=1.152e+03, percent-clipped=1.0 +2023-04-02 10:32:31,011 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:32:47,136 INFO [train.py:903] (1/4) Epoch 17, batch 5500, loss[loss=0.1966, simple_loss=0.2792, pruned_loss=0.05701, over 19582.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2928, pruned_loss=0.06817, over 3802307.48 frames. ], batch size: 52, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:33:10,830 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 10:33:45,881 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7707, 4.2056, 4.4406, 4.4601, 1.7068, 4.1774, 3.5775, 4.1392], + device='cuda:1'), covar=tensor([0.1672, 0.0967, 0.0658, 0.0684, 0.6033, 0.0862, 0.0756, 0.1188], + device='cuda:1'), in_proj_covar=tensor([0.0748, 0.0696, 0.0898, 0.0783, 0.0802, 0.0648, 0.0543, 0.0827], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:33:46,749 INFO [train.py:903] (1/4) Epoch 17, batch 5550, loss[loss=0.2191, simple_loss=0.299, pruned_loss=0.06955, over 19582.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.293, pruned_loss=0.06887, over 3813966.50 frames. ], batch size: 61, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:33:54,781 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 10:33:55,925 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.950e+02 6.230e+02 7.289e+02 1.704e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 10:34:41,616 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 10:34:48,925 INFO [train.py:903] (1/4) Epoch 17, batch 5600, loss[loss=0.2207, simple_loss=0.3056, pruned_loss=0.06791, over 17384.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2926, pruned_loss=0.06854, over 3813837.80 frames. 
], batch size: 101, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:34:54,822 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6578, 1.4376, 1.4133, 2.2234, 1.7063, 1.9265, 2.0206, 1.6754], + device='cuda:1'), covar=tensor([0.0846, 0.1012, 0.1109, 0.0850, 0.0876, 0.0818, 0.0893, 0.0798], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0222, 0.0224, 0.0243, 0.0227, 0.0210, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 10:35:03,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:33,292 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:39,369 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114889.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:50,182 INFO [train.py:903] (1/4) Epoch 17, batch 5650, loss[loss=0.2251, simple_loss=0.2991, pruned_loss=0.07556, over 19327.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2928, pruned_loss=0.06879, over 3823574.42 frames. ], batch size: 66, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:59,363 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.160e+02 6.498e+02 8.575e+02 1.504e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-02 10:36:36,126 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 10:36:51,138 INFO [train.py:903] (1/4) Epoch 17, batch 5700, loss[loss=0.2399, simple_loss=0.3133, pruned_loss=0.08319, over 19441.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2933, pruned_loss=0.06888, over 3830994.11 frames. ], batch size: 70, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:50,261 INFO [train.py:903] (1/4) Epoch 17, batch 5750, loss[loss=0.1767, simple_loss=0.2485, pruned_loss=0.05251, over 19732.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2931, pruned_loss=0.06839, over 3839686.45 frames. ], batch size: 45, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:50,274 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 10:37:57,221 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 10:37:59,478 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 5.221e+02 6.429e+02 7.572e+02 1.818e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 10:38:04,571 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 10:38:50,579 INFO [train.py:903] (1/4) Epoch 17, batch 5800, loss[loss=0.1889, simple_loss=0.2677, pruned_loss=0.05507, over 19498.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2929, pruned_loss=0.06813, over 3841647.08 frames. 
], batch size: 49, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:38:52,004 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2152, 3.7319, 3.8479, 3.8873, 1.6047, 3.6476, 3.1777, 3.5783], + device='cuda:1'), covar=tensor([0.1707, 0.1062, 0.0709, 0.0762, 0.5358, 0.1062, 0.0773, 0.1179], + device='cuda:1'), in_proj_covar=tensor([0.0741, 0.0691, 0.0891, 0.0775, 0.0794, 0.0645, 0.0536, 0.0819], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:39:27,226 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:39:28,586 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5800, 1.2035, 1.2031, 1.4646, 1.1496, 1.3698, 1.1941, 1.4313], + device='cuda:1'), covar=tensor([0.1051, 0.1163, 0.1561, 0.0980, 0.1269, 0.0609, 0.1430, 0.0782], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0352, 0.0302, 0.0245, 0.0298, 0.0247, 0.0294, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:39:35,785 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 10:39:52,203 INFO [train.py:903] (1/4) Epoch 17, batch 5850, loss[loss=0.2343, simple_loss=0.3192, pruned_loss=0.07471, over 19589.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2919, pruned_loss=0.06746, over 3847981.98 frames. ], batch size: 61, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:39:59,380 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:40:01,418 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 4.663e+02 6.050e+02 7.882e+02 1.454e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 10:40:51,600 INFO [train.py:903] (1/4) Epoch 17, batch 5900, loss[loss=0.1935, simple_loss=0.2664, pruned_loss=0.06033, over 15655.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2924, pruned_loss=0.06752, over 3848835.24 frames. ], batch size: 34, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:40:55,184 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 10:40:55,561 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9702, 3.3092, 1.9334, 2.0048, 2.9243, 1.6517, 1.4165, 2.0881], + device='cuda:1'), covar=tensor([0.1419, 0.0526, 0.0974, 0.0783, 0.0570, 0.1164, 0.0941, 0.0644], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0309, 0.0328, 0.0257, 0.0245, 0.0327, 0.0290, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:41:13,977 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 10:41:45,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:41:51,149 INFO [train.py:903] (1/4) Epoch 17, batch 5950, loss[loss=0.2798, simple_loss=0.3469, pruned_loss=0.1064, over 18397.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2925, pruned_loss=0.06762, over 3853119.08 frames. 
], batch size: 84, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:00,467 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.938e+02 6.318e+02 8.201e+02 2.090e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:42:20,332 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3247, 1.3426, 1.6336, 1.4896, 2.2210, 2.0488, 2.3176, 0.9426], + device='cuda:1'), covar=tensor([0.2389, 0.4156, 0.2519, 0.1918, 0.1475, 0.2143, 0.1348, 0.4168], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0615, 0.0674, 0.0464, 0.0611, 0.0518, 0.0652, 0.0527], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:42:35,071 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115233.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:42:43,156 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0106, 5.4077, 3.0874, 4.7719, 1.3137, 5.4746, 5.3885, 5.5434], + device='cuda:1'), covar=tensor([0.0386, 0.0817, 0.1673, 0.0580, 0.3526, 0.0520, 0.0716, 0.0914], + device='cuda:1'), in_proj_covar=tensor([0.0471, 0.0388, 0.0469, 0.0336, 0.0391, 0.0406, 0.0404, 0.0433], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:42:51,742 INFO [train.py:903] (1/4) Epoch 17, batch 6000, loss[loss=0.1948, simple_loss=0.2706, pruned_loss=0.05949, over 19769.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2934, pruned_loss=0.06806, over 3848958.54 frames. ], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:51,742 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 10:43:04,253 INFO [train.py:937] (1/4) Epoch 17, validation: loss=0.1707, simple_loss=0.2712, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 10:43:04,254 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 10:44:04,145 INFO [train.py:903] (1/4) Epoch 17, batch 6050, loss[loss=0.1771, simple_loss=0.2619, pruned_loss=0.04612, over 19845.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06724, over 3847207.60 frames. ], batch size: 52, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:44:15,951 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 5.156e+02 6.136e+02 7.598e+02 1.906e+03, threshold=1.227e+03, percent-clipped=4.0 +2023-04-02 10:45:06,510 INFO [train.py:903] (1/4) Epoch 17, batch 6100, loss[loss=0.2105, simple_loss=0.2993, pruned_loss=0.06088, over 19667.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2921, pruned_loss=0.06726, over 3851269.35 frames. 
], batch size: 55, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:45:06,885 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115348.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:32,783 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9782, 2.5651, 1.8296, 1.8872, 2.3844, 1.7394, 1.6447, 2.0857], + device='cuda:1'), covar=tensor([0.0965, 0.0678, 0.0794, 0.0634, 0.0476, 0.0950, 0.0642, 0.0501], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0308, 0.0328, 0.0257, 0.0243, 0.0325, 0.0288, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:45:54,962 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:56,148 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:46:06,535 INFO [train.py:903] (1/4) Epoch 17, batch 6150, loss[loss=0.2026, simple_loss=0.2779, pruned_loss=0.06368, over 19621.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06745, over 3854343.30 frames. ], batch size: 50, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:46:15,589 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.209e+02 6.440e+02 8.380e+02 1.538e+03, threshold=1.288e+03, percent-clipped=5.0 +2023-04-02 10:46:33,785 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 10:46:55,033 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6821, 1.3618, 1.2811, 1.5345, 1.2614, 1.4563, 1.3081, 1.5114], + device='cuda:1'), covar=tensor([0.1005, 0.0954, 0.1491, 0.0946, 0.1144, 0.0568, 0.1329, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0353, 0.0303, 0.0245, 0.0297, 0.0247, 0.0294, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:47:07,332 INFO [train.py:903] (1/4) Epoch 17, batch 6200, loss[loss=0.1956, simple_loss=0.2627, pruned_loss=0.06428, over 19745.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2921, pruned_loss=0.0677, over 3856513.16 frames. ], batch size: 45, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:47:07,478 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:08,911 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:13,991 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-02 10:47:39,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:48:07,512 INFO [train.py:903] (1/4) Epoch 17, batch 6250, loss[loss=0.2123, simple_loss=0.2962, pruned_loss=0.06422, over 19674.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2928, pruned_loss=0.06777, over 3848233.12 frames. 
], batch size: 58, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:48:16,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.690e+02 5.769e+02 7.890e+02 2.007e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-02 10:48:37,593 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 10:48:38,468 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 10:48:52,594 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5382, 4.7362, 5.2989, 5.3011, 2.3639, 4.9871, 4.3697, 4.9599], + device='cuda:1'), covar=tensor([0.1486, 0.1398, 0.0507, 0.0573, 0.5079, 0.0732, 0.0596, 0.1009], + device='cuda:1'), in_proj_covar=tensor([0.0738, 0.0689, 0.0891, 0.0772, 0.0792, 0.0644, 0.0534, 0.0819], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:49:00,686 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2582, 2.0952, 1.8939, 1.7511, 1.6363, 1.8092, 0.5761, 1.1462], + device='cuda:1'), covar=tensor([0.0492, 0.0555, 0.0434, 0.0675, 0.0973, 0.0759, 0.1189, 0.0936], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0346, 0.0347, 0.0376, 0.0446, 0.0381, 0.0326, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 10:49:09,161 INFO [train.py:903] (1/4) Epoch 17, batch 6300, loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.05617, over 19758.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2926, pruned_loss=0.06752, over 3837034.89 frames. ], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:49:27,667 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115563.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:12,486 INFO [train.py:903] (1/4) Epoch 17, batch 6350, loss[loss=0.228, simple_loss=0.3045, pruned_loss=0.0757, over 19672.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.0667, over 3846453.48 frames. ], batch size: 55, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:50:19,854 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6317, 4.1512, 4.2808, 4.2700, 1.6803, 4.0347, 3.5318, 4.0258], + device='cuda:1'), covar=tensor([0.1563, 0.0800, 0.0634, 0.0707, 0.5418, 0.0803, 0.0695, 0.1098], + device='cuda:1'), in_proj_covar=tensor([0.0739, 0.0690, 0.0895, 0.0776, 0.0794, 0.0646, 0.0534, 0.0820], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:50:20,018 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:21,924 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.077e+02 8.091e+02 1.466e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 10:50:50,633 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:51:13,890 INFO [train.py:903] (1/4) Epoch 17, batch 6400, loss[loss=0.2637, simple_loss=0.3374, pruned_loss=0.09494, over 17506.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06793, over 3829595.40 frames. 
], batch size: 102, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:15,088 INFO [train.py:903] (1/4) Epoch 17, batch 6450, loss[loss=0.1802, simple_loss=0.259, pruned_loss=0.05069, over 16491.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2922, pruned_loss=0.06795, over 3824134.87 frames. ], batch size: 36, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:25,104 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.829e+02 5.862e+02 7.962e+02 1.327e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 10:52:58,107 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115732.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:52:59,220 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:53:01,396 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 10:53:16,373 INFO [train.py:903] (1/4) Epoch 17, batch 6500, loss[loss=0.2695, simple_loss=0.3317, pruned_loss=0.1036, over 19398.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2922, pruned_loss=0.06795, over 3827351.72 frames. ], batch size: 70, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:53:24,001 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 10:54:18,473 INFO [train.py:903] (1/4) Epoch 17, batch 6550, loss[loss=0.221, simple_loss=0.2801, pruned_loss=0.08099, over 19748.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.293, pruned_loss=0.06817, over 3832041.81 frames. ], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:54:28,764 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.150e+02 6.522e+02 8.804e+02 2.234e+03, threshold=1.304e+03, percent-clipped=7.0 +2023-04-02 10:54:41,251 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 10:54:43,128 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:15,257 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115844.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:18,733 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115847.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:19,552 INFO [train.py:903] (1/4) Epoch 17, batch 6600, loss[loss=0.1938, simple_loss=0.2648, pruned_loss=0.06144, over 19753.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2935, pruned_loss=0.06826, over 3826171.71 frames. ], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:55:19,947 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:56:19,810 INFO [train.py:903] (1/4) Epoch 17, batch 6650, loss[loss=0.219, simple_loss=0.2987, pruned_loss=0.06968, over 19651.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2928, pruned_loss=0.068, over 3830574.11 frames. 
], batch size: 60, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:56:21,357 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7744, 2.2352, 2.3127, 2.6253, 2.4182, 2.2822, 2.3236, 2.5916], + device='cuda:1'), covar=tensor([0.0817, 0.1668, 0.1323, 0.0988, 0.1376, 0.0484, 0.1109, 0.0610], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0354, 0.0305, 0.0245, 0.0298, 0.0247, 0.0294, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 10:56:30,891 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.833e+02 5.946e+02 8.225e+02 1.682e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-02 10:56:35,578 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115910.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 10:57:05,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2901, 2.0482, 2.2365, 2.8199, 1.9896, 2.7092, 2.5700, 2.6779], + device='cuda:1'), covar=tensor([0.0724, 0.0823, 0.0889, 0.0791, 0.0848, 0.0638, 0.0877, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0243, 0.0226, 0.0210, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 10:57:08,287 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5051, 4.0363, 4.2125, 4.2227, 1.7095, 3.9748, 3.4869, 3.9021], + device='cuda:1'), covar=tensor([0.1659, 0.1001, 0.0658, 0.0682, 0.5309, 0.0953, 0.0684, 0.1177], + device='cuda:1'), in_proj_covar=tensor([0.0733, 0.0688, 0.0890, 0.0772, 0.0789, 0.0643, 0.0533, 0.0812], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 10:57:21,958 INFO [train.py:903] (1/4) Epoch 17, batch 6700, loss[loss=0.211, simple_loss=0.2938, pruned_loss=0.06408, over 19534.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2932, pruned_loss=0.06816, over 3829820.07 frames. ], batch size: 56, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:20,393 INFO [train.py:903] (1/4) Epoch 17, batch 6750, loss[loss=0.2404, simple_loss=0.3239, pruned_loss=0.07849, over 19643.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2935, pruned_loss=0.06881, over 3828332.34 frames. ], batch size: 58, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:31,541 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.304e+02 6.320e+02 7.514e+02 1.971e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:58:52,447 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 10:58:59,757 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:59:05,990 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-04-02 10:59:12,142 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9512, 2.0399, 2.2132, 2.6410, 1.9743, 2.5261, 2.3007, 2.0307], + device='cuda:1'), covar=tensor([0.4009, 0.3616, 0.1819, 0.2323, 0.3947, 0.1935, 0.4409, 0.3207], + device='cuda:1'), in_proj_covar=tensor([0.0857, 0.0909, 0.0686, 0.0911, 0.0837, 0.0773, 0.0818, 0.0754], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 10:59:17,211 INFO [train.py:903] (1/4) Epoch 17, batch 6800, loss[loss=0.1965, simple_loss=0.277, pruned_loss=0.05803, over 19780.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2943, pruned_loss=0.06944, over 3817593.44 frames. ], batch size: 54, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 11:00:03,229 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 11:00:04,647 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 11:00:07,138 INFO [train.py:903] (1/4) Epoch 18, batch 0, loss[loss=0.2232, simple_loss=0.3123, pruned_loss=0.06706, over 19520.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3123, pruned_loss=0.06706, over 19520.00 frames. ], batch size: 56, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:00:07,138 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 11:00:18,781 INFO [train.py:937] (1/4) Epoch 18, validation: loss=0.1712, simple_loss=0.2722, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 11:00:18,782 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 11:00:32,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 11:00:34,761 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6197, 4.1911, 2.6493, 3.6779, 0.9524, 4.0146, 4.0515, 4.0868], + device='cuda:1'), covar=tensor([0.0592, 0.0941, 0.1935, 0.0774, 0.4032, 0.0721, 0.0777, 0.1003], + device='cuda:1'), in_proj_covar=tensor([0.0472, 0.0389, 0.0470, 0.0331, 0.0394, 0.0408, 0.0402, 0.0432], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:00:51,653 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:52,786 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:55,571 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 4.972e+02 6.494e+02 8.085e+02 1.604e+03, threshold=1.299e+03, percent-clipped=1.0 +2023-04-02 11:01:15,448 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:18,719 INFO [train.py:903] (1/4) Epoch 18, batch 50, loss[loss=0.2053, simple_loss=0.2876, pruned_loss=0.06148, over 19658.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3008, pruned_loss=0.07154, over 877091.59 frames. 
], batch size: 55, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:01:21,430 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:23,499 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:30,196 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:52,476 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 11:02:00,153 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5628, 1.1529, 1.3632, 1.1638, 2.2310, 0.9648, 2.1230, 2.4174], + device='cuda:1'), covar=tensor([0.0696, 0.2773, 0.2790, 0.1737, 0.0850, 0.2111, 0.0976, 0.0486], + device='cuda:1'), in_proj_covar=tensor([0.0386, 0.0354, 0.0370, 0.0339, 0.0359, 0.0343, 0.0357, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:02:21,179 INFO [train.py:903] (1/4) Epoch 18, batch 100, loss[loss=0.2227, simple_loss=0.3094, pruned_loss=0.06793, over 19328.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2951, pruned_loss=0.06792, over 1544577.18 frames. ], batch size: 66, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:02:32,225 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 11:02:41,863 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1958, 5.6329, 3.1766, 4.8830, 1.0807, 5.6877, 5.6044, 5.7888], + device='cuda:1'), covar=tensor([0.0384, 0.0823, 0.1778, 0.0654, 0.4175, 0.0498, 0.0713, 0.0905], + device='cuda:1'), in_proj_covar=tensor([0.0472, 0.0390, 0.0472, 0.0332, 0.0395, 0.0408, 0.0403, 0.0433], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:02:58,309 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 4.838e+02 6.090e+02 7.458e+02 2.009e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 11:03:21,607 INFO [train.py:903] (1/4) Epoch 18, batch 150, loss[loss=0.218, simple_loss=0.2881, pruned_loss=0.07392, over 19338.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2929, pruned_loss=0.06795, over 2050321.51 frames. ], batch size: 44, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:03:56,115 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116254.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:04:20,461 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 11:04:21,616 INFO [train.py:903] (1/4) Epoch 18, batch 200, loss[loss=0.2132, simple_loss=0.3008, pruned_loss=0.06282, over 19665.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2922, pruned_loss=0.06818, over 2458672.89 frames. ], batch size: 55, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:01,398 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 4.750e+02 5.613e+02 7.153e+02 1.890e+03, threshold=1.123e+03, percent-clipped=2.0 +2023-04-02 11:05:24,090 INFO [train.py:903] (1/4) Epoch 18, batch 250, loss[loss=0.2023, simple_loss=0.2831, pruned_loss=0.06073, over 19397.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2916, pruned_loss=0.06752, over 2766899.31 frames. 
], batch size: 48, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:43,834 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116341.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:05:57,157 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-02 11:06:06,000 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4410, 1.6147, 2.0945, 1.8670, 3.1553, 4.7168, 4.5651, 5.1258], + device='cuda:1'), covar=tensor([0.1566, 0.3508, 0.2981, 0.1976, 0.0559, 0.0179, 0.0164, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0312, 0.0340, 0.0258, 0.0232, 0.0176, 0.0210, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 11:06:18,033 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116369.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:06:19,007 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4048, 1.0281, 1.2138, 2.0271, 1.6071, 1.5089, 1.6717, 1.3983], + device='cuda:1'), covar=tensor([0.1079, 0.1662, 0.1411, 0.0986, 0.1091, 0.1376, 0.1194, 0.1090], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0240, 0.0225, 0.0209, 0.0186, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 11:06:25,430 INFO [train.py:903] (1/4) Epoch 18, batch 300, loss[loss=0.1937, simple_loss=0.294, pruned_loss=0.04673, over 19696.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2922, pruned_loss=0.0684, over 2997677.55 frames. ], batch size: 59, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:06:25,579 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:07:03,943 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 5.241e+02 6.705e+02 8.261e+02 1.478e+03, threshold=1.341e+03, percent-clipped=3.0 +2023-04-02 11:07:28,470 INFO [train.py:903] (1/4) Epoch 18, batch 350, loss[loss=0.2045, simple_loss=0.2802, pruned_loss=0.06437, over 19479.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06843, over 3179204.94 frames. ], batch size: 49, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:07:33,274 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 11:07:44,272 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 11:08:16,138 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:19,411 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:29,490 INFO [train.py:903] (1/4) Epoch 18, batch 400, loss[loss=0.2147, simple_loss=0.2935, pruned_loss=0.06791, over 19600.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.293, pruned_loss=0.06823, over 3340844.98 frames. 
], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:08:31,845 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:48,738 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116491.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:08,777 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.874e+02 5.859e+02 7.069e+02 1.370e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:09:27,696 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:31,058 INFO [train.py:903] (1/4) Epoch 18, batch 450, loss[loss=0.1639, simple_loss=0.2396, pruned_loss=0.04413, over 19731.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2924, pruned_loss=0.06811, over 3459733.19 frames. ], batch size: 46, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:06,952 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 11:10:08,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 11:10:36,075 INFO [train.py:903] (1/4) Epoch 18, batch 500, loss[loss=0.2072, simple_loss=0.2941, pruned_loss=0.06015, over 19773.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2923, pruned_loss=0.0679, over 3549791.12 frames. ], batch size: 56, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:43,235 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:10:57,219 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116593.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:11:13,493 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.090e+02 6.291e+02 8.243e+02 1.843e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 11:11:38,135 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116625.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:11:38,807 INFO [train.py:903] (1/4) Epoch 18, batch 550, loss[loss=0.2267, simple_loss=0.2947, pruned_loss=0.07939, over 19733.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06813, over 3604761.34 frames. ], batch size: 45, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:11:51,117 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:12:07,796 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116650.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:12:07,847 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5528, 1.5858, 2.0072, 1.7681, 2.6981, 2.2209, 2.6987, 1.5947], + device='cuda:1'), covar=tensor([0.2402, 0.4078, 0.2400, 0.1904, 0.1589, 0.2204, 0.1630, 0.3942], + device='cuda:1'), in_proj_covar=tensor([0.0512, 0.0612, 0.0671, 0.0463, 0.0611, 0.0516, 0.0648, 0.0522], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 11:12:41,000 INFO [train.py:903] (1/4) Epoch 18, batch 600, loss[loss=0.2183, simple_loss=0.2989, pruned_loss=0.06885, over 19665.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2919, pruned_loss=0.06794, over 3648251.88 frames. 
], batch size: 55, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:12:51,641 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116685.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:18,876 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.880e+02 6.214e+02 8.095e+02 1.532e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 11:13:21,174 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 11:13:28,790 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3422, 2.2186, 2.0649, 2.5794, 2.2931, 2.2194, 1.9084, 2.4146], + device='cuda:1'), covar=tensor([0.1006, 0.1527, 0.1427, 0.0962, 0.1359, 0.0503, 0.1317, 0.0674], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0356, 0.0302, 0.0247, 0.0299, 0.0247, 0.0296, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:13:37,835 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116722.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:42,020 INFO [train.py:903] (1/4) Epoch 18, batch 650, loss[loss=0.2256, simple_loss=0.2997, pruned_loss=0.07578, over 19332.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2929, pruned_loss=0.06821, over 3695643.24 frames. ], batch size: 44, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:13:55,024 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9333, 1.6384, 1.8046, 1.6887, 4.4522, 1.0022, 2.6878, 4.8435], + device='cuda:1'), covar=tensor([0.0438, 0.2625, 0.2628, 0.1990, 0.0707, 0.2741, 0.1329, 0.0178], + device='cuda:1'), in_proj_covar=tensor([0.0390, 0.0354, 0.0374, 0.0340, 0.0362, 0.0345, 0.0359, 0.0384], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:14:08,712 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:34,419 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.46 vs. limit=5.0 +2023-04-02 11:14:38,753 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:43,009 INFO [train.py:903] (1/4) Epoch 18, batch 700, loss[loss=0.2157, simple_loss=0.296, pruned_loss=0.06768, over 19596.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2933, pruned_loss=0.06834, over 3730937.97 frames. ], batch size: 61, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:15:15,686 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:23,663 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.148e+02 4.855e+02 5.777e+02 7.000e+02 1.472e+03, threshold=1.155e+03, percent-clipped=1.0 +2023-04-02 11:15:24,935 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:27,853 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. 
limit=2.0 +2023-04-02 11:15:39,747 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:47,508 INFO [train.py:903] (1/4) Epoch 18, batch 750, loss[loss=0.2232, simple_loss=0.3053, pruned_loss=0.07056, over 19451.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2933, pruned_loss=0.06829, over 3749969.47 frames. ], batch size: 64, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:16:03,683 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:16,552 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:35,181 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:39,887 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:50,594 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116874.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:52,580 INFO [train.py:903] (1/4) Epoch 18, batch 800, loss[loss=0.2273, simple_loss=0.3059, pruned_loss=0.07436, over 19525.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2945, pruned_loss=0.06897, over 3765206.80 frames. ], batch size: 54, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:17:06,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 11:17:32,326 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.513e+02 6.326e+02 7.669e+02 1.889e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-02 11:17:51,817 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:17:55,032 INFO [train.py:903] (1/4) Epoch 18, batch 850, loss[loss=0.1983, simple_loss=0.2694, pruned_loss=0.06364, over 18146.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2949, pruned_loss=0.0692, over 3783785.21 frames. ], batch size: 40, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:18:12,803 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1301, 1.3991, 1.7017, 1.0806, 2.4985, 3.4228, 3.0932, 3.5914], + device='cuda:1'), covar=tensor([0.1571, 0.3469, 0.3069, 0.2399, 0.0555, 0.0173, 0.0226, 0.0245], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0312, 0.0339, 0.0256, 0.0231, 0.0177, 0.0209, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 11:18:48,138 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 11:18:56,389 INFO [train.py:903] (1/4) Epoch 18, batch 900, loss[loss=0.2658, simple_loss=0.3303, pruned_loss=0.1006, over 13277.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2944, pruned_loss=0.06901, over 3805441.86 frames. 
], batch size: 136, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:19:01,328 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116980.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:03,966 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:38,590 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 4.671e+02 5.637e+02 7.276e+02 1.422e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 11:20:00,610 INFO [train.py:903] (1/4) Epoch 18, batch 950, loss[loss=0.2367, simple_loss=0.316, pruned_loss=0.07865, over 19679.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2918, pruned_loss=0.06775, over 3819848.11 frames. ], batch size: 60, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:20:02,910 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 11:20:38,619 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:20:51,127 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:03,205 INFO [train.py:903] (1/4) Epoch 18, batch 1000, loss[loss=0.2614, simple_loss=0.3248, pruned_loss=0.09901, over 17473.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2932, pruned_loss=0.06842, over 3818516.19 frames. ], batch size: 101, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:21:11,172 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:27,337 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:43,056 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 4.999e+02 6.181e+02 7.829e+02 2.221e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 11:21:56,747 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 11:22:07,202 INFO [train.py:903] (1/4) Epoch 18, batch 1050, loss[loss=0.2849, simple_loss=0.3527, pruned_loss=0.1086, over 19716.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2926, pruned_loss=0.06808, over 3837898.46 frames. ], batch size: 63, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:22:40,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 11:22:55,009 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117164.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:09,065 INFO [train.py:903] (1/4) Epoch 18, batch 1100, loss[loss=0.1926, simple_loss=0.2777, pruned_loss=0.05375, over 19693.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.06745, over 3839019.19 frames. 
], batch size: 59, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:23:13,165 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:15,471 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117181.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:24,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0908, 1.9292, 1.7700, 1.6142, 1.4176, 1.5616, 0.5874, 1.0190], + device='cuda:1'), covar=tensor([0.0555, 0.0622, 0.0466, 0.0707, 0.1252, 0.0937, 0.1143, 0.0997], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0347, 0.0347, 0.0376, 0.0449, 0.0382, 0.0328, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 11:23:44,468 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117204.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:49,508 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.961e+02 5.150e+02 6.225e+02 7.900e+02 1.283e+03, threshold=1.245e+03, percent-clipped=2.0 +2023-04-02 11:24:11,125 INFO [train.py:903] (1/4) Epoch 18, batch 1150, loss[loss=0.21, simple_loss=0.2934, pruned_loss=0.06332, over 19673.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2933, pruned_loss=0.06841, over 3830881.09 frames. ], batch size: 60, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:24:27,252 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:28,299 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117239.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:39,772 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9657, 5.3381, 3.0004, 4.5781, 0.8971, 5.4115, 5.3549, 5.4762], + device='cuda:1'), covar=tensor([0.0380, 0.0897, 0.1855, 0.0667, 0.4271, 0.0550, 0.0700, 0.0818], + device='cuda:1'), in_proj_covar=tensor([0.0479, 0.0393, 0.0475, 0.0337, 0.0395, 0.0413, 0.0406, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:24:58,323 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:14,042 INFO [train.py:903] (1/4) Epoch 18, batch 1200, loss[loss=0.2346, simple_loss=0.305, pruned_loss=0.08209, over 19535.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.293, pruned_loss=0.06831, over 3827250.03 frames. ], batch size: 54, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:25:18,957 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:31,264 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8239, 1.5539, 1.4770, 1.8515, 1.5346, 1.6021, 1.4940, 1.7244], + device='cuda:1'), covar=tensor([0.0951, 0.1255, 0.1432, 0.0952, 0.1161, 0.0541, 0.1274, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0248, 0.0299, 0.0247, 0.0295, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:25:50,730 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-02 11:25:54,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.775e+02 5.862e+02 7.562e+02 1.280e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:26:12,979 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 11:26:18,175 INFO [train.py:903] (1/4) Epoch 18, batch 1250, loss[loss=0.2235, simple_loss=0.3079, pruned_loss=0.06962, over 18252.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2927, pruned_loss=0.06756, over 3828660.56 frames. ], batch size: 83, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:26:43,786 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:49,489 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117351.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:58,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2674, 2.9190, 2.2208, 2.2308, 2.2398, 2.4937, 0.9077, 2.0418], + device='cuda:1'), covar=tensor([0.0632, 0.0619, 0.0717, 0.1099, 0.0998, 0.1142, 0.1408, 0.1065], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0347, 0.0348, 0.0376, 0.0449, 0.0381, 0.0328, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 11:27:20,900 INFO [train.py:903] (1/4) Epoch 18, batch 1300, loss[loss=0.1937, simple_loss=0.2682, pruned_loss=0.05964, over 19772.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2928, pruned_loss=0.06773, over 3830805.68 frames. ], batch size: 47, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:27:21,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:27:48,173 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6235, 1.5375, 1.6124, 2.3225, 1.6801, 2.0366, 2.0625, 1.7992], + device='cuda:1'), covar=tensor([0.0872, 0.0974, 0.1009, 0.0718, 0.0869, 0.0724, 0.0857, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0223, 0.0240, 0.0226, 0.0209, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 11:28:01,379 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 5.418e+02 6.594e+02 8.357e+02 1.516e+03, threshold=1.319e+03, percent-clipped=5.0 +2023-04-02 11:28:22,257 INFO [train.py:903] (1/4) Epoch 18, batch 1350, loss[loss=0.2165, simple_loss=0.3071, pruned_loss=0.06292, over 19659.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2923, pruned_loss=0.0677, over 3824813.23 frames. ], batch size: 58, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:28:37,213 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:07,720 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:21,695 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 11:29:24,521 INFO [train.py:903] (1/4) Epoch 18, batch 1400, loss[loss=0.2153, simple_loss=0.3045, pruned_loss=0.06301, over 19781.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2925, pruned_loss=0.06802, over 3824020.76 frames. 
], batch size: 56, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:30:04,552 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.042e+02 5.443e+02 6.741e+02 8.791e+02 2.167e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 11:30:28,260 INFO [train.py:903] (1/4) Epoch 18, batch 1450, loss[loss=0.2048, simple_loss=0.2876, pruned_loss=0.06097, over 19582.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2927, pruned_loss=0.06801, over 3822765.05 frames. ], batch size: 52, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:30:29,446 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 11:30:40,106 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117535.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:11,203 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117560.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:30,945 INFO [train.py:903] (1/4) Epoch 18, batch 1500, loss[loss=0.2047, simple_loss=0.2793, pruned_loss=0.06507, over 19608.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2925, pruned_loss=0.06758, over 3839298.66 frames. ], batch size: 50, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:31:39,057 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:53,638 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-02 11:32:11,896 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.657e+02 6.062e+02 7.787e+02 1.498e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 11:32:32,138 INFO [train.py:903] (1/4) Epoch 18, batch 1550, loss[loss=0.2318, simple_loss=0.3097, pruned_loss=0.07697, over 19774.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2932, pruned_loss=0.06821, over 3821064.21 frames. ], batch size: 56, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:33:34,531 INFO [train.py:903] (1/4) Epoch 18, batch 1600, loss[loss=0.2021, simple_loss=0.2761, pruned_loss=0.06404, over 19597.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2932, pruned_loss=0.06817, over 3816742.10 frames. ], batch size: 50, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:33:54,960 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:01,925 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 11:34:03,380 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:15,617 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.964e+02 5.954e+02 7.051e+02 1.393e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 11:34:37,828 INFO [train.py:903] (1/4) Epoch 18, batch 1650, loss[loss=0.2887, simple_loss=0.3571, pruned_loss=0.1102, over 19705.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2924, pruned_loss=0.0677, over 3825795.83 frames. 
], batch size: 59, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:34:44,895 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3917, 1.4336, 1.9299, 1.5276, 2.8089, 3.4681, 3.2752, 3.6658], + device='cuda:1'), covar=tensor([0.1659, 0.3700, 0.3085, 0.2270, 0.0638, 0.0230, 0.0199, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0309, 0.0338, 0.0257, 0.0230, 0.0177, 0.0209, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 11:35:04,216 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3620, 2.4244, 2.6043, 3.3075, 2.3686, 3.1678, 2.7163, 2.4032], + device='cuda:1'), covar=tensor([0.4000, 0.3929, 0.1720, 0.2213, 0.4240, 0.1828, 0.4213, 0.3160], + device='cuda:1'), in_proj_covar=tensor([0.0858, 0.0911, 0.0689, 0.0910, 0.0837, 0.0775, 0.0818, 0.0757], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 11:35:39,488 INFO [train.py:903] (1/4) Epoch 18, batch 1700, loss[loss=0.2307, simple_loss=0.3122, pruned_loss=0.07455, over 18129.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2919, pruned_loss=0.06732, over 3826480.04 frames. ], batch size: 83, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:36:16,633 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:36:19,947 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.954e+02 5.098e+02 6.258e+02 7.253e+02 1.524e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 11:36:21,124 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 11:36:40,238 INFO [train.py:903] (1/4) Epoch 18, batch 1750, loss[loss=0.2364, simple_loss=0.3252, pruned_loss=0.07383, over 19719.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2931, pruned_loss=0.06833, over 3807236.66 frames. ], batch size: 63, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:37:14,810 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 11:37:43,040 INFO [train.py:903] (1/4) Epoch 18, batch 1800, loss[loss=0.1862, simple_loss=0.2732, pruned_loss=0.04957, over 19620.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2938, pruned_loss=0.06844, over 3808344.33 frames. ], batch size: 50, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:38:23,379 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.803e+02 6.041e+02 7.952e+02 1.877e+03, threshold=1.208e+03, percent-clipped=3.0 +2023-04-02 11:38:42,090 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 11:38:45,250 INFO [train.py:903] (1/4) Epoch 18, batch 1850, loss[loss=0.2115, simple_loss=0.2911, pruned_loss=0.06594, over 19776.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06834, over 3805680.63 frames. ], batch size: 56, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:18,903 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 11:39:19,310 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117954.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:39:47,639 INFO [train.py:903] (1/4) Epoch 18, batch 1900, loss[loss=0.1965, simple_loss=0.2625, pruned_loss=0.06525, over 19001.00 frames. 
], tot_loss[loss=0.2148, simple_loss=0.2933, pruned_loss=0.0681, over 3817223.10 frames. ], batch size: 42, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:51,607 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:40:03,258 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 11:40:07,857 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 11:40:27,944 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.120e+02 6.309e+02 7.973e+02 1.539e+03, threshold=1.262e+03, percent-clipped=6.0 +2023-04-02 11:40:34,560 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 11:40:48,198 INFO [train.py:903] (1/4) Epoch 18, batch 1950, loss[loss=0.2105, simple_loss=0.2993, pruned_loss=0.06083, over 17854.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2939, pruned_loss=0.06864, over 3813305.10 frames. ], batch size: 83, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:41:28,971 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:33,499 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:49,956 INFO [train.py:903] (1/4) Epoch 18, batch 2000, loss[loss=0.2657, simple_loss=0.329, pruned_loss=0.1012, over 18708.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2944, pruned_loss=0.06867, over 3818366.24 frames. ], batch size: 74, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:42:05,421 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:42:11,305 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-02 11:42:31,794 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 5.367e+02 6.632e+02 8.072e+02 1.503e+03, threshold=1.326e+03, percent-clipped=5.0 +2023-04-02 11:42:47,639 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 11:42:54,016 INFO [train.py:903] (1/4) Epoch 18, batch 2050, loss[loss=0.2062, simple_loss=0.287, pruned_loss=0.06267, over 19683.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2937, pruned_loss=0.06843, over 3813427.43 frames. ], batch size: 53, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:43:06,268 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 11:43:07,436 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 11:43:26,099 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 11:43:55,359 INFO [train.py:903] (1/4) Epoch 18, batch 2100, loss[loss=0.1783, simple_loss=0.2566, pruned_loss=0.04994, over 19764.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2927, pruned_loss=0.06767, over 3815969.08 frames. 
], batch size: 47, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:44:06,993 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:44:21,259 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 11:44:36,112 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.186e+02 6.689e+02 8.493e+02 1.656e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 11:44:44,889 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 11:44:56,294 INFO [train.py:903] (1/4) Epoch 18, batch 2150, loss[loss=0.2018, simple_loss=0.2703, pruned_loss=0.06666, over 19855.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2924, pruned_loss=0.06762, over 3815541.27 frames. ], batch size: 52, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:45:39,515 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6101, 2.4819, 2.3068, 2.7687, 2.5741, 2.4666, 2.1281, 2.9880], + device='cuda:1'), covar=tensor([0.0943, 0.1613, 0.1410, 0.1038, 0.1351, 0.0471, 0.1346, 0.0534], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0350, 0.0299, 0.0245, 0.0296, 0.0244, 0.0292, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:45:57,807 INFO [train.py:903] (1/4) Epoch 18, batch 2200, loss[loss=0.2013, simple_loss=0.2789, pruned_loss=0.06179, over 19870.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2922, pruned_loss=0.06777, over 3814441.65 frames. ], batch size: 52, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:46:13,139 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:37,312 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:39,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 5.366e+02 7.280e+02 9.450e+02 2.114e+03, threshold=1.456e+03, percent-clipped=6.0 +2023-04-02 11:47:01,200 INFO [train.py:903] (1/4) Epoch 18, batch 2250, loss[loss=0.2033, simple_loss=0.2834, pruned_loss=0.06156, over 19746.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.291, pruned_loss=0.06692, over 3824006.85 frames. ], batch size: 54, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:47:27,302 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:47:47,838 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.1630, 3.7994, 2.6555, 3.4430, 1.1204, 3.7065, 3.6711, 3.7047], + device='cuda:1'), covar=tensor([0.0614, 0.1013, 0.1740, 0.0767, 0.3660, 0.0727, 0.0826, 0.1169], + device='cuda:1'), in_proj_covar=tensor([0.0478, 0.0395, 0.0479, 0.0340, 0.0399, 0.0416, 0.0411, 0.0441], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:48:02,745 INFO [train.py:903] (1/4) Epoch 18, batch 2300, loss[loss=0.1873, simple_loss=0.2752, pruned_loss=0.04964, over 19157.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2924, pruned_loss=0.06801, over 3803442.33 frames. 
], batch size: 69, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:48:15,073 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 11:48:22,236 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4488, 1.2660, 1.4774, 1.4599, 3.0308, 1.1700, 2.3076, 3.3105], + device='cuda:1'), covar=tensor([0.0510, 0.2736, 0.2666, 0.1844, 0.0701, 0.2338, 0.1139, 0.0310], + device='cuda:1'), in_proj_covar=tensor([0.0398, 0.0359, 0.0377, 0.0344, 0.0367, 0.0347, 0.0367, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:48:35,800 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:48:44,941 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.253e+02 6.243e+02 7.561e+02 1.558e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 11:49:05,630 INFO [train.py:903] (1/4) Epoch 18, batch 2350, loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.06285, over 18276.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.06667, over 3816352.99 frames. ], batch size: 84, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:49:12,803 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2657, 2.0094, 1.5490, 1.2907, 1.8397, 1.2367, 1.2517, 1.7300], + device='cuda:1'), covar=tensor([0.0947, 0.0821, 0.1059, 0.0871, 0.0519, 0.1215, 0.0713, 0.0460], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0262, 0.0247, 0.0334, 0.0293, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:49:46,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 11:50:02,122 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 11:50:06,555 INFO [train.py:903] (1/4) Epoch 18, batch 2400, loss[loss=0.2188, simple_loss=0.3015, pruned_loss=0.06807, over 18754.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2925, pruned_loss=0.06803, over 3811930.27 frames. ], batch size: 74, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:50:48,691 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.821e+02 5.593e+02 7.730e+02 1.750e+03, threshold=1.119e+03, percent-clipped=3.0 +2023-04-02 11:50:58,551 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118517.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:10,678 INFO [train.py:903] (1/4) Epoch 18, batch 2450, loss[loss=0.1945, simple_loss=0.2812, pruned_loss=0.0539, over 19539.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2929, pruned_loss=0.06805, over 3803912.27 frames. 
], batch size: 54, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:51:16,268 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118530.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:57,137 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0559, 1.4295, 1.7672, 1.1887, 2.7698, 3.7565, 3.4835, 3.9729], + device='cuda:1'), covar=tensor([0.1708, 0.3566, 0.3232, 0.2446, 0.0560, 0.0160, 0.0201, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0310, 0.0340, 0.0257, 0.0232, 0.0178, 0.0210, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 11:52:12,595 INFO [train.py:903] (1/4) Epoch 18, batch 2500, loss[loss=0.2167, simple_loss=0.2964, pruned_loss=0.06854, over 19668.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.294, pruned_loss=0.06844, over 3798096.12 frames. ], batch size: 60, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:52:53,549 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 5.081e+02 6.040e+02 7.266e+02 1.380e+03, threshold=1.208e+03, percent-clipped=1.0 +2023-04-02 11:53:13,674 INFO [train.py:903] (1/4) Epoch 18, batch 2550, loss[loss=0.2561, simple_loss=0.3287, pruned_loss=0.09173, over 17209.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2951, pruned_loss=0.06898, over 3801458.25 frames. ], batch size: 101, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:53:19,831 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118631.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:37,325 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-04-02 11:53:37,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:42,612 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:45,539 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:06,500 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 11:54:15,893 INFO [train.py:903] (1/4) Epoch 18, batch 2600, loss[loss=0.2155, simple_loss=0.2959, pruned_loss=0.06755, over 17965.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2943, pruned_loss=0.06861, over 3796214.97 frames. ], batch size: 83, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:54:34,614 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:56,096 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.742e+02 5.749e+02 7.114e+02 1.367e+03, threshold=1.150e+03, percent-clipped=3.0 +2023-04-02 11:55:16,582 INFO [train.py:903] (1/4) Epoch 18, batch 2650, loss[loss=0.1977, simple_loss=0.2748, pruned_loss=0.06028, over 19397.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2934, pruned_loss=0.06813, over 3803552.31 frames. ], batch size: 48, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:55:33,760 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 11:55:42,221 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:06,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:15,279 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:18,250 INFO [train.py:903] (1/4) Epoch 18, batch 2700, loss[loss=0.1857, simple_loss=0.27, pruned_loss=0.05065, over 19592.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.294, pruned_loss=0.06826, over 3810340.16 frames. ], batch size: 52, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:56:44,783 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118798.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:49,080 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. limit=5.0 +2023-04-02 11:56:55,766 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:59,040 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.095e+02 6.154e+02 8.195e+02 1.746e+03, threshold=1.231e+03, percent-clipped=5.0 +2023-04-02 11:57:04,047 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1519, 5.4913, 3.3733, 4.9042, 0.8275, 5.6591, 5.5177, 5.6922], + device='cuda:1'), covar=tensor([0.0376, 0.0878, 0.1590, 0.0665, 0.4475, 0.0492, 0.0727, 0.0951], + device='cuda:1'), in_proj_covar=tensor([0.0483, 0.0394, 0.0480, 0.0341, 0.0402, 0.0419, 0.0413, 0.0445], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:57:06,562 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2552, 2.2024, 1.7339, 2.0937, 2.2747, 1.5848, 1.7983, 2.1267], + device='cuda:1'), covar=tensor([0.1061, 0.1685, 0.1887, 0.1426, 0.1565, 0.1098, 0.1766, 0.0938], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0353, 0.0299, 0.0248, 0.0298, 0.0246, 0.0295, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:57:20,472 INFO [train.py:903] (1/4) Epoch 18, batch 2750, loss[loss=0.1954, simple_loss=0.2655, pruned_loss=0.06264, over 19782.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2933, pruned_loss=0.06771, over 3815953.27 frames. ], batch size: 47, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:15,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2652, 1.2851, 1.5438, 1.4124, 2.3254, 2.0202, 2.4765, 1.0367], + device='cuda:1'), covar=tensor([0.2657, 0.4421, 0.2779, 0.2170, 0.1607, 0.2292, 0.1423, 0.4412], + device='cuda:1'), in_proj_covar=tensor([0.0519, 0.0622, 0.0679, 0.0468, 0.0615, 0.0521, 0.0655, 0.0530], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 11:58:23,109 INFO [train.py:903] (1/4) Epoch 18, batch 2800, loss[loss=0.2124, simple_loss=0.2936, pruned_loss=0.06557, over 17301.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2933, pruned_loss=0.06772, over 3826965.64 frames. 
], batch size: 101, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:41,695 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9660, 1.8946, 1.7775, 1.5558, 1.4463, 1.5971, 0.3488, 0.8519], + device='cuda:1'), covar=tensor([0.0506, 0.0543, 0.0372, 0.0612, 0.1047, 0.0663, 0.1124, 0.0941], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0348, 0.0347, 0.0374, 0.0448, 0.0380, 0.0329, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 11:58:43,966 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9558, 2.0314, 2.0923, 2.0299, 3.6783, 1.6337, 2.9686, 3.7286], + device='cuda:1'), covar=tensor([0.0517, 0.2134, 0.2229, 0.1633, 0.0601, 0.2224, 0.1386, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0399, 0.0357, 0.0378, 0.0342, 0.0365, 0.0345, 0.0366, 0.0389], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 11:58:54,293 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:59:03,985 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.804e+02 6.148e+02 8.494e+02 1.418e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 11:59:24,485 INFO [train.py:903] (1/4) Epoch 18, batch 2850, loss[loss=0.2214, simple_loss=0.3087, pruned_loss=0.06701, over 19399.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2926, pruned_loss=0.06755, over 3834068.38 frames. ], batch size: 70, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:59:24,892 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118926.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:22,869 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 12:00:26,009 INFO [train.py:903] (1/4) Epoch 18, batch 2900, loss[loss=0.2056, simple_loss=0.293, pruned_loss=0.05911, over 19671.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2915, pruned_loss=0.06718, over 3824783.79 frames. ], batch size: 60, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:00:27,563 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0902, 1.9920, 1.8447, 1.7011, 1.5878, 1.6943, 0.4470, 1.0148], + device='cuda:1'), covar=tensor([0.0536, 0.0578, 0.0411, 0.0671, 0.1065, 0.0831, 0.1258, 0.0953], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0349, 0.0348, 0.0375, 0.0449, 0.0381, 0.0330, 0.0337], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:00:46,052 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:54,378 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-04-02 12:00:54,939 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3944, 1.4617, 1.8140, 1.7787, 2.4938, 2.2072, 2.7086, 1.2336], + device='cuda:1'), covar=tensor([0.2670, 0.4565, 0.2849, 0.1990, 0.1899, 0.2379, 0.1719, 0.4641], + device='cuda:1'), in_proj_covar=tensor([0.0520, 0.0625, 0.0682, 0.0469, 0.0618, 0.0524, 0.0659, 0.0533], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 12:00:56,049 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:58,541 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:01:05,216 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 5.127e+02 5.908e+02 8.371e+02 2.467e+03, threshold=1.182e+03, percent-clipped=10.0 +2023-04-02 12:01:21,050 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:25,122 INFO [train.py:903] (1/4) Epoch 18, batch 2950, loss[loss=0.211, simple_loss=0.2771, pruned_loss=0.07243, over 19771.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2925, pruned_loss=0.06779, over 3832637.91 frames. ], batch size: 46, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:01:26,629 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:50,457 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:08,479 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:24,526 INFO [train.py:903] (1/4) Epoch 18, batch 3000, loss[loss=0.2112, simple_loss=0.2901, pruned_loss=0.0661, over 19756.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2918, pruned_loss=0.06771, over 3838852.45 frames. ], batch size: 51, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:02:24,527 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 12:02:37,004 INFO [train.py:937] (1/4) Epoch 18, validation: loss=0.1707, simple_loss=0.2711, pruned_loss=0.03521, over 944034.00 frames. +2023-04-02 12:02:37,005 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 12:02:37,344 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9015, 1.5701, 1.7727, 1.6555, 4.4296, 1.0377, 2.5011, 4.7953], + device='cuda:1'), covar=tensor([0.0420, 0.2735, 0.2878, 0.1970, 0.0686, 0.2769, 0.1511, 0.0171], + device='cuda:1'), in_proj_covar=tensor([0.0398, 0.0359, 0.0379, 0.0343, 0.0366, 0.0347, 0.0367, 0.0390], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:02:40,533 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 12:02:50,762 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:57,635 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:58,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6331, 1.6843, 1.4956, 1.2517, 1.1513, 1.3160, 0.2931, 0.5838], + device='cuda:1'), covar=tensor([0.0810, 0.0718, 0.0481, 0.0713, 0.1468, 0.0870, 0.1281, 0.1234], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0348, 0.0349, 0.0374, 0.0449, 0.0381, 0.0329, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:03:17,159 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,950 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.504e+02 6.538e+02 8.295e+02 4.074e+03, threshold=1.308e+03, percent-clipped=8.0 +2023-04-02 12:03:20,399 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119111.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:37,844 INFO [train.py:903] (1/4) Epoch 18, batch 3050, loss[loss=0.2227, simple_loss=0.3067, pruned_loss=0.06932, over 19666.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.293, pruned_loss=0.06814, over 3842579.60 frames. ], batch size: 60, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:04:37,424 INFO [train.py:903] (1/4) Epoch 18, batch 3100, loss[loss=0.1849, simple_loss=0.2575, pruned_loss=0.05612, over 19767.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06745, over 3838697.40 frames. ], batch size: 45, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:18,221 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.828e+02 5.979e+02 7.400e+02 1.693e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 12:05:39,400 INFO [train.py:903] (1/4) Epoch 18, batch 3150, loss[loss=0.2071, simple_loss=0.2933, pruned_loss=0.06041, over 19737.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2917, pruned_loss=0.06739, over 3842904.32 frames. ], batch size: 63, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:57,630 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5054, 1.5809, 1.7936, 1.7373, 2.7148, 2.3173, 2.8849, 1.4435], + device='cuda:1'), covar=tensor([0.2305, 0.3931, 0.2490, 0.1797, 0.1362, 0.1987, 0.1275, 0.3812], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0619, 0.0676, 0.0466, 0.0611, 0.0517, 0.0652, 0.0528], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:06:06,969 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 12:06:35,321 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5807, 1.4818, 1.5057, 1.9938, 1.7079, 1.8401, 1.8572, 1.6993], + device='cuda:1'), covar=tensor([0.0792, 0.0897, 0.0916, 0.0663, 0.0784, 0.0716, 0.0796, 0.0633], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0245, 0.0228, 0.0211, 0.0190, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 12:06:39,658 INFO [train.py:903] (1/4) Epoch 18, batch 3200, loss[loss=0.2426, simple_loss=0.3198, pruned_loss=0.08266, over 19467.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2925, pruned_loss=0.06782, over 3842669.77 frames. ], batch size: 64, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:06:45,753 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9413, 1.8312, 1.5723, 1.8993, 1.8198, 1.5133, 1.4009, 1.7981], + device='cuda:1'), covar=tensor([0.1065, 0.1463, 0.1614, 0.1195, 0.1422, 0.0762, 0.1713, 0.0834], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0355, 0.0303, 0.0250, 0.0301, 0.0248, 0.0298, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:07:18,202 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.710e+02 5.964e+02 8.137e+02 2.705e+03, threshold=1.193e+03, percent-clipped=4.0 +2023-04-02 12:07:39,012 INFO [train.py:903] (1/4) Epoch 18, batch 3250, loss[loss=0.2187, simple_loss=0.3014, pruned_loss=0.06802, over 19664.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2919, pruned_loss=0.06714, over 3843722.08 frames. ], batch size: 58, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:24,458 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:08:37,659 INFO [train.py:903] (1/4) Epoch 18, batch 3300, loss[loss=0.2192, simple_loss=0.2966, pruned_loss=0.07093, over 19581.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2925, pruned_loss=0.06763, over 3838765.43 frames. ], batch size: 61, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:42,297 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 12:08:53,604 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:09:16,508 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.9237, 5.3490, 3.1101, 4.6664, 1.2174, 5.4744, 5.2789, 5.5212], + device='cuda:1'), covar=tensor([0.0388, 0.0869, 0.1733, 0.0746, 0.4068, 0.0504, 0.0784, 0.0995], + device='cuda:1'), in_proj_covar=tensor([0.0480, 0.0393, 0.0478, 0.0340, 0.0397, 0.0415, 0.0410, 0.0442], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:09:17,496 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.369e+02 6.623e+02 8.148e+02 1.799e+03, threshold=1.325e+03, percent-clipped=5.0 +2023-04-02 12:09:37,823 INFO [train.py:903] (1/4) Epoch 18, batch 3350, loss[loss=0.2057, simple_loss=0.2902, pruned_loss=0.06062, over 19672.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2933, pruned_loss=0.06851, over 3824291.36 frames. 
], batch size: 59, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:09:50,642 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:11,914 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:37,461 INFO [train.py:903] (1/4) Epoch 18, batch 3400, loss[loss=0.1697, simple_loss=0.2498, pruned_loss=0.04483, over 17648.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2942, pruned_loss=0.06936, over 3827370.80 frames. ], batch size: 39, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:18,511 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 4.894e+02 5.911e+02 7.983e+02 1.559e+03, threshold=1.182e+03, percent-clipped=2.0 +2023-04-02 12:11:37,555 INFO [train.py:903] (1/4) Epoch 18, batch 3450, loss[loss=0.1941, simple_loss=0.28, pruned_loss=0.05412, over 19522.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2938, pruned_loss=0.06896, over 3829617.16 frames. ], batch size: 54, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:43,120 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 12:11:45,616 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:09,596 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119552.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:31,765 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119570.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:38,201 INFO [train.py:903] (1/4) Epoch 18, batch 3500, loss[loss=0.2005, simple_loss=0.2734, pruned_loss=0.06378, over 19683.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2919, pruned_loss=0.06818, over 3826218.57 frames. ], batch size: 53, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:13:20,028 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.884e+02 6.516e+02 7.989e+02 1.670e+03, threshold=1.303e+03, percent-clipped=4.0 +2023-04-02 12:13:39,059 INFO [train.py:903] (1/4) Epoch 18, batch 3550, loss[loss=0.2017, simple_loss=0.2745, pruned_loss=0.06443, over 19373.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2921, pruned_loss=0.06796, over 3812862.64 frames. ], batch size: 47, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:14:39,531 INFO [train.py:903] (1/4) Epoch 18, batch 3600, loss[loss=0.2246, simple_loss=0.3018, pruned_loss=0.07367, over 19528.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06793, over 3810212.08 frames. ], batch size: 54, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:14:47,916 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:15:20,525 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.936e+02 6.005e+02 7.350e+02 2.220e+03, threshold=1.201e+03, percent-clipped=3.0 +2023-04-02 12:15:39,372 INFO [train.py:903] (1/4) Epoch 18, batch 3650, loss[loss=0.2456, simple_loss=0.3265, pruned_loss=0.08229, over 19774.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2944, pruned_loss=0.06892, over 3818001.10 frames. ], batch size: 56, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:14,039 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. 
limit=2.0 +2023-04-02 12:16:40,015 INFO [train.py:903] (1/4) Epoch 18, batch 3700, loss[loss=0.2218, simple_loss=0.3038, pruned_loss=0.06996, over 19473.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2944, pruned_loss=0.06899, over 3816914.86 frames. ], batch size: 49, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:17:13,322 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0366, 3.5081, 1.9782, 2.0980, 3.0790, 1.7053, 1.5666, 2.1896], + device='cuda:1'), covar=tensor([0.1343, 0.0543, 0.1012, 0.0812, 0.0517, 0.1172, 0.0938, 0.0657], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0313, 0.0330, 0.0259, 0.0247, 0.0330, 0.0291, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:17:15,500 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4262, 1.6065, 1.9704, 1.7391, 3.2511, 2.6753, 3.5006, 1.5972], + device='cuda:1'), covar=tensor([0.2453, 0.4175, 0.2692, 0.1838, 0.1400, 0.1972, 0.1484, 0.4067], + device='cuda:1'), in_proj_covar=tensor([0.0521, 0.0621, 0.0682, 0.0470, 0.0613, 0.0521, 0.0656, 0.0532], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:17:19,627 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:21,520 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 5.017e+02 5.995e+02 7.636e+02 1.424e+03, threshold=1.199e+03, percent-clipped=3.0 +2023-04-02 12:17:40,474 INFO [train.py:903] (1/4) Epoch 18, batch 3750, loss[loss=0.1855, simple_loss=0.2632, pruned_loss=0.05389, over 19780.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2938, pruned_loss=0.06851, over 3832390.43 frames. ], batch size: 47, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:17:40,887 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119826.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:48,789 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:10,611 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:39,735 INFO [train.py:903] (1/4) Epoch 18, batch 3800, loss[loss=0.2165, simple_loss=0.2892, pruned_loss=0.07193, over 19746.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2937, pruned_loss=0.06862, over 3838315.94 frames. ], batch size: 51, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:18:39,893 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:44,344 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6327, 1.4500, 1.4874, 2.2379, 1.7649, 1.9718, 2.0517, 1.9044], + device='cuda:1'), covar=tensor([0.0836, 0.0926, 0.0981, 0.0701, 0.0783, 0.0727, 0.0823, 0.0627], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0223, 0.0242, 0.0226, 0.0208, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 12:19:12,057 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 12:19:16,277 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. 
limit=2.0 +2023-04-02 12:19:22,039 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.499e+02 6.857e+02 8.596e+02 2.059e+03, threshold=1.371e+03, percent-clipped=8.0 +2023-04-02 12:19:41,466 INFO [train.py:903] (1/4) Epoch 18, batch 3850, loss[loss=0.25, simple_loss=0.3215, pruned_loss=0.0892, over 13499.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2936, pruned_loss=0.06814, over 3833695.65 frames. ], batch size: 136, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:19:45,479 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0884, 2.0253, 1.8053, 1.6440, 1.4997, 1.7037, 0.4868, 1.1200], + device='cuda:1'), covar=tensor([0.0548, 0.0550, 0.0425, 0.0768, 0.1085, 0.0836, 0.1205, 0.0941], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0345, 0.0345, 0.0372, 0.0446, 0.0378, 0.0325, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:20:43,324 INFO [train.py:903] (1/4) Epoch 18, batch 3900, loss[loss=0.2137, simple_loss=0.2988, pruned_loss=0.0643, over 19510.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2928, pruned_loss=0.06799, over 3817539.88 frames. ], batch size: 54, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:20:52,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:21:02,810 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:26,389 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.199e+02 4.757e+02 5.722e+02 7.251e+02 1.425e+03, threshold=1.144e+03, percent-clipped=2.0 +2023-04-02 12:21:45,629 INFO [train.py:903] (1/4) Epoch 18, batch 3950, loss[loss=0.2163, simple_loss=0.2996, pruned_loss=0.06648, over 18093.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2931, pruned_loss=0.06796, over 3812378.76 frames. ], batch size: 83, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:47,756 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=120027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:49,995 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 12:22:47,956 INFO [train.py:903] (1/4) Epoch 18, batch 4000, loss[loss=0.1902, simple_loss=0.2658, pruned_loss=0.05728, over 19383.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2917, pruned_loss=0.06745, over 3811207.92 frames. ], batch size: 47, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:23:02,777 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2070, 1.2535, 1.3458, 1.3440, 1.6807, 1.7481, 1.7801, 0.5828], + device='cuda:1'), covar=tensor([0.2451, 0.4094, 0.2570, 0.1951, 0.1620, 0.2216, 0.1315, 0.4466], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0619, 0.0676, 0.0467, 0.0611, 0.0519, 0.0653, 0.0530], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:23:29,303 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 4.630e+02 5.764e+02 7.444e+02 1.534e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-02 12:23:36,025 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-02 12:23:49,495 INFO [train.py:903] (1/4) Epoch 18, batch 4050, loss[loss=0.2124, simple_loss=0.2936, pruned_loss=0.06557, over 19672.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2919, pruned_loss=0.06751, over 3824212.75 frames. ], batch size: 60, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:24:08,098 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:24:28,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. limit=5.0 +2023-04-02 12:24:40,869 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6502, 3.9379, 2.6749, 2.7115, 3.6190, 2.4454, 2.1113, 2.7769], + device='cuda:1'), covar=tensor([0.1135, 0.0467, 0.0835, 0.0674, 0.0374, 0.0964, 0.0800, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0312, 0.0330, 0.0258, 0.0247, 0.0329, 0.0290, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:24:49,138 INFO [train.py:903] (1/4) Epoch 18, batch 4100, loss[loss=0.2019, simple_loss=0.2804, pruned_loss=0.06168, over 19831.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2925, pruned_loss=0.06762, over 3824859.05 frames. ], batch size: 52, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:25:20,530 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.4630, 5.0234, 3.2653, 4.4049, 1.6521, 4.9063, 4.8073, 4.9883], + device='cuda:1'), covar=tensor([0.0422, 0.0830, 0.1535, 0.0745, 0.3315, 0.0565, 0.0766, 0.0907], + device='cuda:1'), in_proj_covar=tensor([0.0483, 0.0394, 0.0480, 0.0340, 0.0395, 0.0417, 0.0409, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:25:24,886 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 12:25:29,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.087e+02 6.190e+02 7.903e+02 1.294e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 12:25:39,648 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3701, 2.0668, 1.5833, 1.3641, 1.8454, 1.3081, 1.3864, 1.8328], + device='cuda:1'), covar=tensor([0.0882, 0.0781, 0.1092, 0.0833, 0.0586, 0.1210, 0.0613, 0.0431], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0311, 0.0329, 0.0258, 0.0246, 0.0328, 0.0289, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:25:48,078 INFO [train.py:903] (1/4) Epoch 18, batch 4150, loss[loss=0.2321, simple_loss=0.3123, pruned_loss=0.07599, over 19613.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06746, over 3825303.88 frames. ], batch size: 57, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:14,740 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:46,392 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:50,727 INFO [train.py:903] (1/4) Epoch 18, batch 4200, loss[loss=0.2184, simple_loss=0.2979, pruned_loss=0.06942, over 19588.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2917, pruned_loss=0.06689, over 3810556.88 frames. 
], batch size: 52, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:57,165 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 12:27:30,830 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 4.968e+02 6.385e+02 7.954e+02 1.571e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 12:27:51,828 INFO [train.py:903] (1/4) Epoch 18, batch 4250, loss[loss=0.2414, simple_loss=0.318, pruned_loss=0.08243, over 19537.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2916, pruned_loss=0.06713, over 3818919.95 frames. ], batch size: 54, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:27:56,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0930, 5.0949, 5.8735, 5.8906, 1.8928, 5.5839, 4.6092, 5.5384], + device='cuda:1'), covar=tensor([0.1540, 0.0847, 0.0487, 0.0600, 0.5952, 0.0684, 0.0577, 0.1053], + device='cuda:1'), in_proj_covar=tensor([0.0760, 0.0701, 0.0907, 0.0794, 0.0809, 0.0659, 0.0548, 0.0837], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 12:28:09,637 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 12:28:18,891 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 12:28:47,403 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4804, 2.1668, 1.6406, 1.4134, 2.0142, 1.3451, 1.3602, 1.8819], + device='cuda:1'), covar=tensor([0.0991, 0.0852, 0.1019, 0.0849, 0.0541, 0.1225, 0.0698, 0.0462], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0309, 0.0326, 0.0256, 0.0243, 0.0325, 0.0287, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:28:51,543 INFO [train.py:903] (1/4) Epoch 18, batch 4300, loss[loss=0.2037, simple_loss=0.2971, pruned_loss=0.05514, over 19770.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06699, over 3818134.44 frames. ], batch size: 54, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:19,817 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:34,133 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.084e+02 5.969e+02 7.631e+02 1.294e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 12:29:46,145 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 12:29:49,841 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120423.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:52,875 INFO [train.py:903] (1/4) Epoch 18, batch 4350, loss[loss=0.2139, simple_loss=0.2965, pruned_loss=0.06567, over 19678.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2925, pruned_loss=0.06803, over 3782851.59 frames. 
], batch size: 59, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:57,423 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5950, 3.9261, 4.3054, 4.3636, 1.6585, 4.0685, 3.4482, 3.7115], + device='cuda:1'), covar=tensor([0.2749, 0.1597, 0.1033, 0.1492, 0.7904, 0.2055, 0.1340, 0.2252], + device='cuda:1'), in_proj_covar=tensor([0.0753, 0.0697, 0.0901, 0.0786, 0.0801, 0.0654, 0.0543, 0.0832], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 12:30:55,181 INFO [train.py:903] (1/4) Epoch 18, batch 4400, loss[loss=0.2165, simple_loss=0.2999, pruned_loss=0.06654, over 19595.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06811, over 3793421.52 frames. ], batch size: 52, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:31:18,742 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 12:31:28,328 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 12:31:29,868 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3294, 1.3477, 1.4271, 1.4892, 1.7010, 1.8471, 1.7560, 0.5350], + device='cuda:1'), covar=tensor([0.2322, 0.4113, 0.2585, 0.1849, 0.1662, 0.2218, 0.1369, 0.4565], + device='cuda:1'), in_proj_covar=tensor([0.0516, 0.0616, 0.0675, 0.0466, 0.0609, 0.0517, 0.0649, 0.0528], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:31:35,103 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.092e+02 6.306e+02 8.223e+02 1.474e+03, threshold=1.261e+03, percent-clipped=7.0 +2023-04-02 12:31:55,630 INFO [train.py:903] (1/4) Epoch 18, batch 4450, loss[loss=0.2676, simple_loss=0.3293, pruned_loss=0.103, over 13508.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2915, pruned_loss=0.06762, over 3793849.00 frames. ], batch size: 136, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:32:55,856 INFO [train.py:903] (1/4) Epoch 18, batch 4500, loss[loss=0.2518, simple_loss=0.3363, pruned_loss=0.08364, over 19507.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2924, pruned_loss=0.06795, over 3805483.39 frames. ], batch size: 64, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:33:37,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.202e+02 6.117e+02 7.756e+02 2.072e+03, threshold=1.223e+03, percent-clipped=4.0 +2023-04-02 12:33:56,191 INFO [train.py:903] (1/4) Epoch 18, batch 4550, loss[loss=0.2078, simple_loss=0.2901, pruned_loss=0.06275, over 19613.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.293, pruned_loss=0.06836, over 3805636.40 frames. ], batch size: 57, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:34:05,862 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 12:34:29,488 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 12:34:56,266 INFO [train.py:903] (1/4) Epoch 18, batch 4600, loss[loss=0.2075, simple_loss=0.2822, pruned_loss=0.06638, over 19852.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2932, pruned_loss=0.06891, over 3795055.80 frames. ], batch size: 52, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:35:04,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.09 vs. 
limit=5.0 +2023-04-02 12:35:09,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5705, 1.1350, 1.3775, 1.2962, 2.2248, 1.0190, 1.9234, 2.4537], + device='cuda:1'), covar=tensor([0.0682, 0.2811, 0.2672, 0.1603, 0.0920, 0.2080, 0.1178, 0.0469], + device='cuda:1'), in_proj_covar=tensor([0.0397, 0.0356, 0.0376, 0.0340, 0.0366, 0.0345, 0.0367, 0.0387], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:35:17,284 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=120694.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:35:33,745 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-02 12:35:35,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 4.865e+02 6.169e+02 7.995e+02 1.948e+03, threshold=1.234e+03, percent-clipped=9.0 +2023-04-02 12:35:37,843 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7793, 1.2592, 1.4654, 1.5784, 3.3518, 1.1262, 2.3284, 3.7324], + device='cuda:1'), covar=tensor([0.0437, 0.2878, 0.2892, 0.1851, 0.0742, 0.2628, 0.1394, 0.0249], + device='cuda:1'), in_proj_covar=tensor([0.0398, 0.0357, 0.0377, 0.0341, 0.0367, 0.0346, 0.0368, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:35:55,081 INFO [train.py:903] (1/4) Epoch 18, batch 4650, loss[loss=0.2035, simple_loss=0.295, pruned_loss=0.05605, over 18741.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2923, pruned_loss=0.0685, over 3803918.59 frames. ], batch size: 74, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:36:13,554 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 12:36:24,599 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 12:36:55,839 INFO [train.py:903] (1/4) Epoch 18, batch 4700, loss[loss=0.2294, simple_loss=0.3059, pruned_loss=0.07648, over 17223.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2925, pruned_loss=0.0685, over 3789653.37 frames. ], batch size: 101, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:37:18,751 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 12:37:37,344 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.224e+02 6.544e+02 8.077e+02 2.112e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 12:37:56,116 INFO [train.py:903] (1/4) Epoch 18, batch 4750, loss[loss=0.2093, simple_loss=0.2925, pruned_loss=0.06304, over 19713.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2932, pruned_loss=0.06918, over 3789682.12 frames. ], batch size: 59, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:38:24,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6646, 2.3491, 2.1692, 2.7772, 2.3525, 2.2457, 1.9422, 2.4969], + device='cuda:1'), covar=tensor([0.0852, 0.1523, 0.1336, 0.0920, 0.1297, 0.0480, 0.1299, 0.0655], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0246, 0.0298, 0.0246, 0.0296, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:38:57,285 INFO [train.py:903] (1/4) Epoch 18, batch 4800, loss[loss=0.2287, simple_loss=0.3177, pruned_loss=0.0698, over 19491.00 frames. 
], tot_loss[loss=0.2149, simple_loss=0.2929, pruned_loss=0.06846, over 3800336.61 frames. ], batch size: 64, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:39:38,286 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.063e+02 6.517e+02 8.294e+02 1.429e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-04-02 12:39:57,786 INFO [train.py:903] (1/4) Epoch 18, batch 4850, loss[loss=0.252, simple_loss=0.3227, pruned_loss=0.09068, over 19487.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2927, pruned_loss=0.06843, over 3810424.74 frames. ], batch size: 64, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:40:19,478 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 12:40:38,932 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 12:40:44,411 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 12:40:45,483 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 12:40:55,308 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 12:40:57,464 INFO [train.py:903] (1/4) Epoch 18, batch 4900, loss[loss=0.1953, simple_loss=0.2735, pruned_loss=0.05851, over 19355.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2929, pruned_loss=0.06868, over 3821964.11 frames. ], batch size: 47, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:41:15,394 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 12:41:38,441 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.463e+02 5.379e+02 6.693e+02 8.152e+02 1.326e+03, threshold=1.339e+03, percent-clipped=1.0 +2023-04-02 12:41:50,623 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:41:56,821 INFO [train.py:903] (1/4) Epoch 18, batch 4950, loss[loss=0.1583, simple_loss=0.231, pruned_loss=0.04279, over 19754.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2922, pruned_loss=0.0683, over 3832909.07 frames. ], batch size: 46, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:42:10,901 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121038.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:42:13,931 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 12:42:36,735 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 12:42:55,221 INFO [train.py:903] (1/4) Epoch 18, batch 5000, loss[loss=0.2262, simple_loss=0.3077, pruned_loss=0.07232, over 19712.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2935, pruned_loss=0.06875, over 3840082.99 frames. ], batch size: 59, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:43:05,397 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 12:43:16,151 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 12:43:36,117 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.049e+02 6.044e+02 8.198e+02 1.739e+03, threshold=1.209e+03, percent-clipped=8.0 +2023-04-02 12:43:52,965 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9583, 3.3512, 1.7849, 1.7897, 3.0934, 1.5774, 1.3628, 2.3614], + device='cuda:1'), covar=tensor([0.1215, 0.0515, 0.1107, 0.1017, 0.0503, 0.1238, 0.0994, 0.0566], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0313, 0.0328, 0.0259, 0.0247, 0.0332, 0.0291, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 12:43:55,696 INFO [train.py:903] (1/4) Epoch 18, batch 5050, loss[loss=0.2313, simple_loss=0.31, pruned_loss=0.07627, over 19522.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2933, pruned_loss=0.06876, over 3834307.13 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:44:04,541 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0241, 4.3610, 4.7047, 4.7111, 1.8591, 4.4037, 3.8637, 4.4021], + device='cuda:1'), covar=tensor([0.1460, 0.0976, 0.0558, 0.0565, 0.5323, 0.0958, 0.0629, 0.1026], + device='cuda:1'), in_proj_covar=tensor([0.0758, 0.0702, 0.0906, 0.0788, 0.0806, 0.0657, 0.0548, 0.0833], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 12:44:27,562 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:44:29,504 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 12:44:55,824 INFO [train.py:903] (1/4) Epoch 18, batch 5100, loss[loss=0.2072, simple_loss=0.2942, pruned_loss=0.06009, over 18287.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2941, pruned_loss=0.06917, over 3826140.63 frames. ], batch size: 83, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:45:03,010 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:45:05,063 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 12:45:07,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 12:45:12,776 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 12:45:37,241 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 4.928e+02 6.063e+02 8.354e+02 2.244e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 12:45:44,714 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9427, 1.8698, 1.7551, 1.5316, 1.3959, 1.5097, 0.3984, 0.8353], + device='cuda:1'), covar=tensor([0.0564, 0.0625, 0.0405, 0.0657, 0.1229, 0.0867, 0.1263, 0.1066], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0352, 0.0354, 0.0378, 0.0452, 0.0384, 0.0332, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 12:45:56,103 INFO [train.py:903] (1/4) Epoch 18, batch 5150, loss[loss=0.2237, simple_loss=0.3, pruned_loss=0.0737, over 19679.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2942, pruned_loss=0.06931, over 3796624.16 frames. 
], batch size: 53, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:46:05,288 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 12:46:18,093 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 12:46:40,467 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 12:46:55,813 INFO [train.py:903] (1/4) Epoch 18, batch 5200, loss[loss=0.2123, simple_loss=0.2962, pruned_loss=0.06418, over 19482.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2947, pruned_loss=0.0697, over 3805677.60 frames. ], batch size: 64, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:08,966 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 12:47:36,015 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.110e+02 6.332e+02 8.392e+02 3.036e+03, threshold=1.266e+03, percent-clipped=7.0 +2023-04-02 12:47:49,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 12:47:54,555 INFO [train.py:903] (1/4) Epoch 18, batch 5250, loss[loss=0.2042, simple_loss=0.2911, pruned_loss=0.05865, over 19256.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2943, pruned_loss=0.06912, over 3800292.17 frames. ], batch size: 66, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:59,178 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121329.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 12:48:35,520 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0773, 1.9179, 1.9999, 2.5597, 1.6810, 2.3845, 2.3880, 2.1527], + device='cuda:1'), covar=tensor([0.0759, 0.0829, 0.0898, 0.0878, 0.0992, 0.0690, 0.0857, 0.0635], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0224, 0.0243, 0.0228, 0.0209, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 12:48:40,799 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:48:54,018 INFO [train.py:903] (1/4) Epoch 18, batch 5300, loss[loss=0.2665, simple_loss=0.3468, pruned_loss=0.09309, over 19332.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2937, pruned_loss=0.0687, over 3808264.28 frames. ], batch size: 66, lr: 4.56e-03, grad_scale: 4.0 +2023-04-02 12:49:10,547 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 12:49:32,297 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8756, 1.2806, 1.5601, 0.5835, 2.0210, 2.4394, 2.1423, 2.5781], + device='cuda:1'), covar=tensor([0.1618, 0.3614, 0.3276, 0.2768, 0.0603, 0.0284, 0.0339, 0.0356], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0313, 0.0343, 0.0259, 0.0235, 0.0180, 0.0212, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 12:49:33,466 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:35,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.404e+02 6.878e+02 8.269e+02 2.518e+03, threshold=1.376e+03, percent-clipped=11.0 +2023-04-02 12:49:52,481 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:53,390 INFO [train.py:903] (1/4) Epoch 18, batch 5350, loss[loss=0.2146, simple_loss=0.2975, pruned_loss=0.06581, over 19410.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2949, pruned_loss=0.06933, over 3807361.88 frames. ], batch size: 70, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:03,575 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:50:26,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 12:50:53,959 INFO [train.py:903] (1/4) Epoch 18, batch 5400, loss[loss=0.2033, simple_loss=0.2904, pruned_loss=0.05807, over 17417.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2948, pruned_loss=0.06941, over 3805495.74 frames. ], batch size: 101, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:59,582 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121480.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:35,379 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 5.292e+02 6.238e+02 7.901e+02 1.766e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 12:51:54,668 INFO [train.py:903] (1/4) Epoch 18, batch 5450, loss[loss=0.1973, simple_loss=0.2776, pruned_loss=0.05853, over 19730.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2947, pruned_loss=0.06927, over 3818029.00 frames. 
], batch size: 51, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:51:54,830 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:52:20,878 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6661, 1.5483, 1.5296, 2.4475, 1.7821, 2.1479, 2.1663, 1.7795], + device='cuda:1'), covar=tensor([0.0789, 0.0928, 0.0984, 0.0640, 0.0862, 0.0632, 0.0752, 0.0669], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0225, 0.0243, 0.0229, 0.0211, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 12:52:25,223 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4594, 1.5506, 1.8720, 1.7058, 2.7975, 2.3918, 2.8242, 1.4080], + device='cuda:1'), covar=tensor([0.2388, 0.4059, 0.2521, 0.1781, 0.1406, 0.1936, 0.1459, 0.3908], + device='cuda:1'), in_proj_covar=tensor([0.0520, 0.0623, 0.0684, 0.0468, 0.0614, 0.0523, 0.0657, 0.0534], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 12:52:54,550 INFO [train.py:903] (1/4) Epoch 18, batch 5500, loss[loss=0.2416, simple_loss=0.3141, pruned_loss=0.08449, over 17167.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2946, pruned_loss=0.06881, over 3802263.77 frames. ], batch size: 101, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:53:19,886 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 12:53:37,491 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.754e+02 5.891e+02 7.661e+02 1.861e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-02 12:53:51,009 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 12:53:55,066 INFO [train.py:903] (1/4) Epoch 18, batch 5550, loss[loss=0.2177, simple_loss=0.3011, pruned_loss=0.06714, over 19666.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2936, pruned_loss=0.06865, over 3803888.36 frames. ], batch size: 55, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:54:02,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 12:54:13,205 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121641.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:54:51,523 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 12:54:52,547 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121673.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:54:55,722 INFO [train.py:903] (1/4) Epoch 18, batch 5600, loss[loss=0.2235, simple_loss=0.3086, pruned_loss=0.06917, over 19092.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.294, pruned_loss=0.06842, over 3821802.83 frames. ], batch size: 69, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:55:37,559 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.280e+02 4.851e+02 5.729e+02 6.791e+02 1.695e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-04-02 12:55:56,730 INFO [train.py:903] (1/4) Epoch 18, batch 5650, loss[loss=0.1907, simple_loss=0.2718, pruned_loss=0.05484, over 19845.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2932, pruned_loss=0.06814, over 3818227.79 frames. 
], batch size: 52, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:56:08,950 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:37,729 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121761.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:44,590 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 12:56:48,217 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:57,049 INFO [train.py:903] (1/4) Epoch 18, batch 5700, loss[loss=0.1684, simple_loss=0.248, pruned_loss=0.04443, over 19465.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2924, pruned_loss=0.06784, over 3830806.43 frames. ], batch size: 49, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:11,007 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121788.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:57:39,339 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.821e+02 5.859e+02 7.608e+02 1.521e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-02 12:57:54,290 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 12:57:57,032 INFO [train.py:903] (1/4) Epoch 18, batch 5750, loss[loss=0.2724, simple_loss=0.3342, pruned_loss=0.1053, over 13377.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2929, pruned_loss=0.0682, over 3820005.39 frames. ], batch size: 136, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:58,060 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 12:58:05,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 12:58:11,381 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 12:58:57,512 INFO [train.py:903] (1/4) Epoch 18, batch 5800, loss[loss=0.2088, simple_loss=0.2923, pruned_loss=0.06264, over 19502.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2926, pruned_loss=0.06833, over 3819366.78 frames. ], batch size: 64, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:59:08,068 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:24,568 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:39,869 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.174e+02 6.386e+02 8.157e+02 2.937e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-02 12:59:54,647 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:59,626 INFO [train.py:903] (1/4) Epoch 18, batch 5850, loss[loss=0.2282, simple_loss=0.3093, pruned_loss=0.07355, over 18751.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2932, pruned_loss=0.0687, over 3812088.01 frames. 
], batch size: 74, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:00:51,281 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121968.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:01:00,155 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9365, 1.6762, 1.6159, 1.9403, 1.6800, 1.7492, 1.6138, 1.8856], + device='cuda:1'), covar=tensor([0.0960, 0.1458, 0.1367, 0.0909, 0.1245, 0.0525, 0.1272, 0.0683], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0356, 0.0304, 0.0248, 0.0300, 0.0247, 0.0298, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:01:00,885 INFO [train.py:903] (1/4) Epoch 18, batch 5900, loss[loss=0.2293, simple_loss=0.3087, pruned_loss=0.07493, over 18823.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2926, pruned_loss=0.06817, over 3819097.25 frames. ], batch size: 74, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:01:02,075 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 13:01:23,312 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 13:01:43,749 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.950e+02 6.415e+02 8.144e+02 2.513e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-02 13:02:01,671 INFO [train.py:903] (1/4) Epoch 18, batch 5950, loss[loss=0.2033, simple_loss=0.2847, pruned_loss=0.06094, over 19587.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2932, pruned_loss=0.06843, over 3816064.56 frames. ], batch size: 52, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:02:03,446 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-02 13:02:24,064 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122044.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:02:42,884 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 13:02:53,800 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122069.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:03:01,812 INFO [train.py:903] (1/4) Epoch 18, batch 6000, loss[loss=0.2503, simple_loss=0.3261, pruned_loss=0.08727, over 18898.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2953, pruned_loss=0.06958, over 3810169.54 frames. ], batch size: 74, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:03:01,812 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 13:03:14,294 INFO [train.py:937] (1/4) Epoch 18, validation: loss=0.1702, simple_loss=0.2706, pruned_loss=0.03489, over 944034.00 frames. 
+2023-04-02 13:03:14,295 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 13:03:22,624 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3027, 1.3916, 1.8390, 1.3906, 2.8081, 3.8316, 3.5678, 4.0412], + device='cuda:1'), covar=tensor([0.1561, 0.3680, 0.3162, 0.2227, 0.0552, 0.0175, 0.0202, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0313, 0.0343, 0.0259, 0.0235, 0.0180, 0.0212, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 13:03:57,709 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.792e+02 5.055e+02 6.106e+02 7.549e+02 1.634e+03, threshold=1.221e+03, percent-clipped=4.0 +2023-04-02 13:04:15,900 INFO [train.py:903] (1/4) Epoch 18, batch 6050, loss[loss=0.2022, simple_loss=0.2874, pruned_loss=0.05848, over 19339.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2942, pruned_loss=0.06907, over 3820345.82 frames. ], batch size: 66, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:04:27,147 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:04:33,689 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:04,398 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:18,139 INFO [train.py:903] (1/4) Epoch 18, batch 6100, loss[loss=0.2211, simple_loss=0.2879, pruned_loss=0.07718, over 19456.00 frames. ], tot_loss[loss=0.217, simple_loss=0.295, pruned_loss=0.06952, over 3823076.02 frames. ], batch size: 49, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:05:59,975 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.309e+02 6.592e+02 8.010e+02 1.726e+03, threshold=1.318e+03, percent-clipped=1.0 +2023-04-02 13:06:03,460 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:06:18,827 INFO [train.py:903] (1/4) Epoch 18, batch 6150, loss[loss=0.2152, simple_loss=0.287, pruned_loss=0.07167, over 19621.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2949, pruned_loss=0.0694, over 3829216.44 frames. ], batch size: 50, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:06:46,721 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 13:07:20,188 INFO [train.py:903] (1/4) Epoch 18, batch 6200, loss[loss=0.2066, simple_loss=0.2832, pruned_loss=0.06499, over 19689.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2942, pruned_loss=0.06923, over 3821013.77 frames. ], batch size: 53, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:04,096 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.549e+02 5.662e+02 6.707e+02 1.660e+03, threshold=1.132e+03, percent-clipped=3.0 +2023-04-02 13:08:05,406 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:17,460 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 13:08:21,488 INFO [train.py:903] (1/4) Epoch 18, batch 6250, loss[loss=0.217, simple_loss=0.2984, pruned_loss=0.06779, over 18937.00 frames. 
], tot_loss[loss=0.216, simple_loss=0.2938, pruned_loss=0.06905, over 3802604.04 frames. ], batch size: 74, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:35,721 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:53,450 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 13:09:23,685 INFO [train.py:903] (1/4) Epoch 18, batch 6300, loss[loss=0.2508, simple_loss=0.3361, pruned_loss=0.08278, over 19769.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.293, pruned_loss=0.06859, over 3815209.54 frames. ], batch size: 56, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:06,341 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.826e+02 5.839e+02 7.420e+02 1.796e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-02 13:10:25,332 INFO [train.py:903] (1/4) Epoch 18, batch 6350, loss[loss=0.2453, simple_loss=0.3225, pruned_loss=0.08407, over 19574.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2911, pruned_loss=0.06709, over 3826383.51 frames. ], batch size: 61, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:26,751 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:25,780 INFO [train.py:903] (1/4) Epoch 18, batch 6400, loss[loss=0.215, simple_loss=0.301, pruned_loss=0.06448, over 19497.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.292, pruned_loss=0.0675, over 3831523.28 frames. ], batch size: 64, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:11:29,340 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:30,755 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9394, 1.8997, 1.8494, 1.6125, 1.3996, 1.6356, 0.4562, 0.8648], + device='cuda:1'), covar=tensor([0.0551, 0.0541, 0.0362, 0.0593, 0.1150, 0.0718, 0.1150, 0.0988], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0347, 0.0347, 0.0373, 0.0449, 0.0379, 0.0329, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 13:11:38,325 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:12:08,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.572e+02 5.352e+02 7.001e+02 2.095e+03, threshold=1.070e+03, percent-clipped=5.0 +2023-04-02 13:12:27,162 INFO [train.py:903] (1/4) Epoch 18, batch 6450, loss[loss=0.2534, simple_loss=0.334, pruned_loss=0.08642, over 19291.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.291, pruned_loss=0.06714, over 3817786.37 frames. ], batch size: 66, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:12:43,823 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 13:13:05,515 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:13:09,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 13:13:27,310 INFO [train.py:903] (1/4) Epoch 18, batch 6500, loss[loss=0.2339, simple_loss=0.3085, pruned_loss=0.07966, over 19533.00 frames. 
], tot_loss[loss=0.2129, simple_loss=0.2914, pruned_loss=0.06724, over 3823269.31 frames. ], batch size: 54, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:13:32,627 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 13:13:41,119 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-02 13:13:50,395 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:14:06,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9692, 1.9505, 1.8425, 1.5452, 1.5262, 1.6341, 0.3165, 0.8171], + device='cuda:1'), covar=tensor([0.0530, 0.0546, 0.0354, 0.0666, 0.1109, 0.0701, 0.1196, 0.0963], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0347, 0.0348, 0.0375, 0.0451, 0.0380, 0.0330, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 13:14:10,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.824e+02 5.508e+02 6.898e+02 1.222e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 13:14:28,799 INFO [train.py:903] (1/4) Epoch 18, batch 6550, loss[loss=0.2065, simple_loss=0.2859, pruned_loss=0.0636, over 19383.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2906, pruned_loss=0.06696, over 3828388.30 frames. ], batch size: 48, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:24,479 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:29,442 INFO [train.py:903] (1/4) Epoch 18, batch 6600, loss[loss=0.2033, simple_loss=0.2887, pruned_loss=0.05894, over 19603.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06701, over 3837150.51 frames. ], batch size: 57, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:35,564 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:37,956 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:08,850 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:11,858 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.976e+02 6.114e+02 8.137e+02 1.508e+03, threshold=1.223e+03, percent-clipped=10.0 +2023-04-02 13:16:29,836 INFO [train.py:903] (1/4) Epoch 18, batch 6650, loss[loss=0.2164, simple_loss=0.2969, pruned_loss=0.06799, over 19686.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2908, pruned_loss=0.06665, over 3843218.32 frames. ], batch size: 53, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:30,797 INFO [train.py:903] (1/4) Epoch 18, batch 6700, loss[loss=0.1868, simple_loss=0.2754, pruned_loss=0.04913, over 19525.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2904, pruned_loss=0.06638, over 3842737.37 frames. 
], batch size: 54, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:55,707 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:12,046 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.791e+02 5.844e+02 7.603e+02 1.856e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 13:18:28,392 INFO [train.py:903] (1/4) Epoch 18, batch 6750, loss[loss=0.1926, simple_loss=0.2699, pruned_loss=0.05763, over 19752.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2905, pruned_loss=0.06657, over 3842323.24 frames. ], batch size: 46, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:18:33,072 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:55,585 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:24,217 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:25,055 INFO [train.py:903] (1/4) Epoch 18, batch 6800, loss[loss=0.2254, simple_loss=0.3021, pruned_loss=0.07437, over 19301.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2916, pruned_loss=0.06761, over 3821714.62 frames. ], batch size: 66, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:19:40,829 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:20:08,835 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 13:20:09,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 13:20:12,595 INFO [train.py:903] (1/4) Epoch 19, batch 0, loss[loss=0.2199, simple_loss=0.3049, pruned_loss=0.06749, over 18769.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3049, pruned_loss=0.06749, over 18769.00 frames. ], batch size: 74, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:20:12,596 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 13:20:24,051 INFO [train.py:937] (1/4) Epoch 19, validation: loss=0.171, simple_loss=0.2713, pruned_loss=0.03533, over 944034.00 frames. +2023-04-02 13:20:24,052 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 13:20:32,696 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 4.987e+02 6.075e+02 7.792e+02 1.350e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 13:20:37,462 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 13:20:53,702 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122928.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:13,138 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 13:21:13,984 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:24,659 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:25,464 INFO [train.py:903] (1/4) Epoch 19, batch 50, loss[loss=0.2162, simple_loss=0.2984, pruned_loss=0.06701, over 19740.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2861, pruned_loss=0.06425, over 876989.95 frames. ], batch size: 63, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:21:51,204 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.30 vs. limit=5.0 +2023-04-02 13:22:04,205 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 13:22:16,582 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8656, 1.3474, 1.0499, 0.9769, 1.1712, 0.9842, 0.9561, 1.2674], + device='cuda:1'), covar=tensor([0.0616, 0.0865, 0.1156, 0.0729, 0.0564, 0.1350, 0.0571, 0.0486], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0314, 0.0331, 0.0259, 0.0247, 0.0334, 0.0291, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:22:27,932 INFO [train.py:903] (1/4) Epoch 19, batch 100, loss[loss=0.2076, simple_loss=0.2913, pruned_loss=0.06189, over 18328.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2908, pruned_loss=0.06773, over 1524087.60 frames. ], batch size: 83, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:35,845 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.975e+02 5.926e+02 8.151e+02 1.966e+03, threshold=1.185e+03, percent-clipped=7.0 +2023-04-02 13:22:37,410 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2529, 1.3072, 1.2455, 1.0595, 1.1037, 1.1653, 0.0653, 0.4033], + device='cuda:1'), covar=tensor([0.0541, 0.0560, 0.0351, 0.0473, 0.1067, 0.0557, 0.1130, 0.0912], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0347, 0.0347, 0.0374, 0.0450, 0.0380, 0.0328, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 13:22:40,146 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 13:23:25,700 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:23:26,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5987, 4.1536, 2.5793, 3.6651, 0.9263, 4.0867, 4.0178, 4.0516], + device='cuda:1'), covar=tensor([0.0653, 0.1098, 0.2024, 0.0844, 0.4078, 0.0691, 0.0825, 0.1105], + device='cuda:1'), in_proj_covar=tensor([0.0485, 0.0395, 0.0483, 0.0342, 0.0397, 0.0421, 0.0410, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:23:27,453 INFO [train.py:903] (1/4) Epoch 19, batch 150, loss[loss=0.2223, simple_loss=0.307, pruned_loss=0.06883, over 19673.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2919, pruned_loss=0.06816, over 2038952.19 frames. ], batch size: 58, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:23:55,483 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123077.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:24:27,288 INFO [train.py:903] (1/4) Epoch 19, batch 200, loss[loss=0.2719, simple_loss=0.3333, pruned_loss=0.1053, over 18305.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2923, pruned_loss=0.06857, over 2431957.63 frames. ], batch size: 83, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:24:29,571 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-02 13:24:35,482 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.223e+02 5.868e+02 7.012e+02 1.944e+03, threshold=1.174e+03, percent-clipped=7.0 +2023-04-02 13:24:36,969 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7813, 1.4060, 1.5817, 1.5865, 3.3096, 1.2048, 2.4891, 3.7711], + device='cuda:1'), covar=tensor([0.0487, 0.2733, 0.2744, 0.1847, 0.0708, 0.2490, 0.1166, 0.0240], + device='cuda:1'), in_proj_covar=tensor([0.0396, 0.0356, 0.0378, 0.0341, 0.0367, 0.0349, 0.0369, 0.0386], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:24:58,739 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2939, 5.6791, 3.2131, 4.9958, 1.1292, 5.7718, 5.6410, 5.8142], + device='cuda:1'), covar=tensor([0.0389, 0.0950, 0.1808, 0.0750, 0.4307, 0.0576, 0.0757, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0485, 0.0396, 0.0483, 0.0342, 0.0396, 0.0420, 0.0410, 0.0445], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:25:27,083 INFO [train.py:903] (1/4) Epoch 19, batch 250, loss[loss=0.1794, simple_loss=0.2631, pruned_loss=0.04781, over 19737.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2907, pruned_loss=0.06736, over 2738241.64 frames. ], batch size: 46, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:25,147 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:26:28,234 INFO [train.py:903] (1/4) Epoch 19, batch 300, loss[loss=0.1714, simple_loss=0.2528, pruned_loss=0.04496, over 19727.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2895, pruned_loss=0.06681, over 2974387.03 frames. ], batch size: 46, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:37,167 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.226e+02 7.852e+02 1.722e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-02 13:26:55,578 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:04,589 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:27,986 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.43 vs. limit=5.0 +2023-04-02 13:27:29,488 INFO [train.py:903] (1/4) Epoch 19, batch 350, loss[loss=0.198, simple_loss=0.2783, pruned_loss=0.05888, over 19507.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2892, pruned_loss=0.06666, over 3161890.32 frames. ], batch size: 54, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:27:35,243 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 13:27:46,487 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8460, 1.3412, 1.4894, 1.8106, 3.4131, 1.3252, 2.4474, 3.9083], + device='cuda:1'), covar=tensor([0.0489, 0.2755, 0.3010, 0.1759, 0.0726, 0.2529, 0.1349, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0397, 0.0357, 0.0380, 0.0342, 0.0367, 0.0350, 0.0370, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:28:10,780 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0938, 0.9343, 1.0741, 1.5029, 1.0436, 0.9925, 1.1103, 1.0490], + device='cuda:1'), covar=tensor([0.1267, 0.1805, 0.1498, 0.0733, 0.1100, 0.1594, 0.1185, 0.1282], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0219, 0.0224, 0.0241, 0.0226, 0.0210, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 13:28:30,167 INFO [train.py:903] (1/4) Epoch 19, batch 400, loss[loss=0.1866, simple_loss=0.2693, pruned_loss=0.05192, over 19678.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2902, pruned_loss=0.06706, over 3307460.17 frames. ], batch size: 53, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:28:37,932 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.151e+02 6.306e+02 7.944e+02 1.366e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-02 13:29:04,108 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123332.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:24,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:30,457 INFO [train.py:903] (1/4) Epoch 19, batch 450, loss[loss=0.2287, simple_loss=0.3032, pruned_loss=0.0771, over 17379.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2907, pruned_loss=0.06738, over 3419303.78 frames. ], batch size: 101, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:30:04,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 13:30:05,744 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 13:30:30,933 INFO [train.py:903] (1/4) Epoch 19, batch 500, loss[loss=0.26, simple_loss=0.3258, pruned_loss=0.09705, over 19784.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.292, pruned_loss=0.06834, over 3503893.00 frames. 
], batch size: 56, lr: 4.40e-03, grad_scale: 16.0 +2023-04-02 13:30:39,751 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.352e+02 7.180e+02 8.361e+02 2.088e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-02 13:31:21,143 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:31:22,360 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5916, 1.1439, 1.4985, 1.2463, 2.2502, 1.0243, 2.1467, 2.4493], + device='cuda:1'), covar=tensor([0.0719, 0.2743, 0.2616, 0.1648, 0.0879, 0.2071, 0.0944, 0.0482], + device='cuda:1'), in_proj_covar=tensor([0.0400, 0.0359, 0.0380, 0.0344, 0.0369, 0.0350, 0.0371, 0.0389], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:31:30,113 INFO [train.py:903] (1/4) Epoch 19, batch 550, loss[loss=0.2314, simple_loss=0.3025, pruned_loss=0.08014, over 19731.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2924, pruned_loss=0.06814, over 3587512.07 frames. ], batch size: 51, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:30,276 INFO [train.py:903] (1/4) Epoch 19, batch 600, loss[loss=0.2013, simple_loss=0.2904, pruned_loss=0.05613, over 17163.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2929, pruned_loss=0.06828, over 3643085.66 frames. ], batch size: 101, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:39,907 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.960e+02 5.982e+02 8.370e+02 1.865e+03, threshold=1.196e+03, percent-clipped=4.0 +2023-04-02 13:32:49,059 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8189, 4.3325, 2.5517, 3.8601, 1.1349, 4.2849, 4.1740, 4.2604], + device='cuda:1'), covar=tensor([0.0589, 0.1013, 0.2041, 0.0767, 0.3689, 0.0684, 0.0791, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0480, 0.0396, 0.0480, 0.0339, 0.0394, 0.0418, 0.0407, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:33:14,016 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 13:33:30,843 INFO [train.py:903] (1/4) Epoch 19, batch 650, loss[loss=0.246, simple_loss=0.3177, pruned_loss=0.08718, over 17999.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2925, pruned_loss=0.06799, over 3691986.87 frames. ], batch size: 83, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:07,483 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4908, 2.0529, 1.6393, 1.5540, 1.9985, 1.4414, 1.4484, 1.8277], + device='cuda:1'), covar=tensor([0.0883, 0.0728, 0.0908, 0.0608, 0.0469, 0.1012, 0.0580, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0316, 0.0335, 0.0261, 0.0248, 0.0337, 0.0293, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:34:30,416 INFO [train.py:903] (1/4) Epoch 19, batch 700, loss[loss=0.2193, simple_loss=0.2976, pruned_loss=0.07046, over 19660.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2924, pruned_loss=0.06764, over 3734086.21 frames. 
], batch size: 58, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:31,969 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:34:41,232 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.460e+02 6.000e+02 7.674e+02 1.249e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 13:35:02,794 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:35:31,844 INFO [train.py:903] (1/4) Epoch 19, batch 750, loss[loss=0.2019, simple_loss=0.2909, pruned_loss=0.05642, over 19662.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2927, pruned_loss=0.06746, over 3747588.35 frames. ], batch size: 55, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:35:59,275 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:36:34,036 INFO [train.py:903] (1/4) Epoch 19, batch 800, loss[loss=0.1855, simple_loss=0.258, pruned_loss=0.05652, over 19283.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2916, pruned_loss=0.06671, over 3768897.97 frames. ], batch size: 44, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:36:37,827 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0228, 1.8436, 1.7115, 2.0184, 1.8716, 1.7549, 1.6916, 1.9799], + device='cuda:1'), covar=tensor([0.0995, 0.1414, 0.1331, 0.0993, 0.1216, 0.0543, 0.1297, 0.0659], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0352, 0.0303, 0.0249, 0.0298, 0.0248, 0.0297, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:36:43,873 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 4.695e+02 6.255e+02 7.449e+02 1.390e+03, threshold=1.251e+03, percent-clipped=2.0 +2023-04-02 13:36:47,178 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 13:37:20,978 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0414, 2.9205, 1.8361, 1.9063, 2.6811, 1.5883, 1.4823, 2.2294], + device='cuda:1'), covar=tensor([0.1242, 0.0684, 0.0984, 0.0751, 0.0458, 0.1192, 0.0907, 0.0616], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0313, 0.0332, 0.0259, 0.0245, 0.0335, 0.0291, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:37:34,010 INFO [train.py:903] (1/4) Epoch 19, batch 850, loss[loss=0.2202, simple_loss=0.3021, pruned_loss=0.06913, over 19799.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2923, pruned_loss=0.06727, over 3786985.89 frames. ], batch size: 63, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:17,258 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:18,584 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:23,845 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 13:38:34,410 INFO [train.py:903] (1/4) Epoch 19, batch 900, loss[loss=0.202, simple_loss=0.292, pruned_loss=0.05598, over 19530.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2915, pruned_loss=0.06666, over 3804285.52 frames. 
], batch size: 54, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:38,092 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3426, 3.0757, 2.2819, 2.7497, 0.6326, 3.0550, 2.8843, 2.9950], + device='cuda:1'), covar=tensor([0.1126, 0.1349, 0.1944, 0.1028, 0.4034, 0.0897, 0.1050, 0.1342], + device='cuda:1'), in_proj_covar=tensor([0.0481, 0.0394, 0.0477, 0.0338, 0.0394, 0.0417, 0.0406, 0.0442], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:38:44,952 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.405e+02 4.993e+02 6.523e+02 8.112e+02 2.572e+03, threshold=1.305e+03, percent-clipped=9.0 +2023-04-02 13:39:14,822 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6915, 1.5927, 1.5292, 2.0039, 1.6460, 1.8219, 1.9754, 1.7463], + device='cuda:1'), covar=tensor([0.0802, 0.0883, 0.0999, 0.0737, 0.0779, 0.0728, 0.0790, 0.0684], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0221, 0.0226, 0.0242, 0.0227, 0.0211, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 13:39:35,378 INFO [train.py:903] (1/4) Epoch 19, batch 950, loss[loss=0.2156, simple_loss=0.2814, pruned_loss=0.07488, over 19805.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.06654, over 3803292.61 frames. ], batch size: 48, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:39:35,392 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 13:39:38,942 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3589, 3.9743, 2.6260, 3.5701, 0.8818, 3.9429, 3.7859, 3.8927], + device='cuda:1'), covar=tensor([0.0663, 0.1015, 0.1823, 0.0833, 0.3992, 0.0654, 0.0866, 0.1078], + device='cuda:1'), in_proj_covar=tensor([0.0483, 0.0395, 0.0479, 0.0340, 0.0395, 0.0418, 0.0408, 0.0444], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:40:18,681 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:35,886 INFO [train.py:903] (1/4) Epoch 19, batch 1000, loss[loss=0.2044, simple_loss=0.2941, pruned_loss=0.05737, over 19519.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06742, over 3792846.32 frames. ], batch size: 56, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:40:37,276 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:44,459 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 5.533e+02 6.903e+02 9.244e+02 2.435e+03, threshold=1.381e+03, percent-clipped=8.0 +2023-04-02 13:41:23,916 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 13:41:33,150 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1885, 1.2660, 1.7992, 1.3479, 2.6260, 3.5169, 3.3386, 3.7947], + device='cuda:1'), covar=tensor([0.1637, 0.3847, 0.3228, 0.2361, 0.0665, 0.0210, 0.0219, 0.0247], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0311, 0.0343, 0.0259, 0.0236, 0.0179, 0.0211, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 13:41:33,874 INFO [train.py:903] (1/4) Epoch 19, batch 1050, loss[loss=0.2282, simple_loss=0.3082, pruned_loss=0.07409, over 18667.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.06777, over 3788676.35 frames. ], batch size: 74, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:42:03,302 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 13:42:36,015 INFO [train.py:903] (1/4) Epoch 19, batch 1100, loss[loss=0.216, simple_loss=0.2922, pruned_loss=0.06983, over 19606.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2921, pruned_loss=0.0678, over 3800916.80 frames. ], batch size: 52, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:42:45,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.737e+02 5.703e+02 7.685e+02 1.238e+03, threshold=1.141e+03, percent-clipped=0.0 +2023-04-02 13:43:28,385 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:36,791 INFO [train.py:903] (1/4) Epoch 19, batch 1150, loss[loss=0.1824, simple_loss=0.2634, pruned_loss=0.05068, over 17816.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2916, pruned_loss=0.06765, over 3804929.22 frames. ], batch size: 39, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:43:37,070 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:59,193 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:07,956 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4117, 2.1292, 1.6580, 1.3836, 2.0104, 1.2059, 1.3001, 1.8771], + device='cuda:1'), covar=tensor([0.1011, 0.0810, 0.0968, 0.0867, 0.0536, 0.1245, 0.0698, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0311, 0.0331, 0.0258, 0.0243, 0.0332, 0.0288, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:44:08,935 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:37,474 INFO [train.py:903] (1/4) Epoch 19, batch 1200, loss[loss=0.2212, simple_loss=0.3064, pruned_loss=0.06803, over 19710.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2913, pruned_loss=0.06767, over 3821574.13 frames. 
], batch size: 59, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:44:40,172 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9806, 1.3484, 1.0647, 0.9594, 1.1805, 1.0063, 1.0283, 1.2317], + device='cuda:1'), covar=tensor([0.0545, 0.0882, 0.1139, 0.0708, 0.0556, 0.1231, 0.0557, 0.0478], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0310, 0.0329, 0.0257, 0.0241, 0.0331, 0.0287, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:44:48,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.076e+02 6.121e+02 8.217e+02 1.455e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-02 13:45:09,226 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 13:45:38,203 INFO [train.py:903] (1/4) Epoch 19, batch 1250, loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07503, over 19788.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2916, pruned_loss=0.06782, over 3820080.13 frames. ], batch size: 56, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:45:47,190 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:16,805 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:38,208 INFO [train.py:903] (1/4) Epoch 19, batch 1300, loss[loss=0.2159, simple_loss=0.2883, pruned_loss=0.07179, over 19391.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2916, pruned_loss=0.06766, over 3811952.92 frames. ], batch size: 48, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:46:47,590 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9749, 1.5967, 1.8030, 1.8693, 4.5284, 1.2421, 2.5973, 4.7980], + device='cuda:1'), covar=tensor([0.0397, 0.2778, 0.2729, 0.1914, 0.0640, 0.2645, 0.1446, 0.0189], + device='cuda:1'), in_proj_covar=tensor([0.0396, 0.0355, 0.0373, 0.0340, 0.0365, 0.0348, 0.0366, 0.0386], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:46:48,375 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.504e+02 4.873e+02 5.866e+02 7.986e+02 1.872e+03, threshold=1.173e+03, percent-clipped=5.0 +2023-04-02 13:47:15,082 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:47:37,770 INFO [train.py:903] (1/4) Epoch 19, batch 1350, loss[loss=0.2086, simple_loss=0.2916, pruned_loss=0.06283, over 19615.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2918, pruned_loss=0.06764, over 3807687.28 frames. 
], batch size: 61, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:47:58,311 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2186, 1.2155, 1.2276, 1.4159, 1.0680, 1.2520, 1.2793, 1.2712], + device='cuda:1'), covar=tensor([0.0864, 0.0931, 0.1049, 0.0650, 0.0909, 0.0896, 0.0902, 0.0770], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0226, 0.0242, 0.0227, 0.0211, 0.0189, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 13:48:22,191 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:48:22,381 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9798, 2.0709, 2.3718, 2.7030, 1.9659, 2.5862, 2.3559, 2.1070], + device='cuda:1'), covar=tensor([0.4454, 0.4152, 0.1872, 0.2487, 0.4257, 0.2186, 0.4693, 0.3497], + device='cuda:1'), in_proj_covar=tensor([0.0871, 0.0933, 0.0699, 0.0919, 0.0855, 0.0792, 0.0826, 0.0766], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 13:48:33,040 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9353, 2.0095, 2.2962, 2.6553, 1.8965, 2.5104, 2.3550, 2.0928], + device='cuda:1'), covar=tensor([0.4120, 0.3955, 0.1862, 0.2239, 0.4097, 0.2065, 0.4668, 0.3258], + device='cuda:1'), in_proj_covar=tensor([0.0872, 0.0933, 0.0699, 0.0920, 0.0856, 0.0792, 0.0827, 0.0767], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 13:48:39,027 INFO [train.py:903] (1/4) Epoch 19, batch 1400, loss[loss=0.2103, simple_loss=0.2821, pruned_loss=0.06922, over 19733.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.0675, over 3805031.97 frames. ], batch size: 45, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:48,019 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3321, 3.9173, 2.3934, 3.5395, 0.9243, 3.8754, 3.7496, 3.8747], + device='cuda:1'), covar=tensor([0.0689, 0.1121, 0.2192, 0.0879, 0.4005, 0.0696, 0.0894, 0.1039], + device='cuda:1'), in_proj_covar=tensor([0.0488, 0.0394, 0.0482, 0.0339, 0.0396, 0.0421, 0.0409, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:48:48,825 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.508e+02 6.829e+02 9.566e+02 2.163e+03, threshold=1.366e+03, percent-clipped=9.0 +2023-04-02 13:49:32,985 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:49:36,287 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5503, 4.1108, 2.6182, 3.7064, 0.8929, 4.0182, 3.9280, 4.0540], + device='cuda:1'), covar=tensor([0.0678, 0.1118, 0.2146, 0.0883, 0.4260, 0.0803, 0.0916, 0.1217], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0395, 0.0485, 0.0340, 0.0397, 0.0423, 0.0411, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:49:38,314 INFO [train.py:903] (1/4) Epoch 19, batch 1450, loss[loss=0.1858, simple_loss=0.2695, pruned_loss=0.0511, over 19663.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2933, pruned_loss=0.06828, over 3791982.20 frames. 
], batch size: 53, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:49:40,279 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 13:49:44,391 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 13:49:53,701 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124366.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:50:32,822 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:50:40,521 INFO [train.py:903] (1/4) Epoch 19, batch 1500, loss[loss=0.2504, simple_loss=0.3188, pruned_loss=0.09102, over 13606.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.294, pruned_loss=0.06872, over 3775977.53 frames. ], batch size: 135, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:50:50,230 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.822e+02 6.235e+02 8.378e+02 1.519e+03, threshold=1.247e+03, percent-clipped=2.0 +2023-04-02 13:51:04,414 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:35,992 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:39,959 INFO [train.py:903] (1/4) Epoch 19, batch 1550, loss[loss=0.2001, simple_loss=0.2696, pruned_loss=0.06526, over 19726.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2938, pruned_loss=0.06832, over 3789428.14 frames. ], batch size: 46, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:40,758 INFO [train.py:903] (1/4) Epoch 19, batch 1600, loss[loss=0.2298, simple_loss=0.3087, pruned_loss=0.07545, over 19711.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2928, pruned_loss=0.06787, over 3799869.62 frames. ], batch size: 59, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:51,838 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 4.800e+02 6.281e+02 8.115e+02 1.566e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-02 13:52:52,207 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:06,488 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 13:53:23,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:40,464 INFO [train.py:903] (1/4) Epoch 19, batch 1650, loss[loss=0.221, simple_loss=0.2908, pruned_loss=0.07554, over 19471.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2939, pruned_loss=0.06874, over 3798861.49 frames. ], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:43,097 INFO [train.py:903] (1/4) Epoch 19, batch 1700, loss[loss=0.2213, simple_loss=0.3028, pruned_loss=0.06984, over 19529.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.294, pruned_loss=0.0687, over 3805540.35 frames. 
], batch size: 56, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:44,641 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:54:53,185 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.721e+02 4.942e+02 5.736e+02 7.226e+02 1.444e+03, threshold=1.147e+03, percent-clipped=3.0 +2023-04-02 13:55:14,580 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:18,826 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:21,973 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 13:55:42,973 INFO [train.py:903] (1/4) Epoch 19, batch 1750, loss[loss=0.2102, simple_loss=0.2859, pruned_loss=0.06726, over 18715.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2938, pruned_loss=0.06842, over 3807677.95 frames. ], batch size: 74, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:55:46,449 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:16,352 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:20,585 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4945, 1.3417, 1.3898, 1.7731, 1.5715, 1.6016, 1.6625, 1.4957], + device='cuda:1'), covar=tensor([0.0661, 0.0763, 0.0808, 0.0615, 0.0867, 0.0688, 0.0837, 0.0614], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0220, 0.0225, 0.0243, 0.0227, 0.0211, 0.0189, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 13:56:44,197 INFO [train.py:903] (1/4) Epoch 19, batch 1800, loss[loss=0.1958, simple_loss=0.2609, pruned_loss=0.06531, over 19725.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2945, pruned_loss=0.06863, over 3793639.27 frames. ], batch size: 45, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:56:51,811 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124710.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:56:54,991 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.111e+02 6.286e+02 7.731e+02 1.656e+03, threshold=1.257e+03, percent-clipped=2.0 +2023-04-02 13:57:38,759 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 13:57:39,035 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:57:40,117 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0841, 1.9694, 2.0781, 1.7118, 4.6411, 1.1935, 2.8530, 5.0400], + device='cuda:1'), covar=tensor([0.0402, 0.2482, 0.2489, 0.1949, 0.0679, 0.2665, 0.1240, 0.0149], + device='cuda:1'), in_proj_covar=tensor([0.0398, 0.0357, 0.0377, 0.0342, 0.0366, 0.0349, 0.0369, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 13:57:44,335 INFO [train.py:903] (1/4) Epoch 19, batch 1850, loss[loss=0.1942, simple_loss=0.2752, pruned_loss=0.05666, over 19860.00 frames. 
], tot_loss[loss=0.2146, simple_loss=0.2931, pruned_loss=0.06809, over 3799532.53 frames. ], batch size: 52, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:03,198 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:17,475 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 13:58:33,195 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:33,383 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:34,536 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:46,482 INFO [train.py:903] (1/4) Epoch 19, batch 1900, loss[loss=0.2366, simple_loss=0.3133, pruned_loss=0.07996, over 19493.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.293, pruned_loss=0.06787, over 3798537.95 frames. ], batch size: 64, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:56,687 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 4.862e+02 5.969e+02 7.822e+02 1.490e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 13:59:01,911 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 13:59:05,754 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:07,778 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 13:59:11,238 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124825.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:59:26,846 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:32,012 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 13:59:46,593 INFO [train.py:903] (1/4) Epoch 19, batch 1950, loss[loss=0.1867, simple_loss=0.2714, pruned_loss=0.05098, over 18000.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2923, pruned_loss=0.06726, over 3811101.18 frames. ], batch size: 83, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:47,544 INFO [train.py:903] (1/4) Epoch 19, batch 2000, loss[loss=0.1949, simple_loss=0.2751, pruned_loss=0.05735, over 19846.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2924, pruned_loss=0.06709, over 3816682.56 frames. ], batch size: 52, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:54,354 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:00:58,644 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.950e+02 6.179e+02 7.428e+02 1.573e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 14:01:07,234 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 14:01:45,078 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-02 14:01:48,486 INFO [train.py:903] (1/4) Epoch 19, batch 2050, loss[loss=0.1926, simple_loss=0.2765, pruned_loss=0.05441, over 19815.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2921, pruned_loss=0.06713, over 3824330.48 frames. ], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:06,442 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 14:02:07,323 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 14:02:25,195 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 14:02:44,755 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:50,765 INFO [train.py:903] (1/4) Epoch 19, batch 2100, loss[loss=0.2149, simple_loss=0.302, pruned_loss=0.06392, over 19602.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2919, pruned_loss=0.06724, over 3832314.71 frames. ], batch size: 61, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:52,314 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:52,529 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-02 14:03:00,992 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.657e+02 4.808e+02 5.766e+02 7.881e+02 2.968e+03, threshold=1.153e+03, percent-clipped=4.0 +2023-04-02 14:03:15,326 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:15,688 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9067, 1.9983, 2.3062, 2.6263, 1.8705, 2.4510, 2.3880, 2.1448], + device='cuda:1'), covar=tensor([0.4243, 0.3850, 0.1792, 0.2086, 0.3909, 0.2027, 0.4382, 0.3194], + device='cuda:1'), in_proj_covar=tensor([0.0869, 0.0927, 0.0696, 0.0912, 0.0853, 0.0786, 0.0824, 0.0763], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:03:18,741 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 14:03:19,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6835, 2.4274, 2.2525, 2.8921, 2.3339, 2.2980, 2.1236, 2.6537], + device='cuda:1'), covar=tensor([0.0848, 0.1512, 0.1364, 0.0963, 0.1384, 0.0491, 0.1294, 0.0614], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0356, 0.0307, 0.0250, 0.0299, 0.0249, 0.0300, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:03:22,181 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125030.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:30,063 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6359, 1.7275, 2.0651, 1.8662, 3.0369, 2.4077, 3.2090, 1.9469], + device='cuda:1'), covar=tensor([0.2346, 0.4045, 0.2682, 0.1892, 0.1609, 0.2147, 0.1597, 0.3679], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0624, 0.0684, 0.0467, 0.0614, 0.0518, 0.0653, 0.0532], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:03:39,548 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 14:03:49,805 INFO [train.py:903] (1/4) Epoch 19, batch 2150, loss[loss=0.1956, simple_loss=0.2663, pruned_loss=0.06248, over 19401.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2906, pruned_loss=0.06653, over 3840487.53 frames. ], batch size: 48, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:22,809 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125081.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:04:50,319 INFO [train.py:903] (1/4) Epoch 19, batch 2200, loss[loss=0.2401, simple_loss=0.3149, pruned_loss=0.08258, over 19512.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.291, pruned_loss=0.06635, over 3837356.28 frames. ], batch size: 56, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:53,031 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125106.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:05:01,344 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.808e+02 4.810e+02 5.592e+02 6.977e+02 1.826e+03, threshold=1.118e+03, percent-clipped=4.0 +2023-04-02 14:05:04,007 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125115.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:34,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:40,924 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 14:05:41,574 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:51,113 INFO [train.py:903] (1/4) Epoch 19, batch 2250, loss[loss=0.2453, simple_loss=0.3232, pruned_loss=0.08374, over 19526.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2906, pruned_loss=0.06607, over 3842894.92 frames. 
], batch size: 56, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:05:52,656 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4088, 2.3800, 2.1737, 2.7620, 2.2340, 2.2321, 2.2163, 2.4477], + device='cuda:1'), covar=tensor([0.1007, 0.1644, 0.1406, 0.0996, 0.1465, 0.0503, 0.1246, 0.0677], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0356, 0.0307, 0.0249, 0.0299, 0.0249, 0.0300, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:06:05,782 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:25,692 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:35,059 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:52,194 INFO [train.py:903] (1/4) Epoch 19, batch 2300, loss[loss=0.192, simple_loss=0.2667, pruned_loss=0.05871, over 19298.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.06563, over 3845445.64 frames. ], batch size: 44, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:07:04,356 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.756e+02 5.218e+02 6.176e+02 8.185e+02 2.110e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 14:07:06,714 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 14:07:52,837 INFO [train.py:903] (1/4) Epoch 19, batch 2350, loss[loss=0.188, simple_loss=0.2615, pruned_loss=0.05731, over 19719.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2898, pruned_loss=0.06553, over 3845139.04 frames. ], batch size: 46, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:08:32,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 14:08:45,069 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125297.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:08:49,231 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 14:08:52,806 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9988, 2.0315, 2.2352, 2.6397, 2.0271, 2.6432, 2.3242, 2.0413], + device='cuda:1'), covar=tensor([0.3872, 0.3748, 0.1781, 0.2371, 0.4034, 0.1981, 0.4331, 0.3219], + device='cuda:1'), in_proj_covar=tensor([0.0874, 0.0934, 0.0700, 0.0921, 0.0859, 0.0789, 0.0831, 0.0768], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:08:53,424 INFO [train.py:903] (1/4) Epoch 19, batch 2400, loss[loss=0.2207, simple_loss=0.3002, pruned_loss=0.07065, over 19766.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06647, over 3836078.60 frames. 
], batch size: 56, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:08:54,884 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0817, 5.1199, 5.8793, 5.9186, 1.9018, 5.5430, 4.7198, 5.4852], + device='cuda:1'), covar=tensor([0.1522, 0.0805, 0.0575, 0.0558, 0.6243, 0.0831, 0.0596, 0.1151], + device='cuda:1'), in_proj_covar=tensor([0.0765, 0.0710, 0.0916, 0.0806, 0.0816, 0.0666, 0.0554, 0.0849], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 14:09:05,368 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.876e+02 4.925e+02 6.150e+02 7.515e+02 1.529e+03, threshold=1.230e+03, percent-clipped=3.0 +2023-04-02 14:09:45,467 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-02 14:09:53,256 INFO [train.py:903] (1/4) Epoch 19, batch 2450, loss[loss=0.1947, simple_loss=0.2745, pruned_loss=0.05743, over 19619.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2919, pruned_loss=0.06716, over 3840646.41 frames. ], batch size: 50, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:10:14,719 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125371.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:17,760 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125374.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:41,865 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,222 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,251 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:53,818 INFO [train.py:903] (1/4) Epoch 19, batch 2500, loss[loss=0.2135, simple_loss=0.2877, pruned_loss=0.06969, over 19585.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2921, pruned_loss=0.06737, over 3829493.69 frames. ], batch size: 52, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:05,670 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.898e+02 6.460e+02 8.264e+02 2.020e+03, threshold=1.292e+03, percent-clipped=4.0 +2023-04-02 14:11:13,948 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:35,789 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:54,215 INFO [train.py:903] (1/4) Epoch 19, batch 2550, loss[loss=0.1568, simple_loss=0.2384, pruned_loss=0.03765, over 19404.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2915, pruned_loss=0.06678, over 3835614.31 frames. 
], batch size: 47, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:56,651 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.0729, 5.4963, 3.2397, 4.7894, 0.9702, 5.5288, 5.4546, 5.6384], + device='cuda:1'), covar=tensor([0.0415, 0.0965, 0.1800, 0.0730, 0.4193, 0.0539, 0.0768, 0.0969], + device='cuda:1'), in_proj_covar=tensor([0.0486, 0.0396, 0.0480, 0.0337, 0.0394, 0.0420, 0.0411, 0.0445], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:12:38,049 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:12:47,107 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 14:12:53,797 INFO [train.py:903] (1/4) Epoch 19, batch 2600, loss[loss=0.2531, simple_loss=0.3228, pruned_loss=0.09167, over 19530.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2908, pruned_loss=0.06643, over 3833585.15 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:13:05,879 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 4.815e+02 5.841e+02 7.665e+02 1.339e+03, threshold=1.168e+03, percent-clipped=2.0 +2023-04-02 14:13:21,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1888, 1.4526, 1.8375, 1.3151, 2.6467, 3.5788, 3.2857, 3.7701], + device='cuda:1'), covar=tensor([0.1589, 0.3469, 0.3086, 0.2313, 0.0535, 0.0168, 0.0209, 0.0230], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0311, 0.0343, 0.0260, 0.0237, 0.0179, 0.0212, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 14:13:48,578 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=1.97 vs. limit=5.0 +2023-04-02 14:13:52,895 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125553.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:13:53,673 INFO [train.py:903] (1/4) Epoch 19, batch 2650, loss[loss=0.1708, simple_loss=0.2557, pruned_loss=0.0429, over 19850.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2905, pruned_loss=0.06602, over 3846332.79 frames. ], batch size: 52, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:15,458 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 14:14:23,622 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125578.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:54,493 INFO [train.py:903] (1/4) Epoch 19, batch 2700, loss[loss=0.2099, simple_loss=0.2917, pruned_loss=0.06406, over 19768.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06619, over 3829982.58 frames. 
], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:55,976 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:07,180 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.817e+02 5.964e+02 7.468e+02 1.608e+03, threshold=1.193e+03, percent-clipped=5.0 +2023-04-02 14:15:15,690 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:31,469 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5952, 1.6371, 1.5438, 1.3818, 1.2742, 1.3888, 0.3397, 0.7112], + device='cuda:1'), covar=tensor([0.0584, 0.0551, 0.0355, 0.0519, 0.0933, 0.0618, 0.1064, 0.0885], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0349, 0.0351, 0.0374, 0.0451, 0.0385, 0.0331, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:15:35,556 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7336, 1.2396, 1.5642, 1.6038, 3.3161, 1.0192, 2.1746, 3.6965], + device='cuda:1'), covar=tensor([0.0471, 0.2830, 0.2807, 0.1743, 0.0645, 0.2606, 0.1467, 0.0247], + device='cuda:1'), in_proj_covar=tensor([0.0397, 0.0358, 0.0376, 0.0342, 0.0365, 0.0349, 0.0368, 0.0387], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:15:56,193 INFO [train.py:903] (1/4) Epoch 19, batch 2750, loss[loss=0.2308, simple_loss=0.3079, pruned_loss=0.07685, over 19692.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06631, over 3836510.86 frames. ], batch size: 59, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:16:05,831 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6249, 1.6544, 1.9944, 1.8640, 2.9304, 2.4114, 2.9408, 1.5968], + device='cuda:1'), covar=tensor([0.2500, 0.4355, 0.2699, 0.1940, 0.1446, 0.2150, 0.1503, 0.4034], + device='cuda:1'), in_proj_covar=tensor([0.0518, 0.0625, 0.0688, 0.0470, 0.0614, 0.0521, 0.0655, 0.0534], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:16:55,538 INFO [train.py:903] (1/4) Epoch 19, batch 2800, loss[loss=0.1749, simple_loss=0.2529, pruned_loss=0.04847, over 19743.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.06641, over 3835653.61 frames. ], batch size: 46, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:17:08,456 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.188e+02 6.338e+02 7.813e+02 1.733e+03, threshold=1.268e+03, percent-clipped=8.0 +2023-04-02 14:17:12,856 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:37,018 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:39,791 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 14:17:42,813 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:55,253 INFO [train.py:903] (1/4) Epoch 19, batch 2850, loss[loss=0.2135, simple_loss=0.2953, pruned_loss=0.06583, over 19661.00 frames. 
], tot_loss[loss=0.2124, simple_loss=0.2914, pruned_loss=0.06666, over 3812992.92 frames. ], batch size: 58, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:25,401 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9729, 1.1546, 1.4676, 0.6180, 1.8974, 2.1574, 1.9586, 2.3183], + device='cuda:1'), covar=tensor([0.1431, 0.3464, 0.3000, 0.2686, 0.0775, 0.0389, 0.0343, 0.0408], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0312, 0.0343, 0.0260, 0.0238, 0.0179, 0.0212, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 14:18:30,689 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:18:56,093 INFO [train.py:903] (1/4) Epoch 19, batch 2900, loss[loss=0.2416, simple_loss=0.3171, pruned_loss=0.08301, over 19463.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2905, pruned_loss=0.06616, over 3807153.27 frames. ], batch size: 70, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:56,106 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 14:19:01,449 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9449, 4.4709, 2.6632, 3.9348, 0.9501, 4.4756, 4.3682, 4.4455], + device='cuda:1'), covar=tensor([0.0591, 0.0978, 0.2193, 0.0880, 0.4081, 0.0655, 0.0806, 0.1109], + device='cuda:1'), in_proj_covar=tensor([0.0487, 0.0398, 0.0481, 0.0338, 0.0395, 0.0419, 0.0410, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:19:09,043 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.625e+02 5.511e+02 7.350e+02 1.619e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 14:19:31,524 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:55,779 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:56,475 INFO [train.py:903] (1/4) Epoch 19, batch 2950, loss[loss=0.2239, simple_loss=0.3007, pruned_loss=0.07353, over 19520.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.06571, over 3813013.58 frames. ], batch size: 54, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:05,693 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:35,694 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:50,759 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:57,388 INFO [train.py:903] (1/4) Epoch 19, batch 3000, loss[loss=0.1897, simple_loss=0.277, pruned_loss=0.0512, over 18344.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2902, pruned_loss=0.06623, over 3802721.37 frames. ], batch size: 84, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:57,388 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 14:21:10,733 INFO [train.py:937] (1/4) Epoch 19, validation: loss=0.1696, simple_loss=0.2702, pruned_loss=0.03451, over 944034.00 frames. 
+2023-04-02 14:21:10,733 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 14:21:10,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 14:21:24,049 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 4.999e+02 6.816e+02 8.693e+02 1.814e+03, threshold=1.363e+03, percent-clipped=12.0 +2023-04-02 14:22:11,565 INFO [train.py:903] (1/4) Epoch 19, batch 3050, loss[loss=0.1901, simple_loss=0.2675, pruned_loss=0.05636, over 19477.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06621, over 3814568.48 frames. ], batch size: 49, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:22:24,978 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:23:13,567 INFO [train.py:903] (1/4) Epoch 19, batch 3100, loss[loss=0.1846, simple_loss=0.266, pruned_loss=0.05163, over 19750.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06599, over 3816619.43 frames. ], batch size: 51, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:23:14,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 14:23:26,817 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.651e+02 5.591e+02 6.916e+02 1.279e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-02 14:23:28,010 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:13,739 INFO [train.py:903] (1/4) Epoch 19, batch 3150, loss[loss=0.1903, simple_loss=0.2757, pruned_loss=0.05243, over 19777.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2908, pruned_loss=0.06681, over 3815616.65 frames. ], batch size: 54, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:24:40,340 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 14:24:45,987 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:48,473 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 14:24:53,524 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:55,925 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:14,240 INFO [train.py:903] (1/4) Epoch 19, batch 3200, loss[loss=0.2306, simple_loss=0.3146, pruned_loss=0.07327, over 19667.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2908, pruned_loss=0.06698, over 3792220.71 frames. 
], batch size: 58, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:25:21,256 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:26,849 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126114.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:27,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 4.817e+02 6.226e+02 7.515e+02 1.545e+03, threshold=1.245e+03, percent-clipped=7.0 +2023-04-02 14:25:51,111 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:15,152 INFO [train.py:903] (1/4) Epoch 19, batch 3250, loss[loss=0.2489, simple_loss=0.3288, pruned_loss=0.08451, over 19366.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.06674, over 3799276.60 frames. ], batch size: 66, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:26:15,591 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:32,437 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7148, 1.5588, 1.6538, 2.1956, 1.7090, 1.9862, 2.0208, 1.8582], + device='cuda:1'), covar=tensor([0.0792, 0.0887, 0.0964, 0.0751, 0.0839, 0.0745, 0.0875, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0227, 0.0245, 0.0227, 0.0212, 0.0189, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 14:26:46,076 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:14,828 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:16,668 INFO [train.py:903] (1/4) Epoch 19, batch 3300, loss[loss=0.2273, simple_loss=0.3095, pruned_loss=0.0725, over 19548.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.291, pruned_loss=0.06665, over 3808634.51 frames. ], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:27:20,145 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 14:27:30,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.439e+02 5.162e+02 6.410e+02 7.971e+02 2.422e+03, threshold=1.282e+03, percent-clipped=4.0 +2023-04-02 14:27:36,976 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9801, 4.5234, 2.8110, 3.9059, 0.9807, 4.3896, 4.3193, 4.4680], + device='cuda:1'), covar=tensor([0.0559, 0.0871, 0.1896, 0.0896, 0.4105, 0.0720, 0.0884, 0.0984], + device='cuda:1'), in_proj_covar=tensor([0.0488, 0.0394, 0.0479, 0.0337, 0.0394, 0.0420, 0.0410, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:28:17,444 INFO [train.py:903] (1/4) Epoch 19, batch 3350, loss[loss=0.2263, simple_loss=0.3064, pruned_loss=0.07312, over 19318.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2904, pruned_loss=0.06629, over 3826906.12 frames. 
], batch size: 66, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:29:08,294 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2732, 2.1349, 1.9514, 1.7679, 1.6037, 1.7678, 0.5443, 1.2796], + device='cuda:1'), covar=tensor([0.0601, 0.0606, 0.0482, 0.0844, 0.1140, 0.0942, 0.1300, 0.0981], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0350, 0.0352, 0.0377, 0.0453, 0.0385, 0.0332, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:29:17,950 INFO [train.py:903] (1/4) Epoch 19, batch 3400, loss[loss=0.2504, simple_loss=0.3208, pruned_loss=0.09002, over 17464.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2909, pruned_loss=0.06712, over 3812886.79 frames. ], batch size: 101, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:29:25,161 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3798, 1.4403, 1.6410, 1.5972, 2.2015, 2.0594, 2.2459, 0.9439], + device='cuda:1'), covar=tensor([0.2391, 0.4101, 0.2559, 0.1914, 0.1526, 0.2096, 0.1403, 0.4397], + device='cuda:1'), in_proj_covar=tensor([0.0519, 0.0627, 0.0689, 0.0474, 0.0617, 0.0521, 0.0656, 0.0536], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:29:31,307 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.180e+02 6.014e+02 8.021e+02 1.733e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 14:29:57,168 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:18,213 INFO [train.py:903] (1/4) Epoch 19, batch 3450, loss[loss=0.2759, simple_loss=0.3449, pruned_loss=0.1034, over 19282.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.29, pruned_loss=0.06652, over 3818593.32 frames. ], batch size: 66, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:30:22,535 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 14:30:27,185 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:28,589 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126361.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:13,300 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:20,718 INFO [train.py:903] (1/4) Epoch 19, batch 3500, loss[loss=0.1877, simple_loss=0.2756, pruned_loss=0.04991, over 19768.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2897, pruned_loss=0.06588, over 3830163.78 frames. ], batch size: 54, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:31:34,958 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.908e+02 6.053e+02 7.325e+02 1.346e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 14:32:21,707 INFO [train.py:903] (1/4) Epoch 19, batch 3550, loss[loss=0.2063, simple_loss=0.2859, pruned_loss=0.06333, over 19674.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06685, over 3817436.16 frames. 
], batch size: 53, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:32:23,061 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1381, 1.4365, 1.8820, 1.2963, 2.7364, 3.6996, 3.4340, 3.8867], + device='cuda:1'), covar=tensor([0.1686, 0.3627, 0.2997, 0.2308, 0.0560, 0.0166, 0.0204, 0.0250], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0313, 0.0344, 0.0260, 0.0238, 0.0181, 0.0214, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 14:32:26,679 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:38,974 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:48,151 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126475.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:57,924 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126483.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:33:21,951 INFO [train.py:903] (1/4) Epoch 19, batch 3600, loss[loss=0.2554, simple_loss=0.3289, pruned_loss=0.09094, over 19521.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.06679, over 3830028.05 frames. ], batch size: 54, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:33:37,204 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.926e+02 5.826e+02 7.456e+02 2.258e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 14:34:04,272 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6134, 2.3851, 1.6836, 1.5688, 2.1929, 1.2920, 1.4560, 2.0291], + device='cuda:1'), covar=tensor([0.1030, 0.0740, 0.1121, 0.0836, 0.0528, 0.1320, 0.0734, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0300, 0.0311, 0.0332, 0.0260, 0.0243, 0.0333, 0.0289, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:34:23,091 INFO [train.py:903] (1/4) Epoch 19, batch 3650, loss[loss=0.2062, simple_loss=0.2823, pruned_loss=0.06506, over 19680.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2913, pruned_loss=0.06692, over 3832232.41 frames. ], batch size: 53, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:24,546 INFO [train.py:903] (1/4) Epoch 19, batch 3700, loss[loss=0.2363, simple_loss=0.2998, pruned_loss=0.08635, over 19762.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2908, pruned_loss=0.0669, over 3835084.01 frames. ], batch size: 47, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:38,476 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.326e+02 6.409e+02 8.349e+02 1.648e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-02 14:36:23,995 INFO [train.py:903] (1/4) Epoch 19, batch 3750, loss[loss=0.2463, simple_loss=0.3296, pruned_loss=0.08155, over 18803.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2903, pruned_loss=0.06647, over 3836175.62 frames. ], batch size: 74, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:25,130 INFO [train.py:903] (1/4) Epoch 19, batch 3800, loss[loss=0.1999, simple_loss=0.2918, pruned_loss=0.05398, over 19653.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06694, over 3812060.71 frames. 
], batch size: 55, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:40,984 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.757e+02 5.693e+02 7.302e+02 1.543e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-02 14:37:57,317 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 14:37:58,809 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126731.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:12,086 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:13,551 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1812, 2.2377, 2.4039, 3.0140, 2.2051, 2.9595, 2.5075, 2.1096], + device='cuda:1'), covar=tensor([0.4368, 0.3945, 0.1874, 0.2509, 0.4385, 0.2077, 0.4754, 0.3465], + device='cuda:1'), in_proj_covar=tensor([0.0873, 0.0932, 0.0699, 0.0918, 0.0856, 0.0789, 0.0825, 0.0766], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:38:26,703 INFO [train.py:903] (1/4) Epoch 19, batch 3850, loss[loss=0.2263, simple_loss=0.3061, pruned_loss=0.07327, over 17530.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06631, over 3812259.37 frames. ], batch size: 101, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:38:30,342 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126756.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:46,100 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9767, 1.9503, 1.7844, 1.6133, 1.4701, 1.5746, 0.4409, 0.9196], + device='cuda:1'), covar=tensor([0.0557, 0.0532, 0.0387, 0.0619, 0.1122, 0.0757, 0.1179, 0.0997], + device='cuda:1'), in_proj_covar=tensor([0.0355, 0.0347, 0.0353, 0.0375, 0.0453, 0.0386, 0.0331, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:39:28,455 INFO [train.py:903] (1/4) Epoch 19, batch 3900, loss[loss=0.2057, simple_loss=0.2904, pruned_loss=0.06051, over 19613.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2905, pruned_loss=0.06594, over 3817487.89 frames. ], batch size: 57, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:39:37,773 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:40,355 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. 
limit=2.0 +2023-04-02 14:39:42,910 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.082e+02 6.454e+02 7.734e+02 3.345e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-02 14:40:04,522 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126834.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:21,639 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9911, 2.0535, 2.3363, 2.6228, 1.8521, 2.4868, 2.3130, 2.1060], + device='cuda:1'), covar=tensor([0.4134, 0.3996, 0.1921, 0.2468, 0.4249, 0.2119, 0.4725, 0.3391], + device='cuda:1'), in_proj_covar=tensor([0.0871, 0.0929, 0.0696, 0.0915, 0.0853, 0.0787, 0.0822, 0.0763], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:40:29,132 INFO [train.py:903] (1/4) Epoch 19, batch 3950, loss[loss=0.1817, simple_loss=0.2566, pruned_loss=0.05342, over 19369.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.06508, over 3823221.38 frames. ], batch size: 47, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:40:33,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126857.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:35,262 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 14:41:13,516 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3750, 1.4215, 1.8810, 1.6186, 2.4326, 2.0615, 2.5163, 1.0375], + device='cuda:1'), covar=tensor([0.2671, 0.4563, 0.2636, 0.2155, 0.1786, 0.2459, 0.1680, 0.4773], + device='cuda:1'), in_proj_covar=tensor([0.0518, 0.0626, 0.0690, 0.0473, 0.0616, 0.0520, 0.0656, 0.0535], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:41:29,517 INFO [train.py:903] (1/4) Epoch 19, batch 4000, loss[loss=0.2379, simple_loss=0.3218, pruned_loss=0.07702, over 19737.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2895, pruned_loss=0.06515, over 3815342.71 frames. ], batch size: 63, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:41:43,557 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.579e+02 4.980e+02 6.258e+02 9.023e+02 1.716e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-02 14:41:47,754 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0272, 3.6730, 2.5924, 3.2341, 0.8820, 3.5605, 3.5073, 3.5610], + device='cuda:1'), covar=tensor([0.0776, 0.1070, 0.1852, 0.0972, 0.3971, 0.0781, 0.0995, 0.1284], + device='cuda:1'), in_proj_covar=tensor([0.0486, 0.0394, 0.0480, 0.0338, 0.0396, 0.0419, 0.0409, 0.0444], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:41:57,631 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:42:16,842 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 14:42:30,357 INFO [train.py:903] (1/4) Epoch 19, batch 4050, loss[loss=0.2158, simple_loss=0.2941, pruned_loss=0.06878, over 19587.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2894, pruned_loss=0.06564, over 3825492.93 frames. 
], batch size: 61, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:26,561 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1790, 1.1216, 1.4860, 1.2800, 2.6695, 3.5519, 3.3547, 3.7822], + device='cuda:1'), covar=tensor([0.1729, 0.4053, 0.3681, 0.2451, 0.0631, 0.0192, 0.0225, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0315, 0.0345, 0.0261, 0.0238, 0.0181, 0.0214, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 14:43:30,605 INFO [train.py:903] (1/4) Epoch 19, batch 4100, loss[loss=0.2055, simple_loss=0.291, pruned_loss=0.06003, over 17518.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2887, pruned_loss=0.06518, over 3828075.40 frames. ], batch size: 101, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:45,830 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.699e+02 5.681e+02 7.096e+02 1.300e+03, threshold=1.136e+03, percent-clipped=1.0 +2023-04-02 14:44:06,976 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 14:44:31,634 INFO [train.py:903] (1/4) Epoch 19, batch 4150, loss[loss=0.2197, simple_loss=0.2971, pruned_loss=0.07116, over 19672.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.06613, over 3833626.47 frames. ], batch size: 60, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:44:42,403 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.81 vs. limit=5.0 +2023-04-02 14:45:04,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7316, 4.1945, 4.4512, 4.4402, 1.7003, 4.1971, 3.6938, 4.1489], + device='cuda:1'), covar=tensor([0.1611, 0.1018, 0.0587, 0.0655, 0.5968, 0.0797, 0.0631, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0770, 0.0716, 0.0921, 0.0808, 0.0821, 0.0675, 0.0556, 0.0858], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 14:45:23,652 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3866, 1.4822, 1.8798, 1.7047, 2.6834, 2.0708, 2.7367, 1.2315], + device='cuda:1'), covar=tensor([0.2699, 0.4554, 0.2881, 0.2066, 0.1588, 0.2551, 0.1714, 0.4605], + device='cuda:1'), in_proj_covar=tensor([0.0520, 0.0627, 0.0691, 0.0472, 0.0616, 0.0520, 0.0659, 0.0536], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:45:32,545 INFO [train.py:903] (1/4) Epoch 19, batch 4200, loss[loss=0.2057, simple_loss=0.2937, pruned_loss=0.05884, over 19469.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06599, over 3831137.96 frames. ], batch size: 49, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:35,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 14:45:43,054 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127113.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:45:46,906 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 4.915e+02 5.762e+02 6.825e+02 1.362e+03, threshold=1.152e+03, percent-clipped=8.0 +2023-04-02 14:46:14,867 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:46:32,718 INFO [train.py:903] (1/4) Epoch 19, batch 4250, loss[loss=0.1773, simple_loss=0.252, pruned_loss=0.05124, over 19745.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2901, pruned_loss=0.06553, over 3831990.44 frames. ], batch size: 46, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:46:33,001 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8154, 0.8334, 1.0098, 1.0316, 1.6062, 0.8363, 1.5483, 1.7656], + device='cuda:1'), covar=tensor([0.0576, 0.2229, 0.2110, 0.1237, 0.0651, 0.1630, 0.1182, 0.0516], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0357, 0.0375, 0.0340, 0.0365, 0.0346, 0.0366, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:46:50,065 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 14:47:01,529 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 14:47:03,645 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:09,518 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:34,698 INFO [train.py:903] (1/4) Epoch 19, batch 4300, loss[loss=0.1669, simple_loss=0.2452, pruned_loss=0.04432, over 19744.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2899, pruned_loss=0.06566, over 3814785.22 frames. ], batch size: 46, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:47:40,369 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127208.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:50,092 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.800e+02 4.893e+02 5.914e+02 7.996e+02 1.682e+03, threshold=1.183e+03, percent-clipped=7.0 +2023-04-02 14:48:17,679 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1318, 1.9977, 1.8449, 1.6669, 1.5101, 1.6011, 0.5469, 1.0717], + device='cuda:1'), covar=tensor([0.0524, 0.0594, 0.0435, 0.0734, 0.1165, 0.0884, 0.1243, 0.0989], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0345, 0.0350, 0.0374, 0.0452, 0.0381, 0.0328, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 14:48:28,059 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 14:48:35,537 INFO [train.py:903] (1/4) Epoch 19, batch 4350, loss[loss=0.2324, simple_loss=0.3131, pruned_loss=0.07582, over 18921.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2912, pruned_loss=0.06619, over 3802571.87 frames. 
], batch size: 74, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:48:51,129 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:19,646 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8378, 1.8932, 1.4531, 1.8235, 1.8246, 1.4707, 1.4487, 1.6937], + device='cuda:1'), covar=tensor([0.1214, 0.1392, 0.1759, 0.1217, 0.1341, 0.0956, 0.1795, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0352, 0.0304, 0.0248, 0.0296, 0.0245, 0.0297, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:49:23,164 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:26,472 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5061, 1.0907, 1.3215, 1.0892, 2.1109, 0.9321, 1.9719, 2.3892], + device='cuda:1'), covar=tensor([0.0918, 0.3092, 0.3056, 0.2086, 0.1186, 0.2417, 0.1255, 0.0565], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0357, 0.0375, 0.0341, 0.0366, 0.0348, 0.0367, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:49:35,821 INFO [train.py:903] (1/4) Epoch 19, batch 4400, loss[loss=0.1919, simple_loss=0.2749, pruned_loss=0.05447, over 19725.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2911, pruned_loss=0.06611, over 3824820.70 frames. ], batch size: 51, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:49:36,053 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3601, 3.9192, 2.6812, 3.5665, 0.8268, 3.8188, 3.7498, 3.8528], + device='cuda:1'), covar=tensor([0.0696, 0.1017, 0.1888, 0.0862, 0.4076, 0.0772, 0.0912, 0.1134], + device='cuda:1'), in_proj_covar=tensor([0.0487, 0.0393, 0.0484, 0.0337, 0.0397, 0.0420, 0.0410, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:49:49,568 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.239e+02 6.175e+02 6.853e+02 1.670e+03, threshold=1.235e+03, percent-clipped=2.0 +2023-04-02 14:50:02,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 14:50:12,008 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 14:50:36,316 INFO [train.py:903] (1/4) Epoch 19, batch 4450, loss[loss=0.1952, simple_loss=0.271, pruned_loss=0.05968, over 19484.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06663, over 3815156.01 frames. ], batch size: 49, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:03,362 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3737, 3.9660, 2.7199, 3.5693, 0.8405, 3.8675, 3.7890, 3.8777], + device='cuda:1'), covar=tensor([0.0679, 0.0973, 0.1816, 0.0800, 0.4137, 0.0728, 0.0912, 0.1110], + device='cuda:1'), in_proj_covar=tensor([0.0487, 0.0393, 0.0482, 0.0337, 0.0397, 0.0419, 0.0410, 0.0445], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:51:38,004 INFO [train.py:903] (1/4) Epoch 19, batch 4500, loss[loss=0.2543, simple_loss=0.3292, pruned_loss=0.08969, over 17249.00 frames. 
], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06648, over 3813540.42 frames. ], batch size: 101, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:52,896 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.116e+02 6.133e+02 7.767e+02 1.446e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-02 14:51:57,330 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8177, 1.3657, 1.5743, 1.5486, 3.4155, 1.2705, 2.5228, 3.7979], + device='cuda:1'), covar=tensor([0.0447, 0.2672, 0.2735, 0.1886, 0.0682, 0.2388, 0.1106, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0393, 0.0357, 0.0376, 0.0341, 0.0367, 0.0347, 0.0368, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:52:16,014 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6025, 1.7206, 1.9306, 2.0110, 1.5334, 1.9223, 1.9640, 1.7987], + device='cuda:1'), covar=tensor([0.3887, 0.3304, 0.1817, 0.2088, 0.3448, 0.1896, 0.4582, 0.3137], + device='cuda:1'), in_proj_covar=tensor([0.0873, 0.0930, 0.0698, 0.0916, 0.0855, 0.0790, 0.0826, 0.0763], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:52:28,080 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127445.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:52:39,720 INFO [train.py:903] (1/4) Epoch 19, batch 4550, loss[loss=0.2186, simple_loss=0.3059, pruned_loss=0.06564, over 19665.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2914, pruned_loss=0.06651, over 3821221.90 frames. ], batch size: 60, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:52:48,345 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 14:53:11,995 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 14:53:40,574 INFO [train.py:903] (1/4) Epoch 19, batch 4600, loss[loss=0.2176, simple_loss=0.3005, pruned_loss=0.06739, over 19746.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2913, pruned_loss=0.06697, over 3825130.04 frames. ], batch size: 63, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:53:48,019 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 14:53:52,430 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1096, 1.7300, 1.7537, 2.7370, 2.1440, 2.2698, 2.4087, 2.0498], + device='cuda:1'), covar=tensor([0.0827, 0.0957, 0.1059, 0.0808, 0.0864, 0.0792, 0.0881, 0.0709], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0246, 0.0230, 0.0213, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 14:53:54,252 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.018e+02 6.286e+02 8.427e+02 2.189e+03, threshold=1.257e+03, percent-clipped=8.0 +2023-04-02 14:54:32,408 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:34,722 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:39,906 INFO [train.py:903] (1/4) Epoch 19, batch 4650, loss[loss=0.2311, simple_loss=0.3119, pruned_loss=0.07515, over 19522.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06691, over 3826596.88 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:54:55,864 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 14:55:02,163 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9077, 2.0688, 2.2438, 2.6523, 1.9286, 2.4468, 2.2443, 1.9079], + device='cuda:1'), covar=tensor([0.4546, 0.4034, 0.2072, 0.2535, 0.4335, 0.2351, 0.5112, 0.3767], + device='cuda:1'), in_proj_covar=tensor([0.0879, 0.0937, 0.0702, 0.0924, 0.0861, 0.0796, 0.0831, 0.0768], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 14:55:05,384 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:07,389 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 14:55:40,716 INFO [train.py:903] (1/4) Epoch 19, batch 4700, loss[loss=0.1981, simple_loss=0.2827, pruned_loss=0.05669, over 19791.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.291, pruned_loss=0.06667, over 3830054.99 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:55:41,052 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4035, 1.0942, 1.2787, 2.2463, 1.6513, 1.4049, 1.6396, 1.4499], + device='cuda:1'), covar=tensor([0.1195, 0.1794, 0.1421, 0.0970, 0.1149, 0.1475, 0.1366, 0.1105], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 14:55:50,499 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:55,863 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 5.036e+02 6.203e+02 8.078e+02 1.735e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-02 14:56:02,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 14:56:41,689 INFO [train.py:903] (1/4) Epoch 19, batch 4750, loss[loss=0.1629, simple_loss=0.2432, pruned_loss=0.04127, over 19795.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.06688, over 3829714.94 frames. ], batch size: 48, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:13,617 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:57:33,552 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6032, 1.3335, 1.4683, 1.6040, 3.1956, 1.1185, 2.3003, 3.5770], + device='cuda:1'), covar=tensor([0.0516, 0.2771, 0.2824, 0.1755, 0.0688, 0.2547, 0.1369, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0395, 0.0359, 0.0377, 0.0341, 0.0367, 0.0347, 0.0369, 0.0387], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:57:41,808 INFO [train.py:903] (1/4) Epoch 19, batch 4800, loss[loss=0.2177, simple_loss=0.2931, pruned_loss=0.0712, over 18130.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2899, pruned_loss=0.06615, over 3829761.70 frames. ], batch size: 83, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:55,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.990e+02 6.329e+02 8.030e+02 1.437e+03, threshold=1.266e+03, percent-clipped=1.0 +2023-04-02 14:58:07,614 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127726.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:58:40,155 INFO [train.py:903] (1/4) Epoch 19, batch 4850, loss[loss=0.1747, simple_loss=0.2595, pruned_loss=0.04495, over 19606.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.291, pruned_loss=0.06698, over 3821019.93 frames. ], batch size: 50, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:04,710 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 14:59:22,986 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127789.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:59:25,076 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 14:59:30,829 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 14:59:30,850 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 14:59:32,258 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:59:35,937 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8220, 1.5593, 1.4181, 1.8894, 1.4536, 1.6080, 1.4484, 1.6668], + device='cuda:1'), covar=tensor([0.1027, 0.1271, 0.1444, 0.0854, 0.1253, 0.0531, 0.1307, 0.0733], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0351, 0.0305, 0.0248, 0.0296, 0.0246, 0.0296, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 14:59:40,800 INFO [train.py:903] (1/4) Epoch 19, batch 4900, loss[loss=0.196, simple_loss=0.283, pruned_loss=0.05454, over 19488.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2907, pruned_loss=0.0664, over 3827278.38 frames. 
], batch size: 64, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:40,834 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 14:59:55,907 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.848e+02 5.865e+02 7.992e+02 2.664e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-02 15:00:01,798 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 15:00:41,588 INFO [train.py:903] (1/4) Epoch 19, batch 4950, loss[loss=0.2281, simple_loss=0.3118, pruned_loss=0.07217, over 19588.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.291, pruned_loss=0.06684, over 3821008.52 frames. ], batch size: 61, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:00:58,818 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:00:59,648 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 15:01:22,180 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 15:01:23,545 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2035, 1.1622, 1.5974, 1.0654, 2.3681, 3.3300, 3.0502, 3.5075], + device='cuda:1'), covar=tensor([0.1645, 0.3988, 0.3481, 0.2584, 0.0664, 0.0186, 0.0236, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0315, 0.0344, 0.0260, 0.0237, 0.0181, 0.0213, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 15:01:24,529 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:26,340 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127891.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:41,516 INFO [train.py:903] (1/4) Epoch 19, batch 5000, loss[loss=0.186, simple_loss=0.2609, pruned_loss=0.05551, over 19409.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2901, pruned_loss=0.06615, over 3827976.37 frames. ], batch size: 48, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:01:41,905 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:01:51,189 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 15:01:55,669 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.675e+02 5.614e+02 6.818e+02 2.294e+03, threshold=1.123e+03, percent-clipped=3.0 +2023-04-02 15:02:03,228 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 15:02:04,134 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-02 15:02:22,569 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-02 15:02:41,852 INFO [train.py:903] (1/4) Epoch 19, batch 5050, loss[loss=0.241, simple_loss=0.3229, pruned_loss=0.07952, over 19568.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2902, pruned_loss=0.06616, over 3827457.45 frames. 
], batch size: 61, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:16,224 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:18,092 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 15:03:43,134 INFO [train.py:903] (1/4) Epoch 19, batch 5100, loss[loss=0.224, simple_loss=0.2977, pruned_loss=0.07509, over 19586.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06569, over 3836516.75 frames. ], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:45,749 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,025 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,355 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 15:03:56,504 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 15:03:58,283 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.139e+02 4.818e+02 5.706e+02 8.227e+02 1.561e+03, threshold=1.141e+03, percent-clipped=7.0 +2023-04-02 15:04:00,665 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 15:04:04,160 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 15:04:07,801 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128024.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:04:43,092 INFO [train.py:903] (1/4) Epoch 19, batch 5150, loss[loss=0.2505, simple_loss=0.3193, pruned_loss=0.09081, over 13277.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2904, pruned_loss=0.06645, over 3818970.26 frames. ], batch size: 136, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:04:51,936 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7135, 1.6966, 1.5958, 1.3529, 1.3525, 1.4246, 0.2240, 0.6741], + device='cuda:1'), covar=tensor([0.0560, 0.0577, 0.0364, 0.0575, 0.1095, 0.0677, 0.1174, 0.0984], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0346, 0.0348, 0.0372, 0.0447, 0.0380, 0.0328, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 15:04:57,225 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:05:31,583 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:05:44,863 INFO [train.py:903] (1/4) Epoch 19, batch 5200, loss[loss=0.2225, simple_loss=0.2822, pruned_loss=0.08144, over 19758.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.292, pruned_loss=0.06755, over 3805959.14 frames. ], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:05:59,011 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.969e+02 4.920e+02 6.208e+02 7.921e+02 1.726e+03, threshold=1.242e+03, percent-clipped=7.0 +2023-04-02 15:05:59,072 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-02 15:06:16,652 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 15:06:28,682 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128139.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:29,678 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1199, 1.3124, 1.4332, 1.2740, 2.7307, 1.0026, 2.1802, 3.0150], + device='cuda:1'), covar=tensor([0.0572, 0.2670, 0.2779, 0.1938, 0.0773, 0.2479, 0.1137, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0397, 0.0358, 0.0378, 0.0344, 0.0369, 0.0349, 0.0371, 0.0390], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:06:30,662 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128141.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:41,127 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 15:06:45,758 INFO [train.py:903] (1/4) Epoch 19, batch 5250, loss[loss=0.2022, simple_loss=0.2874, pruned_loss=0.05848, over 19527.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2923, pruned_loss=0.06761, over 3797714.36 frames. ], batch size: 56, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:06:53,640 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128160.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:07:23,135 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128185.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:07:45,449 INFO [train.py:903] (1/4) Epoch 19, batch 5300, loss[loss=0.2334, simple_loss=0.3132, pruned_loss=0.07686, over 19526.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2911, pruned_loss=0.06658, over 3811768.69 frames. ], batch size: 56, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:07:54,706 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:07:59,668 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.001e+02 6.088e+02 7.600e+02 1.403e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 15:08:00,870 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 15:08:21,807 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:34,536 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3198, 1.3460, 1.4672, 1.4505, 1.7019, 1.7960, 1.6607, 0.5737], + device='cuda:1'), covar=tensor([0.2352, 0.4056, 0.2517, 0.1894, 0.1601, 0.2178, 0.1406, 0.4654], + device='cuda:1'), in_proj_covar=tensor([0.0522, 0.0629, 0.0692, 0.0475, 0.0615, 0.0521, 0.0660, 0.0536], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:08:46,734 INFO [train.py:903] (1/4) Epoch 19, batch 5350, loss[loss=0.1765, simple_loss=0.2546, pruned_loss=0.04918, over 19749.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.06614, over 3812410.99 frames. 
], batch size: 46, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:08:50,307 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1328, 1.1697, 1.6777, 1.3133, 2.6393, 3.6876, 3.4448, 3.9557], + device='cuda:1'), covar=tensor([0.1759, 0.4006, 0.3530, 0.2479, 0.0652, 0.0210, 0.0212, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0260, 0.0238, 0.0181, 0.0213, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 15:08:50,319 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:57,142 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:20,023 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 15:09:26,910 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:47,392 INFO [train.py:903] (1/4) Epoch 19, batch 5400, loss[loss=0.2313, simple_loss=0.3123, pruned_loss=0.07513, over 19701.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2912, pruned_loss=0.06629, over 3822376.76 frames. ], batch size: 59, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:10:00,870 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.6623, 5.1218, 2.9800, 4.6012, 1.2828, 5.1352, 5.0454, 5.2701], + device='cuda:1'), covar=tensor([0.0420, 0.0714, 0.1928, 0.0615, 0.3839, 0.0540, 0.0748, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0394, 0.0482, 0.0340, 0.0396, 0.0420, 0.0410, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:10:01,764 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 4.631e+02 5.571e+02 7.152e+02 1.493e+03, threshold=1.114e+03, percent-clipped=2.0 +2023-04-02 15:10:15,374 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:29,852 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:42,711 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:48,541 INFO [train.py:903] (1/4) Epoch 19, batch 5450, loss[loss=0.303, simple_loss=0.3622, pruned_loss=0.1219, over 19080.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.06666, over 3820912.45 frames. ], batch size: 69, lr: 4.31e-03, grad_scale: 16.0 +2023-04-02 15:11:39,531 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128395.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:11:50,173 INFO [train.py:903] (1/4) Epoch 19, batch 5500, loss[loss=0.2095, simple_loss=0.2979, pruned_loss=0.06053, over 19531.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.0657, over 3830900.93 frames. 
], batch size: 54, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:06,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.166e+02 6.121e+02 7.872e+02 1.632e+03, threshold=1.224e+03, percent-clipped=5.0 +2023-04-02 15:12:10,674 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:12,700 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 15:12:16,416 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2110, 1.5339, 2.0621, 1.6165, 2.8274, 4.6071, 4.5226, 5.0793], + device='cuda:1'), covar=tensor([0.1647, 0.3638, 0.3109, 0.2159, 0.0680, 0.0180, 0.0161, 0.0167], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0260, 0.0238, 0.0181, 0.0214, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 15:12:23,762 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:50,275 INFO [train.py:903] (1/4) Epoch 19, batch 5550, loss[loss=0.2272, simple_loss=0.3005, pruned_loss=0.07694, over 19399.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2895, pruned_loss=0.06567, over 3825632.36 frames. ], batch size: 48, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:56,490 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 15:13:44,858 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 15:13:51,307 INFO [train.py:903] (1/4) Epoch 19, batch 5600, loss[loss=0.2642, simple_loss=0.3351, pruned_loss=0.09663, over 18466.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2913, pruned_loss=0.06699, over 3819622.85 frames. ], batch size: 84, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:14:01,870 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128512.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:07,037 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.843e+02 5.877e+02 7.578e+02 1.194e+03, threshold=1.175e+03, percent-clipped=0.0 +2023-04-02 15:14:32,645 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:52,040 INFO [train.py:903] (1/4) Epoch 19, batch 5650, loss[loss=0.2638, simple_loss=0.329, pruned_loss=0.0993, over 19472.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2912, pruned_loss=0.06695, over 3825730.88 frames. ], batch size: 64, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:27,811 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:37,262 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 15:15:53,142 INFO [train.py:903] (1/4) Epoch 19, batch 5700, loss[loss=0.1982, simple_loss=0.2898, pruned_loss=0.05331, over 19618.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06628, over 3826186.56 frames. 
], batch size: 57, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:54,811 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:58,249 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:08,027 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.156e+02 6.108e+02 7.232e+02 1.309e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 15:16:24,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:53,482 INFO [train.py:903] (1/4) Epoch 19, batch 5750, loss[loss=0.2345, simple_loss=0.3059, pruned_loss=0.08152, over 19539.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2902, pruned_loss=0.06646, over 3808565.00 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:16:55,732 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 15:17:05,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 15:17:09,596 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 15:17:26,653 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-02 15:17:27,291 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:17:42,188 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4321, 1.4859, 1.6906, 1.7033, 1.2812, 1.6813, 1.7300, 1.5696], + device='cuda:1'), covar=tensor([0.3419, 0.2902, 0.1547, 0.1884, 0.3165, 0.1711, 0.4195, 0.2797], + device='cuda:1'), in_proj_covar=tensor([0.0874, 0.0935, 0.0700, 0.0923, 0.0859, 0.0794, 0.0833, 0.0767], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:17:55,080 INFO [train.py:903] (1/4) Epoch 19, batch 5800, loss[loss=0.183, simple_loss=0.2625, pruned_loss=0.05178, over 19788.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2905, pruned_loss=0.06695, over 3808938.18 frames. ], batch size: 49, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:18:10,461 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 4.671e+02 6.414e+02 7.787e+02 1.302e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-02 15:18:24,046 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7562, 2.1106, 1.7315, 1.5030, 2.0507, 1.4933, 1.5629, 1.9941], + device='cuda:1'), covar=tensor([0.0758, 0.0626, 0.0756, 0.0792, 0.0427, 0.0980, 0.0615, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0309, 0.0327, 0.0259, 0.0242, 0.0332, 0.0289, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:18:55,624 INFO [train.py:903] (1/4) Epoch 19, batch 5850, loss[loss=0.216, simple_loss=0.2986, pruned_loss=0.0667, over 19656.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06701, over 3821966.50 frames. 
], batch size: 58, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:19:20,624 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:48,469 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:50,613 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:55,909 INFO [train.py:903] (1/4) Epoch 19, batch 5900, loss[loss=0.1771, simple_loss=0.2523, pruned_loss=0.05098, over 19743.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2915, pruned_loss=0.06723, over 3821249.40 frames. ], batch size: 46, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:02,588 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 15:20:11,628 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.778e+02 5.849e+02 7.721e+02 1.320e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 15:20:22,220 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 15:20:56,175 INFO [train.py:903] (1/4) Epoch 19, batch 5950, loss[loss=0.2087, simple_loss=0.2947, pruned_loss=0.06138, over 19044.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2914, pruned_loss=0.06713, over 3812925.38 frames. ], batch size: 69, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:57,899 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 15:21:41,422 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:21:57,516 INFO [train.py:903] (1/4) Epoch 19, batch 6000, loss[loss=0.2132, simple_loss=0.3008, pruned_loss=0.06284, over 18267.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2921, pruned_loss=0.06728, over 3821404.13 frames. ], batch size: 83, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:57,516 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 15:22:07,158 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8083, 1.5689, 1.4396, 1.7960, 1.4710, 1.5894, 1.4028, 1.6334], + device='cuda:1'), covar=tensor([0.1128, 0.1278, 0.1571, 0.1071, 0.1424, 0.0575, 0.1639, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0355, 0.0308, 0.0249, 0.0298, 0.0248, 0.0300, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:22:12,608 INFO [train.py:937] (1/4) Epoch 19, validation: loss=0.1702, simple_loss=0.2702, pruned_loss=0.03514, over 944034.00 frames. +2023-04-02 15:22:12,609 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 15:22:17,233 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128908.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:22:28,003 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.136e+02 6.485e+02 9.043e+02 2.174e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 15:23:13,564 INFO [train.py:903] (1/4) Epoch 19, batch 6050, loss[loss=0.2001, simple_loss=0.2816, pruned_loss=0.05928, over 19831.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06693, over 3815244.19 frames. 
], batch size: 52, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:23:33,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8673, 1.9761, 2.1929, 2.5063, 1.8558, 2.3933, 2.2093, 2.0030], + device='cuda:1'), covar=tensor([0.3985, 0.3540, 0.1728, 0.2131, 0.3739, 0.1924, 0.4480, 0.3196], + device='cuda:1'), in_proj_covar=tensor([0.0875, 0.0934, 0.0699, 0.0923, 0.0859, 0.0793, 0.0831, 0.0766], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:24:14,706 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4502, 1.6049, 1.9319, 1.7253, 3.3058, 2.7341, 3.5938, 1.7168], + device='cuda:1'), covar=tensor([0.2453, 0.4237, 0.2724, 0.1995, 0.1421, 0.1896, 0.1343, 0.3910], + device='cuda:1'), in_proj_covar=tensor([0.0523, 0.0632, 0.0695, 0.0476, 0.0620, 0.0524, 0.0664, 0.0540], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:24:15,355 INFO [train.py:903] (1/4) Epoch 19, batch 6100, loss[loss=0.2214, simple_loss=0.3036, pruned_loss=0.06962, over 19684.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2908, pruned_loss=0.06678, over 3808849.19 frames. ], batch size: 53, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:30,769 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.994e+02 6.076e+02 7.380e+02 1.472e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 15:25:14,919 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3704, 3.9787, 2.5247, 3.5362, 0.6691, 3.8548, 3.7285, 3.8716], + device='cuda:1'), covar=tensor([0.0668, 0.0974, 0.2048, 0.0849, 0.4299, 0.0732, 0.0927, 0.1347], + device='cuda:1'), in_proj_covar=tensor([0.0493, 0.0397, 0.0486, 0.0343, 0.0399, 0.0422, 0.0413, 0.0450], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:25:15,134 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129053.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:25:15,830 INFO [train.py:903] (1/4) Epoch 19, batch 6150, loss[loss=0.2149, simple_loss=0.2992, pruned_loss=0.06526, over 19688.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2914, pruned_loss=0.06723, over 3795922.15 frames. ], batch size: 59, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:25:37,316 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 15:25:44,279 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 15:25:44,623 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:26:16,319 INFO [train.py:903] (1/4) Epoch 19, batch 6200, loss[loss=0.1917, simple_loss=0.2708, pruned_loss=0.0563, over 19731.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.0667, over 3806189.53 frames. 
], batch size: 51, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:26:32,067 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.839e+02 4.815e+02 6.250e+02 7.621e+02 1.523e+03, threshold=1.250e+03, percent-clipped=7.0 +2023-04-02 15:26:53,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8082, 1.9239, 2.1151, 2.3570, 1.6905, 2.2562, 2.1351, 1.9658], + device='cuda:1'), covar=tensor([0.3940, 0.3223, 0.1778, 0.2012, 0.3502, 0.1860, 0.4375, 0.3078], + device='cuda:1'), in_proj_covar=tensor([0.0874, 0.0934, 0.0701, 0.0922, 0.0858, 0.0793, 0.0829, 0.0766], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:27:04,016 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:07,707 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:17,154 INFO [train.py:903] (1/4) Epoch 19, batch 6250, loss[loss=0.2167, simple_loss=0.3018, pruned_loss=0.06578, over 19293.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2903, pruned_loss=0.06665, over 3801675.72 frames. ], batch size: 66, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:27:38,408 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:47,709 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 15:28:17,840 INFO [train.py:903] (1/4) Epoch 19, batch 6300, loss[loss=0.2044, simple_loss=0.2935, pruned_loss=0.05766, over 19519.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2902, pruned_loss=0.0664, over 3807593.55 frames. ], batch size: 56, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:28:33,725 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 5.181e+02 6.373e+02 8.503e+02 1.874e+03, threshold=1.275e+03, percent-clipped=7.0 +2023-04-02 15:29:17,495 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129252.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:29:19,488 INFO [train.py:903] (1/4) Epoch 19, batch 6350, loss[loss=0.2089, simple_loss=0.2872, pruned_loss=0.06526, over 19852.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2896, pruned_loss=0.06643, over 3815022.86 frames. ], batch size: 52, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:29:25,234 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:30:20,708 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 15:30:21,271 INFO [train.py:903] (1/4) Epoch 19, batch 6400, loss[loss=0.2324, simple_loss=0.3079, pruned_loss=0.07845, over 19278.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2901, pruned_loss=0.06622, over 3821910.79 frames. 
], batch size: 66, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:30:36,859 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.331e+02 4.991e+02 6.008e+02 7.727e+02 1.608e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-02 15:30:58,516 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1916, 3.6629, 2.1082, 2.3038, 3.1820, 1.7712, 1.5315, 2.3160], + device='cuda:1'), covar=tensor([0.1277, 0.0489, 0.1084, 0.0795, 0.0486, 0.1204, 0.0947, 0.0682], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0310, 0.0328, 0.0259, 0.0241, 0.0331, 0.0288, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:31:22,261 INFO [train.py:903] (1/4) Epoch 19, batch 6450, loss[loss=0.1839, simple_loss=0.2618, pruned_loss=0.053, over 19369.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2893, pruned_loss=0.06572, over 3829469.36 frames. ], batch size: 48, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:31:38,204 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129367.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:31:48,937 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6231, 2.4558, 1.7925, 1.6827, 2.1652, 1.4055, 1.5501, 2.0463], + device='cuda:1'), covar=tensor([0.1002, 0.0713, 0.1013, 0.0784, 0.0519, 0.1200, 0.0657, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0310, 0.0329, 0.0260, 0.0242, 0.0332, 0.0289, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:32:06,812 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 15:32:07,090 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7697, 1.4312, 1.5545, 1.5321, 3.3136, 1.0267, 2.3668, 3.7602], + device='cuda:1'), covar=tensor([0.0436, 0.2600, 0.2772, 0.1843, 0.0702, 0.2647, 0.1308, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0396, 0.0358, 0.0377, 0.0340, 0.0367, 0.0348, 0.0370, 0.0391], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:32:22,358 INFO [train.py:903] (1/4) Epoch 19, batch 6500, loss[loss=0.1678, simple_loss=0.2482, pruned_loss=0.04363, over 19382.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06594, over 3823729.17 frames. ], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:32:29,762 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 15:32:38,768 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.340e+02 6.897e+02 8.888e+02 1.987e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 15:32:47,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5838, 1.2551, 1.4611, 1.2423, 2.2234, 1.0719, 2.0186, 2.5150], + device='cuda:1'), covar=tensor([0.0646, 0.2556, 0.2567, 0.1618, 0.0851, 0.2034, 0.1049, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0396, 0.0357, 0.0377, 0.0340, 0.0367, 0.0348, 0.0370, 0.0391], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:32:56,844 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129431.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:33:24,203 INFO [train.py:903] (1/4) Epoch 19, batch 6550, loss[loss=0.1878, simple_loss=0.2658, pruned_loss=0.05493, over 19487.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.29, pruned_loss=0.06647, over 3820842.10 frames. ], batch size: 49, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:07,474 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4937, 2.1152, 1.6340, 1.5333, 1.9804, 1.3262, 1.4098, 1.8416], + device='cuda:1'), covar=tensor([0.0883, 0.0664, 0.0965, 0.0719, 0.0473, 0.1122, 0.0628, 0.0436], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0312, 0.0331, 0.0261, 0.0243, 0.0335, 0.0291, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:34:24,936 INFO [train.py:903] (1/4) Epoch 19, batch 6600, loss[loss=0.1974, simple_loss=0.2816, pruned_loss=0.05659, over 19650.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2907, pruned_loss=0.06675, over 3820418.47 frames. ], batch size: 55, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:37,628 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:34:40,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.539e+02 5.847e+02 6.807e+02 8.552e+02 1.538e+03, threshold=1.361e+03, percent-clipped=4.0 +2023-04-02 15:35:03,129 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-02 15:35:03,895 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6354, 1.5367, 1.5784, 2.0994, 1.7108, 2.0299, 1.9868, 1.7069], + device='cuda:1'), covar=tensor([0.0834, 0.0935, 0.1030, 0.0820, 0.0843, 0.0736, 0.0908, 0.0739], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0245, 0.0227, 0.0209, 0.0189, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 15:35:07,266 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:35:25,975 INFO [train.py:903] (1/4) Epoch 19, batch 6650, loss[loss=0.1775, simple_loss=0.2529, pruned_loss=0.05101, over 19362.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2906, pruned_loss=0.06684, over 3813606.30 frames. 
], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:35:32,242 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4685, 2.4314, 2.6975, 3.3667, 2.5388, 3.1785, 2.8569, 2.4505], + device='cuda:1'), covar=tensor([0.3893, 0.3929, 0.1678, 0.2180, 0.3988, 0.1882, 0.4110, 0.3100], + device='cuda:1'), in_proj_covar=tensor([0.0868, 0.0927, 0.0697, 0.0917, 0.0854, 0.0787, 0.0826, 0.0761], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:36:26,340 INFO [train.py:903] (1/4) Epoch 19, batch 6700, loss[loss=0.2241, simple_loss=0.3114, pruned_loss=0.06839, over 19694.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2898, pruned_loss=0.06602, over 3821364.25 frames. ], batch size: 59, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:36:42,872 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 4.923e+02 5.649e+02 7.598e+02 1.428e+03, threshold=1.130e+03, percent-clipped=1.0 +2023-04-02 15:36:51,015 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129623.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:36:58,737 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:37:03,804 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 15:37:04,580 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9284, 1.9029, 1.7724, 2.9411, 1.9685, 2.8683, 1.9541, 1.4346], + device='cuda:1'), covar=tensor([0.4690, 0.4150, 0.2867, 0.2922, 0.4283, 0.2038, 0.6126, 0.5248], + device='cuda:1'), in_proj_covar=tensor([0.0869, 0.0929, 0.0697, 0.0918, 0.0855, 0.0788, 0.0826, 0.0762], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:37:18,806 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129648.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:37:25,089 INFO [train.py:903] (1/4) Epoch 19, batch 6750, loss[loss=0.1873, simple_loss=0.2601, pruned_loss=0.05728, over 19738.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06667, over 3817876.18 frames. ], batch size: 45, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:20,254 INFO [train.py:903] (1/4) Epoch 19, batch 6800, loss[loss=0.2058, simple_loss=0.2809, pruned_loss=0.06537, over 19747.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.291, pruned_loss=0.06661, over 3832884.19 frames. ], batch size: 51, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:34,410 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.884e+02 6.226e+02 8.201e+02 1.689e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-02 15:39:04,981 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 15:39:05,441 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 15:39:08,363 INFO [train.py:903] (1/4) Epoch 20, batch 0, loss[loss=0.2519, simple_loss=0.3371, pruned_loss=0.08333, over 17179.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3371, pruned_loss=0.08333, over 17179.00 frames. 
], batch size: 101, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:39:08,363 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 15:39:19,739 INFO [train.py:937] (1/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2703, pruned_loss=0.03432, over 944034.00 frames. +2023-04-02 15:39:19,739 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 15:39:31,866 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 15:40:12,666 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129775.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:40:20,204 INFO [train.py:903] (1/4) Epoch 20, batch 50, loss[loss=0.2201, simple_loss=0.2921, pruned_loss=0.07403, over 18181.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2952, pruned_loss=0.06847, over 847815.54 frames. ], batch size: 40, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:40:51,322 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:40:54,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 15:41:03,010 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.543e+02 6.891e+02 8.835e+02 1.770e+03, threshold=1.378e+03, percent-clipped=8.0 +2023-04-02 15:41:05,588 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7434, 1.2727, 1.4913, 1.7005, 3.2931, 1.2293, 2.2863, 3.8045], + device='cuda:1'), covar=tensor([0.0500, 0.2847, 0.2856, 0.1707, 0.0733, 0.2437, 0.1373, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0394, 0.0355, 0.0375, 0.0337, 0.0365, 0.0345, 0.0369, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:41:20,211 INFO [train.py:903] (1/4) Epoch 20, batch 100, loss[loss=0.2465, simple_loss=0.3244, pruned_loss=0.08429, over 19527.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2955, pruned_loss=0.06873, over 1510798.61 frames. ], batch size: 54, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:41:31,359 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 15:42:14,112 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3033, 1.4629, 1.9026, 1.4416, 2.7133, 3.3987, 3.1930, 3.6007], + device='cuda:1'), covar=tensor([0.1744, 0.3831, 0.3359, 0.2537, 0.0776, 0.0277, 0.0267, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0316, 0.0346, 0.0261, 0.0237, 0.0181, 0.0213, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 15:42:21,224 INFO [train.py:903] (1/4) Epoch 20, batch 150, loss[loss=0.2585, simple_loss=0.343, pruned_loss=0.08698, over 19541.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2914, pruned_loss=0.066, over 2029734.47 frames. 
], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:42:30,221 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129890.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:43:03,459 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.799e+02 5.935e+02 7.467e+02 3.197e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 15:43:21,684 INFO [train.py:903] (1/4) Epoch 20, batch 200, loss[loss=0.2184, simple_loss=0.2937, pruned_loss=0.07153, over 19656.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2885, pruned_loss=0.06487, over 2432447.22 frames. ], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:43:22,857 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 15:43:36,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 15:44:13,731 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:44:23,549 INFO [train.py:903] (1/4) Epoch 20, batch 250, loss[loss=0.196, simple_loss=0.2704, pruned_loss=0.06076, over 19611.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2866, pruned_loss=0.06451, over 2744469.43 frames. ], batch size: 50, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:45:06,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.298e+02 6.354e+02 8.098e+02 1.543e+03, threshold=1.271e+03, percent-clipped=7.0 +2023-04-02 15:45:08,329 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:10,537 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:25,487 INFO [train.py:903] (1/4) Epoch 20, batch 300, loss[loss=0.2232, simple_loss=0.3034, pruned_loss=0.07146, over 19464.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2875, pruned_loss=0.0655, over 2987963.97 frames. ], batch size: 64, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:45:57,464 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2578, 3.7470, 3.8855, 3.9041, 1.6488, 3.6947, 3.2293, 3.6317], + device='cuda:1'), covar=tensor([0.1642, 0.1363, 0.0720, 0.0741, 0.5659, 0.1062, 0.0735, 0.1176], + device='cuda:1'), in_proj_covar=tensor([0.0766, 0.0719, 0.0923, 0.0805, 0.0817, 0.0679, 0.0555, 0.0856], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 15:46:25,937 INFO [train.py:903] (1/4) Epoch 20, batch 350, loss[loss=0.2175, simple_loss=0.305, pruned_loss=0.06498, over 19531.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2869, pruned_loss=0.06483, over 3183249.95 frames. ], batch size: 54, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:35,011 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 15:46:35,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:46:38,879 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4069, 1.3903, 1.6474, 1.3752, 2.9923, 1.0708, 2.3569, 3.4023], + device='cuda:1'), covar=tensor([0.0535, 0.2668, 0.2678, 0.1934, 0.0779, 0.2520, 0.1143, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0399, 0.0359, 0.0378, 0.0341, 0.0369, 0.0349, 0.0373, 0.0393], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:46:50,440 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4648, 1.3575, 1.3459, 1.8283, 1.4019, 1.6172, 1.6872, 1.4945], + device='cuda:1'), covar=tensor([0.0837, 0.0941, 0.1073, 0.0654, 0.0829, 0.0764, 0.0807, 0.0724], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0219, 0.0224, 0.0242, 0.0225, 0.0209, 0.0187, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 15:47:08,654 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.230e+02 6.397e+02 7.792e+02 1.393e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-02 15:47:26,635 INFO [train.py:903] (1/4) Epoch 20, batch 400, loss[loss=0.1776, simple_loss=0.2459, pruned_loss=0.05465, over 19755.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2869, pruned_loss=0.06487, over 3341905.53 frames. ], batch size: 46, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:47:41,009 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.34 vs. limit=5.0 +2023-04-02 15:47:42,982 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130146.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:47:53,127 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:48:14,973 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130171.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:48:27,090 INFO [train.py:903] (1/4) Epoch 20, batch 450, loss[loss=0.2013, simple_loss=0.291, pruned_loss=0.05583, over 19574.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2868, pruned_loss=0.06475, over 3458248.70 frames. ], batch size: 61, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:48:41,201 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3357, 2.2474, 2.0443, 1.8878, 1.7235, 1.8894, 0.7823, 1.3177], + device='cuda:1'), covar=tensor([0.0573, 0.0604, 0.0493, 0.0776, 0.1144, 0.1009, 0.1208, 0.0990], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0350, 0.0353, 0.0376, 0.0451, 0.0383, 0.0333, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 15:49:03,624 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 15:49:04,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-02 15:49:09,165 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.003e+02 6.443e+02 8.043e+02 1.786e+03, threshold=1.289e+03, percent-clipped=5.0 +2023-04-02 15:49:18,460 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3296, 1.3565, 1.4611, 1.4442, 1.7644, 1.8697, 1.7738, 0.6020], + device='cuda:1'), covar=tensor([0.2238, 0.3872, 0.2553, 0.1805, 0.1546, 0.2165, 0.1461, 0.4334], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0632, 0.0697, 0.0475, 0.0618, 0.0526, 0.0661, 0.0541], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 15:49:27,172 INFO [train.py:903] (1/4) Epoch 20, batch 500, loss[loss=0.1773, simple_loss=0.2615, pruned_loss=0.0465, over 19847.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2874, pruned_loss=0.06524, over 3539638.55 frames. ], batch size: 52, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:50,923 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-02 15:50:10,039 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:50:27,980 INFO [train.py:903] (1/4) Epoch 20, batch 550, loss[loss=0.2487, simple_loss=0.3214, pruned_loss=0.08798, over 19538.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2886, pruned_loss=0.06565, over 3606199.20 frames. ], batch size: 54, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:11,221 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.062e+02 6.327e+02 8.479e+02 2.088e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 15:51:28,461 INFO [train.py:903] (1/4) Epoch 20, batch 600, loss[loss=0.2284, simple_loss=0.3093, pruned_loss=0.07377, over 19361.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2905, pruned_loss=0.06666, over 3649822.96 frames. ], batch size: 70, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:44,829 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:06,341 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:08,702 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:15,670 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 15:52:16,042 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130370.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:30,044 INFO [train.py:903] (1/4) Epoch 20, batch 650, loss[loss=0.1993, simple_loss=0.2885, pruned_loss=0.05503, over 19542.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2902, pruned_loss=0.06647, over 3694883.10 frames. 
], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:52:40,950 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1560, 1.7205, 1.3595, 1.1111, 1.5576, 1.0450, 1.1669, 1.6075], + device='cuda:1'), covar=tensor([0.0760, 0.0724, 0.0970, 0.0763, 0.0476, 0.1198, 0.0580, 0.0387], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0313, 0.0330, 0.0261, 0.0244, 0.0336, 0.0290, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 15:52:56,051 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:53:13,834 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.222e+02 6.307e+02 8.250e+02 2.391e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 15:53:31,369 INFO [train.py:903] (1/4) Epoch 20, batch 700, loss[loss=0.2318, simple_loss=0.3062, pruned_loss=0.07873, over 19797.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2898, pruned_loss=0.0661, over 3722534.60 frames. ], batch size: 48, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:53:55,079 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:02,062 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:28,644 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:32,041 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:35,652 INFO [train.py:903] (1/4) Epoch 20, batch 750, loss[loss=0.2528, simple_loss=0.3185, pruned_loss=0.09355, over 12853.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.29, pruned_loss=0.06654, over 3746515.48 frames. ], batch size: 135, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:54:58,171 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 15:55:19,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.790e+02 6.042e+02 7.311e+02 1.890e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 15:55:28,441 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:37,192 INFO [train.py:903] (1/4) Epoch 20, batch 800, loss[loss=0.2828, simple_loss=0.3368, pruned_loss=0.1143, over 13213.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2908, pruned_loss=0.06671, over 3754613.14 frames. ], batch size: 135, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:55:53,534 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 15:55:58,692 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:59,928 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1889, 1.3565, 1.8261, 1.4421, 2.9228, 4.4343, 4.3584, 4.9561], + device='cuda:1'), covar=tensor([0.1691, 0.3914, 0.3439, 0.2433, 0.0651, 0.0259, 0.0194, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0316, 0.0347, 0.0262, 0.0238, 0.0182, 0.0213, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 15:56:21,753 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130566.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:27,694 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4077, 1.4030, 1.7247, 1.6420, 2.5214, 2.3039, 2.6862, 1.0675], + device='cuda:1'), covar=tensor([0.2456, 0.4419, 0.2676, 0.1947, 0.1580, 0.2063, 0.1469, 0.4510], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0633, 0.0696, 0.0476, 0.0616, 0.0526, 0.0659, 0.0540], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 15:56:40,732 INFO [train.py:903] (1/4) Epoch 20, batch 850, loss[loss=0.2148, simple_loss=0.3007, pruned_loss=0.06448, over 19785.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.29, pruned_loss=0.06591, over 3786993.33 frames. ], batch size: 56, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:56:41,930 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:57:07,826 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7326, 1.5319, 1.6100, 2.2274, 1.6511, 1.9684, 2.0123, 1.7539], + device='cuda:1'), covar=tensor([0.0808, 0.0956, 0.1008, 0.0682, 0.0823, 0.0738, 0.0833, 0.0729], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0220, 0.0226, 0.0243, 0.0227, 0.0209, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 15:57:25,283 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.920e+02 5.760e+02 7.852e+02 1.760e+03, threshold=1.152e+03, percent-clipped=6.0 +2023-04-02 15:57:33,253 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 15:57:40,813 INFO [train.py:903] (1/4) Epoch 20, batch 900, loss[loss=0.2248, simple_loss=0.301, pruned_loss=0.07433, over 19624.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2906, pruned_loss=0.06669, over 3802387.45 frames. ], batch size: 61, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:44,423 INFO [train.py:903] (1/4) Epoch 20, batch 950, loss[loss=0.24, simple_loss=0.3199, pruned_loss=0.08006, over 19306.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.06609, over 3814951.85 frames. ], batch size: 66, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:47,640 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 15:59:28,725 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.928e+02 5.917e+02 7.294e+02 1.421e+03, threshold=1.183e+03, percent-clipped=3.0 +2023-04-02 15:59:46,665 INFO [train.py:903] (1/4) Epoch 20, batch 1000, loss[loss=0.2219, simple_loss=0.3125, pruned_loss=0.06562, over 19683.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2909, pruned_loss=0.06605, over 3823154.55 frames. ], batch size: 58, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:59:48,251 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:59:50,495 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:03,656 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:16,572 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 16:00:18,688 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:20,900 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:38,544 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 16:00:48,485 INFO [train.py:903] (1/4) Epoch 20, batch 1050, loss[loss=0.229, simple_loss=0.3106, pruned_loss=0.07366, over 19539.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2921, pruned_loss=0.06712, over 3814874.61 frames. ], batch size: 56, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:01:02,854 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:10,716 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:20,583 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 16:01:33,077 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.731e+02 5.562e+02 6.742e+02 8.268e+02 2.102e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-04-02 16:01:49,837 INFO [train.py:903] (1/4) Epoch 20, batch 1100, loss[loss=0.2553, simple_loss=0.3192, pruned_loss=0.09567, over 13393.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2922, pruned_loss=0.06749, over 3806442.35 frames. ], batch size: 137, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:27,183 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:02:36,683 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 16:02:47,563 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-02 16:02:52,369 INFO [train.py:903] (1/4) Epoch 20, batch 1150, loss[loss=0.1655, simple_loss=0.2403, pruned_loss=0.0453, over 19704.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06696, over 3804674.81 frames. 
], batch size: 45, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:03:26,707 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:27,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:33,827 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130915.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:39,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 5.063e+02 6.056e+02 7.993e+02 1.743e+03, threshold=1.211e+03, percent-clipped=5.0 +2023-04-02 16:03:50,400 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:55,680 INFO [train.py:903] (1/4) Epoch 20, batch 1200, loss[loss=0.2324, simple_loss=0.3047, pruned_loss=0.08009, over 17286.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06678, over 3808974.91 frames. ], batch size: 101, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:04:23,988 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 16:04:56,110 INFO [train.py:903] (1/4) Epoch 20, batch 1250, loss[loss=0.1904, simple_loss=0.2663, pruned_loss=0.05725, over 19851.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.291, pruned_loss=0.06681, over 3805417.38 frames. ], batch size: 52, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:05:42,763 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.121e+02 6.297e+02 7.673e+02 2.016e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 16:05:50,221 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:05:52,263 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1475, 1.2459, 1.6630, 1.0660, 2.4528, 3.3433, 3.0120, 3.5282], + device='cuda:1'), covar=tensor([0.1627, 0.3770, 0.3353, 0.2557, 0.0611, 0.0191, 0.0222, 0.0239], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0317, 0.0348, 0.0262, 0.0238, 0.0183, 0.0213, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 16:05:58,785 INFO [train.py:903] (1/4) Epoch 20, batch 1300, loss[loss=0.1875, simple_loss=0.2807, pruned_loss=0.04713, over 19657.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2917, pruned_loss=0.06689, over 3801452.93 frames. ], batch size: 53, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:06:12,045 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:06:32,914 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7872, 4.2979, 4.4788, 4.4941, 1.5728, 4.1504, 3.6451, 4.1624], + device='cuda:1'), covar=tensor([0.1705, 0.0866, 0.0643, 0.0682, 0.6209, 0.0912, 0.0680, 0.1214], + device='cuda:1'), in_proj_covar=tensor([0.0758, 0.0716, 0.0917, 0.0799, 0.0815, 0.0675, 0.0551, 0.0856], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 16:06:59,469 INFO [train.py:903] (1/4) Epoch 20, batch 1350, loss[loss=0.2295, simple_loss=0.3116, pruned_loss=0.07363, over 19665.00 frames. 
], tot_loss[loss=0.2117, simple_loss=0.2902, pruned_loss=0.06658, over 3812771.02 frames. ], batch size: 58, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:07:19,071 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 16:07:43,038 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131117.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:07:44,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.090e+02 6.517e+02 8.267e+02 2.193e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-02 16:08:02,288 INFO [train.py:903] (1/4) Epoch 20, batch 1400, loss[loss=0.2196, simple_loss=0.2852, pruned_loss=0.07697, over 19737.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2897, pruned_loss=0.06635, over 3814347.74 frames. ], batch size: 45, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:08:15,129 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2688, 1.1937, 1.2345, 1.3512, 1.0290, 1.3608, 1.3660, 1.2598], + device='cuda:1'), covar=tensor([0.0875, 0.0986, 0.1063, 0.0678, 0.0854, 0.0807, 0.0794, 0.0791], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0243, 0.0226, 0.0209, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 16:08:15,156 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:42,763 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:50,688 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:03,834 INFO [train.py:903] (1/4) Epoch 20, batch 1450, loss[loss=0.1777, simple_loss=0.2533, pruned_loss=0.05105, over 19767.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2898, pruned_loss=0.06609, over 3822276.14 frames. ], batch size: 46, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:09:06,068 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 16:09:14,494 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:16,703 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:21,595 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131196.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:50,840 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.028e+02 6.181e+02 7.641e+02 1.699e+03, threshold=1.236e+03, percent-clipped=6.0 +2023-04-02 16:10:06,715 INFO [train.py:903] (1/4) Epoch 20, batch 1500, loss[loss=0.1931, simple_loss=0.2747, pruned_loss=0.05576, over 19532.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2893, pruned_loss=0.06571, over 3828604.59 frames. 
], batch size: 54, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:10:21,878 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6115, 4.1873, 2.7055, 3.7268, 1.2713, 4.1274, 4.0070, 4.1739], + device='cuda:1'), covar=tensor([0.0581, 0.0972, 0.1921, 0.0822, 0.3531, 0.0714, 0.0803, 0.1005], + device='cuda:1'), in_proj_covar=tensor([0.0494, 0.0397, 0.0485, 0.0345, 0.0400, 0.0423, 0.0416, 0.0447], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:10:49,079 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.29 vs. limit=5.0 +2023-04-02 16:11:07,004 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:07,712 INFO [train.py:903] (1/4) Epoch 20, batch 1550, loss[loss=0.2397, simple_loss=0.3145, pruned_loss=0.08239, over 19539.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2889, pruned_loss=0.06553, over 3843539.61 frames. ], batch size: 56, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:11:29,260 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:38,410 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:53,762 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 5.087e+02 6.243e+02 7.473e+02 1.350e+03, threshold=1.249e+03, percent-clipped=1.0 +2023-04-02 16:11:58,738 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:12:08,235 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5550, 1.1978, 1.3030, 1.2078, 2.2178, 0.9612, 2.0724, 2.4910], + device='cuda:1'), covar=tensor([0.0664, 0.2691, 0.2966, 0.1672, 0.0860, 0.2120, 0.1035, 0.0455], + device='cuda:1'), in_proj_covar=tensor([0.0399, 0.0357, 0.0377, 0.0339, 0.0368, 0.0348, 0.0371, 0.0393], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:12:10,215 INFO [train.py:903] (1/4) Epoch 20, batch 1600, loss[loss=0.1652, simple_loss=0.2488, pruned_loss=0.04076, over 19469.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2897, pruned_loss=0.06591, over 3831072.46 frames. ], batch size: 49, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:12:36,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 16:13:12,822 INFO [train.py:903] (1/4) Epoch 20, batch 1650, loss[loss=0.2017, simple_loss=0.2842, pruned_loss=0.05961, over 19713.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2891, pruned_loss=0.0656, over 3833449.33 frames. ], batch size: 63, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:13:59,215 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.697e+02 5.218e+02 6.304e+02 8.075e+02 1.501e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 16:14:08,599 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:15,058 INFO [train.py:903] (1/4) Epoch 20, batch 1700, loss[loss=0.196, simple_loss=0.2727, pruned_loss=0.05968, over 19793.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.06674, over 3836519.51 frames. 
], batch size: 48, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:14:17,648 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:55,857 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 16:15:16,165 INFO [train.py:903] (1/4) Epoch 20, batch 1750, loss[loss=0.2234, simple_loss=0.3046, pruned_loss=0.07114, over 19777.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2905, pruned_loss=0.06682, over 3835280.01 frames. ], batch size: 56, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:15:52,703 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131510.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:16:02,698 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.359e+02 6.412e+02 7.691e+02 1.507e+03, threshold=1.282e+03, percent-clipped=3.0 +2023-04-02 16:16:18,696 INFO [train.py:903] (1/4) Epoch 20, batch 1800, loss[loss=0.1991, simple_loss=0.2848, pruned_loss=0.05671, over 19612.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06678, over 3829863.67 frames. ], batch size: 57, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:16:24,467 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131536.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:17:16,067 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 16:17:22,070 INFO [train.py:903] (1/4) Epoch 20, batch 1850, loss[loss=0.2115, simple_loss=0.2855, pruned_loss=0.06876, over 19482.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2921, pruned_loss=0.06703, over 3824415.01 frames. ], batch size: 49, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:17:33,624 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9078, 1.5675, 1.8834, 1.7224, 4.3968, 1.0256, 2.5175, 4.8205], + device='cuda:1'), covar=tensor([0.0443, 0.2790, 0.2636, 0.1900, 0.0784, 0.2740, 0.1463, 0.0171], + device='cuda:1'), in_proj_covar=tensor([0.0396, 0.0355, 0.0375, 0.0337, 0.0366, 0.0346, 0.0370, 0.0390], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:17:54,261 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 16:18:09,351 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3101, 1.3529, 1.7984, 1.5735, 2.5823, 2.0545, 2.6842, 1.1311], + device='cuda:1'), covar=tensor([0.2732, 0.4625, 0.2827, 0.2127, 0.1662, 0.2523, 0.1714, 0.4764], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0631, 0.0694, 0.0474, 0.0612, 0.0526, 0.0658, 0.0539], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:18:09,980 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.826e+02 6.634e+02 9.045e+02 2.049e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-02 16:18:25,161 INFO [train.py:903] (1/4) Epoch 20, batch 1900, loss[loss=0.2333, simple_loss=0.3098, pruned_loss=0.07842, over 19493.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2918, pruned_loss=0.06712, over 3819963.59 frames. ], batch size: 64, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:18:40,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-02 16:18:45,485 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 16:18:47,959 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:19:10,725 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 16:19:25,446 INFO [train.py:903] (1/4) Epoch 20, batch 1950, loss[loss=0.1962, simple_loss=0.2847, pruned_loss=0.05383, over 19517.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2925, pruned_loss=0.06737, over 3827553.28 frames. ], batch size: 64, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:19:49,191 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9414, 1.3089, 1.0532, 0.9264, 1.1933, 0.8839, 0.9742, 1.2089], + device='cuda:1'), covar=tensor([0.0719, 0.0673, 0.0788, 0.0646, 0.0439, 0.1029, 0.0502, 0.0401], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0316, 0.0335, 0.0263, 0.0248, 0.0337, 0.0293, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:20:03,223 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8630, 1.3329, 1.0375, 0.9156, 1.1364, 0.9354, 0.8897, 1.2063], + device='cuda:1'), covar=tensor([0.0651, 0.0824, 0.1134, 0.0769, 0.0582, 0.1319, 0.0648, 0.0497], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0315, 0.0335, 0.0263, 0.0247, 0.0336, 0.0292, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:20:13,300 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.129e+02 6.435e+02 8.344e+02 2.370e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 16:20:28,633 INFO [train.py:903] (1/4) Epoch 20, batch 2000, loss[loss=0.2009, simple_loss=0.2781, pruned_loss=0.06188, over 19840.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2927, pruned_loss=0.06775, over 3813152.00 frames. ], batch size: 52, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:18,590 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:25,583 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 16:21:28,782 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131778.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:33,329 INFO [train.py:903] (1/4) Epoch 20, batch 2050, loss[loss=0.2025, simple_loss=0.2915, pruned_loss=0.05673, over 18811.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.0669, over 3812367.74 frames. ], batch size: 74, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:45,617 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 16:21:46,760 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 16:22:07,707 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 16:22:22,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 4.827e+02 6.005e+02 7.777e+02 1.829e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 16:22:35,844 INFO [train.py:903] (1/4) Epoch 20, batch 2100, loss[loss=0.1822, simple_loss=0.2574, pruned_loss=0.05344, over 19796.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.291, pruned_loss=0.0668, over 3815412.02 frames. ], batch size: 45, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:22:39,934 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7629, 1.8652, 2.1298, 2.3293, 1.7569, 2.2258, 2.1985, 1.9095], + device='cuda:1'), covar=tensor([0.3860, 0.3326, 0.1756, 0.2164, 0.3592, 0.1921, 0.4304, 0.3285], + device='cuda:1'), in_proj_covar=tensor([0.0881, 0.0943, 0.0703, 0.0925, 0.0863, 0.0796, 0.0828, 0.0773], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 16:23:03,392 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 16:23:03,509 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131854.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:26,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 16:23:27,226 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5910, 2.3284, 1.6153, 1.6158, 2.1443, 1.3082, 1.4791, 1.9727], + device='cuda:1'), covar=tensor([0.1128, 0.0795, 0.1104, 0.0809, 0.0542, 0.1346, 0.0748, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0316, 0.0335, 0.0262, 0.0247, 0.0339, 0.0294, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:23:29,482 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:37,620 INFO [train.py:903] (1/4) Epoch 20, batch 2150, loss[loss=0.2498, simple_loss=0.3256, pruned_loss=0.08699, over 18182.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2907, pruned_loss=0.06662, over 3814938.11 frames. ], batch size: 83, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:42,652 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:51,985 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:09,630 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:26,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.118e+02 6.039e+02 8.265e+02 1.505e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 16:24:39,694 INFO [train.py:903] (1/4) Epoch 20, batch 2200, loss[loss=0.2319, simple_loss=0.3127, pruned_loss=0.07555, over 19371.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2906, pruned_loss=0.06641, over 3821327.44 frames. 
], batch size: 70, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:24:40,112 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:10,258 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7501, 4.2655, 4.4836, 4.4855, 1.7393, 4.2176, 3.6421, 4.2071], + device='cuda:1'), covar=tensor([0.1602, 0.0738, 0.0570, 0.0597, 0.5485, 0.0846, 0.0669, 0.1022], + device='cuda:1'), in_proj_covar=tensor([0.0762, 0.0714, 0.0920, 0.0807, 0.0814, 0.0680, 0.0553, 0.0855], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 16:25:26,188 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:42,868 INFO [train.py:903] (1/4) Epoch 20, batch 2250, loss[loss=0.173, simple_loss=0.2464, pruned_loss=0.04975, over 19739.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2893, pruned_loss=0.06572, over 3827648.15 frames. ], batch size: 46, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:25:54,526 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8015, 3.2911, 3.3292, 3.3483, 1.3206, 3.1867, 2.8159, 3.0934], + device='cuda:1'), covar=tensor([0.1743, 0.0992, 0.0789, 0.0956, 0.5429, 0.1069, 0.0769, 0.1332], + device='cuda:1'), in_proj_covar=tensor([0.0762, 0.0714, 0.0918, 0.0806, 0.0813, 0.0678, 0.0553, 0.0855], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 16:26:31,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.368e+02 4.978e+02 6.272e+02 7.965e+02 1.499e+03, threshold=1.254e+03, percent-clipped=2.0 +2023-04-02 16:26:44,538 INFO [train.py:903] (1/4) Epoch 20, batch 2300, loss[loss=0.2037, simple_loss=0.2839, pruned_loss=0.06175, over 19530.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.29, pruned_loss=0.06645, over 3819794.31 frames. ], batch size: 54, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:52,921 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1223, 1.7675, 1.8748, 2.6215, 2.1657, 2.3104, 2.4238, 2.0369], + device='cuda:1'), covar=tensor([0.0791, 0.0915, 0.0996, 0.0884, 0.0824, 0.0795, 0.0824, 0.0692], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0243, 0.0228, 0.0211, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 16:26:58,039 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 16:27:05,365 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132049.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:34,690 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:47,302 INFO [train.py:903] (1/4) Epoch 20, batch 2350, loss[loss=0.2646, simple_loss=0.3285, pruned_loss=0.1004, over 12716.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2905, pruned_loss=0.06659, over 3810230.52 frames. ], batch size: 136, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:28:26,805 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-02 16:28:35,982 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 4.795e+02 5.776e+02 7.778e+02 1.972e+03, threshold=1.155e+03, percent-clipped=8.0 +2023-04-02 16:28:42,737 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 16:28:49,775 INFO [train.py:903] (1/4) Epoch 20, batch 2400, loss[loss=0.2024, simple_loss=0.2863, pruned_loss=0.05925, over 19652.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06644, over 3816150.43 frames. ], batch size: 55, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:29:03,470 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:11,445 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132149.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:32,890 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:41,688 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:51,981 INFO [train.py:903] (1/4) Epoch 20, batch 2450, loss[loss=0.2348, simple_loss=0.311, pruned_loss=0.07935, over 18122.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2916, pruned_loss=0.06693, over 3817634.23 frames. ], batch size: 83, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:30:07,508 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-02 16:30:38,900 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:41,020 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.995e+02 6.263e+02 8.063e+02 1.363e+03, threshold=1.253e+03, percent-clipped=5.0 +2023-04-02 16:30:46,976 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132225.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:54,279 INFO [train.py:903] (1/4) Epoch 20, batch 2500, loss[loss=0.1845, simple_loss=0.2694, pruned_loss=0.04984, over 19619.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2931, pruned_loss=0.0678, over 3803533.68 frames. ], batch size: 50, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:31:11,670 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-02 16:31:15,980 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:31:56,367 INFO [train.py:903] (1/4) Epoch 20, batch 2550, loss[loss=0.2387, simple_loss=0.3155, pruned_loss=0.08096, over 19741.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2933, pruned_loss=0.06749, over 3808530.72 frames. ], batch size: 63, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:32:45,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.513e+02 4.985e+02 5.787e+02 8.066e+02 1.995e+03, threshold=1.157e+03, percent-clipped=4.0 +2023-04-02 16:32:52,686 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 16:32:58,515 INFO [train.py:903] (1/4) Epoch 20, batch 2600, loss[loss=0.2247, simple_loss=0.302, pruned_loss=0.07371, over 19469.00 frames. 
], tot_loss[loss=0.2129, simple_loss=0.2921, pruned_loss=0.06688, over 3800101.11 frames. ], batch size: 49, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:33:02,434 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:33:43,880 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4284, 1.3575, 1.4770, 1.5193, 2.9942, 1.2055, 2.3036, 3.4289], + device='cuda:1'), covar=tensor([0.0516, 0.2734, 0.2912, 0.1844, 0.0742, 0.2441, 0.1295, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0397, 0.0356, 0.0376, 0.0340, 0.0367, 0.0347, 0.0371, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:33:44,023 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3998, 1.5043, 1.7935, 1.6038, 2.3879, 2.1901, 2.4768, 1.0247], + device='cuda:1'), covar=tensor([0.2509, 0.4226, 0.2550, 0.1956, 0.1599, 0.2090, 0.1527, 0.4453], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0632, 0.0698, 0.0477, 0.0616, 0.0526, 0.0660, 0.0541], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:33:45,144 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9620, 1.7346, 1.5860, 1.8931, 1.6635, 1.7293, 1.6335, 1.8093], + device='cuda:1'), covar=tensor([0.1059, 0.1494, 0.1521, 0.1141, 0.1404, 0.0532, 0.1325, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0357, 0.0310, 0.0251, 0.0301, 0.0249, 0.0305, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:34:01,396 INFO [train.py:903] (1/4) Epoch 20, batch 2650, loss[loss=0.2388, simple_loss=0.3222, pruned_loss=0.07775, over 18740.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2931, pruned_loss=0.06732, over 3796494.18 frames. ], batch size: 74, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:34:15,342 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:23,152 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 16:34:26,516 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 16:34:44,196 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132416.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:50,448 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.865e+02 6.149e+02 7.368e+02 1.585e+03, threshold=1.230e+03, percent-clipped=4.0 +2023-04-02 16:35:04,062 INFO [train.py:903] (1/4) Epoch 20, batch 2700, loss[loss=0.1917, simple_loss=0.2714, pruned_loss=0.05595, over 19832.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2928, pruned_loss=0.06744, over 3813343.40 frames. 
], batch size: 52, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:35:33,672 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6638, 1.4122, 1.4934, 2.1871, 1.6346, 1.8948, 1.8824, 1.7310], + device='cuda:1'), covar=tensor([0.0861, 0.0977, 0.1020, 0.0776, 0.0908, 0.0786, 0.0874, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0219, 0.0223, 0.0241, 0.0227, 0.0210, 0.0186, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 16:36:06,698 INFO [train.py:903] (1/4) Epoch 20, batch 2750, loss[loss=0.2265, simple_loss=0.3069, pruned_loss=0.07303, over 18583.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2926, pruned_loss=0.06722, over 3818975.39 frames. ], batch size: 74, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:36:39,128 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132508.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:36:55,655 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.186e+02 6.180e+02 7.968e+02 1.505e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-02 16:37:07,844 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:37:08,656 INFO [train.py:903] (1/4) Epoch 20, batch 2800, loss[loss=0.2101, simple_loss=0.298, pruned_loss=0.06112, over 18131.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2917, pruned_loss=0.06667, over 3820795.16 frames. ], batch size: 83, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:37:39,113 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7278, 1.6829, 1.5641, 1.4008, 1.3011, 1.4076, 0.2840, 0.6211], + device='cuda:1'), covar=tensor([0.0594, 0.0620, 0.0392, 0.0615, 0.1204, 0.0711, 0.1209, 0.1084], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0353, 0.0356, 0.0381, 0.0456, 0.0385, 0.0333, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:37:50,883 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-04-02 16:38:13,010 INFO [train.py:903] (1/4) Epoch 20, batch 2850, loss[loss=0.1979, simple_loss=0.2779, pruned_loss=0.05899, over 19628.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2916, pruned_loss=0.0668, over 3815500.41 frames. 
], batch size: 50, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:38:22,443 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:38:29,077 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1278, 1.3227, 1.4393, 1.2993, 2.7299, 1.0953, 2.1166, 3.0736], + device='cuda:1'), covar=tensor([0.0538, 0.2561, 0.2747, 0.1800, 0.0728, 0.2314, 0.1186, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0399, 0.0356, 0.0376, 0.0339, 0.0369, 0.0347, 0.0371, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:38:46,039 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8080, 1.3590, 1.5735, 1.4402, 3.3399, 1.1900, 2.5436, 3.7816], + device='cuda:1'), covar=tensor([0.0456, 0.2670, 0.2683, 0.1933, 0.0732, 0.2462, 0.1124, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0399, 0.0357, 0.0376, 0.0339, 0.0369, 0.0347, 0.0371, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:38:52,867 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:39:01,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 4.861e+02 5.815e+02 7.642e+02 3.357e+03, threshold=1.163e+03, percent-clipped=7.0 +2023-04-02 16:39:14,635 INFO [train.py:903] (1/4) Epoch 20, batch 2900, loss[loss=0.2534, simple_loss=0.3319, pruned_loss=0.08749, over 19617.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2911, pruned_loss=0.06638, over 3824775.06 frames. ], batch size: 61, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:39:14,672 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 16:40:13,481 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132678.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:40:18,627 INFO [train.py:903] (1/4) Epoch 20, batch 2950, loss[loss=0.253, simple_loss=0.3244, pruned_loss=0.09079, over 19583.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2902, pruned_loss=0.06568, over 3825686.48 frames. ], batch size: 61, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:09,138 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.627e+02 5.679e+02 7.371e+02 2.153e+03, threshold=1.136e+03, percent-clipped=3.0 +2023-04-02 16:41:20,817 INFO [train.py:903] (1/4) Epoch 20, batch 3000, loss[loss=0.2249, simple_loss=0.3025, pruned_loss=0.07362, over 19107.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2911, pruned_loss=0.06636, over 3828751.85 frames. ], batch size: 69, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:20,818 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 16:41:34,264 INFO [train.py:937] (1/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2697, pruned_loss=0.03462, over 944034.00 frames. +2023-04-02 16:41:34,266 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 16:41:40,250 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 16:42:10,873 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3344, 3.7676, 3.9089, 3.9274, 1.5405, 3.7154, 3.2817, 3.6406], + device='cuda:1'), covar=tensor([0.1657, 0.1028, 0.0712, 0.0770, 0.5886, 0.0993, 0.0677, 0.1257], + device='cuda:1'), in_proj_covar=tensor([0.0765, 0.0723, 0.0919, 0.0806, 0.0819, 0.0682, 0.0555, 0.0859], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 16:42:12,905 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:13,046 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:15,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 16:42:35,225 INFO [train.py:903] (1/4) Epoch 20, batch 3050, loss[loss=0.2228, simple_loss=0.2984, pruned_loss=0.07361, over 18840.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06648, over 3831051.97 frames. ], batch size: 74, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:42:41,468 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:43,550 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132789.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:13,096 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:24,234 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 4.785e+02 6.187e+02 7.720e+02 1.879e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-02 16:43:28,321 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9589, 2.0479, 2.2557, 2.6215, 1.8898, 2.4034, 2.2780, 2.0783], + device='cuda:1'), covar=tensor([0.4317, 0.4107, 0.1909, 0.2523, 0.4256, 0.2341, 0.4792, 0.3438], + device='cuda:1'), in_proj_covar=tensor([0.0886, 0.0948, 0.0708, 0.0931, 0.0866, 0.0800, 0.0837, 0.0776], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 16:43:37,016 INFO [train.py:903] (1/4) Epoch 20, batch 3100, loss[loss=0.1858, simple_loss=0.2755, pruned_loss=0.04803, over 19676.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2922, pruned_loss=0.06696, over 3827057.55 frames. 
], batch size: 53, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:43:39,678 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7443, 1.7445, 1.5899, 1.3953, 1.4362, 1.3791, 0.1391, 0.6074], + device='cuda:1'), covar=tensor([0.0655, 0.0604, 0.0420, 0.0647, 0.1240, 0.0779, 0.1224, 0.1115], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0352, 0.0357, 0.0383, 0.0457, 0.0386, 0.0334, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:44:33,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7707, 4.2833, 4.4998, 4.5034, 1.6943, 4.2115, 3.6609, 4.2334], + device='cuda:1'), covar=tensor([0.1721, 0.0833, 0.0655, 0.0697, 0.6138, 0.0853, 0.0715, 0.1263], + device='cuda:1'), in_proj_covar=tensor([0.0766, 0.0722, 0.0921, 0.0807, 0.0821, 0.0682, 0.0556, 0.0859], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 16:44:40,220 INFO [train.py:903] (1/4) Epoch 20, batch 3150, loss[loss=0.2979, simple_loss=0.3653, pruned_loss=0.1153, over 19260.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2917, pruned_loss=0.06685, over 3827656.18 frames. ], batch size: 66, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:45:07,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 16:45:11,718 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5400, 1.6097, 1.8454, 1.7845, 2.5894, 2.2185, 2.6485, 1.3759], + device='cuda:1'), covar=tensor([0.2041, 0.3585, 0.2203, 0.1630, 0.1219, 0.1830, 0.1254, 0.3741], + device='cuda:1'), in_proj_covar=tensor([0.0524, 0.0630, 0.0694, 0.0476, 0.0612, 0.0523, 0.0656, 0.0540], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:45:29,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.025e+02 5.951e+02 7.011e+02 1.371e+03, threshold=1.190e+03, percent-clipped=2.0 +2023-04-02 16:45:42,514 INFO [train.py:903] (1/4) Epoch 20, batch 3200, loss[loss=0.2901, simple_loss=0.3431, pruned_loss=0.1185, over 13651.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2919, pruned_loss=0.067, over 3812556.08 frames. ], batch size: 136, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:46:13,784 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1674, 2.9189, 2.3278, 2.1496, 2.0475, 2.5153, 0.9261, 2.0482], + device='cuda:1'), covar=tensor([0.0644, 0.0557, 0.0679, 0.1101, 0.1134, 0.1062, 0.1448, 0.1096], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0352, 0.0358, 0.0383, 0.0458, 0.0386, 0.0334, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 16:46:14,344 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 16:46:46,021 INFO [train.py:903] (1/4) Epoch 20, batch 3250, loss[loss=0.2817, simple_loss=0.3569, pruned_loss=0.1032, over 19662.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2914, pruned_loss=0.06666, over 3812346.20 frames. 
], batch size: 55, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:47:37,648 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.383e+02 4.867e+02 6.333e+02 8.818e+02 1.782e+03, threshold=1.267e+03, percent-clipped=7.0 +2023-04-02 16:47:37,829 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:47:49,136 INFO [train.py:903] (1/4) Epoch 20, batch 3300, loss[loss=0.1845, simple_loss=0.2543, pruned_loss=0.05737, over 19730.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2918, pruned_loss=0.0667, over 3803739.58 frames. ], batch size: 46, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:47:57,174 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 16:48:03,423 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5273, 1.5858, 2.0736, 1.7239, 3.1703, 4.0588, 3.9409, 4.4400], + device='cuda:1'), covar=tensor([0.1616, 0.3609, 0.3125, 0.2266, 0.0620, 0.0313, 0.0195, 0.0228], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0263, 0.0240, 0.0184, 0.0215, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 16:48:54,782 INFO [train.py:903] (1/4) Epoch 20, batch 3350, loss[loss=0.1865, simple_loss=0.2663, pruned_loss=0.05335, over 19288.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2901, pruned_loss=0.06533, over 3817268.31 frames. ], batch size: 44, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:49:27,434 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:49:45,519 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.956e+02 6.093e+02 7.175e+02 1.819e+03, threshold=1.219e+03, percent-clipped=1.0 +2023-04-02 16:49:57,527 INFO [train.py:903] (1/4) Epoch 20, batch 3400, loss[loss=0.2328, simple_loss=0.3102, pruned_loss=0.0777, over 19671.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2903, pruned_loss=0.0661, over 3820412.96 frames. ], batch size: 58, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:50:06,137 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133137.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:51:02,179 INFO [train.py:903] (1/4) Epoch 20, batch 3450, loss[loss=0.2122, simple_loss=0.2959, pruned_loss=0.06425, over 19590.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.29, pruned_loss=0.06619, over 3815283.49 frames. ], batch size: 52, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:51:08,031 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 16:51:41,631 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4117, 2.1095, 1.6851, 1.2535, 2.0350, 1.2807, 1.2493, 1.9612], + device='cuda:1'), covar=tensor([0.1043, 0.0751, 0.1021, 0.1045, 0.0527, 0.1278, 0.0790, 0.0434], + device='cuda:1'), in_proj_covar=tensor([0.0300, 0.0313, 0.0333, 0.0259, 0.0245, 0.0335, 0.0291, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 16:51:52,697 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 4.746e+02 5.634e+02 7.504e+02 1.582e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 16:51:54,124 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133223.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:52:04,224 INFO [train.py:903] (1/4) Epoch 20, batch 3500, loss[loss=0.2149, simple_loss=0.2994, pruned_loss=0.06524, over 19541.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2903, pruned_loss=0.06636, over 3815065.24 frames. ], batch size: 54, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:08,045 INFO [train.py:903] (1/4) Epoch 20, batch 3550, loss[loss=0.1814, simple_loss=0.2556, pruned_loss=0.05358, over 19782.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2899, pruned_loss=0.06609, over 3815980.36 frames. ], batch size: 47, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:15,828 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 16:53:52,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 16:53:58,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 5.017e+02 5.933e+02 7.980e+02 2.795e+03, threshold=1.187e+03, percent-clipped=11.0 +2023-04-02 16:54:10,372 INFO [train.py:903] (1/4) Epoch 20, batch 3600, loss[loss=0.1868, simple_loss=0.2733, pruned_loss=0.05011, over 19682.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2892, pruned_loss=0.0657, over 3818948.17 frames. ], batch size: 53, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:54:48,395 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9943, 2.0688, 2.3680, 2.7022, 2.0331, 2.5967, 2.4216, 2.1146], + device='cuda:1'), covar=tensor([0.4252, 0.3841, 0.1700, 0.2349, 0.4102, 0.2030, 0.4733, 0.3244], + device='cuda:1'), in_proj_covar=tensor([0.0883, 0.0945, 0.0705, 0.0928, 0.0864, 0.0799, 0.0834, 0.0771], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 16:55:15,910 INFO [train.py:903] (1/4) Epoch 20, batch 3650, loss[loss=0.22, simple_loss=0.3036, pruned_loss=0.06825, over 19749.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2896, pruned_loss=0.06584, over 3818113.37 frames. 
], batch size: 51, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:28,125 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133392.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 16:55:29,479 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:30,478 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:33,868 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1612, 1.9601, 1.8892, 2.8048, 1.8299, 2.4260, 2.2433, 2.1726], + device='cuda:1'), covar=tensor([0.0858, 0.0938, 0.1040, 0.0865, 0.0981, 0.0745, 0.0998, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0243, 0.0228, 0.0211, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 16:56:02,469 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133418.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:06,666 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 5.113e+02 6.456e+02 7.889e+02 1.610e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 16:56:18,442 INFO [train.py:903] (1/4) Epoch 20, batch 3700, loss[loss=0.2152, simple_loss=0.2951, pruned_loss=0.06767, over 19660.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2901, pruned_loss=0.06602, over 3830617.50 frames. ], batch size: 53, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:02,959 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 16:57:19,846 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:23,014 INFO [train.py:903] (1/4) Epoch 20, batch 3750, loss[loss=0.1796, simple_loss=0.2674, pruned_loss=0.04594, over 19854.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2911, pruned_loss=0.06661, over 3814688.43 frames. ], batch size: 52, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:34,899 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:39,349 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6397, 1.7453, 2.0038, 2.0318, 1.5596, 1.9456, 2.0394, 1.8866], + device='cuda:1'), covar=tensor([0.3926, 0.3366, 0.1816, 0.2152, 0.3543, 0.1976, 0.4604, 0.3216], + device='cuda:1'), in_proj_covar=tensor([0.0881, 0.0942, 0.0703, 0.0923, 0.0861, 0.0794, 0.0832, 0.0768], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 16:57:50,929 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:58:13,501 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.077e+02 5.935e+02 8.206e+02 1.595e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 16:58:24,812 INFO [train.py:903] (1/4) Epoch 20, batch 3800, loss[loss=0.3309, simple_loss=0.3765, pruned_loss=0.1427, over 17583.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2922, pruned_loss=0.06749, over 3817519.80 frames. 
], batch size: 101, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:58:58,190 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 16:59:30,204 INFO [train.py:903] (1/4) Epoch 20, batch 3850, loss[loss=0.2004, simple_loss=0.2796, pruned_loss=0.06063, over 19674.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.292, pruned_loss=0.06746, over 3816062.05 frames. ], batch size: 60, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:59:31,747 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7293, 1.6437, 1.5964, 2.1465, 1.8342, 1.9616, 2.0407, 1.7686], + device='cuda:1'), covar=tensor([0.0821, 0.0859, 0.0969, 0.0732, 0.0816, 0.0732, 0.0817, 0.0684], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0224, 0.0242, 0.0227, 0.0210, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 16:59:48,046 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-02 17:00:20,968 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.079e+02 6.219e+02 7.261e+02 1.808e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-02 17:00:32,687 INFO [train.py:903] (1/4) Epoch 20, batch 3900, loss[loss=0.1768, simple_loss=0.2548, pruned_loss=0.04945, over 19758.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.06691, over 3822876.08 frames. ], batch size: 47, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:01:21,169 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133670.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:01:37,398 INFO [train.py:903] (1/4) Epoch 20, batch 3950, loss[loss=0.2029, simple_loss=0.2886, pruned_loss=0.05863, over 17422.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2923, pruned_loss=0.06705, over 3822887.86 frames. ], batch size: 101, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:01:42,243 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 17:01:50,354 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 17:02:26,843 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.167e+02 6.244e+02 7.613e+02 1.189e+03, threshold=1.249e+03, percent-clipped=0.0 +2023-04-02 17:02:38,777 INFO [train.py:903] (1/4) Epoch 20, batch 4000, loss[loss=0.1942, simple_loss=0.2812, pruned_loss=0.05359, over 19683.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2917, pruned_loss=0.06625, over 3835675.58 frames. ], batch size: 60, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:02:43,760 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133736.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:02:46,991 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:02:53,752 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 17:03:26,438 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 17:03:41,981 INFO [train.py:903] (1/4) Epoch 20, batch 4050, loss[loss=0.2297, simple_loss=0.2915, pruned_loss=0.08398, over 19480.00 frames. 
], tot_loss[loss=0.2113, simple_loss=0.291, pruned_loss=0.06577, over 3833905.65 frames. ], batch size: 49, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:30,941 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.971e+02 6.413e+02 8.150e+02 1.897e+03, threshold=1.283e+03, percent-clipped=7.0 +2023-04-02 17:04:42,290 INFO [train.py:903] (1/4) Epoch 20, batch 4100, loss[loss=0.1961, simple_loss=0.2841, pruned_loss=0.05404, over 19670.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2913, pruned_loss=0.06581, over 3836699.46 frames. ], batch size: 58, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:46,021 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7689, 4.3751, 2.9813, 3.8248, 1.2829, 4.3007, 4.1624, 4.3382], + device='cuda:1'), covar=tensor([0.0611, 0.0883, 0.1751, 0.0748, 0.3728, 0.0607, 0.0844, 0.1062], + device='cuda:1'), in_proj_covar=tensor([0.0489, 0.0398, 0.0480, 0.0340, 0.0398, 0.0422, 0.0414, 0.0447], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:04:47,152 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:03,576 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.37 vs. limit=5.0 +2023-04-02 17:05:04,943 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 17:05:06,757 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133851.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:05:09,045 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:18,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 17:05:35,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9259, 2.0044, 2.2890, 2.5994, 1.8902, 2.3637, 2.3107, 2.0409], + device='cuda:1'), covar=tensor([0.4231, 0.3879, 0.1840, 0.2435, 0.4150, 0.2219, 0.4555, 0.3321], + device='cuda:1'), in_proj_covar=tensor([0.0887, 0.0948, 0.0707, 0.0931, 0.0868, 0.0801, 0.0836, 0.0773], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 17:05:45,970 INFO [train.py:903] (1/4) Epoch 20, batch 4150, loss[loss=0.1973, simple_loss=0.2771, pruned_loss=0.05876, over 19782.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2922, pruned_loss=0.06638, over 3833038.44 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:00,763 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. 
limit=2.0 +2023-04-02 17:06:19,872 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3478, 1.3896, 1.5610, 1.5014, 2.1482, 1.9380, 2.2612, 1.0400], + device='cuda:1'), covar=tensor([0.1950, 0.3513, 0.2229, 0.1537, 0.1242, 0.1761, 0.1144, 0.3620], + device='cuda:1'), in_proj_covar=tensor([0.0525, 0.0633, 0.0698, 0.0477, 0.0613, 0.0524, 0.0658, 0.0541], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 17:06:35,678 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.109e+02 4.757e+02 5.877e+02 6.653e+02 1.329e+03, threshold=1.175e+03, percent-clipped=1.0 +2023-04-02 17:06:47,888 INFO [train.py:903] (1/4) Epoch 20, batch 4200, loss[loss=0.174, simple_loss=0.2539, pruned_loss=0.04708, over 19772.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2907, pruned_loss=0.06556, over 3832244.90 frames. ], batch size: 47, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:51,422 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 17:07:12,196 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:07:30,719 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3116, 3.0566, 2.2797, 2.7426, 0.9722, 3.0297, 2.9048, 2.9807], + device='cuda:1'), covar=tensor([0.1128, 0.1319, 0.2035, 0.1064, 0.3550, 0.0951, 0.1092, 0.1431], + device='cuda:1'), in_proj_covar=tensor([0.0492, 0.0398, 0.0483, 0.0342, 0.0399, 0.0423, 0.0415, 0.0449], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:07:31,262 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 17:07:50,931 INFO [train.py:903] (1/4) Epoch 20, batch 4250, loss[loss=0.1971, simple_loss=0.2862, pruned_loss=0.05403, over 19384.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2898, pruned_loss=0.06493, over 3837560.38 frames. ], batch size: 70, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:08:08,172 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 17:08:20,529 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 17:08:32,197 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:33,406 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:36,363 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 17:08:41,180 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.008e+02 5.790e+02 6.980e+02 1.679e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-02 17:08:54,654 INFO [train.py:903] (1/4) Epoch 20, batch 4300, loss[loss=0.2178, simple_loss=0.2981, pruned_loss=0.06874, over 19785.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2897, pruned_loss=0.06475, over 3840330.46 frames. ], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:09:27,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-04-02 17:09:50,630 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 17:09:56,295 INFO [train.py:903] (1/4) Epoch 20, batch 4350, loss[loss=0.2195, simple_loss=0.2944, pruned_loss=0.07233, over 16026.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2897, pruned_loss=0.06478, over 3819254.30 frames. ], batch size: 35, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:10:30,274 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134107.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:10:32,506 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:10:48,203 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 5.216e+02 6.315e+02 8.361e+02 2.012e+03, threshold=1.263e+03, percent-clipped=10.0 +2023-04-02 17:10:57,690 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:00,857 INFO [train.py:903] (1/4) Epoch 20, batch 4400, loss[loss=0.2582, simple_loss=0.3269, pruned_loss=0.0948, over 19455.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2896, pruned_loss=0.06481, over 3829224.41 frames. ], batch size: 64, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:11:01,228 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134132.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:11:03,602 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:18,416 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-02 17:11:29,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 17:11:37,430 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 17:12:05,046 INFO [train.py:903] (1/4) Epoch 20, batch 4450, loss[loss=0.1952, simple_loss=0.2794, pruned_loss=0.05552, over 19521.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2901, pruned_loss=0.06499, over 3820062.01 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:12:36,938 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134207.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:12:56,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.942e+02 5.989e+02 7.537e+02 1.405e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-02 17:13:08,112 INFO [train.py:903] (1/4) Epoch 20, batch 4500, loss[loss=0.1927, simple_loss=0.2827, pruned_loss=0.05138, over 19750.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2905, pruned_loss=0.06504, over 3825988.80 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:13:08,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134232.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:14:10,220 INFO [train.py:903] (1/4) Epoch 20, batch 4550, loss[loss=0.2217, simple_loss=0.3007, pruned_loss=0.0714, over 17309.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2911, pruned_loss=0.06558, over 3815411.97 frames. 
], batch size: 101, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:14:19,195 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 17:14:42,992 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 17:15:02,206 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:04,302 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.805e+02 4.924e+02 5.886e+02 8.898e+02 2.816e+03, threshold=1.177e+03, percent-clipped=9.0 +2023-04-02 17:15:09,040 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:10,307 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5580, 1.4381, 1.3999, 1.9521, 1.5191, 1.7229, 1.8953, 1.6130], + device='cuda:1'), covar=tensor([0.0934, 0.0982, 0.1074, 0.0736, 0.0868, 0.0824, 0.0813, 0.0730], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0243, 0.0227, 0.0211, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 17:15:14,419 INFO [train.py:903] (1/4) Epoch 20, batch 4600, loss[loss=0.183, simple_loss=0.2572, pruned_loss=0.05436, over 19798.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2912, pruned_loss=0.06564, over 3823743.02 frames. ], batch size: 49, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:15:47,462 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:15,900 INFO [train.py:903] (1/4) Epoch 20, batch 4650, loss[loss=0.213, simple_loss=0.2928, pruned_loss=0.06655, over 19537.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2919, pruned_loss=0.06577, over 3821715.40 frames. ], batch size: 54, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:16:20,717 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134385.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:32,930 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 17:16:44,600 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 17:16:52,759 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134410.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:17:09,009 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 4.575e+02 5.939e+02 8.134e+02 1.295e+03, threshold=1.188e+03, percent-clipped=3.0 +2023-04-02 17:17:14,338 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-04-02 17:17:19,254 INFO [train.py:903] (1/4) Epoch 20, batch 4700, loss[loss=0.1941, simple_loss=0.2667, pruned_loss=0.06074, over 19352.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2909, pruned_loss=0.06541, over 3837309.03 frames. 
], batch size: 47, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:17:23,257 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4432, 2.2555, 1.7345, 1.5113, 2.0945, 1.3937, 1.2530, 1.9001], + device='cuda:1'), covar=tensor([0.1039, 0.0807, 0.1067, 0.0823, 0.0517, 0.1285, 0.0802, 0.0499], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0310, 0.0331, 0.0256, 0.0244, 0.0334, 0.0286, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:17:41,477 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 17:18:11,931 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:18:20,719 INFO [train.py:903] (1/4) Epoch 20, batch 4750, loss[loss=0.2635, simple_loss=0.3219, pruned_loss=0.1026, over 19683.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.292, pruned_loss=0.06669, over 3830993.18 frames. ], batch size: 53, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:18:58,067 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 17:19:09,029 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 17:19:14,952 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.001e+02 5.955e+02 7.090e+02 1.974e+03, threshold=1.191e+03, percent-clipped=7.0 +2023-04-02 17:19:15,472 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4083, 1.4618, 1.6577, 1.6576, 2.4151, 2.1711, 2.5368, 0.9494], + device='cuda:1'), covar=tensor([0.2421, 0.4310, 0.2632, 0.1916, 0.1570, 0.2168, 0.1459, 0.4686], + device='cuda:1'), in_proj_covar=tensor([0.0525, 0.0635, 0.0697, 0.0477, 0.0615, 0.0523, 0.0658, 0.0543], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 17:19:25,461 INFO [train.py:903] (1/4) Epoch 20, batch 4800, loss[loss=0.184, simple_loss=0.2692, pruned_loss=0.04941, over 19476.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2908, pruned_loss=0.0659, over 3820586.73 frames. ], batch size: 49, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:20:15,873 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4222, 1.3794, 1.3835, 1.8074, 1.3828, 1.6609, 1.5638, 1.4258], + device='cuda:1'), covar=tensor([0.0881, 0.0975, 0.1043, 0.0679, 0.0830, 0.0797, 0.0892, 0.0797], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0222, 0.0241, 0.0224, 0.0208, 0.0185, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 17:20:26,815 INFO [train.py:903] (1/4) Epoch 20, batch 4850, loss[loss=0.2089, simple_loss=0.2883, pruned_loss=0.06475, over 19762.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2909, pruned_loss=0.06597, over 3820930.75 frames. ], batch size: 54, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:20:49,982 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 17:21:13,058 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 17:21:18,741 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. 
Duration: 25.035 +2023-04-02 17:21:18,771 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 17:21:21,956 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.787e+02 5.710e+02 7.760e+02 1.554e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-02 17:21:31,371 INFO [train.py:903] (1/4) Epoch 20, batch 4900, loss[loss=0.1957, simple_loss=0.2618, pruned_loss=0.06475, over 16430.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2903, pruned_loss=0.06577, over 3828355.11 frames. ], batch size: 36, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:21:31,379 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 17:21:51,449 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 17:21:53,084 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2582, 2.2854, 2.5548, 3.0169, 2.2544, 2.9521, 2.5629, 2.2941], + device='cuda:1'), covar=tensor([0.4085, 0.3752, 0.1667, 0.2568, 0.4361, 0.2095, 0.4659, 0.3147], + device='cuda:1'), in_proj_covar=tensor([0.0883, 0.0947, 0.0707, 0.0926, 0.0866, 0.0801, 0.0834, 0.0773], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 17:22:14,374 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134666.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:21,056 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:33,362 INFO [train.py:903] (1/4) Epoch 20, batch 4950, loss[loss=0.2162, simple_loss=0.2991, pruned_loss=0.06667, over 19335.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2899, pruned_loss=0.06548, over 3820040.42 frames. ], batch size: 66, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:22:49,210 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 17:23:15,473 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 17:23:28,331 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 4.876e+02 5.783e+02 7.285e+02 1.244e+03, threshold=1.157e+03, percent-clipped=2.0 +2023-04-02 17:23:34,558 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:23:37,527 INFO [train.py:903] (1/4) Epoch 20, batch 5000, loss[loss=0.2419, simple_loss=0.3188, pruned_loss=0.08245, over 19494.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.29, pruned_loss=0.0654, over 3827243.07 frames. ], batch size: 64, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:23:45,512 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 17:23:56,643 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 17:24:06,096 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:37,396 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4169, 1.1544, 1.4524, 1.4949, 2.9841, 1.0090, 2.1788, 3.3234], + device='cuda:1'), covar=tensor([0.0498, 0.3033, 0.2946, 0.1755, 0.0731, 0.2575, 0.1273, 0.0313], + device='cuda:1'), in_proj_covar=tensor([0.0400, 0.0361, 0.0381, 0.0342, 0.0371, 0.0346, 0.0371, 0.0396], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:24:37,440 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134781.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:38,200 INFO [train.py:903] (1/4) Epoch 20, batch 5050, loss[loss=0.2038, simple_loss=0.2838, pruned_loss=0.06188, over 19681.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2893, pruned_loss=0.06517, over 3838969.21 frames. ], batch size: 53, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:24:44,151 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134786.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:56,610 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3430, 1.3937, 1.5200, 1.5286, 1.8668, 1.8862, 1.7823, 0.5627], + device='cuda:1'), covar=tensor([0.2653, 0.4599, 0.2881, 0.2132, 0.1636, 0.2445, 0.1441, 0.5162], + device='cuda:1'), in_proj_covar=tensor([0.0523, 0.0631, 0.0693, 0.0476, 0.0612, 0.0521, 0.0654, 0.0540], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 17:25:13,080 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 17:25:31,564 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.915e+02 6.341e+02 8.226e+02 2.739e+03, threshold=1.268e+03, percent-clipped=9.0 +2023-04-02 17:25:41,323 INFO [train.py:903] (1/4) Epoch 20, batch 5100, loss[loss=0.2153, simple_loss=0.2886, pruned_loss=0.07103, over 19787.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.06543, over 3820680.32 frames. ], batch size: 47, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:25:42,924 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6296, 1.7450, 1.6566, 2.6565, 1.7634, 2.3675, 1.8371, 1.4002], + device='cuda:1'), covar=tensor([0.4987, 0.4615, 0.2902, 0.2693, 0.4697, 0.2460, 0.6218, 0.5395], + device='cuda:1'), in_proj_covar=tensor([0.0882, 0.0946, 0.0707, 0.0926, 0.0865, 0.0799, 0.0834, 0.0773], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 17:25:50,464 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 17:25:53,843 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 17:25:58,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 17:26:26,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7100, 3.4494, 2.5973, 3.0871, 1.3687, 3.3497, 3.2183, 3.3007], + device='cuda:1'), covar=tensor([0.0898, 0.1042, 0.1881, 0.0876, 0.2943, 0.0810, 0.0947, 0.1301], + device='cuda:1'), in_proj_covar=tensor([0.0490, 0.0399, 0.0486, 0.0341, 0.0398, 0.0422, 0.0415, 0.0450], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:26:41,262 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134880.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:26:43,293 INFO [train.py:903] (1/4) Epoch 20, batch 5150, loss[loss=0.1971, simple_loss=0.2854, pruned_loss=0.05446, over 19659.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2881, pruned_loss=0.06465, over 3824444.97 frames. ], batch size: 60, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:26:55,956 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 17:27:07,089 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 17:27:32,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 17:27:37,090 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.618e+02 4.964e+02 6.321e+02 8.056e+02 1.479e+03, threshold=1.264e+03, percent-clipped=3.0 +2023-04-02 17:27:46,172 INFO [train.py:903] (1/4) Epoch 20, batch 5200, loss[loss=0.2083, simple_loss=0.2914, pruned_loss=0.0626, over 19595.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2888, pruned_loss=0.06474, over 3820002.39 frames. ], batch size: 52, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:28:00,851 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 17:28:33,749 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 17:28:46,969 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 17:28:49,186 INFO [train.py:903] (1/4) Epoch 20, batch 5250, loss[loss=0.2083, simple_loss=0.2965, pruned_loss=0.06002, over 19682.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.289, pruned_loss=0.06487, over 3825282.25 frames. ], batch size: 58, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:42,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.845e+02 5.899e+02 8.450e+02 1.811e+03, threshold=1.180e+03, percent-clipped=4.0 +2023-04-02 17:29:51,988 INFO [train.py:903] (1/4) Epoch 20, batch 5300, loss[loss=0.2082, simple_loss=0.2982, pruned_loss=0.05912, over 19612.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2876, pruned_loss=0.06432, over 3813022.75 frames. ], batch size: 57, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:59,113 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135037.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:04,856 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:11,581 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 17:30:24,963 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2644, 2.2749, 2.5443, 3.1128, 2.3135, 2.9682, 2.6209, 2.3444], + device='cuda:1'), covar=tensor([0.4102, 0.3855, 0.1724, 0.2328, 0.4275, 0.1987, 0.4453, 0.3056], + device='cuda:1'), in_proj_covar=tensor([0.0885, 0.0947, 0.0708, 0.0928, 0.0867, 0.0800, 0.0837, 0.0773], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 17:30:30,713 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:36,480 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:54,697 INFO [train.py:903] (1/4) Epoch 20, batch 5350, loss[loss=0.2494, simple_loss=0.3148, pruned_loss=0.09203, over 19603.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06543, over 3804348.65 frames. ], batch size: 52, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:29,771 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 17:31:45,417 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0961, 1.9232, 1.7990, 2.1191, 1.8552, 1.7870, 1.7524, 1.9685], + device='cuda:1'), covar=tensor([0.0991, 0.1440, 0.1330, 0.1016, 0.1359, 0.0523, 0.1240, 0.0672], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0350, 0.0304, 0.0247, 0.0296, 0.0247, 0.0302, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:31:48,522 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.248e+02 6.375e+02 8.534e+02 1.946e+03, threshold=1.275e+03, percent-clipped=9.0 +2023-04-02 17:31:57,764 INFO [train.py:903] (1/4) Epoch 20, batch 5400, loss[loss=0.2257, simple_loss=0.3051, pruned_loss=0.07313, over 19492.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06555, over 3824253.72 frames. ], batch size: 64, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:00,878 INFO [train.py:903] (1/4) Epoch 20, batch 5450, loss[loss=0.2028, simple_loss=0.2871, pruned_loss=0.05925, over 19679.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2898, pruned_loss=0.06558, over 3816657.55 frames. 
], batch size: 55, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:09,027 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135189.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:36,848 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0977, 1.2461, 1.6228, 0.9445, 2.3804, 2.9977, 2.7304, 3.2028], + device='cuda:1'), covar=tensor([0.1652, 0.3720, 0.3314, 0.2639, 0.0582, 0.0239, 0.0255, 0.0308], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0316, 0.0344, 0.0262, 0.0237, 0.0183, 0.0213, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 17:33:52,671 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135224.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:54,892 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.255e+02 6.010e+02 7.558e+02 1.824e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 17:34:03,097 INFO [train.py:903] (1/4) Epoch 20, batch 5500, loss[loss=0.2268, simple_loss=0.3169, pruned_loss=0.06836, over 19080.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2907, pruned_loss=0.06594, over 3821288.69 frames. ], batch size: 69, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:34:29,198 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 17:35:05,520 INFO [train.py:903] (1/4) Epoch 20, batch 5550, loss[loss=0.2107, simple_loss=0.2779, pruned_loss=0.07177, over 19805.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2903, pruned_loss=0.06559, over 3818845.31 frames. ], batch size: 48, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:35:13,926 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 17:35:47,229 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2923, 3.0712, 2.1751, 2.7782, 0.7601, 2.9974, 2.8747, 2.9343], + device='cuda:1'), covar=tensor([0.1164, 0.1253, 0.2048, 0.1060, 0.3728, 0.1012, 0.1142, 0.1505], + device='cuda:1'), in_proj_covar=tensor([0.0492, 0.0401, 0.0488, 0.0344, 0.0401, 0.0424, 0.0419, 0.0454], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:36:00,656 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.884e+02 6.418e+02 8.039e+02 2.322e+03, threshold=1.284e+03, percent-clipped=8.0 +2023-04-02 17:36:04,003 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 17:36:07,368 INFO [train.py:903] (1/4) Epoch 20, batch 5600, loss[loss=0.2163, simple_loss=0.2958, pruned_loss=0.06841, over 19774.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06568, over 3826640.75 frames. 
], batch size: 56, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:36:18,132 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135339.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:36:29,879 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3305, 1.3534, 1.5755, 1.5034, 2.2480, 2.0110, 2.3377, 0.9126], + device='cuda:1'), covar=tensor([0.2392, 0.4274, 0.2651, 0.1957, 0.1523, 0.2170, 0.1340, 0.4612], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0633, 0.0697, 0.0477, 0.0616, 0.0523, 0.0657, 0.0543], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 17:37:11,264 INFO [train.py:903] (1/4) Epoch 20, batch 5650, loss[loss=0.2023, simple_loss=0.2803, pruned_loss=0.06221, over 19586.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2894, pruned_loss=0.06551, over 3829976.13 frames. ], batch size: 52, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:01,372 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 17:38:05,717 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.892e+02 5.590e+02 6.975e+02 1.698e+03, threshold=1.118e+03, percent-clipped=3.0 +2023-04-02 17:38:12,543 INFO [train.py:903] (1/4) Epoch 20, batch 5700, loss[loss=0.1737, simple_loss=0.2502, pruned_loss=0.0486, over 19751.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.0656, over 3828695.19 frames. ], batch size: 46, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:47,545 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3658, 1.0747, 1.2708, 1.9841, 1.3990, 1.4096, 1.5420, 1.3131], + device='cuda:1'), covar=tensor([0.1255, 0.1790, 0.1383, 0.0866, 0.1165, 0.1399, 0.1327, 0.1117], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0222, 0.0225, 0.0243, 0.0227, 0.0211, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 17:38:48,713 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1709, 1.7896, 1.4652, 1.0505, 1.5927, 1.1091, 1.1908, 1.7729], + device='cuda:1'), covar=tensor([0.0799, 0.0739, 0.0982, 0.0952, 0.0507, 0.1297, 0.0625, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0311, 0.0331, 0.0257, 0.0244, 0.0333, 0.0289, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 17:39:14,115 INFO [train.py:903] (1/4) Epoch 20, batch 5750, loss[loss=0.2319, simple_loss=0.3062, pruned_loss=0.07877, over 19052.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2904, pruned_loss=0.0661, over 3808187.37 frames. ], batch size: 69, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:17,231 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 17:39:25,433 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 17:39:31,253 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 17:40:10,279 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 5.062e+02 5.931e+02 7.729e+02 1.708e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-02 17:40:18,212 INFO [train.py:903] (1/4) Epoch 20, batch 5800, loss[loss=0.2044, simple_loss=0.2934, pruned_loss=0.05774, over 19704.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2909, pruned_loss=0.06652, over 3782114.61 frames. ], batch size: 59, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:40:19,525 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:40:21,359 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 17:40:56,840 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-02 17:41:20,580 INFO [train.py:903] (1/4) Epoch 20, batch 5850, loss[loss=0.1638, simple_loss=0.2564, pruned_loss=0.03563, over 19841.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2901, pruned_loss=0.06623, over 3796255.30 frames. ], batch size: 52, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:41:35,405 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:03,219 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-02 17:42:09,020 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:15,863 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.765e+02 5.164e+02 6.346e+02 8.557e+02 2.066e+03, threshold=1.269e+03, percent-clipped=9.0 +2023-04-02 17:42:23,028 INFO [train.py:903] (1/4) Epoch 20, batch 5900, loss[loss=0.1763, simple_loss=0.2627, pruned_loss=0.04492, over 19778.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2893, pruned_loss=0.06613, over 3790040.67 frames. ], batch size: 51, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:42:25,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 17:42:43,057 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:43,129 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:46,334 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 17:43:19,368 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2944, 1.1902, 1.2453, 1.3398, 1.0849, 1.3643, 1.3617, 1.2516], + device='cuda:1'), covar=tensor([0.0935, 0.0999, 0.1084, 0.0677, 0.0851, 0.0816, 0.0826, 0.0785], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0244, 0.0227, 0.0212, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 17:43:24,694 INFO [train.py:903] (1/4) Epoch 20, batch 5950, loss[loss=0.2066, simple_loss=0.289, pruned_loss=0.06214, over 19546.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2909, pruned_loss=0.06701, over 3785305.16 frames. 
], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:00,301 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3184, 2.3563, 2.5721, 3.1345, 2.4506, 2.9867, 2.6453, 2.3192], + device='cuda:1'), covar=tensor([0.3988, 0.3818, 0.1754, 0.2299, 0.3967, 0.1995, 0.4172, 0.3153], + device='cuda:1'), in_proj_covar=tensor([0.0881, 0.0945, 0.0703, 0.0925, 0.0863, 0.0794, 0.0831, 0.0770], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 17:44:19,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.126e+02 6.708e+02 1.004e+03 2.382e+03, threshold=1.342e+03, percent-clipped=11.0 +2023-04-02 17:44:19,670 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 17:44:27,257 INFO [train.py:903] (1/4) Epoch 20, batch 6000, loss[loss=0.2106, simple_loss=0.2962, pruned_loss=0.06249, over 19748.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2905, pruned_loss=0.06672, over 3805369.83 frames. ], batch size: 63, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:27,257 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 17:44:39,927 INFO [train.py:937] (1/4) Epoch 20, validation: loss=0.1697, simple_loss=0.2697, pruned_loss=0.0349, over 944034.00 frames. +2023-04-02 17:44:39,928 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 17:45:41,586 INFO [train.py:903] (1/4) Epoch 20, batch 6050, loss[loss=0.2246, simple_loss=0.2987, pruned_loss=0.07529, over 19674.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2901, pruned_loss=0.06666, over 3812377.53 frames. ], batch size: 53, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:46:36,914 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 4.873e+02 5.806e+02 7.519e+02 1.887e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-04-02 17:46:43,930 INFO [train.py:903] (1/4) Epoch 20, batch 6100, loss[loss=0.2064, simple_loss=0.291, pruned_loss=0.06089, over 19517.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2904, pruned_loss=0.06721, over 3811477.98 frames. ], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:47:46,926 INFO [train.py:903] (1/4) Epoch 20, batch 6150, loss[loss=0.2194, simple_loss=0.2857, pruned_loss=0.07656, over 19756.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2906, pruned_loss=0.06679, over 3814945.02 frames. ], batch size: 47, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:48:15,340 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135904.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:16,095 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 17:48:41,891 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.701e+02 6.211e+02 7.118e+02 1.417e+03, threshold=1.242e+03, percent-clipped=3.0 +2023-04-02 17:48:45,710 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:49,677 INFO [train.py:903] (1/4) Epoch 20, batch 6200, loss[loss=0.2365, simple_loss=0.3173, pruned_loss=0.07791, over 19306.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2909, pruned_loss=0.06688, over 3817747.98 frames. 
], batch size: 66, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:49:18,736 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7721, 1.5606, 1.6009, 2.3281, 1.6604, 1.9973, 2.0758, 1.7849], + device='cuda:1'), covar=tensor([0.0885, 0.1007, 0.1053, 0.0743, 0.0916, 0.0835, 0.0925, 0.0739], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0243, 0.0226, 0.0211, 0.0186, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 17:49:52,113 INFO [train.py:903] (1/4) Epoch 20, batch 6250, loss[loss=0.2137, simple_loss=0.2982, pruned_loss=0.06464, over 19649.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06688, over 3827168.24 frames. ], batch size: 58, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:04,690 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:15,862 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:24,790 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 17:50:30,669 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136012.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:48,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 5.185e+02 6.758e+02 8.208e+02 2.074e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-02 17:50:55,356 INFO [train.py:903] (1/4) Epoch 20, batch 6300, loss[loss=0.217, simple_loss=0.3003, pruned_loss=0.06681, over 19696.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2905, pruned_loss=0.06709, over 3822703.03 frames. ], batch size: 60, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:51:26,752 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7250, 4.2952, 4.4797, 4.4554, 1.6568, 4.1813, 3.6470, 4.1791], + device='cuda:1'), covar=tensor([0.1691, 0.0707, 0.0572, 0.0652, 0.6140, 0.0877, 0.0735, 0.1128], + device='cuda:1'), in_proj_covar=tensor([0.0770, 0.0724, 0.0927, 0.0810, 0.0820, 0.0685, 0.0557, 0.0856], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 17:51:58,465 INFO [train.py:903] (1/4) Epoch 20, batch 6350, loss[loss=0.2484, simple_loss=0.3227, pruned_loss=0.087, over 19332.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2896, pruned_loss=0.06608, over 3820669.24 frames. ], batch size: 66, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:52:30,869 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:52:53,613 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.045e+02 5.932e+02 7.156e+02 1.987e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 17:53:01,247 INFO [train.py:903] (1/4) Epoch 20, batch 6400, loss[loss=0.2133, simple_loss=0.3091, pruned_loss=0.05874, over 19722.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2885, pruned_loss=0.06545, over 3823096.88 frames. 
], batch size: 59, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:53:36,374 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:54:02,569 INFO [train.py:903] (1/4) Epoch 20, batch 6450, loss[loss=0.2466, simple_loss=0.3204, pruned_loss=0.08637, over 19712.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2896, pruned_loss=0.06604, over 3829794.65 frames. ], batch size: 63, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:54:51,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 17:54:59,003 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.012e+02 6.099e+02 8.089e+02 3.011e+03, threshold=1.220e+03, percent-clipped=7.0 +2023-04-02 17:55:07,145 INFO [train.py:903] (1/4) Epoch 20, batch 6500, loss[loss=0.2408, simple_loss=0.3109, pruned_loss=0.08532, over 13677.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2896, pruned_loss=0.06593, over 3827725.57 frames. ], batch size: 136, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:55:12,970 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 17:55:44,588 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 17:56:09,977 INFO [train.py:903] (1/4) Epoch 20, batch 6550, loss[loss=0.1638, simple_loss=0.2418, pruned_loss=0.04292, over 19726.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2893, pruned_loss=0.06566, over 3818615.98 frames. ], batch size: 45, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:06,361 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.319e+02 6.594e+02 8.030e+02 1.579e+03, threshold=1.319e+03, percent-clipped=2.0 +2023-04-02 17:57:14,440 INFO [train.py:903] (1/4) Epoch 20, batch 6600, loss[loss=0.2072, simple_loss=0.2834, pruned_loss=0.06543, over 18652.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2888, pruned_loss=0.06536, over 3819721.36 frames. ], batch size: 41, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:28,427 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:41,122 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:45,655 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136356.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:53,972 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:16,829 INFO [train.py:903] (1/4) Epoch 20, batch 6650, loss[loss=0.2192, simple_loss=0.3059, pruned_loss=0.06625, over 19680.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.29, pruned_loss=0.06612, over 3809922.96 frames. ], batch size: 60, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:58:26,160 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:56,619 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-04-02 17:59:14,402 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.763e+02 5.879e+02 7.861e+02 2.647e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 17:59:22,072 INFO [train.py:903] (1/4) Epoch 20, batch 6700, loss[loss=0.2094, simple_loss=0.2937, pruned_loss=0.0625, over 19523.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.291, pruned_loss=0.06705, over 3809940.51 frames. ], batch size: 56, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:59:55,278 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:08,568 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136471.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:20,886 INFO [train.py:903] (1/4) Epoch 20, batch 6750, loss[loss=0.2413, simple_loss=0.3146, pruned_loss=0.08399, over 19111.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2915, pruned_loss=0.06728, over 3810853.29 frames. ], batch size: 69, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:00:45,732 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:01:11,794 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 5.547e+02 6.932e+02 9.039e+02 1.788e+03, threshold=1.386e+03, percent-clipped=9.0 +2023-04-02 18:01:19,069 INFO [train.py:903] (1/4) Epoch 20, batch 6800, loss[loss=0.2003, simple_loss=0.2772, pruned_loss=0.06173, over 19471.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2907, pruned_loss=0.06652, over 3810486.72 frames. ], batch size: 49, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:02:04,317 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 18:02:04,769 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 18:02:07,962 INFO [train.py:903] (1/4) Epoch 21, batch 0, loss[loss=0.2235, simple_loss=0.2955, pruned_loss=0.07576, over 19471.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2955, pruned_loss=0.07576, over 19471.00 frames. ], batch size: 49, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:02:07,962 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 18:02:18,720 INFO [train.py:937] (1/4) Epoch 21, validation: loss=0.1691, simple_loss=0.2696, pruned_loss=0.03427, over 944034.00 frames. +2023-04-02 18:02:18,721 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 18:02:30,981 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 18:03:20,704 INFO [train.py:903] (1/4) Epoch 21, batch 50, loss[loss=0.2047, simple_loss=0.2908, pruned_loss=0.0593, over 19542.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2888, pruned_loss=0.06441, over 864816.88 frames. ], batch size: 54, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:03:33,296 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136619.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:03:43,287 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.947e+02 6.173e+02 6.953e+02 1.295e+03, threshold=1.235e+03, percent-clipped=0.0 +2023-04-02 18:03:54,611 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-02 18:04:02,462 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.08 vs. limit=5.0 +2023-04-02 18:04:22,858 INFO [train.py:903] (1/4) Epoch 21, batch 100, loss[loss=0.2083, simple_loss=0.2827, pruned_loss=0.06696, over 19728.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2893, pruned_loss=0.06469, over 1519734.26 frames. ], batch size: 51, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:04:25,237 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:04:34,143 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 18:05:09,044 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136697.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:25,011 INFO [train.py:903] (1/4) Epoch 21, batch 150, loss[loss=0.1549, simple_loss=0.2368, pruned_loss=0.03651, over 19783.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06441, over 2044514.02 frames. ], batch size: 47, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:05:31,954 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:45,589 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 5.031e+02 5.954e+02 7.665e+02 1.668e+03, threshold=1.191e+03, percent-clipped=3.0 +2023-04-02 18:05:46,012 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136727.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:02,663 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136740.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:03,826 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6487, 1.5366, 1.5543, 2.0338, 1.5584, 1.8840, 1.8725, 1.6771], + device='cuda:1'), covar=tensor([0.0819, 0.0896, 0.0936, 0.0664, 0.0815, 0.0690, 0.0831, 0.0693], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0222, 0.0225, 0.0242, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 18:06:18,640 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:25,319 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 18:06:27,571 INFO [train.py:903] (1/4) Epoch 21, batch 200, loss[loss=0.221, simple_loss=0.303, pruned_loss=0.06945, over 19523.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06498, over 2423996.81 frames. 
], batch size: 64, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:06:29,122 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3654, 1.8540, 1.8957, 2.8979, 2.1735, 2.6603, 2.5205, 2.0821], + device='cuda:1'), covar=tensor([0.0747, 0.0958, 0.0979, 0.0769, 0.0810, 0.0693, 0.0876, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0243, 0.0226, 0.0213, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 18:06:35,833 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9282, 3.5659, 2.5123, 3.1773, 0.8177, 3.4851, 3.3669, 3.5417], + device='cuda:1'), covar=tensor([0.0869, 0.1180, 0.2100, 0.0975, 0.4231, 0.0918, 0.1094, 0.1319], + device='cuda:1'), in_proj_covar=tensor([0.0496, 0.0403, 0.0491, 0.0343, 0.0403, 0.0426, 0.0422, 0.0458], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:06:43,875 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4172, 1.2527, 1.2779, 1.7337, 1.3930, 1.5911, 1.6255, 1.3364], + device='cuda:1'), covar=tensor([0.0923, 0.1066, 0.1106, 0.0719, 0.0783, 0.0777, 0.0860, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0243, 0.0226, 0.0213, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 18:07:12,671 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 18:07:29,906 INFO [train.py:903] (1/4) Epoch 21, batch 250, loss[loss=0.2181, simple_loss=0.302, pruned_loss=0.06713, over 19762.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06539, over 2739568.26 frames. ], batch size: 54, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:32,610 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:47,492 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:52,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.815e+02 6.209e+02 8.068e+02 1.278e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-04-02 18:08:31,167 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2980, 2.0138, 1.6887, 1.2856, 1.8197, 1.3082, 1.2764, 1.9110], + device='cuda:1'), covar=tensor([0.0947, 0.0760, 0.0970, 0.0870, 0.0540, 0.1211, 0.0681, 0.0424], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0314, 0.0338, 0.0260, 0.0247, 0.0336, 0.0292, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:08:33,027 INFO [train.py:903] (1/4) Epoch 21, batch 300, loss[loss=0.2149, simple_loss=0.2943, pruned_loss=0.0678, over 19577.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2891, pruned_loss=0.06501, over 2983935.33 frames. 
], batch size: 52, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:08:53,791 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:23,344 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136900.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:36,421 INFO [train.py:903] (1/4) Epoch 21, batch 350, loss[loss=0.2082, simple_loss=0.2898, pruned_loss=0.0633, over 18755.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2888, pruned_loss=0.06563, over 3172305.65 frames. ], batch size: 74, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:09:38,653 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 18:09:56,984 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.835e+02 5.943e+02 7.281e+02 1.741e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-02 18:10:39,358 INFO [train.py:903] (1/4) Epoch 21, batch 400, loss[loss=0.2023, simple_loss=0.2894, pruned_loss=0.05759, over 19606.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2878, pruned_loss=0.06514, over 3323219.99 frames. ], batch size: 57, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:11:02,227 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. limit=2.0 +2023-04-02 18:11:35,387 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:11:41,221 INFO [train.py:903] (1/4) Epoch 21, batch 450, loss[loss=0.2325, simple_loss=0.3032, pruned_loss=0.08088, over 18252.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2883, pruned_loss=0.06567, over 3440324.86 frames. ], batch size: 83, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:11:59,611 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8139, 4.0766, 4.5784, 4.6177, 1.9042, 4.2294, 3.5335, 4.0130], + device='cuda:1'), covar=tensor([0.2072, 0.1335, 0.0815, 0.1047, 0.6834, 0.1820, 0.1302, 0.1756], + device='cuda:1'), in_proj_covar=tensor([0.0767, 0.0719, 0.0928, 0.0817, 0.0819, 0.0687, 0.0560, 0.0860], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 18:12:00,879 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5541, 1.6875, 2.0096, 1.8221, 3.0717, 2.7642, 3.4105, 1.6200], + device='cuda:1'), covar=tensor([0.2504, 0.4367, 0.2851, 0.1938, 0.1681, 0.2025, 0.1634, 0.4118], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0634, 0.0700, 0.0479, 0.0618, 0.0525, 0.0657, 0.0540], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 18:12:03,759 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.613e+02 5.973e+02 7.527e+02 1.521e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-02 18:12:12,953 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 18:12:14,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 18:12:43,485 INFO [train.py:903] (1/4) Epoch 21, batch 500, loss[loss=0.2188, simple_loss=0.3009, pruned_loss=0.0684, over 19691.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2883, pruned_loss=0.0655, over 3537121.62 frames. 
], batch size: 59, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:53,189 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:25,205 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:43,791 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3026, 3.8298, 3.9268, 3.9374, 1.5560, 3.7025, 3.2255, 3.6671], + device='cuda:1'), covar=tensor([0.1660, 0.0882, 0.0672, 0.0754, 0.5635, 0.1041, 0.0713, 0.1194], + device='cuda:1'), in_proj_covar=tensor([0.0769, 0.0720, 0.0929, 0.0817, 0.0820, 0.0688, 0.0560, 0.0860], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 18:13:45,871 INFO [train.py:903] (1/4) Epoch 21, batch 550, loss[loss=0.1939, simple_loss=0.2859, pruned_loss=0.05098, over 19620.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2886, pruned_loss=0.06555, over 3582173.81 frames. ], batch size: 61, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:13:59,933 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:14:09,847 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.365e+02 6.823e+02 8.347e+02 2.113e+03, threshold=1.365e+03, percent-clipped=7.0 +2023-04-02 18:14:33,025 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7255, 1.6068, 1.7375, 1.7346, 3.3179, 1.4100, 2.5754, 3.6669], + device='cuda:1'), covar=tensor([0.0460, 0.2491, 0.2589, 0.1810, 0.0646, 0.2349, 0.1213, 0.0244], + device='cuda:1'), in_proj_covar=tensor([0.0404, 0.0361, 0.0382, 0.0344, 0.0372, 0.0347, 0.0373, 0.0398], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:14:49,009 INFO [train.py:903] (1/4) Epoch 21, batch 600, loss[loss=0.2502, simple_loss=0.3184, pruned_loss=0.09105, over 19675.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2888, pruned_loss=0.0657, over 3619539.66 frames. ], batch size: 58, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:15:00,539 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:15:29,953 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 18:15:47,618 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3821, 1.1684, 1.2684, 1.7815, 1.4680, 1.4001, 1.3828, 1.3456], + device='cuda:1'), covar=tensor([0.0892, 0.1345, 0.1005, 0.0695, 0.1064, 0.1087, 0.1212, 0.0871], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0241, 0.0224, 0.0210, 0.0186, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 18:15:53,981 INFO [train.py:903] (1/4) Epoch 21, batch 650, loss[loss=0.1893, simple_loss=0.27, pruned_loss=0.0543, over 19674.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2896, pruned_loss=0.06645, over 3658962.13 frames. 
], batch size: 53, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:16:16,603 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 4.775e+02 5.968e+02 8.002e+02 1.696e+03, threshold=1.194e+03, percent-clipped=7.0 +2023-04-02 18:16:56,145 INFO [train.py:903] (1/4) Epoch 21, batch 700, loss[loss=0.1951, simple_loss=0.2734, pruned_loss=0.05838, over 19872.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2906, pruned_loss=0.06708, over 3686176.60 frames. ], batch size: 52, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:17:03,350 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6397, 2.1298, 2.2162, 2.6088, 2.2340, 2.0533, 2.1638, 2.6425], + device='cuda:1'), covar=tensor([0.0842, 0.1617, 0.1274, 0.1005, 0.1329, 0.0541, 0.1231, 0.0581], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0354, 0.0309, 0.0249, 0.0298, 0.0251, 0.0307, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:17:26,131 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:18:00,859 INFO [train.py:903] (1/4) Epoch 21, batch 750, loss[loss=0.1949, simple_loss=0.2764, pruned_loss=0.05669, over 19767.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.06671, over 3694012.76 frames. ], batch size: 54, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:18:18,217 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9688, 2.0395, 2.2765, 2.6089, 1.9778, 2.5313, 2.2886, 2.0551], + device='cuda:1'), covar=tensor([0.4240, 0.3918, 0.1874, 0.2418, 0.4141, 0.2080, 0.4722, 0.3302], + device='cuda:1'), in_proj_covar=tensor([0.0888, 0.0950, 0.0709, 0.0927, 0.0870, 0.0800, 0.0835, 0.0775], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 18:18:22,898 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 4.851e+02 6.236e+02 7.648e+02 2.101e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-04-02 18:18:42,013 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 18:19:01,635 INFO [train.py:903] (1/4) Epoch 21, batch 800, loss[loss=0.2487, simple_loss=0.3223, pruned_loss=0.08755, over 19058.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2916, pruned_loss=0.06705, over 3727354.34 frames. ], batch size: 69, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:19:07,853 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 18:19:22,493 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:19:53,548 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:20:05,543 INFO [train.py:903] (1/4) Epoch 21, batch 850, loss[loss=0.1938, simple_loss=0.2822, pruned_loss=0.05267, over 19660.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.06684, over 3736164.20 frames. 
], batch size: 55, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:20:27,121 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.583e+02 6.647e+02 8.818e+02 2.027e+03, threshold=1.329e+03, percent-clipped=5.0 +2023-04-02 18:20:48,405 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 18:21:06,665 INFO [train.py:903] (1/4) Epoch 21, batch 900, loss[loss=0.1701, simple_loss=0.2562, pruned_loss=0.04198, over 19740.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2912, pruned_loss=0.06681, over 3754399.68 frames. ], batch size: 51, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:21:59,598 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 18:22:08,627 INFO [train.py:903] (1/4) Epoch 21, batch 950, loss[loss=0.1933, simple_loss=0.2812, pruned_loss=0.05271, over 19767.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06685, over 3754177.07 frames. ], batch size: 54, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:22:31,547 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 5.256e+02 6.264e+02 7.895e+02 1.664e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 18:22:45,763 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:10,890 INFO [train.py:903] (1/4) Epoch 21, batch 1000, loss[loss=0.2258, simple_loss=0.3069, pruned_loss=0.07234, over 18372.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2904, pruned_loss=0.06629, over 3772216.01 frames. ], batch size: 84, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:23:15,635 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:55,433 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:23:56,778 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 18:24:14,001 INFO [train.py:903] (1/4) Epoch 21, batch 1050, loss[loss=0.2017, simple_loss=0.2705, pruned_loss=0.06646, over 19303.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2898, pruned_loss=0.06579, over 3784265.61 frames. ], batch size: 44, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:24:35,337 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.318e+02 6.486e+02 8.521e+02 3.216e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 18:24:36,541 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 18:25:17,799 INFO [train.py:903] (1/4) Epoch 21, batch 1100, loss[loss=0.184, simple_loss=0.2697, pruned_loss=0.04918, over 19662.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2896, pruned_loss=0.06573, over 3789442.55 frames. ], batch size: 53, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:26:19,700 INFO [train.py:903] (1/4) Epoch 21, batch 1150, loss[loss=0.1862, simple_loss=0.2757, pruned_loss=0.0483, over 19524.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2885, pruned_loss=0.06506, over 3805784.55 frames. 
], batch size: 54, lr: 3.95e-03, grad_scale: 4.0 +2023-04-02 18:26:43,797 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.127e+02 6.263e+02 7.580e+02 1.245e+03, threshold=1.253e+03, percent-clipped=0.0 +2023-04-02 18:27:22,180 INFO [train.py:903] (1/4) Epoch 21, batch 1200, loss[loss=0.2122, simple_loss=0.2937, pruned_loss=0.06537, over 19365.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2895, pruned_loss=0.0655, over 3821332.94 frames. ], batch size: 66, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:27:46,785 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 18:28:10,912 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 18:28:25,534 INFO [train.py:903] (1/4) Epoch 21, batch 1250, loss[loss=0.2006, simple_loss=0.2853, pruned_loss=0.05799, over 19529.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06513, over 3825353.42 frames. ], batch size: 64, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:28:48,827 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 5.084e+02 5.991e+02 7.123e+02 1.423e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-02 18:29:27,990 INFO [train.py:903] (1/4) Epoch 21, batch 1300, loss[loss=0.206, simple_loss=0.2855, pruned_loss=0.06322, over 19672.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2885, pruned_loss=0.06513, over 3831464.40 frames. ], batch size: 53, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:29:41,258 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0479, 1.9091, 1.9475, 2.7577, 1.9132, 2.4691, 2.3815, 2.1431], + device='cuda:1'), covar=tensor([0.0811, 0.0870, 0.0937, 0.0774, 0.0869, 0.0680, 0.0891, 0.0669], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0225, 0.0227, 0.0245, 0.0227, 0.0214, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 18:30:19,740 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.69 vs. limit=5.0 +2023-04-02 18:30:30,607 INFO [train.py:903] (1/4) Epoch 21, batch 1350, loss[loss=0.2332, simple_loss=0.3116, pruned_loss=0.07739, over 18843.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.289, pruned_loss=0.06527, over 3821302.88 frames. ], batch size: 74, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:30:30,920 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:34,463 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:54,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.098e+02 6.340e+02 8.452e+02 2.491e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-04-02 18:31:33,372 INFO [train.py:903] (1/4) Epoch 21, batch 1400, loss[loss=0.1577, simple_loss=0.2365, pruned_loss=0.03941, over 19752.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2888, pruned_loss=0.06503, over 3826909.07 frames. 
], batch size: 47, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:08,998 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2867, 2.0668, 2.2205, 2.8628, 1.9561, 2.5733, 2.3871, 2.3199], + device='cuda:1'), covar=tensor([0.0745, 0.0845, 0.0834, 0.0788, 0.0875, 0.0695, 0.0912, 0.0632], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0227, 0.0244, 0.0227, 0.0213, 0.0189, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 18:32:28,137 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 18:32:37,221 INFO [train.py:903] (1/4) Epoch 21, batch 1450, loss[loss=0.2064, simple_loss=0.2881, pruned_loss=0.0623, over 18085.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.06616, over 3807813.16 frames. ], batch size: 83, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:41,599 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 18:32:45,900 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6814, 1.6911, 1.7138, 2.1823, 1.6760, 1.9968, 2.0359, 1.8267], + device='cuda:1'), covar=tensor([0.0842, 0.0837, 0.0928, 0.0775, 0.0850, 0.0741, 0.0845, 0.0674], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0224, 0.0228, 0.0244, 0.0227, 0.0213, 0.0189, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 18:33:01,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.676e+02 5.543e+02 6.978e+02 2.034e+03, threshold=1.109e+03, percent-clipped=2.0 +2023-04-02 18:33:39,042 INFO [train.py:903] (1/4) Epoch 21, batch 1500, loss[loss=0.1976, simple_loss=0.2853, pruned_loss=0.05491, over 19539.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2882, pruned_loss=0.06496, over 3825385.02 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:34:17,952 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2171, 1.6108, 2.0988, 1.6835, 3.2227, 4.9004, 4.6635, 5.2131], + device='cuda:1'), covar=tensor([0.1688, 0.3461, 0.2944, 0.2152, 0.0541, 0.0160, 0.0149, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0320, 0.0349, 0.0264, 0.0241, 0.0185, 0.0215, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 18:34:42,047 INFO [train.py:903] (1/4) Epoch 21, batch 1550, loss[loss=0.2309, simple_loss=0.3123, pruned_loss=0.07478, over 19143.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06602, over 3829406.51 frames. 
], batch size: 69, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:34:43,696 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2024, 2.2477, 2.5128, 3.0206, 2.2010, 2.9529, 2.5405, 2.2088], + device='cuda:1'), covar=tensor([0.4310, 0.4116, 0.1814, 0.2430, 0.4443, 0.2018, 0.4834, 0.3403], + device='cuda:1'), in_proj_covar=tensor([0.0888, 0.0952, 0.0711, 0.0927, 0.0869, 0.0800, 0.0833, 0.0777], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 18:34:56,479 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9220, 1.5998, 1.9032, 1.4538, 4.4456, 1.0239, 2.3503, 4.8813], + device='cuda:1'), covar=tensor([0.0510, 0.2827, 0.2686, 0.2112, 0.0739, 0.2879, 0.1639, 0.0165], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0365, 0.0385, 0.0346, 0.0374, 0.0349, 0.0377, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:35:05,480 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 5.280e+02 6.228e+02 7.726e+02 2.313e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 18:35:38,367 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9941, 5.0161, 5.7572, 5.7913, 2.0838, 5.4829, 4.7069, 5.3530], + device='cuda:1'), covar=tensor([0.1599, 0.0910, 0.0554, 0.0547, 0.6005, 0.0665, 0.0559, 0.1290], + device='cuda:1'), in_proj_covar=tensor([0.0770, 0.0721, 0.0929, 0.0816, 0.0817, 0.0687, 0.0561, 0.0862], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 18:35:44,861 INFO [train.py:903] (1/4) Epoch 21, batch 1600, loss[loss=0.2134, simple_loss=0.2754, pruned_loss=0.07567, over 19783.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2905, pruned_loss=0.06577, over 3835446.64 frames. ], batch size: 48, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:07,172 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 18:36:48,266 INFO [train.py:903] (1/4) Epoch 21, batch 1650, loss[loss=0.1753, simple_loss=0.2537, pruned_loss=0.04846, over 19405.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.06501, over 3840979.77 frames. ], batch size: 48, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:50,157 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 18:37:12,981 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.591e+02 5.832e+02 7.172e+02 1.632e+03, threshold=1.166e+03, percent-clipped=1.0 +2023-04-02 18:37:43,169 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:46,577 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:49,827 INFO [train.py:903] (1/4) Epoch 21, batch 1700, loss[loss=0.2224, simple_loss=0.3038, pruned_loss=0.07054, over 19357.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.29, pruned_loss=0.06535, over 3839989.84 frames. 
], batch size: 66, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:38:24,209 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:38:28,566 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 18:38:53,714 INFO [train.py:903] (1/4) Epoch 21, batch 1750, loss[loss=0.2173, simple_loss=0.3035, pruned_loss=0.0656, over 19315.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2901, pruned_loss=0.06535, over 3848826.51 frames. ], batch size: 70, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:39:16,068 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 4.819e+02 5.989e+02 8.047e+02 2.111e+03, threshold=1.198e+03, percent-clipped=8.0 +2023-04-02 18:39:30,916 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:39:55,287 INFO [train.py:903] (1/4) Epoch 21, batch 1800, loss[loss=0.2104, simple_loss=0.2937, pruned_loss=0.0635, over 17464.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2901, pruned_loss=0.06509, over 3843025.36 frames. ], batch size: 101, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:40:06,007 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:10,880 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:54,356 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 18:40:57,827 INFO [train.py:903] (1/4) Epoch 21, batch 1850, loss[loss=0.2436, simple_loss=0.3173, pruned_loss=0.08488, over 19378.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2898, pruned_loss=0.06511, over 3841443.12 frames. ], batch size: 70, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:41:22,804 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 4.784e+02 5.677e+02 7.800e+02 1.333e+03, threshold=1.135e+03, percent-clipped=2.0 +2023-04-02 18:41:34,203 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 18:42:01,842 INFO [train.py:903] (1/4) Epoch 21, batch 1900, loss[loss=0.1831, simple_loss=0.2662, pruned_loss=0.05, over 19622.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2885, pruned_loss=0.06454, over 3852161.78 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:42:18,816 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 18:42:19,482 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:42:23,569 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 18:42:48,373 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 18:43:04,462 INFO [train.py:903] (1/4) Epoch 21, batch 1950, loss[loss=0.1882, simple_loss=0.2721, pruned_loss=0.05221, over 19765.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.06475, over 3837729.40 frames. 
], batch size: 54, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:43:27,762 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 4.783e+02 6.007e+02 7.405e+02 3.008e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 18:44:06,919 INFO [train.py:903] (1/4) Epoch 21, batch 2000, loss[loss=0.2091, simple_loss=0.2934, pruned_loss=0.06244, over 19475.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06523, over 3832825.21 frames. ], batch size: 49, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:44:14,585 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 18:45:03,882 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 18:45:08,469 INFO [train.py:903] (1/4) Epoch 21, batch 2050, loss[loss=0.2249, simple_loss=0.3062, pruned_loss=0.07176, over 19282.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.29, pruned_loss=0.06561, over 3828195.55 frames. ], batch size: 66, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:45:22,122 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 18:45:23,289 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 18:45:28,235 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138625.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:32,780 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:33,537 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.887e+02 4.818e+02 6.151e+02 8.119e+02 1.355e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 18:45:38,230 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:45,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 18:46:00,045 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:03,546 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:12,221 INFO [train.py:903] (1/4) Epoch 21, batch 2100, loss[loss=0.1644, simple_loss=0.2431, pruned_loss=0.04289, over 19772.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.0647, over 3830538.56 frames. ], batch size: 48, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:46:13,855 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3329, 2.0334, 1.5902, 1.3542, 1.8604, 1.3006, 1.3234, 1.8335], + device='cuda:1'), covar=tensor([0.0900, 0.0801, 0.1070, 0.0890, 0.0553, 0.1309, 0.0632, 0.0439], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0313, 0.0334, 0.0260, 0.0245, 0.0336, 0.0290, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:46:39,336 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 18:46:41,753 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:00,962 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138699.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:02,055 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 18:47:06,972 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:14,541 INFO [train.py:903] (1/4) Epoch 21, batch 2150, loss[loss=0.2422, simple_loss=0.3287, pruned_loss=0.07792, over 19755.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2888, pruned_loss=0.06554, over 3830631.78 frames. ], batch size: 63, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:47:21,627 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:37,621 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.033e+02 5.895e+02 7.227e+02 1.459e+03, threshold=1.179e+03, percent-clipped=3.0 +2023-04-02 18:48:00,964 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:48:03,253 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5417, 4.0905, 4.2583, 4.2502, 1.6721, 3.9880, 3.4478, 3.9629], + device='cuda:1'), covar=tensor([0.1665, 0.0799, 0.0637, 0.0698, 0.5694, 0.0832, 0.0747, 0.1202], + device='cuda:1'), in_proj_covar=tensor([0.0781, 0.0735, 0.0941, 0.0824, 0.0829, 0.0696, 0.0570, 0.0870], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 18:48:18,135 INFO [train.py:903] (1/4) Epoch 21, batch 2200, loss[loss=0.2406, simple_loss=0.3278, pruned_loss=0.07671, over 18012.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2888, pruned_loss=0.06556, over 3830745.64 frames. ], batch size: 83, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:48:39,543 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6988, 2.2957, 2.2663, 2.7286, 2.3773, 2.2217, 2.3440, 2.7464], + device='cuda:1'), covar=tensor([0.0858, 0.1689, 0.1338, 0.1080, 0.1373, 0.0486, 0.1206, 0.0614], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0358, 0.0311, 0.0250, 0.0301, 0.0251, 0.0309, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 18:49:08,071 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:49:20,496 INFO [train.py:903] (1/4) Epoch 21, batch 2250, loss[loss=0.2472, simple_loss=0.317, pruned_loss=0.08869, over 19747.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2891, pruned_loss=0.06539, over 3831143.38 frames. 
], batch size: 51, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:41,395 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2011, 1.3502, 1.8716, 1.3241, 2.7748, 3.7310, 3.4866, 3.8741], + device='cuda:1'), covar=tensor([0.1641, 0.3703, 0.3073, 0.2403, 0.0602, 0.0192, 0.0192, 0.0250], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0321, 0.0350, 0.0264, 0.0241, 0.0185, 0.0216, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 18:49:44,481 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.352e+02 5.230e+02 6.732e+02 8.271e+02 2.316e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-04-02 18:50:12,143 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5665, 2.9911, 3.1525, 3.2008, 1.2943, 2.9569, 2.5966, 2.7073], + device='cuda:1'), covar=tensor([0.3223, 0.1932, 0.1550, 0.2089, 0.7388, 0.2410, 0.1681, 0.2879], + device='cuda:1'), in_proj_covar=tensor([0.0780, 0.0733, 0.0942, 0.0823, 0.0826, 0.0694, 0.0570, 0.0870], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 18:50:23,831 INFO [train.py:903] (1/4) Epoch 21, batch 2300, loss[loss=0.2159, simple_loss=0.2972, pruned_loss=0.06727, over 19586.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06541, over 3824585.71 frames. ], batch size: 61, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:50:38,391 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 18:51:27,174 INFO [train.py:903] (1/4) Epoch 21, batch 2350, loss[loss=0.1939, simple_loss=0.279, pruned_loss=0.05442, over 19786.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06495, over 3821007.88 frames. ], batch size: 56, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:51:48,934 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:51:50,724 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.807e+02 6.646e+02 8.268e+02 1.737e+03, threshold=1.329e+03, percent-clipped=3.0 +2023-04-02 18:52:08,336 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 18:52:24,468 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 18:52:31,448 INFO [train.py:903] (1/4) Epoch 21, batch 2400, loss[loss=0.2259, simple_loss=0.3106, pruned_loss=0.07063, over 19768.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2898, pruned_loss=0.06501, over 3831379.90 frames. ], batch size: 56, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:26,788 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139003.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:34,433 INFO [train.py:903] (1/4) Epoch 21, batch 2450, loss[loss=0.2082, simple_loss=0.2943, pruned_loss=0.06101, over 19456.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.06447, over 3830591.07 frames. 
], batch size: 64, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:58,416 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:59,947 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.027e+02 6.383e+02 8.090e+02 1.476e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 18:54:17,225 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:22,831 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:31,501 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139055.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:38,304 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:39,196 INFO [train.py:903] (1/4) Epoch 21, batch 2500, loss[loss=0.269, simple_loss=0.3379, pruned_loss=0.1001, over 13562.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2891, pruned_loss=0.06491, over 3819880.12 frames. ], batch size: 136, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:55:05,366 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:55:20,462 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-04-02 18:55:34,451 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-02 18:55:42,980 INFO [train.py:903] (1/4) Epoch 21, batch 2550, loss[loss=0.1877, simple_loss=0.2618, pruned_loss=0.05676, over 19778.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06472, over 3826031.29 frames. ], batch size: 48, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:55:53,889 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2514, 2.0821, 1.9574, 1.8113, 1.4894, 1.7976, 0.6564, 1.2638], + device='cuda:1'), covar=tensor([0.0679, 0.0695, 0.0514, 0.0927, 0.1404, 0.1029, 0.1442, 0.1080], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0352, 0.0357, 0.0381, 0.0458, 0.0386, 0.0333, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 18:56:06,420 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 4.926e+02 6.170e+02 7.459e+02 2.294e+03, threshold=1.234e+03, percent-clipped=3.0 +2023-04-02 18:56:14,911 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1158, 1.3007, 1.7953, 1.3202, 2.7673, 3.7061, 3.4174, 3.9195], + device='cuda:1'), covar=tensor([0.1782, 0.3996, 0.3331, 0.2499, 0.0623, 0.0223, 0.0224, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0322, 0.0351, 0.0264, 0.0242, 0.0186, 0.0217, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 18:56:35,499 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-02 18:56:43,493 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:56:45,341 INFO [train.py:903] (1/4) Epoch 21, batch 2600, loss[loss=0.1989, simple_loss=0.282, pruned_loss=0.05792, over 18215.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06504, over 3839320.56 frames. ], batch size: 84, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:49,249 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:02,177 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:47,159 INFO [train.py:903] (1/4) Epoch 21, batch 2650, loss[loss=0.2419, simple_loss=0.3167, pruned_loss=0.08353, over 19370.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06505, over 3852179.38 frames. ], batch size: 66, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:58:08,400 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 18:58:13,098 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 4.962e+02 5.879e+02 7.846e+02 2.263e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 18:58:45,340 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:58:49,507 INFO [train.py:903] (1/4) Epoch 21, batch 2700, loss[loss=0.2042, simple_loss=0.2821, pruned_loss=0.06312, over 19677.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2892, pruned_loss=0.06503, over 3839447.52 frames. ], batch size: 53, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 18:59:04,250 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139271.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:26,351 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 18:59:33,184 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139294.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:49,918 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:52,995 INFO [train.py:903] (1/4) Epoch 21, batch 2750, loss[loss=0.2171, simple_loss=0.2985, pruned_loss=0.06788, over 19502.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2897, pruned_loss=0.0655, over 3831190.14 frames. ], batch size: 64, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 19:00:18,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.705e+02 5.796e+02 7.407e+02 1.811e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-02 19:00:55,534 INFO [train.py:903] (1/4) Epoch 21, batch 2800, loss[loss=0.1821, simple_loss=0.2684, pruned_loss=0.04792, over 19582.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2895, pruned_loss=0.0653, over 3841768.41 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:01:08,569 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-02 19:01:28,017 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139386.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:01:45,344 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 19:01:58,450 INFO [train.py:903] (1/4) Epoch 21, batch 2850, loss[loss=0.2603, simple_loss=0.3322, pruned_loss=0.09416, over 19669.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06458, over 3837036.04 frames. ], batch size: 55, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:02:03,776 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:09,514 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139419.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:23,967 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.945e+02 5.282e+02 6.184e+02 7.725e+02 1.552e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 19:02:24,385 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139430.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:35,353 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:42,188 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139444.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:54,861 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:57,821 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 19:03:00,110 INFO [train.py:903] (1/4) Epoch 21, batch 2900, loss[loss=0.2272, simple_loss=0.3061, pruned_loss=0.07412, over 19661.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2877, pruned_loss=0.06404, over 3826976.46 frames. ], batch size: 55, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:03:31,310 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6484, 1.1506, 1.4790, 1.5916, 2.9676, 1.2742, 2.4083, 3.4472], + device='cuda:1'), covar=tensor([0.0644, 0.3496, 0.3293, 0.2147, 0.1085, 0.2746, 0.1439, 0.0421], + device='cuda:1'), in_proj_covar=tensor([0.0406, 0.0361, 0.0380, 0.0342, 0.0368, 0.0345, 0.0372, 0.0398], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:04:04,518 INFO [train.py:903] (1/4) Epoch 21, batch 2950, loss[loss=0.2383, simple_loss=0.3198, pruned_loss=0.07835, over 19665.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2885, pruned_loss=0.06462, over 3808413.24 frames. 
], batch size: 55, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:28,835 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.684e+02 5.947e+02 7.442e+02 1.403e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-02 19:05:01,580 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0864, 1.2197, 1.5738, 0.9606, 2.4020, 3.0399, 2.7461, 3.1900], + device='cuda:1'), covar=tensor([0.1648, 0.3752, 0.3385, 0.2605, 0.0566, 0.0218, 0.0234, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0320, 0.0350, 0.0264, 0.0242, 0.0186, 0.0216, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 19:05:06,921 INFO [train.py:903] (1/4) Epoch 21, batch 3000, loss[loss=0.2183, simple_loss=0.3002, pruned_loss=0.06825, over 19526.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2882, pruned_loss=0.06459, over 3830605.97 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:05:06,921 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 19:05:13,987 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8351, 3.5730, 2.7360, 3.2371, 0.8405, 3.5354, 3.2838, 3.6026], + device='cuda:1'), covar=tensor([0.0788, 0.0728, 0.1893, 0.0819, 0.4429, 0.0699, 0.0791, 0.0903], + device='cuda:1'), in_proj_covar=tensor([0.0495, 0.0407, 0.0488, 0.0342, 0.0398, 0.0424, 0.0419, 0.0455], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:05:20,614 INFO [train.py:937] (1/4) Epoch 21, validation: loss=0.1693, simple_loss=0.2693, pruned_loss=0.03465, over 944034.00 frames. +2023-04-02 19:05:20,614 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 19:05:24,360 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 19:05:57,545 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:05,902 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 19:06:12,384 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,539 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:23,828 INFO [train.py:903] (1/4) Epoch 21, batch 3050, loss[loss=0.2004, simple_loss=0.2807, pruned_loss=0.06004, over 19615.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2897, pruned_loss=0.06535, over 3828417.51 frames. ], batch size: 50, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:06:35,312 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-02 19:06:48,012 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.171e+02 6.119e+02 7.965e+02 1.649e+03, threshold=1.224e+03, percent-clipped=3.0 +2023-04-02 19:06:57,090 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139638.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:03,688 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:14,487 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:24,495 INFO [train.py:903] (1/4) Epoch 21, batch 3100, loss[loss=0.2079, simple_loss=0.2948, pruned_loss=0.06054, over 19664.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2902, pruned_loss=0.06562, over 3839421.17 frames. ], batch size: 58, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:07:34,153 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139667.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:43,956 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8906, 1.5812, 1.8816, 1.6400, 4.3676, 0.9843, 2.6094, 4.7293], + device='cuda:1'), covar=tensor([0.0421, 0.2834, 0.2727, 0.2010, 0.0761, 0.2835, 0.1396, 0.0189], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0363, 0.0381, 0.0342, 0.0369, 0.0346, 0.0373, 0.0399], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:08:25,950 INFO [train.py:903] (1/4) Epoch 21, batch 3150, loss[loss=0.1932, simple_loss=0.2832, pruned_loss=0.0516, over 19745.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06505, over 3838685.59 frames. ], batch size: 63, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:08:32,255 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:51,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.084e+02 6.643e+02 9.166e+02 2.493e+03, threshold=1.329e+03, percent-clipped=12.0 +2023-04-02 19:08:52,511 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 19:09:19,229 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139753.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:09:28,040 INFO [train.py:903] (1/4) Epoch 21, batch 3200, loss[loss=0.1798, simple_loss=0.2601, pruned_loss=0.04979, over 19849.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.06511, over 3834327.18 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:09:36,946 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:01,980 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:27,856 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:30,931 INFO [train.py:903] (1/4) Epoch 21, batch 3250, loss[loss=0.2209, simple_loss=0.3054, pruned_loss=0.06827, over 19667.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.06537, over 3831760.39 frames. 
], batch size: 58, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:10:46,030 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139822.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:48,411 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:55,283 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.059e+02 5.030e+02 6.558e+02 8.674e+02 2.471e+03, threshold=1.312e+03, percent-clipped=9.0 +2023-04-02 19:11:31,708 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7480, 1.7015, 1.6234, 1.4407, 1.3610, 1.4114, 0.3062, 0.6767], + device='cuda:1'), covar=tensor([0.0658, 0.0639, 0.0433, 0.0614, 0.1292, 0.0751, 0.1294, 0.1125], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0351, 0.0354, 0.0379, 0.0456, 0.0384, 0.0331, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:11:32,327 INFO [train.py:903] (1/4) Epoch 21, batch 3300, loss[loss=0.2214, simple_loss=0.3029, pruned_loss=0.06999, over 19534.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2899, pruned_loss=0.06528, over 3827835.25 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:11:35,819 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 19:11:43,182 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:12:34,964 INFO [train.py:903] (1/4) Epoch 21, batch 3350, loss[loss=0.2267, simple_loss=0.3043, pruned_loss=0.07451, over 19523.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2914, pruned_loss=0.06616, over 3818836.05 frames. ], batch size: 64, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:12:36,483 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9893, 1.1991, 1.5570, 1.1604, 2.5183, 3.5328, 3.2499, 3.6669], + device='cuda:1'), covar=tensor([0.1813, 0.3930, 0.3554, 0.2478, 0.0646, 0.0177, 0.0225, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0321, 0.0350, 0.0263, 0.0242, 0.0185, 0.0216, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 19:12:36,719 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.13 vs. limit=5.0 +2023-04-02 19:12:59,279 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.69 vs. limit=5.0 +2023-04-02 19:13:00,646 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 19:13:00,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.734e+02 5.660e+02 7.256e+02 1.171e+03, threshold=1.132e+03, percent-clipped=0.0 +2023-04-02 19:13:04,626 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139933.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:08,525 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.82 vs. limit=5.0 +2023-04-02 19:13:17,284 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:37,525 INFO [train.py:903] (1/4) Epoch 21, batch 3400, loss[loss=0.1937, simple_loss=0.2674, pruned_loss=0.06002, over 16031.00 frames. 
], tot_loss[loss=0.2106, simple_loss=0.29, pruned_loss=0.06554, over 3827885.49 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:13:45,843 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9776, 2.8993, 1.8671, 1.9303, 2.6505, 1.6257, 1.5919, 2.2272], + device='cuda:1'), covar=tensor([0.1299, 0.0785, 0.1053, 0.0817, 0.0559, 0.1219, 0.0894, 0.0654], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0310, 0.0333, 0.0258, 0.0243, 0.0332, 0.0286, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:13:52,377 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:22,167 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:41,357 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140009.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:42,108 INFO [train.py:903] (1/4) Epoch 21, batch 3450, loss[loss=0.1913, simple_loss=0.2631, pruned_loss=0.05976, over 19738.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06574, over 3825302.18 frames. ], batch size: 45, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:14:43,274 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 19:14:56,560 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:06,287 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.068e+02 6.577e+02 8.644e+02 2.362e+03, threshold=1.315e+03, percent-clipped=9.0 +2023-04-02 19:15:11,005 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140034.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:27,227 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:28,360 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:41,653 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:42,387 INFO [train.py:903] (1/4) Epoch 21, batch 3500, loss[loss=0.2276, simple_loss=0.307, pruned_loss=0.07415, over 19359.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2912, pruned_loss=0.06654, over 3826894.79 frames. ], batch size: 70, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:45,563 INFO [train.py:903] (1/4) Epoch 21, batch 3550, loss[loss=0.1718, simple_loss=0.251, pruned_loss=0.04626, over 19734.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2914, pruned_loss=0.06682, over 3822659.68 frames. 
], batch size: 47, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:59,729 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:08,444 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:10,319 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.095e+02 6.549e+02 8.089e+02 2.006e+03, threshold=1.310e+03, percent-clipped=2.0 +2023-04-02 19:17:11,696 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140131.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:25,658 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4856, 1.2879, 1.5394, 1.5134, 3.0753, 1.1939, 2.3630, 3.4101], + device='cuda:1'), covar=tensor([0.0505, 0.2744, 0.2743, 0.1819, 0.0724, 0.2375, 0.1154, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0405, 0.0362, 0.0381, 0.0341, 0.0368, 0.0346, 0.0373, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:17:30,343 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1167, 1.2541, 1.6736, 0.9872, 2.3584, 2.9729, 2.6940, 3.1287], + device='cuda:1'), covar=tensor([0.1642, 0.3866, 0.3341, 0.2712, 0.0653, 0.0278, 0.0268, 0.0307], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0322, 0.0351, 0.0263, 0.0243, 0.0185, 0.0216, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 19:17:36,884 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140151.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:47,035 INFO [train.py:903] (1/4) Epoch 21, batch 3600, loss[loss=0.1726, simple_loss=0.2491, pruned_loss=0.04798, over 19357.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2909, pruned_loss=0.06643, over 3813126.67 frames. ], batch size: 47, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:17:56,425 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140166.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:58,706 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:18:51,574 INFO [train.py:903] (1/4) Epoch 21, batch 3650, loss[loss=0.2366, simple_loss=0.2974, pruned_loss=0.08783, over 19865.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06596, over 3824360.66 frames. ], batch size: 52, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:18:54,129 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:06,103 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-04-02 19:19:15,567 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.123e+02 6.079e+02 7.474e+02 1.635e+03, threshold=1.216e+03, percent-clipped=1.0 +2023-04-02 19:19:37,412 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140246.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:38,724 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0722, 2.2037, 2.3983, 2.7540, 2.1009, 2.6278, 2.4397, 2.2332], + device='cuda:1'), covar=tensor([0.4160, 0.3819, 0.1829, 0.2281, 0.3982, 0.2057, 0.4269, 0.3128], + device='cuda:1'), in_proj_covar=tensor([0.0893, 0.0954, 0.0715, 0.0926, 0.0871, 0.0807, 0.0835, 0.0776], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 19:19:41,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6897, 1.6704, 1.5406, 1.3867, 1.3020, 1.4302, 0.2854, 0.7163], + device='cuda:1'), covar=tensor([0.0582, 0.0604, 0.0384, 0.0588, 0.1048, 0.0675, 0.1182, 0.0959], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0353, 0.0357, 0.0382, 0.0458, 0.0386, 0.0334, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:19:54,186 INFO [train.py:903] (1/4) Epoch 21, batch 3700, loss[loss=0.1983, simple_loss=0.2836, pruned_loss=0.05648, over 19570.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06542, over 3830093.69 frames. ], batch size: 61, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:20:01,702 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140266.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:15,867 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:20,519 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:22,770 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:49,148 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140304.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:55,391 INFO [train.py:903] (1/4) Epoch 21, batch 3750, loss[loss=0.2149, simple_loss=0.2836, pruned_loss=0.07312, over 19621.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06508, over 3832908.18 frames. 
], batch size: 50, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:21:02,662 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:17,856 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:20,269 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140329.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:22,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.554e+02 6.128e+02 7.118e+02 1.255e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-02 19:21:33,824 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:57,847 INFO [train.py:903] (1/4) Epoch 21, batch 3800, loss[loss=0.1954, simple_loss=0.2839, pruned_loss=0.05342, over 19628.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06479, over 3824250.74 frames. ], batch size: 57, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:22:19,205 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1049, 3.5060, 3.7513, 3.8371, 1.5607, 3.5126, 3.0519, 3.2263], + device='cuda:1'), covar=tensor([0.2533, 0.1658, 0.1215, 0.1511, 0.7686, 0.2153, 0.1325, 0.2424], + device='cuda:1'), in_proj_covar=tensor([0.0775, 0.0730, 0.0932, 0.0819, 0.0822, 0.0695, 0.0567, 0.0869], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 19:22:30,440 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 19:22:54,677 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:23:01,181 INFO [train.py:903] (1/4) Epoch 21, batch 3850, loss[loss=0.2322, simple_loss=0.3098, pruned_loss=0.0773, over 19728.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2895, pruned_loss=0.06485, over 3838249.12 frames. ], batch size: 51, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:23:15,788 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 19:23:25,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 5.035e+02 6.153e+02 7.922e+02 1.662e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-04-02 19:24:03,194 INFO [train.py:903] (1/4) Epoch 21, batch 3900, loss[loss=0.2041, simple_loss=0.2897, pruned_loss=0.0592, over 19681.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2902, pruned_loss=0.06531, over 3834485.28 frames. 
], batch size: 59, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:24:09,094 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:17,262 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:51,310 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4826, 1.6118, 1.6180, 1.9795, 1.5650, 1.8245, 1.7171, 1.4330], + device='cuda:1'), covar=tensor([0.4973, 0.4420, 0.3018, 0.2671, 0.4131, 0.2523, 0.6570, 0.5310], + device='cuda:1'), in_proj_covar=tensor([0.0894, 0.0956, 0.0716, 0.0931, 0.0873, 0.0811, 0.0838, 0.0779], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 19:24:55,671 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140502.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:01,340 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140507.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:04,352 INFO [train.py:903] (1/4) Epoch 21, batch 3950, loss[loss=0.1747, simple_loss=0.2722, pruned_loss=0.03861, over 19673.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2895, pruned_loss=0.06476, over 3840047.70 frames. ], batch size: 58, lr: 3.91e-03, grad_scale: 4.0 +2023-04-02 19:25:10,227 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 19:25:20,285 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140522.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,816 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,940 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:31,859 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.763e+02 5.824e+02 7.544e+02 1.413e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 19:25:40,212 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:42,508 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:51,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:07,098 INFO [train.py:903] (1/4) Epoch 21, batch 4000, loss[loss=0.1789, simple_loss=0.2605, pruned_loss=0.04869, over 19763.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06448, over 3847460.63 frames. 
], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:26:09,710 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140562.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:13,052 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:33,149 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:36,735 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:41,300 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:55,176 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 19:26:58,526 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-02 19:27:02,783 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 19:27:09,311 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:11,219 INFO [train.py:903] (1/4) Epoch 21, batch 4050, loss[loss=0.2133, simple_loss=0.2948, pruned_loss=0.06587, over 19541.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2893, pruned_loss=0.06478, over 3837974.93 frames. ], batch size: 54, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:27:24,120 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0395, 1.9373, 1.8402, 1.6608, 1.3668, 1.5920, 0.6673, 1.0008], + device='cuda:1'), covar=tensor([0.0778, 0.0769, 0.0497, 0.0915, 0.1417, 0.1117, 0.1462, 0.1303], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0351, 0.0356, 0.0380, 0.0456, 0.0384, 0.0332, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:27:25,060 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:36,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 5.165e+02 6.710e+02 8.332e+02 1.443e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-02 19:27:41,321 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5833, 1.3130, 1.2216, 1.4355, 1.1246, 1.3317, 1.1953, 1.3948], + device='cuda:1'), covar=tensor([0.1162, 0.1152, 0.1630, 0.1077, 0.1364, 0.0695, 0.1660, 0.0894], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0359, 0.0311, 0.0249, 0.0300, 0.0252, 0.0310, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:28:13,830 INFO [train.py:903] (1/4) Epoch 21, batch 4100, loss[loss=0.197, simple_loss=0.2656, pruned_loss=0.06417, over 19304.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06475, over 3838603.58 frames. ], batch size: 44, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:28:28,665 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=5.00 vs. limit=5.0 +2023-04-02 19:28:52,828 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 19:29:15,740 INFO [train.py:903] (1/4) Epoch 21, batch 4150, loss[loss=0.1741, simple_loss=0.2639, pruned_loss=0.04216, over 19841.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2891, pruned_loss=0.06482, over 3834396.83 frames. ], batch size: 52, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:29:42,681 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.318e+02 6.390e+02 7.957e+02 1.686e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 19:29:50,018 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:29:55,900 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140741.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:00,649 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7716, 4.3755, 2.7102, 3.7388, 0.8830, 4.3270, 4.1534, 4.2862], + device='cuda:1'), covar=tensor([0.0621, 0.0973, 0.1982, 0.0950, 0.4101, 0.0620, 0.0893, 0.1153], + device='cuda:1'), in_proj_covar=tensor([0.0500, 0.0409, 0.0490, 0.0347, 0.0399, 0.0430, 0.0423, 0.0458], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:30:05,104 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:17,539 INFO [train.py:903] (1/4) Epoch 21, batch 4200, loss[loss=0.2178, simple_loss=0.2837, pruned_loss=0.07589, over 19782.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06543, over 3823721.69 frames. ], batch size: 48, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:30:24,381 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 19:31:15,136 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0825, 1.2308, 1.4385, 1.4019, 2.7339, 3.6904, 3.4085, 3.9696], + device='cuda:1'), covar=tensor([0.1861, 0.4005, 0.3897, 0.2410, 0.0642, 0.0193, 0.0233, 0.0262], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0320, 0.0349, 0.0263, 0.0241, 0.0184, 0.0215, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 19:31:21,349 INFO [train.py:903] (1/4) Epoch 21, batch 4250, loss[loss=0.2211, simple_loss=0.3075, pruned_loss=0.06734, over 19543.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.0651, over 3817435.38 frames. ], batch size: 56, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:31:40,121 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 19:31:42,789 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:31:47,368 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 5.291e+02 6.401e+02 8.566e+02 1.506e+03, threshold=1.280e+03, percent-clipped=6.0 +2023-04-02 19:31:50,843 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-02 19:31:51,262 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7245, 1.7107, 1.6119, 1.3648, 1.3443, 1.4088, 0.2924, 0.6824], + device='cuda:1'), covar=tensor([0.0622, 0.0644, 0.0407, 0.0630, 0.1232, 0.0697, 0.1244, 0.1081], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0353, 0.0358, 0.0383, 0.0459, 0.0386, 0.0335, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:31:53,749 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:01,558 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:13,492 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:24,304 INFO [train.py:903] (1/4) Epoch 21, batch 4300, loss[loss=0.1848, simple_loss=0.2767, pruned_loss=0.04645, over 19792.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06456, over 3821477.15 frames. ], batch size: 56, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:32:25,822 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:29,377 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:33,859 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:37,169 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:53,903 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8723, 1.8426, 1.6831, 1.4302, 1.2259, 1.4014, 0.5115, 0.8036], + device='cuda:1'), covar=tensor([0.0821, 0.0781, 0.0481, 0.0910, 0.1503, 0.1129, 0.1446, 0.1309], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0353, 0.0357, 0.0382, 0.0460, 0.0385, 0.0335, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:32:56,049 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5840, 4.6951, 5.2700, 5.2866, 2.1899, 4.9314, 4.3130, 4.9558], + device='cuda:1'), covar=tensor([0.1506, 0.1365, 0.0570, 0.0624, 0.5808, 0.0860, 0.0609, 0.1176], + device='cuda:1'), in_proj_covar=tensor([0.0785, 0.0736, 0.0950, 0.0831, 0.0831, 0.0704, 0.0573, 0.0878], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 19:33:18,586 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 19:33:24,432 INFO [train.py:903] (1/4) Epoch 21, batch 4350, loss[loss=0.2009, simple_loss=0.2924, pruned_loss=0.05463, over 18795.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2903, pruned_loss=0.06544, over 3817955.34 frames. 
], batch size: 74, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:33:51,500 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.814e+02 5.846e+02 6.905e+02 1.613e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 19:33:51,985 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5027, 1.6722, 2.0055, 1.8092, 3.2492, 2.6628, 3.6720, 1.5958], + device='cuda:1'), covar=tensor([0.2499, 0.4267, 0.2727, 0.1863, 0.1423, 0.2048, 0.1395, 0.4182], + device='cuda:1'), in_proj_covar=tensor([0.0530, 0.0639, 0.0707, 0.0481, 0.0614, 0.0530, 0.0660, 0.0545], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:33:56,421 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:25,325 INFO [train.py:903] (1/4) Epoch 21, batch 4400, loss[loss=0.1853, simple_loss=0.2611, pruned_loss=0.05475, over 19368.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2913, pruned_loss=0.06614, over 3812208.06 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:34:29,844 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9089, 1.6488, 1.5734, 1.9222, 1.5881, 1.6369, 1.5582, 1.7617], + device='cuda:1'), covar=tensor([0.1139, 0.1507, 0.1600, 0.1049, 0.1449, 0.0588, 0.1528, 0.0779], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0362, 0.0314, 0.0252, 0.0302, 0.0254, 0.0312, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:34:32,127 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:33,377 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140966.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:53,843 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 19:34:58,767 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:02,859 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 19:35:05,678 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:26,727 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6865, 2.4780, 2.3288, 2.7887, 2.5426, 2.2560, 2.2334, 2.5588], + device='cuda:1'), covar=tensor([0.0920, 0.1473, 0.1322, 0.0935, 0.1262, 0.0486, 0.1249, 0.0623], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0360, 0.0312, 0.0250, 0.0301, 0.0252, 0.0310, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:35:27,476 INFO [train.py:903] (1/4) Epoch 21, batch 4450, loss[loss=0.1924, simple_loss=0.279, pruned_loss=0.05291, over 19691.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2914, pruned_loss=0.06637, over 3820043.88 frames. 
], batch size: 59, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:35:37,899 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141017.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:54,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.226e+02 6.450e+02 8.222e+02 2.218e+03, threshold=1.290e+03, percent-clipped=9.0 +2023-04-02 19:36:06,103 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 19:36:32,491 INFO [train.py:903] (1/4) Epoch 21, batch 4500, loss[loss=0.2168, simple_loss=0.2982, pruned_loss=0.06774, over 19708.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2915, pruned_loss=0.06629, over 3809570.88 frames. ], batch size: 59, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:01,382 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141085.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:34,661 INFO [train.py:903] (1/4) Epoch 21, batch 4550, loss[loss=0.2123, simple_loss=0.2834, pruned_loss=0.07061, over 19590.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2923, pruned_loss=0.06688, over 3806712.23 frames. ], batch size: 52, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:46,079 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 19:37:46,498 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:59,732 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.292e+02 5.246e+02 6.195e+02 7.478e+02 1.454e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-04-02 19:38:10,839 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 19:38:18,657 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:28,874 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3790, 3.9592, 2.4410, 3.5508, 0.9344, 3.8955, 3.8246, 3.9252], + device='cuda:1'), covar=tensor([0.0705, 0.1160, 0.2217, 0.0897, 0.4024, 0.0777, 0.0888, 0.1157], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0408, 0.0489, 0.0345, 0.0397, 0.0428, 0.0420, 0.0455], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:38:35,234 INFO [train.py:903] (1/4) Epoch 21, batch 4600, loss[loss=0.2445, simple_loss=0.3146, pruned_loss=0.08717, over 13439.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2929, pruned_loss=0.06697, over 3794228.71 frames. ], batch size: 136, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:38:36,700 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:49,776 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:25,216 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141200.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:36,159 INFO [train.py:903] (1/4) Epoch 21, batch 4650, loss[loss=0.2028, simple_loss=0.2808, pruned_loss=0.06244, over 17469.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2919, pruned_loss=0.06619, over 3799264.48 frames. 
], batch size: 101, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:39:53,044 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141222.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:56,082 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 19:40:02,331 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 4.513e+02 5.853e+02 7.712e+02 1.984e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-02 19:40:05,984 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 19:40:12,917 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1249, 5.5323, 2.7719, 4.9282, 1.0665, 5.7271, 5.5590, 5.7462], + device='cuda:1'), covar=tensor([0.0359, 0.0735, 0.2004, 0.0633, 0.3859, 0.0409, 0.0703, 0.0841], + device='cuda:1'), in_proj_covar=tensor([0.0495, 0.0406, 0.0486, 0.0343, 0.0395, 0.0424, 0.0419, 0.0451], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:40:15,513 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:18,356 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 19:40:21,266 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:37,901 INFO [train.py:903] (1/4) Epoch 21, batch 4700, loss[loss=0.1845, simple_loss=0.2652, pruned_loss=0.05185, over 19765.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2918, pruned_loss=0.06644, over 3806280.82 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:40:45,381 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 19:40:47,274 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:00,533 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 19:41:00,651 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:08,829 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:38,494 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141309.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:39,464 INFO [train.py:903] (1/4) Epoch 21, batch 4750, loss[loss=0.2141, simple_loss=0.2964, pruned_loss=0.06589, over 19865.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2918, pruned_loss=0.06654, over 3800583.02 frames. 
], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:41:59,019 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1492, 1.8651, 1.7956, 2.0772, 1.8884, 1.8275, 1.7492, 1.9695], + device='cuda:1'), covar=tensor([0.0979, 0.1371, 0.1371, 0.0987, 0.1238, 0.0532, 0.1277, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0360, 0.0312, 0.0251, 0.0301, 0.0253, 0.0309, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:42:03,068 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.546e+02 5.235e+02 6.308e+02 8.195e+02 2.468e+03, threshold=1.262e+03, percent-clipped=8.0 +2023-04-02 19:42:25,005 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:42:30,013 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 19:42:39,827 INFO [train.py:903] (1/4) Epoch 21, batch 4800, loss[loss=0.2495, simple_loss=0.3247, pruned_loss=0.08713, over 18233.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2925, pruned_loss=0.06703, over 3813519.89 frames. ], batch size: 83, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:43:23,121 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:43:41,803 INFO [train.py:903] (1/4) Epoch 21, batch 4850, loss[loss=0.2302, simple_loss=0.3071, pruned_loss=0.07663, over 19505.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2917, pruned_loss=0.06677, over 3818219.94 frames. ], batch size: 64, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:01,449 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:07,726 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 19:44:08,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.192e+02 6.534e+02 8.985e+02 2.500e+03, threshold=1.307e+03, percent-clipped=12.0 +2023-04-02 19:44:13,596 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141435.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:44:28,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 19:44:33,280 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 19:44:34,433 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 19:44:39,592 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:44,245 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:45,062 INFO [train.py:903] (1/4) Epoch 21, batch 4900, loss[loss=0.2238, simple_loss=0.303, pruned_loss=0.07226, over 19643.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2907, pruned_loss=0.06631, over 3808593.44 frames. ], batch size: 55, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:47,084 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-02 19:45:05,756 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 19:45:10,369 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141481.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:18,369 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8370, 0.8917, 0.8596, 0.7453, 0.7431, 0.7719, 0.1057, 0.2921], + device='cuda:1'), covar=tensor([0.0468, 0.0472, 0.0311, 0.0444, 0.0800, 0.0479, 0.1113, 0.0791], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0350, 0.0353, 0.0378, 0.0457, 0.0383, 0.0332, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:45:40,780 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141505.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:46,104 INFO [train.py:903] (1/4) Epoch 21, batch 4950, loss[loss=0.2063, simple_loss=0.2756, pruned_loss=0.06849, over 19722.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06696, over 3799029.96 frames. ], batch size: 51, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:03,680 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 19:46:10,482 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.116e+02 6.031e+02 7.721e+02 1.403e+03, threshold=1.206e+03, percent-clipped=1.0 +2023-04-02 19:46:26,541 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141542.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:46:28,472 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 19:46:46,921 INFO [train.py:903] (1/4) Epoch 21, batch 5000, loss[loss=0.1865, simple_loss=0.265, pruned_loss=0.05404, over 19376.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.292, pruned_loss=0.06733, over 3784245.50 frames. ], batch size: 47, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:53,574 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 19:46:55,131 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141567.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:47:08,253 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 19:47:48,210 INFO [train.py:903] (1/4) Epoch 21, batch 5050, loss[loss=0.2143, simple_loss=0.3015, pruned_loss=0.06352, over 19744.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2925, pruned_loss=0.06751, over 3789495.74 frames. ], batch size: 63, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:48:03,201 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:16,452 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.325e+02 4.754e+02 5.832e+02 6.826e+02 1.423e+03, threshold=1.166e+03, percent-clipped=2.0 +2023-04-02 19:48:25,900 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-02 19:48:39,227 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:52,475 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1976, 5.5753, 2.9975, 4.8510, 1.3107, 5.7205, 5.5800, 5.8033], + device='cuda:1'), covar=tensor([0.0381, 0.0806, 0.1978, 0.0788, 0.3791, 0.0489, 0.0708, 0.0865], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0408, 0.0490, 0.0346, 0.0400, 0.0429, 0.0422, 0.0455], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:48:53,397 INFO [train.py:903] (1/4) Epoch 21, batch 5100, loss[loss=0.1698, simple_loss=0.2515, pruned_loss=0.04408, over 19626.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2908, pruned_loss=0.06627, over 3797243.41 frames. ], batch size: 50, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:49:04,590 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 19:49:06,994 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 19:49:11,629 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 19:49:12,005 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141675.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:17,687 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:30,080 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:50,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141705.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:56,348 INFO [train.py:903] (1/4) Epoch 21, batch 5150, loss[loss=0.2123, simple_loss=0.2807, pruned_loss=0.072, over 19413.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2913, pruned_loss=0.06664, over 3806696.27 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:50:09,422 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 19:50:16,621 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4977, 0.9535, 1.2885, 1.1548, 1.9604, 1.0741, 2.1418, 2.3047], + device='cuda:1'), covar=tensor([0.1000, 0.3857, 0.3402, 0.2213, 0.1406, 0.2515, 0.1111, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0404, 0.0361, 0.0381, 0.0343, 0.0369, 0.0345, 0.0373, 0.0395], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:50:20,920 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 4.996e+02 6.424e+02 8.131e+02 1.633e+03, threshold=1.285e+03, percent-clipped=6.0 +2023-04-02 19:50:46,532 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 19:50:57,359 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0623, 2.1320, 2.3381, 2.6488, 2.0662, 2.5459, 2.3534, 2.1295], + device='cuda:1'), covar=tensor([0.4011, 0.3808, 0.1795, 0.2404, 0.4111, 0.2113, 0.4350, 0.3247], + device='cuda:1'), in_proj_covar=tensor([0.0888, 0.0952, 0.0710, 0.0925, 0.0869, 0.0807, 0.0830, 0.0774], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 19:50:58,082 INFO [train.py:903] (1/4) Epoch 21, batch 5200, loss[loss=0.2174, simple_loss=0.3031, pruned_loss=0.06582, over 18084.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2922, pruned_loss=0.06683, over 3824298.85 frames. ], batch size: 83, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:51:14,042 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 19:51:22,159 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141779.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:51:51,427 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141803.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:54,947 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:57,322 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9174, 1.2720, 1.5963, 0.6609, 1.9611, 2.3951, 2.0997, 2.5957], + device='cuda:1'), covar=tensor([0.1670, 0.3777, 0.3387, 0.2731, 0.0647, 0.0271, 0.0339, 0.0339], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0320, 0.0349, 0.0263, 0.0241, 0.0184, 0.0215, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 19:51:58,266 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 19:51:59,437 INFO [train.py:903] (1/4) Epoch 21, batch 5250, loss[loss=0.2286, simple_loss=0.3123, pruned_loss=0.07241, over 19656.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2924, pruned_loss=0.06706, over 3824084.74 frames. ], batch size: 60, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:52:28,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.884e+02 6.185e+02 7.574e+02 1.457e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 19:52:56,687 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 19:53:02,568 INFO [train.py:903] (1/4) Epoch 21, batch 5300, loss[loss=0.1886, simple_loss=0.2654, pruned_loss=0.05591, over 19844.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.293, pruned_loss=0.06716, over 3823331.75 frames. ], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:53:23,419 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:53:24,173 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 19:53:45,676 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141894.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:53:52,726 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4102, 1.4338, 1.7944, 1.6538, 2.6641, 2.2839, 2.7695, 1.2638], + device='cuda:1'), covar=tensor([0.2687, 0.4627, 0.2871, 0.2167, 0.1668, 0.2329, 0.1743, 0.4732], + device='cuda:1'), in_proj_covar=tensor([0.0532, 0.0641, 0.0710, 0.0485, 0.0617, 0.0531, 0.0661, 0.0549], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:53:55,963 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:06,886 INFO [train.py:903] (1/4) Epoch 21, batch 5350, loss[loss=0.2543, simple_loss=0.341, pruned_loss=0.0838, over 19347.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2924, pruned_loss=0.06666, over 3833754.23 frames. ], batch size: 66, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:54:11,013 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 19:54:16,545 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:32,491 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.997e+02 5.942e+02 7.628e+02 1.099e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-02 19:54:35,105 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141934.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:43,723 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 19:54:51,469 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:58,493 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5870, 1.7149, 2.1053, 1.8263, 3.0522, 2.6329, 3.3553, 1.6828], + device='cuda:1'), covar=tensor([0.2397, 0.4123, 0.2525, 0.1883, 0.1527, 0.2028, 0.1529, 0.4126], + device='cuda:1'), in_proj_covar=tensor([0.0532, 0.0640, 0.0710, 0.0484, 0.0617, 0.0531, 0.0661, 0.0548], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 19:55:07,460 INFO [train.py:903] (1/4) Epoch 21, batch 5400, loss[loss=0.2091, simple_loss=0.2944, pruned_loss=0.06186, over 17366.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2915, pruned_loss=0.06628, over 3836115.25 frames. ], batch size: 101, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:55:22,664 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5904, 1.6884, 1.9476, 1.9151, 1.4212, 1.8932, 1.9611, 1.8214], + device='cuda:1'), covar=tensor([0.4094, 0.3466, 0.1837, 0.2330, 0.3776, 0.2106, 0.4872, 0.3270], + device='cuda:1'), in_proj_covar=tensor([0.0889, 0.0953, 0.0712, 0.0926, 0.0869, 0.0808, 0.0833, 0.0775], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 19:56:10,559 INFO [train.py:903] (1/4) Epoch 21, batch 5450, loss[loss=0.1704, simple_loss=0.2463, pruned_loss=0.04729, over 19758.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2908, pruned_loss=0.06569, over 3833399.00 frames. 
], batch size: 45, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:39,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.354e+02 6.392e+02 8.231e+02 1.329e+03, threshold=1.278e+03, percent-clipped=1.0 +2023-04-02 19:57:14,050 INFO [train.py:903] (1/4) Epoch 21, batch 5500, loss[loss=0.2251, simple_loss=0.303, pruned_loss=0.07361, over 19778.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2907, pruned_loss=0.06584, over 3824207.46 frames. ], batch size: 56, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:57:17,989 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:57:42,007 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 19:57:48,335 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:58:17,753 INFO [train.py:903] (1/4) Epoch 21, batch 5550, loss[loss=0.1942, simple_loss=0.2842, pruned_loss=0.05213, over 19601.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2903, pruned_loss=0.06572, over 3822525.48 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:58:26,219 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 19:58:43,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.904e+02 5.778e+02 7.569e+02 2.193e+03, threshold=1.156e+03, percent-clipped=4.0 +2023-04-02 19:59:03,236 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.6817, 5.1480, 3.0237, 4.4948, 1.4615, 5.1959, 5.0774, 5.2169], + device='cuda:1'), covar=tensor([0.0366, 0.0788, 0.1903, 0.0743, 0.3631, 0.0561, 0.0691, 0.1053], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0409, 0.0490, 0.0345, 0.0401, 0.0428, 0.0422, 0.0456], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 19:59:07,842 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142150.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 19:59:15,742 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 19:59:19,356 INFO [train.py:903] (1/4) Epoch 21, batch 5600, loss[loss=0.1772, simple_loss=0.2615, pruned_loss=0.04648, over 19725.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.0656, over 3815990.56 frames. 
], batch size: 51, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:59:36,334 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:59:37,406 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142175.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:59:58,293 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1784, 1.2675, 1.7759, 1.0696, 2.5009, 3.3464, 3.0338, 3.5968], + device='cuda:1'), covar=tensor([0.1618, 0.3884, 0.3227, 0.2603, 0.0613, 0.0215, 0.0243, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0262, 0.0241, 0.0183, 0.0215, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 20:00:08,640 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142199.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:00:20,409 INFO [train.py:903] (1/4) Epoch 21, batch 5650, loss[loss=0.1948, simple_loss=0.2752, pruned_loss=0.05716, over 19352.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2901, pruned_loss=0.06594, over 3823613.83 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:00:49,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 4.896e+02 5.835e+02 7.915e+02 1.772e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-02 20:01:10,302 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 20:01:22,637 INFO [train.py:903] (1/4) Epoch 21, batch 5700, loss[loss=0.2094, simple_loss=0.2846, pruned_loss=0.06715, over 19733.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2911, pruned_loss=0.06624, over 3823299.06 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:01:45,771 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:01:59,759 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:02:09,013 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5250, 1.2700, 1.3028, 2.0848, 1.6157, 1.6207, 1.9327, 1.4464], + device='cuda:1'), covar=tensor([0.0986, 0.1174, 0.1191, 0.0762, 0.0889, 0.0927, 0.0853, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0239, 0.0224, 0.0210, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 20:02:26,061 INFO [train.py:903] (1/4) Epoch 21, batch 5750, loss[loss=0.2379, simple_loss=0.2971, pruned_loss=0.08935, over 19484.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06581, over 3824499.09 frames. ], batch size: 49, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:02:28,347 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 20:02:36,341 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 20:02:41,002 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 20:02:51,482 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.170e+02 6.700e+02 8.460e+02 1.665e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-02 20:03:26,600 INFO [train.py:903] (1/4) Epoch 21, batch 5800, loss[loss=0.2006, simple_loss=0.2729, pruned_loss=0.06421, over 19345.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2896, pruned_loss=0.0655, over 3824644.57 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:08,908 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:22,748 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:28,056 INFO [train.py:903] (1/4) Epoch 21, batch 5850, loss[loss=0.2163, simple_loss=0.2991, pruned_loss=0.06681, over 18839.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06635, over 3811546.75 frames. ], batch size: 74, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:57,527 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.411e+02 4.867e+02 6.014e+02 7.504e+02 1.855e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 20:05:31,488 INFO [train.py:903] (1/4) Epoch 21, batch 5900, loss[loss=0.2068, simple_loss=0.2935, pruned_loss=0.06011, over 19501.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2898, pruned_loss=0.06539, over 3820932.80 frames. ], batch size: 64, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:05:35,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 20:05:58,749 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 20:06:36,836 INFO [train.py:903] (1/4) Epoch 21, batch 5950, loss[loss=0.2606, simple_loss=0.3279, pruned_loss=0.09664, over 18462.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06561, over 3818327.02 frames. ], batch size: 84, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:02,040 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.752e+02 5.691e+02 7.550e+02 2.003e+03, threshold=1.138e+03, percent-clipped=5.0 +2023-04-02 20:07:37,089 INFO [train.py:903] (1/4) Epoch 21, batch 6000, loss[loss=0.2022, simple_loss=0.2952, pruned_loss=0.0546, over 19081.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2895, pruned_loss=0.06542, over 3823681.36 frames. ], batch size: 69, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:37,090 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 20:07:50,388 INFO [train.py:937] (1/4) Epoch 21, validation: loss=0.1692, simple_loss=0.2693, pruned_loss=0.03459, over 944034.00 frames. +2023-04-02 20:07:50,389 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 20:08:26,825 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:08:52,457 INFO [train.py:903] (1/4) Epoch 21, batch 6050, loss[loss=0.2075, simple_loss=0.2922, pruned_loss=0.06138, over 19768.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2895, pruned_loss=0.06515, over 3813544.24 frames. 
], batch size: 54, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:18,890 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.812e+02 5.741e+02 7.692e+02 1.541e+03, threshold=1.148e+03, percent-clipped=3.0 +2023-04-02 20:09:41,427 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:09:53,725 INFO [train.py:903] (1/4) Epoch 21, batch 6100, loss[loss=0.2416, simple_loss=0.3157, pruned_loss=0.08377, over 19685.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06479, over 3820085.87 frames. ], batch size: 59, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:55,369 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:12,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142674.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:17,586 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 20:10:28,113 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:55,903 INFO [train.py:903] (1/4) Epoch 21, batch 6150, loss[loss=0.2064, simple_loss=0.2872, pruned_loss=0.06287, over 19129.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2874, pruned_loss=0.06404, over 3831278.83 frames. ], batch size: 69, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:11:25,006 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.643e+02 5.511e+02 6.918e+02 9.659e+02 2.206e+03, threshold=1.384e+03, percent-clipped=13.0 +2023-04-02 20:11:26,176 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 20:11:36,780 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:11:59,805 INFO [train.py:903] (1/4) Epoch 21, batch 6200, loss[loss=0.2103, simple_loss=0.2932, pruned_loss=0.06365, over 19124.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2871, pruned_loss=0.06391, over 3837321.94 frames. ], batch size: 69, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:12:15,936 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3951, 1.4885, 1.5567, 1.5523, 1.7362, 1.9116, 1.7175, 0.5218], + device='cuda:1'), covar=tensor([0.2336, 0.3948, 0.2498, 0.1917, 0.1616, 0.2256, 0.1483, 0.4700], + device='cuda:1'), in_proj_covar=tensor([0.0530, 0.0643, 0.0710, 0.0485, 0.0619, 0.0532, 0.0664, 0.0548], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 20:13:02,738 INFO [train.py:903] (1/4) Epoch 21, batch 6250, loss[loss=0.2177, simple_loss=0.3069, pruned_loss=0.06424, over 19606.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2879, pruned_loss=0.06466, over 3817592.14 frames. ], batch size: 61, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:13:28,470 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 5.305e+02 6.117e+02 7.859e+02 2.157e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 20:13:30,683 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 20:13:46,816 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3586, 1.2002, 1.7067, 1.3339, 2.8002, 3.6437, 3.3491, 3.8309], + device='cuda:1'), covar=tensor([0.1593, 0.4037, 0.3529, 0.2418, 0.0578, 0.0199, 0.0212, 0.0228], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0263, 0.0240, 0.0184, 0.0214, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 20:14:05,180 INFO [train.py:903] (1/4) Epoch 21, batch 6300, loss[loss=0.181, simple_loss=0.2707, pruned_loss=0.04566, over 19647.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.287, pruned_loss=0.06373, over 3821780.91 frames. ], batch size: 53, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:14:47,686 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9681, 1.8708, 1.7960, 1.5387, 1.4289, 1.5568, 0.4021, 0.8693], + device='cuda:1'), covar=tensor([0.0641, 0.0638, 0.0416, 0.0729, 0.1221, 0.0883, 0.1262, 0.1082], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0355, 0.0354, 0.0382, 0.0459, 0.0386, 0.0335, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 20:15:07,133 INFO [train.py:903] (1/4) Epoch 21, batch 6350, loss[loss=0.2327, simple_loss=0.3277, pruned_loss=0.06889, over 19670.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2879, pruned_loss=0.0642, over 3815175.14 frames. ], batch size: 55, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:36,285 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.232e+02 4.679e+02 5.550e+02 7.220e+02 1.923e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-02 20:15:39,976 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:16:11,266 INFO [train.py:903] (1/4) Epoch 21, batch 6400, loss[loss=0.1833, simple_loss=0.2681, pruned_loss=0.04924, over 19683.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2878, pruned_loss=0.0642, over 3822043.89 frames. ], batch size: 53, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:14,709 INFO [train.py:903] (1/4) Epoch 21, batch 6450, loss[loss=0.2735, simple_loss=0.332, pruned_loss=0.1075, over 12994.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2879, pruned_loss=0.06449, over 3819475.95 frames. ], batch size: 135, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:40,509 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.995e+02 6.270e+02 8.275e+02 2.312e+03, threshold=1.254e+03, percent-clipped=6.0 +2023-04-02 20:18:01,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 20:18:04,947 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:18:16,202 INFO [train.py:903] (1/4) Epoch 21, batch 6500, loss[loss=0.1782, simple_loss=0.2593, pruned_loss=0.04854, over 19733.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2884, pruned_loss=0.06484, over 3821514.48 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:18:23,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 20:18:24,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-02 20:18:48,994 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143086.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:19:16,589 INFO [train.py:903] (1/4) Epoch 21, batch 6550, loss[loss=0.1924, simple_loss=0.2762, pruned_loss=0.05432, over 19517.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06546, over 3810708.10 frames. ], batch size: 54, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:19:44,582 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 5.073e+02 6.169e+02 7.633e+02 2.146e+03, threshold=1.234e+03, percent-clipped=4.0 +2023-04-02 20:20:01,369 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0425, 1.7324, 1.6592, 1.9865, 1.6765, 1.6688, 1.6098, 1.9196], + device='cuda:1'), covar=tensor([0.0972, 0.1459, 0.1424, 0.1013, 0.1374, 0.0596, 0.1395, 0.0699], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0356, 0.0308, 0.0248, 0.0299, 0.0251, 0.0310, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:20:19,908 INFO [train.py:903] (1/4) Epoch 21, batch 6600, loss[loss=0.201, simple_loss=0.2684, pruned_loss=0.06687, over 19807.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.0654, over 3805950.12 frames. ], batch size: 49, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:10,262 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:21:20,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4228, 2.0536, 1.6560, 1.4258, 1.8623, 1.3907, 1.3244, 1.8766], + device='cuda:1'), covar=tensor([0.0903, 0.0845, 0.1047, 0.0846, 0.0557, 0.1305, 0.0701, 0.0440], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0313, 0.0336, 0.0261, 0.0246, 0.0335, 0.0290, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:21:22,428 INFO [train.py:903] (1/4) Epoch 21, batch 6650, loss[loss=0.2044, simple_loss=0.2849, pruned_loss=0.06197, over 19751.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2896, pruned_loss=0.06532, over 3813361.33 frames. ], batch size: 63, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:47,870 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.940e+02 5.672e+02 7.065e+02 1.538e+03, threshold=1.134e+03, percent-clipped=2.0 +2023-04-02 20:22:23,645 INFO [train.py:903] (1/4) Epoch 21, batch 6700, loss[loss=0.2323, simple_loss=0.3031, pruned_loss=0.08074, over 13060.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2899, pruned_loss=0.0656, over 3807415.94 frames. ], batch size: 136, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:22:52,037 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 20:23:19,568 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:23,654 INFO [train.py:903] (1/4) Epoch 21, batch 6750, loss[loss=0.1869, simple_loss=0.2712, pruned_loss=0.0513, over 19597.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06517, over 3803822.71 frames. 
], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:48,060 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143331.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:48,849 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.995e+02 6.197e+02 7.772e+02 2.067e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-02 20:24:20,243 INFO [train.py:903] (1/4) Epoch 21, batch 6800, loss[loss=0.2061, simple_loss=0.2925, pruned_loss=0.05987, over 18241.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2898, pruned_loss=0.06584, over 3805950.98 frames. ], batch size: 84, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:25:05,675 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 20:25:06,810 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 20:25:09,791 INFO [train.py:903] (1/4) Epoch 22, batch 0, loss[loss=0.2118, simple_loss=0.2951, pruned_loss=0.06429, over 19525.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2951, pruned_loss=0.06429, over 19525.00 frames. ], batch size: 56, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:25:09,792 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 20:25:18,557 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1943, 1.1069, 1.1619, 1.4035, 1.0347, 1.2562, 1.3172, 1.2270], + device='cuda:1'), covar=tensor([0.0653, 0.0771, 0.0813, 0.0515, 0.0744, 0.0684, 0.0713, 0.0577], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 20:25:20,458 INFO [train.py:937] (1/4) Epoch 22, validation: loss=0.1683, simple_loss=0.2691, pruned_loss=0.03373, over 944034.00 frames. +2023-04-02 20:25:20,459 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 20:25:31,901 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 20:25:55,286 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143418.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:26:14,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.908e+02 5.891e+02 8.006e+02 1.582e+03, threshold=1.178e+03, percent-clipped=4.0 +2023-04-02 20:26:21,016 INFO [train.py:903] (1/4) Epoch 22, batch 50, loss[loss=0.1966, simple_loss=0.2783, pruned_loss=0.05746, over 19040.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2895, pruned_loss=0.06554, over 870575.16 frames. 
], batch size: 69, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:26:24,449 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6736, 2.5413, 2.3075, 2.6955, 2.5515, 2.3078, 2.1627, 2.5429], + device='cuda:1'), covar=tensor([0.0921, 0.1536, 0.1422, 0.0990, 0.1213, 0.0492, 0.1324, 0.0650], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0353, 0.0308, 0.0246, 0.0296, 0.0248, 0.0307, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:26:42,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:26:53,918 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 20:27:13,735 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143482.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:27:19,994 INFO [train.py:903] (1/4) Epoch 22, batch 100, loss[loss=0.2068, simple_loss=0.2837, pruned_loss=0.06495, over 19757.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2923, pruned_loss=0.06706, over 1532209.94 frames. ], batch size: 51, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:27:23,806 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143491.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:27:31,528 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 20:27:56,476 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1550, 1.9969, 2.0701, 2.4968, 2.1473, 2.2906, 2.3535, 2.1385], + device='cuda:1'), covar=tensor([0.0640, 0.0721, 0.0763, 0.0619, 0.0760, 0.0646, 0.0726, 0.0584], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0239, 0.0225, 0.0212, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 20:28:12,241 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.227e+02 6.391e+02 8.671e+02 1.540e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 20:28:19,054 INFO [train.py:903] (1/4) Epoch 22, batch 150, loss[loss=0.2245, simple_loss=0.2927, pruned_loss=0.0781, over 19694.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06691, over 2041066.55 frames. ], batch size: 53, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:28:31,077 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2866, 3.0102, 2.1811, 2.7290, 0.8692, 2.9812, 2.8971, 2.8972], + device='cuda:1'), covar=tensor([0.1097, 0.1477, 0.2032, 0.1087, 0.3676, 0.1039, 0.1193, 0.1481], + device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0405, 0.0490, 0.0343, 0.0401, 0.0427, 0.0421, 0.0457], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:29:18,917 INFO [train.py:903] (1/4) Epoch 22, batch 200, loss[loss=0.2455, simple_loss=0.3183, pruned_loss=0.08631, over 19684.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06681, over 2446015.12 frames. ], batch size: 60, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:29:18,956 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-02 20:30:12,489 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.534e+02 5.144e+02 6.083e+02 7.742e+02 1.350e+03, threshold=1.217e+03, percent-clipped=1.0 +2023-04-02 20:30:20,926 INFO [train.py:903] (1/4) Epoch 22, batch 250, loss[loss=0.2417, simple_loss=0.3137, pruned_loss=0.08482, over 19514.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2907, pruned_loss=0.06616, over 2747651.77 frames. ], batch size: 54, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:31:20,917 INFO [train.py:903] (1/4) Epoch 22, batch 300, loss[loss=0.2191, simple_loss=0.2944, pruned_loss=0.07187, over 19541.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2907, pruned_loss=0.06611, over 2987866.61 frames. ], batch size: 56, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:31:56,662 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 20:32:15,075 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.028e+02 5.065e+02 6.247e+02 8.237e+02 1.383e+03, threshold=1.249e+03, percent-clipped=3.0 +2023-04-02 20:32:22,211 INFO [train.py:903] (1/4) Epoch 22, batch 350, loss[loss=0.221, simple_loss=0.2995, pruned_loss=0.07123, over 19458.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2892, pruned_loss=0.06495, over 3179244.53 frames. ], batch size: 64, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:29,138 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 20:32:51,098 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143762.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:32:52,791 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 20:33:20,944 INFO [train.py:903] (1/4) Epoch 22, batch 400, loss[loss=0.2008, simple_loss=0.271, pruned_loss=0.06534, over 19742.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06399, over 3339227.24 frames. ], batch size: 45, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:15,303 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.215e+02 6.557e+02 8.093e+02 2.351e+03, threshold=1.311e+03, percent-clipped=8.0 +2023-04-02 20:34:17,781 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143835.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:34:20,915 INFO [train.py:903] (1/4) Epoch 22, batch 450, loss[loss=0.2014, simple_loss=0.2821, pruned_loss=0.06036, over 19541.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.287, pruned_loss=0.06377, over 3453743.16 frames. ], batch size: 56, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:57,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 20:34:58,983 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 20:35:08,538 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143877.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:35:22,928 INFO [train.py:903] (1/4) Epoch 22, batch 500, loss[loss=0.1605, simple_loss=0.2394, pruned_loss=0.04077, over 19769.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06434, over 3531591.17 frames. 
], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:17,498 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 5.123e+02 6.359e+02 8.434e+02 1.804e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 20:36:23,280 INFO [train.py:903] (1/4) Epoch 22, batch 550, loss[loss=0.2165, simple_loss=0.3012, pruned_loss=0.06587, over 18824.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2883, pruned_loss=0.06441, over 3588101.12 frames. ], batch size: 74, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:37,254 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143950.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:37:00,091 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4194, 1.3258, 1.3851, 1.7266, 1.3321, 1.6567, 1.7357, 1.5232], + device='cuda:1'), covar=tensor([0.0948, 0.1061, 0.1075, 0.0764, 0.0890, 0.0823, 0.0820, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0223, 0.0227, 0.0240, 0.0226, 0.0213, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 20:37:23,312 INFO [train.py:903] (1/4) Epoch 22, batch 600, loss[loss=0.2018, simple_loss=0.2924, pruned_loss=0.05564, over 19686.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06464, over 3636692.66 frames. ], batch size: 59, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:38:02,269 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5989, 4.1938, 2.5957, 3.6252, 0.8824, 4.1446, 3.9366, 4.1314], + device='cuda:1'), covar=tensor([0.0646, 0.0916, 0.2029, 0.0919, 0.4077, 0.0699, 0.1046, 0.1146], + device='cuda:1'), in_proj_covar=tensor([0.0500, 0.0408, 0.0492, 0.0345, 0.0402, 0.0430, 0.0426, 0.0459], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:38:06,644 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 20:38:17,797 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 4.914e+02 6.190e+02 8.004e+02 1.732e+03, threshold=1.238e+03, percent-clipped=3.0 +2023-04-02 20:38:23,572 INFO [train.py:903] (1/4) Epoch 22, batch 650, loss[loss=0.2255, simple_loss=0.3024, pruned_loss=0.07427, over 19575.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2876, pruned_loss=0.06436, over 3686918.02 frames. ], batch size: 61, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:39:26,361 INFO [train.py:903] (1/4) Epoch 22, batch 700, loss[loss=0.2328, simple_loss=0.3124, pruned_loss=0.07664, over 18663.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.288, pruned_loss=0.06412, over 3722385.86 frames. 
], batch size: 74, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:40:19,659 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.796e+02 6.107e+02 7.975e+02 1.533e+03, threshold=1.221e+03, percent-clipped=5.0 +2023-04-02 20:40:20,084 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144133.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:40:21,171 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9805, 1.2274, 1.5963, 0.6737, 2.0337, 2.4731, 2.1460, 2.6339], + device='cuda:1'), covar=tensor([0.1516, 0.3796, 0.3229, 0.2640, 0.0619, 0.0266, 0.0345, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0321, 0.0350, 0.0265, 0.0243, 0.0185, 0.0215, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 20:40:26,360 INFO [train.py:903] (1/4) Epoch 22, batch 750, loss[loss=0.1942, simple_loss=0.2845, pruned_loss=0.05193, over 19698.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2886, pruned_loss=0.06491, over 3741866.27 frames. ], batch size: 59, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:40:49,192 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144158.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:41:21,372 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-02 20:41:26,351 INFO [train.py:903] (1/4) Epoch 22, batch 800, loss[loss=0.1806, simple_loss=0.2564, pruned_loss=0.05235, over 19776.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.06461, over 3761239.47 frames. ], batch size: 47, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:41:44,760 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 20:41:48,043 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144206.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:42:19,071 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144231.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:42:20,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.021e+02 6.351e+02 8.019e+02 1.751e+03, threshold=1.270e+03, percent-clipped=5.0 +2023-04-02 20:42:26,677 INFO [train.py:903] (1/4) Epoch 22, batch 850, loss[loss=0.2356, simple_loss=0.3154, pruned_loss=0.07784, over 19301.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2897, pruned_loss=0.06532, over 3762143.64 frames. ], batch size: 70, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:43:19,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 20:43:26,415 INFO [train.py:903] (1/4) Epoch 22, batch 900, loss[loss=0.2188, simple_loss=0.3031, pruned_loss=0.06727, over 19773.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2901, pruned_loss=0.06543, over 3777699.30 frames. 
], batch size: 54, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:43:42,716 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9251, 4.4520, 2.8907, 3.9600, 0.8052, 4.4832, 4.3386, 4.4935], + device='cuda:1'), covar=tensor([0.0536, 0.0949, 0.1830, 0.0830, 0.4409, 0.0598, 0.0766, 0.1114], + device='cuda:1'), in_proj_covar=tensor([0.0497, 0.0408, 0.0488, 0.0343, 0.0400, 0.0426, 0.0421, 0.0457], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:44:05,049 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4700, 2.3030, 2.0823, 2.5879, 2.3211, 2.0526, 2.0883, 2.3170], + device='cuda:1'), covar=tensor([0.0990, 0.1616, 0.1521, 0.1112, 0.1390, 0.0549, 0.1355, 0.0756], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0357, 0.0312, 0.0250, 0.0300, 0.0249, 0.0308, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:44:21,526 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.404e+02 5.111e+02 6.361e+02 7.451e+02 1.172e+03, threshold=1.272e+03, percent-clipped=0.0 +2023-04-02 20:44:26,104 INFO [train.py:903] (1/4) Epoch 22, batch 950, loss[loss=0.1974, simple_loss=0.2866, pruned_loss=0.05411, over 19577.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2892, pruned_loss=0.06506, over 3789932.21 frames. ], batch size: 61, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:44:30,635 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 20:45:27,352 INFO [train.py:903] (1/4) Epoch 22, batch 1000, loss[loss=0.1526, simple_loss=0.2363, pruned_loss=0.03449, over 19756.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2891, pruned_loss=0.06494, over 3800058.93 frames. ], batch size: 48, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:17,113 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=144429.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:46:17,963 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 20:46:22,212 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 5.215e+02 6.539e+02 8.059e+02 1.779e+03, threshold=1.308e+03, percent-clipped=4.0 +2023-04-02 20:46:26,890 INFO [train.py:903] (1/4) Epoch 22, batch 1050, loss[loss=0.177, simple_loss=0.2611, pruned_loss=0.04643, over 19619.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2883, pruned_loss=0.06432, over 3820814.09 frames. ], batch size: 50, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:53,402 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2207, 5.1682, 5.9906, 6.0056, 1.9882, 5.5613, 4.7968, 5.6339], + device='cuda:1'), covar=tensor([0.1661, 0.0761, 0.0535, 0.0607, 0.6204, 0.0778, 0.0616, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0771, 0.0731, 0.0933, 0.0821, 0.0823, 0.0695, 0.0564, 0.0865], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 20:47:00,693 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-02 20:47:07,858 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6729, 4.2879, 2.6408, 3.7846, 1.0396, 4.2034, 4.1284, 4.1947], + device='cuda:1'), covar=tensor([0.0578, 0.0904, 0.1980, 0.0822, 0.3959, 0.0639, 0.0868, 0.0961], + device='cuda:1'), in_proj_covar=tensor([0.0496, 0.0407, 0.0486, 0.0342, 0.0400, 0.0427, 0.0421, 0.0457], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:47:26,635 INFO [train.py:903] (1/4) Epoch 22, batch 1100, loss[loss=0.2043, simple_loss=0.2875, pruned_loss=0.06056, over 19674.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2878, pruned_loss=0.06435, over 3825026.54 frames. ], batch size: 53, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:48:21,825 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.380e+02 5.103e+02 6.169e+02 7.547e+02 2.403e+03, threshold=1.234e+03, percent-clipped=2.0 +2023-04-02 20:48:27,954 INFO [train.py:903] (1/4) Epoch 22, batch 1150, loss[loss=0.2095, simple_loss=0.2926, pruned_loss=0.06327, over 19653.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2896, pruned_loss=0.0649, over 3825877.42 frames. ], batch size: 58, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:49:28,339 INFO [train.py:903] (1/4) Epoch 22, batch 1200, loss[loss=0.1893, simple_loss=0.2644, pruned_loss=0.05709, over 19471.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2883, pruned_loss=0.06429, over 3837215.38 frames. ], batch size: 49, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:49:59,948 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 20:50:23,754 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.877e+02 6.112e+02 7.869e+02 2.071e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 20:50:27,125 INFO [train.py:903] (1/4) Epoch 22, batch 1250, loss[loss=0.2197, simple_loss=0.2928, pruned_loss=0.07328, over 19844.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06445, over 3824841.49 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:50:33,308 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4403, 2.2844, 2.2132, 2.5344, 2.3233, 2.0433, 2.0947, 2.3597], + device='cuda:1'), covar=tensor([0.0745, 0.1230, 0.1065, 0.0765, 0.1004, 0.0458, 0.1125, 0.0568], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0357, 0.0312, 0.0249, 0.0299, 0.0250, 0.0308, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:51:26,175 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4724, 1.5930, 2.0357, 1.8476, 3.1381, 4.0712, 3.9788, 4.4361], + device='cuda:1'), covar=tensor([0.1594, 0.3622, 0.3115, 0.2175, 0.0679, 0.0263, 0.0181, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0320, 0.0349, 0.0263, 0.0241, 0.0184, 0.0214, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 20:51:28,163 INFO [train.py:903] (1/4) Epoch 22, batch 1300, loss[loss=0.205, simple_loss=0.2916, pruned_loss=0.05922, over 19640.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06441, over 3818831.04 frames. 
], batch size: 57, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:51:33,959 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7550, 1.2329, 1.4491, 1.5773, 3.3222, 1.0093, 2.2389, 3.7586], + device='cuda:1'), covar=tensor([0.0483, 0.2842, 0.2927, 0.1763, 0.0730, 0.2605, 0.1428, 0.0236], + device='cuda:1'), in_proj_covar=tensor([0.0404, 0.0361, 0.0381, 0.0343, 0.0370, 0.0347, 0.0374, 0.0398], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:51:46,031 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-02 20:51:59,483 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 20:52:26,777 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.679e+02 5.951e+02 8.140e+02 2.957e+03, threshold=1.190e+03, percent-clipped=7.0 +2023-04-02 20:52:30,268 INFO [train.py:903] (1/4) Epoch 22, batch 1350, loss[loss=0.2281, simple_loss=0.3063, pruned_loss=0.07497, over 13646.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06469, over 3816948.96 frames. ], batch size: 138, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:39,356 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6360, 1.6965, 1.5689, 1.3250, 1.3287, 1.3828, 0.3052, 0.6802], + device='cuda:1'), covar=tensor([0.0679, 0.0610, 0.0419, 0.0637, 0.1233, 0.0751, 0.1323, 0.1071], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0355, 0.0357, 0.0381, 0.0457, 0.0385, 0.0334, 0.0339], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 20:53:12,557 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=144773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:53:31,338 INFO [train.py:903] (1/4) Epoch 22, batch 1400, loss[loss=0.2108, simple_loss=0.2858, pruned_loss=0.06785, over 19583.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.06457, over 3820599.14 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:53:43,605 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.41 vs. limit=5.0 +2023-04-02 20:54:08,463 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9868, 4.5150, 2.7722, 3.9478, 1.2871, 4.4573, 4.3176, 4.4969], + device='cuda:1'), covar=tensor([0.0495, 0.0921, 0.1943, 0.0832, 0.3622, 0.0651, 0.0885, 0.0925], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0411, 0.0494, 0.0344, 0.0404, 0.0431, 0.0424, 0.0460], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 20:54:28,426 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.383e+02 4.808e+02 5.945e+02 7.380e+02 1.517e+03, threshold=1.189e+03, percent-clipped=2.0 +2023-04-02 20:54:29,516 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 20:54:31,678 INFO [train.py:903] (1/4) Epoch 22, batch 1450, loss[loss=0.2607, simple_loss=0.3257, pruned_loss=0.09787, over 19518.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2888, pruned_loss=0.06496, over 3809331.47 frames. ], batch size: 54, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:55:30,804 INFO [train.py:903] (1/4) Epoch 22, batch 1500, loss[loss=0.1986, simple_loss=0.2793, pruned_loss=0.05901, over 19603.00 frames. 
], tot_loss[loss=0.2092, simple_loss=0.2885, pruned_loss=0.06496, over 3818399.87 frames. ], batch size: 52, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:55:31,148 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:55:33,125 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1352, 1.3280, 1.6728, 0.9503, 2.3594, 3.0518, 2.7529, 3.2032], + device='cuda:1'), covar=tensor([0.1632, 0.3665, 0.3287, 0.2639, 0.0593, 0.0206, 0.0264, 0.0320], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0320, 0.0350, 0.0265, 0.0242, 0.0185, 0.0215, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 20:55:33,218 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3620, 1.3940, 1.6141, 1.5817, 2.2769, 1.9225, 2.3039, 0.8426], + device='cuda:1'), covar=tensor([0.2679, 0.4535, 0.2934, 0.2100, 0.1603, 0.2432, 0.1531, 0.4834], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0649, 0.0718, 0.0488, 0.0625, 0.0533, 0.0666, 0.0554], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 20:56:27,870 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.957e+02 5.987e+02 8.036e+02 1.770e+03, threshold=1.197e+03, percent-clipped=5.0 +2023-04-02 20:56:31,410 INFO [train.py:903] (1/4) Epoch 22, batch 1550, loss[loss=0.1772, simple_loss=0.2616, pruned_loss=0.04637, over 19425.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2867, pruned_loss=0.0643, over 3819379.91 frames. ], batch size: 48, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:57:30,408 INFO [train.py:903] (1/4) Epoch 22, batch 1600, loss[loss=0.2514, simple_loss=0.3217, pruned_loss=0.09048, over 18743.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2863, pruned_loss=0.06393, over 3826642.48 frames. ], batch size: 74, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:57:50,817 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 20:58:02,387 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:58:27,680 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.884e+02 5.870e+02 7.908e+02 1.403e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-02 20:58:31,152 INFO [train.py:903] (1/4) Epoch 22, batch 1650, loss[loss=0.1978, simple_loss=0.2769, pruned_loss=0.05931, over 19379.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2864, pruned_loss=0.06395, over 3831597.56 frames. ], batch size: 48, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:58:39,542 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:26,986 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:31,372 INFO [train.py:903] (1/4) Epoch 22, batch 1700, loss[loss=0.2146, simple_loss=0.3013, pruned_loss=0.06392, over 19787.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2861, pruned_loss=0.06388, over 3834290.20 frames. ], batch size: 56, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:59:52,393 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. 
limit=5.0 +2023-04-02 21:00:08,584 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 21:00:27,996 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 4.845e+02 6.228e+02 7.810e+02 2.223e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 21:00:33,049 INFO [train.py:903] (1/4) Epoch 22, batch 1750, loss[loss=0.2358, simple_loss=0.304, pruned_loss=0.08382, over 19739.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2857, pruned_loss=0.06349, over 3837104.23 frames. ], batch size: 51, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:00:40,273 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145144.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:09,176 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145169.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:31,689 INFO [train.py:903] (1/4) Epoch 22, batch 1800, loss[loss=0.217, simple_loss=0.2914, pruned_loss=0.0713, over 19469.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2859, pruned_loss=0.06323, over 3848924.60 frames. ], batch size: 49, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:01:55,259 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0220, 1.2111, 1.5531, 0.6281, 2.0342, 2.4376, 2.1440, 2.6342], + device='cuda:1'), covar=tensor([0.1576, 0.3911, 0.3482, 0.2799, 0.0623, 0.0300, 0.0355, 0.0362], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 21:02:27,946 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.845e+02 5.086e+02 5.993e+02 7.804e+02 1.410e+03, threshold=1.199e+03, percent-clipped=2.0 +2023-04-02 21:02:27,979 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 21:02:31,535 INFO [train.py:903] (1/4) Epoch 22, batch 1850, loss[loss=0.201, simple_loss=0.2765, pruned_loss=0.0627, over 19412.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2867, pruned_loss=0.06368, over 3836767.57 frames. ], batch size: 48, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:04,110 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 21:03:30,820 INFO [train.py:903] (1/4) Epoch 22, batch 1900, loss[loss=0.1901, simple_loss=0.2822, pruned_loss=0.04898, over 19783.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2867, pruned_loss=0.0637, over 3834075.88 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:45,039 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9405, 4.2687, 4.6805, 4.6681, 2.0814, 4.3290, 3.7562, 4.3859], + device='cuda:1'), covar=tensor([0.1533, 0.1281, 0.0572, 0.0645, 0.5598, 0.1050, 0.0685, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0775, 0.0737, 0.0934, 0.0825, 0.0825, 0.0698, 0.0561, 0.0872], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 21:03:48,272 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 21:03:52,776 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-02 21:04:15,286 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 21:04:26,521 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.830e+02 5.286e+02 6.049e+02 6.874e+02 1.450e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 21:04:30,790 INFO [train.py:903] (1/4) Epoch 22, batch 1950, loss[loss=0.2131, simple_loss=0.2911, pruned_loss=0.06753, over 19563.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2863, pruned_loss=0.06354, over 3818653.83 frames. ], batch size: 61, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:04:44,259 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0786, 1.9927, 1.7405, 2.0849, 1.9369, 1.7852, 1.7033, 2.0209], + device='cuda:1'), covar=tensor([0.0990, 0.1324, 0.1377, 0.0989, 0.1203, 0.0553, 0.1346, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0356, 0.0312, 0.0249, 0.0300, 0.0250, 0.0309, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:04:55,831 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:31,552 INFO [train.py:903] (1/4) Epoch 22, batch 2000, loss[loss=0.1993, simple_loss=0.2842, pruned_loss=0.05718, over 19681.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2859, pruned_loss=0.0633, over 3822688.90 frames. ], batch size: 53, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:05:32,826 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:32,963 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:19,675 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:27,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 4.801e+02 6.060e+02 7.916e+02 1.266e+03, threshold=1.212e+03, percent-clipped=1.0 +2023-04-02 21:06:27,634 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 21:06:30,165 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1724, 1.3218, 1.7843, 1.4678, 2.7542, 3.8191, 3.5754, 4.0753], + device='cuda:1'), covar=tensor([0.1697, 0.3748, 0.3229, 0.2232, 0.0570, 0.0180, 0.0195, 0.0225], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 21:06:30,896 INFO [train.py:903] (1/4) Epoch 22, batch 2050, loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06432, over 19379.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2859, pruned_loss=0.06291, over 3831988.49 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:06:46,543 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 21:06:46,573 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 21:07:06,389 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 21:07:13,295 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:07:30,911 INFO [train.py:903] (1/4) Epoch 22, batch 2100, loss[loss=0.2418, simple_loss=0.3175, pruned_loss=0.08302, over 19624.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2856, pruned_loss=0.06294, over 3831770.10 frames. ], batch size: 61, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:07:51,393 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:08:01,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 21:08:22,535 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 21:08:27,086 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 4.926e+02 6.113e+02 7.931e+02 1.598e+03, threshold=1.223e+03, percent-clipped=5.0 +2023-04-02 21:08:30,642 INFO [train.py:903] (1/4) Epoch 22, batch 2150, loss[loss=0.1966, simple_loss=0.279, pruned_loss=0.05711, over 19784.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2863, pruned_loss=0.06344, over 3843045.37 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:08:38,969 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:09:32,294 INFO [train.py:903] (1/4) Epoch 22, batch 2200, loss[loss=0.2287, simple_loss=0.3036, pruned_loss=0.07691, over 18798.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2856, pruned_loss=0.06324, over 3840408.08 frames. ], batch size: 74, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:10:00,388 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145612.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:10:08,469 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4264, 1.3502, 1.3402, 1.7739, 1.4596, 1.6293, 1.7410, 1.5118], + device='cuda:1'), covar=tensor([0.0912, 0.0944, 0.1083, 0.0701, 0.0794, 0.0808, 0.0825, 0.0733], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0221, 0.0226, 0.0240, 0.0225, 0.0212, 0.0186, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 21:10:29,863 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.056e+02 6.179e+02 8.064e+02 2.249e+03, threshold=1.236e+03, percent-clipped=3.0 +2023-04-02 21:10:32,074 INFO [train.py:903] (1/4) Epoch 22, batch 2250, loss[loss=0.1977, simple_loss=0.2828, pruned_loss=0.05628, over 19769.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2857, pruned_loss=0.06341, over 3840344.42 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:11:01,366 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:11:33,804 INFO [train.py:903] (1/4) Epoch 22, batch 2300, loss[loss=0.2171, simple_loss=0.2984, pruned_loss=0.06785, over 19733.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2861, pruned_loss=0.06353, over 3832875.71 frames. ], batch size: 63, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:11:45,977 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-02 21:12:22,713 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:27,054 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:30,301 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.899e+02 6.109e+02 7.402e+02 2.135e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 21:12:32,755 INFO [train.py:903] (1/4) Epoch 22, batch 2350, loss[loss=0.1651, simple_loss=0.2474, pruned_loss=0.04141, over 15512.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2869, pruned_loss=0.06371, over 3829834.64 frames. ], batch size: 34, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:12:54,262 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:01,087 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:14,123 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 21:13:31,586 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 21:13:31,995 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:35,682 INFO [train.py:903] (1/4) Epoch 22, batch 2400, loss[loss=0.3188, simple_loss=0.3831, pruned_loss=0.1272, over 19533.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2873, pruned_loss=0.0639, over 3842519.52 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:13:45,322 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2696, 1.2598, 1.2813, 1.3924, 1.0890, 1.3469, 1.3958, 1.3521], + device='cuda:1'), covar=tensor([0.0955, 0.0993, 0.1088, 0.0642, 0.0834, 0.0880, 0.0850, 0.0766], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0220, 0.0225, 0.0239, 0.0225, 0.0211, 0.0185, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 21:13:48,841 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:20,063 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:34,015 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.826e+02 5.747e+02 7.009e+02 1.532e+03, threshold=1.149e+03, percent-clipped=5.0 +2023-04-02 21:14:35,788 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4109, 1.4926, 1.6568, 1.7372, 1.3206, 1.6648, 1.6677, 1.5846], + device='cuda:1'), covar=tensor([0.3277, 0.2931, 0.1676, 0.1878, 0.3077, 0.1742, 0.4115, 0.2714], + device='cuda:1'), in_proj_covar=tensor([0.0893, 0.0957, 0.0714, 0.0929, 0.0873, 0.0811, 0.0840, 0.0780], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 21:14:36,508 INFO [train.py:903] (1/4) Epoch 22, batch 2450, loss[loss=0.1898, simple_loss=0.2634, pruned_loss=0.05814, over 16074.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2866, pruned_loss=0.06359, over 3828308.16 frames. 
], batch size: 35, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:14:37,948 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145839.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:14:49,298 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:52,761 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8268, 1.3009, 1.4346, 1.7311, 3.4061, 1.2596, 2.4192, 3.8802], + device='cuda:1'), covar=tensor([0.0515, 0.2909, 0.3078, 0.1825, 0.0765, 0.2439, 0.1388, 0.0232], + device='cuda:1'), in_proj_covar=tensor([0.0409, 0.0366, 0.0387, 0.0350, 0.0375, 0.0350, 0.0380, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:15:28,951 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3816, 1.4832, 1.7844, 1.5960, 2.5442, 2.1582, 2.7491, 1.1979], + device='cuda:1'), covar=tensor([0.2596, 0.4367, 0.2747, 0.2015, 0.1548, 0.2236, 0.1442, 0.4468], + device='cuda:1'), in_proj_covar=tensor([0.0535, 0.0643, 0.0712, 0.0481, 0.0618, 0.0529, 0.0663, 0.0549], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 21:15:37,466 INFO [train.py:903] (1/4) Epoch 22, batch 2500, loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06126, over 19783.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06398, over 3812621.44 frames. ], batch size: 48, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:25,695 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8788, 4.8973, 5.6078, 5.6038, 1.9579, 5.2917, 4.5778, 5.2905], + device='cuda:1'), covar=tensor([0.1852, 0.1026, 0.0625, 0.0638, 0.6436, 0.0980, 0.0622, 0.1319], + device='cuda:1'), in_proj_covar=tensor([0.0781, 0.0741, 0.0941, 0.0828, 0.0830, 0.0704, 0.0565, 0.0878], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 21:16:34,420 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.066e+02 4.836e+02 5.791e+02 7.519e+02 1.267e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 21:16:36,602 INFO [train.py:903] (1/4) Epoch 22, batch 2550, loss[loss=0.2009, simple_loss=0.2888, pruned_loss=0.05654, over 19493.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2875, pruned_loss=0.06444, over 3814651.30 frames. ], batch size: 64, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:40,550 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.27 vs. limit=5.0 +2023-04-02 21:16:59,317 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145956.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:17:15,010 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9767, 4.5054, 2.8416, 3.9886, 1.0027, 4.5432, 4.4013, 4.4866], + device='cuda:1'), covar=tensor([0.0530, 0.0860, 0.1789, 0.0796, 0.3822, 0.0611, 0.0814, 0.1140], + device='cuda:1'), in_proj_covar=tensor([0.0501, 0.0407, 0.0489, 0.0342, 0.0399, 0.0427, 0.0422, 0.0456], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:17:33,995 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-02 21:17:38,152 INFO [train.py:903] (1/4) Epoch 22, batch 2600, loss[loss=0.1915, simple_loss=0.2802, pruned_loss=0.05137, over 19683.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.06479, over 3813894.62 frames. ], batch size: 59, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:17:59,427 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:18:28,341 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1837, 1.8268, 1.4900, 1.2939, 1.6325, 1.2295, 1.2563, 1.6742], + device='cuda:1'), covar=tensor([0.0791, 0.0763, 0.1100, 0.0773, 0.0559, 0.1297, 0.0561, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0315, 0.0338, 0.0264, 0.0248, 0.0336, 0.0290, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:18:38,072 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.022e+02 6.231e+02 7.783e+02 1.698e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 21:18:40,370 INFO [train.py:903] (1/4) Epoch 22, batch 2650, loss[loss=0.1975, simple_loss=0.2882, pruned_loss=0.05339, over 19523.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2877, pruned_loss=0.0642, over 3819597.21 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:19:00,423 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 21:19:21,366 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146071.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:19:35,495 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 21:19:41,307 INFO [train.py:903] (1/4) Epoch 22, batch 2700, loss[loss=0.2258, simple_loss=0.3074, pruned_loss=0.07211, over 19672.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.289, pruned_loss=0.06474, over 3812464.60 frames. ], batch size: 55, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:20:00,917 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:20,723 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:32,037 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:39,333 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.787e+02 6.288e+02 8.148e+02 2.582e+03, threshold=1.258e+03, percent-clipped=4.0 +2023-04-02 21:20:41,747 INFO [train.py:903] (1/4) Epoch 22, batch 2750, loss[loss=0.2207, simple_loss=0.3027, pruned_loss=0.06933, over 18724.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2875, pruned_loss=0.06381, over 3811786.29 frames. ], batch size: 74, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:21:05,843 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. 
limit=5.0 +2023-04-02 21:21:08,977 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0818, 1.0767, 1.6362, 1.0798, 2.2014, 2.9857, 2.8054, 3.3518], + device='cuda:1'), covar=tensor([0.1776, 0.5135, 0.4355, 0.2589, 0.0707, 0.0290, 0.0307, 0.0297], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0320, 0.0352, 0.0265, 0.0242, 0.0186, 0.0215, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 21:21:18,147 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:21:37,433 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146183.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:21:43,820 INFO [train.py:903] (1/4) Epoch 22, batch 2800, loss[loss=0.1858, simple_loss=0.2656, pruned_loss=0.05297, over 19612.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.287, pruned_loss=0.0638, over 3805835.40 frames. ], batch size: 50, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:21:47,751 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.06 vs. limit=5.0 +2023-04-02 21:22:42,905 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 4.555e+02 5.863e+02 7.335e+02 1.249e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-02 21:22:45,127 INFO [train.py:903] (1/4) Epoch 22, batch 2850, loss[loss=0.2474, simple_loss=0.3229, pruned_loss=0.08599, over 19669.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06406, over 3799440.55 frames. ], batch size: 60, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:42,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 21:23:45,161 INFO [train.py:903] (1/4) Epoch 22, batch 2900, loss[loss=0.2058, simple_loss=0.2718, pruned_loss=0.06989, over 19758.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2876, pruned_loss=0.06397, over 3803056.07 frames. ], batch size: 47, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:51,884 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4128, 1.3031, 1.3314, 1.6924, 1.3573, 1.5496, 1.6280, 1.4725], + device='cuda:1'), covar=tensor([0.0868, 0.0990, 0.1064, 0.0723, 0.0948, 0.0889, 0.0922, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0239, 0.0225, 0.0210, 0.0186, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 21:23:57,291 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146298.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:24:33,155 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:24:43,683 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 4.789e+02 5.854e+02 7.393e+02 1.532e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-04-02 21:24:45,870 INFO [train.py:903] (1/4) Epoch 22, batch 2950, loss[loss=0.1721, simple_loss=0.2464, pruned_loss=0.04885, over 19756.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2878, pruned_loss=0.06395, over 3806054.92 frames. 
], batch size: 48, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:25:04,169 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:32,559 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:33,970 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-02 21:25:46,774 INFO [train.py:903] (1/4) Epoch 22, batch 3000, loss[loss=0.191, simple_loss=0.2709, pruned_loss=0.05556, over 19719.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.06386, over 3817281.09 frames. ], batch size: 51, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:25:46,774 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 21:25:59,181 INFO [train.py:937] (1/4) Epoch 22, validation: loss=0.1687, simple_loss=0.2687, pruned_loss=0.0344, over 944034.00 frames. +2023-04-02 21:25:59,182 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 21:26:02,615 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 21:26:16,133 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:26:58,620 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 5.066e+02 6.686e+02 8.533e+02 1.871e+03, threshold=1.337e+03, percent-clipped=6.0 +2023-04-02 21:26:59,750 INFO [train.py:903] (1/4) Epoch 22, batch 3050, loss[loss=0.2029, simple_loss=0.2862, pruned_loss=0.05974, over 19754.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.06395, over 3820348.66 frames. ], batch size: 54, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:00,892 INFO [train.py:903] (1/4) Epoch 22, batch 3100, loss[loss=0.2053, simple_loss=0.2868, pruned_loss=0.06187, over 19516.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2873, pruned_loss=0.06398, over 3809103.37 frames. 
], batch size: 54, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:15,869 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1286, 1.3243, 1.9863, 1.4742, 3.1442, 4.5514, 4.3980, 4.9521], + device='cuda:1'), covar=tensor([0.1780, 0.4091, 0.3373, 0.2442, 0.0633, 0.0216, 0.0187, 0.0227], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0215, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 21:28:27,617 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146511.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:28:38,309 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7434, 1.4177, 1.4459, 2.3017, 1.7396, 2.0542, 2.1989, 1.6987], + device='cuda:1'), covar=tensor([0.0841, 0.0989, 0.1083, 0.0728, 0.0869, 0.0725, 0.0791, 0.0737], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0239, 0.0225, 0.0209, 0.0186, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 21:28:59,181 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.077e+02 6.322e+02 8.082e+02 1.628e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 21:29:00,363 INFO [train.py:903] (1/4) Epoch 22, batch 3150, loss[loss=0.1974, simple_loss=0.2821, pruned_loss=0.05632, over 19687.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06403, over 3824209.95 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:29:19,896 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146554.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:29:29,200 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 21:29:31,673 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2144, 1.2972, 1.2205, 1.0570, 1.0646, 1.0556, 0.0690, 0.3451], + device='cuda:1'), covar=tensor([0.0663, 0.0634, 0.0457, 0.0578, 0.1329, 0.0658, 0.1366, 0.1194], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0357, 0.0359, 0.0384, 0.0460, 0.0388, 0.0338, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 21:29:47,812 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4551, 2.5091, 2.6635, 3.1068, 2.5732, 3.0229, 2.6600, 2.4915], + device='cuda:1'), covar=tensor([0.3574, 0.3041, 0.1535, 0.2021, 0.3253, 0.1577, 0.3662, 0.2629], + device='cuda:1'), in_proj_covar=tensor([0.0897, 0.0961, 0.0717, 0.0930, 0.0878, 0.0814, 0.0844, 0.0781], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 21:29:51,122 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146579.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:30:00,733 INFO [train.py:903] (1/4) Epoch 22, batch 3200, loss[loss=0.2133, simple_loss=0.2882, pruned_loss=0.0692, over 19669.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2877, pruned_loss=0.06396, over 3822971.15 frames. 
], batch size: 53, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:30:28,084 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:30:47,436 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:01,670 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 4.719e+02 5.878e+02 7.469e+02 1.229e+03, threshold=1.176e+03, percent-clipped=0.0 +2023-04-02 21:31:02,532 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 21:31:02,813 INFO [train.py:903] (1/4) Epoch 22, batch 3250, loss[loss=0.1756, simple_loss=0.2644, pruned_loss=0.0434, over 19777.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2882, pruned_loss=0.06406, over 3815569.91 frames. ], batch size: 54, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:31:10,874 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:32:03,196 INFO [train.py:903] (1/4) Epoch 22, batch 3300, loss[loss=0.2327, simple_loss=0.3086, pruned_loss=0.07838, over 19690.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2884, pruned_loss=0.06376, over 3833403.88 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:32:08,318 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 21:32:09,864 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 21:32:59,756 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.268e+02 6.638e+02 8.509e+02 1.642e+03, threshold=1.328e+03, percent-clipped=7.0 +2023-04-02 21:33:00,762 INFO [train.py:903] (1/4) Epoch 22, batch 3350, loss[loss=0.1785, simple_loss=0.2543, pruned_loss=0.05134, over 19292.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06364, over 3837585.19 frames. ], batch size: 44, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:00,058 INFO [train.py:903] (1/4) Epoch 22, batch 3400, loss[loss=0.2081, simple_loss=0.2833, pruned_loss=0.06643, over 19850.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06291, over 3843775.38 frames. ], batch size: 52, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:59,708 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 5.013e+02 6.159e+02 8.066e+02 2.491e+03, threshold=1.232e+03, percent-clipped=4.0 +2023-04-02 21:35:00,914 INFO [train.py:903] (1/4) Epoch 22, batch 3450, loss[loss=0.1978, simple_loss=0.2736, pruned_loss=0.06101, over 19577.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06359, over 3842949.17 frames. ], batch size: 52, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:35:04,226 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 21:35:28,804 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146862.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:35:54,084 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146882.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:36:01,443 INFO [train.py:903] (1/4) Epoch 22, batch 3500, loss[loss=0.2047, simple_loss=0.2715, pruned_loss=0.06895, over 19743.00 frames. 
], tot_loss[loss=0.2079, simple_loss=0.2878, pruned_loss=0.06396, over 3834974.49 frames. ], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:36:23,412 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:37:00,107 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.850e+02 4.615e+02 6.325e+02 8.235e+02 2.059e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 21:37:01,338 INFO [train.py:903] (1/4) Epoch 22, batch 3550, loss[loss=0.1782, simple_loss=0.2596, pruned_loss=0.04837, over 19371.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06428, over 3821011.81 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:37:18,262 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:37:40,823 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-02 21:38:02,337 INFO [train.py:903] (1/4) Epoch 22, batch 3600, loss[loss=0.1942, simple_loss=0.2655, pruned_loss=0.06145, over 19740.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06447, over 3811509.18 frames. ], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:38:02,532 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:39:01,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.391e+02 4.948e+02 6.297e+02 8.667e+02 2.605e+03, threshold=1.259e+03, percent-clipped=8.0 +2023-04-02 21:39:02,706 INFO [train.py:903] (1/4) Epoch 22, batch 3650, loss[loss=0.2564, simple_loss=0.3361, pruned_loss=0.08838, over 19622.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2885, pruned_loss=0.06466, over 3817285.16 frames. ], batch size: 57, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:39:39,077 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:03,868 INFO [train.py:903] (1/4) Epoch 22, batch 3700, loss[loss=0.2531, simple_loss=0.339, pruned_loss=0.08358, over 19577.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2889, pruned_loss=0.0648, over 3823069.26 frames. ], batch size: 61, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:40:21,170 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:58,544 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5636, 4.1181, 2.6189, 3.6112, 0.7811, 4.1088, 3.9876, 4.0993], + device='cuda:1'), covar=tensor([0.0656, 0.1066, 0.2119, 0.0937, 0.4330, 0.0761, 0.0980, 0.1224], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0410, 0.0492, 0.0343, 0.0401, 0.0431, 0.0425, 0.0459], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:41:02,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.816e+02 5.960e+02 7.144e+02 1.653e+03, threshold=1.192e+03, percent-clipped=4.0 +2023-04-02 21:41:04,071 INFO [train.py:903] (1/4) Epoch 22, batch 3750, loss[loss=0.1724, simple_loss=0.2535, pruned_loss=0.04566, over 19389.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2884, pruned_loss=0.06454, over 3829886.35 frames. 
], batch size: 48, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:41:21,578 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4023, 1.4379, 1.7058, 1.6379, 2.3337, 2.1968, 2.5079, 1.1321], + device='cuda:1'), covar=tensor([0.2581, 0.4296, 0.2633, 0.1969, 0.1621, 0.2166, 0.1493, 0.4478], + device='cuda:1'), in_proj_covar=tensor([0.0536, 0.0643, 0.0714, 0.0482, 0.0618, 0.0531, 0.0663, 0.0549], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 21:42:04,512 INFO [train.py:903] (1/4) Epoch 22, batch 3800, loss[loss=0.2086, simple_loss=0.2875, pruned_loss=0.06488, over 19521.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06511, over 3833693.16 frames. ], batch size: 54, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:42:26,706 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147206.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:42:38,900 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 21:43:02,581 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.867e+02 5.077e+02 5.970e+02 7.548e+02 1.289e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 21:43:03,540 INFO [train.py:903] (1/4) Epoch 22, batch 3850, loss[loss=0.2514, simple_loss=0.319, pruned_loss=0.09196, over 18751.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2893, pruned_loss=0.0648, over 3828487.18 frames. ], batch size: 74, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:43:09,388 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7791, 2.4749, 2.2775, 2.7835, 2.4455, 2.2949, 2.3573, 2.6424], + device='cuda:1'), covar=tensor([0.0969, 0.1716, 0.1524, 0.1045, 0.1406, 0.0580, 0.1266, 0.0704], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0354, 0.0309, 0.0249, 0.0301, 0.0251, 0.0308, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:43:31,164 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147259.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:05,364 INFO [train.py:903] (1/4) Epoch 22, batch 3900, loss[loss=0.195, simple_loss=0.2804, pruned_loss=0.05477, over 19685.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06394, over 3832700.53 frames. ], batch size: 58, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:44:28,433 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.49 vs. 
limit=5.0 +2023-04-02 21:44:41,115 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5992, 1.2650, 1.5503, 1.3237, 2.2367, 1.0269, 2.0980, 2.5276], + device='cuda:1'), covar=tensor([0.0745, 0.2860, 0.2737, 0.1652, 0.0951, 0.2064, 0.1038, 0.0460], + device='cuda:1'), in_proj_covar=tensor([0.0406, 0.0364, 0.0385, 0.0348, 0.0372, 0.0347, 0.0379, 0.0402], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:44:46,187 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:49,711 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:55,086 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:04,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 4.890e+02 6.799e+02 8.609e+02 1.784e+03, threshold=1.360e+03, percent-clipped=9.0 +2023-04-02 21:45:05,887 INFO [train.py:903] (1/4) Epoch 22, batch 3950, loss[loss=0.2207, simple_loss=0.3052, pruned_loss=0.06814, over 19652.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2882, pruned_loss=0.06432, over 3821676.50 frames. ], batch size: 58, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:45:08,142 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 21:45:18,239 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:30,140 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:00,818 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:04,831 INFO [train.py:903] (1/4) Epoch 22, batch 4000, loss[loss=0.2135, simple_loss=0.3019, pruned_loss=0.06257, over 19455.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06383, over 3836546.56 frames. ], batch size: 64, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:46:50,142 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:52,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 21:46:54,086 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 21:47:03,832 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.354e+02 4.920e+02 5.925e+02 7.748e+02 1.160e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-04-02 21:47:05,868 INFO [train.py:903] (1/4) Epoch 22, batch 4050, loss[loss=0.1854, simple_loss=0.2636, pruned_loss=0.05363, over 15171.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2877, pruned_loss=0.06358, over 3822472.96 frames. ], batch size: 33, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:47:24,618 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:47:45,016 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-02 21:48:07,527 INFO [train.py:903] (1/4) Epoch 22, batch 4100, loss[loss=0.2298, simple_loss=0.308, pruned_loss=0.07584, over 19740.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06387, over 3839847.84 frames. ], batch size: 63, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:48:44,892 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 21:49:07,909 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.254e+02 6.349e+02 7.493e+02 1.711e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-04-02 21:49:09,106 INFO [train.py:903] (1/4) Epoch 22, batch 4150, loss[loss=0.1768, simple_loss=0.2591, pruned_loss=0.04722, over 19598.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06332, over 3848078.47 frames. ], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:49:57,000 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:09,107 INFO [train.py:903] (1/4) Epoch 22, batch 4200, loss[loss=0.2392, simple_loss=0.3245, pruned_loss=0.07695, over 19659.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.0635, over 3846698.27 frames. ], batch size: 55, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:50:13,787 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 21:50:26,602 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:27,412 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147603.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:09,770 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.933e+02 5.671e+02 7.351e+02 2.024e+03, threshold=1.134e+03, percent-clipped=5.0 +2023-04-02 21:51:10,933 INFO [train.py:903] (1/4) Epoch 22, batch 4250, loss[loss=0.1822, simple_loss=0.2656, pruned_loss=0.04936, over 19580.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2863, pruned_loss=0.06303, over 3840922.97 frames. ], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:51:25,894 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 21:51:38,186 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 21:51:51,752 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:57,371 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:52:11,226 INFO [train.py:903] (1/4) Epoch 22, batch 4300, loss[loss=0.1756, simple_loss=0.2597, pruned_loss=0.04575, over 19614.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2864, pruned_loss=0.0632, over 3840748.32 frames. ], batch size: 50, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:52:47,215 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:53:02,954 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-02 21:53:11,620 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 5.055e+02 6.219e+02 8.115e+02 2.735e+03, threshold=1.244e+03, percent-clipped=11.0 +2023-04-02 21:53:11,639 INFO [train.py:903] (1/4) Epoch 22, batch 4350, loss[loss=0.2077, simple_loss=0.2997, pruned_loss=0.05784, over 18832.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2876, pruned_loss=0.06411, over 3828832.11 frames. ], batch size: 74, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:53:48,691 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:11,229 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:12,000 INFO [train.py:903] (1/4) Epoch 22, batch 4400, loss[loss=0.2152, simple_loss=0.3025, pruned_loss=0.06396, over 19674.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06353, over 3831588.41 frames. ], batch size: 58, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:54:20,675 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:37,150 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 21:54:46,692 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 21:55:06,301 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:12,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.137e+02 6.374e+02 7.743e+02 1.534e+03, threshold=1.275e+03, percent-clipped=3.0 +2023-04-02 21:55:12,533 INFO [train.py:903] (1/4) Epoch 22, batch 4450, loss[loss=0.219, simple_loss=0.2997, pruned_loss=0.06913, over 18269.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2876, pruned_loss=0.06381, over 3826666.03 frames. ], batch size: 83, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:55:57,428 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:57,463 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9751, 1.7245, 1.9939, 1.7900, 4.5113, 1.2804, 2.5746, 4.9116], + device='cuda:1'), covar=tensor([0.0440, 0.2596, 0.2700, 0.2013, 0.0788, 0.2569, 0.1416, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0348, 0.0373, 0.0349, 0.0382, 0.0404], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:56:08,760 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:56:12,929 INFO [train.py:903] (1/4) Epoch 22, batch 4500, loss[loss=0.1587, simple_loss=0.241, pruned_loss=0.03815, over 19396.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2869, pruned_loss=0.06345, over 3830977.56 frames. 
], batch size: 48, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:56:41,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147911.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:03,781 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:15,409 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 4.535e+02 5.624e+02 7.235e+02 1.683e+03, threshold=1.125e+03, percent-clipped=3.0 +2023-04-02 21:57:15,427 INFO [train.py:903] (1/4) Epoch 22, batch 4550, loss[loss=0.2421, simple_loss=0.3227, pruned_loss=0.08081, over 18594.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2868, pruned_loss=0.06375, over 3820877.14 frames. ], batch size: 74, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:57:23,455 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 21:57:46,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 21:57:58,605 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:15,826 INFO [train.py:903] (1/4) Epoch 22, batch 4600, loss[loss=0.2101, simple_loss=0.3013, pruned_loss=0.05942, over 19526.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2872, pruned_loss=0.06389, over 3827398.68 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:58:28,677 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:56,679 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:04,092 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 21:59:12,793 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1030, 1.2607, 1.4097, 1.5548, 2.7231, 1.0820, 2.1813, 3.1063], + device='cuda:1'), covar=tensor([0.0584, 0.2741, 0.3046, 0.1610, 0.0754, 0.2312, 0.1164, 0.0313], + device='cuda:1'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0347, 0.0374, 0.0348, 0.0381, 0.0403], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 21:59:16,087 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.857e+02 6.032e+02 8.227e+02 1.754e+03, threshold=1.206e+03, percent-clipped=4.0 +2023-04-02 21:59:16,110 INFO [train.py:903] (1/4) Epoch 22, batch 4650, loss[loss=0.1726, simple_loss=0.2488, pruned_loss=0.04822, over 19772.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2877, pruned_loss=0.0643, over 3814285.73 frames. ], batch size: 47, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 21:59:22,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:32,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 21:59:43,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-02 21:59:53,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:00:16,031 INFO [train.py:903] (1/4) Epoch 22, batch 4700, loss[loss=0.2327, simple_loss=0.3147, pruned_loss=0.07539, over 19678.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2876, pruned_loss=0.06432, over 3822819.49 frames. ], batch size: 59, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:00:39,952 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 22:01:15,707 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:17,615 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.448e+02 6.322e+02 7.572e+02 1.580e+03, threshold=1.264e+03, percent-clipped=4.0 +2023-04-02 22:01:17,634 INFO [train.py:903] (1/4) Epoch 22, batch 4750, loss[loss=0.201, simple_loss=0.2865, pruned_loss=0.05772, over 19594.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2868, pruned_loss=0.06384, over 3830722.86 frames. ], batch size: 61, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:01:21,283 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:34,524 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-02 22:01:50,456 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:52,735 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:05,544 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148177.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:06,208 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 22:02:18,752 INFO [train.py:903] (1/4) Epoch 22, batch 4800, loss[loss=0.2177, simple_loss=0.3034, pruned_loss=0.06599, over 19506.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06408, over 3820936.06 frames. ], batch size: 64, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:02:23,657 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:57,694 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:03:18,987 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.799e+02 5.785e+02 7.279e+02 1.291e+03, threshold=1.157e+03, percent-clipped=1.0 +2023-04-02 22:03:19,007 INFO [train.py:903] (1/4) Epoch 22, batch 4850, loss[loss=0.209, simple_loss=0.296, pruned_loss=0.06099, over 19608.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2876, pruned_loss=0.06467, over 3802562.29 frames. ], batch size: 57, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:03:44,995 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-02 22:04:01,912 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148273.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:02,921 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 22:04:08,081 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 22:04:09,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 22:04:18,711 INFO [train.py:903] (1/4) Epoch 22, batch 4900, loss[loss=0.2404, simple_loss=0.3105, pruned_loss=0.08518, over 17511.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2886, pruned_loss=0.06519, over 3814466.88 frames. ], batch size: 101, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:04:18,721 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 22:04:24,312 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:39,207 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 22:04:40,586 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148305.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:14,498 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:19,513 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.396e+02 6.572e+02 8.433e+02 1.736e+03, threshold=1.314e+03, percent-clipped=6.0 +2023-04-02 22:05:19,535 INFO [train.py:903] (1/4) Epoch 22, batch 4950, loss[loss=0.1897, simple_loss=0.2679, pruned_loss=0.0557, over 19836.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2871, pruned_loss=0.06443, over 3820343.74 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:05:26,426 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.29 vs. limit=5.0 +2023-04-02 22:05:35,936 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 22:06:01,177 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 22:06:20,897 INFO [train.py:903] (1/4) Epoch 22, batch 5000, loss[loss=0.2055, simple_loss=0.2864, pruned_loss=0.06232, over 19737.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2873, pruned_loss=0.06438, over 3813921.00 frames. ], batch size: 51, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:06:21,239 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:25,711 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:29,617 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 22:06:40,551 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 22:06:55,410 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:19,245 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.597e+02 4.828e+02 6.337e+02 7.923e+02 1.456e+03, threshold=1.267e+03, percent-clipped=1.0 +2023-04-02 22:07:19,264 INFO [train.py:903] (1/4) Epoch 22, batch 5050, loss[loss=0.2108, simple_loss=0.2931, pruned_loss=0.06424, over 19535.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06499, over 3807704.39 frames. ], batch size: 54, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:07:30,067 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148447.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:46,920 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3990, 4.0032, 2.6769, 3.4976, 1.0026, 3.9587, 3.8211, 3.9145], + device='cuda:1'), covar=tensor([0.0661, 0.0987, 0.1916, 0.0894, 0.3850, 0.0656, 0.0962, 0.1094], + device='cuda:1'), in_proj_covar=tensor([0.0502, 0.0409, 0.0492, 0.0344, 0.0398, 0.0432, 0.0424, 0.0458], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:07:54,473 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 22:08:19,394 INFO [train.py:903] (1/4) Epoch 22, batch 5100, loss[loss=0.1788, simple_loss=0.2677, pruned_loss=0.04495, over 19670.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2886, pruned_loss=0.06504, over 3817801.77 frames. ], batch size: 55, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:08:21,060 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 22:08:30,475 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 22:08:33,794 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 22:08:39,204 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 22:08:53,267 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148516.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:19,535 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.468e+02 6.941e+02 9.893e+02 2.948e+03, threshold=1.388e+03, percent-clipped=12.0 +2023-04-02 22:09:19,553 INFO [train.py:903] (1/4) Epoch 22, batch 5150, loss[loss=0.2211, simple_loss=0.3021, pruned_loss=0.07006, over 18797.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2886, pruned_loss=0.06486, over 3817907.22 frames. ], batch size: 74, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:09:31,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 22:09:32,946 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:40,839 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-02 22:10:02,829 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:05,908 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:10:20,790 INFO [train.py:903] (1/4) Epoch 22, batch 5200, loss[loss=0.1853, simple_loss=0.2634, pruned_loss=0.05362, over 19490.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.288, pruned_loss=0.06477, over 3808869.84 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:10:23,519 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:33,168 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 22:10:53,683 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:58,738 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 22:11:17,518 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 22:11:21,001 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.558e+02 5.776e+02 7.251e+02 2.001e+03, threshold=1.155e+03, percent-clipped=2.0 +2023-04-02 22:11:21,019 INFO [train.py:903] (1/4) Epoch 22, batch 5250, loss[loss=0.1705, simple_loss=0.2444, pruned_loss=0.04833, over 19760.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.287, pruned_loss=0.06375, over 3821597.60 frames. ], batch size: 48, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:11:27,728 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9674, 2.0908, 2.2922, 2.0812, 3.6661, 1.7374, 2.9798, 3.7244], + device='cuda:1'), covar=tensor([0.0439, 0.2094, 0.2101, 0.1649, 0.0603, 0.2147, 0.1496, 0.0271], + device='cuda:1'), in_proj_covar=tensor([0.0410, 0.0367, 0.0387, 0.0348, 0.0375, 0.0351, 0.0384, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:11:27,830 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:33,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:35,101 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5712, 1.4853, 2.1561, 1.6603, 3.1236, 4.7985, 4.6905, 5.1383], + device='cuda:1'), covar=tensor([0.1500, 0.3643, 0.3011, 0.2253, 0.0594, 0.0159, 0.0153, 0.0204], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0326, 0.0357, 0.0268, 0.0248, 0.0190, 0.0218, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 22:11:58,260 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:58,362 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:12:20,277 INFO [train.py:903] (1/4) Epoch 22, batch 5300, loss[loss=0.1998, simple_loss=0.2772, pruned_loss=0.06126, over 19423.00 frames. 
], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.06455, over 3820165.61 frames. ], batch size: 48, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:12:39,144 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 22:13:17,745 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:22,172 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.679e+02 5.309e+02 6.457e+02 8.011e+02 2.116e+03, threshold=1.291e+03, percent-clipped=5.0 +2023-04-02 22:13:22,191 INFO [train.py:903] (1/4) Epoch 22, batch 5350, loss[loss=0.2323, simple_loss=0.3091, pruned_loss=0.07772, over 17452.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2876, pruned_loss=0.06399, over 3819967.31 frames. ], batch size: 101, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:13:47,488 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9913, 2.0747, 2.3372, 2.7153, 2.0539, 2.5802, 2.3409, 2.1166], + device='cuda:1'), covar=tensor([0.4385, 0.4044, 0.1866, 0.2444, 0.4180, 0.2128, 0.4675, 0.3252], + device='cuda:1'), in_proj_covar=tensor([0.0897, 0.0961, 0.0716, 0.0929, 0.0877, 0.0814, 0.0840, 0.0780], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 22:13:50,705 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8867, 1.2688, 1.5944, 1.6758, 4.1031, 1.3149, 2.7871, 4.5799], + device='cuda:1'), covar=tensor([0.0559, 0.3872, 0.3526, 0.2434, 0.1195, 0.3084, 0.1467, 0.0296], + device='cuda:1'), in_proj_covar=tensor([0.0411, 0.0368, 0.0387, 0.0347, 0.0375, 0.0350, 0.0383, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:13:53,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:55,496 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 22:14:03,664 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1066, 1.2502, 1.4372, 1.5003, 2.6969, 1.1284, 2.1886, 3.1009], + device='cuda:1'), covar=tensor([0.0601, 0.2948, 0.3117, 0.1784, 0.0792, 0.2496, 0.1277, 0.0322], + device='cuda:1'), in_proj_covar=tensor([0.0412, 0.0369, 0.0388, 0.0348, 0.0376, 0.0351, 0.0384, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:14:24,408 INFO [train.py:903] (1/4) Epoch 22, batch 5400, loss[loss=0.2263, simple_loss=0.3054, pruned_loss=0.07358, over 19649.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2876, pruned_loss=0.06392, over 3817966.00 frames. ], batch size: 58, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:14:28,068 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:15:24,092 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 4.882e+02 5.931e+02 8.184e+02 2.288e+03, threshold=1.186e+03, percent-clipped=7.0 +2023-04-02 22:15:24,110 INFO [train.py:903] (1/4) Epoch 22, batch 5450, loss[loss=0.2151, simple_loss=0.2954, pruned_loss=0.06742, over 19424.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2872, pruned_loss=0.06354, over 3816661.47 frames. 
], batch size: 70, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:15:50,187 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:24,051 INFO [train.py:903] (1/4) Epoch 22, batch 5500, loss[loss=0.1696, simple_loss=0.2458, pruned_loss=0.04664, over 19792.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2872, pruned_loss=0.06395, over 3817001.84 frames. ], batch size: 47, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:16:47,564 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:49,402 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 22:16:50,910 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6289, 1.4201, 1.9335, 1.6337, 3.0242, 4.5042, 4.2885, 4.8607], + device='cuda:1'), covar=tensor([0.1458, 0.3750, 0.3346, 0.2316, 0.0645, 0.0197, 0.0183, 0.0208], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0324, 0.0356, 0.0266, 0.0247, 0.0189, 0.0216, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 22:17:25,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.956e+02 6.031e+02 8.410e+02 1.981e+03, threshold=1.206e+03, percent-clipped=11.0 +2023-04-02 22:17:25,289 INFO [train.py:903] (1/4) Epoch 22, batch 5550, loss[loss=0.2247, simple_loss=0.3102, pruned_loss=0.06963, over 19614.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2883, pruned_loss=0.06442, over 3813979.62 frames. ], batch size: 57, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:17:33,842 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 22:17:57,660 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5801, 1.9796, 2.1562, 2.0640, 3.3021, 1.7606, 2.8617, 3.5319], + device='cuda:1'), covar=tensor([0.0434, 0.2278, 0.2284, 0.1598, 0.0569, 0.2080, 0.1697, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0409, 0.0367, 0.0386, 0.0347, 0.0375, 0.0351, 0.0382, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:18:10,799 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:18:21,568 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 22:18:27,000 INFO [train.py:903] (1/4) Epoch 22, batch 5600, loss[loss=0.2006, simple_loss=0.2903, pruned_loss=0.05544, over 19784.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2881, pruned_loss=0.06447, over 3815062.74 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:18:56,786 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149013.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:06,002 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:10,576 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-02 22:19:27,601 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 5.096e+02 6.059e+02 8.103e+02 1.757e+03, threshold=1.212e+03, percent-clipped=10.0 +2023-04-02 22:19:27,620 INFO [train.py:903] (1/4) Epoch 22, batch 5650, loss[loss=0.2005, simple_loss=0.2829, pruned_loss=0.05905, over 19669.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2873, pruned_loss=0.06387, over 3816669.56 frames. ], batch size: 60, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:19:30,328 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3751, 2.2364, 2.0271, 1.8841, 1.7501, 1.9535, 0.6165, 1.3172], + device='cuda:1'), covar=tensor([0.0585, 0.0552, 0.0485, 0.0853, 0.1178, 0.0882, 0.1357, 0.0988], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0357, 0.0359, 0.0382, 0.0462, 0.0390, 0.0337, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 22:19:35,829 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:15,160 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 22:20:16,295 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:28,284 INFO [train.py:903] (1/4) Epoch 22, batch 5700, loss[loss=0.2157, simple_loss=0.2984, pruned_loss=0.06654, over 19730.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2867, pruned_loss=0.06361, over 3821109.62 frames. ], batch size: 63, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:17,779 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:21:29,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.618e+02 6.077e+02 7.635e+02 1.470e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-04-02 22:21:29,615 INFO [train.py:903] (1/4) Epoch 22, batch 5750, loss[loss=0.1856, simple_loss=0.2773, pruned_loss=0.04688, over 19685.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2865, pruned_loss=0.06341, over 3822131.67 frames. ], batch size: 59, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:30,805 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 22:21:39,627 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 22:21:46,351 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 22:21:59,246 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:03,892 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1473, 1.2068, 1.3305, 1.3898, 1.0620, 1.3212, 1.3676, 1.2478], + device='cuda:1'), covar=tensor([0.2889, 0.2303, 0.1380, 0.1586, 0.2573, 0.1453, 0.3400, 0.2362], + device='cuda:1'), in_proj_covar=tensor([0.0897, 0.0962, 0.0718, 0.0933, 0.0880, 0.0814, 0.0841, 0.0781], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 22:22:29,841 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:30,653 INFO [train.py:903] (1/4) Epoch 22, batch 5800, loss[loss=0.1856, simple_loss=0.2804, pruned_loss=0.04533, over 19534.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2865, pruned_loss=0.06364, over 3826718.57 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:22:37,174 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:23,071 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149231.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:30,407 INFO [train.py:903] (1/4) Epoch 22, batch 5850, loss[loss=0.201, simple_loss=0.2856, pruned_loss=0.0582, over 19511.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06376, over 3832307.62 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:23:31,585 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.174e+02 6.346e+02 7.936e+02 1.645e+03, threshold=1.269e+03, percent-clipped=7.0 +2023-04-02 22:23:39,957 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6645, 2.3897, 2.1529, 2.6756, 2.2682, 2.2289, 2.0925, 2.5521], + device='cuda:1'), covar=tensor([0.0974, 0.1631, 0.1473, 0.1159, 0.1473, 0.0553, 0.1401, 0.0718], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0354, 0.0312, 0.0250, 0.0300, 0.0250, 0.0309, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:23:52,928 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:24:19,399 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7538, 1.5729, 1.5922, 2.1792, 1.6259, 2.0784, 2.1321, 1.8179], + device='cuda:1'), covar=tensor([0.0861, 0.0991, 0.1034, 0.0804, 0.0938, 0.0735, 0.0826, 0.0706], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0223, 0.0226, 0.0241, 0.0229, 0.0213, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 22:24:30,286 INFO [train.py:903] (1/4) Epoch 22, batch 5900, loss[loss=0.1984, simple_loss=0.2802, pruned_loss=0.05836, over 19846.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2874, pruned_loss=0.06405, over 3830493.56 frames. ], batch size: 52, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:24:35,580 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-02 22:24:56,377 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 22:25:31,743 INFO [train.py:903] (1/4) Epoch 22, batch 5950, loss[loss=0.2853, simple_loss=0.3481, pruned_loss=0.1112, over 19787.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2865, pruned_loss=0.06382, over 3835156.17 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:25:32,889 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.792e+02 4.957e+02 5.985e+02 7.132e+02 1.534e+03, threshold=1.197e+03, percent-clipped=1.0 +2023-04-02 22:26:26,172 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 22:26:28,504 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:26:33,220 INFO [train.py:903] (1/4) Epoch 22, batch 6000, loss[loss=0.2604, simple_loss=0.3259, pruned_loss=0.09746, over 13451.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2867, pruned_loss=0.06369, over 3831150.10 frames. ], batch size: 136, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:26:33,220 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 22:26:40,115 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4093, 1.4842, 1.4390, 1.8287, 1.4789, 1.6622, 1.6477, 1.5987], + device='cuda:1'), covar=tensor([0.1012, 0.0960, 0.1066, 0.0686, 0.0960, 0.0863, 0.0993, 0.0732], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0223, 0.0226, 0.0241, 0.0229, 0.0214, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 22:26:43,948 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0791, 3.5143, 3.5901, 3.6154, 1.9995, 3.2660, 3.1611, 3.3976], + device='cuda:1'), covar=tensor([0.1512, 0.0705, 0.0618, 0.0625, 0.4855, 0.1120, 0.0622, 0.0934], + device='cuda:1'), in_proj_covar=tensor([0.0783, 0.0743, 0.0949, 0.0833, 0.0835, 0.0714, 0.0566, 0.0883], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 22:26:46,899 INFO [train.py:937] (1/4) Epoch 22, validation: loss=0.1681, simple_loss=0.2682, pruned_loss=0.03398, over 944034.00 frames. +2023-04-02 22:26:46,900 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 22:27:13,578 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:27:48,666 INFO [train.py:903] (1/4) Epoch 22, batch 6050, loss[loss=0.1637, simple_loss=0.2482, pruned_loss=0.03959, over 19401.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2854, pruned_loss=0.06258, over 3838344.81 frames. ], batch size: 48, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:27:49,809 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.880e+02 5.766e+02 7.280e+02 1.810e+03, threshold=1.153e+03, percent-clipped=3.0 +2023-04-02 22:27:52,759 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.65 vs. 
limit=5.0 +2023-04-02 22:28:02,643 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:07,324 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5495, 2.2309, 1.6113, 1.5975, 2.0967, 1.3646, 1.4690, 1.9527], + device='cuda:1'), covar=tensor([0.1090, 0.0933, 0.1177, 0.0816, 0.0524, 0.1317, 0.0777, 0.0563], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0266, 0.0248, 0.0338, 0.0291, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:28:32,585 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:50,329 INFO [train.py:903] (1/4) Epoch 22, batch 6100, loss[loss=0.2116, simple_loss=0.2995, pruned_loss=0.06191, over 19775.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.286, pruned_loss=0.06258, over 3840365.36 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:28:59,850 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9820, 1.8919, 1.7321, 1.5636, 1.4914, 1.5596, 0.3658, 0.8752], + device='cuda:1'), covar=tensor([0.0659, 0.0674, 0.0473, 0.0737, 0.1299, 0.0893, 0.1399, 0.1179], + device='cuda:1'), in_proj_covar=tensor([0.0356, 0.0355, 0.0358, 0.0381, 0.0462, 0.0389, 0.0336, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 22:29:02,457 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.56 vs. limit=5.0 +2023-04-02 22:29:42,763 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:29:48,971 INFO [train.py:903] (1/4) Epoch 22, batch 6150, loss[loss=0.182, simple_loss=0.2565, pruned_loss=0.05376, over 19756.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2863, pruned_loss=0.06308, over 3825131.97 frames. 
], batch size: 46, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:49,331 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5040, 1.4542, 1.4543, 1.7888, 1.2180, 1.6675, 1.6835, 1.6102], + device='cuda:1'), covar=tensor([0.0848, 0.0925, 0.0968, 0.0678, 0.0917, 0.0798, 0.0863, 0.0702], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0224, 0.0240, 0.0227, 0.0212, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 22:29:50,024 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.761e+02 6.063e+02 7.648e+02 1.908e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 22:29:50,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7446, 2.4478, 2.2548, 2.6804, 2.3302, 2.2897, 2.1898, 2.6562], + device='cuda:1'), covar=tensor([0.0890, 0.1557, 0.1398, 0.1064, 0.1385, 0.0512, 0.1314, 0.0657], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0354, 0.0312, 0.0250, 0.0301, 0.0250, 0.0309, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:30:15,360 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6466, 2.8775, 3.0728, 3.0651, 1.7322, 2.8727, 2.6259, 2.8943], + device='cuda:1'), covar=tensor([0.1362, 0.3088, 0.0657, 0.0790, 0.4176, 0.1534, 0.0630, 0.1003], + device='cuda:1'), in_proj_covar=tensor([0.0779, 0.0740, 0.0943, 0.0827, 0.0831, 0.0708, 0.0563, 0.0877], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 22:30:19,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 22:30:29,460 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.93 vs. limit=5.0 +2023-04-02 22:30:43,281 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:30:49,419 INFO [train.py:903] (1/4) Epoch 22, batch 6200, loss[loss=0.1942, simple_loss=0.2845, pruned_loss=0.052, over 17959.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2874, pruned_loss=0.06365, over 3819611.58 frames. ], batch size: 83, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:31:51,294 INFO [train.py:903] (1/4) Epoch 22, batch 6250, loss[loss=0.2085, simple_loss=0.2967, pruned_loss=0.06011, over 19544.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06361, over 3796598.80 frames. ], batch size: 54, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:31:52,383 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.068e+02 6.178e+02 8.268e+02 1.694e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 22:32:21,864 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 22:32:52,464 INFO [train.py:903] (1/4) Epoch 22, batch 6300, loss[loss=0.1999, simple_loss=0.2875, pruned_loss=0.05615, over 19566.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06437, over 3783776.07 frames. 
], batch size: 61, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:32:59,448 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7285, 1.6538, 1.5907, 2.1449, 1.5104, 2.0486, 2.0808, 1.8577], + device='cuda:1'), covar=tensor([0.0842, 0.0941, 0.0980, 0.0796, 0.0935, 0.0734, 0.0870, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0220, 0.0222, 0.0238, 0.0226, 0.0211, 0.0186, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 22:33:51,829 INFO [train.py:903] (1/4) Epoch 22, batch 6350, loss[loss=0.1771, simple_loss=0.249, pruned_loss=0.05265, over 19736.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.288, pruned_loss=0.06418, over 3795373.76 frames. ], batch size: 46, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:33:52,935 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.551e+02 6.531e+02 8.044e+02 1.579e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-02 22:34:23,920 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149764.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:34:52,496 INFO [train.py:903] (1/4) Epoch 22, batch 6400, loss[loss=0.2181, simple_loss=0.2997, pruned_loss=0.06827, over 17251.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06448, over 3785350.83 frames. ], batch size: 101, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:54,292 INFO [train.py:903] (1/4) Epoch 22, batch 6450, loss[loss=0.2151, simple_loss=0.2952, pruned_loss=0.06755, over 19753.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.06473, over 3783558.55 frames. ], batch size: 63, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:55,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.788e+02 5.699e+02 7.070e+02 1.580e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-02 22:36:39,133 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 22:36:40,524 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:36:54,745 INFO [train.py:903] (1/4) Epoch 22, batch 6500, loss[loss=0.2511, simple_loss=0.3315, pruned_loss=0.0853, over 19664.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.289, pruned_loss=0.06506, over 3787583.70 frames. ], batch size: 55, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:00,223 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 22:37:25,307 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.1568, 4.3140, 4.7686, 4.7901, 2.8512, 4.4191, 4.0499, 4.5034], + device='cuda:1'), covar=tensor([0.1408, 0.2473, 0.0596, 0.0619, 0.4160, 0.1078, 0.0616, 0.0990], + device='cuda:1'), in_proj_covar=tensor([0.0784, 0.0743, 0.0950, 0.0827, 0.0835, 0.0710, 0.0565, 0.0879], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 22:37:41,914 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:37:55,100 INFO [train.py:903] (1/4) Epoch 22, batch 6550, loss[loss=0.1717, simple_loss=0.2546, pruned_loss=0.0444, over 19596.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2892, pruned_loss=0.06474, over 3794266.44 frames. 
], batch size: 52, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:56,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 4.654e+02 5.933e+02 7.304e+02 1.667e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 22:38:45,123 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1389, 1.9680, 1.7645, 2.1136, 1.8778, 1.7806, 1.7678, 1.9940], + device='cuda:1'), covar=tensor([0.0963, 0.1431, 0.1406, 0.0972, 0.1320, 0.0552, 0.1302, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0354, 0.0312, 0.0250, 0.0302, 0.0251, 0.0308, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:38:55,839 INFO [train.py:903] (1/4) Epoch 22, batch 6600, loss[loss=0.2087, simple_loss=0.2929, pruned_loss=0.06221, over 17548.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2869, pruned_loss=0.06342, over 3798973.22 frames. ], batch size: 101, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:38:59,623 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:39:59,884 INFO [train.py:903] (1/4) Epoch 22, batch 6650, loss[loss=0.2122, simple_loss=0.2987, pruned_loss=0.06286, over 19544.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06326, over 3811233.09 frames. ], batch size: 56, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:40:01,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 4.673e+02 5.867e+02 7.414e+02 1.313e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 22:40:04,739 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:40:59,358 INFO [train.py:903] (1/4) Epoch 22, batch 6700, loss[loss=0.2154, simple_loss=0.3055, pruned_loss=0.06261, over 19778.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2878, pruned_loss=0.06383, over 3810735.37 frames. ], batch size: 56, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:10,389 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:41:10,939 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 22:41:23,647 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150108.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:41:57,264 INFO [train.py:903] (1/4) Epoch 22, batch 6750, loss[loss=0.1642, simple_loss=0.2484, pruned_loss=0.04003, over 19828.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.06375, over 3811375.19 frames. 
], batch size: 52, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:58,374 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 4.879e+02 6.504e+02 7.654e+02 1.720e+03, threshold=1.301e+03, percent-clipped=5.0 +2023-04-02 22:42:17,387 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7468, 2.5236, 2.2551, 2.7125, 2.3468, 1.9777, 2.1510, 2.7049], + device='cuda:1'), covar=tensor([0.0915, 0.1562, 0.1434, 0.1018, 0.1423, 0.0706, 0.1473, 0.0650], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0352, 0.0312, 0.0249, 0.0301, 0.0250, 0.0308, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:42:53,133 INFO [train.py:903] (1/4) Epoch 22, batch 6800, loss[loss=0.2197, simple_loss=0.2979, pruned_loss=0.07077, over 19443.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06414, over 3812725.19 frames. ], batch size: 70, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:43:38,752 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 22:43:39,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 22:43:42,629 INFO [train.py:903] (1/4) Epoch 23, batch 0, loss[loss=0.2153, simple_loss=0.2952, pruned_loss=0.06771, over 19039.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2952, pruned_loss=0.06771, over 19039.00 frames. ], batch size: 69, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:43:42,630 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 22:43:54,256 INFO [train.py:937] (1/4) Epoch 23, validation: loss=0.1688, simple_loss=0.2693, pruned_loss=0.03418, over 944034.00 frames. +2023-04-02 22:43:54,257 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 22:43:54,666 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4582, 1.6005, 2.0530, 1.7057, 3.3077, 2.6561, 3.5596, 1.6921], + device='cuda:1'), covar=tensor([0.2489, 0.4191, 0.2533, 0.1879, 0.1352, 0.2004, 0.1414, 0.3989], + device='cuda:1'), in_proj_covar=tensor([0.0536, 0.0643, 0.0714, 0.0483, 0.0620, 0.0531, 0.0662, 0.0548], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 22:44:03,498 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150223.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:44:06,626 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 22:44:21,436 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.848e+02 5.561e+02 7.527e+02 1.735e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-04-02 22:44:31,849 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:44:55,828 INFO [train.py:903] (1/4) Epoch 23, batch 50, loss[loss=0.2111, simple_loss=0.2905, pruned_loss=0.06591, over 19674.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2865, pruned_loss=0.06235, over 858439.33 frames. 
], batch size: 58, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:45:03,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:27,176 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:30,282 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 22:45:36,450 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:57,949 INFO [train.py:903] (1/4) Epoch 23, batch 100, loss[loss=0.2134, simple_loss=0.2974, pruned_loss=0.06474, over 19726.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2856, pruned_loss=0.06142, over 1524057.76 frames. ], batch size: 59, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:46:06,459 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:46:07,253 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 22:46:26,516 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.910e+02 5.630e+02 7.676e+02 1.557e+03, threshold=1.126e+03, percent-clipped=7.0 +2023-04-02 22:46:59,548 INFO [train.py:903] (1/4) Epoch 23, batch 150, loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05904, over 19613.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.06159, over 2048709.22 frames. ], batch size: 57, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:47:22,284 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8060, 4.3865, 2.7556, 3.8423, 0.9441, 4.3052, 4.2113, 4.3422], + device='cuda:1'), covar=tensor([0.0569, 0.0872, 0.1918, 0.0798, 0.3969, 0.0621, 0.0851, 0.1000], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0411, 0.0496, 0.0346, 0.0399, 0.0432, 0.0425, 0.0459], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:47:23,512 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8490, 1.7981, 1.8148, 2.2254, 1.8026, 2.0418, 2.1080, 1.9804], + device='cuda:1'), covar=tensor([0.0761, 0.0820, 0.0855, 0.0723, 0.0859, 0.0797, 0.0866, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0222, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 22:47:59,882 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 22:48:00,288 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6365, 1.5468, 1.5834, 2.0532, 1.5519, 1.8694, 1.9324, 1.7492], + device='cuda:1'), covar=tensor([0.0839, 0.0962, 0.0972, 0.0764, 0.0852, 0.0823, 0.0917, 0.0698], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0222, 0.0239, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 22:48:01,003 INFO [train.py:903] (1/4) Epoch 23, batch 200, loss[loss=0.1975, simple_loss=0.2782, pruned_loss=0.05846, over 19758.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2867, pruned_loss=0.06281, over 2453725.38 frames. 
], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:48:01,414 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6136, 1.5548, 1.5556, 1.9976, 1.5392, 1.8872, 1.8693, 1.7551], + device='cuda:1'), covar=tensor([0.0870, 0.0950, 0.0998, 0.0801, 0.0873, 0.0804, 0.0942, 0.0709], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0222, 0.0239, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 22:48:26,087 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 22:48:30,849 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 5.350e+02 6.653e+02 9.712e+02 2.771e+03, threshold=1.331e+03, percent-clipped=16.0 +2023-04-02 22:48:33,192 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150441.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:48:41,449 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5062, 2.2226, 1.6677, 1.5334, 2.1116, 1.4018, 1.3898, 1.8680], + device='cuda:1'), covar=tensor([0.1141, 0.0844, 0.1089, 0.0796, 0.0567, 0.1188, 0.0810, 0.0585], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0317, 0.0339, 0.0267, 0.0247, 0.0336, 0.0291, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:49:02,669 INFO [train.py:903] (1/4) Epoch 23, batch 250, loss[loss=0.1981, simple_loss=0.2631, pruned_loss=0.06657, over 19752.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2878, pruned_loss=0.064, over 2760206.11 frames. ], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:49:20,004 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150479.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:49:36,997 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9191, 4.3877, 4.6664, 4.6571, 1.8415, 4.3881, 3.8672, 4.3962], + device='cuda:1'), covar=tensor([0.1673, 0.0792, 0.0613, 0.0647, 0.6020, 0.0839, 0.0630, 0.1142], + device='cuda:1'), in_proj_covar=tensor([0.0784, 0.0744, 0.0951, 0.0832, 0.0836, 0.0712, 0.0566, 0.0878], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 22:49:49,524 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3061, 3.0140, 2.1894, 2.6761, 0.7278, 2.9867, 2.8731, 3.0049], + device='cuda:1'), covar=tensor([0.1172, 0.1393, 0.2134, 0.1156, 0.3926, 0.0964, 0.1158, 0.1353], + device='cuda:1'), in_proj_covar=tensor([0.0504, 0.0412, 0.0497, 0.0345, 0.0400, 0.0433, 0.0425, 0.0460], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:49:49,695 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150504.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:50:05,858 INFO [train.py:903] (1/4) Epoch 23, batch 300, loss[loss=0.241, simple_loss=0.3225, pruned_loss=0.07969, over 19506.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06363, over 3007829.08 frames. 
], batch size: 64, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:50:34,494 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.024e+02 5.928e+02 7.198e+02 2.066e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 22:50:49,307 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0821, 4.5050, 4.8154, 4.8263, 1.9234, 4.4904, 3.9552, 4.5132], + device='cuda:1'), covar=tensor([0.1616, 0.0731, 0.0572, 0.0633, 0.5686, 0.0733, 0.0643, 0.1130], + device='cuda:1'), in_proj_covar=tensor([0.0788, 0.0745, 0.0955, 0.0833, 0.0838, 0.0713, 0.0568, 0.0880], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 22:50:54,923 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150555.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:50:56,024 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:51:07,068 INFO [train.py:903] (1/4) Epoch 23, batch 350, loss[loss=0.1849, simple_loss=0.2764, pruned_loss=0.04665, over 19670.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2865, pruned_loss=0.06354, over 3186207.46 frames. ], batch size: 58, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:51:11,927 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:52:09,968 INFO [train.py:903] (1/4) Epoch 23, batch 400, loss[loss=0.2083, simple_loss=0.272, pruned_loss=0.07233, over 19778.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2867, pruned_loss=0.06412, over 3325213.49 frames. ], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:52:17,553 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3179, 1.9104, 1.5066, 1.3525, 1.7944, 1.3055, 1.3505, 1.7563], + device='cuda:1'), covar=tensor([0.0929, 0.0894, 0.0960, 0.0801, 0.0532, 0.1163, 0.0656, 0.0458], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0316, 0.0339, 0.0266, 0.0246, 0.0336, 0.0290, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:52:36,591 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:52:40,893 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.962e+02 5.093e+02 6.486e+02 8.008e+02 1.724e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 22:53:11,935 INFO [train.py:903] (1/4) Epoch 23, batch 450, loss[loss=0.2166, simple_loss=0.287, pruned_loss=0.07312, over 19464.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2855, pruned_loss=0.06346, over 3447235.69 frames. ], batch size: 49, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:53:46,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 22:53:46,076 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-02 22:54:13,701 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7856, 2.1711, 1.6990, 1.6663, 2.0596, 1.5575, 1.6857, 1.9501], + device='cuda:1'), covar=tensor([0.0862, 0.0698, 0.0778, 0.0727, 0.0493, 0.1066, 0.0582, 0.0476], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0315, 0.0339, 0.0265, 0.0246, 0.0336, 0.0289, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:54:15,720 INFO [train.py:903] (1/4) Epoch 23, batch 500, loss[loss=0.2313, simple_loss=0.3088, pruned_loss=0.0769, over 19118.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2855, pruned_loss=0.06366, over 3497281.78 frames. ], batch size: 69, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:54:45,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 5.191e+02 6.635e+02 8.528e+02 2.142e+03, threshold=1.327e+03, percent-clipped=5.0 +2023-04-02 22:54:57,353 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:54:58,656 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:55:17,063 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. limit=5.0 +2023-04-02 22:55:17,408 INFO [train.py:903] (1/4) Epoch 23, batch 550, loss[loss=0.2072, simple_loss=0.2881, pruned_loss=0.06313, over 19608.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2861, pruned_loss=0.06383, over 3573527.71 frames. ], batch size: 50, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:55:30,133 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0081, 1.8856, 1.6760, 2.0738, 1.8415, 1.7374, 1.6942, 1.9774], + device='cuda:1'), covar=tensor([0.0980, 0.1330, 0.1407, 0.0932, 0.1203, 0.0541, 0.1397, 0.0670], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0353, 0.0313, 0.0250, 0.0302, 0.0251, 0.0309, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:56:09,314 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-02 22:56:14,500 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:18,589 INFO [train.py:903] (1/4) Epoch 23, batch 600, loss[loss=0.1765, simple_loss=0.2517, pruned_loss=0.0507, over 19742.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2859, pruned_loss=0.06374, over 3624508.43 frames. 
], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:56:38,343 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9837, 2.0701, 2.2431, 2.7310, 2.0512, 2.5802, 2.3051, 2.1001], + device='cuda:1'), covar=tensor([0.4324, 0.3922, 0.1908, 0.2370, 0.4199, 0.2198, 0.4771, 0.3331], + device='cuda:1'), in_proj_covar=tensor([0.0905, 0.0971, 0.0720, 0.0934, 0.0886, 0.0822, 0.0846, 0.0788], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 22:56:45,511 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150837.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:48,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+02 5.000e+02 5.859e+02 6.998e+02 1.831e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-02 22:56:59,190 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 22:57:21,218 INFO [train.py:903] (1/4) Epoch 23, batch 650, loss[loss=0.2209, simple_loss=0.3004, pruned_loss=0.07076, over 19506.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2873, pruned_loss=0.06441, over 3666923.47 frames. ], batch size: 64, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:02,429 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150899.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:23,644 INFO [train.py:903] (1/4) Epoch 23, batch 700, loss[loss=0.2035, simple_loss=0.2956, pruned_loss=0.0557, over 18165.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2872, pruned_loss=0.06423, over 3711399.18 frames. ], batch size: 84, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:27,264 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:52,730 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.019e+02 5.794e+02 7.164e+02 1.349e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 22:59:09,193 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4405, 1.1977, 1.3721, 1.3778, 3.0010, 1.0433, 2.2872, 3.3556], + device='cuda:1'), covar=tensor([0.0534, 0.2927, 0.3115, 0.1907, 0.0735, 0.2555, 0.1268, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0416, 0.0370, 0.0389, 0.0349, 0.0379, 0.0353, 0.0384, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:59:23,021 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2727, 3.5792, 2.0785, 2.2445, 3.3300, 1.8672, 1.6874, 2.4191], + device='cuda:1'), covar=tensor([0.1380, 0.0613, 0.1148, 0.0905, 0.0489, 0.1313, 0.1050, 0.0712], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0266, 0.0248, 0.0339, 0.0292, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 22:59:26,031 INFO [train.py:903] (1/4) Epoch 23, batch 750, loss[loss=0.1793, simple_loss=0.2487, pruned_loss=0.05492, over 19793.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2867, pruned_loss=0.06395, over 3729035.64 frames. 
], batch size: 47, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:59:51,792 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7970, 1.8877, 2.1740, 2.3514, 1.7005, 2.2361, 2.1533, 2.0126], + device='cuda:1'), covar=tensor([0.4536, 0.4005, 0.2014, 0.2388, 0.4189, 0.2176, 0.5299, 0.3474], + device='cuda:1'), in_proj_covar=tensor([0.0903, 0.0969, 0.0718, 0.0933, 0.0885, 0.0820, 0.0846, 0.0785], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 23:00:17,626 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:26,915 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:28,660 INFO [train.py:903] (1/4) Epoch 23, batch 800, loss[loss=0.2253, simple_loss=0.294, pruned_loss=0.07832, over 19580.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2869, pruned_loss=0.06398, over 3746935.69 frames. ], batch size: 52, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:46,712 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 23:00:48,233 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:57,133 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.089e+02 6.161e+02 7.478e+02 1.780e+03, threshold=1.232e+03, percent-clipped=6.0 +2023-04-02 23:01:29,764 INFO [train.py:903] (1/4) Epoch 23, batch 850, loss[loss=0.214, simple_loss=0.2955, pruned_loss=0.06631, over 19664.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2876, pruned_loss=0.06445, over 3757580.89 frames. ], batch size: 55, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:02:04,866 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:02:25,412 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 23:02:31,809 INFO [train.py:903] (1/4) Epoch 23, batch 900, loss[loss=0.2188, simple_loss=0.2883, pruned_loss=0.07462, over 19391.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2869, pruned_loss=0.06383, over 3776418.42 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:02,018 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 5.070e+02 6.543e+02 8.443e+02 1.332e+03, threshold=1.309e+03, percent-clipped=3.0 +2023-04-02 23:03:31,177 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 23:03:32,679 INFO [train.py:903] (1/4) Epoch 23, batch 950, loss[loss=0.2008, simple_loss=0.2776, pruned_loss=0.06202, over 19421.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2884, pruned_loss=0.06493, over 3787335.86 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:39,545 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 23:04:17,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-02 23:04:17,819 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:27,683 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:35,170 INFO [train.py:903] (1/4) Epoch 23, batch 1000, loss[loss=0.2481, simple_loss=0.3271, pruned_loss=0.08455, over 18305.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2892, pruned_loss=0.06524, over 3785275.40 frames. ], batch size: 83, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:04,889 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.146e+02 6.401e+02 7.951e+02 1.702e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 23:05:32,307 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 23:05:33,592 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:05:37,845 INFO [train.py:903] (1/4) Epoch 23, batch 1050, loss[loss=0.2153, simple_loss=0.2991, pruned_loss=0.06571, over 19702.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06514, over 3801193.81 frames. ], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:42,821 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151270.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:12,810 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 23:06:14,315 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:21,334 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151300.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:40,101 INFO [train.py:903] (1/4) Epoch 23, batch 1100, loss[loss=0.2044, simple_loss=0.2954, pruned_loss=0.05671, over 19697.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06473, over 3819186.78 frames. ], batch size: 58, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:06:40,413 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151316.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:07:09,143 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 5.070e+02 6.152e+02 7.617e+02 1.362e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-04-02 23:07:40,866 INFO [train.py:903] (1/4) Epoch 23, batch 1150, loss[loss=0.2367, simple_loss=0.3248, pruned_loss=0.07432, over 19610.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06452, over 3829385.08 frames. ], batch size: 61, lr: 3.60e-03, grad_scale: 4.0 +2023-04-02 23:07:55,722 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151377.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:08:43,930 INFO [train.py:903] (1/4) Epoch 23, batch 1200, loss[loss=0.2331, simple_loss=0.312, pruned_loss=0.07705, over 19701.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2897, pruned_loss=0.06494, over 3816772.05 frames. 
], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:09:10,476 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8778, 2.6287, 2.5271, 3.0130, 2.7340, 2.4210, 2.3868, 2.9571], + device='cuda:1'), covar=tensor([0.0848, 0.1528, 0.1318, 0.0961, 0.1227, 0.0484, 0.1251, 0.0551], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0355, 0.0314, 0.0251, 0.0303, 0.0252, 0.0309, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:09:14,769 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.896e+02 6.001e+02 7.643e+02 1.247e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 23:09:18,073 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 23:09:45,306 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:09:46,022 INFO [train.py:903] (1/4) Epoch 23, batch 1250, loss[loss=0.1926, simple_loss=0.2731, pruned_loss=0.05603, over 19585.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06483, over 3820874.72 frames. ], batch size: 52, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:16,512 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:10:27,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 23:10:46,707 INFO [train.py:903] (1/4) Epoch 23, batch 1300, loss[loss=0.2524, simple_loss=0.3341, pruned_loss=0.0853, over 19554.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.06539, over 3816754.21 frames. ], batch size: 61, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:55,136 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4184, 1.5499, 1.5750, 1.9179, 1.5031, 1.7931, 1.7008, 1.3619], + device='cuda:1'), covar=tensor([0.4805, 0.4208, 0.2808, 0.2871, 0.4131, 0.2526, 0.6279, 0.5089], + device='cuda:1'), in_proj_covar=tensor([0.0906, 0.0973, 0.0719, 0.0936, 0.0884, 0.0817, 0.0844, 0.0787], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 23:10:55,327 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 23:11:16,029 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 5.118e+02 6.046e+02 8.032e+02 1.744e+03, threshold=1.209e+03, percent-clipped=5.0 +2023-04-02 23:11:22,847 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:11:25,506 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7821, 1.8346, 2.0014, 1.8990, 2.6440, 2.3226, 2.6609, 1.6912], + device='cuda:1'), covar=tensor([0.1972, 0.3350, 0.2225, 0.1630, 0.1228, 0.1762, 0.1249, 0.3722], + device='cuda:1'), in_proj_covar=tensor([0.0540, 0.0650, 0.0719, 0.0490, 0.0622, 0.0534, 0.0668, 0.0555], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 23:11:46,631 INFO [train.py:903] (1/4) Epoch 23, batch 1350, loss[loss=0.1746, simple_loss=0.2471, pruned_loss=0.05107, over 19768.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2893, pruned_loss=0.06552, over 3804780.64 frames. 
], batch size: 47, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:12:48,223 INFO [train.py:903] (1/4) Epoch 23, batch 1400, loss[loss=0.1768, simple_loss=0.2621, pruned_loss=0.04579, over 19396.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2885, pruned_loss=0.06493, over 3804109.82 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:08,447 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:17,804 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 5.043e+02 6.240e+02 8.237e+02 1.280e+03, threshold=1.248e+03, percent-clipped=3.0 +2023-04-02 23:13:21,398 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:38,321 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151658.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:40,483 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151660.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:41,856 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:48,344 INFO [train.py:903] (1/4) Epoch 23, batch 1450, loss[loss=0.2022, simple_loss=0.2845, pruned_loss=0.06, over 19671.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2878, pruned_loss=0.06439, over 3812182.74 frames. ], batch size: 53, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:48,375 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 23:14:36,980 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:14:49,056 INFO [train.py:903] (1/4) Epoch 23, batch 1500, loss[loss=0.1839, simple_loss=0.2707, pruned_loss=0.04858, over 19671.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06469, over 3802191.18 frames. ], batch size: 58, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:18,488 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.905e+02 6.054e+02 7.299e+02 2.065e+03, threshold=1.211e+03, percent-clipped=4.0 +2023-04-02 23:15:40,398 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151759.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:15:47,608 INFO [train.py:903] (1/4) Epoch 23, batch 1550, loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.06069, over 19530.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06478, over 3809072.23 frames. ], batch size: 54, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:59,927 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:16:50,042 INFO [train.py:903] (1/4) Epoch 23, batch 1600, loss[loss=0.2046, simple_loss=0.2823, pruned_loss=0.06349, over 19613.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2881, pruned_loss=0.06443, over 3789491.42 frames. ], batch size: 50, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:16:53,591 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:17:10,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-02 23:17:20,214 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.863e+02 5.887e+02 6.951e+02 2.426e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-04-02 23:17:50,185 INFO [train.py:903] (1/4) Epoch 23, batch 1650, loss[loss=0.1794, simple_loss=0.2694, pruned_loss=0.04472, over 19790.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06477, over 3793398.36 frames. ], batch size: 56, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:18:39,685 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1123, 1.2647, 1.6793, 1.2563, 2.7544, 3.6869, 3.4290, 3.9614], + device='cuda:1'), covar=tensor([0.1842, 0.3973, 0.3570, 0.2645, 0.0646, 0.0219, 0.0235, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0323, 0.0355, 0.0265, 0.0244, 0.0189, 0.0217, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 23:18:51,776 INFO [train.py:903] (1/4) Epoch 23, batch 1700, loss[loss=0.1917, simple_loss=0.2811, pruned_loss=0.05115, over 19733.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2883, pruned_loss=0.06475, over 3784139.88 frames. ], batch size: 63, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:18:53,362 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:21,471 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.018e+02 6.073e+02 7.613e+02 1.748e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 23:19:23,163 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:26,196 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 23:19:52,379 INFO [train.py:903] (1/4) Epoch 23, batch 1750, loss[loss=0.254, simple_loss=0.3194, pruned_loss=0.09432, over 13248.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2879, pruned_loss=0.06438, over 3781077.05 frames. ], batch size: 135, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:20:53,697 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:20:54,416 INFO [train.py:903] (1/4) Epoch 23, batch 1800, loss[loss=0.2133, simple_loss=0.2969, pruned_loss=0.06485, over 17428.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2865, pruned_loss=0.06391, over 3783832.86 frames. 
], batch size: 101, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:21:13,138 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:24,577 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152040.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:26,671 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 5.200e+02 6.601e+02 8.823e+02 1.720e+03, threshold=1.320e+03, percent-clipped=12.0 +2023-04-02 23:21:36,337 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:40,757 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2114, 1.4122, 1.7495, 1.4241, 2.8442, 3.7018, 3.4236, 3.8427], + device='cuda:1'), covar=tensor([0.1746, 0.3730, 0.3426, 0.2454, 0.0622, 0.0215, 0.0218, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0324, 0.0355, 0.0265, 0.0245, 0.0188, 0.0217, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 23:21:44,253 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:48,437 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 23:21:55,071 INFO [train.py:903] (1/4) Epoch 23, batch 1850, loss[loss=0.2063, simple_loss=0.2891, pruned_loss=0.06179, over 19669.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.0641, over 3781869.49 frames. ], batch size: 60, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:22:26,618 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 23:22:54,485 INFO [train.py:903] (1/4) Epoch 23, batch 1900, loss[loss=0.2494, simple_loss=0.3229, pruned_loss=0.08793, over 18797.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06457, over 3789334.74 frames. ], batch size: 74, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:23:09,879 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 23:23:16,372 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 23:23:26,803 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 4.994e+02 5.968e+02 7.712e+02 2.482e+03, threshold=1.194e+03, percent-clipped=3.0 +2023-04-02 23:23:36,535 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4957, 1.5774, 1.8764, 1.7079, 2.9797, 2.4654, 3.2515, 1.6393], + device='cuda:1'), covar=tensor([0.2563, 0.4457, 0.2854, 0.2028, 0.1719, 0.2221, 0.1678, 0.4330], + device='cuda:1'), in_proj_covar=tensor([0.0534, 0.0643, 0.0715, 0.0486, 0.0618, 0.0530, 0.0663, 0.0549], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 23:23:41,413 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-02 23:23:52,764 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:55,364 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:56,154 INFO [train.py:903] (1/4) Epoch 23, batch 1950, loss[loss=0.1964, simple_loss=0.2872, pruned_loss=0.05276, over 19603.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2866, pruned_loss=0.06343, over 3795660.98 frames. ], batch size: 57, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:24:58,530 INFO [train.py:903] (1/4) Epoch 23, batch 2000, loss[loss=0.2289, simple_loss=0.3132, pruned_loss=0.07237, over 19610.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06302, over 3794320.36 frames. ], batch size: 57, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:25:06,465 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-02 23:25:28,732 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.661e+02 5.613e+02 7.041e+02 1.127e+03, threshold=1.123e+03, percent-clipped=0.0 +2023-04-02 23:25:30,260 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:25:53,680 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 23:25:58,367 INFO [train.py:903] (1/4) Epoch 23, batch 2050, loss[loss=0.1864, simple_loss=0.2765, pruned_loss=0.04813, over 19772.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2858, pruned_loss=0.06265, over 3806169.45 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:26:13,113 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 23:26:13,448 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:14,192 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 23:26:29,838 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:36,227 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 23:26:58,782 INFO [train.py:903] (1/4) Epoch 23, batch 2100, loss[loss=0.2171, simple_loss=0.2934, pruned_loss=0.07042, over 19764.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06284, over 3803912.57 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:27:27,815 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 23:27:31,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.808e+02 5.705e+02 7.050e+02 1.568e+03, threshold=1.141e+03, percent-clipped=6.0 +2023-04-02 23:27:48,004 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 23:27:59,782 INFO [train.py:903] (1/4) Epoch 23, batch 2150, loss[loss=0.2099, simple_loss=0.2844, pruned_loss=0.06775, over 19772.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06318, over 3817332.58 frames. 
], batch size: 54, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:28:24,359 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8901, 1.7657, 1.5622, 1.9770, 1.7107, 1.6487, 1.6002, 1.8115], + device='cuda:1'), covar=tensor([0.1109, 0.1561, 0.1618, 0.1108, 0.1406, 0.0624, 0.1447, 0.0848], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0355, 0.0314, 0.0253, 0.0304, 0.0252, 0.0310, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:28:35,511 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 23:29:00,653 INFO [train.py:903] (1/4) Epoch 23, batch 2200, loss[loss=0.2506, simple_loss=0.3296, pruned_loss=0.0858, over 19662.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2871, pruned_loss=0.06372, over 3815394.35 frames. ], batch size: 60, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:29:07,494 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:31,741 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.084e+02 6.029e+02 8.181e+02 1.825e+03, threshold=1.206e+03, percent-clipped=10.0 +2023-04-02 23:29:37,606 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:58,447 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:30:01,449 INFO [train.py:903] (1/4) Epoch 23, batch 2250, loss[loss=0.2549, simple_loss=0.336, pruned_loss=0.08687, over 19379.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2868, pruned_loss=0.06317, over 3825369.81 frames. ], batch size: 70, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:31:01,771 INFO [train.py:903] (1/4) Epoch 23, batch 2300, loss[loss=0.1973, simple_loss=0.2701, pruned_loss=0.06222, over 19472.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2865, pruned_loss=0.0634, over 3818113.32 frames. ], batch size: 49, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:31:17,227 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 23:31:25,349 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:31:36,142 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.927e+02 5.902e+02 7.617e+02 2.113e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 23:31:55,694 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152559.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:04,343 INFO [train.py:903] (1/4) Epoch 23, batch 2350, loss[loss=0.2014, simple_loss=0.2891, pruned_loss=0.05689, over 19769.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2858, pruned_loss=0.06283, over 3820894.65 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:32:30,248 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:39,675 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 23:32:43,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-02 23:32:45,481 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:54,447 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:03,103 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 23:33:05,166 INFO [train.py:903] (1/4) Epoch 23, batch 2400, loss[loss=0.243, simple_loss=0.3213, pruned_loss=0.08231, over 19553.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06362, over 3812289.31 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:33:28,192 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:38,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 5.347e+02 6.485e+02 7.646e+02 1.871e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 23:34:06,659 INFO [train.py:903] (1/4) Epoch 23, batch 2450, loss[loss=0.2067, simple_loss=0.2951, pruned_loss=0.05914, over 19665.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2873, pruned_loss=0.0635, over 3800060.17 frames. ], batch size: 58, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:34:37,185 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5364, 1.5407, 1.5898, 1.8264, 3.1204, 1.3047, 2.3845, 3.6101], + device='cuda:1'), covar=tensor([0.0523, 0.2686, 0.2846, 0.1651, 0.0690, 0.2425, 0.1377, 0.0250], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0369, 0.0390, 0.0350, 0.0377, 0.0355, 0.0385, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:34:51,448 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:06,561 INFO [train.py:903] (1/4) Epoch 23, batch 2500, loss[loss=0.1807, simple_loss=0.275, pruned_loss=0.04322, over 19780.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2873, pruned_loss=0.06328, over 3810255.41 frames. 
], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:35:12,381 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0307, 1.8023, 1.5995, 1.8961, 1.6940, 1.7439, 1.6258, 1.8884], + device='cuda:1'), covar=tensor([0.1072, 0.1340, 0.1605, 0.1197, 0.1369, 0.0561, 0.1483, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0355, 0.0313, 0.0252, 0.0302, 0.0250, 0.0309, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:35:29,554 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1468, 5.6014, 3.0218, 4.8595, 0.9041, 5.6793, 5.4496, 5.7395], + device='cuda:1'), covar=tensor([0.0346, 0.0743, 0.1954, 0.0739, 0.4339, 0.0513, 0.0750, 0.1022], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0416, 0.0502, 0.0351, 0.0404, 0.0440, 0.0431, 0.0464], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:35:40,606 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.968e+02 5.949e+02 7.714e+02 2.745e+03, threshold=1.190e+03, percent-clipped=5.0 +2023-04-02 23:35:42,075 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152744.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:48,943 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:36:08,241 INFO [train.py:903] (1/4) Epoch 23, batch 2550, loss[loss=0.203, simple_loss=0.2807, pruned_loss=0.06268, over 19625.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2866, pruned_loss=0.06263, over 3815924.90 frames. ], batch size: 50, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:36:28,485 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1606, 3.4492, 2.0116, 2.0719, 3.0956, 1.6510, 1.6404, 2.3340], + device='cuda:1'), covar=tensor([0.1276, 0.0596, 0.1152, 0.0835, 0.0610, 0.1385, 0.0958, 0.0659], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0314, 0.0334, 0.0265, 0.0246, 0.0336, 0.0288, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:36:57,880 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:37:01,064 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 23:37:08,439 INFO [train.py:903] (1/4) Epoch 23, batch 2600, loss[loss=0.1939, simple_loss=0.2737, pruned_loss=0.0571, over 19736.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2854, pruned_loss=0.06253, over 3821603.47 frames. 
], batch size: 63, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:37:37,470 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9590, 0.9871, 1.0954, 1.0814, 1.3098, 1.2874, 1.2468, 0.5561], + device='cuda:1'), covar=tensor([0.1806, 0.3170, 0.1870, 0.1495, 0.1222, 0.1759, 0.1114, 0.4019], + device='cuda:1'), in_proj_covar=tensor([0.0536, 0.0647, 0.0718, 0.0487, 0.0620, 0.0533, 0.0664, 0.0552], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 23:37:40,420 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 4.724e+02 5.510e+02 7.301e+02 1.657e+03, threshold=1.102e+03, percent-clipped=4.0 +2023-04-02 23:38:08,552 INFO [train.py:903] (1/4) Epoch 23, batch 2650, loss[loss=0.1807, simple_loss=0.26, pruned_loss=0.05066, over 19801.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06345, over 3809121.19 frames. ], batch size: 49, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:38:27,766 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 23:39:08,680 INFO [train.py:903] (1/4) Epoch 23, batch 2700, loss[loss=0.2227, simple_loss=0.3021, pruned_loss=0.07167, over 19305.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2859, pruned_loss=0.06309, over 3809971.37 frames. ], batch size: 66, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:39:16,550 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:42,349 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.895e+02 5.730e+02 7.672e+02 1.465e+03, threshold=1.146e+03, percent-clipped=5.0 +2023-04-02 23:39:43,656 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:51,725 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:00,073 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:09,445 INFO [train.py:903] (1/4) Epoch 23, batch 2750, loss[loss=0.2449, simple_loss=0.3248, pruned_loss=0.08247, over 19671.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2867, pruned_loss=0.06288, over 3828799.69 frames. 
], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:40:23,475 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4368, 1.4132, 1.6437, 1.4243, 3.0616, 1.1128, 2.2750, 3.4954], + device='cuda:1'), covar=tensor([0.0530, 0.2700, 0.2619, 0.1832, 0.0697, 0.2457, 0.1214, 0.0235], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0370, 0.0390, 0.0350, 0.0375, 0.0354, 0.0383, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:40:29,119 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9355, 0.8812, 0.9079, 1.0319, 0.8475, 0.9933, 0.9344, 0.9646], + device='cuda:1'), covar=tensor([0.0691, 0.0764, 0.0816, 0.0549, 0.0814, 0.0682, 0.0748, 0.0623], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0224, 0.0238, 0.0227, 0.0211, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 23:40:31,191 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:57,898 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:10,184 INFO [train.py:903] (1/4) Epoch 23, batch 2800, loss[loss=0.2099, simple_loss=0.2832, pruned_loss=0.06826, over 19607.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06382, over 3809337.71 frames. ], batch size: 52, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:41:27,938 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:42,352 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.940e+02 6.140e+02 7.865e+02 1.529e+03, threshold=1.228e+03, percent-clipped=3.0 +2023-04-02 23:41:54,199 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 23:42:02,472 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:10,891 INFO [train.py:903] (1/4) Epoch 23, batch 2850, loss[loss=0.1883, simple_loss=0.2631, pruned_loss=0.05679, over 19749.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2897, pruned_loss=0.06463, over 3809520.76 frames. 
], batch size: 45, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:42:11,236 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:33,234 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8009, 3.2534, 3.3262, 3.3299, 1.3022, 3.2173, 2.7946, 3.0932], + device='cuda:1'), covar=tensor([0.1873, 0.1198, 0.0845, 0.0989, 0.6009, 0.1119, 0.0870, 0.1417], + device='cuda:1'), in_proj_covar=tensor([0.0786, 0.0750, 0.0957, 0.0840, 0.0844, 0.0718, 0.0570, 0.0892], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 23:42:36,390 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:51,798 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6477, 1.5091, 1.6194, 2.1826, 1.6222, 1.9309, 1.8566, 1.7753], + device='cuda:1'), covar=tensor([0.0819, 0.0891, 0.0918, 0.0654, 0.0841, 0.0750, 0.0857, 0.0675], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0239, 0.0228, 0.0212, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-02 23:43:09,810 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 23:43:11,000 INFO [train.py:903] (1/4) Epoch 23, batch 2900, loss[loss=0.1563, simple_loss=0.2369, pruned_loss=0.03785, over 19777.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2883, pruned_loss=0.06393, over 3807867.58 frames. ], batch size: 48, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:43:12,415 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6332, 1.3026, 1.4682, 1.5467, 3.2451, 1.0695, 2.2896, 3.6486], + device='cuda:1'), covar=tensor([0.0503, 0.2856, 0.3081, 0.1876, 0.0708, 0.2656, 0.1396, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0368, 0.0390, 0.0350, 0.0374, 0.0354, 0.0383, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:43:34,541 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:45,170 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.545e+02 5.074e+02 6.117e+02 7.609e+02 1.538e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 23:44:10,298 INFO [train.py:903] (1/4) Epoch 23, batch 2950, loss[loss=0.2172, simple_loss=0.287, pruned_loss=0.07366, over 19573.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2884, pruned_loss=0.06426, over 3812890.12 frames. 
], batch size: 52, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:44:23,187 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0840, 5.0846, 5.8666, 5.8567, 1.9619, 5.5747, 4.6232, 5.5118], + device='cuda:1'), covar=tensor([0.1536, 0.0854, 0.0518, 0.0548, 0.6278, 0.0771, 0.0630, 0.1087], + device='cuda:1'), in_proj_covar=tensor([0.0780, 0.0743, 0.0950, 0.0833, 0.0838, 0.0711, 0.0566, 0.0885], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-02 23:44:25,472 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,243 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,283 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:45:09,888 INFO [train.py:903] (1/4) Epoch 23, batch 3000, loss[loss=0.2857, simple_loss=0.3446, pruned_loss=0.1134, over 13079.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2882, pruned_loss=0.06459, over 3818882.34 frames. ], batch size: 135, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:45:09,889 INFO [train.py:928] (1/4) Computing validation loss +2023-04-02 23:45:23,388 INFO [train.py:937] (1/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2685, pruned_loss=0.03441, over 944034.00 frames. +2023-04-02 23:45:23,388 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-02 23:45:26,695 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 23:45:27,892 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9138, 4.4513, 2.8304, 3.8722, 0.9899, 4.4562, 4.2634, 4.4544], + device='cuda:1'), covar=tensor([0.0571, 0.0944, 0.1936, 0.0861, 0.4170, 0.0651, 0.0929, 0.1036], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0414, 0.0498, 0.0349, 0.0402, 0.0437, 0.0429, 0.0463], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:45:40,636 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 23:45:55,421 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9794, 2.0667, 2.3231, 2.6056, 1.9204, 2.4815, 2.3571, 2.1083], + device='cuda:1'), covar=tensor([0.4470, 0.4310, 0.2112, 0.2460, 0.4359, 0.2304, 0.5158, 0.3618], + device='cuda:1'), in_proj_covar=tensor([0.0908, 0.0975, 0.0721, 0.0934, 0.0885, 0.0822, 0.0848, 0.0787], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 23:45:57,208 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.963e+02 5.132e+02 6.544e+02 7.997e+02 1.730e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 23:46:23,490 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-02 23:46:24,009 INFO [train.py:903] (1/4) Epoch 23, batch 3050, loss[loss=0.2225, simple_loss=0.2992, pruned_loss=0.07293, over 17326.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06424, over 3839338.59 frames. 
], batch size: 101, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:46:27,330 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7282, 4.3138, 2.8926, 3.8142, 0.8923, 4.2672, 4.1579, 4.2350], + device='cuda:1'), covar=tensor([0.0616, 0.0839, 0.1671, 0.0771, 0.4103, 0.0590, 0.0889, 0.0982], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0414, 0.0497, 0.0349, 0.0401, 0.0437, 0.0430, 0.0463], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:47:00,337 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153296.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:05,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4398, 1.5286, 1.7692, 1.7246, 2.6352, 2.3465, 2.8248, 1.2000], + device='cuda:1'), covar=tensor([0.2478, 0.4315, 0.2690, 0.1904, 0.1473, 0.2088, 0.1343, 0.4453], + device='cuda:1'), in_proj_covar=tensor([0.0535, 0.0644, 0.0715, 0.0486, 0.0617, 0.0531, 0.0662, 0.0551], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 23:47:25,750 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:26,471 INFO [train.py:903] (1/4) Epoch 23, batch 3100, loss[loss=0.209, simple_loss=0.2881, pruned_loss=0.06493, over 19723.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2863, pruned_loss=0.0638, over 3836689.33 frames. ], batch size: 63, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:33,582 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:54,373 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:59,279 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 4.897e+02 6.414e+02 9.491e+02 6.432e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 23:48:01,106 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 23:48:03,113 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:48:13,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6782, 1.4813, 1.2484, 1.5446, 1.4463, 1.3329, 1.2456, 1.4874], + device='cuda:1'), covar=tensor([0.1239, 0.1527, 0.1961, 0.1251, 0.1414, 0.0987, 0.1930, 0.1058], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0357, 0.0314, 0.0254, 0.0305, 0.0252, 0.0310, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:48:25,986 INFO [train.py:903] (1/4) Epoch 23, batch 3150, loss[loss=0.1701, simple_loss=0.2535, pruned_loss=0.04336, over 19467.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2871, pruned_loss=0.06421, over 3815914.67 frames. ], batch size: 49, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:48:54,111 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 23:48:57,823 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4309, 1.4959, 1.5744, 1.4716, 3.0333, 1.1457, 2.4877, 3.3963], + device='cuda:1'), covar=tensor([0.0560, 0.2705, 0.2839, 0.1925, 0.0736, 0.2487, 0.1086, 0.0303], + device='cuda:1'), in_proj_covar=tensor([0.0415, 0.0370, 0.0392, 0.0352, 0.0376, 0.0354, 0.0385, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:49:03,432 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3270, 1.3845, 1.4938, 1.4915, 1.7695, 1.8299, 1.8186, 0.7156], + device='cuda:1'), covar=tensor([0.2393, 0.4157, 0.2635, 0.1898, 0.1617, 0.2275, 0.1409, 0.4505], + device='cuda:1'), in_proj_covar=tensor([0.0535, 0.0645, 0.0716, 0.0486, 0.0617, 0.0532, 0.0662, 0.0552], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-02 23:49:03,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-02 23:49:26,056 INFO [train.py:903] (1/4) Epoch 23, batch 3200, loss[loss=0.1941, simple_loss=0.2788, pruned_loss=0.05463, over 19476.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2853, pruned_loss=0.06327, over 3818076.98 frames. ], batch size: 64, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:49:54,888 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:00,125 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.043e+02 6.093e+02 8.078e+02 1.420e+03, threshold=1.219e+03, percent-clipped=2.0 +2023-04-02 23:50:17,468 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:26,643 INFO [train.py:903] (1/4) Epoch 23, batch 3250, loss[loss=0.232, simple_loss=0.3117, pruned_loss=0.07612, over 19608.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2861, pruned_loss=0.06377, over 3812304.00 frames. ], batch size: 57, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:50:43,099 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:48,913 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153484.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:51:05,697 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 23:51:27,758 INFO [train.py:903] (1/4) Epoch 23, batch 3300, loss[loss=0.2059, simple_loss=0.2902, pruned_loss=0.06077, over 19667.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2858, pruned_loss=0.06333, over 3830810.13 frames. ], batch size: 55, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:51:34,823 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 23:52:00,745 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.081e+02 6.216e+02 8.009e+02 2.047e+03, threshold=1.243e+03, percent-clipped=5.0 +2023-04-02 23:52:26,361 INFO [train.py:903] (1/4) Epoch 23, batch 3350, loss[loss=0.2015, simple_loss=0.2817, pruned_loss=0.06063, over 19693.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2871, pruned_loss=0.06389, over 3824776.19 frames. 
], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:52:56,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5387, 2.2339, 1.6479, 1.5296, 2.0110, 1.3343, 1.4182, 1.9451], + device='cuda:1'), covar=tensor([0.0993, 0.0736, 0.1207, 0.0858, 0.0621, 0.1354, 0.0755, 0.0483], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0319, 0.0344, 0.0269, 0.0252, 0.0343, 0.0295, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:53:00,283 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:26,223 INFO [train.py:903] (1/4) Epoch 23, batch 3400, loss[loss=0.2236, simple_loss=0.3033, pruned_loss=0.07193, over 17530.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.06458, over 3824975.96 frames. ], batch size: 101, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:53:37,515 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-02 23:53:47,127 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2720, 1.8731, 1.8926, 2.7753, 1.9178, 2.3982, 2.3614, 2.2673], + device='cuda:1'), covar=tensor([0.0745, 0.0911, 0.0943, 0.0778, 0.0875, 0.0746, 0.0922, 0.0622], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0219, 0.0223, 0.0237, 0.0225, 0.0211, 0.0186, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-02 23:53:56,984 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153640.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:58,257 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3262, 1.3195, 1.7276, 1.1594, 2.5044, 3.4379, 3.1328, 3.6252], + device='cuda:1'), covar=tensor([0.1461, 0.3653, 0.3149, 0.2507, 0.0604, 0.0175, 0.0210, 0.0245], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0266, 0.0245, 0.0189, 0.0218, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-02 23:54:01,365 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.295e+02 6.743e+02 8.549e+02 2.424e+03, threshold=1.349e+03, percent-clipped=5.0 +2023-04-02 23:54:17,488 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4614, 2.2472, 2.3386, 2.6240, 2.3448, 2.1244, 2.1887, 2.4472], + device='cuda:1'), covar=tensor([0.0777, 0.1265, 0.1037, 0.0743, 0.1061, 0.0453, 0.1045, 0.0532], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0360, 0.0317, 0.0256, 0.0308, 0.0254, 0.0313, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:54:28,038 INFO [train.py:903] (1/4) Epoch 23, batch 3450, loss[loss=0.2103, simple_loss=0.2978, pruned_loss=0.06143, over 19791.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.064, over 3821758.50 frames. ], batch size: 56, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:54:31,533 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 23:54:52,895 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0040, 2.0848, 2.3317, 2.7603, 2.0495, 2.5985, 2.4036, 2.1848], + device='cuda:1'), covar=tensor([0.4108, 0.3889, 0.1871, 0.2269, 0.3960, 0.2099, 0.4595, 0.3275], + device='cuda:1'), in_proj_covar=tensor([0.0907, 0.0972, 0.0719, 0.0934, 0.0882, 0.0819, 0.0847, 0.0788], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-02 23:55:30,145 INFO [train.py:903] (1/4) Epoch 23, batch 3500, loss[loss=0.2131, simple_loss=0.2961, pruned_loss=0.06507, over 19430.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06432, over 3821952.66 frames. ], batch size: 70, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:02,440 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.938e+02 5.821e+02 7.521e+02 2.332e+03, threshold=1.164e+03, percent-clipped=1.0 +2023-04-02 23:56:17,986 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:56:30,177 INFO [train.py:903] (1/4) Epoch 23, batch 3550, loss[loss=0.184, simple_loss=0.2705, pruned_loss=0.0488, over 19682.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2883, pruned_loss=0.06431, over 3816141.73 frames. ], batch size: 58, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:50,064 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:57:30,146 INFO [train.py:903] (1/4) Epoch 23, batch 3600, loss[loss=0.2021, simple_loss=0.2881, pruned_loss=0.0581, over 19541.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06398, over 3827467.60 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:57:47,202 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 23:58:05,069 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 5.155e+02 6.351e+02 8.015e+02 2.586e+03, threshold=1.270e+03, percent-clipped=6.0 +2023-04-02 23:58:12,392 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:58:30,878 INFO [train.py:903] (1/4) Epoch 23, batch 3650, loss[loss=0.2385, simple_loss=0.3102, pruned_loss=0.08343, over 18131.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2878, pruned_loss=0.06437, over 3819588.94 frames. ], batch size: 83, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:42,967 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:08,879 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:31,665 INFO [train.py:903] (1/4) Epoch 23, batch 3700, loss[loss=0.1933, simple_loss=0.2785, pruned_loss=0.05404, over 19490.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2881, pruned_loss=0.06464, over 3818250.13 frames. 
], batch size: 49, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:59:57,424 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5583, 1.1860, 1.4909, 1.3613, 3.0931, 1.0581, 2.2951, 3.4781], + device='cuda:1'), covar=tensor([0.0625, 0.3177, 0.3072, 0.2198, 0.0838, 0.2778, 0.1453, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0369, 0.0391, 0.0352, 0.0375, 0.0352, 0.0385, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-02 23:59:57,956 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 00:00:00,754 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153941.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:00:04,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.617e+02 5.510e+02 6.874e+02 2.344e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-03 00:00:31,958 INFO [train.py:903] (1/4) Epoch 23, batch 3750, loss[loss=0.1913, simple_loss=0.2789, pruned_loss=0.05184, over 19529.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2889, pruned_loss=0.06486, over 3821262.88 frames. ], batch size: 56, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:24,683 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 00:01:27,853 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:01:33,254 INFO [train.py:903] (1/4) Epoch 23, batch 3800, loss[loss=0.2552, simple_loss=0.3185, pruned_loss=0.09598, over 13371.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2885, pruned_loss=0.0644, over 3826137.30 frames. ], batch size: 136, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:59,812 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154036.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:02:05,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 00:02:08,274 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 4.923e+02 6.103e+02 7.526e+02 2.694e+03, threshold=1.221e+03, percent-clipped=9.0 +2023-04-03 00:02:33,013 INFO [train.py:903] (1/4) Epoch 23, batch 3850, loss[loss=0.2183, simple_loss=0.3014, pruned_loss=0.06762, over 19393.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2876, pruned_loss=0.06383, over 3834218.90 frames. ], batch size: 70, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:02:35,318 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154067.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:03:01,627 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 00:03:09,351 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:18,588 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:35,946 INFO [train.py:903] (1/4) Epoch 23, batch 3900, loss[loss=0.164, simple_loss=0.2457, pruned_loss=0.04116, over 16880.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2873, pruned_loss=0.06347, over 3837586.00 frames. 
], batch size: 37, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:09,480 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 4.608e+02 5.656e+02 7.392e+02 1.919e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-04-03 00:04:22,819 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:04:37,502 INFO [train.py:903] (1/4) Epoch 23, batch 3950, loss[loss=0.2633, simple_loss=0.3186, pruned_loss=0.104, over 12655.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2881, pruned_loss=0.06419, over 3824307.54 frames. ], batch size: 136, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:44,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 00:04:52,286 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:05:37,018 INFO [train.py:903] (1/4) Epoch 23, batch 4000, loss[loss=0.2124, simple_loss=0.2873, pruned_loss=0.06873, over 19666.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2875, pruned_loss=0.06414, over 3832389.04 frames. ], batch size: 55, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:06:06,068 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:06:12,360 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.130e+02 6.145e+02 8.525e+02 2.203e+03, threshold=1.229e+03, percent-clipped=9.0 +2023-04-03 00:06:27,019 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 00:06:37,055 INFO [train.py:903] (1/4) Epoch 23, batch 4050, loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06703, over 19771.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2877, pruned_loss=0.06422, over 3828362.85 frames. ], batch size: 63, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:07:01,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154285.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:17,531 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8317, 1.1666, 1.4885, 1.5317, 3.3443, 1.2466, 2.4959, 3.8940], + device='cuda:1'), covar=tensor([0.0548, 0.3372, 0.3251, 0.2229, 0.0813, 0.2648, 0.1389, 0.0251], + device='cuda:1'), in_proj_covar=tensor([0.0416, 0.0371, 0.0394, 0.0355, 0.0377, 0.0354, 0.0387, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 00:07:20,309 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 00:07:23,123 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:29,631 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:37,599 INFO [train.py:903] (1/4) Epoch 23, batch 4100, loss[loss=0.2174, simple_loss=0.2783, pruned_loss=0.07826, over 19718.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.0639, over 3821072.94 frames. 
], batch size: 46, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:08:11,157 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.105e+02 5.940e+02 7.682e+02 1.555e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-03 00:08:13,557 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 00:08:16,176 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 00:08:25,599 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154355.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:08:39,413 INFO [train.py:903] (1/4) Epoch 23, batch 4150, loss[loss=0.1788, simple_loss=0.2612, pruned_loss=0.04823, over 19798.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06357, over 3798473.47 frames. ], batch size: 48, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:22,103 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:09:33,978 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154411.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:09:39,111 INFO [train.py:903] (1/4) Epoch 23, batch 4200, loss[loss=0.2018, simple_loss=0.2751, pruned_loss=0.0642, over 19473.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2872, pruned_loss=0.06351, over 3797668.01 frames. ], batch size: 49, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:41,419 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 00:10:07,065 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:14,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.103e+02 4.744e+02 5.863e+02 7.378e+02 1.705e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-03 00:10:17,212 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154446.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:40,150 INFO [train.py:903] (1/4) Epoch 23, batch 4250, loss[loss=0.2466, simple_loss=0.3161, pruned_loss=0.08852, over 12949.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2875, pruned_loss=0.06369, over 3790844.94 frames. ], batch size: 136, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:10:54,221 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 00:11:05,304 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 00:11:12,072 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:11:40,259 INFO [train.py:903] (1/4) Epoch 23, batch 4300, loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06135, over 17278.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.0638, over 3792930.58 frames. 
], batch size: 102, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:11:53,655 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154526.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:12:02,702 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2879, 3.7732, 3.8908, 3.8949, 1.6773, 3.7285, 3.2532, 3.6613], + device='cuda:1'), covar=tensor([0.1691, 0.1267, 0.0687, 0.0810, 0.5685, 0.1167, 0.0752, 0.1113], + device='cuda:1'), in_proj_covar=tensor([0.0784, 0.0745, 0.0950, 0.0835, 0.0835, 0.0712, 0.0568, 0.0883], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 00:12:13,327 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.652e+02 5.888e+02 7.584e+02 1.931e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 00:12:24,494 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154553.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:33,748 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 00:12:35,167 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:41,454 INFO [train.py:903] (1/4) Epoch 23, batch 4350, loss[loss=0.2378, simple_loss=0.3243, pruned_loss=0.07565, over 18824.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2881, pruned_loss=0.064, over 3807840.35 frames. ], batch size: 74, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:01,188 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:13:40,250 INFO [train.py:903] (1/4) Epoch 23, batch 4400, loss[loss=0.2193, simple_loss=0.2986, pruned_loss=0.06996, over 19772.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06355, over 3813599.65 frames. ], batch size: 56, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:46,123 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9602, 1.2420, 1.6013, 0.9621, 2.2864, 3.0106, 2.6935, 3.2366], + device='cuda:1'), covar=tensor([0.1829, 0.4041, 0.3523, 0.2677, 0.0669, 0.0230, 0.0300, 0.0330], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0322, 0.0351, 0.0263, 0.0243, 0.0187, 0.0215, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:14:04,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 00:14:14,044 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.197e+02 6.555e+02 7.915e+02 1.480e+03, threshold=1.311e+03, percent-clipped=6.0 +2023-04-03 00:14:15,157 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 00:14:16,530 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:18,492 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154648.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:23,881 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:27,557 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154656.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:38,570 INFO [train.py:903] (1/4) Epoch 23, batch 4450, loss[loss=0.2431, simple_loss=0.3147, pruned_loss=0.08578, over 18848.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06387, over 3819920.88 frames. ], batch size: 74, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:58,456 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154681.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:18,465 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:19,380 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154699.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:15:38,748 INFO [train.py:903] (1/4) Epoch 23, batch 4500, loss[loss=0.233, simple_loss=0.3136, pruned_loss=0.07623, over 19574.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06486, over 3795496.61 frames. ], batch size: 61, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:06,371 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154738.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:13,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.922e+02 6.448e+02 7.735e+02 1.395e+03, threshold=1.290e+03, percent-clipped=1.0 +2023-04-03 00:16:31,820 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5669, 4.7448, 5.2815, 5.3072, 2.3088, 5.0043, 4.3537, 5.0107], + device='cuda:1'), covar=tensor([0.1618, 0.1519, 0.0512, 0.0638, 0.5545, 0.0857, 0.0595, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0783, 0.0748, 0.0953, 0.0839, 0.0837, 0.0715, 0.0570, 0.0886], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 00:16:37,165 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:41,137 INFO [train.py:903] (1/4) Epoch 23, batch 4550, loss[loss=0.1736, simple_loss=0.2512, pruned_loss=0.04805, over 19780.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2887, pruned_loss=0.06459, over 3811578.84 frames. ], batch size: 47, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:43,821 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:48,240 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 00:17:00,059 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154782.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:11,893 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 00:17:31,869 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154807.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:34,136 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154809.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:17:39,882 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154814.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:41,843 INFO [train.py:903] (1/4) Epoch 23, batch 4600, loss[loss=0.1703, simple_loss=0.2525, pruned_loss=0.04408, over 19748.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2881, pruned_loss=0.06418, over 3815528.64 frames. ], batch size: 47, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:17:43,482 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154817.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:02,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154834.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:04,539 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:13,493 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:17,273 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.757e+02 5.456e+02 7.137e+02 2.039e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-04-03 00:18:41,884 INFO [train.py:903] (1/4) Epoch 23, batch 4650, loss[loss=0.2289, simple_loss=0.3018, pruned_loss=0.078, over 19497.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2874, pruned_loss=0.06336, over 3820494.03 frames. ], batch size: 64, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:18:57,532 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 00:19:09,930 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 00:19:37,575 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7578, 1.7735, 1.6866, 1.4555, 1.4158, 1.5279, 0.3558, 0.7174], + device='cuda:1'), covar=tensor([0.0618, 0.0578, 0.0379, 0.0534, 0.1173, 0.0687, 0.1220, 0.1031], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0354, 0.0360, 0.0383, 0.0462, 0.0389, 0.0337, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 00:19:42,545 INFO [train.py:903] (1/4) Epoch 23, batch 4700, loss[loss=0.2285, simple_loss=0.3028, pruned_loss=0.07709, over 18070.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06353, over 3811285.20 frames. ], batch size: 83, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:20:04,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 00:20:17,976 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.550e+02 5.511e+02 7.065e+02 1.410e+03, threshold=1.102e+03, percent-clipped=2.0 +2023-04-03 00:20:25,052 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154951.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:28,231 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:39,150 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1171, 1.7735, 1.9069, 2.6549, 1.9575, 2.4025, 2.3953, 2.1569], + device='cuda:1'), covar=tensor([0.0818, 0.0966, 0.0972, 0.0826, 0.0893, 0.0748, 0.0873, 0.0698], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0240, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 00:20:44,141 INFO [train.py:903] (1/4) Epoch 23, batch 4750, loss[loss=0.199, simple_loss=0.2932, pruned_loss=0.0524, over 19666.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2861, pruned_loss=0.06335, over 3830047.50 frames. ], batch size: 55, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:21:00,301 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:12,337 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:17,077 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:45,304 INFO [train.py:903] (1/4) Epoch 23, batch 4800, loss[loss=0.2063, simple_loss=0.2953, pruned_loss=0.05863, over 19676.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2871, pruned_loss=0.06371, over 3818614.79 frames. ], batch size: 55, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:21:49,048 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:54,274 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:56,087 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-04-03 00:22:18,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:19,573 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.324e+02 6.216e+02 7.674e+02 2.163e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-03 00:22:26,085 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:44,536 INFO [train.py:903] (1/4) Epoch 23, batch 4850, loss[loss=0.234, simple_loss=0.2989, pruned_loss=0.08457, over 19694.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2876, pruned_loss=0.06435, over 3819118.15 frames. 
], batch size: 53, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:22:49,279 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155070.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:23:03,361 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:09,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 00:23:11,851 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:21,468 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155095.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:23:29,055 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 00:23:32,629 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:34,405 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 00:23:34,434 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 00:23:44,624 INFO [train.py:903] (1/4) Epoch 23, batch 4900, loss[loss=0.2023, simple_loss=0.289, pruned_loss=0.05783, over 19670.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2879, pruned_loss=0.06461, over 3821805.34 frames. ], batch size: 58, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:23:44,639 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 00:24:04,404 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 00:24:20,243 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 5.163e+02 5.938e+02 7.647e+02 1.407e+03, threshold=1.188e+03, percent-clipped=5.0 +2023-04-03 00:24:46,211 INFO [train.py:903] (1/4) Epoch 23, batch 4950, loss[loss=0.2119, simple_loss=0.2962, pruned_loss=0.06383, over 19680.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2878, pruned_loss=0.06419, over 3824034.26 frames. ], batch size: 59, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:01,061 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 00:25:19,885 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-03 00:25:21,558 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155197.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:22,302 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-03 00:25:30,762 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3587, 2.0026, 2.0450, 2.8190, 2.0169, 2.5534, 2.5570, 2.2883], + device='cuda:1'), covar=tensor([0.0725, 0.0878, 0.0923, 0.0796, 0.0849, 0.0774, 0.0883, 0.0649], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0222, 0.0225, 0.0240, 0.0226, 0.0212, 0.0187, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 00:25:34,798 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:44,533 INFO [train.py:903] (1/4) Epoch 23, batch 5000, loss[loss=0.2027, simple_loss=0.2921, pruned_loss=0.05665, over 19320.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2885, pruned_loss=0.06436, over 3822511.55 frames. ], batch size: 66, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:52,524 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 00:26:02,939 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:26:03,632 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 00:26:19,060 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.751e+02 5.889e+02 7.363e+02 1.722e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 00:26:28,348 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2484, 1.4702, 1.8396, 1.4743, 2.9546, 4.5622, 4.4487, 4.9589], + device='cuda:1'), covar=tensor([0.1695, 0.3819, 0.3498, 0.2409, 0.0650, 0.0194, 0.0185, 0.0201], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0265, 0.0246, 0.0188, 0.0217, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:26:30,619 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8234, 1.1089, 1.4425, 0.5854, 1.9234, 2.1862, 1.9580, 2.3532], + device='cuda:1'), covar=tensor([0.1661, 0.3668, 0.3221, 0.2850, 0.0787, 0.0354, 0.0381, 0.0438], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0265, 0.0246, 0.0188, 0.0217, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:26:43,566 INFO [train.py:903] (1/4) Epoch 23, batch 5050, loss[loss=0.265, simple_loss=0.3426, pruned_loss=0.09368, over 17492.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06386, over 3821031.50 frames. ], batch size: 101, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:17,602 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 00:27:42,608 INFO [train.py:903] (1/4) Epoch 23, batch 5100, loss[loss=0.2429, simple_loss=0.3129, pruned_loss=0.08647, over 13155.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2884, pruned_loss=0.06422, over 3820649.90 frames. ], batch size: 136, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:53,102 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 00:27:56,487 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. 
Duration: 25.035 +2023-04-03 00:28:01,507 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 00:28:10,591 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:18,269 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.084e+02 6.467e+02 7.878e+02 1.414e+03, threshold=1.293e+03, percent-clipped=6.0 +2023-04-03 00:28:36,971 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:43,673 INFO [train.py:903] (1/4) Epoch 23, batch 5150, loss[loss=0.223, simple_loss=0.3007, pruned_loss=0.07263, over 19693.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06366, over 3823659.95 frames. ], batch size: 59, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:28:56,722 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 00:29:03,276 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 00:29:08,592 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:29:31,259 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 00:29:45,063 INFO [train.py:903] (1/4) Epoch 23, batch 5200, loss[loss=0.1965, simple_loss=0.2813, pruned_loss=0.05585, over 19537.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2887, pruned_loss=0.06448, over 3830764.64 frames. ], batch size: 56, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:29:58,658 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 00:30:02,272 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:19,766 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 5.305e+02 6.432e+02 7.969e+02 2.733e+03, threshold=1.286e+03, percent-clipped=6.0 +2023-04-03 00:30:30,727 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:30,794 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:41,483 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 00:30:44,913 INFO [train.py:903] (1/4) Epoch 23, batch 5250, loss[loss=0.2118, simple_loss=0.2934, pruned_loss=0.06514, over 19590.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.289, pruned_loss=0.06466, over 3815931.12 frames. 
], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:30:55,549 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155475.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:59,146 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:31:35,362 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7400, 1.6321, 1.4963, 2.1575, 1.6351, 2.0820, 2.0334, 1.8829], + device='cuda:1'), covar=tensor([0.0811, 0.0893, 0.1002, 0.0743, 0.0879, 0.0705, 0.0832, 0.0649], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0225, 0.0239, 0.0225, 0.0212, 0.0186, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 00:31:45,243 INFO [train.py:903] (1/4) Epoch 23, batch 5300, loss[loss=0.2243, simple_loss=0.2983, pruned_loss=0.0751, over 19287.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06404, over 3825268.06 frames. ], batch size: 66, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:32:03,706 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 00:32:06,104 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4711, 1.5089, 1.7901, 1.6622, 2.4480, 2.1457, 2.4919, 1.2176], + device='cuda:1'), covar=tensor([0.2555, 0.4466, 0.2815, 0.2119, 0.1613, 0.2314, 0.1574, 0.4668], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0647, 0.0718, 0.0489, 0.0621, 0.0535, 0.0659, 0.0552], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 00:32:21,414 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 4.703e+02 5.856e+02 7.687e+02 1.612e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 00:32:22,915 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:32:35,456 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4588, 1.5435, 1.8098, 1.6605, 2.8365, 2.2753, 2.9389, 1.4234], + device='cuda:1'), covar=tensor([0.2553, 0.4341, 0.2725, 0.1996, 0.1468, 0.2224, 0.1456, 0.4205], + device='cuda:1'), in_proj_covar=tensor([0.0538, 0.0649, 0.0720, 0.0490, 0.0623, 0.0536, 0.0661, 0.0554], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 00:32:46,437 INFO [train.py:903] (1/4) Epoch 23, batch 5350, loss[loss=0.2348, simple_loss=0.3056, pruned_loss=0.08199, over 13248.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06401, over 3823252.64 frames. ], batch size: 136, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:18,091 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 00:33:46,934 INFO [train.py:903] (1/4) Epoch 23, batch 5400, loss[loss=0.2095, simple_loss=0.2792, pruned_loss=0.06988, over 19488.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.287, pruned_loss=0.06338, over 3817315.79 frames. 
], batch size: 49, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:56,241 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155623.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:34:21,898 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.892e+02 4.747e+02 5.806e+02 7.220e+02 1.360e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 00:34:48,075 INFO [train.py:903] (1/4) Epoch 23, batch 5450, loss[loss=0.223, simple_loss=0.3029, pruned_loss=0.07151, over 19670.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2858, pruned_loss=0.06265, over 3823613.15 frames. ], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:34:58,361 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155675.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:08,414 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2690, 2.2806, 2.4596, 2.9657, 2.2289, 2.8781, 2.6891, 2.3566], + device='cuda:1'), covar=tensor([0.4371, 0.4042, 0.1955, 0.2635, 0.4449, 0.2193, 0.4513, 0.3316], + device='cuda:1'), in_proj_covar=tensor([0.0908, 0.0977, 0.0726, 0.0938, 0.0888, 0.0824, 0.0846, 0.0789], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 00:35:39,952 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:47,565 INFO [train.py:903] (1/4) Epoch 23, batch 5500, loss[loss=0.2033, simple_loss=0.2834, pruned_loss=0.06161, over 18138.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06268, over 3832570.59 frames. ], batch size: 83, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:10,021 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:36:13,480 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 00:36:24,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.057e+02 6.298e+02 8.158e+02 1.659e+03, threshold=1.260e+03, percent-clipped=6.0 +2023-04-03 00:36:46,706 INFO [train.py:903] (1/4) Epoch 23, batch 5550, loss[loss=0.2075, simple_loss=0.292, pruned_loss=0.06146, over 19580.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06302, over 3831652.76 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:50,863 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5067, 1.5032, 1.4453, 1.9792, 1.6124, 1.8077, 1.8590, 1.7106], + device='cuda:1'), covar=tensor([0.0898, 0.0959, 0.1053, 0.0700, 0.0788, 0.0757, 0.0767, 0.0695], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0240, 0.0225, 0.0212, 0.0186, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 00:36:56,205 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-03 00:37:30,479 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:30,612 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:42,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 00:37:48,755 INFO [train.py:903] (1/4) Epoch 23, batch 5600, loss[loss=0.2221, simple_loss=0.3009, pruned_loss=0.07164, over 18736.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06361, over 3819588.74 frames. ], batch size: 74, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:37:52,340 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155819.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:02,587 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:23,384 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 5.365e+02 7.033e+02 8.601e+02 1.530e+03, threshold=1.407e+03, percent-clipped=6.0 +2023-04-03 00:38:48,661 INFO [train.py:903] (1/4) Epoch 23, batch 5650, loss[loss=0.2611, simple_loss=0.3248, pruned_loss=0.09873, over 13961.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2883, pruned_loss=0.06431, over 3817851.16 frames. ], batch size: 137, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:39:33,338 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 00:39:47,802 INFO [train.py:903] (1/4) Epoch 23, batch 5700, loss[loss=0.2007, simple_loss=0.292, pruned_loss=0.05471, over 19545.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06468, over 3804959.98 frames. ], batch size: 56, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:39:49,800 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-03 00:40:10,841 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:24,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.906e+02 6.184e+02 7.924e+02 2.131e+03, threshold=1.237e+03, percent-clipped=2.0 +2023-04-03 00:40:35,575 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4532, 2.1408, 1.6874, 1.5452, 2.0207, 1.3673, 1.3483, 1.8487], + device='cuda:1'), covar=tensor([0.1069, 0.0865, 0.1095, 0.0838, 0.0588, 0.1293, 0.0759, 0.0512], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0314, 0.0337, 0.0265, 0.0247, 0.0338, 0.0289, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 00:40:47,842 INFO [train.py:903] (1/4) Epoch 23, batch 5750, loss[loss=0.1753, simple_loss=0.267, pruned_loss=0.04184, over 19667.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2879, pruned_loss=0.06443, over 3816914.09 frames. ], batch size: 58, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:49,184 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155967.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:51,095 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 00:40:58,763 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 00:41:04,146 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 00:41:50,977 INFO [train.py:903] (1/4) Epoch 23, batch 5800, loss[loss=0.1859, simple_loss=0.2767, pruned_loss=0.04754, over 19528.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06433, over 3813011.26 frames. ], batch size: 54, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:41:54,496 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:42:25,072 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 4.982e+02 6.301e+02 7.857e+02 1.493e+03, threshold=1.260e+03, percent-clipped=3.0 +2023-04-03 00:42:50,181 INFO [train.py:903] (1/4) Epoch 23, batch 5850, loss[loss=0.1973, simple_loss=0.2802, pruned_loss=0.05722, over 19599.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06375, over 3820658.87 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:43:08,224 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:14,881 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156088.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:48,537 INFO [train.py:903] (1/4) Epoch 23, batch 5900, loss[loss=0.1801, simple_loss=0.268, pruned_loss=0.04613, over 19710.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2873, pruned_loss=0.06345, over 3824934.42 frames. ], batch size: 45, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:43:52,964 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 00:44:10,850 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:14,060 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 00:44:24,575 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.043e+02 6.365e+02 8.179e+02 2.050e+03, threshold=1.273e+03, percent-clipped=8.0 +2023-04-03 00:44:25,628 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:48,118 INFO [train.py:903] (1/4) Epoch 23, batch 5950, loss[loss=0.214, simple_loss=0.2927, pruned_loss=0.06766, over 19770.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2885, pruned_loss=0.06439, over 3819082.76 frames. ], batch size: 54, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:45:19,174 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:47,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:48,677 INFO [train.py:903] (1/4) Epoch 23, batch 6000, loss[loss=0.2009, simple_loss=0.2791, pruned_loss=0.06131, over 19502.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2874, pruned_loss=0.0639, over 3833407.48 frames. 
], batch size: 49, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:45:48,677 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 00:46:01,142 INFO [train.py:937] (1/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2684, pruned_loss=0.03439, over 944034.00 frames. +2023-04-03 00:46:01,143 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 00:46:23,274 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:46:37,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.873e+02 6.527e+02 8.069e+02 1.468e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-03 00:46:47,420 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6408, 1.7628, 1.9549, 2.0219, 1.5614, 1.9694, 1.9623, 1.8441], + device='cuda:1'), covar=tensor([0.4006, 0.3357, 0.1973, 0.2155, 0.3662, 0.2032, 0.4949, 0.3163], + device='cuda:1'), in_proj_covar=tensor([0.0903, 0.0973, 0.0721, 0.0933, 0.0885, 0.0821, 0.0843, 0.0786], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 00:46:55,716 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156261.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:47:01,915 INFO [train.py:903] (1/4) Epoch 23, batch 6050, loss[loss=0.2008, simple_loss=0.2872, pruned_loss=0.05717, over 19516.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2881, pruned_loss=0.06427, over 3828966.55 frames. ], batch size: 54, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:47:02,325 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0484, 2.8577, 2.3167, 2.1480, 2.0674, 2.4542, 1.1944, 2.0004], + device='cuda:1'), covar=tensor([0.0688, 0.0682, 0.0692, 0.1166, 0.1124, 0.1172, 0.1427, 0.1172], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0354, 0.0360, 0.0385, 0.0464, 0.0391, 0.0338, 0.0342], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 00:47:53,128 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:47:57,936 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1054, 1.7965, 1.7215, 2.0078, 1.7649, 1.7996, 1.6502, 2.0074], + device='cuda:1'), covar=tensor([0.1019, 0.1441, 0.1526, 0.1111, 0.1442, 0.0566, 0.1502, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0355, 0.0314, 0.0252, 0.0304, 0.0253, 0.0311, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 00:48:02,113 INFO [train.py:903] (1/4) Epoch 23, batch 6100, loss[loss=0.1822, simple_loss=0.2605, pruned_loss=0.05202, over 19604.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2866, pruned_loss=0.06357, over 3837953.43 frames. 
], batch size: 52, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:48:27,995 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:37,367 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.768e+02 6.249e+02 8.138e+02 1.749e+03, threshold=1.250e+03, percent-clipped=2.0 +2023-04-03 00:48:58,835 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:01,836 INFO [train.py:903] (1/4) Epoch 23, batch 6150, loss[loss=0.2799, simple_loss=0.3586, pruned_loss=0.1007, over 19681.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.287, pruned_loss=0.06364, over 3832134.99 frames. ], batch size: 58, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:49:31,045 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:31,821 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 00:50:00,579 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156415.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:01,337 INFO [train.py:903] (1/4) Epoch 23, batch 6200, loss[loss=0.1977, simple_loss=0.2845, pruned_loss=0.05545, over 19652.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2853, pruned_loss=0.06267, over 3841261.66 frames. ], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:50:20,444 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4470, 1.8020, 1.4451, 1.2956, 1.7837, 1.3116, 1.3620, 1.7637], + device='cuda:1'), covar=tensor([0.0850, 0.0766, 0.0817, 0.0858, 0.0495, 0.1053, 0.0601, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0266, 0.0246, 0.0338, 0.0289, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 00:50:22,385 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156432.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:38,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.815e+02 5.704e+02 6.895e+02 2.552e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-03 00:51:02,800 INFO [train.py:903] (1/4) Epoch 23, batch 6250, loss[loss=0.299, simple_loss=0.3528, pruned_loss=0.1226, over 13787.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2858, pruned_loss=0.06305, over 3821549.53 frames. ], batch size: 136, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:51:32,693 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 00:52:02,820 INFO [train.py:903] (1/4) Epoch 23, batch 6300, loss[loss=0.2434, simple_loss=0.3231, pruned_loss=0.08189, over 19677.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2864, pruned_loss=0.06341, over 3822539.55 frames. 
], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:52:04,461 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:34,305 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156542.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:39,263 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.353e+02 6.743e+02 8.019e+02 1.408e+03, threshold=1.349e+03, percent-clipped=4.0 +2023-04-03 00:52:40,672 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:03,413 INFO [train.py:903] (1/4) Epoch 23, batch 6350, loss[loss=0.2394, simple_loss=0.3188, pruned_loss=0.07994, over 19653.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2861, pruned_loss=0.06297, over 3827628.57 frames. ], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:53:17,257 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:32,812 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2443, 1.1838, 1.8282, 1.8041, 3.0806, 4.8403, 4.6010, 5.1321], + device='cuda:1'), covar=tensor([0.1628, 0.4827, 0.3957, 0.2035, 0.0601, 0.0147, 0.0219, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0325, 0.0354, 0.0265, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:54:02,673 INFO [train.py:903] (1/4) Epoch 23, batch 6400, loss[loss=0.1677, simple_loss=0.2448, pruned_loss=0.0453, over 19782.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2855, pruned_loss=0.06283, over 3841998.03 frames. ], batch size: 47, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:54:39,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 4.834e+02 5.927e+02 7.987e+02 2.615e+03, threshold=1.185e+03, percent-clipped=4.0 +2023-04-03 00:54:46,361 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:04,059 INFO [train.py:903] (1/4) Epoch 23, batch 6450, loss[loss=0.197, simple_loss=0.2865, pruned_loss=0.05378, over 19322.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2858, pruned_loss=0.06266, over 3832577.34 frames. ], batch size: 70, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:55:35,929 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:47,932 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 00:56:04,449 INFO [train.py:903] (1/4) Epoch 23, batch 6500, loss[loss=0.2338, simple_loss=0.309, pruned_loss=0.07927, over 17451.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2857, pruned_loss=0.06254, over 3848511.85 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:56:10,080 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-03 00:56:11,235 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4360, 1.5303, 1.8590, 1.4680, 2.3641, 2.6889, 2.5814, 2.8560], + device='cuda:1'), covar=tensor([0.1369, 0.3046, 0.2714, 0.2478, 0.1100, 0.0311, 0.0266, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0264, 0.0245, 0.0189, 0.0216, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:56:39,933 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.077e+02 6.090e+02 8.057e+02 1.603e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-03 00:57:04,758 INFO [train.py:903] (1/4) Epoch 23, batch 6550, loss[loss=0.2082, simple_loss=0.2854, pruned_loss=0.06549, over 19693.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2857, pruned_loss=0.06277, over 3850175.99 frames. ], batch size: 59, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:57:06,345 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:06,415 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:08,551 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0634, 1.7653, 1.9672, 1.6589, 4.5653, 1.3298, 2.6237, 4.8947], + device='cuda:1'), covar=tensor([0.0412, 0.2683, 0.2636, 0.2084, 0.0700, 0.2509, 0.1355, 0.0183], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0367, 0.0388, 0.0349, 0.0373, 0.0349, 0.0382, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 00:57:50,663 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:04,880 INFO [train.py:903] (1/4) Epoch 23, batch 6600, loss[loss=0.2396, simple_loss=0.3171, pruned_loss=0.08101, over 18846.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2863, pruned_loss=0.06306, over 3834346.08 frames. ], batch size: 74, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:58:20,332 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:41,921 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.163e+02 6.336e+02 8.010e+02 1.885e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 00:58:44,406 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3186, 1.4893, 1.9163, 1.4474, 2.8740, 3.8821, 3.5521, 4.0871], + device='cuda:1'), covar=tensor([0.1533, 0.3623, 0.3059, 0.2309, 0.0545, 0.0174, 0.0207, 0.0245], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0264, 0.0245, 0.0190, 0.0216, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 00:59:05,156 INFO [train.py:903] (1/4) Epoch 23, batch 6650, loss[loss=0.2055, simple_loss=0.2912, pruned_loss=0.05991, over 19618.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.06314, over 3840347.68 frames. 
], batch size: 57, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:59:51,159 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156903.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:00:07,303 INFO [train.py:903] (1/4) Epoch 23, batch 6700, loss[loss=0.2493, simple_loss=0.3235, pruned_loss=0.08756, over 19575.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2865, pruned_loss=0.06342, over 3833458.53 frames. ], batch size: 61, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:00:41,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.603e+02 5.252e+02 6.535e+02 7.903e+02 1.565e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-04-03 01:00:45,400 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:04,456 INFO [train.py:903] (1/4) Epoch 23, batch 6750, loss[loss=0.2376, simple_loss=0.3222, pruned_loss=0.07655, over 17344.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06363, over 3835951.88 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:01:13,626 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:14,656 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8678, 1.3164, 1.5272, 1.5293, 3.4675, 1.2009, 2.4739, 3.8974], + device='cuda:1'), covar=tensor([0.0466, 0.2955, 0.2859, 0.1903, 0.0649, 0.2562, 0.1335, 0.0218], + device='cuda:1'), in_proj_covar=tensor([0.0415, 0.0369, 0.0391, 0.0352, 0.0374, 0.0352, 0.0384, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:02:00,906 INFO [train.py:903] (1/4) Epoch 23, batch 6800, loss[loss=0.1673, simple_loss=0.2513, pruned_loss=0.04165, over 19638.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2869, pruned_loss=0.06375, over 3841611.19 frames. ], batch size: 50, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:02:09,504 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:14,936 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:45,018 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 01:02:45,463 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 01:02:48,303 INFO [train.py:903] (1/4) Epoch 24, batch 0, loss[loss=0.1963, simple_loss=0.2738, pruned_loss=0.05935, over 19772.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2738, pruned_loss=0.05935, over 19772.00 frames. ], batch size: 49, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:02:48,304 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 01:02:59,925 INFO [train.py:937] (1/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2685, pruned_loss=0.03408, over 944034.00 frames. 
+2023-04-03 01:02:59,926 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 01:03:03,176 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.212e+02 6.445e+02 8.399e+02 3.393e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-03 01:03:05,662 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:03:12,278 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 01:03:57,953 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5363, 1.6194, 2.0411, 1.8125, 3.3470, 2.6778, 3.5732, 1.6201], + device='cuda:1'), covar=tensor([0.2570, 0.4427, 0.2754, 0.1960, 0.1445, 0.2115, 0.1475, 0.4230], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0646, 0.0718, 0.0489, 0.0618, 0.0533, 0.0658, 0.0551], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 01:04:00,821 INFO [train.py:903] (1/4) Epoch 24, batch 50, loss[loss=0.1702, simple_loss=0.2511, pruned_loss=0.04465, over 19369.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2911, pruned_loss=0.06515, over 860406.62 frames. ], batch size: 47, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:04:20,660 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:04:32,470 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 01:05:01,217 INFO [train.py:903] (1/4) Epoch 24, batch 100, loss[loss=0.1805, simple_loss=0.2663, pruned_loss=0.04733, over 19621.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2863, pruned_loss=0.06214, over 1521588.15 frames. ], batch size: 50, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:05:03,494 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 5.500e+02 6.534e+02 8.918e+02 1.825e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 01:05:11,339 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 01:05:19,846 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157160.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:06:02,079 INFO [train.py:903] (1/4) Epoch 24, batch 150, loss[loss=0.187, simple_loss=0.2604, pruned_loss=0.0568, over 19395.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2872, pruned_loss=0.06259, over 2023126.98 frames. ], batch size: 47, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:06:42,430 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:01,358 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 01:07:02,474 INFO [train.py:903] (1/4) Epoch 24, batch 200, loss[loss=0.1682, simple_loss=0.2539, pruned_loss=0.0412, over 19397.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2868, pruned_loss=0.06231, over 2419828.79 frames. 
], batch size: 48, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:07:04,623 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 4.992e+02 5.973e+02 7.088e+02 2.080e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 01:07:05,920 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:08:03,056 INFO [train.py:903] (1/4) Epoch 24, batch 250, loss[loss=0.2119, simple_loss=0.2912, pruned_loss=0.06633, over 19529.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2886, pruned_loss=0.06385, over 2737903.09 frames. ], batch size: 54, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:08:10,681 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-03 01:09:03,288 INFO [train.py:903] (1/4) Epoch 24, batch 300, loss[loss=0.2112, simple_loss=0.3001, pruned_loss=0.06115, over 18719.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2884, pruned_loss=0.06402, over 2981663.54 frames. ], batch size: 74, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:09:06,233 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.405e+02 6.557e+02 9.024e+02 1.464e+03, threshold=1.311e+03, percent-clipped=9.0 +2023-04-03 01:09:25,572 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157362.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:36,397 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:39,481 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:05,066 INFO [train.py:903] (1/4) Epoch 24, batch 350, loss[loss=0.1842, simple_loss=0.2707, pruned_loss=0.04887, over 19469.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06411, over 3160552.07 frames. ], batch size: 49, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:10:10,682 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 01:10:12,239 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:27,236 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0374, 2.1203, 2.4073, 2.7422, 2.0821, 2.6012, 2.3936, 2.2095], + device='cuda:1'), covar=tensor([0.4248, 0.4025, 0.1869, 0.2492, 0.4373, 0.2173, 0.4706, 0.3210], + device='cuda:1'), in_proj_covar=tensor([0.0906, 0.0975, 0.0722, 0.0932, 0.0887, 0.0823, 0.0846, 0.0787], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 01:10:39,437 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157422.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:11:05,171 INFO [train.py:903] (1/4) Epoch 24, batch 400, loss[loss=0.226, simple_loss=0.3067, pruned_loss=0.07266, over 19338.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2893, pruned_loss=0.06474, over 3304038.07 frames. 
], batch size: 70, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:11:07,649 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 4.825e+02 6.674e+02 8.153e+02 1.427e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-03 01:11:52,596 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157482.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:11:58,245 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:05,697 INFO [train.py:903] (1/4) Epoch 24, batch 450, loss[loss=0.2235, simple_loss=0.3051, pruned_loss=0.07092, over 19528.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2891, pruned_loss=0.06453, over 3425809.72 frames. ], batch size: 54, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:12:11,725 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5977, 1.2308, 1.5772, 1.2192, 2.2599, 1.0667, 2.1653, 2.5042], + device='cuda:1'), covar=tensor([0.0717, 0.2751, 0.2626, 0.1742, 0.0839, 0.2106, 0.0966, 0.0453], + device='cuda:1'), in_proj_covar=tensor([0.0416, 0.0371, 0.0391, 0.0352, 0.0375, 0.0352, 0.0386, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:12:20,132 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:22,677 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3486, 1.4410, 1.6183, 1.5807, 3.0062, 1.3038, 2.3428, 3.3514], + device='cuda:1'), covar=tensor([0.0593, 0.2719, 0.2843, 0.1915, 0.0735, 0.2326, 0.1451, 0.0320], + device='cuda:1'), in_proj_covar=tensor([0.0416, 0.0371, 0.0391, 0.0352, 0.0375, 0.0352, 0.0385, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:12:24,879 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:40,671 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 01:12:40,702 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 01:12:44,551 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157524.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:13:08,941 INFO [train.py:903] (1/4) Epoch 24, batch 500, loss[loss=0.1975, simple_loss=0.2641, pruned_loss=0.06543, over 19737.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.06317, over 3521969.98 frames. ], batch size: 45, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:13:12,135 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.203e+02 6.096e+02 8.720e+02 1.456e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 01:14:02,828 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:12,208 INFO [train.py:903] (1/4) Epoch 24, batch 550, loss[loss=0.2, simple_loss=0.2725, pruned_loss=0.0638, over 16426.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06212, over 3590771.64 frames. 
], batch size: 36, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:14:20,997 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6423, 4.2369, 2.6749, 3.6852, 1.0071, 4.1691, 4.0555, 4.1171], + device='cuda:1'), covar=tensor([0.0608, 0.0947, 0.1995, 0.0899, 0.3797, 0.0639, 0.0894, 0.1114], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0415, 0.0499, 0.0349, 0.0403, 0.0439, 0.0433, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:14:41,067 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157618.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:42,112 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,057 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,770 INFO [train.py:903] (1/4) Epoch 24, batch 600, loss[loss=0.1804, simple_loss=0.276, pruned_loss=0.04242, over 19532.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.06248, over 3622147.27 frames. ], batch size: 54, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:15:15,912 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.646e+02 5.574e+02 6.767e+02 1.170e+03, threshold=1.115e+03, percent-clipped=0.0 +2023-04-03 01:15:53,052 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 01:16:14,760 INFO [train.py:903] (1/4) Epoch 24, batch 650, loss[loss=0.1778, simple_loss=0.2531, pruned_loss=0.0513, over 19746.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06327, over 3663844.46 frames. ], batch size: 46, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:16:45,957 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157718.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:07,775 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3129, 1.3235, 1.5162, 1.4934, 2.3322, 2.0504, 2.5168, 1.0011], + device='cuda:1'), covar=tensor([0.2706, 0.4561, 0.2810, 0.2193, 0.1690, 0.2334, 0.1513, 0.4651], + device='cuda:1'), in_proj_covar=tensor([0.0542, 0.0651, 0.0725, 0.0494, 0.0623, 0.0537, 0.0664, 0.0555], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 01:17:14,713 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157743.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:15,390 INFO [train.py:903] (1/4) Epoch 24, batch 700, loss[loss=0.2415, simple_loss=0.3086, pruned_loss=0.08725, over 19642.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2879, pruned_loss=0.06361, over 3704982.85 frames. 
], batch size: 60, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:17:15,547 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:20,753 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.232e+02 6.869e+02 8.195e+02 1.483e+03, threshold=1.374e+03, percent-clipped=7.0 +2023-04-03 01:17:44,054 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157766.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:17:46,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:18:19,076 INFO [train.py:903] (1/4) Epoch 24, batch 750, loss[loss=0.2221, simple_loss=0.305, pruned_loss=0.06962, over 19688.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.288, pruned_loss=0.06383, over 3736291.67 frames. ], batch size: 59, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:06,793 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:19,804 INFO [train.py:903] (1/4) Epoch 24, batch 800, loss[loss=0.2026, simple_loss=0.2919, pruned_loss=0.05668, over 19537.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2877, pruned_loss=0.06347, over 3766410.09 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:22,489 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5408, 2.5062, 2.1738, 2.7260, 2.4610, 2.1196, 1.9207, 2.5415], + device='cuda:1'), covar=tensor([0.0949, 0.1542, 0.1444, 0.1019, 0.1321, 0.0565, 0.1545, 0.0691], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0356, 0.0315, 0.0254, 0.0305, 0.0255, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:19:23,265 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 5.074e+02 6.358e+02 8.526e+02 1.766e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-03 01:19:30,235 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 01:19:31,738 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6516, 2.5067, 2.2444, 2.6906, 2.4362, 2.2649, 2.0485, 2.6199], + device='cuda:1'), covar=tensor([0.1008, 0.1581, 0.1510, 0.1088, 0.1449, 0.0573, 0.1548, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0357, 0.0315, 0.0254, 0.0305, 0.0255, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:19:37,396 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157859.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:48,487 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157868.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:19:52,852 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:59,171 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157875.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:05,652 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157881.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:20:20,136 INFO [train.py:903] (1/4) Epoch 24, batch 850, loss[loss=0.1983, simple_loss=0.2764, pruned_loss=0.06004, over 19484.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2872, pruned_loss=0.06323, over 3777392.73 frames. ], batch size: 49, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:20:27,432 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157900.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:05,408 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:08,623 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 01:21:21,348 INFO [train.py:903] (1/4) Epoch 24, batch 900, loss[loss=0.1883, simple_loss=0.2616, pruned_loss=0.0575, over 19401.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06288, over 3785823.87 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:21:25,792 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.800e+02 5.808e+02 7.910e+02 1.683e+03, threshold=1.162e+03, percent-clipped=5.0 +2023-04-03 01:21:31,665 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6374, 4.0373, 4.5156, 4.5690, 1.9113, 4.3155, 3.5424, 3.9065], + device='cuda:1'), covar=tensor([0.2422, 0.1544, 0.1014, 0.1236, 0.7230, 0.1713, 0.1281, 0.2191], + device='cuda:1'), in_proj_covar=tensor([0.0789, 0.0756, 0.0965, 0.0841, 0.0844, 0.0727, 0.0579, 0.0887], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 01:22:10,074 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157983.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:22:22,960 INFO [train.py:903] (1/4) Epoch 24, batch 950, loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06717, over 19678.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2867, pruned_loss=0.06297, over 3800406.20 frames. ], batch size: 55, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:22:23,014 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 01:22:38,872 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.1982, 5.7082, 3.2546, 4.8251, 0.8535, 5.8381, 5.6448, 5.8058], + device='cuda:1'), covar=tensor([0.0334, 0.0692, 0.1591, 0.0693, 0.4148, 0.0434, 0.0690, 0.0905], + device='cuda:1'), in_proj_covar=tensor([0.0512, 0.0413, 0.0496, 0.0347, 0.0401, 0.0438, 0.0432, 0.0466], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:23:26,860 INFO [train.py:903] (1/4) Epoch 24, batch 1000, loss[loss=0.1907, simple_loss=0.2812, pruned_loss=0.05011, over 19623.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2874, pruned_loss=0.06338, over 3801823.74 frames. ], batch size: 57, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:23:28,194 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:23:31,292 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.075e+02 5.979e+02 8.043e+02 1.884e+03, threshold=1.196e+03, percent-clipped=5.0 +2023-04-03 01:23:34,160 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7184, 1.8108, 1.9089, 2.4535, 1.8466, 2.2900, 1.9747, 1.6449], + device='cuda:1'), covar=tensor([0.4865, 0.4726, 0.2687, 0.2906, 0.4567, 0.2588, 0.6374, 0.5110], + device='cuda:1'), in_proj_covar=tensor([0.0909, 0.0980, 0.0723, 0.0935, 0.0888, 0.0825, 0.0845, 0.0787], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 01:24:17,475 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 01:24:22,235 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:27,542 INFO [train.py:903] (1/4) Epoch 24, batch 1050, loss[loss=0.2117, simple_loss=0.2932, pruned_loss=0.06514, over 19672.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2875, pruned_loss=0.06332, over 3808671.39 frames. ], batch size: 58, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:24:50,225 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158114.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:52,312 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158115.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:56,178 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 01:25:18,971 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4387, 1.3521, 1.5605, 1.5656, 1.6894, 1.9562, 1.7120, 0.5707], + device='cuda:1'), covar=tensor([0.2326, 0.4177, 0.2595, 0.1866, 0.1683, 0.2191, 0.1454, 0.4687], + device='cuda:1'), in_proj_covar=tensor([0.0538, 0.0648, 0.0720, 0.0492, 0.0619, 0.0534, 0.0660, 0.0553], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 01:25:20,064 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158137.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:25:23,266 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,354 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:27,550 INFO [train.py:903] (1/4) Epoch 24, batch 1100, loss[loss=0.2314, simple_loss=0.3094, pruned_loss=0.07673, over 19356.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06391, over 3815360.47 frames. ], batch size: 66, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:25:31,913 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 5.122e+02 6.777e+02 7.992e+02 2.032e+03, threshold=1.355e+03, percent-clipped=5.0 +2023-04-03 01:25:51,776 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158162.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:26:28,898 INFO [train.py:903] (1/4) Epoch 24, batch 1150, loss[loss=0.2008, simple_loss=0.2866, pruned_loss=0.05743, over 19595.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2885, pruned_loss=0.06393, over 3827723.74 frames. ], batch size: 57, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:26:56,849 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:27:25,301 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158239.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:27:31,330 INFO [train.py:903] (1/4) Epoch 24, batch 1200, loss[loss=0.1891, simple_loss=0.2691, pruned_loss=0.05449, over 19618.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.289, pruned_loss=0.06387, over 3835880.02 frames. ], batch size: 50, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:27:37,768 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.926e+02 5.852e+02 7.782e+02 1.430e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 01:27:56,377 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158264.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:28:02,802 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 01:28:21,997 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6440, 1.7195, 2.0328, 2.0205, 1.5278, 1.9923, 2.0148, 1.8650], + device='cuda:1'), covar=tensor([0.4176, 0.3857, 0.1897, 0.2382, 0.3921, 0.2085, 0.5044, 0.3294], + device='cuda:1'), in_proj_covar=tensor([0.0908, 0.0981, 0.0725, 0.0939, 0.0890, 0.0826, 0.0846, 0.0788], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 01:28:34,707 INFO [train.py:903] (1/4) Epoch 24, batch 1250, loss[loss=0.205, simple_loss=0.2922, pruned_loss=0.05889, over 19324.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2889, pruned_loss=0.06406, over 3828631.44 frames. ], batch size: 70, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:28:43,307 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:15,062 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:20,344 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:35,684 INFO [train.py:903] (1/4) Epoch 24, batch 1300, loss[loss=0.2015, simple_loss=0.2783, pruned_loss=0.06239, over 19488.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2884, pruned_loss=0.06378, over 3824213.74 frames. ], batch size: 49, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:29:40,388 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 5.283e+02 7.241e+02 8.945e+02 2.355e+03, threshold=1.448e+03, percent-clipped=9.0 +2023-04-03 01:30:36,942 INFO [train.py:903] (1/4) Epoch 24, batch 1350, loss[loss=0.168, simple_loss=0.242, pruned_loss=0.047, over 19735.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2882, pruned_loss=0.06356, over 3831191.63 frames. ], batch size: 46, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:30:47,162 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:31:38,834 INFO [train.py:903] (1/4) Epoch 24, batch 1400, loss[loss=0.2488, simple_loss=0.3224, pruned_loss=0.08762, over 18732.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2884, pruned_loss=0.06385, over 3834322.65 frames. 
], batch size: 74, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:31:43,422 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.106e+02 6.656e+02 8.188e+02 2.197e+03, threshold=1.331e+03, percent-clipped=4.0 +2023-04-03 01:32:03,726 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0187, 1.0479, 1.4122, 1.3958, 2.4088, 1.1641, 2.2830, 2.9019], + device='cuda:1'), covar=tensor([0.0788, 0.3769, 0.3291, 0.2202, 0.1255, 0.2704, 0.1304, 0.0484], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0371, 0.0391, 0.0352, 0.0375, 0.0354, 0.0386, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:32:16,295 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2998, 1.9621, 1.5880, 1.3525, 1.8397, 1.2582, 1.1426, 1.8079], + device='cuda:1'), covar=tensor([0.0968, 0.0898, 0.1033, 0.0884, 0.0539, 0.1306, 0.0781, 0.0462], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0270, 0.0249, 0.0340, 0.0292, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:32:27,056 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:32:39,522 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4539, 2.1345, 1.6585, 1.5113, 1.9763, 1.3461, 1.3682, 1.8424], + device='cuda:1'), covar=tensor([0.0964, 0.0814, 0.1048, 0.0867, 0.0578, 0.1258, 0.0700, 0.0495], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0317, 0.0339, 0.0270, 0.0248, 0.0340, 0.0291, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:32:40,271 INFO [train.py:903] (1/4) Epoch 24, batch 1450, loss[loss=0.1962, simple_loss=0.2734, pruned_loss=0.05955, over 19700.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2894, pruned_loss=0.06442, over 3826900.51 frames. ], batch size: 53, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:32:42,616 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 01:33:27,467 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9512, 4.4124, 4.7445, 4.7398, 1.7436, 4.3999, 3.8530, 4.4694], + device='cuda:1'), covar=tensor([0.1841, 0.0922, 0.0603, 0.0716, 0.6378, 0.0974, 0.0686, 0.1128], + device='cuda:1'), in_proj_covar=tensor([0.0790, 0.0756, 0.0961, 0.0839, 0.0844, 0.0727, 0.0578, 0.0888], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 01:33:41,524 INFO [train.py:903] (1/4) Epoch 24, batch 1500, loss[loss=0.1898, simple_loss=0.2831, pruned_loss=0.04831, over 19525.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.289, pruned_loss=0.06412, over 3835093.96 frames. ], batch size: 56, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:33:46,123 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.633e+02 4.870e+02 5.968e+02 7.477e+02 1.869e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-03 01:34:33,952 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:34:42,445 INFO [train.py:903] (1/4) Epoch 24, batch 1550, loss[loss=0.1649, simple_loss=0.2444, pruned_loss=0.04274, over 19730.00 frames. 
], tot_loss[loss=0.2074, simple_loss=0.288, pruned_loss=0.06341, over 3843943.49 frames. ], batch size: 46, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:34:48,606 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:04,336 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:46,052 INFO [train.py:903] (1/4) Epoch 24, batch 1600, loss[loss=0.2027, simple_loss=0.2884, pruned_loss=0.05845, over 19656.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2869, pruned_loss=0.06317, over 3815889.72 frames. ], batch size: 58, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:35:51,812 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.721e+02 6.000e+02 7.264e+02 1.836e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 01:36:12,024 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4657, 1.6326, 1.9973, 1.7314, 2.9434, 2.4609, 3.2824, 1.5059], + device='cuda:1'), covar=tensor([0.2638, 0.4424, 0.2730, 0.2017, 0.1705, 0.2230, 0.1660, 0.4491], + device='cuda:1'), in_proj_covar=tensor([0.0540, 0.0650, 0.0721, 0.0492, 0.0621, 0.0537, 0.0663, 0.0557], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 01:36:12,728 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 01:36:28,083 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5495, 1.6675, 1.9193, 1.9406, 1.4617, 1.8627, 1.9446, 1.7767], + device='cuda:1'), covar=tensor([0.4294, 0.3739, 0.1942, 0.2367, 0.3879, 0.2223, 0.5035, 0.3427], + device='cuda:1'), in_proj_covar=tensor([0.0911, 0.0981, 0.0724, 0.0937, 0.0890, 0.0827, 0.0845, 0.0789], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 01:36:50,311 INFO [train.py:903] (1/4) Epoch 24, batch 1650, loss[loss=0.181, simple_loss=0.2656, pruned_loss=0.04821, over 19474.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2872, pruned_loss=0.06318, over 3820141.69 frames. ], batch size: 49, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:52,881 INFO [train.py:903] (1/4) Epoch 24, batch 1700, loss[loss=0.1893, simple_loss=0.272, pruned_loss=0.05326, over 19776.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2859, pruned_loss=0.06219, over 3832437.27 frames. ], batch size: 47, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:55,338 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:37:57,376 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.745e+02 5.793e+02 7.168e+02 1.456e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 01:38:00,925 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3741, 3.9866, 2.8339, 3.5364, 0.9420, 3.9588, 3.7911, 3.9003], + device='cuda:1'), covar=tensor([0.0674, 0.0972, 0.1722, 0.0868, 0.3883, 0.0720, 0.0905, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0512, 0.0414, 0.0497, 0.0346, 0.0403, 0.0438, 0.0430, 0.0463], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:38:35,610 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-03 01:38:54,538 INFO [train.py:903] (1/4) Epoch 24, batch 1750, loss[loss=0.1727, simple_loss=0.2549, pruned_loss=0.04519, over 19755.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2863, pruned_loss=0.06277, over 3824879.49 frames. ], batch size: 47, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:38:56,134 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:03,092 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:57,565 INFO [train.py:903] (1/4) Epoch 24, batch 1800, loss[loss=0.2027, simple_loss=0.2774, pruned_loss=0.06397, over 19652.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2863, pruned_loss=0.06281, over 3823949.92 frames. ], batch size: 53, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:40:02,408 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.690e+02 5.979e+02 7.282e+02 2.087e+03, threshold=1.196e+03, percent-clipped=3.0 +2023-04-03 01:40:08,473 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158853.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:40:11,872 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158855.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:19,331 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:40,540 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:56,885 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 01:40:58,009 INFO [train.py:903] (1/4) Epoch 24, batch 1850, loss[loss=0.1999, simple_loss=0.2894, pruned_loss=0.05522, over 19598.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2867, pruned_loss=0.06306, over 3832757.80 frames. ], batch size: 57, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:41:32,428 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 01:41:49,535 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2373, 5.6480, 3.2179, 4.9512, 1.2094, 5.7262, 5.5770, 5.7519], + device='cuda:1'), covar=tensor([0.0329, 0.0781, 0.1744, 0.0651, 0.4039, 0.0489, 0.0788, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0416, 0.0499, 0.0347, 0.0404, 0.0440, 0.0432, 0.0465], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:42:00,397 INFO [train.py:903] (1/4) Epoch 24, batch 1900, loss[loss=0.1866, simple_loss=0.2668, pruned_loss=0.05324, over 19492.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06251, over 3821853.53 frames. ], batch size: 49, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:42:04,927 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 4.936e+02 5.873e+02 7.509e+02 2.125e+03, threshold=1.175e+03, percent-clipped=8.0 +2023-04-03 01:42:18,827 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 01:42:24,127 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 01:42:46,701 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 01:43:00,391 INFO [train.py:903] (1/4) Epoch 24, batch 1950, loss[loss=0.1833, simple_loss=0.275, pruned_loss=0.04587, over 19779.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06256, over 3817231.54 frames. ], batch size: 56, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:03,845 INFO [train.py:903] (1/4) Epoch 24, batch 2000, loss[loss=0.2212, simple_loss=0.3062, pruned_loss=0.06813, over 19373.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06266, over 3810583.11 frames. ], batch size: 70, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:08,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.106e+02 6.605e+02 9.481e+02 1.726e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-03 01:44:57,683 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:44:59,858 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 01:45:06,688 INFO [train.py:903] (1/4) Epoch 24, batch 2050, loss[loss=0.1765, simple_loss=0.2594, pruned_loss=0.04678, over 19413.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.287, pruned_loss=0.06291, over 3809245.75 frames. ], batch size: 48, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:45:19,174 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 01:45:20,305 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 01:45:35,038 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:45:41,053 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 01:46:02,787 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159139.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:06,499 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:08,201 INFO [train.py:903] (1/4) Epoch 24, batch 2100, loss[loss=0.2405, simple_loss=0.3084, pruned_loss=0.08625, over 19631.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06211, over 3821401.27 frames. ], batch size: 50, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:46:10,470 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159145.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:13,635 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.527e+02 4.786e+02 5.922e+02 8.131e+02 1.881e+03, threshold=1.184e+03, percent-clipped=4.0 +2023-04-03 01:46:37,350 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 01:46:49,814 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:58,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-03 01:47:00,992 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8446, 1.3144, 1.4877, 1.4205, 3.4266, 1.1410, 2.5424, 3.8942], + device='cuda:1'), covar=tensor([0.0535, 0.2921, 0.3141, 0.2094, 0.0710, 0.2729, 0.1412, 0.0224], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0354, 0.0378, 0.0355, 0.0389, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:47:10,236 INFO [train.py:903] (1/4) Epoch 24, batch 2150, loss[loss=0.1822, simple_loss=0.2675, pruned_loss=0.04845, over 19655.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.06264, over 3823187.81 frames. ], batch size: 53, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:47:13,876 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159197.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:48:12,839 INFO [train.py:903] (1/4) Epoch 24, batch 2200, loss[loss=0.2233, simple_loss=0.3016, pruned_loss=0.07251, over 19849.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2862, pruned_loss=0.06305, over 3814141.37 frames. ], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:48:18,019 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.881e+02 6.157e+02 7.813e+02 2.191e+03, threshold=1.231e+03, percent-clipped=6.0 +2023-04-03 01:48:25,306 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:33,265 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:36,912 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7489, 2.7543, 2.1231, 2.1211, 1.6552, 2.1925, 1.0651, 1.9720], + device='cuda:1'), covar=tensor([0.1159, 0.0839, 0.0749, 0.1350, 0.1647, 0.1656, 0.1676, 0.1280], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0357, 0.0360, 0.0385, 0.0462, 0.0390, 0.0338, 0.0342], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 01:49:14,294 INFO [train.py:903] (1/4) Epoch 24, batch 2250, loss[loss=0.2464, simple_loss=0.3216, pruned_loss=0.08556, over 19777.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2866, pruned_loss=0.06338, over 3824190.83 frames. ], batch size: 54, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:49:31,900 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:49:37,433 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159312.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:50:16,681 INFO [train.py:903] (1/4) Epoch 24, batch 2300, loss[loss=0.1803, simple_loss=0.2568, pruned_loss=0.05189, over 19796.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06332, over 3821694.13 frames. ], batch size: 49, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:50:21,055 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.864e+02 6.208e+02 8.672e+02 1.812e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 01:50:31,289 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-03 01:51:19,168 INFO [train.py:903] (1/4) Epoch 24, batch 2350, loss[loss=0.2352, simple_loss=0.3164, pruned_loss=0.07698, over 19631.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2858, pruned_loss=0.06284, over 3830079.28 frames. ], batch size: 57, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:00,180 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 01:52:04,788 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:52:17,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 01:52:19,432 INFO [train.py:903] (1/4) Epoch 24, batch 2400, loss[loss=0.1956, simple_loss=0.2826, pruned_loss=0.05428, over 19745.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2872, pruned_loss=0.06324, over 3806780.36 frames. ], batch size: 63, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:21,069 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3578, 1.9277, 1.9326, 2.1451, 1.8222, 1.8753, 1.6989, 2.1448], + device='cuda:1'), covar=tensor([0.0900, 0.1479, 0.1374, 0.1040, 0.1444, 0.0548, 0.1507, 0.0700], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0358, 0.0313, 0.0255, 0.0304, 0.0253, 0.0315, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 01:52:25,061 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.946e+02 5.943e+02 8.368e+02 2.189e+03, threshold=1.189e+03, percent-clipped=6.0 +2023-04-03 01:53:21,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1392, 1.2482, 1.6698, 1.0116, 2.2667, 2.9711, 2.6746, 3.1864], + device='cuda:1'), covar=tensor([0.1686, 0.3960, 0.3411, 0.2868, 0.0717, 0.0270, 0.0285, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0325, 0.0355, 0.0265, 0.0246, 0.0190, 0.0216, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 01:53:22,620 INFO [train.py:903] (1/4) Epoch 24, batch 2450, loss[loss=0.2342, simple_loss=0.3203, pruned_loss=0.07405, over 19339.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06251, over 3821742.36 frames. ], batch size: 70, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:53:42,140 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159510.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:47,916 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 01:53:50,000 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:55,513 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:14,824 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:21,730 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:24,657 INFO [train.py:903] (1/4) Epoch 24, batch 2500, loss[loss=0.1695, simple_loss=0.2464, pruned_loss=0.04629, over 19789.00 frames. 
], tot_loss[loss=0.2065, simple_loss=0.2867, pruned_loss=0.06315, over 3801755.26 frames. ], batch size: 48, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:54:27,451 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:29,312 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.941e+02 6.110e+02 7.649e+02 1.406e+03, threshold=1.222e+03, percent-clipped=1.0 +2023-04-03 01:54:55,472 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159568.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:54:56,911 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-03 01:55:25,723 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159593.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:55:26,480 INFO [train.py:903] (1/4) Epoch 24, batch 2550, loss[loss=0.1838, simple_loss=0.259, pruned_loss=0.05433, over 19763.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.0633, over 3814639.99 frames. ], batch size: 46, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:19,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:20,621 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 01:56:29,402 INFO [train.py:903] (1/4) Epoch 24, batch 2600, loss[loss=0.2521, simple_loss=0.314, pruned_loss=0.09516, over 13853.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2885, pruned_loss=0.06373, over 3809073.78 frames. ], batch size: 138, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:34,954 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.779e+02 5.928e+02 8.262e+02 1.528e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-03 01:56:36,559 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159649.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:39,984 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:57:31,603 INFO [train.py:903] (1/4) Epoch 24, batch 2650, loss[loss=0.2034, simple_loss=0.2892, pruned_loss=0.05882, over 19590.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2883, pruned_loss=0.06393, over 3801523.65 frames. ], batch size: 61, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:57:34,998 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2413, 1.3513, 1.7357, 1.2523, 2.6845, 3.5400, 3.2428, 3.7611], + device='cuda:1'), covar=tensor([0.1603, 0.3709, 0.3362, 0.2547, 0.0604, 0.0205, 0.0231, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0324, 0.0356, 0.0264, 0.0246, 0.0190, 0.0216, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 01:57:43,903 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 01:58:21,020 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3542, 1.4017, 1.5445, 1.5410, 1.7529, 1.8472, 1.7659, 0.5573], + device='cuda:1'), covar=tensor([0.2726, 0.4457, 0.2809, 0.2119, 0.1712, 0.2489, 0.1506, 0.5160], + device='cuda:1'), in_proj_covar=tensor([0.0544, 0.0656, 0.0727, 0.0495, 0.0625, 0.0537, 0.0667, 0.0560], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 01:58:34,705 INFO [train.py:903] (1/4) Epoch 24, batch 2700, loss[loss=0.1921, simple_loss=0.261, pruned_loss=0.06162, over 19730.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2887, pruned_loss=0.06406, over 3802885.86 frames. ], batch size: 45, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:58:39,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 5.237e+02 6.508e+02 8.466e+02 2.382e+03, threshold=1.302e+03, percent-clipped=8.0 +2023-04-03 01:59:03,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:59:36,001 INFO [train.py:903] (1/4) Epoch 24, batch 2750, loss[loss=0.1878, simple_loss=0.2619, pruned_loss=0.05684, over 19763.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2884, pruned_loss=0.06391, over 3824420.42 frames. ], batch size: 46, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:59:46,871 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:19,530 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:40,377 INFO [train.py:903] (1/4) Epoch 24, batch 2800, loss[loss=0.2273, simple_loss=0.3115, pruned_loss=0.07155, over 19672.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2872, pruned_loss=0.0633, over 3832490.88 frames. ], batch size: 60, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:00:45,935 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.444e+02 4.661e+02 5.641e+02 7.181e+02 2.352e+03, threshold=1.128e+03, percent-clipped=2.0 +2023-04-03 02:01:10,957 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-03 02:01:16,194 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:40,836 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:42,762 INFO [train.py:903] (1/4) Epoch 24, batch 2850, loss[loss=0.2076, simple_loss=0.2947, pruned_loss=0.06023, over 19431.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.287, pruned_loss=0.06309, over 3838300.99 frames. ], batch size: 70, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:10,656 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:25,913 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159928.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:39,636 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 02:02:45,315 INFO [train.py:903] (1/4) Epoch 24, batch 2900, loss[loss=0.1938, simple_loss=0.271, pruned_loss=0.05825, over 19608.00 frames. 
], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06298, over 3840046.51 frames. ], batch size: 50, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:51,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.874e+02 6.501e+02 8.672e+02 1.518e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-03 02:03:45,530 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:03:46,480 INFO [train.py:903] (1/4) Epoch 24, batch 2950, loss[loss=0.1876, simple_loss=0.2671, pruned_loss=0.05404, over 19760.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2858, pruned_loss=0.06284, over 3837316.64 frames. ], batch size: 48, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:24,717 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:04:45,836 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 02:04:49,458 INFO [train.py:903] (1/4) Epoch 24, batch 3000, loss[loss=0.2088, simple_loss=0.2947, pruned_loss=0.0615, over 19787.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2861, pruned_loss=0.06264, over 3835060.87 frames. ], batch size: 54, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:49,458 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 02:05:01,999 INFO [train.py:937] (1/4) Epoch 24, validation: loss=0.1679, simple_loss=0.268, pruned_loss=0.03397, over 944034.00 frames. +2023-04-03 02:05:02,000 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 02:05:08,064 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:05:08,848 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 4.966e+02 6.275e+02 7.790e+02 1.988e+03, threshold=1.255e+03, percent-clipped=5.0 +2023-04-03 02:06:04,591 INFO [train.py:903] (1/4) Epoch 24, batch 3050, loss[loss=0.2855, simple_loss=0.3476, pruned_loss=0.1117, over 13059.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06258, over 3831124.88 frames. ], batch size: 135, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:06:23,643 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:06:37,819 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 02:07:06,772 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3109, 2.1765, 2.0877, 1.9658, 1.6888, 1.8763, 0.5518, 1.3396], + device='cuda:1'), covar=tensor([0.0634, 0.0631, 0.0477, 0.0792, 0.1215, 0.0897, 0.1402, 0.1036], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0356, 0.0361, 0.0383, 0.0461, 0.0391, 0.0338, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:07:08,738 INFO [train.py:903] (1/4) Epoch 24, batch 3100, loss[loss=0.2261, simple_loss=0.3036, pruned_loss=0.07433, over 19543.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2876, pruned_loss=0.06315, over 3818760.79 frames. 
], batch size: 56, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:07:14,571 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 4.859e+02 5.894e+02 7.109e+02 1.682e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 02:07:40,561 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6802, 4.2243, 4.4708, 4.4620, 1.6139, 4.1988, 3.6171, 4.1830], + device='cuda:1'), covar=tensor([0.1856, 0.0972, 0.0652, 0.0722, 0.6701, 0.1036, 0.0725, 0.1201], + device='cuda:1'), in_proj_covar=tensor([0.0793, 0.0757, 0.0963, 0.0844, 0.0847, 0.0733, 0.0576, 0.0895], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 02:08:10,532 INFO [train.py:903] (1/4) Epoch 24, batch 3150, loss[loss=0.2345, simple_loss=0.3031, pruned_loss=0.08299, over 19523.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2865, pruned_loss=0.06243, over 3821372.82 frames. ], batch size: 54, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:08:36,030 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 02:08:39,725 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160217.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:09:14,346 INFO [train.py:903] (1/4) Epoch 24, batch 3200, loss[loss=0.1881, simple_loss=0.2768, pruned_loss=0.04971, over 19757.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2867, pruned_loss=0.0628, over 3818733.01 frames. ], batch size: 54, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:09:20,051 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.134e+02 6.599e+02 8.700e+02 2.161e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-03 02:09:33,567 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9451, 1.2572, 1.5968, 0.8857, 2.2827, 3.0254, 2.7447, 3.2101], + device='cuda:1'), covar=tensor([0.1789, 0.3955, 0.3607, 0.2784, 0.0673, 0.0226, 0.0264, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0326, 0.0356, 0.0265, 0.0246, 0.0190, 0.0217, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 02:09:49,730 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:10:17,065 INFO [train.py:903] (1/4) Epoch 24, batch 3250, loss[loss=0.1685, simple_loss=0.2472, pruned_loss=0.0449, over 19760.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.06202, over 3830364.70 frames. ], batch size: 45, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:10:56,761 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160325.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:11:04,807 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160332.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:11:21,829 INFO [train.py:903] (1/4) Epoch 24, batch 3300, loss[loss=0.1729, simple_loss=0.2519, pruned_loss=0.047, over 19372.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2859, pruned_loss=0.06271, over 3822277.31 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:11:24,399 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 02:11:27,809 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.973e+02 5.834e+02 7.675e+02 1.997e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-03 02:11:46,547 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160364.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:15,652 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:18,219 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160389.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:23,808 INFO [train.py:903] (1/4) Epoch 24, batch 3350, loss[loss=0.1881, simple_loss=0.264, pruned_loss=0.05605, over 19383.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2858, pruned_loss=0.06265, over 3833357.85 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:12:27,296 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 02:12:39,927 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-03 02:13:26,805 INFO [train.py:903] (1/4) Epoch 24, batch 3400, loss[loss=0.2054, simple_loss=0.2763, pruned_loss=0.06729, over 19726.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.06241, over 3835657.75 frames. ], batch size: 51, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:13:32,540 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.133e+02 6.647e+02 9.203e+02 1.938e+03, threshold=1.329e+03, percent-clipped=8.0 +2023-04-03 02:13:48,729 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4780, 1.4847, 1.7674, 1.7154, 2.5150, 2.1982, 2.5543, 1.2860], + device='cuda:1'), covar=tensor([0.2485, 0.4283, 0.2742, 0.1922, 0.1461, 0.2131, 0.1485, 0.4394], + device='cuda:1'), in_proj_covar=tensor([0.0543, 0.0654, 0.0728, 0.0494, 0.0622, 0.0537, 0.0665, 0.0558], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:14:28,021 INFO [train.py:903] (1/4) Epoch 24, batch 3450, loss[loss=0.219, simple_loss=0.3068, pruned_loss=0.06565, over 19836.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2856, pruned_loss=0.06231, over 3839430.51 frames. ], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:14:31,643 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 02:15:29,542 INFO [train.py:903] (1/4) Epoch 24, batch 3500, loss[loss=0.1765, simple_loss=0.2598, pruned_loss=0.04661, over 19588.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.284, pruned_loss=0.06169, over 3836836.12 frames. 
], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:15:38,055 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.685e+02 5.887e+02 7.904e+02 2.662e+03, threshold=1.177e+03, percent-clipped=4.0 +2023-04-03 02:15:42,029 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9902, 1.3340, 1.0691, 0.9724, 1.2313, 0.8774, 1.0046, 1.2394], + device='cuda:1'), covar=tensor([0.0664, 0.0682, 0.0774, 0.0642, 0.0443, 0.1039, 0.0486, 0.0365], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0266, 0.0246, 0.0339, 0.0289, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:15:50,496 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 02:16:26,898 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:33,578 INFO [train.py:903] (1/4) Epoch 24, batch 3550, loss[loss=0.1784, simple_loss=0.2619, pruned_loss=0.04741, over 19848.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2853, pruned_loss=0.06248, over 3816239.89 frames. ], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:16:50,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0262, 4.4401, 4.7849, 4.7970, 1.7690, 4.5007, 3.8783, 4.4883], + device='cuda:1'), covar=tensor([0.1623, 0.0939, 0.0555, 0.0659, 0.5784, 0.0918, 0.0687, 0.1078], + device='cuda:1'), in_proj_covar=tensor([0.0793, 0.0757, 0.0963, 0.0842, 0.0846, 0.0733, 0.0576, 0.0895], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 02:16:57,371 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160613.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:58,911 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 02:17:20,640 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 02:17:36,600 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:37,270 INFO [train.py:903] (1/4) Epoch 24, batch 3600, loss[loss=0.1945, simple_loss=0.2645, pruned_loss=0.06225, over 19749.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2856, pruned_loss=0.06283, over 3800671.85 frames. 
], batch size: 45, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:17:44,412 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 4.811e+02 5.669e+02 7.209e+02 1.690e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 02:18:07,971 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160668.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:18:08,811 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160669.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:18:30,058 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5356, 1.5794, 1.9021, 1.7828, 2.7353, 2.3681, 2.8800, 1.2126], + device='cuda:1'), covar=tensor([0.2600, 0.4455, 0.2757, 0.2050, 0.1589, 0.2223, 0.1447, 0.4667], + device='cuda:1'), in_proj_covar=tensor([0.0541, 0.0652, 0.0725, 0.0493, 0.0621, 0.0535, 0.0663, 0.0556], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:18:40,045 INFO [train.py:903] (1/4) Epoch 24, batch 3650, loss[loss=0.1816, simple_loss=0.2637, pruned_loss=0.04968, over 19368.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2851, pruned_loss=0.06229, over 3817006.27 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:18:48,164 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7845, 4.2285, 4.4707, 4.4993, 1.7887, 4.2070, 3.6624, 4.2078], + device='cuda:1'), covar=tensor([0.1668, 0.0913, 0.0640, 0.0682, 0.5664, 0.0941, 0.0700, 0.1128], + device='cuda:1'), in_proj_covar=tensor([0.0794, 0.0757, 0.0962, 0.0842, 0.0847, 0.0733, 0.0577, 0.0894], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 02:19:40,904 INFO [train.py:903] (1/4) Epoch 24, batch 3700, loss[loss=0.2577, simple_loss=0.3367, pruned_loss=0.08937, over 13433.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06335, over 3817067.28 frames. ], batch size: 136, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:49,418 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.616e+02 6.347e+02 7.690e+02 1.972e+03, threshold=1.269e+03, percent-clipped=6.0 +2023-04-03 02:20:30,326 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160784.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:20:44,502 INFO [train.py:903] (1/4) Epoch 24, batch 3750, loss[loss=0.1993, simple_loss=0.2777, pruned_loss=0.06044, over 19849.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.287, pruned_loss=0.06379, over 3813936.68 frames. ], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:45,656 INFO [train.py:903] (1/4) Epoch 24, batch 3800, loss[loss=0.1807, simple_loss=0.274, pruned_loss=0.0437, over 19681.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2878, pruned_loss=0.0641, over 3818105.95 frames. ], batch size: 53, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:53,460 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.186e+02 4.635e+02 5.250e+02 7.046e+02 1.734e+03, threshold=1.050e+03, percent-clipped=4.0 +2023-04-03 02:22:18,588 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-03 02:22:22,443 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4736, 1.1591, 1.1297, 1.3818, 1.0815, 1.2587, 1.1384, 1.3413], + device='cuda:1'), covar=tensor([0.1112, 0.1244, 0.1530, 0.0989, 0.1267, 0.0633, 0.1531, 0.0810], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0356, 0.0313, 0.0255, 0.0305, 0.0254, 0.0313, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:22:41,689 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.7679, 0.9227, 1.1865, 0.5863, 1.5153, 1.7398, 1.5785, 1.8042], + device='cuda:1'), covar=tensor([0.1320, 0.2983, 0.2618, 0.2454, 0.0838, 0.0392, 0.0320, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0325, 0.0355, 0.0264, 0.0245, 0.0190, 0.0216, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 02:22:47,096 INFO [train.py:903] (1/4) Epoch 24, batch 3850, loss[loss=0.1985, simple_loss=0.2935, pruned_loss=0.05177, over 19525.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2875, pruned_loss=0.06408, over 3818144.93 frames. ], batch size: 56, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:18,520 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160919.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:23:48,689 INFO [train.py:903] (1/4) Epoch 24, batch 3900, loss[loss=0.1792, simple_loss=0.2575, pruned_loss=0.05042, over 16871.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2876, pruned_loss=0.064, over 3817211.77 frames. ], batch size: 37, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:58,399 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 5.134e+02 6.381e+02 7.692e+02 1.884e+03, threshold=1.276e+03, percent-clipped=12.0 +2023-04-03 02:24:38,042 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:24:53,495 INFO [train.py:903] (1/4) Epoch 24, batch 3950, loss[loss=0.1854, simple_loss=0.2614, pruned_loss=0.05467, over 19802.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06334, over 3816561.48 frames. ], batch size: 49, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:24:57,072 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 02:25:51,223 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161040.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:25:55,366 INFO [train.py:903] (1/4) Epoch 24, batch 4000, loss[loss=0.1853, simple_loss=0.2658, pruned_loss=0.05239, over 19369.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06298, over 3812984.39 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:26:00,841 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.25 vs. 
limit=5.0 +2023-04-03 02:26:03,423 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.651e+02 5.074e+02 6.327e+02 7.723e+02 1.762e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-03 02:26:08,671 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6466, 1.2619, 1.2527, 1.4458, 1.0756, 1.2880, 1.1779, 1.4254], + device='cuda:1'), covar=tensor([0.1209, 0.1196, 0.1771, 0.1069, 0.1427, 0.0806, 0.1908, 0.0975], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0357, 0.0313, 0.0255, 0.0305, 0.0254, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:26:21,981 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161065.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:26:41,800 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 02:26:57,071 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 02:26:57,378 INFO [train.py:903] (1/4) Epoch 24, batch 4050, loss[loss=0.2137, simple_loss=0.2935, pruned_loss=0.0669, over 19649.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2868, pruned_loss=0.06375, over 3800588.54 frames. ], batch size: 58, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:50,008 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9840, 1.8581, 1.8332, 2.0621, 1.9161, 1.7351, 1.8215, 1.9968], + device='cuda:1'), covar=tensor([0.0859, 0.1257, 0.1147, 0.0800, 0.1015, 0.0481, 0.1124, 0.0562], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0357, 0.0313, 0.0254, 0.0305, 0.0254, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:27:57,727 INFO [train.py:903] (1/4) Epoch 24, batch 4100, loss[loss=0.1784, simple_loss=0.262, pruned_loss=0.0474, over 19661.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2865, pruned_loss=0.06371, over 3794832.40 frames. ], batch size: 53, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:58,488 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 02:28:06,052 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.915e+02 6.129e+02 7.795e+02 1.333e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 02:28:31,036 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 02:28:41,668 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8027, 1.8942, 2.2192, 2.2886, 1.7452, 2.2271, 2.2588, 2.0745], + device='cuda:1'), covar=tensor([0.4389, 0.4174, 0.1971, 0.2590, 0.4397, 0.2419, 0.5097, 0.3494], + device='cuda:1'), in_proj_covar=tensor([0.0919, 0.0989, 0.0729, 0.0940, 0.0894, 0.0830, 0.0852, 0.0790], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 02:29:00,662 INFO [train.py:903] (1/4) Epoch 24, batch 4150, loss[loss=0.2304, simple_loss=0.3078, pruned_loss=0.07648, over 13221.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2865, pruned_loss=0.0634, over 3791393.53 frames. ], batch size: 135, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:29:02,586 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. 
limit=2.0 +2023-04-03 02:29:43,135 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0592, 2.8116, 2.1509, 2.2127, 1.9525, 2.4071, 1.0849, 2.0348], + device='cuda:1'), covar=tensor([0.0640, 0.0651, 0.0755, 0.1102, 0.1118, 0.1104, 0.1467, 0.1078], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0361, 0.0365, 0.0388, 0.0466, 0.0395, 0.0342, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:29:47,421 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6097, 1.5467, 1.6063, 2.0798, 1.6751, 1.9425, 1.9721, 1.8077], + device='cuda:1'), covar=tensor([0.0869, 0.0958, 0.1009, 0.0806, 0.0902, 0.0760, 0.0886, 0.0680], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0239, 0.0224, 0.0212, 0.0188, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 02:29:50,574 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:01,377 INFO [train.py:903] (1/4) Epoch 24, batch 4200, loss[loss=0.1864, simple_loss=0.2792, pruned_loss=0.04686, over 19654.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2876, pruned_loss=0.06388, over 3803627.24 frames. ], batch size: 58, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:30:02,579 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 02:30:08,532 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:09,257 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.684e+02 6.110e+02 7.845e+02 2.290e+03, threshold=1.222e+03, percent-clipped=7.0 +2023-04-03 02:30:24,258 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:03,259 INFO [train.py:903] (1/4) Epoch 24, batch 4250, loss[loss=0.1633, simple_loss=0.2401, pruned_loss=0.0433, over 19326.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.0632, over 3810392.87 frames. ], batch size: 44, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:31:17,080 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 02:31:28,262 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 02:31:44,255 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:47,947 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-03 02:32:04,741 INFO [train.py:903] (1/4) Epoch 24, batch 4300, loss[loss=0.2024, simple_loss=0.2838, pruned_loss=0.06049, over 19679.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06249, over 3814824.58 frames. 
], batch size: 53, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:32:09,679 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1735, 5.1823, 6.0197, 6.0166, 1.9588, 5.6687, 4.7819, 5.6135], + device='cuda:1'), covar=tensor([0.1805, 0.0908, 0.0558, 0.0619, 0.6233, 0.0727, 0.0646, 0.1202], + device='cuda:1'), in_proj_covar=tensor([0.0794, 0.0760, 0.0961, 0.0842, 0.0844, 0.0728, 0.0575, 0.0893], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 02:32:12,548 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.585e+02 5.768e+02 7.257e+02 2.214e+03, threshold=1.154e+03, percent-clipped=5.0 +2023-04-03 02:32:38,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0185, 2.0142, 1.7947, 2.1241, 2.0236, 1.7882, 1.7833, 2.0098], + device='cuda:1'), covar=tensor([0.1066, 0.1380, 0.1415, 0.0957, 0.1218, 0.0564, 0.1380, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0358, 0.0314, 0.0256, 0.0306, 0.0254, 0.0315, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:32:47,609 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:47,677 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9904, 1.9230, 1.8736, 1.6715, 1.4593, 1.6843, 0.4436, 0.9482], + device='cuda:1'), covar=tensor([0.0679, 0.0662, 0.0415, 0.0681, 0.1279, 0.0785, 0.1299, 0.1072], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0360, 0.0364, 0.0387, 0.0465, 0.0393, 0.0341, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:32:57,525 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 02:33:06,262 INFO [train.py:903] (1/4) Epoch 24, batch 4350, loss[loss=0.18, simple_loss=0.2659, pruned_loss=0.04703, over 19654.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.06238, over 3813228.87 frames. ], batch size: 53, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:33:19,515 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5433, 1.4739, 1.4774, 1.9042, 1.5004, 1.6222, 1.7314, 1.5826], + device='cuda:1'), covar=tensor([0.0864, 0.0965, 0.1048, 0.0613, 0.0793, 0.0828, 0.0857, 0.0731], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0238, 0.0224, 0.0212, 0.0188, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 02:33:37,843 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:33:38,148 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.25 vs. limit=5.0 +2023-04-03 02:34:07,079 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:08,989 INFO [train.py:903] (1/4) Epoch 24, batch 4400, loss[loss=0.2162, simple_loss=0.2955, pruned_loss=0.06847, over 19667.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2855, pruned_loss=0.06254, over 3816245.88 frames. 
], batch size: 55, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:34:15,588 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.009e+02 6.093e+02 7.233e+02 1.222e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 02:34:31,975 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 02:34:41,593 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 02:34:50,948 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:35:10,542 INFO [train.py:903] (1/4) Epoch 24, batch 4450, loss[loss=0.2512, simple_loss=0.329, pruned_loss=0.08671, over 19493.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2871, pruned_loss=0.06348, over 3819185.19 frames. ], batch size: 64, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:35:14,646 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 02:35:49,135 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1709, 2.0616, 1.9920, 1.7848, 1.7234, 1.7667, 0.5040, 1.1008], + device='cuda:1'), covar=tensor([0.0719, 0.0675, 0.0470, 0.0784, 0.1167, 0.0899, 0.1424, 0.1127], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0359, 0.0361, 0.0385, 0.0463, 0.0391, 0.0339, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:36:10,890 INFO [train.py:903] (1/4) Epoch 24, batch 4500, loss[loss=0.1919, simple_loss=0.2795, pruned_loss=0.05217, over 19673.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2875, pruned_loss=0.06373, over 3819141.39 frames. ], batch size: 58, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:17,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.067e+02 5.206e+02 6.185e+02 8.214e+02 2.130e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 02:36:48,198 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5782, 1.3248, 1.1848, 1.4392, 1.0832, 1.3140, 1.1662, 1.4249], + device='cuda:1'), covar=tensor([0.1250, 0.1174, 0.1781, 0.1166, 0.1400, 0.0729, 0.1759, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0358, 0.0315, 0.0258, 0.0307, 0.0255, 0.0317, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:36:53,395 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:36:57,974 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5697, 2.2851, 1.6498, 1.5691, 2.0682, 1.3205, 1.4452, 1.8897], + device='cuda:1'), covar=tensor([0.1279, 0.0834, 0.1180, 0.0872, 0.0622, 0.1434, 0.0833, 0.0620], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0317, 0.0338, 0.0267, 0.0248, 0.0341, 0.0292, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:37:10,278 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:12,186 INFO [train.py:903] (1/4) Epoch 24, batch 4550, loss[loss=0.1921, simple_loss=0.2824, pruned_loss=0.05088, over 19661.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.287, pruned_loss=0.06358, over 3829987.82 frames. 
], batch size: 55, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:37:20,034 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 02:37:35,014 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4172, 1.5463, 1.8048, 1.6863, 2.6398, 2.2778, 2.8110, 1.2760], + device='cuda:1'), covar=tensor([0.2668, 0.4595, 0.2859, 0.2057, 0.1729, 0.2257, 0.1638, 0.4628], + device='cuda:1'), in_proj_covar=tensor([0.0546, 0.0656, 0.0732, 0.0497, 0.0629, 0.0540, 0.0667, 0.0560], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 02:37:44,745 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 02:38:01,553 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:38:15,062 INFO [train.py:903] (1/4) Epoch 24, batch 4600, loss[loss=0.1866, simple_loss=0.2688, pruned_loss=0.05225, over 19617.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06381, over 3830456.38 frames. ], batch size: 50, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:38:21,974 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.894e+02 5.872e+02 7.852e+02 1.807e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 02:38:33,828 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:15,627 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:16,441 INFO [train.py:903] (1/4) Epoch 24, batch 4650, loss[loss=0.2302, simple_loss=0.3076, pruned_loss=0.07641, over 19691.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2866, pruned_loss=0.06328, over 3839098.77 frames. ], batch size: 59, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:39:22,565 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:23,689 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2695, 1.3490, 1.8560, 1.5927, 3.0517, 4.5175, 4.3961, 4.9621], + device='cuda:1'), covar=tensor([0.1697, 0.3829, 0.3311, 0.2303, 0.0597, 0.0203, 0.0171, 0.0198], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0326, 0.0356, 0.0266, 0.0246, 0.0191, 0.0217, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 02:39:33,574 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 02:39:33,869 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:44,940 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 02:39:53,568 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:57,797 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3851, 2.0577, 1.6269, 1.4313, 1.8831, 1.3473, 1.3219, 1.8251], + device='cuda:1'), covar=tensor([0.0947, 0.0799, 0.1282, 0.0854, 0.0568, 0.1401, 0.0719, 0.0473], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0315, 0.0338, 0.0267, 0.0247, 0.0340, 0.0291, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:40:19,304 INFO [train.py:903] (1/4) Epoch 24, batch 4700, loss[loss=0.2306, simple_loss=0.3104, pruned_loss=0.07543, over 19687.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2851, pruned_loss=0.06244, over 3832137.13 frames. ], batch size: 60, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:40:26,428 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 5.147e+02 6.134e+02 7.606e+02 1.537e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-03 02:40:39,906 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 02:40:43,342 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:41:20,592 INFO [train.py:903] (1/4) Epoch 24, batch 4750, loss[loss=0.201, simple_loss=0.2589, pruned_loss=0.07159, over 19031.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.06202, over 3830558.37 frames. ], batch size: 42, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:41:57,353 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:09,923 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:23,974 INFO [train.py:903] (1/4) Epoch 24, batch 4800, loss[loss=0.195, simple_loss=0.2728, pruned_loss=0.05862, over 19606.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2841, pruned_loss=0.06181, over 3826727.97 frames. ], batch size: 50, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:42:31,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.225e+02 6.101e+02 7.305e+02 1.695e+03, threshold=1.220e+03, percent-clipped=2.0 +2023-04-03 02:43:04,858 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:43:21,623 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0847, 1.4096, 1.5695, 1.3402, 2.7153, 1.1909, 2.2210, 3.0833], + device='cuda:1'), covar=tensor([0.0571, 0.2529, 0.2761, 0.1913, 0.0695, 0.2252, 0.1244, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0368, 0.0390, 0.0350, 0.0373, 0.0352, 0.0387, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:43:25,940 INFO [train.py:903] (1/4) Epoch 24, batch 4850, loss[loss=0.2028, simple_loss=0.2856, pruned_loss=0.06002, over 18177.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2832, pruned_loss=0.06133, over 3833165.22 frames. ], batch size: 83, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:43:50,712 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 02:44:11,986 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 02:44:16,567 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 02:44:17,729 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 02:44:19,215 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161937.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:27,634 INFO [train.py:903] (1/4) Epoch 24, batch 4900, loss[loss=0.213, simple_loss=0.2971, pruned_loss=0.06444, over 18901.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2842, pruned_loss=0.06173, over 3816879.02 frames. ], batch size: 74, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:44:27,676 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 02:44:33,921 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:34,693 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 5.435e+02 6.496e+02 8.047e+02 2.666e+03, threshold=1.299e+03, percent-clipped=6.0 +2023-04-03 02:44:47,021 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 02:44:53,304 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:54,311 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2164, 3.4965, 3.7389, 3.7530, 2.0839, 3.5021, 3.1817, 3.5248], + device='cuda:1'), covar=tensor([0.1550, 0.2936, 0.0700, 0.0758, 0.4740, 0.1403, 0.0639, 0.1062], + device='cuda:1'), in_proj_covar=tensor([0.0794, 0.0759, 0.0966, 0.0848, 0.0847, 0.0731, 0.0578, 0.0894], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 02:45:05,672 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:23,802 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:28,147 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7910, 1.5326, 1.4774, 1.7990, 1.4811, 1.5434, 1.4514, 1.6792], + device='cuda:1'), covar=tensor([0.1075, 0.1324, 0.1466, 0.0951, 0.1245, 0.0596, 0.1455, 0.0779], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0357, 0.0311, 0.0255, 0.0305, 0.0253, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:45:28,862 INFO [train.py:903] (1/4) Epoch 24, batch 4950, loss[loss=0.2255, simple_loss=0.3096, pruned_loss=0.07065, over 18010.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2838, pruned_loss=0.06152, over 3812811.27 frames. ], batch size: 83, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:45:47,818 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 02:46:13,013 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-03 02:46:33,474 INFO [train.py:903] (1/4) Epoch 24, batch 5000, loss[loss=0.2371, simple_loss=0.3146, pruned_loss=0.07976, over 19666.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2837, pruned_loss=0.06148, over 3814233.26 frames. ], batch size: 55, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:46:41,312 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.589e+02 5.769e+02 7.322e+02 1.477e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-03 02:46:44,820 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 02:46:56,726 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 02:47:35,230 INFO [train.py:903] (1/4) Epoch 24, batch 5050, loss[loss=0.1991, simple_loss=0.2843, pruned_loss=0.05696, over 19847.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2839, pruned_loss=0.06151, over 3818497.87 frames. ], batch size: 52, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:47:36,675 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:47:40,202 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4919, 1.4334, 1.6942, 1.5039, 3.1003, 1.0217, 2.4226, 3.4815], + device='cuda:1'), covar=tensor([0.0504, 0.2639, 0.2539, 0.1735, 0.0613, 0.2434, 0.1138, 0.0239], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0368, 0.0388, 0.0350, 0.0372, 0.0352, 0.0386, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:48:14,994 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 02:48:25,521 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:37,740 INFO [train.py:903] (1/4) Epoch 24, batch 5100, loss[loss=0.1945, simple_loss=0.2694, pruned_loss=0.05983, over 19755.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.283, pruned_loss=0.06102, over 3818409.29 frames. ], batch size: 45, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:48:44,638 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.540e+02 5.777e+02 7.463e+02 1.637e+03, threshold=1.155e+03, percent-clipped=6.0 +2023-04-03 02:48:51,578 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 02:48:55,185 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 02:48:55,619 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:58,702 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 02:49:18,961 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:39,609 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162193.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:40,374 INFO [train.py:903] (1/4) Epoch 24, batch 5150, loss[loss=0.1984, simple_loss=0.285, pruned_loss=0.05596, over 19301.00 frames. 
], tot_loss[loss=0.2025, simple_loss=0.2832, pruned_loss=0.06096, over 3813238.19 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:49:57,201 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 02:50:11,328 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:31,577 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 02:50:43,109 INFO [train.py:903] (1/4) Epoch 24, batch 5200, loss[loss=0.2137, simple_loss=0.295, pruned_loss=0.0662, over 18646.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2845, pruned_loss=0.06141, over 3805688.50 frames. ], batch size: 74, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:50:50,150 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:51,040 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.774e+02 5.644e+02 7.681e+02 1.514e+03, threshold=1.129e+03, percent-clipped=4.0 +2023-04-03 02:50:59,732 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 02:51:44,107 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 02:51:44,406 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:51:46,277 INFO [train.py:903] (1/4) Epoch 24, batch 5250, loss[loss=0.224, simple_loss=0.3057, pruned_loss=0.07113, over 19341.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06267, over 3784119.35 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:14,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8797, 1.3970, 1.5814, 1.4984, 3.4222, 1.1668, 2.4025, 3.9184], + device='cuda:1'), covar=tensor([0.0455, 0.2692, 0.2867, 0.1889, 0.0692, 0.2507, 0.1399, 0.0206], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0368, 0.0389, 0.0350, 0.0373, 0.0353, 0.0386, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:52:50,073 INFO [train.py:903] (1/4) Epoch 24, batch 5300, loss[loss=0.2205, simple_loss=0.3019, pruned_loss=0.06955, over 17413.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06219, over 3787356.44 frames. ], batch size: 101, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:57,101 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.862e+02 5.825e+02 7.901e+02 2.284e+03, threshold=1.165e+03, percent-clipped=8.0 +2023-04-03 02:53:08,449 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-03 02:53:33,674 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6420, 1.2153, 1.2827, 1.5078, 1.0790, 1.4071, 1.2272, 1.4904], + device='cuda:1'), covar=tensor([0.1137, 0.1231, 0.1523, 0.1024, 0.1298, 0.0627, 0.1536, 0.0807], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0360, 0.0314, 0.0257, 0.0306, 0.0255, 0.0316, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:53:43,024 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-03 02:53:51,568 INFO [train.py:903] (1/4) Epoch 24, batch 5350, loss[loss=0.2123, simple_loss=0.2915, pruned_loss=0.06658, over 19626.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.06238, over 3792173.29 frames. ], batch size: 57, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:54:28,285 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 02:54:47,990 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:54:54,712 INFO [train.py:903] (1/4) Epoch 24, batch 5400, loss[loss=0.1842, simple_loss=0.2583, pruned_loss=0.05507, over 19748.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.06249, over 3796775.56 frames. ], batch size: 46, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:55:00,083 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 02:55:02,631 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.752e+02 4.703e+02 6.237e+02 7.666e+02 1.372e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 02:55:56,871 INFO [train.py:903] (1/4) Epoch 24, batch 5450, loss[loss=0.2059, simple_loss=0.2921, pruned_loss=0.05986, over 19656.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2867, pruned_loss=0.06284, over 3796603.55 frames. ], batch size: 58, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:56:37,127 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2734, 1.9378, 2.0794, 2.9623, 1.9864, 2.3408, 2.4954, 2.0976], + device='cuda:1'), covar=tensor([0.0759, 0.0908, 0.0939, 0.0751, 0.0908, 0.0774, 0.0882, 0.0679], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0226, 0.0213, 0.0189, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 02:56:38,283 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7710, 1.7344, 1.5695, 1.3722, 1.3532, 1.3654, 0.2462, 0.6521], + device='cuda:1'), covar=tensor([0.0598, 0.0581, 0.0414, 0.0649, 0.1145, 0.0736, 0.1243, 0.1054], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0359, 0.0362, 0.0386, 0.0465, 0.0394, 0.0340, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:56:40,433 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:00,250 INFO [train.py:903] (1/4) Epoch 24, batch 5500, loss[loss=0.2303, simple_loss=0.3025, pruned_loss=0.07903, over 18785.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06253, over 3793091.83 frames. 
], batch size: 74, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:57:00,687 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4312, 2.4919, 2.6687, 3.1600, 2.6309, 3.0357, 2.7012, 2.5365], + device='cuda:1'), covar=tensor([0.3701, 0.3316, 0.1649, 0.2135, 0.3605, 0.1862, 0.3845, 0.2722], + device='cuda:1'), in_proj_covar=tensor([0.0916, 0.0989, 0.0729, 0.0940, 0.0895, 0.0830, 0.0850, 0.0793], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 02:57:05,452 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:07,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.077e+02 6.464e+02 7.861e+02 1.317e+03, threshold=1.293e+03, percent-clipped=1.0 +2023-04-03 02:57:12,152 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:25,028 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 02:57:36,167 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:49,665 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9532, 1.8745, 1.8232, 1.6119, 1.4603, 1.5854, 0.4639, 0.9417], + device='cuda:1'), covar=tensor([0.0656, 0.0641, 0.0427, 0.0725, 0.1219, 0.0860, 0.1329, 0.1127], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0360, 0.0364, 0.0388, 0.0467, 0.0396, 0.0341, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 02:58:01,101 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:01,999 INFO [train.py:903] (1/4) Epoch 24, batch 5550, loss[loss=0.195, simple_loss=0.273, pruned_loss=0.05848, over 19414.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06272, over 3769140.87 frames. ], batch size: 48, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:58:08,816 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 02:58:18,362 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4146, 1.2816, 1.4407, 1.3740, 2.9808, 1.0678, 2.2484, 3.4319], + device='cuda:1'), covar=tensor([0.0561, 0.2934, 0.3041, 0.1992, 0.0772, 0.2625, 0.1427, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0368, 0.0389, 0.0349, 0.0372, 0.0352, 0.0386, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:58:50,763 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3057, 3.6652, 2.2890, 2.1699, 3.3659, 1.9903, 1.7478, 2.5902], + device='cuda:1'), covar=tensor([0.1250, 0.0483, 0.1001, 0.0892, 0.0444, 0.1186, 0.0944, 0.0574], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0316, 0.0340, 0.0268, 0.0248, 0.0342, 0.0292, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 02:58:59,599 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-03 02:59:02,952 INFO [train.py:903] (1/4) Epoch 24, batch 5600, loss[loss=0.2376, simple_loss=0.3106, pruned_loss=0.0823, over 18782.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2859, pruned_loss=0.06245, over 3792935.23 frames. ], batch size: 74, lr: 3.40e-03, grad_scale: 16.0 +2023-04-03 02:59:12,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 5.034e+02 5.949e+02 8.933e+02 2.100e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 02:59:19,259 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 03:00:07,504 INFO [train.py:903] (1/4) Epoch 24, batch 5650, loss[loss=0.2072, simple_loss=0.2918, pruned_loss=0.06123, over 19699.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.0622, over 3804418.69 frames. ], batch size: 59, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:00:24,969 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:00:55,023 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 03:01:09,550 INFO [train.py:903] (1/4) Epoch 24, batch 5700, loss[loss=0.2079, simple_loss=0.2917, pruned_loss=0.06203, over 19768.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06247, over 3812148.02 frames. ], batch size: 56, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:01:17,480 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 4.704e+02 5.740e+02 7.100e+02 1.656e+03, threshold=1.148e+03, percent-clipped=4.0 +2023-04-03 03:01:23,595 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2452, 1.1813, 1.1810, 1.3462, 1.0462, 1.3418, 1.3186, 1.2435], + device='cuda:1'), covar=tensor([0.0907, 0.1010, 0.1093, 0.0672, 0.0874, 0.0846, 0.0874, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0239, 0.0225, 0.0213, 0.0189, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 03:01:56,784 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1735, 2.8693, 2.3156, 2.2253, 1.9268, 2.4744, 0.9242, 2.1441], + device='cuda:1'), covar=tensor([0.0628, 0.0588, 0.0691, 0.1172, 0.1306, 0.1128, 0.1538, 0.1037], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0357, 0.0360, 0.0385, 0.0464, 0.0393, 0.0338, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 03:02:02,759 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. limit=5.0 +2023-04-03 03:02:11,498 INFO [train.py:903] (1/4) Epoch 24, batch 5750, loss[loss=0.2088, simple_loss=0.2961, pruned_loss=0.0607, over 19374.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2863, pruned_loss=0.06235, over 3820801.71 frames. ], batch size: 70, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:02:13,881 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 03:02:22,207 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 03:02:28,829 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-03 03:02:31,419 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:01,710 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 03:03:03,489 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:13,925 INFO [train.py:903] (1/4) Epoch 24, batch 5800, loss[loss=0.2355, simple_loss=0.3159, pruned_loss=0.07754, over 19654.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06142, over 3826337.82 frames. ], batch size: 60, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:03:15,414 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162845.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:22,913 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 4.857e+02 6.549e+02 8.249e+02 1.553e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-03 03:03:51,947 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:04:16,919 INFO [train.py:903] (1/4) Epoch 24, batch 5850, loss[loss=0.245, simple_loss=0.3203, pruned_loss=0.08486, over 19700.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.0616, over 3825114.87 frames. ], batch size: 59, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:05:20,445 INFO [train.py:903] (1/4) Epoch 24, batch 5900, loss[loss=0.1872, simple_loss=0.2725, pruned_loss=0.0509, over 19402.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2859, pruned_loss=0.06217, over 3819122.96 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:05:26,356 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 03:05:28,676 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 4.550e+02 5.410e+02 6.827e+02 1.573e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 03:05:38,168 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9849, 1.9357, 1.8305, 1.6622, 1.5928, 1.5964, 0.5049, 0.9038], + device='cuda:1'), covar=tensor([0.0703, 0.0710, 0.0454, 0.0693, 0.1241, 0.0879, 0.1321, 0.1145], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0359, 0.0362, 0.0387, 0.0465, 0.0394, 0.0341, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 03:05:43,775 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:05:44,428 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 03:06:15,201 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162988.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:16,452 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:21,816 INFO [train.py:903] (1/4) Epoch 24, batch 5950, loss[loss=0.1918, simple_loss=0.277, pruned_loss=0.05327, over 19594.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.286, pruned_loss=0.06252, over 3825538.18 frames. 
], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,796 INFO [train.py:903] (1/4) Epoch 24, batch 6000, loss[loss=0.2076, simple_loss=0.2806, pruned_loss=0.06731, over 19763.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2862, pruned_loss=0.06232, over 3828468.09 frames. ], batch size: 45, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,797 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 03:07:35,178 INFO [train.py:937] (1/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2679, pruned_loss=0.03436, over 944034.00 frames. +2023-04-03 03:07:35,180 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 03:07:43,473 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.276e+02 6.488e+02 8.005e+02 1.643e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-03 03:07:59,723 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9750, 1.9235, 1.7860, 1.5966, 1.4657, 1.5609, 0.4205, 0.8423], + device='cuda:1'), covar=tensor([0.0639, 0.0618, 0.0440, 0.0741, 0.1268, 0.0899, 0.1342, 0.1155], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0361, 0.0364, 0.0388, 0.0467, 0.0395, 0.0342, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 03:08:18,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-03 03:08:20,490 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5399, 1.5281, 1.8452, 1.7580, 2.5280, 2.3341, 2.7165, 1.0846], + device='cuda:1'), covar=tensor([0.2451, 0.4275, 0.2703, 0.1933, 0.1603, 0.2153, 0.1459, 0.4648], + device='cuda:1'), in_proj_covar=tensor([0.0542, 0.0653, 0.0730, 0.0494, 0.0624, 0.0540, 0.0664, 0.0559], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 03:08:29,503 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-03 03:08:35,923 INFO [train.py:903] (1/4) Epoch 24, batch 6050, loss[loss=0.2206, simple_loss=0.2969, pruned_loss=0.07213, over 19667.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2866, pruned_loss=0.06264, over 3835875.86 frames. ], batch size: 55, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:08:53,234 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:09:37,884 INFO [train.py:903] (1/4) Epoch 24, batch 6100, loss[loss=0.1818, simple_loss=0.2579, pruned_loss=0.0529, over 19372.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2855, pruned_loss=0.06218, over 3840563.83 frames. 
], batch size: 47, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:09:45,776 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 5.048e+02 5.892e+02 7.607e+02 1.565e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 03:09:56,032 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1012, 5.1015, 5.8813, 5.9137, 2.1954, 5.5681, 4.7296, 5.5192], + device='cuda:1'), covar=tensor([0.1609, 0.0853, 0.0555, 0.0561, 0.5789, 0.0797, 0.0620, 0.1129], + device='cuda:1'), in_proj_covar=tensor([0.0797, 0.0755, 0.0965, 0.0844, 0.0844, 0.0731, 0.0577, 0.0896], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 03:10:33,906 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163189.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:10:40,167 INFO [train.py:903] (1/4) Epoch 24, batch 6150, loss[loss=0.2341, simple_loss=0.3075, pruned_loss=0.08035, over 18720.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06194, over 3835559.96 frames. ], batch size: 74, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:10,602 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 03:11:43,668 INFO [train.py:903] (1/4) Epoch 24, batch 6200, loss[loss=0.1632, simple_loss=0.243, pruned_loss=0.04165, over 16525.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06154, over 3819367.89 frames. ], batch size: 36, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:44,112 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:11:51,414 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.538e+02 5.825e+02 7.342e+02 1.276e+03, threshold=1.165e+03, percent-clipped=3.0 +2023-04-03 03:12:00,554 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163258.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:14,129 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163269.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:43,994 INFO [train.py:903] (1/4) Epoch 24, batch 6250, loss[loss=0.2246, simple_loss=0.3087, pruned_loss=0.07031, over 19339.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06228, over 3812207.68 frames. ], batch size: 66, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:12:49,742 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3551, 3.0458, 2.1606, 2.7615, 0.7978, 3.0744, 2.9603, 3.0137], + device='cuda:1'), covar=tensor([0.1102, 0.1331, 0.2228, 0.1108, 0.3846, 0.0983, 0.1119, 0.1400], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0418, 0.0502, 0.0351, 0.0401, 0.0442, 0.0436, 0.0466], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:12:56,470 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:13:12,646 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 03:13:45,519 INFO [train.py:903] (1/4) Epoch 24, batch 6300, loss[loss=0.205, simple_loss=0.2897, pruned_loss=0.0601, over 19428.00 frames. 
], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.0624, over 3826000.36 frames. ], batch size: 70, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:13:54,810 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.008e+02 6.887e+02 8.761e+02 2.377e+03, threshold=1.377e+03, percent-clipped=3.0 +2023-04-03 03:14:48,468 INFO [train.py:903] (1/4) Epoch 24, batch 6350, loss[loss=0.2387, simple_loss=0.3262, pruned_loss=0.07565, over 19655.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06179, over 3830554.75 frames. ], batch size: 58, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:50,632 INFO [train.py:903] (1/4) Epoch 24, batch 6400, loss[loss=0.1972, simple_loss=0.2823, pruned_loss=0.05606, over 19406.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2859, pruned_loss=0.06208, over 3832409.11 frames. ], batch size: 70, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:59,002 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 4.869e+02 5.854e+02 7.378e+02 1.563e+03, threshold=1.171e+03, percent-clipped=2.0 +2023-04-03 03:16:00,333 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:16:52,294 INFO [train.py:903] (1/4) Epoch 24, batch 6450, loss[loss=0.179, simple_loss=0.2579, pruned_loss=0.05011, over 19743.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2847, pruned_loss=0.06122, over 3838520.28 frames. ], batch size: 51, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:29,958 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9382, 3.6082, 2.6412, 3.2400, 0.9165, 3.5448, 3.4428, 3.5208], + device='cuda:1'), covar=tensor([0.0927, 0.1184, 0.1844, 0.0926, 0.3869, 0.0891, 0.1025, 0.1317], + device='cuda:1'), in_proj_covar=tensor([0.0512, 0.0418, 0.0502, 0.0352, 0.0402, 0.0443, 0.0436, 0.0467], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:17:34,401 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 03:17:53,149 INFO [train.py:903] (1/4) Epoch 24, batch 6500, loss[loss=0.2144, simple_loss=0.2973, pruned_loss=0.06577, over 18076.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.062, over 3819858.52 frames. ], batch size: 84, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:56,719 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 03:18:01,366 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.063e+02 4.704e+02 6.024e+02 8.376e+02 1.457e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 03:18:11,247 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.06 vs. limit=5.0 +2023-04-03 03:18:14,474 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163560.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:22,440 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:44,305 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:55,199 INFO [train.py:903] (1/4) Epoch 24, batch 6550, loss[loss=0.1588, simple_loss=0.2396, pruned_loss=0.03899, over 19754.00 frames. 
], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06126, over 3817056.72 frames. ], batch size: 47, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:19:05,569 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163602.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:19:54,268 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3840, 2.0448, 1.6310, 1.4025, 1.8353, 1.3379, 1.3675, 1.7562], + device='cuda:1'), covar=tensor([0.0910, 0.0841, 0.1100, 0.0905, 0.0617, 0.1304, 0.0669, 0.0497], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0315, 0.0337, 0.0267, 0.0248, 0.0341, 0.0290, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:19:57,508 INFO [train.py:903] (1/4) Epoch 24, batch 6600, loss[loss=0.2178, simple_loss=0.2954, pruned_loss=0.07013, over 18271.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06216, over 3822309.16 frames. ], batch size: 84, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:20:05,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.011e+02 6.018e+02 7.633e+02 1.393e+03, threshold=1.204e+03, percent-clipped=8.0 +2023-04-03 03:20:57,661 INFO [train.py:903] (1/4) Epoch 24, batch 6650, loss[loss=0.1866, simple_loss=0.2644, pruned_loss=0.05437, over 19849.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2853, pruned_loss=0.06277, over 3826561.82 frames. ], batch size: 52, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:20:59,592 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 03:21:25,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163717.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:21:33,086 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6999, 4.0629, 4.3439, 4.3543, 1.9146, 4.0953, 3.6033, 4.0742], + device='cuda:1'), covar=tensor([0.1610, 0.1522, 0.0632, 0.0724, 0.5967, 0.1087, 0.0668, 0.1181], + device='cuda:1'), in_proj_covar=tensor([0.0794, 0.0754, 0.0962, 0.0840, 0.0840, 0.0730, 0.0574, 0.0890], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 03:21:58,181 INFO [train.py:903] (1/4) Epoch 24, batch 6700, loss[loss=0.168, simple_loss=0.2497, pruned_loss=0.04317, over 19383.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2856, pruned_loss=0.06295, over 3822244.47 frames. ], batch size: 47, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:22:08,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.744e+02 6.010e+02 8.153e+02 1.593e+03, threshold=1.202e+03, percent-clipped=6.0 +2023-04-03 03:22:55,950 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. limit=5.0 +2023-04-03 03:22:57,590 INFO [train.py:903] (1/4) Epoch 24, batch 6750, loss[loss=0.2322, simple_loss=0.307, pruned_loss=0.07869, over 19593.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2863, pruned_loss=0.06291, over 3839167.67 frames. ], batch size: 61, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:23:30,592 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:23:53,915 INFO [train.py:903] (1/4) Epoch 24, batch 6800, loss[loss=0.2039, simple_loss=0.2907, pruned_loss=0.05852, over 19790.00 frames. 
], tot_loss[loss=0.2049, simple_loss=0.285, pruned_loss=0.0624, over 3832029.16 frames. ], batch size: 56, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:23:58,808 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:24:03,002 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.890e+02 5.869e+02 7.347e+02 2.478e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 03:24:39,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 03:24:40,531 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 03:24:42,791 INFO [train.py:903] (1/4) Epoch 25, batch 0, loss[loss=0.2108, simple_loss=0.2945, pruned_loss=0.06349, over 19600.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2945, pruned_loss=0.06349, over 19600.00 frames. ], batch size: 57, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:24:42,792 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 03:24:54,385 INFO [train.py:937] (1/4) Epoch 25, validation: loss=0.1672, simple_loss=0.2675, pruned_loss=0.03346, over 944034.00 frames. +2023-04-03 03:24:54,386 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 03:25:06,933 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 03:25:40,923 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1398, 1.1851, 1.4606, 1.5090, 2.7271, 1.0666, 2.1959, 3.0766], + device='cuda:1'), covar=tensor([0.0587, 0.2971, 0.2894, 0.1729, 0.0747, 0.2382, 0.1265, 0.0351], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0368, 0.0391, 0.0347, 0.0374, 0.0352, 0.0386, 0.0405], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:25:57,056 INFO [train.py:903] (1/4) Epoch 25, batch 50, loss[loss=0.1916, simple_loss=0.2821, pruned_loss=0.0506, over 18044.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06238, over 856953.83 frames. ], batch size: 83, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:26:35,504 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.575e+02 5.589e+02 7.102e+02 2.434e+03, threshold=1.118e+03, percent-clipped=5.0 +2023-04-03 03:26:36,713 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 03:27:00,584 INFO [train.py:903] (1/4) Epoch 25, batch 100, loss[loss=0.1667, simple_loss=0.2491, pruned_loss=0.04218, over 19496.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2856, pruned_loss=0.06173, over 1514314.29 frames. ], batch size: 49, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:27:03,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163973.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:15,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 03:27:34,449 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:28:05,191 INFO [train.py:903] (1/4) Epoch 25, batch 150, loss[loss=0.2025, simple_loss=0.288, pruned_loss=0.05844, over 18415.00 frames. 
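Each `train.py:903` line reports two figures: the loss on the current batch and `tot_loss`, an average whose frame count resets at the start of an epoch (19600 frames at epoch 25, batch 0 above) and then grows toward the millions as the epoch progresses. The separate `train.py:937` entries are a full validation pass over a fixed held-out set of 944034 frames. Below is a simplified sketch of the `tot_loss` bookkeeping, not the actual icefall tracker (the fractional frame counts in the log suggest it also applies decay/reset logic); the second `update` call uses an illustrative frame count.

```python
class RunningLoss:
    """Frame-weighted running average of the training loss, reset per epoch."""

    def __init__(self) -> None:
        self.loss_sum = 0.0
        self.frames = 0.0

    def reset(self) -> None:
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, per_frame_loss: float, num_frames: float) -> None:
        # Weight each batch by how many acoustic frames it contained.
        self.loss_sum += per_frame_loss * num_frames
        self.frames += num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)


tot = RunningLoss()
tot.update(0.2108, 19600.0)    # epoch 25, batch 0 in the log above
tot.update(0.2050, 837354.0)   # illustrative later batches
print(f"tot_loss={tot.value:.4f} over {tot.frames:.2f} frames")
```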
], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06145, over 2037063.57 frames. ], batch size: 83, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:28:42,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 5.520e+02 6.265e+02 7.455e+02 1.438e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-03 03:29:06,831 INFO [train.py:903] (1/4) Epoch 25, batch 200, loss[loss=0.1963, simple_loss=0.273, pruned_loss=0.05984, over 19774.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.0612, over 2450319.93 frames. ], batch size: 47, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:29:09,371 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 03:29:53,464 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:30:10,582 INFO [train.py:903] (1/4) Epoch 25, batch 250, loss[loss=0.2005, simple_loss=0.2811, pruned_loss=0.05995, over 19764.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.06195, over 2760793.04 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:30:41,653 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 03:30:48,735 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 4.875e+02 6.095e+02 7.386e+02 2.001e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 03:31:07,753 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 03:31:13,863 INFO [train.py:903] (1/4) Epoch 25, batch 300, loss[loss=0.2212, simple_loss=0.3006, pruned_loss=0.07095, over 19530.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2833, pruned_loss=0.06143, over 3008289.12 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:31:16,571 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:32:18,086 INFO [train.py:903] (1/4) Epoch 25, batch 350, loss[loss=0.2151, simple_loss=0.2942, pruned_loss=0.068, over 18153.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2835, pruned_loss=0.06131, over 3190888.59 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:32:25,194 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 03:32:55,003 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.912e+02 5.440e+02 6.552e+02 9.332e+02 1.789e+03, threshold=1.310e+03, percent-clipped=12.0 +2023-04-03 03:33:20,727 INFO [train.py:903] (1/4) Epoch 25, batch 400, loss[loss=0.2746, simple_loss=0.335, pruned_loss=0.1071, over 13429.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2845, pruned_loss=0.06224, over 3322735.40 frames. 
], batch size: 136, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:33:53,654 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7524, 1.5236, 1.4490, 1.7712, 1.5338, 1.5301, 1.4955, 1.6625], + device='cuda:1'), covar=tensor([0.1085, 0.1354, 0.1510, 0.1009, 0.1180, 0.0590, 0.1420, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0359, 0.0314, 0.0256, 0.0306, 0.0254, 0.0316, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:34:24,825 INFO [train.py:903] (1/4) Epoch 25, batch 450, loss[loss=0.2091, simple_loss=0.2933, pruned_loss=0.06245, over 18976.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2855, pruned_loss=0.06258, over 3434575.85 frames. ], batch size: 69, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:48,025 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3552, 3.8260, 3.9354, 3.9583, 1.5778, 3.7671, 3.2630, 3.6889], + device='cuda:1'), covar=tensor([0.1709, 0.1096, 0.0708, 0.0851, 0.5944, 0.1129, 0.0798, 0.1245], + device='cuda:1'), in_proj_covar=tensor([0.0807, 0.0767, 0.0971, 0.0852, 0.0848, 0.0736, 0.0580, 0.0903], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 03:34:59,265 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 03:35:00,457 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 03:35:02,815 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 4.778e+02 5.577e+02 7.081e+02 1.680e+03, threshold=1.115e+03, percent-clipped=3.0 +2023-04-03 03:35:27,619 INFO [train.py:903] (1/4) Epoch 25, batch 500, loss[loss=0.222, simple_loss=0.3084, pruned_loss=0.06784, over 19531.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.06196, over 3534727.21 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:35:35,601 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:36:31,239 INFO [train.py:903] (1/4) Epoch 25, batch 550, loss[loss=0.1814, simple_loss=0.2617, pruned_loss=0.0505, over 19366.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2833, pruned_loss=0.06117, over 3590274.96 frames. ], batch size: 47, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:37:09,260 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.163e+02 6.218e+02 7.641e+02 1.675e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 03:37:09,455 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:37:14,119 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.4735, 5.0610, 3.0804, 4.3345, 1.4403, 4.9834, 4.8491, 5.0228], + device='cuda:1'), covar=tensor([0.0396, 0.0767, 0.1846, 0.0781, 0.3510, 0.0517, 0.0771, 0.0970], + device='cuda:1'), in_proj_covar=tensor([0.0521, 0.0424, 0.0509, 0.0356, 0.0408, 0.0447, 0.0442, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:37:34,085 INFO [train.py:903] (1/4) Epoch 25, batch 600, loss[loss=0.2571, simple_loss=0.3171, pruned_loss=0.09853, over 13042.00 frames. 
], tot_loss[loss=0.2021, simple_loss=0.2827, pruned_loss=0.0607, over 3647315.28 frames. ], batch size: 136, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:38:16,671 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 03:38:30,626 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:38:36,205 INFO [train.py:903] (1/4) Epoch 25, batch 650, loss[loss=0.2183, simple_loss=0.2999, pruned_loss=0.06835, over 19532.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2843, pruned_loss=0.06152, over 3690651.37 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:15,259 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.538e+02 5.913e+02 7.820e+02 1.600e+03, threshold=1.183e+03, percent-clipped=2.0 +2023-04-03 03:39:33,669 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:39:39,463 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6271, 1.5299, 1.4411, 2.1647, 1.5690, 1.9345, 1.9560, 1.6576], + device='cuda:1'), covar=tensor([0.0855, 0.0946, 0.1068, 0.0758, 0.0920, 0.0757, 0.0870, 0.0723], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0221, 0.0226, 0.0237, 0.0225, 0.0211, 0.0188, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 03:39:40,284 INFO [train.py:903] (1/4) Epoch 25, batch 700, loss[loss=0.2153, simple_loss=0.3008, pruned_loss=0.06495, over 19550.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06155, over 3717002.44 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:58,628 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:11,637 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:44,580 INFO [train.py:903] (1/4) Epoch 25, batch 750, loss[loss=0.1753, simple_loss=0.2504, pruned_loss=0.05008, over 19311.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06156, over 3723845.37 frames. ], batch size: 44, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:40:57,962 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:41:21,486 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.157e+02 6.753e+02 8.219e+02 1.587e+03, threshold=1.351e+03, percent-clipped=10.0 +2023-04-03 03:41:32,780 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-03 03:41:48,141 INFO [train.py:903] (1/4) Epoch 25, batch 800, loss[loss=0.2292, simple_loss=0.3172, pruned_loss=0.0706, over 18286.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2847, pruned_loss=0.06165, over 3746450.32 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:41:52,837 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164676.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:42:03,219 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
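The WARNING lines from `train.py:1073` record cuts (individual utterances, including speed-perturbed `_sp` and reverberated `_rvb` variants) dropped before training because their duration falls outside the range the sampler is configured for; in this log the excluded durations are either under about one second or above roughly 25 seconds. A generic sketch of such a filter follows, using a plain dataclass instead of the real lhotse CutSet API and made-up duration bounds.

```python
import logging
from dataclasses import dataclass
from typing import Iterable, Iterator

logging.basicConfig(level=logging.WARNING)


@dataclass
class Cut:
    id: str
    duration: float  # seconds


def remove_short_and_long_utts(cuts: Iterable[Cut],
                               min_duration: float = 1.0,
                               max_duration: float = 20.0) -> Iterator[Cut]:
    """Keep only cuts whose duration lies in [min_duration, max_duration] and
    log the excluded ones, in the style of the WARNING lines above.
    The bounds here are illustrative, not the values used for this model."""
    for cut in cuts:
        if not (min_duration <= cut.duration <= max_duration):
            logging.warning(
                "Exclude cut with ID %s from training. Duration: %s",
                cut.id, cut.duration)
            continue
        yield cut


# Both of these (taken from the log) would be excluded: one is shorter than a
# second, the other longer than the maximum.
kept = list(remove_short_and_long_utts([
    Cut("774-127930-0014-48411_sp1.1", 0.95),
    Cut("7357-94126-0026-44402_sp0.9", 25.061125),
]))
assert kept == []
```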
Duration: 0.9681875 +2023-04-03 03:42:50,192 INFO [train.py:903] (1/4) Epoch 25, batch 850, loss[loss=0.209, simple_loss=0.2906, pruned_loss=0.06371, over 19663.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.285, pruned_loss=0.06208, over 3759347.00 frames. ], batch size: 55, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:42:50,364 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:29,202 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 4.928e+02 6.003e+02 7.929e+02 1.446e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 03:43:43,450 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164764.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:44,361 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 03:43:52,629 INFO [train.py:903] (1/4) Epoch 25, batch 900, loss[loss=0.2271, simple_loss=0.3055, pruned_loss=0.07432, over 19502.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06185, over 3777820.34 frames. ], batch size: 64, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:29,970 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-03 03:44:38,680 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9992, 2.0907, 2.3393, 2.6062, 1.9247, 2.4312, 2.3079, 2.1536], + device='cuda:1'), covar=tensor([0.4681, 0.4162, 0.2072, 0.2860, 0.4618, 0.2465, 0.5338, 0.3545], + device='cuda:1'), in_proj_covar=tensor([0.0915, 0.0986, 0.0726, 0.0936, 0.0894, 0.0826, 0.0852, 0.0791], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 03:44:56,069 INFO [train.py:903] (1/4) Epoch 25, batch 950, loss[loss=0.1939, simple_loss=0.2726, pruned_loss=0.05759, over 19803.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2836, pruned_loss=0.06122, over 3789883.77 frames. ], batch size: 48, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:57,278 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 03:44:57,673 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:15,726 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:28,682 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:32,820 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.681e+02 5.489e+02 6.922e+02 1.500e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-04-03 03:45:43,208 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-03 03:46:00,724 INFO [train.py:903] (1/4) Epoch 25, batch 1000, loss[loss=0.2154, simple_loss=0.2943, pruned_loss=0.06826, over 19509.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2844, pruned_loss=0.06193, over 3787091.32 frames. 
], batch size: 64, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:46:19,855 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:53,177 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:55,222 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 03:47:03,441 INFO [train.py:903] (1/4) Epoch 25, batch 1050, loss[loss=0.193, simple_loss=0.2811, pruned_loss=0.0524, over 18699.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.0618, over 3794945.62 frames. ], batch size: 74, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:47:13,144 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:27,116 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:36,144 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 03:47:41,879 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.678e+02 5.688e+02 6.996e+02 1.189e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 03:47:45,578 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7427, 1.4404, 1.5854, 1.5252, 3.3337, 1.1731, 2.4654, 3.8329], + device='cuda:1'), covar=tensor([0.0468, 0.2657, 0.2808, 0.1884, 0.0724, 0.2535, 0.1285, 0.0203], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0369, 0.0394, 0.0349, 0.0377, 0.0353, 0.0388, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:48:06,294 INFO [train.py:903] (1/4) Epoch 25, batch 1100, loss[loss=0.2042, simple_loss=0.2829, pruned_loss=0.06276, over 19835.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2861, pruned_loss=0.06272, over 3781646.92 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:07,182 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165020.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:09,315 INFO [train.py:903] (1/4) Epoch 25, batch 1150, loss[loss=0.2826, simple_loss=0.3476, pruned_loss=0.1088, over 19518.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2857, pruned_loss=0.06293, over 3797702.49 frames. ], batch size: 64, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:40,378 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:48,277 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.101e+02 6.369e+02 8.453e+02 1.568e+03, threshold=1.274e+03, percent-clipped=10.0 +2023-04-03 03:49:52,163 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:14,722 INFO [train.py:903] (1/4) Epoch 25, batch 1200, loss[loss=0.16, simple_loss=0.2376, pruned_loss=0.04118, over 17664.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2853, pruned_loss=0.0625, over 3800374.03 frames. 
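The `zipformer.py:1188` lines trace the encoder's stochastic layer skipping: each stack of layers carries a warm-up interval measured in batches (`warmup_begin`/`warmup_end`), and on every step a usually empty set of layers is chosen to be bypassed (`num_to_drop`, `layers_to_drop`). The schedule that maps `batch_count` to a drop probability is defined in `zipformer.py`; the sketch below only illustrates the shape of that decision, with made-up probabilities that are an assumption rather than the real rule.

```python
import random
from typing import Optional, Set, Tuple


def pick_layers_to_drop(num_layers: int,
                        batch_count: float,
                        warmup_begin: float,
                        warmup_end: float,
                        initial_prob: float = 0.5,
                        final_prob: float = 0.05,
                        rng: Optional[random.Random] = None) -> Tuple[int, Set[int]]:
    """Sketch of the per-batch layer-skipping decision.

    Assumption (not the actual zipformer schedule): skip layers aggressively
    while a stack is still warming up, then fall back to a small constant
    probability once batch_count is past warmup_end."""
    rng = rng or random.Random(0)
    if batch_count >= warmup_end:
        prob = final_prob
    elif batch_count <= warmup_begin:
        prob = initial_prob
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        prob = initial_prob + frac * (final_prob - initial_prob)
    layers_to_drop = {i for i in range(num_layers) if rng.random() < prob}
    return len(layers_to_drop), layers_to_drop


# Far past warmup_end the set is usually empty, but occasionally a single
# layer is skipped, matching entries such as "num_to_drop=1, layers_to_drop={0}".
num_to_drop, layers = pick_layers_to_drop(
    num_layers=2, batch_count=166072.0,
    warmup_begin=1333.3, warmup_end=2000.0)
```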
], batch size: 39, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:50:41,040 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:46,445 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 03:51:01,321 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:14,662 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:18,442 INFO [train.py:903] (1/4) Epoch 25, batch 1250, loss[loss=0.2614, simple_loss=0.3388, pruned_loss=0.09197, over 19086.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06325, over 3805597.75 frames. ], batch size: 69, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:51:34,074 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165135.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:41,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:51:43,121 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8237, 4.2758, 4.4912, 4.5086, 1.6807, 4.2441, 3.7132, 4.1974], + device='cuda:1'), covar=tensor([0.1578, 0.1006, 0.0641, 0.0688, 0.6226, 0.1038, 0.0709, 0.1219], + device='cuda:1'), in_proj_covar=tensor([0.0802, 0.0766, 0.0974, 0.0850, 0.0856, 0.0736, 0.0579, 0.0903], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 03:51:43,140 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:49,266 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1860, 1.2800, 1.6645, 1.3296, 2.7352, 3.8165, 3.5054, 3.9975], + device='cuda:1'), covar=tensor([0.1696, 0.3907, 0.3544, 0.2495, 0.0628, 0.0178, 0.0205, 0.0244], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0329, 0.0359, 0.0267, 0.0248, 0.0191, 0.0218, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 03:51:57,598 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 4.844e+02 6.322e+02 8.270e+02 1.695e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-03 03:52:21,098 INFO [train.py:903] (1/4) Epoch 25, batch 1300, loss[loss=0.1661, simple_loss=0.2492, pruned_loss=0.04153, over 19358.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2864, pruned_loss=0.06333, over 3797822.70 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:23,839 INFO [train.py:903] (1/4) Epoch 25, batch 1350, loss[loss=0.2673, simple_loss=0.3402, pruned_loss=0.09725, over 18657.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06342, over 3801339.34 frames. 
], batch size: 74, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:27,617 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:54:04,597 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.919e+02 6.162e+02 7.502e+02 1.875e+03, threshold=1.232e+03, percent-clipped=3.0 +2023-04-03 03:54:29,446 INFO [train.py:903] (1/4) Epoch 25, batch 1400, loss[loss=0.2049, simple_loss=0.2924, pruned_loss=0.05866, over 19609.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2855, pruned_loss=0.06257, over 3812820.48 frames. ], batch size: 61, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:54:50,993 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5796, 1.1882, 1.4241, 1.3493, 2.2407, 1.2094, 2.0767, 2.5168], + device='cuda:1'), covar=tensor([0.0736, 0.2820, 0.2862, 0.1576, 0.0875, 0.1919, 0.1119, 0.0479], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0367, 0.0391, 0.0348, 0.0373, 0.0350, 0.0386, 0.0407], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:54:53,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.7470, 2.6342, 2.1449, 2.1179, 1.9091, 2.3661, 1.2535, 1.9809], + device='cuda:1'), covar=tensor([0.0706, 0.0635, 0.0656, 0.1035, 0.1088, 0.0997, 0.1292, 0.1047], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0359, 0.0363, 0.0388, 0.0465, 0.0394, 0.0341, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 03:55:05,127 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:18,660 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:19,098 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-03 03:55:32,124 INFO [train.py:903] (1/4) Epoch 25, batch 1450, loss[loss=0.23, simple_loss=0.3003, pruned_loss=0.07986, over 19678.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2856, pruned_loss=0.06283, over 3806560.31 frames. ], batch size: 53, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:55:33,159 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 03:55:36,968 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:48,905 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:56:10,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.584e+02 6.100e+02 8.063e+02 1.924e+03, threshold=1.220e+03, percent-clipped=4.0 +2023-04-03 03:56:33,838 INFO [train.py:903] (1/4) Epoch 25, batch 1500, loss[loss=0.1984, simple_loss=0.274, pruned_loss=0.06147, over 19763.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06309, over 3814492.85 frames. 
], batch size: 47, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:56:58,446 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165391.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:30,045 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:36,921 INFO [train.py:903] (1/4) Epoch 25, batch 1550, loss[loss=0.1675, simple_loss=0.2426, pruned_loss=0.04622, over 19726.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.06311, over 3811384.47 frames. ], batch size: 46, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:58:06,783 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4665, 2.2345, 1.7086, 1.5689, 2.0309, 1.4058, 1.3761, 1.8963], + device='cuda:1'), covar=tensor([0.1125, 0.0803, 0.1141, 0.0835, 0.0604, 0.1296, 0.0747, 0.0539], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0268, 0.0249, 0.0343, 0.0293, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 03:58:17,715 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.229e+02 4.957e+02 6.075e+02 6.924e+02 1.069e+03, threshold=1.215e+03, percent-clipped=0.0 +2023-04-03 03:58:37,746 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 03:58:42,875 INFO [train.py:903] (1/4) Epoch 25, batch 1600, loss[loss=0.2085, simple_loss=0.2851, pruned_loss=0.06594, over 19587.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2859, pruned_loss=0.06281, over 3807481.90 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:58:51,603 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:58:59,594 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:06,532 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 03:59:21,442 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:44,248 INFO [train.py:903] (1/4) Epoch 25, batch 1650, loss[loss=0.1751, simple_loss=0.2551, pruned_loss=0.04759, over 19741.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2861, pruned_loss=0.06277, over 3820830.61 frames. ], batch size: 46, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:00:23,741 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.584e+02 6.289e+02 7.621e+02 1.672e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-03 04:00:44,046 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:00:47,357 INFO [train.py:903] (1/4) Epoch 25, batch 1700, loss[loss=0.2014, simple_loss=0.2876, pruned_loss=0.0576, over 19676.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2855, pruned_loss=0.06211, over 3831196.10 frames. ], batch size: 60, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:22,709 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:01:29,475 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-03 04:01:49,445 INFO [train.py:903] (1/4) Epoch 25, batch 1750, loss[loss=0.1725, simple_loss=0.2636, pruned_loss=0.04069, over 19827.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06191, over 3832345.47 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:53,780 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.65 vs. limit=5.0 +2023-04-03 04:02:29,216 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.689e+02 5.610e+02 7.383e+02 2.270e+03, threshold=1.122e+03, percent-clipped=4.0 +2023-04-03 04:02:53,370 INFO [train.py:903] (1/4) Epoch 25, batch 1800, loss[loss=0.2074, simple_loss=0.2855, pruned_loss=0.06465, over 19479.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2865, pruned_loss=0.06222, over 3831053.06 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:03:51,789 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 04:03:56,460 INFO [train.py:903] (1/4) Epoch 25, batch 1850, loss[loss=0.2347, simple_loss=0.3055, pruned_loss=0.082, over 12846.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2874, pruned_loss=0.06334, over 3784697.13 frames. ], batch size: 135, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:04:28,947 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 04:04:37,056 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 4.826e+02 5.776e+02 6.973e+02 2.376e+03, threshold=1.155e+03, percent-clipped=4.0 +2023-04-03 04:04:56,243 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:05:00,658 INFO [train.py:903] (1/4) Epoch 25, batch 1900, loss[loss=0.1774, simple_loss=0.2535, pruned_loss=0.05068, over 18671.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2877, pruned_loss=0.06352, over 3776914.99 frames. ], batch size: 41, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:05:16,877 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 04:05:21,692 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 04:05:24,248 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6087, 4.1131, 4.2871, 4.2890, 1.6259, 4.1040, 3.5291, 4.0283], + device='cuda:1'), covar=tensor([0.1709, 0.0878, 0.0648, 0.0709, 0.6051, 0.0856, 0.0675, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0809, 0.0770, 0.0979, 0.0852, 0.0855, 0.0740, 0.0581, 0.0907], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 04:05:48,079 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 04:06:02,690 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165821.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:06:03,528 INFO [train.py:903] (1/4) Epoch 25, batch 1950, loss[loss=0.1778, simple_loss=0.262, pruned_loss=0.04683, over 19732.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2862, pruned_loss=0.06276, over 3789802.72 frames. 
], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:06:20,330 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3132, 1.3059, 1.4428, 1.4367, 1.8639, 1.7479, 1.8546, 1.1111], + device='cuda:1'), covar=tensor([0.1823, 0.3242, 0.2068, 0.1461, 0.1224, 0.1748, 0.1167, 0.3873], + device='cuda:1'), in_proj_covar=tensor([0.0542, 0.0655, 0.0730, 0.0493, 0.0625, 0.0538, 0.0661, 0.0560], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:06:44,201 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.671e+02 6.009e+02 7.545e+02 1.239e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 04:06:46,988 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7838, 1.3585, 1.5660, 1.5249, 3.3254, 1.0628, 2.5504, 3.7973], + device='cuda:1'), covar=tensor([0.0479, 0.2923, 0.2989, 0.2022, 0.0758, 0.2825, 0.1279, 0.0235], + device='cuda:1'), in_proj_covar=tensor([0.0415, 0.0370, 0.0393, 0.0350, 0.0375, 0.0354, 0.0388, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:06:48,310 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165856.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:07:08,063 INFO [train.py:903] (1/4) Epoch 25, batch 2000, loss[loss=0.2001, simple_loss=0.2754, pruned_loss=0.06238, over 19741.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06267, over 3793481.20 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:07:20,319 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:00,826 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:07,746 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 04:08:12,467 INFO [train.py:903] (1/4) Epoch 25, batch 2050, loss[loss=0.1879, simple_loss=0.277, pruned_loss=0.04937, over 19533.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2869, pruned_loss=0.06249, over 3784240.92 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:08:27,939 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 04:08:27,984 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 04:08:38,775 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0111, 3.6937, 2.5233, 3.2801, 0.8466, 3.6403, 3.4899, 3.5525], + device='cuda:1'), covar=tensor([0.0805, 0.1059, 0.1921, 0.0885, 0.3866, 0.0742, 0.0964, 0.1146], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0421, 0.0503, 0.0351, 0.0402, 0.0444, 0.0437, 0.0468], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:08:48,973 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 04:08:51,219 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 5.221e+02 6.289e+02 7.653e+02 1.251e+03, threshold=1.258e+03, percent-clipped=2.0 +2023-04-03 04:09:15,839 INFO [train.py:903] (1/4) Epoch 25, batch 2100, loss[loss=0.1776, simple_loss=0.2566, pruned_loss=0.04931, over 19772.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2871, pruned_loss=0.06274, over 3772656.10 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:09:44,601 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 04:10:08,505 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 04:10:19,021 INFO [train.py:903] (1/4) Epoch 25, batch 2150, loss[loss=0.1922, simple_loss=0.2733, pruned_loss=0.05559, over 19472.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2883, pruned_loss=0.06365, over 3771109.40 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:10:26,463 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0271, 2.1328, 2.4010, 2.6183, 2.0593, 2.5115, 2.3940, 2.2183], + device='cuda:1'), covar=tensor([0.4378, 0.4001, 0.1986, 0.2445, 0.4261, 0.2303, 0.5047, 0.3356], + device='cuda:1'), in_proj_covar=tensor([0.0917, 0.0992, 0.0729, 0.0937, 0.0895, 0.0829, 0.0855, 0.0793], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 04:10:27,624 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:10:48,718 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2049, 2.1192, 1.9585, 1.7855, 1.5133, 1.7637, 0.6931, 1.1905], + device='cuda:1'), covar=tensor([0.0660, 0.0690, 0.0527, 0.0906, 0.1380, 0.1102, 0.1485, 0.1191], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0359, 0.0363, 0.0387, 0.0465, 0.0392, 0.0341, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:10:58,799 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.055e+02 6.614e+02 9.413e+02 1.694e+03, threshold=1.323e+03, percent-clipped=9.0 +2023-04-03 04:11:21,715 INFO [train.py:903] (1/4) Epoch 25, batch 2200, loss[loss=0.2323, simple_loss=0.314, pruned_loss=0.07533, over 17473.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2879, pruned_loss=0.06343, over 3781894.57 frames. 
], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:11:22,012 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166072.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:11:38,542 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1031, 1.9575, 1.7558, 2.1056, 1.8988, 1.8165, 1.6478, 1.9853], + device='cuda:1'), covar=tensor([0.0962, 0.1392, 0.1380, 0.0989, 0.1294, 0.0539, 0.1480, 0.0714], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0355, 0.0311, 0.0254, 0.0301, 0.0252, 0.0313, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:12:02,236 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3536, 2.0565, 1.6388, 1.4212, 1.8937, 1.3330, 1.3035, 1.8784], + device='cuda:1'), covar=tensor([0.0932, 0.0823, 0.1113, 0.0897, 0.0575, 0.1286, 0.0683, 0.0400], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0267, 0.0249, 0.0341, 0.0293, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:12:13,548 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166112.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:12:26,088 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9377, 1.3122, 1.0737, 0.8266, 1.1569, 0.9184, 0.9640, 1.2856], + device='cuda:1'), covar=tensor([0.0590, 0.0863, 0.1140, 0.0958, 0.0553, 0.1334, 0.0587, 0.0427], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0317, 0.0339, 0.0266, 0.0248, 0.0340, 0.0292, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:12:26,896 INFO [train.py:903] (1/4) Epoch 25, batch 2250, loss[loss=0.2209, simple_loss=0.3061, pruned_loss=0.06784, over 19694.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2873, pruned_loss=0.06253, over 3787147.11 frames. 
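The `zipformer.py:2441` dumps print `attn_weights_entropy` statistics for the self-attention modules, together with covariance tensors of the attention projections. The entropy is a diagnostic for heads that collapse onto a single frame (entropy near zero) versus heads that stay close to uniform (entropy near the log of the number of keys). The exact tensor layout in the log is internal to the model, so the snippet below is only a generic way of computing such a per-head entropy, assuming weights that already sum to one over the key axis.

```python
import torch


def attn_weights_entropy(attn_weights: torch.Tensor,
                         eps: float = 1e-20) -> torch.Tensor:
    """Mean entropy (in nats) of each head's attention distributions.

    attn_weights: (num_heads, num_queries, num_keys) with rows summing to 1
    over the last dimension.  Returns a (num_heads,) tensor."""
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=-1)


# A softmax over random scores gives moderately high entropies; a head that
# attends to a single position would give values near zero.
weights = torch.softmax(torch.randn(8, 16, 32), dim=-1)
print(attn_weights_entropy(weights))
```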
], batch size: 59, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:12:34,205 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9518, 1.9324, 1.8217, 1.5881, 1.5294, 1.6370, 0.3997, 0.8593], + device='cuda:1'), covar=tensor([0.0659, 0.0662, 0.0422, 0.0765, 0.1265, 0.0855, 0.1447, 0.1201], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0360, 0.0364, 0.0389, 0.0466, 0.0393, 0.0343, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:13:04,899 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.777e+02 5.716e+02 7.657e+02 1.924e+03, threshold=1.143e+03, percent-clipped=2.0 +2023-04-03 04:13:17,345 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6556, 2.2727, 1.7025, 1.6843, 2.1079, 1.4948, 1.5027, 2.0122], + device='cuda:1'), covar=tensor([0.1018, 0.0702, 0.1013, 0.0741, 0.0560, 0.1147, 0.0682, 0.0477], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0317, 0.0340, 0.0267, 0.0248, 0.0341, 0.0292, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:13:21,655 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:21,807 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:31,169 INFO [train.py:903] (1/4) Epoch 25, batch 2300, loss[loss=0.1996, simple_loss=0.2818, pruned_loss=0.05875, over 19691.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.287, pruned_loss=0.06274, over 3781868.51 frames. ], batch size: 53, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:13:42,668 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 04:14:00,660 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5403, 1.3086, 1.3752, 2.3069, 1.6555, 1.8053, 1.8572, 1.5625], + device='cuda:1'), covar=tensor([0.0995, 0.1223, 0.1204, 0.0768, 0.0980, 0.0941, 0.1024, 0.0870], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0239, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 04:14:34,782 INFO [train.py:903] (1/4) Epoch 25, batch 2350, loss[loss=0.2069, simple_loss=0.2941, pruned_loss=0.05981, over 19435.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2866, pruned_loss=0.06272, over 3777896.40 frames. ], batch size: 70, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:14:42,124 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166227.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:14,902 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.104e+02 6.317e+02 8.219e+02 1.547e+03, threshold=1.263e+03, percent-clipped=3.0 +2023-04-03 04:15:17,167 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 04:15:19,888 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1631, 1.2811, 1.4918, 1.4315, 2.8166, 1.1711, 2.2949, 3.1995], + device='cuda:1'), covar=tensor([0.0514, 0.2680, 0.2828, 0.1784, 0.0706, 0.2310, 0.1107, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0370, 0.0391, 0.0348, 0.0375, 0.0353, 0.0389, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:15:34,636 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 04:15:38,211 INFO [train.py:903] (1/4) Epoch 25, batch 2400, loss[loss=0.1974, simple_loss=0.2818, pruned_loss=0.05647, over 19679.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06237, over 3761520.05 frames. ], batch size: 53, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:15:48,903 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166280.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:55,316 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166284.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:59,843 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5878, 1.6761, 1.9194, 1.8450, 2.7214, 2.4055, 2.9100, 1.2381], + device='cuda:1'), covar=tensor([0.2449, 0.4202, 0.2716, 0.1823, 0.1506, 0.2023, 0.1351, 0.4539], + device='cuda:1'), in_proj_covar=tensor([0.0543, 0.0656, 0.0731, 0.0494, 0.0626, 0.0538, 0.0664, 0.0561], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:16:25,801 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:41,096 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6742, 1.7928, 2.0436, 1.9976, 1.5505, 1.9730, 2.0390, 1.9115], + device='cuda:1'), covar=tensor([0.4154, 0.3750, 0.1986, 0.2231, 0.3842, 0.2152, 0.4965, 0.3419], + device='cuda:1'), in_proj_covar=tensor([0.0917, 0.0990, 0.0729, 0.0938, 0.0895, 0.0830, 0.0852, 0.0793], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 04:16:41,763 INFO [train.py:903] (1/4) Epoch 25, batch 2450, loss[loss=0.2066, simple_loss=0.2817, pruned_loss=0.06574, over 19372.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2855, pruned_loss=0.06221, over 3775772.08 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:16:46,714 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:17:20,232 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.023e+02 4.903e+02 5.916e+02 7.490e+02 1.353e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-04-03 04:17:44,922 INFO [train.py:903] (1/4) Epoch 25, batch 2500, loss[loss=0.1923, simple_loss=0.2683, pruned_loss=0.05815, over 19626.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06214, over 3782815.99 frames. 
], batch size: 50, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:17:51,989 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2887, 2.3431, 2.3237, 3.0496, 2.5917, 2.9641, 2.5105, 2.1283], + device='cuda:1'), covar=tensor([0.4021, 0.3731, 0.2264, 0.2538, 0.3730, 0.2075, 0.4641, 0.3817], + device='cuda:1'), in_proj_covar=tensor([0.0916, 0.0991, 0.0729, 0.0938, 0.0894, 0.0831, 0.0851, 0.0793], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 04:18:41,259 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166416.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:18:48,039 INFO [train.py:903] (1/4) Epoch 25, batch 2550, loss[loss=0.1588, simple_loss=0.2379, pruned_loss=0.03985, over 19735.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.0623, over 3784321.62 frames. ], batch size: 46, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:19:21,986 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 04:19:28,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 5.112e+02 6.516e+02 8.109e+02 2.174e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-03 04:19:40,779 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0838, 2.0024, 1.9156, 1.7249, 1.5862, 1.7099, 0.5008, 1.0141], + device='cuda:1'), covar=tensor([0.0626, 0.0652, 0.0462, 0.0820, 0.1265, 0.0936, 0.1456, 0.1148], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0360, 0.0363, 0.0387, 0.0467, 0.0393, 0.0340, 0.0345], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:19:46,425 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 04:19:53,262 INFO [train.py:903] (1/4) Epoch 25, batch 2600, loss[loss=0.1925, simple_loss=0.2809, pruned_loss=0.05207, over 19278.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2843, pruned_loss=0.0615, over 3804909.27 frames. ], batch size: 66, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:20:08,567 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166483.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:29,567 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:34,776 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 04:20:40,195 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166508.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:41,207 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166509.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:58,302 INFO [train.py:903] (1/4) Epoch 25, batch 2650, loss[loss=0.2294, simple_loss=0.3047, pruned_loss=0.07705, over 19683.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2842, pruned_loss=0.06171, over 3804727.46 frames. 
], batch size: 60, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:21:05,507 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:10,252 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166531.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:21:12,606 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9284, 1.1764, 1.5188, 0.6310, 1.9498, 2.4590, 2.1758, 2.6171], + device='cuda:1'), covar=tensor([0.1584, 0.3897, 0.3419, 0.2761, 0.0648, 0.0277, 0.0335, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0328, 0.0359, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 04:21:17,648 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166536.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:19,718 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 04:21:32,592 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:38,177 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.612e+02 5.770e+02 6.756e+02 1.610e+03, threshold=1.154e+03, percent-clipped=1.0 +2023-04-03 04:21:48,102 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:22:03,057 INFO [train.py:903] (1/4) Epoch 25, batch 2700, loss[loss=0.1998, simple_loss=0.2886, pruned_loss=0.05556, over 19671.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06128, over 3819366.62 frames. ], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:22:21,012 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-03 04:23:06,175 INFO [train.py:903] (1/4) Epoch 25, batch 2750, loss[loss=0.1905, simple_loss=0.2752, pruned_loss=0.05288, over 19620.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06175, over 3825230.94 frames. ], batch size: 50, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:23:08,990 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:23:45,202 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.014e+02 4.918e+02 6.109e+02 7.546e+02 1.552e+03, threshold=1.222e+03, percent-clipped=5.0 +2023-04-03 04:24:04,562 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:24:07,645 INFO [train.py:903] (1/4) Epoch 25, batch 2800, loss[loss=0.263, simple_loss=0.3299, pruned_loss=0.09807, over 19776.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2857, pruned_loss=0.06219, over 3842478.59 frames. 
], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:24:43,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0425, 4.4508, 4.7636, 4.7716, 1.8329, 4.4614, 3.8813, 4.4659], + device='cuda:1'), covar=tensor([0.1602, 0.0851, 0.0614, 0.0655, 0.5884, 0.0972, 0.0606, 0.1084], + device='cuda:1'), in_proj_covar=tensor([0.0803, 0.0766, 0.0975, 0.0853, 0.0851, 0.0740, 0.0580, 0.0905], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 04:25:11,686 INFO [train.py:903] (1/4) Epoch 25, batch 2850, loss[loss=0.2128, simple_loss=0.2954, pruned_loss=0.06511, over 17752.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2867, pruned_loss=0.06236, over 3830561.08 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:25:50,389 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.563e+02 5.950e+02 8.060e+02 1.987e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 04:25:50,631 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:11,840 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 04:26:14,225 INFO [train.py:903] (1/4) Epoch 25, batch 2900, loss[loss=0.1854, simple_loss=0.2717, pruned_loss=0.04953, over 19771.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06242, over 3831193.79 frames. ], batch size: 54, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:26:27,310 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166782.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:29,647 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:33,231 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166787.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:27:05,071 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166812.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:27:07,303 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3329, 2.3500, 2.5197, 2.9860, 2.3751, 2.8387, 2.5294, 2.4265], + device='cuda:1'), covar=tensor([0.4154, 0.4091, 0.1956, 0.2670, 0.4272, 0.2327, 0.4782, 0.3186], + device='cuda:1'), in_proj_covar=tensor([0.0918, 0.0990, 0.0729, 0.0938, 0.0894, 0.0829, 0.0853, 0.0793], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 04:27:17,227 INFO [train.py:903] (1/4) Epoch 25, batch 2950, loss[loss=0.2127, simple_loss=0.2917, pruned_loss=0.06685, over 19694.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06218, over 3828384.51 frames. 
], batch size: 60, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:27:44,180 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:27:56,486 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.674e+02 5.957e+02 7.713e+02 2.101e+03, threshold=1.191e+03, percent-clipped=6.0 +2023-04-03 04:28:20,015 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:28:21,106 INFO [train.py:903] (1/4) Epoch 25, batch 3000, loss[loss=0.2498, simple_loss=0.3155, pruned_loss=0.09211, over 13203.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2851, pruned_loss=0.06175, over 3829089.80 frames. ], batch size: 136, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:28:21,107 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 04:28:33,779 INFO [train.py:937] (1/4) Epoch 25, validation: loss=0.1677, simple_loss=0.2674, pruned_loss=0.034, over 944034.00 frames. +2023-04-03 04:28:33,780 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 04:28:35,137 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 04:28:44,952 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:01,373 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166893.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:17,265 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166905.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:38,485 INFO [train.py:903] (1/4) Epoch 25, batch 3050, loss[loss=0.2283, simple_loss=0.3071, pruned_loss=0.0747, over 17435.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06169, over 3818022.45 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:09,922 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:17,432 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 4.942e+02 6.233e+02 8.295e+02 1.859e+03, threshold=1.247e+03, percent-clipped=9.0 +2023-04-03 04:30:23,331 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:37,564 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0965, 1.9534, 1.7287, 2.0864, 1.8203, 1.7459, 1.6486, 1.9622], + device='cuda:1'), covar=tensor([0.1024, 0.1469, 0.1461, 0.1049, 0.1446, 0.0584, 0.1527, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0355, 0.0314, 0.0254, 0.0304, 0.0254, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:30:41,892 INFO [train.py:903] (1/4) Epoch 25, batch 3100, loss[loss=0.2573, simple_loss=0.3192, pruned_loss=0.09774, over 13496.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2846, pruned_loss=0.0617, over 3817779.87 frames. 
], batch size: 135, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:58,982 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:27,808 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:44,381 INFO [train.py:903] (1/4) Epoch 25, batch 3150, loss[loss=0.1855, simple_loss=0.2638, pruned_loss=0.05363, over 19795.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06165, over 3819425.85 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:32:08,006 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167040.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:08,791 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 04:32:24,691 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.039e+02 6.011e+02 8.459e+02 2.094e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-03 04:32:33,686 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 04:32:39,339 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167065.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:48,328 INFO [train.py:903] (1/4) Epoch 25, batch 3200, loss[loss=0.2148, simple_loss=0.291, pruned_loss=0.06927, over 19667.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06188, over 3828713.76 frames. ], batch size: 53, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:33:20,650 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:23,568 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-03 04:33:47,231 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:50,767 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7905, 4.4190, 2.8377, 3.7917, 1.0414, 4.3674, 4.1837, 4.2808], + device='cuda:1'), covar=tensor([0.0566, 0.0852, 0.1732, 0.0817, 0.3784, 0.0629, 0.0853, 0.1097], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0419, 0.0504, 0.0354, 0.0403, 0.0446, 0.0438, 0.0470], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:33:52,591 INFO [train.py:903] (1/4) Epoch 25, batch 3250, loss[loss=0.2145, simple_loss=0.2996, pruned_loss=0.06474, over 19615.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2855, pruned_loss=0.06133, over 3838523.81 frames. ], batch size: 61, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:33:52,875 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:57,478 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167126.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:59,209 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. 
limit=2.0 +2023-04-03 04:34:23,561 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5835, 1.8150, 2.2439, 1.9490, 3.2117, 2.6290, 3.5200, 1.7984], + device='cuda:1'), covar=tensor([0.2583, 0.4357, 0.2684, 0.1882, 0.1548, 0.2192, 0.1563, 0.4315], + device='cuda:1'), in_proj_covar=tensor([0.0542, 0.0656, 0.0730, 0.0494, 0.0622, 0.0537, 0.0663, 0.0561], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:34:33,313 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 4.897e+02 6.248e+02 7.764e+02 1.427e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 04:34:56,637 INFO [train.py:903] (1/4) Epoch 25, batch 3300, loss[loss=0.2261, simple_loss=0.3026, pruned_loss=0.07475, over 17518.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2861, pruned_loss=0.06144, over 3827166.82 frames. ], batch size: 101, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:34:56,669 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 04:35:15,534 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1777, 1.1607, 1.7579, 1.6228, 2.7111, 4.4206, 4.2694, 5.0141], + device='cuda:1'), covar=tensor([0.1861, 0.5576, 0.4761, 0.2611, 0.0820, 0.0262, 0.0258, 0.0212], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0328, 0.0359, 0.0268, 0.0250, 0.0192, 0.0219, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 04:35:47,900 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:50,081 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:59,019 INFO [train.py:903] (1/4) Epoch 25, batch 3350, loss[loss=0.2285, simple_loss=0.3086, pruned_loss=0.07418, over 18748.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2862, pruned_loss=0.06196, over 3836535.58 frames. ], batch size: 74, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:36:21,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:24,232 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167241.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:25,482 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167242.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:41,238 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.821e+02 5.770e+02 7.565e+02 1.496e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-04-03 04:36:53,570 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167264.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:56,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167267.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:02,309 INFO [train.py:903] (1/4) Epoch 25, batch 3400, loss[loss=0.2224, simple_loss=0.3099, pruned_loss=0.06739, over 19526.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2856, pruned_loss=0.0616, over 3834058.23 frames. 
], batch size: 64, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:37:26,115 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:28,365 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:38:08,135 INFO [train.py:903] (1/4) Epoch 25, batch 3450, loss[loss=0.1942, simple_loss=0.2815, pruned_loss=0.05344, over 19600.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06097, over 3840823.37 frames. ], batch size: 57, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:38:10,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 04:38:35,943 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6396, 1.6418, 1.5979, 1.3797, 1.3523, 1.4491, 0.3595, 0.7287], + device='cuda:1'), covar=tensor([0.0676, 0.0638, 0.0446, 0.0670, 0.1167, 0.0737, 0.1329, 0.1141], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0360, 0.0362, 0.0386, 0.0465, 0.0393, 0.0341, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:38:49,770 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.608e+02 5.631e+02 6.989e+02 1.333e+03, threshold=1.126e+03, percent-clipped=1.0 +2023-04-03 04:39:06,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3060, 2.1706, 2.1165, 1.9915, 1.6549, 1.9523, 0.6499, 1.3122], + device='cuda:1'), covar=tensor([0.0642, 0.0607, 0.0433, 0.0784, 0.1193, 0.0877, 0.1368, 0.1092], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0358, 0.0361, 0.0385, 0.0464, 0.0392, 0.0340, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 04:39:12,369 INFO [train.py:903] (1/4) Epoch 25, batch 3500, loss[loss=0.2283, simple_loss=0.3103, pruned_loss=0.07312, over 19662.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2847, pruned_loss=0.06214, over 3836518.84 frames. ], batch size: 60, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:39:42,200 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2763, 3.5857, 2.2810, 2.1595, 3.3738, 1.9527, 1.8115, 2.4647], + device='cuda:1'), covar=tensor([0.1345, 0.0614, 0.0949, 0.0944, 0.0493, 0.1163, 0.0910, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0318, 0.0336, 0.0266, 0.0248, 0.0342, 0.0291, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:39:56,126 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:15,427 INFO [train.py:903] (1/4) Epoch 25, batch 3550, loss[loss=0.1681, simple_loss=0.2453, pruned_loss=0.0455, over 19750.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2847, pruned_loss=0.06203, over 3830570.78 frames. 
], batch size: 47, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:40:51,399 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167450.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:55,637 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.129e+02 6.252e+02 7.981e+02 1.792e+03, threshold=1.250e+03, percent-clipped=6.0 +2023-04-03 04:41:06,577 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:11,114 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167466.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:13,918 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:17,988 INFO [train.py:903] (1/4) Epoch 25, batch 3600, loss[loss=0.2605, simple_loss=0.3398, pruned_loss=0.09059, over 19751.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.06137, over 3834417.24 frames. ], batch size: 63, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:41:44,845 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:50,891 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167497.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:01,218 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6189, 1.2935, 1.5826, 1.5719, 3.1990, 1.1178, 2.2615, 3.6865], + device='cuda:1'), covar=tensor([0.0481, 0.2896, 0.2808, 0.1761, 0.0676, 0.2585, 0.1361, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0349, 0.0377, 0.0353, 0.0391, 0.0411], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:42:20,842 INFO [train.py:903] (1/4) Epoch 25, batch 3650, loss[loss=0.2163, simple_loss=0.2942, pruned_loss=0.06916, over 19320.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.0619, over 3823566.40 frames. ], batch size: 66, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:42:21,242 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:36,699 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 04:43:00,360 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 5.264e+02 6.466e+02 8.043e+02 1.528e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-03 04:43:24,181 INFO [train.py:903] (1/4) Epoch 25, batch 3700, loss[loss=0.2179, simple_loss=0.3013, pruned_loss=0.06727, over 19434.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06226, over 3823121.65 frames. ], batch size: 70, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:43:31,595 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:43:36,278 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:44:28,019 INFO [train.py:903] (1/4) Epoch 25, batch 3750, loss[loss=0.2144, simple_loss=0.2972, pruned_loss=0.06574, over 19560.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.06252, over 3819447.38 frames. 
], batch size: 56, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:44:57,789 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0190, 2.1086, 2.2977, 2.7296, 2.0706, 2.5806, 2.2745, 2.0621], + device='cuda:1'), covar=tensor([0.4347, 0.4066, 0.2041, 0.2469, 0.4276, 0.2215, 0.5367, 0.3691], + device='cuda:1'), in_proj_covar=tensor([0.0923, 0.0995, 0.0731, 0.0943, 0.0898, 0.0832, 0.0857, 0.0797], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 04:45:08,818 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.966e+02 5.285e+02 6.593e+02 8.150e+02 1.742e+03, threshold=1.319e+03, percent-clipped=4.0 +2023-04-03 04:45:19,685 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3852, 4.0135, 2.6136, 3.5381, 0.9193, 3.9842, 3.8356, 3.9065], + device='cuda:1'), covar=tensor([0.0724, 0.1069, 0.2070, 0.0887, 0.4084, 0.0735, 0.0995, 0.1400], + device='cuda:1'), in_proj_covar=tensor([0.0521, 0.0422, 0.0509, 0.0356, 0.0407, 0.0450, 0.0441, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:45:19,905 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:45:31,285 INFO [train.py:903] (1/4) Epoch 25, batch 3800, loss[loss=0.1931, simple_loss=0.2709, pruned_loss=0.05758, over 19675.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06257, over 3809599.62 frames. ], batch size: 53, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:45:50,892 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:46:01,966 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 04:46:32,954 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 04:46:35,340 INFO [train.py:903] (1/4) Epoch 25, batch 3850, loss[loss=0.2661, simple_loss=0.3374, pruned_loss=0.09744, over 19598.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2856, pruned_loss=0.06234, over 3811218.65 frames. ], batch size: 57, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:46:54,001 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167737.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:47:16,489 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.991e+02 5.959e+02 7.597e+02 1.650e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 04:47:39,474 INFO [train.py:903] (1/4) Epoch 25, batch 3900, loss[loss=0.1901, simple_loss=0.2679, pruned_loss=0.05622, over 19484.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2832, pruned_loss=0.06092, over 3832855.17 frames. ], batch size: 49, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:47:55,776 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167785.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:06,176 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167794.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:43,220 INFO [train.py:903] (1/4) Epoch 25, batch 3950, loss[loss=0.1837, simple_loss=0.2672, pruned_loss=0.05013, over 19571.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2829, pruned_loss=0.06095, over 3834705.33 frames. 
], batch size: 52, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:48:45,713 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 04:48:56,934 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:01,643 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:13,784 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 04:49:25,121 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 4.848e+02 5.890e+02 7.638e+02 1.655e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 04:49:29,115 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:34,629 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:46,053 INFO [train.py:903] (1/4) Epoch 25, batch 4000, loss[loss=0.1997, simple_loss=0.2906, pruned_loss=0.05444, over 19701.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2825, pruned_loss=0.06054, over 3843194.68 frames. ], batch size: 59, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:50:32,172 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 04:50:33,704 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:50:50,808 INFO [train.py:903] (1/4) Epoch 25, batch 4050, loss[loss=0.1708, simple_loss=0.2518, pruned_loss=0.04489, over 19391.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06058, over 3847662.63 frames. ], batch size: 47, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:51:32,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.057e+02 6.529e+02 8.113e+02 1.821e+03, threshold=1.306e+03, percent-clipped=7.0 +2023-04-03 04:51:52,927 INFO [train.py:903] (1/4) Epoch 25, batch 4100, loss[loss=0.2177, simple_loss=0.2968, pruned_loss=0.06925, over 19308.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06077, over 3850401.50 frames. ], batch size: 44, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:52:24,783 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 04:52:27,605 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 04:52:56,842 INFO [train.py:903] (1/4) Epoch 25, batch 4150, loss[loss=0.2045, simple_loss=0.2816, pruned_loss=0.06369, over 19682.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.283, pruned_loss=0.06021, over 3856304.89 frames. ], batch size: 53, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:53:36,895 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168053.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:53:39,909 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 5.393e+02 6.582e+02 8.080e+02 1.683e+03, threshold=1.316e+03, percent-clipped=2.0 +2023-04-03 04:53:59,622 INFO [train.py:903] (1/4) Epoch 25, batch 4200, loss[loss=0.1963, simple_loss=0.2766, pruned_loss=0.05794, over 19793.00 frames. 
], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06083, over 3844315.25 frames. ], batch size: 49, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:54:01,966 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 04:54:10,942 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168081.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:54:18,321 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 04:55:03,246 INFO [train.py:903] (1/4) Epoch 25, batch 4250, loss[loss=0.1912, simple_loss=0.2871, pruned_loss=0.04766, over 19615.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06069, over 3833640.48 frames. ], batch size: 57, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:55:13,195 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:55:17,964 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 04:55:20,782 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-03 04:55:29,581 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 04:55:46,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.961e+02 6.439e+02 7.740e+02 2.119e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-03 04:55:59,646 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:07,294 INFO [train.py:903] (1/4) Epoch 25, batch 4300, loss[loss=0.1875, simple_loss=0.264, pruned_loss=0.05552, over 19732.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06151, over 3838108.63 frames. ], batch size: 51, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:56:15,588 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=1.97 vs. limit=5.0 +2023-04-03 04:56:29,218 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5679, 1.1439, 1.4625, 1.4498, 2.9593, 1.1762, 2.5341, 3.5316], + device='cuda:1'), covar=tensor([0.0692, 0.3843, 0.3367, 0.2302, 0.1172, 0.2981, 0.1279, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0419, 0.0375, 0.0395, 0.0351, 0.0379, 0.0354, 0.0393, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 04:56:30,430 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:37,473 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168196.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:42,773 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-04-03 04:56:57,665 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 04:57:00,302 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 04:57:10,764 INFO [train.py:903] (1/4) Epoch 25, batch 4350, loss[loss=0.2006, simple_loss=0.2839, pruned_loss=0.05859, over 19763.00 frames. 
], tot_loss[loss=0.2046, simple_loss=0.2856, pruned_loss=0.06183, over 3835015.03 frames. ], batch size: 54, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:57:38,627 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:47,857 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168251.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:49,660 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-03 04:57:53,358 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.774e+02 5.735e+02 7.211e+02 1.236e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 04:58:13,423 INFO [train.py:903] (1/4) Epoch 25, batch 4400, loss[loss=0.1877, simple_loss=0.2749, pruned_loss=0.05029, over 19501.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2848, pruned_loss=0.06183, over 3838120.71 frames. ], batch size: 64, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:58:40,135 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 04:58:50,363 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 04:59:16,254 INFO [train.py:903] (1/4) Epoch 25, batch 4450, loss[loss=0.1759, simple_loss=0.2483, pruned_loss=0.05173, over 19736.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06101, over 3844124.03 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:59:59,615 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.609e+02 5.804e+02 7.720e+02 1.927e+03, threshold=1.161e+03, percent-clipped=7.0 +2023-04-03 05:00:17,544 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-03 05:00:20,127 INFO [train.py:903] (1/4) Epoch 25, batch 4500, loss[loss=0.209, simple_loss=0.294, pruned_loss=0.06198, over 19751.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.285, pruned_loss=0.06136, over 3836774.33 frames. ], batch size: 63, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:00:44,722 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 05:00:52,068 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:01:23,378 INFO [train.py:903] (1/4) Epoch 25, batch 4550, loss[loss=0.1886, simple_loss=0.2694, pruned_loss=0.05393, over 19485.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2842, pruned_loss=0.06078, over 3846288.42 frames. ], batch size: 49, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:01:34,645 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 05:01:59,985 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 05:02:02,740 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:08,836 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 4.803e+02 5.692e+02 6.651e+02 1.392e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 05:02:15,291 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1418, 1.3056, 1.6998, 0.9545, 2.3241, 3.0869, 2.7825, 3.2762], + device='cuda:1'), covar=tensor([0.1521, 0.3898, 0.3296, 0.2756, 0.0628, 0.0213, 0.0260, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0327, 0.0357, 0.0267, 0.0248, 0.0191, 0.0217, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 05:02:27,737 INFO [train.py:903] (1/4) Epoch 25, batch 4600, loss[loss=0.2067, simple_loss=0.2915, pruned_loss=0.06101, over 18303.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2847, pruned_loss=0.06058, over 3842900.47 frames. ], batch size: 83, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:02:35,462 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168477.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:59,881 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3820, 4.0138, 2.6330, 3.4945, 0.8875, 3.9510, 3.8441, 3.9177], + device='cuda:1'), covar=tensor([0.0668, 0.0985, 0.1934, 0.0931, 0.4004, 0.0756, 0.0911, 0.1001], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0420, 0.0504, 0.0353, 0.0405, 0.0447, 0.0441, 0.0471], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:03:04,788 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168500.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:20,621 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:27,496 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 05:03:31,423 INFO [train.py:903] (1/4) Epoch 25, batch 4650, loss[loss=0.2293, simple_loss=0.3071, pruned_loss=0.07572, over 19751.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2838, pruned_loss=0.06007, over 3837931.25 frames. ], batch size: 63, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:03:35,306 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:50,730 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 05:04:02,131 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 05:04:02,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9841, 2.1243, 2.2306, 1.9903, 3.6560, 1.6814, 2.9994, 3.7821], + device='cuda:1'), covar=tensor([0.0558, 0.2301, 0.2260, 0.1807, 0.0640, 0.2302, 0.1739, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0373, 0.0394, 0.0351, 0.0378, 0.0352, 0.0393, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:04:16,034 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.060e+02 6.015e+02 7.632e+02 1.453e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 05:04:34,761 INFO [train.py:903] (1/4) Epoch 25, batch 4700, loss[loss=0.226, simple_loss=0.3034, pruned_loss=0.07433, over 12821.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2842, pruned_loss=0.06046, over 3824345.65 frames. ], batch size: 138, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:04:57,646 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 05:05:04,386 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:05:38,974 INFO [train.py:903] (1/4) Epoch 25, batch 4750, loss[loss=0.187, simple_loss=0.2731, pruned_loss=0.05045, over 19837.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2847, pruned_loss=0.06103, over 3822673.75 frames. ], batch size: 52, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:06:06,418 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 05:06:09,939 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 05:06:22,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.029e+02 6.051e+02 7.124e+02 1.309e+03, threshold=1.210e+03, percent-clipped=1.0 +2023-04-03 05:06:40,981 INFO [train.py:903] (1/4) Epoch 25, batch 4800, loss[loss=0.2718, simple_loss=0.3354, pruned_loss=0.1041, over 12776.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2855, pruned_loss=0.06148, over 3806108.58 frames. ], batch size: 136, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:06:59,395 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0583, 1.8005, 1.6648, 1.9907, 1.8714, 1.7156, 1.5397, 1.9269], + device='cuda:1'), covar=tensor([0.1043, 0.1270, 0.1462, 0.1001, 0.1152, 0.0571, 0.1536, 0.0757], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0356, 0.0315, 0.0253, 0.0303, 0.0254, 0.0314, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:07:29,195 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168710.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:07:43,821 INFO [train.py:903] (1/4) Epoch 25, batch 4850, loss[loss=0.1997, simple_loss=0.287, pruned_loss=0.05624, over 19761.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2849, pruned_loss=0.06111, over 3805375.08 frames. ], batch size: 63, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:08:10,303 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 05:08:29,303 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.536e+02 6.758e+02 9.275e+02 1.787e+03, threshold=1.352e+03, percent-clipped=12.0 +2023-04-03 05:08:30,524 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 05:08:36,436 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 05:08:36,485 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 05:08:43,795 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:08:47,079 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 05:08:48,232 INFO [train.py:903] (1/4) Epoch 25, batch 4900, loss[loss=0.1676, simple_loss=0.245, pruned_loss=0.04508, over 19753.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06098, over 3794679.44 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:09:06,553 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 05:09:10,239 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9468, 1.2749, 1.6943, 2.9155, 2.0807, 1.6493, 2.1991, 1.6729], + device='cuda:1'), covar=tensor([0.1053, 0.1754, 0.1316, 0.0885, 0.1126, 0.1437, 0.1313, 0.1043], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0238, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 05:09:15,962 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:09:52,873 INFO [train.py:903] (1/4) Epoch 25, batch 4950, loss[loss=0.1947, simple_loss=0.2743, pruned_loss=0.05761, over 19780.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06143, over 3796986.41 frames. ], batch size: 54, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:10:04,496 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 05:10:30,186 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 05:10:36,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.693e+02 5.645e+02 7.417e+02 1.662e+03, threshold=1.129e+03, percent-clipped=1.0 +2023-04-03 05:10:49,018 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6321, 1.7559, 2.1076, 1.8449, 3.1193, 2.6125, 3.2307, 1.6745], + device='cuda:1'), covar=tensor([0.2618, 0.4502, 0.2778, 0.2094, 0.1650, 0.2306, 0.1750, 0.4569], + device='cuda:1'), in_proj_covar=tensor([0.0548, 0.0666, 0.0739, 0.0501, 0.0630, 0.0544, 0.0670, 0.0567], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 05:10:50,517 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 05:10:55,825 INFO [train.py:903] (1/4) Epoch 25, batch 5000, loss[loss=0.1873, simple_loss=0.2742, pruned_loss=0.05022, over 18215.00 frames. 
], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06052, over 3804555.51 frames. ], batch size: 84, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:11:02,558 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 05:11:13,717 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 05:11:58,384 INFO [train.py:903] (1/4) Epoch 25, batch 5050, loss[loss=0.2161, simple_loss=0.2772, pruned_loss=0.07747, over 19365.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2826, pruned_loss=0.06055, over 3810778.31 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:12:06,806 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2639, 2.3023, 2.5518, 2.9864, 2.2851, 2.8406, 2.5795, 2.2832], + device='cuda:1'), covar=tensor([0.4371, 0.4267, 0.1886, 0.2822, 0.4640, 0.2342, 0.4910, 0.3488], + device='cuda:1'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0941, 0.0896, 0.0833, 0.0851, 0.0796], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 05:12:18,628 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9317, 2.0201, 2.3122, 2.4609, 1.8944, 2.3773, 2.2945, 2.0583], + device='cuda:1'), covar=tensor([0.4070, 0.3820, 0.1846, 0.2602, 0.4244, 0.2265, 0.4891, 0.3416], + device='cuda:1'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0942, 0.0897, 0.0833, 0.0851, 0.0796], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 05:12:33,918 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 05:12:41,885 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.713e+02 5.534e+02 7.099e+02 1.364e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-04-03 05:12:55,417 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168966.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:02,129 INFO [train.py:903] (1/4) Epoch 25, batch 5100, loss[loss=0.1844, simple_loss=0.2613, pruned_loss=0.05381, over 19844.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2823, pruned_loss=0.06011, over 3810823.49 frames. ], batch size: 52, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:13:11,308 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 05:13:14,755 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 05:13:19,307 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168985.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:20,102 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-03 05:13:21,526 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168987.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:13:26,024 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:31,792 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7959, 1.6378, 1.7036, 2.3249, 1.9118, 2.0803, 2.1193, 1.8693], + device='cuda:1'), covar=tensor([0.0797, 0.0887, 0.0960, 0.0667, 0.0817, 0.0719, 0.0811, 0.0642], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0239, 0.0226, 0.0214, 0.0189, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 05:14:05,074 INFO [train.py:903] (1/4) Epoch 25, batch 5150, loss[loss=0.1919, simple_loss=0.2852, pruned_loss=0.04933, over 19684.00 frames. ], tot_loss[loss=0.201, simple_loss=0.282, pruned_loss=0.05998, over 3812641.34 frames. ], batch size: 59, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:14:16,406 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 05:14:23,763 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6332, 4.2364, 2.9211, 3.7499, 1.1702, 4.2046, 4.0584, 4.1698], + device='cuda:1'), covar=tensor([0.0626, 0.0868, 0.1788, 0.0802, 0.3725, 0.0678, 0.0933, 0.1253], + device='cuda:1'), in_proj_covar=tensor([0.0524, 0.0427, 0.0511, 0.0358, 0.0412, 0.0454, 0.0446, 0.0477], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:14:48,108 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.295e+02 6.704e+02 8.101e+02 2.101e+03, threshold=1.341e+03, percent-clipped=6.0 +2023-04-03 05:14:52,541 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:15:08,233 INFO [train.py:903] (1/4) Epoch 25, batch 5200, loss[loss=0.2264, simple_loss=0.3107, pruned_loss=0.07109, over 19622.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2828, pruned_loss=0.06057, over 3789903.05 frames. ], batch size: 57, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:15:23,456 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 05:15:44,163 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169100.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:08,636 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 05:16:13,024 INFO [train.py:903] (1/4) Epoch 25, batch 5250, loss[loss=0.2358, simple_loss=0.3124, pruned_loss=0.07962, over 19654.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06063, over 3802271.04 frames. ], batch size: 60, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:16:27,842 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:56,981 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.211e+02 5.791e+02 7.204e+02 1.532e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 05:17:16,201 INFO [train.py:903] (1/4) Epoch 25, batch 5300, loss[loss=0.1718, simple_loss=0.2651, pruned_loss=0.03925, over 19667.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06098, over 3816242.26 frames. ], batch size: 55, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:17:22,416 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:17:34,466 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 05:18:18,931 INFO [train.py:903] (1/4) Epoch 25, batch 5350, loss[loss=0.2073, simple_loss=0.2718, pruned_loss=0.07133, over 19753.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.0609, over 3830803.30 frames. ], batch size: 45, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:18:56,377 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 05:19:04,350 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 4.992e+02 6.506e+02 8.048e+02 1.510e+03, threshold=1.301e+03, percent-clipped=6.0 +2023-04-03 05:19:07,176 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7358, 4.2355, 4.4733, 4.4442, 1.8315, 4.2278, 3.6183, 4.2139], + device='cuda:1'), covar=tensor([0.1723, 0.0996, 0.0647, 0.0753, 0.5927, 0.0959, 0.0756, 0.1116], + device='cuda:1'), in_proj_covar=tensor([0.0805, 0.0774, 0.0978, 0.0861, 0.0850, 0.0741, 0.0583, 0.0909], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 05:19:24,336 INFO [train.py:903] (1/4) Epoch 25, batch 5400, loss[loss=0.2385, simple_loss=0.3164, pruned_loss=0.08033, over 19690.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2853, pruned_loss=0.06146, over 3817909.12 frames. ], batch size: 59, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:19:38,033 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-03 05:19:38,683 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3836, 2.3883, 2.2340, 2.5769, 2.3443, 2.1177, 2.2073, 2.3540], + device='cuda:1'), covar=tensor([0.0842, 0.1212, 0.1049, 0.0817, 0.1066, 0.0451, 0.1113, 0.0589], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0355, 0.0313, 0.0251, 0.0302, 0.0254, 0.0314, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:20:27,258 INFO [train.py:903] (1/4) Epoch 25, batch 5450, loss[loss=0.201, simple_loss=0.2773, pruned_loss=0.06235, over 19407.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2852, pruned_loss=0.06135, over 3821839.06 frames. ], batch size: 48, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:20:33,628 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-04-03 05:20:36,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-03 05:20:36,562 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:20:39,943 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169331.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:20:46,792 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2365, 1.2783, 1.2259, 1.0167, 1.0491, 1.1232, 0.0717, 0.3340], + device='cuda:1'), covar=tensor([0.0619, 0.0626, 0.0429, 0.0583, 0.1212, 0.0614, 0.1375, 0.1087], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0363, 0.0364, 0.0391, 0.0467, 0.0399, 0.0344, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 05:20:50,641 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 05:21:11,460 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.467e+02 5.289e+02 7.172e+02 1.661e+03, threshold=1.058e+03, percent-clipped=2.0 +2023-04-03 05:21:29,438 INFO [train.py:903] (1/4) Epoch 25, batch 5500, loss[loss=0.2002, simple_loss=0.2838, pruned_loss=0.05833, over 19535.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.0616, over 3836513.10 frames. ], batch size: 56, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:21:57,703 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 05:22:24,178 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-03 05:22:33,001 INFO [train.py:903] (1/4) Epoch 25, batch 5550, loss[loss=0.225, simple_loss=0.3058, pruned_loss=0.07211, over 13436.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06194, over 3812521.35 frames. ], batch size: 136, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:22:43,786 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 05:23:00,564 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:00,776 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:04,001 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169446.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:23:17,172 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 5.192e+02 6.082e+02 7.576e+02 1.216e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-03 05:23:33,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 05:23:36,938 INFO [train.py:903] (1/4) Epoch 25, batch 5600, loss[loss=0.2098, simple_loss=0.293, pruned_loss=0.06332, over 19479.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2855, pruned_loss=0.06156, over 3811720.06 frames. 
], batch size: 64, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:23:44,272 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:38,814 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:39,769 INFO [train.py:903] (1/4) Epoch 25, batch 5650, loss[loss=0.2167, simple_loss=0.2989, pruned_loss=0.06723, over 19684.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06111, over 3818158.74 frames. ], batch size: 60, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:25:24,924 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.961e+02 6.158e+02 8.288e+02 1.627e+03, threshold=1.232e+03, percent-clipped=5.0 +2023-04-03 05:25:27,491 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169559.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:25:30,433 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 05:25:37,669 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3693, 1.2999, 1.3441, 1.4074, 0.9803, 1.4324, 1.3530, 1.4024], + device='cuda:1'), covar=tensor([0.0860, 0.0947, 0.0983, 0.0639, 0.0848, 0.0799, 0.0851, 0.0728], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0225, 0.0238, 0.0225, 0.0212, 0.0188, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 05:25:43,210 INFO [train.py:903] (1/4) Epoch 25, batch 5700, loss[loss=0.2401, simple_loss=0.3225, pruned_loss=0.07887, over 19673.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2855, pruned_loss=0.0617, over 3816442.60 frames. ], batch size: 55, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:11,280 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:26:11,668 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 05:26:12,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.49 vs. limit=5.0 +2023-04-03 05:26:22,649 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-03 05:26:29,605 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0000, 2.0683, 2.2833, 2.7212, 2.0815, 2.5616, 2.2661, 2.0442], + device='cuda:1'), covar=tensor([0.4497, 0.3976, 0.1891, 0.2503, 0.4344, 0.2235, 0.5304, 0.3577], + device='cuda:1'), in_proj_covar=tensor([0.0920, 0.0995, 0.0730, 0.0942, 0.0898, 0.0833, 0.0853, 0.0795], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 05:26:47,653 INFO [train.py:903] (1/4) Epoch 25, batch 5750, loss[loss=0.1864, simple_loss=0.2763, pruned_loss=0.04819, over 19664.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2858, pruned_loss=0.06173, over 3813078.19 frames. ], batch size: 55, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:48,806 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 05:26:59,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-03 05:27:04,031 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 05:27:06,674 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:09,124 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3060, 1.4308, 2.0747, 1.6076, 3.1648, 4.7566, 4.5556, 5.0709], + device='cuda:1'), covar=tensor([0.1701, 0.3912, 0.3274, 0.2349, 0.0598, 0.0188, 0.0183, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0326, 0.0356, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 05:27:10,245 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:32,988 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.765e+02 5.968e+02 8.362e+02 1.575e+03, threshold=1.194e+03, percent-clipped=5.0 +2023-04-03 05:27:52,476 INFO [train.py:903] (1/4) Epoch 25, batch 5800, loss[loss=0.2026, simple_loss=0.2894, pruned_loss=0.0579, over 19401.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2861, pruned_loss=0.06173, over 3829186.83 frames. ], batch size: 70, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:28:20,515 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1836, 1.3026, 1.9135, 1.5338, 2.8993, 4.5907, 4.3907, 4.9083], + device='cuda:1'), covar=tensor([0.1759, 0.3934, 0.3401, 0.2405, 0.0715, 0.0208, 0.0181, 0.0234], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0326, 0.0357, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 05:28:27,879 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169700.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:28:30,466 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169702.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:28:55,625 INFO [train.py:903] (1/4) Epoch 25, batch 5850, loss[loss=0.2145, simple_loss=0.3043, pruned_loss=0.06233, over 19297.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2862, pruned_loss=0.06183, over 3812530.37 frames. 
], batch size: 66, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:29:00,327 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:02,815 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169727.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:29:08,752 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7011, 1.7847, 2.0540, 2.0563, 3.3412, 2.8799, 3.4936, 1.6671], + device='cuda:1'), covar=tensor([0.2492, 0.4537, 0.2998, 0.1924, 0.1526, 0.1998, 0.1698, 0.4581], + device='cuda:1'), in_proj_covar=tensor([0.0548, 0.0663, 0.0736, 0.0501, 0.0630, 0.0542, 0.0668, 0.0565], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 05:29:28,428 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169747.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:41,209 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.003e+02 6.037e+02 8.413e+02 1.989e+03, threshold=1.207e+03, percent-clipped=6.0 +2023-04-03 05:29:51,800 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:59,816 INFO [train.py:903] (1/4) Epoch 25, batch 5900, loss[loss=0.1612, simple_loss=0.2361, pruned_loss=0.04312, over 19740.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2857, pruned_loss=0.06162, over 3803667.86 frames. ], batch size: 46, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:30:04,519 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 05:30:27,865 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 05:30:41,016 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:30:56,655 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:04,220 INFO [train.py:903] (1/4) Epoch 25, batch 5950, loss[loss=0.2141, simple_loss=0.2956, pruned_loss=0.06632, over 17420.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2851, pruned_loss=0.06126, over 3812211.01 frames. ], batch size: 101, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:31:28,366 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169840.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:39,983 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:49,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.837e+02 6.128e+02 7.395e+02 1.765e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-03 05:32:09,287 INFO [train.py:903] (1/4) Epoch 25, batch 6000, loss[loss=0.2189, simple_loss=0.2991, pruned_loss=0.06931, over 18189.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06037, over 3811439.75 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:32:09,287 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 05:32:21,943 INFO [train.py:937] (1/4) Epoch 25, validation: loss=0.1675, simple_loss=0.2674, pruned_loss=0.03383, over 944034.00 frames. 
+2023-04-03 05:32:21,945 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 05:32:25,781 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:44,059 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-03 05:32:48,283 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:20,601 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:21,670 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1325, 1.2905, 1.5708, 1.2861, 2.7671, 1.1291, 2.1795, 3.0801], + device='cuda:1'), covar=tensor([0.0567, 0.2885, 0.2756, 0.2001, 0.0700, 0.2406, 0.1246, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0414, 0.0370, 0.0390, 0.0350, 0.0376, 0.0351, 0.0389, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:33:26,747 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 05:33:26,974 INFO [train.py:903] (1/4) Epoch 25, batch 6050, loss[loss=0.2015, simple_loss=0.2817, pruned_loss=0.06065, over 19669.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06101, over 3822527.54 frames. ], batch size: 58, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:00,504 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1084, 1.9933, 1.8708, 1.6703, 1.5954, 1.7017, 0.5499, 1.0642], + device='cuda:1'), covar=tensor([0.0672, 0.0682, 0.0505, 0.0844, 0.1192, 0.0994, 0.1426, 0.1154], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0360, 0.0362, 0.0387, 0.0464, 0.0396, 0.0342, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 05:34:12,786 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.971e+02 6.200e+02 7.955e+02 1.563e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 05:34:16,767 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4477, 1.2932, 1.3150, 1.9607, 1.4605, 1.5769, 1.7110, 1.4308], + device='cuda:1'), covar=tensor([0.1016, 0.1216, 0.1245, 0.0793, 0.0970, 0.1000, 0.1017, 0.0916], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0238, 0.0225, 0.0213, 0.0188, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:1') +2023-04-03 05:34:30,155 INFO [train.py:903] (1/4) Epoch 25, batch 6100, loss[loss=0.1945, simple_loss=0.2665, pruned_loss=0.06128, over 19735.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06075, over 3826300.49 frames. 
], batch size: 51, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:44,073 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:35:07,103 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9697, 4.5158, 2.8397, 3.9393, 0.6231, 4.5489, 4.2907, 4.4723], + device='cuda:1'), covar=tensor([0.0519, 0.0921, 0.1878, 0.0778, 0.4541, 0.0566, 0.0820, 0.0987], + device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0420, 0.0503, 0.0353, 0.0406, 0.0444, 0.0441, 0.0470], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:35:20,547 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8048, 1.9267, 2.1853, 2.3422, 1.7649, 2.2105, 2.2060, 2.0150], + device='cuda:1'), covar=tensor([0.4266, 0.3834, 0.2005, 0.2483, 0.4180, 0.2333, 0.4849, 0.3412], + device='cuda:1'), in_proj_covar=tensor([0.0919, 0.0994, 0.0731, 0.0942, 0.0898, 0.0832, 0.0853, 0.0796], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 05:35:35,275 INFO [train.py:903] (1/4) Epoch 25, batch 6150, loss[loss=0.1656, simple_loss=0.2421, pruned_loss=0.04449, over 19089.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2825, pruned_loss=0.06009, over 3837283.27 frames. ], batch size: 42, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:36:07,480 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 05:36:18,154 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8871, 2.6270, 2.4700, 2.7896, 2.6518, 2.4905, 2.3666, 2.8447], + device='cuda:1'), covar=tensor([0.0893, 0.1590, 0.1363, 0.1123, 0.1370, 0.0499, 0.1410, 0.0606], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0353, 0.0311, 0.0251, 0.0300, 0.0252, 0.0312, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:36:22,371 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 4.967e+02 5.743e+02 7.363e+02 2.013e+03, threshold=1.149e+03, percent-clipped=2.0 +2023-04-03 05:36:28,744 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:36:40,353 INFO [train.py:903] (1/4) Epoch 25, batch 6200, loss[loss=0.2317, simple_loss=0.3114, pruned_loss=0.07597, over 18039.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.0607, over 3837884.76 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:37:04,431 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:13,786 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:26,466 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:43,644 INFO [train.py:903] (1/4) Epoch 25, batch 6250, loss[loss=0.1751, simple_loss=0.2671, pruned_loss=0.04151, over 19671.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2831, pruned_loss=0.06049, over 3840707.56 frames. 
], batch size: 59, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:38:16,020 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 05:38:16,212 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170148.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:38:29,526 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.199e+02 6.170e+02 7.836e+02 1.706e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 05:38:47,597 INFO [train.py:903] (1/4) Epoch 25, batch 6300, loss[loss=0.2614, simple_loss=0.3251, pruned_loss=0.09883, over 13630.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2832, pruned_loss=0.06071, over 3835190.13 frames. ], batch size: 136, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:39:32,202 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170206.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:39:51,010 INFO [train.py:903] (1/4) Epoch 25, batch 6350, loss[loss=0.1963, simple_loss=0.272, pruned_loss=0.06029, over 19485.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.06075, over 3844728.67 frames. ], batch size: 49, lr: 3.25e-03, grad_scale: 4.0 +2023-04-03 05:39:53,842 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:36,695 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.417e+02 5.475e+02 7.140e+02 1.571e+03, threshold=1.095e+03, percent-clipped=3.0 +2023-04-03 05:40:42,896 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:54,621 INFO [train.py:903] (1/4) Epoch 25, batch 6400, loss[loss=0.203, simple_loss=0.2803, pruned_loss=0.06285, over 19794.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06058, over 3846666.31 frames. ], batch size: 49, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:41:40,119 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-03 05:41:46,479 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1306, 1.2916, 1.6679, 1.2535, 2.7488, 3.5947, 3.2266, 3.7904], + device='cuda:1'), covar=tensor([0.1699, 0.3955, 0.3471, 0.2635, 0.0574, 0.0198, 0.0220, 0.0261], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0330, 0.0360, 0.0269, 0.0251, 0.0193, 0.0218, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 05:41:59,021 INFO [train.py:903] (1/4) Epoch 25, batch 6450, loss[loss=0.2041, simple_loss=0.2974, pruned_loss=0.05542, over 19685.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.05999, over 3840021.20 frames. ], batch size: 60, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:42:40,350 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:42:44,564 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.688e+02 6.468e+02 8.318e+02 2.178e+03, threshold=1.294e+03, percent-clipped=13.0 +2023-04-03 05:42:45,723 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 05:43:03,216 INFO [train.py:903] (1/4) Epoch 25, batch 6500, loss[loss=0.2196, simple_loss=0.2887, pruned_loss=0.07522, over 19392.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06048, over 3842542.03 frames. ], batch size: 47, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:43:07,934 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 05:43:12,742 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:43:48,128 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170407.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:44:06,265 INFO [train.py:903] (1/4) Epoch 25, batch 6550, loss[loss=0.2035, simple_loss=0.2787, pruned_loss=0.06413, over 19728.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06131, over 3846582.29 frames. ], batch size: 51, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:44:52,745 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.881e+02 6.158e+02 7.799e+02 1.457e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-03 05:44:57,807 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:09,732 INFO [train.py:903] (1/4) Epoch 25, batch 6600, loss[loss=0.1749, simple_loss=0.2585, pruned_loss=0.0457, over 19751.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06135, over 3831631.90 frames. ], batch size: 51, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:45:19,102 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:20,451 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:28,700 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:51,415 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:09,045 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:13,015 INFO [train.py:903] (1/4) Epoch 25, batch 6650, loss[loss=0.218, simple_loss=0.2981, pruned_loss=0.06891, over 19683.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.0612, over 3835619.35 frames. ], batch size: 60, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:46:13,384 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:41,290 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170544.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:58,409 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.827e+02 5.978e+02 7.974e+02 2.215e+03, threshold=1.196e+03, percent-clipped=6.0 +2023-04-03 05:47:16,988 INFO [train.py:903] (1/4) Epoch 25, batch 6700, loss[loss=0.2148, simple_loss=0.2943, pruned_loss=0.06762, over 19608.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06056, over 3830550.83 frames. 
], batch size: 61, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:47:36,685 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:48:14,054 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 05:48:17,850 INFO [train.py:903] (1/4) Epoch 25, batch 6750, loss[loss=0.1874, simple_loss=0.2722, pruned_loss=0.05126, over 19663.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06127, over 3824848.29 frames. ], batch size: 53, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:48:59,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.526e+02 5.801e+02 6.899e+02 1.670e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 05:49:15,798 INFO [train.py:903] (1/4) Epoch 25, batch 6800, loss[loss=0.2242, simple_loss=0.3128, pruned_loss=0.06779, over 19299.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06072, over 3822854.67 frames. ], batch size: 66, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:50:02,603 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 05:50:03,063 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 05:50:07,362 INFO [train.py:903] (1/4) Epoch 26, batch 0, loss[loss=0.1801, simple_loss=0.2603, pruned_loss=0.04994, over 19374.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2603, pruned_loss=0.04994, over 19374.00 frames. ], batch size: 48, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:50:07,362 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 05:50:19,302 INFO [train.py:937] (1/4) Epoch 26, validation: loss=0.1673, simple_loss=0.2675, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 05:50:19,303 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 05:50:26,498 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5884, 2.5801, 2.1485, 2.6053, 2.6123, 2.2960, 2.1129, 2.5499], + device='cuda:1'), covar=tensor([0.1036, 0.1631, 0.1560, 0.1107, 0.1285, 0.0546, 0.1527, 0.0696], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0355, 0.0314, 0.0253, 0.0303, 0.0254, 0.0314, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:50:32,194 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 05:51:20,626 INFO [train.py:903] (1/4) Epoch 26, batch 50, loss[loss=0.2164, simple_loss=0.2969, pruned_loss=0.06798, over 18030.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2827, pruned_loss=0.06063, over 871316.83 frames. ], batch size: 83, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:51:30,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.268e+02 6.216e+02 7.845e+02 1.668e+03, threshold=1.243e+03, percent-clipped=9.0 +2023-04-03 05:51:54,944 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170778.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:51:55,738 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 05:52:21,933 INFO [train.py:903] (1/4) Epoch 26, batch 100, loss[loss=0.1778, simple_loss=0.2571, pruned_loss=0.04922, over 19760.00 frames. 
], tot_loss[loss=0.2005, simple_loss=0.2819, pruned_loss=0.05958, over 1540647.77 frames. ], batch size: 46, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:52:26,035 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:52:32,353 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 05:52:50,719 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:53:12,614 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 05:53:24,753 INFO [train.py:903] (1/4) Epoch 26, batch 150, loss[loss=0.2204, simple_loss=0.3045, pruned_loss=0.06821, over 19771.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06064, over 2044313.45 frames. ], batch size: 63, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:53:36,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 4.908e+02 6.470e+02 7.917e+02 1.560e+03, threshold=1.294e+03, percent-clipped=6.0 +2023-04-03 05:54:20,353 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1257, 1.9395, 1.7848, 2.0737, 1.7603, 1.8424, 1.6995, 2.0119], + device='cuda:1'), covar=tensor([0.0979, 0.1391, 0.1419, 0.0950, 0.1427, 0.0566, 0.1495, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0352, 0.0312, 0.0252, 0.0302, 0.0253, 0.0312, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:54:25,520 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 05:54:26,701 INFO [train.py:903] (1/4) Epoch 26, batch 200, loss[loss=0.2216, simple_loss=0.3009, pruned_loss=0.07116, over 19436.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2851, pruned_loss=0.06116, over 2439130.34 frames. ], batch size: 64, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:05,046 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170931.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:14,493 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:24,845 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170946.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:30,410 INFO [train.py:903] (1/4) Epoch 26, batch 250, loss[loss=0.2018, simple_loss=0.2859, pruned_loss=0.05883, over 18803.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2868, pruned_loss=0.06209, over 2748096.70 frames. ], batch size: 74, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:31,162 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. 
limit=2.0 +2023-04-03 05:55:39,620 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8349, 1.3225, 1.0627, 0.9272, 1.1381, 0.9970, 0.9315, 1.2085], + device='cuda:1'), covar=tensor([0.0689, 0.0935, 0.1193, 0.0844, 0.0594, 0.1386, 0.0677, 0.0552], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0336, 0.0270, 0.0249, 0.0342, 0.0294, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 05:55:42,603 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.848e+02 5.950e+02 8.014e+02 1.769e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 05:56:34,946 INFO [train.py:903] (1/4) Epoch 26, batch 300, loss[loss=0.1903, simple_loss=0.2839, pruned_loss=0.04832, over 18860.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06145, over 2995287.27 frames. ], batch size: 74, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:34,240 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171046.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:57:34,578 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-03 05:57:38,573 INFO [train.py:903] (1/4) Epoch 26, batch 350, loss[loss=0.1951, simple_loss=0.2699, pruned_loss=0.06018, over 19367.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2828, pruned_loss=0.06022, over 3186422.36 frames. ], batch size: 47, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:45,661 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:57:49,078 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 4.764e+02 6.020e+02 7.720e+02 1.602e+03, threshold=1.204e+03, percent-clipped=4.0 +2023-04-03 05:58:42,186 INFO [train.py:903] (1/4) Epoch 26, batch 400, loss[loss=0.2095, simple_loss=0.2853, pruned_loss=0.06682, over 19592.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.06072, over 3319470.87 frames. ], batch size: 52, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:58:55,397 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171110.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:59:43,979 INFO [train.py:903] (1/4) Epoch 26, batch 450, loss[loss=0.2141, simple_loss=0.2957, pruned_loss=0.06622, over 19717.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2842, pruned_loss=0.06143, over 3435385.63 frames. ], batch size: 63, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:59:56,340 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.217e+02 6.577e+02 9.059e+02 2.566e+03, threshold=1.315e+03, percent-clipped=7.0 +2023-04-03 06:00:19,593 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 06:00:20,813 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 06:00:30,492 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2272, 1.9978, 1.8275, 2.1446, 1.8767, 1.8711, 1.7166, 2.1179], + device='cuda:1'), covar=tensor([0.0960, 0.1395, 0.1419, 0.0948, 0.1392, 0.0574, 0.1470, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0354, 0.0314, 0.0254, 0.0304, 0.0255, 0.0315, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:00:38,896 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:00:47,725 INFO [train.py:903] (1/4) Epoch 26, batch 500, loss[loss=0.1755, simple_loss=0.2644, pruned_loss=0.04334, over 19740.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2853, pruned_loss=0.0616, over 3532348.41 frames. ], batch size: 51, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:01:05,600 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:11,706 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:52,229 INFO [train.py:903] (1/4) Epoch 26, batch 550, loss[loss=0.1829, simple_loss=0.2665, pruned_loss=0.04964, over 16009.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2842, pruned_loss=0.06089, over 3594833.59 frames. ], batch size: 35, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:03,093 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.798e+02 6.471e+02 7.842e+02 1.459e+03, threshold=1.294e+03, percent-clipped=3.0 +2023-04-03 06:02:12,074 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 06:02:44,063 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171290.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:02:55,680 INFO [train.py:903] (1/4) Epoch 26, batch 600, loss[loss=0.2114, simple_loss=0.2929, pruned_loss=0.06492, over 19484.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.0606, over 3635273.34 frames. ], batch size: 64, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:58,477 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:31,076 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:36,404 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 06:03:57,571 INFO [train.py:903] (1/4) Epoch 26, batch 650, loss[loss=0.2056, simple_loss=0.2883, pruned_loss=0.06141, over 19799.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06048, over 3684938.91 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:04:09,344 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 4.759e+02 5.860e+02 7.918e+02 1.260e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-03 06:04:15,260 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:05:01,713 INFO [train.py:903] (1/4) Epoch 26, batch 700, loss[loss=0.2012, simple_loss=0.2894, pruned_loss=0.05648, over 19710.00 frames. 
], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06117, over 3701612.22 frames. ], batch size: 59, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:05:09,285 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171405.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:08,113 INFO [train.py:903] (1/4) Epoch 26, batch 750, loss[loss=0.1766, simple_loss=0.2581, pruned_loss=0.04751, over 19492.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06189, over 3724352.22 frames. ], batch size: 49, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:06:11,881 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4649, 1.3051, 1.5487, 1.5433, 3.0429, 1.2253, 2.2472, 3.4240], + device='cuda:1'), covar=tensor([0.0505, 0.2916, 0.2944, 0.1854, 0.0689, 0.2384, 0.1345, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0374, 0.0394, 0.0354, 0.0379, 0.0354, 0.0392, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:06:12,969 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171454.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:18,672 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.123e+02 6.550e+02 8.686e+02 2.549e+03, threshold=1.310e+03, percent-clipped=11.0 +2023-04-03 06:07:12,528 INFO [train.py:903] (1/4) Epoch 26, batch 800, loss[loss=0.1862, simple_loss=0.2708, pruned_loss=0.05081, over 19692.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06137, over 3746973.95 frames. ], batch size: 53, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:07:25,360 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 06:07:32,343 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:14,615 INFO [train.py:903] (1/4) Epoch 26, batch 850, loss[loss=0.1891, simple_loss=0.2876, pruned_loss=0.04526, over 19532.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2856, pruned_loss=0.06148, over 3772401.39 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:08:24,827 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:25,743 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.689e+02 5.699e+02 7.261e+02 1.636e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-03 06:08:40,921 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:09:05,344 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 06:09:18,238 INFO [train.py:903] (1/4) Epoch 26, batch 900, loss[loss=0.1982, simple_loss=0.2811, pruned_loss=0.05764, over 19661.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.06147, over 3787000.43 frames. ], batch size: 55, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:17,102 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-03 06:10:22,218 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 06:10:23,374 INFO [train.py:903] (1/4) Epoch 26, batch 950, loss[loss=0.1549, simple_loss=0.2344, pruned_loss=0.03771, over 19748.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2855, pruned_loss=0.06168, over 3798821.85 frames. ], batch size: 45, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:34,908 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.213e+02 6.211e+02 8.451e+02 1.981e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 06:10:37,583 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171661.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:10:45,899 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9000, 1.3480, 1.0904, 0.8676, 1.1850, 0.9975, 0.8284, 1.2662], + device='cuda:1'), covar=tensor([0.0641, 0.0755, 0.1043, 0.0907, 0.0561, 0.1273, 0.0639, 0.0441], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0318, 0.0337, 0.0270, 0.0250, 0.0343, 0.0295, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:10:51,862 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171673.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:00,267 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5022, 1.4825, 1.4564, 1.8363, 1.3523, 1.6530, 1.7021, 1.5772], + device='cuda:1'), covar=tensor([0.0830, 0.0918, 0.1001, 0.0658, 0.0840, 0.0765, 0.0796, 0.0700], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0226, 0.0214, 0.0189, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 06:11:09,442 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171686.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:27,632 INFO [train.py:903] (1/4) Epoch 26, batch 1000, loss[loss=0.1939, simple_loss=0.2621, pruned_loss=0.06284, over 19738.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06167, over 3811255.54 frames. ], batch size: 46, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:11:36,041 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171707.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:12:19,800 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 06:12:31,840 INFO [train.py:903] (1/4) Epoch 26, batch 1050, loss[loss=0.1879, simple_loss=0.2603, pruned_loss=0.05776, over 19770.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2845, pruned_loss=0.06148, over 3800248.97 frames. 
], batch size: 48, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:12:42,477 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.465e+02 6.383e+02 7.807e+02 1.569e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-03 06:12:55,279 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1620, 1.4470, 1.9028, 1.5014, 2.7833, 3.8274, 3.5289, 4.0256], + device='cuda:1'), covar=tensor([0.1681, 0.3832, 0.3312, 0.2401, 0.0620, 0.0188, 0.0214, 0.0290], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0270, 0.0252, 0.0192, 0.0219, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 06:13:01,870 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 06:13:34,460 INFO [train.py:903] (1/4) Epoch 26, batch 1100, loss[loss=0.2348, simple_loss=0.3136, pruned_loss=0.07802, over 19346.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06217, over 3794197.90 frames. ], batch size: 66, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:05,449 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:09,038 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171825.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:40,985 INFO [train.py:903] (1/4) Epoch 26, batch 1150, loss[loss=0.1984, simple_loss=0.2891, pruned_loss=0.05382, over 18816.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.06157, over 3805610.85 frames. ], batch size: 74, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:41,470 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171850.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:54,274 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.298e+02 6.784e+02 8.276e+02 1.649e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-03 06:14:55,497 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:15:46,128 INFO [train.py:903] (1/4) Epoch 26, batch 1200, loss[loss=0.185, simple_loss=0.2586, pruned_loss=0.05573, over 18577.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06176, over 3813877.96 frames. ], batch size: 41, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:12,809 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 06:16:22,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:48,465 INFO [train.py:903] (1/4) Epoch 26, batch 1250, loss[loss=0.1951, simple_loss=0.2687, pruned_loss=0.06069, over 19483.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2841, pruned_loss=0.06149, over 3829539.64 frames. 
], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:53,688 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:54,744 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171955.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:58,918 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.182e+02 6.196e+02 7.754e+02 1.405e+03, threshold=1.239e+03, percent-clipped=1.0 +2023-04-03 06:17:21,574 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:17:34,210 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0060, 2.0871, 2.3158, 2.6571, 2.0541, 2.5775, 2.3432, 2.1386], + device='cuda:1'), covar=tensor([0.4414, 0.4219, 0.2085, 0.2634, 0.4493, 0.2384, 0.5083, 0.3637], + device='cuda:1'), in_proj_covar=tensor([0.0925, 0.1000, 0.0734, 0.0946, 0.0903, 0.0838, 0.0857, 0.0801], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 06:17:51,746 INFO [train.py:903] (1/4) Epoch 26, batch 1300, loss[loss=0.1766, simple_loss=0.2608, pruned_loss=0.04626, over 19848.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06137, over 3818406.22 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:24,709 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 06:18:56,754 INFO [train.py:903] (1/4) Epoch 26, batch 1350, loss[loss=0.2155, simple_loss=0.3017, pruned_loss=0.06461, over 19711.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06165, over 3811873.97 frames. ], batch size: 59, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:19:09,470 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.487e+02 6.725e+02 8.152e+02 1.378e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-03 06:19:25,619 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3339, 3.5761, 2.1326, 2.3201, 3.2255, 1.9861, 1.5882, 2.5434], + device='cuda:1'), covar=tensor([0.1296, 0.0612, 0.1104, 0.0860, 0.0527, 0.1209, 0.1079, 0.0654], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0317, 0.0336, 0.0270, 0.0249, 0.0341, 0.0292, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:19:34,030 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:01,388 INFO [train.py:903] (1/4) Epoch 26, batch 1400, loss[loss=0.199, simple_loss=0.2812, pruned_loss=0.05842, over 19679.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.06118, over 3815891.66 frames. ], batch size: 53, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:20:07,094 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:47,864 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172136.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:21:05,098 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 06:21:06,113 INFO [train.py:903] (1/4) Epoch 26, batch 1450, loss[loss=0.1744, simple_loss=0.2552, pruned_loss=0.04684, over 19726.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06128, over 3814564.14 frames. ], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:21:16,562 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 4.778e+02 6.066e+02 7.310e+02 2.231e+03, threshold=1.213e+03, percent-clipped=2.0 +2023-04-03 06:21:26,484 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-03 06:22:09,970 INFO [train.py:903] (1/4) Epoch 26, batch 1500, loss[loss=0.1674, simple_loss=0.2476, pruned_loss=0.04358, over 19822.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.06115, over 3822185.71 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:22:51,448 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172231.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:14,775 INFO [train.py:903] (1/4) Epoch 26, batch 1550, loss[loss=0.1758, simple_loss=0.2501, pruned_loss=0.05079, over 19766.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06129, over 3806966.20 frames. ], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:23:23,379 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:26,537 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.787e+02 4.585e+02 5.725e+02 7.044e+02 1.122e+03, threshold=1.145e+03, percent-clipped=0.0 +2023-04-03 06:23:59,131 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2998, 1.1963, 1.3819, 1.4151, 2.8309, 1.2333, 2.3623, 3.3552], + device='cuda:1'), covar=tensor([0.0742, 0.3267, 0.3348, 0.2251, 0.0955, 0.2729, 0.1498, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0374, 0.0395, 0.0354, 0.0380, 0.0354, 0.0393, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:24:17,338 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:24:18,407 INFO [train.py:903] (1/4) Epoch 26, batch 1600, loss[loss=0.1785, simple_loss=0.2531, pruned_loss=0.0519, over 19126.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06112, over 3821482.20 frames. ], batch size: 42, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:24:30,670 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:24:45,016 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 06:24:49,249 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1686, 3.1751, 1.9070, 2.0631, 2.8333, 1.6572, 1.5413, 2.4036], + device='cuda:1'), covar=tensor([0.1339, 0.0757, 0.1123, 0.0911, 0.0626, 0.1360, 0.1048, 0.0671], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0320, 0.0340, 0.0272, 0.0251, 0.0345, 0.0294, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:25:22,891 INFO [train.py:903] (1/4) Epoch 26, batch 1650, loss[loss=0.2353, simple_loss=0.3164, pruned_loss=0.07706, over 19533.00 frames. 
], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06048, over 3835642.56 frames. ], batch size: 54, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:25:32,972 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 4.806e+02 6.116e+02 7.815e+02 1.931e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 06:26:24,989 INFO [train.py:903] (1/4) Epoch 26, batch 1700, loss[loss=0.1508, simple_loss=0.2292, pruned_loss=0.03619, over 19743.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06114, over 3822233.05 frames. ], batch size: 45, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:26:27,915 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:26:43,085 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172414.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:27:07,050 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 06:27:27,208 INFO [train.py:903] (1/4) Epoch 26, batch 1750, loss[loss=0.1703, simple_loss=0.2522, pruned_loss=0.04425, over 19378.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0616, over 3826105.16 frames. ], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:27:39,753 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.883e+02 5.717e+02 7.034e+02 1.807e+03, threshold=1.143e+03, percent-clipped=3.0 +2023-04-03 06:27:52,712 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0697, 1.9457, 2.0061, 2.5093, 2.1777, 2.1981, 2.1819, 2.1215], + device='cuda:1'), covar=tensor([0.0694, 0.0751, 0.0845, 0.0688, 0.0806, 0.0722, 0.0865, 0.0587], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0226, 0.0214, 0.0190, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 06:28:06,467 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:28:09,144 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2306, 1.4685, 2.0381, 1.4676, 3.1963, 4.6673, 4.5058, 5.0965], + device='cuda:1'), covar=tensor([0.1674, 0.3882, 0.3322, 0.2494, 0.0562, 0.0188, 0.0167, 0.0159], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0270, 0.0252, 0.0193, 0.0219, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 06:28:26,592 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.3195, 5.7249, 3.1647, 5.0927, 1.0845, 5.9135, 5.7776, 5.9090], + device='cuda:1'), covar=tensor([0.0332, 0.0829, 0.1821, 0.0684, 0.3881, 0.0463, 0.0696, 0.0830], + device='cuda:1'), in_proj_covar=tensor([0.0518, 0.0420, 0.0507, 0.0355, 0.0406, 0.0447, 0.0441, 0.0472], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:28:32,027 INFO [train.py:903] (1/4) Epoch 26, batch 1800, loss[loss=0.2359, simple_loss=0.3115, pruned_loss=0.08018, over 17557.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06104, over 3822676.35 frames. ], batch size: 101, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:31,750 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 06:29:36,214 INFO [train.py:903] (1/4) Epoch 26, batch 1850, loss[loss=0.2254, simple_loss=0.3144, pruned_loss=0.06825, over 19731.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06127, over 3822332.02 frames. ], batch size: 63, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:46,976 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.880e+02 5.794e+02 7.257e+02 1.575e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 06:30:11,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 06:30:20,705 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3627, 3.1056, 2.2219, 2.8126, 0.8815, 3.0567, 2.9228, 2.9893], + device='cuda:1'), covar=tensor([0.1036, 0.1246, 0.2030, 0.1030, 0.3653, 0.0897, 0.1103, 0.1337], + device='cuda:1'), in_proj_covar=tensor([0.0521, 0.0423, 0.0511, 0.0357, 0.0407, 0.0450, 0.0443, 0.0475], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:30:34,172 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:30:39,841 INFO [train.py:903] (1/4) Epoch 26, batch 1900, loss[loss=0.2011, simple_loss=0.2859, pruned_loss=0.05809, over 19288.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2841, pruned_loss=0.06128, over 3824437.27 frames. ], batch size: 66, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:30:59,226 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 06:31:01,796 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:05,030 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 06:31:23,037 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0016, 5.0361, 5.8055, 5.8433, 1.9076, 5.4243, 4.5970, 5.4429], + device='cuda:1'), covar=tensor([0.1989, 0.0974, 0.0597, 0.0713, 0.6847, 0.0834, 0.0673, 0.1309], + device='cuda:1'), in_proj_covar=tensor([0.0806, 0.0774, 0.0978, 0.0859, 0.0852, 0.0745, 0.0584, 0.0906], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 06:31:30,028 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 06:31:36,140 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:44,209 INFO [train.py:903] (1/4) Epoch 26, batch 1950, loss[loss=0.1915, simple_loss=0.2844, pruned_loss=0.04926, over 19581.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2839, pruned_loss=0.06139, over 3819349.09 frames. 
], batch size: 61, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:31:57,691 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.105e+02 6.247e+02 7.487e+02 1.872e+03, threshold=1.249e+03, percent-clipped=7.0 +2023-04-03 06:32:12,308 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172670.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:43,403 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172695.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:48,408 INFO [train.py:903] (1/4) Epoch 26, batch 2000, loss[loss=0.2472, simple_loss=0.3205, pruned_loss=0.08695, over 18042.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2835, pruned_loss=0.06116, over 3804691.89 frames. ], batch size: 83, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:14,447 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 06:33:39,031 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2444, 5.2871, 6.0643, 6.0845, 1.9866, 5.6879, 4.7750, 5.7438], + device='cuda:1'), covar=tensor([0.1819, 0.0758, 0.0591, 0.0621, 0.6426, 0.0818, 0.0656, 0.1155], + device='cuda:1'), in_proj_covar=tensor([0.0803, 0.0774, 0.0978, 0.0859, 0.0852, 0.0744, 0.0584, 0.0905], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 06:33:46,802 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 06:33:51,653 INFO [train.py:903] (1/4) Epoch 26, batch 2050, loss[loss=0.1759, simple_loss=0.2502, pruned_loss=0.05084, over 19289.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2838, pruned_loss=0.06121, over 3820338.63 frames. ], batch size: 44, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:55,269 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172752.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:34:03,944 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.217e+02 6.186e+02 8.512e+02 2.102e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 06:34:06,492 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 06:34:07,796 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 06:34:27,707 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 06:34:54,263 INFO [train.py:903] (1/4) Epoch 26, batch 2100, loss[loss=0.2078, simple_loss=0.298, pruned_loss=0.0588, over 19531.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06219, over 3826095.53 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:25,202 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 06:35:47,314 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 06:35:56,669 INFO [train.py:903] (1/4) Epoch 26, batch 2150, loss[loss=0.1774, simple_loss=0.2552, pruned_loss=0.04978, over 19470.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2853, pruned_loss=0.06234, over 3812734.06 frames. 
], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:58,269 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172851.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:02,860 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1064, 1.4436, 1.6120, 1.5289, 2.7291, 1.1796, 2.2326, 3.1252], + device='cuda:1'), covar=tensor([0.0569, 0.2665, 0.2666, 0.1758, 0.0744, 0.2227, 0.1203, 0.0290], + device='cuda:1'), in_proj_covar=tensor([0.0417, 0.0371, 0.0392, 0.0351, 0.0378, 0.0352, 0.0389, 0.0410], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:36:10,067 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.604e+02 4.749e+02 6.121e+02 8.086e+02 1.727e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 06:36:14,759 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:21,374 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-03 06:36:31,065 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:59,983 INFO [train.py:903] (1/4) Epoch 26, batch 2200, loss[loss=0.2054, simple_loss=0.2986, pruned_loss=0.05614, over 19620.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2847, pruned_loss=0.06198, over 3820592.16 frames. ], batch size: 50, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:38:04,010 INFO [train.py:903] (1/4) Epoch 26, batch 2250, loss[loss=0.1603, simple_loss=0.2413, pruned_loss=0.03965, over 19761.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2847, pruned_loss=0.06207, over 3812258.79 frames. ], batch size: 48, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:38:16,970 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172960.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:38:17,987 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.806e+02 5.924e+02 7.729e+02 1.368e+03, threshold=1.185e+03, percent-clipped=2.0 +2023-04-03 06:38:51,868 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:39:07,879 INFO [train.py:903] (1/4) Epoch 26, batch 2300, loss[loss=0.1823, simple_loss=0.2756, pruned_loss=0.04452, over 19657.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2848, pruned_loss=0.06181, over 3805842.17 frames. ], batch size: 58, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:39:13,393 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 06:39:19,744 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 06:40:11,049 INFO [train.py:903] (1/4) Epoch 26, batch 2350, loss[loss=0.1823, simple_loss=0.274, pruned_loss=0.04526, over 19546.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06148, over 3818338.38 frames. 
], batch size: 56, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:40:25,951 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.175e+02 6.518e+02 7.971e+02 2.695e+03, threshold=1.304e+03, percent-clipped=8.0 +2023-04-03 06:40:43,706 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173075.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:40:54,471 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 06:41:08,278 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:10,573 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 06:41:14,043 INFO [train.py:903] (1/4) Epoch 26, batch 2400, loss[loss=0.2442, simple_loss=0.3208, pruned_loss=0.08382, over 19664.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2858, pruned_loss=0.06201, over 3819210.33 frames. ], batch size: 55, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:41:18,165 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:48,388 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5563, 1.4432, 2.0252, 1.6242, 3.1482, 4.7380, 4.5259, 5.1565], + device='cuda:1'), covar=tensor([0.1560, 0.3921, 0.3388, 0.2424, 0.0601, 0.0203, 0.0177, 0.0183], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0330, 0.0362, 0.0271, 0.0252, 0.0193, 0.0220, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 06:42:19,064 INFO [train.py:903] (1/4) Epoch 26, batch 2450, loss[loss=0.1968, simple_loss=0.2755, pruned_loss=0.05909, over 19598.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2856, pruned_loss=0.06202, over 3809984.91 frames. ], batch size: 52, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:42:32,917 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.972e+02 5.973e+02 7.732e+02 1.743e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 06:42:41,751 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173168.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:22,673 INFO [train.py:903] (1/4) Epoch 26, batch 2500, loss[loss=0.1674, simple_loss=0.2576, pruned_loss=0.03856, over 19542.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06258, over 3795773.00 frames. 
], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:43:24,454 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8906, 2.0018, 2.2342, 2.4632, 1.8873, 2.3669, 2.2243, 2.0537], + device='cuda:1'), covar=tensor([0.4123, 0.4053, 0.1975, 0.2353, 0.4141, 0.2252, 0.5002, 0.3445], + device='cuda:1'), in_proj_covar=tensor([0.0927, 0.1004, 0.0736, 0.0948, 0.0906, 0.0842, 0.0859, 0.0804], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 06:43:31,260 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:36,118 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173211.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:44:25,903 INFO [train.py:903] (1/4) Epoch 26, batch 2550, loss[loss=0.2055, simple_loss=0.288, pruned_loss=0.06145, over 19506.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.286, pruned_loss=0.06241, over 3798432.78 frames. ], batch size: 64, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:44:40,264 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3371, 2.0553, 1.6038, 1.3689, 1.8315, 1.2867, 1.3213, 1.8462], + device='cuda:1'), covar=tensor([0.1040, 0.0810, 0.1142, 0.0897, 0.0607, 0.1366, 0.0735, 0.0498], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0317, 0.0337, 0.0269, 0.0248, 0.0341, 0.0291, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:44:40,981 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.733e+02 4.982e+02 5.984e+02 8.594e+02 2.255e+03, threshold=1.197e+03, percent-clipped=6.0 +2023-04-03 06:45:23,058 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 06:45:30,060 INFO [train.py:903] (1/4) Epoch 26, batch 2600, loss[loss=0.2142, simple_loss=0.2998, pruned_loss=0.06434, over 19603.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06206, over 3814512.24 frames. ], batch size: 57, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:45:59,666 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173322.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,371 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,465 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4732, 1.4224, 1.4039, 1.7491, 1.3145, 1.6022, 1.6397, 1.5079], + device='cuda:1'), covar=tensor([0.0915, 0.0995, 0.1068, 0.0754, 0.0937, 0.0815, 0.0860, 0.0786], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0226, 0.0228, 0.0242, 0.0228, 0.0214, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 06:46:10,050 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:35,579 INFO [train.py:903] (1/4) Epoch 26, batch 2650, loss[loss=0.1921, simple_loss=0.2764, pruned_loss=0.05387, over 19757.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2863, pruned_loss=0.06246, over 3801478.68 frames. 
], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:46:41,847 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1956, 3.4154, 2.0661, 2.1263, 3.1475, 1.8532, 1.6332, 2.3623], + device='cuda:1'), covar=tensor([0.1354, 0.0689, 0.1128, 0.0895, 0.0547, 0.1241, 0.0991, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0316, 0.0336, 0.0269, 0.0248, 0.0340, 0.0290, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:46:43,004 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173356.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:45,487 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:49,589 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.033e+02 5.962e+02 7.363e+02 1.395e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 06:46:55,544 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 06:47:17,823 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:47:39,153 INFO [train.py:903] (1/4) Epoch 26, batch 2700, loss[loss=0.1942, simple_loss=0.2833, pruned_loss=0.05259, over 19125.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06249, over 3790292.28 frames. ], batch size: 69, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:00,265 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0378, 4.4564, 4.7824, 4.7931, 1.8876, 4.4868, 3.8553, 4.4789], + device='cuda:1'), covar=tensor([0.1712, 0.0869, 0.0582, 0.0698, 0.5989, 0.0887, 0.0689, 0.1146], + device='cuda:1'), in_proj_covar=tensor([0.0806, 0.0773, 0.0978, 0.0861, 0.0856, 0.0742, 0.0584, 0.0909], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 06:48:32,623 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3083, 2.3346, 2.5789, 3.0308, 2.3880, 2.9197, 2.5814, 2.3459], + device='cuda:1'), covar=tensor([0.4353, 0.4328, 0.1987, 0.2692, 0.4584, 0.2277, 0.4996, 0.3498], + device='cuda:1'), in_proj_covar=tensor([0.0928, 0.1004, 0.0736, 0.0949, 0.0908, 0.0841, 0.0859, 0.0805], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 06:48:39,711 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 06:48:41,213 INFO [train.py:903] (1/4) Epoch 26, batch 2750, loss[loss=0.214, simple_loss=0.2953, pruned_loss=0.06632, over 19544.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06198, over 3793475.14 frames. 
], batch size: 56, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:54,807 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.847e+02 4.981e+02 6.163e+02 7.639e+02 1.916e+03, threshold=1.233e+03, percent-clipped=5.0 +2023-04-03 06:49:03,530 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:34,160 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:44,190 INFO [train.py:903] (1/4) Epoch 26, batch 2800, loss[loss=0.1615, simple_loss=0.2366, pruned_loss=0.04316, over 19759.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2847, pruned_loss=0.06169, over 3809661.83 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:49:53,017 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5004, 1.5445, 1.7292, 1.7276, 2.3913, 2.2222, 2.3493, 1.1240], + device='cuda:1'), covar=tensor([0.2511, 0.4455, 0.2873, 0.1983, 0.1447, 0.2166, 0.1422, 0.4532], + device='cuda:1'), in_proj_covar=tensor([0.0545, 0.0661, 0.0739, 0.0500, 0.0629, 0.0539, 0.0666, 0.0566], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 06:49:56,421 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173509.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 06:49:56,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.71 vs. limit=5.0 +2023-04-03 06:50:00,274 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:50:48,001 INFO [train.py:903] (1/4) Epoch 26, batch 2850, loss[loss=0.2535, simple_loss=0.3317, pruned_loss=0.08762, over 18220.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06204, over 3793112.47 frames. ], batch size: 83, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:01,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.954e+02 6.202e+02 8.183e+02 1.932e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 06:51:23,144 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:51:51,093 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 06:51:52,153 INFO [train.py:903] (1/4) Epoch 26, batch 2900, loss[loss=0.2167, simple_loss=0.2947, pruned_loss=0.06937, over 19677.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.0613, over 3809913.46 frames. 
], batch size: 60, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:57,234 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173603.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:08,009 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9149, 1.9718, 2.2554, 2.5498, 1.9277, 2.4830, 2.2412, 2.1013], + device='cuda:1'), covar=tensor([0.4426, 0.4037, 0.2055, 0.2581, 0.4240, 0.2258, 0.5408, 0.3595], + device='cuda:1'), in_proj_covar=tensor([0.0928, 0.1003, 0.0736, 0.0946, 0.0907, 0.0840, 0.0859, 0.0805], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 06:52:27,669 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:33,491 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:56,769 INFO [train.py:903] (1/4) Epoch 26, batch 2950, loss[loss=0.1853, simple_loss=0.2643, pruned_loss=0.05313, over 19589.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06103, over 3815379.90 frames. ], batch size: 52, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:10,747 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.287e+02 6.719e+02 8.706e+02 2.181e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-03 06:53:23,943 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173671.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:53:59,611 INFO [train.py:903] (1/4) Epoch 26, batch 3000, loss[loss=0.2225, simple_loss=0.3064, pruned_loss=0.06928, over 18722.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2833, pruned_loss=0.06063, over 3811088.01 frames. ], batch size: 74, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:59,611 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 06:54:12,258 INFO [train.py:937] (1/4) Epoch 26, validation: loss=0.1681, simple_loss=0.2675, pruned_loss=0.03435, over 944034.00 frames. +2023-04-03 06:54:12,259 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 06:54:17,249 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 06:55:16,493 INFO [train.py:903] (1/4) Epoch 26, batch 3050, loss[loss=0.1727, simple_loss=0.2532, pruned_loss=0.04608, over 19408.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06081, over 3801707.48 frames. 
], batch size: 48, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:55:25,128 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8307, 4.4159, 2.5823, 3.8659, 1.2882, 4.4377, 4.2594, 4.3184], + device='cuda:1'), covar=tensor([0.0543, 0.0882, 0.2120, 0.0854, 0.3523, 0.0649, 0.0896, 0.1216], + device='cuda:1'), in_proj_covar=tensor([0.0522, 0.0424, 0.0510, 0.0357, 0.0408, 0.0450, 0.0447, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 06:55:30,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 4.825e+02 5.950e+02 7.456e+02 1.374e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 06:56:02,590 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173786.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:56:20,031 INFO [train.py:903] (1/4) Epoch 26, batch 3100, loss[loss=0.1893, simple_loss=0.2802, pruned_loss=0.04924, over 19661.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.06073, over 3813755.83 frames. ], batch size: 58, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:23,203 INFO [train.py:903] (1/4) Epoch 26, batch 3150, loss[loss=0.1811, simple_loss=0.2637, pruned_loss=0.04925, over 19723.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2824, pruned_loss=0.06044, over 3824624.44 frames. ], batch size: 51, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:26,872 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173853.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:57:37,134 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.081e+02 6.233e+02 7.406e+02 2.417e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 06:57:50,105 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 06:58:05,127 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173883.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:58:25,317 INFO [train.py:903] (1/4) Epoch 26, batch 3200, loss[loss=0.2128, simple_loss=0.2866, pruned_loss=0.06954, over 19724.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2832, pruned_loss=0.06119, over 3815302.35 frames. ], batch size: 51, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:58:35,898 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173908.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:59:25,935 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8243, 4.2302, 4.4923, 4.5212, 1.7265, 4.2598, 3.6566, 4.2097], + device='cuda:1'), covar=tensor([0.1559, 0.0926, 0.0599, 0.0687, 0.6012, 0.1042, 0.0737, 0.1171], + device='cuda:1'), in_proj_covar=tensor([0.0804, 0.0771, 0.0974, 0.0856, 0.0851, 0.0742, 0.0579, 0.0902], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 06:59:27,976 INFO [train.py:903] (1/4) Epoch 26, batch 3250, loss[loss=0.1936, simple_loss=0.2776, pruned_loss=0.05482, over 19677.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2841, pruned_loss=0.06176, over 3830558.78 frames. 
], batch size: 55, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:59:42,854 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.716e+02 4.798e+02 6.185e+02 8.155e+02 2.789e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-03 06:59:52,489 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173968.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:59:55,190 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-03 07:00:01,778 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:00:31,320 INFO [train.py:903] (1/4) Epoch 26, batch 3300, loss[loss=0.2172, simple_loss=0.2796, pruned_loss=0.07743, over 19034.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.06177, over 3821958.96 frames. ], batch size: 42, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 07:00:35,708 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 07:01:08,019 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-03 07:01:09,845 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8591, 1.9286, 1.4629, 1.8722, 1.9376, 1.5318, 1.5284, 1.7468], + device='cuda:1'), covar=tensor([0.1240, 0.1662, 0.1956, 0.1202, 0.1403, 0.0983, 0.1962, 0.1039], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0358, 0.0316, 0.0256, 0.0306, 0.0255, 0.0319, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:01:14,201 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174033.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:17,513 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:25,974 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:35,837 INFO [train.py:903] (1/4) Epoch 26, batch 3350, loss[loss=0.2309, simple_loss=0.3085, pruned_loss=0.07671, over 19120.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06162, over 3828431.68 frames. 
], batch size: 69, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:01:39,779 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0899, 1.0334, 1.4482, 1.2933, 2.5041, 1.1655, 2.3061, 2.8823], + device='cuda:1'), covar=tensor([0.0758, 0.3898, 0.3308, 0.2272, 0.1175, 0.2665, 0.1182, 0.0487], + device='cuda:1'), in_proj_covar=tensor([0.0419, 0.0374, 0.0393, 0.0352, 0.0378, 0.0354, 0.0392, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:01:49,564 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.999e+02 5.843e+02 7.881e+02 1.777e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 07:01:56,840 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:01,253 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2209, 2.0636, 2.1348, 2.9786, 1.9098, 2.4341, 2.3411, 2.2842], + device='cuda:1'), covar=tensor([0.0792, 0.0865, 0.0884, 0.0727, 0.0866, 0.0704, 0.0880, 0.0632], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0224, 0.0227, 0.0241, 0.0226, 0.0213, 0.0189, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 07:02:25,285 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 07:02:28,229 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:39,919 INFO [train.py:903] (1/4) Epoch 26, batch 3400, loss[loss=0.2127, simple_loss=0.297, pruned_loss=0.06418, over 19543.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06092, over 3824857.66 frames. ], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:02:49,781 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9300, 1.7954, 1.5978, 1.9594, 1.6764, 1.6914, 1.5336, 1.8208], + device='cuda:1'), covar=tensor([0.1052, 0.1352, 0.1499, 0.0956, 0.1334, 0.0585, 0.1559, 0.0826], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0357, 0.0316, 0.0255, 0.0305, 0.0255, 0.0318, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:03:41,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 07:03:42,256 INFO [train.py:903] (1/4) Epoch 26, batch 3450, loss[loss=0.2519, simple_loss=0.3213, pruned_loss=0.09122, over 18188.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06135, over 3827180.70 frames. ], batch size: 83, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:53,283 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:03:57,805 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.666e+02 5.900e+02 7.449e+02 1.550e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-03 07:04:47,402 INFO [train.py:903] (1/4) Epoch 26, batch 3500, loss[loss=0.2002, simple_loss=0.2787, pruned_loss=0.06083, over 19532.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2838, pruned_loss=0.06105, over 3836604.47 frames. 
], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:05:18,028 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174224.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:05:49,660 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174249.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 07:05:50,379 INFO [train.py:903] (1/4) Epoch 26, batch 3550, loss[loss=0.1974, simple_loss=0.2774, pruned_loss=0.05875, over 19861.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0617, over 3828996.56 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:03,174 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.676e+02 6.063e+02 7.972e+02 1.969e+03, threshold=1.213e+03, percent-clipped=7.0 +2023-04-03 07:06:53,384 INFO [train.py:903] (1/4) Epoch 26, batch 3600, loss[loss=0.1607, simple_loss=0.2441, pruned_loss=0.03865, over 19636.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06133, over 3831076.86 frames. ], batch size: 50, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:54,849 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:05,346 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:46,744 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 07:07:54,107 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:57,125 INFO [train.py:903] (1/4) Epoch 26, batch 3650, loss[loss=0.2927, simple_loss=0.3458, pruned_loss=0.1198, over 13091.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2837, pruned_loss=0.06112, over 3826225.18 frames. ], batch size: 136, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:08:12,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.064e+02 6.896e+02 8.623e+02 2.807e+03, threshold=1.379e+03, percent-clipped=9.0 +2023-04-03 07:08:26,840 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:32,710 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:35,144 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:02,314 INFO [train.py:903] (1/4) Epoch 26, batch 3700, loss[loss=0.2098, simple_loss=0.2947, pruned_loss=0.0625, over 19580.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06079, over 3837715.84 frames. ], batch size: 61, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:09:10,627 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:10:07,623 INFO [train.py:903] (1/4) Epoch 26, batch 3750, loss[loss=0.222, simple_loss=0.3002, pruned_loss=0.07197, over 19538.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.06072, over 3828404.21 frames. 
], batch size: 64, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:10:20,549 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.722e+02 4.995e+02 5.757e+02 7.069e+02 1.518e+03, threshold=1.151e+03, percent-clipped=1.0 +2023-04-03 07:11:01,649 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:04,044 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:10,959 INFO [train.py:903] (1/4) Epoch 26, batch 3800, loss[loss=0.1652, simple_loss=0.2457, pruned_loss=0.04231, over 19844.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2837, pruned_loss=0.06057, over 3828466.02 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:11:12,311 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:42,936 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 07:12:14,058 INFO [train.py:903] (1/4) Epoch 26, batch 3850, loss[loss=0.2594, simple_loss=0.3421, pruned_loss=0.0883, over 19528.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06131, over 3827210.03 frames. ], batch size: 54, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:12:27,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 5.118e+02 6.382e+02 8.415e+02 1.605e+03, threshold=1.276e+03, percent-clipped=5.0 +2023-04-03 07:13:15,072 INFO [train.py:903] (1/4) Epoch 26, batch 3900, loss[loss=0.1916, simple_loss=0.2729, pruned_loss=0.05516, over 19869.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.285, pruned_loss=0.06194, over 3818372.01 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:13:29,239 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9085, 5.0907, 5.7353, 5.7178, 2.1669, 5.4426, 4.6697, 5.4133], + device='cuda:1'), covar=tensor([0.1700, 0.0815, 0.0529, 0.0612, 0.6262, 0.0893, 0.0600, 0.1070], + device='cuda:1'), in_proj_covar=tensor([0.0813, 0.0775, 0.0978, 0.0866, 0.0856, 0.0745, 0.0584, 0.0911], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 07:13:36,311 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:13,851 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:20,760 INFO [train.py:903] (1/4) Epoch 26, batch 3950, loss[loss=0.1974, simple_loss=0.2903, pruned_loss=0.05228, over 19315.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06142, over 3825606.33 frames. ], batch size: 66, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:14:24,354 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 07:14:24,466 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:34,032 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.927e+02 6.085e+02 7.650e+02 1.385e+03, threshold=1.217e+03, percent-clipped=3.0 +2023-04-03 07:15:24,383 INFO [train.py:903] (1/4) Epoch 26, batch 4000, loss[loss=0.1759, simple_loss=0.2573, pruned_loss=0.04726, over 19789.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06132, over 3822892.43 frames. ], batch size: 48, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:15:54,764 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:13,009 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 07:16:16,808 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4393, 1.5352, 1.8312, 1.7201, 2.6844, 2.3390, 2.8341, 1.3976], + device='cuda:1'), covar=tensor([0.2519, 0.4293, 0.2682, 0.1905, 0.1486, 0.2076, 0.1425, 0.4346], + device='cuda:1'), in_proj_covar=tensor([0.0549, 0.0666, 0.0742, 0.0502, 0.0629, 0.0542, 0.0666, 0.0567], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:16:23,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4695, 1.5098, 1.8504, 1.7073, 2.6979, 2.3934, 2.8348, 1.2232], + device='cuda:1'), covar=tensor([0.2479, 0.4452, 0.2773, 0.1974, 0.1586, 0.2047, 0.1486, 0.4716], + device='cuda:1'), in_proj_covar=tensor([0.0548, 0.0665, 0.0741, 0.0502, 0.0629, 0.0542, 0.0665, 0.0567], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:16:24,949 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:27,000 INFO [train.py:903] (1/4) Epoch 26, batch 4050, loss[loss=0.1521, simple_loss=0.2345, pruned_loss=0.03482, over 14636.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06104, over 3816085.43 frames. 
], batch size: 32, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:16:27,156 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:27,412 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:40,322 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:41,099 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 4.947e+02 6.251e+02 7.600e+02 1.203e+03, threshold=1.250e+03, percent-clipped=0.0 +2023-04-03 07:16:52,104 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:59,292 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174773.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:01,624 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174775.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:32,067 INFO [train.py:903] (1/4) Epoch 26, batch 4100, loss[loss=0.2238, simple_loss=0.3064, pruned_loss=0.07063, over 19664.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.06063, over 3805262.32 frames. ], batch size: 60, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:07,612 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 07:18:22,222 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3434, 3.0709, 2.4431, 2.7632, 0.8089, 3.0728, 2.9366, 2.9676], + device='cuda:1'), covar=tensor([0.1121, 0.1418, 0.1911, 0.1098, 0.4081, 0.1030, 0.1235, 0.1462], + device='cuda:1'), in_proj_covar=tensor([0.0521, 0.0421, 0.0506, 0.0354, 0.0409, 0.0447, 0.0444, 0.0471], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:18:38,182 INFO [train.py:903] (1/4) Epoch 26, batch 4150, loss[loss=0.1907, simple_loss=0.2711, pruned_loss=0.0551, over 19537.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06137, over 3804368.77 frames. 
], batch size: 56, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:53,316 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.046e+02 6.484e+02 8.542e+02 2.236e+03, threshold=1.297e+03, percent-clipped=8.0 +2023-04-03 07:18:57,234 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:05,601 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:11,135 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3242, 3.0555, 2.3360, 2.4441, 2.3145, 2.7730, 1.1011, 2.2318], + device='cuda:1'), covar=tensor([0.0676, 0.0622, 0.0753, 0.1141, 0.1059, 0.1132, 0.1520, 0.1080], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0362, 0.0366, 0.0389, 0.0469, 0.0396, 0.0345, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:19:38,547 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:41,589 INFO [train.py:903] (1/4) Epoch 26, batch 4200, loss[loss=0.1847, simple_loss=0.259, pruned_loss=0.05521, over 19358.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.282, pruned_loss=0.0598, over 3813114.18 frames. ], batch size: 47, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:19:42,856 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 07:20:14,720 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0887, 2.8747, 1.8219, 1.8983, 2.6368, 1.7281, 1.6031, 2.2732], + device='cuda:1'), covar=tensor([0.1234, 0.0824, 0.1113, 0.0875, 0.0591, 0.1243, 0.0913, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0320, 0.0339, 0.0272, 0.0250, 0.0345, 0.0293, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:20:36,869 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6909, 1.6305, 1.6116, 2.2045, 1.5772, 2.1046, 2.0342, 1.7755], + device='cuda:1'), covar=tensor([0.0841, 0.0895, 0.0979, 0.0692, 0.0935, 0.0684, 0.0809, 0.0662], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 07:20:44,729 INFO [train.py:903] (1/4) Epoch 26, batch 4250, loss[loss=0.2349, simple_loss=0.3229, pruned_loss=0.0735, over 19741.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06028, over 3809316.61 frames. ], batch size: 63, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:20:55,195 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 07:21:01,859 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.804e+02 5.687e+02 7.207e+02 1.586e+03, threshold=1.137e+03, percent-clipped=5.0 +2023-04-03 07:21:07,828 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 07:21:16,333 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-04-03 07:21:30,486 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6935, 1.5718, 1.6844, 2.3080, 1.6183, 1.9924, 2.0398, 1.7926], + device='cuda:1'), covar=tensor([0.0874, 0.0980, 0.1007, 0.0723, 0.0899, 0.0801, 0.0876, 0.0704], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0213, 0.0188, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 07:21:49,465 INFO [train.py:903] (1/4) Epoch 26, batch 4300, loss[loss=0.1665, simple_loss=0.2408, pruned_loss=0.04616, over 19742.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06033, over 3810570.48 frames. ], batch size: 46, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:22:11,471 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:20,965 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:38,878 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 07:22:41,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:52,842 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:53,607 INFO [train.py:903] (1/4) Epoch 26, batch 4350, loss[loss=0.2015, simple_loss=0.2768, pruned_loss=0.06308, over 19763.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2831, pruned_loss=0.06073, over 3811737.95 frames. ], batch size: 54, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:23:08,536 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 4.980e+02 6.586e+02 7.974e+02 2.340e+03, threshold=1.317e+03, percent-clipped=8.0 +2023-04-03 07:23:14,390 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:23:33,061 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2765, 2.3156, 2.0052, 2.3837, 2.1252, 2.1267, 1.9551, 2.3653], + device='cuda:1'), covar=tensor([0.1100, 0.1533, 0.1564, 0.1110, 0.1572, 0.0543, 0.1520, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0355, 0.0314, 0.0254, 0.0304, 0.0255, 0.0316, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:23:57,001 INFO [train.py:903] (1/4) Epoch 26, batch 4400, loss[loss=0.1981, simple_loss=0.2813, pruned_loss=0.05743, over 18829.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2839, pruned_loss=0.0612, over 3813711.20 frames. ], batch size: 74, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:24:17,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 07:24:23,101 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:27,366 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 07:24:41,394 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9943, 3.6712, 2.5920, 3.2829, 0.8709, 3.6200, 3.4876, 3.5191], + device='cuda:1'), covar=tensor([0.0862, 0.1052, 0.1822, 0.0881, 0.3981, 0.0806, 0.1055, 0.1291], + device='cuda:1'), in_proj_covar=tensor([0.0524, 0.0423, 0.0509, 0.0356, 0.0409, 0.0449, 0.0447, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:24:55,270 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:59,532 INFO [train.py:903] (1/4) Epoch 26, batch 4450, loss[loss=0.2013, simple_loss=0.2932, pruned_loss=0.05467, over 19682.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06122, over 3807471.44 frames. ], batch size: 60, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:25:08,169 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:25:14,967 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.830e+02 6.413e+02 8.619e+02 2.132e+03, threshold=1.283e+03, percent-clipped=8.0 +2023-04-03 07:25:41,735 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175182.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:02,757 INFO [train.py:903] (1/4) Epoch 26, batch 4500, loss[loss=0.2528, simple_loss=0.3187, pruned_loss=0.09348, over 19290.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06135, over 3809193.31 frames. ], batch size: 66, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:26:06,570 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1857, 1.8382, 1.4819, 1.2912, 1.6234, 1.2757, 1.1993, 1.6372], + device='cuda:1'), covar=tensor([0.0829, 0.0830, 0.1105, 0.0865, 0.0588, 0.1319, 0.0648, 0.0455], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0319, 0.0338, 0.0271, 0.0249, 0.0344, 0.0292, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:26:36,001 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:38,420 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4086, 1.3242, 1.3654, 1.8209, 1.3435, 1.5591, 1.7349, 1.4587], + device='cuda:1'), covar=tensor([0.0904, 0.0986, 0.1072, 0.0653, 0.0905, 0.0818, 0.0816, 0.0769], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0225, 0.0229, 0.0242, 0.0227, 0.0214, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 07:27:08,199 INFO [train.py:903] (1/4) Epoch 26, batch 4550, loss[loss=0.207, simple_loss=0.3046, pruned_loss=0.05468, over 19611.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.06148, over 3825496.64 frames. ], batch size: 57, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:27:15,027 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-03 07:27:23,519 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.834e+02 6.204e+02 7.660e+02 2.009e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-03 07:27:28,795 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5067, 1.4983, 1.7882, 1.7579, 2.6468, 2.2693, 2.8187, 1.1402], + device='cuda:1'), covar=tensor([0.2504, 0.4569, 0.2834, 0.2009, 0.1469, 0.2261, 0.1418, 0.4736], + device='cuda:1'), in_proj_covar=tensor([0.0545, 0.0661, 0.0738, 0.0500, 0.0626, 0.0540, 0.0662, 0.0564], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:27:40,258 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 07:28:13,307 INFO [train.py:903] (1/4) Epoch 26, batch 4600, loss[loss=0.1807, simple_loss=0.2758, pruned_loss=0.04284, over 19616.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.06069, over 3834014.52 frames. ], batch size: 57, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:16,039 INFO [train.py:903] (1/4) Epoch 26, batch 4650, loss[loss=0.1865, simple_loss=0.2734, pruned_loss=0.04976, over 19691.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06013, over 3837562.53 frames. ], batch size: 59, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:29,903 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 4.879e+02 5.869e+02 7.462e+02 1.692e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 07:29:31,145 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 07:29:44,489 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 07:30:00,525 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0389, 1.2710, 1.4832, 1.4094, 2.5732, 1.1941, 2.2243, 3.0258], + device='cuda:1'), covar=tensor([0.0739, 0.3206, 0.3094, 0.2062, 0.1017, 0.2575, 0.1382, 0.0406], + device='cuda:1'), in_proj_covar=tensor([0.0418, 0.0372, 0.0392, 0.0350, 0.0380, 0.0355, 0.0391, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:30:03,972 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:17,583 INFO [train.py:903] (1/4) Epoch 26, batch 4700, loss[loss=0.1969, simple_loss=0.2732, pruned_loss=0.06029, over 19373.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.0611, over 3816551.02 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:30:35,694 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:36,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. 
limit=5.0 +2023-04-03 07:30:40,246 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2036, 1.2921, 1.2559, 1.0524, 1.0771, 1.1241, 0.0920, 0.3719], + device='cuda:1'), covar=tensor([0.0714, 0.0710, 0.0477, 0.0651, 0.1388, 0.0646, 0.1417, 0.1211], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0362, 0.0367, 0.0389, 0.0468, 0.0395, 0.0344, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:30:41,001 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 07:30:43,610 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2085, 1.9914, 2.1241, 2.5926, 2.1776, 2.3442, 2.3709, 2.1569], + device='cuda:1'), covar=tensor([0.0612, 0.0719, 0.0723, 0.0584, 0.0807, 0.0579, 0.0728, 0.0558], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0240, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 07:31:07,022 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:31:18,763 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8148, 1.5166, 1.4291, 1.7686, 1.4471, 1.5603, 1.4417, 1.6800], + device='cuda:1'), covar=tensor([0.1131, 0.1362, 0.1681, 0.1100, 0.1383, 0.0630, 0.1614, 0.0868], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0255, 0.0303, 0.0255, 0.0317, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:31:21,522 INFO [train.py:903] (1/4) Epoch 26, batch 4750, loss[loss=0.2131, simple_loss=0.2944, pruned_loss=0.0659, over 19685.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06116, over 3824308.04 frames. ], batch size: 60, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:31:37,186 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.945e+02 6.030e+02 7.655e+02 2.128e+03, threshold=1.206e+03, percent-clipped=6.0 +2023-04-03 07:31:38,753 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175463.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:22,893 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:23,789 INFO [train.py:903] (1/4) Epoch 26, batch 4800, loss[loss=0.1919, simple_loss=0.273, pruned_loss=0.05534, over 19769.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06119, over 3818026.98 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:32:25,139 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:29,427 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. 
limit=2.0 +2023-04-03 07:32:43,064 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0369, 1.2598, 1.6466, 0.9261, 2.2802, 3.0470, 2.7794, 3.2556], + device='cuda:1'), covar=tensor([0.1693, 0.4010, 0.3523, 0.2795, 0.0666, 0.0223, 0.0276, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0329, 0.0363, 0.0270, 0.0253, 0.0195, 0.0219, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 07:33:28,511 INFO [train.py:903] (1/4) Epoch 26, batch 4850, loss[loss=0.1785, simple_loss=0.2633, pruned_loss=0.04687, over 19836.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2849, pruned_loss=0.06124, over 3834807.96 frames. ], batch size: 52, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:33:42,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.553e+02 5.462e+02 6.461e+02 1.537e+03, threshold=1.092e+03, percent-clipped=2.0 +2023-04-03 07:33:49,696 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 07:33:51,022 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:34:12,009 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 07:34:17,974 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 07:34:17,999 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 07:34:19,635 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4086, 1.4601, 1.7670, 1.6544, 2.5122, 2.0731, 2.6330, 1.0714], + device='cuda:1'), covar=tensor([0.2601, 0.4485, 0.2792, 0.2026, 0.1591, 0.2391, 0.1454, 0.4831], + device='cuda:1'), in_proj_covar=tensor([0.0547, 0.0662, 0.0741, 0.0503, 0.0629, 0.0542, 0.0664, 0.0565], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:34:27,164 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 07:34:30,597 INFO [train.py:903] (1/4) Epoch 26, batch 4900, loss[loss=0.1744, simple_loss=0.2586, pruned_loss=0.04512, over 19753.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.285, pruned_loss=0.06143, over 3823492.74 frames. ], batch size: 51, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:34:46,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 07:34:50,728 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:35:31,952 INFO [train.py:903] (1/4) Epoch 26, batch 4950, loss[loss=0.1994, simple_loss=0.2751, pruned_loss=0.06184, over 19484.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06129, over 3821513.17 frames. ], batch size: 49, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:35:49,946 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.878e+02 6.108e+02 7.489e+02 1.803e+03, threshold=1.222e+03, percent-clipped=10.0 +2023-04-03 07:35:49,998 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-03 07:36:13,145 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 07:36:15,582 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175684.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:36:36,317 INFO [train.py:903] (1/4) Epoch 26, batch 5000, loss[loss=0.2491, simple_loss=0.3175, pruned_loss=0.09032, over 12834.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06099, over 3818104.99 frames. ], batch size: 136, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:36:46,117 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 07:36:56,359 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 07:37:10,893 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175728.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:16,379 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175732.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:33,271 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 07:37:39,577 INFO [train.py:903] (1/4) Epoch 26, batch 5050, loss[loss=0.1818, simple_loss=0.2633, pruned_loss=0.05015, over 19770.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06073, over 3818437.15 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:37:46,797 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:53,679 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.916e+02 5.717e+02 6.994e+02 1.273e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-04-03 07:38:14,300 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 07:38:37,156 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 07:38:42,432 INFO [train.py:903] (1/4) Epoch 26, batch 5100, loss[loss=0.2093, simple_loss=0.2873, pruned_loss=0.06566, over 19407.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06158, over 3813796.65 frames. ], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:38:44,298 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9207, 2.0468, 2.3070, 2.5893, 1.9920, 2.5149, 2.2714, 2.1010], + device='cuda:1'), covar=tensor([0.4317, 0.4113, 0.2002, 0.2480, 0.4318, 0.2278, 0.5034, 0.3558], + device='cuda:1'), in_proj_covar=tensor([0.0928, 0.1004, 0.0737, 0.0949, 0.0907, 0.0843, 0.0857, 0.0803], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 07:38:49,478 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-03 07:38:49,795 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1031, 2.7112, 2.5043, 3.0020, 2.6939, 2.4971, 2.3285, 2.9718], + device='cuda:1'), covar=tensor([0.0799, 0.1422, 0.1409, 0.0974, 0.1307, 0.0489, 0.1377, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0360, 0.0318, 0.0257, 0.0307, 0.0259, 0.0321, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:38:52,913 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 07:38:57,493 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 07:39:37,110 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:40,217 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-03 07:39:41,975 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175847.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:45,159 INFO [train.py:903] (1/4) Epoch 26, batch 5150, loss[loss=0.1661, simple_loss=0.2527, pruned_loss=0.03974, over 19767.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2857, pruned_loss=0.06196, over 3804878.15 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:39:55,549 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 07:39:58,800 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 07:40:01,362 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.003e+02 6.332e+02 8.398e+02 1.509e+03, threshold=1.266e+03, percent-clipped=8.0 +2023-04-03 07:40:14,018 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:15,270 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:30,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 07:40:47,068 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:49,313 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:50,207 INFO [train.py:903] (1/4) Epoch 26, batch 5200, loss[loss=0.1836, simple_loss=0.2762, pruned_loss=0.04544, over 19290.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2861, pruned_loss=0.06207, over 3799210.57 frames. ], batch size: 66, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:41:02,260 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-03 07:41:12,161 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0813, 2.0037, 1.9175, 1.8088, 1.5618, 1.7290, 0.6602, 1.0897], + device='cuda:1'), covar=tensor([0.0632, 0.0668, 0.0486, 0.0768, 0.1305, 0.0975, 0.1413, 0.1125], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0361, 0.0366, 0.0389, 0.0469, 0.0396, 0.0344, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 07:41:41,498 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:41:45,595 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 07:41:53,683 INFO [train.py:903] (1/4) Epoch 26, batch 5250, loss[loss=0.2399, simple_loss=0.314, pruned_loss=0.0829, over 18862.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2868, pruned_loss=0.06234, over 3805653.12 frames. ], batch size: 74, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:42:03,331 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:07,497 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.675e+02 5.849e+02 7.641e+02 1.436e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 07:42:08,950 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:11,312 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175965.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:54,948 INFO [train.py:903] (1/4) Epoch 26, batch 5300, loss[loss=0.2313, simple_loss=0.3146, pruned_loss=0.07399, over 19518.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2865, pruned_loss=0.06234, over 3814866.40 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:43:08,900 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 07:43:57,533 INFO [train.py:903] (1/4) Epoch 26, batch 5350, loss[loss=0.2761, simple_loss=0.34, pruned_loss=0.1061, over 12762.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2864, pruned_loss=0.06268, over 3806218.20 frames. ], batch size: 136, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:44:14,891 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.809e+02 5.990e+02 7.353e+02 1.688e+03, threshold=1.198e+03, percent-clipped=9.0 +2023-04-03 07:44:27,815 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176072.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:44:30,135 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 07:45:03,470 INFO [train.py:903] (1/4) Epoch 26, batch 5400, loss[loss=0.2102, simple_loss=0.2942, pruned_loss=0.06308, over 19594.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2855, pruned_loss=0.062, over 3804257.35 frames. 
], batch size: 52, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:45:07,733 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:36,747 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:37,877 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:08,011 INFO [train.py:903] (1/4) Epoch 26, batch 5450, loss[loss=0.2085, simple_loss=0.2924, pruned_loss=0.06227, over 19672.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06173, over 3820350.70 frames. ], batch size: 53, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:46:10,684 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:23,252 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.815e+02 4.882e+02 5.855e+02 6.912e+02 1.680e+03, threshold=1.171e+03, percent-clipped=1.0 +2023-04-03 07:46:55,112 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:11,318 INFO [train.py:903] (1/4) Epoch 26, batch 5500, loss[loss=0.2045, simple_loss=0.2784, pruned_loss=0.06527, over 19713.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2857, pruned_loss=0.06208, over 3813439.09 frames. ], batch size: 51, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:47:28,795 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:30,704 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 07:47:48,449 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1297, 1.9093, 1.7932, 2.0610, 1.8230, 1.8323, 1.7081, 2.0457], + device='cuda:1'), covar=tensor([0.1086, 0.1484, 0.1555, 0.1128, 0.1416, 0.0582, 0.1591, 0.0750], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0355, 0.0315, 0.0255, 0.0304, 0.0255, 0.0317, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 07:48:02,309 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:06,792 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:14,672 INFO [train.py:903] (1/4) Epoch 26, batch 5550, loss[loss=0.2115, simple_loss=0.2883, pruned_loss=0.06733, over 19772.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.06256, over 3807155.59 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:48:17,143 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 07:48:29,063 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:32,237 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.802e+02 5.930e+02 7.241e+02 1.738e+03, threshold=1.186e+03, percent-clipped=4.0 +2023-04-03 07:48:37,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. 
limit=2.0 +2023-04-03 07:48:47,672 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:08,010 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 07:49:18,894 INFO [train.py:903] (1/4) Epoch 26, batch 5600, loss[loss=0.1864, simple_loss=0.2753, pruned_loss=0.04872, over 19744.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06198, over 3809389.93 frames. ], batch size: 63, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:49:28,226 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176307.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:48,579 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-03 07:50:23,052 INFO [train.py:903] (1/4) Epoch 26, batch 5650, loss[loss=0.1855, simple_loss=0.2673, pruned_loss=0.05182, over 19726.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2862, pruned_loss=0.06221, over 3807757.57 frames. ], batch size: 51, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:50:26,195 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 07:50:32,669 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:50:39,208 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.381e+02 5.619e+02 7.137e+02 2.187e+03, threshold=1.124e+03, percent-clipped=4.0 +2023-04-03 07:51:02,581 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 07:51:24,798 INFO [train.py:903] (1/4) Epoch 26, batch 5700, loss[loss=0.2147, simple_loss=0.2963, pruned_loss=0.06657, over 19537.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2867, pruned_loss=0.06263, over 3805528.39 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:51:52,416 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:19,138 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:22,361 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 07:52:26,807 INFO [train.py:903] (1/4) Epoch 26, batch 5750, loss[loss=0.1756, simple_loss=0.2458, pruned_loss=0.05273, over 18654.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2849, pruned_loss=0.0622, over 3803336.86 frames. ], batch size: 41, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:52:30,362 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 07:52:33,963 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-03 07:52:44,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 5.112e+02 6.171e+02 7.862e+02 1.795e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 07:52:50,040 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:09,531 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:28,519 INFO [train.py:903] (1/4) Epoch 26, batch 5800, loss[loss=0.2344, simple_loss=0.3078, pruned_loss=0.08043, over 19683.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.06198, over 3806824.82 frames. ], batch size: 59, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:53:57,698 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-03 07:54:32,079 INFO [train.py:903] (1/4) Epoch 26, batch 5850, loss[loss=0.1786, simple_loss=0.2587, pruned_loss=0.04923, over 19726.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2832, pruned_loss=0.06142, over 3820429.63 frames. ], batch size: 51, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:48,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.827e+02 6.114e+02 8.553e+02 2.097e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 07:55:29,979 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 07:55:33,108 INFO [train.py:903] (1/4) Epoch 26, batch 5900, loss[loss=0.1985, simple_loss=0.2839, pruned_loss=0.05657, over 19593.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2841, pruned_loss=0.06171, over 3827066.28 frames. ], batch size: 61, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:55:37,898 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:50,019 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:51,779 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 07:55:55,452 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:22,773 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:27,331 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:35,021 INFO [train.py:903] (1/4) Epoch 26, batch 5950, loss[loss=0.1694, simple_loss=0.2478, pruned_loss=0.04549, over 19733.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06171, over 3834305.91 frames. 
], batch size: 47, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:56:51,411 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.421e+02 5.079e+02 6.315e+02 7.489e+02 1.732e+03, threshold=1.263e+03, percent-clipped=4.0 +2023-04-03 07:57:12,913 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:18,431 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:38,447 INFO [train.py:903] (1/4) Epoch 26, batch 6000, loss[loss=0.169, simple_loss=0.2586, pruned_loss=0.03976, over 19841.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2839, pruned_loss=0.06117, over 3826537.95 frames. ], batch size: 52, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:57:38,448 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 07:57:51,359 INFO [train.py:937] (1/4) Epoch 26, validation: loss=0.1675, simple_loss=0.2672, pruned_loss=0.03393, over 944034.00 frames. +2023-04-03 07:57:51,360 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 07:57:55,496 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176703.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:16,475 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176719.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:36,066 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:54,245 INFO [train.py:903] (1/4) Epoch 26, batch 6050, loss[loss=0.2059, simple_loss=0.2989, pruned_loss=0.05648, over 19688.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06127, over 3807733.64 frames. ], batch size: 59, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:59:11,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 5.094e+02 6.239e+02 7.368e+02 1.384e+03, threshold=1.248e+03, percent-clipped=1.0 +2023-04-03 07:59:28,834 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 07:59:57,922 INFO [train.py:903] (1/4) Epoch 26, batch 6100, loss[loss=0.2035, simple_loss=0.2872, pruned_loss=0.05988, over 19355.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06076, over 3816456.02 frames. ], batch size: 66, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:00:03,356 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.27 vs. limit=5.0 +2023-04-03 08:00:33,075 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:01:00,302 INFO [train.py:903] (1/4) Epoch 26, batch 6150, loss[loss=0.2309, simple_loss=0.3073, pruned_loss=0.07722, over 19479.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06167, over 3806903.40 frames. ], batch size: 64, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:01:18,087 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 4.899e+02 5.851e+02 7.446e+02 2.190e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 08:01:21,486 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 08:01:25,351 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:02:01,464 INFO [train.py:903] (1/4) Epoch 26, batch 6200, loss[loss=0.2216, simple_loss=0.2822, pruned_loss=0.08048, over 16436.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2835, pruned_loss=0.06155, over 3809325.13 frames. ], batch size: 36, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:02:54,333 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:02,383 INFO [train.py:903] (1/4) Epoch 26, batch 6250, loss[loss=0.234, simple_loss=0.3052, pruned_loss=0.08136, over 19740.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2855, pruned_loss=0.06234, over 3815157.84 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:03:20,781 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.905e+02 4.899e+02 6.025e+02 7.517e+02 2.005e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 08:03:29,821 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 08:03:33,668 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:47,539 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:51,237 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:03,339 INFO [train.py:903] (1/4) Epoch 26, batch 6300, loss[loss=0.2075, simple_loss=0.294, pruned_loss=0.06046, over 19290.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.06264, over 3812746.91 frames. ], batch size: 66, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:04:03,955 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:23,642 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177015.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:37,551 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177027.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:40,417 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 08:05:06,346 INFO [train.py:903] (1/4) Epoch 26, batch 6350, loss[loss=0.2, simple_loss=0.2876, pruned_loss=0.05614, over 19527.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06239, over 3794026.48 frames. 
], batch size: 54, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:05:14,765 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8140, 1.9039, 2.1143, 2.2998, 1.7381, 2.2070, 2.1222, 1.9391], + device='cuda:1'), covar=tensor([0.4305, 0.3955, 0.2129, 0.2557, 0.4045, 0.2349, 0.4991, 0.3612], + device='cuda:1'), in_proj_covar=tensor([0.0930, 0.1008, 0.0739, 0.0952, 0.0909, 0.0845, 0.0861, 0.0806], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 08:05:26,132 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.448e+02 5.061e+02 6.104e+02 7.524e+02 1.291e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 08:06:11,969 INFO [train.py:903] (1/4) Epoch 26, batch 6400, loss[loss=0.2047, simple_loss=0.2982, pruned_loss=0.05556, over 19611.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2854, pruned_loss=0.06189, over 3799248.90 frames. ], batch size: 57, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:06:14,727 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:45,641 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:48,154 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:05,548 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:14,951 INFO [train.py:903] (1/4) Epoch 26, batch 6450, loss[loss=0.2012, simple_loss=0.2849, pruned_loss=0.05874, over 19756.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.06133, over 3802594.05 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:07:33,748 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 4.651e+02 5.846e+02 7.696e+02 2.286e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-03 08:07:56,648 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 08:08:15,579 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177199.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:16,324 INFO [train.py:903] (1/4) Epoch 26, batch 6500, loss[loss=0.2646, simple_loss=0.3283, pruned_loss=0.1004, over 13247.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06095, over 3809077.48 frames. ], batch size: 135, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:08:17,671 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 08:08:38,037 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:49,213 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:09:20,907 INFO [train.py:903] (1/4) Epoch 26, batch 6550, loss[loss=0.2157, simple_loss=0.3033, pruned_loss=0.06406, over 19670.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06047, over 3825022.51 frames. 
], batch size: 60, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:09:28,586 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5286, 2.1566, 2.2863, 3.0892, 2.0152, 2.7161, 2.5214, 2.4401], + device='cuda:1'), covar=tensor([0.0722, 0.0859, 0.0865, 0.0710, 0.0919, 0.0721, 0.0884, 0.0616], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0224, 0.0227, 0.0239, 0.0225, 0.0212, 0.0188, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 08:09:39,901 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.796e+02 6.270e+02 7.966e+02 1.683e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 08:10:25,210 INFO [train.py:903] (1/4) Epoch 26, batch 6600, loss[loss=0.2245, simple_loss=0.2999, pruned_loss=0.07454, over 18354.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2848, pruned_loss=0.06087, over 3834214.27 frames. ], batch size: 83, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:02,477 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:27,590 INFO [train.py:903] (1/4) Epoch 26, batch 6650, loss[loss=0.1816, simple_loss=0.2675, pruned_loss=0.04785, over 19623.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2847, pruned_loss=0.06101, over 3820250.08 frames. ], batch size: 50, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:37,298 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:47,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.339e+02 5.694e+02 7.782e+02 1.307e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 08:12:11,067 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:28,604 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:30,470 INFO [train.py:903] (1/4) Epoch 26, batch 6700, loss[loss=0.2638, simple_loss=0.3302, pruned_loss=0.09867, over 13692.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06058, over 3820131.06 frames. ], batch size: 136, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:12:49,345 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4283, 1.4860, 1.6748, 1.6277, 2.2331, 2.0786, 2.3386, 0.9825], + device='cuda:1'), covar=tensor([0.2593, 0.4444, 0.2755, 0.2047, 0.1645, 0.2357, 0.1504, 0.4821], + device='cuda:1'), in_proj_covar=tensor([0.0549, 0.0665, 0.0743, 0.0502, 0.0630, 0.0543, 0.0664, 0.0567], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:13:01,365 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177423.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:02,832 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 08:13:31,550 INFO [train.py:903] (1/4) Epoch 26, batch 6750, loss[loss=0.18, simple_loss=0.2604, pruned_loss=0.04976, over 19750.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.284, pruned_loss=0.06054, over 3825244.50 frames. 
], batch size: 51, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:48,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.721e+02 5.880e+02 7.244e+02 1.873e+03, threshold=1.176e+03, percent-clipped=5.0 +2023-04-03 08:13:55,565 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177471.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:57,770 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177473.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:14:07,413 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 08:14:28,174 INFO [train.py:903] (1/4) Epoch 26, batch 6800, loss[loss=0.1945, simple_loss=0.2868, pruned_loss=0.05107, over 19740.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06037, over 3821418.00 frames. ], batch size: 63, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:15:14,924 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 08:15:15,415 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 08:15:18,382 INFO [train.py:903] (1/4) Epoch 27, batch 0, loss[loss=0.1994, simple_loss=0.2691, pruned_loss=0.0648, over 18621.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2691, pruned_loss=0.0648, over 18621.00 frames. ], batch size: 41, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:15:18,383 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 08:15:30,265 INFO [train.py:937] (1/4) Epoch 27, validation: loss=0.1666, simple_loss=0.2668, pruned_loss=0.03317, over 944034.00 frames. +2023-04-03 08:15:30,266 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 08:15:35,648 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9483, 1.8870, 1.7744, 1.6160, 1.4138, 1.5531, 0.4871, 0.8582], + device='cuda:1'), covar=tensor([0.0709, 0.0678, 0.0469, 0.0788, 0.1336, 0.0919, 0.1434, 0.1229], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0360, 0.0364, 0.0387, 0.0468, 0.0392, 0.0342, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:15:42,925 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 08:16:15,175 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.975e+02 6.244e+02 7.696e+02 2.158e+03, threshold=1.249e+03, percent-clipped=8.0 +2023-04-03 08:16:24,654 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177571.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:33,685 INFO [train.py:903] (1/4) Epoch 27, batch 50, loss[loss=0.1797, simple_loss=0.2636, pruned_loss=0.04788, over 19781.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2837, pruned_loss=0.06123, over 848799.99 frames. 
], batch size: 49, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:16:43,177 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:43,239 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:46,416 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:06,250 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 08:17:15,801 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:35,992 INFO [train.py:903] (1/4) Epoch 27, batch 100, loss[loss=0.2617, simple_loss=0.3342, pruned_loss=0.09454, over 19777.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2842, pruned_loss=0.06257, over 1510887.62 frames. ], batch size: 54, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:17:47,473 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 08:17:47,841 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3660, 1.4955, 1.8546, 1.6396, 2.9611, 4.5799, 4.4523, 5.0078], + device='cuda:1'), covar=tensor([0.1583, 0.3587, 0.3304, 0.2255, 0.0663, 0.0195, 0.0182, 0.0228], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0328, 0.0361, 0.0269, 0.0251, 0.0194, 0.0218, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 08:18:23,247 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.210e+02 6.703e+02 8.227e+02 2.617e+03, threshold=1.341e+03, percent-clipped=11.0 +2023-04-03 08:18:39,596 INFO [train.py:903] (1/4) Epoch 27, batch 150, loss[loss=0.1512, simple_loss=0.2315, pruned_loss=0.03542, over 19757.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2815, pruned_loss=0.061, over 2031770.09 frames. ], batch size: 47, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:18:44,578 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:19:40,062 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 08:19:42,199 INFO [train.py:903] (1/4) Epoch 27, batch 200, loss[loss=0.2195, simple_loss=0.3002, pruned_loss=0.0694, over 19356.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2831, pruned_loss=0.06151, over 2417055.89 frames. ], batch size: 70, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:20:29,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 4.383e+02 5.368e+02 7.234e+02 1.640e+03, threshold=1.074e+03, percent-clipped=1.0 +2023-04-03 08:20:46,585 INFO [train.py:903] (1/4) Epoch 27, batch 250, loss[loss=0.2261, simple_loss=0.2911, pruned_loss=0.0805, over 19731.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2822, pruned_loss=0.06074, over 2742415.91 frames. 
], batch size: 45, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:21:12,220 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:32,171 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:43,032 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.01 vs. limit=5.0 +2023-04-03 08:21:50,874 INFO [train.py:903] (1/4) Epoch 27, batch 300, loss[loss=0.1939, simple_loss=0.2848, pruned_loss=0.05147, over 19437.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2819, pruned_loss=0.0603, over 2991837.47 frames. ], batch size: 70, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:22:08,570 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:10,836 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177844.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:36,375 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.100e+02 5.015e+02 6.251e+02 7.839e+02 1.329e+03, threshold=1.250e+03, percent-clipped=8.0 +2023-04-03 08:22:40,049 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177867.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:42,336 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:52,700 INFO [train.py:903] (1/4) Epoch 27, batch 350, loss[loss=0.1993, simple_loss=0.2796, pruned_loss=0.05949, over 19669.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2831, pruned_loss=0.06126, over 3173513.52 frames. ], batch size: 58, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:23:00,611 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 08:23:40,481 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177915.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:23:47,963 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0019, 1.9307, 1.8257, 1.6664, 1.5830, 1.6413, 0.4195, 0.9031], + device='cuda:1'), covar=tensor([0.0698, 0.0691, 0.0475, 0.0767, 0.1256, 0.0870, 0.1452, 0.1178], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0390, 0.0472, 0.0395, 0.0346, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:23:56,874 INFO [train.py:903] (1/4) Epoch 27, batch 400, loss[loss=0.2068, simple_loss=0.2865, pruned_loss=0.06354, over 19673.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2828, pruned_loss=0.06082, over 3331125.95 frames. ], batch size: 59, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:24:43,713 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.795e+02 5.555e+02 6.674e+02 1.146e+03, threshold=1.111e+03, percent-clipped=0.0 +2023-04-03 08:24:58,338 INFO [train.py:903] (1/4) Epoch 27, batch 450, loss[loss=0.1819, simple_loss=0.2538, pruned_loss=0.05497, over 19783.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2823, pruned_loss=0.06037, over 3443594.76 frames. ], batch size: 46, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:25:39,729 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-03 08:25:40,951 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 08:25:42,669 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7077, 1.7228, 1.6249, 1.4078, 1.3898, 1.4355, 0.3303, 0.7205], + device='cuda:1'), covar=tensor([0.0730, 0.0684, 0.0457, 0.0763, 0.1334, 0.0799, 0.1476, 0.1197], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0363, 0.0366, 0.0390, 0.0472, 0.0394, 0.0346, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:25:59,701 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178026.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:02,523 INFO [train.py:903] (1/4) Epoch 27, batch 500, loss[loss=0.2381, simple_loss=0.3147, pruned_loss=0.08078, over 18825.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2827, pruned_loss=0.06048, over 3536138.91 frames. ], batch size: 74, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:26:06,495 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:48,570 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 5.151e+02 6.325e+02 8.134e+02 1.856e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-03 08:27:07,119 INFO [train.py:903] (1/4) Epoch 27, batch 550, loss[loss=0.1975, simple_loss=0.2853, pruned_loss=0.05486, over 19533.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2817, pruned_loss=0.05981, over 3609358.69 frames. ], batch size: 56, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:27:22,529 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0746, 1.3471, 1.8641, 1.2392, 2.6620, 3.6477, 3.3566, 3.9057], + device='cuda:1'), covar=tensor([0.1800, 0.3977, 0.3288, 0.2723, 0.0673, 0.0210, 0.0228, 0.0265], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0329, 0.0361, 0.0269, 0.0252, 0.0195, 0.0218, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 08:28:09,845 INFO [train.py:903] (1/4) Epoch 27, batch 600, loss[loss=0.2334, simple_loss=0.3172, pruned_loss=0.07482, over 18444.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06099, over 3650615.48 frames. ], batch size: 84, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:28:25,323 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:26,321 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:48,728 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:54,534 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 08:28:55,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 5.259e+02 6.338e+02 7.640e+02 1.730e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-03 08:29:11,517 INFO [train.py:903] (1/4) Epoch 27, batch 650, loss[loss=0.1779, simple_loss=0.2541, pruned_loss=0.05081, over 19740.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2846, pruned_loss=0.06182, over 3668577.86 frames. 
], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:30:11,957 INFO [train.py:903] (1/4) Epoch 27, batch 700, loss[loss=0.1981, simple_loss=0.29, pruned_loss=0.05314, over 19680.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2848, pruned_loss=0.06177, over 3681334.05 frames. ], batch size: 55, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:30:49,322 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:30:50,587 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9636, 1.8392, 1.6558, 1.9214, 1.7218, 1.6685, 1.5379, 1.8219], + device='cuda:1'), covar=tensor([0.1155, 0.1405, 0.1576, 0.1040, 0.1411, 0.0620, 0.1637, 0.0819], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0361, 0.0321, 0.0258, 0.0309, 0.0259, 0.0323, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 08:30:58,527 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.694e+02 6.359e+02 8.615e+02 1.569e+03, threshold=1.272e+03, percent-clipped=7.0 +2023-04-03 08:31:11,181 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:16,251 INFO [train.py:903] (1/4) Epoch 27, batch 750, loss[loss=0.2212, simple_loss=0.2985, pruned_loss=0.07194, over 19308.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2841, pruned_loss=0.06141, over 3703694.98 frames. ], batch size: 70, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:31:26,151 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:56,987 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:32:16,079 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1040, 2.0984, 1.8811, 1.6754, 1.5035, 1.6261, 0.9048, 1.2916], + device='cuda:1'), covar=tensor([0.0830, 0.0783, 0.0573, 0.1071, 0.1419, 0.1235, 0.1586, 0.1239], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0365, 0.0368, 0.0392, 0.0473, 0.0395, 0.0347, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:32:17,794 INFO [train.py:903] (1/4) Epoch 27, batch 800, loss[loss=0.2185, simple_loss=0.2976, pruned_loss=0.06964, over 17494.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2847, pruned_loss=0.06157, over 3733458.07 frames. ], batch size: 101, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:32:31,930 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 08:32:32,894 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. 
limit=2.0 +2023-04-03 08:32:58,404 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8240, 3.2921, 3.3440, 3.3496, 1.4907, 3.2123, 2.7997, 3.1346], + device='cuda:1'), covar=tensor([0.1788, 0.1112, 0.0857, 0.0979, 0.5309, 0.1212, 0.0843, 0.1326], + device='cuda:1'), in_proj_covar=tensor([0.0820, 0.0784, 0.0989, 0.0871, 0.0863, 0.0754, 0.0582, 0.0917], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 08:33:04,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 4.845e+02 6.131e+02 7.065e+02 2.334e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 08:33:20,261 INFO [train.py:903] (1/4) Epoch 27, batch 850, loss[loss=0.1767, simple_loss=0.2581, pruned_loss=0.04766, over 19683.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06158, over 3754363.85 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:33:45,280 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:33:57,599 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2592, 5.6959, 3.2172, 4.9620, 1.1297, 5.9109, 5.6507, 5.8578], + device='cuda:1'), covar=tensor([0.0417, 0.0810, 0.1691, 0.0699, 0.3928, 0.0438, 0.0771, 0.0909], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0427, 0.0515, 0.0359, 0.0410, 0.0452, 0.0447, 0.0476], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 08:34:12,690 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 08:34:16,587 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:22,832 INFO [train.py:903] (1/4) Epoch 27, batch 900, loss[loss=0.1795, simple_loss=0.2573, pruned_loss=0.05084, over 16418.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2847, pruned_loss=0.06095, over 3741850.92 frames. ], batch size: 36, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:10,868 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.108e+02 4.416e+02 5.422e+02 6.593e+02 1.258e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-04-03 08:35:17,031 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178470.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:35:28,147 INFO [train.py:903] (1/4) Epoch 27, batch 950, loss[loss=0.1951, simple_loss=0.27, pruned_loss=0.06008, over 19580.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06099, over 3759722.15 frames. ], batch size: 52, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:30,648 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 08:36:11,746 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:32,191 INFO [train.py:903] (1/4) Epoch 27, batch 1000, loss[loss=0.2142, simple_loss=0.2967, pruned_loss=0.06591, over 18767.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06099, over 3777989.44 frames. 
], batch size: 74, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:36:35,040 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:45,498 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:01,977 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 08:37:06,213 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178555.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:19,294 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.971e+02 6.338e+02 8.822e+02 2.004e+03, threshold=1.268e+03, percent-clipped=12.0 +2023-04-03 08:37:25,282 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 08:37:35,818 INFO [train.py:903] (1/4) Epoch 27, batch 1050, loss[loss=0.1962, simple_loss=0.2892, pruned_loss=0.05154, over 18070.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.06108, over 3772496.87 frames. ], batch size: 83, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:38:09,368 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 08:38:38,440 INFO [train.py:903] (1/4) Epoch 27, batch 1100, loss[loss=0.2713, simple_loss=0.3289, pruned_loss=0.1069, over 13192.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06121, over 3779812.23 frames. ], batch size: 136, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:38:50,308 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178637.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:39:07,821 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9903, 4.5704, 2.7463, 3.9858, 0.9219, 4.5654, 4.3902, 4.4848], + device='cuda:1'), covar=tensor([0.0509, 0.0873, 0.2049, 0.0838, 0.4135, 0.0603, 0.0883, 0.1171], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0428, 0.0515, 0.0361, 0.0410, 0.0453, 0.0448, 0.0476], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 08:39:27,052 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.758e+02 5.833e+02 7.524e+02 1.653e+03, threshold=1.167e+03, percent-clipped=2.0 +2023-04-03 08:39:42,276 INFO [train.py:903] (1/4) Epoch 27, batch 1150, loss[loss=0.2135, simple_loss=0.2952, pruned_loss=0.06587, over 19613.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06076, over 3786525.42 frames. ], batch size: 57, lr: 3.05e-03, grad_scale: 4.0 +2023-04-03 08:40:25,279 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178712.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:40:30,189 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178715.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:40:47,648 INFO [train.py:903] (1/4) Epoch 27, batch 1200, loss[loss=0.2715, simple_loss=0.3401, pruned_loss=0.1015, over 13505.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06088, over 3784756.27 frames. ], batch size: 137, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:41:18,720 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 08:41:38,084 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.626e+02 4.854e+02 6.177e+02 8.249e+02 1.593e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-03 08:41:53,217 INFO [train.py:903] (1/4) Epoch 27, batch 1250, loss[loss=0.1668, simple_loss=0.2424, pruned_loss=0.04554, over 19740.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2833, pruned_loss=0.0603, over 3802528.40 frames. ], batch size: 46, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:42:05,335 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:39,170 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:56,286 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-03 08:42:56,586 INFO [train.py:903] (1/4) Epoch 27, batch 1300, loss[loss=0.2001, simple_loss=0.2863, pruned_loss=0.05701, over 19742.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2829, pruned_loss=0.06026, over 3804311.87 frames. ], batch size: 63, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:43:44,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.004e+02 6.015e+02 7.494e+02 1.649e+03, threshold=1.203e+03, percent-clipped=2.0 +2023-04-03 08:43:58,885 INFO [train.py:903] (1/4) Epoch 27, batch 1350, loss[loss=0.1997, simple_loss=0.2846, pruned_loss=0.05742, over 19531.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06032, over 3792928.80 frames. ], batch size: 54, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:44:41,880 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178912.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:45:03,464 INFO [train.py:903] (1/4) Epoch 27, batch 1400, loss[loss=0.2417, simple_loss=0.3069, pruned_loss=0.08827, over 13356.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06052, over 3788216.69 frames. ], batch size: 135, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:45:05,013 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:45:09,899 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6682, 1.5055, 1.5827, 2.2093, 1.6762, 1.8939, 1.9648, 1.6546], + device='cuda:1'), covar=tensor([0.0852, 0.0970, 0.1032, 0.0717, 0.0863, 0.0819, 0.0831, 0.0765], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0225, 0.0228, 0.0240, 0.0227, 0.0214, 0.0188, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 08:45:50,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.777e+02 6.195e+02 8.137e+02 1.607e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-03 08:46:03,730 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 08:46:04,762 INFO [train.py:903] (1/4) Epoch 27, batch 1450, loss[loss=0.2233, simple_loss=0.3027, pruned_loss=0.07196, over 19604.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2832, pruned_loss=0.0608, over 3789778.10 frames. 
], batch size: 57, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:46:09,315 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:46:43,746 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 08:47:07,119 INFO [train.py:903] (1/4) Epoch 27, batch 1500, loss[loss=0.2044, simple_loss=0.2904, pruned_loss=0.05921, over 18700.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2831, pruned_loss=0.06064, over 3805984.28 frames. ], batch size: 74, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:47:42,382 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:47:45,635 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179059.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:47:54,377 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.273e+02 4.749e+02 5.669e+02 7.445e+02 1.551e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 08:48:08,107 INFO [train.py:903] (1/4) Epoch 27, batch 1550, loss[loss=0.2102, simple_loss=0.2934, pruned_loss=0.06351, over 19567.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2836, pruned_loss=0.06112, over 3809174.99 frames. ], batch size: 61, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:48:32,740 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:12,291 INFO [train.py:903] (1/4) Epoch 27, batch 1600, loss[loss=0.2047, simple_loss=0.2917, pruned_loss=0.05882, over 17281.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2842, pruned_loss=0.0614, over 3816878.79 frames. ], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:49:18,226 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:37,310 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 08:49:59,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.994e+02 6.186e+02 7.700e+02 1.707e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 08:50:06,421 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179171.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:07,878 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 08:50:09,878 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179174.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:50:14,088 INFO [train.py:903] (1/4) Epoch 27, batch 1650, loss[loss=0.2023, simple_loss=0.2863, pruned_loss=0.05909, over 19307.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2849, pruned_loss=0.06176, over 3811725.41 frames. 
], batch size: 66, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:50:21,677 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4060, 1.3725, 1.5130, 1.5289, 1.8459, 1.8810, 1.8445, 0.5497], + device='cuda:1'), covar=tensor([0.2547, 0.4507, 0.2891, 0.2040, 0.1678, 0.2438, 0.1457, 0.5159], + device='cuda:1'), in_proj_covar=tensor([0.0550, 0.0662, 0.0743, 0.0501, 0.0631, 0.0541, 0.0665, 0.0567], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 08:50:23,740 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179185.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:51,064 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2178, 1.4180, 2.1084, 1.5662, 3.1550, 4.7901, 4.6067, 5.2559], + device='cuda:1'), covar=tensor([0.1738, 0.3977, 0.3229, 0.2432, 0.0553, 0.0189, 0.0161, 0.0187], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0330, 0.0363, 0.0271, 0.0253, 0.0196, 0.0219, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 08:50:55,267 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179210.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:17,041 INFO [train.py:903] (1/4) Epoch 27, batch 1700, loss[loss=0.1648, simple_loss=0.2544, pruned_loss=0.03758, over 19747.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06143, over 3824588.63 frames. ], batch size: 51, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:51:41,283 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:52,244 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179256.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:51:58,258 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 08:52:04,047 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 5.162e+02 6.407e+02 7.875e+02 2.392e+03, threshold=1.281e+03, percent-clipped=6.0 +2023-04-03 08:52:18,199 INFO [train.py:903] (1/4) Epoch 27, batch 1750, loss[loss=0.17, simple_loss=0.2563, pruned_loss=0.04189, over 19854.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06212, over 3819007.46 frames. 
], batch size: 52, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:52:47,237 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8185, 1.5791, 1.4405, 1.7859, 1.4988, 1.5868, 1.4841, 1.6753], + device='cuda:1'), covar=tensor([0.1215, 0.1423, 0.1680, 0.1101, 0.1372, 0.0621, 0.1622, 0.0887], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0360, 0.0319, 0.0257, 0.0309, 0.0258, 0.0322, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 08:52:53,860 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2080, 1.3907, 1.7749, 1.2584, 2.5826, 3.5823, 3.3093, 3.7907], + device='cuda:1'), covar=tensor([0.1605, 0.3785, 0.3411, 0.2566, 0.0640, 0.0187, 0.0210, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0271, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 08:53:04,994 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 08:53:21,041 INFO [train.py:903] (1/4) Epoch 27, batch 1800, loss[loss=0.1755, simple_loss=0.2539, pruned_loss=0.04855, over 19356.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06169, over 3825833.89 frames. ], batch size: 47, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:51,524 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:08,231 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 4.924e+02 6.082e+02 7.444e+02 1.664e+03, threshold=1.216e+03, percent-clipped=4.0 +2023-04-03 08:54:15,340 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179371.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:54:21,585 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 08:54:23,149 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:23,937 INFO [train.py:903] (1/4) Epoch 27, batch 1850, loss[loss=0.2119, simple_loss=0.2934, pruned_loss=0.0652, over 19714.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06169, over 3821319.66 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:54:25,265 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:00,545 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-03 08:55:25,754 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3404, 1.5085, 1.9788, 1.4266, 2.7858, 3.7110, 3.4414, 3.9838], + device='cuda:1'), covar=tensor([0.1560, 0.3575, 0.3094, 0.2490, 0.0553, 0.0189, 0.0215, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0330, 0.0363, 0.0270, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 08:55:25,836 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179427.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:26,556 INFO [train.py:903] (1/4) Epoch 27, batch 1900, loss[loss=0.1632, simple_loss=0.2484, pruned_loss=0.03896, over 19743.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2858, pruned_loss=0.06229, over 3812170.76 frames. ], batch size: 45, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:55:29,215 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179430.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:55:45,919 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 08:55:49,550 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 08:55:56,529 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:56:00,928 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179455.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:56:09,140 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179462.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:56:13,659 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4072, 1.3974, 1.6518, 1.7328, 3.0518, 1.3959, 2.4081, 3.3888], + device='cuda:1'), covar=tensor([0.0554, 0.2755, 0.2758, 0.1684, 0.0681, 0.2230, 0.1321, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0376, 0.0394, 0.0352, 0.0380, 0.0357, 0.0392, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 08:56:14,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.488e+02 5.214e+02 6.120e+02 7.486e+02 1.853e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-03 08:56:15,789 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 08:56:28,548 INFO [train.py:903] (1/4) Epoch 27, batch 1950, loss[loss=0.1868, simple_loss=0.2712, pruned_loss=0.05121, over 19657.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.0618, over 3800938.67 frames. ], batch size: 53, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:01,354 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-04-03 08:57:02,016 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:57:31,833 INFO [train.py:903] (1/4) Epoch 27, batch 2000, loss[loss=0.2369, simple_loss=0.3084, pruned_loss=0.08265, over 13047.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.285, pruned_loss=0.06182, over 3794045.42 frames. 
], batch size: 137, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:32,248 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179528.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:58:19,140 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.376e+02 6.611e+02 8.547e+02 2.231e+03, threshold=1.322e+03, percent-clipped=11.0 +2023-04-03 08:58:33,840 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 08:58:34,817 INFO [train.py:903] (1/4) Epoch 27, batch 2050, loss[loss=0.244, simple_loss=0.3174, pruned_loss=0.08524, over 19605.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2835, pruned_loss=0.061, over 3820685.03 frames. ], batch size: 61, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:58:53,478 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 08:58:54,682 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 08:59:14,232 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 08:59:37,294 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179627.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:59:37,899 INFO [train.py:903] (1/4) Epoch 27, batch 2100, loss[loss=0.2398, simple_loss=0.3288, pruned_loss=0.07541, over 19508.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.061, over 3818019.51 frames. ], batch size: 64, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:07,543 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 09:00:08,046 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179652.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:00:10,145 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179654.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:00:13,435 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8433, 1.3400, 1.0751, 0.9826, 1.1370, 1.0022, 0.9236, 1.2020], + device='cuda:1'), covar=tensor([0.0727, 0.0972, 0.1237, 0.0882, 0.0652, 0.1499, 0.0753, 0.0592], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0320, 0.0337, 0.0273, 0.0250, 0.0345, 0.0294, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:00:21,264 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1976, 1.4276, 1.7757, 1.5856, 2.9261, 4.6299, 4.5252, 5.2578], + device='cuda:1'), covar=tensor([0.1809, 0.4814, 0.4417, 0.2510, 0.0707, 0.0212, 0.0201, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0270, 0.0253, 0.0196, 0.0219, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 09:00:25,323 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.609e+02 5.792e+02 6.874e+02 1.495e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 09:00:28,927 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-03 09:00:39,310 INFO [train.py:903] (1/4) Epoch 27, batch 2150, loss[loss=0.182, simple_loss=0.276, pruned_loss=0.04406, over 19694.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.06179, over 3803735.25 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:01:04,778 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 09:01:37,660 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:01:43,442 INFO [train.py:903] (1/4) Epoch 27, batch 2200, loss[loss=0.1852, simple_loss=0.267, pruned_loss=0.05172, over 19488.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06157, over 3790401.54 frames. ], batch size: 49, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:02:30,599 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.069e+02 5.235e+02 6.025e+02 7.394e+02 1.358e+03, threshold=1.205e+03, percent-clipped=1.0 +2023-04-03 09:02:32,032 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0963, 3.5644, 3.8308, 3.8520, 1.6584, 3.5999, 3.0445, 3.3589], + device='cuda:1'), covar=tensor([0.2633, 0.1544, 0.1077, 0.1466, 0.7436, 0.2098, 0.1424, 0.2077], + device='cuda:1'), in_proj_covar=tensor([0.0808, 0.0777, 0.0983, 0.0863, 0.0859, 0.0747, 0.0576, 0.0909], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 09:02:46,335 INFO [train.py:903] (1/4) Epoch 27, batch 2250, loss[loss=0.2535, simple_loss=0.3183, pruned_loss=0.09438, over 13115.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2841, pruned_loss=0.06141, over 3789900.32 frames. ], batch size: 136, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:03:21,838 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179806.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:03:33,117 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:03:50,157 INFO [train.py:903] (1/4) Epoch 27, batch 2300, loss[loss=0.196, simple_loss=0.2839, pruned_loss=0.05406, over 19724.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2831, pruned_loss=0.06074, over 3813361.12 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:03:58,676 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0229, 3.6601, 2.6541, 3.2743, 0.8711, 3.6476, 3.5272, 3.6111], + device='cuda:1'), covar=tensor([0.0718, 0.1110, 0.1914, 0.0948, 0.3906, 0.0753, 0.0944, 0.1168], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0427, 0.0512, 0.0360, 0.0407, 0.0454, 0.0446, 0.0475], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:04:02,220 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179838.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:04:03,018 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-03 09:04:05,830 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3252, 1.4834, 2.1435, 1.6010, 3.2213, 4.7710, 4.6091, 5.1964], + device='cuda:1'), covar=tensor([0.1706, 0.3853, 0.3105, 0.2384, 0.0578, 0.0177, 0.0166, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0331, 0.0364, 0.0272, 0.0254, 0.0197, 0.0220, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 09:04:20,344 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1833, 2.8827, 2.2984, 2.2590, 1.9152, 2.5290, 1.1965, 2.0825], + device='cuda:1'), covar=tensor([0.0743, 0.0651, 0.0707, 0.1218, 0.1308, 0.1160, 0.1414, 0.1200], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0391, 0.0469, 0.0396, 0.0345, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 09:04:32,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 09:04:38,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.173e+02 6.408e+02 8.011e+02 2.024e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 09:04:51,779 INFO [train.py:903] (1/4) Epoch 27, batch 2350, loss[loss=0.1955, simple_loss=0.2825, pruned_loss=0.05428, over 19244.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2834, pruned_loss=0.06097, over 3822959.25 frames. ], batch size: 66, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:05:34,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 09:05:35,358 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 09:05:45,230 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179921.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:05:48,602 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179924.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:05:51,525 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 09:05:53,778 INFO [train.py:903] (1/4) Epoch 27, batch 2400, loss[loss=0.2155, simple_loss=0.2992, pruned_loss=0.0659, over 19563.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2836, pruned_loss=0.06115, over 3817598.04 frames. ], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:06:11,775 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179942.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:06:28,955 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7560, 4.1771, 4.4085, 4.4229, 2.0213, 4.1447, 3.6460, 4.1418], + device='cuda:1'), covar=tensor([0.1722, 0.1169, 0.0651, 0.0735, 0.5679, 0.1100, 0.0709, 0.1135], + device='cuda:1'), in_proj_covar=tensor([0.0817, 0.0780, 0.0990, 0.0869, 0.0864, 0.0753, 0.0582, 0.0916], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 09:06:41,354 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 4.979e+02 6.417e+02 8.310e+02 2.182e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-03 09:06:43,106 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. 
limit=2.0 +2023-04-03 09:06:57,082 INFO [train.py:903] (1/4) Epoch 27, batch 2450, loss[loss=0.2168, simple_loss=0.3016, pruned_loss=0.06599, over 17660.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2835, pruned_loss=0.06098, over 3821077.91 frames. ], batch size: 101, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:07:21,328 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:08:00,985 INFO [train.py:903] (1/4) Epoch 27, batch 2500, loss[loss=0.1581, simple_loss=0.2426, pruned_loss=0.03679, over 19393.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2827, pruned_loss=0.06036, over 3813271.68 frames. ], batch size: 48, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:08:19,336 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:08:48,293 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 4.714e+02 5.802e+02 6.784e+02 1.958e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 09:09:02,315 INFO [train.py:903] (1/4) Epoch 27, batch 2550, loss[loss=0.2014, simple_loss=0.2815, pruned_loss=0.06066, over 19521.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2828, pruned_loss=0.06059, over 3820459.56 frames. ], batch size: 54, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:09:22,081 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:41,387 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0559, 2.1980, 2.3899, 2.6419, 2.1395, 2.5823, 2.3508, 2.2074], + device='cuda:1'), covar=tensor([0.4155, 0.3800, 0.1947, 0.2471, 0.4018, 0.2204, 0.4796, 0.3269], + device='cuda:1'), in_proj_covar=tensor([0.0932, 0.1008, 0.0741, 0.0951, 0.0908, 0.0850, 0.0857, 0.0806], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 09:09:47,038 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180113.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:54,371 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180119.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:58,714 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 09:10:04,539 INFO [train.py:903] (1/4) Epoch 27, batch 2600, loss[loss=0.1959, simple_loss=0.2707, pruned_loss=0.06052, over 19616.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2829, pruned_loss=0.06071, over 3828208.66 frames. ], batch size: 50, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:10:42,778 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:10:52,188 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.258e+02 6.240e+02 7.647e+02 1.495e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-03 09:11:07,734 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180177.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:11:08,384 INFO [train.py:903] (1/4) Epoch 27, batch 2650, loss[loss=0.2602, simple_loss=0.335, pruned_loss=0.09268, over 19580.00 frames. ], tot_loss[loss=0.202, simple_loss=0.283, pruned_loss=0.0605, over 3830124.08 frames. 
], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:11:29,718 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 09:11:38,037 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180202.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:12:04,733 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:12:11,778 INFO [train.py:903] (1/4) Epoch 27, batch 2700, loss[loss=0.2015, simple_loss=0.2936, pruned_loss=0.05467, over 19675.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06122, over 3840003.75 frames. ], batch size: 59, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:13:00,987 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.233e+02 6.139e+02 8.955e+02 2.009e+03, threshold=1.228e+03, percent-clipped=9.0 +2023-04-03 09:13:02,194 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:13:08,017 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:13:13,279 INFO [train.py:903] (1/4) Epoch 27, batch 2750, loss[loss=0.1698, simple_loss=0.239, pruned_loss=0.05035, over 19298.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06138, over 3837416.06 frames. ], batch size: 44, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:13:21,776 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0955, 1.1574, 1.1282, 0.9664, 0.9612, 0.9930, 0.0945, 0.3478], + device='cuda:1'), covar=tensor([0.0683, 0.0670, 0.0492, 0.0603, 0.1193, 0.0681, 0.1438, 0.1139], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0361, 0.0367, 0.0387, 0.0466, 0.0394, 0.0344, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 09:13:22,804 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:13:54,499 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1784, 1.3179, 1.7038, 1.3094, 2.6284, 3.4125, 3.1691, 3.6657], + device='cuda:1'), covar=tensor([0.1773, 0.4081, 0.3685, 0.2602, 0.0706, 0.0255, 0.0262, 0.0325], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0269, 0.0253, 0.0196, 0.0219, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 09:14:15,236 INFO [train.py:903] (1/4) Epoch 27, batch 2800, loss[loss=0.2046, simple_loss=0.2894, pruned_loss=0.05994, over 19763.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.06077, over 3831429.16 frames. 
], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:14:31,378 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4250, 1.3966, 1.5540, 1.5685, 1.6818, 1.9257, 1.7409, 0.5408], + device='cuda:1'), covar=tensor([0.2489, 0.4459, 0.2733, 0.1993, 0.1744, 0.2354, 0.1542, 0.5106], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0669, 0.0750, 0.0505, 0.0635, 0.0547, 0.0670, 0.0572], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 09:14:49,743 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:04,817 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.838e+02 6.554e+02 8.151e+02 1.805e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-03 09:15:08,550 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:18,463 INFO [train.py:903] (1/4) Epoch 27, batch 2850, loss[loss=0.223, simple_loss=0.3071, pruned_loss=0.06941, over 19612.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.285, pruned_loss=0.06123, over 3833504.16 frames. ], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:15:26,248 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180383.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:15:33,049 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:40,221 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:48,644 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180401.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:48,714 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1336, 1.9431, 1.9307, 1.7126, 1.5971, 1.6960, 0.4673, 1.0564], + device='cuda:1'), covar=tensor([0.0705, 0.0695, 0.0495, 0.0857, 0.1218, 0.0905, 0.1453, 0.1147], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0364, 0.0369, 0.0390, 0.0469, 0.0395, 0.0346, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 09:16:20,688 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 09:16:22,956 INFO [train.py:903] (1/4) Epoch 27, batch 2900, loss[loss=0.1732, simple_loss=0.2553, pruned_loss=0.04552, over 19615.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2847, pruned_loss=0.06123, over 3824330.18 frames. 
], batch size: 50, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:16:42,351 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2079, 1.7373, 2.0802, 3.0105, 1.9091, 2.3290, 2.5792, 1.9463], + device='cuda:1'), covar=tensor([0.0810, 0.0958, 0.0946, 0.0739, 0.0895, 0.0812, 0.0843, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0225, 0.0228, 0.0240, 0.0225, 0.0213, 0.0189, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 09:17:13,071 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.922e+02 6.355e+02 8.136e+02 1.738e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-04-03 09:17:26,071 INFO [train.py:903] (1/4) Epoch 27, batch 2950, loss[loss=0.2206, simple_loss=0.299, pruned_loss=0.07104, over 19669.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2841, pruned_loss=0.0606, over 3821808.65 frames. ], batch size: 53, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:36,104 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.18 vs. limit=5.0 +2023-04-03 09:17:56,619 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:27,181 INFO [train.py:903] (1/4) Epoch 27, batch 3000, loss[loss=0.1909, simple_loss=0.2683, pruned_loss=0.05674, over 19852.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05997, over 3829399.38 frames. ], batch size: 52, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:18:27,181 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 09:18:39,751 INFO [train.py:937] (1/4) Epoch 27, validation: loss=0.1667, simple_loss=0.2664, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 09:18:39,751 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 09:18:41,249 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:43,432 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 09:19:11,464 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:29,335 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:30,289 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.587e+02 5.827e+02 7.595e+02 1.750e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 09:19:41,901 INFO [train.py:903] (1/4) Epoch 27, batch 3050, loss[loss=0.2316, simple_loss=0.3091, pruned_loss=0.07702, over 17178.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06015, over 3817409.59 frames. ], batch size: 101, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:19:54,671 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:20:12,196 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-03 09:20:44,773 INFO [train.py:903] (1/4) Epoch 27, batch 3100, loss[loss=0.277, simple_loss=0.3365, pruned_loss=0.1088, over 13804.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06041, over 3823829.44 frames. 
], batch size: 137, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:20:59,858 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180639.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:21:14,929 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.96 vs. limit=5.0 +2023-04-03 09:21:20,343 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180657.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:29,206 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180664.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:21:33,520 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.719e+02 5.890e+02 7.652e+02 1.677e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 09:21:46,247 INFO [train.py:903] (1/4) Epoch 27, batch 3150, loss[loss=0.2079, simple_loss=0.2903, pruned_loss=0.06277, over 19616.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.06003, over 3834100.85 frames. ], batch size: 57, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:21:52,998 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:53,038 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:54,386 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.21 vs. limit=5.0 +2023-04-03 09:21:57,971 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 09:22:11,319 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:16,785 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 09:22:49,869 INFO [train.py:903] (1/4) Epoch 27, batch 3200, loss[loss=0.2339, simple_loss=0.3102, pruned_loss=0.07882, over 18708.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.05998, over 3830354.25 frames. ], batch size: 74, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:16,646 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:27,986 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180759.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:30,252 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 09:23:39,759 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.255e+02 6.624e+02 9.042e+02 3.460e+03, threshold=1.325e+03, percent-clipped=12.0 +2023-04-03 09:23:51,466 INFO [train.py:903] (1/4) Epoch 27, batch 3250, loss[loss=0.207, simple_loss=0.2929, pruned_loss=0.06057, over 19752.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2831, pruned_loss=0.06021, over 3826154.49 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:59,357 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:07,814 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-03 09:24:36,408 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180813.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:44,424 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.2465, 5.1581, 5.9661, 6.0600, 2.1865, 5.6439, 4.6802, 5.6619], + device='cuda:1'), covar=tensor([0.1708, 0.0796, 0.0598, 0.0652, 0.6039, 0.0894, 0.0690, 0.1165], + device='cuda:1'), in_proj_covar=tensor([0.0819, 0.0781, 0.0994, 0.0874, 0.0864, 0.0753, 0.0585, 0.0919], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 09:24:54,214 INFO [train.py:903] (1/4) Epoch 27, batch 3300, loss[loss=0.219, simple_loss=0.2959, pruned_loss=0.07106, over 19338.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06047, over 3828947.35 frames. ], batch size: 70, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:25:00,021 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 09:25:13,373 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:25:43,900 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.181e+02 6.566e+02 8.708e+02 1.878e+03, threshold=1.313e+03, percent-clipped=7.0 +2023-04-03 09:25:56,106 INFO [train.py:903] (1/4) Epoch 27, batch 3350, loss[loss=0.1907, simple_loss=0.2787, pruned_loss=0.05134, over 19333.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06061, over 3823912.26 frames. ], batch size: 66, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:26:37,264 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 09:27:00,223 INFO [train.py:903] (1/4) Epoch 27, batch 3400, loss[loss=0.209, simple_loss=0.2951, pruned_loss=0.06143, over 19786.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06036, over 3822435.00 frames. ], batch size: 56, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:27:05,039 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180932.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:11,973 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:44,017 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:50,455 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 4.823e+02 5.841e+02 8.287e+02 1.627e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 09:28:02,289 INFO [train.py:903] (1/4) Epoch 27, batch 3450, loss[loss=0.1919, simple_loss=0.2818, pruned_loss=0.05094, over 19531.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2825, pruned_loss=0.0602, over 3831159.83 frames. ], batch size: 56, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:28:07,890 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 09:29:04,453 INFO [train.py:903] (1/4) Epoch 27, batch 3500, loss[loss=0.1829, simple_loss=0.2758, pruned_loss=0.04501, over 19460.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2819, pruned_loss=0.05997, over 3835900.93 frames. 
], batch size: 64, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:29:28,491 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181047.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:29:53,844 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.177e+02 6.114e+02 8.226e+02 1.424e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 09:29:55,469 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181069.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:06,529 INFO [train.py:903] (1/4) Epoch 27, batch 3550, loss[loss=0.1986, simple_loss=0.2843, pruned_loss=0.05651, over 19615.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06031, over 3829562.01 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:30:27,210 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:27,457 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:31:09,215 INFO [train.py:903] (1/4) Epoch 27, batch 3600, loss[loss=0.1906, simple_loss=0.2709, pruned_loss=0.0551, over 19398.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06045, over 3815654.70 frames. ], batch size: 48, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:31:54,790 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.06 vs. limit=5.0 +2023-04-03 09:32:00,761 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 4.881e+02 5.766e+02 7.270e+02 1.755e+03, threshold=1.153e+03, percent-clipped=5.0 +2023-04-03 09:32:12,506 INFO [train.py:903] (1/4) Epoch 27, batch 3650, loss[loss=0.1839, simple_loss=0.2717, pruned_loss=0.04807, over 19765.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06104, over 3810019.95 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:32:24,266 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:32:37,952 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5979, 1.1399, 1.4142, 1.1936, 2.2631, 1.0040, 2.0751, 2.5444], + device='cuda:1'), covar=tensor([0.0666, 0.2833, 0.2723, 0.1682, 0.0837, 0.2032, 0.1087, 0.0430], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0376, 0.0395, 0.0352, 0.0382, 0.0356, 0.0392, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:32:52,770 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:33:15,874 INFO [train.py:903] (1/4) Epoch 27, batch 3700, loss[loss=0.1582, simple_loss=0.237, pruned_loss=0.03971, over 19743.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2845, pruned_loss=0.06158, over 3807018.73 frames. ], batch size: 45, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:06,268 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.357e+02 6.375e+02 8.389e+02 2.807e+03, threshold=1.275e+03, percent-clipped=10.0 +2023-04-03 09:34:17,479 INFO [train.py:903] (1/4) Epoch 27, batch 3750, loss[loss=0.2275, simple_loss=0.3094, pruned_loss=0.07274, over 18802.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2834, pruned_loss=0.06098, over 3822571.16 frames. 
], batch size: 74, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:40,814 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1169, 1.2984, 1.7113, 1.1587, 2.4177, 3.3109, 3.0271, 3.5565], + device='cuda:1'), covar=tensor([0.1710, 0.3951, 0.3528, 0.2586, 0.0678, 0.0225, 0.0245, 0.0312], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0332, 0.0365, 0.0270, 0.0255, 0.0196, 0.0219, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 09:34:48,381 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:34:49,692 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:04,219 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181315.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:20,124 INFO [train.py:903] (1/4) Epoch 27, batch 3800, loss[loss=0.2188, simple_loss=0.2963, pruned_loss=0.07066, over 18122.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2829, pruned_loss=0.06047, over 3824340.00 frames. ], batch size: 83, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:35:20,547 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181328.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:48,531 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 09:36:10,638 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.827e+02 5.403e+02 6.773e+02 8.761e+02 1.536e+03, threshold=1.355e+03, percent-clipped=8.0 +2023-04-03 09:36:22,057 INFO [train.py:903] (1/4) Epoch 27, batch 3850, loss[loss=0.2256, simple_loss=0.304, pruned_loss=0.07363, over 19654.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06121, over 3817091.95 frames. ], batch size: 55, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:36:58,904 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 09:37:25,666 INFO [train.py:903] (1/4) Epoch 27, batch 3900, loss[loss=0.2118, simple_loss=0.2922, pruned_loss=0.06567, over 19536.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06073, over 3828988.79 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:37:30,186 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-03 09:38:13,088 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181465.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:38:17,131 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.394e+02 5.123e+02 6.226e+02 8.139e+02 1.802e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-03 09:38:28,447 INFO [train.py:903] (1/4) Epoch 27, batch 3950, loss[loss=0.1943, simple_loss=0.2725, pruned_loss=0.05806, over 19730.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05982, over 3840395.81 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:30,650 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 09:38:43,806 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181490.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:39:12,745 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 09:39:30,607 INFO [train.py:903] (1/4) Epoch 27, batch 4000, loss[loss=0.1917, simple_loss=0.2773, pruned_loss=0.05305, over 19762.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06064, over 3825856.37 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:39:55,706 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:08,352 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:14,697 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 09:40:18,992 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 09:40:21,514 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.331e+02 6.990e+02 9.796e+02 2.756e+03, threshold=1.398e+03, percent-clipped=11.0 +2023-04-03 09:40:32,612 INFO [train.py:903] (1/4) Epoch 27, batch 4050, loss[loss=0.1794, simple_loss=0.262, pruned_loss=0.04841, over 19842.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2827, pruned_loss=0.06023, over 3833134.48 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:40:38,780 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:41:35,048 INFO [train.py:903] (1/4) Epoch 27, batch 4100, loss[loss=0.2649, simple_loss=0.3257, pruned_loss=0.102, over 13029.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06072, over 3816468.83 frames. ], batch size: 136, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:42:07,723 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 09:42:13,763 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:42:26,979 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 5.142e+02 6.045e+02 7.647e+02 1.406e+03, threshold=1.209e+03, percent-clipped=1.0 +2023-04-03 09:42:35,663 INFO [train.py:903] (1/4) Epoch 27, batch 4150, loss[loss=0.1705, simple_loss=0.2549, pruned_loss=0.04302, over 19732.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06138, over 3818699.62 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:42:58,886 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0573, 2.2368, 2.5344, 2.8350, 2.2027, 2.7305, 2.5016, 2.2071], + device='cuda:1'), covar=tensor([0.4456, 0.4084, 0.1867, 0.2424, 0.4185, 0.2222, 0.4899, 0.3517], + device='cuda:1'), in_proj_covar=tensor([0.0930, 0.1007, 0.0737, 0.0948, 0.0905, 0.0847, 0.0857, 0.0806], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 09:42:59,065 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. 
limit=2.0 +2023-04-03 09:43:05,786 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5584, 1.2585, 1.5497, 1.5164, 3.0887, 1.3182, 2.3417, 3.5472], + device='cuda:1'), covar=tensor([0.0610, 0.3077, 0.2993, 0.2011, 0.0878, 0.2425, 0.1434, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0420, 0.0375, 0.0392, 0.0351, 0.0381, 0.0354, 0.0390, 0.0413], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:43:38,714 INFO [train.py:903] (1/4) Epoch 27, batch 4200, loss[loss=0.1886, simple_loss=0.2799, pruned_loss=0.04859, over 19671.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2837, pruned_loss=0.06084, over 3832342.57 frames. ], batch size: 58, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:42,099 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 09:43:55,203 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4040, 1.7069, 1.8996, 1.8747, 4.0225, 1.4445, 2.8668, 4.2340], + device='cuda:1'), covar=tensor([0.0517, 0.2764, 0.2744, 0.1919, 0.0707, 0.2537, 0.1497, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0377, 0.0394, 0.0352, 0.0382, 0.0356, 0.0392, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:44:30,943 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.083e+02 6.482e+02 7.948e+02 1.533e+03, threshold=1.296e+03, percent-clipped=4.0 +2023-04-03 09:44:36,211 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:44:40,470 INFO [train.py:903] (1/4) Epoch 27, batch 4250, loss[loss=0.2065, simple_loss=0.2841, pruned_loss=0.06444, over 19488.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2827, pruned_loss=0.06029, over 3838202.24 frames. ], batch size: 49, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:44:56,632 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 09:45:08,439 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 09:45:43,725 INFO [train.py:903] (1/4) Epoch 27, batch 4300, loss[loss=0.1666, simple_loss=0.2533, pruned_loss=0.03997, over 19836.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2832, pruned_loss=0.06055, over 3836794.81 frames. 
], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:45:46,607 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.4680, 5.0720, 3.1830, 4.4532, 1.3453, 5.0324, 4.8778, 5.0293], + device='cuda:1'), covar=tensor([0.0408, 0.0770, 0.1761, 0.0699, 0.3726, 0.0605, 0.0867, 0.1023], + device='cuda:1'), in_proj_covar=tensor([0.0527, 0.0427, 0.0512, 0.0357, 0.0406, 0.0452, 0.0448, 0.0477], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:46:16,691 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0365, 4.4609, 4.7285, 4.7273, 1.8313, 4.4336, 3.9064, 4.4854], + device='cuda:1'), covar=tensor([0.1626, 0.0772, 0.0611, 0.0675, 0.5812, 0.0792, 0.0682, 0.1059], + device='cuda:1'), in_proj_covar=tensor([0.0823, 0.0784, 0.0994, 0.0870, 0.0864, 0.0755, 0.0588, 0.0922], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 09:46:36,879 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 4.917e+02 6.450e+02 8.224e+02 1.543e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-04-03 09:46:40,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 09:46:47,397 INFO [train.py:903] (1/4) Epoch 27, batch 4350, loss[loss=0.1812, simple_loss=0.2629, pruned_loss=0.04974, over 19721.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2822, pruned_loss=0.06008, over 3839372.20 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:46:59,906 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4659, 4.1164, 2.6971, 3.6077, 1.2668, 4.0018, 3.8708, 4.0032], + device='cuda:1'), covar=tensor([0.0693, 0.0962, 0.2063, 0.0848, 0.3734, 0.0735, 0.1043, 0.1286], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0428, 0.0513, 0.0358, 0.0408, 0.0454, 0.0450, 0.0479], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:47:05,497 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:47:25,481 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-03 09:47:49,881 INFO [train.py:903] (1/4) Epoch 27, batch 4400, loss[loss=0.1744, simple_loss=0.2552, pruned_loss=0.0468, over 19618.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2826, pruned_loss=0.06032, over 3820629.92 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:15,011 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 09:48:24,205 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 09:48:42,710 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.593e+02 4.919e+02 6.507e+02 9.105e+02 1.976e+03, threshold=1.301e+03, percent-clipped=10.0 +2023-04-03 09:48:52,990 INFO [train.py:903] (1/4) Epoch 27, batch 4450, loss[loss=0.2369, simple_loss=0.3068, pruned_loss=0.08351, over 18793.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06124, over 3822602.31 frames. ], batch size: 74, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:53,752 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-03 09:48:59,257 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0383, 2.1427, 2.3376, 2.6338, 2.0486, 2.5387, 2.2745, 2.1595], + device='cuda:1'), covar=tensor([0.4212, 0.4124, 0.1999, 0.2732, 0.4408, 0.2360, 0.4820, 0.3430], + device='cuda:1'), in_proj_covar=tensor([0.0930, 0.1006, 0.0739, 0.0948, 0.0907, 0.0848, 0.0855, 0.0805], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 09:49:16,012 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:30,783 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:36,523 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6931, 4.2774, 2.8778, 3.7604, 0.9706, 4.2387, 4.1276, 4.1813], + device='cuda:1'), covar=tensor([0.0576, 0.1027, 0.1869, 0.0840, 0.4032, 0.0642, 0.0925, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0427, 0.0513, 0.0357, 0.0407, 0.0453, 0.0449, 0.0477], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:49:56,884 INFO [train.py:903] (1/4) Epoch 27, batch 4500, loss[loss=0.1891, simple_loss=0.2689, pruned_loss=0.05459, over 19743.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2839, pruned_loss=0.06128, over 3816894.07 frames. ], batch size: 51, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:49:59,816 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:31,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:49,974 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.059e+02 6.218e+02 7.785e+02 2.105e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 09:51:00,204 INFO [train.py:903] (1/4) Epoch 27, batch 4550, loss[loss=0.203, simple_loss=0.2837, pruned_loss=0.06113, over 19858.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2851, pruned_loss=0.06171, over 3815250.30 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:51:09,775 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 09:51:32,217 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 09:51:35,765 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6252, 2.2853, 1.6534, 1.5188, 2.1252, 1.3706, 1.3856, 1.9826], + device='cuda:1'), covar=tensor([0.1108, 0.0835, 0.1120, 0.0991, 0.0558, 0.1399, 0.0838, 0.0538], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0321, 0.0337, 0.0273, 0.0251, 0.0345, 0.0292, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:52:02,911 INFO [train.py:903] (1/4) Epoch 27, batch 4600, loss[loss=0.2, simple_loss=0.2826, pruned_loss=0.05866, over 17226.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2842, pruned_loss=0.06135, over 3826101.67 frames. 
], batch size: 101, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:52:54,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 4.830e+02 5.724e+02 7.323e+02 1.391e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 09:53:05,189 INFO [train.py:903] (1/4) Epoch 27, batch 4650, loss[loss=0.1757, simple_loss=0.2647, pruned_loss=0.04336, over 19670.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06018, over 3837075.61 frames. ], batch size: 53, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:53:22,617 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 09:53:34,185 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 09:53:57,746 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5693, 1.8231, 2.1292, 1.8817, 3.1267, 2.5745, 3.4678, 1.7016], + device='cuda:1'), covar=tensor([0.2537, 0.4216, 0.2635, 0.1947, 0.1584, 0.2213, 0.1513, 0.4232], + device='cuda:1'), in_proj_covar=tensor([0.0552, 0.0667, 0.0751, 0.0505, 0.0636, 0.0544, 0.0668, 0.0570], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 09:54:07,706 INFO [train.py:903] (1/4) Epoch 27, batch 4700, loss[loss=0.2226, simple_loss=0.2964, pruned_loss=0.07443, over 19629.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2832, pruned_loss=0.05976, over 3835622.13 frames. ], batch size: 50, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:54:30,876 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 09:54:51,517 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:54:59,130 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.994e+02 4.730e+02 5.895e+02 7.660e+02 1.174e+03, threshold=1.179e+03, percent-clipped=2.0 +2023-04-03 09:55:10,405 INFO [train.py:903] (1/4) Epoch 27, batch 4750, loss[loss=0.2057, simple_loss=0.2896, pruned_loss=0.06093, over 19510.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.059, over 3846733.15 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:55:22,518 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182288.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:55:36,008 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:12,226 INFO [train.py:903] (1/4) Epoch 27, batch 4800, loss[loss=0.1798, simple_loss=0.2673, pruned_loss=0.04613, over 19674.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.06004, over 3816024.35 frames. ], batch size: 53, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:56:26,913 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182340.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:27,639 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-03 09:56:42,892 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6110, 2.8542, 2.3199, 2.8182, 2.6899, 2.3786, 2.1970, 2.7778], + device='cuda:1'), covar=tensor([0.1051, 0.1405, 0.1461, 0.0976, 0.1236, 0.0500, 0.1480, 0.0626], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0257, 0.0307, 0.0256, 0.0321, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 09:57:03,570 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.862e+02 5.780e+02 7.243e+02 1.108e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-03 09:57:13,598 INFO [train.py:903] (1/4) Epoch 27, batch 4850, loss[loss=0.2112, simple_loss=0.2921, pruned_loss=0.06517, over 19332.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2845, pruned_loss=0.06076, over 3807071.28 frames. ], batch size: 66, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:57:36,912 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 09:57:58,336 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 09:58:03,902 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 09:58:03,927 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 09:58:13,254 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 09:58:14,420 INFO [train.py:903] (1/4) Epoch 27, batch 4900, loss[loss=0.2275, simple_loss=0.3031, pruned_loss=0.07592, over 19765.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06081, over 3798843.53 frames. ], batch size: 63, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:58:34,893 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 09:58:50,325 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182455.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:59:07,216 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.786e+02 5.871e+02 7.388e+02 1.622e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 09:59:18,868 INFO [train.py:903] (1/4) Epoch 27, batch 4950, loss[loss=0.1882, simple_loss=0.2787, pruned_loss=0.04892, over 19624.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2842, pruned_loss=0.06055, over 3808917.46 frames. ], batch size: 57, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:59:36,557 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 10:00:00,881 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 10:00:21,851 INFO [train.py:903] (1/4) Epoch 27, batch 5000, loss[loss=0.1637, simple_loss=0.2427, pruned_loss=0.04231, over 19765.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06036, over 3798385.22 frames. ], batch size: 47, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:00:27,430 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182532.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:00:32,550 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-03 10:00:44,445 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 10:01:15,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.878e+02 5.976e+02 7.448e+02 1.686e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 10:01:25,326 INFO [train.py:903] (1/4) Epoch 27, batch 5050, loss[loss=0.21, simple_loss=0.2937, pruned_loss=0.0631, over 19752.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06059, over 3793867.86 frames. ], batch size: 63, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:01:41,295 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.49 vs. limit=5.0 +2023-04-03 10:02:02,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 10:02:27,484 INFO [train.py:903] (1/4) Epoch 27, batch 5100, loss[loss=0.2074, simple_loss=0.2894, pruned_loss=0.06269, over 18765.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06029, over 3787586.03 frames. ], batch size: 74, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:37,796 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 10:02:42,003 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 10:02:46,621 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 10:02:46,774 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:03:19,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.110e+02 6.408e+02 8.268e+02 2.195e+03, threshold=1.282e+03, percent-clipped=9.0 +2023-04-03 10:03:30,281 INFO [train.py:903] (1/4) Epoch 27, batch 5150, loss[loss=0.205, simple_loss=0.2955, pruned_loss=0.05728, over 19665.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06051, over 3776794.79 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:03:44,254 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 10:04:12,442 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182711.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:21,109 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:04:23,045 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 10:04:34,488 INFO [train.py:903] (1/4) Epoch 27, batch 5200, loss[loss=0.2003, simple_loss=0.2768, pruned_loss=0.06191, over 19853.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.06, over 3787707.95 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:04:45,073 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:50,601 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-03 10:05:11,541 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:05:17,511 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7402, 1.8588, 2.1811, 2.2362, 1.6761, 2.1448, 2.1552, 1.9440], + device='cuda:1'), covar=tensor([0.4289, 0.3973, 0.1959, 0.2569, 0.4248, 0.2292, 0.4918, 0.3573], + device='cuda:1'), in_proj_covar=tensor([0.0927, 0.1005, 0.0738, 0.0946, 0.0905, 0.0844, 0.0854, 0.0803], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 10:05:28,503 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.726e+02 5.796e+02 7.282e+02 1.371e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-03 10:05:35,290 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 10:05:37,678 INFO [train.py:903] (1/4) Epoch 27, batch 5250, loss[loss=0.1868, simple_loss=0.2718, pruned_loss=0.05095, over 19538.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06012, over 3785982.39 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:06:39,522 INFO [train.py:903] (1/4) Epoch 27, batch 5300, loss[loss=0.166, simple_loss=0.2415, pruned_loss=0.04524, over 19751.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2825, pruned_loss=0.05979, over 3788949.52 frames. ], batch size: 46, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:06:57,455 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 10:07:31,280 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4783, 1.5698, 1.8562, 1.7512, 2.6789, 2.2657, 2.7618, 1.1905], + device='cuda:1'), covar=tensor([0.2572, 0.4465, 0.2861, 0.1975, 0.1471, 0.2219, 0.1458, 0.4822], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0668, 0.0753, 0.0505, 0.0635, 0.0545, 0.0669, 0.0571], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:07:34,153 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.440e+02 4.985e+02 5.848e+02 7.587e+02 2.195e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 10:07:37,804 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:39,953 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:42,167 INFO [train.py:903] (1/4) Epoch 27, batch 5350, loss[loss=0.1847, simple_loss=0.2613, pruned_loss=0.05406, over 19391.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2825, pruned_loss=0.06001, over 3787889.54 frames. ], batch size: 48, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:07:44,841 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:08:16,929 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 10:08:46,439 INFO [train.py:903] (1/4) Epoch 27, batch 5400, loss[loss=0.1769, simple_loss=0.2523, pruned_loss=0.05081, over 19767.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05952, over 3806409.20 frames. 
], batch size: 45, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:09:18,292 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5246, 1.6075, 1.8818, 1.7760, 2.8149, 2.2927, 3.0070, 1.3085], + device='cuda:1'), covar=tensor([0.2609, 0.4485, 0.2935, 0.2055, 0.1508, 0.2291, 0.1393, 0.4840], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0669, 0.0755, 0.0507, 0.0636, 0.0546, 0.0670, 0.0573], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 10:09:41,330 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.624e+02 4.932e+02 6.009e+02 7.341e+02 1.388e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 10:09:49,110 INFO [train.py:903] (1/4) Epoch 27, batch 5450, loss[loss=0.2323, simple_loss=0.3058, pruned_loss=0.0794, over 19504.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05986, over 3812506.13 frames. ], batch size: 64, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:10:04,459 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:35,589 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183014.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:50,992 INFO [train.py:903] (1/4) Epoch 27, batch 5500, loss[loss=0.2902, simple_loss=0.3534, pruned_loss=0.1135, over 12932.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2814, pruned_loss=0.05946, over 3814219.43 frames. ], batch size: 137, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:05,209 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183039.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:11:13,896 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 10:11:26,361 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0768, 2.0632, 1.7536, 2.1040, 1.8543, 1.7689, 1.7579, 2.0100], + device='cuda:1'), covar=tensor([0.1147, 0.1527, 0.1621, 0.1128, 0.1476, 0.0624, 0.1545, 0.0826], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0357, 0.0317, 0.0258, 0.0307, 0.0256, 0.0320, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:11:45,355 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.032e+02 6.184e+02 7.491e+02 1.557e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 10:11:52,115 INFO [train.py:903] (1/4) Epoch 27, batch 5550, loss[loss=0.2015, simple_loss=0.2937, pruned_loss=0.05465, over 19515.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05974, over 3810089.17 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:57,938 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-03 10:12:19,477 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3295, 3.8721, 3.9717, 3.9733, 1.6358, 3.7897, 3.3133, 3.7429], + device='cuda:1'), covar=tensor([0.1727, 0.0930, 0.0766, 0.0852, 0.6102, 0.1065, 0.0738, 0.1301], + device='cuda:1'), in_proj_covar=tensor([0.0818, 0.0782, 0.0991, 0.0871, 0.0862, 0.0753, 0.0586, 0.0919], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 10:12:39,053 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5039, 2.0582, 2.3029, 3.1162, 2.1724, 2.4879, 2.5295, 2.3283], + device='cuda:1'), covar=tensor([0.0744, 0.0924, 0.0907, 0.0715, 0.0847, 0.0773, 0.0920, 0.0686], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0225, 0.0229, 0.0241, 0.0226, 0.0213, 0.0188, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 10:12:46,687 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 10:12:50,105 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0423, 3.7099, 2.5622, 3.2890, 0.8012, 3.6629, 3.5415, 3.5544], + device='cuda:1'), covar=tensor([0.0816, 0.1073, 0.1974, 0.0955, 0.4013, 0.0767, 0.1096, 0.1356], + device='cuda:1'), in_proj_covar=tensor([0.0529, 0.0426, 0.0515, 0.0358, 0.0410, 0.0453, 0.0450, 0.0478], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:12:56,188 INFO [train.py:903] (1/4) Epoch 27, batch 5600, loss[loss=0.2841, simple_loss=0.3594, pruned_loss=0.1044, over 19709.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2829, pruned_loss=0.06015, over 3799034.83 frames. ], batch size: 63, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:13:25,412 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0672, 4.5248, 4.8470, 4.8158, 1.7517, 4.5120, 3.8646, 4.5642], + device='cuda:1'), covar=tensor([0.1668, 0.0808, 0.0547, 0.0659, 0.6363, 0.0884, 0.0696, 0.1086], + device='cuda:1'), in_proj_covar=tensor([0.0816, 0.0780, 0.0987, 0.0868, 0.0859, 0.0750, 0.0585, 0.0917], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 10:13:51,540 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.790e+02 6.069e+02 7.863e+02 1.689e+03, threshold=1.214e+03, percent-clipped=3.0 +2023-04-03 10:13:59,319 INFO [train.py:903] (1/4) Epoch 27, batch 5650, loss[loss=0.1541, simple_loss=0.2311, pruned_loss=0.03853, over 19732.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2833, pruned_loss=0.06065, over 3804004.90 frames. ], batch size: 46, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:14:44,837 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 10:14:49,511 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:14:56,356 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:00,999 INFO [train.py:903] (1/4) Epoch 27, batch 5700, loss[loss=0.2041, simple_loss=0.2877, pruned_loss=0.0603, over 19679.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2836, pruned_loss=0.06083, over 3787406.35 frames. 
], batch size: 55, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:15:25,105 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:40,177 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4296, 2.4822, 2.6297, 3.0861, 2.5410, 2.9286, 2.6051, 2.4885], + device='cuda:1'), covar=tensor([0.3686, 0.3527, 0.1781, 0.2343, 0.3735, 0.2104, 0.4083, 0.2992], + device='cuda:1'), in_proj_covar=tensor([0.0930, 0.1009, 0.0740, 0.0946, 0.0908, 0.0847, 0.0857, 0.0804], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 10:15:54,498 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.103e+02 5.954e+02 7.593e+02 1.308e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-03 10:15:54,918 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:55,553 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-03 10:15:59,212 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 10:16:01,590 INFO [train.py:903] (1/4) Epoch 27, batch 5750, loss[loss=0.1689, simple_loss=0.241, pruned_loss=0.04837, over 19785.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2842, pruned_loss=0.06139, over 3785374.16 frames. ], batch size: 48, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:16:08,296 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 10:16:12,815 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 10:16:32,338 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4517, 1.3238, 1.5604, 1.3894, 3.0476, 1.1328, 2.4026, 3.4811], + device='cuda:1'), covar=tensor([0.0574, 0.2878, 0.2940, 0.1976, 0.0730, 0.2512, 0.1149, 0.0271], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0375, 0.0395, 0.0351, 0.0382, 0.0356, 0.0392, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:17:05,413 INFO [train.py:903] (1/4) Epoch 27, batch 5800, loss[loss=0.1602, simple_loss=0.2334, pruned_loss=0.0435, over 19775.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.06078, over 3791238.77 frames. 
], batch size: 45, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:17:11,539 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8613, 1.2462, 1.6233, 1.5457, 4.2488, 1.2253, 2.7123, 4.6886], + device='cuda:1'), covar=tensor([0.0596, 0.3822, 0.3470, 0.2441, 0.1160, 0.3145, 0.1519, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0376, 0.0395, 0.0352, 0.0383, 0.0357, 0.0394, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:17:11,585 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183333.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:19,543 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:59,574 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.364e+02 7.027e+02 9.052e+02 1.891e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-03 10:18:07,743 INFO [train.py:903] (1/4) Epoch 27, batch 5850, loss[loss=0.184, simple_loss=0.2707, pruned_loss=0.04866, over 19307.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06101, over 3808757.98 frames. ], batch size: 66, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:19:01,473 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4430, 2.0500, 1.6105, 1.4477, 1.9760, 1.2323, 1.3797, 1.8211], + device='cuda:1'), covar=tensor([0.1050, 0.0939, 0.1131, 0.0888, 0.0561, 0.1370, 0.0781, 0.0531], + device='cuda:1'), in_proj_covar=tensor([0.0300, 0.0318, 0.0335, 0.0272, 0.0249, 0.0342, 0.0291, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:19:07,141 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:19:08,994 INFO [train.py:903] (1/4) Epoch 27, batch 5900, loss[loss=0.2196, simple_loss=0.3045, pruned_loss=0.06729, over 19412.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06093, over 3805551.87 frames. ], batch size: 70, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:19:09,042 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 10:19:21,567 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 10:19:32,071 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 10:20:00,754 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 10:20:03,306 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 4.868e+02 5.663e+02 7.388e+02 1.454e+03, threshold=1.133e+03, percent-clipped=1.0 +2023-04-03 10:20:10,248 INFO [train.py:903] (1/4) Epoch 27, batch 5950, loss[loss=0.2219, simple_loss=0.3051, pruned_loss=0.06939, over 17344.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2842, pruned_loss=0.06098, over 3810691.51 frames. ], batch size: 101, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,625 INFO [train.py:903] (1/4) Epoch 27, batch 6000, loss[loss=0.1783, simple_loss=0.2699, pruned_loss=0.04336, over 19678.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06101, over 3805882.31 frames. 
], batch size: 59, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,625 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 10:21:19,204 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6008, 1.6353, 1.6530, 1.4016, 1.4188, 1.3997, 0.3653, 0.7263], + device='cuda:1'), covar=tensor([0.0867, 0.0813, 0.0509, 0.0799, 0.1572, 0.0957, 0.1518, 0.1385], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0361, 0.0368, 0.0391, 0.0469, 0.0394, 0.0344, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:21:25,589 INFO [train.py:937] (1/4) Epoch 27, validation: loss=0.1675, simple_loss=0.2669, pruned_loss=0.03401, over 944034.00 frames. +2023-04-03 10:21:25,590 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 10:22:22,434 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.376e+02 6.105e+02 7.773e+02 1.848e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 10:22:28,411 INFO [train.py:903] (1/4) Epoch 27, batch 6050, loss[loss=0.2252, simple_loss=0.3069, pruned_loss=0.07179, over 18837.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.0612, over 3813203.56 frames. ], batch size: 74, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:22:43,111 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183589.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:50,991 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:14,128 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:20,871 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183620.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:31,578 INFO [train.py:903] (1/4) Epoch 27, batch 6100, loss[loss=0.277, simple_loss=0.3372, pruned_loss=0.1084, over 13326.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2827, pruned_loss=0.0603, over 3826447.50 frames. ], batch size: 135, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:23:37,180 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:37,353 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5468, 1.7285, 2.0421, 1.8332, 3.3511, 2.6157, 3.6291, 1.6478], + device='cuda:1'), covar=tensor([0.2555, 0.4325, 0.2826, 0.1959, 0.1379, 0.2158, 0.1359, 0.4333], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0670, 0.0756, 0.0509, 0.0638, 0.0547, 0.0670, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 10:24:27,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.311e+02 5.176e+02 6.035e+02 7.763e+02 1.396e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-03 10:24:33,563 INFO [train.py:903] (1/4) Epoch 27, batch 6150, loss[loss=0.1901, simple_loss=0.2772, pruned_loss=0.05153, over 19527.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05994, over 3823809.05 frames. ], batch size: 54, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:25:01,325 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 10:25:34,990 INFO [train.py:903] (1/4) Epoch 27, batch 6200, loss[loss=0.2284, simple_loss=0.3123, pruned_loss=0.07224, over 18058.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05947, over 3831973.21 frames. ], batch size: 83, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:26:28,519 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183770.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:26:31,939 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.839e+02 5.826e+02 7.637e+02 1.855e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 10:26:37,752 INFO [train.py:903] (1/4) Epoch 27, batch 6250, loss[loss=0.1932, simple_loss=0.2726, pruned_loss=0.05693, over 19602.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2829, pruned_loss=0.06023, over 3834820.38 frames. ], batch size: 52, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:27:08,077 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 10:27:40,120 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5801, 1.2436, 1.2429, 1.5136, 1.1444, 1.3491, 1.2467, 1.4131], + device='cuda:1'), covar=tensor([0.1215, 0.1282, 0.1662, 0.1111, 0.1377, 0.0664, 0.1666, 0.0944], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0256, 0.0306, 0.0256, 0.0320, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:27:40,819 INFO [train.py:903] (1/4) Epoch 27, batch 6300, loss[loss=0.2455, simple_loss=0.3273, pruned_loss=0.08187, over 19265.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.283, pruned_loss=0.06017, over 3836378.74 frames. ], batch size: 66, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:20,269 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:28:36,678 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.928e+02 5.882e+02 7.199e+02 1.705e+03, threshold=1.176e+03, percent-clipped=2.0 +2023-04-03 10:28:43,631 INFO [train.py:903] (1/4) Epoch 27, batch 6350, loss[loss=0.22, simple_loss=0.2913, pruned_loss=0.07438, over 19748.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2831, pruned_loss=0.06041, over 3822468.30 frames. ], batch size: 51, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:51,881 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:29:43,990 INFO [train.py:903] (1/4) Epoch 27, batch 6400, loss[loss=0.192, simple_loss=0.2799, pruned_loss=0.05206, over 19775.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2839, pruned_loss=0.06079, over 3818002.89 frames. 
], batch size: 54, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:30:39,967 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.757e+02 6.035e+02 7.575e+02 1.901e+03, threshold=1.207e+03, percent-clipped=7.0 +2023-04-03 10:30:43,676 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:30:45,040 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2936, 1.3240, 1.6916, 1.2464, 2.5180, 3.4097, 3.1219, 3.5910], + device='cuda:1'), covar=tensor([0.1543, 0.3837, 0.3453, 0.2626, 0.0627, 0.0194, 0.0221, 0.0256], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0270, 0.0253, 0.0195, 0.0219, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 10:30:45,878 INFO [train.py:903] (1/4) Epoch 27, batch 6450, loss[loss=0.1908, simple_loss=0.2675, pruned_loss=0.05709, over 19479.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06105, over 3821994.90 frames. ], batch size: 49, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:28,488 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 10:31:42,577 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3188, 1.9732, 1.5130, 1.3976, 1.8606, 1.2492, 1.3233, 1.8282], + device='cuda:1'), covar=tensor([0.0952, 0.0834, 0.1133, 0.0892, 0.0496, 0.1343, 0.0709, 0.0476], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0320, 0.0337, 0.0273, 0.0251, 0.0344, 0.0293, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:31:50,547 INFO [train.py:903] (1/4) Epoch 27, batch 6500, loss[loss=0.1959, simple_loss=0.2877, pruned_loss=0.05209, over 19607.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.06006, over 3828363.62 frames. ], batch size: 61, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:52,981 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 10:32:04,489 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.0555, 1.7774, 2.0164, 1.7500, 4.5867, 1.2976, 2.6933, 4.9979], + device='cuda:1'), covar=tensor([0.0478, 0.2848, 0.2886, 0.2007, 0.0723, 0.2636, 0.1463, 0.0171], + device='cuda:1'), in_proj_covar=tensor([0.0424, 0.0378, 0.0397, 0.0354, 0.0385, 0.0357, 0.0395, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:32:11,857 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-03 10:32:40,167 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:32:46,399 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.542e+02 5.565e+02 7.286e+02 1.442e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-04-03 10:32:53,091 INFO [train.py:903] (1/4) Epoch 27, batch 6550, loss[loss=0.1832, simple_loss=0.2756, pruned_loss=0.04537, over 19767.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.05998, over 3827495.95 frames. 
], batch size: 56, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:33:08,661 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:33:24,683 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6105, 1.6427, 1.8486, 1.8163, 2.6559, 2.3556, 2.7898, 1.1913], + device='cuda:1'), covar=tensor([0.2458, 0.4460, 0.2857, 0.1986, 0.1495, 0.2143, 0.1422, 0.4804], + device='cuda:1'), in_proj_covar=tensor([0.0552, 0.0669, 0.0751, 0.0507, 0.0636, 0.0544, 0.0666, 0.0571], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:33:51,928 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2240, 1.2989, 1.6058, 1.2367, 2.5503, 3.3747, 3.1092, 3.5867], + device='cuda:1'), covar=tensor([0.1588, 0.3887, 0.3557, 0.2586, 0.0646, 0.0213, 0.0259, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0332, 0.0363, 0.0271, 0.0253, 0.0195, 0.0220, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 10:33:55,011 INFO [train.py:903] (1/4) Epoch 27, batch 6600, loss[loss=0.2346, simple_loss=0.3124, pruned_loss=0.07845, over 19276.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05987, over 3833650.07 frames. ], batch size: 66, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:34:06,002 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4136, 2.4027, 2.5763, 3.2195, 2.4472, 3.0515, 2.4937, 2.4608], + device='cuda:1'), covar=tensor([0.4250, 0.4371, 0.1921, 0.2469, 0.4593, 0.2156, 0.5354, 0.3360], + device='cuda:1'), in_proj_covar=tensor([0.0933, 0.1009, 0.0740, 0.0947, 0.0909, 0.0846, 0.0859, 0.0803], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 10:34:11,902 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:43,108 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:46,826 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 10:34:46,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 10:34:50,802 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.027e+02 4.833e+02 5.798e+02 7.178e+02 1.551e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 10:34:58,088 INFO [train.py:903] (1/4) Epoch 27, batch 6650, loss[loss=0.1877, simple_loss=0.2805, pruned_loss=0.04749, over 19455.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.283, pruned_loss=0.05963, over 3824897.13 frames. ], batch size: 64, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:35:30,806 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184204.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:35:59,896 INFO [train.py:903] (1/4) Epoch 27, batch 6700, loss[loss=0.2087, simple_loss=0.2854, pruned_loss=0.06605, over 19691.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06004, over 3821091.67 frames. 
], batch size: 53, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:36:52,206 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.122e+02 6.084e+02 8.596e+02 2.606e+03, threshold=1.217e+03, percent-clipped=9.0 +2023-04-03 10:36:57,977 INFO [train.py:903] (1/4) Epoch 27, batch 6750, loss[loss=0.1902, simple_loss=0.275, pruned_loss=0.05266, over 18253.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06048, over 3816114.45 frames. ], batch size: 83, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:37:31,820 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3486, 4.0099, 2.5543, 3.5200, 0.8463, 3.9258, 3.8118, 3.9233], + device='cuda:1'), covar=tensor([0.0752, 0.1050, 0.2053, 0.0858, 0.4027, 0.0740, 0.1017, 0.1180], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0425, 0.0516, 0.0357, 0.0407, 0.0455, 0.0451, 0.0479], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:37:44,317 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:37:54,076 INFO [train.py:903] (1/4) Epoch 27, batch 6800, loss[loss=0.2136, simple_loss=0.2911, pruned_loss=0.06805, over 19614.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06149, over 3802813.14 frames. ], batch size: 57, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:38:15,929 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:38:39,949 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 10:38:41,605 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 10:38:43,614 INFO [train.py:903] (1/4) Epoch 28, batch 0, loss[loss=0.2313, simple_loss=0.3111, pruned_loss=0.07572, over 19657.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3111, pruned_loss=0.07572, over 19657.00 frames. ], batch size: 55, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:38:43,615 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 10:38:54,475 INFO [train.py:937] (1/4) Epoch 28, validation: loss=0.1665, simple_loss=0.2666, pruned_loss=0.03316, over 944034.00 frames. +2023-04-03 10:38:54,476 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 10:39:08,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 10:39:14,468 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:15,177 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.715e+02 5.190e+02 6.304e+02 8.212e+02 1.288e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-03 10:39:21,702 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:57,733 INFO [train.py:903] (1/4) Epoch 28, batch 50, loss[loss=0.166, simple_loss=0.2465, pruned_loss=0.04271, over 19756.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2806, pruned_loss=0.05811, over 866180.12 frames. 
], batch size: 45, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:40:04,863 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:40:25,864 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4914, 1.6389, 1.8717, 1.7859, 2.5738, 2.2698, 2.7757, 1.1652], + device='cuda:1'), covar=tensor([0.2541, 0.4426, 0.2847, 0.1960, 0.1590, 0.2224, 0.1488, 0.4853], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0668, 0.0751, 0.0506, 0.0636, 0.0544, 0.0668, 0.0571], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:40:32,268 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 10:40:58,079 INFO [train.py:903] (1/4) Epoch 28, batch 100, loss[loss=0.2012, simple_loss=0.2874, pruned_loss=0.0575, over 18126.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2794, pruned_loss=0.05739, over 1535157.98 frames. ], batch size: 83, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:41:08,321 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 10:41:18,601 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 4.572e+02 5.776e+02 7.316e+02 1.195e+03, threshold=1.155e+03, percent-clipped=0.0 +2023-04-03 10:41:56,020 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:41:58,099 INFO [train.py:903] (1/4) Epoch 28, batch 150, loss[loss=0.1602, simple_loss=0.2393, pruned_loss=0.0406, over 19719.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2813, pruned_loss=0.05925, over 2041962.95 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:42:24,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:42:57,418 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 10:42:58,575 INFO [train.py:903] (1/4) Epoch 28, batch 200, loss[loss=0.1852, simple_loss=0.2792, pruned_loss=0.0456, over 18123.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2821, pruned_loss=0.05992, over 2438615.72 frames. 
], batch size: 83, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:43:19,481 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 4.969e+02 6.258e+02 7.516e+02 2.266e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 10:43:22,260 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184575.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:24,687 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9835, 1.7777, 1.6953, 2.0188, 1.6869, 1.6669, 1.6536, 1.8746], + device='cuda:1'), covar=tensor([0.1126, 0.1641, 0.1522, 0.1162, 0.1499, 0.0626, 0.1559, 0.0811], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0360, 0.0320, 0.0258, 0.0307, 0.0258, 0.0321, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:43:51,887 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:59,861 INFO [train.py:903] (1/4) Epoch 28, batch 250, loss[loss=0.2497, simple_loss=0.3192, pruned_loss=0.0901, over 19543.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2836, pruned_loss=0.06079, over 2744999.45 frames. ], batch size: 56, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:01,674 INFO [train.py:903] (1/4) Epoch 28, batch 300, loss[loss=0.249, simple_loss=0.3192, pruned_loss=0.0894, over 19620.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2831, pruned_loss=0.06029, over 2991804.47 frames. ], batch size: 61, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:22,244 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.936e+02 6.396e+02 8.049e+02 1.564e+03, threshold=1.279e+03, percent-clipped=7.0 +2023-04-03 10:45:29,098 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 10:45:40,684 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1231, 1.2596, 1.6732, 1.0147, 2.3646, 3.0327, 2.7887, 3.2614], + device='cuda:1'), covar=tensor([0.1702, 0.4136, 0.3571, 0.2884, 0.0671, 0.0236, 0.0276, 0.0339], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0333, 0.0364, 0.0272, 0.0255, 0.0196, 0.0220, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 10:46:02,770 INFO [train.py:903] (1/4) Epoch 28, batch 350, loss[loss=0.1996, simple_loss=0.2911, pruned_loss=0.05405, over 19692.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06057, over 3189196.01 frames. ], batch size: 59, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:46:06,321 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-03 10:46:11,121 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1449, 1.3508, 1.6474, 1.0353, 2.3244, 2.9847, 2.7268, 3.1848], + device='cuda:1'), covar=tensor([0.1687, 0.3890, 0.3568, 0.2924, 0.0717, 0.0297, 0.0287, 0.0369], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0334, 0.0364, 0.0272, 0.0255, 0.0196, 0.0220, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 10:46:20,186 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184721.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:47:04,866 INFO [train.py:903] (1/4) Epoch 28, batch 400, loss[loss=0.1953, simple_loss=0.2743, pruned_loss=0.0581, over 19469.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06061, over 3342713.13 frames. ], batch size: 49, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:47:07,990 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 10:47:24,994 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.910e+02 5.909e+02 7.518e+02 1.907e+03, threshold=1.182e+03, percent-clipped=3.0 +2023-04-03 10:47:33,359 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5847, 1.3135, 1.5246, 1.1990, 2.1929, 1.0520, 2.2181, 2.5752], + device='cuda:1'), covar=tensor([0.0760, 0.2804, 0.2781, 0.1804, 0.0932, 0.2192, 0.0964, 0.0437], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0380, 0.0398, 0.0354, 0.0385, 0.0359, 0.0397, 0.0417], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:47:38,186 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:47:54,349 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8396, 1.6872, 1.4862, 1.8047, 1.5985, 1.5508, 1.4879, 1.7090], + device='cuda:1'), covar=tensor([0.1242, 0.1232, 0.1690, 0.1082, 0.1258, 0.0692, 0.1655, 0.0853], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0359, 0.0318, 0.0258, 0.0306, 0.0257, 0.0321, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:48:05,123 INFO [train.py:903] (1/4) Epoch 28, batch 450, loss[loss=0.2555, simple_loss=0.3304, pruned_loss=0.09028, over 18118.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.0604, over 3438655.55 frames. 
], batch size: 83, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:48:07,819 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184808.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:20,498 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0677, 1.9006, 1.7813, 2.0702, 1.7032, 1.7316, 1.7491, 2.0164], + device='cuda:1'), covar=tensor([0.1125, 0.1481, 0.1568, 0.1049, 0.1487, 0.0612, 0.1529, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0358, 0.0318, 0.0257, 0.0306, 0.0256, 0.0320, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:48:32,961 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1295, 1.3123, 1.4395, 1.3511, 2.7601, 1.0712, 2.1969, 3.1462], + device='cuda:1'), covar=tensor([0.0568, 0.2848, 0.3029, 0.1942, 0.0717, 0.2501, 0.1258, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0424, 0.0379, 0.0398, 0.0353, 0.0384, 0.0358, 0.0396, 0.0417], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:48:38,475 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 10:48:39,491 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 10:48:43,493 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:56,807 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:49:06,861 INFO [train.py:903] (1/4) Epoch 28, batch 500, loss[loss=0.1801, simple_loss=0.2594, pruned_loss=0.05044, over 19635.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06034, over 3507846.05 frames. ], batch size: 50, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:49:28,424 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.109e+02 6.404e+02 8.256e+02 1.456e+03, threshold=1.281e+03, percent-clipped=5.0 +2023-04-03 10:49:33,517 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4252, 1.4913, 1.7477, 1.6540, 2.6295, 2.2165, 2.7761, 1.2321], + device='cuda:1'), covar=tensor([0.2587, 0.4428, 0.2867, 0.2061, 0.1548, 0.2269, 0.1435, 0.4544], + device='cuda:1'), in_proj_covar=tensor([0.0551, 0.0666, 0.0750, 0.0505, 0.0635, 0.0543, 0.0666, 0.0569], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:49:41,467 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:50:02,645 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.0958, 2.8406, 2.2238, 2.1183, 1.8505, 2.3968, 0.9726, 2.0449], + device='cuda:1'), covar=tensor([0.0756, 0.0739, 0.0875, 0.1374, 0.1409, 0.1324, 0.1657, 0.1219], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0365, 0.0371, 0.0395, 0.0472, 0.0397, 0.0347, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 10:50:09,152 INFO [train.py:903] (1/4) Epoch 28, batch 550, loss[loss=0.2018, simple_loss=0.2888, pruned_loss=0.05745, over 19114.00 frames. 
], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06001, over 3582490.85 frames. ], batch size: 69, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:08,833 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.35 vs. limit=5.0 +2023-04-03 10:51:11,433 INFO [train.py:903] (1/4) Epoch 28, batch 600, loss[loss=0.2351, simple_loss=0.2911, pruned_loss=0.08953, over 18212.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2835, pruned_loss=0.06012, over 3639147.53 frames. ], batch size: 40, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:12,910 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184957.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:19,892 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:31,268 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.007e+02 6.276e+02 8.222e+02 1.849e+03, threshold=1.255e+03, percent-clipped=3.0 +2023-04-03 10:51:50,596 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 10:52:14,345 INFO [train.py:903] (1/4) Epoch 28, batch 650, loss[loss=0.1993, simple_loss=0.2902, pruned_loss=0.05417, over 19533.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05934, over 3686580.85 frames. ], batch size: 54, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:16,128 INFO [train.py:903] (1/4) Epoch 28, batch 700, loss[loss=0.1735, simple_loss=0.2486, pruned_loss=0.0492, over 19716.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2818, pruned_loss=0.05919, over 3703572.96 frames. ], batch size: 46, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:38,016 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 4.589e+02 5.550e+02 7.126e+02 1.317e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-03 10:53:45,166 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:01,331 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:19,784 INFO [train.py:903] (1/4) Epoch 28, batch 750, loss[loss=0.2043, simple_loss=0.2811, pruned_loss=0.06369, over 19520.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06026, over 3723036.72 frames. ], batch size: 56, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:54:34,001 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:10,733 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185147.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:20,946 INFO [train.py:903] (1/4) Epoch 28, batch 800, loss[loss=0.204, simple_loss=0.2885, pruned_loss=0.05977, over 19662.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2844, pruned_loss=0.06084, over 3742571.71 frames. ], batch size: 55, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:55:30,807 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-03 10:55:34,902 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 10:55:41,840 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.493e+02 6.558e+02 7.858e+02 2.224e+03, threshold=1.312e+03, percent-clipped=8.0 +2023-04-03 10:56:22,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2452, 5.7099, 3.1233, 5.0825, 1.2028, 5.9469, 5.6681, 5.8471], + device='cuda:1'), covar=tensor([0.0366, 0.0779, 0.1820, 0.0699, 0.3859, 0.0500, 0.0731, 0.0971], + device='cuda:1'), in_proj_covar=tensor([0.0527, 0.0426, 0.0517, 0.0359, 0.0409, 0.0456, 0.0451, 0.0480], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:56:24,246 INFO [train.py:903] (1/4) Epoch 28, batch 850, loss[loss=0.18, simple_loss=0.2576, pruned_loss=0.05122, over 19379.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2842, pruned_loss=0.06024, over 3755156.39 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:56:39,446 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:56:51,614 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185228.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:11,558 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:15,606 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 10:57:24,911 INFO [train.py:903] (1/4) Epoch 28, batch 900, loss[loss=0.1747, simple_loss=0.2478, pruned_loss=0.0508, over 19750.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.285, pruned_loss=0.06098, over 3764862.18 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:57:37,059 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 10:57:47,702 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 4.798e+02 5.781e+02 7.336e+02 1.381e+03, threshold=1.156e+03, percent-clipped=1.0 +2023-04-03 10:58:21,555 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:58:28,121 INFO [train.py:903] (1/4) Epoch 28, batch 950, loss[loss=0.1995, simple_loss=0.2829, pruned_loss=0.05806, over 17374.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2838, pruned_loss=0.06034, over 3784636.38 frames. ], batch size: 101, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:58:29,317 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 10:59:11,094 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4176, 3.7308, 2.2006, 2.4227, 3.3857, 2.0678, 1.7603, 2.4860], + device='cuda:1'), covar=tensor([0.1101, 0.0460, 0.0974, 0.0699, 0.0439, 0.1059, 0.0811, 0.0588], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0317, 0.0335, 0.0271, 0.0249, 0.0341, 0.0290, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:59:15,343 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185343.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:59:32,239 INFO [train.py:903] (1/4) Epoch 28, batch 1000, loss[loss=0.1901, simple_loss=0.2763, pruned_loss=0.052, over 18139.00 frames. 
], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06029, over 3795212.82 frames. ], batch size: 83, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:59:49,500 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1521, 1.9013, 2.1616, 1.8983, 4.6959, 1.4259, 2.7098, 5.1566], + device='cuda:1'), covar=tensor([0.0440, 0.2522, 0.2459, 0.1869, 0.0705, 0.2511, 0.1475, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0376, 0.0395, 0.0350, 0.0381, 0.0356, 0.0393, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 10:59:53,731 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 4.891e+02 5.853e+02 7.878e+02 2.572e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-03 10:59:57,745 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 11:00:23,369 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 11:00:34,046 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4558, 3.1517, 2.6114, 2.6728, 2.5509, 2.7566, 1.2811, 2.3391], + device='cuda:1'), covar=tensor([0.0617, 0.0615, 0.0639, 0.0982, 0.0943, 0.0999, 0.1430, 0.1060], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0363, 0.0369, 0.0391, 0.0470, 0.0394, 0.0346, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:00:34,737 INFO [train.py:903] (1/4) Epoch 28, batch 1050, loss[loss=0.1702, simple_loss=0.2456, pruned_loss=0.04736, over 18654.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05968, over 3814698.02 frames. ], batch size: 41, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:00:47,025 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:00:53,766 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:01:02,627 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 11:01:14,706 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9929, 4.4712, 4.7786, 4.7463, 1.7526, 4.4687, 3.8049, 4.4901], + device='cuda:1'), covar=tensor([0.1770, 0.0970, 0.0610, 0.0793, 0.6376, 0.1064, 0.0724, 0.1199], + device='cuda:1'), in_proj_covar=tensor([0.0824, 0.0785, 0.0998, 0.0874, 0.0867, 0.0762, 0.0588, 0.0926], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 11:01:35,152 INFO [train.py:903] (1/4) Epoch 28, batch 1100, loss[loss=0.275, simple_loss=0.3349, pruned_loss=0.1075, over 13278.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2822, pruned_loss=0.06036, over 3815918.45 frames. 
], batch size: 136, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:01:57,233 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.947e+02 6.329e+02 8.288e+02 1.903e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-03 11:02:13,020 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:18,717 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185491.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:31,470 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7839, 1.4464, 1.6055, 1.5836, 3.3650, 1.0198, 2.2884, 3.8738], + device='cuda:1'), covar=tensor([0.0488, 0.2908, 0.2942, 0.1926, 0.0705, 0.2772, 0.1476, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0422, 0.0377, 0.0396, 0.0351, 0.0381, 0.0357, 0.0393, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:02:35,892 INFO [train.py:903] (1/4) Epoch 28, batch 1150, loss[loss=0.1708, simple_loss=0.2495, pruned_loss=0.04603, over 19752.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2831, pruned_loss=0.06097, over 3822780.73 frames. ], batch size: 46, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:03:15,647 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:03:28,125 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.15 vs. limit=5.0 +2023-04-03 11:03:40,781 INFO [train.py:903] (1/4) Epoch 28, batch 1200, loss[loss=0.1672, simple_loss=0.2515, pruned_loss=0.04143, over 19836.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.0601, over 3817407.57 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:04:01,639 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.383e+02 6.799e+02 8.653e+02 1.626e+03, threshold=1.360e+03, percent-clipped=3.0 +2023-04-03 11:04:11,775 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 11:04:34,064 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:04:41,623 INFO [train.py:903] (1/4) Epoch 28, batch 1250, loss[loss=0.2054, simple_loss=0.2837, pruned_loss=0.06356, over 19596.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06092, over 3816652.42 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 16.0 +2023-04-03 11:04:42,042 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185606.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:03,712 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:44,893 INFO [train.py:903] (1/4) Epoch 28, batch 1300, loss[loss=0.2109, simple_loss=0.2991, pruned_loss=0.06131, over 17483.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2835, pruned_loss=0.06101, over 3805758.17 frames. 
], batch size: 101, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:05:46,475 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9068, 1.4610, 1.6374, 1.5863, 4.4021, 1.1070, 2.6406, 4.8012], + device='cuda:1'), covar=tensor([0.0499, 0.2875, 0.3153, 0.2147, 0.0767, 0.2796, 0.1475, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0376, 0.0396, 0.0351, 0.0381, 0.0357, 0.0392, 0.0415], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:06:04,624 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185672.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:06,496 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 4.542e+02 5.530e+02 7.592e+02 1.164e+03, threshold=1.106e+03, percent-clipped=0.0 +2023-04-03 11:06:35,902 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185697.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:39,506 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0260, 2.5358, 1.8111, 1.8807, 2.3659, 1.7425, 1.7506, 2.2248], + device='cuda:1'), covar=tensor([0.0921, 0.0777, 0.0835, 0.0762, 0.0508, 0.1037, 0.0677, 0.0544], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0320, 0.0339, 0.0273, 0.0250, 0.0346, 0.0292, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:06:46,291 INFO [train.py:903] (1/4) Epoch 28, batch 1350, loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.0591, over 19679.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06135, over 3791589.34 frames. ], batch size: 59, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:07:48,954 INFO [train.py:903] (1/4) Epoch 28, batch 1400, loss[loss=0.1889, simple_loss=0.2668, pruned_loss=0.05551, over 19791.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06041, over 3807766.20 frames. ], batch size: 49, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:08:11,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.444e+02 4.899e+02 6.106e+02 7.699e+02 1.518e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 11:08:15,727 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185777.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:35,111 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:44,814 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8095, 3.2410, 3.3102, 3.3316, 1.3299, 3.1855, 2.7638, 3.1107], + device='cuda:1'), covar=tensor([0.1843, 0.1266, 0.0887, 0.1003, 0.6045, 0.1276, 0.0926, 0.1349], + device='cuda:1'), in_proj_covar=tensor([0.0826, 0.0788, 0.1001, 0.0875, 0.0869, 0.0763, 0.0591, 0.0928], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 11:08:49,361 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 11:08:51,546 INFO [train.py:903] (1/4) Epoch 28, batch 1450, loss[loss=0.2312, simple_loss=0.3226, pruned_loss=0.06989, over 19588.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2827, pruned_loss=0.05953, over 3821214.63 frames. 
], batch size: 61, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:09:05,625 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185817.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:09:06,849 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:20,217 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:54,220 INFO [train.py:903] (1/4) Epoch 28, batch 1500, loss[loss=0.2044, simple_loss=0.2796, pruned_loss=0.06463, over 19385.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06041, over 3816994.43 frames. ], batch size: 48, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:10:02,009 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:15,979 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 5.017e+02 6.227e+02 8.666e+02 1.816e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-03 11:10:33,553 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185887.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:56,450 INFO [train.py:903] (1/4) Epoch 28, batch 1550, loss[loss=0.1497, simple_loss=0.232, pruned_loss=0.0337, over 19741.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06029, over 3828222.69 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:11:45,513 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:11:58,297 INFO [train.py:903] (1/4) Epoch 28, batch 1600, loss[loss=0.217, simple_loss=0.3145, pruned_loss=0.05981, over 19550.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06053, over 3821311.03 frames. ], batch size: 56, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:12:20,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 11:12:23,177 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.773e+02 5.899e+02 6.974e+02 1.687e+03, threshold=1.180e+03, percent-clipped=2.0 +2023-04-03 11:12:28,397 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.75 vs. limit=5.0 +2023-04-03 11:13:03,463 INFO [train.py:903] (1/4) Epoch 28, batch 1650, loss[loss=0.2349, simple_loss=0.3106, pruned_loss=0.07965, over 19099.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.06098, over 3830584.79 frames. 
], batch size: 69, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:13:07,307 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6668, 1.2554, 1.3116, 1.5560, 1.0483, 1.4109, 1.2877, 1.4983], + device='cuda:1'), covar=tensor([0.1224, 0.1321, 0.1737, 0.1061, 0.1518, 0.0652, 0.1759, 0.0919], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0362, 0.0320, 0.0259, 0.0309, 0.0259, 0.0323, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 11:13:26,194 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3690, 3.0797, 2.3661, 2.7786, 0.6270, 3.0754, 2.9365, 3.0318], + device='cuda:1'), covar=tensor([0.1104, 0.1431, 0.1981, 0.1092, 0.4055, 0.0958, 0.1215, 0.1322], + device='cuda:1'), in_proj_covar=tensor([0.0526, 0.0427, 0.0515, 0.0360, 0.0410, 0.0457, 0.0451, 0.0479], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:14:07,007 INFO [train.py:903] (1/4) Epoch 28, batch 1700, loss[loss=0.1866, simple_loss=0.2638, pruned_loss=0.05465, over 18991.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.06004, over 3838211.01 frames. ], batch size: 42, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:14:29,606 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.751e+02 5.670e+02 7.170e+02 1.723e+03, threshold=1.134e+03, percent-clipped=7.0 +2023-04-03 11:14:44,418 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 11:15:08,856 INFO [train.py:903] (1/4) Epoch 28, batch 1750, loss[loss=0.1907, simple_loss=0.2714, pruned_loss=0.05498, over 19584.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2818, pruned_loss=0.05966, over 3821531.63 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:15:24,237 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:15:28,460 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:16:11,526 INFO [train.py:903] (1/4) Epoch 28, batch 1800, loss[loss=0.1829, simple_loss=0.2637, pruned_loss=0.05103, over 19675.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.05983, over 3814122.58 frames. 
], batch size: 53, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:16:18,364 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186161.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:16:36,643 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.184e+02 6.048e+02 8.514e+02 1.613e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 11:16:38,107 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186176.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:16:42,875 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5184, 1.4929, 2.0227, 1.7194, 2.9899, 4.7608, 4.6559, 5.1792], + device='cuda:1'), covar=tensor([0.1541, 0.3889, 0.3447, 0.2413, 0.0674, 0.0207, 0.0163, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0333, 0.0366, 0.0273, 0.0257, 0.0197, 0.0220, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 11:17:02,768 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 11:17:08,561 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 11:17:09,023 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186201.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:15,219 INFO [train.py:903] (1/4) Epoch 28, batch 1850, loss[loss=0.2096, simple_loss=0.296, pruned_loss=0.0616, over 19579.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06015, over 3825957.44 frames. ], batch size: 61, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:17:39,893 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:46,433 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 11:17:52,379 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186236.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:18:17,540 INFO [train.py:903] (1/4) Epoch 28, batch 1900, loss[loss=0.2031, simple_loss=0.2832, pruned_loss=0.0615, over 19762.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.06015, over 3818826.28 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:18:33,594 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 11:18:38,425 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 11:18:39,534 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.824e+02 4.995e+02 5.845e+02 7.128e+02 1.193e+03, threshold=1.169e+03, percent-clipped=0.0 +2023-04-03 11:18:41,962 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186276.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:18:42,901 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:19:02,962 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 11:19:19,221 INFO [train.py:903] (1/4) Epoch 28, batch 1950, loss[loss=0.1599, simple_loss=0.2455, pruned_loss=0.03716, over 19284.00 frames. 
], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06063, over 3824245.82 frames. ], batch size: 44, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:20,372 INFO [train.py:903] (1/4) Epoch 28, batch 2000, loss[loss=0.2232, simple_loss=0.2999, pruned_loss=0.0733, over 18945.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.283, pruned_loss=0.06073, over 3823414.73 frames. ], batch size: 74, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:32,032 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1591, 2.0867, 2.0086, 1.8323, 1.6566, 1.7489, 0.7161, 1.1279], + device='cuda:1'), covar=tensor([0.0639, 0.0667, 0.0469, 0.0786, 0.1207, 0.0897, 0.1323, 0.1107], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0365, 0.0370, 0.0392, 0.0470, 0.0396, 0.0346, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:20:33,485 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 11:20:45,110 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.060e+02 6.550e+02 8.587e+02 3.446e+03, threshold=1.310e+03, percent-clipped=8.0 +2023-04-03 11:21:10,085 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5199, 1.7315, 2.1271, 1.8011, 3.1772, 2.5303, 3.4586, 1.5914], + device='cuda:1'), covar=tensor([0.2767, 0.4596, 0.2879, 0.2066, 0.1572, 0.2307, 0.1601, 0.4660], + device='cuda:1'), in_proj_covar=tensor([0.0555, 0.0671, 0.0757, 0.0509, 0.0638, 0.0546, 0.0671, 0.0574], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 11:21:19,993 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 11:21:24,400 INFO [train.py:903] (1/4) Epoch 28, batch 2050, loss[loss=0.1807, simple_loss=0.267, pruned_loss=0.04721, over 19485.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.06001, over 3817305.83 frames. ], batch size: 49, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:21:40,390 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 11:21:42,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 11:22:02,217 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 11:22:04,680 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:26,598 INFO [train.py:903] (1/4) Epoch 28, batch 2100, loss[loss=0.2015, simple_loss=0.2899, pruned_loss=0.05662, over 19501.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2808, pruned_loss=0.05919, over 3820235.11 frames. ], batch size: 64, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:22:35,104 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:49,639 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.089e+02 6.089e+02 7.390e+02 1.324e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 11:22:58,694 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-03 11:23:11,858 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:13,751 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:20,654 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 11:23:28,689 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6233, 4.2424, 2.7450, 3.7747, 1.0881, 4.2356, 4.0105, 4.1468], + device='cuda:1'), covar=tensor([0.0610, 0.1003, 0.1941, 0.0828, 0.3896, 0.0650, 0.1019, 0.1111], + device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0430, 0.0516, 0.0360, 0.0410, 0.0458, 0.0452, 0.0481], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:23:29,683 INFO [train.py:903] (1/4) Epoch 28, batch 2150, loss[loss=0.1922, simple_loss=0.2787, pruned_loss=0.05287, over 19595.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2811, pruned_loss=0.05941, over 3820103.01 frames. ], batch size: 57, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:23:42,638 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:46,771 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186520.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:02,599 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186532.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:24:06,039 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:30,566 INFO [train.py:903] (1/4) Epoch 28, batch 2200, loss[loss=0.1991, simple_loss=0.2832, pruned_loss=0.05747, over 19510.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2813, pruned_loss=0.05936, over 3823390.64 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:24:32,076 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186557.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:32,183 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186557.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:24:55,772 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 4.864e+02 5.746e+02 7.619e+02 1.717e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-04-03 11:24:58,668 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:09,387 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:32,836 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:34,941 INFO [train.py:903] (1/4) Epoch 28, batch 2250, loss[loss=0.1953, simple_loss=0.2838, pruned_loss=0.05338, over 19459.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.282, pruned_loss=0.05962, over 3806124.40 frames. 
], batch size: 64, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:25:54,488 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186621.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:09,358 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 11:26:12,580 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186635.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:39,407 INFO [train.py:903] (1/4) Epoch 28, batch 2300, loss[loss=0.2167, simple_loss=0.3011, pruned_loss=0.06618, over 17335.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05939, over 3802227.79 frames. ], batch size: 101, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:26:55,469 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 11:27:02,265 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 4.896e+02 5.750e+02 7.152e+02 2.246e+03, threshold=1.150e+03, percent-clipped=6.0 +2023-04-03 11:27:42,250 INFO [train.py:903] (1/4) Epoch 28, batch 2350, loss[loss=0.196, simple_loss=0.2821, pruned_loss=0.05501, over 19759.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.282, pruned_loss=0.05956, over 3797071.67 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:28:07,857 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:20,530 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:26,056 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 11:28:42,177 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 11:28:44,642 INFO [train.py:903] (1/4) Epoch 28, batch 2400, loss[loss=0.2, simple_loss=0.2785, pruned_loss=0.06082, over 19597.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2821, pruned_loss=0.05938, over 3800577.83 frames. ], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:29:08,814 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 4.439e+02 5.729e+02 7.466e+02 1.887e+03, threshold=1.146e+03, percent-clipped=9.0 +2023-04-03 11:29:20,260 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:29:47,720 INFO [train.py:903] (1/4) Epoch 28, batch 2450, loss[loss=0.1733, simple_loss=0.2579, pruned_loss=0.04431, over 19581.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2829, pruned_loss=0.05945, over 3816064.12 frames. ], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:01,665 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-04-03 11:30:02,241 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5292, 2.5243, 2.2585, 2.6647, 2.3520, 2.1437, 2.1118, 2.4814], + device='cuda:1'), covar=tensor([0.1017, 0.1517, 0.1384, 0.1161, 0.1422, 0.0564, 0.1455, 0.0716], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0362, 0.0321, 0.0259, 0.0308, 0.0258, 0.0323, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 11:30:22,236 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:26,704 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:50,779 INFO [train.py:903] (1/4) Epoch 28, batch 2500, loss[loss=0.195, simple_loss=0.2827, pruned_loss=0.05362, over 19522.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2837, pruned_loss=0.0596, over 3818548.90 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:54,427 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:54,537 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:15,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.341e+02 4.860e+02 5.866e+02 7.110e+02 2.029e+03, threshold=1.173e+03, percent-clipped=7.0 +2023-04-03 11:31:19,829 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186879.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:35,086 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186891.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:45,070 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:48,258 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:54,933 INFO [train.py:903] (1/4) Epoch 28, batch 2550, loss[loss=0.1818, simple_loss=0.2628, pruned_loss=0.05041, over 19598.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2836, pruned_loss=0.05979, over 3809668.34 frames. ], batch size: 50, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:32:06,770 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186916.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,502 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,652 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:47,702 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186948.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:50,817 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 11:32:52,347 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:56,633 INFO [train.py:903] (1/4) Epoch 28, batch 2600, loss[loss=0.2015, simple_loss=0.2853, pruned_loss=0.05889, over 19725.00 frames. 
], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06012, over 3819781.88 frames. ], batch size: 63, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:33:07,516 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2697, 1.2739, 1.4182, 1.3935, 1.6721, 1.7325, 1.7157, 0.6959], + device='cuda:1'), covar=tensor([0.2420, 0.4263, 0.2704, 0.1981, 0.1628, 0.2333, 0.1448, 0.5061], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0669, 0.0755, 0.0507, 0.0633, 0.0545, 0.0667, 0.0571], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:33:20,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.127e+02 5.898e+02 7.788e+02 1.720e+03, threshold=1.180e+03, percent-clipped=7.0 +2023-04-03 11:33:43,096 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186992.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:45,238 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:59,862 INFO [train.py:903] (1/4) Epoch 28, batch 2650, loss[loss=0.2381, simple_loss=0.3257, pruned_loss=0.07523, over 19729.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06025, over 3819184.20 frames. ], batch size: 63, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:34:12,839 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:14,004 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:22,394 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 11:34:42,920 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0001, 2.0898, 2.3079, 2.6376, 2.0264, 2.5752, 2.3250, 2.1322], + device='cuda:1'), covar=tensor([0.4435, 0.4086, 0.2039, 0.2512, 0.4346, 0.2206, 0.5038, 0.3534], + device='cuda:1'), in_proj_covar=tensor([0.0941, 0.1020, 0.0747, 0.0955, 0.0917, 0.0858, 0.0867, 0.0814], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 11:34:46,275 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187043.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:47,536 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:02,637 INFO [train.py:903] (1/4) Epoch 28, batch 2700, loss[loss=0.1866, simple_loss=0.2599, pruned_loss=0.05668, over 19775.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.0598, over 3831668.76 frames. 
], batch size: 46, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:35:02,868 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:13,307 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:22,266 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187071.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:26,786 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 4.685e+02 5.845e+02 7.450e+02 1.306e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 11:36:06,488 INFO [train.py:903] (1/4) Epoch 28, batch 2750, loss[loss=0.1974, simple_loss=0.2856, pruned_loss=0.05467, over 19577.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.282, pruned_loss=0.05926, over 3843693.55 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:36:10,419 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6692, 2.2617, 1.6442, 1.5393, 2.0665, 1.3013, 1.4673, 2.0597], + device='cuda:1'), covar=tensor([0.1072, 0.0821, 0.1133, 0.0911, 0.0578, 0.1407, 0.0823, 0.0517], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0320, 0.0341, 0.0273, 0.0252, 0.0345, 0.0293, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:36:13,203 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 11:36:15,445 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:36:18,795 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0633, 1.6554, 1.8400, 2.6729, 1.8048, 2.3679, 2.2623, 2.0196], + device='cuda:1'), covar=tensor([0.0843, 0.1013, 0.1027, 0.0808, 0.0972, 0.0804, 0.0908, 0.0746], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0225, 0.0213, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 11:36:43,426 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9002, 1.2825, 1.5957, 0.5935, 2.0250, 2.4626, 2.1378, 2.6308], + device='cuda:1'), covar=tensor([0.1696, 0.3758, 0.3424, 0.2925, 0.0645, 0.0284, 0.0368, 0.0393], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0333, 0.0365, 0.0272, 0.0256, 0.0197, 0.0220, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 11:36:59,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 11:37:06,480 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:08,439 INFO [train.py:903] (1/4) Epoch 28, batch 2800, loss[loss=0.1701, simple_loss=0.2418, pruned_loss=0.04924, over 19296.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2827, pruned_loss=0.05952, over 3842223.46 frames. 
], batch size: 44, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:31,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.065e+02 4.906e+02 5.640e+02 7.444e+02 1.445e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-04-03 11:37:36,957 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:46,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187186.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:03,796 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9948, 2.0691, 1.7741, 2.1772, 1.9108, 1.7088, 1.7694, 1.9110], + device='cuda:1'), covar=tensor([0.1171, 0.1433, 0.1481, 0.1000, 0.1384, 0.0613, 0.1457, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0360, 0.0319, 0.0257, 0.0305, 0.0257, 0.0322, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:38:05,766 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187202.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:10,174 INFO [train.py:903] (1/4) Epoch 28, batch 2850, loss[loss=0.2131, simple_loss=0.2792, pruned_loss=0.0735, over 19627.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2826, pruned_loss=0.0593, over 3832375.47 frames. ], batch size: 50, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:38:13,784 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187208.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:44,959 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187233.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:05,579 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4894, 1.2899, 1.3791, 1.9913, 1.4154, 1.6963, 1.6586, 1.4898], + device='cuda:1'), covar=tensor([0.1041, 0.1197, 0.1194, 0.0728, 0.0950, 0.0990, 0.0964, 0.0890], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0226, 0.0213, 0.0187, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 11:39:05,636 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187250.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:11,667 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 11:39:12,697 INFO [train.py:903] (1/4) Epoch 28, batch 2900, loss[loss=0.2234, simple_loss=0.309, pruned_loss=0.06892, over 19595.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2827, pruned_loss=0.05941, over 3821548.49 frames. ], batch size: 57, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:39:13,295 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-03 11:39:29,214 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6031, 1.6001, 1.7770, 1.8010, 2.4519, 2.2857, 2.5888, 1.0598], + device='cuda:1'), covar=tensor([0.2464, 0.4422, 0.2870, 0.1969, 0.1561, 0.2282, 0.1466, 0.4857], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0670, 0.0755, 0.0508, 0.0635, 0.0546, 0.0668, 0.0573], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:39:32,576 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:33,467 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:35,537 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.012e+02 6.172e+02 7.442e+02 2.226e+03, threshold=1.234e+03, percent-clipped=8.0 +2023-04-03 11:39:35,965 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:02,566 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:06,079 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187300.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:13,522 INFO [train.py:903] (1/4) Epoch 28, batch 2950, loss[loss=0.158, simple_loss=0.234, pruned_loss=0.04106, over 18580.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2817, pruned_loss=0.05908, over 3825729.36 frames. ], batch size: 41, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:40:25,550 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3940, 2.0761, 1.5462, 1.4576, 1.9179, 1.2226, 1.3634, 1.8574], + device='cuda:1'), covar=tensor([0.1103, 0.0892, 0.1261, 0.0892, 0.0629, 0.1426, 0.0782, 0.0530], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0320, 0.0342, 0.0273, 0.0253, 0.0346, 0.0294, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:40:26,515 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187317.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:28,978 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:35,700 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:00,003 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187344.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:13,833 INFO [train.py:903] (1/4) Epoch 28, batch 3000, loss[loss=0.1681, simple_loss=0.2443, pruned_loss=0.04589, over 19749.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2817, pruned_loss=0.05967, over 3831206.47 frames. ], batch size: 46, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:41:13,834 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 11:41:26,712 INFO [train.py:937] (1/4) Epoch 28, validation: loss=0.1673, simple_loss=0.2667, pruned_loss=0.03394, over 944034.00 frames. 
+2023-04-03 11:41:26,713 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 11:41:29,142 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 11:41:49,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.693e+02 4.898e+02 6.411e+02 7.995e+02 1.373e+03, threshold=1.282e+03, percent-clipped=5.0 +2023-04-03 11:42:05,123 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:06,465 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:08,749 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:18,223 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-03 11:42:21,157 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:27,790 INFO [train.py:903] (1/4) Epoch 28, batch 3050, loss[loss=0.2387, simple_loss=0.313, pruned_loss=0.08218, over 14003.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05992, over 3809525.85 frames. ], batch size: 135, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:13,154 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:29,484 INFO [train.py:903] (1/4) Epoch 28, batch 3100, loss[loss=0.1889, simple_loss=0.2634, pruned_loss=0.05722, over 19319.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.05974, over 3808479.11 frames. ], batch size: 44, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:43,949 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:54,451 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.792e+02 5.797e+02 7.675e+02 1.223e+03, threshold=1.159e+03, percent-clipped=0.0 +2023-04-03 11:44:27,059 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187502.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:32,169 INFO [train.py:903] (1/4) Epoch 28, batch 3150, loss[loss=0.1874, simple_loss=0.2681, pruned_loss=0.05331, over 19779.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05922, over 3814194.68 frames. ], batch size: 48, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:44:40,711 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4266, 1.5162, 1.7582, 1.6566, 2.4088, 2.1112, 2.6140, 1.0388], + device='cuda:1'), covar=tensor([0.2572, 0.4505, 0.2862, 0.2043, 0.1587, 0.2357, 0.1438, 0.5076], + device='cuda:1'), in_proj_covar=tensor([0.0552, 0.0669, 0.0753, 0.0507, 0.0635, 0.0545, 0.0668, 0.0572], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:44:44,918 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:54,642 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 11:45:01,682 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:10,624 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:15,235 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:35,285 INFO [train.py:903] (1/4) Epoch 28, batch 3200, loss[loss=0.212, simple_loss=0.2876, pruned_loss=0.06816, over 19676.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2818, pruned_loss=0.05921, over 3813327.00 frames. ], batch size: 53, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:45:55,536 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:57,397 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.914e+02 6.228e+02 8.001e+02 2.182e+03, threshold=1.246e+03, percent-clipped=10.0 +2023-04-03 11:46:08,978 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.32 vs. limit=5.0 +2023-04-03 11:46:27,882 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187598.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:46:36,786 INFO [train.py:903] (1/4) Epoch 28, batch 3250, loss[loss=0.1925, simple_loss=0.2818, pruned_loss=0.05155, over 19593.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2823, pruned_loss=0.06, over 3817377.34 frames. ], batch size: 57, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:47:18,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1384, 1.9323, 1.7343, 2.1389, 1.8340, 1.7506, 1.6938, 1.9508], + device='cuda:1'), covar=tensor([0.1005, 0.1536, 0.1522, 0.1005, 0.1395, 0.0593, 0.1505, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0360, 0.0320, 0.0257, 0.0306, 0.0256, 0.0322, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:47:23,889 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187644.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:47:37,437 INFO [train.py:903] (1/4) Epoch 28, batch 3300, loss[loss=0.2347, simple_loss=0.3064, pruned_loss=0.08149, over 19465.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.06071, over 3823598.41 frames. ], batch size: 70, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:47:37,459 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 11:47:51,893 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2924, 3.8172, 3.9250, 3.9382, 1.6720, 3.7326, 3.2334, 3.7000], + device='cuda:1'), covar=tensor([0.1731, 0.0976, 0.0703, 0.0826, 0.5751, 0.1058, 0.0819, 0.1174], + device='cuda:1'), in_proj_covar=tensor([0.0825, 0.0790, 0.1002, 0.0880, 0.0864, 0.0768, 0.0594, 0.0929], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 11:47:54,289 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:48:01,989 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.770e+02 6.093e+02 7.830e+02 1.620e+03, threshold=1.219e+03, percent-clipped=4.0 +2023-04-03 11:48:40,831 INFO [train.py:903] (1/4) Epoch 28, batch 3350, loss[loss=0.1917, simple_loss=0.2769, pruned_loss=0.05322, over 19761.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.0609, over 3811686.31 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:49:14,754 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:49:42,984 INFO [train.py:903] (1/4) Epoch 28, batch 3400, loss[loss=0.1804, simple_loss=0.2631, pruned_loss=0.04883, over 19413.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06044, over 3814904.84 frames. ], batch size: 48, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:49:45,551 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:02,005 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:07,731 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.801e+02 5.865e+02 7.575e+02 1.695e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-03 11:50:16,885 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:29,633 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 11:50:32,904 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:46,602 INFO [train.py:903] (1/4) Epoch 28, batch 3450, loss[loss=0.1884, simple_loss=0.2761, pruned_loss=0.05033, over 19594.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.0603, over 3805959.84 frames. ], batch size: 61, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:50:47,816 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-03 11:51:41,378 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:51:44,903 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1177, 2.8891, 1.8497, 1.8447, 2.6209, 1.6749, 1.7424, 2.3557], + device='cuda:1'), covar=tensor([0.1189, 0.0749, 0.1131, 0.0948, 0.0603, 0.1255, 0.0874, 0.0609], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0320, 0.0344, 0.0273, 0.0253, 0.0347, 0.0294, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:51:49,293 INFO [train.py:903] (1/4) Epoch 28, batch 3500, loss[loss=0.1814, simple_loss=0.2581, pruned_loss=0.05233, over 15107.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2815, pruned_loss=0.05949, over 3802671.57 frames. ], batch size: 33, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:52:11,985 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:14,011 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.602e+02 4.936e+02 6.004e+02 7.154e+02 1.224e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 11:52:20,884 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:26,774 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:50,774 INFO [train.py:903] (1/4) Epoch 28, batch 3550, loss[loss=0.208, simple_loss=0.2775, pruned_loss=0.06921, over 19747.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05943, over 3811462.98 frames. ], batch size: 46, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:53:23,562 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5118, 1.7237, 2.0808, 1.8484, 2.9899, 2.5264, 3.3889, 1.6845], + device='cuda:1'), covar=tensor([0.2717, 0.4539, 0.2816, 0.2040, 0.1698, 0.2296, 0.1673, 0.4592], + device='cuda:1'), in_proj_covar=tensor([0.0553, 0.0667, 0.0753, 0.0507, 0.0634, 0.0545, 0.0669, 0.0572], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 11:53:52,895 INFO [train.py:903] (1/4) Epoch 28, batch 3600, loss[loss=0.1823, simple_loss=0.273, pruned_loss=0.04581, over 19535.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2809, pruned_loss=0.05887, over 3821916.17 frames. ], batch size: 56, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:54:17,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.904e+02 5.870e+02 7.567e+02 1.667e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 11:54:34,361 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:42,627 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:48,378 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:56,060 INFO [train.py:903] (1/4) Epoch 28, batch 3650, loss[loss=0.2019, simple_loss=0.2906, pruned_loss=0.0566, over 19617.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05965, over 3835100.39 frames. 
], batch size: 61, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:55:36,435 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1913, 1.2354, 1.6681, 1.1186, 2.4554, 3.3514, 2.9666, 3.5350], + device='cuda:1'), covar=tensor([0.1655, 0.4062, 0.3669, 0.2842, 0.0695, 0.0225, 0.0255, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0331, 0.0364, 0.0270, 0.0255, 0.0196, 0.0220, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 11:55:48,390 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 11:55:57,939 INFO [train.py:903] (1/4) Epoch 28, batch 3700, loss[loss=0.1669, simple_loss=0.247, pruned_loss=0.04336, over 19784.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05935, over 3840013.11 frames. ], batch size: 47, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:56:12,058 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8914, 1.5680, 1.5194, 1.8424, 1.4577, 1.6190, 1.5101, 1.7410], + device='cuda:1'), covar=tensor([0.1106, 0.1369, 0.1555, 0.1088, 0.1341, 0.0589, 0.1554, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0361, 0.0320, 0.0257, 0.0306, 0.0257, 0.0322, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:56:23,991 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.013e+02 6.013e+02 7.793e+02 2.143e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 11:56:41,380 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9631, 1.7460, 1.6250, 1.9455, 1.6996, 1.6967, 1.5742, 1.8450], + device='cuda:1'), covar=tensor([0.1097, 0.1531, 0.1576, 0.1116, 0.1377, 0.0599, 0.1567, 0.0793], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0361, 0.0320, 0.0258, 0.0307, 0.0257, 0.0322, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 11:56:59,855 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:57:00,129 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 11:57:00,595 INFO [train.py:903] (1/4) Epoch 28, batch 3750, loss[loss=0.1782, simple_loss=0.2535, pruned_loss=0.05147, over 19737.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2831, pruned_loss=0.05984, over 3829552.64 frames. ], batch size: 46, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:57:32,238 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188130.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:58:03,792 INFO [train.py:903] (1/4) Epoch 28, batch 3800, loss[loss=0.2132, simple_loss=0.2978, pruned_loss=0.06426, over 19274.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.283, pruned_loss=0.05959, over 3841099.81 frames. 
], batch size: 66, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:58:29,767 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1061, 1.0823, 1.5755, 1.0934, 2.1665, 2.9659, 2.6958, 3.3062], + device='cuda:1'), covar=tensor([0.1821, 0.5373, 0.4502, 0.2900, 0.0819, 0.0292, 0.0360, 0.0345], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0332, 0.0365, 0.0272, 0.0256, 0.0197, 0.0221, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 11:58:30,496 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.177e+02 6.017e+02 8.386e+02 1.721e+03, threshold=1.203e+03, percent-clipped=7.0 +2023-04-03 11:58:33,980 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 11:59:06,271 INFO [train.py:903] (1/4) Epoch 28, batch 3850, loss[loss=0.2256, simple_loss=0.3072, pruned_loss=0.07197, over 18246.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.05999, over 3840107.64 frames. ], batch size: 83, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:59:56,095 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:00,686 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7862, 1.6314, 1.6643, 2.3093, 1.7148, 1.9778, 2.0567, 1.7910], + device='cuda:1'), covar=tensor([0.0855, 0.0893, 0.0993, 0.0722, 0.0895, 0.0791, 0.0877, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0225, 0.0214, 0.0187, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 12:00:05,190 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188252.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:09,178 INFO [train.py:903] (1/4) Epoch 28, batch 3900, loss[loss=0.1911, simple_loss=0.2815, pruned_loss=0.05037, over 19760.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.05906, over 3844356.97 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:00:09,599 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:26,418 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188270.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:34,023 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.053e+02 6.264e+02 8.116e+02 1.975e+03, threshold=1.253e+03, percent-clipped=4.0 +2023-04-03 12:00:34,452 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:39,120 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188281.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:50,587 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.44 vs. limit=5.0 +2023-04-03 12:01:04,070 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:09,682 INFO [train.py:903] (1/4) Epoch 28, batch 3950, loss[loss=0.1713, simple_loss=0.2499, pruned_loss=0.04641, over 19398.00 frames. 
], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05932, over 3823699.89 frames. ], batch size: 48, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:01:15,479 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 12:01:17,799 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2185, 5.6618, 2.9719, 4.9186, 1.2481, 5.7505, 5.5792, 5.7908], + device='cuda:1'), covar=tensor([0.0374, 0.0770, 0.1918, 0.0677, 0.3813, 0.0506, 0.0809, 0.0923], + device='cuda:1'), in_proj_covar=tensor([0.0530, 0.0431, 0.0518, 0.0362, 0.0412, 0.0458, 0.0454, 0.0485], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:02:12,182 INFO [train.py:903] (1/4) Epoch 28, batch 4000, loss[loss=0.2239, simple_loss=0.303, pruned_loss=0.07239, over 19698.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05911, over 3826214.97 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:02:38,285 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.378e+02 4.980e+02 6.272e+02 8.090e+02 1.579e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 12:02:59,869 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 12:03:14,927 INFO [train.py:903] (1/4) Epoch 28, batch 4050, loss[loss=0.2297, simple_loss=0.3052, pruned_loss=0.07708, over 19863.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05924, over 3823062.26 frames. ], batch size: 52, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:17,051 INFO [train.py:903] (1/4) Epoch 28, batch 4100, loss[loss=0.1749, simple_loss=0.2638, pruned_loss=0.04304, over 19657.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2816, pruned_loss=0.05931, over 3811851.69 frames. ], batch size: 55, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:43,427 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.116e+02 6.170e+02 8.694e+02 1.686e+03, threshold=1.234e+03, percent-clipped=5.0 +2023-04-03 12:04:53,774 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 12:05:11,282 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:05:19,165 INFO [train.py:903] (1/4) Epoch 28, batch 4150, loss[loss=0.1807, simple_loss=0.2779, pruned_loss=0.0418, over 19675.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2797, pruned_loss=0.05818, over 3826289.32 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:05:33,298 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-03 12:06:21,498 INFO [train.py:903] (1/4) Epoch 28, batch 4200, loss[loss=0.199, simple_loss=0.2771, pruned_loss=0.06043, over 19377.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2808, pruned_loss=0.05904, over 3807262.07 frames. ], batch size: 47, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:06:24,956 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-03 12:06:46,561 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 4.775e+02 5.885e+02 7.540e+02 1.202e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-03 12:07:22,474 INFO [train.py:903] (1/4) Epoch 28, batch 4250, loss[loss=0.1942, simple_loss=0.2832, pruned_loss=0.05263, over 19537.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2825, pruned_loss=0.06051, over 3799863.51 frames. ], batch size: 56, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:07:33,753 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 12:07:44,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 12:07:53,730 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-03 12:08:10,326 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:08:24,251 INFO [train.py:903] (1/4) Epoch 28, batch 4300, loss[loss=0.1871, simple_loss=0.2787, pruned_loss=0.0478, over 18058.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2816, pruned_loss=0.05978, over 3798919.20 frames. ], batch size: 83, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:08:35,241 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5411, 1.2244, 1.5478, 1.6736, 2.9491, 1.3503, 2.5506, 3.4417], + device='cuda:1'), covar=tensor([0.0687, 0.3549, 0.3098, 0.1982, 0.1074, 0.2627, 0.1290, 0.0386], + device='cuda:1'), in_proj_covar=tensor([0.0423, 0.0379, 0.0396, 0.0353, 0.0381, 0.0357, 0.0397, 0.0418], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:08:35,283 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3784, 1.3083, 1.7735, 1.3631, 2.6532, 3.5607, 3.2404, 3.7380], + device='cuda:1'), covar=tensor([0.1550, 0.3984, 0.3438, 0.2493, 0.0669, 0.0197, 0.0251, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0333, 0.0367, 0.0273, 0.0257, 0.0198, 0.0222, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 12:08:51,757 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.988e+02 4.883e+02 5.889e+02 8.258e+02 1.553e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-03 12:09:15,636 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 12:09:26,307 INFO [train.py:903] (1/4) Epoch 28, batch 4350, loss[loss=0.162, simple_loss=0.2431, pruned_loss=0.04051, over 19026.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.05977, over 3805028.42 frames. ], batch size: 42, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:10:30,423 INFO [train.py:903] (1/4) Epoch 28, batch 4400, loss[loss=0.1726, simple_loss=0.2539, pruned_loss=0.04567, over 19824.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.05961, over 3806949.38 frames. ], batch size: 49, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:10:35,397 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:10:52,225 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. 
Duration: 25.285 +2023-04-03 12:10:57,774 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.797e+02 5.749e+02 7.341e+02 1.454e+03, threshold=1.150e+03, percent-clipped=2.0 +2023-04-03 12:11:02,388 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 12:11:31,826 INFO [train.py:903] (1/4) Epoch 28, batch 4450, loss[loss=0.1945, simple_loss=0.2721, pruned_loss=0.05851, over 19763.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06033, over 3813172.83 frames. ], batch size: 48, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:11,511 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7553, 1.9994, 2.2424, 2.0568, 3.3247, 2.9306, 3.5269, 1.6038], + device='cuda:1'), covar=tensor([0.2442, 0.4150, 0.2855, 0.1837, 0.1487, 0.1966, 0.1517, 0.4614], + device='cuda:1'), in_proj_covar=tensor([0.0557, 0.0673, 0.0759, 0.0510, 0.0638, 0.0549, 0.0672, 0.0576], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:12:19,630 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:12:32,891 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6837, 1.2540, 1.3249, 1.5658, 1.1441, 1.4238, 1.2792, 1.4982], + device='cuda:1'), covar=tensor([0.1117, 0.1252, 0.1651, 0.1007, 0.1353, 0.0646, 0.1688, 0.0873], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0358, 0.0317, 0.0255, 0.0303, 0.0254, 0.0319, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:12:36,100 INFO [train.py:903] (1/4) Epoch 28, batch 4500, loss[loss=0.2516, simple_loss=0.32, pruned_loss=0.09161, over 13716.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2829, pruned_loss=0.05996, over 3808855.79 frames. ], batch size: 135, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:44,874 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0202, 2.0888, 2.2978, 2.6284, 2.0378, 2.5039, 2.2429, 2.0996], + device='cuda:1'), covar=tensor([0.4226, 0.4096, 0.2002, 0.2578, 0.4335, 0.2353, 0.5197, 0.3604], + device='cuda:1'), in_proj_covar=tensor([0.0941, 0.1018, 0.0745, 0.0954, 0.0917, 0.0860, 0.0863, 0.0811], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:13:04,412 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.536e+02 5.549e+02 7.562e+02 1.666e+03, threshold=1.110e+03, percent-clipped=5.0 +2023-04-03 12:13:10,821 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-03 12:13:38,229 INFO [train.py:903] (1/4) Epoch 28, batch 4550, loss[loss=0.198, simple_loss=0.2841, pruned_loss=0.05596, over 19135.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.05997, over 3827578.86 frames. ], batch size: 69, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:46,950 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 12:14:12,237 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 12:14:29,503 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:31,816 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:39,522 INFO [train.py:903] (1/4) Epoch 28, batch 4600, loss[loss=0.1919, simple_loss=0.2731, pruned_loss=0.0554, over 19632.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2842, pruned_loss=0.06036, over 3831094.93 frames. ], batch size: 50, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:14:43,215 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:15:07,935 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.624e+02 5.779e+02 7.629e+02 1.899e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 12:15:08,213 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2118, 5.6495, 3.2047, 4.9312, 1.3420, 5.8140, 5.6505, 5.8089], + device='cuda:1'), covar=tensor([0.0332, 0.0731, 0.1681, 0.0731, 0.3627, 0.0445, 0.0735, 0.0752], + device='cuda:1'), in_proj_covar=tensor([0.0525, 0.0428, 0.0512, 0.0359, 0.0407, 0.0454, 0.0450, 0.0480], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:15:13,473 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.46 vs. limit=5.0 +2023-04-03 12:15:43,022 INFO [train.py:903] (1/4) Epoch 28, batch 4650, loss[loss=0.1633, simple_loss=0.2523, pruned_loss=0.03713, over 19657.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2838, pruned_loss=0.06018, over 3833764.91 frames. ], batch size: 53, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:15:55,977 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:01,294 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 12:16:12,621 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 12:16:26,483 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,710 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,913 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9595, 2.0368, 2.3049, 2.6205, 1.9825, 2.5050, 2.2745, 2.1194], + device='cuda:1'), covar=tensor([0.4478, 0.4222, 0.2021, 0.2638, 0.4575, 0.2435, 0.5122, 0.3531], + device='cuda:1'), in_proj_covar=tensor([0.0937, 0.1014, 0.0741, 0.0951, 0.0913, 0.0854, 0.0859, 0.0806], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:16:44,440 INFO [train.py:903] (1/4) Epoch 28, batch 4700, loss[loss=0.2804, simple_loss=0.3382, pruned_loss=0.1113, over 13942.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.06042, over 3834475.73 frames. ], batch size: 139, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:17:07,716 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 12:17:10,864 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.848e+02 5.842e+02 7.352e+02 2.015e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-03 12:17:17,565 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:36,140 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:46,801 INFO [train.py:903] (1/4) Epoch 28, batch 4750, loss[loss=0.1702, simple_loss=0.2578, pruned_loss=0.04129, over 19656.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06065, over 3823678.16 frames. ], batch size: 55, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:18:47,827 INFO [train.py:903] (1/4) Epoch 28, batch 4800, loss[loss=0.1895, simple_loss=0.2733, pruned_loss=0.05285, over 19597.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06046, over 3822538.66 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:19:16,002 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 5.029e+02 6.156e+02 7.557e+02 1.439e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-03 12:19:43,396 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 12:19:50,824 INFO [train.py:903] (1/4) Epoch 28, batch 4850, loss[loss=0.2027, simple_loss=0.2885, pruned_loss=0.05849, over 19493.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06069, over 3810321.22 frames. ], batch size: 64, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:20:01,556 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:10,566 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3224, 1.9299, 2.1327, 3.0529, 2.1760, 2.4790, 2.6037, 2.2254], + device='cuda:1'), covar=tensor([0.0784, 0.0936, 0.0954, 0.0738, 0.0830, 0.0763, 0.0848, 0.0682], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0242, 0.0226, 0.0214, 0.0189, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 12:20:12,593 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 12:20:32,917 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:33,808 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 12:20:39,535 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 12:20:40,697 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 12:20:51,044 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 12:20:53,318 INFO [train.py:903] (1/4) Epoch 28, batch 4900, loss[loss=0.1818, simple_loss=0.2604, pruned_loss=0.05163, over 19597.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06044, over 3796615.23 frames. 
], batch size: 50, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:21:05,669 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189266.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:05,770 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9777, 1.8811, 1.6545, 2.0634, 1.7221, 1.6589, 1.6312, 1.8990], + device='cuda:1'), covar=tensor([0.1142, 0.1399, 0.1653, 0.1109, 0.1453, 0.0630, 0.1607, 0.0810], + device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0360, 0.0319, 0.0257, 0.0305, 0.0255, 0.0321, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:21:10,805 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 12:21:14,415 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8201, 1.4272, 1.6964, 1.7438, 4.3088, 1.1677, 2.4438, 4.7247], + device='cuda:1'), covar=tensor([0.0481, 0.3025, 0.2997, 0.1938, 0.0774, 0.2764, 0.1672, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0381, 0.0400, 0.0354, 0.0383, 0.0359, 0.0399, 0.0421], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:21:19,662 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.742e+02 6.083e+02 7.635e+02 1.565e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-04-03 12:21:35,181 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:38,179 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189293.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:53,518 INFO [train.py:903] (1/4) Epoch 28, batch 4950, loss[loss=0.1988, simple_loss=0.2805, pruned_loss=0.05852, over 19390.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2842, pruned_loss=0.06092, over 3801382.72 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:22:10,592 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 12:22:34,599 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 12:22:41,890 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6406, 1.6993, 1.9200, 1.8188, 2.6045, 2.3394, 2.7062, 1.5266], + device='cuda:1'), covar=tensor([0.2465, 0.4074, 0.2626, 0.1922, 0.1567, 0.2104, 0.1491, 0.4560], + device='cuda:1'), in_proj_covar=tensor([0.0556, 0.0673, 0.0758, 0.0508, 0.0637, 0.0548, 0.0673, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:22:56,069 INFO [train.py:903] (1/4) Epoch 28, batch 5000, loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.0632, over 19589.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06103, over 3779720.49 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:23:04,841 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 12:23:08,817 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189365.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:15,678 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-03 12:23:24,839 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.003e+02 6.019e+02 8.012e+02 1.544e+03, threshold=1.204e+03, percent-clipped=7.0 +2023-04-03 12:23:42,151 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:59,717 INFO [train.py:903] (1/4) Epoch 28, batch 5050, loss[loss=0.2007, simple_loss=0.2839, pruned_loss=0.05881, over 19592.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2844, pruned_loss=0.06065, over 3795202.29 frames. ], batch size: 61, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:24:00,151 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:02,595 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:24,214 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:33,178 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 12:24:44,053 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189441.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:25:01,213 INFO [train.py:903] (1/4) Epoch 28, batch 5100, loss[loss=0.1885, simple_loss=0.2664, pruned_loss=0.05524, over 19741.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2843, pruned_loss=0.06041, over 3808934.65 frames. ], batch size: 51, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:25:10,101 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 12:25:12,435 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 12:25:19,116 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 12:25:27,912 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.191e+02 6.540e+02 8.236e+02 1.634e+03, threshold=1.308e+03, percent-clipped=9.0 +2023-04-03 12:26:01,392 INFO [train.py:903] (1/4) Epoch 28, batch 5150, loss[loss=0.2519, simple_loss=0.3193, pruned_loss=0.09223, over 17385.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.285, pruned_loss=0.061, over 3783376.92 frames. ], batch size: 101, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:26:02,894 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:11,409 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 12:26:18,140 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:34,978 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7531, 1.7726, 1.7306, 1.5259, 1.4195, 1.5286, 0.3000, 0.7308], + device='cuda:1'), covar=tensor([0.0712, 0.0652, 0.0446, 0.0682, 0.1347, 0.0811, 0.1434, 0.1220], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0364, 0.0369, 0.0392, 0.0471, 0.0397, 0.0346, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 12:26:44,823 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:46,849 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 12:27:02,655 INFO [train.py:903] (1/4) Epoch 28, batch 5200, loss[loss=0.227, simple_loss=0.3012, pruned_loss=0.0764, over 19689.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2853, pruned_loss=0.06117, over 3791908.45 frames. ], batch size: 59, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:27:03,046 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189556.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:27:04,961 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7742, 4.3724, 2.6991, 3.9330, 0.9323, 4.4025, 4.2223, 4.3274], + device='cuda:1'), covar=tensor([0.0612, 0.0957, 0.2017, 0.0781, 0.4154, 0.0595, 0.0912, 0.1110], + device='cuda:1'), in_proj_covar=tensor([0.0529, 0.0429, 0.0517, 0.0360, 0.0410, 0.0456, 0.0451, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:27:17,238 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 12:27:30,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 4.850e+02 5.941e+02 7.550e+02 1.552e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-03 12:28:01,620 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 12:28:04,867 INFO [train.py:903] (1/4) Epoch 28, batch 5250, loss[loss=0.2152, simple_loss=0.2877, pruned_loss=0.07129, over 19732.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2849, pruned_loss=0.06081, over 3810341.87 frames. ], batch size: 51, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:28:09,606 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:04,828 INFO [train.py:903] (1/4) Epoch 28, batch 5300, loss[loss=0.1926, simple_loss=0.2856, pruned_loss=0.04977, over 19672.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2844, pruned_loss=0.06072, over 3814184.51 frames. 
], batch size: 58, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:29:08,388 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189658.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:12,986 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:14,942 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189664.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:21,474 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 12:29:31,784 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.811e+02 6.188e+02 7.440e+02 1.460e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-03 12:29:44,190 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:46,196 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189689.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:05,519 INFO [train.py:903] (1/4) Epoch 28, batch 5350, loss[loss=0.2231, simple_loss=0.3037, pruned_loss=0.07122, over 19615.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2837, pruned_loss=0.06055, over 3794834.69 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:30:09,135 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:17,205 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7706, 1.8921, 2.1644, 2.2529, 1.7377, 2.1614, 2.0941, 1.9619], + device='cuda:1'), covar=tensor([0.4252, 0.3852, 0.2051, 0.2512, 0.4057, 0.2425, 0.5204, 0.3570], + device='cuda:1'), in_proj_covar=tensor([0.0940, 0.1018, 0.0745, 0.0954, 0.0916, 0.0858, 0.0863, 0.0812], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:30:18,029 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8298, 4.9137, 5.7977, 5.7695, 2.5643, 5.5637, 4.5307, 5.1856], + device='cuda:1'), covar=tensor([0.1884, 0.1260, 0.0632, 0.0750, 0.6524, 0.1704, 0.0949, 0.1367], + device='cuda:1'), in_proj_covar=tensor([0.0814, 0.0782, 0.0991, 0.0870, 0.0861, 0.0756, 0.0583, 0.0922], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 12:30:25,491 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:29,855 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:38,357 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-03 12:30:54,236 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:04,019 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8891, 4.4598, 2.7086, 3.9066, 1.0067, 4.4115, 4.3260, 4.3751], + device='cuda:1'), covar=tensor([0.0481, 0.0876, 0.2049, 0.0845, 0.3829, 0.0596, 0.0843, 0.0996], + device='cuda:1'), in_proj_covar=tensor([0.0530, 0.0430, 0.0518, 0.0361, 0.0411, 0.0457, 0.0452, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:31:04,989 INFO [train.py:903] (1/4) Epoch 28, batch 5400, loss[loss=0.1636, simple_loss=0.2387, pruned_loss=0.04429, over 19765.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06055, over 3810711.92 frames. ], batch size: 45, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:31:15,129 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:33,363 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 4.839e+02 5.999e+02 7.745e+02 2.007e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 12:31:44,953 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:56,108 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:07,656 INFO [train.py:903] (1/4) Epoch 28, batch 5450, loss[loss=0.182, simple_loss=0.2586, pruned_loss=0.05265, over 19395.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06005, over 3819360.65 frames. ], batch size: 47, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:32:15,153 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189812.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:27,686 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:30,068 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189824.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:45,795 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:08,050 INFO [train.py:903] (1/4) Epoch 28, batch 5500, loss[loss=0.1941, simple_loss=0.2902, pruned_loss=0.04897, over 19672.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05988, over 3831131.13 frames. ], batch size: 58, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:33:17,138 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:30,502 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 12:33:35,032 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 4.755e+02 5.876e+02 8.325e+02 1.733e+03, threshold=1.175e+03, percent-clipped=4.0 +2023-04-03 12:34:07,023 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9980, 2.1126, 2.2952, 2.6202, 1.9685, 2.4671, 2.2405, 2.1174], + device='cuda:1'), covar=tensor([0.4451, 0.4362, 0.2144, 0.2635, 0.4573, 0.2431, 0.5381, 0.3722], + device='cuda:1'), in_proj_covar=tensor([0.0940, 0.1017, 0.0745, 0.0954, 0.0917, 0.0858, 0.0862, 0.0811], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:34:10,042 INFO [train.py:903] (1/4) Epoch 28, batch 5550, loss[loss=0.1679, simple_loss=0.2392, pruned_loss=0.04829, over 19769.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.0596, over 3826938.12 frames. ], batch size: 45, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:34:17,008 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 12:35:06,749 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 12:35:11,467 INFO [train.py:903] (1/4) Epoch 28, batch 5600, loss[loss=0.2043, simple_loss=0.2926, pruned_loss=0.05794, over 19436.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05904, over 3812597.17 frames. ], batch size: 70, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:35:40,028 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.597e+02 5.831e+02 7.652e+02 2.230e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 12:35:40,280 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:35:44,668 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:11,925 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190002.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:17,389 INFO [train.py:903] (1/4) Epoch 28, batch 5650, loss[loss=0.1763, simple_loss=0.2599, pruned_loss=0.04632, over 19578.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2825, pruned_loss=0.05926, over 3814214.13 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:36:17,743 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:04,793 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 12:37:18,834 INFO [train.py:903] (1/4) Epoch 28, batch 5700, loss[loss=0.1851, simple_loss=0.2674, pruned_loss=0.0514, over 19455.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2839, pruned_loss=0.06026, over 3820825.56 frames. ], batch size: 49, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:37:26,617 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-04-03 12:37:27,080 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7715, 4.9014, 5.5013, 5.5348, 2.2826, 5.2249, 4.4865, 5.2145], + device='cuda:1'), covar=tensor([0.1621, 0.1238, 0.0559, 0.0615, 0.6006, 0.0939, 0.0641, 0.1061], + device='cuda:1'), in_proj_covar=tensor([0.0822, 0.0788, 0.0999, 0.0879, 0.0869, 0.0765, 0.0589, 0.0928], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 12:37:31,551 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190066.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:45,568 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.283e+02 6.430e+02 7.892e+02 1.365e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-04-03 12:37:49,398 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190080.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:54,215 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8133, 1.9319, 2.1972, 2.3206, 1.7840, 2.2824, 2.1514, 1.9716], + device='cuda:1'), covar=tensor([0.4347, 0.3991, 0.2096, 0.2500, 0.4171, 0.2334, 0.5205, 0.3671], + device='cuda:1'), in_proj_covar=tensor([0.0940, 0.1018, 0.0745, 0.0955, 0.0917, 0.0858, 0.0862, 0.0811], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:38:02,440 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,031 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,819 INFO [train.py:903] (1/4) Epoch 28, batch 5750, loss[loss=0.1759, simple_loss=0.2596, pruned_loss=0.04603, over 19787.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2831, pruned_loss=0.0595, over 3820160.01 frames. ], batch size: 49, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:38:24,215 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 12:38:32,477 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 12:38:35,173 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:37,089 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 12:38:45,190 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6649, 1.7630, 2.0683, 1.8908, 2.9101, 2.4023, 3.0774, 1.4170], + device='cuda:1'), covar=tensor([0.2442, 0.4293, 0.2685, 0.2015, 0.1365, 0.2272, 0.1320, 0.4497], + device='cuda:1'), in_proj_covar=tensor([0.0558, 0.0675, 0.0760, 0.0512, 0.0640, 0.0551, 0.0674, 0.0579], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:39:23,777 INFO [train.py:903] (1/4) Epoch 28, batch 5800, loss[loss=0.177, simple_loss=0.2535, pruned_loss=0.05025, over 19743.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2825, pruned_loss=0.05922, over 3820969.28 frames. 
], batch size: 46, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:39:25,235 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190157.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:39:41,876 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190169.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:39:52,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.681e+02 5.738e+02 7.022e+02 1.341e+03, threshold=1.148e+03, percent-clipped=1.0 +2023-04-03 12:39:56,735 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:01,273 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.5723, 4.1746, 2.7500, 3.6525, 1.0293, 4.1430, 4.0086, 4.0996], + device='cuda:1'), covar=tensor([0.0645, 0.0986, 0.1984, 0.0946, 0.4185, 0.0743, 0.0980, 0.1498], + device='cuda:1'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0411, 0.0458, 0.0451, 0.0484], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:40:26,142 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190205.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:27,625 INFO [train.py:903] (1/4) Epoch 28, batch 5850, loss[loss=0.1882, simple_loss=0.2677, pruned_loss=0.0544, over 19838.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2822, pruned_loss=0.05917, over 3819656.09 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:02,546 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:29,981 INFO [train.py:903] (1/4) Epoch 28, batch 5900, loss[loss=0.1885, simple_loss=0.2771, pruned_loss=0.04991, over 19679.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2817, pruned_loss=0.0588, over 3822038.62 frames. ], batch size: 59, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:32,292 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 12:41:32,687 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1913, 1.2421, 1.1751, 1.0148, 1.0715, 1.0417, 0.0962, 0.3442], + device='cuda:1'), covar=tensor([0.0725, 0.0740, 0.0533, 0.0715, 0.1361, 0.0751, 0.1519, 0.1315], + device='cuda:1'), in_proj_covar=tensor([0.0367, 0.0366, 0.0367, 0.0393, 0.0471, 0.0398, 0.0346, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 12:41:33,923 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190259.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:54,171 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 12:41:56,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.583e+02 5.674e+02 7.520e+02 1.844e+03, threshold=1.135e+03, percent-clipped=9.0 +2023-04-03 12:42:24,247 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. 
limit=2.0 +2023-04-03 12:42:31,996 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5972, 1.7039, 1.9533, 1.9438, 1.5127, 1.8795, 1.9340, 1.8122], + device='cuda:1'), covar=tensor([0.4232, 0.3955, 0.2155, 0.2558, 0.4049, 0.2424, 0.5406, 0.3660], + device='cuda:1'), in_proj_covar=tensor([0.0938, 0.1017, 0.0746, 0.0955, 0.0916, 0.0857, 0.0862, 0.0810], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:42:32,649 INFO [train.py:903] (1/4) Epoch 28, batch 5950, loss[loss=0.2167, simple_loss=0.3, pruned_loss=0.06666, over 19772.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2827, pruned_loss=0.05954, over 3808689.00 frames. ], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:43:07,967 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190334.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:43:13,914 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.4035, 1.5370, 1.6137, 1.5680, 3.0101, 1.1447, 2.3688, 3.4421], + device='cuda:1'), covar=tensor([0.0558, 0.2624, 0.2802, 0.1821, 0.0683, 0.2526, 0.1277, 0.0264], + device='cuda:1'), in_proj_covar=tensor([0.0425, 0.0379, 0.0399, 0.0354, 0.0383, 0.0359, 0.0399, 0.0420], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:43:34,404 INFO [train.py:903] (1/4) Epoch 28, batch 6000, loss[loss=0.1837, simple_loss=0.2752, pruned_loss=0.04613, over 19653.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2833, pruned_loss=0.05968, over 3804253.80 frames. ], batch size: 55, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:43:34,405 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 12:43:48,493 INFO [train.py:937] (1/4) Epoch 28, validation: loss=0.1668, simple_loss=0.2663, pruned_loss=0.03368, over 944034.00 frames. +2023-04-03 12:43:48,494 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 12:44:08,735 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:15,160 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 4.800e+02 5.909e+02 8.119e+02 1.607e+03, threshold=1.182e+03, percent-clipped=4.0 +2023-04-03 12:44:41,367 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:50,436 INFO [train.py:903] (1/4) Epoch 28, batch 6050, loss[loss=0.2289, simple_loss=0.2974, pruned_loss=0.08022, over 19751.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2835, pruned_loss=0.05964, over 3802846.12 frames. 
], batch size: 47, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:04,562 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:11,915 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.8437, 4.4552, 2.9493, 3.9098, 1.0623, 4.4380, 4.2590, 4.3504], + device='cuda:1'), covar=tensor([0.0579, 0.0812, 0.1808, 0.0866, 0.3854, 0.0600, 0.0903, 0.1186], + device='cuda:1'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0411, 0.0458, 0.0450, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:45:28,511 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190437.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:50,732 INFO [train.py:903] (1/4) Epoch 28, batch 6100, loss[loss=0.2077, simple_loss=0.2687, pruned_loss=0.0734, over 19370.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.06045, over 3802584.34 frames. ], batch size: 47, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:57,946 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190461.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:59,867 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:17,942 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 4.836e+02 5.950e+02 7.277e+02 1.439e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 12:46:21,259 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.57 vs. limit=5.0 +2023-04-03 12:46:28,813 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:46,944 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190501.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:46:53,524 INFO [train.py:903] (1/4) Epoch 28, batch 6150, loss[loss=0.1848, simple_loss=0.2641, pruned_loss=0.05272, over 19528.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2823, pruned_loss=0.05986, over 3812874.21 frames. ], batch size: 54, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:47:02,052 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:47:23,094 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 12:47:56,364 INFO [train.py:903] (1/4) Epoch 28, batch 6200, loss[loss=0.1906, simple_loss=0.2785, pruned_loss=0.05132, over 19779.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2813, pruned_loss=0.05879, over 3823473.33 frames. ], batch size: 56, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:48:23,170 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.897e+02 5.800e+02 7.277e+02 1.783e+03, threshold=1.160e+03, percent-clipped=5.0 +2023-04-03 12:48:59,510 INFO [train.py:903] (1/4) Epoch 28, batch 6250, loss[loss=0.1997, simple_loss=0.2895, pruned_loss=0.05497, over 19116.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2808, pruned_loss=0.05861, over 3822529.52 frames. 
], batch size: 69, lr: 2.90e-03, grad_scale: 16.0 +2023-04-03 12:49:03,309 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190609.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:49:11,048 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190616.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:49:26,319 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:27,628 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190628.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:31,959 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 12:49:58,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 12:50:00,482 INFO [train.py:903] (1/4) Epoch 28, batch 6300, loss[loss=0.1842, simple_loss=0.2718, pruned_loss=0.04834, over 19590.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2793, pruned_loss=0.05776, over 3828703.89 frames. ], batch size: 52, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:50:28,879 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:50:29,804 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.803e+02 6.276e+02 7.910e+02 2.564e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 12:50:47,605 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:51:04,099 INFO [train.py:903] (1/4) Epoch 28, batch 6350, loss[loss=0.1767, simple_loss=0.2705, pruned_loss=0.0414, over 19633.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2798, pruned_loss=0.0581, over 3822943.17 frames. ], batch size: 55, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:06,522 INFO [train.py:903] (1/4) Epoch 28, batch 6400, loss[loss=0.1998, simple_loss=0.29, pruned_loss=0.05483, over 18796.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.28, pruned_loss=0.05808, over 3820160.94 frames. ], batch size: 74, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:13,581 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:52:36,095 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.755e+02 6.032e+02 8.092e+02 1.400e+03, threshold=1.206e+03, percent-clipped=2.0 +2023-04-03 12:52:52,702 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:53:08,684 INFO [train.py:903] (1/4) Epoch 28, batch 6450, loss[loss=0.2076, simple_loss=0.2955, pruned_loss=0.05979, over 19609.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.05819, over 3825535.82 frames. ], batch size: 61, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:53:09,391 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 12:53:55,127 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 12:54:10,281 INFO [train.py:903] (1/4) Epoch 28, batch 6500, loss[loss=0.2014, simple_loss=0.286, pruned_loss=0.05842, over 19604.00 frames. 
], tot_loss[loss=0.1982, simple_loss=0.2805, pruned_loss=0.05795, over 3837679.53 frames. ], batch size: 61, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:54:17,913 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 12:54:32,004 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190872.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:54:36,601 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:36,729 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6031, 1.7038, 1.9821, 1.8970, 1.5046, 1.8852, 1.9454, 1.8181], + device='cuda:1'), covar=tensor([0.4335, 0.3921, 0.2018, 0.2619, 0.4095, 0.2430, 0.5273, 0.3621], + device='cuda:1'), in_proj_covar=tensor([0.0939, 0.1018, 0.0746, 0.0954, 0.0915, 0.0857, 0.0861, 0.0809], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 12:54:40,950 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.758e+02 5.991e+02 7.680e+02 1.999e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-03 12:54:47,140 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:49,293 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.4035, 2.3437, 2.1925, 2.6624, 2.2626, 2.0527, 2.0354, 2.5114], + device='cuda:1'), covar=tensor([0.1084, 0.1705, 0.1531, 0.1061, 0.1459, 0.0585, 0.1588, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0358, 0.0316, 0.0256, 0.0304, 0.0253, 0.0319, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:55:01,693 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:55:13,651 INFO [train.py:903] (1/4) Epoch 28, batch 6550, loss[loss=0.1706, simple_loss=0.2519, pruned_loss=0.04465, over 19774.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2812, pruned_loss=0.05842, over 3833104.25 frames. ], batch size: 46, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:55:18,347 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:55:45,225 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.3988, 4.0292, 2.7485, 3.5743, 1.0655, 3.9629, 3.8569, 3.9535], + device='cuda:1'), covar=tensor([0.0681, 0.1014, 0.1919, 0.0950, 0.3874, 0.0760, 0.0921, 0.1132], + device='cuda:1'), in_proj_covar=tensor([0.0534, 0.0433, 0.0521, 0.0362, 0.0411, 0.0459, 0.0452, 0.0485], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:56:14,194 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190953.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:56:17,102 INFO [train.py:903] (1/4) Epoch 28, batch 6600, loss[loss=0.1832, simple_loss=0.2603, pruned_loss=0.05307, over 19771.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2807, pruned_loss=0.05846, over 3836416.84 frames. 
], batch size: 46, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:56:35,866 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190971.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:48,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.870e+02 6.050e+02 8.150e+02 2.542e+03, threshold=1.210e+03, percent-clipped=13.0 +2023-04-03 12:56:50,190 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 12:56:53,376 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7629, 1.6756, 1.8631, 1.6775, 4.2756, 1.2089, 2.7167, 4.6533], + device='cuda:1'), covar=tensor([0.0441, 0.2904, 0.2911, 0.2111, 0.0738, 0.2930, 0.1556, 0.0178], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0382, 0.0400, 0.0356, 0.0386, 0.0361, 0.0401, 0.0421], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 12:57:19,915 INFO [train.py:903] (1/4) Epoch 28, batch 6650, loss[loss=0.2117, simple_loss=0.2985, pruned_loss=0.06243, over 19702.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2817, pruned_loss=0.05906, over 3830733.50 frames. ], batch size: 59, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:57:54,086 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-03 12:57:59,360 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:14,554 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:22,332 INFO [train.py:903] (1/4) Epoch 28, batch 6700, loss[loss=0.209, simple_loss=0.2829, pruned_loss=0.06759, over 19563.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05854, over 3838807.05 frames. ], batch size: 52, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:58:38,460 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191068.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:58:45,528 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191074.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:52,989 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.245e+02 6.198e+02 8.145e+02 2.088e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 12:58:59,002 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:59,067 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:59:21,819 INFO [train.py:903] (1/4) Epoch 28, batch 6750, loss[loss=0.1897, simple_loss=0.2711, pruned_loss=0.05416, over 19579.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05969, over 3811546.49 frames. 
], batch size: 52, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:59:51,532 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:11,310 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:13,780 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:17,982 INFO [train.py:903] (1/4) Epoch 28, batch 6800, loss[loss=0.2517, simple_loss=0.3185, pruned_loss=0.09244, over 13536.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2828, pruned_loss=0.0604, over 3789208.81 frames. ], batch size: 136, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 13:00:19,447 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:20,961 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 13:00:45,258 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.099e+02 6.292e+02 8.518e+02 1.501e+03, threshold=1.258e+03, percent-clipped=3.0 +2023-04-03 13:01:04,676 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 13:01:05,146 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 13:01:08,829 INFO [train.py:903] (1/4) Epoch 29, batch 0, loss[loss=0.1662, simple_loss=0.2426, pruned_loss=0.0449, over 19484.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2426, pruned_loss=0.0449, over 19484.00 frames. ], batch size: 49, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:01:08,829 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 13:01:20,493 INFO [train.py:937] (1/4) Epoch 29, validation: loss=0.1669, simple_loss=0.2669, pruned_loss=0.03339, over 944034.00 frames. +2023-04-03 13:01:20,494 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 13:01:31,766 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 13:01:43,679 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191203.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:01:46,163 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6475, 1.7165, 2.0091, 2.0344, 1.5570, 2.0039, 1.9898, 1.8382], + device='cuda:1'), covar=tensor([0.4229, 0.3731, 0.2019, 0.2253, 0.3850, 0.2173, 0.5293, 0.3591], + device='cuda:1'), in_proj_covar=tensor([0.0946, 0.1023, 0.0750, 0.0960, 0.0921, 0.0863, 0.0868, 0.0815], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:02:19,662 INFO [train.py:903] (1/4) Epoch 29, batch 50, loss[loss=0.1955, simple_loss=0.2657, pruned_loss=0.06267, over 19764.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2798, pruned_loss=0.05887, over 871960.96 frames. ], batch size: 46, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:02:55,022 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-03 13:03:15,128 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.210e+02 6.215e+02 8.429e+02 1.754e+03, threshold=1.243e+03, percent-clipped=6.0 +2023-04-03 13:03:18,588 INFO [train.py:903] (1/4) Epoch 29, batch 100, loss[loss=0.1926, simple_loss=0.279, pruned_loss=0.05307, over 19655.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2806, pruned_loss=0.05841, over 1523997.28 frames. ], batch size: 55, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:03:33,065 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 13:04:07,317 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191324.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:04:19,665 INFO [train.py:903] (1/4) Epoch 29, batch 150, loss[loss=0.2008, simple_loss=0.2879, pruned_loss=0.05687, over 18093.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2791, pruned_loss=0.05766, over 2042649.02 frames. ], batch size: 83, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:04:29,101 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191342.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:04:36,957 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191349.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:04:59,842 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:15,367 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.272e+02 6.303e+02 7.742e+02 1.475e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-03 13:05:15,429 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 13:05:18,047 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:18,900 INFO [train.py:903] (1/4) Epoch 29, batch 200, loss[loss=0.2314, simple_loss=0.3061, pruned_loss=0.07833, over 17462.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2826, pruned_loss=0.05981, over 2421080.09 frames. ], batch size: 101, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:05:37,010 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2899, 2.1437, 2.0553, 1.9065, 1.7327, 1.8148, 0.7591, 1.2330], + device='cuda:1'), covar=tensor([0.0700, 0.0699, 0.0523, 0.0862, 0.1223, 0.1031, 0.1379, 0.1155], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0366, 0.0367, 0.0393, 0.0471, 0.0398, 0.0346, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:05:48,183 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:13,706 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191430.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:17,601 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:20,064 INFO [train.py:903] (1/4) Epoch 29, batch 250, loss[loss=0.1867, simple_loss=0.2785, pruned_loss=0.04746, over 19348.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.281, pruned_loss=0.05892, over 2736289.48 frames. 
], batch size: 66, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:06:41,368 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-03 13:07:16,620 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.311e+02 6.503e+02 8.385e+02 2.446e+03, threshold=1.301e+03, percent-clipped=7.0 +2023-04-03 13:07:20,116 INFO [train.py:903] (1/4) Epoch 29, batch 300, loss[loss=0.1608, simple_loss=0.2366, pruned_loss=0.04247, over 19119.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05935, over 2980967.65 frames. ], batch size: 42, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:33,140 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:19,683 INFO [train.py:903] (1/4) Epoch 29, batch 350, loss[loss=0.2197, simple_loss=0.3084, pruned_loss=0.06554, over 19529.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05978, over 3179790.32 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:08:27,199 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 13:08:33,092 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191545.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:35,157 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:09:16,623 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.772e+02 5.697e+02 6.938e+02 1.378e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 13:09:19,970 INFO [train.py:903] (1/4) Epoch 29, batch 400, loss[loss=0.1858, simple_loss=0.2764, pruned_loss=0.04763, over 19546.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05974, over 3322433.00 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:09:50,420 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191609.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:11,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-03 13:10:19,939 INFO [train.py:903] (1/4) Epoch 29, batch 450, loss[loss=0.2344, simple_loss=0.3035, pruned_loss=0.0826, over 12950.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05948, over 3444718.73 frames. ], batch size: 137, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:10:55,268 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:57,256 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 13:10:58,390 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 13:11:17,385 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.702e+02 5.973e+02 7.083e+02 1.398e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 13:11:20,347 INFO [train.py:903] (1/4) Epoch 29, batch 500, loss[loss=0.2097, simple_loss=0.293, pruned_loss=0.0632, over 19716.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2826, pruned_loss=0.05953, over 3520280.66 frames. 
], batch size: 63, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:12:13,382 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:12:21,067 INFO [train.py:903] (1/4) Epoch 29, batch 550, loss[loss=0.2198, simple_loss=0.3001, pruned_loss=0.06976, over 17690.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2832, pruned_loss=0.0598, over 3565320.13 frames. ], batch size: 101, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:12:45,844 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0821, 1.8101, 1.7002, 2.0001, 1.6503, 1.7291, 1.6057, 1.9446], + device='cuda:1'), covar=tensor([0.1040, 0.1323, 0.1458, 0.1091, 0.1390, 0.0549, 0.1587, 0.0732], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0362, 0.0320, 0.0259, 0.0307, 0.0256, 0.0323, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 13:13:04,092 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9850, 1.2779, 1.7197, 0.8847, 2.3163, 3.0551, 2.7115, 3.2321], + device='cuda:1'), covar=tensor([0.1725, 0.4068, 0.3382, 0.2937, 0.0652, 0.0231, 0.0279, 0.0322], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0334, 0.0366, 0.0272, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 13:13:04,201 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4049, 1.4497, 1.7196, 1.6218, 2.3106, 2.0752, 2.3950, 0.9958], + device='cuda:1'), covar=tensor([0.2551, 0.4438, 0.2698, 0.2014, 0.1511, 0.2340, 0.1379, 0.4786], + device='cuda:1'), in_proj_covar=tensor([0.0555, 0.0672, 0.0759, 0.0509, 0.0635, 0.0546, 0.0671, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:13:06,445 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5305, 1.6244, 1.9438, 1.8063, 2.8633, 2.4021, 2.9817, 1.4658], + device='cuda:1'), covar=tensor([0.2623, 0.4432, 0.2870, 0.2026, 0.1486, 0.2300, 0.1448, 0.4586], + device='cuda:1'), in_proj_covar=tensor([0.0555, 0.0672, 0.0759, 0.0509, 0.0635, 0.0546, 0.0671, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:13:19,226 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.419e+02 6.471e+02 8.968e+02 1.855e+03, threshold=1.294e+03, percent-clipped=10.0 +2023-04-03 13:13:22,383 INFO [train.py:903] (1/4) Epoch 29, batch 600, loss[loss=0.1857, simple_loss=0.2599, pruned_loss=0.05576, over 19079.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.06009, over 3624961.13 frames. ], batch size: 42, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:41,913 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:02,330 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 13:14:13,338 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191826.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:22,139 INFO [train.py:903] (1/4) Epoch 29, batch 650, loss[loss=0.1932, simple_loss=0.2863, pruned_loss=0.05004, over 19736.00 frames. 
], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05989, over 3666017.06 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:14:31,462 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:42,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-03 13:15:01,023 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:18,380 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.883e+02 6.286e+02 7.850e+02 2.365e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-03 13:15:21,594 INFO [train.py:903] (1/4) Epoch 29, batch 700, loss[loss=0.2119, simple_loss=0.2935, pruned_loss=0.06519, over 19743.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2844, pruned_loss=0.06008, over 3704310.43 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:15:29,659 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191890.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:03,792 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191918.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:23,390 INFO [train.py:903] (1/4) Epoch 29, batch 750, loss[loss=0.1938, simple_loss=0.2772, pruned_loss=0.05524, over 19710.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2836, pruned_loss=0.05973, over 3717825.89 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:16:34,317 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:17:21,354 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.754e+02 5.591e+02 7.301e+02 1.189e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-03 13:17:23,700 INFO [train.py:903] (1/4) Epoch 29, batch 800, loss[loss=0.2228, simple_loss=0.3083, pruned_loss=0.06869, over 18249.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2841, pruned_loss=0.05993, over 3747217.81 frames. ], batch size: 83, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:17:23,992 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8184, 3.3035, 3.3566, 3.3660, 1.3434, 3.2404, 2.8220, 3.1433], + device='cuda:1'), covar=tensor([0.1859, 0.1077, 0.0828, 0.1026, 0.5996, 0.1055, 0.0890, 0.1361], + device='cuda:1'), in_proj_covar=tensor([0.0821, 0.0786, 0.0994, 0.0877, 0.0864, 0.0762, 0.0588, 0.0927], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 13:17:36,670 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 13:18:16,905 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6354, 2.5367, 2.3354, 2.6216, 2.4226, 2.1319, 2.1858, 2.4865], + device='cuda:1'), covar=tensor([0.0992, 0.1506, 0.1427, 0.1106, 0.1363, 0.0583, 0.1484, 0.0708], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0362, 0.0321, 0.0260, 0.0307, 0.0257, 0.0323, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 13:18:26,428 INFO [train.py:903] (1/4) Epoch 29, batch 850, loss[loss=0.1698, simple_loss=0.2477, pruned_loss=0.046, over 19746.00 frames. 
], tot_loss[loss=0.2017, simple_loss=0.284, pruned_loss=0.0597, over 3772989.34 frames. ], batch size: 46, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:17,572 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 13:19:23,133 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 4.771e+02 5.573e+02 7.809e+02 2.111e+03, threshold=1.115e+03, percent-clipped=7.0 +2023-04-03 13:19:25,393 INFO [train.py:903] (1/4) Epoch 29, batch 900, loss[loss=0.2267, simple_loss=0.303, pruned_loss=0.07518, over 19504.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2841, pruned_loss=0.06016, over 3786407.05 frames. ], batch size: 64, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:43,262 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:07,631 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 13:20:09,365 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192120.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:12,966 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192123.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:26,582 INFO [train.py:903] (1/4) Epoch 29, batch 950, loss[loss=0.2124, simple_loss=0.2975, pruned_loss=0.06366, over 19694.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.05998, over 3806967.50 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:20:30,182 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 13:21:13,704 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:21:24,654 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.272e+02 6.130e+02 7.632e+02 1.213e+03, threshold=1.226e+03, percent-clipped=2.0 +2023-04-03 13:21:27,713 INFO [train.py:903] (1/4) Epoch 29, batch 1000, loss[loss=0.2028, simple_loss=0.2852, pruned_loss=0.0602, over 19513.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.05936, over 3817643.07 frames. ], batch size: 64, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:23,179 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 13:22:27,655 INFO [train.py:903] (1/4) Epoch 29, batch 1050, loss[loss=0.2322, simple_loss=0.3113, pruned_loss=0.07654, over 17407.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.282, pruned_loss=0.05969, over 3815665.84 frames. ], batch size: 101, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:31,123 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192237.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:34,316 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:23:02,374 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 13:23:25,657 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 4.755e+02 5.709e+02 7.263e+02 1.610e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 13:23:28,074 INFO [train.py:903] (1/4) Epoch 29, batch 1100, loss[loss=0.1998, simple_loss=0.2947, pruned_loss=0.05239, over 19776.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05993, over 3816287.97 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:29,303 INFO [train.py:903] (1/4) Epoch 29, batch 1150, loss[loss=0.2135, simple_loss=0.2968, pruned_loss=0.06513, over 19741.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06023, over 3803659.37 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:53,861 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5708, 1.5847, 1.9300, 1.8282, 2.6899, 2.3935, 2.8808, 1.3124], + device='cuda:1'), covar=tensor([0.2586, 0.4613, 0.2857, 0.1992, 0.1538, 0.2183, 0.1408, 0.4830], + device='cuda:1'), in_proj_covar=tensor([0.0555, 0.0672, 0.0760, 0.0511, 0.0635, 0.0545, 0.0670, 0.0574], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:25:27,744 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 4.839e+02 5.803e+02 7.271e+02 1.538e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 13:25:30,847 INFO [train.py:903] (1/4) Epoch 29, batch 1200, loss[loss=0.1989, simple_loss=0.2885, pruned_loss=0.05465, over 19663.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05996, over 3802126.80 frames. ], batch size: 58, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:01,131 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:01,920 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 13:26:10,304 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:30,211 INFO [train.py:903] (1/4) Epoch 29, batch 1250, loss[loss=0.1797, simple_loss=0.2616, pruned_loss=0.04888, over 19580.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05947, over 3818166.91 frames. ], batch size: 52, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:50,089 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192451.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:07,153 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:28,114 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.043e+02 5.075e+02 6.202e+02 7.494e+02 1.680e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 13:27:30,147 INFO [train.py:903] (1/4) Epoch 29, batch 1300, loss[loss=0.2544, simple_loss=0.3242, pruned_loss=0.09225, over 17378.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05948, over 3821628.90 frames. 
], batch size: 101, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:28:10,137 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:28:14,819 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2442, 1.2910, 1.2537, 1.0898, 1.1244, 1.0506, 0.0971, 0.4249], + device='cuda:1'), covar=tensor([0.0813, 0.0770, 0.0559, 0.0718, 0.1575, 0.0798, 0.1514, 0.1310], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0365, 0.0367, 0.0391, 0.0471, 0.0398, 0.0345, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:28:15,739 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7265, 3.4190, 2.5801, 3.0645, 1.5390, 3.3546, 3.2680, 3.3473], + device='cuda:1'), covar=tensor([0.0953, 0.1132, 0.2052, 0.0990, 0.2834, 0.0872, 0.1155, 0.1411], + device='cuda:1'), in_proj_covar=tensor([0.0531, 0.0431, 0.0519, 0.0361, 0.0410, 0.0457, 0.0452, 0.0482], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 13:28:30,905 INFO [train.py:903] (1/4) Epoch 29, batch 1350, loss[loss=0.1994, simple_loss=0.2781, pruned_loss=0.06032, over 19493.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2813, pruned_loss=0.05933, over 3831825.63 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:25,921 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192579.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:27,883 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:28,924 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.533e+02 7.774e+02 1.028e+03 2.542e+03, threshold=1.555e+03, percent-clipped=13.0 +2023-04-03 13:29:31,159 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:32,191 INFO [train.py:903] (1/4) Epoch 29, batch 1400, loss[loss=0.2234, simple_loss=0.3055, pruned_loss=0.07069, over 19384.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2813, pruned_loss=0.05887, over 3823371.60 frames. ], batch size: 70, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:42,125 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 13:29:44,071 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4676, 1.4847, 1.7049, 1.7126, 2.3821, 2.2030, 2.5409, 1.0523], + device='cuda:1'), covar=tensor([0.2619, 0.4675, 0.2997, 0.2051, 0.1709, 0.2254, 0.1549, 0.5032], + device='cuda:1'), in_proj_covar=tensor([0.0555, 0.0671, 0.0760, 0.0510, 0.0633, 0.0545, 0.0668, 0.0574], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:30:30,367 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:32,441 INFO [train.py:903] (1/4) Epoch 29, batch 1450, loss[loss=0.1812, simple_loss=0.257, pruned_loss=0.05271, over 19726.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2813, pruned_loss=0.05889, over 3826493.23 frames. 
], batch size: 46, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:30:32,470 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 13:30:32,732 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:29,879 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.834e+02 5.734e+02 7.377e+02 1.406e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 13:31:32,941 INFO [train.py:903] (1/4) Epoch 29, batch 1500, loss[loss=0.1776, simple_loss=0.2653, pruned_loss=0.04494, over 19585.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2809, pruned_loss=0.05866, over 3821689.99 frames. ], batch size: 52, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:31:41,353 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192691.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:47,403 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192696.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:50,556 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:32,341 INFO [train.py:903] (1/4) Epoch 29, batch 1550, loss[loss=0.2244, simple_loss=0.3006, pruned_loss=0.07409, over 19524.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.05982, over 3813945.24 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:32:56,579 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:57,536 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:05,626 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:31,827 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 5.130e+02 6.287e+02 7.695e+02 1.691e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-03 13:33:34,855 INFO [train.py:903] (1/4) Epoch 29, batch 1600, loss[loss=0.1823, simple_loss=0.2725, pruned_loss=0.04599, over 19660.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05933, over 3810482.84 frames. ], batch size: 53, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:33:47,550 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:51,041 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3733, 1.3647, 1.4942, 1.5413, 1.7503, 1.8843, 1.7912, 0.6160], + device='cuda:1'), covar=tensor([0.2448, 0.4291, 0.2888, 0.2021, 0.1733, 0.2340, 0.1525, 0.5210], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0669, 0.0758, 0.0510, 0.0632, 0.0545, 0.0666, 0.0574], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:33:57,595 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 13:34:34,144 INFO [train.py:903] (1/4) Epoch 29, batch 1650, loss[loss=0.2404, simple_loss=0.3173, pruned_loss=0.08174, over 19324.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2825, pruned_loss=0.05953, over 3823680.51 frames. 
], batch size: 66, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:34:35,675 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:34:51,822 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192849.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:35:04,026 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:15,785 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:23,787 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:30,386 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 4.944e+02 5.872e+02 7.824e+02 1.796e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 13:35:32,716 INFO [train.py:903] (1/4) Epoch 29, batch 1700, loss[loss=0.2085, simple_loss=0.2832, pruned_loss=0.06688, over 19618.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05982, over 3812518.11 frames. ], batch size: 50, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:35:38,395 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:40,583 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3722, 1.4488, 1.6645, 1.5739, 2.4832, 2.1020, 2.5909, 1.2486], + device='cuda:1'), covar=tensor([0.2547, 0.4481, 0.2761, 0.2125, 0.1508, 0.2365, 0.1383, 0.4659], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0671, 0.0760, 0.0510, 0.0634, 0.0546, 0.0667, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:36:04,808 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:08,234 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:13,404 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 13:36:32,294 INFO [train.py:903] (1/4) Epoch 29, batch 1750, loss[loss=0.1783, simple_loss=0.2695, pruned_loss=0.04354, over 19657.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.05963, over 3804956.79 frames. 
], batch size: 60, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:36:55,825 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:58,009 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:24,864 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:25,785 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:27,139 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:31,715 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 4.741e+02 6.117e+02 7.797e+02 1.758e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 13:37:34,997 INFO [train.py:903] (1/4) Epoch 29, batch 1800, loss[loss=0.1682, simple_loss=0.2467, pruned_loss=0.04486, over 19741.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05978, over 3805044.86 frames. ], batch size: 46, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:37:46,814 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0785, 2.0692, 1.7606, 2.1179, 1.8329, 1.7355, 1.6683, 1.9229], + device='cuda:1'), covar=tensor([0.1059, 0.1321, 0.1597, 0.1100, 0.1438, 0.0600, 0.1603, 0.0786], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0362, 0.0321, 0.0259, 0.0309, 0.0258, 0.0324, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 13:37:47,939 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6971, 1.5733, 1.6524, 2.1085, 1.7831, 1.8168, 1.8260, 1.7082], + device='cuda:1'), covar=tensor([0.0676, 0.0771, 0.0803, 0.0539, 0.0848, 0.0698, 0.0859, 0.0626], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0222, 0.0229, 0.0239, 0.0226, 0.0214, 0.0188, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 13:38:29,366 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 13:38:33,981 INFO [train.py:903] (1/4) Epoch 29, batch 1850, loss[loss=0.1816, simple_loss=0.2754, pruned_loss=0.04396, over 19668.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06009, over 3809616.09 frames. 
], batch size: 58, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:35,276 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:38:35,441 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7503, 1.9244, 2.2609, 1.9812, 2.9494, 3.3683, 3.2473, 3.5248], + device='cuda:1'), covar=tensor([0.1428, 0.3129, 0.2781, 0.2342, 0.0920, 0.0306, 0.0203, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0334, 0.0367, 0.0274, 0.0257, 0.0198, 0.0221, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 13:39:04,658 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193059.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:05,538 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 13:39:21,240 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4959, 1.6749, 2.1980, 1.8450, 3.0981, 2.2978, 3.2076, 1.8280], + device='cuda:1'), covar=tensor([0.2891, 0.4891, 0.3011, 0.2233, 0.1579, 0.2748, 0.1937, 0.4704], + device='cuda:1'), in_proj_covar=tensor([0.0556, 0.0674, 0.0763, 0.0512, 0.0637, 0.0549, 0.0669, 0.0576], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:39:28,424 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 13:39:32,458 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.498e+02 4.794e+02 5.817e+02 7.567e+02 2.266e+03, threshold=1.163e+03, percent-clipped=4.0 +2023-04-03 13:39:34,763 INFO [train.py:903] (1/4) Epoch 29, batch 1900, loss[loss=0.1918, simple_loss=0.2881, pruned_loss=0.04776, over 19694.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2843, pruned_loss=0.0601, over 3807499.77 frames. ], batch size: 59, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:39:45,981 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:49,044 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 13:39:50,327 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:55,324 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 13:40:18,684 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-03 13:40:25,443 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:32,279 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4541, 1.4346, 1.6528, 1.6223, 2.2093, 2.1354, 2.3194, 0.9194], + device='cuda:1'), covar=tensor([0.2557, 0.4687, 0.3008, 0.2099, 0.1675, 0.2315, 0.1481, 0.5100], + device='cuda:1'), in_proj_covar=tensor([0.0557, 0.0676, 0.0765, 0.0513, 0.0639, 0.0551, 0.0671, 0.0578], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:40:33,560 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:33,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 13:40:35,431 INFO [train.py:903] (1/4) Epoch 29, batch 1950, loss[loss=0.228, simple_loss=0.309, pruned_loss=0.07348, over 19569.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2829, pruned_loss=0.05918, over 3817998.95 frames. ], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:40:56,425 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:56,463 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:04,201 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:15,373 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:34,710 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.104e+02 4.788e+02 5.811e+02 7.006e+02 1.429e+03, threshold=1.162e+03, percent-clipped=3.0 +2023-04-03 13:41:37,846 INFO [train.py:903] (1/4) Epoch 29, batch 2000, loss[loss=0.1777, simple_loss=0.2688, pruned_loss=0.04334, over 19766.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2815, pruned_loss=0.05851, over 3818905.20 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:41:46,094 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:48,027 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193193.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:42:10,521 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:31,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 13:42:31,630 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193229.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:37,085 INFO [train.py:903] (1/4) Epoch 29, batch 2050, loss[loss=0.2067, simple_loss=0.2945, pruned_loss=0.05952, over 19667.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05942, over 3815833.07 frames. ], batch size: 58, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:42:50,736 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. 
Duration: 0.95 +2023-04-03 13:42:50,761 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 13:43:00,864 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6548, 0.8970, 1.3134, 1.4907, 3.0013, 1.1822, 2.5139, 3.5433], + device='cuda:1'), covar=tensor([0.0742, 0.4217, 0.3764, 0.2424, 0.1190, 0.3127, 0.1497, 0.0410], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0382, 0.0402, 0.0357, 0.0386, 0.0362, 0.0401, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 13:43:13,503 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 13:43:34,576 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.912e+02 6.403e+02 8.614e+02 2.001e+03, threshold=1.281e+03, percent-clipped=8.0 +2023-04-03 13:43:36,896 INFO [train.py:903] (1/4) Epoch 29, batch 2100, loss[loss=0.1982, simple_loss=0.2847, pruned_loss=0.05584, over 19661.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05973, over 3822370.41 frames. ], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:43:40,750 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8898, 1.3590, 1.0875, 1.0647, 1.1926, 1.0301, 0.9432, 1.2839], + device='cuda:1'), covar=tensor([0.0786, 0.0965, 0.1151, 0.0875, 0.0662, 0.1463, 0.0659, 0.0548], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0318, 0.0342, 0.0272, 0.0253, 0.0346, 0.0293, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 13:43:59,250 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:44:07,582 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 13:44:07,929 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193308.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:44:25,614 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 13:44:37,260 INFO [train.py:903] (1/4) Epoch 29, batch 2150, loss[loss=0.1926, simple_loss=0.266, pruned_loss=0.05961, over 19356.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2825, pruned_loss=0.06015, over 3836632.83 frames. ], batch size: 47, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:44:56,808 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193349.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:25,877 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:35,169 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.242e+02 6.391e+02 8.913e+02 2.261e+03, threshold=1.278e+03, percent-clipped=9.0 +2023-04-03 13:45:37,407 INFO [train.py:903] (1/4) Epoch 29, batch 2200, loss[loss=0.2089, simple_loss=0.2902, pruned_loss=0.06385, over 19627.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.06011, over 3835600.97 frames. 
], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:00,710 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:04,394 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:29,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 13:46:36,073 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:39,103 INFO [train.py:903] (1/4) Epoch 29, batch 2250, loss[loss=0.1898, simple_loss=0.2698, pruned_loss=0.05486, over 19843.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.06011, over 3847623.40 frames. ], batch size: 52, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:40,737 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3431, 1.9317, 1.9710, 2.9479, 2.0298, 2.5747, 2.6246, 2.2863], + device='cuda:1'), covar=tensor([0.0761, 0.0897, 0.1002, 0.0731, 0.0848, 0.0708, 0.0813, 0.0646], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0223, 0.0229, 0.0240, 0.0227, 0.0214, 0.0188, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 13:46:45,248 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:15,470 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 13:47:20,667 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:25,760 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193472.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:47:36,426 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 4.884e+02 5.894e+02 7.303e+02 1.661e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 13:47:38,736 INFO [train.py:903] (1/4) Epoch 29, batch 2300, loss[loss=0.1865, simple_loss=0.2739, pruned_loss=0.0496, over 19664.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2827, pruned_loss=0.06054, over 3847958.61 frames. ], batch size: 53, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:47:39,134 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2761, 2.1594, 1.9872, 1.8780, 1.6406, 1.8462, 0.6940, 1.2098], + device='cuda:1'), covar=tensor([0.0650, 0.0715, 0.0563, 0.0961, 0.1227, 0.1080, 0.1468, 0.1243], + device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0367, 0.0371, 0.0395, 0.0475, 0.0401, 0.0349, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 13:47:50,425 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:53,665 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 13:48:12,804 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-04-03 13:48:21,010 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193518.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:48:39,106 INFO [train.py:903] (1/4) Epoch 29, batch 2350, loss[loss=0.227, simple_loss=0.3132, pruned_loss=0.07047, over 19641.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2824, pruned_loss=0.06032, over 3850050.84 frames. ], batch size: 57, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:16,316 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193564.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:49:20,451 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 13:49:27,314 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:49:36,484 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 13:49:37,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.546e+02 5.901e+02 7.828e+02 1.716e+03, threshold=1.180e+03, percent-clipped=11.0 +2023-04-03 13:49:40,757 INFO [train.py:903] (1/4) Epoch 29, batch 2400, loss[loss=0.1895, simple_loss=0.2824, pruned_loss=0.04825, over 19676.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.0601, over 3836967.20 frames. ], batch size: 53, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:47,421 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193589.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:49:52,738 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1449, 2.2180, 2.4809, 2.7668, 2.1821, 2.6599, 2.4792, 2.3199], + device='cuda:1'), covar=tensor([0.4094, 0.3900, 0.1866, 0.2427, 0.3962, 0.2189, 0.4785, 0.3301], + device='cuda:1'), in_proj_covar=tensor([0.0950, 0.1031, 0.0753, 0.0962, 0.0927, 0.0866, 0.0870, 0.0817], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:50:14,632 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6897, 1.2595, 1.3232, 1.5817, 1.1339, 1.4445, 1.2917, 1.5129], + device='cuda:1'), covar=tensor([0.1144, 0.1231, 0.1638, 0.0958, 0.1359, 0.0620, 0.1652, 0.0879], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0362, 0.0322, 0.0259, 0.0309, 0.0259, 0.0324, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 13:50:40,470 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:50:41,249 INFO [train.py:903] (1/4) Epoch 29, batch 2450, loss[loss=0.1926, simple_loss=0.268, pruned_loss=0.05857, over 19733.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2829, pruned_loss=0.06022, over 3846165.61 frames. 
], batch size: 51, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:50:54,654 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:39,133 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.882e+02 5.957e+02 8.043e+02 1.393e+03, threshold=1.191e+03, percent-clipped=5.0 +2023-04-03 13:51:41,319 INFO [train.py:903] (1/4) Epoch 29, batch 2500, loss[loss=0.3255, simple_loss=0.3728, pruned_loss=0.1391, over 13323.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2836, pruned_loss=0.0608, over 3829593.53 frames. ], batch size: 135, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:51:45,922 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193688.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:50,505 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0694, 1.2422, 1.7740, 1.0515, 2.5111, 3.3766, 3.0869, 3.5742], + device='cuda:1'), covar=tensor([0.1723, 0.3978, 0.3429, 0.2839, 0.0708, 0.0201, 0.0230, 0.0290], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0334, 0.0366, 0.0273, 0.0257, 0.0198, 0.0220, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 13:52:40,697 INFO [train.py:903] (1/4) Epoch 29, batch 2550, loss[loss=0.204, simple_loss=0.2869, pruned_loss=0.06052, over 19324.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06088, over 3822648.27 frames. ], batch size: 66, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:52:46,734 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9776, 1.8719, 1.7185, 2.0125, 1.7762, 1.7256, 1.6813, 1.8950], + device='cuda:1'), covar=tensor([0.1025, 0.1298, 0.1409, 0.1031, 0.1286, 0.0543, 0.1416, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0361, 0.0320, 0.0259, 0.0308, 0.0258, 0.0323, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 13:52:54,644 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-03 13:53:09,924 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 13:53:15,297 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:30,092 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:35,068 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 13:53:40,999 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 5.186e+02 6.141e+02 8.140e+02 1.584e+03, threshold=1.228e+03, percent-clipped=10.0 +2023-04-03 13:53:41,155 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:42,120 INFO [train.py:903] (1/4) Epoch 29, batch 2600, loss[loss=0.2172, simple_loss=0.3008, pruned_loss=0.06681, over 18710.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06109, over 3804360.17 frames. 
], batch size: 74, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:02,270 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193799.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:54:22,059 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193816.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:54:44,186 INFO [train.py:903] (1/4) Epoch 29, batch 2650, loss[loss=0.2079, simple_loss=0.2952, pruned_loss=0.06025, over 19523.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.0613, over 3783645.40 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:59,844 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 13:55:38,465 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 13:55:43,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 4.600e+02 5.538e+02 7.187e+02 1.315e+03, threshold=1.108e+03, percent-clipped=1.0 +2023-04-03 13:55:44,725 INFO [train.py:903] (1/4) Epoch 29, batch 2700, loss[loss=0.1515, simple_loss=0.2379, pruned_loss=0.03253, over 19739.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2831, pruned_loss=0.06025, over 3796290.84 frames. ], batch size: 46, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:55:55,015 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4828, 1.5524, 1.7902, 1.7252, 2.5487, 2.2642, 2.7788, 1.2299], + device='cuda:1'), covar=tensor([0.2630, 0.4541, 0.2895, 0.2053, 0.1594, 0.2256, 0.1410, 0.4782], + device='cuda:1'), in_proj_covar=tensor([0.0557, 0.0676, 0.0764, 0.0512, 0.0640, 0.0550, 0.0672, 0.0577], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:56:00,526 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:05,882 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193902.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:42,116 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193931.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:56:45,192 INFO [train.py:903] (1/4) Epoch 29, batch 2750, loss[loss=0.2243, simple_loss=0.2977, pruned_loss=0.0754, over 13371.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.06018, over 3798585.75 frames. 
], batch size: 135, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:56:58,028 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:27,795 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193969.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:29,114 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4353, 1.5798, 1.8289, 1.7181, 2.4551, 2.1370, 2.6409, 0.9680], + device='cuda:1'), covar=tensor([0.2654, 0.4409, 0.2899, 0.2047, 0.1586, 0.2417, 0.1437, 0.5172], + device='cuda:1'), in_proj_covar=tensor([0.0556, 0.0674, 0.0762, 0.0511, 0.0638, 0.0549, 0.0671, 0.0576], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 13:57:36,659 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:44,120 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.680e+02 5.858e+02 7.532e+02 1.982e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-03 13:57:45,309 INFO [train.py:903] (1/4) Epoch 29, batch 2800, loss[loss=0.1794, simple_loss=0.2728, pruned_loss=0.04302, over 19648.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06016, over 3813123.99 frames. ], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:07,523 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3209, 1.4402, 1.8918, 1.4298, 2.7192, 3.8008, 3.5338, 3.9811], + device='cuda:1'), covar=tensor([0.1524, 0.3737, 0.3188, 0.2493, 0.0696, 0.0184, 0.0194, 0.0271], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0334, 0.0366, 0.0273, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 13:58:26,686 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:58:48,030 INFO [train.py:903] (1/4) Epoch 29, batch 2850, loss[loss=0.2044, simple_loss=0.2807, pruned_loss=0.06409, over 19595.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2813, pruned_loss=0.05886, over 3833168.97 frames. ], batch size: 50, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:58,403 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:45,132 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 13:59:47,409 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.652e+02 5.661e+02 7.187e+02 2.087e+03, threshold=1.132e+03, percent-clipped=4.0 +2023-04-03 13:59:48,549 INFO [train.py:903] (1/4) Epoch 29, batch 2900, loss[loss=0.1937, simple_loss=0.2826, pruned_loss=0.05237, over 19660.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2823, pruned_loss=0.05914, over 3825958.81 frames. 
], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:59:55,297 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:57,596 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:00:47,076 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1272, 3.0899, 1.8522, 1.9465, 2.7837, 1.7456, 1.6623, 2.3004], + device='cuda:1'), covar=tensor([0.1378, 0.0693, 0.1163, 0.0906, 0.0549, 0.1332, 0.0977, 0.0720], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0320, 0.0344, 0.0274, 0.0253, 0.0347, 0.0292, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:00:47,879 INFO [train.py:903] (1/4) Epoch 29, batch 2950, loss[loss=0.1934, simple_loss=0.2837, pruned_loss=0.05156, over 18279.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.05937, over 3821341.18 frames. ], batch size: 83, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:13,340 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:37,556 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 14:01:42,871 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:46,908 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 4.745e+02 5.839e+02 7.587e+02 1.918e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-04-03 14:01:48,092 INFO [train.py:903] (1/4) Epoch 29, batch 3000, loss[loss=0.1894, simple_loss=0.269, pruned_loss=0.05492, over 19703.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2821, pruned_loss=0.05938, over 3821191.13 frames. ], batch size: 51, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:48,092 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 14:02:01,075 INFO [train.py:937] (1/4) Epoch 29, validation: loss=0.1668, simple_loss=0.2661, pruned_loss=0.03375, over 944034.00 frames. +2023-04-03 14:02:01,077 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 14:02:02,339 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 14:02:05,060 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194187.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:02:36,788 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194212.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:03:01,745 INFO [train.py:903] (1/4) Epoch 29, batch 3050, loss[loss=0.1676, simple_loss=0.2428, pruned_loss=0.04625, over 19362.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05902, over 3819588.42 frames. 
], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:03:17,757 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194246.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:04:02,892 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 5.546e+02 6.449e+02 7.988e+02 2.570e+03, threshold=1.290e+03, percent-clipped=8.0 +2023-04-03 14:04:03,996 INFO [train.py:903] (1/4) Epoch 29, batch 3100, loss[loss=0.1906, simple_loss=0.2809, pruned_loss=0.0501, over 19667.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.0591, over 3828090.83 frames. ], batch size: 60, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:04,146 INFO [train.py:903] (1/4) Epoch 29, batch 3150, loss[loss=0.212, simple_loss=0.2831, pruned_loss=0.07047, over 19756.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2821, pruned_loss=0.05929, over 3835237.94 frames. ], batch size: 45, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:06,679 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:19,948 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:27,479 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 14:05:36,313 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:49,268 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194371.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:51,742 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:59,650 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194380.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:06:02,722 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.902e+02 5.790e+02 7.151e+02 3.080e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-03 14:06:03,882 INFO [train.py:903] (1/4) Epoch 29, batch 3200, loss[loss=0.1922, simple_loss=0.2671, pruned_loss=0.05862, over 19336.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2822, pruned_loss=0.05944, over 3829062.13 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:04,910 INFO [train.py:903] (1/4) Epoch 29, batch 3250, loss[loss=0.1656, simple_loss=0.2447, pruned_loss=0.04321, over 19772.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05929, over 3823040.42 frames. 
], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:05,064 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194434.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:07:23,391 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9948, 3.6647, 2.5259, 3.2707, 0.7321, 3.6468, 3.4750, 3.6014], + device='cuda:1'), covar=tensor([0.0815, 0.1128, 0.2002, 0.0965, 0.4111, 0.0769, 0.1096, 0.1288], + device='cuda:1'), in_proj_covar=tensor([0.0532, 0.0429, 0.0519, 0.0357, 0.0410, 0.0458, 0.0452, 0.0483], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:08:04,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.916e+02 6.594e+02 9.076e+02 2.390e+03, threshold=1.319e+03, percent-clipped=9.0 +2023-04-03 14:08:05,462 INFO [train.py:903] (1/4) Epoch 29, batch 3300, loss[loss=0.1803, simple_loss=0.2509, pruned_loss=0.05491, over 19399.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05954, over 3825223.89 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:08:09,431 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 14:09:07,386 INFO [train.py:903] (1/4) Epoch 29, batch 3350, loss[loss=0.2109, simple_loss=0.2811, pruned_loss=0.07031, over 19743.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.282, pruned_loss=0.05973, over 3822349.70 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:09:24,594 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:10:05,401 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6990, 1.7093, 1.6021, 1.4068, 1.3607, 1.4077, 0.2900, 0.6949], + device='cuda:1'), covar=tensor([0.0776, 0.0702, 0.0477, 0.0771, 0.1313, 0.0877, 0.1465, 0.1266], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0368, 0.0373, 0.0396, 0.0477, 0.0402, 0.0350, 0.0352], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 14:10:06,126 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.166e+02 6.518e+02 8.363e+02 1.379e+03, threshold=1.304e+03, percent-clipped=1.0 +2023-04-03 14:10:07,289 INFO [train.py:903] (1/4) Epoch 29, batch 3400, loss[loss=0.2259, simple_loss=0.3049, pruned_loss=0.07346, over 17342.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2816, pruned_loss=0.05985, over 3813868.46 frames. ], batch size: 101, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:10:48,233 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194617.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:06,749 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 14:11:07,040 INFO [train.py:903] (1/4) Epoch 29, batch 3450, loss[loss=0.196, simple_loss=0.2722, pruned_loss=0.05993, over 19726.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2818, pruned_loss=0.0597, over 3823730.21 frames. ], batch size: 51, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:11:09,269 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-03 14:11:17,003 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:29,965 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:01,641 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194680.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:05,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.627e+02 5.712e+02 7.363e+02 1.612e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 14:12:06,670 INFO [train.py:903] (1/4) Epoch 29, batch 3500, loss[loss=0.199, simple_loss=0.2733, pruned_loss=0.0624, over 19470.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2827, pruned_loss=0.06027, over 3824303.89 frames. ], batch size: 49, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:12:43,647 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194715.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:54,403 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194724.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:12:55,741 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5192, 1.5715, 1.8519, 1.7879, 2.7479, 2.3619, 2.9589, 1.2208], + device='cuda:1'), covar=tensor([0.2612, 0.4543, 0.3019, 0.2015, 0.1448, 0.2188, 0.1366, 0.4835], + device='cuda:1'), in_proj_covar=tensor([0.0556, 0.0672, 0.0760, 0.0510, 0.0635, 0.0547, 0.0670, 0.0574], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 14:13:06,874 INFO [train.py:903] (1/4) Epoch 29, batch 3550, loss[loss=0.2733, simple_loss=0.3433, pruned_loss=0.1017, over 19754.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06057, over 3822475.30 frames. ], batch size: 63, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:06,359 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.806e+02 5.826e+02 6.989e+02 1.690e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 14:14:07,571 INFO [train.py:903] (1/4) Epoch 29, batch 3600, loss[loss=0.1909, simple_loss=0.276, pruned_loss=0.05291, over 19680.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.0601, over 3822471.27 frames. ], batch size: 59, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:20,128 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:14:33,714 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194805.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:14:37,434 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-03 14:15:02,712 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,752 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:06,686 INFO [train.py:903] (1/4) Epoch 29, batch 3650, loss[loss=0.2211, simple_loss=0.3037, pruned_loss=0.06919, over 19669.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.06012, over 3803328.41 frames. 
], batch size: 60, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:15:12,771 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194839.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:15:45,377 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:16:05,571 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.663e+02 5.135e+02 6.261e+02 7.520e+02 2.080e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 14:16:06,488 INFO [train.py:903] (1/4) Epoch 29, batch 3700, loss[loss=0.275, simple_loss=0.3438, pruned_loss=0.1031, over 19756.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06051, over 3805676.93 frames. ], batch size: 63, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:17:08,165 INFO [train.py:903] (1/4) Epoch 29, batch 3750, loss[loss=0.1772, simple_loss=0.2683, pruned_loss=0.04303, over 19613.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.06071, over 3811819.62 frames. ], batch size: 57, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:06,324 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 4.741e+02 6.225e+02 7.602e+02 2.236e+03, threshold=1.245e+03, percent-clipped=10.0 +2023-04-03 14:18:07,440 INFO [train.py:903] (1/4) Epoch 29, batch 3800, loss[loss=0.2062, simple_loss=0.2919, pruned_loss=0.06025, over 19339.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2843, pruned_loss=0.06088, over 3802667.90 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:21,207 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:18:34,663 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 14:19:01,231 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195029.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:06,512 INFO [train.py:903] (1/4) Epoch 29, batch 3850, loss[loss=0.2676, simple_loss=0.3411, pruned_loss=0.097, over 19523.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.0607, over 3813242.34 frames. 
], batch size: 64, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:19:27,245 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195051.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:41,483 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195062.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:19:43,970 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3041, 2.3130, 2.6312, 2.9652, 2.3558, 2.8688, 2.5825, 2.3771], + device='cuda:1'), covar=tensor([0.4297, 0.4162, 0.1830, 0.2655, 0.4364, 0.2210, 0.4802, 0.3285], + device='cuda:1'), in_proj_covar=tensor([0.0951, 0.1030, 0.0754, 0.0964, 0.0928, 0.0866, 0.0869, 0.0818], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 14:19:56,971 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195076.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:06,400 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.743e+02 5.822e+02 7.676e+02 1.767e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-04-03 14:20:06,418 INFO [train.py:903] (1/4) Epoch 29, batch 3900, loss[loss=0.2285, simple_loss=0.3058, pruned_loss=0.07562, over 19670.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2845, pruned_loss=0.06085, over 3820802.94 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:20:08,952 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:13,189 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:21,686 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195095.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:20:23,804 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.7929, 4.9548, 5.5827, 5.5939, 2.0918, 5.2661, 4.4864, 5.3185], + device='cuda:1'), covar=tensor([0.1819, 0.0994, 0.0570, 0.0686, 0.6126, 0.0924, 0.0685, 0.1139], + device='cuda:1'), in_proj_covar=tensor([0.0826, 0.0794, 0.1004, 0.0880, 0.0871, 0.0769, 0.0594, 0.0935], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 14:20:39,895 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:39,935 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:49,223 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-03 14:20:50,182 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195120.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:21:07,592 INFO [train.py:903] (1/4) Epoch 29, batch 3950, loss[loss=0.2051, simple_loss=0.2876, pruned_loss=0.06131, over 19785.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.06004, over 3821583.96 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:21:12,684 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 14:21:43,433 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2585, 1.5215, 1.9499, 1.6123, 3.0567, 4.6268, 4.5471, 5.0534], + device='cuda:1'), covar=tensor([0.1706, 0.3826, 0.3492, 0.2461, 0.0656, 0.0195, 0.0184, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0273, 0.0258, 0.0198, 0.0221, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 14:22:08,284 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.632e+02 5.828e+02 7.591e+02 1.503e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 14:22:08,302 INFO [train.py:903] (1/4) Epoch 29, batch 4000, loss[loss=0.1948, simple_loss=0.2843, pruned_loss=0.05264, over 19482.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05939, over 3817884.49 frames. ], batch size: 64, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:22:38,703 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:22:52,711 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 14:23:07,863 INFO [train.py:903] (1/4) Epoch 29, batch 4050, loss[loss=0.2037, simple_loss=0.2886, pruned_loss=0.05939, over 19348.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.05914, over 3819770.98 frames. ], batch size: 70, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:23:19,843 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:23:24,411 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-03 14:24:08,395 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.197e+02 5.118e+02 6.147e+02 8.377e+02 1.783e+03, threshold=1.229e+03, percent-clipped=5.0 +2023-04-03 14:24:08,413 INFO [train.py:903] (1/4) Epoch 29, batch 4100, loss[loss=0.2131, simple_loss=0.2929, pruned_loss=0.06662, over 19487.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05904, over 3807374.65 frames. ], batch size: 64, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:41,888 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 14:24:56,223 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:08,838 INFO [train.py:903] (1/4) Epoch 29, batch 4150, loss[loss=0.1995, simple_loss=0.2776, pruned_loss=0.06066, over 19682.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2817, pruned_loss=0.05881, over 3822570.60 frames. ], batch size: 60, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:25:49,116 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:56,297 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:10,052 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 4.925e+02 6.547e+02 8.865e+02 3.200e+03, threshold=1.309e+03, percent-clipped=9.0 +2023-04-03 14:26:10,070 INFO [train.py:903] (1/4) Epoch 29, batch 4200, loss[loss=0.1754, simple_loss=0.2544, pruned_loss=0.04821, over 18751.00 frames. 
], tot_loss[loss=0.1987, simple_loss=0.2808, pruned_loss=0.05826, over 3828601.01 frames. ], batch size: 41, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:26:13,440 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 14:26:19,416 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:35,640 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195406.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:26:40,122 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:00,193 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-03 14:27:07,660 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:08,634 INFO [train.py:903] (1/4) Epoch 29, batch 4250, loss[loss=0.2063, simple_loss=0.2717, pruned_loss=0.07047, over 19753.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2808, pruned_loss=0.05876, over 3844092.85 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:27:22,556 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 14:27:32,649 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 14:28:08,329 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 4.660e+02 5.852e+02 7.750e+02 1.648e+03, threshold=1.170e+03, percent-clipped=3.0 +2023-04-03 14:28:08,347 INFO [train.py:903] (1/4) Epoch 29, batch 4300, loss[loss=0.1669, simple_loss=0.2489, pruned_loss=0.04243, over 19746.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05852, over 3843657.94 frames. ], batch size: 45, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:28:14,115 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195488.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:28:54,535 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195521.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:29:01,485 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 14:29:09,900 INFO [train.py:903] (1/4) Epoch 29, batch 4350, loss[loss=0.1924, simple_loss=0.2704, pruned_loss=0.0572, over 19611.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2811, pruned_loss=0.05913, over 3830473.00 frames. 
], batch size: 50, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:29:28,095 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:29:58,861 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4689, 1.5612, 2.2129, 1.8419, 3.2423, 4.8956, 4.6739, 5.1844], + device='cuda:1'), covar=tensor([0.1610, 0.3835, 0.3230, 0.2239, 0.0574, 0.0178, 0.0166, 0.0196], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0334, 0.0366, 0.0272, 0.0257, 0.0198, 0.0220, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 14:30:05,583 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:10,384 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 4.994e+02 6.103e+02 7.630e+02 1.747e+03, threshold=1.221e+03, percent-clipped=1.0 +2023-04-03 14:30:10,402 INFO [train.py:903] (1/4) Epoch 29, batch 4400, loss[loss=0.229, simple_loss=0.3124, pruned_loss=0.07278, over 19559.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05971, over 3837914.88 frames. ], batch size: 61, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:30:29,901 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 14:30:35,591 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:38,555 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 14:30:40,905 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6005, 1.1779, 1.3967, 1.2995, 2.2677, 1.1848, 2.1830, 2.4993], + device='cuda:1'), covar=tensor([0.0673, 0.2835, 0.2851, 0.1698, 0.0813, 0.1986, 0.1006, 0.0441], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0386, 0.0360, 0.0400, 0.0420], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:30:45,375 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:31:09,645 INFO [train.py:903] (1/4) Epoch 29, batch 4450, loss[loss=0.1789, simple_loss=0.2559, pruned_loss=0.05101, over 18247.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06044, over 3818080.34 frames. ], batch size: 40, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:32:08,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.083e+02 5.988e+02 7.824e+02 1.664e+03, threshold=1.198e+03, percent-clipped=3.0 +2023-04-03 14:32:08,790 INFO [train.py:903] (1/4) Epoch 29, batch 4500, loss[loss=0.1841, simple_loss=0.2627, pruned_loss=0.05273, over 19584.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06052, over 3822020.92 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:10,063 INFO [train.py:903] (1/4) Epoch 29, batch 4550, loss[loss=0.2414, simple_loss=0.3206, pruned_loss=0.08108, over 19728.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.06115, over 3808279.78 frames. ], batch size: 63, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:15,627 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-03 14:33:21,588 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:33,983 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:39,337 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 14:33:48,577 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.7845, 4.2758, 4.4733, 4.4971, 1.6595, 4.2066, 3.7130, 4.2160], + device='cuda:1'), covar=tensor([0.1668, 0.0892, 0.0621, 0.0682, 0.6102, 0.0963, 0.0684, 0.1129], + device='cuda:1'), in_proj_covar=tensor([0.0826, 0.0796, 0.1005, 0.0880, 0.0872, 0.0771, 0.0593, 0.0937], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 14:33:51,771 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195769.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:34:01,454 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195777.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:34:08,709 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.047e+02 5.905e+02 8.200e+02 1.793e+03, threshold=1.181e+03, percent-clipped=7.0 +2023-04-03 14:34:08,728 INFO [train.py:903] (1/4) Epoch 29, batch 4600, loss[loss=0.2004, simple_loss=0.2981, pruned_loss=0.0513, over 19669.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2839, pruned_loss=0.06062, over 3814381.82 frames. ], batch size: 58, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:34:31,273 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195802.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:34:33,629 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:05,198 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:10,613 INFO [train.py:903] (1/4) Epoch 29, batch 4650, loss[loss=0.162, simple_loss=0.2456, pruned_loss=0.03919, over 19354.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06066, over 3811187.21 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:35:22,166 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 14:35:33,357 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 14:35:54,034 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:36:10,278 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.073e+02 5.115e+02 6.253e+02 8.443e+02 2.371e+03, threshold=1.251e+03, percent-clipped=7.0 +2023-04-03 14:36:10,296 INFO [train.py:903] (1/4) Epoch 29, batch 4700, loss[loss=0.2372, simple_loss=0.3078, pruned_loss=0.08333, over 13123.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06069, over 3809743.24 frames. ], batch size: 135, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:36:29,173 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 14:36:29,929 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.40 vs. limit=2.0 +2023-04-03 14:37:11,407 INFO [train.py:903] (1/4) Epoch 29, batch 4750, loss[loss=0.2449, simple_loss=0.3185, pruned_loss=0.08566, over 18184.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.06036, over 3818001.12 frames. ], batch size: 83, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:38:11,675 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.980e+02 6.350e+02 8.555e+02 2.103e+03, threshold=1.270e+03, percent-clipped=3.0 +2023-04-03 14:38:11,694 INFO [train.py:903] (1/4) Epoch 29, batch 4800, loss[loss=0.1702, simple_loss=0.2584, pruned_loss=0.041, over 19864.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.06016, over 3812993.58 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:38:28,761 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 14:39:14,512 INFO [train.py:903] (1/4) Epoch 29, batch 4850, loss[loss=0.1691, simple_loss=0.2517, pruned_loss=0.04324, over 19378.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05969, over 3824312.12 frames. ], batch size: 47, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:31,311 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-03 14:39:37,082 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 14:39:55,388 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 14:40:01,747 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 14:40:02,668 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 14:40:11,417 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 14:40:13,827 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 4.641e+02 5.767e+02 7.663e+02 1.512e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-03 14:40:13,845 INFO [train.py:903] (1/4) Epoch 29, batch 4900, loss[loss=0.1909, simple_loss=0.2618, pruned_loss=0.06001, over 15283.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05991, over 3821335.99 frames. ], batch size: 33, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:40:32,811 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 14:40:44,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5304, 2.2424, 1.7044, 1.5353, 2.0300, 1.3411, 1.4523, 1.9347], + device='cuda:1'), covar=tensor([0.1130, 0.0820, 0.1159, 0.0951, 0.0672, 0.1379, 0.0791, 0.0540], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0321, 0.0344, 0.0274, 0.0255, 0.0348, 0.0292, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:41:03,530 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=196125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:14,060 INFO [train.py:903] (1/4) Epoch 29, batch 4950, loss[loss=0.243, simple_loss=0.3187, pruned_loss=0.08363, over 19669.00 frames. 
], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06007, over 3816914.38 frames. ], batch size: 58, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:41:14,800 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 14:41:16,697 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5450, 1.5546, 1.8090, 1.6972, 3.2251, 1.3434, 2.5667, 3.5549], + device='cuda:1'), covar=tensor([0.0492, 0.2715, 0.2609, 0.1870, 0.0597, 0.2420, 0.1374, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0384, 0.0404, 0.0358, 0.0387, 0.0361, 0.0402, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:41:31,268 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 14:41:33,828 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=196150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:54,193 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 14:41:58,582 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8843, 1.4473, 1.7019, 1.6826, 3.4788, 1.3018, 2.4309, 3.9841], + device='cuda:1'), covar=tensor([0.0462, 0.2881, 0.2717, 0.1888, 0.0665, 0.2492, 0.1414, 0.0182], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0385, 0.0404, 0.0358, 0.0387, 0.0362, 0.0403, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:42:14,317 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.427e+02 4.864e+02 6.197e+02 7.725e+02 1.739e+03, threshold=1.239e+03, percent-clipped=10.0 +2023-04-03 14:42:14,335 INFO [train.py:903] (1/4) Epoch 29, batch 5000, loss[loss=0.2113, simple_loss=0.3016, pruned_loss=0.06047, over 18331.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05994, over 3829666.99 frames. ], batch size: 84, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:42:14,674 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3483, 1.3342, 1.7827, 1.4138, 2.8728, 3.6788, 3.3834, 3.8287], + device='cuda:1'), covar=tensor([0.1682, 0.4061, 0.3653, 0.2768, 0.0658, 0.0229, 0.0228, 0.0313], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0258, 0.0199, 0.0221, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 14:42:16,057 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.37 vs. limit=5.0 +2023-04-03 14:42:23,515 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 14:42:35,047 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 14:42:54,571 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.84 vs. limit=5.0 +2023-04-03 14:43:14,837 INFO [train.py:903] (1/4) Epoch 29, batch 5050, loss[loss=0.1739, simple_loss=0.2505, pruned_loss=0.04865, over 19062.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2818, pruned_loss=0.05923, over 3825744.29 frames. ], batch size: 42, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:43:49,699 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 14:44:15,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 4.774e+02 5.782e+02 7.176e+02 1.455e+03, threshold=1.156e+03, percent-clipped=5.0 +2023-04-03 14:44:15,628 INFO [train.py:903] (1/4) Epoch 29, batch 5100, loss[loss=0.2549, simple_loss=0.3238, pruned_loss=0.09305, over 12476.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05976, over 3805282.61 frames. ], batch size: 136, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:44:24,662 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 14:44:27,971 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 14:44:32,505 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 14:45:16,433 INFO [train.py:903] (1/4) Epoch 29, batch 5150, loss[loss=0.1836, simple_loss=0.2618, pruned_loss=0.05266, over 19750.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2821, pruned_loss=0.05958, over 3805523.48 frames. ], batch size: 45, lr: 2.81e-03, grad_scale: 4.0 +2023-04-03 14:45:17,953 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.2498, 2.8758, 2.1877, 2.2401, 2.1782, 2.5203, 1.0867, 2.0444], + device='cuda:1'), covar=tensor([0.0710, 0.0671, 0.0784, 0.1331, 0.1105, 0.1239, 0.1526, 0.1206], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0473, 0.0398, 0.0347, 0.0349], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 14:45:23,032 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:45:26,216 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 14:45:59,984 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 14:46:10,215 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:46:15,862 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3892, 2.4327, 2.6484, 3.1401, 2.4468, 3.0620, 2.6041, 2.4433], + device='cuda:1'), covar=tensor([0.4430, 0.3931, 0.1971, 0.2630, 0.4500, 0.2230, 0.5377, 0.3502], + device='cuda:1'), in_proj_covar=tensor([0.0945, 0.1023, 0.0748, 0.0958, 0.0923, 0.0862, 0.0863, 0.0812], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 14:46:17,530 INFO [train.py:903] (1/4) Epoch 29, batch 5200, loss[loss=0.2021, simple_loss=0.2846, pruned_loss=0.05984, over 19402.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2829, pruned_loss=0.05992, over 3799622.89 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:46:18,467 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 5.187e+02 6.337e+02 8.295e+02 1.863e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 14:46:28,775 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 14:47:12,666 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-03 14:47:19,216 INFO [train.py:903] (1/4) Epoch 29, batch 5250, loss[loss=0.187, simple_loss=0.2691, pruned_loss=0.05251, over 19757.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.283, pruned_loss=0.05988, over 3793087.65 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:47:43,271 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8076, 1.9083, 2.2180, 2.2954, 1.7973, 2.2593, 2.1444, 1.9965], + device='cuda:1'), covar=tensor([0.4384, 0.3971, 0.2009, 0.2465, 0.4167, 0.2320, 0.5242, 0.3585], + device='cuda:1'), in_proj_covar=tensor([0.0949, 0.1026, 0.0751, 0.0961, 0.0926, 0.0864, 0.0866, 0.0815], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 14:48:07,755 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7968, 1.9077, 2.1942, 2.2645, 1.7712, 2.2344, 2.1582, 1.9747], + device='cuda:1'), covar=tensor([0.4136, 0.3722, 0.1933, 0.2482, 0.3952, 0.2216, 0.4906, 0.3463], + device='cuda:1'), in_proj_covar=tensor([0.0948, 0.1025, 0.0750, 0.0960, 0.0925, 0.0864, 0.0865, 0.0814], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 14:48:20,444 INFO [train.py:903] (1/4) Epoch 29, batch 5300, loss[loss=0.2106, simple_loss=0.2926, pruned_loss=0.06435, over 19580.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2824, pruned_loss=0.05942, over 3801135.80 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:48:21,521 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.792e+02 5.709e+02 7.130e+02 1.478e+03, threshold=1.142e+03, percent-clipped=2.0 +2023-04-03 14:48:35,845 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 14:49:20,274 INFO [train.py:903] (1/4) Epoch 29, batch 5350, loss[loss=0.1707, simple_loss=0.2586, pruned_loss=0.04139, over 19535.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.05956, over 3794906.95 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:49:51,985 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 14:50:09,932 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3807, 3.1022, 2.2288, 2.8041, 0.7625, 3.1118, 2.9972, 3.0539], + device='cuda:1'), covar=tensor([0.1117, 0.1354, 0.2122, 0.1160, 0.3877, 0.0965, 0.1137, 0.1594], + device='cuda:1'), in_proj_covar=tensor([0.0534, 0.0432, 0.0518, 0.0357, 0.0409, 0.0457, 0.0452, 0.0487], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:50:21,630 INFO [train.py:903] (1/4) Epoch 29, batch 5400, loss[loss=0.1836, simple_loss=0.2551, pruned_loss=0.05601, over 19783.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.06001, over 3801795.90 frames. 
], batch size: 47, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:50:22,761 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.834e+02 5.946e+02 7.840e+02 1.782e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-03 14:50:36,247 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:50:59,748 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3279, 2.0523, 1.6802, 1.4256, 1.8207, 1.3453, 1.3219, 1.8490], + device='cuda:1'), covar=tensor([0.0973, 0.0873, 0.1064, 0.0927, 0.0657, 0.1370, 0.0731, 0.0476], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0324, 0.0346, 0.0277, 0.0255, 0.0350, 0.0294, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:51:22,494 INFO [train.py:903] (1/4) Epoch 29, batch 5450, loss[loss=0.2226, simple_loss=0.3068, pruned_loss=0.0692, over 19583.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06039, over 3802163.99 frames. ], batch size: 61, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:51:34,866 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.5689, 4.1533, 4.3054, 4.2944, 1.7243, 4.0526, 3.5194, 4.0579], + device='cuda:1'), covar=tensor([0.1813, 0.0846, 0.0666, 0.0805, 0.6195, 0.0996, 0.0753, 0.1135], + device='cuda:1'), in_proj_covar=tensor([0.0829, 0.0802, 0.1013, 0.0886, 0.0879, 0.0774, 0.0596, 0.0938], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 14:51:37,064 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1893, 2.8531, 2.3704, 2.4379, 2.3038, 2.5628, 1.1953, 2.1242], + device='cuda:1'), covar=tensor([0.0748, 0.0714, 0.0725, 0.1262, 0.1092, 0.1300, 0.1535, 0.1164], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0365, 0.0370, 0.0394, 0.0474, 0.0400, 0.0348, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 14:51:41,431 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3088, 3.0379, 2.2375, 2.7376, 0.9729, 3.0214, 2.8768, 2.9444], + device='cuda:1'), covar=tensor([0.1237, 0.1396, 0.2096, 0.1134, 0.3663, 0.0977, 0.1229, 0.1788], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0435, 0.0522, 0.0360, 0.0413, 0.0460, 0.0456, 0.0491], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 14:52:21,546 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:52:22,558 INFO [train.py:903] (1/4) Epoch 29, batch 5500, loss[loss=0.1895, simple_loss=0.2618, pruned_loss=0.05857, over 19607.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2831, pruned_loss=0.05992, over 3801495.09 frames. ], batch size: 50, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:52:23,694 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 4.767e+02 5.823e+02 7.066e+02 1.237e+03, threshold=1.165e+03, percent-clipped=1.0 +2023-04-03 14:52:46,224 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 14:53:08,302 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:53:22,918 INFO [train.py:903] (1/4) Epoch 29, batch 5550, loss[loss=0.1694, simple_loss=0.2584, pruned_loss=0.04018, over 19578.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2835, pruned_loss=0.06006, over 3801075.96 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:53:30,039 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 14:53:40,087 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-03 14:54:18,739 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 14:54:22,906 INFO [train.py:903] (1/4) Epoch 29, batch 5600, loss[loss=0.1884, simple_loss=0.2693, pruned_loss=0.05375, over 19637.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06011, over 3806458.24 frames. ], batch size: 50, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:54:24,067 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.937e+02 5.977e+02 7.468e+02 1.263e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 14:54:39,818 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:54:40,986 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:55:24,435 INFO [train.py:903] (1/4) Epoch 29, batch 5650, loss[loss=0.2036, simple_loss=0.291, pruned_loss=0.05817, over 19528.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2829, pruned_loss=0.0597, over 3797651.04 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:55:28,150 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,100 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:11,809 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 14:56:24,235 INFO [train.py:903] (1/4) Epoch 29, batch 5700, loss[loss=0.1749, simple_loss=0.249, pruned_loss=0.05045, over 19797.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.0593, over 3797652.02 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:56:25,388 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.159e+02 6.169e+02 7.777e+02 1.148e+03, threshold=1.234e+03, percent-clipped=0.0 +2023-04-03 14:56:41,823 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:46,521 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 14:57:24,057 INFO [train.py:903] (1/4) Epoch 29, batch 5750, loss[loss=0.2162, simple_loss=0.294, pruned_loss=0.06917, over 19663.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05938, over 3803028.89 frames. ], batch size: 55, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:57:25,244 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 14:57:30,934 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:57:31,938 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 14:57:38,015 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 14:58:24,228 INFO [train.py:903] (1/4) Epoch 29, batch 5800, loss[loss=0.2197, simple_loss=0.302, pruned_loss=0.06866, over 19748.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05971, over 3821939.26 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:58:25,406 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.987e+02 6.141e+02 8.368e+02 1.662e+03, threshold=1.228e+03, percent-clipped=5.0 +2023-04-03 14:59:24,882 INFO [train.py:903] (1/4) Epoch 29, batch 5850, loss[loss=0.1946, simple_loss=0.2741, pruned_loss=0.05759, over 19635.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.282, pruned_loss=0.05917, over 3826514.97 frames. ], batch size: 50, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 14:59:27,409 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:59:49,585 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197054.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:59:50,524 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:19,100 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197079.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:24,408 INFO [train.py:903] (1/4) Epoch 29, batch 5900, loss[loss=0.2088, simple_loss=0.2952, pruned_loss=0.06126, over 18765.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05966, over 3822706.97 frames. ], batch size: 74, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:00:25,160 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.74 vs. limit=5.0 +2023-04-03 15:00:25,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.889e+02 5.905e+02 7.501e+02 2.168e+03, threshold=1.181e+03, percent-clipped=6.0 +2023-04-03 15:00:26,680 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 15:00:35,898 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:46,559 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 15:00:47,965 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.4424, 4.0697, 2.5282, 3.6121, 1.0854, 4.0367, 3.9327, 3.9644], + device='cuda:1'), covar=tensor([0.0650, 0.0940, 0.2084, 0.0837, 0.3669, 0.0680, 0.0899, 0.1128], + device='cuda:1'), in_proj_covar=tensor([0.0534, 0.0433, 0.0520, 0.0358, 0.0409, 0.0459, 0.0454, 0.0487], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:01:06,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:24,661 INFO [train.py:903] (1/4) Epoch 29, batch 5950, loss[loss=0.2034, simple_loss=0.288, pruned_loss=0.05941, over 19756.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05967, over 3816746.85 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:01:32,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:59,496 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.6016, 1.2196, 1.4686, 1.2982, 2.2639, 1.1450, 2.1318, 2.5733], + device='cuda:1'), covar=tensor([0.0712, 0.2936, 0.2865, 0.1755, 0.0865, 0.2060, 0.1039, 0.0437], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0383, 0.0402, 0.0357, 0.0385, 0.0360, 0.0400, 0.0421], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:02:18,305 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1888, 1.2746, 1.2496, 1.0218, 1.1326, 1.0330, 0.0702, 0.3285], + device='cuda:1'), covar=tensor([0.0816, 0.0747, 0.0502, 0.0698, 0.1334, 0.0799, 0.1572, 0.1403], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0471, 0.0400, 0.0347, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 15:02:24,686 INFO [train.py:903] (1/4) Epoch 29, batch 6000, loss[loss=0.2398, simple_loss=0.3073, pruned_loss=0.08614, over 13693.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.06003, over 3817734.15 frames. ], batch size: 136, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:02:24,686 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 15:02:43,145 INFO [train.py:937] (1/4) Epoch 29, validation: loss=0.167, simple_loss=0.2662, pruned_loss=0.03392, over 944034.00 frames. +2023-04-03 15:02:43,146 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 15:02:44,363 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 4.704e+02 5.833e+02 7.372e+02 1.660e+03, threshold=1.167e+03, percent-clipped=5.0 +2023-04-03 15:02:48,713 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-03 15:03:23,053 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:03:44,117 INFO [train.py:903] (1/4) Epoch 29, batch 6050, loss[loss=0.1883, simple_loss=0.2638, pruned_loss=0.05643, over 19584.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.05967, over 3826213.97 frames. 
], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:03:55,189 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:11,945 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:45,073 INFO [train.py:903] (1/4) Epoch 29, batch 6100, loss[loss=0.1601, simple_loss=0.2518, pruned_loss=0.03422, over 19851.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2805, pruned_loss=0.05934, over 3826349.91 frames. ], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:04:46,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 5.045e+02 6.568e+02 8.070e+02 1.422e+03, threshold=1.314e+03, percent-clipped=5.0 +2023-04-03 15:05:16,415 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3579, 2.4176, 2.6883, 3.1006, 2.3659, 2.9477, 2.6713, 2.5700], + device='cuda:1'), covar=tensor([0.4398, 0.4269, 0.2021, 0.2735, 0.4547, 0.2359, 0.4972, 0.3376], + device='cuda:1'), in_proj_covar=tensor([0.0947, 0.1027, 0.0750, 0.0960, 0.0926, 0.0864, 0.0868, 0.0815], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:05:17,434 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:42,448 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:45,466 INFO [train.py:903] (1/4) Epoch 29, batch 6150, loss[loss=0.2016, simple_loss=0.2916, pruned_loss=0.05583, over 19616.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2802, pruned_loss=0.05869, over 3839772.89 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:05:48,275 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:13,339 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 15:06:14,663 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:46,115 INFO [train.py:903] (1/4) Epoch 29, batch 6200, loss[loss=0.1684, simple_loss=0.2532, pruned_loss=0.04176, over 19621.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2809, pruned_loss=0.05911, over 3833982.06 frames. ], batch size: 50, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:06:47,111 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.150e+02 5.975e+02 7.505e+02 2.002e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-03 15:07:11,161 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6720, 1.5118, 1.5100, 2.0310, 1.5579, 1.8587, 1.8434, 1.6947], + device='cuda:1'), covar=tensor([0.0820, 0.0920, 0.1015, 0.0652, 0.0825, 0.0751, 0.0802, 0.0703], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0226, 0.0230, 0.0242, 0.0228, 0.0215, 0.0189, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 15:07:31,626 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. 
limit=5.0 +2023-04-03 15:07:45,774 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197433.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:07:46,599 INFO [train.py:903] (1/4) Epoch 29, batch 6250, loss[loss=0.1932, simple_loss=0.2629, pruned_loss=0.06175, over 18230.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2815, pruned_loss=0.0596, over 3839268.71 frames. ], batch size: 40, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:08:18,032 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 15:08:48,228 INFO [train.py:903] (1/4) Epoch 29, batch 6300, loss[loss=0.1667, simple_loss=0.2419, pruned_loss=0.04575, over 19099.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2804, pruned_loss=0.05884, over 3838529.23 frames. ], batch size: 42, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:08:50,595 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.620e+02 5.575e+02 6.491e+02 1.508e+03, threshold=1.115e+03, percent-clipped=2.0 +2023-04-03 15:09:21,954 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:49,202 INFO [train.py:903] (1/4) Epoch 29, batch 6350, loss[loss=0.1872, simple_loss=0.2692, pruned_loss=0.05258, over 19472.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.28, pruned_loss=0.05846, over 3837899.01 frames. ], batch size: 49, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:09:49,506 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197534.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:52,928 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:23,557 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197563.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:49,098 INFO [train.py:903] (1/4) Epoch 29, batch 6400, loss[loss=0.1931, simple_loss=0.2688, pruned_loss=0.05872, over 19377.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2805, pruned_loss=0.05826, over 3851678.02 frames. ], batch size: 47, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:10:52,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.287e+02 5.708e+02 7.339e+02 1.464e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-04-03 15:10:53,775 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:53,985 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 15:11:23,600 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197612.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:25,873 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:49,896 INFO [train.py:903] (1/4) Epoch 29, batch 6450, loss[loss=0.2176, simple_loss=0.2998, pruned_loss=0.06767, over 19575.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2802, pruned_loss=0.05809, over 3860847.29 frames. 
], batch size: 61, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:11:56,895 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:12:24,333 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8132, 1.1413, 0.9282, 0.8966, 1.0380, 0.9002, 0.8654, 1.0928], + device='cuda:1'), covar=tensor([0.0600, 0.0863, 0.1013, 0.0712, 0.0558, 0.1178, 0.0555, 0.0485], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0322, 0.0345, 0.0277, 0.0255, 0.0347, 0.0293, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:12:37,427 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 15:12:49,912 INFO [train.py:903] (1/4) Epoch 29, batch 6500, loss[loss=0.2025, simple_loss=0.2881, pruned_loss=0.0585, over 19475.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05907, over 3855848.32 frames. ], batch size: 64, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:12:52,112 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 4.970e+02 6.097e+02 8.194e+02 1.467e+03, threshold=1.219e+03, percent-clipped=8.0 +2023-04-03 15:12:58,634 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 15:13:50,299 INFO [train.py:903] (1/4) Epoch 29, batch 6550, loss[loss=0.2214, simple_loss=0.305, pruned_loss=0.06891, over 19669.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.0594, over 3845427.50 frames. ], batch size: 53, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:14:42,386 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197777.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:14:49,852 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7757, 1.9095, 2.2458, 2.0406, 2.8694, 3.2649, 3.1539, 3.4399], + device='cuda:1'), covar=tensor([0.1390, 0.3112, 0.2849, 0.2347, 0.1036, 0.0263, 0.0217, 0.0390], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0275, 0.0256, 0.0199, 0.0221, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 15:14:50,648 INFO [train.py:903] (1/4) Epoch 29, batch 6600, loss[loss=0.2399, simple_loss=0.3053, pruned_loss=0.08722, over 13152.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2809, pruned_loss=0.0586, over 3836925.13 frames. ], batch size: 137, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:14:54,077 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.758e+02 5.997e+02 7.159e+02 2.116e+03, threshold=1.199e+03, percent-clipped=4.0 +2023-04-03 15:15:42,977 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2630, 0.9797, 1.1954, 1.9081, 1.2915, 1.1432, 1.2843, 1.1483], + device='cuda:1'), covar=tensor([0.1196, 0.1851, 0.1416, 0.0793, 0.1152, 0.1554, 0.1285, 0.1230], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0226, 0.0230, 0.0244, 0.0228, 0.0216, 0.0190, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 15:15:51,170 INFO [train.py:903] (1/4) Epoch 29, batch 6650, loss[loss=0.2132, simple_loss=0.2997, pruned_loss=0.06342, over 19477.00 frames. 
], tot_loss[loss=0.1984, simple_loss=0.2806, pruned_loss=0.05813, over 3831511.18 frames. ], batch size: 64, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:45,535 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:16:52,899 INFO [train.py:903] (1/4) Epoch 29, batch 6700, loss[loss=0.2571, simple_loss=0.3225, pruned_loss=0.0959, over 13261.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2802, pruned_loss=0.05817, over 3814977.66 frames. ], batch size: 139, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:56,373 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.093e+02 5.944e+02 7.634e+02 1.783e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-03 15:17:02,450 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197892.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:17:19,974 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197907.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:23,564 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:44,097 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=4.15 vs. limit=5.0 +2023-04-03 15:17:50,660 INFO [train.py:903] (1/4) Epoch 29, batch 6750, loss[loss=0.1961, simple_loss=0.2851, pruned_loss=0.05355, over 19141.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2809, pruned_loss=0.05861, over 3809826.51 frames. ], batch size: 69, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:18:29,601 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4750, 1.5398, 1.7910, 1.7113, 2.5387, 2.2520, 2.7245, 1.1563], + device='cuda:1'), covar=tensor([0.2587, 0.4445, 0.2825, 0.2027, 0.1654, 0.2268, 0.1528, 0.4906], + device='cuda:1'), in_proj_covar=tensor([0.0558, 0.0680, 0.0768, 0.0515, 0.0642, 0.0551, 0.0675, 0.0579], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:18:45,947 INFO [train.py:903] (1/4) Epoch 29, batch 6800, loss[loss=0.2309, simple_loss=0.3126, pruned_loss=0.07456, over 18859.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2811, pruned_loss=0.05912, over 3802495.08 frames. 
], batch size: 74, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:18:49,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.543e+02 4.853e+02 6.089e+02 7.661e+02 1.249e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 15:18:56,334 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:18:59,789 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8160, 1.9271, 2.1670, 2.2795, 1.7920, 2.2280, 2.1269, 1.9957], + device='cuda:1'), covar=tensor([0.4379, 0.3916, 0.2082, 0.2599, 0.4077, 0.2323, 0.5300, 0.3647], + device='cuda:1'), in_proj_covar=tensor([0.0946, 0.1027, 0.0749, 0.0959, 0.0926, 0.0863, 0.0863, 0.0817], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:19:09,598 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1515, 3.3719, 3.6533, 3.6749, 1.9910, 3.4022, 3.0582, 3.4656], + device='cuda:1'), covar=tensor([0.1599, 0.3706, 0.0715, 0.0831, 0.5141, 0.1617, 0.0721, 0.1104], + device='cuda:1'), in_proj_covar=tensor([0.0824, 0.0797, 0.1004, 0.0881, 0.0873, 0.0771, 0.0592, 0.0937], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 15:19:33,727 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 15:19:34,154 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 15:19:37,016 INFO [train.py:903] (1/4) Epoch 30, batch 0, loss[loss=0.2303, simple_loss=0.3067, pruned_loss=0.07698, over 18204.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3067, pruned_loss=0.07698, over 18204.00 frames. ], batch size: 83, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:19:37,016 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 15:19:49,307 INFO [train.py:937] (1/4) Epoch 30, validation: loss=0.167, simple_loss=0.2667, pruned_loss=0.03362, over 944034.00 frames. +2023-04-03 15:19:49,308 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 15:20:02,490 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198022.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:20:03,299 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 15:20:25,395 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 15:20:51,416 INFO [train.py:903] (1/4) Epoch 30, batch 50, loss[loss=0.1981, simple_loss=0.2913, pruned_loss=0.05246, over 19624.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2812, pruned_loss=0.05902, over 853155.80 frames. ], batch size: 57, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:21:20,382 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 4.959e+02 5.838e+02 7.711e+02 1.808e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 15:21:26,252 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-03 15:21:36,615 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198099.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:21:52,085 INFO [train.py:903] (1/4) Epoch 30, batch 100, loss[loss=0.1916, simple_loss=0.2803, pruned_loss=0.05144, over 19665.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05979, over 1519029.05 frames. ], batch size: 55, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:22:04,483 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 15:22:37,707 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198148.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:22:54,436 INFO [train.py:903] (1/4) Epoch 30, batch 150, loss[loss=0.1987, simple_loss=0.2723, pruned_loss=0.06258, over 19403.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2838, pruned_loss=0.06017, over 2023172.64 frames. ], batch size: 48, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:07,432 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198173.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:23:25,375 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 4.863e+02 5.858e+02 7.546e+02 1.579e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-03 15:23:52,146 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 15:23:55,535 INFO [train.py:903] (1/4) Epoch 30, batch 200, loss[loss=0.1965, simple_loss=0.278, pruned_loss=0.05755, over 19678.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2834, pruned_loss=0.05976, over 2422954.71 frames. ], batch size: 53, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:58,310 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6323, 1.7216, 1.8915, 1.8504, 2.5540, 2.3809, 2.6658, 1.1714], + device='cuda:1'), covar=tensor([0.2553, 0.4504, 0.2922, 0.1991, 0.1510, 0.2200, 0.1395, 0.4785], + device='cuda:1'), in_proj_covar=tensor([0.0561, 0.0681, 0.0770, 0.0516, 0.0642, 0.0552, 0.0676, 0.0579], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:24:42,400 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:47,693 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:57,534 INFO [train.py:903] (1/4) Epoch 30, batch 250, loss[loss=0.2171, simple_loss=0.3097, pruned_loss=0.06222, over 19540.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.05961, over 2718994.11 frames. 
], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:25:13,977 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:19,533 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198278.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:29,213 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.793e+02 5.720e+02 7.294e+02 1.524e+03, threshold=1.144e+03, percent-clipped=6.0 +2023-04-03 15:25:49,178 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:26:01,275 INFO [train.py:903] (1/4) Epoch 30, batch 300, loss[loss=0.2064, simple_loss=0.2976, pruned_loss=0.05761, over 19330.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05974, over 2965442.98 frames. ], batch size: 66, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:03,943 INFO [train.py:903] (1/4) Epoch 30, batch 350, loss[loss=0.1552, simple_loss=0.2366, pruned_loss=0.03693, over 19769.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06003, over 3146913.35 frames. ], batch size: 47, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:06,269 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 15:27:12,156 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:27:33,418 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.876e+02 6.987e+02 8.997e+02 2.429e+03, threshold=1.397e+03, percent-clipped=9.0 +2023-04-03 15:27:48,286 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6402, 1.4694, 1.8294, 1.4751, 2.8214, 3.8694, 3.5908, 4.0303], + device='cuda:1'), covar=tensor([0.1403, 0.3782, 0.3402, 0.2486, 0.0622, 0.0176, 0.0207, 0.0276], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0331, 0.0364, 0.0272, 0.0254, 0.0198, 0.0219, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 15:28:04,750 INFO [train.py:903] (1/4) Epoch 30, batch 400, loss[loss=0.2171, simple_loss=0.3046, pruned_loss=0.06479, over 19321.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.06009, over 3295692.59 frames. 
], batch size: 70, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:28:08,526 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0444, 3.7142, 2.7131, 3.3276, 0.9847, 3.6986, 3.5418, 3.5873], + device='cuda:1'), covar=tensor([0.0752, 0.1069, 0.1894, 0.0897, 0.3895, 0.0736, 0.0992, 0.1209], + device='cuda:1'), in_proj_covar=tensor([0.0535, 0.0435, 0.0523, 0.0359, 0.0412, 0.0460, 0.0454, 0.0488], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:28:44,695 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:28:56,374 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0620, 3.7477, 2.6412, 3.3184, 0.8250, 3.7206, 3.5483, 3.6191], + device='cuda:1'), covar=tensor([0.0770, 0.1143, 0.1933, 0.0923, 0.3844, 0.0743, 0.1027, 0.1194], + device='cuda:1'), in_proj_covar=tensor([0.0533, 0.0434, 0.0522, 0.0358, 0.0411, 0.0459, 0.0453, 0.0487], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:29:06,535 INFO [train.py:903] (1/4) Epoch 30, batch 450, loss[loss=0.198, simple_loss=0.2846, pruned_loss=0.05567, over 19625.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05896, over 3430314.02 frames. ], batch size: 57, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:29:38,785 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.102e+02 6.085e+02 7.779e+02 1.785e+03, threshold=1.217e+03, percent-clipped=4.0 +2023-04-03 15:29:40,856 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 15:29:42,045 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 15:30:08,496 INFO [train.py:903] (1/4) Epoch 30, batch 500, loss[loss=0.1941, simple_loss=0.2604, pruned_loss=0.06393, over 19385.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2815, pruned_loss=0.05882, over 3506092.69 frames. ], batch size: 47, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:30:15,662 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:06,284 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:11,508 INFO [train.py:903] (1/4) Epoch 30, batch 550, loss[loss=0.2004, simple_loss=0.2883, pruned_loss=0.05622, over 19284.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2821, pruned_loss=0.0589, over 3588307.71 frames. ], batch size: 66, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:31:40,932 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.928e+02 6.417e+02 8.667e+02 2.852e+03, threshold=1.283e+03, percent-clipped=10.0 +2023-04-03 15:32:13,286 INFO [train.py:903] (1/4) Epoch 30, batch 600, loss[loss=0.2219, simple_loss=0.3034, pruned_loss=0.07016, over 17413.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.0591, over 3640161.76 frames. 
], batch size: 101, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:32:28,413 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198625.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:38,378 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:51,217 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.43 vs. limit=5.0 +2023-04-03 15:32:52,895 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 15:33:01,368 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198650.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:33:14,980 INFO [train.py:903] (1/4) Epoch 30, batch 650, loss[loss=0.1628, simple_loss=0.246, pruned_loss=0.03984, over 19786.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2805, pruned_loss=0.0582, over 3696815.19 frames. ], batch size: 46, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:33:46,600 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.000e+02 5.780e+02 7.067e+02 2.104e+03, threshold=1.156e+03, percent-clipped=2.0 +2023-04-03 15:34:16,540 INFO [train.py:903] (1/4) Epoch 30, batch 700, loss[loss=0.18, simple_loss=0.2561, pruned_loss=0.05197, over 19592.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2801, pruned_loss=0.05839, over 3723815.13 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:18,682 INFO [train.py:903] (1/4) Epoch 30, batch 750, loss[loss=0.2166, simple_loss=0.2809, pruned_loss=0.07617, over 19769.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2802, pruned_loss=0.05847, over 3742555.90 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:29,204 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.3717, 1.3813, 1.5328, 1.5368, 1.6731, 1.8703, 1.7691, 0.4786], + device='cuda:1'), covar=tensor([0.2634, 0.4634, 0.2835, 0.2086, 0.1917, 0.2523, 0.1573, 0.5431], + device='cuda:1'), in_proj_covar=tensor([0.0559, 0.0679, 0.0766, 0.0515, 0.0640, 0.0551, 0.0674, 0.0578], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:35:51,149 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.908e+02 5.891e+02 7.920e+02 1.978e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 15:36:04,794 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 15:36:05,380 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6028, 1.6947, 1.9372, 1.9098, 1.4808, 1.8899, 1.9125, 1.7931], + device='cuda:1'), covar=tensor([0.4247, 0.3808, 0.2097, 0.2528, 0.4039, 0.2380, 0.5385, 0.3560], + device='cuda:1'), in_proj_covar=tensor([0.0946, 0.1029, 0.0750, 0.0960, 0.0926, 0.0866, 0.0866, 0.0815], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 15:36:23,529 INFO [train.py:903] (1/4) Epoch 30, batch 800, loss[loss=0.2128, simple_loss=0.2945, pruned_loss=0.06552, over 19616.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2807, pruned_loss=0.05852, over 3765009.85 frames. 
], batch size: 61, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:36:27,272 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:36:39,846 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 15:36:56,795 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198839.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:25,167 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:26,181 INFO [train.py:903] (1/4) Epoch 30, batch 850, loss[loss=0.2074, simple_loss=0.2967, pruned_loss=0.05907, over 19660.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2814, pruned_loss=0.05884, over 3773753.71 frames. ], batch size: 60, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:37:35,466 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198870.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:55,569 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 4.755e+02 5.781e+02 7.125e+02 1.514e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 15:37:59,387 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9876, 1.6747, 1.5643, 1.8649, 1.5880, 1.6512, 1.5110, 1.7998], + device='cuda:1'), covar=tensor([0.1011, 0.1201, 0.1587, 0.1026, 0.1257, 0.0610, 0.1554, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0360, 0.0321, 0.0259, 0.0311, 0.0260, 0.0324, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 15:38:13,693 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:38:20,325 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 15:38:26,175 INFO [train.py:903] (1/4) Epoch 30, batch 900, loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04566, over 19770.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05916, over 3787384.12 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:07,728 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:27,824 INFO [train.py:903] (1/4) Epoch 30, batch 950, loss[loss=0.206, simple_loss=0.2968, pruned_loss=0.0576, over 19488.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.283, pruned_loss=0.05946, over 3809474.44 frames. ], batch size: 64, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:32,353 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 15:39:47,014 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:48,041 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:59,676 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.052e+02 6.305e+02 7.870e+02 1.588e+03, threshold=1.261e+03, percent-clipped=4.0 +2023-04-03 15:40:30,253 INFO [train.py:903] (1/4) Epoch 30, batch 1000, loss[loss=0.2046, simple_loss=0.2932, pruned_loss=0.05805, over 19606.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2829, pruned_loss=0.05907, over 3824851.00 frames. ], batch size: 57, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:40:46,863 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 15:41:23,995 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 15:41:33,183 INFO [train.py:903] (1/4) Epoch 30, batch 1050, loss[loss=0.1701, simple_loss=0.2571, pruned_loss=0.0415, over 19845.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05939, over 3818809.85 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:42:03,436 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.270e+02 4.631e+02 5.961e+02 7.864e+02 1.953e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 15:42:04,594 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 15:42:07,343 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7880, 2.0643, 2.3733, 2.1433, 3.1844, 3.6655, 3.5323, 3.9514], + device='cuda:1'), covar=tensor([0.1458, 0.3070, 0.2833, 0.2197, 0.1000, 0.0387, 0.0217, 0.0356], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0333, 0.0365, 0.0273, 0.0256, 0.0199, 0.0220, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 15:42:11,044 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:42:35,185 INFO [train.py:903] (1/4) Epoch 30, batch 1100, loss[loss=0.164, simple_loss=0.2416, pruned_loss=0.04323, over 19724.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2813, pruned_loss=0.05872, over 3828901.27 frames. ], batch size: 46, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:43:38,421 INFO [train.py:903] (1/4) Epoch 30, batch 1150, loss[loss=0.1876, simple_loss=0.2704, pruned_loss=0.05238, over 19589.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2817, pruned_loss=0.05906, over 3816040.99 frames. 
], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:03,860 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1651, 1.2976, 1.5192, 1.4588, 2.8157, 1.2441, 2.2932, 3.1629], + device='cuda:1'), covar=tensor([0.0543, 0.2898, 0.2852, 0.1766, 0.0671, 0.2256, 0.1184, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0381, 0.0400, 0.0355, 0.0385, 0.0359, 0.0401, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:44:10,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.110e+02 5.915e+02 7.639e+02 1.372e+03, threshold=1.183e+03, percent-clipped=6.0 +2023-04-03 15:44:18,752 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:44:41,421 INFO [train.py:903] (1/4) Epoch 30, batch 1200, loss[loss=0.1702, simple_loss=0.2546, pruned_loss=0.04287, over 19842.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2828, pruned_loss=0.05945, over 3820071.15 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:43,950 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:06,839 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:14,390 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 15:45:22,181 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:38,774 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:44,960 INFO [train.py:903] (1/4) Epoch 30, batch 1250, loss[loss=0.2234, simple_loss=0.306, pruned_loss=0.07044, over 19371.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2819, pruned_loss=0.05881, over 3822855.63 frames. ], batch size: 66, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:46:14,637 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.004e+02 5.991e+02 7.632e+02 1.398e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-03 15:46:17,137 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:46:46,509 INFO [train.py:903] (1/4) Epoch 30, batch 1300, loss[loss=0.2025, simple_loss=0.2827, pruned_loss=0.06113, over 18342.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2816, pruned_loss=0.0586, over 3821683.26 frames. 
], batch size: 84, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:47:07,591 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:22,756 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4685, 2.1454, 1.6153, 1.4547, 2.0335, 1.3214, 1.3321, 1.9581], + device='cuda:1'), covar=tensor([0.1201, 0.0932, 0.1140, 0.0936, 0.0629, 0.1378, 0.0825, 0.0585], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0320, 0.0344, 0.0275, 0.0253, 0.0348, 0.0290, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:47:32,044 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:46,099 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199360.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:47,888 INFO [train.py:903] (1/4) Epoch 30, batch 1350, loss[loss=0.1668, simple_loss=0.2471, pruned_loss=0.04319, over 19407.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2811, pruned_loss=0.0581, over 3839538.92 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:03,198 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:13,453 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2891, 2.2258, 2.1153, 2.3696, 2.1232, 1.9960, 2.1007, 2.2563], + device='cuda:1'), covar=tensor([0.0817, 0.1112, 0.1144, 0.0777, 0.1068, 0.0525, 0.1124, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0360, 0.0321, 0.0259, 0.0310, 0.0260, 0.0323, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 15:48:20,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 4.672e+02 5.739e+02 7.239e+02 1.592e+03, threshold=1.148e+03, percent-clipped=7.0 +2023-04-03 15:48:40,961 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199404.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:50,924 INFO [train.py:903] (1/4) Epoch 30, batch 1400, loss[loss=0.1813, simple_loss=0.2691, pruned_loss=0.04673, over 19678.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2811, pruned_loss=0.05797, over 3831576.18 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:59,010 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199418.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:49:55,027 INFO [train.py:903] (1/4) Epoch 30, batch 1450, loss[loss=0.2166, simple_loss=0.2922, pruned_loss=0.07051, over 18373.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2812, pruned_loss=0.05815, over 3823417.71 frames. ], batch size: 84, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:49:56,191 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 15:50:11,578 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199476.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:50:25,539 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.040e+02 6.121e+02 7.927e+02 1.972e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 15:50:56,161 INFO [train.py:903] (1/4) Epoch 30, batch 1500, loss[loss=0.1739, simple_loss=0.2563, pruned_loss=0.04572, over 19389.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2817, pruned_loss=0.05872, over 3825067.30 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:50:56,495 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199512.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:51:28,278 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:51:46,261 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6897, 1.3063, 1.4881, 1.5690, 3.3129, 1.1805, 2.4298, 3.6054], + device='cuda:1'), covar=tensor([0.0473, 0.2936, 0.2980, 0.1853, 0.0634, 0.2592, 0.1393, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0388, 0.0362, 0.0403, 0.0424], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 15:51:47,401 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199553.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:51:58,248 INFO [train.py:903] (1/4) Epoch 30, batch 1550, loss[loss=0.2147, simple_loss=0.2932, pruned_loss=0.06813, over 17808.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2808, pruned_loss=0.05859, over 3823817.54 frames. ], batch size: 101, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:52:28,212 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:52:31,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.717e+02 5.804e+02 6.903e+02 1.639e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 15:52:45,773 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6867, 1.6703, 1.6451, 1.4343, 1.3667, 1.3687, 0.3387, 0.7122], + device='cuda:1'), covar=tensor([0.0708, 0.0701, 0.0480, 0.0723, 0.1318, 0.0867, 0.1471, 0.1202], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0395, 0.0472, 0.0398, 0.0347, 0.0348], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 15:52:59,355 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:01,356 INFO [train.py:903] (1/4) Epoch 30, batch 1600, loss[loss=0.219, simple_loss=0.2871, pruned_loss=0.07544, over 19625.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.05817, over 3831083.03 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:53:06,439 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:26,395 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-03 15:53:37,720 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:53,130 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:01,264 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199660.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:03,841 INFO [train.py:903] (1/4) Epoch 30, batch 1650, loss[loss=0.2453, simple_loss=0.3224, pruned_loss=0.08413, over 17294.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2806, pruned_loss=0.05832, over 3819171.37 frames. ], batch size: 101, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:54:32,753 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199685.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:35,819 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 4.724e+02 5.858e+02 7.730e+02 3.208e+03, threshold=1.172e+03, percent-clipped=5.0 +2023-04-03 15:54:36,276 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9370, 1.8857, 1.8124, 1.6425, 1.4889, 1.5685, 0.5869, 0.8959], + device='cuda:1'), covar=tensor([0.0769, 0.0694, 0.0522, 0.0800, 0.1302, 0.0943, 0.1390, 0.1281], + device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0365, 0.0372, 0.0397, 0.0475, 0.0401, 0.0348, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 15:54:50,264 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.0755, 1.2587, 1.7367, 0.9820, 2.3736, 3.0577, 2.7332, 3.2280], + device='cuda:1'), covar=tensor([0.1764, 0.4150, 0.3495, 0.2968, 0.0693, 0.0270, 0.0280, 0.0340], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0336, 0.0369, 0.0276, 0.0258, 0.0201, 0.0222, 0.0282], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-04-03 15:55:06,071 INFO [train.py:903] (1/4) Epoch 30, batch 1700, loss[loss=0.1763, simple_loss=0.2593, pruned_loss=0.0466, over 19674.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2794, pruned_loss=0.05762, over 3830096.76 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:55:24,010 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0822, 1.9510, 1.8295, 1.7171, 1.4779, 1.6597, 0.6486, 1.0433], + device='cuda:1'), covar=tensor([0.0751, 0.0700, 0.0617, 0.0982, 0.1463, 0.1007, 0.1476, 0.1314], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0366, 0.0373, 0.0398, 0.0476, 0.0402, 0.0350, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 15:55:40,547 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 15:55:46,466 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 15:56:09,056 INFO [train.py:903] (1/4) Epoch 30, batch 1750, loss[loss=0.1834, simple_loss=0.2563, pruned_loss=0.05526, over 19085.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2801, pruned_loss=0.0578, over 3816919.27 frames. 
], batch size: 42, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:56:09,206 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199762.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:56:42,923 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.893e+02 6.270e+02 7.375e+02 1.627e+03, threshold=1.254e+03, percent-clipped=1.0 +2023-04-03 15:57:00,414 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.1156, 5.1657, 5.9233, 5.9452, 1.9456, 5.5837, 4.7128, 5.6206], + device='cuda:1'), covar=tensor([0.1837, 0.0858, 0.0593, 0.0680, 0.6596, 0.0962, 0.0666, 0.1172], + device='cuda:1'), in_proj_covar=tensor([0.0829, 0.0803, 0.1015, 0.0891, 0.0877, 0.0776, 0.0600, 0.0943], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 15:57:11,410 INFO [train.py:903] (1/4) Epoch 30, batch 1800, loss[loss=0.192, simple_loss=0.2774, pruned_loss=0.0533, over 19781.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2806, pruned_loss=0.05807, over 3815363.82 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:57:21,123 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199820.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:06,475 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199856.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:58:08,426 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 15:58:13,034 INFO [train.py:903] (1/4) Epoch 30, batch 1850, loss[loss=0.1704, simple_loss=0.2516, pruned_loss=0.0446, over 19404.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2797, pruned_loss=0.0575, over 3820649.14 frames. ], batch size: 48, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:58:15,397 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3363, 3.8425, 3.9621, 3.9641, 1.5640, 3.7871, 3.2847, 3.7388], + device='cuda:1'), covar=tensor([0.1708, 0.0962, 0.0684, 0.0757, 0.5912, 0.0951, 0.0797, 0.1112], + device='cuda:1'), in_proj_covar=tensor([0.0831, 0.0806, 0.1020, 0.0894, 0.0881, 0.0779, 0.0603, 0.0948], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 15:58:32,552 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199877.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:46,819 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.043e+02 6.338e+02 8.325e+02 2.069e+03, threshold=1.268e+03, percent-clipped=7.0 +2023-04-03 15:58:46,854 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 15:58:57,067 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:59:12,866 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:15,994 INFO [train.py:903] (1/4) Epoch 30, batch 1900, loss[loss=0.1825, simple_loss=0.2731, pruned_loss=0.04598, over 19697.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2809, pruned_loss=0.0584, over 3811036.62 frames. 
], batch size: 59, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:59:33,278 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 15:59:33,521 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199927.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:38,032 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 15:59:42,751 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:43,926 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199935.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:00:03,960 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 16:00:17,076 INFO [train.py:903] (1/4) Epoch 30, batch 1950, loss[loss=0.2318, simple_loss=0.3139, pruned_loss=0.07481, over 19340.00 frames. ], tot_loss[loss=0.199, simple_loss=0.281, pruned_loss=0.05851, over 3802666.44 frames. ], batch size: 66, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 16:00:28,470 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199971.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:00:51,089 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 5.211e+02 6.525e+02 7.685e+02 1.771e+03, threshold=1.305e+03, percent-clipped=2.0 +2023-04-03 16:01:21,185 INFO [train.py:903] (1/4) Epoch 30, batch 2000, loss[loss=0.2156, simple_loss=0.295, pruned_loss=0.06816, over 19281.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2808, pruned_loss=0.05827, over 3807091.47 frames. ], batch size: 66, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:01:21,557 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200012.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:02:21,484 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 16:02:23,760 INFO [train.py:903] (1/4) Epoch 30, batch 2050, loss[loss=0.2196, simple_loss=0.2981, pruned_loss=0.07056, over 17470.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2805, pruned_loss=0.05806, over 3814231.86 frames. ], batch size: 101, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:02:43,234 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 16:02:43,266 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 16:02:50,419 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3251, 2.3371, 2.5311, 3.0086, 2.3530, 2.7847, 2.5275, 2.3337], + device='cuda:1'), covar=tensor([0.4165, 0.3959, 0.1933, 0.2525, 0.4269, 0.2290, 0.4751, 0.3295], + device='cuda:1'), in_proj_covar=tensor([0.0947, 0.1032, 0.0750, 0.0959, 0.0926, 0.0866, 0.0867, 0.0815], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 16:02:57,788 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 4.986e+02 6.252e+02 8.206e+02 1.738e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 16:03:03,565 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 16:03:11,190 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-03 16:03:26,457 INFO [train.py:903] (1/4) Epoch 30, batch 2100, loss[loss=0.1832, simple_loss=0.2783, pruned_loss=0.04408, over 19533.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2799, pruned_loss=0.05779, over 3813445.77 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:03:52,970 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200133.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:03:57,147 WARNING [train.py:1073] (1/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 16:04:18,959 WARNING [train.py:1073] (1/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 16:04:24,950 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:04:29,063 INFO [train.py:903] (1/4) Epoch 30, batch 2150, loss[loss=0.1955, simple_loss=0.2836, pruned_loss=0.0537, over 19683.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2794, pruned_loss=0.05748, over 3812405.61 frames. ], batch size: 60, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:05:02,778 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.819e+02 5.957e+02 8.119e+02 2.108e+03, threshold=1.191e+03, percent-clipped=2.0 +2023-04-03 16:05:05,537 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:10,671 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200195.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:05:30,902 INFO [train.py:903] (1/4) Epoch 30, batch 2200, loss[loss=0.2313, simple_loss=0.31, pruned_loss=0.07626, over 19518.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2794, pruned_loss=0.05729, over 3831234.93 frames. ], batch size: 64, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:05:37,176 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:39,436 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9043, 4.3901, 4.6498, 4.6573, 1.7233, 4.3638, 3.7963, 4.3509], + device='cuda:1'), covar=tensor([0.1795, 0.0948, 0.0651, 0.0688, 0.6489, 0.1062, 0.0744, 0.1256], + device='cuda:1'), in_proj_covar=tensor([0.0824, 0.0798, 0.1011, 0.0885, 0.0872, 0.0772, 0.0597, 0.0936], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 16:05:50,054 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200227.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:20,820 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200252.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:06:33,031 INFO [train.py:903] (1/4) Epoch 30, batch 2250, loss[loss=0.2293, simple_loss=0.3041, pruned_loss=0.07728, over 13369.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2811, pruned_loss=0.05845, over 3808965.81 frames. 
], batch size: 135, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:06:41,174 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:44,363 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:08,302 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.298e+02 6.530e+02 8.599e+02 1.543e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-03 16:07:12,069 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200293.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:07:17,571 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:35,303 INFO [train.py:903] (1/4) Epoch 30, batch 2300, loss[loss=0.1691, simple_loss=0.2485, pruned_loss=0.04491, over 19753.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2819, pruned_loss=0.05862, over 3824458.14 frames. ], batch size: 47, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:07:51,099 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 16:08:18,932 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 16:08:37,753 INFO [train.py:903] (1/4) Epoch 30, batch 2350, loss[loss=0.2081, simple_loss=0.2815, pruned_loss=0.06741, over 19582.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2819, pruned_loss=0.05878, over 3822576.36 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:08:47,329 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.41 vs. limit=5.0 +2023-04-03 16:09:07,632 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:09:12,872 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.379e+02 4.836e+02 5.833e+02 7.163e+02 1.475e+03, threshold=1.167e+03, percent-clipped=1.0 +2023-04-03 16:09:21,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 16:09:34,074 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9953, 1.4104, 1.6285, 1.5209, 3.5879, 1.2328, 2.5042, 4.0033], + device='cuda:1'), covar=tensor([0.0488, 0.2849, 0.2908, 0.2025, 0.0665, 0.2601, 0.1362, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0426, 0.0383, 0.0404, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:09:38,134 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 16:09:40,554 INFO [train.py:903] (1/4) Epoch 30, batch 2400, loss[loss=0.1947, simple_loss=0.2783, pruned_loss=0.05557, over 19598.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05918, over 3809325.05 frames. 
], batch size: 57, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:10:24,210 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6760, 1.2860, 1.5308, 1.5380, 3.2531, 1.3356, 2.5306, 3.6291], + device='cuda:1'), covar=tensor([0.0553, 0.3049, 0.3137, 0.2034, 0.0737, 0.2512, 0.1302, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:10:43,351 INFO [train.py:903] (1/4) Epoch 30, batch 2450, loss[loss=0.1945, simple_loss=0.2795, pruned_loss=0.05472, over 19700.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.282, pruned_loss=0.05878, over 3822445.14 frames. ], batch size: 60, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:11:19,105 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.121e+02 5.946e+02 7.814e+02 2.121e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-03 16:11:26,427 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8249, 3.2958, 3.3466, 3.3537, 1.3420, 3.2249, 2.8223, 3.1408], + device='cuda:1'), covar=tensor([0.1759, 0.1028, 0.0844, 0.0997, 0.5805, 0.1177, 0.0910, 0.1323], + device='cuda:1'), in_proj_covar=tensor([0.0819, 0.0795, 0.1006, 0.0883, 0.0868, 0.0770, 0.0593, 0.0935], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 16:11:46,465 INFO [train.py:903] (1/4) Epoch 30, batch 2500, loss[loss=0.1709, simple_loss=0.2498, pruned_loss=0.04605, over 19402.00 frames. ], tot_loss[loss=0.199, simple_loss=0.281, pruned_loss=0.05846, over 3822825.20 frames. ], batch size: 48, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:12:20,429 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200539.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:12:48,792 INFO [train.py:903] (1/4) Epoch 30, batch 2550, loss[loss=0.1904, simple_loss=0.2703, pruned_loss=0.05526, over 19479.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.0582, over 3815430.67 frames. ], batch size: 49, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:13:23,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 4.923e+02 6.163e+02 7.973e+02 2.573e+03, threshold=1.233e+03, percent-clipped=12.0 +2023-04-03 16:13:41,801 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0392, 3.6848, 2.7221, 3.3076, 0.8158, 3.6835, 3.5376, 3.6340], + device='cuda:1'), covar=tensor([0.0786, 0.1197, 0.1808, 0.0975, 0.4058, 0.0764, 0.1016, 0.1213], + device='cuda:1'), in_proj_covar=tensor([0.0533, 0.0431, 0.0517, 0.0358, 0.0409, 0.0458, 0.0449, 0.0487], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:13:47,180 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 16:13:51,615 INFO [train.py:903] (1/4) Epoch 30, batch 2600, loss[loss=0.1757, simple_loss=0.2537, pruned_loss=0.0488, over 19296.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2805, pruned_loss=0.05833, over 3809953.69 frames. 
], batch size: 44, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:28,347 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:29,696 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:44,533 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200654.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:14:54,163 INFO [train.py:903] (1/4) Epoch 30, batch 2650, loss[loss=0.1894, simple_loss=0.2794, pruned_loss=0.04975, over 19657.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2809, pruned_loss=0.05864, over 3811967.07 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:55,567 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200663.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:00,393 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200667.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:12,706 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8549, 1.3882, 1.5612, 1.6767, 3.4394, 1.3130, 2.4792, 3.8878], + device='cuda:1'), covar=tensor([0.0493, 0.2857, 0.2947, 0.1793, 0.0673, 0.2498, 0.1329, 0.0224], + device='cuda:1'), in_proj_covar=tensor([0.0425, 0.0382, 0.0402, 0.0355, 0.0386, 0.0361, 0.0402, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:15:15,798 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 16:15:28,264 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 4.881e+02 6.412e+02 8.116e+02 1.737e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 16:15:55,161 INFO [train.py:903] (1/4) Epoch 30, batch 2700, loss[loss=0.1927, simple_loss=0.284, pruned_loss=0.05076, over 18059.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2812, pruned_loss=0.05857, over 3817929.66 frames. ], batch size: 83, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:16:34,930 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 16:16:51,799 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:16:59,613 INFO [train.py:903] (1/4) Epoch 30, batch 2750, loss[loss=0.2297, simple_loss=0.3133, pruned_loss=0.07302, over 18241.00 frames. ], tot_loss[loss=0.199, simple_loss=0.281, pruned_loss=0.05843, over 3814749.50 frames. ], batch size: 84, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:17:34,085 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.266e+02 4.594e+02 5.767e+02 6.844e+02 1.269e+03, threshold=1.153e+03, percent-clipped=0.0 +2023-04-03 16:18:02,566 INFO [train.py:903] (1/4) Epoch 30, batch 2800, loss[loss=0.1823, simple_loss=0.2715, pruned_loss=0.0465, over 18740.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.281, pruned_loss=0.05838, over 3809912.87 frames. ], batch size: 74, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:05,167 INFO [train.py:903] (1/4) Epoch 30, batch 2850, loss[loss=0.1553, simple_loss=0.2441, pruned_loss=0.03324, over 19737.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2808, pruned_loss=0.05837, over 3805170.21 frames. 
], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:39,283 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.865e+02 5.683e+02 7.765e+02 1.857e+03, threshold=1.137e+03, percent-clipped=6.0 +2023-04-03 16:20:04,681 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200910.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:20:06,533 INFO [train.py:903] (1/4) Epoch 30, batch 2900, loss[loss=0.2271, simple_loss=0.3114, pruned_loss=0.07135, over 19620.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2825, pruned_loss=0.05901, over 3803485.66 frames. ], batch size: 57, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:20:08,647 WARNING [train.py:1073] (1/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 16:20:36,311 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200935.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:21:00,030 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.9057, 1.3322, 1.0930, 0.9099, 1.1582, 0.9569, 0.9798, 1.2613], + device='cuda:1'), covar=tensor([0.0682, 0.1002, 0.1235, 0.0917, 0.0672, 0.1496, 0.0690, 0.0566], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0321, 0.0345, 0.0277, 0.0255, 0.0350, 0.0291, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:21:08,764 INFO [train.py:903] (1/4) Epoch 30, batch 2950, loss[loss=0.2055, simple_loss=0.2961, pruned_loss=0.05745, over 19657.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.283, pruned_loss=0.05913, over 3814483.69 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:21:44,099 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.844e+02 6.179e+02 7.399e+02 1.416e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-03 16:22:06,363 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:11,911 INFO [train.py:903] (1/4) Epoch 30, batch 3000, loss[loss=0.1678, simple_loss=0.2436, pruned_loss=0.04594, over 19744.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2822, pruned_loss=0.05868, over 3825596.32 frames. ], batch size: 46, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:22:11,912 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 16:22:26,179 INFO [train.py:937] (1/4) Epoch 30, validation: loss=0.1666, simple_loss=0.266, pruned_loss=0.03357, over 944034.00 frames. +2023-04-03 16:22:26,180 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 16:22:26,628 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201012.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:32,376 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 16:22:57,381 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:23:27,828 INFO [train.py:903] (1/4) Epoch 30, batch 3050, loss[loss=0.2026, simple_loss=0.2769, pruned_loss=0.06417, over 19408.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2823, pruned_loss=0.05892, over 3825700.55 frames. 
], batch size: 48, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:24:02,286 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:24:03,235 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.896e+02 5.871e+02 7.483e+02 2.064e+03, threshold=1.174e+03, percent-clipped=5.0 +2023-04-03 16:24:17,335 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.6041, 2.0287, 2.2271, 2.1377, 3.3564, 1.7307, 2.8854, 3.5530], + device='cuda:1'), covar=tensor([0.0464, 0.2311, 0.2198, 0.1552, 0.0507, 0.2110, 0.1356, 0.0252], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0385, 0.0404, 0.0356, 0.0388, 0.0363, 0.0404, 0.0425], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:24:32,128 INFO [train.py:903] (1/4) Epoch 30, batch 3100, loss[loss=0.178, simple_loss=0.2698, pruned_loss=0.0431, over 19792.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.283, pruned_loss=0.05918, over 3818232.19 frames. ], batch size: 56, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:24:38,535 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-03 16:24:43,602 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:25:33,506 INFO [train.py:903] (1/4) Epoch 30, batch 3150, loss[loss=0.1617, simple_loss=0.2518, pruned_loss=0.03586, over 19721.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2831, pruned_loss=0.05952, over 3809427.13 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:25:36,007 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.9343, 4.4539, 2.7175, 3.9440, 0.8253, 4.5093, 4.3319, 4.4822], + device='cuda:1'), covar=tensor([0.0568, 0.0958, 0.2132, 0.0876, 0.4390, 0.0638, 0.0990, 0.1240], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0433, 0.0521, 0.0360, 0.0411, 0.0460, 0.0453, 0.0490], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:26:02,508 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 16:26:10,328 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 4.625e+02 5.752e+02 7.402e+02 1.953e+03, threshold=1.150e+03, percent-clipped=5.0 +2023-04-03 16:26:35,924 INFO [train.py:903] (1/4) Epoch 30, batch 3200, loss[loss=0.1501, simple_loss=0.2322, pruned_loss=0.03397, over 18499.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2826, pruned_loss=0.05934, over 3815049.62 frames. ], batch size: 41, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:32,993 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-03 16:27:40,086 INFO [train.py:903] (1/4) Epoch 30, batch 3250, loss[loss=0.1982, simple_loss=0.2831, pruned_loss=0.05667, over 19599.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2811, pruned_loss=0.05878, over 3820617.64 frames. ], batch size: 57, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:51,496 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:14,669 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. 
limit=5.0 +2023-04-03 16:28:14,872 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.466e+02 4.729e+02 5.856e+02 7.119e+02 1.424e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 16:28:16,380 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:42,755 INFO [train.py:903] (1/4) Epoch 30, batch 3300, loss[loss=0.1918, simple_loss=0.2654, pruned_loss=0.05906, over 19620.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2806, pruned_loss=0.05871, over 3826685.93 frames. ], batch size: 50, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:28:49,687 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 16:28:53,526 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1571, 2.0854, 1.9411, 1.7804, 1.7309, 1.7889, 0.5864, 1.0924], + device='cuda:1'), covar=tensor([0.0711, 0.0639, 0.0514, 0.0820, 0.1215, 0.0953, 0.1408, 0.1204], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0393, 0.0472, 0.0399, 0.0347, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 16:29:03,926 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1357, 3.2547, 1.7814, 1.9508, 2.9385, 1.6191, 1.4847, 2.3348], + device='cuda:1'), covar=tensor([0.1414, 0.0641, 0.1242, 0.0964, 0.0525, 0.1403, 0.1049, 0.0703], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0320, 0.0344, 0.0276, 0.0254, 0.0348, 0.0291, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:29:44,706 INFO [train.py:903] (1/4) Epoch 30, batch 3350, loss[loss=0.1842, simple_loss=0.2684, pruned_loss=0.04999, over 19597.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.28, pruned_loss=0.05831, over 3824200.41 frames. ], batch size: 61, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:30:05,887 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:07,531 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 16:30:21,792 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.184e+02 4.772e+02 5.759e+02 7.012e+02 1.305e+03, threshold=1.152e+03, percent-clipped=2.0 +2023-04-03 16:30:37,302 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:48,352 INFO [train.py:903] (1/4) Epoch 30, batch 3400, loss[loss=0.2363, simple_loss=0.3184, pruned_loss=0.07714, over 19603.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2802, pruned_loss=0.05834, over 3824484.35 frames. ], batch size: 57, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:16,063 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:31:51,817 INFO [train.py:903] (1/4) Epoch 30, batch 3450, loss[loss=0.221, simple_loss=0.3023, pruned_loss=0.06981, over 19684.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2803, pruned_loss=0.05796, over 3841480.54 frames. ], batch size: 55, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:57,365 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-03 16:32:27,319 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.269e+02 6.567e+02 8.372e+02 2.121e+03, threshold=1.313e+03, percent-clipped=9.0 +2023-04-03 16:32:55,652 INFO [train.py:903] (1/4) Epoch 30, batch 3500, loss[loss=0.2041, simple_loss=0.293, pruned_loss=0.0576, over 17414.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2799, pruned_loss=0.05795, over 3834687.78 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:33:18,879 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 16:33:25,393 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.6673, 4.2734, 2.9099, 3.8031, 1.0373, 4.2416, 4.0842, 4.1758], + device='cuda:1'), covar=tensor([0.0605, 0.0898, 0.1709, 0.0831, 0.3801, 0.0632, 0.0956, 0.0965], + device='cuda:1'), in_proj_covar=tensor([0.0539, 0.0436, 0.0522, 0.0361, 0.0412, 0.0462, 0.0455, 0.0493], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:33:41,239 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:33:58,202 INFO [train.py:903] (1/4) Epoch 30, batch 3550, loss[loss=0.2392, simple_loss=0.3067, pruned_loss=0.08587, over 14359.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.281, pruned_loss=0.05827, over 3822661.64 frames. ], batch size: 141, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:34:35,157 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.645e+02 6.006e+02 7.208e+02 1.141e+03, threshold=1.201e+03, percent-clipped=0.0 +2023-04-03 16:35:01,950 INFO [train.py:903] (1/4) Epoch 30, batch 3600, loss[loss=0.1857, simple_loss=0.2738, pruned_loss=0.0488, over 19516.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2818, pruned_loss=0.05864, over 3822406.45 frames. ], batch size: 54, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:35:06,948 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201615.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:35:32,742 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:04,973 INFO [train.py:903] (1/4) Epoch 30, batch 3650, loss[loss=0.1884, simple_loss=0.2813, pruned_loss=0.04777, over 19769.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2816, pruned_loss=0.05832, over 3831383.29 frames. ], batch size: 54, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:36:05,292 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:27,551 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201679.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:42,038 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 4.844e+02 5.974e+02 7.556e+02 1.962e+03, threshold=1.195e+03, percent-clipped=4.0 +2023-04-03 16:37:09,552 INFO [train.py:903] (1/4) Epoch 30, batch 3700, loss[loss=0.1769, simple_loss=0.2544, pruned_loss=0.04969, over 19759.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2801, pruned_loss=0.05776, over 3830990.76 frames. 
], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:37:15,816 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9404, 2.0248, 1.9110, 3.0698, 2.2435, 2.9132, 1.9804, 1.6942], + device='cuda:1'), covar=tensor([0.5377, 0.5372, 0.3194, 0.3325, 0.5366, 0.2695, 0.7169, 0.5601], + device='cuda:1'), in_proj_covar=tensor([0.0953, 0.1036, 0.0755, 0.0962, 0.0929, 0.0870, 0.0869, 0.0820], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 16:37:19,156 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5834, 1.6363, 1.9401, 1.8297, 2.6938, 2.4034, 2.9419, 1.2099], + device='cuda:1'), covar=tensor([0.2547, 0.4409, 0.2759, 0.1954, 0.1551, 0.2190, 0.1473, 0.4971], + device='cuda:1'), in_proj_covar=tensor([0.0557, 0.0677, 0.0764, 0.0515, 0.0637, 0.0551, 0.0670, 0.0579], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 16:37:23,545 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201724.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:31,452 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201730.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:57,731 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201751.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:57,783 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9095, 1.8772, 1.7160, 1.5068, 1.3714, 1.4331, 0.5566, 0.8205], + device='cuda:1'), covar=tensor([0.0987, 0.0924, 0.0559, 0.1001, 0.1748, 0.1320, 0.1646, 0.1603], + device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0366, 0.0371, 0.0395, 0.0473, 0.0401, 0.0348, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 16:38:11,302 INFO [train.py:903] (1/4) Epoch 30, batch 3750, loss[loss=0.2422, simple_loss=0.3136, pruned_loss=0.08543, over 13825.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2813, pruned_loss=0.05843, over 3806972.83 frames. ], batch size: 136, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:38:47,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.921e+02 6.534e+02 8.381e+02 2.079e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 16:39:04,313 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:14,010 INFO [train.py:903] (1/4) Epoch 30, batch 3800, loss[loss=0.2193, simple_loss=0.3064, pruned_loss=0.0661, over 19782.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2822, pruned_loss=0.05891, over 3791862.44 frames. 
], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:39:22,284 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.3996, 2.4618, 2.2598, 2.5738, 2.3767, 2.0950, 2.2387, 2.4015], + device='cuda:1'), covar=tensor([0.0882, 0.1170, 0.1125, 0.0819, 0.1095, 0.0493, 0.1092, 0.0615], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0358, 0.0319, 0.0259, 0.0309, 0.0258, 0.0323, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 16:39:35,640 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:41,267 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 16:40:13,005 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=2.25 vs. limit=5.0 +2023-04-03 16:40:15,944 INFO [train.py:903] (1/4) Epoch 30, batch 3850, loss[loss=0.1775, simple_loss=0.2543, pruned_loss=0.05031, over 19325.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2818, pruned_loss=0.05883, over 3798388.70 frames. ], batch size: 44, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:40:51,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.977e+02 6.719e+02 8.916e+02 2.147e+03, threshold=1.344e+03, percent-clipped=8.0 +2023-04-03 16:41:12,702 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.4729, 1.4084, 1.3920, 1.7756, 1.3617, 1.6244, 1.6767, 1.4806], + device='cuda:1'), covar=tensor([0.0869, 0.0921, 0.1047, 0.0660, 0.0846, 0.0760, 0.0817, 0.0752], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0227, 0.0239, 0.0227, 0.0215, 0.0187, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 16:41:18,294 INFO [train.py:903] (1/4) Epoch 30, batch 3900, loss[loss=0.1772, simple_loss=0.2494, pruned_loss=0.05249, over 16827.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2811, pruned_loss=0.05835, over 3813778.98 frames. ], batch size: 37, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,922 INFO [train.py:903] (1/4) Epoch 30, batch 3950, loss[loss=0.1767, simple_loss=0.2558, pruned_loss=0.0488, over 19301.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2812, pruned_loss=0.05868, over 3799718.99 frames. ], batch size: 44, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,947 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 16:42:50,970 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9385, 1.3445, 1.5779, 1.6062, 3.5297, 1.2455, 2.6019, 4.0176], + device='cuda:1'), covar=tensor([0.0472, 0.3044, 0.3106, 0.1927, 0.0660, 0.2708, 0.1411, 0.0207], + device='cuda:1'), in_proj_covar=tensor([0.0431, 0.0386, 0.0405, 0.0358, 0.0390, 0.0365, 0.0405, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:42:51,087 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:42:56,903 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 4.814e+02 5.725e+02 7.208e+02 1.816e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 16:43:16,592 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:18,076 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:19,112 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:23,654 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:24,451 INFO [train.py:903] (1/4) Epoch 30, batch 4000, loss[loss=0.2254, simple_loss=0.3033, pruned_loss=0.07374, over 19510.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2812, pruned_loss=0.05856, over 3808961.23 frames. ], batch size: 64, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:43:38,594 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:49,621 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202032.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:44:08,204 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 16:44:27,670 INFO [train.py:903] (1/4) Epoch 30, batch 4050, loss[loss=0.1797, simple_loss=0.2655, pruned_loss=0.04694, over 19847.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2803, pruned_loss=0.05813, over 3819593.17 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:44:34,770 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:02,977 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.674e+02 6.115e+02 7.260e+02 2.667e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 16:45:25,459 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.9718, 2.0778, 2.3537, 2.6202, 2.0520, 2.5178, 2.3005, 2.0816], + device='cuda:1'), covar=tensor([0.4293, 0.4048, 0.2007, 0.2547, 0.4213, 0.2326, 0.5048, 0.3532], + device='cuda:1'), in_proj_covar=tensor([0.0952, 0.1035, 0.0752, 0.0962, 0.0929, 0.0870, 0.0868, 0.0818], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 16:45:30,403 INFO [train.py:903] (1/4) Epoch 30, batch 4100, loss[loss=0.2055, simple_loss=0.2915, pruned_loss=0.05973, over 19736.00 frames. 
], tot_loss[loss=0.1989, simple_loss=0.2807, pruned_loss=0.05855, over 3814529.44 frames. ], batch size: 63, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:45:40,905 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:03,060 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202138.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:04,985 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 16:46:32,899 INFO [train.py:903] (1/4) Epoch 30, batch 4150, loss[loss=0.1788, simple_loss=0.26, pruned_loss=0.04878, over 19836.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2803, pruned_loss=0.05819, over 3820647.29 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:46:59,287 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202183.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:47:08,854 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.823e+02 5.653e+02 7.013e+02 1.196e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-03 16:47:34,526 INFO [train.py:903] (1/4) Epoch 30, batch 4200, loss[loss=0.1936, simple_loss=0.2677, pruned_loss=0.05968, over 19838.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2795, pruned_loss=0.05744, over 3833710.03 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:47:37,782 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 16:47:59,111 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 16:48:35,838 INFO [train.py:903] (1/4) Epoch 30, batch 4250, loss[loss=0.1951, simple_loss=0.284, pruned_loss=0.05307, over 19727.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05788, over 3832094.03 frames. ], batch size: 63, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:48:52,002 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 16:49:03,275 WARNING [train.py:1073] (1/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 16:49:12,500 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.334e+02 6.771e+02 9.277e+02 2.113e+03, threshold=1.354e+03, percent-clipped=7.0 +2023-04-03 16:49:31,143 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 16:49:38,444 INFO [train.py:903] (1/4) Epoch 30, batch 4300, loss[loss=0.2133, simple_loss=0.2939, pruned_loss=0.06631, over 19683.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2814, pruned_loss=0.05866, over 3831981.12 frames. 
], batch size: 58, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:50:16,160 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([4.0348, 3.6480, 2.5303, 3.2206, 0.7669, 3.6505, 3.5378, 3.5849], + device='cuda:1'), covar=tensor([0.0792, 0.1158, 0.2028, 0.1063, 0.4010, 0.0781, 0.1054, 0.1262], + device='cuda:1'), in_proj_covar=tensor([0.0540, 0.0439, 0.0524, 0.0364, 0.0416, 0.0463, 0.0458, 0.0494], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:50:28,703 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:50:34,342 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 16:50:41,233 INFO [train.py:903] (1/4) Epoch 30, batch 4350, loss[loss=0.1805, simple_loss=0.259, pruned_loss=0.05098, over 19735.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2823, pruned_loss=0.05898, over 3811280.92 frames. ], batch size: 46, lr: 2.72e-03, grad_scale: 4.0 +2023-04-03 16:50:59,661 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:03,967 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7031, 1.6416, 1.4767, 1.7004, 1.5786, 1.4244, 1.4081, 1.6089], + device='cuda:1'), covar=tensor([0.1031, 0.1342, 0.1377, 0.0928, 0.1277, 0.0682, 0.1579, 0.0759], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0361, 0.0322, 0.0262, 0.0312, 0.0260, 0.0326, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 16:51:18,785 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.712e+02 5.570e+02 7.585e+02 1.545e+03, threshold=1.114e+03, percent-clipped=3.0 +2023-04-03 16:51:21,605 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:31,129 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:43,046 INFO [train.py:903] (1/4) Epoch 30, batch 4400, loss[loss=0.1769, simple_loss=0.2567, pruned_loss=0.04855, over 19822.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2821, pruned_loss=0.05933, over 3802804.52 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:51:52,553 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:08,203 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 16:52:16,996 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 16:52:17,423 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:46,168 INFO [train.py:903] (1/4) Epoch 30, batch 4450, loss[loss=0.1973, simple_loss=0.2865, pruned_loss=0.05411, over 19606.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2819, pruned_loss=0.05884, over 3813565.97 frames. 
], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:52:46,657 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8612, 1.9956, 2.2392, 2.4606, 1.8779, 2.3225, 2.2164, 2.0396], + device='cuda:1'), covar=tensor([0.4369, 0.4191, 0.2088, 0.2603, 0.4365, 0.2481, 0.5270, 0.3685], + device='cuda:1'), in_proj_covar=tensor([0.0953, 0.1035, 0.0753, 0.0963, 0.0929, 0.0871, 0.0867, 0.0818], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 16:52:48,917 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:52,458 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:53:23,818 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 4.882e+02 5.921e+02 7.342e+02 1.295e+03, threshold=1.184e+03, percent-clipped=2.0 +2023-04-03 16:53:48,733 INFO [train.py:903] (1/4) Epoch 30, batch 4500, loss[loss=0.195, simple_loss=0.2797, pruned_loss=0.05516, over 19523.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.059, over 3815630.97 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:54:32,461 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7900, 1.9072, 2.2290, 2.3724, 1.8067, 2.2709, 2.1813, 2.0112], + device='cuda:1'), covar=tensor([0.4480, 0.4036, 0.2110, 0.2450, 0.4223, 0.2266, 0.5498, 0.3783], + device='cuda:1'), in_proj_covar=tensor([0.0953, 0.1036, 0.0753, 0.0964, 0.0929, 0.0871, 0.0868, 0.0820], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 16:54:50,682 INFO [train.py:903] (1/4) Epoch 30, batch 4550, loss[loss=0.2351, simple_loss=0.32, pruned_loss=0.07507, over 18231.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2814, pruned_loss=0.0587, over 3827007.39 frames. ], batch size: 83, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:55:01,218 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 16:55:25,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 16:55:27,102 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 5.006e+02 6.254e+02 8.173e+02 1.576e+03, threshold=1.251e+03, percent-clipped=3.0 +2023-04-03 16:55:52,912 INFO [train.py:903] (1/4) Epoch 30, batch 4600, loss[loss=0.2147, simple_loss=0.2968, pruned_loss=0.06627, over 18708.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05972, over 3810815.26 frames. ], batch size: 74, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:56:22,227 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. 
limit=5.0 +2023-04-03 16:56:51,357 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([5.6896, 5.2634, 3.1725, 4.5791, 1.5515, 5.2349, 5.1255, 5.2546], + device='cuda:1'), covar=tensor([0.0402, 0.0752, 0.1768, 0.0725, 0.3426, 0.0556, 0.0877, 0.1081], + device='cuda:1'), in_proj_covar=tensor([0.0537, 0.0436, 0.0520, 0.0361, 0.0412, 0.0460, 0.0454, 0.0491], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 16:56:54,623 INFO [train.py:903] (1/4) Epoch 30, batch 4650, loss[loss=0.1955, simple_loss=0.2822, pruned_loss=0.05436, over 19681.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2821, pruned_loss=0.05933, over 3815606.67 frames. ], batch size: 60, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:57:12,732 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 16:57:23,001 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 16:57:31,775 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.029e+02 5.947e+02 7.618e+02 1.686e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-03 16:57:55,569 INFO [train.py:903] (1/4) Epoch 30, batch 4700, loss[loss=0.1546, simple_loss=0.234, pruned_loss=0.03767, over 19306.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05964, over 3812433.50 frames. ], batch size: 44, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:58:10,506 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:18,348 WARNING [train.py:1073] (1/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 16:58:41,363 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:59,214 INFO [train.py:903] (1/4) Epoch 30, batch 4750, loss[loss=0.2089, simple_loss=0.291, pruned_loss=0.06342, over 19683.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2806, pruned_loss=0.0586, over 3828170.81 frames. ], batch size: 60, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:59:35,969 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.055e+02 5.959e+02 7.636e+02 1.705e+03, threshold=1.192e+03, percent-clipped=6.0 +2023-04-03 17:00:01,991 INFO [train.py:903] (1/4) Epoch 30, batch 4800, loss[loss=0.2202, simple_loss=0.3027, pruned_loss=0.06885, over 19608.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2807, pruned_loss=0.05839, over 3825474.47 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:00:38,738 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 17:00:47,792 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202848.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:01:03,238 INFO [train.py:903] (1/4) Epoch 30, batch 4850, loss[loss=0.1858, simple_loss=0.2725, pruned_loss=0.04956, over 19694.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2808, pruned_loss=0.05844, over 3829357.00 frames. ], batch size: 59, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:01:27,935 WARNING [train.py:1073] (1/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 17:01:41,380 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.041e+02 6.050e+02 7.335e+02 2.031e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:01:47,266 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 17:01:54,131 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 17:01:55,281 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 17:02:03,482 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 17:02:05,916 INFO [train.py:903] (1/4) Epoch 30, batch 4900, loss[loss=0.1942, simple_loss=0.2842, pruned_loss=0.05209, over 19667.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2814, pruned_loss=0.05879, over 3816657.53 frames. ], batch size: 58, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:02:12,231 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.5979, 2.9979, 3.0447, 3.0633, 1.3750, 2.9110, 2.5843, 2.8682], + device='cuda:1'), covar=tensor([0.1717, 0.1588, 0.0871, 0.1002, 0.5533, 0.1385, 0.0839, 0.1271], + device='cuda:1'), in_proj_covar=tensor([0.0825, 0.0805, 0.1013, 0.0890, 0.0876, 0.0778, 0.0601, 0.0943], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 17:02:25,011 WARNING [train.py:1073] (1/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 17:02:54,167 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-03 17:03:07,819 INFO [train.py:903] (1/4) Epoch 30, batch 4950, loss[loss=0.1969, simple_loss=0.2864, pruned_loss=0.0537, over 19583.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05998, over 3814272.04 frames. ], batch size: 61, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:03:22,384 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 17:03:43,838 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.771e+02 6.104e+02 8.250e+02 2.222e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 17:03:46,196 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 17:03:53,451 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 17:04:09,794 INFO [train.py:903] (1/4) Epoch 30, batch 5000, loss[loss=0.1934, simple_loss=0.283, pruned_loss=0.05189, over 19606.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2834, pruned_loss=0.05983, over 3817068.26 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:04:16,543 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-03 17:04:18,986 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.9712, 1.4708, 1.5957, 1.5427, 3.5751, 1.1087, 2.7164, 4.0089], + device='cuda:1'), covar=tensor([0.0410, 0.2795, 0.2902, 0.1897, 0.0635, 0.2600, 0.1158, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0429, 0.0385, 0.0404, 0.0358, 0.0389, 0.0363, 0.0403, 0.0426], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:04:28,733 WARNING [train.py:1073] (1/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 17:05:02,498 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:02,579 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2045, 1.8763, 2.0552, 2.8518, 1.9751, 2.4791, 2.2657, 2.3326], + device='cuda:1'), covar=tensor([0.0794, 0.0898, 0.0915, 0.0770, 0.0877, 0.0747, 0.0910, 0.0627], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0239, 0.0227, 0.0216, 0.0188, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 17:05:09,007 INFO [train.py:903] (1/4) Epoch 30, batch 5050, loss[loss=0.19, simple_loss=0.263, pruned_loss=0.05855, over 19732.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.0597, over 3828311.68 frames. ], batch size: 45, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:05:17,041 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.8792, 1.4304, 1.5613, 1.6918, 3.4676, 1.2536, 2.4727, 3.9469], + device='cuda:1'), covar=tensor([0.0478, 0.2911, 0.2973, 0.1911, 0.0683, 0.2533, 0.1426, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0428, 0.0384, 0.0403, 0.0357, 0.0388, 0.0362, 0.0403, 0.0425], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:05:45,249 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 17:05:46,417 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.511e+02 5.411e+02 6.959e+02 2.884e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 17:05:59,231 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:06:10,216 INFO [train.py:903] (1/4) Epoch 30, batch 5100, loss[loss=0.2082, simple_loss=0.3007, pruned_loss=0.05789, over 19523.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05933, over 3829876.45 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:06:23,802 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 17:06:27,355 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 17:06:30,779 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 17:07:12,616 INFO [train.py:903] (1/4) Epoch 30, batch 5150, loss[loss=0.1938, simple_loss=0.2735, pruned_loss=0.05708, over 19858.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2813, pruned_loss=0.05857, over 3839578.65 frames. 
], batch size: 52, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:07:27,923 WARNING [train.py:1073] (1/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 17:07:49,456 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.115e+02 6.435e+02 8.085e+02 2.061e+03, threshold=1.287e+03, percent-clipped=7.0 +2023-04-03 17:07:49,623 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203192.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:08:01,701 WARNING [train.py:1073] (1/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 17:08:14,988 INFO [train.py:903] (1/4) Epoch 30, batch 5200, loss[loss=0.2409, simple_loss=0.3177, pruned_loss=0.08208, over 19679.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2809, pruned_loss=0.05901, over 3815796.77 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:08:30,097 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 17:09:14,142 WARNING [train.py:1073] (1/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 17:09:16,378 INFO [train.py:903] (1/4) Epoch 30, batch 5250, loss[loss=0.2007, simple_loss=0.2754, pruned_loss=0.06303, over 19403.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2813, pruned_loss=0.05905, over 3821414.07 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:09:53,734 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.978e+02 4.929e+02 6.276e+02 8.119e+02 2.486e+03, threshold=1.255e+03, percent-clipped=4.0 +2023-04-03 17:10:05,401 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.1369, 3.6405, 3.7684, 3.7769, 1.8221, 3.5331, 3.1622, 3.5551], + device='cuda:1'), covar=tensor([0.1871, 0.1623, 0.0751, 0.0852, 0.5482, 0.1337, 0.0791, 0.1182], + device='cuda:1'), in_proj_covar=tensor([0.0832, 0.0807, 0.1018, 0.0891, 0.0880, 0.0780, 0.0600, 0.0946], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 17:10:12,191 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203307.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:10:17,556 INFO [train.py:903] (1/4) Epoch 30, batch 5300, loss[loss=0.2403, simple_loss=0.3282, pruned_loss=0.07623, over 19708.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2816, pruned_loss=0.05903, over 3822760.26 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:10:36,825 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 17:11:08,662 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1717, 2.0399, 2.0227, 1.8876, 1.5912, 1.7959, 0.6251, 1.1556], + device='cuda:1'), covar=tensor([0.0739, 0.0732, 0.0501, 0.0827, 0.1370, 0.0939, 0.1476, 0.1193], + device='cuda:1'), in_proj_covar=tensor([0.0367, 0.0366, 0.0371, 0.0393, 0.0471, 0.0399, 0.0346, 0.0350], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 17:11:18,765 INFO [train.py:903] (1/4) Epoch 30, batch 5350, loss[loss=0.2149, simple_loss=0.301, pruned_loss=0.06438, over 19526.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05938, over 3826526.69 frames. 
], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:11:38,863 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 17:11:53,867 WARNING [train.py:1073] (1/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 17:11:56,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 5.524e+02 6.450e+02 8.325e+02 1.910e+03, threshold=1.290e+03, percent-clipped=5.0 +2023-04-03 17:12:05,342 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:12:08,779 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([0.8464, 1.1936, 0.9589, 0.8066, 1.0401, 0.8064, 0.9436, 1.1002], + device='cuda:1'), covar=tensor([0.0711, 0.0862, 0.1083, 0.0890, 0.0621, 0.1428, 0.0571, 0.0547], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0324, 0.0348, 0.0280, 0.0257, 0.0352, 0.0294, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:12:20,976 INFO [train.py:903] (1/4) Epoch 30, batch 5400, loss[loss=0.2452, simple_loss=0.312, pruned_loss=0.08924, over 19696.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2835, pruned_loss=0.06022, over 3836202.66 frames. ], batch size: 63, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:02,927 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203447.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:20,914 INFO [train.py:903] (1/4) Epoch 30, batch 5450, loss[loss=0.2221, simple_loss=0.3116, pruned_loss=0.06631, over 18062.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2835, pruned_loss=0.06012, over 3836652.70 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:21,329 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.7122, 1.2495, 1.3569, 1.5808, 1.1086, 1.4464, 1.3486, 1.5256], + device='cuda:1'), covar=tensor([0.1137, 0.1300, 0.1624, 0.1046, 0.1486, 0.0655, 0.1653, 0.0863], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0364, 0.0323, 0.0262, 0.0312, 0.0261, 0.0327, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 17:13:50,568 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:59,454 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.910e+02 6.531e+02 8.117e+02 1.984e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-03 17:14:23,411 INFO [train.py:903] (1/4) Epoch 30, batch 5500, loss[loss=0.1873, simple_loss=0.2766, pruned_loss=0.04902, over 19726.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05916, over 3845605.51 frames. ], batch size: 63, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:14:26,944 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:14:48,311 WARNING [train.py:1073] (1/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 17:15:24,336 INFO [train.py:903] (1/4) Epoch 30, batch 5550, loss[loss=0.1968, simple_loss=0.2814, pruned_loss=0.05612, over 19592.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2817, pruned_loss=0.05892, over 3848966.44 frames. 
], batch size: 61, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:15:24,639 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203562.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:15:25,780 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203563.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:15:31,867 WARNING [train.py:1073] (1/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 17:15:56,787 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203588.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:16:01,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 4.952e+02 6.311e+02 7.819e+02 2.120e+03, threshold=1.262e+03, percent-clipped=2.0 +2023-04-03 17:16:22,144 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 17:16:27,754 INFO [train.py:903] (1/4) Epoch 30, batch 5600, loss[loss=0.2281, simple_loss=0.3045, pruned_loss=0.07587, over 13307.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05958, over 3844655.06 frames. ], batch size: 136, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:16:38,804 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-03 17:16:59,740 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-03 17:17:29,529 INFO [train.py:903] (1/4) Epoch 30, batch 5650, loss[loss=0.1938, simple_loss=0.2763, pruned_loss=0.05561, over 19606.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2836, pruned_loss=0.05999, over 3857858.26 frames. ], batch size: 50, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:17:50,967 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([3.3359, 3.8425, 3.9511, 3.9627, 1.6544, 3.7647, 3.2848, 3.7287], + device='cuda:1'), covar=tensor([0.1751, 0.0963, 0.0725, 0.0847, 0.5823, 0.1088, 0.0765, 0.1190], + device='cuda:1'), in_proj_covar=tensor([0.0827, 0.0804, 0.1013, 0.0889, 0.0875, 0.0776, 0.0598, 0.0945], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 17:18:06,340 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.913e+02 5.869e+02 7.597e+02 1.607e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 17:18:18,649 WARNING [train.py:1073] (1/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 17:18:29,970 INFO [train.py:903] (1/4) Epoch 30, batch 5700, loss[loss=0.1977, simple_loss=0.2725, pruned_loss=0.06145, over 19332.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2822, pruned_loss=0.05924, over 3854801.45 frames. 
], batch size: 47, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:18:31,155 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([6.2355, 5.6717, 3.4117, 5.0192, 1.1307, 5.8802, 5.6862, 5.8957], + device='cuda:1'), covar=tensor([0.0354, 0.0763, 0.1645, 0.0760, 0.3986, 0.0456, 0.0741, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0540, 0.0439, 0.0522, 0.0363, 0.0414, 0.0462, 0.0456, 0.0492], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:19:10,860 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.5376, 1.6222, 1.7839, 1.9253, 1.5429, 1.8443, 1.8052, 1.7397], + device='cuda:1'), covar=tensor([0.3365, 0.2956, 0.1531, 0.1898, 0.3003, 0.1765, 0.3788, 0.2538], + device='cuda:1'), in_proj_covar=tensor([0.0953, 0.1036, 0.0753, 0.0963, 0.0930, 0.0870, 0.0866, 0.0818], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 17:19:31,944 INFO [train.py:903] (1/4) Epoch 30, batch 5750, loss[loss=0.1932, simple_loss=0.2824, pruned_loss=0.05195, over 19776.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.05968, over 3839857.31 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:35,374 WARNING [train.py:1073] (1/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 17:19:35,644 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:44,345 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:45,186 WARNING [train.py:1073] (1/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 17:19:51,014 WARNING [train.py:1073] (1/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 17:20:00,646 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:09,174 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.021e+02 5.985e+02 7.897e+02 1.857e+03, threshold=1.197e+03, percent-clipped=7.0 +2023-04-03 17:20:15,310 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:35,335 INFO [train.py:903] (1/4) Epoch 30, batch 5800, loss[loss=0.2024, simple_loss=0.2717, pruned_loss=0.06658, over 19740.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05974, over 3842589.38 frames. 
], batch size: 46, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:20:41,479 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.1941, 1.8237, 1.4953, 1.3102, 1.6087, 1.2195, 1.1767, 1.6263], + device='cuda:1'), covar=tensor([0.0958, 0.0910, 0.1103, 0.0925, 0.0622, 0.1440, 0.0711, 0.0504], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0324, 0.0347, 0.0279, 0.0257, 0.0351, 0.0293, 0.0281], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:20:43,823 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:56,418 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:13,715 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:37,905 INFO [train.py:903] (1/4) Epoch 30, batch 5850, loss[loss=0.2095, simple_loss=0.2977, pruned_loss=0.06068, over 19474.00 frames. ], tot_loss[loss=0.201, simple_loss=0.283, pruned_loss=0.05952, over 3832074.33 frames. ], batch size: 64, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:21:41,664 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:22:15,805 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.919e+02 5.824e+02 7.977e+02 1.569e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 17:22:39,475 INFO [train.py:903] (1/4) Epoch 30, batch 5900, loss[loss=0.1961, simple_loss=0.2869, pruned_loss=0.05266, over 19537.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2831, pruned_loss=0.05962, over 3823197.12 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:22:42,664 WARNING [train.py:1073] (1/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 17:22:47,246 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203918.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:23:06,104 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 17:23:07,743 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2854, 1.9482, 1.5021, 1.2717, 1.7879, 1.1082, 1.2030, 1.7927], + device='cuda:1'), covar=tensor([0.1130, 0.0876, 0.1245, 0.0994, 0.0697, 0.1569, 0.0878, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0323, 0.0346, 0.0278, 0.0256, 0.0350, 0.0293, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:23:19,592 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:23:31,982 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.8212, 3.3119, 3.3389, 3.3572, 1.4464, 3.2101, 2.7657, 3.1339], + device='cuda:1'), covar=tensor([0.1824, 0.1104, 0.0823, 0.0987, 0.5591, 0.1159, 0.0917, 0.1294], + device='cuda:1'), in_proj_covar=tensor([0.0827, 0.0805, 0.1013, 0.0889, 0.0876, 0.0776, 0.0599, 0.0945], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-04-03 17:23:40,674 INFO [train.py:903] (1/4) Epoch 30, batch 5950, loss[loss=0.1986, simple_loss=0.2813, pruned_loss=0.05792, over 19668.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.0599, over 3824207.55 frames. ], batch size: 60, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:18,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.092e+02 6.183e+02 7.281e+02 1.501e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-03 17:24:45,144 INFO [train.py:903] (1/4) Epoch 30, batch 6000, loss[loss=0.2046, simple_loss=0.2908, pruned_loss=0.05919, over 19697.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05969, over 3817099.48 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:45,144 INFO [train.py:928] (1/4) Computing validation loss +2023-04-03 17:24:58,727 INFO [train.py:937] (1/4) Epoch 30, validation: loss=0.167, simple_loss=0.2658, pruned_loss=0.03407, over 944034.00 frames. +2023-04-03 17:24:58,728 INFO [train.py:938] (1/4) Maximum memory allocated so far is 18821MB +2023-04-03 17:26:02,073 INFO [train.py:903] (1/4) Epoch 30, batch 6050, loss[loss=0.1774, simple_loss=0.2708, pruned_loss=0.04197, over 19664.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2819, pruned_loss=0.0595, over 3822094.02 frames. 
], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:26:28,633 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0535, 1.8305, 1.6917, 1.9824, 1.6750, 1.7191, 1.6544, 1.9166], + device='cuda:1'), covar=tensor([0.0999, 0.1376, 0.1458, 0.1048, 0.1398, 0.0600, 0.1478, 0.0771], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0362, 0.0323, 0.0261, 0.0312, 0.0260, 0.0325, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 17:26:38,218 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 5.054e+02 6.352e+02 7.854e+02 1.582e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-03 17:26:42,118 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:26:54,293 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0604, 1.6878, 1.8419, 2.6492, 1.9858, 2.1901, 2.3326, 2.0019], + device='cuda:1'), covar=tensor([0.0832, 0.0960, 0.1031, 0.0772, 0.0882, 0.0840, 0.0881, 0.0746], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0223, 0.0229, 0.0240, 0.0227, 0.0217, 0.0188, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 17:27:00,734 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:04,111 INFO [train.py:903] (1/4) Epoch 30, batch 6100, loss[loss=0.2123, simple_loss=0.2868, pruned_loss=0.06891, over 19768.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05971, over 3824350.00 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:27:23,319 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:04,387 INFO [train.py:903] (1/4) Epoch 30, batch 6150, loss[loss=0.22, simple_loss=0.3089, pruned_loss=0.06555, over 19740.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05983, over 3815516.31 frames. ], batch size: 63, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:28:29,836 INFO [zipformer.py:1188] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:34,969 WARNING [train.py:1073] (1/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 17:28:42,856 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.061e+02 6.485e+02 7.945e+02 1.594e+03, threshold=1.297e+03, percent-clipped=2.0 +2023-04-03 17:28:52,805 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204200.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:03,786 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:07,242 INFO [train.py:903] (1/4) Epoch 30, batch 6200, loss[loss=0.1937, simple_loss=0.2681, pruned_loss=0.05969, over 19404.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.283, pruned_loss=0.06008, over 3808727.84 frames. 
], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:29:24,447 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:25,533 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:41,913 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-03 17:29:45,869 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:50,462 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.6657, 1.4869, 1.4939, 2.0682, 1.5369, 1.9376, 1.8652, 1.7504], + device='cuda:1'), covar=tensor([0.0808, 0.0891, 0.0987, 0.0653, 0.0789, 0.0737, 0.0781, 0.0669], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0222, 0.0227, 0.0238, 0.0225, 0.0215, 0.0186, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:1') +2023-04-03 17:30:09,325 INFO [train.py:903] (1/4) Epoch 30, batch 6250, loss[loss=0.1822, simple_loss=0.2657, pruned_loss=0.04934, over 19754.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05929, over 3825506.79 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:30:10,640 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204262.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:30:38,580 WARNING [train.py:1073] (1/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 17:30:46,052 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.004e+02 5.938e+02 7.424e+02 1.100e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-03 17:31:10,304 INFO [train.py:903] (1/4) Epoch 30, batch 6300, loss[loss=0.2522, simple_loss=0.3277, pruned_loss=0.08834, over 19744.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05948, over 3828889.53 frames. ], batch size: 63, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:31:24,153 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:31:56,763 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.0929, 2.0589, 1.9529, 1.6930, 1.3817, 1.6137, 0.6685, 1.1267], + device='cuda:1'), covar=tensor([0.0934, 0.0959, 0.0608, 0.1248, 0.1756, 0.1584, 0.1682, 0.1518], + device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0368, 0.0371, 0.0396, 0.0474, 0.0400, 0.0348, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-04-03 17:32:11,321 INFO [train.py:903] (1/4) Epoch 30, batch 6350, loss[loss=0.1696, simple_loss=0.2474, pruned_loss=0.04585, over 18509.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2819, pruned_loss=0.05915, over 3831962.32 frames. 
], batch size: 41, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:32:30,534 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204377.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:32:50,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.728e+02 6.049e+02 7.442e+02 1.533e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:33:12,659 INFO [train.py:903] (1/4) Epoch 30, batch 6400, loss[loss=0.1957, simple_loss=0.2806, pruned_loss=0.05542, over 18021.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05985, over 3835072.73 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:33:45,677 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:13,526 INFO [train.py:903] (1/4) Epoch 30, batch 6450, loss[loss=0.2093, simple_loss=0.2881, pruned_loss=0.0652, over 18176.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2841, pruned_loss=0.06046, over 3816687.08 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:34:23,500 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.2186, 2.2001, 2.4269, 2.9897, 2.3260, 2.8162, 2.4577, 2.2562], + device='cuda:1'), covar=tensor([0.4425, 0.4518, 0.2123, 0.2796, 0.4689, 0.2569, 0.5362, 0.3674], + device='cuda:1'), in_proj_covar=tensor([0.0954, 0.1037, 0.0755, 0.0965, 0.0932, 0.0871, 0.0868, 0.0819], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-04-03 17:34:35,898 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:51,302 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 4.981e+02 6.276e+02 8.010e+02 2.155e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 17:34:54,942 WARNING [train.py:1073] (1/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 17:34:59,488 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:07,995 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:16,831 INFO [train.py:903] (1/4) Epoch 30, batch 6500, loss[loss=0.2079, simple_loss=0.293, pruned_loss=0.06138, over 19460.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2834, pruned_loss=0.05973, over 3817301.69 frames. ], batch size: 70, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:35:19,115 WARNING [train.py:1073] (1/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 17:35:30,901 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204524.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:31,787 INFO [zipformer.py:1188] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:09,107 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:18,053 INFO [train.py:903] (1/4) Epoch 30, batch 6550, loss[loss=0.2773, simple_loss=0.3382, pruned_loss=0.1082, over 13755.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05911, over 3823032.99 frames. 
], batch size: 135, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:36:40,526 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:56,884 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.288e+02 6.916e+02 9.470e+02 2.608e+03, threshold=1.383e+03, percent-clipped=11.0 +2023-04-03 17:37:11,422 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:37:20,110 INFO [train.py:903] (1/4) Epoch 30, batch 6600, loss[loss=0.1719, simple_loss=0.2532, pruned_loss=0.04534, over 19381.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2823, pruned_loss=0.05919, over 3801405.46 frames. ], batch size: 47, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:37:46,951 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204633.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:37:54,527 INFO [zipformer.py:1188] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204640.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:38:02,586 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.2209, 1.7921, 1.4182, 1.2541, 1.5700, 1.1923, 1.2451, 1.6483], + device='cuda:1'), covar=tensor([0.0838, 0.0861, 0.1121, 0.0891, 0.0626, 0.1298, 0.0626, 0.0452], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0323, 0.0346, 0.0278, 0.0256, 0.0350, 0.0292, 0.0281], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:38:17,129 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204658.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:38:21,418 INFO [train.py:903] (1/4) Epoch 30, batch 6650, loss[loss=0.1971, simple_loss=0.2845, pruned_loss=0.05487, over 19724.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05913, over 3796516.18 frames. ], batch size: 51, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:38:53,458 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([2.1049, 2.0636, 1.9725, 2.2050, 2.0158, 1.8735, 1.9638, 2.0881], + device='cuda:1'), covar=tensor([0.0931, 0.1150, 0.1160, 0.0873, 0.1126, 0.0527, 0.1180, 0.0617], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0361, 0.0321, 0.0261, 0.0311, 0.0261, 0.0325, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-04-03 17:38:59,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 4.923e+02 6.497e+02 8.594e+02 3.232e+03, threshold=1.299e+03, percent-clipped=5.0 +2023-04-03 17:39:24,423 INFO [train.py:903] (1/4) Epoch 30, batch 6700, loss[loss=0.2242, simple_loss=0.3034, pruned_loss=0.07249, over 18313.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2824, pruned_loss=0.05894, over 3799875.30 frames. ], batch size: 83, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:21,754 INFO [train.py:903] (1/4) Epoch 30, batch 6750, loss[loss=0.2399, simple_loss=0.3239, pruned_loss=0.07801, over 19614.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2827, pruned_loss=0.05916, over 3807904.05 frames. 
], batch size: 57, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:58,539 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.849e+02 6.262e+02 9.905e+02 2.863e+03, threshold=1.252e+03, percent-clipped=11.0 +2023-04-03 17:41:17,887 INFO [zipformer.py:1188] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:19,747 INFO [train.py:903] (1/4) Epoch 30, batch 6800, loss[loss=0.2082, simple_loss=0.2954, pruned_loss=0.06046, over 19614.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2812, pruned_loss=0.05866, over 3817292.09 frames. ], batch size: 57, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:41:35,116 INFO [zipformer.py:2441] (1/4) attn_weights_entropy = tensor([1.8597, 0.8989, 1.0898, 1.0451, 1.6443, 0.8528, 1.5504, 1.8726], + device='cuda:1'), covar=tensor([0.0603, 0.2270, 0.2199, 0.1316, 0.0681, 0.1665, 0.1421, 0.0466], + device='cuda:1'), in_proj_covar=tensor([0.0427, 0.0382, 0.0403, 0.0357, 0.0388, 0.0362, 0.0402, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-04-03 17:41:45,196 INFO [zipformer.py:1188] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:49,972 INFO [train.py:1171] (1/4) Done! diff --git a/log/log-train-2023-03-31-18-51-54-2 b/log/log-train-2023-03-31-18-51-54-2 new file mode 100644 index 0000000000000000000000000000000000000000..6a87a10c7eed1191f59caaf313e6461e567adf08 --- /dev/null +++ b/log/log-train-2023-03-31-18-51-54-2 @@ -0,0 +1,25086 @@ +2023-03-31 18:51:54,783 INFO [train.py:975] (2/4) Training started +2023-03-31 18:51:54,784 INFO [train.py:985] (2/4) Device: cuda:2 +2023-03-31 18:51:54,826 INFO [train.py:994] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r2n03', 'IP address': '10.1.2.3'}, 'world_size': 4, 'master_port': 54321, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 10, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 
'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-31 18:51:54,827 INFO [train.py:996] (2/4) About to create model +2023-03-31 18:51:55,714 INFO [zipformer.py:405] (2/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. +2023-03-31 18:51:55,726 INFO [train.py:1000] (2/4) Number of model parameters: 20697573 +2023-03-31 18:52:03,012 INFO [train.py:1019] (2/4) Using DDP +2023-03-31 18:52:03,651 INFO [asr_datamodule.py:429] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts, combined with their reverberated versions +2023-03-31 18:52:03,690 INFO [asr_datamodule.py:224] (2/4) Enable MUSAN +2023-03-31 18:52:03,690 INFO [asr_datamodule.py:225] (2/4) About to get Musan cuts +2023-03-31 18:52:06,211 INFO [asr_datamodule.py:249] (2/4) Enable SpecAugment +2023-03-31 18:52:06,211 INFO [asr_datamodule.py:250] (2/4) Time warp factor: 80 +2023-03-31 18:52:06,211 INFO [asr_datamodule.py:260] (2/4) Num frame mask: 10 +2023-03-31 18:52:06,212 INFO [asr_datamodule.py:273] (2/4) About to create train dataset +2023-03-31 18:52:06,212 INFO [asr_datamodule.py:300] (2/4) Using DynamicBucketingSampler. +2023-03-31 18:52:08,534 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:09,007 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:09,278 INFO [asr_datamodule.py:315] (2/4) About to create train dataloader +2023-03-31 18:52:09,280 INFO [asr_datamodule.py:440] (2/4) About to get dev-clean cuts +2023-03-31 18:52:09,282 INFO [asr_datamodule.py:447] (2/4) About to get dev-other cuts +2023-03-31 18:52:09,283 INFO [asr_datamodule.py:346] (2/4) About to create dev dataset +2023-03-31 18:52:09,732 INFO [asr_datamodule.py:363] (2/4) About to create dev dataloader +2023-03-31 18:52:23,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:24,387 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:36,254 INFO [train.py:903] (2/4) Epoch 1, batch 0, loss[loss=7.16, simple_loss=6.477, pruned_loss=6.812, over 19807.00 frames. ], tot_loss[loss=7.16, simple_loss=6.477, pruned_loss=6.812, over 19807.00 frames. ], batch size: 49, lr: 2.50e-02, grad_scale: 2.0 +2023-03-31 18:52:36,255 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 18:52:49,153 INFO [train.py:937] (2/4) Epoch 1, validation: loss=6.888, simple_loss=6.229, pruned_loss=6.575, over 944034.00 frames. +2023-03-31 18:52:49,154 INFO [train.py:938] (2/4) Maximum memory allocated so far is 11268MB +2023-03-31 18:53:03,011 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-03-31 18:53:58,994 INFO [train.py:903] (2/4) Epoch 1, batch 50, loss[loss=1.187, simple_loss=1.05, pruned_loss=1.231, over 19761.00 frames. ], tot_loss[loss=2.159, simple_loss=1.949, pruned_loss=2.003, over 871458.35 frames. ], batch size: 47, lr: 2.75e-02, grad_scale: 0.125 +2023-03-31 18:54:00,897 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:54:24,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=23.11 vs. limit=2.0 +2023-03-31 18:54:36,585 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:54:41,908 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 18:55:11,363 INFO [train.py:903] (2/4) Epoch 1, batch 100, loss[loss=1.105, simple_loss=0.95, pruned_loss=1.232, over 19684.00 frames. ], tot_loss[loss=1.636, simple_loss=1.456, pruned_loss=1.617, over 1511130.08 frames. ], batch size: 59, lr: 3.00e-02, grad_scale: 0.25 +2023-03-31 18:55:11,630 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:55:17,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.479e+01 1.678e+02 3.237e+02 1.260e+03 8.630e+04, threshold=6.475e+02, percent-clipped=0.0 +2023-03-31 18:55:26,390 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 18:56:20,071 INFO [train.py:903] (2/4) Epoch 1, batch 150, loss[loss=1.005, simple_loss=0.8573, pruned_loss=1.075, over 19325.00 frames. ], tot_loss[loss=1.391, simple_loss=1.223, pruned_loss=1.423, over 2018075.65 frames. ], batch size: 66, lr: 3.25e-02, grad_scale: 0.25 +2023-03-31 18:57:32,268 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=3.37 vs. limit=2.0 +2023-03-31 18:57:32,425 INFO [train.py:903] (2/4) Epoch 1, batch 200, loss[loss=0.9877, simple_loss=0.8347, pruned_loss=1.024, over 19513.00 frames. ], tot_loss[loss=1.251, simple_loss=1.09, pruned_loss=1.286, over 2414520.04 frames. ], batch size: 54, lr: 3.50e-02, grad_scale: 0.5 +2023-03-31 18:57:32,455 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 18:57:39,438 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.067e+01 1.186e+02 1.653e+02 2.090e+02 5.158e+02, threshold=3.307e+02, percent-clipped=0.0 +2023-03-31 18:57:50,835 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-31 18:58:43,194 INFO [train.py:903] (2/4) Epoch 1, batch 250, loss[loss=0.8567, simple_loss=0.7191, pruned_loss=0.8582, over 19753.00 frames. ], tot_loss[loss=1.161, simple_loss=1.004, pruned_loss=1.186, over 2727174.34 frames. ], batch size: 45, lr: 3.75e-02, grad_scale: 0.5 +2023-03-31 18:59:28,855 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=5.57 vs. limit=5.0 +2023-03-31 18:59:51,891 INFO [train.py:903] (2/4) Epoch 1, batch 300, loss[loss=0.9876, simple_loss=0.8272, pruned_loss=0.9455, over 19286.00 frames. ], tot_loss[loss=1.095, simple_loss=0.9395, pruned_loss=1.106, over 2980083.04 frames. 
], batch size: 66, lr: 4.00e-02, grad_scale: 1.0 +2023-03-31 18:59:56,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.717e+01 1.166e+02 1.521e+02 1.991e+02 3.277e+02, threshold=3.043e+02, percent-clipped=0.0 +2023-03-31 18:59:58,417 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=306.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:00:09,454 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=314.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:00:58,533 INFO [train.py:903] (2/4) Epoch 1, batch 350, loss[loss=0.936, simple_loss=0.776, pruned_loss=0.8844, over 19715.00 frames. ], tot_loss[loss=1.052, simple_loss=0.8961, pruned_loss=1.049, over 3174062.38 frames. ], batch size: 51, lr: 4.25e-02, grad_scale: 1.0 +2023-03-31 19:01:05,476 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 19:02:08,274 INFO [train.py:903] (2/4) Epoch 1, batch 400, loss[loss=0.9838, simple_loss=0.8161, pruned_loss=0.8895, over 17289.00 frames. ], tot_loss[loss=1.02, simple_loss=0.8627, pruned_loss=1.001, over 3316676.94 frames. ], batch size: 101, lr: 4.50e-02, grad_scale: 2.0 +2023-03-31 19:02:13,397 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.278e+02 1.546e+02 1.978e+02 5.474e+02, threshold=3.091e+02, percent-clipped=7.0 +2023-03-31 19:02:13,628 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=405.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:02:33,524 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=421.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:03:05,094 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=445.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:03:12,895 INFO [train.py:903] (2/4) Epoch 1, batch 450, loss[loss=0.9453, simple_loss=0.778, pruned_loss=0.8417, over 19668.00 frames. ], tot_loss[loss=0.9976, simple_loss=0.8391, pruned_loss=0.9614, over 3418967.75 frames. ], batch size: 55, lr: 4.75e-02, grad_scale: 2.0 +2023-03-31 19:03:32,723 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.66 vs. limit=2.0 +2023-03-31 19:03:35,144 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=7.46 vs. limit=5.0 +2023-03-31 19:03:49,658 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 19:03:51,738 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 19:04:19,399 INFO [train.py:903] (2/4) Epoch 1, batch 500, loss[loss=0.8066, simple_loss=0.668, pruned_loss=0.6829, over 19288.00 frames. ], tot_loss[loss=0.9763, simple_loss=0.8179, pruned_loss=0.9192, over 3515209.12 frames. ], batch size: 44, lr: 4.99e-02, grad_scale: 2.0 +2023-03-31 19:04:25,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.386e+02 1.860e+02 2.529e+02 4.736e+02, threshold=3.719e+02, percent-clipped=12.0 +2023-03-31 19:05:27,837 INFO [train.py:903] (2/4) Epoch 1, batch 550, loss[loss=0.8116, simple_loss=0.6782, pruned_loss=0.6521, over 19750.00 frames. ], tot_loss[loss=0.9579, simple_loss=0.8009, pruned_loss=0.8776, over 3579241.31 frames. 
], batch size: 45, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:05:41,529 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=560.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:06:04,533 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=580.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:12,417 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=586.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:32,546 INFO [train.py:903] (2/4) Epoch 1, batch 600, loss[loss=0.863, simple_loss=0.7296, pruned_loss=0.656, over 19548.00 frames. ], tot_loss[loss=0.9361, simple_loss=0.7829, pruned_loss=0.8322, over 3628949.87 frames. ], batch size: 56, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:06:36,891 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 2.910e+02 4.086e+02 6.136e+02 1.097e+03, threshold=8.173e+02, percent-clipped=60.0 +2023-03-31 19:06:40,839 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=608.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:07:11,104 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=6.79 vs. limit=5.0 +2023-03-31 19:07:11,462 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 19:07:38,083 INFO [train.py:903] (2/4) Epoch 1, batch 650, loss[loss=0.8219, simple_loss=0.6973, pruned_loss=0.6059, over 19669.00 frames. ], tot_loss[loss=0.9118, simple_loss=0.7638, pruned_loss=0.7851, over 3670913.90 frames. ], batch size: 58, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:07:47,306 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=658.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:13,449 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=677.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:08:41,834 INFO [train.py:903] (2/4) Epoch 1, batch 700, loss[loss=0.8355, simple_loss=0.7132, pruned_loss=0.5951, over 19746.00 frames. ], tot_loss[loss=0.884, simple_loss=0.7428, pruned_loss=0.7367, over 3714734.86 frames. ], batch size: 63, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:08:43,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=702.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:46,604 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.173e+02 6.580e+02 8.914e+02 3.039e+03, threshold=1.316e+03, percent-clipped=29.0 +2023-03-31 19:09:43,557 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=749.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:09:45,315 INFO [train.py:903] (2/4) Epoch 1, batch 750, loss[loss=0.6873, simple_loss=0.5921, pruned_loss=0.4712, over 19404.00 frames. ], tot_loss[loss=0.859, simple_loss=0.7244, pruned_loss=0.6933, over 3722236.80 frames. ], batch size: 48, lr: 4.97e-02, grad_scale: 2.0 +2023-03-31 19:10:14,513 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=773.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:10:48,976 INFO [train.py:903] (2/4) Epoch 1, batch 800, loss[loss=0.82, simple_loss=0.6949, pruned_loss=0.5738, over 19679.00 frames. ], tot_loss[loss=0.832, simple_loss=0.7041, pruned_loss=0.6513, over 3745739.45 frames. 
], batch size: 59, lr: 4.97e-02, grad_scale: 4.0 +2023-03-31 19:10:53,094 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.827e+02 7.991e+02 1.030e+03 2.888e+03, threshold=1.598e+03, percent-clipped=14.0 +2023-03-31 19:11:01,615 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 19:11:08,429 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=816.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:11:20,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2769, 2.3700, 1.8460, 3.1775, 1.7390, 3.6974, 3.5253, 3.3290], + device='cuda:2'), covar=tensor([0.0902, 0.1613, 0.2919, 0.1349, 0.2789, 0.1391, 0.1205, 0.1603], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0045, 0.0051, 0.0038, 0.0052, 0.0038, 0.0037, 0.0040], + device='cuda:2'), out_proj_covar=tensor([2.4248e-05, 2.8229e-05, 3.4969e-05, 2.5514e-05, 3.4836e-05, 2.6374e-05, + 2.3830e-05, 2.6915e-05], device='cuda:2') +2023-03-31 19:11:40,326 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=841.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:11:52,165 INFO [train.py:903] (2/4) Epoch 1, batch 850, loss[loss=0.644, simple_loss=0.5622, pruned_loss=0.417, over 19474.00 frames. ], tot_loss[loss=0.8083, simple_loss=0.6867, pruned_loss=0.6139, over 3769073.37 frames. ], batch size: 49, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:10,425 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=864.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:12:12,292 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=865.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:12:43,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 19:12:54,612 INFO [train.py:903] (2/4) Epoch 1, batch 900, loss[loss=0.6381, simple_loss=0.5517, pruned_loss=0.4168, over 16491.00 frames. ], tot_loss[loss=0.7862, simple_loss=0.6703, pruned_loss=0.5808, over 3784559.25 frames. ], batch size: 36, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:59,597 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 6.072e+02 7.456e+02 9.579e+02 1.181e+04, threshold=1.491e+03, percent-clipped=3.0 +2023-03-31 19:13:21,567 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=924.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:30,350 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=930.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:53,696 INFO [train.py:903] (2/4) Epoch 1, batch 950, loss[loss=0.6959, simple_loss=0.6039, pruned_loss=0.4463, over 19516.00 frames. ], tot_loss[loss=0.7688, simple_loss=0.658, pruned_loss=0.5528, over 3801755.23 frames. ], batch size: 54, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:13:54,869 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 19:13:56,040 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=952.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:14:51,779 INFO [train.py:903] (2/4) Epoch 1, batch 1000, loss[loss=0.6924, simple_loss=0.6102, pruned_loss=0.4266, over 19591.00 frames. ], tot_loss[loss=0.7511, simple_loss=0.6452, pruned_loss=0.5267, over 3819545.91 frames. 
], batch size: 61, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:14:56,988 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 5.980e+02 7.509e+02 1.052e+03 2.029e+03, threshold=1.502e+03, percent-clipped=4.0 +2023-03-31 19:15:22,071 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.19 vs. limit=2.0 +2023-03-31 19:15:25,850 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1029.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:38,955 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1039.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:41,752 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 19:15:45,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1045.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:52,665 INFO [train.py:903] (2/4) Epoch 1, batch 1050, loss[loss=0.7211, simple_loss=0.6258, pruned_loss=0.4533, over 19699.00 frames. ], tot_loss[loss=0.7349, simple_loss=0.6339, pruned_loss=0.5034, over 3806790.60 frames. ], batch size: 63, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:15:56,726 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1054.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:12,559 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1067.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:20,714 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 19:16:53,084 INFO [train.py:903] (2/4) Epoch 1, batch 1100, loss[loss=0.6462, simple_loss=0.5681, pruned_loss=0.3937, over 19511.00 frames. ], tot_loss[loss=0.717, simple_loss=0.621, pruned_loss=0.48, over 3815503.98 frames. ], batch size: 54, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:16:57,403 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.036e+02 7.117e+02 8.563e+02 1.068e+03 2.368e+03, threshold=1.713e+03, percent-clipped=4.0 +2023-03-31 19:17:08,305 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7311, 1.0686, 2.0455, 1.4811, 1.8340, 2.0314, 2.2892, 2.0064], + device='cuda:2'), covar=tensor([0.6994, 0.8534, 0.3809, 0.8253, 0.3312, 0.2441, 0.2198, 0.2818], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0078, 0.0068, 0.0090, 0.0069, 0.0057, 0.0067, 0.0058], + device='cuda:2'), out_proj_covar=tensor([5.3885e-05, 5.5279e-05, 4.1832e-05, 6.1413e-05, 4.1769e-05, 3.2531e-05, + 4.0243e-05, 3.4980e-05], device='cuda:2') +2023-03-31 19:17:14,919 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1120.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:17:44,304 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1145.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:17:51,774 INFO [train.py:903] (2/4) Epoch 1, batch 1150, loss[loss=0.621, simple_loss=0.55, pruned_loss=0.371, over 19776.00 frames. ], tot_loss[loss=0.7012, simple_loss=0.6095, pruned_loss=0.4601, over 3825428.99 frames. ], batch size: 54, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:18:12,984 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1171.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:18:14,108 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.97 vs. 
limit=5.0 +2023-03-31 19:18:47,614 INFO [train.py:903] (2/4) Epoch 1, batch 1200, loss[loss=0.6108, simple_loss=0.5562, pruned_loss=0.346, over 19656.00 frames. ], tot_loss[loss=0.6894, simple_loss=0.6007, pruned_loss=0.4446, over 3829565.66 frames. ], batch size: 53, lr: 4.93e-02, grad_scale: 8.0 +2023-03-31 19:18:52,223 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 7.433e+02 9.314e+02 1.239e+03 3.000e+03, threshold=1.863e+03, percent-clipped=16.0 +2023-03-31 19:18:56,116 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1209.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:19:16,410 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 19:19:42,388 INFO [train.py:903] (2/4) Epoch 1, batch 1250, loss[loss=0.5659, simple_loss=0.5087, pruned_loss=0.3262, over 19405.00 frames. ], tot_loss[loss=0.6715, simple_loss=0.5882, pruned_loss=0.4247, over 3830045.26 frames. ], batch size: 48, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:10,999 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0041, 1.2952, 2.8867, 2.0250, 2.6072, 3.2408, 3.5384, 3.5483], + device='cuda:2'), covar=tensor([0.5930, 0.6463, 0.2607, 0.5807, 0.1984, 0.1123, 0.0934, 0.1177], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0094, 0.0084, 0.0108, 0.0081, 0.0064, 0.0075, 0.0064], + device='cuda:2'), out_proj_covar=tensor([6.9173e-05, 6.6021e-05, 5.3078e-05, 7.2963e-05, 4.9873e-05, 3.6927e-05, + 4.3784e-05, 3.6574e-05], device='cuda:2') +2023-03-31 19:20:32,330 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1295.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:20:39,089 INFO [train.py:903] (2/4) Epoch 1, batch 1300, loss[loss=0.507, simple_loss=0.4626, pruned_loss=0.2843, over 19756.00 frames. ], tot_loss[loss=0.6585, simple_loss=0.579, pruned_loss=0.4097, over 3839139.99 frames. 
], batch size: 45, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:39,491 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1301.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:20:43,731 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+02 7.699e+02 1.048e+03 1.379e+03 4.741e+03, threshold=2.097e+03, percent-clipped=13.0 +2023-03-31 19:21:00,029 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1320.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:03,680 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1323.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:04,895 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1324.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:06,789 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9675, 2.4534, 2.9932, 2.9439, 1.6383, 2.9575, 2.6931, 3.0591], + device='cuda:2'), covar=tensor([0.0499, 0.0786, 0.0402, 0.0426, 0.2236, 0.0485, 0.0678, 0.0437], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0059, 0.0050, 0.0048, 0.0089, 0.0050, 0.0058, 0.0057], + device='cuda:2'), out_proj_covar=tensor([2.9972e-05, 3.8308e-05, 2.8699e-05, 2.8905e-05, 5.9661e-05, 2.8463e-05, + 3.2832e-05, 3.0709e-05], device='cuda:2') +2023-03-31 19:21:06,852 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1326.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:33,448 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1348.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:35,932 INFO [train.py:903] (2/4) Epoch 1, batch 1350, loss[loss=0.5569, simple_loss=0.5051, pruned_loss=0.3141, over 19485.00 frames. ], tot_loss[loss=0.6496, simple_loss=0.573, pruned_loss=0.3982, over 3833463.92 frames. ], batch size: 49, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:04,700 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7012, 0.8867, 1.2242, 0.9463, 1.2510, 1.5526, 1.4883, 1.4235], + device='cuda:2'), covar=tensor([0.6480, 1.2272, 1.0975, 0.8718, 0.8504, 1.2197, 0.9071, 0.8062], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0181, 0.0185, 0.0143, 0.0167, 0.0184, 0.0161, 0.0143], + device='cuda:2'), out_proj_covar=tensor([9.1899e-05, 1.2023e-04, 1.2509e-04, 9.3129e-05, 1.0782e-04, 1.2267e-04, + 1.0814e-04, 9.3411e-05], device='cuda:2') +2023-03-31 19:22:31,192 INFO [train.py:903] (2/4) Epoch 1, batch 1400, loss[loss=0.4852, simple_loss=0.4524, pruned_loss=0.2619, over 18267.00 frames. ], tot_loss[loss=0.6357, simple_loss=0.5638, pruned_loss=0.3834, over 3837786.13 frames. 
], batch size: 40, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:35,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.620e+02 9.515e+02 1.230e+03 4.278e+03, threshold=1.903e+03, percent-clipped=3.0 +2023-03-31 19:23:09,467 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5595, 1.3356, 1.2620, 2.0215, 2.4070, 1.7210, 2.3980, 2.3702], + device='cuda:2'), covar=tensor([0.1062, 0.4111, 0.6142, 0.2159, 0.1401, 0.5558, 0.1318, 0.1413], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0110, 0.0142, 0.0092, 0.0108, 0.0165, 0.0102, 0.0088], + device='cuda:2'), out_proj_covar=tensor([4.7618e-05, 7.5816e-05, 9.8736e-05, 6.4463e-05, 6.5210e-05, 1.0831e-04, + 6.4613e-05, 5.9885e-05], device='cuda:2') +2023-03-31 19:23:24,312 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 19:23:25,237 INFO [train.py:903] (2/4) Epoch 1, batch 1450, loss[loss=0.6147, simple_loss=0.5582, pruned_loss=0.344, over 19522.00 frames. ], tot_loss[loss=0.6272, simple_loss=0.5581, pruned_loss=0.3734, over 3838407.53 frames. ], batch size: 54, lr: 4.90e-02, grad_scale: 8.0 +2023-03-31 19:23:26,443 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1452.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:24:18,021 INFO [train.py:903] (2/4) Epoch 1, batch 1500, loss[loss=0.4719, simple_loss=0.4397, pruned_loss=0.2542, over 19750.00 frames. ], tot_loss[loss=0.6198, simple_loss=0.5529, pruned_loss=0.365, over 3833522.75 frames. ], batch size: 46, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:24:23,064 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+02 9.104e+02 1.060e+03 1.370e+03 5.981e+03, threshold=2.119e+03, percent-clipped=12.0 +2023-03-31 19:24:35,375 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1515.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:25:14,420 INFO [train.py:903] (2/4) Epoch 1, batch 1550, loss[loss=0.553, simple_loss=0.5123, pruned_loss=0.2999, over 19787.00 frames. ], tot_loss[loss=0.6104, simple_loss=0.5471, pruned_loss=0.355, over 3830352.48 frames. ], batch size: 54, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:25:43,581 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1580.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:25:47,332 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1584.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:05,887 INFO [train.py:903] (2/4) Epoch 1, batch 1600, loss[loss=0.602, simple_loss=0.5572, pruned_loss=0.3264, over 18862.00 frames. ], tot_loss[loss=0.6059, simple_loss=0.5448, pruned_loss=0.3488, over 3827494.85 frames. ], batch size: 74, lr: 4.88e-02, grad_scale: 8.0 +2023-03-31 19:26:10,815 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.811e+02 9.198e+02 1.152e+03 1.497e+03 2.578e+03, threshold=2.303e+03, percent-clipped=3.0 +2023-03-31 19:26:11,224 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1605.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:25,447 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-03-31 19:26:31,700 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1418, 1.0615, 1.0945, 1.0273, 0.7163, 1.0832, 0.4710, 0.7475], + device='cuda:2'), covar=tensor([0.1080, 0.1513, 0.1254, 0.1992, 0.2430, 0.1487, 0.3926, 0.2261], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0088, 0.0102, 0.0112, 0.0115, 0.0103, 0.0159, 0.0118], + device='cuda:2'), out_proj_covar=tensor([6.5097e-05, 5.3903e-05, 6.5712e-05, 8.0639e-05, 8.2878e-05, 6.9879e-05, + 1.1592e-04, 8.4782e-05], device='cuda:2') +2023-03-31 19:26:35,475 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1629.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:36,300 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1630.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:57,904 INFO [train.py:903] (2/4) Epoch 1, batch 1650, loss[loss=0.5321, simple_loss=0.5036, pruned_loss=0.2803, over 19664.00 frames. ], tot_loss[loss=0.5976, simple_loss=0.5395, pruned_loss=0.3407, over 3830616.98 frames. ], batch size: 55, lr: 4.87e-02, grad_scale: 8.0 +2023-03-31 19:27:41,135 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6369, 2.4668, 2.6108, 2.6606, 1.1459, 2.5841, 2.3791, 2.6873], + device='cuda:2'), covar=tensor([0.0455, 0.0744, 0.0536, 0.0383, 0.2919, 0.0496, 0.0673, 0.0584], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0093, 0.0076, 0.0068, 0.0152, 0.0072, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([4.2272e-05, 6.1266e-05, 4.2275e-05, 4.0518e-05, 9.8463e-05, 4.1459e-05, + 5.2153e-05, 5.0454e-05], device='cuda:2') +2023-03-31 19:27:52,328 INFO [train.py:903] (2/4) Epoch 1, batch 1700, loss[loss=0.5081, simple_loss=0.472, pruned_loss=0.2737, over 19733.00 frames. ], tot_loss[loss=0.5907, simple_loss=0.5353, pruned_loss=0.3336, over 3827728.39 frames. ], batch size: 45, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:27:56,191 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.719e+02 9.402e+02 1.223e+03 1.535e+03 2.582e+03, threshold=2.447e+03, percent-clipped=3.0 +2023-03-31 19:28:26,611 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 19:28:46,833 INFO [train.py:903] (2/4) Epoch 1, batch 1750, loss[loss=0.549, simple_loss=0.5156, pruned_loss=0.2918, over 19552.00 frames. ], tot_loss[loss=0.5824, simple_loss=0.5304, pruned_loss=0.3258, over 3828792.70 frames. ], batch size: 56, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:29:37,338 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1796.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:29:43,441 INFO [train.py:903] (2/4) Epoch 1, batch 1800, loss[loss=0.5761, simple_loss=0.5438, pruned_loss=0.3044, over 19349.00 frames. ], tot_loss[loss=0.5787, simple_loss=0.5284, pruned_loss=0.3215, over 3841748.21 frames. ], batch size: 70, lr: 4.85e-02, grad_scale: 8.0 +2023-03-31 19:29:47,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.927e+02 9.266e+02 1.209e+03 1.539e+03 2.564e+03, threshold=2.418e+03, percent-clipped=2.0 +2023-03-31 19:30:36,172 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 19:30:40,566 INFO [train.py:903] (2/4) Epoch 1, batch 1850, loss[loss=0.5536, simple_loss=0.5168, pruned_loss=0.2958, over 17501.00 frames. ], tot_loss[loss=0.5708, simple_loss=0.5239, pruned_loss=0.3146, over 3839553.39 frames. 
], batch size: 101, lr: 4.84e-02, grad_scale: 8.0 +2023-03-31 19:30:45,859 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1856.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:01,375 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4834, 1.0880, 1.0589, 1.1426, 1.5492, 1.8158, 1.5755, 1.3436], + device='cuda:2'), covar=tensor([0.2710, 0.4699, 0.6888, 0.4055, 0.7292, 0.2749, 0.4086, 0.3490], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0162, 0.0227, 0.0158, 0.0253, 0.0166, 0.0199, 0.0167], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 19:31:08,083 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1875.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:11,943 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 19:31:20,283 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1886.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:22,189 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1888.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:36,179 INFO [train.py:903] (2/4) Epoch 1, batch 1900, loss[loss=0.4856, simple_loss=0.461, pruned_loss=0.255, over 19763.00 frames. ], tot_loss[loss=0.5639, simple_loss=0.5199, pruned_loss=0.3084, over 3843968.84 frames. ], batch size: 47, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:31:40,294 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 9.078e+02 1.104e+03 1.499e+03 2.754e+03, threshold=2.207e+03, percent-clipped=2.0 +2023-03-31 19:31:47,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1911.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:47,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1911.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:52,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 19:31:56,249 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 19:32:06,352 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:32:19,669 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 19:32:31,504 INFO [train.py:903] (2/4) Epoch 1, batch 1950, loss[loss=0.5823, simple_loss=0.5373, pruned_loss=0.314, over 19353.00 frames. ], tot_loss[loss=0.56, simple_loss=0.5177, pruned_loss=0.3047, over 3848965.35 frames. 
], batch size: 66, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:32:57,189 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1973.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:33:20,354 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4882, 1.7612, 1.7836, 2.5011, 3.1822, 1.6225, 2.9171, 3.2096], + device='cuda:2'), covar=tensor([0.0496, 0.2834, 0.5072, 0.2048, 0.0606, 0.5183, 0.0955, 0.0866], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0177, 0.0216, 0.0163, 0.0150, 0.0264, 0.0160, 0.0137], + device='cuda:2'), out_proj_covar=tensor([6.8431e-05, 1.2343e-04, 1.5073e-04, 1.2073e-04, 9.5912e-05, 1.7239e-04, + 1.1198e-04, 9.4123e-05], device='cuda:2') +2023-03-31 19:33:29,177 INFO [train.py:903] (2/4) Epoch 1, batch 2000, loss[loss=0.4777, simple_loss=0.459, pruned_loss=0.2483, over 19601.00 frames. ], tot_loss[loss=0.5547, simple_loss=0.5146, pruned_loss=0.3001, over 3855620.80 frames. ], batch size: 50, lr: 4.82e-02, grad_scale: 8.0 +2023-03-31 19:33:30,707 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9635, 1.5340, 1.5945, 2.2458, 2.6685, 1.6179, 2.6175, 2.6220], + device='cuda:2'), covar=tensor([0.0597, 0.3621, 0.5404, 0.2286, 0.0841, 0.5002, 0.1074, 0.1143], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0177, 0.0216, 0.0164, 0.0149, 0.0262, 0.0159, 0.0138], + device='cuda:2'), out_proj_covar=tensor([6.7696e-05, 1.2358e-04, 1.5077e-04, 1.2122e-04, 9.5069e-05, 1.7142e-04, + 1.1147e-04, 9.4524e-05], device='cuda:2') +2023-03-31 19:33:33,531 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.917e+02 1.007e+03 1.260e+03 1.703e+03 3.255e+03, threshold=2.521e+03, percent-clipped=11.0 +2023-03-31 19:33:42,755 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2682, 1.4799, 1.8844, 1.6919, 2.3271, 2.7993, 2.4662, 1.9901], + device='cuda:2'), covar=tensor([0.3309, 0.2066, 0.3453, 0.3157, 0.2101, 0.0698, 0.1508, 0.2767], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0072, 0.0092, 0.0102, 0.0102, 0.0052, 0.0078, 0.0103], + device='cuda:2'), out_proj_covar=tensor([6.0461e-05, 4.6700e-05, 6.1326e-05, 7.0817e-05, 7.0385e-05, 2.9950e-05, + 5.3822e-05, 6.8071e-05], device='cuda:2') +2023-03-31 19:34:19,000 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2043.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:34:25,204 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 19:34:27,588 INFO [train.py:903] (2/4) Epoch 1, batch 2050, loss[loss=0.4503, simple_loss=0.4384, pruned_loss=0.2311, over 19766.00 frames. ], tot_loss[loss=0.5438, simple_loss=0.5082, pruned_loss=0.2918, over 3852003.97 frames. ], batch size: 45, lr: 4.81e-02, grad_scale: 16.0 +2023-03-31 19:34:43,696 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 19:34:43,738 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 19:35:06,986 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 19:35:12,726 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2088.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:35:27,378 INFO [train.py:903] (2/4) Epoch 1, batch 2100, loss[loss=0.5703, simple_loss=0.5285, pruned_loss=0.3061, over 19661.00 frames. 
], tot_loss[loss=0.5349, simple_loss=0.5032, pruned_loss=0.285, over 3847086.64 frames. ], batch size: 58, lr: 4.80e-02, grad_scale: 16.0 +2023-03-31 19:35:31,666 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+02 9.211e+02 1.091e+03 1.524e+03 2.851e+03, threshold=2.182e+03, percent-clipped=6.0 +2023-03-31 19:35:56,712 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 19:36:17,086 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 19:36:24,972 INFO [train.py:903] (2/4) Epoch 1, batch 2150, loss[loss=0.4561, simple_loss=0.4437, pruned_loss=0.2342, over 19761.00 frames. ], tot_loss[loss=0.5294, simple_loss=0.5005, pruned_loss=0.2804, over 3840716.90 frames. ], batch size: 48, lr: 4.79e-02, grad_scale: 16.0 +2023-03-31 19:36:45,568 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2167.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:37:13,923 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2192.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:37:24,850 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:37:25,807 INFO [train.py:903] (2/4) Epoch 1, batch 2200, loss[loss=0.4017, simple_loss=0.4078, pruned_loss=0.1978, over 19745.00 frames. ], tot_loss[loss=0.5227, simple_loss=0.4963, pruned_loss=0.2755, over 3815499.08 frames. ], batch size: 48, lr: 4.78e-02, grad_scale: 16.0 +2023-03-31 19:37:31,694 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.347e+02 9.332e+02 1.145e+03 1.435e+03 3.303e+03, threshold=2.290e+03, percent-clipped=7.0 +2023-03-31 19:37:49,480 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2219.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:03,592 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:27,474 INFO [train.py:903] (2/4) Epoch 1, batch 2250, loss[loss=0.4361, simple_loss=0.4304, pruned_loss=0.2209, over 18219.00 frames. ], tot_loss[loss=0.5178, simple_loss=0.4937, pruned_loss=0.2718, over 3801256.49 frames. ], batch size: 40, lr: 4.77e-02, grad_scale: 16.0 +2023-03-31 19:39:24,145 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2299.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:39:25,915 INFO [train.py:903] (2/4) Epoch 1, batch 2300, loss[loss=0.5205, simple_loss=0.5068, pruned_loss=0.2671, over 19664.00 frames. ], tot_loss[loss=0.5157, simple_loss=0.4936, pruned_loss=0.2696, over 3806135.22 frames. ], batch size: 59, lr: 4.77e-02, grad_scale: 8.0 +2023-03-31 19:39:31,322 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+02 9.458e+02 1.205e+03 1.557e+03 3.326e+03, threshold=2.410e+03, percent-clipped=10.0 +2023-03-31 19:39:39,114 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-03-31 19:39:41,702 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2315.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:39:54,006 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2324.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:04,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2334.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:17,395 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2344.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:20,659 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2347.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:24,834 INFO [train.py:903] (2/4) Epoch 1, batch 2350, loss[loss=0.5108, simple_loss=0.4959, pruned_loss=0.2629, over 19685.00 frames. ], tot_loss[loss=0.5105, simple_loss=0.4902, pruned_loss=0.2658, over 3819644.63 frames. ], batch size: 60, lr: 4.76e-02, grad_scale: 8.0 +2023-03-31 19:40:33,270 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2358.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:48,352 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2369.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:41:07,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 19:41:23,402 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 19:41:26,494 INFO [train.py:903] (2/4) Epoch 1, batch 2400, loss[loss=0.5064, simple_loss=0.5041, pruned_loss=0.2544, over 19707.00 frames. ], tot_loss[loss=0.5055, simple_loss=0.4873, pruned_loss=0.2622, over 3804940.19 frames. ], batch size: 59, lr: 4.75e-02, grad_scale: 8.0 +2023-03-31 19:41:33,163 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+02 9.458e+02 1.226e+03 1.613e+03 2.603e+03, threshold=2.451e+03, percent-clipped=4.0 +2023-03-31 19:42:26,034 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-31 19:42:26,146 INFO [train.py:903] (2/4) Epoch 1, batch 2450, loss[loss=0.4126, simple_loss=0.4193, pruned_loss=0.2029, over 19757.00 frames. ], tot_loss[loss=0.4999, simple_loss=0.4843, pruned_loss=0.2581, over 3822675.80 frames. ], batch size: 46, lr: 4.74e-02, grad_scale: 8.0 +2023-03-31 19:43:24,719 INFO [train.py:903] (2/4) Epoch 1, batch 2500, loss[loss=0.4776, simple_loss=0.4648, pruned_loss=0.2452, over 19841.00 frames. ], tot_loss[loss=0.4988, simple_loss=0.4843, pruned_loss=0.2569, over 3799727.69 frames. 
], batch size: 52, lr: 4.73e-02, grad_scale: 8.0 +2023-03-31 19:43:30,996 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.049e+02 1.082e+03 1.390e+03 1.742e+03 4.873e+03, threshold=2.779e+03, percent-clipped=5.0 +2023-03-31 19:43:48,363 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1464, 1.4312, 1.8812, 1.6780, 2.0276, 2.2488, 2.2421, 2.1165], + device='cuda:2'), covar=tensor([0.0990, 0.2288, 0.1723, 0.1645, 0.2843, 0.1212, 0.1788, 0.1493], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0114, 0.0137, 0.0110, 0.0161, 0.0107, 0.0119, 0.0111], + device='cuda:2'), out_proj_covar=tensor([7.0435e-05, 8.0342e-05, 9.0913e-05, 7.7479e-05, 1.0959e-04, 7.3970e-05, + 7.8618e-05, 7.4883e-05], device='cuda:2') +2023-03-31 19:44:22,065 INFO [train.py:903] (2/4) Epoch 1, batch 2550, loss[loss=0.4618, simple_loss=0.465, pruned_loss=0.2293, over 19340.00 frames. ], tot_loss[loss=0.495, simple_loss=0.4819, pruned_loss=0.2542, over 3809671.17 frames. ], batch size: 66, lr: 4.72e-02, grad_scale: 8.0 +2023-03-31 19:44:47,195 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2571.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:09,115 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2590.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:14,140 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 19:45:15,646 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2596.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:21,675 INFO [train.py:903] (2/4) Epoch 1, batch 2600, loss[loss=0.5133, simple_loss=0.5113, pruned_loss=0.2577, over 19666.00 frames. ], tot_loss[loss=0.4887, simple_loss=0.4781, pruned_loss=0.2498, over 3815081.89 frames. ], batch size: 60, lr: 4.71e-02, grad_scale: 8.0 +2023-03-31 19:45:24,429 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2603.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:28,254 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+02 9.154e+02 1.259e+03 1.710e+03 2.682e+03, threshold=2.519e+03, percent-clipped=0.0 +2023-03-31 19:45:39,468 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2615.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:55,221 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2628.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:46:22,939 INFO [train.py:903] (2/4) Epoch 1, batch 2650, loss[loss=0.4149, simple_loss=0.4219, pruned_loss=0.204, over 19399.00 frames. ], tot_loss[loss=0.4856, simple_loss=0.4769, pruned_loss=0.2473, over 3816601.79 frames. ], batch size: 48, lr: 4.70e-02, grad_scale: 8.0 +2023-03-31 19:46:39,355 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-03-31 19:46:50,667 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8717, 1.7806, 1.9679, 1.6785, 0.9016, 1.4822, 0.7017, 1.6981], + device='cuda:2'), covar=tensor([0.0909, 0.0516, 0.0676, 0.1277, 0.1613, 0.1351, 0.2752, 0.1218], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0097, 0.0122, 0.0152, 0.0146, 0.0147, 0.0178, 0.0158], + device='cuda:2'), out_proj_covar=tensor([8.0411e-05, 6.8032e-05, 8.6877e-05, 1.1046e-04, 1.0632e-04, 1.0467e-04, + 1.2978e-04, 1.2102e-04], device='cuda:2') +2023-03-31 19:47:20,114 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2151, 1.3050, 1.1282, 0.9663, 0.8618, 1.1337, 0.3663, 0.8089], + device='cuda:2'), covar=tensor([0.0763, 0.0492, 0.0759, 0.1255, 0.1157, 0.0961, 0.2055, 0.1244], + device='cuda:2'), in_proj_covar=tensor([0.0113, 0.0100, 0.0126, 0.0159, 0.0147, 0.0150, 0.0183, 0.0162], + device='cuda:2'), out_proj_covar=tensor([8.2924e-05, 7.0299e-05, 9.0033e-05, 1.1556e-04, 1.0746e-04, 1.0649e-04, + 1.3334e-04, 1.2418e-04], device='cuda:2') +2023-03-31 19:47:23,186 INFO [train.py:903] (2/4) Epoch 1, batch 2700, loss[loss=0.4454, simple_loss=0.4572, pruned_loss=0.2168, over 19760.00 frames. ], tot_loss[loss=0.4808, simple_loss=0.4737, pruned_loss=0.2441, over 3810336.97 frames. ], batch size: 54, lr: 4.69e-02, grad_scale: 8.0 +2023-03-31 19:47:24,584 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2702.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:47:25,689 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2703.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:47:29,732 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+02 8.490e+02 1.133e+03 1.436e+03 3.154e+03, threshold=2.267e+03, percent-clipped=3.0 +2023-03-31 19:48:24,806 INFO [train.py:903] (2/4) Epoch 1, batch 2750, loss[loss=0.455, simple_loss=0.4671, pruned_loss=0.2215, over 19490.00 frames. ], tot_loss[loss=0.477, simple_loss=0.471, pruned_loss=0.2415, over 3810913.10 frames. ], batch size: 64, lr: 4.68e-02, grad_scale: 8.0 +2023-03-31 19:49:25,713 INFO [train.py:903] (2/4) Epoch 1, batch 2800, loss[loss=0.4618, simple_loss=0.4625, pruned_loss=0.2306, over 19783.00 frames. ], tot_loss[loss=0.4727, simple_loss=0.4685, pruned_loss=0.2385, over 3829804.11 frames. ], batch size: 54, lr: 4.67e-02, grad_scale: 8.0 +2023-03-31 19:49:31,053 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 1.002e+03 1.265e+03 1.511e+03 4.462e+03, threshold=2.529e+03, percent-clipped=7.0 +2023-03-31 19:49:45,686 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2817.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:50:15,472 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2842.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:50:26,211 INFO [train.py:903] (2/4) Epoch 1, batch 2850, loss[loss=0.4077, simple_loss=0.4274, pruned_loss=0.194, over 19862.00 frames. ], tot_loss[loss=0.4712, simple_loss=0.4673, pruned_loss=0.2376, over 3833660.55 frames. ], batch size: 52, lr: 4.66e-02, grad_scale: 8.0 +2023-03-31 19:51:22,270 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 19:51:25,263 INFO [train.py:903] (2/4) Epoch 1, batch 2900, loss[loss=0.4615, simple_loss=0.4653, pruned_loss=0.2289, over 19656.00 frames. ], tot_loss[loss=0.4725, simple_loss=0.4689, pruned_loss=0.238, over 3824694.58 frames. 
], batch size: 55, lr: 4.65e-02, grad_scale: 8.0 +2023-03-31 19:51:30,478 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.237e+02 1.045e+03 1.349e+03 1.754e+03 3.463e+03, threshold=2.699e+03, percent-clipped=4.0 +2023-03-31 19:52:05,449 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9147, 2.7669, 3.1879, 2.4015, 2.4249, 1.1448, 1.2420, 1.9265], + device='cuda:2'), covar=tensor([0.2736, 0.0833, 0.0359, 0.1177, 0.2354, 0.1558, 0.2882, 0.1558], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0124, 0.0114, 0.0145, 0.0127, 0.0168, 0.0194, 0.0167], + device='cuda:2'), out_proj_covar=tensor([1.4811e-04, 9.5141e-05, 8.8130e-05, 1.1317e-04, 1.0166e-04, 1.2746e-04, + 1.4279e-04, 1.2713e-04], device='cuda:2') +2023-03-31 19:52:25,021 INFO [train.py:903] (2/4) Epoch 1, batch 2950, loss[loss=0.46, simple_loss=0.4619, pruned_loss=0.2291, over 19484.00 frames. ], tot_loss[loss=0.4698, simple_loss=0.4676, pruned_loss=0.236, over 3835603.25 frames. ], batch size: 49, lr: 4.64e-02, grad_scale: 8.0 +2023-03-31 19:52:30,856 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2955.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:53:26,147 INFO [train.py:903] (2/4) Epoch 1, batch 3000, loss[loss=0.377, simple_loss=0.4062, pruned_loss=0.1739, over 19766.00 frames. ], tot_loss[loss=0.4642, simple_loss=0.4642, pruned_loss=0.2321, over 3837964.53 frames. ], batch size: 51, lr: 4.63e-02, grad_scale: 8.0 +2023-03-31 19:53:26,148 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 19:53:38,717 INFO [train.py:937] (2/4) Epoch 1, validation: loss=0.3995, simple_loss=0.4801, pruned_loss=0.1594, over 944034.00 frames. +2023-03-31 19:53:38,718 INFO [train.py:938] (2/4) Maximum memory allocated so far is 16393MB +2023-03-31 19:53:43,197 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 19:53:45,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 9.060e+02 1.151e+03 1.550e+03 2.691e+03, threshold=2.303e+03, percent-clipped=0.0 +2023-03-31 19:53:46,123 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1148, 0.9082, 0.9494, 1.0354, 1.0454, 1.3599, 1.2537, 1.1188], + device='cuda:2'), covar=tensor([0.1199, 0.2321, 0.2130, 0.1791, 0.2948, 0.1025, 0.1538, 0.1461], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0152, 0.0164, 0.0139, 0.0198, 0.0127, 0.0139, 0.0127], + device='cuda:2'), out_proj_covar=tensor([8.2249e-05, 1.0954e-04, 1.1313e-04, 1.0064e-04, 1.4105e-04, 9.0390e-05, + 9.6857e-05, 9.0224e-05], device='cuda:2') +2023-03-31 19:54:23,259 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3037.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:35,534 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3047.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:40,056 INFO [train.py:903] (2/4) Epoch 1, batch 3050, loss[loss=0.4012, simple_loss=0.4097, pruned_loss=0.1963, over 19477.00 frames. ], tot_loss[loss=0.4616, simple_loss=0.4627, pruned_loss=0.2303, over 3828578.26 frames. 
], batch size: 49, lr: 4.62e-02, grad_scale: 8.0 +2023-03-31 19:54:58,664 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7965, 1.5819, 1.8897, 1.1636, 2.4932, 2.8615, 2.7795, 2.7508], + device='cuda:2'), covar=tensor([0.2287, 0.2879, 0.2019, 0.3628, 0.0952, 0.0250, 0.0352, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0206, 0.0194, 0.0250, 0.0177, 0.0099, 0.0123, 0.0102], + device='cuda:2'), out_proj_covar=tensor([1.6562e-04, 1.4094e-04, 1.3203e-04, 1.7038e-04, 1.3806e-04, 6.3404e-05, + 8.4047e-05, 7.0720e-05], device='cuda:2') +2023-03-31 19:55:07,342 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3073.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:55:36,299 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3098.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:55:39,212 INFO [train.py:903] (2/4) Epoch 1, batch 3100, loss[loss=0.5437, simple_loss=0.5026, pruned_loss=0.2924, over 19284.00 frames. ], tot_loss[loss=0.4608, simple_loss=0.4618, pruned_loss=0.2299, over 3836415.20 frames. ], batch size: 66, lr: 4.61e-02, grad_scale: 8.0 +2023-03-31 19:55:45,834 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+02 1.021e+03 1.362e+03 1.815e+03 5.785e+03, threshold=2.723e+03, percent-clipped=14.0 +2023-03-31 19:56:02,185 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0884, 3.9305, 5.4714, 5.1239, 1.7030, 5.0036, 4.5766, 5.1064], + device='cuda:2'), covar=tensor([0.0232, 0.0562, 0.0253, 0.0185, 0.3929, 0.0250, 0.0476, 0.0530], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0161, 0.0185, 0.0125, 0.0292, 0.0112, 0.0157, 0.0167], + device='cuda:2'), out_proj_covar=tensor([8.9250e-05, 1.1298e-04, 1.1716e-04, 7.9339e-05, 1.6873e-04, 7.2447e-05, + 1.0551e-04, 1.0561e-04], device='cuda:2') +2023-03-31 19:56:18,450 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 19:56:28,166 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0229, 2.7253, 2.5506, 2.6624, 1.4494, 1.8669, 0.8822, 2.2545], + device='cuda:2'), covar=tensor([0.0516, 0.0373, 0.0420, 0.0515, 0.0995, 0.0981, 0.1593, 0.0922], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0087, 0.0099, 0.0122, 0.0128, 0.0128, 0.0145, 0.0141], + device='cuda:2'), out_proj_covar=tensor([7.1803e-05, 6.1938e-05, 7.3870e-05, 9.2406e-05, 9.4095e-05, 9.4394e-05, + 1.0644e-04, 1.0601e-04], device='cuda:2') +2023-03-31 19:56:41,856 INFO [train.py:903] (2/4) Epoch 1, batch 3150, loss[loss=0.4289, simple_loss=0.4351, pruned_loss=0.2114, over 19419.00 frames. ], tot_loss[loss=0.46, simple_loss=0.4617, pruned_loss=0.2292, over 3833489.97 frames. 
], batch size: 48, lr: 4.60e-02, grad_scale: 8.0 +2023-03-31 19:56:54,515 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3162.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:02,419 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1344, 4.1628, 5.5669, 5.2586, 1.7409, 5.2661, 4.7767, 5.1223], + device='cuda:2'), covar=tensor([0.0197, 0.0465, 0.0275, 0.0151, 0.3391, 0.0237, 0.0353, 0.0516], + device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0167, 0.0187, 0.0129, 0.0300, 0.0114, 0.0161, 0.0171], + device='cuda:2'), out_proj_covar=tensor([9.1117e-05, 1.1839e-04, 1.1912e-04, 8.1201e-05, 1.7229e-04, 7.5144e-05, + 1.0753e-04, 1.0919e-04], device='cuda:2') +2023-03-31 19:57:09,035 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 19:57:24,066 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3186.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:40,826 INFO [train.py:903] (2/4) Epoch 1, batch 3200, loss[loss=0.4942, simple_loss=0.4843, pruned_loss=0.252, over 19432.00 frames. ], tot_loss[loss=0.4567, simple_loss=0.4597, pruned_loss=0.2269, over 3834716.13 frames. ], batch size: 70, lr: 4.59e-02, grad_scale: 8.0 +2023-03-31 19:57:46,450 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+02 9.158e+02 1.127e+03 1.418e+03 2.574e+03, threshold=2.253e+03, percent-clipped=0.0 +2023-03-31 19:58:16,568 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2661, 3.8417, 2.1043, 3.4089, 1.3454, 3.9814, 3.3003, 3.6625], + device='cuda:2'), covar=tensor([0.0680, 0.1306, 0.3011, 0.0715, 0.3720, 0.0550, 0.0709, 0.0546], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0219, 0.0232, 0.0170, 0.0250, 0.0164, 0.0142, 0.0142], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 19:58:41,766 INFO [train.py:903] (2/4) Epoch 1, batch 3250, loss[loss=0.4399, simple_loss=0.4515, pruned_loss=0.2142, over 19540.00 frames. ], tot_loss[loss=0.4538, simple_loss=0.4576, pruned_loss=0.225, over 3833128.73 frames. ], batch size: 54, lr: 4.58e-02, grad_scale: 8.0 +2023-03-31 19:59:40,389 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3299.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:42,433 INFO [train.py:903] (2/4) Epoch 1, batch 3300, loss[loss=0.4664, simple_loss=0.468, pruned_loss=0.2324, over 18083.00 frames. ], tot_loss[loss=0.4523, simple_loss=0.4566, pruned_loss=0.224, over 3813968.05 frames. ], batch size: 83, lr: 4.57e-02, grad_scale: 8.0 +2023-03-31 19:59:42,827 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3301.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:48,780 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+02 9.991e+02 1.183e+03 1.562e+03 4.237e+03, threshold=2.366e+03, percent-clipped=7.0 +2023-03-31 19:59:48,817 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 19:59:49,132 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3306.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:59:56,251 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.16 vs. 
limit=2.0 +2023-03-31 20:00:43,740 INFO [train.py:903] (2/4) Epoch 1, batch 3350, loss[loss=0.4228, simple_loss=0.4456, pruned_loss=0.1999, over 19661.00 frames. ], tot_loss[loss=0.4482, simple_loss=0.4539, pruned_loss=0.2212, over 3824092.01 frames. ], batch size: 55, lr: 4.56e-02, grad_scale: 8.0 +2023-03-31 20:00:55,952 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-31 20:01:22,251 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3381.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:01:46,109 INFO [train.py:903] (2/4) Epoch 1, batch 3400, loss[loss=0.4641, simple_loss=0.4468, pruned_loss=0.2407, over 19785.00 frames. ], tot_loss[loss=0.4475, simple_loss=0.4534, pruned_loss=0.2208, over 3828305.76 frames. ], batch size: 47, lr: 4.55e-02, grad_scale: 8.0 +2023-03-31 20:01:52,900 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.525e+02 9.967e+02 1.253e+03 1.611e+03 4.007e+03, threshold=2.507e+03, percent-clipped=3.0 +2023-03-31 20:02:02,148 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3414.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:07,709 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3418.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:02:30,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1349, 1.0884, 0.9546, 1.0893, 1.1082, 0.7683, 0.3070, 1.1872], + device='cuda:2'), covar=tensor([0.1199, 0.0744, 0.0952, 0.0914, 0.1017, 0.1432, 0.2429, 0.1050], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0145, 0.0143, 0.0178, 0.0140, 0.0188, 0.0218, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:02:38,889 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3443.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:44,439 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0857, 1.3018, 1.7957, 1.4384, 2.6340, 3.0003, 2.9282, 2.0770], + device='cuda:2'), covar=tensor([0.2341, 0.1530, 0.1791, 0.2146, 0.0855, 0.0393, 0.0713, 0.1271], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0149, 0.0168, 0.0196, 0.0175, 0.0101, 0.0166, 0.0174], + device='cuda:2'), out_proj_covar=tensor([1.2696e-04, 1.0389e-04, 1.1941e-04, 1.3586e-04, 1.1722e-04, 7.3086e-05, + 1.0936e-04, 1.1439e-04], device='cuda:2') +2023-03-31 20:02:48,481 INFO [train.py:903] (2/4) Epoch 1, batch 3450, loss[loss=0.4405, simple_loss=0.4469, pruned_loss=0.2171, over 19363.00 frames. ], tot_loss[loss=0.4486, simple_loss=0.4543, pruned_loss=0.2214, over 3827841.66 frames. ], batch size: 66, lr: 4.54e-02, grad_scale: 8.0 +2023-03-31 20:02:50,760 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 20:03:44,001 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:03:50,183 INFO [train.py:903] (2/4) Epoch 1, batch 3500, loss[loss=0.4564, simple_loss=0.4661, pruned_loss=0.2233, over 19777.00 frames. ], tot_loss[loss=0.4463, simple_loss=0.453, pruned_loss=0.2199, over 3830858.49 frames. 
], batch size: 56, lr: 4.53e-02, grad_scale: 8.0 +2023-03-31 20:03:56,687 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+02 9.703e+02 1.213e+03 1.703e+03 9.610e+03, threshold=2.427e+03, percent-clipped=9.0 +2023-03-31 20:04:52,053 INFO [train.py:903] (2/4) Epoch 1, batch 3550, loss[loss=0.525, simple_loss=0.5046, pruned_loss=0.2727, over 18742.00 frames. ], tot_loss[loss=0.4466, simple_loss=0.4534, pruned_loss=0.22, over 3825605.25 frames. ], batch size: 74, lr: 4.51e-02, grad_scale: 8.0 +2023-03-31 20:05:00,219 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3557.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:05:07,898 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3564.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:05:31,034 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3582.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:05:53,932 INFO [train.py:903] (2/4) Epoch 1, batch 3600, loss[loss=0.446, simple_loss=0.4459, pruned_loss=0.2231, over 19743.00 frames. ], tot_loss[loss=0.4581, simple_loss=0.4599, pruned_loss=0.2281, over 3816992.73 frames. ], batch size: 51, lr: 4.50e-02, grad_scale: 8.0 +2023-03-31 20:06:00,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+02 9.459e+02 1.417e+03 1.964e+03 2.103e+04, threshold=2.834e+03, percent-clipped=17.0 +2023-03-31 20:06:55,025 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3650.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:06:55,790 INFO [train.py:903] (2/4) Epoch 1, batch 3650, loss[loss=0.5862, simple_loss=0.5408, pruned_loss=0.3158, over 19769.00 frames. ], tot_loss[loss=0.4556, simple_loss=0.4588, pruned_loss=0.2262, over 3829362.03 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2023-03-31 20:07:18,776 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3670.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:38,914 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1829, 4.2043, 5.1838, 5.0651, 1.9690, 5.1458, 4.8912, 4.0240], + device='cuda:2'), covar=tensor([0.0366, 0.0739, 0.0834, 0.0460, 0.3545, 0.0527, 0.0507, 0.1461], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0173, 0.0212, 0.0144, 0.0303, 0.0120, 0.0166, 0.0191], + device='cuda:2'), out_proj_covar=tensor([9.2639e-05, 1.2021e-04, 1.3833e-04, 8.8513e-05, 1.7273e-04, 8.0903e-05, + 1.1011e-04, 1.1775e-04], device='cuda:2') +2023-03-31 20:07:49,526 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3694.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:07:50,796 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3695.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:56,857 INFO [train.py:903] (2/4) Epoch 1, batch 3700, loss[loss=0.3967, simple_loss=0.4065, pruned_loss=0.1934, over 19800.00 frames. ], tot_loss[loss=0.4604, simple_loss=0.4619, pruned_loss=0.2295, over 3826777.41 frames. ], batch size: 48, lr: 4.48e-02, grad_scale: 8.0 +2023-03-31 20:08:05,842 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.255e+02 1.022e+03 1.666e+03 2.666e+03 1.441e+04, threshold=3.331e+03, percent-clipped=22.0 +2023-03-31 20:09:01,160 INFO [train.py:903] (2/4) Epoch 1, batch 3750, loss[loss=0.5101, simple_loss=0.5074, pruned_loss=0.2564, over 18221.00 frames. 
], tot_loss[loss=0.4563, simple_loss=0.4594, pruned_loss=0.2266, over 3828410.35 frames. ], batch size: 83, lr: 4.47e-02, grad_scale: 8.0 +2023-03-31 20:09:02,788 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3752.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:09:19,416 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3765.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:09:34,313 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3777.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:09:53,436 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-31 20:09:57,311 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 20:10:05,367 INFO [train.py:903] (2/4) Epoch 1, batch 3800, loss[loss=0.4371, simple_loss=0.4562, pruned_loss=0.209, over 19781.00 frames. ], tot_loss[loss=0.4533, simple_loss=0.458, pruned_loss=0.2243, over 3826084.37 frames. ], batch size: 56, lr: 4.46e-02, grad_scale: 8.0 +2023-03-31 20:10:12,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.844e+02 1.035e+03 1.394e+03 1.973e+03 4.112e+03, threshold=2.788e+03, percent-clipped=1.0 +2023-03-31 20:10:41,865 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-03-31 20:11:08,746 INFO [train.py:903] (2/4) Epoch 1, batch 3850, loss[loss=0.4103, simple_loss=0.4131, pruned_loss=0.2037, over 19746.00 frames. ], tot_loss[loss=0.449, simple_loss=0.4554, pruned_loss=0.2213, over 3833564.05 frames. ], batch size: 45, lr: 4.45e-02, grad_scale: 8.0 +2023-03-31 20:11:34,475 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-03-31 20:11:38,919 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-31 20:12:01,779 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-31 20:12:13,123 INFO [train.py:903] (2/4) Epoch 1, batch 3900, loss[loss=0.3601, simple_loss=0.3739, pruned_loss=0.1732, over 19760.00 frames. ], tot_loss[loss=0.4466, simple_loss=0.4538, pruned_loss=0.2197, over 3834643.01 frames. ], batch size: 47, lr: 4.44e-02, grad_scale: 8.0 +2023-03-31 20:12:22,008 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.917e+02 1.152e+03 1.441e+03 1.935e+03 3.736e+03, threshold=2.883e+03, percent-clipped=2.0 +2023-03-31 20:12:23,347 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3908.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:13:08,711 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:13:18,199 INFO [train.py:903] (2/4) Epoch 1, batch 3950, loss[loss=0.4231, simple_loss=0.4495, pruned_loss=0.1983, over 19751.00 frames. ], tot_loss[loss=0.4476, simple_loss=0.4544, pruned_loss=0.2204, over 3839451.61 frames. ], batch size: 63, lr: 4.43e-02, grad_scale: 8.0 +2023-03-31 20:13:24,032 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 20:14:23,993 INFO [train.py:903] (2/4) Epoch 1, batch 4000, loss[loss=0.3796, simple_loss=0.4094, pruned_loss=0.1749, over 19604.00 frames. ], tot_loss[loss=0.4438, simple_loss=0.4523, pruned_loss=0.2177, over 3823467.54 frames. 
], batch size: 50, lr: 4.42e-02, grad_scale: 8.0 +2023-03-31 20:14:30,110 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2593, 3.3683, 3.8779, 3.5859, 1.3258, 3.3282, 2.9984, 3.3938], + device='cuda:2'), covar=tensor([0.0350, 0.0488, 0.0412, 0.0301, 0.3404, 0.0252, 0.0592, 0.0864], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0183, 0.0221, 0.0155, 0.0319, 0.0120, 0.0173, 0.0218], + device='cuda:2'), out_proj_covar=tensor([9.9809e-05, 1.2731e-04, 1.4580e-04, 9.5067e-05, 1.8015e-04, 8.2802e-05, + 1.1568e-04, 1.3348e-04], device='cuda:2') +2023-03-31 20:14:30,940 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.942e+02 1.053e+03 1.358e+03 1.948e+03 9.883e+03, threshold=2.717e+03, percent-clipped=12.0 +2023-03-31 20:14:49,361 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4021.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:14:52,413 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4023.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:15:11,871 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4038.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:15:12,787 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 20:15:22,681 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4046.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:28,263 INFO [train.py:903] (2/4) Epoch 1, batch 4050, loss[loss=0.4553, simple_loss=0.4592, pruned_loss=0.2257, over 19348.00 frames. ], tot_loss[loss=0.4427, simple_loss=0.4514, pruned_loss=0.217, over 3825756.17 frames. ], batch size: 66, lr: 4.41e-02, grad_scale: 8.0 +2023-03-31 20:16:32,896 INFO [train.py:903] (2/4) Epoch 1, batch 4100, loss[loss=0.4221, simple_loss=0.4471, pruned_loss=0.1985, over 18861.00 frames. ], tot_loss[loss=0.4408, simple_loss=0.4498, pruned_loss=0.216, over 3819103.00 frames. ], batch size: 74, lr: 4.40e-02, grad_scale: 8.0 +2023-03-31 20:16:41,801 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.649e+02 1.198e+03 1.458e+03 1.833e+03 3.490e+03, threshold=2.915e+03, percent-clipped=3.0 +2023-03-31 20:17:08,493 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 20:17:10,114 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6918, 1.6639, 1.7439, 2.6860, 3.3279, 1.1743, 2.2011, 3.4478], + device='cuda:2'), covar=tensor([0.0266, 0.2344, 0.2638, 0.1527, 0.0353, 0.3043, 0.1065, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0262, 0.0256, 0.0266, 0.0192, 0.0324, 0.0225, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:17:23,012 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4385, 3.4521, 3.9805, 3.6950, 1.5404, 3.4802, 3.2111, 3.4092], + device='cuda:2'), covar=tensor([0.0289, 0.0467, 0.0346, 0.0254, 0.3148, 0.0296, 0.0407, 0.0822], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0196, 0.0235, 0.0165, 0.0341, 0.0129, 0.0181, 0.0240], + device='cuda:2'), out_proj_covar=tensor([1.0553e-04, 1.3585e-04, 1.5575e-04, 1.0221e-04, 1.9168e-04, 8.8294e-05, + 1.2070e-04, 1.4738e-04], device='cuda:2') +2023-03-31 20:17:32,422 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-31 20:17:38,859 INFO [train.py:903] (2/4) Epoch 1, batch 4150, loss[loss=0.357, simple_loss=0.3889, pruned_loss=0.1626, over 19408.00 frames. ], tot_loss[loss=0.4359, simple_loss=0.4466, pruned_loss=0.2125, over 3834235.04 frames. ], batch size: 48, lr: 4.39e-02, grad_scale: 8.0 +2023-03-31 20:17:41,642 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4153.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:17:49,794 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4159.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:18:32,089 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-31 20:18:44,279 INFO [train.py:903] (2/4) Epoch 1, batch 4200, loss[loss=0.3701, simple_loss=0.3991, pruned_loss=0.1706, over 19483.00 frames. ], tot_loss[loss=0.433, simple_loss=0.4447, pruned_loss=0.2106, over 3844259.98 frames. ], batch size: 49, lr: 4.38e-02, grad_scale: 8.0 +2023-03-31 20:18:46,632 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 20:18:51,468 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.919e+02 1.098e+03 1.489e+03 3.268e+03, threshold=2.196e+03, percent-clipped=3.0 +2023-03-31 20:19:19,915 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7151, 1.7033, 1.7007, 1.6627, 1.8034, 2.7322, 2.5218, 2.5927], + device='cuda:2'), covar=tensor([0.1062, 0.1931, 0.2064, 0.2254, 0.3435, 0.1619, 0.2423, 0.1200], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0284, 0.0293, 0.0305, 0.0389, 0.0270, 0.0338, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 20:19:47,746 INFO [train.py:903] (2/4) Epoch 1, batch 4250, loss[loss=0.4652, simple_loss=0.4674, pruned_loss=0.2315, over 19784.00 frames. ], tot_loss[loss=0.4309, simple_loss=0.4428, pruned_loss=0.2095, over 3845724.84 frames. ], batch size: 56, lr: 4.36e-02, grad_scale: 8.0 +2023-03-31 20:20:02,062 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 20:20:15,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 20:20:25,116 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4279.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:20:33,052 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4200, 4.1002, 2.4308, 3.5378, 1.4823, 3.9967, 3.4639, 3.5377], + device='cuda:2'), covar=tensor([0.0502, 0.0971, 0.2024, 0.0628, 0.2823, 0.0570, 0.0596, 0.0532], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0233, 0.0259, 0.0210, 0.0280, 0.0208, 0.0161, 0.0169], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 20:20:36,687 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:20:52,966 INFO [train.py:903] (2/4) Epoch 1, batch 4300, loss[loss=0.3547, simple_loss=0.3787, pruned_loss=0.1653, over 19785.00 frames. ], tot_loss[loss=0.432, simple_loss=0.4435, pruned_loss=0.2103, over 3817501.33 frames. 
], batch size: 48, lr: 4.35e-02, grad_scale: 8.0 +2023-03-31 20:20:57,841 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4304.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:20:58,995 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7124, 2.0945, 2.1267, 2.6730, 4.2219, 1.3391, 2.3870, 4.0884], + device='cuda:2'), covar=tensor([0.0335, 0.2558, 0.2935, 0.1881, 0.0358, 0.3103, 0.1204, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0265, 0.0253, 0.0261, 0.0189, 0.0314, 0.0228, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:21:00,242 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4306.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:21:02,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.021e+02 1.171e+03 1.478e+03 2.100e+03 3.660e+03, threshold=2.957e+03, percent-clipped=20.0 +2023-03-31 20:21:46,522 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 20:21:59,424 INFO [train.py:903] (2/4) Epoch 1, batch 4350, loss[loss=0.3801, simple_loss=0.3937, pruned_loss=0.1832, over 19078.00 frames. ], tot_loss[loss=0.4257, simple_loss=0.439, pruned_loss=0.2062, over 3828619.42 frames. ], batch size: 42, lr: 4.34e-02, grad_scale: 8.0 +2023-03-31 20:22:32,329 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-31 20:23:03,133 INFO [train.py:903] (2/4) Epoch 1, batch 4400, loss[loss=0.3925, simple_loss=0.4019, pruned_loss=0.1916, over 18676.00 frames. ], tot_loss[loss=0.4226, simple_loss=0.4373, pruned_loss=0.204, over 3834004.93 frames. ], batch size: 41, lr: 4.33e-02, grad_scale: 8.0 +2023-03-31 20:23:05,860 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4403.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:23:11,487 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.454e+02 9.391e+02 1.114e+03 1.514e+03 3.216e+03, threshold=2.228e+03, percent-clipped=1.0 +2023-03-31 20:23:14,591 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4409.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:23:29,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 20:23:39,398 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 20:23:46,913 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4434.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:24:06,850 INFO [train.py:903] (2/4) Epoch 1, batch 4450, loss[loss=0.4754, simple_loss=0.4812, pruned_loss=0.2348, over 19565.00 frames. ], tot_loss[loss=0.4228, simple_loss=0.4372, pruned_loss=0.2042, over 3834313.65 frames. ], batch size: 61, lr: 4.32e-02, grad_scale: 8.0 +2023-03-31 20:25:09,800 INFO [train.py:903] (2/4) Epoch 1, batch 4500, loss[loss=0.3781, simple_loss=0.394, pruned_loss=0.1811, over 19374.00 frames. ], tot_loss[loss=0.4201, simple_loss=0.4348, pruned_loss=0.2027, over 3839758.64 frames. 
], batch size: 47, lr: 4.31e-02, grad_scale: 8.0 +2023-03-31 20:25:12,505 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4503.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:25:18,131 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.500e+02 1.049e+03 1.357e+03 1.620e+03 3.962e+03, threshold=2.713e+03, percent-clipped=8.0 +2023-03-31 20:25:49,020 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0253, 3.7198, 4.5989, 4.2887, 1.5262, 4.1283, 3.6622, 3.8343], + device='cuda:2'), covar=tensor([0.0250, 0.0452, 0.0316, 0.0259, 0.3286, 0.0195, 0.0359, 0.0814], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0203, 0.0244, 0.0177, 0.0350, 0.0131, 0.0183, 0.0256], + device='cuda:2'), out_proj_covar=tensor([1.0843e-04, 1.3862e-04, 1.6195e-04, 1.1079e-04, 1.9516e-04, 8.5915e-05, + 1.1915e-04, 1.5516e-04], device='cuda:2') +2023-03-31 20:26:14,048 INFO [train.py:903] (2/4) Epoch 1, batch 4550, loss[loss=0.4401, simple_loss=0.4557, pruned_loss=0.2122, over 19312.00 frames. ], tot_loss[loss=0.4199, simple_loss=0.4348, pruned_loss=0.2025, over 3827633.83 frames. ], batch size: 66, lr: 4.30e-02, grad_scale: 8.0 +2023-03-31 20:26:22,696 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-03-31 20:26:24,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 20:26:47,297 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 20:27:16,034 INFO [train.py:903] (2/4) Epoch 1, batch 4600, loss[loss=0.4492, simple_loss=0.4531, pruned_loss=0.2227, over 19530.00 frames. ], tot_loss[loss=0.4196, simple_loss=0.4346, pruned_loss=0.2023, over 3832924.21 frames. ], batch size: 54, lr: 4.29e-02, grad_scale: 4.0 +2023-03-31 20:27:24,059 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.210e+02 9.691e+02 1.279e+03 1.723e+03 8.130e+03, threshold=2.557e+03, percent-clipped=7.0 +2023-03-31 20:27:36,647 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4618.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:27:36,900 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-31 20:28:17,145 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4650.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:18,168 INFO [train.py:903] (2/4) Epoch 1, batch 4650, loss[loss=0.4041, simple_loss=0.4259, pruned_loss=0.1911, over 19584.00 frames. ], tot_loss[loss=0.4181, simple_loss=0.4338, pruned_loss=0.2012, over 3838836.96 frames. ], batch size: 52, lr: 4.28e-02, grad_scale: 4.0 +2023-03-31 20:28:21,975 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7851, 3.4897, 1.8571, 3.1565, 1.0837, 3.4779, 3.1128, 3.0873], + device='cuda:2'), covar=tensor([0.0583, 0.0941, 0.2240, 0.0700, 0.3242, 0.0715, 0.0563, 0.0739], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0224, 0.0267, 0.0212, 0.0281, 0.0217, 0.0164, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 20:28:27,823 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4659.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:34,608 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-03-31 20:28:44,794 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 20:28:58,618 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4684.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:29:18,957 INFO [train.py:903] (2/4) Epoch 1, batch 4700, loss[loss=0.4478, simple_loss=0.461, pruned_loss=0.2173, over 18775.00 frames. ], tot_loss[loss=0.418, simple_loss=0.434, pruned_loss=0.201, over 3823947.19 frames. ], batch size: 74, lr: 4.27e-02, grad_scale: 4.0 +2023-03-31 20:29:28,008 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+02 9.658e+02 1.202e+03 1.526e+03 2.859e+03, threshold=2.405e+03, percent-clipped=1.0 +2023-03-31 20:29:39,196 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 20:29:45,606 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0857, 1.3364, 2.4864, 1.4159, 2.7881, 3.6174, 3.3250, 2.1018], + device='cuda:2'), covar=tensor([0.1690, 0.1748, 0.1316, 0.1672, 0.1155, 0.0516, 0.1142, 0.1626], + device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0231, 0.0221, 0.0251, 0.0256, 0.0191, 0.0266, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:30:21,810 INFO [train.py:903] (2/4) Epoch 1, batch 4750, loss[loss=0.3753, simple_loss=0.4056, pruned_loss=0.1725, over 19398.00 frames. ], tot_loss[loss=0.4173, simple_loss=0.4341, pruned_loss=0.2003, over 3818941.34 frames. ], batch size: 48, lr: 4.26e-02, grad_scale: 4.0 +2023-03-31 20:30:39,331 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4765.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:31:00,307 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5493, 1.4472, 1.3686, 2.0348, 1.4273, 1.4184, 1.3561, 1.2480], + device='cuda:2'), covar=tensor([0.0979, 0.0854, 0.0903, 0.0595, 0.0941, 0.0734, 0.1745, 0.1216], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0107, 0.0134, 0.0149, 0.0158, 0.0085, 0.0162, 0.0126], + device='cuda:2'), out_proj_covar=tensor([8.7416e-05, 7.5545e-05, 8.9631e-05, 9.7731e-05, 1.0180e-04, 5.2867e-05, + 1.1699e-04, 8.6588e-05], device='cuda:2') +2023-03-31 20:31:23,845 INFO [train.py:903] (2/4) Epoch 1, batch 4800, loss[loss=0.4701, simple_loss=0.4804, pruned_loss=0.2299, over 19575.00 frames. ], tot_loss[loss=0.4195, simple_loss=0.4358, pruned_loss=0.2016, over 3831259.49 frames. ], batch size: 61, lr: 4.25e-02, grad_scale: 8.0 +2023-03-31 20:31:32,985 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.504e+02 1.038e+03 1.224e+03 1.522e+03 3.175e+03, threshold=2.447e+03, percent-clipped=5.0 +2023-03-31 20:32:25,510 INFO [train.py:903] (2/4) Epoch 1, batch 4850, loss[loss=0.4685, simple_loss=0.4753, pruned_loss=0.2309, over 17960.00 frames. ], tot_loss[loss=0.4219, simple_loss=0.4373, pruned_loss=0.2032, over 3821124.06 frames. ], batch size: 83, lr: 4.24e-02, grad_scale: 8.0 +2023-03-31 20:32:46,769 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 20:32:54,302 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4874.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:06,424 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-03-31 20:33:12,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 20:33:12,785 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 20:33:23,800 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-03-31 20:33:25,313 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4899.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:27,239 INFO [train.py:903] (2/4) Epoch 1, batch 4900, loss[loss=0.3307, simple_loss=0.3608, pruned_loss=0.1503, over 19781.00 frames. ], tot_loss[loss=0.4193, simple_loss=0.4355, pruned_loss=0.2015, over 3829919.16 frames. ], batch size: 48, lr: 4.23e-02, grad_scale: 8.0 +2023-03-31 20:33:37,034 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.319e+02 9.845e+02 1.167e+03 1.485e+03 2.856e+03, threshold=2.333e+03, percent-clipped=2.0 +2023-03-31 20:33:44,815 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 20:33:52,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-31 20:34:29,521 INFO [train.py:903] (2/4) Epoch 1, batch 4950, loss[loss=0.5462, simple_loss=0.5096, pruned_loss=0.2914, over 19583.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.4351, pruned_loss=0.2003, over 3846731.49 frames. ], batch size: 52, lr: 4.21e-02, grad_scale: 8.0 +2023-03-31 20:34:40,548 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 20:35:05,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 20:35:31,817 INFO [train.py:903] (2/4) Epoch 1, batch 5000, loss[loss=0.3575, simple_loss=0.3784, pruned_loss=0.1682, over 19739.00 frames. ], tot_loss[loss=0.4148, simple_loss=0.4329, pruned_loss=0.1983, over 3837083.33 frames. ], batch size: 46, lr: 4.20e-02, grad_scale: 8.0 +2023-03-31 20:35:35,564 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 20:35:40,116 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.692e+02 8.720e+02 1.063e+03 1.451e+03 3.452e+03, threshold=2.125e+03, percent-clipped=4.0 +2023-03-31 20:35:46,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 20:35:56,157 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5021.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:13,439 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7176, 1.8243, 1.6133, 2.4812, 3.2083, 1.5170, 2.1503, 3.2849], + device='cuda:2'), covar=tensor([0.0228, 0.2194, 0.2443, 0.1422, 0.0387, 0.2249, 0.1014, 0.0347], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0273, 0.0256, 0.0263, 0.0206, 0.0312, 0.0231, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:36:27,147 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5046.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:33,276 INFO [train.py:903] (2/4) Epoch 1, batch 5050, loss[loss=0.4452, simple_loss=0.462, pruned_loss=0.2142, over 19117.00 frames. 
], tot_loss[loss=0.4132, simple_loss=0.4321, pruned_loss=0.1971, over 3819583.83 frames. ], batch size: 69, lr: 4.19e-02, grad_scale: 8.0 +2023-03-31 20:37:02,502 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 20:37:05,170 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0815, 1.3480, 1.2624, 1.7256, 1.3392, 1.9145, 1.8440, 1.8497], + device='cuda:2'), covar=tensor([0.0836, 0.1705, 0.1845, 0.1544, 0.2662, 0.1333, 0.1757, 0.1066], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0304, 0.0301, 0.0323, 0.0404, 0.0277, 0.0356, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-31 20:37:34,528 INFO [train.py:903] (2/4) Epoch 1, batch 5100, loss[loss=0.4387, simple_loss=0.4581, pruned_loss=0.2097, over 19693.00 frames. ], tot_loss[loss=0.413, simple_loss=0.4318, pruned_loss=0.1971, over 3828366.88 frames. ], batch size: 59, lr: 4.18e-02, grad_scale: 8.0 +2023-03-31 20:37:39,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 20:37:43,117 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+02 1.065e+03 1.254e+03 1.490e+03 3.647e+03, threshold=2.509e+03, percent-clipped=6.0 +2023-03-31 20:37:43,158 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 20:37:47,679 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 20:38:33,389 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8398, 3.5341, 1.8090, 3.1364, 1.1403, 3.4073, 3.0484, 3.0918], + device='cuda:2'), covar=tensor([0.0634, 0.1113, 0.2412, 0.0711, 0.3351, 0.0876, 0.0671, 0.0736], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0237, 0.0267, 0.0222, 0.0285, 0.0224, 0.0166, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 20:38:36,972 INFO [train.py:903] (2/4) Epoch 1, batch 5150, loss[loss=0.3081, simple_loss=0.3539, pruned_loss=0.1312, over 19737.00 frames. ], tot_loss[loss=0.4103, simple_loss=0.4297, pruned_loss=0.1954, over 3832345.47 frames. ], batch size: 46, lr: 4.17e-02, grad_scale: 8.0 +2023-03-31 20:38:38,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-31 20:38:40,519 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7983, 1.2941, 1.5182, 1.0730, 2.7326, 3.0918, 3.0911, 3.5102], + device='cuda:2'), covar=tensor([0.1848, 0.2975, 0.2639, 0.2847, 0.0583, 0.0199, 0.0264, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0277, 0.0292, 0.0303, 0.0204, 0.0123, 0.0170, 0.0125], + device='cuda:2'), out_proj_covar=tensor([2.3921e-04, 2.2217e-04, 2.3289e-04, 2.4293e-04, 1.8955e-04, 9.7472e-05, + 1.3935e-04, 1.1021e-04], device='cuda:2') +2023-03-31 20:38:46,441 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-03-31 20:38:54,611 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1941, 1.8449, 1.5931, 1.4282, 1.6156, 0.7754, 0.9280, 1.5568], + device='cuda:2'), covar=tensor([0.1328, 0.0445, 0.0838, 0.0828, 0.0862, 0.1653, 0.1440, 0.0869], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0146, 0.0176, 0.0216, 0.0154, 0.0241, 0.0242, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 20:39:20,330 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 20:39:37,971 INFO [train.py:903] (2/4) Epoch 1, batch 5200, loss[loss=0.3959, simple_loss=0.4288, pruned_loss=0.1815, over 19694.00 frames. ], tot_loss[loss=0.4107, simple_loss=0.4297, pruned_loss=0.1958, over 3835580.48 frames. ], batch size: 59, lr: 4.16e-02, grad_scale: 8.0 +2023-03-31 20:39:45,884 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+02 1.028e+03 1.252e+03 1.630e+03 4.880e+03, threshold=2.504e+03, percent-clipped=1.0 +2023-03-31 20:39:50,384 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 20:40:32,320 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 20:40:39,299 INFO [train.py:903] (2/4) Epoch 1, batch 5250, loss[loss=0.3702, simple_loss=0.3931, pruned_loss=0.1736, over 19739.00 frames. ], tot_loss[loss=0.4089, simple_loss=0.4287, pruned_loss=0.1946, over 3831053.99 frames. ], batch size: 46, lr: 4.15e-02, grad_scale: 8.0 +2023-03-31 20:41:36,536 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-03-31 20:41:37,423 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5783, 1.1392, 0.9848, 1.6019, 1.2891, 1.5597, 1.5514, 1.4851], + device='cuda:2'), covar=tensor([0.0932, 0.1605, 0.1827, 0.1452, 0.2019, 0.1318, 0.1831, 0.1102], + device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0296, 0.0296, 0.0328, 0.0390, 0.0275, 0.0359, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-31 20:41:39,602 INFO [train.py:903] (2/4) Epoch 1, batch 5300, loss[loss=0.3295, simple_loss=0.3676, pruned_loss=0.1457, over 16028.00 frames. ], tot_loss[loss=0.4093, simple_loss=0.4285, pruned_loss=0.1951, over 3831783.46 frames. ], batch size: 35, lr: 4.14e-02, grad_scale: 8.0 +2023-03-31 20:41:48,688 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.962e+02 9.394e+02 1.191e+03 1.647e+03 4.206e+03, threshold=2.383e+03, percent-clipped=5.0 +2023-03-31 20:41:53,358 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 20:42:41,825 INFO [train.py:903] (2/4) Epoch 1, batch 5350, loss[loss=0.4222, simple_loss=0.4385, pruned_loss=0.2029, over 19334.00 frames. ], tot_loss[loss=0.4092, simple_loss=0.4283, pruned_loss=0.195, over 3829395.57 frames. ], batch size: 70, lr: 4.13e-02, grad_scale: 8.0 +2023-03-31 20:43:09,052 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-31 20:43:13,709 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-03-31 20:43:43,593 INFO [train.py:903] (2/4) Epoch 1, batch 5400, loss[loss=0.3935, simple_loss=0.4276, pruned_loss=0.1797, over 19743.00 frames. ], tot_loss[loss=0.4073, simple_loss=0.4271, pruned_loss=0.1937, over 3843435.30 frames. ], batch size: 63, lr: 4.12e-02, grad_scale: 8.0 +2023-03-31 20:43:49,000 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-31 20:43:51,071 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.073e+02 9.364e+02 1.084e+03 1.611e+03 4.795e+03, threshold=2.168e+03, percent-clipped=7.0 +2023-03-31 20:44:44,644 INFO [train.py:903] (2/4) Epoch 1, batch 5450, loss[loss=0.3989, simple_loss=0.4211, pruned_loss=0.1883, over 19873.00 frames. ], tot_loss[loss=0.408, simple_loss=0.4272, pruned_loss=0.1943, over 3835693.25 frames. ], batch size: 52, lr: 4.11e-02, grad_scale: 8.0 +2023-03-31 20:45:46,525 INFO [train.py:903] (2/4) Epoch 1, batch 5500, loss[loss=0.3951, simple_loss=0.4105, pruned_loss=0.1898, over 19572.00 frames. ], tot_loss[loss=0.4053, simple_loss=0.4255, pruned_loss=0.1926, over 3830802.14 frames. ], batch size: 52, lr: 4.10e-02, grad_scale: 8.0 +2023-03-31 20:45:54,007 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+02 9.504e+02 1.107e+03 1.412e+03 4.004e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 20:46:08,676 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 20:46:31,257 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7881, 1.3289, 1.2701, 1.9565, 1.6439, 1.9197, 1.7184, 1.4915], + device='cuda:2'), covar=tensor([0.1016, 0.1917, 0.1866, 0.1342, 0.2157, 0.1366, 0.2443, 0.1355], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0295, 0.0297, 0.0318, 0.0384, 0.0266, 0.0351, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-31 20:46:46,633 INFO [train.py:903] (2/4) Epoch 1, batch 5550, loss[loss=0.4139, simple_loss=0.442, pruned_loss=0.1929, over 19745.00 frames. ], tot_loss[loss=0.4055, simple_loss=0.4256, pruned_loss=0.1927, over 3825009.22 frames. ], batch size: 63, lr: 4.09e-02, grad_scale: 8.0 +2023-03-31 20:46:53,479 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-03-31 20:47:42,825 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 20:47:47,472 INFO [train.py:903] (2/4) Epoch 1, batch 5600, loss[loss=0.3845, simple_loss=0.4067, pruned_loss=0.1811, over 19620.00 frames. ], tot_loss[loss=0.4064, simple_loss=0.4263, pruned_loss=0.1932, over 3813888.50 frames. 
], batch size: 50, lr: 4.08e-02, grad_scale: 8.0 +2023-03-31 20:47:56,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.862e+02 1.009e+03 1.185e+03 1.400e+03 2.216e+03, threshold=2.370e+03, percent-clipped=2.0 +2023-03-31 20:48:06,742 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.8549, 5.3939, 2.9015, 4.7865, 1.9299, 5.6075, 5.0859, 5.4716], + device='cuda:2'), covar=tensor([0.0693, 0.1365, 0.2357, 0.0585, 0.3256, 0.0693, 0.0573, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0230, 0.0279, 0.0221, 0.0290, 0.0226, 0.0167, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-31 20:48:41,562 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.30 vs. limit=5.0 +2023-03-31 20:48:48,824 INFO [train.py:903] (2/4) Epoch 1, batch 5650, loss[loss=0.3644, simple_loss=0.3859, pruned_loss=0.1714, over 19026.00 frames. ], tot_loss[loss=0.4073, simple_loss=0.4268, pruned_loss=0.1939, over 3801750.83 frames. ], batch size: 42, lr: 4.07e-02, grad_scale: 8.0 +2023-03-31 20:49:09,680 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5668.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:49:32,788 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 20:49:49,722 INFO [train.py:903] (2/4) Epoch 1, batch 5700, loss[loss=0.3654, simple_loss=0.3942, pruned_loss=0.1683, over 19358.00 frames. ], tot_loss[loss=0.4072, simple_loss=0.4264, pruned_loss=0.194, over 3807698.28 frames. ], batch size: 47, lr: 4.06e-02, grad_scale: 8.0 +2023-03-31 20:49:57,493 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.193e+02 1.084e+03 1.385e+03 1.754e+03 4.325e+03, threshold=2.770e+03, percent-clipped=14.0 +2023-03-31 20:50:51,585 INFO [train.py:903] (2/4) Epoch 1, batch 5750, loss[loss=0.4281, simple_loss=0.4513, pruned_loss=0.2024, over 19485.00 frames. ], tot_loss[loss=0.4062, simple_loss=0.4262, pruned_loss=0.1931, over 3809387.68 frames. ], batch size: 64, lr: 4.05e-02, grad_scale: 8.0 +2023-03-31 20:50:51,606 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 20:50:59,665 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 20:51:05,159 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 20:51:18,560 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5773.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:51:52,613 INFO [train.py:903] (2/4) Epoch 1, batch 5800, loss[loss=0.3017, simple_loss=0.3505, pruned_loss=0.1265, over 19747.00 frames. ], tot_loss[loss=0.4046, simple_loss=0.4253, pruned_loss=0.192, over 3819724.99 frames. 
], batch size: 47, lr: 4.04e-02, grad_scale: 8.0 +2023-03-31 20:52:02,171 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.310e+02 8.969e+02 1.169e+03 1.352e+03 2.735e+03, threshold=2.337e+03, percent-clipped=0.0 +2023-03-31 20:52:11,578 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1225, 2.1000, 1.8616, 2.3579, 1.7555, 2.6762, 2.7106, 2.6267], + device='cuda:2'), covar=tensor([0.0717, 0.1414, 0.1731, 0.1431, 0.2540, 0.1287, 0.1796, 0.0980], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0301, 0.0307, 0.0316, 0.0392, 0.0274, 0.0352, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-31 20:52:16,014 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.26 vs. limit=5.0 +2023-03-31 20:52:53,452 INFO [train.py:903] (2/4) Epoch 1, batch 5850, loss[loss=0.4822, simple_loss=0.4792, pruned_loss=0.2426, over 13449.00 frames. ], tot_loss[loss=0.4043, simple_loss=0.4253, pruned_loss=0.1917, over 3818510.80 frames. ], batch size: 137, lr: 4.03e-02, grad_scale: 8.0 +2023-03-31 20:53:23,802 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5876.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:53:55,096 INFO [train.py:903] (2/4) Epoch 1, batch 5900, loss[loss=0.3665, simple_loss=0.4075, pruned_loss=0.1627, over 19658.00 frames. ], tot_loss[loss=0.4014, simple_loss=0.4235, pruned_loss=0.1897, over 3816582.11 frames. ], batch size: 55, lr: 4.02e-02, grad_scale: 8.0 +2023-03-31 20:53:58,596 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 20:54:03,122 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.633e+02 8.668e+02 1.127e+03 1.397e+03 3.736e+03, threshold=2.255e+03, percent-clipped=4.0 +2023-03-31 20:54:21,180 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 20:54:55,344 INFO [train.py:903] (2/4) Epoch 1, batch 5950, loss[loss=0.4134, simple_loss=0.4411, pruned_loss=0.1928, over 19502.00 frames. ], tot_loss[loss=0.3999, simple_loss=0.4227, pruned_loss=0.1885, over 3828499.33 frames. ], batch size: 64, lr: 4.01e-02, grad_scale: 8.0 +2023-03-31 20:55:57,489 INFO [train.py:903] (2/4) Epoch 1, batch 6000, loss[loss=0.4051, simple_loss=0.4355, pruned_loss=0.1874, over 19308.00 frames. ], tot_loss[loss=0.3995, simple_loss=0.4224, pruned_loss=0.1883, over 3834682.79 frames. ], batch size: 66, lr: 4.00e-02, grad_scale: 8.0 +2023-03-31 20:55:57,489 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 20:56:07,672 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9796, 3.0845, 3.4746, 3.3183, 1.5101, 3.0810, 2.7647, 2.9392], + device='cuda:2'), covar=tensor([0.0428, 0.0551, 0.0397, 0.0297, 0.3213, 0.0278, 0.0519, 0.0883], + device='cuda:2'), in_proj_covar=tensor([0.0200, 0.0236, 0.0293, 0.0200, 0.0381, 0.0146, 0.0219, 0.0309], + device='cuda:2'), out_proj_covar=tensor([1.2992e-04, 1.5195e-04, 1.9142e-04, 1.1747e-04, 2.0765e-04, 9.4078e-05, + 1.3428e-04, 1.7648e-04], device='cuda:2') +2023-03-31 20:56:10,606 INFO [train.py:937] (2/4) Epoch 1, validation: loss=0.2784, simple_loss=0.3626, pruned_loss=0.09714, over 944034.00 frames. 
+2023-03-31 20:56:10,607 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17304MB +2023-03-31 20:56:19,578 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.849e+02 9.012e+02 1.240e+03 1.620e+03 2.952e+03, threshold=2.480e+03, percent-clipped=5.0 +2023-03-31 20:56:19,952 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:56:24,074 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6012.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:57:10,731 INFO [train.py:903] (2/4) Epoch 1, batch 6050, loss[loss=0.472, simple_loss=0.4705, pruned_loss=0.2368, over 18373.00 frames. ], tot_loss[loss=0.4012, simple_loss=0.4233, pruned_loss=0.1895, over 3813922.71 frames. ], batch size: 84, lr: 3.99e-02, grad_scale: 8.0 +2023-03-31 20:57:24,326 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:58:12,899 INFO [train.py:903] (2/4) Epoch 1, batch 6100, loss[loss=0.4243, simple_loss=0.4505, pruned_loss=0.1991, over 19595.00 frames. ], tot_loss[loss=0.4002, simple_loss=0.4224, pruned_loss=0.189, over 3814717.43 frames. ], batch size: 61, lr: 3.98e-02, grad_scale: 8.0 +2023-03-31 20:58:20,960 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+02 9.509e+02 1.169e+03 1.489e+03 2.977e+03, threshold=2.338e+03, percent-clipped=4.0 +2023-03-31 20:58:32,475 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6117.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:58:44,732 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6127.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:59:13,362 INFO [train.py:903] (2/4) Epoch 1, batch 6150, loss[loss=0.4077, simple_loss=0.4265, pruned_loss=0.1945, over 18433.00 frames. ], tot_loss[loss=0.4001, simple_loss=0.4224, pruned_loss=0.1889, over 3813934.33 frames. ], batch size: 84, lr: 3.97e-02, grad_scale: 8.0 +2023-03-31 20:59:16,573 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6153.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:59:42,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 20:59:58,528 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6188.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:13,788 INFO [train.py:903] (2/4) Epoch 1, batch 6200, loss[loss=0.4268, simple_loss=0.4345, pruned_loss=0.2096, over 19614.00 frames. ], tot_loss[loss=0.4025, simple_loss=0.4244, pruned_loss=0.1903, over 3810423.04 frames. 
], batch size: 50, lr: 3.96e-02, grad_scale: 8.0 +2023-03-31 21:00:22,729 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 9.585e+02 1.181e+03 1.511e+03 2.920e+03, threshold=2.362e+03, percent-clipped=2.0 +2023-03-31 21:00:38,389 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:39,677 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:44,318 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0098, 1.9370, 1.9178, 2.2373, 2.1216, 1.6993, 0.2313, 1.9033], + device='cuda:2'), covar=tensor([0.0897, 0.0854, 0.0436, 0.0688, 0.0895, 0.1122, 0.2294, 0.1597], + device='cuda:2'), in_proj_covar=tensor([0.0163, 0.0166, 0.0164, 0.0217, 0.0237, 0.0218, 0.0229, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:00:49,009 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6229.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:53,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:01:16,025 INFO [train.py:903] (2/4) Epoch 1, batch 6250, loss[loss=0.4069, simple_loss=0.4313, pruned_loss=0.1912, over 18773.00 frames. ], tot_loss[loss=0.3987, simple_loss=0.422, pruned_loss=0.1877, over 3810523.50 frames. ], batch size: 74, lr: 3.95e-02, grad_scale: 8.0 +2023-03-31 21:01:34,909 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-31 21:01:46,717 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 21:01:56,436 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6284.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:02:18,553 INFO [train.py:903] (2/4) Epoch 1, batch 6300, loss[loss=0.3396, simple_loss=0.3766, pruned_loss=0.1513, over 19637.00 frames. ], tot_loss[loss=0.397, simple_loss=0.4208, pruned_loss=0.1866, over 3800993.98 frames. ], batch size: 50, lr: 3.94e-02, grad_scale: 8.0 +2023-03-31 21:02:24,692 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0816, 1.0822, 1.8915, 1.3300, 2.5122, 2.3374, 2.6594, 1.6392], + device='cuda:2'), covar=tensor([0.1509, 0.1856, 0.1180, 0.1449, 0.0787, 0.0748, 0.0874, 0.1351], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0294, 0.0279, 0.0299, 0.0326, 0.0259, 0.0351, 0.0313], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:02:26,557 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.984e+02 8.812e+02 1.125e+03 1.363e+03 2.149e+03, threshold=2.249e+03, percent-clipped=0.0 +2023-03-31 21:02:51,749 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-31 21:02:58,294 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6335.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:17,625 INFO [train.py:903] (2/4) Epoch 1, batch 6350, loss[loss=0.4557, simple_loss=0.4446, pruned_loss=0.2333, over 13246.00 frames. ], tot_loss[loss=0.3989, simple_loss=0.4218, pruned_loss=0.1881, over 3796782.17 frames. 
], batch size: 135, lr: 3.93e-02, grad_scale: 8.0 +2023-03-31 21:03:18,877 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6352.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:58,122 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6383.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:04:18,896 INFO [train.py:903] (2/4) Epoch 1, batch 6400, loss[loss=0.4543, simple_loss=0.4569, pruned_loss=0.2258, over 19711.00 frames. ], tot_loss[loss=0.3966, simple_loss=0.42, pruned_loss=0.1866, over 3798731.88 frames. ], batch size: 59, lr: 3.92e-02, grad_scale: 8.0 +2023-03-31 21:04:20,407 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7460, 1.1835, 1.2539, 0.5313, 2.6092, 2.8020, 2.6710, 2.8449], + device='cuda:2'), covar=tensor([0.1516, 0.2973, 0.2867, 0.2781, 0.0434, 0.0167, 0.0272, 0.0211], + device='cuda:2'), in_proj_covar=tensor([0.0311, 0.0285, 0.0315, 0.0301, 0.0198, 0.0114, 0.0178, 0.0121], + device='cuda:2'), out_proj_covar=tensor([2.5880e-04, 2.4414e-04, 2.6332e-04, 2.5697e-04, 1.9251e-04, 9.6055e-05, + 1.4974e-04, 1.1367e-04], device='cuda:2') +2023-03-31 21:04:24,485 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6405.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:04:27,871 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.880e+02 9.359e+02 1.206e+03 1.547e+03 5.333e+03, threshold=2.412e+03, percent-clipped=7.0 +2023-03-31 21:04:28,300 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6408.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:04:28,612 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-31 21:05:19,065 INFO [train.py:903] (2/4) Epoch 1, batch 6450, loss[loss=0.4956, simple_loss=0.4994, pruned_loss=0.2459, over 19585.00 frames. ], tot_loss[loss=0.3944, simple_loss=0.4183, pruned_loss=0.1853, over 3812220.20 frames. ], batch size: 61, lr: 3.91e-02, grad_scale: 8.0 +2023-03-31 21:05:22,470 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.94 vs. limit=5.0 +2023-03-31 21:05:39,877 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:05:54,820 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0650, 1.9070, 1.5730, 2.6913, 1.7624, 2.7334, 1.8343, 1.6084], + device='cuda:2'), covar=tensor([0.0852, 0.0692, 0.0622, 0.0501, 0.1018, 0.0319, 0.1387, 0.0944], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0184, 0.0203, 0.0257, 0.0257, 0.0143, 0.0287, 0.0215], + device='cuda:2'), out_proj_covar=tensor([1.5487e-04, 1.3588e-04, 1.3575e-04, 1.7135e-04, 1.6795e-04, 9.8021e-05, + 2.0508e-04, 1.5093e-04], device='cuda:2') +2023-03-31 21:05:55,619 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6480.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:01,938 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6485.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:05,052 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-03-31 21:06:05,504 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6488.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:15,206 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6497.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:21,207 INFO [train.py:903] (2/4) Epoch 1, batch 6500, loss[loss=0.4458, simple_loss=0.4501, pruned_loss=0.2208, over 17205.00 frames. ], tot_loss[loss=0.3917, simple_loss=0.4162, pruned_loss=0.1836, over 3818395.66 frames. ], batch size: 101, lr: 3.90e-02, grad_scale: 8.0 +2023-03-31 21:06:25,571 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 21:06:28,779 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+02 9.704e+02 1.201e+03 1.443e+03 2.205e+03, threshold=2.402e+03, percent-clipped=0.0 +2023-03-31 21:06:32,896 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2787, 2.0887, 1.9048, 2.5802, 2.0722, 2.7765, 2.1506, 1.6660], + device='cuda:2'), covar=tensor([0.0812, 0.0679, 0.0494, 0.0609, 0.0961, 0.0339, 0.1238, 0.0927], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0184, 0.0203, 0.0256, 0.0253, 0.0141, 0.0285, 0.0214], + device='cuda:2'), out_proj_covar=tensor([1.5397e-04, 1.3520e-04, 1.3568e-04, 1.7073e-04, 1.6518e-04, 9.6068e-05, + 2.0367e-04, 1.5066e-04], device='cuda:2') +2023-03-31 21:06:35,116 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6513.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:43,932 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6520.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:58,397 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:21,792 INFO [train.py:903] (2/4) Epoch 1, batch 6550, loss[loss=0.4612, simple_loss=0.4643, pruned_loss=0.2291, over 19572.00 frames. ], tot_loss[loss=0.392, simple_loss=0.4169, pruned_loss=0.1836, over 3837005.03 frames. ], batch size: 61, lr: 3.89e-02, grad_scale: 8.0 +2023-03-31 21:07:39,353 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6565.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:48,422 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6573.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:04,917 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.30 vs. limit=5.0 +2023-03-31 21:08:10,398 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6591.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:22,326 INFO [train.py:903] (2/4) Epoch 1, batch 6600, loss[loss=0.4213, simple_loss=0.4484, pruned_loss=0.1971, over 19706.00 frames. ], tot_loss[loss=0.3898, simple_loss=0.4151, pruned_loss=0.1823, over 3823801.66 frames. 
], batch size: 59, lr: 3.89e-02, grad_scale: 16.0 +2023-03-31 21:08:31,045 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.089e+02 8.736e+02 1.082e+03 1.231e+03 3.386e+03, threshold=2.164e+03, percent-clipped=2.0 +2023-03-31 21:08:36,146 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6612.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:40,723 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6616.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:56,071 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6628.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:09,066 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-31 21:09:19,917 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:19,980 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:24,248 INFO [train.py:903] (2/4) Epoch 1, batch 6650, loss[loss=0.3797, simple_loss=0.4126, pruned_loss=0.1734, over 19309.00 frames. ], tot_loss[loss=0.3899, simple_loss=0.4155, pruned_loss=0.1821, over 3830656.05 frames. ], batch size: 70, lr: 3.88e-02, grad_scale: 4.0 +2023-03-31 21:09:27,275 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0 +2023-03-31 21:09:59,869 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6680.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:09,228 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6688.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:23,708 INFO [train.py:903] (2/4) Epoch 1, batch 6700, loss[loss=0.472, simple_loss=0.4674, pruned_loss=0.2384, over 13326.00 frames. ], tot_loss[loss=0.3905, simple_loss=0.4156, pruned_loss=0.1827, over 3809413.89 frames. ], batch size: 136, lr: 3.87e-02, grad_scale: 4.0 +2023-03-31 21:10:35,446 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+02 8.354e+02 1.101e+03 1.693e+03 1.016e+04, threshold=2.202e+03, percent-clipped=16.0 +2023-03-31 21:10:51,606 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6723.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:14,935 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6743.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:20,787 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6748.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:23,790 INFO [train.py:903] (2/4) Epoch 1, batch 6750, loss[loss=0.4633, simple_loss=0.4541, pruned_loss=0.2363, over 13305.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.4147, pruned_loss=0.1813, over 3813008.43 frames. 
], batch size: 137, lr: 3.86e-02, grad_scale: 4.0 +2023-03-31 21:11:25,378 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0893, 1.0970, 1.9760, 1.4861, 2.7963, 2.7687, 3.1318, 1.9987], + device='cuda:2'), covar=tensor([0.1594, 0.1939, 0.1266, 0.1439, 0.0821, 0.0709, 0.0921, 0.1519], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0296, 0.0282, 0.0298, 0.0324, 0.0267, 0.0363, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:11:51,648 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:01,377 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1219, 4.4453, 5.8339, 5.6244, 1.9547, 5.1961, 4.8677, 5.1028], + device='cuda:2'), covar=tensor([0.0193, 0.0379, 0.0252, 0.0137, 0.2775, 0.0146, 0.0283, 0.0588], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0240, 0.0297, 0.0214, 0.0389, 0.0146, 0.0224, 0.0322], + device='cuda:2'), out_proj_covar=tensor([1.3067e-04, 1.4978e-04, 1.8970e-04, 1.2347e-04, 2.0902e-04, 9.4401e-05, + 1.3287e-04, 1.8094e-04], device='cuda:2') +2023-03-31 21:12:18,944 INFO [train.py:903] (2/4) Epoch 1, batch 6800, loss[loss=0.3766, simple_loss=0.4124, pruned_loss=0.1704, over 18032.00 frames. ], tot_loss[loss=0.3882, simple_loss=0.4143, pruned_loss=0.1811, over 3822650.74 frames. ], batch size: 83, lr: 3.85e-02, grad_scale: 8.0 +2023-03-31 21:12:19,307 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6801.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:28,859 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.487e+02 9.125e+02 1.072e+03 1.412e+03 3.162e+03, threshold=2.143e+03, percent-clipped=4.0 +2023-03-31 21:12:43,095 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7318, 1.7312, 1.4244, 1.3967, 1.3363, 1.5868, 0.4710, 1.0896], + device='cuda:2'), covar=tensor([0.0381, 0.0460, 0.0335, 0.0405, 0.0781, 0.0549, 0.1268, 0.0978], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0181, 0.0184, 0.0222, 0.0261, 0.0228, 0.0239, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:12:43,735 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6824.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:43,783 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8885, 3.6871, 2.3655, 3.3313, 1.7485, 3.4634, 3.2022, 3.2507], + device='cuda:2'), covar=tensor([0.0669, 0.0999, 0.1712, 0.0865, 0.2717, 0.0787, 0.0605, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0249, 0.0279, 0.0241, 0.0304, 0.0234, 0.0181, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 21:13:03,235 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 21:13:04,242 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 21:13:06,752 INFO [train.py:903] (2/4) Epoch 2, batch 0, loss[loss=0.3539, simple_loss=0.3782, pruned_loss=0.1648, over 19774.00 frames. ], tot_loss[loss=0.3539, simple_loss=0.3782, pruned_loss=0.1648, over 19774.00 frames. 
], batch size: 46, lr: 3.77e-02, grad_scale: 8.0 +2023-03-31 21:13:06,752 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 21:13:18,418 INFO [train.py:937] (2/4) Epoch 2, validation: loss=0.2802, simple_loss=0.3637, pruned_loss=0.09835, over 944034.00 frames. +2023-03-31 21:13:18,419 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17398MB +2023-03-31 21:13:18,577 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6829.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:25,963 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0309, 2.0144, 1.5237, 1.4151, 1.4971, 1.6701, 0.2527, 0.8297], + device='cuda:2'), covar=tensor([0.0649, 0.0515, 0.0435, 0.0585, 0.1056, 0.0691, 0.1464, 0.1348], + device='cuda:2'), in_proj_covar=tensor([0.0181, 0.0181, 0.0184, 0.0223, 0.0264, 0.0231, 0.0240, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:13:28,823 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 21:14:06,666 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6868.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:20,889 INFO [train.py:903] (2/4) Epoch 2, batch 50, loss[loss=0.3793, simple_loss=0.3992, pruned_loss=0.1797, over 19847.00 frames. ], tot_loss[loss=0.3879, simple_loss=0.4159, pruned_loss=0.1799, over 857386.48 frames. ], batch size: 52, lr: 3.76e-02, grad_scale: 8.0 +2023-03-31 21:14:37,422 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6893.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:41,581 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6896.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:14:49,461 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6903.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:54,397 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 21:14:57,930 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.488e+02 9.076e+02 1.150e+03 1.515e+03 2.802e+03, threshold=2.301e+03, percent-clipped=3.0 +2023-03-31 21:15:06,596 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7964, 0.9866, 1.1778, 1.6679, 2.4317, 1.1387, 1.8562, 2.2700], + device='cuda:2'), covar=tensor([0.0578, 0.3499, 0.3440, 0.1812, 0.0645, 0.2776, 0.1219, 0.0837], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0287, 0.0275, 0.0262, 0.0219, 0.0313, 0.0244, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:15:19,026 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6927.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,158 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,717 INFO [train.py:903] (2/4) Epoch 2, batch 100, loss[loss=0.432, simple_loss=0.4482, pruned_loss=0.2078, over 19054.00 frames. ], tot_loss[loss=0.3943, simple_loss=0.419, pruned_loss=0.1847, over 1507705.01 frames. 
], batch size: 69, lr: 3.75e-02, grad_scale: 8.0 +2023-03-31 21:15:30,159 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6936.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:32,200 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 21:15:33,651 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6939.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,467 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,540 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:43,891 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.50 vs. limit=5.0 +2023-03-31 21:16:01,550 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6961.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:10,913 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6969.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:23,040 INFO [train.py:903] (2/4) Epoch 2, batch 150, loss[loss=0.3357, simple_loss=0.3623, pruned_loss=0.1545, over 19746.00 frames. ], tot_loss[loss=0.3911, simple_loss=0.4178, pruned_loss=0.1821, over 2031226.77 frames. ], batch size: 47, lr: 3.74e-02, grad_scale: 4.0 +2023-03-31 21:16:33,982 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2082, 3.3895, 3.6795, 3.5681, 1.1762, 3.2872, 3.0004, 3.1685], + device='cuda:2'), covar=tensor([0.0354, 0.0480, 0.0450, 0.0260, 0.3180, 0.0233, 0.0395, 0.1035], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0254, 0.0318, 0.0219, 0.0398, 0.0152, 0.0234, 0.0339], + device='cuda:2'), out_proj_covar=tensor([1.3523e-04, 1.5789e-04, 2.0403e-04, 1.2752e-04, 2.1363e-04, 9.8211e-05, + 1.3891e-04, 1.9141e-04], device='cuda:2') +2023-03-31 21:16:38,557 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6991.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:40,437 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-31 21:16:49,241 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6999.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:03,019 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.870e+02 9.855e+02 1.288e+03 4.108e+03, threshold=1.971e+03, percent-clipped=4.0 +2023-03-31 21:17:06,925 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9931, 1.1628, 1.4781, 1.8465, 2.6848, 1.3384, 1.7080, 2.5797], + device='cuda:2'), covar=tensor([0.0349, 0.2603, 0.2499, 0.1499, 0.0401, 0.1992, 0.0989, 0.0513], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0289, 0.0277, 0.0266, 0.0225, 0.0315, 0.0242, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:17:19,697 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7024.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:24,077 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 21:17:25,201 INFO [train.py:903] (2/4) Epoch 2, batch 200, loss[loss=0.3633, simple_loss=0.4031, pruned_loss=0.1617, over 19517.00 frames. 
], tot_loss[loss=0.3867, simple_loss=0.4144, pruned_loss=0.1795, over 2437503.71 frames. ], batch size: 54, lr: 3.73e-02, grad_scale: 4.0 +2023-03-31 21:18:11,585 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7066.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:18:29,134 INFO [train.py:903] (2/4) Epoch 2, batch 250, loss[loss=0.4168, simple_loss=0.4503, pruned_loss=0.1916, over 19273.00 frames. ], tot_loss[loss=0.3847, simple_loss=0.4129, pruned_loss=0.1783, over 2731783.04 frames. ], batch size: 66, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:19:03,891 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:19:09,446 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 7.747e+02 9.740e+02 1.157e+03 2.695e+03, threshold=1.948e+03, percent-clipped=1.0 +2023-03-31 21:19:17,177 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-31 21:19:33,829 INFO [train.py:903] (2/4) Epoch 2, batch 300, loss[loss=0.3601, simple_loss=0.3946, pruned_loss=0.1628, over 19738.00 frames. ], tot_loss[loss=0.3791, simple_loss=0.4089, pruned_loss=0.1747, over 2980069.09 frames. ], batch size: 51, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:20:35,437 INFO [train.py:903] (2/4) Epoch 2, batch 350, loss[loss=0.3554, simple_loss=0.3864, pruned_loss=0.1622, over 19393.00 frames. ], tot_loss[loss=0.3815, simple_loss=0.4104, pruned_loss=0.1762, over 3176688.47 frames. ], batch size: 48, lr: 3.71e-02, grad_scale: 4.0 +2023-03-31 21:20:37,912 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7181.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:20:39,846 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 21:20:55,890 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7195.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:01,672 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:15,795 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 9.781e+02 1.245e+03 1.512e+03 3.081e+03, threshold=2.489e+03, percent-clipped=8.0 +2023-03-31 21:21:26,524 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:32,341 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7225.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:37,142 INFO [train.py:903] (2/4) Epoch 2, batch 400, loss[loss=0.5344, simple_loss=0.4994, pruned_loss=0.2847, over 13291.00 frames. ], tot_loss[loss=0.3852, simple_loss=0.4126, pruned_loss=0.1789, over 3311790.84 frames. ], batch size: 138, lr: 3.70e-02, grad_scale: 8.0 +2023-03-31 21:21:38,961 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. 
limit=2.0 +2023-03-31 21:21:51,112 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7240.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:22:30,128 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7271.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:22:40,124 INFO [train.py:903] (2/4) Epoch 2, batch 450, loss[loss=0.4216, simple_loss=0.438, pruned_loss=0.2026, over 17411.00 frames. ], tot_loss[loss=0.3872, simple_loss=0.4144, pruned_loss=0.1799, over 3430132.96 frames. ], batch size: 101, lr: 3.69e-02, grad_scale: 8.0 +2023-03-31 21:22:58,513 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:23:05,138 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.6079, 5.2734, 2.9030, 4.7429, 1.5037, 5.2773, 4.8720, 5.1912], + device='cuda:2'), covar=tensor([0.0503, 0.0915, 0.1669, 0.0489, 0.3062, 0.0561, 0.0493, 0.0461], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0251, 0.0287, 0.0239, 0.0305, 0.0246, 0.0198, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 21:23:14,127 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 21:23:15,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 21:23:19,470 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.877e+02 8.763e+02 1.192e+03 1.491e+03 2.950e+03, threshold=2.384e+03, percent-clipped=4.0 +2023-03-31 21:23:43,127 INFO [train.py:903] (2/4) Epoch 2, batch 500, loss[loss=0.398, simple_loss=0.4174, pruned_loss=0.1892, over 19674.00 frames. ], tot_loss[loss=0.3881, simple_loss=0.415, pruned_loss=0.1806, over 3512727.72 frames. ], batch size: 53, lr: 3.68e-02, grad_scale: 8.0 +2023-03-31 21:24:14,676 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7355.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:24:23,751 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7362.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:45,276 INFO [train.py:903] (2/4) Epoch 2, batch 550, loss[loss=0.399, simple_loss=0.427, pruned_loss=0.1855, over 19678.00 frames. ], tot_loss[loss=0.3857, simple_loss=0.4129, pruned_loss=0.1793, over 3586072.98 frames. ], batch size: 60, lr: 3.67e-02, grad_scale: 8.0 +2023-03-31 21:24:53,942 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7386.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:55,113 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7387.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:24,047 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7410.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:25:26,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+02 9.206e+02 1.127e+03 1.377e+03 2.659e+03, threshold=2.254e+03, percent-clipped=2.0 +2023-03-31 21:25:43,363 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:47,664 INFO [train.py:903] (2/4) Epoch 2, batch 600, loss[loss=0.3549, simple_loss=0.3851, pruned_loss=0.1624, over 19613.00 frames. 
], tot_loss[loss=0.3851, simple_loss=0.4124, pruned_loss=0.1789, over 3631231.49 frames. ], batch size: 50, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:29,895 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 21:26:50,584 INFO [train.py:903] (2/4) Epoch 2, batch 650, loss[loss=0.3591, simple_loss=0.4046, pruned_loss=0.1568, over 19474.00 frames. ], tot_loss[loss=0.3847, simple_loss=0.4125, pruned_loss=0.1785, over 3677931.81 frames. ], batch size: 64, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:58,987 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6293, 2.5564, 1.7956, 1.9063, 2.1327, 1.1093, 1.1514, 1.7406], + device='cuda:2'), covar=tensor([0.0992, 0.0443, 0.0970, 0.0660, 0.0625, 0.1378, 0.1113, 0.0757], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0161, 0.0240, 0.0229, 0.0166, 0.0272, 0.0248, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:27:30,828 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 8.519e+02 1.041e+03 1.431e+03 3.840e+03, threshold=2.082e+03, percent-clipped=3.0 +2023-03-31 21:27:31,232 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7128, 2.0626, 2.1332, 2.6862, 4.4527, 0.9413, 2.0580, 4.2178], + device='cuda:2'), covar=tensor([0.0347, 0.2466, 0.2490, 0.1487, 0.0272, 0.2584, 0.1267, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0294, 0.0276, 0.0271, 0.0230, 0.0315, 0.0253, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:27:49,405 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7525.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:27:49,621 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7525.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:27:53,956 INFO [train.py:903] (2/4) Epoch 2, batch 700, loss[loss=0.4459, simple_loss=0.4513, pruned_loss=0.2202, over 12896.00 frames. ], tot_loss[loss=0.3849, simple_loss=0.4123, pruned_loss=0.1788, over 3690032.84 frames. ], batch size: 136, lr: 3.65e-02, grad_scale: 8.0 +2023-03-31 21:28:56,963 INFO [train.py:903] (2/4) Epoch 2, batch 750, loss[loss=0.395, simple_loss=0.4297, pruned_loss=0.1802, over 19588.00 frames. ], tot_loss[loss=0.3854, simple_loss=0.4132, pruned_loss=0.1788, over 3720845.42 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 8.0 +2023-03-31 21:29:35,961 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.266e+02 8.676e+02 1.032e+03 1.220e+03 3.020e+03, threshold=2.064e+03, percent-clipped=5.0 +2023-03-31 21:29:36,452 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7611.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:29:58,606 INFO [train.py:903] (2/4) Epoch 2, batch 800, loss[loss=0.3542, simple_loss=0.3994, pruned_loss=0.1545, over 19675.00 frames. ], tot_loss[loss=0.3823, simple_loss=0.4111, pruned_loss=0.1768, over 3741362.74 frames. 
], batch size: 55, lr: 3.63e-02, grad_scale: 8.0 +2023-03-31 21:30:08,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7636.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:30:09,187 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7637.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:12,849 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7640.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,299 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,446 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:16,156 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 21:30:46,862 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7667.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:31:01,248 INFO [train.py:903] (2/4) Epoch 2, batch 850, loss[loss=0.3743, simple_loss=0.4173, pruned_loss=0.1657, over 19674.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.4096, pruned_loss=0.1754, over 3775348.99 frames. ], batch size: 58, lr: 3.62e-02, grad_scale: 8.0 +2023-03-31 21:31:41,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+02 9.016e+02 1.057e+03 1.450e+03 5.160e+03, threshold=2.114e+03, percent-clipped=6.0 +2023-03-31 21:31:49,177 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.99 vs. limit=2.0 +2023-03-31 21:31:56,589 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 21:32:02,481 INFO [train.py:903] (2/4) Epoch 2, batch 900, loss[loss=0.4302, simple_loss=0.4459, pruned_loss=0.2073, over 19314.00 frames. ], tot_loss[loss=0.3804, simple_loss=0.4096, pruned_loss=0.1756, over 3792571.70 frames. ], batch size: 66, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:32:32,785 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7752.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:43,854 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:53,076 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7769.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:33:06,315 INFO [train.py:903] (2/4) Epoch 2, batch 950, loss[loss=0.3627, simple_loss=0.399, pruned_loss=0.1632, over 19586.00 frames. ], tot_loss[loss=0.3791, simple_loss=0.4082, pruned_loss=0.175, over 3800574.22 frames. 
], batch size: 52, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:33:06,634 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7305, 2.9496, 3.0832, 3.0503, 1.0139, 2.6830, 2.5878, 2.6898], + device='cuda:2'), covar=tensor([0.0502, 0.0616, 0.0772, 0.0429, 0.2995, 0.0377, 0.0523, 0.1406], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0251, 0.0333, 0.0219, 0.0395, 0.0152, 0.0231, 0.0343], + device='cuda:2'), out_proj_covar=tensor([1.3799e-04, 1.5349e-04, 2.0886e-04, 1.2552e-04, 2.1090e-04, 9.7895e-05, + 1.3306e-04, 1.8856e-04], device='cuda:2') +2023-03-31 21:33:09,107 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7781.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:33:10,917 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 21:33:39,973 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7806.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:33:46,541 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.660e+02 8.698e+02 1.089e+03 1.494e+03 2.916e+03, threshold=2.178e+03, percent-clipped=6.0 +2023-03-31 21:33:58,888 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7820.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:34:09,631 INFO [train.py:903] (2/4) Epoch 2, batch 1000, loss[loss=0.3882, simple_loss=0.4198, pruned_loss=0.1783, over 19708.00 frames. ], tot_loss[loss=0.3791, simple_loss=0.4082, pruned_loss=0.175, over 3802634.37 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 4.0 +2023-03-31 21:35:04,943 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 21:35:11,827 INFO [train.py:903] (2/4) Epoch 2, batch 1050, loss[loss=0.3646, simple_loss=0.4091, pruned_loss=0.1601, over 19506.00 frames. ], tot_loss[loss=0.3803, simple_loss=0.4095, pruned_loss=0.1756, over 3803016.28 frames. ], batch size: 64, lr: 3.59e-02, grad_scale: 4.0 +2023-03-31 21:35:18,849 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7884.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:33,760 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7896.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:46,833 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 21:35:53,416 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+02 8.878e+02 9.952e+02 1.228e+03 3.126e+03, threshold=1.990e+03, percent-clipped=5.0 +2023-03-31 21:36:05,099 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7921.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:36:14,162 INFO [train.py:903] (2/4) Epoch 2, batch 1100, loss[loss=0.52, simple_loss=0.4945, pruned_loss=0.2728, over 13374.00 frames. ], tot_loss[loss=0.381, simple_loss=0.4098, pruned_loss=0.1761, over 3796665.08 frames. ], batch size: 136, lr: 3.58e-02, grad_scale: 4.0 +2023-03-31 21:36:14,704 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-03-31 21:36:25,961 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1959, 2.4983, 2.2617, 2.5921, 1.9588, 1.6325, 0.2441, 2.0036], + device='cuda:2'), covar=tensor([0.0727, 0.0490, 0.0360, 0.0453, 0.0914, 0.1001, 0.1399, 0.1113], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0195, 0.0186, 0.0231, 0.0268, 0.0239, 0.0241, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:37:16,845 INFO [train.py:903] (2/4) Epoch 2, batch 1150, loss[loss=0.5503, simple_loss=0.5188, pruned_loss=0.2909, over 13426.00 frames. ], tot_loss[loss=0.3796, simple_loss=0.4089, pruned_loss=0.1751, over 3801978.10 frames. ], batch size: 137, lr: 3.57e-02, grad_scale: 4.0 +2023-03-31 21:37:26,688 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7986.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:54,543 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:58,746 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+02 8.681e+02 1.035e+03 1.273e+03 2.854e+03, threshold=2.070e+03, percent-clipped=5.0 +2023-03-31 21:38:21,515 INFO [train.py:903] (2/4) Epoch 2, batch 1200, loss[loss=0.482, simple_loss=0.4737, pruned_loss=0.2452, over 13663.00 frames. ], tot_loss[loss=0.3783, simple_loss=0.4082, pruned_loss=0.1742, over 3798224.32 frames. ], batch size: 136, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:38:26,455 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8033.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:38:52,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 21:38:53,215 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-31 21:39:22,668 INFO [train.py:903] (2/4) Epoch 2, batch 1250, loss[loss=0.3967, simple_loss=0.4147, pruned_loss=0.1893, over 19477.00 frames. ], tot_loss[loss=0.3787, simple_loss=0.4085, pruned_loss=0.1744, over 3806146.82 frames. ], batch size: 49, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:39:50,775 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8101.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:39:57,188 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:40:05,141 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.392e+02 1.041e+03 1.254e+03 3.427e+03, threshold=2.083e+03, percent-clipped=3.0 +2023-03-31 21:40:25,457 INFO [train.py:903] (2/4) Epoch 2, batch 1300, loss[loss=0.4168, simple_loss=0.4351, pruned_loss=0.1992, over 17625.00 frames. ], tot_loss[loss=0.3785, simple_loss=0.4082, pruned_loss=0.1744, over 3811308.70 frames. 
], batch size: 101, lr: 3.55e-02, grad_scale: 8.0 +2023-03-31 21:40:39,821 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8140.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:09,964 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8164.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:11,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8165.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:28,090 INFO [train.py:903] (2/4) Epoch 2, batch 1350, loss[loss=0.3573, simple_loss=0.3823, pruned_loss=0.1662, over 19732.00 frames. ], tot_loss[loss=0.3793, simple_loss=0.4087, pruned_loss=0.1749, over 3816147.40 frames. ], batch size: 45, lr: 3.54e-02, grad_scale: 8.0 +2023-03-31 21:42:08,819 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.809e+02 9.088e+02 1.109e+03 1.527e+03 2.312e+03, threshold=2.218e+03, percent-clipped=6.0 +2023-03-31 21:42:19,673 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:42:30,882 INFO [train.py:903] (2/4) Epoch 2, batch 1400, loss[loss=0.4192, simple_loss=0.4397, pruned_loss=0.1993, over 19691.00 frames. ], tot_loss[loss=0.3769, simple_loss=0.407, pruned_loss=0.1734, over 3826107.60 frames. ], batch size: 59, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:05,875 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-31 21:43:32,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 21:43:33,844 INFO [train.py:903] (2/4) Epoch 2, batch 1450, loss[loss=0.4183, simple_loss=0.4403, pruned_loss=0.1982, over 17299.00 frames. ], tot_loss[loss=0.3758, simple_loss=0.4065, pruned_loss=0.1725, over 3823965.44 frames. ], batch size: 101, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:34,149 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8279.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:43:37,778 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-31 21:43:39,572 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8283.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:44:15,431 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+02 8.724e+02 1.078e+03 1.353e+03 2.729e+03, threshold=2.156e+03, percent-clipped=3.0 +2023-03-31 21:44:35,645 INFO [train.py:903] (2/4) Epoch 2, batch 1500, loss[loss=0.4639, simple_loss=0.464, pruned_loss=0.2319, over 18272.00 frames. ], tot_loss[loss=0.375, simple_loss=0.4057, pruned_loss=0.1721, over 3828252.22 frames. ], batch size: 83, lr: 3.52e-02, grad_scale: 8.0 +2023-03-31 21:45:10,898 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8357.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:45:38,222 INFO [train.py:903] (2/4) Epoch 2, batch 1550, loss[loss=0.345, simple_loss=0.3921, pruned_loss=0.1489, over 19662.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.4059, pruned_loss=0.1722, over 3839739.85 frames. 
], batch size: 53, lr: 3.51e-02, grad_scale: 8.0 +2023-03-31 21:45:42,373 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8382.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:46:11,620 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3110, 1.0308, 1.3139, 0.4128, 2.6324, 2.2970, 2.0886, 2.3381], + device='cuda:2'), covar=tensor([0.1401, 0.2706, 0.2593, 0.2581, 0.0292, 0.0185, 0.0346, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0277, 0.0320, 0.0290, 0.0199, 0.0108, 0.0183, 0.0112], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 21:46:14,585 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-31 21:46:19,232 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.712e+02 9.525e+02 1.175e+03 1.582e+03 3.285e+03, threshold=2.351e+03, percent-clipped=5.0 +2023-03-31 21:46:41,075 INFO [train.py:903] (2/4) Epoch 2, batch 1600, loss[loss=0.3703, simple_loss=0.3999, pruned_loss=0.1704, over 19475.00 frames. ], tot_loss[loss=0.3748, simple_loss=0.4057, pruned_loss=0.172, over 3840202.86 frames. ], batch size: 49, lr: 3.50e-02, grad_scale: 8.0 +2023-03-31 21:47:02,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 21:47:39,722 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:47:42,852 INFO [train.py:903] (2/4) Epoch 2, batch 1650, loss[loss=0.2976, simple_loss=0.3584, pruned_loss=0.1184, over 19662.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.4066, pruned_loss=0.1724, over 3835539.16 frames. ], batch size: 53, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:10,081 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:48:18,255 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-31 21:48:23,195 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.971e+02 8.791e+02 1.048e+03 1.403e+03 4.696e+03, threshold=2.096e+03, percent-clipped=2.0 +2023-03-31 21:48:44,123 INFO [train.py:903] (2/4) Epoch 2, batch 1700, loss[loss=0.4545, simple_loss=0.4655, pruned_loss=0.2217, over 19375.00 frames. ], tot_loss[loss=0.3776, simple_loss=0.4078, pruned_loss=0.1737, over 3843894.72 frames. ], batch size: 66, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:52,101 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8535.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:21,996 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0956, 1.1213, 1.7268, 1.4855, 2.4401, 2.5565, 2.6813, 1.0485], + device='cuda:2'), covar=tensor([0.1419, 0.1940, 0.1142, 0.1331, 0.0812, 0.0711, 0.0954, 0.1783], + device='cuda:2'), in_proj_covar=tensor([0.0326, 0.0351, 0.0317, 0.0334, 0.0375, 0.0301, 0.0436, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 21:49:23,206 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8560.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,912 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-03-31 21:49:30,591 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2092, 3.4812, 3.6472, 3.5774, 1.1545, 3.2596, 3.0934, 3.1374], + device='cuda:2'), covar=tensor([0.0383, 0.0531, 0.0520, 0.0311, 0.3044, 0.0252, 0.0370, 0.1118], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0262, 0.0350, 0.0241, 0.0396, 0.0166, 0.0236, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 21:49:34,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-31 21:49:46,292 INFO [train.py:903] (2/4) Epoch 2, batch 1750, loss[loss=0.3516, simple_loss=0.3925, pruned_loss=0.1553, over 19674.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4063, pruned_loss=0.1722, over 3844629.55 frames. ], batch size: 53, lr: 3.48e-02, grad_scale: 8.0 +2023-03-31 21:50:27,274 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.711e+02 8.589e+02 1.062e+03 1.367e+03 2.706e+03, threshold=2.124e+03, percent-clipped=6.0 +2023-03-31 21:50:47,015 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8627.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:50:48,943 INFO [train.py:903] (2/4) Epoch 2, batch 1800, loss[loss=0.4335, simple_loss=0.4467, pruned_loss=0.2102, over 19671.00 frames. ], tot_loss[loss=0.3724, simple_loss=0.4043, pruned_loss=0.1702, over 3856494.35 frames. ], batch size: 58, lr: 3.47e-02, grad_scale: 8.0 +2023-03-31 21:51:09,745 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-31 21:51:48,869 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 21:51:52,364 INFO [train.py:903] (2/4) Epoch 2, batch 1850, loss[loss=0.3158, simple_loss=0.3592, pruned_loss=0.1362, over 19619.00 frames. ], tot_loss[loss=0.3718, simple_loss=0.4038, pruned_loss=0.1699, over 3844143.18 frames. ], batch size: 50, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:52:26,722 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 21:52:32,412 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.683e+02 8.545e+02 1.022e+03 1.402e+03 2.945e+03, threshold=2.044e+03, percent-clipped=4.0 +2023-03-31 21:52:53,431 INFO [train.py:903] (2/4) Epoch 2, batch 1900, loss[loss=0.4198, simple_loss=0.4434, pruned_loss=0.1981, over 19709.00 frames. ], tot_loss[loss=0.3729, simple_loss=0.4053, pruned_loss=0.1703, over 3839858.65 frames. ], batch size: 63, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:53:05,487 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5707, 1.8631, 2.0499, 2.3583, 4.1448, 1.5158, 2.2010, 3.8022], + device='cuda:2'), covar=tensor([0.0295, 0.2539, 0.2450, 0.1526, 0.0274, 0.2250, 0.1274, 0.0426], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0300, 0.0280, 0.0272, 0.0245, 0.0317, 0.0258, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:53:10,064 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:13,031 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-03-31 21:53:19,063 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 21:53:44,514 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 21:53:56,641 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8778.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:57,530 INFO [train.py:903] (2/4) Epoch 2, batch 1950, loss[loss=0.3969, simple_loss=0.4149, pruned_loss=0.1895, over 19672.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.405, pruned_loss=0.1701, over 3819031.33 frames. ], batch size: 53, lr: 3.45e-02, grad_scale: 8.0 +2023-03-31 21:54:14,923 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-31 21:54:38,566 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.007e+02 8.441e+02 1.013e+03 1.280e+03 2.038e+03, threshold=2.026e+03, percent-clipped=0.0 +2023-03-31 21:55:00,991 INFO [train.py:903] (2/4) Epoch 2, batch 2000, loss[loss=0.3715, simple_loss=0.3921, pruned_loss=0.1755, over 19776.00 frames. ], tot_loss[loss=0.3723, simple_loss=0.4048, pruned_loss=0.1699, over 3816454.67 frames. ], batch size: 48, lr: 3.44e-02, grad_scale: 8.0 +2023-03-31 21:56:00,561 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 21:56:01,580 INFO [train.py:903] (2/4) Epoch 2, batch 2050, loss[loss=0.3968, simple_loss=0.4264, pruned_loss=0.1837, over 19448.00 frames. ], tot_loss[loss=0.3731, simple_loss=0.4052, pruned_loss=0.1705, over 3808928.56 frames. ], batch size: 62, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:56:19,570 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 21:56:20,744 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 21:56:41,660 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 21:56:44,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.978e+02 1.029e+03 1.194e+03 1.427e+03 4.040e+03, threshold=2.389e+03, percent-clipped=7.0 +2023-03-31 21:57:05,513 INFO [train.py:903] (2/4) Epoch 2, batch 2100, loss[loss=0.3738, simple_loss=0.4203, pruned_loss=0.1637, over 19682.00 frames. ], tot_loss[loss=0.3721, simple_loss=0.4048, pruned_loss=0.1697, over 3805153.31 frames. ], batch size: 60, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:57:22,239 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7228, 2.0582, 2.0366, 2.3660, 4.2652, 1.4213, 2.1012, 4.1651], + device='cuda:2'), covar=tensor([0.0272, 0.2435, 0.2323, 0.1726, 0.0285, 0.2335, 0.1259, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0298, 0.0278, 0.0273, 0.0240, 0.0315, 0.0254, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 21:57:25,751 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8945.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:57:36,778 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 21:57:59,422 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-03-31 21:58:07,617 INFO [train.py:903] (2/4) Epoch 2, batch 2150, loss[loss=0.3316, simple_loss=0.376, pruned_loss=0.1436, over 19766.00 frames. ], tot_loss[loss=0.3706, simple_loss=0.4043, pruned_loss=0.1685, over 3806235.60 frames. ], batch size: 54, lr: 3.42e-02, grad_scale: 8.0 +2023-03-31 21:58:32,356 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8998.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:40,134 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9004.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:49,982 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+02 7.392e+02 9.171e+02 1.181e+03 2.165e+03, threshold=1.834e+03, percent-clipped=0.0 +2023-03-31 21:59:04,421 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9023.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:11,989 INFO [train.py:903] (2/4) Epoch 2, batch 2200, loss[loss=0.3484, simple_loss=0.3961, pruned_loss=0.1504, over 19604.00 frames. ], tot_loss[loss=0.3698, simple_loss=0.4035, pruned_loss=0.1681, over 3810708.40 frames. ], batch size: 57, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 21:59:14,458 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9031.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:56,199 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8834, 1.1311, 1.3439, 1.6220, 2.6063, 1.1008, 1.8407, 2.5279], + device='cuda:2'), covar=tensor([0.0451, 0.2746, 0.2566, 0.1605, 0.0472, 0.2259, 0.1090, 0.0528], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0302, 0.0283, 0.0270, 0.0247, 0.0320, 0.0258, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:00:11,483 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-31 22:00:13,982 INFO [train.py:903] (2/4) Epoch 2, batch 2250, loss[loss=0.395, simple_loss=0.4305, pruned_loss=0.1797, over 19688.00 frames. ], tot_loss[loss=0.3697, simple_loss=0.403, pruned_loss=0.1682, over 3807147.73 frames. ], batch size: 59, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 22:00:56,140 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.993e+02 8.727e+02 9.943e+02 1.293e+03 2.077e+03, threshold=1.989e+03, percent-clipped=4.0 +2023-03-31 22:01:08,132 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9122.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:01:17,875 INFO [train.py:903] (2/4) Epoch 2, batch 2300, loss[loss=0.3618, simple_loss=0.4045, pruned_loss=0.1595, over 19340.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.4023, pruned_loss=0.1675, over 3797240.59 frames. ], batch size: 66, lr: 3.40e-02, grad_scale: 8.0 +2023-03-31 22:01:30,595 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-03-31 22:01:49,940 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3198, 3.5300, 3.7123, 3.6606, 1.3689, 3.3862, 3.0746, 3.2262], + device='cuda:2'), covar=tensor([0.0338, 0.0483, 0.0460, 0.0311, 0.2842, 0.0230, 0.0396, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0279, 0.0370, 0.0259, 0.0421, 0.0171, 0.0254, 0.0380], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 22:01:50,368 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.96 vs. limit=5.0 +2023-03-31 22:02:16,131 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1061, 1.2822, 0.7877, 0.8913, 0.9833, 1.1032, 0.0298, 0.4082], + device='cuda:2'), covar=tensor([0.0467, 0.0434, 0.0320, 0.0373, 0.0898, 0.0493, 0.1007, 0.0828], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0207, 0.0205, 0.0244, 0.0288, 0.0253, 0.0257, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:02:19,099 INFO [train.py:903] (2/4) Epoch 2, batch 2350, loss[loss=0.3976, simple_loss=0.4133, pruned_loss=0.191, over 19492.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4016, pruned_loss=0.1675, over 3795738.95 frames. ], batch size: 49, lr: 3.39e-02, grad_scale: 8.0 +2023-03-31 22:03:00,735 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.988e+02 9.117e+02 1.090e+03 1.432e+03 2.529e+03, threshold=2.180e+03, percent-clipped=5.0 +2023-03-31 22:03:00,812 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 22:03:18,264 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 22:03:22,887 INFO [train.py:903] (2/4) Epoch 2, batch 2400, loss[loss=0.3832, simple_loss=0.4165, pruned_loss=0.1749, over 19298.00 frames. ], tot_loss[loss=0.3698, simple_loss=0.403, pruned_loss=0.1683, over 3810727.81 frames. ], batch size: 66, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:03:32,292 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9237.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:24,300 INFO [train.py:903] (2/4) Epoch 2, batch 2450, loss[loss=0.3483, simple_loss=0.3902, pruned_loss=0.1532, over 19580.00 frames. ], tot_loss[loss=0.3721, simple_loss=0.4047, pruned_loss=0.1698, over 3795648.12 frames. ], batch size: 52, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:04:38,173 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9289.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:40,184 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-31 22:05:06,392 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 8.791e+02 1.096e+03 1.484e+03 3.289e+03, threshold=2.192e+03, percent-clipped=7.0 +2023-03-31 22:05:27,998 INFO [train.py:903] (2/4) Epoch 2, batch 2500, loss[loss=0.3167, simple_loss=0.3693, pruned_loss=0.132, over 19494.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.402, pruned_loss=0.1677, over 3805769.90 frames. 
], batch size: 49, lr: 3.37e-02, grad_scale: 8.0 +2023-03-31 22:05:40,108 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1469, 1.1921, 1.3749, 1.6663, 2.7362, 1.2134, 2.0280, 2.7775], + device='cuda:2'), covar=tensor([0.0487, 0.2793, 0.2654, 0.1610, 0.0498, 0.2191, 0.1121, 0.0549], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0298, 0.0281, 0.0269, 0.0244, 0.0311, 0.0256, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:05:52,445 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9348.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:25,081 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9375.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:29,693 INFO [train.py:903] (2/4) Epoch 2, batch 2550, loss[loss=0.3555, simple_loss=0.3776, pruned_loss=0.1667, over 19746.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.4018, pruned_loss=0.1678, over 3804061.17 frames. ], batch size: 46, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:01,794 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9404.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:07:11,554 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+02 8.134e+02 9.492e+02 1.275e+03 2.544e+03, threshold=1.898e+03, percent-clipped=3.0 +2023-03-31 22:07:16,499 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7526, 1.1048, 1.4975, 0.9893, 2.4826, 2.8313, 2.7507, 3.0057], + device='cuda:2'), covar=tensor([0.1474, 0.3598, 0.3222, 0.2486, 0.0500, 0.0211, 0.0254, 0.0180], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0281, 0.0322, 0.0290, 0.0197, 0.0107, 0.0183, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 22:07:25,240 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 22:07:33,273 INFO [train.py:903] (2/4) Epoch 2, batch 2600, loss[loss=0.3035, simple_loss=0.3528, pruned_loss=0.1271, over 19758.00 frames. ], tot_loss[loss=0.3682, simple_loss=0.4015, pruned_loss=0.1675, over 3810826.33 frames. ], batch size: 47, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:36,930 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:07:38,568 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. 
limit=2.0 +2023-03-31 22:07:41,706 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5556, 2.3902, 1.9168, 2.1087, 2.2790, 1.2383, 0.8377, 1.5976], + device='cuda:2'), covar=tensor([0.1095, 0.0373, 0.1055, 0.0520, 0.0570, 0.1470, 0.1284, 0.0826], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0182, 0.0272, 0.0240, 0.0183, 0.0289, 0.0260, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:08:14,503 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8370, 1.4751, 1.5808, 1.9628, 3.3590, 1.4277, 2.0988, 3.3344], + device='cuda:2'), covar=tensor([0.0239, 0.2385, 0.2304, 0.1470, 0.0319, 0.1959, 0.1081, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0300, 0.0283, 0.0268, 0.0243, 0.0307, 0.0257, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:08:14,549 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9463.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:34,700 INFO [train.py:903] (2/4) Epoch 2, batch 2650, loss[loss=0.4996, simple_loss=0.4717, pruned_loss=0.2638, over 13630.00 frames. ], tot_loss[loss=0.3705, simple_loss=0.4032, pruned_loss=0.1689, over 3800173.16 frames. ], batch size: 137, lr: 3.35e-02, grad_scale: 8.0 +2023-03-31 22:08:49,042 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9490.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:51,459 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9323, 1.2881, 0.8609, 1.0976, 1.1442, 0.9207, 0.4471, 1.2584], + device='cuda:2'), covar=tensor([0.0710, 0.0629, 0.1310, 0.0499, 0.0755, 0.1389, 0.1197, 0.0684], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0186, 0.0271, 0.0241, 0.0186, 0.0285, 0.0262, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-31 22:08:52,683 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9493.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:53,498 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 22:09:16,688 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.258e+02 8.887e+02 1.023e+03 1.383e+03 3.476e+03, threshold=2.047e+03, percent-clipped=7.0 +2023-03-31 22:09:24,059 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9518.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:09:36,968 INFO [train.py:903] (2/4) Epoch 2, batch 2700, loss[loss=0.3634, simple_loss=0.3877, pruned_loss=0.1696, over 19848.00 frames. ], tot_loss[loss=0.3689, simple_loss=0.4017, pruned_loss=0.1681, over 3809660.24 frames. ], batch size: 52, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:10:39,562 INFO [train.py:903] (2/4) Epoch 2, batch 2750, loss[loss=0.2886, simple_loss=0.335, pruned_loss=0.1211, over 19739.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4008, pruned_loss=0.1676, over 3795065.52 frames. ], batch size: 51, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:10:51,545 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-31 22:11:08,817 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9602.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:11:20,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.046e+02 9.047e+02 1.065e+03 1.297e+03 2.590e+03, threshold=2.130e+03, percent-clipped=3.0 +2023-03-31 22:11:43,313 INFO [train.py:903] (2/4) Epoch 2, batch 2800, loss[loss=0.3511, simple_loss=0.3673, pruned_loss=0.1675, over 19298.00 frames. ], tot_loss[loss=0.366, simple_loss=0.3996, pruned_loss=0.1662, over 3798771.51 frames. ], batch size: 44, lr: 3.33e-02, grad_scale: 8.0 +2023-03-31 22:12:03,379 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-31 22:12:06,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-31 22:12:20,948 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9660.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:41,331 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9676.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:44,610 INFO [train.py:903] (2/4) Epoch 2, batch 2850, loss[loss=0.3637, simple_loss=0.4055, pruned_loss=0.161, over 19521.00 frames. ], tot_loss[loss=0.3669, simple_loss=0.4005, pruned_loss=0.1666, over 3799309.40 frames. ], batch size: 56, lr: 3.32e-02, grad_scale: 8.0 +2023-03-31 22:12:51,757 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9685.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:26,588 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.408e+02 8.898e+02 1.125e+03 1.384e+03 2.599e+03, threshold=2.251e+03, percent-clipped=6.0 +2023-03-31 22:13:35,223 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:45,581 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 22:13:46,859 INFO [train.py:903] (2/4) Epoch 2, batch 2900, loss[loss=0.3111, simple_loss=0.357, pruned_loss=0.1326, over 16905.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.3999, pruned_loss=0.1655, over 3811278.43 frames. 
], batch size: 37, lr: 3.31e-02, grad_scale: 16.0 +2023-03-31 22:14:06,884 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9744.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:08,979 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9746.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:23,241 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3028, 1.0184, 1.2922, 0.3465, 2.6373, 2.2546, 2.0975, 2.4196], + device='cuda:2'), covar=tensor([0.1367, 0.2792, 0.2682, 0.2626, 0.0319, 0.0212, 0.0368, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0281, 0.0320, 0.0289, 0.0196, 0.0105, 0.0186, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 22:14:27,837 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:39,346 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:45,622 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:49,533 INFO [train.py:903] (2/4) Epoch 2, batch 2950, loss[loss=0.3803, simple_loss=0.4048, pruned_loss=0.1779, over 19667.00 frames. ], tot_loss[loss=0.3666, simple_loss=0.4005, pruned_loss=0.1663, over 3822268.10 frames. ], batch size: 53, lr: 3.31e-02, grad_scale: 8.0 +2023-03-31 22:15:31,702 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+02 8.475e+02 1.131e+03 1.413e+03 3.215e+03, threshold=2.262e+03, percent-clipped=4.0 +2023-03-31 22:15:53,087 INFO [train.py:903] (2/4) Epoch 2, batch 3000, loss[loss=0.2942, simple_loss=0.3538, pruned_loss=0.1174, over 19576.00 frames. ], tot_loss[loss=0.3655, simple_loss=0.3997, pruned_loss=0.1657, over 3833268.80 frames. ], batch size: 52, lr: 3.30e-02, grad_scale: 4.0 +2023-03-31 22:15:53,088 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 22:16:06,239 INFO [train.py:937] (2/4) Epoch 2, validation: loss=0.2513, simple_loss=0.3423, pruned_loss=0.08019, over 944034.00 frames. +2023-03-31 22:16:06,241 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17398MB +2023-03-31 22:16:12,110 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 22:17:06,140 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6551, 1.8972, 0.9156, 1.3181, 1.1091, 1.2117, 0.0206, 0.7591], + device='cuda:2'), covar=tensor([0.0372, 0.0363, 0.0304, 0.0291, 0.0839, 0.0474, 0.0800, 0.0711], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0215, 0.0208, 0.0242, 0.0282, 0.0250, 0.0255, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:17:08,085 INFO [train.py:903] (2/4) Epoch 2, batch 3050, loss[loss=0.3586, simple_loss=0.4043, pruned_loss=0.1565, over 18126.00 frames. ], tot_loss[loss=0.3672, simple_loss=0.4015, pruned_loss=0.1664, over 3810867.07 frames. 
], batch size: 83, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:17:22,787 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9891.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:17:50,736 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 8.806e+02 1.158e+03 1.402e+03 3.282e+03, threshold=2.315e+03, percent-clipped=2.0 +2023-03-31 22:18:10,042 INFO [train.py:903] (2/4) Epoch 2, batch 3100, loss[loss=0.3898, simple_loss=0.4185, pruned_loss=0.1806, over 18771.00 frames. ], tot_loss[loss=0.3673, simple_loss=0.4018, pruned_loss=0.1664, over 3809478.81 frames. ], batch size: 74, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:18:29,709 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9946.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:19:11,245 INFO [train.py:903] (2/4) Epoch 2, batch 3150, loss[loss=0.3559, simple_loss=0.3953, pruned_loss=0.1583, over 19775.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4028, pruned_loss=0.167, over 3810707.38 frames. ], batch size: 56, lr: 3.28e-02, grad_scale: 4.0 +2023-03-31 22:19:42,626 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 22:19:46,970 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-31 22:19:56,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 7.345e+02 9.404e+02 1.247e+03 3.615e+03, threshold=1.881e+03, percent-clipped=3.0 +2023-03-31 22:20:03,833 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10020.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:15,210 INFO [train.py:903] (2/4) Epoch 2, batch 3200, loss[loss=0.3099, simple_loss=0.3555, pruned_loss=0.1321, over 19480.00 frames. ], tot_loss[loss=0.3677, simple_loss=0.4021, pruned_loss=0.1667, over 3800564.33 frames. ], batch size: 49, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:20:50,896 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10057.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:55,731 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:21:19,925 INFO [train.py:903] (2/4) Epoch 2, batch 3250, loss[loss=0.4133, simple_loss=0.4148, pruned_loss=0.2059, over 19747.00 frames. ], tot_loss[loss=0.3663, simple_loss=0.4009, pruned_loss=0.1658, over 3809092.25 frames. ], batch size: 46, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:21:21,259 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10080.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:21:50,575 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:02,472 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+02 8.486e+02 1.037e+03 1.291e+03 3.604e+03, threshold=2.074e+03, percent-clipped=6.0 +2023-03-31 22:22:20,716 INFO [train.py:903] (2/4) Epoch 2, batch 3300, loss[loss=0.3843, simple_loss=0.4108, pruned_loss=0.179, over 19761.00 frames. ], tot_loss[loss=0.3658, simple_loss=0.4002, pruned_loss=0.1657, over 3807872.56 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:22:25,243 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-03-31 22:22:27,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10135.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:42,680 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10147.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:07,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-03-31 22:23:14,949 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10172.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:22,725 INFO [train.py:903] (2/4) Epoch 2, batch 3350, loss[loss=0.3121, simple_loss=0.3506, pruned_loss=0.1368, over 19732.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.4, pruned_loss=0.1654, over 3803653.86 frames. ], batch size: 46, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:23:48,554 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-31 22:24:07,629 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.361e+02 9.909e+02 1.198e+03 2.844e+03, threshold=1.982e+03, percent-clipped=3.0 +2023-03-31 22:24:14,953 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:24:25,926 INFO [train.py:903] (2/4) Epoch 2, batch 3400, loss[loss=0.2716, simple_loss=0.325, pruned_loss=0.1091, over 19287.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.3986, pruned_loss=0.1642, over 3788668.48 frames. ], batch size: 44, lr: 3.25e-02, grad_scale: 8.0 +2023-03-31 22:25:29,367 INFO [train.py:903] (2/4) Epoch 2, batch 3450, loss[loss=0.3731, simple_loss=0.415, pruned_loss=0.1657, over 19782.00 frames. ], tot_loss[loss=0.3643, simple_loss=0.3992, pruned_loss=0.1647, over 3803865.13 frames. ], batch size: 56, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:25:32,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 22:25:59,046 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7476, 1.6570, 1.6385, 2.1709, 3.2885, 1.3804, 2.1324, 3.2061], + device='cuda:2'), covar=tensor([0.0278, 0.2180, 0.2347, 0.1493, 0.0373, 0.2116, 0.1048, 0.0437], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0293, 0.0283, 0.0275, 0.0246, 0.0313, 0.0255, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:26:04,771 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-31 22:26:13,346 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+02 9.409e+02 1.166e+03 1.453e+03 2.796e+03, threshold=2.333e+03, percent-clipped=9.0 +2023-03-31 22:26:17,079 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10317.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:26:31,786 INFO [train.py:903] (2/4) Epoch 2, batch 3500, loss[loss=0.3305, simple_loss=0.3687, pruned_loss=0.1462, over 19715.00 frames. ], tot_loss[loss=0.3635, simple_loss=0.3987, pruned_loss=0.1642, over 3805703.53 frames. 
], batch size: 51, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:26:43,564 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6935, 1.1129, 1.4674, 1.0909, 2.4900, 3.2140, 3.3969, 3.5851], + device='cuda:2'), covar=tensor([0.1473, 0.2891, 0.2878, 0.2499, 0.0555, 0.0145, 0.0188, 0.0141], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0277, 0.0323, 0.0285, 0.0195, 0.0103, 0.0184, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 22:26:46,982 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10342.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:27:13,430 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7936, 1.7391, 1.3568, 2.7380, 1.9310, 2.9681, 1.9529, 1.1111], + device='cuda:2'), covar=tensor([0.1521, 0.1155, 0.1026, 0.0602, 0.1236, 0.0274, 0.1695, 0.1604], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0311, 0.0330, 0.0428, 0.0399, 0.0233, 0.0425, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:27:20,663 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.21 vs. limit=5.0 +2023-03-31 22:27:33,758 INFO [train.py:903] (2/4) Epoch 2, batch 3550, loss[loss=0.3232, simple_loss=0.3804, pruned_loss=0.133, over 19530.00 frames. ], tot_loss[loss=0.3613, simple_loss=0.3973, pruned_loss=0.1627, over 3803426.43 frames. ], batch size: 56, lr: 3.23e-02, grad_scale: 4.0 +2023-03-31 22:27:49,430 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10391.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:04,143 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10401.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:20,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+02 7.776e+02 1.013e+03 1.369e+03 3.978e+03, threshold=2.027e+03, percent-clipped=2.0 +2023-03-31 22:28:21,498 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10416.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:30,598 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10424.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:28:35,026 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10427.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:37,134 INFO [train.py:903] (2/4) Epoch 2, batch 3600, loss[loss=0.3644, simple_loss=0.4016, pruned_loss=0.1636, over 19786.00 frames. ], tot_loss[loss=0.3619, simple_loss=0.3977, pruned_loss=0.1631, over 3798477.37 frames. 
], batch size: 56, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:32,588 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6727, 4.0357, 4.3916, 4.3152, 1.5930, 3.9880, 3.5892, 3.7679], + device='cuda:2'), covar=tensor([0.0448, 0.0467, 0.0434, 0.0236, 0.3277, 0.0214, 0.0360, 0.1026], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0280, 0.0383, 0.0271, 0.0427, 0.0176, 0.0257, 0.0379], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 22:29:32,606 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10472.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:37,370 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:41,347 INFO [train.py:903] (2/4) Epoch 2, batch 3650, loss[loss=0.3197, simple_loss=0.3729, pruned_loss=0.1333, over 19538.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.3964, pruned_loss=0.1619, over 3808690.79 frames. ], batch size: 56, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:30:09,136 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:26,003 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+02 8.454e+02 1.072e+03 1.396e+03 2.688e+03, threshold=2.143e+03, percent-clipped=6.0 +2023-03-31 22:30:27,510 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10516.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:45,122 INFO [train.py:903] (2/4) Epoch 2, batch 3700, loss[loss=0.3084, simple_loss=0.3508, pruned_loss=0.133, over 19480.00 frames. ], tot_loss[loss=0.3608, simple_loss=0.397, pruned_loss=0.1623, over 3815896.42 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 8.0 +2023-03-31 22:30:57,234 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10539.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:31:47,580 INFO [train.py:903] (2/4) Epoch 2, batch 3750, loss[loss=0.3676, simple_loss=0.4033, pruned_loss=0.166, over 19675.00 frames. ], tot_loss[loss=0.3591, simple_loss=0.3958, pruned_loss=0.1612, over 3814593.23 frames. ], batch size: 58, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:32:29,429 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6932, 2.4274, 1.8084, 2.0190, 1.8155, 1.8822, 0.8528, 2.0409], + device='cuda:2'), covar=tensor([0.0425, 0.0383, 0.0283, 0.0347, 0.0551, 0.0576, 0.0729, 0.0600], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0220, 0.0244, 0.0293, 0.0258, 0.0251, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:32:33,530 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+02 8.986e+02 1.057e+03 1.326e+03 2.585e+03, threshold=2.114e+03, percent-clipped=3.0 +2023-03-31 22:32:50,170 INFO [train.py:903] (2/4) Epoch 2, batch 3800, loss[loss=0.4286, simple_loss=0.4262, pruned_loss=0.2155, over 19752.00 frames. ], tot_loss[loss=0.3581, simple_loss=0.3948, pruned_loss=0.1607, over 3814700.29 frames. ], batch size: 47, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:33:23,948 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-03-31 22:33:51,760 INFO [train.py:903] (2/4) Epoch 2, batch 3850, loss[loss=0.3959, simple_loss=0.4151, pruned_loss=0.1884, over 18075.00 frames. ], tot_loss[loss=0.3583, simple_loss=0.3946, pruned_loss=0.161, over 3810465.04 frames. ], batch size: 83, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:34:37,546 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.106e+02 8.604e+02 1.077e+03 1.442e+03 2.985e+03, threshold=2.155e+03, percent-clipped=6.0 +2023-03-31 22:34:56,858 INFO [train.py:903] (2/4) Epoch 2, batch 3900, loss[loss=0.3555, simple_loss=0.4018, pruned_loss=0.1546, over 18094.00 frames. ], tot_loss[loss=0.3561, simple_loss=0.3934, pruned_loss=0.1594, over 3826664.81 frames. ], batch size: 83, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:35:36,261 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10762.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:49,462 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:50,883 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10772.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:58,740 INFO [train.py:903] (2/4) Epoch 2, batch 3950, loss[loss=0.3164, simple_loss=0.3607, pruned_loss=0.1361, over 19723.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.3908, pruned_loss=0.1574, over 3830047.04 frames. ], batch size: 51, lr: 3.18e-02, grad_scale: 8.0 +2023-03-31 22:36:04,569 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 22:36:04,744 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4142, 4.0573, 2.3387, 3.5891, 1.5474, 3.7788, 3.5078, 3.7768], + device='cuda:2'), covar=tensor([0.0577, 0.1074, 0.2068, 0.0819, 0.3310, 0.0891, 0.0693, 0.0708], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0280, 0.0313, 0.0256, 0.0325, 0.0279, 0.0217, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:36:15,268 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10792.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:18,962 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10795.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:36:21,164 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10797.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:44,829 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.316e+02 7.972e+02 1.008e+03 1.220e+03 2.629e+03, threshold=2.016e+03, percent-clipped=1.0 +2023-03-31 22:36:46,306 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10816.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:51,364 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10820.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:37:01,756 INFO [train.py:903] (2/4) Epoch 2, batch 4000, loss[loss=0.3351, simple_loss=0.39, pruned_loss=0.1401, over 19676.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3901, pruned_loss=0.1562, over 3833147.68 frames. ], batch size: 60, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:37:49,261 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-03-31 22:38:02,800 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1995, 1.1837, 2.1735, 1.5030, 3.2731, 3.2834, 3.5391, 1.5719], + device='cuda:2'), covar=tensor([0.1542, 0.2326, 0.1426, 0.1370, 0.0876, 0.0854, 0.1105, 0.2055], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0382, 0.0351, 0.0352, 0.0415, 0.0333, 0.0480, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:38:04,673 INFO [train.py:903] (2/4) Epoch 2, batch 4050, loss[loss=0.4058, simple_loss=0.429, pruned_loss=0.1914, over 17450.00 frames. ], tot_loss[loss=0.3518, simple_loss=0.3907, pruned_loss=0.1564, over 3821771.00 frames. ], batch size: 101, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:38:15,759 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10886.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:38:35,390 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10903.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:38:49,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.194e+02 9.186e+02 1.101e+03 1.338e+03 4.215e+03, threshold=2.201e+03, percent-clipped=7.0 +2023-03-31 22:39:10,604 INFO [train.py:903] (2/4) Epoch 2, batch 4100, loss[loss=0.3893, simple_loss=0.4147, pruned_loss=0.182, over 19302.00 frames. ], tot_loss[loss=0.3502, simple_loss=0.3897, pruned_loss=0.1554, over 3838861.56 frames. ], batch size: 66, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:39:13,325 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10931.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:39:45,342 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 22:39:53,031 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.09 vs. limit=2.0 +2023-03-31 22:40:11,021 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7981, 4.2645, 2.1910, 3.8246, 1.2869, 3.9896, 3.8334, 3.8280], + device='cuda:2'), covar=tensor([0.0528, 0.1360, 0.2447, 0.0738, 0.4072, 0.0965, 0.0709, 0.0806], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0290, 0.0317, 0.0262, 0.0333, 0.0285, 0.0219, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:40:13,120 INFO [train.py:903] (2/4) Epoch 2, batch 4150, loss[loss=0.3352, simple_loss=0.3715, pruned_loss=0.1495, over 19611.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3896, pruned_loss=0.1556, over 3833612.07 frames. ], batch size: 50, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:40:59,341 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 7.731e+02 1.007e+03 1.258e+03 2.097e+03, threshold=2.015e+03, percent-clipped=0.0 +2023-03-31 22:41:15,491 INFO [train.py:903] (2/4) Epoch 2, batch 4200, loss[loss=0.4251, simple_loss=0.4368, pruned_loss=0.2067, over 19669.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3918, pruned_loss=0.1579, over 3831155.27 frames. ], batch size: 55, lr: 3.15e-02, grad_scale: 8.0 +2023-03-31 22:41:18,925 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 22:42:18,013 INFO [train.py:903] (2/4) Epoch 2, batch 4250, loss[loss=0.3466, simple_loss=0.3647, pruned_loss=0.1643, over 19736.00 frames. 
], tot_loss[loss=0.3547, simple_loss=0.3925, pruned_loss=0.1585, over 3837653.78 frames. ], batch size: 47, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:42:25,478 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-31 22:42:35,132 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 22:42:45,409 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 22:42:52,690 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:03,982 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.611e+02 8.485e+02 1.107e+03 1.406e+03 3.284e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 22:43:21,881 INFO [train.py:903] (2/4) Epoch 2, batch 4300, loss[loss=0.3935, simple_loss=0.4308, pruned_loss=0.1781, over 19540.00 frames. ], tot_loss[loss=0.3567, simple_loss=0.3941, pruned_loss=0.1596, over 3830287.55 frames. ], batch size: 56, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:43:29,552 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1017, 1.1617, 2.2282, 1.4178, 2.8938, 3.1783, 3.4404, 1.5694], + device='cuda:2'), covar=tensor([0.1383, 0.2009, 0.1101, 0.1294, 0.0973, 0.0723, 0.1217, 0.1898], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0378, 0.0350, 0.0350, 0.0413, 0.0328, 0.0487, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:43:30,409 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11136.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:37,590 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11142.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:09,023 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11167.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:16,385 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 22:44:17,148 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.63 vs. limit=5.0 +2023-03-31 22:44:23,253 INFO [train.py:903] (2/4) Epoch 2, batch 4350, loss[loss=0.2917, simple_loss=0.3525, pruned_loss=0.1155, over 19681.00 frames. ], tot_loss[loss=0.3553, simple_loss=0.3932, pruned_loss=0.1587, over 3831511.34 frames. 
], batch size: 53, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:44:24,658 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1101, 3.2615, 3.5717, 3.5303, 1.7908, 3.1506, 2.9171, 3.2229], + device='cuda:2'), covar=tensor([0.0444, 0.0869, 0.0378, 0.0262, 0.2327, 0.0264, 0.0345, 0.0733], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0281, 0.0395, 0.0278, 0.0426, 0.0187, 0.0267, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 22:44:32,854 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11187.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:39,338 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11192.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:04,782 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11212.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:09,089 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.029e+02 7.841e+02 9.559e+02 1.160e+03 2.939e+03, threshold=1.912e+03, percent-clipped=2.0 +2023-03-31 22:45:15,684 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:24,368 INFO [train.py:903] (2/4) Epoch 2, batch 4400, loss[loss=0.3856, simple_loss=0.4172, pruned_loss=0.177, over 19745.00 frames. ], tot_loss[loss=0.3555, simple_loss=0.3928, pruned_loss=0.1591, over 3824735.92 frames. ], batch size: 63, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:45:46,894 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-31 22:45:48,408 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11247.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:45:50,524 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 22:45:54,223 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11251.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:46:00,719 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 22:46:27,507 INFO [train.py:903] (2/4) Epoch 2, batch 4450, loss[loss=0.3515, simple_loss=0.3925, pruned_loss=0.1552, over 18346.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.3909, pruned_loss=0.1575, over 3821927.37 frames. ], batch size: 84, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:46:28,035 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2069, 2.0646, 1.8975, 2.9762, 2.1020, 3.1052, 2.5219, 1.8442], + device='cuda:2'), covar=tensor([0.1019, 0.0805, 0.0504, 0.0566, 0.1076, 0.0249, 0.0970, 0.0787], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0330, 0.0349, 0.0455, 0.0415, 0.0245, 0.0452, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:47:14,189 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.820e+02 1.081e+03 1.372e+03 2.333e+03, threshold=2.162e+03, percent-clipped=5.0 +2023-03-31 22:47:31,605 INFO [train.py:903] (2/4) Epoch 2, batch 4500, loss[loss=0.3826, simple_loss=0.4201, pruned_loss=0.1726, over 18807.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3919, pruned_loss=0.1578, over 3825788.04 frames. 
], batch size: 74, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:47:42,703 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2871, 3.0586, 2.1005, 2.1109, 1.8902, 2.0093, 0.3374, 2.2066], + device='cuda:2'), covar=tensor([0.0480, 0.0325, 0.0347, 0.0466, 0.0760, 0.0576, 0.0973, 0.0718], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0228, 0.0220, 0.0249, 0.0307, 0.0262, 0.0252, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:47:42,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6986, 2.3042, 2.2224, 3.7447, 2.6786, 4.3397, 3.3466, 2.0040], + device='cuda:2'), covar=tensor([0.0996, 0.0767, 0.0463, 0.0440, 0.1002, 0.0150, 0.0783, 0.0762], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0323, 0.0339, 0.0441, 0.0407, 0.0241, 0.0445, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:47:44,609 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-03-31 22:48:12,948 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11362.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:48:34,564 INFO [train.py:903] (2/4) Epoch 2, batch 4550, loss[loss=0.2813, simple_loss=0.3419, pruned_loss=0.1104, over 19853.00 frames. ], tot_loss[loss=0.3526, simple_loss=0.391, pruned_loss=0.1571, over 3824551.93 frames. ], batch size: 52, lr: 3.11e-02, grad_scale: 8.0 +2023-03-31 22:48:45,442 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 22:49:08,232 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 22:49:21,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.316e+02 7.788e+02 1.003e+03 1.220e+03 2.356e+03, threshold=2.005e+03, percent-clipped=1.0 +2023-03-31 22:49:34,106 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-31 22:49:37,110 INFO [train.py:903] (2/4) Epoch 2, batch 4600, loss[loss=0.3011, simple_loss=0.3399, pruned_loss=0.1312, over 19742.00 frames. ], tot_loss[loss=0.3519, simple_loss=0.3903, pruned_loss=0.1568, over 3819964.71 frames. ], batch size: 46, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:37,581 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11477.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:50:39,641 INFO [train.py:903] (2/4) Epoch 2, batch 4650, loss[loss=0.3927, simple_loss=0.4154, pruned_loss=0.185, over 13186.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3904, pruned_loss=0.157, over 3813537.79 frames. ], batch size: 136, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:47,210 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9008, 3.5092, 2.4306, 2.3265, 2.9119, 1.4789, 1.3331, 1.5191], + device='cuda:2'), covar=tensor([0.1394, 0.0365, 0.0773, 0.0655, 0.0607, 0.1220, 0.1239, 0.1064], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0199, 0.0295, 0.0247, 0.0200, 0.0299, 0.0267, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-31 22:50:59,963 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-03-31 22:51:09,697 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11502.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:10,457 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 22:51:15,557 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11507.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:23,518 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6700, 1.5969, 1.4494, 1.9996, 1.6006, 1.7713, 1.5857, 1.7055], + device='cuda:2'), covar=tensor([0.0872, 0.1788, 0.1331, 0.1016, 0.1392, 0.0551, 0.1073, 0.0669], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0373, 0.0290, 0.0257, 0.0321, 0.0261, 0.0276, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:51:26,520 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.039e+02 8.212e+02 1.126e+03 1.409e+03 2.689e+03, threshold=2.252e+03, percent-clipped=6.0 +2023-03-31 22:51:36,266 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.6603, 5.0802, 2.8429, 4.5241, 1.6629, 4.9625, 4.7856, 4.9274], + device='cuda:2'), covar=tensor([0.0441, 0.0957, 0.1878, 0.0535, 0.3249, 0.0788, 0.0541, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0280, 0.0311, 0.0253, 0.0321, 0.0285, 0.0221, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:51:42,427 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-31 22:51:42,947 INFO [train.py:903] (2/4) Epoch 2, batch 4700, loss[loss=0.3566, simple_loss=0.3897, pruned_loss=0.1618, over 19842.00 frames. ], tot_loss[loss=0.353, simple_loss=0.3911, pruned_loss=0.1575, over 3798959.00 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:51:47,971 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:52,281 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11536.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:55,393 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.74 vs. limit=5.0 +2023-03-31 22:52:05,036 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 22:52:08,744 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11550.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:45,449 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2167, 1.2510, 0.8485, 1.0543, 0.9532, 1.1921, 0.0087, 0.3552], + device='cuda:2'), covar=tensor([0.0325, 0.0341, 0.0227, 0.0219, 0.0643, 0.0309, 0.0641, 0.0555], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0221, 0.0246, 0.0298, 0.0264, 0.0251, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:52:46,162 INFO [train.py:903] (2/4) Epoch 2, batch 4750, loss[loss=0.4656, simple_loss=0.4669, pruned_loss=0.2322, over 19647.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3904, pruned_loss=0.1568, over 3799733.19 frames. 
], batch size: 59, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:53:32,497 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+02 7.433e+02 1.012e+03 1.332e+03 3.283e+03, threshold=2.025e+03, percent-clipped=2.0 +2023-03-31 22:53:35,148 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11618.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:53:47,421 INFO [train.py:903] (2/4) Epoch 2, batch 4800, loss[loss=0.2733, simple_loss=0.3358, pruned_loss=0.1055, over 19672.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.3928, pruned_loss=0.1583, over 3804960.51 frames. ], batch size: 53, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:54:04,645 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11643.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:54:06,936 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2447, 2.1947, 1.5116, 1.8427, 1.4044, 1.3880, 0.1643, 0.8962], + device='cuda:2'), covar=tensor([0.0328, 0.0274, 0.0234, 0.0328, 0.0622, 0.0500, 0.0722, 0.0595], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0224, 0.0219, 0.0246, 0.0298, 0.0261, 0.0249, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:54:15,095 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11651.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:19,384 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:49,929 INFO [train.py:903] (2/4) Epoch 2, batch 4850, loss[loss=0.2919, simple_loss=0.3404, pruned_loss=0.1217, over 19764.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.3917, pruned_loss=0.1571, over 3805388.89 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:55:13,213 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0681, 1.0546, 1.7365, 1.2776, 2.3457, 2.1396, 2.5167, 0.8997], + device='cuda:2'), covar=tensor([0.1208, 0.1812, 0.0950, 0.1192, 0.0637, 0.0798, 0.0732, 0.1555], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0383, 0.0353, 0.0356, 0.0419, 0.0328, 0.0490, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 22:55:14,987 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 22:55:28,971 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7136, 4.2871, 2.4774, 3.7878, 1.3436, 3.8562, 3.8474, 4.0374], + device='cuda:2'), covar=tensor([0.0456, 0.0869, 0.1844, 0.0647, 0.3123, 0.0866, 0.0547, 0.0567], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0293, 0.0317, 0.0266, 0.0330, 0.0288, 0.0224, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:55:29,695 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-31 22:55:34,592 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-03-31 22:55:36,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.585e+02 1.008e+03 1.288e+03 2.592e+03, threshold=2.016e+03, percent-clipped=4.0 +2023-03-31 22:55:38,558 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:55:40,643 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 22:55:41,789 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 22:55:51,130 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-03-31 22:55:53,217 INFO [train.py:903] (2/4) Epoch 2, batch 4900, loss[loss=0.3278, simple_loss=0.3699, pruned_loss=0.1428, over 19465.00 frames. ], tot_loss[loss=0.3535, simple_loss=0.3923, pruned_loss=0.1574, over 3808873.77 frames. ], batch size: 49, lr: 3.07e-02, grad_scale: 8.0 +2023-03-31 22:56:13,269 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 22:56:55,833 INFO [train.py:903] (2/4) Epoch 2, batch 4950, loss[loss=0.4646, simple_loss=0.4602, pruned_loss=0.2345, over 13514.00 frames. ], tot_loss[loss=0.3542, simple_loss=0.3926, pruned_loss=0.1579, over 3807033.26 frames. ], batch size: 136, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:57:13,099 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 22:57:32,460 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2029, 1.2771, 0.9983, 0.9950, 1.0541, 1.2910, 0.0268, 0.3664], + device='cuda:2'), covar=tensor([0.0318, 0.0330, 0.0215, 0.0253, 0.0621, 0.0275, 0.0630, 0.0534], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0227, 0.0224, 0.0246, 0.0303, 0.0259, 0.0252, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 22:57:37,546 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 22:57:41,842 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+02 9.257e+02 1.136e+03 1.412e+03 3.441e+03, threshold=2.272e+03, percent-clipped=4.0 +2023-03-31 22:57:57,810 INFO [train.py:903] (2/4) Epoch 2, batch 5000, loss[loss=0.3655, simple_loss=0.4078, pruned_loss=0.1616, over 19606.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3911, pruned_loss=0.1566, over 3813614.45 frames. ], batch size: 57, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:58:04,667 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:58:07,426 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 22:58:16,652 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 22:58:42,524 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11864.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:00,556 INFO [train.py:903] (2/4) Epoch 2, batch 5050, loss[loss=0.3623, simple_loss=0.3974, pruned_loss=0.1636, over 19585.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3912, pruned_loss=0.1578, over 3809702.94 frames. 
], batch size: 52, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 22:59:19,027 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11894.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:36,723 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:38,649 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 22:59:48,011 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 8.836e+02 1.119e+03 1.460e+03 3.605e+03, threshold=2.237e+03, percent-clipped=7.0 +2023-03-31 23:00:03,331 INFO [train.py:903] (2/4) Epoch 2, batch 5100, loss[loss=0.4132, simple_loss=0.4316, pruned_loss=0.1974, over 19599.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3906, pruned_loss=0.1571, over 3805771.01 frames. ], batch size: 57, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 23:00:08,048 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11932.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:16,197 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11938.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:19,220 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 23:00:22,861 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 23:00:26,281 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 23:00:50,341 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11966.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:07,403 INFO [train.py:903] (2/4) Epoch 2, batch 5150, loss[loss=0.3618, simple_loss=0.4006, pruned_loss=0.1615, over 17513.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3893, pruned_loss=0.1557, over 3814062.62 frames. ], batch size: 101, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:01:20,926 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:01:29,908 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11997.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:36,628 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12002.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:44,902 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:54,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 8.254e+02 9.989e+02 1.287e+03 2.673e+03, threshold=1.998e+03, percent-clipped=4.0 +2023-03-31 23:01:56,775 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:02:10,381 INFO [train.py:903] (2/4) Epoch 2, batch 5200, loss[loss=0.3538, simple_loss=0.3987, pruned_loss=0.1545, over 19625.00 frames. ], tot_loss[loss=0.3506, simple_loss=0.3896, pruned_loss=0.1558, over 3817471.73 frames. ], batch size: 57, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:02:25,401 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-03-31 23:02:52,156 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:11,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 23:03:13,777 INFO [train.py:903] (2/4) Epoch 2, batch 5250, loss[loss=0.318, simple_loss=0.3724, pruned_loss=0.1318, over 19604.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3896, pruned_loss=0.1555, over 3818448.15 frames. ], batch size: 61, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:03:32,745 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12094.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:55,634 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12112.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:59,698 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.115e+02 8.666e+02 1.057e+03 1.421e+03 4.195e+03, threshold=2.115e+03, percent-clipped=5.0 +2023-03-31 23:04:02,195 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4038, 1.1173, 1.0535, 1.3136, 1.0213, 1.1932, 1.0994, 1.2720], + device='cuda:2'), covar=tensor([0.0850, 0.1326, 0.1376, 0.0949, 0.1132, 0.0651, 0.0989, 0.0718], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0378, 0.0289, 0.0259, 0.0325, 0.0266, 0.0275, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:04:14,782 INFO [train.py:903] (2/4) Epoch 2, batch 5300, loss[loss=0.3389, simple_loss=0.3949, pruned_loss=0.1415, over 19654.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3897, pruned_loss=0.1555, over 3823201.75 frames. ], batch size: 55, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:04:35,580 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 23:05:13,730 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12176.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:15,932 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12178.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:16,934 INFO [train.py:903] (2/4) Epoch 2, batch 5350, loss[loss=0.377, simple_loss=0.4184, pruned_loss=0.1678, over 19544.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3922, pruned_loss=0.1573, over 3825084.56 frames. ], batch size: 56, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:05:45,073 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1321, 1.1766, 1.9290, 1.4582, 2.6649, 2.7685, 2.8177, 1.1069], + device='cuda:2'), covar=tensor([0.1331, 0.2165, 0.1134, 0.1240, 0.0774, 0.0704, 0.1016, 0.1872], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0388, 0.0349, 0.0350, 0.0420, 0.0339, 0.0493, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:05:53,526 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-03-31 23:05:53,657 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12208.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:06:03,415 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 9.122e+02 1.169e+03 1.506e+03 3.477e+03, threshold=2.338e+03, percent-clipped=13.0 +2023-03-31 23:06:20,776 INFO [train.py:903] (2/4) Epoch 2, batch 5400, loss[loss=0.3785, simple_loss=0.4184, pruned_loss=0.1693, over 18876.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3912, pruned_loss=0.1564, over 3826040.68 frames. ], batch size: 74, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:06:41,860 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-31 23:07:06,519 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12265.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:23,451 INFO [train.py:903] (2/4) Epoch 2, batch 5450, loss[loss=0.3775, simple_loss=0.4053, pruned_loss=0.1748, over 19489.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3905, pruned_loss=0.156, over 3814173.40 frames. ], batch size: 64, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:07:27,079 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12282.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:36,414 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12290.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:39,792 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:01,539 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12310.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:08,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 7.539e+02 9.464e+02 1.137e+03 1.898e+03, threshold=1.893e+03, percent-clipped=0.0 +2023-03-31 23:08:17,824 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12323.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:23,991 INFO [train.py:903] (2/4) Epoch 2, batch 5500, loss[loss=0.2979, simple_loss=0.3386, pruned_loss=0.1286, over 19747.00 frames. ], tot_loss[loss=0.3485, simple_loss=0.3883, pruned_loss=0.1543, over 3819462.83 frames. ], batch size: 46, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:08:47,651 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12346.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:49,884 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 23:09:14,245 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12368.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:28,523 INFO [train.py:903] (2/4) Epoch 2, batch 5550, loss[loss=0.3097, simple_loss=0.3677, pruned_loss=0.1259, over 19589.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.387, pruned_loss=0.1536, over 3824650.27 frames. ], batch size: 52, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:09:36,195 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-03-31 23:09:46,119 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12393.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:50,696 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12397.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:14,298 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+02 8.577e+02 1.018e+03 1.198e+03 2.956e+03, threshold=2.037e+03, percent-clipped=3.0 +2023-03-31 23:10:25,893 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 23:10:27,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:29,522 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7911, 1.6289, 1.7170, 2.4183, 1.6322, 2.0729, 1.9359, 1.5247], + device='cuda:2'), covar=tensor([0.0757, 0.0631, 0.0412, 0.0289, 0.0628, 0.0252, 0.0766, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0384, 0.0351, 0.0366, 0.0475, 0.0433, 0.0268, 0.0460, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:10:31,329 INFO [train.py:903] (2/4) Epoch 2, batch 5600, loss[loss=0.422, simple_loss=0.4432, pruned_loss=0.2005, over 14013.00 frames. ], tot_loss[loss=0.3461, simple_loss=0.386, pruned_loss=0.1531, over 3814786.92 frames. ], batch size: 138, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:10:35,321 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:41,876 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12438.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:06,561 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12457.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:11,982 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12461.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:16,287 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12464.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:33,650 INFO [train.py:903] (2/4) Epoch 2, batch 5650, loss[loss=0.3247, simple_loss=0.3837, pruned_loss=0.1328, over 19620.00 frames. ], tot_loss[loss=0.3452, simple_loss=0.3858, pruned_loss=0.1523, over 3826377.81 frames. 
], batch size: 57, lr: 2.99e-02, grad_scale: 8.0 +2023-03-31 23:11:49,525 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2260, 3.5958, 3.7603, 3.6277, 1.2847, 3.3448, 3.0756, 3.3304], + device='cuda:2'), covar=tensor([0.0519, 0.0484, 0.0488, 0.0337, 0.3145, 0.0254, 0.0405, 0.0921], + device='cuda:2'), in_proj_covar=tensor([0.0314, 0.0298, 0.0408, 0.0303, 0.0442, 0.0200, 0.0280, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 23:12:19,585 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.393e+02 8.152e+02 1.040e+03 1.340e+03 2.595e+03, threshold=2.080e+03, percent-clipped=4.0 +2023-03-31 23:12:21,240 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4823, 1.3821, 1.2637, 2.0993, 1.5742, 2.0525, 1.5839, 1.0625], + device='cuda:2'), covar=tensor([0.0988, 0.0847, 0.0714, 0.0408, 0.0752, 0.0239, 0.1104, 0.1019], + device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0348, 0.0367, 0.0475, 0.0435, 0.0270, 0.0460, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:12:21,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 23:12:26,951 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 23:12:30,043 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7123, 1.5647, 1.6778, 2.1656, 3.3006, 1.4905, 2.0156, 3.3538], + device='cuda:2'), covar=tensor([0.0218, 0.1942, 0.1785, 0.1047, 0.0331, 0.1623, 0.0930, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0296, 0.0276, 0.0263, 0.0256, 0.0305, 0.0253, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:12:35,557 INFO [train.py:903] (2/4) Epoch 2, batch 5700, loss[loss=0.3499, simple_loss=0.4024, pruned_loss=0.1487, over 19726.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3877, pruned_loss=0.1539, over 3825110.35 frames. ], batch size: 63, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:02,416 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:06,840 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12553.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:32,361 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12574.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:37,668 INFO [train.py:903] (2/4) Epoch 2, batch 5750, loss[loss=0.4073, simple_loss=0.4289, pruned_loss=0.1928, over 17472.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3873, pruned_loss=0.1533, over 3822324.73 frames. ], batch size: 101, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:38,060 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12579.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:42,209 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 23:13:50,269 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 23:13:56,866 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-03-31 23:14:10,442 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:14:23,847 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+02 8.083e+02 9.516e+02 1.322e+03 3.330e+03, threshold=1.903e+03, percent-clipped=5.0 +2023-03-31 23:14:30,014 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0771, 1.0755, 1.5999, 1.2090, 2.2696, 2.0706, 2.3960, 0.9127], + device='cuda:2'), covar=tensor([0.1332, 0.1987, 0.1029, 0.1295, 0.0757, 0.0969, 0.0924, 0.1694], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0397, 0.0361, 0.0363, 0.0432, 0.0352, 0.0510, 0.0382], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:14:40,960 INFO [train.py:903] (2/4) Epoch 2, batch 5800, loss[loss=0.413, simple_loss=0.4303, pruned_loss=0.1978, over 19670.00 frames. ], tot_loss[loss=0.3494, simple_loss=0.3889, pruned_loss=0.1549, over 3809586.05 frames. ], batch size: 60, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:10,958 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:17,360 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.20 vs. limit=5.0 +2023-03-31 23:15:42,701 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12678.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:43,506 INFO [train.py:903] (2/4) Epoch 2, batch 5850, loss[loss=0.3345, simple_loss=0.3637, pruned_loss=0.1526, over 19408.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3887, pruned_loss=0.1549, over 3814758.59 frames. ], batch size: 48, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:47,125 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12681.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:58,783 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2345, 2.6721, 2.0269, 2.2828, 1.8201, 1.8377, 0.6449, 2.2677], + device='cuda:2'), covar=tensor([0.0442, 0.0448, 0.0432, 0.0431, 0.0858, 0.0752, 0.0819, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0228, 0.0229, 0.0239, 0.0304, 0.0255, 0.0250, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-31 23:16:17,536 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-31 23:16:18,413 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12706.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:30,351 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 7.980e+02 9.827e+02 1.217e+03 2.781e+03, threshold=1.965e+03, percent-clipped=6.0 +2023-03-31 23:16:31,934 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:46,105 INFO [train.py:903] (2/4) Epoch 2, batch 5900, loss[loss=0.2948, simple_loss=0.3332, pruned_loss=0.1282, over 19282.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3889, pruned_loss=0.1547, over 3817872.64 frames. ], batch size: 44, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:16:49,437 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-03-31 23:17:03,716 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:17:08,461 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5959, 1.4018, 1.7525, 2.7419, 4.2900, 1.3488, 2.2861, 4.0524], + device='cuda:2'), covar=tensor([0.0342, 0.2798, 0.2344, 0.1349, 0.0368, 0.2214, 0.1182, 0.0405], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0306, 0.0293, 0.0275, 0.0266, 0.0323, 0.0265, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:17:11,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 23:17:22,421 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 23:17:27,101 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-03-31 23:17:49,031 INFO [train.py:903] (2/4) Epoch 2, batch 5950, loss[loss=0.3227, simple_loss=0.3579, pruned_loss=0.1438, over 19741.00 frames. ], tot_loss[loss=0.3485, simple_loss=0.3884, pruned_loss=0.1542, over 3813583.71 frames. ], batch size: 51, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:17:52,889 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12781.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:25,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12808.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:26,846 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12809.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:34,473 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+02 9.090e+02 1.139e+03 1.452e+03 3.383e+03, threshold=2.279e+03, percent-clipped=8.0 +2023-03-31 23:18:49,058 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.99 vs. limit=5.0 +2023-03-31 23:18:51,784 INFO [train.py:903] (2/4) Epoch 2, batch 6000, loss[loss=0.3353, simple_loss=0.374, pruned_loss=0.1483, over 19846.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3878, pruned_loss=0.1544, over 3818894.11 frames. ], batch size: 52, lr: 2.95e-02, grad_scale: 8.0 +2023-03-31 23:18:51,784 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 23:19:06,013 INFO [train.py:937] (2/4) Epoch 2, validation: loss=0.246, simple_loss=0.337, pruned_loss=0.07745, over 944034.00 frames. +2023-03-31 23:19:06,014 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17686MB +2023-03-31 23:19:13,304 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:19:57,377 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1787, 0.9052, 0.9862, 1.5486, 1.2149, 1.2541, 1.3031, 1.1955], + device='cuda:2'), covar=tensor([0.0928, 0.1433, 0.1389, 0.0777, 0.1214, 0.1077, 0.1131, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0301, 0.0289, 0.0318, 0.0340, 0.0263, 0.0306, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-31 23:20:08,252 INFO [train.py:903] (2/4) Epoch 2, batch 6050, loss[loss=0.3492, simple_loss=0.3989, pruned_loss=0.1497, over 19661.00 frames. 
], tot_loss[loss=0.3505, simple_loss=0.3894, pruned_loss=0.1558, over 3812340.24 frames. ], batch size: 58, lr: 2.95e-02, grad_scale: 4.0 +2023-03-31 23:20:56,514 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+02 7.840e+02 9.937e+02 1.323e+03 8.220e+03, threshold=1.987e+03, percent-clipped=9.0 +2023-03-31 23:21:03,457 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12923.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:21:10,259 INFO [train.py:903] (2/4) Epoch 2, batch 6100, loss[loss=0.3703, simple_loss=0.41, pruned_loss=0.1653, over 19690.00 frames. ], tot_loss[loss=0.3465, simple_loss=0.3865, pruned_loss=0.1533, over 3815304.38 frames. ], batch size: 59, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:11,869 INFO [train.py:903] (2/4) Epoch 2, batch 6150, loss[loss=0.3053, simple_loss=0.3485, pruned_loss=0.131, over 19310.00 frames. ], tot_loss[loss=0.3484, simple_loss=0.3878, pruned_loss=0.1545, over 3819664.04 frames. ], batch size: 44, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:13,735 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-31 23:22:42,909 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 23:23:00,767 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+02 7.749e+02 1.029e+03 1.287e+03 3.235e+03, threshold=2.059e+03, percent-clipped=7.0 +2023-03-31 23:23:13,347 INFO [train.py:903] (2/4) Epoch 2, batch 6200, loss[loss=0.3323, simple_loss=0.3805, pruned_loss=0.1421, over 19535.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.387, pruned_loss=0.1536, over 3820485.48 frames. ], batch size: 56, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:15,403 INFO [train.py:903] (2/4) Epoch 2, batch 6250, loss[loss=0.3005, simple_loss=0.3598, pruned_loss=0.1207, over 19743.00 frames. ], tot_loss[loss=0.3466, simple_loss=0.3872, pruned_loss=0.153, over 3819813.64 frames. ], batch size: 63, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:37,664 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-31 23:24:47,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 23:25:04,359 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.358e+02 8.552e+02 1.023e+03 1.333e+03 3.705e+03, threshold=2.046e+03, percent-clipped=2.0 +2023-03-31 23:25:08,194 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0326, 0.9773, 1.4877, 1.1344, 1.9305, 1.7214, 1.9505, 0.5059], + device='cuda:2'), covar=tensor([0.1491, 0.2195, 0.1129, 0.1434, 0.0764, 0.1138, 0.0773, 0.1969], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0399, 0.0362, 0.0360, 0.0437, 0.0347, 0.0504, 0.0379], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:25:13,052 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13125.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:25:17,708 INFO [train.py:903] (2/4) Epoch 2, batch 6300, loss[loss=0.2837, simple_loss=0.3441, pruned_loss=0.1117, over 19745.00 frames. ], tot_loss[loss=0.3457, simple_loss=0.3861, pruned_loss=0.1527, over 3819988.12 frames. 
], batch size: 51, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:19,874 INFO [train.py:903] (2/4) Epoch 2, batch 6350, loss[loss=0.3328, simple_loss=0.3724, pruned_loss=0.1466, over 19850.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3851, pruned_loss=0.1513, over 3836557.94 frames. ], batch size: 52, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:20,325 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13179.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:26:49,717 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13204.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:27:04,771 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-31 23:27:06,688 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.673e+02 8.539e+02 1.071e+03 1.407e+03 4.202e+03, threshold=2.141e+03, percent-clipped=6.0 +2023-03-31 23:27:19,359 INFO [train.py:903] (2/4) Epoch 2, batch 6400, loss[loss=0.3731, simple_loss=0.4079, pruned_loss=0.1691, over 18432.00 frames. ], tot_loss[loss=0.3449, simple_loss=0.3861, pruned_loss=0.1519, over 3829090.32 frames. ], batch size: 83, lr: 2.92e-02, grad_scale: 8.0 +2023-03-31 23:27:33,750 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13240.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:28:22,172 INFO [train.py:903] (2/4) Epoch 2, batch 6450, loss[loss=0.3378, simple_loss=0.3874, pruned_loss=0.1441, over 19663.00 frames. ], tot_loss[loss=0.3443, simple_loss=0.3859, pruned_loss=0.1513, over 3825444.26 frames. ], batch size: 58, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:28:45,115 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1686, 1.0229, 1.1884, 0.4713, 2.5126, 2.3874, 2.1963, 2.4845], + device='cuda:2'), covar=tensor([0.1223, 0.2589, 0.2659, 0.2234, 0.0308, 0.0166, 0.0322, 0.0162], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0276, 0.0323, 0.0277, 0.0189, 0.0108, 0.0194, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 23:29:07,644 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2818, 1.7155, 1.3328, 1.3983, 1.5589, 0.8853, 1.0618, 1.4753], + device='cuda:2'), covar=tensor([0.0623, 0.0387, 0.0672, 0.0412, 0.0398, 0.1014, 0.0591, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0216, 0.0307, 0.0256, 0.0208, 0.0310, 0.0274, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-31 23:29:09,603 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 23:29:10,633 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+02 7.874e+02 9.907e+02 1.203e+03 2.411e+03, threshold=1.981e+03, percent-clipped=4.0 +2023-03-31 23:29:24,102 INFO [train.py:903] (2/4) Epoch 2, batch 6500, loss[loss=0.3581, simple_loss=0.3928, pruned_loss=0.1617, over 19488.00 frames. ], tot_loss[loss=0.3443, simple_loss=0.3855, pruned_loss=0.1515, over 3819081.71 frames. 
], batch size: 49, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:29,966 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4561, 3.8287, 4.0303, 4.0284, 1.4226, 3.5585, 3.2297, 3.5684], + device='cuda:2'), covar=tensor([0.0580, 0.0496, 0.0499, 0.0308, 0.3282, 0.0262, 0.0431, 0.0963], + device='cuda:2'), in_proj_covar=tensor([0.0321, 0.0302, 0.0406, 0.0307, 0.0445, 0.0204, 0.0274, 0.0408], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-31 23:29:30,894 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 23:30:26,693 INFO [train.py:903] (2/4) Epoch 2, batch 6550, loss[loss=0.3458, simple_loss=0.3947, pruned_loss=0.1485, over 19681.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3866, pruned_loss=0.1521, over 3801716.66 frames. ], batch size: 60, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:31:14,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+02 7.924e+02 9.507e+02 1.218e+03 2.525e+03, threshold=1.901e+03, percent-clipped=3.0 +2023-03-31 23:31:27,083 INFO [train.py:903] (2/4) Epoch 2, batch 6600, loss[loss=0.4007, simple_loss=0.4311, pruned_loss=0.1851, over 19559.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.388, pruned_loss=0.1531, over 3817570.50 frames. ], batch size: 61, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:31:41,930 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7203, 1.4357, 1.3430, 2.0178, 1.5001, 2.0602, 2.0159, 1.8926], + device='cuda:2'), covar=tensor([0.0914, 0.1318, 0.1466, 0.1186, 0.1333, 0.0808, 0.1213, 0.0728], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0304, 0.0289, 0.0331, 0.0334, 0.0265, 0.0309, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-31 23:31:52,571 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6094, 1.1059, 1.5074, 1.2055, 2.6269, 3.4586, 3.3974, 3.6594], + device='cuda:2'), covar=tensor([0.1276, 0.2660, 0.2658, 0.1985, 0.0404, 0.0130, 0.0194, 0.0102], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0272, 0.0324, 0.0275, 0.0187, 0.0107, 0.0191, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 23:32:29,098 INFO [train.py:903] (2/4) Epoch 2, batch 6650, loss[loss=0.4612, simple_loss=0.4584, pruned_loss=0.2319, over 12990.00 frames. ], tot_loss[loss=0.3495, simple_loss=0.3896, pruned_loss=0.1547, over 3817275.51 frames. ], batch size: 135, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:32:51,543 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:08,242 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-31 23:33:17,439 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 9.096e+02 1.181e+03 1.471e+03 3.411e+03, threshold=2.361e+03, percent-clipped=10.0 +2023-03-31 23:33:21,147 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13521.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:29,653 INFO [train.py:903] (2/4) Epoch 2, batch 6700, loss[loss=0.3776, simple_loss=0.4114, pruned_loss=0.1719, over 19775.00 frames. ], tot_loss[loss=0.3507, simple_loss=0.3904, pruned_loss=0.1555, over 3818112.43 frames. 
], batch size: 56, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:34:27,763 INFO [train.py:903] (2/4) Epoch 2, batch 6750, loss[loss=0.3777, simple_loss=0.4068, pruned_loss=0.1743, over 19590.00 frames. ], tot_loss[loss=0.3509, simple_loss=0.3904, pruned_loss=0.1557, over 3811806.93 frames. ], batch size: 52, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:35:12,680 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.052e+02 8.174e+02 9.996e+02 1.293e+03 2.664e+03, threshold=1.999e+03, percent-clipped=1.0 +2023-03-31 23:35:25,231 INFO [train.py:903] (2/4) Epoch 2, batch 6800, loss[loss=0.3645, simple_loss=0.3949, pruned_loss=0.1671, over 19787.00 frames. ], tot_loss[loss=0.3476, simple_loss=0.3878, pruned_loss=0.1537, over 3823919.62 frames. ], batch size: 47, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:36:10,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 23:36:10,804 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 23:36:13,729 INFO [train.py:903] (2/4) Epoch 3, batch 0, loss[loss=0.3695, simple_loss=0.4117, pruned_loss=0.1637, over 17305.00 frames. ], tot_loss[loss=0.3695, simple_loss=0.4117, pruned_loss=0.1637, over 17305.00 frames. ], batch size: 102, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:36:13,730 INFO [train.py:928] (2/4) Computing validation loss +2023-03-31 23:36:24,494 INFO [train.py:937] (2/4) Epoch 3, validation: loss=0.241, simple_loss=0.3346, pruned_loss=0.07374, over 944034.00 frames. +2023-03-31 23:36:24,495 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17686MB +2023-03-31 23:36:37,410 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 23:37:25,517 INFO [train.py:903] (2/4) Epoch 3, batch 50, loss[loss=0.3446, simple_loss=0.3996, pruned_loss=0.1448, over 19599.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.3783, pruned_loss=0.1453, over 874397.88 frames. ], batch size: 61, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:37:38,312 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 7.787e+02 9.326e+02 1.115e+03 3.182e+03, threshold=1.865e+03, percent-clipped=5.0 +2023-03-31 23:37:58,984 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 23:38:24,031 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13755.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:38:25,859 INFO [train.py:903] (2/4) Epoch 3, batch 100, loss[loss=0.3618, simple_loss=0.401, pruned_loss=0.1613, over 19646.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.383, pruned_loss=0.1475, over 1521615.55 frames. ], batch size: 58, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:38:35,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-03-31 23:39:17,302 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4190, 1.0611, 1.4606, 0.8725, 2.6078, 3.1522, 3.1051, 3.3041], + device='cuda:2'), covar=tensor([0.1271, 0.2661, 0.2598, 0.2069, 0.0386, 0.0107, 0.0192, 0.0106], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0276, 0.0325, 0.0275, 0.0189, 0.0104, 0.0193, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 23:39:26,999 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-31 23:39:27,420 INFO [train.py:903] (2/4) Epoch 3, batch 150, loss[loss=0.3542, simple_loss=0.3918, pruned_loss=0.1583, over 18194.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3811, pruned_loss=0.1466, over 2045500.00 frames. ], batch size: 83, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:39:40,068 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+02 7.521e+02 1.009e+03 1.351e+03 3.530e+03, threshold=2.018e+03, percent-clipped=10.0 +2023-03-31 23:40:28,882 INFO [train.py:903] (2/4) Epoch 3, batch 200, loss[loss=0.4106, simple_loss=0.4338, pruned_loss=0.1937, over 18176.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3812, pruned_loss=0.1464, over 2449590.62 frames. ], batch size: 83, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:40:28,933 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 23:41:28,996 INFO [train.py:903] (2/4) Epoch 3, batch 250, loss[loss=0.3528, simple_loss=0.3933, pruned_loss=0.1562, over 19633.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3838, pruned_loss=0.1487, over 2761792.24 frames. ], batch size: 60, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:41:36,400 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6049, 1.4660, 1.2584, 1.6115, 1.4546, 1.5736, 1.3588, 1.6329], + device='cuda:2'), covar=tensor([0.0951, 0.1703, 0.1495, 0.1021, 0.1315, 0.0627, 0.1086, 0.0703], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0389, 0.0292, 0.0259, 0.0327, 0.0274, 0.0281, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:41:44,257 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+02 8.729e+02 1.056e+03 1.304e+03 3.760e+03, threshold=2.113e+03, percent-clipped=6.0 +2023-03-31 23:42:33,082 INFO [train.py:903] (2/4) Epoch 3, batch 300, loss[loss=0.2992, simple_loss=0.3512, pruned_loss=0.1236, over 19679.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3813, pruned_loss=0.1469, over 3003619.65 frames. ], batch size: 53, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:43:34,502 INFO [train.py:903] (2/4) Epoch 3, batch 350, loss[loss=0.3258, simple_loss=0.3771, pruned_loss=0.1372, over 19586.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3819, pruned_loss=0.1473, over 3175835.25 frames. ], batch size: 61, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:43:40,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-03-31 23:43:43,604 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6384, 1.5957, 1.4239, 1.9557, 3.4141, 1.4427, 2.0813, 3.4155], + device='cuda:2'), covar=tensor([0.0259, 0.2103, 0.2074, 0.1372, 0.0348, 0.1883, 0.1009, 0.0311], + device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0310, 0.0295, 0.0277, 0.0275, 0.0326, 0.0270, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:43:46,965 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 7.628e+02 9.853e+02 1.217e+03 3.369e+03, threshold=1.971e+03, percent-clipped=3.0 +2023-03-31 23:44:04,071 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5857, 1.1945, 1.4664, 1.1545, 2.7147, 3.3135, 3.3295, 3.5799], + device='cuda:2'), covar=tensor([0.1313, 0.2798, 0.2877, 0.2039, 0.0439, 0.0158, 0.0213, 0.0132], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0277, 0.0328, 0.0272, 0.0189, 0.0105, 0.0191, 0.0109], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-31 23:44:06,784 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-03-31 23:44:34,939 INFO [train.py:903] (2/4) Epoch 3, batch 400, loss[loss=0.3247, simple_loss=0.371, pruned_loss=0.1392, over 18756.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3794, pruned_loss=0.1463, over 3310565.11 frames. ], batch size: 74, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:45:17,164 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14090.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:27,290 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14099.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:36,304 INFO [train.py:903] (2/4) Epoch 3, batch 450, loss[loss=0.3624, simple_loss=0.4043, pruned_loss=0.1602, over 18761.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.3807, pruned_loss=0.1467, over 3420741.41 frames. ], batch size: 74, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:45:52,385 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+02 8.213e+02 1.015e+03 1.206e+03 3.609e+03, threshold=2.029e+03, percent-clipped=6.0 +2023-03-31 23:45:55,985 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14121.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:46:10,177 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 23:46:11,139 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-03-31 23:46:14,955 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14138.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 23:46:25,079 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2479, 1.1991, 1.9884, 1.4488, 2.6539, 2.4814, 2.9637, 1.1935], + device='cuda:2'), covar=tensor([0.1345, 0.2104, 0.1145, 0.1191, 0.0768, 0.0818, 0.0903, 0.1861], + device='cuda:2'), in_proj_covar=tensor([0.0385, 0.0423, 0.0388, 0.0373, 0.0456, 0.0369, 0.0532, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:46:38,896 INFO [train.py:903] (2/4) Epoch 3, batch 500, loss[loss=0.2767, simple_loss=0.3257, pruned_loss=0.1138, over 19013.00 frames. ], tot_loss[loss=0.3386, simple_loss=0.3816, pruned_loss=0.1479, over 3506219.48 frames. ], batch size: 42, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:47:38,980 INFO [train.py:903] (2/4) Epoch 3, batch 550, loss[loss=0.4295, simple_loss=0.4395, pruned_loss=0.2098, over 12751.00 frames. ], tot_loss[loss=0.3403, simple_loss=0.3831, pruned_loss=0.1488, over 3559514.60 frames. ], batch size: 136, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:47:47,514 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14214.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:47:51,335 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+02 8.063e+02 9.949e+02 1.307e+03 2.222e+03, threshold=1.990e+03, percent-clipped=3.0 +2023-03-31 23:48:38,812 INFO [train.py:903] (2/4) Epoch 3, batch 600, loss[loss=0.3686, simple_loss=0.4011, pruned_loss=0.168, over 19465.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3828, pruned_loss=0.1488, over 3626050.24 frames. ], batch size: 64, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:16,719 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 23:49:39,173 INFO [train.py:903] (2/4) Epoch 3, batch 650, loss[loss=0.3576, simple_loss=0.3961, pruned_loss=0.1595, over 19598.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3826, pruned_loss=0.1478, over 3676385.53 frames. ], batch size: 61, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:54,626 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.103e+02 8.257e+02 1.098e+03 1.322e+03 3.191e+03, threshold=2.197e+03, percent-clipped=10.0 +2023-03-31 23:50:41,500 INFO [train.py:903] (2/4) Epoch 3, batch 700, loss[loss=0.344, simple_loss=0.3897, pruned_loss=0.1491, over 18295.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3825, pruned_loss=0.1475, over 3696239.16 frames. ], batch size: 84, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:43,792 INFO [train.py:903] (2/4) Epoch 3, batch 750, loss[loss=0.318, simple_loss=0.3689, pruned_loss=0.1335, over 19824.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3822, pruned_loss=0.1471, over 3720711.47 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:56,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.798e+02 9.551e+02 1.191e+03 2.807e+03, threshold=1.910e+03, percent-clipped=6.0 +2023-03-31 23:52:11,605 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14431.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:14,917 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14434.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:24,845 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0695, 2.0891, 1.7541, 3.2751, 2.2259, 3.3215, 2.8422, 1.9661], + device='cuda:2'), covar=tensor([0.0833, 0.0583, 0.0408, 0.0342, 0.0740, 0.0156, 0.0635, 0.0542], + device='cuda:2'), in_proj_covar=tensor([0.0422, 0.0392, 0.0401, 0.0530, 0.0475, 0.0299, 0.0498, 0.0399], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:52:44,745 INFO [train.py:903] (2/4) Epoch 3, batch 800, loss[loss=0.3111, simple_loss=0.3434, pruned_loss=0.1394, over 16521.00 frames. ], tot_loss[loss=0.3363, simple_loss=0.3808, pruned_loss=0.146, over 3728306.63 frames. ], batch size: 36, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:52:53,889 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14465.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:54,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:52:59,813 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14470.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:15,266 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14482.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 23:53:31,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14495.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:42,679 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14505.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:44,743 INFO [train.py:903] (2/4) Epoch 3, batch 850, loss[loss=0.3336, simple_loss=0.3609, pruned_loss=0.1532, over 19275.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3811, pruned_loss=0.1471, over 3747529.45 frames. ], batch size: 44, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:53:58,375 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.292e+02 8.633e+02 1.105e+03 1.534e+03 3.114e+03, threshold=2.210e+03, percent-clipped=11.0 +2023-03-31 23:54:20,419 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-03-31 23:54:32,139 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-03-31 23:54:35,834 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:54:40,442 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6885, 1.5187, 1.3594, 1.7246, 1.5698, 1.7477, 1.4864, 1.7392], + device='cuda:2'), covar=tensor([0.0829, 0.1340, 0.1202, 0.0771, 0.1078, 0.0481, 0.0880, 0.0558], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0378, 0.0290, 0.0257, 0.0321, 0.0270, 0.0281, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:54:45,552 INFO [train.py:903] (2/4) Epoch 3, batch 900, loss[loss=0.2915, simple_loss=0.3465, pruned_loss=0.1183, over 19850.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3807, pruned_loss=0.1466, over 3765353.41 frames. ], batch size: 52, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:55:15,233 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14580.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:55:33,842 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14597.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:55:46,728 INFO [train.py:903] (2/4) Epoch 3, batch 950, loss[loss=0.3104, simple_loss=0.3595, pruned_loss=0.1306, over 19803.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3806, pruned_loss=0.1461, over 3784029.53 frames. ], batch size: 49, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:55:46,741 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 23:56:00,996 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+02 7.381e+02 9.246e+02 1.263e+03 4.500e+03, threshold=1.849e+03, percent-clipped=5.0 +2023-03-31 23:56:46,902 INFO [train.py:903] (2/4) Epoch 3, batch 1000, loss[loss=0.3173, simple_loss=0.3763, pruned_loss=0.1291, over 19778.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3827, pruned_loss=0.1477, over 3774725.87 frames. ], batch size: 56, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:57:13,176 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-31 23:57:38,714 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 23:57:47,678 INFO [train.py:903] (2/4) Epoch 3, batch 1050, loss[loss=0.3877, simple_loss=0.4236, pruned_loss=0.1759, over 18909.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3815, pruned_loss=0.1465, over 3786037.52 frames. ], batch size: 75, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:57:55,773 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7228, 1.2070, 1.5087, 1.9214, 3.3146, 1.2700, 2.1233, 3.3682], + device='cuda:2'), covar=tensor([0.0288, 0.2584, 0.2258, 0.1398, 0.0363, 0.2063, 0.1049, 0.0374], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0306, 0.0294, 0.0282, 0.0279, 0.0323, 0.0267, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:58:01,066 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.305e+02 8.951e+02 1.118e+03 2.421e+03, threshold=1.790e+03, percent-clipped=2.0 +2023-03-31 23:58:17,608 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-03-31 23:58:39,313 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0448, 1.0071, 1.4871, 1.1397, 1.8042, 1.7025, 1.9832, 0.5235], + device='cuda:2'), covar=tensor([0.1422, 0.2195, 0.1159, 0.1327, 0.0820, 0.1108, 0.0770, 0.1929], + device='cuda:2'), in_proj_covar=tensor([0.0398, 0.0426, 0.0397, 0.0385, 0.0465, 0.0378, 0.0551, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-31 23:58:48,364 INFO [train.py:903] (2/4) Epoch 3, batch 1100, loss[loss=0.3836, simple_loss=0.4111, pruned_loss=0.1781, over 13100.00 frames. ], tot_loss[loss=0.335, simple_loss=0.3798, pruned_loss=0.1451, over 3795641.45 frames. ], batch size: 137, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:49,831 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14758.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:10,902 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14775.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:20,667 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-31 23:59:47,773 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14805.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:49,457 INFO [train.py:903] (2/4) Epoch 3, batch 1150, loss[loss=0.3068, simple_loss=0.3438, pruned_loss=0.1348, over 19796.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.3781, pruned_loss=0.1445, over 3798429.38 frames. ], batch size: 46, lr: 2.64e-02, grad_scale: 4.0 +2023-04-01 00:00:03,884 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+02 7.465e+02 1.021e+03 1.238e+03 3.548e+03, threshold=2.043e+03, percent-clipped=7.0 +2023-04-01 00:00:16,789 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:23,667 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:40,914 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:45,743 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:00:49,939 INFO [train.py:903] (2/4) Epoch 3, batch 1200, loss[loss=0.3192, simple_loss=0.3745, pruned_loss=0.1319, over 18773.00 frames. ], tot_loss[loss=0.3328, simple_loss=0.3777, pruned_loss=0.144, over 3809114.95 frames. ], batch size: 74, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:00:55,832 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:15,139 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14878.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:01:17,859 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 00:01:31,308 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14890.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:51,491 INFO [train.py:903] (2/4) Epoch 3, batch 1250, loss[loss=0.3539, simple_loss=0.4033, pruned_loss=0.1523, over 19544.00 frames. 
], tot_loss[loss=0.3337, simple_loss=0.3782, pruned_loss=0.1446, over 3815240.03 frames. ], batch size: 56, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:02:05,941 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+02 7.701e+02 1.002e+03 1.250e+03 2.941e+03, threshold=2.004e+03, percent-clipped=3.0 +2023-04-01 00:02:20,103 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4540, 4.4499, 5.2322, 5.0965, 2.0254, 4.7326, 4.3460, 4.6959], + device='cuda:2'), covar=tensor([0.0448, 0.0520, 0.0290, 0.0205, 0.2642, 0.0178, 0.0262, 0.0665], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0315, 0.0448, 0.0343, 0.0466, 0.0234, 0.0300, 0.0436], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 00:02:42,272 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. limit=2.0 +2023-04-01 00:02:53,118 INFO [train.py:903] (2/4) Epoch 3, batch 1300, loss[loss=0.325, simple_loss=0.3828, pruned_loss=0.1336, over 19760.00 frames. ], tot_loss[loss=0.3318, simple_loss=0.3771, pruned_loss=0.1433, over 3812488.75 frames. ], batch size: 56, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:02,122 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:03:54,919 INFO [train.py:903] (2/4) Epoch 3, batch 1350, loss[loss=0.2586, simple_loss=0.3149, pruned_loss=0.1012, over 19469.00 frames. ], tot_loss[loss=0.3322, simple_loss=0.377, pruned_loss=0.1437, over 3828488.31 frames. ], batch size: 49, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:04:08,775 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2119, 1.2255, 1.8097, 1.4741, 2.2836, 2.0818, 2.3882, 0.8515], + device='cuda:2'), covar=tensor([0.1292, 0.2108, 0.1132, 0.1156, 0.0752, 0.0954, 0.0784, 0.1815], + device='cuda:2'), in_proj_covar=tensor([0.0399, 0.0435, 0.0402, 0.0386, 0.0468, 0.0378, 0.0552, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:04:10,656 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+02 8.351e+02 9.883e+02 1.225e+03 3.360e+03, threshold=1.977e+03, percent-clipped=2.0 +2023-04-01 00:04:21,275 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0227, 4.8494, 5.8263, 5.7166, 1.9380, 5.3679, 4.8375, 5.1860], + device='cuda:2'), covar=tensor([0.0507, 0.0405, 0.0431, 0.0239, 0.3129, 0.0186, 0.0260, 0.0897], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0308, 0.0443, 0.0334, 0.0460, 0.0232, 0.0293, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 00:04:57,767 INFO [train.py:903] (2/4) Epoch 3, batch 1400, loss[loss=0.3659, simple_loss=0.3984, pruned_loss=0.1667, over 19594.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.3768, pruned_loss=0.1431, over 3832591.37 frames. ], batch size: 57, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:05:23,838 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 00:05:53,868 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15102.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:05:59,128 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-01 00:06:00,259 INFO [train.py:903] (2/4) Epoch 3, batch 1450, loss[loss=0.3477, simple_loss=0.3983, pruned_loss=0.1486, over 17381.00 frames. ], tot_loss[loss=0.3318, simple_loss=0.3775, pruned_loss=0.143, over 3839289.79 frames. ], batch size: 101, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:06:13,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+02 7.923e+02 9.351e+02 1.150e+03 2.880e+03, threshold=1.870e+03, percent-clipped=3.0 +2023-04-01 00:06:49,295 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:07:01,535 INFO [train.py:903] (2/4) Epoch 3, batch 1500, loss[loss=0.3796, simple_loss=0.4195, pruned_loss=0.1698, over 19678.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3794, pruned_loss=0.1445, over 3815814.59 frames. ], batch size: 60, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:07:20,419 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15171.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:03,674 INFO [train.py:903] (2/4) Epoch 3, batch 1550, loss[loss=0.3395, simple_loss=0.3762, pruned_loss=0.1514, over 19774.00 frames. ], tot_loss[loss=0.3323, simple_loss=0.3781, pruned_loss=0.1432, over 3829898.59 frames. ], batch size: 54, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:08:12,805 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:18,432 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:20,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.825e+02 9.432e+02 1.149e+03 3.008e+03, threshold=1.886e+03, percent-clipped=3.0 +2023-04-01 00:08:21,853 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:24,498 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 00:08:52,273 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:09:08,812 INFO [train.py:903] (2/4) Epoch 3, batch 1600, loss[loss=0.3698, simple_loss=0.4088, pruned_loss=0.1654, over 17429.00 frames. ], tot_loss[loss=0.3325, simple_loss=0.378, pruned_loss=0.1435, over 3815754.76 frames. ], batch size: 101, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:09:32,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 00:10:10,183 INFO [train.py:903] (2/4) Epoch 3, batch 1650, loss[loss=0.3661, simple_loss=0.4026, pruned_loss=0.1648, over 19662.00 frames. ], tot_loss[loss=0.3325, simple_loss=0.3779, pruned_loss=0.1435, over 3811847.07 frames. 
], batch size: 55, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:10:24,984 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+02 8.096e+02 9.310e+02 1.117e+03 2.889e+03, threshold=1.862e+03, percent-clipped=6.0 +2023-04-01 00:10:52,990 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9377, 1.8912, 1.8019, 2.9492, 1.9100, 3.0814, 2.6625, 1.8756], + device='cuda:2'), covar=tensor([0.0984, 0.0732, 0.0472, 0.0442, 0.0889, 0.0208, 0.0752, 0.0672], + device='cuda:2'), in_proj_covar=tensor([0.0433, 0.0407, 0.0414, 0.0547, 0.0483, 0.0317, 0.0507, 0.0412], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:11:11,888 INFO [train.py:903] (2/4) Epoch 3, batch 1700, loss[loss=0.3229, simple_loss=0.3676, pruned_loss=0.1391, over 19681.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.3784, pruned_loss=0.1443, over 3811750.10 frames. ], batch size: 53, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:11:46,112 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3714, 2.1273, 1.6384, 1.7390, 1.9281, 1.1058, 1.1530, 1.8481], + device='cuda:2'), covar=tensor([0.0766, 0.0471, 0.1026, 0.0483, 0.0485, 0.1180, 0.0804, 0.0465], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0235, 0.0313, 0.0257, 0.0216, 0.0315, 0.0279, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:11:50,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 00:12:13,189 INFO [train.py:903] (2/4) Epoch 3, batch 1750, loss[loss=0.3644, simple_loss=0.4104, pruned_loss=0.1592, over 19544.00 frames. ], tot_loss[loss=0.3336, simple_loss=0.3783, pruned_loss=0.1444, over 3808097.70 frames. ], batch size: 54, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:12:30,191 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+02 8.359e+02 1.065e+03 1.276e+03 4.198e+03, threshold=2.129e+03, percent-clipped=6.0 +2023-04-01 00:13:17,303 INFO [train.py:903] (2/4) Epoch 3, batch 1800, loss[loss=0.3239, simple_loss=0.3788, pruned_loss=0.1345, over 19579.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3777, pruned_loss=0.1438, over 3813511.78 frames. ], batch size: 61, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:13:37,329 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15473.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:13:54,484 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8539, 1.0270, 1.3962, 1.4191, 2.4337, 1.3659, 1.9113, 2.4379], + device='cuda:2'), covar=tensor([0.0529, 0.2719, 0.2477, 0.1576, 0.0683, 0.1867, 0.0978, 0.0651], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0300, 0.0293, 0.0277, 0.0284, 0.0320, 0.0268, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:14:07,901 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:14,398 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 00:14:17,664 INFO [train.py:903] (2/4) Epoch 3, batch 1850, loss[loss=0.324, simple_loss=0.3818, pruned_loss=0.1331, over 19682.00 frames. 
], tot_loss[loss=0.3303, simple_loss=0.3762, pruned_loss=0.1422, over 3826858.39 frames. ], batch size: 60, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:14:19,354 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 00:14:32,128 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+02 7.448e+02 9.407e+02 1.169e+03 2.273e+03, threshold=1.881e+03, percent-clipped=1.0 +2023-04-01 00:14:50,378 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 00:15:18,043 INFO [train.py:903] (2/4) Epoch 3, batch 1900, loss[loss=0.3423, simple_loss=0.3857, pruned_loss=0.1495, over 18723.00 frames. ], tot_loss[loss=0.3324, simple_loss=0.3776, pruned_loss=0.1437, over 3814538.54 frames. ], batch size: 74, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:15:18,210 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:15:36,161 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 00:15:41,742 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 00:15:55,569 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.8891, 5.3936, 2.8297, 4.7055, 1.4536, 5.2670, 5.2250, 5.5320], + device='cuda:2'), covar=tensor([0.0367, 0.0866, 0.1802, 0.0545, 0.3533, 0.0723, 0.0462, 0.0556], + device='cuda:2'), in_proj_covar=tensor([0.0315, 0.0296, 0.0327, 0.0270, 0.0343, 0.0285, 0.0239, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 00:16:01,253 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9685, 1.2820, 1.6079, 1.6811, 2.6105, 1.3855, 1.8083, 2.7119], + device='cuda:2'), covar=tensor([0.0394, 0.2364, 0.2065, 0.1281, 0.0481, 0.1791, 0.0956, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0299, 0.0290, 0.0273, 0.0280, 0.0317, 0.0266, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:16:04,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 00:16:19,672 INFO [train.py:903] (2/4) Epoch 3, batch 1950, loss[loss=0.3462, simple_loss=0.3975, pruned_loss=0.1474, over 19536.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3767, pruned_loss=0.143, over 3813943.18 frames. 
], batch size: 56, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:16:36,886 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+02 7.789e+02 9.617e+02 1.296e+03 2.448e+03, threshold=1.923e+03, percent-clipped=3.0 +2023-04-01 00:16:39,487 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2829, 1.7821, 1.9674, 2.3238, 2.0146, 2.1982, 2.5062, 2.3736], + device='cuda:2'), covar=tensor([0.0645, 0.0998, 0.1014, 0.0974, 0.1023, 0.0802, 0.0783, 0.0556], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0298, 0.0283, 0.0326, 0.0325, 0.0271, 0.0304, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 00:16:45,204 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2757, 1.1776, 1.2854, 1.7117, 2.9219, 1.3596, 2.0503, 2.8752], + device='cuda:2'), covar=tensor([0.0318, 0.2463, 0.2370, 0.1340, 0.0399, 0.1974, 0.0970, 0.0447], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0301, 0.0293, 0.0274, 0.0281, 0.0317, 0.0267, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:16:57,467 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4171, 0.9239, 1.2238, 1.3131, 2.2288, 1.1334, 1.8271, 2.0422], + device='cuda:2'), covar=tensor([0.0528, 0.2654, 0.2451, 0.1365, 0.0553, 0.1814, 0.0849, 0.0691], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0303, 0.0295, 0.0276, 0.0281, 0.0319, 0.0268, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:17:22,859 INFO [train.py:903] (2/4) Epoch 3, batch 2000, loss[loss=0.3899, simple_loss=0.4134, pruned_loss=0.1832, over 17460.00 frames. ], tot_loss[loss=0.3334, simple_loss=0.3783, pruned_loss=0.1442, over 3813053.84 frames. ], batch size: 101, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:17:41,449 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:18:20,139 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 00:18:23,579 INFO [train.py:903] (2/4) Epoch 3, batch 2050, loss[loss=0.3295, simple_loss=0.3736, pruned_loss=0.1427, over 19764.00 frames. ], tot_loss[loss=0.3343, simple_loss=0.3792, pruned_loss=0.1447, over 3818451.80 frames. ], batch size: 54, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:18:38,223 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+02 7.410e+02 9.269e+02 1.172e+03 2.915e+03, threshold=1.854e+03, percent-clipped=8.0 +2023-04-01 00:18:38,375 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 00:18:39,622 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 00:18:58,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 00:19:25,007 INFO [train.py:903] (2/4) Epoch 3, batch 2100, loss[loss=0.2771, simple_loss=0.3295, pruned_loss=0.1124, over 19781.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3767, pruned_loss=0.143, over 3825275.23 frames. 
], batch size: 47, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:19:52,252 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 00:20:09,571 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1468, 4.8740, 5.9274, 5.7464, 1.9640, 5.4698, 4.8437, 5.2835], + device='cuda:2'), covar=tensor([0.0451, 0.0501, 0.0330, 0.0255, 0.3128, 0.0161, 0.0330, 0.0781], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0336, 0.0458, 0.0345, 0.0476, 0.0236, 0.0306, 0.0439], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 00:20:13,820 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 00:20:25,852 INFO [train.py:903] (2/4) Epoch 3, batch 2150, loss[loss=0.3296, simple_loss=0.3808, pruned_loss=0.1392, over 19096.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3761, pruned_loss=0.1425, over 3804878.27 frames. ], batch size: 69, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:20:42,352 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 7.356e+02 9.022e+02 1.284e+03 2.686e+03, threshold=1.804e+03, percent-clipped=4.0 +2023-04-01 00:21:28,851 INFO [train.py:903] (2/4) Epoch 3, batch 2200, loss[loss=0.3185, simple_loss=0.3741, pruned_loss=0.1314, over 19618.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.3756, pruned_loss=0.1421, over 3808631.92 frames. ], batch size: 57, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:23,692 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:22:30,401 INFO [train.py:903] (2/4) Epoch 3, batch 2250, loss[loss=0.2637, simple_loss=0.3233, pruned_loss=0.102, over 19737.00 frames. ], tot_loss[loss=0.3291, simple_loss=0.3754, pruned_loss=0.1414, over 3810735.51 frames. ], batch size: 45, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:44,788 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+02 7.012e+02 9.226e+02 1.146e+03 2.721e+03, threshold=1.845e+03, percent-clipped=4.0 +2023-04-01 00:22:55,707 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:08,197 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 00:23:26,928 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:31,837 INFO [train.py:903] (2/4) Epoch 3, batch 2300, loss[loss=0.3492, simple_loss=0.3779, pruned_loss=0.1602, over 19621.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3747, pruned_loss=0.1408, over 3811482.92 frames. ], batch size: 50, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:23:44,534 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 00:23:57,228 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 00:24:33,418 INFO [train.py:903] (2/4) Epoch 3, batch 2350, loss[loss=0.3513, simple_loss=0.3827, pruned_loss=0.1599, over 19619.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3745, pruned_loss=0.1406, over 3810753.57 frames. 
], batch size: 50, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:24:48,796 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 7.519e+02 9.138e+02 1.115e+03 3.205e+03, threshold=1.828e+03, percent-clipped=8.0 +2023-04-01 00:24:50,002 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:25:07,155 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9965, 1.9063, 1.4289, 1.3973, 1.6633, 0.9305, 0.8294, 1.6000], + device='cuda:2'), covar=tensor([0.0830, 0.0543, 0.1007, 0.0533, 0.0494, 0.1268, 0.0711, 0.0416], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0227, 0.0309, 0.0259, 0.0214, 0.0309, 0.0274, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:25:15,405 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 00:25:31,110 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 00:25:34,308 INFO [train.py:903] (2/4) Epoch 3, batch 2400, loss[loss=0.3611, simple_loss=0.3999, pruned_loss=0.1612, over 17465.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3748, pruned_loss=0.1409, over 3822234.32 frames. ], batch size: 101, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:25:58,805 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9193, 1.5728, 1.7807, 2.2119, 4.3337, 1.2828, 2.2834, 3.8024], + device='cuda:2'), covar=tensor([0.0198, 0.2484, 0.2439, 0.1466, 0.0352, 0.2301, 0.1156, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0295, 0.0293, 0.0269, 0.0280, 0.0319, 0.0261, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:26:23,876 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4785, 1.6934, 2.4039, 2.9102, 1.8809, 2.4544, 2.0815, 2.8680], + device='cuda:2'), covar=tensor([0.0670, 0.1801, 0.0937, 0.0704, 0.1219, 0.0442, 0.0806, 0.0450], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0387, 0.0286, 0.0253, 0.0319, 0.0262, 0.0271, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:26:33,084 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:26:37,322 INFO [train.py:903] (2/4) Epoch 3, batch 2450, loss[loss=0.282, simple_loss=0.3466, pruned_loss=0.1087, over 19685.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3747, pruned_loss=0.1411, over 3821805.64 frames. 
], batch size: 59, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:51,588 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+02 8.374e+02 9.822e+02 1.305e+03 3.634e+03, threshold=1.964e+03, percent-clipped=9.0 +2023-04-01 00:27:20,469 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2402, 2.1223, 1.8979, 3.2895, 2.4846, 4.0510, 3.3404, 1.8194], + device='cuda:2'), covar=tensor([0.1224, 0.0906, 0.0521, 0.0624, 0.1070, 0.0188, 0.0798, 0.0818], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0419, 0.0423, 0.0558, 0.0500, 0.0329, 0.0516, 0.0420], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:27:38,202 INFO [train.py:903] (2/4) Epoch 3, batch 2500, loss[loss=0.3816, simple_loss=0.4098, pruned_loss=0.1766, over 17285.00 frames. ], tot_loss[loss=0.3286, simple_loss=0.3747, pruned_loss=0.1413, over 3815939.71 frames. ], batch size: 101, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:27:50,907 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:28:40,061 INFO [train.py:903] (2/4) Epoch 3, batch 2550, loss[loss=0.3762, simple_loss=0.4029, pruned_loss=0.1747, over 12686.00 frames. ], tot_loss[loss=0.3298, simple_loss=0.3754, pruned_loss=0.1421, over 3805040.27 frames. ], batch size: 136, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:28:56,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+02 7.641e+02 9.209e+02 1.283e+03 2.881e+03, threshold=1.842e+03, percent-clipped=1.0 +2023-04-01 00:29:28,552 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:29:36,514 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 00:29:44,030 INFO [train.py:903] (2/4) Epoch 3, batch 2600, loss[loss=0.2912, simple_loss=0.3398, pruned_loss=0.1213, over 19733.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3755, pruned_loss=0.1425, over 3807774.97 frames. ], batch size: 51, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:29:59,769 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 00:30:46,342 INFO [train.py:903] (2/4) Epoch 3, batch 2650, loss[loss=0.3212, simple_loss=0.376, pruned_loss=0.1332, over 19606.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3735, pruned_loss=0.1403, over 3804003.66 frames. ], batch size: 61, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:00,229 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+02 7.570e+02 8.863e+02 1.283e+03 4.568e+03, threshold=1.773e+03, percent-clipped=9.0 +2023-04-01 00:31:06,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 00:31:18,246 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5825, 4.1299, 2.6223, 3.8489, 1.2779, 3.9194, 3.8011, 3.9049], + device='cuda:2'), covar=tensor([0.0433, 0.0931, 0.1586, 0.0565, 0.3343, 0.0665, 0.0558, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0288, 0.0321, 0.0257, 0.0335, 0.0276, 0.0238, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 00:31:47,143 INFO [train.py:903] (2/4) Epoch 3, batch 2700, loss[loss=0.3808, simple_loss=0.4117, pruned_loss=0.175, over 19667.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3738, pruned_loss=0.1401, over 3811624.90 frames. ], batch size: 58, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:51,950 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:31:54,892 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:18,387 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.89 vs. limit=5.0 +2023-04-01 00:32:26,185 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16389.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:47,344 INFO [train.py:903] (2/4) Epoch 3, batch 2750, loss[loss=0.2729, simple_loss=0.3253, pruned_loss=0.1102, over 19367.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3735, pruned_loss=0.1402, over 3812067.58 frames. ], batch size: 47, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:33:01,694 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.986e+02 7.672e+02 9.838e+02 1.209e+03 2.463e+03, threshold=1.968e+03, percent-clipped=5.0 +2023-04-01 00:33:35,971 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:33:46,977 INFO [train.py:903] (2/4) Epoch 3, batch 2800, loss[loss=0.3471, simple_loss=0.3948, pruned_loss=0.1497, over 17478.00 frames. ], tot_loss[loss=0.3278, simple_loss=0.3742, pruned_loss=0.1407, over 3813482.95 frames. ], batch size: 101, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:33:48,292 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1957, 4.1553, 4.7453, 4.6724, 2.5337, 4.2403, 4.0067, 4.3179], + device='cuda:2'), covar=tensor([0.0406, 0.1102, 0.0291, 0.0215, 0.2161, 0.0205, 0.0297, 0.0625], + device='cuda:2'), in_proj_covar=tensor([0.0370, 0.0339, 0.0467, 0.0349, 0.0482, 0.0246, 0.0302, 0.0450], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 00:34:13,572 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:34:48,744 INFO [train.py:903] (2/4) Epoch 3, batch 2850, loss[loss=0.3629, simple_loss=0.4, pruned_loss=0.1629, over 19461.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.374, pruned_loss=0.1417, over 3797124.68 frames. 
], batch size: 64, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:54,289 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16511.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:35:03,282 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+02 7.862e+02 1.093e+03 1.433e+03 3.382e+03, threshold=2.185e+03, percent-clipped=3.0 +2023-04-01 00:35:47,694 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 00:35:49,909 INFO [train.py:903] (2/4) Epoch 3, batch 2900, loss[loss=0.315, simple_loss=0.3608, pruned_loss=0.1346, over 19755.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3725, pruned_loss=0.1401, over 3807076.21 frames. ], batch size: 47, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:35:57,095 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16562.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:36:38,639 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8981, 1.8894, 1.7275, 2.7885, 1.9768, 2.8519, 2.4471, 1.8519], + device='cuda:2'), covar=tensor([0.0940, 0.0692, 0.0449, 0.0368, 0.0765, 0.0219, 0.0748, 0.0663], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0418, 0.0423, 0.0554, 0.0500, 0.0332, 0.0512, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:36:51,758 INFO [train.py:903] (2/4) Epoch 3, batch 2950, loss[loss=0.3224, simple_loss=0.3727, pruned_loss=0.136, over 18836.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3726, pruned_loss=0.1399, over 3805483.76 frames. ], batch size: 74, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:02,822 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16616.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:07,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 7.271e+02 9.405e+02 1.170e+03 2.853e+03, threshold=1.881e+03, percent-clipped=4.0 +2023-04-01 00:37:15,840 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:33,995 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:52,189 INFO [train.py:903] (2/4) Epoch 3, batch 3000, loss[loss=0.303, simple_loss=0.3481, pruned_loss=0.129, over 19751.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.372, pruned_loss=0.1396, over 3808623.97 frames. ], batch size: 45, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:52,190 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 00:38:05,267 INFO [train.py:937] (2/4) Epoch 3, validation: loss=0.231, simple_loss=0.3246, pruned_loss=0.06867, over 944034.00 frames. +2023-04-01 00:38:05,268 INFO [train.py:938] (2/4) Maximum memory allocated so far is 17889MB +2023-04-01 00:38:08,687 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 00:38:53,850 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4958, 0.9711, 1.2834, 2.3986, 3.0598, 1.6561, 2.1413, 3.0307], + device='cuda:2'), covar=tensor([0.0450, 0.3100, 0.2733, 0.1183, 0.0599, 0.1882, 0.1185, 0.0588], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0307, 0.0294, 0.0277, 0.0287, 0.0320, 0.0272, 0.0282], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:39:07,674 INFO [train.py:903] (2/4) Epoch 3, batch 3050, loss[loss=0.3767, simple_loss=0.4149, pruned_loss=0.1693, over 19290.00 frames. ], tot_loss[loss=0.3253, simple_loss=0.372, pruned_loss=0.1393, over 3799740.08 frames. ], batch size: 66, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:39:22,585 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+02 7.860e+02 1.014e+03 1.267e+03 1.851e+03, threshold=2.027e+03, percent-clipped=0.0 +2023-04-01 00:39:35,557 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4026, 0.9374, 1.5646, 1.1695, 2.6121, 3.5848, 3.3941, 3.8114], + device='cuda:2'), covar=tensor([0.1425, 0.3044, 0.2674, 0.2014, 0.0474, 0.0105, 0.0216, 0.0085], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0277, 0.0329, 0.0271, 0.0196, 0.0106, 0.0197, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 00:39:38,685 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16733.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:39:40,013 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:02,113 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.37 vs. limit=5.0 +2023-04-01 00:40:08,119 INFO [train.py:903] (2/4) Epoch 3, batch 3100, loss[loss=0.3466, simple_loss=0.3945, pruned_loss=0.1493, over 19662.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3721, pruned_loss=0.1391, over 3798756.15 frames. ], batch size: 58, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:40:11,544 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:24,963 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8489, 1.7973, 1.8993, 2.2187, 4.5027, 1.3557, 2.2632, 4.3561], + device='cuda:2'), covar=tensor([0.0189, 0.2143, 0.2095, 0.1382, 0.0338, 0.1978, 0.1168, 0.0283], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0303, 0.0298, 0.0280, 0.0288, 0.0317, 0.0272, 0.0284], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:41:08,260 INFO [train.py:903] (2/4) Epoch 3, batch 3150, loss[loss=0.344, simple_loss=0.3875, pruned_loss=0.1503, over 19536.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3736, pruned_loss=0.1401, over 3814084.43 frames. 
], batch size: 54, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:41:21,866 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16818.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:22,522 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.096e+02 7.784e+02 9.820e+02 1.270e+03 2.923e+03, threshold=1.964e+03, percent-clipped=2.0 +2023-04-01 00:41:31,806 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 00:41:51,866 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:57,428 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:07,323 INFO [train.py:903] (2/4) Epoch 3, batch 3200, loss[loss=0.3723, simple_loss=0.3973, pruned_loss=0.1736, over 19487.00 frames. ], tot_loss[loss=0.3272, simple_loss=0.3739, pruned_loss=0.1402, over 3815463.00 frames. ], batch size: 49, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:42:39,230 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16882.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:43,595 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16886.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:43:03,659 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1312, 1.1091, 1.8025, 1.2507, 2.5117, 2.4032, 2.6393, 1.0719], + device='cuda:2'), covar=tensor([0.1570, 0.2369, 0.1222, 0.1398, 0.1011, 0.1063, 0.1236, 0.2134], + device='cuda:2'), in_proj_covar=tensor([0.0407, 0.0446, 0.0411, 0.0387, 0.0486, 0.0393, 0.0570, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:43:08,573 INFO [train.py:903] (2/4) Epoch 3, batch 3250, loss[loss=0.3573, simple_loss=0.3909, pruned_loss=0.1619, over 19474.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3726, pruned_loss=0.1393, over 3827146.41 frames. ], batch size: 49, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:43:08,998 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:43:24,380 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+02 8.507e+02 1.021e+03 1.288e+03 2.328e+03, threshold=2.042e+03, percent-clipped=1.0 +2023-04-01 00:43:29,315 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:44:09,590 INFO [train.py:903] (2/4) Epoch 3, batch 3300, loss[loss=0.3276, simple_loss=0.3826, pruned_loss=0.1363, over 19665.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.373, pruned_loss=0.1394, over 3827902.43 frames. ], batch size: 55, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:44:16,149 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 00:45:10,102 INFO [train.py:903] (2/4) Epoch 3, batch 3350, loss[loss=0.2879, simple_loss=0.3444, pruned_loss=0.1157, over 19618.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3735, pruned_loss=0.1395, over 3827866.32 frames. 
], batch size: 50, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:45:24,559 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+02 7.847e+02 9.419e+02 1.175e+03 3.710e+03, threshold=1.884e+03, percent-clipped=5.0 +2023-04-01 00:46:10,035 INFO [train.py:903] (2/4) Epoch 3, batch 3400, loss[loss=0.3393, simple_loss=0.3876, pruned_loss=0.1455, over 19316.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3738, pruned_loss=0.1395, over 3811971.05 frames. ], batch size: 66, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:46:30,811 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 00:47:08,494 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:47:12,304 INFO [train.py:903] (2/4) Epoch 3, batch 3450, loss[loss=0.3196, simple_loss=0.3632, pruned_loss=0.138, over 19626.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3715, pruned_loss=0.1378, over 3826007.51 frames. ], batch size: 50, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:14,334 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 00:47:28,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 8.544e+02 1.014e+03 1.278e+03 1.988e+03, threshold=2.028e+03, percent-clipped=3.0 +2023-04-01 00:47:39,710 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:48:12,442 INFO [train.py:903] (2/4) Epoch 3, batch 3500, loss[loss=0.4185, simple_loss=0.4259, pruned_loss=0.2056, over 13514.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3742, pruned_loss=0.1408, over 3813604.91 frames. ], batch size: 138, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:48:29,452 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9773, 2.5021, 1.8710, 2.2804, 2.2172, 1.5464, 1.4781, 1.7937], + device='cuda:2'), covar=tensor([0.0936, 0.0525, 0.0723, 0.0398, 0.0610, 0.0913, 0.0846, 0.0566], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0232, 0.0315, 0.0259, 0.0224, 0.0314, 0.0280, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:49:12,365 INFO [train.py:903] (2/4) Epoch 3, batch 3550, loss[loss=0.2956, simple_loss=0.3409, pruned_loss=0.1252, over 19622.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3729, pruned_loss=0.1401, over 3814208.50 frames. 
], batch size: 50, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,651 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:49:18,404 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5167, 2.4180, 1.6688, 2.0720, 2.1064, 1.0839, 1.2558, 1.5465], + device='cuda:2'), covar=tensor([0.0873, 0.0512, 0.0959, 0.0479, 0.0504, 0.1261, 0.0807, 0.0523], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0238, 0.0323, 0.0262, 0.0228, 0.0321, 0.0284, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:49:24,943 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3853, 1.0562, 1.4782, 0.9760, 2.5788, 3.0993, 2.9628, 3.3356], + device='cuda:2'), covar=tensor([0.1344, 0.2786, 0.2665, 0.1985, 0.0410, 0.0110, 0.0242, 0.0102], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0276, 0.0324, 0.0269, 0.0193, 0.0105, 0.0200, 0.0112], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 00:49:26,838 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.377e+02 8.429e+02 1.068e+03 1.302e+03 2.755e+03, threshold=2.137e+03, percent-clipped=4.0 +2023-04-01 00:49:27,544 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0 +2023-04-01 00:49:40,883 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17230.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:50:11,395 INFO [train.py:903] (2/4) Epoch 3, batch 3600, loss[loss=0.2922, simple_loss=0.361, pruned_loss=0.1117, over 19527.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3729, pruned_loss=0.14, over 3824597.51 frames. ], batch size: 56, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:50:24,848 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:51:11,858 INFO [train.py:903] (2/4) Epoch 3, batch 3650, loss[loss=0.3049, simple_loss=0.3582, pruned_loss=0.1258, over 19589.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.3725, pruned_loss=0.1395, over 3807173.37 frames. ], batch size: 52, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:51:27,521 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.083e+02 7.894e+02 9.345e+02 1.119e+03 1.949e+03, threshold=1.869e+03, percent-clipped=0.0 +2023-04-01 00:51:38,317 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1913, 2.5835, 1.7839, 2.2373, 1.6419, 1.7316, 0.5954, 2.0281], + device='cuda:2'), covar=tensor([0.0328, 0.0300, 0.0330, 0.0415, 0.0640, 0.0547, 0.0570, 0.0496], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0248, 0.0241, 0.0268, 0.0328, 0.0264, 0.0254, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 00:51:57,344 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17345.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:52:12,711 INFO [train.py:903] (2/4) Epoch 3, batch 3700, loss[loss=0.3209, simple_loss=0.3768, pruned_loss=0.1325, over 19691.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3735, pruned_loss=0.14, over 3818490.43 frames. 
], batch size: 59, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:52:23,839 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1573, 1.2833, 1.9237, 1.3908, 2.5666, 2.3212, 2.8076, 1.0834], + device='cuda:2'), covar=tensor([0.1407, 0.2224, 0.1120, 0.1208, 0.0823, 0.1017, 0.0878, 0.2008], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0448, 0.0411, 0.0386, 0.0483, 0.0403, 0.0568, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:52:39,388 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:52:42,875 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:53:02,165 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8160, 1.6628, 1.4809, 2.0190, 1.4334, 1.8780, 1.6545, 1.9612], + device='cuda:2'), covar=tensor([0.0775, 0.1331, 0.1120, 0.0772, 0.1165, 0.0415, 0.0793, 0.0555], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0370, 0.0279, 0.0252, 0.0312, 0.0259, 0.0269, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:53:05,646 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17401.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:53:12,837 INFO [train.py:903] (2/4) Epoch 3, batch 3750, loss[loss=0.3383, simple_loss=0.3726, pruned_loss=0.152, over 19853.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3745, pruned_loss=0.1407, over 3825993.20 frames. ], batch size: 52, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:53:27,556 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 9.192e+02 1.060e+03 1.489e+03 3.397e+03, threshold=2.120e+03, percent-clipped=7.0 +2023-04-01 00:54:12,635 INFO [train.py:903] (2/4) Epoch 3, batch 3800, loss[loss=0.2855, simple_loss=0.3332, pruned_loss=0.1189, over 14676.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3735, pruned_loss=0.1393, over 3825449.97 frames. ], batch size: 32, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:54:45,646 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 00:55:11,735 INFO [train.py:903] (2/4) Epoch 3, batch 3850, loss[loss=0.3058, simple_loss=0.359, pruned_loss=0.1262, over 19577.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3713, pruned_loss=0.1381, over 3836240.64 frames. ], batch size: 52, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:55:28,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+02 7.944e+02 9.720e+02 1.209e+03 3.103e+03, threshold=1.944e+03, percent-clipped=2.0 +2023-04-01 00:56:04,066 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17551.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:56:12,739 INFO [train.py:903] (2/4) Epoch 3, batch 3900, loss[loss=0.2682, simple_loss=0.3256, pruned_loss=0.1054, over 19807.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3708, pruned_loss=0.138, over 3820675.25 frames. 
], batch size: 49, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:05,296 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17601.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:57:11,567 INFO [train.py:903] (2/4) Epoch 3, batch 3950, loss[loss=0.3201, simple_loss=0.3647, pruned_loss=0.1377, over 19398.00 frames. ], tot_loss[loss=0.3227, simple_loss=0.3703, pruned_loss=0.1376, over 3823055.28 frames. ], batch size: 48, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:18,162 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 00:57:27,253 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+02 7.211e+02 9.089e+02 1.148e+03 2.193e+03, threshold=1.818e+03, percent-clipped=2.0 +2023-04-01 00:57:34,440 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17626.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:57:51,684 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:00,745 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9271, 1.3472, 1.6343, 2.3406, 1.8088, 1.9279, 1.9278, 2.1276], + device='cuda:2'), covar=tensor([0.0931, 0.2202, 0.1563, 0.1004, 0.1580, 0.0733, 0.1168, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0378, 0.0286, 0.0257, 0.0317, 0.0263, 0.0273, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:58:04,606 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4225, 1.2552, 1.0477, 1.5741, 1.3314, 1.3765, 1.1807, 1.4660], + device='cuda:2'), covar=tensor([0.0876, 0.1412, 0.1430, 0.0823, 0.1058, 0.0532, 0.1035, 0.0686], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0376, 0.0285, 0.0256, 0.0315, 0.0262, 0.0272, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:58:12,127 INFO [train.py:903] (2/4) Epoch 3, batch 4000, loss[loss=0.3856, simple_loss=0.4154, pruned_loss=0.1779, over 19593.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3707, pruned_loss=0.1376, over 3833925.27 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:58:20,421 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:22,699 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:26,146 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7053, 1.3737, 1.3621, 2.0428, 1.6155, 1.9435, 2.0907, 1.6145], + device='cuda:2'), covar=tensor([0.0688, 0.1070, 0.1163, 0.0747, 0.0983, 0.0700, 0.0701, 0.0729], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0282, 0.0272, 0.0312, 0.0316, 0.0259, 0.0280, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 00:58:59,493 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 00:58:59,862 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4193, 2.4118, 1.6266, 1.7646, 2.0871, 0.9975, 1.1036, 1.7887], + device='cuda:2'), covar=tensor([0.0888, 0.0360, 0.0979, 0.0459, 0.0444, 0.1327, 0.0891, 0.0476], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0236, 0.0315, 0.0258, 0.0224, 0.0313, 0.0279, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 00:59:11,470 INFO [train.py:903] (2/4) Epoch 3, batch 4050, loss[loss=0.3581, simple_loss=0.3918, pruned_loss=0.1622, over 17228.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3693, pruned_loss=0.1362, over 3836072.58 frames. ], batch size: 101, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:59:25,569 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:28,911 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+02 7.398e+02 9.459e+02 1.250e+03 4.446e+03, threshold=1.892e+03, percent-clipped=10.0 +2023-04-01 00:59:32,675 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:37,323 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6676, 1.4770, 1.4187, 1.9562, 3.2090, 1.3196, 1.9470, 3.0961], + device='cuda:2'), covar=tensor([0.0273, 0.2265, 0.2210, 0.1255, 0.0423, 0.1887, 0.1080, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0300, 0.0294, 0.0272, 0.0278, 0.0314, 0.0271, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 00:59:57,739 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17745.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:00:01,030 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17748.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:00:12,215 INFO [train.py:903] (2/4) Epoch 3, batch 4100, loss[loss=0.3071, simple_loss=0.3695, pruned_loss=0.1224, over 19434.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3692, pruned_loss=0.1364, over 3837907.92 frames. ], batch size: 64, lr: 2.43e-02, grad_scale: 4.0 +2023-04-01 01:00:48,455 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 01:00:53,253 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. limit=5.0 +2023-04-01 01:01:11,675 INFO [train.py:903] (2/4) Epoch 3, batch 4150, loss[loss=0.2693, simple_loss=0.3376, pruned_loss=0.1005, over 19603.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3698, pruned_loss=0.1363, over 3843111.20 frames. ], batch size: 57, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:01:17,885 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-04-01 01:01:28,529 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+02 7.784e+02 9.706e+02 1.186e+03 3.618e+03, threshold=1.941e+03, percent-clipped=3.0 +2023-04-01 01:01:49,952 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:01:54,297 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8787, 4.8124, 5.7229, 5.5699, 1.6791, 5.3683, 4.5538, 5.0777], + device='cuda:2'), covar=tensor([0.0567, 0.0515, 0.0336, 0.0272, 0.3544, 0.0128, 0.0351, 0.0764], + device='cuda:2'), in_proj_covar=tensor([0.0380, 0.0346, 0.0478, 0.0367, 0.0492, 0.0251, 0.0316, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 01:02:10,745 INFO [train.py:903] (2/4) Epoch 3, batch 4200, loss[loss=0.2914, simple_loss=0.3411, pruned_loss=0.1208, over 19397.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3706, pruned_loss=0.1368, over 3837984.32 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:02:14,242 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17860.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:02:14,922 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 01:03:04,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-01 01:03:09,269 INFO [train.py:903] (2/4) Epoch 3, batch 4250, loss[loss=0.3408, simple_loss=0.3685, pruned_loss=0.1566, over 19749.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3717, pruned_loss=0.138, over 3830336.43 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:03:26,776 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.782e+02 8.334e+02 9.808e+02 1.259e+03 2.577e+03, threshold=1.962e+03, percent-clipped=5.0 +2023-04-01 01:03:26,830 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 01:03:28,272 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:03:38,010 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 01:03:57,685 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:08,381 INFO [train.py:903] (2/4) Epoch 3, batch 4300, loss[loss=0.3055, simple_loss=0.3509, pruned_loss=0.13, over 19668.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3716, pruned_loss=0.1382, over 3824428.17 frames. ], batch size: 53, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:04:15,406 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-04-01 01:04:35,068 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:05:06,027 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 01:05:10,742 INFO [train.py:903] (2/4) Epoch 3, batch 4350, loss[loss=0.3255, simple_loss=0.3653, pruned_loss=0.1428, over 19719.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3717, pruned_loss=0.1381, over 3823457.01 frames. 
], batch size: 51, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:05:27,033 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+02 7.530e+02 9.485e+02 1.282e+03 2.824e+03, threshold=1.897e+03, percent-clipped=4.0 +2023-04-01 01:06:11,120 INFO [train.py:903] (2/4) Epoch 3, batch 4400, loss[loss=0.4023, simple_loss=0.416, pruned_loss=0.1943, over 13300.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3727, pruned_loss=0.1385, over 3796819.88 frames. ], batch size: 135, lr: 2.41e-02, grad_scale: 8.0 +2023-04-01 01:06:17,196 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:33,749 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 01:06:43,602 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 01:06:53,798 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18092.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:57,450 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:10,934 INFO [train.py:903] (2/4) Epoch 3, batch 4450, loss[loss=0.3343, simple_loss=0.3895, pruned_loss=0.1396, over 19576.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3721, pruned_loss=0.138, over 3787373.03 frames. ], batch size: 61, lr: 2.40e-02, grad_scale: 8.0 +2023-04-01 01:07:21,282 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18116.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:07:26,624 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:28,236 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.891e+02 7.625e+02 9.248e+02 1.171e+03 2.408e+03, threshold=1.850e+03, percent-clipped=4.0 +2023-04-01 01:07:48,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 01:07:53,387 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18141.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:08:11,165 INFO [train.py:903] (2/4) Epoch 3, batch 4500, loss[loss=0.3386, simple_loss=0.377, pruned_loss=0.1501, over 19685.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.371, pruned_loss=0.1371, over 3783527.43 frames. ], batch size: 60, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:08:37,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:54,076 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18192.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:55,185 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:11,759 INFO [train.py:903] (2/4) Epoch 3, batch 4550, loss[loss=0.3441, simple_loss=0.3894, pruned_loss=0.1494, over 19454.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3721, pruned_loss=0.138, over 3784698.09 frames. 
], batch size: 64, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:09:12,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:18,455 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 01:09:29,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.087e+02 7.616e+02 9.596e+02 1.201e+03 2.125e+03, threshold=1.919e+03, percent-clipped=4.0 +2023-04-01 01:09:42,220 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 01:10:11,990 INFO [train.py:903] (2/4) Epoch 3, batch 4600, loss[loss=0.3258, simple_loss=0.3659, pruned_loss=0.1428, over 19741.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3727, pruned_loss=0.1388, over 3780192.75 frames. ], batch size: 51, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:11,636 INFO [train.py:903] (2/4) Epoch 3, batch 4650, loss[loss=0.2807, simple_loss=0.3364, pruned_loss=0.1124, over 19386.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.3725, pruned_loss=0.1382, over 3799352.66 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:27,781 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 01:11:28,861 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+02 7.851e+02 9.796e+02 1.308e+03 3.825e+03, threshold=1.959e+03, percent-clipped=6.0 +2023-04-01 01:11:29,082 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:11:38,728 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 01:12:10,878 INFO [train.py:903] (2/4) Epoch 3, batch 4700, loss[loss=0.4083, simple_loss=0.4346, pruned_loss=0.191, over 19727.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3735, pruned_loss=0.1395, over 3812578.74 frames. ], batch size: 63, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:12:33,348 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 01:12:53,109 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-01 01:13:13,553 INFO [train.py:903] (2/4) Epoch 3, batch 4750, loss[loss=0.301, simple_loss=0.3668, pruned_loss=0.1176, over 19566.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3721, pruned_loss=0.1381, over 3821061.54 frames. 
], batch size: 61, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:13:31,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.455e+02 7.810e+02 9.309e+02 1.222e+03 2.382e+03, threshold=1.862e+03, percent-clipped=4.0 +2023-04-01 01:13:42,057 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1857, 2.1250, 1.6201, 1.6439, 1.3566, 1.5580, 0.1974, 1.0360], + device='cuda:2'), covar=tensor([0.0220, 0.0199, 0.0132, 0.0207, 0.0499, 0.0276, 0.0443, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0248, 0.0244, 0.0268, 0.0323, 0.0259, 0.0252, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 01:13:44,335 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18433.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:13:48,785 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:15,106 INFO [train.py:903] (2/4) Epoch 3, batch 4800, loss[loss=0.3125, simple_loss=0.356, pruned_loss=0.1345, over 19630.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3709, pruned_loss=0.137, over 3826985.65 frames. ], batch size: 50, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:14:16,585 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:22,286 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18463.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:53,534 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18488.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:16,092 INFO [train.py:903] (2/4) Epoch 3, batch 4850, loss[loss=0.2844, simple_loss=0.3298, pruned_loss=0.1195, over 19833.00 frames. ], tot_loss[loss=0.3209, simple_loss=0.3698, pruned_loss=0.136, over 3820690.85 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:15:17,552 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:20,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-01 01:15:34,582 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+02 7.295e+02 9.130e+02 1.063e+03 1.681e+03, threshold=1.826e+03, percent-clipped=0.0 +2023-04-01 01:15:38,174 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 01:15:40,602 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18526.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:15:52,387 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:53,567 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18537.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:16:00,113 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 01:16:04,602 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 01:16:05,768 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-01 01:16:14,595 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 01:16:15,641 INFO [train.py:903] (2/4) Epoch 3, batch 4900, loss[loss=0.2591, simple_loss=0.3124, pruned_loss=0.1029, over 19386.00 frames. ], tot_loss[loss=0.3209, simple_loss=0.3698, pruned_loss=0.136, over 3828245.38 frames. ], batch size: 47, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:16:34,649 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 01:17:11,786 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9685, 1.2357, 1.4126, 1.5558, 2.6804, 1.4719, 1.7857, 2.5737], + device='cuda:2'), covar=tensor([0.0425, 0.2239, 0.2092, 0.1393, 0.0452, 0.1667, 0.0979, 0.0492], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0300, 0.0297, 0.0274, 0.0283, 0.0315, 0.0270, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 01:17:14,800 INFO [train.py:903] (2/4) Epoch 3, batch 4950, loss[loss=0.3534, simple_loss=0.3999, pruned_loss=0.1535, over 19774.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3719, pruned_loss=0.1381, over 3826140.10 frames. ], batch size: 56, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:17:30,515 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 01:17:34,980 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.825e+02 8.688e+02 1.059e+03 1.337e+03 3.400e+03, threshold=2.119e+03, percent-clipped=10.0 +2023-04-01 01:17:52,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 01:18:09,838 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18651.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:10,967 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:17,098 INFO [train.py:903] (2/4) Epoch 3, batch 5000, loss[loss=0.2832, simple_loss=0.3298, pruned_loss=0.1183, over 19727.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3727, pruned_loss=0.1389, over 3808586.81 frames. ], batch size: 46, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:18:21,589 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 01:18:32,449 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 01:18:43,453 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 01:18:59,271 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:00,357 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18693.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:16,429 INFO [train.py:903] (2/4) Epoch 3, batch 5050, loss[loss=0.3434, simple_loss=0.3734, pruned_loss=0.1567, over 19841.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3723, pruned_loss=0.139, over 3805952.94 frames. 
], batch size: 52, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:19:29,200 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:35,293 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.108e+02 7.776e+02 1.028e+03 1.229e+03 3.550e+03, threshold=2.057e+03, percent-clipped=2.0 +2023-04-01 01:19:48,426 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 01:20:16,946 INFO [train.py:903] (2/4) Epoch 3, batch 5100, loss[loss=0.2817, simple_loss=0.3361, pruned_loss=0.1137, over 19609.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3692, pruned_loss=0.1365, over 3812691.74 frames. ], batch size: 50, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:20:24,660 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 01:20:27,867 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 01:20:32,465 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 01:21:19,966 INFO [train.py:903] (2/4) Epoch 3, batch 5150, loss[loss=0.3379, simple_loss=0.3854, pruned_loss=0.1452, over 18907.00 frames. ], tot_loss[loss=0.3201, simple_loss=0.3688, pruned_loss=0.1357, over 3822250.95 frames. ], batch size: 74, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:21:20,829 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 01:21:31,917 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 01:21:40,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.924e+02 8.047e+02 1.080e+03 1.932e+03, threshold=1.609e+03, percent-clipped=0.0 +2023-04-01 01:22:06,984 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 01:22:14,773 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18852.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:22:20,359 INFO [train.py:903] (2/4) Epoch 3, batch 5200, loss[loss=0.3344, simple_loss=0.3914, pruned_loss=0.1387, over 19591.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3662, pruned_loss=0.1332, over 3831369.52 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 8.0 +2023-04-01 01:22:24,119 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 01:22:37,383 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 01:22:37,520 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18870.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:23:21,215 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 01:23:23,216 INFO [train.py:903] (2/4) Epoch 3, batch 5250, loss[loss=0.2802, simple_loss=0.3414, pruned_loss=0.1094, over 19519.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3667, pruned_loss=0.1333, over 3843481.76 frames. 
], batch size: 54, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:23:23,669 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:24,829 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:42,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+02 7.388e+02 9.793e+02 1.246e+03 4.620e+03, threshold=1.959e+03, percent-clipped=9.0 +2023-04-01 01:23:53,540 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:54,583 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:12,757 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:22,206 INFO [train.py:903] (2/4) Epoch 3, batch 5300, loss[loss=0.3634, simple_loss=0.413, pruned_loss=0.1569, over 19677.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.368, pruned_loss=0.1346, over 3839144.96 frames. ], batch size: 55, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:24:35,224 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:39,541 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 01:24:44,236 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2112, 1.9867, 1.5413, 1.6790, 1.3102, 1.5643, 0.3775, 0.9902], + device='cuda:2'), covar=tensor([0.0184, 0.0217, 0.0166, 0.0189, 0.0442, 0.0261, 0.0413, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0252, 0.0250, 0.0275, 0.0331, 0.0264, 0.0258, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 01:24:56,500 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18985.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:25:22,318 INFO [train.py:903] (2/4) Epoch 3, batch 5350, loss[loss=0.3287, simple_loss=0.3826, pruned_loss=0.1374, over 19524.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.3675, pruned_loss=0.1345, over 3845749.26 frames. ], batch size: 64, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:25:42,786 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+02 7.837e+02 1.011e+03 1.314e+03 3.062e+03, threshold=2.023e+03, percent-clipped=6.0 +2023-04-01 01:25:50,093 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4029, 2.2443, 1.7316, 1.7390, 1.5293, 1.7899, 0.3372, 1.1522], + device='cuda:2'), covar=tensor([0.0254, 0.0212, 0.0160, 0.0222, 0.0484, 0.0259, 0.0475, 0.0392], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0248, 0.0248, 0.0275, 0.0331, 0.0261, 0.0258, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 01:25:55,461 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 01:25:56,693 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19036.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:26:20,423 INFO [train.py:903] (2/4) Epoch 3, batch 5400, loss[loss=0.2957, simple_loss=0.3611, pruned_loss=0.1152, over 19321.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3673, pruned_loss=0.1339, over 3847914.15 frames. ], batch size: 66, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:27:04,755 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4308, 1.0958, 1.5143, 0.8859, 2.5202, 3.0751, 3.0082, 3.2854], + device='cuda:2'), covar=tensor([0.1241, 0.2725, 0.2597, 0.2025, 0.0402, 0.0129, 0.0213, 0.0116], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0280, 0.0325, 0.0263, 0.0196, 0.0107, 0.0200, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 01:27:22,188 INFO [train.py:903] (2/4) Epoch 3, batch 5450, loss[loss=0.2872, simple_loss=0.3536, pruned_loss=0.1104, over 18225.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3659, pruned_loss=0.1325, over 3856775.20 frames. ], batch size: 83, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:27:41,224 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+02 7.941e+02 9.480e+02 1.159e+03 2.761e+03, threshold=1.896e+03, percent-clipped=1.0 +2023-04-01 01:27:50,486 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0015, 1.9074, 1.7859, 3.0685, 1.9931, 3.0691, 2.7915, 1.7620], + device='cuda:2'), covar=tensor([0.1305, 0.1011, 0.0593, 0.0554, 0.1205, 0.0313, 0.0954, 0.0964], + device='cuda:2'), in_proj_covar=tensor([0.0491, 0.0460, 0.0462, 0.0616, 0.0536, 0.0378, 0.0558, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 01:28:10,104 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19147.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:28:14,878 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:28:21,134 INFO [train.py:903] (2/4) Epoch 3, batch 5500, loss[loss=0.3842, simple_loss=0.4216, pruned_loss=0.1734, over 19364.00 frames. ], tot_loss[loss=0.3175, simple_loss=0.3673, pruned_loss=0.1338, over 3852258.93 frames. ], batch size: 70, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:28:45,302 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 01:29:20,074 INFO [train.py:903] (2/4) Epoch 3, batch 5550, loss[loss=0.4509, simple_loss=0.4447, pruned_loss=0.2286, over 13080.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3694, pruned_loss=0.136, over 3833923.36 frames. ], batch size: 136, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:29:27,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 01:29:34,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-04-01 01:29:41,400 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:29:42,142 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 7.947e+02 9.800e+02 1.342e+03 2.993e+03, threshold=1.960e+03, percent-clipped=6.0 +2023-04-01 01:29:53,830 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2764, 1.2565, 1.4076, 1.8227, 2.8686, 1.2468, 1.8732, 2.8306], + device='cuda:2'), covar=tensor([0.0328, 0.2365, 0.2211, 0.1352, 0.0455, 0.1952, 0.1110, 0.0443], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0305, 0.0297, 0.0275, 0.0287, 0.0312, 0.0271, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 01:30:02,025 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19241.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:30:02,998 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3245, 3.7050, 3.9600, 3.9298, 1.5445, 3.4586, 3.2581, 3.5258], + device='cuda:2'), covar=tensor([0.0731, 0.0645, 0.0521, 0.0415, 0.3431, 0.0332, 0.0452, 0.1009], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0357, 0.0489, 0.0379, 0.0495, 0.0262, 0.0313, 0.0466], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 01:30:09,722 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:30:15,636 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 01:30:21,462 INFO [train.py:903] (2/4) Epoch 3, batch 5600, loss[loss=0.3128, simple_loss=0.3659, pruned_loss=0.1299, over 19523.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3711, pruned_loss=0.1368, over 3824202.26 frames. ], batch size: 54, lr: 2.34e-02, grad_scale: 8.0 +2023-04-01 01:30:33,696 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19266.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:31:03,997 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:31:22,086 INFO [train.py:903] (2/4) Epoch 3, batch 5650, loss[loss=0.3247, simple_loss=0.3682, pruned_loss=0.1406, over 19662.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.37, pruned_loss=0.1365, over 3827896.98 frames. ], batch size: 58, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:31:41,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 7.403e+02 9.102e+02 1.185e+03 3.385e+03, threshold=1.820e+03, percent-clipped=3.0 +2023-04-01 01:32:09,521 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 01:32:21,818 INFO [train.py:903] (2/4) Epoch 3, batch 5700, loss[loss=0.27, simple_loss=0.3204, pruned_loss=0.1098, over 19739.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3703, pruned_loss=0.1364, over 3826869.41 frames. 
], batch size: 47, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:32:39,011 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9680, 1.5958, 1.3177, 1.9439, 1.5786, 1.5726, 1.4626, 1.8232], + device='cuda:2'), covar=tensor([0.0809, 0.1698, 0.1390, 0.0841, 0.1184, 0.0674, 0.1008, 0.0702], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0380, 0.0283, 0.0250, 0.0313, 0.0264, 0.0269, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 01:32:43,522 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4811, 1.0785, 1.6556, 1.1624, 2.5953, 3.6303, 3.3586, 3.7661], + device='cuda:2'), covar=tensor([0.1281, 0.2827, 0.2643, 0.2006, 0.0473, 0.0121, 0.0207, 0.0096], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0277, 0.0320, 0.0265, 0.0198, 0.0106, 0.0200, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 01:33:22,532 INFO [train.py:903] (2/4) Epoch 3, batch 5750, loss[loss=0.291, simple_loss=0.3365, pruned_loss=0.1228, over 19736.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3683, pruned_loss=0.1347, over 3842423.15 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:33:22,921 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:22,999 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:24,904 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 01:33:30,167 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:34,048 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 01:33:39,379 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 01:33:43,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.525e+02 9.538e+02 1.231e+03 3.556e+03, threshold=1.908e+03, percent-clipped=6.0 +2023-04-01 01:33:53,877 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:34:22,887 INFO [train.py:903] (2/4) Epoch 3, batch 5800, loss[loss=0.2883, simple_loss=0.3377, pruned_loss=0.1195, over 19389.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3678, pruned_loss=0.1348, over 3845401.63 frames. ], batch size: 47, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:03,989 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19491.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:35:23,136 INFO [train.py:903] (2/4) Epoch 3, batch 5850, loss[loss=0.2989, simple_loss=0.3498, pruned_loss=0.124, over 19662.00 frames. ], tot_loss[loss=0.318, simple_loss=0.3672, pruned_loss=0.1344, over 3838052.21 frames. 
], batch size: 53, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:43,370 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+02 8.354e+02 1.035e+03 1.319e+03 5.609e+03, threshold=2.070e+03, percent-clipped=8.0 +2023-04-01 01:36:23,578 INFO [train.py:903] (2/4) Epoch 3, batch 5900, loss[loss=0.3408, simple_loss=0.3924, pruned_loss=0.1446, over 19483.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3669, pruned_loss=0.1342, over 3820584.25 frames. ], batch size: 64, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:36:26,966 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 01:36:46,206 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 01:37:21,793 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19606.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:37:22,518 INFO [train.py:903] (2/4) Epoch 3, batch 5950, loss[loss=0.273, simple_loss=0.3251, pruned_loss=0.1105, over 19760.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3687, pruned_loss=0.1354, over 3814508.95 frames. ], batch size: 45, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:37:45,646 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.894e+02 9.030e+02 1.163e+03 3.004e+03, threshold=1.806e+03, percent-clipped=5.0 +2023-04-01 01:37:59,410 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:38:24,431 INFO [train.py:903] (2/4) Epoch 3, batch 6000, loss[loss=0.3673, simple_loss=0.4087, pruned_loss=0.1629, over 17264.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3677, pruned_loss=0.1349, over 3813721.06 frames. ], batch size: 101, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:38:24,431 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 01:38:37,345 INFO [train.py:937] (2/4) Epoch 3, validation: loss=0.2218, simple_loss=0.3182, pruned_loss=0.06273, over 944034.00 frames. 
+2023-04-01 01:38:37,346 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18149MB +2023-04-01 01:38:45,568 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:38:53,760 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3670, 1.4567, 2.2720, 1.5146, 3.0655, 3.2251, 3.4866, 1.6016], + device='cuda:2'), covar=tensor([0.1350, 0.2214, 0.1230, 0.1260, 0.0997, 0.0874, 0.1147, 0.2020], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0465, 0.0423, 0.0393, 0.0509, 0.0412, 0.0587, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 01:39:15,761 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:30,800 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8525, 1.0639, 1.6417, 1.6760, 2.4216, 4.0622, 4.3357, 4.6873], + device='cuda:2'), covar=tensor([0.1365, 0.3895, 0.3597, 0.1963, 0.0557, 0.0238, 0.0189, 0.0105], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0272, 0.0318, 0.0265, 0.0195, 0.0108, 0.0198, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 01:39:37,218 INFO [train.py:903] (2/4) Epoch 3, batch 6050, loss[loss=0.2945, simple_loss=0.3476, pruned_loss=0.1207, over 19407.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3683, pruned_loss=0.1352, over 3802508.59 frames. ], batch size: 48, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:39:59,307 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+02 7.768e+02 9.451e+02 1.306e+03 2.772e+03, threshold=1.890e+03, percent-clipped=6.0 +2023-04-01 01:40:37,817 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:40:38,852 INFO [train.py:903] (2/4) Epoch 3, batch 6100, loss[loss=0.3009, simple_loss=0.3462, pruned_loss=0.1278, over 17722.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3684, pruned_loss=0.1353, over 3818795.94 frames. ], batch size: 39, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:41:38,891 INFO [train.py:903] (2/4) Epoch 3, batch 6150, loss[loss=0.3089, simple_loss=0.3506, pruned_loss=0.1336, over 14729.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3686, pruned_loss=0.1356, over 3810914.51 frames. ], batch size: 32, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:01,430 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+02 8.602e+02 1.123e+03 1.510e+03 2.312e+03, threshold=2.246e+03, percent-clipped=7.0 +2023-04-01 01:42:04,692 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 01:42:16,404 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 01:42:38,570 INFO [train.py:903] (2/4) Epoch 3, batch 6200, loss[loss=0.3322, simple_loss=0.3811, pruned_loss=0.1417, over 19509.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3685, pruned_loss=0.1355, over 3814592.75 frames. 
], batch size: 64, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:46,178 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19862.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:42:57,212 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:43:15,909 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19887.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:43:39,796 INFO [train.py:903] (2/4) Epoch 3, batch 6250, loss[loss=0.2711, simple_loss=0.3312, pruned_loss=0.1054, over 19597.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3661, pruned_loss=0.1334, over 3823472.81 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:43:51,138 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9969, 1.8575, 1.6905, 1.8088, 1.5703, 1.7794, 0.7980, 1.4716], + device='cuda:2'), covar=tensor([0.0205, 0.0236, 0.0158, 0.0217, 0.0375, 0.0279, 0.0478, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0248, 0.0245, 0.0274, 0.0327, 0.0265, 0.0257, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 01:44:01,759 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+02 7.100e+02 9.208e+02 1.133e+03 3.490e+03, threshold=1.842e+03, percent-clipped=3.0 +2023-04-01 01:44:09,634 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 01:44:40,501 INFO [train.py:903] (2/4) Epoch 3, batch 6300, loss[loss=0.3171, simple_loss=0.3732, pruned_loss=0.1305, over 19514.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.366, pruned_loss=0.1331, over 3821427.49 frames. ], batch size: 64, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:45:07,293 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19980.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:45:42,110 INFO [train.py:903] (2/4) Epoch 3, batch 6350, loss[loss=0.4264, simple_loss=0.4501, pruned_loss=0.2013, over 17491.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3676, pruned_loss=0.1344, over 3825451.32 frames. ], batch size: 101, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:45:54,176 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 01:46:03,319 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.424e+02 9.293e+02 1.158e+03 2.968e+03, threshold=1.859e+03, percent-clipped=5.0 +2023-04-01 01:46:20,207 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20038.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:46:42,146 INFO [train.py:903] (2/4) Epoch 3, batch 6400, loss[loss=0.2732, simple_loss=0.3276, pruned_loss=0.1094, over 19757.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3676, pruned_loss=0.1344, over 3834720.05 frames. ], batch size: 47, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:29,314 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:47:43,492 INFO [train.py:903] (2/4) Epoch 3, batch 6450, loss[loss=0.3165, simple_loss=0.3704, pruned_loss=0.1313, over 18116.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3668, pruned_loss=0.1336, over 3836339.67 frames. 
], batch size: 83, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:43,814 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:05,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.028e+02 7.833e+02 9.380e+02 1.145e+03 2.427e+03, threshold=1.876e+03, percent-clipped=3.0 +2023-04-01 01:48:08,331 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:12,615 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:29,097 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 01:48:39,362 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:44,547 INFO [train.py:903] (2/4) Epoch 3, batch 6500, loss[loss=0.3199, simple_loss=0.3686, pruned_loss=0.1356, over 19611.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.366, pruned_loss=0.1328, over 3837350.66 frames. ], batch size: 50, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:48:52,301 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 01:49:07,001 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4531, 3.9633, 2.4559, 3.6560, 1.0363, 3.5920, 3.6876, 3.8726], + device='cuda:2'), covar=tensor([0.0775, 0.1419, 0.2096, 0.0724, 0.4085, 0.1017, 0.0698, 0.0862], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0302, 0.0346, 0.0274, 0.0350, 0.0294, 0.0248, 0.0292], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 01:49:45,350 INFO [train.py:903] (2/4) Epoch 3, batch 6550, loss[loss=0.3253, simple_loss=0.3834, pruned_loss=0.1336, over 19381.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3669, pruned_loss=0.1331, over 3823573.97 frames. ], batch size: 70, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:50:03,599 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:50:07,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.085e+02 9.596e+02 1.224e+03 2.377e+03, threshold=1.919e+03, percent-clipped=5.0 +2023-04-01 01:50:46,685 INFO [train.py:903] (2/4) Epoch 3, batch 6600, loss[loss=0.2641, simple_loss=0.3224, pruned_loss=0.1029, over 19756.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3667, pruned_loss=0.133, over 3818293.16 frames. ], batch size: 46, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:51:47,897 INFO [train.py:903] (2/4) Epoch 3, batch 6650, loss[loss=0.3344, simple_loss=0.3867, pruned_loss=0.141, over 17124.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.3659, pruned_loss=0.1327, over 3815668.15 frames. 
], batch size: 101, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:52:10,182 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+02 7.672e+02 8.992e+02 1.144e+03 3.524e+03, threshold=1.798e+03, percent-clipped=4.0 +2023-04-01 01:52:42,311 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:52:49,535 INFO [train.py:903] (2/4) Epoch 3, batch 6700, loss[loss=0.2689, simple_loss=0.3165, pruned_loss=0.1107, over 19770.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3661, pruned_loss=0.1326, over 3814344.02 frames. ], batch size: 48, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:53:11,831 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:17,979 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:44,497 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:46,032 INFO [train.py:903] (2/4) Epoch 3, batch 6750, loss[loss=0.3349, simple_loss=0.3853, pruned_loss=0.1422, over 19528.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.366, pruned_loss=0.1333, over 3826627.54 frames. ], batch size: 54, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:54:05,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+02 7.667e+02 1.002e+03 1.269e+03 2.908e+03, threshold=2.004e+03, percent-clipped=6.0 +2023-04-01 01:54:17,381 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:34,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:41,604 INFO [train.py:903] (2/4) Epoch 3, batch 6800, loss[loss=0.2762, simple_loss=0.3316, pruned_loss=0.1104, over 19722.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3653, pruned_loss=0.1325, over 3835414.16 frames. ], batch size: 51, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:55:00,655 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:55:25,483 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 01:55:25,906 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 01:55:29,039 INFO [train.py:903] (2/4) Epoch 4, batch 0, loss[loss=0.3522, simple_loss=0.3918, pruned_loss=0.1563, over 19790.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3918, pruned_loss=0.1563, over 19790.00 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:55:29,039 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 01:55:40,526 INFO [train.py:937] (2/4) Epoch 4, validation: loss=0.2245, simple_loss=0.3205, pruned_loss=0.06426, over 944034.00 frames. +2023-04-01 01:55:40,527 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18297MB +2023-04-01 01:55:53,644 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 01:55:55,139 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20497.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:56:27,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.855e+02 8.790e+02 1.166e+03 2.960e+03, threshold=1.758e+03, percent-clipped=3.0 +2023-04-01 01:56:40,998 INFO [train.py:903] (2/4) Epoch 4, batch 50, loss[loss=0.2996, simple_loss=0.363, pruned_loss=0.1181, over 19527.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3645, pruned_loss=0.1305, over 868586.33 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:14,485 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 01:57:15,794 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:16,009 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:40,113 INFO [train.py:903] (2/4) Epoch 4, batch 100, loss[loss=0.3141, simple_loss=0.3659, pruned_loss=0.1312, over 19657.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3658, pruned_loss=0.1332, over 1521315.49 frames. ], batch size: 58, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:46,178 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20589.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:52,768 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 01:58:05,875 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:58:29,320 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 7.763e+02 9.275e+02 1.175e+03 2.763e+03, threshold=1.855e+03, percent-clipped=7.0 +2023-04-01 01:58:41,721 INFO [train.py:903] (2/4) Epoch 4, batch 150, loss[loss=0.2878, simple_loss=0.3558, pruned_loss=0.1099, over 19526.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3642, pruned_loss=0.1303, over 2036328.30 frames. ], batch size: 56, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:35,949 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20681.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:59:40,166 INFO [train.py:903] (2/4) Epoch 4, batch 200, loss[loss=0.338, simple_loss=0.3887, pruned_loss=0.1436, over 19537.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3646, pruned_loss=0.1308, over 2442219.09 frames. ], batch size: 56, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:41,286 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 02:00:28,682 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 7.097e+02 9.184e+02 1.257e+03 2.857e+03, threshold=1.837e+03, percent-clipped=5.0 +2023-04-01 02:00:39,433 INFO [train.py:903] (2/4) Epoch 4, batch 250, loss[loss=0.2898, simple_loss=0.3537, pruned_loss=0.113, over 19626.00 frames. ], tot_loss[loss=0.3123, simple_loss=0.3641, pruned_loss=0.1303, over 2754074.77 frames. 
], batch size: 57, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:00:57,788 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:02,439 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:24,130 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3709, 1.2591, 1.0364, 1.2781, 1.0913, 1.2789, 1.0816, 1.3208], + device='cuda:2'), covar=tensor([0.0922, 0.1148, 0.1328, 0.0818, 0.1062, 0.0562, 0.0978, 0.0677], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0381, 0.0282, 0.0245, 0.0316, 0.0261, 0.0266, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:01:32,669 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:34,833 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:41,112 INFO [train.py:903] (2/4) Epoch 4, batch 300, loss[loss=0.3404, simple_loss=0.3818, pruned_loss=0.1495, over 17289.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3635, pruned_loss=0.1299, over 2985224.45 frames. ], batch size: 101, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:02:25,205 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:29,138 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 7.559e+02 9.012e+02 1.206e+03 2.235e+03, threshold=1.802e+03, percent-clipped=6.0 +2023-04-01 02:02:30,420 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9983, 4.6870, 5.7437, 5.7476, 1.7602, 5.3144, 4.5806, 5.1781], + device='cuda:2'), covar=tensor([0.0535, 0.0569, 0.0381, 0.0223, 0.3734, 0.0171, 0.0363, 0.0707], + device='cuda:2'), in_proj_covar=tensor([0.0422, 0.0379, 0.0512, 0.0395, 0.0520, 0.0278, 0.0337, 0.0489], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 02:02:31,771 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1665, 1.0863, 1.9188, 1.3400, 2.5218, 2.1699, 2.8085, 1.0140], + device='cuda:2'), covar=tensor([0.1758, 0.2938, 0.1483, 0.1596, 0.1130, 0.1338, 0.1212, 0.2668], + device='cuda:2'), in_proj_covar=tensor([0.0423, 0.0469, 0.0436, 0.0398, 0.0515, 0.0412, 0.0593, 0.0424], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 02:02:40,181 INFO [train.py:903] (2/4) Epoch 4, batch 350, loss[loss=0.2681, simple_loss=0.3396, pruned_loss=0.09831, over 19657.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3625, pruned_loss=0.1294, over 3174448.24 frames. ], batch size: 55, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:02:45,647 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 02:02:52,634 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:54,935 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20847.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:18,371 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20865.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:23,838 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:40,719 INFO [train.py:903] (2/4) Epoch 4, batch 400, loss[loss=0.3055, simple_loss=0.3629, pruned_loss=0.1241, over 19683.00 frames. ], tot_loss[loss=0.3123, simple_loss=0.3641, pruned_loss=0.1302, over 3312230.00 frames. ], batch size: 60, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:03:52,350 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:27,879 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.529e+02 9.870e+02 1.265e+03 2.610e+03, threshold=1.974e+03, percent-clipped=3.0 +2023-04-01 02:04:39,101 INFO [train.py:903] (2/4) Epoch 4, batch 450, loss[loss=0.3922, simple_loss=0.4132, pruned_loss=0.1856, over 12816.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3623, pruned_loss=0.129, over 3425236.59 frames. ], batch size: 135, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:04:41,840 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:58,401 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:12,740 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 02:05:13,145 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:13,948 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 02:05:24,360 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4376, 1.3637, 1.1115, 1.2987, 1.1083, 1.1857, 1.0278, 1.2867], + device='cuda:2'), covar=tensor([0.0842, 0.0912, 0.1199, 0.0688, 0.0907, 0.0662, 0.0968, 0.0695], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0371, 0.0278, 0.0243, 0.0304, 0.0260, 0.0263, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:05:40,652 INFO [train.py:903] (2/4) Epoch 4, batch 500, loss[loss=0.3126, simple_loss=0.3717, pruned_loss=0.1268, over 17245.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3617, pruned_loss=0.1289, over 3524967.56 frames. 
], batch size: 101, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:06:00,987 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1899, 2.1291, 1.5644, 1.7633, 1.4053, 1.7111, 0.2939, 0.9922], + device='cuda:2'), covar=tensor([0.0258, 0.0220, 0.0162, 0.0193, 0.0479, 0.0271, 0.0440, 0.0390], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0259, 0.0258, 0.0287, 0.0341, 0.0277, 0.0261, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:06:27,513 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.944e+02 7.370e+02 9.144e+02 1.191e+03 3.185e+03, threshold=1.829e+03, percent-clipped=4.0 +2023-04-01 02:06:40,248 INFO [train.py:903] (2/4) Epoch 4, batch 550, loss[loss=0.3244, simple_loss=0.3792, pruned_loss=0.1348, over 19390.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3619, pruned_loss=0.1286, over 3593196.56 frames. ], batch size: 70, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:07:17,103 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:23,318 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:39,555 INFO [train.py:903] (2/4) Epoch 4, batch 600, loss[loss=0.2968, simple_loss=0.3631, pruned_loss=0.1153, over 19613.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3633, pruned_loss=0.13, over 3640511.44 frames. ], batch size: 57, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:07:47,084 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2797, 3.0479, 1.9971, 2.8546, 1.0964, 2.8406, 2.7204, 2.8429], + device='cuda:2'), covar=tensor([0.0883, 0.1252, 0.1896, 0.0752, 0.3251, 0.1057, 0.0838, 0.0929], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0296, 0.0340, 0.0273, 0.0339, 0.0294, 0.0252, 0.0289], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:08:19,250 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 02:08:23,048 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21121.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:27,129 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.199e+02 8.023e+02 1.001e+03 1.305e+03 2.804e+03, threshold=2.003e+03, percent-clipped=3.0 +2023-04-01 02:08:39,435 INFO [train.py:903] (2/4) Epoch 4, batch 650, loss[loss=0.2904, simple_loss=0.3516, pruned_loss=0.1146, over 19035.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3637, pruned_loss=0.1302, over 3662421.36 frames. 
], batch size: 69, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:40,602 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:52,196 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:00,269 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:06,879 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8064, 1.5510, 1.4807, 1.9132, 1.8017, 1.6801, 1.4400, 1.9524], + device='cuda:2'), covar=tensor([0.0886, 0.1543, 0.1277, 0.0906, 0.1025, 0.0506, 0.0983, 0.0596], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0365, 0.0274, 0.0245, 0.0302, 0.0254, 0.0259, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:09:29,125 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21176.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:38,635 INFO [train.py:903] (2/4) Epoch 4, batch 700, loss[loss=0.3254, simple_loss=0.3755, pruned_loss=0.1376, over 19787.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3644, pruned_loss=0.1304, over 3692174.53 frames. ], batch size: 56, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:26,834 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 7.118e+02 8.929e+02 1.162e+03 2.438e+03, threshold=1.786e+03, percent-clipped=3.0 +2023-04-01 02:10:40,518 INFO [train.py:903] (2/4) Epoch 4, batch 750, loss[loss=0.345, simple_loss=0.3961, pruned_loss=0.1469, over 18792.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.366, pruned_loss=0.1316, over 3708160.69 frames. ], batch size: 74, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:11:39,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3481, 1.1678, 1.3070, 1.5608, 2.9729, 1.3598, 1.9154, 2.9095], + device='cuda:2'), covar=tensor([0.0332, 0.2273, 0.2396, 0.1428, 0.0428, 0.1731, 0.1072, 0.0409], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0302, 0.0301, 0.0274, 0.0290, 0.0312, 0.0277, 0.0283], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:11:40,042 INFO [train.py:903] (2/4) Epoch 4, batch 800, loss[loss=0.309, simple_loss=0.3672, pruned_loss=0.1254, over 19726.00 frames. ], tot_loss[loss=0.3137, simple_loss=0.3653, pruned_loss=0.131, over 3736981.10 frames. ], batch size: 51, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:11:56,123 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 02:12:10,352 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 02:12:25,412 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:27,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.42 vs. 
limit=5.0 +2023-04-01 02:12:30,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.379e+02 8.113e+02 1.001e+03 1.207e+03 2.017e+03, threshold=2.002e+03, percent-clipped=2.0 +2023-04-01 02:12:35,443 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:40,720 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7385, 3.0885, 3.1908, 3.1777, 1.1071, 2.9480, 2.6293, 2.8479], + device='cuda:2'), covar=tensor([0.0919, 0.0647, 0.0627, 0.0532, 0.3338, 0.0391, 0.0580, 0.1095], + device='cuda:2'), in_proj_covar=tensor([0.0420, 0.0367, 0.0508, 0.0389, 0.0509, 0.0277, 0.0330, 0.0471], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 02:12:41,604 INFO [train.py:903] (2/4) Epoch 4, batch 850, loss[loss=0.3725, simple_loss=0.4115, pruned_loss=0.1667, over 19662.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3639, pruned_loss=0.1295, over 3765630.71 frames. ], batch size: 60, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:12:54,272 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:54,379 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:13:30,969 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 02:13:39,991 INFO [train.py:903] (2/4) Epoch 4, batch 900, loss[loss=0.2681, simple_loss=0.3264, pruned_loss=0.1049, over 19736.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3632, pruned_loss=0.1294, over 3780427.45 frames. ], batch size: 51, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:14,505 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:14:28,701 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+02 7.656e+02 9.192e+02 1.106e+03 2.022e+03, threshold=1.838e+03, percent-clipped=1.0 +2023-04-01 02:14:40,651 INFO [train.py:903] (2/4) Epoch 4, batch 950, loss[loss=0.286, simple_loss=0.3369, pruned_loss=0.1176, over 19420.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3608, pruned_loss=0.128, over 3796073.70 frames. ], batch size: 48, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:43,030 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 02:15:29,266 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9762, 1.4362, 1.4910, 1.8820, 1.9427, 1.6894, 1.4766, 1.9410], + device='cuda:2'), covar=tensor([0.0756, 0.1612, 0.1198, 0.0814, 0.0906, 0.0494, 0.0983, 0.0587], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0372, 0.0274, 0.0248, 0.0305, 0.0259, 0.0263, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:15:35,415 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:15:40,662 INFO [train.py:903] (2/4) Epoch 4, batch 1000, loss[loss=0.4406, simple_loss=0.4472, pruned_loss=0.217, over 13001.00 frames. ], tot_loss[loss=0.3092, simple_loss=0.3617, pruned_loss=0.1283, over 3792370.35 frames. 
], batch size: 136, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:15,655 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7946, 4.1623, 4.4455, 4.4235, 1.5351, 3.9736, 3.4150, 3.9662], + device='cuda:2'), covar=tensor([0.0815, 0.0517, 0.0464, 0.0331, 0.3895, 0.0273, 0.0514, 0.1003], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0374, 0.0508, 0.0393, 0.0516, 0.0281, 0.0336, 0.0485], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 02:16:29,781 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.436e+02 9.242e+02 1.292e+03 2.692e+03, threshold=1.848e+03, percent-clipped=7.0 +2023-04-01 02:16:33,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 02:16:33,648 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:16:40,204 INFO [train.py:903] (2/4) Epoch 4, batch 1050, loss[loss=0.2479, simple_loss=0.3116, pruned_loss=0.09213, over 19723.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3612, pruned_loss=0.1279, over 3803050.59 frames. ], batch size: 46, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:14,226 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 02:17:40,184 INFO [train.py:903] (2/4) Epoch 4, batch 1100, loss[loss=0.2849, simple_loss=0.3515, pruned_loss=0.1091, over 19587.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3594, pruned_loss=0.1263, over 3817711.84 frames. ], batch size: 52, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:52,968 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:03,973 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2259, 1.2837, 1.1150, 1.0384, 0.9624, 1.2305, 0.0533, 0.5318], + device='cuda:2'), covar=tensor([0.0237, 0.0239, 0.0139, 0.0184, 0.0488, 0.0190, 0.0408, 0.0362], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0262, 0.0258, 0.0285, 0.0338, 0.0275, 0.0264, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:18:15,031 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:30,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+02 7.921e+02 9.622e+02 1.275e+03 2.981e+03, threshold=1.924e+03, percent-clipped=6.0 +2023-04-01 02:18:36,412 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21631.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:43,707 INFO [train.py:903] (2/4) Epoch 4, batch 1150, loss[loss=0.239, simple_loss=0.2986, pruned_loss=0.08965, over 19796.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3612, pruned_loss=0.1279, over 3805008.91 frames. ], batch size: 48, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:31,577 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:45,559 INFO [train.py:903] (2/4) Epoch 4, batch 1200, loss[loss=0.2651, simple_loss=0.3192, pruned_loss=0.1054, over 19764.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3602, pruned_loss=0.127, over 3807016.65 frames. 
], batch size: 48, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:51,437 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:54,809 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4662, 4.0758, 2.5507, 3.6842, 1.1503, 3.6852, 3.5888, 3.7866], + device='cuda:2'), covar=tensor([0.0575, 0.1132, 0.1691, 0.0641, 0.3750, 0.0826, 0.0710, 0.0782], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0302, 0.0342, 0.0278, 0.0348, 0.0295, 0.0259, 0.0289], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:20:14,908 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 02:20:35,530 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+02 7.515e+02 9.038e+02 1.111e+03 2.454e+03, threshold=1.808e+03, percent-clipped=2.0 +2023-04-01 02:20:45,440 INFO [train.py:903] (2/4) Epoch 4, batch 1250, loss[loss=0.2941, simple_loss=0.3549, pruned_loss=0.1166, over 19511.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3607, pruned_loss=0.1272, over 3811982.36 frames. ], batch size: 54, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:39,814 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:44,786 INFO [train.py:903] (2/4) Epoch 4, batch 1300, loss[loss=0.3213, simple_loss=0.3762, pruned_loss=0.1333, over 19794.00 frames. ], tot_loss[loss=0.3082, simple_loss=0.3613, pruned_loss=0.1275, over 3816079.00 frames. ], batch size: 56, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:45,224 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:49,713 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:08,387 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 02:22:10,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:15,801 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:34,651 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 7.410e+02 9.034e+02 1.121e+03 1.849e+03, threshold=1.807e+03, percent-clipped=1.0 +2023-04-01 02:22:45,109 INFO [train.py:903] (2/4) Epoch 4, batch 1350, loss[loss=0.3532, simple_loss=0.3941, pruned_loss=0.1561, over 19601.00 frames. ], tot_loss[loss=0.307, simple_loss=0.36, pruned_loss=0.127, over 3816837.00 frames. ], batch size: 61, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:23:04,112 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:23,914 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:33,950 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:45,407 INFO [train.py:903] (2/4) Epoch 4, batch 1400, loss[loss=0.3198, simple_loss=0.3766, pruned_loss=0.1315, over 19549.00 frames. 
], tot_loss[loss=0.3093, simple_loss=0.3616, pruned_loss=0.1285, over 3809514.44 frames. ], batch size: 56, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:24:35,710 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.358e+02 7.364e+02 9.517e+02 1.310e+03 2.254e+03, threshold=1.903e+03, percent-clipped=6.0 +2023-04-01 02:24:42,755 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 02:24:46,201 INFO [train.py:903] (2/4) Epoch 4, batch 1450, loss[loss=0.2927, simple_loss=0.3602, pruned_loss=0.1126, over 18771.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3597, pruned_loss=0.1266, over 3823797.93 frames. ], batch size: 74, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:01,770 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 02:25:13,229 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:34,252 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:45,920 INFO [train.py:903] (2/4) Epoch 4, batch 1500, loss[loss=0.3791, simple_loss=0.4024, pruned_loss=0.1779, over 13176.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3613, pruned_loss=0.1278, over 3824695.78 frames. ], batch size: 135, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:49,618 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:26:36,548 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+02 6.805e+02 9.241e+02 1.170e+03 2.581e+03, threshold=1.848e+03, percent-clipped=2.0 +2023-04-01 02:26:47,130 INFO [train.py:903] (2/4) Epoch 4, batch 1550, loss[loss=0.2504, simple_loss=0.3095, pruned_loss=0.09565, over 19769.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3602, pruned_loss=0.1271, over 3824868.52 frames. ], batch size: 47, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:01,644 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:12,261 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-01 02:27:19,357 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:30,496 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:32,824 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:49,226 INFO [train.py:903] (2/4) Epoch 4, batch 1600, loss[loss=0.2999, simple_loss=0.3622, pruned_loss=0.1188, over 19594.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.3615, pruned_loss=0.128, over 3833702.96 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:50,775 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22086.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:55,243 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:10,564 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 02:28:37,647 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:39,601 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+02 8.565e+02 1.081e+03 1.346e+03 3.673e+03, threshold=2.162e+03, percent-clipped=6.0 +2023-04-01 02:28:48,844 INFO [train.py:903] (2/4) Epoch 4, batch 1650, loss[loss=0.2895, simple_loss=0.3463, pruned_loss=0.1164, over 19764.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3613, pruned_loss=0.1283, over 3826885.91 frames. ], batch size: 47, lr: 2.05e-02, grad_scale: 4.0 +2023-04-01 02:28:54,193 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 02:29:04,248 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22148.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:29:49,257 INFO [train.py:903] (2/4) Epoch 4, batch 1700, loss[loss=0.2697, simple_loss=0.3374, pruned_loss=0.101, over 19803.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3597, pruned_loss=0.127, over 3814214.37 frames. ], batch size: 56, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:00,434 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22194.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:23,325 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22212.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:27,870 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 02:30:40,332 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.920e+02 6.276e+02 7.753e+02 9.050e+02 1.909e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 02:30:48,108 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22233.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:49,925 INFO [train.py:903] (2/4) Epoch 4, batch 1750, loss[loss=0.3371, simple_loss=0.3897, pruned_loss=0.1423, over 19747.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3586, pruned_loss=0.126, over 3812892.24 frames. ], batch size: 63, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:57,712 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:15,626 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22255.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:28,796 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 02:31:40,428 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1729, 3.7725, 2.3091, 3.4079, 1.2163, 3.3791, 3.4254, 3.6371], + device='cuda:2'), covar=tensor([0.0778, 0.1239, 0.2191, 0.0746, 0.3791, 0.1061, 0.0762, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0336, 0.0294, 0.0344, 0.0269, 0.0342, 0.0293, 0.0253, 0.0289], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:31:52,437 INFO [train.py:903] (2/4) Epoch 4, batch 1800, loss[loss=0.3008, simple_loss=0.3452, pruned_loss=0.1283, over 19314.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3591, pruned_loss=0.1268, over 3811115.58 frames. 
], batch size: 44, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:32:43,155 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+02 7.313e+02 8.961e+02 1.128e+03 3.443e+03, threshold=1.792e+03, percent-clipped=8.0 +2023-04-01 02:32:43,404 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:44,419 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:46,333 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 02:32:48,649 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22332.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:52,005 INFO [train.py:903] (2/4) Epoch 4, batch 1850, loss[loss=0.3179, simple_loss=0.3781, pruned_loss=0.1289, over 19684.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.3596, pruned_loss=0.1275, over 3804688.34 frames. ], batch size: 58, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:33:04,838 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:13,895 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:24,827 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 02:33:36,226 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:51,846 INFO [train.py:903] (2/4) Epoch 4, batch 1900, loss[loss=0.3925, simple_loss=0.4306, pruned_loss=0.1772, over 19510.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.3598, pruned_loss=0.1277, over 3798772.15 frames. ], batch size: 64, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:34:09,608 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 02:34:14,797 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 02:34:15,088 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:34:39,436 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 02:34:42,868 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.044e+02 7.549e+02 9.520e+02 1.192e+03 3.384e+03, threshold=1.904e+03, percent-clipped=5.0 +2023-04-01 02:34:51,950 INFO [train.py:903] (2/4) Epoch 4, batch 1950, loss[loss=0.3147, simple_loss=0.3733, pruned_loss=0.128, over 19597.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3608, pruned_loss=0.128, over 3808049.97 frames. ], batch size: 57, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:35:08,597 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:35:53,492 INFO [train.py:903] (2/4) Epoch 4, batch 2000, loss[loss=0.2884, simple_loss=0.3455, pruned_loss=0.1156, over 19593.00 frames. ], tot_loss[loss=0.3079, simple_loss=0.3608, pruned_loss=0.1275, over 3798829.37 frames. 
], batch size: 52, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:01,814 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22492.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:06,614 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:37,370 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:45,514 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+02 6.735e+02 8.799e+02 1.102e+03 2.294e+03, threshold=1.760e+03, percent-clipped=1.0 +2023-04-01 02:36:46,724 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 02:36:54,498 INFO [train.py:903] (2/4) Epoch 4, batch 2050, loss[loss=0.4273, simple_loss=0.4403, pruned_loss=0.2071, over 17190.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3602, pruned_loss=0.1267, over 3803420.87 frames. ], batch size: 101, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:58,151 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:04,878 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 02:37:06,035 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 02:37:27,448 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 02:37:45,364 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22577.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:52,338 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:54,202 INFO [train.py:903] (2/4) Epoch 4, batch 2100, loss[loss=0.3258, simple_loss=0.381, pruned_loss=0.1353, over 19664.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3591, pruned_loss=0.1261, over 3795170.19 frames. ], batch size: 60, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:38:10,685 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:20,857 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2714, 1.1732, 1.8366, 1.4050, 2.5041, 2.2079, 2.7281, 0.9803], + device='cuda:2'), covar=tensor([0.1640, 0.2935, 0.1408, 0.1426, 0.1124, 0.1251, 0.1219, 0.2573], + device='cuda:2'), in_proj_covar=tensor([0.0429, 0.0483, 0.0449, 0.0402, 0.0522, 0.0417, 0.0606, 0.0430], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 02:38:21,632 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 02:38:21,982 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:23,343 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:36,192 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 02:38:42,463 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 02:38:44,807 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.772e+02 6.954e+02 8.861e+02 1.126e+03 2.028e+03, threshold=1.772e+03, percent-clipped=3.0 +2023-04-01 02:38:53,762 INFO [train.py:903] (2/4) Epoch 4, batch 2150, loss[loss=0.3148, simple_loss=0.3677, pruned_loss=0.1309, over 19508.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3588, pruned_loss=0.126, over 3809361.95 frames. ], batch size: 64, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:39:17,776 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22653.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:22,220 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22657.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:42,832 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-04-01 02:39:56,340 INFO [train.py:903] (2/4) Epoch 4, batch 2200, loss[loss=0.3506, simple_loss=0.3897, pruned_loss=0.1558, over 19754.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3596, pruned_loss=0.1262, over 3821667.56 frames. ], batch size: 54, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:40:05,447 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:18,029 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:19,021 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9494, 4.3061, 4.5909, 4.5542, 1.4298, 4.1458, 3.8817, 4.1127], + device='cuda:2'), covar=tensor([0.0784, 0.0491, 0.0445, 0.0358, 0.3633, 0.0253, 0.0420, 0.0882], + device='cuda:2'), in_proj_covar=tensor([0.0428, 0.0385, 0.0516, 0.0403, 0.0518, 0.0287, 0.0345, 0.0485], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 02:40:30,195 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:47,455 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+02 6.866e+02 8.417e+02 1.098e+03 2.160e+03, threshold=1.683e+03, percent-clipped=4.0 +2023-04-01 02:40:49,072 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:56,692 INFO [train.py:903] (2/4) Epoch 4, batch 2250, loss[loss=0.2872, simple_loss=0.353, pruned_loss=0.1107, over 19667.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3606, pruned_loss=0.1265, over 3820103.38 frames. 
], batch size: 58, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:41:10,276 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:41:11,695 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9330, 1.9230, 1.7564, 2.7140, 1.8715, 2.4900, 2.4229, 1.8144], + device='cuda:2'), covar=tensor([0.1439, 0.1088, 0.0678, 0.0700, 0.1298, 0.0475, 0.1176, 0.1045], + device='cuda:2'), in_proj_covar=tensor([0.0541, 0.0521, 0.0497, 0.0689, 0.0585, 0.0431, 0.0603, 0.0502], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 02:41:57,018 INFO [train.py:903] (2/4) Epoch 4, batch 2300, loss[loss=0.298, simple_loss=0.3414, pruned_loss=0.1273, over 19780.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3603, pruned_loss=0.1264, over 3828308.31 frames. ], batch size: 48, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:42:06,242 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2755, 2.1181, 1.5756, 1.5461, 1.4435, 1.7359, 0.2867, 0.9587], + device='cuda:2'), covar=tensor([0.0222, 0.0222, 0.0172, 0.0254, 0.0449, 0.0283, 0.0443, 0.0386], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0258, 0.0259, 0.0282, 0.0337, 0.0270, 0.0261, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:42:09,279 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 02:42:31,190 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22812.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:42:47,874 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+02 7.223e+02 9.387e+02 1.200e+03 1.860e+03, threshold=1.877e+03, percent-clipped=8.0 +2023-04-01 02:42:56,887 INFO [train.py:903] (2/4) Epoch 4, batch 2350, loss[loss=0.3137, simple_loss=0.372, pruned_loss=0.1277, over 19762.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3602, pruned_loss=0.1263, over 3823450.22 frames. ], batch size: 63, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:43:06,752 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:31,022 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:32,308 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:38,746 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 02:43:54,375 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 02:43:57,673 INFO [train.py:903] (2/4) Epoch 4, batch 2400, loss[loss=0.3884, simple_loss=0.3945, pruned_loss=0.1911, over 19763.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.359, pruned_loss=0.1254, over 3833002.48 frames. ], batch size: 47, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:44:03,318 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:09,585 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.05 vs. 
limit=5.0 +2023-04-01 02:44:27,652 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:49,611 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+02 7.425e+02 9.466e+02 1.182e+03 3.064e+03, threshold=1.893e+03, percent-clipped=2.0 +2023-04-01 02:44:58,899 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:59,532 INFO [train.py:903] (2/4) Epoch 4, batch 2450, loss[loss=0.3438, simple_loss=0.3882, pruned_loss=0.1497, over 19695.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.359, pruned_loss=0.1253, over 3825433.43 frames. ], batch size: 59, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:45:14,079 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:40,936 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:44,388 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:57,753 INFO [train.py:903] (2/4) Epoch 4, batch 2500, loss[loss=0.3193, simple_loss=0.3745, pruned_loss=0.1321, over 19589.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3575, pruned_loss=0.1245, over 3828210.76 frames. ], batch size: 61, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:46:09,388 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22995.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:16,269 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:48,542 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 7.308e+02 8.619e+02 1.083e+03 2.930e+03, threshold=1.724e+03, percent-clipped=3.0 +2023-04-01 02:46:57,730 INFO [train.py:903] (2/4) Epoch 4, batch 2550, loss[loss=0.27, simple_loss=0.3209, pruned_loss=0.1095, over 19793.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3568, pruned_loss=0.124, over 3836879.43 frames. ], batch size: 48, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:47:49,716 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 02:47:57,389 INFO [train.py:903] (2/4) Epoch 4, batch 2600, loss[loss=0.318, simple_loss=0.3693, pruned_loss=0.1334, over 19690.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3585, pruned_loss=0.1249, over 3825968.86 frames. ], batch size: 53, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:48:33,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23116.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:36,226 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:46,653 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:47,920 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.28 vs. 
limit=5.0 +2023-04-01 02:48:48,300 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+02 6.946e+02 8.555e+02 1.074e+03 2.756e+03, threshold=1.711e+03, percent-clipped=6.0 +2023-04-01 02:48:58,592 INFO [train.py:903] (2/4) Epoch 4, batch 2650, loss[loss=0.3443, simple_loss=0.3889, pruned_loss=0.1499, over 19607.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3594, pruned_loss=0.1259, over 3835033.05 frames. ], batch size: 57, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:49:08,308 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:17,083 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 02:49:22,936 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:58,047 INFO [train.py:903] (2/4) Epoch 4, batch 2700, loss[loss=0.2952, simple_loss=0.3592, pruned_loss=0.1156, over 19485.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.3589, pruned_loss=0.1255, over 3835407.22 frames. ], batch size: 64, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:00,484 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23187.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:50:48,010 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.628e+02 7.002e+02 8.947e+02 1.091e+03 2.361e+03, threshold=1.789e+03, percent-clipped=7.0 +2023-04-01 02:50:57,137 INFO [train.py:903] (2/4) Epoch 4, batch 2750, loss[loss=0.22, simple_loss=0.2864, pruned_loss=0.07675, over 19715.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.358, pruned_loss=0.1251, over 3839014.17 frames. ], batch size: 46, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:57,426 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:11,591 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:41,031 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:54,561 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4817, 1.2927, 1.6918, 1.2950, 2.8268, 3.4517, 3.3346, 3.5925], + device='cuda:2'), covar=tensor([0.1272, 0.2637, 0.2697, 0.1885, 0.0405, 0.0122, 0.0199, 0.0111], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0279, 0.0322, 0.0263, 0.0192, 0.0107, 0.0202, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 02:51:56,437 INFO [train.py:903] (2/4) Epoch 4, batch 2800, loss[loss=0.2574, simple_loss=0.3203, pruned_loss=0.09724, over 19599.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3583, pruned_loss=0.1254, over 3823646.35 frames. 
], batch size: 50, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:52:17,020 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:52:45,201 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+02 7.909e+02 1.044e+03 1.347e+03 2.323e+03, threshold=2.087e+03, percent-clipped=7.0 +2023-04-01 02:52:56,804 INFO [train.py:903] (2/4) Epoch 4, batch 2850, loss[loss=0.3447, simple_loss=0.3827, pruned_loss=0.1533, over 19689.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3588, pruned_loss=0.1259, over 3832296.92 frames. ], batch size: 60, lr: 1.99e-02, grad_scale: 8.0 +2023-04-01 02:53:03,164 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 02:53:41,846 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23372.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:53:44,370 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-01 02:53:56,278 INFO [train.py:903] (2/4) Epoch 4, batch 2900, loss[loss=0.3412, simple_loss=0.3934, pruned_loss=0.1445, over 19665.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3589, pruned_loss=0.1258, over 3824520.50 frames. ], batch size: 58, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:53:56,295 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 02:54:10,063 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:54:45,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 7.852e+02 1.023e+03 1.284e+03 2.319e+03, threshold=2.047e+03, percent-clipped=2.0 +2023-04-01 02:54:53,632 INFO [train.py:903] (2/4) Epoch 4, batch 2950, loss[loss=0.2656, simple_loss=0.3232, pruned_loss=0.104, over 19785.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3582, pruned_loss=0.1253, over 3823220.48 frames. ], batch size: 48, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:20,533 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3603, 1.0940, 1.3168, 1.3153, 2.0837, 1.0513, 1.7586, 2.0069], + device='cuda:2'), covar=tensor([0.0552, 0.2414, 0.2129, 0.1260, 0.0673, 0.1775, 0.1023, 0.0648], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0304, 0.0304, 0.0276, 0.0297, 0.0319, 0.0280, 0.0286], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 02:55:35,104 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:55:45,353 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2068, 3.7913, 2.4251, 3.4587, 1.0182, 3.4615, 3.3790, 3.6214], + device='cuda:2'), covar=tensor([0.0715, 0.1122, 0.1841, 0.0799, 0.4006, 0.1032, 0.0802, 0.0847], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0301, 0.0350, 0.0277, 0.0347, 0.0302, 0.0255, 0.0294], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:55:52,224 INFO [train.py:903] (2/4) Epoch 4, batch 3000, loss[loss=0.2934, simple_loss=0.3558, pruned_loss=0.1155, over 19684.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3562, pruned_loss=0.1234, over 3834293.27 frames. 
], batch size: 59, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:52,224 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 02:56:05,151 INFO [train.py:937] (2/4) Epoch 4, validation: loss=0.2145, simple_loss=0.3118, pruned_loss=0.05862, over 944034.00 frames. +2023-04-01 02:56:05,153 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18297MB +2023-04-01 02:56:09,792 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 02:56:32,363 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,220 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23527.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,881 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+02 6.234e+02 7.977e+02 1.046e+03 2.333e+03, threshold=1.595e+03, percent-clipped=2.0 +2023-04-01 02:57:05,055 INFO [train.py:903] (2/4) Epoch 4, batch 3050, loss[loss=0.2971, simple_loss=0.3568, pruned_loss=0.1186, over 19363.00 frames. ], tot_loss[loss=0.3017, simple_loss=0.3561, pruned_loss=0.1237, over 3827671.17 frames. ], batch size: 70, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:57:26,972 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23552.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:33,689 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:58,297 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:04,165 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:06,131 INFO [train.py:903] (2/4) Epoch 4, batch 3100, loss[loss=0.3042, simple_loss=0.3676, pruned_loss=0.1204, over 19630.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3557, pruned_loss=0.1234, over 3832279.19 frames. ], batch size: 61, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:58:06,522 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:13,281 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:55,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.915e+02 8.546e+02 1.092e+03 2.878e+03, threshold=1.709e+03, percent-clipped=7.0 +2023-04-01 02:59:01,868 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0391, 5.3263, 2.9088, 4.8253, 1.2124, 5.2844, 5.2340, 5.4314], + device='cuda:2'), covar=tensor([0.0439, 0.0975, 0.1810, 0.0584, 0.4021, 0.0667, 0.0541, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0297, 0.0346, 0.0276, 0.0344, 0.0294, 0.0257, 0.0291], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 02:59:03,926 INFO [train.py:903] (2/4) Epoch 4, batch 3150, loss[loss=0.3273, simple_loss=0.383, pruned_loss=0.1358, over 19757.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3568, pruned_loss=0.1243, over 3817487.85 frames. 
], batch size: 54, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:59:28,006 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 03:00:02,534 INFO [train.py:903] (2/4) Epoch 4, batch 3200, loss[loss=0.2341, simple_loss=0.2958, pruned_loss=0.0862, over 18607.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3581, pruned_loss=0.1249, over 3821845.82 frames. ], batch size: 41, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:00:13,199 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23694.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:29,423 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:53,667 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 7.379e+02 9.197e+02 1.143e+03 1.957e+03, threshold=1.839e+03, percent-clipped=5.0 +2023-04-01 03:01:02,114 INFO [train.py:903] (2/4) Epoch 4, batch 3250, loss[loss=0.2776, simple_loss=0.3452, pruned_loss=0.105, over 19665.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3589, pruned_loss=0.1256, over 3807488.75 frames. ], batch size: 55, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:01,892 INFO [train.py:903] (2/4) Epoch 4, batch 3300, loss[loss=0.3621, simple_loss=0.4017, pruned_loss=0.1612, over 19671.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3597, pruned_loss=0.1262, over 3808967.20 frames. ], batch size: 58, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:04,057 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 03:02:54,096 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 7.772e+02 9.614e+02 1.210e+03 2.492e+03, threshold=1.923e+03, percent-clipped=5.0 +2023-04-01 03:03:02,111 INFO [train.py:903] (2/4) Epoch 4, batch 3350, loss[loss=0.2669, simple_loss=0.3422, pruned_loss=0.09576, over 19518.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.36, pruned_loss=0.1262, over 3812780.59 frames. ], batch size: 54, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:03:09,268 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:15,259 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 03:03:19,205 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:40,453 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:04:01,844 INFO [train.py:903] (2/4) Epoch 4, batch 3400, loss[loss=0.3666, simple_loss=0.4025, pruned_loss=0.1654, over 19635.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3588, pruned_loss=0.1253, over 3808813.61 frames. ], batch size: 61, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:04:53,667 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.081e+02 7.350e+02 9.318e+02 1.202e+03 2.145e+03, threshold=1.864e+03, percent-clipped=3.0 +2023-04-01 03:05:01,728 INFO [train.py:903] (2/4) Epoch 4, batch 3450, loss[loss=0.298, simple_loss=0.3564, pruned_loss=0.1198, over 19487.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3597, pruned_loss=0.1259, over 3808288.08 frames. 
], batch size: 64, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:05:01,752 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 03:05:22,649 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:36,138 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:37,580 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 03:05:39,406 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:50,617 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:04,987 INFO [train.py:903] (2/4) Epoch 4, batch 3500, loss[loss=0.2795, simple_loss=0.3474, pruned_loss=0.1058, over 19694.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3594, pruned_loss=0.126, over 3810405.98 frames. ], batch size: 59, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:06:07,778 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:58,143 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+02 7.196e+02 8.612e+02 1.120e+03 2.630e+03, threshold=1.722e+03, percent-clipped=5.0 +2023-04-01 03:07:06,277 INFO [train.py:903] (2/4) Epoch 4, batch 3550, loss[loss=0.2842, simple_loss=0.3502, pruned_loss=0.1091, over 19530.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.3587, pruned_loss=0.1248, over 3803881.54 frames. ], batch size: 54, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:07:28,440 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:07:45,451 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0754, 3.7362, 2.5304, 3.3635, 1.4660, 3.4883, 3.3466, 3.5135], + device='cuda:2'), covar=tensor([0.0709, 0.1037, 0.1804, 0.0735, 0.3229, 0.0942, 0.0816, 0.0990], + device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0297, 0.0345, 0.0280, 0.0350, 0.0299, 0.0263, 0.0295], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 03:07:57,081 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-01 03:08:05,267 INFO [train.py:903] (2/4) Epoch 4, batch 3600, loss[loss=0.3179, simple_loss=0.383, pruned_loss=0.1264, over 19595.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3592, pruned_loss=0.1256, over 3804281.33 frames. ], batch size: 61, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:08:56,907 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+02 7.148e+02 8.733e+02 1.077e+03 2.339e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 03:09:04,842 INFO [train.py:903] (2/4) Epoch 4, batch 3650, loss[loss=0.25, simple_loss=0.3238, pruned_loss=0.0881, over 19577.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3585, pruned_loss=0.1251, over 3812515.52 frames. 
], batch size: 52, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:09:34,971 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0037, 1.9797, 1.9393, 2.8190, 1.9874, 2.6884, 2.5745, 1.8797], + device='cuda:2'), covar=tensor([0.1407, 0.1070, 0.0636, 0.0606, 0.1178, 0.0407, 0.1075, 0.1016], + device='cuda:2'), in_proj_covar=tensor([0.0563, 0.0541, 0.0512, 0.0707, 0.0601, 0.0456, 0.0611, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:10:05,498 INFO [train.py:903] (2/4) Epoch 4, batch 3700, loss[loss=0.3264, simple_loss=0.3767, pruned_loss=0.138, over 19606.00 frames. ], tot_loss[loss=0.305, simple_loss=0.359, pruned_loss=0.1256, over 3816900.77 frames. ], batch size: 57, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:10:16,587 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1848, 1.1963, 1.7863, 1.2961, 2.4405, 2.0435, 2.5415, 0.8542], + device='cuda:2'), covar=tensor([0.1553, 0.2546, 0.1274, 0.1337, 0.0900, 0.1206, 0.1030, 0.2348], + device='cuda:2'), in_proj_covar=tensor([0.0434, 0.0481, 0.0451, 0.0398, 0.0525, 0.0423, 0.0602, 0.0434], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:10:37,976 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5063, 1.5557, 1.5010, 2.0690, 1.4119, 1.7450, 1.8565, 1.4599], + device='cuda:2'), covar=tensor([0.1274, 0.0960, 0.0630, 0.0515, 0.1043, 0.0452, 0.1045, 0.0985], + device='cuda:2'), in_proj_covar=tensor([0.0567, 0.0536, 0.0512, 0.0700, 0.0601, 0.0457, 0.0610, 0.0521], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:10:48,453 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:10:58,949 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.207e+02 9.640e+02 1.134e+03 2.323e+03, threshold=1.928e+03, percent-clipped=6.0 +2023-04-01 03:11:07,278 INFO [train.py:903] (2/4) Epoch 4, batch 3750, loss[loss=0.288, simple_loss=0.3432, pruned_loss=0.1164, over 19582.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3579, pruned_loss=0.1244, over 3825903.81 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:11:20,299 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:12:07,329 INFO [train.py:903] (2/4) Epoch 4, batch 3800, loss[loss=0.2926, simple_loss=0.3556, pruned_loss=0.1148, over 19655.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.357, pruned_loss=0.1234, over 3829803.19 frames. ], batch size: 55, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:12:38,499 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 03:12:53,008 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. limit=5.0 +2023-04-01 03:13:00,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.239e+02 7.507e+02 9.076e+02 1.248e+03 3.254e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-04-01 03:13:07,201 INFO [train.py:903] (2/4) Epoch 4, batch 3850, loss[loss=0.3059, simple_loss=0.3675, pruned_loss=0.1221, over 19673.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3576, pruned_loss=0.1239, over 3829363.48 frames. 
], batch size: 58, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:13:38,834 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6674, 1.4551, 1.4372, 1.8682, 1.7584, 1.6972, 1.4780, 1.7313], + device='cuda:2'), covar=tensor([0.0853, 0.1422, 0.1262, 0.0755, 0.0937, 0.0456, 0.0921, 0.0624], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0373, 0.0282, 0.0249, 0.0309, 0.0254, 0.0272, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:14:06,612 INFO [train.py:903] (2/4) Epoch 4, batch 3900, loss[loss=0.2862, simple_loss=0.3456, pruned_loss=0.1134, over 19658.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3568, pruned_loss=0.1234, over 3831294.93 frames. ], batch size: 58, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:14:11,569 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-04-01 03:14:25,895 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:14:27,392 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2613, 1.2476, 1.8506, 1.3762, 2.2945, 2.0699, 2.5548, 0.8474], + device='cuda:2'), covar=tensor([0.1600, 0.2702, 0.1290, 0.1443, 0.1092, 0.1361, 0.1197, 0.2522], + device='cuda:2'), in_proj_covar=tensor([0.0439, 0.0487, 0.0460, 0.0407, 0.0531, 0.0428, 0.0617, 0.0437], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:15:00,806 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+02 8.238e+02 9.729e+02 1.230e+03 4.971e+03, threshold=1.946e+03, percent-clipped=9.0 +2023-04-01 03:15:09,427 INFO [train.py:903] (2/4) Epoch 4, batch 3950, loss[loss=0.3473, simple_loss=0.3982, pruned_loss=0.1482, over 19299.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3561, pruned_loss=0.1231, over 3838526.68 frames. ], batch size: 66, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:15:17,178 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 03:16:10,711 INFO [train.py:903] (2/4) Epoch 4, batch 4000, loss[loss=0.2944, simple_loss=0.3599, pruned_loss=0.1145, over 19681.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3566, pruned_loss=0.1233, over 3831741.61 frames. ], batch size: 59, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:16:45,801 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24514.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:16:58,877 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 03:17:03,421 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.288e+02 6.850e+02 8.525e+02 1.041e+03 2.187e+03, threshold=1.705e+03, percent-clipped=1.0 +2023-04-01 03:17:08,325 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.7208, 0.8822, 0.6982, 0.7160, 0.8576, 0.5999, 0.4745, 0.8823], + device='cuda:2'), covar=tensor([0.0323, 0.0374, 0.0606, 0.0303, 0.0280, 0.0700, 0.0451, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0249, 0.0312, 0.0241, 0.0217, 0.0308, 0.0278, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:17:09,921 INFO [train.py:903] (2/4) Epoch 4, batch 4050, loss[loss=0.3509, simple_loss=0.3871, pruned_loss=0.1574, over 19693.00 frames. ], tot_loss[loss=0.304, simple_loss=0.358, pruned_loss=0.125, over 3807531.85 frames. ], batch size: 53, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:17:47,520 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24565.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:17:47,594 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7695, 1.4728, 1.7195, 1.6950, 3.1236, 4.3951, 4.5131, 4.8596], + device='cuda:2'), covar=tensor([0.1199, 0.2586, 0.2682, 0.1629, 0.0382, 0.0125, 0.0132, 0.0054], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0283, 0.0325, 0.0260, 0.0192, 0.0110, 0.0205, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 03:18:10,043 INFO [train.py:903] (2/4) Epoch 4, batch 4100, loss[loss=0.2625, simple_loss=0.34, pruned_loss=0.09254, over 19310.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3555, pruned_loss=0.1232, over 3812710.53 frames. ], batch size: 66, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:18:12,599 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:49,627 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 03:19:04,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+02 6.813e+02 8.989e+02 1.047e+03 2.179e+03, threshold=1.798e+03, percent-clipped=2.0 +2023-04-01 03:19:12,696 INFO [train.py:903] (2/4) Epoch 4, batch 4150, loss[loss=0.3179, simple_loss=0.3707, pruned_loss=0.1326, over 17683.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3562, pruned_loss=0.1237, over 3793670.06 frames. ], batch size: 101, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:13,432 INFO [train.py:903] (2/4) Epoch 4, batch 4200, loss[loss=0.321, simple_loss=0.3784, pruned_loss=0.1318, over 19309.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3555, pruned_loss=0.123, over 3779628.01 frames. ], batch size: 66, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:19,911 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-01 03:20:33,568 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2715, 1.0277, 1.0706, 1.5051, 1.1346, 1.2721, 1.3261, 1.2594], + device='cuda:2'), covar=tensor([0.0878, 0.1241, 0.1200, 0.0794, 0.0964, 0.0906, 0.0889, 0.0797], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0269, 0.0260, 0.0298, 0.0299, 0.0247, 0.0255, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 03:21:05,877 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 7.220e+02 8.850e+02 1.090e+03 2.101e+03, threshold=1.770e+03, percent-clipped=3.0 +2023-04-01 03:21:12,814 INFO [train.py:903] (2/4) Epoch 4, batch 4250, loss[loss=0.2342, simple_loss=0.2964, pruned_loss=0.086, over 19036.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3562, pruned_loss=0.1239, over 3784215.16 frames. ], batch size: 42, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:21:29,766 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 03:21:41,561 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 03:21:56,456 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:10,833 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24783.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:12,642 INFO [train.py:903] (2/4) Epoch 4, batch 4300, loss[loss=0.3518, simple_loss=0.3898, pruned_loss=0.1569, over 13079.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3555, pruned_loss=0.1236, over 3785267.26 frames. ], batch size: 136, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:22:26,981 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:23:06,670 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.009e+02 7.093e+02 8.869e+02 1.163e+03 2.104e+03, threshold=1.774e+03, percent-clipped=1.0 +2023-04-01 03:23:08,964 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 03:23:14,545 INFO [train.py:903] (2/4) Epoch 4, batch 4350, loss[loss=0.3281, simple_loss=0.383, pruned_loss=0.1366, over 19672.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3553, pruned_loss=0.1231, over 3800126.58 frames. 
], batch size: 55, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:23:23,220 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2115, 3.6437, 3.8202, 3.7264, 1.2116, 3.3040, 3.0365, 3.4617], + device='cuda:2'), covar=tensor([0.0837, 0.0551, 0.0532, 0.0459, 0.3671, 0.0436, 0.0532, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0455, 0.0395, 0.0537, 0.0427, 0.0530, 0.0309, 0.0348, 0.0513], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 03:23:52,173 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:05,240 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0378, 1.1265, 1.2894, 0.5817, 2.3853, 2.3976, 2.1154, 2.5385], + device='cuda:2'), covar=tensor([0.1302, 0.2780, 0.2904, 0.1998, 0.0339, 0.0159, 0.0352, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0282, 0.0323, 0.0259, 0.0195, 0.0112, 0.0208, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 03:24:15,082 INFO [train.py:903] (2/4) Epoch 4, batch 4400, loss[loss=0.2546, simple_loss=0.3176, pruned_loss=0.09577, over 19765.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3551, pruned_loss=0.1228, over 3801308.40 frames. ], batch size: 48, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:24:40,899 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 03:24:44,304 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:50,767 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 03:24:53,145 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9447, 1.1424, 1.3189, 1.5642, 2.6163, 1.3480, 1.8791, 2.5383], + device='cuda:2'), covar=tensor([0.0427, 0.2603, 0.2321, 0.1394, 0.0535, 0.1899, 0.1113, 0.0503], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0316, 0.0310, 0.0285, 0.0304, 0.0322, 0.0288, 0.0294], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:25:09,649 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.331e+02 8.078e+02 9.889e+02 1.280e+03 3.768e+03, threshold=1.978e+03, percent-clipped=10.0 +2023-04-01 03:25:12,027 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:25:16,499 INFO [train.py:903] (2/4) Epoch 4, batch 4450, loss[loss=0.2921, simple_loss=0.3586, pruned_loss=0.1128, over 19663.00 frames. ], tot_loss[loss=0.3017, simple_loss=0.3559, pruned_loss=0.1237, over 3804927.19 frames. ], batch size: 55, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:17,088 INFO [train.py:903] (2/4) Epoch 4, batch 4500, loss[loss=0.224, simple_loss=0.2921, pruned_loss=0.07794, over 19472.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3546, pruned_loss=0.1226, over 3806266.05 frames. 
], batch size: 49, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:52,434 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:04,617 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:11,028 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.968e+02 6.473e+02 7.865e+02 1.057e+03 2.211e+03, threshold=1.573e+03, percent-clipped=1.0 +2023-04-01 03:27:18,546 INFO [train.py:903] (2/4) Epoch 4, batch 4550, loss[loss=0.283, simple_loss=0.352, pruned_loss=0.1069, over 19654.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3531, pruned_loss=0.1214, over 3816201.48 frames. ], batch size: 55, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:27:27,111 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 03:27:31,924 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:50,579 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 03:27:54,641 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 03:28:18,977 INFO [train.py:903] (2/4) Epoch 4, batch 4600, loss[loss=0.3015, simple_loss=0.3627, pruned_loss=0.1202, over 19767.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.355, pruned_loss=0.1226, over 3801370.36 frames. ], batch size: 56, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:29:10,648 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:29:12,893 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+02 7.418e+02 9.211e+02 1.176e+03 2.853e+03, threshold=1.842e+03, percent-clipped=7.0 +2023-04-01 03:29:20,335 INFO [train.py:903] (2/4) Epoch 4, batch 4650, loss[loss=0.3373, simple_loss=0.3815, pruned_loss=0.1466, over 19669.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.354, pruned_loss=0.1214, over 3805360.10 frames. ], batch size: 53, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:29:37,196 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 03:29:46,706 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 03:30:19,352 INFO [train.py:903] (2/4) Epoch 4, batch 4700, loss[loss=0.3221, simple_loss=0.3698, pruned_loss=0.1372, over 19782.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.356, pruned_loss=0.1229, over 3793748.99 frames. ], batch size: 56, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:30:42,838 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 03:30:50,629 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:13,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+02 7.587e+02 9.394e+02 1.259e+03 3.233e+03, threshold=1.879e+03, percent-clipped=11.0 +2023-04-01 03:31:21,430 INFO [train.py:903] (2/4) Epoch 4, batch 4750, loss[loss=0.3407, simple_loss=0.3801, pruned_loss=0.1506, over 13666.00 frames. 
], tot_loss[loss=0.3011, simple_loss=0.3559, pruned_loss=0.1232, over 3792317.31 frames. ], batch size: 136, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:31:30,706 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25242.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:39,442 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9496, 1.3771, 1.5008, 2.0899, 1.8142, 1.7721, 1.7217, 1.8190], + device='cuda:2'), covar=tensor([0.0737, 0.1564, 0.1153, 0.0715, 0.0995, 0.0459, 0.0803, 0.0571], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0376, 0.0284, 0.0250, 0.0312, 0.0262, 0.0275, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:31:44,151 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 03:32:16,232 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:21,434 INFO [train.py:903] (2/4) Epoch 4, batch 4800, loss[loss=0.2843, simple_loss=0.3451, pruned_loss=0.1118, over 19758.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3584, pruned_loss=0.1254, over 3782889.73 frames. ], batch size: 54, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:32:27,818 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 03:32:41,284 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:44,317 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:00,525 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2000, 3.5031, 3.6615, 3.6366, 1.5904, 3.3657, 3.0727, 3.2932], + device='cuda:2'), covar=tensor([0.0902, 0.0819, 0.0609, 0.0502, 0.3282, 0.0451, 0.0473, 0.1120], + device='cuda:2'), in_proj_covar=tensor([0.0457, 0.0404, 0.0537, 0.0436, 0.0529, 0.0310, 0.0347, 0.0505], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 03:33:09,560 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:11,873 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:13,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+02 7.095e+02 8.715e+02 1.261e+03 2.828e+03, threshold=1.743e+03, percent-clipped=4.0 +2023-04-01 03:33:21,559 INFO [train.py:903] (2/4) Epoch 4, batch 4850, loss[loss=0.299, simple_loss=0.3517, pruned_loss=0.1232, over 19748.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.3569, pruned_loss=0.1245, over 3780649.50 frames. ], batch size: 51, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:33:45,828 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 03:33:48,282 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:34:04,847 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-01 03:34:11,330 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 03:34:12,502 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 03:34:21,576 INFO [train.py:903] (2/4) Epoch 4, batch 4900, loss[loss=0.3055, simple_loss=0.3609, pruned_loss=0.125, over 19577.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3566, pruned_loss=0.1241, over 3791284.13 frames. ], batch size: 61, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:34:21,595 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 03:34:34,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6275, 1.3151, 2.0261, 1.4643, 2.9609, 4.6267, 4.6627, 5.0821], + device='cuda:2'), covar=tensor([0.1237, 0.2566, 0.2360, 0.1682, 0.0398, 0.0093, 0.0110, 0.0051], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0278, 0.0317, 0.0257, 0.0195, 0.0113, 0.0201, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 03:34:41,698 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 03:35:15,146 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2686, 3.9169, 2.5274, 3.6292, 1.1758, 3.6242, 3.5343, 3.7308], + device='cuda:2'), covar=tensor([0.0587, 0.0970, 0.1719, 0.0581, 0.3517, 0.0808, 0.0686, 0.0798], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0288, 0.0348, 0.0272, 0.0343, 0.0293, 0.0263, 0.0295], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 03:35:16,119 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.171e+02 7.094e+02 8.660e+02 1.069e+03 1.655e+03, threshold=1.732e+03, percent-clipped=0.0 +2023-04-01 03:35:23,612 INFO [train.py:903] (2/4) Epoch 4, batch 4950, loss[loss=0.275, simple_loss=0.347, pruned_loss=0.1015, over 19519.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3553, pruned_loss=0.123, over 3807102.67 frames. ], batch size: 54, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:35:32,098 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1750, 1.2222, 1.5028, 1.2690, 1.8058, 1.7636, 1.8993, 0.5512], + device='cuda:2'), covar=tensor([0.1599, 0.2423, 0.1273, 0.1341, 0.0931, 0.1360, 0.0987, 0.2255], + device='cuda:2'), in_proj_covar=tensor([0.0445, 0.0494, 0.0458, 0.0407, 0.0531, 0.0435, 0.0614, 0.0430], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:35:37,175 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25446.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:35:41,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 03:36:04,457 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-01 03:36:08,235 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:19,306 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6064, 1.5811, 1.5885, 2.1696, 1.4276, 1.7303, 2.0048, 1.5297], + device='cuda:2'), covar=tensor([0.1312, 0.1090, 0.0700, 0.0595, 0.1157, 0.0523, 0.1223, 0.1103], + device='cuda:2'), in_proj_covar=tensor([0.0566, 0.0551, 0.0519, 0.0714, 0.0613, 0.0471, 0.0622, 0.0529], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:36:24,198 INFO [train.py:903] (2/4) Epoch 4, batch 5000, loss[loss=0.2259, simple_loss=0.2865, pruned_loss=0.08261, over 18648.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3552, pruned_loss=0.1226, over 3806552.49 frames. ], batch size: 41, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:36:33,154 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 03:36:40,182 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:44,427 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 03:37:11,018 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25523.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:37:17,569 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.823e+02 8.824e+02 1.078e+03 2.588e+03, threshold=1.765e+03, percent-clipped=9.0 +2023-04-01 03:37:24,352 INFO [train.py:903] (2/4) Epoch 4, batch 5050, loss[loss=0.2935, simple_loss=0.3514, pruned_loss=0.1178, over 19536.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3538, pruned_loss=0.1219, over 3815065.57 frames. ], batch size: 54, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:02,132 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 03:38:21,768 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:38:25,841 INFO [train.py:903] (2/4) Epoch 4, batch 5100, loss[loss=0.2766, simple_loss=0.33, pruned_loss=0.1116, over 19399.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3534, pruned_loss=0.121, over 3815016.12 frames. ], batch size: 48, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:36,807 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 03:38:40,930 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 03:38:44,277 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 03:38:52,406 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:39:19,440 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.903e+02 6.354e+02 8.688e+02 1.168e+03 2.387e+03, threshold=1.738e+03, percent-clipped=4.0 +2023-04-01 03:39:27,225 INFO [train.py:903] (2/4) Epoch 4, batch 5150, loss[loss=0.276, simple_loss=0.3413, pruned_loss=0.1053, over 19512.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3545, pruned_loss=0.1216, over 3796555.50 frames. 
], batch size: 56, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:39:39,302 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 03:40:12,892 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 03:40:28,120 INFO [train.py:903] (2/4) Epoch 4, batch 5200, loss[loss=0.3034, simple_loss=0.3674, pruned_loss=0.1197, over 19644.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3546, pruned_loss=0.1216, over 3806980.48 frames. ], batch size: 58, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:40:42,797 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 03:41:21,061 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:41:21,774 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 7.304e+02 9.145e+02 1.165e+03 2.884e+03, threshold=1.829e+03, percent-clipped=6.0 +2023-04-01 03:41:25,407 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 03:41:28,713 INFO [train.py:903] (2/4) Epoch 4, batch 5250, loss[loss=0.3633, simple_loss=0.4017, pruned_loss=0.1624, over 19756.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3528, pruned_loss=0.1209, over 3796232.00 frames. ], batch size: 54, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:41:51,432 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:41:57,311 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-01 03:42:30,029 INFO [train.py:903] (2/4) Epoch 4, batch 5300, loss[loss=0.288, simple_loss=0.3366, pruned_loss=0.1197, over 19089.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3533, pruned_loss=0.1212, over 3799463.09 frames. ], batch size: 42, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:42:36,784 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25790.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:42:48,057 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8681, 1.1498, 1.3262, 1.4609, 2.4876, 1.0085, 1.8574, 2.5805], + device='cuda:2'), covar=tensor([0.0502, 0.2532, 0.2482, 0.1464, 0.0682, 0.2130, 0.1058, 0.0481], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0310, 0.0306, 0.0281, 0.0303, 0.0314, 0.0283, 0.0291], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:42:48,920 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 03:43:23,330 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 7.857e+02 9.756e+02 1.201e+03 3.803e+03, threshold=1.951e+03, percent-clipped=8.0 +2023-04-01 03:43:31,869 INFO [train.py:903] (2/4) Epoch 4, batch 5350, loss[loss=0.4409, simple_loss=0.4412, pruned_loss=0.2203, over 13402.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.3555, pruned_loss=0.1227, over 3813493.53 frames. ], batch size: 137, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:43:44,654 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-04-01 03:43:48,748 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6882, 3.0681, 3.1248, 3.1154, 1.1049, 2.8441, 2.6133, 2.7165], + device='cuda:2'), covar=tensor([0.1100, 0.0745, 0.0709, 0.0631, 0.3749, 0.0560, 0.0642, 0.1319], + device='cuda:2'), in_proj_covar=tensor([0.0463, 0.0408, 0.0542, 0.0437, 0.0535, 0.0317, 0.0360, 0.0514], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 03:44:04,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 03:44:10,546 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 03:44:21,926 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25876.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:44:32,192 INFO [train.py:903] (2/4) Epoch 4, batch 5400, loss[loss=0.267, simple_loss=0.3308, pruned_loss=0.1017, over 19473.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3556, pruned_loss=0.1231, over 3815001.23 frames. ], batch size: 49, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:56,634 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:45:26,555 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.180e+02 6.939e+02 8.636e+02 1.056e+03 2.577e+03, threshold=1.727e+03, percent-clipped=2.0 +2023-04-01 03:45:33,337 INFO [train.py:903] (2/4) Epoch 4, batch 5450, loss[loss=0.3523, simple_loss=0.3855, pruned_loss=0.1596, over 19773.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3567, pruned_loss=0.1241, over 3794739.95 frames. ], batch size: 54, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:46:34,626 INFO [train.py:903] (2/4) Epoch 4, batch 5500, loss[loss=0.3252, simple_loss=0.3755, pruned_loss=0.1375, over 18830.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3564, pruned_loss=0.124, over 3803158.49 frames. ], batch size: 74, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:46:58,092 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 03:47:31,759 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.568e+02 9.013e+02 1.115e+03 1.816e+03, threshold=1.803e+03, percent-clipped=1.0 +2023-04-01 03:47:37,484 INFO [train.py:903] (2/4) Epoch 4, batch 5550, loss[loss=0.2885, simple_loss=0.3557, pruned_loss=0.1107, over 19557.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.354, pruned_loss=0.1217, over 3816764.70 frames. ], batch size: 61, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:47:45,453 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 03:47:49,025 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5987, 4.0381, 4.2707, 4.2725, 1.3763, 3.9276, 3.4480, 3.7309], + device='cuda:2'), covar=tensor([0.0974, 0.0557, 0.0524, 0.0465, 0.3959, 0.0356, 0.0495, 0.1171], + device='cuda:2'), in_proj_covar=tensor([0.0463, 0.0413, 0.0548, 0.0445, 0.0540, 0.0320, 0.0362, 0.0517], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 03:48:08,115 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3754, 2.4076, 1.5902, 1.6695, 2.2028, 1.2127, 1.1882, 1.7831], + device='cuda:2'), covar=tensor([0.0857, 0.0436, 0.0881, 0.0449, 0.0354, 0.1001, 0.0715, 0.0400], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0258, 0.0315, 0.0243, 0.0218, 0.0312, 0.0282, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:48:33,402 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 03:48:38,941 INFO [train.py:903] (2/4) Epoch 4, batch 5600, loss[loss=0.3065, simple_loss=0.3674, pruned_loss=0.1228, over 18663.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3539, pruned_loss=0.1213, over 3810190.76 frames. ], batch size: 74, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:49:11,658 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 03:49:22,497 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:49:34,176 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+02 7.669e+02 9.359e+02 1.114e+03 3.409e+03, threshold=1.872e+03, percent-clipped=3.0 +2023-04-01 03:49:40,055 INFO [train.py:903] (2/4) Epoch 4, batch 5650, loss[loss=0.2971, simple_loss=0.3567, pruned_loss=0.1188, over 19492.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3551, pruned_loss=0.1216, over 3825405.80 frames. ], batch size: 64, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:49:44,977 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8856, 4.3152, 4.5713, 4.5059, 1.4830, 4.1477, 3.7420, 4.0557], + device='cuda:2'), covar=tensor([0.0854, 0.0591, 0.0469, 0.0378, 0.3934, 0.0334, 0.0478, 0.1005], + device='cuda:2'), in_proj_covar=tensor([0.0456, 0.0404, 0.0533, 0.0434, 0.0530, 0.0311, 0.0354, 0.0506], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 03:50:12,602 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26161.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:50:25,378 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 03:50:40,062 INFO [train.py:903] (2/4) Epoch 4, batch 5700, loss[loss=0.2963, simple_loss=0.3628, pruned_loss=0.1149, over 19679.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3563, pruned_loss=0.1222, over 3829435.70 frames. 
], batch size: 59, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:42,179 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:50:42,287 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26186.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:51:23,011 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26220.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:35,340 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.235e+02 7.760e+02 9.507e+02 1.157e+03 2.773e+03, threshold=1.901e+03, percent-clipped=5.0 +2023-04-01 03:51:39,945 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 03:51:40,953 INFO [train.py:903] (2/4) Epoch 4, batch 5750, loss[loss=0.3348, simple_loss=0.3868, pruned_loss=0.1414, over 18699.00 frames. ], tot_loss[loss=0.3017, simple_loss=0.3571, pruned_loss=0.1231, over 3825312.89 frames. ], batch size: 74, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:51:48,700 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 03:51:50,014 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3178, 1.2444, 1.3277, 1.6713, 2.9039, 1.1781, 1.8284, 2.9132], + device='cuda:2'), covar=tensor([0.0330, 0.2355, 0.2323, 0.1353, 0.0524, 0.1962, 0.1133, 0.0406], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0308, 0.0304, 0.0280, 0.0300, 0.0310, 0.0286, 0.0288], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:51:52,990 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 03:52:04,018 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2113, 1.2607, 1.8187, 1.3891, 2.3859, 2.1143, 2.6922, 1.0824], + device='cuda:2'), covar=tensor([0.1754, 0.2781, 0.1441, 0.1503, 0.1251, 0.1462, 0.1406, 0.2659], + device='cuda:2'), in_proj_covar=tensor([0.0438, 0.0494, 0.0455, 0.0410, 0.0536, 0.0440, 0.0613, 0.0434], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 03:52:13,181 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. limit=5.0 +2023-04-01 03:52:21,194 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:52:40,306 INFO [train.py:903] (2/4) Epoch 4, batch 5800, loss[loss=0.3732, simple_loss=0.4079, pruned_loss=0.1693, over 13500.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3569, pruned_loss=0.1231, over 3826155.95 frames. ], batch size: 137, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:53:36,643 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.787e+02 8.542e+02 1.114e+03 2.576e+03, threshold=1.708e+03, percent-clipped=4.0 +2023-04-01 03:53:41,252 INFO [train.py:903] (2/4) Epoch 4, batch 5850, loss[loss=0.3044, simple_loss=0.3644, pruned_loss=0.1221, over 19780.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3566, pruned_loss=0.1225, over 3831214.86 frames. 
], batch size: 56, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:53:41,612 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26335.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:54:40,809 INFO [train.py:903] (2/4) Epoch 4, batch 5900, loss[loss=0.2958, simple_loss=0.3688, pruned_loss=0.1115, over 19607.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3555, pruned_loss=0.1219, over 3832892.83 frames. ], batch size: 57, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:54:41,849 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 03:55:03,853 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 03:55:06,496 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2225, 1.1141, 1.0705, 1.3156, 1.0333, 1.2290, 1.3999, 1.2104], + device='cuda:2'), covar=tensor([0.0908, 0.1132, 0.1239, 0.0891, 0.0965, 0.1022, 0.0912, 0.0839], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0262, 0.0254, 0.0292, 0.0287, 0.0242, 0.0251, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 03:55:37,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+02 6.673e+02 8.574e+02 1.115e+03 3.080e+03, threshold=1.715e+03, percent-clipped=4.0 +2023-04-01 03:55:39,792 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:55:43,002 INFO [train.py:903] (2/4) Epoch 4, batch 5950, loss[loss=0.255, simple_loss=0.3249, pruned_loss=0.09255, over 19848.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3543, pruned_loss=0.1211, over 3838198.57 frames. ], batch size: 52, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:56:17,508 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:56:43,854 INFO [train.py:903] (2/4) Epoch 4, batch 6000, loss[loss=0.3341, simple_loss=0.3859, pruned_loss=0.1411, over 18179.00 frames. ], tot_loss[loss=0.2959, simple_loss=0.3522, pruned_loss=0.1198, over 3841723.32 frames. ], batch size: 83, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:56:43,855 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 03:56:57,361 INFO [train.py:937] (2/4) Epoch 4, validation: loss=0.2103, simple_loss=0.3081, pruned_loss=0.05622, over 944034.00 frames. 
+2023-04-01 03:56:57,362 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 03:57:09,389 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0566, 5.3801, 2.9534, 4.8271, 1.4693, 5.2847, 5.3211, 5.5270], + device='cuda:2'), covar=tensor([0.0444, 0.0971, 0.1746, 0.0582, 0.3550, 0.0646, 0.0542, 0.0621], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0299, 0.0351, 0.0278, 0.0341, 0.0296, 0.0264, 0.0294], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 03:57:51,992 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:57:52,992 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+02 7.085e+02 8.722e+02 1.145e+03 2.334e+03, threshold=1.744e+03, percent-clipped=4.0 +2023-04-01 03:57:57,465 INFO [train.py:903] (2/4) Epoch 4, batch 6050, loss[loss=0.3304, simple_loss=0.3772, pruned_loss=0.1419, over 18198.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3533, pruned_loss=0.1199, over 3849821.56 frames. ], batch size: 83, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:58:49,942 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:58:57,303 INFO [train.py:903] (2/4) Epoch 4, batch 6100, loss[loss=0.2708, simple_loss=0.3402, pruned_loss=0.1007, over 19662.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3531, pruned_loss=0.1199, over 3839579.15 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:59:04,713 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26591.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:59:09,029 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1055, 1.0152, 1.3767, 0.8711, 2.4333, 2.8962, 2.7804, 3.1145], + device='cuda:2'), covar=tensor([0.1407, 0.3024, 0.2863, 0.2046, 0.0413, 0.0137, 0.0250, 0.0126], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0284, 0.0317, 0.0260, 0.0195, 0.0115, 0.0202, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 03:59:09,983 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3169, 1.2964, 1.3896, 1.7800, 2.9138, 1.0954, 1.8943, 3.0135], + device='cuda:2'), covar=tensor([0.0366, 0.2254, 0.2194, 0.1290, 0.0523, 0.2051, 0.1159, 0.0384], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0306, 0.0300, 0.0282, 0.0301, 0.0311, 0.0283, 0.0293], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 03:59:29,867 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:59:34,653 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26616.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:59:44,275 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1360, 1.1486, 1.6796, 1.2871, 2.4716, 2.0323, 2.5922, 0.9622], + device='cuda:2'), covar=tensor([0.1576, 0.2583, 0.1317, 0.1308, 0.0959, 0.1286, 0.1002, 0.2251], + device='cuda:2'), in_proj_covar=tensor([0.0434, 0.0486, 0.0452, 0.0404, 0.0530, 0.0431, 0.0601, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + 
device='cuda:2') +2023-04-01 03:59:51,599 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 6.768e+02 7.963e+02 1.032e+03 2.370e+03, threshold=1.593e+03, percent-clipped=5.0 +2023-04-01 03:59:56,257 INFO [train.py:903] (2/4) Epoch 4, batch 6150, loss[loss=0.3041, simple_loss=0.3646, pruned_loss=0.1218, over 19782.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3548, pruned_loss=0.1209, over 3842618.10 frames. ], batch size: 56, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:00:08,590 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:00:23,790 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 04:00:52,201 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7884, 1.7802, 1.7163, 2.6941, 1.6436, 2.3339, 2.3877, 1.6590], + device='cuda:2'), covar=tensor([0.1629, 0.1347, 0.0780, 0.0669, 0.1453, 0.0514, 0.1336, 0.1327], + device='cuda:2'), in_proj_covar=tensor([0.0589, 0.0571, 0.0539, 0.0736, 0.0633, 0.0492, 0.0638, 0.0547], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 04:00:54,794 INFO [train.py:903] (2/4) Epoch 4, batch 6200, loss[loss=0.3526, simple_loss=0.3913, pruned_loss=0.157, over 19133.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3543, pruned_loss=0.1201, over 3848875.74 frames. ], batch size: 69, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:01:45,274 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26727.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:01:51,167 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+02 7.340e+02 8.907e+02 1.200e+03 2.630e+03, threshold=1.781e+03, percent-clipped=5.0 +2023-04-01 04:01:55,635 INFO [train.py:903] (2/4) Epoch 4, batch 6250, loss[loss=0.3033, simple_loss=0.3686, pruned_loss=0.119, over 19604.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3543, pruned_loss=0.1207, over 3840356.08 frames. ], batch size: 61, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:02:24,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 04:02:44,565 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:02:52,591 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2911, 3.7572, 2.3824, 2.9440, 3.3838, 2.0853, 1.4467, 2.0602], + device='cuda:2'), covar=tensor([0.0996, 0.0330, 0.0778, 0.0465, 0.0288, 0.0861, 0.0813, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0253, 0.0314, 0.0241, 0.0215, 0.0309, 0.0280, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 04:02:55,313 INFO [train.py:903] (2/4) Epoch 4, batch 6300, loss[loss=0.3609, simple_loss=0.4099, pruned_loss=0.1559, over 19733.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3548, pruned_loss=0.1213, over 3832833.61 frames. 
], batch size: 63, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:03:07,894 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8456, 1.8636, 1.7981, 2.4762, 1.7339, 2.3418, 2.2310, 1.7834], + device='cuda:2'), covar=tensor([0.1464, 0.1163, 0.0740, 0.0703, 0.1323, 0.0486, 0.1341, 0.1137], + device='cuda:2'), in_proj_covar=tensor([0.0584, 0.0567, 0.0531, 0.0730, 0.0631, 0.0483, 0.0635, 0.0539], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 04:03:49,861 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.898e+02 7.473e+02 9.358e+02 1.215e+03 3.413e+03, threshold=1.872e+03, percent-clipped=4.0 +2023-04-01 04:03:54,564 INFO [train.py:903] (2/4) Epoch 4, batch 6350, loss[loss=0.2833, simple_loss=0.3445, pruned_loss=0.1111, over 17231.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3547, pruned_loss=0.1213, over 3832633.99 frames. ], batch size: 101, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:03:54,971 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:26,022 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:51,101 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8438, 4.7507, 5.5160, 5.4956, 1.9483, 5.1511, 4.6277, 5.0034], + device='cuda:2'), covar=tensor([0.0728, 0.0666, 0.0392, 0.0274, 0.3679, 0.0264, 0.0340, 0.0837], + device='cuda:2'), in_proj_covar=tensor([0.0467, 0.0415, 0.0557, 0.0442, 0.0540, 0.0319, 0.0366, 0.0523], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 04:04:55,430 INFO [train.py:903] (2/4) Epoch 4, batch 6400, loss[loss=0.2772, simple_loss=0.3342, pruned_loss=0.1101, over 19738.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3531, pruned_loss=0.1195, over 3827374.67 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:05:05,474 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26891.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:16,567 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:41,164 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26922.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:05:45,920 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:52,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+02 6.991e+02 8.678e+02 1.023e+03 2.915e+03, threshold=1.736e+03, percent-clipped=2.0 +2023-04-01 04:05:55,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8276, 4.1102, 4.6166, 4.5152, 1.6449, 4.1986, 3.8805, 4.1307], + device='cuda:2'), covar=tensor([0.0804, 0.0680, 0.0433, 0.0375, 0.3794, 0.0356, 0.0399, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0424, 0.0568, 0.0450, 0.0552, 0.0323, 0.0369, 0.0527], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 04:05:58,078 INFO [train.py:903] (2/4) Epoch 4, batch 6450, loss[loss=0.3242, simple_loss=0.3704, pruned_loss=0.139, over 19133.00 frames. 
], tot_loss[loss=0.2973, simple_loss=0.3544, pruned_loss=0.1201, over 3809443.78 frames. ], batch size: 69, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:06:37,699 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:39,716 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 04:06:56,671 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:58,601 INFO [train.py:903] (2/4) Epoch 4, batch 6500, loss[loss=0.3182, simple_loss=0.3693, pruned_loss=0.1336, over 19670.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3545, pruned_loss=0.1204, over 3807066.81 frames. ], batch size: 55, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:07:03,001 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 04:07:05,796 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 04:07:25,328 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27008.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:07:31,707 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 04:07:53,581 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+02 7.867e+02 9.982e+02 1.245e+03 2.621e+03, threshold=1.996e+03, percent-clipped=6.0 +2023-04-01 04:07:57,611 INFO [train.py:903] (2/4) Epoch 4, batch 6550, loss[loss=0.3385, simple_loss=0.3725, pruned_loss=0.1523, over 19766.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3554, pruned_loss=0.1214, over 3803855.68 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:08:57,146 INFO [train.py:903] (2/4) Epoch 4, batch 6600, loss[loss=0.3106, simple_loss=0.368, pruned_loss=0.1266, over 19289.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.354, pruned_loss=0.1208, over 3808071.22 frames. ], batch size: 66, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:09:53,358 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+02 7.635e+02 9.605e+02 1.185e+03 2.942e+03, threshold=1.921e+03, percent-clipped=6.0 +2023-04-01 04:09:58,813 INFO [train.py:903] (2/4) Epoch 4, batch 6650, loss[loss=0.2914, simple_loss=0.3552, pruned_loss=0.1138, over 19548.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3547, pruned_loss=0.1212, over 3817116.96 frames. 
], batch size: 61, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:10:01,417 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2821, 1.1794, 1.5287, 1.1984, 2.7332, 3.6486, 3.5621, 3.9896], + device='cuda:2'), covar=tensor([0.1370, 0.2758, 0.2696, 0.1843, 0.0413, 0.0122, 0.0169, 0.0086], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0278, 0.0315, 0.0258, 0.0194, 0.0114, 0.0200, 0.0129], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 04:10:13,144 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:25,009 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7102, 4.0173, 4.2556, 4.2598, 1.5355, 3.8084, 3.5255, 3.8518], + device='cuda:2'), covar=tensor([0.0721, 0.0649, 0.0486, 0.0352, 0.3725, 0.0420, 0.0455, 0.0889], + device='cuda:2'), in_proj_covar=tensor([0.0465, 0.0409, 0.0557, 0.0441, 0.0542, 0.0323, 0.0361, 0.0528], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 04:10:35,270 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:42,762 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:58,830 INFO [train.py:903] (2/4) Epoch 4, batch 6700, loss[loss=0.2711, simple_loss=0.3268, pruned_loss=0.1077, over 19764.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3543, pruned_loss=0.1215, over 3811305.14 frames. ], batch size: 47, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:11:03,069 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 04:11:52,643 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.097e+02 7.210e+02 9.176e+02 1.266e+03 4.477e+03, threshold=1.835e+03, percent-clipped=7.0 +2023-04-01 04:11:57,029 INFO [train.py:903] (2/4) Epoch 4, batch 6750, loss[loss=0.2636, simple_loss=0.3165, pruned_loss=0.1053, over 19768.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3542, pruned_loss=0.1213, over 3805564.50 frames. ], batch size: 46, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:12:31,973 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27266.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:12:52,952 INFO [train.py:903] (2/4) Epoch 4, batch 6800, loss[loss=0.2807, simple_loss=0.3392, pruned_loss=0.1111, over 19402.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3545, pruned_loss=0.122, over 3822233.75 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:13:36,997 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 04:13:38,072 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 04:13:40,498 INFO [train.py:903] (2/4) Epoch 5, batch 0, loss[loss=0.3129, simple_loss=0.3693, pruned_loss=0.1283, over 18849.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3693, pruned_loss=0.1283, over 18849.00 frames. 
], batch size: 74, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:13:40,499 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 04:13:52,273 INFO [train.py:937] (2/4) Epoch 5, validation: loss=0.2121, simple_loss=0.3102, pruned_loss=0.05704, over 944034.00 frames. +2023-04-01 04:13:52,274 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 04:13:52,415 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:14:04,619 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 04:14:16,053 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.218e+02 7.861e+02 9.859e+02 1.236e+03 2.711e+03, threshold=1.972e+03, percent-clipped=3.0 +2023-04-01 04:14:52,472 INFO [train.py:903] (2/4) Epoch 5, batch 50, loss[loss=0.2641, simple_loss=0.3324, pruned_loss=0.09792, over 19675.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.352, pruned_loss=0.1183, over 871318.01 frames. ], batch size: 53, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:15:15,108 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27381.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:15:26,101 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 04:15:53,859 INFO [train.py:903] (2/4) Epoch 5, batch 100, loss[loss=0.3634, simple_loss=0.3931, pruned_loss=0.1669, over 12724.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3511, pruned_loss=0.1174, over 1528027.55 frames. ], batch size: 136, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:16:05,281 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 04:16:11,187 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:16:15,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.953e+02 8.679e+02 1.081e+03 2.199e+03, threshold=1.736e+03, percent-clipped=1.0 +2023-04-01 04:16:53,750 INFO [train.py:903] (2/4) Epoch 5, batch 150, loss[loss=0.2571, simple_loss=0.3149, pruned_loss=0.09969, over 19376.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3514, pruned_loss=0.1179, over 2049549.27 frames. ], batch size: 47, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:52,417 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:17:54,460 INFO [train.py:903] (2/4) Epoch 5, batch 200, loss[loss=0.2923, simple_loss=0.3391, pruned_loss=0.1227, over 19745.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3481, pruned_loss=0.1155, over 2454174.94 frames. ], batch size: 47, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:54,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 04:18:19,388 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+02 6.870e+02 8.382e+02 1.064e+03 2.606e+03, threshold=1.676e+03, percent-clipped=3.0 +2023-04-01 04:18:56,512 INFO [train.py:903] (2/4) Epoch 5, batch 250, loss[loss=0.2847, simple_loss=0.3376, pruned_loss=0.1159, over 19482.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3505, pruned_loss=0.1174, over 2752333.59 frames. 
], batch size: 49, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:19:58,941 INFO [train.py:903] (2/4) Epoch 5, batch 300, loss[loss=0.2258, simple_loss=0.2938, pruned_loss=0.07889, over 19323.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3481, pruned_loss=0.1166, over 2991917.53 frames. ], batch size: 47, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:20:15,375 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:20:22,960 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+02 6.581e+02 8.607e+02 1.103e+03 1.922e+03, threshold=1.721e+03, percent-clipped=6.0 +2023-04-01 04:20:28,926 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27637.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:21:01,029 INFO [train.py:903] (2/4) Epoch 5, batch 350, loss[loss=0.2798, simple_loss=0.3454, pruned_loss=0.1071, over 19870.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3476, pruned_loss=0.1162, over 3175416.24 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:21:01,371 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27662.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:21:07,181 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 04:21:26,434 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:21:58,725 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:22:02,932 INFO [train.py:903] (2/4) Epoch 5, batch 400, loss[loss=0.2727, simple_loss=0.3284, pruned_loss=0.1085, over 19765.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3472, pruned_loss=0.116, over 3321544.11 frames. ], batch size: 47, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:22:25,048 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9023, 1.8748, 1.9114, 2.5835, 1.6960, 2.3559, 2.3495, 1.8865], + device='cuda:2'), covar=tensor([0.1609, 0.1386, 0.0732, 0.0769, 0.1559, 0.0575, 0.1405, 0.1260], + device='cuda:2'), in_proj_covar=tensor([0.0595, 0.0583, 0.0534, 0.0742, 0.0638, 0.0499, 0.0647, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 04:22:27,962 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.948e+02 7.305e+02 9.017e+02 1.065e+03 1.815e+03, threshold=1.803e+03, percent-clipped=3.0 +2023-04-01 04:22:28,325 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27732.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:23:04,211 INFO [train.py:903] (2/4) Epoch 5, batch 450, loss[loss=0.3195, simple_loss=0.3757, pruned_loss=0.1316, over 19616.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3474, pruned_loss=0.1159, over 3442507.11 frames. ], batch size: 57, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:23:33,147 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-01 04:23:45,984 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 04:23:47,111 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 04:24:07,128 INFO [train.py:903] (2/4) Epoch 5, batch 500, loss[loss=0.3143, simple_loss=0.3677, pruned_loss=0.1304, over 19665.00 frames. ], tot_loss[loss=0.2891, simple_loss=0.3469, pruned_loss=0.1156, over 3526430.85 frames. ], batch size: 53, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:24:31,787 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+02 6.150e+02 8.318e+02 1.057e+03 1.987e+03, threshold=1.664e+03, percent-clipped=1.0 +2023-04-01 04:25:11,313 INFO [train.py:903] (2/4) Epoch 5, batch 550, loss[loss=0.3075, simple_loss=0.3547, pruned_loss=0.1302, over 19488.00 frames. ], tot_loss[loss=0.2906, simple_loss=0.3482, pruned_loss=0.1164, over 3594991.23 frames. ], batch size: 49, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:25:35,615 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:07,932 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:14,571 INFO [train.py:903] (2/4) Epoch 5, batch 600, loss[loss=0.2794, simple_loss=0.3469, pruned_loss=0.1059, over 19784.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3487, pruned_loss=0.1166, over 3638719.70 frames. ], batch size: 56, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:26:38,766 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.595e+02 8.388e+02 1.023e+03 2.578e+03, threshold=1.678e+03, percent-clipped=3.0 +2023-04-01 04:27:02,814 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 04:27:07,045 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 04:27:17,954 INFO [train.py:903] (2/4) Epoch 5, batch 650, loss[loss=0.2551, simple_loss=0.3226, pruned_loss=0.09385, over 19583.00 frames. ], tot_loss[loss=0.2922, simple_loss=0.3497, pruned_loss=0.1173, over 3682447.43 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:20,087 INFO [train.py:903] (2/4) Epoch 5, batch 700, loss[loss=0.3055, simple_loss=0.367, pruned_loss=0.122, over 19291.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.349, pruned_loss=0.1169, over 3714376.09 frames. ], batch size: 66, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:47,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 7.486e+02 9.333e+02 1.140e+03 2.488e+03, threshold=1.867e+03, percent-clipped=5.0 +2023-04-01 04:29:25,964 INFO [train.py:903] (2/4) Epoch 5, batch 750, loss[loss=0.2661, simple_loss=0.332, pruned_loss=0.1001, over 19861.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.35, pruned_loss=0.1173, over 3741206.87 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:29:43,613 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28076.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:30:27,878 INFO [train.py:903] (2/4) Epoch 5, batch 800, loss[loss=0.2135, simple_loss=0.2807, pruned_loss=0.07315, over 19742.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3495, pruned_loss=0.1167, over 3762294.48 frames. ], batch size: 45, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:30:48,163 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 04:30:52,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.190e+02 6.427e+02 8.567e+02 1.032e+03 2.729e+03, threshold=1.713e+03, percent-clipped=3.0 +2023-04-01 04:31:32,199 INFO [train.py:903] (2/4) Epoch 5, batch 850, loss[loss=0.3597, simple_loss=0.3963, pruned_loss=0.1616, over 19771.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3492, pruned_loss=0.1165, over 3785098.44 frames. ], batch size: 56, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:32:08,701 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28191.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:32:14,397 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6030, 3.9607, 4.1194, 4.1756, 1.3333, 3.8482, 3.4646, 3.7362], + device='cuda:2'), covar=tensor([0.0859, 0.0605, 0.0573, 0.0403, 0.4343, 0.0353, 0.0465, 0.1080], + device='cuda:2'), in_proj_covar=tensor([0.0483, 0.0431, 0.0579, 0.0449, 0.0562, 0.0329, 0.0375, 0.0532], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 04:32:29,190 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 04:32:33,623 INFO [train.py:903] (2/4) Epoch 5, batch 900, loss[loss=0.3077, simple_loss=0.3701, pruned_loss=0.1226, over 19708.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3492, pruned_loss=0.1162, over 3803290.77 frames. ], batch size: 59, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:32:59,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+02 6.938e+02 8.196e+02 1.125e+03 2.658e+03, threshold=1.639e+03, percent-clipped=4.0 +2023-04-01 04:33:17,437 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:33:36,692 INFO [train.py:903] (2/4) Epoch 5, batch 950, loss[loss=0.2602, simple_loss=0.33, pruned_loss=0.0952, over 19675.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.35, pruned_loss=0.1163, over 3808099.88 frames. ], batch size: 53, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:33:42,369 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 04:33:49,437 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28273.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:34:28,469 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:34:36,190 INFO [train.py:903] (2/4) Epoch 5, batch 1000, loss[loss=0.2787, simple_loss=0.3499, pruned_loss=0.1038, over 19615.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3495, pruned_loss=0.1161, over 3800728.54 frames. ], batch size: 57, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:34:51,467 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.80 vs. limit=5.0 +2023-04-01 04:34:59,300 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 7.143e+02 8.888e+02 1.138e+03 2.880e+03, threshold=1.778e+03, percent-clipped=9.0 +2023-04-01 04:35:30,269 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 04:35:36,628 INFO [train.py:903] (2/4) Epoch 5, batch 1050, loss[loss=0.3544, simple_loss=0.3944, pruned_loss=0.1573, over 19329.00 frames. 
], tot_loss[loss=0.291, simple_loss=0.3495, pruned_loss=0.1163, over 3807526.78 frames. ], batch size: 66, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:36:09,492 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 04:36:36,350 INFO [train.py:903] (2/4) Epoch 5, batch 1100, loss[loss=0.3358, simple_loss=0.3858, pruned_loss=0.1429, over 19657.00 frames. ], tot_loss[loss=0.2906, simple_loss=0.3489, pruned_loss=0.1162, over 3806226.32 frames. ], batch size: 55, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:36:56,333 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7780, 1.5623, 2.0779, 1.8336, 3.0179, 2.7057, 3.2279, 1.6211], + device='cuda:2'), covar=tensor([0.1429, 0.2706, 0.1479, 0.1213, 0.0994, 0.1091, 0.1091, 0.2373], + device='cuda:2'), in_proj_covar=tensor([0.0441, 0.0495, 0.0467, 0.0408, 0.0538, 0.0439, 0.0616, 0.0430], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 04:37:01,566 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+02 6.917e+02 8.996e+02 1.190e+03 3.192e+03, threshold=1.799e+03, percent-clipped=6.0 +2023-04-01 04:37:19,649 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28447.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:37:37,667 INFO [train.py:903] (2/4) Epoch 5, batch 1150, loss[loss=0.3326, simple_loss=0.3801, pruned_loss=0.1426, over 19681.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3488, pruned_loss=0.1161, over 3800070.89 frames. ], batch size: 60, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:50,309 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28472.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:38:11,073 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6104, 1.2619, 1.2299, 1.9619, 1.5278, 1.7465, 1.9307, 1.6699], + device='cuda:2'), covar=tensor([0.0858, 0.1117, 0.1190, 0.0838, 0.0941, 0.0780, 0.0904, 0.0726], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0254, 0.0245, 0.0279, 0.0280, 0.0237, 0.0242, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 04:38:29,216 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:36,126 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:38,089 INFO [train.py:903] (2/4) Epoch 5, batch 1200, loss[loss=0.3266, simple_loss=0.378, pruned_loss=0.1376, over 19667.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3482, pruned_loss=0.1161, over 3803284.29 frames. 
], batch size: 59, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:39:01,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.666e+02 7.999e+02 1.012e+03 1.920e+03, threshold=1.600e+03, percent-clipped=0.0 +2023-04-01 04:39:11,402 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4198, 1.4381, 1.7255, 1.3980, 2.3620, 2.6597, 2.6681, 2.8371], + device='cuda:2'), covar=tensor([0.1165, 0.2180, 0.2087, 0.1705, 0.0616, 0.0390, 0.0226, 0.0157], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0281, 0.0318, 0.0255, 0.0197, 0.0114, 0.0205, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-04-01 04:39:12,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 04:39:37,127 INFO [train.py:903] (2/4) Epoch 5, batch 1250, loss[loss=0.2371, simple_loss=0.3042, pruned_loss=0.08503, over 19773.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3483, pruned_loss=0.1164, over 3804162.66 frames. ], batch size: 47, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:13,036 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:40:37,857 INFO [train.py:903] (2/4) Epoch 5, batch 1300, loss[loss=0.3162, simple_loss=0.3735, pruned_loss=0.1295, over 19475.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3484, pruned_loss=0.1156, over 3821335.12 frames. ], batch size: 64, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:43,794 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28617.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:41:03,191 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 7.072e+02 8.596e+02 1.219e+03 1.879e+03, threshold=1.719e+03, percent-clipped=8.0 +2023-04-01 04:41:11,335 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6856, 1.7618, 1.3008, 1.3439, 1.2154, 1.4618, 0.1021, 0.7776], + device='cuda:2'), covar=tensor([0.0238, 0.0214, 0.0174, 0.0201, 0.0470, 0.0206, 0.0429, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0277, 0.0275, 0.0296, 0.0360, 0.0277, 0.0273, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 04:41:23,386 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:41:39,864 INFO [train.py:903] (2/4) Epoch 5, batch 1350, loss[loss=0.2918, simple_loss=0.3577, pruned_loss=0.1129, over 19518.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3477, pruned_loss=0.115, over 3820856.11 frames. ], batch size: 56, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:42:34,107 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:42:40,556 INFO [train.py:903] (2/4) Epoch 5, batch 1400, loss[loss=0.2612, simple_loss=0.329, pruned_loss=0.09666, over 19609.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3484, pruned_loss=0.1152, over 3817502.17 frames. 
], batch size: 50, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:01,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1589, 3.5511, 3.7245, 3.6950, 1.5261, 3.3597, 3.0920, 3.3057], + device='cuda:2'), covar=tensor([0.1041, 0.0835, 0.0584, 0.0489, 0.3602, 0.0468, 0.0553, 0.1050], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0445, 0.0587, 0.0466, 0.0564, 0.0337, 0.0379, 0.0543], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 04:43:04,143 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 6.792e+02 9.169e+02 1.129e+03 1.829e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-04-01 04:43:04,544 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28732.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:43:40,826 INFO [train.py:903] (2/4) Epoch 5, batch 1450, loss[loss=0.2586, simple_loss=0.318, pruned_loss=0.09962, over 19743.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3475, pruned_loss=0.1146, over 3818411.78 frames. ], batch size: 51, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:43,168 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 04:43:43,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28764.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:44:41,293 INFO [train.py:903] (2/4) Epoch 5, batch 1500, loss[loss=0.4456, simple_loss=0.4481, pruned_loss=0.2215, over 13437.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3463, pruned_loss=0.1142, over 3808148.19 frames. ], batch size: 135, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:45:06,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.912e+02 8.562e+02 1.022e+03 2.509e+03, threshold=1.712e+03, percent-clipped=1.0 +2023-04-01 04:45:24,295 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:31,731 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:42,343 INFO [train.py:903] (2/4) Epoch 5, batch 1550, loss[loss=0.2336, simple_loss=0.2882, pruned_loss=0.08957, over 19741.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3467, pruned_loss=0.1145, over 3797276.08 frames. 
], batch size: 46, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:45:54,746 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7445, 1.7504, 1.7309, 2.5154, 1.6808, 2.2377, 2.3024, 1.7763], + device='cuda:2'), covar=tensor([0.1634, 0.1325, 0.0768, 0.0704, 0.1467, 0.0600, 0.1430, 0.1281], + device='cuda:2'), in_proj_covar=tensor([0.0606, 0.0595, 0.0546, 0.0753, 0.0650, 0.0508, 0.0658, 0.0563], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 04:46:02,328 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28879.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:46:23,062 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3130, 2.2129, 1.9214, 1.6357, 1.5952, 1.8768, 0.3649, 1.2318], + device='cuda:2'), covar=tensor([0.0247, 0.0213, 0.0149, 0.0318, 0.0491, 0.0249, 0.0514, 0.0365], + device='cuda:2'), in_proj_covar=tensor([0.0292, 0.0282, 0.0278, 0.0304, 0.0369, 0.0285, 0.0278, 0.0285], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 04:46:41,468 INFO [train.py:903] (2/4) Epoch 5, batch 1600, loss[loss=0.3154, simple_loss=0.3642, pruned_loss=0.1333, over 19760.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3467, pruned_loss=0.1141, over 3818610.04 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 8.0 +2023-04-01 04:47:04,538 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+02 6.984e+02 8.420e+02 1.102e+03 2.946e+03, threshold=1.684e+03, percent-clipped=4.0 +2023-04-01 04:47:04,569 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 04:47:41,586 INFO [train.py:903] (2/4) Epoch 5, batch 1650, loss[loss=0.2845, simple_loss=0.3499, pruned_loss=0.1096, over 19058.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3464, pruned_loss=0.1136, over 3827229.89 frames. ], batch size: 69, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:47:42,038 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:43,187 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28963.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:49,868 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:11,664 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-04-01 04:48:12,489 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:14,415 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28988.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:48:42,351 INFO [train.py:903] (2/4) Epoch 5, batch 1700, loss[loss=0.2946, simple_loss=0.3372, pruned_loss=0.126, over 14729.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3477, pruned_loss=0.1147, over 3832687.93 frames. 
], batch size: 32, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:48:43,857 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29013.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:48:51,929 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:52,073 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:08,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+02 6.699e+02 8.227e+02 1.083e+03 2.721e+03, threshold=1.645e+03, percent-clipped=4.0 +2023-04-01 04:49:21,045 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 04:49:22,488 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:42,474 INFO [train.py:903] (2/4) Epoch 5, batch 1750, loss[loss=0.24, simple_loss=0.3112, pruned_loss=0.08446, over 19721.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3484, pruned_loss=0.1151, over 3832190.42 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:43,933 INFO [train.py:903] (2/4) Epoch 5, batch 1800, loss[loss=0.3999, simple_loss=0.4198, pruned_loss=0.19, over 13151.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3475, pruned_loss=0.1145, over 3824518.99 frames. ], batch size: 136, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:51:00,721 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 04:51:07,719 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.740e+02 6.050e+02 7.849e+02 1.011e+03 2.328e+03, threshold=1.570e+03, percent-clipped=7.0 +2023-04-01 04:51:39,757 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 04:51:44,191 INFO [train.py:903] (2/4) Epoch 5, batch 1850, loss[loss=0.2847, simple_loss=0.3482, pruned_loss=0.1106, over 19536.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3477, pruned_loss=0.1142, over 3817107.15 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:17,657 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 04:52:43,727 INFO [train.py:903] (2/4) Epoch 5, batch 1900, loss[loss=0.3235, simple_loss=0.3836, pruned_loss=0.1317, over 19381.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3482, pruned_loss=0.1155, over 3817240.03 frames. ], batch size: 70, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:53,238 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29219.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:52:59,221 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:01,728 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:02,418 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 04:53:07,799 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-01 04:53:11,312 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+02 6.988e+02 9.073e+02 1.156e+03 1.890e+03, threshold=1.815e+03, percent-clipped=5.0 +2023-04-01 04:53:23,883 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:24,460 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 04:53:28,885 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 04:53:30,456 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:35,747 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:44,960 INFO [train.py:903] (2/4) Epoch 5, batch 1950, loss[loss=0.2774, simple_loss=0.3326, pruned_loss=0.1112, over 19598.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3464, pruned_loss=0.1145, over 3814267.36 frames. ], batch size: 52, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:54:46,875 INFO [train.py:903] (2/4) Epoch 5, batch 2000, loss[loss=0.2619, simple_loss=0.3343, pruned_loss=0.09476, over 19529.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3457, pruned_loss=0.1135, over 3826062.15 frames. ], batch size: 56, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:10,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.142e+02 7.102e+02 9.141e+02 1.135e+03 3.050e+03, threshold=1.828e+03, percent-clipped=2.0 +2023-04-01 04:55:16,883 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:43,165 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 04:55:46,566 INFO [train.py:903] (2/4) Epoch 5, batch 2050, loss[loss=0.3043, simple_loss=0.3597, pruned_loss=0.1244, over 19536.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3469, pruned_loss=0.1143, over 3829433.63 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:49,012 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:59,087 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9364, 1.4661, 1.4949, 1.9626, 1.6289, 1.6996, 1.6403, 1.9419], + device='cuda:2'), covar=tensor([0.0751, 0.1440, 0.1234, 0.0844, 0.1130, 0.0451, 0.0903, 0.0570], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0370, 0.0286, 0.0242, 0.0306, 0.0249, 0.0273, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 04:55:59,871 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 04:56:00,832 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 04:56:23,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 04:56:45,828 INFO [train.py:903] (2/4) Epoch 5, batch 2100, loss[loss=0.27, simple_loss=0.3338, pruned_loss=0.1031, over 19777.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3469, pruned_loss=0.1146, over 3836371.74 frames. 
], batch size: 56, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:57:12,304 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 7.145e+02 9.347e+02 1.270e+03 4.921e+03, threshold=1.869e+03, percent-clipped=10.0 +2023-04-01 04:57:13,533 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 04:57:34,263 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 04:57:46,685 INFO [train.py:903] (2/4) Epoch 5, batch 2150, loss[loss=0.2597, simple_loss=0.32, pruned_loss=0.09967, over 19873.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3479, pruned_loss=0.1158, over 3811899.91 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:58:01,874 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5179, 1.4270, 1.5636, 1.8771, 3.1300, 1.2092, 2.3208, 3.1296], + device='cuda:2'), covar=tensor([0.0316, 0.2283, 0.2199, 0.1341, 0.0497, 0.2234, 0.1195, 0.0388], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0308, 0.0310, 0.0286, 0.0302, 0.0315, 0.0286, 0.0300], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 04:58:08,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29479.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:58:48,385 INFO [train.py:903] (2/4) Epoch 5, batch 2200, loss[loss=0.3069, simple_loss=0.3678, pruned_loss=0.123, over 19626.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3469, pruned_loss=0.1146, over 3826778.59 frames. ], batch size: 61, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:11,936 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+02 6.760e+02 8.285e+02 1.117e+03 1.782e+03, threshold=1.657e+03, percent-clipped=0.0 +2023-04-01 04:59:47,491 INFO [train.py:903] (2/4) Epoch 5, batch 2250, loss[loss=0.2751, simple_loss=0.3254, pruned_loss=0.1124, over 19758.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3461, pruned_loss=0.1141, over 3836392.34 frames. ], batch size: 45, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:55,535 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:28,239 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:32,407 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:48,024 INFO [train.py:903] (2/4) Epoch 5, batch 2300, loss[loss=0.3172, simple_loss=0.3683, pruned_loss=0.133, over 17383.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3456, pruned_loss=0.1135, over 3848454.04 frames. ], batch size: 101, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 05:00:56,322 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29619.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:01:03,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 05:01:15,152 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.694e+02 6.380e+02 7.654e+02 9.572e+02 1.859e+03, threshold=1.531e+03, percent-clipped=2.0 +2023-04-01 05:01:48,834 INFO [train.py:903] (2/4) Epoch 5, batch 2350, loss[loss=0.3239, simple_loss=0.3771, pruned_loss=0.1354, over 18286.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3466, pruned_loss=0.1141, over 3827170.26 frames. ], batch size: 84, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:11,353 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29680.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:02:28,864 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 05:02:45,264 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 05:02:49,424 INFO [train.py:903] (2/4) Epoch 5, batch 2400, loss[loss=0.2795, simple_loss=0.3447, pruned_loss=0.1072, over 19774.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3472, pruned_loss=0.1143, over 3820764.50 frames. ], batch size: 56, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:50,600 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:12,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.376e+02 6.843e+02 7.889e+02 1.103e+03 3.246e+03, threshold=1.578e+03, percent-clipped=5.0 +2023-04-01 05:03:15,309 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:46,780 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:48,490 INFO [train.py:903] (2/4) Epoch 5, batch 2450, loss[loss=0.2645, simple_loss=0.3127, pruned_loss=0.1082, over 19801.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3475, pruned_loss=0.1145, over 3817285.52 frames. ], batch size: 47, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:38,515 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7768, 4.1794, 4.5106, 4.4533, 1.4152, 4.1203, 3.5897, 4.0405], + device='cuda:2'), covar=tensor([0.1040, 0.0558, 0.0464, 0.0437, 0.4359, 0.0326, 0.0505, 0.0979], + device='cuda:2'), in_proj_covar=tensor([0.0497, 0.0442, 0.0580, 0.0477, 0.0570, 0.0347, 0.0378, 0.0540], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:04:48,612 INFO [train.py:903] (2/4) Epoch 5, batch 2500, loss[loss=0.3014, simple_loss=0.3686, pruned_loss=0.1171, over 19602.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3464, pruned_loss=0.1136, over 3820957.42 frames. 
], batch size: 57, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:57,783 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:05:14,591 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 6.790e+02 8.500e+02 1.086e+03 2.138e+03, threshold=1.700e+03, percent-clipped=3.0 +2023-04-01 05:05:45,456 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6575, 1.2958, 1.2870, 2.0811, 1.6176, 1.8996, 2.1317, 1.8669], + device='cuda:2'), covar=tensor([0.0733, 0.0990, 0.1058, 0.0767, 0.0874, 0.0775, 0.0747, 0.0648], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0252, 0.0243, 0.0281, 0.0273, 0.0230, 0.0237, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 05:05:45,875 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.69 vs. limit=5.0 +2023-04-01 05:05:48,271 INFO [train.py:903] (2/4) Epoch 5, batch 2550, loss[loss=0.2643, simple_loss=0.3257, pruned_loss=0.1014, over 19377.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3471, pruned_loss=0.1145, over 3817489.99 frames. ], batch size: 48, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:40,390 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 05:06:43,847 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3087, 1.3724, 1.8755, 1.5563, 2.9206, 4.2727, 4.2733, 4.7136], + device='cuda:2'), covar=tensor([0.1477, 0.2890, 0.2771, 0.1865, 0.0453, 0.0135, 0.0162, 0.0083], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0282, 0.0317, 0.0254, 0.0197, 0.0117, 0.0207, 0.0139], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:06:48,588 INFO [train.py:903] (2/4) Epoch 5, batch 2600, loss[loss=0.2605, simple_loss=0.321, pruned_loss=0.09999, over 19391.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3467, pruned_loss=0.114, over 3826686.30 frames. ], batch size: 48, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:50,714 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:07:13,514 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.882e+02 9.175e+02 1.239e+03 1.828e+03, threshold=1.835e+03, percent-clipped=5.0 +2023-04-01 05:07:50,213 INFO [train.py:903] (2/4) Epoch 5, batch 2650, loss[loss=0.2965, simple_loss=0.3612, pruned_loss=0.1159, over 19338.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.344, pruned_loss=0.1124, over 3826507.20 frames. ], batch size: 66, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:07:58,234 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:08,344 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 05:08:29,807 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:50,702 INFO [train.py:903] (2/4) Epoch 5, batch 2700, loss[loss=0.2891, simple_loss=0.354, pruned_loss=0.1121, over 19674.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3435, pruned_loss=0.1119, over 3832077.54 frames. 
], batch size: 60, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:09:04,703 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:10,314 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:17,305 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.255e+02 6.674e+02 8.375e+02 9.964e+02 1.932e+03, threshold=1.675e+03, percent-clipped=1.0 +2023-04-01 05:09:22,315 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9502, 1.9308, 1.8724, 2.7147, 1.9114, 2.5158, 2.4286, 1.7358], + device='cuda:2'), covar=tensor([0.1728, 0.1354, 0.0756, 0.0845, 0.1667, 0.0630, 0.1433, 0.1332], + device='cuda:2'), in_proj_covar=tensor([0.0602, 0.0592, 0.0541, 0.0753, 0.0648, 0.0517, 0.0658, 0.0562], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 05:09:49,950 INFO [train.py:903] (2/4) Epoch 5, batch 2750, loss[loss=0.2958, simple_loss=0.3525, pruned_loss=0.1195, over 19734.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3436, pruned_loss=0.1121, over 3843572.17 frames. ], batch size: 51, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:10:19,015 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9886, 4.4371, 4.6975, 4.6096, 1.4915, 4.2557, 3.8161, 4.2431], + device='cuda:2'), covar=tensor([0.0852, 0.0523, 0.0401, 0.0366, 0.4149, 0.0356, 0.0431, 0.0937], + device='cuda:2'), in_proj_covar=tensor([0.0503, 0.0448, 0.0597, 0.0486, 0.0575, 0.0354, 0.0380, 0.0552], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:10:50,796 INFO [train.py:903] (2/4) Epoch 5, batch 2800, loss[loss=0.3409, simple_loss=0.3867, pruned_loss=0.1475, over 13564.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3429, pruned_loss=0.1117, over 3823412.87 frames. ], batch size: 136, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:11:17,049 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.279e+02 7.041e+02 9.072e+02 1.191e+03 2.188e+03, threshold=1.814e+03, percent-clipped=6.0 +2023-04-01 05:11:22,827 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30139.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:27,404 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:51,983 INFO [train.py:903] (2/4) Epoch 5, batch 2850, loss[loss=0.2246, simple_loss=0.2867, pruned_loss=0.08125, over 19770.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3431, pruned_loss=0.1122, over 3834730.95 frames. ], batch size: 48, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:11:54,301 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30164.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:12:40,238 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. 
limit=5.0 +2023-04-01 05:12:47,799 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5493, 1.5820, 1.6430, 2.1277, 1.3714, 1.7950, 1.9819, 1.6276], + device='cuda:2'), covar=tensor([0.1804, 0.1371, 0.0837, 0.0758, 0.1573, 0.0690, 0.1671, 0.1374], + device='cuda:2'), in_proj_covar=tensor([0.0617, 0.0601, 0.0549, 0.0765, 0.0664, 0.0527, 0.0669, 0.0572], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 05:12:51,657 INFO [train.py:903] (2/4) Epoch 5, batch 2900, loss[loss=0.2691, simple_loss=0.3384, pruned_loss=0.09989, over 19450.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3431, pruned_loss=0.1121, over 3848341.35 frames. ], batch size: 64, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:12:51,675 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 05:13:09,285 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:13:20,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.023e+02 7.237e+02 9.091e+02 1.153e+03 2.755e+03, threshold=1.818e+03, percent-clipped=7.0 +2023-04-01 05:13:51,429 INFO [train.py:903] (2/4) Epoch 5, batch 2950, loss[loss=0.2985, simple_loss=0.3561, pruned_loss=0.1205, over 19580.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3434, pruned_loss=0.1125, over 3833815.27 frames. ], batch size: 61, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:12,809 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:18,199 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30284.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:21,854 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-01 05:14:46,956 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30309.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:50,603 INFO [train.py:903] (2/4) Epoch 5, batch 3000, loss[loss=0.2851, simple_loss=0.3366, pruned_loss=0.1168, over 19487.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3437, pruned_loss=0.113, over 3823520.83 frames. ], batch size: 49, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:50,603 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 05:15:03,138 INFO [train.py:937] (2/4) Epoch 5, validation: loss=0.2047, simple_loss=0.3034, pruned_loss=0.05296, over 944034.00 frames. +2023-04-01 05:15:03,140 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 05:15:05,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 05:15:33,551 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 7.155e+02 8.736e+02 1.085e+03 2.346e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 05:16:06,356 INFO [train.py:903] (2/4) Epoch 5, batch 3050, loss[loss=0.2523, simple_loss=0.3249, pruned_loss=0.08987, over 19749.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3427, pruned_loss=0.1117, over 3813565.88 frames. 
], batch size: 54, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:16:26,414 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:16:45,575 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30395.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:07,567 INFO [train.py:903] (2/4) Epoch 5, batch 3100, loss[loss=0.2825, simple_loss=0.3406, pruned_loss=0.1122, over 19671.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3441, pruned_loss=0.1132, over 3812647.92 frames. ], batch size: 58, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:17:17,136 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30420.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:25,522 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-04-01 05:17:33,698 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+02 6.847e+02 8.274e+02 1.001e+03 3.134e+03, threshold=1.655e+03, percent-clipped=2.0 +2023-04-01 05:18:06,700 INFO [train.py:903] (2/4) Epoch 5, batch 3150, loss[loss=0.2429, simple_loss=0.3022, pruned_loss=0.09178, over 19321.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3449, pruned_loss=0.1137, over 3821604.35 frames. ], batch size: 44, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:18:34,301 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 05:18:37,229 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30487.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:19:06,283 INFO [train.py:903] (2/4) Epoch 5, batch 3200, loss[loss=0.2596, simple_loss=0.3284, pruned_loss=0.0954, over 19856.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.344, pruned_loss=0.1134, over 3825546.81 frames. ], batch size: 52, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:19:35,547 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 7.005e+02 8.588e+02 1.128e+03 3.335e+03, threshold=1.718e+03, percent-clipped=13.0 +2023-04-01 05:19:35,955 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30535.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:04,521 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:06,325 INFO [train.py:903] (2/4) Epoch 5, batch 3250, loss[loss=0.3276, simple_loss=0.3857, pruned_loss=0.1347, over 19675.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3443, pruned_loss=0.1137, over 3818165.53 frames. ], batch size: 60, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:20:20,422 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:55,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:21:08,680 INFO [train.py:903] (2/4) Epoch 5, batch 3300, loss[loss=0.3651, simple_loss=0.4067, pruned_loss=0.1618, over 18356.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3425, pruned_loss=0.1125, over 3830534.59 frames. ], batch size: 84, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:21:16,491 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-01 05:21:31,848 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 05:21:35,518 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 6.699e+02 7.755e+02 9.198e+02 2.155e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 05:21:37,205 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9757, 2.0831, 1.4246, 1.4949, 1.2090, 1.5434, 0.3755, 0.8818], + device='cuda:2'), covar=tensor([0.0398, 0.0303, 0.0277, 0.0354, 0.0690, 0.0412, 0.0571, 0.0515], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0277, 0.0276, 0.0298, 0.0361, 0.0287, 0.0270, 0.0283], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:22:09,102 INFO [train.py:903] (2/4) Epoch 5, batch 3350, loss[loss=0.2658, simple_loss=0.3263, pruned_loss=0.1026, over 19614.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.344, pruned_loss=0.1135, over 3816133.03 frames. ], batch size: 50, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:22:21,630 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:22:38,251 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:09,780 INFO [train.py:903] (2/4) Epoch 5, batch 3400, loss[loss=0.2542, simple_loss=0.3048, pruned_loss=0.1018, over 19766.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3453, pruned_loss=0.1138, over 3813152.45 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:23:22,337 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:31,352 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7918, 1.8586, 1.5306, 1.5911, 1.4102, 1.6187, 0.6756, 1.2465], + device='cuda:2'), covar=tensor([0.0203, 0.0220, 0.0145, 0.0195, 0.0370, 0.0237, 0.0409, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0280, 0.0279, 0.0301, 0.0368, 0.0290, 0.0276, 0.0288], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:23:39,933 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+02 6.893e+02 8.724e+02 1.073e+03 2.213e+03, threshold=1.745e+03, percent-clipped=7.0 +2023-04-01 05:24:11,822 INFO [train.py:903] (2/4) Epoch 5, batch 3450, loss[loss=0.2977, simple_loss=0.3549, pruned_loss=0.1202, over 19659.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3438, pruned_loss=0.1133, over 3798068.24 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:24:15,168 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 05:25:13,755 INFO [train.py:903] (2/4) Epoch 5, batch 3500, loss[loss=0.2403, simple_loss=0.3149, pruned_loss=0.08286, over 19851.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3438, pruned_loss=0.1129, over 3800725.69 frames. 
], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:25:39,220 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 7.015e+02 8.195e+02 1.097e+03 2.546e+03, threshold=1.639e+03, percent-clipped=5.0 +2023-04-01 05:25:41,876 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:25:58,596 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 05:26:09,330 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30858.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:13,366 INFO [train.py:903] (2/4) Epoch 5, batch 3550, loss[loss=0.2332, simple_loss=0.3072, pruned_loss=0.07958, over 19751.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.345, pruned_loss=0.1134, over 3794715.70 frames. ], batch size: 51, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:26:38,709 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:56,060 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8745, 1.1540, 1.3125, 1.4247, 2.3458, 0.9459, 1.8379, 2.5208], + device='cuda:2'), covar=tensor([0.0628, 0.2746, 0.2498, 0.1567, 0.0863, 0.2367, 0.1218, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0311, 0.0308, 0.0283, 0.0301, 0.0312, 0.0286, 0.0303], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:27:13,895 INFO [train.py:903] (2/4) Epoch 5, batch 3600, loss[loss=0.3478, simple_loss=0.3785, pruned_loss=0.1586, over 19840.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.345, pruned_loss=0.1134, over 3789245.39 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:27:24,192 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30921.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:43,240 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+02 6.769e+02 8.289e+02 1.066e+03 2.218e+03, threshold=1.658e+03, percent-clipped=4.0 +2023-04-01 05:27:51,242 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:52,190 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:55,752 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0970, 1.8304, 1.3204, 1.0405, 1.6434, 0.8567, 0.8749, 1.6519], + device='cuda:2'), covar=tensor([0.0608, 0.0436, 0.0854, 0.0618, 0.0349, 0.1003, 0.0589, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0271, 0.0317, 0.0243, 0.0223, 0.0312, 0.0286, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:28:02,186 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:13,481 INFO [train.py:903] (2/4) Epoch 5, batch 3650, loss[loss=0.2517, simple_loss=0.3154, pruned_loss=0.09403, over 19586.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3452, pruned_loss=0.1139, over 3801810.84 frames. 
], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:28:20,044 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:14,323 INFO [train.py:903] (2/4) Epoch 5, batch 3700, loss[loss=0.2646, simple_loss=0.3396, pruned_loss=0.09483, over 19532.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3456, pruned_loss=0.1141, over 3795124.29 frames. ], batch size: 56, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:29:21,024 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:34,017 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31029.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:29:40,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+02 7.099e+02 8.962e+02 1.140e+03 3.223e+03, threshold=1.792e+03, percent-clipped=9.0 +2023-04-01 05:30:15,317 INFO [train.py:903] (2/4) Epoch 5, batch 3750, loss[loss=0.3253, simple_loss=0.376, pruned_loss=0.1373, over 17548.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3452, pruned_loss=0.1139, over 3802120.24 frames. ], batch size: 101, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:30:53,682 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:15,741 INFO [train.py:903] (2/4) Epoch 5, batch 3800, loss[loss=0.284, simple_loss=0.3372, pruned_loss=0.1154, over 19581.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3449, pruned_loss=0.1137, over 3797870.34 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:31:22,779 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:40,713 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:44,604 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 5.856e+02 8.467e+02 1.115e+03 2.554e+03, threshold=1.693e+03, percent-clipped=5.0 +2023-04-01 05:31:49,119 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 05:32:15,367 INFO [train.py:903] (2/4) Epoch 5, batch 3850, loss[loss=0.3007, simple_loss=0.3572, pruned_loss=0.1221, over 19540.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3446, pruned_loss=0.1137, over 3796825.55 frames. ], batch size: 56, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:32:36,226 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4340, 0.9412, 1.1588, 1.3503, 2.0562, 0.9140, 1.8449, 2.0845], + device='cuda:2'), covar=tensor([0.0610, 0.2535, 0.2449, 0.1351, 0.0772, 0.1975, 0.0942, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0309, 0.0311, 0.0286, 0.0302, 0.0312, 0.0286, 0.0303], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:33:19,175 INFO [train.py:903] (2/4) Epoch 5, batch 3900, loss[loss=0.3845, simple_loss=0.4108, pruned_loss=0.1791, over 13343.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3441, pruned_loss=0.1131, over 3795064.43 frames. 
], batch size: 136, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:33:40,143 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4824, 1.1775, 1.2085, 1.9475, 1.5374, 1.7697, 1.9638, 1.6483], + device='cuda:2'), covar=tensor([0.0910, 0.1161, 0.1237, 0.0924, 0.1021, 0.0790, 0.0902, 0.0718], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0252, 0.0244, 0.0282, 0.0273, 0.0233, 0.0231, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 05:33:45,281 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.653e+02 6.709e+02 8.069e+02 1.055e+03 2.198e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 05:33:57,807 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0885, 2.1243, 1.6268, 1.6838, 1.5073, 1.6833, 0.3955, 1.1081], + device='cuda:2'), covar=tensor([0.0210, 0.0211, 0.0155, 0.0216, 0.0413, 0.0227, 0.0443, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0276, 0.0277, 0.0299, 0.0368, 0.0289, 0.0271, 0.0286], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:34:13,962 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3309, 3.0214, 2.0133, 2.1833, 2.0043, 2.4803, 0.5269, 2.1305], + device='cuda:2'), covar=tensor([0.0292, 0.0241, 0.0293, 0.0416, 0.0515, 0.0382, 0.0642, 0.0453], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0281, 0.0282, 0.0304, 0.0375, 0.0294, 0.0275, 0.0291], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:34:18,970 INFO [train.py:903] (2/4) Epoch 5, batch 3950, loss[loss=0.2662, simple_loss=0.3395, pruned_loss=0.09646, over 19619.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3434, pruned_loss=0.113, over 3794018.80 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:34:22,384 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:34:24,544 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 05:34:33,076 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 05:34:38,631 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-01 05:34:47,815 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:00,478 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31296.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:00,741 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2688, 2.2527, 1.5785, 1.3579, 2.0785, 0.9991, 1.0370, 1.6435], + device='cuda:2'), covar=tensor([0.0783, 0.0434, 0.0791, 0.0555, 0.0359, 0.0971, 0.0591, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0272, 0.0317, 0.0237, 0.0219, 0.0310, 0.0282, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:35:17,642 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3066, 1.4767, 2.1034, 1.7011, 2.9947, 2.6231, 3.2504, 1.4529], + device='cuda:2'), covar=tensor([0.1854, 0.2878, 0.1592, 0.1439, 0.1304, 0.1431, 0.1642, 0.2855], + device='cuda:2'), in_proj_covar=tensor([0.0445, 0.0506, 0.0482, 0.0410, 0.0548, 0.0446, 0.0624, 0.0448], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 05:35:18,261 INFO [train.py:903] (2/4) Epoch 5, batch 4000, loss[loss=0.2867, simple_loss=0.355, pruned_loss=0.1092, over 19771.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3444, pruned_loss=0.1135, over 3801219.51 frames. ], batch size: 56, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:35:48,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+02 7.061e+02 8.767e+02 1.081e+03 2.366e+03, threshold=1.753e+03, percent-clipped=7.0 +2023-04-01 05:36:02,938 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9875, 4.9354, 5.7892, 5.7827, 1.8551, 5.5505, 4.8821, 5.2626], + device='cuda:2'), covar=tensor([0.0843, 0.0559, 0.0407, 0.0284, 0.3882, 0.0239, 0.0380, 0.0832], + device='cuda:2'), in_proj_covar=tensor([0.0502, 0.0438, 0.0587, 0.0481, 0.0569, 0.0356, 0.0383, 0.0546], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:36:06,110 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 05:36:18,326 INFO [train.py:903] (2/4) Epoch 5, batch 4050, loss[loss=0.2906, simple_loss=0.3556, pruned_loss=0.1128, over 19600.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3441, pruned_loss=0.113, over 3813517.01 frames. 
], batch size: 61, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:36:33,380 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31373.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:36:42,222 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:36:51,307 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:07,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:18,615 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:19,401 INFO [train.py:903] (2/4) Epoch 5, batch 4100, loss[loss=0.2548, simple_loss=0.3236, pruned_loss=0.09295, over 19621.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3442, pruned_loss=0.113, over 3815820.87 frames. ], batch size: 50, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:37:20,891 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:37,709 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:48,921 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.964e+02 7.397e+02 9.605e+02 3.908e+03, threshold=1.479e+03, percent-clipped=5.0 +2023-04-01 05:37:53,736 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 05:38:20,933 INFO [train.py:903] (2/4) Epoch 5, batch 4150, loss[loss=0.2681, simple_loss=0.3261, pruned_loss=0.1051, over 19785.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3437, pruned_loss=0.1129, over 3803617.78 frames. ], batch size: 48, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:38:50,985 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31488.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:39:16,873 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31509.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:39:20,063 INFO [train.py:903] (2/4) Epoch 5, batch 4200, loss[loss=0.231, simple_loss=0.2892, pruned_loss=0.08643, over 19766.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3433, pruned_loss=0.1126, over 3809666.87 frames. ], batch size: 47, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:39:23,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 05:39:50,934 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.017e+02 6.856e+02 8.101e+02 1.045e+03 3.023e+03, threshold=1.620e+03, percent-clipped=6.0 +2023-04-01 05:40:19,248 INFO [train.py:903] (2/4) Epoch 5, batch 4250, loss[loss=0.2351, simple_loss=0.311, pruned_loss=0.0796, over 19683.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3438, pruned_loss=0.1122, over 3806575.99 frames. ], batch size: 58, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:40:35,460 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 05:40:46,359 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 05:41:20,812 INFO [train.py:903] (2/4) Epoch 5, batch 4300, loss[loss=0.2531, simple_loss=0.3212, pruned_loss=0.09253, over 19604.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3438, pruned_loss=0.1117, over 3812869.81 frames. ], batch size: 50, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:41:51,079 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:41:51,774 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 7.144e+02 8.425e+02 1.091e+03 2.021e+03, threshold=1.685e+03, percent-clipped=5.0 +2023-04-01 05:42:13,886 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 05:42:18,604 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:21,699 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:22,341 INFO [train.py:903] (2/4) Epoch 5, batch 4350, loss[loss=0.4297, simple_loss=0.4413, pruned_loss=0.209, over 13415.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3445, pruned_loss=0.1125, over 3792703.96 frames. ], batch size: 136, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:42:28,421 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:45,986 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:57,882 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:22,093 INFO [train.py:903] (2/4) Epoch 5, batch 4400, loss[loss=0.2588, simple_loss=0.3299, pruned_loss=0.09381, over 19839.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3451, pruned_loss=0.1132, over 3798368.26 frames. ], batch size: 52, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:43:34,006 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:44,929 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 05:43:53,322 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.183e+02 7.191e+02 8.879e+02 1.082e+03 1.961e+03, threshold=1.776e+03, percent-clipped=1.0 +2023-04-01 05:43:55,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 05:44:01,523 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5091, 1.2410, 1.3659, 1.8980, 2.9500, 1.0213, 1.9077, 3.1421], + device='cuda:2'), covar=tensor([0.0335, 0.2525, 0.2376, 0.1278, 0.0575, 0.2266, 0.1211, 0.0391], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0309, 0.0306, 0.0279, 0.0298, 0.0308, 0.0281, 0.0295], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:44:02,773 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31744.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:44:15,733 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. 
limit=2.0 +2023-04-01 05:44:22,702 INFO [train.py:903] (2/4) Epoch 5, batch 4450, loss[loss=0.2957, simple_loss=0.3553, pruned_loss=0.1181, over 19685.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3442, pruned_loss=0.1126, over 3794380.42 frames. ], batch size: 60, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:44:30,970 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31769.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:44:32,885 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:16,679 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:23,180 INFO [train.py:903] (2/4) Epoch 5, batch 4500, loss[loss=0.2238, simple_loss=0.2867, pruned_loss=0.08048, over 18661.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3432, pruned_loss=0.1121, over 3802313.21 frames. ], batch size: 41, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:45:27,467 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2415, 2.0451, 1.6268, 1.3341, 2.0256, 1.0584, 1.1993, 1.7446], + device='cuda:2'), covar=tensor([0.0634, 0.0502, 0.0747, 0.0539, 0.0306, 0.0942, 0.0511, 0.0339], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0265, 0.0315, 0.0238, 0.0213, 0.0309, 0.0281, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:45:53,690 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.666e+02 8.776e+02 1.149e+03 2.550e+03, threshold=1.755e+03, percent-clipped=7.0 +2023-04-01 05:45:55,220 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0145, 2.0314, 1.4963, 1.4441, 1.3568, 1.4513, 0.2168, 0.9474], + device='cuda:2'), covar=tensor([0.0218, 0.0216, 0.0186, 0.0238, 0.0501, 0.0289, 0.0472, 0.0368], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0280, 0.0279, 0.0301, 0.0374, 0.0293, 0.0273, 0.0289], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:45:58,570 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:46:12,827 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:46:24,489 INFO [train.py:903] (2/4) Epoch 5, batch 4550, loss[loss=0.2796, simple_loss=0.3486, pruned_loss=0.1053, over 19335.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3432, pruned_loss=0.1123, over 3796941.55 frames. ], batch size: 66, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:46:30,252 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 05:46:49,019 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8294, 1.8041, 1.8864, 2.7231, 1.7121, 2.3765, 2.6208, 1.7571], + device='cuda:2'), covar=tensor([0.2090, 0.1702, 0.0961, 0.0916, 0.1913, 0.0752, 0.1450, 0.1613], + device='cuda:2'), in_proj_covar=tensor([0.0624, 0.0614, 0.0554, 0.0769, 0.0660, 0.0529, 0.0680, 0.0583], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 05:46:51,946 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 05:46:52,312 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:05,612 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31896.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:06,878 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 05:47:25,656 INFO [train.py:903] (2/4) Epoch 5, batch 4600, loss[loss=0.3047, simple_loss=0.3658, pruned_loss=0.1218, over 18746.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3414, pruned_loss=0.1107, over 3807095.41 frames. ], batch size: 74, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:47:29,617 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5146, 1.3853, 1.3173, 1.8341, 1.5786, 1.9224, 2.0199, 1.6996], + device='cuda:2'), covar=tensor([0.0836, 0.1070, 0.1146, 0.0996, 0.0986, 0.0719, 0.0923, 0.0705], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0256, 0.0245, 0.0282, 0.0272, 0.0235, 0.0235, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 05:47:31,124 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 05:47:50,451 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:55,744 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 6.775e+02 7.918e+02 9.900e+02 3.222e+03, threshold=1.584e+03, percent-clipped=3.0 +2023-04-01 05:48:25,674 INFO [train.py:903] (2/4) Epoch 5, batch 4650, loss[loss=0.3614, simple_loss=0.4061, pruned_loss=0.1584, over 19584.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3409, pruned_loss=0.1101, over 3822456.06 frames. ], batch size: 61, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:48:32,770 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:48:32,840 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31968.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:48:41,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 05:48:52,750 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 05:49:02,077 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1730, 1.1921, 1.7363, 1.3179, 2.4801, 2.0491, 2.8409, 0.9547], + device='cuda:2'), covar=tensor([0.1837, 0.3258, 0.1676, 0.1508, 0.1302, 0.1576, 0.1224, 0.3019], + device='cuda:2'), in_proj_covar=tensor([0.0440, 0.0500, 0.0477, 0.0405, 0.0549, 0.0443, 0.0617, 0.0441], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 05:49:19,928 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.2396, 5.1193, 5.9804, 5.9280, 1.8195, 5.4443, 4.8977, 5.4526], + device='cuda:2'), covar=tensor([0.0727, 0.0483, 0.0356, 0.0261, 0.4176, 0.0258, 0.0411, 0.0813], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0454, 0.0599, 0.0497, 0.0583, 0.0361, 0.0392, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:49:25,340 INFO [train.py:903] (2/4) Epoch 5, batch 4700, loss[loss=0.2674, simple_loss=0.3149, pruned_loss=0.1099, over 17816.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3413, pruned_loss=0.1102, over 3832143.27 frames. ], batch size: 39, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:49:48,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 05:49:56,507 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.386e+02 6.540e+02 8.637e+02 1.061e+03 2.519e+03, threshold=1.727e+03, percent-clipped=5.0 +2023-04-01 05:50:02,434 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8318, 4.8756, 5.6569, 5.5747, 1.7796, 5.1772, 4.5583, 5.1472], + device='cuda:2'), covar=tensor([0.0791, 0.0527, 0.0334, 0.0280, 0.3998, 0.0246, 0.0384, 0.0727], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0452, 0.0593, 0.0495, 0.0575, 0.0361, 0.0389, 0.0557], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:50:27,325 INFO [train.py:903] (2/4) Epoch 5, batch 4750, loss[loss=0.2303, simple_loss=0.2984, pruned_loss=0.08109, over 19368.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3421, pruned_loss=0.1109, over 3829541.70 frames. ], batch size: 47, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:50:32,841 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:51:28,010 INFO [train.py:903] (2/4) Epoch 5, batch 4800, loss[loss=0.3003, simple_loss=0.3604, pruned_loss=0.1201, over 19783.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3417, pruned_loss=0.1109, over 3825693.13 frames. 
], batch size: 56, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:51:57,644 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+02 7.051e+02 8.763e+02 1.042e+03 3.094e+03, threshold=1.753e+03, percent-clipped=4.0 +2023-04-01 05:52:00,450 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8210, 1.4187, 1.5203, 2.0813, 1.6022, 2.1436, 2.1226, 2.0343], + device='cuda:2'), covar=tensor([0.0783, 0.1036, 0.1041, 0.0917, 0.1040, 0.0702, 0.0980, 0.0609], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0248, 0.0243, 0.0275, 0.0266, 0.0229, 0.0230, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 05:52:04,777 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32142.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:14,252 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:27,944 INFO [train.py:903] (2/4) Epoch 5, batch 4850, loss[loss=0.2501, simple_loss=0.3073, pruned_loss=0.09641, over 15978.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3413, pruned_loss=0.1101, over 3821433.85 frames. ], batch size: 35, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:52:34,190 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:51,465 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:54,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 05:52:57,432 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:53:12,909 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 05:53:17,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 05:53:18,685 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 05:53:26,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 05:53:27,949 INFO [train.py:903] (2/4) Epoch 5, batch 4900, loss[loss=0.2977, simple_loss=0.3537, pruned_loss=0.1209, over 19752.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3422, pruned_loss=0.111, over 3828136.05 frames. ], batch size: 63, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:53:45,221 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32224.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:53:48,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 05:53:54,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2409, 3.6257, 3.7733, 3.8075, 1.2466, 3.4927, 3.0987, 3.3728], + device='cuda:2'), covar=tensor([0.0898, 0.0622, 0.0516, 0.0479, 0.4090, 0.0402, 0.0576, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0452, 0.0592, 0.0498, 0.0579, 0.0363, 0.0393, 0.0560], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:53:59,194 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.307e+02 6.585e+02 7.912e+02 1.039e+03 2.328e+03, threshold=1.582e+03, percent-clipped=1.0 +2023-04-01 05:54:02,592 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:09,672 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2772, 1.2609, 1.3309, 1.6685, 2.7980, 0.9242, 1.8078, 2.8542], + device='cuda:2'), covar=tensor([0.0465, 0.2719, 0.2691, 0.1516, 0.0660, 0.2517, 0.1298, 0.0507], + device='cuda:2'), in_proj_covar=tensor([0.0288, 0.0320, 0.0319, 0.0285, 0.0307, 0.0316, 0.0289, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 05:54:13,281 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32249.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:54:19,411 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 05:54:29,207 INFO [train.py:903] (2/4) Epoch 5, batch 4950, loss[loss=0.3071, simple_loss=0.3607, pruned_loss=0.1267, over 19777.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3416, pruned_loss=0.1107, over 3837466.46 frames. ], batch size: 56, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:54:33,938 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:48,160 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32277.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:49,219 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 05:54:55,499 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9762, 1.9520, 1.6191, 1.4133, 1.3172, 1.5933, 0.2801, 0.7713], + device='cuda:2'), covar=tensor([0.0226, 0.0244, 0.0166, 0.0277, 0.0577, 0.0289, 0.0474, 0.0436], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0284, 0.0277, 0.0305, 0.0375, 0.0297, 0.0275, 0.0293], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:55:12,825 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 05:55:15,579 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:30,486 INFO [train.py:903] (2/4) Epoch 5, batch 5000, loss[loss=0.2949, simple_loss=0.355, pruned_loss=0.1174, over 18846.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3416, pruned_loss=0.1111, over 3839859.12 frames. 
], batch size: 74, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:55:30,636 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:40,422 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 05:55:50,546 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 05:55:58,321 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+02 7.030e+02 8.789e+02 1.165e+03 2.289e+03, threshold=1.758e+03, percent-clipped=7.0 +2023-04-01 05:56:01,850 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2779, 3.0092, 2.1896, 2.7690, 0.8791, 2.7438, 2.6568, 2.8581], + device='cuda:2'), covar=tensor([0.0968, 0.1396, 0.1785, 0.0981, 0.3756, 0.1250, 0.1100, 0.1035], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0316, 0.0363, 0.0287, 0.0353, 0.0304, 0.0287, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 05:56:21,789 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:56:29,551 INFO [train.py:903] (2/4) Epoch 5, batch 5050, loss[loss=0.2567, simple_loss=0.3131, pruned_loss=0.1001, over 19384.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3409, pruned_loss=0.1111, over 3839013.49 frames. ], batch size: 47, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:57:05,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 05:57:07,722 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:57:30,264 INFO [train.py:903] (2/4) Epoch 5, batch 5100, loss[loss=0.291, simple_loss=0.3566, pruned_loss=0.1127, over 19770.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3419, pruned_loss=0.111, over 3816382.72 frames. ], batch size: 56, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:57:40,609 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 05:57:44,919 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 05:57:51,248 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 05:57:51,541 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:02,218 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 6.879e+02 8.267e+02 1.044e+03 2.791e+03, threshold=1.653e+03, percent-clipped=3.0 +2023-04-01 05:58:02,688 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:32,016 INFO [train.py:903] (2/4) Epoch 5, batch 5150, loss[loss=0.2988, simple_loss=0.3441, pruned_loss=0.1268, over 19412.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.342, pruned_loss=0.1108, over 3812487.19 frames. 
], batch size: 48, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:58:32,381 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32462.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:45,292 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 05:59:18,360 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8523, 4.2283, 4.5651, 4.5267, 1.6961, 4.1223, 3.6694, 4.1374], + device='cuda:2'), covar=tensor([0.0883, 0.0573, 0.0459, 0.0426, 0.4034, 0.0375, 0.0494, 0.1026], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0452, 0.0598, 0.0501, 0.0579, 0.0366, 0.0395, 0.0567], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:59:19,213 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 05:59:33,455 INFO [train.py:903] (2/4) Epoch 5, batch 5200, loss[loss=0.2705, simple_loss=0.3337, pruned_loss=0.1036, over 19683.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3411, pruned_loss=0.1096, over 3829014.66 frames. ], batch size: 53, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:59:39,262 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6849, 4.1046, 4.3641, 4.3666, 1.7210, 3.9133, 3.5160, 3.9506], + device='cuda:2'), covar=tensor([0.0959, 0.0577, 0.0471, 0.0426, 0.3734, 0.0395, 0.0496, 0.1041], + device='cuda:2'), in_proj_covar=tensor([0.0514, 0.0449, 0.0594, 0.0497, 0.0576, 0.0363, 0.0393, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 05:59:43,873 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:59:45,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 06:00:02,753 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 6.175e+02 7.903e+02 1.065e+03 1.799e+03, threshold=1.581e+03, percent-clipped=1.0 +2023-04-01 06:00:15,171 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:26,334 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:29,348 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 06:00:32,701 INFO [train.py:903] (2/4) Epoch 5, batch 5250, loss[loss=0.2479, simple_loss=0.3076, pruned_loss=0.09406, over 19613.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3418, pruned_loss=0.1101, over 3810530.66 frames. ], batch size: 50, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:00:49,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. limit=5.0 +2023-04-01 06:00:53,232 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-01 06:00:55,210 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,228 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,913 INFO [train.py:903] (2/4) Epoch 5, batch 5300, loss[loss=0.2436, simple_loss=0.2974, pruned_loss=0.0949, over 19774.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3409, pruned_loss=0.1101, over 3793800.50 frames. ], batch size: 47, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:01:51,035 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 06:01:51,395 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 06:02:04,596 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:05,309 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 6.583e+02 8.041e+02 1.027e+03 2.106e+03, threshold=1.608e+03, percent-clipped=4.0 +2023-04-01 06:02:15,440 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1534, 1.9838, 2.0924, 2.0683, 4.4817, 0.9830, 2.5253, 4.5571], + device='cuda:2'), covar=tensor([0.0226, 0.2333, 0.2119, 0.1561, 0.0469, 0.2588, 0.1097, 0.0285], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0312, 0.0315, 0.0286, 0.0303, 0.0316, 0.0288, 0.0304], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:02:17,889 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32648.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:30,208 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6812, 1.6485, 1.7778, 2.6666, 1.5807, 2.3112, 2.3061, 1.7021], + device='cuda:2'), covar=tensor([0.2210, 0.1860, 0.0907, 0.0869, 0.1970, 0.0761, 0.1851, 0.1624], + device='cuda:2'), in_proj_covar=tensor([0.0631, 0.0624, 0.0557, 0.0783, 0.0662, 0.0547, 0.0686, 0.0589], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:02:34,332 INFO [train.py:903] (2/4) Epoch 5, batch 5350, loss[loss=0.2997, simple_loss=0.3471, pruned_loss=0.1261, over 19741.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3407, pruned_loss=0.11, over 3786016.68 frames. ], batch size: 51, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:02:48,932 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:54,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2753, 3.8828, 2.2533, 3.4373, 1.1303, 3.3709, 3.4564, 3.6989], + device='cuda:2'), covar=tensor([0.0660, 0.1142, 0.2046, 0.0773, 0.3893, 0.0941, 0.0776, 0.0740], + device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0307, 0.0361, 0.0285, 0.0354, 0.0303, 0.0288, 0.0310], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:03:00,325 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:06,675 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 06:03:31,059 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:35,126 INFO [train.py:903] (2/4) Epoch 5, batch 5400, loss[loss=0.3335, simple_loss=0.3767, pruned_loss=0.1451, over 12970.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3419, pruned_loss=0.1108, over 3793596.17 frames. ], batch size: 135, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:03:43,270 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4561, 1.1360, 1.1370, 1.3959, 1.1624, 1.2778, 1.1181, 1.3136], + device='cuda:2'), covar=tensor([0.0820, 0.1105, 0.1161, 0.0716, 0.0928, 0.0506, 0.1002, 0.0651], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0360, 0.0280, 0.0239, 0.0305, 0.0239, 0.0268, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:03:46,774 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3117, 2.3763, 1.7215, 1.4233, 2.2168, 1.1081, 0.9988, 1.7002], + device='cuda:2'), covar=tensor([0.0985, 0.0548, 0.0981, 0.0715, 0.0404, 0.1194, 0.0791, 0.0470], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0267, 0.0319, 0.0240, 0.0219, 0.0307, 0.0285, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:04:03,208 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.192e+02 6.673e+02 8.431e+02 1.064e+03 2.658e+03, threshold=1.686e+03, percent-clipped=8.0 +2023-04-01 06:04:34,472 INFO [train.py:903] (2/4) Epoch 5, batch 5450, loss[loss=0.2804, simple_loss=0.3396, pruned_loss=0.1106, over 19502.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.342, pruned_loss=0.1111, over 3792022.25 frames. ], batch size: 64, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:05:34,682 INFO [train.py:903] (2/4) Epoch 5, batch 5500, loss[loss=0.2541, simple_loss=0.3102, pruned_loss=0.09901, over 19780.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3419, pruned_loss=0.1109, over 3799104.71 frames. ], batch size: 47, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:05:56,680 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 06:06:05,437 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+02 6.283e+02 7.782e+02 1.000e+03 2.107e+03, threshold=1.556e+03, percent-clipped=4.0 +2023-04-01 06:06:06,856 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7265, 1.4188, 1.8122, 1.4621, 2.7559, 3.2369, 3.1832, 3.4073], + device='cuda:2'), covar=tensor([0.1142, 0.2711, 0.2472, 0.1740, 0.0460, 0.0264, 0.0211, 0.0141], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0283, 0.0315, 0.0248, 0.0197, 0.0116, 0.0204, 0.0139], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:06:34,477 INFO [train.py:903] (2/4) Epoch 5, batch 5550, loss[loss=0.2648, simple_loss=0.3212, pruned_loss=0.1042, over 19835.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3428, pruned_loss=0.1113, over 3795368.53 frames. ], batch size: 52, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:06:40,855 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 06:06:53,491 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1828, 1.1977, 1.7083, 1.3547, 2.5421, 2.1122, 2.6686, 1.0125], + device='cuda:2'), covar=tensor([0.1929, 0.3443, 0.1788, 0.1675, 0.1276, 0.1606, 0.1364, 0.2989], + device='cuda:2'), in_proj_covar=tensor([0.0447, 0.0505, 0.0481, 0.0409, 0.0552, 0.0444, 0.0627, 0.0444], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:07:29,978 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 06:07:36,770 INFO [train.py:903] (2/4) Epoch 5, batch 5600, loss[loss=0.2651, simple_loss=0.3445, pruned_loss=0.09285, over 19779.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.343, pruned_loss=0.1112, over 3807866.23 frames. ], batch size: 56, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:08:06,606 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 7.188e+02 9.159e+02 1.163e+03 2.158e+03, threshold=1.832e+03, percent-clipped=9.0 +2023-04-01 06:08:38,303 INFO [train.py:903] (2/4) Epoch 5, batch 5650, loss[loss=0.2667, simple_loss=0.3418, pruned_loss=0.09582, over 19540.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3423, pruned_loss=0.1102, over 3819644.83 frames. ], batch size: 56, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:09:04,591 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:09:24,859 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 06:09:38,279 INFO [train.py:903] (2/4) Epoch 5, batch 5700, loss[loss=0.343, simple_loss=0.3802, pruned_loss=0.1529, over 13327.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3425, pruned_loss=0.1104, over 3816934.55 frames. ], batch size: 137, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:09:59,223 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4169, 2.9656, 2.2328, 2.1938, 2.2156, 2.5027, 0.6848, 2.2182], + device='cuda:2'), covar=tensor([0.0255, 0.0237, 0.0257, 0.0364, 0.0462, 0.0398, 0.0561, 0.0422], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0288, 0.0287, 0.0300, 0.0375, 0.0297, 0.0276, 0.0296], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:10:09,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.491e+02 7.117e+02 8.783e+02 1.086e+03 2.576e+03, threshold=1.757e+03, percent-clipped=4.0 +2023-04-01 06:10:21,210 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 06:10:26,586 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3657, 1.4633, 1.5273, 1.8079, 2.9408, 1.2229, 2.0706, 3.2008], + device='cuda:2'), covar=tensor([0.0383, 0.2287, 0.2160, 0.1443, 0.0570, 0.2262, 0.1210, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0310, 0.0312, 0.0288, 0.0305, 0.0314, 0.0284, 0.0300], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:10:38,545 INFO [train.py:903] (2/4) Epoch 5, batch 5750, loss[loss=0.2877, simple_loss=0.3535, pruned_loss=0.111, over 19779.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3435, pruned_loss=0.1111, over 3814812.09 frames. 
], batch size: 56, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:10:39,678 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 06:10:47,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 06:10:52,569 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 06:11:09,100 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33087.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:11:40,192 INFO [train.py:903] (2/4) Epoch 5, batch 5800, loss[loss=0.3176, simple_loss=0.3744, pruned_loss=0.1304, over 17448.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3435, pruned_loss=0.1109, over 3819065.22 frames. ], batch size: 101, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:08,910 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 7.070e+02 8.520e+02 1.129e+03 2.712e+03, threshold=1.704e+03, percent-clipped=8.0 +2023-04-01 06:12:29,055 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9485, 4.9696, 5.6738, 5.5931, 1.6287, 5.1518, 4.7354, 5.1720], + device='cuda:2'), covar=tensor([0.0802, 0.0528, 0.0399, 0.0328, 0.4398, 0.0327, 0.0373, 0.0818], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0448, 0.0606, 0.0504, 0.0578, 0.0370, 0.0389, 0.0568], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 06:12:40,686 INFO [train.py:903] (2/4) Epoch 5, batch 5850, loss[loss=0.3133, simple_loss=0.3659, pruned_loss=0.1303, over 19475.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.344, pruned_loss=0.1117, over 3809561.62 frames. ], batch size: 64, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:53,515 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:13:14,079 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9187, 4.2909, 4.5654, 4.5346, 1.5124, 4.2296, 3.7975, 4.2126], + device='cuda:2'), covar=tensor([0.0861, 0.0569, 0.0440, 0.0389, 0.3943, 0.0321, 0.0417, 0.0918], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0442, 0.0591, 0.0491, 0.0564, 0.0362, 0.0380, 0.0558], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 06:13:40,938 INFO [train.py:903] (2/4) Epoch 5, batch 5900, loss[loss=0.2942, simple_loss=0.3451, pruned_loss=0.1216, over 19368.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3437, pruned_loss=0.1114, over 3805188.83 frames. ], batch size: 47, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:13:43,344 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 06:14:04,504 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 06:14:11,994 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.345e+02 7.066e+02 8.762e+02 1.130e+03 2.300e+03, threshold=1.752e+03, percent-clipped=6.0 +2023-04-01 06:14:41,683 INFO [train.py:903] (2/4) Epoch 5, batch 5950, loss[loss=0.3546, simple_loss=0.3938, pruned_loss=0.1577, over 12528.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.344, pruned_loss=0.1109, over 3810158.26 frames. 
], batch size: 135, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:43,948 INFO [train.py:903] (2/4) Epoch 5, batch 6000, loss[loss=0.328, simple_loss=0.3767, pruned_loss=0.1396, over 19743.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.343, pruned_loss=0.1103, over 3821607.77 frames. ], batch size: 63, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:43,948 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 06:15:56,881 INFO [train.py:937] (2/4) Epoch 5, validation: loss=0.203, simple_loss=0.3017, pruned_loss=0.05213, over 944034.00 frames. +2023-04-01 06:15:56,882 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 06:16:18,148 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:16:28,852 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.050e+02 6.893e+02 8.503e+02 1.056e+03 1.945e+03, threshold=1.701e+03, percent-clipped=4.0 +2023-04-01 06:16:59,409 INFO [train.py:903] (2/4) Epoch 5, batch 6050, loss[loss=0.2818, simple_loss=0.3463, pruned_loss=0.1087, over 19314.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3423, pruned_loss=0.1104, over 3822159.20 frames. ], batch size: 70, lr: 1.56e-02, grad_scale: 16.0 +2023-04-01 06:17:49,775 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7187, 4.2348, 2.4916, 3.8536, 1.1402, 3.9466, 3.9589, 4.1662], + device='cuda:2'), covar=tensor([0.0568, 0.1177, 0.1990, 0.0702, 0.3696, 0.0784, 0.0699, 0.0782], + device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0312, 0.0365, 0.0285, 0.0352, 0.0306, 0.0288, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:18:00,557 INFO [train.py:903] (2/4) Epoch 5, batch 6100, loss[loss=0.3111, simple_loss=0.3691, pruned_loss=0.1265, over 19324.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3427, pruned_loss=0.1104, over 3826771.68 frames. ], batch size: 66, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:18:23,047 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33431.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:18:32,606 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.262e+02 7.464e+02 9.853e+02 2.581e+03, threshold=1.493e+03, percent-clipped=2.0 +2023-04-01 06:18:39,244 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:19:00,962 INFO [train.py:903] (2/4) Epoch 5, batch 6150, loss[loss=0.324, simple_loss=0.3775, pruned_loss=0.1352, over 17550.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3437, pruned_loss=0.1112, over 3816612.84 frames. ], batch size: 101, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:19:29,652 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 06:19:35,815 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:01,183 INFO [train.py:903] (2/4) Epoch 5, batch 6200, loss[loss=0.2822, simple_loss=0.3472, pruned_loss=0.1086, over 19455.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3444, pruned_loss=0.1117, over 3821604.17 frames. 
], batch size: 64, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:20:08,938 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:18,630 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3658, 1.1488, 1.3101, 1.4410, 2.7731, 0.9605, 2.1165, 2.9450], + device='cuda:2'), covar=tensor([0.0593, 0.3196, 0.2951, 0.1832, 0.0926, 0.2822, 0.1291, 0.0583], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0309, 0.0307, 0.0289, 0.0308, 0.0316, 0.0282, 0.0299], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:20:34,185 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.213e+02 6.728e+02 8.677e+02 1.165e+03 2.777e+03, threshold=1.735e+03, percent-clipped=13.0 +2023-04-01 06:20:43,516 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:21:03,404 INFO [train.py:903] (2/4) Epoch 5, batch 6250, loss[loss=0.2826, simple_loss=0.3307, pruned_loss=0.1173, over 17816.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3434, pruned_loss=0.1113, over 3815595.39 frames. ], batch size: 39, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:21:31,254 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 06:21:47,419 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8157, 1.3951, 1.4843, 1.6324, 3.2934, 1.1309, 1.8855, 3.4858], + device='cuda:2'), covar=tensor([0.0331, 0.2239, 0.2202, 0.1444, 0.0541, 0.2196, 0.1316, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0307, 0.0306, 0.0288, 0.0306, 0.0312, 0.0284, 0.0296], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:22:03,934 INFO [train.py:903] (2/4) Epoch 5, batch 6300, loss[loss=0.3235, simple_loss=0.3751, pruned_loss=0.136, over 19682.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3443, pruned_loss=0.1114, over 3818941.65 frames. ], batch size: 59, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:22:28,391 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:22:35,552 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 6.435e+02 8.030e+02 9.840e+02 2.632e+03, threshold=1.606e+03, percent-clipped=3.0 +2023-04-01 06:23:01,036 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2557, 2.1205, 1.6248, 1.3378, 2.0166, 0.9505, 1.0092, 1.5714], + device='cuda:2'), covar=tensor([0.0764, 0.0531, 0.0850, 0.0560, 0.0388, 0.1112, 0.0668, 0.0394], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0266, 0.0311, 0.0238, 0.0223, 0.0304, 0.0283, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:23:04,056 INFO [train.py:903] (2/4) Epoch 5, batch 6350, loss[loss=0.2937, simple_loss=0.3571, pruned_loss=0.1151, over 19763.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3451, pruned_loss=0.112, over 3807127.62 frames. 
], batch size: 56, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:23:05,722 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3573, 1.4122, 2.0485, 1.5767, 3.1514, 2.7809, 3.3329, 1.3726], + device='cuda:2'), covar=tensor([0.1870, 0.3005, 0.1677, 0.1395, 0.1215, 0.1343, 0.1302, 0.2785], + device='cuda:2'), in_proj_covar=tensor([0.0443, 0.0501, 0.0484, 0.0407, 0.0549, 0.0448, 0.0624, 0.0438], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:23:19,768 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 06:23:45,916 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:23:50,580 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:05,037 INFO [train.py:903] (2/4) Epoch 5, batch 6400, loss[loss=0.3053, simple_loss=0.3692, pruned_loss=0.1207, over 19520.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.344, pruned_loss=0.1114, over 3816403.40 frames. ], batch size: 54, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:24:20,319 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:25,422 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2603, 2.9683, 2.0059, 2.8282, 0.9146, 2.7497, 2.7765, 2.8384], + device='cuda:2'), covar=tensor([0.1038, 0.1323, 0.2044, 0.0861, 0.3796, 0.1139, 0.0937, 0.1201], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0312, 0.0364, 0.0284, 0.0354, 0.0302, 0.0286, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:24:37,366 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.704e+02 6.874e+02 8.420e+02 1.031e+03 3.616e+03, threshold=1.684e+03, percent-clipped=3.0 +2023-04-01 06:25:00,399 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8777, 2.0312, 1.9618, 2.6818, 1.7879, 2.5908, 2.5117, 1.8088], + device='cuda:2'), covar=tensor([0.2328, 0.1746, 0.0985, 0.1062, 0.2104, 0.0740, 0.1900, 0.1777], + device='cuda:2'), in_proj_covar=tensor([0.0641, 0.0632, 0.0557, 0.0787, 0.0669, 0.0544, 0.0690, 0.0593], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:25:05,853 INFO [train.py:903] (2/4) Epoch 5, batch 6450, loss[loss=0.2892, simple_loss=0.3428, pruned_loss=0.1178, over 19783.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3431, pruned_loss=0.1104, over 3830152.54 frames. ], batch size: 56, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:25:47,747 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-01 06:25:48,200 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 06:25:54,873 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:06,350 INFO [train.py:903] (2/4) Epoch 5, batch 6500, loss[loss=0.2485, simple_loss=0.3181, pruned_loss=0.08942, over 19777.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.341, pruned_loss=0.1094, over 3827450.50 frames. 
], batch size: 54, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:26:12,185 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 06:26:24,741 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:32,486 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33834.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:36,708 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.680e+02 8.171e+02 1.132e+03 2.519e+03, threshold=1.634e+03, percent-clipped=6.0 +2023-04-01 06:26:43,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-01 06:27:07,201 INFO [train.py:903] (2/4) Epoch 5, batch 6550, loss[loss=0.2831, simple_loss=0.3529, pruned_loss=0.1066, over 19686.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3413, pruned_loss=0.1097, over 3817383.61 frames. ], batch size: 60, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:27:38,488 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:06,994 INFO [train.py:903] (2/4) Epoch 5, batch 6600, loss[loss=0.2296, simple_loss=0.2941, pruned_loss=0.0826, over 19401.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3421, pruned_loss=0.11, over 3821979.35 frames. ], batch size: 48, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:28:08,498 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:17,264 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8250, 1.3077, 0.9866, 0.9555, 1.2091, 0.8421, 0.7096, 1.2360], + device='cuda:2'), covar=tensor([0.0488, 0.0589, 0.0888, 0.0490, 0.0394, 0.0975, 0.0507, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0271, 0.0314, 0.0239, 0.0224, 0.0305, 0.0286, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:28:35,137 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-01 06:28:40,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.407e+02 7.502e+02 9.137e+02 1.060e+03 2.817e+03, threshold=1.827e+03, percent-clipped=6.0 +2023-04-01 06:28:52,816 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33949.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:01,875 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:09,039 INFO [train.py:903] (2/4) Epoch 5, batch 6650, loss[loss=0.2788, simple_loss=0.3473, pruned_loss=0.1051, over 19480.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3424, pruned_loss=0.1104, over 3820486.76 frames. ], batch size: 64, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:29:35,978 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.91 vs. limit=5.0 +2023-04-01 06:30:10,063 INFO [train.py:903] (2/4) Epoch 5, batch 6700, loss[loss=0.2985, simple_loss=0.357, pruned_loss=0.12, over 18710.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3411, pruned_loss=0.1097, over 3833015.74 frames. 
], batch size: 74, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:18,351 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4460, 2.2174, 1.5277, 1.4949, 2.1006, 1.1294, 1.1236, 1.8033], + device='cuda:2'), covar=tensor([0.0663, 0.0494, 0.0820, 0.0472, 0.0360, 0.0872, 0.0607, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0274, 0.0317, 0.0241, 0.0225, 0.0307, 0.0290, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:30:40,326 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 7.257e+02 9.131e+02 1.100e+03 2.314e+03, threshold=1.826e+03, percent-clipped=7.0 +2023-04-01 06:30:40,466 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:06,275 INFO [train.py:903] (2/4) Epoch 5, batch 6750, loss[loss=0.3051, simple_loss=0.3624, pruned_loss=0.1239, over 19091.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3415, pruned_loss=0.1102, over 3841423.34 frames. ], batch size: 69, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:31:06,517 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:32:02,678 INFO [train.py:903] (2/4) Epoch 5, batch 6800, loss[loss=0.301, simple_loss=0.3409, pruned_loss=0.1305, over 19728.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.34, pruned_loss=0.1096, over 3829426.17 frames. ], batch size: 51, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:32:27,365 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0658, 0.8849, 0.8070, 0.9599, 0.8624, 0.9174, 0.8451, 0.9164], + device='cuda:2'), covar=tensor([0.0632, 0.0899, 0.0903, 0.0568, 0.0689, 0.0402, 0.0717, 0.0512], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0361, 0.0280, 0.0234, 0.0298, 0.0243, 0.0265, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:32:30,648 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.271e+02 6.317e+02 7.808e+02 9.230e+02 1.582e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-04-01 06:32:47,671 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 06:32:48,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 06:32:50,740 INFO [train.py:903] (2/4) Epoch 6, batch 0, loss[loss=0.3062, simple_loss=0.3414, pruned_loss=0.1355, over 19481.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3414, pruned_loss=0.1355, over 19481.00 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:32:50,740 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 06:33:02,102 INFO [train.py:937] (2/4) Epoch 6, validation: loss=0.2022, simple_loss=0.3015, pruned_loss=0.05149, over 944034.00 frames. +2023-04-01 06:33:02,103 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 06:33:15,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 06:33:20,330 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:33:30,285 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:33:36,218 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3259, 1.2672, 1.6926, 1.4524, 2.3200, 1.9091, 2.3215, 1.2245], + device='cuda:2'), covar=tensor([0.1418, 0.2509, 0.1431, 0.1225, 0.0857, 0.1277, 0.0995, 0.2322], + device='cuda:2'), in_proj_covar=tensor([0.0445, 0.0502, 0.0485, 0.0406, 0.0547, 0.0447, 0.0623, 0.0442], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:34:03,533 INFO [train.py:903] (2/4) Epoch 6, batch 50, loss[loss=0.2929, simple_loss=0.3552, pruned_loss=0.1153, over 19300.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.344, pruned_loss=0.1136, over 849640.00 frames. ], batch size: 70, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:34:15,335 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 06:34:22,187 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34205.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:29,752 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 06:34:40,720 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 06:34:54,285 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34230.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:35:05,224 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.721e+02 5.756e+02 7.149e+02 1.025e+03 3.166e+03, threshold=1.430e+03, percent-clipped=7.0 +2023-04-01 06:35:06,297 INFO [train.py:903] (2/4) Epoch 6, batch 100, loss[loss=0.2625, simple_loss=0.3376, pruned_loss=0.09372, over 19780.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3404, pruned_loss=0.108, over 1511547.46 frames. ], batch size: 54, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:35:18,590 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 06:35:20,122 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9739, 1.2328, 1.1896, 1.5265, 2.5662, 1.0657, 1.9647, 2.6502], + device='cuda:2'), covar=tensor([0.0448, 0.2413, 0.2578, 0.1454, 0.0689, 0.2235, 0.1055, 0.0462], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0311, 0.0313, 0.0286, 0.0307, 0.0316, 0.0284, 0.0301], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:36:06,449 INFO [train.py:903] (2/4) Epoch 6, batch 150, loss[loss=0.2635, simple_loss=0.3216, pruned_loss=0.1027, over 19389.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3404, pruned_loss=0.1089, over 2022619.84 frames. 
], batch size: 47, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:36:19,468 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:37:08,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.164e+02 6.478e+02 8.283e+02 9.993e+02 1.951e+03, threshold=1.657e+03, percent-clipped=7.0 +2023-04-01 06:37:08,910 INFO [train.py:903] (2/4) Epoch 6, batch 200, loss[loss=0.285, simple_loss=0.3535, pruned_loss=0.1083, over 19516.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3413, pruned_loss=0.1087, over 2414483.04 frames. ], batch size: 54, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:37:08,922 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 06:38:12,101 INFO [train.py:903] (2/4) Epoch 6, batch 250, loss[loss=0.2645, simple_loss=0.3338, pruned_loss=0.09757, over 19458.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3401, pruned_loss=0.1083, over 2719549.04 frames. ], batch size: 64, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:38:28,627 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2521, 1.1930, 1.7772, 1.3881, 3.2476, 2.5329, 3.3585, 1.4251], + device='cuda:2'), covar=tensor([0.1959, 0.3024, 0.1773, 0.1619, 0.1002, 0.1356, 0.1233, 0.2689], + device='cuda:2'), in_proj_covar=tensor([0.0446, 0.0497, 0.0483, 0.0407, 0.0548, 0.0442, 0.0620, 0.0440], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:38:33,066 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:38,019 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:44,976 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:46,309 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3314, 2.2404, 1.6210, 1.3581, 2.0791, 1.1661, 1.1562, 1.7003], + device='cuda:2'), covar=tensor([0.0691, 0.0409, 0.0757, 0.0533, 0.0306, 0.0865, 0.0567, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0266, 0.0306, 0.0235, 0.0222, 0.0298, 0.0280, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:39:08,901 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:14,123 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 6.949e+02 8.663e+02 1.115e+03 2.860e+03, threshold=1.733e+03, percent-clipped=3.0 +2023-04-01 06:39:14,147 INFO [train.py:903] (2/4) Epoch 6, batch 300, loss[loss=0.2894, simple_loss=0.3562, pruned_loss=0.1113, over 19627.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3406, pruned_loss=0.1091, over 2955899.00 frames. ], batch size: 61, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:40:17,205 INFO [train.py:903] (2/4) Epoch 6, batch 350, loss[loss=0.289, simple_loss=0.3571, pruned_loss=0.1104, over 19602.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3394, pruned_loss=0.1081, over 3157751.33 frames. ], batch size: 61, lr: 1.43e-02, grad_scale: 4.0 +2023-04-01 06:40:22,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 06:40:37,844 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:40:45,483 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 06:40:55,542 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:41:02,912 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 06:41:18,581 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+02 6.755e+02 8.258e+02 9.979e+02 1.871e+03, threshold=1.652e+03, percent-clipped=1.0 +2023-04-01 06:41:18,611 INFO [train.py:903] (2/4) Epoch 6, batch 400, loss[loss=0.239, simple_loss=0.3107, pruned_loss=0.08366, over 19852.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.3399, pruned_loss=0.1086, over 3298839.89 frames. ], batch size: 52, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:41:51,724 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0501, 0.9037, 0.8041, 0.9988, 0.9066, 0.9329, 0.8476, 0.9225], + device='cuda:2'), covar=tensor([0.0613, 0.0827, 0.0942, 0.0502, 0.0653, 0.0372, 0.0752, 0.0511], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0362, 0.0287, 0.0235, 0.0301, 0.0245, 0.0272, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:42:20,648 INFO [train.py:903] (2/4) Epoch 6, batch 450, loss[loss=0.2642, simple_loss=0.326, pruned_loss=0.1012, over 19598.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3397, pruned_loss=0.1084, over 3417860.55 frames. ], batch size: 52, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:49,008 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:42:54,494 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 06:42:55,448 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 06:43:01,491 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:23,825 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.432e+02 6.970e+02 8.269e+02 1.071e+03 2.551e+03, threshold=1.654e+03, percent-clipped=6.0 +2023-04-01 06:43:23,843 INFO [train.py:903] (2/4) Epoch 6, batch 500, loss[loss=0.3011, simple_loss=0.3615, pruned_loss=0.1203, over 19676.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3378, pruned_loss=0.1072, over 3526592.76 frames. 
], batch size: 53, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:43:25,152 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34640.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:29,882 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:30,008 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1245, 1.8487, 1.4558, 1.2030, 1.7163, 0.9678, 1.0948, 1.5621], + device='cuda:2'), covar=tensor([0.0516, 0.0460, 0.0757, 0.0464, 0.0298, 0.0899, 0.0462, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0271, 0.0314, 0.0238, 0.0227, 0.0305, 0.0286, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:43:35,800 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7459, 3.1462, 3.2135, 3.1911, 1.2500, 2.9624, 2.6801, 2.9000], + device='cuda:2'), covar=tensor([0.1122, 0.0769, 0.0674, 0.0669, 0.3921, 0.0588, 0.0643, 0.1224], + device='cuda:2'), in_proj_covar=tensor([0.0532, 0.0464, 0.0610, 0.0510, 0.0598, 0.0381, 0.0390, 0.0576], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 06:44:03,460 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:27,130 INFO [train.py:903] (2/4) Epoch 6, batch 550, loss[loss=0.2883, simple_loss=0.3564, pruned_loss=0.1101, over 19668.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3387, pruned_loss=0.1077, over 3601250.75 frames. ], batch size: 58, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:44:37,142 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:42,301 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 06:45:17,136 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34728.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:45:31,828 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.191e+02 8.093e+02 9.820e+02 1.880e+03, threshold=1.619e+03, percent-clipped=2.0 +2023-04-01 06:45:31,861 INFO [train.py:903] (2/4) Epoch 6, batch 600, loss[loss=0.2777, simple_loss=0.341, pruned_loss=0.1072, over 19529.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3389, pruned_loss=0.1074, over 3651062.23 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:13,325 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 06:46:20,232 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34777.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:46:35,484 INFO [train.py:903] (2/4) Epoch 6, batch 650, loss[loss=0.2578, simple_loss=0.31, pruned_loss=0.1028, over 19734.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3384, pruned_loss=0.1075, over 3684646.32 frames. 
], batch size: 47, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:51,901 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:47:38,641 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 6.280e+02 8.587e+02 1.153e+03 3.497e+03, threshold=1.717e+03, percent-clipped=9.0 +2023-04-01 06:47:38,667 INFO [train.py:903] (2/4) Epoch 6, batch 700, loss[loss=0.2524, simple_loss=0.3365, pruned_loss=0.08413, over 19607.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3381, pruned_loss=0.1076, over 3722608.46 frames. ], batch size: 57, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:47:51,733 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.9805, 5.2158, 2.5428, 4.5940, 1.3818, 5.1011, 5.1481, 5.4983], + device='cuda:2'), covar=tensor([0.0446, 0.0952, 0.2178, 0.0566, 0.3716, 0.0569, 0.0601, 0.0593], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0315, 0.0376, 0.0290, 0.0360, 0.0310, 0.0289, 0.0322], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:47:54,279 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2236, 1.3592, 1.1719, 1.0217, 1.0789, 1.1472, 0.0419, 0.4025], + device='cuda:2'), covar=tensor([0.0240, 0.0241, 0.0156, 0.0187, 0.0473, 0.0192, 0.0439, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0288, 0.0292, 0.0306, 0.0374, 0.0295, 0.0286, 0.0296], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:48:27,771 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:48:43,535 INFO [train.py:903] (2/4) Epoch 6, batch 750, loss[loss=0.2203, simple_loss=0.2888, pruned_loss=0.07592, over 19747.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3383, pruned_loss=0.1075, over 3754573.11 frames. ], batch size: 46, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:48:47,835 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9561, 1.9997, 1.9370, 2.9004, 2.0761, 2.8197, 2.5490, 1.8175], + device='cuda:2'), covar=tensor([0.2085, 0.1591, 0.0839, 0.0931, 0.1796, 0.0626, 0.1586, 0.1505], + device='cuda:2'), in_proj_covar=tensor([0.0652, 0.0638, 0.0570, 0.0800, 0.0681, 0.0557, 0.0701, 0.0605], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:49:00,068 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:49:45,081 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.784e+02 6.254e+02 7.861e+02 1.094e+03 2.828e+03, threshold=1.572e+03, percent-clipped=5.0 +2023-04-01 06:49:45,101 INFO [train.py:903] (2/4) Epoch 6, batch 800, loss[loss=0.304, simple_loss=0.3703, pruned_loss=0.1188, over 18608.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.339, pruned_loss=0.108, over 3770407.37 frames. ], batch size: 74, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:50:02,473 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 06:50:03,751 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:37,472 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6455, 1.4485, 1.4182, 1.7073, 3.1475, 1.1037, 2.1621, 3.4193], + device='cuda:2'), covar=tensor([0.0359, 0.2358, 0.2282, 0.1394, 0.0586, 0.2185, 0.1101, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0317, 0.0317, 0.0287, 0.0313, 0.0315, 0.0289, 0.0307], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:50:41,628 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:46,219 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:48,466 INFO [train.py:903] (2/4) Epoch 6, batch 850, loss[loss=0.2897, simple_loss=0.3564, pruned_loss=0.1115, over 19663.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3392, pruned_loss=0.1076, over 3782789.43 frames. ], batch size: 55, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:50:57,233 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 06:51:32,787 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6878, 1.7457, 1.7667, 2.4425, 1.6217, 2.2625, 2.3150, 1.7426], + device='cuda:2'), covar=tensor([0.2277, 0.1809, 0.0963, 0.0949, 0.1973, 0.0776, 0.1847, 0.1648], + device='cuda:2'), in_proj_covar=tensor([0.0661, 0.0650, 0.0580, 0.0808, 0.0686, 0.0564, 0.0705, 0.0612], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:51:42,522 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 06:51:49,534 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 6.223e+02 7.935e+02 9.772e+02 2.166e+03, threshold=1.587e+03, percent-clipped=2.0 +2023-04-01 06:51:49,552 INFO [train.py:903] (2/4) Epoch 6, batch 900, loss[loss=0.2965, simple_loss=0.3534, pruned_loss=0.1198, over 19541.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3395, pruned_loss=0.1076, over 3796799.65 frames. ], batch size: 64, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:08,666 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0260, 1.3355, 1.3923, 1.3489, 2.5950, 0.9382, 1.8465, 2.6942], + device='cuda:2'), covar=tensor([0.0427, 0.2373, 0.2242, 0.1526, 0.0636, 0.2234, 0.1081, 0.0457], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0313, 0.0313, 0.0284, 0.0310, 0.0316, 0.0290, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:52:28,189 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:52:30,301 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35072.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:52:51,731 INFO [train.py:903] (2/4) Epoch 6, batch 950, loss[loss=0.3678, simple_loss=0.4106, pruned_loss=0.1625, over 18161.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3399, pruned_loss=0.1081, over 3788242.22 frames. 
], batch size: 83, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:56,792 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7249, 2.1983, 2.2412, 3.0897, 2.5947, 2.5009, 2.2964, 2.8986], + device='cuda:2'), covar=tensor([0.0648, 0.1406, 0.1148, 0.0663, 0.1074, 0.0408, 0.0846, 0.0450], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0366, 0.0289, 0.0236, 0.0308, 0.0244, 0.0272, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:52:58,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 06:53:04,380 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:09,177 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:18,108 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:35,179 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0585, 2.0323, 1.7751, 1.7264, 1.6843, 1.8731, 0.8971, 1.5464], + device='cuda:2'), covar=tensor([0.0186, 0.0273, 0.0187, 0.0273, 0.0361, 0.0278, 0.0471, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0288, 0.0292, 0.0292, 0.0310, 0.0378, 0.0295, 0.0287, 0.0299], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:53:55,218 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 7.194e+02 8.589e+02 1.083e+03 2.096e+03, threshold=1.718e+03, percent-clipped=5.0 +2023-04-01 06:53:55,236 INFO [train.py:903] (2/4) Epoch 6, batch 1000, loss[loss=0.3424, simple_loss=0.3803, pruned_loss=0.1523, over 19681.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3382, pruned_loss=0.1069, over 3805242.57 frames. ], batch size: 60, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:54:03,642 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.52 vs. limit=5.0 +2023-04-01 06:54:13,560 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 06:54:48,373 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 06:54:52,229 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35187.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:54:55,175 INFO [train.py:903] (2/4) Epoch 6, batch 1050, loss[loss=0.2536, simple_loss=0.3231, pruned_loss=0.09207, over 19767.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3384, pruned_loss=0.1074, over 3814525.96 frames. 
], batch size: 54, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:55:09,256 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1369, 5.4881, 3.0284, 4.8358, 1.1763, 5.2770, 5.2493, 5.5841], + device='cuda:2'), covar=tensor([0.0357, 0.0897, 0.1730, 0.0596, 0.4134, 0.0584, 0.0657, 0.0604], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0309, 0.0366, 0.0288, 0.0353, 0.0307, 0.0289, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 06:55:21,951 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1474, 1.4635, 1.5874, 1.6514, 2.8193, 1.0754, 2.1072, 2.9421], + device='cuda:2'), covar=tensor([0.0421, 0.2175, 0.2118, 0.1384, 0.0560, 0.2247, 0.1256, 0.0390], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0316, 0.0316, 0.0287, 0.0311, 0.0317, 0.0288, 0.0309], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:55:30,967 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 06:55:36,185 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.13 vs. limit=5.0 +2023-04-01 06:55:57,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.102e+02 7.066e+02 8.619e+02 1.238e+03 3.302e+03, threshold=1.724e+03, percent-clipped=8.0 +2023-04-01 06:55:57,183 INFO [train.py:903] (2/4) Epoch 6, batch 1100, loss[loss=0.2287, simple_loss=0.2997, pruned_loss=0.07879, over 19630.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3389, pruned_loss=0.1076, over 3830668.21 frames. ], batch size: 50, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:55:57,672 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1318, 1.1029, 1.4264, 1.2488, 1.7730, 1.6847, 1.8959, 0.4154], + device='cuda:2'), covar=tensor([0.1777, 0.3057, 0.1627, 0.1469, 0.1083, 0.1621, 0.1032, 0.2873], + device='cuda:2'), in_proj_covar=tensor([0.0441, 0.0505, 0.0486, 0.0408, 0.0555, 0.0447, 0.0618, 0.0450], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 06:56:45,777 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-01 06:56:59,598 INFO [train.py:903] (2/4) Epoch 6, batch 1150, loss[loss=0.3257, simple_loss=0.3916, pruned_loss=0.1299, over 19510.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3404, pruned_loss=0.1089, over 3830813.43 frames. ], batch size: 64, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:57:45,473 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:04,195 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.124e+02 7.469e+02 9.050e+02 1.884e+03, threshold=1.494e+03, percent-clipped=1.0 +2023-04-01 06:58:04,213 INFO [train.py:903] (2/4) Epoch 6, batch 1200, loss[loss=0.3231, simple_loss=0.3646, pruned_loss=0.1408, over 19752.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.339, pruned_loss=0.1079, over 3828917.50 frames. 
], batch size: 45, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:58:17,351 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:23,086 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:27,691 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:32,195 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35363.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:58:36,614 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 06:58:55,047 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5479, 1.3813, 1.5282, 1.6193, 3.0667, 0.9611, 1.9862, 3.2734], + device='cuda:2'), covar=tensor([0.0309, 0.2320, 0.2262, 0.1394, 0.0571, 0.2256, 0.1168, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0315, 0.0317, 0.0287, 0.0311, 0.0315, 0.0288, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 06:58:55,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:59,609 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:05,735 INFO [train.py:903] (2/4) Epoch 6, batch 1250, loss[loss=0.285, simple_loss=0.3437, pruned_loss=0.1131, over 19782.00 frames. ], tot_loss[loss=0.2788, simple_loss=0.3401, pruned_loss=0.1087, over 3803258.98 frames. ], batch size: 56, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:59:09,401 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:31,156 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3789, 1.2373, 1.2253, 1.5916, 1.2431, 1.7447, 1.6482, 1.6011], + device='cuda:2'), covar=tensor([0.0871, 0.1044, 0.1098, 0.0836, 0.0949, 0.0702, 0.0850, 0.0661], + device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0249, 0.0239, 0.0273, 0.0266, 0.0233, 0.0224, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 07:00:08,107 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+02 6.685e+02 8.523e+02 1.077e+03 2.432e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-04-01 07:00:08,126 INFO [train.py:903] (2/4) Epoch 6, batch 1300, loss[loss=0.264, simple_loss=0.3356, pruned_loss=0.09621, over 19472.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3394, pruned_loss=0.1083, over 3802720.70 frames. 
], batch size: 64, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 07:00:12,167 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35443.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:00:26,236 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35454.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:00:43,981 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35468.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:01:11,037 INFO [train.py:903] (2/4) Epoch 6, batch 1350, loss[loss=0.2756, simple_loss=0.3354, pruned_loss=0.1079, over 19730.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3386, pruned_loss=0.1073, over 3812541.24 frames. ], batch size: 51, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:01:19,177 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2594, 3.8068, 2.3576, 3.4488, 1.0153, 3.5223, 3.5206, 3.6866], + device='cuda:2'), covar=tensor([0.0715, 0.1188, 0.2195, 0.0769, 0.4110, 0.0946, 0.0721, 0.0901], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0310, 0.0369, 0.0288, 0.0349, 0.0309, 0.0288, 0.0320], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 07:01:31,071 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 07:02:13,022 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+02 6.783e+02 8.495e+02 1.071e+03 2.340e+03, threshold=1.699e+03, percent-clipped=3.0 +2023-04-01 07:02:13,045 INFO [train.py:903] (2/4) Epoch 6, batch 1400, loss[loss=0.3499, simple_loss=0.3834, pruned_loss=0.1581, over 19541.00 frames. ], tot_loss[loss=0.2788, simple_loss=0.3407, pruned_loss=0.1085, over 3818355.63 frames. ], batch size: 54, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:02:22,772 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:02:47,202 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:03:13,362 INFO [train.py:903] (2/4) Epoch 6, batch 1450, loss[loss=0.2967, simple_loss=0.3688, pruned_loss=0.1123, over 19508.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3408, pruned_loss=0.109, over 3806973.46 frames. ], batch size: 64, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:03:13,404 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 07:03:52,439 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4033, 1.1104, 1.3146, 0.9222, 2.1988, 2.7650, 2.6639, 3.0350], + device='cuda:2'), covar=tensor([0.1441, 0.4073, 0.4021, 0.2293, 0.0611, 0.0275, 0.0368, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0281, 0.0310, 0.0246, 0.0200, 0.0121, 0.0201, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 07:04:15,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+02 6.609e+02 8.520e+02 1.101e+03 2.891e+03, threshold=1.704e+03, percent-clipped=3.0 +2023-04-01 07:04:15,947 INFO [train.py:903] (2/4) Epoch 6, batch 1500, loss[loss=0.321, simple_loss=0.3708, pruned_loss=0.1356, over 19580.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3405, pruned_loss=0.1089, over 3806170.25 frames. 
], batch size: 61, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:04:24,706 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7402, 1.6690, 2.1563, 2.8354, 2.2675, 2.6870, 2.3351, 3.1069], + device='cuda:2'), covar=tensor([0.0657, 0.1972, 0.1226, 0.0748, 0.1167, 0.0360, 0.0873, 0.0418], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0365, 0.0284, 0.0236, 0.0305, 0.0242, 0.0269, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:05:17,137 INFO [train.py:903] (2/4) Epoch 6, batch 1550, loss[loss=0.3186, simple_loss=0.3664, pruned_loss=0.1355, over 19344.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.3399, pruned_loss=0.1086, over 3816702.83 frames. ], batch size: 66, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:39,894 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35707.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:06:16,832 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:06:22,433 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 6.450e+02 9.026e+02 1.093e+03 2.835e+03, threshold=1.805e+03, percent-clipped=5.0 +2023-04-01 07:06:22,451 INFO [train.py:903] (2/4) Epoch 6, batch 1600, loss[loss=0.2868, simple_loss=0.3516, pruned_loss=0.111, over 19688.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3386, pruned_loss=0.1074, over 3815988.54 frames. ], batch size: 60, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:06:44,238 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 07:07:23,537 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:07:24,376 INFO [train.py:903] (2/4) Epoch 6, batch 1650, loss[loss=0.2094, simple_loss=0.2829, pruned_loss=0.06795, over 19392.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3368, pruned_loss=0.1059, over 3834991.38 frames. ], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:05,737 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35822.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:08:09,326 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:10,584 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8020, 1.8295, 1.8015, 2.7320, 1.7951, 2.3996, 2.4945, 1.8452], + device='cuda:2'), covar=tensor([0.2259, 0.1868, 0.0964, 0.1019, 0.2013, 0.0789, 0.1772, 0.1655], + device='cuda:2'), in_proj_covar=tensor([0.0660, 0.0649, 0.0575, 0.0807, 0.0689, 0.0568, 0.0697, 0.0611], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 07:08:13,935 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35829.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:27,426 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.226e+02 6.593e+02 7.720e+02 9.793e+02 2.227e+03, threshold=1.544e+03, percent-clipped=1.0 +2023-04-01 07:08:27,448 INFO [train.py:903] (2/4) Epoch 6, batch 1700, loss[loss=0.3195, simple_loss=0.3676, pruned_loss=0.1357, over 17437.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3376, pruned_loss=0.1059, over 3833453.48 frames. 
], batch size: 101, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:38,904 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:40,045 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:09:06,044 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 07:09:29,405 INFO [train.py:903] (2/4) Epoch 6, batch 1750, loss[loss=0.2767, simple_loss=0.3492, pruned_loss=0.102, over 19392.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3374, pruned_loss=0.1061, over 3828291.59 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:09:31,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.27 vs. limit=5.0 +2023-04-01 07:09:31,965 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35892.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:10:33,848 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+02 6.695e+02 8.372e+02 1.116e+03 2.634e+03, threshold=1.674e+03, percent-clipped=7.0 +2023-04-01 07:10:33,866 INFO [train.py:903] (2/4) Epoch 6, batch 1800, loss[loss=0.2798, simple_loss=0.3551, pruned_loss=0.1023, over 19582.00 frames. ], tot_loss[loss=0.2745, simple_loss=0.3371, pruned_loss=0.1059, over 3827404.36 frames. ], batch size: 61, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:11:31,911 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 07:11:36,736 INFO [train.py:903] (2/4) Epoch 6, batch 1850, loss[loss=0.2436, simple_loss=0.3062, pruned_loss=0.09053, over 19828.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3375, pruned_loss=0.1065, over 3832069.97 frames. ], batch size: 49, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:11:59,139 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36007.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:12:11,430 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 07:12:40,881 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.024e+02 7.330e+02 9.053e+02 1.086e+03 1.723e+03, threshold=1.811e+03, percent-clipped=2.0 +2023-04-01 07:12:40,904 INFO [train.py:903] (2/4) Epoch 6, batch 1900, loss[loss=0.2162, simple_loss=0.2843, pruned_loss=0.07403, over 19711.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3377, pruned_loss=0.1066, over 3813534.46 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:12:57,326 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 07:13:04,091 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 07:13:27,634 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-01 07:13:29,164 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36078.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:13:33,573 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9587, 3.6251, 2.8307, 3.1868, 1.7808, 3.0929, 3.2233, 3.5089], + device='cuda:2'), covar=tensor([0.0768, 0.0982, 0.1634, 0.0767, 0.2799, 0.1104, 0.0926, 0.1014], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0314, 0.0373, 0.0289, 0.0357, 0.0311, 0.0293, 0.0323], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 07:13:42,460 INFO [train.py:903] (2/4) Epoch 6, batch 1950, loss[loss=0.2677, simple_loss=0.3415, pruned_loss=0.09692, over 19500.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3392, pruned_loss=0.1076, over 3801042.86 frames. ], batch size: 64, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:14:00,233 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36103.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:14:04,982 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:35,274 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,145 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36133.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:44,996 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.894e+02 6.352e+02 7.868e+02 9.806e+02 1.510e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-04-01 07:14:45,014 INFO [train.py:903] (2/4) Epoch 6, batch 2000, loss[loss=0.2894, simple_loss=0.3594, pruned_loss=0.1097, over 19307.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3373, pruned_loss=0.106, over 3821770.99 frames. ], batch size: 66, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:04,557 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1321, 3.6307, 3.7620, 3.7402, 1.2890, 3.4875, 3.0326, 3.3728], + device='cuda:2'), covar=tensor([0.1056, 0.0741, 0.0594, 0.0500, 0.3967, 0.0486, 0.0629, 0.1090], + device='cuda:2'), in_proj_covar=tensor([0.0533, 0.0472, 0.0621, 0.0519, 0.0597, 0.0386, 0.0397, 0.0588], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 07:15:24,942 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:15:30,464 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4869, 1.0598, 1.2577, 1.2059, 2.1531, 0.8457, 1.9121, 2.0920], + device='cuda:2'), covar=tensor([0.0591, 0.2568, 0.2442, 0.1418, 0.0720, 0.2006, 0.0910, 0.0659], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0315, 0.0319, 0.0292, 0.0312, 0.0318, 0.0291, 0.0310], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:15:42,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 07:15:44,430 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. 
limit=2.0 +2023-04-01 07:15:46,070 INFO [train.py:903] (2/4) Epoch 6, batch 2050, loss[loss=0.221, simple_loss=0.2831, pruned_loss=0.07946, over 19747.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3383, pruned_loss=0.1067, over 3818911.01 frames. ], batch size: 46, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:57,097 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36199.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:16:00,515 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 07:16:03,108 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 07:16:22,964 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 07:16:47,772 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.690e+02 8.798e+02 1.173e+03 2.442e+03, threshold=1.760e+03, percent-clipped=12.0 +2023-04-01 07:16:47,790 INFO [train.py:903] (2/4) Epoch 6, batch 2100, loss[loss=0.2411, simple_loss=0.3018, pruned_loss=0.0902, over 19763.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3363, pruned_loss=0.1055, over 3826906.11 frames. ], batch size: 46, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:16:57,901 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:06,277 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2668, 2.3104, 2.2895, 3.4588, 2.2700, 3.5836, 3.3197, 1.9721], + device='cuda:2'), covar=tensor([0.2452, 0.1828, 0.0897, 0.1139, 0.2344, 0.0643, 0.1586, 0.1829], + device='cuda:2'), in_proj_covar=tensor([0.0659, 0.0648, 0.0575, 0.0807, 0.0684, 0.0567, 0.0698, 0.0606], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 07:17:17,845 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36263.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:18,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 07:17:39,988 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 07:17:49,923 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:49,968 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:51,847 INFO [train.py:903] (2/4) Epoch 6, batch 2150, loss[loss=0.2793, simple_loss=0.3395, pruned_loss=0.1096, over 19600.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3349, pruned_loss=0.1045, over 3840904.22 frames. ], batch size: 50, lr: 1.40e-02, grad_scale: 16.0 +2023-04-01 07:18:53,905 INFO [train.py:903] (2/4) Epoch 6, batch 2200, loss[loss=0.2531, simple_loss=0.3273, pruned_loss=0.08942, over 19339.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3364, pruned_loss=0.1056, over 3847593.90 frames. 
], batch size: 70, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:18:55,064 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 6.300e+02 8.031e+02 1.073e+03 2.013e+03, threshold=1.606e+03, percent-clipped=1.0 +2023-04-01 07:19:15,908 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0713, 1.4853, 1.5282, 1.7928, 1.6755, 1.6655, 1.4650, 1.8293], + device='cuda:2'), covar=tensor([0.0746, 0.1437, 0.1296, 0.0835, 0.1079, 0.0473, 0.1076, 0.0566], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0362, 0.0283, 0.0236, 0.0303, 0.0244, 0.0268, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:19:29,077 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-01 07:19:35,546 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4556, 2.3781, 1.6616, 1.6608, 2.2569, 1.2889, 1.2367, 1.7154], + device='cuda:2'), covar=tensor([0.0674, 0.0438, 0.0754, 0.0423, 0.0307, 0.0884, 0.0585, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0274, 0.0314, 0.0240, 0.0225, 0.0311, 0.0286, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:19:57,016 INFO [train.py:903] (2/4) Epoch 6, batch 2250, loss[loss=0.265, simple_loss=0.3218, pruned_loss=0.1041, over 19783.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3353, pruned_loss=0.1049, over 3860384.85 frames. ], batch size: 47, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:58,367 INFO [train.py:903] (2/4) Epoch 6, batch 2300, loss[loss=0.2612, simple_loss=0.3168, pruned_loss=0.1028, over 19746.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3349, pruned_loss=0.1046, over 3847388.18 frames. ], batch size: 46, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:59,554 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.689e+02 7.482e+02 9.822e+02 1.768e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-04-01 07:21:04,980 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 07:21:14,323 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 07:22:00,467 INFO [train.py:903] (2/4) Epoch 6, batch 2350, loss[loss=0.3262, simple_loss=0.3726, pruned_loss=0.1399, over 13727.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3348, pruned_loss=0.1038, over 3833002.54 frames. ], batch size: 136, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:22:19,345 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:22:43,440 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 07:22:49,465 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:00,583 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 07:23:02,880 INFO [train.py:903] (2/4) Epoch 6, batch 2400, loss[loss=0.2597, simple_loss=0.3213, pruned_loss=0.09901, over 19608.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3343, pruned_loss=0.1034, over 3826455.93 frames. 
], batch size: 50, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:23:04,008 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.742e+02 5.696e+02 7.249e+02 9.157e+02 1.479e+03, threshold=1.450e+03, percent-clipped=0.0 +2023-04-01 07:23:07,412 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:08,774 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36544.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:39,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:06,971 INFO [train.py:903] (2/4) Epoch 6, batch 2450, loss[loss=0.2737, simple_loss=0.3432, pruned_loss=0.1021, over 19395.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3334, pruned_loss=0.103, over 3825156.32 frames. ], batch size: 70, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:24:27,655 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 07:24:46,754 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:51,101 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:56,969 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:07,693 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-01 07:25:07,901 INFO [train.py:903] (2/4) Epoch 6, batch 2500, loss[loss=0.2466, simple_loss=0.3073, pruned_loss=0.09297, over 19481.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3337, pruned_loss=0.1032, over 3817218.09 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:25:09,071 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.265e+02 8.006e+02 9.274e+02 1.564e+03, threshold=1.601e+03, percent-clipped=1.0 +2023-04-01 07:25:30,302 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:39,454 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5792, 1.3424, 1.3937, 1.9678, 1.7990, 1.8832, 2.0558, 1.8418], + device='cuda:2'), covar=tensor([0.0867, 0.1064, 0.1118, 0.0910, 0.0872, 0.0849, 0.1014, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0244, 0.0235, 0.0268, 0.0259, 0.0226, 0.0227, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 07:26:09,578 INFO [train.py:903] (2/4) Epoch 6, batch 2550, loss[loss=0.276, simple_loss=0.3456, pruned_loss=0.1032, over 19601.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3337, pruned_loss=0.1029, over 3831857.80 frames. 
], batch size: 57, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:26:44,776 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36718.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:26:46,997 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9584, 1.0698, 1.4725, 0.8393, 2.3621, 2.9553, 2.7285, 3.1644], + device='cuda:2'), covar=tensor([0.1563, 0.3257, 0.3031, 0.2189, 0.0444, 0.0179, 0.0263, 0.0146], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0282, 0.0312, 0.0247, 0.0200, 0.0122, 0.0200, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 07:26:53,884 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0850, 5.4950, 3.0017, 4.8701, 1.3559, 5.1989, 5.3737, 5.5140], + device='cuda:2'), covar=tensor([0.0364, 0.0872, 0.1846, 0.0485, 0.3911, 0.0595, 0.0546, 0.0756], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0314, 0.0372, 0.0291, 0.0353, 0.0313, 0.0294, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 07:27:05,095 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 07:27:10,925 INFO [train.py:903] (2/4) Epoch 6, batch 2600, loss[loss=0.2716, simple_loss=0.341, pruned_loss=0.1011, over 19354.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3357, pruned_loss=0.1046, over 3834746.80 frames. ], batch size: 66, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:27:12,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.492e+02 8.253e+02 1.085e+03 2.742e+03, threshold=1.651e+03, percent-clipped=10.0 +2023-04-01 07:28:14,517 INFO [train.py:903] (2/4) Epoch 6, batch 2650, loss[loss=0.246, simple_loss=0.3103, pruned_loss=0.09091, over 19785.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3341, pruned_loss=0.103, over 3844138.87 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:28:37,557 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 07:28:55,456 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36823.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:29:16,468 INFO [train.py:903] (2/4) Epoch 6, batch 2700, loss[loss=0.2871, simple_loss=0.3555, pruned_loss=0.1094, over 19521.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3344, pruned_loss=0.1036, over 3827164.30 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:29:17,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.342e+02 7.427e+02 9.812e+02 2.890e+03, threshold=1.485e+03, percent-clipped=2.0 +2023-04-01 07:29:51,101 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8558, 3.6074, 1.9216, 2.1407, 3.1040, 1.4038, 1.1046, 1.7739], + device='cuda:2'), covar=tensor([0.1041, 0.0307, 0.0851, 0.0564, 0.0448, 0.1044, 0.0909, 0.0645], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0279, 0.0312, 0.0240, 0.0228, 0.0312, 0.0284, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:30:18,804 INFO [train.py:903] (2/4) Epoch 6, batch 2750, loss[loss=0.2838, simple_loss=0.3474, pruned_loss=0.1101, over 19681.00 frames. 
], tot_loss[loss=0.2706, simple_loss=0.334, pruned_loss=0.1036, over 3818382.13 frames. ], batch size: 58, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:30:51,763 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36914.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:09,578 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:22,180 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:23,042 INFO [train.py:903] (2/4) Epoch 6, batch 2800, loss[loss=0.2665, simple_loss=0.3265, pruned_loss=0.1032, over 19673.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3328, pruned_loss=0.1029, over 3818752.70 frames. ], batch size: 53, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:31:24,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+02 7.168e+02 8.701e+02 1.243e+03 3.330e+03, threshold=1.740e+03, percent-clipped=17.0 +2023-04-01 07:31:56,919 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:00,393 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:06,250 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:08,502 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:26,412 INFO [train.py:903] (2/4) Epoch 6, batch 2850, loss[loss=0.2424, simple_loss=0.3223, pruned_loss=0.08126, over 19530.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3331, pruned_loss=0.1031, over 3809736.13 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:32:34,859 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4298, 2.3240, 2.3917, 3.7603, 2.2066, 3.6425, 3.3154, 2.1176], + device='cuda:2'), covar=tensor([0.2469, 0.1987, 0.0841, 0.1145, 0.2482, 0.0656, 0.1639, 0.1712], + device='cuda:2'), in_proj_covar=tensor([0.0661, 0.0653, 0.0573, 0.0804, 0.0685, 0.0567, 0.0701, 0.0604], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 07:32:48,210 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2290, 3.6900, 3.7929, 3.8038, 1.4184, 3.5575, 3.1360, 3.4504], + device='cuda:2'), covar=tensor([0.1042, 0.0625, 0.0574, 0.0505, 0.3959, 0.0454, 0.0599, 0.1067], + device='cuda:2'), in_proj_covar=tensor([0.0546, 0.0471, 0.0641, 0.0527, 0.0601, 0.0395, 0.0410, 0.0601], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 07:32:52,105 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-04-01 07:33:05,610 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:33:05,645 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3084, 1.4455, 1.4668, 1.6323, 2.8821, 1.1101, 2.1384, 3.0443], + device='cuda:2'), covar=tensor([0.0369, 0.2218, 0.2222, 0.1382, 0.0564, 0.2113, 0.1057, 0.0367], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0315, 0.0315, 0.0290, 0.0307, 0.0314, 0.0289, 0.0305], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:33:28,656 INFO [train.py:903] (2/4) Epoch 6, batch 2900, loss[loss=0.2496, simple_loss=0.312, pruned_loss=0.09364, over 19421.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3322, pruned_loss=0.1027, over 3812907.83 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:33:28,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 07:33:28,976 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9144, 4.3018, 4.6970, 4.6016, 1.5375, 4.2254, 3.7263, 4.2591], + device='cuda:2'), covar=tensor([0.0980, 0.0602, 0.0477, 0.0429, 0.4345, 0.0430, 0.0579, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0546, 0.0470, 0.0639, 0.0526, 0.0598, 0.0394, 0.0407, 0.0598], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 07:33:29,874 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.105e+02 7.947e+02 1.025e+03 2.308e+03, threshold=1.589e+03, percent-clipped=2.0 +2023-04-01 07:33:55,114 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37062.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:33:59,454 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7797, 2.0983, 1.9833, 2.7780, 2.6176, 2.4131, 2.0444, 2.7664], + device='cuda:2'), covar=tensor([0.0641, 0.1408, 0.1223, 0.0778, 0.0996, 0.0375, 0.0934, 0.0459], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0355, 0.0279, 0.0235, 0.0299, 0.0238, 0.0263, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:34:20,470 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:24,029 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37085.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:28,763 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37089.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:29,567 INFO [train.py:903] (2/4) Epoch 6, batch 2950, loss[loss=0.2939, simple_loss=0.3533, pruned_loss=0.1172, over 19533.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3339, pruned_loss=0.1036, over 3825796.19 frames. 
], batch size: 54, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:34:58,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9336, 4.2662, 4.5436, 4.5162, 1.6187, 4.2257, 3.7407, 4.1587], + device='cuda:2'), covar=tensor([0.0944, 0.0613, 0.0469, 0.0404, 0.4050, 0.0346, 0.0482, 0.0984], + device='cuda:2'), in_proj_covar=tensor([0.0558, 0.0482, 0.0651, 0.0532, 0.0607, 0.0398, 0.0414, 0.0610], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 07:35:02,119 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1134, 1.0591, 1.7131, 1.2243, 2.4553, 1.8377, 2.5191, 1.1084], + device='cuda:2'), covar=tensor([0.1973, 0.3283, 0.1673, 0.1659, 0.1242, 0.1798, 0.1465, 0.2790], + device='cuda:2'), in_proj_covar=tensor([0.0451, 0.0516, 0.0496, 0.0412, 0.0568, 0.0454, 0.0634, 0.0452], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 07:35:31,147 INFO [train.py:903] (2/4) Epoch 6, batch 3000, loss[loss=0.2866, simple_loss=0.3513, pruned_loss=0.111, over 19703.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3334, pruned_loss=0.1035, over 3821453.48 frames. ], batch size: 59, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,148 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 07:35:43,653 INFO [train.py:937] (2/4) Epoch 6, validation: loss=0.1968, simple_loss=0.2962, pruned_loss=0.04867, over 944034.00 frames. +2023-04-01 07:35:43,654 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 07:35:44,848 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.497e+02 6.001e+02 7.289e+02 9.626e+02 1.809e+03, threshold=1.458e+03, percent-clipped=5.0 +2023-04-01 07:35:48,611 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 07:36:18,750 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37167.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:36:30,249 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37177.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:36:45,861 INFO [train.py:903] (2/4) Epoch 6, batch 3050, loss[loss=0.2171, simple_loss=0.2948, pruned_loss=0.0697, over 19628.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3336, pruned_loss=0.1035, over 3811348.81 frames. ], batch size: 50, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:48,510 INFO [train.py:903] (2/4) Epoch 6, batch 3100, loss[loss=0.2613, simple_loss=0.3304, pruned_loss=0.09604, over 19735.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3346, pruned_loss=0.1041, over 3792501.26 frames. 
], batch size: 63, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:49,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 6.699e+02 8.375e+02 1.038e+03 2.239e+03, threshold=1.675e+03, percent-clipped=7.0 +2023-04-01 07:38:28,939 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37273.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:42,280 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37282.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:38:46,578 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37286.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:50,727 INFO [train.py:903] (2/4) Epoch 6, batch 3150, loss[loss=0.2584, simple_loss=0.3309, pruned_loss=0.09293, over 19664.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.334, pruned_loss=0.1037, over 3802105.04 frames. ], batch size: 58, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:39:13,512 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 07:39:27,510 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37320.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:49,102 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:51,049 INFO [train.py:903] (2/4) Epoch 6, batch 3200, loss[loss=0.2538, simple_loss=0.315, pruned_loss=0.09633, over 19366.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3345, pruned_loss=0.1039, over 3802553.80 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:39:52,141 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.326e+02 6.253e+02 8.171e+02 9.916e+02 1.975e+03, threshold=1.634e+03, percent-clipped=4.0 +2023-04-01 07:39:52,624 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:58,099 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8072, 1.2572, 1.3369, 1.6268, 1.5107, 1.5271, 1.2599, 1.6417], + device='cuda:2'), covar=tensor([0.0663, 0.1257, 0.1155, 0.0755, 0.0972, 0.0428, 0.0964, 0.0513], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0356, 0.0279, 0.0234, 0.0301, 0.0241, 0.0262, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:39:58,125 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37345.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:20,453 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,439 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,672 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:29,178 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37370.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:42,562 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:50,641 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, 
batch_count=37388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:52,622 INFO [train.py:903] (2/4) Epoch 6, batch 3250, loss[loss=0.3111, simple_loss=0.3657, pruned_loss=0.1282, over 18098.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3354, pruned_loss=0.1044, over 3814228.34 frames. ], batch size: 83, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:30,760 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9240, 1.3462, 0.9892, 0.9448, 1.2109, 0.8651, 0.8170, 1.2761], + device='cuda:2'), covar=tensor([0.0407, 0.0493, 0.0878, 0.0419, 0.0386, 0.0925, 0.0452, 0.0299], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0276, 0.0309, 0.0241, 0.0221, 0.0310, 0.0280, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:41:45,922 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37433.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:41:48,067 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:41:53,529 INFO [train.py:903] (2/4) Epoch 6, batch 3300, loss[loss=0.2576, simple_loss=0.3311, pruned_loss=0.09203, over 19789.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3342, pruned_loss=0.1032, over 3830887.75 frames. ], batch size: 56, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:57,413 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 6.108e+02 8.102e+02 1.002e+03 3.053e+03, threshold=1.620e+03, percent-clipped=3.0 +2023-04-01 07:42:01,113 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 07:42:17,183 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37458.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:42:44,819 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:42:56,297 INFO [train.py:903] (2/4) Epoch 6, batch 3350, loss[loss=0.2806, simple_loss=0.3527, pruned_loss=0.1043, over 19663.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3349, pruned_loss=0.104, over 3827158.37 frames. ], batch size: 58, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:43:56,123 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37538.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:43:57,995 INFO [train.py:903] (2/4) Epoch 6, batch 3400, loss[loss=0.2081, simple_loss=0.2799, pruned_loss=0.06817, over 19754.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3342, pruned_loss=0.104, over 3826522.99 frames. ], batch size: 47, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:44:00,251 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.377e+02 8.364e+02 1.096e+03 2.128e+03, threshold=1.673e+03, percent-clipped=5.0 +2023-04-01 07:44:26,215 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37563.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:44:59,271 INFO [train.py:903] (2/4) Epoch 6, batch 3450, loss[loss=0.2558, simple_loss=0.321, pruned_loss=0.09526, over 19733.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3349, pruned_loss=0.1037, over 3832725.81 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:45:07,181 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-01 07:45:12,665 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-01 07:45:39,250 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8336, 3.4703, 2.2099, 3.2160, 1.0893, 3.1271, 3.1666, 3.3108], + device='cuda:2'), covar=tensor([0.0823, 0.1073, 0.2161, 0.0778, 0.3645, 0.1101, 0.0926, 0.1016], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0324, 0.0384, 0.0294, 0.0360, 0.0319, 0.0297, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 07:45:49,558 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:02,056 INFO [train.py:903] (2/4) Epoch 6, batch 3500, loss[loss=0.2858, simple_loss=0.3477, pruned_loss=0.1119, over 19460.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3346, pruned_loss=0.1037, over 3830003.36 frames. ], batch size: 64, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:46:04,578 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.340e+02 8.060e+02 1.060e+03 3.220e+03, threshold=1.612e+03, percent-clipped=3.0 +2023-04-01 07:46:07,422 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:23,231 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2126, 1.0295, 0.9965, 1.3329, 1.1648, 1.3426, 1.4130, 1.1876], + device='cuda:2'), covar=tensor([0.0637, 0.0796, 0.0844, 0.0652, 0.0730, 0.0630, 0.0683, 0.0576], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0244, 0.0237, 0.0274, 0.0262, 0.0230, 0.0226, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 07:46:38,316 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:54,389 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4813, 1.0324, 1.2199, 1.2579, 2.1415, 0.9321, 1.8891, 2.1474], + device='cuda:2'), covar=tensor([0.0591, 0.2676, 0.2349, 0.1379, 0.0743, 0.1955, 0.0899, 0.0594], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0316, 0.0311, 0.0287, 0.0311, 0.0312, 0.0288, 0.0304], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:47:04,852 INFO [train.py:903] (2/4) Epoch 6, batch 3550, loss[loss=0.3183, simple_loss=0.3702, pruned_loss=0.1332, over 19585.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3361, pruned_loss=0.1051, over 3806137.80 frames. 
], batch size: 52, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:47:07,092 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37691.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:12,773 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5594, 1.0296, 1.2176, 1.3002, 2.2140, 0.9926, 1.9467, 2.1721], + device='cuda:2'), covar=tensor([0.0509, 0.2428, 0.2331, 0.1301, 0.0702, 0.1762, 0.0827, 0.0540], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0318, 0.0315, 0.0288, 0.0312, 0.0313, 0.0288, 0.0305], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:47:34,891 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37716.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:49,119 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:03,063 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:05,907 INFO [train.py:903] (2/4) Epoch 6, batch 3600, loss[loss=0.3666, simple_loss=0.4008, pruned_loss=0.1662, over 19693.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3361, pruned_loss=0.1054, over 3801398.05 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:48:06,565 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 07:48:08,230 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.040e+02 8.572e+02 1.227e+03 4.209e+03, threshold=1.714e+03, percent-clipped=12.0 +2023-04-01 07:48:12,116 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:33,187 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:49:08,248 INFO [train.py:903] (2/4) Epoch 6, batch 3650, loss[loss=0.2402, simple_loss=0.3136, pruned_loss=0.08342, over 19749.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3345, pruned_loss=0.104, over 3813118.27 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:10,248 INFO [train.py:903] (2/4) Epoch 6, batch 3700, loss[loss=0.2188, simple_loss=0.2977, pruned_loss=0.06999, over 19825.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3349, pruned_loss=0.1042, over 3829786.96 frames. ], batch size: 52, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:11,794 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:50:12,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 5.969e+02 7.272e+02 1.002e+03 1.787e+03, threshold=1.454e+03, percent-clipped=1.0 +2023-04-01 07:50:25,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 07:51:13,482 INFO [train.py:903] (2/4) Epoch 6, batch 3750, loss[loss=0.2756, simple_loss=0.3508, pruned_loss=0.1002, over 19341.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3353, pruned_loss=0.1043, over 3832593.61 frames. 
], batch size: 66, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:51:26,359 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4849, 1.2316, 1.4148, 1.5216, 3.0020, 1.0134, 1.9779, 3.0780], + device='cuda:2'), covar=tensor([0.0355, 0.2327, 0.2222, 0.1377, 0.0568, 0.2186, 0.1153, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0315, 0.0315, 0.0291, 0.0314, 0.0313, 0.0289, 0.0307], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:51:28,771 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5322, 1.8986, 2.0450, 2.8732, 2.6947, 2.3504, 2.2805, 2.6011], + device='cuda:2'), covar=tensor([0.0725, 0.1728, 0.1233, 0.0759, 0.0961, 0.0392, 0.0823, 0.0515], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0360, 0.0282, 0.0237, 0.0303, 0.0240, 0.0265, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:52:14,129 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6262, 1.5789, 1.4805, 1.6808, 1.6187, 1.4738, 1.4919, 1.6225], + device='cuda:2'), covar=tensor([0.0664, 0.1013, 0.0886, 0.0658, 0.0834, 0.0399, 0.0702, 0.0426], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0365, 0.0285, 0.0240, 0.0307, 0.0243, 0.0267, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:52:16,170 INFO [train.py:903] (2/4) Epoch 6, batch 3800, loss[loss=0.2534, simple_loss=0.324, pruned_loss=0.09143, over 19677.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3348, pruned_loss=0.1041, over 3836600.90 frames. ], batch size: 55, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:18,412 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.762e+02 7.572e+02 9.178e+02 2.007e+03, threshold=1.514e+03, percent-clipped=4.0 +2023-04-01 07:52:47,792 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 07:52:56,803 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3129, 1.4263, 1.3737, 1.6056, 2.8764, 1.1307, 2.0589, 3.0058], + device='cuda:2'), covar=tensor([0.0363, 0.2287, 0.2390, 0.1310, 0.0576, 0.2170, 0.1112, 0.0394], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0319, 0.0317, 0.0296, 0.0318, 0.0315, 0.0292, 0.0309], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 07:53:17,773 INFO [train.py:903] (2/4) Epoch 6, batch 3850, loss[loss=0.3281, simple_loss=0.3627, pruned_loss=0.1468, over 12801.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3346, pruned_loss=0.1037, over 3814997.85 frames. ], batch size: 136, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:53:25,802 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37997.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:53:32,693 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:03,055 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:20,061 INFO [train.py:903] (2/4) Epoch 6, batch 3900, loss[loss=0.266, simple_loss=0.339, pruned_loss=0.09654, over 19100.00 frames. 
], tot_loss[loss=0.274, simple_loss=0.3368, pruned_loss=0.1056, over 3794945.82 frames. ], batch size: 69, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:54:22,366 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.032e+02 6.773e+02 7.927e+02 9.711e+02 2.220e+03, threshold=1.585e+03, percent-clipped=5.0 +2023-04-01 07:54:39,725 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 07:55:13,658 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8349, 4.3809, 2.3377, 3.8889, 1.0110, 4.0671, 4.1135, 4.3442], + device='cuda:2'), covar=tensor([0.0561, 0.0962, 0.2240, 0.0730, 0.3842, 0.0722, 0.0664, 0.0763], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0328, 0.0388, 0.0297, 0.0363, 0.0321, 0.0302, 0.0336], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 07:55:15,177 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 07:55:24,001 INFO [train.py:903] (2/4) Epoch 6, batch 3950, loss[loss=0.2536, simple_loss=0.3168, pruned_loss=0.09515, over 19745.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.3363, pruned_loss=0.1047, over 3795336.13 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:55:27,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 07:55:33,086 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:03,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:25,971 INFO [train.py:903] (2/4) Epoch 6, batch 4000, loss[loss=0.2806, simple_loss=0.3458, pruned_loss=0.1077, over 19614.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3378, pruned_loss=0.1057, over 3784175.77 frames. ], batch size: 57, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:56:28,273 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 6.005e+02 7.523e+02 9.814e+02 1.567e+03, threshold=1.505e+03, percent-clipped=0.0 +2023-04-01 07:57:10,230 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 07:57:17,243 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7536, 1.4283, 1.3207, 2.0213, 1.6035, 2.1099, 1.9968, 1.9596], + device='cuda:2'), covar=tensor([0.0810, 0.1027, 0.1134, 0.1071, 0.1005, 0.0712, 0.1031, 0.0660], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0240, 0.0238, 0.0271, 0.0260, 0.0225, 0.0225, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-04-01 07:57:25,998 INFO [train.py:903] (2/4) Epoch 6, batch 4050, loss[loss=0.2799, simple_loss=0.3472, pruned_loss=0.1063, over 19669.00 frames. ], tot_loss[loss=0.2741, simple_loss=0.3374, pruned_loss=0.1054, over 3800518.49 frames. ], batch size: 58, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:27,537 INFO [train.py:903] (2/4) Epoch 6, batch 4100, loss[loss=0.2385, simple_loss=0.3008, pruned_loss=0.08811, over 19105.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3363, pruned_loss=0.1046, over 3807872.56 frames. 
], batch size: 42, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:30,600 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.486e+02 7.817e+02 9.790e+02 2.532e+03, threshold=1.563e+03, percent-clipped=8.0 +2023-04-01 07:59:04,834 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 07:59:30,605 INFO [train.py:903] (2/4) Epoch 6, batch 4150, loss[loss=0.2203, simple_loss=0.2904, pruned_loss=0.07512, over 19393.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3371, pruned_loss=0.1053, over 3807138.88 frames. ], batch size: 48, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:59:59,062 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1002, 1.6659, 1.6288, 2.4271, 2.1363, 1.8211, 1.6902, 2.0881], + device='cuda:2'), covar=tensor([0.0982, 0.1898, 0.1669, 0.1041, 0.1433, 0.0885, 0.1385, 0.0797], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0362, 0.0284, 0.0238, 0.0306, 0.0244, 0.0270, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:00:34,879 INFO [train.py:903] (2/4) Epoch 6, batch 4200, loss[loss=0.3104, simple_loss=0.3664, pruned_loss=0.1272, over 19673.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.3361, pruned_loss=0.1048, over 3813702.73 frames. ], batch size: 60, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:36,323 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:00:37,309 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.104e+02 7.462e+02 9.650e+02 2.123e+03, threshold=1.492e+03, percent-clipped=5.0 +2023-04-01 08:00:39,346 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 08:01:21,340 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:01:25,182 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.44 vs. limit=5.0 +2023-04-01 08:01:34,728 INFO [train.py:903] (2/4) Epoch 6, batch 4250, loss[loss=0.2901, simple_loss=0.3521, pruned_loss=0.1141, over 19355.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3358, pruned_loss=0.1047, over 3825802.79 frames. ], batch size: 66, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:01:48,239 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 08:02:01,556 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 08:02:19,783 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 08:02:34,856 INFO [train.py:903] (2/4) Epoch 6, batch 4300, loss[loss=0.2969, simple_loss=0.3527, pruned_loss=0.1205, over 13349.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3362, pruned_loss=0.1046, over 3806215.08 frames. 
], batch size: 136, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:02:37,124 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+02 6.582e+02 8.580e+02 1.078e+03 2.349e+03, threshold=1.716e+03, percent-clipped=8.0 +2023-04-01 08:02:55,996 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38456.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:03:27,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 08:03:35,913 INFO [train.py:903] (2/4) Epoch 6, batch 4350, loss[loss=0.265, simple_loss=0.3363, pruned_loss=0.09683, over 19681.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3353, pruned_loss=0.1041, over 3824276.07 frames. ], batch size: 55, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:40,411 INFO [train.py:903] (2/4) Epoch 6, batch 4400, loss[loss=0.2643, simple_loss=0.3365, pruned_loss=0.09602, over 19762.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.335, pruned_loss=0.1041, over 3833278.68 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:42,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+02 6.588e+02 8.122e+02 1.160e+03 2.348e+03, threshold=1.624e+03, percent-clipped=4.0 +2023-04-01 08:05:00,788 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3794, 1.3820, 2.2367, 1.6220, 3.5004, 2.5934, 3.5218, 1.6951], + device='cuda:2'), covar=tensor([0.1866, 0.3276, 0.1737, 0.1485, 0.1101, 0.1512, 0.1403, 0.2700], + device='cuda:2'), in_proj_covar=tensor([0.0447, 0.0514, 0.0496, 0.0406, 0.0555, 0.0451, 0.0629, 0.0450], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:05:05,903 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 08:05:13,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 08:05:34,944 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2011, 1.6635, 1.7619, 2.1671, 1.9785, 1.8667, 1.6074, 1.9848], + device='cuda:2'), covar=tensor([0.0679, 0.1405, 0.1114, 0.0805, 0.1003, 0.0414, 0.1001, 0.0552], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0349, 0.0275, 0.0230, 0.0294, 0.0237, 0.0263, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:05:40,416 INFO [train.py:903] (2/4) Epoch 6, batch 4450, loss[loss=0.2593, simple_loss=0.3356, pruned_loss=0.09147, over 19627.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3343, pruned_loss=0.1038, over 3823953.37 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:41,523 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8596, 1.9132, 1.9948, 2.8873, 1.9266, 2.5648, 2.5888, 2.0099], + device='cuda:2'), covar=tensor([0.2578, 0.2042, 0.1033, 0.1031, 0.2173, 0.0883, 0.1925, 0.1716], + device='cuda:2'), in_proj_covar=tensor([0.0676, 0.0674, 0.0585, 0.0818, 0.0700, 0.0586, 0.0715, 0.0623], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:06:42,159 INFO [train.py:903] (2/4) Epoch 6, batch 4500, loss[loss=0.2457, simple_loss=0.3171, pruned_loss=0.08714, over 19674.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.334, pruned_loss=0.1035, over 3829922.28 frames. 
], batch size: 53, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:44,525 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 6.139e+02 7.598e+02 9.442e+02 2.713e+03, threshold=1.520e+03, percent-clipped=3.0 +2023-04-01 08:07:12,397 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.12 vs. limit=5.0 +2023-04-01 08:07:36,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8682, 1.9284, 1.9457, 2.9260, 1.9564, 2.9324, 2.6083, 1.8889], + device='cuda:2'), covar=tensor([0.2625, 0.2088, 0.1044, 0.1159, 0.2409, 0.0775, 0.2054, 0.1825], + device='cuda:2'), in_proj_covar=tensor([0.0673, 0.0672, 0.0585, 0.0819, 0.0698, 0.0586, 0.0716, 0.0620], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:07:42,587 INFO [train.py:903] (2/4) Epoch 6, batch 4550, loss[loss=0.2722, simple_loss=0.338, pruned_loss=0.1032, over 19543.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.334, pruned_loss=0.1033, over 3827593.96 frames. ], batch size: 56, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:07:53,128 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 08:08:10,836 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38712.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:17,330 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 08:08:22,008 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:41,581 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:45,376 INFO [train.py:903] (2/4) Epoch 6, batch 4600, loss[loss=0.2955, simple_loss=0.3574, pruned_loss=0.1168, over 19526.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3342, pruned_loss=0.1036, over 3826895.27 frames. ], batch size: 64, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:08:47,716 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 6.588e+02 8.068e+02 1.040e+03 1.807e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 08:09:34,459 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9274, 1.9111, 2.2075, 2.1120, 2.8034, 3.5412, 3.5840, 3.8403], + device='cuda:2'), covar=tensor([0.1381, 0.3121, 0.2904, 0.1800, 0.1176, 0.0286, 0.0244, 0.0179], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0287, 0.0318, 0.0250, 0.0202, 0.0129, 0.0204, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:09:45,875 INFO [train.py:903] (2/4) Epoch 6, batch 4650, loss[loss=0.2634, simple_loss=0.3287, pruned_loss=0.09906, over 19532.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3332, pruned_loss=0.1033, over 3822263.72 frames. ], batch size: 54, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:01,544 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 08:10:11,700 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 08:10:43,246 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:46,234 INFO [train.py:903] (2/4) Epoch 6, batch 4700, loss[loss=0.3174, simple_loss=0.3807, pruned_loss=0.1271, over 19551.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3347, pruned_loss=0.1046, over 3810527.93 frames. ], batch size: 61, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:47,762 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:48,621 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.701e+02 8.528e+02 1.085e+03 2.106e+03, threshold=1.706e+03, percent-clipped=3.0 +2023-04-01 08:11:07,245 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 08:11:46,437 INFO [train.py:903] (2/4) Epoch 6, batch 4750, loss[loss=0.2812, simple_loss=0.3463, pruned_loss=0.108, over 19497.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3351, pruned_loss=0.1047, over 3813627.08 frames. ], batch size: 64, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:47,940 INFO [train.py:903] (2/4) Epoch 6, batch 4800, loss[loss=0.3235, simple_loss=0.3794, pruned_loss=0.1338, over 19715.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3348, pruned_loss=0.1047, over 3813088.85 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:52,285 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 6.641e+02 7.688e+02 1.063e+03 2.770e+03, threshold=1.538e+03, percent-clipped=4.0 +2023-04-01 08:13:49,882 INFO [train.py:903] (2/4) Epoch 6, batch 4850, loss[loss=0.2466, simple_loss=0.3122, pruned_loss=0.09055, over 19738.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3355, pruned_loss=0.1052, over 3804662.80 frames. ], batch size: 51, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:14,750 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 08:14:36,520 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 08:14:42,291 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 08:14:42,335 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 08:14:51,529 INFO [train.py:903] (2/4) Epoch 6, batch 4900, loss[loss=0.3094, simple_loss=0.3645, pruned_loss=0.1272, over 17426.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3337, pruned_loss=0.1035, over 3804819.58 frames. ], batch size: 101, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:51,577 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 08:14:55,129 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 6.820e+02 8.455e+02 1.082e+03 3.554e+03, threshold=1.691e+03, percent-clipped=3.0 +2023-04-01 08:15:12,449 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 08:15:23,885 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2609, 2.2644, 1.7386, 1.6173, 1.5975, 1.7832, 0.3823, 1.0894], + device='cuda:2'), covar=tensor([0.0255, 0.0256, 0.0189, 0.0281, 0.0517, 0.0310, 0.0527, 0.0423], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0297, 0.0297, 0.0316, 0.0382, 0.0313, 0.0286, 0.0300], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:15:52,296 INFO [train.py:903] (2/4) Epoch 6, batch 4950, loss[loss=0.2537, simple_loss=0.3296, pruned_loss=0.08897, over 19670.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.334, pruned_loss=0.1035, over 3812597.29 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:15:56,184 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:01,561 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8364, 1.4405, 1.5269, 1.6633, 2.5639, 1.1425, 2.0787, 2.6003], + device='cuda:2'), covar=tensor([0.0400, 0.1924, 0.1911, 0.1228, 0.0560, 0.1943, 0.1356, 0.0412], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0316, 0.0316, 0.0295, 0.0316, 0.0315, 0.0294, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:16:11,577 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 08:16:27,941 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:34,738 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 08:16:54,847 INFO [train.py:903] (2/4) Epoch 6, batch 5000, loss[loss=0.2033, simple_loss=0.2737, pruned_loss=0.06642, over 19785.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3354, pruned_loss=0.1044, over 3795045.82 frames. ], batch size: 47, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:16:58,462 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5444, 1.1695, 1.1467, 1.8881, 1.4799, 1.5668, 1.8638, 1.3805], + device='cuda:2'), covar=tensor([0.0920, 0.1288, 0.1246, 0.0869, 0.1013, 0.0934, 0.0901, 0.0921], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0239, 0.0235, 0.0268, 0.0256, 0.0220, 0.0219, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 08:16:59,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.473e+02 6.699e+02 7.802e+02 1.019e+03 2.317e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 08:17:05,620 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 08:17:14,780 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 08:17:31,041 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7250, 1.3758, 1.4809, 1.7679, 3.1155, 0.9952, 2.0847, 3.3911], + device='cuda:2'), covar=tensor([0.0320, 0.2254, 0.2259, 0.1368, 0.0606, 0.2356, 0.1200, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0317, 0.0320, 0.0298, 0.0321, 0.0317, 0.0296, 0.0315], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:17:51,517 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:17:57,114 INFO [train.py:903] (2/4) Epoch 6, batch 5050, loss[loss=0.2734, simple_loss=0.3428, pruned_loss=0.102, over 19675.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3359, pruned_loss=0.1043, over 3809147.93 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:18:12,185 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39203.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:18:14,892 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 08:18:30,704 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 08:18:57,176 INFO [train.py:903] (2/4) Epoch 6, batch 5100, loss[loss=0.3564, simple_loss=0.3818, pruned_loss=0.1655, over 13430.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3362, pruned_loss=0.1044, over 3808608.59 frames. ], batch size: 136, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:19:00,429 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.152e+02 6.811e+02 8.395e+02 1.062e+03 1.934e+03, threshold=1.679e+03, percent-clipped=3.0 +2023-04-01 08:19:05,494 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 08:19:08,948 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 08:19:13,352 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 08:19:24,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7493, 1.4826, 1.3851, 1.9372, 1.8421, 1.7251, 1.6241, 1.8055], + device='cuda:2'), covar=tensor([0.0968, 0.1586, 0.1453, 0.0905, 0.1187, 0.0509, 0.0985, 0.0690], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0357, 0.0283, 0.0235, 0.0301, 0.0245, 0.0271, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:19:48,985 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7558, 1.3094, 1.4417, 1.6331, 3.1940, 1.0809, 1.9609, 3.4140], + device='cuda:2'), covar=tensor([0.0309, 0.2296, 0.2302, 0.1405, 0.0595, 0.2162, 0.1204, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0306, 0.0319, 0.0321, 0.0297, 0.0321, 0.0316, 0.0297, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:19:57,733 INFO [train.py:903] (2/4) Epoch 6, batch 5150, loss[loss=0.2702, simple_loss=0.3319, pruned_loss=0.1042, over 19333.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3336, pruned_loss=0.103, over 3799597.85 frames. 
], batch size: 66, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:20:08,434 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 08:20:11,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:13,915 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:18,416 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2432, 2.0441, 1.5935, 1.3274, 1.8983, 1.1098, 1.0017, 1.6049], + device='cuda:2'), covar=tensor([0.0632, 0.0511, 0.0746, 0.0516, 0.0340, 0.0900, 0.0596, 0.0340], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0282, 0.0315, 0.0235, 0.0225, 0.0311, 0.0282, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:20:28,750 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1360, 2.0254, 1.5157, 1.2768, 1.8362, 0.9363, 1.0687, 1.5839], + device='cuda:2'), covar=tensor([0.0687, 0.0462, 0.0848, 0.0557, 0.0393, 0.1064, 0.0537, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0284, 0.0318, 0.0237, 0.0227, 0.0313, 0.0285, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:20:43,356 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 08:21:00,821 INFO [train.py:903] (2/4) Epoch 6, batch 5200, loss[loss=0.2774, simple_loss=0.3405, pruned_loss=0.1072, over 19605.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3338, pruned_loss=0.1031, over 3818161.93 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:21:04,236 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.495e+02 6.191e+02 7.598e+02 1.014e+03 2.218e+03, threshold=1.520e+03, percent-clipped=2.0 +2023-04-01 08:21:14,298 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 08:21:35,817 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0859, 1.3056, 1.5136, 1.1690, 2.8120, 3.4971, 3.3736, 3.8070], + device='cuda:2'), covar=tensor([0.1580, 0.2974, 0.2970, 0.1991, 0.0402, 0.0152, 0.0183, 0.0130], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0284, 0.0318, 0.0247, 0.0201, 0.0129, 0.0203, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:21:56,494 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 08:22:00,052 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1656, 1.8207, 1.3748, 1.2913, 1.6910, 1.0210, 1.0306, 1.6190], + device='cuda:2'), covar=tensor([0.0567, 0.0533, 0.0880, 0.0449, 0.0382, 0.0990, 0.0499, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0281, 0.0314, 0.0233, 0.0224, 0.0308, 0.0279, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:22:03,017 INFO [train.py:903] (2/4) Epoch 6, batch 5250, loss[loss=0.2841, simple_loss=0.3477, pruned_loss=0.1102, over 19677.00 frames. 
], tot_loss[loss=0.2702, simple_loss=0.334, pruned_loss=0.1032, over 3823540.13 frames. ], batch size: 60, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:05,670 INFO [train.py:903] (2/4) Epoch 6, batch 5300, loss[loss=0.314, simple_loss=0.3712, pruned_loss=0.1284, over 19484.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3345, pruned_loss=0.1031, over 3826300.86 frames. ], batch size: 64, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:10,377 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.313e+02 6.384e+02 7.596e+02 9.799e+02 2.153e+03, threshold=1.519e+03, percent-clipped=7.0 +2023-04-01 08:23:16,765 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1379, 1.5631, 1.6882, 2.1839, 1.8989, 2.0002, 1.8105, 2.0909], + device='cuda:2'), covar=tensor([0.0789, 0.1600, 0.1301, 0.0884, 0.1240, 0.0408, 0.0987, 0.0574], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0358, 0.0284, 0.0236, 0.0301, 0.0244, 0.0271, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:23:22,421 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 08:23:56,538 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:24:06,729 INFO [train.py:903] (2/4) Epoch 6, batch 5350, loss[loss=0.2434, simple_loss=0.3044, pruned_loss=0.09116, over 19402.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3326, pruned_loss=0.1021, over 3835333.99 frames. ], batch size: 48, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:24:41,839 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 08:25:07,430 INFO [train.py:903] (2/4) Epoch 6, batch 5400, loss[loss=0.2917, simple_loss=0.367, pruned_loss=0.1082, over 18699.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3335, pruned_loss=0.1024, over 3830981.56 frames. ], batch size: 74, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:25:15,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.754e+02 6.102e+02 7.285e+02 1.015e+03 2.320e+03, threshold=1.457e+03, percent-clipped=6.0 +2023-04-01 08:25:19,793 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39547.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:30,328 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:48,015 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.77 vs. limit=5.0 +2023-04-01 08:26:01,219 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:13,204 INFO [train.py:903] (2/4) Epoch 6, batch 5450, loss[loss=0.3009, simple_loss=0.3593, pruned_loss=0.1213, over 18228.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3345, pruned_loss=0.1033, over 3826244.02 frames. ], batch size: 84, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:13,643 INFO [train.py:903] (2/4) Epoch 6, batch 5500, loss[loss=0.2507, simple_loss=0.3152, pruned_loss=0.09311, over 19725.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3358, pruned_loss=0.1043, over 3829806.84 frames. 
], batch size: 51, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:18,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.238e+02 6.796e+02 8.775e+02 1.086e+03 2.226e+03, threshold=1.755e+03, percent-clipped=13.0 +2023-04-01 08:27:20,742 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:27:33,884 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39657.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:27:36,842 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 08:27:40,449 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39662.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:28:00,341 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3836, 1.2210, 1.4843, 1.1755, 2.7669, 3.4816, 3.2331, 3.6837], + device='cuda:2'), covar=tensor([0.1355, 0.3043, 0.3002, 0.1804, 0.0377, 0.0140, 0.0218, 0.0138], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0284, 0.0316, 0.0246, 0.0198, 0.0128, 0.0201, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:28:14,675 INFO [train.py:903] (2/4) Epoch 6, batch 5550, loss[loss=0.2909, simple_loss=0.3493, pruned_loss=0.1163, over 19776.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3347, pruned_loss=0.1037, over 3832631.82 frames. ], batch size: 56, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:28:21,837 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 08:29:11,950 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 08:29:15,361 INFO [train.py:903] (2/4) Epoch 6, batch 5600, loss[loss=0.2743, simple_loss=0.3446, pruned_loss=0.102, over 19775.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3345, pruned_loss=0.1036, over 3831900.29 frames. ], batch size: 56, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:29:20,719 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 6.221e+02 7.656e+02 9.358e+02 1.388e+03, threshold=1.531e+03, percent-clipped=0.0 +2023-04-01 08:29:43,054 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:12,423 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:19,682 INFO [train.py:903] (2/4) Epoch 6, batch 5650, loss[loss=0.2668, simple_loss=0.3252, pruned_loss=0.1042, over 19622.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3342, pruned_loss=0.1037, over 3836176.67 frames. 
], batch size: 50, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:30:47,705 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2187, 1.4363, 1.8882, 1.5440, 3.4249, 2.5395, 3.5830, 1.2638], + device='cuda:2'), covar=tensor([0.1963, 0.3142, 0.1916, 0.1465, 0.1154, 0.1483, 0.1267, 0.3000], + device='cuda:2'), in_proj_covar=tensor([0.0444, 0.0519, 0.0506, 0.0409, 0.0565, 0.0449, 0.0630, 0.0454], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:31:02,188 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:08,824 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 08:31:13,812 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39833.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:21,784 INFO [train.py:903] (2/4) Epoch 6, batch 5700, loss[loss=0.2331, simple_loss=0.301, pruned_loss=0.08263, over 19477.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3312, pruned_loss=0.1019, over 3838531.01 frames. ], batch size: 49, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:22,171 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1251, 1.1670, 1.5791, 0.9773, 2.5060, 2.8364, 2.7230, 3.2158], + device='cuda:2'), covar=tensor([0.1497, 0.3984, 0.3608, 0.2152, 0.0443, 0.0210, 0.0329, 0.0185], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0284, 0.0315, 0.0248, 0.0201, 0.0130, 0.0200, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:31:26,539 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.066e+02 6.807e+02 8.639e+02 1.032e+03 2.369e+03, threshold=1.728e+03, percent-clipped=2.0 +2023-04-01 08:31:57,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8703, 1.3453, 1.4644, 1.6993, 1.6797, 1.6106, 1.4836, 1.6862], + device='cuda:2'), covar=tensor([0.0862, 0.1473, 0.1384, 0.0989, 0.1108, 0.0536, 0.1086, 0.0715], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0354, 0.0282, 0.0237, 0.0301, 0.0243, 0.0273, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:32:12,193 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 08:32:17,525 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2248, 3.9713, 2.1616, 2.5331, 3.4092, 1.8507, 1.4171, 1.9621], + device='cuda:2'), covar=tensor([0.0851, 0.0252, 0.0891, 0.0520, 0.0323, 0.0910, 0.0835, 0.0575], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0275, 0.0311, 0.0236, 0.0222, 0.0307, 0.0283, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:32:21,377 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. limit=5.0 +2023-04-01 08:32:21,667 INFO [train.py:903] (2/4) Epoch 6, batch 5750, loss[loss=0.2759, simple_loss=0.3424, pruned_loss=0.1047, over 19345.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3322, pruned_loss=0.103, over 3839178.36 frames. 
], batch size: 66, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:32:23,990 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 08:32:30,905 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 08:32:35,621 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 08:32:52,585 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8412, 1.8526, 2.0953, 2.7534, 2.5264, 2.3195, 2.0568, 2.6648], + device='cuda:2'), covar=tensor([0.0696, 0.1729, 0.1247, 0.0918, 0.1274, 0.0425, 0.1032, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0355, 0.0283, 0.0238, 0.0303, 0.0243, 0.0273, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:32:58,465 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:23,265 INFO [train.py:903] (2/4) Epoch 6, batch 5800, loss[loss=0.3458, simple_loss=0.3877, pruned_loss=0.1519, over 19678.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3325, pruned_loss=0.103, over 3830097.78 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:33:23,690 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:28,288 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:29,043 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+02 6.519e+02 7.840e+02 9.394e+02 2.455e+03, threshold=1.568e+03, percent-clipped=4.0 +2023-04-01 08:34:01,395 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2117, 1.2124, 1.4760, 0.9678, 2.3813, 2.9954, 2.7036, 3.1822], + device='cuda:2'), covar=tensor([0.1417, 0.3046, 0.2948, 0.2089, 0.0443, 0.0210, 0.0235, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0284, 0.0314, 0.0248, 0.0201, 0.0130, 0.0200, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:34:26,991 INFO [train.py:903] (2/4) Epoch 6, batch 5850, loss[loss=0.2488, simple_loss=0.3105, pruned_loss=0.09359, over 19475.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3342, pruned_loss=0.1041, over 3795563.20 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:34:40,713 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40001.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:34:59,477 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:29,005 INFO [train.py:903] (2/4) Epoch 6, batch 5900, loss[loss=0.2762, simple_loss=0.3473, pruned_loss=0.1026, over 19791.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3347, pruned_loss=0.1042, over 3806197.41 frames. ], batch size: 56, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:35:31,456 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-01 08:35:31,778 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:33,716 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.177e+02 6.123e+02 7.695e+02 9.772e+02 2.844e+03, threshold=1.539e+03, percent-clipped=4.0 +2023-04-01 08:35:53,057 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 08:36:30,103 INFO [train.py:903] (2/4) Epoch 6, batch 5950, loss[loss=0.2905, simple_loss=0.3543, pruned_loss=0.1133, over 18253.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3347, pruned_loss=0.1038, over 3806034.50 frames. ], batch size: 83, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:03,223 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40116.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:37:18,131 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:37:30,836 INFO [train.py:903] (2/4) Epoch 6, batch 6000, loss[loss=0.2275, simple_loss=0.2938, pruned_loss=0.08063, over 19777.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3357, pruned_loss=0.1044, over 3794895.95 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:30,836 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 08:37:43,236 INFO [train.py:937] (2/4) Epoch 6, validation: loss=0.1955, simple_loss=0.2951, pruned_loss=0.04789, over 944034.00 frames. +2023-04-01 08:37:43,238 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 08:37:47,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 6.298e+02 7.514e+02 9.544e+02 1.960e+03, threshold=1.503e+03, percent-clipped=1.0 +2023-04-01 08:38:29,642 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:38:44,846 INFO [train.py:903] (2/4) Epoch 6, batch 6050, loss[loss=0.3052, simple_loss=0.3577, pruned_loss=0.1264, over 19521.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3338, pruned_loss=0.1033, over 3805441.71 frames. ], batch size: 54, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:38:53,716 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:14,854 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:24,382 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:43,632 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40236.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:48,066 INFO [train.py:903] (2/4) Epoch 6, batch 6100, loss[loss=0.2933, simple_loss=0.36, pruned_loss=0.1133, over 19611.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3333, pruned_loss=0.1029, over 3815563.62 frames. 
], batch size: 57, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:39:54,256 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 6.367e+02 7.728e+02 1.144e+03 2.582e+03, threshold=1.546e+03, percent-clipped=10.0 +2023-04-01 08:39:54,676 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:51,267 INFO [train.py:903] (2/4) Epoch 6, batch 6150, loss[loss=0.2391, simple_loss=0.2987, pruned_loss=0.08973, over 14632.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.333, pruned_loss=0.1029, over 3822668.46 frames. ], batch size: 32, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:40:51,609 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40290.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:54,024 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:41:19,456 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 08:41:52,311 INFO [train.py:903] (2/4) Epoch 6, batch 6200, loss[loss=0.324, simple_loss=0.368, pruned_loss=0.14, over 13468.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3335, pruned_loss=0.1033, over 3812214.30 frames. ], batch size: 136, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:41:57,219 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 6.810e+02 8.218e+02 1.008e+03 2.334e+03, threshold=1.644e+03, percent-clipped=5.0 +2023-04-01 08:42:01,843 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:42:33,760 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40372.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:42:46,610 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3945, 1.3623, 1.9316, 1.5557, 3.0826, 2.4733, 3.1809, 1.6125], + device='cuda:2'), covar=tensor([0.1847, 0.3179, 0.1778, 0.1451, 0.1224, 0.1575, 0.1406, 0.2759], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0522, 0.0506, 0.0410, 0.0566, 0.0454, 0.0632, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:42:54,274 INFO [train.py:903] (2/4) Epoch 6, batch 6250, loss[loss=0.244, simple_loss=0.3049, pruned_loss=0.09158, over 19607.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3335, pruned_loss=0.103, over 3809521.46 frames. ], batch size: 50, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:43:04,643 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:43:22,878 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4399, 2.4522, 1.8335, 1.9887, 1.8086, 2.1514, 1.0583, 1.9685], + device='cuda:2'), covar=tensor([0.0244, 0.0294, 0.0265, 0.0365, 0.0420, 0.0401, 0.0559, 0.0410], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0299, 0.0295, 0.0312, 0.0384, 0.0309, 0.0283, 0.0297], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:43:27,279 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-01 08:43:58,139 INFO [train.py:903] (2/4) Epoch 6, batch 6300, loss[loss=0.2408, simple_loss=0.3056, pruned_loss=0.08799, over 19730.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3326, pruned_loss=0.1023, over 3820205.90 frames. ], batch size: 51, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:44:03,777 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.389e+02 8.265e+02 1.061e+03 2.633e+03, threshold=1.653e+03, percent-clipped=7.0 +2023-04-01 08:44:16,518 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4448, 1.1831, 1.2039, 1.8192, 1.5503, 1.8082, 1.9200, 1.6296], + device='cuda:2'), covar=tensor([0.0896, 0.1066, 0.1204, 0.0843, 0.0923, 0.0725, 0.0881, 0.0696], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0238, 0.0234, 0.0267, 0.0257, 0.0221, 0.0219, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 08:44:26,807 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4902, 1.4266, 1.9563, 1.5256, 2.8430, 4.5614, 4.5905, 4.9084], + device='cuda:2'), covar=tensor([0.1433, 0.3101, 0.2857, 0.1939, 0.0466, 0.0140, 0.0145, 0.0083], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0280, 0.0310, 0.0245, 0.0205, 0.0128, 0.0199, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:44:46,060 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 08:44:52,523 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 08:45:00,750 INFO [train.py:903] (2/4) Epoch 6, batch 6350, loss[loss=0.239, simple_loss=0.32, pruned_loss=0.07902, over 19533.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.333, pruned_loss=0.1024, over 3812908.09 frames. ], batch size: 56, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:45:12,820 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:24,298 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:44,992 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40525.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:02,598 INFO [train.py:903] (2/4) Epoch 6, batch 6400, loss[loss=0.2755, simple_loss=0.3432, pruned_loss=0.1039, over 19557.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3336, pruned_loss=0.1029, over 3811676.87 frames. ], batch size: 61, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:46:07,241 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.528e+02 8.039e+02 1.077e+03 1.980e+03, threshold=1.608e+03, percent-clipped=3.0 +2023-04-01 08:46:12,202 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:23,192 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:44,136 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-01 08:46:44,892 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:52,958 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40580.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:47:04,375 INFO [train.py:903] (2/4) Epoch 6, batch 6450, loss[loss=0.2645, simple_loss=0.332, pruned_loss=0.0985, over 19782.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3327, pruned_loss=0.102, over 3825806.73 frames. ], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:47:20,596 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-04-01 08:47:46,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1168, 1.3053, 1.4141, 1.1647, 2.6107, 3.5945, 3.4483, 3.8373], + device='cuda:2'), covar=tensor([0.1531, 0.2947, 0.3062, 0.1989, 0.0461, 0.0149, 0.0188, 0.0124], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0282, 0.0312, 0.0246, 0.0204, 0.0129, 0.0200, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:47:50,031 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 08:47:59,688 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:07,625 INFO [train.py:903] (2/4) Epoch 6, batch 6500, loss[loss=0.2505, simple_loss=0.3185, pruned_loss=0.09127, over 19853.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3331, pruned_loss=0.1024, over 3815877.19 frames. ], batch size: 52, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:48:12,837 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-01 08:48:13,022 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 6.039e+02 7.833e+02 1.001e+03 2.233e+03, threshold=1.567e+03, percent-clipped=5.0 +2023-04-01 08:48:15,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 08:48:44,353 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:47,963 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:12,727 INFO [train.py:903] (2/4) Epoch 6, batch 6550, loss[loss=0.2788, simple_loss=0.3452, pruned_loss=0.1062, over 17635.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3324, pruned_loss=0.1023, over 3807322.60 frames. ], batch size: 101, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:49:15,185 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:18,774 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:36,657 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 08:50:15,130 INFO [train.py:903] (2/4) Epoch 6, batch 6600, loss[loss=0.407, simple_loss=0.4276, pruned_loss=0.1931, over 19542.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3333, pruned_loss=0.1028, over 3805902.25 frames. 
], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:50:19,765 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+02 6.030e+02 7.716e+02 9.913e+02 2.888e+03, threshold=1.543e+03, percent-clipped=3.0 +2023-04-01 08:50:26,098 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40749.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:50:26,203 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4032, 1.4515, 1.9441, 1.5495, 3.2662, 2.6692, 3.4139, 1.4369], + device='cuda:2'), covar=tensor([0.1824, 0.3019, 0.1747, 0.1471, 0.1141, 0.1439, 0.1439, 0.2912], + device='cuda:2'), in_proj_covar=tensor([0.0450, 0.0527, 0.0507, 0.0408, 0.0566, 0.0455, 0.0639, 0.0454], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:51:17,595 INFO [train.py:903] (2/4) Epoch 6, batch 6650, loss[loss=0.2892, simple_loss=0.3522, pruned_loss=0.113, over 19657.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3332, pruned_loss=0.1024, over 3818500.23 frames. ], batch size: 60, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:51:40,497 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40807.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:52:19,361 INFO [train.py:903] (2/4) Epoch 6, batch 6700, loss[loss=0.2877, simple_loss=0.3528, pruned_loss=0.1113, over 19535.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3336, pruned_loss=0.103, over 3818360.94 frames. ], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:52:24,133 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.981e+02 6.315e+02 8.385e+02 9.952e+02 2.559e+03, threshold=1.677e+03, percent-clipped=5.0 +2023-04-01 08:52:39,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:13,370 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:21,366 INFO [train.py:903] (2/4) Epoch 6, batch 6750, loss[loss=0.2146, simple_loss=0.2788, pruned_loss=0.07515, over 19288.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3328, pruned_loss=0.1019, over 3826109.32 frames. 
], batch size: 44, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:53:27,533 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3833, 2.1020, 1.6095, 1.5493, 2.0598, 1.1813, 1.1104, 1.6938], + device='cuda:2'), covar=tensor([0.0654, 0.0578, 0.0748, 0.0487, 0.0370, 0.0920, 0.0616, 0.0327], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0282, 0.0316, 0.0238, 0.0227, 0.0314, 0.0288, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:53:43,413 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2755, 2.1557, 1.7729, 1.7824, 1.5597, 1.7155, 0.3184, 1.1484], + device='cuda:2'), covar=tensor([0.0227, 0.0261, 0.0214, 0.0333, 0.0547, 0.0363, 0.0621, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0304, 0.0300, 0.0322, 0.0394, 0.0318, 0.0292, 0.0308], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 08:53:53,182 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:04,523 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:17,529 INFO [train.py:903] (2/4) Epoch 6, batch 6800, loss[loss=0.2628, simple_loss=0.3303, pruned_loss=0.09766, over 19681.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3328, pruned_loss=0.1015, over 3837322.96 frames. ], batch size: 60, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:54:23,021 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.879e+02 7.609e+02 1.019e+03 2.150e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 08:54:31,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:33,675 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:04,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 08:55:06,579 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 08:55:08,785 INFO [train.py:903] (2/4) Epoch 7, batch 0, loss[loss=0.22, simple_loss=0.289, pruned_loss=0.07551, over 19764.00 frames. ], tot_loss[loss=0.22, simple_loss=0.289, pruned_loss=0.07551, over 19764.00 frames. ], batch size: 47, lr: 1.24e-02, grad_scale: 8.0 +2023-04-01 08:55:08,786 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 08:55:20,419 INFO [train.py:937] (2/4) Epoch 7, validation: loss=0.1957, simple_loss=0.2957, pruned_loss=0.04779, over 944034.00 frames. +2023-04-01 08:55:20,422 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18488MB +2023-04-01 08:55:22,002 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:30,922 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:32,938 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 08:55:40,133 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0434, 2.0332, 1.9257, 3.0111, 1.9818, 2.6795, 2.7045, 1.8510], + device='cuda:2'), covar=tensor([0.2334, 0.1884, 0.0968, 0.1070, 0.2269, 0.0831, 0.1794, 0.1714], + device='cuda:2'), in_proj_covar=tensor([0.0682, 0.0675, 0.0584, 0.0829, 0.0702, 0.0591, 0.0726, 0.0625], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 08:56:04,743 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:16,005 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:19,142 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41015.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:22,014 INFO [train.py:903] (2/4) Epoch 7, batch 50, loss[loss=0.3199, simple_loss=0.3742, pruned_loss=0.1328, over 19633.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.336, pruned_loss=0.1046, over 863620.64 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:56:36,202 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41030.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:51,947 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.786e+02 6.089e+02 7.435e+02 1.027e+03 3.072e+03, threshold=1.487e+03, percent-clipped=7.0 +2023-04-01 08:56:56,507 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 08:57:18,018 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:18,172 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:23,458 INFO [train.py:903] (2/4) Epoch 7, batch 100, loss[loss=0.2304, simple_loss=0.2974, pruned_loss=0.08166, over 19745.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3323, pruned_loss=0.102, over 1522101.48 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:57:34,751 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 08:57:41,751 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.7421, 1.3089, 1.0238, 0.8270, 1.1652, 0.8482, 0.6574, 1.2728], + device='cuda:2'), covar=tensor([0.0494, 0.0503, 0.0794, 0.0449, 0.0373, 0.0877, 0.0514, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0279, 0.0313, 0.0237, 0.0226, 0.0308, 0.0282, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 08:57:46,218 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41088.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:24,488 INFO [train.py:903] (2/4) Epoch 7, batch 150, loss[loss=0.2416, simple_loss=0.3036, pruned_loss=0.08976, over 19030.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3327, pruned_loss=0.1022, over 2025208.11 frames. 
], batch size: 42, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:58:26,870 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:36,387 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:56,747 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.181e+02 6.219e+02 8.190e+02 1.094e+03 2.901e+03, threshold=1.638e+03, percent-clipped=4.0 +2023-04-01 08:59:22,647 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 08:59:23,719 INFO [train.py:903] (2/4) Epoch 7, batch 200, loss[loss=0.2778, simple_loss=0.3464, pruned_loss=0.1046, over 19661.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3344, pruned_loss=0.1038, over 2417100.99 frames. ], batch size: 60, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:59:44,606 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 09:00:18,742 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5330, 1.2196, 1.4864, 1.2291, 2.2216, 0.8700, 1.9274, 2.1653], + device='cuda:2'), covar=tensor([0.0562, 0.2320, 0.2216, 0.1372, 0.0785, 0.1866, 0.0841, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0317, 0.0321, 0.0297, 0.0321, 0.0314, 0.0294, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:00:27,761 INFO [train.py:903] (2/4) Epoch 7, batch 250, loss[loss=0.2563, simple_loss=0.329, pruned_loss=0.09184, over 19582.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.334, pruned_loss=0.1034, over 2734683.94 frames. ], batch size: 61, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:00:37,996 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:40,029 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:59,441 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.194e+02 6.866e+02 8.894e+02 1.080e+03 3.290e+03, threshold=1.779e+03, percent-clipped=6.0 +2023-04-01 09:01:06,582 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:06,858 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.18 vs. limit=5.0 +2023-04-01 09:01:23,879 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:30,755 INFO [train.py:903] (2/4) Epoch 7, batch 300, loss[loss=0.206, simple_loss=0.2889, pruned_loss=0.06155, over 19752.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3329, pruned_loss=0.102, over 2971787.40 frames. ], batch size: 51, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:31,489 INFO [train.py:903] (2/4) Epoch 7, batch 350, loss[loss=0.2303, simple_loss=0.2957, pruned_loss=0.08251, over 19806.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3312, pruned_loss=0.1013, over 3167463.32 frames. ], batch size: 48, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:34,002 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 09:03:02,008 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:04,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.797e+02 7.460e+02 9.435e+02 2.818e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-04-01 09:03:22,770 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:32,954 INFO [train.py:903] (2/4) Epoch 7, batch 400, loss[loss=0.3295, simple_loss=0.3823, pruned_loss=0.1384, over 13498.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.329, pruned_loss=0.09984, over 3288786.53 frames. ], batch size: 136, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:03:44,417 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41377.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:54,264 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:20,756 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:23,223 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:29,234 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3355, 1.5060, 2.0914, 1.6406, 3.2863, 2.7205, 3.4712, 1.4809], + device='cuda:2'), covar=tensor([0.1844, 0.3133, 0.1797, 0.1391, 0.1165, 0.1396, 0.1409, 0.2779], + device='cuda:2'), in_proj_covar=tensor([0.0452, 0.0523, 0.0510, 0.0409, 0.0561, 0.0455, 0.0631, 0.0456], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 09:04:34,728 INFO [train.py:903] (2/4) Epoch 7, batch 450, loss[loss=0.2554, simple_loss=0.3341, pruned_loss=0.08833, over 19790.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3294, pruned_loss=0.09946, over 3411636.59 frames. ], batch size: 56, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:05:02,696 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 09:05:03,841 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 09:05:06,101 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 5.824e+02 7.719e+02 9.807e+02 3.448e+03, threshold=1.544e+03, percent-clipped=7.0 +2023-04-01 09:05:31,578 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:05:37,396 INFO [train.py:903] (2/4) Epoch 7, batch 500, loss[loss=0.2288, simple_loss=0.296, pruned_loss=0.08073, over 19765.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3285, pruned_loss=0.09918, over 3513435.12 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 16.0 +2023-04-01 09:05:44,936 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:06:38,626 INFO [train.py:903] (2/4) Epoch 7, batch 550, loss[loss=0.2427, simple_loss=0.3283, pruned_loss=0.07852, over 19730.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.329, pruned_loss=0.09919, over 3581634.47 frames. 
], batch size: 63, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:06:43,743 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41522.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:07:09,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 6.032e+02 7.411e+02 9.275e+02 1.625e+03, threshold=1.482e+03, percent-clipped=1.0 +2023-04-01 09:07:37,925 INFO [train.py:903] (2/4) Epoch 7, batch 600, loss[loss=0.2137, simple_loss=0.2873, pruned_loss=0.07004, over 19390.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3297, pruned_loss=0.09965, over 3631990.77 frames. ], batch size: 48, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:07:50,879 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:13,414 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 09:08:16,203 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:39,323 INFO [train.py:903] (2/4) Epoch 7, batch 650, loss[loss=0.2388, simple_loss=0.3112, pruned_loss=0.08318, over 19761.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3291, pruned_loss=0.09968, over 3669279.63 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:08:45,340 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:00,397 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41633.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:13,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 5.719e+02 7.418e+02 1.065e+03 4.334e+03, threshold=1.484e+03, percent-clipped=7.0 +2023-04-01 09:09:28,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:40,772 INFO [train.py:903] (2/4) Epoch 7, batch 700, loss[loss=0.2348, simple_loss=0.3116, pruned_loss=0.07895, over 19590.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3288, pruned_loss=0.09938, over 3701121.10 frames. ], batch size: 61, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:09:48,592 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:10:30,223 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 09:10:45,014 INFO [train.py:903] (2/4) Epoch 7, batch 750, loss[loss=0.2136, simple_loss=0.2887, pruned_loss=0.06927, over 19722.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.33, pruned_loss=0.09967, over 3720560.26 frames. 
], batch size: 51, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:10:47,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2611, 2.9724, 1.8653, 2.7752, 1.0027, 2.8546, 2.7061, 2.8089], + device='cuda:2'), covar=tensor([0.1087, 0.1356, 0.2293, 0.0926, 0.3584, 0.0983, 0.0950, 0.1129], + device='cuda:2'), in_proj_covar=tensor([0.0378, 0.0334, 0.0378, 0.0300, 0.0356, 0.0312, 0.0304, 0.0338], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 09:10:59,397 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:15,967 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.751e+02 6.886e+02 8.721e+02 1.519e+03, threshold=1.377e+03, percent-clipped=2.0 +2023-04-01 09:11:31,828 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:46,758 INFO [train.py:903] (2/4) Epoch 7, batch 800, loss[loss=0.2459, simple_loss=0.3212, pruned_loss=0.08529, over 19539.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3301, pruned_loss=0.09974, over 3748428.63 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:11:56,193 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 09:11:58,734 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:09,723 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 09:12:31,079 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:48,123 INFO [train.py:903] (2/4) Epoch 7, batch 850, loss[loss=0.2502, simple_loss=0.3295, pruned_loss=0.08543, over 19773.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3299, pruned_loss=0.09908, over 3777282.05 frames. ], batch size: 56, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:13:10,276 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:23,188 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.000e+02 6.514e+02 8.083e+02 9.646e+02 1.896e+03, threshold=1.617e+03, percent-clipped=5.0 +2023-04-01 09:13:38,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 09:13:40,955 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:50,190 INFO [train.py:903] (2/4) Epoch 7, batch 900, loss[loss=0.2427, simple_loss=0.3162, pruned_loss=0.08465, over 19579.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3288, pruned_loss=0.09848, over 3789960.84 frames. 
], batch size: 57, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:14:50,887 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2891, 3.8522, 2.4636, 3.4413, 1.3246, 3.4925, 3.5839, 3.6957], + device='cuda:2'), covar=tensor([0.0648, 0.1080, 0.1829, 0.0775, 0.3387, 0.0794, 0.0768, 0.0906], + device='cuda:2'), in_proj_covar=tensor([0.0374, 0.0328, 0.0378, 0.0297, 0.0351, 0.0309, 0.0301, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 09:14:51,585 INFO [train.py:903] (2/4) Epoch 7, batch 950, loss[loss=0.2801, simple_loss=0.3463, pruned_loss=0.1069, over 19605.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3288, pruned_loss=0.09824, over 3802855.18 frames. ], batch size: 57, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:14:55,104 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 09:15:01,483 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:15:02,871 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4286, 2.3334, 1.6260, 1.5468, 2.2254, 1.0708, 1.2451, 1.8561], + device='cuda:2'), covar=tensor([0.0677, 0.0492, 0.0827, 0.0512, 0.0339, 0.1011, 0.0604, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0280, 0.0310, 0.0237, 0.0223, 0.0305, 0.0287, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:15:26,245 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.318e+02 6.505e+02 7.519e+02 9.487e+02 1.757e+03, threshold=1.504e+03, percent-clipped=1.0 +2023-04-01 09:15:53,989 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41966.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:15:55,890 INFO [train.py:903] (2/4) Epoch 7, batch 1000, loss[loss=0.2596, simple_loss=0.3337, pruned_loss=0.09274, over 19283.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3269, pruned_loss=0.09722, over 3810964.69 frames. ], batch size: 66, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:16:22,295 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 09:16:37,831 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 09:16:46,614 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3536, 1.2345, 1.6677, 1.2402, 2.7959, 3.5638, 3.4301, 3.9271], + device='cuda:2'), covar=tensor([0.1437, 0.3063, 0.2822, 0.1954, 0.0414, 0.0164, 0.0185, 0.0118], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0278, 0.0307, 0.0244, 0.0200, 0.0132, 0.0199, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 09:16:48,570 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 09:16:56,967 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:16:59,086 INFO [train.py:903] (2/4) Epoch 7, batch 1050, loss[loss=0.2341, simple_loss=0.2967, pruned_loss=0.08578, over 19730.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3272, pruned_loss=0.09747, over 3809268.47 frames. 
], batch size: 46, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:17:18,807 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 09:17:29,387 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 09:17:33,748 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+02 5.894e+02 7.129e+02 9.584e+02 2.561e+03, threshold=1.426e+03, percent-clipped=5.0 +2023-04-01 09:18:00,566 INFO [train.py:903] (2/4) Epoch 7, batch 1100, loss[loss=0.2997, simple_loss=0.3613, pruned_loss=0.1191, over 19550.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3275, pruned_loss=0.09769, over 3822311.93 frames. ], batch size: 56, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:03,370 INFO [train.py:903] (2/4) Epoch 7, batch 1150, loss[loss=0.281, simple_loss=0.3485, pruned_loss=0.1068, over 19648.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3271, pruned_loss=0.09757, over 3818146.42 frames. ], batch size: 58, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:21,142 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:19:33,556 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2869, 2.2551, 1.6777, 1.4552, 2.1734, 1.2836, 1.1983, 1.7956], + device='cuda:2'), covar=tensor([0.0710, 0.0533, 0.0805, 0.0593, 0.0322, 0.0932, 0.0651, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0281, 0.0313, 0.0237, 0.0226, 0.0308, 0.0289, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:19:37,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.892e+02 7.369e+02 1.011e+03 1.805e+03, threshold=1.474e+03, percent-clipped=4.0 +2023-04-01 09:19:51,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2811, 2.2310, 1.6057, 1.5356, 2.0931, 1.2031, 1.3288, 1.6818], + device='cuda:2'), covar=tensor([0.0691, 0.0481, 0.0707, 0.0480, 0.0357, 0.0874, 0.0534, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0280, 0.0313, 0.0237, 0.0225, 0.0308, 0.0287, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:20:05,771 INFO [train.py:903] (2/4) Epoch 7, batch 1200, loss[loss=0.2714, simple_loss=0.3384, pruned_loss=0.1021, over 17975.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3273, pruned_loss=0.09789, over 3821490.11 frames. ], batch size: 83, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:20:29,764 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2972, 2.3984, 1.7305, 1.5380, 2.2676, 1.2437, 1.2799, 1.7284], + device='cuda:2'), covar=tensor([0.0758, 0.0475, 0.0840, 0.0580, 0.0382, 0.0943, 0.0642, 0.0413], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0280, 0.0314, 0.0238, 0.0226, 0.0309, 0.0288, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:20:30,627 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 09:21:08,029 INFO [train.py:903] (2/4) Epoch 7, batch 1250, loss[loss=0.2537, simple_loss=0.3147, pruned_loss=0.09638, over 19775.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3274, pruned_loss=0.0982, over 3817896.68 frames. 
], batch size: 47, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:21:43,463 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 6.237e+02 7.824e+02 1.034e+03 2.254e+03, threshold=1.565e+03, percent-clipped=6.0 +2023-04-01 09:22:09,630 INFO [train.py:903] (2/4) Epoch 7, batch 1300, loss[loss=0.3144, simple_loss=0.3632, pruned_loss=0.1328, over 13316.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3266, pruned_loss=0.09757, over 3810730.36 frames. ], batch size: 136, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:22:09,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:23:02,528 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42310.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:23:12,311 INFO [train.py:903] (2/4) Epoch 7, batch 1350, loss[loss=0.3392, simple_loss=0.3775, pruned_loss=0.1504, over 12782.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3284, pruned_loss=0.09886, over 3791397.13 frames. ], batch size: 136, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:23:20,418 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9356, 2.4775, 1.8321, 1.9469, 2.2680, 1.6425, 1.5250, 1.7907], + device='cuda:2'), covar=tensor([0.0683, 0.0570, 0.0586, 0.0443, 0.0396, 0.0662, 0.0628, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0282, 0.0314, 0.0241, 0.0228, 0.0309, 0.0290, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:23:47,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.606e+02 6.578e+02 7.819e+02 1.024e+03 2.032e+03, threshold=1.564e+03, percent-clipped=3.0 +2023-04-01 09:24:15,696 INFO [train.py:903] (2/4) Epoch 7, batch 1400, loss[loss=0.2276, simple_loss=0.2889, pruned_loss=0.08311, over 19766.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3286, pruned_loss=0.09826, over 3804676.32 frames. ], batch size: 45, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:24:34,411 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:39,185 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42387.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:50,702 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:10,391 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:12,409 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 09:25:17,932 INFO [train.py:903] (2/4) Epoch 7, batch 1450, loss[loss=0.2878, simple_loss=0.3561, pruned_loss=0.1098, over 19732.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3277, pruned_loss=0.0973, over 3813495.55 frames. 
], batch size: 63, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:25:26,464 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42425.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:25:53,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.957e+02 5.854e+02 7.466e+02 9.791e+02 2.115e+03, threshold=1.493e+03, percent-clipped=4.0 +2023-04-01 09:25:56,891 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9708, 4.3881, 4.6328, 4.6643, 1.4712, 4.2893, 3.7944, 4.2818], + device='cuda:2'), covar=tensor([0.1192, 0.0629, 0.0563, 0.0442, 0.5050, 0.0463, 0.0629, 0.1080], + device='cuda:2'), in_proj_covar=tensor([0.0584, 0.0505, 0.0685, 0.0569, 0.0644, 0.0434, 0.0438, 0.0635], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 09:26:19,588 INFO [train.py:903] (2/4) Epoch 7, batch 1500, loss[loss=0.217, simple_loss=0.2894, pruned_loss=0.07233, over 19716.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3283, pruned_loss=0.0978, over 3796168.25 frames. ], batch size: 51, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:20,486 INFO [train.py:903] (2/4) Epoch 7, batch 1550, loss[loss=0.3072, simple_loss=0.3667, pruned_loss=0.1238, over 18880.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3297, pruned_loss=0.09932, over 3802346.83 frames. ], batch size: 74, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:29,147 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:27:55,723 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 6.427e+02 8.071e+02 9.919e+02 2.182e+03, threshold=1.614e+03, percent-clipped=7.0 +2023-04-01 09:28:00,622 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1412, 1.2253, 1.0706, 0.9542, 0.9169, 1.0437, 0.0382, 0.2959], + device='cuda:2'), covar=tensor([0.0468, 0.0460, 0.0277, 0.0326, 0.0913, 0.0376, 0.0763, 0.0732], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0302, 0.0297, 0.0324, 0.0389, 0.0316, 0.0287, 0.0300], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 09:28:23,012 INFO [train.py:903] (2/4) Epoch 7, batch 1600, loss[loss=0.2636, simple_loss=0.3197, pruned_loss=0.1038, over 19456.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3297, pruned_loss=0.09927, over 3808329.38 frames. ], batch size: 49, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:28:41,292 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 09:29:24,660 INFO [train.py:903] (2/4) Epoch 7, batch 1650, loss[loss=0.2346, simple_loss=0.3096, pruned_loss=0.07976, over 19764.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3299, pruned_loss=0.09941, over 3821146.49 frames. 
], batch size: 54, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:29:50,189 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:29:59,743 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.059e+02 6.175e+02 7.945e+02 9.824e+02 2.630e+03, threshold=1.589e+03, percent-clipped=4.0 +2023-04-01 09:30:13,940 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0049, 1.9182, 1.4137, 1.3828, 1.3747, 1.5053, 0.1837, 0.8557], + device='cuda:2'), covar=tensor([0.0277, 0.0313, 0.0257, 0.0344, 0.0622, 0.0381, 0.0629, 0.0529], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0308, 0.0302, 0.0328, 0.0396, 0.0324, 0.0290, 0.0305], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 09:30:23,135 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:26,368 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:27,193 INFO [train.py:903] (2/4) Epoch 7, batch 1700, loss[loss=0.2443, simple_loss=0.3296, pruned_loss=0.07952, over 19620.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3306, pruned_loss=0.09992, over 3803023.23 frames. ], batch size: 57, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:30:43,956 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42681.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 09:30:47,446 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9873, 1.8860, 1.6240, 1.5852, 1.5344, 1.7083, 0.8434, 1.4476], + device='cuda:2'), covar=tensor([0.0227, 0.0328, 0.0218, 0.0299, 0.0468, 0.0374, 0.0550, 0.0396], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0309, 0.0304, 0.0329, 0.0398, 0.0324, 0.0291, 0.0307], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 09:31:02,725 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 09:31:15,474 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42706.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 09:31:29,082 INFO [train.py:903] (2/4) Epoch 7, batch 1750, loss[loss=0.2189, simple_loss=0.2809, pruned_loss=0.07841, over 19749.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3301, pruned_loss=0.09944, over 3811170.39 frames. ], batch size: 46, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:31:50,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-01 09:31:59,543 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:32:05,277 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.682e+02 7.177e+02 9.179e+02 1.731e+03, threshold=1.435e+03, percent-clipped=1.0 +2023-04-01 09:32:14,018 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2595, 1.3370, 2.0094, 1.5649, 2.9819, 2.6444, 3.2724, 1.5809], + device='cuda:2'), covar=tensor([0.2139, 0.3562, 0.2070, 0.1597, 0.1496, 0.1603, 0.1620, 0.3049], + device='cuda:2'), in_proj_covar=tensor([0.0455, 0.0520, 0.0517, 0.0412, 0.0568, 0.0459, 0.0632, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 09:32:32,517 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2244, 1.3658, 1.4312, 1.4479, 2.6650, 1.0027, 1.9339, 2.9111], + device='cuda:2'), covar=tensor([0.0520, 0.2525, 0.2378, 0.1658, 0.0879, 0.2470, 0.1235, 0.0463], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0315, 0.0323, 0.0292, 0.0321, 0.0316, 0.0295, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:32:33,267 INFO [train.py:903] (2/4) Epoch 7, batch 1800, loss[loss=0.2608, simple_loss=0.3145, pruned_loss=0.1035, over 19749.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3298, pruned_loss=0.09915, over 3808013.42 frames. ], batch size: 48, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:33:24,464 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8215, 0.7350, 0.7863, 1.0071, 0.7923, 0.8758, 0.9365, 0.8318], + device='cuda:2'), covar=tensor([0.0612, 0.0752, 0.0799, 0.0535, 0.0675, 0.0599, 0.0678, 0.0568], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0239, 0.0237, 0.0269, 0.0257, 0.0222, 0.0218, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 09:33:27,625 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 09:33:35,068 INFO [train.py:903] (2/4) Epoch 7, batch 1850, loss[loss=0.3074, simple_loss=0.3452, pruned_loss=0.1348, over 19732.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3299, pruned_loss=0.09943, over 3817580.71 frames. ], batch size: 46, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:04,806 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 09:34:09,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 6.596e+02 7.909e+02 1.066e+03 2.536e+03, threshold=1.582e+03, percent-clipped=10.0 +2023-04-01 09:34:22,466 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:25,535 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 09:34:37,132 INFO [train.py:903] (2/4) Epoch 7, batch 1900, loss[loss=0.2551, simple_loss=0.3093, pruned_loss=0.1005, over 19748.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3298, pruned_loss=0.09917, over 3828697.75 frames. 
], batch size: 47, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:37,304 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:44,633 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2906, 2.8945, 1.7990, 2.1034, 1.8311, 2.4340, 0.5260, 2.1159], + device='cuda:2'), covar=tensor([0.0295, 0.0330, 0.0455, 0.0542, 0.0687, 0.0488, 0.0819, 0.0628], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0309, 0.0306, 0.0328, 0.0399, 0.0322, 0.0292, 0.0307], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 09:34:48,059 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:51,109 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 09:34:52,507 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9840, 4.3334, 4.6529, 4.6606, 1.6161, 4.3392, 3.7639, 4.2181], + device='cuda:2'), covar=tensor([0.1179, 0.0654, 0.0559, 0.0476, 0.4710, 0.0411, 0.0575, 0.1128], + device='cuda:2'), in_proj_covar=tensor([0.0577, 0.0499, 0.0684, 0.0560, 0.0638, 0.0429, 0.0434, 0.0639], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 09:34:55,765 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 09:35:14,081 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:35:21,945 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 09:35:38,564 INFO [train.py:903] (2/4) Epoch 7, batch 1950, loss[loss=0.22, simple_loss=0.2953, pruned_loss=0.07234, over 19497.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3298, pruned_loss=0.09883, over 3834721.54 frames. ], batch size: 49, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:35:44,714 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5967, 2.6160, 1.7049, 1.7012, 2.2416, 1.2266, 1.3544, 1.6700], + device='cuda:2'), covar=tensor([0.0729, 0.0385, 0.0862, 0.0537, 0.0398, 0.0983, 0.0589, 0.0415], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0282, 0.0320, 0.0243, 0.0227, 0.0316, 0.0289, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:35:49,491 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 09:36:15,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.682e+02 8.244e+02 9.665e+02 2.689e+03, threshold=1.649e+03, percent-clipped=3.0 +2023-04-01 09:36:41,128 INFO [train.py:903] (2/4) Epoch 7, batch 2000, loss[loss=0.2499, simple_loss=0.3304, pruned_loss=0.08471, over 19709.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3306, pruned_loss=0.09914, over 3838620.25 frames. 
], batch size: 59, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:00,298 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:35,131 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:36,094 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 09:37:37,949 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 09:37:43,657 INFO [train.py:903] (2/4) Epoch 7, batch 2050, loss[loss=0.255, simple_loss=0.3252, pruned_loss=0.09241, over 19529.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3303, pruned_loss=0.09934, over 3833099.60 frames. ], batch size: 54, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:56,206 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 09:37:57,397 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 09:38:05,042 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1791, 2.0245, 1.4799, 1.2475, 1.8356, 1.0468, 1.0321, 1.5808], + device='cuda:2'), covar=tensor([0.0668, 0.0513, 0.0851, 0.0580, 0.0420, 0.0967, 0.0605, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0279, 0.0313, 0.0239, 0.0223, 0.0310, 0.0282, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:38:17,409 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.956e+02 6.149e+02 7.669e+02 9.586e+02 2.177e+03, threshold=1.534e+03, percent-clipped=1.0 +2023-04-01 09:38:18,633 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 09:38:46,703 INFO [train.py:903] (2/4) Epoch 7, batch 2100, loss[loss=0.2546, simple_loss=0.3222, pruned_loss=0.09351, over 19735.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3299, pruned_loss=0.0989, over 3836475.69 frames. ], batch size: 51, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:39:13,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 09:39:35,494 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 09:39:41,485 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:39:46,457 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-01 09:39:48,070 INFO [train.py:903] (2/4) Epoch 7, batch 2150, loss[loss=0.2843, simple_loss=0.35, pruned_loss=0.1093, over 19318.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3294, pruned_loss=0.09889, over 3840750.54 frames. 
], batch size: 66, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:39:57,665 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:12,353 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:23,160 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 6.527e+02 7.673e+02 9.951e+02 2.226e+03, threshold=1.535e+03, percent-clipped=3.0 +2023-04-01 09:40:49,522 INFO [train.py:903] (2/4) Epoch 7, batch 2200, loss[loss=0.2901, simple_loss=0.3648, pruned_loss=0.1077, over 19672.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3302, pruned_loss=0.09916, over 3830712.40 frames. ], batch size: 59, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:40:59,680 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-01 09:41:37,677 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7539, 4.2266, 4.4139, 4.3695, 1.4594, 4.0842, 3.6589, 4.0773], + device='cuda:2'), covar=tensor([0.1051, 0.0568, 0.0470, 0.0479, 0.4286, 0.0440, 0.0539, 0.0955], + device='cuda:2'), in_proj_covar=tensor([0.0568, 0.0496, 0.0667, 0.0552, 0.0631, 0.0423, 0.0428, 0.0626], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 09:41:53,423 INFO [train.py:903] (2/4) Epoch 7, batch 2250, loss[loss=0.3648, simple_loss=0.3945, pruned_loss=0.1676, over 13152.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3293, pruned_loss=0.09862, over 3832243.87 frames. ], batch size: 136, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:57,903 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:19,768 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:21,856 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43241.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:27,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+02 6.206e+02 7.577e+02 9.251e+02 2.641e+03, threshold=1.515e+03, percent-clipped=5.0 +2023-04-01 09:42:31,612 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.8698, 5.2405, 2.7089, 4.6372, 1.1854, 4.9155, 5.0310, 5.2488], + device='cuda:2'), covar=tensor([0.0430, 0.0844, 0.2040, 0.0573, 0.3755, 0.0587, 0.0592, 0.0775], + device='cuda:2'), in_proj_covar=tensor([0.0373, 0.0329, 0.0382, 0.0292, 0.0356, 0.0315, 0.0297, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 09:42:52,556 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:56,683 INFO [train.py:903] (2/4) Epoch 7, batch 2300, loss[loss=0.2231, simple_loss=0.2937, pruned_loss=0.07622, over 19795.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.329, pruned_loss=0.09828, over 3840203.93 frames. ], batch size: 47, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:43:10,468 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 09:43:59,263 INFO [train.py:903] (2/4) Epoch 7, batch 2350, loss[loss=0.2472, simple_loss=0.3221, pruned_loss=0.08616, over 19675.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3286, pruned_loss=0.09784, over 3838239.17 frames. ], batch size: 55, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:44:22,123 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:34,242 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 6.151e+02 7.487e+02 9.533e+02 1.563e+03, threshold=1.497e+03, percent-clipped=2.0 +2023-04-01 09:44:43,141 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 09:44:46,830 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43356.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:59,580 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 09:45:00,609 INFO [train.py:903] (2/4) Epoch 7, batch 2400, loss[loss=0.3148, simple_loss=0.3649, pruned_loss=0.1324, over 19339.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3282, pruned_loss=0.09793, over 3840754.36 frames. ], batch size: 70, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:45:20,064 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:45:49,731 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:46:04,430 INFO [train.py:903] (2/4) Epoch 7, batch 2450, loss[loss=0.2075, simple_loss=0.2788, pruned_loss=0.06812, over 19792.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3283, pruned_loss=0.0978, over 3833852.07 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:46:38,157 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.802e+02 7.669e+02 8.855e+02 2.284e+03, threshold=1.534e+03, percent-clipped=5.0 +2023-04-01 09:47:06,554 INFO [train.py:903] (2/4) Epoch 7, batch 2500, loss[loss=0.2925, simple_loss=0.3551, pruned_loss=0.1149, over 19590.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.328, pruned_loss=0.09788, over 3830329.75 frames. ], batch size: 61, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:48:09,515 INFO [train.py:903] (2/4) Epoch 7, batch 2550, loss[loss=0.2645, simple_loss=0.3183, pruned_loss=0.1054, over 19387.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3261, pruned_loss=0.09672, over 3837444.14 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:48:25,880 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:48:44,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.143e+02 6.070e+02 7.288e+02 8.830e+02 1.707e+03, threshold=1.458e+03, percent-clipped=2.0 +2023-04-01 09:49:05,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 09:49:10,248 INFO [train.py:903] (2/4) Epoch 7, batch 2600, loss[loss=0.2536, simple_loss=0.3195, pruned_loss=0.09386, over 19459.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3266, pruned_loss=0.09653, over 3840338.00 frames. 
], batch size: 64, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:49:41,284 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:06,467 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,269 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:13,002 INFO [train.py:903] (2/4) Epoch 7, batch 2650, loss[loss=0.2439, simple_loss=0.3288, pruned_loss=0.07953, over 19618.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3265, pruned_loss=0.09644, over 3839251.93 frames. ], batch size: 57, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:50:30,426 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3170, 2.2085, 1.5441, 1.4768, 2.0359, 1.1122, 1.1647, 1.7858], + device='cuda:2'), covar=tensor([0.0724, 0.0539, 0.0826, 0.0602, 0.0363, 0.0969, 0.0676, 0.0340], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0278, 0.0316, 0.0239, 0.0221, 0.0308, 0.0281, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:50:30,486 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2670, 1.3441, 1.7136, 1.4512, 2.6064, 2.1909, 2.6290, 1.1645], + device='cuda:2'), covar=tensor([0.1933, 0.3277, 0.1847, 0.1565, 0.1151, 0.1547, 0.1365, 0.2931], + device='cuda:2'), in_proj_covar=tensor([0.0461, 0.0522, 0.0526, 0.0413, 0.0571, 0.0462, 0.0638, 0.0462], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 09:50:35,415 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 09:50:38,365 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43637.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:49,496 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 6.243e+02 7.316e+02 9.618e+02 1.411e+03, threshold=1.463e+03, percent-clipped=0.0 +2023-04-01 09:51:16,499 INFO [train.py:903] (2/4) Epoch 7, batch 2700, loss[loss=0.2853, simple_loss=0.3403, pruned_loss=0.1151, over 13451.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3268, pruned_loss=0.09704, over 3830026.88 frames. ], batch size: 136, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:52:19,241 INFO [train.py:903] (2/4) Epoch 7, batch 2750, loss[loss=0.2408, simple_loss=0.3121, pruned_loss=0.08478, over 19352.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3277, pruned_loss=0.09781, over 3821563.31 frames. 
], batch size: 70, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:52:43,751 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3371, 2.0306, 1.4952, 1.4654, 1.9078, 1.0947, 1.1878, 1.5940], + device='cuda:2'), covar=tensor([0.0594, 0.0541, 0.0887, 0.0467, 0.0325, 0.0890, 0.0585, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0282, 0.0321, 0.0240, 0.0225, 0.0311, 0.0284, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:52:55,474 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.869e+02 8.299e+02 1.091e+03 2.331e+03, threshold=1.660e+03, percent-clipped=8.0 +2023-04-01 09:53:20,569 INFO [train.py:903] (2/4) Epoch 7, batch 2800, loss[loss=0.274, simple_loss=0.3251, pruned_loss=0.1115, over 19071.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3285, pruned_loss=0.09809, over 3814692.02 frames. ], batch size: 42, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:07,589 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:54:22,955 INFO [train.py:903] (2/4) Epoch 7, batch 2850, loss[loss=0.2312, simple_loss=0.2999, pruned_loss=0.08124, over 17780.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3296, pruned_loss=0.09854, over 3806132.78 frames. ], batch size: 39, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:59,105 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.316e+02 8.520e+02 1.005e+03 1.613e+03, threshold=1.704e+03, percent-clipped=0.0 +2023-04-01 09:55:25,669 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 09:55:26,754 INFO [train.py:903] (2/4) Epoch 7, batch 2900, loss[loss=0.2657, simple_loss=0.3348, pruned_loss=0.09835, over 19644.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3282, pruned_loss=0.09772, over 3814180.65 frames. ], batch size: 60, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:55:36,425 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:55:43,245 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7731, 3.1781, 3.2490, 3.2836, 1.2974, 3.0830, 2.7321, 2.9750], + device='cuda:2'), covar=tensor([0.1270, 0.0731, 0.0711, 0.0646, 0.3827, 0.0570, 0.0665, 0.1217], + device='cuda:2'), in_proj_covar=tensor([0.0570, 0.0496, 0.0671, 0.0547, 0.0630, 0.0419, 0.0423, 0.0622], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 09:56:09,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 09:56:23,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3204, 3.0083, 2.1735, 2.8318, 0.8219, 2.8277, 2.7675, 2.9271], + device='cuda:2'), covar=tensor([0.0959, 0.1323, 0.1857, 0.0826, 0.3607, 0.1024, 0.0891, 0.1175], + device='cuda:2'), in_proj_covar=tensor([0.0385, 0.0336, 0.0390, 0.0300, 0.0363, 0.0321, 0.0303, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 09:56:28,082 INFO [train.py:903] (2/4) Epoch 7, batch 2950, loss[loss=0.3104, simple_loss=0.3658, pruned_loss=0.1275, over 19645.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3279, pruned_loss=0.09754, over 3806425.92 frames. 
], batch size: 55, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:56:39,827 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:57:04,994 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.923e+02 7.218e+02 9.278e+02 2.092e+03, threshold=1.444e+03, percent-clipped=1.0 +2023-04-01 09:57:30,518 INFO [train.py:903] (2/4) Epoch 7, batch 3000, loss[loss=0.3754, simple_loss=0.4055, pruned_loss=0.1726, over 13006.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.329, pruned_loss=0.09854, over 3805482.41 frames. ], batch size: 135, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:57:30,519 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 09:57:43,113 INFO [train.py:937] (2/4) Epoch 7, validation: loss=0.1917, simple_loss=0.2919, pruned_loss=0.04574, over 944034.00 frames. +2023-04-01 09:57:43,114 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18546MB +2023-04-01 09:57:46,874 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3240, 2.0690, 2.1327, 2.4958, 2.3296, 2.0885, 2.0138, 2.3950], + device='cuda:2'), covar=tensor([0.0590, 0.1311, 0.0885, 0.0566, 0.0831, 0.0382, 0.0833, 0.0422], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0359, 0.0287, 0.0237, 0.0299, 0.0247, 0.0269, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 09:57:49,871 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 09:57:55,012 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:09,103 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9718, 5.0054, 5.7773, 5.7384, 1.8834, 5.4623, 4.7175, 5.3098], + device='cuda:2'), covar=tensor([0.1020, 0.0652, 0.0461, 0.0389, 0.4199, 0.0275, 0.0493, 0.0960], + device='cuda:2'), in_proj_covar=tensor([0.0562, 0.0491, 0.0666, 0.0541, 0.0622, 0.0416, 0.0419, 0.0618], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 09:58:13,892 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:47,338 INFO [train.py:903] (2/4) Epoch 7, batch 3050, loss[loss=0.2979, simple_loss=0.3536, pruned_loss=0.1211, over 13138.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3277, pruned_loss=0.09808, over 3777354.39 frames. ], batch size: 135, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:59:24,087 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+02 5.923e+02 7.306e+02 8.819e+02 1.422e+03, threshold=1.461e+03, percent-clipped=0.0 +2023-04-01 09:59:50,238 INFO [train.py:903] (2/4) Epoch 7, batch 3100, loss[loss=0.2719, simple_loss=0.3207, pruned_loss=0.1115, over 19400.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.327, pruned_loss=0.09766, over 3791040.56 frames. ], batch size: 48, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:00:50,928 INFO [train.py:903] (2/4) Epoch 7, batch 3150, loss[loss=0.2196, simple_loss=0.2952, pruned_loss=0.07196, over 19738.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3269, pruned_loss=0.09772, over 3785716.76 frames. 
], batch size: 51, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:01:18,572 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 10:01:26,086 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.470e+02 8.010e+02 1.018e+03 2.357e+03, threshold=1.602e+03, percent-clipped=4.0 +2023-04-01 10:01:29,403 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:01:51,332 INFO [train.py:903] (2/4) Epoch 7, batch 3200, loss[loss=0.3011, simple_loss=0.3621, pruned_loss=0.1201, over 18895.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3275, pruned_loss=0.09813, over 3771999.06 frames. ], batch size: 74, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:02:51,381 INFO [train.py:903] (2/4) Epoch 7, batch 3250, loss[loss=0.2731, simple_loss=0.3411, pruned_loss=0.1025, over 18029.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3279, pruned_loss=0.09805, over 3787894.56 frames. ], batch size: 83, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:27,995 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 6.408e+02 8.261e+02 1.024e+03 1.757e+03, threshold=1.652e+03, percent-clipped=4.0 +2023-04-01 10:03:28,446 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:49,220 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:53,349 INFO [train.py:903] (2/4) Epoch 7, batch 3300, loss[loss=0.2986, simple_loss=0.3531, pruned_loss=0.122, over 19592.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3286, pruned_loss=0.09861, over 3787166.99 frames. ], batch size: 57, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:59,190 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:00,249 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 10:04:00,619 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44272.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:57,192 INFO [train.py:903] (2/4) Epoch 7, batch 3350, loss[loss=0.2313, simple_loss=0.2925, pruned_loss=0.08504, over 18586.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3299, pruned_loss=0.09924, over 3791969.77 frames. ], batch size: 41, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:05:00,868 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:05:32,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 6.124e+02 7.865e+02 1.014e+03 2.362e+03, threshold=1.573e+03, percent-clipped=3.0 +2023-04-01 10:05:58,469 INFO [train.py:903] (2/4) Epoch 7, batch 3400, loss[loss=0.2615, simple_loss=0.336, pruned_loss=0.09348, over 18772.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3296, pruned_loss=0.09879, over 3791316.57 frames. 
], batch size: 74, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:06:05,646 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6673, 1.8390, 2.2597, 2.8501, 2.1260, 2.5542, 2.9696, 2.3524], + device='cuda:2'), covar=tensor([0.0660, 0.0978, 0.0890, 0.0873, 0.0883, 0.0719, 0.0750, 0.0632], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0235, 0.0231, 0.0264, 0.0249, 0.0216, 0.0214, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 10:06:20,876 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:06:48,158 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3892, 1.2768, 1.1676, 1.3983, 1.3655, 1.2343, 1.1257, 1.3391], + device='cuda:2'), covar=tensor([0.0690, 0.1120, 0.1018, 0.0635, 0.0787, 0.0450, 0.0915, 0.0509], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0353, 0.0285, 0.0231, 0.0296, 0.0243, 0.0265, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:07:01,367 INFO [train.py:903] (2/4) Epoch 7, batch 3450, loss[loss=0.2729, simple_loss=0.3414, pruned_loss=0.1022, over 19568.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3301, pruned_loss=0.09899, over 3785417.32 frames. ], batch size: 61, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:07:05,301 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 10:07:06,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 10:07:25,052 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:39,987 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 5.624e+02 7.209e+02 9.228e+02 1.461e+03, threshold=1.442e+03, percent-clipped=0.0 +2023-04-01 10:08:03,667 INFO [train.py:903] (2/4) Epoch 7, batch 3500, loss[loss=0.2606, simple_loss=0.3292, pruned_loss=0.096, over 18275.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3296, pruned_loss=0.099, over 3783427.12 frames. ], batch size: 83, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:07,866 INFO [train.py:903] (2/4) Epoch 7, batch 3550, loss[loss=0.2715, simple_loss=0.3434, pruned_loss=0.09977, over 19538.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3291, pruned_loss=0.09857, over 3791944.59 frames. ], batch size: 56, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:10,474 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,627 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,741 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:43,712 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 5.736e+02 7.487e+02 9.426e+02 2.431e+03, threshold=1.497e+03, percent-clipped=5.0 +2023-04-01 10:10:10,027 INFO [train.py:903] (2/4) Epoch 7, batch 3600, loss[loss=0.2422, simple_loss=0.314, pruned_loss=0.08517, over 19774.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3275, pruned_loss=0.09777, over 3810035.74 frames. 
], batch size: 54, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:10:10,259 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:12,086 INFO [train.py:903] (2/4) Epoch 7, batch 3650, loss[loss=0.2397, simple_loss=0.3176, pruned_loss=0.08091, over 19530.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3269, pruned_loss=0.09704, over 3820631.04 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:11:42,345 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:49,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 6.222e+02 7.769e+02 9.647e+02 2.431e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:12:12,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:13,721 INFO [train.py:903] (2/4) Epoch 7, batch 3700, loss[loss=0.2689, simple_loss=0.3365, pruned_loss=0.1006, over 17297.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3266, pruned_loss=0.09699, over 3825501.83 frames. ], batch size: 101, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:12:45,622 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,188 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,967 INFO [train.py:903] (2/4) Epoch 7, batch 3750, loss[loss=0.262, simple_loss=0.3403, pruned_loss=0.09182, over 19540.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3269, pruned_loss=0.09721, over 3819033.30 frames. ], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:13:53,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+02 6.149e+02 7.382e+02 9.468e+02 1.650e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-04-01 10:14:18,150 INFO [train.py:903] (2/4) Epoch 7, batch 3800, loss[loss=0.2326, simple_loss=0.2989, pruned_loss=0.08318, over 19377.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3281, pruned_loss=0.0982, over 3827412.95 frames. ], batch size: 47, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:14:50,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 10:15:19,315 INFO [train.py:903] (2/4) Epoch 7, batch 3850, loss[loss=0.2585, simple_loss=0.3249, pruned_loss=0.09605, over 19573.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.329, pruned_loss=0.09899, over 3822221.10 frames. ], batch size: 52, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:15:57,069 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 6.658e+02 8.285e+02 1.095e+03 3.075e+03, threshold=1.657e+03, percent-clipped=10.0 +2023-04-01 10:16:20,986 INFO [train.py:903] (2/4) Epoch 7, batch 3900, loss[loss=0.2786, simple_loss=0.3308, pruned_loss=0.1132, over 19477.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3293, pruned_loss=0.09918, over 3810816.00 frames. 
], batch size: 49, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:16:48,353 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44889.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:15,640 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:24,381 INFO [train.py:903] (2/4) Epoch 7, batch 3950, loss[loss=0.2865, simple_loss=0.3541, pruned_loss=0.1094, over 19661.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3294, pruned_loss=0.0989, over 3819251.29 frames. ], batch size: 55, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:17:29,149 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 10:18:00,735 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+02 5.655e+02 7.377e+02 9.527e+02 2.304e+03, threshold=1.475e+03, percent-clipped=3.0 +2023-04-01 10:18:17,426 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0869, 2.0789, 1.7600, 1.6351, 1.5285, 1.6921, 0.4066, 1.0639], + device='cuda:2'), covar=tensor([0.0301, 0.0275, 0.0212, 0.0305, 0.0594, 0.0361, 0.0551, 0.0487], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0302, 0.0296, 0.0323, 0.0399, 0.0314, 0.0284, 0.0299], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:18:19,641 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7377, 1.3764, 1.5021, 2.1631, 1.6626, 2.0352, 2.1254, 1.9585], + device='cuda:2'), covar=tensor([0.0917, 0.1108, 0.1107, 0.0878, 0.0996, 0.0748, 0.0902, 0.0659], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0237, 0.0233, 0.0266, 0.0252, 0.0218, 0.0218, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 10:18:26,676 INFO [train.py:903] (2/4) Epoch 7, batch 4000, loss[loss=0.264, simple_loss=0.3377, pruned_loss=0.09516, over 19512.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3277, pruned_loss=0.09733, over 3829625.53 frames. ], batch size: 56, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:11,993 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45004.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:19:15,230 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 10:19:27,789 INFO [train.py:903] (2/4) Epoch 7, batch 4050, loss[loss=0.2771, simple_loss=0.3395, pruned_loss=0.1073, over 17602.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3273, pruned_loss=0.09689, over 3833040.22 frames. ], batch size: 101, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:39,433 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:05,077 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 6.019e+02 6.941e+02 8.524e+02 1.564e+03, threshold=1.388e+03, percent-clipped=1.0 +2023-04-01 10:20:28,973 INFO [train.py:903] (2/4) Epoch 7, batch 4100, loss[loss=0.2498, simple_loss=0.3271, pruned_loss=0.08621, over 19783.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3281, pruned_loss=0.09728, over 3842183.76 frames. 
], batch size: 56, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:20:36,312 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:05,597 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 10:21:07,130 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:15,578 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 10:21:30,597 INFO [train.py:903] (2/4) Epoch 7, batch 4150, loss[loss=0.2307, simple_loss=0.3151, pruned_loss=0.07313, over 19739.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3279, pruned_loss=0.09715, over 3844631.13 frames. ], batch size: 63, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:07,163 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 6.430e+02 8.235e+02 1.004e+03 2.607e+03, threshold=1.647e+03, percent-clipped=6.0 +2023-04-01 10:22:32,968 INFO [train.py:903] (2/4) Epoch 7, batch 4200, loss[loss=0.2585, simple_loss=0.3125, pruned_loss=0.1022, over 19737.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3262, pruned_loss=0.09615, over 3840438.96 frames. ], batch size: 45, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:38,323 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 10:23:33,354 INFO [train.py:903] (2/4) Epoch 7, batch 4250, loss[loss=0.2709, simple_loss=0.3455, pruned_loss=0.0981, over 19663.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3275, pruned_loss=0.09722, over 3835474.47 frames. ], batch size: 60, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:23:49,632 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 10:24:01,353 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 10:24:12,483 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.934e+02 6.110e+02 8.094e+02 1.072e+03 2.309e+03, threshold=1.619e+03, percent-clipped=5.0 +2023-04-01 10:24:27,338 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:37,359 INFO [train.py:903] (2/4) Epoch 7, batch 4300, loss[loss=0.2715, simple_loss=0.3359, pruned_loss=0.1035, over 19297.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3263, pruned_loss=0.09646, over 3821971.46 frames. ], batch size: 66, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:24:57,139 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45283.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:59,395 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45285.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:27,041 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:32,128 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 10:25:39,640 INFO [train.py:903] (2/4) Epoch 7, batch 4350, loss[loss=0.2453, simple_loss=0.298, pruned_loss=0.09634, over 19038.00 frames. 
], tot_loss[loss=0.2597, simple_loss=0.3262, pruned_loss=0.09657, over 3837012.51 frames. ], batch size: 42, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:26:16,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 6.479e+02 7.493e+02 1.002e+03 2.532e+03, threshold=1.499e+03, percent-clipped=5.0 +2023-04-01 10:26:39,980 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45366.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:26:42,861 INFO [train.py:903] (2/4) Epoch 7, batch 4400, loss[loss=0.2089, simple_loss=0.2832, pruned_loss=0.06732, over 19797.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3263, pruned_loss=0.09725, over 3826242.17 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:27:06,062 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 10:27:14,919 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 10:27:34,989 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 10:27:44,673 INFO [train.py:903] (2/4) Epoch 7, batch 4450, loss[loss=0.2835, simple_loss=0.3499, pruned_loss=0.1085, over 19754.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3284, pruned_loss=0.09883, over 3818267.65 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:27:44,839 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:16,090 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:22,924 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.630e+02 6.151e+02 7.769e+02 9.586e+02 4.695e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:28:46,114 INFO [train.py:903] (2/4) Epoch 7, batch 4500, loss[loss=0.2459, simple_loss=0.3133, pruned_loss=0.08925, over 19710.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3286, pruned_loss=0.09887, over 3811132.91 frames. ], batch size: 53, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:29:20,352 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 10:29:29,043 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4011, 1.4587, 1.8353, 1.3784, 2.8527, 3.3613, 3.2833, 3.5682], + device='cuda:2'), covar=tensor([0.1432, 0.2785, 0.2579, 0.1935, 0.0533, 0.0314, 0.0200, 0.0176], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0283, 0.0311, 0.0245, 0.0202, 0.0133, 0.0201, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:29:48,550 INFO [train.py:903] (2/4) Epoch 7, batch 4550, loss[loss=0.2527, simple_loss=0.3333, pruned_loss=0.08605, over 19675.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.329, pruned_loss=0.09908, over 3809955.25 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:30:00,896 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 10:30:09,089 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:23,712 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-01 10:30:27,042 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.783e+02 6.251e+02 7.662e+02 9.726e+02 2.453e+03, threshold=1.532e+03, percent-clipped=6.0 +2023-04-01 10:30:38,146 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:51,055 INFO [train.py:903] (2/4) Epoch 7, batch 4600, loss[loss=0.2036, simple_loss=0.2803, pruned_loss=0.06341, over 19610.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3292, pruned_loss=0.09922, over 3810798.80 frames. ], batch size: 50, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:22,579 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2932, 5.5205, 3.1148, 4.8272, 1.1645, 5.5353, 5.5760, 5.8178], + device='cuda:2'), covar=tensor([0.0409, 0.0946, 0.1730, 0.0578, 0.3879, 0.0570, 0.0450, 0.0641], + device='cuda:2'), in_proj_covar=tensor([0.0380, 0.0332, 0.0391, 0.0296, 0.0357, 0.0321, 0.0307, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 10:31:54,304 INFO [train.py:903] (2/4) Epoch 7, batch 4650, loss[loss=0.2113, simple_loss=0.2767, pruned_loss=0.07293, over 19736.00 frames. ], tot_loss[loss=0.261, simple_loss=0.327, pruned_loss=0.09751, over 3815513.47 frames. ], batch size: 46, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:32:11,895 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 10:32:24,511 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 10:32:33,275 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.859e+02 7.403e+02 8.847e+02 2.429e+03, threshold=1.481e+03, percent-clipped=2.0 +2023-04-01 10:32:55,967 INFO [train.py:903] (2/4) Epoch 7, batch 4700, loss[loss=0.2481, simple_loss=0.3198, pruned_loss=0.08818, over 19692.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3285, pruned_loss=0.09821, over 3808212.35 frames. ], batch size: 59, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:32:59,882 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0188, 2.0172, 1.5556, 1.5330, 1.4229, 1.5246, 0.2519, 0.7765], + device='cuda:2'), covar=tensor([0.0322, 0.0326, 0.0251, 0.0341, 0.0702, 0.0431, 0.0593, 0.0566], + device='cuda:2'), in_proj_covar=tensor([0.0311, 0.0315, 0.0309, 0.0333, 0.0406, 0.0326, 0.0289, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:33:18,881 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-01 10:33:34,168 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2207, 1.3317, 1.7187, 1.4164, 2.5795, 2.1694, 2.5668, 1.1068], + device='cuda:2'), covar=tensor([0.1967, 0.3388, 0.1932, 0.1599, 0.1299, 0.1669, 0.1488, 0.3057], + device='cuda:2'), in_proj_covar=tensor([0.0460, 0.0537, 0.0526, 0.0419, 0.0574, 0.0466, 0.0636, 0.0460], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 10:33:47,812 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45710.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:33:58,616 INFO [train.py:903] (2/4) Epoch 7, batch 4750, loss[loss=0.3352, simple_loss=0.3793, pruned_loss=0.1456, over 13325.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3286, pruned_loss=0.09804, over 3803125.96 frames. ], batch size: 135, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:34:01,243 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:34:08,831 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 10:34:36,016 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 6.576e+02 8.008e+02 9.322e+02 2.457e+03, threshold=1.602e+03, percent-clipped=6.0 +2023-04-01 10:35:01,472 INFO [train.py:903] (2/4) Epoch 7, batch 4800, loss[loss=0.2532, simple_loss=0.32, pruned_loss=0.09317, over 19740.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3272, pruned_loss=0.09724, over 3809442.30 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:35:26,515 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:29,657 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,102 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,132 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:02,113 INFO [train.py:903] (2/4) Epoch 7, batch 4850, loss[loss=0.2553, simple_loss=0.3233, pruned_loss=0.09367, over 19594.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3274, pruned_loss=0.0977, over 3815205.65 frames. ], batch size: 52, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:36:10,648 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45825.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:36:25,121 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 10:36:25,449 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3865, 1.4799, 1.4130, 1.6615, 2.9581, 1.0371, 2.0883, 3.1730], + device='cuda:2'), covar=tensor([0.0369, 0.2182, 0.2304, 0.1367, 0.0612, 0.2226, 0.1058, 0.0327], + device='cuda:2'), in_proj_covar=tensor([0.0316, 0.0322, 0.0333, 0.0302, 0.0329, 0.0322, 0.0304, 0.0320], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:36:27,427 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:40,692 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.989e+02 6.823e+02 8.718e+02 1.115e+03 2.265e+03, threshold=1.744e+03, percent-clipped=6.0 +2023-04-01 10:36:48,793 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 10:36:54,242 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 10:36:54,265 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 10:37:00,547 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7516, 1.8139, 1.5118, 1.3259, 1.3055, 1.5003, 0.1599, 0.5951], + device='cuda:2'), covar=tensor([0.0325, 0.0285, 0.0196, 0.0307, 0.0712, 0.0288, 0.0539, 0.0550], + device='cuda:2'), in_proj_covar=tensor([0.0311, 0.0310, 0.0304, 0.0332, 0.0405, 0.0321, 0.0284, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:37:03,560 INFO [train.py:903] (2/4) Epoch 7, batch 4900, loss[loss=0.24, simple_loss=0.3186, pruned_loss=0.08072, over 19531.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3274, pruned_loss=0.0977, over 3828652.29 frames. ], batch size: 56, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:37:04,813 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 10:37:07,477 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9981, 1.1600, 1.3480, 1.5422, 2.4754, 1.0190, 1.8247, 2.6571], + device='cuda:2'), covar=tensor([0.0594, 0.2705, 0.2594, 0.1474, 0.0870, 0.2312, 0.1287, 0.0507], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0318, 0.0329, 0.0297, 0.0326, 0.0318, 0.0300, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:37:09,936 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5211, 2.3395, 1.6648, 1.5199, 2.0394, 1.0888, 1.1921, 1.6299], + device='cuda:2'), covar=tensor([0.0756, 0.0472, 0.0783, 0.0528, 0.0356, 0.0976, 0.0685, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0278, 0.0306, 0.0235, 0.0223, 0.0306, 0.0280, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:37:14,473 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:37:26,158 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 10:38:05,106 INFO [train.py:903] (2/4) Epoch 7, batch 4950, loss[loss=0.236, simple_loss=0.3027, pruned_loss=0.08468, over 19382.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3287, pruned_loss=0.09852, over 3817764.92 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:38:24,782 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 10:38:44,273 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+02 6.277e+02 7.738e+02 9.356e+02 2.304e+03, threshold=1.548e+03, percent-clipped=1.0 +2023-04-01 10:38:47,748 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 10:38:50,851 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 10:39:09,692 INFO [train.py:903] (2/4) Epoch 7, batch 5000, loss[loss=0.248, simple_loss=0.3124, pruned_loss=0.0918, over 19759.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3281, pruned_loss=0.09801, over 3828641.89 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:39:20,566 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 10:39:30,934 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 10:40:12,335 INFO [train.py:903] (2/4) Epoch 7, batch 5050, loss[loss=0.2493, simple_loss=0.3227, pruned_loss=0.08796, over 19786.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.327, pruned_loss=0.09738, over 3833452.76 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:40:16,061 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6831, 2.0145, 2.0152, 3.0346, 2.7861, 2.3968, 2.2673, 2.6484], + device='cuda:2'), covar=tensor([0.0713, 0.1553, 0.1238, 0.0742, 0.0961, 0.0410, 0.0861, 0.0515], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0350, 0.0282, 0.0237, 0.0297, 0.0241, 0.0267, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:40:36,812 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3359, 1.3104, 1.3948, 1.5575, 2.9187, 1.0689, 2.0804, 3.1250], + device='cuda:2'), covar=tensor([0.0393, 0.2550, 0.2436, 0.1542, 0.0629, 0.2389, 0.1101, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0314, 0.0325, 0.0295, 0.0324, 0.0316, 0.0295, 0.0313], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:40:49,619 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 10:40:51,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 6.208e+02 7.894e+02 9.516e+02 2.052e+03, threshold=1.579e+03, percent-clipped=3.0 +2023-04-01 10:41:08,570 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46064.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:41:13,137 INFO [train.py:903] (2/4) Epoch 7, batch 5100, loss[loss=0.2288, simple_loss=0.3053, pruned_loss=0.07616, over 19658.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3272, pruned_loss=0.09788, over 3826154.19 frames. 
], batch size: 55, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:41:13,947 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 10:41:24,932 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 10:41:28,141 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 10:41:28,560 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46081.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:41:32,529 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 10:42:00,064 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46106.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:42:08,084 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1171, 1.3091, 1.6361, 0.9611, 2.5642, 2.9862, 2.6941, 3.1444], + device='cuda:2'), covar=tensor([0.1434, 0.2881, 0.2640, 0.1941, 0.0446, 0.0230, 0.0242, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0244, 0.0204, 0.0133, 0.0203, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:42:13,344 INFO [train.py:903] (2/4) Epoch 7, batch 5150, loss[loss=0.3222, simple_loss=0.3677, pruned_loss=0.1383, over 19545.00 frames. ], tot_loss[loss=0.262, simple_loss=0.328, pruned_loss=0.09795, over 3815433.50 frames. ], batch size: 56, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:42:26,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 10:42:37,819 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:42:53,639 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.315e+02 6.102e+02 7.228e+02 8.853e+02 1.806e+03, threshold=1.446e+03, percent-clipped=2.0 +2023-04-01 10:43:00,628 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 10:43:15,671 INFO [train.py:903] (2/4) Epoch 7, batch 5200, loss[loss=0.2455, simple_loss=0.3262, pruned_loss=0.08238, over 19518.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3284, pruned_loss=0.09812, over 3807652.36 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:43:30,897 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46179.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:43:31,706 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 10:44:18,567 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 10:44:19,699 INFO [train.py:903] (2/4) Epoch 7, batch 5250, loss[loss=0.2686, simple_loss=0.3367, pruned_loss=0.1002, over 19675.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3283, pruned_loss=0.0979, over 3815682.06 frames. 
], batch size: 59, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:44:23,443 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:44:52,983 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6570, 1.3626, 1.3099, 1.6483, 1.6287, 1.4537, 1.4801, 1.5374], + device='cuda:2'), covar=tensor([0.0885, 0.1378, 0.1282, 0.0868, 0.0957, 0.0513, 0.0916, 0.0647], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0356, 0.0285, 0.0240, 0.0299, 0.0242, 0.0272, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:44:58,427 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 5.998e+02 7.245e+02 8.826e+02 1.462e+03, threshold=1.449e+03, percent-clipped=1.0 +2023-04-01 10:45:00,959 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:45:21,291 INFO [train.py:903] (2/4) Epoch 7, batch 5300, loss[loss=0.2776, simple_loss=0.3404, pruned_loss=0.1074, over 19540.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3276, pruned_loss=0.09743, over 3821788.17 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:45:34,728 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 10:45:40,719 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 10:45:42,203 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7918, 1.4231, 1.3898, 1.8925, 1.5859, 2.0549, 2.1294, 1.7952], + device='cuda:2'), covar=tensor([0.0807, 0.1065, 0.1193, 0.1039, 0.1046, 0.0701, 0.0895, 0.0711], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0234, 0.0234, 0.0263, 0.0253, 0.0219, 0.0216, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 10:45:53,502 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46294.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:46:22,065 INFO [train.py:903] (2/4) Epoch 7, batch 5350, loss[loss=0.2718, simple_loss=0.3421, pruned_loss=0.1008, over 19389.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3269, pruned_loss=0.09688, over 3824824.36 frames. ], batch size: 70, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:46:46,821 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:47:00,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 10:47:03,772 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.681e+02 7.157e+02 9.578e+02 3.754e+03, threshold=1.431e+03, percent-clipped=4.0 +2023-04-01 10:47:12,095 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2179, 1.2982, 1.1112, 0.9862, 1.0242, 1.1202, 0.0397, 0.3325], + device='cuda:2'), covar=tensor([0.0343, 0.0341, 0.0224, 0.0289, 0.0709, 0.0270, 0.0626, 0.0596], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0307, 0.0304, 0.0328, 0.0397, 0.0317, 0.0287, 0.0310], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:47:26,524 INFO [train.py:903] (2/4) Epoch 7, batch 5400, loss[loss=0.2375, simple_loss=0.3199, pruned_loss=0.07758, over 18901.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3278, pruned_loss=0.09761, over 3789294.66 frames. ], batch size: 74, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:47:31,426 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7202, 1.2624, 1.3184, 1.9612, 1.6973, 1.9335, 2.2367, 1.6930], + device='cuda:2'), covar=tensor([0.0800, 0.1091, 0.1112, 0.0916, 0.0898, 0.0757, 0.0891, 0.0786], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0235, 0.0231, 0.0260, 0.0251, 0.0216, 0.0214, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 10:48:29,990 INFO [train.py:903] (2/4) Epoch 7, batch 5450, loss[loss=0.2366, simple_loss=0.3022, pruned_loss=0.08554, over 19485.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3271, pruned_loss=0.09729, over 3805188.03 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:48:35,355 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-01 10:48:49,881 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:10,733 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 7.102e+02 8.361e+02 1.036e+03 2.875e+03, threshold=1.672e+03, percent-clipped=7.0 +2023-04-01 10:49:23,124 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46460.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:31,708 INFO [train.py:903] (2/4) Epoch 7, batch 5500, loss[loss=0.3256, simple_loss=0.3696, pruned_loss=0.1408, over 13765.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3278, pruned_loss=0.09771, over 3804863.05 frames. ], batch size: 136, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:49:58,196 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 10:50:20,915 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:50:27,217 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 10:50:33,174 INFO [train.py:903] (2/4) Epoch 7, batch 5550, loss[loss=0.2806, simple_loss=0.3318, pruned_loss=0.1147, over 19710.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3275, pruned_loss=0.09765, over 3794743.12 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:50:43,596 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 10:50:51,407 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:51:15,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 5.815e+02 7.044e+02 9.146e+02 3.032e+03, threshold=1.409e+03, percent-clipped=3.0 +2023-04-01 10:51:23,840 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.97 vs. limit=5.0 +2023-04-01 10:51:32,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 10:51:35,754 INFO [train.py:903] (2/4) Epoch 7, batch 5600, loss[loss=0.3168, simple_loss=0.3664, pruned_loss=0.1336, over 19541.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3267, pruned_loss=0.09756, over 3778196.98 frames. ], batch size: 61, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:06,759 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:32,988 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:38,698 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:40,328 INFO [train.py:903] (2/4) Epoch 7, batch 5650, loss[loss=0.2228, simple_loss=0.2882, pruned_loss=0.07868, over 19780.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3265, pruned_loss=0.09774, over 3786063.61 frames. ], batch size: 47, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:42,926 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:03,561 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46638.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:20,358 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.976e+02 7.674e+02 9.559e+02 1.706e+03, threshold=1.535e+03, percent-clipped=4.0 +2023-04-01 10:53:28,324 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 10:53:42,089 INFO [train.py:903] (2/4) Epoch 7, batch 5700, loss[loss=0.2909, simple_loss=0.3633, pruned_loss=0.1092, over 19543.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3266, pruned_loss=0.09759, over 3778341.58 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:54:00,757 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1288, 1.1601, 1.3569, 1.2606, 1.6410, 1.6572, 1.8801, 0.5381], + device='cuda:2'), covar=tensor([0.1898, 0.3277, 0.1906, 0.1624, 0.1359, 0.1848, 0.1190, 0.3106], + device='cuda:2'), in_proj_covar=tensor([0.0460, 0.0539, 0.0533, 0.0421, 0.0585, 0.0468, 0.0645, 0.0464], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 10:54:43,287 INFO [train.py:903] (2/4) Epoch 7, batch 5750, loss[loss=0.2606, simple_loss=0.3364, pruned_loss=0.09241, over 19366.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3272, pruned_loss=0.09746, over 3779506.27 frames. ], batch size: 66, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:54:45,658 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-01 10:54:55,192 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 10:54:59,348 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 10:54:59,809 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 10:55:25,496 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+02 6.148e+02 7.021e+02 8.191e+02 1.564e+03, threshold=1.404e+03, percent-clipped=1.0 +2023-04-01 10:55:28,128 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:55:45,395 INFO [train.py:903] (2/4) Epoch 7, batch 5800, loss[loss=0.2897, simple_loss=0.3499, pruned_loss=0.1148, over 19661.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3262, pruned_loss=0.09666, over 3799322.36 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:56:46,142 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2560, 2.1500, 1.7102, 1.6219, 1.4038, 1.7209, 0.2628, 1.0479], + device='cuda:2'), covar=tensor([0.0300, 0.0300, 0.0265, 0.0383, 0.0702, 0.0403, 0.0676, 0.0568], + device='cuda:2'), in_proj_covar=tensor([0.0310, 0.0308, 0.0306, 0.0328, 0.0402, 0.0322, 0.0290, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 10:56:47,247 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4710, 1.2539, 1.1177, 1.3333, 1.1476, 1.3313, 1.0837, 1.2925], + device='cuda:2'), covar=tensor([0.0935, 0.1150, 0.1430, 0.0959, 0.1068, 0.0534, 0.1170, 0.0742], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0353, 0.0283, 0.0240, 0.0296, 0.0239, 0.0270, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 10:56:49,193 INFO [train.py:903] (2/4) Epoch 7, batch 5850, loss[loss=0.2476, simple_loss=0.3202, pruned_loss=0.08752, over 19615.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3266, pruned_loss=0.09659, over 3799630.89 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:02,129 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:57:29,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.374e+02 9.303e+02 2.879e+03, threshold=1.475e+03, percent-clipped=6.0 +2023-04-01 10:57:51,528 INFO [train.py:903] (2/4) Epoch 7, batch 5900, loss[loss=0.2988, simple_loss=0.3613, pruned_loss=0.1181, over 19299.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.326, pruned_loss=0.09627, over 3824814.25 frames. ], batch size: 66, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:57,272 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 10:58:16,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 10:58:52,930 INFO [train.py:903] (2/4) Epoch 7, batch 5950, loss[loss=0.2848, simple_loss=0.3422, pruned_loss=0.1137, over 19595.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3263, pruned_loss=0.09665, over 3823317.00 frames. 
], batch size: 61, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:59:34,057 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.644e+02 7.805e+02 9.690e+02 1.794e+03, threshold=1.561e+03, percent-clipped=4.0 +2023-04-01 10:59:40,930 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:48,577 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:52,256 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-01 10:59:52,806 INFO [train.py:903] (2/4) Epoch 7, batch 6000, loss[loss=0.2911, simple_loss=0.3471, pruned_loss=0.1176, over 19686.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3276, pruned_loss=0.09772, over 3830140.52 frames. ], batch size: 53, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 10:59:52,806 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 11:00:05,305 INFO [train.py:937] (2/4) Epoch 7, validation: loss=0.1903, simple_loss=0.2902, pruned_loss=0.04516, over 944034.00 frames. +2023-04-01 11:00:05,306 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18546MB +2023-04-01 11:00:20,557 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9111, 1.1154, 1.3955, 0.5239, 2.2631, 2.4477, 2.1521, 2.5572], + device='cuda:2'), covar=tensor([0.1491, 0.3286, 0.3044, 0.2196, 0.0421, 0.0214, 0.0332, 0.0241], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0283, 0.0314, 0.0246, 0.0204, 0.0134, 0.0203, 0.0168], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:00:57,538 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47009.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:09,018 INFO [train.py:903] (2/4) Epoch 7, batch 6050, loss[loss=0.2478, simple_loss=0.3218, pruned_loss=0.08686, over 19855.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3276, pruned_loss=0.09739, over 3843627.62 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:01:30,893 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:49,944 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.317e+02 6.018e+02 7.577e+02 1.005e+03 2.434e+03, threshold=1.515e+03, percent-clipped=7.0 +2023-04-01 11:02:04,186 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-01 11:02:13,574 INFO [train.py:903] (2/4) Epoch 7, batch 6100, loss[loss=0.2333, simple_loss=0.3034, pruned_loss=0.08164, over 19574.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3275, pruned_loss=0.09695, over 3825076.80 frames. 
], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:02:18,695 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:19,975 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3526, 2.2758, 1.6918, 1.4903, 1.4357, 1.7440, 0.3087, 1.1164], + device='cuda:2'), covar=tensor([0.0294, 0.0307, 0.0286, 0.0453, 0.0768, 0.0444, 0.0674, 0.0594], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0310, 0.0309, 0.0328, 0.0403, 0.0322, 0.0291, 0.0313], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:02:26,684 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:03:15,748 INFO [train.py:903] (2/4) Epoch 7, batch 6150, loss[loss=0.2261, simple_loss=0.2952, pruned_loss=0.07845, over 16939.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.327, pruned_loss=0.09665, over 3818921.22 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:03:41,641 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 11:03:56,557 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.986e+02 7.602e+02 9.492e+02 2.168e+03, threshold=1.520e+03, percent-clipped=5.0 +2023-04-01 11:04:15,666 INFO [train.py:903] (2/4) Epoch 7, batch 6200, loss[loss=0.2668, simple_loss=0.3264, pruned_loss=0.1036, over 19872.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3267, pruned_loss=0.09682, over 3802122.55 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:04:20,356 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:04:59,842 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2272, 3.6777, 3.7742, 3.7926, 1.4692, 3.5253, 3.0494, 3.4195], + device='cuda:2'), covar=tensor([0.1069, 0.0802, 0.0621, 0.0544, 0.4264, 0.0528, 0.0686, 0.1163], + device='cuda:2'), in_proj_covar=tensor([0.0591, 0.0519, 0.0709, 0.0585, 0.0653, 0.0451, 0.0447, 0.0663], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 11:05:17,526 INFO [train.py:903] (2/4) Epoch 7, batch 6250, loss[loss=0.2453, simple_loss=0.3098, pruned_loss=0.09036, over 19581.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3262, pruned_loss=0.09636, over 3796870.49 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:05:47,057 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 11:05:57,378 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 6.379e+02 7.672e+02 9.726e+02 2.182e+03, threshold=1.534e+03, percent-clipped=2.0 +2023-04-01 11:06:08,355 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:06:19,229 INFO [train.py:903] (2/4) Epoch 7, batch 6300, loss[loss=0.2174, simple_loss=0.2799, pruned_loss=0.07742, over 19727.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.326, pruned_loss=0.09667, over 3789564.21 frames. 
], batch size: 46, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:06:43,141 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:21,234 INFO [train.py:903] (2/4) Epoch 7, batch 6350, loss[loss=0.3278, simple_loss=0.3714, pruned_loss=0.1421, over 13622.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3268, pruned_loss=0.09689, over 3795819.37 frames. ], batch size: 138, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:07:33,246 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:42,340 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47335.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:02,956 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 5.885e+02 7.911e+02 1.072e+03 3.322e+03, threshold=1.582e+03, percent-clipped=7.0 +2023-04-01 11:08:05,641 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:13,863 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:22,754 INFO [train.py:903] (2/4) Epoch 7, batch 6400, loss[loss=0.2305, simple_loss=0.2891, pruned_loss=0.08597, over 19730.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3253, pruned_loss=0.0961, over 3811889.00 frames. ], batch size: 45, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:08:40,147 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2829, 3.8883, 2.6467, 3.4806, 1.0505, 3.5633, 3.6088, 3.7777], + device='cuda:2'), covar=tensor([0.0658, 0.1031, 0.1799, 0.0712, 0.3611, 0.0816, 0.0690, 0.0932], + device='cuda:2'), in_proj_covar=tensor([0.0387, 0.0340, 0.0399, 0.0294, 0.0365, 0.0322, 0.0318, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 11:09:00,257 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:09:25,580 INFO [train.py:903] (2/4) Epoch 7, batch 6450, loss[loss=0.2504, simple_loss=0.3261, pruned_loss=0.08737, over 19531.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3252, pruned_loss=0.09576, over 3802485.12 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:05,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 6.283e+02 7.588e+02 1.008e+03 1.535e+03, threshold=1.518e+03, percent-clipped=0.0 +2023-04-01 11:10:08,024 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 11:10:26,645 INFO [train.py:903] (2/4) Epoch 7, batch 6500, loss[loss=0.2631, simple_loss=0.3413, pruned_loss=0.09238, over 19685.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3251, pruned_loss=0.09563, over 3802725.77 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:29,905 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 11:11:15,975 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9285, 1.3606, 1.0692, 0.9841, 1.0942, 0.9257, 0.8490, 1.2073], + device='cuda:2'), covar=tensor([0.0448, 0.0585, 0.0866, 0.0471, 0.0400, 0.0953, 0.0496, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0279, 0.0310, 0.0237, 0.0223, 0.0306, 0.0280, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:11:19,620 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 11:11:27,830 INFO [train.py:903] (2/4) Epoch 7, batch 6550, loss[loss=0.2639, simple_loss=0.3328, pruned_loss=0.09747, over 19731.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3251, pruned_loss=0.09577, over 3806847.59 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:11:58,316 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:12:10,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+02 6.347e+02 8.071e+02 1.082e+03 2.174e+03, threshold=1.614e+03, percent-clipped=4.0 +2023-04-01 11:12:29,850 INFO [train.py:903] (2/4) Epoch 7, batch 6600, loss[loss=0.2223, simple_loss=0.2981, pruned_loss=0.07326, over 19690.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3263, pruned_loss=0.09623, over 3817167.32 frames. ], batch size: 53, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:12:30,288 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:13,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:31,698 INFO [train.py:903] (2/4) Epoch 7, batch 6650, loss[loss=0.1977, simple_loss=0.2678, pruned_loss=0.0638, over 15123.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3284, pruned_loss=0.0972, over 3809932.15 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:14:11,832 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 6.649e+02 7.986e+02 1.008e+03 1.623e+03, threshold=1.597e+03, percent-clipped=0.0 +2023-04-01 11:14:32,263 INFO [train.py:903] (2/4) Epoch 7, batch 6700, loss[loss=0.3116, simple_loss=0.3717, pruned_loss=0.1257, over 19687.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3276, pruned_loss=0.09683, over 3812261.23 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:28,525 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 11:15:30,803 INFO [train.py:903] (2/4) Epoch 7, batch 6750, loss[loss=0.3415, simple_loss=0.3808, pruned_loss=0.1511, over 19778.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3265, pruned_loss=0.09598, over 3822902.67 frames. ], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:31,163 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:15:40,706 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-01 11:15:58,015 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:09,116 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.932e+02 5.831e+02 7.200e+02 8.445e+02 1.747e+03, threshold=1.440e+03, percent-clipped=3.0 +2023-04-01 11:16:15,220 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:19,790 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0701, 1.9827, 1.7933, 1.6902, 4.5276, 0.8263, 2.4772, 4.6592], + device='cuda:2'), covar=tensor([0.0256, 0.2118, 0.2302, 0.1647, 0.0547, 0.2683, 0.1168, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0316, 0.0323, 0.0296, 0.0324, 0.0319, 0.0296, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:16:28,689 INFO [train.py:903] (2/4) Epoch 7, batch 6800, loss[loss=0.2385, simple_loss=0.3059, pruned_loss=0.0856, over 19358.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3249, pruned_loss=0.09507, over 3831983.31 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:17:14,592 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 11:17:15,032 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 11:17:18,393 INFO [train.py:903] (2/4) Epoch 8, batch 0, loss[loss=0.2752, simple_loss=0.3451, pruned_loss=0.1026, over 19602.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3451, pruned_loss=0.1026, over 19602.00 frames. ], batch size: 57, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:17:18,393 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 11:17:30,982 INFO [train.py:937] (2/4) Epoch 8, validation: loss=0.1916, simple_loss=0.2915, pruned_loss=0.0458, over 944034.00 frames. +2023-04-01 11:17:30,984 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18546MB +2023-04-01 11:17:41,950 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 11:17:43,302 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:07,530 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47827.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:18:32,758 INFO [train.py:903] (2/4) Epoch 8, batch 50, loss[loss=0.2338, simple_loss=0.3149, pruned_loss=0.07636, over 19666.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3307, pruned_loss=0.09857, over 863066.83 frames. ], batch size: 58, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:18:38,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 5.925e+02 7.453e+02 9.478e+02 2.348e+03, threshold=1.491e+03, percent-clipped=8.0 +2023-04-01 11:18:44,812 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:52,187 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.85 vs. limit=5.0 +2023-04-01 11:19:06,097 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-01 11:19:32,495 INFO [train.py:903] (2/4) Epoch 8, batch 100, loss[loss=0.3337, simple_loss=0.3749, pruned_loss=0.1462, over 13095.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.329, pruned_loss=0.0978, over 1515047.64 frames. ], batch size: 137, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:19:42,621 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 11:20:32,672 INFO [train.py:903] (2/4) Epoch 8, batch 150, loss[loss=0.1885, simple_loss=0.2614, pruned_loss=0.05784, over 19765.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3269, pruned_loss=0.09667, over 2027804.57 frames. ], batch size: 47, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:20:38,423 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.629e+02 6.208e+02 7.626e+02 9.440e+02 2.273e+03, threshold=1.525e+03, percent-clipped=3.0 +2023-04-01 11:21:07,748 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:21:29,250 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 11:21:32,700 INFO [train.py:903] (2/4) Epoch 8, batch 200, loss[loss=0.2286, simple_loss=0.2999, pruned_loss=0.07868, over 19606.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3264, pruned_loss=0.09638, over 2417101.67 frames. ], batch size: 50, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:21:36,534 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:22:15,579 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8731, 1.2326, 1.5253, 0.5602, 2.0517, 2.4553, 2.0558, 2.5761], + device='cuda:2'), covar=tensor([0.1480, 0.3101, 0.2938, 0.2317, 0.0494, 0.0211, 0.0385, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0247, 0.0208, 0.0136, 0.0204, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:22:35,824 INFO [train.py:903] (2/4) Epoch 8, batch 250, loss[loss=0.242, simple_loss=0.3246, pruned_loss=0.07963, over 19713.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3258, pruned_loss=0.0958, over 2727439.76 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:22:42,346 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 6.073e+02 7.344e+02 8.973e+02 2.163e+03, threshold=1.469e+03, percent-clipped=4.0 +2023-04-01 11:23:30,354 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:36,836 INFO [train.py:903] (2/4) Epoch 8, batch 300, loss[loss=0.2232, simple_loss=0.3025, pruned_loss=0.07196, over 19667.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3254, pruned_loss=0.09563, over 2964737.00 frames. 
], batch size: 58, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:23:37,265 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9038, 1.3528, 1.0554, 0.9277, 1.1837, 0.8739, 0.8170, 1.3007], + device='cuda:2'), covar=tensor([0.0451, 0.0557, 0.0972, 0.0590, 0.0406, 0.1035, 0.0557, 0.0348], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0285, 0.0320, 0.0243, 0.0231, 0.0314, 0.0288, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:23:41,630 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:55,968 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:59,613 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1928, 1.4095, 1.9904, 1.4902, 2.9388, 2.6361, 3.2255, 1.2921], + device='cuda:2'), covar=tensor([0.2090, 0.3509, 0.2002, 0.1624, 0.1520, 0.1664, 0.1736, 0.3287], + device='cuda:2'), in_proj_covar=tensor([0.0458, 0.0536, 0.0532, 0.0418, 0.0575, 0.0468, 0.0629, 0.0461], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 11:24:14,434 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0354, 2.0729, 2.0578, 2.9261, 1.9493, 2.6797, 2.5628, 1.9724], + device='cuda:2'), covar=tensor([0.2882, 0.2311, 0.1126, 0.1307, 0.2631, 0.1003, 0.2382, 0.2004], + device='cuda:2'), in_proj_covar=tensor([0.0721, 0.0723, 0.0607, 0.0852, 0.0734, 0.0632, 0.0748, 0.0657], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 11:24:27,816 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:37,682 INFO [train.py:903] (2/4) Epoch 8, batch 350, loss[loss=0.2461, simple_loss=0.3205, pruned_loss=0.08589, over 19663.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3236, pruned_loss=0.09453, over 3141373.01 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:24:39,964 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 11:24:42,119 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:43,059 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.994e+02 6.192e+02 7.149e+02 9.949e+02 1.629e+03, threshold=1.430e+03, percent-clipped=6.0 +2023-04-01 11:25:08,559 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48171.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:25:22,147 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2021, 1.5823, 1.7258, 2.4558, 1.9125, 1.9175, 2.3542, 1.9287], + device='cuda:2'), covar=tensor([0.0785, 0.1211, 0.1140, 0.1013, 0.0945, 0.0902, 0.0979, 0.0805], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0235, 0.0232, 0.0262, 0.0246, 0.0217, 0.0211, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 11:25:33,655 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0467, 1.2724, 1.6594, 0.8650, 2.5300, 3.0021, 2.7286, 3.1522], + device='cuda:2'), covar=tensor([0.1501, 0.3169, 0.2779, 0.2206, 0.0441, 0.0166, 0.0263, 0.0195], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0284, 0.0313, 0.0244, 0.0207, 0.0136, 0.0202, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:25:37,710 INFO [train.py:903] (2/4) Epoch 8, batch 400, loss[loss=0.3697, simple_loss=0.4118, pruned_loss=0.1638, over 19793.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3248, pruned_loss=0.09508, over 3300571.20 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:03,478 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:26:05,315 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 11:26:21,232 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48230.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:26:33,642 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4136, 1.2175, 1.1869, 1.7291, 1.4929, 1.5777, 1.6502, 1.4029], + device='cuda:2'), covar=tensor([0.0659, 0.0882, 0.0933, 0.0670, 0.0652, 0.0670, 0.0755, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0235, 0.0234, 0.0263, 0.0248, 0.0219, 0.0212, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 11:26:38,947 INFO [train.py:903] (2/4) Epoch 8, batch 450, loss[loss=0.3005, simple_loss=0.3581, pruned_loss=0.1215, over 19661.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3239, pruned_loss=0.09422, over 3425492.44 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:45,637 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.626e+02 7.034e+02 8.568e+02 1.629e+03, threshold=1.407e+03, percent-clipped=1.0 +2023-04-01 11:27:03,639 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:27:11,128 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 11:27:12,253 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 11:27:27,260 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48286.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:27:41,082 INFO [train.py:903] (2/4) Epoch 8, batch 500, loss[loss=0.2192, simple_loss=0.2819, pruned_loss=0.07824, over 19747.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3234, pruned_loss=0.0942, over 3514498.07 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:37,115 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:28:41,343 INFO [train.py:903] (2/4) Epoch 8, batch 550, loss[loss=0.2671, simple_loss=0.3312, pruned_loss=0.1016, over 19849.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3236, pruned_loss=0.09388, over 3571311.13 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:47,053 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.262e+02 6.228e+02 7.563e+02 9.337e+02 1.593e+03, threshold=1.513e+03, percent-clipped=3.0 +2023-04-01 11:28:47,323 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3664, 3.9426, 2.5042, 3.5020, 0.8554, 3.6412, 3.6452, 3.6992], + device='cuda:2'), covar=tensor([0.0604, 0.1059, 0.1852, 0.0786, 0.3872, 0.0758, 0.0770, 0.0902], + device='cuda:2'), in_proj_covar=tensor([0.0393, 0.0344, 0.0398, 0.0302, 0.0366, 0.0325, 0.0318, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 11:29:23,918 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2935, 2.9159, 2.3525, 2.1833, 2.0565, 2.3167, 0.7539, 2.0118], + device='cuda:2'), covar=tensor([0.0367, 0.0351, 0.0349, 0.0665, 0.0694, 0.0683, 0.0789, 0.0714], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0313, 0.0309, 0.0328, 0.0401, 0.0321, 0.0289, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:29:42,878 INFO [train.py:903] (2/4) Epoch 8, batch 600, loss[loss=0.2719, simple_loss=0.3475, pruned_loss=0.09819, over 19732.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.323, pruned_loss=0.09371, over 3631987.11 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:30:05,058 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:26,097 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 11:30:31,036 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:39,389 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:44,674 INFO [train.py:903] (2/4) Epoch 8, batch 650, loss[loss=0.2125, simple_loss=0.2828, pruned_loss=0.07109, over 19754.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.323, pruned_loss=0.09418, over 3676355.23 frames. 
], batch size: 46, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:30:50,368 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 6.183e+02 7.491e+02 9.829e+02 2.830e+03, threshold=1.498e+03, percent-clipped=3.0 +2023-04-01 11:31:17,374 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48471.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:31:45,456 INFO [train.py:903] (2/4) Epoch 8, batch 700, loss[loss=0.2467, simple_loss=0.3111, pruned_loss=0.09121, over 19393.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3228, pruned_loss=0.09358, over 3709060.25 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:31:45,800 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:17,631 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:44,079 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48542.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:32:48,888 INFO [train.py:903] (2/4) Epoch 8, batch 750, loss[loss=0.3394, simple_loss=0.3797, pruned_loss=0.1496, over 19580.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09419, over 3728167.16 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:32:49,362 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:52,580 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48549.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:55,670 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.054e+02 7.636e+02 9.380e+02 1.990e+03, threshold=1.527e+03, percent-clipped=3.0 +2023-04-01 11:33:13,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48567.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:33:21,444 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48574.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:33:36,198 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2360, 1.2020, 1.5053, 1.2027, 2.6638, 3.4931, 3.2649, 3.7882], + device='cuda:2'), covar=tensor([0.1505, 0.3297, 0.3007, 0.2045, 0.0479, 0.0164, 0.0216, 0.0144], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0285, 0.0316, 0.0246, 0.0209, 0.0135, 0.0202, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:33:38,455 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9975, 1.6174, 1.6113, 2.1026, 1.8394, 1.7508, 1.5669, 1.8881], + device='cuda:2'), covar=tensor([0.0836, 0.1602, 0.1264, 0.0804, 0.1199, 0.0499, 0.1114, 0.0618], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0358, 0.0285, 0.0238, 0.0303, 0.0243, 0.0273, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:33:49,557 INFO [train.py:903] (2/4) Epoch 8, batch 800, loss[loss=0.2913, simple_loss=0.3546, pruned_loss=0.114, over 19666.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3221, pruned_loss=0.09328, over 3742567.68 frames. 
], batch size: 55, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:02,676 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 11:34:34,478 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9900, 3.6299, 2.3070, 3.3157, 1.0223, 3.3149, 3.2961, 3.4001], + device='cuda:2'), covar=tensor([0.0841, 0.1378, 0.2053, 0.0803, 0.3662, 0.0930, 0.0843, 0.1012], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0344, 0.0401, 0.0302, 0.0366, 0.0328, 0.0318, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 11:34:51,085 INFO [train.py:903] (2/4) Epoch 8, batch 850, loss[loss=0.2437, simple_loss=0.3137, pruned_loss=0.0868, over 19676.00 frames. ], tot_loss[loss=0.253, simple_loss=0.321, pruned_loss=0.09251, over 3764710.14 frames. ], batch size: 53, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:57,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 5.922e+02 7.936e+02 9.993e+02 1.897e+03, threshold=1.587e+03, percent-clipped=5.0 +2023-04-01 11:35:39,776 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 11:35:39,898 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:35:43,333 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48689.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:35:50,881 INFO [train.py:903] (2/4) Epoch 8, batch 900, loss[loss=0.2411, simple_loss=0.3149, pruned_loss=0.0836, over 19612.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3203, pruned_loss=0.0922, over 3780070.54 frames. ], batch size: 50, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:35,877 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0532, 2.0321, 1.6601, 1.4254, 1.5173, 1.5337, 0.2541, 0.8531], + device='cuda:2'), covar=tensor([0.0293, 0.0305, 0.0227, 0.0350, 0.0658, 0.0407, 0.0599, 0.0552], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0313, 0.0311, 0.0329, 0.0402, 0.0324, 0.0291, 0.0311], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 11:36:54,667 INFO [train.py:903] (2/4) Epoch 8, batch 950, loss[loss=0.2553, simple_loss=0.3302, pruned_loss=0.09021, over 19674.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3198, pruned_loss=0.09194, over 3795137.68 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:56,556 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 11:37:03,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.996e+02 5.931e+02 7.048e+02 8.289e+02 1.665e+03, threshold=1.410e+03, percent-clipped=1.0 +2023-04-01 11:37:11,362 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:13,852 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8124, 4.2663, 4.4486, 4.4119, 1.4842, 4.1093, 3.5713, 4.1472], + device='cuda:2'), covar=tensor([0.1105, 0.0556, 0.0480, 0.0477, 0.4680, 0.0420, 0.0537, 0.0889], + device='cuda:2'), in_proj_covar=tensor([0.0587, 0.0519, 0.0699, 0.0588, 0.0647, 0.0441, 0.0444, 0.0648], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 11:37:42,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:57,228 INFO [train.py:903] (2/4) Epoch 8, batch 1000, loss[loss=0.2495, simple_loss=0.3192, pruned_loss=0.0899, over 19302.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3186, pruned_loss=0.0908, over 3820259.00 frames. ], batch size: 66, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:38:03,541 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:08,242 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:40,041 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:50,191 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 11:38:51,484 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:59,282 INFO [train.py:903] (2/4) Epoch 8, batch 1050, loss[loss=0.225, simple_loss=0.3051, pruned_loss=0.0724, over 19570.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3199, pruned_loss=0.09117, over 3813656.71 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:39:06,227 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.500e+02 6.561e+02 8.180e+02 1.521e+03, threshold=1.312e+03, percent-clipped=1.0 +2023-04-01 11:39:11,137 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:30,883 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 11:39:34,254 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:58,992 INFO [train.py:903] (2/4) Epoch 8, batch 1100, loss[loss=0.3264, simple_loss=0.3715, pruned_loss=0.1406, over 13490.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3217, pruned_loss=0.09242, over 3797650.57 frames. 
], batch size: 137, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:40:03,993 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48900.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:41:00,192 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48945.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:41:00,839 INFO [train.py:903] (2/4) Epoch 8, batch 1150, loss[loss=0.2419, simple_loss=0.3063, pruned_loss=0.08878, over 19376.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3217, pruned_loss=0.09253, over 3799195.46 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:41:09,122 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.943e+02 6.952e+02 8.882e+02 1.618e+03, threshold=1.390e+03, percent-clipped=5.0 +2023-04-01 11:41:30,938 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:42:04,109 INFO [train.py:903] (2/4) Epoch 8, batch 1200, loss[loss=0.2907, simple_loss=0.351, pruned_loss=0.1152, over 19673.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3208, pruned_loss=0.09182, over 3816368.06 frames. ], batch size: 53, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:42:32,686 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 11:43:05,004 INFO [train.py:903] (2/4) Epoch 8, batch 1250, loss[loss=0.2161, simple_loss=0.2879, pruned_loss=0.0721, over 19478.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3221, pruned_loss=0.09282, over 3823461.78 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:43:11,746 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.529e+02 6.811e+02 8.521e+02 1.008e+03 2.064e+03, threshold=1.704e+03, percent-clipped=4.0 +2023-04-01 11:43:18,073 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:43:50,342 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:43:51,358 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9441, 4.2526, 4.6097, 4.5670, 1.6715, 4.2551, 3.7671, 4.1862], + device='cuda:2'), covar=tensor([0.1118, 0.0791, 0.0526, 0.0466, 0.4957, 0.0546, 0.0523, 0.1076], + device='cuda:2'), in_proj_covar=tensor([0.0592, 0.0521, 0.0706, 0.0595, 0.0658, 0.0451, 0.0447, 0.0655], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 11:44:05,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 11:44:06,037 INFO [train.py:903] (2/4) Epoch 8, batch 1300, loss[loss=0.2356, simple_loss=0.3188, pruned_loss=0.07619, over 19474.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3214, pruned_loss=0.09245, over 3833186.60 frames. ], batch size: 64, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:44:49,027 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:07,332 INFO [train.py:903] (2/4) Epoch 8, batch 1350, loss[loss=0.2656, simple_loss=0.3329, pruned_loss=0.09915, over 13710.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3218, pruned_loss=0.09309, over 3824727.04 frames. 
], batch size: 135, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:45:08,146 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 11:45:16,539 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.835e+02 7.092e+02 8.908e+02 2.388e+03, threshold=1.418e+03, percent-clipped=3.0 +2023-04-01 11:45:20,488 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:22,506 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:41,553 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8683, 3.3620, 1.7814, 2.2155, 2.8166, 1.7833, 1.2476, 1.8043], + device='cuda:2'), covar=tensor([0.1053, 0.0355, 0.0882, 0.0526, 0.0436, 0.0847, 0.0855, 0.0606], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0290, 0.0315, 0.0243, 0.0232, 0.0309, 0.0284, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:45:42,716 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6676, 1.2057, 1.5667, 1.3767, 3.2484, 0.9112, 2.0879, 3.4810], + device='cuda:2'), covar=tensor([0.0387, 0.2551, 0.2401, 0.1669, 0.0607, 0.2472, 0.1278, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0313, 0.0317, 0.0325, 0.0298, 0.0325, 0.0319, 0.0297, 0.0321], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:45:52,089 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:54,148 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49183.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:46:10,833 INFO [train.py:903] (2/4) Epoch 8, batch 1400, loss[loss=0.2654, simple_loss=0.3348, pruned_loss=0.09803, over 19761.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3226, pruned_loss=0.09323, over 3833306.12 frames. ], batch size: 63, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:46:17,513 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49200.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:47:12,329 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 11:47:13,437 INFO [train.py:903] (2/4) Epoch 8, batch 1450, loss[loss=0.2454, simple_loss=0.3249, pruned_loss=0.08293, over 19682.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3231, pruned_loss=0.09391, over 3807562.16 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:47:19,910 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 6.215e+02 8.146e+02 9.729e+02 2.293e+03, threshold=1.629e+03, percent-clipped=2.0 +2023-04-01 11:47:22,611 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:14,511 INFO [train.py:903] (2/4) Epoch 8, batch 1500, loss[loss=0.2124, simple_loss=0.2786, pruned_loss=0.07307, over 19775.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.323, pruned_loss=0.09431, over 3801896.56 frames. 
], batch size: 49, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:48:16,766 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49298.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:38,634 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49315.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:49:05,472 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8674, 1.5746, 1.4202, 1.8494, 1.7025, 1.5498, 1.4004, 1.6969], + device='cuda:2'), covar=tensor([0.0922, 0.1358, 0.1415, 0.0976, 0.1122, 0.0608, 0.1174, 0.0703], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0346, 0.0278, 0.0232, 0.0290, 0.0238, 0.0266, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 11:49:14,519 INFO [train.py:903] (2/4) Epoch 8, batch 1550, loss[loss=0.2612, simple_loss=0.3324, pruned_loss=0.09499, over 19122.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3224, pruned_loss=0.09428, over 3807640.96 frames. ], batch size: 69, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:49:23,151 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+02 6.392e+02 7.844e+02 9.464e+02 1.840e+03, threshold=1.569e+03, percent-clipped=1.0 +2023-04-01 11:49:42,560 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1536, 5.5431, 2.9922, 4.7694, 0.9868, 5.3861, 5.4686, 5.6646], + device='cuda:2'), covar=tensor([0.0407, 0.0927, 0.1903, 0.0613, 0.4226, 0.0507, 0.0556, 0.0615], + device='cuda:2'), in_proj_covar=tensor([0.0393, 0.0341, 0.0399, 0.0298, 0.0368, 0.0326, 0.0318, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 11:50:17,464 INFO [train.py:903] (2/4) Epoch 8, batch 1600, loss[loss=0.243, simple_loss=0.3232, pruned_loss=0.08139, over 19611.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3225, pruned_loss=0.09377, over 3819911.67 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:50:25,482 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5327, 3.7891, 4.1687, 4.0938, 2.3630, 3.8361, 3.4520, 3.8523], + device='cuda:2'), covar=tensor([0.1058, 0.2125, 0.0529, 0.0523, 0.3635, 0.0677, 0.0567, 0.0930], + device='cuda:2'), in_proj_covar=tensor([0.0600, 0.0533, 0.0717, 0.0598, 0.0667, 0.0458, 0.0454, 0.0663], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 11:50:32,384 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2230, 1.2605, 1.6648, 1.3683, 2.4568, 2.0295, 2.5469, 0.8873], + device='cuda:2'), covar=tensor([0.1891, 0.3230, 0.1843, 0.1591, 0.1179, 0.1640, 0.1200, 0.3126], + device='cuda:2'), in_proj_covar=tensor([0.0459, 0.0536, 0.0532, 0.0416, 0.0576, 0.0464, 0.0627, 0.0465], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 11:50:38,474 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 11:51:12,148 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:51:20,003 INFO [train.py:903] (2/4) Epoch 8, batch 1650, loss[loss=0.2154, simple_loss=0.2823, pruned_loss=0.07426, over 19732.00 frames. 
], tot_loss[loss=0.2555, simple_loss=0.3228, pruned_loss=0.09411, over 3811491.02 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:51:24,786 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49450.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:51:26,747 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 6.230e+02 7.478e+02 9.229e+02 3.510e+03, threshold=1.496e+03, percent-clipped=3.0 +2023-04-01 11:52:21,802 INFO [train.py:903] (2/4) Epoch 8, batch 1700, loss[loss=0.211, simple_loss=0.2761, pruned_loss=0.07298, over 19758.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3239, pruned_loss=0.09435, over 3801390.15 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:02,532 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 11:53:23,404 INFO [train.py:903] (2/4) Epoch 8, batch 1750, loss[loss=0.2806, simple_loss=0.3511, pruned_loss=0.1051, over 19672.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3238, pruned_loss=0.09441, over 3809511.75 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:31,457 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.147e+02 7.390e+02 1.012e+03 1.809e+03, threshold=1.478e+03, percent-clipped=6.0 +2023-04-01 11:53:35,097 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49554.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:56,585 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:58,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 11:54:06,091 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:09,496 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8547, 4.4361, 2.5404, 3.8628, 1.0951, 4.0129, 4.0749, 4.2256], + device='cuda:2'), covar=tensor([0.0558, 0.0906, 0.2044, 0.0741, 0.3959, 0.0778, 0.0774, 0.0934], + device='cuda:2'), in_proj_covar=tensor([0.0396, 0.0343, 0.0401, 0.0299, 0.0366, 0.0325, 0.0316, 0.0356], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 11:54:27,944 INFO [train.py:903] (2/4) Epoch 8, batch 1800, loss[loss=0.2477, simple_loss=0.3104, pruned_loss=0.09244, over 19549.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3235, pruned_loss=0.09402, over 3819458.53 frames. 
], batch size: 56, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:54:28,353 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:31,433 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:51,865 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5224, 3.9785, 4.1872, 4.1671, 1.4450, 3.8345, 3.3570, 3.8145], + device='cuda:2'), covar=tensor([0.1122, 0.0774, 0.0550, 0.0494, 0.4809, 0.0577, 0.0623, 0.1044], + device='cuda:2'), in_proj_covar=tensor([0.0606, 0.0533, 0.0718, 0.0602, 0.0669, 0.0461, 0.0454, 0.0665], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 11:55:25,403 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 11:55:26,231 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-01 11:55:30,994 INFO [train.py:903] (2/4) Epoch 8, batch 1850, loss[loss=0.2793, simple_loss=0.346, pruned_loss=0.1063, over 19688.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3238, pruned_loss=0.09399, over 3829941.01 frames. ], batch size: 60, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:55:38,001 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.845e+02 7.519e+02 8.649e+02 2.522e+03, threshold=1.504e+03, percent-clipped=4.0 +2023-04-01 11:56:02,469 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 11:56:31,215 INFO [train.py:903] (2/4) Epoch 8, batch 1900, loss[loss=0.284, simple_loss=0.3493, pruned_loss=0.1093, over 17253.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3239, pruned_loss=0.09444, over 3832174.76 frames. ], batch size: 101, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:56:48,651 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 11:56:52,389 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:56:54,437 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 11:57:19,022 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 11:57:33,009 INFO [train.py:903] (2/4) Epoch 8, batch 1950, loss[loss=0.2621, simple_loss=0.3348, pruned_loss=0.09473, over 19659.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3223, pruned_loss=0.09315, over 3840221.52 frames. 
], batch size: 55, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:57:40,107 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.492e+02 5.402e+02 6.624e+02 8.916e+02 2.925e+03, threshold=1.325e+03, percent-clipped=4.0 +2023-04-01 11:58:20,350 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:20,496 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:32,993 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49794.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:58:35,926 INFO [train.py:903] (2/4) Epoch 8, batch 2000, loss[loss=0.2468, simple_loss=0.3205, pruned_loss=0.08655, over 16989.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3213, pruned_loss=0.09257, over 3835243.47 frames. ], batch size: 101, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:34,104 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.99 vs. limit=5.0 +2023-04-01 11:59:35,612 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 11:59:38,072 INFO [train.py:903] (2/4) Epoch 8, batch 2050, loss[loss=0.225, simple_loss=0.2971, pruned_loss=0.07648, over 19377.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3218, pruned_loss=0.09262, over 3832561.12 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:45,939 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.411e+02 7.156e+02 9.098e+02 3.444e+03, threshold=1.431e+03, percent-clipped=9.0 +2023-04-01 11:59:53,828 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 11:59:55,113 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 12:00:17,461 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 12:00:39,981 INFO [train.py:903] (2/4) Epoch 8, batch 2100, loss[loss=0.2767, simple_loss=0.3429, pruned_loss=0.1052, over 19615.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3225, pruned_loss=0.09303, over 3820899.03 frames. ], batch size: 61, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:00:43,786 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:00:55,243 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49909.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:01:09,458 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 12:01:32,558 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-01 12:01:40,095 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9155, 2.0141, 2.0374, 2.9131, 1.9139, 2.6267, 2.5836, 1.9158], + device='cuda:2'), covar=tensor([0.2827, 0.2363, 0.1188, 0.1289, 0.2630, 0.1033, 0.2464, 0.2169], + device='cuda:2'), in_proj_covar=tensor([0.0724, 0.0734, 0.0612, 0.0859, 0.0731, 0.0636, 0.0760, 0.0658], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:01:41,840 INFO [train.py:903] (2/4) Epoch 8, batch 2150, loss[loss=0.2775, simple_loss=0.3468, pruned_loss=0.1041, over 19667.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3227, pruned_loss=0.09338, over 3827727.56 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:01:48,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 5.997e+02 7.086e+02 8.659e+02 2.224e+03, threshold=1.417e+03, percent-clipped=8.0 +2023-04-01 12:01:49,683 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3630, 2.1877, 1.6938, 1.6262, 1.4699, 1.7946, 0.4507, 1.2320], + device='cuda:2'), covar=tensor([0.0269, 0.0310, 0.0300, 0.0464, 0.0665, 0.0421, 0.0639, 0.0533], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0307, 0.0308, 0.0327, 0.0397, 0.0319, 0.0288, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 12:02:06,537 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3261, 1.4724, 1.9957, 1.5328, 3.3584, 2.7804, 3.5405, 1.5463], + device='cuda:2'), covar=tensor([0.1907, 0.3168, 0.1874, 0.1485, 0.1132, 0.1427, 0.1361, 0.2814], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0530, 0.0526, 0.0411, 0.0567, 0.0461, 0.0628, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:02:12,039 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:41,650 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:43,586 INFO [train.py:903] (2/4) Epoch 8, batch 2200, loss[loss=0.2251, simple_loss=0.2932, pruned_loss=0.07848, over 19805.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3226, pruned_loss=0.09307, over 3840282.84 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:48,609 INFO [train.py:903] (2/4) Epoch 8, batch 2250, loss[loss=0.231, simple_loss=0.3045, pruned_loss=0.07871, over 18140.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3227, pruned_loss=0.09291, over 3843265.84 frames. 
], batch size: 83, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:51,231 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3520, 3.9677, 2.3519, 3.6172, 0.9969, 3.6033, 3.6565, 3.7345], + device='cuda:2'), covar=tensor([0.0611, 0.0988, 0.2046, 0.0729, 0.3742, 0.0879, 0.0816, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0390, 0.0338, 0.0398, 0.0294, 0.0360, 0.0323, 0.0312, 0.0353], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 12:03:55,474 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+02 6.238e+02 7.806e+02 1.014e+03 2.092e+03, threshold=1.561e+03, percent-clipped=8.0 +2023-04-01 12:03:58,811 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2626, 5.6161, 3.2388, 4.9838, 1.4178, 5.4439, 5.5575, 5.6943], + device='cuda:2'), covar=tensor([0.0414, 0.0858, 0.1591, 0.0544, 0.3779, 0.0520, 0.0520, 0.0614], + device='cuda:2'), in_proj_covar=tensor([0.0390, 0.0339, 0.0398, 0.0294, 0.0360, 0.0323, 0.0313, 0.0353], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 12:04:40,213 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-01 12:04:51,001 INFO [train.py:903] (2/4) Epoch 8, batch 2300, loss[loss=0.2695, simple_loss=0.3433, pruned_loss=0.0979, over 19573.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3222, pruned_loss=0.09264, over 3833301.23 frames. ], batch size: 61, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:04,983 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 12:05:31,278 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:52,942 INFO [train.py:903] (2/4) Epoch 8, batch 2350, loss[loss=0.2566, simple_loss=0.3207, pruned_loss=0.09622, over 19841.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.322, pruned_loss=0.09228, over 3837523.51 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:53,214 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:59,868 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.938e+02 7.771e+02 9.106e+02 1.869e+03, threshold=1.554e+03, percent-clipped=2.0 +2023-04-01 12:06:03,833 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:18,129 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50165.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:06:36,731 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:38,685 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 12:06:47,964 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50190.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:06:54,685 INFO [train.py:903] (2/4) Epoch 8, batch 2400, loss[loss=0.2346, simple_loss=0.3145, pruned_loss=0.07733, over 19342.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3214, pruned_loss=0.09193, over 3831804.28 frames. 
], batch size: 66, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:06:54,693 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 12:07:20,918 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50215.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:07:54,630 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:07:58,090 INFO [train.py:903] (2/4) Epoch 8, batch 2450, loss[loss=0.2299, simple_loss=0.3087, pruned_loss=0.07556, over 19834.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3232, pruned_loss=0.09291, over 3817131.35 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:08:05,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.846e+02 6.354e+02 7.339e+02 9.160e+02 2.255e+03, threshold=1.468e+03, percent-clipped=3.0 +2023-04-01 12:08:07,822 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-01 12:08:39,820 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 12:09:00,867 INFO [train.py:903] (2/4) Epoch 8, batch 2500, loss[loss=0.271, simple_loss=0.3322, pruned_loss=0.105, over 19668.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.322, pruned_loss=0.09282, over 3829359.23 frames. ], batch size: 55, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:09:56,380 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9811, 1.5967, 1.5230, 2.0721, 1.8853, 1.7114, 1.4422, 1.8930], + device='cuda:2'), covar=tensor([0.0820, 0.1427, 0.1326, 0.0912, 0.1009, 0.0467, 0.1122, 0.0633], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0354, 0.0284, 0.0236, 0.0293, 0.0237, 0.0267, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 12:10:03,029 INFO [train.py:903] (2/4) Epoch 8, batch 2550, loss[loss=0.2253, simple_loss=0.2936, pruned_loss=0.07846, over 19764.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3224, pruned_loss=0.09327, over 3816978.69 frames. ], batch size: 48, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:09,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.479e+02 6.803e+02 8.076e+02 1.672e+03, threshold=1.361e+03, percent-clipped=2.0 +2023-04-01 12:10:59,287 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 12:11:05,095 INFO [train.py:903] (2/4) Epoch 8, batch 2600, loss[loss=0.2569, simple_loss=0.3309, pruned_loss=0.09141, over 19661.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3215, pruned_loss=0.09295, over 3821736.23 frames. ], batch size: 58, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:09,320 INFO [train.py:903] (2/4) Epoch 8, batch 2650, loss[loss=0.2559, simple_loss=0.3165, pruned_loss=0.09765, over 19787.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3223, pruned_loss=0.0935, over 3811124.04 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:15,979 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.098e+02 6.836e+02 8.198e+02 1.046e+03 1.620e+03, threshold=1.640e+03, percent-clipped=8.0 +2023-04-01 12:12:27,571 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 12:12:35,151 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-04-01 12:13:04,665 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:11,319 INFO [train.py:903] (2/4) Epoch 8, batch 2700, loss[loss=0.2248, simple_loss=0.2832, pruned_loss=0.08321, over 19751.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3214, pruned_loss=0.09292, over 3821709.31 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:13:16,522 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50499.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:17,738 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3744, 1.3904, 1.8843, 1.5495, 3.1561, 2.7668, 3.4299, 1.6611], + device='cuda:2'), covar=tensor([0.2098, 0.3553, 0.2054, 0.1713, 0.1482, 0.1537, 0.1417, 0.3037], + device='cuda:2'), in_proj_covar=tensor([0.0462, 0.0535, 0.0538, 0.0416, 0.0577, 0.0469, 0.0632, 0.0466], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:13:46,638 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:15,285 INFO [train.py:903] (2/4) Epoch 8, batch 2750, loss[loss=0.2505, simple_loss=0.3215, pruned_loss=0.08981, over 19673.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3216, pruned_loss=0.09301, over 3830936.01 frames. ], batch size: 53, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:14:23,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 5.912e+02 7.181e+02 9.047e+02 1.864e+03, threshold=1.436e+03, percent-clipped=1.0 +2023-04-01 12:14:30,895 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50559.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:14:37,816 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50564.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:45,514 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3298, 1.2718, 1.5387, 1.8071, 2.9150, 1.0219, 2.1321, 3.1350], + device='cuda:2'), covar=tensor([0.0468, 0.2584, 0.2367, 0.1338, 0.0669, 0.2234, 0.1104, 0.0365], + device='cuda:2'), in_proj_covar=tensor([0.0317, 0.0323, 0.0332, 0.0296, 0.0327, 0.0316, 0.0302, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 12:15:16,202 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1889, 2.0893, 1.7287, 1.5464, 1.5199, 1.6577, 0.3098, 0.9447], + device='cuda:2'), covar=tensor([0.0326, 0.0340, 0.0277, 0.0453, 0.0737, 0.0458, 0.0724, 0.0628], + device='cuda:2'), in_proj_covar=tensor([0.0318, 0.0315, 0.0319, 0.0334, 0.0409, 0.0331, 0.0298, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 12:15:18,013 INFO [train.py:903] (2/4) Epoch 8, batch 2800, loss[loss=0.2475, simple_loss=0.3189, pruned_loss=0.08798, over 19670.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3216, pruned_loss=0.09352, over 3825096.62 frames. 
], batch size: 55, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:15:29,744 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50605.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:16:06,385 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9129, 3.5304, 2.2755, 3.1848, 0.7692, 3.3055, 3.3149, 3.4075], + device='cuda:2'), covar=tensor([0.0814, 0.1229, 0.2141, 0.0844, 0.4074, 0.0916, 0.0784, 0.1157], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0341, 0.0406, 0.0297, 0.0363, 0.0325, 0.0315, 0.0354], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 12:16:21,990 INFO [train.py:903] (2/4) Epoch 8, batch 2850, loss[loss=0.2905, simple_loss=0.3478, pruned_loss=0.1167, over 19381.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3223, pruned_loss=0.09359, over 3822688.70 frames. ], batch size: 70, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:16:31,182 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 5.617e+02 7.073e+02 8.787e+02 1.544e+03, threshold=1.415e+03, percent-clipped=2.0 +2023-04-01 12:16:57,640 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50674.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:17:03,981 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-01 12:17:19,586 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8211, 1.8360, 1.9665, 2.6655, 1.8479, 2.5253, 2.3344, 1.8227], + device='cuda:2'), covar=tensor([0.2835, 0.2191, 0.1177, 0.1272, 0.2504, 0.1003, 0.2573, 0.2163], + device='cuda:2'), in_proj_covar=tensor([0.0724, 0.0731, 0.0612, 0.0848, 0.0731, 0.0633, 0.0755, 0.0652], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:17:26,001 INFO [train.py:903] (2/4) Epoch 8, batch 2900, loss[loss=0.2508, simple_loss=0.31, pruned_loss=0.09579, over 19625.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.322, pruned_loss=0.0933, over 3824383.30 frames. ], batch size: 50, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:17:26,048 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 12:17:39,504 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5915, 1.5305, 1.3447, 2.0029, 1.6254, 1.9763, 2.0979, 1.7775], + device='cuda:2'), covar=tensor([0.0938, 0.1053, 0.1203, 0.0980, 0.0988, 0.0824, 0.0862, 0.0794], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0233, 0.0233, 0.0262, 0.0247, 0.0218, 0.0210, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 12:18:29,364 INFO [train.py:903] (2/4) Epoch 8, batch 2950, loss[loss=0.269, simple_loss=0.3393, pruned_loss=0.09928, over 19293.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3228, pruned_loss=0.09354, over 3808035.80 frames. ], batch size: 66, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:18:37,517 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 6.074e+02 7.951e+02 1.027e+03 2.467e+03, threshold=1.590e+03, percent-clipped=7.0 +2023-04-01 12:19:31,429 INFO [train.py:903] (2/4) Epoch 8, batch 3000, loss[loss=0.2856, simple_loss=0.3475, pruned_loss=0.1118, over 19771.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3229, pruned_loss=0.09389, over 3827464.40 frames. 
], batch size: 56, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:19:31,429 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 12:19:44,076 INFO [train.py:937] (2/4) Epoch 8, validation: loss=0.1875, simple_loss=0.2879, pruned_loss=0.04358, over 944034.00 frames. +2023-04-01 12:19:44,077 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18546MB +2023-04-01 12:19:46,424 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 12:19:48,013 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9444, 1.8274, 1.4773, 1.9628, 1.8557, 1.6280, 1.4094, 1.8279], + device='cuda:2'), covar=tensor([0.0981, 0.1565, 0.1505, 0.1065, 0.1281, 0.0629, 0.1349, 0.0685], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0355, 0.0286, 0.0241, 0.0297, 0.0239, 0.0272, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 12:19:49,176 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50800.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:20:21,760 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50826.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:20:45,723 INFO [train.py:903] (2/4) Epoch 8, batch 3050, loss[loss=0.2285, simple_loss=0.2968, pruned_loss=0.08009, over 19427.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3222, pruned_loss=0.09336, over 3827480.14 frames. ], batch size: 48, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:20:55,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 5.734e+02 7.199e+02 9.163e+02 1.650e+03, threshold=1.440e+03, percent-clipped=2.0 +2023-04-01 12:21:05,877 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8398, 1.9183, 1.5528, 3.3507, 2.1240, 2.9853, 2.3316, 1.3896], + device='cuda:2'), covar=tensor([0.3578, 0.2833, 0.1848, 0.1895, 0.3210, 0.1312, 0.3300, 0.3250], + device='cuda:2'), in_proj_covar=tensor([0.0734, 0.0743, 0.0618, 0.0864, 0.0741, 0.0645, 0.0765, 0.0663], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:21:06,922 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:36,803 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:49,973 INFO [train.py:903] (2/4) Epoch 8, batch 3100, loss[loss=0.2213, simple_loss=0.2876, pruned_loss=0.07751, over 18996.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3233, pruned_loss=0.09371, over 3821278.60 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:22:01,845 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 12:22:04,756 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:22:32,055 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50930.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:22:52,354 INFO [train.py:903] (2/4) Epoch 8, batch 3150, loss[loss=0.2971, simple_loss=0.3614, pruned_loss=0.1165, over 18178.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3213, pruned_loss=0.09265, over 3834836.00 frames. 
], batch size: 83, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:23:00,492 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 5.915e+02 7.023e+02 8.955e+02 1.571e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-01 12:23:03,146 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50955.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:23:14,707 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:23:20,378 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 12:23:54,305 INFO [train.py:903] (2/4) Epoch 8, batch 3200, loss[loss=0.2361, simple_loss=0.314, pruned_loss=0.0791, over 19533.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3217, pruned_loss=0.09285, over 3830770.11 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:24:30,138 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51023.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:24:57,558 INFO [train.py:903] (2/4) Epoch 8, batch 3250, loss[loss=0.2218, simple_loss=0.2882, pruned_loss=0.07765, over 19775.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3216, pruned_loss=0.0929, over 3844889.03 frames. ], batch size: 48, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:25:05,788 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 6.087e+02 7.883e+02 9.942e+02 3.174e+03, threshold=1.577e+03, percent-clipped=7.0 +2023-04-01 12:25:40,468 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:26:00,808 INFO [train.py:903] (2/4) Epoch 8, batch 3300, loss[loss=0.2331, simple_loss=0.3109, pruned_loss=0.07766, over 19067.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3205, pruned_loss=0.09238, over 3832827.08 frames. ], batch size: 69, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:26:08,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 12:27:02,462 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51144.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:27:04,795 INFO [train.py:903] (2/4) Epoch 8, batch 3350, loss[loss=0.2821, simple_loss=0.3442, pruned_loss=0.11, over 19774.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.321, pruned_loss=0.09298, over 3826079.64 frames. 
], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:27:12,718 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.942e+02 7.377e+02 9.279e+02 2.136e+03, threshold=1.475e+03, percent-clipped=2.0 +2023-04-01 12:27:34,020 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51170.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:27:47,838 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1976, 1.2989, 1.2215, 0.9700, 1.0582, 1.0361, 0.1060, 0.3713], + device='cuda:2'), covar=tensor([0.0366, 0.0393, 0.0231, 0.0318, 0.0785, 0.0327, 0.0647, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0321, 0.0318, 0.0315, 0.0331, 0.0408, 0.0333, 0.0294, 0.0314], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 12:28:00,628 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51190.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:28:07,188 INFO [train.py:903] (2/4) Epoch 8, batch 3400, loss[loss=0.2614, simple_loss=0.3321, pruned_loss=0.09538, over 18168.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3215, pruned_loss=0.09294, over 3844741.42 frames. ], batch size: 83, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:05,386 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 12:29:10,671 INFO [train.py:903] (2/4) Epoch 8, batch 3450, loss[loss=0.2557, simple_loss=0.3288, pruned_loss=0.09136, over 19629.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3232, pruned_loss=0.09373, over 3840067.69 frames. ], batch size: 61, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:12,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-01 12:29:16,201 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 12:29:18,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+02 6.280e+02 7.477e+02 9.686e+02 1.820e+03, threshold=1.495e+03, percent-clipped=3.0 +2023-04-01 12:29:27,078 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:29:52,018 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:00,132 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51285.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:30:12,377 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 12:30:12,709 INFO [train.py:903] (2/4) Epoch 8, batch 3500, loss[loss=0.2902, simple_loss=0.3522, pruned_loss=0.1142, over 18725.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3217, pruned_loss=0.09313, over 3837339.18 frames. ], batch size: 74, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:30:25,521 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51304.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:30,177 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:31,844 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-04-01 12:31:19,053 INFO [train.py:903] (2/4) Epoch 8, batch 3550, loss[loss=0.2293, simple_loss=0.3051, pruned_loss=0.07672, over 19536.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3208, pruned_loss=0.09181, over 3838563.83 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:31:27,383 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.209e+02 6.396e+02 8.561e+02 1.899e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-01 12:32:21,069 INFO [train.py:903] (2/4) Epoch 8, batch 3600, loss[loss=0.2474, simple_loss=0.3263, pruned_loss=0.08421, over 19654.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3213, pruned_loss=0.0925, over 3842756.80 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:32:26,123 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51400.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:38,772 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:55,858 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:56,821 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:33:22,341 INFO [train.py:903] (2/4) Epoch 8, batch 3650, loss[loss=0.3279, simple_loss=0.3726, pruned_loss=0.1416, over 13366.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3214, pruned_loss=0.09271, over 3831757.48 frames. ], batch size: 136, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:33:31,507 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 6.415e+02 7.779e+02 9.918e+02 2.619e+03, threshold=1.556e+03, percent-clipped=14.0 +2023-04-01 12:34:24,447 INFO [train.py:903] (2/4) Epoch 8, batch 3700, loss[loss=0.2348, simple_loss=0.2934, pruned_loss=0.08809, over 19763.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3225, pruned_loss=0.09391, over 3827200.82 frames. ], batch size: 47, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:34:46,879 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:34:49,325 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51515.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:12,259 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51534.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:35:18,892 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51539.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:20,142 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51540.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:22,204 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51541.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:35:28,599 INFO [train.py:903] (2/4) Epoch 8, batch 3750, loss[loss=0.2351, simple_loss=0.3096, pruned_loss=0.08026, over 19670.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3236, pruned_loss=0.09439, over 3820159.55 frames. 
], batch size: 58, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:35:36,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.985e+02 5.907e+02 7.282e+02 9.270e+02 2.268e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:35:52,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51566.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:36:30,195 INFO [train.py:903] (2/4) Epoch 8, batch 3800, loss[loss=0.2488, simple_loss=0.3263, pruned_loss=0.08564, over 19321.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3231, pruned_loss=0.09411, over 3818985.66 frames. ], batch size: 66, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:02,504 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 12:37:31,373 INFO [train.py:903] (2/4) Epoch 8, batch 3850, loss[loss=0.2684, simple_loss=0.3421, pruned_loss=0.09732, over 19751.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3229, pruned_loss=0.09397, over 3814433.96 frames. ], batch size: 63, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:35,084 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51649.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:37:40,053 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.568e+02 6.155e+02 7.716e+02 1.023e+03 2.199e+03, threshold=1.543e+03, percent-clipped=8.0 +2023-04-01 12:38:13,442 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51679.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:38:33,778 INFO [train.py:903] (2/4) Epoch 8, batch 3900, loss[loss=0.2401, simple_loss=0.3184, pruned_loss=0.08089, over 19782.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3224, pruned_loss=0.09319, over 3818398.54 frames. ], batch size: 56, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:38:45,000 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51704.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:33,700 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51744.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:37,152 INFO [train.py:903] (2/4) Epoch 8, batch 3950, loss[loss=0.234, simple_loss=0.2999, pruned_loss=0.08405, over 19609.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.323, pruned_loss=0.09376, over 3814180.31 frames. ], batch size: 50, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:39:41,701 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 12:39:45,222 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+02 5.815e+02 7.280e+02 9.203e+02 2.422e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:39:46,652 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:59,576 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51765.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:37,963 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:38,734 INFO [train.py:903] (2/4) Epoch 8, batch 4000, loss[loss=0.2083, simple_loss=0.2879, pruned_loss=0.06439, over 19780.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3228, pruned_loss=0.09361, over 3823563.43 frames. 
], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:09,873 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:27,811 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 12:41:41,698 INFO [train.py:903] (2/4) Epoch 8, batch 4050, loss[loss=0.3271, simple_loss=0.3695, pruned_loss=0.1423, over 13302.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3226, pruned_loss=0.09378, over 3813063.35 frames. ], batch size: 135, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:50,761 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.576e+02 5.742e+02 7.614e+02 9.901e+02 2.045e+03, threshold=1.523e+03, percent-clipped=5.0 +2023-04-01 12:41:56,453 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:59,721 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51859.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:11,952 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51869.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:43,860 INFO [train.py:903] (2/4) Epoch 8, batch 4100, loss[loss=0.2598, simple_loss=0.3161, pruned_loss=0.1017, over 19399.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3225, pruned_loss=0.09357, over 3802565.77 frames. ], batch size: 48, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:42:56,253 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51905.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:43:21,458 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 12:43:26,477 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51930.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:43:39,198 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-01 12:43:47,764 INFO [train.py:903] (2/4) Epoch 8, batch 4150, loss[loss=0.2269, simple_loss=0.2958, pruned_loss=0.07899, over 19661.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3218, pruned_loss=0.09329, over 3803913.71 frames. ], batch size: 55, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:43:56,793 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.129e+02 6.343e+02 7.798e+02 9.790e+02 2.215e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 12:44:19,763 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:44:50,690 INFO [train.py:903] (2/4) Epoch 8, batch 4200, loss[loss=0.2255, simple_loss=0.3048, pruned_loss=0.0731, over 19790.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3221, pruned_loss=0.09258, over 3813907.97 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:44:57,642 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-01 12:45:04,789 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2309, 4.2461, 4.7642, 4.7390, 2.6630, 4.4059, 4.0486, 4.4417], + device='cuda:2'), covar=tensor([0.0957, 0.2147, 0.0439, 0.0463, 0.3535, 0.0535, 0.0466, 0.0873], + device='cuda:2'), in_proj_covar=tensor([0.0600, 0.0526, 0.0712, 0.0601, 0.0666, 0.0461, 0.0450, 0.0661], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 12:45:53,324 INFO [train.py:903] (2/4) Epoch 8, batch 4250, loss[loss=0.2064, simple_loss=0.2818, pruned_loss=0.06557, over 19372.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3218, pruned_loss=0.09216, over 3822595.01 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:45:57,237 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1274, 3.5243, 2.0516, 2.0995, 3.0917, 1.6912, 1.4157, 2.1048], + device='cuda:2'), covar=tensor([0.1009, 0.0410, 0.0814, 0.0644, 0.0412, 0.0888, 0.0761, 0.0579], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0290, 0.0314, 0.0241, 0.0226, 0.0308, 0.0284, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 12:46:01,321 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 5.455e+02 6.472e+02 8.916e+02 2.597e+03, threshold=1.294e+03, percent-clipped=4.0 +2023-04-01 12:46:08,481 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:46:11,540 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 12:46:18,995 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 12:46:21,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 12:46:55,487 INFO [train.py:903] (2/4) Epoch 8, batch 4300, loss[loss=0.249, simple_loss=0.317, pruned_loss=0.09046, over 19850.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3207, pruned_loss=0.09134, over 3819427.43 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:47:14,072 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52109.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:22,372 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:31,704 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7894, 2.1417, 2.3058, 2.6105, 2.5584, 2.3296, 2.1466, 2.6270], + device='cuda:2'), covar=tensor([0.0720, 0.1596, 0.1135, 0.0902, 0.1053, 0.0418, 0.0946, 0.0546], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0350, 0.0282, 0.0234, 0.0295, 0.0238, 0.0267, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 12:47:34,077 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:51,785 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-01 12:47:53,342 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:00,442 INFO [train.py:903] (2/4) Epoch 8, batch 4350, loss[loss=0.2624, simple_loss=0.3329, pruned_loss=0.09597, over 18848.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3199, pruned_loss=0.09054, over 3834591.79 frames. ], batch size: 74, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:48:06,453 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:09,401 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.672e+02 7.291e+02 9.101e+02 1.997e+03, threshold=1.458e+03, percent-clipped=8.0 +2023-04-01 12:48:48,790 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-01 12:49:03,054 INFO [train.py:903] (2/4) Epoch 8, batch 4400, loss[loss=0.2198, simple_loss=0.2887, pruned_loss=0.07544, over 19385.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3202, pruned_loss=0.0907, over 3841588.77 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:49:21,169 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52211.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:26,795 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 12:49:38,195 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 12:49:38,576 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:43,342 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:01,072 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-01 12:50:05,240 INFO [train.py:903] (2/4) Epoch 8, batch 4450, loss[loss=0.2194, simple_loss=0.2828, pruned_loss=0.07799, over 19738.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3194, pruned_loss=0.0909, over 3834719.54 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:50:13,313 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.880e+02 7.086e+02 8.839e+02 1.936e+03, threshold=1.417e+03, percent-clipped=3.0 +2023-04-01 12:50:13,764 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:23,898 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2602, 2.9484, 1.9936, 2.1812, 2.0287, 2.4210, 0.7719, 2.0764], + device='cuda:2'), covar=tensor([0.0447, 0.0367, 0.0497, 0.0652, 0.0660, 0.0668, 0.0884, 0.0663], + device='cuda:2'), in_proj_covar=tensor([0.0319, 0.0318, 0.0316, 0.0333, 0.0412, 0.0333, 0.0299, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 12:51:06,387 INFO [train.py:903] (2/4) Epoch 8, batch 4500, loss[loss=0.2518, simple_loss=0.3056, pruned_loss=0.099, over 17362.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3202, pruned_loss=0.09171, over 3826777.76 frames. 
], batch size: 38, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:51:26,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 12:51:50,123 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:52:10,336 INFO [train.py:903] (2/4) Epoch 8, batch 4550, loss[loss=0.2408, simple_loss=0.3141, pruned_loss=0.08379, over 18677.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3208, pruned_loss=0.09171, over 3830627.19 frames. ], batch size: 74, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:52:18,695 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.922e+02 7.010e+02 8.869e+02 1.679e+03, threshold=1.402e+03, percent-clipped=2.0 +2023-04-01 12:52:18,729 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 12:52:41,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 12:52:46,052 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 12:53:11,436 INFO [train.py:903] (2/4) Epoch 8, batch 4600, loss[loss=0.205, simple_loss=0.2753, pruned_loss=0.06737, over 19386.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3205, pruned_loss=0.09116, over 3836415.68 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:53:18,532 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:53:49,129 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1768, 1.2873, 1.8312, 1.4192, 2.7097, 2.0553, 2.7983, 0.9919], + device='cuda:2'), covar=tensor([0.2248, 0.3660, 0.2066, 0.1788, 0.1325, 0.1914, 0.1480, 0.3570], + device='cuda:2'), in_proj_covar=tensor([0.0469, 0.0546, 0.0544, 0.0422, 0.0581, 0.0474, 0.0643, 0.0470], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 12:54:12,887 INFO [train.py:903] (2/4) Epoch 8, batch 4650, loss[loss=0.2883, simple_loss=0.3494, pruned_loss=0.1136, over 19104.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3217, pruned_loss=0.09145, over 3826134.05 frames. ], batch size: 69, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:54:21,260 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 5.664e+02 6.903e+02 8.285e+02 1.576e+03, threshold=1.381e+03, percent-clipped=2.0 +2023-04-01 12:54:30,499 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 12:54:42,705 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 12:54:56,975 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:15,234 INFO [train.py:903] (2/4) Epoch 8, batch 4700, loss[loss=0.2335, simple_loss=0.3089, pruned_loss=0.07902, over 19581.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3203, pruned_loss=0.09079, over 3819566.14 frames. 
], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:55:27,917 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52505.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:39,745 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 12:55:43,532 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:18,366 INFO [train.py:903] (2/4) Epoch 8, batch 4750, loss[loss=0.2584, simple_loss=0.328, pruned_loss=0.09439, over 19683.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3201, pruned_loss=0.0909, over 3811246.97 frames. ], batch size: 60, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:56:29,694 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.555e+02 6.316e+02 7.348e+02 9.529e+02 1.491e+03, threshold=1.470e+03, percent-clipped=3.0 +2023-04-01 12:56:31,060 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:41,236 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 12:56:52,323 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52573.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:57:22,246 INFO [train.py:903] (2/4) Epoch 8, batch 4800, loss[loss=0.2469, simple_loss=0.3139, pruned_loss=0.08995, over 19670.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3196, pruned_loss=0.0908, over 3814401.03 frames. ], batch size: 55, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:57:26,438 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 12:58:22,762 INFO [train.py:903] (2/4) Epoch 8, batch 4850, loss[loss=0.2744, simple_loss=0.3431, pruned_loss=0.1029, over 19666.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3191, pruned_loss=0.09071, over 3828702.24 frames. ], batch size: 59, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:32,082 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.021e+02 7.604e+02 9.872e+02 2.114e+03, threshold=1.521e+03, percent-clipped=8.0 +2023-04-01 12:58:46,010 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 12:58:52,968 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52670.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:58:58,364 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:59:08,341 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 12:59:14,114 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 12:59:14,152 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 12:59:23,225 INFO [train.py:903] (2/4) Epoch 8, batch 4900, loss[loss=0.311, simple_loss=0.3622, pruned_loss=0.1299, over 19650.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3183, pruned_loss=0.0906, over 3835245.24 frames. ], batch size: 58, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:59:24,414 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 12:59:44,293 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 13:00:22,663 INFO [train.py:903] (2/4) Epoch 8, batch 4950, loss[loss=0.2484, simple_loss=0.3171, pruned_loss=0.08992, over 18183.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3202, pruned_loss=0.09158, over 3829611.74 frames. ], batch size: 83, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 13:00:35,712 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 6.374e+02 8.178e+02 1.048e+03 2.702e+03, threshold=1.636e+03, percent-clipped=11.0 +2023-04-01 13:00:40,338 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 13:00:42,035 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9741, 3.4449, 1.9304, 1.9285, 2.9846, 1.8038, 1.2609, 2.0011], + device='cuda:2'), covar=tensor([0.1087, 0.0473, 0.0936, 0.0746, 0.0448, 0.0889, 0.0947, 0.0599], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0291, 0.0313, 0.0242, 0.0229, 0.0311, 0.0287, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:00:53,282 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:00:56,960 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:04,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 13:01:17,629 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:26,607 INFO [train.py:903] (2/4) Epoch 8, batch 5000, loss[loss=0.2833, simple_loss=0.353, pruned_loss=0.1068, over 18790.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3194, pruned_loss=0.09106, over 3834390.11 frames. ], batch size: 74, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:01:29,351 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:35,990 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 13:01:41,333 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 13:01:47,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 13:02:28,522 INFO [train.py:903] (2/4) Epoch 8, batch 5050, loss[loss=0.2137, simple_loss=0.2735, pruned_loss=0.07697, over 19720.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3187, pruned_loss=0.09052, over 3829896.19 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:02:39,041 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 5.648e+02 7.062e+02 8.811e+02 1.795e+03, threshold=1.412e+03, percent-clipped=2.0 +2023-04-01 13:03:04,998 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 13:03:30,504 INFO [train.py:903] (2/4) Epoch 8, batch 5100, loss[loss=0.2676, simple_loss=0.3316, pruned_loss=0.1018, over 19682.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3187, pruned_loss=0.09077, over 3822922.14 frames. 
], batch size: 60, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:03:40,996 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 13:03:46,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 13:03:50,844 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 13:03:59,149 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52917.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:04:09,650 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:32,214 INFO [train.py:903] (2/4) Epoch 8, batch 5150, loss[loss=0.2869, simple_loss=0.348, pruned_loss=0.1129, over 19602.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3183, pruned_loss=0.09044, over 3831101.95 frames. ], batch size: 61, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:04:41,014 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:45,911 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.391e+02 6.060e+02 7.983e+02 1.041e+03 2.368e+03, threshold=1.597e+03, percent-clipped=6.0 +2023-04-01 13:04:47,191 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:05:11,099 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 13:05:19,609 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:05:37,037 INFO [train.py:903] (2/4) Epoch 8, batch 5200, loss[loss=0.2473, simple_loss=0.3206, pruned_loss=0.087, over 19676.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.32, pruned_loss=0.09142, over 3817960.07 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:05:42,297 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1814, 1.8020, 1.3396, 1.1290, 1.6211, 0.9965, 1.1039, 1.5983], + device='cuda:2'), covar=tensor([0.0614, 0.0638, 0.0877, 0.0604, 0.0409, 0.1091, 0.0571, 0.0317], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0293, 0.0315, 0.0242, 0.0228, 0.0315, 0.0288, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:05:51,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 13:06:21,601 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53032.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:06:36,724 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 13:06:38,318 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:06:39,063 INFO [train.py:903] (2/4) Epoch 8, batch 5250, loss[loss=0.2588, simple_loss=0.3198, pruned_loss=0.09892, over 19849.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3202, pruned_loss=0.09149, over 3826499.63 frames. 
], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:06:49,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.973e+02 7.081e+02 8.822e+02 3.028e+03, threshold=1.416e+03, percent-clipped=2.0 +2023-04-01 13:07:07,188 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3216, 2.1247, 1.8002, 1.7379, 1.4651, 1.6847, 0.4491, 1.0260], + device='cuda:2'), covar=tensor([0.0324, 0.0380, 0.0302, 0.0476, 0.0765, 0.0508, 0.0718, 0.0656], + device='cuda:2'), in_proj_covar=tensor([0.0321, 0.0322, 0.0318, 0.0337, 0.0414, 0.0332, 0.0298, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:07:08,274 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:13,808 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:39,288 INFO [train.py:903] (2/4) Epoch 8, batch 5300, loss[loss=0.2504, simple_loss=0.3153, pruned_loss=0.09278, over 19666.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3212, pruned_loss=0.09213, over 3819618.45 frames. ], batch size: 53, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:07:57,434 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 13:08:03,076 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:08:41,250 INFO [train.py:903] (2/4) Epoch 8, batch 5350, loss[loss=0.2474, simple_loss=0.3209, pruned_loss=0.08698, over 19443.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3204, pruned_loss=0.09126, over 3825164.62 frames. ], batch size: 64, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:08:52,776 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 6.176e+02 7.478e+02 9.376e+02 1.338e+03, threshold=1.496e+03, percent-clipped=0.0 +2023-04-01 13:09:18,550 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 13:09:44,005 INFO [train.py:903] (2/4) Epoch 8, batch 5400, loss[loss=0.2195, simple_loss=0.2966, pruned_loss=0.07122, over 19731.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3197, pruned_loss=0.09077, over 3841800.75 frames. 
], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:24,900 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:10:31,356 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0238, 1.6013, 1.7104, 1.8956, 1.6988, 1.8021, 1.6579, 1.9111], + device='cuda:2'), covar=tensor([0.0797, 0.1357, 0.1133, 0.0899, 0.1111, 0.0456, 0.0971, 0.0558], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0348, 0.0282, 0.0236, 0.0294, 0.0238, 0.0268, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:10:37,060 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4129, 3.1662, 2.3661, 2.4828, 2.0153, 2.4801, 0.9800, 2.0241], + device='cuda:2'), covar=tensor([0.0352, 0.0304, 0.0414, 0.0536, 0.0712, 0.0553, 0.0753, 0.0693], + device='cuda:2'), in_proj_covar=tensor([0.0317, 0.0316, 0.0315, 0.0330, 0.0408, 0.0329, 0.0293, 0.0311], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:10:47,201 INFO [train.py:903] (2/4) Epoch 8, batch 5450, loss[loss=0.203, simple_loss=0.2734, pruned_loss=0.06627, over 19788.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3196, pruned_loss=0.09065, over 3831323.92 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:57,345 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.768e+02 7.150e+02 9.218e+02 2.127e+03, threshold=1.430e+03, percent-clipped=3.0 +2023-04-01 13:11:02,310 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:11:09,974 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 13:11:40,427 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53288.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:11:48,913 INFO [train.py:903] (2/4) Epoch 8, batch 5500, loss[loss=0.245, simple_loss=0.3074, pruned_loss=0.09127, over 19800.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.318, pruned_loss=0.08977, over 3832919.33 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:11,196 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53313.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:12:16,565 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 13:12:23,262 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:34,513 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7618, 1.3941, 1.3490, 1.6326, 1.5129, 1.6049, 1.4251, 1.6545], + device='cuda:2'), covar=tensor([0.0849, 0.1263, 0.1299, 0.0931, 0.1034, 0.0481, 0.1019, 0.0643], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0351, 0.0286, 0.0239, 0.0296, 0.0240, 0.0269, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:12:35,590 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53333.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:50,185 INFO [train.py:903] (2/4) Epoch 8, batch 5550, loss[loss=0.2404, simple_loss=0.3025, pruned_loss=0.08915, over 19427.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3179, pruned_loss=0.08965, over 3846803.93 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:58,965 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.6395, 0.8772, 0.6883, 0.6470, 0.8140, 0.6229, 0.6162, 0.7759], + device='cuda:2'), covar=tensor([0.0339, 0.0444, 0.0569, 0.0329, 0.0285, 0.0722, 0.0365, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0294, 0.0315, 0.0243, 0.0227, 0.0316, 0.0288, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:12:59,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 13:13:03,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 6.394e+02 7.821e+02 9.803e+02 2.197e+03, threshold=1.564e+03, percent-clipped=2.0 +2023-04-01 13:13:15,684 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0416, 5.3854, 3.2134, 4.7794, 1.3498, 5.1720, 5.3052, 5.5168], + device='cuda:2'), covar=tensor([0.0444, 0.1015, 0.1652, 0.0543, 0.3729, 0.0708, 0.0691, 0.0869], + device='cuda:2'), in_proj_covar=tensor([0.0406, 0.0349, 0.0412, 0.0303, 0.0369, 0.0338, 0.0328, 0.0365], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 13:13:40,392 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:13:50,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 13:13:53,924 INFO [train.py:903] (2/4) Epoch 8, batch 5600, loss[loss=0.2083, simple_loss=0.2907, pruned_loss=0.06297, over 19492.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3188, pruned_loss=0.09044, over 3832924.03 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:14:21,360 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:14:57,499 INFO [train.py:903] (2/4) Epoch 8, batch 5650, loss[loss=0.317, simple_loss=0.3666, pruned_loss=0.1337, over 13018.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3186, pruned_loss=0.09082, over 3828885.41 frames. 
], batch size: 136, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:15:07,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.837e+02 5.961e+02 7.306e+02 9.270e+02 2.985e+03, threshold=1.461e+03, percent-clipped=1.0 +2023-04-01 13:15:08,348 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6067, 1.6796, 1.7329, 2.2229, 1.4027, 1.8149, 2.0344, 1.7418], + device='cuda:2'), covar=tensor([0.2712, 0.2126, 0.1183, 0.1180, 0.2462, 0.1120, 0.2749, 0.2050], + device='cuda:2'), in_proj_covar=tensor([0.0729, 0.0739, 0.0611, 0.0862, 0.0735, 0.0644, 0.0764, 0.0662], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:15:45,797 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 13:15:46,223 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:15:54,383 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2855, 1.3649, 1.7116, 1.1520, 2.5312, 3.1285, 2.9066, 3.2295], + device='cuda:2'), covar=tensor([0.1458, 0.2968, 0.2796, 0.2143, 0.0604, 0.0300, 0.0220, 0.0211], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0286, 0.0316, 0.0246, 0.0206, 0.0140, 0.0204, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:15:58,761 INFO [train.py:903] (2/4) Epoch 8, batch 5700, loss[loss=0.295, simple_loss=0.354, pruned_loss=0.1181, over 18846.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3196, pruned_loss=0.09156, over 3826255.98 frames. ], batch size: 74, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:16:15,384 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:44,978 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:59,703 INFO [train.py:903] (2/4) Epoch 8, batch 5750, loss[loss=0.2781, simple_loss=0.3464, pruned_loss=0.1049, over 19117.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3183, pruned_loss=0.09042, over 3823567.18 frames. ], batch size: 69, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:17:00,963 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 13:17:10,466 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 13:17:11,665 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 5.699e+02 6.647e+02 8.184e+02 1.829e+03, threshold=1.329e+03, percent-clipped=1.0 +2023-04-01 13:17:15,844 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 13:17:22,038 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 13:18:02,105 INFO [train.py:903] (2/4) Epoch 8, batch 5800, loss[loss=0.2167, simple_loss=0.2969, pruned_loss=0.06831, over 19596.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3194, pruned_loss=0.09063, over 3833726.20 frames. 
], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:18:12,764 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:18:19,788 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0247, 1.3207, 0.9898, 0.8983, 1.1821, 0.8583, 1.0538, 1.2613], + device='cuda:2'), covar=tensor([0.0439, 0.0654, 0.0948, 0.0527, 0.0440, 0.1095, 0.0433, 0.0358], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0294, 0.0318, 0.0245, 0.0229, 0.0319, 0.0289, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:19:04,522 INFO [train.py:903] (2/4) Epoch 8, batch 5850, loss[loss=0.2267, simple_loss=0.2968, pruned_loss=0.0783, over 19624.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3196, pruned_loss=0.09064, over 3833769.60 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:19:15,050 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.371e+02 7.746e+02 9.220e+02 2.993e+03, threshold=1.549e+03, percent-clipped=10.0 +2023-04-01 13:19:27,627 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:42,002 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:05,079 INFO [train.py:903] (2/4) Epoch 8, batch 5900, loss[loss=0.2014, simple_loss=0.2799, pruned_loss=0.06143, over 19496.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3201, pruned_loss=0.09092, over 3820492.51 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:20:09,581 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 13:20:30,243 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 13:20:32,934 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:47,003 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53729.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:21:06,834 INFO [train.py:903] (2/4) Epoch 8, batch 5950, loss[loss=0.2656, simple_loss=0.3337, pruned_loss=0.09881, over 19711.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3201, pruned_loss=0.0913, over 3814051.00 frames. ], batch size: 59, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:21:19,050 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 5.886e+02 7.195e+02 1.025e+03 2.007e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-04-01 13:21:50,821 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:00,060 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:04,791 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:09,983 INFO [train.py:903] (2/4) Epoch 8, batch 6000, loss[loss=0.2308, simple_loss=0.3056, pruned_loss=0.07799, over 19845.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3196, pruned_loss=0.09093, over 3810842.13 frames. 
], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:22:09,983 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 13:22:22,643 INFO [train.py:937] (2/4) Epoch 8, validation: loss=0.1864, simple_loss=0.2865, pruned_loss=0.04314, over 944034.00 frames. +2023-04-01 13:22:22,644 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 13:22:30,387 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1632, 1.2765, 1.9461, 1.6316, 3.0023, 4.7816, 4.6401, 4.9667], + device='cuda:2'), covar=tensor([0.1580, 0.3265, 0.2840, 0.1787, 0.0460, 0.0124, 0.0142, 0.0111], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0288, 0.0317, 0.0246, 0.0208, 0.0140, 0.0205, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:22:48,378 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:24,788 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53844.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:26,609 INFO [train.py:903] (2/4) Epoch 8, batch 6050, loss[loss=0.2847, simple_loss=0.3483, pruned_loss=0.1105, over 19703.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3195, pruned_loss=0.0909, over 3805380.82 frames. ], batch size: 59, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:23:39,145 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.583e+02 7.013e+02 9.917e+02 2.418e+03, threshold=1.403e+03, percent-clipped=8.0 +2023-04-01 13:23:47,831 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8313, 1.8085, 1.8699, 2.4789, 1.6284, 2.1366, 2.2257, 1.8830], + device='cuda:2'), covar=tensor([0.2773, 0.2468, 0.1237, 0.1333, 0.2714, 0.1219, 0.2708, 0.2126], + device='cuda:2'), in_proj_covar=tensor([0.0730, 0.0741, 0.0613, 0.0865, 0.0742, 0.0648, 0.0765, 0.0664], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:24:30,156 INFO [train.py:903] (2/4) Epoch 8, batch 6100, loss[loss=0.2486, simple_loss=0.3094, pruned_loss=0.0939, over 19424.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3187, pruned_loss=0.09049, over 3808710.27 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:24:50,453 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-01 13:25:31,569 INFO [train.py:903] (2/4) Epoch 8, batch 6150, loss[loss=0.3265, simple_loss=0.3653, pruned_loss=0.1439, over 13263.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3186, pruned_loss=0.09062, over 3809904.05 frames. 
], batch size: 136, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:25:36,481 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9127, 1.5272, 1.5354, 2.2148, 1.6824, 2.2189, 2.1325, 1.9769], + device='cuda:2'), covar=tensor([0.0712, 0.0947, 0.0999, 0.0911, 0.0916, 0.0654, 0.0915, 0.0611], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0230, 0.0229, 0.0257, 0.0245, 0.0213, 0.0209, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 13:25:42,181 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.923e+02 5.703e+02 7.303e+02 8.849e+02 1.874e+03, threshold=1.461e+03, percent-clipped=4.0 +2023-04-01 13:25:56,500 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 13:25:57,091 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.49 vs. limit=5.0 +2023-04-01 13:26:07,946 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:33,362 INFO [train.py:903] (2/4) Epoch 8, batch 6200, loss[loss=0.2508, simple_loss=0.3131, pruned_loss=0.0943, over 19619.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.32, pruned_loss=0.09189, over 3797876.73 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:26:38,498 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:38,757 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 13:27:22,539 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-01 13:27:25,506 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54037.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:37,622 INFO [train.py:903] (2/4) Epoch 8, batch 6250, loss[loss=0.2319, simple_loss=0.2918, pruned_loss=0.08596, over 19388.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3206, pruned_loss=0.09236, over 3794740.43 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:27:40,323 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:49,337 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.888e+02 5.723e+02 7.068e+02 9.572e+02 2.133e+03, threshold=1.414e+03, percent-clipped=6.0 +2023-04-01 13:27:53,692 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 13:27:58,063 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:04,592 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 13:28:10,776 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:12,248 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 13:28:40,883 INFO [train.py:903] (2/4) Epoch 8, batch 6300, loss[loss=0.2543, simple_loss=0.3272, pruned_loss=0.09067, over 17261.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3204, pruned_loss=0.09165, over 3795665.50 frames. 
], batch size: 101, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:28:45,608 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:16,163 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:41,754 INFO [train.py:903] (2/4) Epoch 8, batch 6350, loss[loss=0.2983, simple_loss=0.3638, pruned_loss=0.1164, over 19353.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3193, pruned_loss=0.09145, over 3787379.51 frames. ], batch size: 70, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:29:52,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.055e+02 7.520e+02 8.667e+02 2.456e+03, threshold=1.504e+03, percent-clipped=3.0 +2023-04-01 13:29:54,716 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7569, 1.4463, 1.4737, 1.9340, 1.5667, 2.0999, 2.0161, 1.9070], + device='cuda:2'), covar=tensor([0.0779, 0.0925, 0.0991, 0.0902, 0.0916, 0.0587, 0.0789, 0.0572], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0231, 0.0227, 0.0254, 0.0244, 0.0213, 0.0205, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 13:30:36,810 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1223, 1.6794, 1.7862, 2.1295, 1.9854, 1.9656, 1.8227, 2.0141], + device='cuda:2'), covar=tensor([0.0849, 0.1642, 0.1232, 0.0939, 0.1104, 0.0443, 0.0975, 0.0635], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0361, 0.0288, 0.0240, 0.0298, 0.0243, 0.0273, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:30:43,419 INFO [train.py:903] (2/4) Epoch 8, batch 6400, loss[loss=0.2367, simple_loss=0.2916, pruned_loss=0.09091, over 19760.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3189, pruned_loss=0.09119, over 3809690.06 frames. ], batch size: 45, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:45,991 INFO [train.py:903] (2/4) Epoch 8, batch 6450, loss[loss=0.2106, simple_loss=0.2725, pruned_loss=0.07441, over 19046.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3174, pruned_loss=0.08998, over 3813651.17 frames. ], batch size: 42, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:58,336 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 5.571e+02 6.761e+02 8.500e+02 1.702e+03, threshold=1.352e+03, percent-clipped=3.0 +2023-04-01 13:32:23,619 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3060, 2.9818, 2.1613, 2.7448, 0.8611, 2.8103, 2.7678, 2.8861], + device='cuda:2'), covar=tensor([0.0991, 0.1308, 0.1997, 0.0939, 0.3738, 0.1065, 0.0910, 0.1235], + device='cuda:2'), in_proj_covar=tensor([0.0402, 0.0341, 0.0405, 0.0301, 0.0367, 0.0331, 0.0324, 0.0359], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 13:32:24,406 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-01 13:32:30,258 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 13:32:48,744 INFO [train.py:903] (2/4) Epoch 8, batch 6500, loss[loss=0.2318, simple_loss=0.3091, pruned_loss=0.07727, over 19779.00 frames. 
], tot_loss[loss=0.2497, simple_loss=0.318, pruned_loss=0.09072, over 3816067.42 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:32:54,441 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 13:32:54,805 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2819, 1.3599, 1.7579, 1.4169, 2.5755, 2.1554, 2.6302, 0.9375], + device='cuda:2'), covar=tensor([0.1986, 0.3415, 0.1889, 0.1653, 0.1225, 0.1677, 0.1285, 0.3361], + device='cuda:2'), in_proj_covar=tensor([0.0471, 0.0549, 0.0555, 0.0423, 0.0581, 0.0477, 0.0645, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:33:50,183 INFO [train.py:903] (2/4) Epoch 8, batch 6550, loss[loss=0.2807, simple_loss=0.3409, pruned_loss=0.1103, over 17668.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3192, pruned_loss=0.09146, over 3820081.25 frames. ], batch size: 101, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:34:00,553 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+02 6.329e+02 8.151e+02 1.084e+03 2.341e+03, threshold=1.630e+03, percent-clipped=12.0 +2023-04-01 13:34:51,138 INFO [train.py:903] (2/4) Epoch 8, batch 6600, loss[loss=0.2613, simple_loss=0.3316, pruned_loss=0.09554, over 19673.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3196, pruned_loss=0.09136, over 3809705.76 frames. ], batch size: 53, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:34:57,532 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4000, 1.3567, 1.7672, 1.3514, 2.7117, 3.3483, 3.1822, 3.5492], + device='cuda:2'), covar=tensor([0.1420, 0.3124, 0.2800, 0.2003, 0.0522, 0.0258, 0.0204, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0285, 0.0314, 0.0245, 0.0206, 0.0139, 0.0204, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:35:38,434 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8790, 2.1688, 2.3517, 2.7691, 2.2541, 2.5732, 2.2638, 2.8009], + device='cuda:2'), covar=tensor([0.0713, 0.1711, 0.1195, 0.0965, 0.1300, 0.0391, 0.1001, 0.0512], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0359, 0.0290, 0.0241, 0.0300, 0.0244, 0.0270, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:35:53,838 INFO [train.py:903] (2/4) Epoch 8, batch 6650, loss[loss=0.2478, simple_loss=0.3109, pruned_loss=0.09234, over 19418.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3186, pruned_loss=0.09114, over 3808799.41 frames. 
], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:36:04,277 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7772, 1.9716, 2.2015, 2.5728, 2.1041, 2.1942, 2.0412, 2.6827], + device='cuda:2'), covar=tensor([0.0608, 0.1590, 0.1124, 0.0779, 0.1154, 0.0481, 0.0960, 0.0504], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0358, 0.0290, 0.0241, 0.0298, 0.0243, 0.0270, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:36:04,916 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.746e+02 5.865e+02 7.808e+02 1.010e+03 1.907e+03, threshold=1.562e+03, percent-clipped=2.0 +2023-04-01 13:36:33,942 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7083, 1.7143, 1.4710, 1.3730, 1.2786, 1.4544, 0.1756, 0.5970], + device='cuda:2'), covar=tensor([0.0330, 0.0339, 0.0233, 0.0329, 0.0758, 0.0348, 0.0624, 0.0602], + device='cuda:2'), in_proj_covar=tensor([0.0325, 0.0321, 0.0319, 0.0333, 0.0414, 0.0339, 0.0297, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:36:55,566 INFO [train.py:903] (2/4) Epoch 8, batch 6700, loss[loss=0.2125, simple_loss=0.2925, pruned_loss=0.06619, over 19617.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3193, pruned_loss=0.09121, over 3806510.22 frames. ], batch size: 50, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:36:59,445 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1739, 1.2073, 1.4960, 1.2437, 2.4435, 2.0013, 2.5617, 0.9636], + device='cuda:2'), covar=tensor([0.1974, 0.3255, 0.1892, 0.1658, 0.1107, 0.1653, 0.1098, 0.3042], + device='cuda:2'), in_proj_covar=tensor([0.0469, 0.0545, 0.0550, 0.0420, 0.0574, 0.0474, 0.0640, 0.0474], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:37:52,318 INFO [train.py:903] (2/4) Epoch 8, batch 6750, loss[loss=0.2523, simple_loss=0.3256, pruned_loss=0.08948, over 19694.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3194, pruned_loss=0.09132, over 3811656.18 frames. ], batch size: 59, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:38:03,630 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.992e+02 6.380e+02 7.224e+02 9.353e+02 2.017e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 13:38:27,105 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 13:38:50,507 INFO [train.py:903] (2/4) Epoch 8, batch 6800, loss[loss=0.2285, simple_loss=0.3058, pruned_loss=0.07561, over 19619.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3185, pruned_loss=0.09045, over 3807902.19 frames. ], batch size: 50, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:39:34,212 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 13:39:34,671 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 13:39:38,456 INFO [train.py:903] (2/4) Epoch 9, batch 0, loss[loss=0.27, simple_loss=0.3383, pruned_loss=0.1009, over 19635.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3383, pruned_loss=0.1009, over 19635.00 frames. 
], batch size: 57, lr: 9.56e-03, grad_scale: 8.0 +2023-04-01 13:39:38,457 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 13:39:49,513 INFO [train.py:937] (2/4) Epoch 9, validation: loss=0.1866, simple_loss=0.2872, pruned_loss=0.04294, over 944034.00 frames. +2023-04-01 13:39:49,514 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 13:40:03,815 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 13:40:28,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.879e+02 5.500e+02 7.208e+02 8.930e+02 1.459e+03, threshold=1.442e+03, percent-clipped=1.0 +2023-04-01 13:40:51,456 INFO [train.py:903] (2/4) Epoch 9, batch 50, loss[loss=0.1979, simple_loss=0.2697, pruned_loss=0.06305, over 19740.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3239, pruned_loss=0.09078, over 865081.83 frames. ], batch size: 47, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:41:02,936 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54682.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:41:26,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 13:41:53,038 INFO [train.py:903] (2/4) Epoch 9, batch 100, loss[loss=0.2461, simple_loss=0.3252, pruned_loss=0.08351, over 19342.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3195, pruned_loss=0.08929, over 1538796.71 frames. ], batch size: 70, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:42:05,379 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 13:42:18,536 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2561, 1.3234, 1.7030, 1.5454, 2.6690, 2.1552, 2.7295, 1.1907], + device='cuda:2'), covar=tensor([0.2122, 0.3656, 0.2195, 0.1641, 0.1363, 0.1811, 0.1460, 0.3314], + device='cuda:2'), in_proj_covar=tensor([0.0472, 0.0550, 0.0556, 0.0423, 0.0580, 0.0475, 0.0643, 0.0476], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:42:31,158 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.564e+02 7.190e+02 9.001e+02 2.877e+03, threshold=1.438e+03, percent-clipped=2.0 +2023-04-01 13:42:53,294 INFO [train.py:903] (2/4) Epoch 9, batch 150, loss[loss=0.2496, simple_loss=0.3094, pruned_loss=0.09485, over 18698.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3172, pruned_loss=0.0884, over 2058376.17 frames. 
], batch size: 41, lr: 9.54e-03, grad_scale: 16.0 +2023-04-01 13:43:05,243 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3781, 1.4684, 1.8295, 1.6739, 2.9179, 2.4358, 3.0351, 1.2517], + device='cuda:2'), covar=tensor([0.1988, 0.3488, 0.2146, 0.1553, 0.1215, 0.1614, 0.1244, 0.3293], + device='cuda:2'), in_proj_covar=tensor([0.0474, 0.0549, 0.0556, 0.0424, 0.0583, 0.0477, 0.0642, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:43:43,812 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0372, 1.9106, 2.0990, 1.5975, 4.5092, 0.9497, 2.3363, 4.8694], + device='cuda:2'), covar=tensor([0.0305, 0.2361, 0.2253, 0.1776, 0.0611, 0.2552, 0.1316, 0.0198], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0329, 0.0339, 0.0307, 0.0337, 0.0321, 0.0311, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:43:53,897 INFO [train.py:903] (2/4) Epoch 9, batch 200, loss[loss=0.2742, simple_loss=0.3392, pruned_loss=0.1046, over 19611.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3189, pruned_loss=0.09045, over 2450567.54 frames. ], batch size: 61, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:43:56,306 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 13:43:56,659 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7024, 1.7160, 1.3879, 1.2571, 1.2426, 1.2887, 0.2803, 0.5389], + device='cuda:2'), covar=tensor([0.0386, 0.0408, 0.0290, 0.0405, 0.0780, 0.0433, 0.0658, 0.0709], + device='cuda:2'), in_proj_covar=tensor([0.0320, 0.0316, 0.0316, 0.0332, 0.0411, 0.0335, 0.0297, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:44:36,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.883e+02 7.738e+02 9.204e+02 1.688e+03, threshold=1.548e+03, percent-clipped=2.0 +2023-04-01 13:44:40,658 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 13:44:57,169 INFO [train.py:903] (2/4) Epoch 9, batch 250, loss[loss=0.2318, simple_loss=0.302, pruned_loss=0.08076, over 19388.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3169, pruned_loss=0.08903, over 2765282.05 frames. ], batch size: 48, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:45:06,569 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:45:39,287 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7340, 1.7583, 1.5341, 1.4945, 1.3633, 1.4805, 0.8133, 1.0880], + device='cuda:2'), covar=tensor([0.0341, 0.0392, 0.0265, 0.0370, 0.0574, 0.0465, 0.0617, 0.0521], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0319, 0.0320, 0.0336, 0.0414, 0.0339, 0.0299, 0.0320], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:45:57,838 INFO [train.py:903] (2/4) Epoch 9, batch 300, loss[loss=0.2911, simple_loss=0.3434, pruned_loss=0.1194, over 13291.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3178, pruned_loss=0.08953, over 2983639.50 frames. 
], batch size: 137, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:46:39,710 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 5.660e+02 7.032e+02 9.457e+02 2.087e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-04-01 13:47:01,338 INFO [train.py:903] (2/4) Epoch 9, batch 350, loss[loss=0.2248, simple_loss=0.2877, pruned_loss=0.08093, over 19805.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.318, pruned_loss=0.08965, over 3165894.04 frames. ], batch size: 48, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:47:07,248 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:48:03,058 INFO [train.py:903] (2/4) Epoch 9, batch 400, loss[loss=0.2569, simple_loss=0.3248, pruned_loss=0.09453, over 18260.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3199, pruned_loss=0.0906, over 3307035.28 frames. ], batch size: 84, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:48:06,563 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55026.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:48:38,451 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-04-01 13:48:44,520 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.195e+02 5.719e+02 7.898e+02 1.018e+03 2.327e+03, threshold=1.580e+03, percent-clipped=4.0 +2023-04-01 13:49:04,313 INFO [train.py:903] (2/4) Epoch 9, batch 450, loss[loss=0.3022, simple_loss=0.3652, pruned_loss=0.1196, over 18195.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3195, pruned_loss=0.09001, over 3426378.23 frames. ], batch size: 83, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:49:42,331 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 13:49:43,527 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 13:50:07,767 INFO [train.py:903] (2/4) Epoch 9, batch 500, loss[loss=0.2354, simple_loss=0.3092, pruned_loss=0.08081, over 19673.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3177, pruned_loss=0.0894, over 3513597.60 frames. ], batch size: 53, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:50:30,930 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55141.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:50:47,944 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 6.183e+02 7.261e+02 8.870e+02 1.589e+03, threshold=1.452e+03, percent-clipped=1.0 +2023-04-01 13:51:01,631 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:51:10,560 INFO [train.py:903] (2/4) Epoch 9, batch 550, loss[loss=0.268, simple_loss=0.3351, pruned_loss=0.1004, over 19527.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3161, pruned_loss=0.08816, over 3600054.46 frames. 
], batch size: 54, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:51:28,575 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9625, 1.2544, 1.6280, 0.5399, 2.1035, 2.4801, 2.1109, 2.5795], + device='cuda:2'), covar=tensor([0.1386, 0.2996, 0.2669, 0.2148, 0.0452, 0.0220, 0.0331, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0290, 0.0319, 0.0250, 0.0210, 0.0142, 0.0207, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 13:52:14,549 INFO [train.py:903] (2/4) Epoch 9, batch 600, loss[loss=0.2697, simple_loss=0.3371, pruned_loss=0.1012, over 19675.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3159, pruned_loss=0.08794, over 3643970.27 frames. ], batch size: 58, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:15,868 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:21,692 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:55,192 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.838e+02 6.783e+02 8.586e+02 3.812e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 13:52:58,749 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 13:52:59,133 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6130, 1.1824, 1.3430, 1.4631, 3.1412, 0.9789, 1.9839, 3.3782], + device='cuda:2'), covar=tensor([0.0405, 0.2535, 0.2683, 0.1586, 0.0661, 0.2331, 0.1299, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0327, 0.0337, 0.0307, 0.0335, 0.0321, 0.0311, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 13:53:16,398 INFO [train.py:903] (2/4) Epoch 9, batch 650, loss[loss=0.2364, simple_loss=0.32, pruned_loss=0.07645, over 19676.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3158, pruned_loss=0.08818, over 3679496.59 frames. ], batch size: 59, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:19,286 INFO [train.py:903] (2/4) Epoch 9, batch 700, loss[loss=0.2719, simple_loss=0.3299, pruned_loss=0.1069, over 19651.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.316, pruned_loss=0.08815, over 3700485.26 frames. ], batch size: 55, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:41,861 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55339.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:55:02,465 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.553e+02 6.808e+02 9.255e+02 2.546e+03, threshold=1.362e+03, percent-clipped=4.0 +2023-04-01 13:55:22,969 INFO [train.py:903] (2/4) Epoch 9, batch 750, loss[loss=0.2357, simple_loss=0.3052, pruned_loss=0.08308, over 19850.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3165, pruned_loss=0.0884, over 3748079.76 frames. 
], batch size: 52, lr: 9.49e-03, grad_scale: 4.0 +2023-04-01 13:55:53,584 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55397.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:25,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55422.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:26,682 INFO [train.py:903] (2/4) Epoch 9, batch 800, loss[loss=0.2262, simple_loss=0.2952, pruned_loss=0.07854, over 19474.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3174, pruned_loss=0.08887, over 3777476.97 frames. ], batch size: 49, lr: 9.49e-03, grad_scale: 8.0 +2023-04-01 13:56:42,001 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:57:07,881 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.631e+02 7.179e+02 9.586e+02 1.610e+03, threshold=1.436e+03, percent-clipped=4.0 +2023-04-01 13:57:13,012 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 13:57:28,777 INFO [train.py:903] (2/4) Epoch 9, batch 850, loss[loss=0.2173, simple_loss=0.2923, pruned_loss=0.07119, over 19460.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3164, pruned_loss=0.08822, over 3793029.30 frames. ], batch size: 49, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:58:01,106 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1898, 1.2367, 1.4201, 1.3307, 1.8004, 1.7007, 1.8643, 0.4537], + device='cuda:2'), covar=tensor([0.2055, 0.3484, 0.2074, 0.1648, 0.1274, 0.1937, 0.1155, 0.3530], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0547, 0.0553, 0.0422, 0.0580, 0.0474, 0.0634, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 13:58:14,370 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:58:22,705 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 13:58:29,597 INFO [train.py:903] (2/4) Epoch 9, batch 900, loss[loss=0.2691, simple_loss=0.3388, pruned_loss=0.09974, over 19367.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3172, pruned_loss=0.0886, over 3798754.49 frames. ], batch size: 66, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:58:39,329 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 13:59:12,476 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.962e+02 5.974e+02 7.143e+02 8.825e+02 2.413e+03, threshold=1.429e+03, percent-clipped=6.0 +2023-04-01 13:59:32,014 INFO [train.py:903] (2/4) Epoch 9, batch 950, loss[loss=0.2924, simple_loss=0.3502, pruned_loss=0.1173, over 19616.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3175, pruned_loss=0.08867, over 3801931.08 frames. ], batch size: 50, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:32,188 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:59:37,625 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 14:00:01,022 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:07,146 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 14:00:32,429 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:35,416 INFO [train.py:903] (2/4) Epoch 9, batch 1000, loss[loss=0.2706, simple_loss=0.3396, pruned_loss=0.1008, over 18154.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3176, pruned_loss=0.08949, over 3788503.81 frames. ], batch size: 83, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:00:38,014 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:44,434 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 14:01:18,080 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.465e+02 5.596e+02 6.834e+02 8.838e+02 1.578e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 14:01:29,792 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 14:01:39,291 INFO [train.py:903] (2/4) Epoch 9, batch 1050, loss[loss=0.2489, simple_loss=0.3229, pruned_loss=0.0874, over 19733.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3179, pruned_loss=0.08925, over 3800684.47 frames. ], batch size: 63, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:01:57,381 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:02:02,667 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 14:02:10,980 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 14:02:35,960 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9958, 5.0451, 5.8794, 5.7883, 1.9618, 5.4508, 4.7223, 5.3983], + device='cuda:2'), covar=tensor([0.1153, 0.0611, 0.0392, 0.0396, 0.4752, 0.0393, 0.0523, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0617, 0.0544, 0.0733, 0.0616, 0.0674, 0.0481, 0.0465, 0.0674], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 14:02:42,620 INFO [train.py:903] (2/4) Epoch 9, batch 1100, loss[loss=0.2466, simple_loss=0.3266, pruned_loss=0.08327, over 19683.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3181, pruned_loss=0.08956, over 3811100.74 frames. 
], batch size: 59, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:02:48,822 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5896, 1.6515, 1.8117, 2.1030, 1.3942, 1.8132, 2.0965, 1.7265], + device='cuda:2'), covar=tensor([0.3166, 0.2503, 0.1328, 0.1373, 0.2775, 0.1243, 0.3014, 0.2362], + device='cuda:2'), in_proj_covar=tensor([0.0747, 0.0755, 0.0626, 0.0875, 0.0750, 0.0658, 0.0772, 0.0677], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:03:25,907 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 6.007e+02 7.412e+02 9.315e+02 2.515e+03, threshold=1.482e+03, percent-clipped=6.0 +2023-04-01 14:03:36,855 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55766.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:03:45,614 INFO [train.py:903] (2/4) Epoch 9, batch 1150, loss[loss=0.222, simple_loss=0.2956, pruned_loss=0.07417, over 19744.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3178, pruned_loss=0.08914, over 3822526.72 frames. ], batch size: 51, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:04:50,171 INFO [train.py:903] (2/4) Epoch 9, batch 1200, loss[loss=0.2575, simple_loss=0.3316, pruned_loss=0.09172, over 17430.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3172, pruned_loss=0.08858, over 3818643.93 frames. ], batch size: 101, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:05:18,995 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 14:05:31,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.284e+02 5.926e+02 7.645e+02 1.010e+03 3.329e+03, threshold=1.529e+03, percent-clipped=6.0 +2023-04-01 14:05:44,736 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:05:44,875 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2004, 2.0642, 1.7849, 1.6329, 1.5511, 1.7356, 0.4953, 0.9019], + device='cuda:2'), covar=tensor([0.0322, 0.0357, 0.0234, 0.0403, 0.0743, 0.0390, 0.0644, 0.0663], + device='cuda:2'), in_proj_covar=tensor([0.0314, 0.0315, 0.0311, 0.0327, 0.0403, 0.0330, 0.0290, 0.0311], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:05:53,920 INFO [train.py:903] (2/4) Epoch 9, batch 1250, loss[loss=0.2443, simple_loss=0.3251, pruned_loss=0.08176, over 19626.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3165, pruned_loss=0.08829, over 3825860.87 frames. ], batch size: 61, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:06:03,210 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:09,678 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 14:06:35,411 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:36,769 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.42 vs. limit=5.0 +2023-04-01 14:06:55,962 INFO [train.py:903] (2/4) Epoch 9, batch 1300, loss[loss=0.2628, simple_loss=0.3376, pruned_loss=0.09405, over 19289.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3163, pruned_loss=0.08793, over 3836291.62 frames. 
], batch size: 66, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:07:24,020 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:39,623 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.793e+02 6.909e+02 8.321e+02 2.022e+03, threshold=1.382e+03, percent-clipped=2.0 +2023-04-01 14:07:54,310 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:58,595 INFO [train.py:903] (2/4) Epoch 9, batch 1350, loss[loss=0.2259, simple_loss=0.3075, pruned_loss=0.07219, over 19519.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3156, pruned_loss=0.08733, over 3852700.06 frames. ], batch size: 64, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:09:02,600 INFO [train.py:903] (2/4) Epoch 9, batch 1400, loss[loss=0.2487, simple_loss=0.3161, pruned_loss=0.09066, over 19590.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3149, pruned_loss=0.08709, over 3852306.87 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:09:47,083 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.536e+02 7.314e+02 8.995e+02 2.483e+03, threshold=1.463e+03, percent-clipped=9.0 +2023-04-01 14:10:07,190 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 14:10:08,159 INFO [train.py:903] (2/4) Epoch 9, batch 1450, loss[loss=0.2294, simple_loss=0.3029, pruned_loss=0.07802, over 19451.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3145, pruned_loss=0.08717, over 3856720.04 frames. ], batch size: 49, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:10:55,228 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:11:11,141 INFO [train.py:903] (2/4) Epoch 9, batch 1500, loss[loss=0.2852, simple_loss=0.3504, pruned_loss=0.11, over 18809.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3156, pruned_loss=0.0876, over 3852251.66 frames. ], batch size: 74, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:11:19,745 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-04-01 14:11:27,085 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4044, 1.1280, 1.5706, 1.2916, 2.9299, 3.7998, 3.5509, 4.0509], + device='cuda:2'), covar=tensor([0.1323, 0.3204, 0.2960, 0.1912, 0.0431, 0.0140, 0.0205, 0.0157], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0286, 0.0317, 0.0246, 0.0208, 0.0142, 0.0206, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:11:52,399 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.776e+02 6.008e+02 7.164e+02 9.066e+02 2.093e+03, threshold=1.433e+03, percent-clipped=3.0 +2023-04-01 14:12:01,108 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:12:12,300 INFO [train.py:903] (2/4) Epoch 9, batch 1550, loss[loss=0.1875, simple_loss=0.262, pruned_loss=0.05648, over 19755.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.317, pruned_loss=0.08922, over 3829068.72 frames. 
], batch size: 47, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:12:17,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9237, 1.6832, 1.5830, 2.1098, 2.0124, 1.8519, 1.7276, 1.9974], + device='cuda:2'), covar=tensor([0.0887, 0.1589, 0.1365, 0.0876, 0.1026, 0.0484, 0.1039, 0.0605], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0347, 0.0283, 0.0235, 0.0292, 0.0239, 0.0268, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:12:59,840 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:15,311 INFO [train.py:903] (2/4) Epoch 9, batch 1600, loss[loss=0.2031, simple_loss=0.2716, pruned_loss=0.06733, over 19381.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3167, pruned_loss=0.08872, over 3833333.18 frames. ], batch size: 47, lr: 9.42e-03, grad_scale: 8.0 +2023-04-01 14:13:18,988 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:41,636 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 14:13:59,885 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.781e+02 6.769e+02 8.617e+02 2.222e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-01 14:14:16,059 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2245, 1.2781, 1.1927, 1.0238, 1.0837, 1.0814, 0.0642, 0.3341], + device='cuda:2'), covar=tensor([0.0400, 0.0397, 0.0248, 0.0327, 0.0777, 0.0311, 0.0669, 0.0642], + device='cuda:2'), in_proj_covar=tensor([0.0316, 0.0315, 0.0315, 0.0331, 0.0406, 0.0334, 0.0293, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:14:19,016 INFO [train.py:903] (2/4) Epoch 9, batch 1650, loss[loss=0.2331, simple_loss=0.3146, pruned_loss=0.07581, over 19535.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3165, pruned_loss=0.08876, over 3834711.63 frames. ], batch size: 56, lr: 9.42e-03, grad_scale: 4.0 +2023-04-01 14:14:22,030 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1555, 2.2517, 2.2884, 3.4029, 2.1309, 3.2383, 2.9424, 2.1122], + device='cuda:2'), covar=tensor([0.3559, 0.2990, 0.1351, 0.1619, 0.3685, 0.1274, 0.2828, 0.2528], + device='cuda:2'), in_proj_covar=tensor([0.0743, 0.0751, 0.0624, 0.0864, 0.0747, 0.0661, 0.0764, 0.0673], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:15:21,808 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7180, 1.3680, 1.3578, 1.9950, 1.4781, 1.9640, 2.0016, 1.6759], + device='cuda:2'), covar=tensor([0.0795, 0.1052, 0.1064, 0.0823, 0.0962, 0.0699, 0.0854, 0.0696], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0231, 0.0226, 0.0254, 0.0241, 0.0214, 0.0204, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 14:15:22,543 INFO [train.py:903] (2/4) Epoch 9, batch 1700, loss[loss=0.2262, simple_loss=0.3062, pruned_loss=0.0731, over 19482.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3165, pruned_loss=0.08861, over 3838959.63 frames. 
], batch size: 64, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:15:25,217 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:16:02,363 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 14:16:05,919 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.619e+02 6.768e+02 8.904e+02 2.101e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-04-01 14:16:24,626 INFO [train.py:903] (2/4) Epoch 9, batch 1750, loss[loss=0.2431, simple_loss=0.3196, pruned_loss=0.08332, over 18232.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3161, pruned_loss=0.0883, over 3841693.48 frames. ], batch size: 83, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:16:26,184 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1612, 1.0478, 1.0719, 1.3978, 1.1524, 1.2197, 1.4287, 1.1155], + device='cuda:2'), covar=tensor([0.0873, 0.1054, 0.1099, 0.0629, 0.0821, 0.0839, 0.0737, 0.0808], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0232, 0.0228, 0.0255, 0.0243, 0.0215, 0.0205, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 14:16:42,919 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5383, 1.1149, 1.2867, 1.1463, 2.1607, 0.8969, 2.0045, 2.2211], + device='cuda:2'), covar=tensor([0.0609, 0.2501, 0.2535, 0.1473, 0.0809, 0.2071, 0.0911, 0.0544], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0322, 0.0336, 0.0299, 0.0329, 0.0319, 0.0309, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:17:26,706 INFO [train.py:903] (2/4) Epoch 9, batch 1800, loss[loss=0.2248, simple_loss=0.309, pruned_loss=0.07027, over 19696.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3164, pruned_loss=0.0882, over 3837486.25 frames. ], batch size: 59, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:09,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.839e+02 7.002e+02 8.564e+02 1.629e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 14:18:25,586 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 14:18:29,987 INFO [train.py:903] (2/4) Epoch 9, batch 1850, loss[loss=0.242, simple_loss=0.3086, pruned_loss=0.08773, over 16314.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3156, pruned_loss=0.08767, over 3831542.83 frames. ], batch size: 36, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:40,558 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:02,320 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 14:19:11,132 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:12,035 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:32,999 INFO [train.py:903] (2/4) Epoch 9, batch 1900, loss[loss=0.245, simple_loss=0.3298, pruned_loss=0.08009, over 19597.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3154, pruned_loss=0.08699, over 3835082.94 frames. 
], batch size: 57, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:19:48,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 14:19:54,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 14:19:55,197 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56541.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:20:16,626 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.665e+02 6.760e+02 8.403e+02 1.758e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-04-01 14:20:18,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 14:20:35,966 INFO [train.py:903] (2/4) Epoch 9, batch 1950, loss[loss=0.2556, simple_loss=0.3264, pruned_loss=0.09237, over 19518.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.316, pruned_loss=0.08735, over 3822148.86 frames. ], batch size: 54, lr: 9.39e-03, grad_scale: 4.0 +2023-04-01 14:20:46,780 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:18,990 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:38,533 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:39,347 INFO [train.py:903] (2/4) Epoch 9, batch 2000, loss[loss=0.2535, simple_loss=0.323, pruned_loss=0.09201, over 18783.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3151, pruned_loss=0.08722, over 3811301.08 frames. ], batch size: 74, lr: 9.39e-03, grad_scale: 8.0 +2023-04-01 14:22:22,874 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.506e+02 7.081e+02 9.090e+02 3.144e+03, threshold=1.416e+03, percent-clipped=7.0 +2023-04-01 14:22:36,098 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 14:22:42,523 INFO [train.py:903] (2/4) Epoch 9, batch 2050, loss[loss=0.2261, simple_loss=0.295, pruned_loss=0.07864, over 19488.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.315, pruned_loss=0.08714, over 3819063.01 frames. ], batch size: 49, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:22:56,331 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 14:22:57,512 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 14:23:19,229 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 14:23:22,025 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2188, 2.8631, 1.8453, 1.9172, 1.9431, 2.3552, 0.9139, 1.9316], + device='cuda:2'), covar=tensor([0.0373, 0.0385, 0.0491, 0.0735, 0.0754, 0.0633, 0.0797, 0.0736], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0320, 0.0316, 0.0337, 0.0415, 0.0336, 0.0295, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:23:33,099 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7482, 4.2746, 2.6815, 3.7717, 1.1751, 4.0007, 3.9715, 4.2028], + device='cuda:2'), covar=tensor([0.0587, 0.0970, 0.1813, 0.0683, 0.3491, 0.0703, 0.0710, 0.0847], + device='cuda:2'), in_proj_covar=tensor([0.0404, 0.0344, 0.0412, 0.0303, 0.0371, 0.0338, 0.0331, 0.0366], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 14:23:34,386 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2568, 1.8355, 1.7878, 2.9259, 2.1600, 2.6070, 2.6338, 2.3886], + device='cuda:2'), covar=tensor([0.0711, 0.0930, 0.1033, 0.0785, 0.0912, 0.0662, 0.0866, 0.0561], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0232, 0.0227, 0.0255, 0.0242, 0.0216, 0.0205, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 14:23:44,277 INFO [train.py:903] (2/4) Epoch 9, batch 2100, loss[loss=0.2913, simple_loss=0.3487, pruned_loss=0.117, over 19665.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3162, pruned_loss=0.08795, over 3813661.56 frames. ], batch size: 58, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:24:12,034 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 14:24:29,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 5.573e+02 6.906e+02 8.990e+02 1.566e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-01 14:24:35,335 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 14:24:48,320 INFO [train.py:903] (2/4) Epoch 9, batch 2150, loss[loss=0.1983, simple_loss=0.2747, pruned_loss=0.061, over 19380.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3149, pruned_loss=0.08724, over 3816115.78 frames. ], batch size: 48, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:25:18,651 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-01 14:25:48,776 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56821.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:25:50,904 INFO [train.py:903] (2/4) Epoch 9, batch 2200, loss[loss=0.2097, simple_loss=0.2883, pruned_loss=0.06555, over 19555.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3162, pruned_loss=0.08776, over 3821486.58 frames. 
], batch size: 56, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:26:23,359 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1565, 1.2244, 1.4865, 0.9409, 2.3213, 3.0201, 2.7825, 3.1691], + device='cuda:2'), covar=tensor([0.1536, 0.3405, 0.3035, 0.2189, 0.0521, 0.0180, 0.0280, 0.0216], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0290, 0.0321, 0.0249, 0.0209, 0.0143, 0.0205, 0.0179], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:26:36,147 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 5.854e+02 7.300e+02 9.690e+02 2.298e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 14:26:57,251 INFO [train.py:903] (2/4) Epoch 9, batch 2250, loss[loss=0.2205, simple_loss=0.2901, pruned_loss=0.07544, over 19458.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3168, pruned_loss=0.08816, over 3812691.03 frames. ], batch size: 49, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:27:03,730 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:27:11,761 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56885.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:27:34,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:00,651 INFO [train.py:903] (2/4) Epoch 9, batch 2300, loss[loss=0.2773, simple_loss=0.3469, pruned_loss=0.1039, over 19455.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.317, pruned_loss=0.08843, over 3802893.09 frames. ], batch size: 70, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:28:05,843 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:13,792 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 14:28:46,770 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.809e+02 7.207e+02 9.233e+02 1.673e+03, threshold=1.441e+03, percent-clipped=4.0 +2023-04-01 14:28:51,921 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4226, 1.6889, 1.7807, 1.8222, 3.8979, 1.1438, 2.5053, 4.1231], + device='cuda:2'), covar=tensor([0.0402, 0.2401, 0.2640, 0.1710, 0.0695, 0.2836, 0.1452, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0325, 0.0339, 0.0303, 0.0332, 0.0321, 0.0310, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:29:05,083 INFO [train.py:903] (2/4) Epoch 9, batch 2350, loss[loss=0.2548, simple_loss=0.33, pruned_loss=0.08979, over 19769.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3181, pruned_loss=0.08902, over 3792312.21 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:29:20,753 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 14:29:40,023 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57000.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:29:46,441 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-01 14:29:53,648 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:29:55,789 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:30:01,370 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 14:30:07,033 INFO [train.py:903] (2/4) Epoch 9, batch 2400, loss[loss=0.2669, simple_loss=0.3391, pruned_loss=0.09735, over 18669.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3181, pruned_loss=0.08899, over 3797705.40 frames. ], batch size: 74, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:30:31,591 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 14:30:51,620 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.434e+02 5.803e+02 7.402e+02 8.988e+02 1.700e+03, threshold=1.480e+03, percent-clipped=2.0 +2023-04-01 14:31:11,536 INFO [train.py:903] (2/4) Epoch 9, batch 2450, loss[loss=0.2611, simple_loss=0.3313, pruned_loss=0.09545, over 19583.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3182, pruned_loss=0.08923, over 3802004.71 frames. ], batch size: 61, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:32:15,753 INFO [train.py:903] (2/4) Epoch 9, batch 2500, loss[loss=0.1836, simple_loss=0.2635, pruned_loss=0.05182, over 19362.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3172, pruned_loss=0.08876, over 3809629.06 frames. ], batch size: 47, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:33:00,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 14:33:00,916 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.325e+02 6.954e+02 9.918e+02 1.981e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 14:33:09,588 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57165.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:33:19,757 INFO [train.py:903] (2/4) Epoch 9, batch 2550, loss[loss=0.241, simple_loss=0.3175, pruned_loss=0.08224, over 19763.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3171, pruned_loss=0.08884, over 3812129.46 frames. 
], batch size: 54, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:33:51,256 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0942, 1.8121, 1.8926, 2.4602, 2.0550, 2.4204, 2.2794, 2.2210], + device='cuda:2'), covar=tensor([0.0703, 0.0843, 0.0920, 0.0839, 0.0862, 0.0588, 0.0925, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0231, 0.0229, 0.0256, 0.0243, 0.0215, 0.0205, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 14:33:51,280 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4108, 1.8463, 2.2516, 2.4926, 2.2409, 2.1979, 2.1852, 2.6131], + device='cuda:2'), covar=tensor([0.0689, 0.1589, 0.1090, 0.0729, 0.1116, 0.0418, 0.0894, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0351, 0.0290, 0.0240, 0.0298, 0.0244, 0.0271, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:33:59,379 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57204.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:11,384 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8570, 1.6751, 1.9856, 1.8900, 4.2804, 0.9311, 2.2308, 4.5894], + device='cuda:2'), covar=tensor([0.0326, 0.2442, 0.2260, 0.1502, 0.0623, 0.2705, 0.1345, 0.0196], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0324, 0.0338, 0.0303, 0.0333, 0.0320, 0.0309, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:34:13,770 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:15,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 14:34:22,488 INFO [train.py:903] (2/4) Epoch 9, batch 2600, loss[loss=0.2655, simple_loss=0.3381, pruned_loss=0.09645, over 18760.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3178, pruned_loss=0.08926, over 3797338.54 frames. ], batch size: 74, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:05,311 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57256.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:35:07,156 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.816e+02 6.932e+02 7.774e+02 1.592e+03, threshold=1.386e+03, percent-clipped=3.0 +2023-04-01 14:35:12,178 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:22,398 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57270.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:25,692 INFO [train.py:903] (2/4) Epoch 9, batch 2650, loss[loss=0.2463, simple_loss=0.3185, pruned_loss=0.08698, over 19742.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3175, pruned_loss=0.08915, over 3803459.17 frames. 
], batch size: 51, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:36,490 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:37,642 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57281.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:35:45,907 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 14:36:08,551 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9327, 1.3520, 1.0573, 0.9692, 1.1984, 0.9242, 0.9067, 1.2462], + device='cuda:2'), covar=tensor([0.0441, 0.0595, 0.0943, 0.0539, 0.0445, 0.1032, 0.0483, 0.0368], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0291, 0.0319, 0.0242, 0.0232, 0.0317, 0.0288, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:36:31,185 INFO [train.py:903] (2/4) Epoch 9, batch 2700, loss[loss=0.1968, simple_loss=0.2638, pruned_loss=0.06494, over 19748.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3159, pruned_loss=0.08833, over 3822115.73 frames. ], batch size: 46, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:36:57,758 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3352, 1.3774, 1.7504, 1.5249, 2.7750, 2.4199, 2.8553, 1.2561], + device='cuda:2'), covar=tensor([0.2080, 0.3607, 0.2135, 0.1590, 0.1250, 0.1596, 0.1319, 0.3249], + device='cuda:2'), in_proj_covar=tensor([0.0478, 0.0554, 0.0562, 0.0426, 0.0584, 0.0479, 0.0646, 0.0478], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:37:12,086 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:14,465 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:15,509 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.587e+02 5.592e+02 7.209e+02 8.892e+02 3.755e+03, threshold=1.442e+03, percent-clipped=7.0 +2023-04-01 14:37:33,711 INFO [train.py:903] (2/4) Epoch 9, batch 2750, loss[loss=0.2121, simple_loss=0.2825, pruned_loss=0.07082, over 19382.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3151, pruned_loss=0.08754, over 3834112.52 frames. ], batch size: 47, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:49,357 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:30,663 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:38,519 INFO [train.py:903] (2/4) Epoch 9, batch 2800, loss[loss=0.2417, simple_loss=0.3186, pruned_loss=0.0824, over 19514.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3147, pruned_loss=0.08721, over 3837214.82 frames. 
], batch size: 64, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:23,167 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.525e+02 6.599e+02 8.446e+02 1.316e+03, threshold=1.320e+03, percent-clipped=0.0 +2023-04-01 14:39:38,448 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:41,718 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:42,546 INFO [train.py:903] (2/4) Epoch 9, batch 2850, loss[loss=0.2545, simple_loss=0.333, pruned_loss=0.08801, over 17543.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3142, pruned_loss=0.08682, over 3827019.50 frames. ], batch size: 101, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:44,036 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7719, 1.6396, 1.4516, 1.9164, 1.8609, 1.3989, 1.4205, 1.7398], + device='cuda:2'), covar=tensor([0.1061, 0.1522, 0.1477, 0.0899, 0.1191, 0.0770, 0.1327, 0.0724], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0352, 0.0287, 0.0237, 0.0296, 0.0242, 0.0270, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:39:51,259 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:33,720 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:41,928 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 14:40:46,558 INFO [train.py:903] (2/4) Epoch 9, batch 2900, loss[loss=0.225, simple_loss=0.3007, pruned_loss=0.07459, over 19547.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3148, pruned_loss=0.0872, over 3819400.70 frames. ], batch size: 56, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:41:03,531 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:16,956 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:30,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.787e+02 5.829e+02 7.475e+02 8.920e+02 2.516e+03, threshold=1.495e+03, percent-clipped=6.0 +2023-04-01 14:41:32,220 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57559.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:35,771 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57561.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:49,416 INFO [train.py:903] (2/4) Epoch 9, batch 2950, loss[loss=0.3364, simple_loss=0.3862, pruned_loss=0.1433, over 12989.00 frames. ], tot_loss[loss=0.245, simple_loss=0.315, pruned_loss=0.0875, over 3816833.85 frames. 
], batch size: 136, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:09,727 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7619, 4.1846, 4.4363, 4.3875, 1.6344, 4.1438, 3.6330, 4.1298], + device='cuda:2'), covar=tensor([0.1161, 0.0723, 0.0525, 0.0489, 0.4628, 0.0482, 0.0600, 0.0938], + device='cuda:2'), in_proj_covar=tensor([0.0626, 0.0553, 0.0742, 0.0615, 0.0686, 0.0490, 0.0468, 0.0688], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 14:42:17,218 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3017, 1.4003, 1.4544, 1.5242, 2.9032, 0.9069, 2.0232, 3.1699], + device='cuda:2'), covar=tensor([0.0496, 0.2402, 0.2527, 0.1575, 0.0719, 0.2586, 0.1231, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0327, 0.0339, 0.0306, 0.0334, 0.0324, 0.0313, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:42:32,121 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:42:53,534 INFO [train.py:903] (2/4) Epoch 9, batch 3000, loss[loss=0.2131, simple_loss=0.2871, pruned_loss=0.06954, over 19098.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3138, pruned_loss=0.08711, over 3808616.44 frames. ], batch size: 42, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:53,535 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 14:43:06,210 INFO [train.py:937] (2/4) Epoch 9, validation: loss=0.1831, simple_loss=0.2838, pruned_loss=0.04122, over 944034.00 frames. +2023-04-01 14:43:06,211 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 14:43:08,509 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 14:43:28,434 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:43:50,437 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.061e+02 7.914e+02 9.800e+02 2.087e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-04-01 14:43:56,598 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:00,202 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:08,759 INFO [train.py:903] (2/4) Epoch 9, batch 3050, loss[loss=0.3462, simple_loss=0.3823, pruned_loss=0.1551, over 19654.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3142, pruned_loss=0.08751, over 3812437.73 frames. ], batch size: 60, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:44:10,187 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:09,154 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:11,161 INFO [train.py:903] (2/4) Epoch 9, batch 3100, loss[loss=0.2187, simple_loss=0.2881, pruned_loss=0.07467, over 19779.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3144, pruned_loss=0.08774, over 3809295.98 frames. 
], batch size: 48, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:45:16,070 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:19,256 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:27,303 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2040, 1.2549, 1.7082, 1.3936, 2.6001, 1.9035, 2.5979, 1.0451], + device='cuda:2'), covar=tensor([0.2166, 0.3710, 0.2106, 0.1706, 0.1158, 0.1956, 0.1264, 0.3365], + device='cuda:2'), in_proj_covar=tensor([0.0467, 0.0542, 0.0552, 0.0419, 0.0575, 0.0469, 0.0633, 0.0471], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:45:40,950 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8113, 2.1894, 2.1792, 2.7797, 2.6843, 2.3215, 2.3140, 2.9011], + device='cuda:2'), covar=tensor([0.0763, 0.1695, 0.1298, 0.0919, 0.1171, 0.0457, 0.0962, 0.0497], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0350, 0.0287, 0.0237, 0.0294, 0.0242, 0.0267, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:45:47,411 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,508 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:49,803 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:54,924 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.860e+02 5.906e+02 7.408e+02 1.029e+03 2.368e+03, threshold=1.482e+03, percent-clipped=3.0 +2023-04-01 14:45:57,423 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:46:14,158 INFO [train.py:903] (2/4) Epoch 9, batch 3150, loss[loss=0.2505, simple_loss=0.3196, pruned_loss=0.09071, over 19758.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3145, pruned_loss=0.08726, over 3812574.57 frames. ], batch size: 54, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:46:42,106 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 14:47:14,696 INFO [train.py:903] (2/4) Epoch 9, batch 3200, loss[loss=0.2164, simple_loss=0.2897, pruned_loss=0.07151, over 19408.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3153, pruned_loss=0.08802, over 3807451.53 frames. ], batch size: 48, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:47:16,007 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57824.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:30,902 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-01 14:47:55,923 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:56,959 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.791e+02 7.100e+02 9.261e+02 4.038e+03, threshold=1.420e+03, percent-clipped=7.0 +2023-04-01 14:48:14,909 INFO [train.py:903] (2/4) Epoch 9, batch 3250, loss[loss=0.2431, simple_loss=0.3132, pruned_loss=0.08652, over 19656.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3143, pruned_loss=0.08746, over 3821635.32 frames. ], batch size: 53, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:48:17,731 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-01 14:48:18,543 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:01,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5497, 1.5881, 1.8750, 1.6967, 2.8650, 2.4572, 2.8032, 1.4310], + device='cuda:2'), covar=tensor([0.1891, 0.3399, 0.2010, 0.1551, 0.1126, 0.1530, 0.1200, 0.3013], + device='cuda:2'), in_proj_covar=tensor([0.0473, 0.0556, 0.0561, 0.0427, 0.0583, 0.0477, 0.0641, 0.0478], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:49:12,236 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57919.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:16,424 INFO [train.py:903] (2/4) Epoch 9, batch 3300, loss[loss=0.2241, simple_loss=0.2915, pruned_loss=0.07838, over 19593.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3141, pruned_loss=0.08704, over 3820284.81 frames. ], batch size: 52, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:49:23,977 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 14:49:26,332 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57930.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:36,425 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:43,089 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:56,587 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:59,469 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.470e+02 6.783e+02 8.234e+02 1.579e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 14:50:17,161 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:17,932 INFO [train.py:903] (2/4) Epoch 9, batch 3350, loss[loss=0.2761, simple_loss=0.3475, pruned_loss=0.1024, over 19573.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3162, pruned_loss=0.08869, over 3799970.11 frames. 
], batch size: 61, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:50:22,790 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:54,291 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58002.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:51:20,069 INFO [train.py:903] (2/4) Epoch 9, batch 3400, loss[loss=0.2431, simple_loss=0.3242, pruned_loss=0.08102, over 18716.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3157, pruned_loss=0.08847, over 3790680.78 frames. ], batch size: 74, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:52:02,948 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.309e+02 6.592e+02 8.930e+02 1.711e+03, threshold=1.318e+03, percent-clipped=4.0 +2023-04-01 14:52:14,506 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3409, 1.1168, 1.7231, 1.3749, 2.6204, 3.6886, 3.4654, 4.0110], + device='cuda:2'), covar=tensor([0.1586, 0.4410, 0.3599, 0.2149, 0.0581, 0.0203, 0.0261, 0.0172], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0290, 0.0322, 0.0249, 0.0211, 0.0146, 0.0207, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:52:20,806 INFO [train.py:903] (2/4) Epoch 9, batch 3450, loss[loss=0.2856, simple_loss=0.3518, pruned_loss=0.1097, over 18081.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3166, pruned_loss=0.08871, over 3784788.62 frames. ], batch size: 83, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:52:25,120 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 14:52:48,557 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:22,371 INFO [train.py:903] (2/4) Epoch 9, batch 3500, loss[loss=0.2536, simple_loss=0.3275, pruned_loss=0.0898, over 19422.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3158, pruned_loss=0.08828, over 3775979.50 frames. ], batch size: 70, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:53:33,945 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:03,524 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:05,471 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.015e+02 7.407e+02 9.414e+02 2.837e+03, threshold=1.481e+03, percent-clipped=3.0 +2023-04-01 14:54:06,931 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58159.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:10,326 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:24,270 INFO [train.py:903] (2/4) Epoch 9, batch 3550, loss[loss=0.2657, simple_loss=0.3378, pruned_loss=0.09681, over 19344.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3168, pruned_loss=0.08888, over 3780051.87 frames. 
], batch size: 66, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:54:50,552 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:09,217 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:19,423 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2949, 1.3433, 1.6170, 1.4817, 2.5093, 2.1812, 2.5644, 0.9886], + device='cuda:2'), covar=tensor([0.2081, 0.3481, 0.1983, 0.1611, 0.1364, 0.1655, 0.1371, 0.3428], + device='cuda:2'), in_proj_covar=tensor([0.0468, 0.0547, 0.0556, 0.0422, 0.0578, 0.0474, 0.0635, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 14:55:20,412 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:24,264 INFO [train.py:903] (2/4) Epoch 9, batch 3600, loss[loss=0.2405, simple_loss=0.3216, pruned_loss=0.0797, over 19766.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3176, pruned_loss=0.08912, over 3784154.83 frames. ], batch size: 54, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:55:30,371 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:43,851 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3702, 1.3664, 2.0361, 1.8475, 2.9352, 4.6955, 4.6445, 4.9341], + device='cuda:2'), covar=tensor([0.1416, 0.3060, 0.2612, 0.1592, 0.0458, 0.0129, 0.0142, 0.0109], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0287, 0.0318, 0.0249, 0.0210, 0.0145, 0.0205, 0.0183], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:56:01,016 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:06,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.487e+02 6.500e+02 7.997e+02 2.924e+03, threshold=1.300e+03, percent-clipped=2.0 +2023-04-01 14:56:15,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.94 vs. limit=5.0 +2023-04-01 14:56:23,768 INFO [train.py:903] (2/4) Epoch 9, batch 3650, loss[loss=0.2369, simple_loss=0.3098, pruned_loss=0.08197, over 19518.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.318, pruned_loss=0.08925, over 3794025.30 frames. ], batch size: 54, lr: 9.26e-03, grad_scale: 16.0 +2023-04-01 14:57:24,455 INFO [train.py:903] (2/4) Epoch 9, batch 3700, loss[loss=0.2551, simple_loss=0.3199, pruned_loss=0.0952, over 19332.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3193, pruned_loss=0.09043, over 3772343.93 frames. 
], batch size: 66, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:58:07,747 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 6.060e+02 7.736e+02 9.843e+02 2.060e+03, threshold=1.547e+03, percent-clipped=9.0 +2023-04-01 14:58:17,419 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2204, 2.1116, 1.8011, 1.7388, 1.6376, 1.7255, 0.4351, 1.0152], + device='cuda:2'), covar=tensor([0.0348, 0.0343, 0.0246, 0.0403, 0.0717, 0.0443, 0.0703, 0.0610], + device='cuda:2'), in_proj_covar=tensor([0.0316, 0.0319, 0.0317, 0.0333, 0.0407, 0.0333, 0.0297, 0.0314], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 14:58:23,949 INFO [train.py:903] (2/4) Epoch 9, batch 3750, loss[loss=0.2134, simple_loss=0.2774, pruned_loss=0.0747, over 19753.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3182, pruned_loss=0.08979, over 3785501.57 frames. ], batch size: 47, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:59:10,951 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8444, 1.4644, 1.6442, 1.6646, 3.3547, 0.9965, 2.3464, 3.6677], + device='cuda:2'), covar=tensor([0.0382, 0.2316, 0.2383, 0.1584, 0.0691, 0.2655, 0.1263, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0330, 0.0341, 0.0311, 0.0341, 0.0328, 0.0317, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 14:59:24,636 INFO [train.py:903] (2/4) Epoch 9, batch 3800, loss[loss=0.2121, simple_loss=0.2819, pruned_loss=0.0711, over 19772.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3176, pruned_loss=0.0888, over 3798282.51 frames. ], batch size: 47, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 14:59:54,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 15:00:07,803 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7378, 1.4884, 1.5067, 2.1451, 1.6989, 2.1217, 2.1253, 1.8152], + device='cuda:2'), covar=tensor([0.0810, 0.0950, 0.1058, 0.0815, 0.0859, 0.0685, 0.0838, 0.0666], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0230, 0.0226, 0.0257, 0.0239, 0.0216, 0.0203, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:00:08,584 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 5.230e+02 6.601e+02 8.580e+02 1.875e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-04-01 15:00:17,604 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58466.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:00:22,296 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 15:00:24,957 INFO [train.py:903] (2/4) Epoch 9, batch 3850, loss[loss=0.209, simple_loss=0.2817, pruned_loss=0.06812, over 19382.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3157, pruned_loss=0.08735, over 3805476.28 frames. 
], batch size: 48, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:00:36,666 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2597, 1.2140, 1.2237, 1.3374, 1.0984, 1.3631, 1.2990, 1.2901], + device='cuda:2'), covar=tensor([0.0854, 0.0973, 0.1024, 0.0694, 0.0808, 0.0800, 0.0828, 0.0731], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0230, 0.0226, 0.0257, 0.0239, 0.0216, 0.0203, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:00:46,689 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:02,730 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:06,097 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:25,308 INFO [train.py:903] (2/4) Epoch 9, batch 3900, loss[loss=0.2338, simple_loss=0.3102, pruned_loss=0.07864, over 19666.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.316, pruned_loss=0.0874, over 3826538.75 frames. ], batch size: 55, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:02:08,184 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 6.109e+02 7.308e+02 8.933e+02 2.200e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 15:02:19,964 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-01 15:02:26,008 INFO [train.py:903] (2/4) Epoch 9, batch 3950, loss[loss=0.2131, simple_loss=0.2887, pruned_loss=0.06874, over 19479.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3161, pruned_loss=0.0877, over 3817253.57 frames. ], batch size: 49, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:02:28,083 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 15:03:21,984 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58618.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:25,419 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:28,119 INFO [train.py:903] (2/4) Epoch 9, batch 4000, loss[loss=0.3297, simple_loss=0.3722, pruned_loss=0.1435, over 12884.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3153, pruned_loss=0.08719, over 3818986.45 frames. ], batch size: 136, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:04:10,730 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.392e+02 5.642e+02 6.845e+02 8.578e+02 2.087e+03, threshold=1.369e+03, percent-clipped=3.0 +2023-04-01 15:04:10,796 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 15:04:27,015 INFO [train.py:903] (2/4) Epoch 9, batch 4050, loss[loss=0.28, simple_loss=0.3333, pruned_loss=0.1133, over 13387.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3163, pruned_loss=0.0884, over 3791519.48 frames. ], batch size: 136, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:04:59,263 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:05:28,158 INFO [train.py:903] (2/4) Epoch 9, batch 4100, loss[loss=0.2919, simple_loss=0.3557, pruned_loss=0.114, over 19662.00 frames. 
], tot_loss[loss=0.246, simple_loss=0.3159, pruned_loss=0.088, over 3804735.60 frames. ], batch size: 58, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:03,196 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 15:06:11,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.572e+02 6.953e+02 8.904e+02 1.888e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 15:06:28,249 INFO [train.py:903] (2/4) Epoch 9, batch 4150, loss[loss=0.2501, simple_loss=0.3177, pruned_loss=0.0912, over 19773.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3147, pruned_loss=0.08719, over 3801968.89 frames. ], batch size: 56, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:54,431 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58793.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:07:30,859 INFO [train.py:903] (2/4) Epoch 9, batch 4200, loss[loss=0.2575, simple_loss=0.324, pruned_loss=0.09551, over 19338.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3144, pruned_loss=0.08688, over 3816649.10 frames. ], batch size: 66, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:07:34,199 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 15:08:14,668 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.798e+02 5.991e+02 7.229e+02 9.145e+02 1.621e+03, threshold=1.446e+03, percent-clipped=3.0 +2023-04-01 15:08:16,298 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8211, 1.8648, 1.9976, 2.6156, 1.7954, 2.5352, 2.3835, 1.8549], + device='cuda:2'), covar=tensor([0.3216, 0.2581, 0.1342, 0.1366, 0.2667, 0.1150, 0.2992, 0.2482], + device='cuda:2'), in_proj_covar=tensor([0.0747, 0.0763, 0.0625, 0.0869, 0.0749, 0.0665, 0.0766, 0.0679], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:08:32,532 INFO [train.py:903] (2/4) Epoch 9, batch 4250, loss[loss=0.2042, simple_loss=0.276, pruned_loss=0.06617, over 19388.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3137, pruned_loss=0.08632, over 3827632.72 frames. ], batch size: 48, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:08:34,196 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:37,523 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:48,674 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 15:09:00,002 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 15:09:03,850 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:08,239 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:15,829 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 15:09:32,785 INFO [train.py:903] (2/4) Epoch 9, batch 4300, loss[loss=0.2776, simple_loss=0.3522, pruned_loss=0.1015, over 19561.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.313, pruned_loss=0.08602, over 3835655.89 frames. 
], batch size: 61, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:09:34,169 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:10:17,292 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.584e+02 7.321e+02 9.114e+02 2.155e+03, threshold=1.464e+03, percent-clipped=5.0 +2023-04-01 15:10:26,471 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 15:10:33,830 INFO [train.py:903] (2/4) Epoch 9, batch 4350, loss[loss=0.2674, simple_loss=0.3399, pruned_loss=0.09749, over 19675.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3133, pruned_loss=0.08595, over 3831453.48 frames. ], batch size: 60, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:34,342 INFO [train.py:903] (2/4) Epoch 9, batch 4400, loss[loss=0.2303, simple_loss=0.2941, pruned_loss=0.08326, over 19360.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3127, pruned_loss=0.08613, over 3837808.46 frames. ], batch size: 47, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:58,685 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 15:11:58,800 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:12:08,634 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 15:12:18,355 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.626e+02 6.682e+02 1.014e+03 2.125e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-01 15:12:35,842 INFO [train.py:903] (2/4) Epoch 9, batch 4450, loss[loss=0.2761, simple_loss=0.3358, pruned_loss=0.1082, over 17549.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3144, pruned_loss=0.08729, over 3821974.54 frames. ], batch size: 101, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:01,303 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1374, 2.0355, 1.7468, 1.5707, 1.3944, 1.6453, 0.4912, 1.0000], + device='cuda:2'), covar=tensor([0.0323, 0.0378, 0.0260, 0.0400, 0.0769, 0.0454, 0.0701, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0325, 0.0321, 0.0321, 0.0335, 0.0413, 0.0338, 0.0299, 0.0319], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 15:13:36,621 INFO [train.py:903] (2/4) Epoch 9, batch 4500, loss[loss=0.199, simple_loss=0.2709, pruned_loss=0.06351, over 19765.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3143, pruned_loss=0.08749, over 3824141.32 frames. ], batch size: 48, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:54,296 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:20,642 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59158.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:21,456 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.665e+02 5.638e+02 7.209e+02 9.049e+02 2.430e+03, threshold=1.442e+03, percent-clipped=5.0 +2023-04-01 15:14:38,160 INFO [train.py:903] (2/4) Epoch 9, batch 4550, loss[loss=0.2546, simple_loss=0.3311, pruned_loss=0.08907, over 17974.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3139, pruned_loss=0.08726, over 3821092.46 frames. 
], batch size: 83, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:14:38,495 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7578, 1.4765, 1.5846, 2.0739, 4.2354, 1.0495, 2.4572, 4.4018], + device='cuda:2'), covar=tensor([0.0335, 0.2572, 0.2826, 0.1441, 0.0681, 0.2567, 0.1274, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0324, 0.0336, 0.0308, 0.0337, 0.0320, 0.0313, 0.0333], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:14:46,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 15:14:49,754 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59182.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:50,757 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8642, 0.8431, 1.1592, 1.3397, 2.2695, 0.9298, 1.9331, 2.5367], + device='cuda:2'), covar=tensor([0.0767, 0.3433, 0.3217, 0.1847, 0.1217, 0.2642, 0.1281, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0323, 0.0336, 0.0308, 0.0337, 0.0319, 0.0313, 0.0333], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:14:55,977 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7639, 4.2410, 4.4535, 4.4288, 1.7484, 4.0830, 3.6583, 4.1260], + device='cuda:2'), covar=tensor([0.1316, 0.0710, 0.0543, 0.0544, 0.4700, 0.0598, 0.0616, 0.0976], + device='cuda:2'), in_proj_covar=tensor([0.0624, 0.0548, 0.0742, 0.0621, 0.0679, 0.0498, 0.0462, 0.0687], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 15:15:11,280 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 15:15:39,011 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 15:15:39,250 INFO [train.py:903] (2/4) Epoch 9, batch 4600, loss[loss=0.2399, simple_loss=0.3171, pruned_loss=0.08132, over 17409.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3143, pruned_loss=0.08724, over 3807835.81 frames. ], batch size: 101, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:15,413 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:24,166 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 5.555e+02 6.855e+02 8.371e+02 1.742e+03, threshold=1.371e+03, percent-clipped=2.0 +2023-04-01 15:16:35,117 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:40,776 INFO [train.py:903] (2/4) Epoch 9, batch 4650, loss[loss=0.2358, simple_loss=0.3072, pruned_loss=0.08222, over 19742.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3131, pruned_loss=0.0861, over 3822649.44 frames. ], batch size: 51, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:56,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 15:17:02,653 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 15:17:09,158 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 15:17:41,932 INFO [train.py:903] (2/4) Epoch 9, batch 4700, loss[loss=0.2554, simple_loss=0.3225, pruned_loss=0.09411, over 17387.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3133, pruned_loss=0.08612, over 3819454.69 frames. ], batch size: 101, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:03,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 15:18:08,312 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 15:18:25,860 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.586e+02 6.716e+02 8.168e+02 1.499e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-01 15:18:41,884 INFO [train.py:903] (2/4) Epoch 9, batch 4750, loss[loss=0.2221, simple_loss=0.2926, pruned_loss=0.07583, over 19686.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3142, pruned_loss=0.08657, over 3826316.48 frames. ], batch size: 53, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:54,786 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:32,488 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:43,257 INFO [train.py:903] (2/4) Epoch 9, batch 4800, loss[loss=0.2539, simple_loss=0.3271, pruned_loss=0.09033, over 19538.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3153, pruned_loss=0.08765, over 3815970.84 frames. ], batch size: 54, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:20:03,351 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59439.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:20:27,552 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.762e+02 6.893e+02 8.818e+02 1.836e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-01 15:20:43,944 INFO [train.py:903] (2/4) Epoch 9, batch 4850, loss[loss=0.2306, simple_loss=0.2961, pruned_loss=0.08251, over 19392.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.315, pruned_loss=0.08733, over 3816900.33 frames. ], batch size: 48, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:08,939 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 15:21:27,134 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:29,012 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 15:21:35,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 15:21:37,365 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 15:21:42,258 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:45,373 INFO [train.py:903] (2/4) Epoch 9, batch 4900, loss[loss=0.2383, simple_loss=0.3088, pruned_loss=0.08392, over 19701.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.314, pruned_loss=0.08689, over 3821946.04 frames. ], batch size: 53, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:46,553 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 15:21:49,057 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59526.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:56,982 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:22:05,284 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 15:22:25,050 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 15:22:29,460 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 5.274e+02 6.528e+02 8.023e+02 2.606e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-01 15:22:30,272 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-01 15:22:45,476 INFO [train.py:903] (2/4) Epoch 9, batch 4950, loss[loss=0.305, simple_loss=0.3642, pruned_loss=0.1229, over 19636.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3149, pruned_loss=0.08707, over 3830868.71 frames. ], batch size: 57, lr: 9.15e-03, grad_scale: 8.0 +2023-04-01 15:22:52,186 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8970, 4.2834, 4.5498, 4.5351, 1.6205, 4.1732, 3.7188, 4.2012], + device='cuda:2'), covar=tensor([0.1231, 0.0705, 0.0514, 0.0512, 0.5006, 0.0514, 0.0590, 0.0961], + device='cuda:2'), in_proj_covar=tensor([0.0632, 0.0564, 0.0750, 0.0631, 0.0695, 0.0506, 0.0463, 0.0699], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 15:23:01,087 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 15:23:24,571 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 15:23:30,057 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.83 vs. limit=5.0 +2023-04-01 15:23:46,153 INFO [train.py:903] (2/4) Epoch 9, batch 5000, loss[loss=0.2101, simple_loss=0.281, pruned_loss=0.06962, over 19765.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3142, pruned_loss=0.08636, over 3830762.47 frames. ], batch size: 47, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:23:53,573 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 15:24:04,780 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 15:24:06,320 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:08,594 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:11,084 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 15:24:30,504 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.818e+02 6.890e+02 9.185e+02 2.943e+03, threshold=1.378e+03, percent-clipped=3.0 +2023-04-01 15:24:35,464 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:46,095 INFO [train.py:903] (2/4) Epoch 9, batch 5050, loss[loss=0.2275, simple_loss=0.3093, pruned_loss=0.07286, over 19745.00 frames. 
], tot_loss[loss=0.2445, simple_loss=0.315, pruned_loss=0.08698, over 3831499.88 frames. ], batch size: 63, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:25:21,715 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 15:25:47,327 INFO [train.py:903] (2/4) Epoch 9, batch 5100, loss[loss=0.2314, simple_loss=0.3087, pruned_loss=0.07711, over 19689.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3145, pruned_loss=0.08697, over 3813299.50 frames. ], batch size: 53, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:25:56,477 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 15:25:59,769 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 15:26:05,154 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 15:26:33,073 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 5.962e+02 7.316e+02 9.170e+02 1.645e+03, threshold=1.463e+03, percent-clipped=5.0 +2023-04-01 15:26:47,506 INFO [train.py:903] (2/4) Epoch 9, batch 5150, loss[loss=0.3086, simple_loss=0.3664, pruned_loss=0.1254, over 17287.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3144, pruned_loss=0.08682, over 3813831.36 frames. ], batch size: 101, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:26:58,269 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 15:27:32,203 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 15:27:49,838 INFO [train.py:903] (2/4) Epoch 9, batch 5200, loss[loss=0.2386, simple_loss=0.3215, pruned_loss=0.07787, over 19700.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3153, pruned_loss=0.08737, over 3809778.98 frames. ], batch size: 59, lr: 9.14e-03, grad_scale: 8.0 +2023-04-01 15:27:59,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 15:28:34,607 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 5.545e+02 6.726e+02 8.723e+02 1.623e+03, threshold=1.345e+03, percent-clipped=2.0 +2023-04-01 15:28:39,338 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59864.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:41,627 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 15:28:41,878 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:50,360 INFO [train.py:903] (2/4) Epoch 9, batch 5250, loss[loss=0.1732, simple_loss=0.2551, pruned_loss=0.04564, over 19718.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3155, pruned_loss=0.08701, over 3816719.82 frames. 
], batch size: 45, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:29:19,313 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:31,294 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,072 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,795 INFO [train.py:903] (2/4) Epoch 9, batch 5300, loss[loss=0.231, simple_loss=0.3024, pruned_loss=0.07982, over 19782.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3161, pruned_loss=0.08723, over 3815470.52 frames. ], batch size: 54, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:30:04,489 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 15:30:05,322 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 15:30:13,439 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8015, 3.2731, 3.3117, 3.3206, 1.4199, 3.1802, 2.8236, 3.0479], + device='cuda:2'), covar=tensor([0.1227, 0.0710, 0.0667, 0.0673, 0.4060, 0.0586, 0.0636, 0.1162], + device='cuda:2'), in_proj_covar=tensor([0.0625, 0.0558, 0.0738, 0.0626, 0.0683, 0.0497, 0.0459, 0.0691], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 15:30:36,750 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.525e+02 7.204e+02 8.995e+02 2.228e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-01 15:30:50,960 INFO [train.py:903] (2/4) Epoch 9, batch 5350, loss[loss=0.2266, simple_loss=0.2942, pruned_loss=0.07948, over 19803.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3151, pruned_loss=0.08659, over 3831941.54 frames. ], batch size: 49, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:30:58,309 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:31:24,004 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 15:31:37,380 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8354, 1.9434, 2.0443, 2.8202, 1.7418, 2.4272, 2.4512, 1.9097], + device='cuda:2'), covar=tensor([0.3238, 0.2702, 0.1305, 0.1536, 0.3250, 0.1360, 0.3006, 0.2423], + device='cuda:2'), in_proj_covar=tensor([0.0754, 0.0763, 0.0624, 0.0872, 0.0750, 0.0671, 0.0776, 0.0683], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:31:52,157 INFO [train.py:903] (2/4) Epoch 9, batch 5400, loss[loss=0.2759, simple_loss=0.3405, pruned_loss=0.1056, over 18027.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3148, pruned_loss=0.08651, over 3832676.92 frames. 
], batch size: 83, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:32:18,786 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0027, 5.3974, 3.0318, 4.7838, 1.1559, 5.2177, 5.3196, 5.4004], + device='cuda:2'), covar=tensor([0.0411, 0.0801, 0.1812, 0.0541, 0.3797, 0.0497, 0.0537, 0.0770], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0346, 0.0413, 0.0303, 0.0366, 0.0339, 0.0332, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 15:32:34,991 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:36,997 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 5.587e+02 7.112e+02 9.365e+02 1.948e+03, threshold=1.422e+03, percent-clipped=3.0 +2023-04-01 15:32:49,996 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:53,196 INFO [train.py:903] (2/4) Epoch 9, batch 5450, loss[loss=0.2566, simple_loss=0.3369, pruned_loss=0.08816, over 19688.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3139, pruned_loss=0.08597, over 3840183.57 frames. ], batch size: 59, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:33:06,627 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60083.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:21,799 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60096.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:55,433 INFO [train.py:903] (2/4) Epoch 9, batch 5500, loss[loss=0.2725, simple_loss=0.3451, pruned_loss=0.09994, over 19717.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3142, pruned_loss=0.08614, over 3847103.12 frames. ], batch size: 63, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:34:17,032 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 15:34:40,128 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.485e+02 6.858e+02 8.862e+02 1.983e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-04-01 15:34:56,042 INFO [train.py:903] (2/4) Epoch 9, batch 5550, loss[loss=0.2192, simple_loss=0.3004, pruned_loss=0.069, over 19599.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3142, pruned_loss=0.08603, over 3836590.83 frames. ], batch size: 57, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:35:01,110 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3351, 1.5287, 2.0117, 1.6214, 3.1472, 2.5663, 3.3184, 1.5061], + device='cuda:2'), covar=tensor([0.2020, 0.3372, 0.2025, 0.1565, 0.1321, 0.1634, 0.1565, 0.3183], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0552, 0.0564, 0.0420, 0.0581, 0.0472, 0.0635, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:35:01,735 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 15:35:42,125 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:45,679 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9025, 1.6146, 1.8303, 2.1603, 1.8173, 1.9557, 1.7668, 1.9678], + device='cuda:2'), covar=tensor([0.0984, 0.1714, 0.1192, 0.0857, 0.1243, 0.0434, 0.1071, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0351, 0.0289, 0.0239, 0.0297, 0.0240, 0.0274, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:35:48,946 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60216.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:49,881 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 15:35:57,517 INFO [train.py:903] (2/4) Epoch 9, batch 5600, loss[loss=0.2213, simple_loss=0.2841, pruned_loss=0.07925, over 19744.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3144, pruned_loss=0.08584, over 3843940.72 frames. ], batch size: 46, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:36:13,009 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:31,695 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:41,619 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.287e+02 5.903e+02 7.092e+02 9.595e+02 1.671e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 15:36:42,017 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:55,412 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 15:36:58,107 INFO [train.py:903] (2/4) Epoch 9, batch 5650, loss[loss=0.2398, simple_loss=0.3175, pruned_loss=0.081, over 19653.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3152, pruned_loss=0.08689, over 3833022.48 frames. ], batch size: 55, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:37:12,668 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5391, 1.3631, 1.3368, 1.8189, 1.4326, 1.7542, 1.6850, 1.6117], + device='cuda:2'), covar=tensor([0.0783, 0.1015, 0.1041, 0.0686, 0.0780, 0.0757, 0.0919, 0.0690], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0232, 0.0230, 0.0257, 0.0245, 0.0218, 0.0203, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:37:29,798 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0006, 1.7508, 1.9079, 2.2769, 4.4399, 1.2742, 2.4215, 4.7131], + device='cuda:2'), covar=tensor([0.0271, 0.2447, 0.2512, 0.1397, 0.0640, 0.2324, 0.1261, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0325, 0.0335, 0.0307, 0.0333, 0.0321, 0.0312, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:37:45,049 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 15:37:47,718 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7855, 1.4964, 1.4304, 2.0823, 1.6545, 2.1347, 2.1614, 1.8995], + device='cuda:2'), covar=tensor([0.0763, 0.0948, 0.1049, 0.0863, 0.0877, 0.0625, 0.0774, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0232, 0.0231, 0.0258, 0.0246, 0.0219, 0.0203, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:38:00,338 INFO [train.py:903] (2/4) Epoch 9, batch 5700, loss[loss=0.2334, simple_loss=0.3157, pruned_loss=0.07555, over 18325.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3156, pruned_loss=0.08685, over 3834357.64 frames. ], batch size: 84, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:38:02,966 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:45,025 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.631e+02 6.670e+02 7.834e+02 2.342e+03, threshold=1.334e+03, percent-clipped=2.0 +2023-04-01 15:38:53,015 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:59,676 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 15:39:00,755 INFO [train.py:903] (2/4) Epoch 9, batch 5750, loss[loss=0.2198, simple_loss=0.2938, pruned_loss=0.07287, over 19630.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3155, pruned_loss=0.08684, over 3831741.43 frames. ], batch size: 50, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:39:07,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 15:39:12,818 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 15:39:25,316 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8155, 1.9006, 2.0481, 2.6280, 1.7562, 2.4603, 2.4331, 1.9592], + device='cuda:2'), covar=tensor([0.3207, 0.2642, 0.1249, 0.1496, 0.3026, 0.1256, 0.3028, 0.2335], + device='cuda:2'), in_proj_covar=tensor([0.0754, 0.0765, 0.0628, 0.0869, 0.0752, 0.0668, 0.0775, 0.0689], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:39:36,974 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:39:50,847 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:00,906 INFO [train.py:903] (2/4) Epoch 9, batch 5800, loss[loss=0.2621, simple_loss=0.3343, pruned_loss=0.09497, over 19546.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3154, pruned_loss=0.08724, over 3823519.84 frames. ], batch size: 56, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:40:06,303 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:16,603 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. 
limit=2.0 +2023-04-01 15:40:22,431 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:44,649 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6216, 1.3271, 1.3387, 1.7168, 1.3939, 1.6910, 1.7893, 1.6112], + device='cuda:2'), covar=tensor([0.0763, 0.0975, 0.1021, 0.0690, 0.0795, 0.0740, 0.0772, 0.0643], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0230, 0.0230, 0.0255, 0.0244, 0.0215, 0.0201, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:40:45,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.333e+02 6.303e+02 7.771e+02 9.979e+02 2.257e+03, threshold=1.554e+03, percent-clipped=12.0 +2023-04-01 15:41:01,367 INFO [train.py:903] (2/4) Epoch 9, batch 5850, loss[loss=0.2747, simple_loss=0.3377, pruned_loss=0.1059, over 19338.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3165, pruned_loss=0.08822, over 3805575.77 frames. ], batch size: 66, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:41:48,428 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7538, 1.5759, 1.4962, 1.8435, 1.8605, 1.6277, 1.4795, 1.7405], + device='cuda:2'), covar=tensor([0.0975, 0.1422, 0.1334, 0.0946, 0.1040, 0.0524, 0.1154, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0348, 0.0288, 0.0237, 0.0295, 0.0240, 0.0274, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:41:56,034 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:02,435 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 15:42:03,583 INFO [train.py:903] (2/4) Epoch 9, batch 5900, loss[loss=0.199, simple_loss=0.2768, pruned_loss=0.06066, over 19493.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3157, pruned_loss=0.08751, over 3815981.75 frames. ], batch size: 49, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:42:10,687 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:22,840 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 15:42:25,399 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60542.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:42,824 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:47,838 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.460e+02 7.799e+02 9.723e+02 2.713e+03, threshold=1.560e+03, percent-clipped=1.0 +2023-04-01 15:42:48,015 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:01,170 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 15:43:03,513 INFO [train.py:903] (2/4) Epoch 9, batch 5950, loss[loss=0.2419, simple_loss=0.3111, pruned_loss=0.08641, over 18292.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3165, pruned_loss=0.0884, over 3814206.92 frames. 
], batch size: 84, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:43:07,541 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-01 15:43:13,168 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:27,271 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5276, 1.3087, 1.4630, 1.5634, 3.1019, 0.9468, 2.1502, 3.4176], + device='cuda:2'), covar=tensor([0.0478, 0.2528, 0.2674, 0.1595, 0.0678, 0.2500, 0.1254, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0331, 0.0339, 0.0311, 0.0337, 0.0324, 0.0316, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:43:41,945 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1135, 1.2443, 1.5973, 1.3459, 2.2862, 1.9481, 2.3943, 0.9463], + device='cuda:2'), covar=tensor([0.2458, 0.3908, 0.2317, 0.2071, 0.1495, 0.2087, 0.1494, 0.3834], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0546, 0.0563, 0.0420, 0.0579, 0.0472, 0.0631, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:43:45,244 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:03,641 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:04,412 INFO [train.py:903] (2/4) Epoch 9, batch 6000, loss[loss=0.2319, simple_loss=0.3078, pruned_loss=0.07796, over 19435.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3153, pruned_loss=0.08754, over 3809953.81 frames. ], batch size: 70, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:44:04,412 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 15:44:16,877 INFO [train.py:937] (2/4) Epoch 9, validation: loss=0.1828, simple_loss=0.2835, pruned_loss=0.04105, over 944034.00 frames. +2023-04-01 15:44:16,877 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 15:44:37,651 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 15:44:47,019 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:48,299 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1037, 2.1109, 2.3131, 3.3140, 2.0769, 3.2313, 2.7897, 2.0826], + device='cuda:2'), covar=tensor([0.3787, 0.3288, 0.1344, 0.1670, 0.3821, 0.1310, 0.3052, 0.2563], + device='cuda:2'), in_proj_covar=tensor([0.0754, 0.0761, 0.0623, 0.0869, 0.0754, 0.0670, 0.0775, 0.0682], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:44:59,990 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. 
limit=5.0 +2023-04-01 15:45:02,309 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.785e+02 7.329e+02 9.590e+02 1.552e+03, threshold=1.466e+03, percent-clipped=0.0 +2023-04-01 15:45:15,389 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4585, 1.2304, 1.0612, 1.2377, 1.0958, 1.2801, 1.0551, 1.2876], + device='cuda:2'), covar=tensor([0.1125, 0.1257, 0.1737, 0.1134, 0.1352, 0.0908, 0.1556, 0.1001], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0351, 0.0289, 0.0239, 0.0296, 0.0240, 0.0274, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:45:17,211 INFO [train.py:903] (2/4) Epoch 9, batch 6050, loss[loss=0.2234, simple_loss=0.3068, pruned_loss=0.07, over 19319.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.314, pruned_loss=0.08616, over 3825233.52 frames. ], batch size: 66, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:45:19,772 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:46:18,277 INFO [train.py:903] (2/4) Epoch 9, batch 6100, loss[loss=0.2544, simple_loss=0.3268, pruned_loss=0.09096, over 19666.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3129, pruned_loss=0.08531, over 3831395.75 frames. ], batch size: 60, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:46:26,650 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 15:46:34,585 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:03,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.339e+02 6.367e+02 8.531e+02 1.806e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-01 15:47:18,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7617, 1.5354, 1.3663, 1.5993, 1.4735, 1.4366, 1.2585, 1.6369], + device='cuda:2'), covar=tensor([0.0944, 0.1256, 0.1462, 0.0965, 0.1205, 0.0686, 0.1366, 0.0716], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0352, 0.0290, 0.0239, 0.0297, 0.0241, 0.0273, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 15:47:18,985 INFO [train.py:903] (2/4) Epoch 9, batch 6150, loss[loss=0.243, simple_loss=0.3084, pruned_loss=0.08876, over 19856.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3136, pruned_loss=0.08588, over 3830801.82 frames. ], batch size: 52, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:47:19,410 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:22,985 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.18 vs. 
limit=5.0 +2023-04-01 15:47:30,976 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7278, 1.4030, 1.4084, 2.2270, 1.8259, 1.9510, 2.1799, 1.8147], + device='cuda:2'), covar=tensor([0.0769, 0.0994, 0.1049, 0.0722, 0.0746, 0.0746, 0.0740, 0.0643], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0231, 0.0233, 0.0257, 0.0247, 0.0216, 0.0203, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 15:47:33,344 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:44,097 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 15:47:48,656 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:48,685 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:03,528 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:04,589 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60811.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:18,464 INFO [train.py:903] (2/4) Epoch 9, batch 6200, loss[loss=0.2398, simple_loss=0.3128, pruned_loss=0.08339, over 19761.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3148, pruned_loss=0.08691, over 3833034.33 frames. ], batch size: 54, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:48:19,743 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:34,276 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:49:03,643 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.706e+02 5.965e+02 7.614e+02 9.330e+02 2.107e+03, threshold=1.523e+03, percent-clipped=6.0 +2023-04-01 15:49:19,598 INFO [train.py:903] (2/4) Epoch 9, batch 6250, loss[loss=0.2116, simple_loss=0.2912, pruned_loss=0.06604, over 19850.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.314, pruned_loss=0.0867, over 3831809.95 frames. ], batch size: 52, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:49:38,719 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0695, 4.4643, 4.7903, 4.7383, 1.6275, 4.3866, 3.9115, 4.4471], + device='cuda:2'), covar=tensor([0.1143, 0.0614, 0.0465, 0.0512, 0.5008, 0.0517, 0.0515, 0.0934], + device='cuda:2'), in_proj_covar=tensor([0.0630, 0.0564, 0.0751, 0.0630, 0.0693, 0.0501, 0.0468, 0.0696], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 15:49:39,308 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 15:49:49,669 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 15:50:15,442 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.89 vs. limit=5.0 +2023-04-01 15:50:20,301 INFO [train.py:903] (2/4) Epoch 9, batch 6300, loss[loss=0.2099, simple_loss=0.2806, pruned_loss=0.06955, over 19638.00 frames. 
], tot_loss[loss=0.2435, simple_loss=0.3137, pruned_loss=0.08668, over 3838497.04 frames. ], batch size: 50, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:50:31,341 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:01,602 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60956.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:05,839 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.456e+02 6.999e+02 8.896e+02 1.665e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 15:51:21,366 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-01 15:51:21,556 INFO [train.py:903] (2/4) Epoch 9, batch 6350, loss[loss=0.2474, simple_loss=0.3193, pruned_loss=0.08776, over 18247.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.313, pruned_loss=0.08623, over 3842740.26 frames. ], batch size: 83, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:22,394 INFO [train.py:903] (2/4) Epoch 9, batch 6400, loss[loss=0.1905, simple_loss=0.2602, pruned_loss=0.06039, over 19741.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.313, pruned_loss=0.08664, over 3832210.94 frames. ], batch size: 46, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:53:07,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 6.196e+02 7.384e+02 8.672e+02 1.804e+03, threshold=1.477e+03, percent-clipped=4.0 +2023-04-01 15:53:12,814 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-01 15:53:22,486 INFO [train.py:903] (2/4) Epoch 9, batch 6450, loss[loss=0.2488, simple_loss=0.3222, pruned_loss=0.08769, over 18398.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3134, pruned_loss=0.08719, over 3831339.13 frames. ], batch size: 83, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:53:30,381 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:31,426 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:31,557 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7993, 4.9301, 5.5949, 5.5392, 2.0068, 5.2077, 4.5639, 5.2233], + device='cuda:2'), covar=tensor([0.1192, 0.0821, 0.0456, 0.0452, 0.4884, 0.0478, 0.0483, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0627, 0.0560, 0.0750, 0.0627, 0.0690, 0.0498, 0.0461, 0.0694], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 15:54:05,107 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 15:54:22,076 INFO [train.py:903] (2/4) Epoch 9, batch 6500, loss[loss=0.2541, simple_loss=0.3102, pruned_loss=0.09897, over 19774.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3141, pruned_loss=0.08781, over 3825762.96 frames. ], batch size: 47, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:54:27,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-01 15:55:06,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.706e+02 8.302e+02 1.012e+03 2.679e+03, threshold=1.660e+03, percent-clipped=7.0 +2023-04-01 15:55:21,994 INFO [train.py:903] (2/4) Epoch 9, batch 6550, loss[loss=0.2097, simple_loss=0.2807, pruned_loss=0.0693, over 19756.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3144, pruned_loss=0.08744, over 3829788.15 frames. ], batch size: 47, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:55:50,210 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:56:24,271 INFO [train.py:903] (2/4) Epoch 9, batch 6600, loss[loss=0.2347, simple_loss=0.3133, pruned_loss=0.078, over 19287.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3147, pruned_loss=0.08757, over 3809663.60 frames. ], batch size: 66, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:09,254 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 5.654e+02 6.784e+02 8.392e+02 1.741e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 15:57:24,975 INFO [train.py:903] (2/4) Epoch 9, batch 6650, loss[loss=0.2117, simple_loss=0.295, pruned_loss=0.06422, over 19687.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3154, pruned_loss=0.08762, over 3807404.51 frames. ], batch size: 53, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:48,789 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3569, 1.4306, 1.8815, 1.5786, 2.6807, 2.3444, 2.8941, 1.0721], + device='cuda:2'), covar=tensor([0.2185, 0.3747, 0.2007, 0.1731, 0.1357, 0.1740, 0.1318, 0.3623], + device='cuda:2'), in_proj_covar=tensor([0.0476, 0.0554, 0.0570, 0.0427, 0.0586, 0.0479, 0.0639, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 15:58:25,585 INFO [train.py:903] (2/4) Epoch 9, batch 6700, loss[loss=0.2188, simple_loss=0.2994, pruned_loss=0.06906, over 19606.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3161, pruned_loss=0.08847, over 3788475.57 frames. ], batch size: 52, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:08,506 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.683e+02 7.501e+02 9.618e+02 2.603e+03, threshold=1.500e+03, percent-clipped=7.0 +2023-04-01 15:59:23,016 INFO [train.py:903] (2/4) Epoch 9, batch 6750, loss[loss=0.1783, simple_loss=0.2518, pruned_loss=0.0524, over 19735.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3168, pruned_loss=0.08896, over 3793030.55 frames. ], batch size: 47, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:18,968 INFO [train.py:903] (2/4) Epoch 9, batch 6800, loss[loss=0.2484, simple_loss=0.3259, pruned_loss=0.08546, over 19513.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.316, pruned_loss=0.08837, over 3804044.50 frames. ], batch size: 56, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:19,095 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:03,214 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 16:01:04,314 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 16:01:06,612 INFO [train.py:903] (2/4) Epoch 10, batch 0, loss[loss=0.2373, simple_loss=0.3166, pruned_loss=0.07894, over 19657.00 frames. 
], tot_loss[loss=0.2373, simple_loss=0.3166, pruned_loss=0.07894, over 19657.00 frames. ], batch size: 58, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:01:06,612 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 16:01:17,503 INFO [train.py:937] (2/4) Epoch 10, validation: loss=0.1825, simple_loss=0.2836, pruned_loss=0.04072, over 944034.00 frames. +2023-04-01 16:01:17,504 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 16:01:17,977 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:27,607 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.689e+02 6.760e+02 8.116e+02 1.440e+03, threshold=1.352e+03, percent-clipped=0.0 +2023-04-01 16:01:29,696 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 16:01:37,950 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3076, 1.5156, 1.9824, 2.5652, 1.9540, 2.2727, 2.6865, 2.3599], + device='cuda:2'), covar=tensor([0.0807, 0.1319, 0.1056, 0.1090, 0.1046, 0.0964, 0.1003, 0.0790], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0233, 0.0231, 0.0261, 0.0247, 0.0218, 0.0206, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 16:01:48,108 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:02:17,429 INFO [train.py:903] (2/4) Epoch 10, batch 50, loss[loss=0.2135, simple_loss=0.2926, pruned_loss=0.06716, over 19827.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3163, pruned_loss=0.08786, over 866598.65 frames. ], batch size: 52, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:02:50,336 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 16:03:03,263 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:03:18,479 INFO [train.py:903] (2/4) Epoch 10, batch 100, loss[loss=0.2154, simple_loss=0.2872, pruned_loss=0.07181, over 19742.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3138, pruned_loss=0.08661, over 1533140.62 frames. ], batch size: 45, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:03:24,182 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 16:03:29,311 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.266e+02 7.755e+02 9.384e+02 2.029e+03, threshold=1.551e+03, percent-clipped=6.0 +2023-04-01 16:03:42,458 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7251, 1.4316, 1.4319, 1.9583, 1.5597, 1.9229, 2.1366, 1.8285], + device='cuda:2'), covar=tensor([0.0785, 0.1002, 0.1051, 0.0886, 0.0865, 0.0757, 0.0800, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0227, 0.0226, 0.0254, 0.0241, 0.0214, 0.0201, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 16:04:19,611 INFO [train.py:903] (2/4) Epoch 10, batch 150, loss[loss=0.26, simple_loss=0.3326, pruned_loss=0.0937, over 19533.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3124, pruned_loss=0.08479, over 2049206.21 frames. 
], batch size: 56, lr: 8.56e-03, grad_scale: 16.0 +2023-04-01 16:05:12,399 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 16:05:20,093 INFO [train.py:903] (2/4) Epoch 10, batch 200, loss[loss=0.2752, simple_loss=0.3404, pruned_loss=0.105, over 19541.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3121, pruned_loss=0.08441, over 2439815.38 frames. ], batch size: 56, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:05:32,341 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.325e+02 6.934e+02 9.117e+02 1.602e+03, threshold=1.387e+03, percent-clipped=3.0 +2023-04-01 16:05:34,721 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5082, 1.3789, 1.1221, 1.4390, 1.2087, 1.3276, 1.1431, 1.3954], + device='cuda:2'), covar=tensor([0.0970, 0.1113, 0.1463, 0.0889, 0.1116, 0.0573, 0.1220, 0.0725], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0354, 0.0289, 0.0238, 0.0297, 0.0243, 0.0274, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:05:35,097 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 16:05:45,486 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0844, 5.1641, 5.9543, 5.9089, 2.0827, 5.5725, 4.7877, 5.5053], + device='cuda:2'), covar=tensor([0.1254, 0.0588, 0.0467, 0.0459, 0.5062, 0.0521, 0.0534, 0.1059], + device='cuda:2'), in_proj_covar=tensor([0.0632, 0.0563, 0.0751, 0.0633, 0.0694, 0.0506, 0.0465, 0.0701], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 16:06:20,989 INFO [train.py:903] (2/4) Epoch 10, batch 250, loss[loss=0.2133, simple_loss=0.2787, pruned_loss=0.07396, over 19742.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3114, pruned_loss=0.08421, over 2744998.55 frames. ], batch size: 46, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:07:19,644 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:07:20,496 INFO [train.py:903] (2/4) Epoch 10, batch 300, loss[loss=0.2159, simple_loss=0.2891, pruned_loss=0.07134, over 19776.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3125, pruned_loss=0.08421, over 2988600.87 frames. ], batch size: 48, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:07:32,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.563e+02 6.785e+02 8.281e+02 1.821e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 16:07:33,031 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:12,888 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61794.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:20,604 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 16:08:21,871 INFO [train.py:903] (2/4) Epoch 10, batch 350, loss[loss=0.2515, simple_loss=0.3262, pruned_loss=0.08845, over 19303.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3146, pruned_loss=0.08577, over 3174401.79 frames. 
], batch size: 66, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:08:44,871 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61819.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:09:23,123 INFO [train.py:903] (2/4) Epoch 10, batch 400, loss[loss=0.262, simple_loss=0.3446, pruned_loss=0.08968, over 19587.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.315, pruned_loss=0.08635, over 3319389.39 frames. ], batch size: 61, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:09:36,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+02 5.438e+02 6.846e+02 8.745e+02 2.106e+03, threshold=1.369e+03, percent-clipped=7.0 +2023-04-01 16:10:26,713 INFO [train.py:903] (2/4) Epoch 10, batch 450, loss[loss=0.216, simple_loss=0.2875, pruned_loss=0.07229, over 19398.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3123, pruned_loss=0.08512, over 3429904.49 frames. ], batch size: 47, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:10:29,707 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 16:10:50,945 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 16:10:50,979 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 16:11:05,202 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:11:19,248 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9444, 4.3743, 4.6438, 4.6036, 1.6152, 4.3279, 3.7387, 4.2832], + device='cuda:2'), covar=tensor([0.1184, 0.0636, 0.0445, 0.0490, 0.4922, 0.0479, 0.0588, 0.0945], + device='cuda:2'), in_proj_covar=tensor([0.0624, 0.0559, 0.0743, 0.0631, 0.0687, 0.0500, 0.0460, 0.0690], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 16:11:27,991 INFO [train.py:903] (2/4) Epoch 10, batch 500, loss[loss=0.2873, simple_loss=0.3486, pruned_loss=0.113, over 17415.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3118, pruned_loss=0.08443, over 3532077.89 frames. ], batch size: 101, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:11:30,095 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-01 16:11:39,882 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.473e+02 6.472e+02 7.882e+02 1.512e+03, threshold=1.294e+03, percent-clipped=2.0 +2023-04-01 16:12:30,819 INFO [train.py:903] (2/4) Epoch 10, batch 550, loss[loss=0.2461, simple_loss=0.3097, pruned_loss=0.0913, over 19364.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3121, pruned_loss=0.08436, over 3596497.38 frames. ], batch size: 47, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:32,127 INFO [train.py:903] (2/4) Epoch 10, batch 600, loss[loss=0.2351, simple_loss=0.3145, pruned_loss=0.07786, over 19618.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3126, pruned_loss=0.08445, over 3657800.56 frames. ], batch size: 57, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:42,187 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-01 16:13:46,035 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.499e+02 5.366e+02 7.096e+02 8.541e+02 1.663e+03, threshold=1.419e+03, percent-clipped=2.0 +2023-04-01 16:14:11,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 16:14:26,434 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:14:35,312 INFO [train.py:903] (2/4) Epoch 10, batch 650, loss[loss=0.2379, simple_loss=0.3121, pruned_loss=0.08182, over 19527.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3122, pruned_loss=0.08461, over 3699877.57 frames. ], batch size: 56, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:14:40,049 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62105.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:24,608 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:39,378 INFO [train.py:903] (2/4) Epoch 10, batch 700, loss[loss=0.2601, simple_loss=0.328, pruned_loss=0.09615, over 17286.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3127, pruned_loss=0.08457, over 3731911.29 frames. ], batch size: 101, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:15:51,168 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.410e+02 5.986e+02 7.007e+02 9.230e+02 2.462e+03, threshold=1.401e+03, percent-clipped=6.0 +2023-04-01 16:15:51,691 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2396, 1.4639, 2.1467, 1.7271, 3.0752, 2.2684, 3.0566, 1.5400], + device='cuda:2'), covar=tensor([0.2341, 0.4105, 0.2205, 0.1743, 0.1470, 0.2091, 0.1815, 0.3592], + device='cuda:2'), in_proj_covar=tensor([0.0476, 0.0557, 0.0575, 0.0426, 0.0585, 0.0480, 0.0639, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 16:16:41,958 INFO [train.py:903] (2/4) Epoch 10, batch 750, loss[loss=0.2087, simple_loss=0.2766, pruned_loss=0.07044, over 19752.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.313, pruned_loss=0.08498, over 3760607.69 frames. ], batch size: 46, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:16:51,093 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:04,544 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:11,228 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5393, 1.0737, 1.2019, 1.1302, 2.1239, 0.8596, 1.8645, 2.2652], + device='cuda:2'), covar=tensor([0.0669, 0.2631, 0.2787, 0.1614, 0.0943, 0.2095, 0.0993, 0.0531], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0326, 0.0337, 0.0311, 0.0338, 0.0322, 0.0317, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:17:35,231 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.86 vs. limit=5.0 +2023-04-01 16:17:42,575 INFO [train.py:903] (2/4) Epoch 10, batch 800, loss[loss=0.2541, simple_loss=0.3173, pruned_loss=0.09547, over 19813.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.314, pruned_loss=0.08568, over 3763851.48 frames. 
], batch size: 49, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:17:54,855 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.237e+02 6.930e+02 8.487e+02 1.526e+03, threshold=1.386e+03, percent-clipped=2.0 +2023-04-01 16:17:58,888 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 16:18:14,992 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:32,364 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:43,720 INFO [train.py:903] (2/4) Epoch 10, batch 850, loss[loss=0.2658, simple_loss=0.3232, pruned_loss=0.1042, over 19408.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3133, pruned_loss=0.08498, over 3783056.74 frames. ], batch size: 48, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:37,727 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 16:19:45,736 INFO [train.py:903] (2/4) Epoch 10, batch 900, loss[loss=0.251, simple_loss=0.3258, pruned_loss=0.08812, over 19718.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3139, pruned_loss=0.08533, over 3798327.43 frames. ], batch size: 59, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:59,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 6.023e+02 7.076e+02 9.770e+02 2.916e+03, threshold=1.415e+03, percent-clipped=7.0 +2023-04-01 16:20:16,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9085, 1.9680, 1.9857, 2.6628, 1.7991, 2.5335, 2.3145, 1.8595], + device='cuda:2'), covar=tensor([0.3202, 0.2734, 0.1429, 0.1587, 0.3099, 0.1328, 0.3130, 0.2526], + device='cuda:2'), in_proj_covar=tensor([0.0762, 0.0769, 0.0629, 0.0879, 0.0759, 0.0674, 0.0774, 0.0687], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 16:20:35,896 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:20:48,993 INFO [train.py:903] (2/4) Epoch 10, batch 950, loss[loss=0.2386, simple_loss=0.3161, pruned_loss=0.08057, over 19666.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.313, pruned_loss=0.08471, over 3800698.78 frames. ], batch size: 60, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:20:50,189 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 16:20:58,633 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:21:50,239 INFO [train.py:903] (2/4) Epoch 10, batch 1000, loss[loss=0.1908, simple_loss=0.2623, pruned_loss=0.05968, over 19748.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3132, pruned_loss=0.08527, over 3790669.35 frames. 
], batch size: 46, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:01,606 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 5.469e+02 6.659e+02 8.311e+02 1.987e+03, threshold=1.332e+03, percent-clipped=4.0 +2023-04-01 16:22:07,635 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:21,459 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:31,377 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62484.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:38,767 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:43,039 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 16:22:51,156 INFO [train.py:903] (2/4) Epoch 10, batch 1050, loss[loss=0.2104, simple_loss=0.2903, pruned_loss=0.06525, over 19649.00 frames. ], tot_loss[loss=0.242, simple_loss=0.313, pruned_loss=0.08545, over 3790814.68 frames. ], batch size: 53, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:51,559 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62501.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:23:23,767 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 16:23:54,501 INFO [train.py:903] (2/4) Epoch 10, batch 1100, loss[loss=0.227, simple_loss=0.2913, pruned_loss=0.08132, over 16412.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.314, pruned_loss=0.08587, over 3789656.85 frames. ], batch size: 36, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:24:07,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.569e+02 6.927e+02 9.101e+02 1.941e+03, threshold=1.385e+03, percent-clipped=3.0 +2023-04-01 16:24:10,209 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62563.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:54,389 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:56,397 INFO [train.py:903] (2/4) Epoch 10, batch 1150, loss[loss=0.275, simple_loss=0.3462, pruned_loss=0.1019, over 19602.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3142, pruned_loss=0.08606, over 3799048.21 frames. 
], batch size: 57, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:24:59,696 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:15,618 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0315, 3.6814, 2.0615, 2.1990, 3.2834, 1.8351, 1.3218, 2.1079], + device='cuda:2'), covar=tensor([0.1072, 0.0399, 0.0888, 0.0635, 0.0392, 0.0978, 0.0820, 0.0575], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0296, 0.0321, 0.0239, 0.0231, 0.0325, 0.0285, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:25:37,689 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:53,887 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:56,106 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3477, 3.9692, 2.6582, 3.6192, 0.9515, 3.7406, 3.7335, 3.8197], + device='cuda:2'), covar=tensor([0.0624, 0.0953, 0.1659, 0.0722, 0.3653, 0.0689, 0.0739, 0.0937], + device='cuda:2'), in_proj_covar=tensor([0.0411, 0.0350, 0.0420, 0.0308, 0.0369, 0.0344, 0.0339, 0.0370], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 16:25:58,141 INFO [train.py:903] (2/4) Epoch 10, batch 1200, loss[loss=0.2746, simple_loss=0.345, pruned_loss=0.1021, over 19211.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3141, pruned_loss=0.08594, over 3811358.83 frames. ], batch size: 70, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:26:09,507 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.581e+02 6.902e+02 9.588e+02 2.703e+03, threshold=1.380e+03, percent-clipped=8.0 +2023-04-01 16:26:24,980 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:32,555 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 16:26:39,737 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62684.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:59,722 INFO [train.py:903] (2/4) Epoch 10, batch 1250, loss[loss=0.2593, simple_loss=0.3332, pruned_loss=0.09266, over 18659.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3148, pruned_loss=0.0863, over 3799174.58 frames. ], batch size: 74, lr: 8.49e-03, grad_scale: 4.0 +2023-04-01 16:28:00,855 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:01,675 INFO [train.py:903] (2/4) Epoch 10, batch 1300, loss[loss=0.2503, simple_loss=0.3237, pruned_loss=0.08843, over 17310.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3157, pruned_loss=0.08685, over 3799957.67 frames. 
], batch size: 101, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:28:05,077 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:16,589 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.064e+02 6.596e+02 8.862e+02 1.920e+03, threshold=1.319e+03, percent-clipped=1.0 +2023-04-01 16:28:27,844 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2442, 2.1485, 1.6468, 1.4138, 2.0259, 1.1749, 1.0813, 1.5650], + device='cuda:2'), covar=tensor([0.0838, 0.0648, 0.0907, 0.0619, 0.0458, 0.1122, 0.0654, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0298, 0.0323, 0.0241, 0.0233, 0.0324, 0.0286, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:28:51,872 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:29:05,053 INFO [train.py:903] (2/4) Epoch 10, batch 1350, loss[loss=0.2481, simple_loss=0.3114, pruned_loss=0.09245, over 19364.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3156, pruned_loss=0.08716, over 3807652.79 frames. ], batch size: 47, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:29:16,162 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 16:29:34,244 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5148, 1.0864, 1.3158, 1.1661, 2.1454, 0.8267, 1.9839, 2.3142], + device='cuda:2'), covar=tensor([0.0667, 0.2500, 0.2514, 0.1517, 0.0880, 0.2047, 0.0913, 0.0477], + device='cuda:2'), in_proj_covar=tensor([0.0343, 0.0326, 0.0342, 0.0313, 0.0339, 0.0324, 0.0320, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:30:07,951 INFO [train.py:903] (2/4) Epoch 10, batch 1400, loss[loss=0.216, simple_loss=0.2854, pruned_loss=0.07331, over 19747.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3148, pruned_loss=0.08637, over 3811780.25 frames. ], batch size: 47, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:30:13,135 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62855.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:20,951 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.591e+02 6.749e+02 8.217e+02 1.554e+03, threshold=1.350e+03, percent-clipped=4.0 +2023-04-01 16:30:28,011 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:44,952 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:31:07,151 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 16:31:09,402 INFO [train.py:903] (2/4) Epoch 10, batch 1450, loss[loss=0.2132, simple_loss=0.2942, pruned_loss=0.06606, over 19767.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3136, pruned_loss=0.08524, over 3818706.83 frames. 
], batch size: 54, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:31:16,514 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:07,386 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:11,882 INFO [train.py:903] (2/4) Epoch 10, batch 1500, loss[loss=0.221, simple_loss=0.285, pruned_loss=0.0785, over 19796.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3124, pruned_loss=0.08489, over 3832942.33 frames. ], batch size: 47, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:32:27,780 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 5.675e+02 6.883e+02 8.252e+02 2.690e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-01 16:33:17,113 INFO [train.py:903] (2/4) Epoch 10, batch 1550, loss[loss=0.3318, simple_loss=0.3688, pruned_loss=0.1474, over 13403.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3129, pruned_loss=0.08539, over 3827208.49 frames. ], batch size: 136, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:33:23,627 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:42,947 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:49,632 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:53,320 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:18,377 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1178, 1.1750, 1.6854, 1.3008, 2.7475, 3.6543, 3.3796, 3.9223], + device='cuda:2'), covar=tensor([0.1661, 0.3445, 0.3046, 0.2064, 0.0507, 0.0180, 0.0216, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0293, 0.0323, 0.0248, 0.0213, 0.0149, 0.0205, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 16:34:20,237 INFO [train.py:903] (2/4) Epoch 10, batch 1600, loss[loss=0.2912, simple_loss=0.3579, pruned_loss=0.1123, over 18879.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3136, pruned_loss=0.08552, over 3819375.77 frames. ], batch size: 74, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:34:24,384 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 16:34:33,009 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.609e+02 5.367e+02 6.713e+02 8.573e+02 1.582e+03, threshold=1.343e+03, percent-clipped=2.0 +2023-04-01 16:34:33,388 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:42,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 16:35:21,340 INFO [train.py:903] (2/4) Epoch 10, batch 1650, loss[loss=0.2087, simple_loss=0.2905, pruned_loss=0.06342, over 19776.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3148, pruned_loss=0.08618, over 3825872.54 frames. 
], batch size: 54, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:35:51,907 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:05,473 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:14,719 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:21,905 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:23,503 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 16:36:23,856 INFO [train.py:903] (2/4) Epoch 10, batch 1700, loss[loss=0.2183, simple_loss=0.2844, pruned_loss=0.07612, over 19308.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3149, pruned_loss=0.0867, over 3825423.83 frames. ], batch size: 44, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:36:38,459 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.819e+02 7.178e+02 9.040e+02 2.117e+03, threshold=1.436e+03, percent-clipped=7.0 +2023-04-01 16:36:58,831 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5700, 4.1619, 2.5969, 3.7176, 1.1384, 3.8506, 3.9752, 4.0416], + device='cuda:2'), covar=tensor([0.0661, 0.1014, 0.1984, 0.0738, 0.3690, 0.0762, 0.0669, 0.0901], + device='cuda:2'), in_proj_covar=tensor([0.0421, 0.0356, 0.0426, 0.0312, 0.0374, 0.0352, 0.0347, 0.0377], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 16:37:04,092 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 16:37:28,660 INFO [train.py:903] (2/4) Epoch 10, batch 1750, loss[loss=0.1843, simple_loss=0.2575, pruned_loss=0.05557, over 19388.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3143, pruned_loss=0.08644, over 3826825.90 frames. ], batch size: 48, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:14,984 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,199 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,883 INFO [train.py:903] (2/4) Epoch 10, batch 1800, loss[loss=0.2453, simple_loss=0.3171, pruned_loss=0.0868, over 19520.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.313, pruned_loss=0.08545, over 3832706.39 frames. ], batch size: 54, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:44,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.870e+02 7.659e+02 9.293e+02 2.596e+03, threshold=1.532e+03, percent-clipped=8.0 +2023-04-01 16:39:03,308 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:30,797 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 16:39:32,970 INFO [train.py:903] (2/4) Epoch 10, batch 1850, loss[loss=0.272, simple_loss=0.3424, pruned_loss=0.1008, over 19532.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.313, pruned_loss=0.08533, over 3832204.80 frames. 
], batch size: 64, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:39:35,803 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:54,120 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:05,775 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6495, 1.4179, 1.3866, 2.0843, 1.5565, 2.1071, 2.0518, 1.8244], + device='cuda:2'), covar=tensor([0.0845, 0.1021, 0.1066, 0.0913, 0.0987, 0.0680, 0.0886, 0.0689], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0228, 0.0226, 0.0257, 0.0240, 0.0213, 0.0200, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 16:40:10,161 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 16:40:26,753 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:35,819 INFO [train.py:903] (2/4) Epoch 10, batch 1900, loss[loss=0.2973, simple_loss=0.354, pruned_loss=0.1203, over 12858.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3123, pruned_loss=0.08417, over 3827412.61 frames. ], batch size: 136, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:40:52,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 5.562e+02 7.038e+02 8.618e+02 1.834e+03, threshold=1.408e+03, percent-clipped=3.0 +2023-04-01 16:40:55,982 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 16:41:01,856 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 16:41:24,913 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 16:41:37,249 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:41:40,406 INFO [train.py:903] (2/4) Epoch 10, batch 1950, loss[loss=0.214, simple_loss=0.2952, pruned_loss=0.06634, over 19477.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3137, pruned_loss=0.08555, over 3828309.89 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:42:09,764 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 16:42:10,548 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:27,982 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:31,455 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:44,762 INFO [train.py:903] (2/4) Epoch 10, batch 2000, loss[loss=0.2578, simple_loss=0.3243, pruned_loss=0.09565, over 19657.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3137, pruned_loss=0.08517, over 3831709.38 frames. 
], batch size: 55, lr: 8.44e-03, grad_scale: 8.0 +2023-04-01 16:43:00,248 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.731e+02 5.221e+02 6.641e+02 8.776e+02 2.044e+03, threshold=1.328e+03, percent-clipped=3.0 +2023-04-01 16:43:23,894 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2831, 1.4585, 1.5908, 1.5442, 2.8314, 1.0449, 2.2097, 3.0792], + device='cuda:2'), covar=tensor([0.0428, 0.2298, 0.2391, 0.1593, 0.0696, 0.2317, 0.1154, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0342, 0.0326, 0.0343, 0.0313, 0.0342, 0.0326, 0.0322, 0.0345], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:43:43,942 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 16:43:47,385 INFO [train.py:903] (2/4) Epoch 10, batch 2050, loss[loss=0.214, simple_loss=0.2974, pruned_loss=0.06528, over 19646.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3138, pruned_loss=0.08559, over 3823467.67 frames. ], batch size: 53, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:43:53,594 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0159, 1.6878, 1.5970, 2.0552, 1.8507, 1.7819, 1.6337, 1.8854], + device='cuda:2'), covar=tensor([0.0852, 0.1512, 0.1464, 0.0994, 0.1214, 0.0488, 0.1182, 0.0682], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0349, 0.0285, 0.0235, 0.0292, 0.0242, 0.0272, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 16:43:53,634 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:03,928 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 16:44:05,940 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 16:44:26,418 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:28,404 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 16:44:50,577 INFO [train.py:903] (2/4) Epoch 10, batch 2100, loss[loss=0.2673, simple_loss=0.3326, pruned_loss=0.101, over 19671.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3135, pruned_loss=0.08477, over 3837863.70 frames. ], batch size: 53, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:45:06,402 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.645e+02 7.348e+02 9.688e+02 2.351e+03, threshold=1.470e+03, percent-clipped=4.0 +2023-04-01 16:45:10,684 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2049, 1.3257, 1.6694, 1.4908, 2.2950, 2.0310, 2.4222, 0.9555], + device='cuda:2'), covar=tensor([0.2355, 0.3898, 0.2201, 0.1785, 0.1508, 0.1943, 0.1531, 0.3706], + device='cuda:2'), in_proj_covar=tensor([0.0476, 0.0557, 0.0578, 0.0424, 0.0584, 0.0475, 0.0637, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 16:45:26,364 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-01 16:45:32,510 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:37,362 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:46,582 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 16:45:54,833 INFO [train.py:903] (2/4) Epoch 10, batch 2150, loss[loss=0.1976, simple_loss=0.2893, pruned_loss=0.05295, over 19784.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3133, pruned_loss=0.08461, over 3825349.74 frames. ], batch size: 56, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:46:00,149 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63604.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:46:58,771 INFO [train.py:903] (2/4) Epoch 10, batch 2200, loss[loss=0.1878, simple_loss=0.2617, pruned_loss=0.05693, over 19322.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3128, pruned_loss=0.08441, over 3824443.45 frames. ], batch size: 44, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:47:13,918 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.200e+02 5.616e+02 6.875e+02 8.680e+02 1.983e+03, threshold=1.375e+03, percent-clipped=4.0 +2023-04-01 16:48:00,198 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:48:03,390 INFO [train.py:903] (2/4) Epoch 10, batch 2250, loss[loss=0.228, simple_loss=0.3111, pruned_loss=0.07246, over 19684.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.313, pruned_loss=0.08427, over 3829079.59 frames. ], batch size: 60, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:49:04,913 INFO [train.py:903] (2/4) Epoch 10, batch 2300, loss[loss=0.1908, simple_loss=0.265, pruned_loss=0.05828, over 19388.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3125, pruned_loss=0.08449, over 3831389.57 frames. ], batch size: 48, lr: 8.42e-03, grad_scale: 4.0 +2023-04-01 16:49:19,549 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 16:49:23,046 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.837e+02 6.965e+02 8.642e+02 2.205e+03, threshold=1.393e+03, percent-clipped=3.0 +2023-04-01 16:49:28,478 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.14 vs. limit=2.0 +2023-04-01 16:49:45,523 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63782.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:49:47,896 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:09,266 INFO [train.py:903] (2/4) Epoch 10, batch 2350, loss[loss=0.2604, simple_loss=0.3412, pruned_loss=0.0898, over 19607.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3115, pruned_loss=0.08422, over 3829782.66 frames. ], batch size: 57, lr: 8.41e-03, grad_scale: 4.0 +2023-04-01 16:50:44,334 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:50,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-01 16:51:10,045 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 16:51:13,618 INFO [train.py:903] (2/4) Epoch 10, batch 2400, loss[loss=0.2288, simple_loss=0.3068, pruned_loss=0.07538, over 19667.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3115, pruned_loss=0.08416, over 3835451.24 frames. ], batch size: 55, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:51:29,400 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:51:30,159 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.763e+02 5.915e+02 7.022e+02 9.244e+02 2.907e+03, threshold=1.404e+03, percent-clipped=10.0 +2023-04-01 16:52:12,817 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:15,944 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:17,971 INFO [train.py:903] (2/4) Epoch 10, batch 2450, loss[loss=0.2505, simple_loss=0.3137, pruned_loss=0.09364, over 19678.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3095, pruned_loss=0.08281, over 3839550.23 frames. ], batch size: 53, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:52:55,569 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:59,286 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:16,594 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:19,963 INFO [train.py:903] (2/4) Epoch 10, batch 2500, loss[loss=0.1618, simple_loss=0.2422, pruned_loss=0.0407, over 19374.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3102, pruned_loss=0.08283, over 3837848.21 frames. ], batch size: 47, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:53:24,600 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:35,919 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.566e+02 5.261e+02 6.826e+02 9.233e+02 1.687e+03, threshold=1.365e+03, percent-clipped=6.0 +2023-04-01 16:53:48,735 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-04-01 16:53:56,267 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:54:23,070 INFO [train.py:903] (2/4) Epoch 10, batch 2550, loss[loss=0.2849, simple_loss=0.3368, pruned_loss=0.1165, over 19786.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3112, pruned_loss=0.08334, over 3827936.46 frames. 
], batch size: 56, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:54:37,665 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0251, 2.0822, 2.1940, 2.7036, 1.8939, 2.5495, 2.3950, 1.9823], + device='cuda:2'), covar=tensor([0.3239, 0.2746, 0.1411, 0.1727, 0.3066, 0.1347, 0.3096, 0.2497], + device='cuda:2'), in_proj_covar=tensor([0.0768, 0.0777, 0.0635, 0.0875, 0.0759, 0.0678, 0.0777, 0.0691], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 16:55:13,497 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:15,688 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 16:55:20,832 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:26,449 INFO [train.py:903] (2/4) Epoch 10, batch 2600, loss[loss=0.2131, simple_loss=0.2962, pruned_loss=0.06504, over 19659.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3112, pruned_loss=0.08327, over 3829235.19 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:41,485 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:42,248 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.625e+02 5.188e+02 6.411e+02 7.864e+02 1.888e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 16:56:27,598 INFO [train.py:903] (2/4) Epoch 10, batch 2650, loss[loss=0.2364, simple_loss=0.3075, pruned_loss=0.08263, over 19630.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3132, pruned_loss=0.0849, over 3837441.57 frames. ], batch size: 50, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:56:34,117 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 16:56:43,807 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 16:56:45,315 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 16:57:21,034 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-01 16:57:29,287 INFO [train.py:903] (2/4) Epoch 10, batch 2700, loss[loss=0.1925, simple_loss=0.2709, pruned_loss=0.05702, over 19485.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.311, pruned_loss=0.08365, over 3846943.71 frames. 
], batch size: 49, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:57:32,890 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64153.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:35,361 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:45,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.891e+02 6.747e+02 8.779e+02 3.257e+03, threshold=1.349e+03, percent-clipped=11.0 +2023-04-01 16:57:56,913 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:04,119 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64178.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:07,405 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:33,082 INFO [train.py:903] (2/4) Epoch 10, batch 2750, loss[loss=0.2432, simple_loss=0.3119, pruned_loss=0.08723, over 19655.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3128, pruned_loss=0.08466, over 3839335.88 frames. ], batch size: 58, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:58:41,444 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,127 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,901 INFO [train.py:903] (2/4) Epoch 10, batch 2800, loss[loss=0.2137, simple_loss=0.2873, pruned_loss=0.07005, over 19478.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3127, pruned_loss=0.08462, over 3839237.63 frames. ], batch size: 49, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 16:59:52,862 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.243e+02 6.695e+02 7.923e+02 2.040e+03, threshold=1.339e+03, percent-clipped=2.0 +2023-04-01 17:00:10,664 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:21,824 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:40,325 INFO [train.py:903] (2/4) Epoch 10, batch 2850, loss[loss=0.2128, simple_loss=0.2881, pruned_loss=0.06879, over 19661.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3125, pruned_loss=0.08455, over 3832011.50 frames. 
], batch size: 53, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:00:41,943 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:50,231 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9277, 1.5911, 1.4591, 2.2156, 1.8029, 2.2195, 2.2601, 2.0656], + device='cuda:2'), covar=tensor([0.0691, 0.0877, 0.0981, 0.0777, 0.0792, 0.0605, 0.0726, 0.0593], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0223, 0.0223, 0.0251, 0.0236, 0.0211, 0.0199, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 17:01:03,210 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:06,420 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:12,450 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:35,577 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:39,735 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 17:01:43,204 INFO [train.py:903] (2/4) Epoch 10, batch 2900, loss[loss=0.2536, simple_loss=0.3229, pruned_loss=0.09214, over 19080.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3137, pruned_loss=0.08543, over 3826871.93 frames. ], batch size: 69, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:01:58,189 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:58,975 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 6.015e+02 7.349e+02 1.031e+03 2.008e+03, threshold=1.470e+03, percent-clipped=12.0 +2023-04-01 17:02:14,477 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2318, 2.3059, 2.3693, 2.9430, 2.3371, 2.8323, 2.6661, 2.2168], + device='cuda:2'), covar=tensor([0.2583, 0.2051, 0.1089, 0.1369, 0.2322, 0.1028, 0.2090, 0.1794], + device='cuda:2'), in_proj_covar=tensor([0.0765, 0.0776, 0.0634, 0.0872, 0.0761, 0.0683, 0.0777, 0.0690], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:02:26,637 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:36,433 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64393.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:46,691 INFO [train.py:903] (2/4) Epoch 10, batch 2950, loss[loss=0.2788, simple_loss=0.3574, pruned_loss=0.1001, over 19688.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3137, pruned_loss=0.08567, over 3822098.56 frames. ], batch size: 59, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:44,310 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 17:03:48,250 INFO [train.py:903] (2/4) Epoch 10, batch 3000, loss[loss=0.2169, simple_loss=0.2823, pruned_loss=0.07569, over 18186.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3126, pruned_loss=0.08454, over 3831169.51 frames. 
], batch size: 40, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,250 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 17:04:00,871 INFO [train.py:937] (2/4) Epoch 10, validation: loss=0.1811, simple_loss=0.2816, pruned_loss=0.04036, over 944034.00 frames. +2023-04-01 17:04:00,872 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 17:04:02,406 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9582, 1.3092, 1.0505, 0.9047, 1.2172, 0.8148, 0.8633, 1.1089], + device='cuda:2'), covar=tensor([0.0514, 0.0548, 0.0615, 0.0472, 0.0335, 0.0796, 0.0439, 0.0359], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0294, 0.0321, 0.0239, 0.0231, 0.0319, 0.0286, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:04:04,355 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 17:04:13,261 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-01 17:04:18,119 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 5.533e+02 6.661e+02 8.174e+02 1.809e+03, threshold=1.332e+03, percent-clipped=2.0 +2023-04-01 17:04:41,335 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64483.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:03,229 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:04,026 INFO [train.py:903] (2/4) Epoch 10, batch 3050, loss[loss=0.2357, simple_loss=0.3125, pruned_loss=0.07943, over 19538.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3132, pruned_loss=0.08465, over 3816983.41 frames. ], batch size: 56, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:05:48,559 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9121, 0.8535, 0.8529, 1.0294, 0.8914, 1.0193, 1.0221, 0.9207], + device='cuda:2'), covar=tensor([0.0679, 0.0781, 0.0824, 0.0545, 0.0675, 0.0603, 0.0665, 0.0622], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0253, 0.0239, 0.0216, 0.0202, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 17:05:57,704 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6663, 1.6637, 1.4013, 1.8596, 1.9500, 1.4331, 1.4581, 1.7290], + device='cuda:2'), covar=tensor([0.1079, 0.1638, 0.1676, 0.1124, 0.1239, 0.0917, 0.1431, 0.0861], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0347, 0.0284, 0.0235, 0.0292, 0.0242, 0.0271, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:05:57,730 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:06,595 INFO [train.py:903] (2/4) Epoch 10, batch 3100, loss[loss=0.2073, simple_loss=0.2786, pruned_loss=0.06798, over 15088.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3137, pruned_loss=0.08532, over 3793324.56 frames. 
], batch size: 33, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:06:22,864 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.961e+02 7.216e+02 8.690e+02 2.208e+03, threshold=1.443e+03, percent-clipped=3.0 +2023-04-01 17:06:27,966 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:40,885 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64578.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:01,273 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:10,484 INFO [train.py:903] (2/4) Epoch 10, batch 3150, loss[loss=0.2762, simple_loss=0.3387, pruned_loss=0.1069, over 13678.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3137, pruned_loss=0.08531, over 3798086.82 frames. ], batch size: 136, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:07:13,015 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:17,527 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:21,725 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5701, 4.0811, 4.2215, 4.2113, 1.4529, 3.9374, 3.4600, 3.8773], + device='cuda:2'), covar=tensor([0.1306, 0.0702, 0.0555, 0.0547, 0.5001, 0.0660, 0.0598, 0.1051], + device='cuda:2'), in_proj_covar=tensor([0.0645, 0.0581, 0.0768, 0.0645, 0.0713, 0.0518, 0.0476, 0.0712], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 17:07:31,636 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 17:07:39,762 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 17:08:10,650 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:12,600 INFO [train.py:903] (2/4) Epoch 10, batch 3200, loss[loss=0.2057, simple_loss=0.2807, pruned_loss=0.0654, over 19719.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3121, pruned_loss=0.08437, over 3811228.42 frames. ], batch size: 51, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:08:30,175 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.483e+02 6.754e+02 8.204e+02 1.644e+03, threshold=1.351e+03, percent-clipped=2.0 +2023-04-01 17:08:32,807 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:43,807 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:16,032 INFO [train.py:903] (2/4) Epoch 10, batch 3250, loss[loss=0.2145, simple_loss=0.2891, pruned_loss=0.06995, over 19398.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.311, pruned_loss=0.08361, over 3816102.33 frames. 
], batch size: 48, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:09:24,059 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64707.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:26,685 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:42,843 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:20,329 INFO [train.py:903] (2/4) Epoch 10, batch 3300, loss[loss=0.223, simple_loss=0.3053, pruned_loss=0.07035, over 19272.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3102, pruned_loss=0.0831, over 3819235.92 frames. ], batch size: 66, lr: 8.35e-03, grad_scale: 8.0 +2023-04-01 17:10:26,820 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:28,776 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 17:10:35,537 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:37,429 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.384e+02 6.605e+02 8.281e+02 2.311e+03, threshold=1.321e+03, percent-clipped=9.0 +2023-04-01 17:10:43,924 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:57,743 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:23,425 INFO [train.py:903] (2/4) Epoch 10, batch 3350, loss[loss=0.2366, simple_loss=0.3007, pruned_loss=0.08625, over 19730.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3108, pruned_loss=0.08362, over 3797876.40 frames. ], batch size: 51, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:11:49,568 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:55,099 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:12:04,644 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5984, 1.4375, 1.3845, 1.8876, 1.5636, 1.8831, 1.8577, 1.7787], + device='cuda:2'), covar=tensor([0.0703, 0.0838, 0.0900, 0.0703, 0.0789, 0.0660, 0.0756, 0.0567], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0227, 0.0225, 0.0253, 0.0239, 0.0215, 0.0201, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 17:12:24,018 INFO [train.py:903] (2/4) Epoch 10, batch 3400, loss[loss=0.2705, simple_loss=0.3406, pruned_loss=0.1002, over 19539.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3131, pruned_loss=0.0852, over 3794608.63 frames. ], batch size: 56, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:12:42,252 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.893e+02 5.724e+02 7.342e+02 8.665e+02 1.913e+03, threshold=1.468e+03, percent-clipped=6.0 +2023-04-01 17:13:27,830 INFO [train.py:903] (2/4) Epoch 10, batch 3450, loss[loss=0.2481, simple_loss=0.3241, pruned_loss=0.08605, over 19500.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3122, pruned_loss=0.08452, over 3794727.66 frames. 
], batch size: 64, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:13:35,095 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 17:13:36,889 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 17:14:20,582 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:30,656 INFO [train.py:903] (2/4) Epoch 10, batch 3500, loss[loss=0.2473, simple_loss=0.3205, pruned_loss=0.08698, over 19715.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3127, pruned_loss=0.08467, over 3807110.49 frames. ], batch size: 51, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:14:31,775 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:48,656 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 5.616e+02 6.822e+02 8.996e+02 1.764e+03, threshold=1.364e+03, percent-clipped=1.0 +2023-04-01 17:14:49,107 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:50,319 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3871, 1.5845, 1.9384, 1.6336, 2.6095, 2.2014, 2.9644, 1.1461], + device='cuda:2'), covar=tensor([0.1927, 0.3304, 0.2054, 0.1545, 0.1389, 0.1798, 0.1362, 0.3367], + device='cuda:2'), in_proj_covar=tensor([0.0485, 0.0569, 0.0588, 0.0430, 0.0589, 0.0486, 0.0648, 0.0486], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:15:05,375 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-01 17:15:19,919 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:25,283 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:34,054 INFO [train.py:903] (2/4) Epoch 10, batch 3550, loss[loss=0.2535, simple_loss=0.3184, pruned_loss=0.09429, over 19762.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3127, pruned_loss=0.0844, over 3804728.29 frames. ], batch size: 54, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:15:44,374 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:34,669 INFO [train.py:903] (2/4) Epoch 10, batch 3600, loss[loss=0.2993, simple_loss=0.3576, pruned_loss=0.1205, over 14206.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3134, pruned_loss=0.08504, over 3796176.98 frames. 
], batch size: 135, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:16:52,002 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.791e+02 6.813e+02 8.568e+02 1.743e+03, threshold=1.363e+03, percent-clipped=4.0 +2023-04-01 17:16:52,162 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:53,550 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:02,288 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:08,152 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:35,955 INFO [train.py:903] (2/4) Epoch 10, batch 3650, loss[loss=0.2644, simple_loss=0.3317, pruned_loss=0.09853, over 19582.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3148, pruned_loss=0.08608, over 3794676.93 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:17:38,638 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:42,758 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:51,946 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:05,510 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:36,262 INFO [train.py:903] (2/4) Epoch 10, batch 3700, loss[loss=0.24, simple_loss=0.3043, pruned_loss=0.08785, over 19474.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3139, pruned_loss=0.08578, over 3800058.55 frames. ], batch size: 49, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:18:53,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 5.629e+02 7.088e+02 8.754e+02 1.818e+03, threshold=1.418e+03, percent-clipped=5.0 +2023-04-01 17:19:12,232 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:33,227 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65198.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:37,039 INFO [train.py:903] (2/4) Epoch 10, batch 3750, loss[loss=0.2371, simple_loss=0.3187, pruned_loss=0.07775, over 19659.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.315, pruned_loss=0.08646, over 3802430.46 frames. ], batch size: 58, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:02,488 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:03,653 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:10,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:36,852 INFO [train.py:903] (2/4) Epoch 10, batch 3800, loss[loss=0.2418, simple_loss=0.3045, pruned_loss=0.0895, over 19471.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.314, pruned_loss=0.08617, over 3803022.74 frames. 
], batch size: 49, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:54,003 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 5.620e+02 6.620e+02 8.005e+02 1.692e+03, threshold=1.324e+03, percent-clipped=4.0 +2023-04-01 17:21:06,344 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 17:21:30,857 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8563, 3.3315, 1.8417, 1.5961, 3.1981, 1.2856, 1.0707, 2.0851], + device='cuda:2'), covar=tensor([0.1096, 0.0459, 0.0931, 0.0933, 0.0426, 0.1235, 0.0998, 0.0542], + device='cuda:2'), in_proj_covar=tensor([0.0288, 0.0298, 0.0323, 0.0245, 0.0234, 0.0319, 0.0288, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:21:37,759 INFO [train.py:903] (2/4) Epoch 10, batch 3850, loss[loss=0.2865, simple_loss=0.3556, pruned_loss=0.1087, over 19467.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3139, pruned_loss=0.08558, over 3808364.09 frames. ], batch size: 64, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:21:40,430 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:03,652 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:13,880 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2368, 1.9994, 1.5646, 1.3193, 1.8998, 1.1549, 1.1756, 1.7494], + device='cuda:2'), covar=tensor([0.0785, 0.0630, 0.0949, 0.0660, 0.0446, 0.1118, 0.0604, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0288, 0.0298, 0.0324, 0.0244, 0.0235, 0.0319, 0.0289, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:22:21,626 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:33,946 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:37,979 INFO [train.py:903] (2/4) Epoch 10, batch 3900, loss[loss=0.37, simple_loss=0.3974, pruned_loss=0.1713, over 13331.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3121, pruned_loss=0.08479, over 3794701.06 frames. ], batch size: 135, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:22:55,831 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 5.653e+02 7.180e+02 9.093e+02 1.633e+03, threshold=1.436e+03, percent-clipped=2.0 +2023-04-01 17:23:15,725 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65381.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:40,379 INFO [train.py:903] (2/4) Epoch 10, batch 3950, loss[loss=0.2379, simple_loss=0.3132, pruned_loss=0.08128, over 19658.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3126, pruned_loss=0.08474, over 3788482.30 frames. ], batch size: 60, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:23:40,402 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 17:23:46,493 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:46,521 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3542, 2.2355, 1.9164, 1.7256, 1.4934, 1.8483, 0.4478, 1.2979], + device='cuda:2'), covar=tensor([0.0397, 0.0368, 0.0314, 0.0564, 0.0812, 0.0527, 0.0874, 0.0694], + device='cuda:2'), in_proj_covar=tensor([0.0329, 0.0328, 0.0325, 0.0346, 0.0419, 0.0344, 0.0305, 0.0321], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:23:58,947 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:23,271 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:41,315 INFO [train.py:903] (2/4) Epoch 10, batch 4000, loss[loss=0.2534, simple_loss=0.3243, pruned_loss=0.09123, over 19667.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3116, pruned_loss=0.08398, over 3785022.66 frames. ], batch size: 58, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:24:44,063 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65453.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:48,569 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3903, 1.1963, 1.2677, 1.6928, 1.3607, 1.5935, 1.7471, 1.4646], + device='cuda:2'), covar=tensor([0.0899, 0.1005, 0.1105, 0.0805, 0.0873, 0.0787, 0.0791, 0.0738], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0227, 0.0225, 0.0251, 0.0241, 0.0215, 0.0201, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 17:24:54,631 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65461.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:58,705 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.016e+02 5.448e+02 7.265e+02 9.508e+02 1.942e+03, threshold=1.453e+03, percent-clipped=2.0 +2023-04-01 17:25:01,419 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 17:25:14,534 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:23,066 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 17:25:23,382 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:41,795 INFO [train.py:903] (2/4) Epoch 10, batch 4050, loss[loss=0.3073, simple_loss=0.3619, pruned_loss=0.1263, over 17993.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3119, pruned_loss=0.08442, over 3786865.44 frames. ], batch size: 83, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:25:45,338 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:53,316 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:11,621 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. 
limit=2.0 +2023-04-01 17:26:19,252 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:22,554 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5409, 1.2265, 1.1807, 1.4296, 1.1705, 1.3216, 1.1507, 1.3728], + device='cuda:2'), covar=tensor([0.0925, 0.1120, 0.1422, 0.0874, 0.1075, 0.0577, 0.1233, 0.0751], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0347, 0.0288, 0.0237, 0.0295, 0.0242, 0.0273, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:26:40,673 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65550.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:26:41,425 INFO [train.py:903] (2/4) Epoch 10, batch 4100, loss[loss=0.2951, simple_loss=0.3671, pruned_loss=0.1115, over 18798.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3134, pruned_loss=0.08528, over 3797533.83 frames. ], batch size: 74, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:26:47,365 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9829, 1.9336, 1.7446, 1.5498, 1.3886, 1.5764, 0.6296, 0.9988], + device='cuda:2'), covar=tensor([0.0434, 0.0415, 0.0252, 0.0421, 0.0901, 0.0493, 0.0733, 0.0635], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0331, 0.0330, 0.0349, 0.0423, 0.0348, 0.0307, 0.0322], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:26:59,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.925e+02 7.032e+02 8.463e+02 2.911e+03, threshold=1.406e+03, percent-clipped=6.0 +2023-04-01 17:27:03,494 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-01 17:27:11,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 17:27:33,863 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3426, 3.9648, 2.4953, 3.5059, 0.8726, 3.6634, 3.7028, 3.9400], + device='cuda:2'), covar=tensor([0.0680, 0.1033, 0.1957, 0.0747, 0.3875, 0.0719, 0.0801, 0.0922], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0364, 0.0425, 0.0317, 0.0376, 0.0357, 0.0351, 0.0382], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 17:27:42,544 INFO [train.py:903] (2/4) Epoch 10, batch 4150, loss[loss=0.2448, simple_loss=0.3181, pruned_loss=0.08573, over 19672.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3135, pruned_loss=0.0851, over 3816342.20 frames. 
], batch size: 58, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:17,145 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3196, 1.4211, 1.8166, 1.5944, 2.7102, 2.1925, 2.9571, 1.0944], + device='cuda:2'), covar=tensor([0.2045, 0.3480, 0.2089, 0.1611, 0.1325, 0.1769, 0.1326, 0.3423], + device='cuda:2'), in_proj_covar=tensor([0.0481, 0.0563, 0.0584, 0.0428, 0.0585, 0.0481, 0.0646, 0.0482], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:28:39,194 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:28:43,357 INFO [train.py:903] (2/4) Epoch 10, batch 4200, loss[loss=0.2285, simple_loss=0.2939, pruned_loss=0.08157, over 19682.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3128, pruned_loss=0.08428, over 3824840.91 frames. ], batch size: 53, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:43,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 17:28:59,414 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.075e+02 5.851e+02 6.725e+02 9.221e+02 2.199e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-01 17:29:42,454 INFO [train.py:903] (2/4) Epoch 10, batch 4250, loss[loss=0.2309, simple_loss=0.2959, pruned_loss=0.08292, over 19474.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3132, pruned_loss=0.08507, over 3815613.26 frames. ], batch size: 49, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:29:54,078 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:29:55,751 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 17:30:06,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 17:30:22,807 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:30:43,328 INFO [train.py:903] (2/4) Epoch 10, batch 4300, loss[loss=0.1897, simple_loss=0.2702, pruned_loss=0.05459, over 19730.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3126, pruned_loss=0.08478, over 3822625.49 frames. ], batch size: 51, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:30:56,185 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:00,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 5.358e+02 7.223e+02 8.854e+02 2.636e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 17:31:28,027 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:35,316 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 17:31:43,143 INFO [train.py:903] (2/4) Epoch 10, batch 4350, loss[loss=0.2303, simple_loss=0.3079, pruned_loss=0.07633, over 19839.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3115, pruned_loss=0.0841, over 3830567.48 frames. 
], batch size: 52, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:31:58,659 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:32:03,765 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 17:32:44,687 INFO [train.py:903] (2/4) Epoch 10, batch 4400, loss[loss=0.2439, simple_loss=0.2971, pruned_loss=0.09533, over 19735.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3115, pruned_loss=0.08401, over 3829656.82 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:32:50,639 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65856.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:33:00,115 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.748e+02 7.554e+02 9.760e+02 1.805e+03, threshold=1.511e+03, percent-clipped=4.0 +2023-04-01 17:33:10,945 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 17:33:19,849 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 17:33:36,098 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65894.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:33:44,058 INFO [train.py:903] (2/4) Epoch 10, batch 4450, loss[loss=0.1888, simple_loss=0.2608, pruned_loss=0.05834, over 19400.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3107, pruned_loss=0.08392, over 3811969.37 frames. ], batch size: 48, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:34:12,368 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-01 17:34:34,015 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1009, 1.3648, 1.4172, 1.3813, 2.7218, 0.9200, 2.1134, 2.9291], + device='cuda:2'), covar=tensor([0.0476, 0.2270, 0.2476, 0.1567, 0.0704, 0.2317, 0.1037, 0.0363], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0334, 0.0353, 0.0320, 0.0344, 0.0332, 0.0325, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:34:45,020 INFO [train.py:903] (2/4) Epoch 10, batch 4500, loss[loss=0.2294, simple_loss=0.308, pruned_loss=0.07538, over 19636.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3107, pruned_loss=0.08386, over 3809218.09 frames. ], batch size: 57, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:35:01,306 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.376e+02 6.626e+02 8.325e+02 1.832e+03, threshold=1.325e+03, percent-clipped=3.0 +2023-04-01 17:35:47,224 INFO [train.py:903] (2/4) Epoch 10, batch 4550, loss[loss=0.2154, simple_loss=0.282, pruned_loss=0.0744, over 19768.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3103, pruned_loss=0.0839, over 3814019.63 frames. ], batch size: 47, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:35:56,040 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-01 17:35:56,375 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66009.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:36:02,839 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3704, 1.4776, 1.6903, 1.5630, 2.5319, 2.2663, 2.7333, 0.9594], + device='cuda:2'), covar=tensor([0.1944, 0.3378, 0.2042, 0.1541, 0.1155, 0.1641, 0.1144, 0.3252], + device='cuda:2'), in_proj_covar=tensor([0.0479, 0.0558, 0.0582, 0.0428, 0.0584, 0.0483, 0.0641, 0.0481], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:36:07,038 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:17,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 17:36:36,252 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:45,871 INFO [train.py:903] (2/4) Epoch 10, batch 4600, loss[loss=0.2545, simple_loss=0.3295, pruned_loss=0.08975, over 19657.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3121, pruned_loss=0.08463, over 3806257.91 frames. ], batch size: 59, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:37:00,875 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 5.689e+02 6.858e+02 9.462e+02 1.667e+03, threshold=1.372e+03, percent-clipped=8.0 +2023-04-01 17:37:43,790 INFO [train.py:903] (2/4) Epoch 10, batch 4650, loss[loss=0.2098, simple_loss=0.2823, pruned_loss=0.06865, over 19402.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3117, pruned_loss=0.08433, over 3810275.85 frames. ], batch size: 48, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:38:00,804 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 17:38:10,791 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 17:38:44,399 INFO [train.py:903] (2/4) Epoch 10, batch 4700, loss[loss=0.2193, simple_loss=0.2862, pruned_loss=0.07622, over 19752.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3131, pruned_loss=0.08516, over 3807080.17 frames. ], batch size: 48, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:38:57,797 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:39:00,971 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.007e+02 6.063e+02 7.892e+02 1.039e+03 2.104e+03, threshold=1.578e+03, percent-clipped=6.0 +2023-04-01 17:39:05,609 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 17:39:17,516 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66178.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:39:17,930 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-01 17:39:24,694 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. 
limit=2.0 +2023-04-01 17:39:43,294 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66200.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:39:44,101 INFO [train.py:903] (2/4) Epoch 10, batch 4750, loss[loss=0.2434, simple_loss=0.3098, pruned_loss=0.0885, over 19543.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3131, pruned_loss=0.08507, over 3821538.29 frames. ], batch size: 54, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:40:33,544 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9593, 1.1483, 1.4151, 0.5841, 2.1087, 2.4445, 2.1753, 2.5735], + device='cuda:2'), covar=tensor([0.1482, 0.3360, 0.2947, 0.2283, 0.0461, 0.0249, 0.0320, 0.0271], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0293, 0.0316, 0.0248, 0.0211, 0.0151, 0.0203, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:40:43,464 INFO [train.py:903] (2/4) Epoch 10, batch 4800, loss[loss=0.2499, simple_loss=0.3244, pruned_loss=0.08773, over 19644.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3136, pruned_loss=0.08527, over 3834825.82 frames. ], batch size: 55, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:41:01,208 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.914e+02 7.080e+02 8.206e+02 1.527e+03, threshold=1.416e+03, percent-clipped=0.0 +2023-04-01 17:41:01,683 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66265.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:41:15,537 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:41:31,335 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66290.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:41:44,769 INFO [train.py:903] (2/4) Epoch 10, batch 4850, loss[loss=0.2307, simple_loss=0.2858, pruned_loss=0.08779, over 19283.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3119, pruned_loss=0.0843, over 3834927.10 frames. ], batch size: 44, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:41:58,682 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. limit=5.0 +2023-04-01 17:42:02,678 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66315.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:42:10,364 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 17:42:30,017 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 17:42:35,769 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 17:42:35,794 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 17:42:45,282 INFO [train.py:903] (2/4) Epoch 10, batch 4900, loss[loss=0.2252, simple_loss=0.2914, pruned_loss=0.07946, over 19747.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3119, pruned_loss=0.08411, over 3832994.53 frames. ], batch size: 46, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:42:45,296 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 17:43:01,822 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.269e+02 9.008e+02 2.184e+03, threshold=1.454e+03, percent-clipped=11.0 +2023-04-01 17:43:03,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 17:43:14,253 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0784, 1.9704, 1.7419, 1.5727, 1.4809, 1.6224, 0.4402, 0.8466], + device='cuda:2'), covar=tensor([0.0332, 0.0396, 0.0275, 0.0397, 0.0849, 0.0480, 0.0719, 0.0669], + device='cuda:2'), in_proj_covar=tensor([0.0328, 0.0326, 0.0325, 0.0345, 0.0415, 0.0345, 0.0301, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:43:33,679 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-01 17:43:45,940 INFO [train.py:903] (2/4) Epoch 10, batch 4950, loss[loss=0.2758, simple_loss=0.3423, pruned_loss=0.1047, over 17595.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3099, pruned_loss=0.08303, over 3840907.66 frames. ], batch size: 101, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:02,380 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 17:44:16,766 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2520, 3.8354, 2.6348, 3.4914, 1.0515, 3.5088, 3.5505, 3.6667], + device='cuda:2'), covar=tensor([0.0850, 0.1206, 0.1966, 0.0803, 0.4001, 0.0932, 0.0827, 0.1110], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0360, 0.0425, 0.0316, 0.0375, 0.0354, 0.0349, 0.0383], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 17:44:19,201 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2997, 2.9842, 1.9690, 2.1952, 2.0827, 2.5053, 0.8868, 2.1484], + device='cuda:2'), covar=tensor([0.0520, 0.0411, 0.0547, 0.0781, 0.0765, 0.0762, 0.0945, 0.0761], + device='cuda:2'), in_proj_covar=tensor([0.0328, 0.0323, 0.0324, 0.0344, 0.0412, 0.0343, 0.0300, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:44:26,192 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 17:44:44,953 INFO [train.py:903] (2/4) Epoch 10, batch 5000, loss[loss=0.261, simple_loss=0.3126, pruned_loss=0.1048, over 19470.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3117, pruned_loss=0.08448, over 3831076.67 frames. ], batch size: 49, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:56,407 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-01 17:45:01,936 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.419e+02 6.464e+02 8.305e+02 1.628e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 17:45:02,455 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2396, 1.3084, 1.4135, 1.4065, 1.7322, 1.7941, 1.6991, 0.4898], + device='cuda:2'), covar=tensor([0.2121, 0.3554, 0.2217, 0.1600, 0.1379, 0.1938, 0.1256, 0.3712], + device='cuda:2'), in_proj_covar=tensor([0.0479, 0.0566, 0.0585, 0.0430, 0.0586, 0.0487, 0.0643, 0.0484], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:45:06,372 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 17:45:45,184 INFO [train.py:903] (2/4) Epoch 10, batch 5050, loss[loss=0.208, simple_loss=0.2824, pruned_loss=0.06684, over 17846.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3113, pruned_loss=0.08429, over 3822013.78 frames. ], batch size: 39, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:45:51,095 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:46:10,874 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66522.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:46:16,368 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3898, 2.3083, 1.7234, 1.5782, 2.1364, 1.2470, 1.1918, 1.8389], + device='cuda:2'), covar=tensor([0.0919, 0.0616, 0.0925, 0.0620, 0.0446, 0.1090, 0.0725, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0298, 0.0325, 0.0243, 0.0234, 0.0319, 0.0289, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:46:19,329 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 17:46:31,389 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66539.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:46:44,532 INFO [train.py:903] (2/4) Epoch 10, batch 5100, loss[loss=0.2514, simple_loss=0.3254, pruned_loss=0.08873, over 19628.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3121, pruned_loss=0.08456, over 3823224.46 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:46:56,036 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 17:46:57,228 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 17:46:58,844 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8481, 1.9166, 2.0379, 2.7383, 1.7838, 2.5182, 2.3972, 2.0399], + device='cuda:2'), covar=tensor([0.3391, 0.2877, 0.1440, 0.1490, 0.3038, 0.1305, 0.3237, 0.2538], + device='cuda:2'), in_proj_covar=tensor([0.0774, 0.0785, 0.0640, 0.0885, 0.0764, 0.0690, 0.0774, 0.0701], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 17:46:59,527 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. 
Duration: 25.035 +2023-04-01 17:47:01,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.864e+02 7.142e+02 9.030e+02 2.803e+03, threshold=1.428e+03, percent-clipped=6.0 +2023-04-01 17:47:04,001 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 17:47:08,998 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66571.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:39,621 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66596.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:45,736 INFO [train.py:903] (2/4) Epoch 10, batch 5150, loss[loss=0.2724, simple_loss=0.3424, pruned_loss=0.1012, over 19599.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3133, pruned_loss=0.08529, over 3802709.96 frames. ], batch size: 61, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:47:56,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 17:48:09,057 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:10,481 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:29,330 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66637.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:48:33,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 17:48:45,644 INFO [train.py:903] (2/4) Epoch 10, batch 5200, loss[loss=0.2462, simple_loss=0.32, pruned_loss=0.08623, over 19361.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3127, pruned_loss=0.08463, over 3797677.97 frames. ], batch size: 70, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:49:00,689 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 17:49:02,885 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 5.688e+02 7.032e+02 8.430e+02 1.656e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-04-01 17:49:44,616 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 17:49:46,891 INFO [train.py:903] (2/4) Epoch 10, batch 5250, loss[loss=0.199, simple_loss=0.2711, pruned_loss=0.06345, over 19745.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3118, pruned_loss=0.08419, over 3807987.22 frames. ], batch size: 45, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:50:28,125 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:50:44,996 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66749.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:50:47,100 INFO [train.py:903] (2/4) Epoch 10, batch 5300, loss[loss=0.2888, simple_loss=0.3413, pruned_loss=0.1181, over 13167.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3126, pruned_loss=0.08481, over 3806101.32 frames. ], batch size: 136, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:51:03,230 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-01 17:51:04,361 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.985e+02 7.858e+02 1.069e+03 1.957e+03, threshold=1.572e+03, percent-clipped=7.0 +2023-04-01 17:51:47,694 INFO [train.py:903] (2/4) Epoch 10, batch 5350, loss[loss=0.2547, simple_loss=0.3141, pruned_loss=0.09771, over 19772.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3129, pruned_loss=0.0851, over 3806103.48 frames. ], batch size: 49, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:52:18,672 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1312, 1.0383, 1.4066, 1.3390, 2.4798, 3.4961, 3.2797, 3.7544], + device='cuda:2'), covar=tensor([0.1888, 0.4643, 0.4103, 0.2226, 0.0692, 0.0263, 0.0268, 0.0217], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0290, 0.0316, 0.0246, 0.0209, 0.0151, 0.0202, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 17:52:19,370 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 17:52:46,947 INFO [train.py:903] (2/4) Epoch 10, batch 5400, loss[loss=0.1916, simple_loss=0.2648, pruned_loss=0.05924, over 19361.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3133, pruned_loss=0.08508, over 3814897.28 frames. ], batch size: 47, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:05,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.633e+02 6.871e+02 8.512e+02 1.525e+03, threshold=1.374e+03, percent-clipped=0.0 +2023-04-01 17:53:18,110 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:53:26,402 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66883.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:53:36,319 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9771, 5.0893, 5.9120, 5.8418, 1.7240, 5.4858, 4.6486, 5.4476], + device='cuda:2'), covar=tensor([0.1293, 0.0751, 0.0449, 0.0432, 0.5701, 0.0467, 0.0563, 0.0862], + device='cuda:2'), in_proj_covar=tensor([0.0648, 0.0577, 0.0767, 0.0640, 0.0706, 0.0527, 0.0467, 0.0705], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 17:53:37,775 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66893.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:53:47,836 INFO [train.py:903] (2/4) Epoch 10, batch 5450, loss[loss=0.2099, simple_loss=0.275, pruned_loss=0.07241, over 19758.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3142, pruned_loss=0.08554, over 3811237.76 frames. 
], batch size: 45, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:49,480 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:54:08,166 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66918.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:54:22,427 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7671, 1.3835, 1.3142, 1.7116, 1.4896, 1.5826, 1.4321, 1.5680], + device='cuda:2'), covar=tensor([0.0903, 0.1422, 0.1358, 0.0942, 0.1106, 0.0502, 0.1137, 0.0740], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0354, 0.0291, 0.0239, 0.0295, 0.0244, 0.0278, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:54:47,622 INFO [train.py:903] (2/4) Epoch 10, batch 5500, loss[loss=0.2324, simple_loss=0.2996, pruned_loss=0.08258, over 15146.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3146, pruned_loss=0.08607, over 3801480.54 frames. ], batch size: 33, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:06,147 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.851e+02 7.428e+02 9.674e+02 2.066e+03, threshold=1.486e+03, percent-clipped=6.0 +2023-04-01 17:55:10,684 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 17:55:16,788 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:36,726 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:44,732 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66998.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:55:48,014 INFO [train.py:903] (2/4) Epoch 10, batch 5550, loss[loss=0.2277, simple_loss=0.3045, pruned_loss=0.07547, over 19518.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3129, pruned_loss=0.08496, over 3801980.38 frames. ], batch size: 54, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:54,328 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 17:56:05,780 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:56:11,219 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 17:56:42,319 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 17:56:47,801 INFO [train.py:903] (2/4) Epoch 10, batch 5600, loss[loss=0.2157, simple_loss=0.2967, pruned_loss=0.06731, over 19534.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3125, pruned_loss=0.08461, over 3817538.29 frames. ], batch size: 54, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:06,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 5.918e+02 7.804e+02 1.015e+03 2.269e+03, threshold=1.561e+03, percent-clipped=7.0 +2023-04-01 17:57:38,365 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67093.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:57:47,806 INFO [train.py:903] (2/4) Epoch 10, batch 5650, loss[loss=0.2167, simple_loss=0.3011, pruned_loss=0.06622, over 19538.00 frames. 
], tot_loss[loss=0.2411, simple_loss=0.3125, pruned_loss=0.08482, over 3813211.42 frames. ], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:48,105 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:58:33,177 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 17:58:47,423 INFO [train.py:903] (2/4) Epoch 10, batch 5700, loss[loss=0.2959, simple_loss=0.3485, pruned_loss=0.1217, over 13275.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3132, pruned_loss=0.08538, over 3810184.86 frames. ], batch size: 136, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:05,256 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 6.145e+02 7.592e+02 1.064e+03 2.520e+03, threshold=1.518e+03, percent-clipped=7.0 +2023-04-01 17:59:32,590 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8382, 1.7553, 1.4454, 1.8552, 1.7607, 1.3692, 1.5002, 1.6700], + device='cuda:2'), covar=tensor([0.1053, 0.1578, 0.1582, 0.1051, 0.1350, 0.0784, 0.1449, 0.0898], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0350, 0.0288, 0.0237, 0.0292, 0.0241, 0.0275, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 17:59:47,165 INFO [train.py:903] (2/4) Epoch 10, batch 5750, loss[loss=0.2129, simple_loss=0.2864, pruned_loss=0.06973, over 19475.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3112, pruned_loss=0.08383, over 3816127.34 frames. ], batch size: 49, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:48,356 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 17:59:57,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67208.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:59:57,973 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 18:00:02,608 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 18:00:47,888 INFO [train.py:903] (2/4) Epoch 10, batch 5800, loss[loss=0.2647, simple_loss=0.3422, pruned_loss=0.09363, over 18714.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3112, pruned_loss=0.08369, over 3829012.75 frames. ], batch size: 74, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 18:00:51,546 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67254.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:01:06,016 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.662e+02 5.628e+02 6.923e+02 8.923e+02 2.275e+03, threshold=1.385e+03, percent-clipped=6.0 +2023-04-01 18:01:20,799 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67279.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:01:47,321 INFO [train.py:903] (2/4) Epoch 10, batch 5850, loss[loss=0.2537, simple_loss=0.3291, pruned_loss=0.08914, over 19667.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3127, pruned_loss=0.08461, over 3819167.73 frames. 
], batch size: 55, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:07,175 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9458, 4.4701, 2.9022, 4.0391, 0.9857, 4.2251, 4.2870, 4.3926], + device='cuda:2'), covar=tensor([0.0462, 0.0921, 0.1669, 0.0641, 0.3871, 0.0648, 0.0668, 0.0859], + device='cuda:2'), in_proj_covar=tensor([0.0419, 0.0356, 0.0420, 0.0308, 0.0372, 0.0352, 0.0345, 0.0376], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 18:02:09,367 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:02:48,429 INFO [train.py:903] (2/4) Epoch 10, batch 5900, loss[loss=0.2216, simple_loss=0.2942, pruned_loss=0.07448, over 19593.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.312, pruned_loss=0.08419, over 3822121.51 frames. ], batch size: 50, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:52,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 18:03:05,150 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.688e+02 6.588e+02 8.588e+02 1.646e+03, threshold=1.318e+03, percent-clipped=2.0 +2023-04-01 18:03:11,565 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 18:03:47,182 INFO [train.py:903] (2/4) Epoch 10, batch 5950, loss[loss=0.2381, simple_loss=0.3116, pruned_loss=0.08234, over 19277.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3125, pruned_loss=0.08438, over 3825009.02 frames. ], batch size: 66, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:04:26,293 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:38,312 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67445.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:45,822 INFO [train.py:903] (2/4) Epoch 10, batch 6000, loss[loss=0.2045, simple_loss=0.2806, pruned_loss=0.06417, over 19605.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3128, pruned_loss=0.08478, over 3814405.96 frames. ], batch size: 50, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:04:45,823 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 18:04:58,249 INFO [train.py:937] (2/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2805, pruned_loss=0.03952, over 944034.00 frames. +2023-04-01 18:04:58,250 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 18:05:15,140 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67464.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:05:17,960 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.414e+02 6.867e+02 8.657e+02 1.897e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 18:05:44,653 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67489.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:05:59,559 INFO [train.py:903] (2/4) Epoch 10, batch 6050, loss[loss=0.1873, simple_loss=0.262, pruned_loss=0.05628, over 19421.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3127, pruned_loss=0.0848, over 3824435.02 frames. 
], batch size: 48, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:06:35,098 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:06:59,165 INFO [train.py:903] (2/4) Epoch 10, batch 6100, loss[loss=0.1789, simple_loss=0.2551, pruned_loss=0.05129, over 19768.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3125, pruned_loss=0.08476, over 3813381.82 frames. ], batch size: 47, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:07:10,603 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:07:19,953 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.925e+02 6.739e+02 8.410e+02 2.337e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-01 18:07:22,757 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3100, 1.4002, 1.9499, 1.5172, 3.0084, 2.3959, 3.3362, 1.3758], + device='cuda:2'), covar=tensor([0.2192, 0.3694, 0.2227, 0.1768, 0.1523, 0.1891, 0.1630, 0.3528], + device='cuda:2'), in_proj_covar=tensor([0.0475, 0.0560, 0.0582, 0.0427, 0.0583, 0.0483, 0.0640, 0.0480], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:07:59,371 INFO [train.py:903] (2/4) Epoch 10, batch 6150, loss[loss=0.1837, simple_loss=0.263, pruned_loss=0.05219, over 19384.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3129, pruned_loss=0.08499, over 3812280.33 frames. ], batch size: 47, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:08:28,208 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 18:08:59,752 INFO [train.py:903] (2/4) Epoch 10, batch 6200, loss[loss=0.2361, simple_loss=0.3102, pruned_loss=0.081, over 19560.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3133, pruned_loss=0.08518, over 3813826.33 frames. ], batch size: 61, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:09:20,095 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.059e+02 6.776e+02 9.553e+02 2.024e+03, threshold=1.355e+03, percent-clipped=7.0 +2023-04-01 18:09:46,796 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:56,499 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:59,569 INFO [train.py:903] (2/4) Epoch 10, batch 6250, loss[loss=0.2109, simple_loss=0.2878, pruned_loss=0.06699, over 19732.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3121, pruned_loss=0.08461, over 3813613.48 frames. 
], batch size: 51, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:10:09,930 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67710.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:15,779 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67715.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:16,840 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9770, 4.3422, 4.6524, 4.6605, 1.5645, 4.3544, 3.7748, 4.2793], + device='cuda:2'), covar=tensor([0.1182, 0.0735, 0.0545, 0.0516, 0.5346, 0.0640, 0.0652, 0.1006], + device='cuda:2'), in_proj_covar=tensor([0.0649, 0.0585, 0.0772, 0.0649, 0.0711, 0.0530, 0.0476, 0.0709], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 18:10:30,843 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 18:10:59,325 INFO [train.py:903] (2/4) Epoch 10, batch 6300, loss[loss=0.1851, simple_loss=0.2623, pruned_loss=0.05395, over 19725.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3103, pruned_loss=0.08288, over 3823905.16 frames. ], batch size: 51, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:11:19,321 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 5.371e+02 6.798e+02 8.698e+02 1.915e+03, threshold=1.360e+03, percent-clipped=7.0 +2023-04-01 18:11:41,352 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1669, 4.3223, 4.7618, 4.7567, 2.7423, 4.4553, 4.0723, 4.4970], + device='cuda:2'), covar=tensor([0.1042, 0.2038, 0.0488, 0.0487, 0.3521, 0.0701, 0.0481, 0.0813], + device='cuda:2'), in_proj_covar=tensor([0.0641, 0.0578, 0.0762, 0.0643, 0.0702, 0.0523, 0.0471, 0.0700], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 18:11:58,679 INFO [train.py:903] (2/4) Epoch 10, batch 6350, loss[loss=0.2448, simple_loss=0.3301, pruned_loss=0.07978, over 19530.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3122, pruned_loss=0.08411, over 3811503.49 frames. ], batch size: 54, lr: 8.16e-03, grad_scale: 4.0 +2023-04-01 18:12:16,887 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67816.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:47,272 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:58,586 INFO [train.py:903] (2/4) Epoch 10, batch 6400, loss[loss=0.2616, simple_loss=0.3282, pruned_loss=0.09754, over 19340.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3128, pruned_loss=0.08466, over 3811371.59 frames. 
], batch size: 70, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:13:17,934 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5395, 1.0518, 1.2912, 1.2073, 2.1473, 0.9056, 1.8228, 2.3879], + device='cuda:2'), covar=tensor([0.0638, 0.2859, 0.2766, 0.1629, 0.0875, 0.2289, 0.1096, 0.0484], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0333, 0.0347, 0.0314, 0.0340, 0.0329, 0.0322, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:13:18,759 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.283e+02 6.601e+02 9.298e+02 1.582e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-01 18:13:27,001 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:13:59,329 INFO [train.py:903] (2/4) Epoch 10, batch 6450, loss[loss=0.2249, simple_loss=0.2856, pruned_loss=0.08214, over 19745.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3118, pruned_loss=0.084, over 3817672.60 frames. ], batch size: 46, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:14:42,674 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 18:14:59,326 INFO [train.py:903] (2/4) Epoch 10, batch 6500, loss[loss=0.2235, simple_loss=0.3093, pruned_loss=0.06881, over 19612.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3115, pruned_loss=0.08354, over 3810118.40 frames. ], batch size: 57, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:15:00,916 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2243, 1.2376, 1.5171, 1.3756, 2.1041, 1.9259, 2.1369, 0.7735], + device='cuda:2'), covar=tensor([0.2332, 0.4090, 0.2318, 0.1903, 0.1507, 0.2062, 0.1537, 0.3834], + device='cuda:2'), in_proj_covar=tensor([0.0481, 0.0567, 0.0586, 0.0431, 0.0588, 0.0486, 0.0644, 0.0486], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:15:05,087 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 18:15:19,445 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.752e+02 5.758e+02 7.165e+02 9.309e+02 2.290e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 18:15:47,504 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:15:57,571 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:16:01,747 INFO [train.py:903] (2/4) Epoch 10, batch 6550, loss[loss=0.2431, simple_loss=0.3204, pruned_loss=0.08294, over 19668.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3114, pruned_loss=0.08347, over 3802980.37 frames. ], batch size: 60, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:16:51,309 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:02,075 INFO [train.py:903] (2/4) Epoch 10, batch 6600, loss[loss=0.2453, simple_loss=0.3236, pruned_loss=0.08353, over 17143.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3116, pruned_loss=0.08365, over 3802483.36 frames. 
], batch size: 101, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:17:05,669 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68054.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:23,335 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.670e+02 5.913e+02 7.324e+02 8.709e+02 1.479e+03, threshold=1.465e+03, percent-clipped=1.0 +2023-04-01 18:17:40,406 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-01 18:17:41,119 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4974, 4.0575, 2.5937, 3.5892, 1.0156, 3.7818, 3.8167, 3.8984], + device='cuda:2'), covar=tensor([0.0657, 0.1090, 0.1988, 0.0750, 0.3991, 0.0759, 0.0755, 0.1018], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0364, 0.0429, 0.0315, 0.0380, 0.0358, 0.0350, 0.0381], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 18:18:02,112 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4037, 2.0600, 1.6516, 1.3894, 1.9280, 1.2229, 1.3552, 1.7523], + device='cuda:2'), covar=tensor([0.0715, 0.0568, 0.0730, 0.0555, 0.0379, 0.0898, 0.0515, 0.0360], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0297, 0.0329, 0.0244, 0.0235, 0.0317, 0.0286, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:18:02,823 INFO [train.py:903] (2/4) Epoch 10, batch 6650, loss[loss=0.2693, simple_loss=0.3314, pruned_loss=0.1036, over 17251.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3104, pruned_loss=0.08327, over 3797466.85 frames. ], batch size: 101, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:18:07,742 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68105.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:18:33,861 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7377, 4.1671, 4.4450, 4.3988, 1.4803, 4.1229, 3.4771, 4.0938], + device='cuda:2'), covar=tensor([0.1340, 0.0771, 0.0511, 0.0531, 0.5255, 0.0581, 0.0697, 0.1043], + device='cuda:2'), in_proj_covar=tensor([0.0650, 0.0589, 0.0768, 0.0653, 0.0711, 0.0531, 0.0479, 0.0711], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 18:18:52,660 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7198, 1.8377, 1.9740, 2.4349, 1.6173, 2.1326, 2.2147, 1.8763], + device='cuda:2'), covar=tensor([0.3205, 0.2585, 0.1286, 0.1554, 0.2898, 0.1417, 0.3099, 0.2369], + device='cuda:2'), in_proj_covar=tensor([0.0776, 0.0791, 0.0643, 0.0885, 0.0769, 0.0693, 0.0780, 0.0700], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:19:02,662 INFO [train.py:903] (2/4) Epoch 10, batch 6700, loss[loss=0.2385, simple_loss=0.3145, pruned_loss=0.0812, over 18005.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.31, pruned_loss=0.08269, over 3812386.54 frames. 
], batch size: 83, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:19:09,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:19:22,649 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.762e+02 5.919e+02 7.224e+02 9.287e+02 2.274e+03, threshold=1.445e+03, percent-clipped=4.0 +2023-04-01 18:19:24,156 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:00,041 INFO [train.py:903] (2/4) Epoch 10, batch 6750, loss[loss=0.2033, simple_loss=0.2821, pruned_loss=0.06223, over 19840.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3112, pruned_loss=0.0835, over 3815821.48 frames. ], batch size: 52, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:20:51,831 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:56,747 INFO [train.py:903] (2/4) Epoch 10, batch 6800, loss[loss=0.2224, simple_loss=0.2886, pruned_loss=0.07806, over 19839.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3128, pruned_loss=0.08413, over 3816881.36 frames. ], batch size: 52, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:21:15,044 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.210e+02 6.150e+02 7.610e+02 9.082e+02 1.904e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 18:21:18,302 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:21:40,677 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 18:21:41,102 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 18:21:44,651 INFO [train.py:903] (2/4) Epoch 11, batch 0, loss[loss=0.2348, simple_loss=0.3111, pruned_loss=0.0792, over 19549.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3111, pruned_loss=0.0792, over 19549.00 frames. ], batch size: 56, lr: 7.77e-03, grad_scale: 8.0 +2023-04-01 18:21:44,651 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 18:21:56,762 INFO [train.py:937] (2/4) Epoch 11, validation: loss=0.181, simple_loss=0.2818, pruned_loss=0.04012, over 944034.00 frames. +2023-04-01 18:21:56,762 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 18:22:09,366 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 18:22:22,222 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 18:22:36,719 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5109, 1.2366, 1.1100, 1.3552, 1.1407, 1.3276, 1.1569, 1.3127], + device='cuda:2'), covar=tensor([0.1022, 0.1170, 0.1444, 0.0943, 0.1123, 0.0573, 0.1298, 0.0797], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0355, 0.0290, 0.0240, 0.0297, 0.0246, 0.0277, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:22:57,937 INFO [train.py:903] (2/4) Epoch 11, batch 50, loss[loss=0.2359, simple_loss=0.3129, pruned_loss=0.07951, over 19542.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3156, pruned_loss=0.08492, over 877722.94 frames. 
], batch size: 56, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:23:15,269 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:23:35,576 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 18:23:46,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 5.747e+02 7.027e+02 9.557e+02 1.564e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 18:24:00,196 INFO [train.py:903] (2/4) Epoch 11, batch 100, loss[loss=0.2327, simple_loss=0.3002, pruned_loss=0.08262, over 19476.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3125, pruned_loss=0.08245, over 1541595.98 frames. ], batch size: 49, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:24:13,706 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 18:24:32,245 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8363, 1.9092, 2.0789, 2.5400, 1.6982, 2.3307, 2.3644, 2.0404], + device='cuda:2'), covar=tensor([0.3311, 0.2803, 0.1393, 0.1502, 0.3019, 0.1365, 0.3294, 0.2449], + device='cuda:2'), in_proj_covar=tensor([0.0773, 0.0789, 0.0641, 0.0880, 0.0771, 0.0695, 0.0779, 0.0696], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:24:42,604 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:47,978 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:57,057 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68425.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:01,050 INFO [train.py:903] (2/4) Epoch 11, batch 150, loss[loss=0.2848, simple_loss=0.3472, pruned_loss=0.1112, over 17301.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3117, pruned_loss=0.08409, over 2037854.52 frames. ], batch size: 101, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:25:11,958 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:25,310 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68449.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:25:26,525 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68450.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:36,638 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:47,789 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.281e+02 6.719e+02 8.986e+02 1.619e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-01 18:26:00,672 INFO [train.py:903] (2/4) Epoch 11, batch 200, loss[loss=0.218, simple_loss=0.299, pruned_loss=0.06848, over 19768.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3117, pruned_loss=0.08392, over 2437202.50 frames. ], batch size: 56, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:26:02,035 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-01 18:27:03,598 INFO [train.py:903] (2/4) Epoch 11, batch 250, loss[loss=0.1898, simple_loss=0.2662, pruned_loss=0.05672, over 19766.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3113, pruned_loss=0.08345, over 2741828.19 frames. ], batch size: 45, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:27:46,877 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68564.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:27:50,273 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1884, 1.3258, 1.7479, 1.1515, 2.5612, 3.2160, 3.0088, 3.3688], + device='cuda:2'), covar=tensor([0.1584, 0.3278, 0.2886, 0.2217, 0.0554, 0.0260, 0.0218, 0.0218], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0295, 0.0321, 0.0251, 0.0212, 0.0154, 0.0204, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 18:27:51,041 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.512e+02 6.613e+02 8.406e+02 1.798e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 18:28:07,068 INFO [train.py:903] (2/4) Epoch 11, batch 300, loss[loss=0.2116, simple_loss=0.287, pruned_loss=0.06811, over 19773.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3098, pruned_loss=0.08259, over 2987845.32 frames. ], batch size: 54, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:28:27,754 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:28:30,370 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 18:29:09,803 INFO [train.py:903] (2/4) Epoch 11, batch 350, loss[loss=0.2151, simple_loss=0.278, pruned_loss=0.0761, over 19763.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3089, pruned_loss=0.08245, over 3169412.15 frames. ], batch size: 47, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:29:16,646 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 18:29:44,192 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8853, 1.5464, 1.4899, 1.7967, 1.5141, 1.6387, 1.5577, 1.7862], + device='cuda:2'), covar=tensor([0.0914, 0.1458, 0.1351, 0.1004, 0.1294, 0.0520, 0.1186, 0.0697], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0353, 0.0291, 0.0239, 0.0298, 0.0247, 0.0277, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:29:57,405 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.409e+02 6.235e+02 7.842e+02 1.948e+03, threshold=1.247e+03, percent-clipped=7.0 +2023-04-01 18:30:09,870 INFO [train.py:903] (2/4) Epoch 11, batch 400, loss[loss=0.2929, simple_loss=0.3663, pruned_loss=0.1098, over 19772.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3091, pruned_loss=0.08281, over 3320755.33 frames. ], batch size: 56, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:30:40,469 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:30:54,270 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:11,175 INFO [train.py:903] (2/4) Epoch 11, batch 450, loss[loss=0.23, simple_loss=0.3111, pruned_loss=0.07446, over 18825.00 frames. 
], tot_loss[loss=0.2371, simple_loss=0.3091, pruned_loss=0.08254, over 3434273.53 frames. ], batch size: 74, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:31:25,506 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68739.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:49,566 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 18:31:50,666 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 18:31:51,684 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:59,522 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.180e+02 6.491e+02 8.751e+02 1.660e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-01 18:32:13,202 INFO [train.py:903] (2/4) Epoch 11, batch 500, loss[loss=0.2324, simple_loss=0.3077, pruned_loss=0.07852, over 19603.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3097, pruned_loss=0.08272, over 3520362.83 frames. ], batch size: 61, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:05,564 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68820.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:33:17,987 INFO [train.py:903] (2/4) Epoch 11, batch 550, loss[loss=0.2322, simple_loss=0.3104, pruned_loss=0.07699, over 19492.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3098, pruned_loss=0.08204, over 3594164.16 frames. ], batch size: 64, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:36,737 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68845.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:34:06,691 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.206e+02 6.523e+02 8.559e+02 1.532e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 18:34:18,046 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:34:21,116 INFO [train.py:903] (2/4) Epoch 11, batch 600, loss[loss=0.2089, simple_loss=0.2852, pruned_loss=0.06632, over 19556.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3085, pruned_loss=0.08129, over 3655291.75 frames. ], batch size: 52, lr: 7.73e-03, grad_scale: 8.0 +2023-04-01 18:35:07,377 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 18:35:08,213 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 18:35:09,955 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0166, 1.9254, 1.6379, 1.4387, 1.4012, 1.6085, 0.3340, 0.8487], + device='cuda:2'), covar=tensor([0.0368, 0.0394, 0.0290, 0.0435, 0.0837, 0.0481, 0.0787, 0.0695], + device='cuda:2'), in_proj_covar=tensor([0.0329, 0.0327, 0.0329, 0.0350, 0.0422, 0.0346, 0.0307, 0.0321], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 18:35:23,583 INFO [train.py:903] (2/4) Epoch 11, batch 650, loss[loss=0.2398, simple_loss=0.3176, pruned_loss=0.08101, over 19578.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3093, pruned_loss=0.08216, over 3694813.49 frames. 
], batch size: 61, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:35:36,870 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:36:14,702 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 5.561e+02 7.157e+02 8.945e+02 2.089e+03, threshold=1.431e+03, percent-clipped=8.0 +2023-04-01 18:36:26,502 INFO [train.py:903] (2/4) Epoch 11, batch 700, loss[loss=0.3034, simple_loss=0.3659, pruned_loss=0.1204, over 19486.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3093, pruned_loss=0.08215, over 3718851.22 frames. ], batch size: 64, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:36:41,608 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-01 18:37:30,460 INFO [train.py:903] (2/4) Epoch 11, batch 750, loss[loss=0.2149, simple_loss=0.297, pruned_loss=0.06641, over 19610.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3114, pruned_loss=0.08386, over 3730765.73 frames. ], batch size: 57, lr: 7.72e-03, grad_scale: 4.0 +2023-04-01 18:37:44,835 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3222, 2.1872, 1.8321, 1.7266, 1.4956, 1.7028, 0.3635, 1.1533], + device='cuda:2'), covar=tensor([0.0390, 0.0407, 0.0363, 0.0543, 0.0833, 0.0681, 0.0913, 0.0697], + device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0329, 0.0330, 0.0352, 0.0425, 0.0349, 0.0309, 0.0324], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 18:37:52,532 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69047.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:01,890 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:10,059 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:20,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.289e+02 6.423e+02 8.063e+02 1.861e+03, threshold=1.285e+03, percent-clipped=2.0 +2023-04-01 18:38:33,454 INFO [train.py:903] (2/4) Epoch 11, batch 800, loss[loss=0.2264, simple_loss=0.3039, pruned_loss=0.07451, over 18214.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.312, pruned_loss=0.08378, over 3755505.86 frames. ], batch size: 83, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:38:49,967 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 18:39:20,884 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 18:39:26,278 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9216, 2.0137, 2.1853, 2.8110, 1.8670, 2.5765, 2.5312, 2.0873], + device='cuda:2'), covar=tensor([0.3413, 0.2759, 0.1301, 0.1569, 0.3207, 0.1421, 0.3081, 0.2431], + device='cuda:2'), in_proj_covar=tensor([0.0778, 0.0794, 0.0639, 0.0887, 0.0771, 0.0697, 0.0777, 0.0701], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:39:35,210 INFO [train.py:903] (2/4) Epoch 11, batch 850, loss[loss=0.2232, simple_loss=0.3039, pruned_loss=0.07126, over 19656.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3121, pruned_loss=0.08413, over 3752121.49 frames. 
], batch size: 55, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:39:39,371 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:12,615 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:18,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:26,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.808e+02 7.359e+02 9.451e+02 2.011e+03, threshold=1.472e+03, percent-clipped=12.0 +2023-04-01 18:40:32,125 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 18:40:32,652 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6290, 1.7058, 1.9134, 2.0047, 1.3085, 1.8306, 2.1009, 1.8170], + device='cuda:2'), covar=tensor([0.3418, 0.2818, 0.1406, 0.1642, 0.3092, 0.1540, 0.3463, 0.2581], + device='cuda:2'), in_proj_covar=tensor([0.0783, 0.0797, 0.0644, 0.0889, 0.0775, 0.0700, 0.0784, 0.0707], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 18:40:38,100 INFO [train.py:903] (2/4) Epoch 11, batch 900, loss[loss=0.2434, simple_loss=0.3208, pruned_loss=0.08298, over 19697.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3108, pruned_loss=0.08356, over 3777787.36 frames. ], batch size: 59, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:40:39,673 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:48,337 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:41:42,673 INFO [train.py:903] (2/4) Epoch 11, batch 950, loss[loss=0.1893, simple_loss=0.276, pruned_loss=0.05137, over 19690.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.311, pruned_loss=0.0837, over 3790110.83 frames. ], batch size: 53, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:41:49,506 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 18:42:01,306 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:42:11,240 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 18:42:24,823 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69263.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:42:33,741 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.204e+02 6.315e+02 7.601e+02 2.315e+03, threshold=1.263e+03, percent-clipped=1.0 +2023-04-01 18:42:47,226 INFO [train.py:903] (2/4) Epoch 11, batch 1000, loss[loss=0.207, simple_loss=0.29, pruned_loss=0.06202, over 19681.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3106, pruned_loss=0.08353, over 3797220.72 frames. 
], batch size: 53, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:01,192 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:27,085 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:43,490 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 18:43:48,898 INFO [train.py:903] (2/4) Epoch 11, batch 1050, loss[loss=0.2276, simple_loss=0.3044, pruned_loss=0.07542, over 19773.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3094, pruned_loss=0.08244, over 3802952.93 frames. ], batch size: 56, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:57,034 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,165 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7854, 4.3012, 2.4213, 3.8113, 1.0078, 4.0977, 4.0652, 4.1722], + device='cuda:2'), covar=tensor([0.0514, 0.0879, 0.2151, 0.0740, 0.3939, 0.0654, 0.0757, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0423, 0.0361, 0.0428, 0.0312, 0.0375, 0.0361, 0.0350, 0.0383], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 18:43:59,252 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,381 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0951, 2.0669, 1.7311, 1.5468, 1.2560, 1.5525, 0.4833, 1.0699], + device='cuda:2'), covar=tensor([0.0645, 0.0608, 0.0496, 0.0811, 0.1305, 0.0933, 0.1085, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0336, 0.0329, 0.0331, 0.0352, 0.0425, 0.0348, 0.0309, 0.0324], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 18:44:02,820 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1023, 3.5602, 2.0830, 2.2048, 3.2788, 1.9997, 1.4130, 2.0659], + device='cuda:2'), covar=tensor([0.1228, 0.0511, 0.0945, 0.0706, 0.0434, 0.0961, 0.0931, 0.0598], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0301, 0.0328, 0.0246, 0.0237, 0.0318, 0.0289, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:44:24,571 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 18:44:38,149 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.222e+02 6.475e+02 7.672e+02 1.315e+03, threshold=1.295e+03, percent-clipped=1.0 +2023-04-01 18:44:49,374 INFO [train.py:903] (2/4) Epoch 11, batch 1100, loss[loss=0.2362, simple_loss=0.3046, pruned_loss=0.0839, over 19750.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3107, pruned_loss=0.0833, over 3811331.68 frames. 
], batch size: 51, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:44:57,816 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:24,089 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69405.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:32,701 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:39,645 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:53,091 INFO [train.py:903] (2/4) Epoch 11, batch 1150, loss[loss=0.2144, simple_loss=0.2899, pruned_loss=0.06939, over 19476.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3092, pruned_loss=0.0826, over 3813866.60 frames. ], batch size: 49, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:46:11,250 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:46:41,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.885e+02 7.181e+02 8.495e+02 1.651e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-01 18:46:55,768 INFO [train.py:903] (2/4) Epoch 11, batch 1200, loss[loss=0.2541, simple_loss=0.323, pruned_loss=0.09266, over 19734.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3093, pruned_loss=0.08271, over 3801544.24 frames. ], batch size: 63, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:47:27,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 18:47:48,982 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:53,433 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:59,002 INFO [train.py:903] (2/4) Epoch 11, batch 1250, loss[loss=0.1963, simple_loss=0.2698, pruned_loss=0.06142, over 19728.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3087, pruned_loss=0.08193, over 3813285.72 frames. ], batch size: 46, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:48:00,183 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:48:49,514 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.575e+02 6.899e+02 8.428e+02 1.860e+03, threshold=1.380e+03, percent-clipped=3.0 +2023-04-01 18:49:00,825 INFO [train.py:903] (2/4) Epoch 11, batch 1300, loss[loss=0.2383, simple_loss=0.3135, pruned_loss=0.08153, over 19531.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3081, pruned_loss=0.08171, over 3823825.92 frames. ], batch size: 54, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:49:10,209 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:49:38,645 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69607.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:49:39,101 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 18:50:04,334 INFO [train.py:903] (2/4) Epoch 11, batch 1350, loss[loss=0.2216, simple_loss=0.3094, pruned_loss=0.0669, over 19604.00 frames. 
], tot_loss[loss=0.2346, simple_loss=0.3071, pruned_loss=0.081, over 3833085.45 frames. ], batch size: 57, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:50:13,536 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:19,136 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:26,826 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:54,483 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.709e+02 6.895e+02 8.476e+02 1.895e+03, threshold=1.379e+03, percent-clipped=5.0 +2023-04-01 18:51:08,229 INFO [train.py:903] (2/4) Epoch 11, batch 1400, loss[loss=0.3078, simple_loss=0.3631, pruned_loss=0.1262, over 13226.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3075, pruned_loss=0.08143, over 3814970.65 frames. ], batch size: 136, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:51:11,991 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69682.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:51:25,033 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69692.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:51:36,497 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69702.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:03,535 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:52:09,960 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 18:52:10,953 INFO [train.py:903] (2/4) Epoch 11, batch 1450, loss[loss=0.2768, simple_loss=0.3463, pruned_loss=0.1037, over 19673.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3089, pruned_loss=0.08227, over 3817096.91 frames. ], batch size: 58, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:52:12,414 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:36,785 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:44,822 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:01,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.344e+02 6.531e+02 8.357e+02 2.062e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 18:53:10,357 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:13,435 INFO [train.py:903] (2/4) Epoch 11, batch 1500, loss[loss=0.1971, simple_loss=0.2723, pruned_loss=0.06099, over 19756.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3073, pruned_loss=0.08123, over 3835952.68 frames. 
], batch size: 51, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:53:34,492 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5046, 1.3389, 1.3374, 1.9202, 1.5366, 1.9514, 2.0139, 1.6801], + device='cuda:2'), covar=tensor([0.0870, 0.1002, 0.1014, 0.0803, 0.0821, 0.0639, 0.0788, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0226, 0.0222, 0.0250, 0.0237, 0.0214, 0.0200, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 18:53:36,864 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:43,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:54:15,934 INFO [train.py:903] (2/4) Epoch 11, batch 1550, loss[loss=0.2055, simple_loss=0.2891, pruned_loss=0.06093, over 19759.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3063, pruned_loss=0.08067, over 3845685.44 frames. ], batch size: 54, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:54:38,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:07,630 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 5.814e+02 6.999e+02 8.951e+02 2.972e+03, threshold=1.400e+03, percent-clipped=5.0 +2023-04-01 18:55:09,154 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:20,440 INFO [train.py:903] (2/4) Epoch 11, batch 1600, loss[loss=0.2153, simple_loss=0.295, pruned_loss=0.06781, over 19763.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3072, pruned_loss=0.08103, over 3831427.99 frames. ], batch size: 56, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:55:41,189 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:46,718 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 18:55:48,183 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:12,787 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69920.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:20,532 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:23,825 INFO [train.py:903] (2/4) Epoch 11, batch 1650, loss[loss=0.2258, simple_loss=0.2986, pruned_loss=0.0765, over 19727.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3078, pruned_loss=0.0812, over 3836030.40 frames. 
], batch size: 51, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:57:00,804 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69958.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:16,652 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 5.294e+02 6.504e+02 8.314e+02 1.576e+03, threshold=1.301e+03, percent-clipped=2.0 +2023-04-01 18:57:26,354 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69978.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:57:27,012 INFO [train.py:903] (2/4) Epoch 11, batch 1700, loss[loss=0.2262, simple_loss=0.3036, pruned_loss=0.07444, over 19836.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3069, pruned_loss=0.08047, over 3835090.91 frames. ], batch size: 52, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:57:32,055 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:59,118 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70003.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:58:03,168 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:09,485 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 18:58:11,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6133, 4.1995, 2.5756, 3.7885, 1.2012, 3.9122, 3.9411, 4.0620], + device='cuda:2'), covar=tensor([0.0616, 0.0977, 0.1922, 0.0725, 0.3534, 0.0739, 0.0768, 0.0992], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0364, 0.0432, 0.0316, 0.0376, 0.0365, 0.0357, 0.0386], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 18:58:29,317 INFO [train.py:903] (2/4) Epoch 11, batch 1750, loss[loss=0.2032, simple_loss=0.2756, pruned_loss=0.06538, over 19356.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3076, pruned_loss=0.08144, over 3835513.33 frames. 
], batch size: 47, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:58:32,053 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:40,220 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70036.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:58:44,996 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3334, 1.2488, 1.2698, 1.7469, 1.3357, 1.6487, 1.6539, 1.5751], + device='cuda:2'), covar=tensor([0.0943, 0.1035, 0.1105, 0.0786, 0.0854, 0.0764, 0.0882, 0.0694], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0229, 0.0226, 0.0251, 0.0240, 0.0217, 0.0201, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 18:59:02,477 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70053.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:22,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 6.480e+02 8.127e+02 9.904e+02 1.581e+03, threshold=1.625e+03, percent-clipped=5.0 +2023-04-01 18:59:34,177 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:34,847 INFO [train.py:903] (2/4) Epoch 11, batch 1800, loss[loss=0.2527, simple_loss=0.3289, pruned_loss=0.08826, over 19462.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.308, pruned_loss=0.08171, over 3835498.82 frames. ], batch size: 64, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:59:43,614 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 19:00:02,673 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:14,319 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 19:00:32,733 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-01 19:00:34,715 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:35,505 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 19:00:35,953 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:37,769 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:38,711 INFO [train.py:903] (2/4) Epoch 11, batch 1850, loss[loss=0.2435, simple_loss=0.3173, pruned_loss=0.08487, over 19739.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3085, pruned_loss=0.08163, over 3839792.10 frames. ], batch size: 63, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:05,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70151.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:01:06,352 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:01:13,665 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 19:01:19,261 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 19:01:30,925 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.171e+02 6.551e+02 7.968e+02 1.780e+03, threshold=1.310e+03, percent-clipped=1.0 +2023-04-01 19:01:41,531 INFO [train.py:903] (2/4) Epoch 11, batch 1900, loss[loss=0.2377, simple_loss=0.3164, pruned_loss=0.07948, over 19675.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3081, pruned_loss=0.0814, over 3819074.75 frames. ], batch size: 59, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:57,138 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 19:02:04,757 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 19:02:28,857 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 19:02:42,954 INFO [train.py:903] (2/4) Epoch 11, batch 1950, loss[loss=0.2154, simple_loss=0.2991, pruned_loss=0.06582, over 19717.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3081, pruned_loss=0.08134, over 3819445.27 frames. ], batch size: 59, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:03:35,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.987e+02 6.114e+02 7.804e+02 3.131e+03, threshold=1.223e+03, percent-clipped=9.0 +2023-04-01 19:03:37,258 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 19:03:43,632 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8867, 1.3199, 1.0458, 0.9884, 1.1313, 0.9564, 0.9649, 1.2772], + device='cuda:2'), covar=tensor([0.0460, 0.0639, 0.0907, 0.0546, 0.0466, 0.1019, 0.0481, 0.0370], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0293, 0.0321, 0.0242, 0.0231, 0.0314, 0.0284, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:03:45,817 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:03:46,665 INFO [train.py:903] (2/4) Epoch 11, batch 2000, loss[loss=0.1997, simple_loss=0.2738, pruned_loss=0.06273, over 19616.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3076, pruned_loss=0.08114, over 3828444.92 frames. ], batch size: 50, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:04:48,762 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 19:04:51,058 INFO [train.py:903] (2/4) Epoch 11, batch 2050, loss[loss=0.2155, simple_loss=0.2881, pruned_loss=0.07147, over 19728.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3077, pruned_loss=0.08092, over 3822997.49 frames. ], batch size: 51, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:05:07,046 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 19:05:08,127 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 19:05:28,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 19:05:43,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.481e+02 7.126e+02 9.531e+02 2.002e+03, threshold=1.425e+03, percent-clipped=10.0 +2023-04-01 19:05:54,411 INFO [train.py:903] (2/4) Epoch 11, batch 2100, loss[loss=0.2104, simple_loss=0.2732, pruned_loss=0.07383, over 19792.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3088, pruned_loss=0.08226, over 3827440.50 frames. ], batch size: 48, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:06:24,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 19:06:30,845 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70407.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:06:48,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 19:06:57,555 INFO [train.py:903] (2/4) Epoch 11, batch 2150, loss[loss=0.2025, simple_loss=0.268, pruned_loss=0.06847, over 19736.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3087, pruned_loss=0.08166, over 3831621.35 frames. ], batch size: 46, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:07:01,570 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70432.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 19:07:51,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.585e+02 6.733e+02 8.759e+02 1.859e+03, threshold=1.347e+03, percent-clipped=4.0 +2023-04-01 19:07:52,886 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:08:02,122 INFO [train.py:903] (2/4) Epoch 11, batch 2200, loss[loss=0.2359, simple_loss=0.3147, pruned_loss=0.07861, over 19675.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3083, pruned_loss=0.08149, over 3823201.96 frames. ], batch size: 53, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:09:06,398 INFO [train.py:903] (2/4) Epoch 11, batch 2250, loss[loss=0.2028, simple_loss=0.2715, pruned_loss=0.06706, over 19752.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3085, pruned_loss=0.08151, over 3821187.80 frames. ], batch size: 45, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:01,002 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.990e+02 6.839e+02 8.349e+02 1.575e+03, threshold=1.368e+03, percent-clipped=2.0 +2023-04-01 19:10:10,417 INFO [train.py:903] (2/4) Epoch 11, batch 2300, loss[loss=0.2397, simple_loss=0.3155, pruned_loss=0.08198, over 19615.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3103, pruned_loss=0.08278, over 3804654.61 frames. ], batch size: 57, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:19,799 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:10:23,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 19:11:05,011 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:11:13,190 INFO [train.py:903] (2/4) Epoch 11, batch 2350, loss[loss=0.2565, simple_loss=0.3274, pruned_loss=0.09281, over 19668.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3103, pruned_loss=0.08241, over 3802826.42 frames. 
], batch size: 58, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:11:27,812 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1106, 2.1780, 2.2496, 3.2341, 2.1289, 2.9901, 2.6549, 2.0824], + device='cuda:2'), covar=tensor([0.3640, 0.3054, 0.1406, 0.1774, 0.3683, 0.1453, 0.3187, 0.2612], + device='cuda:2'), in_proj_covar=tensor([0.0788, 0.0799, 0.0644, 0.0887, 0.0776, 0.0694, 0.0779, 0.0706], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:11:56,873 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 19:12:06,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.270e+02 6.524e+02 8.351e+02 2.247e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 19:12:12,589 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 19:12:16,796 INFO [train.py:903] (2/4) Epoch 11, batch 2400, loss[loss=0.2475, simple_loss=0.3215, pruned_loss=0.08671, over 18023.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3095, pruned_loss=0.082, over 3813950.85 frames. ], batch size: 83, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:12:58,793 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0990, 1.2435, 1.7622, 1.1109, 2.6100, 3.5601, 3.2208, 3.6437], + device='cuda:2'), covar=tensor([0.1586, 0.3407, 0.2909, 0.2211, 0.0494, 0.0186, 0.0200, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0295, 0.0320, 0.0251, 0.0215, 0.0155, 0.0205, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 19:13:20,246 INFO [train.py:903] (2/4) Epoch 11, batch 2450, loss[loss=0.2282, simple_loss=0.3172, pruned_loss=0.0696, over 19683.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.31, pruned_loss=0.08272, over 3814687.12 frames. ], batch size: 58, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:13:32,449 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:13:44,567 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8989, 2.1840, 2.3351, 2.9437, 2.5863, 2.3514, 2.2049, 2.8333], + device='cuda:2'), covar=tensor([0.0687, 0.1536, 0.1227, 0.0808, 0.1113, 0.0440, 0.1068, 0.0517], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0347, 0.0287, 0.0238, 0.0296, 0.0242, 0.0276, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:14:14,278 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.717e+02 6.752e+02 8.721e+02 4.215e+03, threshold=1.350e+03, percent-clipped=8.0 +2023-04-01 19:14:25,711 INFO [train.py:903] (2/4) Epoch 11, batch 2500, loss[loss=0.1815, simple_loss=0.2547, pruned_loss=0.05411, over 19025.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3094, pruned_loss=0.08223, over 3813918.69 frames. ], batch size: 42, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:27,558 INFO [train.py:903] (2/4) Epoch 11, batch 2550, loss[loss=0.2106, simple_loss=0.2888, pruned_loss=0.06625, over 19483.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3101, pruned_loss=0.0828, over 3814708.80 frames. 
], batch size: 49, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:46,695 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:17,504 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:20,506 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.586e+02 5.428e+02 6.511e+02 7.969e+02 1.423e+03, threshold=1.302e+03, percent-clipped=4.0 +2023-04-01 19:16:26,498 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 19:16:30,133 INFO [train.py:903] (2/4) Epoch 11, batch 2600, loss[loss=0.2185, simple_loss=0.3005, pruned_loss=0.06826, over 19539.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3092, pruned_loss=0.08229, over 3819624.22 frames. ], batch size: 54, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:34,839 INFO [train.py:903] (2/4) Epoch 11, batch 2650, loss[loss=0.2235, simple_loss=0.296, pruned_loss=0.07546, over 19458.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3089, pruned_loss=0.0819, over 3813840.33 frames. ], batch size: 49, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:56,649 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 19:18:28,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 5.322e+02 6.264e+02 9.085e+02 2.401e+03, threshold=1.253e+03, percent-clipped=10.0 +2023-04-01 19:18:39,383 INFO [train.py:903] (2/4) Epoch 11, batch 2700, loss[loss=0.2416, simple_loss=0.3218, pruned_loss=0.08067, over 19474.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3083, pruned_loss=0.08159, over 3815363.41 frames. ], batch size: 64, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:18:56,259 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70993.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:18,245 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:30,052 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:43,436 INFO [train.py:903] (2/4) Epoch 11, batch 2750, loss[loss=0.2387, simple_loss=0.318, pruned_loss=0.0797, over 13546.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3076, pruned_loss=0.08109, over 3817174.06 frames. ], batch size: 135, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:20:08,223 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:20:37,010 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.301e+02 6.565e+02 7.951e+02 1.458e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 19:20:45,883 INFO [train.py:903] (2/4) Epoch 11, batch 2800, loss[loss=0.2527, simple_loss=0.3206, pruned_loss=0.09236, over 17295.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3077, pruned_loss=0.08123, over 3814443.41 frames. ], batch size: 101, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:21:29,709 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 19:21:51,269 INFO [train.py:903] (2/4) Epoch 11, batch 2850, loss[loss=0.2352, simple_loss=0.3113, pruned_loss=0.07953, over 18398.00 frames. 
], tot_loss[loss=0.2349, simple_loss=0.3075, pruned_loss=0.08113, over 3814300.29 frames. ], batch size: 84, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:22:45,074 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.466e+02 6.713e+02 8.640e+02 2.236e+03, threshold=1.343e+03, percent-clipped=7.0 +2023-04-01 19:22:54,321 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 19:22:55,471 INFO [train.py:903] (2/4) Epoch 11, batch 2900, loss[loss=0.2441, simple_loss=0.3184, pruned_loss=0.08491, over 19337.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3065, pruned_loss=0.0805, over 3825045.14 frames. ], batch size: 66, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:23:03,498 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.71 vs. limit=5.0 +2023-04-01 19:23:09,038 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8966, 3.5188, 2.4796, 3.2407, 0.9110, 3.2913, 3.2833, 3.3868], + device='cuda:2'), covar=tensor([0.0847, 0.1130, 0.1954, 0.0884, 0.4037, 0.0972, 0.0883, 0.1157], + device='cuda:2'), in_proj_covar=tensor([0.0431, 0.0366, 0.0440, 0.0321, 0.0382, 0.0368, 0.0356, 0.0393], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:24:00,125 INFO [train.py:903] (2/4) Epoch 11, batch 2950, loss[loss=0.211, simple_loss=0.2785, pruned_loss=0.07178, over 19756.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3069, pruned_loss=0.08057, over 3819470.48 frames. ], batch size: 47, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:24:53,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.471e+02 6.960e+02 8.532e+02 1.699e+03, threshold=1.392e+03, percent-clipped=7.0 +2023-04-01 19:25:02,840 INFO [train.py:903] (2/4) Epoch 11, batch 3000, loss[loss=0.2241, simple_loss=0.3011, pruned_loss=0.07357, over 19761.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3077, pruned_loss=0.08125, over 3828417.92 frames. ], batch size: 54, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:25:02,840 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 19:25:16,084 INFO [train.py:937] (2/4) Epoch 11, validation: loss=0.1785, simple_loss=0.2793, pruned_loss=0.0389, over 944034.00 frames. +2023-04-01 19:25:16,085 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 19:25:20,539 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 19:25:26,639 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3350, 2.9455, 1.9017, 2.0545, 2.1384, 2.3773, 0.7725, 2.1271], + device='cuda:2'), covar=tensor([0.0429, 0.0431, 0.0621, 0.0793, 0.0729, 0.0877, 0.0991, 0.0784], + device='cuda:2'), in_proj_covar=tensor([0.0342, 0.0335, 0.0335, 0.0356, 0.0429, 0.0358, 0.0312, 0.0326], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 19:25:51,496 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2263, 2.1701, 2.2846, 3.3056, 2.1949, 3.1281, 2.7252, 2.0971], + device='cuda:2'), covar=tensor([0.3558, 0.2987, 0.1422, 0.1704, 0.3642, 0.1348, 0.3142, 0.2671], + device='cuda:2'), in_proj_covar=tensor([0.0789, 0.0803, 0.0647, 0.0890, 0.0781, 0.0701, 0.0784, 0.0705], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:26:01,951 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7992, 1.6647, 1.2622, 1.7688, 1.6678, 1.4138, 1.3417, 1.6100], + device='cuda:2'), covar=tensor([0.1019, 0.1450, 0.1728, 0.1129, 0.1304, 0.0908, 0.1499, 0.0912], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0344, 0.0284, 0.0235, 0.0290, 0.0240, 0.0273, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:26:20,107 INFO [train.py:903] (2/4) Epoch 11, batch 3050, loss[loss=0.2705, simple_loss=0.3286, pruned_loss=0.1062, over 13317.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3076, pruned_loss=0.08116, over 3821386.44 frames. ], batch size: 135, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:26:51,695 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:27:07,114 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1851, 2.1818, 2.4295, 3.2384, 2.1646, 2.9781, 2.7577, 2.2840], + device='cuda:2'), covar=tensor([0.3848, 0.3503, 0.1392, 0.1921, 0.3863, 0.1597, 0.3388, 0.2598], + device='cuda:2'), in_proj_covar=tensor([0.0786, 0.0802, 0.0645, 0.0888, 0.0777, 0.0699, 0.0783, 0.0703], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:27:13,585 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.484e+02 7.005e+02 9.170e+02 2.820e+03, threshold=1.401e+03, percent-clipped=8.0 +2023-04-01 19:27:23,008 INFO [train.py:903] (2/4) Epoch 11, batch 3100, loss[loss=0.221, simple_loss=0.3019, pruned_loss=0.07011, over 19671.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3084, pruned_loss=0.08178, over 3822090.25 frames. 
], batch size: 60, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:27:27,788 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7394, 1.3134, 1.4111, 2.0285, 1.8156, 2.0967, 2.1424, 1.7814], + device='cuda:2'), covar=tensor([0.0749, 0.0963, 0.1012, 0.0794, 0.0810, 0.0616, 0.0801, 0.0658], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0228, 0.0224, 0.0251, 0.0238, 0.0215, 0.0198, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 19:27:39,104 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:28:26,276 INFO [train.py:903] (2/4) Epoch 11, batch 3150, loss[loss=0.2391, simple_loss=0.3137, pruned_loss=0.08219, over 19442.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3077, pruned_loss=0.081, over 3833762.07 frames. ], batch size: 70, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:28:55,935 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 19:28:57,877 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 19:29:17,646 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:29:19,600 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.450e+02 6.508e+02 8.826e+02 1.534e+03, threshold=1.302e+03, percent-clipped=1.0 +2023-04-01 19:29:31,369 INFO [train.py:903] (2/4) Epoch 11, batch 3200, loss[loss=0.2223, simple_loss=0.3031, pruned_loss=0.07074, over 19672.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3086, pruned_loss=0.0814, over 3820244.59 frames. ], batch size: 58, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:05,861 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:30:32,441 INFO [train.py:903] (2/4) Epoch 11, batch 3250, loss[loss=0.2686, simple_loss=0.3411, pruned_loss=0.0981, over 17505.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3088, pruned_loss=0.08198, over 3806978.27 frames. ], batch size: 101, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:35,050 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3259, 1.4277, 1.6239, 1.4997, 2.2756, 2.1002, 2.2818, 0.8400], + device='cuda:2'), covar=tensor([0.2235, 0.3779, 0.2271, 0.1790, 0.1299, 0.1863, 0.1367, 0.3718], + device='cuda:2'), in_proj_covar=tensor([0.0490, 0.0575, 0.0601, 0.0441, 0.0595, 0.0493, 0.0649, 0.0497], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:31:26,706 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.701e+02 5.157e+02 6.415e+02 8.641e+02 1.397e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-04-01 19:31:36,063 INFO [train.py:903] (2/4) Epoch 11, batch 3300, loss[loss=0.1789, simple_loss=0.2488, pruned_loss=0.05447, over 19751.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3086, pruned_loss=0.08169, over 3814771.45 frames. ], batch size: 46, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:41,722 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-01 19:32:39,937 INFO [train.py:903] (2/4) Epoch 11, batch 3350, loss[loss=0.2414, simple_loss=0.3204, pruned_loss=0.08126, over 18649.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3076, pruned_loss=0.08137, over 3819887.23 frames. ], batch size: 74, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:32:47,342 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:33:26,644 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 19:33:34,248 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.202e+02 6.815e+02 8.173e+02 2.322e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 19:33:44,445 INFO [train.py:903] (2/4) Epoch 11, batch 3400, loss[loss=0.2513, simple_loss=0.3324, pruned_loss=0.08513, over 19659.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3063, pruned_loss=0.08072, over 3829317.69 frames. ], batch size: 58, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:40,448 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0467, 3.4897, 2.0691, 2.2031, 3.1253, 1.7957, 1.3021, 2.1766], + device='cuda:2'), covar=tensor([0.1255, 0.0505, 0.1020, 0.0736, 0.0435, 0.1015, 0.1014, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0301, 0.0329, 0.0250, 0.0235, 0.0321, 0.0289, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:34:44,108 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71725.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:34:48,498 INFO [train.py:903] (2/4) Epoch 11, batch 3450, loss[loss=0.2239, simple_loss=0.3035, pruned_loss=0.0721, over 19484.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3062, pruned_loss=0.08064, over 3812385.34 frames. ], batch size: 64, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:52,154 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 19:35:15,999 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:33,074 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:42,106 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.649e+02 6.835e+02 8.667e+02 1.565e+03, threshold=1.367e+03, percent-clipped=4.0 +2023-04-01 19:35:52,291 INFO [train.py:903] (2/4) Epoch 11, batch 3500, loss[loss=0.2717, simple_loss=0.3503, pruned_loss=0.09651, over 19559.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3066, pruned_loss=0.08066, over 3814269.27 frames. 
], batch size: 61, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:36:04,527 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:36:33,843 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1543, 2.1327, 2.2776, 2.9361, 2.0240, 3.1371, 2.5441, 2.1400], + device='cuda:2'), covar=tensor([0.3726, 0.3244, 0.1607, 0.2126, 0.3805, 0.1507, 0.3704, 0.2897], + device='cuda:2'), in_proj_covar=tensor([0.0787, 0.0805, 0.0648, 0.0891, 0.0779, 0.0703, 0.0785, 0.0709], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:36:56,405 INFO [train.py:903] (2/4) Epoch 11, batch 3550, loss[loss=0.2284, simple_loss=0.3065, pruned_loss=0.07521, over 19665.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.307, pruned_loss=0.08062, over 3830286.16 frames. ], batch size: 60, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:37:00,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.10 vs. limit=5.0 +2023-04-01 19:37:49,025 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.129e+02 6.236e+02 8.069e+02 1.994e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-01 19:37:58,900 INFO [train.py:903] (2/4) Epoch 11, batch 3600, loss[loss=0.2496, simple_loss=0.3177, pruned_loss=0.09071, over 19533.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3093, pruned_loss=0.08181, over 3835338.03 frames. ], batch size: 54, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:03,006 INFO [train.py:903] (2/4) Epoch 11, batch 3650, loss[loss=0.2189, simple_loss=0.303, pruned_loss=0.06737, over 19534.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3085, pruned_loss=0.08118, over 3841255.12 frames. ], batch size: 56, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:13,485 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:39:55,945 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.422e+02 6.726e+02 7.997e+02 1.955e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-04-01 19:40:05,245 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:40:06,257 INFO [train.py:903] (2/4) Epoch 11, batch 3700, loss[loss=0.2144, simple_loss=0.2922, pruned_loss=0.06827, over 19587.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3091, pruned_loss=0.08145, over 3827922.86 frames. ], batch size: 52, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:41:02,230 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-01 19:41:11,914 INFO [train.py:903] (2/4) Epoch 11, batch 3750, loss[loss=0.2096, simple_loss=0.2772, pruned_loss=0.07102, over 19288.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3083, pruned_loss=0.08086, over 3829127.28 frames. ], batch size: 44, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:06,342 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.141e+02 6.053e+02 7.322e+02 1.150e+03, threshold=1.211e+03, percent-clipped=0.0 +2023-04-01 19:42:17,093 INFO [train.py:903] (2/4) Epoch 11, batch 3800, loss[loss=0.2199, simple_loss=0.2978, pruned_loss=0.07104, over 19582.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3088, pruned_loss=0.08073, over 3825125.16 frames. 
], batch size: 52, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:22,599 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 19:42:35,690 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:42:46,984 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 19:43:21,151 INFO [train.py:903] (2/4) Epoch 11, batch 3850, loss[loss=0.239, simple_loss=0.3079, pruned_loss=0.08503, over 19531.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3096, pruned_loss=0.08119, over 3796595.50 frames. ], batch size: 54, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:43:45,190 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:44:16,372 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 5.600e+02 6.929e+02 8.545e+02 2.074e+03, threshold=1.386e+03, percent-clipped=5.0 +2023-04-01 19:44:25,945 INFO [train.py:903] (2/4) Epoch 11, batch 3900, loss[loss=0.2585, simple_loss=0.3316, pruned_loss=0.09266, over 19623.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3089, pruned_loss=0.08122, over 3785137.20 frames. ], batch size: 57, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:04,304 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3211, 1.2489, 1.8253, 1.3787, 2.7221, 3.6995, 3.3776, 3.8890], + device='cuda:2'), covar=tensor([0.1385, 0.3361, 0.2790, 0.1977, 0.0493, 0.0138, 0.0203, 0.0181], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0294, 0.0321, 0.0251, 0.0213, 0.0154, 0.0206, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 19:45:30,656 INFO [train.py:903] (2/4) Epoch 11, batch 3950, loss[loss=0.2412, simple_loss=0.318, pruned_loss=0.0822, over 19652.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.308, pruned_loss=0.08079, over 3782525.57 frames. ], batch size: 60, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:31,946 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 19:46:23,815 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 5.760e+02 7.487e+02 9.416e+02 2.541e+03, threshold=1.497e+03, percent-clipped=9.0 +2023-04-01 19:46:34,099 INFO [train.py:903] (2/4) Epoch 11, batch 4000, loss[loss=0.3037, simple_loss=0.3534, pruned_loss=0.1269, over 13592.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3097, pruned_loss=0.08214, over 3786276.72 frames. ], batch size: 136, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:46:36,471 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72281.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:47:22,616 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 19:47:37,790 INFO [train.py:903] (2/4) Epoch 11, batch 4050, loss[loss=0.214, simple_loss=0.2928, pruned_loss=0.06762, over 19662.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.08119, over 3806495.64 frames. 
], batch size: 53, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:47:57,890 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:05,149 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:33,043 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.561e+02 4.924e+02 6.616e+02 8.581e+02 2.031e+03, threshold=1.323e+03, percent-clipped=3.0 +2023-04-01 19:48:37,063 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:43,646 INFO [train.py:903] (2/4) Epoch 11, batch 4100, loss[loss=0.2641, simple_loss=0.3372, pruned_loss=0.09548, over 18210.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3081, pruned_loss=0.08122, over 3796483.34 frames. ], batch size: 83, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:49:05,282 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72396.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:49:18,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 19:49:48,299 INFO [train.py:903] (2/4) Epoch 11, batch 4150, loss[loss=0.2275, simple_loss=0.3078, pruned_loss=0.07361, over 19328.00 frames. ], tot_loss[loss=0.234, simple_loss=0.307, pruned_loss=0.08047, over 3800054.46 frames. ], batch size: 70, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:50:01,627 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4769, 4.0504, 4.1785, 4.1646, 1.5465, 3.9116, 3.3962, 3.8658], + device='cuda:2'), covar=tensor([0.1444, 0.0766, 0.0551, 0.0609, 0.4923, 0.0695, 0.0659, 0.1096], + device='cuda:2'), in_proj_covar=tensor([0.0665, 0.0601, 0.0793, 0.0674, 0.0723, 0.0550, 0.0485, 0.0733], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 19:50:42,219 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.902e+02 6.015e+02 8.099e+02 1.569e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-01 19:50:42,516 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0949, 5.3897, 3.1722, 4.7435, 1.1563, 5.4335, 5.4618, 5.5450], + device='cuda:2'), covar=tensor([0.0379, 0.0928, 0.1685, 0.0595, 0.3825, 0.0564, 0.0594, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0433, 0.0370, 0.0440, 0.0320, 0.0379, 0.0373, 0.0358, 0.0393], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:50:47,150 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9937, 4.4837, 4.7666, 4.7376, 1.7932, 4.4233, 3.8273, 4.4500], + device='cuda:2'), covar=tensor([0.1316, 0.0626, 0.0467, 0.0512, 0.4733, 0.0528, 0.0553, 0.0928], + device='cuda:2'), in_proj_covar=tensor([0.0668, 0.0602, 0.0795, 0.0676, 0.0724, 0.0552, 0.0485, 0.0734], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 19:50:51,447 INFO [train.py:903] (2/4) Epoch 11, batch 4200, loss[loss=0.2092, simple_loss=0.2822, pruned_loss=0.06809, over 19748.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3066, pruned_loss=0.0799, over 3816395.46 frames. 
], batch size: 47, lr: 7.54e-03, grad_scale: 16.0 +2023-04-01 19:50:57,217 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 19:51:07,231 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:51:56,568 INFO [train.py:903] (2/4) Epoch 11, batch 4250, loss[loss=0.2308, simple_loss=0.2917, pruned_loss=0.08491, over 19362.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3046, pruned_loss=0.07862, over 3824727.08 frames. ], batch size: 47, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:52:17,342 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 19:52:23,720 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8791, 4.3144, 4.5862, 4.5659, 1.6352, 4.2523, 3.7009, 4.2678], + device='cuda:2'), covar=tensor([0.1341, 0.0663, 0.0512, 0.0569, 0.5013, 0.0609, 0.0592, 0.0965], + device='cuda:2'), in_proj_covar=tensor([0.0671, 0.0603, 0.0795, 0.0679, 0.0726, 0.0554, 0.0485, 0.0735], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 19:52:23,923 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9957, 2.0580, 2.1588, 2.7501, 1.9514, 2.5864, 2.4180, 1.9861], + device='cuda:2'), covar=tensor([0.3674, 0.3117, 0.1553, 0.1881, 0.3479, 0.1537, 0.3529, 0.2747], + device='cuda:2'), in_proj_covar=tensor([0.0792, 0.0811, 0.0651, 0.0896, 0.0781, 0.0707, 0.0790, 0.0713], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:52:28,257 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 19:52:51,994 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.433e+02 6.415e+02 7.675e+02 1.468e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-01 19:52:58,569 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3256, 1.4714, 1.9065, 1.5597, 3.1637, 2.5337, 3.2884, 1.5691], + device='cuda:2'), covar=tensor([0.2382, 0.3911, 0.2357, 0.1863, 0.1506, 0.1879, 0.1720, 0.3653], + device='cuda:2'), in_proj_covar=tensor([0.0490, 0.0572, 0.0596, 0.0435, 0.0589, 0.0489, 0.0645, 0.0493], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:53:01,666 INFO [train.py:903] (2/4) Epoch 11, batch 4300, loss[loss=0.1907, simple_loss=0.2676, pruned_loss=0.05691, over 19733.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3049, pruned_loss=0.07888, over 3819671.98 frames. 
], batch size: 46, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:53:26,286 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4300, 1.2144, 1.3601, 1.5025, 2.1726, 1.0941, 1.8902, 2.3613], + device='cuda:2'), covar=tensor([0.0524, 0.2064, 0.2010, 0.1167, 0.0566, 0.1713, 0.1242, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0336, 0.0350, 0.0319, 0.0345, 0.0331, 0.0333, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 19:53:36,376 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:00,204 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 19:54:04,818 INFO [train.py:903] (2/4) Epoch 11, batch 4350, loss[loss=0.2604, simple_loss=0.3179, pruned_loss=0.1014, over 19712.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3073, pruned_loss=0.08045, over 3817046.07 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:54:33,995 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:58,901 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.206e+02 6.524e+02 8.372e+02 1.509e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-01 19:55:06,330 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:08,352 INFO [train.py:903] (2/4) Epoch 11, batch 4400, loss[loss=0.258, simple_loss=0.3331, pruned_loss=0.09144, over 19683.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3084, pruned_loss=0.08092, over 3825730.42 frames. ], batch size: 59, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:55:12,322 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2799, 1.3665, 1.6980, 1.5194, 2.7654, 2.2467, 2.7108, 1.2019], + device='cuda:2'), covar=tensor([0.2184, 0.3763, 0.2285, 0.1687, 0.1185, 0.1723, 0.1339, 0.3447], + device='cuda:2'), in_proj_covar=tensor([0.0490, 0.0575, 0.0600, 0.0436, 0.0592, 0.0492, 0.0647, 0.0495], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 19:55:20,153 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:37,564 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 19:55:47,774 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 19:56:12,262 INFO [train.py:903] (2/4) Epoch 11, batch 4450, loss[loss=0.2894, simple_loss=0.3525, pruned_loss=0.1132, over 19688.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3091, pruned_loss=0.08116, over 3822928.62 frames. ], batch size: 60, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:57:06,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.681e+02 5.500e+02 6.922e+02 8.863e+02 1.965e+03, threshold=1.384e+03, percent-clipped=10.0 +2023-04-01 19:57:14,268 INFO [train.py:903] (2/4) Epoch 11, batch 4500, loss[loss=0.2229, simple_loss=0.3021, pruned_loss=0.07188, over 19523.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3097, pruned_loss=0.0817, over 3820046.17 frames. 
], batch size: 54, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:57:46,490 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:58:19,530 INFO [train.py:903] (2/4) Epoch 11, batch 4550, loss[loss=0.2608, simple_loss=0.3303, pruned_loss=0.09569, over 19568.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3094, pruned_loss=0.08145, over 3819940.46 frames. ], batch size: 61, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:58:28,661 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 19:58:53,293 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 19:59:02,318 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:59:15,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.895e+02 7.318e+02 9.053e+02 1.617e+03, threshold=1.464e+03, percent-clipped=1.0 +2023-04-01 19:59:16,562 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 19:59:23,893 INFO [train.py:903] (2/4) Epoch 11, batch 4600, loss[loss=0.2754, simple_loss=0.3433, pruned_loss=0.1038, over 19683.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3096, pruned_loss=0.08173, over 3821408.86 frames. ], batch size: 59, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:59:34,623 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72887.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:04,229 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 20:00:06,142 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:27,766 INFO [train.py:903] (2/4) Epoch 11, batch 4650, loss[loss=0.2242, simple_loss=0.2919, pruned_loss=0.07819, over 19382.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3091, pruned_loss=0.08167, over 3835209.00 frames. ], batch size: 47, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 20:00:46,736 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 20:00:57,793 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 20:01:22,034 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.839e+02 5.234e+02 6.593e+02 8.122e+02 1.324e+03, threshold=1.319e+03, percent-clipped=0.0 +2023-04-01 20:01:30,180 INFO [train.py:903] (2/4) Epoch 11, batch 4700, loss[loss=0.2051, simple_loss=0.284, pruned_loss=0.06311, over 19751.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3091, pruned_loss=0.08195, over 3823582.57 frames. ], batch size: 51, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:01:34,999 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72982.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:01:55,721 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 20:02:34,870 INFO [train.py:903] (2/4) Epoch 11, batch 4750, loss[loss=0.2431, simple_loss=0.3196, pruned_loss=0.08337, over 19542.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3085, pruned_loss=0.08172, over 3835476.72 frames. 
], batch size: 56, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:11,993 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:03:28,793 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.728e+02 6.958e+02 8.518e+02 2.013e+03, threshold=1.392e+03, percent-clipped=2.0 +2023-04-01 20:03:37,827 INFO [train.py:903] (2/4) Epoch 11, batch 4800, loss[loss=0.2742, simple_loss=0.3468, pruned_loss=0.1008, over 19286.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3086, pruned_loss=0.08168, over 3831828.81 frames. ], batch size: 66, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:38,477 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 20:03:44,162 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73084.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:04:18,421 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0113, 5.3284, 2.8941, 4.7940, 1.3337, 5.4463, 5.3413, 5.4942], + device='cuda:2'), covar=tensor([0.0389, 0.0937, 0.1885, 0.0572, 0.3776, 0.0573, 0.0655, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0430, 0.0364, 0.0433, 0.0315, 0.0376, 0.0370, 0.0356, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:04:26,306 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2223, 1.3120, 1.9485, 1.4803, 2.6518, 2.0370, 2.6676, 1.3142], + device='cuda:2'), covar=tensor([0.2477, 0.4260, 0.2447, 0.2017, 0.1603, 0.2271, 0.1753, 0.3737], + device='cuda:2'), in_proj_covar=tensor([0.0490, 0.0577, 0.0601, 0.0438, 0.0591, 0.0493, 0.0645, 0.0497], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 20:04:40,996 INFO [train.py:903] (2/4) Epoch 11, batch 4850, loss[loss=0.2524, simple_loss=0.321, pruned_loss=0.09192, over 18708.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3086, pruned_loss=0.0818, over 3823981.60 frames. ], batch size: 74, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:05:01,963 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 20:05:22,854 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 20:05:29,947 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 20:05:29,990 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 20:05:35,783 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.053e+02 5.592e+02 6.791e+02 8.044e+02 1.982e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 20:05:39,476 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 20:05:44,087 INFO [train.py:903] (2/4) Epoch 11, batch 4900, loss[loss=0.2481, simple_loss=0.3203, pruned_loss=0.08795, over 19675.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3087, pruned_loss=0.08167, over 3813978.52 frames. 
], batch size: 60, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:05:57,048 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3420, 1.1617, 1.4248, 1.4231, 2.8386, 0.9468, 2.0479, 3.2548], + device='cuda:2'), covar=tensor([0.0492, 0.2768, 0.2816, 0.1842, 0.0799, 0.2637, 0.1341, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0337, 0.0352, 0.0321, 0.0344, 0.0332, 0.0331, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:06:01,207 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 20:06:48,385 INFO [train.py:903] (2/4) Epoch 11, batch 4950, loss[loss=0.2456, simple_loss=0.3136, pruned_loss=0.08884, over 13445.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3092, pruned_loss=0.08209, over 3795220.45 frames. ], batch size: 136, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:00,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 20:07:21,304 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73256.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:07:24,668 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 20:07:42,672 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.726e+02 5.448e+02 7.218e+02 1.051e+03 2.092e+03, threshold=1.444e+03, percent-clipped=5.0 +2023-04-01 20:07:51,019 INFO [train.py:903] (2/4) Epoch 11, batch 5000, loss[loss=0.2185, simple_loss=0.3015, pruned_loss=0.06775, over 19571.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3105, pruned_loss=0.08281, over 3807943.34 frames. ], batch size: 61, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:55,880 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 20:08:08,614 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 20:08:32,086 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:08:50,649 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73326.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:08:54,855 INFO [train.py:903] (2/4) Epoch 11, batch 5050, loss[loss=0.2054, simple_loss=0.2896, pruned_loss=0.06062, over 19844.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3095, pruned_loss=0.08197, over 3813676.64 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:09:28,948 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 20:09:47,564 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:09:48,288 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.337e+02 6.487e+02 8.820e+02 1.986e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-01 20:09:56,488 INFO [train.py:903] (2/4) Epoch 11, batch 5100, loss[loss=0.2606, simple_loss=0.3314, pruned_loss=0.09495, over 19734.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3109, pruned_loss=0.08292, over 3802572.16 frames. 
], batch size: 63, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:10:04,911 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 20:10:10,263 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 20:10:13,761 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 20:10:22,107 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2958, 2.0855, 1.6490, 1.9235, 0.7078, 2.0352, 1.9542, 2.0380], + device='cuda:2'), covar=tensor([0.1146, 0.1049, 0.1632, 0.0759, 0.2841, 0.0934, 0.0887, 0.1168], + device='cuda:2'), in_proj_covar=tensor([0.0432, 0.0363, 0.0437, 0.0317, 0.0379, 0.0373, 0.0357, 0.0390], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:11:00,239 INFO [train.py:903] (2/4) Epoch 11, batch 5150, loss[loss=0.2517, simple_loss=0.3139, pruned_loss=0.09476, over 19618.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.309, pruned_loss=0.08187, over 3806679.33 frames. ], batch size: 50, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:11:09,664 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 20:11:16,074 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73441.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:11:31,598 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6854, 1.3399, 1.4705, 1.4375, 3.2351, 0.9713, 2.2422, 3.5323], + device='cuda:2'), covar=tensor([0.0453, 0.2605, 0.2559, 0.1783, 0.0724, 0.2585, 0.1200, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0334, 0.0348, 0.0320, 0.0344, 0.0331, 0.0328, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:11:43,044 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:11:50,701 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 20:11:54,728 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.282e+02 5.910e+02 7.077e+02 9.438e+02 2.777e+03, threshold=1.415e+03, percent-clipped=4.0 +2023-04-01 20:12:03,999 INFO [train.py:903] (2/4) Epoch 11, batch 5200, loss[loss=0.2116, simple_loss=0.2836, pruned_loss=0.06979, over 19760.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.309, pruned_loss=0.08129, over 3811588.31 frames. ], batch size: 47, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:12:16,613 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 20:13:00,075 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 20:13:07,095 INFO [train.py:903] (2/4) Epoch 11, batch 5250, loss[loss=0.222, simple_loss=0.2882, pruned_loss=0.07788, over 19751.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3091, pruned_loss=0.08122, over 3818513.33 frames. 
], batch size: 46, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:14:02,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 5.338e+02 6.658e+02 8.757e+02 1.866e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-04-01 20:14:11,101 INFO [train.py:903] (2/4) Epoch 11, batch 5300, loss[loss=0.2478, simple_loss=0.3192, pruned_loss=0.08826, over 19342.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3085, pruned_loss=0.08124, over 3806980.80 frames. ], batch size: 66, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:14:26,078 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 20:14:31,192 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0454, 1.9912, 1.7753, 1.5661, 1.4843, 1.6062, 0.3579, 0.9072], + device='cuda:2'), covar=tensor([0.0413, 0.0403, 0.0284, 0.0435, 0.0831, 0.0518, 0.0820, 0.0697], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0326, 0.0329, 0.0348, 0.0424, 0.0349, 0.0304, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:14:43,172 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3022, 3.7563, 3.8641, 3.8730, 1.5077, 3.6556, 3.1879, 3.5804], + device='cuda:2'), covar=tensor([0.1434, 0.0843, 0.0629, 0.0693, 0.4899, 0.0702, 0.0692, 0.1143], + device='cuda:2'), in_proj_covar=tensor([0.0666, 0.0600, 0.0794, 0.0678, 0.0724, 0.0553, 0.0485, 0.0730], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 20:14:49,205 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:13,570 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73627.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:15,324 INFO [train.py:903] (2/4) Epoch 11, batch 5350, loss[loss=0.2187, simple_loss=0.2934, pruned_loss=0.072, over 19670.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3079, pruned_loss=0.08087, over 3800258.01 frames. ], batch size: 53, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:15:46,128 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:46,890 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 20:15:49,353 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73655.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:16:09,902 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.285e+02 6.573e+02 8.256e+02 1.333e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-04-01 20:16:19,909 INFO [train.py:903] (2/4) Epoch 11, batch 5400, loss[loss=0.2589, simple_loss=0.3105, pruned_loss=0.1037, over 19768.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3067, pruned_loss=0.08037, over 3816011.38 frames. 
], batch size: 47, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:16:41,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73697.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:17:13,047 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:17:20,604 INFO [train.py:903] (2/4) Epoch 11, batch 5450, loss[loss=0.2525, simple_loss=0.3282, pruned_loss=0.08837, over 18210.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3081, pruned_loss=0.08123, over 3831064.13 frames. ], batch size: 83, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:17:45,882 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:12,036 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:14,011 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.707e+02 7.323e+02 9.356e+02 2.024e+03, threshold=1.465e+03, percent-clipped=8.0 +2023-04-01 20:18:23,367 INFO [train.py:903] (2/4) Epoch 11, batch 5500, loss[loss=0.1995, simple_loss=0.2707, pruned_loss=0.06422, over 17774.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3077, pruned_loss=0.08116, over 3837292.22 frames. ], batch size: 39, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:18:50,416 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 20:19:25,476 INFO [train.py:903] (2/4) Epoch 11, batch 5550, loss[loss=0.264, simple_loss=0.3356, pruned_loss=0.09618, over 17451.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3081, pruned_loss=0.08126, over 3838691.11 frames. ], batch size: 101, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:19:35,108 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 20:20:04,832 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0498, 1.2901, 1.6253, 0.5985, 2.1451, 2.4444, 2.1506, 2.5767], + device='cuda:2'), covar=tensor([0.1342, 0.3172, 0.2810, 0.2255, 0.0440, 0.0246, 0.0335, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0248, 0.0212, 0.0156, 0.0205, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:20:20,659 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.761e+02 6.752e+02 8.536e+02 1.570e+03, threshold=1.350e+03, percent-clipped=1.0 +2023-04-01 20:20:26,357 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 20:20:29,856 INFO [train.py:903] (2/4) Epoch 11, batch 5600, loss[loss=0.2683, simple_loss=0.3379, pruned_loss=0.09931, over 19742.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3074, pruned_loss=0.08068, over 3832625.08 frames. 
], batch size: 51, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:24,767 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0784, 1.1331, 1.3125, 1.3521, 2.6054, 1.0265, 1.9365, 2.9179], + device='cuda:2'), covar=tensor([0.0480, 0.2726, 0.2809, 0.1753, 0.0786, 0.2375, 0.1242, 0.0372], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0338, 0.0349, 0.0322, 0.0347, 0.0331, 0.0332, 0.0353], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:21:34,086 INFO [train.py:903] (2/4) Epoch 11, batch 5650, loss[loss=0.2646, simple_loss=0.332, pruned_loss=0.09858, over 19310.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3089, pruned_loss=0.08155, over 3822119.52 frames. ], batch size: 70, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:41,800 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 20:21:42,590 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4120, 1.4792, 1.7482, 1.4888, 2.2998, 2.6688, 2.5723, 2.7847], + device='cuda:2'), covar=tensor([0.1259, 0.2643, 0.2424, 0.1942, 0.0997, 0.0396, 0.0228, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0294, 0.0322, 0.0250, 0.0214, 0.0157, 0.0206, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:22:03,008 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:22:23,912 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 20:22:28,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.880e+02 7.006e+02 8.632e+02 1.687e+03, threshold=1.401e+03, percent-clipped=2.0 +2023-04-01 20:22:37,797 INFO [train.py:903] (2/4) Epoch 11, batch 5700, loss[loss=0.2351, simple_loss=0.3163, pruned_loss=0.07693, over 19613.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3081, pruned_loss=0.08074, over 3823797.76 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:13,060 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3456, 1.4292, 1.7503, 1.5951, 2.7302, 2.2001, 2.8336, 1.1810], + device='cuda:2'), covar=tensor([0.2154, 0.3550, 0.2174, 0.1591, 0.1291, 0.1788, 0.1271, 0.3495], + device='cuda:2'), in_proj_covar=tensor([0.0492, 0.0574, 0.0599, 0.0436, 0.0594, 0.0492, 0.0645, 0.0494], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 20:23:38,876 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:23:42,014 INFO [train.py:903] (2/4) Epoch 11, batch 5750, loss[loss=0.2685, simple_loss=0.3337, pruned_loss=0.1017, over 19663.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3082, pruned_loss=0.08127, over 3823128.13 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:43,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 20:23:51,346 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 20:23:56,775 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 20:24:11,320 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74051.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:30,624 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74067.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:36,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.413e+02 6.547e+02 7.617e+02 1.298e+03, threshold=1.309e+03, percent-clipped=0.0 +2023-04-01 20:24:45,231 INFO [train.py:903] (2/4) Epoch 11, batch 5800, loss[loss=0.2619, simple_loss=0.3191, pruned_loss=0.1023, over 19273.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3094, pruned_loss=0.08187, over 3808979.86 frames. ], batch size: 66, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:24:50,616 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:04,161 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:46,660 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2438, 1.2826, 1.2349, 1.0415, 1.0182, 1.1004, 0.0653, 0.3780], + device='cuda:2'), covar=tensor([0.0461, 0.0422, 0.0250, 0.0334, 0.0815, 0.0372, 0.0765, 0.0682], + device='cuda:2'), in_proj_covar=tensor([0.0331, 0.0325, 0.0327, 0.0346, 0.0421, 0.0344, 0.0305, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:25:48,487 INFO [train.py:903] (2/4) Epoch 11, batch 5850, loss[loss=0.1809, simple_loss=0.2568, pruned_loss=0.05246, over 19811.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3096, pruned_loss=0.08226, over 3805169.66 frames. ], batch size: 49, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:25:52,584 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.04 vs. limit=2.0 +2023-04-01 20:26:04,731 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 20:26:41,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.778e+02 5.734e+02 7.251e+02 9.100e+02 2.751e+03, threshold=1.450e+03, percent-clipped=7.0 +2023-04-01 20:26:51,312 INFO [train.py:903] (2/4) Epoch 11, batch 5900, loss[loss=0.2217, simple_loss=0.2967, pruned_loss=0.07339, over 19599.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3099, pruned_loss=0.08292, over 3790882.43 frames. ], batch size: 52, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:26:53,556 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 20:27:15,051 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 20:27:30,967 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:27:55,171 INFO [train.py:903] (2/4) Epoch 11, batch 5950, loss[loss=0.2367, simple_loss=0.3203, pruned_loss=0.07652, over 19480.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3086, pruned_loss=0.08206, over 3792337.92 frames. 
], batch size: 64, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:33,387 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74258.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:28:49,682 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 4.883e+02 6.160e+02 7.607e+02 1.521e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-01 20:28:59,175 INFO [train.py:903] (2/4) Epoch 11, batch 6000, loss[loss=0.2192, simple_loss=0.2903, pruned_loss=0.07399, over 19622.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3074, pruned_loss=0.08125, over 3795627.92 frames. ], batch size: 50, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:59,175 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 20:29:11,911 INFO [train.py:937] (2/4) Epoch 11, validation: loss=0.1778, simple_loss=0.2787, pruned_loss=0.03847, over 944034.00 frames. +2023-04-01 20:29:11,912 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 20:30:09,788 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74323.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:30:18,379 INFO [train.py:903] (2/4) Epoch 11, batch 6050, loss[loss=0.1847, simple_loss=0.2624, pruned_loss=0.05354, over 19614.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3065, pruned_loss=0.08084, over 3781563.42 frames. ], batch size: 50, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:30:42,810 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:31:12,597 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.290e+02 5.870e+02 7.295e+02 8.913e+02 1.728e+03, threshold=1.459e+03, percent-clipped=8.0 +2023-04-01 20:31:21,958 INFO [train.py:903] (2/4) Epoch 11, batch 6100, loss[loss=0.1915, simple_loss=0.2601, pruned_loss=0.06143, over 19754.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.307, pruned_loss=0.08121, over 3781629.88 frames. ], batch size: 46, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:22,433 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74426.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:32:25,903 INFO [train.py:903] (2/4) Epoch 11, batch 6150, loss[loss=0.2117, simple_loss=0.2861, pruned_loss=0.06871, over 19859.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3067, pruned_loss=0.08044, over 3797938.89 frames. ], batch size: 52, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:54,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 20:33:12,678 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:33:20,925 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.576e+02 6.382e+02 8.608e+02 2.367e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-01 20:33:29,311 INFO [train.py:903] (2/4) Epoch 11, batch 6200, loss[loss=0.2341, simple_loss=0.3118, pruned_loss=0.07825, over 18812.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3075, pruned_loss=0.08076, over 3810825.94 frames. 
], batch size: 74, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:33:34,560 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4421, 2.0699, 2.0002, 2.5350, 2.2718, 2.2930, 2.0142, 2.4699], + device='cuda:2'), covar=tensor([0.0838, 0.1587, 0.1309, 0.0853, 0.1234, 0.0415, 0.1041, 0.0577], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0349, 0.0291, 0.0237, 0.0295, 0.0241, 0.0277, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:33:43,973 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:33:47,334 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2637, 1.0762, 1.6414, 1.4256, 2.6488, 3.5960, 3.4171, 3.9063], + device='cuda:2'), covar=tensor([0.1744, 0.4807, 0.3973, 0.2122, 0.0620, 0.0167, 0.0261, 0.0217], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0294, 0.0321, 0.0249, 0.0213, 0.0156, 0.0206, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:34:31,914 INFO [train.py:903] (2/4) Epoch 11, batch 6250, loss[loss=0.2128, simple_loss=0.2965, pruned_loss=0.06453, over 19773.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3077, pruned_loss=0.08095, over 3810978.52 frames. ], batch size: 54, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:34:48,607 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74541.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:34:53,764 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2770, 2.9595, 2.1611, 2.1287, 2.0656, 2.4211, 0.6993, 2.0282], + device='cuda:2'), covar=tensor([0.0421, 0.0399, 0.0544, 0.0855, 0.0853, 0.0755, 0.1068, 0.0879], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0332, 0.0332, 0.0353, 0.0427, 0.0352, 0.0308, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:35:02,953 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 20:35:30,751 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 5.334e+02 6.699e+02 8.918e+02 1.691e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-01 20:35:38,657 INFO [train.py:903] (2/4) Epoch 11, batch 6300, loss[loss=0.3068, simple_loss=0.3606, pruned_loss=0.1265, over 19672.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3062, pruned_loss=0.08005, over 3810439.38 frames. ], batch size: 58, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:36:06,824 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:21,221 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:40,491 INFO [train.py:903] (2/4) Epoch 11, batch 6350, loss[loss=0.2684, simple_loss=0.3422, pruned_loss=0.09732, over 17998.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3074, pruned_loss=0.08069, over 3804867.25 frames. 
], batch size: 83, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:36,492 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 5.622e+02 6.880e+02 9.256e+02 3.776e+03, threshold=1.376e+03, percent-clipped=5.0 +2023-04-01 20:37:44,831 INFO [train.py:903] (2/4) Epoch 11, batch 6400, loss[loss=0.2082, simple_loss=0.2856, pruned_loss=0.06535, over 19624.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3064, pruned_loss=0.07992, over 3821619.33 frames. ], batch size: 50, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:56,012 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 20:38:02,535 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7003, 2.2614, 2.1369, 2.7247, 2.6721, 2.2109, 2.1286, 2.7825], + device='cuda:2'), covar=tensor([0.0843, 0.1626, 0.1409, 0.0921, 0.1161, 0.0506, 0.1164, 0.0579], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0351, 0.0291, 0.0238, 0.0298, 0.0243, 0.0278, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:38:33,497 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:38:46,853 INFO [train.py:903] (2/4) Epoch 11, batch 6450, loss[loss=0.1803, simple_loss=0.2649, pruned_loss=0.04785, over 19665.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3068, pruned_loss=0.08009, over 3814420.09 frames. ], batch size: 53, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:39:25,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0695, 1.9288, 1.6577, 2.0449, 2.2331, 1.7636, 1.6632, 1.9800], + device='cuda:2'), covar=tensor([0.0898, 0.1353, 0.1368, 0.0907, 0.0971, 0.0506, 0.1154, 0.0653], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0351, 0.0293, 0.0239, 0.0298, 0.0244, 0.0279, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:39:28,514 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 20:39:44,917 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 5.590e+02 7.165e+02 9.175e+02 2.194e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 20:39:53,340 INFO [train.py:903] (2/4) Epoch 11, batch 6500, loss[loss=0.238, simple_loss=0.3195, pruned_loss=0.07823, over 19661.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3072, pruned_loss=0.08038, over 3809321.08 frames. ], batch size: 55, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:39:54,563 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 20:40:16,067 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:48,452 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:56,364 INFO [train.py:903] (2/4) Epoch 11, batch 6550, loss[loss=0.2377, simple_loss=0.3099, pruned_loss=0.08276, over 17466.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3065, pruned_loss=0.0797, over 3805379.97 frames. 
], batch size: 101, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:40:56,798 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3274, 2.2163, 1.8803, 1.8278, 1.6861, 1.8164, 0.4954, 1.1917], + device='cuda:2'), covar=tensor([0.0412, 0.0396, 0.0333, 0.0492, 0.0795, 0.0635, 0.0914, 0.0756], + device='cuda:2'), in_proj_covar=tensor([0.0341, 0.0331, 0.0333, 0.0354, 0.0429, 0.0353, 0.0312, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:41:52,114 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 5.036e+02 6.201e+02 8.020e+02 1.527e+03, threshold=1.240e+03, percent-clipped=1.0 +2023-04-01 20:41:59,113 INFO [train.py:903] (2/4) Epoch 11, batch 6600, loss[loss=0.2432, simple_loss=0.327, pruned_loss=0.07968, over 19507.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3068, pruned_loss=0.07998, over 3798136.18 frames. ], batch size: 64, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:42:16,807 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.86 vs. limit=5.0 +2023-04-01 20:42:46,181 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4311, 1.5023, 1.8718, 1.6428, 2.7546, 2.2554, 2.8424, 1.1942], + device='cuda:2'), covar=tensor([0.2070, 0.3533, 0.2087, 0.1642, 0.1278, 0.1801, 0.1335, 0.3575], + device='cuda:2'), in_proj_covar=tensor([0.0496, 0.0577, 0.0604, 0.0441, 0.0597, 0.0493, 0.0651, 0.0495], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 20:43:01,227 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:43:03,142 INFO [train.py:903] (2/4) Epoch 11, batch 6650, loss[loss=0.2402, simple_loss=0.3131, pruned_loss=0.08362, over 19316.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3072, pruned_loss=0.08004, over 3797496.73 frames. ], batch size: 70, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:43:32,787 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6701, 2.2960, 2.3948, 2.7361, 2.7124, 2.3876, 1.9853, 2.7877], + device='cuda:2'), covar=tensor([0.0784, 0.1478, 0.1142, 0.0857, 0.1032, 0.0414, 0.1088, 0.0520], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0352, 0.0291, 0.0239, 0.0296, 0.0242, 0.0279, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:43:40,238 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:00,893 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.506e+02 7.046e+02 9.043e+02 1.602e+03, threshold=1.409e+03, percent-clipped=2.0 +2023-04-01 20:44:01,376 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:07,812 INFO [train.py:903] (2/4) Epoch 11, batch 6700, loss[loss=0.2356, simple_loss=0.3144, pruned_loss=0.07839, over 19655.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.306, pruned_loss=0.07916, over 3815679.78 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:44:12,449 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74982.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:31,775 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74998.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:06,536 INFO [train.py:903] (2/4) Epoch 11, batch 6750, loss[loss=0.2461, simple_loss=0.2993, pruned_loss=0.09643, over 19780.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.307, pruned_loss=0.07998, over 3821309.02 frames. ], batch size: 47, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:45:30,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 20:45:55,356 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:56,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 5.649e+02 7.002e+02 8.709e+02 1.691e+03, threshold=1.400e+03, percent-clipped=2.0 +2023-04-01 20:46:04,201 INFO [train.py:903] (2/4) Epoch 11, batch 6800, loss[loss=0.2525, simple_loss=0.3275, pruned_loss=0.08868, over 19538.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3073, pruned_loss=0.08048, over 3813341.44 frames. ], batch size: 54, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:46:52,590 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 20:46:53,156 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 20:46:56,127 INFO [train.py:903] (2/4) Epoch 12, batch 0, loss[loss=0.2374, simple_loss=0.3074, pruned_loss=0.08374, over 19774.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3074, pruned_loss=0.08374, over 19774.00 frames. ], batch size: 54, lr: 7.10e-03, grad_scale: 8.0 +2023-04-01 20:46:56,128 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 20:47:05,124 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8526, 2.5523, 1.9920, 2.4348, 0.5865, 2.4379, 2.4048, 2.6211], + device='cuda:2'), covar=tensor([0.0613, 0.0694, 0.1474, 0.0754, 0.3129, 0.0883, 0.0703, 0.0799], + device='cuda:2'), in_proj_covar=tensor([0.0430, 0.0361, 0.0432, 0.0312, 0.0374, 0.0364, 0.0352, 0.0385], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-04-01 20:47:08,135 INFO [train.py:937] (2/4) Epoch 12, validation: loss=0.1777, simple_loss=0.2788, pruned_loss=0.03825, over 944034.00 frames. +2023-04-01 20:47:08,136 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 20:47:20,786 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 20:47:58,798 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 20:48:11,512 INFO [train.py:903] (2/4) Epoch 12, batch 50, loss[loss=0.1911, simple_loss=0.2761, pruned_loss=0.05305, over 19840.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3063, pruned_loss=0.07907, over 854144.69 frames. 
], batch size: 52, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:48:29,486 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.251e+02 6.823e+02 1.011e+03 3.055e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-01 20:48:41,949 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 20:48:54,700 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 20:49:13,865 INFO [train.py:903] (2/4) Epoch 12, batch 100, loss[loss=0.2041, simple_loss=0.2903, pruned_loss=0.05892, over 19773.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3084, pruned_loss=0.08118, over 1513593.00 frames. ], batch size: 54, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:49:22,088 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 20:50:02,892 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:16,759 INFO [train.py:903] (2/4) Epoch 12, batch 150, loss[loss=0.2129, simple_loss=0.3002, pruned_loss=0.06277, over 19699.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3041, pruned_loss=0.07794, over 2042724.72 frames. ], batch size: 59, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:50:34,005 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:36,108 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.206e+02 6.373e+02 8.343e+02 1.576e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-01 20:50:47,874 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8013, 1.5382, 1.4151, 1.6967, 1.7058, 1.6010, 1.2813, 1.6896], + device='cuda:2'), covar=tensor([0.0978, 0.1263, 0.1435, 0.0928, 0.0994, 0.0522, 0.1322, 0.0722], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0350, 0.0288, 0.0239, 0.0293, 0.0242, 0.0276, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:50:56,167 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:15,335 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 20:51:18,699 INFO [train.py:903] (2/4) Epoch 12, batch 200, loss[loss=0.1958, simple_loss=0.2733, pruned_loss=0.0592, over 19614.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3049, pruned_loss=0.07824, over 2448660.67 frames. 
], batch size: 50, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:51:42,621 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:45,333 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:02,505 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2995, 3.0369, 2.3466, 2.3921, 2.1109, 2.4496, 1.0349, 2.0803], + device='cuda:2'), covar=tensor([0.0403, 0.0373, 0.0464, 0.0733, 0.0823, 0.0703, 0.0903, 0.0788], + device='cuda:2'), in_proj_covar=tensor([0.0341, 0.0331, 0.0330, 0.0357, 0.0430, 0.0353, 0.0309, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 20:52:16,217 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:22,162 INFO [train.py:903] (2/4) Epoch 12, batch 250, loss[loss=0.262, simple_loss=0.3331, pruned_loss=0.09546, over 17140.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3042, pruned_loss=0.07874, over 2756146.39 frames. ], batch size: 101, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:52:41,708 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.714e+02 6.835e+02 8.470e+02 1.829e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 20:52:56,657 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:53:25,593 INFO [train.py:903] (2/4) Epoch 12, batch 300, loss[loss=0.2512, simple_loss=0.3278, pruned_loss=0.08734, over 19440.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3044, pruned_loss=0.07848, over 3010666.42 frames. ], batch size: 70, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:07,383 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:54:28,456 INFO [train.py:903] (2/4) Epoch 12, batch 350, loss[loss=0.2423, simple_loss=0.3201, pruned_loss=0.0822, over 19376.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3053, pruned_loss=0.07919, over 3185488.40 frames. ], batch size: 70, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:31,938 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:54:45,902 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.463e+02 6.814e+02 8.627e+02 1.955e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-01 20:55:30,816 INFO [train.py:903] (2/4) Epoch 12, batch 400, loss[loss=0.2003, simple_loss=0.2696, pruned_loss=0.06553, over 19378.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3046, pruned_loss=0.07872, over 3330607.34 frames. 
], batch size: 48, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:56:16,899 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6474, 1.7495, 1.9832, 1.8267, 2.7926, 2.4888, 2.9392, 1.5965], + device='cuda:2'), covar=tensor([0.1765, 0.3002, 0.1799, 0.1415, 0.1124, 0.1431, 0.1140, 0.2794], + device='cuda:2'), in_proj_covar=tensor([0.0492, 0.0577, 0.0604, 0.0438, 0.0598, 0.0493, 0.0646, 0.0492], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 20:56:32,364 INFO [train.py:903] (2/4) Epoch 12, batch 450, loss[loss=0.1853, simple_loss=0.2561, pruned_loss=0.05725, over 19756.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3051, pruned_loss=0.07894, over 3432653.24 frames. ], batch size: 46, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:56:51,545 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.172e+02 6.423e+02 7.976e+02 2.291e+03, threshold=1.285e+03, percent-clipped=4.0 +2023-04-01 20:57:09,452 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 20:57:10,714 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 20:57:13,222 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75590.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:57:17,008 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5404, 4.6748, 5.2281, 5.2259, 2.1209, 4.8568, 4.2147, 4.8757], + device='cuda:2'), covar=tensor([0.1240, 0.1193, 0.0543, 0.0537, 0.5122, 0.0629, 0.0583, 0.1057], + device='cuda:2'), in_proj_covar=tensor([0.0677, 0.0603, 0.0804, 0.0688, 0.0731, 0.0559, 0.0492, 0.0741], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 20:57:36,531 INFO [train.py:903] (2/4) Epoch 12, batch 500, loss[loss=0.2334, simple_loss=0.3088, pruned_loss=0.07905, over 19584.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3042, pruned_loss=0.07834, over 3524137.69 frames. ], batch size: 61, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:06,138 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:18,048 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:38,563 INFO [train.py:903] (2/4) Epoch 12, batch 550, loss[loss=0.1938, simple_loss=0.267, pruned_loss=0.0603, over 19747.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3042, pruned_loss=0.0782, over 3600578.32 frames. ], batch size: 45, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:50,431 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:56,735 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.816e+02 6.939e+02 9.327e+02 2.224e+03, threshold=1.388e+03, percent-clipped=13.0 +2023-04-01 20:59:09,126 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-01 20:59:24,527 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5655, 1.1925, 1.3284, 1.5946, 3.0845, 0.9924, 2.1881, 3.4167], + device='cuda:2'), covar=tensor([0.0450, 0.2853, 0.2886, 0.1765, 0.0768, 0.2699, 0.1332, 0.0301], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0340, 0.0351, 0.0319, 0.0347, 0.0330, 0.0333, 0.0354], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 20:59:26,880 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:38,252 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75705.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:41,416 INFO [train.py:903] (2/4) Epoch 12, batch 600, loss[loss=0.2446, simple_loss=0.3283, pruned_loss=0.08039, over 19081.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3038, pruned_loss=0.07776, over 3650043.29 frames. ], batch size: 69, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:59:57,543 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:22,875 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 21:00:30,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:42,506 INFO [train.py:903] (2/4) Epoch 12, batch 650, loss[loss=0.2141, simple_loss=0.2808, pruned_loss=0.0737, over 16925.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3052, pruned_loss=0.07789, over 3695699.03 frames. ], batch size: 37, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 21:01:01,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.224e+02 6.354e+02 7.929e+02 1.382e+03, threshold=1.271e+03, percent-clipped=0.0 +2023-04-01 21:01:22,999 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5635, 2.3945, 1.8096, 1.5730, 2.2150, 1.2585, 1.3097, 1.9212], + device='cuda:2'), covar=tensor([0.0746, 0.0514, 0.0841, 0.0630, 0.0349, 0.1013, 0.0628, 0.0346], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0304, 0.0329, 0.0249, 0.0237, 0.0320, 0.0292, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:01:24,576 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.63 vs. limit=5.0 +2023-04-01 21:01:29,699 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4553, 1.3254, 1.7055, 1.3811, 2.8209, 3.7433, 3.5080, 3.8386], + device='cuda:2'), covar=tensor([0.1377, 0.3309, 0.2992, 0.2024, 0.0489, 0.0158, 0.0183, 0.0223], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0293, 0.0323, 0.0250, 0.0214, 0.0156, 0.0206, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:01:45,571 INFO [train.py:903] (2/4) Epoch 12, batch 700, loss[loss=0.2113, simple_loss=0.2809, pruned_loss=0.07083, over 19815.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3058, pruned_loss=0.07858, over 3728334.26 frames. ], batch size: 49, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:02:04,536 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. 
limit=5.0 +2023-04-01 21:02:46,461 INFO [train.py:903] (2/4) Epoch 12, batch 750, loss[loss=0.2492, simple_loss=0.3337, pruned_loss=0.08233, over 19742.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3061, pruned_loss=0.07871, over 3741921.07 frames. ], batch size: 63, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:03:05,205 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 5.547e+02 6.814e+02 8.571e+02 2.504e+03, threshold=1.363e+03, percent-clipped=8.0 +2023-04-01 21:03:40,188 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 21:03:49,639 INFO [train.py:903] (2/4) Epoch 12, batch 800, loss[loss=0.2519, simple_loss=0.3303, pruned_loss=0.08676, over 19657.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3056, pruned_loss=0.07842, over 3763288.40 frames. ], batch size: 58, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:07,030 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 21:04:50,829 INFO [train.py:903] (2/4) Epoch 12, batch 850, loss[loss=0.2207, simple_loss=0.3005, pruned_loss=0.07043, over 19773.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3053, pruned_loss=0.07823, over 3778847.32 frames. ], batch size: 54, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:54,863 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75961.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:10,027 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.208e+02 6.361e+02 7.722e+02 1.579e+03, threshold=1.272e+03, percent-clipped=2.0 +2023-04-01 21:05:25,782 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75986.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:29,772 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3211, 1.3660, 1.7449, 1.2433, 2.6571, 3.4111, 3.1713, 3.5953], + device='cuda:2'), covar=tensor([0.1515, 0.3323, 0.2980, 0.2164, 0.0528, 0.0182, 0.0211, 0.0208], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0294, 0.0324, 0.0251, 0.0214, 0.0157, 0.0206, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:05:46,874 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 21:05:48,477 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:53,891 INFO [train.py:903] (2/4) Epoch 12, batch 900, loss[loss=0.2446, simple_loss=0.3139, pruned_loss=0.0876, over 19598.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3056, pruned_loss=0.07879, over 3765208.27 frames. 
], batch size: 57, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:06:13,758 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7844, 1.4022, 1.4119, 2.1340, 1.6924, 2.1826, 2.0979, 1.9479], + device='cuda:2'), covar=tensor([0.0770, 0.1017, 0.1083, 0.0835, 0.0938, 0.0645, 0.0888, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0224, 0.0222, 0.0244, 0.0235, 0.0212, 0.0195, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:06:20,402 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:06:57,929 INFO [train.py:903] (2/4) Epoch 12, batch 950, loss[loss=0.1991, simple_loss=0.2759, pruned_loss=0.06118, over 19740.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3058, pruned_loss=0.07876, over 3788230.39 frames. ], batch size: 46, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:07:02,617 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 21:07:17,508 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 5.102e+02 6.493e+02 8.387e+02 1.985e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 21:07:59,837 INFO [train.py:903] (2/4) Epoch 12, batch 1000, loss[loss=0.2242, simple_loss=0.294, pruned_loss=0.07725, over 19485.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3038, pruned_loss=0.07763, over 3798253.50 frames. ], batch size: 49, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:08:42,454 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8363, 1.3384, 1.0340, 0.9512, 1.1817, 0.9687, 0.7932, 1.2474], + device='cuda:2'), covar=tensor([0.0539, 0.0704, 0.0973, 0.0570, 0.0467, 0.1038, 0.0611, 0.0394], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0303, 0.0326, 0.0247, 0.0233, 0.0314, 0.0289, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:08:54,935 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 21:09:02,677 INFO [train.py:903] (2/4) Epoch 12, batch 1050, loss[loss=0.2098, simple_loss=0.2798, pruned_loss=0.06987, over 19410.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3041, pruned_loss=0.0781, over 3791783.73 frames. ], batch size: 48, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:09:20,678 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 5.178e+02 6.428e+02 8.624e+02 1.751e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-01 21:09:35,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 21:10:05,074 INFO [train.py:903] (2/4) Epoch 12, batch 1100, loss[loss=0.2036, simple_loss=0.2725, pruned_loss=0.06735, over 19380.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3049, pruned_loss=0.07878, over 3797342.45 frames. 
], batch size: 47, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:10:12,413 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3414, 1.7399, 1.8479, 2.1985, 1.9919, 1.9431, 1.8020, 2.1280], + device='cuda:2'), covar=tensor([0.0742, 0.1461, 0.1180, 0.0846, 0.1091, 0.0440, 0.1148, 0.0556], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0352, 0.0292, 0.0241, 0.0296, 0.0243, 0.0280, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:11:08,892 INFO [train.py:903] (2/4) Epoch 12, batch 1150, loss[loss=0.2473, simple_loss=0.3212, pruned_loss=0.08671, over 19318.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3045, pruned_loss=0.07853, over 3810411.88 frames. ], batch size: 66, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:11:27,479 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 5.432e+02 6.664e+02 8.568e+02 1.731e+03, threshold=1.333e+03, percent-clipped=3.0 +2023-04-01 21:11:44,816 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76287.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:12:10,715 INFO [train.py:903] (2/4) Epoch 12, batch 1200, loss[loss=0.2892, simple_loss=0.3449, pruned_loss=0.1167, over 13186.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3045, pruned_loss=0.07848, over 3815846.66 frames. ], batch size: 135, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:12:15,523 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:12:45,722 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 21:12:52,874 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76341.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:13:14,054 INFO [train.py:903] (2/4) Epoch 12, batch 1250, loss[loss=0.2299, simple_loss=0.3086, pruned_loss=0.0756, over 18381.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3047, pruned_loss=0.07859, over 3818342.45 frames. ], batch size: 84, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:13:31,221 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 5.242e+02 6.279e+02 7.669e+02 1.575e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-01 21:14:15,613 INFO [train.py:903] (2/4) Epoch 12, batch 1300, loss[loss=0.262, simple_loss=0.3351, pruned_loss=0.09443, over 19562.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3046, pruned_loss=0.07867, over 3802638.91 frames. ], batch size: 61, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:15:18,798 INFO [train.py:903] (2/4) Epoch 12, batch 1350, loss[loss=0.2549, simple_loss=0.3218, pruned_loss=0.09399, over 19649.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3055, pruned_loss=0.07947, over 3798426.57 frames. ], batch size: 58, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:15:37,066 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 5.247e+02 6.486e+02 7.910e+02 1.390e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 21:16:20,442 INFO [train.py:903] (2/4) Epoch 12, batch 1400, loss[loss=0.2977, simple_loss=0.3594, pruned_loss=0.118, over 19573.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3056, pruned_loss=0.0791, over 3810445.03 frames. 
], batch size: 61, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:16:50,864 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2047, 1.7057, 1.6503, 2.0400, 1.9783, 1.8715, 1.5816, 2.0317], + device='cuda:2'), covar=tensor([0.0842, 0.1645, 0.1369, 0.0885, 0.1088, 0.0484, 0.1293, 0.0623], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0354, 0.0292, 0.0242, 0.0298, 0.0245, 0.0279, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:17:01,826 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9469, 3.5440, 2.4380, 3.1952, 0.8771, 3.3426, 3.3440, 3.4362], + device='cuda:2'), covar=tensor([0.0812, 0.1159, 0.2069, 0.0884, 0.4069, 0.1011, 0.0904, 0.1124], + device='cuda:2'), in_proj_covar=tensor([0.0442, 0.0367, 0.0442, 0.0319, 0.0380, 0.0374, 0.0358, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:17:23,456 INFO [train.py:903] (2/4) Epoch 12, batch 1450, loss[loss=0.2735, simple_loss=0.3411, pruned_loss=0.103, over 19545.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3056, pruned_loss=0.07944, over 3807952.99 frames. ], batch size: 54, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:17:25,875 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 21:17:40,981 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 4.935e+02 6.510e+02 8.250e+02 1.774e+03, threshold=1.302e+03, percent-clipped=3.0 +2023-04-01 21:18:24,875 INFO [train.py:903] (2/4) Epoch 12, batch 1500, loss[loss=0.2043, simple_loss=0.2779, pruned_loss=0.06533, over 19042.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3059, pruned_loss=0.07938, over 3804603.53 frames. ], batch size: 42, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:18:53,775 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76631.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:19:24,218 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76656.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:19:27,185 INFO [train.py:903] (2/4) Epoch 12, batch 1550, loss[loss=0.2211, simple_loss=0.2924, pruned_loss=0.07487, over 19473.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3063, pruned_loss=0.07984, over 3813304.52 frames. ], batch size: 49, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:19:46,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 5.709e+02 6.905e+02 9.210e+02 1.884e+03, threshold=1.381e+03, percent-clipped=4.0 +2023-04-01 21:20:01,237 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76685.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:20:19,360 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 21:20:23,671 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 21:20:29,928 INFO [train.py:903] (2/4) Epoch 12, batch 1600, loss[loss=0.2163, simple_loss=0.301, pruned_loss=0.06583, over 19778.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3045, pruned_loss=0.07854, over 3823241.30 frames. ], batch size: 56, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:20:53,980 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 21:21:16,387 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76746.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:21:23,364 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9599, 3.3986, 1.9025, 1.9124, 3.0444, 1.5415, 1.2705, 2.1399], + device='cuda:2'), covar=tensor([0.1247, 0.0418, 0.1030, 0.0790, 0.0475, 0.1178, 0.1058, 0.0635], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0300, 0.0321, 0.0243, 0.0231, 0.0316, 0.0289, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:21:29,664 INFO [train.py:903] (2/4) Epoch 12, batch 1650, loss[loss=0.2228, simple_loss=0.2874, pruned_loss=0.0791, over 19779.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3049, pruned_loss=0.07907, over 3814188.05 frames. ], batch size: 48, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:21:30,359 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-01 21:21:46,977 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:21:49,944 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.672e+02 5.088e+02 6.360e+02 7.707e+02 1.579e+03, threshold=1.272e+03, percent-clipped=3.0 +2023-04-01 21:22:06,325 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3876, 1.4934, 1.4829, 2.0435, 1.4221, 1.8415, 1.7392, 1.2256], + device='cuda:2'), covar=tensor([0.4117, 0.3373, 0.2179, 0.2069, 0.3491, 0.1737, 0.4811, 0.4037], + device='cuda:2'), in_proj_covar=tensor([0.0791, 0.0816, 0.0646, 0.0889, 0.0781, 0.0706, 0.0779, 0.0709], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:22:22,677 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76800.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:22:33,399 INFO [train.py:903] (2/4) Epoch 12, batch 1700, loss[loss=0.2546, simple_loss=0.3267, pruned_loss=0.09124, over 19367.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.305, pruned_loss=0.079, over 3819017.20 frames. ], batch size: 70, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:22:59,201 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4211, 1.8384, 1.9601, 2.7307, 2.1462, 2.7703, 2.6228, 2.5023], + device='cuda:2'), covar=tensor([0.0684, 0.0874, 0.0897, 0.0890, 0.0901, 0.0607, 0.0833, 0.0538], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0246, 0.0236, 0.0213, 0.0195, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:23:10,303 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 21:23:16,962 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0083, 1.9034, 1.7763, 1.5633, 1.4587, 1.5986, 0.5142, 0.8950], + device='cuda:2'), covar=tensor([0.0398, 0.0425, 0.0284, 0.0459, 0.0865, 0.0489, 0.0811, 0.0753], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0329, 0.0332, 0.0356, 0.0427, 0.0354, 0.0309, 0.0322], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:23:33,212 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:23:34,180 INFO [train.py:903] (2/4) Epoch 12, batch 1750, loss[loss=0.2975, simple_loss=0.3514, pruned_loss=0.1218, over 12862.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3065, pruned_loss=0.07985, over 3811449.00 frames. ], batch size: 135, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:23:36,412 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 21:23:53,639 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.587e+02 7.065e+02 8.864e+02 2.096e+03, threshold=1.413e+03, percent-clipped=6.0 +2023-04-01 21:24:37,130 INFO [train.py:903] (2/4) Epoch 12, batch 1800, loss[loss=0.2932, simple_loss=0.358, pruned_loss=0.1141, over 19676.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3069, pruned_loss=0.08013, over 3822270.67 frames. ], batch size: 60, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:24:44,436 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1089, 3.2423, 1.9297, 2.0933, 2.9463, 1.6165, 1.5570, 2.1870], + device='cuda:2'), covar=tensor([0.1136, 0.0575, 0.0941, 0.0644, 0.0486, 0.1082, 0.0820, 0.0587], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0300, 0.0320, 0.0243, 0.0231, 0.0317, 0.0289, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:25:32,716 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 21:25:38,683 INFO [train.py:903] (2/4) Epoch 12, batch 1850, loss[loss=0.2154, simple_loss=0.2946, pruned_loss=0.06815, over 19771.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3052, pruned_loss=0.07934, over 3819336.65 frames. ], batch size: 56, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:25:45,873 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.83 vs. limit=5.0 +2023-04-01 21:25:59,471 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 5.640e+02 7.026e+02 8.457e+02 1.689e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 21:26:12,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 21:26:34,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77002.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:26:41,880 INFO [train.py:903] (2/4) Epoch 12, batch 1900, loss[loss=0.2347, simple_loss=0.3098, pruned_loss=0.07977, over 19604.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3043, pruned_loss=0.07853, over 3816338.52 frames. ], batch size: 57, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:27:00,195 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-01 21:27:04,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 21:27:06,370 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:27:06,385 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:28,481 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 21:27:36,585 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77052.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:42,264 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77056.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:27:43,954 INFO [train.py:903] (2/4) Epoch 12, batch 1950, loss[loss=0.2857, simple_loss=0.3433, pruned_loss=0.114, over 19294.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3061, pruned_loss=0.07969, over 3809349.82 frames. ], batch size: 66, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:28:03,222 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.700e+02 5.468e+02 7.092e+02 9.065e+02 1.810e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 21:28:11,852 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77081.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:28:45,469 INFO [train.py:903] (2/4) Epoch 12, batch 2000, loss[loss=0.219, simple_loss=0.299, pruned_loss=0.06954, over 19528.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3059, pruned_loss=0.07913, over 3820858.88 frames. ], batch size: 54, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:28:51,469 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77113.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:29:15,585 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5890, 1.3056, 1.2034, 1.4553, 1.0846, 1.3879, 1.2032, 1.3930], + device='cuda:2'), covar=tensor([0.0996, 0.1082, 0.1447, 0.0874, 0.1174, 0.0533, 0.1316, 0.0748], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0349, 0.0292, 0.0238, 0.0294, 0.0242, 0.0278, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:29:43,041 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 21:29:46,526 INFO [train.py:903] (2/4) Epoch 12, batch 2050, loss[loss=0.2545, simple_loss=0.3285, pruned_loss=0.09024, over 18043.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3054, pruned_loss=0.07893, over 3828935.35 frames. ], batch size: 83, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:30:02,221 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 21:30:03,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 21:30:06,831 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.225e+02 5.837e+02 7.308e+02 9.815e+02 2.165e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 21:30:26,535 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 21:30:40,585 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77201.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:30:49,625 INFO [train.py:903] (2/4) Epoch 12, batch 2100, loss[loss=0.2519, simple_loss=0.3033, pruned_loss=0.1002, over 18688.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3051, pruned_loss=0.07861, over 3815939.81 frames. ], batch size: 41, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:31:10,322 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 21:31:12,344 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0140, 5.4086, 2.8964, 4.6936, 1.1042, 5.4321, 5.3134, 5.5164], + device='cuda:2'), covar=tensor([0.0391, 0.0886, 0.1908, 0.0674, 0.3935, 0.0543, 0.0735, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0445, 0.0369, 0.0444, 0.0321, 0.0384, 0.0378, 0.0364, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:31:17,056 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:31:20,290 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 21:31:28,734 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1585, 1.7102, 1.7890, 2.5541, 2.0561, 2.6583, 2.5057, 2.2832], + device='cuda:2'), covar=tensor([0.0779, 0.0949, 0.1018, 0.0961, 0.0957, 0.0632, 0.0888, 0.0634], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0226, 0.0226, 0.0249, 0.0236, 0.0215, 0.0197, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:31:40,661 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 21:31:52,747 INFO [train.py:903] (2/4) Epoch 12, batch 2150, loss[loss=0.2642, simple_loss=0.3365, pruned_loss=0.09599, over 19524.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3046, pruned_loss=0.07845, over 3814714.38 frames. ], batch size: 56, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:32:13,143 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.202e+02 5.327e+02 7.168e+02 9.347e+02 2.125e+03, threshold=1.434e+03, percent-clipped=4.0 +2023-04-01 21:32:33,258 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:39,481 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77295.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:51,910 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-01 21:32:53,873 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2340, 1.3985, 1.9066, 1.7492, 2.9343, 4.6726, 4.5715, 5.0109], + device='cuda:2'), covar=tensor([0.1611, 0.3433, 0.2973, 0.1872, 0.0528, 0.0146, 0.0141, 0.0125], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0295, 0.0324, 0.0250, 0.0215, 0.0158, 0.0205, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:32:55,901 INFO [train.py:903] (2/4) Epoch 12, batch 2200, loss[loss=0.2106, simple_loss=0.2813, pruned_loss=0.06989, over 19743.00 frames. 
], tot_loss[loss=0.2306, simple_loss=0.3048, pruned_loss=0.07825, over 3809170.89 frames. ], batch size: 51, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:33:05,515 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77316.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:33:25,215 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 21:33:57,352 INFO [train.py:903] (2/4) Epoch 12, batch 2250, loss[loss=0.1879, simple_loss=0.2774, pruned_loss=0.0492, over 19762.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.305, pruned_loss=0.07835, over 3802096.32 frames. ], batch size: 54, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:34:08,728 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1465, 1.2414, 1.7788, 1.2682, 2.8684, 3.7728, 3.4624, 3.9524], + device='cuda:2'), covar=tensor([0.1587, 0.3342, 0.2901, 0.2061, 0.0451, 0.0130, 0.0201, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0294, 0.0325, 0.0250, 0.0215, 0.0158, 0.0206, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:34:18,142 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.298e+02 6.341e+02 7.997e+02 1.542e+03, threshold=1.268e+03, percent-clipped=2.0 +2023-04-01 21:34:27,370 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5670, 2.9626, 3.0413, 3.0541, 1.3156, 2.8547, 2.5809, 2.7786], + device='cuda:2'), covar=tensor([0.1523, 0.1331, 0.0746, 0.0791, 0.4508, 0.1052, 0.0746, 0.1259], + device='cuda:2'), in_proj_covar=tensor([0.0683, 0.0613, 0.0812, 0.0690, 0.0738, 0.0565, 0.0498, 0.0749], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 21:34:35,673 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 21:34:58,601 INFO [train.py:903] (2/4) Epoch 12, batch 2300, loss[loss=0.2543, simple_loss=0.3276, pruned_loss=0.09052, over 18229.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3056, pruned_loss=0.07905, over 3805209.99 frames. ], batch size: 83, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:35:10,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 21:35:59,758 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77457.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:36:00,823 INFO [train.py:903] (2/4) Epoch 12, batch 2350, loss[loss=0.3273, simple_loss=0.3712, pruned_loss=0.1417, over 13072.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3062, pruned_loss=0.07962, over 3798718.52 frames. ], batch size: 136, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:36:22,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.245e+02 6.457e+02 8.417e+02 4.507e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-01 21:36:41,217 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 21:36:59,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 21:37:02,535 INFO [train.py:903] (2/4) Epoch 12, batch 2400, loss[loss=0.216, simple_loss=0.2876, pruned_loss=0.07225, over 19601.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3066, pruned_loss=0.07956, over 3806277.03 frames. 
], batch size: 50, lr: 6.99e-03, grad_scale: 8.0 +2023-04-01 21:37:17,476 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4561, 1.2550, 1.2869, 1.7911, 1.4045, 1.6022, 1.7889, 1.4944], + device='cuda:2'), covar=tensor([0.0813, 0.0996, 0.1025, 0.0730, 0.0802, 0.0752, 0.0747, 0.0736], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0224, 0.0224, 0.0248, 0.0234, 0.0214, 0.0196, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:37:36,456 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9270, 2.0042, 2.1425, 2.7519, 1.9761, 2.6123, 2.4182, 1.9864], + device='cuda:2'), covar=tensor([0.3270, 0.2913, 0.1439, 0.1760, 0.3143, 0.1401, 0.3127, 0.2597], + device='cuda:2'), in_proj_covar=tensor([0.0802, 0.0823, 0.0653, 0.0897, 0.0788, 0.0711, 0.0789, 0.0719], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:38:04,500 INFO [train.py:903] (2/4) Epoch 12, batch 2450, loss[loss=0.314, simple_loss=0.3569, pruned_loss=0.1355, over 13148.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3071, pruned_loss=0.08016, over 3802178.46 frames. ], batch size: 136, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:38:21,434 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:21,503 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:22,409 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:24,483 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.530e+02 6.529e+02 8.263e+02 1.639e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 21:38:52,851 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77597.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:56,119 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1834, 5.5879, 3.0768, 4.9568, 1.3016, 5.5395, 5.5372, 5.6966], + device='cuda:2'), covar=tensor([0.0377, 0.0793, 0.1786, 0.0596, 0.3715, 0.0540, 0.0627, 0.0828], + device='cuda:2'), in_proj_covar=tensor([0.0440, 0.0365, 0.0440, 0.0319, 0.0380, 0.0373, 0.0360, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:39:05,884 INFO [train.py:903] (2/4) Epoch 12, batch 2500, loss[loss=0.264, simple_loss=0.3361, pruned_loss=0.09594, over 19676.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3061, pruned_loss=0.07968, over 3805937.15 frames. 
], batch size: 60, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:39:27,243 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5594, 1.2939, 1.3150, 2.0965, 1.6759, 1.8699, 2.1848, 1.7885], + device='cuda:2'), covar=tensor([0.0859, 0.1029, 0.1109, 0.0839, 0.0929, 0.0721, 0.0779, 0.0661], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0244, 0.0232, 0.0211, 0.0193, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:39:39,425 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,537 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,750 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4708, 1.3493, 1.3058, 1.9098, 1.4778, 1.8031, 1.8707, 1.6877], + device='cuda:2'), covar=tensor([0.0842, 0.0937, 0.1095, 0.0800, 0.0898, 0.0721, 0.0879, 0.0647], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0220, 0.0221, 0.0243, 0.0231, 0.0211, 0.0193, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 21:39:45,165 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:08,010 INFO [train.py:903] (2/4) Epoch 12, batch 2550, loss[loss=0.2792, simple_loss=0.3394, pruned_loss=0.1095, over 19667.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3074, pruned_loss=0.08028, over 3808638.73 frames. ], batch size: 53, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:40:30,541 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.340e+02 6.709e+02 8.508e+02 1.809e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-01 21:40:46,827 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:54,793 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:41:05,590 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 21:41:10,775 INFO [train.py:903] (2/4) Epoch 12, batch 2600, loss[loss=0.2165, simple_loss=0.2935, pruned_loss=0.06976, over 19480.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3058, pruned_loss=0.07922, over 3823625.35 frames. ], batch size: 49, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:41:54,843 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 21:42:02,073 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:07,421 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:11,588 INFO [train.py:903] (2/4) Epoch 12, batch 2650, loss[loss=0.2451, simple_loss=0.3194, pruned_loss=0.08539, over 19761.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3072, pruned_loss=0.07986, over 3817494.07 frames. 
], batch size: 54, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:42:32,175 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.775e+02 6.860e+02 8.613e+02 1.817e+03, threshold=1.372e+03, percent-clipped=5.0 +2023-04-01 21:42:33,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 21:43:06,970 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0079, 3.6261, 2.3932, 3.2997, 0.8176, 3.4162, 3.4452, 3.5083], + device='cuda:2'), covar=tensor([0.0754, 0.1163, 0.2134, 0.0816, 0.4122, 0.0872, 0.0832, 0.1159], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0374, 0.0450, 0.0326, 0.0386, 0.0379, 0.0367, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:43:12,307 INFO [train.py:903] (2/4) Epoch 12, batch 2700, loss[loss=0.2356, simple_loss=0.3056, pruned_loss=0.08286, over 19647.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.308, pruned_loss=0.08017, over 3812483.23 frames. ], batch size: 53, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:43:38,298 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:08,839 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77853.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:14,409 INFO [train.py:903] (2/4) Epoch 12, batch 2750, loss[loss=0.1969, simple_loss=0.2707, pruned_loss=0.06156, over 19616.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3071, pruned_loss=0.07984, over 3817261.98 frames. ], batch size: 50, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:44:36,443 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.785e+02 6.935e+02 8.560e+02 1.739e+03, threshold=1.387e+03, percent-clipped=4.0 +2023-04-01 21:45:15,194 INFO [train.py:903] (2/4) Epoch 12, batch 2800, loss[loss=0.249, simple_loss=0.3234, pruned_loss=0.08728, over 17322.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3078, pruned_loss=0.08066, over 3813254.05 frames. ], batch size: 101, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:46:01,216 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:19,703 INFO [train.py:903] (2/4) Epoch 12, batch 2850, loss[loss=0.1757, simple_loss=0.253, pruned_loss=0.04923, over 19772.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3072, pruned_loss=0.07991, over 3824364.39 frames. 
], batch size: 47, lr: 6.97e-03, grad_scale: 4.0 +2023-04-01 21:46:22,374 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3823, 1.3640, 1.5239, 1.5659, 2.9079, 1.0640, 2.1848, 3.2888], + device='cuda:2'), covar=tensor([0.0514, 0.2635, 0.2574, 0.1692, 0.0827, 0.2493, 0.1115, 0.0285], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0333, 0.0345, 0.0315, 0.0340, 0.0330, 0.0331, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 21:46:32,749 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:41,208 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.493e+02 5.663e+02 6.634e+02 8.773e+02 3.814e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-01 21:46:43,717 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:20,370 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:22,308 INFO [train.py:903] (2/4) Epoch 12, batch 2900, loss[loss=0.2495, simple_loss=0.3279, pruned_loss=0.08552, over 19736.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3068, pruned_loss=0.07965, over 3834384.59 frames. ], batch size: 63, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:47:22,330 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 21:47:25,084 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:39,756 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3714, 1.3233, 1.4750, 1.4966, 1.8653, 1.9358, 1.8251, 0.4935], + device='cuda:2'), covar=tensor([0.1988, 0.3629, 0.2166, 0.1624, 0.1241, 0.1855, 0.1199, 0.3718], + device='cuda:2'), in_proj_covar=tensor([0.0489, 0.0578, 0.0608, 0.0437, 0.0594, 0.0489, 0.0644, 0.0495], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:47:52,633 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:57,859 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78035.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:02,336 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:25,035 INFO [train.py:903] (2/4) Epoch 12, batch 2950, loss[loss=0.261, simple_loss=0.3266, pruned_loss=0.0977, over 12656.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07888, over 3821619.95 frames. ], batch size: 136, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:48:48,723 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.583e+02 6.866e+02 9.102e+02 1.641e+03, threshold=1.373e+03, percent-clipped=7.0 +2023-04-01 21:49:10,004 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:49:28,169 INFO [train.py:903] (2/4) Epoch 12, batch 3000, loss[loss=0.2726, simple_loss=0.3471, pruned_loss=0.09903, over 18097.00 frames. 
], tot_loss[loss=0.2324, simple_loss=0.3063, pruned_loss=0.07931, over 3812846.41 frames. ], batch size: 83, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:49:28,170 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 21:49:40,672 INFO [train.py:937] (2/4) Epoch 12, validation: loss=0.1772, simple_loss=0.2779, pruned_loss=0.0383, over 944034.00 frames. +2023-04-01 21:49:40,673 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 21:49:45,492 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 21:50:38,067 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:50:42,294 INFO [train.py:903] (2/4) Epoch 12, batch 3050, loss[loss=0.2252, simple_loss=0.309, pruned_loss=0.07066, over 19742.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07887, over 3819726.79 frames. ], batch size: 63, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:51:04,779 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 5.238e+02 6.566e+02 8.426e+02 1.854e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 21:51:18,347 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1002, 1.3128, 1.9779, 1.5171, 2.7919, 4.3989, 4.3962, 4.8911], + device='cuda:2'), covar=tensor([0.1669, 0.3457, 0.2905, 0.1980, 0.0574, 0.0172, 0.0152, 0.0129], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0297, 0.0326, 0.0253, 0.0216, 0.0159, 0.0205, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:51:43,592 INFO [train.py:903] (2/4) Epoch 12, batch 3100, loss[loss=0.1932, simple_loss=0.2631, pruned_loss=0.06164, over 18201.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3068, pruned_loss=0.08026, over 3808483.17 frames. ], batch size: 40, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:36,393 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:52:45,562 INFO [train.py:903] (2/4) Epoch 12, batch 3150, loss[loss=0.2805, simple_loss=0.3315, pruned_loss=0.1148, over 19130.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.306, pruned_loss=0.07939, over 3804905.95 frames. ], batch size: 42, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:53:07,636 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.745e+02 5.622e+02 7.364e+02 8.683e+02 1.879e+03, threshold=1.473e+03, percent-clipped=3.0 +2023-04-01 21:53:15,172 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 21:53:47,081 INFO [train.py:903] (2/4) Epoch 12, batch 3200, loss[loss=0.2148, simple_loss=0.2807, pruned_loss=0.07442, over 19789.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.305, pruned_loss=0.07906, over 3816484.21 frames. ], batch size: 48, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:39,760 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:54:50,773 INFO [train.py:903] (2/4) Epoch 12, batch 3250, loss[loss=0.1977, simple_loss=0.2792, pruned_loss=0.05811, over 19848.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3048, pruned_loss=0.07871, over 3804783.41 frames. 
], batch size: 52, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:57,744 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-01 21:55:10,576 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1731, 2.2241, 2.3159, 3.1047, 2.2014, 2.9851, 2.7061, 2.2783], + device='cuda:2'), covar=tensor([0.3615, 0.3257, 0.1495, 0.2025, 0.3588, 0.1546, 0.3348, 0.2623], + device='cuda:2'), in_proj_covar=tensor([0.0798, 0.0823, 0.0652, 0.0895, 0.0785, 0.0711, 0.0788, 0.0717], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:55:11,700 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:13,650 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.227e+02 6.164e+02 7.465e+02 1.234e+03, threshold=1.233e+03, percent-clipped=0.0 +2023-04-01 21:55:54,296 INFO [train.py:903] (2/4) Epoch 12, batch 3300, loss[loss=0.2146, simple_loss=0.2973, pruned_loss=0.06594, over 19574.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3044, pruned_loss=0.07841, over 3811096.87 frames. ], batch size: 61, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:57,137 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:57,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 21:56:26,553 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:56:55,608 INFO [train.py:903] (2/4) Epoch 12, batch 3350, loss[loss=0.2315, simple_loss=0.3096, pruned_loss=0.07668, over 19784.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3041, pruned_loss=0.07835, over 3823772.03 frames. ], batch size: 56, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:56:58,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2999, 1.5019, 1.9324, 1.5354, 2.9506, 2.4832, 3.3237, 1.4606], + device='cuda:2'), covar=tensor([0.2251, 0.3796, 0.2324, 0.1822, 0.1559, 0.1894, 0.1585, 0.3542], + device='cuda:2'), in_proj_covar=tensor([0.0483, 0.0570, 0.0598, 0.0430, 0.0587, 0.0483, 0.0636, 0.0486], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 21:57:18,018 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 5.251e+02 6.145e+02 6.896e+02 1.617e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-01 21:57:20,559 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:57:37,003 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-01 21:57:57,198 INFO [train.py:903] (2/4) Epoch 12, batch 3400, loss[loss=0.1993, simple_loss=0.2722, pruned_loss=0.06317, over 19795.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3042, pruned_loss=0.07822, over 3817087.52 frames. 
], batch size: 48, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:58:04,346 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5410, 1.6829, 1.8043, 1.9657, 1.3251, 1.7289, 1.9565, 1.7612], + device='cuda:2'), covar=tensor([0.3498, 0.2825, 0.1520, 0.1832, 0.3187, 0.1684, 0.3899, 0.2712], + device='cuda:2'), in_proj_covar=tensor([0.0800, 0.0824, 0.0652, 0.0895, 0.0784, 0.0711, 0.0789, 0.0717], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:59:00,752 INFO [train.py:903] (2/4) Epoch 12, batch 3450, loss[loss=0.2316, simple_loss=0.3034, pruned_loss=0.07987, over 19768.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.305, pruned_loss=0.07886, over 3808823.84 frames. ], batch size: 51, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:06,183 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 21:59:06,677 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1423, 2.2236, 2.4559, 3.2577, 2.2524, 3.1843, 2.8618, 2.3164], + device='cuda:2'), covar=tensor([0.3931, 0.3465, 0.1402, 0.1930, 0.3874, 0.1463, 0.3349, 0.2629], + device='cuda:2'), in_proj_covar=tensor([0.0800, 0.0827, 0.0652, 0.0894, 0.0787, 0.0710, 0.0788, 0.0718], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 21:59:21,427 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78574.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:59:23,277 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.692e+02 7.472e+02 9.495e+02 2.057e+03, threshold=1.494e+03, percent-clipped=9.0 +2023-04-01 21:59:44,665 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:00:03,308 INFO [train.py:903] (2/4) Epoch 12, batch 3500, loss[loss=0.1851, simple_loss=0.2596, pruned_loss=0.05525, over 19328.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3056, pruned_loss=0.0794, over 3812128.52 frames. ], batch size: 44, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 22:00:54,059 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3169, 1.7548, 1.9232, 2.6236, 2.0291, 2.5462, 2.8223, 2.4629], + device='cuda:2'), covar=tensor([0.0762, 0.0929, 0.0981, 0.0903, 0.0923, 0.0674, 0.0782, 0.0621], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0248, 0.0236, 0.0213, 0.0196, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 22:01:05,327 INFO [train.py:903] (2/4) Epoch 12, batch 3550, loss[loss=0.2507, simple_loss=0.3203, pruned_loss=0.09059, over 19536.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3035, pruned_loss=0.07833, over 3825133.81 frames. ], batch size: 56, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:01:09,480 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-01 22:01:26,751 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 5.737e+02 7.042e+02 1.025e+03 1.962e+03, threshold=1.408e+03, percent-clipped=6.0 +2023-04-01 22:02:07,311 INFO [train.py:903] (2/4) Epoch 12, batch 3600, loss[loss=0.2109, simple_loss=0.2865, pruned_loss=0.06762, over 19858.00 frames. 
], tot_loss[loss=0.2303, simple_loss=0.3037, pruned_loss=0.07849, over 3816891.56 frames. ], batch size: 52, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:02:08,944 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:02:13,717 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-01 22:03:09,180 INFO [train.py:903] (2/4) Epoch 12, batch 3650, loss[loss=0.1997, simple_loss=0.2853, pruned_loss=0.05701, over 19669.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3036, pruned_loss=0.07829, over 3816887.14 frames. ], batch size: 53, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:03:29,512 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5625, 4.0027, 4.2363, 4.2272, 1.6341, 3.9735, 3.5449, 3.9278], + device='cuda:2'), covar=tensor([0.1365, 0.0784, 0.0545, 0.0592, 0.5396, 0.0725, 0.0611, 0.1086], + device='cuda:2'), in_proj_covar=tensor([0.0674, 0.0612, 0.0801, 0.0685, 0.0729, 0.0569, 0.0488, 0.0738], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:03:33,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.318e+02 6.562e+02 8.219e+02 2.478e+03, threshold=1.312e+03, percent-clipped=4.0 +2023-04-01 22:04:14,202 INFO [train.py:903] (2/4) Epoch 12, batch 3700, loss[loss=0.218, simple_loss=0.296, pruned_loss=0.07001, over 19545.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3042, pruned_loss=0.07849, over 3810308.03 frames. ], batch size: 54, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:04:31,405 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:04:31,612 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3516, 1.1891, 1.1984, 1.6668, 1.4305, 1.5018, 1.6297, 1.4213], + device='cuda:2'), covar=tensor([0.0941, 0.1066, 0.1134, 0.0793, 0.0836, 0.0798, 0.0902, 0.0775], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0226, 0.0224, 0.0249, 0.0237, 0.0214, 0.0196, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 22:04:34,432 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 22:05:15,856 INFO [train.py:903] (2/4) Epoch 12, batch 3750, loss[loss=0.2113, simple_loss=0.2888, pruned_loss=0.06694, over 19675.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3043, pruned_loss=0.07836, over 3828439.54 frames. ], batch size: 53, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:05:37,740 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.608e+02 5.233e+02 6.179e+02 8.242e+02 1.500e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-01 22:05:59,316 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-01 22:06:05,767 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 22:06:16,422 INFO [train.py:903] (2/4) Epoch 12, batch 3800, loss[loss=0.2472, simple_loss=0.3169, pruned_loss=0.08878, over 19462.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3046, pruned_loss=0.07834, over 3819870.91 frames. 
], batch size: 49, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:06:29,210 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:06:53,665 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 22:06:54,061 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:17,948 INFO [train.py:903] (2/4) Epoch 12, batch 3850, loss[loss=0.2279, simple_loss=0.3063, pruned_loss=0.07471, over 19700.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3041, pruned_loss=0.07817, over 3835745.19 frames. ], batch size: 59, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:07:28,328 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:40,746 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.982e+02 6.977e+02 9.373e+02 2.137e+03, threshold=1.395e+03, percent-clipped=8.0 +2023-04-01 22:07:49,870 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:58,031 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:08:21,050 INFO [train.py:903] (2/4) Epoch 12, batch 3900, loss[loss=0.1977, simple_loss=0.2647, pruned_loss=0.06536, over 19366.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3042, pruned_loss=0.07835, over 3827781.41 frames. ], batch size: 47, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:08:51,016 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79033.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:09:15,007 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0967, 2.1398, 2.3762, 3.1756, 2.1391, 2.9730, 2.5657, 2.1477], + device='cuda:2'), covar=tensor([0.3791, 0.3411, 0.1505, 0.1908, 0.3827, 0.1545, 0.3606, 0.2815], + device='cuda:2'), in_proj_covar=tensor([0.0802, 0.0826, 0.0655, 0.0900, 0.0787, 0.0716, 0.0794, 0.0718], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 22:09:22,284 INFO [train.py:903] (2/4) Epoch 12, batch 3950, loss[loss=0.2544, simple_loss=0.3312, pruned_loss=0.08877, over 19302.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3051, pruned_loss=0.07886, over 3819967.03 frames. ], batch size: 66, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:09:24,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 22:09:29,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 22:09:29,436 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2373, 2.8182, 2.2451, 2.0303, 1.8032, 2.2777, 0.8262, 2.0258], + device='cuda:2'), covar=tensor([0.0434, 0.0503, 0.0507, 0.0920, 0.0975, 0.0869, 0.1099, 0.0808], + device='cuda:2'), in_proj_covar=tensor([0.0346, 0.0334, 0.0336, 0.0361, 0.0431, 0.0360, 0.0316, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 22:09:43,635 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.132e+02 6.993e+02 8.356e+02 2.478e+03, threshold=1.399e+03, percent-clipped=5.0 +2023-04-01 22:10:22,893 INFO [train.py:903] (2/4) Epoch 12, batch 4000, loss[loss=0.2431, simple_loss=0.3152, pruned_loss=0.08549, over 19374.00 frames. ], tot_loss[loss=0.23, simple_loss=0.304, pruned_loss=0.07803, over 3813305.87 frames. ], batch size: 47, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:13,114 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 22:11:21,647 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-01 22:11:24,594 INFO [train.py:903] (2/4) Epoch 12, batch 4050, loss[loss=0.2455, simple_loss=0.3251, pruned_loss=0.08293, over 19669.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3039, pruned_loss=0.07785, over 3809228.58 frames. ], batch size: 60, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:47,128 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.067e+02 6.190e+02 7.758e+02 2.001e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-01 22:11:53,114 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 22:12:05,426 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1494, 1.9626, 1.8031, 1.5874, 1.4647, 1.5925, 0.3785, 0.9632], + device='cuda:2'), covar=tensor([0.0431, 0.0498, 0.0377, 0.0613, 0.0937, 0.0692, 0.1000, 0.0832], + device='cuda:2'), in_proj_covar=tensor([0.0346, 0.0336, 0.0338, 0.0362, 0.0433, 0.0361, 0.0318, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 22:12:07,778 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:12:26,659 INFO [train.py:903] (2/4) Epoch 12, batch 4100, loss[loss=0.1713, simple_loss=0.2449, pruned_loss=0.04889, over 19751.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3038, pruned_loss=0.0778, over 3810961.46 frames. ], batch size: 45, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:12:39,314 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79218.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:13:03,843 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 22:13:27,006 INFO [train.py:903] (2/4) Epoch 12, batch 4150, loss[loss=0.2143, simple_loss=0.2995, pruned_loss=0.06458, over 19703.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3049, pruned_loss=0.07817, over 3828638.58 frames. 
], batch size: 63, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:13:49,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 5.552e+02 6.928e+02 9.304e+02 2.111e+03, threshold=1.386e+03, percent-clipped=6.0 +2023-04-01 22:14:06,885 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79289.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:23,449 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79303.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:14:28,770 INFO [train.py:903] (2/4) Epoch 12, batch 4200, loss[loss=0.1864, simple_loss=0.2607, pruned_loss=0.0561, over 15954.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3043, pruned_loss=0.07804, over 3826931.60 frames. ], batch size: 35, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:14:33,309 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 22:14:35,715 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79314.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:53,793 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:22,567 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:26,847 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5611, 3.9560, 4.1880, 4.1867, 1.6558, 3.9190, 3.5024, 3.8554], + device='cuda:2'), covar=tensor([0.1425, 0.0964, 0.0589, 0.0629, 0.5293, 0.0764, 0.0631, 0.1217], + device='cuda:2'), in_proj_covar=tensor([0.0689, 0.0619, 0.0816, 0.0695, 0.0741, 0.0573, 0.0497, 0.0753], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:15:31,214 INFO [train.py:903] (2/4) Epoch 12, batch 4250, loss[loss=0.231, simple_loss=0.3041, pruned_loss=0.07894, over 19750.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3042, pruned_loss=0.07791, over 3817209.62 frames. ], batch size: 51, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:15:43,224 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 22:15:52,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 4.925e+02 6.557e+02 8.003e+02 1.515e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-01 22:15:54,744 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 22:16:00,652 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:16:13,122 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79393.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:16:13,617 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.57 vs. limit=5.0 +2023-04-01 22:16:33,040 INFO [train.py:903] (2/4) Epoch 12, batch 4300, loss[loss=0.2196, simple_loss=0.2994, pruned_loss=0.06996, over 19853.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3043, pruned_loss=0.07877, over 3808915.09 frames. 
], batch size: 52, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:16:43,495 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9414, 2.5570, 1.8364, 1.9905, 2.3884, 1.6199, 1.5723, 2.0357], + device='cuda:2'), covar=tensor([0.0824, 0.0588, 0.0739, 0.0527, 0.0432, 0.0846, 0.0644, 0.0509], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0299, 0.0324, 0.0245, 0.0235, 0.0318, 0.0287, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:17:07,821 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2673, 1.2947, 1.5667, 1.4316, 2.1986, 1.9762, 2.2820, 0.8759], + device='cuda:2'), covar=tensor([0.2163, 0.3685, 0.2185, 0.1683, 0.1344, 0.1849, 0.1219, 0.3712], + device='cuda:2'), in_proj_covar=tensor([0.0493, 0.0580, 0.0611, 0.0439, 0.0595, 0.0495, 0.0648, 0.0495], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 22:17:14,346 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79442.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:17:24,310 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 22:17:32,596 INFO [train.py:903] (2/4) Epoch 12, batch 4350, loss[loss=0.2164, simple_loss=0.2866, pruned_loss=0.07304, over 19591.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3053, pruned_loss=0.07914, over 3821022.56 frames. ], batch size: 52, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:54,380 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.544e+02 6.866e+02 8.754e+02 2.036e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 22:17:59,268 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 22:18:34,714 INFO [train.py:903] (2/4) Epoch 12, batch 4400, loss[loss=0.2405, simple_loss=0.314, pruned_loss=0.08354, over 19364.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3056, pruned_loss=0.07928, over 3823237.90 frames. ], batch size: 66, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:18:45,486 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8510, 1.3471, 1.0095, 0.8578, 1.1746, 0.9339, 0.8158, 1.2227], + device='cuda:2'), covar=tensor([0.0571, 0.0715, 0.1062, 0.0778, 0.0508, 0.1152, 0.0619, 0.0376], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0298, 0.0322, 0.0244, 0.0234, 0.0315, 0.0286, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:18:58,952 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 22:19:07,289 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 22:19:38,009 INFO [train.py:903] (2/4) Epoch 12, batch 4450, loss[loss=0.1793, simple_loss=0.2562, pruned_loss=0.05118, over 19760.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3044, pruned_loss=0.07827, over 3823790.41 frames. 
], batch size: 45, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:20:00,027 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.129e+02 5.261e+02 6.909e+02 8.531e+02 1.990e+03, threshold=1.382e+03, percent-clipped=5.0 +2023-04-01 22:20:33,599 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:20:41,076 INFO [train.py:903] (2/4) Epoch 12, batch 4500, loss[loss=0.2404, simple_loss=0.3118, pruned_loss=0.08444, over 19625.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3049, pruned_loss=0.07883, over 3821074.05 frames. ], batch size: 57, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:20:51,329 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-01 22:21:29,744 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79647.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:21:43,337 INFO [train.py:903] (2/4) Epoch 12, batch 4550, loss[loss=0.2736, simple_loss=0.3362, pruned_loss=0.1055, over 19522.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3052, pruned_loss=0.07904, over 3809576.14 frames. ], batch size: 64, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:21:49,802 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-01 22:21:52,463 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 22:22:04,153 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:04,987 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.458e+02 6.277e+02 7.572e+02 1.495e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-01 22:22:11,618 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0126, 4.4375, 4.7761, 4.7404, 1.8150, 4.3791, 3.8668, 4.3803], + device='cuda:2'), covar=tensor([0.1351, 0.0748, 0.0489, 0.0500, 0.4705, 0.0673, 0.0572, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0686, 0.0617, 0.0812, 0.0691, 0.0732, 0.0565, 0.0495, 0.0745], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:22:15,955 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 22:22:29,762 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:29,930 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7488, 4.2582, 4.4665, 4.4553, 1.6666, 4.1194, 3.6934, 4.1072], + device='cuda:2'), covar=tensor([0.1416, 0.0667, 0.0509, 0.0538, 0.4967, 0.0665, 0.0565, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0686, 0.0616, 0.0812, 0.0691, 0.0732, 0.0564, 0.0494, 0.0745], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:22:33,637 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:44,774 INFO [train.py:903] (2/4) Epoch 12, batch 4600, loss[loss=0.2316, simple_loss=0.3064, pruned_loss=0.07841, over 19841.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3045, pruned_loss=0.07836, over 3824842.05 frames. 
], batch size: 52, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:04,290 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79723.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:07,649 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:21,790 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79737.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:23:46,648 INFO [train.py:903] (2/4) Epoch 12, batch 4650, loss[loss=0.2633, simple_loss=0.3336, pruned_loss=0.0965, over 19574.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3049, pruned_loss=0.07818, over 3835677.72 frames. ], batch size: 61, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:49,908 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.38 vs. limit=5.0 +2023-04-01 22:23:52,626 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79762.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:24:06,317 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 22:24:09,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 5.614e+02 7.010e+02 8.934e+02 1.991e+03, threshold=1.402e+03, percent-clipped=7.0 +2023-04-01 22:24:14,672 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:24:15,607 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 22:24:48,915 INFO [train.py:903] (2/4) Epoch 12, batch 4700, loss[loss=0.2226, simple_loss=0.3061, pruned_loss=0.06962, over 19754.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3056, pruned_loss=0.07856, over 3831949.39 frames. ], batch size: 54, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:24:52,468 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:12,467 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 22:25:30,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:45,559 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79852.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:25:52,116 INFO [train.py:903] (2/4) Epoch 12, batch 4750, loss[loss=0.2359, simple_loss=0.3106, pruned_loss=0.0806, over 18108.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3046, pruned_loss=0.07804, over 3822384.25 frames. ], batch size: 83, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:14,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.848e+02 5.173e+02 6.366e+02 7.623e+02 1.625e+03, threshold=1.273e+03, percent-clipped=2.0 +2023-04-01 22:26:54,248 INFO [train.py:903] (2/4) Epoch 12, batch 4800, loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.0625, over 19594.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3037, pruned_loss=0.07759, over 3828263.03 frames. 
], batch size: 52, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:58,940 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8185, 1.6073, 1.7208, 2.3259, 2.0314, 1.9310, 2.0464, 1.8677], + device='cuda:2'), covar=tensor([0.0709, 0.0930, 0.0801, 0.0602, 0.0731, 0.0720, 0.0820, 0.0639], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0226, 0.0224, 0.0247, 0.0237, 0.0213, 0.0194, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 22:27:42,669 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:27:55,362 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5175, 3.6233, 3.9914, 4.0037, 2.3204, 3.7364, 3.4113, 3.7057], + device='cuda:2'), covar=tensor([0.1242, 0.3326, 0.0589, 0.0636, 0.3869, 0.1049, 0.0589, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0679, 0.0615, 0.0805, 0.0688, 0.0726, 0.0558, 0.0487, 0.0739], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:27:56,267 INFO [train.py:903] (2/4) Epoch 12, batch 4850, loss[loss=0.3002, simple_loss=0.3487, pruned_loss=0.1259, over 13524.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3043, pruned_loss=0.078, over 3825817.16 frames. ], batch size: 136, lr: 6.88e-03, grad_scale: 16.0 +2023-04-01 22:28:19,171 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 5.432e+02 6.685e+02 9.186e+02 1.976e+03, threshold=1.337e+03, percent-clipped=11.0 +2023-04-01 22:28:21,558 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8919, 1.6434, 1.5238, 1.8783, 1.7582, 1.5946, 1.5503, 1.7360], + device='cuda:2'), covar=tensor([0.0872, 0.1382, 0.1345, 0.0895, 0.1134, 0.0509, 0.1169, 0.0704], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0351, 0.0293, 0.0240, 0.0298, 0.0242, 0.0281, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:28:23,575 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 22:28:43,806 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 22:28:48,464 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 22:28:51,004 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 22:28:59,978 INFO [train.py:903] (2/4) Epoch 12, batch 4900, loss[loss=0.1804, simple_loss=0.2539, pruned_loss=0.05346, over 19774.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3037, pruned_loss=0.07777, over 3821738.99 frames. ], batch size: 47, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:29:02,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 22:29:13,782 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80018.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:29:14,546 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:29:21,245 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 22:29:39,133 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5144, 2.3742, 1.6071, 1.6494, 2.2029, 1.3207, 1.2711, 1.9706], + device='cuda:2'), covar=tensor([0.1016, 0.0662, 0.0969, 0.0665, 0.0488, 0.1082, 0.0772, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0297, 0.0322, 0.0243, 0.0236, 0.0316, 0.0286, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:29:43,751 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80043.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:30:03,743 INFO [train.py:903] (2/4) Epoch 12, batch 4950, loss[loss=0.2191, simple_loss=0.2851, pruned_loss=0.07655, over 19411.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3038, pruned_loss=0.0778, over 3823715.55 frames. ], batch size: 48, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:30:08,669 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:13,625 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:20,433 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 22:30:26,819 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.263e+02 6.788e+02 8.958e+02 2.034e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 22:30:44,587 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80091.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:45,316 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 22:30:49,222 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.95 vs. limit=5.0 +2023-04-01 22:30:52,454 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:04,647 INFO [train.py:903] (2/4) Epoch 12, batch 5000, loss[loss=0.1745, simple_loss=0.2506, pruned_loss=0.04918, over 19725.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3044, pruned_loss=0.07831, over 3823734.72 frames. ], batch size: 51, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:31:05,081 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80108.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:31:13,750 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 22:31:22,046 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:24,067 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:26,035 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 22:31:37,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80133.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:31:38,250 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:32:06,837 INFO [train.py:903] (2/4) Epoch 12, batch 5050, loss[loss=0.2782, simple_loss=0.3342, pruned_loss=0.1111, over 13919.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3067, pruned_loss=0.07973, over 3810609.27 frames. ], batch size: 136, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:32:30,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 5.736e+02 7.301e+02 9.465e+02 2.500e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 22:32:41,349 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 22:32:55,396 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7849, 1.4424, 1.4573, 1.7052, 1.4678, 1.5519, 1.4305, 1.6230], + device='cuda:2'), covar=tensor([0.0901, 0.1273, 0.1353, 0.0888, 0.1182, 0.0502, 0.1235, 0.0707], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0349, 0.0291, 0.0238, 0.0297, 0.0241, 0.0280, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:33:07,070 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 22:33:08,738 INFO [train.py:903] (2/4) Epoch 12, batch 5100, loss[loss=0.2914, simple_loss=0.3451, pruned_loss=0.1189, over 13433.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3072, pruned_loss=0.08009, over 3808060.04 frames. ], batch size: 136, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:33:21,044 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 22:33:23,158 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 22:33:26,504 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 22:33:47,514 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:33:53,912 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 22:34:11,773 INFO [train.py:903] (2/4) Epoch 12, batch 5150, loss[loss=0.202, simple_loss=0.2753, pruned_loss=0.06436, over 19055.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3057, pruned_loss=0.07906, over 3820382.57 frames. ], batch size: 42, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:34:23,658 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 22:34:34,582 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.377e+02 5.205e+02 6.062e+02 7.794e+02 1.645e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-01 22:34:59,509 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 22:35:05,754 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:13,720 INFO [train.py:903] (2/4) Epoch 12, batch 5200, loss[loss=0.2054, simple_loss=0.2988, pruned_loss=0.05595, over 19676.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3045, pruned_loss=0.07797, over 3808060.42 frames. ], batch size: 58, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:35:26,463 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:27,300 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 22:35:58,830 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:36:13,572 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 22:36:16,619 INFO [train.py:903] (2/4) Epoch 12, batch 5250, loss[loss=0.2395, simple_loss=0.3189, pruned_loss=0.08009, over 19661.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3044, pruned_loss=0.07776, over 3811222.28 frames. ], batch size: 58, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:36:41,375 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.664e+02 5.603e+02 6.465e+02 8.351e+02 1.434e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 22:36:43,227 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 22:36:57,036 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80390.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:18,484 INFO [train.py:903] (2/4) Epoch 12, batch 5300, loss[loss=0.218, simple_loss=0.2956, pruned_loss=0.07023, over 19616.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3038, pruned_loss=0.07717, over 3817386.56 frames. ], batch size: 57, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:37:22,956 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:28,733 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:42,141 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 22:38:23,295 INFO [train.py:903] (2/4) Epoch 12, batch 5350, loss[loss=0.2637, simple_loss=0.3284, pruned_loss=0.09953, over 19609.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3034, pruned_loss=0.07691, over 3821741.51 frames. 
], batch size: 57, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:38:33,792 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9191, 2.0130, 2.1660, 2.7198, 1.9576, 2.6300, 2.3591, 1.9211], + device='cuda:2'), covar=tensor([0.3830, 0.3312, 0.1523, 0.2074, 0.3682, 0.1631, 0.3768, 0.2936], + device='cuda:2'), in_proj_covar=tensor([0.0806, 0.0828, 0.0658, 0.0896, 0.0792, 0.0717, 0.0792, 0.0720], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 22:38:44,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.085e+02 6.728e+02 8.660e+02 2.071e+03, threshold=1.346e+03, percent-clipped=4.0 +2023-04-01 22:38:50,335 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:38:52,582 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9125, 1.1901, 1.4055, 1.4599, 2.4415, 1.0591, 1.8860, 2.8556], + device='cuda:2'), covar=tensor([0.0736, 0.2886, 0.2816, 0.1772, 0.1066, 0.2510, 0.1414, 0.0433], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0344, 0.0357, 0.0323, 0.0350, 0.0333, 0.0340, 0.0362], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:38:59,012 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 22:39:08,253 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80495.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:23,541 INFO [train.py:903] (2/4) Epoch 12, batch 5400, loss[loss=0.1908, simple_loss=0.2608, pruned_loss=0.06037, over 19105.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3047, pruned_loss=0.07782, over 3828119.06 frames. ], batch size: 42, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:39:28,314 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80512.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:38,451 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:40:24,797 INFO [train.py:903] (2/4) Epoch 12, batch 5450, loss[loss=0.2173, simple_loss=0.2887, pruned_loss=0.0729, over 19852.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3045, pruned_loss=0.07781, over 3822301.80 frames. ], batch size: 52, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:40:49,072 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 5.165e+02 6.319e+02 8.444e+02 1.726e+03, threshold=1.264e+03, percent-clipped=5.0 +2023-04-01 22:41:26,460 INFO [train.py:903] (2/4) Epoch 12, batch 5500, loss[loss=0.2235, simple_loss=0.305, pruned_loss=0.07096, over 19667.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3049, pruned_loss=0.07806, over 3822646.77 frames. ], batch size: 58, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:41:53,692 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 22:42:12,382 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:42:29,083 INFO [train.py:903] (2/4) Epoch 12, batch 5550, loss[loss=0.2243, simple_loss=0.3059, pruned_loss=0.07135, over 18706.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3053, pruned_loss=0.07839, over 3808668.96 frames. 
], batch size: 74, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:42:38,030 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 22:42:45,270 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3362, 1.9326, 1.9229, 2.3823, 1.9883, 1.9586, 2.0323, 2.2717], + device='cuda:2'), covar=tensor([0.0755, 0.1367, 0.1198, 0.0864, 0.1182, 0.0446, 0.0978, 0.0583], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0351, 0.0292, 0.0239, 0.0300, 0.0239, 0.0280, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:42:51,886 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.910e+02 5.291e+02 6.725e+02 8.423e+02 1.958e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-01 22:43:28,246 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 22:43:31,865 INFO [train.py:903] (2/4) Epoch 12, batch 5600, loss[loss=0.2049, simple_loss=0.2775, pruned_loss=0.06614, over 19737.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3038, pruned_loss=0.07757, over 3800209.36 frames. ], batch size: 47, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:44:29,095 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:30,106 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:30,268 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1012, 5.1901, 5.9737, 5.8809, 1.9756, 5.6038, 4.7845, 5.5592], + device='cuda:2'), covar=tensor([0.1221, 0.0663, 0.0410, 0.0463, 0.5256, 0.0485, 0.0500, 0.0851], + device='cuda:2'), in_proj_covar=tensor([0.0686, 0.0616, 0.0811, 0.0698, 0.0736, 0.0562, 0.0497, 0.0744], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 22:44:33,521 INFO [train.py:903] (2/4) Epoch 12, batch 5650, loss[loss=0.2115, simple_loss=0.2803, pruned_loss=0.07136, over 19765.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3039, pruned_loss=0.0778, over 3810078.82 frames. ], batch size: 45, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:44:36,173 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:57,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.237e+02 6.303e+02 7.862e+02 2.175e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-01 22:45:15,131 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:45:17,621 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6679, 1.4748, 1.5323, 1.9929, 1.8152, 1.8672, 1.8666, 1.8144], + device='cuda:2'), covar=tensor([0.0711, 0.0886, 0.0871, 0.0734, 0.0766, 0.0673, 0.0867, 0.0586], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0223, 0.0220, 0.0242, 0.0232, 0.0208, 0.0192, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 22:45:20,700 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-01 22:45:24,541 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4538, 2.3052, 1.7715, 1.5718, 2.1180, 1.3716, 1.3170, 1.7369], + device='cuda:2'), covar=tensor([0.0995, 0.0655, 0.0938, 0.0730, 0.0468, 0.1114, 0.0706, 0.0479], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0301, 0.0323, 0.0244, 0.0238, 0.0317, 0.0284, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:45:35,185 INFO [train.py:903] (2/4) Epoch 12, batch 5700, loss[loss=0.2203, simple_loss=0.3075, pruned_loss=0.06654, over 18840.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3047, pruned_loss=0.07819, over 3805359.24 frames. ], batch size: 74, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:45:57,688 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:33,738 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7122, 1.5339, 1.3578, 1.6326, 1.5408, 1.3183, 1.2286, 1.5498], + device='cuda:2'), covar=tensor([0.1079, 0.1400, 0.1540, 0.1013, 0.1234, 0.0767, 0.1545, 0.0835], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0353, 0.0293, 0.0239, 0.0299, 0.0241, 0.0281, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:46:35,803 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:38,759 INFO [train.py:903] (2/4) Epoch 12, batch 5750, loss[loss=0.1783, simple_loss=0.2574, pruned_loss=0.04963, over 19083.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3047, pruned_loss=0.0783, over 3798853.93 frames. ], batch size: 42, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:46:39,952 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 22:46:47,878 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 22:46:52,478 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 22:46:52,772 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:47:00,404 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.415e+02 6.686e+02 8.336e+02 1.819e+03, threshold=1.337e+03, percent-clipped=1.0 +2023-04-01 22:47:40,316 INFO [train.py:903] (2/4) Epoch 12, batch 5800, loss[loss=0.214, simple_loss=0.2896, pruned_loss=0.06924, over 19587.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3038, pruned_loss=0.07714, over 3806264.26 frames. ], batch size: 52, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:21,628 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:48:41,886 INFO [train.py:903] (2/4) Epoch 12, batch 5850, loss[loss=0.2288, simple_loss=0.3076, pruned_loss=0.07504, over 19539.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3038, pruned_loss=0.0774, over 3810931.23 frames. 
], batch size: 56, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:58,808 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80971.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:06,075 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.387e+02 6.409e+02 7.183e+02 1.679e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 22:49:17,192 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 22:49:43,637 INFO [train.py:903] (2/4) Epoch 12, batch 5900, loss[loss=0.1984, simple_loss=0.2854, pruned_loss=0.05567, over 19590.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3044, pruned_loss=0.07753, over 3823482.39 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:49:47,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 22:49:55,089 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:56,195 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2977, 1.4190, 1.5406, 1.5347, 2.9205, 1.1113, 2.3141, 3.2467], + device='cuda:2'), covar=tensor([0.0531, 0.2459, 0.2540, 0.1773, 0.0682, 0.2354, 0.1153, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0341, 0.0355, 0.0323, 0.0347, 0.0332, 0.0337, 0.0361], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:50:09,732 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 22:50:25,043 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:47,170 INFO [train.py:903] (2/4) Epoch 12, batch 5950, loss[loss=0.2167, simple_loss=0.2943, pruned_loss=0.0696, over 19363.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3043, pruned_loss=0.0774, over 3824385.30 frames. ], batch size: 47, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:10,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.377e+02 6.760e+02 8.757e+02 1.989e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-01 22:51:36,986 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81098.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:51:49,674 INFO [train.py:903] (2/4) Epoch 12, batch 6000, loss[loss=0.1904, simple_loss=0.2622, pruned_loss=0.05928, over 19762.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3037, pruned_loss=0.07721, over 3816842.97 frames. ], batch size: 46, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:49,674 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 22:52:03,360 INFO [train.py:937] (2/4) Epoch 12, validation: loss=0.1765, simple_loss=0.2774, pruned_loss=0.03779, over 944034.00 frames. 
+2023-04-01 22:52:03,360 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 22:52:25,539 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:34,801 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:35,861 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:57,854 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:05,460 INFO [train.py:903] (2/4) Epoch 12, batch 6050, loss[loss=0.2187, simple_loss=0.3047, pruned_loss=0.06639, over 19687.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3038, pruned_loss=0.07724, over 3824496.86 frames. ], batch size: 59, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:53:27,714 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.041e+02 6.677e+02 8.260e+02 1.738e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-01 22:53:33,654 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8563, 1.3552, 1.0581, 0.9279, 1.1585, 0.9412, 0.9117, 1.2318], + device='cuda:2'), covar=tensor([0.0621, 0.0716, 0.1107, 0.0611, 0.0529, 0.1180, 0.0565, 0.0433], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0303, 0.0325, 0.0246, 0.0240, 0.0320, 0.0282, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 22:53:52,461 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:05,932 INFO [train.py:903] (2/4) Epoch 12, batch 6100, loss[loss=0.2293, simple_loss=0.3022, pruned_loss=0.07822, over 19845.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3046, pruned_loss=0.07777, over 3823173.71 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:54:11,816 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:20,976 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:29,493 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:57,859 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:00,239 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:06,758 INFO [train.py:903] (2/4) Epoch 12, batch 6150, loss[loss=0.211, simple_loss=0.2937, pruned_loss=0.06413, over 19580.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3047, pruned_loss=0.07801, over 3827350.25 frames. ], batch size: 52, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:55:33,023 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 5.279e+02 6.402e+02 8.020e+02 2.167e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-04-01 22:55:39,186 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-01 22:56:12,035 INFO [train.py:903] (2/4) Epoch 12, batch 6200, loss[loss=0.2422, simple_loss=0.3179, pruned_loss=0.08323, over 19701.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3046, pruned_loss=0.07779, over 3832274.19 frames. ], batch size: 59, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:56:22,844 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81317.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:56:25,176 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:57:11,772 INFO [train.py:903] (2/4) Epoch 12, batch 6250, loss[loss=0.2115, simple_loss=0.2852, pruned_loss=0.06895, over 19477.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3039, pruned_loss=0.07732, over 3836761.91 frames. ], batch size: 49, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:57:33,897 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.420e+02 5.051e+02 6.163e+02 7.297e+02 1.401e+03, threshold=1.233e+03, percent-clipped=3.0 +2023-04-01 22:57:42,512 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5058, 1.5976, 1.9807, 1.7583, 3.1397, 2.5769, 3.2836, 1.5126], + device='cuda:2'), covar=tensor([0.2086, 0.3576, 0.2228, 0.1635, 0.1332, 0.1728, 0.1502, 0.3449], + device='cuda:2'), in_proj_covar=tensor([0.0494, 0.0587, 0.0616, 0.0440, 0.0594, 0.0502, 0.0649, 0.0500], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 22:57:43,134 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 22:58:13,171 INFO [train.py:903] (2/4) Epoch 12, batch 6300, loss[loss=0.2711, simple_loss=0.3391, pruned_loss=0.1015, over 17250.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07836, over 3814400.36 frames. ], batch size: 100, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:58:32,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3025, 1.4171, 1.8404, 1.4596, 2.7766, 3.5735, 3.4648, 3.8015], + device='cuda:2'), covar=tensor([0.1535, 0.3274, 0.2880, 0.2024, 0.0539, 0.0185, 0.0184, 0.0210], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0297, 0.0327, 0.0249, 0.0218, 0.0162, 0.0205, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 22:59:02,065 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2588, 1.3321, 1.8079, 1.5227, 2.5233, 1.9675, 2.5532, 1.2237], + device='cuda:2'), covar=tensor([0.2588, 0.4258, 0.2539, 0.2056, 0.1654, 0.2362, 0.1723, 0.4088], + device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0581, 0.0611, 0.0435, 0.0591, 0.0497, 0.0645, 0.0496], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 22:59:14,515 INFO [train.py:903] (2/4) Epoch 12, batch 6350, loss[loss=0.2101, simple_loss=0.2942, pruned_loss=0.06301, over 17425.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.305, pruned_loss=0.07819, over 3806199.95 frames. 
], batch size: 101, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 22:59:29,324 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:39,339 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 5.412e+02 6.997e+02 8.497e+02 1.750e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-04-01 22:59:40,735 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:59,489 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81494.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:09,760 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0353, 1.7540, 1.7141, 2.1337, 1.8132, 1.8223, 1.8094, 1.9436], + device='cuda:2'), covar=tensor([0.0847, 0.1471, 0.1254, 0.0836, 0.1172, 0.0453, 0.1090, 0.0654], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0347, 0.0290, 0.0237, 0.0293, 0.0239, 0.0276, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 23:00:14,196 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:16,025 INFO [train.py:903] (2/4) Epoch 12, batch 6400, loss[loss=0.2507, simple_loss=0.3233, pruned_loss=0.08907, over 19413.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3053, pruned_loss=0.07842, over 3807536.38 frames. ], batch size: 70, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:00:45,284 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:01:19,145 INFO [train.py:903] (2/4) Epoch 12, batch 6450, loss[loss=0.2698, simple_loss=0.3359, pruned_loss=0.1018, over 13223.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3056, pruned_loss=0.07853, over 3805212.21 frames. ], batch size: 136, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:01:41,363 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 5.839e+02 6.972e+02 8.326e+02 2.886e+03, threshold=1.394e+03, percent-clipped=3.0 +2023-04-01 23:02:03,359 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81593.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:02:06,508 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 23:02:20,360 INFO [train.py:903] (2/4) Epoch 12, batch 6500, loss[loss=0.2382, simple_loss=0.3147, pruned_loss=0.08086, over 18662.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3049, pruned_loss=0.0778, over 3816244.53 frames. ], batch size: 74, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:02:27,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 23:03:22,581 INFO [train.py:903] (2/4) Epoch 12, batch 6550, loss[loss=0.2893, simple_loss=0.3514, pruned_loss=0.1136, over 19454.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3043, pruned_loss=0.07757, over 3811000.79 frames. 
], batch size: 64, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:03:26,197 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:28,586 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:47,193 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 5.338e+02 6.617e+02 7.892e+02 1.534e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 23:04:00,901 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:04:24,215 INFO [train.py:903] (2/4) Epoch 12, batch 6600, loss[loss=0.2714, simple_loss=0.3297, pruned_loss=0.1065, over 13430.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3038, pruned_loss=0.07739, over 3810847.48 frames. ], batch size: 135, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:26,102 INFO [train.py:903] (2/4) Epoch 12, batch 6650, loss[loss=0.1957, simple_loss=0.2776, pruned_loss=0.05693, over 19616.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3036, pruned_loss=0.07699, over 3803943.60 frames. ], batch size: 50, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:46,845 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:05:47,589 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.145e+02 6.461e+02 8.134e+02 1.737e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-01 23:05:49,161 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:06:26,738 INFO [train.py:903] (2/4) Epoch 12, batch 6700, loss[loss=0.2373, simple_loss=0.3131, pruned_loss=0.08076, over 18744.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3028, pruned_loss=0.07687, over 3811093.78 frames. ], batch size: 74, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:07:03,310 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 23:07:16,989 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:26,538 INFO [train.py:903] (2/4) Epoch 12, batch 6750, loss[loss=0.2396, simple_loss=0.3179, pruned_loss=0.08064, over 19666.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3031, pruned_loss=0.07724, over 3824256.80 frames. ], batch size: 55, lr: 6.80e-03, grad_scale: 4.0 +2023-04-01 23:07:45,258 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:49,445 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+02 6.276e+02 7.333e+02 1.082e+03 2.540e+03, threshold=1.467e+03, percent-clipped=11.0 +2023-04-01 23:08:23,230 INFO [train.py:903] (2/4) Epoch 12, batch 6800, loss[loss=0.1924, simple_loss=0.2717, pruned_loss=0.05659, over 19609.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3021, pruned_loss=0.07721, over 3808319.97 frames. 
], batch size: 50, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:08:40,550 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9123, 1.8614, 1.7172, 1.5528, 1.4305, 1.5771, 0.4283, 0.9651], + device='cuda:2'), covar=tensor([0.0454, 0.0434, 0.0276, 0.0449, 0.0750, 0.0521, 0.0821, 0.0690], + device='cuda:2'), in_proj_covar=tensor([0.0343, 0.0332, 0.0330, 0.0358, 0.0429, 0.0357, 0.0313, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:09:08,325 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 23:09:09,337 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 23:09:12,678 INFO [train.py:903] (2/4) Epoch 13, batch 0, loss[loss=0.2158, simple_loss=0.2888, pruned_loss=0.07142, over 19735.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2888, pruned_loss=0.07142, over 19735.00 frames. ], batch size: 51, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:09:12,678 INFO [train.py:928] (2/4) Computing validation loss +2023-04-01 23:09:23,579 INFO [train.py:937] (2/4) Epoch 13, validation: loss=0.176, simple_loss=0.2772, pruned_loss=0.03738, over 944034.00 frames. +2023-04-01 23:09:23,580 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-01 23:09:35,417 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 23:10:14,567 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.222e+02 6.740e+02 8.452e+02 3.268e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-01 23:10:23,830 INFO [train.py:903] (2/4) Epoch 13, batch 50, loss[loss=0.2069, simple_loss=0.301, pruned_loss=0.05647, over 19658.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3072, pruned_loss=0.07754, over 865517.29 frames. ], batch size: 55, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:10:46,581 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:10:59,192 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 23:11:20,799 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:21,065 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:23,310 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:25,073 INFO [train.py:903] (2/4) Epoch 13, batch 100, loss[loss=0.2921, simple_loss=0.3509, pruned_loss=0.1166, over 19571.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3035, pruned_loss=0.07643, over 1525559.22 frames. ], batch size: 61, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:11:36,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-01 23:11:52,586 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:55,635 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:11,463 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:16,819 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.805e+02 6.218e+02 7.513e+02 1.266e+03, threshold=1.244e+03, percent-clipped=0.0 +2023-04-01 23:12:25,778 INFO [train.py:903] (2/4) Epoch 13, batch 150, loss[loss=0.2052, simple_loss=0.2761, pruned_loss=0.06719, over 19400.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.302, pruned_loss=0.07572, over 2041505.95 frames. ], batch size: 48, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:23,617 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 23:13:24,756 INFO [train.py:903] (2/4) Epoch 13, batch 200, loss[loss=0.2208, simple_loss=0.2988, pruned_loss=0.07142, over 19654.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3019, pruned_loss=0.07587, over 2444356.16 frames. ], batch size: 55, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:25,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-01 23:13:37,358 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 23:13:40,477 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:14:14,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.002e+02 5.972e+02 7.403e+02 2.257e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-01 23:14:26,895 INFO [train.py:903] (2/4) Epoch 13, batch 250, loss[loss=0.2141, simple_loss=0.2857, pruned_loss=0.07118, over 19732.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3031, pruned_loss=0.0768, over 2756910.70 frames. ], batch size: 47, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:26,890 INFO [train.py:903] (2/4) Epoch 13, batch 300, loss[loss=0.1865, simple_loss=0.2645, pruned_loss=0.05422, over 19421.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3035, pruned_loss=0.07694, over 2998730.96 frames. ], batch size: 48, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:48,489 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 23:16:18,757 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.753e+02 6.815e+02 9.164e+02 1.837e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 23:16:28,127 INFO [train.py:903] (2/4) Epoch 13, batch 350, loss[loss=0.2243, simple_loss=0.3003, pruned_loss=0.07415, over 19697.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3036, pruned_loss=0.07728, over 3181330.82 frames. ], batch size: 53, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:30,460 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 23:17:28,519 INFO [train.py:903] (2/4) Epoch 13, batch 400, loss[loss=0.1838, simple_loss=0.2578, pruned_loss=0.05496, over 19728.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3023, pruned_loss=0.07694, over 3322915.09 frames. 
], batch size: 46, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:17:47,110 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:04,877 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:21,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.325e+02 6.166e+02 7.720e+02 2.046e+03, threshold=1.233e+03, percent-clipped=4.0 +2023-04-01 23:18:26,266 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 23:18:31,252 INFO [train.py:903] (2/4) Epoch 13, batch 450, loss[loss=0.2143, simple_loss=0.2886, pruned_loss=0.06997, over 19479.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3021, pruned_loss=0.07664, over 3438143.00 frames. ], batch size: 49, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:18:53,511 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:01,689 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8838, 3.4894, 2.4730, 3.1307, 0.9168, 3.3903, 3.3204, 3.4102], + device='cuda:2'), covar=tensor([0.0924, 0.1259, 0.1983, 0.0943, 0.3999, 0.0873, 0.0875, 0.1159], + device='cuda:2'), in_proj_covar=tensor([0.0444, 0.0374, 0.0445, 0.0321, 0.0381, 0.0375, 0.0369, 0.0402], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 23:19:04,668 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 23:19:04,708 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 23:19:09,581 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:23,688 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82428.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:33,813 INFO [train.py:903] (2/4) Epoch 13, batch 500, loss[loss=0.187, simple_loss=0.276, pruned_loss=0.04906, over 19597.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3022, pruned_loss=0.07637, over 3511645.73 frames. ], batch size: 52, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:20:06,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:20:24,378 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8666, 4.3189, 4.5907, 4.5745, 1.6281, 4.2637, 3.7446, 4.2234], + device='cuda:2'), covar=tensor([0.1520, 0.0770, 0.0550, 0.0662, 0.5630, 0.0754, 0.0633, 0.1070], + device='cuda:2'), in_proj_covar=tensor([0.0701, 0.0621, 0.0821, 0.0706, 0.0750, 0.0573, 0.0501, 0.0755], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-01 23:20:27,372 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.153e+02 6.569e+02 8.401e+02 1.477e+03, threshold=1.314e+03, percent-clipped=3.0 +2023-04-01 23:20:35,264 INFO [train.py:903] (2/4) Epoch 13, batch 550, loss[loss=0.2237, simple_loss=0.2989, pruned_loss=0.07427, over 19768.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3026, pruned_loss=0.0766, over 3586574.34 frames. 
], batch size: 56, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:21:05,898 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82511.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:21:30,830 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:21:35,043 INFO [train.py:903] (2/4) Epoch 13, batch 600, loss[loss=0.2541, simple_loss=0.3344, pruned_loss=0.08687, over 19494.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3038, pruned_loss=0.0768, over 3633989.21 frames. ], batch size: 64, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:22:17,359 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 23:22:28,774 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.313e+02 6.751e+02 8.249e+02 1.619e+03, threshold=1.350e+03, percent-clipped=3.0 +2023-04-01 23:22:36,927 INFO [train.py:903] (2/4) Epoch 13, batch 650, loss[loss=0.2169, simple_loss=0.2843, pruned_loss=0.07478, over 19410.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3023, pruned_loss=0.07604, over 3687273.00 frames. ], batch size: 48, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:22:38,451 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:23:40,974 INFO [train.py:903] (2/4) Epoch 13, batch 700, loss[loss=0.2119, simple_loss=0.2762, pruned_loss=0.07383, over 19316.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3015, pruned_loss=0.07507, over 3727565.83 frames. ], batch size: 44, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:24:01,339 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1346, 1.2710, 1.7948, 1.1478, 2.5877, 3.2928, 3.0202, 3.4571], + device='cuda:2'), covar=tensor([0.1528, 0.3436, 0.2815, 0.2132, 0.0500, 0.0163, 0.0220, 0.0231], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0302, 0.0331, 0.0255, 0.0222, 0.0163, 0.0207, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:24:36,357 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.358e+02 6.603e+02 8.553e+02 2.977e+03, threshold=1.321e+03, percent-clipped=4.0 +2023-04-01 23:24:44,577 INFO [train.py:903] (2/4) Epoch 13, batch 750, loss[loss=0.2314, simple_loss=0.3007, pruned_loss=0.08107, over 19740.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3025, pruned_loss=0.0757, over 3748367.63 frames. ], batch size: 51, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:10,152 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:28,829 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:47,331 INFO [train.py:903] (2/4) Epoch 13, batch 800, loss[loss=0.2106, simple_loss=0.2931, pruned_loss=0.06401, over 19524.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3019, pruned_loss=0.07549, over 3777554.43 frames. ], batch size: 54, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:58,381 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:26:01,439 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 23:26:10,301 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4934, 1.5231, 1.8099, 1.6974, 2.7952, 2.2511, 2.8993, 1.3872], + device='cuda:2'), covar=tensor([0.1956, 0.3583, 0.2140, 0.1593, 0.1262, 0.1754, 0.1183, 0.3475], + device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0578, 0.0610, 0.0439, 0.0589, 0.0495, 0.0640, 0.0493], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:26:24,286 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2630, 1.2945, 1.1837, 1.1039, 0.9917, 1.1317, 0.3341, 0.6216], + device='cuda:2'), covar=tensor([0.0399, 0.0387, 0.0226, 0.0353, 0.0692, 0.0424, 0.0743, 0.0596], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0359, 0.0430, 0.0357, 0.0317, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:26:42,349 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.821e+02 5.442e+02 6.436e+02 7.821e+02 1.140e+03, threshold=1.287e+03, percent-clipped=0.0 +2023-04-01 23:26:50,562 INFO [train.py:903] (2/4) Epoch 13, batch 850, loss[loss=0.2197, simple_loss=0.2977, pruned_loss=0.07084, over 19778.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3021, pruned_loss=0.07575, over 3789578.03 frames. ], batch size: 54, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:26:53,198 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:27,011 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:38,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:44,739 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 23:27:52,714 INFO [train.py:903] (2/4) Epoch 13, batch 900, loss[loss=0.2946, simple_loss=0.3343, pruned_loss=0.1275, over 19784.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3027, pruned_loss=0.07663, over 3792042.64 frames. ], batch size: 48, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:28:19,292 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82855.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:28:47,653 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.906e+02 6.958e+02 9.103e+02 2.196e+03, threshold=1.392e+03, percent-clipped=5.0 +2023-04-01 23:28:59,851 INFO [train.py:903] (2/4) Epoch 13, batch 950, loss[loss=0.2268, simple_loss=0.304, pruned_loss=0.07483, over 19666.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3011, pruned_loss=0.07589, over 3812813.94 frames. ], batch size: 60, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:29:04,323 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-01 23:29:37,782 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5676, 1.6634, 1.8717, 2.1141, 1.3911, 1.8839, 1.9638, 1.7335], + device='cuda:2'), covar=tensor([0.3705, 0.2951, 0.1563, 0.1712, 0.3335, 0.1593, 0.4096, 0.2818], + device='cuda:2'), in_proj_covar=tensor([0.0816, 0.0843, 0.0665, 0.0902, 0.0807, 0.0731, 0.0806, 0.0732], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 23:29:55,942 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:30:01,521 INFO [train.py:903] (2/4) Epoch 13, batch 1000, loss[loss=0.2048, simple_loss=0.2779, pruned_loss=0.06587, over 19086.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3018, pruned_loss=0.07625, over 3795373.18 frames. ], batch size: 42, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:30:44,814 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:30:49,530 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6111, 1.4386, 1.4695, 2.1365, 1.6539, 1.9394, 2.0285, 1.7238], + device='cuda:2'), covar=tensor([0.0912, 0.0994, 0.1070, 0.0847, 0.0916, 0.0761, 0.0893, 0.0730], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0225, 0.0225, 0.0244, 0.0234, 0.0211, 0.0195, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-01 23:30:52,611 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 23:30:54,574 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.161e+02 6.395e+02 8.326e+02 2.115e+03, threshold=1.279e+03, percent-clipped=2.0 +2023-04-01 23:30:57,014 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82981.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:31:02,718 INFO [train.py:903] (2/4) Epoch 13, batch 1050, loss[loss=0.2388, simple_loss=0.3124, pruned_loss=0.08261, over 19591.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3021, pruned_loss=0.0763, over 3810556.75 frames. ], batch size: 57, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:31:34,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 23:31:51,299 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9137, 2.3193, 2.3667, 2.9791, 2.5697, 2.5413, 2.4204, 3.0568], + device='cuda:2'), covar=tensor([0.0722, 0.1703, 0.1311, 0.1050, 0.1373, 0.0413, 0.1063, 0.0470], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0351, 0.0297, 0.0239, 0.0296, 0.0242, 0.0280, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 23:32:04,663 INFO [train.py:903] (2/4) Epoch 13, batch 1100, loss[loss=0.2382, simple_loss=0.3171, pruned_loss=0.07967, over 19763.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3019, pruned_loss=0.07615, over 3814050.30 frames. 
], batch size: 63, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:32:19,505 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:32:57,824 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 5.014e+02 6.117e+02 7.878e+02 1.226e+03, threshold=1.223e+03, percent-clipped=0.0 +2023-04-01 23:32:58,246 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:08,490 INFO [train.py:903] (2/4) Epoch 13, batch 1150, loss[loss=0.275, simple_loss=0.3443, pruned_loss=0.1029, over 19670.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3018, pruned_loss=0.07597, over 3813970.87 frames. ], batch size: 58, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:33:30,508 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:34:10,932 INFO [train.py:903] (2/4) Epoch 13, batch 1200, loss[loss=0.2235, simple_loss=0.2986, pruned_loss=0.07421, over 19737.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3017, pruned_loss=0.07588, over 3824754.59 frames. ], batch size: 63, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:34:14,937 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1386, 1.2935, 1.6595, 1.1406, 2.6905, 3.6309, 3.4016, 3.8916], + device='cuda:2'), covar=tensor([0.1588, 0.3470, 0.3115, 0.2207, 0.0558, 0.0155, 0.0206, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0299, 0.0328, 0.0253, 0.0221, 0.0162, 0.0207, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:34:40,330 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 23:35:06,465 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.101e+02 7.395e+02 1.032e+03 1.939e+03, threshold=1.479e+03, percent-clipped=13.0 +2023-04-01 23:35:12,376 INFO [train.py:903] (2/4) Epoch 13, batch 1250, loss[loss=0.2654, simple_loss=0.3401, pruned_loss=0.09539, over 19674.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3013, pruned_loss=0.07579, over 3833193.05 frames. ], batch size: 58, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:35:48,391 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83214.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:36:02,591 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83226.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:36:13,263 INFO [train.py:903] (2/4) Epoch 13, batch 1300, loss[loss=0.2329, simple_loss=0.3077, pruned_loss=0.07906, over 19617.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3028, pruned_loss=0.0764, over 3823058.95 frames. ], batch size: 57, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:36:33,767 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83251.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:37:08,171 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 5.042e+02 6.195e+02 7.677e+02 1.204e+03, threshold=1.239e+03, percent-clipped=0.0 +2023-04-01 23:37:17,151 INFO [train.py:903] (2/4) Epoch 13, batch 1350, loss[loss=0.2539, simple_loss=0.3302, pruned_loss=0.08878, over 19469.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3024, pruned_loss=0.07596, over 3829622.32 frames. 
], batch size: 64, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:37:37,973 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:37:38,098 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:07,358 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:09,930 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:21,069 INFO [train.py:903] (2/4) Epoch 13, batch 1400, loss[loss=0.2218, simple_loss=0.29, pruned_loss=0.07681, over 19752.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3027, pruned_loss=0.07633, over 3824076.50 frames. ], batch size: 46, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:38:49,426 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8461, 1.9532, 2.1643, 2.7602, 1.9364, 2.7100, 2.3611, 1.9971], + device='cuda:2'), covar=tensor([0.3677, 0.3110, 0.1465, 0.1809, 0.3534, 0.1463, 0.3486, 0.2601], + device='cuda:2'), in_proj_covar=tensor([0.0809, 0.0832, 0.0659, 0.0892, 0.0796, 0.0723, 0.0797, 0.0720], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 23:39:10,613 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2685, 2.2704, 2.5885, 3.3430, 2.2195, 3.2142, 2.7969, 2.3602], + device='cuda:2'), covar=tensor([0.3832, 0.3377, 0.1446, 0.1991, 0.4071, 0.1593, 0.3639, 0.2792], + device='cuda:2'), in_proj_covar=tensor([0.0811, 0.0834, 0.0659, 0.0894, 0.0797, 0.0724, 0.0798, 0.0722], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 23:39:16,727 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.533e+02 6.494e+02 7.603e+02 1.656e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 23:39:20,299 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 23:39:22,614 INFO [train.py:903] (2/4) Epoch 13, batch 1450, loss[loss=0.2175, simple_loss=0.2799, pruned_loss=0.07754, over 19742.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3028, pruned_loss=0.07685, over 3814048.28 frames. ], batch size: 45, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:24,405 INFO [train.py:903] (2/4) Epoch 13, batch 1500, loss[loss=0.2224, simple_loss=0.3091, pruned_loss=0.06785, over 19274.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3028, pruned_loss=0.07728, over 3806494.07 frames. ], batch size: 70, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:29,241 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:41:19,832 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.552e+02 5.496e+02 6.437e+02 7.955e+02 2.023e+03, threshold=1.287e+03, percent-clipped=5.0 +2023-04-01 23:41:26,537 INFO [train.py:903] (2/4) Epoch 13, batch 1550, loss[loss=0.205, simple_loss=0.2876, pruned_loss=0.06117, over 19592.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3018, pruned_loss=0.07653, over 3819369.91 frames. 
], batch size: 61, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:42:30,076 INFO [train.py:903] (2/4) Epoch 13, batch 1600, loss[loss=0.216, simple_loss=0.2752, pruned_loss=0.07844, over 18686.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3023, pruned_loss=0.07652, over 3822857.16 frames. ], batch size: 41, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:42:53,320 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 23:42:55,753 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:43:25,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 5.445e+02 6.386e+02 7.908e+02 1.256e+03, threshold=1.277e+03, percent-clipped=0.0 +2023-04-01 23:43:31,085 INFO [train.py:903] (2/4) Epoch 13, batch 1650, loss[loss=0.2246, simple_loss=0.2884, pruned_loss=0.08043, over 19583.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3037, pruned_loss=0.07732, over 3820419.15 frames. ], batch size: 52, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:44:33,585 INFO [train.py:903] (2/4) Epoch 13, batch 1700, loss[loss=0.1617, simple_loss=0.2457, pruned_loss=0.03883, over 19332.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.304, pruned_loss=0.07762, over 3829873.44 frames. ], batch size: 47, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:44:45,669 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:15,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 23:45:19,369 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:27,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.386e+02 6.785e+02 9.131e+02 1.645e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 23:45:33,653 INFO [train.py:903] (2/4) Epoch 13, batch 1750, loss[loss=0.2434, simple_loss=0.3158, pruned_loss=0.08547, over 19667.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3047, pruned_loss=0.07797, over 3828790.51 frames. ], batch size: 58, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:45:48,924 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83696.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:19,385 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:37,929 INFO [train.py:903] (2/4) Epoch 13, batch 1800, loss[loss=0.179, simple_loss=0.259, pruned_loss=0.04953, over 19763.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3041, pruned_loss=0.07757, over 3818455.53 frames. ], batch size: 46, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:47:08,593 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:29,805 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:32,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.542e+02 6.939e+02 8.095e+02 2.139e+03, threshold=1.388e+03, percent-clipped=3.0 +2023-04-01 23:47:35,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-01 23:47:39,805 INFO [train.py:903] (2/4) Epoch 13, batch 1850, loss[loss=0.2565, simple_loss=0.3301, pruned_loss=0.09146, over 19539.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3054, pruned_loss=0.07836, over 3824095.81 frames. ], batch size: 54, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:11,514 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 23:48:38,901 INFO [train.py:903] (2/4) Epoch 13, batch 1900, loss[loss=0.2339, simple_loss=0.2945, pruned_loss=0.08668, over 19765.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3049, pruned_loss=0.07801, over 3816129.99 frames. ], batch size: 47, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:56,187 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 23:49:00,755 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 23:49:22,965 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6631, 2.3279, 1.7602, 1.5800, 2.2708, 1.4352, 1.4221, 1.9310], + device='cuda:2'), covar=tensor([0.0863, 0.0600, 0.0846, 0.0666, 0.0402, 0.1011, 0.0691, 0.0478], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0303, 0.0325, 0.0248, 0.0237, 0.0317, 0.0284, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-01 23:49:23,829 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 23:49:32,948 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.453e+02 6.640e+02 7.751e+02 1.927e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-01 23:49:38,628 INFO [train.py:903] (2/4) Epoch 13, batch 1950, loss[loss=0.219, simple_loss=0.2873, pruned_loss=0.07533, over 19483.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3039, pruned_loss=0.07749, over 3809481.82 frames. ], batch size: 49, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:50:25,359 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0914, 2.1998, 2.2883, 3.0705, 2.1213, 2.8990, 2.5433, 2.1346], + device='cuda:2'), covar=tensor([0.4276, 0.3596, 0.1655, 0.2223, 0.4049, 0.1797, 0.4238, 0.3035], + device='cuda:2'), in_proj_covar=tensor([0.0810, 0.0841, 0.0659, 0.0896, 0.0797, 0.0725, 0.0802, 0.0723], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-01 23:50:31,072 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:50:40,979 INFO [train.py:903] (2/4) Epoch 13, batch 2000, loss[loss=0.2259, simple_loss=0.3048, pruned_loss=0.07345, over 19669.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3043, pruned_loss=0.07726, over 3823451.82 frames. 
], batch size: 55, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:02,448 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:17,798 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:36,105 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.295e+02 5.067e+02 6.527e+02 8.467e+02 1.955e+03, threshold=1.305e+03, percent-clipped=7.0 +2023-04-01 23:51:38,312 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 23:51:42,657 INFO [train.py:903] (2/4) Epoch 13, batch 2050, loss[loss=0.2223, simple_loss=0.3048, pruned_loss=0.06995, over 19714.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3043, pruned_loss=0.07735, over 3812402.62 frames. ], batch size: 59, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:56,825 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 23:51:57,798 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 23:52:03,619 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:21,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 23:52:22,869 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:43,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4603, 2.4193, 1.9990, 1.8939, 1.8111, 2.0652, 1.2425, 1.8557], + device='cuda:2'), covar=tensor([0.0456, 0.0471, 0.0454, 0.0662, 0.0748, 0.0766, 0.0882, 0.0631], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0356, 0.0425, 0.0357, 0.0313, 0.0324], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:52:44,648 INFO [train.py:903] (2/4) Epoch 13, batch 2100, loss[loss=0.2764, simple_loss=0.3427, pruned_loss=0.1051, over 13657.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3049, pruned_loss=0.07815, over 3811661.02 frames. ], batch size: 136, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:52:52,282 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:53:14,854 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 23:53:36,195 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 23:53:39,566 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.545e+02 6.946e+02 9.457e+02 3.064e+03, threshold=1.389e+03, percent-clipped=12.0 +2023-04-01 23:53:45,272 INFO [train.py:903] (2/4) Epoch 13, batch 2150, loss[loss=0.2552, simple_loss=0.324, pruned_loss=0.09315, over 17627.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3047, pruned_loss=0.07839, over 3804095.63 frames. 
], batch size: 101, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:54:23,396 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:31,389 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:49,422 INFO [train.py:903] (2/4) Epoch 13, batch 2200, loss[loss=0.267, simple_loss=0.3331, pruned_loss=0.1005, over 19386.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3052, pruned_loss=0.07866, over 3807640.36 frames. ], batch size: 70, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:55:44,487 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.936e+02 7.647e+02 9.699e+02 2.302e+03, threshold=1.529e+03, percent-clipped=8.0 +2023-04-01 23:55:50,238 INFO [train.py:903] (2/4) Epoch 13, batch 2250, loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06392, over 19739.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3043, pruned_loss=0.07816, over 3799575.52 frames. ], batch size: 51, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:28,832 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:56:51,982 INFO [train.py:903] (2/4) Epoch 13, batch 2300, loss[loss=0.2645, simple_loss=0.3359, pruned_loss=0.09656, over 19648.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3057, pruned_loss=0.07864, over 3814844.50 frames. ], batch size: 55, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:53,536 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84237.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:05,858 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 23:57:15,126 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:47,135 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 5.192e+02 6.483e+02 8.696e+02 2.103e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 23:57:52,879 INFO [train.py:903] (2/4) Epoch 13, batch 2350, loss[loss=0.2345, simple_loss=0.3109, pruned_loss=0.07908, over 19736.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3066, pruned_loss=0.07962, over 3790950.60 frames. ], batch size: 63, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:58:25,970 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:58:37,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 23:58:54,366 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 23:58:57,888 INFO [train.py:903] (2/4) Epoch 13, batch 2400, loss[loss=0.1984, simple_loss=0.2683, pruned_loss=0.06429, over 19299.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.306, pruned_loss=0.07882, over 3804120.70 frames. 
], batch size: 44, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:59:11,498 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:20,741 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:37,079 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3278, 2.1397, 1.9859, 1.8408, 1.6120, 1.8070, 0.7141, 1.3319], + device='cuda:2'), covar=tensor([0.0379, 0.0478, 0.0346, 0.0574, 0.0843, 0.0674, 0.0957, 0.0735], + device='cuda:2'), in_proj_covar=tensor([0.0343, 0.0337, 0.0338, 0.0360, 0.0431, 0.0359, 0.0318, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-01 23:59:48,313 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-01 23:59:54,313 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.138e+02 6.932e+02 8.383e+02 1.660e+03, threshold=1.386e+03, percent-clipped=4.0 +2023-04-01 23:59:59,965 INFO [train.py:903] (2/4) Epoch 13, batch 2450, loss[loss=0.2625, simple_loss=0.3339, pruned_loss=0.09549, over 19785.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3059, pruned_loss=0.07861, over 3804462.51 frames. ], batch size: 56, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:00:51,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84426.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:03,736 INFO [train.py:903] (2/4) Epoch 13, batch 2500, loss[loss=0.2229, simple_loss=0.2942, pruned_loss=0.07576, over 19827.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3053, pruned_loss=0.07839, over 3794379.08 frames. ], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:01:29,845 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0663, 1.9435, 1.7569, 1.5595, 1.5320, 1.5510, 0.3691, 0.8317], + device='cuda:2'), covar=tensor([0.0402, 0.0444, 0.0305, 0.0493, 0.0843, 0.0583, 0.0934, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0341, 0.0335, 0.0337, 0.0357, 0.0430, 0.0357, 0.0316, 0.0326], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:01:33,203 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:36,881 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1081, 1.7878, 1.8499, 2.2258, 1.9872, 1.9293, 1.7745, 2.1631], + device='cuda:2'), covar=tensor([0.0792, 0.1504, 0.1156, 0.0698, 0.1130, 0.0445, 0.1094, 0.0536], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0351, 0.0294, 0.0239, 0.0296, 0.0242, 0.0280, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:01:38,073 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:00,516 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 5.326e+02 7.100e+02 9.098e+02 1.657e+03, threshold=1.420e+03, percent-clipped=3.0 +2023-04-02 00:02:06,422 INFO [train.py:903] (2/4) Epoch 13, batch 2550, loss[loss=0.2329, simple_loss=0.3083, pruned_loss=0.07874, over 19612.00 frames. 
], tot_loss[loss=0.2321, simple_loss=0.3063, pruned_loss=0.07898, over 3809547.59 frames. ], batch size: 57, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:02:16,040 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84493.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:22,158 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3258, 2.3667, 2.5797, 3.0771, 2.4914, 3.0581, 2.7270, 2.3355], + device='cuda:2'), covar=tensor([0.3249, 0.2781, 0.1269, 0.1646, 0.2983, 0.1251, 0.2926, 0.2262], + device='cuda:2'), in_proj_covar=tensor([0.0815, 0.0843, 0.0665, 0.0902, 0.0799, 0.0728, 0.0803, 0.0726], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 00:02:48,834 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84518.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:04,569 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 00:03:10,385 INFO [train.py:903] (2/4) Epoch 13, batch 2600, loss[loss=0.2592, simple_loss=0.34, pruned_loss=0.08919, over 18308.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.306, pruned_loss=0.07861, over 3811643.58 frames. ], batch size: 83, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:03:21,123 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:42,740 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:59,424 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:09,373 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.097e+02 6.359e+02 8.045e+02 2.004e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 00:04:15,154 INFO [train.py:903] (2/4) Epoch 13, batch 2650, loss[loss=0.2189, simple_loss=0.2885, pruned_loss=0.07463, over 19414.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3052, pruned_loss=0.07818, over 3807397.25 frames. ], batch size: 48, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:04:30,348 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:34,931 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 00:05:17,487 INFO [train.py:903] (2/4) Epoch 13, batch 2700, loss[loss=0.2429, simple_loss=0.3184, pruned_loss=0.0837, over 19753.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.305, pruned_loss=0.07762, over 3809592.49 frames. 
], batch size: 63, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:05:36,197 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:08,579 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:13,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.394e+02 6.395e+02 8.456e+02 1.799e+03, threshold=1.279e+03, percent-clipped=4.0 +2023-04-02 00:06:16,797 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:21,011 INFO [train.py:903] (2/4) Epoch 13, batch 2750, loss[loss=0.2297, simple_loss=0.3119, pruned_loss=0.07374, over 19593.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3056, pruned_loss=0.07838, over 3806331.43 frames. ], batch size: 61, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:06:37,481 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:49,091 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:56,215 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:01,944 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:24,547 INFO [train.py:903] (2/4) Epoch 13, batch 2800, loss[loss=0.2047, simple_loss=0.2768, pruned_loss=0.06634, over 19582.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3048, pruned_loss=0.07803, over 3807161.54 frames. ], batch size: 52, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:07:34,095 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:08:22,666 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.423e+02 6.884e+02 8.957e+02 1.568e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-02 00:08:29,875 INFO [train.py:903] (2/4) Epoch 13, batch 2850, loss[loss=0.2468, simple_loss=0.3217, pruned_loss=0.08595, over 19779.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3064, pruned_loss=0.07908, over 3796970.64 frames. 
], batch size: 56, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:08:57,585 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7469, 1.7420, 1.6073, 1.3306, 1.3375, 1.3998, 0.1918, 0.6001], + device='cuda:2'), covar=tensor([0.0472, 0.0479, 0.0287, 0.0464, 0.1055, 0.0537, 0.0927, 0.0835], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0356, 0.0429, 0.0355, 0.0315, 0.0324], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:09:04,275 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:11,237 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84818.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:26,664 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7358, 1.7179, 1.5197, 1.3293, 1.3282, 1.4002, 0.1982, 0.6263], + device='cuda:2'), covar=tensor([0.0449, 0.0452, 0.0320, 0.0493, 0.1000, 0.0541, 0.0935, 0.0825], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0333, 0.0335, 0.0357, 0.0430, 0.0356, 0.0315, 0.0324], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:09:26,671 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:33,372 INFO [train.py:903] (2/4) Epoch 13, batch 2900, loss[loss=0.2462, simple_loss=0.3186, pruned_loss=0.08687, over 18107.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3071, pruned_loss=0.07917, over 3792749.71 frames. ], batch size: 83, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:33,421 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 00:09:58,757 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:10:06,442 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 00:10:15,854 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9398, 1.7160, 1.6966, 2.3220, 1.8425, 2.2060, 2.2137, 2.0145], + device='cuda:2'), covar=tensor([0.0709, 0.0873, 0.0952, 0.0736, 0.0788, 0.0640, 0.0734, 0.0602], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0226, 0.0225, 0.0246, 0.0233, 0.0212, 0.0195, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 00:10:31,985 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.833e+02 5.191e+02 6.710e+02 8.572e+02 2.238e+03, threshold=1.342e+03, percent-clipped=4.0 +2023-04-02 00:10:38,025 INFO [train.py:903] (2/4) Epoch 13, batch 2950, loss[loss=0.2162, simple_loss=0.2932, pruned_loss=0.06963, over 19526.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3059, pruned_loss=0.07822, over 3794586.36 frames. 
], batch size: 56, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:10:40,471 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:39,641 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:45,180 INFO [train.py:903] (2/4) Epoch 13, batch 3000, loss[loss=0.205, simple_loss=0.293, pruned_loss=0.05853, over 19338.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3058, pruned_loss=0.07765, over 3811001.96 frames. ], batch size: 66, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:11:45,181 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 00:12:00,847 INFO [train.py:937] (2/4) Epoch 13, validation: loss=0.1754, simple_loss=0.276, pruned_loss=0.03742, over 944034.00 frames. +2023-04-02 00:12:00,850 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 00:12:06,833 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 00:12:13,231 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5787, 1.2885, 1.4891, 1.6099, 3.1379, 1.0793, 2.1433, 3.5142], + device='cuda:2'), covar=tensor([0.0454, 0.2902, 0.2712, 0.1698, 0.0724, 0.2533, 0.1351, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0349, 0.0358, 0.0326, 0.0349, 0.0335, 0.0344, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:12:29,866 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84957.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:38,232 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5357, 1.2847, 1.3236, 2.0466, 1.6030, 1.9284, 1.9921, 1.5878], + device='cuda:2'), covar=tensor([0.0890, 0.1086, 0.1156, 0.0941, 0.0969, 0.0777, 0.0890, 0.0728], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0224, 0.0223, 0.0243, 0.0231, 0.0210, 0.0193, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 00:12:43,916 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:44,073 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:59,476 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.901e+02 6.331e+02 8.447e+02 1.208e+03, threshold=1.266e+03, percent-clipped=0.0 +2023-04-02 00:13:05,461 INFO [train.py:903] (2/4) Epoch 13, batch 3050, loss[loss=0.2321, simple_loss=0.3148, pruned_loss=0.07475, over 19669.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3053, pruned_loss=0.07743, over 3818612.06 frames. 
], batch size: 59, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:13:17,215 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84994.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:18,172 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84995.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:26,478 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:14:09,082 INFO [train.py:903] (2/4) Epoch 13, batch 3100, loss[loss=0.3271, simple_loss=0.3764, pruned_loss=0.1389, over 12906.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3053, pruned_loss=0.078, over 3813339.61 frames. ], batch size: 136, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:14:34,308 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-02 00:14:50,675 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:05,351 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.118e+02 6.727e+02 8.323e+02 1.616e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-02 00:15:11,226 INFO [train.py:903] (2/4) Epoch 13, batch 3150, loss[loss=0.2704, simple_loss=0.3479, pruned_loss=0.09648, over 19314.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3053, pruned_loss=0.07787, over 3808831.66 frames. ], batch size: 66, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:15:20,648 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:30,603 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7897, 1.5243, 1.6239, 1.6759, 3.3891, 1.3011, 2.3889, 3.6725], + device='cuda:2'), covar=tensor([0.0408, 0.2627, 0.2701, 0.1792, 0.0689, 0.2316, 0.1201, 0.0272], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0348, 0.0357, 0.0326, 0.0349, 0.0334, 0.0342, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:15:37,412 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 00:15:40,142 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 00:15:41,873 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85110.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:52,625 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-02 00:16:00,171 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:09,438 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:12,575 INFO [train.py:903] (2/4) Epoch 13, batch 3200, loss[loss=0.2378, simple_loss=0.3173, pruned_loss=0.07917, over 19750.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3063, pruned_loss=0.07839, over 3812910.99 frames. 
], batch size: 63, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:16:46,269 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:17:10,848 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.817e+02 5.513e+02 6.647e+02 8.266e+02 1.326e+03, threshold=1.329e+03, percent-clipped=0.0 +2023-04-02 00:17:11,355 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4905, 2.3435, 1.6886, 1.5961, 2.1673, 1.2875, 1.2883, 1.8381], + device='cuda:2'), covar=tensor([0.0953, 0.0665, 0.0983, 0.0767, 0.0456, 0.1190, 0.0690, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0304, 0.0329, 0.0250, 0.0238, 0.0322, 0.0288, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:17:16,619 INFO [train.py:903] (2/4) Epoch 13, batch 3250, loss[loss=0.2395, simple_loss=0.319, pruned_loss=0.08003, over 19657.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3058, pruned_loss=0.0784, over 3810332.41 frames. ], batch size: 58, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:17:25,604 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.86 vs. limit=5.0 +2023-04-02 00:18:20,666 INFO [train.py:903] (2/4) Epoch 13, batch 3300, loss[loss=0.2042, simple_loss=0.2768, pruned_loss=0.06582, over 19773.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.306, pruned_loss=0.07828, over 3811384.11 frames. ], batch size: 48, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:18:21,937 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 00:18:47,646 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:12,469 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:17,832 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 5.269e+02 6.447e+02 8.165e+02 1.494e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 00:19:19,519 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:22,473 INFO [train.py:903] (2/4) Epoch 13, batch 3350, loss[loss=0.2302, simple_loss=0.3083, pruned_loss=0.07601, over 19610.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3058, pruned_loss=0.07813, over 3825341.13 frames. ], batch size: 57, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:19:57,322 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:20:24,675 INFO [train.py:903] (2/4) Epoch 13, batch 3400, loss[loss=0.2036, simple_loss=0.2804, pruned_loss=0.06337, over 19680.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3029, pruned_loss=0.07663, over 3828183.20 frames. 
], batch size: 53, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:21:03,094 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:22,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.371e+02 6.826e+02 8.500e+02 2.504e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-02 00:21:27,166 INFO [train.py:903] (2/4) Epoch 13, batch 3450, loss[loss=0.1679, simple_loss=0.2449, pruned_loss=0.04541, over 19774.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3031, pruned_loss=0.07691, over 3823775.76 frames. ], batch size: 47, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:21:30,625 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 00:21:34,241 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85391.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:37,722 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7628, 1.7095, 1.5888, 1.4073, 1.2957, 1.3939, 0.2320, 0.6931], + device='cuda:2'), covar=tensor([0.0429, 0.0432, 0.0291, 0.0426, 0.0900, 0.0490, 0.0867, 0.0780], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0334, 0.0335, 0.0357, 0.0427, 0.0357, 0.0313, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:21:59,012 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4515, 1.8392, 1.4842, 1.4550, 1.7270, 1.2983, 1.3473, 1.5852], + device='cuda:2'), covar=tensor([0.0696, 0.0603, 0.0728, 0.0555, 0.0445, 0.0915, 0.0491, 0.0395], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0305, 0.0328, 0.0250, 0.0240, 0.0322, 0.0288, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:22:15,803 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9876, 3.6395, 2.3402, 3.2309, 1.0723, 3.4655, 3.4258, 3.5238], + device='cuda:2'), covar=tensor([0.0706, 0.1000, 0.1873, 0.0804, 0.3347, 0.0745, 0.0803, 0.1069], + device='cuda:2'), in_proj_covar=tensor([0.0448, 0.0375, 0.0451, 0.0323, 0.0391, 0.0384, 0.0373, 0.0405], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:22:19,416 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:22:25,646 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3161, 3.0626, 2.0880, 2.7775, 0.8514, 2.9291, 2.8784, 2.9247], + device='cuda:2'), covar=tensor([0.1075, 0.1243, 0.2117, 0.0946, 0.3559, 0.0930, 0.0969, 0.1313], + device='cuda:2'), in_proj_covar=tensor([0.0448, 0.0375, 0.0451, 0.0323, 0.0391, 0.0384, 0.0373, 0.0405], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:22:28,798 INFO [train.py:903] (2/4) Epoch 13, batch 3500, loss[loss=0.2311, simple_loss=0.3128, pruned_loss=0.07468, over 19653.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3035, pruned_loss=0.07753, over 3818566.76 frames. 
], batch size: 55, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:22:42,128 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:10,405 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:20,663 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:27,196 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.339e+02 6.675e+02 8.042e+02 1.644e+03, threshold=1.335e+03, percent-clipped=3.0 +2023-04-02 00:23:31,543 INFO [train.py:903] (2/4) Epoch 13, batch 3550, loss[loss=0.2926, simple_loss=0.3539, pruned_loss=0.1157, over 19673.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3038, pruned_loss=0.07745, over 3823244.59 frames. ], batch size: 60, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:23:50,603 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85501.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:31,259 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:34,224 INFO [train.py:903] (2/4) Epoch 13, batch 3600, loss[loss=0.2446, simple_loss=0.3204, pruned_loss=0.08438, over 19572.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3033, pruned_loss=0.07736, over 3818751.25 frames. ], batch size: 61, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:24:50,329 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:03,601 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85558.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:33,194 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.969e+02 7.367e+02 9.197e+02 1.857e+03, threshold=1.473e+03, percent-clipped=2.0 +2023-04-02 00:25:36,799 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:38,629 INFO [train.py:903] (2/4) Epoch 13, batch 3650, loss[loss=0.2181, simple_loss=0.2938, pruned_loss=0.0712, over 19676.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3032, pruned_loss=0.07756, over 3803884.48 frames. ], batch size: 53, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:25:46,148 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:26:41,857 INFO [train.py:903] (2/4) Epoch 13, batch 3700, loss[loss=0.2725, simple_loss=0.3408, pruned_loss=0.1021, over 19835.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3039, pruned_loss=0.0777, over 3799247.00 frames. 
], batch size: 52, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:26:51,286 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:26,090 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8281, 1.8692, 2.1483, 2.3898, 1.6059, 2.1718, 2.2959, 2.0322], + device='cuda:2'), covar=tensor([0.3611, 0.3154, 0.1459, 0.1853, 0.3398, 0.1687, 0.3790, 0.2763], + device='cuda:2'), in_proj_covar=tensor([0.0806, 0.0838, 0.0658, 0.0892, 0.0791, 0.0717, 0.0794, 0.0718], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 00:27:41,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.337e+02 6.385e+02 1.025e+03 1.989e+03, threshold=1.277e+03, percent-clipped=4.0 +2023-04-02 00:27:44,656 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:46,553 INFO [train.py:903] (2/4) Epoch 13, batch 3750, loss[loss=0.3209, simple_loss=0.3676, pruned_loss=0.1371, over 12941.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3044, pruned_loss=0.07759, over 3786896.57 frames. ], batch size: 136, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:28:14,971 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:28:48,147 INFO [train.py:903] (2/4) Epoch 13, batch 3800, loss[loss=0.1813, simple_loss=0.2685, pruned_loss=0.04707, over 19575.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.304, pruned_loss=0.07692, over 3792020.53 frames. ], batch size: 52, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:20,847 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 00:29:44,879 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 5.587e+02 7.205e+02 9.194e+02 2.888e+03, threshold=1.441e+03, percent-clipped=8.0 +2023-04-02 00:29:50,627 INFO [train.py:903] (2/4) Epoch 13, batch 3850, loss[loss=0.2088, simple_loss=0.275, pruned_loss=0.07132, over 19764.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3042, pruned_loss=0.077, over 3805351.62 frames. ], batch size: 46, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:56,232 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:30:53,507 INFO [train.py:903] (2/4) Epoch 13, batch 3900, loss[loss=0.1951, simple_loss=0.2889, pruned_loss=0.05063, over 19669.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3045, pruned_loss=0.07718, over 3786240.62 frames. 
], batch size: 55, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:30:55,150 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8151, 3.2337, 3.2963, 3.3151, 1.3934, 3.1767, 2.7647, 3.0014], + device='cuda:2'), covar=tensor([0.1501, 0.0910, 0.0745, 0.0782, 0.4686, 0.0860, 0.0798, 0.1178], + device='cuda:2'), in_proj_covar=tensor([0.0701, 0.0623, 0.0835, 0.0705, 0.0753, 0.0578, 0.0502, 0.0765], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 00:31:00,129 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:06,827 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85845.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:10,520 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:30,232 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:40,480 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:51,441 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.450e+02 6.738e+02 8.252e+02 2.044e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-02 00:31:57,033 INFO [train.py:903] (2/4) Epoch 13, batch 3950, loss[loss=0.2462, simple_loss=0.3259, pruned_loss=0.08322, over 19665.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3056, pruned_loss=0.07793, over 3795426.06 frames. ], batch size: 59, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:32:00,534 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 00:32:04,167 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85892.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:05,586 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:20,776 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:55,336 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2599, 1.1275, 1.1512, 1.5526, 1.3660, 1.3921, 1.4530, 1.2887], + device='cuda:2'), covar=tensor([0.0684, 0.0776, 0.0833, 0.0569, 0.0749, 0.0669, 0.0771, 0.0589], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0224, 0.0224, 0.0244, 0.0232, 0.0210, 0.0192, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 00:32:59,796 INFO [train.py:903] (2/4) Epoch 13, batch 4000, loss[loss=0.2175, simple_loss=0.2994, pruned_loss=0.06784, over 19613.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3042, pruned_loss=0.07708, over 3806826.31 frames. 
], batch size: 57, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:33:14,781 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85948.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:33:30,815 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85960.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:45,371 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2486, 1.9172, 2.0218, 2.8883, 2.1315, 2.4875, 2.5291, 2.2717], + device='cuda:2'), covar=tensor([0.0741, 0.0902, 0.0948, 0.0821, 0.0912, 0.0701, 0.0911, 0.0617], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0225, 0.0225, 0.0245, 0.0233, 0.0210, 0.0192, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 00:33:46,163 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 00:33:48,593 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:56,487 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.280e+02 6.258e+02 8.022e+02 1.377e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 00:34:02,168 INFO [train.py:903] (2/4) Epoch 13, batch 4050, loss[loss=0.2309, simple_loss=0.3143, pruned_loss=0.07375, over 19771.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3032, pruned_loss=0.07632, over 3820533.64 frames. ], batch size: 56, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:34:03,495 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:30,830 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,121 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:35:05,748 INFO [train.py:903] (2/4) Epoch 13, batch 4100, loss[loss=0.2465, simple_loss=0.3273, pruned_loss=0.08284, over 19582.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3022, pruned_loss=0.07588, over 3826230.82 frames. ], batch size: 61, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:35:42,479 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 00:35:48,453 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86070.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:04,014 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.943e+02 6.094e+02 7.890e+02 1.986e+03, threshold=1.219e+03, percent-clipped=5.0 +2023-04-02 00:36:08,788 INFO [train.py:903] (2/4) Epoch 13, batch 4150, loss[loss=0.274, simple_loss=0.3339, pruned_loss=0.1071, over 13777.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3018, pruned_loss=0.07523, over 3820219.77 frames. 
], batch size: 135, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:36:11,032 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:28,548 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:58,038 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3240, 1.3780, 1.7171, 1.5624, 2.6245, 2.2447, 2.7127, 0.9650], + device='cuda:2'), covar=tensor([0.2077, 0.3671, 0.2162, 0.1599, 0.1260, 0.1772, 0.1239, 0.3755], + device='cuda:2'), in_proj_covar=tensor([0.0498, 0.0589, 0.0627, 0.0441, 0.0599, 0.0503, 0.0651, 0.0506], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 00:37:11,317 INFO [train.py:903] (2/4) Epoch 13, batch 4200, loss[loss=0.233, simple_loss=0.3132, pruned_loss=0.0764, over 19449.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.302, pruned_loss=0.07524, over 3824895.73 frames. ], batch size: 64, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:37:14,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 00:37:42,897 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:37:47,530 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2567, 1.2690, 1.5917, 1.4653, 2.1954, 1.9880, 2.1843, 0.8055], + device='cuda:2'), covar=tensor([0.2249, 0.4008, 0.2389, 0.1833, 0.1434, 0.2042, 0.1418, 0.3995], + device='cuda:2'), in_proj_covar=tensor([0.0497, 0.0586, 0.0625, 0.0440, 0.0598, 0.0501, 0.0649, 0.0503], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 00:38:08,159 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.506e+02 6.651e+02 9.015e+02 1.898e+03, threshold=1.330e+03, percent-clipped=8.0 +2023-04-02 00:38:12,739 INFO [train.py:903] (2/4) Epoch 13, batch 4250, loss[loss=0.2553, simple_loss=0.3179, pruned_loss=0.09636, over 19477.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3025, pruned_loss=0.07551, over 3828494.69 frames. ], batch size: 49, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:38:13,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:31,017 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 00:38:43,584 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 00:38:51,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 00:38:52,311 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86216.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:15,637 INFO [train.py:903] (2/4) Epoch 13, batch 4300, loss[loss=0.2598, simple_loss=0.3352, pruned_loss=0.09218, over 19268.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.303, pruned_loss=0.07585, over 3826547.66 frames. 
], batch size: 66, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:39:17,014 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86237.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:17,298 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4064, 2.2279, 1.9267, 1.8564, 1.6394, 1.8366, 0.4875, 1.3373], + device='cuda:2'), covar=tensor([0.0427, 0.0449, 0.0375, 0.0646, 0.0901, 0.0693, 0.1019, 0.0776], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0332, 0.0333, 0.0357, 0.0431, 0.0356, 0.0314, 0.0323], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:39:19,934 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-02 00:39:23,011 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86241.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:50,470 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:51,559 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86264.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:13,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.647e+02 5.244e+02 6.314e+02 8.271e+02 2.210e+03, threshold=1.263e+03, percent-clipped=7.0 +2023-04-02 00:40:15,188 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 00:40:19,667 INFO [train.py:903] (2/4) Epoch 13, batch 4350, loss[loss=0.2498, simple_loss=0.3348, pruned_loss=0.08243, over 19779.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3044, pruned_loss=0.07696, over 3839023.67 frames. ], batch size: 56, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:40:22,481 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:27,962 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86292.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:41:00,078 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86319.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:22,565 INFO [train.py:903] (2/4) Epoch 13, batch 4400, loss[loss=0.2846, simple_loss=0.3348, pruned_loss=0.1172, over 19716.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3047, pruned_loss=0.07699, over 3831415.07 frames. ], batch size: 51, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:41:42,051 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:49,356 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:50,094 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 00:41:58,607 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:00,819 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 00:42:17,237 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4066, 1.7083, 2.1043, 1.7414, 3.2383, 2.7704, 3.7201, 1.7972], + device='cuda:2'), covar=tensor([0.2285, 0.3736, 0.2353, 0.1753, 0.1417, 0.1876, 0.1342, 0.3439], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0585, 0.0624, 0.0440, 0.0596, 0.0500, 0.0646, 0.0501], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 00:42:20,373 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.626e+02 5.303e+02 6.614e+02 7.903e+02 1.976e+03, threshold=1.323e+03, percent-clipped=7.0 +2023-04-02 00:42:22,050 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:25,152 INFO [train.py:903] (2/4) Epoch 13, batch 4450, loss[loss=0.203, simple_loss=0.295, pruned_loss=0.05547, over 19703.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3039, pruned_loss=0.07651, over 3832453.30 frames. ], batch size: 59, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:42:52,172 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86407.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:43:00,998 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:21,667 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:25,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:27,352 INFO [train.py:903] (2/4) Epoch 13, batch 4500, loss[loss=0.2549, simple_loss=0.3229, pruned_loss=0.09348, over 13393.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3035, pruned_loss=0.07666, over 3827602.82 frames. ], batch size: 136, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:43:55,652 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:01,568 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:21,271 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:24,464 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.430e+02 6.459e+02 7.870e+02 1.871e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-02 00:44:29,966 INFO [train.py:903] (2/4) Epoch 13, batch 4550, loss[loss=0.2139, simple_loss=0.2967, pruned_loss=0.06555, over 19535.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3041, pruned_loss=0.07715, over 3809681.33 frames. ], batch size: 54, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:44:39,225 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 00:45:02,114 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 00:45:03,933 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-02 00:45:24,991 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86529.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:33,593 INFO [train.py:903] (2/4) Epoch 13, batch 4600, loss[loss=0.2281, simple_loss=0.3038, pruned_loss=0.07622, over 19742.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.303, pruned_loss=0.07639, over 3810473.29 frames. ], batch size: 54, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:45:33,936 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8212, 1.4752, 1.6430, 1.5117, 2.5595, 1.2766, 2.1432, 2.7312], + device='cuda:2'), covar=tensor([0.0452, 0.1950, 0.1880, 0.1474, 0.0542, 0.1785, 0.1436, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0342, 0.0352, 0.0322, 0.0349, 0.0327, 0.0343, 0.0363], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:45:35,096 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3924, 1.1084, 1.2763, 1.2553, 2.1277, 1.0275, 1.7853, 2.2762], + device='cuda:2'), covar=tensor([0.0481, 0.2134, 0.2091, 0.1464, 0.0601, 0.1852, 0.1447, 0.0506], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0342, 0.0352, 0.0322, 0.0349, 0.0327, 0.0343, 0.0363], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:45:39,803 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2259, 1.2187, 1.4451, 1.3322, 1.9795, 1.7437, 1.9888, 1.0307], + device='cuda:2'), covar=tensor([0.1836, 0.3231, 0.1913, 0.1505, 0.1128, 0.1648, 0.1043, 0.3505], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0587, 0.0625, 0.0443, 0.0598, 0.0501, 0.0647, 0.0504], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 00:45:46,418 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:24,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 00:46:31,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.452e+02 6.634e+02 8.020e+02 1.693e+03, threshold=1.327e+03, percent-clipped=1.0 +2023-04-02 00:46:35,924 INFO [train.py:903] (2/4) Epoch 13, batch 4650, loss[loss=0.2188, simple_loss=0.2936, pruned_loss=0.07201, over 19745.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3025, pruned_loss=0.07634, over 3799358.45 frames. ], batch size: 51, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:46:51,374 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:52,252 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 00:47:03,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 00:47:04,000 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:04,276 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:07,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-04-02 00:47:35,075 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:38,172 INFO [train.py:903] (2/4) Epoch 13, batch 4700, loss[loss=0.2198, simple_loss=0.2993, pruned_loss=0.07013, over 19764.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3026, pruned_loss=0.07604, over 3809427.79 frames. ], batch size: 54, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:48:04,334 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 00:48:13,305 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86663.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:35,956 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.083e+02 5.953e+02 7.054e+02 1.649e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-02 00:48:42,058 INFO [train.py:903] (2/4) Epoch 13, batch 4750, loss[loss=0.2103, simple_loss=0.2832, pruned_loss=0.06872, over 19664.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3039, pruned_loss=0.07675, over 3810537.97 frames. ], batch size: 53, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:48:45,746 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86688.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:48:48,118 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86690.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:18,505 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:27,260 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:43,043 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:43,758 INFO [train.py:903] (2/4) Epoch 13, batch 4800, loss[loss=0.3169, simple_loss=0.3728, pruned_loss=0.1305, over 19297.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3054, pruned_loss=0.07805, over 3798624.55 frames. ], batch size: 66, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:49:52,401 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3129, 3.0118, 2.2634, 2.7290, 0.9227, 2.9612, 2.8405, 2.9166], + device='cuda:2'), covar=tensor([0.1099, 0.1402, 0.2143, 0.1130, 0.3871, 0.1037, 0.1144, 0.1445], + device='cuda:2'), in_proj_covar=tensor([0.0447, 0.0373, 0.0450, 0.0325, 0.0386, 0.0383, 0.0373, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:50:12,893 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:41,008 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.270e+02 6.552e+02 8.488e+02 1.715e+03, threshold=1.310e+03, percent-clipped=7.0 +2023-04-02 00:50:45,033 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:45,836 INFO [train.py:903] (2/4) Epoch 13, batch 4850, loss[loss=0.2801, simple_loss=0.3354, pruned_loss=0.1124, over 13358.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.305, pruned_loss=0.07802, over 3804397.79 frames. 
], batch size: 135, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:51:06,440 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:06,741 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:08,602 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 00:51:14,229 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:18,150 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:31,679 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 00:51:37,281 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 00:51:37,314 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 00:51:38,847 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:45,327 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86832.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:47,424 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 00:51:49,805 INFO [train.py:903] (2/4) Epoch 13, batch 4900, loss[loss=0.2832, simple_loss=0.34, pruned_loss=0.1132, over 13892.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3039, pruned_loss=0.07711, over 3818824.61 frames. ], batch size: 136, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:52:07,083 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 00:52:46,044 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 5.393e+02 6.845e+02 8.725e+02 1.892e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 00:52:50,742 INFO [train.py:903] (2/4) Epoch 13, batch 4950, loss[loss=0.2696, simple_loss=0.3376, pruned_loss=0.1007, over 19703.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3032, pruned_loss=0.07686, over 3817272.67 frames. ], batch size: 63, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:53:05,845 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86896.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:08,074 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-02 00:53:10,746 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7099, 1.5112, 1.3850, 1.9934, 1.6886, 1.9314, 2.0287, 1.7904], + device='cuda:2'), covar=tensor([0.0766, 0.0925, 0.1031, 0.0798, 0.0830, 0.0719, 0.0814, 0.0637], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0225, 0.0224, 0.0244, 0.0230, 0.0211, 0.0193, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 00:53:31,786 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:32,653 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 00:53:37,681 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:56,228 INFO [train.py:903] (2/4) Epoch 13, batch 5000, loss[loss=0.2426, simple_loss=0.3223, pruned_loss=0.08142, over 19673.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3029, pruned_loss=0.07656, over 3811052.32 frames. ], batch size: 53, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:54:03,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 00:54:03,566 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:14,000 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 00:54:49,342 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:52,321 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 5.650e+02 6.679e+02 8.615e+02 1.943e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 00:54:56,911 INFO [train.py:903] (2/4) Epoch 13, batch 5050, loss[loss=0.2527, simple_loss=0.3256, pruned_loss=0.08992, over 19694.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.303, pruned_loss=0.07641, over 3815404.41 frames. ], batch size: 60, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:55:19,935 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87004.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:55:32,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 00:55:41,765 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87020.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:56:01,013 INFO [train.py:903] (2/4) Epoch 13, batch 5100, loss[loss=0.2268, simple_loss=0.2878, pruned_loss=0.08287, over 19716.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3019, pruned_loss=0.07575, over 3827756.81 frames. 
], batch size: 46, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:56:01,555 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1398, 3.7022, 2.0630, 2.2292, 3.2789, 1.8949, 1.5121, 2.1638], + device='cuda:2'), covar=tensor([0.1151, 0.0501, 0.0974, 0.0686, 0.0379, 0.1034, 0.0831, 0.0621], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0305, 0.0331, 0.0252, 0.0240, 0.0321, 0.0291, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:56:07,897 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 00:56:11,378 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 00:56:18,089 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 00:56:28,873 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:56:58,517 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.089e+02 6.519e+02 8.713e+02 1.677e+03, threshold=1.304e+03, percent-clipped=2.0 +2023-04-02 00:57:03,274 INFO [train.py:903] (2/4) Epoch 13, batch 5150, loss[loss=0.2546, simple_loss=0.317, pruned_loss=0.09612, over 13405.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3015, pruned_loss=0.07583, over 3800276.45 frames. ], batch size: 136, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:57:15,153 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 00:57:48,512 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 00:58:07,738 INFO [train.py:903] (2/4) Epoch 13, batch 5200, loss[loss=0.1956, simple_loss=0.2673, pruned_loss=0.06198, over 19823.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3032, pruned_loss=0.07697, over 3801986.37 frames. ], batch size: 48, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:58:18,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 00:58:46,533 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2151, 1.5914, 1.2699, 1.2228, 1.4598, 1.0792, 1.2383, 1.4395], + device='cuda:2'), covar=tensor([0.0640, 0.0666, 0.0715, 0.0546, 0.0422, 0.0906, 0.0441, 0.0356], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0306, 0.0332, 0.0253, 0.0241, 0.0324, 0.0292, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:58:54,639 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87173.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:58:57,970 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87176.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:01,515 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:04,689 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-02 00:59:05,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.198e+02 5.751e+02 6.794e+02 9.141e+02 2.147e+03, threshold=1.359e+03, percent-clipped=10.0 +2023-04-02 00:59:10,450 INFO [train.py:903] (2/4) Epoch 13, batch 5250, loss[loss=0.2159, simple_loss=0.2986, pruned_loss=0.06659, over 19757.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3023, pruned_loss=0.0764, over 3796361.56 frames. ], batch size: 54, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 00:59:20,118 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9415, 3.5502, 2.4197, 3.1907, 0.8857, 3.4023, 3.3856, 3.4803], + device='cuda:2'), covar=tensor([0.0893, 0.1314, 0.2082, 0.0938, 0.3861, 0.0889, 0.0943, 0.1281], + device='cuda:2'), in_proj_covar=tensor([0.0445, 0.0372, 0.0450, 0.0321, 0.0383, 0.0382, 0.0371, 0.0408], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 00:59:25,815 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87198.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:31,684 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:13,788 INFO [train.py:903] (2/4) Epoch 13, batch 5300, loss[loss=0.2203, simple_loss=0.2904, pruned_loss=0.07506, over 19772.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3017, pruned_loss=0.07555, over 3815289.37 frames. ], batch size: 54, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:00:18,348 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87240.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:27,901 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87248.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:28,086 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4478, 1.4919, 1.7511, 1.6969, 2.4856, 2.2437, 2.5631, 1.0243], + device='cuda:2'), covar=tensor([0.2082, 0.3818, 0.2266, 0.1692, 0.1308, 0.1834, 0.1282, 0.3836], + device='cuda:2'), in_proj_covar=tensor([0.0496, 0.0589, 0.0625, 0.0445, 0.0602, 0.0499, 0.0645, 0.0506], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 01:00:28,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 01:01:10,965 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.140e+02 5.034e+02 6.226e+02 7.840e+02 1.929e+03, threshold=1.245e+03, percent-clipped=3.0 +2023-04-02 01:01:15,795 INFO [train.py:903] (2/4) Epoch 13, batch 5350, loss[loss=0.2088, simple_loss=0.2852, pruned_loss=0.06621, over 19727.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3019, pruned_loss=0.07552, over 3810696.88 frames. ], batch size: 51, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:01:24,151 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:01:49,539 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 01:01:51,120 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:03,654 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8842, 1.1421, 1.5004, 0.5642, 1.9679, 2.3055, 2.0120, 2.4803], + device='cuda:2'), covar=tensor([0.1588, 0.3535, 0.3136, 0.2482, 0.0595, 0.0330, 0.0360, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0303, 0.0333, 0.0253, 0.0224, 0.0164, 0.0209, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 01:02:21,302 INFO [train.py:903] (2/4) Epoch 13, batch 5400, loss[loss=0.2176, simple_loss=0.2965, pruned_loss=0.0693, over 19601.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3025, pruned_loss=0.07596, over 3790461.19 frames. ], batch size: 50, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 01:02:24,126 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:44,288 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87355.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:55,498 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87364.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:03:16,637 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87380.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:03:19,989 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 4.952e+02 6.479e+02 7.900e+02 1.578e+03, threshold=1.296e+03, percent-clipped=3.0 +2023-04-02 01:03:21,954 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.82 vs. limit=5.0 +2023-04-02 01:03:23,439 INFO [train.py:903] (2/4) Epoch 13, batch 5450, loss[loss=0.2171, simple_loss=0.2986, pruned_loss=0.06778, over 19604.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3018, pruned_loss=0.07542, over 3798287.99 frames. ], batch size: 57, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:03:33,515 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4740, 2.3326, 1.7137, 1.2184, 2.2468, 1.1497, 1.2631, 1.9414], + device='cuda:2'), covar=tensor([0.1039, 0.0711, 0.0964, 0.0982, 0.0490, 0.1294, 0.0780, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0303, 0.0326, 0.0248, 0.0238, 0.0319, 0.0289, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:03:45,364 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 01:04:10,984 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0621, 1.8715, 2.0164, 2.6981, 2.1150, 2.6242, 2.5704, 2.1847], + device='cuda:2'), covar=tensor([0.0779, 0.0853, 0.0900, 0.0856, 0.0805, 0.0656, 0.0845, 0.0632], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0225, 0.0224, 0.0244, 0.0228, 0.0211, 0.0193, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 01:04:25,744 INFO [train.py:903] (2/4) Epoch 13, batch 5500, loss[loss=0.2232, simple_loss=0.2881, pruned_loss=0.07917, over 15164.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3017, pruned_loss=0.07513, over 3797362.50 frames. 
], batch size: 33, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:47,834 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 01:05:21,381 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:05:21,447 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87479.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:05:25,839 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 5.041e+02 6.443e+02 8.590e+02 2.643e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 01:05:29,323 INFO [train.py:903] (2/4) Epoch 13, batch 5550, loss[loss=0.2134, simple_loss=0.2787, pruned_loss=0.07406, over 19266.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3014, pruned_loss=0.07516, over 3802937.49 frames. ], batch size: 44, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:05:33,831 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 01:06:16,504 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:23,050 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 01:06:33,781 INFO [train.py:903] (2/4) Epoch 13, batch 5600, loss[loss=0.2019, simple_loss=0.285, pruned_loss=0.05939, over 19725.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3007, pruned_loss=0.07484, over 3810952.95 frames. ], batch size: 51, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:06:44,185 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8851, 1.3556, 1.0728, 0.9682, 1.1650, 0.9575, 1.0201, 1.2610], + device='cuda:2'), covar=tensor([0.0552, 0.0679, 0.1019, 0.0590, 0.0481, 0.1128, 0.0460, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0306, 0.0331, 0.0251, 0.0241, 0.0324, 0.0293, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:06:47,652 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:17,532 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87572.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:32,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.023e+02 6.222e+02 8.051e+02 1.328e+03, threshold=1.244e+03, percent-clipped=2.0 +2023-04-02 01:07:36,345 INFO [train.py:903] (2/4) Epoch 13, batch 5650, loss[loss=0.219, simple_loss=0.3027, pruned_loss=0.06759, over 19468.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.07539, over 3817487.62 frames. 
], batch size: 64, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:07:43,446 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:56,287 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:01,799 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:06,362 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:23,577 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:24,446 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 01:08:24,686 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3590, 3.9476, 2.5139, 3.4977, 1.1078, 3.8198, 3.7686, 3.8463], + device='cuda:2'), covar=tensor([0.0698, 0.1082, 0.2179, 0.0937, 0.4035, 0.0850, 0.0869, 0.1087], + device='cuda:2'), in_proj_covar=tensor([0.0446, 0.0375, 0.0452, 0.0325, 0.0387, 0.0385, 0.0376, 0.0411], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:08:38,515 INFO [train.py:903] (2/4) Epoch 13, batch 5700, loss[loss=0.2059, simple_loss=0.2861, pruned_loss=0.0628, over 19391.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3011, pruned_loss=0.07518, over 3818626.41 frames. ], batch size: 48, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:08:38,969 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:09:36,749 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 4.997e+02 6.690e+02 8.841e+02 1.921e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 01:09:40,125 INFO [train.py:903] (2/4) Epoch 13, batch 5750, loss[loss=0.2232, simple_loss=0.3035, pruned_loss=0.07145, over 19776.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3022, pruned_loss=0.07607, over 3821936.74 frames. ], batch size: 54, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:09:41,276 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 01:09:49,516 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 01:09:55,215 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 01:10:07,760 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:27,831 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87724.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:41,876 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87735.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:10:42,500 INFO [train.py:903] (2/4) Epoch 13, batch 5800, loss[loss=0.2315, simple_loss=0.3084, pruned_loss=0.07728, over 19796.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3029, pruned_loss=0.07611, over 3835177.81 frames. 
], batch size: 48, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:11:13,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87760.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:11:43,246 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.041e+02 6.242e+02 7.928e+02 1.581e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 01:11:46,961 INFO [train.py:903] (2/4) Epoch 13, batch 5850, loss[loss=0.1917, simple_loss=0.2679, pruned_loss=0.05772, over 19375.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3043, pruned_loss=0.07665, over 3823487.69 frames. ], batch size: 47, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:33,217 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87823.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:12:47,990 INFO [train.py:903] (2/4) Epoch 13, batch 5900, loss[loss=0.2205, simple_loss=0.2957, pruned_loss=0.07264, over 19577.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3042, pruned_loss=0.07668, over 3816657.07 frames. ], batch size: 52, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:49,026 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 01:12:51,609 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:10,516 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 01:13:27,502 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:47,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.447e+02 6.662e+02 8.353e+02 2.279e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 01:13:50,399 INFO [train.py:903] (2/4) Epoch 13, batch 5950, loss[loss=0.2181, simple_loss=0.2872, pruned_loss=0.07451, over 19462.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3035, pruned_loss=0.07654, over 3813577.87 frames. ], batch size: 49, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:14:44,702 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.56 vs. limit=5.0 +2023-04-02 01:14:53,065 INFO [train.py:903] (2/4) Epoch 13, batch 6000, loss[loss=0.2178, simple_loss=0.2829, pruned_loss=0.07636, over 19765.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3029, pruned_loss=0.07592, over 3825150.63 frames. ], batch size: 47, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:14:53,065 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 01:15:06,424 INFO [train.py:937] (2/4) Epoch 13, validation: loss=0.175, simple_loss=0.2755, pruned_loss=0.03726, over 944034.00 frames. 
+2023-04-02 01:15:06,425 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 01:15:09,220 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87938.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:19,420 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:25,215 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:41,021 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:45,514 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:02,158 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 01:16:04,017 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,756 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.142e+02 6.649e+02 8.424e+02 2.027e+03, threshold=1.330e+03, percent-clipped=9.0 +2023-04-02 01:16:08,320 INFO [train.py:903] (2/4) Epoch 13, batch 6050, loss[loss=0.2184, simple_loss=0.3003, pruned_loss=0.06827, over 19523.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3026, pruned_loss=0.07573, over 3828846.35 frames. ], batch size: 54, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:16:12,392 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:12,304 INFO [train.py:903] (2/4) Epoch 13, batch 6100, loss[loss=0.2826, simple_loss=0.3488, pruned_loss=0.1082, over 18011.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3027, pruned_loss=0.07592, over 3833917.15 frames. ], batch size: 83, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:17:20,991 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2129, 1.2599, 1.7505, 1.1969, 2.6533, 3.5863, 3.3196, 3.8180], + device='cuda:2'), covar=tensor([0.1502, 0.3580, 0.2955, 0.2160, 0.0517, 0.0157, 0.0205, 0.0197], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0302, 0.0332, 0.0253, 0.0222, 0.0164, 0.0208, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 01:17:42,745 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:49,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:54,545 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 01:18:09,974 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:10,748 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.311e+02 6.775e+02 8.712e+02 1.953e+03, threshold=1.355e+03, percent-clipped=3.0 +2023-04-02 01:18:14,177 INFO [train.py:903] (2/4) Epoch 13, batch 6150, loss[loss=0.216, simple_loss=0.2822, pruned_loss=0.07494, over 19390.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3029, pruned_loss=0.07604, over 3844290.69 frames. 
], batch size: 47, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:18:24,886 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:38,372 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 01:18:54,530 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88118.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:56,866 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:19:15,196 INFO [train.py:903] (2/4) Epoch 13, batch 6200, loss[loss=0.2311, simple_loss=0.2863, pruned_loss=0.08795, over 19753.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3026, pruned_loss=0.07617, over 3841386.95 frames. ], batch size: 45, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:19:50,754 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3130, 3.0318, 2.1755, 2.7327, 1.0064, 2.9300, 2.8714, 2.9270], + device='cuda:2'), covar=tensor([0.1203, 0.1504, 0.2126, 0.0984, 0.3531, 0.1020, 0.1030, 0.1379], + device='cuda:2'), in_proj_covar=tensor([0.0442, 0.0372, 0.0445, 0.0319, 0.0384, 0.0380, 0.0370, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:20:13,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.409e+02 5.304e+02 6.722e+02 8.627e+02 2.252e+03, threshold=1.344e+03, percent-clipped=3.0 +2023-04-02 01:20:16,891 INFO [train.py:903] (2/4) Epoch 13, batch 6250, loss[loss=0.2182, simple_loss=0.2972, pruned_loss=0.06957, over 19593.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3021, pruned_loss=0.0757, over 3832643.66 frames. ], batch size: 52, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:20:29,098 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88194.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:20:44,718 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 01:20:58,226 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:20,741 INFO [train.py:903] (2/4) Epoch 13, batch 6300, loss[loss=0.2485, simple_loss=0.3244, pruned_loss=0.08632, over 19669.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3022, pruned_loss=0.07593, over 3830615.10 frames. 
], batch size: 55, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:21:22,386 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5284, 1.4298, 1.3495, 1.9240, 1.5961, 1.8324, 1.8477, 1.7329], + device='cuda:2'), covar=tensor([0.0812, 0.0893, 0.0998, 0.0739, 0.0789, 0.0678, 0.0835, 0.0611], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0223, 0.0222, 0.0240, 0.0226, 0.0207, 0.0190, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 01:21:23,580 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:52,934 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:22:18,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.052e+02 6.199e+02 7.712e+02 1.989e+03, threshold=1.240e+03, percent-clipped=5.0 +2023-04-02 01:22:21,896 INFO [train.py:903] (2/4) Epoch 13, batch 6350, loss[loss=0.2169, simple_loss=0.2987, pruned_loss=0.06755, over 19540.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3024, pruned_loss=0.07602, over 3832556.08 frames. ], batch size: 64, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:22:31,600 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8694, 1.9142, 2.1006, 2.5738, 1.8566, 2.4728, 2.2693, 1.9202], + device='cuda:2'), covar=tensor([0.3782, 0.3367, 0.1675, 0.2140, 0.3670, 0.1751, 0.3860, 0.2960], + device='cuda:2'), in_proj_covar=tensor([0.0816, 0.0848, 0.0661, 0.0899, 0.0797, 0.0730, 0.0796, 0.0724], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 01:22:38,094 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 01:22:50,124 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88309.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:23:02,588 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:08,222 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:24,610 INFO [train.py:903] (2/4) Epoch 13, batch 6400, loss[loss=0.256, simple_loss=0.3275, pruned_loss=0.09231, over 19608.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3026, pruned_loss=0.0759, over 3825158.44 frames. 
], batch size: 57, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:23:27,240 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:32,523 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:38,205 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:24:22,191 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 5.639e+02 6.967e+02 8.699e+02 1.659e+03, threshold=1.393e+03, percent-clipped=7.0 +2023-04-02 01:24:25,791 INFO [train.py:903] (2/4) Epoch 13, batch 6450, loss[loss=0.2376, simple_loss=0.3142, pruned_loss=0.08046, over 19674.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3019, pruned_loss=0.07554, over 3816031.31 frames. ], batch size: 58, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:24:41,552 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8717, 1.8931, 2.1664, 2.6207, 1.7425, 2.4359, 2.3631, 1.9733], + device='cuda:2'), covar=tensor([0.3769, 0.3396, 0.1671, 0.1949, 0.3690, 0.1746, 0.3927, 0.3108], + device='cuda:2'), in_proj_covar=tensor([0.0816, 0.0849, 0.0662, 0.0902, 0.0799, 0.0733, 0.0801, 0.0725], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 01:25:05,084 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 01:25:28,931 INFO [train.py:903] (2/4) Epoch 13, batch 6500, loss[loss=0.1783, simple_loss=0.2556, pruned_loss=0.05047, over 19304.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3022, pruned_loss=0.07558, over 3812796.24 frames. ], batch size: 44, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:30,966 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 01:26:00,494 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:26:26,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.190e+02 6.145e+02 7.751e+02 1.614e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 01:26:30,113 INFO [train.py:903] (2/4) Epoch 13, batch 6550, loss[loss=0.2581, simple_loss=0.3302, pruned_loss=0.09302, over 17406.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3023, pruned_loss=0.07574, over 3827818.09 frames. ], batch size: 101, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:27:31,536 INFO [train.py:903] (2/4) Epoch 13, batch 6600, loss[loss=0.2187, simple_loss=0.3012, pruned_loss=0.06814, over 19581.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3032, pruned_loss=0.07613, over 3816859.79 frames. ], batch size: 61, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:27:57,271 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-02 01:28:21,383 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:28:28,439 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 5.175e+02 6.632e+02 9.239e+02 1.861e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-04-02 01:28:32,110 INFO [train.py:903] (2/4) Epoch 13, batch 6650, loss[loss=0.2569, simple_loss=0.3352, pruned_loss=0.0893, over 19512.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3032, pruned_loss=0.07658, over 3809831.88 frames. ], batch size: 64, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:34,638 INFO [train.py:903] (2/4) Epoch 13, batch 6700, loss[loss=0.2776, simple_loss=0.3369, pruned_loss=0.1091, over 13238.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3028, pruned_loss=0.07669, over 3807211.66 frames. ], batch size: 136, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:55,532 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88653.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:30:29,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 5.222e+02 6.682e+02 8.627e+02 1.913e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-02 01:30:33,159 INFO [train.py:903] (2/4) Epoch 13, batch 6750, loss[loss=0.2168, simple_loss=0.2994, pruned_loss=0.06705, over 19665.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07713, over 3802662.97 frames. ], batch size: 58, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:31:31,231 INFO [train.py:903] (2/4) Epoch 13, batch 6800, loss[loss=0.2433, simple_loss=0.3181, pruned_loss=0.08422, over 18283.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.303, pruned_loss=0.07666, over 3810108.59 frames. ], batch size: 83, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:31:37,787 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3387, 2.1155, 1.6246, 1.3502, 1.9005, 1.2604, 1.3101, 1.9371], + device='cuda:2'), covar=tensor([0.0823, 0.0708, 0.1031, 0.0707, 0.0493, 0.1193, 0.0601, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0292, 0.0303, 0.0326, 0.0248, 0.0239, 0.0322, 0.0295, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:32:17,054 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 01:32:17,505 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 01:32:21,843 INFO [train.py:903] (2/4) Epoch 14, batch 0, loss[loss=0.3035, simple_loss=0.3539, pruned_loss=0.1265, over 19740.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3539, pruned_loss=0.1265, over 19740.00 frames. ], batch size: 51, lr: 6.05e-03, grad_scale: 8.0 +2023-04-02 01:32:21,844 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 01:32:33,652 INFO [train.py:937] (2/4) Epoch 14, validation: loss=0.1763, simple_loss=0.2772, pruned_loss=0.03773, over 944034.00 frames. +2023-04-02 01:32:33,653 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 01:32:41,812 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88768.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:32:49,780 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 01:32:59,166 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.998e+02 6.377e+02 7.989e+02 1.719e+03, threshold=1.275e+03, percent-clipped=2.0 +2023-04-02 01:33:40,054 INFO [train.py:903] (2/4) Epoch 14, batch 50, loss[loss=0.1905, simple_loss=0.2737, pruned_loss=0.05361, over 19604.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3012, pruned_loss=0.07413, over 877774.59 frames. ], batch size: 50, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:01,737 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:15,354 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 01:34:34,200 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:40,600 INFO [train.py:903] (2/4) Epoch 14, batch 100, loss[loss=0.1955, simple_loss=0.2866, pruned_loss=0.05222, over 19700.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3039, pruned_loss=0.07569, over 1535117.28 frames. ], batch size: 59, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:51,069 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 01:35:02,777 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.271e+02 6.621e+02 8.700e+02 2.391e+03, threshold=1.324e+03, percent-clipped=10.0 +2023-04-02 01:35:41,058 INFO [train.py:903] (2/4) Epoch 14, batch 150, loss[loss=0.2105, simple_loss=0.3011, pruned_loss=0.05993, over 19677.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3019, pruned_loss=0.07446, over 2045621.06 frames. ], batch size: 58, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:36:38,761 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 01:36:39,938 INFO [train.py:903] (2/4) Epoch 14, batch 200, loss[loss=0.1872, simple_loss=0.2564, pruned_loss=0.05903, over 19790.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3009, pruned_loss=0.07429, over 2443339.23 frames. ], batch size: 47, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:03,949 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 5.170e+02 6.545e+02 8.695e+02 1.666e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 01:37:37,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7450, 4.2424, 4.4502, 4.4308, 1.4438, 4.1243, 3.5965, 4.1284], + device='cuda:2'), covar=tensor([0.1459, 0.0678, 0.0581, 0.0630, 0.5875, 0.0683, 0.0665, 0.1185], + device='cuda:2'), in_proj_covar=tensor([0.0707, 0.0631, 0.0835, 0.0715, 0.0755, 0.0583, 0.0503, 0.0765], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 01:37:41,106 INFO [train.py:903] (2/4) Epoch 14, batch 250, loss[loss=0.2144, simple_loss=0.2944, pruned_loss=0.06721, over 19776.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2987, pruned_loss=0.0739, over 2756383.31 frames. 
], batch size: 56, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:53,603 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89024.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:17,662 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:38:22,435 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89049.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:38:43,481 INFO [train.py:903] (2/4) Epoch 14, batch 300, loss[loss=0.2133, simple_loss=0.2781, pruned_loss=0.07421, over 19755.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2976, pruned_loss=0.07357, over 2999393.40 frames. ], batch size: 45, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:39:05,434 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.830e+02 5.923e+02 7.544e+02 2.111e+03, threshold=1.185e+03, percent-clipped=1.0 +2023-04-02 01:39:45,145 INFO [train.py:903] (2/4) Epoch 14, batch 350, loss[loss=0.2147, simple_loss=0.2943, pruned_loss=0.06752, over 19539.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2988, pruned_loss=0.07411, over 3189177.21 frames. ], batch size: 54, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:39:47,489 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 01:40:46,827 INFO [train.py:903] (2/4) Epoch 14, batch 400, loss[loss=0.1912, simple_loss=0.2762, pruned_loss=0.0531, over 19735.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2991, pruned_loss=0.0737, over 3336380.02 frames. ], batch size: 51, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:41:11,934 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.893e+02 5.942e+02 7.582e+02 1.529e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-02 01:41:16,759 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:41:33,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5687, 1.3680, 1.4983, 1.7152, 3.1626, 1.1307, 2.2999, 3.5112], + device='cuda:2'), covar=tensor([0.0472, 0.2458, 0.2470, 0.1560, 0.0679, 0.2282, 0.1186, 0.0261], + device='cuda:2'), in_proj_covar=tensor([0.0370, 0.0345, 0.0358, 0.0326, 0.0353, 0.0332, 0.0345, 0.0368], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:41:36,337 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.7871, 1.2377, 0.9796, 0.8736, 1.0864, 0.8954, 0.8262, 1.1526], + device='cuda:2'), covar=tensor([0.0576, 0.0771, 0.0970, 0.0634, 0.0472, 0.1145, 0.0572, 0.0426], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0305, 0.0328, 0.0251, 0.0241, 0.0326, 0.0296, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:41:47,619 INFO [train.py:903] (2/4) Epoch 14, batch 450, loss[loss=0.2965, simple_loss=0.3489, pruned_loss=0.122, over 13492.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3001, pruned_loss=0.07428, over 3444925.01 frames. ], batch size: 135, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:42:19,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 01:42:20,929 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-02 01:42:51,704 INFO [train.py:903] (2/4) Epoch 14, batch 500, loss[loss=0.2195, simple_loss=0.3068, pruned_loss=0.0661, over 19621.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07441, over 3538994.57 frames. ], batch size: 57, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:43:13,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.086e+02 6.501e+02 8.394e+02 1.936e+03, threshold=1.300e+03, percent-clipped=6.0 +2023-04-02 01:43:38,137 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 01:43:51,229 INFO [train.py:903] (2/4) Epoch 14, batch 550, loss[loss=0.21, simple_loss=0.2954, pruned_loss=0.06235, over 19625.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3004, pruned_loss=0.07423, over 3611806.16 frames. ], batch size: 57, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:44:50,889 INFO [train.py:903] (2/4) Epoch 14, batch 600, loss[loss=0.2322, simple_loss=0.3037, pruned_loss=0.08032, over 19475.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3013, pruned_loss=0.07481, over 3654015.34 frames. ], batch size: 49, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:45:14,769 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.437e+02 6.463e+02 8.394e+02 1.645e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 01:45:23,620 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:27,405 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:32,889 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 01:45:52,146 INFO [train.py:903] (2/4) Epoch 14, batch 650, loss[loss=0.2087, simple_loss=0.2967, pruned_loss=0.06037, over 19543.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3021, pruned_loss=0.07507, over 3699211.24 frames. ], batch size: 56, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:46:25,962 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2898, 3.0071, 2.2024, 2.7477, 0.7413, 2.8906, 2.8017, 2.9213], + device='cuda:2'), covar=tensor([0.1105, 0.1460, 0.2092, 0.1027, 0.4036, 0.1048, 0.1097, 0.1294], + device='cuda:2'), in_proj_covar=tensor([0.0446, 0.0375, 0.0450, 0.0328, 0.0390, 0.0382, 0.0379, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:46:29,568 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89443.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:46:33,491 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-02 01:46:34,598 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 01:46:45,770 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89457.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:46:53,913 INFO [train.py:903] (2/4) Epoch 14, batch 700, loss[loss=0.2467, simple_loss=0.32, pruned_loss=0.08669, over 19483.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3023, pruned_loss=0.07592, over 3729468.25 frames. 
], batch size: 64, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:47:21,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.092e+02 5.132e+02 6.716e+02 8.076e+02 1.293e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-02 01:47:44,495 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:47:59,827 INFO [train.py:903] (2/4) Epoch 14, batch 750, loss[loss=0.2135, simple_loss=0.2756, pruned_loss=0.07573, over 19733.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3014, pruned_loss=0.07545, over 3745151.09 frames. ], batch size: 45, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:48:14,774 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:19,169 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:49:01,540 INFO [train.py:903] (2/4) Epoch 14, batch 800, loss[loss=0.221, simple_loss=0.2867, pruned_loss=0.07763, over 19416.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3018, pruned_loss=0.0759, over 3755880.88 frames. ], batch size: 48, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:49:16,448 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 01:49:23,242 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 01:49:24,441 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.343e+02 6.523e+02 8.164e+02 1.688e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 01:50:01,788 INFO [train.py:903] (2/4) Epoch 14, batch 850, loss[loss=0.1946, simple_loss=0.2691, pruned_loss=0.06007, over 19587.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3006, pruned_loss=0.07511, over 3782187.65 frames. ], batch size: 52, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:50:11,430 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5365, 2.1961, 2.4178, 3.1096, 2.4452, 2.7585, 2.6784, 2.7749], + device='cuda:2'), covar=tensor([0.0707, 0.0863, 0.0848, 0.0744, 0.0792, 0.0662, 0.0837, 0.0572], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0223, 0.0224, 0.0243, 0.0229, 0.0210, 0.0194, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 01:50:26,631 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2108, 1.1426, 1.1704, 1.3050, 1.0302, 1.3270, 1.3178, 1.2720], + device='cuda:2'), covar=tensor([0.0912, 0.1044, 0.1114, 0.0700, 0.0865, 0.0853, 0.0828, 0.0773], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0223, 0.0224, 0.0243, 0.0230, 0.0210, 0.0193, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 01:50:42,898 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:50:55,569 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 01:51:03,556 INFO [train.py:903] (2/4) Epoch 14, batch 900, loss[loss=0.2168, simple_loss=0.2947, pruned_loss=0.06949, over 19750.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3015, pruned_loss=0.07579, over 3777121.35 frames. 
], batch size: 63, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:51:29,677 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 4.983e+02 6.131e+02 7.467e+02 1.791e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 01:51:46,399 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:51:49,222 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.16 vs. limit=5.0 +2023-04-02 01:51:58,103 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3616, 2.1142, 1.5816, 1.5004, 2.0098, 1.3207, 1.2555, 1.7853], + device='cuda:2'), covar=tensor([0.0928, 0.0770, 0.0937, 0.0699, 0.0428, 0.1066, 0.0690, 0.0434], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0307, 0.0329, 0.0250, 0.0240, 0.0328, 0.0297, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:52:07,444 INFO [train.py:903] (2/4) Epoch 14, batch 950, loss[loss=0.2294, simple_loss=0.3044, pruned_loss=0.07724, over 19572.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3033, pruned_loss=0.07701, over 3769456.10 frames. ], batch size: 52, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:52:07,477 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 01:52:33,960 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:06,418 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:10,813 INFO [train.py:903] (2/4) Epoch 14, batch 1000, loss[loss=0.1883, simple_loss=0.2678, pruned_loss=0.05437, over 19851.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3039, pruned_loss=0.07717, over 3785013.60 frames. ], batch size: 52, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:53:34,644 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.300e+02 6.720e+02 8.420e+02 1.635e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-02 01:53:36,395 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:38,573 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:59,172 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89801.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:54:02,270 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 01:54:13,796 INFO [train.py:903] (2/4) Epoch 14, batch 1050, loss[loss=0.2067, simple_loss=0.2854, pruned_loss=0.06405, over 19660.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3032, pruned_loss=0.07653, over 3797217.01 frames. ], batch size: 53, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:54:46,500 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-02 01:55:02,287 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:15,726 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0476, 1.7871, 1.8593, 2.1352, 1.9543, 1.8699, 1.7900, 2.0791], + device='cuda:2'), covar=tensor([0.0908, 0.1506, 0.1174, 0.0844, 0.1139, 0.0458, 0.1150, 0.0597], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0351, 0.0295, 0.0240, 0.0296, 0.0241, 0.0286, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 01:55:16,487 INFO [train.py:903] (2/4) Epoch 14, batch 1100, loss[loss=0.2102, simple_loss=0.2911, pruned_loss=0.06467, over 18268.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3035, pruned_loss=0.07656, over 3816425.33 frames. ], batch size: 84, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:55:24,745 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,009 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89883.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 5.450e+02 6.738e+02 9.006e+02 2.173e+03, threshold=1.348e+03, percent-clipped=6.0 +2023-04-02 01:56:05,595 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:05,664 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:19,144 INFO [train.py:903] (2/4) Epoch 14, batch 1150, loss[loss=0.22, simple_loss=0.2894, pruned_loss=0.07528, over 19421.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3027, pruned_loss=0.0758, over 3828592.92 frames. ], batch size: 48, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:56:21,939 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 01:56:24,113 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89916.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:56:39,121 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:25,194 INFO [train.py:903] (2/4) Epoch 14, batch 1200, loss[loss=0.2409, simple_loss=0.3188, pruned_loss=0.08147, over 19690.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3021, pruned_loss=0.07571, over 3828691.13 frames. ], batch size: 58, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:57:49,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.889e+02 5.798e+02 7.146e+02 1.027e+03, threshold=1.160e+03, percent-clipped=0.0 +2023-04-02 01:57:51,955 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:55,108 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 01:58:28,848 INFO [train.py:903] (2/4) Epoch 14, batch 1250, loss[loss=0.2125, simple_loss=0.2945, pruned_loss=0.06528, over 19543.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3022, pruned_loss=0.07577, over 3825657.75 frames. 
], batch size: 56, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:58:47,026 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.19 vs. limit=5.0 +2023-04-02 01:59:04,881 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:59:31,266 INFO [train.py:903] (2/4) Epoch 14, batch 1300, loss[loss=0.2788, simple_loss=0.3441, pruned_loss=0.1067, over 19310.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3016, pruned_loss=0.07548, over 3826595.48 frames. ], batch size: 66, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:57,908 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 5.410e+02 6.769e+02 7.813e+02 2.183e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-02 02:00:25,768 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:00:33,600 INFO [train.py:903] (2/4) Epoch 14, batch 1350, loss[loss=0.2271, simple_loss=0.3082, pruned_loss=0.07296, over 19681.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3027, pruned_loss=0.07613, over 3816222.97 frames. ], batch size: 58, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:00:58,163 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:16,431 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:28,093 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:29,350 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:35,848 INFO [train.py:903] (2/4) Epoch 14, batch 1400, loss[loss=0.2154, simple_loss=0.2999, pruned_loss=0.06541, over 19342.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3022, pruned_loss=0.07549, over 3818828.98 frames. ], batch size: 66, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:01:40,607 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1114, 2.7522, 2.2078, 2.2351, 1.9155, 2.3224, 1.0353, 2.0423], + device='cuda:2'), covar=tensor([0.0537, 0.0514, 0.0549, 0.0949, 0.0987, 0.0971, 0.1071, 0.0904], + device='cuda:2'), in_proj_covar=tensor([0.0342, 0.0336, 0.0333, 0.0363, 0.0437, 0.0359, 0.0317, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 02:01:46,570 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90172.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:01:59,393 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:02:00,111 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.450e+02 7.234e+02 1.006e+03 2.886e+03, threshold=1.447e+03, percent-clipped=11.0 +2023-04-02 02:02:17,640 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90197.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:02:34,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 02:02:38,351 INFO [train.py:903] (2/4) Epoch 14, batch 1450, loss[loss=0.2541, simple_loss=0.3271, pruned_loss=0.09053, over 19116.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3021, pruned_loss=0.07532, over 3819051.43 frames. ], batch size: 69, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:02:53,326 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90227.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:12,040 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:22,227 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:38,188 INFO [train.py:903] (2/4) Epoch 14, batch 1500, loss[loss=0.244, simple_loss=0.3148, pruned_loss=0.08657, over 19660.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3011, pruned_loss=0.07454, over 3830162.31 frames. ], batch size: 55, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:03:42,135 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:04:03,123 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.072e+02 6.110e+02 7.921e+02 2.000e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 02:04:39,089 INFO [train.py:903] (2/4) Epoch 14, batch 1550, loss[loss=0.245, simple_loss=0.3211, pruned_loss=0.08443, over 17366.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.301, pruned_loss=0.07439, over 3838178.98 frames. ], batch size: 101, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:16,343 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:05:44,740 INFO [train.py:903] (2/4) Epoch 14, batch 1600, loss[loss=0.2462, simple_loss=0.3165, pruned_loss=0.08795, over 17013.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3019, pruned_loss=0.07518, over 3824185.69 frames. ], batch size: 101, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:51,730 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:02,643 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 02:06:08,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.288e+02 6.387e+02 7.705e+02 1.564e+03, threshold=1.277e+03, percent-clipped=2.0 +2023-04-02 02:06:46,562 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:47,299 INFO [train.py:903] (2/4) Epoch 14, batch 1650, loss[loss=0.1877, simple_loss=0.2593, pruned_loss=0.05804, over 19749.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3022, pruned_loss=0.07544, over 3824661.73 frames. ], batch size: 46, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:15,857 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:07:47,909 INFO [train.py:903] (2/4) Epoch 14, batch 1700, loss[loss=0.1921, simple_loss=0.2776, pruned_loss=0.05329, over 19595.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.302, pruned_loss=0.07549, over 3833347.50 frames. 
], batch size: 52, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:08:03,306 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:13,890 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 5.354e+02 6.441e+02 8.114e+02 1.316e+03, threshold=1.288e+03, percent-clipped=1.0 +2023-04-02 02:08:23,209 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:25,352 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 02:08:49,500 INFO [train.py:903] (2/4) Epoch 14, batch 1750, loss[loss=0.2486, simple_loss=0.3201, pruned_loss=0.08851, over 19661.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2997, pruned_loss=0.07409, over 3844529.99 frames. ], batch size: 55, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:08:49,804 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:09:51,909 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9441, 4.4658, 3.0650, 3.9664, 1.0852, 4.3340, 4.2948, 4.4701], + device='cuda:2'), covar=tensor([0.0492, 0.1026, 0.1693, 0.0819, 0.4307, 0.0644, 0.0718, 0.0920], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0375, 0.0450, 0.0326, 0.0392, 0.0384, 0.0376, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:09:54,006 INFO [train.py:903] (2/4) Epoch 14, batch 1800, loss[loss=0.2564, simple_loss=0.3159, pruned_loss=0.09842, over 19653.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07389, over 3833462.41 frames. ], batch size: 53, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:10:18,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.749e+02 5.076e+02 6.157e+02 8.007e+02 1.318e+03, threshold=1.231e+03, percent-clipped=1.0 +2023-04-02 02:10:29,855 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:34,920 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:47,863 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:48,791 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 02:10:55,672 INFO [train.py:903] (2/4) Epoch 14, batch 1850, loss[loss=0.2455, simple_loss=0.3216, pruned_loss=0.08466, over 19582.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2994, pruned_loss=0.07388, over 3838751.08 frames. ], batch size: 61, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:11:07,289 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:11:15,700 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-02 02:11:24,430 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 02:11:58,545 INFO [train.py:903] (2/4) Epoch 14, batch 1900, loss[loss=0.1626, simple_loss=0.2395, pruned_loss=0.0428, over 19760.00 frames. 
], tot_loss[loss=0.2236, simple_loss=0.2996, pruned_loss=0.07385, over 3837613.66 frames. ], batch size: 46, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:12:12,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 02:12:15,963 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 02:12:24,248 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.065e+02 6.349e+02 7.558e+02 1.663e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-02 02:12:43,819 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 02:12:54,519 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:12:59,233 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:13:00,237 INFO [train.py:903] (2/4) Epoch 14, batch 1950, loss[loss=0.2386, simple_loss=0.3166, pruned_loss=0.08026, over 19764.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3002, pruned_loss=0.07446, over 3832600.38 frames. ], batch size: 63, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:13:05,577 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.94 vs. limit=5.0 +2023-04-02 02:14:03,119 INFO [train.py:903] (2/4) Epoch 14, batch 2000, loss[loss=0.2351, simple_loss=0.3046, pruned_loss=0.08276, over 19682.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2996, pruned_loss=0.0742, over 3828405.06 frames. ], batch size: 53, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:14:27,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.207e+02 5.999e+02 7.179e+02 1.269e+03, threshold=1.200e+03, percent-clipped=0.0 +2023-04-02 02:14:30,499 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0890, 1.8213, 1.4135, 1.1970, 1.6434, 1.0493, 1.0899, 1.5970], + device='cuda:2'), covar=tensor([0.0777, 0.0753, 0.1009, 0.0715, 0.0550, 0.1309, 0.0622, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0309, 0.0330, 0.0251, 0.0241, 0.0328, 0.0297, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:14:59,937 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 02:15:05,785 INFO [train.py:903] (2/4) Epoch 14, batch 2050, loss[loss=0.2279, simple_loss=0.3107, pruned_loss=0.07258, over 19769.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2996, pruned_loss=0.07443, over 3823303.38 frames. ], batch size: 63, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:15:14,217 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90821.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:18,954 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 02:15:20,136 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 02:15:24,024 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90828.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:42,062 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 02:16:01,127 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-02 02:16:01,449 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:07,317 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:08,093 INFO [train.py:903] (2/4) Epoch 14, batch 2100, loss[loss=0.2626, simple_loss=0.3306, pruned_loss=0.09736, over 18811.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2983, pruned_loss=0.07395, over 3833075.83 frames. ], batch size: 74, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:16:12,811 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:32,690 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.435e+02 5.132e+02 6.276e+02 7.348e+02 1.970e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 02:16:37,024 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 02:16:39,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:58,536 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 02:17:09,969 INFO [train.py:903] (2/4) Epoch 14, batch 2150, loss[loss=0.2232, simple_loss=0.3081, pruned_loss=0.06909, over 19391.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2988, pruned_loss=0.074, over 3818935.54 frames. ], batch size: 70, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:17:38,850 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:17:53,046 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-02 02:18:10,991 INFO [train.py:903] (2/4) Epoch 14, batch 2200, loss[loss=0.2179, simple_loss=0.2865, pruned_loss=0.07465, over 19618.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2995, pruned_loss=0.07432, over 3831965.31 frames. ], batch size: 50, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:18:12,480 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:19,854 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9763, 1.1271, 1.6574, 0.9576, 2.2854, 3.0137, 2.7265, 3.2144], + device='cuda:2'), covar=tensor([0.1742, 0.3771, 0.3155, 0.2385, 0.0589, 0.0203, 0.0261, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0302, 0.0332, 0.0254, 0.0225, 0.0166, 0.0206, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 02:18:23,316 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:35,015 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. 
limit=5.0 +2023-04-02 02:18:35,326 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 5.132e+02 5.971e+02 7.684e+02 1.888e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-02 02:18:43,484 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90990.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:19:13,353 INFO [train.py:903] (2/4) Epoch 14, batch 2250, loss[loss=0.1945, simple_loss=0.2851, pruned_loss=0.05191, over 19689.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.07444, over 3824300.24 frames. ], batch size: 59, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:14,202 INFO [train.py:903] (2/4) Epoch 14, batch 2300, loss[loss=0.2179, simple_loss=0.3035, pruned_loss=0.06612, over 19595.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3009, pruned_loss=0.07473, over 3817071.03 frames. ], batch size: 57, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:26,827 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 02:20:38,187 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.417e+02 6.829e+02 8.730e+02 1.535e+03, threshold=1.366e+03, percent-clipped=12.0 +2023-04-02 02:20:38,672 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:20:47,634 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9478, 1.7169, 1.6859, 2.0050, 1.8143, 1.8168, 1.7455, 2.0034], + device='cuda:2'), covar=tensor([0.0985, 0.1568, 0.1375, 0.0982, 0.1230, 0.0472, 0.1128, 0.0618], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0293, 0.0239, 0.0285, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:21:11,079 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:21:16,438 INFO [train.py:903] (2/4) Epoch 14, batch 2350, loss[loss=0.2281, simple_loss=0.3104, pruned_loss=0.07291, over 19709.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.0745, over 3823530.19 frames. ], batch size: 60, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:21:57,696 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 02:22:13,120 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 02:22:14,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. limit=5.0 +2023-04-02 02:22:17,784 INFO [train.py:903] (2/4) Epoch 14, batch 2400, loss[loss=0.1993, simple_loss=0.2692, pruned_loss=0.06472, over 19696.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3002, pruned_loss=0.07416, over 3834782.51 frames. 
], batch size: 45, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:22:42,188 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.466e+02 6.599e+02 8.174e+02 1.804e+03, threshold=1.320e+03, percent-clipped=5.0 +2023-04-02 02:22:52,552 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7514, 4.1696, 4.3848, 4.3807, 1.7021, 4.0785, 3.6650, 4.0341], + device='cuda:2'), covar=tensor([0.1378, 0.0962, 0.0596, 0.0607, 0.5186, 0.0838, 0.0616, 0.1240], + device='cuda:2'), in_proj_covar=tensor([0.0708, 0.0636, 0.0839, 0.0724, 0.0756, 0.0586, 0.0504, 0.0777], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 02:22:52,707 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:15,506 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91211.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:19,603 INFO [train.py:903] (2/4) Epoch 14, batch 2450, loss[loss=0.1963, simple_loss=0.2706, pruned_loss=0.06097, over 19770.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2997, pruned_loss=0.07385, over 3828697.43 frames. ], batch size: 47, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:23:23,103 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:37,737 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91229.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:08,239 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:19,341 INFO [train.py:903] (2/4) Epoch 14, batch 2500, loss[loss=0.2494, simple_loss=0.3253, pruned_loss=0.08673, over 19669.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2996, pruned_loss=0.07376, over 3822814.86 frames. ], batch size: 58, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:24:32,867 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5786, 1.4431, 1.3972, 1.9401, 1.5218, 1.7229, 1.9946, 1.6427], + device='cuda:2'), covar=tensor([0.0832, 0.0924, 0.1060, 0.0689, 0.0777, 0.0761, 0.0757, 0.0691], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0241, 0.0228, 0.0209, 0.0191, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 02:24:35,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. 
limit=2.0 +2023-04-02 02:24:39,628 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3559, 3.9602, 2.5786, 3.5794, 0.9773, 3.7605, 3.7554, 3.8211], + device='cuda:2'), covar=tensor([0.0712, 0.1180, 0.2100, 0.0884, 0.4127, 0.0822, 0.0814, 0.1114], + device='cuda:2'), in_proj_covar=tensor([0.0452, 0.0380, 0.0454, 0.0330, 0.0392, 0.0386, 0.0378, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:24:42,800 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.074e+02 5.867e+02 6.968e+02 8.618e+02 1.617e+03, threshold=1.394e+03, percent-clipped=2.0 +2023-04-02 02:24:45,414 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0306, 1.1883, 1.5829, 0.8519, 2.3570, 3.0112, 2.7475, 3.2098], + device='cuda:2'), covar=tensor([0.1613, 0.3666, 0.3243, 0.2449, 0.0523, 0.0190, 0.0248, 0.0246], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0253, 0.0223, 0.0166, 0.0206, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 02:24:58,173 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0026, 1.9554, 1.8956, 2.6329, 1.9979, 2.5738, 2.5837, 2.2949], + device='cuda:2'), covar=tensor([0.0845, 0.0873, 0.1033, 0.0891, 0.0881, 0.0658, 0.0856, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0223, 0.0225, 0.0242, 0.0228, 0.0209, 0.0192, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 02:25:06,891 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91303.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:25:19,880 INFO [train.py:903] (2/4) Epoch 14, batch 2550, loss[loss=0.192, simple_loss=0.2762, pruned_loss=0.05384, over 19681.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3001, pruned_loss=0.07422, over 3816082.61 frames. ], batch size: 53, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:25:33,168 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:26:12,377 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 02:26:19,143 INFO [train.py:903] (2/4) Epoch 14, batch 2600, loss[loss=0.2095, simple_loss=0.2833, pruned_loss=0.06785, over 19735.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07409, over 3820196.69 frames. ], batch size: 51, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:26:44,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.557e+02 7.203e+02 9.059e+02 1.995e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-02 02:27:21,526 INFO [train.py:903] (2/4) Epoch 14, batch 2650, loss[loss=0.1846, simple_loss=0.2678, pruned_loss=0.05073, over 19592.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.07445, over 3820821.63 frames. ], batch size: 52, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:27:41,098 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 02:28:11,805 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5814, 1.1990, 1.4432, 1.2659, 2.2066, 1.0405, 2.0539, 2.4081], + device='cuda:2'), covar=tensor([0.0661, 0.2591, 0.2680, 0.1595, 0.0865, 0.1978, 0.0949, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0371, 0.0346, 0.0363, 0.0327, 0.0355, 0.0338, 0.0347, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:28:18,975 INFO [train.py:903] (2/4) Epoch 14, batch 2700, loss[loss=0.188, simple_loss=0.2674, pruned_loss=0.05429, over 19708.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3016, pruned_loss=0.07541, over 3827058.07 frames. ], batch size: 51, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:28:43,676 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.145e+02 6.524e+02 8.606e+02 2.089e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 02:29:13,060 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-02 02:29:20,024 INFO [train.py:903] (2/4) Epoch 14, batch 2750, loss[loss=0.2276, simple_loss=0.3044, pruned_loss=0.07544, over 17701.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3008, pruned_loss=0.07492, over 3822307.65 frames. ], batch size: 101, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:29:44,615 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 02:30:14,318 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91560.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:30:18,379 INFO [train.py:903] (2/4) Epoch 14, batch 2800, loss[loss=0.223, simple_loss=0.2999, pruned_loss=0.07307, over 19668.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3017, pruned_loss=0.07533, over 3839747.93 frames. ], batch size: 53, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:30:41,238 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:30:44,141 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 4.991e+02 6.176e+02 8.428e+02 2.269e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 02:31:10,031 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:31:19,257 INFO [train.py:903] (2/4) Epoch 14, batch 2850, loss[loss=0.2481, simple_loss=0.3195, pruned_loss=0.08831, over 19601.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3, pruned_loss=0.07455, over 3842974.74 frames. ], batch size: 61, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:31:55,212 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6539, 4.0218, 4.2204, 4.2374, 1.5959, 3.8974, 3.4919, 3.9166], + device='cuda:2'), covar=tensor([0.1310, 0.0877, 0.0578, 0.0631, 0.5371, 0.0855, 0.0666, 0.1107], + device='cuda:2'), in_proj_covar=tensor([0.0708, 0.0634, 0.0846, 0.0728, 0.0762, 0.0593, 0.0509, 0.0774], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 02:31:58,526 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91647.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:32:20,013 INFO [train.py:903] (2/4) Epoch 14, batch 2900, loss[loss=0.2041, simple_loss=0.276, pruned_loss=0.06604, over 19274.00 frames. 
], tot_loss[loss=0.2232, simple_loss=0.2988, pruned_loss=0.07385, over 3845762.71 frames. ], batch size: 44, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:32:20,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 02:32:44,081 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.659e+02 5.126e+02 6.310e+02 7.919e+02 2.445e+03, threshold=1.262e+03, percent-clipped=4.0 +2023-04-02 02:33:19,570 INFO [train.py:903] (2/4) Epoch 14, batch 2950, loss[loss=0.2046, simple_loss=0.288, pruned_loss=0.06061, over 19766.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3004, pruned_loss=0.07451, over 3830936.49 frames. ], batch size: 54, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:17,857 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91762.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:34:19,640 INFO [train.py:903] (2/4) Epoch 14, batch 3000, loss[loss=0.2267, simple_loss=0.3036, pruned_loss=0.07488, over 18697.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3011, pruned_loss=0.07521, over 3813734.15 frames. ], batch size: 74, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:19,640 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 02:34:36,649 INFO [train.py:937] (2/4) Epoch 14, validation: loss=0.1742, simple_loss=0.2751, pruned_loss=0.03671, over 944034.00 frames. +2023-04-02 02:34:36,649 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 02:34:42,037 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 02:35:02,709 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.261e+02 6.370e+02 8.625e+02 1.479e+03, threshold=1.274e+03, percent-clipped=4.0 +2023-04-02 02:35:07,907 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 02:35:37,796 INFO [train.py:903] (2/4) Epoch 14, batch 3050, loss[loss=0.1803, simple_loss=0.2642, pruned_loss=0.04824, over 19858.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2989, pruned_loss=0.07354, over 3823498.29 frames. ], batch size: 52, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:36:37,011 INFO [train.py:903] (2/4) Epoch 14, batch 3100, loss[loss=0.2393, simple_loss=0.3151, pruned_loss=0.08172, over 19697.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2996, pruned_loss=0.07384, over 3829429.74 frames. ], batch size: 60, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:37:02,384 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.361e+02 6.622e+02 8.860e+02 2.580e+03, threshold=1.324e+03, percent-clipped=11.0 +2023-04-02 02:37:24,382 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:37:31,750 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:37:37,890 INFO [train.py:903] (2/4) Epoch 14, batch 3150, loss[loss=0.2397, simple_loss=0.3186, pruned_loss=0.08036, over 19665.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3013, pruned_loss=0.07484, over 3839498.20 frames. ], batch size: 60, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:04,260 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 02:38:37,358 INFO [train.py:903] (2/4) Epoch 14, batch 3200, loss[loss=0.3125, simple_loss=0.3684, pruned_loss=0.1283, over 13074.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3011, pruned_loss=0.07512, over 3818928.82 frames. ], batch size: 135, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:48,711 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:02,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.150e+02 6.206e+02 7.874e+02 1.849e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 02:39:12,907 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9653, 1.6114, 1.9504, 1.8052, 4.4167, 1.0303, 2.4151, 4.7825], + device='cuda:2'), covar=tensor([0.0367, 0.2766, 0.2718, 0.1843, 0.0734, 0.2738, 0.1485, 0.0191], + device='cuda:2'), in_proj_covar=tensor([0.0373, 0.0347, 0.0364, 0.0328, 0.0358, 0.0338, 0.0350, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:39:20,939 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91999.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:39:39,708 INFO [train.py:903] (2/4) Epoch 14, batch 3250, loss[loss=0.2726, simple_loss=0.3358, pruned_loss=0.1047, over 19687.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3014, pruned_loss=0.07553, over 3797876.22 frames. ], batch size: 60, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:39:44,747 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:45,908 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92019.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:40:05,440 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1297, 1.8006, 1.4016, 1.2324, 1.5826, 1.1800, 1.2258, 1.5198], + device='cuda:2'), covar=tensor([0.0717, 0.0664, 0.0978, 0.0666, 0.0469, 0.1120, 0.0537, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0309, 0.0329, 0.0252, 0.0241, 0.0329, 0.0297, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:40:15,631 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:40:39,293 INFO [train.py:903] (2/4) Epoch 14, batch 3300, loss[loss=0.2072, simple_loss=0.2768, pruned_loss=0.06873, over 19053.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3019, pruned_loss=0.07591, over 3793561.45 frames. ], batch size: 42, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:40:45,689 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 02:41:04,997 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.974e+02 6.176e+02 7.406e+02 2.018e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-02 02:41:41,642 INFO [train.py:903] (2/4) Epoch 14, batch 3350, loss[loss=0.2278, simple_loss=0.3062, pruned_loss=0.07469, over 19536.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3018, pruned_loss=0.07559, over 3804344.52 frames. 
], batch size: 56, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:42:19,487 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2038, 1.2906, 1.6017, 1.3325, 2.8620, 3.8111, 3.5222, 4.0044], + device='cuda:2'), covar=tensor([0.1551, 0.3498, 0.3089, 0.2112, 0.0493, 0.0146, 0.0185, 0.0191], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0299, 0.0328, 0.0252, 0.0221, 0.0165, 0.0205, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 02:42:40,601 INFO [train.py:903] (2/4) Epoch 14, batch 3400, loss[loss=0.2148, simple_loss=0.3001, pruned_loss=0.06473, over 19665.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3017, pruned_loss=0.07568, over 3803762.16 frames. ], batch size: 58, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:43:05,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.934e+02 6.017e+02 7.496e+02 1.650e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-02 02:43:42,223 INFO [train.py:903] (2/4) Epoch 14, batch 3450, loss[loss=0.2363, simple_loss=0.3142, pruned_loss=0.07919, over 19659.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3021, pruned_loss=0.07587, over 3798546.35 frames. ], batch size: 58, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:43:44,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 02:44:28,720 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:44:40,671 INFO [train.py:903] (2/4) Epoch 14, batch 3500, loss[loss=0.1803, simple_loss=0.2651, pruned_loss=0.04776, over 19861.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3015, pruned_loss=0.0754, over 3809815.56 frames. ], batch size: 52, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:44:54,894 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92275.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:45:05,731 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.239e+02 6.377e+02 7.871e+02 1.606e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-02 02:45:24,240 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92300.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:45:41,111 INFO [train.py:903] (2/4) Epoch 14, batch 3550, loss[loss=0.2254, simple_loss=0.3093, pruned_loss=0.07075, over 18823.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3025, pruned_loss=0.07565, over 3823602.38 frames. ], batch size: 74, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:45:44,527 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:15,220 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92343.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:46:39,899 INFO [train.py:903] (2/4) Epoch 14, batch 3600, loss[loss=0.2148, simple_loss=0.3015, pruned_loss=0.06402, over 19729.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3011, pruned_loss=0.07441, over 3832990.66 frames. 
], batch size: 63, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:46:44,953 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92368.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:47:04,638 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.209e+02 6.321e+02 7.842e+02 1.520e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 02:47:40,754 INFO [train.py:903] (2/4) Epoch 14, batch 3650, loss[loss=0.2127, simple_loss=0.3007, pruned_loss=0.06237, over 19530.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3, pruned_loss=0.07405, over 3824248.68 frames. ], batch size: 56, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:47:42,673 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-02 02:48:03,005 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92432.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:48:34,111 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92458.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:48:40,409 INFO [train.py:903] (2/4) Epoch 14, batch 3700, loss[loss=0.2857, simple_loss=0.3482, pruned_loss=0.1116, over 13147.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2996, pruned_loss=0.07398, over 3824213.87 frames. ], batch size: 136, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:49:01,666 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0232, 1.9238, 1.8852, 2.1981, 1.9620, 1.7615, 1.8281, 2.0066], + device='cuda:2'), covar=tensor([0.0816, 0.1344, 0.1118, 0.0749, 0.1051, 0.0499, 0.1094, 0.0586], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0350, 0.0294, 0.0241, 0.0296, 0.0243, 0.0288, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 02:49:05,826 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.888e+02 6.023e+02 8.004e+02 1.682e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 02:49:23,513 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 02:49:41,654 INFO [train.py:903] (2/4) Epoch 14, batch 3750, loss[loss=0.1923, simple_loss=0.2704, pruned_loss=0.0571, over 19736.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2993, pruned_loss=0.07362, over 3815138.24 frames. ], batch size: 51, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:50:42,054 INFO [train.py:903] (2/4) Epoch 14, batch 3800, loss[loss=0.2016, simple_loss=0.2805, pruned_loss=0.06135, over 19682.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2998, pruned_loss=0.07406, over 3810924.80 frames. ], batch size: 53, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:06,370 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.992e+02 6.384e+02 8.353e+02 1.667e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 02:51:07,052 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.31 vs. limit=5.0 +2023-04-02 02:51:10,992 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-02 02:51:16,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7990, 3.2607, 3.3365, 3.3140, 1.2026, 3.1485, 2.7895, 3.1110], + device='cuda:2'), covar=tensor([0.1719, 0.0888, 0.0781, 0.0916, 0.5348, 0.0868, 0.0747, 0.1294], + device='cuda:2'), in_proj_covar=tensor([0.0715, 0.0638, 0.0846, 0.0727, 0.0759, 0.0591, 0.0511, 0.0779], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 02:51:42,072 INFO [train.py:903] (2/4) Epoch 14, batch 3850, loss[loss=0.2654, simple_loss=0.3405, pruned_loss=0.09519, over 19360.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3006, pruned_loss=0.07484, over 3819600.53 frames. ], batch size: 70, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:54,764 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92624.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:51:59,471 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3951, 1.3389, 1.5759, 1.5660, 2.4074, 2.1015, 2.3372, 0.7064], + device='cuda:2'), covar=tensor([0.2268, 0.3936, 0.2424, 0.1763, 0.1246, 0.1954, 0.1275, 0.4040], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0599, 0.0644, 0.0455, 0.0608, 0.0507, 0.0657, 0.0513], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 02:52:19,032 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:26,120 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:30,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-02 02:52:43,617 INFO [train.py:903] (2/4) Epoch 14, batch 3900, loss[loss=0.2224, simple_loss=0.2986, pruned_loss=0.0731, over 19478.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07435, over 3811687.60 frames. ], batch size: 64, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:10,396 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.910e+02 5.705e+02 7.277e+02 9.203e+02 2.913e+03, threshold=1.455e+03, percent-clipped=9.0 +2023-04-02 02:53:13,182 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92688.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:43,652 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:44,350 INFO [train.py:903] (2/4) Epoch 14, batch 3950, loss[loss=0.2408, simple_loss=0.3192, pruned_loss=0.08118, over 19607.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2998, pruned_loss=0.07444, over 3801476.38 frames. ], batch size: 57, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:44,779 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92714.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:53:48,545 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-02 02:54:13,015 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:54:14,283 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92739.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:54:45,461 INFO [train.py:903] (2/4) Epoch 14, batch 4000, loss[loss=0.1961, simple_loss=0.267, pruned_loss=0.06257, over 19743.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2988, pruned_loss=0.07337, over 3816016.98 frames. ], batch size: 46, lr: 5.91e-03, grad_scale: 8.0 +2023-04-02 02:54:48,412 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-02 02:54:50,740 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-02 02:55:11,213 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.983e+02 6.436e+02 8.255e+02 1.908e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 02:55:33,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 02:55:33,672 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 02:55:45,564 INFO [train.py:903] (2/4) Epoch 14, batch 4050, loss[loss=0.209, simple_loss=0.2937, pruned_loss=0.06219, over 19661.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2993, pruned_loss=0.07355, over 3807313.01 frames. ], batch size: 55, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:56:29,006 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:56:31,605 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 02:56:45,592 INFO [train.py:903] (2/4) Epoch 14, batch 4100, loss[loss=0.2147, simple_loss=0.2888, pruned_loss=0.07027, over 17729.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3006, pruned_loss=0.07437, over 3808608.17 frames. ], batch size: 39, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:13,887 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 5.414e+02 6.604e+02 8.302e+02 1.654e+03, threshold=1.321e+03, percent-clipped=7.0 +2023-04-02 02:57:21,833 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 02:57:34,180 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 02:57:45,529 INFO [train.py:903] (2/4) Epoch 14, batch 4150, loss[loss=0.2182, simple_loss=0.2763, pruned_loss=0.08005, over 19736.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2992, pruned_loss=0.07387, over 3824575.83 frames. ], batch size: 46, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:51,055 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:28,840 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92949.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:30,583 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-02 02:58:34,559 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7426, 1.5191, 1.5233, 2.2046, 1.7352, 1.9772, 1.9741, 1.8324], + device='cuda:2'), covar=tensor([0.0742, 0.0916, 0.0983, 0.0712, 0.0777, 0.0721, 0.0843, 0.0626], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0220, 0.0223, 0.0241, 0.0226, 0.0207, 0.0188, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 02:58:48,463 INFO [train.py:903] (2/4) Epoch 14, batch 4200, loss[loss=0.2206, simple_loss=0.2947, pruned_loss=0.07322, over 19530.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3001, pruned_loss=0.07449, over 3818716.04 frames. ], batch size: 54, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:58:51,713 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 02:59:15,433 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.424e+02 5.036e+02 6.286e+02 7.807e+02 1.684e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-02 02:59:15,610 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:22,680 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:48,127 INFO [train.py:903] (2/4) Epoch 14, batch 4250, loss[loss=0.2667, simple_loss=0.3268, pruned_loss=0.1033, over 12575.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3007, pruned_loss=0.0751, over 3791792.02 frames. ], batch size: 136, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:59:55,515 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6137, 2.1524, 2.3281, 2.8371, 2.5600, 2.4150, 2.2934, 2.7973], + device='cuda:2'), covar=tensor([0.0915, 0.1729, 0.1334, 0.0961, 0.1304, 0.0440, 0.1126, 0.0555], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0351, 0.0296, 0.0242, 0.0297, 0.0245, 0.0287, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:00:01,601 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 03:00:02,436 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6304, 1.7006, 1.9370, 2.1082, 1.4578, 1.9496, 2.0453, 1.8295], + device='cuda:2'), covar=tensor([0.3767, 0.3138, 0.1559, 0.1808, 0.3278, 0.1746, 0.4283, 0.2915], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0859, 0.0666, 0.0898, 0.0810, 0.0741, 0.0802, 0.0731], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:00:03,116 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 03:00:05,158 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:15,028 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 03:00:26,879 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-02 03:00:37,790 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:47,528 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-02 03:00:48,966 INFO [train.py:903] (2/4) Epoch 14, batch 4300, loss[loss=0.2325, simple_loss=0.3069, pruned_loss=0.07903, over 19765.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.301, pruned_loss=0.07488, over 3797287.69 frames. ], batch size: 54, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:01:12,686 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:18,347 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.279e+02 5.152e+02 6.308e+02 8.497e+02 2.668e+03, threshold=1.262e+03, percent-clipped=7.0 +2023-04-02 03:01:36,190 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:42,508 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 03:01:50,403 INFO [train.py:903] (2/4) Epoch 14, batch 4350, loss[loss=0.2318, simple_loss=0.3088, pruned_loss=0.07738, over 19526.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3007, pruned_loss=0.07451, over 3799245.90 frames. ], batch size: 54, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:02:14,529 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:02:52,477 INFO [train.py:903] (2/4) Epoch 14, batch 4400, loss[loss=0.2679, simple_loss=0.335, pruned_loss=0.1004, over 19346.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3007, pruned_loss=0.07461, over 3789372.87 frames. ], batch size: 66, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:03:02,485 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 03:03:15,286 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 03:03:18,719 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.134e+02 6.154e+02 7.916e+02 2.681e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 03:03:25,262 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 03:03:26,660 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:32,737 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93197.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:52,932 INFO [train.py:903] (2/4) Epoch 14, batch 4450, loss[loss=0.2488, simple_loss=0.3224, pruned_loss=0.08759, over 18310.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.301, pruned_loss=0.07517, over 3802351.02 frames. ], batch size: 83, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:04:49,432 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93261.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:04:52,746 INFO [train.py:903] (2/4) Epoch 14, batch 4500, loss[loss=0.2419, simple_loss=0.3156, pruned_loss=0.08416, over 19673.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3005, pruned_loss=0.07468, over 3808504.36 frames. 
], batch size: 55, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:05:21,652 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 5.314e+02 6.344e+02 7.896e+02 1.749e+03, threshold=1.269e+03, percent-clipped=5.0 +2023-04-02 03:05:28,480 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:45,617 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93308.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:52,299 INFO [train.py:903] (2/4) Epoch 14, batch 4550, loss[loss=0.2118, simple_loss=0.2822, pruned_loss=0.0707, over 19396.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3009, pruned_loss=0.07499, over 3793194.46 frames. ], batch size: 48, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:06:06,165 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 03:06:18,795 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0413, 1.3090, 1.5958, 1.1208, 2.5668, 3.3376, 3.1103, 3.5506], + device='cuda:2'), covar=tensor([0.1652, 0.3440, 0.3222, 0.2291, 0.0500, 0.0182, 0.0208, 0.0199], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0299, 0.0328, 0.0252, 0.0220, 0.0164, 0.0205, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:06:21,907 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:27,652 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 03:06:46,931 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:55,975 INFO [train.py:903] (2/4) Epoch 14, batch 4600, loss[loss=0.1897, simple_loss=0.2677, pruned_loss=0.05588, over 19839.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3013, pruned_loss=0.07521, over 3791730.22 frames. 
], batch size: 52, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:07:05,028 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:09,512 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:17,772 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:22,001 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.339e+02 6.388e+02 8.227e+02 2.509e+03, threshold=1.278e+03, percent-clipped=2.0 +2023-04-02 03:07:35,567 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:49,460 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:50,577 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5359, 1.1537, 1.3509, 1.2893, 2.1508, 1.0249, 2.0308, 2.4043], + device='cuda:2'), covar=tensor([0.0589, 0.2518, 0.2673, 0.1540, 0.0874, 0.1982, 0.0903, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0341, 0.0361, 0.0321, 0.0351, 0.0330, 0.0341, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:07:56,014 INFO [train.py:903] (2/4) Epoch 14, batch 4650, loss[loss=0.1959, simple_loss=0.2841, pruned_loss=0.05381, over 19530.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.301, pruned_loss=0.07476, over 3808900.68 frames. ], batch size: 56, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:08:12,032 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 03:08:23,193 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 03:08:43,074 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:44,295 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93453.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:56,302 INFO [train.py:903] (2/4) Epoch 14, batch 4700, loss[loss=0.2314, simple_loss=0.31, pruned_loss=0.07642, over 19593.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3007, pruned_loss=0.07484, over 3814455.83 frames. ], batch size: 61, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:09:06,638 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0454, 2.8083, 2.0261, 2.5071, 1.0727, 2.7052, 2.6044, 2.6957], + device='cuda:2'), covar=tensor([0.1208, 0.1358, 0.2019, 0.0989, 0.3294, 0.1023, 0.1078, 0.1228], + device='cuda:2'), in_proj_covar=tensor([0.0450, 0.0380, 0.0452, 0.0327, 0.0390, 0.0388, 0.0378, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:09:11,835 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:13,342 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:20,339 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 03:09:25,718 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 5.199e+02 6.337e+02 7.857e+02 1.524e+03, threshold=1.267e+03, percent-clipped=2.0 +2023-04-02 03:09:26,067 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:46,400 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5846, 1.6369, 2.0860, 1.7507, 3.1502, 2.4451, 3.2211, 1.7466], + device='cuda:2'), covar=tensor([0.2216, 0.3828, 0.2314, 0.1827, 0.1420, 0.1990, 0.1583, 0.3492], + device='cuda:2'), in_proj_covar=tensor([0.0501, 0.0592, 0.0638, 0.0453, 0.0602, 0.0504, 0.0648, 0.0507], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:09:55,142 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,940 INFO [train.py:903] (2/4) Epoch 14, batch 4750, loss[loss=0.216, simple_loss=0.3021, pruned_loss=0.06493, over 19615.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2995, pruned_loss=0.07358, over 3825781.99 frames. ], batch size: 57, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:55,742 INFO [train.py:903] (2/4) Epoch 14, batch 4800, loss[loss=0.2861, simple_loss=0.3548, pruned_loss=0.1088, over 18152.00 frames. ], tot_loss[loss=0.223, simple_loss=0.299, pruned_loss=0.0735, over 3824621.41 frames. ], batch size: 83, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:56,184 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:22,952 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.541e+02 6.642e+02 8.296e+02 2.320e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 03:11:25,731 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:29,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:57,055 INFO [train.py:903] (2/4) Epoch 14, batch 4850, loss[loss=0.2408, simple_loss=0.3159, pruned_loss=0.08289, over 19618.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2998, pruned_loss=0.07396, over 3811480.49 frames. ], batch size: 57, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:15,544 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7350, 1.5132, 1.5096, 2.2799, 1.8673, 2.2274, 2.1639, 1.8820], + device='cuda:2'), covar=tensor([0.0790, 0.0947, 0.1006, 0.0757, 0.0796, 0.0599, 0.0790, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0219, 0.0220, 0.0241, 0.0225, 0.0206, 0.0189, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 03:12:17,847 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:23,246 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 03:12:42,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 03:12:47,282 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. 
Duration: 25.035 +2023-04-02 03:12:48,586 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 03:12:48,974 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93657.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:56,529 INFO [train.py:903] (2/4) Epoch 14, batch 4900, loss[loss=0.2386, simple_loss=0.3143, pruned_loss=0.0815, over 17553.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2996, pruned_loss=0.07353, over 3827056.06 frames. ], batch size: 101, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:56,927 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:57,698 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 03:13:18,091 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 03:13:25,555 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.842e+02 5.818e+02 7.123e+02 1.786e+03, threshold=1.164e+03, percent-clipped=2.0 +2023-04-02 03:13:28,147 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93689.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:35,386 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 03:13:49,613 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:55,988 INFO [train.py:903] (2/4) Epoch 14, batch 4950, loss[loss=0.2555, simple_loss=0.3185, pruned_loss=0.09629, over 19373.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2991, pruned_loss=0.07335, over 3824838.67 frames. ], batch size: 48, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:14:16,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 03:14:21,108 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:32,417 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:36,697 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 03:14:53,753 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 03:14:58,219 INFO [train.py:903] (2/4) Epoch 14, batch 5000, loss[loss=0.2739, simple_loss=0.3492, pruned_loss=0.0993, over 19564.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3001, pruned_loss=0.07411, over 3823218.91 frames. ], batch size: 56, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:15:04,143 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93768.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:05,332 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:07,263 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 03:15:17,370 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 03:15:25,227 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 5.347e+02 6.962e+02 9.103e+02 2.417e+03, threshold=1.392e+03, percent-clipped=9.0 +2023-04-02 03:15:26,654 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93788.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:33,821 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:59,313 INFO [train.py:903] (2/4) Epoch 14, batch 5050, loss[loss=0.2112, simple_loss=0.2971, pruned_loss=0.0627, over 18289.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3008, pruned_loss=0.07421, over 3826535.47 frames. ], batch size: 83, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:16:16,690 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 03:16:20,965 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8465, 1.9321, 2.0502, 2.4032, 1.8809, 2.3303, 2.1766, 1.9714], + device='cuda:2'), covar=tensor([0.2959, 0.2485, 0.1282, 0.1459, 0.2587, 0.1262, 0.2849, 0.2159], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0860, 0.0668, 0.0900, 0.0810, 0.0746, 0.0807, 0.0735], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:16:35,021 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 03:16:40,858 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:16:59,042 INFO [train.py:903] (2/4) Epoch 14, batch 5100, loss[loss=0.2649, simple_loss=0.3401, pruned_loss=0.0948, over 18811.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3004, pruned_loss=0.07424, over 3814305.51 frames. ], batch size: 74, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:17:09,128 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:17:09,884 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 03:17:13,173 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 03:17:16,709 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 03:17:26,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 4.904e+02 5.934e+02 7.645e+02 1.361e+03, threshold=1.187e+03, percent-clipped=0.0 +2023-04-02 03:17:56,877 INFO [train.py:903] (2/4) Epoch 14, batch 5150, loss[loss=0.2014, simple_loss=0.2792, pruned_loss=0.06174, over 19482.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3018, pruned_loss=0.07486, over 3807683.00 frames. ], batch size: 49, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:18:09,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 03:18:43,212 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 03:18:58,017 INFO [train.py:903] (2/4) Epoch 14, batch 5200, loss[loss=0.3036, simple_loss=0.3515, pruned_loss=0.1279, over 19675.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3019, pruned_loss=0.07503, over 3801156.16 frames. 
], batch size: 59, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:19:13,852 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 03:19:25,470 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 5.268e+02 6.485e+02 8.631e+02 2.638e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 03:19:32,901 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 03:19:39,234 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:19:57,310 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 03:19:59,381 INFO [train.py:903] (2/4) Epoch 14, batch 5250, loss[loss=0.2332, simple_loss=0.3055, pruned_loss=0.08042, over 19493.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3009, pruned_loss=0.07452, over 3814510.27 frames. ], batch size: 49, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:20:02,058 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6154, 1.5475, 1.7161, 1.9414, 4.1231, 1.2412, 2.4115, 4.4181], + device='cuda:2'), covar=tensor([0.0366, 0.2700, 0.2695, 0.1595, 0.0715, 0.2541, 0.1445, 0.0203], + device='cuda:2'), in_proj_covar=tensor([0.0372, 0.0345, 0.0364, 0.0323, 0.0351, 0.0334, 0.0343, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:20:59,237 INFO [train.py:903] (2/4) Epoch 14, batch 5300, loss[loss=0.2771, simple_loss=0.3374, pruned_loss=0.1084, over 12975.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2999, pruned_loss=0.07438, over 3796662.19 frames. ], batch size: 136, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:21:16,452 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 03:21:27,962 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.389e+02 5.368e+02 7.020e+02 9.283e+02 2.840e+03, threshold=1.404e+03, percent-clipped=4.0 +2023-04-02 03:21:59,006 INFO [train.py:903] (2/4) Epoch 14, batch 5350, loss[loss=0.2105, simple_loss=0.2978, pruned_loss=0.06156, over 19697.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2995, pruned_loss=0.07388, over 3820677.48 frames. ], batch size: 59, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:22:23,142 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:22:34,755 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 03:22:59,600 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8717, 1.9638, 2.1730, 2.5079, 1.8062, 2.4886, 2.3287, 2.0940], + device='cuda:2'), covar=tensor([0.3768, 0.3233, 0.1572, 0.1972, 0.3605, 0.1627, 0.3868, 0.2746], + device='cuda:2'), in_proj_covar=tensor([0.0821, 0.0859, 0.0664, 0.0894, 0.0808, 0.0742, 0.0803, 0.0729], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:23:01,401 INFO [train.py:903] (2/4) Epoch 14, batch 5400, loss[loss=0.1821, simple_loss=0.2557, pruned_loss=0.0542, over 19407.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2989, pruned_loss=0.07384, over 3810428.34 frames. 
], batch size: 48, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:23:29,188 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.537e+02 7.248e+02 8.700e+02 2.021e+03, threshold=1.450e+03, percent-clipped=3.0 +2023-04-02 03:24:03,212 INFO [train.py:903] (2/4) Epoch 14, batch 5450, loss[loss=0.1813, simple_loss=0.2541, pruned_loss=0.05423, over 19766.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2989, pruned_loss=0.07374, over 3805877.35 frames. ], batch size: 46, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:24:04,644 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5005, 4.0330, 4.2104, 4.1884, 1.6954, 3.9693, 3.4078, 3.8593], + device='cuda:2'), covar=tensor([0.1532, 0.0850, 0.0604, 0.0650, 0.5341, 0.0718, 0.0699, 0.1203], + device='cuda:2'), in_proj_covar=tensor([0.0713, 0.0642, 0.0855, 0.0727, 0.0760, 0.0594, 0.0511, 0.0784], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 03:24:33,968 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94241.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:24:43,537 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:25:02,870 INFO [train.py:903] (2/4) Epoch 14, batch 5500, loss[loss=0.2482, simple_loss=0.3246, pruned_loss=0.08591, over 19691.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2977, pruned_loss=0.07285, over 3820870.07 frames. ], batch size: 60, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:25:09,715 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9926, 2.8334, 1.8171, 1.9246, 1.6762, 2.1209, 0.9788, 2.0076], + device='cuda:2'), covar=tensor([0.0771, 0.0648, 0.0794, 0.1253, 0.1421, 0.1281, 0.1239, 0.1141], + device='cuda:2'), in_proj_covar=tensor([0.0345, 0.0342, 0.0337, 0.0364, 0.0438, 0.0363, 0.0317, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:25:25,702 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 03:25:30,872 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.805e+02 5.794e+02 7.462e+02 1.465e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 03:26:01,459 INFO [train.py:903] (2/4) Epoch 14, batch 5550, loss[loss=0.2274, simple_loss=0.3071, pruned_loss=0.07386, over 17377.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2991, pruned_loss=0.07329, over 3822499.82 frames. ], batch size: 101, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:26:08,354 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 03:26:37,721 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:26:57,893 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 03:27:01,443 INFO [train.py:903] (2/4) Epoch 14, batch 5600, loss[loss=0.2307, simple_loss=0.3102, pruned_loss=0.07562, over 19334.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2993, pruned_loss=0.07379, over 3829374.51 frames. 
], batch size: 66, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:27:05,702 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:27:06,236 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 03:27:30,031 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.188e+02 6.005e+02 7.911e+02 1.925e+03, threshold=1.201e+03, percent-clipped=8.0 +2023-04-02 03:28:03,376 INFO [train.py:903] (2/4) Epoch 14, batch 5650, loss[loss=0.2295, simple_loss=0.301, pruned_loss=0.07898, over 19674.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2995, pruned_loss=0.07388, over 3817542.18 frames. ], batch size: 55, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:28:36,441 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.99 vs. limit=5.0 +2023-04-02 03:28:49,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 03:28:55,927 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:29:02,024 INFO [train.py:903] (2/4) Epoch 14, batch 5700, loss[loss=0.2447, simple_loss=0.3018, pruned_loss=0.09377, over 19750.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2994, pruned_loss=0.07424, over 3810818.05 frames. ], batch size: 47, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:29:19,510 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5854, 2.3548, 1.7376, 1.4479, 2.2347, 1.2016, 1.2307, 2.0094], + device='cuda:2'), covar=tensor([0.1058, 0.0641, 0.0975, 0.0863, 0.0506, 0.1258, 0.0831, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0301, 0.0324, 0.0245, 0.0235, 0.0324, 0.0290, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:29:29,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.949e+02 6.008e+02 7.817e+02 2.884e+03, threshold=1.202e+03, percent-clipped=11.0 +2023-04-02 03:29:50,193 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94503.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:30:02,384 INFO [train.py:903] (2/4) Epoch 14, batch 5750, loss[loss=0.2269, simple_loss=0.313, pruned_loss=0.07041, over 19615.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2998, pruned_loss=0.07399, over 3819451.50 frames. ], batch size: 57, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:30:04,714 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 03:30:11,533 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 03:30:17,744 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 03:30:21,250 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:31:04,686 INFO [train.py:903] (2/4) Epoch 14, batch 5800, loss[loss=0.2401, simple_loss=0.3193, pruned_loss=0.08041, over 19687.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2996, pruned_loss=0.07387, over 3804899.82 frames. 
], batch size: 59, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:31:09,548 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4882, 1.6287, 1.8594, 1.7423, 2.7460, 2.3716, 2.9699, 1.2340], + device='cuda:2'), covar=tensor([0.2177, 0.3655, 0.2313, 0.1734, 0.1358, 0.1825, 0.1291, 0.3747], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0587, 0.0637, 0.0448, 0.0600, 0.0503, 0.0646, 0.0507], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:31:13,715 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3341, 3.9288, 2.3899, 3.5679, 0.9538, 3.7358, 3.7471, 3.7821], + device='cuda:2'), covar=tensor([0.0722, 0.1078, 0.2293, 0.0863, 0.4046, 0.0867, 0.0939, 0.1103], + device='cuda:2'), in_proj_covar=tensor([0.0455, 0.0380, 0.0456, 0.0327, 0.0393, 0.0387, 0.0380, 0.0411], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:31:30,534 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94585.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:31:32,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.430e+02 7.155e+02 9.192e+02 1.752e+03, threshold=1.431e+03, percent-clipped=10.0 +2023-04-02 03:31:44,321 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9595, 4.3763, 4.6482, 4.6279, 1.6781, 4.3344, 3.7631, 4.3278], + device='cuda:2'), covar=tensor([0.1473, 0.0788, 0.0538, 0.0561, 0.5663, 0.0631, 0.0581, 0.1067], + device='cuda:2'), in_proj_covar=tensor([0.0726, 0.0655, 0.0867, 0.0741, 0.0774, 0.0604, 0.0520, 0.0796], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 03:32:06,963 INFO [train.py:903] (2/4) Epoch 14, batch 5850, loss[loss=0.2108, simple_loss=0.2893, pruned_loss=0.06618, over 19577.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3001, pruned_loss=0.07372, over 3811086.56 frames. ], batch size: 52, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:33:06,744 INFO [train.py:903] (2/4) Epoch 14, batch 5900, loss[loss=0.2152, simple_loss=0.2815, pruned_loss=0.07442, over 19792.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3001, pruned_loss=0.07424, over 3810189.15 frames. ], batch size: 48, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:33:07,936 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 03:33:21,887 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 03:33:27,833 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 03:33:33,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.122e+02 5.971e+02 8.409e+02 2.018e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-02 03:33:50,043 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94700.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:34:01,332 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94710.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:05,821 INFO [train.py:903] (2/4) Epoch 14, batch 5950, loss[loss=0.2426, simple_loss=0.3005, pruned_loss=0.09232, over 19403.00 frames. 
], tot_loss[loss=0.2248, simple_loss=0.3005, pruned_loss=0.07458, over 3815304.08 frames. ], batch size: 48, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:34:06,247 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94714.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:37,303 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94739.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:35:04,621 INFO [train.py:903] (2/4) Epoch 14, batch 6000, loss[loss=0.262, simple_loss=0.331, pruned_loss=0.09649, over 19346.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3011, pruned_loss=0.07488, over 3808658.15 frames. ], batch size: 70, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:35:04,622 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 03:35:17,175 INFO [train.py:937] (2/4) Epoch 14, validation: loss=0.1744, simple_loss=0.2748, pruned_loss=0.03705, over 944034.00 frames. +2023-04-02 03:35:17,176 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 03:35:33,069 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 03:35:38,762 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6089, 1.3690, 1.3903, 1.6277, 3.1662, 1.1748, 2.3144, 3.5420], + device='cuda:2'), covar=tensor([0.0461, 0.2733, 0.2844, 0.1708, 0.0717, 0.2392, 0.1149, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0372, 0.0346, 0.0366, 0.0325, 0.0354, 0.0335, 0.0344, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:35:47,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.018e+02 6.191e+02 7.483e+02 1.325e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-02 03:36:13,898 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 03:36:15,032 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 03:36:17,849 INFO [train.py:903] (2/4) Epoch 14, batch 6050, loss[loss=0.2361, simple_loss=0.3158, pruned_loss=0.07824, over 19358.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3013, pruned_loss=0.07512, over 3798112.50 frames. ], batch size: 70, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:36:33,207 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94825.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:36:54,380 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7831, 3.2440, 3.2754, 3.3130, 1.2945, 3.1261, 2.7430, 3.0406], + device='cuda:2'), covar=tensor([0.1557, 0.0963, 0.0790, 0.0826, 0.5141, 0.0868, 0.0784, 0.1281], + device='cuda:2'), in_proj_covar=tensor([0.0711, 0.0640, 0.0849, 0.0725, 0.0755, 0.0594, 0.0510, 0.0781], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 03:37:20,911 INFO [train.py:903] (2/4) Epoch 14, batch 6100, loss[loss=0.2415, simple_loss=0.3192, pruned_loss=0.0819, over 19641.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3018, pruned_loss=0.0755, over 3803322.20 frames. 
], batch size: 60, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:37:48,987 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.260e+02 6.294e+02 8.137e+02 1.551e+03, threshold=1.259e+03, percent-clipped=3.0 +2023-04-02 03:38:00,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 03:38:21,674 INFO [train.py:903] (2/4) Epoch 14, batch 6150, loss[loss=0.2594, simple_loss=0.3323, pruned_loss=0.0933, over 19293.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3025, pruned_loss=0.0755, over 3778541.35 frames. ], batch size: 70, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:38:48,801 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 03:39:13,134 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94956.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:39:14,541 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-02 03:39:21,882 INFO [train.py:903] (2/4) Epoch 14, batch 6200, loss[loss=0.2372, simple_loss=0.3164, pruned_loss=0.07898, over 19111.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07387, over 3797457.29 frames. ], batch size: 69, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:39:44,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94981.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:39:51,886 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 5.470e+02 6.385e+02 8.085e+02 2.296e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 03:40:22,505 INFO [train.py:903] (2/4) Epoch 14, batch 6250, loss[loss=0.2378, simple_loss=0.315, pruned_loss=0.08029, over 18818.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07402, over 3796018.86 frames. ], batch size: 74, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:40:55,023 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 03:41:24,164 INFO [train.py:903] (2/4) Epoch 14, batch 6300, loss[loss=0.2316, simple_loss=0.3179, pruned_loss=0.07267, over 19659.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07404, over 3813597.94 frames. ], batch size: 55, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:41:44,543 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:41:51,887 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.537e+02 5.238e+02 6.215e+02 7.195e+02 1.642e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 03:42:15,058 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95106.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:42:23,459 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8918, 1.9930, 2.1431, 2.7474, 1.9447, 2.5817, 2.3527, 1.9545], + device='cuda:2'), covar=tensor([0.3914, 0.3433, 0.1659, 0.2014, 0.3675, 0.1747, 0.3919, 0.3023], + device='cuda:2'), in_proj_covar=tensor([0.0826, 0.0863, 0.0667, 0.0902, 0.0812, 0.0752, 0.0808, 0.0735], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:42:24,134 INFO [train.py:903] (2/4) Epoch 14, batch 6350, loss[loss=0.224, simple_loss=0.3051, pruned_loss=0.07149, over 19783.00 frames. 
], tot_loss[loss=0.2252, simple_loss=0.3008, pruned_loss=0.07482, over 3814737.23 frames. ], batch size: 56, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:42:34,694 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95123.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:43:23,508 INFO [train.py:903] (2/4) Epoch 14, batch 6400, loss[loss=0.2062, simple_loss=0.2892, pruned_loss=0.06157, over 19610.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.301, pruned_loss=0.07464, over 3818859.37 frames. ], batch size: 61, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:43:52,813 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 5.689e+02 7.116e+02 8.755e+02 2.889e+03, threshold=1.423e+03, percent-clipped=3.0 +2023-04-02 03:44:23,636 INFO [train.py:903] (2/4) Epoch 14, batch 6450, loss[loss=0.2386, simple_loss=0.3169, pruned_loss=0.0802, over 18982.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3015, pruned_loss=0.07485, over 3820138.33 frames. ], batch size: 75, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:44:51,721 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7136, 1.4645, 1.5811, 1.8327, 3.2784, 1.1662, 2.3274, 3.7467], + device='cuda:2'), covar=tensor([0.0440, 0.2529, 0.2537, 0.1536, 0.0680, 0.2391, 0.1232, 0.0221], + device='cuda:2'), in_proj_covar=tensor([0.0374, 0.0348, 0.0366, 0.0328, 0.0355, 0.0336, 0.0346, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:45:09,459 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 03:45:25,847 INFO [train.py:903] (2/4) Epoch 14, batch 6500, loss[loss=0.2323, simple_loss=0.3108, pruned_loss=0.07694, over 19291.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2992, pruned_loss=0.07342, over 3820926.17 frames. ], batch size: 66, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:32,333 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 03:45:43,524 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:52,717 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:54,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.245e+02 6.559e+02 8.783e+02 2.152e+03, threshold=1.312e+03, percent-clipped=6.0 +2023-04-02 03:45:57,565 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 03:46:27,867 INFO [train.py:903] (2/4) Epoch 14, batch 6550, loss[loss=0.2194, simple_loss=0.3034, pruned_loss=0.06767, over 19505.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2986, pruned_loss=0.07274, over 3821843.45 frames. 
], batch size: 64, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:47:20,339 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95357.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:47:27,252 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8022, 1.3149, 1.4148, 1.6383, 3.3584, 1.1510, 2.2133, 3.7527], + device='cuda:2'), covar=tensor([0.0452, 0.2774, 0.2967, 0.1728, 0.0734, 0.2528, 0.1392, 0.0240], + device='cuda:2'), in_proj_covar=tensor([0.0376, 0.0348, 0.0368, 0.0328, 0.0359, 0.0338, 0.0347, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:47:28,135 INFO [train.py:903] (2/4) Epoch 14, batch 6600, loss[loss=0.2141, simple_loss=0.3022, pruned_loss=0.06299, over 19782.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2993, pruned_loss=0.07319, over 3827429.60 frames. ], batch size: 56, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:47:57,393 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.401e+02 5.166e+02 6.061e+02 7.266e+02 1.890e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 03:48:17,690 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9438, 2.0463, 2.2383, 2.6845, 1.9169, 2.5988, 2.4445, 2.0613], + device='cuda:2'), covar=tensor([0.4215, 0.3838, 0.1805, 0.2236, 0.4226, 0.1902, 0.4311, 0.3271], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0865, 0.0665, 0.0901, 0.0813, 0.0748, 0.0808, 0.0733], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:48:28,438 INFO [train.py:903] (2/4) Epoch 14, batch 6650, loss[loss=0.2282, simple_loss=0.321, pruned_loss=0.0677, over 19680.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2999, pruned_loss=0.07359, over 3822775.38 frames. ], batch size: 58, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:29,375 INFO [train.py:903] (2/4) Epoch 14, batch 6700, loss[loss=0.2209, simple_loss=0.3046, pruned_loss=0.06858, over 19674.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3008, pruned_loss=0.07423, over 3822437.56 frames. ], batch size: 55, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:33,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95467.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:49:57,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.267e+02 5.901e+02 8.158e+02 1.902e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-02 03:50:25,745 INFO [train.py:903] (2/4) Epoch 14, batch 6750, loss[loss=0.2385, simple_loss=0.315, pruned_loss=0.08104, over 19644.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3019, pruned_loss=0.07516, over 3821735.92 frames. ], batch size: 55, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:21,228 INFO [train.py:903] (2/4) Epoch 14, batch 6800, loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.06149, over 19672.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3018, pruned_loss=0.07524, over 3830039.43 frames. 
], batch size: 53, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:36,066 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2977, 2.8984, 2.2954, 2.3274, 2.0467, 2.5138, 1.0509, 2.0484], + device='cuda:2'), covar=tensor([0.0562, 0.0513, 0.0572, 0.0899, 0.1017, 0.0992, 0.1090, 0.0944], + device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0343, 0.0336, 0.0366, 0.0439, 0.0365, 0.0318, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:51:41,549 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95582.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:51:46,735 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.198e+02 6.166e+02 8.008e+02 1.508e+03, threshold=1.233e+03, percent-clipped=6.0 +2023-04-02 03:52:06,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 03:52:06,881 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 03:52:10,201 INFO [train.py:903] (2/4) Epoch 15, batch 0, loss[loss=0.2096, simple_loss=0.2835, pruned_loss=0.06791, over 19595.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2835, pruned_loss=0.06791, over 19595.00 frames. ], batch size: 50, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:52:10,202 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 03:52:21,745 INFO [train.py:937] (2/4) Epoch 15, validation: loss=0.1744, simple_loss=0.2751, pruned_loss=0.03681, over 944034.00 frames. +2023-04-02 03:52:21,745 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 03:52:28,921 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2830, 1.3202, 1.4618, 1.4695, 2.2455, 2.0430, 2.2522, 0.8050], + device='cuda:2'), covar=tensor([0.2411, 0.4112, 0.2555, 0.1899, 0.1398, 0.2018, 0.1404, 0.4203], + device='cuda:2'), in_proj_covar=tensor([0.0503, 0.0592, 0.0642, 0.0452, 0.0602, 0.0507, 0.0650, 0.0510], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 03:52:33,143 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 03:52:58,945 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:08,242 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:14,679 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:22,148 INFO [train.py:903] (2/4) Epoch 15, batch 50, loss[loss=0.194, simple_loss=0.2653, pruned_loss=0.06136, over 19368.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3026, pruned_loss=0.07686, over 851528.46 frames. 
], batch size: 47, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:53:25,967 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1014, 1.2434, 1.4606, 1.3669, 2.7110, 1.0330, 2.0848, 2.9813], + device='cuda:2'), covar=tensor([0.0537, 0.2729, 0.2708, 0.1768, 0.0769, 0.2275, 0.1102, 0.0344], + device='cuda:2'), in_proj_covar=tensor([0.0376, 0.0349, 0.0369, 0.0328, 0.0358, 0.0337, 0.0346, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:53:58,782 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 03:54:20,259 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 5.472e+02 6.461e+02 8.212e+02 1.912e+03, threshold=1.292e+03, percent-clipped=7.0 +2023-04-02 03:54:26,853 INFO [train.py:903] (2/4) Epoch 15, batch 100, loss[loss=0.2395, simple_loss=0.3213, pruned_loss=0.0789, over 18329.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2996, pruned_loss=0.07401, over 1508601.30 frames. ], batch size: 83, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:54:29,769 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1899, 1.8613, 1.7180, 2.1548, 1.9977, 1.8782, 1.6436, 2.0916], + device='cuda:2'), covar=tensor([0.0901, 0.1596, 0.1497, 0.1018, 0.1311, 0.0485, 0.1379, 0.0678], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0355, 0.0299, 0.0246, 0.0299, 0.0247, 0.0289, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:54:37,480 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 03:54:37,595 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95701.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:22,860 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:28,254 INFO [train.py:903] (2/4) Epoch 15, batch 150, loss[loss=0.2, simple_loss=0.2827, pruned_loss=0.0586, over 19848.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2992, pruned_loss=0.07406, over 2027510.11 frames. ], batch size: 52, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:55:32,092 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:56:23,976 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 5.351e+02 6.322e+02 7.452e+02 1.833e+03, threshold=1.264e+03, percent-clipped=1.0 +2023-04-02 03:56:27,292 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 03:56:28,493 INFO [train.py:903] (2/4) Epoch 15, batch 200, loss[loss=0.2749, simple_loss=0.3402, pruned_loss=0.1048, over 17124.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3005, pruned_loss=0.07496, over 2422042.59 frames. 
], batch size: 101, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:56:51,125 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8776, 1.2158, 1.5250, 0.4813, 1.9340, 2.4342, 2.1187, 2.6187], + device='cuda:2'), covar=tensor([0.1521, 0.3426, 0.3009, 0.2463, 0.0586, 0.0261, 0.0308, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0302, 0.0328, 0.0252, 0.0223, 0.0166, 0.0205, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:56:59,705 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:57:15,625 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:24,910 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95838.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:29,640 INFO [train.py:903] (2/4) Epoch 15, batch 250, loss[loss=0.2861, simple_loss=0.3383, pruned_loss=0.117, over 13589.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3001, pruned_loss=0.07476, over 2717563.30 frames. ], batch size: 136, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:57:39,965 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7559, 4.3093, 2.5357, 3.7351, 1.1552, 4.0925, 4.0600, 4.1440], + device='cuda:2'), covar=tensor([0.0602, 0.0997, 0.2314, 0.0891, 0.4017, 0.0749, 0.0909, 0.1206], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0377, 0.0457, 0.0322, 0.0391, 0.0387, 0.0381, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 03:57:56,093 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95863.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:57:58,220 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:58:24,516 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.277e+02 6.948e+02 9.039e+02 3.101e+03, threshold=1.390e+03, percent-clipped=9.0 +2023-04-02 03:58:30,111 INFO [train.py:903] (2/4) Epoch 15, batch 300, loss[loss=0.215, simple_loss=0.2959, pruned_loss=0.06707, over 19736.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3015, pruned_loss=0.07484, over 2974829.61 frames. ], batch size: 63, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:11,953 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2679, 1.2507, 1.3873, 1.3883, 1.8041, 1.7705, 1.7191, 0.5363], + device='cuda:2'), covar=tensor([0.2001, 0.3621, 0.2188, 0.1654, 0.1316, 0.1908, 0.1217, 0.4027], + device='cuda:2'), in_proj_covar=tensor([0.0498, 0.0587, 0.0636, 0.0448, 0.0598, 0.0503, 0.0643, 0.0506], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 03:59:32,841 INFO [train.py:903] (2/4) Epoch 15, batch 350, loss[loss=0.2199, simple_loss=0.2811, pruned_loss=0.07932, over 19091.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.302, pruned_loss=0.07522, over 3147844.00 frames. ], batch size: 42, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:33,868 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 04:00:17,459 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:24,009 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0569, 1.1221, 1.5732, 0.8517, 2.2491, 3.0240, 2.7543, 3.2417], + device='cuda:2'), covar=tensor([0.1620, 0.3678, 0.3160, 0.2495, 0.0564, 0.0181, 0.0228, 0.0223], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0304, 0.0332, 0.0254, 0.0224, 0.0168, 0.0207, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:00:28,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.018e+02 5.906e+02 6.897e+02 1.495e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-04-02 04:00:32,778 INFO [train.py:903] (2/4) Epoch 15, batch 400, loss[loss=0.2514, simple_loss=0.3307, pruned_loss=0.08605, over 19605.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07402, over 3313893.23 frames. ], batch size: 61, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 04:00:34,360 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:39,914 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95998.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:44,445 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:53,875 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9123, 1.1752, 1.6638, 1.1328, 2.5070, 3.4887, 3.2587, 3.7175], + device='cuda:2'), covar=tensor([0.1843, 0.3787, 0.3232, 0.2331, 0.0585, 0.0179, 0.0199, 0.0196], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0305, 0.0332, 0.0255, 0.0225, 0.0168, 0.0207, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:01:04,809 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:16,051 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:33,844 INFO [train.py:903] (2/4) Epoch 15, batch 450, loss[loss=0.255, simple_loss=0.3324, pruned_loss=0.08878, over 19322.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2999, pruned_loss=0.07371, over 3426987.99 frames. ], batch size: 66, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:07,791 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 04:02:07,832 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 04:02:12,897 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:31,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.837e+02 5.995e+02 7.453e+02 1.580e+03, threshold=1.199e+03, percent-clipped=6.0 +2023-04-02 04:02:36,671 INFO [train.py:903] (2/4) Epoch 15, batch 500, loss[loss=0.218, simple_loss=0.301, pruned_loss=0.06752, over 19457.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2992, pruned_loss=0.07326, over 3502646.42 frames. 
], batch size: 64, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:39,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:43,800 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:44,837 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:03:38,924 INFO [train.py:903] (2/4) Epoch 15, batch 550, loss[loss=0.2614, simple_loss=0.3348, pruned_loss=0.09401, over 19597.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2987, pruned_loss=0.07352, over 3563828.34 frames. ], batch size: 61, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:04:18,079 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96174.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:04:29,224 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:04:35,630 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.342e+02 6.491e+02 8.104e+02 1.503e+03, threshold=1.298e+03, percent-clipped=3.0 +2023-04-02 04:04:40,041 INFO [train.py:903] (2/4) Epoch 15, batch 600, loss[loss=0.2285, simple_loss=0.3129, pruned_loss=0.07205, over 19607.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.299, pruned_loss=0.07395, over 3622391.18 frames. ], batch size: 57, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:05:00,821 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:05:20,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 04:05:35,570 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3046, 2.4069, 2.5124, 3.2451, 2.3305, 3.1533, 2.7597, 2.3287], + device='cuda:2'), covar=tensor([0.3834, 0.3293, 0.1477, 0.2053, 0.3786, 0.1621, 0.3716, 0.2769], + device='cuda:2'), in_proj_covar=tensor([0.0828, 0.0867, 0.0667, 0.0899, 0.0814, 0.0747, 0.0806, 0.0731], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 04:05:43,313 INFO [train.py:903] (2/4) Epoch 15, batch 650, loss[loss=0.1968, simple_loss=0.2719, pruned_loss=0.06082, over 19423.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2982, pruned_loss=0.07377, over 3667109.46 frames. ], batch size: 48, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:06:41,557 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.105e+02 6.385e+02 8.770e+02 1.706e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 04:06:42,958 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96289.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:06:46,048 INFO [train.py:903] (2/4) Epoch 15, batch 700, loss[loss=0.2322, simple_loss=0.2975, pruned_loss=0.08345, over 19767.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2979, pruned_loss=0.07379, over 3708710.45 frames. ], batch size: 47, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:12,181 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.47 vs. 
limit=5.0 +2023-04-02 04:07:19,746 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7148, 1.8101, 2.0180, 2.4035, 1.6688, 2.2653, 2.1691, 1.8668], + device='cuda:2'), covar=tensor([0.3795, 0.3393, 0.1656, 0.1911, 0.3573, 0.1705, 0.4105, 0.3043], + device='cuda:2'), in_proj_covar=tensor([0.0830, 0.0868, 0.0668, 0.0902, 0.0816, 0.0749, 0.0807, 0.0732], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 04:07:26,414 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,392 INFO [train.py:903] (2/4) Epoch 15, batch 750, loss[loss=0.2089, simple_loss=0.2923, pruned_loss=0.06279, over 19683.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2986, pruned_loss=0.07361, over 3739888.24 frames. ], batch size: 60, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:47,545 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,657 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:57,749 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:13,626 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 04:08:29,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:44,439 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.276e+02 6.207e+02 7.536e+02 1.572e+03, threshold=1.241e+03, percent-clipped=2.0 +2023-04-02 04:08:49,801 INFO [train.py:903] (2/4) Epoch 15, batch 800, loss[loss=0.2272, simple_loss=0.3013, pruned_loss=0.07657, over 17433.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2992, pruned_loss=0.07362, over 3746467.98 frames. ], batch size: 101, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:04,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 04:09:09,594 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:09:17,617 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5204, 4.0266, 4.1910, 4.1837, 1.4974, 3.9676, 3.4342, 3.8899], + device='cuda:2'), covar=tensor([0.1523, 0.0841, 0.0577, 0.0625, 0.5796, 0.0724, 0.0636, 0.1082], + device='cuda:2'), in_proj_covar=tensor([0.0728, 0.0651, 0.0866, 0.0743, 0.0769, 0.0603, 0.0521, 0.0795], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 04:09:50,616 INFO [train.py:903] (2/4) Epoch 15, batch 850, loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05893, over 19667.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3004, pruned_loss=0.07433, over 3749271.07 frames. 
], batch size: 53, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:51,934 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:10,291 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:41,191 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 04:10:47,718 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.238e+02 6.465e+02 7.879e+02 1.664e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 04:10:52,495 INFO [train.py:903] (2/4) Epoch 15, batch 900, loss[loss=0.2989, simple_loss=0.3537, pruned_loss=0.1221, over 13481.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3007, pruned_loss=0.07466, over 3762521.58 frames. ], batch size: 136, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:36,541 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:11:55,052 INFO [train.py:903] (2/4) Epoch 15, batch 950, loss[loss=0.2072, simple_loss=0.3013, pruned_loss=0.0565, over 19672.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2984, pruned_loss=0.07306, over 3785432.49 frames. ], batch size: 58, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:56,226 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 04:11:59,947 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96545.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:12:06,712 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0422, 1.9676, 1.7361, 1.6344, 1.5115, 1.6159, 0.3394, 0.8371], + device='cuda:2'), covar=tensor([0.0453, 0.0470, 0.0322, 0.0467, 0.0957, 0.0576, 0.0970, 0.0825], + device='cuda:2'), in_proj_covar=tensor([0.0344, 0.0342, 0.0336, 0.0365, 0.0440, 0.0365, 0.0320, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:12:14,157 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:30,405 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96570.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:12:41,734 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:52,446 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.225e+02 6.143e+02 7.839e+02 1.754e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-02 04:12:57,217 INFO [train.py:903] (2/4) Epoch 15, batch 1000, loss[loss=0.2162, simple_loss=0.2819, pruned_loss=0.07521, over 19774.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2986, pruned_loss=0.07317, over 3801220.05 frames. ], batch size: 48, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:13,538 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:13:51,326 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 04:13:59,278 INFO [train.py:903] (2/4) Epoch 15, batch 1050, loss[loss=0.211, simple_loss=0.2948, pruned_loss=0.06358, over 19646.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.298, pruned_loss=0.07267, over 3809712.13 frames. ], batch size: 58, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:59,660 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:28,094 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.9381, 5.3594, 2.8283, 4.5819, 1.1111, 5.3919, 5.2963, 5.4299], + device='cuda:2'), covar=tensor([0.0406, 0.0907, 0.2085, 0.0647, 0.3895, 0.0514, 0.0635, 0.0856], + device='cuda:2'), in_proj_covar=tensor([0.0453, 0.0380, 0.0456, 0.0323, 0.0389, 0.0388, 0.0381, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 04:14:31,318 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 04:14:53,804 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:57,022 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.831e+02 5.170e+02 6.505e+02 8.376e+02 1.590e+03, threshold=1.301e+03, percent-clipped=4.0 +2023-04-02 04:15:01,344 INFO [train.py:903] (2/4) Epoch 15, batch 1100, loss[loss=0.1908, simple_loss=0.2737, pruned_loss=0.05391, over 19721.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2975, pruned_loss=0.07258, over 3811906.16 frames. ], batch size: 51, lr: 5.60e-03, grad_scale: 4.0 +2023-04-02 04:15:28,129 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:15:58,791 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:02,837 INFO [train.py:903] (2/4) Epoch 15, batch 1150, loss[loss=0.2493, simple_loss=0.3205, pruned_loss=0.08903, over 19526.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2971, pruned_loss=0.07229, over 3821683.41 frames. ], batch size: 54, lr: 5.59e-03, grad_scale: 4.0 +2023-04-02 04:16:08,676 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8021, 4.0858, 4.5728, 4.6161, 1.7200, 4.2988, 3.6175, 3.9624], + device='cuda:2'), covar=tensor([0.2269, 0.1362, 0.0938, 0.1100, 0.7007, 0.1304, 0.1075, 0.2054], + device='cuda:2'), in_proj_covar=tensor([0.0726, 0.0656, 0.0861, 0.0738, 0.0773, 0.0604, 0.0520, 0.0795], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 04:16:16,222 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:57,089 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-02 04:17:00,106 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 04:17:01,611 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.122e+02 6.460e+02 8.216e+02 1.619e+03, threshold=1.292e+03, percent-clipped=5.0 +2023-04-02 04:17:06,204 INFO [train.py:903] (2/4) Epoch 15, batch 1200, loss[loss=0.2459, simple_loss=0.3179, pruned_loss=0.08692, over 19681.00 frames. 
], tot_loss[loss=0.2221, simple_loss=0.2983, pruned_loss=0.07297, over 3818243.36 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:17:08,679 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:17,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:32,653 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:39,085 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 04:18:03,087 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:18:08,013 INFO [train.py:903] (2/4) Epoch 15, batch 1250, loss[loss=0.2483, simple_loss=0.3189, pruned_loss=0.08885, over 19730.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2984, pruned_loss=0.07329, over 3815457.78 frames. ], batch size: 63, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:18:38,336 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:05,686 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 5.442e+02 6.897e+02 8.528e+02 1.967e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 04:19:09,079 INFO [train.py:903] (2/4) Epoch 15, batch 1300, loss[loss=0.2142, simple_loss=0.2956, pruned_loss=0.06639, over 19600.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3, pruned_loss=0.074, over 3819135.16 frames. ], batch size: 57, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:19:17,709 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:48,595 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:20:12,488 INFO [train.py:903] (2/4) Epoch 15, batch 1350, loss[loss=0.1955, simple_loss=0.2704, pruned_loss=0.06029, over 19588.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2994, pruned_loss=0.07406, over 3822075.82 frames. ], batch size: 52, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:11,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.520e+02 5.515e+02 7.272e+02 1.782e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-04-02 04:21:15,868 INFO [train.py:903] (2/4) Epoch 15, batch 1400, loss[loss=0.2206, simple_loss=0.2816, pruned_loss=0.07986, over 19750.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2983, pruned_loss=0.07343, over 3822931.15 frames. ], batch size: 46, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:49,721 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:22:19,075 INFO [train.py:903] (2/4) Epoch 15, batch 1450, loss[loss=0.2171, simple_loss=0.3102, pruned_loss=0.06203, over 19301.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2988, pruned_loss=0.07334, over 3830414.01 frames. ], batch size: 66, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:22:20,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 04:22:38,955 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:09,856 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:18,578 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.198e+02 6.417e+02 9.294e+02 1.968e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 04:23:21,935 INFO [train.py:903] (2/4) Epoch 15, batch 1500, loss[loss=0.1859, simple_loss=0.2648, pruned_loss=0.05345, over 19738.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2991, pruned_loss=0.07384, over 3805991.12 frames. ], batch size: 51, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:23:32,793 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1387, 4.1721, 4.7470, 4.7604, 2.6912, 4.4253, 4.0691, 4.4467], + device='cuda:2'), covar=tensor([0.1174, 0.3225, 0.0559, 0.0522, 0.4164, 0.0818, 0.0519, 0.0998], + device='cuda:2'), in_proj_covar=tensor([0.0714, 0.0644, 0.0853, 0.0731, 0.0763, 0.0597, 0.0512, 0.0786], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 04:24:01,285 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:20,466 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:24,794 INFO [train.py:903] (2/4) Epoch 15, batch 1550, loss[loss=0.2503, simple_loss=0.3144, pruned_loss=0.0931, over 19591.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2995, pruned_loss=0.07405, over 3802906.45 frames. ], batch size: 52, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:24:31,910 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:25:16,320 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 04:25:22,061 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-02 04:25:22,553 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.223e+02 6.490e+02 8.468e+02 1.572e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-02 04:25:26,787 INFO [train.py:903] (2/4) Epoch 15, batch 1600, loss[loss=0.2368, simple_loss=0.3138, pruned_loss=0.07991, over 18070.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3009, pruned_loss=0.07453, over 3810140.21 frames. 
], batch size: 83, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:25:31,638 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3057, 1.3225, 1.5215, 1.4616, 2.1449, 2.0348, 2.2275, 0.7985], + device='cuda:2'), covar=tensor([0.2287, 0.4078, 0.2541, 0.1941, 0.1527, 0.2027, 0.1408, 0.4084], + device='cuda:2'), in_proj_covar=tensor([0.0507, 0.0592, 0.0644, 0.0451, 0.0604, 0.0506, 0.0648, 0.0509], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:25:40,870 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2688, 2.3631, 2.5670, 3.1562, 2.2548, 3.0347, 2.8637, 2.4344], + device='cuda:2'), covar=tensor([0.3990, 0.3633, 0.1519, 0.2169, 0.4125, 0.1772, 0.3691, 0.2768], + device='cuda:2'), in_proj_covar=tensor([0.0838, 0.0874, 0.0676, 0.0908, 0.0822, 0.0756, 0.0810, 0.0740], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 04:25:51,599 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 04:26:06,971 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 04:26:28,830 INFO [train.py:903] (2/4) Epoch 15, batch 1650, loss[loss=0.2057, simple_loss=0.2892, pruned_loss=0.06114, over 19382.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3001, pruned_loss=0.07369, over 3829608.76 frames. ], batch size: 48, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:26:42,613 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:03,897 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1908, 1.2822, 1.2442, 1.0811, 1.0913, 1.1098, 0.1076, 0.4541], + device='cuda:2'), covar=tensor([0.0561, 0.0526, 0.0345, 0.0408, 0.1073, 0.0468, 0.1014, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0347, 0.0344, 0.0373, 0.0447, 0.0374, 0.0325, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:27:14,805 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:22,948 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8209, 3.9703, 4.3743, 4.3995, 2.5204, 4.0835, 3.7399, 4.1115], + device='cuda:2'), covar=tensor([0.1227, 0.2528, 0.0572, 0.0563, 0.4203, 0.0982, 0.0559, 0.0918], + device='cuda:2'), in_proj_covar=tensor([0.0717, 0.0647, 0.0855, 0.0735, 0.0763, 0.0599, 0.0516, 0.0788], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 04:27:27,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.574e+02 5.168e+02 6.563e+02 8.938e+02 1.305e+03, threshold=1.313e+03, percent-clipped=1.0 +2023-04-02 04:27:30,914 INFO [train.py:903] (2/4) Epoch 15, batch 1700, loss[loss=0.2557, simple_loss=0.3234, pruned_loss=0.09402, over 19350.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3005, pruned_loss=0.07417, over 3819666.18 frames. 
], batch size: 66, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:27:39,845 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3785, 1.5105, 1.7442, 1.6345, 2.5481, 2.1925, 2.5870, 0.9550], + device='cuda:2'), covar=tensor([0.2488, 0.4126, 0.2630, 0.1948, 0.1371, 0.2119, 0.1331, 0.4231], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0590, 0.0641, 0.0451, 0.0601, 0.0505, 0.0645, 0.0507], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:28:11,283 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 04:28:32,701 INFO [train.py:903] (2/4) Epoch 15, batch 1750, loss[loss=0.2113, simple_loss=0.2953, pruned_loss=0.06361, over 19669.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.07288, over 3816881.13 frames. ], batch size: 55, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:28:55,635 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:28:57,859 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:01,626 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:30,673 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.096e+02 6.393e+02 8.037e+02 1.428e+03, threshold=1.279e+03, percent-clipped=5.0 +2023-04-02 04:29:33,909 INFO [train.py:903] (2/4) Epoch 15, batch 1800, loss[loss=0.2257, simple_loss=0.3042, pruned_loss=0.07362, over 19611.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2994, pruned_loss=0.07355, over 3809720.11 frames. ], batch size: 57, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:29:34,222 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:30:32,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 04:30:36,842 INFO [train.py:903] (2/4) Epoch 15, batch 1850, loss[loss=0.2425, simple_loss=0.3212, pruned_loss=0.08195, over 18765.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2979, pruned_loss=0.0728, over 3801377.60 frames. ], batch size: 74, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:10,813 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-02 04:31:21,359 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:31:21,397 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8845, 2.2155, 2.1191, 2.7555, 2.4550, 2.3415, 2.1584, 2.8656], + device='cuda:2'), covar=tensor([0.0768, 0.1597, 0.1350, 0.0900, 0.1227, 0.0447, 0.1204, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0350, 0.0297, 0.0243, 0.0294, 0.0245, 0.0287, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 04:31:35,889 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.535e+02 5.674e+02 7.772e+02 1.771e+03, threshold=1.135e+03, percent-clipped=3.0 +2023-04-02 04:31:39,239 INFO [train.py:903] (2/4) Epoch 15, batch 1900, loss[loss=0.2457, simple_loss=0.3279, pruned_loss=0.08174, over 19410.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2978, pruned_loss=0.07214, over 3818102.94 frames. ], batch size: 70, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:46,070 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 04:31:58,015 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 04:32:00,968 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97509.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:02,873 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 04:32:28,318 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 04:32:32,270 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:40,697 INFO [train.py:903] (2/4) Epoch 15, batch 1950, loss[loss=0.2433, simple_loss=0.3188, pruned_loss=0.0839, over 19780.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2967, pruned_loss=0.07133, over 3819118.63 frames. ], batch size: 56, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:33:19,864 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:33:28,685 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 04:33:39,668 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 4.980e+02 6.456e+02 8.636e+02 2.349e+03, threshold=1.291e+03, percent-clipped=8.0 +2023-04-02 04:33:43,285 INFO [train.py:903] (2/4) Epoch 15, batch 2000, loss[loss=0.2195, simple_loss=0.3057, pruned_loss=0.06664, over 19718.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2978, pruned_loss=0.07206, over 3824126.39 frames. ], batch size: 59, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:34:03,649 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-02 04:34:20,866 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:34:42,340 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-02 04:34:47,029 INFO [train.py:903] (2/4) Epoch 15, batch 2050, loss[loss=0.2157, simple_loss=0.3065, pruned_loss=0.06249, over 19639.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2986, pruned_loss=0.07256, over 3801397.10 frames. ], batch size: 57, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:01,881 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 04:35:03,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 04:35:23,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 04:35:47,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.998e+02 6.604e+02 7.928e+02 1.987e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 04:35:50,612 INFO [train.py:903] (2/4) Epoch 15, batch 2100, loss[loss=0.1857, simple_loss=0.2736, pruned_loss=0.04888, over 19523.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2972, pruned_loss=0.07179, over 3818521.64 frames. ], batch size: 54, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:50,924 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97692.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:36:04,938 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:10,965 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 04:36:11,609 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:20,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 04:36:41,936 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9138, 2.4355, 2.5193, 2.7596, 2.5971, 2.5001, 2.1139, 3.0337], + device='cuda:2'), covar=tensor([0.0726, 0.1575, 0.1159, 0.1010, 0.1257, 0.0420, 0.1273, 0.0525], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0353, 0.0298, 0.0245, 0.0296, 0.0247, 0.0290, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 04:36:41,976 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:43,975 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 04:36:45,264 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:46,635 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:51,856 INFO [train.py:903] (2/4) Epoch 15, batch 2150, loss[loss=0.2039, simple_loss=0.294, pruned_loss=0.05691, over 19794.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2967, pruned_loss=0.07179, over 3836097.73 frames. ], batch size: 56, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:36:53,797 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-04-02 04:37:12,264 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:37:49,743 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.862e+02 6.186e+02 7.185e+02 1.323e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 04:37:54,004 INFO [train.py:903] (2/4) Epoch 15, batch 2200, loss[loss=0.2879, simple_loss=0.3443, pruned_loss=0.1158, over 14233.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07201, over 3827234.92 frames. ], batch size: 136, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:38:28,061 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:33,727 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:36,555 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 04:38:57,680 INFO [train.py:903] (2/4) Epoch 15, batch 2250, loss[loss=0.3027, simple_loss=0.3583, pruned_loss=0.1236, over 19650.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2966, pruned_loss=0.07173, over 3837642.00 frames. ], batch size: 60, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:39:09,193 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:39:09,870 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.19 vs. limit=5.0 +2023-04-02 04:39:56,867 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.977e+02 6.292e+02 8.077e+02 1.831e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 04:40:00,318 INFO [train.py:903] (2/4) Epoch 15, batch 2300, loss[loss=0.2053, simple_loss=0.2724, pruned_loss=0.06906, over 19750.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2968, pruned_loss=0.07188, over 3820939.95 frames. ], batch size: 46, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:40:12,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 04:40:16,302 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:21,311 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-02 04:40:29,878 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:41:01,538 INFO [train.py:903] (2/4) Epoch 15, batch 2350, loss[loss=0.1984, simple_loss=0.2673, pruned_loss=0.06478, over 19291.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2964, pruned_loss=0.07151, over 3818188.45 frames. ], batch size: 44, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:41:11,491 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 04:41:44,924 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 04:41:58,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.416e+02 6.244e+02 7.823e+02 1.587e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-04-02 04:41:58,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-02 04:42:02,754 INFO [train.py:903] (2/4) Epoch 15, batch 2400, loss[loss=0.2273, simple_loss=0.3066, pruned_loss=0.07401, over 19500.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2962, pruned_loss=0.07119, over 3824276.07 frames. ], batch size: 64, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:42:04,434 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:36,264 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:53,735 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:58,104 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98036.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:43:05,613 INFO [train.py:903] (2/4) Epoch 15, batch 2450, loss[loss=0.2042, simple_loss=0.291, pruned_loss=0.05874, over 19300.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.297, pruned_loss=0.07179, over 3824504.45 frames. ], batch size: 66, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:43:47,619 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6548, 1.5390, 1.5175, 2.0844, 1.6107, 2.1099, 2.0149, 1.8606], + device='cuda:2'), covar=tensor([0.0838, 0.0932, 0.0980, 0.0778, 0.0872, 0.0647, 0.0837, 0.0617], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0220, 0.0222, 0.0241, 0.0226, 0.0206, 0.0190, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 04:43:47,678 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98075.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:43:55,098 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:05,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.305e+02 6.204e+02 8.091e+02 1.870e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 04:44:09,725 INFO [train.py:903] (2/4) Epoch 15, batch 2500, loss[loss=0.1672, simple_loss=0.2444, pruned_loss=0.04502, over 19785.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2969, pruned_loss=0.07172, over 3816996.18 frames. ], batch size: 48, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:44:19,662 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98100.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:25,542 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98105.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:28,003 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:51,232 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98124.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:00,674 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:01,673 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 04:45:02,554 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9858, 4.3696, 4.7480, 4.6842, 1.6432, 4.4394, 3.7712, 4.3918], + device='cuda:2'), covar=tensor([0.1581, 0.0809, 0.0555, 0.0599, 0.5696, 0.0726, 0.0693, 0.1042], + device='cuda:2'), in_proj_covar=tensor([0.0724, 0.0654, 0.0862, 0.0742, 0.0770, 0.0608, 0.0520, 0.0787], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 04:45:12,196 INFO [train.py:903] (2/4) Epoch 15, batch 2550, loss[loss=0.2503, simple_loss=0.3363, pruned_loss=0.08216, over 18275.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2966, pruned_loss=0.0718, over 3817580.81 frames. ], batch size: 83, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:45:23,219 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98151.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:46:07,151 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 04:46:10,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 5.247e+02 6.339e+02 8.638e+02 2.352e+03, threshold=1.268e+03, percent-clipped=5.0 +2023-04-02 04:46:14,034 INFO [train.py:903] (2/4) Epoch 15, batch 2600, loss[loss=0.2178, simple_loss=0.3015, pruned_loss=0.06701, over 19680.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2974, pruned_loss=0.07186, over 3821469.36 frames. ], batch size: 58, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:18,025 INFO [train.py:903] (2/4) Epoch 15, batch 2650, loss[loss=0.2019, simple_loss=0.2801, pruned_loss=0.06183, over 19763.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2963, pruned_loss=0.07152, over 3818211.67 frames. ], batch size: 47, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:28,828 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:47:39,877 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 04:48:17,329 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:48:18,016 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.990e+02 6.118e+02 7.575e+02 1.335e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 04:48:21,601 INFO [train.py:903] (2/4) Epoch 15, batch 2700, loss[loss=0.2271, simple_loss=0.3007, pruned_loss=0.07679, over 19545.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2974, pruned_loss=0.07217, over 3824475.71 frames. 
], batch size: 54, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:48:40,301 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3756, 1.3256, 1.3003, 1.7430, 1.4039, 1.6311, 1.7824, 1.4739], + device='cuda:2'), covar=tensor([0.0898, 0.0980, 0.1113, 0.0771, 0.0802, 0.0818, 0.0787, 0.0766], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0221, 0.0224, 0.0242, 0.0229, 0.0208, 0.0190, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 04:48:47,473 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:49:24,163 INFO [train.py:903] (2/4) Epoch 15, batch 2750, loss[loss=0.2655, simple_loss=0.3249, pruned_loss=0.1031, over 13582.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2979, pruned_loss=0.0726, over 3818182.02 frames. ], batch size: 136, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:49:54,772 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:50:04,428 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0880, 3.2344, 1.7678, 1.5336, 2.8988, 1.3757, 1.3630, 2.0736], + device='cuda:2'), covar=tensor([0.1261, 0.0580, 0.1088, 0.0982, 0.0576, 0.1321, 0.0973, 0.0677], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0304, 0.0322, 0.0248, 0.0238, 0.0325, 0.0290, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 04:50:23,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.173e+02 6.107e+02 7.991e+02 1.431e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-04-02 04:50:27,286 INFO [train.py:903] (2/4) Epoch 15, batch 2800, loss[loss=0.2132, simple_loss=0.2997, pruned_loss=0.06334, over 17915.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2976, pruned_loss=0.07196, over 3825683.80 frames. ], batch size: 83, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:50:48,872 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98407.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:13,900 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:51:18,584 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98432.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:51:30,816 INFO [train.py:903] (2/4) Epoch 15, batch 2850, loss[loss=0.243, simple_loss=0.3198, pruned_loss=0.08307, over 17045.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.07152, over 3831891.03 frames. 
], batch size: 101, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:52:03,923 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:52:27,592 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8282, 1.8834, 2.1282, 2.4363, 1.7277, 2.3308, 2.2715, 1.9932], + device='cuda:2'), covar=tensor([0.3406, 0.3073, 0.1505, 0.1797, 0.3329, 0.1634, 0.3759, 0.2679], + device='cuda:2'), in_proj_covar=tensor([0.0828, 0.0874, 0.0670, 0.0900, 0.0814, 0.0748, 0.0807, 0.0736], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 04:52:28,798 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2067, 1.1999, 1.4337, 1.3599, 1.7784, 1.7578, 1.7517, 0.5284], + device='cuda:2'), covar=tensor([0.2381, 0.4094, 0.2427, 0.1845, 0.1477, 0.2189, 0.1355, 0.4105], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0594, 0.0645, 0.0451, 0.0604, 0.0508, 0.0648, 0.0508], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:52:30,661 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 5.164e+02 6.295e+02 8.927e+02 2.262e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 04:52:34,168 INFO [train.py:903] (2/4) Epoch 15, batch 2900, loss[loss=0.2096, simple_loss=0.2887, pruned_loss=0.06521, over 19336.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.0719, over 3825779.54 frames. ], batch size: 66, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:52:35,440 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 04:53:36,707 INFO [train.py:903] (2/4) Epoch 15, batch 2950, loss[loss=0.1975, simple_loss=0.2794, pruned_loss=0.05779, over 19769.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2968, pruned_loss=0.07173, over 3803172.54 frames. ], batch size: 56, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:00,604 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:29,194 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:35,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.043e+02 6.244e+02 8.249e+02 2.456e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 04:54:38,824 INFO [train.py:903] (2/4) Epoch 15, batch 3000, loss[loss=0.227, simple_loss=0.3046, pruned_loss=0.07468, over 19759.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2978, pruned_loss=0.07272, over 3793382.75 frames. ], batch size: 54, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:38,824 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 04:54:51,354 INFO [train.py:937] (2/4) Epoch 15, validation: loss=0.1735, simple_loss=0.2738, pruned_loss=0.0366, over 944034.00 frames. +2023-04-02 04:54:51,355 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 04:54:53,546 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 04:55:08,287 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0959, 1.7790, 1.3888, 1.1322, 1.5877, 1.0972, 1.0522, 1.5524], + device='cuda:2'), covar=tensor([0.0776, 0.0802, 0.0968, 0.0746, 0.0494, 0.1230, 0.0661, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0292, 0.0305, 0.0325, 0.0249, 0.0238, 0.0329, 0.0292, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 04:55:28,689 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:55:52,976 INFO [train.py:903] (2/4) Epoch 15, batch 3050, loss[loss=0.2065, simple_loss=0.2936, pruned_loss=0.05971, over 19674.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2981, pruned_loss=0.07292, over 3776585.57 frames. ], batch size: 55, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:55:57,907 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:56:51,966 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.607e+02 7.054e+02 9.171e+02 2.046e+03, threshold=1.411e+03, percent-clipped=6.0 +2023-04-02 04:56:54,315 INFO [train.py:903] (2/4) Epoch 15, batch 3100, loss[loss=0.2385, simple_loss=0.3189, pruned_loss=0.07908, over 19572.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07218, over 3796681.64 frames. ], batch size: 52, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:57:58,290 INFO [train.py:903] (2/4) Epoch 15, batch 3150, loss[loss=0.2105, simple_loss=0.2954, pruned_loss=0.06286, over 19647.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07228, over 3810985.68 frames. ], batch size: 58, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:58:26,303 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 04:58:31,704 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 04:58:34,633 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:58:39,669 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3480, 3.0815, 2.2187, 2.2228, 2.0947, 2.5810, 1.0872, 2.1550], + device='cuda:2'), covar=tensor([0.0570, 0.0490, 0.0660, 0.1014, 0.1044, 0.1083, 0.1198, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0342, 0.0340, 0.0337, 0.0369, 0.0441, 0.0365, 0.0319, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 04:58:58,670 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.220e+02 6.366e+02 8.908e+02 1.802e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-02 04:59:01,103 INFO [train.py:903] (2/4) Epoch 15, batch 3200, loss[loss=0.2062, simple_loss=0.2833, pruned_loss=0.06459, over 19741.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2969, pruned_loss=0.07135, over 3822369.80 frames. ], batch size: 51, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:59:59,409 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:02,402 INFO [train.py:903] (2/4) Epoch 15, batch 3250, loss[loss=0.219, simple_loss=0.3029, pruned_loss=0.06748, over 19488.00 frames. 
], tot_loss[loss=0.221, simple_loss=0.2978, pruned_loss=0.07207, over 3798079.82 frames. ], batch size: 49, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:00:29,961 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:56,793 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:59,973 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 4.885e+02 6.090e+02 7.322e+02 1.846e+03, threshold=1.218e+03, percent-clipped=3.0 +2023-04-02 05:01:02,395 INFO [train.py:903] (2/4) Epoch 15, batch 3300, loss[loss=0.1957, simple_loss=0.285, pruned_loss=0.05323, over 18009.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07221, over 3802000.99 frames. ], batch size: 83, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:01:08,207 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 05:01:20,962 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:25,250 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98908.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:30,600 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:41,431 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9916, 2.0956, 2.2823, 2.6458, 1.8587, 2.5004, 2.5073, 2.1934], + device='cuda:2'), covar=tensor([0.3840, 0.3462, 0.1637, 0.1970, 0.3742, 0.1771, 0.3702, 0.2745], + device='cuda:2'), in_proj_covar=tensor([0.0831, 0.0873, 0.0669, 0.0903, 0.0815, 0.0748, 0.0807, 0.0735], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 05:02:07,325 INFO [train.py:903] (2/4) Epoch 15, batch 3350, loss[loss=0.1917, simple_loss=0.2738, pruned_loss=0.0548, over 19695.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2969, pruned_loss=0.07188, over 3822660.86 frames. ], batch size: 53, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:02:09,382 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 05:03:06,853 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98989.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:03:07,569 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.628e+02 7.317e+02 9.748e+02 2.071e+03, threshold=1.463e+03, percent-clipped=8.0 +2023-04-02 05:03:09,829 INFO [train.py:903] (2/4) Epoch 15, batch 3400, loss[loss=0.2231, simple_loss=0.3023, pruned_loss=0.07194, over 18113.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2982, pruned_loss=0.07233, over 3819010.14 frames. 
], batch size: 83, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:03:44,106 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:04:10,183 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4606, 2.3316, 1.7237, 1.5992, 2.1547, 1.4444, 1.3802, 1.9038], + device='cuda:2'), covar=tensor([0.1065, 0.0732, 0.0988, 0.0705, 0.0482, 0.1080, 0.0727, 0.0534], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0307, 0.0324, 0.0251, 0.0239, 0.0327, 0.0293, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:04:10,829 INFO [train.py:903] (2/4) Epoch 15, batch 3450, loss[loss=0.213, simple_loss=0.2877, pruned_loss=0.0692, over 19855.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2986, pruned_loss=0.07224, over 3834222.54 frames. ], batch size: 52, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:04:14,088 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 05:04:34,759 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2271, 1.1334, 1.1905, 1.3446, 1.0560, 1.3271, 1.3376, 1.2683], + device='cuda:2'), covar=tensor([0.0933, 0.1057, 0.1102, 0.0706, 0.0862, 0.0842, 0.0853, 0.0768], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0225, 0.0207, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 05:04:45,788 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:05:11,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.946e+02 5.886e+02 7.201e+02 1.354e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-02 05:05:12,321 INFO [train.py:903] (2/4) Epoch 15, batch 3500, loss[loss=0.2163, simple_loss=0.2955, pruned_loss=0.06855, over 19535.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2987, pruned_loss=0.07256, over 3844313.24 frames. ], batch size: 56, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:15,626 INFO [train.py:903] (2/4) Epoch 15, batch 3550, loss[loss=0.1879, simple_loss=0.2635, pruned_loss=0.05613, over 19349.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2988, pruned_loss=0.07237, over 3844457.12 frames. 
], batch size: 47, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:18,406 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:48,284 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:53,014 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7808, 1.5868, 1.5210, 2.0274, 1.5619, 2.1351, 2.0398, 1.8879], + device='cuda:2'), covar=tensor([0.0756, 0.0874, 0.0945, 0.0794, 0.0860, 0.0630, 0.0792, 0.0602], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0226, 0.0208, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 05:07:18,042 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 4.734e+02 6.110e+02 7.809e+02 1.736e+03, threshold=1.222e+03, percent-clipped=8.0 +2023-04-02 05:07:19,080 INFO [train.py:903] (2/4) Epoch 15, batch 3600, loss[loss=0.225, simple_loss=0.2965, pruned_loss=0.07671, over 19775.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2991, pruned_loss=0.0726, over 3836011.52 frames. ], batch size: 54, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:20,339 INFO [train.py:903] (2/4) Epoch 15, batch 3650, loss[loss=0.2401, simple_loss=0.3123, pruned_loss=0.08397, over 19561.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2993, pruned_loss=0.07284, over 3842694.19 frames. ], batch size: 64, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:26,446 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:32,082 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:37,705 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:03,839 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:15,287 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:20,758 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.562e+02 6.527e+02 8.007e+02 1.277e+03, threshold=1.305e+03, percent-clipped=3.0 +2023-04-02 05:09:21,909 INFO [train.py:903] (2/4) Epoch 15, batch 3700, loss[loss=0.2469, simple_loss=0.3211, pruned_loss=0.08634, over 18046.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.299, pruned_loss=0.07288, over 3839729.74 frames. ], batch size: 83, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:09:34,099 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:13,728 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99333.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:24,822 INFO [train.py:903] (2/4) Epoch 15, batch 3750, loss[loss=0.199, simple_loss=0.2871, pruned_loss=0.05544, over 18176.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2989, pruned_loss=0.07321, over 3835095.76 frames. 
], batch size: 83, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:10:25,094 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:40,407 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1390, 1.9008, 1.8134, 2.1528, 1.9394, 1.8626, 1.7338, 2.0918], + device='cuda:2'), covar=tensor([0.0923, 0.1544, 0.1351, 0.1010, 0.1300, 0.0530, 0.1261, 0.0663], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0354, 0.0297, 0.0244, 0.0298, 0.0249, 0.0292, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:10:56,151 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99367.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:01,985 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:26,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.684e+02 5.640e+02 6.861e+02 8.808e+02 1.909e+03, threshold=1.372e+03, percent-clipped=3.0 +2023-04-02 05:11:28,561 INFO [train.py:903] (2/4) Epoch 15, batch 3800, loss[loss=0.1986, simple_loss=0.2806, pruned_loss=0.05826, over 19489.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2989, pruned_loss=0.07337, over 3840916.24 frames. ], batch size: 49, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:11:41,848 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4702, 3.1192, 2.2709, 2.3725, 2.4594, 2.6634, 0.8241, 2.1443], + device='cuda:2'), covar=tensor([0.0533, 0.0485, 0.0610, 0.0927, 0.0785, 0.0791, 0.1225, 0.0920], + device='cuda:2'), in_proj_covar=tensor([0.0346, 0.0346, 0.0341, 0.0373, 0.0444, 0.0369, 0.0322, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:11:53,195 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:58,891 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 05:12:26,314 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:30,511 INFO [train.py:903] (2/4) Epoch 15, batch 3850, loss[loss=0.2013, simple_loss=0.2696, pruned_loss=0.06647, over 18195.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2991, pruned_loss=0.07357, over 3819996.76 frames. ], batch size: 40, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:12:37,708 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:44,258 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 05:13:25,913 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99486.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:13:32,483 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 4.923e+02 6.054e+02 7.410e+02 1.518e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 05:13:32,502 INFO [train.py:903] (2/4) Epoch 15, batch 3900, loss[loss=0.2158, simple_loss=0.294, pruned_loss=0.0688, over 19533.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2985, pruned_loss=0.07311, over 3830165.05 frames. 
], batch size: 54, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:18,187 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:14:33,479 INFO [train.py:903] (2/4) Epoch 15, batch 3950, loss[loss=0.2136, simple_loss=0.2827, pruned_loss=0.07224, over 19415.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07306, over 3838184.04 frames. ], batch size: 48, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:41,169 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 05:15:28,345 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99586.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:35,966 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:36,870 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.329e+02 6.331e+02 8.679e+02 1.427e+03, threshold=1.266e+03, percent-clipped=6.0 +2023-04-02 05:15:36,888 INFO [train.py:903] (2/4) Epoch 15, batch 4000, loss[loss=0.2628, simple_loss=0.3303, pruned_loss=0.09771, over 19525.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2977, pruned_loss=0.07298, over 3830485.91 frames. ], batch size: 54, lr: 5.51e-03, grad_scale: 8.0 +2023-04-02 05:15:54,675 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:14,221 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:20,950 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:23,029 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 05:16:23,147 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:38,158 INFO [train.py:903] (2/4) Epoch 15, batch 4050, loss[loss=0.2331, simple_loss=0.3107, pruned_loss=0.07771, over 17556.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2979, pruned_loss=0.07293, over 3821872.74 frames. ], batch size: 101, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:16:45,352 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:50,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:17,267 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-04-02 05:17:20,384 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7575, 1.7729, 1.5811, 1.4040, 1.3222, 1.4068, 0.1724, 0.7235], + device='cuda:2'), covar=tensor([0.0466, 0.0470, 0.0340, 0.0514, 0.1146, 0.0615, 0.1079, 0.0904], + device='cuda:2'), in_proj_covar=tensor([0.0343, 0.0342, 0.0338, 0.0369, 0.0441, 0.0366, 0.0319, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:17:32,700 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:39,410 INFO [train.py:903] (2/4) Epoch 15, batch 4100, loss[loss=0.2423, simple_loss=0.3158, pruned_loss=0.08438, over 19601.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2988, pruned_loss=0.07326, over 3826123.56 frames. ], batch size: 57, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:17:40,554 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.802e+02 5.566e+02 7.429e+02 9.161e+02 2.166e+03, threshold=1.486e+03, percent-clipped=8.0 +2023-04-02 05:17:46,972 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:55,012 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:58,064 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:14,919 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 05:18:27,062 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:43,272 INFO [train.py:903] (2/4) Epoch 15, batch 4150, loss[loss=0.1911, simple_loss=0.2761, pruned_loss=0.05302, over 19465.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2973, pruned_loss=0.07255, over 3821752.92 frames. ], batch size: 64, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:18:47,072 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:31,726 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:35,407 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99784.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:45,183 INFO [train.py:903] (2/4) Epoch 15, batch 4200, loss[loss=0.2039, simple_loss=0.274, pruned_loss=0.06691, over 19382.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.297, pruned_loss=0.07235, over 3826605.79 frames. ], batch size: 47, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:19:47,437 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.549e+02 6.772e+02 8.720e+02 1.402e+03, threshold=1.354e+03, percent-clipped=0.0 +2023-04-02 05:19:50,953 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 05:19:57,170 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:06,275 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8984, 4.4157, 2.7948, 3.8652, 0.8591, 4.2773, 4.1627, 4.3491], + device='cuda:2'), covar=tensor([0.0556, 0.0935, 0.1956, 0.0774, 0.4267, 0.0744, 0.0888, 0.0997], + device='cuda:2'), in_proj_covar=tensor([0.0464, 0.0387, 0.0460, 0.0327, 0.0394, 0.0395, 0.0389, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:20:06,469 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:31,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 05:20:32,495 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99830.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:20:47,373 INFO [train.py:903] (2/4) Epoch 15, batch 4250, loss[loss=0.2137, simple_loss=0.275, pruned_loss=0.07619, over 19739.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2972, pruned_loss=0.07303, over 3823341.76 frames. ], batch size: 46, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:03,955 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 05:21:15,075 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 05:21:47,825 INFO [train.py:903] (2/4) Epoch 15, batch 4300, loss[loss=0.217, simple_loss=0.2957, pruned_loss=0.06917, over 19669.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2975, pruned_loss=0.07279, over 3825923.73 frames. ], batch size: 53, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:48,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.517e+02 6.494e+02 8.260e+02 1.741e+03, threshold=1.299e+03, percent-clipped=4.0 +2023-04-02 05:21:53,799 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:35,373 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99930.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:41,691 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 05:22:48,543 INFO [train.py:903] (2/4) Epoch 15, batch 4350, loss[loss=0.2327, simple_loss=0.3101, pruned_loss=0.07767, over 19157.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2983, pruned_loss=0.07303, over 3829749.01 frames. 
], batch size: 69, lr: 5.50e-03, grad_scale: 4.0 +2023-04-02 05:22:53,549 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99945.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:23:01,034 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:15,960 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99962.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:46,369 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:51,928 INFO [train.py:903] (2/4) Epoch 15, batch 4400, loss[loss=0.1846, simple_loss=0.2645, pruned_loss=0.05236, over 16849.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2971, pruned_loss=0.07228, over 3832677.68 frames. ], batch size: 37, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:23:53,159 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.844e+02 5.787e+02 7.470e+02 1.170e+03, threshold=1.157e+03, percent-clipped=0.0 +2023-04-02 05:23:56,445 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4039, 0.9513, 1.3954, 1.3950, 2.7525, 1.0009, 2.2275, 3.2834], + device='cuda:2'), covar=tensor([0.0755, 0.3861, 0.3342, 0.2247, 0.1246, 0.3093, 0.1484, 0.0455], + device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0349, 0.0370, 0.0329, 0.0356, 0.0339, 0.0350, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:24:06,741 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:23,727 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 05:24:31,880 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 05:24:34,581 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100025.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:24:35,752 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:47,764 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.90 vs. limit=5.0 +2023-04-02 05:24:57,064 INFO [train.py:903] (2/4) Epoch 15, batch 4450, loss[loss=0.1863, simple_loss=0.2716, pruned_loss=0.05057, over 19867.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2969, pruned_loss=0.07197, over 3829045.68 frames. 
], batch size: 52, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:24:57,236 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:00,917 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:07,820 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4021, 1.3912, 1.5312, 1.6776, 2.9598, 1.2155, 2.2324, 3.2977], + device='cuda:2'), covar=tensor([0.0522, 0.2660, 0.2708, 0.1564, 0.0755, 0.2379, 0.1184, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0350, 0.0369, 0.0330, 0.0357, 0.0339, 0.0350, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:25:14,942 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:25,303 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:47,693 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:59,167 INFO [train.py:903] (2/4) Epoch 15, batch 4500, loss[loss=0.1914, simple_loss=0.2664, pruned_loss=0.05815, over 17380.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2963, pruned_loss=0.07179, over 3820442.52 frames. ], batch size: 38, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:25:59,560 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3620, 1.5519, 2.0303, 1.5978, 3.1241, 2.3781, 3.3391, 1.5028], + device='cuda:2'), covar=tensor([0.2318, 0.3886, 0.2325, 0.1834, 0.1492, 0.2091, 0.1620, 0.3857], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0600, 0.0651, 0.0456, 0.0606, 0.0508, 0.0646, 0.0515], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:26:00,181 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.166e+02 6.659e+02 8.670e+02 1.796e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 05:27:01,360 INFO [train.py:903] (2/4) Epoch 15, batch 4550, loss[loss=0.256, simple_loss=0.3369, pruned_loss=0.08758, over 19677.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2961, pruned_loss=0.07184, over 3798819.20 frames. ], batch size: 55, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:27:10,395 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 05:27:16,327 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:21,766 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:38,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 05:27:47,845 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:28:04,926 INFO [train.py:903] (2/4) Epoch 15, batch 4600, loss[loss=0.2194, simple_loss=0.3066, pruned_loss=0.06613, over 19479.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2974, pruned_loss=0.07212, over 3804624.11 frames. 
], batch size: 64, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:28:06,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.707e+02 5.654e+02 7.541e+02 1.184e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-02 05:28:19,081 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100201.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:28:30,986 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7733, 4.2579, 4.4892, 4.4831, 1.6230, 4.2331, 3.6490, 4.2101], + device='cuda:2'), covar=tensor([0.1457, 0.0739, 0.0498, 0.0560, 0.5438, 0.0710, 0.0671, 0.0943], + device='cuda:2'), in_proj_covar=tensor([0.0728, 0.0664, 0.0865, 0.0744, 0.0771, 0.0612, 0.0522, 0.0799], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 05:28:47,093 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100226.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:29:08,985 INFO [train.py:903] (2/4) Epoch 15, batch 4650, loss[loss=0.2423, simple_loss=0.3279, pruned_loss=0.07836, over 19694.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2981, pruned_loss=0.07262, over 3800925.82 frames. ], batch size: 59, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:29:25,491 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 05:29:25,728 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:29:36,799 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 05:29:44,900 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0196, 1.1589, 1.5388, 1.2186, 2.6270, 3.7034, 3.4649, 3.9578], + device='cuda:2'), covar=tensor([0.1722, 0.3884, 0.3444, 0.2313, 0.0588, 0.0161, 0.0208, 0.0215], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0305, 0.0334, 0.0252, 0.0225, 0.0170, 0.0208, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:30:11,079 INFO [train.py:903] (2/4) Epoch 15, batch 4700, loss[loss=0.1833, simple_loss=0.2606, pruned_loss=0.05301, over 19411.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2981, pruned_loss=0.07267, over 3801404.88 frames. ], batch size: 48, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:30:12,228 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.601e+02 6.328e+02 8.331e+02 3.311e+03, threshold=1.266e+03, percent-clipped=13.0 +2023-04-02 05:30:22,201 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:33,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 05:30:50,895 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:55,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:13,907 INFO [train.py:903] (2/4) Epoch 15, batch 4750, loss[loss=0.2451, simple_loss=0.3238, pruned_loss=0.08317, over 19629.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2982, pruned_loss=0.07233, over 3799129.77 frames. 
], batch size: 57, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:31:21,200 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:49,658 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100369.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:32:16,838 INFO [train.py:903] (2/4) Epoch 15, batch 4800, loss[loss=0.2409, simple_loss=0.3194, pruned_loss=0.08117, over 19600.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2969, pruned_loss=0.07186, over 3810348.76 frames. ], batch size: 61, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:32:18,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.713e+02 6.387e+02 8.455e+02 2.006e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 05:32:44,477 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:32:52,647 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:17,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:21,577 INFO [train.py:903] (2/4) Epoch 15, batch 4850, loss[loss=0.2061, simple_loss=0.2888, pruned_loss=0.0617, over 18239.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2971, pruned_loss=0.07149, over 3824054.24 frames. ], batch size: 83, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:33:48,703 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 05:34:10,992 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 05:34:14,899 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100484.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:34:15,728 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 05:34:17,772 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 05:34:24,800 INFO [train.py:903] (2/4) Epoch 15, batch 4900, loss[loss=0.2035, simple_loss=0.2903, pruned_loss=0.0584, over 19308.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2967, pruned_loss=0.07141, over 3831757.54 frames. ], batch size: 66, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:34:25,928 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.042e+02 5.846e+02 7.925e+02 1.600e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-02 05:34:26,005 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 05:34:46,435 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 05:35:19,185 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9105, 1.6568, 1.4984, 1.9033, 1.6166, 1.6539, 1.4481, 1.8197], + device='cuda:2'), covar=tensor([0.0930, 0.1366, 0.1408, 0.0928, 0.1270, 0.0519, 0.1357, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0355, 0.0297, 0.0242, 0.0298, 0.0245, 0.0292, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:35:27,073 INFO [train.py:903] (2/4) Epoch 15, batch 4950, loss[loss=0.2072, simple_loss=0.2914, pruned_loss=0.06153, over 19107.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2968, pruned_loss=0.07162, over 3833052.35 frames. ], batch size: 69, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:35:45,824 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 05:36:05,324 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7173, 4.1914, 4.4514, 4.4370, 1.6969, 4.1483, 3.6325, 4.1299], + device='cuda:2'), covar=tensor([0.1597, 0.0847, 0.0573, 0.0640, 0.5390, 0.0741, 0.0675, 0.1127], + device='cuda:2'), in_proj_covar=tensor([0.0735, 0.0669, 0.0873, 0.0750, 0.0778, 0.0619, 0.0528, 0.0808], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 05:36:09,665 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 05:36:29,688 INFO [train.py:903] (2/4) Epoch 15, batch 5000, loss[loss=0.191, simple_loss=0.2647, pruned_loss=0.0586, over 19735.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07192, over 3809601.06 frames. ], batch size: 45, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:36:31,854 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 5.199e+02 6.623e+02 8.418e+02 1.165e+03, threshold=1.325e+03, percent-clipped=0.0 +2023-04-02 05:36:40,514 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 05:36:40,819 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:36:52,298 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 05:37:33,607 INFO [train.py:903] (2/4) Epoch 15, batch 5050, loss[loss=0.2369, simple_loss=0.3221, pruned_loss=0.07582, over 19658.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2977, pruned_loss=0.0721, over 3815721.98 frames. ], batch size: 60, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:38:09,676 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 05:38:37,496 INFO [train.py:903] (2/4) Epoch 15, batch 5100, loss[loss=0.2184, simple_loss=0.2907, pruned_loss=0.07305, over 19737.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2968, pruned_loss=0.07174, over 3809315.31 frames. ], batch size: 48, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:38:39,900 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 5.134e+02 6.124e+02 7.830e+02 1.941e+03, threshold=1.225e+03, percent-clipped=4.0 +2023-04-02 05:38:49,385 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-02 05:38:51,807 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 05:38:57,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 05:39:05,776 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:39:29,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 05:39:37,139 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100740.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:39:38,888 INFO [train.py:903] (2/4) Epoch 15, batch 5150, loss[loss=0.2455, simple_loss=0.3326, pruned_loss=0.07916, over 18796.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2967, pruned_loss=0.07161, over 3811231.80 frames. ], batch size: 74, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:39:39,309 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2065, 1.2932, 1.2176, 1.0270, 0.9621, 1.0430, 0.0864, 0.3470], + device='cuda:2'), covar=tensor([0.0591, 0.0599, 0.0362, 0.0506, 0.1217, 0.0619, 0.1126, 0.0976], + device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0342, 0.0340, 0.0369, 0.0442, 0.0370, 0.0322, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:39:47,020 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100748.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:39:51,334 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 05:40:07,667 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:40:09,140 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100765.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:40:28,230 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 05:40:41,943 INFO [train.py:903] (2/4) Epoch 15, batch 5200, loss[loss=0.2552, simple_loss=0.3285, pruned_loss=0.09097, over 19660.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2982, pruned_loss=0.07266, over 3818692.68 frames. ], batch size: 60, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:40:44,523 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 5.325e+02 6.515e+02 8.501e+02 1.618e+03, threshold=1.303e+03, percent-clipped=5.0 +2023-04-02 05:40:58,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 05:41:43,054 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 05:41:47,605 INFO [train.py:903] (2/4) Epoch 15, batch 5250, loss[loss=0.1863, simple_loss=0.2752, pruned_loss=0.04877, over 19521.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2972, pruned_loss=0.07188, over 3825323.76 frames. 
], batch size: 54, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:20,940 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1890, 2.2166, 2.4891, 3.1556, 2.2408, 2.9671, 2.6950, 2.2646], + device='cuda:2'), covar=tensor([0.4118, 0.3833, 0.1621, 0.2271, 0.4195, 0.1861, 0.4144, 0.3094], + device='cuda:2'), in_proj_covar=tensor([0.0835, 0.0877, 0.0675, 0.0906, 0.0817, 0.0752, 0.0812, 0.0739], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 05:42:25,534 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2118, 1.3328, 1.9957, 1.6509, 3.1630, 4.5945, 4.4636, 4.9954], + device='cuda:2'), covar=tensor([0.1754, 0.3795, 0.3147, 0.2153, 0.0524, 0.0203, 0.0172, 0.0157], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0304, 0.0333, 0.0252, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:42:33,415 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100879.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:42:50,419 INFO [train.py:903] (2/4) Epoch 15, batch 5300, loss[loss=0.2136, simple_loss=0.2981, pruned_loss=0.06459, over 18120.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2973, pruned_loss=0.07214, over 3822378.70 frames. ], batch size: 83, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:52,722 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.131e+02 6.120e+02 8.353e+02 1.768e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 05:43:08,007 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 05:43:52,929 INFO [train.py:903] (2/4) Epoch 15, batch 5350, loss[loss=0.1792, simple_loss=0.2556, pruned_loss=0.05137, over 19729.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2976, pruned_loss=0.07226, over 3821702.94 frames. ], batch size: 46, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:29,359 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 05:44:30,883 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:44:51,634 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5763, 1.4377, 1.3397, 1.9451, 1.5541, 1.9570, 1.8597, 1.6453], + device='cuda:2'), covar=tensor([0.0802, 0.0907, 0.1074, 0.0746, 0.0832, 0.0672, 0.0818, 0.0700], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0220, 0.0221, 0.0242, 0.0226, 0.0209, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 05:44:56,010 INFO [train.py:903] (2/4) Epoch 15, batch 5400, loss[loss=0.1995, simple_loss=0.2729, pruned_loss=0.06303, over 19613.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.297, pruned_loss=0.07184, over 3822527.08 frames. 
], batch size: 50, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:58,258 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.355e+02 6.832e+02 8.412e+02 2.240e+03, threshold=1.366e+03, percent-clipped=7.0 +2023-04-02 05:45:01,707 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:46:00,092 INFO [train.py:903] (2/4) Epoch 15, batch 5450, loss[loss=0.2136, simple_loss=0.2983, pruned_loss=0.06448, over 19674.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2985, pruned_loss=0.07252, over 3808766.41 frames. ], batch size: 55, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:46:30,079 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6444, 2.4022, 1.8291, 1.6758, 2.2862, 1.4401, 1.4126, 1.9353], + device='cuda:2'), covar=tensor([0.0967, 0.0739, 0.0898, 0.0786, 0.0423, 0.1114, 0.0709, 0.0451], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0307, 0.0326, 0.0252, 0.0240, 0.0330, 0.0294, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:47:04,590 INFO [train.py:903] (2/4) Epoch 15, batch 5500, loss[loss=0.2231, simple_loss=0.3043, pruned_loss=0.07094, over 19536.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2974, pruned_loss=0.07179, over 3805623.24 frames. ], batch size: 54, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:47:04,764 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101092.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:47:06,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 5.715e+02 6.860e+02 8.623e+02 1.531e+03, threshold=1.372e+03, percent-clipped=1.0 +2023-04-02 05:47:27,335 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 05:47:45,292 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:47:45,818 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 05:47:58,065 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:48:05,671 INFO [train.py:903] (2/4) Epoch 15, batch 5550, loss[loss=0.2696, simple_loss=0.3477, pruned_loss=0.09579, over 19522.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2971, pruned_loss=0.07209, over 3808239.95 frames. ], batch size: 56, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:48:12,791 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 05:48:28,900 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:49:04,237 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 05:49:08,930 INFO [train.py:903] (2/4) Epoch 15, batch 5600, loss[loss=0.2561, simple_loss=0.3199, pruned_loss=0.09614, over 19084.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.297, pruned_loss=0.07204, over 3813899.92 frames. 
], batch size: 69, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:49:11,016 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 5.272e+02 6.555e+02 8.017e+02 1.573e+03, threshold=1.311e+03, percent-clipped=2.0 +2023-04-02 05:49:28,282 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101207.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:50:11,400 INFO [train.py:903] (2/4) Epoch 15, batch 5650, loss[loss=0.265, simple_loss=0.33, pruned_loss=0.09994, over 19672.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2968, pruned_loss=0.07243, over 3812405.52 frames. ], batch size: 58, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:50:59,653 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 05:51:14,079 INFO [train.py:903] (2/4) Epoch 15, batch 5700, loss[loss=0.1843, simple_loss=0.2526, pruned_loss=0.05798, over 19729.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2976, pruned_loss=0.07302, over 3800039.80 frames. ], batch size: 45, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:51:17,701 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.651e+02 6.610e+02 8.419e+02 1.957e+03, threshold=1.322e+03, percent-clipped=7.0 +2023-04-02 05:51:48,359 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5402, 1.1768, 1.2248, 1.4673, 1.1360, 1.3690, 1.2112, 1.3972], + device='cuda:2'), covar=tensor([0.1040, 0.1196, 0.1500, 0.0966, 0.1237, 0.0555, 0.1390, 0.0802], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0351, 0.0296, 0.0244, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:52:17,922 INFO [train.py:903] (2/4) Epoch 15, batch 5750, loss[loss=0.2346, simple_loss=0.3117, pruned_loss=0.07876, over 19162.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2969, pruned_loss=0.07251, over 3791269.06 frames. ], batch size: 69, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:52:20,181 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 05:52:29,547 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 05:52:33,000 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 05:52:51,683 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8357, 1.2306, 1.4937, 0.5703, 2.0047, 2.4387, 2.1470, 2.5908], + device='cuda:2'), covar=tensor([0.1574, 0.3600, 0.3224, 0.2551, 0.0589, 0.0245, 0.0321, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0304, 0.0334, 0.0253, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:53:10,198 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0952, 1.1030, 1.4458, 1.4773, 2.6439, 1.0653, 2.0397, 2.8875], + device='cuda:2'), covar=tensor([0.0568, 0.2902, 0.2779, 0.1740, 0.0799, 0.2414, 0.1228, 0.0379], + device='cuda:2'), in_proj_covar=tensor([0.0382, 0.0353, 0.0370, 0.0336, 0.0362, 0.0341, 0.0354, 0.0374], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:53:21,273 INFO [train.py:903] (2/4) Epoch 15, batch 5800, loss[loss=0.2343, simple_loss=0.3167, pruned_loss=0.07598, over 19664.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2977, pruned_loss=0.07291, over 3787646.86 frames. ], batch size: 60, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:53:23,486 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.554e+02 5.132e+02 6.075e+02 7.663e+02 2.298e+03, threshold=1.215e+03, percent-clipped=3.0 +2023-04-02 05:54:24,297 INFO [train.py:903] (2/4) Epoch 15, batch 5850, loss[loss=0.2002, simple_loss=0.2896, pruned_loss=0.05541, over 19610.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2977, pruned_loss=0.07279, over 3791339.58 frames. ], batch size: 57, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:54:37,897 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8961, 1.9717, 2.1919, 2.5542, 1.8713, 2.5351, 2.2671, 1.9205], + device='cuda:2'), covar=tensor([0.4076, 0.3423, 0.1722, 0.2066, 0.3757, 0.1746, 0.4553, 0.3194], + device='cuda:2'), in_proj_covar=tensor([0.0836, 0.0882, 0.0679, 0.0910, 0.0823, 0.0756, 0.0815, 0.0744], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 05:54:52,524 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101463.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:54:58,854 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:55:24,201 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101488.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:55:28,446 INFO [train.py:903] (2/4) Epoch 15, batch 5900, loss[loss=0.2185, simple_loss=0.3015, pruned_loss=0.06777, over 19525.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2982, pruned_loss=0.07293, over 3794284.27 frames. ], batch size: 54, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:55:30,755 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.163e+02 6.557e+02 7.983e+02 1.612e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-02 05:55:30,810 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 05:55:52,884 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 05:56:03,834 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8135, 1.4864, 1.4485, 1.7838, 1.4899, 1.5795, 1.5416, 1.7681], + device='cuda:2'), covar=tensor([0.1002, 0.1376, 0.1522, 0.0912, 0.1244, 0.0552, 0.1265, 0.0732], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0352, 0.0296, 0.0245, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 05:56:20,858 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5323, 1.8174, 2.2154, 1.8395, 3.3425, 2.8932, 3.7126, 1.6807], + device='cuda:2'), covar=tensor([0.2223, 0.3858, 0.2340, 0.1695, 0.1369, 0.1700, 0.1424, 0.3651], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0608, 0.0654, 0.0457, 0.0609, 0.0513, 0.0650, 0.0516], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 05:56:30,792 INFO [train.py:903] (2/4) Epoch 15, batch 5950, loss[loss=0.2207, simple_loss=0.2986, pruned_loss=0.07139, over 19469.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2986, pruned_loss=0.07354, over 3793682.08 frames. ], batch size: 64, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:24,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:57:34,460 INFO [train.py:903] (2/4) Epoch 15, batch 6000, loss[loss=0.2264, simple_loss=0.3057, pruned_loss=0.07359, over 19778.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2977, pruned_loss=0.07286, over 3799996.90 frames. ], batch size: 56, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:34,460 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 05:57:47,192 INFO [train.py:937] (2/4) Epoch 15, validation: loss=0.1729, simple_loss=0.2735, pruned_loss=0.0362, over 944034.00 frames. +2023-04-02 05:57:47,193 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 05:57:49,631 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.208e+02 6.128e+02 8.316e+02 1.573e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 05:58:03,656 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-02 05:58:49,959 INFO [train.py:903] (2/4) Epoch 15, batch 6050, loss[loss=0.177, simple_loss=0.2577, pruned_loss=0.04813, over 19750.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2974, pruned_loss=0.07256, over 3807204.20 frames. ], batch size: 46, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:52,012 INFO [train.py:903] (2/4) Epoch 15, batch 6100, loss[loss=0.2003, simple_loss=0.2784, pruned_loss=0.06106, over 19855.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2971, pruned_loss=0.07256, over 3796480.65 frames. ], batch size: 52, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:55,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.053e+02 5.047e+02 6.326e+02 7.867e+02 1.574e+03, threshold=1.265e+03, percent-clipped=9.0 +2023-04-02 06:00:05,542 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:00:56,516 INFO [train.py:903] (2/4) Epoch 15, batch 6150, loss[loss=0.2114, simple_loss=0.2838, pruned_loss=0.06955, over 19730.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2984, pruned_loss=0.07282, over 3813898.87 frames. 
], batch size: 51, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 06:01:23,856 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 06:01:59,355 INFO [train.py:903] (2/4) Epoch 15, batch 6200, loss[loss=0.251, simple_loss=0.3277, pruned_loss=0.08718, over 17334.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2985, pruned_loss=0.07304, over 3803127.36 frames. ], batch size: 101, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:02:01,561 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.959e+02 6.303e+02 7.991e+02 1.526e+03, threshold=1.261e+03, percent-clipped=1.0 +2023-04-02 06:02:57,092 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6463, 1.5032, 1.5155, 2.1548, 1.6845, 2.0568, 2.0275, 1.8344], + device='cuda:2'), covar=tensor([0.0817, 0.0910, 0.1019, 0.0795, 0.0868, 0.0689, 0.0829, 0.0637], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0223, 0.0243, 0.0228, 0.0209, 0.0189, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 06:03:00,198 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:02,065 INFO [train.py:903] (2/4) Epoch 15, batch 6250, loss[loss=0.2329, simple_loss=0.3048, pruned_loss=0.08048, over 19623.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2986, pruned_loss=0.07274, over 3794555.93 frames. ], batch size: 57, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:03:16,050 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0765, 1.9935, 1.7076, 1.6241, 1.3616, 1.5038, 0.5895, 0.9589], + device='cuda:2'), covar=tensor([0.0609, 0.0582, 0.0482, 0.0788, 0.1201, 0.0922, 0.1161, 0.1097], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0340, 0.0339, 0.0369, 0.0443, 0.0368, 0.0322, 0.0331], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 06:03:30,486 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 06:03:30,826 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:34,284 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7244, 1.8525, 1.6597, 2.6504, 1.9370, 2.5879, 1.9520, 1.4960], + device='cuda:2'), covar=tensor([0.4541, 0.4124, 0.2746, 0.2762, 0.4049, 0.2088, 0.5561, 0.4712], + device='cuda:2'), in_proj_covar=tensor([0.0829, 0.0875, 0.0674, 0.0903, 0.0816, 0.0751, 0.0807, 0.0740], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 06:04:04,521 INFO [train.py:903] (2/4) Epoch 15, batch 6300, loss[loss=0.2524, simple_loss=0.3208, pruned_loss=0.09196, over 19676.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2994, pruned_loss=0.07306, over 3797846.98 frames. 
], batch size: 60, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:04:08,000 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.483e+02 5.666e+02 6.616e+02 7.934e+02 1.912e+03, threshold=1.323e+03, percent-clipped=2.0 +2023-04-02 06:05:03,703 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101938.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:05:08,246 INFO [train.py:903] (2/4) Epoch 15, batch 6350, loss[loss=0.2063, simple_loss=0.2841, pruned_loss=0.06429, over 19830.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.072, over 3794046.69 frames. ], batch size: 52, lr: 5.45e-03, grad_scale: 2.0 +2023-04-02 06:05:50,371 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-02 06:06:11,823 INFO [train.py:903] (2/4) Epoch 15, batch 6400, loss[loss=0.2078, simple_loss=0.2791, pruned_loss=0.06824, over 19348.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.298, pruned_loss=0.07219, over 3788894.62 frames. ], batch size: 47, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:06:13,933 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 06:06:16,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 4.874e+02 5.936e+02 7.490e+02 2.019e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 06:06:30,759 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:06:52,974 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 06:07:17,097 INFO [train.py:903] (2/4) Epoch 15, batch 6450, loss[loss=0.241, simple_loss=0.3227, pruned_loss=0.07972, over 19676.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2972, pruned_loss=0.07193, over 3796538.34 frames. ], batch size: 60, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:07:23,170 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102046.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:07:47,712 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3302, 1.1493, 1.5702, 1.4185, 2.5770, 3.5999, 3.3022, 3.8668], + device='cuda:2'), covar=tensor([0.1598, 0.4801, 0.4163, 0.2113, 0.0675, 0.0234, 0.0325, 0.0262], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0309, 0.0337, 0.0256, 0.0228, 0.0172, 0.0210, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 06:08:03,622 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 06:08:20,697 INFO [train.py:903] (2/4) Epoch 15, batch 6500, loss[loss=0.2206, simple_loss=0.3013, pruned_loss=0.06998, over 19330.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.297, pruned_loss=0.0722, over 3795287.17 frames. ], batch size: 66, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:08:25,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.860e+02 6.100e+02 7.866e+02 2.286e+03, threshold=1.220e+03, percent-clipped=9.0 +2023-04-02 06:08:26,641 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 06:09:23,525 INFO [train.py:903] (2/4) Epoch 15, batch 6550, loss[loss=0.2035, simple_loss=0.279, pruned_loss=0.06401, over 19736.00 frames. 
], tot_loss[loss=0.2206, simple_loss=0.2971, pruned_loss=0.07203, over 3794837.71 frames. ], batch size: 51, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:09:47,216 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:10:26,474 INFO [train.py:903] (2/4) Epoch 15, batch 6600, loss[loss=0.1889, simple_loss=0.2709, pruned_loss=0.05343, over 19736.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2973, pruned_loss=0.07244, over 3804025.36 frames. ], batch size: 51, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:10:31,177 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 4.937e+02 6.611e+02 8.175e+02 1.787e+03, threshold=1.322e+03, percent-clipped=6.0 +2023-04-02 06:10:43,365 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7492, 4.8222, 5.5048, 5.4947, 1.9754, 5.1150, 4.4902, 5.1682], + device='cuda:2'), covar=tensor([0.1376, 0.1244, 0.0560, 0.0528, 0.5784, 0.0699, 0.0544, 0.1005], + device='cuda:2'), in_proj_covar=tensor([0.0737, 0.0671, 0.0880, 0.0756, 0.0785, 0.0628, 0.0528, 0.0816], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 06:11:29,869 INFO [train.py:903] (2/4) Epoch 15, batch 6650, loss[loss=0.1849, simple_loss=0.259, pruned_loss=0.05538, over 19759.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.297, pruned_loss=0.07231, over 3795503.65 frames. ], batch size: 45, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:11:44,401 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 06:12:20,500 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102282.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:12:33,709 INFO [train.py:903] (2/4) Epoch 15, batch 6700, loss[loss=0.2082, simple_loss=0.2892, pruned_loss=0.06356, over 19761.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.297, pruned_loss=0.07235, over 3807670.45 frames. ], batch size: 54, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:12:38,443 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 5.015e+02 6.423e+02 7.841e+02 1.581e+03, threshold=1.285e+03, percent-clipped=1.0 +2023-04-02 06:13:32,265 INFO [train.py:903] (2/4) Epoch 15, batch 6750, loss[loss=0.2101, simple_loss=0.2837, pruned_loss=0.06829, over 19813.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2959, pruned_loss=0.07126, over 3806348.90 frames. ], batch size: 49, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:13:40,349 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:14:30,407 INFO [train.py:903] (2/4) Epoch 15, batch 6800, loss[loss=0.2272, simple_loss=0.2932, pruned_loss=0.08058, over 19735.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2959, pruned_loss=0.07159, over 3806850.54 frames. 
], batch size: 51, lr: 5.44e-03, grad_scale: 8.0 +2023-04-02 06:14:35,330 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.944e+02 6.022e+02 7.665e+02 3.022e+03, threshold=1.204e+03, percent-clipped=5.0 +2023-04-02 06:14:37,034 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102397.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:14:57,423 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:15:15,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 06:15:16,203 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 06:15:19,097 INFO [train.py:903] (2/4) Epoch 16, batch 0, loss[loss=0.2415, simple_loss=0.3142, pruned_loss=0.08437, over 19511.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3142, pruned_loss=0.08437, over 19511.00 frames. ], batch size: 54, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:15:19,097 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 06:15:29,721 INFO [train.py:937] (2/4) Epoch 16, validation: loss=0.1737, simple_loss=0.2745, pruned_loss=0.03646, over 944034.00 frames. +2023-04-02 06:15:29,722 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 06:15:45,603 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 06:15:58,388 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:06,655 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7048, 1.5262, 1.5866, 2.1933, 1.7533, 2.0744, 2.1462, 1.8974], + device='cuda:2'), covar=tensor([0.0795, 0.0946, 0.0992, 0.0754, 0.0835, 0.0719, 0.0841, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0222, 0.0243, 0.0226, 0.0209, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 06:16:24,721 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:33,067 INFO [train.py:903] (2/4) Epoch 16, batch 50, loss[loss=0.2787, simple_loss=0.3363, pruned_loss=0.1106, over 13477.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3004, pruned_loss=0.07244, over 858652.12 frames. ], batch size: 136, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:04,310 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.865e+02 6.426e+02 8.395e+02 1.744e+03, threshold=1.285e+03, percent-clipped=5.0 +2023-04-02 06:17:08,775 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-02 06:17:29,605 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5563, 1.1902, 1.4500, 1.4836, 3.0761, 1.0113, 2.3315, 3.4500], + device='cuda:2'), covar=tensor([0.0496, 0.3133, 0.3047, 0.1937, 0.0769, 0.2663, 0.1282, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0382, 0.0350, 0.0369, 0.0334, 0.0359, 0.0338, 0.0350, 0.0374], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:17:33,685 INFO [train.py:903] (2/4) Epoch 16, batch 100, loss[loss=0.2443, simple_loss=0.318, pruned_loss=0.08531, over 19652.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3002, pruned_loss=0.07195, over 1525010.89 frames. ], batch size: 53, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:47,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 06:18:13,836 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2388, 2.2822, 2.4291, 3.1913, 2.2440, 3.0126, 2.6164, 2.2779], + device='cuda:2'), covar=tensor([0.4083, 0.3748, 0.1692, 0.2195, 0.4087, 0.1849, 0.4258, 0.2990], + device='cuda:2'), in_proj_covar=tensor([0.0833, 0.0881, 0.0678, 0.0903, 0.0821, 0.0756, 0.0811, 0.0743], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 06:18:34,760 INFO [train.py:903] (2/4) Epoch 16, batch 150, loss[loss=0.2211, simple_loss=0.3048, pruned_loss=0.06868, over 19343.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2996, pruned_loss=0.07189, over 2048375.21 frames. ], batch size: 66, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:06,752 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9355, 1.7069, 1.5762, 2.0225, 1.7652, 1.7064, 1.6173, 1.9334], + device='cuda:2'), covar=tensor([0.1016, 0.1524, 0.1490, 0.0981, 0.1322, 0.0557, 0.1311, 0.0730], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0352, 0.0298, 0.0246, 0.0299, 0.0246, 0.0292, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:19:07,437 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.578e+02 6.638e+02 8.298e+02 1.665e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 06:19:36,774 INFO [train.py:903] (2/4) Epoch 16, batch 200, loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06876, over 19847.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2985, pruned_loss=0.07092, over 2452933.65 frames. ], batch size: 52, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:38,845 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-02 06:20:18,514 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102653.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:20:29,351 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3699, 3.1082, 2.4850, 2.4063, 2.2167, 2.6959, 1.1824, 2.3014], + device='cuda:2'), covar=tensor([0.0522, 0.0476, 0.0500, 0.0847, 0.0861, 0.0901, 0.1082, 0.0812], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0342, 0.0339, 0.0371, 0.0444, 0.0371, 0.0324, 0.0333], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 06:20:38,288 INFO [train.py:903] (2/4) Epoch 16, batch 250, loss[loss=0.219, simple_loss=0.2804, pruned_loss=0.07886, over 18252.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2972, pruned_loss=0.07078, over 2769721.58 frames. ], batch size: 40, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:20:43,974 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 06:20:51,646 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102678.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:21:12,173 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 5.293e+02 5.976e+02 7.347e+02 1.638e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 06:21:43,452 INFO [train.py:903] (2/4) Epoch 16, batch 300, loss[loss=0.2194, simple_loss=0.2961, pruned_loss=0.07138, over 19731.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2956, pruned_loss=0.06993, over 3012856.53 frames. ], batch size: 51, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:21:43,887 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102720.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:13,614 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:21,910 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8861, 1.9701, 2.1393, 2.6361, 1.8805, 2.4449, 2.2806, 2.0073], + device='cuda:2'), covar=tensor([0.4012, 0.3492, 0.1770, 0.2186, 0.3749, 0.1927, 0.4260, 0.3054], + device='cuda:2'), in_proj_covar=tensor([0.0840, 0.0887, 0.0680, 0.0909, 0.0826, 0.0759, 0.0816, 0.0745], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 06:22:44,947 INFO [train.py:903] (2/4) Epoch 16, batch 350, loss[loss=0.221, simple_loss=0.2997, pruned_loss=0.07112, over 19689.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2948, pruned_loss=0.06952, over 3204964.96 frames. ], batch size: 59, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:22:47,911 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4207, 1.4914, 1.7394, 1.5961, 2.5016, 2.2174, 2.5310, 1.1407], + device='cuda:2'), covar=tensor([0.2264, 0.4040, 0.2391, 0.1858, 0.1413, 0.1967, 0.1376, 0.3920], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0597, 0.0650, 0.0453, 0.0602, 0.0509, 0.0648, 0.0511], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 06:22:50,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 06:23:16,122 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.254e+02 6.186e+02 7.503e+02 2.205e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 06:23:39,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 06:23:47,563 INFO [train.py:903] (2/4) Epoch 16, batch 400, loss[loss=0.1982, simple_loss=0.2881, pruned_loss=0.05418, over 19676.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2952, pruned_loss=0.07019, over 3333996.28 frames. ], batch size: 55, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:24:49,425 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102869.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:24:50,278 INFO [train.py:903] (2/4) Epoch 16, batch 450, loss[loss=0.2398, simple_loss=0.321, pruned_loss=0.07929, over 19553.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2969, pruned_loss=0.07099, over 3447149.72 frames. ], batch size: 61, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:25:11,599 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 06:25:22,282 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.837e+02 5.955e+02 8.003e+02 1.401e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 06:25:24,671 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 06:25:25,907 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 06:25:30,911 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2599, 1.3213, 1.2839, 1.0992, 1.0491, 1.1668, 0.1142, 0.4574], + device='cuda:2'), covar=tensor([0.0516, 0.0498, 0.0304, 0.0423, 0.0962, 0.0469, 0.0959, 0.0826], + device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0338, 0.0335, 0.0367, 0.0438, 0.0368, 0.0321, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 06:25:32,993 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:25:52,012 INFO [train.py:903] (2/4) Epoch 16, batch 500, loss[loss=0.2595, simple_loss=0.3341, pruned_loss=0.09243, over 18127.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2982, pruned_loss=0.07174, over 3525457.72 frames. ], batch size: 83, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:26:54,162 INFO [train.py:903] (2/4) Epoch 16, batch 550, loss[loss=0.1803, simple_loss=0.2549, pruned_loss=0.05284, over 19300.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2982, pruned_loss=0.07216, over 3589776.17 frames. ], batch size: 44, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:27:24,959 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.789e+02 5.227e+02 6.443e+02 7.702e+02 1.436e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 06:27:54,425 INFO [train.py:903] (2/4) Epoch 16, batch 600, loss[loss=0.2015, simple_loss=0.2876, pruned_loss=0.05772, over 19659.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2977, pruned_loss=0.07166, over 3647887.60 frames. ], batch size: 55, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:28:37,060 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-02 06:28:55,607 INFO [train.py:903] (2/4) Epoch 16, batch 650, loss[loss=0.2318, simple_loss=0.305, pruned_loss=0.07935, over 19068.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2969, pruned_loss=0.0714, over 3681784.44 frames. ], batch size: 69, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:29:28,783 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.715e+02 6.008e+02 7.942e+02 1.451e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-04-02 06:29:53,925 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103116.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:29:57,401 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6454, 4.1294, 4.2956, 4.3000, 1.5914, 4.0203, 3.5786, 4.0125], + device='cuda:2'), covar=tensor([0.1408, 0.0704, 0.0566, 0.0589, 0.5267, 0.0641, 0.0611, 0.1051], + device='cuda:2'), in_proj_covar=tensor([0.0733, 0.0666, 0.0876, 0.0749, 0.0775, 0.0619, 0.0523, 0.0804], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 06:29:58,299 INFO [train.py:903] (2/4) Epoch 16, batch 700, loss[loss=0.2282, simple_loss=0.2924, pruned_loss=0.08201, over 19757.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2975, pruned_loss=0.07161, over 3721633.08 frames. ], batch size: 47, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:30:35,144 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103150.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:31:00,301 INFO [train.py:903] (2/4) Epoch 16, batch 750, loss[loss=0.2139, simple_loss=0.2744, pruned_loss=0.07663, over 19761.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.298, pruned_loss=0.0722, over 3758341.93 frames. ], batch size: 47, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:31:33,684 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.236e+02 6.262e+02 8.063e+02 1.865e+03, threshold=1.252e+03, percent-clipped=5.0 +2023-04-02 06:31:55,091 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:03,181 INFO [train.py:903] (2/4) Epoch 16, batch 800, loss[loss=0.2275, simple_loss=0.3043, pruned_loss=0.07538, over 19788.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2983, pruned_loss=0.07229, over 3778708.40 frames. ], batch size: 56, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:32:18,133 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 06:32:36,481 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4466, 2.2355, 2.1005, 2.7715, 2.2866, 2.0863, 2.3870, 2.5724], + device='cuda:2'), covar=tensor([0.0904, 0.1625, 0.1387, 0.0860, 0.1313, 0.0525, 0.1099, 0.0618], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0352, 0.0296, 0.0244, 0.0298, 0.0245, 0.0292, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:32:38,482 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103249.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:33:04,747 INFO [train.py:903] (2/4) Epoch 16, batch 850, loss[loss=0.2525, simple_loss=0.3198, pruned_loss=0.09258, over 13539.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2974, pruned_loss=0.0716, over 3788435.50 frames. 
], batch size: 136, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:33:38,413 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.865e+02 6.263e+02 7.829e+02 1.710e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 06:33:57,932 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 06:34:06,716 INFO [train.py:903] (2/4) Epoch 16, batch 900, loss[loss=0.2213, simple_loss=0.3053, pruned_loss=0.06868, over 19524.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2974, pruned_loss=0.07143, over 3792416.37 frames. ], batch size: 56, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:34:17,418 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:34:52,813 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.2122, 2.2397, 2.2602, 2.0110, 4.6552, 1.3277, 2.7745, 5.1287], + device='cuda:2'), covar=tensor([0.0338, 0.2231, 0.2290, 0.1745, 0.0685, 0.2434, 0.1174, 0.0131], + device='cuda:2'), in_proj_covar=tensor([0.0385, 0.0351, 0.0370, 0.0334, 0.0361, 0.0340, 0.0349, 0.0376], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:35:01,526 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:08,171 INFO [train.py:903] (2/4) Epoch 16, batch 950, loss[loss=0.2114, simple_loss=0.2961, pruned_loss=0.06339, over 19680.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2971, pruned_loss=0.07096, over 3800934.98 frames. ], batch size: 58, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:35:13,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 06:35:27,478 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103384.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:35:34,310 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9041, 1.5109, 1.6103, 1.7099, 3.3544, 1.2818, 2.4839, 3.9041], + device='cuda:2'), covar=tensor([0.0458, 0.2662, 0.2629, 0.1817, 0.0784, 0.2309, 0.1154, 0.0217], + device='cuda:2'), in_proj_covar=tensor([0.0385, 0.0352, 0.0370, 0.0334, 0.0361, 0.0339, 0.0350, 0.0376], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:35:40,826 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.367e+02 6.234e+02 7.823e+02 2.113e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-02 06:36:12,138 INFO [train.py:903] (2/4) Epoch 16, batch 1000, loss[loss=0.315, simple_loss=0.3642, pruned_loss=0.1329, over 14087.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2976, pruned_loss=0.07159, over 3791883.02 frames. ], batch size: 136, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:03,129 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103460.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:07,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 06:37:14,593 INFO [train.py:903] (2/4) Epoch 16, batch 1050, loss[loss=0.1932, simple_loss=0.2766, pruned_loss=0.05494, over 19844.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2967, pruned_loss=0.07117, over 3800052.11 frames. 
], batch size: 52, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:43,711 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:45,894 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.497e+02 6.450e+02 8.583e+02 2.663e+03, threshold=1.290e+03, percent-clipped=6.0 +2023-04-02 06:37:49,197 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 06:38:15,969 INFO [train.py:903] (2/4) Epoch 16, batch 1100, loss[loss=0.2456, simple_loss=0.3204, pruned_loss=0.08536, over 19288.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2969, pruned_loss=0.07161, over 3805107.54 frames. ], batch size: 66, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:38:54,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3020, 2.1321, 2.0425, 2.5458, 2.3133, 2.0710, 2.1968, 2.3578], + device='cuda:2'), covar=tensor([0.0949, 0.1606, 0.1297, 0.0881, 0.1241, 0.0505, 0.1128, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0355, 0.0298, 0.0247, 0.0301, 0.0247, 0.0294, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:39:18,052 INFO [train.py:903] (2/4) Epoch 16, batch 1150, loss[loss=0.2257, simple_loss=0.2959, pruned_loss=0.07777, over 19655.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2974, pruned_loss=0.07177, over 3790778.33 frames. ], batch size: 53, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:39:25,964 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103575.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:37,331 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:50,619 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.106e+02 6.190e+02 8.719e+02 1.567e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 06:40:06,763 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:06,805 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:21,175 INFO [train.py:903] (2/4) Epoch 16, batch 1200, loss[loss=0.2118, simple_loss=0.2913, pruned_loss=0.06611, over 17257.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2975, pruned_loss=0.07165, over 3797116.38 frames. ], batch size: 101, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:40:21,633 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:52,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:54,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 06:41:24,585 INFO [train.py:903] (2/4) Epoch 16, batch 1250, loss[loss=0.2238, simple_loss=0.302, pruned_loss=0.07273, over 19309.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2964, pruned_loss=0.07118, over 3804879.29 frames. 
], batch size: 66, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:41:56,590 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.406e+02 4.955e+02 6.144e+02 7.729e+02 1.641e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 06:42:00,277 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8046, 1.9559, 2.0713, 1.7538, 4.3084, 1.2044, 2.5096, 4.8310], + device='cuda:2'), covar=tensor([0.0389, 0.2385, 0.2300, 0.1829, 0.0731, 0.2412, 0.1299, 0.0142], + device='cuda:2'), in_proj_covar=tensor([0.0383, 0.0351, 0.0368, 0.0333, 0.0359, 0.0339, 0.0348, 0.0375], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:42:24,682 INFO [train.py:903] (2/4) Epoch 16, batch 1300, loss[loss=0.2003, simple_loss=0.2739, pruned_loss=0.06337, over 19408.00 frames. ], tot_loss[loss=0.22, simple_loss=0.297, pruned_loss=0.0715, over 3815105.23 frames. ], batch size: 48, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:42:34,232 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0652, 3.5914, 1.9998, 2.1685, 3.1922, 1.7706, 1.5713, 2.1677], + device='cuda:2'), covar=tensor([0.1241, 0.0484, 0.1022, 0.0774, 0.0499, 0.1096, 0.0920, 0.0636], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0309, 0.0329, 0.0255, 0.0243, 0.0329, 0.0296, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:42:35,157 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103728.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:43:12,195 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:26,062 INFO [train.py:903] (2/4) Epoch 16, batch 1350, loss[loss=0.2368, simple_loss=0.3136, pruned_loss=0.08005, over 19775.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2982, pruned_loss=0.07273, over 3801022.26 frames. ], batch size: 56, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:43:43,840 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:59,438 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 5.214e+02 6.633e+02 9.392e+02 2.118e+03, threshold=1.327e+03, percent-clipped=8.0 +2023-04-02 06:44:30,118 INFO [train.py:903] (2/4) Epoch 16, batch 1400, loss[loss=0.2125, simple_loss=0.2865, pruned_loss=0.06923, over 19601.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2972, pruned_loss=0.07224, over 3816625.66 frames. ], batch size: 52, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:44:42,081 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. 
limit=5.0 +2023-04-02 06:44:43,975 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103831.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:44:57,957 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103843.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:45:14,484 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103856.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:26,690 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:32,154 INFO [train.py:903] (2/4) Epoch 16, batch 1450, loss[loss=0.2277, simple_loss=0.3059, pruned_loss=0.07476, over 19765.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2985, pruned_loss=0.07307, over 3807191.91 frames. ], batch size: 54, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:45:32,187 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 06:45:56,427 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:01,021 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103894.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:03,877 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.072e+02 4.634e+02 5.962e+02 7.073e+02 1.523e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-02 06:46:33,191 INFO [train.py:903] (2/4) Epoch 16, batch 1500, loss[loss=0.2783, simple_loss=0.3456, pruned_loss=0.1055, over 18054.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2978, pruned_loss=0.07218, over 3826286.87 frames. ], batch size: 83, lr: 5.23e-03, grad_scale: 16.0 +2023-04-02 06:47:35,228 INFO [train.py:903] (2/4) Epoch 16, batch 1550, loss[loss=0.2302, simple_loss=0.3124, pruned_loss=0.07396, over 19673.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.297, pruned_loss=0.07238, over 3828739.81 frames. ], batch size: 58, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:48:05,232 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8393, 1.3064, 1.0362, 0.9294, 1.1695, 0.9691, 0.8806, 1.2370], + device='cuda:2'), covar=tensor([0.0621, 0.0832, 0.1072, 0.0704, 0.0514, 0.1198, 0.0606, 0.0430], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0310, 0.0331, 0.0255, 0.0244, 0.0333, 0.0297, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:48:09,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.500e+02 6.727e+02 8.636e+02 1.580e+03, threshold=1.345e+03, percent-clipped=8.0 +2023-04-02 06:48:24,304 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:32,648 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:39,347 INFO [train.py:903] (2/4) Epoch 16, batch 1600, loss[loss=0.2344, simple_loss=0.3068, pruned_loss=0.08106, over 18346.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2985, pruned_loss=0.0727, over 3814234.59 frames. ], batch size: 83, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:49:05,532 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-02 06:49:38,452 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:49:41,613 INFO [train.py:903] (2/4) Epoch 16, batch 1650, loss[loss=0.1864, simple_loss=0.2654, pruned_loss=0.0537, over 19476.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.297, pruned_loss=0.07196, over 3821698.13 frames. ], batch size: 49, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:14,817 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.025e+02 5.150e+02 6.179e+02 7.587e+02 1.568e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 06:50:18,605 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104099.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:50:20,720 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8520, 4.9277, 5.6298, 5.5925, 2.1224, 5.2485, 4.4966, 5.2617], + device='cuda:2'), covar=tensor([0.1428, 0.0936, 0.0513, 0.0517, 0.5490, 0.0595, 0.0558, 0.1088], + device='cuda:2'), in_proj_covar=tensor([0.0737, 0.0673, 0.0876, 0.0751, 0.0779, 0.0623, 0.0524, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 06:50:22,746 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:50:43,586 INFO [train.py:903] (2/4) Epoch 16, batch 1700, loss[loss=0.2535, simple_loss=0.3234, pruned_loss=0.09181, over 19536.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2965, pruned_loss=0.07201, over 3816705.88 frames. ], batch size: 54, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:48,597 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104124.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:50:51,813 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104127.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:51:25,789 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 06:51:44,974 INFO [train.py:903] (2/4) Epoch 16, batch 1750, loss[loss=0.1832, simple_loss=0.2565, pruned_loss=0.05494, over 19746.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2963, pruned_loss=0.07231, over 3799667.74 frames. ], batch size: 45, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:52:19,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.884e+02 5.867e+02 6.930e+02 2.034e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 06:52:43,901 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 06:52:45,577 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:52:48,730 INFO [train.py:903] (2/4) Epoch 16, batch 1800, loss[loss=0.1894, simple_loss=0.2772, pruned_loss=0.05079, over 19776.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2979, pruned_loss=0.07298, over 3770166.08 frames. 
], batch size: 54, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:53:09,935 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:14,847 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:46,271 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 06:53:49,826 INFO [train.py:903] (2/4) Epoch 16, batch 1850, loss[loss=0.2026, simple_loss=0.2882, pruned_loss=0.05852, over 19396.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2967, pruned_loss=0.0721, over 3784749.38 frames. ], batch size: 70, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:53:56,750 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7595, 4.3671, 2.7268, 3.8004, 0.9659, 4.2187, 4.1509, 4.2594], + device='cuda:2'), covar=tensor([0.0548, 0.0890, 0.1898, 0.0775, 0.3947, 0.0663, 0.0787, 0.0994], + device='cuda:2'), in_proj_covar=tensor([0.0469, 0.0383, 0.0461, 0.0331, 0.0391, 0.0397, 0.0393, 0.0429], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 06:54:21,805 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 06:54:22,921 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.248e+02 6.749e+02 7.716e+02 1.558e+03, threshold=1.350e+03, percent-clipped=5.0 +2023-04-02 06:54:51,925 INFO [train.py:903] (2/4) Epoch 16, batch 1900, loss[loss=0.2402, simple_loss=0.3154, pruned_loss=0.08246, over 19321.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2969, pruned_loss=0.07203, over 3791327.20 frames. ], batch size: 70, lr: 5.22e-03, grad_scale: 4.0 +2023-04-02 06:55:09,087 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 06:55:15,659 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 06:55:31,879 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:34,088 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:39,886 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 06:55:41,202 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:53,980 INFO [train.py:903] (2/4) Epoch 16, batch 1950, loss[loss=0.2307, simple_loss=0.3122, pruned_loss=0.07463, over 19012.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2972, pruned_loss=0.07162, over 3807097.19 frames. ], batch size: 69, lr: 5.21e-03, grad_scale: 4.0 +2023-04-02 06:56:30,731 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.688e+02 6.377e+02 8.120e+02 1.703e+03, threshold=1.275e+03, percent-clipped=4.0 +2023-04-02 06:56:38,151 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.44 vs. 
limit=5.0 +2023-04-02 06:56:46,831 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104411.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:56:58,261 INFO [train.py:903] (2/4) Epoch 16, batch 2000, loss[loss=0.2001, simple_loss=0.2719, pruned_loss=0.06413, over 19608.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2958, pruned_loss=0.0708, over 3811120.50 frames. ], batch size: 50, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:57:56,617 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1892, 2.2407, 2.4142, 3.0827, 2.2338, 3.0031, 2.6277, 2.1224], + device='cuda:2'), covar=tensor([0.3802, 0.3707, 0.1674, 0.2213, 0.3987, 0.1763, 0.3883, 0.3128], + device='cuda:2'), in_proj_covar=tensor([0.0840, 0.0889, 0.0680, 0.0907, 0.0823, 0.0759, 0.0812, 0.0743], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 06:57:57,342 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 06:57:57,650 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:00,766 INFO [train.py:903] (2/4) Epoch 16, batch 2050, loss[loss=0.1744, simple_loss=0.2494, pruned_loss=0.04969, over 19759.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.07121, over 3798240.36 frames. ], batch size: 46, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:58:04,668 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:05,671 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:09,735 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7764, 3.2505, 3.3011, 3.3018, 1.3407, 3.1859, 2.7329, 3.0748], + device='cuda:2'), covar=tensor([0.1648, 0.1008, 0.0849, 0.0897, 0.5229, 0.0964, 0.0802, 0.1337], + device='cuda:2'), in_proj_covar=tensor([0.0733, 0.0675, 0.0876, 0.0753, 0.0780, 0.0623, 0.0523, 0.0807], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 06:58:14,144 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 06:58:15,344 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 06:58:35,725 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.885e+02 6.080e+02 8.555e+02 1.693e+03, threshold=1.216e+03, percent-clipped=6.0 +2023-04-02 06:58:36,151 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:36,181 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:40,140 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 06:59:02,740 INFO [train.py:903] (2/4) Epoch 16, batch 2100, loss[loss=0.2153, simple_loss=0.2883, pruned_loss=0.07116, over 19403.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.297, pruned_loss=0.07141, over 3796454.07 frames. 
], batch size: 48, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:59:06,393 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:09,979 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:33,819 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 06:59:55,540 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 07:00:04,689 INFO [train.py:903] (2/4) Epoch 16, batch 2150, loss[loss=0.2379, simple_loss=0.308, pruned_loss=0.08392, over 19651.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2985, pruned_loss=0.07228, over 3806430.25 frames. ], batch size: 53, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:00:39,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.547e+02 6.907e+02 8.298e+02 2.194e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-02 07:00:53,828 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:01:08,294 INFO [train.py:903] (2/4) Epoch 16, batch 2200, loss[loss=0.2802, simple_loss=0.3375, pruned_loss=0.1115, over 12956.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2981, pruned_loss=0.07242, over 3817109.97 frames. ], batch size: 135, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:01:26,362 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:02:09,406 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0239, 3.3747, 1.8600, 1.9413, 2.8691, 1.4948, 1.3958, 2.2549], + device='cuda:2'), covar=tensor([0.1348, 0.0638, 0.1187, 0.0855, 0.0666, 0.1364, 0.1002, 0.0703], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0309, 0.0328, 0.0255, 0.0244, 0.0332, 0.0292, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:02:12,259 INFO [train.py:903] (2/4) Epoch 16, batch 2250, loss[loss=0.1707, simple_loss=0.2462, pruned_loss=0.04762, over 19711.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2978, pruned_loss=0.07223, over 3815782.96 frames. ], batch size: 46, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:02:46,757 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.921e+02 5.898e+02 6.952e+02 1.646e+03, threshold=1.180e+03, percent-clipped=1.0 +2023-04-02 07:03:15,187 INFO [train.py:903] (2/4) Epoch 16, batch 2300, loss[loss=0.2099, simple_loss=0.2966, pruned_loss=0.06165, over 19550.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2978, pruned_loss=0.07205, over 3806299.08 frames. ], batch size: 54, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:03:19,212 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:27,434 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:29,353 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-02 07:03:51,567 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104748.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:59,129 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 07:03:59,881 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:11,302 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5645, 2.2847, 2.1712, 2.7093, 2.3158, 2.3450, 2.1239, 2.5928], + device='cuda:2'), covar=tensor([0.0889, 0.1622, 0.1274, 0.0965, 0.1404, 0.0462, 0.1199, 0.0592], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0350, 0.0297, 0.0246, 0.0298, 0.0247, 0.0292, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:04:17,945 INFO [train.py:903] (2/4) Epoch 16, batch 2350, loss[loss=0.2426, simple_loss=0.3253, pruned_loss=0.07992, over 19612.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2977, pruned_loss=0.07169, over 3815526.43 frames. ], batch size: 57, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:04:34,949 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:53,850 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.048e+02 6.738e+02 8.844e+02 1.580e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 07:05:00,920 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 07:05:05,729 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:05:18,372 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 07:05:22,501 INFO [train.py:903] (2/4) Epoch 16, batch 2400, loss[loss=0.2205, simple_loss=0.3068, pruned_loss=0.06709, over 19572.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2973, pruned_loss=0.07125, over 3818065.39 frames. ], batch size: 61, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:24,572 INFO [train.py:903] (2/4) Epoch 16, batch 2450, loss[loss=0.2364, simple_loss=0.31, pruned_loss=0.08143, over 19402.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2984, pruned_loss=0.07226, over 3814631.17 frames. ], batch size: 70, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:54,732 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-02 07:07:00,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.877e+02 7.599e+02 9.302e+02 2.010e+03, threshold=1.520e+03, percent-clipped=8.0 +2023-04-02 07:07:27,189 INFO [train.py:903] (2/4) Epoch 16, batch 2500, loss[loss=0.2148, simple_loss=0.295, pruned_loss=0.06725, over 19782.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2971, pruned_loss=0.07137, over 3826666.62 frames. ], batch size: 56, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:08:29,692 INFO [train.py:903] (2/4) Epoch 16, batch 2550, loss[loss=0.2189, simple_loss=0.3008, pruned_loss=0.06848, over 19780.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2976, pruned_loss=0.07163, over 3829188.53 frames. 
], batch size: 54, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:09:05,174 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 5.239e+02 6.384e+02 8.143e+02 1.987e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 07:09:24,647 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 07:09:32,157 INFO [train.py:903] (2/4) Epoch 16, batch 2600, loss[loss=0.2424, simple_loss=0.3234, pruned_loss=0.0807, over 19530.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.297, pruned_loss=0.07164, over 3828098.94 frames. ], batch size: 54, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:09:48,358 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0075, 1.9707, 1.8019, 1.5573, 1.5396, 1.6654, 0.3349, 0.8196], + device='cuda:2'), covar=tensor([0.0476, 0.0484, 0.0331, 0.0601, 0.1041, 0.0627, 0.1107, 0.0948], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0346, 0.0343, 0.0373, 0.0447, 0.0374, 0.0326, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:10:35,305 INFO [train.py:903] (2/4) Epoch 16, batch 2650, loss[loss=0.2131, simple_loss=0.2785, pruned_loss=0.0739, over 19741.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2966, pruned_loss=0.0717, over 3823926.92 frames. ], batch size: 45, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:54,822 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 07:11:09,890 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 5.317e+02 6.316e+02 8.001e+02 1.365e+03, threshold=1.263e+03, percent-clipped=2.0 +2023-04-02 07:11:36,915 INFO [train.py:903] (2/4) Epoch 16, batch 2700, loss[loss=0.1838, simple_loss=0.2574, pruned_loss=0.05512, over 19727.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2961, pruned_loss=0.07128, over 3829669.06 frames. ], batch size: 45, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:11:38,438 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1063, 1.2719, 1.4928, 1.3301, 2.6757, 0.9854, 2.0655, 2.9408], + device='cuda:2'), covar=tensor([0.0527, 0.2742, 0.2730, 0.1796, 0.0770, 0.2377, 0.1187, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0384, 0.0350, 0.0369, 0.0334, 0.0360, 0.0340, 0.0355, 0.0376], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:12:39,773 INFO [train.py:903] (2/4) Epoch 16, batch 2750, loss[loss=0.1949, simple_loss=0.2631, pruned_loss=0.06332, over 19739.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2959, pruned_loss=0.071, over 3828733.93 frames. ], batch size: 45, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:13:15,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 5.170e+02 6.445e+02 8.543e+02 2.677e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 07:13:41,800 INFO [train.py:903] (2/4) Epoch 16, batch 2800, loss[loss=0.2263, simple_loss=0.3061, pruned_loss=0.07327, over 18766.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2961, pruned_loss=0.07102, over 3819520.85 frames. ], batch size: 74, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:14:13,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 07:14:19,801 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.17 vs. 
limit=5.0 +2023-04-02 07:14:23,295 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4880, 1.6887, 2.0885, 1.7673, 3.3303, 2.9211, 3.7035, 1.7633], + device='cuda:2'), covar=tensor([0.2357, 0.4022, 0.2439, 0.1772, 0.1359, 0.1698, 0.1291, 0.3638], + device='cuda:2'), in_proj_covar=tensor([0.0507, 0.0603, 0.0658, 0.0458, 0.0603, 0.0510, 0.0649, 0.0518], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:14:44,317 INFO [train.py:903] (2/4) Epoch 16, batch 2850, loss[loss=0.22, simple_loss=0.2933, pruned_loss=0.07336, over 19599.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2958, pruned_loss=0.07061, over 3827136.87 frames. ], batch size: 52, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:18,937 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.908e+02 5.066e+02 6.361e+02 8.222e+02 2.548e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 07:15:28,059 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2875, 1.4746, 1.9020, 1.6133, 2.5134, 2.0272, 2.6318, 1.1738], + device='cuda:2'), covar=tensor([0.2510, 0.3999, 0.2434, 0.1934, 0.1651, 0.2295, 0.1661, 0.4326], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0606, 0.0661, 0.0460, 0.0607, 0.0513, 0.0653, 0.0521], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:15:46,241 INFO [train.py:903] (2/4) Epoch 16, batch 2900, loss[loss=0.2284, simple_loss=0.3111, pruned_loss=0.07287, over 19296.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2966, pruned_loss=0.07131, over 3813943.87 frames. ], batch size: 66, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:46,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 07:15:47,805 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2303, 1.2259, 1.3374, 1.3444, 1.7466, 1.7399, 1.7595, 0.6022], + device='cuda:2'), covar=tensor([0.2325, 0.4028, 0.2545, 0.1858, 0.1477, 0.2206, 0.1278, 0.4265], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0607, 0.0662, 0.0461, 0.0607, 0.0514, 0.0653, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:16:48,805 INFO [train.py:903] (2/4) Epoch 16, batch 2950, loss[loss=0.2976, simple_loss=0.3595, pruned_loss=0.1178, over 18462.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.07148, over 3794158.73 frames. ], batch size: 83, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:06,096 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 07:17:23,865 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.947e+02 6.195e+02 7.724e+02 2.015e+03, threshold=1.239e+03, percent-clipped=3.0 +2023-04-02 07:17:30,296 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5530, 1.6228, 1.8156, 1.9036, 1.4320, 1.8055, 1.8954, 1.7331], + device='cuda:2'), covar=tensor([0.3637, 0.3172, 0.1683, 0.1945, 0.3375, 0.1809, 0.4243, 0.2909], + device='cuda:2'), in_proj_covar=tensor([0.0847, 0.0894, 0.0685, 0.0911, 0.0829, 0.0765, 0.0818, 0.0748], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:17:33,642 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:17:50,872 INFO [train.py:903] (2/4) Epoch 16, batch 3000, loss[loss=0.2117, simple_loss=0.2825, pruned_loss=0.07049, over 19467.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2971, pruned_loss=0.07175, over 3790701.81 frames. ], batch size: 49, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:50,873 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 07:18:04,143 INFO [train.py:937] (2/4) Epoch 16, validation: loss=0.1725, simple_loss=0.273, pruned_loss=0.03604, over 944034.00 frames. +2023-04-02 07:18:04,144 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 07:18:07,803 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 07:18:57,207 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0684, 2.1321, 2.2885, 2.6521, 2.1159, 2.5528, 2.4032, 2.1867], + device='cuda:2'), covar=tensor([0.3156, 0.2467, 0.1326, 0.1634, 0.2739, 0.1387, 0.2931, 0.2253], + device='cuda:2'), in_proj_covar=tensor([0.0846, 0.0893, 0.0684, 0.0910, 0.0828, 0.0764, 0.0818, 0.0748], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:19:07,110 INFO [train.py:903] (2/4) Epoch 16, batch 3050, loss[loss=0.2337, simple_loss=0.3194, pruned_loss=0.07402, over 19543.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.07153, over 3798528.82 frames. ], batch size: 64, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:19:41,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.577e+02 4.913e+02 6.190e+02 8.953e+02 2.496e+03, threshold=1.238e+03, percent-clipped=7.0 +2023-04-02 07:19:49,980 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:11,581 INFO [train.py:903] (2/4) Epoch 16, batch 3100, loss[loss=0.2183, simple_loss=0.2835, pruned_loss=0.07656, over 19733.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07129, over 3809089.94 frames. 
], batch size: 45, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:20:11,880 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105520.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:55,553 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3581, 1.4182, 1.7562, 1.5771, 2.3654, 2.0144, 2.3682, 0.9555], + device='cuda:2'), covar=tensor([0.2548, 0.4417, 0.2594, 0.2042, 0.1627, 0.2410, 0.1747, 0.4552], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0605, 0.0659, 0.0460, 0.0606, 0.0509, 0.0651, 0.0519], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:21:13,221 INFO [train.py:903] (2/4) Epoch 16, batch 3150, loss[loss=0.1905, simple_loss=0.2669, pruned_loss=0.05702, over 19775.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.072, over 3805359.16 frames. ], batch size: 47, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:21:41,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 07:21:45,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2087, 1.1443, 1.1539, 1.3091, 0.9899, 1.3092, 1.3373, 1.2561], + device='cuda:2'), covar=tensor([0.0882, 0.1003, 0.1094, 0.0707, 0.0940, 0.0840, 0.0898, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0242, 0.0226, 0.0207, 0.0190, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 07:21:46,677 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.014e+02 6.069e+02 7.545e+02 2.509e+03, threshold=1.214e+03, percent-clipped=2.0 +2023-04-02 07:22:12,921 INFO [train.py:903] (2/4) Epoch 16, batch 3200, loss[loss=0.18, simple_loss=0.2531, pruned_loss=0.05343, over 19738.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2976, pruned_loss=0.07164, over 3816419.38 frames. ], batch size: 45, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:15,470 INFO [train.py:903] (2/4) Epoch 16, batch 3250, loss[loss=0.2247, simple_loss=0.3057, pruned_loss=0.07188, over 17620.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2963, pruned_loss=0.07078, over 3825417.63 frames. ], batch size: 101, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:50,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 5.141e+02 5.939e+02 7.859e+02 1.653e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-02 07:24:19,069 INFO [train.py:903] (2/4) Epoch 16, batch 3300, loss[loss=0.1729, simple_loss=0.2494, pruned_loss=0.04817, over 19765.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07113, over 3832431.44 frames. ], batch size: 45, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:24:22,592 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 07:24:55,820 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:25:18,097 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8409, 3.9890, 4.3838, 4.3979, 2.6482, 4.1024, 3.7505, 4.1367], + device='cuda:2'), covar=tensor([0.1330, 0.2835, 0.0593, 0.0625, 0.4235, 0.1064, 0.0542, 0.1017], + device='cuda:2'), in_proj_covar=tensor([0.0736, 0.0683, 0.0890, 0.0770, 0.0788, 0.0632, 0.0528, 0.0820], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 07:25:21,450 INFO [train.py:903] (2/4) Epoch 16, batch 3350, loss[loss=0.1958, simple_loss=0.2816, pruned_loss=0.05504, over 19781.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2958, pruned_loss=0.07052, over 3832142.66 frames. ], batch size: 56, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:25:57,717 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.066e+02 6.242e+02 8.095e+02 2.652e+03, threshold=1.248e+03, percent-clipped=5.0 +2023-04-02 07:26:12,286 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:20,553 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105817.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:23,650 INFO [train.py:903] (2/4) Epoch 16, batch 3400, loss[loss=0.2096, simple_loss=0.2844, pruned_loss=0.06746, over 19737.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2948, pruned_loss=0.06996, over 3831024.62 frames. ], batch size: 51, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:00,020 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:09,291 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2920, 1.3760, 1.5876, 1.4797, 2.1873, 2.0290, 2.2269, 0.8874], + device='cuda:2'), covar=tensor([0.2335, 0.4045, 0.2546, 0.1916, 0.1505, 0.2080, 0.1437, 0.4184], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0607, 0.0661, 0.0462, 0.0606, 0.0511, 0.0652, 0.0519], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:27:18,404 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:19,651 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:26,111 INFO [train.py:903] (2/4) Epoch 16, batch 3450, loss[loss=0.2596, simple_loss=0.3254, pruned_loss=0.09689, over 19373.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2953, pruned_loss=0.07037, over 3821055.97 frames. ], batch size: 66, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:29,398 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 07:27:54,655 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. 
limit=2.0 +2023-04-02 07:28:00,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.355e+02 6.157e+02 7.690e+02 1.854e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 07:28:29,088 INFO [train.py:903] (2/4) Epoch 16, batch 3500, loss[loss=0.1924, simple_loss=0.2725, pruned_loss=0.0562, over 18636.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.295, pruned_loss=0.07001, over 3825835.86 frames. ], batch size: 41, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:29:23,565 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:29:31,026 INFO [train.py:903] (2/4) Epoch 16, batch 3550, loss[loss=0.2055, simple_loss=0.2874, pruned_loss=0.06182, over 19775.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2948, pruned_loss=0.07014, over 3833389.23 frames. ], batch size: 56, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:29:42,161 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:30:06,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 5.018e+02 6.219e+02 7.272e+02 1.453e+03, threshold=1.244e+03, percent-clipped=3.0 +2023-04-02 07:30:20,134 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-02 07:30:34,290 INFO [train.py:903] (2/4) Epoch 16, batch 3600, loss[loss=0.2179, simple_loss=0.3006, pruned_loss=0.06762, over 18215.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2953, pruned_loss=0.07041, over 3819059.19 frames. ], batch size: 83, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:31:09,962 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6711, 1.7115, 1.5432, 1.2595, 1.1211, 1.2833, 0.3063, 0.5907], + device='cuda:2'), covar=tensor([0.0756, 0.0682, 0.0405, 0.0670, 0.1557, 0.0879, 0.1179, 0.1180], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0344, 0.0341, 0.0369, 0.0445, 0.0372, 0.0322, 0.0331], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:31:37,388 INFO [train.py:903] (2/4) Epoch 16, batch 3650, loss[loss=0.2547, simple_loss=0.3257, pruned_loss=0.09184, over 19554.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2961, pruned_loss=0.07065, over 3820664.90 frames. ], batch size: 61, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:09,772 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:32:13,044 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.371e+02 6.801e+02 8.277e+02 1.518e+03, threshold=1.360e+03, percent-clipped=5.0 +2023-04-02 07:32:42,528 INFO [train.py:903] (2/4) Epoch 16, batch 3700, loss[loss=0.1963, simple_loss=0.2699, pruned_loss=0.06129, over 19501.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2957, pruned_loss=0.07045, over 3821754.75 frames. 
], batch size: 49, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:44,099 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:13,586 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:23,620 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:34,051 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:44,489 INFO [train.py:903] (2/4) Epoch 16, batch 3750, loss[loss=0.2201, simple_loss=0.3053, pruned_loss=0.06747, over 19583.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2959, pruned_loss=0.07038, over 3828126.50 frames. ], batch size: 52, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:34:19,091 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.705e+02 5.517e+02 6.973e+02 1.532e+03, threshold=1.103e+03, percent-clipped=3.0 +2023-04-02 07:34:45,438 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:34:46,186 INFO [train.py:903] (2/4) Epoch 16, batch 3800, loss[loss=0.2421, simple_loss=0.3152, pruned_loss=0.08455, over 19664.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2952, pruned_loss=0.07054, over 3815285.84 frames. ], batch size: 55, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:06,384 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106235.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:18,209 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 07:35:18,551 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106244.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:36,757 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106260.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,030 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106269.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,817 INFO [train.py:903] (2/4) Epoch 16, batch 3850, loss[loss=0.23, simple_loss=0.3106, pruned_loss=0.07474, over 19584.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2952, pruned_loss=0.07053, over 3820049.60 frames. ], batch size: 61, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:56,822 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:09,625 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:23,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.267e+02 6.364e+02 7.734e+02 1.610e+03, threshold=1.273e+03, percent-clipped=5.0 +2023-04-02 07:36:50,364 INFO [train.py:903] (2/4) Epoch 16, batch 3900, loss[loss=0.2071, simple_loss=0.2703, pruned_loss=0.072, over 19759.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2945, pruned_loss=0.07046, over 3830862.85 frames. 
], batch size: 48, lr: 5.17e-03, grad_scale: 16.0 +2023-04-02 07:37:32,166 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106354.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:37:53,357 INFO [train.py:903] (2/4) Epoch 16, batch 3950, loss[loss=0.2568, simple_loss=0.329, pruned_loss=0.09229, over 19258.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2949, pruned_loss=0.07065, over 3834058.35 frames. ], batch size: 66, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:37:58,039 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 07:38:28,356 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.931e+02 6.019e+02 7.839e+02 1.392e+03, threshold=1.204e+03, percent-clipped=3.0 +2023-04-02 07:38:54,928 INFO [train.py:903] (2/4) Epoch 16, batch 4000, loss[loss=0.2218, simple_loss=0.2986, pruned_loss=0.07247, over 19475.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2945, pruned_loss=0.07021, over 3834488.96 frames. ], batch size: 49, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:19,383 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:39:19,577 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9654, 1.7784, 2.0025, 1.5948, 4.4883, 0.9462, 2.5846, 4.9684], + device='cuda:2'), covar=tensor([0.0388, 0.2436, 0.2335, 0.1924, 0.0673, 0.2760, 0.1292, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0350, 0.0370, 0.0337, 0.0360, 0.0340, 0.0354, 0.0376], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:39:45,509 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 07:39:56,759 INFO [train.py:903] (2/4) Epoch 16, batch 4050, loss[loss=0.1789, simple_loss=0.2571, pruned_loss=0.05033, over 19403.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2944, pruned_loss=0.07005, over 3832512.35 frames. ], batch size: 47, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:59,415 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:40:34,186 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.509e+02 4.912e+02 6.033e+02 8.007e+02 1.551e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-02 07:40:59,912 INFO [train.py:903] (2/4) Epoch 16, batch 4100, loss[loss=0.166, simple_loss=0.2448, pruned_loss=0.04354, over 19372.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2943, pruned_loss=0.0698, over 3826075.06 frames. ], batch size: 47, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:41:07,069 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106525.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:15,872 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:34,837 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 07:41:36,502 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106550.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:41,941 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106554.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:45,547 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:42:02,440 INFO [train.py:903] (2/4) Epoch 16, batch 4150, loss[loss=0.2415, simple_loss=0.3202, pruned_loss=0.08139, over 19346.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2958, pruned_loss=0.07077, over 3820449.14 frames. ], batch size: 66, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:42:36,773 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.926e+02 6.073e+02 7.216e+02 1.422e+03, threshold=1.215e+03, percent-clipped=1.0 +2023-04-02 07:43:03,952 INFO [train.py:903] (2/4) Epoch 16, batch 4200, loss[loss=0.1803, simple_loss=0.2646, pruned_loss=0.04798, over 19576.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2952, pruned_loss=0.07023, over 3829009.57 frames. ], batch size: 52, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:43:11,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 07:43:15,915 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:05,135 INFO [train.py:903] (2/4) Epoch 16, batch 4250, loss[loss=0.2121, simple_loss=0.2826, pruned_loss=0.07085, over 19751.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2958, pruned_loss=0.07038, over 3828830.09 frames. ], batch size: 51, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:44:20,455 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 07:44:32,713 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 07:44:33,381 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 07:44:41,253 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:42,277 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.502e+02 4.922e+02 6.114e+02 7.451e+02 1.808e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 07:45:08,519 INFO [train.py:903] (2/4) Epoch 16, batch 4300, loss[loss=0.1874, simple_loss=0.2646, pruned_loss=0.0551, over 19728.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.297, pruned_loss=0.07095, over 3823403.62 frames. ], batch size: 48, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:45:39,033 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:45:58,079 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 07:46:11,647 INFO [train.py:903] (2/4) Epoch 16, batch 4350, loss[loss=0.2369, simple_loss=0.3147, pruned_loss=0.07957, over 19391.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.298, pruned_loss=0.07156, over 3824791.23 frames. 
], batch size: 70, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:46:28,669 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3201, 3.0608, 2.3182, 2.8012, 1.0373, 2.9520, 2.8861, 2.9526], + device='cuda:2'), covar=tensor([0.1167, 0.1406, 0.1946, 0.0961, 0.3476, 0.0976, 0.1104, 0.1233], + device='cuda:2'), in_proj_covar=tensor([0.0468, 0.0385, 0.0463, 0.0328, 0.0390, 0.0397, 0.0395, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:46:30,462 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 07:46:46,924 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.102e+02 6.153e+02 8.101e+02 2.041e+03, threshold=1.231e+03, percent-clipped=8.0 +2023-04-02 07:47:03,100 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:05,390 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 07:47:06,284 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:09,658 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:14,208 INFO [train.py:903] (2/4) Epoch 16, batch 4400, loss[loss=0.2187, simple_loss=0.3076, pruned_loss=0.06487, over 19352.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.298, pruned_loss=0.0721, over 3811144.40 frames. ], batch size: 66, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:47:33,146 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106835.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:36,425 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 07:47:46,702 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 07:47:48,476 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7735, 1.8801, 2.1903, 2.4014, 1.7989, 2.3309, 2.2539, 1.9572], + device='cuda:2'), covar=tensor([0.3889, 0.3468, 0.1568, 0.2097, 0.3507, 0.1793, 0.4325, 0.3108], + device='cuda:2'), in_proj_covar=tensor([0.0839, 0.0887, 0.0678, 0.0899, 0.0820, 0.0757, 0.0808, 0.0742], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:47:57,651 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9608, 2.0564, 2.3077, 2.6457, 2.0631, 2.5981, 2.3915, 2.1233], + device='cuda:2'), covar=tensor([0.3591, 0.2992, 0.1435, 0.1939, 0.3148, 0.1563, 0.3817, 0.2688], + device='cuda:2'), in_proj_covar=tensor([0.0839, 0.0887, 0.0678, 0.0899, 0.0820, 0.0757, 0.0807, 0.0741], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 07:48:17,288 INFO [train.py:903] (2/4) Epoch 16, batch 4450, loss[loss=0.1989, simple_loss=0.2894, pruned_loss=0.05422, over 19792.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2971, pruned_loss=0.07159, over 3811056.30 frames. 
], batch size: 54, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:48:53,920 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.966e+02 6.259e+02 8.420e+02 1.632e+03, threshold=1.252e+03, percent-clipped=6.0 +2023-04-02 07:49:06,855 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.91 vs. limit=5.0 +2023-04-02 07:49:08,219 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-02 07:49:18,996 INFO [train.py:903] (2/4) Epoch 16, batch 4500, loss[loss=0.2441, simple_loss=0.3118, pruned_loss=0.0882, over 14578.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2966, pruned_loss=0.07133, over 3809141.56 frames. ], batch size: 136, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:49:34,368 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106931.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:50:03,544 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.99 vs. limit=5.0 +2023-04-02 07:50:11,292 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9689, 0.8486, 1.3002, 1.3160, 2.2856, 1.0054, 2.1706, 2.7688], + device='cuda:2'), covar=tensor([0.0800, 0.4047, 0.3465, 0.2228, 0.1354, 0.2884, 0.1281, 0.0541], + device='cuda:2'), in_proj_covar=tensor([0.0388, 0.0352, 0.0374, 0.0336, 0.0360, 0.0342, 0.0357, 0.0378], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:50:23,575 INFO [train.py:903] (2/4) Epoch 16, batch 4550, loss[loss=0.1814, simple_loss=0.2562, pruned_loss=0.05323, over 19741.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07128, over 3823007.62 frames. ], batch size: 45, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:50:31,670 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 07:50:54,381 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 07:50:59,932 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 4.883e+02 5.860e+02 7.136e+02 1.225e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-02 07:51:02,621 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:51:27,833 INFO [train.py:903] (2/4) Epoch 16, batch 4600, loss[loss=0.2204, simple_loss=0.3016, pruned_loss=0.06957, over 19785.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2967, pruned_loss=0.07097, over 3836935.36 frames. ], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:51:34,881 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,089 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,790 INFO [train.py:903] (2/4) Epoch 16, batch 4650, loss[loss=0.1924, simple_loss=0.2678, pruned_loss=0.05851, over 19801.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2973, pruned_loss=0.07149, over 3835687.44 frames. ], batch size: 49, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:52:47,211 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-04-02 07:52:59,800 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 07:53:01,367 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:53:07,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.220e+02 5.459e+02 6.581e+02 8.910e+02 1.601e+03, threshold=1.316e+03, percent-clipped=6.0 +2023-04-02 07:53:22,215 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2760, 2.0180, 1.5124, 1.3093, 1.8254, 1.1650, 1.1377, 1.7279], + device='cuda:2'), covar=tensor([0.0883, 0.0757, 0.1014, 0.0754, 0.0516, 0.1220, 0.0704, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0309, 0.0329, 0.0254, 0.0241, 0.0328, 0.0289, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 07:53:31,769 INFO [train.py:903] (2/4) Epoch 16, batch 4700, loss[loss=0.2578, simple_loss=0.3398, pruned_loss=0.08787, over 19444.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2964, pruned_loss=0.0708, over 3839648.69 frames. ], batch size: 64, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:53:55,910 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 07:54:20,576 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107158.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 07:54:36,809 INFO [train.py:903] (2/4) Epoch 16, batch 4750, loss[loss=0.1608, simple_loss=0.239, pruned_loss=0.04136, over 19322.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2956, pruned_loss=0.07032, over 3831517.52 frames. ], batch size: 44, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:54:37,084 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107170.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:54:56,385 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3538, 1.3537, 1.8210, 1.5228, 2.7144, 3.6870, 3.4736, 3.8662], + device='cuda:2'), covar=tensor([0.1555, 0.3557, 0.3071, 0.2096, 0.0616, 0.0211, 0.0190, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0306, 0.0335, 0.0255, 0.0229, 0.0174, 0.0210, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 07:54:58,938 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:11,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.545e+02 6.621e+02 8.650e+02 1.971e+03, threshold=1.324e+03, percent-clipped=6.0 +2023-04-02 07:55:30,505 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:40,218 INFO [train.py:903] (2/4) Epoch 16, batch 4800, loss[loss=0.1855, simple_loss=0.2632, pruned_loss=0.05388, over 19414.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2951, pruned_loss=0.07019, over 3833069.55 frames. 
], batch size: 48, lr: 5.14e-03, grad_scale: 8.0 +2023-04-02 07:56:26,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1532, 1.7054, 1.7743, 2.6003, 2.1743, 2.5538, 2.3968, 2.1510], + device='cuda:2'), covar=tensor([0.0806, 0.1034, 0.1061, 0.0893, 0.0843, 0.0685, 0.0894, 0.0673], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0221, 0.0222, 0.0243, 0.0225, 0.0206, 0.0188, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 07:56:41,878 INFO [train.py:903] (2/4) Epoch 16, batch 4850, loss[loss=0.1963, simple_loss=0.268, pruned_loss=0.0623, over 19384.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2947, pruned_loss=0.06979, over 3821345.30 frames. ], batch size: 47, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:07,080 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 07:57:19,837 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.930e+02 6.295e+02 8.478e+02 1.665e+03, threshold=1.259e+03, percent-clipped=1.0 +2023-04-02 07:57:27,490 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 07:57:32,780 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 07:57:32,809 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 07:57:43,098 INFO [train.py:903] (2/4) Epoch 16, batch 4900, loss[loss=0.234, simple_loss=0.3064, pruned_loss=0.08079, over 19787.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2957, pruned_loss=0.07085, over 3820185.91 frames. ], batch size: 56, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:43,116 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 07:58:04,185 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 07:58:46,427 INFO [train.py:903] (2/4) Epoch 16, batch 4950, loss[loss=0.1749, simple_loss=0.2556, pruned_loss=0.04714, over 19847.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07067, over 3821917.86 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:04,244 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 07:59:22,734 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 5.680e+02 6.678e+02 8.404e+02 2.020e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 07:59:27,626 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 07:59:48,956 INFO [train.py:903] (2/4) Epoch 16, batch 5000, loss[loss=0.2329, simple_loss=0.3121, pruned_loss=0.0768, over 19659.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2959, pruned_loss=0.07045, over 3818575.81 frames. ], batch size: 55, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:58,935 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 08:00:09,009 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 08:00:50,457 INFO [train.py:903] (2/4) Epoch 16, batch 5050, loss[loss=0.1762, simple_loss=0.2491, pruned_loss=0.05169, over 19748.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2982, pruned_loss=0.07181, over 3791471.61 frames. ], batch size: 46, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:00:57,386 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-02 08:01:27,876 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.470e+02 6.454e+02 8.047e+02 2.188e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 08:01:27,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 08:01:30,406 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107502.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:01:44,959 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:01:51,917 INFO [train.py:903] (2/4) Epoch 16, batch 5100, loss[loss=0.1921, simple_loss=0.2785, pruned_loss=0.0529, over 19667.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2978, pruned_loss=0.07169, over 3800778.01 frames. ], batch size: 55, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:02:02,202 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:02:04,280 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 08:02:08,825 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 08:02:13,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 08:02:54,727 INFO [train.py:903] (2/4) Epoch 16, batch 5150, loss[loss=0.2067, simple_loss=0.2821, pruned_loss=0.06568, over 19691.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2979, pruned_loss=0.0717, over 3802052.96 frames. ], batch size: 53, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:03:08,996 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:03:31,799 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.461e+02 4.950e+02 6.087e+02 7.766e+02 1.818e+03, threshold=1.217e+03, percent-clipped=6.0 +2023-04-02 08:03:43,255 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:03:54,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107617.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:03:57,177 INFO [train.py:903] (2/4) Epoch 16, batch 5200, loss[loss=0.2254, simple_loss=0.3002, pruned_loss=0.07534, over 18715.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3, pruned_loss=0.07326, over 3795075.16 frames. ], batch size: 74, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:04:08,689 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:04:09,541 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 08:04:55,314 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-02 08:04:59,588 INFO [train.py:903] (2/4) Epoch 16, batch 5250, loss[loss=0.2075, simple_loss=0.2861, pruned_loss=0.06447, over 19527.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2982, pruned_loss=0.07201, over 3804403.68 frames. ], batch size: 56, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:05:07,732 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:05:07,840 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8820, 1.6203, 1.5453, 1.9346, 1.6361, 1.6924, 1.5278, 1.7773], + device='cuda:2'), covar=tensor([0.0958, 0.1382, 0.1342, 0.0932, 0.1252, 0.0518, 0.1258, 0.0699], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0351, 0.0301, 0.0243, 0.0297, 0.0247, 0.0291, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:05:36,450 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.464e+02 6.488e+02 8.647e+02 1.622e+03, threshold=1.298e+03, percent-clipped=8.0 +2023-04-02 08:06:00,480 INFO [train.py:903] (2/4) Epoch 16, batch 5300, loss[loss=0.225, simple_loss=0.3071, pruned_loss=0.07148, over 19577.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2994, pruned_loss=0.07272, over 3792468.64 frames. ], batch size: 61, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:06:15,056 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4779, 2.3211, 1.7219, 1.5205, 2.1373, 1.3037, 1.2651, 1.8799], + device='cuda:2'), covar=tensor([0.1193, 0.0839, 0.0978, 0.0884, 0.0548, 0.1300, 0.0834, 0.0461], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0310, 0.0329, 0.0254, 0.0243, 0.0330, 0.0291, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:06:19,313 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 08:06:45,139 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:07:03,282 INFO [train.py:903] (2/4) Epoch 16, batch 5350, loss[loss=0.239, simple_loss=0.3154, pruned_loss=0.08131, over 18771.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.299, pruned_loss=0.07235, over 3795073.99 frames. ], batch size: 74, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:07:37,253 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 08:07:40,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.506e+02 5.884e+02 6.805e+02 1.610e+03, threshold=1.177e+03, percent-clipped=2.0 +2023-04-02 08:08:06,497 INFO [train.py:903] (2/4) Epoch 16, batch 5400, loss[loss=0.2268, simple_loss=0.2831, pruned_loss=0.0852, over 16402.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2982, pruned_loss=0.07214, over 3789505.81 frames. 
], batch size: 36, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:02,775 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4055, 1.2782, 1.4784, 1.6036, 2.9704, 1.1485, 2.1581, 3.3865], + device='cuda:2'), covar=tensor([0.0474, 0.2695, 0.2747, 0.1664, 0.0702, 0.2397, 0.1341, 0.0273], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0354, 0.0375, 0.0335, 0.0360, 0.0340, 0.0359, 0.0378], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:09:05,201 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0695, 2.0142, 1.6941, 1.5944, 1.3999, 1.5980, 0.5253, 1.0824], + device='cuda:2'), covar=tensor([0.0480, 0.0548, 0.0432, 0.0711, 0.1078, 0.0846, 0.1105, 0.0853], + device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0340, 0.0340, 0.0365, 0.0442, 0.0372, 0.0320, 0.0329], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:09:08,379 INFO [train.py:903] (2/4) Epoch 16, batch 5450, loss[loss=0.229, simple_loss=0.3081, pruned_loss=0.07492, over 19499.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2977, pruned_loss=0.07183, over 3801750.29 frames. ], batch size: 64, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:10,575 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107872.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:11,956 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107873.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:09:26,835 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:44,600 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107898.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:09:46,450 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.026e+02 5.228e+02 6.592e+02 8.752e+02 1.860e+03, threshold=1.318e+03, percent-clipped=11.0 +2023-04-02 08:09:58,260 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9041, 3.1832, 3.5151, 3.5165, 1.9296, 3.2800, 2.8807, 3.0544], + device='cuda:2'), covar=tensor([0.2378, 0.3115, 0.1029, 0.1474, 0.5846, 0.2091, 0.1219, 0.1892], + device='cuda:2'), in_proj_covar=tensor([0.0735, 0.0677, 0.0879, 0.0762, 0.0784, 0.0630, 0.0524, 0.0817], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 08:09:58,362 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:10:10,018 INFO [train.py:903] (2/4) Epoch 16, batch 5500, loss[loss=0.2008, simple_loss=0.2794, pruned_loss=0.06114, over 19582.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2977, pruned_loss=0.0716, over 3813211.08 frames. ], batch size: 52, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:10:34,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 08:11:05,916 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-02 08:11:13,099 INFO [train.py:903] (2/4) Epoch 16, batch 5550, loss[loss=0.2574, simple_loss=0.331, pruned_loss=0.09188, over 19662.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2969, pruned_loss=0.07102, over 3813510.09 frames. 
], batch size: 55, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:11:13,384 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3649, 3.9456, 2.6963, 3.5114, 1.0038, 3.7347, 3.7509, 3.7803], + device='cuda:2'), covar=tensor([0.0688, 0.1048, 0.1869, 0.0823, 0.3836, 0.0834, 0.0907, 0.1129], + device='cuda:2'), in_proj_covar=tensor([0.0468, 0.0384, 0.0462, 0.0328, 0.0391, 0.0400, 0.0398, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:11:19,674 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 08:11:34,481 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:11:50,041 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.933e+02 6.287e+02 7.608e+02 2.106e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-02 08:12:10,698 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 08:12:17,779 INFO [train.py:903] (2/4) Epoch 16, batch 5600, loss[loss=0.212, simple_loss=0.2832, pruned_loss=0.07046, over 17688.00 frames. ], tot_loss[loss=0.22, simple_loss=0.297, pruned_loss=0.07145, over 3810191.98 frames. ], batch size: 39, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:12:19,200 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:12:51,334 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4212, 4.0443, 3.1440, 3.4558, 1.7076, 3.7901, 3.7763, 3.9791], + device='cuda:2'), covar=tensor([0.0654, 0.1021, 0.1737, 0.1149, 0.3223, 0.0919, 0.1013, 0.1130], + device='cuda:2'), in_proj_covar=tensor([0.0467, 0.0382, 0.0460, 0.0327, 0.0391, 0.0400, 0.0398, 0.0426], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:13:17,747 INFO [train.py:903] (2/4) Epoch 16, batch 5650, loss[loss=0.2807, simple_loss=0.3403, pruned_loss=0.1105, over 18876.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2977, pruned_loss=0.0723, over 3795628.38 frames. ], batch size: 74, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:13:55,632 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108099.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:13:56,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.688e+02 6.420e+02 8.034e+02 1.662e+03, threshold=1.284e+03, percent-clipped=4.0 +2023-04-02 08:14:03,631 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 08:14:21,002 INFO [train.py:903] (2/4) Epoch 16, batch 5700, loss[loss=0.2008, simple_loss=0.2891, pruned_loss=0.05629, over 19512.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2976, pruned_loss=0.07179, over 3813858.62 frames. ], batch size: 54, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:14:42,910 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:15:06,030 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-02 08:15:22,679 INFO [train.py:903] (2/4) Epoch 16, batch 5750, loss[loss=0.1907, simple_loss=0.27, pruned_loss=0.05569, over 19679.00 frames. 
], tot_loss[loss=0.2201, simple_loss=0.2968, pruned_loss=0.07167, over 3819560.48 frames. ], batch size: 53, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:15:22,749 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 08:15:33,019 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 08:15:36,706 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 08:16:00,583 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.860e+02 6.382e+02 7.922e+02 1.732e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-02 08:16:19,195 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108214.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:16:26,740 INFO [train.py:903] (2/4) Epoch 16, batch 5800, loss[loss=0.2537, simple_loss=0.3248, pruned_loss=0.09133, over 19078.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2967, pruned_loss=0.07166, over 3813114.32 frames. ], batch size: 69, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:16:54,039 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:25,849 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:27,716 INFO [train.py:903] (2/4) Epoch 16, batch 5850, loss[loss=0.2017, simple_loss=0.2821, pruned_loss=0.06063, over 19630.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.297, pruned_loss=0.07168, over 3819704.30 frames. ], batch size: 50, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:05,292 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.738e+02 5.463e+02 6.888e+02 8.802e+02 1.964e+03, threshold=1.378e+03, percent-clipped=5.0 +2023-04-02 08:18:28,341 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 08:18:29,521 INFO [train.py:903] (2/4) Epoch 16, batch 5900, loss[loss=0.2279, simple_loss=0.3058, pruned_loss=0.075, over 19331.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2969, pruned_loss=0.07144, over 3819715.43 frames. ], batch size: 66, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:52,003 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 08:18:53,519 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0909, 1.3386, 1.7843, 1.3150, 2.7563, 3.7077, 3.4308, 3.9726], + device='cuda:2'), covar=tensor([0.1735, 0.3629, 0.3146, 0.2318, 0.0562, 0.0205, 0.0193, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0306, 0.0335, 0.0255, 0.0230, 0.0173, 0.0207, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:18:59,385 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:19:15,952 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0554, 1.1742, 1.6832, 1.0511, 2.4336, 3.3465, 3.0537, 3.5533], + device='cuda:2'), covar=tensor([0.1648, 0.3685, 0.3222, 0.2438, 0.0605, 0.0177, 0.0209, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0306, 0.0335, 0.0255, 0.0230, 0.0173, 0.0207, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:19:30,982 INFO [train.py:903] (2/4) Epoch 16, batch 5950, loss[loss=0.2105, simple_loss=0.2798, pruned_loss=0.07059, over 19747.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2972, pruned_loss=0.07165, over 3832881.87 frames. ], batch size: 47, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:19:59,869 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:09,618 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 5.248e+02 6.439e+02 7.965e+02 2.252e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-02 08:20:30,416 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:35,475 INFO [train.py:903] (2/4) Epoch 16, batch 6000, loss[loss=0.2928, simple_loss=0.3642, pruned_loss=0.1107, over 18172.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07132, over 3827696.03 frames. ], batch size: 83, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:20:35,475 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 08:20:47,907 INFO [train.py:937] (2/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2723, pruned_loss=0.03545, over 944034.00 frames. +2023-04-02 08:20:47,909 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 08:21:05,963 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 08:21:12,811 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6569, 1.5447, 1.5294, 2.1656, 1.7631, 2.1000, 2.0956, 1.9066], + device='cuda:2'), covar=tensor([0.0772, 0.0860, 0.0951, 0.0732, 0.0711, 0.0627, 0.0728, 0.0577], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0222, 0.0223, 0.0245, 0.0226, 0.0207, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 08:21:25,618 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:21:51,717 INFO [train.py:903] (2/4) Epoch 16, batch 6050, loss[loss=0.2267, simple_loss=0.3071, pruned_loss=0.0731, over 18640.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2976, pruned_loss=0.07223, over 3818919.23 frames. 
], batch size: 74, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:21:52,136 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108470.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:14,353 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2838, 1.3554, 1.5176, 1.4131, 1.8073, 1.8208, 1.8139, 0.6014], + device='cuda:2'), covar=tensor([0.2316, 0.4069, 0.2493, 0.1886, 0.1548, 0.2182, 0.1333, 0.4178], + device='cuda:2'), in_proj_covar=tensor([0.0507, 0.0607, 0.0660, 0.0460, 0.0604, 0.0511, 0.0648, 0.0517], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:22:22,206 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108495.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:28,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.272e+02 6.402e+02 8.353e+02 1.883e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 08:22:53,807 INFO [train.py:903] (2/4) Epoch 16, batch 6100, loss[loss=0.2522, simple_loss=0.3233, pruned_loss=0.09061, over 13036.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.297, pruned_loss=0.07137, over 3820530.91 frames. ], batch size: 136, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:23:56,088 INFO [train.py:903] (2/4) Epoch 16, batch 6150, loss[loss=0.2644, simple_loss=0.3311, pruned_loss=0.09884, over 19436.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2964, pruned_loss=0.07096, over 3826825.07 frames. ], batch size: 62, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:24:26,215 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 08:24:28,859 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:24:35,750 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.845e+02 6.012e+02 7.583e+02 1.796e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 08:24:58,801 INFO [train.py:903] (2/4) Epoch 16, batch 6200, loss[loss=0.2273, simple_loss=0.3068, pruned_loss=0.07386, over 19859.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2958, pruned_loss=0.07072, over 3826367.39 frames. ], batch size: 52, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:25:33,689 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2014, 2.2092, 2.4844, 3.0879, 2.2030, 2.8284, 2.6305, 2.2716], + device='cuda:2'), covar=tensor([0.4077, 0.3819, 0.1653, 0.2436, 0.4146, 0.1976, 0.3918, 0.2949], + device='cuda:2'), in_proj_covar=tensor([0.0848, 0.0892, 0.0680, 0.0908, 0.0828, 0.0766, 0.0815, 0.0746], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 08:26:02,187 INFO [train.py:903] (2/4) Epoch 16, batch 6250, loss[loss=0.2432, simple_loss=0.3129, pruned_loss=0.08679, over 13400.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2953, pruned_loss=0.07045, over 3831089.11 frames. ], batch size: 135, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:22,912 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108687.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:26:34,474 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 08:26:40,075 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 4.966e+02 6.026e+02 7.805e+02 1.726e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 08:27:03,146 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 08:27:04,698 INFO [train.py:903] (2/4) Epoch 16, batch 6300, loss[loss=0.2581, simple_loss=0.3279, pruned_loss=0.09411, over 19314.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2951, pruned_loss=0.07012, over 3833061.88 frames. ], batch size: 70, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:28:06,338 INFO [train.py:903] (2/4) Epoch 16, batch 6350, loss[loss=0.2045, simple_loss=0.2856, pruned_loss=0.06169, over 19734.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2964, pruned_loss=0.07087, over 3812536.20 frames. ], batch size: 51, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:28:38,952 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:47,234 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.125e+02 6.267e+02 8.166e+02 1.468e+03, threshold=1.253e+03, percent-clipped=8.0 +2023-04-02 08:28:48,870 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:56,974 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:29:09,789 INFO [train.py:903] (2/4) Epoch 16, batch 6400, loss[loss=0.249, simple_loss=0.3189, pruned_loss=0.08956, over 18828.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2973, pruned_loss=0.07119, over 3816785.58 frames. ], batch size: 74, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:29:30,125 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 08:30:14,207 INFO [train.py:903] (2/4) Epoch 16, batch 6450, loss[loss=0.2129, simple_loss=0.2957, pruned_loss=0.06501, over 19761.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2968, pruned_loss=0.07124, over 3817142.71 frames. ], batch size: 54, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:30:33,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-02 08:30:52,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.109e+02 6.250e+02 7.655e+02 1.750e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-02 08:30:52,634 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8788, 2.2337, 2.5138, 2.8435, 2.3479, 2.5281, 2.2524, 2.7664], + device='cuda:2'), covar=tensor([0.0756, 0.1599, 0.1107, 0.0742, 0.1287, 0.0414, 0.1118, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0352, 0.0300, 0.0244, 0.0296, 0.0247, 0.0291, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:31:00,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 08:31:04,049 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:31:18,889 INFO [train.py:903] (2/4) Epoch 16, batch 6500, loss[loss=0.1598, simple_loss=0.2408, pruned_loss=0.03939, over 19817.00 frames. 
], tot_loss[loss=0.22, simple_loss=0.2972, pruned_loss=0.07135, over 3807217.36 frames. ], batch size: 49, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:31:24,489 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 08:31:40,944 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:32:20,051 INFO [train.py:903] (2/4) Epoch 16, batch 6550, loss[loss=0.195, simple_loss=0.2737, pruned_loss=0.05813, over 19477.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2969, pruned_loss=0.07102, over 3807347.37 frames. ], batch size: 49, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:32:58,929 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.624e+02 5.967e+02 6.821e+02 1.232e+03, threshold=1.193e+03, percent-clipped=0.0 +2023-04-02 08:33:21,265 INFO [train.py:903] (2/4) Epoch 16, batch 6600, loss[loss=0.1684, simple_loss=0.2474, pruned_loss=0.04469, over 15099.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2966, pruned_loss=0.07063, over 3817024.05 frames. ], batch size: 33, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:34:03,936 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:05,062 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109055.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:34:08,552 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:22,843 INFO [train.py:903] (2/4) Epoch 16, batch 6650, loss[loss=0.2023, simple_loss=0.2832, pruned_loss=0.06073, over 19536.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2967, pruned_loss=0.07091, over 3823807.22 frames. ], batch size: 54, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:34:25,985 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.04 vs. limit=5.0 +2023-04-02 08:34:41,452 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109083.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:56,368 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4080, 1.4164, 1.7537, 1.2448, 2.5319, 3.1501, 2.9818, 3.3605], + device='cuda:2'), covar=tensor([0.1482, 0.3477, 0.3047, 0.2420, 0.0691, 0.0314, 0.0228, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0308, 0.0336, 0.0257, 0.0230, 0.0174, 0.0207, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:35:01,852 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.420e+02 6.464e+02 8.289e+02 2.034e+03, threshold=1.293e+03, percent-clipped=5.0 +2023-04-02 08:35:15,768 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8795, 4.3599, 4.6303, 4.6585, 1.7032, 4.3954, 3.7529, 4.3055], + device='cuda:2'), covar=tensor([0.1523, 0.0767, 0.0552, 0.0553, 0.5516, 0.0690, 0.0644, 0.1103], + device='cuda:2'), in_proj_covar=tensor([0.0739, 0.0680, 0.0884, 0.0765, 0.0791, 0.0633, 0.0529, 0.0816], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 08:35:27,575 INFO [train.py:903] (2/4) Epoch 16, batch 6700, loss[loss=0.2436, simple_loss=0.3143, pruned_loss=0.08648, over 19473.00 frames. 
], tot_loss[loss=0.2188, simple_loss=0.2961, pruned_loss=0.07072, over 3815452.36 frames. ], batch size: 64, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:06,058 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:07,902 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 08:36:19,894 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:25,880 INFO [train.py:903] (2/4) Epoch 16, batch 6750, loss[loss=0.1877, simple_loss=0.2685, pruned_loss=0.05346, over 19405.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2953, pruned_loss=0.07025, over 3806463.14 frames. ], batch size: 48, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:50,496 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:03,550 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 4.897e+02 5.778e+02 7.062e+02 1.289e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-02 08:37:15,730 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:24,184 INFO [train.py:903] (2/4) Epoch 16, batch 6800, loss[loss=0.2137, simple_loss=0.2899, pruned_loss=0.06882, over 19595.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2948, pruned_loss=0.07001, over 3813777.95 frames. ], batch size: 52, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:37:30,146 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9298, 4.4599, 2.7684, 3.8909, 0.9136, 4.3914, 4.2694, 4.3250], + device='cuda:2'), covar=tensor([0.0566, 0.1018, 0.1979, 0.0782, 0.4154, 0.0643, 0.0860, 0.1100], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0383, 0.0465, 0.0332, 0.0391, 0.0400, 0.0401, 0.0428], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:38:09,830 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 08:38:10,300 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 08:38:13,449 INFO [train.py:903] (2/4) Epoch 17, batch 0, loss[loss=0.2237, simple_loss=0.3069, pruned_loss=0.07024, over 19659.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3069, pruned_loss=0.07024, over 19659.00 frames. ], batch size: 55, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:38:13,450 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 08:38:25,408 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4342, 1.4201, 1.6377, 1.5612, 2.2610, 1.9990, 2.2581, 1.2194], + device='cuda:2'), covar=tensor([0.2152, 0.4019, 0.2432, 0.1805, 0.1404, 0.2076, 0.1290, 0.4108], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0612, 0.0664, 0.0460, 0.0606, 0.0512, 0.0650, 0.0520], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:38:26,017 INFO [train.py:937] (2/4) Epoch 17, validation: loss=0.1721, simple_loss=0.2728, pruned_loss=0.03571, over 944034.00 frames. 
+2023-04-02 08:38:26,018 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 08:38:39,457 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 08:38:51,302 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:29,173 INFO [train.py:903] (2/4) Epoch 17, batch 50, loss[loss=0.208, simple_loss=0.3024, pruned_loss=0.05685, over 19529.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2971, pruned_loss=0.06987, over 860942.02 frames. ], batch size: 56, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:39:32,717 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 5.319e+02 6.338e+02 7.961e+02 1.981e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-02 08:39:43,562 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:45,687 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:59,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 08:40:13,452 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:14,506 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:28,821 INFO [train.py:903] (2/4) Epoch 17, batch 100, loss[loss=0.2127, simple_loss=0.3, pruned_loss=0.06271, over 19540.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2929, pruned_loss=0.06819, over 1521536.76 frames. ], batch size: 56, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:40:36,772 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 08:40:53,541 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5681, 3.0834, 2.4620, 2.4898, 2.3279, 2.5112, 1.1707, 2.2209], + device='cuda:2'), covar=tensor([0.0457, 0.0487, 0.0596, 0.0880, 0.0949, 0.0993, 0.1107, 0.0902], + device='cuda:2'), in_proj_covar=tensor([0.0345, 0.0346, 0.0343, 0.0368, 0.0444, 0.0374, 0.0323, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:41:29,302 INFO [train.py:903] (2/4) Epoch 17, batch 150, loss[loss=0.2336, simple_loss=0.3096, pruned_loss=0.07876, over 19669.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2952, pruned_loss=0.07037, over 2021002.73 frames. ], batch size: 58, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:41:30,560 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109399.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:41:32,677 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.062e+02 6.078e+02 8.331e+02 1.364e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 08:41:34,551 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.13 vs. limit=5.0 +2023-04-02 08:41:44,716 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.00 vs. 
limit=5.0 +2023-04-02 08:42:07,836 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3496, 1.4493, 2.1009, 1.6362, 2.9727, 2.2579, 3.2934, 1.3705], + device='cuda:2'), covar=tensor([0.2539, 0.4318, 0.2548, 0.1986, 0.1639, 0.2303, 0.1640, 0.4220], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0607, 0.0659, 0.0458, 0.0601, 0.0508, 0.0646, 0.0517], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:42:22,300 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 08:42:29,325 INFO [train.py:903] (2/4) Epoch 17, batch 200, loss[loss=0.1977, simple_loss=0.2846, pruned_loss=0.05541, over 19659.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2961, pruned_loss=0.07085, over 2427086.22 frames. ], batch size: 58, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:25,348 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5649, 2.3633, 1.7757, 1.5138, 2.1841, 1.4050, 1.3360, 2.0178], + device='cuda:2'), covar=tensor([0.0916, 0.0670, 0.0950, 0.0835, 0.0495, 0.1219, 0.0706, 0.0406], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0305, 0.0326, 0.0254, 0.0240, 0.0326, 0.0285, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:43:32,826 INFO [train.py:903] (2/4) Epoch 17, batch 250, loss[loss=0.1744, simple_loss=0.2538, pruned_loss=0.04747, over 19763.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2958, pruned_loss=0.07042, over 2736539.45 frames. ], batch size: 46, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:36,246 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.403e+02 5.064e+02 6.047e+02 7.271e+02 1.663e+03, threshold=1.209e+03, percent-clipped=2.0 +2023-04-02 08:43:52,429 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109514.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:44:04,265 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:35,511 INFO [train.py:903] (2/4) Epoch 17, batch 300, loss[loss=0.2629, simple_loss=0.3241, pruned_loss=0.1008, over 13187.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2956, pruned_loss=0.07052, over 2971036.33 frames. ], batch size: 136, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:44:37,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,078 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 08:45:36,079 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109597.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:45:36,909 INFO [train.py:903] (2/4) Epoch 17, batch 350, loss[loss=0.2379, simple_loss=0.3061, pruned_loss=0.08484, over 19728.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2946, pruned_loss=0.07025, over 3161434.30 frames. ], batch size: 51, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:45:38,101 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 08:45:40,565 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.783e+02 5.858e+02 7.548e+02 1.929e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 08:46:33,318 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7036, 4.1542, 4.4221, 4.4122, 1.6068, 4.1637, 3.5487, 4.1004], + device='cuda:2'), covar=tensor([0.1636, 0.0983, 0.0614, 0.0677, 0.6101, 0.0856, 0.0690, 0.1171], + device='cuda:2'), in_proj_covar=tensor([0.0731, 0.0677, 0.0876, 0.0761, 0.0785, 0.0625, 0.0525, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 08:46:38,844 INFO [train.py:903] (2/4) Epoch 17, batch 400, loss[loss=0.1932, simple_loss=0.264, pruned_loss=0.06125, over 19766.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06965, over 3309209.25 frames. ], batch size: 47, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:46:49,106 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:09,945 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:16,745 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:20,111 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:40,320 INFO [train.py:903] (2/4) Epoch 17, batch 450, loss[loss=0.2303, simple_loss=0.31, pruned_loss=0.0753, over 19096.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.0701, over 3424790.36 frames. ], batch size: 69, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:47:44,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 5.202e+02 6.539e+02 8.058e+02 1.631e+03, threshold=1.308e+03, percent-clipped=6.0 +2023-04-02 08:48:12,275 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 08:48:13,418 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 08:48:44,999 INFO [train.py:903] (2/4) Epoch 17, batch 500, loss[loss=0.1918, simple_loss=0.267, pruned_loss=0.05826, over 19812.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2949, pruned_loss=0.07042, over 3518717.39 frames. ], batch size: 49, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:48:53,606 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1738, 5.1290, 6.0159, 5.9681, 2.0423, 5.6316, 4.7172, 5.6154], + device='cuda:2'), covar=tensor([0.1431, 0.0682, 0.0458, 0.0590, 0.5631, 0.0665, 0.0545, 0.1045], + device='cuda:2'), in_proj_covar=tensor([0.0734, 0.0676, 0.0876, 0.0761, 0.0784, 0.0625, 0.0525, 0.0809], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 08:48:59,856 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-04-02 08:49:11,994 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109770.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:49:12,979 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:16,864 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.18 vs. limit=5.0 +2023-04-02 08:49:44,277 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:44,316 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109795.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:49:47,264 INFO [train.py:903] (2/4) Epoch 17, batch 550, loss[loss=0.238, simple_loss=0.3173, pruned_loss=0.07932, over 19659.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.295, pruned_loss=0.07036, over 3596720.46 frames. ], batch size: 53, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:49:50,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.960e+02 5.345e+02 7.483e+02 9.617e+02 2.288e+03, threshold=1.497e+03, percent-clipped=10.0 +2023-04-02 08:49:56,062 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. limit=5.0 +2023-04-02 08:50:48,356 INFO [train.py:903] (2/4) Epoch 17, batch 600, loss[loss=0.2202, simple_loss=0.3004, pruned_loss=0.07001, over 19684.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2965, pruned_loss=0.07104, over 3652308.16 frames. ], batch size: 60, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:16,292 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4531, 1.5309, 2.0618, 1.9051, 3.1100, 4.0830, 3.9900, 4.4341], + device='cuda:2'), covar=tensor([0.1600, 0.3594, 0.3105, 0.2055, 0.0673, 0.0304, 0.0176, 0.0197], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0307, 0.0336, 0.0255, 0.0230, 0.0173, 0.0208, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:51:27,267 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 08:51:49,467 INFO [train.py:903] (2/4) Epoch 17, batch 650, loss[loss=0.2074, simple_loss=0.2819, pruned_loss=0.06645, over 19404.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2965, pruned_loss=0.0707, over 3699045.69 frames. 
], batch size: 48, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:53,023 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.642e+02 6.698e+02 8.791e+02 1.815e+03, threshold=1.340e+03, percent-clipped=2.0 +2023-04-02 08:52:21,973 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4382, 2.2311, 2.1028, 1.9608, 1.7720, 1.8826, 0.8135, 1.2760], + device='cuda:2'), covar=tensor([0.0532, 0.0530, 0.0401, 0.0739, 0.1042, 0.0844, 0.1071, 0.0939], + device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0346, 0.0344, 0.0368, 0.0444, 0.0376, 0.0324, 0.0330], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:52:27,865 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:44,004 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109941.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:51,845 INFO [train.py:903] (2/4) Epoch 17, batch 700, loss[loss=0.2093, simple_loss=0.2797, pruned_loss=0.06945, over 19852.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.295, pruned_loss=0.06938, over 3739401.14 frames. ], batch size: 52, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:52:58,995 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109952.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:53:57,830 INFO [train.py:903] (2/4) Epoch 17, batch 750, loss[loss=0.223, simple_loss=0.3011, pruned_loss=0.07248, over 19024.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.295, pruned_loss=0.06911, over 3770286.86 frames. ], batch size: 69, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:54:02,545 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.697e+02 5.712e+02 7.217e+02 1.165e+03, threshold=1.142e+03, percent-clipped=0.0 +2023-04-02 08:54:25,906 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:33,240 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,199 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110035.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,338 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2298, 2.0618, 1.9403, 1.7660, 1.5557, 1.7068, 0.5492, 1.1076], + device='cuda:2'), covar=tensor([0.0589, 0.0533, 0.0436, 0.0734, 0.1045, 0.0882, 0.1141, 0.0924], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0349, 0.0347, 0.0371, 0.0448, 0.0378, 0.0326, 0.0333], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 08:54:54,086 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3808, 1.9832, 2.0507, 2.9390, 2.1153, 2.7727, 2.5225, 2.4939], + device='cuda:2'), covar=tensor([0.0658, 0.0843, 0.0874, 0.0760, 0.0792, 0.0613, 0.0841, 0.0560], + device='cuda:2'), in_proj_covar=tensor([0.0206, 0.0218, 0.0221, 0.0241, 0.0224, 0.0205, 0.0186, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 08:55:00,572 INFO [train.py:903] (2/4) Epoch 17, batch 800, loss[loss=0.2045, simple_loss=0.287, pruned_loss=0.06095, over 19568.00 frames. 
], tot_loss[loss=0.2178, simple_loss=0.2958, pruned_loss=0.06991, over 3783529.30 frames. ], batch size: 52, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:55:04,516 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:05,681 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:10,553 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:14,901 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:55:17,670 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:35,585 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:03,227 INFO [train.py:903] (2/4) Epoch 17, batch 850, loss[loss=0.202, simple_loss=0.2827, pruned_loss=0.06062, over 19520.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2943, pruned_loss=0.06938, over 3790148.65 frames. ], batch size: 54, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:56:06,199 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.213e+02 6.563e+02 8.363e+02 2.159e+03, threshold=1.313e+03, percent-clipped=10.0 +2023-04-02 08:56:42,112 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2864, 3.9799, 2.9656, 3.5471, 1.9374, 3.7649, 3.7560, 3.8576], + device='cuda:2'), covar=tensor([0.0744, 0.0939, 0.1847, 0.0773, 0.2676, 0.0799, 0.0906, 0.1213], + device='cuda:2'), in_proj_covar=tensor([0.0470, 0.0382, 0.0463, 0.0330, 0.0387, 0.0399, 0.0398, 0.0429], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 08:56:42,130 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:47,720 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:51,221 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:56,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 08:57:04,359 INFO [train.py:903] (2/4) Epoch 17, batch 900, loss[loss=0.2005, simple_loss=0.2801, pruned_loss=0.06049, over 19775.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2941, pruned_loss=0.06988, over 3805152.45 frames. ], batch size: 54, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:05,928 INFO [train.py:903] (2/4) Epoch 17, batch 950, loss[loss=0.1959, simple_loss=0.2687, pruned_loss=0.06152, over 19760.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2949, pruned_loss=0.06989, over 3821055.49 frames. ], batch size: 47, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:08,349 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 08:58:09,604 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.867e+02 6.255e+02 8.229e+02 2.250e+03, threshold=1.251e+03, percent-clipped=5.0 +2023-04-02 08:59:08,995 INFO [train.py:903] (2/4) Epoch 17, batch 1000, loss[loss=0.206, simple_loss=0.2907, pruned_loss=0.06062, over 19600.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2944, pruned_loss=0.06954, over 3823059.82 frames. ], batch size: 57, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:05,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 09:00:11,636 INFO [train.py:903] (2/4) Epoch 17, batch 1050, loss[loss=0.2489, simple_loss=0.3243, pruned_loss=0.08676, over 18715.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2944, pruned_loss=0.06988, over 3829515.42 frames. ], batch size: 74, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:15,419 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:16,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 4.918e+02 6.228e+02 8.050e+02 1.872e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 09:00:28,120 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:40,510 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:44,967 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 09:01:00,198 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:01,483 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:13,840 INFO [train.py:903] (2/4) Epoch 17, batch 1100, loss[loss=0.227, simple_loss=0.3088, pruned_loss=0.07256, over 18755.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.06998, over 3835372.10 frames. ], batch size: 74, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:01:54,277 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:09,857 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:16,589 INFO [train.py:903] (2/4) Epoch 17, batch 1150, loss[loss=0.2262, simple_loss=0.3061, pruned_loss=0.0731, over 19794.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2933, pruned_loss=0.06925, over 3833567.77 frames. ], batch size: 56, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:02:21,350 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.729e+02 5.705e+02 7.310e+02 1.426e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-02 09:02:28,351 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:43,182 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:20,434 INFO [train.py:903] (2/4) Epoch 17, batch 1200, loss[loss=0.2221, simple_loss=0.2972, pruned_loss=0.07351, over 19662.00 frames. 
], tot_loss[loss=0.2173, simple_loss=0.2947, pruned_loss=0.06992, over 3817163.53 frames. ], batch size: 58, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 09:03:22,075 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.04 vs. limit=5.0 +2023-04-02 09:03:49,891 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:53,242 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 09:03:55,654 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:19,621 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:23,819 INFO [train.py:903] (2/4) Epoch 17, batch 1250, loss[loss=0.2033, simple_loss=0.2725, pruned_loss=0.06704, over 19808.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2948, pruned_loss=0.06978, over 3830812.52 frames. ], batch size: 49, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:04:28,265 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.334e+02 6.614e+02 7.905e+02 1.343e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-02 09:04:48,482 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3479, 3.0564, 2.2146, 2.7673, 0.7565, 2.9655, 2.8804, 3.0020], + device='cuda:2'), covar=tensor([0.1054, 0.1435, 0.2011, 0.1082, 0.3917, 0.1030, 0.1074, 0.1278], + device='cuda:2'), in_proj_covar=tensor([0.0471, 0.0385, 0.0465, 0.0331, 0.0389, 0.0402, 0.0400, 0.0429], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:04:52,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110521.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:05:25,877 INFO [train.py:903] (2/4) Epoch 17, batch 1300, loss[loss=0.192, simple_loss=0.2634, pruned_loss=0.06029, over 19733.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2952, pruned_loss=0.06997, over 3838268.33 frames. ], batch size: 46, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:02,880 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-02 09:06:14,194 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:20,230 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:26,785 INFO [train.py:903] (2/4) Epoch 17, batch 1350, loss[loss=0.2458, simple_loss=0.3109, pruned_loss=0.09037, over 12920.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2956, pruned_loss=0.0702, over 3828756.71 frames. 
], batch size: 136, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:31,265 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 4.458e+02 6.078e+02 8.226e+02 1.667e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 09:06:35,117 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5360, 1.7408, 2.2355, 1.8807, 3.1885, 2.4798, 3.4381, 1.9169], + device='cuda:2'), covar=tensor([0.2600, 0.4393, 0.2841, 0.2018, 0.1666, 0.2395, 0.1821, 0.3866], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0621, 0.0675, 0.0467, 0.0612, 0.0518, 0.0658, 0.0525], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 09:07:07,180 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:26,698 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:28,016 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9424, 1.1781, 1.5095, 0.6413, 1.9307, 2.2698, 2.0544, 2.4928], + device='cuda:2'), covar=tensor([0.1586, 0.3592, 0.3133, 0.2724, 0.0735, 0.0382, 0.0350, 0.0368], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0308, 0.0337, 0.0258, 0.0232, 0.0175, 0.0209, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:07:29,984 INFO [train.py:903] (2/4) Epoch 17, batch 1400, loss[loss=0.2584, simple_loss=0.3256, pruned_loss=0.09562, over 13798.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2948, pruned_loss=0.07013, over 3822769.03 frames. ], batch size: 135, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:07:51,399 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110665.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:08,777 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:32,859 INFO [train.py:903] (2/4) Epoch 17, batch 1450, loss[loss=0.2188, simple_loss=0.2948, pruned_loss=0.07143, over 19669.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2954, pruned_loss=0.07042, over 3826071.42 frames. ], batch size: 55, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:08:32,909 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 09:08:38,609 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.047e+02 5.721e+02 7.117e+02 1.861e+03, threshold=1.144e+03, percent-clipped=3.0 +2023-04-02 09:09:35,388 INFO [train.py:903] (2/4) Epoch 17, batch 1500, loss[loss=0.2596, simple_loss=0.3316, pruned_loss=0.09375, over 13568.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2949, pruned_loss=0.07012, over 3820930.05 frames. 
], batch size: 137, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:09:38,229 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:42,835 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:50,114 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:11,734 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:14,309 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110777.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:16,445 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2780, 3.8233, 3.9374, 3.9283, 1.4192, 3.7169, 3.2491, 3.6778], + device='cuda:2'), covar=tensor([0.1558, 0.0804, 0.0674, 0.0755, 0.5788, 0.0906, 0.0669, 0.1177], + device='cuda:2'), in_proj_covar=tensor([0.0742, 0.0686, 0.0885, 0.0771, 0.0792, 0.0638, 0.0534, 0.0818], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 09:10:17,700 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110780.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:27,706 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 09:10:35,427 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:38,194 INFO [train.py:903] (2/4) Epoch 17, batch 1550, loss[loss=0.2339, simple_loss=0.3093, pruned_loss=0.07928, over 13686.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.294, pruned_loss=0.06933, over 3808910.31 frames. ], batch size: 136, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:10:44,288 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:44,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.421e+02 5.256e+02 6.667e+02 1.625e+03, threshold=1.051e+03, percent-clipped=2.0 +2023-04-02 09:11:35,765 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:11:42,169 INFO [train.py:903] (2/4) Epoch 17, batch 1600, loss[loss=0.2146, simple_loss=0.2995, pruned_loss=0.06486, over 19728.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2937, pruned_loss=0.06944, over 3810825.41 frames. ], batch size: 59, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:11:42,621 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:05,494 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-02 09:12:05,851 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0265, 1.6942, 1.6653, 2.5588, 2.0738, 2.3776, 2.3227, 2.1650], + device='cuda:2'), covar=tensor([0.0752, 0.0920, 0.1042, 0.0822, 0.0798, 0.0641, 0.0845, 0.0651], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0220, 0.0222, 0.0244, 0.0226, 0.0208, 0.0188, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 09:12:07,091 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:12,581 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:29,542 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:45,458 INFO [train.py:903] (2/4) Epoch 17, batch 1650, loss[loss=0.2473, simple_loss=0.3184, pruned_loss=0.08815, over 19718.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.294, pruned_loss=0.06949, over 3819082.24 frames. ], batch size: 63, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:12:51,301 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 5.600e+02 6.791e+02 9.379e+02 3.114e+03, threshold=1.358e+03, percent-clipped=15.0 +2023-04-02 09:13:30,818 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2692, 1.8803, 1.8634, 2.6738, 2.0194, 2.4610, 2.4578, 2.4856], + device='cuda:2'), covar=tensor([0.0750, 0.0909, 0.0978, 0.0871, 0.0862, 0.0718, 0.0896, 0.0589], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0221, 0.0224, 0.0245, 0.0227, 0.0209, 0.0189, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 09:13:47,394 INFO [train.py:903] (2/4) Epoch 17, batch 1700, loss[loss=0.2181, simple_loss=0.2975, pruned_loss=0.06934, over 19613.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2942, pruned_loss=0.06957, over 3823076.21 frames. ], batch size: 61, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:19,101 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:24,112 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110976.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:29,658 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 09:14:49,204 INFO [train.py:903] (2/4) Epoch 17, batch 1750, loss[loss=0.2444, simple_loss=0.3229, pruned_loss=0.08295, over 17296.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2942, pruned_loss=0.0699, over 3810495.11 frames. 
], batch size: 101, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:55,345 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.870e+02 5.792e+02 7.151e+02 1.845e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 09:15:03,670 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2293, 1.9052, 1.8969, 2.7042, 1.9928, 2.5290, 2.5306, 2.4645], + device='cuda:2'), covar=tensor([0.0737, 0.0915, 0.0955, 0.0834, 0.0866, 0.0704, 0.0832, 0.0579], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0221, 0.0223, 0.0245, 0.0227, 0.0208, 0.0188, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 09:15:14,972 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:37,642 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111036.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:39,754 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1146, 5.1681, 5.9860, 5.9167, 2.1239, 5.6117, 4.7624, 5.5824], + device='cuda:2'), covar=tensor([0.1521, 0.0758, 0.0526, 0.0548, 0.5664, 0.0633, 0.0572, 0.1066], + device='cuda:2'), in_proj_covar=tensor([0.0738, 0.0679, 0.0883, 0.0766, 0.0788, 0.0638, 0.0530, 0.0809], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 09:15:43,368 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111041.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:51,885 INFO [train.py:903] (2/4) Epoch 17, batch 1800, loss[loss=0.2446, simple_loss=0.3251, pruned_loss=0.08203, over 18664.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2938, pruned_loss=0.06917, over 3819774.53 frames. ], batch size: 74, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:15:57,978 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:10,680 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:28,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:44,397 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:51,937 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 09:16:56,848 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7760, 1.8723, 2.0882, 2.3196, 1.7883, 2.2366, 2.1960, 1.9906], + device='cuda:2'), covar=tensor([0.3589, 0.2980, 0.1542, 0.1890, 0.3162, 0.1638, 0.3655, 0.2686], + device='cuda:2'), in_proj_covar=tensor([0.0853, 0.0902, 0.0682, 0.0909, 0.0833, 0.0770, 0.0812, 0.0752], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 09:16:57,617 INFO [train.py:903] (2/4) Epoch 17, batch 1850, loss[loss=0.2511, simple_loss=0.3222, pruned_loss=0.09004, over 17454.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2942, pruned_loss=0.06961, over 3812394.81 frames. 
], batch size: 101, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:16:57,845 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:17:03,773 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.241e+02 6.549e+02 7.790e+02 1.838e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-02 09:17:29,439 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 09:18:00,994 INFO [train.py:903] (2/4) Epoch 17, batch 1900, loss[loss=0.1885, simple_loss=0.2663, pruned_loss=0.0553, over 19603.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2933, pruned_loss=0.06851, over 3821318.75 frames. ], batch size: 50, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:18:17,319 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 09:18:24,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 09:18:34,730 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4142, 1.5534, 1.8130, 1.6291, 2.4026, 2.1679, 2.4686, 1.0890], + device='cuda:2'), covar=tensor([0.2268, 0.3804, 0.2314, 0.1863, 0.1511, 0.2032, 0.1408, 0.4104], + device='cuda:2'), in_proj_covar=tensor([0.0506, 0.0613, 0.0670, 0.0462, 0.0608, 0.0513, 0.0651, 0.0521], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:18:49,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 09:19:02,960 INFO [train.py:903] (2/4) Epoch 17, batch 1950, loss[loss=0.1961, simple_loss=0.2851, pruned_loss=0.05353, over 19677.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2938, pruned_loss=0.06905, over 3803026.41 frames. ], batch size: 58, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:19:08,708 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 5.118e+02 6.155e+02 7.161e+02 1.490e+03, threshold=1.231e+03, percent-clipped=2.0 +2023-04-02 09:19:10,544 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-04-02 09:19:23,906 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:24,967 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8927, 1.7020, 2.0662, 1.6093, 4.4294, 1.0245, 2.6496, 4.8469], + device='cuda:2'), covar=tensor([0.0431, 0.2573, 0.2389, 0.1912, 0.0748, 0.2676, 0.1276, 0.0170], + device='cuda:2'), in_proj_covar=tensor([0.0389, 0.0352, 0.0374, 0.0335, 0.0360, 0.0342, 0.0358, 0.0381], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:19:44,079 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111230.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:59,232 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:20:03,523 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3637, 1.3511, 1.2798, 1.6871, 1.2703, 1.7084, 1.6069, 1.5715], + device='cuda:2'), covar=tensor([0.0862, 0.0953, 0.1058, 0.0755, 0.0883, 0.0727, 0.0838, 0.0711], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0222, 0.0224, 0.0245, 0.0228, 0.0209, 0.0190, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 09:20:03,731 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-04-02 09:20:05,432 INFO [train.py:903] (2/4) Epoch 17, batch 2000, loss[loss=0.1839, simple_loss=0.2616, pruned_loss=0.05309, over 19823.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2943, pruned_loss=0.06944, over 3810323.16 frames. ], batch size: 52, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:20:47,965 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-02 09:21:04,227 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 09:21:07,744 INFO [train.py:903] (2/4) Epoch 17, batch 2050, loss[loss=0.2163, simple_loss=0.3005, pruned_loss=0.06603, over 19527.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2939, pruned_loss=0.06898, over 3821649.68 frames. ], batch size: 56, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:14,880 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.413e+02 6.604e+02 7.706e+02 1.674e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 09:21:22,813 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 09:21:24,054 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 09:21:34,572 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:21:44,783 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 09:22:05,005 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:06,103 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:09,333 INFO [train.py:903] (2/4) Epoch 17, batch 2100, loss[loss=0.2492, simple_loss=0.3222, pruned_loss=0.08814, over 18842.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2942, pruned_loss=0.06923, over 3834294.26 frames. ], batch size: 74, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:22:34,905 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:38,045 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 09:22:55,906 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5263, 3.1259, 2.3497, 2.3681, 2.2557, 2.6360, 1.1140, 2.2306], + device='cuda:2'), covar=tensor([0.0452, 0.0556, 0.0663, 0.0968, 0.1086, 0.0973, 0.1279, 0.1012], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0350, 0.0348, 0.0374, 0.0450, 0.0382, 0.0326, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:23:01,368 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 09:23:10,556 INFO [train.py:903] (2/4) Epoch 17, batch 2150, loss[loss=0.2005, simple_loss=0.2937, pruned_loss=0.05359, over 19538.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2945, pruned_loss=0.06951, over 3826118.21 frames. ], batch size: 56, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:23:15,736 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5242, 1.5698, 1.8634, 1.7217, 2.7075, 2.3062, 2.8272, 1.4762], + device='cuda:2'), covar=tensor([0.2183, 0.3847, 0.2482, 0.1825, 0.1410, 0.1937, 0.1378, 0.3839], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0615, 0.0671, 0.0463, 0.0611, 0.0514, 0.0653, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:23:16,487 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.943e+02 5.975e+02 7.359e+02 1.553e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 09:23:56,917 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111435.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:11,378 INFO [train.py:903] (2/4) Epoch 17, batch 2200, loss[loss=0.2402, simple_loss=0.3082, pruned_loss=0.08608, over 19660.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2945, pruned_loss=0.06976, over 3831004.59 frames. 
], batch size: 53, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:24:38,985 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:58,478 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:09,808 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:15,250 INFO [train.py:903] (2/4) Epoch 17, batch 2250, loss[loss=0.2258, simple_loss=0.3125, pruned_loss=0.0695, over 19768.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.295, pruned_loss=0.07035, over 3801760.12 frames. ], batch size: 56, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:25:22,031 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.257e+02 6.827e+02 8.687e+02 2.303e+03, threshold=1.365e+03, percent-clipped=8.0 +2023-04-02 09:26:16,855 INFO [train.py:903] (2/4) Epoch 17, batch 2300, loss[loss=0.21, simple_loss=0.2869, pruned_loss=0.06658, over 19619.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2954, pruned_loss=0.07044, over 3779274.62 frames. ], batch size: 50, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:26:27,113 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 09:26:38,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-02 09:27:05,568 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:18,432 INFO [train.py:903] (2/4) Epoch 17, batch 2350, loss[loss=0.2269, simple_loss=0.3074, pruned_loss=0.07318, over 19543.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2961, pruned_loss=0.0706, over 3789684.62 frames. ], batch size: 56, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:27:22,392 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111601.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:24,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.149e+02 5.946e+02 7.803e+02 1.982e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-02 09:27:54,523 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:58,762 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 09:28:06,321 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. limit=5.0 +2023-04-02 09:28:14,863 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 09:28:19,177 INFO [train.py:903] (2/4) Epoch 17, batch 2400, loss[loss=0.2368, simple_loss=0.3028, pruned_loss=0.08541, over 19764.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.296, pruned_loss=0.07087, over 3797198.48 frames. 
], batch size: 54, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:28:24,016 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7612, 1.8766, 2.1143, 2.3475, 1.6714, 2.2060, 2.1818, 1.9600], + device='cuda:2'), covar=tensor([0.3930, 0.3438, 0.1760, 0.2027, 0.3656, 0.1887, 0.4399, 0.3160], + device='cuda:2'), in_proj_covar=tensor([0.0854, 0.0902, 0.0683, 0.0909, 0.0833, 0.0768, 0.0816, 0.0752], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 09:29:15,421 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:24,908 INFO [train.py:903] (2/4) Epoch 17, batch 2450, loss[loss=0.1986, simple_loss=0.274, pruned_loss=0.06163, over 19586.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2961, pruned_loss=0.07076, over 3811920.37 frames. ], batch size: 52, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:29:29,891 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:30,396 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 09:29:32,594 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.976e+02 6.292e+02 8.323e+02 1.636e+03, threshold=1.258e+03, percent-clipped=0.0 +2023-04-02 09:29:32,966 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7442, 1.5789, 1.5424, 1.8042, 1.6292, 1.5580, 1.5079, 1.7120], + device='cuda:2'), covar=tensor([0.0842, 0.1239, 0.1164, 0.0751, 0.0996, 0.0473, 0.1072, 0.0559], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0245, 0.0299, 0.0248, 0.0297, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:29:47,085 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111716.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:30:18,818 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7676, 1.8530, 2.1261, 2.3115, 1.7352, 2.2134, 2.2004, 1.9331], + device='cuda:2'), covar=tensor([0.3760, 0.3387, 0.1692, 0.2038, 0.3373, 0.1837, 0.4227, 0.3068], + device='cuda:2'), in_proj_covar=tensor([0.0857, 0.0907, 0.0687, 0.0914, 0.0836, 0.0773, 0.0818, 0.0753], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 09:30:27,986 INFO [train.py:903] (2/4) Epoch 17, batch 2500, loss[loss=0.1809, simple_loss=0.259, pruned_loss=0.05138, over 19397.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2965, pruned_loss=0.07081, over 3805576.61 frames. ], batch size: 48, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:30:51,929 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 09:31:31,093 INFO [train.py:903] (2/4) Epoch 17, batch 2550, loss[loss=0.2659, simple_loss=0.3334, pruned_loss=0.09917, over 19719.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.07008, over 3810972.60 frames. 
], batch size: 63, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:36,563 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8464, 2.4900, 2.2627, 2.8542, 2.3542, 2.3059, 2.1006, 2.6985], + device='cuda:2'), covar=tensor([0.0822, 0.1469, 0.1388, 0.0959, 0.1383, 0.0489, 0.1289, 0.0609], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0355, 0.0304, 0.0246, 0.0301, 0.0250, 0.0299, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:31:38,616 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.268e+02 6.331e+02 7.722e+02 1.710e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-02 09:31:44,801 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111809.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:32:13,234 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:32:28,410 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 09:32:34,314 INFO [train.py:903] (2/4) Epoch 17, batch 2600, loss[loss=0.2232, simple_loss=0.3079, pruned_loss=0.0693, over 18732.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2943, pruned_loss=0.0693, over 3818645.89 frames. ], batch size: 74, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:32:50,528 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3085, 3.0248, 2.0582, 2.7518, 0.6370, 2.9435, 2.8728, 2.9404], + device='cuda:2'), covar=tensor([0.1099, 0.1344, 0.2171, 0.1038, 0.3841, 0.1019, 0.1121, 0.1350], + device='cuda:2'), in_proj_covar=tensor([0.0481, 0.0392, 0.0475, 0.0337, 0.0395, 0.0410, 0.0410, 0.0438], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:33:07,576 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8479, 4.9761, 5.7007, 5.7039, 2.1028, 5.3758, 4.6319, 5.3296], + device='cuda:2'), covar=tensor([0.1476, 0.0854, 0.0508, 0.0542, 0.5498, 0.0618, 0.0532, 0.1109], + device='cuda:2'), in_proj_covar=tensor([0.0748, 0.0684, 0.0891, 0.0775, 0.0796, 0.0641, 0.0534, 0.0820], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 09:33:38,518 INFO [train.py:903] (2/4) Epoch 17, batch 2650, loss[loss=0.1942, simple_loss=0.2691, pruned_loss=0.0596, over 19789.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.296, pruned_loss=0.07073, over 3805534.81 frames. ], batch size: 49, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:46,347 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 5.132e+02 6.430e+02 7.842e+02 1.964e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 09:33:58,649 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 09:34:09,643 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4091, 1.1826, 1.4090, 1.5103, 2.9798, 1.1657, 2.3093, 3.3204], + device='cuda:2'), covar=tensor([0.0489, 0.2820, 0.3003, 0.1787, 0.0748, 0.2427, 0.1178, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0351, 0.0372, 0.0334, 0.0360, 0.0341, 0.0358, 0.0378], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:34:38,387 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:41,279 INFO [train.py:903] (2/4) Epoch 17, batch 2700, loss[loss=0.1907, simple_loss=0.2692, pruned_loss=0.05607, over 19613.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2965, pruned_loss=0.07111, over 3799040.65 frames. ], batch size: 50, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:34:54,258 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:58,874 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1050, 2.0686, 1.7435, 2.2009, 2.0543, 1.8593, 1.7589, 2.0059], + device='cuda:2'), covar=tensor([0.1020, 0.1363, 0.1485, 0.0967, 0.1204, 0.0521, 0.1320, 0.0690], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0355, 0.0303, 0.0246, 0.0300, 0.0249, 0.0298, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:35:26,067 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:43,873 INFO [train.py:903] (2/4) Epoch 17, batch 2750, loss[loss=0.1813, simple_loss=0.2541, pruned_loss=0.05428, over 19767.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.296, pruned_loss=0.07077, over 3804667.57 frames. ], batch size: 45, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:35:52,115 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.760e+02 5.468e+02 6.814e+02 8.726e+02 1.544e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-02 09:36:45,031 INFO [train.py:903] (2/4) Epoch 17, batch 2800, loss[loss=0.2368, simple_loss=0.3058, pruned_loss=0.08392, over 19622.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2948, pruned_loss=0.07003, over 3807506.08 frames. ], batch size: 50, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:26,162 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1476, 1.2534, 1.6270, 1.1091, 2.5325, 3.3485, 3.0644, 3.5935], + device='cuda:2'), covar=tensor([0.1626, 0.3688, 0.3164, 0.2445, 0.0556, 0.0189, 0.0234, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0307, 0.0337, 0.0256, 0.0229, 0.0174, 0.0208, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:37:29,640 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:37:48,063 INFO [train.py:903] (2/4) Epoch 17, batch 2850, loss[loss=0.2383, simple_loss=0.3156, pruned_loss=0.08051, over 19616.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2953, pruned_loss=0.07024, over 3811980.63 frames. 
], batch size: 57, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:54,818 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 5.207e+02 6.661e+02 8.674e+02 1.797e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 09:37:57,490 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5756, 1.2814, 1.5054, 1.0568, 2.2116, 0.9390, 2.0386, 2.4666], + device='cuda:2'), covar=tensor([0.0671, 0.2473, 0.2503, 0.1761, 0.0839, 0.2125, 0.1041, 0.0443], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0349, 0.0371, 0.0332, 0.0358, 0.0340, 0.0357, 0.0378], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:38:46,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 09:38:49,613 INFO [train.py:903] (2/4) Epoch 17, batch 2900, loss[loss=0.2348, simple_loss=0.306, pruned_loss=0.08179, over 19783.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.06996, over 3804408.41 frames. ], batch size: 56, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:38:56,332 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112153.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:39:51,804 INFO [train.py:903] (2/4) Epoch 17, batch 2950, loss[loss=0.1881, simple_loss=0.2634, pruned_loss=0.05637, over 19788.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2936, pruned_loss=0.06965, over 3816895.51 frames. ], batch size: 48, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:39:55,957 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:39:58,767 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 4.879e+02 6.137e+02 7.850e+02 1.399e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-04-02 09:40:27,872 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:40:35,970 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3079, 1.3797, 1.6461, 1.5353, 2.4479, 2.0971, 2.5522, 0.9983], + device='cuda:2'), covar=tensor([0.2414, 0.4202, 0.2523, 0.1950, 0.1458, 0.2198, 0.1439, 0.4375], + device='cuda:2'), in_proj_covar=tensor([0.0507, 0.0610, 0.0664, 0.0461, 0.0603, 0.0513, 0.0649, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 09:40:54,423 INFO [train.py:903] (2/4) Epoch 17, batch 3000, loss[loss=0.1854, simple_loss=0.2677, pruned_loss=0.0515, over 19666.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2926, pruned_loss=0.06893, over 3818583.36 frames. ], batch size: 53, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:40:54,424 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 09:41:09,010 INFO [train.py:937] (2/4) Epoch 17, validation: loss=0.1717, simple_loss=0.272, pruned_loss=0.03576, over 944034.00 frames. +2023-04-02 09:41:09,011 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 09:41:13,733 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 09:41:33,216 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112268.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:41:33,250 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2652, 1.8798, 1.5099, 1.0591, 1.8365, 0.9728, 1.0901, 1.7048], + device='cuda:2'), covar=tensor([0.0913, 0.0772, 0.1051, 0.0997, 0.0526, 0.1364, 0.0744, 0.0418], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0307, 0.0330, 0.0255, 0.0242, 0.0329, 0.0290, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:42:09,758 INFO [train.py:903] (2/4) Epoch 17, batch 3050, loss[loss=0.2209, simple_loss=0.2811, pruned_loss=0.08032, over 19749.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2929, pruned_loss=0.06913, over 3818486.34 frames. ], batch size: 45, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:42:16,485 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.197e+02 6.217e+02 9.038e+02 1.667e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-02 09:42:16,887 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8231, 1.6149, 1.4912, 1.8342, 1.5648, 1.6168, 1.4491, 1.7269], + device='cuda:2'), covar=tensor([0.0948, 0.1284, 0.1344, 0.0877, 0.1139, 0.0525, 0.1310, 0.0705], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0358, 0.0305, 0.0246, 0.0299, 0.0250, 0.0298, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:43:10,131 INFO [train.py:903] (2/4) Epoch 17, batch 3100, loss[loss=0.203, simple_loss=0.2871, pruned_loss=0.05948, over 19686.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2933, pruned_loss=0.06929, over 3833104.88 frames. ], batch size: 59, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:43:32,153 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.65 vs. limit=5.0 +2023-04-02 09:43:32,968 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5447, 1.2825, 1.3036, 1.5222, 1.3202, 1.3659, 1.2368, 1.4821], + device='cuda:2'), covar=tensor([0.0783, 0.1208, 0.1097, 0.0723, 0.0985, 0.0470, 0.1122, 0.0591], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0356, 0.0304, 0.0245, 0.0298, 0.0249, 0.0296, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:43:34,200 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1537, 1.3127, 1.8617, 1.3160, 2.6773, 3.7458, 3.4781, 3.9640], + device='cuda:2'), covar=tensor([0.1605, 0.3554, 0.2969, 0.2280, 0.0569, 0.0156, 0.0188, 0.0232], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0307, 0.0336, 0.0256, 0.0229, 0.0175, 0.0208, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 09:44:09,950 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-04-02 09:44:14,541 INFO [train.py:903] (2/4) Epoch 17, batch 3150, loss[loss=0.2348, simple_loss=0.3136, pruned_loss=0.078, over 19582.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2944, pruned_loss=0.07012, over 3838717.29 frames. 
], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:21,820 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.016e+02 6.190e+02 7.660e+02 1.883e+03, threshold=1.238e+03, percent-clipped=9.0 +2023-04-02 09:44:25,534 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112407.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:44:42,659 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 09:44:52,116 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:44:52,647 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.09 vs. limit=5.0 +2023-04-02 09:45:17,099 INFO [train.py:903] (2/4) Epoch 17, batch 3200, loss[loss=0.205, simple_loss=0.2812, pruned_loss=0.06433, over 19594.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2945, pruned_loss=0.07005, over 3834391.62 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:07,411 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6296, 1.3655, 1.5602, 1.5846, 3.1997, 1.0210, 2.2845, 3.5887], + device='cuda:2'), covar=tensor([0.0485, 0.2681, 0.2616, 0.1742, 0.0738, 0.2561, 0.1241, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0352, 0.0371, 0.0332, 0.0358, 0.0340, 0.0356, 0.0379], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:46:19,039 INFO [train.py:903] (2/4) Epoch 17, batch 3250, loss[loss=0.2088, simple_loss=0.2888, pruned_loss=0.06437, over 19749.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.07021, over 3829921.56 frames. ], batch size: 54, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:26,173 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.958e+02 6.274e+02 7.840e+02 2.025e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 09:46:42,512 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 09:46:51,193 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112524.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:47:13,560 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:47:18,737 INFO [train.py:903] (2/4) Epoch 17, batch 3300, loss[loss=0.206, simple_loss=0.2871, pruned_loss=0.06244, over 19849.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2951, pruned_loss=0.07003, over 3839253.73 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:47:21,226 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112549.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:47:25,361 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 09:48:00,371 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:20,892 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:24,050 INFO [train.py:903] (2/4) Epoch 17, batch 3350, loss[loss=0.2068, simple_loss=0.2836, pruned_loss=0.06499, over 19693.00 frames. 
], tot_loss[loss=0.2166, simple_loss=0.2945, pruned_loss=0.06933, over 3841912.64 frames. ], batch size: 53, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:48:31,317 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.436e+02 6.846e+02 8.612e+02 1.565e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 09:49:09,946 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:49:24,124 INFO [train.py:903] (2/4) Epoch 17, batch 3400, loss[loss=0.2537, simple_loss=0.3296, pruned_loss=0.08886, over 19585.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2946, pruned_loss=0.06947, over 3827498.36 frames. ], batch size: 61, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:25,907 INFO [train.py:903] (2/4) Epoch 17, batch 3450, loss[loss=0.2271, simple_loss=0.2951, pruned_loss=0.07956, over 19414.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2934, pruned_loss=0.06878, over 3816763.15 frames. ], batch size: 48, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:28,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 09:50:32,995 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.932e+02 6.092e+02 9.481e+02 2.200e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-02 09:50:55,137 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-02 09:51:04,789 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8690, 1.7419, 1.4626, 1.8893, 1.6100, 1.5885, 1.6031, 1.7418], + device='cuda:2'), covar=tensor([0.1076, 0.1420, 0.1623, 0.1065, 0.1326, 0.0622, 0.1351, 0.0805], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0353, 0.0299, 0.0242, 0.0295, 0.0246, 0.0293, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:51:27,355 INFO [train.py:903] (2/4) Epoch 17, batch 3500, loss[loss=0.2422, simple_loss=0.3186, pruned_loss=0.08292, over 18814.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06962, over 3823976.48 frames. ], batch size: 74, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:51:32,015 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:31,111 INFO [train.py:903] (2/4) Epoch 17, batch 3550, loss[loss=0.2407, simple_loss=0.3159, pruned_loss=0.08272, over 18100.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2939, pruned_loss=0.06945, over 3820435.55 frames. 
], batch size: 83, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:52:32,856 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:38,378 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.759e+02 5.980e+02 7.566e+02 1.638e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 09:52:57,243 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8717, 4.3047, 4.6048, 4.6225, 1.7494, 4.3383, 3.7137, 4.2803], + device='cuda:2'), covar=tensor([0.1554, 0.0803, 0.0570, 0.0602, 0.5676, 0.0739, 0.0663, 0.1128], + device='cuda:2'), in_proj_covar=tensor([0.0752, 0.0691, 0.0900, 0.0777, 0.0797, 0.0647, 0.0538, 0.0823], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 09:53:03,326 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:53:33,308 INFO [train.py:903] (2/4) Epoch 17, batch 3600, loss[loss=0.206, simple_loss=0.2902, pruned_loss=0.06091, over 19688.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2924, pruned_loss=0.06809, over 3833383.14 frames. ], batch size: 60, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:53:55,539 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112866.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:54:35,707 INFO [train.py:903] (2/4) Epoch 17, batch 3650, loss[loss=0.265, simple_loss=0.3431, pruned_loss=0.09345, over 19540.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2932, pruned_loss=0.06837, over 3832201.31 frames. ], batch size: 54, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:54:43,570 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.951e+02 4.960e+02 5.826e+02 7.647e+02 1.614e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 09:55:09,850 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112924.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,598 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,730 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:38,878 INFO [train.py:903] (2/4) Epoch 17, batch 3700, loss[loss=0.1775, simple_loss=0.2598, pruned_loss=0.04762, over 19737.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2936, pruned_loss=0.06871, over 3831482.30 frames. ], batch size: 51, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:05,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2597, 1.1929, 1.2570, 1.3893, 1.0763, 1.3304, 1.2556, 1.3336], + device='cuda:2'), covar=tensor([0.0941, 0.1092, 0.1085, 0.0719, 0.0871, 0.0916, 0.0944, 0.0831], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0222, 0.0222, 0.0244, 0.0228, 0.0208, 0.0189, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 09:56:17,476 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:56:42,237 INFO [train.py:903] (2/4) Epoch 17, batch 3750, loss[loss=0.2206, simple_loss=0.302, pruned_loss=0.06966, over 19680.00 frames. 
], tot_loss[loss=0.2162, simple_loss=0.2938, pruned_loss=0.06928, over 3824999.66 frames. ], batch size: 60, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:49,254 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.723e+02 6.001e+02 7.947e+02 1.345e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-02 09:56:58,856 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2254, 3.7145, 2.2097, 2.1800, 3.3430, 2.1493, 1.6221, 2.3280], + device='cuda:2'), covar=tensor([0.1349, 0.0540, 0.0983, 0.0874, 0.0409, 0.1059, 0.0959, 0.0621], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0313, 0.0334, 0.0258, 0.0245, 0.0333, 0.0294, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 09:57:32,680 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113039.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:57:42,418 INFO [train.py:903] (2/4) Epoch 17, batch 3800, loss[loss=0.1678, simple_loss=0.2463, pruned_loss=0.04462, over 19767.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2951, pruned_loss=0.07046, over 3805043.47 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:57:49,577 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:14,078 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 09:58:39,329 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:43,324 INFO [train.py:903] (2/4) Epoch 17, batch 3850, loss[loss=0.1941, simple_loss=0.2841, pruned_loss=0.05209, over 19373.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2949, pruned_loss=0.07035, over 3807762.19 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:58:51,555 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.627e+02 5.316e+02 6.326e+02 9.097e+02 1.552e+03, threshold=1.265e+03, percent-clipped=8.0 +2023-04-02 09:59:14,508 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113122.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:44,505 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113147.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:45,342 INFO [train.py:903] (2/4) Epoch 17, batch 3900, loss[loss=0.2649, simple_loss=0.3233, pruned_loss=0.1032, over 13487.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2949, pruned_loss=0.07, over 3823129.34 frames. ], batch size: 136, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:48,760 INFO [train.py:903] (2/4) Epoch 17, batch 3950, loss[loss=0.2104, simple_loss=0.28, pruned_loss=0.07043, over 19608.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2938, pruned_loss=0.06935, over 3831827.70 frames. ], batch size: 50, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:56,123 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 10:00:57,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.545e+02 5.288e+02 6.585e+02 1.560e+03, threshold=1.058e+03, percent-clipped=1.0 +2023-04-02 10:01:33,725 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. 
limit=2.0 +2023-04-02 10:01:51,457 INFO [train.py:903] (2/4) Epoch 17, batch 4000, loss[loss=0.2307, simple_loss=0.3122, pruned_loss=0.07457, over 18820.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2945, pruned_loss=0.06933, over 3825844.36 frames. ], batch size: 74, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:01:56,565 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:25,035 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7362, 1.3427, 1.6292, 1.7576, 3.2959, 1.1038, 2.3799, 3.7333], + device='cuda:2'), covar=tensor([0.0475, 0.2724, 0.2782, 0.1563, 0.0708, 0.2488, 0.1253, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0388, 0.0352, 0.0374, 0.0333, 0.0360, 0.0342, 0.0359, 0.0381], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:02:35,233 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:39,803 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 10:02:49,635 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:52,760 INFO [train.py:903] (2/4) Epoch 17, batch 4050, loss[loss=0.1799, simple_loss=0.2598, pruned_loss=0.04998, over 19737.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2936, pruned_loss=0.06867, over 3840383.27 frames. ], batch size: 46, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:00,896 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 4.703e+02 5.716e+02 7.594e+02 1.568e+03, threshold=1.143e+03, percent-clipped=5.0 +2023-04-02 10:03:08,178 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:21,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:38,659 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:39,147 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-02 10:03:54,658 INFO [train.py:903] (2/4) Epoch 17, batch 4100, loss[loss=0.2587, simple_loss=0.3277, pruned_loss=0.09489, over 19700.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2944, pruned_loss=0.06915, over 3828386.91 frames. ], batch size: 59, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:57,612 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:28,678 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:31,497 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 10:04:56,295 INFO [train.py:903] (2/4) Epoch 17, batch 4150, loss[loss=0.1731, simple_loss=0.2475, pruned_loss=0.04934, over 19721.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2939, pruned_loss=0.06892, over 3836225.12 frames. 
], batch size: 45, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:04:56,639 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:05:03,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 5.375e+02 6.520e+02 8.152e+02 2.133e+03, threshold=1.304e+03, percent-clipped=6.0 +2023-04-02 10:05:23,394 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1579, 2.1432, 1.6206, 2.2387, 2.3998, 1.6003, 1.6345, 1.9632], + device='cuda:2'), covar=tensor([0.1061, 0.1645, 0.1810, 0.1110, 0.1316, 0.0951, 0.1700, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0352, 0.0298, 0.0241, 0.0295, 0.0247, 0.0291, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:05:57,673 INFO [train.py:903] (2/4) Epoch 17, batch 4200, loss[loss=0.2141, simple_loss=0.2802, pruned_loss=0.07398, over 19725.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2944, pruned_loss=0.06923, over 3824704.24 frames. ], batch size: 46, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:06:02,327 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 10:06:41,226 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7390, 2.0072, 2.2749, 2.1821, 3.1332, 3.7176, 3.6238, 4.0279], + device='cuda:2'), covar=tensor([0.1484, 0.2915, 0.2620, 0.1874, 0.0952, 0.0296, 0.0185, 0.0227], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0310, 0.0339, 0.0258, 0.0232, 0.0177, 0.0211, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 10:06:59,519 INFO [train.py:903] (2/4) Epoch 17, batch 4250, loss[loss=0.1941, simple_loss=0.2684, pruned_loss=0.05995, over 19808.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2957, pruned_loss=0.07021, over 3810582.98 frames. ], batch size: 48, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:07:06,463 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.808e+02 5.898e+02 7.585e+02 1.571e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 10:07:13,468 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 10:07:15,387 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 10:07:21,783 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1310, 3.2944, 1.9866, 1.9840, 2.8453, 1.8516, 1.4888, 2.2078], + device='cuda:2'), covar=tensor([0.1226, 0.0548, 0.1068, 0.0781, 0.0596, 0.1146, 0.0959, 0.0642], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0310, 0.0329, 0.0255, 0.0244, 0.0329, 0.0291, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:07:24,954 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 10:08:02,054 INFO [train.py:903] (2/4) Epoch 17, batch 4300, loss[loss=0.1862, simple_loss=0.2651, pruned_loss=0.05363, over 19485.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2957, pruned_loss=0.06991, over 3812878.88 frames. ], batch size: 49, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:08:03,898 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-02 10:08:55,426 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 10:09:02,224 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113596.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:09:04,355 INFO [train.py:903] (2/4) Epoch 17, batch 4350, loss[loss=0.3059, simple_loss=0.3568, pruned_loss=0.1275, over 13250.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2957, pruned_loss=0.06986, over 3805517.19 frames. ], batch size: 136, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:09:12,444 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 4.847e+02 6.118e+02 7.738e+02 1.753e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 10:09:49,678 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:07,375 INFO [train.py:903] (2/4) Epoch 17, batch 4400, loss[loss=0.249, simple_loss=0.3198, pruned_loss=0.08911, over 19500.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2961, pruned_loss=0.07023, over 3800544.43 frames. ], batch size: 64, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:10:14,930 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113654.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:26,323 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:33,138 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 10:10:43,982 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 10:10:45,297 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113679.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:11:07,788 INFO [train.py:903] (2/4) Epoch 17, batch 4450, loss[loss=0.2, simple_loss=0.2867, pruned_loss=0.05668, over 19769.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2959, pruned_loss=0.07042, over 3806620.44 frames. ], batch size: 56, lr: 4.84e-03, grad_scale: 16.0 +2023-04-02 10:11:14,458 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.100e+02 6.811e+02 8.906e+02 1.680e+03, threshold=1.362e+03, percent-clipped=7.0 +2023-04-02 10:11:22,980 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113711.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:12:07,715 INFO [train.py:903] (2/4) Epoch 17, batch 4500, loss[loss=0.2366, simple_loss=0.3166, pruned_loss=0.07833, over 19763.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2964, pruned_loss=0.07073, over 3817503.73 frames. ], batch size: 63, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:13:09,290 INFO [train.py:903] (2/4) Epoch 17, batch 4550, loss[loss=0.2201, simple_loss=0.3089, pruned_loss=0.06566, over 19426.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.296, pruned_loss=0.07079, over 3810559.08 frames. ], batch size: 62, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:13:19,354 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-02 10:13:20,449 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 5.090e+02 6.214e+02 7.749e+02 1.433e+03, threshold=1.243e+03, percent-clipped=2.0 +2023-04-02 10:13:42,452 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 10:14:12,738 INFO [train.py:903] (2/4) Epoch 17, batch 4600, loss[loss=0.2234, simple_loss=0.3105, pruned_loss=0.06816, over 19128.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2963, pruned_loss=0.07079, over 3803766.25 frames. ], batch size: 69, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:14,387 INFO [train.py:903] (2/4) Epoch 17, batch 4650, loss[loss=0.223, simple_loss=0.3007, pruned_loss=0.07263, over 19411.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.06999, over 3807368.74 frames. ], batch size: 48, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:23,609 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.276e+02 6.482e+02 7.907e+02 1.823e+03, threshold=1.296e+03, percent-clipped=2.0 +2023-04-02 10:15:31,993 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 10:15:44,637 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 10:15:57,531 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9786, 3.0798, 1.8578, 1.8348, 2.8368, 1.6181, 1.4622, 2.1012], + device='cuda:2'), covar=tensor([0.1440, 0.0753, 0.1031, 0.0856, 0.0547, 0.1297, 0.0984, 0.0747], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0311, 0.0330, 0.0255, 0.0243, 0.0329, 0.0291, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:16:16,818 INFO [train.py:903] (2/4) Epoch 17, batch 4700, loss[loss=0.1816, simple_loss=0.2668, pruned_loss=0.04814, over 19584.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2948, pruned_loss=0.07001, over 3804362.91 frames. ], batch size: 52, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:16:42,982 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:16:43,737 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 10:16:56,542 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:12,171 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:19,850 INFO [train.py:903] (2/4) Epoch 17, batch 4750, loss[loss=0.2077, simple_loss=0.2912, pruned_loss=0.06207, over 19407.00 frames. ], tot_loss[loss=0.217, simple_loss=0.295, pruned_loss=0.06953, over 3816000.44 frames. ], batch size: 70, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:17:32,729 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.836e+02 6.122e+02 7.624e+02 1.576e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 10:17:35,052 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:18:24,455 INFO [train.py:903] (2/4) Epoch 17, batch 4800, loss[loss=0.2273, simple_loss=0.3114, pruned_loss=0.07158, over 19704.00 frames. 
], tot_loss[loss=0.2189, simple_loss=0.2964, pruned_loss=0.07067, over 3817066.55 frames. ], batch size: 59, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:22,118 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:19:26,562 INFO [train.py:903] (2/4) Epoch 17, batch 4850, loss[loss=0.2026, simple_loss=0.2775, pruned_loss=0.06385, over 19860.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07051, over 3820961.43 frames. ], batch size: 52, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:35,426 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.130e+02 6.675e+02 8.728e+02 1.864e+03, threshold=1.335e+03, percent-clipped=11.0 +2023-04-02 10:19:38,308 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3159, 2.3112, 2.4731, 3.2654, 2.3076, 3.0135, 2.6599, 2.3532], + device='cuda:2'), covar=tensor([0.4034, 0.3743, 0.1689, 0.2287, 0.4239, 0.1885, 0.4293, 0.2989], + device='cuda:2'), in_proj_covar=tensor([0.0857, 0.0906, 0.0688, 0.0913, 0.0837, 0.0774, 0.0819, 0.0756], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 10:19:52,812 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 10:19:57,421 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:09,279 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 10:20:14,641 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 10:20:19,123 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 10:20:20,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 10:20:25,246 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:28,428 INFO [train.py:903] (2/4) Epoch 17, batch 4900, loss[loss=0.1922, simple_loss=0.2691, pruned_loss=0.05762, over 19746.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2952, pruned_loss=0.07013, over 3833579.96 frames. ], batch size: 46, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:20:28,474 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 10:20:48,128 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 10:21:23,052 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0396, 1.4343, 1.7621, 1.2564, 2.5562, 3.3839, 3.0732, 3.5963], + device='cuda:2'), covar=tensor([0.1722, 0.3423, 0.3044, 0.2317, 0.0529, 0.0163, 0.0229, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0312, 0.0341, 0.0259, 0.0233, 0.0178, 0.0211, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 10:21:29,555 INFO [train.py:903] (2/4) Epoch 17, batch 4950, loss[loss=0.2108, simple_loss=0.2946, pruned_loss=0.06345, over 19499.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2949, pruned_loss=0.06995, over 3831242.21 frames. 
], batch size: 64, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:21:41,951 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.073e+02 6.090e+02 7.599e+02 1.461e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-02 10:21:48,516 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 10:22:09,554 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 10:22:31,812 INFO [train.py:903] (2/4) Epoch 17, batch 5000, loss[loss=0.2195, simple_loss=0.3028, pruned_loss=0.06807, over 19344.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2955, pruned_loss=0.07038, over 3831223.97 frames. ], batch size: 66, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:22:39,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 10:22:50,091 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 10:22:59,523 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5098, 2.3196, 2.0693, 2.5702, 2.4321, 2.0927, 2.0830, 2.5916], + device='cuda:2'), covar=tensor([0.0874, 0.1470, 0.1337, 0.1005, 0.1172, 0.0526, 0.1176, 0.0578], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0353, 0.0298, 0.0242, 0.0296, 0.0246, 0.0292, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:23:32,990 INFO [train.py:903] (2/4) Epoch 17, batch 5050, loss[loss=0.2114, simple_loss=0.2996, pruned_loss=0.06159, over 19732.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2958, pruned_loss=0.07043, over 3835428.56 frames. ], batch size: 63, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:23:42,364 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.068e+02 6.244e+02 7.899e+02 1.430e+03, threshold=1.249e+03, percent-clipped=5.0 +2023-04-02 10:24:10,977 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 10:24:34,964 INFO [train.py:903] (2/4) Epoch 17, batch 5100, loss[loss=0.1845, simple_loss=0.2556, pruned_loss=0.0567, over 19323.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2943, pruned_loss=0.06966, over 3838433.50 frames. ], batch size: 44, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:24:37,685 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:24:44,353 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 10:24:46,806 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 10:24:53,309 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 10:25:10,318 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:14,809 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:20,129 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.34 vs. 
limit=5.0 +2023-04-02 10:25:36,276 INFO [train.py:903] (2/4) Epoch 17, batch 5150, loss[loss=0.24, simple_loss=0.3111, pruned_loss=0.08443, over 19744.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2957, pruned_loss=0.0702, over 3829543.65 frames. ], batch size: 51, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:25:44,128 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9184, 4.3772, 4.6857, 4.7217, 1.7238, 4.3586, 3.8032, 4.3488], + device='cuda:2'), covar=tensor([0.1699, 0.0925, 0.0591, 0.0624, 0.6172, 0.0816, 0.0676, 0.1166], + device='cuda:2'), in_proj_covar=tensor([0.0744, 0.0694, 0.0892, 0.0780, 0.0798, 0.0644, 0.0537, 0.0821], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 10:25:46,419 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114404.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:49,639 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.288e+02 6.652e+02 7.839e+02 1.735e+03, threshold=1.330e+03, percent-clipped=3.0 +2023-04-02 10:25:50,834 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 10:26:24,413 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 10:26:41,812 INFO [train.py:903] (2/4) Epoch 17, batch 5200, loss[loss=0.2514, simple_loss=0.3116, pruned_loss=0.09563, over 19849.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2955, pruned_loss=0.0705, over 3813369.82 frames. ], batch size: 52, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:26:54,843 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 10:27:10,234 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6114, 1.2433, 1.2414, 1.4810, 1.1907, 1.3747, 1.1796, 1.4501], + device='cuda:2'), covar=tensor([0.1069, 0.1154, 0.1521, 0.0933, 0.1178, 0.0596, 0.1428, 0.0751], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0356, 0.0301, 0.0245, 0.0300, 0.0248, 0.0295, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:27:19,897 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 10:27:33,123 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114489.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:27:37,831 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 10:27:43,627 INFO [train.py:903] (2/4) Epoch 17, batch 5250, loss[loss=0.186, simple_loss=0.2585, pruned_loss=0.05674, over 19731.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2956, pruned_loss=0.06997, over 3829228.78 frames. ], batch size: 46, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:27:53,071 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 4.802e+02 5.852e+02 7.465e+02 1.395e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 10:28:44,580 INFO [train.py:903] (2/4) Epoch 17, batch 5300, loss[loss=0.2623, simple_loss=0.3332, pruned_loss=0.09571, over 18112.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2959, pruned_loss=0.07015, over 3824992.54 frames. 
], batch size: 83, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:28:59,044 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 10:29:31,242 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4118, 1.4599, 1.8053, 1.6866, 2.6777, 2.3858, 2.9140, 1.3210], + device='cuda:2'), covar=tensor([0.2337, 0.4125, 0.2509, 0.1796, 0.1527, 0.1944, 0.1405, 0.3968], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0614, 0.0668, 0.0461, 0.0607, 0.0515, 0.0649, 0.0524], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 10:29:44,122 INFO [train.py:903] (2/4) Epoch 17, batch 5350, loss[loss=0.2225, simple_loss=0.3042, pruned_loss=0.07044, over 19629.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2964, pruned_loss=0.07056, over 3828466.95 frames. ], batch size: 57, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:29:44,502 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9900, 1.2812, 1.6946, 0.8735, 2.3328, 3.0094, 2.6965, 3.2110], + device='cuda:2'), covar=tensor([0.1692, 0.3623, 0.3134, 0.2587, 0.0579, 0.0236, 0.0279, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0311, 0.0340, 0.0259, 0.0232, 0.0178, 0.0212, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 10:29:45,546 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1636, 1.9138, 1.6743, 2.0774, 1.9280, 1.7743, 1.6627, 2.0517], + device='cuda:2'), covar=tensor([0.0929, 0.1368, 0.1422, 0.0950, 0.1282, 0.0557, 0.1302, 0.0677], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0354, 0.0301, 0.0245, 0.0300, 0.0248, 0.0295, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:29:51,216 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:29:54,895 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 5.171e+02 6.688e+02 9.089e+02 2.274e+03, threshold=1.338e+03, percent-clipped=9.0 +2023-04-02 10:30:19,031 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 10:30:46,233 INFO [train.py:903] (2/4) Epoch 17, batch 5400, loss[loss=0.1974, simple_loss=0.2873, pruned_loss=0.05374, over 19669.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2961, pruned_loss=0.07027, over 3828767.48 frames. ], batch size: 58, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:29,406 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9371, 1.9973, 2.2357, 2.6712, 1.9734, 2.6210, 2.3678, 2.0751], + device='cuda:2'), covar=tensor([0.4211, 0.3929, 0.1909, 0.2239, 0.3847, 0.1941, 0.4403, 0.3357], + device='cuda:2'), in_proj_covar=tensor([0.0865, 0.0915, 0.0692, 0.0922, 0.0844, 0.0779, 0.0821, 0.0761], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 10:31:29,583 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 10:31:47,169 INFO [train.py:903] (2/4) Epoch 17, batch 5450, loss[loss=0.2088, simple_loss=0.2943, pruned_loss=0.06159, over 19652.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2959, pruned_loss=0.07013, over 3839272.91 frames. 
], batch size: 58, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:56,195 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.503e+02 5.761e+02 7.243e+02 1.420e+03, threshold=1.152e+03, percent-clipped=1.0 +2023-04-02 10:32:31,023 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:32:47,135 INFO [train.py:903] (2/4) Epoch 17, batch 5500, loss[loss=0.2279, simple_loss=0.3081, pruned_loss=0.07385, over 19326.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2952, pruned_loss=0.06991, over 3837478.71 frames. ], batch size: 66, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:32:47,597 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2434, 1.3832, 1.4777, 1.4276, 1.7376, 1.7846, 1.8227, 0.5296], + device='cuda:2'), covar=tensor([0.2342, 0.3913, 0.2527, 0.1850, 0.1590, 0.2195, 0.1365, 0.4406], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0612, 0.0668, 0.0461, 0.0607, 0.0515, 0.0648, 0.0523], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 10:33:10,779 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 10:33:46,749 INFO [train.py:903] (2/4) Epoch 17, batch 5550, loss[loss=0.217, simple_loss=0.293, pruned_loss=0.0705, over 19772.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2948, pruned_loss=0.07003, over 3830268.66 frames. ], batch size: 54, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:33:54,783 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 10:33:55,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.950e+02 6.230e+02 7.289e+02 1.704e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 10:34:04,683 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8212, 1.9192, 2.1125, 2.5469, 1.8316, 2.3936, 2.2279, 1.9979], + device='cuda:2'), covar=tensor([0.4116, 0.3706, 0.1871, 0.2226, 0.3897, 0.2016, 0.4507, 0.3146], + device='cuda:2'), in_proj_covar=tensor([0.0861, 0.0911, 0.0688, 0.0917, 0.0837, 0.0775, 0.0817, 0.0758], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 10:34:39,082 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 10:34:41,608 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 10:34:48,919 INFO [train.py:903] (2/4) Epoch 17, batch 5600, loss[loss=0.2167, simple_loss=0.3007, pruned_loss=0.06635, over 19785.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2955, pruned_loss=0.07038, over 3809443.71 frames. 
], batch size: 56, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:03,677 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:33,266 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:39,353 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114889.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:50,177 INFO [train.py:903] (2/4) Epoch 17, batch 5650, loss[loss=0.222, simple_loss=0.3061, pruned_loss=0.06901, over 19640.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2958, pruned_loss=0.07039, over 3809083.87 frames. ], batch size: 57, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:59,361 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.160e+02 6.498e+02 8.575e+02 1.504e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-02 10:36:35,307 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5713, 1.1309, 1.4346, 1.1110, 2.2558, 0.9211, 2.1394, 2.3935], + device='cuda:2'), covar=tensor([0.0677, 0.2755, 0.2628, 0.1744, 0.0829, 0.2086, 0.0942, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0391, 0.0356, 0.0376, 0.0338, 0.0365, 0.0344, 0.0363, 0.0383], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:36:36,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 10:36:51,135 INFO [train.py:903] (2/4) Epoch 17, batch 5700, loss[loss=0.1999, simple_loss=0.2837, pruned_loss=0.05803, over 19697.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2958, pruned_loss=0.07017, over 3825610.29 frames. ], batch size: 53, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:08,306 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5715, 1.7726, 2.0752, 1.9741, 3.4406, 2.7759, 3.6417, 1.7811], + device='cuda:2'), covar=tensor([0.2283, 0.4006, 0.2677, 0.1644, 0.1318, 0.1919, 0.1443, 0.3724], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0615, 0.0673, 0.0463, 0.0611, 0.0516, 0.0650, 0.0526], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 10:37:43,497 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6089, 1.4934, 1.4572, 1.9716, 1.4784, 1.7747, 1.7862, 1.6766], + device='cuda:2'), covar=tensor([0.0808, 0.0928, 0.1007, 0.0747, 0.0855, 0.0783, 0.0912, 0.0686], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0222, 0.0223, 0.0243, 0.0227, 0.0209, 0.0188, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 10:37:50,252 INFO [train.py:903] (2/4) Epoch 17, batch 5750, loss[loss=0.2579, simple_loss=0.3291, pruned_loss=0.09333, over 19318.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2953, pruned_loss=0.06987, over 3825322.81 frames. ], batch size: 66, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:50,272 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 10:37:57,232 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-02 10:37:59,478 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 5.221e+02 6.429e+02 7.572e+02 1.818e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 10:38:04,567 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 10:38:50,578 INFO [train.py:903] (2/4) Epoch 17, batch 5800, loss[loss=0.2108, simple_loss=0.2944, pruned_loss=0.06363, over 19292.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2947, pruned_loss=0.06927, over 3827732.45 frames. ], batch size: 66, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:38:52,836 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8316, 4.2761, 4.5801, 4.5882, 1.5689, 4.2857, 3.6447, 4.2653], + device='cuda:2'), covar=tensor([0.1572, 0.0942, 0.0592, 0.0610, 0.6236, 0.0833, 0.0719, 0.1089], + device='cuda:2'), in_proj_covar=tensor([0.0741, 0.0691, 0.0891, 0.0775, 0.0794, 0.0645, 0.0536, 0.0819], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 10:39:27,224 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:39:28,619 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8043, 2.5044, 2.2815, 2.5619, 2.4270, 2.1989, 2.0829, 2.6839], + device='cuda:2'), covar=tensor([0.0753, 0.1414, 0.1339, 0.1018, 0.1398, 0.0492, 0.1202, 0.0595], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0352, 0.0302, 0.0245, 0.0298, 0.0247, 0.0294, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:39:35,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-02 10:39:52,202 INFO [train.py:903] (2/4) Epoch 17, batch 5850, loss[loss=0.2169, simple_loss=0.2864, pruned_loss=0.07366, over 19758.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2944, pruned_loss=0.06896, over 3826712.13 frames. ], batch size: 49, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:39:59,371 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:40:01,415 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 4.663e+02 6.050e+02 7.882e+02 1.454e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 10:40:51,604 INFO [train.py:903] (2/4) Epoch 17, batch 5900, loss[loss=0.1881, simple_loss=0.2638, pruned_loss=0.05619, over 19335.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2939, pruned_loss=0.06894, over 3828470.56 frames. ], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:40:55,179 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 10:40:55,563 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5132, 2.3014, 1.6895, 1.5205, 2.1421, 1.2927, 1.3662, 1.8717], + device='cuda:2'), covar=tensor([0.1154, 0.0669, 0.1003, 0.0808, 0.0537, 0.1237, 0.0737, 0.0483], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0309, 0.0328, 0.0257, 0.0245, 0.0327, 0.0290, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:41:13,983 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 10:41:45,891 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:41:51,144 INFO [train.py:903] (2/4) Epoch 17, batch 5950, loss[loss=0.2538, simple_loss=0.3262, pruned_loss=0.09072, over 19512.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2946, pruned_loss=0.06952, over 3810515.63 frames. ], batch size: 64, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:00,462 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.938e+02 6.318e+02 8.201e+02 2.090e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:42:20,335 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3334, 1.4166, 1.7923, 1.5667, 2.7510, 2.2486, 2.9090, 1.2314], + device='cuda:2'), covar=tensor([0.2444, 0.4171, 0.2516, 0.1935, 0.1427, 0.2095, 0.1376, 0.4093], + device='cuda:2'), in_proj_covar=tensor([0.0514, 0.0615, 0.0674, 0.0464, 0.0611, 0.0518, 0.0652, 0.0527], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 10:42:35,072 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115233.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:42:43,152 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8898, 4.4260, 2.7982, 3.9267, 0.7831, 4.3544, 4.2344, 4.4337], + device='cuda:2'), covar=tensor([0.0562, 0.0996, 0.1828, 0.0732, 0.4175, 0.0658, 0.0825, 0.0957], + device='cuda:2'), in_proj_covar=tensor([0.0471, 0.0388, 0.0469, 0.0336, 0.0391, 0.0406, 0.0404, 0.0433], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:42:51,742 INFO [train.py:903] (2/4) Epoch 17, batch 6000, loss[loss=0.2648, simple_loss=0.3381, pruned_loss=0.09574, over 19390.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2949, pruned_loss=0.06929, over 3829153.08 frames. ], batch size: 70, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:51,742 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 10:43:04,254 INFO [train.py:937] (2/4) Epoch 17, validation: loss=0.1707, simple_loss=0.2712, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 10:43:04,254 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 10:44:04,143 INFO [train.py:903] (2/4) Epoch 17, batch 6050, loss[loss=0.2234, simple_loss=0.3044, pruned_loss=0.07121, over 17443.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2951, pruned_loss=0.06964, over 3815293.88 frames. ], batch size: 101, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:44:15,952 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 5.156e+02 6.136e+02 7.598e+02 1.906e+03, threshold=1.227e+03, percent-clipped=4.0 +2023-04-02 10:44:20,981 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. 
limit=2.0 +2023-04-02 10:44:22,795 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1904, 1.5545, 2.1493, 1.7747, 3.1329, 4.6568, 4.6665, 5.2415], + device='cuda:2'), covar=tensor([0.1702, 0.3576, 0.2941, 0.1995, 0.0557, 0.0163, 0.0167, 0.0160], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0314, 0.0341, 0.0261, 0.0235, 0.0179, 0.0213, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 10:45:03,315 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3287, 3.8034, 3.9252, 3.9259, 1.6850, 3.7169, 3.2021, 3.6676], + device='cuda:2'), covar=tensor([0.1644, 0.0883, 0.0684, 0.0776, 0.5141, 0.0872, 0.0757, 0.1107], + device='cuda:2'), in_proj_covar=tensor([0.0747, 0.0697, 0.0898, 0.0778, 0.0799, 0.0648, 0.0537, 0.0824], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 10:45:06,507 INFO [train.py:903] (2/4) Epoch 17, batch 6100, loss[loss=0.2164, simple_loss=0.2986, pruned_loss=0.06713, over 19548.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2952, pruned_loss=0.06975, over 3814232.80 frames. ], batch size: 61, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:45:06,856 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115348.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:34,894 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5911, 4.1194, 2.6677, 3.6920, 1.0454, 4.0372, 4.0175, 4.0568], + device='cuda:2'), covar=tensor([0.0611, 0.1115, 0.2068, 0.0841, 0.4121, 0.0750, 0.0873, 0.1384], + device='cuda:2'), in_proj_covar=tensor([0.0473, 0.0389, 0.0472, 0.0335, 0.0393, 0.0409, 0.0405, 0.0435], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:45:54,937 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:56,181 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:46:06,536 INFO [train.py:903] (2/4) Epoch 17, batch 6150, loss[loss=0.2349, simple_loss=0.3097, pruned_loss=0.08012, over 19565.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2948, pruned_loss=0.06972, over 3814249.90 frames. ], batch size: 61, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:46:15,597 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.209e+02 6.440e+02 8.380e+02 1.538e+03, threshold=1.288e+03, percent-clipped=5.0 +2023-04-02 10:46:33,781 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 10:47:07,330 INFO [train.py:903] (2/4) Epoch 17, batch 6200, loss[loss=0.1919, simple_loss=0.2569, pruned_loss=0.06344, over 19755.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2941, pruned_loss=0.06952, over 3834476.57 frames. 
], batch size: 45, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:47:07,478 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:08,880 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:23,443 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1812, 1.9342, 1.7503, 2.1177, 1.8667, 1.8355, 1.6273, 2.0752], + device='cuda:2'), covar=tensor([0.0914, 0.1327, 0.1417, 0.0966, 0.1317, 0.0523, 0.1326, 0.0625], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0351, 0.0301, 0.0243, 0.0295, 0.0245, 0.0293, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:47:39,822 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:56,156 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9797, 3.5845, 2.4257, 3.2115, 0.9660, 3.4943, 3.4236, 3.5360], + device='cuda:2'), covar=tensor([0.0810, 0.1243, 0.2027, 0.0901, 0.4035, 0.0860, 0.0952, 0.1166], + device='cuda:2'), in_proj_covar=tensor([0.0477, 0.0390, 0.0475, 0.0337, 0.0396, 0.0411, 0.0407, 0.0436], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:48:07,509 INFO [train.py:903] (2/4) Epoch 17, batch 6250, loss[loss=0.2171, simple_loss=0.2966, pruned_loss=0.06884, over 19623.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2939, pruned_loss=0.06947, over 3822344.54 frames. ], batch size: 61, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:48:16,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.690e+02 5.769e+02 7.890e+02 2.007e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-02 10:48:37,585 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 10:49:09,163 INFO [train.py:903] (2/4) Epoch 17, batch 6300, loss[loss=0.2257, simple_loss=0.307, pruned_loss=0.07225, over 19579.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.294, pruned_loss=0.06937, over 3814397.42 frames. ], batch size: 64, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:49:27,657 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115563.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:12,479 INFO [train.py:903] (2/4) Epoch 17, batch 6350, loss[loss=0.2147, simple_loss=0.2959, pruned_loss=0.06679, over 19773.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.293, pruned_loss=0.06882, over 3815480.64 frames. ], batch size: 54, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:50:19,964 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:21,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.077e+02 8.091e+02 1.466e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 10:50:44,537 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. 
limit=2.0 +2023-04-02 10:50:50,650 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:51:13,881 INFO [train.py:903] (2/4) Epoch 17, batch 6400, loss[loss=0.2499, simple_loss=0.3284, pruned_loss=0.08573, over 18724.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.293, pruned_loss=0.06872, over 3805798.95 frames. ], batch size: 74, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:51:23,203 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8982, 4.5472, 3.2565, 4.0337, 1.8671, 4.3392, 4.2729, 4.4406], + device='cuda:2'), covar=tensor([0.0457, 0.0939, 0.1779, 0.0745, 0.3076, 0.0690, 0.0870, 0.1180], + device='cuda:2'), in_proj_covar=tensor([0.0478, 0.0393, 0.0477, 0.0337, 0.0398, 0.0414, 0.0410, 0.0438], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 10:51:26,051 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.69 vs. limit=5.0 +2023-04-02 10:51:43,306 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3693, 2.1760, 1.9663, 1.8659, 1.5968, 1.8431, 0.7306, 1.2453], + device='cuda:2'), covar=tensor([0.0557, 0.0572, 0.0471, 0.0723, 0.1126, 0.0899, 0.1114, 0.0981], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0347, 0.0348, 0.0376, 0.0447, 0.0380, 0.0325, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 10:52:15,073 INFO [train.py:903] (2/4) Epoch 17, batch 6450, loss[loss=0.2531, simple_loss=0.3348, pruned_loss=0.08567, over 19331.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2942, pruned_loss=0.06909, over 3808693.34 frames. ], batch size: 66, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:25,110 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.829e+02 5.862e+02 7.962e+02 1.327e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 10:52:58,105 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115732.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:52:59,223 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:53:01,409 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 10:53:16,380 INFO [train.py:903] (2/4) Epoch 17, batch 6500, loss[loss=0.2365, simple_loss=0.3124, pruned_loss=0.08031, over 19671.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2941, pruned_loss=0.0698, over 3791092.40 frames. ], batch size: 58, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:53:24,007 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 10:54:02,653 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 10:54:18,481 INFO [train.py:903] (2/4) Epoch 17, batch 6550, loss[loss=0.2366, simple_loss=0.3078, pruned_loss=0.08269, over 19681.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.07027, over 3791985.32 frames. 
], batch size: 53, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:54:28,763 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.150e+02 6.522e+02 8.804e+02 2.234e+03, threshold=1.304e+03, percent-clipped=7.0 +2023-04-02 10:54:43,149 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:15,241 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115844.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:18,702 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115847.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:19,546 INFO [train.py:903] (2/4) Epoch 17, batch 6600, loss[loss=0.1969, simple_loss=0.273, pruned_loss=0.06041, over 19377.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.294, pruned_loss=0.06968, over 3803320.05 frames. ], batch size: 47, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:55:19,903 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:54,239 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3251, 1.3418, 1.7603, 1.2061, 2.4680, 3.3797, 3.0870, 3.5948], + device='cuda:2'), covar=tensor([0.1521, 0.3607, 0.3028, 0.2438, 0.0574, 0.0210, 0.0221, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0312, 0.0340, 0.0259, 0.0234, 0.0177, 0.0210, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 10:56:12,270 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 10:56:19,810 INFO [train.py:903] (2/4) Epoch 17, batch 6650, loss[loss=0.2164, simple_loss=0.2971, pruned_loss=0.06785, over 19457.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2947, pruned_loss=0.07024, over 3808709.47 frames. ], batch size: 64, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:56:30,890 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.833e+02 5.946e+02 8.225e+02 1.682e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-02 10:56:35,529 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115910.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 10:57:21,974 INFO [train.py:903] (2/4) Epoch 17, batch 6700, loss[loss=0.1663, simple_loss=0.2429, pruned_loss=0.04486, over 19736.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2945, pruned_loss=0.06974, over 3807612.24 frames. ], batch size: 46, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:20,392 INFO [train.py:903] (2/4) Epoch 17, batch 6750, loss[loss=0.2381, simple_loss=0.3211, pruned_loss=0.07756, over 19738.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2958, pruned_loss=0.07086, over 3795438.95 frames. ], batch size: 63, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:31,536 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.304e+02 6.320e+02 7.514e+02 1.971e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:58:52,447 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-02 10:58:59,778 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:59:05,991 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-04-02 10:59:12,145 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2654, 2.3300, 2.5248, 3.1577, 2.3625, 3.0766, 2.6591, 2.3851], + device='cuda:2'), covar=tensor([0.4022, 0.3783, 0.1731, 0.2425, 0.4273, 0.1912, 0.4381, 0.3100], + device='cuda:2'), in_proj_covar=tensor([0.0857, 0.0909, 0.0686, 0.0911, 0.0837, 0.0773, 0.0818, 0.0754], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 10:59:17,211 INFO [train.py:903] (2/4) Epoch 17, batch 6800, loss[loss=0.1983, simple_loss=0.2853, pruned_loss=0.05567, over 19777.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2955, pruned_loss=0.07054, over 3795930.03 frames. ], batch size: 56, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 11:00:03,036 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 11:00:03,508 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 11:00:07,131 INFO [train.py:903] (2/4) Epoch 18, batch 0, loss[loss=0.177, simple_loss=0.2539, pruned_loss=0.0501, over 19804.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2539, pruned_loss=0.0501, over 19804.00 frames. ], batch size: 48, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:00:07,131 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 11:00:18,786 INFO [train.py:937] (2/4) Epoch 18, validation: loss=0.1712, simple_loss=0.2722, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 11:00:18,787 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 11:00:32,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 11:00:51,670 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:52,782 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:55,571 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 4.972e+02 6.494e+02 8.085e+02 1.604e+03, threshold=1.299e+03, percent-clipped=1.0 +2023-04-02 11:01:14,433 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5316, 2.3161, 2.1376, 2.5502, 2.4425, 2.0929, 1.9996, 2.3864], + device='cuda:2'), covar=tensor([0.0964, 0.1510, 0.1513, 0.1111, 0.1347, 0.0543, 0.1370, 0.0723], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0353, 0.0303, 0.0245, 0.0297, 0.0246, 0.0294, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:01:15,474 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:18,720 INFO [train.py:903] (2/4) Epoch 18, batch 50, loss[loss=0.2012, simple_loss=0.2838, pruned_loss=0.05928, over 19589.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2935, pruned_loss=0.07081, over 851801.89 frames. 
], batch size: 52, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:01:21,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:23,504 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:30,192 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:47,384 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.51 vs. limit=2.0 +2023-04-02 11:01:52,473 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 11:01:55,422 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-02 11:02:21,174 INFO [train.py:903] (2/4) Epoch 18, batch 100, loss[loss=0.1861, simple_loss=0.2659, pruned_loss=0.05316, over 19423.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2937, pruned_loss=0.06923, over 1505010.16 frames. ], batch size: 48, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:02:32,241 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 11:02:58,315 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 4.838e+02 6.090e+02 7.458e+02 2.009e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 11:03:21,609 INFO [train.py:903] (2/4) Epoch 18, batch 150, loss[loss=0.2464, simple_loss=0.3241, pruned_loss=0.08437, over 19862.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2943, pruned_loss=0.06904, over 2021874.09 frames. ], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:03:56,114 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116254.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:04:20,470 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 11:04:21,608 INFO [train.py:903] (2/4) Epoch 18, batch 200, loss[loss=0.2114, simple_loss=0.2845, pruned_loss=0.06921, over 19730.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2954, pruned_loss=0.06986, over 2426098.29 frames. ], batch size: 51, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:01,397 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 4.750e+02 5.613e+02 7.153e+02 1.890e+03, threshold=1.123e+03, percent-clipped=2.0 +2023-04-02 11:05:24,085 INFO [train.py:903] (2/4) Epoch 18, batch 250, loss[loss=0.2347, simple_loss=0.3228, pruned_loss=0.07332, over 19627.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2937, pruned_loss=0.06821, over 2744642.51 frames. ], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:43,802 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116341.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:06:18,097 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116369.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:06:25,430 INFO [train.py:903] (2/4) Epoch 18, batch 300, loss[loss=0.1883, simple_loss=0.2741, pruned_loss=0.05126, over 19649.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06837, over 2979957.68 frames. 
], batch size: 55, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:06:25,590 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:07:03,943 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 5.241e+02 6.705e+02 8.261e+02 1.478e+03, threshold=1.341e+03, percent-clipped=3.0 +2023-04-02 11:07:28,473 INFO [train.py:903] (2/4) Epoch 18, batch 350, loss[loss=0.2004, simple_loss=0.2774, pruned_loss=0.06168, over 19860.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06744, over 3168859.96 frames. ], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:07:31,355 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6564, 1.7590, 1.9627, 2.0233, 1.5133, 1.9110, 2.0094, 1.8102], + device='cuda:2'), covar=tensor([0.3800, 0.3307, 0.1790, 0.1933, 0.3319, 0.1825, 0.4518, 0.3102], + device='cuda:2'), in_proj_covar=tensor([0.0856, 0.0909, 0.0686, 0.0911, 0.0837, 0.0773, 0.0813, 0.0754], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 11:07:33,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 11:07:44,267 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 11:08:16,145 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:19,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:29,490 INFO [train.py:903] (2/4) Epoch 18, batch 400, loss[loss=0.2265, simple_loss=0.3042, pruned_loss=0.0744, over 19761.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2923, pruned_loss=0.0677, over 3328786.45 frames. ], batch size: 63, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:08:31,843 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:48,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116491.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:08,776 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.874e+02 5.859e+02 7.069e+02 1.370e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:09:27,710 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:31,060 INFO [train.py:903] (2/4) Epoch 18, batch 450, loss[loss=0.18, simple_loss=0.2606, pruned_loss=0.04973, over 19467.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.06749, over 3426189.32 frames. ], batch size: 49, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:06,954 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 11:10:08,082 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 11:10:36,068 INFO [train.py:903] (2/4) Epoch 18, batch 500, loss[loss=0.183, simple_loss=0.2705, pruned_loss=0.04774, over 19858.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06742, over 3514982.60 frames. 
], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:43,234 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:10:57,242 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116593.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:11:13,493 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.090e+02 6.291e+02 8.243e+02 1.843e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 11:11:38,152 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116625.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:11:38,805 INFO [train.py:903] (2/4) Epoch 18, batch 550, loss[loss=0.231, simple_loss=0.304, pruned_loss=0.07901, over 19730.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2923, pruned_loss=0.06823, over 3580848.07 frames. ], batch size: 51, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:11:51,095 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:12:07,828 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116650.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:12:07,879 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4412, 1.5038, 1.9744, 1.7300, 2.6056, 2.2085, 2.6326, 1.3109], + device='cuda:2'), covar=tensor([0.2608, 0.4391, 0.2565, 0.2012, 0.1691, 0.2359, 0.1839, 0.4384], + device='cuda:2'), in_proj_covar=tensor([0.0512, 0.0612, 0.0671, 0.0463, 0.0611, 0.0516, 0.0648, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 11:12:40,988 INFO [train.py:903] (2/4) Epoch 18, batch 600, loss[loss=0.2182, simple_loss=0.2859, pruned_loss=0.07525, over 19785.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2918, pruned_loss=0.06832, over 3620708.62 frames. ], batch size: 49, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:12:51,644 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116685.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:18,881 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.880e+02 6.214e+02 8.095e+02 1.532e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 11:13:21,179 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 11:13:28,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8725, 1.6758, 1.4889, 1.9176, 1.4917, 1.7069, 1.5468, 1.7841], + device='cuda:2'), covar=tensor([0.0982, 0.1295, 0.1427, 0.0899, 0.1361, 0.0529, 0.1249, 0.0731], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0356, 0.0302, 0.0247, 0.0299, 0.0247, 0.0296, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:13:37,808 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116722.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:42,023 INFO [train.py:903] (2/4) Epoch 18, batch 650, loss[loss=0.2272, simple_loss=0.3085, pruned_loss=0.07291, over 19475.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2921, pruned_loss=0.06862, over 3666510.86 frames. 
], batch size: 49, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:13:55,032 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6178, 1.3506, 1.5840, 1.5282, 3.2007, 0.9805, 2.4131, 3.6163], + device='cuda:2'), covar=tensor([0.0487, 0.2697, 0.2656, 0.1829, 0.0657, 0.2590, 0.1259, 0.0241], + device='cuda:2'), in_proj_covar=tensor([0.0390, 0.0354, 0.0374, 0.0340, 0.0362, 0.0345, 0.0359, 0.0384], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:14:08,728 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:34,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.32 vs. limit=5.0 +2023-04-02 11:14:38,793 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:43,012 INFO [train.py:903] (2/4) Epoch 18, batch 700, loss[loss=0.2603, simple_loss=0.328, pruned_loss=0.09628, over 17543.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2938, pruned_loss=0.06918, over 3711189.82 frames. ], batch size: 101, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:15:15,683 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:23,662 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.148e+02 4.855e+02 5.777e+02 7.000e+02 1.472e+03, threshold=1.155e+03, percent-clipped=1.0 +2023-04-02 11:15:24,933 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:27,849 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 11:15:39,759 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:47,498 INFO [train.py:903] (2/4) Epoch 18, batch 750, loss[loss=0.2144, simple_loss=0.2983, pruned_loss=0.06526, over 19530.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2931, pruned_loss=0.06847, over 3748555.09 frames. ], batch size: 64, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:16:03,722 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:16,565 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:35,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:39,887 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:50,596 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116874.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:52,573 INFO [train.py:903] (2/4) Epoch 18, batch 800, loss[loss=0.1946, simple_loss=0.2762, pruned_loss=0.05645, over 19761.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2928, pruned_loss=0.06857, over 3756007.38 frames. ], batch size: 54, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:17:06,590 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 11:17:32,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.513e+02 6.326e+02 7.669e+02 1.889e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-02 11:17:51,818 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:17:55,029 INFO [train.py:903] (2/4) Epoch 18, batch 850, loss[loss=0.2175, simple_loss=0.292, pruned_loss=0.0715, over 19599.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2927, pruned_loss=0.06825, over 3780280.39 frames. ], batch size: 52, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:18:12,811 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1905, 1.3303, 1.6789, 1.3256, 2.7147, 3.6868, 3.3971, 3.8983], + device='cuda:2'), covar=tensor([0.1635, 0.3658, 0.3267, 0.2333, 0.0571, 0.0167, 0.0218, 0.0234], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0312, 0.0339, 0.0256, 0.0231, 0.0177, 0.0209, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 11:18:48,138 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 11:18:56,389 INFO [train.py:903] (2/4) Epoch 18, batch 900, loss[loss=0.1972, simple_loss=0.2786, pruned_loss=0.05793, over 19651.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.06836, over 3781478.48 frames. ], batch size: 55, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:19:01,382 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116980.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:04,004 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:38,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 4.671e+02 5.637e+02 7.276e+02 1.422e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 11:20:00,611 INFO [train.py:903] (2/4) Epoch 18, batch 950, loss[loss=0.2024, simple_loss=0.2878, pruned_loss=0.05855, over 17197.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2924, pruned_loss=0.06807, over 3789139.56 frames. ], batch size: 101, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:20:02,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 11:20:38,647 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:20:51,135 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:03,208 INFO [train.py:903] (2/4) Epoch 18, batch 1000, loss[loss=0.1876, simple_loss=0.2608, pruned_loss=0.05717, over 19777.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2931, pruned_loss=0.06845, over 3787465.71 frames. 
], batch size: 48, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:21:11,182 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:27,373 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:43,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 4.999e+02 6.181e+02 7.829e+02 2.221e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 11:21:58,125 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 11:22:07,204 INFO [train.py:903] (2/4) Epoch 18, batch 1050, loss[loss=0.2279, simple_loss=0.3039, pruned_loss=0.0759, over 19791.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2932, pruned_loss=0.06873, over 3791280.10 frames. ], batch size: 56, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:22:40,248 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 11:22:55,011 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117164.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:09,072 INFO [train.py:903] (2/4) Epoch 18, batch 1100, loss[loss=0.2001, simple_loss=0.2779, pruned_loss=0.06111, over 19456.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2913, pruned_loss=0.0674, over 3804225.14 frames. ], batch size: 49, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:23:13,156 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:15,419 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117181.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:24,962 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1515, 2.0246, 1.8006, 1.6525, 1.5446, 1.6070, 0.5507, 1.0237], + device='cuda:2'), covar=tensor([0.0558, 0.0613, 0.0489, 0.0821, 0.1203, 0.0980, 0.1279, 0.1055], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0347, 0.0347, 0.0376, 0.0449, 0.0382, 0.0328, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 11:23:44,507 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117204.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:49,510 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.961e+02 5.150e+02 6.225e+02 7.900e+02 1.283e+03, threshold=1.245e+03, percent-clipped=2.0 +2023-04-02 11:24:11,116 INFO [train.py:903] (2/4) Epoch 18, batch 1150, loss[loss=0.1815, simple_loss=0.2745, pruned_loss=0.04426, over 19679.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.291, pruned_loss=0.06718, over 3809476.03 frames. 
], batch size: 59, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:24:27,284 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:28,292 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117239.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:39,791 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3910, 3.9226, 2.6664, 3.4926, 1.0049, 3.8613, 3.8615, 3.8683], + device='cuda:2'), covar=tensor([0.0660, 0.1133, 0.1922, 0.0893, 0.3970, 0.0774, 0.0834, 0.1046], + device='cuda:2'), in_proj_covar=tensor([0.0479, 0.0393, 0.0475, 0.0337, 0.0395, 0.0413, 0.0406, 0.0436], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:24:58,361 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:14,043 INFO [train.py:903] (2/4) Epoch 18, batch 1200, loss[loss=0.2312, simple_loss=0.3062, pruned_loss=0.07809, over 19689.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2909, pruned_loss=0.06724, over 3809178.00 frames. ], batch size: 60, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:25:19,022 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:31,286 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6513, 2.3930, 2.3036, 2.7319, 2.4039, 2.2714, 2.2575, 2.6428], + device='cuda:2'), covar=tensor([0.0780, 0.1462, 0.1178, 0.0871, 0.1262, 0.0462, 0.1094, 0.0565], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0248, 0.0299, 0.0247, 0.0295, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:25:50,734 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 11:25:54,159 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.775e+02 5.862e+02 7.562e+02 1.280e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:26:12,978 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-02 11:26:18,177 INFO [train.py:903] (2/4) Epoch 18, batch 1250, loss[loss=0.1822, simple_loss=0.2649, pruned_loss=0.04976, over 19778.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2911, pruned_loss=0.06706, over 3813463.41 frames. 
], batch size: 54, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:26:43,829 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:49,490 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117351.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:58,463 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3284, 2.2021, 2.0322, 1.8721, 1.7259, 1.8939, 0.5732, 1.2346], + device='cuda:2'), covar=tensor([0.0546, 0.0549, 0.0463, 0.0789, 0.1076, 0.0852, 0.1296, 0.0989], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0347, 0.0348, 0.0376, 0.0449, 0.0381, 0.0328, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 11:27:20,888 INFO [train.py:903] (2/4) Epoch 18, batch 1300, loss[loss=0.1822, simple_loss=0.2541, pruned_loss=0.05517, over 19779.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2912, pruned_loss=0.06721, over 3818828.09 frames. ], batch size: 47, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:27:21,328 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:27:48,181 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4553, 1.3927, 1.3628, 1.8713, 1.3999, 1.7713, 1.7579, 1.5432], + device='cuda:2'), covar=tensor([0.0874, 0.0931, 0.1039, 0.0634, 0.0841, 0.0678, 0.0750, 0.0714], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0221, 0.0223, 0.0240, 0.0226, 0.0209, 0.0186, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 11:28:01,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 5.418e+02 6.594e+02 8.357e+02 1.516e+03, threshold=1.319e+03, percent-clipped=5.0 +2023-04-02 11:28:22,254 INFO [train.py:903] (2/4) Epoch 18, batch 1350, loss[loss=0.1957, simple_loss=0.274, pruned_loss=0.05874, over 19736.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.06755, over 3817144.08 frames. ], batch size: 51, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:28:37,275 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:07,741 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:21,700 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 11:29:24,525 INFO [train.py:903] (2/4) Epoch 18, batch 1400, loss[loss=0.1901, simple_loss=0.2585, pruned_loss=0.06087, over 19420.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2913, pruned_loss=0.06793, over 3820162.82 frames. ], batch size: 48, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:30:04,549 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.042e+02 5.443e+02 6.741e+02 8.791e+02 2.167e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 11:30:28,257 INFO [train.py:903] (2/4) Epoch 18, batch 1450, loss[loss=0.1764, simple_loss=0.2521, pruned_loss=0.0503, over 19375.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.291, pruned_loss=0.06777, over 3813742.55 frames. 
], batch size: 47, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:30:29,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 11:30:40,082 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117535.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:11,252 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117560.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:30,934 INFO [train.py:903] (2/4) Epoch 18, batch 1500, loss[loss=0.1906, simple_loss=0.2762, pruned_loss=0.05249, over 19763.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2918, pruned_loss=0.06813, over 3807416.92 frames. ], batch size: 54, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:31:39,061 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:53,612 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-02 11:32:11,894 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.657e+02 6.062e+02 7.787e+02 1.498e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 11:32:32,136 INFO [train.py:903] (2/4) Epoch 18, batch 1550, loss[loss=0.2948, simple_loss=0.3635, pruned_loss=0.1131, over 19663.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2926, pruned_loss=0.06875, over 3806202.40 frames. ], batch size: 59, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:33:34,533 INFO [train.py:903] (2/4) Epoch 18, batch 1600, loss[loss=0.2029, simple_loss=0.289, pruned_loss=0.0584, over 17378.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2931, pruned_loss=0.06863, over 3804638.56 frames. ], batch size: 101, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:33:54,974 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:01,931 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 11:34:03,384 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:15,615 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.964e+02 5.954e+02 7.051e+02 1.393e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 11:34:37,827 INFO [train.py:903] (2/4) Epoch 18, batch 1650, loss[loss=0.1909, simple_loss=0.2597, pruned_loss=0.06102, over 19014.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2935, pruned_loss=0.06889, over 3804485.82 frames. 
], batch size: 42, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:34:44,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0508, 1.2883, 1.6912, 0.9442, 2.3300, 3.0214, 2.7367, 3.2136], + device='cuda:2'), covar=tensor([0.1714, 0.3726, 0.3275, 0.2638, 0.0623, 0.0240, 0.0264, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0309, 0.0338, 0.0257, 0.0230, 0.0177, 0.0209, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 11:35:04,230 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3363, 2.3781, 2.6263, 3.1871, 2.2905, 3.0336, 2.7208, 2.4026], + device='cuda:2'), covar=tensor([0.4196, 0.3884, 0.1700, 0.2431, 0.4328, 0.2023, 0.4198, 0.3159], + device='cuda:2'), in_proj_covar=tensor([0.0858, 0.0911, 0.0689, 0.0910, 0.0837, 0.0775, 0.0818, 0.0757], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 11:35:39,486 INFO [train.py:903] (2/4) Epoch 18, batch 1700, loss[loss=0.2077, simple_loss=0.2872, pruned_loss=0.06406, over 19771.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2937, pruned_loss=0.06899, over 3804290.26 frames. ], batch size: 56, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:36:16,647 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:36:19,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.954e+02 5.098e+02 6.258e+02 7.253e+02 1.524e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 11:36:21,126 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 11:36:40,232 INFO [train.py:903] (2/4) Epoch 18, batch 1750, loss[loss=0.2334, simple_loss=0.3226, pruned_loss=0.07212, over 19675.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2945, pruned_loss=0.06896, over 3821639.82 frames. ], batch size: 59, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:37:14,810 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 11:37:43,036 INFO [train.py:903] (2/4) Epoch 18, batch 1800, loss[loss=0.1977, simple_loss=0.2808, pruned_loss=0.05725, over 19584.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2931, pruned_loss=0.06825, over 3810595.33 frames. ], batch size: 52, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:38:23,380 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.803e+02 6.041e+02 7.952e+02 1.877e+03, threshold=1.208e+03, percent-clipped=3.0 +2023-04-02 11:38:42,053 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 11:38:45,250 INFO [train.py:903] (2/4) Epoch 18, batch 1850, loss[loss=0.2083, simple_loss=0.2959, pruned_loss=0.06031, over 19618.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2932, pruned_loss=0.06845, over 3801348.46 frames. ], batch size: 57, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:18,894 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 11:39:19,263 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117954.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:39:47,637 INFO [train.py:903] (2/4) Epoch 18, batch 1900, loss[loss=0.226, simple_loss=0.3076, pruned_loss=0.07218, over 19732.00 frames. 
], tot_loss[loss=0.2143, simple_loss=0.2925, pruned_loss=0.06804, over 3787589.20 frames. ], batch size: 63, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:51,598 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:40:03,249 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 11:40:07,858 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 11:40:27,943 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.120e+02 6.309e+02 7.973e+02 1.539e+03, threshold=1.262e+03, percent-clipped=6.0 +2023-04-02 11:40:34,591 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 11:40:48,204 INFO [train.py:903] (2/4) Epoch 18, batch 1950, loss[loss=0.1845, simple_loss=0.2575, pruned_loss=0.05577, over 16994.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2926, pruned_loss=0.06833, over 3805703.63 frames. ], batch size: 37, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:41:28,980 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:33,525 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:49,955 INFO [train.py:903] (2/4) Epoch 18, batch 2000, loss[loss=0.1669, simple_loss=0.2443, pruned_loss=0.04472, over 19319.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06817, over 3804640.37 frames. ], batch size: 44, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:42:05,405 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:42:11,300 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-02 11:42:31,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 5.367e+02 6.632e+02 8.072e+02 1.503e+03, threshold=1.326e+03, percent-clipped=5.0 +2023-04-02 11:42:46,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 11:42:54,014 INFO [train.py:903] (2/4) Epoch 18, batch 2050, loss[loss=0.2151, simple_loss=0.2985, pruned_loss=0.06581, over 19786.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2923, pruned_loss=0.06789, over 3805811.33 frames. ], batch size: 56, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:43:06,260 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 11:43:07,433 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 11:43:26,124 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 11:43:55,357 INFO [train.py:903] (2/4) Epoch 18, batch 2100, loss[loss=0.3026, simple_loss=0.3595, pruned_loss=0.1229, over 13510.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2928, pruned_loss=0.06809, over 3794041.64 frames. 
], batch size: 136, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:44:07,025 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:44:21,262 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 11:44:36,101 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.186e+02 6.689e+02 8.493e+02 1.656e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 11:44:44,927 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 11:44:56,297 INFO [train.py:903] (2/4) Epoch 18, batch 2150, loss[loss=0.2661, simple_loss=0.3339, pruned_loss=0.09914, over 13748.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2931, pruned_loss=0.06813, over 3799334.46 frames. ], batch size: 136, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:45:39,509 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6131, 1.2558, 1.2449, 1.5185, 1.1718, 1.4184, 1.2063, 1.4666], + device='cuda:2'), covar=tensor([0.1083, 0.1186, 0.1539, 0.0923, 0.1229, 0.0573, 0.1473, 0.0782], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0350, 0.0299, 0.0245, 0.0296, 0.0244, 0.0292, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:45:57,805 INFO [train.py:903] (2/4) Epoch 18, batch 2200, loss[loss=0.1933, simple_loss=0.2649, pruned_loss=0.06084, over 19769.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.06833, over 3798208.34 frames. ], batch size: 46, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:46:13,153 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:37,302 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:39,297 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 5.366e+02 7.280e+02 9.450e+02 2.114e+03, threshold=1.456e+03, percent-clipped=6.0 +2023-04-02 11:47:01,193 INFO [train.py:903] (2/4) Epoch 18, batch 2250, loss[loss=0.2105, simple_loss=0.2818, pruned_loss=0.06967, over 19663.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2931, pruned_loss=0.06811, over 3795117.69 frames. ], batch size: 53, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:47:27,267 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:47:47,777 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2638, 3.9346, 3.0789, 3.4798, 1.7798, 3.7260, 3.6822, 3.7994], + device='cuda:2'), covar=tensor([0.0716, 0.1085, 0.1844, 0.1025, 0.3102, 0.0791, 0.1054, 0.1302], + device='cuda:2'), in_proj_covar=tensor([0.0478, 0.0395, 0.0479, 0.0340, 0.0399, 0.0416, 0.0411, 0.0441], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:48:02,749 INFO [train.py:903] (2/4) Epoch 18, batch 2300, loss[loss=0.2172, simple_loss=0.2937, pruned_loss=0.07041, over 19847.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2942, pruned_loss=0.06941, over 3782579.37 frames. 
], batch size: 52, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:48:15,079 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 11:48:22,252 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4780, 1.3868, 1.6159, 1.3479, 3.0786, 1.0159, 2.3787, 3.3514], + device='cuda:2'), covar=tensor([0.0480, 0.2534, 0.2472, 0.1903, 0.0694, 0.2421, 0.1060, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0398, 0.0359, 0.0377, 0.0344, 0.0367, 0.0347, 0.0367, 0.0388], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:48:35,798 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:48:44,949 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.253e+02 6.243e+02 7.561e+02 1.558e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 11:49:05,629 INFO [train.py:903] (2/4) Epoch 18, batch 2350, loss[loss=0.1836, simple_loss=0.2647, pruned_loss=0.05122, over 19624.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2935, pruned_loss=0.06867, over 3796027.20 frames. ], batch size: 50, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:49:12,812 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9508, 1.3447, 1.0733, 0.9902, 1.1814, 1.0036, 1.0317, 1.2796], + device='cuda:2'), covar=tensor([0.0617, 0.0827, 0.1179, 0.0761, 0.0611, 0.1343, 0.0590, 0.0477], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0262, 0.0247, 0.0334, 0.0293, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:49:46,255 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 11:50:02,106 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 11:50:06,554 INFO [train.py:903] (2/4) Epoch 18, batch 2400, loss[loss=0.2043, simple_loss=0.2943, pruned_loss=0.0571, over 18648.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2931, pruned_loss=0.06864, over 3805599.57 frames. ], batch size: 74, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:50:48,687 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.821e+02 5.593e+02 7.730e+02 1.750e+03, threshold=1.119e+03, percent-clipped=3.0 +2023-04-02 11:50:58,576 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118517.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:10,675 INFO [train.py:903] (2/4) Epoch 18, batch 2450, loss[loss=0.1808, simple_loss=0.2591, pruned_loss=0.05128, over 19757.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2921, pruned_loss=0.06827, over 3796753.45 frames. 
], batch size: 47, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:51:16,269 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118530.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:57,089 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7849, 2.0596, 2.3572, 2.0784, 3.2061, 3.6506, 3.6102, 4.0003], + device='cuda:2'), covar=tensor([0.1401, 0.2885, 0.2644, 0.1979, 0.0908, 0.0339, 0.0180, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0310, 0.0340, 0.0257, 0.0232, 0.0178, 0.0210, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 11:52:12,599 INFO [train.py:903] (2/4) Epoch 18, batch 2500, loss[loss=0.2356, simple_loss=0.3161, pruned_loss=0.07762, over 19398.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2934, pruned_loss=0.06878, over 3802182.15 frames. ], batch size: 70, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:52:53,545 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 5.081e+02 6.040e+02 7.266e+02 1.380e+03, threshold=1.208e+03, percent-clipped=1.0 +2023-04-02 11:53:13,674 INFO [train.py:903] (2/4) Epoch 18, batch 2550, loss[loss=0.1799, simple_loss=0.256, pruned_loss=0.05191, over 19347.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2933, pruned_loss=0.06838, over 3818806.69 frames. ], batch size: 47, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:53:19,835 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118631.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:37,336 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-02 11:53:37,929 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:43,516 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:45,541 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:06,487 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 11:54:15,894 INFO [train.py:903] (2/4) Epoch 18, batch 2600, loss[loss=0.2066, simple_loss=0.2764, pruned_loss=0.06839, over 19751.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.291, pruned_loss=0.06714, over 3826760.65 frames. ], batch size: 47, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:54:35,492 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:56,091 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.742e+02 5.749e+02 7.114e+02 1.367e+03, threshold=1.150e+03, percent-clipped=3.0 +2023-04-02 11:55:16,573 INFO [train.py:903] (2/4) Epoch 18, batch 2650, loss[loss=0.2198, simple_loss=0.2859, pruned_loss=0.07682, over 19782.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06702, over 3832583.74 frames. ], batch size: 47, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:55:33,748 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 11:55:42,217 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:06,167 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:15,272 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:18,252 INFO [train.py:903] (2/4) Epoch 18, batch 2700, loss[loss=0.196, simple_loss=0.2853, pruned_loss=0.05336, over 19606.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2919, pruned_loss=0.06758, over 3822695.40 frames. ], batch size: 52, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:56:44,810 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118798.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:49,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. limit=5.0 +2023-04-02 11:56:55,797 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:59,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.095e+02 6.154e+02 8.195e+02 1.746e+03, threshold=1.231e+03, percent-clipped=5.0 +2023-04-02 11:57:05,259 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2840, 5.6682, 3.3107, 4.9559, 1.0941, 5.7679, 5.6523, 5.7694], + device='cuda:2'), covar=tensor([0.0345, 0.0701, 0.1618, 0.0675, 0.4032, 0.0501, 0.0747, 0.0796], + device='cuda:2'), in_proj_covar=tensor([0.0483, 0.0394, 0.0480, 0.0341, 0.0402, 0.0419, 0.0413, 0.0445], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:57:06,542 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4356, 1.3067, 1.0091, 1.2645, 1.1803, 1.1008, 0.9896, 1.2256], + device='cuda:2'), covar=tensor([0.1247, 0.1168, 0.1947, 0.1174, 0.1313, 0.1145, 0.1933, 0.1121], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0353, 0.0299, 0.0248, 0.0298, 0.0246, 0.0295, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:57:20,472 INFO [train.py:903] (2/4) Epoch 18, batch 2750, loss[loss=0.197, simple_loss=0.2672, pruned_loss=0.06342, over 19282.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2913, pruned_loss=0.06744, over 3819673.94 frames. ], batch size: 44, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:15,406 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3481, 1.2983, 1.6256, 1.5561, 2.3136, 2.0124, 2.4525, 0.8789], + device='cuda:2'), covar=tensor([0.2666, 0.4641, 0.2811, 0.2177, 0.1759, 0.2475, 0.1632, 0.4830], + device='cuda:2'), in_proj_covar=tensor([0.0519, 0.0622, 0.0679, 0.0468, 0.0615, 0.0521, 0.0655, 0.0530], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 11:58:23,109 INFO [train.py:903] (2/4) Epoch 18, batch 2800, loss[loss=0.2002, simple_loss=0.2872, pruned_loss=0.05656, over 19615.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2922, pruned_loss=0.06775, over 3824489.48 frames. 
], batch size: 57, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:41,703 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9749, 1.8780, 1.7573, 1.5488, 1.4967, 1.5590, 0.4159, 0.7924], + device='cuda:2'), covar=tensor([0.0527, 0.0564, 0.0383, 0.0626, 0.1069, 0.0685, 0.1165, 0.0965], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0348, 0.0347, 0.0374, 0.0448, 0.0380, 0.0329, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 11:58:44,008 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7085, 1.7089, 1.7348, 1.6403, 4.2829, 1.0980, 2.6725, 4.6110], + device='cuda:2'), covar=tensor([0.0429, 0.2678, 0.2777, 0.2021, 0.0738, 0.2748, 0.1408, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0399, 0.0357, 0.0378, 0.0342, 0.0365, 0.0345, 0.0366, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 11:58:54,314 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:59:03,992 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.804e+02 6.148e+02 8.494e+02 1.418e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 11:59:24,489 INFO [train.py:903] (2/4) Epoch 18, batch 2850, loss[loss=0.2067, simple_loss=0.2717, pruned_loss=0.07082, over 19794.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2933, pruned_loss=0.06852, over 3827863.47 frames. ], batch size: 47, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:59:24,958 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118926.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:22,866 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 12:00:26,007 INFO [train.py:903] (2/4) Epoch 18, batch 2900, loss[loss=0.1853, simple_loss=0.2686, pruned_loss=0.05096, over 19665.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06846, over 3804557.01 frames. ], batch size: 53, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:00:27,573 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1198, 1.9865, 1.8688, 1.7240, 1.5554, 1.7194, 0.5051, 0.9866], + device='cuda:2'), covar=tensor([0.0540, 0.0558, 0.0378, 0.0618, 0.1065, 0.0728, 0.1152, 0.0949], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0349, 0.0348, 0.0375, 0.0449, 0.0381, 0.0330, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:00:46,055 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:54,348 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-04-02 12:00:54,950 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3163, 1.3655, 1.6737, 1.6150, 2.4272, 2.1089, 2.6515, 1.1050], + device='cuda:2'), covar=tensor([0.2646, 0.4582, 0.2758, 0.2106, 0.1824, 0.2387, 0.1676, 0.4856], + device='cuda:2'), in_proj_covar=tensor([0.0520, 0.0625, 0.0682, 0.0469, 0.0618, 0.0524, 0.0659, 0.0533], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 12:00:56,057 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:58,541 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:01:05,225 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 5.127e+02 5.908e+02 8.371e+02 2.467e+03, threshold=1.182e+03, percent-clipped=10.0 +2023-04-02 12:01:21,066 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:25,114 INFO [train.py:903] (2/4) Epoch 18, batch 2950, loss[loss=0.2289, simple_loss=0.3045, pruned_loss=0.07667, over 18765.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2938, pruned_loss=0.06886, over 3802965.66 frames. ], batch size: 74, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:01:26,622 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:50,466 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:08,449 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:24,518 INFO [train.py:903] (2/4) Epoch 18, batch 3000, loss[loss=0.2357, simple_loss=0.3107, pruned_loss=0.08039, over 13600.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2946, pruned_loss=0.0693, over 3792650.73 frames. ], batch size: 135, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:02:24,519 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 12:02:37,018 INFO [train.py:937] (2/4) Epoch 18, validation: loss=0.1707, simple_loss=0.2711, pruned_loss=0.03521, over 944034.00 frames. +2023-04-02 12:02:37,019 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 12:02:40,535 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 12:02:50,781 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:57,638 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,179 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,940 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.504e+02 6.538e+02 8.295e+02 4.074e+03, threshold=1.308e+03, percent-clipped=8.0 +2023-04-02 12:03:20,401 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119111.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:37,845 INFO [train.py:903] (2/4) Epoch 18, batch 3050, loss[loss=0.1836, simple_loss=0.27, pruned_loss=0.04855, over 19756.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2943, pruned_loss=0.069, over 3801044.91 frames. ], batch size: 54, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:04:37,426 INFO [train.py:903] (2/4) Epoch 18, batch 3100, loss[loss=0.1889, simple_loss=0.282, pruned_loss=0.04791, over 19689.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.294, pruned_loss=0.06891, over 3801725.17 frames. ], batch size: 59, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:18,220 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.828e+02 5.979e+02 7.400e+02 1.693e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 12:05:39,409 INFO [train.py:903] (2/4) Epoch 18, batch 3150, loss[loss=0.1927, simple_loss=0.2622, pruned_loss=0.0616, over 19748.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.294, pruned_loss=0.06915, over 3803842.85 frames. ], batch size: 46, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:06:04,644 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 12:06:06,964 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 12:06:39,671 INFO [train.py:903] (2/4) Epoch 18, batch 3200, loss[loss=0.1974, simple_loss=0.275, pruned_loss=0.05992, over 19781.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2943, pruned_loss=0.06927, over 3805555.53 frames. ], batch size: 48, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:07:09,070 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3039, 2.1765, 1.9357, 1.8065, 1.6072, 1.8266, 0.5735, 1.2436], + device='cuda:2'), covar=tensor([0.0504, 0.0531, 0.0452, 0.0744, 0.1033, 0.0877, 0.1200, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0345, 0.0345, 0.0371, 0.0446, 0.0378, 0.0326, 0.0333], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:07:18,202 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.710e+02 5.964e+02 8.137e+02 2.705e+03, threshold=1.193e+03, percent-clipped=4.0 +2023-04-02 12:07:39,013 INFO [train.py:903] (2/4) Epoch 18, batch 3250, loss[loss=0.2255, simple_loss=0.3056, pruned_loss=0.07269, over 19683.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2941, pruned_loss=0.06937, over 3808125.17 frames. 
], batch size: 60, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:24,496 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:08:37,672 INFO [train.py:903] (2/4) Epoch 18, batch 3300, loss[loss=0.1835, simple_loss=0.2628, pruned_loss=0.0521, over 19423.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2941, pruned_loss=0.06952, over 3801408.65 frames. ], batch size: 48, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:42,302 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 12:08:50,174 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7839, 4.3687, 2.5455, 3.8624, 0.8478, 4.2837, 4.1640, 4.3071], + device='cuda:2'), covar=tensor([0.0630, 0.1092, 0.2182, 0.0858, 0.4395, 0.0677, 0.0898, 0.1188], + device='cuda:2'), in_proj_covar=tensor([0.0484, 0.0396, 0.0481, 0.0342, 0.0399, 0.0418, 0.0412, 0.0445], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:08:53,612 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:09:17,495 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.369e+02 6.623e+02 8.148e+02 1.799e+03, threshold=1.325e+03, percent-clipped=5.0 +2023-04-02 12:09:37,824 INFO [train.py:903] (2/4) Epoch 18, batch 3350, loss[loss=0.1936, simple_loss=0.2693, pruned_loss=0.05901, over 19477.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2938, pruned_loss=0.06926, over 3807612.77 frames. ], batch size: 49, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:09:39,235 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1446, 3.4241, 3.6806, 3.6863, 2.0127, 3.4168, 3.1247, 3.4623], + device='cuda:2'), covar=tensor([0.1357, 0.3069, 0.0604, 0.0738, 0.4371, 0.1343, 0.0621, 0.0987], + device='cuda:2'), in_proj_covar=tensor([0.0756, 0.0698, 0.0903, 0.0787, 0.0805, 0.0661, 0.0547, 0.0838], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 12:09:46,677 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7364, 1.1641, 1.5646, 1.7160, 3.0682, 1.3484, 2.5627, 3.6913], + device='cuda:2'), covar=tensor([0.0607, 0.3519, 0.3057, 0.1967, 0.1160, 0.2690, 0.1273, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0399, 0.0358, 0.0377, 0.0343, 0.0366, 0.0347, 0.0368, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:09:50,664 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:11,911 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:37,453 INFO [train.py:903] (2/4) Epoch 18, batch 3400, loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.06392, over 19679.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2927, pruned_loss=0.06815, over 3823787.98 frames. 
], batch size: 53, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:18,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 4.894e+02 5.911e+02 7.983e+02 1.559e+03, threshold=1.182e+03, percent-clipped=2.0 +2023-04-02 12:11:37,556 INFO [train.py:903] (2/4) Epoch 18, batch 3450, loss[loss=0.2595, simple_loss=0.3287, pruned_loss=0.09511, over 19330.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2923, pruned_loss=0.06804, over 3830260.96 frames. ], batch size: 66, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:43,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 12:11:45,595 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:09,611 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119552.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:31,760 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119570.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:38,210 INFO [train.py:903] (2/4) Epoch 18, batch 3500, loss[loss=0.2021, simple_loss=0.2919, pruned_loss=0.05612, over 17998.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2931, pruned_loss=0.0681, over 3822918.01 frames. ], batch size: 83, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:12:47,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. limit=5.0 +2023-04-02 12:12:51,542 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4185, 1.4899, 1.8529, 1.6671, 2.9134, 2.2802, 2.9975, 1.3059], + device='cuda:2'), covar=tensor([0.2312, 0.4122, 0.2570, 0.1795, 0.1339, 0.2078, 0.1363, 0.4061], + device='cuda:2'), in_proj_covar=tensor([0.0520, 0.0620, 0.0681, 0.0469, 0.0616, 0.0522, 0.0657, 0.0531], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 12:12:55,131 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 12:13:20,028 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.884e+02 6.516e+02 7.989e+02 1.670e+03, threshold=1.303e+03, percent-clipped=4.0 +2023-04-02 12:13:39,051 INFO [train.py:903] (2/4) Epoch 18, batch 3550, loss[loss=0.2434, simple_loss=0.3151, pruned_loss=0.08589, over 19453.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.293, pruned_loss=0.06811, over 3826357.27 frames. ], batch size: 64, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:14:39,531 INFO [train.py:903] (2/4) Epoch 18, batch 3600, loss[loss=0.186, simple_loss=0.2647, pruned_loss=0.05365, over 19737.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2933, pruned_loss=0.06813, over 3828873.07 frames. ], batch size: 46, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:14:47,911 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:15:20,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.936e+02 6.005e+02 7.350e+02 2.220e+03, threshold=1.201e+03, percent-clipped=3.0 +2023-04-02 12:15:39,373 INFO [train.py:903] (2/4) Epoch 18, batch 3650, loss[loss=0.2444, simple_loss=0.3091, pruned_loss=0.08988, over 19475.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2921, pruned_loss=0.06757, over 3835282.21 frames. 
], batch size: 49, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:40,013 INFO [train.py:903] (2/4) Epoch 18, batch 3700, loss[loss=0.1773, simple_loss=0.2657, pruned_loss=0.04441, over 19618.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2929, pruned_loss=0.06806, over 3841211.79 frames. ], batch size: 50, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:58,975 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 12:17:19,636 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:21,520 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 5.017e+02 5.995e+02 7.636e+02 1.424e+03, threshold=1.199e+03, percent-clipped=3.0 +2023-04-02 12:17:32,243 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0547, 1.2421, 1.7347, 1.1997, 2.5345, 3.5167, 3.2541, 3.7314], + device='cuda:2'), covar=tensor([0.1693, 0.3770, 0.3244, 0.2444, 0.0599, 0.0203, 0.0205, 0.0229], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0309, 0.0340, 0.0258, 0.0233, 0.0178, 0.0211, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 12:17:40,477 INFO [train.py:903] (2/4) Epoch 18, batch 3750, loss[loss=0.1963, simple_loss=0.283, pruned_loss=0.05478, over 19536.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06836, over 3841319.54 frames. ], batch size: 54, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:17:40,926 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119826.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:48,736 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:58,529 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9648, 2.0412, 1.7148, 3.1210, 2.0314, 2.9070, 1.9367, 1.5696], + device='cuda:2'), covar=tensor([0.5139, 0.4580, 0.2829, 0.3066, 0.4829, 0.2358, 0.6354, 0.5150], + device='cuda:2'), in_proj_covar=tensor([0.0870, 0.0926, 0.0696, 0.0921, 0.0850, 0.0789, 0.0827, 0.0766], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 12:18:10,606 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:20,868 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9809, 4.4038, 4.7299, 4.7425, 1.9136, 4.4490, 3.8620, 4.4151], + device='cuda:2'), covar=tensor([0.1546, 0.0904, 0.0544, 0.0591, 0.5618, 0.0745, 0.0630, 0.1047], + device='cuda:2'), in_proj_covar=tensor([0.0757, 0.0696, 0.0905, 0.0790, 0.0806, 0.0656, 0.0546, 0.0839], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 12:18:33,229 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6134, 1.8094, 2.1615, 2.0001, 3.2904, 2.7360, 3.5564, 1.6673], + device='cuda:2'), covar=tensor([0.2288, 0.3950, 0.2551, 0.1654, 0.1494, 0.1954, 0.1576, 0.3997], + device='cuda:2'), in_proj_covar=tensor([0.0518, 0.0618, 0.0678, 0.0467, 0.0611, 0.0519, 0.0654, 0.0530], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:18:39,734 INFO [train.py:903] (2/4) Epoch 18, 
batch 3800, loss[loss=0.1866, simple_loss=0.2674, pruned_loss=0.05283, over 19789.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2926, pruned_loss=0.06755, over 3837017.88 frames. ], batch size: 47, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:18:40,729 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:19:12,059 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 12:19:22,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.499e+02 6.857e+02 8.596e+02 2.059e+03, threshold=1.371e+03, percent-clipped=8.0 +2023-04-02 12:19:41,468 INFO [train.py:903] (2/4) Epoch 18, batch 3850, loss[loss=0.1897, simple_loss=0.2747, pruned_loss=0.05234, over 19479.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2936, pruned_loss=0.06827, over 3823358.38 frames. ], batch size: 49, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:20:40,729 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 12:20:43,315 INFO [train.py:903] (2/4) Epoch 18, batch 3900, loss[loss=0.1945, simple_loss=0.2705, pruned_loss=0.05927, over 19857.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2919, pruned_loss=0.06704, over 3836517.15 frames. ], batch size: 52, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:02,833 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:26,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.199e+02 4.757e+02 5.722e+02 7.251e+02 1.425e+03, threshold=1.144e+03, percent-clipped=2.0 +2023-04-02 12:21:45,629 INFO [train.py:903] (2/4) Epoch 18, batch 3950, loss[loss=0.2071, simple_loss=0.2802, pruned_loss=0.06706, over 19735.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06686, over 3830942.60 frames. ], batch size: 51, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:47,757 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=120027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:49,996 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 12:22:05,641 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2540, 5.6811, 3.1916, 4.9171, 0.9397, 5.8349, 5.6364, 5.8247], + device='cuda:2'), covar=tensor([0.0297, 0.0646, 0.1724, 0.0766, 0.4248, 0.0467, 0.0644, 0.0788], + device='cuda:2'), in_proj_covar=tensor([0.0480, 0.0392, 0.0479, 0.0341, 0.0396, 0.0415, 0.0408, 0.0442], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:22:47,962 INFO [train.py:903] (2/4) Epoch 18, batch 4000, loss[loss=0.1689, simple_loss=0.2431, pruned_loss=0.0474, over 19732.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06681, over 3834311.20 frames. ], batch size: 46, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:23:29,303 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 4.630e+02 5.764e+02 7.444e+02 1.534e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-02 12:23:36,025 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-02 12:23:49,494 INFO [train.py:903] (2/4) Epoch 18, batch 4050, loss[loss=0.2372, simple_loss=0.3116, pruned_loss=0.08138, over 18100.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2916, pruned_loss=0.06686, over 3824340.42 frames. ], batch size: 83, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:24:08,122 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:24:47,319 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6926, 1.5187, 1.5014, 1.9155, 1.6317, 1.7656, 1.7364, 1.7308], + device='cuda:2'), covar=tensor([0.0697, 0.0778, 0.0846, 0.0652, 0.0864, 0.0724, 0.0876, 0.0585], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0244, 0.0227, 0.0208, 0.0188, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 12:24:49,147 INFO [train.py:903] (2/4) Epoch 18, batch 4100, loss[loss=0.2138, simple_loss=0.2896, pruned_loss=0.06904, over 19676.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2928, pruned_loss=0.06801, over 3810380.37 frames. ], batch size: 53, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:25:24,879 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 12:25:29,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.087e+02 6.190e+02 7.903e+02 1.294e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 12:25:48,080 INFO [train.py:903] (2/4) Epoch 18, batch 4150, loss[loss=0.2064, simple_loss=0.2881, pruned_loss=0.06237, over 19624.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2913, pruned_loss=0.06717, over 3814551.95 frames. ], batch size: 57, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:14,729 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:46,430 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:50,734 INFO [train.py:903] (2/4) Epoch 18, batch 4200, loss[loss=0.1837, simple_loss=0.2663, pruned_loss=0.05052, over 19628.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.289, pruned_loss=0.06605, over 3826771.09 frames. ], batch size: 50, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:57,180 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 12:27:06,695 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0591, 1.9901, 1.7900, 1.6037, 1.5042, 1.6194, 0.5665, 1.1202], + device='cuda:2'), covar=tensor([0.0500, 0.0589, 0.0433, 0.0690, 0.1073, 0.0840, 0.1221, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0345, 0.0346, 0.0372, 0.0448, 0.0379, 0.0325, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:27:30,829 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 4.968e+02 6.385e+02 7.954e+02 1.571e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 12:27:51,846 INFO [train.py:903] (2/4) Epoch 18, batch 4250, loss[loss=0.2738, simple_loss=0.3418, pruned_loss=0.1029, over 19701.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2898, pruned_loss=0.06685, over 3819363.67 frames. 
], batch size: 60, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:28:09,630 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 12:28:18,904 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 12:28:19,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. limit=5.0 +2023-04-02 12:28:51,534 INFO [train.py:903] (2/4) Epoch 18, batch 4300, loss[loss=0.2357, simple_loss=0.3073, pruned_loss=0.08206, over 19535.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2887, pruned_loss=0.06598, over 3833868.14 frames. ], batch size: 54, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:19,848 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:34,134 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.084e+02 5.969e+02 7.631e+02 1.294e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 12:29:46,147 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 12:29:49,869 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120423.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:52,876 INFO [train.py:903] (2/4) Epoch 18, batch 4350, loss[loss=0.2205, simple_loss=0.3043, pruned_loss=0.06833, over 19774.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2893, pruned_loss=0.06603, over 3826999.73 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:57,476 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8974, 1.1684, 1.5548, 0.6305, 2.0629, 2.4696, 2.2365, 2.6479], + device='cuda:2'), covar=tensor([0.1664, 0.3759, 0.3197, 0.2552, 0.0577, 0.0265, 0.0327, 0.0340], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0311, 0.0339, 0.0258, 0.0234, 0.0179, 0.0211, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 12:30:20,709 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6073, 1.1656, 1.6109, 1.5995, 2.8973, 1.2113, 2.4203, 3.3562], + device='cuda:2'), covar=tensor([0.0604, 0.3576, 0.2984, 0.2056, 0.1178, 0.2819, 0.1341, 0.0451], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0354, 0.0374, 0.0338, 0.0363, 0.0343, 0.0364, 0.0385], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:30:55,182 INFO [train.py:903] (2/4) Epoch 18, batch 4400, loss[loss=0.1987, simple_loss=0.2654, pruned_loss=0.06604, over 18996.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2895, pruned_loss=0.06635, over 3830433.70 frames. ], batch size: 42, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:31:18,739 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 12:31:28,329 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 12:31:35,095 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.092e+02 6.306e+02 8.223e+02 1.474e+03, threshold=1.261e+03, percent-clipped=7.0 +2023-04-02 12:31:55,629 INFO [train.py:903] (2/4) Epoch 18, batch 4450, loss[loss=0.2178, simple_loss=0.3023, pruned_loss=0.06671, over 19771.00 frames. 
], tot_loss[loss=0.2126, simple_loss=0.2909, pruned_loss=0.0671, over 3844527.01 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:32:55,857 INFO [train.py:903] (2/4) Epoch 18, batch 4500, loss[loss=0.3113, simple_loss=0.3681, pruned_loss=0.1272, over 13450.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2914, pruned_loss=0.06723, over 3818110.48 frames. ], batch size: 138, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:33:37,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.202e+02 6.117e+02 7.756e+02 2.072e+03, threshold=1.223e+03, percent-clipped=4.0 +2023-04-02 12:33:56,187 INFO [train.py:903] (2/4) Epoch 18, batch 4550, loss[loss=0.2046, simple_loss=0.277, pruned_loss=0.06604, over 19344.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2902, pruned_loss=0.06648, over 3822199.26 frames. ], batch size: 47, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:34:05,869 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 12:34:29,505 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 12:34:56,267 INFO [train.py:903] (2/4) Epoch 18, batch 4600, loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05944, over 19539.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2911, pruned_loss=0.0672, over 3831641.64 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:35:17,297 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=120694.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:35:27,842 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5629, 1.4873, 1.5043, 2.0627, 1.5773, 2.0550, 2.0207, 1.8279], + device='cuda:2'), covar=tensor([0.0859, 0.0974, 0.1036, 0.0858, 0.0942, 0.0708, 0.0836, 0.0687], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-02 12:35:35,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 4.865e+02 6.169e+02 7.995e+02 1.948e+03, threshold=1.234e+03, percent-clipped=9.0 +2023-04-02 12:35:55,083 INFO [train.py:903] (2/4) Epoch 18, batch 4650, loss[loss=0.2023, simple_loss=0.2969, pruned_loss=0.05386, over 19769.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2911, pruned_loss=0.06702, over 3833032.24 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:36:13,552 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 12:36:23,041 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 12:36:24,603 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 12:36:55,848 INFO [train.py:903] (2/4) Epoch 18, batch 4700, loss[loss=0.1763, simple_loss=0.2529, pruned_loss=0.04986, over 19779.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2916, pruned_loss=0.06722, over 3832613.28 frames. ], batch size: 47, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:37:18,750 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 12:37:37,336 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.224e+02 6.544e+02 8.077e+02 2.112e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 12:37:51,183 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6217, 1.5312, 1.5717, 2.1303, 1.6979, 1.8712, 1.9813, 1.7335], + device='cuda:2'), covar=tensor([0.0786, 0.0900, 0.0953, 0.0784, 0.0818, 0.0745, 0.0776, 0.0702], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0223, 0.0225, 0.0244, 0.0227, 0.0209, 0.0189, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 12:37:54,543 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 12:37:56,118 INFO [train.py:903] (2/4) Epoch 18, batch 4750, loss[loss=0.1906, simple_loss=0.2766, pruned_loss=0.05237, over 19672.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06693, over 3840260.11 frames. ], batch size: 53, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:38:57,291 INFO [train.py:903] (2/4) Epoch 18, batch 4800, loss[loss=0.2172, simple_loss=0.3057, pruned_loss=0.0644, over 19793.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2917, pruned_loss=0.06663, over 3831939.48 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:39:38,286 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.063e+02 6.517e+02 8.294e+02 1.429e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-04-02 12:39:57,798 INFO [train.py:903] (2/4) Epoch 18, batch 4850, loss[loss=0.2042, simple_loss=0.2895, pruned_loss=0.0594, over 19663.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2918, pruned_loss=0.06696, over 3827799.08 frames. ], batch size: 53, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:40:19,477 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 12:40:38,930 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 12:40:44,415 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 12:40:46,348 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 12:40:55,310 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 12:40:57,468 INFO [train.py:903] (2/4) Epoch 18, batch 4900, loss[loss=0.2678, simple_loss=0.3275, pruned_loss=0.104, over 13034.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2932, pruned_loss=0.06772, over 3828337.99 frames. ], batch size: 135, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:41:15,410 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 12:41:38,442 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.463e+02 5.379e+02 6.693e+02 8.152e+02 1.326e+03, threshold=1.339e+03, percent-clipped=1.0 +2023-04-02 12:41:50,638 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:41:56,821 INFO [train.py:903] (2/4) Epoch 18, batch 4950, loss[loss=0.1975, simple_loss=0.2632, pruned_loss=0.06587, over 19745.00 frames. 
], tot_loss[loss=0.2158, simple_loss=0.2939, pruned_loss=0.06881, over 3819088.89 frames. ], batch size: 45, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:42:10,900 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121038.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:42:13,945 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 12:42:36,741 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 12:42:55,222 INFO [train.py:903] (2/4) Epoch 18, batch 5000, loss[loss=0.1918, simple_loss=0.2622, pruned_loss=0.06072, over 17676.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2947, pruned_loss=0.06957, over 3810924.66 frames. ], batch size: 39, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:43:05,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 12:43:11,831 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0780, 1.7700, 1.8733, 2.9759, 2.1264, 2.4270, 2.5418, 2.1218], + device='cuda:2'), covar=tensor([0.0842, 0.0984, 0.1046, 0.0783, 0.0873, 0.0757, 0.0866, 0.0717], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0244, 0.0229, 0.0210, 0.0189, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-02 12:43:16,146 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 12:43:36,112 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.049e+02 6.044e+02 8.198e+02 1.739e+03, threshold=1.209e+03, percent-clipped=8.0 +2023-04-02 12:43:55,699 INFO [train.py:903] (2/4) Epoch 18, batch 5050, loss[loss=0.2001, simple_loss=0.2867, pruned_loss=0.05676, over 19614.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2937, pruned_loss=0.06883, over 3809237.15 frames. ], batch size: 57, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:44:27,513 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:44:29,512 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 12:44:55,825 INFO [train.py:903] (2/4) Epoch 18, batch 5100, loss[loss=0.2148, simple_loss=0.2979, pruned_loss=0.06582, over 19772.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.0684, over 3821654.72 frames. ], batch size: 54, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:45:03,042 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:45:05,061 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 12:45:07,241 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 12:45:12,781 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 12:45:37,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 4.928e+02 6.063e+02 8.354e+02 2.244e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 12:45:54,355 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0826, 1.7958, 1.4323, 1.1432, 1.6354, 1.1410, 1.1344, 1.6187], + device='cuda:2'), covar=tensor([0.0804, 0.0750, 0.1022, 0.0824, 0.0496, 0.1238, 0.0635, 0.0416], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0313, 0.0326, 0.0259, 0.0247, 0.0332, 0.0292, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:45:56,109 INFO [train.py:903] (2/4) Epoch 18, batch 5150, loss[loss=0.2392, simple_loss=0.3241, pruned_loss=0.07711, over 19532.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2932, pruned_loss=0.06854, over 3806335.66 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:46:05,292 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 12:46:40,470 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 12:46:55,813 INFO [train.py:903] (2/4) Epoch 18, batch 5200, loss[loss=0.2128, simple_loss=0.2937, pruned_loss=0.06592, over 18737.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2925, pruned_loss=0.06852, over 3809098.20 frames. ], batch size: 74, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:03,760 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9407, 1.6129, 1.8569, 1.5529, 4.4281, 0.9896, 2.4442, 4.7648], + device='cuda:2'), covar=tensor([0.0409, 0.2807, 0.2800, 0.2105, 0.0743, 0.2867, 0.1603, 0.0186], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0355, 0.0376, 0.0341, 0.0365, 0.0347, 0.0366, 0.0387], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:47:08,971 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 12:47:36,014 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.110e+02 6.332e+02 8.392e+02 3.036e+03, threshold=1.266e+03, percent-clipped=7.0 +2023-04-02 12:47:39,666 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0556, 1.2603, 1.6032, 1.0455, 2.4712, 3.2878, 3.0557, 3.5218], + device='cuda:2'), covar=tensor([0.1722, 0.3701, 0.3306, 0.2603, 0.0619, 0.0203, 0.0232, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0312, 0.0343, 0.0259, 0.0235, 0.0179, 0.0212, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 12:47:49,060 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-02 12:47:50,676 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4601, 1.7065, 1.9830, 1.7609, 3.3005, 2.7923, 3.7682, 1.6288], + device='cuda:2'), covar=tensor([0.2278, 0.4029, 0.2535, 0.1694, 0.1349, 0.1813, 0.1297, 0.3745], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0621, 0.0680, 0.0466, 0.0612, 0.0519, 0.0653, 0.0531], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:47:54,560 INFO [train.py:903] (2/4) Epoch 18, batch 5250, loss[loss=0.244, simple_loss=0.3298, pruned_loss=0.0791, over 18073.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06818, over 3822529.43 frames. ], batch size: 83, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:59,205 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121329.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:48:22,549 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4256, 1.4902, 1.6711, 1.6576, 2.6490, 2.2824, 2.7502, 1.1987], + device='cuda:2'), covar=tensor([0.2345, 0.4190, 0.2589, 0.1794, 0.1422, 0.1947, 0.1327, 0.4091], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0620, 0.0680, 0.0466, 0.0611, 0.0518, 0.0653, 0.0531], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 12:48:40,811 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:48:54,023 INFO [train.py:903] (2/4) Epoch 18, batch 5300, loss[loss=0.2063, simple_loss=0.2918, pruned_loss=0.06036, over 19776.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2918, pruned_loss=0.0679, over 3833411.65 frames. ], batch size: 47, lr: 4.56e-03, grad_scale: 4.0 +2023-04-02 12:49:10,545 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 12:49:14,138 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2507, 2.3015, 2.4790, 3.1194, 2.2841, 2.8899, 2.5782, 2.3628], + device='cuda:2'), covar=tensor([0.3909, 0.3758, 0.1758, 0.2259, 0.4209, 0.2007, 0.4197, 0.2925], + device='cuda:2'), in_proj_covar=tensor([0.0870, 0.0928, 0.0695, 0.0923, 0.0851, 0.0788, 0.0826, 0.0764], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 12:49:33,469 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:35,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.404e+02 6.878e+02 8.269e+02 2.518e+03, threshold=1.376e+03, percent-clipped=11.0 +2023-04-02 12:49:52,525 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:53,390 INFO [train.py:903] (2/4) Epoch 18, batch 5350, loss[loss=0.2106, simple_loss=0.2829, pruned_loss=0.06914, over 19755.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2927, pruned_loss=0.06841, over 3840637.35 frames. ], batch size: 51, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:03,598 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:50:27,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 12:50:53,952 INFO [train.py:903] (2/4) Epoch 18, batch 5400, loss[loss=0.1712, simple_loss=0.2419, pruned_loss=0.05021, over 19311.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2927, pruned_loss=0.06819, over 3838547.87 frames. ], batch size: 44, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:59,562 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121480.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:35,380 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 5.292e+02 6.238e+02 7.901e+02 1.766e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 12:51:54,667 INFO [train.py:903] (2/4) Epoch 18, batch 5450, loss[loss=0.2083, simple_loss=0.2847, pruned_loss=0.066, over 19611.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2926, pruned_loss=0.0684, over 3838296.56 frames. ], batch size: 50, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:51:54,831 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:56,524 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 12:52:54,550 INFO [train.py:903] (2/4) Epoch 18, batch 5500, loss[loss=0.185, simple_loss=0.2584, pruned_loss=0.0558, over 19751.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2919, pruned_loss=0.06807, over 3842586.78 frames. ], batch size: 47, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:53:09,283 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0294, 1.7446, 1.5974, 1.9268, 1.7217, 1.7255, 1.5028, 1.9000], + device='cuda:2'), covar=tensor([0.1001, 0.1373, 0.1486, 0.1109, 0.1299, 0.0560, 0.1414, 0.0723], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0249, 0.0299, 0.0247, 0.0297, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 12:53:19,887 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 12:53:37,492 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.754e+02 5.891e+02 7.661e+02 1.861e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-02 12:53:55,069 INFO [train.py:903] (2/4) Epoch 18, batch 5550, loss[loss=0.2394, simple_loss=0.3089, pruned_loss=0.08499, over 15227.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2932, pruned_loss=0.069, over 3817618.98 frames. ], batch size: 33, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:54:02,986 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 12:54:13,211 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121641.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:54:51,524 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 12:54:52,537 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121673.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:54:55,720 INFO [train.py:903] (2/4) Epoch 18, batch 5600, loss[loss=0.1802, simple_loss=0.2587, pruned_loss=0.0509, over 19740.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2932, pruned_loss=0.06887, over 3808299.41 frames. 
], batch size: 51, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:55:37,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.280e+02 4.851e+02 5.729e+02 6.791e+02 1.695e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-04-02 12:55:56,732 INFO [train.py:903] (2/4) Epoch 18, batch 5650, loss[loss=0.1836, simple_loss=0.2649, pruned_loss=0.05114, over 19745.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.293, pruned_loss=0.06844, over 3805974.42 frames. ], batch size: 51, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:56:08,966 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:37,722 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121761.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:44,587 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 12:56:48,216 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:57,052 INFO [train.py:903] (2/4) Epoch 18, batch 5700, loss[loss=0.194, simple_loss=0.283, pruned_loss=0.05246, over 19669.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2925, pruned_loss=0.06829, over 3812609.16 frames. ], batch size: 58, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:10,992 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121788.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 12:57:39,331 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.821e+02 5.859e+02 7.608e+02 1.521e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-02 12:57:57,051 INFO [train.py:903] (2/4) Epoch 18, batch 5750, loss[loss=0.2563, simple_loss=0.3309, pruned_loss=0.09085, over 18221.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2927, pruned_loss=0.06799, over 3816857.17 frames. ], batch size: 83, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:58,078 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 12:58:05,928 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 12:58:11,402 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 12:58:57,520 INFO [train.py:903] (2/4) Epoch 18, batch 5800, loss[loss=0.1712, simple_loss=0.2502, pruned_loss=0.04608, over 19120.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2929, pruned_loss=0.06773, over 3812153.54 frames. ], batch size: 42, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:59:08,077 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:24,546 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:39,859 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.174e+02 6.386e+02 8.157e+02 2.937e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-02 12:59:41,516 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.49 vs. 
limit=5.0 +2023-04-02 12:59:54,651 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:59,636 INFO [train.py:903] (2/4) Epoch 18, batch 5850, loss[loss=0.2093, simple_loss=0.2912, pruned_loss=0.06367, over 19609.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2918, pruned_loss=0.06738, over 3799606.02 frames. ], batch size: 57, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:00:51,273 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121968.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:01:00,894 INFO [train.py:903] (2/4) Epoch 18, batch 5900, loss[loss=0.2351, simple_loss=0.3198, pruned_loss=0.07524, over 19317.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2919, pruned_loss=0.06707, over 3803990.14 frames. ], batch size: 66, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:01:02,088 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 13:01:11,457 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9060, 1.3403, 1.0518, 0.9590, 1.1844, 1.0019, 0.9193, 1.2590], + device='cuda:2'), covar=tensor([0.0601, 0.0824, 0.1154, 0.0694, 0.0555, 0.1256, 0.0606, 0.0453], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0312, 0.0326, 0.0256, 0.0245, 0.0331, 0.0291, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:01:23,286 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 13:01:39,559 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5969, 1.2989, 1.5310, 1.0828, 2.2514, 0.9119, 2.1181, 2.5080], + device='cuda:2'), covar=tensor([0.0721, 0.2616, 0.2527, 0.1794, 0.0856, 0.2159, 0.0946, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0390, 0.0352, 0.0373, 0.0338, 0.0364, 0.0346, 0.0363, 0.0385], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:01:43,749 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.950e+02 6.415e+02 8.144e+02 2.513e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-02 13:02:01,673 INFO [train.py:903] (2/4) Epoch 18, batch 5950, loss[loss=0.1914, simple_loss=0.2673, pruned_loss=0.05776, over 19688.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2916, pruned_loss=0.06744, over 3795298.96 frames. ], batch size: 53, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:02:24,025 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122044.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:02:42,871 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 13:02:53,813 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122069.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:03:01,818 INFO [train.py:903] (2/4) Epoch 18, batch 6000, loss[loss=0.2248, simple_loss=0.304, pruned_loss=0.07276, over 19770.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2921, pruned_loss=0.06807, over 3796871.09 frames. 
], batch size: 54, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:03:01,818 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 13:03:14,306 INFO [train.py:937] (2/4) Epoch 18, validation: loss=0.1702, simple_loss=0.2706, pruned_loss=0.03489, over 944034.00 frames. +2023-04-02 13:03:14,308 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 13:03:42,527 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0155, 2.0690, 2.2542, 2.6647, 2.0148, 2.6527, 2.3174, 2.1366], + device='cuda:2'), covar=tensor([0.4043, 0.3888, 0.1779, 0.2283, 0.4077, 0.1957, 0.4475, 0.3146], + device='cuda:2'), in_proj_covar=tensor([0.0867, 0.0924, 0.0696, 0.0918, 0.0851, 0.0785, 0.0823, 0.0760], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 13:03:51,026 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 13:03:57,709 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.792e+02 5.055e+02 6.106e+02 7.549e+02 1.634e+03, threshold=1.221e+03, percent-clipped=4.0 +2023-04-02 13:04:07,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 13:04:15,898 INFO [train.py:903] (2/4) Epoch 18, batch 6050, loss[loss=0.2266, simple_loss=0.3037, pruned_loss=0.07478, over 18060.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.06751, over 3799305.27 frames. ], batch size: 83, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:04:27,146 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:04:33,703 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:04,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:18,138 INFO [train.py:903] (2/4) Epoch 18, batch 6100, loss[loss=0.2177, simple_loss=0.2915, pruned_loss=0.07196, over 19769.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2918, pruned_loss=0.06793, over 3803097.06 frames. ], batch size: 54, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:05:59,976 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.309e+02 6.592e+02 8.010e+02 1.726e+03, threshold=1.318e+03, percent-clipped=1.0 +2023-04-02 13:06:03,457 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:06:17,056 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8490, 1.9552, 2.1971, 2.4672, 1.7952, 2.3251, 2.2345, 1.9773], + device='cuda:2'), covar=tensor([0.4061, 0.3701, 0.1769, 0.2232, 0.3919, 0.1997, 0.4572, 0.3166], + device='cuda:2'), in_proj_covar=tensor([0.0870, 0.0926, 0.0696, 0.0920, 0.0854, 0.0785, 0.0825, 0.0762], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 13:06:18,827 INFO [train.py:903] (2/4) Epoch 18, batch 6150, loss[loss=0.1783, simple_loss=0.254, pruned_loss=0.05131, over 19366.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2908, pruned_loss=0.06742, over 3813086.90 frames. 
], batch size: 47, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:06:38,410 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3471, 2.4603, 2.6095, 3.1861, 2.3823, 3.0649, 2.6328, 2.3587], + device='cuda:2'), covar=tensor([0.4295, 0.3670, 0.1754, 0.2354, 0.4212, 0.1924, 0.4644, 0.3165], + device='cuda:2'), in_proj_covar=tensor([0.0868, 0.0924, 0.0695, 0.0918, 0.0852, 0.0783, 0.0823, 0.0760], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 13:06:46,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 13:07:20,195 INFO [train.py:903] (2/4) Epoch 18, batch 6200, loss[loss=0.2073, simple_loss=0.2829, pruned_loss=0.06587, over 19852.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2917, pruned_loss=0.06749, over 3813130.12 frames. ], batch size: 52, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:04,096 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.549e+02 5.662e+02 6.707e+02 1.660e+03, threshold=1.132e+03, percent-clipped=3.0 +2023-04-02 13:08:05,411 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:17,460 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 13:08:21,487 INFO [train.py:903] (2/4) Epoch 18, batch 6250, loss[loss=0.1692, simple_loss=0.2607, pruned_loss=0.03882, over 19682.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.291, pruned_loss=0.06683, over 3814441.28 frames. ], batch size: 59, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:35,715 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:54,338 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 13:09:23,690 INFO [train.py:903] (2/4) Epoch 18, batch 6300, loss[loss=0.2528, simple_loss=0.3342, pruned_loss=0.08575, over 19757.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2902, pruned_loss=0.06673, over 3824210.51 frames. ], batch size: 63, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:06,333 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.826e+02 5.839e+02 7.420e+02 1.796e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-02 13:10:25,333 INFO [train.py:903] (2/4) Epoch 18, batch 6350, loss[loss=0.2114, simple_loss=0.2841, pruned_loss=0.0693, over 19473.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2905, pruned_loss=0.06665, over 3822035.44 frames. ], batch size: 49, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:26,810 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:25,781 INFO [train.py:903] (2/4) Epoch 18, batch 6400, loss[loss=0.1681, simple_loss=0.2435, pruned_loss=0.04636, over 19076.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2903, pruned_loss=0.06685, over 3806658.23 frames. 
], batch size: 42, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:11:29,340 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:30,790 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9572, 1.9080, 1.8370, 1.5785, 1.5060, 1.6293, 0.3923, 0.8791], + device='cuda:2'), covar=tensor([0.0609, 0.0582, 0.0361, 0.0620, 0.1096, 0.0785, 0.1198, 0.0997], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0347, 0.0347, 0.0373, 0.0449, 0.0379, 0.0329, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 13:11:38,322 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:12:08,690 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.572e+02 5.352e+02 7.001e+02 2.095e+03, threshold=1.070e+03, percent-clipped=5.0 +2023-04-02 13:12:27,163 INFO [train.py:903] (2/4) Epoch 18, batch 6450, loss[loss=0.1999, simple_loss=0.2709, pruned_loss=0.06443, over 19325.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06643, over 3818829.54 frames. ], batch size: 44, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:12:43,823 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 13:13:05,519 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:13:09,889 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 13:13:27,311 INFO [train.py:903] (2/4) Epoch 18, batch 6500, loss[loss=0.2384, simple_loss=0.3147, pruned_loss=0.08102, over 19560.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2918, pruned_loss=0.06713, over 3822449.94 frames. ], batch size: 61, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:13:32,617 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 13:13:41,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. limit=5.0 +2023-04-02 13:13:50,442 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:14:06,021 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2054, 2.0810, 1.9632, 1.7458, 1.5733, 1.7853, 0.6152, 1.1319], + device='cuda:2'), covar=tensor([0.0537, 0.0572, 0.0444, 0.0784, 0.1146, 0.0884, 0.1206, 0.0942], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0347, 0.0348, 0.0375, 0.0451, 0.0380, 0.0330, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 13:14:10,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.824e+02 5.508e+02 6.898e+02 1.222e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 13:14:28,808 INFO [train.py:903] (2/4) Epoch 18, batch 6550, loss[loss=0.2391, simple_loss=0.3281, pruned_loss=0.07509, over 18030.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06667, over 3821005.22 frames. 
], batch size: 83, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:25,357 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:29,447 INFO [train.py:903] (2/4) Epoch 18, batch 6600, loss[loss=0.2252, simple_loss=0.309, pruned_loss=0.07068, over 18713.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2914, pruned_loss=0.06703, over 3804984.49 frames. ], batch size: 74, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:35,565 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:37,968 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:08,835 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:11,860 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.976e+02 6.114e+02 8.137e+02 1.508e+03, threshold=1.223e+03, percent-clipped=10.0 +2023-04-02 13:16:29,836 INFO [train.py:903] (2/4) Epoch 18, batch 6650, loss[loss=0.2258, simple_loss=0.3079, pruned_loss=0.07181, over 19612.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06785, over 3791998.06 frames. ], batch size: 50, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:30,797 INFO [train.py:903] (2/4) Epoch 18, batch 6700, loss[loss=0.1967, simple_loss=0.2736, pruned_loss=0.05985, over 19745.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2912, pruned_loss=0.06709, over 3808087.67 frames. ], batch size: 51, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:55,686 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:12,047 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.791e+02 5.844e+02 7.603e+02 1.856e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 13:18:28,394 INFO [train.py:903] (2/4) Epoch 18, batch 6750, loss[loss=0.2453, simple_loss=0.3232, pruned_loss=0.08372, over 19757.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2907, pruned_loss=0.0671, over 3825968.72 frames. ], batch size: 54, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:18:33,100 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:55,595 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:24,259 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:25,055 INFO [train.py:903] (2/4) Epoch 18, batch 6800, loss[loss=0.1969, simple_loss=0.2668, pruned_loss=0.06356, over 19413.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2915, pruned_loss=0.06733, over 3832907.87 frames. ], batch size: 48, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:19:40,792 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:20:09,322 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 13:20:10,325 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. 
Duration: 26.6166875 +2023-04-02 13:20:12,600 INFO [train.py:903] (2/4) Epoch 19, batch 0, loss[loss=0.231, simple_loss=0.2995, pruned_loss=0.0812, over 19721.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2995, pruned_loss=0.0812, over 19721.00 frames. ], batch size: 51, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:20:12,601 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 13:20:24,054 INFO [train.py:937] (2/4) Epoch 19, validation: loss=0.171, simple_loss=0.2713, pruned_loss=0.03533, over 944034.00 frames. +2023-04-02 13:20:24,055 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 13:20:32,696 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 4.987e+02 6.075e+02 7.792e+02 1.350e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 13:20:37,488 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 13:20:53,762 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122928.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:20:55,931 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3067, 3.7623, 3.8765, 3.8960, 1.5205, 3.6661, 3.2317, 3.6110], + device='cuda:2'), covar=tensor([0.1589, 0.0918, 0.0675, 0.0734, 0.5591, 0.0933, 0.0696, 0.1188], + device='cuda:2'), in_proj_covar=tensor([0.0751, 0.0696, 0.0898, 0.0796, 0.0803, 0.0652, 0.0542, 0.0826], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 13:21:14,018 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:17,731 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 13:21:24,655 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:25,465 INFO [train.py:903] (2/4) Epoch 19, batch 50, loss[loss=0.2066, simple_loss=0.2825, pruned_loss=0.06534, over 19313.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2872, pruned_loss=0.06514, over 873722.68 frames. ], batch size: 66, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:04,201 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 13:22:27,943 INFO [train.py:903] (2/4) Epoch 19, batch 100, loss[loss=0.1793, simple_loss=0.2569, pruned_loss=0.05084, over 19048.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2906, pruned_loss=0.06797, over 1518801.78 frames. ], batch size: 42, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:35,842 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.975e+02 5.926e+02 8.151e+02 1.966e+03, threshold=1.185e+03, percent-clipped=7.0 +2023-04-02 13:22:40,158 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-02 13:22:51,546 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3576, 1.4311, 2.0738, 1.4756, 3.0603, 4.5693, 4.4379, 4.9996], + device='cuda:2'), covar=tensor([0.1570, 0.3793, 0.3217, 0.2375, 0.0563, 0.0187, 0.0166, 0.0164], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0311, 0.0341, 0.0259, 0.0234, 0.0179, 0.0211, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 13:23:25,744 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:23:27,464 INFO [train.py:903] (2/4) Epoch 19, batch 150, loss[loss=0.2212, simple_loss=0.3081, pruned_loss=0.06713, over 19786.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2904, pruned_loss=0.06732, over 2044698.08 frames. ], batch size: 56, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:23:55,480 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123077.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:24:27,286 INFO [train.py:903] (2/4) Epoch 19, batch 200, loss[loss=0.2308, simple_loss=0.3012, pruned_loss=0.08023, over 19578.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2915, pruned_loss=0.06708, over 2430186.86 frames. ], batch size: 52, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:24:27,659 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9958, 3.1411, 1.8700, 2.0084, 2.8944, 1.7480, 1.4310, 2.1231], + device='cuda:2'), covar=tensor([0.1268, 0.0612, 0.0986, 0.0749, 0.0474, 0.1084, 0.0908, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0310, 0.0327, 0.0256, 0.0243, 0.0331, 0.0288, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:24:29,573 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 13:24:35,484 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.223e+02 5.868e+02 7.012e+02 1.944e+03, threshold=1.174e+03, percent-clipped=7.0 +2023-04-02 13:25:27,082 INFO [train.py:903] (2/4) Epoch 19, batch 250, loss[loss=0.2434, simple_loss=0.3245, pruned_loss=0.08114, over 19711.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2913, pruned_loss=0.06636, over 2737598.03 frames. ], batch size: 63, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:25,100 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:26:28,233 INFO [train.py:903] (2/4) Epoch 19, batch 300, loss[loss=0.1955, simple_loss=0.2788, pruned_loss=0.05609, over 19849.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2941, pruned_loss=0.06832, over 2946977.19 frames. 
], batch size: 52, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:37,169 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.226e+02 7.852e+02 1.722e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-02 13:26:55,594 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:04,597 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:29,488 INFO [train.py:903] (2/4) Epoch 19, batch 350, loss[loss=0.2114, simple_loss=0.2969, pruned_loss=0.06296, over 19722.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2933, pruned_loss=0.06782, over 3151713.48 frames. ], batch size: 63, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:27:35,236 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 13:28:30,167 INFO [train.py:903] (2/4) Epoch 19, batch 400, loss[loss=0.2322, simple_loss=0.3089, pruned_loss=0.07774, over 19582.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2936, pruned_loss=0.06799, over 3306519.09 frames. ], batch size: 61, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:28:37,938 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.151e+02 6.306e+02 7.944e+02 1.366e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-02 13:28:55,716 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4174, 1.4560, 1.8875, 1.5592, 2.7640, 3.5856, 3.3999, 3.8317], + device='cuda:2'), covar=tensor([0.1520, 0.3596, 0.3102, 0.2198, 0.0657, 0.0257, 0.0218, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0310, 0.0341, 0.0258, 0.0235, 0.0179, 0.0211, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 13:29:04,159 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123332.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:24,955 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:30,457 INFO [train.py:903] (2/4) Epoch 19, batch 450, loss[loss=0.2192, simple_loss=0.301, pruned_loss=0.06867, over 18097.00 frames. ], tot_loss[loss=0.213, simple_loss=0.292, pruned_loss=0.06693, over 3430197.64 frames. ], batch size: 83, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:30:04,814 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 13:30:05,744 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 13:30:30,934 INFO [train.py:903] (2/4) Epoch 19, batch 500, loss[loss=0.2317, simple_loss=0.2986, pruned_loss=0.08238, over 17412.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2918, pruned_loss=0.06717, over 3521336.11 frames. 
], batch size: 101, lr: 4.40e-03, grad_scale: 16.0 +2023-04-02 13:30:39,753 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.352e+02 7.180e+02 8.361e+02 2.088e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-02 13:31:21,138 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:31:30,121 INFO [train.py:903] (2/4) Epoch 19, batch 550, loss[loss=0.2522, simple_loss=0.3267, pruned_loss=0.08889, over 19572.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2926, pruned_loss=0.06741, over 3587794.72 frames. ], batch size: 61, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:30,274 INFO [train.py:903] (2/4) Epoch 19, batch 600, loss[loss=0.1712, simple_loss=0.254, pruned_loss=0.04426, over 19747.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2926, pruned_loss=0.06716, over 3645300.16 frames. ], batch size: 51, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:39,899 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.960e+02 5.982e+02 8.370e+02 1.865e+03, threshold=1.196e+03, percent-clipped=4.0 +2023-04-02 13:33:12,222 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1435, 1.3782, 1.7985, 1.1915, 2.4625, 3.3970, 3.1117, 3.6137], + device='cuda:2'), covar=tensor([0.1516, 0.3419, 0.2990, 0.2258, 0.0593, 0.0181, 0.0214, 0.0239], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0312, 0.0343, 0.0259, 0.0237, 0.0179, 0.0212, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 13:33:14,037 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 13:33:30,834 INFO [train.py:903] (2/4) Epoch 19, batch 650, loss[loss=0.2314, simple_loss=0.3191, pruned_loss=0.07186, over 19667.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2923, pruned_loss=0.06691, over 3694492.19 frames. ], batch size: 58, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:30,415 INFO [train.py:903] (2/4) Epoch 19, batch 700, loss[loss=0.2075, simple_loss=0.2824, pruned_loss=0.06631, over 19854.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2927, pruned_loss=0.06735, over 3722782.33 frames. ], batch size: 52, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:31,944 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:34:41,234 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.460e+02 6.000e+02 7.674e+02 1.249e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 13:35:02,759 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:35:31,845 INFO [train.py:903] (2/4) Epoch 19, batch 750, loss[loss=0.1828, simple_loss=0.2535, pruned_loss=0.05603, over 19772.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.293, pruned_loss=0.06764, over 3736728.06 frames. ], batch size: 45, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:35:59,267 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:36:34,034 INFO [train.py:903] (2/4) Epoch 19, batch 800, loss[loss=0.2947, simple_loss=0.3494, pruned_loss=0.12, over 13447.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2935, pruned_loss=0.06786, over 3731891.68 frames. 
], batch size: 135, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:36:43,873 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 4.695e+02 6.255e+02 7.449e+02 1.390e+03, threshold=1.251e+03, percent-clipped=2.0 +2023-04-02 13:36:47,184 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 13:37:34,001 INFO [train.py:903] (2/4) Epoch 19, batch 850, loss[loss=0.2161, simple_loss=0.2944, pruned_loss=0.06889, over 19593.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2937, pruned_loss=0.06828, over 3743338.64 frames. ], batch size: 57, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:17,264 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:18,632 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:23,847 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 13:38:34,404 INFO [train.py:903] (2/4) Epoch 19, batch 900, loss[loss=0.267, simple_loss=0.3465, pruned_loss=0.0938, over 19332.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2936, pruned_loss=0.06806, over 3764664.80 frames. ], batch size: 66, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:44,951 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.405e+02 4.993e+02 6.523e+02 8.112e+02 2.572e+03, threshold=1.305e+03, percent-clipped=9.0 +2023-04-02 13:39:35,383 INFO [train.py:903] (2/4) Epoch 19, batch 950, loss[loss=0.2457, simple_loss=0.3103, pruned_loss=0.09057, over 19672.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2915, pruned_loss=0.06711, over 3776472.18 frames. ], batch size: 53, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:39:35,394 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 13:40:18,700 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:35,889 INFO [train.py:903] (2/4) Epoch 19, batch 1000, loss[loss=0.2632, simple_loss=0.3337, pruned_loss=0.09632, over 19623.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2923, pruned_loss=0.06764, over 3782369.97 frames. ], batch size: 61, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:40:37,284 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:44,460 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 5.533e+02 6.903e+02 9.244e+02 2.435e+03, threshold=1.381e+03, percent-clipped=8.0 +2023-04-02 13:41:23,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 13:41:33,882 INFO [train.py:903] (2/4) Epoch 19, batch 1050, loss[loss=0.2346, simple_loss=0.308, pruned_loss=0.08061, over 19648.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2944, pruned_loss=0.06878, over 3790201.09 frames. 
], batch size: 55, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:41:37,457 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0996, 2.9966, 2.1604, 2.2421, 1.8095, 2.3497, 1.1076, 2.1332], + device='cuda:2'), covar=tensor([0.0919, 0.0675, 0.0730, 0.1258, 0.1422, 0.1315, 0.1444, 0.1221], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0346, 0.0347, 0.0371, 0.0449, 0.0379, 0.0329, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 13:42:03,306 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 13:42:28,040 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8744, 1.5862, 1.8416, 1.5987, 4.4464, 1.0014, 2.5298, 4.7668], + device='cuda:2'), covar=tensor([0.0432, 0.2770, 0.2776, 0.2035, 0.0697, 0.2854, 0.1476, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0400, 0.0357, 0.0377, 0.0342, 0.0368, 0.0350, 0.0369, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:42:36,014 INFO [train.py:903] (2/4) Epoch 19, batch 1100, loss[loss=0.1891, simple_loss=0.2642, pruned_loss=0.05695, over 15562.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2938, pruned_loss=0.06817, over 3796379.82 frames. ], batch size: 34, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:42:45,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.737e+02 5.703e+02 7.685e+02 1.238e+03, threshold=1.141e+03, percent-clipped=0.0 +2023-04-02 13:43:28,396 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:36,792 INFO [train.py:903] (2/4) Epoch 19, batch 1150, loss[loss=0.2425, simple_loss=0.3123, pruned_loss=0.08633, over 17572.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2928, pruned_loss=0.06743, over 3811324.76 frames. ], batch size: 101, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:43:37,072 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:59,184 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:08,945 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:37,474 INFO [train.py:903] (2/4) Epoch 19, batch 1200, loss[loss=0.2068, simple_loss=0.2929, pruned_loss=0.06034, over 19415.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2927, pruned_loss=0.06746, over 3818601.85 frames. ], batch size: 70, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:44:48,185 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.076e+02 6.121e+02 8.217e+02 1.455e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-02 13:45:00,040 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 13:45:09,227 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 13:45:38,203 INFO [train.py:903] (2/4) Epoch 19, batch 1250, loss[loss=0.2182, simple_loss=0.2963, pruned_loss=0.07008, over 19542.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2917, pruned_loss=0.06714, over 3831376.12 frames. 
], batch size: 56, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:45:47,155 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:16,811 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:38,208 INFO [train.py:903] (2/4) Epoch 19, batch 1300, loss[loss=0.2693, simple_loss=0.3402, pruned_loss=0.09916, over 19333.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.06749, over 3833842.03 frames. ], batch size: 66, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:46:39,484 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4048, 4.0293, 2.5374, 3.5771, 1.1676, 3.8318, 3.8242, 3.8715], + device='cuda:2'), covar=tensor([0.0629, 0.0936, 0.1903, 0.0868, 0.3489, 0.0801, 0.0866, 0.1056], + device='cuda:2'), in_proj_covar=tensor([0.0484, 0.0394, 0.0479, 0.0338, 0.0394, 0.0419, 0.0407, 0.0442], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:46:48,377 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.504e+02 4.873e+02 5.866e+02 7.986e+02 1.872e+03, threshold=1.173e+03, percent-clipped=5.0 +2023-04-02 13:46:53,205 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 13:47:15,097 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:47:37,768 INFO [train.py:903] (2/4) Epoch 19, batch 1350, loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06504, over 19659.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.06749, over 3842729.34 frames. ], batch size: 53, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:22,193 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:48:30,511 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6524, 4.2105, 2.6266, 3.7707, 1.1272, 4.0924, 4.0198, 4.1669], + device='cuda:2'), covar=tensor([0.0618, 0.0924, 0.2076, 0.0800, 0.3984, 0.0710, 0.0890, 0.1081], + device='cuda:2'), in_proj_covar=tensor([0.0487, 0.0393, 0.0481, 0.0338, 0.0395, 0.0420, 0.0408, 0.0443], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 13:48:39,028 INFO [train.py:903] (2/4) Epoch 19, batch 1400, loss[loss=0.2466, simple_loss=0.3377, pruned_loss=0.07774, over 19726.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06692, over 3845813.93 frames. ], batch size: 63, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:48,825 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.508e+02 6.829e+02 9.566e+02 2.163e+03, threshold=1.366e+03, percent-clipped=9.0 +2023-04-02 13:49:21,065 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 13:49:32,977 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:49:38,317 INFO [train.py:903] (2/4) Epoch 19, batch 1450, loss[loss=0.2198, simple_loss=0.2987, pruned_loss=0.07047, over 19648.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2913, pruned_loss=0.06756, over 3836148.94 frames. 
], batch size: 55, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:49:40,267 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 13:49:53,730 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124366.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:50:04,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. limit=5.0 +2023-04-02 13:50:32,830 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:50:40,519 INFO [train.py:903] (2/4) Epoch 19, batch 1500, loss[loss=0.2253, simple_loss=0.3078, pruned_loss=0.07144, over 19621.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2909, pruned_loss=0.06701, over 3838764.99 frames. ], batch size: 57, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:50:50,226 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.822e+02 6.235e+02 8.378e+02 1.519e+03, threshold=1.247e+03, percent-clipped=2.0 +2023-04-02 13:51:04,442 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:28,516 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 13:51:35,981 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:39,961 INFO [train.py:903] (2/4) Epoch 19, batch 1550, loss[loss=0.249, simple_loss=0.3256, pruned_loss=0.08618, over 19429.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2911, pruned_loss=0.06692, over 3844795.68 frames. ], batch size: 70, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:40,769 INFO [train.py:903] (2/4) Epoch 19, batch 1600, loss[loss=0.212, simple_loss=0.2959, pruned_loss=0.06405, over 19627.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2922, pruned_loss=0.06748, over 3845713.11 frames. ], batch size: 57, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:51,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 4.800e+02 6.281e+02 8.115e+02 1.566e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-02 13:52:52,256 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:06,489 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 13:53:23,382 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:23,663 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 13:53:40,455 INFO [train.py:903] (2/4) Epoch 19, batch 1650, loss[loss=0.2431, simple_loss=0.3174, pruned_loss=0.08441, over 19782.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2935, pruned_loss=0.06835, over 3820301.15 frames. ], batch size: 56, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:43,096 INFO [train.py:903] (2/4) Epoch 19, batch 1700, loss[loss=0.1719, simple_loss=0.2434, pruned_loss=0.05022, over 19737.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2939, pruned_loss=0.0689, over 3808016.52 frames. 
], batch size: 46, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:44,634 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:54:53,183 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.721e+02 4.942e+02 5.736e+02 7.226e+02 1.444e+03, threshold=1.147e+03, percent-clipped=3.0 +2023-04-02 13:55:14,611 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:18,819 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:21,971 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 13:55:42,976 INFO [train.py:903] (2/4) Epoch 19, batch 1750, loss[loss=0.2159, simple_loss=0.2941, pruned_loss=0.0688, over 19514.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.0675, over 3817979.30 frames. ], batch size: 54, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:55:46,501 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:16,377 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:37,883 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 13:56:44,204 INFO [train.py:903] (2/4) Epoch 19, batch 1800, loss[loss=0.2601, simple_loss=0.3193, pruned_loss=0.1005, over 12982.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2898, pruned_loss=0.06637, over 3812002.71 frames. ], batch size: 136, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:56:51,806 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124710.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:56:54,991 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.111e+02 6.286e+02 7.731e+02 1.656e+03, threshold=1.257e+03, percent-clipped=2.0 +2023-04-02 13:57:38,763 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 13:57:39,051 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:57:44,335 INFO [train.py:903] (2/4) Epoch 19, batch 1850, loss[loss=0.2162, simple_loss=0.2936, pruned_loss=0.06944, over 18738.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06643, over 3811972.36 frames. ], batch size: 74, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:03,247 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:17,470 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 13:58:33,224 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:33,444 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:33,635 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 13:58:34,571 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:46,480 INFO [train.py:903] (2/4) Epoch 19, batch 1900, loss[loss=0.1798, simple_loss=0.2586, pruned_loss=0.05052, over 15515.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06603, over 3809876.19 frames. ], batch size: 34, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:56,686 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 4.862e+02 5.969e+02 7.822e+02 1.490e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 13:59:01,903 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 13:59:05,803 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:07,780 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 13:59:11,262 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124825.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:59:26,825 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:32,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 13:59:46,587 INFO [train.py:903] (2/4) Epoch 19, batch 1950, loss[loss=0.1701, simple_loss=0.2507, pruned_loss=0.04477, over 19730.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2903, pruned_loss=0.06618, over 3810979.33 frames. ], batch size: 45, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:47,545 INFO [train.py:903] (2/4) Epoch 19, batch 2000, loss[loss=0.1845, simple_loss=0.2576, pruned_loss=0.05575, over 19790.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06649, over 3816186.10 frames. ], batch size: 48, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:50,118 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2818, 3.0372, 2.2385, 2.2560, 2.1136, 2.5453, 0.9581, 2.1452], + device='cuda:2'), covar=tensor([0.0695, 0.0540, 0.0765, 0.1187, 0.1142, 0.1090, 0.1443, 0.1080], + device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0349, 0.0351, 0.0373, 0.0451, 0.0383, 0.0333, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:00:54,346 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:00:58,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.950e+02 6.179e+02 7.428e+02 1.573e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 14:01:45,076 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 14:01:48,487 INFO [train.py:903] (2/4) Epoch 19, batch 2050, loss[loss=0.2293, simple_loss=0.317, pruned_loss=0.07077, over 19648.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.06673, over 3804818.20 frames. ], batch size: 58, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:06,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. 
Duration: 0.95 +2023-04-02 14:02:07,325 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 14:02:25,187 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 14:02:44,755 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:50,775 INFO [train.py:903] (2/4) Epoch 19, batch 2100, loss[loss=0.2288, simple_loss=0.3107, pruned_loss=0.07346, over 19592.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.06673, over 3813365.64 frames. ], batch size: 61, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:52,335 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:00,982 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.657e+02 4.808e+02 5.766e+02 7.881e+02 2.968e+03, threshold=1.153e+03, percent-clipped=4.0 +2023-04-02 14:03:15,321 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:18,747 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 14:03:22,170 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125030.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:31,216 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3222, 1.4241, 1.6965, 1.6117, 2.6223, 2.1087, 2.7488, 1.0903], + device='cuda:2'), covar=tensor([0.2448, 0.4192, 0.2702, 0.1887, 0.1463, 0.2124, 0.1368, 0.4294], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0623, 0.0684, 0.0467, 0.0614, 0.0518, 0.0653, 0.0532], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:03:39,537 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 14:03:49,808 INFO [train.py:903] (2/4) Epoch 19, batch 2150, loss[loss=0.1998, simple_loss=0.2885, pruned_loss=0.05556, over 19347.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.291, pruned_loss=0.06695, over 3805089.20 frames. ], batch size: 66, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:22,804 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125081.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:04:29,459 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.05 vs. limit=5.0 +2023-04-02 14:04:50,330 INFO [train.py:903] (2/4) Epoch 19, batch 2200, loss[loss=0.1829, simple_loss=0.2598, pruned_loss=0.05299, over 19371.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2902, pruned_loss=0.06694, over 3801576.45 frames. 
], batch size: 47, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:53,063 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125106.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:05:01,346 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.808e+02 4.810e+02 5.592e+02 6.977e+02 1.826e+03, threshold=1.118e+03, percent-clipped=4.0 +2023-04-02 14:05:04,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125115.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:34,484 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:41,556 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:51,112 INFO [train.py:903] (2/4) Epoch 19, batch 2250, loss[loss=0.2509, simple_loss=0.3215, pruned_loss=0.09016, over 19543.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2895, pruned_loss=0.06658, over 3799672.87 frames. ], batch size: 56, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:06:05,778 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:25,692 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:35,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:52,192 INFO [train.py:903] (2/4) Epoch 19, batch 2300, loss[loss=0.2412, simple_loss=0.3059, pruned_loss=0.08821, over 19531.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2905, pruned_loss=0.06717, over 3793017.21 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:07:04,357 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.756e+02 5.218e+02 6.176e+02 8.185e+02 2.110e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 14:07:06,709 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 14:07:52,839 INFO [train.py:903] (2/4) Epoch 19, batch 2350, loss[loss=0.2282, simple_loss=0.3041, pruned_loss=0.07611, over 18160.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2906, pruned_loss=0.06698, over 3804581.33 frames. ], batch size: 83, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:08:32,698 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 14:08:45,084 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125297.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:08:49,236 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 14:08:53,425 INFO [train.py:903] (2/4) Epoch 19, batch 2400, loss[loss=0.2379, simple_loss=0.3176, pruned_loss=0.07912, over 19693.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2912, pruned_loss=0.0671, over 3797265.89 frames. 
], batch size: 59, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:09:05,368 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.876e+02 4.925e+02 6.150e+02 7.515e+02 1.529e+03, threshold=1.230e+03, percent-clipped=3.0 +2023-04-02 14:09:53,260 INFO [train.py:903] (2/4) Epoch 19, batch 2450, loss[loss=0.2002, simple_loss=0.2883, pruned_loss=0.056, over 19676.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2915, pruned_loss=0.06752, over 3789496.12 frames. ], batch size: 55, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:10:14,748 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125371.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:17,758 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125374.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:41,900 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,287 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,317 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:53,820 INFO [train.py:903] (2/4) Epoch 19, batch 2500, loss[loss=0.2243, simple_loss=0.3045, pruned_loss=0.07205, over 19541.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.291, pruned_loss=0.06731, over 3796592.08 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:05,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.898e+02 6.460e+02 8.264e+02 2.020e+03, threshold=1.292e+03, percent-clipped=4.0 +2023-04-02 14:11:13,970 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:35,818 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:39,913 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3398, 3.9562, 2.5384, 3.4770, 0.9436, 3.8078, 3.7380, 3.8321], + device='cuda:2'), covar=tensor([0.0733, 0.1121, 0.2096, 0.0873, 0.3858, 0.0814, 0.0968, 0.1228], + device='cuda:2'), in_proj_covar=tensor([0.0484, 0.0393, 0.0478, 0.0335, 0.0391, 0.0417, 0.0408, 0.0443], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:11:54,215 INFO [train.py:903] (2/4) Epoch 19, batch 2550, loss[loss=0.1882, simple_loss=0.2608, pruned_loss=0.05779, over 19783.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2899, pruned_loss=0.06675, over 3795958.38 frames. 
], batch size: 46, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:12:12,069 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3043, 3.8184, 3.9316, 3.9268, 1.4926, 3.7423, 3.2356, 3.6665], + device='cuda:2'), covar=tensor([0.1673, 0.0786, 0.0697, 0.0776, 0.5787, 0.0890, 0.0735, 0.1229], + device='cuda:2'), in_proj_covar=tensor([0.0760, 0.0702, 0.0910, 0.0796, 0.0806, 0.0659, 0.0547, 0.0839], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 14:12:30,670 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2745, 1.4310, 1.9214, 1.4524, 2.6895, 3.7514, 3.4555, 3.8930], + device='cuda:2'), covar=tensor([0.1538, 0.3499, 0.2921, 0.2177, 0.0586, 0.0171, 0.0203, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0313, 0.0344, 0.0261, 0.0238, 0.0180, 0.0212, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 14:12:38,060 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:12:47,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 14:12:53,798 INFO [train.py:903] (2/4) Epoch 19, batch 2600, loss[loss=0.2382, simple_loss=0.3123, pruned_loss=0.08209, over 19575.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2903, pruned_loss=0.06654, over 3812885.63 frames. ], batch size: 61, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:13:01,722 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9276, 2.0346, 2.1956, 2.5889, 1.9491, 2.4918, 2.2317, 2.0741], + device='cuda:2'), covar=tensor([0.3839, 0.3605, 0.1832, 0.2215, 0.3814, 0.2024, 0.4486, 0.3058], + device='cuda:2'), in_proj_covar=tensor([0.0871, 0.0929, 0.0699, 0.0916, 0.0853, 0.0789, 0.0827, 0.0764], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 14:13:05,879 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 4.815e+02 5.841e+02 7.665e+02 1.339e+03, threshold=1.168e+03, percent-clipped=2.0 +2023-04-02 14:13:39,750 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0490, 1.2471, 1.5931, 1.0619, 2.4959, 3.3323, 3.0271, 3.4953], + device='cuda:2'), covar=tensor([0.1722, 0.3817, 0.3514, 0.2598, 0.0585, 0.0179, 0.0240, 0.0262], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0311, 0.0343, 0.0260, 0.0237, 0.0179, 0.0212, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 14:13:43,190 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7644, 1.6341, 1.6359, 2.2401, 1.7308, 2.1969, 2.0925, 1.8903], + device='cuda:2'), covar=tensor([0.0815, 0.0895, 0.1022, 0.0774, 0.0847, 0.0686, 0.0853, 0.0676], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0221, 0.0227, 0.0245, 0.0228, 0.0212, 0.0190, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-02 14:13:52,970 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125553.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:13:53,679 INFO [train.py:903] (2/4) Epoch 19, batch 2650, loss[loss=0.1872, simple_loss=0.2709, pruned_loss=0.05178, over 19783.00 frames. 
], tot_loss[loss=0.2125, simple_loss=0.2912, pruned_loss=0.06685, over 3816657.12 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:15,458 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 14:14:23,618 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125578.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:34,893 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 14:14:54,482 INFO [train.py:903] (2/4) Epoch 19, batch 2700, loss[loss=0.2375, simple_loss=0.3098, pruned_loss=0.08261, over 17299.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.06677, over 3815949.39 frames. ], batch size: 101, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:55,971 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:59,361 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-02 14:15:07,180 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.817e+02 5.964e+02 7.468e+02 1.608e+03, threshold=1.193e+03, percent-clipped=5.0 +2023-04-02 14:15:15,721 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:31,465 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8560, 1.5183, 1.4473, 1.7691, 1.4988, 1.5415, 1.3714, 1.6973], + device='cuda:2'), covar=tensor([0.1035, 0.1414, 0.1509, 0.1010, 0.1232, 0.0605, 0.1467, 0.0760], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0356, 0.0309, 0.0249, 0.0300, 0.0249, 0.0301, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:15:56,202 INFO [train.py:903] (2/4) Epoch 19, batch 2750, loss[loss=0.2364, simple_loss=0.311, pruned_loss=0.08089, over 19770.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.292, pruned_loss=0.0678, over 3797514.39 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:16:49,133 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4965, 2.0471, 1.5503, 1.4317, 1.8914, 1.2927, 1.3956, 1.8095], + device='cuda:2'), covar=tensor([0.0862, 0.0759, 0.0953, 0.0837, 0.0572, 0.1185, 0.0644, 0.0412], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0312, 0.0333, 0.0260, 0.0245, 0.0334, 0.0290, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:16:55,540 INFO [train.py:903] (2/4) Epoch 19, batch 2800, loss[loss=0.2227, simple_loss=0.3013, pruned_loss=0.07211, over 17628.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2919, pruned_loss=0.06772, over 3803990.45 frames. 
], batch size: 101, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:17:01,172 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4693, 2.2828, 2.2231, 2.6420, 2.4657, 2.3382, 2.0436, 2.3901], + device='cuda:2'), covar=tensor([0.0926, 0.1548, 0.1315, 0.1021, 0.1232, 0.0442, 0.1229, 0.0644], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0356, 0.0309, 0.0249, 0.0300, 0.0248, 0.0300, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:17:08,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.188e+02 6.338e+02 7.813e+02 1.733e+03, threshold=1.268e+03, percent-clipped=8.0 +2023-04-02 14:17:12,866 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:37,021 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:42,821 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:55,255 INFO [train.py:903] (2/4) Epoch 19, batch 2850, loss[loss=0.2131, simple_loss=0.2948, pruned_loss=0.06572, over 18211.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2919, pruned_loss=0.06757, over 3801938.19 frames. ], batch size: 83, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:04,498 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5420, 1.0854, 1.3303, 1.2081, 2.1598, 0.9799, 1.9590, 2.4450], + device='cuda:2'), covar=tensor([0.0704, 0.2992, 0.3009, 0.1791, 0.0955, 0.2214, 0.1102, 0.0514], + device='cuda:2'), in_proj_covar=tensor([0.0397, 0.0358, 0.0377, 0.0342, 0.0367, 0.0350, 0.0368, 0.0388], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:18:15,688 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5504, 2.3364, 1.6428, 1.5926, 2.1351, 1.3414, 1.4318, 1.9170], + device='cuda:2'), covar=tensor([0.1033, 0.0681, 0.1051, 0.0805, 0.0549, 0.1178, 0.0756, 0.0503], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0312, 0.0334, 0.0260, 0.0245, 0.0335, 0.0291, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:18:30,685 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:18:56,094 INFO [train.py:903] (2/4) Epoch 19, batch 2900, loss[loss=0.1917, simple_loss=0.2754, pruned_loss=0.05399, over 19646.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2927, pruned_loss=0.06782, over 3803310.72 frames. ], batch size: 53, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:56,110 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 14:19:09,042 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.625e+02 5.511e+02 7.350e+02 1.619e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 14:19:31,559 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:47,783 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2885, 1.9881, 1.6036, 1.2748, 1.7394, 1.1855, 1.2068, 1.8195], + device='cuda:2'), covar=tensor([0.0893, 0.0783, 0.0992, 0.0814, 0.0622, 0.1205, 0.0659, 0.0430], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0314, 0.0336, 0.0261, 0.0247, 0.0337, 0.0293, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:19:55,789 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:56,473 INFO [train.py:903] (2/4) Epoch 19, batch 2950, loss[loss=0.2349, simple_loss=0.3023, pruned_loss=0.08376, over 19733.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2933, pruned_loss=0.06771, over 3807360.38 frames. ], batch size: 51, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:05,731 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:36,528 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:50,757 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:57,389 INFO [train.py:903] (2/4) Epoch 19, batch 3000, loss[loss=0.1981, simple_loss=0.2843, pruned_loss=0.05592, over 19670.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2931, pruned_loss=0.06772, over 3803418.60 frames. ], batch size: 55, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:57,389 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 14:21:08,844 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5481, 1.6203, 1.5615, 1.3468, 1.2433, 1.3564, 0.3453, 0.6743], + device='cuda:2'), covar=tensor([0.0656, 0.0672, 0.0395, 0.0669, 0.1202, 0.0775, 0.1316, 0.1125], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0352, 0.0355, 0.0378, 0.0457, 0.0388, 0.0334, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:21:10,746 INFO [train.py:937] (2/4) Epoch 19, validation: loss=0.1696, simple_loss=0.2702, pruned_loss=0.03451, over 944034.00 frames. +2023-04-02 14:21:10,747 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 14:21:10,817 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 14:21:24,049 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 4.999e+02 6.816e+02 8.693e+02 1.814e+03, threshold=1.363e+03, percent-clipped=12.0 +2023-04-02 14:22:11,556 INFO [train.py:903] (2/4) Epoch 19, batch 3050, loss[loss=0.1906, simple_loss=0.2802, pruned_loss=0.05046, over 19782.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2935, pruned_loss=0.06813, over 3808012.59 frames. 
], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:22:24,979 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:23:10,414 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2065, 1.3088, 1.7296, 1.5774, 2.8724, 4.3012, 4.1912, 4.9656], + device='cuda:2'), covar=tensor([0.1948, 0.5147, 0.4621, 0.2354, 0.0718, 0.0265, 0.0240, 0.0204], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0315, 0.0347, 0.0262, 0.0239, 0.0181, 0.0215, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 14:23:13,560 INFO [train.py:903] (2/4) Epoch 19, batch 3100, loss[loss=0.2042, simple_loss=0.292, pruned_loss=0.05816, over 19542.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2919, pruned_loss=0.06712, over 3824328.13 frames. ], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:23:21,452 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6197, 2.2472, 1.6820, 1.5574, 2.0819, 1.2694, 1.5598, 1.9840], + device='cuda:2'), covar=tensor([0.0933, 0.0739, 0.0929, 0.0748, 0.0501, 0.1185, 0.0606, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0313, 0.0334, 0.0261, 0.0246, 0.0335, 0.0292, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:23:26,817 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.651e+02 5.591e+02 6.916e+02 1.279e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-02 14:23:28,044 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:13,739 INFO [train.py:903] (2/4) Epoch 19, batch 3150, loss[loss=0.1945, simple_loss=0.2661, pruned_loss=0.06144, over 19736.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2931, pruned_loss=0.06746, over 3823299.74 frames. ], batch size: 46, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:24:40,341 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 14:24:45,979 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:53,527 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:55,936 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:04,727 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5955, 2.4235, 1.7735, 1.6145, 2.2361, 1.4058, 1.3826, 2.0249], + device='cuda:2'), covar=tensor([0.1048, 0.0708, 0.0953, 0.0788, 0.0496, 0.1100, 0.0768, 0.0451], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0311, 0.0332, 0.0259, 0.0243, 0.0333, 0.0289, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:25:14,239 INFO [train.py:903] (2/4) Epoch 19, batch 3200, loss[loss=0.2252, simple_loss=0.3063, pruned_loss=0.07207, over 19282.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2924, pruned_loss=0.06765, over 3826337.43 frames. 
], batch size: 66, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:25:21,294 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:26,846 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126114.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:27,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 4.817e+02 6.226e+02 7.515e+02 1.545e+03, threshold=1.245e+03, percent-clipped=7.0 +2023-04-02 14:25:51,085 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:15,154 INFO [train.py:903] (2/4) Epoch 19, batch 3250, loss[loss=0.2383, simple_loss=0.3067, pruned_loss=0.08495, over 13507.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06736, over 3823096.28 frames. ], batch size: 136, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:26:15,574 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:46,042 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:58,347 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6173, 1.6805, 1.5765, 1.3974, 1.2761, 1.4051, 0.4183, 0.7255], + device='cuda:2'), covar=tensor([0.0612, 0.0615, 0.0384, 0.0601, 0.1152, 0.0738, 0.1232, 0.1010], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0349, 0.0352, 0.0377, 0.0454, 0.0386, 0.0333, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:27:14,813 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:16,666 INFO [train.py:903] (2/4) Epoch 19, batch 3300, loss[loss=0.2522, simple_loss=0.3317, pruned_loss=0.08629, over 19733.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2917, pruned_loss=0.06697, over 3822027.25 frames. ], batch size: 63, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:27:20,145 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 14:27:30,253 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.439e+02 5.162e+02 6.410e+02 7.971e+02 2.422e+03, threshold=1.282e+03, percent-clipped=4.0 +2023-04-02 14:28:17,445 INFO [train.py:903] (2/4) Epoch 19, batch 3350, loss[loss=0.2084, simple_loss=0.2878, pruned_loss=0.06447, over 19670.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2919, pruned_loss=0.0671, over 3800494.38 frames. ], batch size: 53, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:28:38,571 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2066, 1.2743, 1.2503, 1.0361, 1.0864, 1.0491, 0.0805, 0.3060], + device='cuda:2'), covar=tensor([0.0633, 0.0626, 0.0416, 0.0541, 0.1205, 0.0589, 0.1283, 0.1070], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0349, 0.0352, 0.0376, 0.0452, 0.0384, 0.0332, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:29:17,961 INFO [train.py:903] (2/4) Epoch 19, batch 3400, loss[loss=0.2231, simple_loss=0.3023, pruned_loss=0.07193, over 19769.00 frames. 
], tot_loss[loss=0.2139, simple_loss=0.2929, pruned_loss=0.06751, over 3796197.84 frames. ], batch size: 54, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:29:31,307 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.180e+02 6.014e+02 8.021e+02 1.733e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 14:29:57,170 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:18,214 INFO [train.py:903] (2/4) Epoch 19, batch 3450, loss[loss=0.1968, simple_loss=0.277, pruned_loss=0.05835, over 19350.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.0667, over 3819000.30 frames. ], batch size: 70, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:30:22,542 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 14:30:27,182 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:28,569 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126361.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:41,474 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 14:31:13,311 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:20,709 INFO [train.py:903] (2/4) Epoch 19, batch 3500, loss[loss=0.197, simple_loss=0.2802, pruned_loss=0.05686, over 19346.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2908, pruned_loss=0.06619, over 3822071.28 frames. ], batch size: 70, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:31:23,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-02 14:31:34,961 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.908e+02 6.053e+02 7.325e+02 1.346e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 14:31:53,986 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.2020, 3.8123, 2.6703, 3.4558, 1.2933, 3.6687, 3.6553, 3.7305], + device='cuda:2'), covar=tensor([0.0858, 0.1198, 0.2023, 0.0870, 0.3725, 0.0865, 0.0925, 0.1385], + device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0394, 0.0480, 0.0337, 0.0396, 0.0419, 0.0409, 0.0446], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:32:21,707 INFO [train.py:903] (2/4) Epoch 19, batch 3550, loss[loss=0.2375, simple_loss=0.3161, pruned_loss=0.07943, over 17359.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2908, pruned_loss=0.06637, over 3804039.76 frames. 
], batch size: 101, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:32:26,692 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:38,964 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:48,131 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126475.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:57,931 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126483.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:33:00,138 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2840, 2.1550, 1.9675, 1.8280, 1.6718, 1.8265, 0.7504, 1.2836], + device='cuda:2'), covar=tensor([0.0521, 0.0544, 0.0471, 0.0805, 0.0964, 0.0860, 0.1166, 0.0953], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0349, 0.0353, 0.0376, 0.0454, 0.0386, 0.0332, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:33:21,951 INFO [train.py:903] (2/4) Epoch 19, batch 3600, loss[loss=0.1958, simple_loss=0.2788, pruned_loss=0.05635, over 19611.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2902, pruned_loss=0.06622, over 3813194.34 frames. ], batch size: 50, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:33:37,204 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.926e+02 5.826e+02 7.456e+02 2.258e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 14:34:22,218 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8052, 4.3375, 2.6824, 3.8732, 0.8821, 4.2502, 4.1722, 4.2783], + device='cuda:2'), covar=tensor([0.0599, 0.1007, 0.2099, 0.0848, 0.4094, 0.0660, 0.0827, 0.1152], + device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0395, 0.0483, 0.0338, 0.0395, 0.0419, 0.0409, 0.0447], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:34:23,092 INFO [train.py:903] (2/4) Epoch 19, batch 3650, loss[loss=0.2028, simple_loss=0.2724, pruned_loss=0.06661, over 19733.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2912, pruned_loss=0.06678, over 3826928.04 frames. ], batch size: 51, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:24,556 INFO [train.py:903] (2/4) Epoch 19, batch 3700, loss[loss=0.234, simple_loss=0.3097, pruned_loss=0.07919, over 19685.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2923, pruned_loss=0.06734, over 3830107.57 frames. ], batch size: 59, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:38,476 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.326e+02 6.409e+02 8.349e+02 1.648e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-02 14:36:06,938 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 14:36:23,995 INFO [train.py:903] (2/4) Epoch 19, batch 3750, loss[loss=0.1929, simple_loss=0.2838, pruned_loss=0.05101, over 19586.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2919, pruned_loss=0.06679, over 3836715.66 frames. ], batch size: 52, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:25,135 INFO [train.py:903] (2/4) Epoch 19, batch 3800, loss[loss=0.2146, simple_loss=0.2999, pruned_loss=0.06465, over 19781.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2927, pruned_loss=0.06692, over 3846758.29 frames. 
], batch size: 63, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:40,985 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.757e+02 5.693e+02 7.302e+02 1.543e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-02 14:37:57,324 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 14:37:58,818 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126731.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:12,094 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:12,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-02 14:38:26,702 INFO [train.py:903] (2/4) Epoch 19, batch 3850, loss[loss=0.2119, simple_loss=0.296, pruned_loss=0.06383, over 17502.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2922, pruned_loss=0.06674, over 3835652.11 frames. ], batch size: 101, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:38:30,300 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126756.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:34,969 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8936, 1.9429, 1.5637, 1.9588, 1.9192, 1.4997, 1.4476, 1.7550], + device='cuda:2'), covar=tensor([0.1192, 0.1530, 0.1702, 0.1103, 0.1326, 0.0757, 0.1684, 0.0885], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0351, 0.0302, 0.0246, 0.0294, 0.0244, 0.0295, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:39:28,455 INFO [train.py:903] (2/4) Epoch 19, batch 3900, loss[loss=0.2479, simple_loss=0.3286, pruned_loss=0.08354, over 19587.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2918, pruned_loss=0.0665, over 3847904.58 frames. ], batch size: 61, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:39:30,208 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.21 vs. limit=5.0 +2023-04-02 14:39:37,801 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:42,918 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.082e+02 6.454e+02 7.734e+02 3.345e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-02 14:40:04,522 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126834.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:29,132 INFO [train.py:903] (2/4) Epoch 19, batch 3950, loss[loss=0.2326, simple_loss=0.3111, pruned_loss=0.07702, over 18106.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2916, pruned_loss=0.06631, over 3844245.19 frames. ], batch size: 83, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:40:33,525 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126857.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:35,249 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 14:41:29,521 INFO [train.py:903] (2/4) Epoch 19, batch 4000, loss[loss=0.2144, simple_loss=0.29, pruned_loss=0.06943, over 19856.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2913, pruned_loss=0.06615, over 3843632.04 frames. 
], batch size: 52, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:41:43,555 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.579e+02 4.980e+02 6.258e+02 9.023e+02 1.716e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-02 14:41:57,635 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:42:16,849 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 14:42:30,349 INFO [train.py:903] (2/4) Epoch 19, batch 4050, loss[loss=0.1747, simple_loss=0.2624, pruned_loss=0.04345, over 19771.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2911, pruned_loss=0.06604, over 3844809.62 frames. ], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:17,818 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8379, 3.2803, 3.3353, 3.3464, 1.3577, 3.2221, 2.8250, 3.1054], + device='cuda:2'), covar=tensor([0.1718, 0.0949, 0.0782, 0.0982, 0.5461, 0.0902, 0.0817, 0.1285], + device='cuda:2'), in_proj_covar=tensor([0.0766, 0.0713, 0.0916, 0.0804, 0.0817, 0.0671, 0.0554, 0.0854], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 14:43:30,606 INFO [train.py:903] (2/4) Epoch 19, batch 4100, loss[loss=0.1895, simple_loss=0.2795, pruned_loss=0.04971, over 19646.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.291, pruned_loss=0.06614, over 3835517.68 frames. ], batch size: 55, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:45,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.699e+02 5.681e+02 7.096e+02 1.300e+03, threshold=1.136e+03, percent-clipped=1.0 +2023-04-02 14:44:06,963 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 14:44:31,635 INFO [train.py:903] (2/4) Epoch 19, batch 4150, loss[loss=0.1891, simple_loss=0.2691, pruned_loss=0.05461, over 19782.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.0663, over 3838197.12 frames. ], batch size: 48, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:32,549 INFO [train.py:903] (2/4) Epoch 19, batch 4200, loss[loss=0.21, simple_loss=0.2916, pruned_loss=0.06418, over 14875.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2898, pruned_loss=0.06613, over 3821002.04 frames. ], batch size: 32, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:35,868 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 14:45:43,872 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127113.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:45:46,901 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 4.915e+02 5.762e+02 6.825e+02 1.362e+03, threshold=1.152e+03, percent-clipped=8.0 +2023-04-02 14:46:14,877 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:46:28,601 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2596, 1.3315, 1.2468, 1.0722, 1.1047, 1.1225, 0.0472, 0.3471], + device='cuda:2'), covar=tensor([0.0611, 0.0584, 0.0403, 0.0512, 0.1230, 0.0582, 0.1141, 0.0989], + device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0348, 0.0353, 0.0375, 0.0454, 0.0384, 0.0331, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 14:46:32,725 INFO [train.py:903] (2/4) Epoch 19, batch 4250, loss[loss=0.1943, simple_loss=0.2615, pruned_loss=0.06355, over 19771.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2894, pruned_loss=0.06646, over 3820028.23 frames. ], batch size: 46, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:46:46,863 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8616, 1.4946, 1.8285, 1.6450, 4.3578, 1.2356, 2.3829, 4.7429], + device='cuda:2'), covar=tensor([0.0387, 0.2858, 0.2784, 0.2036, 0.0686, 0.2592, 0.1528, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0357, 0.0375, 0.0341, 0.0366, 0.0347, 0.0367, 0.0386], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:46:50,070 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 14:47:01,522 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 14:47:03,664 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:09,531 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:34,700 INFO [train.py:903] (2/4) Epoch 19, batch 4300, loss[loss=0.1925, simple_loss=0.2574, pruned_loss=0.06379, over 19758.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2901, pruned_loss=0.0671, over 3818107.99 frames. ], batch size: 46, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:47:40,441 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127208.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:50,094 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.800e+02 4.893e+02 5.914e+02 7.996e+02 1.682e+03, threshold=1.183e+03, percent-clipped=7.0 +2023-04-02 14:48:28,062 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 14:48:35,537 INFO [train.py:903] (2/4) Epoch 19, batch 4350, loss[loss=0.2009, simple_loss=0.2888, pruned_loss=0.05652, over 19686.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2917, pruned_loss=0.06764, over 3815476.72 frames. 
], batch size: 58, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:48:51,135 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:02,191 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 14:49:23,166 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:35,811 INFO [train.py:903] (2/4) Epoch 19, batch 4400, loss[loss=0.241, simple_loss=0.3067, pruned_loss=0.0877, over 17417.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2921, pruned_loss=0.06758, over 3829366.59 frames. ], batch size: 101, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:49:49,571 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.239e+02 6.175e+02 6.853e+02 1.670e+03, threshold=1.235e+03, percent-clipped=2.0 +2023-04-02 14:50:02,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 14:50:12,015 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 14:50:12,970 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-02 14:50:36,314 INFO [train.py:903] (2/4) Epoch 19, batch 4450, loss[loss=0.235, simple_loss=0.3144, pruned_loss=0.0778, over 17492.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2918, pruned_loss=0.06761, over 3835762.13 frames. ], batch size: 101, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:38,011 INFO [train.py:903] (2/4) Epoch 19, batch 4500, loss[loss=0.1972, simple_loss=0.2789, pruned_loss=0.0577, over 15611.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2926, pruned_loss=0.06819, over 3816693.84 frames. ], batch size: 34, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:52,900 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.116e+02 6.133e+02 7.767e+02 1.446e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-02 14:51:53,551 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-02 14:52:28,110 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127445.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:52:39,295 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-04-02 14:52:39,717 INFO [train.py:903] (2/4) Epoch 19, batch 4550, loss[loss=0.2371, simple_loss=0.3148, pruned_loss=0.07973, over 19540.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2918, pruned_loss=0.06748, over 3821683.64 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:52:48,340 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 14:53:11,967 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 14:53:30,000 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8041, 2.5826, 2.5053, 2.9014, 2.6226, 2.4334, 2.2543, 2.5518], + device='cuda:2'), covar=tensor([0.0827, 0.1465, 0.1145, 0.0947, 0.1245, 0.0426, 0.1245, 0.0586], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0350, 0.0302, 0.0246, 0.0294, 0.0243, 0.0295, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:53:40,582 INFO [train.py:903] (2/4) Epoch 19, batch 4600, loss[loss=0.2377, simple_loss=0.3164, pruned_loss=0.07951, over 19793.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2919, pruned_loss=0.06738, over 3822746.78 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:53:52,470 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6390, 2.4791, 2.2583, 2.6992, 2.3748, 2.2211, 2.0144, 2.4016], + device='cuda:2'), covar=tensor([0.0907, 0.1490, 0.1315, 0.0981, 0.1387, 0.0496, 0.1351, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0351, 0.0303, 0.0247, 0.0295, 0.0244, 0.0296, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:53:54,254 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.018e+02 6.286e+02 8.427e+02 2.189e+03, threshold=1.257e+03, percent-clipped=8.0 +2023-04-02 14:54:11,461 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1132, 1.3326, 1.8587, 1.4307, 2.9767, 4.5921, 4.5409, 5.0529], + device='cuda:2'), covar=tensor([0.1751, 0.3814, 0.3334, 0.2368, 0.0610, 0.0188, 0.0160, 0.0162], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0316, 0.0347, 0.0261, 0.0239, 0.0182, 0.0214, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 14:54:16,985 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5946, 4.0868, 4.2452, 4.2584, 1.7348, 4.0188, 3.5046, 3.9794], + device='cuda:2'), covar=tensor([0.1475, 0.0916, 0.0571, 0.0635, 0.5311, 0.0827, 0.0646, 0.1089], + device='cuda:2'), in_proj_covar=tensor([0.0758, 0.0711, 0.0910, 0.0795, 0.0812, 0.0666, 0.0551, 0.0848], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 14:54:32,429 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:34,727 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:37,341 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-04-02 14:54:39,913 INFO [train.py:903] (2/4) Epoch 19, batch 4650, loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05152, over 18327.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2919, pruned_loss=0.0673, over 3824559.43 frames. 
], batch size: 83, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:54:50,215 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0856, 5.1242, 5.8122, 5.8928, 1.9611, 5.4948, 4.6120, 5.4769], + device='cuda:2'), covar=tensor([0.1603, 0.0863, 0.0596, 0.0631, 0.5951, 0.0748, 0.0680, 0.1202], + device='cuda:2'), in_proj_covar=tensor([0.0761, 0.0715, 0.0914, 0.0798, 0.0814, 0.0668, 0.0553, 0.0850], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 14:54:55,875 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 14:55:05,432 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:07,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 14:55:16,505 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6765, 1.4609, 1.4895, 1.9846, 1.5232, 1.8048, 1.8570, 1.6911], + device='cuda:2'), covar=tensor([0.0871, 0.1045, 0.1088, 0.0818, 0.0922, 0.0884, 0.0933, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0246, 0.0229, 0.0212, 0.0190, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 14:55:20,853 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8979, 1.5474, 1.5643, 1.5789, 3.4506, 1.1649, 2.5357, 3.8370], + device='cuda:2'), covar=tensor([0.0441, 0.2708, 0.2824, 0.1951, 0.0676, 0.2597, 0.1277, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0394, 0.0358, 0.0376, 0.0341, 0.0367, 0.0347, 0.0369, 0.0386], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 14:55:40,716 INFO [train.py:903] (2/4) Epoch 19, batch 4700, loss[loss=0.2175, simple_loss=0.307, pruned_loss=0.06396, over 19164.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2924, pruned_loss=0.06731, over 3812785.51 frames. ], batch size: 69, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:55:43,309 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3165, 1.9123, 2.1073, 2.6948, 2.0496, 2.4032, 2.4011, 2.3557], + device='cuda:2'), covar=tensor([0.0747, 0.0926, 0.0924, 0.0886, 0.0895, 0.0731, 0.0925, 0.0600], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 14:55:50,512 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:55,864 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 5.036e+02 6.203e+02 8.078e+02 1.735e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-02 14:56:02,629 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 14:56:41,680 INFO [train.py:903] (2/4) Epoch 19, batch 4750, loss[loss=0.2191, simple_loss=0.3026, pruned_loss=0.06776, over 19719.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.293, pruned_loss=0.06757, over 3816059.65 frames. 
], batch size: 63, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:56:49,305 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 14:57:00,071 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1020, 1.3190, 1.7734, 1.1344, 2.4819, 3.4239, 3.1815, 3.6320], + device='cuda:2'), covar=tensor([0.1669, 0.3589, 0.3042, 0.2391, 0.0553, 0.0188, 0.0193, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0316, 0.0345, 0.0260, 0.0238, 0.0181, 0.0214, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 14:57:13,640 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:57:41,809 INFO [train.py:903] (2/4) Epoch 19, batch 4800, loss[loss=0.2415, simple_loss=0.3191, pruned_loss=0.08193, over 19702.00 frames. ], tot_loss[loss=0.214, simple_loss=0.293, pruned_loss=0.06754, over 3827544.15 frames. ], batch size: 59, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:55,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.990e+02 6.329e+02 8.030e+02 1.437e+03, threshold=1.266e+03, percent-clipped=1.0 +2023-04-02 14:58:07,646 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127726.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:58:21,456 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 14:58:40,157 INFO [train.py:903] (2/4) Epoch 19, batch 4850, loss[loss=0.1996, simple_loss=0.2884, pruned_loss=0.05545, over 19609.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2921, pruned_loss=0.06745, over 3835580.07 frames. ], batch size: 57, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:04,710 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 14:59:22,978 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127789.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:59:25,079 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 14:59:30,826 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 14:59:30,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 14:59:32,256 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:59:40,801 INFO [train.py:903] (2/4) Epoch 19, batch 4900, loss[loss=0.2248, simple_loss=0.3059, pruned_loss=0.07184, over 18742.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2913, pruned_loss=0.06741, over 3829981.35 frames. ], batch size: 74, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:40,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 14:59:55,908 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.848e+02 5.865e+02 7.992e+02 2.664e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-02 15:00:01,799 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 15:00:41,582 INFO [train.py:903] (2/4) Epoch 19, batch 4950, loss[loss=0.2234, simple_loss=0.3034, pruned_loss=0.07173, over 17575.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2926, pruned_loss=0.068, over 3826016.19 frames. ], batch size: 101, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:00:58,810 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:00:59,653 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 15:01:09,394 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4595, 1.5591, 1.8778, 1.7283, 2.6733, 2.3604, 2.9886, 1.3019], + device='cuda:2'), covar=tensor([0.2359, 0.4166, 0.2575, 0.1807, 0.1537, 0.1932, 0.1303, 0.4168], + device='cuda:2'), in_proj_covar=tensor([0.0520, 0.0627, 0.0692, 0.0473, 0.0615, 0.0517, 0.0659, 0.0536], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 15:01:22,182 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 15:01:24,500 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:26,343 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127891.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:41,506 INFO [train.py:903] (2/4) Epoch 19, batch 5000, loss[loss=0.1922, simple_loss=0.2768, pruned_loss=0.05374, over 19841.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2931, pruned_loss=0.06807, over 3822431.73 frames. ], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:01:41,865 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127904.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:01:51,183 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 15:01:55,663 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.675e+02 5.614e+02 6.818e+02 2.294e+03, threshold=1.123e+03, percent-clipped=3.0 +2023-04-02 15:02:03,227 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 15:02:41,852 INFO [train.py:903] (2/4) Epoch 19, batch 5050, loss[loss=0.2377, simple_loss=0.3151, pruned_loss=0.08008, over 19767.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.293, pruned_loss=0.06765, over 3828371.90 frames. ], batch size: 54, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:02:53,524 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 15:03:16,164 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:18,095 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 15:03:43,135 INFO [train.py:903] (2/4) Epoch 19, batch 5100, loss[loss=0.2145, simple_loss=0.2958, pruned_loss=0.06662, over 19593.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2925, pruned_loss=0.06728, over 3824502.98 frames. 
], batch size: 61, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:45,726 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,034 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:56,493 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 15:03:58,282 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.139e+02 4.818e+02 5.706e+02 8.227e+02 1.561e+03, threshold=1.141e+03, percent-clipped=7.0 +2023-04-02 15:04:00,663 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 15:04:04,155 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 15:04:07,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128024.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:04:43,096 INFO [train.py:903] (2/4) Epoch 19, batch 5150, loss[loss=0.2055, simple_loss=0.2832, pruned_loss=0.06396, over 19484.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2923, pruned_loss=0.06711, over 3808905.23 frames. ], batch size: 49, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:04:57,240 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:05:31,583 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:05:44,855 INFO [train.py:903] (2/4) Epoch 19, batch 5200, loss[loss=0.2095, simple_loss=0.2889, pruned_loss=0.06508, over 19474.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2913, pruned_loss=0.067, over 3802058.02 frames. ], batch size: 49, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:05:59,011 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.969e+02 4.920e+02 6.208e+02 7.921e+02 1.726e+03, threshold=1.242e+03, percent-clipped=7.0 +2023-04-02 15:05:59,066 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 15:06:28,670 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128139.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:30,659 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128141.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:41,124 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 15:06:45,761 INFO [train.py:903] (2/4) Epoch 19, batch 5250, loss[loss=0.2235, simple_loss=0.3, pruned_loss=0.07349, over 19435.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2918, pruned_loss=0.06741, over 3782680.70 frames. 
], batch size: 64, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:06:53,650 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128160.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:07:22,018 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6257, 1.2821, 1.5317, 1.5271, 3.1566, 1.0741, 2.3513, 3.5668], + device='cuda:2'), covar=tensor([0.0501, 0.2802, 0.2751, 0.1913, 0.0745, 0.2627, 0.1249, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0397, 0.0358, 0.0378, 0.0343, 0.0369, 0.0349, 0.0370, 0.0390], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:07:23,079 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128185.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:07:45,450 INFO [train.py:903] (2/4) Epoch 19, batch 5300, loss[loss=0.1976, simple_loss=0.2686, pruned_loss=0.06328, over 19395.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2917, pruned_loss=0.0674, over 3780742.81 frames. ], batch size: 48, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:07:54,710 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:07:59,669 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.001e+02 6.088e+02 7.600e+02 1.403e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 15:08:00,879 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 15:08:21,799 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:46,733 INFO [train.py:903] (2/4) Epoch 19, batch 5350, loss[loss=0.1904, simple_loss=0.2819, pruned_loss=0.04945, over 19651.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2899, pruned_loss=0.06632, over 3797670.61 frames. ], batch size: 59, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:08:50,342 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:57,164 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:20,034 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 15:09:26,927 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:47,392 INFO [train.py:903] (2/4) Epoch 19, batch 5400, loss[loss=0.2032, simple_loss=0.2886, pruned_loss=0.05885, over 19700.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2905, pruned_loss=0.06612, over 3825350.74 frames. 
], batch size: 51, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:10:01,766 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 4.631e+02 5.571e+02 7.152e+02 1.493e+03, threshold=1.114e+03, percent-clipped=2.0 +2023-04-02 15:10:15,374 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:29,847 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:39,135 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1328, 1.3579, 1.9896, 1.5998, 2.9906, 4.4586, 4.3555, 4.9254], + device='cuda:2'), covar=tensor([0.1743, 0.3805, 0.3043, 0.2184, 0.0622, 0.0211, 0.0173, 0.0181], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0314, 0.0345, 0.0260, 0.0237, 0.0181, 0.0213, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:10:42,718 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:48,543 INFO [train.py:903] (2/4) Epoch 19, batch 5450, loss[loss=0.2068, simple_loss=0.2841, pruned_loss=0.06478, over 19762.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2906, pruned_loss=0.06603, over 3829976.84 frames. ], batch size: 47, lr: 4.31e-03, grad_scale: 16.0 +2023-04-02 15:11:39,598 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128395.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:11:50,168 INFO [train.py:903] (2/4) Epoch 19, batch 5500, loss[loss=0.2127, simple_loss=0.29, pruned_loss=0.06768, over 19563.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2919, pruned_loss=0.06642, over 3820599.70 frames. ], batch size: 52, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:06,854 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.166e+02 6.121e+02 7.872e+02 1.632e+03, threshold=1.224e+03, percent-clipped=5.0 +2023-04-02 15:12:10,695 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:12,710 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 15:12:23,800 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:50,273 INFO [train.py:903] (2/4) Epoch 19, batch 5550, loss[loss=0.2958, simple_loss=0.3475, pruned_loss=0.122, over 12835.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2908, pruned_loss=0.06582, over 3823873.53 frames. ], batch size: 136, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:56,481 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 15:13:44,849 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 15:13:51,307 INFO [train.py:903] (2/4) Epoch 19, batch 5600, loss[loss=0.2224, simple_loss=0.3069, pruned_loss=0.06897, over 18239.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2916, pruned_loss=0.06643, over 3821528.04 frames. 
], batch size: 83, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:13:57,375 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2245, 5.5860, 3.2841, 4.9900, 1.0614, 5.8069, 5.6002, 5.8433], + device='cuda:2'), covar=tensor([0.0346, 0.0738, 0.1635, 0.0662, 0.4061, 0.0442, 0.0689, 0.0871], + device='cuda:2'), in_proj_covar=tensor([0.0492, 0.0395, 0.0484, 0.0342, 0.0397, 0.0421, 0.0412, 0.0448], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:14:01,864 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128512.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:07,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.843e+02 5.877e+02 7.578e+02 1.194e+03, threshold=1.175e+03, percent-clipped=0.0 +2023-04-02 15:14:07,398 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5728, 1.6505, 2.0612, 1.8612, 3.0876, 4.2653, 4.1764, 4.6944], + device='cuda:2'), covar=tensor([0.1537, 0.3536, 0.3105, 0.2107, 0.0644, 0.0299, 0.0175, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0313, 0.0345, 0.0260, 0.0237, 0.0180, 0.0213, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:14:32,633 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:52,048 INFO [train.py:903] (2/4) Epoch 19, batch 5650, loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09423, over 19763.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2914, pruned_loss=0.06678, over 3820662.17 frames. ], batch size: 63, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:27,797 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:37,269 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 15:15:53,151 INFO [train.py:903] (2/4) Epoch 19, batch 5700, loss[loss=0.1847, simple_loss=0.2634, pruned_loss=0.05302, over 19788.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.06676, over 3837985.91 frames. 
], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:54,843 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:57,212 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8566, 1.9780, 2.1380, 2.4497, 1.7458, 2.3166, 2.2039, 1.9556], + device='cuda:2'), covar=tensor([0.4072, 0.3601, 0.1891, 0.2237, 0.3843, 0.2141, 0.4835, 0.3404], + device='cuda:2'), in_proj_covar=tensor([0.0876, 0.0935, 0.0700, 0.0924, 0.0860, 0.0796, 0.0834, 0.0767], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 15:15:58,298 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:08,029 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.156e+02 6.108e+02 7.232e+02 1.309e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 15:16:24,927 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:53,491 INFO [train.py:903] (2/4) Epoch 19, batch 5750, loss[loss=0.2079, simple_loss=0.2958, pruned_loss=0.05995, over 19527.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06662, over 3839134.70 frames. ], batch size: 56, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:16:53,826 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0890, 1.2617, 1.6737, 1.2510, 2.3362, 3.2468, 3.0174, 3.5946], + device='cuda:2'), covar=tensor([0.1835, 0.4886, 0.4209, 0.2521, 0.0706, 0.0252, 0.0297, 0.0284], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0315, 0.0345, 0.0261, 0.0239, 0.0181, 0.0214, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:16:55,739 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 15:17:05,275 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 15:17:09,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 15:17:27,288 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:17:55,083 INFO [train.py:903] (2/4) Epoch 19, batch 5800, loss[loss=0.2292, simple_loss=0.3056, pruned_loss=0.07641, over 19543.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2912, pruned_loss=0.06648, over 3837847.80 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:18:05,763 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-02 15:18:08,029 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.53 vs. limit=5.0 +2023-04-02 15:18:10,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 4.671e+02 6.414e+02 7.787e+02 1.302e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-02 15:18:55,627 INFO [train.py:903] (2/4) Epoch 19, batch 5850, loss[loss=0.2152, simple_loss=0.3031, pruned_loss=0.0636, over 19655.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2909, pruned_loss=0.06655, over 3825823.52 frames. 
], batch size: 58, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:19:17,644 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4348, 1.5169, 1.7941, 1.6721, 2.7064, 2.2897, 2.9889, 1.3264], + device='cuda:2'), covar=tensor([0.2376, 0.4017, 0.2553, 0.1809, 0.1519, 0.1972, 0.1371, 0.4161], + device='cuda:2'), in_proj_covar=tensor([0.0521, 0.0629, 0.0691, 0.0473, 0.0617, 0.0521, 0.0661, 0.0537], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 15:19:20,638 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:48,457 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:50,606 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:55,909 INFO [train.py:903] (2/4) Epoch 19, batch 5900, loss[loss=0.2545, simple_loss=0.3269, pruned_loss=0.0911, over 19660.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06695, over 3825268.10 frames. ], batch size: 58, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:02,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 15:20:11,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.778e+02 5.849e+02 7.721e+02 1.320e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 15:20:22,231 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 15:20:56,175 INFO [train.py:903] (2/4) Epoch 19, batch 5950, loss[loss=0.2641, simple_loss=0.3368, pruned_loss=0.09576, over 18923.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2918, pruned_loss=0.06702, over 3828243.27 frames. ], batch size: 74, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:13,054 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 15:21:41,394 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:21:57,516 INFO [train.py:903] (2/4) Epoch 19, batch 6000, loss[loss=0.1966, simple_loss=0.2697, pruned_loss=0.06174, over 19777.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.291, pruned_loss=0.067, over 3812304.27 frames. ], batch size: 48, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:57,516 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 15:22:12,607 INFO [train.py:937] (2/4) Epoch 19, validation: loss=0.1702, simple_loss=0.2702, pruned_loss=0.03514, over 944034.00 frames. +2023-04-02 15:22:12,608 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 15:22:17,283 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128908.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:22:28,004 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.136e+02 6.485e+02 9.043e+02 2.174e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 15:23:11,990 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-02 15:23:13,570 INFO [train.py:903] (2/4) Epoch 19, batch 6050, loss[loss=0.2088, simple_loss=0.2946, pruned_loss=0.06148, over 19678.00 frames. 
], tot_loss[loss=0.2131, simple_loss=0.2916, pruned_loss=0.06729, over 3800967.82 frames. ], batch size: 59, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:15,342 INFO [train.py:903] (2/4) Epoch 19, batch 6100, loss[loss=0.2562, simple_loss=0.3306, pruned_loss=0.09091, over 19765.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2906, pruned_loss=0.06654, over 3813112.44 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:30,770 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.994e+02 6.076e+02 7.380e+02 1.472e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 15:25:15,131 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129053.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:25:15,819 INFO [train.py:903] (2/4) Epoch 19, batch 6150, loss[loss=0.2191, simple_loss=0.3018, pruned_loss=0.06822, over 19661.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.291, pruned_loss=0.06666, over 3826045.67 frames. ], batch size: 55, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:25:17,189 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2116, 1.0078, 1.4569, 1.2735, 2.2663, 3.1986, 2.9802, 3.6102], + device='cuda:2'), covar=tensor([0.1870, 0.5468, 0.4694, 0.2544, 0.0813, 0.0252, 0.0326, 0.0310], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0317, 0.0347, 0.0263, 0.0239, 0.0181, 0.0214, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:25:18,719 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-02 15:25:44,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 15:25:44,626 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:26:16,318 INFO [train.py:903] (2/4) Epoch 19, batch 6200, loss[loss=0.2055, simple_loss=0.295, pruned_loss=0.05801, over 19672.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06788, over 3808662.15 frames. ], batch size: 60, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:26:32,069 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.839e+02 4.815e+02 6.250e+02 7.621e+02 1.523e+03, threshold=1.250e+03, percent-clipped=7.0 +2023-04-02 15:27:04,021 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:07,685 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:17,155 INFO [train.py:903] (2/4) Epoch 19, batch 6250, loss[loss=0.2399, simple_loss=0.311, pruned_loss=0.08441, over 18851.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06688, over 3802219.83 frames. ], batch size: 74, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:27:38,395 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:47,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 15:28:17,842 INFO [train.py:903] (2/4) Epoch 19, batch 6300, loss[loss=0.2307, simple_loss=0.308, pruned_loss=0.0767, over 19772.00 frames. 
], tot_loss[loss=0.2114, simple_loss=0.2901, pruned_loss=0.06633, over 3806076.46 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:28:33,724 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 5.181e+02 6.373e+02 8.503e+02 1.874e+03, threshold=1.275e+03, percent-clipped=7.0 +2023-04-02 15:29:17,506 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129252.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:29:19,491 INFO [train.py:903] (2/4) Epoch 19, batch 6350, loss[loss=0.2118, simple_loss=0.2824, pruned_loss=0.07056, over 19169.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2905, pruned_loss=0.06672, over 3787993.98 frames. ], batch size: 42, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:29:25,299 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:30:21,275 INFO [train.py:903] (2/4) Epoch 19, batch 6400, loss[loss=0.2397, simple_loss=0.3116, pruned_loss=0.08391, over 19615.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2913, pruned_loss=0.0669, over 3804764.97 frames. ], batch size: 61, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:30:36,134 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0239, 1.9742, 1.8437, 1.5395, 1.5973, 1.5895, 0.3662, 0.8579], + device='cuda:2'), covar=tensor([0.0604, 0.0565, 0.0378, 0.0652, 0.1093, 0.0788, 0.1207, 0.1019], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0347, 0.0350, 0.0374, 0.0449, 0.0380, 0.0329, 0.0336], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 15:30:36,860 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.331e+02 4.991e+02 6.008e+02 7.727e+02 1.608e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-02 15:31:22,263 INFO [train.py:903] (2/4) Epoch 19, batch 6450, loss[loss=0.1866, simple_loss=0.2573, pruned_loss=0.05796, over 19754.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2901, pruned_loss=0.06623, over 3809477.86 frames. ], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:31:38,234 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129367.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:32:06,822 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 15:32:22,364 INFO [train.py:903] (2/4) Epoch 19, batch 6500, loss[loss=0.2043, simple_loss=0.2861, pruned_loss=0.06125, over 19475.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2904, pruned_loss=0.06642, over 3794586.96 frames. ], batch size: 49, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:32:29,749 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 15:32:38,768 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.340e+02 6.897e+02 8.888e+02 1.987e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 15:32:56,850 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129431.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:33:21,250 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2409, 1.2854, 1.2706, 1.0644, 1.0906, 1.1286, 0.1098, 0.4401], + device='cuda:2'), covar=tensor([0.0685, 0.0603, 0.0407, 0.0509, 0.1262, 0.0618, 0.1188, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0351, 0.0353, 0.0377, 0.0453, 0.0384, 0.0332, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 15:33:24,209 INFO [train.py:903] (2/4) Epoch 19, batch 6550, loss[loss=0.2057, simple_loss=0.2957, pruned_loss=0.05788, over 19535.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06699, over 3803851.60 frames. ], batch size: 54, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:01,177 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 15:34:03,321 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.92 vs. limit=5.0 +2023-04-02 15:34:24,457 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 15:34:24,938 INFO [train.py:903] (2/4) Epoch 19, batch 6600, loss[loss=0.2195, simple_loss=0.2992, pruned_loss=0.06989, over 19754.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2907, pruned_loss=0.06681, over 3809985.54 frames. ], batch size: 63, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:37,628 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:34:40,455 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.539e+02 5.847e+02 6.807e+02 8.552e+02 1.538e+03, threshold=1.361e+03, percent-clipped=4.0 +2023-04-02 15:35:07,252 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:35:25,978 INFO [train.py:903] (2/4) Epoch 19, batch 6650, loss[loss=0.1807, simple_loss=0.2559, pruned_loss=0.05273, over 19725.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2903, pruned_loss=0.06634, over 3811634.05 frames. ], batch size: 45, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:35:59,899 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5632, 1.0865, 1.2741, 1.3112, 2.1946, 1.0943, 2.0515, 2.4882], + device='cuda:2'), covar=tensor([0.0683, 0.2836, 0.2936, 0.1571, 0.0888, 0.2073, 0.1075, 0.0453], + device='cuda:2'), in_proj_covar=tensor([0.0395, 0.0357, 0.0376, 0.0339, 0.0366, 0.0347, 0.0368, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:36:26,349 INFO [train.py:903] (2/4) Epoch 19, batch 6700, loss[loss=0.2114, simple_loss=0.2997, pruned_loss=0.06158, over 18365.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2905, pruned_loss=0.06653, over 3808877.73 frames. 
], batch size: 84, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:36:42,871 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 4.923e+02 5.649e+02 7.598e+02 1.428e+03, threshold=1.130e+03, percent-clipped=1.0 +2023-04-02 15:36:51,065 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129623.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:36:58,734 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:37:17,607 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1204, 1.7838, 1.8680, 2.4282, 2.0695, 2.3052, 2.2613, 2.0150], + device='cuda:2'), covar=tensor([0.0705, 0.0868, 0.0928, 0.0804, 0.0854, 0.0662, 0.0879, 0.0680], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0219, 0.0224, 0.0244, 0.0226, 0.0209, 0.0188, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 15:37:18,812 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129648.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:37:25,089 INFO [train.py:903] (2/4) Epoch 19, batch 6750, loss[loss=0.2494, simple_loss=0.3154, pruned_loss=0.09171, over 13104.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2913, pruned_loss=0.06704, over 3813654.61 frames. ], batch size: 136, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:20,255 INFO [train.py:903] (2/4) Epoch 19, batch 6800, loss[loss=0.2069, simple_loss=0.2791, pruned_loss=0.06738, over 19402.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2911, pruned_loss=0.06704, over 3812005.74 frames. ], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:34,412 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.884e+02 6.226e+02 8.201e+02 1.689e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-02 15:39:04,784 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 15:39:05,741 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 15:39:08,362 INFO [train.py:903] (2/4) Epoch 20, batch 0, loss[loss=0.19, simple_loss=0.2779, pruned_loss=0.05102, over 19764.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2779, pruned_loss=0.05102, over 19764.00 frames. ], batch size: 48, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:39:08,362 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 15:39:19,742 INFO [train.py:937] (2/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2703, pruned_loss=0.03432, over 944034.00 frames. +2023-04-02 15:39:19,743 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 15:39:31,859 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 15:39:40,399 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8848, 1.6862, 1.6133, 1.9662, 1.6512, 1.6460, 1.5216, 1.8339], + device='cuda:2'), covar=tensor([0.1141, 0.1509, 0.1566, 0.0972, 0.1358, 0.0597, 0.1472, 0.0795], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0357, 0.0308, 0.0251, 0.0301, 0.0249, 0.0301, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:39:49,274 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9626, 1.3206, 1.7442, 1.7187, 4.4451, 1.2068, 2.4367, 4.8379], + device='cuda:2'), covar=tensor([0.0407, 0.3019, 0.3058, 0.1997, 0.0730, 0.2727, 0.1570, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0396, 0.0358, 0.0377, 0.0340, 0.0366, 0.0348, 0.0371, 0.0390], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:39:50,418 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2216, 1.5152, 2.1367, 1.6155, 2.9960, 4.6438, 4.5883, 5.1418], + device='cuda:2'), covar=tensor([0.1668, 0.3694, 0.3011, 0.2226, 0.0597, 0.0201, 0.0165, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0317, 0.0348, 0.0263, 0.0239, 0.0182, 0.0214, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:40:12,664 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129775.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:40:20,209 INFO [train.py:903] (2/4) Epoch 20, batch 50, loss[loss=0.2129, simple_loss=0.2884, pruned_loss=0.06875, over 19478.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2933, pruned_loss=0.06756, over 856495.43 frames. ], batch size: 49, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:40:51,363 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:40:54,562 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 15:41:03,013 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.543e+02 6.891e+02 8.835e+02 1.770e+03, threshold=1.378e+03, percent-clipped=8.0 +2023-04-02 15:41:20,211 INFO [train.py:903] (2/4) Epoch 20, batch 100, loss[loss=0.189, simple_loss=0.2599, pruned_loss=0.05905, over 19765.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2914, pruned_loss=0.06697, over 1522040.26 frames. ], batch size: 45, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:41:31,363 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 15:41:56,953 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2754, 1.8756, 1.8742, 2.6172, 1.7717, 2.4838, 2.3750, 2.3158], + device='cuda:2'), covar=tensor([0.0760, 0.0903, 0.0970, 0.0806, 0.0963, 0.0686, 0.0878, 0.0629], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0243, 0.0226, 0.0210, 0.0188, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 15:42:21,224 INFO [train.py:903] (2/4) Epoch 20, batch 150, loss[loss=0.2253, simple_loss=0.3065, pruned_loss=0.07205, over 19675.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06692, over 2038586.74 frames. 
], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:42:30,205 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129890.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:43:03,460 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.799e+02 5.935e+02 7.467e+02 3.197e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 15:43:21,683 INFO [train.py:903] (2/4) Epoch 20, batch 200, loss[loss=0.2155, simple_loss=0.3081, pruned_loss=0.06138, over 19669.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2895, pruned_loss=0.06639, over 2451954.30 frames. ], batch size: 58, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:43:22,857 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 15:44:13,750 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:44:23,539 INFO [train.py:903] (2/4) Epoch 20, batch 250, loss[loss=0.1888, simple_loss=0.2743, pruned_loss=0.05168, over 19673.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2894, pruned_loss=0.06549, over 2760506.40 frames. ], batch size: 53, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:45:06,936 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.298e+02 6.354e+02 8.098e+02 1.543e+03, threshold=1.271e+03, percent-clipped=7.0 +2023-04-02 15:45:08,330 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:10,551 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:25,497 INFO [train.py:903] (2/4) Epoch 20, batch 300, loss[loss=0.2, simple_loss=0.2631, pruned_loss=0.06841, over 18585.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2897, pruned_loss=0.06556, over 3009414.56 frames. ], batch size: 41, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:25,934 INFO [train.py:903] (2/4) Epoch 20, batch 350, loss[loss=0.2297, simple_loss=0.3059, pruned_loss=0.07678, over 19673.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.06637, over 3178674.64 frames. ], batch size: 53, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:35,009 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:46:35,361 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:46:50,775 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.55 vs. limit=5.0 +2023-04-02 15:47:08,654 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.230e+02 6.397e+02 7.792e+02 1.393e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-02 15:47:26,635 INFO [train.py:903] (2/4) Epoch 20, batch 400, loss[loss=0.2002, simple_loss=0.2835, pruned_loss=0.05849, over 18748.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06605, over 3326702.01 frames. 
], batch size: 74, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:47:32,430 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9502, 4.3786, 4.6694, 4.6775, 1.8242, 4.3822, 3.7352, 4.3655], + device='cuda:2'), covar=tensor([0.1610, 0.0783, 0.0550, 0.0619, 0.5478, 0.0795, 0.0694, 0.0992], + device='cuda:2'), in_proj_covar=tensor([0.0759, 0.0712, 0.0914, 0.0800, 0.0811, 0.0672, 0.0550, 0.0849], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 15:47:42,939 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130146.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:47:53,134 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:48:15,019 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130171.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:48:27,090 INFO [train.py:903] (2/4) Epoch 20, batch 450, loss[loss=0.297, simple_loss=0.3494, pruned_loss=0.1223, over 13450.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2904, pruned_loss=0.06664, over 3429029.82 frames. ], batch size: 136, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:03,628 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 15:49:04,558 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 15:49:09,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.003e+02 6.443e+02 8.043e+02 1.786e+03, threshold=1.289e+03, percent-clipped=5.0 +2023-04-02 15:49:27,184 INFO [train.py:903] (2/4) Epoch 20, batch 500, loss[loss=0.2058, simple_loss=0.2892, pruned_loss=0.06124, over 19852.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.29, pruned_loss=0.0663, over 3511731.42 frames. ], batch size: 52, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:37,969 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-02 15:50:10,041 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:50:15,784 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 15:50:27,995 INFO [train.py:903] (2/4) Epoch 20, batch 550, loss[loss=0.248, simple_loss=0.3173, pruned_loss=0.08936, over 18744.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2912, pruned_loss=0.06682, over 3591314.71 frames. ], batch size: 74, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:11,220 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.062e+02 6.327e+02 8.479e+02 2.088e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 15:51:28,464 INFO [train.py:903] (2/4) Epoch 20, batch 600, loss[loss=0.2294, simple_loss=0.3048, pruned_loss=0.07694, over 19679.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2895, pruned_loss=0.06592, over 3647340.21 frames. 
], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:44,894 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:06,340 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:08,735 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:15,661 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 15:52:16,096 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130370.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:30,046 INFO [train.py:903] (2/4) Epoch 20, batch 650, loss[loss=0.1979, simple_loss=0.2906, pruned_loss=0.05257, over 19665.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2908, pruned_loss=0.06692, over 3682475.15 frames. ], batch size: 58, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:52:56,086 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:53:13,836 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.222e+02 6.307e+02 8.250e+02 2.391e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 15:53:31,369 INFO [train.py:903] (2/4) Epoch 20, batch 700, loss[loss=0.1907, simple_loss=0.2749, pruned_loss=0.05321, over 19783.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2906, pruned_loss=0.06687, over 3721051.27 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:53:55,090 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:02,090 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:28,652 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:32,044 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:35,655 INFO [train.py:903] (2/4) Epoch 20, batch 750, loss[loss=0.1777, simple_loss=0.2593, pruned_loss=0.04804, over 19608.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2902, pruned_loss=0.06659, over 3747866.04 frames. 
], batch size: 50, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:54:40,945 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4194, 1.5541, 1.8259, 1.6663, 2.6265, 2.3434, 2.7607, 1.1912], + device='cuda:2'), covar=tensor([0.2432, 0.4163, 0.2510, 0.1875, 0.1449, 0.2033, 0.1415, 0.4233], + device='cuda:2'), in_proj_covar=tensor([0.0522, 0.0628, 0.0692, 0.0472, 0.0612, 0.0522, 0.0654, 0.0537], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 15:55:19,213 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.790e+02 6.042e+02 7.311e+02 1.890e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 15:55:28,451 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:37,193 INFO [train.py:903] (2/4) Epoch 20, batch 800, loss[loss=0.1789, simple_loss=0.2673, pruned_loss=0.04528, over 19669.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2889, pruned_loss=0.06563, over 3753867.98 frames. ], batch size: 53, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:55:53,533 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:55:58,705 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:21,772 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130566.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:40,733 INFO [train.py:903] (2/4) Epoch 20, batch 850, loss[loss=0.1677, simple_loss=0.2458, pruned_loss=0.04482, over 19411.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2887, pruned_loss=0.06567, over 3771701.78 frames. ], batch size: 48, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:56:41,924 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:44,143 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8527, 1.4513, 1.5757, 1.4364, 3.4038, 1.0441, 2.4172, 3.8861], + device='cuda:2'), covar=tensor([0.0515, 0.2729, 0.2793, 0.1982, 0.0744, 0.2646, 0.1343, 0.0229], + device='cuda:2'), in_proj_covar=tensor([0.0397, 0.0355, 0.0375, 0.0337, 0.0366, 0.0344, 0.0370, 0.0390], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 15:56:57,623 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0823, 1.3259, 1.7134, 0.9759, 2.3536, 2.9693, 2.7142, 3.1416], + device='cuda:2'), covar=tensor([0.1631, 0.3613, 0.3199, 0.2582, 0.0622, 0.0264, 0.0268, 0.0327], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0261, 0.0237, 0.0182, 0.0213, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 15:57:25,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.920e+02 5.760e+02 7.852e+02 1.760e+03, threshold=1.152e+03, percent-clipped=6.0 +2023-04-02 15:57:33,236 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 15:57:40,813 INFO [train.py:903] (2/4) Epoch 20, batch 900, loss[loss=0.2142, simple_loss=0.2983, pruned_loss=0.065, over 19365.00 frames. 
], tot_loss[loss=0.2105, simple_loss=0.289, pruned_loss=0.06601, over 3794132.71 frames. ], batch size: 70, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:44,445 INFO [train.py:903] (2/4) Epoch 20, batch 950, loss[loss=0.1943, simple_loss=0.2778, pruned_loss=0.05546, over 19113.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2893, pruned_loss=0.06571, over 3796809.61 frames. ], batch size: 69, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:47,641 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 15:59:28,722 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.928e+02 5.917e+02 7.294e+02 1.421e+03, threshold=1.183e+03, percent-clipped=3.0 +2023-04-02 15:59:46,656 INFO [train.py:903] (2/4) Epoch 20, batch 1000, loss[loss=0.2767, simple_loss=0.3504, pruned_loss=0.1016, over 18428.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.06544, over 3820857.04 frames. ], batch size: 84, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:59:48,280 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:59:50,509 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:03,661 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:18,683 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:20,899 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:38,553 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 16:00:48,478 INFO [train.py:903] (2/4) Epoch 20, batch 1050, loss[loss=0.2228, simple_loss=0.3051, pruned_loss=0.0702, over 19655.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2902, pruned_loss=0.06631, over 3822656.60 frames. ], batch size: 61, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:01:00,630 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0893, 4.4626, 4.8066, 4.8421, 1.8032, 4.5503, 3.9625, 4.5218], + device='cuda:2'), covar=tensor([0.1439, 0.0800, 0.0520, 0.0560, 0.5633, 0.0766, 0.0595, 0.1015], + device='cuda:2'), in_proj_covar=tensor([0.0759, 0.0717, 0.0918, 0.0801, 0.0817, 0.0677, 0.0555, 0.0857], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 16:01:02,851 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:10,718 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:20,581 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 16:01:33,079 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.731e+02 5.562e+02 6.742e+02 8.268e+02 2.102e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-04-02 16:01:49,835 INFO [train.py:903] (2/4) Epoch 20, batch 1100, loss[loss=0.1949, simple_loss=0.2602, pruned_loss=0.06481, over 19753.00 frames. 
], tot_loss[loss=0.2127, simple_loss=0.2914, pruned_loss=0.06697, over 3827422.37 frames. ], batch size: 46, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:28,208 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:02:42,064 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7473, 2.7327, 2.2179, 2.5836, 2.6659, 2.0688, 1.9605, 2.4659], + device='cuda:2'), covar=tensor([0.0951, 0.1518, 0.1526, 0.1175, 0.1427, 0.0707, 0.1596, 0.0750], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0360, 0.0312, 0.0252, 0.0302, 0.0251, 0.0305, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:02:52,362 INFO [train.py:903] (2/4) Epoch 20, batch 1150, loss[loss=0.2348, simple_loss=0.3149, pruned_loss=0.07737, over 18881.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2908, pruned_loss=0.06649, over 3830231.19 frames. ], batch size: 74, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:53,762 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1050, 1.9089, 1.7502, 2.1641, 1.8585, 1.7581, 1.7485, 1.9436], + device='cuda:2'), covar=tensor([0.0979, 0.1484, 0.1420, 0.0997, 0.1317, 0.0546, 0.1334, 0.0724], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0359, 0.0311, 0.0252, 0.0301, 0.0251, 0.0304, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:03:26,709 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:27,645 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:33,826 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130915.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:39,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 5.063e+02 6.056e+02 7.993e+02 1.743e+03, threshold=1.211e+03, percent-clipped=5.0 +2023-04-02 16:03:50,406 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:55,679 INFO [train.py:903] (2/4) Epoch 20, batch 1200, loss[loss=0.2448, simple_loss=0.3143, pruned_loss=0.08761, over 19650.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2917, pruned_loss=0.06668, over 3832372.59 frames. ], batch size: 55, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:04:23,994 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 16:04:56,115 INFO [train.py:903] (2/4) Epoch 20, batch 1250, loss[loss=0.1777, simple_loss=0.256, pruned_loss=0.04974, over 19796.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06695, over 3841292.40 frames. ], batch size: 47, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:05:07,179 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 16:05:42,760 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.121e+02 6.297e+02 7.673e+02 2.016e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 16:05:51,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:05:58,787 INFO [train.py:903] (2/4) Epoch 20, batch 1300, loss[loss=0.2087, simple_loss=0.2967, pruned_loss=0.06041, over 19689.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2908, pruned_loss=0.06601, over 3849503.24 frames. ], batch size: 59, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:06:12,047 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:06:31,638 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1200, 5.5925, 3.0327, 4.8909, 1.3285, 5.6589, 5.5077, 5.6310], + device='cuda:2'), covar=tensor([0.0367, 0.0760, 0.1855, 0.0680, 0.3666, 0.0535, 0.0699, 0.1055], + device='cuda:2'), in_proj_covar=tensor([0.0491, 0.0395, 0.0484, 0.0343, 0.0400, 0.0421, 0.0415, 0.0447], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:06:51,910 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0916, 5.1779, 5.9544, 5.9562, 2.1085, 5.5962, 4.7043, 5.5540], + device='cuda:2'), covar=tensor([0.1614, 0.0744, 0.0519, 0.0558, 0.5890, 0.0735, 0.0610, 0.1135], + device='cuda:2'), in_proj_covar=tensor([0.0758, 0.0716, 0.0916, 0.0798, 0.0814, 0.0675, 0.0551, 0.0855], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 16:06:59,471 INFO [train.py:903] (2/4) Epoch 20, batch 1350, loss[loss=0.2363, simple_loss=0.3134, pruned_loss=0.07958, over 19525.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2909, pruned_loss=0.06598, over 3858211.26 frames. ], batch size: 54, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:07:43,054 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131117.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:07:44,787 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.090e+02 6.517e+02 8.267e+02 2.193e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-02 16:08:02,289 INFO [train.py:903] (2/4) Epoch 20, batch 1400, loss[loss=0.1874, simple_loss=0.2721, pruned_loss=0.05139, over 19837.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2909, pruned_loss=0.066, over 3853563.60 frames. 
], batch size: 52, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:08:15,152 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:42,762 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:50,714 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:51,743 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4422, 1.3738, 1.4117, 1.8843, 1.4413, 1.7367, 1.7000, 1.5092], + device='cuda:2'), covar=tensor([0.0824, 0.0895, 0.1002, 0.0613, 0.0763, 0.0710, 0.0779, 0.0715], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0242, 0.0226, 0.0209, 0.0188, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 16:09:03,123 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4221, 1.4307, 1.6505, 1.6158, 2.2564, 2.1713, 2.2220, 0.9414], + device='cuda:2'), covar=tensor([0.2466, 0.4325, 0.2683, 0.1941, 0.1632, 0.2106, 0.1532, 0.4564], + device='cuda:2'), in_proj_covar=tensor([0.0526, 0.0630, 0.0695, 0.0475, 0.0614, 0.0526, 0.0658, 0.0541], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:09:03,845 INFO [train.py:903] (2/4) Epoch 20, batch 1450, loss[loss=0.1969, simple_loss=0.2744, pruned_loss=0.05969, over 19374.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2903, pruned_loss=0.06592, over 3846484.05 frames. ], batch size: 48, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:09:06,078 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 16:09:14,517 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:16,710 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:21,594 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131196.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:50,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.028e+02 6.181e+02 7.641e+02 1.699e+03, threshold=1.236e+03, percent-clipped=6.0 +2023-04-02 16:10:06,714 INFO [train.py:903] (2/4) Epoch 20, batch 1500, loss[loss=0.1943, simple_loss=0.2616, pruned_loss=0.06349, over 19780.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06597, over 3850342.03 frames. 
], batch size: 47, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:10:20,912 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2337, 1.3280, 1.2699, 1.0312, 1.1225, 1.0892, 0.0444, 0.3197], + device='cuda:2'), covar=tensor([0.0712, 0.0663, 0.0446, 0.0567, 0.1310, 0.0677, 0.1244, 0.1112], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0349, 0.0352, 0.0377, 0.0452, 0.0383, 0.0332, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:11:06,985 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:07,716 INFO [train.py:903] (2/4) Epoch 20, batch 1550, loss[loss=0.2261, simple_loss=0.3036, pruned_loss=0.07429, over 19590.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2904, pruned_loss=0.06653, over 3833474.73 frames. ], batch size: 61, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:11:29,287 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:38,469 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:53,760 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 5.087e+02 6.243e+02 7.473e+02 1.350e+03, threshold=1.249e+03, percent-clipped=1.0 +2023-04-02 16:11:58,760 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:12:10,218 INFO [train.py:903] (2/4) Epoch 20, batch 1600, loss[loss=0.2267, simple_loss=0.3089, pruned_loss=0.07222, over 19792.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2902, pruned_loss=0.06637, over 3827391.75 frames. ], batch size: 56, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:12:36,101 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 16:13:12,817 INFO [train.py:903] (2/4) Epoch 20, batch 1650, loss[loss=0.1696, simple_loss=0.2451, pruned_loss=0.04707, over 19726.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2898, pruned_loss=0.0659, over 3831780.20 frames. ], batch size: 45, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:13:59,216 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.697e+02 5.218e+02 6.304e+02 8.075e+02 1.501e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 16:14:08,638 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:15,058 INFO [train.py:903] (2/4) Epoch 20, batch 1700, loss[loss=0.2186, simple_loss=0.3038, pruned_loss=0.06676, over 19613.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2886, pruned_loss=0.06553, over 3835540.87 frames. 
], batch size: 57, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:14:17,672 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:51,575 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7936, 1.2768, 1.7458, 1.6529, 4.0949, 1.1188, 2.6728, 4.4209], + device='cuda:2'), covar=tensor([0.0509, 0.3673, 0.3125, 0.2229, 0.1016, 0.3070, 0.1549, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0396, 0.0356, 0.0376, 0.0338, 0.0365, 0.0346, 0.0370, 0.0391], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:14:55,859 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 16:15:13,131 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5486, 2.2953, 1.6707, 1.5785, 2.0816, 1.3674, 1.4565, 1.8842], + device='cuda:2'), covar=tensor([0.1148, 0.0819, 0.1054, 0.0880, 0.0550, 0.1249, 0.0799, 0.0550], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0313, 0.0331, 0.0261, 0.0245, 0.0335, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:15:16,174 INFO [train.py:903] (2/4) Epoch 20, batch 1750, loss[loss=0.2051, simple_loss=0.2868, pruned_loss=0.06163, over 19535.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2896, pruned_loss=0.0658, over 3830602.78 frames. ], batch size: 54, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:15:52,723 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131510.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:16:01,413 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 16:16:02,700 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.359e+02 6.412e+02 7.691e+02 1.507e+03, threshold=1.282e+03, percent-clipped=3.0 +2023-04-02 16:16:18,695 INFO [train.py:903] (2/4) Epoch 20, batch 1800, loss[loss=0.2324, simple_loss=0.3145, pruned_loss=0.07517, over 19664.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.29, pruned_loss=0.06593, over 3803664.79 frames. ], batch size: 55, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:16:24,468 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131536.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:17:16,046 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 16:17:22,062 INFO [train.py:903] (2/4) Epoch 20, batch 1850, loss[loss=0.1861, simple_loss=0.2663, pruned_loss=0.05293, over 19616.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2889, pruned_loss=0.06577, over 3794998.54 frames. ], batch size: 50, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:17:54,267 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 16:18:09,985 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.826e+02 6.634e+02 9.045e+02 2.049e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-02 16:18:25,161 INFO [train.py:903] (2/4) Epoch 20, batch 1900, loss[loss=0.2379, simple_loss=0.3188, pruned_loss=0.07853, over 19547.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.289, pruned_loss=0.06586, over 3804586.88 frames. 
], batch size: 54, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:18:40,082 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 16:18:45,474 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 16:18:47,946 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:19:10,739 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 16:19:19,659 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 16:19:25,447 INFO [train.py:903] (2/4) Epoch 20, batch 1950, loss[loss=0.217, simple_loss=0.3025, pruned_loss=0.0658, over 19669.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2898, pruned_loss=0.06648, over 3806690.19 frames. ], batch size: 55, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:19:57,522 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3732, 2.2077, 2.0250, 1.8881, 1.7856, 1.8885, 0.5915, 1.2263], + device='cuda:2'), covar=tensor([0.0591, 0.0624, 0.0484, 0.0816, 0.1111, 0.0976, 0.1331, 0.1113], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0349, 0.0353, 0.0378, 0.0452, 0.0384, 0.0332, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:20:13,304 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.129e+02 6.435e+02 8.344e+02 2.370e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 16:20:28,634 INFO [train.py:903] (2/4) Epoch 20, batch 2000, loss[loss=0.2057, simple_loss=0.2939, pruned_loss=0.05875, over 19665.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2893, pruned_loss=0.06576, over 3822877.55 frames. ], batch size: 55, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:00,300 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2852, 2.2999, 2.5531, 3.1373, 2.3583, 2.9541, 2.5700, 2.3069], + device='cuda:2'), covar=tensor([0.4141, 0.4109, 0.1741, 0.2407, 0.4371, 0.2057, 0.4497, 0.3251], + device='cuda:2'), in_proj_covar=tensor([0.0876, 0.0939, 0.0700, 0.0920, 0.0858, 0.0793, 0.0825, 0.0770], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 16:21:18,592 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:25,582 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 16:21:28,785 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131778.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:33,329 INFO [train.py:903] (2/4) Epoch 20, batch 2050, loss[loss=0.2008, simple_loss=0.2641, pruned_loss=0.06876, over 19747.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2881, pruned_loss=0.06509, over 3823600.37 frames. ], batch size: 46, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:45,609 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 16:21:46,765 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-02 16:22:07,692 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 16:22:22,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 4.827e+02 6.005e+02 7.777e+02 1.829e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 16:22:35,844 INFO [train.py:903] (2/4) Epoch 20, batch 2100, loss[loss=0.1938, simple_loss=0.2712, pruned_loss=0.05824, over 19424.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06543, over 3828759.97 frames. ], batch size: 48, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:02,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 16:23:03,494 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131854.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:26,801 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 16:23:29,508 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:37,620 INFO [train.py:903] (2/4) Epoch 20, batch 2150, loss[loss=0.2201, simple_loss=0.3039, pruned_loss=0.0681, over 19488.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06478, over 3835040.35 frames. ], batch size: 64, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:42,708 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:46,185 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2514, 2.3023, 2.5794, 3.1236, 2.2485, 2.8759, 2.6203, 2.3024], + device='cuda:2'), covar=tensor([0.4309, 0.4246, 0.1804, 0.2523, 0.4608, 0.2180, 0.4597, 0.3386], + device='cuda:2'), in_proj_covar=tensor([0.0877, 0.0940, 0.0701, 0.0922, 0.0860, 0.0793, 0.0827, 0.0769], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 16:23:51,999 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:09,639 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:26,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.118e+02 6.039e+02 8.265e+02 1.505e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 16:24:39,694 INFO [train.py:903] (2/4) Epoch 20, batch 2200, loss[loss=0.2508, simple_loss=0.3233, pruned_loss=0.08918, over 19426.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2878, pruned_loss=0.06449, over 3839142.09 frames. ], batch size: 70, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:24:40,083 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:26,166 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:42,867 INFO [train.py:903] (2/4) Epoch 20, batch 2250, loss[loss=0.2179, simple_loss=0.3046, pruned_loss=0.06565, over 18771.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2889, pruned_loss=0.06517, over 3815140.72 frames. 
], batch size: 74, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:31,961 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.368e+02 4.978e+02 6.272e+02 7.965e+02 1.499e+03, threshold=1.254e+03, percent-clipped=2.0 +2023-04-02 16:26:44,539 INFO [train.py:903] (2/4) Epoch 20, batch 2300, loss[loss=0.1651, simple_loss=0.2375, pruned_loss=0.04633, over 19755.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06581, over 3818913.93 frames. ], batch size: 46, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:58,044 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 16:27:05,413 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132049.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:35,713 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:47,305 INFO [train.py:903] (2/4) Epoch 20, batch 2350, loss[loss=0.185, simple_loss=0.2579, pruned_loss=0.05605, over 19369.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2902, pruned_loss=0.06581, over 3808682.57 frames. ], batch size: 47, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:27:52,439 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 16:28:26,819 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 16:28:30,724 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8876, 1.9587, 2.2341, 2.5577, 1.8407, 2.4486, 2.3057, 2.0511], + device='cuda:2'), covar=tensor([0.4375, 0.4054, 0.1933, 0.2352, 0.4226, 0.2150, 0.4798, 0.3474], + device='cuda:2'), in_proj_covar=tensor([0.0885, 0.0946, 0.0707, 0.0930, 0.0865, 0.0798, 0.0836, 0.0774], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 16:28:35,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 4.795e+02 5.776e+02 7.778e+02 1.972e+03, threshold=1.155e+03, percent-clipped=8.0 +2023-04-02 16:28:42,754 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 16:28:49,774 INFO [train.py:903] (2/4) Epoch 20, batch 2400, loss[loss=0.1735, simple_loss=0.26, pruned_loss=0.04348, over 19618.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2886, pruned_loss=0.06487, over 3798603.44 frames. ], batch size: 50, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:29:03,492 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:11,472 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132149.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:32,957 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:41,685 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:51,979 INFO [train.py:903] (2/4) Epoch 20, batch 2450, loss[loss=0.1813, simple_loss=0.2637, pruned_loss=0.04944, over 19598.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2879, pruned_loss=0.06453, over 3810036.76 frames. 
], batch size: 50, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:30:38,921 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:41,021 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.995e+02 6.263e+02 8.063e+02 1.363e+03, threshold=1.253e+03, percent-clipped=5.0 +2023-04-02 16:30:47,035 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132225.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:54,290 INFO [train.py:903] (2/4) Epoch 20, batch 2500, loss[loss=0.1708, simple_loss=0.2469, pruned_loss=0.0474, over 19388.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2874, pruned_loss=0.06414, over 3817420.18 frames. ], batch size: 47, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:31:15,947 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:31:21,824 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-02 16:31:56,369 INFO [train.py:903] (2/4) Epoch 20, batch 2550, loss[loss=0.2083, simple_loss=0.2912, pruned_loss=0.06269, over 18171.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2873, pruned_loss=0.06399, over 3822409.05 frames. ], batch size: 83, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:32:34,723 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3747, 3.0508, 2.3196, 2.3499, 2.3080, 2.6631, 0.9906, 2.1267], + device='cuda:2'), covar=tensor([0.0579, 0.0530, 0.0706, 0.1052, 0.1003, 0.1007, 0.1449, 0.1119], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0351, 0.0355, 0.0382, 0.0455, 0.0385, 0.0334, 0.0336], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:32:45,612 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.513e+02 4.985e+02 5.787e+02 8.066e+02 1.995e+03, threshold=1.157e+03, percent-clipped=4.0 +2023-04-02 16:32:52,684 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 16:32:58,515 INFO [train.py:903] (2/4) Epoch 20, batch 2600, loss[loss=0.2825, simple_loss=0.3407, pruned_loss=0.1121, over 14380.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06497, over 3825254.54 frames. ], batch size: 136, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:33:02,386 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:01,406 INFO [train.py:903] (2/4) Epoch 20, batch 2650, loss[loss=0.2337, simple_loss=0.3204, pruned_loss=0.07345, over 19518.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2894, pruned_loss=0.0653, over 3831602.43 frames. ], batch size: 56, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:34:15,343 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:23,146 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 16:34:44,232 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132416.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:50,449 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.865e+02 6.149e+02 7.368e+02 1.585e+03, threshold=1.230e+03, percent-clipped=4.0 +2023-04-02 16:34:53,632 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-02 16:35:04,060 INFO [train.py:903] (2/4) Epoch 20, batch 2700, loss[loss=0.2182, simple_loss=0.3032, pruned_loss=0.06662, over 19606.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2895, pruned_loss=0.06517, over 3831406.17 frames. ], batch size: 57, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:35:17,120 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6507, 4.2605, 2.8208, 3.7192, 0.8708, 4.1857, 4.0176, 4.1483], + device='cuda:2'), covar=tensor([0.0577, 0.0893, 0.1761, 0.0882, 0.3960, 0.0633, 0.0869, 0.0969], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0400, 0.0486, 0.0345, 0.0399, 0.0423, 0.0418, 0.0449], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:36:06,703 INFO [train.py:903] (2/4) Epoch 20, batch 2750, loss[loss=0.2076, simple_loss=0.297, pruned_loss=0.05908, over 19392.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2908, pruned_loss=0.06591, over 3827687.82 frames. ], batch size: 70, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:36:38,093 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5882, 1.6310, 1.8191, 1.6982, 2.3406, 2.0801, 2.3101, 1.4040], + device='cuda:2'), covar=tensor([0.1766, 0.3164, 0.2008, 0.1469, 0.1166, 0.1670, 0.1140, 0.3699], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0633, 0.0698, 0.0478, 0.0616, 0.0527, 0.0660, 0.0542], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:36:39,134 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132508.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:36:55,660 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.186e+02 6.180e+02 7.968e+02 1.505e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-02 16:37:07,878 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:37:08,648 INFO [train.py:903] (2/4) Epoch 20, batch 2800, loss[loss=0.1942, simple_loss=0.275, pruned_loss=0.05674, over 19742.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2898, pruned_loss=0.06547, over 3816844.99 frames. ], batch size: 51, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:37:24,822 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2982, 1.2595, 1.6993, 1.2437, 2.6263, 3.5440, 3.2033, 3.7315], + device='cuda:2'), covar=tensor([0.1565, 0.3814, 0.3362, 0.2437, 0.0574, 0.0198, 0.0217, 0.0249], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0320, 0.0349, 0.0263, 0.0239, 0.0184, 0.0216, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 16:38:13,009 INFO [train.py:903] (2/4) Epoch 20, batch 2850, loss[loss=0.19, simple_loss=0.2799, pruned_loss=0.05012, over 19590.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2888, pruned_loss=0.06513, over 3817137.03 frames. 
], batch size: 52, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:38:22,430 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:38:52,930 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:39:01,716 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 4.861e+02 5.815e+02 7.642e+02 3.357e+03, threshold=1.163e+03, percent-clipped=7.0 +2023-04-02 16:39:14,636 INFO [train.py:903] (2/4) Epoch 20, batch 2900, loss[loss=0.1754, simple_loss=0.2564, pruned_loss=0.04715, over 19447.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2884, pruned_loss=0.0651, over 3825458.54 frames. ], batch size: 49, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:39:14,683 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 16:40:13,467 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132678.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:40:18,628 INFO [train.py:903] (2/4) Epoch 20, batch 2950, loss[loss=0.1947, simple_loss=0.2877, pruned_loss=0.05088, over 19539.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2895, pruned_loss=0.06557, over 3815376.42 frames. ], batch size: 56, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:09,139 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.627e+02 5.679e+02 7.371e+02 2.153e+03, threshold=1.136e+03, percent-clipped=3.0 +2023-04-02 16:41:18,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-02 16:41:20,828 INFO [train.py:903] (2/4) Epoch 20, batch 3000, loss[loss=0.1827, simple_loss=0.2761, pruned_loss=0.04464, over 19629.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2899, pruned_loss=0.06567, over 3800900.82 frames. ], batch size: 57, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:20,829 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 16:41:34,272 INFO [train.py:937] (2/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2697, pruned_loss=0.03462, over 944034.00 frames. +2023-04-02 16:41:34,272 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 16:41:37,065 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5161, 2.5373, 2.0952, 2.6643, 2.4659, 2.2703, 2.1861, 2.6006], + device='cuda:2'), covar=tensor([0.1006, 0.1524, 0.1497, 0.1066, 0.1357, 0.0503, 0.1293, 0.0659], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0353, 0.0307, 0.0248, 0.0297, 0.0246, 0.0302, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 16:41:40,248 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 16:41:47,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.10 vs. limit=5.0 +2023-04-02 16:42:12,876 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:13,007 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:35,229 INFO [train.py:903] (2/4) Epoch 20, batch 3050, loss[loss=0.1869, simple_loss=0.2694, pruned_loss=0.05215, over 19679.00 frames. 
], tot_loss[loss=0.2122, simple_loss=0.2915, pruned_loss=0.06646, over 3803066.85 frames. ], batch size: 53, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:42:41,442 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:43,608 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132789.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:58,323 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 16:43:13,165 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:24,235 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 4.785e+02 6.187e+02 7.720e+02 1.879e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-02 16:43:37,018 INFO [train.py:903] (2/4) Epoch 20, batch 3100, loss[loss=0.2469, simple_loss=0.3267, pruned_loss=0.08353, over 19677.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2921, pruned_loss=0.06734, over 3789834.40 frames. ], batch size: 58, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:44:27,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8409, 1.6717, 1.7429, 2.2196, 1.8823, 2.0658, 1.9876, 1.8777], + device='cuda:2'), covar=tensor([0.0710, 0.0789, 0.0822, 0.0649, 0.0813, 0.0659, 0.0880, 0.0606], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0243, 0.0229, 0.0211, 0.0187, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 16:44:40,228 INFO [train.py:903] (2/4) Epoch 20, batch 3150, loss[loss=0.2562, simple_loss=0.3275, pruned_loss=0.09246, over 19524.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.292, pruned_loss=0.06712, over 3796037.01 frames. ], batch size: 56, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:44:58,736 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8579, 0.8976, 1.2156, 0.5844, 1.4779, 1.7164, 1.5366, 1.7962], + device='cuda:2'), covar=tensor([0.1267, 0.2999, 0.2580, 0.2503, 0.0912, 0.0459, 0.0344, 0.0413], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0319, 0.0349, 0.0264, 0.0240, 0.0184, 0.0216, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 16:45:07,841 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 16:45:29,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.025e+02 5.951e+02 7.011e+02 1.371e+03, threshold=1.190e+03, percent-clipped=2.0 +2023-04-02 16:45:42,517 INFO [train.py:903] (2/4) Epoch 20, batch 3200, loss[loss=0.2133, simple_loss=0.2749, pruned_loss=0.07581, over 16782.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2906, pruned_loss=0.06659, over 3790089.30 frames. ], batch size: 37, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:46:46,022 INFO [train.py:903] (2/4) Epoch 20, batch 3250, loss[loss=0.2888, simple_loss=0.348, pruned_loss=0.1148, over 19648.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2908, pruned_loss=0.06656, over 3798954.94 frames. 
], batch size: 55, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:47:37,653 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.383e+02 4.867e+02 6.333e+02 8.818e+02 1.782e+03, threshold=1.267e+03, percent-clipped=7.0 +2023-04-02 16:47:37,841 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:47:49,137 INFO [train.py:903] (2/4) Epoch 20, batch 3300, loss[loss=0.2085, simple_loss=0.2866, pruned_loss=0.06522, over 19764.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2904, pruned_loss=0.06627, over 3816758.90 frames. ], batch size: 54, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:47:57,183 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 16:48:03,600 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7550, 1.8240, 2.1086, 2.2892, 1.6771, 2.1718, 2.1434, 1.9009], + device='cuda:2'), covar=tensor([0.4276, 0.3881, 0.1987, 0.2281, 0.3996, 0.2229, 0.5104, 0.3544], + device='cuda:2'), in_proj_covar=tensor([0.0882, 0.0943, 0.0704, 0.0923, 0.0862, 0.0795, 0.0834, 0.0771], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 16:48:25,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1326, 1.3459, 1.8181, 1.2154, 2.6621, 3.7865, 3.4380, 3.9831], + device='cuda:2'), covar=tensor([0.1748, 0.3788, 0.3241, 0.2447, 0.0627, 0.0197, 0.0204, 0.0238], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0319, 0.0348, 0.0263, 0.0240, 0.0184, 0.0215, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 16:48:49,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3423, 2.1681, 1.9641, 1.8428, 1.6854, 1.8232, 0.7846, 1.3071], + device='cuda:2'), covar=tensor([0.0595, 0.0596, 0.0517, 0.0821, 0.1191, 0.0954, 0.1261, 0.1008], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0352, 0.0357, 0.0384, 0.0457, 0.0386, 0.0333, 0.0338], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:48:54,793 INFO [train.py:903] (2/4) Epoch 20, batch 3350, loss[loss=0.1543, simple_loss=0.2388, pruned_loss=0.0349, over 19403.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2893, pruned_loss=0.06522, over 3814288.75 frames. ], batch size: 47, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:49:27,437 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:49:45,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.956e+02 6.093e+02 7.175e+02 1.819e+03, threshold=1.219e+03, percent-clipped=1.0 +2023-04-02 16:49:57,537 INFO [train.py:903] (2/4) Epoch 20, batch 3400, loss[loss=0.2023, simple_loss=0.2866, pruned_loss=0.05896, over 19625.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2896, pruned_loss=0.06548, over 3817969.83 frames. ], batch size: 57, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:50:06,211 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133137.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:51:02,171 INFO [train.py:903] (2/4) Epoch 20, batch 3450, loss[loss=0.2191, simple_loss=0.2933, pruned_loss=0.07245, over 19540.00 frames. 
], tot_loss[loss=0.2118, simple_loss=0.291, pruned_loss=0.06633, over 3815803.56 frames. ], batch size: 54, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:51:07,351 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8515, 2.7359, 2.1284, 2.0641, 1.5883, 2.1054, 0.9947, 1.9042], + device='cuda:2'), covar=tensor([0.0917, 0.0690, 0.0687, 0.1357, 0.1540, 0.1579, 0.1513, 0.1218], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0349, 0.0354, 0.0381, 0.0453, 0.0382, 0.0331, 0.0336], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 16:51:08,048 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 16:51:52,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 4.746e+02 5.634e+02 7.504e+02 1.582e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 16:51:54,107 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133223.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:52:04,222 INFO [train.py:903] (2/4) Epoch 20, batch 3500, loss[loss=0.2262, simple_loss=0.3078, pruned_loss=0.07228, over 19457.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2922, pruned_loss=0.06719, over 3810544.49 frames. ], batch size: 64, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:08,049 INFO [train.py:903] (2/4) Epoch 20, batch 3550, loss[loss=0.2079, simple_loss=0.2963, pruned_loss=0.05972, over 19672.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2918, pruned_loss=0.06673, over 3818737.96 frames. ], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:58,720 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 5.017e+02 5.933e+02 7.980e+02 2.795e+03, threshold=1.187e+03, percent-clipped=11.0 +2023-04-02 16:54:10,372 INFO [train.py:903] (2/4) Epoch 20, batch 3600, loss[loss=0.1931, simple_loss=0.2718, pruned_loss=0.05725, over 19416.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2916, pruned_loss=0.06696, over 3816111.62 frames. ], batch size: 48, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:15,911 INFO [train.py:903] (2/4) Epoch 20, batch 3650, loss[loss=0.2471, simple_loss=0.3337, pruned_loss=0.08029, over 19656.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2912, pruned_loss=0.0665, over 3815870.43 frames. ], batch size: 58, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:28,087 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133392.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 16:55:29,449 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:30,536 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:02,491 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133418.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:06,669 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 5.113e+02 6.456e+02 7.889e+02 1.610e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 16:56:18,445 INFO [train.py:903] (2/4) Epoch 20, batch 3700, loss[loss=0.2093, simple_loss=0.3001, pruned_loss=0.05931, over 19665.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2915, pruned_loss=0.06624, over 3820484.02 frames. 
], batch size: 60, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:03,745 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5483, 1.5440, 1.4945, 2.0278, 1.5160, 1.7385, 1.7466, 1.6691], + device='cuda:2'), covar=tensor([0.0855, 0.0882, 0.0988, 0.0727, 0.0908, 0.0824, 0.0898, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0221, 0.0224, 0.0242, 0.0227, 0.0210, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 16:57:19,866 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:23,009 INFO [train.py:903] (2/4) Epoch 20, batch 3750, loss[loss=0.1817, simple_loss=0.27, pruned_loss=0.04668, over 19801.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2911, pruned_loss=0.06606, over 3821905.65 frames. ], batch size: 56, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:34,891 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:50,939 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:58:13,502 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.077e+02 5.935e+02 8.206e+02 1.595e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 16:58:24,816 INFO [train.py:903] (2/4) Epoch 20, batch 3800, loss[loss=0.2145, simple_loss=0.2811, pruned_loss=0.07394, over 19765.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2908, pruned_loss=0.06615, over 3833316.48 frames. ], batch size: 48, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:58:44,327 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.58 vs. limit=5.0 +2023-04-02 16:58:58,165 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 16:59:30,213 INFO [train.py:903] (2/4) Epoch 20, batch 3850, loss[loss=0.1601, simple_loss=0.2404, pruned_loss=0.03987, over 19746.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2905, pruned_loss=0.0664, over 3831550.16 frames. ], batch size: 47, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:00:20,964 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.079e+02 6.219e+02 7.261e+02 1.808e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-02 17:00:32,705 INFO [train.py:903] (2/4) Epoch 20, batch 3900, loss[loss=0.204, simple_loss=0.2903, pruned_loss=0.05884, over 19659.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.06636, over 3821758.63 frames. 
], batch size: 58, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:00:51,713 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1586, 2.3671, 1.6961, 2.1354, 2.4081, 1.6332, 1.6364, 2.1093], + device='cuda:2'), covar=tensor([0.1236, 0.1658, 0.1922, 0.1310, 0.1418, 0.0990, 0.1957, 0.0958], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0354, 0.0306, 0.0248, 0.0297, 0.0247, 0.0301, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:01:21,163 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133670.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:01:32,078 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5549, 2.5642, 2.1685, 2.6952, 2.3417, 2.1366, 2.2201, 2.5845], + device='cuda:2'), covar=tensor([0.0975, 0.1461, 0.1359, 0.0951, 0.1340, 0.0507, 0.1228, 0.0609], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0354, 0.0306, 0.0248, 0.0296, 0.0247, 0.0301, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:01:37,406 INFO [train.py:903] (2/4) Epoch 20, batch 3950, loss[loss=0.177, simple_loss=0.2577, pruned_loss=0.04814, over 19410.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2914, pruned_loss=0.06657, over 3823090.54 frames. ], batch size: 48, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:01:42,246 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 17:01:54,654 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 17:02:26,833 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.167e+02 6.244e+02 7.613e+02 1.189e+03, threshold=1.249e+03, percent-clipped=0.0 +2023-04-02 17:02:38,785 INFO [train.py:903] (2/4) Epoch 20, batch 4000, loss[loss=0.2409, simple_loss=0.3136, pruned_loss=0.08414, over 19448.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2902, pruned_loss=0.06576, over 3826869.79 frames. ], batch size: 64, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:02:43,768 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133736.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:02:47,218 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:03:26,456 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 17:03:41,991 INFO [train.py:903] (2/4) Epoch 20, batch 4050, loss[loss=0.2037, simple_loss=0.2795, pruned_loss=0.06395, over 19663.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2901, pruned_loss=0.06587, over 3820928.94 frames. ], batch size: 53, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:30,933 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.971e+02 6.413e+02 8.150e+02 1.897e+03, threshold=1.283e+03, percent-clipped=7.0 +2023-04-02 17:04:42,293 INFO [train.py:903] (2/4) Epoch 20, batch 4100, loss[loss=0.1852, simple_loss=0.2631, pruned_loss=0.0536, over 19391.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2902, pruned_loss=0.06612, over 3815181.70 frames. 
], batch size: 48, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:47,143 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:06,758 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133851.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:05:08,977 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:09,074 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7813, 1.7715, 1.9524, 1.8725, 2.5767, 2.3054, 2.6808, 1.6192], + device='cuda:2'), covar=tensor([0.1846, 0.3240, 0.2100, 0.1537, 0.1244, 0.1680, 0.1132, 0.3794], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0635, 0.0700, 0.0478, 0.0616, 0.0526, 0.0661, 0.0543], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:05:18,886 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 17:05:19,374 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0162, 2.0842, 2.3299, 2.7444, 2.0049, 2.5737, 2.3747, 2.0889], + device='cuda:2'), covar=tensor([0.4179, 0.3822, 0.1822, 0.2292, 0.4065, 0.1984, 0.4527, 0.3161], + device='cuda:2'), in_proj_covar=tensor([0.0886, 0.0948, 0.0707, 0.0931, 0.0867, 0.0801, 0.0835, 0.0772], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 17:05:45,975 INFO [train.py:903] (2/4) Epoch 20, batch 4150, loss[loss=0.2159, simple_loss=0.2916, pruned_loss=0.07008, over 19527.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06669, over 3792750.79 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:35,678 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.109e+02 4.757e+02 5.877e+02 6.653e+02 1.329e+03, threshold=1.175e+03, percent-clipped=1.0 +2023-04-02 17:06:47,898 INFO [train.py:903] (2/4) Epoch 20, batch 4200, loss[loss=0.1989, simple_loss=0.2768, pruned_loss=0.0605, over 19491.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2917, pruned_loss=0.06694, over 3804787.81 frames. ], batch size: 49, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:51,416 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 17:07:12,210 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:07:31,938 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7468, 3.2148, 3.2636, 3.2801, 1.4498, 3.1462, 2.7493, 3.0511], + device='cuda:2'), covar=tensor([0.1804, 0.1037, 0.0873, 0.0980, 0.5133, 0.1013, 0.0838, 0.1386], + device='cuda:2'), in_proj_covar=tensor([0.0767, 0.0722, 0.0924, 0.0811, 0.0818, 0.0684, 0.0555, 0.0857], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 17:07:34,302 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0184, 2.7922, 2.2262, 2.2633, 1.9564, 2.4693, 1.1843, 2.0680], + device='cuda:2'), covar=tensor([0.0571, 0.0530, 0.0612, 0.0967, 0.1042, 0.0967, 0.1183, 0.0931], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0350, 0.0356, 0.0380, 0.0454, 0.0381, 0.0331, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:07:50,931 INFO [train.py:903] (2/4) Epoch 20, batch 4250, loss[loss=0.2131, simple_loss=0.2915, pruned_loss=0.06733, over 18222.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2913, pruned_loss=0.06652, over 3813681.26 frames. ], batch size: 83, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:08:08,179 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 17:08:20,547 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 17:08:32,206 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:33,424 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:41,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.008e+02 5.790e+02 6.980e+02 1.679e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-02 17:08:54,657 INFO [train.py:903] (2/4) Epoch 20, batch 4300, loss[loss=0.1982, simple_loss=0.2701, pruned_loss=0.06316, over 19488.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2902, pruned_loss=0.06572, over 3821383.47 frames. ], batch size: 49, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:09:28,879 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-02 17:09:50,627 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 17:09:56,287 INFO [train.py:903] (2/4) Epoch 20, batch 4350, loss[loss=0.2382, simple_loss=0.3233, pruned_loss=0.07652, over 19659.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06557, over 3817964.05 frames. 
], batch size: 55, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:10:30,263 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134107.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:10:32,515 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:10:48,193 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 5.216e+02 6.315e+02 8.361e+02 2.012e+03, threshold=1.263e+03, percent-clipped=10.0 +2023-04-02 17:10:49,766 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4801, 2.2186, 1.6181, 1.4614, 2.0242, 1.3138, 1.2917, 1.8661], + device='cuda:2'), covar=tensor([0.1099, 0.0778, 0.1133, 0.0869, 0.0594, 0.1283, 0.0797, 0.0532], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0310, 0.0330, 0.0256, 0.0243, 0.0333, 0.0288, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:10:57,692 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:00,862 INFO [train.py:903] (2/4) Epoch 20, batch 4400, loss[loss=0.2474, simple_loss=0.3195, pruned_loss=0.08765, over 19470.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2901, pruned_loss=0.06584, over 3813482.22 frames. ], batch size: 64, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:11:01,135 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6778, 4.1459, 4.3852, 4.3877, 1.9415, 4.0994, 3.6030, 4.1153], + device='cuda:2'), covar=tensor([0.1682, 0.1091, 0.0603, 0.0663, 0.5347, 0.0995, 0.0665, 0.1164], + device='cuda:2'), in_proj_covar=tensor([0.0774, 0.0726, 0.0929, 0.0813, 0.0824, 0.0689, 0.0558, 0.0864], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 17:11:01,255 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134132.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:11:03,633 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:27,019 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4703, 1.4209, 1.9753, 1.3826, 2.9102, 3.8449, 3.5176, 4.0180], + device='cuda:2'), covar=tensor([0.1461, 0.3548, 0.2975, 0.2310, 0.0529, 0.0164, 0.0202, 0.0239], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0318, 0.0348, 0.0264, 0.0239, 0.0183, 0.0215, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 17:11:29,074 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 17:11:37,423 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 17:12:05,046 INFO [train.py:903] (2/4) Epoch 20, batch 4450, loss[loss=0.2164, simple_loss=0.2995, pruned_loss=0.06664, over 19771.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2899, pruned_loss=0.06579, over 3802475.05 frames. 
], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:12:17,687 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8648, 2.7248, 2.2836, 2.8336, 2.5389, 2.1760, 2.2002, 2.6806], + device='cuda:2'), covar=tensor([0.0830, 0.1376, 0.1347, 0.0926, 0.1277, 0.0510, 0.1328, 0.0576], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0358, 0.0309, 0.0251, 0.0301, 0.0250, 0.0305, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:12:36,965 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134207.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:12:46,597 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3105, 1.3837, 2.0006, 1.8356, 3.0430, 4.4473, 4.2627, 4.8715], + device='cuda:2'), covar=tensor([0.1693, 0.3832, 0.3286, 0.2183, 0.0642, 0.0266, 0.0198, 0.0197], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0318, 0.0348, 0.0263, 0.0239, 0.0183, 0.0215, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 17:12:56,175 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.942e+02 5.989e+02 7.537e+02 1.405e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-02 17:13:08,110 INFO [train.py:903] (2/4) Epoch 20, batch 4500, loss[loss=0.1783, simple_loss=0.2533, pruned_loss=0.05169, over 18669.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2908, pruned_loss=0.06642, over 3793912.74 frames. ], batch size: 41, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:13:08,534 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134232.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:13:35,990 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2047, 2.0150, 1.8172, 2.1093, 1.8706, 1.8264, 1.7403, 2.0209], + device='cuda:2'), covar=tensor([0.1073, 0.1567, 0.1436, 0.1224, 0.1561, 0.0579, 0.1395, 0.0725], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0356, 0.0308, 0.0250, 0.0300, 0.0249, 0.0304, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:14:10,218 INFO [train.py:903] (2/4) Epoch 20, batch 4550, loss[loss=0.1993, simple_loss=0.2767, pruned_loss=0.06102, over 19732.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06602, over 3795864.13 frames. ], batch size: 51, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:14:19,191 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 17:14:42,915 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 17:15:02,257 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:04,304 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.805e+02 4.924e+02 5.886e+02 8.898e+02 2.816e+03, threshold=1.177e+03, percent-clipped=9.0 +2023-04-02 17:15:09,050 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:14,424 INFO [train.py:903] (2/4) Epoch 20, batch 4600, loss[loss=0.2126, simple_loss=0.2899, pruned_loss=0.06763, over 19749.00 frames. 
], tot_loss[loss=0.2108, simple_loss=0.2901, pruned_loss=0.06574, over 3798583.57 frames. ], batch size: 63, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:15:47,467 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:15,900 INFO [train.py:903] (2/4) Epoch 20, batch 4650, loss[loss=0.1981, simple_loss=0.2857, pruned_loss=0.05529, over 19790.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2885, pruned_loss=0.06496, over 3808314.02 frames. ], batch size: 56, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:16:20,736 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134385.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:32,920 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 17:16:44,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 17:16:52,783 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134410.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:17:09,007 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 4.575e+02 5.939e+02 8.134e+02 1.295e+03, threshold=1.188e+03, percent-clipped=3.0 +2023-04-02 17:17:19,256 INFO [train.py:903] (2/4) Epoch 20, batch 4700, loss[loss=0.1979, simple_loss=0.2846, pruned_loss=0.05558, over 19439.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.06473, over 3802475.93 frames. ], batch size: 64, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:17:41,469 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 17:18:11,939 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:18:20,712 INFO [train.py:903] (2/4) Epoch 20, batch 4750, loss[loss=0.229, simple_loss=0.2937, pruned_loss=0.08218, over 19390.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.06468, over 3803132.37 frames. ], batch size: 48, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:18:56,444 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7122, 1.5547, 1.5136, 2.0705, 1.5170, 2.0719, 1.9764, 1.7514], + device='cuda:2'), covar=tensor([0.0836, 0.0932, 0.1003, 0.0811, 0.0923, 0.0716, 0.0838, 0.0686], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0242, 0.0225, 0.0210, 0.0187, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 17:19:14,950 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.001e+02 5.955e+02 7.090e+02 1.974e+03, threshold=1.191e+03, percent-clipped=7.0 +2023-04-02 17:19:25,456 INFO [train.py:903] (2/4) Epoch 20, batch 4800, loss[loss=0.1765, simple_loss=0.2544, pruned_loss=0.04931, over 19753.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06439, over 3816782.66 frames. ], batch size: 46, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:20:26,819 INFO [train.py:903] (2/4) Epoch 20, batch 4850, loss[loss=0.1999, simple_loss=0.2838, pruned_loss=0.05801, over 19661.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2881, pruned_loss=0.06464, over 3805169.07 frames. 
], batch size: 58, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:20:49,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 17:20:54,357 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 17:21:13,049 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 17:21:18,721 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 17:21:18,752 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 17:21:21,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.787e+02 5.710e+02 7.760e+02 1.554e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-02 17:21:31,383 INFO [train.py:903] (2/4) Epoch 20, batch 4900, loss[loss=0.2184, simple_loss=0.2969, pruned_loss=0.06996, over 19772.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2883, pruned_loss=0.06465, over 3811542.38 frames. ], batch size: 56, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:21:31,393 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 17:21:51,454 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 17:22:14,393 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134666.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:21,058 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:33,377 INFO [train.py:903] (2/4) Epoch 20, batch 4950, loss[loss=0.2478, simple_loss=0.322, pruned_loss=0.08686, over 18202.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2897, pruned_loss=0.0655, over 3800508.43 frames. ], batch size: 83, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:22:37,649 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 17:22:49,221 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 17:23:15,463 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 17:23:28,332 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 4.876e+02 5.783e+02 7.285e+02 1.244e+03, threshold=1.157e+03, percent-clipped=2.0 +2023-04-02 17:23:34,625 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:23:37,542 INFO [train.py:903] (2/4) Epoch 20, batch 5000, loss[loss=0.2538, simple_loss=0.3221, pruned_loss=0.09272, over 19696.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2898, pruned_loss=0.0653, over 3812960.90 frames. ], batch size: 59, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:23:45,521 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 17:23:56,613 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 17:24:06,075 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:37,432 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134781.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:38,201 INFO [train.py:903] (2/4) Epoch 20, batch 5050, loss[loss=0.1868, simple_loss=0.2602, pruned_loss=0.05668, over 19719.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06538, over 3813708.02 frames. ], batch size: 46, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:24:44,167 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134786.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:25:13,081 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 17:25:21,970 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 17:25:22,640 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7061, 2.6069, 2.1374, 2.0556, 1.9153, 2.2870, 1.1816, 1.9324], + device='cuda:2'), covar=tensor([0.0668, 0.0614, 0.0633, 0.0995, 0.0990, 0.1113, 0.1265, 0.0943], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0348, 0.0352, 0.0376, 0.0452, 0.0383, 0.0331, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:25:31,564 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.915e+02 6.341e+02 8.226e+02 2.739e+03, threshold=1.268e+03, percent-clipped=9.0 +2023-04-02 17:25:41,325 INFO [train.py:903] (2/4) Epoch 20, batch 5100, loss[loss=0.1966, simple_loss=0.2703, pruned_loss=0.06148, over 15501.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2885, pruned_loss=0.065, over 3817976.80 frames. ], batch size: 34, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:25:50,455 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 17:25:53,848 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 17:25:58,149 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 17:25:58,878 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-02 17:26:41,227 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134880.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:26:43,296 INFO [train.py:903] (2/4) Epoch 20, batch 5150, loss[loss=0.2369, simple_loss=0.3184, pruned_loss=0.07773, over 17542.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06506, over 3815272.77 frames. 
], batch size: 101, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:26:47,041 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3009, 1.0309, 1.3689, 1.3356, 2.6385, 1.0101, 2.2822, 3.0766], + device='cuda:2'), covar=tensor([0.0709, 0.3989, 0.3488, 0.2377, 0.1248, 0.3099, 0.1461, 0.0514], + device='cuda:2'), in_proj_covar=tensor([0.0401, 0.0361, 0.0380, 0.0342, 0.0371, 0.0346, 0.0372, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:26:55,958 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 17:27:03,555 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 17:27:10,859 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 17:27:32,553 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 17:27:37,091 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.618e+02 4.964e+02 6.321e+02 8.056e+02 1.479e+03, threshold=1.264e+03, percent-clipped=3.0 +2023-04-02 17:27:46,174 INFO [train.py:903] (2/4) Epoch 20, batch 5200, loss[loss=0.1892, simple_loss=0.281, pruned_loss=0.04874, over 19525.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2884, pruned_loss=0.06471, over 3832821.61 frames. ], batch size: 56, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:28:00,831 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 17:28:46,960 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 17:28:49,183 INFO [train.py:903] (2/4) Epoch 20, batch 5250, loss[loss=0.2271, simple_loss=0.3054, pruned_loss=0.07436, over 18393.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.288, pruned_loss=0.06466, over 3845729.74 frames. ], batch size: 84, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:42,805 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.845e+02 5.899e+02 8.450e+02 1.811e+03, threshold=1.180e+03, percent-clipped=4.0 +2023-04-02 17:29:51,986 INFO [train.py:903] (2/4) Epoch 20, batch 5300, loss[loss=0.1602, simple_loss=0.2402, pruned_loss=0.04012, over 19759.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2878, pruned_loss=0.06432, over 3852296.09 frames. ], batch size: 46, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:59,080 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135037.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:04,964 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:11,592 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 17:30:30,738 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:36,517 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:54,696 INFO [train.py:903] (2/4) Epoch 20, batch 5350, loss[loss=0.1998, simple_loss=0.2769, pruned_loss=0.06129, over 19460.00 frames. 
], tot_loss[loss=0.2099, simple_loss=0.2892, pruned_loss=0.06532, over 3835372.42 frames. ], batch size: 49, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:29,755 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 17:31:48,518 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.248e+02 6.375e+02 8.534e+02 1.946e+03, threshold=1.275e+03, percent-clipped=9.0 +2023-04-02 17:31:57,760 INFO [train.py:903] (2/4) Epoch 20, batch 5400, loss[loss=0.1966, simple_loss=0.282, pruned_loss=0.05559, over 19654.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2902, pruned_loss=0.06633, over 3830582.16 frames. ], batch size: 55, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:59,398 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3806, 1.4278, 1.7213, 1.6272, 2.6907, 2.2351, 2.8988, 1.2028], + device='cuda:2'), covar=tensor([0.2574, 0.4571, 0.2809, 0.2050, 0.1540, 0.2175, 0.1358, 0.4629], + device='cuda:2'), in_proj_covar=tensor([0.0522, 0.0628, 0.0694, 0.0474, 0.0610, 0.0519, 0.0650, 0.0538], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:33:00,877 INFO [train.py:903] (2/4) Epoch 20, batch 5450, loss[loss=0.2106, simple_loss=0.2918, pruned_loss=0.06468, over 19494.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2893, pruned_loss=0.06576, over 3841711.97 frames. ], batch size: 64, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:09,044 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135189.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:15,491 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2470, 2.0259, 1.8346, 2.1353, 1.8280, 1.8262, 1.7111, 2.0646], + device='cuda:2'), covar=tensor([0.0935, 0.1332, 0.1407, 0.1060, 0.1425, 0.0544, 0.1363, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0351, 0.0305, 0.0248, 0.0297, 0.0247, 0.0303, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:33:52,674 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135224.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:54,895 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.255e+02 6.010e+02 7.558e+02 1.824e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 17:34:03,100 INFO [train.py:903] (2/4) Epoch 20, batch 5500, loss[loss=0.1886, simple_loss=0.2644, pruned_loss=0.05642, over 19754.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2899, pruned_loss=0.06618, over 3823448.40 frames. ], batch size: 47, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:34:29,174 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-02 17:34:30,745 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0596, 2.8157, 2.1477, 2.1547, 1.9642, 2.4720, 0.9444, 1.9502], + device='cuda:2'), covar=tensor([0.0637, 0.0541, 0.0741, 0.1047, 0.1129, 0.1063, 0.1453, 0.1072], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0351, 0.0354, 0.0379, 0.0455, 0.0384, 0.0333, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:35:05,530 INFO [train.py:903] (2/4) Epoch 20, batch 5550, loss[loss=0.1755, simple_loss=0.2559, pruned_loss=0.04749, over 19740.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2894, pruned_loss=0.0656, over 3823012.59 frames. ], batch size: 46, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:35:13,921 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 17:35:20,847 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7541, 4.0111, 4.4825, 4.5517, 1.8832, 4.2162, 3.6258, 3.8683], + device='cuda:2'), covar=tensor([0.1975, 0.1605, 0.0901, 0.0992, 0.6941, 0.1901, 0.1072, 0.2099], + device='cuda:2'), in_proj_covar=tensor([0.0769, 0.0724, 0.0928, 0.0813, 0.0821, 0.0691, 0.0554, 0.0862], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 17:36:00,656 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.884e+02 6.418e+02 8.039e+02 2.322e+03, threshold=1.284e+03, percent-clipped=8.0 +2023-04-02 17:36:03,998 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 17:36:07,360 INFO [train.py:903] (2/4) Epoch 20, batch 5600, loss[loss=0.1898, simple_loss=0.2773, pruned_loss=0.05115, over 19529.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2896, pruned_loss=0.06577, over 3821295.15 frames. ], batch size: 54, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:36:18,180 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135339.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:36:31,076 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1108, 2.0053, 1.9127, 1.7109, 1.5715, 1.7589, 0.6011, 1.0110], + device='cuda:2'), covar=tensor([0.0655, 0.0594, 0.0445, 0.0786, 0.1190, 0.0844, 0.1270, 0.1079], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0354, 0.0355, 0.0381, 0.0458, 0.0388, 0.0335, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:37:11,262 INFO [train.py:903] (2/4) Epoch 20, batch 5650, loss[loss=0.219, simple_loss=0.2868, pruned_loss=0.07558, over 19131.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2885, pruned_loss=0.06513, over 3830974.21 frames. ], batch size: 42, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:01,367 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 17:38:05,717 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.892e+02 5.590e+02 6.975e+02 1.698e+03, threshold=1.118e+03, percent-clipped=3.0 +2023-04-02 17:38:12,543 INFO [train.py:903] (2/4) Epoch 20, batch 5700, loss[loss=0.2322, simple_loss=0.3033, pruned_loss=0.08053, over 19326.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2886, pruned_loss=0.06509, over 3834413.50 frames. 
], batch size: 66, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:14,104 INFO [train.py:903] (2/4) Epoch 20, batch 5750, loss[loss=0.2638, simple_loss=0.3276, pruned_loss=0.1, over 13946.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.0664, over 3839542.15 frames. ], batch size: 137, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:17,228 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 17:39:25,442 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 17:39:31,249 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 17:40:10,280 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 5.062e+02 5.931e+02 7.729e+02 1.708e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-02 17:40:18,219 INFO [train.py:903] (2/4) Epoch 20, batch 5800, loss[loss=0.2121, simple_loss=0.2942, pruned_loss=0.06499, over 18096.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06619, over 3842048.54 frames. ], batch size: 83, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:40:19,535 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:41:20,579 INFO [train.py:903] (2/4) Epoch 20, batch 5850, loss[loss=0.23, simple_loss=0.3077, pruned_loss=0.07617, over 19582.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2915, pruned_loss=0.06708, over 3837923.73 frames. ], batch size: 61, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:41:36,386 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:08,925 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:11,762 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-02 17:42:15,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.765e+02 5.164e+02 6.346e+02 8.557e+02 2.066e+03, threshold=1.269e+03, percent-clipped=9.0 +2023-04-02 17:42:23,022 INFO [train.py:903] (2/4) Epoch 20, batch 5900, loss[loss=0.1796, simple_loss=0.265, pruned_loss=0.04712, over 19854.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.06736, over 3836193.29 frames. ], batch size: 52, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:42:25,417 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 17:42:43,065 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:43,132 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:46,314 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 17:43:24,700 INFO [train.py:903] (2/4) Epoch 20, batch 5950, loss[loss=0.2188, simple_loss=0.3008, pruned_loss=0.06842, over 19754.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2935, pruned_loss=0.06798, over 3828591.13 frames. 
], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:19,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.126e+02 6.708e+02 1.004e+03 2.382e+03, threshold=1.342e+03, percent-clipped=11.0 +2023-04-02 17:44:27,263 INFO [train.py:903] (2/4) Epoch 20, batch 6000, loss[loss=0.1787, simple_loss=0.2645, pruned_loss=0.04648, over 19597.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2916, pruned_loss=0.0668, over 3838656.47 frames. ], batch size: 52, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:27,264 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 17:44:39,943 INFO [train.py:937] (2/4) Epoch 20, validation: loss=0.1697, simple_loss=0.2697, pruned_loss=0.0349, over 944034.00 frames. +2023-04-02 17:44:39,945 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 17:45:41,602 INFO [train.py:903] (2/4) Epoch 20, batch 6050, loss[loss=0.1909, simple_loss=0.2764, pruned_loss=0.05274, over 19762.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2914, pruned_loss=0.06675, over 3838930.09 frames. ], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:46:10,071 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 17:46:36,905 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 4.873e+02 5.806e+02 7.519e+02 1.887e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-04-02 17:46:43,929 INFO [train.py:903] (2/4) Epoch 20, batch 6100, loss[loss=0.207, simple_loss=0.2848, pruned_loss=0.06457, over 19685.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2924, pruned_loss=0.06775, over 3815608.18 frames. ], batch size: 53, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:47:46,917 INFO [train.py:903] (2/4) Epoch 20, batch 6150, loss[loss=0.2351, simple_loss=0.3121, pruned_loss=0.07907, over 19690.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2907, pruned_loss=0.06657, over 3816846.46 frames. ], batch size: 59, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:48:15,328 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135904.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:16,119 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 17:48:21,075 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3214, 1.1924, 1.6587, 1.1640, 2.4829, 3.3323, 3.0347, 3.5243], + device='cuda:2'), covar=tensor([0.1451, 0.3862, 0.3373, 0.2508, 0.0600, 0.0209, 0.0224, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0318, 0.0347, 0.0264, 0.0239, 0.0183, 0.0215, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 17:48:38,633 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0117, 5.0822, 5.8830, 5.8531, 1.9129, 5.5697, 4.6648, 5.5072], + device='cuda:2'), covar=tensor([0.1765, 0.0857, 0.0608, 0.0664, 0.6267, 0.0726, 0.0595, 0.1302], + device='cuda:2'), in_proj_covar=tensor([0.0768, 0.0723, 0.0927, 0.0809, 0.0820, 0.0685, 0.0557, 0.0859], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 17:48:41,890 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.701e+02 6.211e+02 7.118e+02 1.417e+03, threshold=1.242e+03, percent-clipped=3.0 +2023-04-02 17:48:43,260 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0537, 1.2757, 1.7873, 0.9982, 2.5334, 3.3300, 3.0360, 3.5715], + device='cuda:2'), covar=tensor([0.1634, 0.3786, 0.3212, 0.2602, 0.0536, 0.0208, 0.0233, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0319, 0.0348, 0.0264, 0.0239, 0.0183, 0.0215, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 17:48:45,691 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:49,686 INFO [train.py:903] (2/4) Epoch 20, batch 6200, loss[loss=0.1718, simple_loss=0.2549, pruned_loss=0.04438, over 19584.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2902, pruned_loss=0.06625, over 3808500.40 frames. ], batch size: 52, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:49:52,103 INFO [train.py:903] (2/4) Epoch 20, batch 6250, loss[loss=0.2164, simple_loss=0.2899, pruned_loss=0.07148, over 19613.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2889, pruned_loss=0.06586, over 3814028.35 frames. ], batch size: 50, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:04,689 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:15,858 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:19,545 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2725, 1.2916, 1.4759, 1.4187, 1.8489, 1.7901, 1.8681, 0.6223], + device='cuda:2'), covar=tensor([0.2437, 0.4300, 0.2616, 0.1972, 0.1591, 0.2415, 0.1480, 0.4723], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0636, 0.0699, 0.0477, 0.0617, 0.0525, 0.0658, 0.0543], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 17:50:24,786 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 17:50:30,650 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136012.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:48,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 5.185e+02 6.758e+02 8.208e+02 2.074e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-02 17:50:55,343 INFO [train.py:903] (2/4) Epoch 20, batch 6300, loss[loss=0.2424, simple_loss=0.3213, pruned_loss=0.08173, over 17255.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2903, pruned_loss=0.06678, over 3803846.08 frames. ], batch size: 101, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:56,863 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3268, 1.6614, 1.9410, 1.8820, 2.9042, 1.5767, 2.7132, 3.1901], + device='cuda:2'), covar=tensor([0.0707, 0.2989, 0.2635, 0.1978, 0.0980, 0.2463, 0.1935, 0.0510], + device='cuda:2'), in_proj_covar=tensor([0.0404, 0.0360, 0.0380, 0.0342, 0.0372, 0.0347, 0.0372, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:51:58,458 INFO [train.py:903] (2/4) Epoch 20, batch 6350, loss[loss=0.2368, simple_loss=0.3123, pruned_loss=0.08067, over 19784.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2906, pruned_loss=0.06693, over 3810982.14 frames. ], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:52:30,869 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:52:53,616 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.045e+02 5.932e+02 7.156e+02 1.987e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 17:53:01,256 INFO [train.py:903] (2/4) Epoch 20, batch 6400, loss[loss=0.2921, simple_loss=0.3507, pruned_loss=0.1167, over 13651.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.29, pruned_loss=0.06629, over 3794812.33 frames. ], batch size: 137, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:53:36,340 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:54:02,561 INFO [train.py:903] (2/4) Epoch 20, batch 6450, loss[loss=0.2029, simple_loss=0.2645, pruned_loss=0.07068, over 19741.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06693, over 3784033.81 frames. ], batch size: 46, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:54:51,304 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 17:54:59,001 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.012e+02 6.099e+02 8.089e+02 3.011e+03, threshold=1.220e+03, percent-clipped=7.0 +2023-04-02 17:55:01,043 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 17:55:07,138 INFO [train.py:903] (2/4) Epoch 20, batch 6500, loss[loss=0.2371, simple_loss=0.3174, pruned_loss=0.07835, over 17172.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2907, pruned_loss=0.06665, over 3798521.68 frames. ], batch size: 101, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:55:12,989 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 17:55:20,961 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6439, 1.7124, 1.8750, 2.0831, 1.5208, 1.9394, 1.9272, 1.7861], + device='cuda:2'), covar=tensor([0.3957, 0.3470, 0.1911, 0.2075, 0.3620, 0.2054, 0.4926, 0.3237], + device='cuda:2'), in_proj_covar=tensor([0.0878, 0.0942, 0.0700, 0.0919, 0.0862, 0.0793, 0.0826, 0.0768], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 17:55:41,770 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9485, 1.9267, 1.7497, 2.0612, 1.9709, 1.6640, 1.7077, 1.8942], + device='cuda:2'), covar=tensor([0.0975, 0.1404, 0.1371, 0.0895, 0.1127, 0.0816, 0.1459, 0.0761], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0353, 0.0305, 0.0248, 0.0297, 0.0246, 0.0304, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 17:56:09,980 INFO [train.py:903] (2/4) Epoch 20, batch 6550, loss[loss=0.1869, simple_loss=0.2638, pruned_loss=0.05499, over 19779.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06699, over 3800986.89 frames. ], batch size: 47, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:56:48,280 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 17:57:06,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.319e+02 6.594e+02 8.030e+02 1.579e+03, threshold=1.319e+03, percent-clipped=2.0 +2023-04-02 17:57:14,420 INFO [train.py:903] (2/4) Epoch 20, batch 6600, loss[loss=0.2249, simple_loss=0.3139, pruned_loss=0.06791, over 19588.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06643, over 3797066.68 frames. ], batch size: 61, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:28,443 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:41,140 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:45,651 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136356.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:53,924 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:16,846 INFO [train.py:903] (2/4) Epoch 20, batch 6650, loss[loss=0.2048, simple_loss=0.285, pruned_loss=0.06225, over 19694.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.06678, over 3799459.13 frames. ], batch size: 53, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:58:26,125 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:43,792 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 17:59:14,394 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.763e+02 5.879e+02 7.861e+02 2.647e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 17:59:22,074 INFO [train.py:903] (2/4) Epoch 20, batch 6700, loss[loss=0.2066, simple_loss=0.2976, pruned_loss=0.0578, over 17481.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.291, pruned_loss=0.0669, over 3788804.34 frames. 
], batch size: 101, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:59:55,291 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:08,540 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136471.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:20,892 INFO [train.py:903] (2/4) Epoch 20, batch 6750, loss[loss=0.1993, simple_loss=0.2901, pruned_loss=0.05426, over 19651.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2914, pruned_loss=0.06687, over 3805265.50 frames. ], batch size: 58, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:00:21,353 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2531, 2.2631, 2.5496, 2.9429, 2.1915, 2.8083, 2.5077, 2.2481], + device='cuda:2'), covar=tensor([0.4310, 0.4277, 0.1944, 0.2645, 0.4547, 0.2256, 0.4902, 0.3597], + device='cuda:2'), in_proj_covar=tensor([0.0882, 0.0947, 0.0704, 0.0924, 0.0865, 0.0797, 0.0830, 0.0773], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 18:00:45,733 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:01:11,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 5.547e+02 6.932e+02 9.039e+02 1.788e+03, threshold=1.386e+03, percent-clipped=9.0 +2023-04-02 18:01:19,058 INFO [train.py:903] (2/4) Epoch 20, batch 6800, loss[loss=0.2009, simple_loss=0.2744, pruned_loss=0.06369, over 19736.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.0658, over 3824547.76 frames. ], batch size: 51, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:02:04,288 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 18:02:05,343 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 18:02:07,959 INFO [train.py:903] (2/4) Epoch 21, batch 0, loss[loss=0.2486, simple_loss=0.3147, pruned_loss=0.09124, over 19751.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3147, pruned_loss=0.09124, over 19751.00 frames. ], batch size: 51, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:02:07,959 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 18:02:18,730 INFO [train.py:937] (2/4) Epoch 21, validation: loss=0.1691, simple_loss=0.2696, pruned_loss=0.03427, over 944034.00 frames. +2023-04-02 18:02:18,730 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 18:02:30,989 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 18:03:20,708 INFO [train.py:903] (2/4) Epoch 21, batch 50, loss[loss=0.2933, simple_loss=0.3537, pruned_loss=0.1165, over 13080.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2915, pruned_loss=0.0665, over 848371.80 frames. ], batch size: 135, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:03:24,695 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. 
limit=2.0 +2023-04-02 18:03:33,355 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136619.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:03:43,288 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.947e+02 6.173e+02 6.953e+02 1.295e+03, threshold=1.235e+03, percent-clipped=0.0 +2023-04-02 18:03:54,622 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 18:04:22,857 INFO [train.py:903] (2/4) Epoch 21, batch 100, loss[loss=0.2204, simple_loss=0.301, pruned_loss=0.06989, over 18174.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2899, pruned_loss=0.0657, over 1522445.10 frames. ], batch size: 84, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:04:25,211 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:04:35,107 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 18:04:42,075 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5758, 1.4658, 1.4045, 1.9311, 1.4786, 1.8315, 1.8483, 1.5520], + device='cuda:2'), covar=tensor([0.0877, 0.0972, 0.1067, 0.0761, 0.0846, 0.0796, 0.0860, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0242, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 18:05:00,466 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-02 18:05:09,050 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136697.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:25,010 INFO [train.py:903] (2/4) Epoch 21, batch 150, loss[loss=0.2238, simple_loss=0.3058, pruned_loss=0.07092, over 19667.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06625, over 2029217.96 frames. ], batch size: 55, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:05:32,011 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:45,589 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 5.031e+02 5.954e+02 7.665e+02 1.668e+03, threshold=1.191e+03, percent-clipped=3.0 +2023-04-02 18:05:45,985 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136727.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:02,681 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136740.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:18,613 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:25,303 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 18:06:27,565 INFO [train.py:903] (2/4) Epoch 21, batch 200, loss[loss=0.2164, simple_loss=0.2996, pruned_loss=0.06665, over 19500.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2915, pruned_loss=0.06715, over 2410042.11 frames. ], batch size: 64, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:29,899 INFO [train.py:903] (2/4) Epoch 21, batch 250, loss[loss=0.2055, simple_loss=0.2948, pruned_loss=0.05806, over 19688.00 frames. 
], tot_loss[loss=0.2132, simple_loss=0.292, pruned_loss=0.06722, over 2719897.91 frames. ], batch size: 53, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:32,566 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6666, 1.3805, 1.5385, 1.4026, 3.2133, 1.1396, 2.3486, 3.6426], + device='cuda:2'), covar=tensor([0.0468, 0.2719, 0.2647, 0.1943, 0.0694, 0.2436, 0.1170, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0406, 0.0361, 0.0380, 0.0344, 0.0371, 0.0348, 0.0372, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:07:32,617 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:47,532 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:52,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.815e+02 6.209e+02 8.068e+02 1.278e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-04-02 18:08:32,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 18:08:33,038 INFO [train.py:903] (2/4) Epoch 21, batch 300, loss[loss=0.1936, simple_loss=0.2724, pruned_loss=0.05739, over 19850.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06648, over 2969458.69 frames. ], batch size: 52, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:08:53,742 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:07,901 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.02 vs. limit=5.0 +2023-04-02 18:09:23,336 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136900.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:36,400 INFO [train.py:903] (2/4) Epoch 21, batch 350, loss[loss=0.1767, simple_loss=0.2544, pruned_loss=0.04947, over 19728.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2901, pruned_loss=0.06601, over 3165955.88 frames. ], batch size: 47, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:09:38,644 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 18:09:56,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.835e+02 5.943e+02 7.281e+02 1.741e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-02 18:10:38,506 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8851, 1.2237, 1.5094, 0.5694, 1.9401, 2.4318, 2.1407, 2.5939], + device='cuda:2'), covar=tensor([0.1645, 0.3904, 0.3438, 0.2865, 0.0643, 0.0278, 0.0342, 0.0369], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0319, 0.0347, 0.0264, 0.0240, 0.0183, 0.0215, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 18:10:39,342 INFO [train.py:903] (2/4) Epoch 21, batch 400, loss[loss=0.2324, simple_loss=0.3117, pruned_loss=0.07656, over 19684.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2901, pruned_loss=0.06523, over 3313060.37 frames. 
], batch size: 59, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:11:35,378 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:11:41,215 INFO [train.py:903] (2/4) Epoch 21, batch 450, loss[loss=0.2324, simple_loss=0.3116, pruned_loss=0.0766, over 19790.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.06535, over 3439432.15 frames. ], batch size: 56, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:03,760 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.613e+02 5.973e+02 7.527e+02 1.521e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-02 18:12:12,942 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 18:12:14,059 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 18:12:43,474 INFO [train.py:903] (2/4) Epoch 21, batch 500, loss[loss=0.2146, simple_loss=0.2898, pruned_loss=0.06972, over 19769.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.06533, over 3529998.06 frames. ], batch size: 63, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:53,166 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:25,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:45,870 INFO [train.py:903] (2/4) Epoch 21, batch 550, loss[loss=0.2131, simple_loss=0.3039, pruned_loss=0.06118, over 19524.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2907, pruned_loss=0.06593, over 3590085.99 frames. ], batch size: 54, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:13:59,971 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:14:09,845 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.365e+02 6.823e+02 8.347e+02 2.113e+03, threshold=1.365e+03, percent-clipped=7.0 +2023-04-02 18:14:48,995 INFO [train.py:903] (2/4) Epoch 21, batch 600, loss[loss=0.2534, simple_loss=0.3221, pruned_loss=0.09234, over 19660.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.292, pruned_loss=0.0668, over 3622963.98 frames. ], batch size: 58, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:14:57,089 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6301, 4.2408, 2.6724, 3.6971, 1.2500, 4.1410, 4.0252, 4.1510], + device='cuda:2'), covar=tensor([0.0559, 0.0984, 0.1980, 0.0884, 0.3743, 0.0653, 0.0870, 0.0989], + device='cuda:2'), in_proj_covar=tensor([0.0492, 0.0403, 0.0486, 0.0338, 0.0401, 0.0423, 0.0418, 0.0453], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:15:00,561 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:15:29,930 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 18:15:53,982 INFO [train.py:903] (2/4) Epoch 21, batch 650, loss[loss=0.2675, simple_loss=0.3376, pruned_loss=0.09869, over 19504.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2923, pruned_loss=0.06712, over 3671098.73 frames. 
], batch size: 64, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:16:12,086 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6384, 1.3366, 1.5311, 1.4717, 3.2002, 1.1644, 2.4252, 3.6052], + device='cuda:2'), covar=tensor([0.0483, 0.2829, 0.2887, 0.1923, 0.0686, 0.2564, 0.1278, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0405, 0.0362, 0.0382, 0.0343, 0.0371, 0.0347, 0.0373, 0.0398], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:16:16,600 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 4.775e+02 5.968e+02 8.002e+02 1.696e+03, threshold=1.194e+03, percent-clipped=7.0 +2023-04-02 18:16:56,146 INFO [train.py:903] (2/4) Epoch 21, batch 700, loss[loss=0.1932, simple_loss=0.2678, pruned_loss=0.05932, over 19754.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2909, pruned_loss=0.06641, over 3709970.71 frames. ], batch size: 46, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:17:26,138 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:17:42,550 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7065, 1.7220, 1.6288, 1.4108, 1.2800, 1.4449, 0.2819, 0.6695], + device='cuda:2'), covar=tensor([0.0621, 0.0598, 0.0392, 0.0610, 0.1272, 0.0682, 0.1248, 0.1098], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0352, 0.0355, 0.0380, 0.0458, 0.0388, 0.0335, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:17:54,693 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-02 18:18:00,855 INFO [train.py:903] (2/4) Epoch 21, batch 750, loss[loss=0.1991, simple_loss=0.2764, pruned_loss=0.06086, over 19607.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2899, pruned_loss=0.06552, over 3737058.10 frames. ], batch size: 50, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:18:22,899 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 4.851e+02 6.236e+02 7.648e+02 2.101e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-04-02 18:19:01,638 INFO [train.py:903] (2/4) Epoch 21, batch 800, loss[loss=0.2319, simple_loss=0.3161, pruned_loss=0.07391, over 19707.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2921, pruned_loss=0.06663, over 3750322.62 frames. ], batch size: 59, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:19:07,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 18:19:22,445 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:19:34,009 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3706, 2.1652, 1.6294, 1.4831, 1.9867, 1.3149, 1.2755, 1.8691], + device='cuda:2'), covar=tensor([0.1082, 0.0784, 0.1094, 0.0835, 0.0569, 0.1352, 0.0816, 0.0542], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0313, 0.0336, 0.0260, 0.0245, 0.0337, 0.0292, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:19:53,531 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:20:05,537 INFO [train.py:903] (2/4) Epoch 21, batch 850, loss[loss=0.2277, simple_loss=0.3086, pruned_loss=0.07339, over 19419.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2919, pruned_loss=0.06637, over 3765009.96 frames. ], batch size: 70, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:20:27,119 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.583e+02 6.647e+02 8.818e+02 2.027e+03, threshold=1.329e+03, percent-clipped=5.0 +2023-04-02 18:20:48,394 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 18:21:06,665 INFO [train.py:903] (2/4) Epoch 21, batch 900, loss[loss=0.1598, simple_loss=0.2447, pruned_loss=0.03747, over 19380.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2916, pruned_loss=0.06656, over 3785658.37 frames. ], batch size: 47, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:21:59,573 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 18:22:08,624 INFO [train.py:903] (2/4) Epoch 21, batch 950, loss[loss=0.1694, simple_loss=0.2513, pruned_loss=0.04377, over 19319.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2908, pruned_loss=0.0662, over 3798964.04 frames. ], batch size: 44, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:22:31,546 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 5.256e+02 6.264e+02 7.895e+02 1.664e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 18:22:34,050 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3996, 1.4289, 1.6315, 1.6280, 2.3287, 2.1127, 2.3354, 1.0208], + device='cuda:2'), covar=tensor([0.2908, 0.4798, 0.3082, 0.2406, 0.1816, 0.2436, 0.1657, 0.5037], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0639, 0.0703, 0.0482, 0.0619, 0.0527, 0.0661, 0.0544], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:22:45,746 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:10,897 INFO [train.py:903] (2/4) Epoch 21, batch 1000, loss[loss=0.2, simple_loss=0.2692, pruned_loss=0.06542, over 19731.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2909, pruned_loss=0.06625, over 3815220.98 frames. ], batch size: 46, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:23:15,568 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:55,873 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 18:24:10,012 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-02 18:24:14,000 INFO [train.py:903] (2/4) Epoch 21, batch 1050, loss[loss=0.2052, simple_loss=0.2911, pruned_loss=0.05962, over 18756.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2915, pruned_loss=0.06658, over 3811437.47 frames. ], batch size: 74, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:24:35,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.318e+02 6.486e+02 8.521e+02 3.216e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 18:24:36,542 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 18:25:17,799 INFO [train.py:903] (2/4) Epoch 21, batch 1100, loss[loss=0.1937, simple_loss=0.2848, pruned_loss=0.05126, over 19685.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2913, pruned_loss=0.06642, over 3817990.91 frames. ], batch size: 59, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:25:48,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2476, 3.7736, 3.9217, 3.9285, 1.6028, 3.7269, 3.2120, 3.6620], + device='cuda:2'), covar=tensor([0.1732, 0.0961, 0.0678, 0.0746, 0.5760, 0.1023, 0.0783, 0.1163], + device='cuda:2'), in_proj_covar=tensor([0.0778, 0.0729, 0.0938, 0.0824, 0.0828, 0.0696, 0.0565, 0.0867], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 18:26:19,703 INFO [train.py:903] (2/4) Epoch 21, batch 1150, loss[loss=0.1715, simple_loss=0.2448, pruned_loss=0.04909, over 19736.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2895, pruned_loss=0.06527, over 3816794.58 frames. ], batch size: 46, lr: 3.95e-03, grad_scale: 4.0 +2023-04-02 18:26:43,796 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.127e+02 6.263e+02 7.580e+02 1.245e+03, threshold=1.253e+03, percent-clipped=0.0 +2023-04-02 18:27:19,133 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5739, 1.5790, 1.7751, 1.7683, 2.5032, 2.3647, 2.6133, 1.3025], + device='cuda:2'), covar=tensor([0.2233, 0.3888, 0.2453, 0.1824, 0.1517, 0.1926, 0.1458, 0.4093], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0640, 0.0704, 0.0483, 0.0618, 0.0528, 0.0662, 0.0544], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:27:22,172 INFO [train.py:903] (2/4) Epoch 21, batch 1200, loss[loss=0.1914, simple_loss=0.2688, pruned_loss=0.05704, over 19778.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2898, pruned_loss=0.066, over 3806080.27 frames. ], batch size: 48, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:27:46,782 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 18:27:50,664 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2353, 1.3199, 1.3946, 1.5139, 1.1356, 1.4525, 1.4675, 1.3532], + device='cuda:2'), covar=tensor([0.2857, 0.2346, 0.1388, 0.1559, 0.2592, 0.1409, 0.3270, 0.2300], + device='cuda:2'), in_proj_covar=tensor([0.0886, 0.0951, 0.0710, 0.0927, 0.0868, 0.0801, 0.0833, 0.0776], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 18:28:25,533 INFO [train.py:903] (2/4) Epoch 21, batch 1250, loss[loss=0.1953, simple_loss=0.287, pruned_loss=0.05174, over 19495.00 frames. 
], tot_loss[loss=0.2106, simple_loss=0.2895, pruned_loss=0.06586, over 3789417.30 frames. ], batch size: 64, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:28:36,406 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3110, 1.1421, 1.5084, 1.3992, 2.8091, 1.1676, 2.2076, 3.2293], + device='cuda:2'), covar=tensor([0.0641, 0.3011, 0.2697, 0.1918, 0.0901, 0.2478, 0.1433, 0.0351], + device='cuda:2'), in_proj_covar=tensor([0.0403, 0.0360, 0.0379, 0.0343, 0.0370, 0.0345, 0.0373, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:28:48,816 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 5.084e+02 5.991e+02 7.123e+02 1.423e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-02 18:29:27,971 INFO [train.py:903] (2/4) Epoch 21, batch 1300, loss[loss=0.2214, simple_loss=0.2927, pruned_loss=0.07511, over 19663.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2893, pruned_loss=0.06582, over 3797642.79 frames. ], batch size: 55, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:29:36,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.38 vs. limit=5.0 +2023-04-02 18:30:30,615 INFO [train.py:903] (2/4) Epoch 21, batch 1350, loss[loss=0.2067, simple_loss=0.2913, pruned_loss=0.06106, over 19763.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2898, pruned_loss=0.06558, over 3790165.35 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:30:30,914 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:34,415 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:54,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.098e+02 6.340e+02 8.452e+02 2.491e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-04-02 18:30:59,193 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-04-02 18:31:33,358 INFO [train.py:903] (2/4) Epoch 21, batch 1400, loss[loss=0.2278, simple_loss=0.3045, pruned_loss=0.07557, over 19655.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2896, pruned_loss=0.06568, over 3808536.52 frames. ], batch size: 58, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:28,121 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 18:32:33,095 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4532, 1.5414, 1.7748, 1.7003, 2.6636, 2.3296, 2.7796, 1.0722], + device='cuda:2'), covar=tensor([0.2495, 0.4430, 0.2676, 0.2021, 0.1502, 0.2170, 0.1377, 0.4709], + device='cuda:2'), in_proj_covar=tensor([0.0533, 0.0644, 0.0707, 0.0485, 0.0621, 0.0529, 0.0666, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 18:32:37,232 INFO [train.py:903] (2/4) Epoch 21, batch 1450, loss[loss=0.2303, simple_loss=0.3119, pruned_loss=0.0743, over 19612.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2895, pruned_loss=0.06555, over 3814619.81 frames. 
], batch size: 61, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:53,767 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8861, 1.9968, 2.2291, 2.5033, 1.8647, 2.4220, 2.2875, 2.0847], + device='cuda:2'), covar=tensor([0.4093, 0.3610, 0.1803, 0.2307, 0.3857, 0.1975, 0.4585, 0.3161], + device='cuda:2'), in_proj_covar=tensor([0.0886, 0.0949, 0.0709, 0.0927, 0.0867, 0.0800, 0.0831, 0.0775], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 18:33:01,289 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.676e+02 5.543e+02 6.978e+02 2.034e+03, threshold=1.109e+03, percent-clipped=2.0 +2023-04-02 18:33:31,757 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-02 18:33:39,043 INFO [train.py:903] (2/4) Epoch 21, batch 1500, loss[loss=0.1852, simple_loss=0.2636, pruned_loss=0.05343, over 19469.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.06468, over 3831775.67 frames. ], batch size: 49, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:33:50,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6880, 2.6074, 2.2209, 2.7165, 2.7130, 2.3506, 2.1329, 2.7134], + device='cuda:2'), covar=tensor([0.0910, 0.1457, 0.1374, 0.1042, 0.1193, 0.0509, 0.1354, 0.0599], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0354, 0.0307, 0.0248, 0.0297, 0.0249, 0.0307, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:34:26,142 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5570, 1.2779, 1.1610, 1.3963, 1.1093, 1.2367, 1.1154, 1.3762], + device='cuda:2'), covar=tensor([0.1187, 0.1242, 0.1784, 0.1130, 0.1438, 0.0859, 0.1798, 0.0948], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0355, 0.0309, 0.0249, 0.0298, 0.0251, 0.0309, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:34:42,049 INFO [train.py:903] (2/4) Epoch 21, batch 1550, loss[loss=0.2405, simple_loss=0.3133, pruned_loss=0.08384, over 19664.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2883, pruned_loss=0.06462, over 3832708.40 frames. ], batch size: 55, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:35:05,477 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 5.280e+02 6.228e+02 7.726e+02 2.313e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 18:35:44,850 INFO [train.py:903] (2/4) Epoch 21, batch 1600, loss[loss=0.255, simple_loss=0.3283, pruned_loss=0.09084, over 19677.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06458, over 3832444.85 frames. ], batch size: 58, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:07,163 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 18:36:48,267 INFO [train.py:903] (2/4) Epoch 21, batch 1650, loss[loss=0.2068, simple_loss=0.2912, pruned_loss=0.06121, over 19546.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06406, over 3825419.30 frames. 
], batch size: 56, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:37:12,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.591e+02 5.832e+02 7.172e+02 1.632e+03, threshold=1.166e+03, percent-clipped=1.0 +2023-04-02 18:37:43,164 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:46,572 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:49,815 INFO [train.py:903] (2/4) Epoch 21, batch 1700, loss[loss=0.1971, simple_loss=0.2834, pruned_loss=0.05536, over 19762.00 frames. ], tot_loss[loss=0.208, simple_loss=0.288, pruned_loss=0.06406, over 3840589.65 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:38:24,226 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:38:28,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 18:38:53,714 INFO [train.py:903] (2/4) Epoch 21, batch 1750, loss[loss=0.2023, simple_loss=0.2897, pruned_loss=0.05746, over 17375.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.0637, over 3839604.98 frames. ], batch size: 101, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:39:16,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 4.819e+02 5.989e+02 8.047e+02 2.111e+03, threshold=1.198e+03, percent-clipped=8.0 +2023-04-02 18:39:30,307 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 18:39:30,906 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:39:55,289 INFO [train.py:903] (2/4) Epoch 21, batch 1800, loss[loss=0.2481, simple_loss=0.3206, pruned_loss=0.08784, over 17242.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2875, pruned_loss=0.06415, over 3830634.88 frames. ], batch size: 101, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:40:05,990 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:10,933 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:54,342 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 18:40:57,825 INFO [train.py:903] (2/4) Epoch 21, batch 1850, loss[loss=0.2602, simple_loss=0.3316, pruned_loss=0.09434, over 17433.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2877, pruned_loss=0.06423, over 3806540.60 frames. ], batch size: 101, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:41:22,806 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 4.784e+02 5.677e+02 7.800e+02 1.333e+03, threshold=1.135e+03, percent-clipped=2.0 +2023-04-02 18:41:34,207 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-02 18:41:34,574 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2168, 2.0242, 1.9579, 1.7648, 1.5118, 1.7425, 0.5699, 1.1459], + device='cuda:2'), covar=tensor([0.0591, 0.0614, 0.0431, 0.0786, 0.1118, 0.0873, 0.1221, 0.1021], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0354, 0.0359, 0.0383, 0.0460, 0.0388, 0.0335, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:42:01,833 INFO [train.py:903] (2/4) Epoch 21, batch 1900, loss[loss=0.2473, simple_loss=0.3272, pruned_loss=0.08371, over 19489.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06492, over 3791535.98 frames. ], batch size: 64, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:42:18,798 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 18:42:23,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 18:42:48,366 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 18:43:04,462 INFO [train.py:903] (2/4) Epoch 21, batch 1950, loss[loss=0.2202, simple_loss=0.2993, pruned_loss=0.07052, over 19581.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2888, pruned_loss=0.06488, over 3801572.08 frames. ], batch size: 61, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:43:27,759 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 4.783e+02 6.007e+02 7.405e+02 3.008e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 18:43:49,770 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2464, 2.1380, 2.0458, 1.8503, 1.6610, 1.8123, 0.5287, 1.2228], + device='cuda:2'), covar=tensor([0.0607, 0.0579, 0.0416, 0.0724, 0.1091, 0.0874, 0.1285, 0.1010], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0354, 0.0360, 0.0383, 0.0461, 0.0389, 0.0336, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:44:06,917 INFO [train.py:903] (2/4) Epoch 21, batch 2000, loss[loss=0.2283, simple_loss=0.3122, pruned_loss=0.07223, over 19663.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.0647, over 3800679.32 frames. ], batch size: 58, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:44:09,477 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9026, 4.4776, 2.6378, 3.9660, 1.1213, 4.4166, 4.2749, 4.4597], + device='cuda:2'), covar=tensor([0.0557, 0.0924, 0.2134, 0.0837, 0.3854, 0.0701, 0.0837, 0.0980], + device='cuda:2'), in_proj_covar=tensor([0.0497, 0.0407, 0.0493, 0.0344, 0.0402, 0.0429, 0.0421, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:45:03,879 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 18:45:08,462 INFO [train.py:903] (2/4) Epoch 21, batch 2050, loss[loss=0.2748, simple_loss=0.3356, pruned_loss=0.1069, over 13551.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.06478, over 3790099.82 frames. ], batch size: 136, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:45:22,122 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. 
Duration: 0.95 +2023-04-02 18:45:23,297 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 18:45:28,204 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138625.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:32,726 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:33,541 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.887e+02 4.818e+02 6.151e+02 8.119e+02 1.355e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 18:45:38,225 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:45,240 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 18:45:59,992 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:03,349 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6108, 4.2473, 2.7685, 3.7599, 1.3583, 4.1130, 3.9925, 4.1703], + device='cuda:2'), covar=tensor([0.0627, 0.0896, 0.1907, 0.0769, 0.3462, 0.0684, 0.0884, 0.1147], + device='cuda:2'), in_proj_covar=tensor([0.0493, 0.0404, 0.0487, 0.0342, 0.0398, 0.0425, 0.0417, 0.0454], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:46:03,528 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:12,219 INFO [train.py:903] (2/4) Epoch 21, batch 2100, loss[loss=0.2225, simple_loss=0.2982, pruned_loss=0.07337, over 19687.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2894, pruned_loss=0.06497, over 3795369.70 frames. ], batch size: 53, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:46:29,068 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2020, 1.2709, 1.2256, 1.0168, 1.0847, 1.0429, 0.0805, 0.3491], + device='cuda:2'), covar=tensor([0.0662, 0.0643, 0.0419, 0.0570, 0.1210, 0.0661, 0.1304, 0.1101], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0353, 0.0359, 0.0382, 0.0460, 0.0388, 0.0335, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:46:33,636 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9545, 1.4354, 1.6926, 1.9096, 4.4799, 1.1932, 2.3678, 4.8112], + device='cuda:2'), covar=tensor([0.0403, 0.2938, 0.3100, 0.1789, 0.0708, 0.2743, 0.1646, 0.0177], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0365, 0.0384, 0.0346, 0.0373, 0.0349, 0.0376, 0.0401], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:46:39,334 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 18:46:41,748 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:00,996 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138699.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:02,041 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 18:47:06,963 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:14,538 INFO [train.py:903] (2/4) Epoch 21, batch 2150, loss[loss=0.2135, simple_loss=0.294, pruned_loss=0.06649, over 18806.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2895, pruned_loss=0.06518, over 3804279.12 frames. ], batch size: 74, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:47:21,574 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:26,524 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0327, 2.1151, 2.3297, 2.8163, 2.1542, 2.7348, 2.3824, 2.1217], + device='cuda:2'), covar=tensor([0.4159, 0.3867, 0.1812, 0.2263, 0.4030, 0.1955, 0.4727, 0.3306], + device='cuda:2'), in_proj_covar=tensor([0.0883, 0.0946, 0.0707, 0.0924, 0.0865, 0.0797, 0.0827, 0.0772], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 18:47:37,630 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.033e+02 5.895e+02 7.227e+02 1.459e+03, threshold=1.179e+03, percent-clipped=3.0 +2023-04-02 18:48:00,951 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:48:18,134 INFO [train.py:903] (2/4) Epoch 21, batch 2200, loss[loss=0.2736, simple_loss=0.3477, pruned_loss=0.09974, over 17287.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06515, over 3803839.10 frames. ], batch size: 101, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:08,074 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:49:20,491 INFO [train.py:903] (2/4) Epoch 21, batch 2250, loss[loss=0.2737, simple_loss=0.3503, pruned_loss=0.09854, over 18793.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06538, over 3796061.44 frames. ], batch size: 74, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:44,479 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.352e+02 5.230e+02 6.732e+02 8.271e+02 2.316e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-04-02 18:50:23,822 INFO [train.py:903] (2/4) Epoch 21, batch 2300, loss[loss=0.2456, simple_loss=0.3167, pruned_loss=0.08727, over 13393.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06538, over 3797257.35 frames. ], batch size: 136, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:50:38,381 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 18:51:27,174 INFO [train.py:903] (2/4) Epoch 21, batch 2350, loss[loss=0.2014, simple_loss=0.2801, pruned_loss=0.06134, over 19851.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2892, pruned_loss=0.0649, over 3801764.14 frames. 
], batch size: 52, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:51:48,951 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:51:50,720 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.807e+02 6.646e+02 8.268e+02 1.737e+03, threshold=1.329e+03, percent-clipped=3.0 +2023-04-02 18:52:08,313 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 18:52:24,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 18:52:31,431 INFO [train.py:903] (2/4) Epoch 21, batch 2400, loss[loss=0.2466, simple_loss=0.3186, pruned_loss=0.08736, over 19849.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.29, pruned_loss=0.06534, over 3792599.04 frames. ], batch size: 52, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:25,691 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0311, 2.1116, 1.6190, 1.9951, 1.9963, 1.4531, 1.5899, 1.8194], + device='cuda:2'), covar=tensor([0.1247, 0.1757, 0.2026, 0.1327, 0.1628, 0.1168, 0.2016, 0.1201], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0354, 0.0308, 0.0248, 0.0298, 0.0249, 0.0307, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:53:26,783 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139003.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:34,421 INFO [train.py:903] (2/4) Epoch 21, batch 2450, loss[loss=0.1718, simple_loss=0.2554, pruned_loss=0.04412, over 19735.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2901, pruned_loss=0.06508, over 3787420.35 frames. ], batch size: 51, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:59,175 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:59,945 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.027e+02 6.383e+02 8.090e+02 1.476e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 18:54:17,225 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:22,820 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:31,522 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139055.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:38,302 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:39,183 INFO [train.py:903] (2/4) Epoch 21, batch 2500, loss[loss=0.2338, simple_loss=0.3217, pruned_loss=0.07295, over 19329.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2895, pruned_loss=0.06447, over 3799563.68 frames. 
], batch size: 70, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:55:05,339 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:55:35,343 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9600, 1.6967, 1.5757, 1.8557, 1.5286, 1.5561, 1.5215, 1.7524], + device='cuda:2'), covar=tensor([0.1036, 0.1465, 0.1523, 0.1180, 0.1444, 0.0585, 0.1501, 0.0801], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0353, 0.0307, 0.0247, 0.0296, 0.0248, 0.0306, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 18:55:42,982 INFO [train.py:903] (2/4) Epoch 21, batch 2550, loss[loss=0.1867, simple_loss=0.2769, pruned_loss=0.04831, over 19663.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2896, pruned_loss=0.06471, over 3798381.74 frames. ], batch size: 60, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:06,420 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 4.926e+02 6.170e+02 7.459e+02 2.294e+03, threshold=1.234e+03, percent-clipped=3.0 +2023-04-02 18:56:35,479 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 18:56:43,492 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:56:45,330 INFO [train.py:903] (2/4) Epoch 21, batch 2600, loss[loss=0.2214, simple_loss=0.2975, pruned_loss=0.0726, over 19560.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06505, over 3809764.25 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:49,244 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:02,175 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:47,158 INFO [train.py:903] (2/4) Epoch 21, batch 2650, loss[loss=0.2227, simple_loss=0.3009, pruned_loss=0.07228, over 19530.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2879, pruned_loss=0.06443, over 3825921.57 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:58:08,387 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 18:58:13,098 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 4.962e+02 5.879e+02 7.846e+02 2.263e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 18:58:29,431 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6554, 1.5544, 1.5524, 2.0688, 1.6620, 2.0391, 2.1364, 1.8341], + device='cuda:2'), covar=tensor([0.0846, 0.0926, 0.1006, 0.0852, 0.0843, 0.0731, 0.0768, 0.0683], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0225, 0.0241, 0.0225, 0.0211, 0.0187, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 18:58:39,743 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3280, 1.4103, 1.6078, 1.5436, 2.4394, 2.1424, 2.6542, 1.1114], + device='cuda:2'), covar=tensor([0.2445, 0.4253, 0.2724, 0.2002, 0.1505, 0.2093, 0.1274, 0.4451], + device='cuda:2'), in_proj_covar=tensor([0.0529, 0.0638, 0.0705, 0.0482, 0.0619, 0.0525, 0.0659, 0.0546], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 18:58:45,377 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:58:49,503 INFO [train.py:903] (2/4) Epoch 21, batch 2700, loss[loss=0.213, simple_loss=0.2868, pruned_loss=0.06962, over 19718.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2873, pruned_loss=0.06404, over 3818398.17 frames. ], batch size: 51, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 18:59:04,240 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139271.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:33,188 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139294.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:49,948 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:52,985 INFO [train.py:903] (2/4) Epoch 21, batch 2750, loss[loss=0.204, simple_loss=0.2794, pruned_loss=0.06433, over 19676.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2864, pruned_loss=0.0638, over 3825059.82 frames. ], batch size: 53, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 19:00:18,059 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.705e+02 5.796e+02 7.407e+02 1.811e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-02 19:00:55,535 INFO [train.py:903] (2/4) Epoch 21, batch 2800, loss[loss=0.185, simple_loss=0.2706, pruned_loss=0.04968, over 19540.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2871, pruned_loss=0.06461, over 3813306.25 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:01:28,018 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139386.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:01:58,449 INFO [train.py:903] (2/4) Epoch 21, batch 2850, loss[loss=0.1979, simple_loss=0.2809, pruned_loss=0.05743, over 19758.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2872, pruned_loss=0.06436, over 3822232.39 frames. 
], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:02:03,742 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:09,489 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139419.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:10,594 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5880, 4.1493, 4.2980, 4.3153, 1.7780, 4.0610, 3.5419, 4.0259], + device='cuda:2'), covar=tensor([0.1585, 0.0797, 0.0636, 0.0647, 0.5464, 0.0820, 0.0700, 0.1144], + device='cuda:2'), in_proj_covar=tensor([0.0778, 0.0730, 0.0942, 0.0820, 0.0826, 0.0693, 0.0568, 0.0870], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 19:02:23,967 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.945e+02 5.282e+02 6.184e+02 7.725e+02 1.552e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 19:02:24,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139430.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:35,344 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:42,185 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139444.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:54,861 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:57,821 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 19:03:00,109 INFO [train.py:903] (2/4) Epoch 21, batch 2900, loss[loss=0.2037, simple_loss=0.2902, pruned_loss=0.05863, over 19689.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2891, pruned_loss=0.0653, over 3820607.05 frames. ], batch size: 59, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:04,512 INFO [train.py:903] (2/4) Epoch 21, batch 2950, loss[loss=0.2419, simple_loss=0.3159, pruned_loss=0.08395, over 13981.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2887, pruned_loss=0.06487, over 3816926.23 frames. 
], batch size: 137, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:28,834 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.684e+02 5.947e+02 7.442e+02 1.403e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-02 19:04:42,961 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0105, 0.9452, 1.1696, 1.4446, 2.3849, 1.2112, 2.2678, 2.8153], + device='cuda:2'), covar=tensor([0.0791, 0.3811, 0.3781, 0.2205, 0.1242, 0.2688, 0.1256, 0.0530], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0362, 0.0381, 0.0343, 0.0369, 0.0347, 0.0373, 0.0399], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:04:49,990 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8812, 1.3335, 1.0887, 0.9805, 1.1487, 1.0059, 0.8733, 1.2468], + device='cuda:2'), covar=tensor([0.0631, 0.0815, 0.1156, 0.0733, 0.0576, 0.1269, 0.0662, 0.0481], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0311, 0.0334, 0.0259, 0.0244, 0.0335, 0.0289, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:05:06,920 INFO [train.py:903] (2/4) Epoch 21, batch 3000, loss[loss=0.2262, simple_loss=0.3006, pruned_loss=0.07595, over 19585.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.066, over 3797128.63 frames. ], batch size: 52, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:05:06,920 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 19:05:14,392 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2602, 1.3121, 1.5398, 1.6770, 1.2029, 1.5770, 1.5188, 1.3650], + device='cuda:2'), covar=tensor([0.3264, 0.3571, 0.1587, 0.1956, 0.3638, 0.1840, 0.3800, 0.2745], + device='cuda:2'), in_proj_covar=tensor([0.0890, 0.0953, 0.0710, 0.0929, 0.0869, 0.0803, 0.0834, 0.0775], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 19:05:15,732 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6952, 3.3478, 2.6703, 3.1121, 0.8250, 3.3235, 3.1587, 3.4909], + device='cuda:2'), covar=tensor([0.0837, 0.0874, 0.1741, 0.0978, 0.3876, 0.0904, 0.0926, 0.1262], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0407, 0.0488, 0.0342, 0.0398, 0.0424, 0.0419, 0.0455], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:05:20,614 INFO [train.py:937] (2/4) Epoch 21, validation: loss=0.1693, simple_loss=0.2693, pruned_loss=0.03465, over 944034.00 frames. +2023-04-02 19:05:20,614 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 19:05:21,110 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3230, 1.3245, 1.4901, 1.4711, 1.8752, 1.8760, 1.8701, 0.6219], + device='cuda:2'), covar=tensor([0.2348, 0.4167, 0.2622, 0.1899, 0.1481, 0.2167, 0.1301, 0.4546], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0638, 0.0706, 0.0482, 0.0618, 0.0527, 0.0660, 0.0546], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:05:24,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 19:05:57,548 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,382 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,513 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:23,819 INFO [train.py:903] (2/4) Epoch 21, batch 3050, loss[loss=0.1945, simple_loss=0.2872, pruned_loss=0.05093, over 19800.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2911, pruned_loss=0.06638, over 3790826.13 frames. ], batch size: 56, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:06:48,011 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.171e+02 6.119e+02 7.965e+02 1.649e+03, threshold=1.224e+03, percent-clipped=3.0 +2023-04-02 19:06:57,082 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139638.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:03,647 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:08,081 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2614, 1.3166, 1.7746, 1.2907, 2.7812, 3.7455, 3.4308, 3.8580], + device='cuda:2'), covar=tensor([0.1579, 0.3823, 0.3214, 0.2407, 0.0562, 0.0163, 0.0193, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0319, 0.0349, 0.0263, 0.0241, 0.0185, 0.0216, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 19:07:14,493 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:24,504 INFO [train.py:903] (2/4) Epoch 21, batch 3100, loss[loss=0.2221, simple_loss=0.3044, pruned_loss=0.06994, over 19604.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2907, pruned_loss=0.06591, over 3797934.99 frames. ], batch size: 61, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:07:34,122 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139667.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:25,943 INFO [train.py:903] (2/4) Epoch 21, batch 3150, loss[loss=0.2369, simple_loss=0.3157, pruned_loss=0.07907, over 19571.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.06575, over 3806675.66 frames. ], batch size: 61, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:08:32,315 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:51,310 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.084e+02 6.643e+02 9.166e+02 2.493e+03, threshold=1.329e+03, percent-clipped=12.0 +2023-04-02 19:08:52,499 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 19:09:19,214 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139753.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:09:28,038 INFO [train.py:903] (2/4) Epoch 21, batch 3200, loss[loss=0.2339, simple_loss=0.312, pruned_loss=0.07789, over 19763.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06601, over 3810634.53 frames. 
], batch size: 63, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:09:36,896 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:01,994 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:13,286 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5747, 1.5515, 1.9012, 1.8454, 3.1552, 4.8033, 4.6593, 5.2470], + device='cuda:2'), covar=tensor([0.1510, 0.3628, 0.3274, 0.2115, 0.0578, 0.0194, 0.0164, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0322, 0.0352, 0.0265, 0.0243, 0.0186, 0.0217, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 19:10:27,865 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:30,923 INFO [train.py:903] (2/4) Epoch 21, batch 3250, loss[loss=0.2113, simple_loss=0.2937, pruned_loss=0.06445, over 19698.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2892, pruned_loss=0.06556, over 3809473.50 frames. ], batch size: 59, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:10:46,099 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139822.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:48,426 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:55,283 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.059e+02 5.030e+02 6.558e+02 8.674e+02 2.471e+03, threshold=1.312e+03, percent-clipped=9.0 +2023-04-02 19:11:32,326 INFO [train.py:903] (2/4) Epoch 21, batch 3300, loss[loss=0.2491, simple_loss=0.3329, pruned_loss=0.08265, over 19315.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2887, pruned_loss=0.06563, over 3804370.41 frames. ], batch size: 66, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:11:35,811 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 19:11:43,160 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:12:34,958 INFO [train.py:903] (2/4) Epoch 21, batch 3350, loss[loss=0.222, simple_loss=0.3076, pruned_loss=0.06814, over 19513.00 frames. ], tot_loss[loss=0.21, simple_loss=0.289, pruned_loss=0.06553, over 3814322.25 frames. 
], batch size: 64, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:12:43,071 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1996, 1.3044, 1.7076, 1.3785, 2.6647, 3.8376, 3.5393, 3.9711], + device='cuda:2'), covar=tensor([0.1641, 0.3694, 0.3307, 0.2312, 0.0610, 0.0186, 0.0202, 0.0268], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0321, 0.0350, 0.0263, 0.0242, 0.0185, 0.0216, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 19:13:00,884 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.734e+02 5.660e+02 7.256e+02 1.171e+03, threshold=1.132e+03, percent-clipped=0.0 +2023-04-02 19:13:04,630 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139933.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:17,288 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:37,533 INFO [train.py:903] (2/4) Epoch 21, batch 3400, loss[loss=0.2184, simple_loss=0.306, pruned_loss=0.06539, over 19763.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2884, pruned_loss=0.06537, over 3822663.58 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:13:42,149 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7801, 1.2296, 1.5710, 1.4834, 3.3343, 1.1457, 2.4449, 3.7582], + device='cuda:2'), covar=tensor([0.0458, 0.2911, 0.2868, 0.1977, 0.0682, 0.2534, 0.1312, 0.0223], + device='cuda:2'), in_proj_covar=tensor([0.0407, 0.0363, 0.0382, 0.0343, 0.0369, 0.0346, 0.0373, 0.0398], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:13:52,384 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:20,972 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9275, 1.8113, 1.5766, 1.9964, 1.6391, 1.5930, 1.5195, 1.7803], + device='cuda:2'), covar=tensor([0.1102, 0.1347, 0.1525, 0.0973, 0.1355, 0.0634, 0.1586, 0.0802], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0356, 0.0309, 0.0249, 0.0299, 0.0250, 0.0309, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:14:22,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:41,307 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140009.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:42,107 INFO [train.py:903] (2/4) Epoch 21, batch 3450, loss[loss=0.2093, simple_loss=0.2948, pruned_loss=0.06189, over 19530.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06631, over 3821637.89 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:14:43,247 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 19:14:46,246 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. 
limit=5.0 +2023-04-02 19:14:56,556 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:06,287 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.068e+02 6.577e+02 8.644e+02 2.362e+03, threshold=1.315e+03, percent-clipped=9.0 +2023-04-02 19:15:11,059 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140034.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:27,221 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:28,338 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:41,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:42,379 INFO [train.py:903] (2/4) Epoch 21, batch 3500, loss[loss=0.2462, simple_loss=0.3201, pruned_loss=0.08613, over 19460.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2904, pruned_loss=0.06629, over 3827866.49 frames. ], batch size: 64, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:45,564 INFO [train.py:903] (2/4) Epoch 21, batch 3550, loss[loss=0.1832, simple_loss=0.2594, pruned_loss=0.05348, over 19294.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.29, pruned_loss=0.06606, over 3822665.85 frames. ], batch size: 44, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:59,759 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:08,461 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:10,318 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.095e+02 6.549e+02 8.089e+02 2.006e+03, threshold=1.310e+03, percent-clipped=2.0 +2023-04-02 19:17:12,581 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140131.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:34,939 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8957, 4.3574, 4.6220, 4.6093, 1.6948, 4.3643, 3.7366, 4.3387], + device='cuda:2'), covar=tensor([0.1621, 0.0796, 0.0550, 0.0631, 0.5772, 0.0873, 0.0629, 0.1054], + device='cuda:2'), in_proj_covar=tensor([0.0774, 0.0732, 0.0935, 0.0822, 0.0824, 0.0694, 0.0567, 0.0870], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 19:17:36,889 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140151.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:36,954 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.9387, 5.3683, 3.1587, 4.6626, 1.0747, 5.4234, 5.3528, 5.4367], + device='cuda:2'), covar=tensor([0.0385, 0.0742, 0.1691, 0.0720, 0.3953, 0.0565, 0.0738, 0.0970], + device='cuda:2'), in_proj_covar=tensor([0.0501, 0.0412, 0.0493, 0.0348, 0.0404, 0.0431, 0.0425, 0.0462], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:17:47,030 INFO [train.py:903] (2/4) Epoch 21, batch 3600, loss[loss=0.2289, simple_loss=0.3092, pruned_loss=0.07429, over 19670.00 frames. 
], tot_loss[loss=0.211, simple_loss=0.2901, pruned_loss=0.06591, over 3830457.41 frames. ], batch size: 60, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:17:56,434 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140166.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:56,705 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2816, 2.0091, 1.5323, 1.3818, 1.8497, 1.1755, 1.3599, 1.7425], + device='cuda:2'), covar=tensor([0.0931, 0.0738, 0.1110, 0.0805, 0.0497, 0.1312, 0.0586, 0.0438], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0314, 0.0337, 0.0262, 0.0246, 0.0336, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:17:58,702 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:18:34,571 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5116, 2.5954, 2.1274, 2.5402, 2.4662, 2.0534, 1.9316, 2.4592], + device='cuda:2'), covar=tensor([0.1012, 0.1390, 0.1439, 0.1148, 0.1285, 0.0535, 0.1525, 0.0679], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0360, 0.0311, 0.0251, 0.0301, 0.0252, 0.0310, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:18:51,572 INFO [train.py:903] (2/4) Epoch 21, batch 3650, loss[loss=0.223, simple_loss=0.3049, pruned_loss=0.07056, over 19695.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.0651, over 3832174.24 frames. ], batch size: 59, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:18:54,131 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:15,567 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.123e+02 6.079e+02 7.474e+02 1.635e+03, threshold=1.216e+03, percent-clipped=1.0 +2023-04-02 19:19:37,403 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140246.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:54,181 INFO [train.py:903] (2/4) Epoch 21, batch 3700, loss[loss=0.1961, simple_loss=0.2799, pruned_loss=0.05618, over 19683.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2878, pruned_loss=0.06455, over 3845253.82 frames. 
], batch size: 53, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:20:01,705 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140266.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:15,858 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:20,531 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:22,755 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:24,910 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0423, 4.4149, 4.7335, 4.7198, 1.8124, 4.3911, 3.8277, 4.4063], + device='cuda:2'), covar=tensor([0.1563, 0.0832, 0.0538, 0.0652, 0.5738, 0.0892, 0.0641, 0.1105], + device='cuda:2'), in_proj_covar=tensor([0.0765, 0.0723, 0.0923, 0.0811, 0.0812, 0.0687, 0.0560, 0.0859], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 19:20:49,156 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140304.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:55,390 INFO [train.py:903] (2/4) Epoch 21, batch 3750, loss[loss=0.2533, simple_loss=0.3181, pruned_loss=0.09424, over 19680.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2877, pruned_loss=0.06448, over 3837351.73 frames. ], batch size: 60, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:21:02,643 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:17,850 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:20,287 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140329.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:22,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.554e+02 6.128e+02 7.118e+02 1.255e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-02 19:21:33,835 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:40,762 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9939, 1.2434, 1.5717, 0.9572, 2.2678, 3.0025, 2.7228, 3.2037], + device='cuda:2'), covar=tensor([0.1743, 0.3922, 0.3540, 0.2739, 0.0662, 0.0232, 0.0267, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0320, 0.0348, 0.0262, 0.0242, 0.0184, 0.0215, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 19:21:57,849 INFO [train.py:903] (2/4) Epoch 21, batch 3800, loss[loss=0.1863, simple_loss=0.2623, pruned_loss=0.05517, over 19820.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06523, over 3819924.18 frames. ], batch size: 48, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:22:30,441 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-02 19:22:54,682 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:23:01,171 INFO [train.py:903] (2/4) Epoch 21, batch 3850, loss[loss=0.2226, simple_loss=0.3192, pruned_loss=0.06302, over 19667.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06546, over 3818945.87 frames. ], batch size: 55, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:23:02,824 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3992, 1.4046, 1.6113, 1.5902, 2.2679, 2.1652, 2.3594, 0.8117], + device='cuda:2'), covar=tensor([0.2309, 0.4168, 0.2546, 0.1761, 0.1422, 0.1934, 0.1304, 0.4299], + device='cuda:2'), in_proj_covar=tensor([0.0531, 0.0639, 0.0705, 0.0483, 0.0616, 0.0528, 0.0660, 0.0545], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:23:25,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 5.035e+02 6.153e+02 7.922e+02 1.662e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-04-02 19:23:33,491 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-02 19:24:03,183 INFO [train.py:903] (2/4) Epoch 21, batch 3900, loss[loss=0.2253, simple_loss=0.3133, pruned_loss=0.06867, over 19542.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06568, over 3814743.04 frames. ], batch size: 56, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:24:04,900 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7602, 2.6848, 2.3339, 2.5702, 2.4358, 2.0895, 2.1093, 2.5551], + device='cuda:2'), covar=tensor([0.1035, 0.1700, 0.1618, 0.1158, 0.1656, 0.0709, 0.1697, 0.0751], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0360, 0.0311, 0.0250, 0.0301, 0.0252, 0.0310, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:24:09,105 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:10,664 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9993, 2.0629, 2.2865, 2.5778, 1.9640, 2.4848, 2.3310, 2.1726], + device='cuda:2'), covar=tensor([0.4079, 0.3726, 0.1789, 0.2274, 0.3966, 0.2036, 0.4475, 0.2998], + device='cuda:2'), in_proj_covar=tensor([0.0891, 0.0953, 0.0714, 0.0928, 0.0871, 0.0808, 0.0836, 0.0777], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 19:24:17,255 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:55,673 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140502.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:01,331 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140507.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:04,341 INFO [train.py:903] (2/4) Epoch 21, batch 3950, loss[loss=0.2022, simple_loss=0.2886, pruned_loss=0.05785, over 19594.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06627, over 3789994.31 frames. ], batch size: 57, lr: 3.91e-03, grad_scale: 4.0 +2023-04-02 19:25:10,222 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-02 19:25:20,317 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140522.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,781 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,878 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:31,859 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.763e+02 5.824e+02 7.544e+02 1.413e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 19:25:40,205 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:42,463 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:51,818 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:07,097 INFO [train.py:903] (2/4) Epoch 21, batch 4000, loss[loss=0.1839, simple_loss=0.2684, pruned_loss=0.04972, over 19582.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2903, pruned_loss=0.066, over 3781105.05 frames. ], batch size: 52, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:26:09,758 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140562.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:13,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:33,100 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:36,721 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:41,286 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:55,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 19:27:09,329 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:11,210 INFO [train.py:903] (2/4) Epoch 21, batch 4050, loss[loss=0.216, simple_loss=0.2885, pruned_loss=0.07177, over 19418.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2898, pruned_loss=0.06582, over 3766885.67 frames. 
], batch size: 48, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:27:25,059 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:36,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 5.165e+02 6.710e+02 8.332e+02 1.443e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-02 19:27:37,937 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0924, 1.9466, 1.7691, 2.0374, 1.7766, 1.7544, 1.6241, 1.9546], + device='cuda:2'), covar=tensor([0.1036, 0.1418, 0.1442, 0.1016, 0.1410, 0.0576, 0.1525, 0.0735], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0359, 0.0311, 0.0249, 0.0300, 0.0252, 0.0310, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:28:13,404 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 19:28:13,819 INFO [train.py:903] (2/4) Epoch 21, batch 4100, loss[loss=0.251, simple_loss=0.3302, pruned_loss=0.08593, over 19581.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06623, over 3767495.93 frames. ], batch size: 61, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:28:52,815 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 19:29:08,488 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.79 vs. limit=5.0 +2023-04-02 19:29:15,740 INFO [train.py:903] (2/4) Epoch 21, batch 4150, loss[loss=0.2269, simple_loss=0.3091, pruned_loss=0.07235, over 18768.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.29, pruned_loss=0.0653, over 3783792.09 frames. ], batch size: 74, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:29:42,680 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.318e+02 6.390e+02 7.957e+02 1.686e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 19:29:49,985 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:29:55,913 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140741.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:05,106 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:17,538 INFO [train.py:903] (2/4) Epoch 21, batch 4200, loss[loss=0.224, simple_loss=0.308, pruned_loss=0.07001, over 19308.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2904, pruned_loss=0.06518, over 3800282.73 frames. ], batch size: 70, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:30:24,373 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 19:31:21,341 INFO [train.py:903] (2/4) Epoch 21, batch 4250, loss[loss=0.1871, simple_loss=0.262, pruned_loss=0.05609, over 19769.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2901, pruned_loss=0.06503, over 3804177.58 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:31:40,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-02 19:31:42,804 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:31:47,367 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 5.291e+02 6.401e+02 8.566e+02 1.506e+03, threshold=1.280e+03, percent-clipped=6.0 +2023-04-02 19:31:50,827 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 19:31:53,739 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:01,546 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:13,493 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:24,304 INFO [train.py:903] (2/4) Epoch 21, batch 4300, loss[loss=0.1908, simple_loss=0.2788, pruned_loss=0.05144, over 18076.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2906, pruned_loss=0.06527, over 3801119.41 frames. ], batch size: 83, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:32:25,882 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:29,400 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:33,873 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:37,167 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:33:18,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 19:33:24,433 INFO [train.py:903] (2/4) Epoch 21, batch 4350, loss[loss=0.2013, simple_loss=0.2925, pruned_loss=0.05502, over 19619.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06572, over 3803380.24 frames. ], batch size: 57, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:33:51,509 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.814e+02 5.846e+02 6.905e+02 1.613e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 19:33:57,247 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:25,325 INFO [train.py:903] (2/4) Epoch 21, batch 4400, loss[loss=0.1802, simple_loss=0.2618, pruned_loss=0.04932, over 19775.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2892, pruned_loss=0.06603, over 3815187.08 frames. ], batch size: 48, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:34:32,100 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:33,406 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140966.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:53,839 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. 
Duration: 25.285 +2023-04-02 19:34:58,750 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:02,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 19:35:05,716 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:23,744 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 19:35:27,475 INFO [train.py:903] (2/4) Epoch 21, batch 4450, loss[loss=0.1936, simple_loss=0.2832, pruned_loss=0.05201, over 19676.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.29, pruned_loss=0.0666, over 3807240.96 frames. ], batch size: 59, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:35:37,880 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141017.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:54,627 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.226e+02 6.450e+02 8.222e+02 2.218e+03, threshold=1.290e+03, percent-clipped=9.0 +2023-04-02 19:36:32,488 INFO [train.py:903] (2/4) Epoch 21, batch 4500, loss[loss=0.1715, simple_loss=0.2483, pruned_loss=0.04735, over 19753.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.289, pruned_loss=0.06588, over 3815211.84 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:36:38,409 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8170, 3.4810, 2.4643, 3.0989, 1.0158, 3.4054, 3.2794, 3.3983], + device='cuda:2'), covar=tensor([0.0918, 0.1067, 0.1932, 0.0956, 0.3615, 0.0872, 0.1030, 0.1253], + device='cuda:2'), in_proj_covar=tensor([0.0494, 0.0404, 0.0484, 0.0342, 0.0394, 0.0425, 0.0417, 0.0453], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:37:01,373 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141085.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:34,668 INFO [train.py:903] (2/4) Epoch 21, batch 4550, loss[loss=0.1913, simple_loss=0.2676, pruned_loss=0.05745, over 19406.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2887, pruned_loss=0.06574, over 3827561.18 frames. ], batch size: 48, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:37,510 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3499, 2.3860, 2.5929, 3.1158, 2.2983, 2.9620, 2.5702, 2.3230], + device='cuda:2'), covar=tensor([0.4134, 0.3740, 0.1779, 0.2414, 0.4342, 0.2015, 0.4620, 0.3216], + device='cuda:2'), in_proj_covar=tensor([0.0891, 0.0957, 0.0713, 0.0930, 0.0872, 0.0809, 0.0838, 0.0777], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 19:37:46,076 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 19:37:46,503 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:59,728 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.292e+02 5.246e+02 6.195e+02 7.478e+02 1.454e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-04-02 19:38:10,835 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 19:38:18,676 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:35,234 INFO [train.py:903] (2/4) Epoch 21, batch 4600, loss[loss=0.2365, simple_loss=0.317, pruned_loss=0.07799, over 17118.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2895, pruned_loss=0.06617, over 3810246.90 frames. ], batch size: 101, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:38:36,699 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:49,781 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:25,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141200.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:36,153 INFO [train.py:903] (2/4) Epoch 21, batch 4650, loss[loss=0.2223, simple_loss=0.2965, pruned_loss=0.0741, over 19608.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2891, pruned_loss=0.06591, over 3815758.83 frames. ], batch size: 50, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:39:52,969 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141222.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:56,079 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 19:40:02,331 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 4.513e+02 5.853e+02 7.712e+02 1.984e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-02 19:40:05,981 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 19:40:15,464 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:21,219 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:37,896 INFO [train.py:903] (2/4) Epoch 21, batch 4700, loss[loss=0.2254, simple_loss=0.2967, pruned_loss=0.07712, over 19588.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2881, pruned_loss=0.06543, over 3806370.26 frames. ], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:40:47,249 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:00,528 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 19:41:00,656 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:08,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:38,493 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141309.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:39,488 INFO [train.py:903] (2/4) Epoch 21, batch 4750, loss[loss=0.246, simple_loss=0.3237, pruned_loss=0.08416, over 19696.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2888, pruned_loss=0.06546, over 3802157.49 frames. 
], batch size: 59, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:42:03,070 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.546e+02 5.235e+02 6.308e+02 8.195e+02 2.468e+03, threshold=1.262e+03, percent-clipped=8.0 +2023-04-02 19:42:25,000 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:42:39,819 INFO [train.py:903] (2/4) Epoch 21, batch 4800, loss[loss=0.2779, simple_loss=0.3368, pruned_loss=0.1095, over 13732.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2893, pruned_loss=0.06521, over 3809200.92 frames. ], batch size: 136, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:43:23,162 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:43:41,787 INFO [train.py:903] (2/4) Epoch 21, batch 4850, loss[loss=0.1836, simple_loss=0.256, pruned_loss=0.05562, over 19724.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2884, pruned_loss=0.06534, over 3803679.96 frames. ], batch size: 46, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:01,423 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:04,792 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2750, 2.9922, 2.3354, 2.3470, 2.1968, 2.5586, 1.0597, 2.1837], + device='cuda:2'), covar=tensor([0.0602, 0.0568, 0.0647, 0.1016, 0.1029, 0.1182, 0.1387, 0.1004], + device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0350, 0.0353, 0.0379, 0.0458, 0.0383, 0.0332, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:44:07,727 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 19:44:08,842 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.192e+02 6.534e+02 8.985e+02 2.500e+03, threshold=1.307e+03, percent-clipped=12.0 +2023-04-02 19:44:13,645 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141435.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:44:28,282 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 19:44:33,272 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 19:44:34,436 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-02 19:44:39,545 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:41,986 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4074, 1.4079, 1.5559, 1.5469, 1.7894, 1.9225, 1.7599, 0.5589], + device='cuda:2'), covar=tensor([0.2356, 0.4221, 0.2556, 0.1921, 0.1625, 0.2186, 0.1433, 0.4652], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0640, 0.0708, 0.0482, 0.0615, 0.0530, 0.0659, 0.0547], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:44:44,224 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:45,061 INFO [train.py:903] (2/4) Epoch 21, batch 4900, loss[loss=0.2107, simple_loss=0.2939, pruned_loss=0.0638, over 19112.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2891, pruned_loss=0.06551, over 3812550.41 frames. ], batch size: 69, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:47,086 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 19:45:05,751 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 19:45:10,386 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141481.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:35,680 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.41 vs. limit=5.0 +2023-04-02 19:45:40,783 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141505.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:46,102 INFO [train.py:903] (2/4) Epoch 21, batch 4950, loss[loss=0.2224, simple_loss=0.2857, pruned_loss=0.07957, over 19412.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2897, pruned_loss=0.06601, over 3826764.11 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:03,681 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 19:46:10,479 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.116e+02 6.031e+02 7.721e+02 1.403e+03, threshold=1.206e+03, percent-clipped=1.0 +2023-04-02 19:46:26,561 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141542.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:46:28,471 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 19:46:32,396 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0701, 1.7171, 1.9313, 2.8285, 2.1184, 2.4172, 2.4847, 2.1224], + device='cuda:2'), covar=tensor([0.0787, 0.0923, 0.0928, 0.0718, 0.0779, 0.0715, 0.0840, 0.0627], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0222, 0.0225, 0.0240, 0.0224, 0.0211, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 19:46:46,923 INFO [train.py:903] (2/4) Epoch 21, batch 5000, loss[loss=0.2008, simple_loss=0.2947, pruned_loss=0.05346, over 19615.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2883, pruned_loss=0.06501, over 3828869.55 frames. 
], batch size: 61, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:49,271 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1518, 2.0801, 1.9478, 2.2284, 2.0143, 1.8125, 1.8970, 2.0669], + device='cuda:2'), covar=tensor([0.0853, 0.1176, 0.1120, 0.0752, 0.1047, 0.0499, 0.1125, 0.0580], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0357, 0.0310, 0.0248, 0.0298, 0.0251, 0.0308, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 19:46:53,583 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 19:46:55,089 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141567.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:47:08,267 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 19:47:48,203 INFO [train.py:903] (2/4) Epoch 21, batch 5050, loss[loss=0.2205, simple_loss=0.3049, pruned_loss=0.06805, over 19659.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2894, pruned_loss=0.06533, over 3832620.19 frames. ], batch size: 55, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:48:03,190 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:16,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.325e+02 4.754e+02 5.832e+02 6.826e+02 1.423e+03, threshold=1.166e+03, percent-clipped=2.0 +2023-04-02 19:48:20,700 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 19:48:21,982 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 19:48:25,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 19:48:39,172 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:53,397 INFO [train.py:903] (2/4) Epoch 21, batch 5100, loss[loss=0.2242, simple_loss=0.308, pruned_loss=0.07019, over 19675.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2888, pruned_loss=0.0651, over 3816723.86 frames. ], batch size: 58, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:49:04,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 19:49:07,002 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 19:49:11,617 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 19:49:12,037 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141675.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:17,704 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:30,077 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:50,675 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141705.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:56,352 INFO [train.py:903] (2/4) Epoch 21, batch 5150, loss[loss=0.2427, simple_loss=0.3183, pruned_loss=0.08353, over 19416.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06602, over 3805593.66 frames. ], batch size: 70, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:50:09,420 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 19:50:20,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 4.996e+02 6.424e+02 8.131e+02 1.633e+03, threshold=1.285e+03, percent-clipped=6.0 +2023-04-02 19:50:33,486 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3615, 1.4393, 1.7268, 1.5593, 2.7084, 2.2304, 2.9223, 1.1988], + device='cuda:2'), covar=tensor([0.2650, 0.4532, 0.2930, 0.2118, 0.1525, 0.2278, 0.1426, 0.4650], + device='cuda:2'), in_proj_covar=tensor([0.0533, 0.0641, 0.0711, 0.0485, 0.0619, 0.0533, 0.0662, 0.0550], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:50:46,528 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 19:50:58,082 INFO [train.py:903] (2/4) Epoch 21, batch 5200, loss[loss=0.2027, simple_loss=0.2902, pruned_loss=0.05758, over 18725.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.289, pruned_loss=0.06533, over 3806444.76 frames. ], batch size: 74, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:51:14,043 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 19:51:22,169 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141779.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:51:51,429 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141803.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:54,959 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:58,266 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 19:51:59,437 INFO [train.py:903] (2/4) Epoch 21, batch 5250, loss[loss=0.2504, simple_loss=0.3124, pruned_loss=0.09421, over 13380.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2893, pruned_loss=0.06525, over 3802481.39 frames. ], batch size: 135, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:52:28,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.884e+02 6.185e+02 7.574e+02 1.457e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 19:53:02,560 INFO [train.py:903] (2/4) Epoch 21, batch 5300, loss[loss=0.2402, simple_loss=0.3095, pruned_loss=0.08548, over 19644.00 frames. 
], tot_loss[loss=0.2116, simple_loss=0.2905, pruned_loss=0.06632, over 3797358.69 frames. ], batch size: 58, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:53:23,409 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:53:24,174 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 19:53:36,402 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1059, 1.9694, 1.8340, 1.7142, 1.5177, 1.7406, 0.4826, 1.0749], + device='cuda:2'), covar=tensor([0.0582, 0.0591, 0.0437, 0.0717, 0.1147, 0.0854, 0.1271, 0.1002], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0352, 0.0355, 0.0380, 0.0458, 0.0384, 0.0333, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 19:53:45,703 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141894.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 19:53:55,942 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:06,887 INFO [train.py:903] (2/4) Epoch 21, batch 5350, loss[loss=0.2419, simple_loss=0.3262, pruned_loss=0.0788, over 19635.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06564, over 3793213.29 frames. ], batch size: 57, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:54:16,544 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:32,496 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.997e+02 5.942e+02 7.628e+02 1.099e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-02 19:54:35,114 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141934.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:43,717 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 19:54:51,483 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:55:07,460 INFO [train.py:903] (2/4) Epoch 21, batch 5400, loss[loss=0.1739, simple_loss=0.2542, pruned_loss=0.04685, over 19739.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06598, over 3804929.96 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:10,558 INFO [train.py:903] (2/4) Epoch 21, batch 5450, loss[loss=0.2457, simple_loss=0.319, pruned_loss=0.0862, over 19678.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2906, pruned_loss=0.06591, over 3815342.19 frames. ], batch size: 60, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:39,853 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.354e+02 6.392e+02 8.231e+02 1.329e+03, threshold=1.278e+03, percent-clipped=1.0 +2023-04-02 19:57:14,043 INFO [train.py:903] (2/4) Epoch 21, batch 5500, loss[loss=0.195, simple_loss=0.2737, pruned_loss=0.0581, over 19762.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2911, pruned_loss=0.06614, over 3795303.51 frames. 
], batch size: 54, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:57:18,015 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:57:41,998 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 19:57:48,312 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:58:17,745 INFO [train.py:903] (2/4) Epoch 21, batch 5550, loss[loss=0.1812, simple_loss=0.2558, pruned_loss=0.05334, over 19763.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2895, pruned_loss=0.06532, over 3800243.09 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:58:26,200 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 19:58:43,634 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.904e+02 5.778e+02 7.569e+02 2.193e+03, threshold=1.156e+03, percent-clipped=4.0 +2023-04-02 19:59:07,795 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142150.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 19:59:15,742 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 19:59:19,356 INFO [train.py:903] (2/4) Epoch 21, batch 5600, loss[loss=0.2358, simple_loss=0.3142, pruned_loss=0.07866, over 19574.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06582, over 3803428.76 frames. ], batch size: 61, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:59:36,299 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:59:37,410 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142175.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:00:08,598 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142199.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:00:20,408 INFO [train.py:903] (2/4) Epoch 21, batch 5650, loss[loss=0.213, simple_loss=0.2885, pruned_loss=0.06872, over 19743.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2897, pruned_loss=0.06583, over 3800864.52 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:00:36,107 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5275, 2.2115, 1.6939, 1.3814, 2.0662, 1.2034, 1.4264, 1.9566], + device='cuda:2'), covar=tensor([0.1187, 0.0893, 0.1184, 0.1029, 0.0613, 0.1483, 0.0814, 0.0526], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0315, 0.0338, 0.0263, 0.0246, 0.0338, 0.0291, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:00:49,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 4.896e+02 5.835e+02 7.915e+02 1.772e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-02 20:01:10,302 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 20:01:22,627 INFO [train.py:903] (2/4) Epoch 21, batch 5700, loss[loss=0.1792, simple_loss=0.2498, pruned_loss=0.05424, over 19757.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2891, pruned_loss=0.06559, over 3810276.32 frames. 
], batch size: 47, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:01:45,770 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:01:59,748 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:02:26,052 INFO [train.py:903] (2/4) Epoch 21, batch 5750, loss[loss=0.1862, simple_loss=0.2547, pruned_loss=0.0588, over 19319.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.0665, over 3816437.53 frames. ], batch size: 44, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:02:28,334 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 20:02:36,336 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 20:02:41,012 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 20:02:51,479 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.170e+02 6.700e+02 8.460e+02 1.665e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-02 20:03:26,598 INFO [train.py:903] (2/4) Epoch 21, batch 5800, loss[loss=0.2504, simple_loss=0.3132, pruned_loss=0.09382, over 13345.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2909, pruned_loss=0.06651, over 3806856.91 frames. ], batch size: 136, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:03:59,389 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 20:04:08,905 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:22,760 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:28,049 INFO [train.py:903] (2/4) Epoch 21, batch 5850, loss[loss=0.1939, simple_loss=0.2756, pruned_loss=0.05612, over 18223.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06601, over 3808933.21 frames. 
], batch size: 83, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:34,184 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2362, 1.9540, 1.9956, 2.8670, 1.9384, 2.4841, 2.5449, 2.2416], + device='cuda:2'), covar=tensor([0.0758, 0.0862, 0.0952, 0.0734, 0.0873, 0.0694, 0.0790, 0.0639], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0239, 0.0224, 0.0210, 0.0186, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 20:04:46,794 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2298, 1.3076, 1.2491, 1.1107, 1.0395, 1.0961, 0.1837, 0.3719], + device='cuda:2'), covar=tensor([0.0701, 0.0612, 0.0424, 0.0550, 0.1256, 0.0680, 0.1224, 0.1094], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0354, 0.0357, 0.0382, 0.0459, 0.0386, 0.0335, 0.0342], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 20:04:57,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.411e+02 4.867e+02 6.014e+02 7.504e+02 1.855e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 20:05:31,487 INFO [train.py:903] (2/4) Epoch 21, batch 5900, loss[loss=0.2006, simple_loss=0.2893, pruned_loss=0.05597, over 18544.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06642, over 3798416.32 frames. ], batch size: 84, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:05:35,049 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 20:05:36,572 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4093, 1.3431, 1.4307, 1.4590, 2.9894, 1.0647, 2.4029, 3.4293], + device='cuda:2'), covar=tensor([0.0523, 0.2644, 0.2961, 0.1815, 0.0732, 0.2491, 0.1148, 0.0257], + device='cuda:2'), in_proj_covar=tensor([0.0406, 0.0363, 0.0383, 0.0346, 0.0373, 0.0347, 0.0374, 0.0398], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:05:58,750 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 20:06:36,831 INFO [train.py:903] (2/4) Epoch 21, batch 5950, loss[loss=0.2751, simple_loss=0.3393, pruned_loss=0.1054, over 19755.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2917, pruned_loss=0.06671, over 3800114.61 frames. ], batch size: 63, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:02,050 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.752e+02 5.691e+02 7.550e+02 2.003e+03, threshold=1.138e+03, percent-clipped=5.0 +2023-04-02 20:07:37,099 INFO [train.py:903] (2/4) Epoch 21, batch 6000, loss[loss=0.1608, simple_loss=0.2406, pruned_loss=0.04052, over 19732.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06625, over 3804755.05 frames. ], batch size: 46, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:37,100 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 20:07:50,394 INFO [train.py:937] (2/4) Epoch 21, validation: loss=0.1692, simple_loss=0.2693, pruned_loss=0.03459, over 944034.00 frames. 
+2023-04-02 20:07:50,395 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 20:08:26,836 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:08:52,465 INFO [train.py:903] (2/4) Epoch 21, batch 6050, loss[loss=0.1871, simple_loss=0.2599, pruned_loss=0.05715, over 18677.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2895, pruned_loss=0.06545, over 3811969.53 frames. ], batch size: 41, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:03,417 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2434, 2.2936, 2.5234, 3.1377, 2.3041, 2.9733, 2.5985, 2.3122], + device='cuda:2'), covar=tensor([0.4260, 0.4129, 0.1878, 0.2491, 0.4566, 0.2235, 0.4562, 0.3384], + device='cuda:2'), in_proj_covar=tensor([0.0893, 0.0955, 0.0713, 0.0927, 0.0872, 0.0809, 0.0835, 0.0776], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 20:09:18,887 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.812e+02 5.741e+02 7.692e+02 1.541e+03, threshold=1.148e+03, percent-clipped=3.0 +2023-04-02 20:09:41,436 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:09:53,720 INFO [train.py:903] (2/4) Epoch 21, batch 6100, loss[loss=0.2245, simple_loss=0.3055, pruned_loss=0.07174, over 19287.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06464, over 3827153.03 frames. ], batch size: 66, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:55,353 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:12,506 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142674.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:28,127 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:55,896 INFO [train.py:903] (2/4) Epoch 21, batch 6150, loss[loss=0.1936, simple_loss=0.2847, pruned_loss=0.05124, over 19667.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06509, over 3826010.89 frames. ], batch size: 58, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:11:25,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.643e+02 5.511e+02 6.918e+02 9.659e+02 2.206e+03, threshold=1.384e+03, percent-clipped=13.0 +2023-04-02 20:11:26,171 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 20:11:36,751 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:11:59,015 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1326, 1.3282, 1.6998, 1.1931, 2.5813, 3.5194, 3.1974, 3.7449], + device='cuda:2'), covar=tensor([0.1733, 0.3867, 0.3439, 0.2548, 0.0627, 0.0198, 0.0220, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0319, 0.0349, 0.0262, 0.0240, 0.0184, 0.0214, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 20:11:59,800 INFO [train.py:903] (2/4) Epoch 21, batch 6200, loss[loss=0.1962, simple_loss=0.2875, pruned_loss=0.05242, over 19691.00 frames. 
], tot_loss[loss=0.2093, simple_loss=0.2889, pruned_loss=0.06483, over 3828554.45 frames. ], batch size: 59, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:12:03,976 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 20:12:14,772 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5926, 1.6580, 1.9627, 1.8533, 1.4067, 1.7721, 1.9736, 1.8193], + device='cuda:2'), covar=tensor([0.3953, 0.3672, 0.1857, 0.2341, 0.3833, 0.2216, 0.4768, 0.3241], + device='cuda:2'), in_proj_covar=tensor([0.0890, 0.0955, 0.0712, 0.0924, 0.0871, 0.0809, 0.0835, 0.0773], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 20:13:02,732 INFO [train.py:903] (2/4) Epoch 21, batch 6250, loss[loss=0.2204, simple_loss=0.301, pruned_loss=0.06995, over 18099.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2886, pruned_loss=0.06506, over 3822645.06 frames. ], batch size: 83, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:13:09,063 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3582, 3.5088, 2.1086, 2.2762, 3.1082, 1.8135, 1.7612, 2.4308], + device='cuda:2'), covar=tensor([0.1312, 0.0704, 0.1107, 0.0866, 0.0588, 0.1287, 0.0949, 0.0690], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0262, 0.0245, 0.0335, 0.0291, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:13:28,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 5.305e+02 6.117e+02 7.859e+02 2.157e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 20:13:30,680 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 20:14:05,179 INFO [train.py:903] (2/4) Epoch 21, batch 6300, loss[loss=0.1903, simple_loss=0.2745, pruned_loss=0.05303, over 19603.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.289, pruned_loss=0.06471, over 3820276.26 frames. ], batch size: 61, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:07,132 INFO [train.py:903] (2/4) Epoch 21, batch 6350, loss[loss=0.2628, simple_loss=0.3303, pruned_loss=0.09762, over 17496.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2896, pruned_loss=0.06475, over 3823128.51 frames. ], batch size: 101, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:11,948 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2222, 1.2858, 1.7129, 0.9657, 2.3299, 3.1039, 2.7969, 3.2929], + device='cuda:2'), covar=tensor([0.1461, 0.3693, 0.3187, 0.2495, 0.0579, 0.0196, 0.0246, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0319, 0.0348, 0.0262, 0.0240, 0.0184, 0.0214, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 20:15:36,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.232e+02 4.679e+02 5.550e+02 7.220e+02 1.923e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-02 20:15:39,971 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:16:11,264 INFO [train.py:903] (2/4) Epoch 21, batch 6400, loss[loss=0.1892, simple_loss=0.2751, pruned_loss=0.05161, over 19776.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2887, pruned_loss=0.06456, over 3828675.37 frames. 
], batch size: 54, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:14,711 INFO [train.py:903] (2/4) Epoch 21, batch 6450, loss[loss=0.2159, simple_loss=0.2983, pruned_loss=0.06678, over 19757.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2891, pruned_loss=0.06481, over 3818578.78 frames. ], batch size: 63, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:40,515 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.995e+02 6.270e+02 8.275e+02 2.312e+03, threshold=1.254e+03, percent-clipped=6.0 +2023-04-02 20:18:01,467 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 20:18:04,913 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:18:16,200 INFO [train.py:903] (2/4) Epoch 21, batch 6500, loss[loss=0.1947, simple_loss=0.2743, pruned_loss=0.05753, over 19582.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2893, pruned_loss=0.06491, over 3826402.74 frames. ], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:18:23,415 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 20:18:48,310 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.24 vs. limit=5.0 +2023-04-02 20:18:48,996 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143086.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:19:16,583 INFO [train.py:903] (2/4) Epoch 21, batch 6550, loss[loss=0.1724, simple_loss=0.256, pruned_loss=0.0444, over 19744.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2892, pruned_loss=0.06511, over 3810504.51 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:19:44,580 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 5.073e+02 6.169e+02 7.633e+02 2.146e+03, threshold=1.234e+03, percent-clipped=4.0 +2023-04-02 20:19:46,047 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0239, 3.6557, 2.4212, 3.2857, 0.8181, 3.6024, 3.4482, 3.5624], + device='cuda:2'), covar=tensor([0.0781, 0.1107, 0.2082, 0.0928, 0.3988, 0.0783, 0.1060, 0.1053], + device='cuda:2'), in_proj_covar=tensor([0.0501, 0.0406, 0.0491, 0.0345, 0.0400, 0.0429, 0.0421, 0.0456], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:20:19,906 INFO [train.py:903] (2/4) Epoch 21, batch 6600, loss[loss=0.1899, simple_loss=0.2624, pruned_loss=0.0587, over 19744.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2895, pruned_loss=0.06507, over 3804304.63 frames. ], batch size: 46, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:11,136 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:21:22,420 INFO [train.py:903] (2/4) Epoch 21, batch 6650, loss[loss=0.1933, simple_loss=0.2811, pruned_loss=0.05277, over 19666.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2895, pruned_loss=0.06496, over 3817115.98 frames. ], batch size: 58, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:46,324 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. 
limit=5.0 +2023-04-02 20:21:47,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.940e+02 5.672e+02 7.065e+02 1.538e+03, threshold=1.134e+03, percent-clipped=2.0 +2023-04-02 20:22:23,655 INFO [train.py:903] (2/4) Epoch 21, batch 6700, loss[loss=0.2259, simple_loss=0.3059, pruned_loss=0.07294, over 19554.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2893, pruned_loss=0.06484, over 3820608.79 frames. ], batch size: 61, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:19,553 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:23,648 INFO [train.py:903] (2/4) Epoch 21, batch 6750, loss[loss=0.2111, simple_loss=0.2989, pruned_loss=0.06169, over 19609.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.0645, over 3814101.47 frames. ], batch size: 57, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:48,066 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143331.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:48,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.995e+02 6.197e+02 7.772e+02 2.067e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-02 20:24:20,248 INFO [train.py:903] (2/4) Epoch 21, batch 6800, loss[loss=0.2317, simple_loss=0.3084, pruned_loss=0.07745, over 19101.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2897, pruned_loss=0.06499, over 3804513.38 frames. ], batch size: 69, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:24:21,776 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8262, 4.3713, 2.8302, 3.8367, 1.0005, 4.3005, 4.2484, 4.3096], + device='cuda:2'), covar=tensor([0.0539, 0.0864, 0.1836, 0.0822, 0.3805, 0.0714, 0.0857, 0.1099], + device='cuda:2'), in_proj_covar=tensor([0.0494, 0.0401, 0.0486, 0.0341, 0.0395, 0.0423, 0.0418, 0.0451], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:25:06,018 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 20:25:07,061 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 20:25:09,787 INFO [train.py:903] (2/4) Epoch 22, batch 0, loss[loss=0.1935, simple_loss=0.2714, pruned_loss=0.05776, over 19607.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2714, pruned_loss=0.05776, over 19607.00 frames. ], batch size: 50, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:25:09,787 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 20:25:18,296 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3639, 1.3403, 1.3753, 1.7662, 1.4459, 1.5871, 1.5957, 1.5424], + device='cuda:2'), covar=tensor([0.0818, 0.0908, 0.0945, 0.0611, 0.0940, 0.0833, 0.0918, 0.0670], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-02 20:25:20,463 INFO [train.py:937] (2/4) Epoch 22, validation: loss=0.1683, simple_loss=0.2691, pruned_loss=0.03373, over 944034.00 frames. 
+2023-04-02 20:25:20,463 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 20:25:29,860 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3262, 3.7887, 3.9262, 3.9705, 1.7268, 3.7475, 3.3057, 3.6675], + device='cuda:2'), covar=tensor([0.1537, 0.0964, 0.0678, 0.0711, 0.5645, 0.0996, 0.0716, 0.1202], + device='cuda:2'), in_proj_covar=tensor([0.0776, 0.0734, 0.0943, 0.0827, 0.0825, 0.0701, 0.0568, 0.0874], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 20:25:31,914 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 20:25:55,298 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143418.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:26:14,246 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.908e+02 5.891e+02 8.006e+02 1.582e+03, threshold=1.178e+03, percent-clipped=4.0 +2023-04-02 20:26:21,017 INFO [train.py:903] (2/4) Epoch 22, batch 50, loss[loss=0.1751, simple_loss=0.2547, pruned_loss=0.04775, over 19785.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2859, pruned_loss=0.06231, over 871321.94 frames. ], batch size: 48, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:26:42,109 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:26:47,965 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9298, 1.7668, 1.6008, 1.9421, 1.6849, 1.6468, 1.5711, 1.8227], + device='cuda:2'), covar=tensor([0.1025, 0.1359, 0.1447, 0.0976, 0.1199, 0.0540, 0.1391, 0.0714], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0354, 0.0308, 0.0247, 0.0296, 0.0248, 0.0307, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:26:53,922 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 20:27:13,779 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143482.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:27:19,985 INFO [train.py:903] (2/4) Epoch 22, batch 100, loss[loss=0.2043, simple_loss=0.285, pruned_loss=0.06182, over 19733.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.291, pruned_loss=0.0657, over 1534326.53 frames. ], batch size: 51, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:27:23,821 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143491.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:27:31,520 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 20:28:12,239 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.227e+02 6.391e+02 8.671e+02 1.540e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 20:28:19,046 INFO [train.py:903] (2/4) Epoch 22, batch 150, loss[loss=0.2301, simple_loss=0.3126, pruned_loss=0.07379, over 19677.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2892, pruned_loss=0.06536, over 2045333.09 frames. 
], batch size: 55, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:28:45,109 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0595, 3.4508, 2.0442, 2.0555, 3.0383, 1.7328, 1.6125, 2.2453], + device='cuda:2'), covar=tensor([0.1366, 0.0585, 0.1053, 0.0809, 0.0542, 0.1292, 0.0924, 0.0680], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0313, 0.0334, 0.0260, 0.0245, 0.0335, 0.0289, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:29:18,917 INFO [train.py:903] (2/4) Epoch 22, batch 200, loss[loss=0.193, simple_loss=0.2584, pruned_loss=0.0638, over 19755.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.06477, over 2424517.64 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:29:18,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 20:29:32,292 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 20:30:12,489 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.534e+02 5.144e+02 6.083e+02 7.742e+02 1.350e+03, threshold=1.217e+03, percent-clipped=1.0 +2023-04-02 20:30:20,919 INFO [train.py:903] (2/4) Epoch 22, batch 250, loss[loss=0.2521, simple_loss=0.3293, pruned_loss=0.08747, over 19695.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06402, over 2752134.64 frames. ], batch size: 59, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:30:33,627 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.31 vs. limit=5.0 +2023-04-02 20:31:20,920 INFO [train.py:903] (2/4) Epoch 22, batch 300, loss[loss=0.1912, simple_loss=0.2704, pruned_loss=0.056, over 19616.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2874, pruned_loss=0.06436, over 2991817.73 frames. ], batch size: 50, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:15,079 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.028e+02 5.065e+02 6.247e+02 8.237e+02 1.383e+03, threshold=1.249e+03, percent-clipped=3.0 +2023-04-02 20:32:22,204 INFO [train.py:903] (2/4) Epoch 22, batch 350, loss[loss=0.217, simple_loss=0.2977, pruned_loss=0.06815, over 19802.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2876, pruned_loss=0.06396, over 3189242.00 frames. ], batch size: 56, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:29,136 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 20:32:51,101 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143762.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:33:20,934 INFO [train.py:903] (2/4) Epoch 22, batch 400, loss[loss=0.2372, simple_loss=0.3095, pruned_loss=0.08248, over 18031.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2883, pruned_loss=0.06438, over 3332110.01 frames. ], batch size: 83, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:15,302 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.215e+02 6.557e+02 8.093e+02 2.351e+03, threshold=1.311e+03, percent-clipped=8.0 +2023-04-02 20:34:17,792 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143835.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:34:20,915 INFO [train.py:903] (2/4) Epoch 22, batch 450, loss[loss=0.184, simple_loss=0.2765, pruned_loss=0.04577, over 19680.00 frames. 
], tot_loss[loss=0.2085, simple_loss=0.2885, pruned_loss=0.06423, over 3457167.75 frames. ], batch size: 59, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:57,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 20:34:58,981 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 20:35:08,554 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143877.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:35:22,927 INFO [train.py:903] (2/4) Epoch 22, batch 500, loss[loss=0.1774, simple_loss=0.2598, pruned_loss=0.04747, over 19781.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.06452, over 3533434.89 frames. ], batch size: 48, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:17,498 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 5.123e+02 6.359e+02 8.434e+02 1.804e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 20:36:23,289 INFO [train.py:903] (2/4) Epoch 22, batch 550, loss[loss=0.1904, simple_loss=0.2654, pruned_loss=0.05767, over 19405.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.289, pruned_loss=0.06441, over 3596912.02 frames. ], batch size: 48, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:37,270 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143950.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:37:23,303 INFO [train.py:903] (2/4) Epoch 22, batch 600, loss[loss=0.1843, simple_loss=0.2613, pruned_loss=0.05364, over 19811.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2884, pruned_loss=0.06433, over 3639117.77 frames. ], batch size: 49, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:38:02,563 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0369, 2.1058, 2.3399, 2.7562, 1.9833, 2.6496, 2.3734, 2.1863], + device='cuda:2'), covar=tensor([0.4332, 0.4158, 0.1893, 0.2387, 0.4162, 0.2046, 0.4865, 0.3372], + device='cuda:2'), in_proj_covar=tensor([0.0894, 0.0961, 0.0715, 0.0932, 0.0875, 0.0813, 0.0843, 0.0777], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 20:38:06,646 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 20:38:17,789 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 4.914e+02 6.190e+02 8.004e+02 1.732e+03, threshold=1.238e+03, percent-clipped=3.0 +2023-04-02 20:38:23,572 INFO [train.py:903] (2/4) Epoch 22, batch 650, loss[loss=0.2092, simple_loss=0.2941, pruned_loss=0.06218, over 19660.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.06376, over 3688344.83 frames. ], batch size: 55, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:39:19,199 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7150, 4.2582, 4.4695, 4.5110, 1.5691, 4.2374, 3.6944, 4.1731], + device='cuda:2'), covar=tensor([0.1716, 0.0907, 0.0624, 0.0668, 0.6382, 0.0951, 0.0685, 0.1205], + device='cuda:2'), in_proj_covar=tensor([0.0773, 0.0733, 0.0936, 0.0821, 0.0825, 0.0695, 0.0565, 0.0866], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 20:39:26,362 INFO [train.py:903] (2/4) Epoch 22, batch 700, loss[loss=0.2045, simple_loss=0.2849, pruned_loss=0.06209, over 19663.00 frames. 
], tot_loss[loss=0.2066, simple_loss=0.2868, pruned_loss=0.06317, over 3720763.70 frames. ], batch size: 53, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:39:58,303 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2418, 2.0560, 1.9630, 1.8109, 1.6032, 1.7647, 0.5478, 1.1483], + device='cuda:2'), covar=tensor([0.0560, 0.0578, 0.0444, 0.0691, 0.1074, 0.0906, 0.1260, 0.1045], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0353, 0.0353, 0.0377, 0.0455, 0.0383, 0.0331, 0.0338], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 20:40:19,657 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.796e+02 6.107e+02 7.975e+02 1.533e+03, threshold=1.221e+03, percent-clipped=5.0 +2023-04-02 20:40:20,064 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144133.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:40:26,351 INFO [train.py:903] (2/4) Epoch 22, batch 750, loss[loss=0.1868, simple_loss=0.271, pruned_loss=0.05123, over 19658.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06372, over 3744196.46 frames. ], batch size: 53, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:40:49,201 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144158.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:41:26,351 INFO [train.py:903] (2/4) Epoch 22, batch 800, loss[loss=0.2278, simple_loss=0.3014, pruned_loss=0.07711, over 17273.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2869, pruned_loss=0.06363, over 3765608.66 frames. ], batch size: 101, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:41:44,773 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 20:41:48,041 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144206.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:42:02,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6873, 1.6779, 1.6525, 1.4483, 1.3244, 1.4228, 0.2668, 0.6836], + device='cuda:2'), covar=tensor([0.0667, 0.0607, 0.0388, 0.0594, 0.1246, 0.0747, 0.1266, 0.1085], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0354, 0.0355, 0.0378, 0.0457, 0.0384, 0.0332, 0.0339], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 20:42:09,762 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3452, 3.0728, 2.1556, 2.7657, 0.8288, 3.0549, 2.8816, 3.0125], + device='cuda:2'), covar=tensor([0.0995, 0.1255, 0.2019, 0.1026, 0.3698, 0.0909, 0.1081, 0.1335], + device='cuda:2'), in_proj_covar=tensor([0.0501, 0.0409, 0.0491, 0.0344, 0.0403, 0.0429, 0.0424, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:42:10,155 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-02 20:42:19,065 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144231.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:42:20,959 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.021e+02 6.351e+02 8.019e+02 1.751e+03, threshold=1.270e+03, percent-clipped=5.0 +2023-04-02 20:42:26,670 INFO [train.py:903] (2/4) Epoch 22, batch 850, loss[loss=0.1863, simple_loss=0.2653, pruned_loss=0.05366, over 19481.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2865, pruned_loss=0.06375, over 3779151.33 frames. ], batch size: 49, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:42:41,804 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 20:42:56,618 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4792, 1.6300, 1.9027, 1.7899, 2.7061, 2.3366, 2.7642, 1.2977], + device='cuda:2'), covar=tensor([0.2430, 0.4077, 0.2617, 0.1858, 0.1459, 0.2064, 0.1481, 0.4263], + device='cuda:2'), in_proj_covar=tensor([0.0531, 0.0643, 0.0713, 0.0482, 0.0619, 0.0530, 0.0662, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 20:43:19,884 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 20:43:20,194 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7842, 1.4431, 1.6377, 1.6574, 3.3498, 1.2421, 2.4908, 3.8165], + device='cuda:2'), covar=tensor([0.0453, 0.2693, 0.2654, 0.1749, 0.0651, 0.2452, 0.1154, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0404, 0.0362, 0.0381, 0.0343, 0.0371, 0.0348, 0.0373, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:43:26,414 INFO [train.py:903] (2/4) Epoch 22, batch 900, loss[loss=0.1666, simple_loss=0.2537, pruned_loss=0.03976, over 19590.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2858, pruned_loss=0.063, over 3795762.71 frames. ], batch size: 50, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:44:21,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.404e+02 5.111e+02 6.361e+02 7.451e+02 1.172e+03, threshold=1.272e+03, percent-clipped=0.0 +2023-04-02 20:44:26,095 INFO [train.py:903] (2/4) Epoch 22, batch 950, loss[loss=0.2214, simple_loss=0.3064, pruned_loss=0.06824, over 19741.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2873, pruned_loss=0.06426, over 3798927.97 frames. ], batch size: 63, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:44:30,666 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 20:45:27,345 INFO [train.py:903] (2/4) Epoch 22, batch 1000, loss[loss=0.2085, simple_loss=0.2943, pruned_loss=0.0614, over 19689.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2884, pruned_loss=0.06485, over 3804231.79 frames. ], batch size: 58, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:17,094 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=144429.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:46:17,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 20:46:22,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 5.215e+02 6.539e+02 8.059e+02 1.779e+03, threshold=1.308e+03, percent-clipped=4.0 +2023-04-02 20:46:26,887 INFO [train.py:903] (2/4) Epoch 22, batch 1050, loss[loss=0.1939, simple_loss=0.28, pruned_loss=0.05387, over 19779.00 frames. ], tot_loss[loss=0.211, simple_loss=0.29, pruned_loss=0.06595, over 3780654.64 frames. ], batch size: 56, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:46:28,320 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0981, 5.4895, 2.9688, 4.6076, 1.1477, 5.6549, 5.4828, 5.6607], + device='cuda:2'), covar=tensor([0.0328, 0.0729, 0.1806, 0.0756, 0.3956, 0.0544, 0.0778, 0.0862], + device='cuda:2'), in_proj_covar=tensor([0.0495, 0.0405, 0.0485, 0.0341, 0.0398, 0.0425, 0.0420, 0.0455], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:47:00,684 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 20:47:26,633 INFO [train.py:903] (2/4) Epoch 22, batch 1100, loss[loss=0.2036, simple_loss=0.2692, pruned_loss=0.06894, over 19304.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06565, over 3791085.71 frames. ], batch size: 44, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:48:21,825 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.380e+02 5.103e+02 6.169e+02 7.547e+02 2.403e+03, threshold=1.234e+03, percent-clipped=2.0 +2023-04-02 20:48:27,965 INFO [train.py:903] (2/4) Epoch 22, batch 1150, loss[loss=0.19, simple_loss=0.2636, pruned_loss=0.05821, over 19778.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2882, pruned_loss=0.06468, over 3816457.05 frames. ], batch size: 49, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:48:53,395 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7980, 1.8871, 2.1411, 2.2525, 1.7015, 2.2010, 2.1414, 1.9386], + device='cuda:2'), covar=tensor([0.4169, 0.3537, 0.1913, 0.2313, 0.3935, 0.2106, 0.4911, 0.3372], + device='cuda:2'), in_proj_covar=tensor([0.0893, 0.0960, 0.0716, 0.0932, 0.0875, 0.0813, 0.0841, 0.0779], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 20:49:28,350 INFO [train.py:903] (2/4) Epoch 22, batch 1200, loss[loss=0.1937, simple_loss=0.2787, pruned_loss=0.05437, over 19552.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2873, pruned_loss=0.06412, over 3799999.60 frames. ], batch size: 54, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:50:00,809 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 20:50:23,751 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.877e+02 6.112e+02 7.869e+02 2.071e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 20:50:27,117 INFO [train.py:903] (2/4) Epoch 22, batch 1250, loss[loss=0.1875, simple_loss=0.2627, pruned_loss=0.05615, over 19736.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2877, pruned_loss=0.06442, over 3812143.05 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:51:28,173 INFO [train.py:903] (2/4) Epoch 22, batch 1300, loss[loss=0.2305, simple_loss=0.3154, pruned_loss=0.07282, over 19582.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.288, pruned_loss=0.06412, over 3807845.18 frames. 
], batch size: 61, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:26,778 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.679e+02 5.951e+02 8.140e+02 2.957e+03, threshold=1.190e+03, percent-clipped=7.0 +2023-04-02 20:52:30,257 INFO [train.py:903] (2/4) Epoch 22, batch 1350, loss[loss=0.1988, simple_loss=0.2837, pruned_loss=0.05696, over 19483.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2891, pruned_loss=0.06469, over 3803524.03 frames. ], batch size: 64, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:39,361 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6879, 2.5055, 2.3253, 2.7730, 2.5468, 2.3267, 2.2675, 2.6353], + device='cuda:2'), covar=tensor([0.0923, 0.1665, 0.1476, 0.1067, 0.1379, 0.0579, 0.1366, 0.0729], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0358, 0.0313, 0.0250, 0.0300, 0.0251, 0.0310, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:53:08,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8853, 1.4610, 1.7794, 1.5493, 3.4851, 1.2195, 2.3480, 3.9139], + device='cuda:2'), covar=tensor([0.0493, 0.2657, 0.2608, 0.1942, 0.0733, 0.2553, 0.1461, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0405, 0.0363, 0.0383, 0.0345, 0.0371, 0.0348, 0.0376, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:53:12,545 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=144773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:53:31,337 INFO [train.py:903] (2/4) Epoch 22, batch 1400, loss[loss=0.2208, simple_loss=0.298, pruned_loss=0.07178, over 19519.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2896, pruned_loss=0.06503, over 3813256.12 frames. ], batch size: 56, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:54:28,427 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.383e+02 4.808e+02 5.945e+02 7.380e+02 1.517e+03, threshold=1.189e+03, percent-clipped=2.0 +2023-04-02 20:54:29,515 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 20:54:31,679 INFO [train.py:903] (2/4) Epoch 22, batch 1450, loss[loss=0.206, simple_loss=0.2709, pruned_loss=0.07053, over 19389.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.06469, over 3820191.73 frames. 
], batch size: 48, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:54:47,786 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.4145, 4.9507, 3.1169, 4.3934, 1.3720, 4.9616, 4.7785, 5.0367], + device='cuda:2'), covar=tensor([0.0393, 0.0814, 0.1873, 0.0777, 0.3749, 0.0560, 0.0817, 0.0876], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0412, 0.0495, 0.0345, 0.0405, 0.0433, 0.0426, 0.0462], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:54:53,310 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3161, 3.5244, 2.2413, 2.1292, 3.2789, 2.0188, 1.7998, 2.4145], + device='cuda:2'), covar=tensor([0.1150, 0.0593, 0.0942, 0.0844, 0.0458, 0.1090, 0.0833, 0.0629], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0315, 0.0335, 0.0263, 0.0246, 0.0334, 0.0289, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:55:30,805 INFO [train.py:903] (2/4) Epoch 22, batch 1500, loss[loss=0.2123, simple_loss=0.2957, pruned_loss=0.06442, over 19390.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2898, pruned_loss=0.06508, over 3817578.92 frames. ], batch size: 70, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:55:31,121 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:56:27,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.957e+02 5.987e+02 8.036e+02 1.770e+03, threshold=1.197e+03, percent-clipped=5.0 +2023-04-02 20:56:31,417 INFO [train.py:903] (2/4) Epoch 22, batch 1550, loss[loss=0.1908, simple_loss=0.2726, pruned_loss=0.05445, over 19589.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2892, pruned_loss=0.06457, over 3804690.54 frames. ], batch size: 50, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:57:30,407 INFO [train.py:903] (2/4) Epoch 22, batch 1600, loss[loss=0.1843, simple_loss=0.2571, pruned_loss=0.05569, over 19389.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2896, pruned_loss=0.06455, over 3823958.25 frames. ], batch size: 48, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:57:48,908 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1102, 1.3037, 1.4501, 1.4634, 2.7061, 1.0558, 2.2246, 3.0875], + device='cuda:2'), covar=tensor([0.0607, 0.2827, 0.2976, 0.1802, 0.0804, 0.2463, 0.1172, 0.0333], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0365, 0.0388, 0.0349, 0.0375, 0.0351, 0.0378, 0.0404], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 20:57:50,814 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 20:58:02,383 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:58:27,682 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.884e+02 5.870e+02 7.908e+02 1.403e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-02 20:58:31,135 INFO [train.py:903] (2/4) Epoch 22, batch 1650, loss[loss=0.2171, simple_loss=0.3008, pruned_loss=0.06674, over 19663.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2885, pruned_loss=0.06368, over 3833831.27 frames. 
], batch size: 53, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:58:39,536 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:27,032 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:31,371 INFO [train.py:903] (2/4) Epoch 22, batch 1700, loss[loss=0.2008, simple_loss=0.289, pruned_loss=0.05629, over 19659.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2886, pruned_loss=0.06346, over 3831060.05 frames. ], batch size: 58, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:59:52,392 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-02 21:00:08,582 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 21:00:28,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 4.845e+02 6.228e+02 7.810e+02 2.223e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 21:00:33,038 INFO [train.py:903] (2/4) Epoch 22, batch 1750, loss[loss=0.2043, simple_loss=0.2839, pruned_loss=0.06236, over 19587.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2886, pruned_loss=0.0634, over 3835496.57 frames. ], batch size: 61, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:00:40,308 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145144.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:09,157 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145169.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:31,679 INFO [train.py:903] (2/4) Epoch 22, batch 1800, loss[loss=0.1995, simple_loss=0.2808, pruned_loss=0.05908, over 19780.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2889, pruned_loss=0.06374, over 3820944.95 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:01:55,270 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9318, 1.2308, 1.5972, 0.6106, 2.0819, 2.4299, 2.1295, 2.6172], + device='cuda:2'), covar=tensor([0.1639, 0.3811, 0.3318, 0.2807, 0.0615, 0.0288, 0.0366, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 21:02:27,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.845e+02 5.086e+02 5.993e+02 7.804e+02 1.410e+03, threshold=1.199e+03, percent-clipped=2.0 +2023-04-02 21:02:27,983 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 21:02:31,523 INFO [train.py:903] (2/4) Epoch 22, batch 1850, loss[loss=0.2215, simple_loss=0.3008, pruned_loss=0.07112, over 19652.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2886, pruned_loss=0.06397, over 3813985.69 frames. ], batch size: 53, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:04,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 21:03:30,820 INFO [train.py:903] (2/4) Epoch 22, batch 1900, loss[loss=0.2127, simple_loss=0.2987, pruned_loss=0.06336, over 19593.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2874, pruned_loss=0.0634, over 3826074.28 frames. 
], batch size: 61, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:45,070 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9764, 4.4312, 4.8129, 4.7723, 1.7696, 4.4475, 3.8711, 4.5252], + device='cuda:2'), covar=tensor([0.1667, 0.0780, 0.0541, 0.0647, 0.6304, 0.0826, 0.0650, 0.1058], + device='cuda:2'), in_proj_covar=tensor([0.0775, 0.0737, 0.0934, 0.0825, 0.0825, 0.0698, 0.0561, 0.0872], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 21:03:48,270 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 21:03:52,771 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 21:04:15,276 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 21:04:26,521 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.830e+02 5.286e+02 6.049e+02 6.874e+02 1.450e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 21:04:30,790 INFO [train.py:903] (2/4) Epoch 22, batch 1950, loss[loss=0.2252, simple_loss=0.292, pruned_loss=0.07918, over 18708.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2877, pruned_loss=0.06375, over 3819374.53 frames. ], batch size: 41, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:04:44,278 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6444, 2.6422, 2.4531, 2.7403, 2.6491, 2.4083, 2.3107, 2.7904], + device='cuda:2'), covar=tensor([0.0976, 0.1494, 0.1365, 0.1061, 0.1258, 0.0496, 0.1283, 0.0602], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0356, 0.0312, 0.0249, 0.0300, 0.0250, 0.0309, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:04:55,827 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:31,548 INFO [train.py:903] (2/4) Epoch 22, batch 2000, loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.0429, over 19643.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2874, pruned_loss=0.06351, over 3812961.16 frames. ], batch size: 53, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:05:32,818 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:32,926 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:19,692 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:27,594 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 4.801e+02 6.060e+02 7.916e+02 1.266e+03, threshold=1.212e+03, percent-clipped=1.0 +2023-04-02 21:06:27,628 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. 
Duration: 26.8349375 +2023-04-02 21:06:30,126 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0086, 1.2210, 1.5436, 0.8315, 2.2624, 3.0287, 2.7048, 3.2364], + device='cuda:2'), covar=tensor([0.1729, 0.3935, 0.3593, 0.2765, 0.0635, 0.0235, 0.0266, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0321, 0.0352, 0.0265, 0.0243, 0.0186, 0.0216, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 21:06:30,885 INFO [train.py:903] (2/4) Epoch 22, batch 2050, loss[loss=0.2625, simple_loss=0.3396, pruned_loss=0.0927, over 19600.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2873, pruned_loss=0.06335, over 3815359.85 frames. ], batch size: 57, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:06:46,543 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 21:06:46,572 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 21:07:06,397 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 21:07:13,302 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:07:30,900 INFO [train.py:903] (2/4) Epoch 22, batch 2100, loss[loss=0.2266, simple_loss=0.3032, pruned_loss=0.07501, over 19741.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2888, pruned_loss=0.06456, over 3800407.16 frames. ], batch size: 63, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:07:51,393 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:08:01,441 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 21:08:22,530 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 21:08:27,087 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 4.926e+02 6.113e+02 7.931e+02 1.598e+03, threshold=1.223e+03, percent-clipped=5.0 +2023-04-02 21:08:30,642 INFO [train.py:903] (2/4) Epoch 22, batch 2150, loss[loss=0.205, simple_loss=0.2922, pruned_loss=0.05886, over 19660.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.0647, over 3806103.59 frames. ], batch size: 60, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:08:38,978 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:09:32,290 INFO [train.py:903] (2/4) Epoch 22, batch 2200, loss[loss=0.199, simple_loss=0.29, pruned_loss=0.05399, over 17534.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2886, pruned_loss=0.065, over 3794356.18 frames. 
], batch size: 101, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:10:00,330 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145612.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:10:08,468 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5739, 1.5224, 1.4342, 1.9712, 1.6632, 1.8560, 1.9335, 1.7299], + device='cuda:2'), covar=tensor([0.0810, 0.0853, 0.1013, 0.0690, 0.0718, 0.0691, 0.0785, 0.0640], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0221, 0.0226, 0.0240, 0.0225, 0.0212, 0.0186, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 21:10:29,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.056e+02 6.179e+02 8.064e+02 2.249e+03, threshold=1.236e+03, percent-clipped=3.0 +2023-04-02 21:10:32,067 INFO [train.py:903] (2/4) Epoch 22, batch 2250, loss[loss=0.1712, simple_loss=0.2541, pruned_loss=0.04416, over 19750.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2884, pruned_loss=0.06501, over 3787882.07 frames. ], batch size: 46, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:11:01,347 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:11:33,806 INFO [train.py:903] (2/4) Epoch 22, batch 2300, loss[loss=0.159, simple_loss=0.237, pruned_loss=0.04049, over 19770.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2875, pruned_loss=0.06397, over 3797728.54 frames. ], batch size: 48, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:11:45,970 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 21:12:22,753 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:27,063 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:30,302 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.899e+02 6.109e+02 7.402e+02 2.135e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 21:12:32,754 INFO [train.py:903] (2/4) Epoch 22, batch 2350, loss[loss=0.1967, simple_loss=0.2797, pruned_loss=0.05688, over 19517.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2878, pruned_loss=0.06439, over 3788198.54 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:12:54,273 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:01,054 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:14,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 21:13:31,581 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 21:13:31,991 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:35,674 INFO [train.py:903] (2/4) Epoch 22, batch 2400, loss[loss=0.2797, simple_loss=0.348, pruned_loss=0.1057, over 19776.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2881, pruned_loss=0.06421, over 3788951.67 frames. 
], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:13:48,778 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:20,066 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:34,015 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.826e+02 5.747e+02 7.009e+02 1.532e+03, threshold=1.149e+03, percent-clipped=5.0 +2023-04-02 21:14:36,516 INFO [train.py:903] (2/4) Epoch 22, batch 2450, loss[loss=0.1986, simple_loss=0.2869, pruned_loss=0.05515, over 19668.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2885, pruned_loss=0.06433, over 3796020.67 frames. ], batch size: 59, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:14:37,990 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145839.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:14:41,434 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9263, 1.1838, 1.6152, 0.5932, 2.0756, 2.4628, 2.1893, 2.6368], + device='cuda:2'), covar=tensor([0.1587, 0.3900, 0.3256, 0.2787, 0.0602, 0.0275, 0.0355, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0319, 0.0350, 0.0263, 0.0242, 0.0185, 0.0215, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 21:14:49,301 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:15:37,464 INFO [train.py:903] (2/4) Epoch 22, batch 2500, loss[loss=0.1947, simple_loss=0.2806, pruned_loss=0.05441, over 19714.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2883, pruned_loss=0.06397, over 3815696.40 frames. ], batch size: 51, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:01,289 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7001, 1.4504, 1.5538, 1.5268, 3.3011, 1.1998, 2.4186, 3.7145], + device='cuda:2'), covar=tensor([0.0533, 0.2670, 0.2783, 0.1877, 0.0681, 0.2398, 0.1242, 0.0249], + device='cuda:2'), in_proj_covar=tensor([0.0410, 0.0367, 0.0388, 0.0351, 0.0376, 0.0350, 0.0381, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:16:34,429 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.066e+02 4.836e+02 5.791e+02 7.519e+02 1.267e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 21:16:36,601 INFO [train.py:903] (2/4) Epoch 22, batch 2550, loss[loss=0.2297, simple_loss=0.3103, pruned_loss=0.07458, over 19669.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2888, pruned_loss=0.06435, over 3816721.96 frames. 
], batch size: 53, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:38,048 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1887, 5.5568, 3.0262, 4.8852, 1.0420, 5.8251, 5.6363, 5.8147], + device='cuda:2'), covar=tensor([0.0375, 0.0827, 0.1791, 0.0651, 0.4047, 0.0431, 0.0626, 0.0779], + device='cuda:2'), in_proj_covar=tensor([0.0502, 0.0409, 0.0491, 0.0343, 0.0401, 0.0430, 0.0424, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:16:47,209 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0199, 1.0114, 1.2753, 1.4004, 2.4672, 1.2138, 2.3738, 2.9056], + device='cuda:2'), covar=tensor([0.0718, 0.3621, 0.3528, 0.2133, 0.1124, 0.2625, 0.1175, 0.0469], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0349, 0.0374, 0.0349, 0.0379, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:16:54,917 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1156, 2.8225, 2.1749, 2.1544, 2.0258, 2.5192, 1.0969, 2.0725], + device='cuda:2'), covar=tensor([0.0641, 0.0614, 0.0716, 0.1094, 0.1061, 0.0931, 0.1283, 0.0957], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0357, 0.0357, 0.0381, 0.0458, 0.0385, 0.0335, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:16:59,323 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145956.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:17:33,989 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 21:17:38,146 INFO [train.py:903] (2/4) Epoch 22, batch 2600, loss[loss=0.2149, simple_loss=0.2977, pruned_loss=0.06601, over 17621.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06439, over 3828762.81 frames. ], batch size: 101, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:17:59,426 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:18:00,059 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 21:18:05,441 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-04-02 21:18:22,946 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1022, 4.4285, 4.8221, 4.7985, 1.8866, 4.5005, 3.9233, 4.5399], + device='cuda:2'), covar=tensor([0.1614, 0.0825, 0.0560, 0.0677, 0.5789, 0.0853, 0.0658, 0.1137], + device='cuda:2'), in_proj_covar=tensor([0.0778, 0.0738, 0.0939, 0.0826, 0.0827, 0.0702, 0.0564, 0.0876], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 21:18:31,806 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5382, 1.4223, 1.4704, 2.1713, 1.7274, 1.7847, 1.9644, 1.6976], + device='cuda:2'), covar=tensor([0.0870, 0.0965, 0.1010, 0.0672, 0.0813, 0.0813, 0.0830, 0.0711], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0218, 0.0222, 0.0237, 0.0222, 0.0208, 0.0183, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 21:18:38,079 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.022e+02 6.231e+02 7.783e+02 1.698e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 21:18:40,366 INFO [train.py:903] (2/4) Epoch 22, batch 2650, loss[loss=0.2298, simple_loss=0.3059, pruned_loss=0.07684, over 19659.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2892, pruned_loss=0.06477, over 3809214.98 frames. ], batch size: 60, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:19:00,429 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 21:19:01,975 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5894, 1.1798, 1.2315, 1.4712, 1.1214, 1.3524, 1.2714, 1.4197], + device='cuda:2'), covar=tensor([0.1148, 0.1353, 0.1536, 0.1033, 0.1315, 0.0640, 0.1522, 0.0832], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0357, 0.0311, 0.0250, 0.0301, 0.0250, 0.0309, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:19:16,636 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-04-02 21:19:21,364 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146071.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:19:23,568 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4963, 1.4424, 1.4462, 2.0012, 1.5086, 1.7239, 1.8115, 1.6236], + device='cuda:2'), covar=tensor([0.0908, 0.0930, 0.1016, 0.0670, 0.0922, 0.0790, 0.0893, 0.0716], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0220, 0.0223, 0.0238, 0.0224, 0.0209, 0.0185, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 21:19:41,307 INFO [train.py:903] (2/4) Epoch 22, batch 2700, loss[loss=0.2003, simple_loss=0.291, pruned_loss=0.05484, over 19311.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2886, pruned_loss=0.06453, over 3821357.48 frames. 
], batch size: 66, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:20:01,754 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:20,662 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:26,601 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1491, 2.0346, 1.9410, 1.7201, 1.6451, 1.7574, 0.6042, 1.1325], + device='cuda:2'), covar=tensor([0.0578, 0.0590, 0.0418, 0.0724, 0.1112, 0.0838, 0.1260, 0.0958], + device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0356, 0.0359, 0.0381, 0.0459, 0.0388, 0.0335, 0.0342], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:20:32,045 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:39,332 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.787e+02 6.288e+02 8.148e+02 2.582e+03, threshold=1.258e+03, percent-clipped=4.0 +2023-04-02 21:20:41,746 INFO [train.py:903] (2/4) Epoch 22, batch 2750, loss[loss=0.2301, simple_loss=0.305, pruned_loss=0.07761, over 13441.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2895, pruned_loss=0.06505, over 3813662.75 frames. ], batch size: 136, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:21:18,180 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:21:37,434 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146183.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:21:43,817 INFO [train.py:903] (2/4) Epoch 22, batch 2800, loss[loss=0.2048, simple_loss=0.2785, pruned_loss=0.0655, over 19568.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2888, pruned_loss=0.06502, over 3812961.60 frames. ], batch size: 52, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:22:42,904 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 4.555e+02 5.863e+02 7.335e+02 1.249e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-02 21:22:45,126 INFO [train.py:903] (2/4) Epoch 22, batch 2850, loss[loss=0.1963, simple_loss=0.2773, pruned_loss=0.05767, over 19727.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2881, pruned_loss=0.06462, over 3810974.79 frames. ], batch size: 51, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:22:48,501 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 21:23:26,459 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3147, 2.1747, 2.1044, 1.8944, 1.7698, 1.9070, 0.6505, 1.2661], + device='cuda:2'), covar=tensor([0.0648, 0.0610, 0.0497, 0.0886, 0.1149, 0.0920, 0.1434, 0.1163], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0359, 0.0362, 0.0385, 0.0464, 0.0389, 0.0338, 0.0345], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:23:42,915 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 21:23:45,159 INFO [train.py:903] (2/4) Epoch 22, batch 2900, loss[loss=0.2241, simple_loss=0.3013, pruned_loss=0.07347, over 19607.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06404, over 3824969.63 frames. 
], batch size: 61, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:57,237 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146298.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:24:33,191 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:24:43,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 4.789e+02 5.854e+02 7.393e+02 1.532e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-04-02 21:24:45,869 INFO [train.py:903] (2/4) Epoch 22, batch 2950, loss[loss=0.2151, simple_loss=0.2974, pruned_loss=0.06638, over 19601.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2884, pruned_loss=0.06477, over 3811693.61 frames. ], batch size: 57, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:25:04,181 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:11,577 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 21:25:32,572 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:46,771 INFO [train.py:903] (2/4) Epoch 22, batch 3000, loss[loss=0.2248, simple_loss=0.3045, pruned_loss=0.07249, over 19605.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2885, pruned_loss=0.06488, over 3806040.65 frames. ], batch size: 57, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:25:46,771 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 21:25:59,193 INFO [train.py:937] (2/4) Epoch 22, validation: loss=0.1687, simple_loss=0.2687, pruned_loss=0.0344, over 944034.00 frames. +2023-04-02 21:25:59,194 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 21:26:02,604 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 21:26:16,078 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7552, 4.2635, 4.5177, 4.5179, 1.6947, 4.2262, 3.6632, 4.2515], + device='cuda:2'), covar=tensor([0.1769, 0.0776, 0.0571, 0.0694, 0.6023, 0.0870, 0.0702, 0.1078], + device='cuda:2'), in_proj_covar=tensor([0.0785, 0.0743, 0.0949, 0.0833, 0.0834, 0.0708, 0.0571, 0.0883], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 21:26:16,200 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:26:58,620 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 5.066e+02 6.686e+02 8.533e+02 1.871e+03, threshold=1.337e+03, percent-clipped=6.0 +2023-04-02 21:26:59,743 INFO [train.py:903] (2/4) Epoch 22, batch 3050, loss[loss=0.231, simple_loss=0.3085, pruned_loss=0.07677, over 19403.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2894, pruned_loss=0.06522, over 3804434.88 frames. 
], batch size: 70, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:27:21,448 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9651, 2.0460, 2.2901, 2.5981, 1.9133, 2.4937, 2.3695, 2.1824], + device='cuda:2'), covar=tensor([0.4241, 0.4018, 0.1939, 0.2309, 0.4138, 0.2085, 0.4761, 0.3303], + device='cuda:2'), in_proj_covar=tensor([0.0899, 0.0963, 0.0719, 0.0932, 0.0880, 0.0816, 0.0845, 0.0782], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 21:28:00,893 INFO [train.py:903] (2/4) Epoch 22, batch 3100, loss[loss=0.2292, simple_loss=0.3071, pruned_loss=0.07565, over 19535.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2894, pruned_loss=0.06498, over 3805668.96 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:27,625 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146511.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:28:59,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.077e+02 6.322e+02 8.082e+02 1.628e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 21:29:00,361 INFO [train.py:903] (2/4) Epoch 22, batch 3150, loss[loss=0.1806, simple_loss=0.2574, pruned_loss=0.0519, over 19378.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2895, pruned_loss=0.06499, over 3810059.73 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:29:20,745 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146554.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:29:29,193 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 21:29:51,104 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146579.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:30:00,731 INFO [train.py:903] (2/4) Epoch 22, batch 3200, loss[loss=0.2252, simple_loss=0.2997, pruned_loss=0.07537, over 19586.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2884, pruned_loss=0.06467, over 3797489.91 frames. ], batch size: 52, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:30:28,079 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:30:47,482 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:01,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 4.719e+02 5.878e+02 7.469e+02 1.229e+03, threshold=1.176e+03, percent-clipped=0.0 +2023-04-02 21:31:02,818 INFO [train.py:903] (2/4) Epoch 22, batch 3250, loss[loss=0.1961, simple_loss=0.2794, pruned_loss=0.05645, over 19678.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06503, over 3808292.02 frames. ], batch size: 53, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:31:10,857 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:11,354 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. 
limit=5.0 +2023-04-02 21:31:19,063 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1289, 3.4334, 1.9586, 2.1493, 3.0532, 1.7414, 1.5416, 2.1725], + device='cuda:2'), covar=tensor([0.1350, 0.0599, 0.1075, 0.0837, 0.0517, 0.1277, 0.0960, 0.0706], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0317, 0.0337, 0.0265, 0.0248, 0.0336, 0.0291, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:32:00,574 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-02 21:32:02,482 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4250, 1.5426, 1.9043, 1.7571, 2.5540, 2.2214, 2.7073, 1.1799], + device='cuda:2'), covar=tensor([0.2752, 0.4568, 0.2744, 0.2037, 0.1614, 0.2369, 0.1554, 0.4717], + device='cuda:2'), in_proj_covar=tensor([0.0533, 0.0640, 0.0708, 0.0480, 0.0614, 0.0526, 0.0660, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:32:03,195 INFO [train.py:903] (2/4) Epoch 22, batch 3300, loss[loss=0.2028, simple_loss=0.2896, pruned_loss=0.05798, over 19530.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2883, pruned_loss=0.06461, over 3815314.72 frames. ], batch size: 54, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:32:09,860 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 21:32:59,758 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.268e+02 6.638e+02 8.509e+02 1.642e+03, threshold=1.328e+03, percent-clipped=7.0 +2023-04-02 21:33:00,759 INFO [train.py:903] (2/4) Epoch 22, batch 3350, loss[loss=0.2838, simple_loss=0.339, pruned_loss=0.1143, over 19274.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2898, pruned_loss=0.06566, over 3818301.95 frames. ], batch size: 66, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:33:04,826 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. 
limit=5.0 +2023-04-02 21:33:25,668 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8820, 1.4130, 1.5996, 1.6274, 3.4806, 1.0988, 2.4058, 3.9813], + device='cuda:2'), covar=tensor([0.0467, 0.2850, 0.2854, 0.1879, 0.0680, 0.2608, 0.1319, 0.0197], + device='cuda:2'), in_proj_covar=tensor([0.0407, 0.0362, 0.0384, 0.0346, 0.0370, 0.0347, 0.0377, 0.0402], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:33:31,346 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9305, 4.3357, 4.6149, 4.6165, 1.6396, 4.3379, 3.7502, 4.3010], + device='cuda:2'), covar=tensor([0.1664, 0.0916, 0.0625, 0.0663, 0.6369, 0.0915, 0.0720, 0.1217], + device='cuda:2'), in_proj_covar=tensor([0.0780, 0.0742, 0.0943, 0.0828, 0.0828, 0.0707, 0.0569, 0.0878], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 21:33:57,993 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.3206, 5.7009, 3.2536, 5.0430, 1.2407, 5.8692, 5.7339, 5.9558], + device='cuda:2'), covar=tensor([0.0406, 0.0992, 0.1760, 0.0622, 0.4026, 0.0460, 0.0700, 0.0759], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0412, 0.0493, 0.0345, 0.0402, 0.0432, 0.0425, 0.0460], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:34:00,057 INFO [train.py:903] (2/4) Epoch 22, batch 3400, loss[loss=0.2168, simple_loss=0.2845, pruned_loss=0.07457, over 19822.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2893, pruned_loss=0.06496, over 3822163.79 frames. ], batch size: 48, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:36,128 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2973, 2.3727, 2.6245, 3.0737, 2.3174, 2.9844, 2.6676, 2.3917], + device='cuda:2'), covar=tensor([0.4483, 0.4066, 0.1928, 0.2682, 0.4554, 0.2208, 0.4905, 0.3379], + device='cuda:2'), in_proj_covar=tensor([0.0899, 0.0963, 0.0717, 0.0933, 0.0879, 0.0816, 0.0845, 0.0781], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 21:34:59,707 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 5.013e+02 6.159e+02 8.066e+02 2.491e+03, threshold=1.232e+03, percent-clipped=4.0 +2023-04-02 21:35:00,913 INFO [train.py:903] (2/4) Epoch 22, batch 3450, loss[loss=0.2222, simple_loss=0.3049, pruned_loss=0.06973, over 18138.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2893, pruned_loss=0.06489, over 3824165.45 frames. ], batch size: 83, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:35:03,498 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6548, 1.7047, 1.2359, 1.4898, 1.5655, 1.1077, 1.0939, 1.3843], + device='cuda:2'), covar=tensor([0.1116, 0.1107, 0.1708, 0.1111, 0.1255, 0.1392, 0.2017, 0.1103], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0354, 0.0310, 0.0249, 0.0300, 0.0250, 0.0307, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:35:04,224 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 21:35:28,819 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146862.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:35:54,051 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146882.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:36:00,710 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1422, 2.0592, 1.8443, 1.7642, 1.6060, 1.7839, 0.6583, 1.2669], + device='cuda:2'), covar=tensor([0.0587, 0.0561, 0.0491, 0.0739, 0.1043, 0.0882, 0.1270, 0.0955], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0356, 0.0359, 0.0383, 0.0460, 0.0387, 0.0337, 0.0342], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:36:01,451 INFO [train.py:903] (2/4) Epoch 22, batch 3500, loss[loss=0.234, simple_loss=0.3132, pruned_loss=0.07744, over 17249.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.06377, over 3834900.46 frames. ], batch size: 101, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:36:23,417 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:36:43,222 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1116, 1.8467, 1.9882, 2.9604, 2.0110, 2.4297, 2.4379, 2.2063], + device='cuda:2'), covar=tensor([0.0795, 0.0918, 0.0961, 0.0741, 0.0898, 0.0753, 0.0914, 0.0656], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0224, 0.0239, 0.0225, 0.0210, 0.0186, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 21:37:00,107 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.850e+02 4.615e+02 6.325e+02 8.235e+02 2.059e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 21:37:01,338 INFO [train.py:903] (2/4) Epoch 22, batch 3550, loss[loss=0.225, simple_loss=0.2962, pruned_loss=0.07687, over 19578.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2866, pruned_loss=0.06334, over 3836790.77 frames. ], batch size: 61, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:37:18,268 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:38:02,339 INFO [train.py:903] (2/4) Epoch 22, batch 3600, loss[loss=0.1737, simple_loss=0.2502, pruned_loss=0.04862, over 19755.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2864, pruned_loss=0.0631, over 3839712.46 frames. 
], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:38:02,529 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:38:40,777 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5965, 1.2565, 1.2689, 1.5450, 1.1308, 1.4128, 1.2892, 1.4539], + device='cuda:2'), covar=tensor([0.1103, 0.1218, 0.1486, 0.0981, 0.1314, 0.0596, 0.1432, 0.0787], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0353, 0.0309, 0.0249, 0.0301, 0.0250, 0.0308, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:38:45,346 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1722, 1.8725, 1.9166, 2.6363, 1.7476, 2.4173, 2.4798, 2.1843], + device='cuda:2'), covar=tensor([0.0755, 0.0855, 0.0922, 0.0811, 0.0885, 0.0718, 0.0792, 0.0625], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0225, 0.0240, 0.0226, 0.0211, 0.0186, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 21:39:01,662 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.391e+02 4.948e+02 6.297e+02 8.667e+02 2.605e+03, threshold=1.259e+03, percent-clipped=8.0 +2023-04-02 21:39:02,704 INFO [train.py:903] (2/4) Epoch 22, batch 3650, loss[loss=0.1867, simple_loss=0.2522, pruned_loss=0.0606, over 19732.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2868, pruned_loss=0.06347, over 3839102.15 frames. ], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:39:39,116 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:39:41,812 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 21:40:03,857 INFO [train.py:903] (2/4) Epoch 22, batch 3700, loss[loss=0.2014, simple_loss=0.2755, pruned_loss=0.06365, over 19397.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2873, pruned_loss=0.06381, over 3825793.27 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:40:21,206 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:43,476 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 21:41:02,894 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.816e+02 5.960e+02 7.144e+02 1.653e+03, threshold=1.192e+03, percent-clipped=4.0 +2023-04-02 21:41:04,071 INFO [train.py:903] (2/4) Epoch 22, batch 3750, loss[loss=0.2557, simple_loss=0.3289, pruned_loss=0.09125, over 12863.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06324, over 3830386.09 frames. 
], batch size: 136, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:41:07,726 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3532, 3.9668, 2.6226, 3.4696, 0.8942, 3.9167, 3.8090, 3.8859], + device='cuda:2'), covar=tensor([0.0698, 0.0990, 0.1955, 0.0884, 0.3966, 0.0763, 0.0962, 0.1059], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0412, 0.0494, 0.0344, 0.0402, 0.0432, 0.0427, 0.0460], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:42:04,510 INFO [train.py:903] (2/4) Epoch 22, batch 3800, loss[loss=0.2008, simple_loss=0.2909, pruned_loss=0.05532, over 19646.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2866, pruned_loss=0.06295, over 3827044.07 frames. ], batch size: 55, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:42:26,704 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147206.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:42:38,899 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 21:43:02,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.867e+02 5.077e+02 5.970e+02 7.548e+02 1.289e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 21:43:03,538 INFO [train.py:903] (2/4) Epoch 22, batch 3850, loss[loss=0.2357, simple_loss=0.3073, pruned_loss=0.08205, over 19602.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.288, pruned_loss=0.06347, over 3831784.33 frames. ], batch size: 61, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:43:29,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3029, 3.9033, 2.7292, 3.4970, 1.2225, 3.8240, 3.7097, 3.8518], + device='cuda:2'), covar=tensor([0.0712, 0.0975, 0.1826, 0.0832, 0.3696, 0.0813, 0.0978, 0.1342], + device='cuda:2'), in_proj_covar=tensor([0.0504, 0.0411, 0.0493, 0.0344, 0.0401, 0.0431, 0.0426, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:43:31,169 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147259.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:05,365 INFO [train.py:903] (2/4) Epoch 22, batch 3900, loss[loss=0.2028, simple_loss=0.287, pruned_loss=0.05932, over 19691.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.288, pruned_loss=0.06351, over 3818094.40 frames. ], batch size: 59, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:44:46,185 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:49,774 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:55,098 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:04,846 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 4.890e+02 6.799e+02 8.609e+02 1.784e+03, threshold=1.360e+03, percent-clipped=9.0 +2023-04-02 21:45:05,886 INFO [train.py:903] (2/4) Epoch 22, batch 3950, loss[loss=0.1836, simple_loss=0.2549, pruned_loss=0.05614, over 19787.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06385, over 3801377.86 frames. 
], batch size: 47, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:45:08,143 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 21:45:12,761 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0506, 3.3877, 1.9254, 2.0667, 3.0972, 1.7874, 1.5806, 2.2124], + device='cuda:2'), covar=tensor([0.1542, 0.0672, 0.1169, 0.0884, 0.0500, 0.1258, 0.0967, 0.0730], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0339, 0.0265, 0.0250, 0.0336, 0.0292, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:45:18,210 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:30,122 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:46,165 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4646, 2.2455, 1.6270, 1.5829, 2.0936, 1.3778, 1.3246, 1.8255], + device='cuda:2'), covar=tensor([0.1191, 0.0762, 0.1136, 0.0854, 0.0526, 0.1284, 0.0800, 0.0559], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0317, 0.0338, 0.0264, 0.0250, 0.0336, 0.0292, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:46:00,807 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:04,829 INFO [train.py:903] (2/4) Epoch 22, batch 4000, loss[loss=0.1759, simple_loss=0.2558, pruned_loss=0.04801, over 19732.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.06363, over 3821783.94 frames. ], batch size: 46, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:46:50,135 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:52,243 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 21:47:03,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.354e+02 4.920e+02 5.925e+02 7.748e+02 1.160e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-04-02 21:47:05,863 INFO [train.py:903] (2/4) Epoch 22, batch 4050, loss[loss=0.1863, simple_loss=0.2563, pruned_loss=0.05812, over 19704.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2865, pruned_loss=0.06333, over 3825999.34 frames. ], batch size: 45, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:47:24,625 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:48:00,741 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3972, 1.4260, 1.6696, 1.6151, 2.3281, 2.1266, 2.3966, 1.0788], + device='cuda:2'), covar=tensor([0.2586, 0.4514, 0.2846, 0.2005, 0.1693, 0.2243, 0.1725, 0.4606], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0644, 0.0713, 0.0480, 0.0616, 0.0528, 0.0663, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 21:48:07,527 INFO [train.py:903] (2/4) Epoch 22, batch 4100, loss[loss=0.2216, simple_loss=0.2997, pruned_loss=0.07179, over 19495.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06359, over 3823500.14 frames. ], batch size: 64, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:48:44,890 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 21:48:48,747 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6344, 4.0806, 4.2710, 4.2620, 1.5485, 4.0377, 3.5153, 3.9921], + device='cuda:2'), covar=tensor([0.1572, 0.0847, 0.0637, 0.0694, 0.5879, 0.0855, 0.0678, 0.1158], + device='cuda:2'), in_proj_covar=tensor([0.0779, 0.0742, 0.0945, 0.0825, 0.0826, 0.0707, 0.0568, 0.0882], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 21:49:07,918 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.254e+02 6.349e+02 7.493e+02 1.711e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-04-02 21:49:09,115 INFO [train.py:903] (2/4) Epoch 22, batch 4150, loss[loss=0.1919, simple_loss=0.2787, pruned_loss=0.05253, over 19583.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2872, pruned_loss=0.06311, over 3823801.02 frames. ], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:49:49,341 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9189, 2.0088, 2.2822, 2.5498, 1.8951, 2.4291, 2.3014, 2.1008], + device='cuda:2'), covar=tensor([0.4011, 0.3591, 0.1769, 0.2135, 0.3851, 0.2047, 0.4683, 0.3176], + device='cuda:2'), in_proj_covar=tensor([0.0893, 0.0959, 0.0712, 0.0927, 0.0873, 0.0811, 0.0839, 0.0777], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 21:49:53,531 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4847, 2.2689, 1.7214, 1.5502, 2.1074, 1.3172, 1.4245, 1.8775], + device='cuda:2'), covar=tensor([0.1090, 0.0806, 0.1013, 0.0826, 0.0591, 0.1304, 0.0730, 0.0519], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0315, 0.0335, 0.0263, 0.0248, 0.0335, 0.0289, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:49:56,984 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:09,104 INFO [train.py:903] (2/4) Epoch 22, batch 4200, loss[loss=0.1792, simple_loss=0.2623, pruned_loss=0.04804, over 19633.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2872, pruned_loss=0.06311, over 3824895.10 frames. ], batch size: 50, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:50:13,782 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 21:50:26,609 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:27,405 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147603.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:56,086 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 21:51:09,770 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.933e+02 5.671e+02 7.351e+02 2.024e+03, threshold=1.134e+03, percent-clipped=5.0 +2023-04-02 21:51:10,932 INFO [train.py:903] (2/4) Epoch 22, batch 4250, loss[loss=0.1919, simple_loss=0.2772, pruned_loss=0.05334, over 19550.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2873, pruned_loss=0.06308, over 3820513.37 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:51:13,553 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1331, 3.2344, 1.9727, 2.0221, 2.9038, 1.8418, 1.6669, 2.2618], + device='cuda:2'), covar=tensor([0.1177, 0.0616, 0.0988, 0.0790, 0.0554, 0.1100, 0.0823, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0316, 0.0336, 0.0265, 0.0249, 0.0336, 0.0291, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:51:25,901 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 21:51:38,183 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 21:51:51,752 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:54,076 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4564, 2.4115, 2.0670, 2.5437, 2.2764, 2.1330, 2.1029, 2.4897], + device='cuda:2'), covar=tensor([0.1060, 0.1676, 0.1661, 0.1202, 0.1539, 0.0582, 0.1447, 0.0715], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0356, 0.0314, 0.0251, 0.0303, 0.0253, 0.0310, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:51:57,375 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:52:11,224 INFO [train.py:903] (2/4) Epoch 22, batch 4300, loss[loss=0.1885, simple_loss=0.269, pruned_loss=0.05396, over 19717.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2881, pruned_loss=0.06309, over 3818694.22 frames. ], batch size: 51, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:52:17,405 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-02 21:52:22,661 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 21:52:33,850 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 21:52:47,214 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:53:02,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 21:53:11,621 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 5.055e+02 6.219e+02 8.115e+02 2.735e+03, threshold=1.244e+03, percent-clipped=11.0 +2023-04-02 21:53:11,639 INFO [train.py:903] (2/4) Epoch 22, batch 4350, loss[loss=0.2074, simple_loss=0.2824, pruned_loss=0.06621, over 19749.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2893, pruned_loss=0.06422, over 3810092.08 frames. 
], batch size: 51, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:53:48,691 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:11,265 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:11,999 INFO [train.py:903] (2/4) Epoch 22, batch 4400, loss[loss=0.1957, simple_loss=0.2729, pruned_loss=0.05924, over 19737.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2889, pruned_loss=0.06448, over 3814462.99 frames. ], batch size: 48, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:54:20,670 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:37,133 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 21:54:46,701 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 21:55:06,284 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:12,511 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.137e+02 6.374e+02 7.743e+02 1.534e+03, threshold=1.275e+03, percent-clipped=3.0 +2023-04-02 21:55:12,529 INFO [train.py:903] (2/4) Epoch 22, batch 4450, loss[loss=0.1953, simple_loss=0.2853, pruned_loss=0.05265, over 19591.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2897, pruned_loss=0.06492, over 3815530.11 frames. ], batch size: 57, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:55:57,441 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:57,477 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6399, 1.3895, 1.5092, 1.5900, 3.2376, 1.2645, 2.3736, 3.6921], + device='cuda:2'), covar=tensor([0.0553, 0.2800, 0.2891, 0.1919, 0.0791, 0.2445, 0.1310, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0348, 0.0373, 0.0349, 0.0382, 0.0404], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:56:08,761 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:56:12,927 INFO [train.py:903] (2/4) Epoch 22, batch 4500, loss[loss=0.1569, simple_loss=0.2378, pruned_loss=0.03795, over 16446.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2891, pruned_loss=0.06461, over 3814124.03 frames. ], batch size: 36, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:56:41,857 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147911.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:03,817 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:15,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 4.535e+02 5.624e+02 7.235e+02 1.683e+03, threshold=1.125e+03, percent-clipped=3.0 +2023-04-02 21:57:15,425 INFO [train.py:903] (2/4) Epoch 22, batch 4550, loss[loss=0.2046, simple_loss=0.2751, pruned_loss=0.06701, over 19712.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06421, over 3809917.16 frames. 
], batch size: 45, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:57:23,446 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 21:57:46,096 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 21:57:58,573 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:15,826 INFO [train.py:903] (2/4) Epoch 22, batch 4600, loss[loss=0.2336, simple_loss=0.3086, pruned_loss=0.07933, over 19487.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06422, over 3792110.00 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:58:28,673 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:56,667 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:04,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-02 21:59:12,777 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5947, 1.2679, 1.4570, 1.6788, 3.1952, 1.2182, 2.3069, 3.5829], + device='cuda:2'), covar=tensor([0.0479, 0.2874, 0.3114, 0.1714, 0.0684, 0.2393, 0.1286, 0.0258], + device='cuda:2'), in_proj_covar=tensor([0.0408, 0.0366, 0.0387, 0.0347, 0.0374, 0.0348, 0.0381, 0.0403], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 21:59:16,076 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.857e+02 6.032e+02 8.227e+02 1.754e+03, threshold=1.206e+03, percent-clipped=4.0 +2023-04-02 21:59:16,094 INFO [train.py:903] (2/4) Epoch 22, batch 4650, loss[loss=0.196, simple_loss=0.2733, pruned_loss=0.05935, over 18812.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2873, pruned_loss=0.06412, over 3796622.38 frames. ], batch size: 74, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 21:59:22,648 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:32,297 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 21:59:43,967 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 21:59:53,336 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:00:16,030 INFO [train.py:903] (2/4) Epoch 22, batch 4700, loss[loss=0.1743, simple_loss=0.2491, pruned_loss=0.04976, over 19720.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2884, pruned_loss=0.0646, over 3800297.33 frames. ], batch size: 46, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:00:39,959 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 22:01:15,681 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:17,614 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.448e+02 6.322e+02 7.572e+02 1.580e+03, threshold=1.264e+03, percent-clipped=4.0 +2023-04-02 22:01:17,633 INFO [train.py:903] (2/4) Epoch 22, batch 4750, loss[loss=0.2382, simple_loss=0.313, pruned_loss=0.08168, over 18827.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2892, pruned_loss=0.06492, over 3810680.52 frames. ], batch size: 74, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:01:21,305 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:34,513 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 22:01:50,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:52,786 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:05,587 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148177.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:06,210 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 22:02:18,760 INFO [train.py:903] (2/4) Epoch 22, batch 4800, loss[loss=0.221, simple_loss=0.2907, pruned_loss=0.07567, over 19585.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.06449, over 3820672.76 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:02:23,670 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:57,703 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:03:18,978 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.799e+02 5.785e+02 7.279e+02 1.291e+03, threshold=1.157e+03, percent-clipped=1.0 +2023-04-02 22:03:18,996 INFO [train.py:903] (2/4) Epoch 22, batch 4850, loss[loss=0.1747, simple_loss=0.2538, pruned_loss=0.04782, over 18144.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2888, pruned_loss=0.06468, over 3792056.48 frames. ], batch size: 40, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:03:44,130 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 22:04:01,913 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148273.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:02,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 22:04:08,080 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 22:04:09,284 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 22:04:18,709 INFO [train.py:903] (2/4) Epoch 22, batch 4900, loss[loss=0.2456, simple_loss=0.3169, pruned_loss=0.08709, over 19541.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06427, over 3813703.25 frames. 
], batch size: 56, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:04:18,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 22:04:24,305 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:39,205 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 22:04:40,581 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148305.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:14,493 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:19,523 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.396e+02 6.572e+02 8.433e+02 1.736e+03, threshold=1.314e+03, percent-clipped=6.0 +2023-04-02 22:05:19,541 INFO [train.py:903] (2/4) Epoch 22, batch 4950, loss[loss=0.2231, simple_loss=0.3069, pruned_loss=0.06964, over 19612.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2884, pruned_loss=0.06487, over 3816287.40 frames. ], batch size: 57, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:05:26,432 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.43 vs. limit=5.0 +2023-04-02 22:05:35,930 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 22:06:01,172 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 22:06:20,896 INFO [train.py:903] (2/4) Epoch 22, batch 5000, loss[loss=0.2001, simple_loss=0.2786, pruned_loss=0.06077, over 19607.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06417, over 3820685.29 frames. ], batch size: 50, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:06:21,217 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:25,708 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:29,626 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 22:06:40,553 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 22:06:55,453 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:19,244 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.597e+02 4.828e+02 6.337e+02 7.923e+02 1.456e+03, threshold=1.267e+03, percent-clipped=1.0 +2023-04-02 22:07:19,262 INFO [train.py:903] (2/4) Epoch 22, batch 5050, loss[loss=0.2196, simple_loss=0.3059, pruned_loss=0.06661, over 19791.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2896, pruned_loss=0.06516, over 3819819.04 frames. 
], batch size: 63, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:07:30,120 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148447.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:46,901 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4726, 4.0861, 2.8103, 3.6006, 1.3403, 4.0332, 3.8891, 4.0377], + device='cuda:2'), covar=tensor([0.0706, 0.1036, 0.1840, 0.0860, 0.3529, 0.0661, 0.0902, 0.1132], + device='cuda:2'), in_proj_covar=tensor([0.0502, 0.0409, 0.0492, 0.0344, 0.0398, 0.0432, 0.0424, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:07:54,472 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 22:08:19,386 INFO [train.py:903] (2/4) Epoch 22, batch 5100, loss[loss=0.156, simple_loss=0.2347, pruned_loss=0.03868, over 19781.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.288, pruned_loss=0.06425, over 3820744.20 frames. ], batch size: 47, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:08:21,058 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 22:08:30,466 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 22:08:33,788 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 22:08:39,192 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 22:08:53,317 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148516.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:19,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.468e+02 6.941e+02 9.893e+02 2.948e+03, threshold=1.388e+03, percent-clipped=12.0 +2023-04-02 22:09:19,553 INFO [train.py:903] (2/4) Epoch 22, batch 5150, loss[loss=0.2127, simple_loss=0.2946, pruned_loss=0.06537, over 19738.00 frames. ], tot_loss[loss=0.208, simple_loss=0.288, pruned_loss=0.06402, over 3822509.86 frames. ], batch size: 63, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:09:31,353 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 22:09:32,982 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:40,841 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 22:10:02,755 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:05,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:10:20,783 INFO [train.py:903] (2/4) Epoch 22, batch 5200, loss[loss=0.218, simple_loss=0.3041, pruned_loss=0.06596, over 19360.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2878, pruned_loss=0.06373, over 3823176.89 frames. ], batch size: 70, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:10:23,531 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:33,166 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-02 22:10:53,676 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:58,743 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-04-02 22:11:17,520 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 22:11:21,000 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.558e+02 5.776e+02 7.251e+02 2.001e+03, threshold=1.155e+03, percent-clipped=2.0 +2023-04-02 22:11:21,018 INFO [train.py:903] (2/4) Epoch 22, batch 5250, loss[loss=0.2079, simple_loss=0.2921, pruned_loss=0.06186, over 19674.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2881, pruned_loss=0.06384, over 3826513.71 frames. ], batch size: 59, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:11:27,784 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4495, 1.3667, 1.5180, 1.4420, 3.0512, 1.1778, 2.3213, 3.4212], + device='cuda:2'), covar=tensor([0.0472, 0.2625, 0.2730, 0.1891, 0.0711, 0.2346, 0.1199, 0.0269], + device='cuda:2'), in_proj_covar=tensor([0.0410, 0.0367, 0.0387, 0.0348, 0.0375, 0.0351, 0.0384, 0.0405], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:11:27,895 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:33,740 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:35,083 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1299, 1.3319, 1.7071, 1.3165, 2.6638, 3.6874, 3.3800, 3.8170], + device='cuda:2'), covar=tensor([0.1702, 0.3741, 0.3386, 0.2474, 0.0644, 0.0188, 0.0200, 0.0258], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0326, 0.0357, 0.0268, 0.0248, 0.0190, 0.0218, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 22:11:58,291 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:58,395 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:12:20,276 INFO [train.py:903] (2/4) Epoch 22, batch 5300, loss[loss=0.1794, simple_loss=0.261, pruned_loss=0.04895, over 19756.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.06469, over 3829154.27 frames. ], batch size: 47, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:12:39,146 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 22:13:17,792 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:22,173 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.679e+02 5.309e+02 6.457e+02 8.011e+02 2.116e+03, threshold=1.291e+03, percent-clipped=5.0 +2023-04-02 22:13:22,191 INFO [train.py:903] (2/4) Epoch 22, batch 5350, loss[loss=0.1903, simple_loss=0.2642, pruned_loss=0.05826, over 19291.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2883, pruned_loss=0.06423, over 3823763.28 frames. 
], batch size: 44, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:13:53,931 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:55,510 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 22:13:58,121 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2924, 3.7912, 3.9062, 3.9014, 1.6893, 3.7128, 3.2698, 3.6415], + device='cuda:2'), covar=tensor([0.1551, 0.1020, 0.0694, 0.0742, 0.5376, 0.1038, 0.0700, 0.1215], + device='cuda:2'), in_proj_covar=tensor([0.0777, 0.0738, 0.0939, 0.0824, 0.0827, 0.0704, 0.0564, 0.0875], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 22:13:59,361 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5281, 3.1494, 2.2141, 2.2795, 2.2457, 2.7029, 0.9880, 2.2962], + device='cuda:2'), covar=tensor([0.0495, 0.0472, 0.0642, 0.1010, 0.0851, 0.0901, 0.1251, 0.0822], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0356, 0.0357, 0.0380, 0.0461, 0.0387, 0.0336, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 22:14:24,407 INFO [train.py:903] (2/4) Epoch 22, batch 5400, loss[loss=0.2411, simple_loss=0.3093, pruned_loss=0.08647, over 13245.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2878, pruned_loss=0.06362, over 3832331.98 frames. ], batch size: 136, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:14:28,070 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:15:24,091 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 4.882e+02 5.931e+02 8.184e+02 2.288e+03, threshold=1.186e+03, percent-clipped=7.0 +2023-04-02 22:15:24,109 INFO [train.py:903] (2/4) Epoch 22, batch 5450, loss[loss=0.222, simple_loss=0.3045, pruned_loss=0.06975, over 19595.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2881, pruned_loss=0.06346, over 3828531.50 frames. ], batch size: 61, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:15:50,183 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:05,295 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4708, 1.5189, 1.7763, 1.7199, 2.4124, 2.2207, 2.5564, 1.0820], + device='cuda:2'), covar=tensor([0.2460, 0.4222, 0.2608, 0.1905, 0.1649, 0.2116, 0.1542, 0.4587], + device='cuda:2'), in_proj_covar=tensor([0.0540, 0.0647, 0.0717, 0.0486, 0.0624, 0.0533, 0.0667, 0.0553], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 22:16:24,044 INFO [train.py:903] (2/4) Epoch 22, batch 5500, loss[loss=0.19, simple_loss=0.2726, pruned_loss=0.05373, over 17466.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2877, pruned_loss=0.06349, over 3834663.54 frames. ], batch size: 101, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:16:47,597 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:49,403 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-02 22:17:25,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.956e+02 6.031e+02 8.410e+02 1.981e+03, threshold=1.206e+03, percent-clipped=11.0 +2023-04-02 22:17:25,290 INFO [train.py:903] (2/4) Epoch 22, batch 5550, loss[loss=0.2076, simple_loss=0.2859, pruned_loss=0.0646, over 19545.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06274, over 3833095.87 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:17:33,838 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 22:17:36,211 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2757, 3.6341, 2.1927, 2.2060, 3.3647, 1.9496, 1.6187, 2.2338], + device='cuda:2'), covar=tensor([0.1339, 0.0697, 0.1058, 0.0867, 0.0479, 0.1227, 0.1012, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0317, 0.0338, 0.0264, 0.0248, 0.0339, 0.0291, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:18:10,834 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:18:21,571 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 22:18:27,007 INFO [train.py:903] (2/4) Epoch 22, batch 5600, loss[loss=0.2271, simple_loss=0.3072, pruned_loss=0.07355, over 19082.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.06365, over 3834506.07 frames. ], batch size: 69, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:18:41,659 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9868, 2.0718, 2.3567, 2.6288, 2.0092, 2.4959, 2.3896, 2.1182], + device='cuda:2'), covar=tensor([0.3958, 0.3957, 0.1790, 0.2411, 0.4071, 0.2117, 0.4509, 0.3219], + device='cuda:2'), in_proj_covar=tensor([0.0893, 0.0959, 0.0714, 0.0928, 0.0874, 0.0810, 0.0837, 0.0778], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 22:18:56,787 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149013.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:05,999 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:27,601 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 5.096e+02 6.059e+02 8.103e+02 1.757e+03, threshold=1.212e+03, percent-clipped=10.0 +2023-04-02 22:19:27,619 INFO [train.py:903] (2/4) Epoch 22, batch 5650, loss[loss=0.22, simple_loss=0.3026, pruned_loss=0.0687, over 18050.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.287, pruned_loss=0.06356, over 3816356.35 frames. 
], batch size: 83, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:19:35,883 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:41,136 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2556, 3.8049, 3.9389, 3.9179, 1.5260, 3.7460, 3.2432, 3.6964], + device='cuda:2'), covar=tensor([0.1740, 0.0921, 0.0703, 0.0798, 0.6054, 0.1059, 0.0742, 0.1229], + device='cuda:2'), in_proj_covar=tensor([0.0786, 0.0745, 0.0951, 0.0836, 0.0841, 0.0718, 0.0569, 0.0884], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 22:20:15,157 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 22:20:16,297 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:18,954 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4426, 1.6035, 1.9456, 1.7444, 3.2123, 2.4642, 3.4846, 1.6522], + device='cuda:2'), covar=tensor([0.2761, 0.4664, 0.3081, 0.2063, 0.1574, 0.2373, 0.1560, 0.4483], + device='cuda:2'), in_proj_covar=tensor([0.0536, 0.0642, 0.0711, 0.0483, 0.0621, 0.0531, 0.0661, 0.0549], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 22:20:21,711 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 22:20:28,282 INFO [train.py:903] (2/4) Epoch 22, batch 5700, loss[loss=0.206, simple_loss=0.293, pruned_loss=0.05952, over 19097.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2869, pruned_loss=0.06354, over 3818324.95 frames. ], batch size: 69, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:17,806 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:21:29,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.618e+02 6.077e+02 7.635e+02 1.470e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-04-02 22:21:29,614 INFO [train.py:903] (2/4) Epoch 22, batch 5750, loss[loss=0.1785, simple_loss=0.25, pruned_loss=0.05353, over 19773.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2868, pruned_loss=0.0635, over 3827351.58 frames. ], batch size: 46, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:30,815 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 22:21:39,630 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 22:21:46,341 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 22:21:59,277 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:29,890 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:30,644 INFO [train.py:903] (2/4) Epoch 22, batch 5800, loss[loss=0.1829, simple_loss=0.257, pruned_loss=0.05443, over 19776.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2874, pruned_loss=0.06392, over 3814752.98 frames. 
], batch size: 47, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:22:37,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:23,082 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149231.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:30,410 INFO [train.py:903] (2/4) Epoch 22, batch 5850, loss[loss=0.1687, simple_loss=0.2522, pruned_loss=0.04261, over 19425.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2876, pruned_loss=0.06413, over 3815521.23 frames. ], batch size: 48, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:23:31,585 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.174e+02 6.346e+02 7.936e+02 1.645e+03, threshold=1.269e+03, percent-clipped=7.0 +2023-04-02 22:23:52,865 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:24:30,286 INFO [train.py:903] (2/4) Epoch 22, batch 5900, loss[loss=0.1869, simple_loss=0.271, pruned_loss=0.05136, over 19575.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2872, pruned_loss=0.06388, over 3820709.27 frames. ], batch size: 52, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:24:35,574 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 22:24:56,375 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 22:25:31,743 INFO [train.py:903] (2/4) Epoch 22, batch 5950, loss[loss=0.2192, simple_loss=0.2937, pruned_loss=0.07235, over 19774.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2864, pruned_loss=0.06372, over 3809284.32 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:25:32,878 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.792e+02 4.957e+02 5.985e+02 7.132e+02 1.534e+03, threshold=1.197e+03, percent-clipped=1.0 +2023-04-02 22:26:28,441 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:26:33,220 INFO [train.py:903] (2/4) Epoch 22, batch 6000, loss[loss=0.2072, simple_loss=0.2921, pruned_loss=0.06121, over 18308.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2873, pruned_loss=0.06404, over 3811302.03 frames. ], batch size: 83, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:26:33,221 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 22:26:46,911 INFO [train.py:937] (2/4) Epoch 22, validation: loss=0.1681, simple_loss=0.2682, pruned_loss=0.03398, over 944034.00 frames. +2023-04-02 22:26:46,912 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 22:27:13,604 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:27:48,664 INFO [train.py:903] (2/4) Epoch 22, batch 6050, loss[loss=0.2534, simple_loss=0.323, pruned_loss=0.09191, over 18828.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2872, pruned_loss=0.06401, over 3813108.94 frames. 
], batch size: 74, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:27:49,811 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.880e+02 5.766e+02 7.280e+02 1.810e+03, threshold=1.153e+03, percent-clipped=3.0 +2023-04-02 22:28:02,637 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:32,538 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:50,322 INFO [train.py:903] (2/4) Epoch 22, batch 6100, loss[loss=0.1604, simple_loss=0.2368, pruned_loss=0.04198, over 19412.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06302, over 3822195.53 frames. ], batch size: 48, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:16,828 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3530, 1.3131, 1.9291, 1.6993, 3.1473, 4.6380, 4.4604, 5.0060], + device='cuda:2'), covar=tensor([0.1676, 0.4055, 0.3491, 0.2271, 0.0609, 0.0186, 0.0180, 0.0213], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0323, 0.0354, 0.0265, 0.0246, 0.0188, 0.0216, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 22:29:42,760 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:29:48,970 INFO [train.py:903] (2/4) Epoch 22, batch 6150, loss[loss=0.1957, simple_loss=0.2721, pruned_loss=0.05963, over 19730.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2872, pruned_loss=0.06384, over 3817833.17 frames. ], batch size: 51, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:50,022 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.761e+02 6.063e+02 7.648e+02 1.908e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 22:30:19,811 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 22:30:43,261 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:30:49,417 INFO [train.py:903] (2/4) Epoch 22, batch 6200, loss[loss=0.1752, simple_loss=0.2612, pruned_loss=0.04464, over 19594.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2872, pruned_loss=0.06376, over 3820103.76 frames. ], batch size: 52, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:31:35,875 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 22:31:51,302 INFO [train.py:903] (2/4) Epoch 22, batch 6250, loss[loss=0.2107, simple_loss=0.2995, pruned_loss=0.06099, over 17435.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2874, pruned_loss=0.06372, over 3796280.59 frames. ], batch size: 101, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:31:52,392 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.068e+02 6.178e+02 8.268e+02 1.694e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 22:32:21,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 22:32:52,472 INFO [train.py:903] (2/4) Epoch 22, batch 6300, loss[loss=0.1708, simple_loss=0.2462, pruned_loss=0.04769, over 16903.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06301, over 3808144.56 frames. 
], batch size: 37, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:32:59,739 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-02 22:33:33,518 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 22:33:51,830 INFO [train.py:903] (2/4) Epoch 22, batch 6350, loss[loss=0.1909, simple_loss=0.2629, pruned_loss=0.05946, over 19730.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06341, over 3802390.79 frames. ], batch size: 45, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:33:52,936 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.551e+02 6.531e+02 8.044e+02 1.579e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-02 22:34:23,965 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149764.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:34:48,907 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 22:34:52,488 INFO [train.py:903] (2/4) Epoch 22, batch 6400, loss[loss=0.1915, simple_loss=0.2758, pruned_loss=0.05361, over 18742.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.06341, over 3805416.02 frames. ], batch size: 74, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:25,518 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 22:35:51,423 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0980, 1.7806, 1.4909, 1.1297, 1.5793, 1.1402, 1.1935, 1.6661], + device='cuda:2'), covar=tensor([0.0826, 0.0789, 0.0999, 0.0889, 0.0568, 0.1307, 0.0626, 0.0468], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0268, 0.0249, 0.0338, 0.0292, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:35:54,282 INFO [train.py:903] (2/4) Epoch 22, batch 6450, loss[loss=0.1859, simple_loss=0.276, pruned_loss=0.0479, over 19664.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06301, over 3828244.05 frames. ], batch size: 58, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:55,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.788e+02 5.699e+02 7.070e+02 1.580e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-02 22:36:23,901 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1223, 1.3563, 1.5743, 1.3314, 2.7643, 1.0177, 2.1702, 3.0916], + device='cuda:2'), covar=tensor([0.0586, 0.2703, 0.2695, 0.1844, 0.0701, 0.2421, 0.1211, 0.0326], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0368, 0.0388, 0.0347, 0.0375, 0.0354, 0.0384, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:36:39,132 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 22:36:40,521 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:36:54,748 INFO [train.py:903] (2/4) Epoch 22, batch 6500, loss[loss=0.2495, simple_loss=0.3248, pruned_loss=0.08712, over 19513.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.0625, over 3826397.50 frames. 
], batch size: 64, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:00,234 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 22:37:41,928 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:37:55,092 INFO [train.py:903] (2/4) Epoch 22, batch 6550, loss[loss=0.246, simple_loss=0.3245, pruned_loss=0.08373, over 19706.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06243, over 3831253.86 frames. ], batch size: 63, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:56,255 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 4.654e+02 5.933e+02 7.304e+02 1.667e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 22:38:55,832 INFO [train.py:903] (2/4) Epoch 22, batch 6600, loss[loss=0.1864, simple_loss=0.2771, pruned_loss=0.04783, over 18148.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.286, pruned_loss=0.06273, over 3818230.09 frames. ], batch size: 83, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:38:59,640 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:39:59,893 INFO [train.py:903] (2/4) Epoch 22, batch 6650, loss[loss=0.2323, simple_loss=0.3128, pruned_loss=0.07589, over 19532.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2869, pruned_loss=0.06344, over 3813223.21 frames. ], batch size: 54, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:40:01,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 4.673e+02 5.867e+02 7.414e+02 1.313e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 22:40:04,701 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:40:54,109 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9749, 1.8075, 1.5244, 1.8332, 1.7477, 1.4166, 1.4899, 1.7668], + device='cuda:2'), covar=tensor([0.1096, 0.1493, 0.1725, 0.1184, 0.1418, 0.0836, 0.1666, 0.0880], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0352, 0.0311, 0.0249, 0.0300, 0.0249, 0.0307, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:40:59,354 INFO [train.py:903] (2/4) Epoch 22, batch 6700, loss[loss=0.207, simple_loss=0.2933, pruned_loss=0.06032, over 19662.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06298, over 3823957.02 frames. 
], batch size: 53, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:10,403 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:41:23,662 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150108.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:41:51,114 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9544, 4.3865, 4.6557, 4.6454, 1.6934, 4.3778, 3.7595, 4.3683], + device='cuda:2'), covar=tensor([0.1649, 0.0776, 0.0601, 0.0655, 0.6230, 0.0891, 0.0676, 0.1166], + device='cuda:2'), in_proj_covar=tensor([0.0787, 0.0749, 0.0955, 0.0833, 0.0839, 0.0714, 0.0568, 0.0882], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 22:41:57,255 INFO [train.py:903] (2/4) Epoch 22, batch 6750, loss[loss=0.2491, simple_loss=0.3192, pruned_loss=0.08954, over 19281.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06353, over 3832943.30 frames. ], batch size: 66, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:58,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 4.879e+02 6.504e+02 7.654e+02 1.720e+03, threshold=1.301e+03, percent-clipped=5.0 +2023-04-02 22:42:53,133 INFO [train.py:903] (2/4) Epoch 22, batch 6800, loss[loss=0.1881, simple_loss=0.2715, pruned_loss=0.0524, over 19621.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2878, pruned_loss=0.06409, over 3824628.44 frames. ], batch size: 50, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:43:20,004 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7822, 3.9487, 4.3476, 4.3520, 2.7934, 4.0605, 3.7470, 4.1151], + device='cuda:2'), covar=tensor([0.1385, 0.3353, 0.0632, 0.0688, 0.4137, 0.1345, 0.0629, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0786, 0.0747, 0.0954, 0.0833, 0.0836, 0.0712, 0.0567, 0.0882], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 22:43:38,426 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 22:43:39,484 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 22:43:42,625 INFO [train.py:903] (2/4) Epoch 23, batch 0, loss[loss=0.224, simple_loss=0.3021, pruned_loss=0.073, over 19762.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3021, pruned_loss=0.073, over 19762.00 frames. ], batch size: 63, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:43:42,626 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 22:43:54,253 INFO [train.py:937] (2/4) Epoch 23, validation: loss=0.1688, simple_loss=0.2693, pruned_loss=0.03418, over 944034.00 frames. +2023-04-02 22:43:54,254 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 22:44:03,541 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150223.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:44:06,620 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 22:44:21,433 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.848e+02 5.561e+02 7.527e+02 1.735e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-04-02 22:44:31,832 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:44:35,237 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1041, 1.7990, 1.5244, 1.2162, 1.6362, 1.2368, 1.0978, 1.5986], + device='cuda:2'), covar=tensor([0.0900, 0.0829, 0.1073, 0.0861, 0.0527, 0.1330, 0.0708, 0.0482], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0320, 0.0342, 0.0269, 0.0250, 0.0339, 0.0294, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:44:55,830 INFO [train.py:903] (2/4) Epoch 23, batch 50, loss[loss=0.1985, simple_loss=0.2726, pruned_loss=0.06219, over 19743.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2853, pruned_loss=0.06135, over 865777.34 frames. ], batch size: 51, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:45:03,061 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:27,185 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:30,265 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 22:45:36,396 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:57,947 INFO [train.py:903] (2/4) Epoch 23, batch 100, loss[loss=0.2094, simple_loss=0.2929, pruned_loss=0.06297, over 18753.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.0625, over 1523757.79 frames. ], batch size: 74, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:46:06,470 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:46:07,246 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 22:46:26,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.910e+02 5.630e+02 7.676e+02 1.557e+03, threshold=1.126e+03, percent-clipped=7.0 +2023-04-02 22:46:48,751 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1900, 3.3639, 2.0195, 1.8698, 3.0910, 1.7085, 1.6021, 2.2976], + device='cuda:2'), covar=tensor([0.1369, 0.0658, 0.1072, 0.1021, 0.0624, 0.1340, 0.1044, 0.0706], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0318, 0.0340, 0.0268, 0.0248, 0.0337, 0.0292, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:46:59,547 INFO [train.py:903] (2/4) Epoch 23, batch 150, loss[loss=0.2122, simple_loss=0.285, pruned_loss=0.06969, over 19377.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.286, pruned_loss=0.06268, over 2050394.67 frames. ], batch size: 48, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:47:59,876 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-02 22:48:01,003 INFO [train.py:903] (2/4) Epoch 23, batch 200, loss[loss=0.2247, simple_loss=0.3024, pruned_loss=0.07349, over 19536.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06395, over 2435636.26 frames. ], batch size: 54, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:48:30,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 5.350e+02 6.653e+02 9.712e+02 2.771e+03, threshold=1.331e+03, percent-clipped=16.0 +2023-04-02 22:48:33,190 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150441.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:48:38,323 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 22:49:02,669 INFO [train.py:903] (2/4) Epoch 23, batch 250, loss[loss=0.2405, simple_loss=0.3162, pruned_loss=0.08236, over 17415.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2887, pruned_loss=0.06443, over 2749930.14 frames. ], batch size: 101, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:49:20,033 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150479.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:49:43,869 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1652, 1.2123, 1.6507, 1.2954, 2.5274, 3.5469, 3.2392, 3.8280], + device='cuda:2'), covar=tensor([0.1696, 0.4090, 0.3607, 0.2475, 0.0641, 0.0199, 0.0240, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0325, 0.0355, 0.0267, 0.0246, 0.0189, 0.0217, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 22:49:48,392 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.4341, 5.2934, 6.1724, 6.1480, 2.2464, 5.8096, 4.9581, 5.8660], + device='cuda:2'), covar=tensor([0.1535, 0.0705, 0.0522, 0.0551, 0.5728, 0.0644, 0.0569, 0.1047], + device='cuda:2'), in_proj_covar=tensor([0.0784, 0.0744, 0.0952, 0.0832, 0.0836, 0.0711, 0.0566, 0.0878], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 22:49:49,695 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150504.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:49:54,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5544, 1.7427, 2.0024, 1.9636, 3.2184, 2.6467, 3.5788, 1.6680], + device='cuda:2'), covar=tensor([0.2485, 0.4164, 0.2727, 0.1816, 0.1488, 0.2095, 0.1488, 0.4065], + device='cuda:2'), in_proj_covar=tensor([0.0535, 0.0642, 0.0715, 0.0484, 0.0618, 0.0530, 0.0661, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 22:50:05,854 INFO [train.py:903] (2/4) Epoch 23, batch 300, loss[loss=0.1952, simple_loss=0.2743, pruned_loss=0.05799, over 19468.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06371, over 2991888.00 frames. 
], batch size: 49, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:50:34,494 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.024e+02 5.928e+02 7.198e+02 2.066e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 22:50:54,909 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150555.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:50:56,010 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:51:07,069 INFO [train.py:903] (2/4) Epoch 23, batch 350, loss[loss=0.1733, simple_loss=0.2548, pruned_loss=0.04585, over 19730.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2882, pruned_loss=0.06413, over 3157016.01 frames. ], batch size: 51, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:51:11,921 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:52:09,601 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 22:52:09,970 INFO [train.py:903] (2/4) Epoch 23, batch 400, loss[loss=0.231, simple_loss=0.3054, pruned_loss=0.07829, over 19662.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2877, pruned_loss=0.06377, over 3303619.59 frames. ], batch size: 60, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:52:36,583 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:52:39,026 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7748, 1.8413, 2.1556, 2.3143, 1.6603, 2.1197, 2.1505, 1.9870], + device='cuda:2'), covar=tensor([0.4246, 0.3761, 0.1993, 0.2438, 0.3998, 0.2304, 0.4992, 0.3491], + device='cuda:2'), in_proj_covar=tensor([0.0903, 0.0968, 0.0719, 0.0932, 0.0883, 0.0819, 0.0845, 0.0784], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 22:52:40,893 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.962e+02 5.093e+02 6.486e+02 8.008e+02 1.724e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 22:53:11,930 INFO [train.py:903] (2/4) Epoch 23, batch 450, loss[loss=0.1758, simple_loss=0.2504, pruned_loss=0.05053, over 19781.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06357, over 3417341.27 frames. ], batch size: 48, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:53:46,043 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 22:53:46,070 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 22:54:15,721 INFO [train.py:903] (2/4) Epoch 23, batch 500, loss[loss=0.2208, simple_loss=0.2995, pruned_loss=0.07102, over 19617.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06433, over 3495252.04 frames. 
], batch size: 57, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:54:29,693 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.8337, 5.3822, 3.1769, 4.7595, 1.1127, 5.3729, 5.2870, 5.4440], + device='cuda:2'), covar=tensor([0.0363, 0.0755, 0.1671, 0.0647, 0.3972, 0.0481, 0.0732, 0.0923], + device='cuda:2'), in_proj_covar=tensor([0.0505, 0.0413, 0.0498, 0.0346, 0.0402, 0.0436, 0.0425, 0.0460], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 22:54:45,176 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 5.191e+02 6.635e+02 8.528e+02 2.142e+03, threshold=1.327e+03, percent-clipped=5.0 +2023-04-02 22:54:57,375 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:54:58,643 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:55:17,407 INFO [train.py:903] (2/4) Epoch 23, batch 550, loss[loss=0.2299, simple_loss=0.3036, pruned_loss=0.07807, over 19768.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06471, over 3576143.38 frames. ], batch size: 63, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:55:40,338 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2182, 1.2562, 1.4826, 1.3800, 1.7988, 1.6929, 1.8095, 0.6433], + device='cuda:2'), covar=tensor([0.2776, 0.4621, 0.2776, 0.2216, 0.1705, 0.2686, 0.1553, 0.5061], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0642, 0.0711, 0.0483, 0.0616, 0.0531, 0.0660, 0.0547], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 22:56:14,494 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:18,589 INFO [train.py:903] (2/4) Epoch 23, batch 600, loss[loss=0.1901, simple_loss=0.2674, pruned_loss=0.05643, over 19611.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2891, pruned_loss=0.06452, over 3638823.09 frames. ], batch size: 50, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:56:45,471 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150837.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:48,647 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+02 5.000e+02 5.859e+02 6.998e+02 1.831e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-02 22:56:59,192 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 22:57:21,235 INFO [train.py:903] (2/4) Epoch 23, batch 650, loss[loss=0.2104, simple_loss=0.2998, pruned_loss=0.06056, over 19290.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2891, pruned_loss=0.06434, over 3681414.34 frames. ], batch size: 66, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:02,423 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150899.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:23,645 INFO [train.py:903] (2/4) Epoch 23, batch 700, loss[loss=0.1826, simple_loss=0.2638, pruned_loss=0.05067, over 19420.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2883, pruned_loss=0.06358, over 3718797.87 frames. 
], batch size: 48, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:27,273 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:52,730 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.019e+02 5.794e+02 7.164e+02 1.349e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 22:58:58,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.00 vs. limit=5.0 +2023-04-02 22:59:26,031 INFO [train.py:903] (2/4) Epoch 23, batch 750, loss[loss=0.1814, simple_loss=0.2661, pruned_loss=0.04838, over 19592.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2901, pruned_loss=0.06458, over 3735672.09 frames. ], batch size: 52, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:17,651 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:26,910 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:28,679 INFO [train.py:903] (2/4) Epoch 23, batch 800, loss[loss=0.2113, simple_loss=0.2726, pruned_loss=0.07498, over 17345.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2895, pruned_loss=0.0644, over 3759012.78 frames. ], batch size: 38, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:36,356 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 23:00:38,120 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2582, 1.4513, 1.5792, 1.5244, 2.8960, 1.1944, 2.3034, 3.2100], + device='cuda:2'), covar=tensor([0.0515, 0.2431, 0.2446, 0.1670, 0.0707, 0.2219, 0.1195, 0.0310], + device='cuda:2'), in_proj_covar=tensor([0.0416, 0.0370, 0.0389, 0.0350, 0.0379, 0.0354, 0.0385, 0.0408], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:00:44,078 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 23:00:46,702 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 23:00:48,234 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:57,133 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.089e+02 6.161e+02 7.478e+02 1.780e+03, threshold=1.232e+03, percent-clipped=6.0 +2023-04-02 23:01:09,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-02 23:01:29,764 INFO [train.py:903] (2/4) Epoch 23, batch 850, loss[loss=0.1851, simple_loss=0.2775, pruned_loss=0.04636, over 19715.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2896, pruned_loss=0.06449, over 3782366.14 frames. ], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:01:38,467 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-04-02 23:02:00,705 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4452, 1.5059, 1.6704, 1.6204, 2.1857, 2.1339, 2.2903, 0.8085], + device='cuda:2'), covar=tensor([0.2394, 0.4038, 0.2506, 0.1889, 0.1529, 0.2080, 0.1408, 0.4516], + device='cuda:2'), in_proj_covar=tensor([0.0531, 0.0638, 0.0707, 0.0481, 0.0611, 0.0526, 0.0653, 0.0545], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 23:02:04,863 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:02:25,399 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 23:02:31,808 INFO [train.py:903] (2/4) Epoch 23, batch 900, loss[loss=0.1817, simple_loss=0.2589, pruned_loss=0.05228, over 19770.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2891, pruned_loss=0.06392, over 3789120.49 frames. ], batch size: 47, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:02,019 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 5.070e+02 6.543e+02 8.443e+02 1.332e+03, threshold=1.309e+03, percent-clipped=3.0 +2023-04-02 23:03:21,199 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 23:03:32,675 INFO [train.py:903] (2/4) Epoch 23, batch 950, loss[loss=0.2253, simple_loss=0.3055, pruned_loss=0.07256, over 19773.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2891, pruned_loss=0.06458, over 3805618.16 frames. ], batch size: 56, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:39,536 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 23:04:11,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.9029, 5.3799, 2.9493, 4.6999, 1.3668, 5.4567, 5.2885, 5.5065], + device='cuda:2'), covar=tensor([0.0382, 0.0792, 0.1864, 0.0728, 0.3533, 0.0509, 0.0729, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0504, 0.0413, 0.0497, 0.0346, 0.0400, 0.0435, 0.0426, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:04:16,135 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 23:04:17,808 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:27,651 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:35,166 INFO [train.py:903] (2/4) Epoch 23, batch 1000, loss[loss=0.2018, simple_loss=0.2762, pruned_loss=0.06364, over 19732.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2883, pruned_loss=0.06436, over 3809244.33 frames. 
], batch size: 51, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:04:47,465 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5535, 1.6725, 1.8964, 1.8153, 2.7873, 2.3796, 2.9505, 1.3264], + device='cuda:2'), covar=tensor([0.2464, 0.4180, 0.2742, 0.1869, 0.1529, 0.2107, 0.1415, 0.4301], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0642, 0.0711, 0.0484, 0.0616, 0.0528, 0.0658, 0.0548], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 23:05:04,890 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.146e+02 6.401e+02 7.951e+02 1.702e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 23:05:32,291 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 23:05:33,596 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:05:37,846 INFO [train.py:903] (2/4) Epoch 23, batch 1050, loss[loss=0.2141, simple_loss=0.2888, pruned_loss=0.06967, over 19593.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.06369, over 3812696.21 frames. ], batch size: 52, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:42,840 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151270.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:12,763 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 23:06:14,324 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:21,321 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151300.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:40,098 INFO [train.py:903] (2/4) Epoch 23, batch 1100, loss[loss=0.1993, simple_loss=0.2663, pruned_loss=0.06617, over 19742.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06334, over 3818017.08 frames. ], batch size: 47, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:06:40,416 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151316.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:07:09,140 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 5.070e+02 6.152e+02 7.617e+02 1.362e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-04-02 23:07:40,867 INFO [train.py:903] (2/4) Epoch 23, batch 1150, loss[loss=0.2217, simple_loss=0.3072, pruned_loss=0.06815, over 17312.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2873, pruned_loss=0.06325, over 3816617.49 frames. ], batch size: 101, lr: 3.60e-03, grad_scale: 4.0 +2023-04-02 23:07:55,705 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151377.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:08:43,930 INFO [train.py:903] (2/4) Epoch 23, batch 1200, loss[loss=0.2423, simple_loss=0.3103, pruned_loss=0.08713, over 13457.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2871, pruned_loss=0.06307, over 3820972.63 frames. 
], batch size: 135, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:09:14,769 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.896e+02 6.001e+02 7.643e+02 1.247e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 23:09:18,073 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 23:09:45,290 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:09:46,023 INFO [train.py:903] (2/4) Epoch 23, batch 1250, loss[loss=0.2079, simple_loss=0.2974, pruned_loss=0.05917, over 19679.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2878, pruned_loss=0.06386, over 3823152.59 frames. ], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:12,466 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4797, 2.2365, 1.6692, 1.4843, 2.0765, 1.2283, 1.3830, 1.8614], + device='cuda:2'), covar=tensor([0.1051, 0.0791, 0.1097, 0.0837, 0.0533, 0.1335, 0.0763, 0.0554], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0316, 0.0339, 0.0266, 0.0246, 0.0339, 0.0291, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:10:16,536 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:10:17,544 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8158, 1.9203, 2.0256, 2.3363, 1.9162, 2.2838, 2.1072, 1.9916], + device='cuda:2'), covar=tensor([0.3324, 0.2777, 0.1523, 0.1780, 0.2854, 0.1556, 0.3521, 0.2403], + device='cuda:2'), in_proj_covar=tensor([0.0906, 0.0973, 0.0719, 0.0936, 0.0884, 0.0817, 0.0844, 0.0787], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 23:10:46,708 INFO [train.py:903] (2/4) Epoch 23, batch 1300, loss[loss=0.2027, simple_loss=0.2961, pruned_loss=0.05464, over 19608.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2887, pruned_loss=0.06457, over 3829344.45 frames. ], batch size: 50, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:11:16,030 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 5.118e+02 6.046e+02 8.032e+02 1.744e+03, threshold=1.209e+03, percent-clipped=5.0 +2023-04-02 23:11:22,856 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:11:46,624 INFO [train.py:903] (2/4) Epoch 23, batch 1350, loss[loss=0.1931, simple_loss=0.2684, pruned_loss=0.05895, over 19743.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2889, pruned_loss=0.06489, over 3823576.91 frames. ], batch size: 51, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:12:48,223 INFO [train.py:903] (2/4) Epoch 23, batch 1400, loss[loss=0.2032, simple_loss=0.2862, pruned_loss=0.06008, over 19519.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06518, over 3816853.01 frames. 
], batch size: 54, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:08,472 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:13,655 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7611, 4.0823, 4.6013, 4.6191, 2.0473, 4.3127, 3.7526, 4.0466], + device='cuda:2'), covar=tensor([0.2169, 0.1279, 0.0786, 0.1013, 0.6704, 0.1739, 0.1061, 0.1757], + device='cuda:2'), in_proj_covar=tensor([0.0784, 0.0745, 0.0954, 0.0833, 0.0840, 0.0712, 0.0568, 0.0886], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-02 23:13:17,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 5.043e+02 6.240e+02 8.237e+02 1.280e+03, threshold=1.248e+03, percent-clipped=3.0 +2023-04-02 23:13:21,401 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:38,340 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151658.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:40,483 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151660.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:41,871 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:48,344 INFO [train.py:903] (2/4) Epoch 23, batch 1450, loss[loss=0.2357, simple_loss=0.3091, pruned_loss=0.08115, over 13383.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2873, pruned_loss=0.06422, over 3815893.01 frames. ], batch size: 136, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:48,378 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 23:14:36,975 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:14:49,057 INFO [train.py:903] (2/4) Epoch 23, batch 1500, loss[loss=0.2099, simple_loss=0.2841, pruned_loss=0.06788, over 19591.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2877, pruned_loss=0.06443, over 3823522.53 frames. ], batch size: 61, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:18,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.905e+02 6.054e+02 7.299e+02 2.065e+03, threshold=1.211e+03, percent-clipped=4.0 +2023-04-02 23:15:32,948 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5306, 1.5987, 1.8592, 1.7780, 2.6419, 2.2975, 2.8414, 1.3728], + device='cuda:2'), covar=tensor([0.2552, 0.4317, 0.2709, 0.1992, 0.1695, 0.2218, 0.1620, 0.4488], + device='cuda:2'), in_proj_covar=tensor([0.0540, 0.0647, 0.0717, 0.0488, 0.0620, 0.0533, 0.0665, 0.0554], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-02 23:15:40,385 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151759.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:15:47,606 INFO [train.py:903] (2/4) Epoch 23, batch 1550, loss[loss=0.2423, simple_loss=0.3154, pruned_loss=0.0846, over 19558.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2883, pruned_loss=0.06432, over 3838190.89 frames. 
], batch size: 61, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:59,929 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:16:45,637 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7246, 4.3042, 2.9176, 3.8255, 1.0288, 4.2412, 4.1513, 4.2549], + device='cuda:2'), covar=tensor([0.0572, 0.0815, 0.1733, 0.0785, 0.3953, 0.0664, 0.0852, 0.1067], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0411, 0.0497, 0.0347, 0.0401, 0.0436, 0.0428, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:16:50,035 INFO [train.py:903] (2/4) Epoch 23, batch 1600, loss[loss=0.1705, simple_loss=0.2494, pruned_loss=0.04583, over 19410.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2875, pruned_loss=0.06369, over 3840019.55 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:16:53,620 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:17:10,281 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 23:17:20,214 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.863e+02 5.887e+02 6.951e+02 2.426e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-04-02 23:17:50,196 INFO [train.py:903] (2/4) Epoch 23, batch 1650, loss[loss=0.1678, simple_loss=0.2594, pruned_loss=0.03813, over 19655.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2876, pruned_loss=0.06366, over 3841132.89 frames. ], batch size: 55, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:18:51,777 INFO [train.py:903] (2/4) Epoch 23, batch 1700, loss[loss=0.1791, simple_loss=0.2654, pruned_loss=0.04642, over 19616.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2885, pruned_loss=0.0641, over 3821845.98 frames. ], batch size: 50, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:18:53,417 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:21,471 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.018e+02 6.073e+02 7.613e+02 1.748e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 23:19:23,179 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:26,197 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 23:19:42,986 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9667, 1.7978, 1.5200, 1.7895, 1.8189, 1.5248, 1.4819, 1.7509], + device='cuda:2'), covar=tensor([0.1165, 0.1518, 0.1870, 0.1275, 0.1392, 0.0990, 0.1904, 0.1020], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0359, 0.0318, 0.0255, 0.0307, 0.0255, 0.0313, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:19:52,382 INFO [train.py:903] (2/4) Epoch 23, batch 1750, loss[loss=0.2118, simple_loss=0.2914, pruned_loss=0.06608, over 19663.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2884, pruned_loss=0.06416, over 3834935.71 frames. 
], batch size: 59, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:20:41,324 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.9455, 5.3858, 2.9698, 4.7402, 1.1852, 5.4950, 5.3274, 5.4992], + device='cuda:2'), covar=tensor([0.0395, 0.0858, 0.1974, 0.0730, 0.4054, 0.0522, 0.0743, 0.1036], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0412, 0.0497, 0.0347, 0.0400, 0.0435, 0.0429, 0.0458], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:20:53,728 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:20:54,416 INFO [train.py:903] (2/4) Epoch 23, batch 1800, loss[loss=0.1922, simple_loss=0.2834, pruned_loss=0.05053, over 19762.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2874, pruned_loss=0.06358, over 3838092.47 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:21:13,151 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:24,567 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152040.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:26,133 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.33 vs. limit=5.0 +2023-04-02 23:21:26,672 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 5.200e+02 6.601e+02 8.823e+02 1.720e+03, threshold=1.320e+03, percent-clipped=12.0 +2023-04-02 23:21:36,339 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:44,273 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:48,439 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 23:21:55,073 INFO [train.py:903] (2/4) Epoch 23, batch 1850, loss[loss=0.2167, simple_loss=0.3013, pruned_loss=0.06608, over 19478.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2875, pruned_loss=0.06333, over 3838129.92 frames. ], batch size: 64, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:22:27,487 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 23:22:54,485 INFO [train.py:903] (2/4) Epoch 23, batch 1900, loss[loss=0.206, simple_loss=0.2758, pruned_loss=0.06806, over 19793.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2879, pruned_loss=0.06364, over 3844949.06 frames. ], batch size: 47, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:23:09,866 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 23:23:16,373 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 23:23:26,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 4.994e+02 5.968e+02 7.712e+02 2.482e+03, threshold=1.194e+03, percent-clipped=3.0 +2023-04-02 23:23:41,402 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-02 23:23:52,758 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:55,366 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:56,157 INFO [train.py:903] (2/4) Epoch 23, batch 1950, loss[loss=0.1981, simple_loss=0.2859, pruned_loss=0.0551, over 19532.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2867, pruned_loss=0.06293, over 3844360.90 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:24:58,529 INFO [train.py:903] (2/4) Epoch 23, batch 2000, loss[loss=0.2129, simple_loss=0.2981, pruned_loss=0.0638, over 19400.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06293, over 3838882.90 frames. ], batch size: 48, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:25:28,731 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.661e+02 5.613e+02 7.041e+02 1.127e+03, threshold=1.123e+03, percent-clipped=0.0 +2023-04-02 23:25:30,248 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:25:53,687 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 23:25:58,367 INFO [train.py:903] (2/4) Epoch 23, batch 2050, loss[loss=0.1883, simple_loss=0.2655, pruned_loss=0.05557, over 19391.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.0635, over 3827826.77 frames. ], batch size: 48, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:26:09,969 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3318, 1.3917, 1.7719, 1.6401, 2.5348, 2.0778, 2.5542, 1.2387], + device='cuda:2'), covar=tensor([0.2800, 0.4769, 0.2897, 0.2186, 0.1683, 0.2563, 0.1868, 0.4726], + device='cuda:2'), in_proj_covar=tensor([0.0535, 0.0643, 0.0715, 0.0486, 0.0619, 0.0531, 0.0663, 0.0550], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 23:26:13,090 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 23:26:13,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:14,189 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 23:26:29,821 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:36,223 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 23:26:51,153 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4923, 2.2421, 1.6166, 1.4652, 2.0526, 1.3059, 1.3685, 1.9618], + device='cuda:2'), covar=tensor([0.1001, 0.0637, 0.1103, 0.0811, 0.0581, 0.1317, 0.0747, 0.0479], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0316, 0.0339, 0.0267, 0.0250, 0.0341, 0.0292, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:26:58,782 INFO [train.py:903] (2/4) Epoch 23, batch 2100, loss[loss=0.1812, simple_loss=0.2686, pruned_loss=0.04685, over 19787.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06357, over 3831223.04 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:27:27,816 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 23:27:29,283 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4875, 1.4120, 1.4144, 2.0504, 1.6970, 1.7636, 1.9137, 1.5963], + device='cuda:2'), covar=tensor([0.0848, 0.0938, 0.1007, 0.0658, 0.0755, 0.0771, 0.0801, 0.0721], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0223, 0.0239, 0.0226, 0.0212, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 23:27:31,217 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.808e+02 5.705e+02 7.050e+02 1.568e+03, threshold=1.141e+03, percent-clipped=6.0 +2023-04-02 23:27:48,005 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 23:27:59,782 INFO [train.py:903] (2/4) Epoch 23, batch 2150, loss[loss=0.2039, simple_loss=0.2784, pruned_loss=0.06471, over 19787.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2888, pruned_loss=0.06435, over 3831804.05 frames. ], batch size: 48, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:29:00,653 INFO [train.py:903] (2/4) Epoch 23, batch 2200, loss[loss=0.2205, simple_loss=0.3041, pruned_loss=0.0684, over 19529.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2884, pruned_loss=0.06427, over 3842525.66 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:29:01,727 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3606, 3.1340, 2.1662, 2.8013, 0.9380, 3.0495, 2.9311, 3.0312], + device='cuda:2'), covar=tensor([0.1074, 0.1258, 0.2204, 0.1058, 0.3591, 0.0923, 0.1188, 0.1269], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0412, 0.0498, 0.0346, 0.0400, 0.0435, 0.0427, 0.0459], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:29:07,508 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:31,742 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.084e+02 6.029e+02 8.181e+02 1.825e+03, threshold=1.206e+03, percent-clipped=10.0 +2023-04-02 23:29:37,614 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:58,456 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:30:01,460 INFO [train.py:903] (2/4) Epoch 23, batch 2250, loss[loss=0.271, simple_loss=0.3367, pruned_loss=0.1026, over 13645.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2877, pruned_loss=0.06397, over 3820703.73 frames. 
], batch size: 136, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:30:02,905 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6034, 1.3313, 1.5071, 1.5447, 3.1784, 1.1276, 2.4156, 3.6779], + device='cuda:2'), covar=tensor([0.0478, 0.2800, 0.2890, 0.1885, 0.0683, 0.2483, 0.1199, 0.0222], + device='cuda:2'), in_proj_covar=tensor([0.0413, 0.0369, 0.0389, 0.0350, 0.0376, 0.0353, 0.0384, 0.0408], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:31:01,772 INFO [train.py:903] (2/4) Epoch 23, batch 2300, loss[loss=0.242, simple_loss=0.3176, pruned_loss=0.08323, over 19592.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2889, pruned_loss=0.06444, over 3820810.77 frames. ], batch size: 61, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:31:17,223 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 23:31:25,347 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:31:36,142 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.927e+02 5.902e+02 7.617e+02 2.113e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 23:31:55,703 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152559.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:04,344 INFO [train.py:903] (2/4) Epoch 23, batch 2350, loss[loss=0.2236, simple_loss=0.306, pruned_loss=0.07056, over 19783.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2873, pruned_loss=0.06313, over 3822422.63 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:32:13,370 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7654, 4.3164, 3.0073, 3.7987, 1.0413, 4.3503, 4.1737, 4.3220], + device='cuda:2'), covar=tensor([0.0621, 0.1024, 0.1701, 0.0885, 0.4145, 0.0654, 0.0890, 0.1059], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0415, 0.0500, 0.0349, 0.0403, 0.0437, 0.0429, 0.0461], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:32:30,250 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:43,257 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 23:32:45,496 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:54,416 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:03,100 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 23:33:05,156 INFO [train.py:903] (2/4) Epoch 23, batch 2400, loss[loss=0.2337, simple_loss=0.3138, pruned_loss=0.07679, over 19797.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2873, pruned_loss=0.06316, over 3833435.77 frames. 
], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:33:28,184 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:38,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 5.347e+02 6.485e+02 7.646e+02 1.871e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 23:34:06,660 INFO [train.py:903] (2/4) Epoch 23, batch 2450, loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05921, over 19471.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.287, pruned_loss=0.06304, over 3833813.78 frames. ], batch size: 49, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:34:51,429 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:06,552 INFO [train.py:903] (2/4) Epoch 23, batch 2500, loss[loss=0.1682, simple_loss=0.2548, pruned_loss=0.04077, over 19742.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06304, over 3840332.19 frames. ], batch size: 51, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:35:13,351 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3778, 3.1017, 2.3520, 2.7657, 0.9061, 3.0770, 2.9287, 3.0381], + device='cuda:2'), covar=tensor([0.1048, 0.1212, 0.1932, 0.1187, 0.3634, 0.0905, 0.1057, 0.1347], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0416, 0.0502, 0.0351, 0.0404, 0.0440, 0.0431, 0.0464], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:35:40,597 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.968e+02 5.949e+02 7.714e+02 2.745e+03, threshold=1.190e+03, percent-clipped=5.0 +2023-04-02 23:35:42,057 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152744.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:48,959 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:36:08,243 INFO [train.py:903] (2/4) Epoch 23, batch 2550, loss[loss=0.2334, simple_loss=0.311, pruned_loss=0.07793, over 19677.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06319, over 3835040.95 frames. ], batch size: 58, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:36:32,156 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-02 23:36:57,884 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:37:01,058 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 23:37:08,440 INFO [train.py:903] (2/4) Epoch 23, batch 2600, loss[loss=0.1884, simple_loss=0.2594, pruned_loss=0.05876, over 19740.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2868, pruned_loss=0.06329, over 3833343.53 frames. ], batch size: 45, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:37:40,420 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 4.724e+02 5.510e+02 7.301e+02 1.657e+03, threshold=1.102e+03, percent-clipped=4.0 +2023-04-02 23:38:08,554 INFO [train.py:903] (2/4) Epoch 23, batch 2650, loss[loss=0.1686, simple_loss=0.2465, pruned_loss=0.04539, over 18654.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2871, pruned_loss=0.06367, over 3833009.25 frames. 
], batch size: 41, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:38:27,765 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 23:38:32,673 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5470, 1.7203, 2.1097, 1.8927, 3.2862, 2.7346, 3.5297, 1.6884], + device='cuda:2'), covar=tensor([0.2560, 0.4410, 0.2694, 0.1881, 0.1477, 0.2090, 0.1520, 0.4143], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0647, 0.0717, 0.0486, 0.0619, 0.0533, 0.0663, 0.0551], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-02 23:39:08,682 INFO [train.py:903] (2/4) Epoch 23, batch 2700, loss[loss=0.2509, simple_loss=0.3136, pruned_loss=0.09413, over 13896.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.06312, over 3822020.28 frames. ], batch size: 137, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:39:16,620 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:35,101 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3148, 3.6238, 2.2405, 2.2201, 3.3656, 2.0440, 1.6496, 2.3581], + device='cuda:2'), covar=tensor([0.1311, 0.0620, 0.1026, 0.0871, 0.0550, 0.1155, 0.1028, 0.0685], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0316, 0.0336, 0.0265, 0.0248, 0.0338, 0.0290, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:39:42,350 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.895e+02 5.730e+02 7.672e+02 1.465e+03, threshold=1.146e+03, percent-clipped=5.0 +2023-04-02 23:39:43,669 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:43,873 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0184, 1.2285, 1.4958, 0.9071, 2.1535, 3.0438, 2.7341, 3.2773], + device='cuda:2'), covar=tensor([0.1798, 0.3913, 0.3676, 0.2788, 0.0676, 0.0220, 0.0262, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0322, 0.0352, 0.0264, 0.0244, 0.0188, 0.0216, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 23:39:51,726 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:00,040 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:09,446 INFO [train.py:903] (2/4) Epoch 23, batch 2750, loss[loss=0.2396, simple_loss=0.3279, pruned_loss=0.07563, over 19665.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2845, pruned_loss=0.06208, over 3816046.12 frames. ], batch size: 55, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:40:31,181 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:57,898 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:10,184 INFO [train.py:903] (2/4) Epoch 23, batch 2800, loss[loss=0.1834, simple_loss=0.2562, pruned_loss=0.05528, over 19766.00 frames. 
], tot_loss[loss=0.2039, simple_loss=0.2843, pruned_loss=0.06178, over 3822585.46 frames. ], batch size: 47, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:41:28,005 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:42,352 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.940e+02 6.140e+02 7.865e+02 1.529e+03, threshold=1.228e+03, percent-clipped=3.0 +2023-04-02 23:42:02,580 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:10,891 INFO [train.py:903] (2/4) Epoch 23, batch 2850, loss[loss=0.2537, simple_loss=0.3304, pruned_loss=0.08849, over 17447.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06228, over 3829230.18 frames. ], batch size: 101, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:42:11,290 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:36,413 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:09,820 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 23:43:10,996 INFO [train.py:903] (2/4) Epoch 23, batch 2900, loss[loss=0.1883, simple_loss=0.2705, pruned_loss=0.05309, over 19571.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.0626, over 3822314.49 frames. ], batch size: 52, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:43:34,529 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:45,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.545e+02 5.074e+02 6.117e+02 7.609e+02 1.538e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 23:44:10,300 INFO [train.py:903] (2/4) Epoch 23, batch 2950, loss[loss=0.181, simple_loss=0.2548, pruned_loss=0.05361, over 19752.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2867, pruned_loss=0.06298, over 3825242.71 frames. ], batch size: 47, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:44:25,523 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,240 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:54,280 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:45:09,889 INFO [train.py:903] (2/4) Epoch 23, batch 3000, loss[loss=0.1999, simple_loss=0.2832, pruned_loss=0.05832, over 19678.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06392, over 3827557.53 frames. ], batch size: 60, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:45:09,889 INFO [train.py:928] (2/4) Computing validation loss +2023-04-02 23:45:23,394 INFO [train.py:937] (2/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2685, pruned_loss=0.03441, over 944034.00 frames. +2023-04-02 23:45:23,395 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-02 23:45:26,710 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 23:45:57,203 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.963e+02 5.132e+02 6.544e+02 7.997e+02 1.730e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 23:46:06,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1458, 1.8951, 1.7915, 2.0734, 1.7922, 1.8825, 1.7750, 2.0670], + device='cuda:2'), covar=tensor([0.1011, 0.1397, 0.1454, 0.0982, 0.1352, 0.0521, 0.1312, 0.0681], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0354, 0.0311, 0.0252, 0.0302, 0.0250, 0.0307, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:46:24,010 INFO [train.py:903] (2/4) Epoch 23, batch 3050, loss[loss=0.1561, simple_loss=0.2441, pruned_loss=0.03405, over 19768.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06433, over 3827244.53 frames. ], batch size: 47, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:00,366 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153296.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:25,776 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:26,477 INFO [train.py:903] (2/4) Epoch 23, batch 3100, loss[loss=0.1964, simple_loss=0.2868, pruned_loss=0.05297, over 19696.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06406, over 3836227.33 frames. ], batch size: 59, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:33,573 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:54,341 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:59,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 4.897e+02 6.414e+02 9.491e+02 6.432e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 23:48:03,098 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:48:25,986 INFO [train.py:903] (2/4) Epoch 23, batch 3150, loss[loss=0.2291, simple_loss=0.3033, pruned_loss=0.07745, over 13832.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2883, pruned_loss=0.06395, over 3835110.71 frames. ], batch size: 136, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:48:54,102 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 23:49:26,056 INFO [train.py:903] (2/4) Epoch 23, batch 3200, loss[loss=0.2371, simple_loss=0.3155, pruned_loss=0.0793, over 19655.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2878, pruned_loss=0.06387, over 3820261.32 frames. 
], batch size: 55, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:49:54,892 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:00,120 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.043e+02 6.093e+02 8.078e+02 1.420e+03, threshold=1.219e+03, percent-clipped=2.0 +2023-04-02 23:50:17,405 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:26,643 INFO [train.py:903] (2/4) Epoch 23, batch 3250, loss[loss=0.2172, simple_loss=0.2977, pruned_loss=0.06837, over 19530.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06424, over 3831038.05 frames. ], batch size: 54, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:50:43,095 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:48,933 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153484.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:51:20,332 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3825, 3.1386, 2.1036, 2.8061, 0.7855, 3.0959, 2.9645, 2.9857], + device='cuda:2'), covar=tensor([0.1013, 0.1186, 0.2086, 0.1040, 0.3678, 0.0887, 0.1108, 0.1318], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0414, 0.0497, 0.0348, 0.0400, 0.0437, 0.0429, 0.0463], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:51:20,809 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 23:51:27,759 INFO [train.py:903] (2/4) Epoch 23, batch 3300, loss[loss=0.1754, simple_loss=0.2606, pruned_loss=0.04514, over 19428.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2877, pruned_loss=0.06379, over 3821214.18 frames. ], batch size: 48, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:51:34,810 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 23:52:00,746 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.081e+02 6.216e+02 8.009e+02 2.047e+03, threshold=1.243e+03, percent-clipped=5.0 +2023-04-02 23:52:26,360 INFO [train.py:903] (2/4) Epoch 23, batch 3350, loss[loss=0.2741, simple_loss=0.3401, pruned_loss=0.1041, over 13966.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2885, pruned_loss=0.06429, over 3826607.51 frames. ], batch size: 136, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:53:00,290 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:17,358 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9462, 1.3021, 1.6807, 0.5527, 2.0728, 2.4854, 2.1686, 2.6194], + device='cuda:2'), covar=tensor([0.1559, 0.3680, 0.3123, 0.2711, 0.0610, 0.0271, 0.0352, 0.0357], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0324, 0.0352, 0.0265, 0.0244, 0.0189, 0.0217, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-02 23:53:26,223 INFO [train.py:903] (2/4) Epoch 23, batch 3400, loss[loss=0.2193, simple_loss=0.3032, pruned_loss=0.06768, over 19568.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2884, pruned_loss=0.06422, over 3823644.69 frames. 
], batch size: 61, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:53:56,990 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153640.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:59,273 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3515, 3.0872, 2.2551, 2.8287, 0.6423, 3.0894, 2.9322, 3.0323], + device='cuda:2'), covar=tensor([0.1058, 0.1278, 0.2010, 0.1020, 0.3930, 0.0906, 0.1095, 0.1374], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0414, 0.0497, 0.0348, 0.0401, 0.0438, 0.0430, 0.0462], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:54:01,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.295e+02 6.743e+02 8.549e+02 2.424e+03, threshold=1.349e+03, percent-clipped=5.0 +2023-04-02 23:54:28,039 INFO [train.py:903] (2/4) Epoch 23, batch 3450, loss[loss=0.2007, simple_loss=0.2689, pruned_loss=0.06629, over 19296.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2881, pruned_loss=0.06354, over 3832205.31 frames. ], batch size: 44, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:54:31,540 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 23:54:51,835 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-02 23:55:10,117 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 23:55:28,715 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-02 23:55:30,145 INFO [train.py:903] (2/4) Epoch 23, batch 3500, loss[loss=0.2872, simple_loss=0.3418, pruned_loss=0.1163, over 13177.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2884, pruned_loss=0.06407, over 3811968.84 frames. ], batch size: 136, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:55:35,124 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5345, 2.2594, 2.3362, 2.7098, 2.3979, 2.1281, 2.1011, 2.5341], + device='cuda:2'), covar=tensor([0.1041, 0.1787, 0.1492, 0.1051, 0.1457, 0.0610, 0.1446, 0.0752], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0359, 0.0316, 0.0255, 0.0307, 0.0253, 0.0312, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:56:02,445 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.938e+02 5.821e+02 7.521e+02 2.332e+03, threshold=1.164e+03, percent-clipped=1.0 +2023-04-02 23:56:17,969 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:56:30,177 INFO [train.py:903] (2/4) Epoch 23, batch 3550, loss[loss=0.1983, simple_loss=0.2903, pruned_loss=0.05319, over 19662.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2885, pruned_loss=0.06385, over 3813301.80 frames. 
], batch size: 58, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:34,059 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7117, 1.6586, 1.6355, 2.1627, 1.5450, 2.0122, 1.8523, 1.8605], + device='cuda:2'), covar=tensor([0.0816, 0.0892, 0.0926, 0.0713, 0.0885, 0.0729, 0.0945, 0.0657], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0218, 0.0223, 0.0237, 0.0225, 0.0210, 0.0185, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-02 23:56:44,119 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0273, 3.4799, 1.9399, 2.0447, 3.1444, 1.6892, 1.4713, 2.1683], + device='cuda:2'), covar=tensor([0.1308, 0.0548, 0.1121, 0.0826, 0.0493, 0.1246, 0.1010, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0316, 0.0341, 0.0266, 0.0249, 0.0342, 0.0291, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-02 23:56:50,061 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:57:30,147 INFO [train.py:903] (2/4) Epoch 23, batch 3600, loss[loss=0.1927, simple_loss=0.2729, pruned_loss=0.05628, over 19778.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2878, pruned_loss=0.06343, over 3821295.51 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:05,068 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 5.155e+02 6.351e+02 8.015e+02 2.586e+03, threshold=1.270e+03, percent-clipped=6.0 +2023-04-02 23:58:12,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:58:30,880 INFO [train.py:903] (2/4) Epoch 23, batch 3650, loss[loss=0.1973, simple_loss=0.2704, pruned_loss=0.06213, over 19389.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06406, over 3803405.80 frames. ], batch size: 48, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:42,975 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:08,939 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:31,665 INFO [train.py:903] (2/4) Epoch 23, batch 3700, loss[loss=0.2299, simple_loss=0.3065, pruned_loss=0.0767, over 19437.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2885, pruned_loss=0.06419, over 3810802.13 frames. ], batch size: 64, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:00:00,704 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153941.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:00:04,691 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.617e+02 5.510e+02 6.874e+02 2.344e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-03 00:00:23,763 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.46 vs. limit=2.0 +2023-04-03 00:00:31,959 INFO [train.py:903] (2/4) Epoch 23, batch 3750, loss[loss=0.2162, simple_loss=0.2997, pruned_loss=0.0663, over 19667.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.288, pruned_loss=0.06397, over 3810816.52 frames. 
], batch size: 60, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:27,863 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:01:33,245 INFO [train.py:903] (2/4) Epoch 23, batch 3800, loss[loss=0.1842, simple_loss=0.2581, pruned_loss=0.05513, over 18606.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2877, pruned_loss=0.06367, over 3827496.11 frames. ], batch size: 41, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:59,815 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154036.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:02:05,111 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 00:02:08,273 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 4.923e+02 6.103e+02 7.526e+02 2.694e+03, threshold=1.221e+03, percent-clipped=9.0 +2023-04-03 00:02:33,014 INFO [train.py:903] (2/4) Epoch 23, batch 3850, loss[loss=0.226, simple_loss=0.2996, pruned_loss=0.07626, over 19658.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2868, pruned_loss=0.06308, over 3832561.87 frames. ], batch size: 55, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:02:35,323 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154067.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:03:09,338 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:14,333 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-03 00:03:15,500 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.84 vs. limit=5.0 +2023-04-03 00:03:18,572 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:35,946 INFO [train.py:903] (2/4) Epoch 23, batch 3900, loss[loss=0.2259, simple_loss=0.2949, pruned_loss=0.0785, over 19656.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06203, over 3837384.08 frames. ], batch size: 58, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:09,481 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 4.608e+02 5.656e+02 7.392e+02 1.919e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-04-03 00:04:22,848 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:04:37,503 INFO [train.py:903] (2/4) Epoch 23, batch 3950, loss[loss=0.1695, simple_loss=0.2563, pruned_loss=0.04136, over 19375.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2852, pruned_loss=0.06228, over 3831145.48 frames. ], batch size: 47, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:44,231 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 00:04:52,331 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:05:37,019 INFO [train.py:903] (2/4) Epoch 23, batch 4000, loss[loss=0.1852, simple_loss=0.269, pruned_loss=0.05068, over 19580.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06179, over 3837291.26 frames. 
], batch size: 52, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:05:50,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 00:06:02,281 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 00:06:06,085 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:06:12,359 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.130e+02 6.145e+02 8.525e+02 2.203e+03, threshold=1.229e+03, percent-clipped=9.0 +2023-04-03 00:06:27,024 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 00:06:37,053 INFO [train.py:903] (2/4) Epoch 23, batch 4050, loss[loss=0.1722, simple_loss=0.2521, pruned_loss=0.04614, over 19060.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2859, pruned_loss=0.06244, over 3830057.64 frames. ], batch size: 42, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:07:01,679 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154285.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:23,109 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:29,684 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:37,600 INFO [train.py:903] (2/4) Epoch 23, batch 4100, loss[loss=0.2368, simple_loss=0.3189, pruned_loss=0.07731, over 19548.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.287, pruned_loss=0.06344, over 3824945.49 frames. ], batch size: 56, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:08:11,157 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.105e+02 5.940e+02 7.682e+02 1.555e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-03 00:08:13,551 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 00:08:25,638 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154355.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:08:39,414 INFO [train.py:903] (2/4) Epoch 23, batch 4150, loss[loss=0.2728, simple_loss=0.345, pruned_loss=0.1004, over 14059.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.06377, over 3796028.21 frames. ], batch size: 136, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:22,074 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:09:33,989 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154411.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:09:39,112 INFO [train.py:903] (2/4) Epoch 23, batch 4200, loss[loss=0.2172, simple_loss=0.2971, pruned_loss=0.06859, over 19548.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06362, over 3799546.52 frames. ], batch size: 56, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:41,410 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-03 00:10:07,072 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:14,758 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.103e+02 4.744e+02 5.863e+02 7.378e+02 1.705e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-03 00:10:17,220 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154446.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:40,158 INFO [train.py:903] (2/4) Epoch 23, batch 4250, loss[loss=0.1864, simple_loss=0.2578, pruned_loss=0.05748, over 19767.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2861, pruned_loss=0.06258, over 3815435.13 frames. ], batch size: 47, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:10:54,212 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 00:11:05,348 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 00:11:12,109 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:11:40,259 INFO [train.py:903] (2/4) Epoch 23, batch 4300, loss[loss=0.1679, simple_loss=0.2567, pruned_loss=0.03957, over 19862.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2856, pruned_loss=0.06213, over 3821268.77 frames. ], batch size: 52, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:11:53,672 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154526.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:11:55,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9513, 1.2537, 1.0430, 0.9361, 1.1749, 0.9058, 0.9360, 1.1445], + device='cuda:2'), covar=tensor([0.0585, 0.0606, 0.0707, 0.0605, 0.0409, 0.0874, 0.0441, 0.0381], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0313, 0.0337, 0.0263, 0.0246, 0.0336, 0.0289, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:12:13,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.652e+02 5.888e+02 7.584e+02 1.931e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 00:12:24,541 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154553.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:33,756 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 00:12:36,080 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:41,454 INFO [train.py:903] (2/4) Epoch 23, batch 4350, loss[loss=0.2381, simple_loss=0.3191, pruned_loss=0.07859, over 19575.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2863, pruned_loss=0.06283, over 3809405.78 frames. 
], batch size: 52, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:01,190 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:13:04,725 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0975, 2.0557, 1.8268, 2.1707, 2.0372, 1.8224, 1.7748, 2.0370], + device='cuda:2'), covar=tensor([0.1053, 0.1389, 0.1545, 0.0973, 0.1299, 0.0601, 0.1430, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0355, 0.0313, 0.0252, 0.0302, 0.0251, 0.0311, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:13:40,250 INFO [train.py:903] (2/4) Epoch 23, batch 4400, loss[loss=0.2056, simple_loss=0.2936, pruned_loss=0.05879, over 19763.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2876, pruned_loss=0.06354, over 3805625.20 frames. ], batch size: 54, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:04,354 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 00:14:14,044 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.197e+02 6.555e+02 7.915e+02 1.480e+03, threshold=1.311e+03, percent-clipped=6.0 +2023-04-03 00:14:15,172 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 00:14:16,473 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:18,495 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154648.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:23,890 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:27,590 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154656.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:38,571 INFO [train.py:903] (2/4) Epoch 23, batch 4450, loss[loss=0.2068, simple_loss=0.2882, pruned_loss=0.06272, over 19398.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2876, pruned_loss=0.06371, over 3803126.42 frames. ], batch size: 70, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:58,445 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154681.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:18,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:19,415 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154699.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:15:38,748 INFO [train.py:903] (2/4) Epoch 23, batch 4500, loss[loss=0.172, simple_loss=0.2471, pruned_loss=0.04846, over 19733.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2879, pruned_loss=0.06399, over 3803415.30 frames. 
], batch size: 47, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:06,378 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154738.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:13,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.922e+02 6.448e+02 7.735e+02 1.395e+03, threshold=1.290e+03, percent-clipped=1.0 +2023-04-03 00:16:38,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:41,138 INFO [train.py:903] (2/4) Epoch 23, batch 4550, loss[loss=0.1724, simple_loss=0.2555, pruned_loss=0.04465, over 19846.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2868, pruned_loss=0.06342, over 3806033.29 frames. ], batch size: 52, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:43,828 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:48,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 00:17:00,100 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154782.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:08,981 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6175, 1.2539, 1.2631, 1.5075, 1.0809, 1.3909, 1.2420, 1.4434], + device='cuda:2'), covar=tensor([0.1168, 0.1266, 0.1615, 0.1023, 0.1370, 0.0626, 0.1535, 0.0853], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0356, 0.0314, 0.0253, 0.0304, 0.0253, 0.0312, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:17:11,891 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 00:17:16,355 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.8337, 5.3410, 3.0826, 4.6626, 1.3322, 5.3513, 5.2243, 5.3954], + device='cuda:2'), covar=tensor([0.0394, 0.0721, 0.1814, 0.0752, 0.3690, 0.0613, 0.0832, 0.1112], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0414, 0.0496, 0.0348, 0.0399, 0.0438, 0.0428, 0.0463], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:17:31,863 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154807.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:17:34,168 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154809.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:17:39,915 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154814.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:41,847 INFO [train.py:903] (2/4) Epoch 23, batch 4600, loss[loss=0.2053, simple_loss=0.279, pruned_loss=0.06581, over 19492.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2865, pruned_loss=0.063, over 3824207.33 frames. 
], batch size: 49, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:17:43,474 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154817.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:02,706 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154834.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:04,534 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:13,465 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:17,276 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.757e+02 5.456e+02 7.137e+02 2.039e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-04-03 00:18:39,908 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8244, 1.3507, 1.5229, 1.7574, 3.4161, 1.2010, 2.4966, 3.9050], + device='cuda:2'), covar=tensor([0.0558, 0.2956, 0.2989, 0.1854, 0.0712, 0.2583, 0.1356, 0.0234], + device='cuda:2'), in_proj_covar=tensor([0.0411, 0.0367, 0.0389, 0.0350, 0.0373, 0.0350, 0.0383, 0.0404], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:18:41,884 INFO [train.py:903] (2/4) Epoch 23, batch 4650, loss[loss=0.2494, simple_loss=0.3178, pruned_loss=0.09046, over 14348.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06263, over 3822579.80 frames. ], batch size: 136, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:18:45,726 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2519, 1.2456, 1.2264, 1.3179, 1.0562, 1.3659, 1.3439, 1.2996], + device='cuda:2'), covar=tensor([0.0943, 0.0997, 0.1129, 0.0727, 0.0883, 0.0873, 0.0818, 0.0784], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0240, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-03 00:18:57,537 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 00:19:09,932 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 00:19:42,546 INFO [train.py:903] (2/4) Epoch 23, batch 4700, loss[loss=0.2303, simple_loss=0.3052, pruned_loss=0.07771, over 19700.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06261, over 3823170.40 frames. ], batch size: 63, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:20:04,431 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 00:20:17,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.550e+02 5.511e+02 7.065e+02 1.410e+03, threshold=1.102e+03, percent-clipped=2.0 +2023-04-03 00:20:25,039 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154951.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:28,242 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:44,142 INFO [train.py:903] (2/4) Epoch 23, batch 4750, loss[loss=0.1713, simple_loss=0.2515, pruned_loss=0.04559, over 19782.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2864, pruned_loss=0.06246, over 3829710.04 frames. 
], batch size: 48, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:21:00,298 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:12,332 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:17,051 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:21,745 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4079, 2.5273, 2.6999, 3.2254, 2.5881, 3.2661, 2.6700, 2.3877], + device='cuda:2'), covar=tensor([0.4449, 0.3989, 0.1964, 0.2387, 0.4123, 0.1930, 0.5011, 0.3471], + device='cuda:2'), in_proj_covar=tensor([0.0907, 0.0975, 0.0723, 0.0937, 0.0886, 0.0825, 0.0846, 0.0788], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 00:21:45,312 INFO [train.py:903] (2/4) Epoch 23, batch 4800, loss[loss=0.1927, simple_loss=0.2791, pruned_loss=0.05314, over 19645.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2857, pruned_loss=0.06207, over 3838085.03 frames. ], batch size: 55, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:21:49,087 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:54,304 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:18,861 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:19,577 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.324e+02 6.216e+02 7.674e+02 2.163e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-03 00:22:26,124 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:44,537 INFO [train.py:903] (2/4) Epoch 23, batch 4850, loss[loss=0.1977, simple_loss=0.2652, pruned_loss=0.06512, over 19802.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2863, pruned_loss=0.06236, over 3840257.01 frames. 
], batch size: 49, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:22:49,256 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6860, 1.6975, 1.5398, 1.3431, 1.2289, 1.3361, 0.3134, 0.6333], + device='cuda:2'), covar=tensor([0.0686, 0.0694, 0.0450, 0.0702, 0.1324, 0.0861, 0.1369, 0.1211], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0353, 0.0360, 0.0383, 0.0461, 0.0389, 0.0336, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 00:22:49,263 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155070.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:22:49,293 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2282, 1.3168, 1.4900, 1.4067, 2.1392, 1.8607, 2.2496, 0.7723], + device='cuda:2'), covar=tensor([0.2696, 0.4470, 0.2811, 0.2198, 0.1706, 0.2399, 0.1417, 0.4903], + device='cuda:2'), in_proj_covar=tensor([0.0538, 0.0647, 0.0720, 0.0490, 0.0623, 0.0535, 0.0660, 0.0555], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 00:23:03,367 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:10,497 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 00:23:11,887 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:21,481 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155095.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:23:29,056 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 00:23:32,631 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:34,413 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 00:23:34,438 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 00:23:44,625 INFO [train.py:903] (2/4) Epoch 23, batch 4900, loss[loss=0.2246, simple_loss=0.311, pruned_loss=0.06916, over 19303.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2874, pruned_loss=0.06335, over 3824993.59 frames. ], batch size: 66, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:23:44,637 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 00:24:04,405 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 00:24:20,244 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 5.163e+02 5.938e+02 7.647e+02 1.407e+03, threshold=1.188e+03, percent-clipped=5.0 +2023-04-03 00:24:25,536 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-03 00:24:46,212 INFO [train.py:903] (2/4) Epoch 23, batch 4950, loss[loss=0.1942, simple_loss=0.2747, pruned_loss=0.05687, over 19598.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2868, pruned_loss=0.06286, over 3809547.30 frames. 
], batch size: 50, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:01,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 00:25:21,537 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155197.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:22,308 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 00:25:29,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0033, 2.0460, 2.3376, 2.7299, 2.0729, 2.5762, 2.4282, 2.1786], + device='cuda:2'), covar=tensor([0.4140, 0.3976, 0.1845, 0.2452, 0.4103, 0.2219, 0.4374, 0.3199], + device='cuda:2'), in_proj_covar=tensor([0.0910, 0.0978, 0.0725, 0.0940, 0.0890, 0.0827, 0.0847, 0.0788], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 00:25:34,805 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:44,524 INFO [train.py:903] (2/4) Epoch 23, batch 5000, loss[loss=0.2174, simple_loss=0.302, pruned_loss=0.06644, over 19670.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2864, pruned_loss=0.0629, over 3812521.84 frames. ], batch size: 58, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:52,531 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 00:25:54,039 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8166, 1.9511, 2.2304, 2.2867, 1.7190, 2.1711, 2.2661, 2.0845], + device='cuda:2'), covar=tensor([0.4085, 0.3677, 0.1858, 0.2473, 0.3879, 0.2195, 0.4628, 0.3233], + device='cuda:2'), in_proj_covar=tensor([0.0911, 0.0979, 0.0726, 0.0941, 0.0891, 0.0828, 0.0849, 0.0790], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 00:26:02,920 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:26:03,632 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 00:26:19,060 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.751e+02 5.889e+02 7.363e+02 1.722e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 00:26:43,567 INFO [train.py:903] (2/4) Epoch 23, batch 5050, loss[loss=0.2246, simple_loss=0.299, pruned_loss=0.07511, over 13207.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06296, over 3814297.38 frames. ], batch size: 135, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:17,601 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 00:27:41,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.21 vs. limit=5.0 +2023-04-03 00:27:42,609 INFO [train.py:903] (2/4) Epoch 23, batch 5100, loss[loss=0.2129, simple_loss=0.2902, pruned_loss=0.06779, over 19673.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2879, pruned_loss=0.06389, over 3805102.81 frames. 
], batch size: 55, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:45,275 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1654, 2.2041, 2.4324, 2.8739, 2.2440, 2.8082, 2.4182, 2.2600], + device='cuda:2'), covar=tensor([0.4600, 0.4381, 0.2063, 0.2876, 0.4830, 0.2309, 0.5244, 0.3658], + device='cuda:2'), in_proj_covar=tensor([0.0907, 0.0976, 0.0722, 0.0937, 0.0888, 0.0824, 0.0847, 0.0787], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 00:27:53,104 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 00:27:56,483 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 00:28:01,507 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 00:28:10,592 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:18,270 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.084e+02 6.467e+02 7.878e+02 1.414e+03, threshold=1.293e+03, percent-clipped=6.0 +2023-04-03 00:28:36,978 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:43,671 INFO [train.py:903] (2/4) Epoch 23, batch 5150, loss[loss=0.2085, simple_loss=0.284, pruned_loss=0.06648, over 19766.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2864, pruned_loss=0.06288, over 3824930.82 frames. ], batch size: 54, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:28:44,036 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6401, 1.4458, 1.5833, 1.5855, 3.2246, 1.1935, 2.4404, 3.6803], + device='cuda:2'), covar=tensor([0.0517, 0.2709, 0.2829, 0.1825, 0.0693, 0.2453, 0.1191, 0.0229], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0370, 0.0392, 0.0352, 0.0376, 0.0354, 0.0387, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:28:56,745 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 00:29:08,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:29:13,129 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4068, 1.5254, 1.8815, 1.6862, 2.7257, 2.2281, 2.8713, 1.2818], + device='cuda:2'), covar=tensor([0.2488, 0.4234, 0.2615, 0.1864, 0.1491, 0.2175, 0.1418, 0.4437], + device='cuda:2'), in_proj_covar=tensor([0.0538, 0.0647, 0.0719, 0.0490, 0.0622, 0.0535, 0.0659, 0.0553], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 00:29:30,422 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-03 00:29:38,193 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9355, 4.8795, 5.9103, 5.9160, 2.5501, 5.6708, 4.6021, 5.2149], + device='cuda:2'), covar=tensor([0.1789, 0.1215, 0.0701, 0.0806, 0.6108, 0.1465, 0.1104, 0.1465], + device='cuda:2'), in_proj_covar=tensor([0.0787, 0.0751, 0.0958, 0.0838, 0.0841, 0.0719, 0.0574, 0.0890], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 00:29:45,064 INFO [train.py:903] (2/4) Epoch 23, batch 5200, loss[loss=0.2536, simple_loss=0.3275, pruned_loss=0.08983, over 19397.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2878, pruned_loss=0.06353, over 3817667.11 frames. ], batch size: 70, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:29:58,654 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 00:30:02,265 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:19,763 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 5.305e+02 6.432e+02 7.969e+02 2.733e+03, threshold=1.286e+03, percent-clipped=6.0 +2023-04-03 00:30:30,701 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:30,770 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:41,480 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 00:30:44,915 INFO [train.py:903] (2/4) Epoch 23, batch 5250, loss[loss=0.1601, simple_loss=0.251, pruned_loss=0.03459, over 19834.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2876, pruned_loss=0.06311, over 3821238.75 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:30:55,583 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155475.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:59,161 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:31:45,244 INFO [train.py:903] (2/4) Epoch 23, batch 5300, loss[loss=0.2044, simple_loss=0.2753, pruned_loss=0.06676, over 16047.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2869, pruned_loss=0.06307, over 3825611.57 frames. ], batch size: 35, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:32:04,591 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 00:32:21,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 4.703e+02 5.856e+02 7.687e+02 1.612e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 00:32:22,914 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:32:46,438 INFO [train.py:903] (2/4) Epoch 23, batch 5350, loss[loss=0.1973, simple_loss=0.2846, pruned_loss=0.05494, over 19628.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2874, pruned_loss=0.06304, over 3828808.51 frames. ], batch size: 57, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:18,092 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-03 00:33:46,942 INFO [train.py:903] (2/4) Epoch 23, batch 5400, loss[loss=0.2567, simple_loss=0.3344, pruned_loss=0.08948, over 19216.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2878, pruned_loss=0.06339, over 3814966.46 frames. ], batch size: 69, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:56,233 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155623.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:34:21,904 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.892e+02 4.747e+02 5.806e+02 7.220e+02 1.360e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 00:34:36,324 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9799, 1.2400, 1.6008, 0.8528, 2.2905, 3.0254, 2.6916, 3.2366], + device='cuda:2'), covar=tensor([0.1778, 0.3970, 0.3550, 0.2878, 0.0631, 0.0223, 0.0264, 0.0303], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0325, 0.0355, 0.0265, 0.0246, 0.0190, 0.0218, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 00:34:48,075 INFO [train.py:903] (2/4) Epoch 23, batch 5450, loss[loss=0.215, simple_loss=0.2837, pruned_loss=0.07312, over 19396.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2872, pruned_loss=0.06311, over 3817062.14 frames. ], batch size: 48, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:34:58,349 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155675.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:10,205 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2422, 1.5010, 1.9408, 1.6984, 3.2244, 4.6731, 4.4495, 5.0643], + device='cuda:2'), covar=tensor([0.1730, 0.3748, 0.3402, 0.2234, 0.0550, 0.0187, 0.0181, 0.0176], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0325, 0.0354, 0.0265, 0.0246, 0.0190, 0.0218, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 00:35:39,183 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 00:35:39,967 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:47,576 INFO [train.py:903] (2/4) Epoch 23, batch 5500, loss[loss=0.1867, simple_loss=0.2586, pruned_loss=0.05745, over 19739.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2866, pruned_loss=0.06267, over 3833511.06 frames. ], batch size: 46, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:10,860 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:36:13,476 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 00:36:14,848 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6245, 4.1951, 2.7549, 3.7067, 0.7724, 4.1408, 4.0580, 4.1419], + device='cuda:2'), covar=tensor([0.0634, 0.0953, 0.1854, 0.0841, 0.4218, 0.0667, 0.0864, 0.1071], + device='cuda:2'), in_proj_covar=tensor([0.0518, 0.0418, 0.0502, 0.0352, 0.0404, 0.0442, 0.0433, 0.0467], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:36:21,502 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4927, 1.6640, 2.0351, 1.8515, 2.7438, 2.1992, 2.7239, 1.3743], + device='cuda:2'), covar=tensor([0.2713, 0.4513, 0.2857, 0.2071, 0.1680, 0.2534, 0.1826, 0.4679], + device='cuda:2'), in_proj_covar=tensor([0.0539, 0.0650, 0.0722, 0.0492, 0.0625, 0.0539, 0.0662, 0.0556], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 00:36:24,172 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.057e+02 6.298e+02 8.158e+02 1.659e+03, threshold=1.260e+03, percent-clipped=6.0 +2023-04-03 00:36:46,707 INFO [train.py:903] (2/4) Epoch 23, batch 5550, loss[loss=0.1896, simple_loss=0.2704, pruned_loss=0.05443, over 19722.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06282, over 3829735.16 frames. ], batch size: 51, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:56,206 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 00:37:30,540 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:30,682 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:34,227 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3070, 3.5453, 2.1783, 2.1817, 3.2541, 1.9190, 1.7819, 2.3917], + device='cuda:2'), covar=tensor([0.1279, 0.0591, 0.1023, 0.0862, 0.0570, 0.1196, 0.0915, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0314, 0.0336, 0.0265, 0.0247, 0.0339, 0.0289, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:37:42,232 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 00:37:48,755 INFO [train.py:903] (2/4) Epoch 23, batch 5600, loss[loss=0.1813, simple_loss=0.2601, pruned_loss=0.05122, over 19775.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2863, pruned_loss=0.06337, over 3815229.46 frames. 
], batch size: 48, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:37:52,360 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155819.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:59,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0770, 2.8614, 2.3301, 2.2823, 2.1050, 2.4476, 1.1227, 2.0314], + device='cuda:2'), covar=tensor([0.0709, 0.0637, 0.0694, 0.1193, 0.1124, 0.1075, 0.1398, 0.1160], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0353, 0.0360, 0.0384, 0.0462, 0.0389, 0.0337, 0.0342], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 00:38:01,955 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 00:38:02,594 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:23,383 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 5.365e+02 7.033e+02 8.601e+02 1.530e+03, threshold=1.407e+03, percent-clipped=6.0 +2023-04-03 00:38:48,662 INFO [train.py:903] (2/4) Epoch 23, batch 5650, loss[loss=0.2258, simple_loss=0.3054, pruned_loss=0.07304, over 18796.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06302, over 3831592.53 frames. ], batch size: 74, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:38:50,823 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 00:39:33,344 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 00:39:47,803 INFO [train.py:903] (2/4) Epoch 23, batch 5700, loss[loss=0.2201, simple_loss=0.2992, pruned_loss=0.07054, over 19577.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06292, over 3835149.77 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:10,839 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:17,494 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2119, 1.2693, 1.7398, 1.2502, 2.6748, 3.7944, 3.4718, 3.9522], + device='cuda:2'), covar=tensor([0.1686, 0.3954, 0.3400, 0.2561, 0.0666, 0.0170, 0.0215, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0324, 0.0353, 0.0264, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 00:40:24,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.906e+02 6.184e+02 7.924e+02 2.131e+03, threshold=1.237e+03, percent-clipped=2.0 +2023-04-03 00:40:47,842 INFO [train.py:903] (2/4) Epoch 23, batch 5750, loss[loss=0.2013, simple_loss=0.2789, pruned_loss=0.06187, over 19871.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06296, over 3836918.09 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:49,191 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155967.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:51,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 00:40:58,274 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. 
limit=5.0 +2023-04-03 00:40:59,648 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 00:41:04,147 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 00:41:50,977 INFO [train.py:903] (2/4) Epoch 23, batch 5800, loss[loss=0.183, simple_loss=0.2746, pruned_loss=0.04571, over 19615.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2868, pruned_loss=0.06301, over 3823949.31 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:41:54,507 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:42:25,081 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 4.982e+02 6.301e+02 7.857e+02 1.493e+03, threshold=1.260e+03, percent-clipped=3.0 +2023-04-03 00:42:39,982 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 00:42:50,181 INFO [train.py:903] (2/4) Epoch 23, batch 5850, loss[loss=0.1935, simple_loss=0.2776, pruned_loss=0.05471, over 19780.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06329, over 3822314.21 frames. ], batch size: 56, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:42:56,254 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 00:43:08,195 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:14,906 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156088.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:48,543 INFO [train.py:903] (2/4) Epoch 23, batch 5900, loss[loss=0.226, simple_loss=0.3123, pruned_loss=0.06985, over 19753.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06345, over 3825660.36 frames. ], batch size: 63, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:43:52,965 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 00:44:08,166 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-03 00:44:10,890 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:14,912 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 00:44:24,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.043e+02 6.365e+02 8.179e+02 2.050e+03, threshold=1.273e+03, percent-clipped=8.0 +2023-04-03 00:44:25,632 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:48,115 INFO [train.py:903] (2/4) Epoch 23, batch 5950, loss[loss=0.1999, simple_loss=0.2842, pruned_loss=0.05782, over 19619.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.06335, over 3827734.86 frames. 
], batch size: 57, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:45:19,169 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:47,958 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:48,678 INFO [train.py:903] (2/4) Epoch 23, batch 6000, loss[loss=0.2548, simple_loss=0.3348, pruned_loss=0.08736, over 19693.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.06316, over 3822116.32 frames. ], batch size: 59, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:45:48,678 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 00:46:01,153 INFO [train.py:937] (2/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2684, pruned_loss=0.03439, over 944034.00 frames. +2023-04-03 00:46:01,154 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 00:46:23,269 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:46:37,247 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.873e+02 6.527e+02 8.069e+02 1.468e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-03 00:46:55,713 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156261.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:47:01,908 INFO [train.py:903] (2/4) Epoch 23, batch 6050, loss[loss=0.1601, simple_loss=0.2413, pruned_loss=0.03948, over 19360.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.06463, over 3816492.72 frames. ], batch size: 47, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:47:13,027 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-03 00:47:53,118 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:02,113 INFO [train.py:903] (2/4) Epoch 23, batch 6100, loss[loss=0.2241, simple_loss=0.2958, pruned_loss=0.07617, over 19461.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2886, pruned_loss=0.06452, over 3812347.40 frames. ], batch size: 49, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:48:27,997 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:37,362 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.768e+02 6.249e+02 8.138e+02 1.749e+03, threshold=1.250e+03, percent-clipped=2.0 +2023-04-03 00:48:58,846 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:01,839 INFO [train.py:903] (2/4) Epoch 23, batch 6150, loss[loss=0.2553, simple_loss=0.3325, pruned_loss=0.08902, over 19686.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2893, pruned_loss=0.06443, over 3825699.63 frames. ], batch size: 58, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:49:31,080 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:31,825 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 00:50:00,565 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156415.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:01,341 INFO [train.py:903] (2/4) Epoch 23, batch 6200, loss[loss=0.2262, simple_loss=0.3057, pruned_loss=0.07333, over 19016.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2889, pruned_loss=0.06412, over 3830566.47 frames. ], batch size: 75, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:50:22,384 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156432.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:38,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.815e+02 5.704e+02 6.895e+02 2.552e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-03 00:51:02,803 INFO [train.py:903] (2/4) Epoch 23, batch 6250, loss[loss=0.2113, simple_loss=0.2882, pruned_loss=0.06724, over 19756.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06405, over 3838762.40 frames. ], batch size: 51, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:51:23,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 00:51:29,514 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6208, 4.7513, 5.3271, 5.3251, 2.2447, 4.9795, 4.3016, 5.0549], + device='cuda:2'), covar=tensor([0.1604, 0.1343, 0.0541, 0.0638, 0.5818, 0.0876, 0.0624, 0.1075], + device='cuda:2'), in_proj_covar=tensor([0.0780, 0.0746, 0.0948, 0.0826, 0.0833, 0.0715, 0.0569, 0.0880], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 00:51:32,671 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 00:52:02,822 INFO [train.py:903] (2/4) Epoch 23, batch 6300, loss[loss=0.248, simple_loss=0.3227, pruned_loss=0.08669, over 19463.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2889, pruned_loss=0.0644, over 3844102.03 frames. ], batch size: 64, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:52:04,450 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:34,289 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156542.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:39,263 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.353e+02 6.743e+02 8.019e+02 1.408e+03, threshold=1.349e+03, percent-clipped=4.0 +2023-04-03 00:52:40,674 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:03,413 INFO [train.py:903] (2/4) Epoch 23, batch 6350, loss[loss=0.2108, simple_loss=0.2968, pruned_loss=0.06241, over 19787.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2879, pruned_loss=0.06375, over 3837930.17 frames. 
], batch size: 56, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:53:17,261 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:30,645 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0317, 1.8623, 1.6980, 2.0649, 1.8410, 1.7615, 1.7065, 2.0166], + device='cuda:2'), covar=tensor([0.1100, 0.1573, 0.1559, 0.1140, 0.1368, 0.0591, 0.1531, 0.0791], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0355, 0.0313, 0.0252, 0.0303, 0.0253, 0.0310, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 00:54:02,671 INFO [train.py:903] (2/4) Epoch 23, batch 6400, loss[loss=0.1751, simple_loss=0.2576, pruned_loss=0.0463, over 16455.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2874, pruned_loss=0.06378, over 3837613.10 frames. ], batch size: 36, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:54:39,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 4.834e+02 5.927e+02 7.987e+02 2.615e+03, threshold=1.185e+03, percent-clipped=4.0 +2023-04-03 00:54:46,361 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:04,059 INFO [train.py:903] (2/4) Epoch 23, batch 6450, loss[loss=0.2064, simple_loss=0.2865, pruned_loss=0.06316, over 19412.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2875, pruned_loss=0.06336, over 3832034.45 frames. ], batch size: 48, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:55:35,948 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:39,534 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1709, 1.3488, 1.9105, 1.3550, 3.0589, 4.5847, 4.4502, 4.9926], + device='cuda:2'), covar=tensor([0.1687, 0.3991, 0.3425, 0.2411, 0.0600, 0.0184, 0.0182, 0.0181], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0264, 0.0246, 0.0189, 0.0216, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 00:55:47,919 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 00:56:04,449 INFO [train.py:903] (2/4) Epoch 23, batch 6500, loss[loss=0.2586, simple_loss=0.3217, pruned_loss=0.09773, over 19702.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2876, pruned_loss=0.06337, over 3837519.83 frames. ], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:56:10,076 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 00:56:39,933 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.077e+02 6.090e+02 8.057e+02 1.603e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-03 00:57:04,750 INFO [train.py:903] (2/4) Epoch 23, batch 6550, loss[loss=0.1749, simple_loss=0.2531, pruned_loss=0.04832, over 16455.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06341, over 3834288.01 frames. 
], batch size: 36, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:57:06,338 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:06,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:50,663 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:04,880 INFO [train.py:903] (2/4) Epoch 23, batch 6600, loss[loss=0.1943, simple_loss=0.2814, pruned_loss=0.05357, over 19657.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2864, pruned_loss=0.06295, over 3832397.73 frames. ], batch size: 55, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:58:20,360 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:34,787 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0217, 1.3021, 1.6652, 0.8592, 2.3479, 3.0630, 2.7263, 3.2467], + device='cuda:2'), covar=tensor([0.1697, 0.3790, 0.3386, 0.2786, 0.0604, 0.0202, 0.0272, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0265, 0.0245, 0.0190, 0.0216, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 00:58:41,922 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.163e+02 6.336e+02 8.010e+02 1.885e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 00:59:05,157 INFO [train.py:903] (2/4) Epoch 23, batch 6650, loss[loss=0.1907, simple_loss=0.2778, pruned_loss=0.05182, over 19677.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06284, over 3835512.27 frames. ], batch size: 53, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:59:51,146 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156903.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:00:07,302 INFO [train.py:903] (2/4) Epoch 23, batch 6700, loss[loss=0.2437, simple_loss=0.3147, pruned_loss=0.0863, over 17204.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06305, over 3835174.69 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:00:41,738 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.603e+02 5.252e+02 6.535e+02 7.903e+02 1.565e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-04-03 01:00:45,421 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:04,457 INFO [train.py:903] (2/4) Epoch 23, batch 6750, loss[loss=0.2414, simple_loss=0.3281, pruned_loss=0.07741, over 19755.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06349, over 3830622.27 frames. ], batch size: 63, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:01:13,626 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:00,909 INFO [train.py:903] (2/4) Epoch 23, batch 6800, loss[loss=0.2014, simple_loss=0.2855, pruned_loss=0.05863, over 19504.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2869, pruned_loss=0.06359, over 3831035.13 frames. 
], batch size: 64, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:02:09,516 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:14,962 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:44,836 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 01:02:46,004 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 01:02:48,305 INFO [train.py:903] (2/4) Epoch 24, batch 0, loss[loss=0.2205, simple_loss=0.3065, pruned_loss=0.0673, over 19694.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3065, pruned_loss=0.0673, over 19694.00 frames. ], batch size: 60, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:02:48,305 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 01:02:59,935 INFO [train.py:937] (2/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2685, pruned_loss=0.03408, over 944034.00 frames. +2023-04-03 01:02:59,936 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 01:03:03,176 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.212e+02 6.445e+02 8.399e+02 3.393e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-03 01:03:05,715 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:03:12,277 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 01:04:00,819 INFO [train.py:903] (2/4) Epoch 24, batch 50, loss[loss=0.1664, simple_loss=0.2416, pruned_loss=0.04559, over 19754.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.28, pruned_loss=0.05985, over 870834.04 frames. ], batch size: 46, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:04:01,208 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7391, 2.6672, 2.2413, 2.1762, 1.8768, 2.3692, 1.1657, 1.9780], + device='cuda:2'), covar=tensor([0.0669, 0.0681, 0.0623, 0.0936, 0.1098, 0.0976, 0.1398, 0.0992], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0357, 0.0362, 0.0386, 0.0466, 0.0391, 0.0338, 0.0342], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:04:20,650 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:04:32,474 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 01:04:47,081 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3621, 1.3930, 1.9096, 1.4054, 2.8378, 3.7113, 3.4013, 3.9842], + device='cuda:2'), covar=tensor([0.1618, 0.3835, 0.3235, 0.2432, 0.0578, 0.0202, 0.0241, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0326, 0.0355, 0.0266, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 01:05:01,217 INFO [train.py:903] (2/4) Epoch 24, batch 100, loss[loss=0.2035, simple_loss=0.2878, pruned_loss=0.05964, over 19663.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2843, pruned_loss=0.06369, over 1514794.56 frames. 
], batch size: 58, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:05:03,484 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 5.500e+02 6.534e+02 8.918e+02 1.825e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 01:05:11,356 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 01:05:19,826 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157160.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:05:26,385 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4305, 1.4148, 1.5520, 1.4612, 3.0253, 1.2773, 2.3875, 3.4050], + device='cuda:2'), covar=tensor([0.0564, 0.2720, 0.2755, 0.1891, 0.0765, 0.2306, 0.1153, 0.0284], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0368, 0.0390, 0.0351, 0.0374, 0.0350, 0.0383, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:06:02,080 INFO [train.py:903] (2/4) Epoch 24, batch 150, loss[loss=0.2345, simple_loss=0.3117, pruned_loss=0.0786, over 17555.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2847, pruned_loss=0.06243, over 2025088.55 frames. ], batch size: 101, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:06:42,470 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:01,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 01:07:02,474 INFO [train.py:903] (2/4) Epoch 24, batch 200, loss[loss=0.186, simple_loss=0.2618, pruned_loss=0.05513, over 19774.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.06242, over 2422601.13 frames. ], batch size: 46, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:07:04,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 4.992e+02 5.973e+02 7.088e+02 2.080e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 01:07:05,920 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:12,117 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7974, 2.4824, 2.3538, 2.7855, 2.5411, 2.3762, 2.2135, 2.7700], + device='cuda:2'), covar=tensor([0.0870, 0.1680, 0.1413, 0.1116, 0.1354, 0.0484, 0.1293, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0356, 0.0314, 0.0252, 0.0304, 0.0253, 0.0312, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:08:03,056 INFO [train.py:903] (2/4) Epoch 24, batch 250, loss[loss=0.1913, simple_loss=0.2709, pruned_loss=0.05583, over 19700.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2852, pruned_loss=0.06309, over 2718941.09 frames. 
], batch size: 53, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:08:45,264 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6186, 1.5331, 1.5369, 2.0459, 1.6619, 1.8820, 1.8706, 1.7129], + device='cuda:2'), covar=tensor([0.0826, 0.0907, 0.0988, 0.0709, 0.0783, 0.0734, 0.0801, 0.0666], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0223, 0.0227, 0.0241, 0.0228, 0.0215, 0.0190, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 01:09:03,288 INFO [train.py:903] (2/4) Epoch 24, batch 300, loss[loss=0.2111, simple_loss=0.2952, pruned_loss=0.06343, over 19612.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2862, pruned_loss=0.06355, over 2976694.33 frames. ], batch size: 61, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:09:06,233 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.405e+02 6.557e+02 9.024e+02 1.464e+03, threshold=1.311e+03, percent-clipped=9.0 +2023-04-03 01:09:25,572 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157362.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:34,463 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5912, 1.2542, 1.4946, 1.4634, 3.1125, 1.1102, 2.3668, 3.6109], + device='cuda:2'), covar=tensor([0.0698, 0.3186, 0.3085, 0.2118, 0.0936, 0.2892, 0.1588, 0.0319], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0369, 0.0390, 0.0352, 0.0374, 0.0350, 0.0383, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:09:36,389 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:39,529 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:41,187 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-03 01:09:56,109 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1001, 1.7906, 1.4566, 1.1706, 1.5806, 1.1654, 1.1803, 1.6359], + device='cuda:2'), covar=tensor([0.0843, 0.0762, 0.1041, 0.0864, 0.0586, 0.1277, 0.0605, 0.0450], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0314, 0.0336, 0.0266, 0.0246, 0.0338, 0.0288, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:10:05,064 INFO [train.py:903] (2/4) Epoch 24, batch 350, loss[loss=0.2169, simple_loss=0.3009, pruned_loss=0.06649, over 18876.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2863, pruned_loss=0.06388, over 3166582.69 frames. ], batch size: 74, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:10:10,691 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 01:10:12,258 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:39,434 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157422.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:11:05,171 INFO [train.py:903] (2/4) Epoch 24, batch 400, loss[loss=0.2012, simple_loss=0.2755, pruned_loss=0.06345, over 19624.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2883, pruned_loss=0.06497, over 3297341.25 frames. 
], batch size: 50, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:11:07,649 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 4.825e+02 6.674e+02 8.153e+02 1.427e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-03 01:11:48,251 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1936, 2.9243, 2.3511, 2.3033, 2.0182, 2.5147, 1.0192, 2.0852], + device='cuda:2'), covar=tensor([0.0635, 0.0583, 0.0645, 0.1052, 0.1083, 0.1056, 0.1384, 0.1025], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0354, 0.0361, 0.0384, 0.0463, 0.0392, 0.0337, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:11:52,578 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157482.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:11:56,137 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9342, 2.0551, 1.8544, 3.1033, 2.2399, 2.9665, 1.9661, 1.6389], + device='cuda:2'), covar=tensor([0.5660, 0.5023, 0.3236, 0.3283, 0.4939, 0.2414, 0.7106, 0.5770], + device='cuda:2'), in_proj_covar=tensor([0.0908, 0.0977, 0.0723, 0.0934, 0.0888, 0.0825, 0.0847, 0.0788], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 01:11:58,242 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:01,767 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8578, 1.3486, 1.0900, 1.0373, 1.1883, 1.0517, 0.9451, 1.2358], + device='cuda:2'), covar=tensor([0.0702, 0.0843, 0.1230, 0.0797, 0.0665, 0.1368, 0.0633, 0.0558], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0315, 0.0338, 0.0267, 0.0247, 0.0339, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:12:05,706 INFO [train.py:903] (2/4) Epoch 24, batch 450, loss[loss=0.2166, simple_loss=0.2812, pruned_loss=0.076, over 19648.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2888, pruned_loss=0.0651, over 3422182.62 frames. ], batch size: 50, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:12:20,131 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:24,887 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:31,933 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6875, 1.6962, 1.5730, 1.3649, 1.3162, 1.3684, 0.2174, 0.6235], + device='cuda:2'), covar=tensor([0.0698, 0.0679, 0.0490, 0.0700, 0.1394, 0.0797, 0.1387, 0.1169], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0354, 0.0362, 0.0384, 0.0464, 0.0392, 0.0338, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:12:40,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 01:12:40,701 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 01:12:43,468 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5720, 2.3022, 1.7617, 1.6163, 2.1932, 1.4707, 1.3792, 1.9567], + device='cuda:2'), covar=tensor([0.1154, 0.0826, 0.1160, 0.0869, 0.0581, 0.1284, 0.0816, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0315, 0.0339, 0.0268, 0.0248, 0.0340, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:12:44,489 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157524.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:13:08,942 INFO [train.py:903] (2/4) Epoch 24, batch 500, loss[loss=0.199, simple_loss=0.2909, pruned_loss=0.05355, over 19530.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2871, pruned_loss=0.06376, over 3503589.87 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:13:12,134 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.203e+02 6.096e+02 8.720e+02 1.456e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 01:14:02,831 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:12,213 INFO [train.py:903] (2/4) Epoch 24, batch 550, loss[loss=0.2339, simple_loss=0.3149, pruned_loss=0.07645, over 19132.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.06337, over 3566705.10 frames. ], batch size: 69, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:14:32,927 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5027, 1.5203, 1.7146, 1.6766, 2.4845, 2.3374, 2.5685, 1.1118], + device='cuda:2'), covar=tensor([0.2402, 0.4273, 0.2693, 0.1996, 0.1472, 0.2041, 0.1333, 0.4445], + device='cuda:2'), in_proj_covar=tensor([0.0538, 0.0647, 0.0721, 0.0491, 0.0620, 0.0534, 0.0661, 0.0552], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:14:41,067 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157618.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:42,176 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,030 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,768 INFO [train.py:903] (2/4) Epoch 24, batch 600, loss[loss=0.2095, simple_loss=0.2952, pruned_loss=0.06192, over 19325.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2878, pruned_loss=0.06319, over 3629633.33 frames. 
], batch size: 66, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:15:15,910 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.646e+02 5.574e+02 6.767e+02 1.170e+03, threshold=1.115e+03, percent-clipped=0.0 +2023-04-03 01:15:28,512 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1758, 5.5479, 3.2854, 4.9524, 1.0989, 5.7802, 5.5995, 5.7780], + device='cuda:2'), covar=tensor([0.0371, 0.0916, 0.1641, 0.0705, 0.4128, 0.0483, 0.0729, 0.0956], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0415, 0.0498, 0.0349, 0.0402, 0.0439, 0.0432, 0.0468], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:15:29,134 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. limit=5.0 +2023-04-03 01:15:39,848 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8225, 4.2298, 4.4773, 4.5036, 1.7116, 4.2318, 3.6210, 4.1773], + device='cuda:2'), covar=tensor([0.1638, 0.0895, 0.0622, 0.0665, 0.6218, 0.0925, 0.0757, 0.1193], + device='cuda:2'), in_proj_covar=tensor([0.0792, 0.0757, 0.0965, 0.0844, 0.0850, 0.0731, 0.0580, 0.0889], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 01:15:53,026 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 01:16:14,753 INFO [train.py:903] (2/4) Epoch 24, batch 650, loss[loss=0.2328, simple_loss=0.3082, pruned_loss=0.07865, over 12947.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2891, pruned_loss=0.06421, over 3677384.15 frames. ], batch size: 135, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:16:45,949 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157718.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:14,700 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157743.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:15,390 INFO [train.py:903] (2/4) Epoch 24, batch 700, loss[loss=0.2311, simple_loss=0.3082, pruned_loss=0.07693, over 19564.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2877, pruned_loss=0.06363, over 3709710.54 frames. ], batch size: 61, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:17:15,553 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:20,745 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.232e+02 6.869e+02 8.195e+02 1.483e+03, threshold=1.374e+03, percent-clipped=7.0 +2023-04-03 01:17:44,062 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157766.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:17:46,329 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:18:19,077 INFO [train.py:903] (2/4) Epoch 24, batch 750, loss[loss=0.202, simple_loss=0.2938, pruned_loss=0.05508, over 19778.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2878, pruned_loss=0.06392, over 3746055.60 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:18:31,205 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. 
limit=5.0 +2023-04-03 01:19:06,855 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:19,795 INFO [train.py:903] (2/4) Epoch 24, batch 800, loss[loss=0.2054, simple_loss=0.2841, pruned_loss=0.06332, over 17227.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2864, pruned_loss=0.06333, over 3758090.81 frames. ], batch size: 101, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:23,270 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 5.074e+02 6.358e+02 8.526e+02 1.766e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-03 01:19:30,256 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 01:19:37,371 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157859.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:37,389 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6063, 1.4705, 1.4868, 2.2037, 1.5629, 1.7877, 1.8214, 1.6396], + device='cuda:2'), covar=tensor([0.0954, 0.1030, 0.1136, 0.0801, 0.0956, 0.0895, 0.0997, 0.0803], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0224, 0.0229, 0.0242, 0.0229, 0.0215, 0.0191, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 01:19:48,541 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157868.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:19:52,854 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:59,136 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157875.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:05,661 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157881.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:20:20,136 INFO [train.py:903] (2/4) Epoch 24, batch 850, loss[loss=0.2167, simple_loss=0.2922, pruned_loss=0.07059, over 17532.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06326, over 3779238.76 frames. ], batch size: 101, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:20:27,416 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157900.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:27,444 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1032, 2.0285, 1.9703, 1.8523, 1.5892, 1.7706, 0.8047, 1.2507], + device='cuda:2'), covar=tensor([0.0621, 0.0626, 0.0436, 0.0700, 0.1160, 0.0916, 0.1310, 0.1010], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0357, 0.0363, 0.0388, 0.0467, 0.0394, 0.0339, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:20:37,625 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 01:21:05,409 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:08,624 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 01:21:21,337 INFO [train.py:903] (2/4) Epoch 24, batch 900, loss[loss=0.1735, simple_loss=0.2513, pruned_loss=0.04785, over 19085.00 frames. 
], tot_loss[loss=0.2054, simple_loss=0.2857, pruned_loss=0.06248, over 3792329.77 frames. ], batch size: 42, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:21:25,793 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.800e+02 5.808e+02 7.910e+02 1.683e+03, threshold=1.162e+03, percent-clipped=5.0 +2023-04-03 01:22:10,132 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157983.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:22:22,960 INFO [train.py:903] (2/4) Epoch 24, batch 950, loss[loss=0.2575, simple_loss=0.3237, pruned_loss=0.09564, over 19475.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2855, pruned_loss=0.06265, over 3802572.95 frames. ], batch size: 64, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:22:22,983 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 01:23:26,860 INFO [train.py:903] (2/4) Epoch 24, batch 1000, loss[loss=0.2251, simple_loss=0.2996, pruned_loss=0.07527, over 19596.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2852, pruned_loss=0.06232, over 3809068.14 frames. ], batch size: 61, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:23:28,214 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:23:31,292 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.075e+02 5.979e+02 8.043e+02 1.884e+03, threshold=1.196e+03, percent-clipped=5.0 +2023-04-03 01:24:17,483 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 01:24:22,269 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:22,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 01:24:27,540 INFO [train.py:903] (2/4) Epoch 24, batch 1050, loss[loss=0.1929, simple_loss=0.2772, pruned_loss=0.05433, over 19738.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2856, pruned_loss=0.0626, over 3814703.24 frames. ], batch size: 51, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:24:50,248 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158114.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:52,321 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158115.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:56,182 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 01:25:07,405 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8389, 1.4441, 1.5355, 1.3969, 3.4100, 1.0850, 2.4588, 3.8777], + device='cuda:2'), covar=tensor([0.0500, 0.2800, 0.2998, 0.2143, 0.0728, 0.2701, 0.1368, 0.0228], + device='cuda:2'), in_proj_covar=tensor([0.0419, 0.0373, 0.0394, 0.0354, 0.0377, 0.0354, 0.0389, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:25:20,114 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158137.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:25:23,286 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,372 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 01:25:27,550 INFO [train.py:903] (2/4) Epoch 24, batch 1100, loss[loss=0.181, simple_loss=0.2714, pruned_loss=0.04529, over 19523.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.0638, over 3809006.74 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:25:31,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 5.122e+02 6.777e+02 7.992e+02 2.032e+03, threshold=1.355e+03, percent-clipped=5.0 +2023-04-03 01:25:51,746 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158162.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:26:28,902 INFO [train.py:903] (2/4) Epoch 24, batch 1150, loss[loss=0.1717, simple_loss=0.2483, pruned_loss=0.04756, over 19744.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2864, pruned_loss=0.06345, over 3823402.43 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:26:56,867 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:26:58,667 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.89 vs. limit=5.0 +2023-04-03 01:27:25,341 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158239.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:27:31,334 INFO [train.py:903] (2/4) Epoch 24, batch 1200, loss[loss=0.182, simple_loss=0.2767, pruned_loss=0.04364, over 19610.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2868, pruned_loss=0.06369, over 3814636.89 frames. ], batch size: 57, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:27:37,768 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.926e+02 5.852e+02 7.782e+02 1.430e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 01:27:56,335 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158264.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:28:02,799 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 01:28:34,708 INFO [train.py:903] (2/4) Epoch 24, batch 1250, loss[loss=0.1931, simple_loss=0.2648, pruned_loss=0.06067, over 18611.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2873, pruned_loss=0.06398, over 3812038.62 frames. 
], batch size: 41, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:28:43,383 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:15,075 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:20,354 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:26,987 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5727, 4.1311, 2.7536, 3.5884, 1.2975, 4.1402, 4.0022, 4.0901], + device='cuda:2'), covar=tensor([0.0649, 0.1030, 0.1831, 0.0894, 0.3619, 0.0649, 0.0911, 0.1064], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0414, 0.0500, 0.0348, 0.0402, 0.0439, 0.0432, 0.0465], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:29:35,685 INFO [train.py:903] (2/4) Epoch 24, batch 1300, loss[loss=0.2015, simple_loss=0.2847, pruned_loss=0.05914, over 19778.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2866, pruned_loss=0.06326, over 3814868.83 frames. ], batch size: 54, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:29:37,557 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-03 01:29:40,389 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 5.283e+02 7.241e+02 8.945e+02 2.355e+03, threshold=1.448e+03, percent-clipped=9.0 +2023-04-03 01:30:36,951 INFO [train.py:903] (2/4) Epoch 24, batch 1350, loss[loss=0.1806, simple_loss=0.2643, pruned_loss=0.0485, over 19741.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06304, over 3816012.58 frames. ], batch size: 51, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:30:40,611 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1981, 2.0848, 1.9990, 1.9033, 1.6382, 1.7696, 0.5185, 1.1775], + device='cuda:2'), covar=tensor([0.0625, 0.0614, 0.0468, 0.0729, 0.1181, 0.0863, 0.1423, 0.1067], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0358, 0.0362, 0.0386, 0.0463, 0.0392, 0.0338, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:30:48,159 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:31:38,829 INFO [train.py:903] (2/4) Epoch 24, batch 1400, loss[loss=0.1871, simple_loss=0.2714, pruned_loss=0.05138, over 19683.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06374, over 3816867.24 frames. 
], batch size: 53, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:31:43,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.106e+02 6.656e+02 8.188e+02 2.197e+03, threshold=1.331e+03, percent-clipped=4.0 +2023-04-03 01:31:59,423 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8414, 1.9690, 2.2680, 2.5046, 1.8911, 2.4240, 2.3162, 2.0106], + device='cuda:2'), covar=tensor([0.4330, 0.3811, 0.1912, 0.2360, 0.3981, 0.2160, 0.4744, 0.3467], + device='cuda:2'), in_proj_covar=tensor([0.0910, 0.0981, 0.0726, 0.0939, 0.0890, 0.0827, 0.0846, 0.0789], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 01:32:27,057 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:32:40,273 INFO [train.py:903] (2/4) Epoch 24, batch 1450, loss[loss=0.1829, simple_loss=0.2651, pruned_loss=0.05035, over 19376.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06334, over 3836217.19 frames. ], batch size: 48, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:32:42,601 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 01:32:52,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3130, 1.3606, 1.5174, 1.4567, 1.6898, 1.8011, 1.6991, 0.7012], + device='cuda:2'), covar=tensor([0.2383, 0.4115, 0.2486, 0.1882, 0.1650, 0.2232, 0.1448, 0.4698], + device='cuda:2'), in_proj_covar=tensor([0.0543, 0.0652, 0.0725, 0.0495, 0.0623, 0.0538, 0.0665, 0.0558], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:33:41,525 INFO [train.py:903] (2/4) Epoch 24, batch 1500, loss[loss=0.182, simple_loss=0.2614, pruned_loss=0.05132, over 19485.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2874, pruned_loss=0.06357, over 3836747.80 frames. ], batch size: 49, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:33:46,116 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.633e+02 4.870e+02 5.968e+02 7.477e+02 1.869e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-03 01:34:33,961 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:34:42,451 INFO [train.py:903] (2/4) Epoch 24, batch 1550, loss[loss=0.2132, simple_loss=0.2966, pruned_loss=0.0649, over 17466.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06362, over 3829820.49 frames. ], batch size: 101, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:34:48,598 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:04,312 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:36,446 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-03 01:35:46,043 INFO [train.py:903] (2/4) Epoch 24, batch 1600, loss[loss=0.1797, simple_loss=0.2595, pruned_loss=0.05001, over 19703.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06342, over 3816307.98 frames. 
], batch size: 46, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:35:51,809 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.721e+02 6.000e+02 7.264e+02 1.836e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 01:36:12,728 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 01:36:50,304 INFO [train.py:903] (2/4) Epoch 24, batch 1650, loss[loss=0.2069, simple_loss=0.2937, pruned_loss=0.06012, over 19718.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06352, over 3805051.42 frames. ], batch size: 63, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:02,710 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0859, 1.5606, 2.0216, 1.6134, 4.5339, 1.2815, 2.5336, 4.9991], + device='cuda:2'), covar=tensor([0.0444, 0.2767, 0.2683, 0.2083, 0.0772, 0.2529, 0.1483, 0.0156], + device='cuda:2'), in_proj_covar=tensor([0.0419, 0.0374, 0.0394, 0.0354, 0.0378, 0.0356, 0.0389, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:37:52,883 INFO [train.py:903] (2/4) Epoch 24, batch 1700, loss[loss=0.2202, simple_loss=0.3039, pruned_loss=0.06822, over 18810.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2867, pruned_loss=0.06306, over 3819048.50 frames. ], batch size: 74, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:55,344 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:37:57,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.745e+02 5.793e+02 7.168e+02 1.456e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 01:38:35,605 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 01:38:54,538 INFO [train.py:903] (2/4) Epoch 24, batch 1750, loss[loss=0.2336, simple_loss=0.3093, pruned_loss=0.07901, over 13263.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2873, pruned_loss=0.06326, over 3815218.46 frames. ], batch size: 136, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:38:56,098 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:03,118 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:57,552 INFO [train.py:903] (2/4) Epoch 24, batch 1800, loss[loss=0.2016, simple_loss=0.2809, pruned_loss=0.0611, over 19735.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2869, pruned_loss=0.06283, over 3809532.61 frames. 
], batch size: 51, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:40:02,401 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.690e+02 5.979e+02 7.282e+02 2.087e+03, threshold=1.196e+03, percent-clipped=3.0 +2023-04-03 01:40:08,495 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158853.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:40:11,842 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158855.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:19,318 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:40,515 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:56,883 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 01:40:58,003 INFO [train.py:903] (2/4) Epoch 24, batch 1850, loss[loss=0.2212, simple_loss=0.296, pruned_loss=0.07318, over 19568.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06315, over 3795025.98 frames. ], batch size: 61, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:41:32,433 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 01:42:00,400 INFO [train.py:903] (2/4) Epoch 24, batch 1900, loss[loss=0.1772, simple_loss=0.2599, pruned_loss=0.04731, over 19768.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.286, pruned_loss=0.06291, over 3802152.32 frames. ], batch size: 48, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:42:04,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 4.936e+02 5.873e+02 7.509e+02 2.125e+03, threshold=1.175e+03, percent-clipped=8.0 +2023-04-03 01:42:18,828 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 01:42:24,116 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 01:42:41,442 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9247, 4.4749, 2.7248, 3.9276, 1.1219, 4.4760, 4.3075, 4.4655], + device='cuda:2'), covar=tensor([0.0501, 0.0882, 0.2003, 0.0752, 0.3940, 0.0589, 0.0885, 0.1094], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0414, 0.0496, 0.0345, 0.0401, 0.0437, 0.0430, 0.0462], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:42:46,679 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 01:43:00,389 INFO [train.py:903] (2/4) Epoch 24, batch 1950, loss[loss=0.1921, simple_loss=0.2842, pruned_loss=0.04996, over 19590.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2862, pruned_loss=0.06289, over 3817431.29 frames. ], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:03,844 INFO [train.py:903] (2/4) Epoch 24, batch 2000, loss[loss=0.1962, simple_loss=0.2816, pruned_loss=0.05546, over 19660.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06319, over 3804315.83 frames. 
], batch size: 55, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:08,591 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.106e+02 6.605e+02 9.481e+02 1.726e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-03 01:44:57,740 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:44:59,855 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 01:45:03,909 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.50 vs. limit=5.0 +2023-04-03 01:45:06,681 INFO [train.py:903] (2/4) Epoch 24, batch 2050, loss[loss=0.1769, simple_loss=0.2513, pruned_loss=0.05119, over 16464.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2869, pruned_loss=0.06355, over 3804146.22 frames. ], batch size: 36, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:45:19,168 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 01:45:20,294 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 01:45:35,006 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:45:41,998 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 01:46:02,789 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159139.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:03,077 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2517, 1.3224, 1.2723, 1.0816, 1.1141, 1.1133, 0.0762, 0.3685], + device='cuda:2'), covar=tensor([0.0778, 0.0731, 0.0460, 0.0615, 0.1377, 0.0684, 0.1365, 0.1158], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0358, 0.0362, 0.0387, 0.0463, 0.0392, 0.0339, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 01:46:06,511 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:08,192 INFO [train.py:903] (2/4) Epoch 24, batch 2100, loss[loss=0.2488, simple_loss=0.3227, pruned_loss=0.0875, over 18191.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2861, pruned_loss=0.06285, over 3800302.26 frames. ], batch size: 83, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:46:10,468 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159145.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:13,634 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.527e+02 4.786e+02 5.922e+02 8.131e+02 1.881e+03, threshold=1.184e+03, percent-clipped=4.0 +2023-04-03 01:46:37,339 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 01:46:49,830 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:58,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 01:47:10,232 INFO [train.py:903] (2/4) Epoch 24, batch 2150, loss[loss=0.1956, simple_loss=0.2886, pruned_loss=0.05134, over 18079.00 frames. 
], tot_loss[loss=0.2061, simple_loss=0.2862, pruned_loss=0.06303, over 3790100.66 frames. ], batch size: 83, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:47:13,881 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159197.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:48:12,832 INFO [train.py:903] (2/4) Epoch 24, batch 2200, loss[loss=0.2198, simple_loss=0.3139, pruned_loss=0.06281, over 19523.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06303, over 3796622.74 frames. ], batch size: 56, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:48:18,019 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.881e+02 6.157e+02 7.813e+02 2.191e+03, threshold=1.231e+03, percent-clipped=6.0 +2023-04-03 01:48:25,300 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:33,262 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:41,402 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3884, 1.7558, 1.9099, 1.7642, 3.8866, 1.4311, 2.9325, 4.1668], + device='cuda:2'), covar=tensor([0.0562, 0.2981, 0.2954, 0.2270, 0.0823, 0.2819, 0.1511, 0.0253], + device='cuda:2'), in_proj_covar=tensor([0.0418, 0.0374, 0.0395, 0.0354, 0.0379, 0.0357, 0.0391, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:49:14,284 INFO [train.py:903] (2/4) Epoch 24, batch 2250, loss[loss=0.2385, simple_loss=0.3114, pruned_loss=0.08275, over 19316.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.064, over 3804392.45 frames. ], batch size: 66, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:49:31,889 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:49:37,467 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159312.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:49:46,540 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3944, 1.3729, 1.5269, 1.5815, 2.9781, 1.1821, 2.4076, 3.4368], + device='cuda:2'), covar=tensor([0.0588, 0.2785, 0.2885, 0.1787, 0.0758, 0.2486, 0.1239, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0354, 0.0378, 0.0356, 0.0390, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:50:16,682 INFO [train.py:903] (2/4) Epoch 24, batch 2300, loss[loss=0.2338, simple_loss=0.3044, pruned_loss=0.08162, over 19723.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2884, pruned_loss=0.0638, over 3816437.67 frames. ], batch size: 63, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:50:21,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.864e+02 6.208e+02 8.672e+02 1.812e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 01:50:31,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-03 01:50:52,322 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6736, 1.6153, 1.8428, 1.7579, 4.1840, 1.1223, 2.6945, 4.5786], + device='cuda:2'), covar=tensor([0.0472, 0.2788, 0.2814, 0.1982, 0.0738, 0.2804, 0.1491, 0.0165], + device='cuda:2'), in_proj_covar=tensor([0.0415, 0.0370, 0.0391, 0.0351, 0.0375, 0.0354, 0.0387, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:51:19,167 INFO [train.py:903] (2/4) Epoch 24, batch 2350, loss[loss=0.2217, simple_loss=0.3123, pruned_loss=0.06552, over 19361.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2888, pruned_loss=0.06404, over 3812015.05 frames. ], batch size: 66, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:00,178 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 01:52:04,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:52:17,050 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 01:52:19,425 INFO [train.py:903] (2/4) Epoch 24, batch 2400, loss[loss=0.2057, simple_loss=0.2808, pruned_loss=0.06531, over 19844.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2888, pruned_loss=0.064, over 3823446.35 frames. ], batch size: 52, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:25,062 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.946e+02 5.943e+02 8.368e+02 2.189e+03, threshold=1.189e+03, percent-clipped=6.0 +2023-04-03 01:53:22,620 INFO [train.py:903] (2/4) Epoch 24, batch 2450, loss[loss=0.178, simple_loss=0.259, pruned_loss=0.04851, over 19754.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2882, pruned_loss=0.06335, over 3805987.83 frames. ], batch size: 51, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:53:42,102 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159510.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:49,982 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:55,524 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:14,806 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:21,728 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:24,656 INFO [train.py:903] (2/4) Epoch 24, batch 2500, loss[loss=0.183, simple_loss=0.2542, pruned_loss=0.05585, over 16329.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2895, pruned_loss=0.06448, over 3797883.66 frames. 
], batch size: 36, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:54:27,436 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:29,311 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.941e+02 6.110e+02 7.649e+02 1.406e+03, threshold=1.222e+03, percent-clipped=1.0 +2023-04-03 01:54:55,500 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159568.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:55:25,752 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159593.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:55:26,480 INFO [train.py:903] (2/4) Epoch 24, batch 2550, loss[loss=0.2248, simple_loss=0.3059, pruned_loss=0.07181, over 19605.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2887, pruned_loss=0.06401, over 3804056.86 frames. ], batch size: 57, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:19,868 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:20,616 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 01:56:29,396 INFO [train.py:903] (2/4) Epoch 24, batch 2600, loss[loss=0.2191, simple_loss=0.3082, pruned_loss=0.065, over 19782.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06267, over 3815443.21 frames. ], batch size: 56, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:34,436 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 01:56:34,953 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.779e+02 5.928e+02 8.262e+02 1.528e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-03 01:56:36,575 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159649.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:39,990 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:57:31,602 INFO [train.py:903] (2/4) Epoch 24, batch 2650, loss[loss=0.2125, simple_loss=0.295, pruned_loss=0.06502, over 19697.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2856, pruned_loss=0.06242, over 3813303.93 frames. ], batch size: 59, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:57:43,898 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 01:58:18,697 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9543, 1.2370, 1.6190, 0.6195, 2.1096, 2.4699, 2.1051, 2.5865], + device='cuda:2'), covar=tensor([0.1583, 0.3793, 0.3309, 0.2699, 0.0583, 0.0275, 0.0365, 0.0386], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0325, 0.0356, 0.0264, 0.0245, 0.0190, 0.0216, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 01:58:20,964 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4704, 2.0818, 1.6377, 1.3045, 1.9503, 1.2562, 1.2593, 1.8956], + device='cuda:2'), covar=tensor([0.1085, 0.0834, 0.1137, 0.1025, 0.0652, 0.1390, 0.0800, 0.0509], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0315, 0.0339, 0.0268, 0.0248, 0.0339, 0.0292, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:58:34,703 INFO [train.py:903] (2/4) Epoch 24, batch 2700, loss[loss=0.1973, simple_loss=0.2813, pruned_loss=0.05664, over 17288.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.06251, over 3812854.32 frames. ], batch size: 101, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:58:39,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 5.237e+02 6.508e+02 8.466e+02 2.382e+03, threshold=1.302e+03, percent-clipped=8.0 +2023-04-03 01:59:02,797 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6247, 2.3358, 1.7479, 1.5915, 2.1409, 1.4021, 1.4074, 1.9776], + device='cuda:2'), covar=tensor([0.1047, 0.0803, 0.1086, 0.0844, 0.0626, 0.1293, 0.0774, 0.0558], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0315, 0.0340, 0.0269, 0.0248, 0.0340, 0.0292, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 01:59:03,906 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:59:36,014 INFO [train.py:903] (2/4) Epoch 24, batch 2750, loss[loss=0.175, simple_loss=0.2505, pruned_loss=0.04969, over 19778.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2859, pruned_loss=0.06269, over 3806181.12 frames. ], batch size: 48, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:59:46,862 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:19,545 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:40,396 INFO [train.py:903] (2/4) Epoch 24, batch 2800, loss[loss=0.168, simple_loss=0.2453, pruned_loss=0.04537, over 19361.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06245, over 3803667.77 frames. 
], batch size: 47, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:00:45,927 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.444e+02 4.661e+02 5.641e+02 7.181e+02 2.352e+03, threshold=1.128e+03, percent-clipped=2.0 +2023-04-03 02:01:16,190 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:40,847 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:42,777 INFO [train.py:903] (2/4) Epoch 24, batch 2850, loss[loss=0.2042, simple_loss=0.2792, pruned_loss=0.06454, over 19836.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06354, over 3809655.97 frames. ], batch size: 52, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:10,743 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:25,915 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159928.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:39,642 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 02:02:45,336 INFO [train.py:903] (2/4) Epoch 24, batch 2900, loss[loss=0.1933, simple_loss=0.2636, pruned_loss=0.06151, over 19708.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.06308, over 3809334.03 frames. ], batch size: 45, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:51,072 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.874e+02 6.501e+02 8.672e+02 1.518e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-03 02:03:45,532 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:03:46,482 INFO [train.py:903] (2/4) Epoch 24, batch 2950, loss[loss=0.179, simple_loss=0.2584, pruned_loss=0.04979, over 19784.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.0631, over 3805301.16 frames. ], batch size: 47, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:13,443 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8644, 1.4712, 1.4761, 1.7979, 1.4089, 1.5820, 1.4851, 1.6736], + device='cuda:2'), covar=tensor([0.1042, 0.1414, 0.1563, 0.1021, 0.1406, 0.0598, 0.1381, 0.0820], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0355, 0.0312, 0.0254, 0.0303, 0.0252, 0.0312, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:04:24,635 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:04:45,844 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 02:04:49,472 INFO [train.py:903] (2/4) Epoch 24, batch 3000, loss[loss=0.2179, simple_loss=0.2985, pruned_loss=0.06862, over 17553.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2874, pruned_loss=0.06393, over 3785861.83 frames. ], batch size: 101, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:49,472 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 02:05:02,017 INFO [train.py:937] (2/4) Epoch 24, validation: loss=0.1679, simple_loss=0.268, pruned_loss=0.03397, over 944034.00 frames. 
+2023-04-03 02:05:02,018 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 02:05:08,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:05:08,853 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 4.966e+02 6.275e+02 7.790e+02 1.988e+03, threshold=1.255e+03, percent-clipped=5.0 +2023-04-03 02:05:33,090 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-03 02:05:47,965 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.31 vs. limit=5.0 +2023-04-03 02:06:04,589 INFO [train.py:903] (2/4) Epoch 24, batch 3050, loss[loss=0.1764, simple_loss=0.2688, pruned_loss=0.042, over 19727.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06332, over 3805508.95 frames. ], batch size: 51, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:06:19,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 02:06:23,655 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:07:08,744 INFO [train.py:903] (2/4) Epoch 24, batch 3100, loss[loss=0.181, simple_loss=0.252, pruned_loss=0.05507, over 18099.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2849, pruned_loss=0.0621, over 3822901.44 frames. ], batch size: 40, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:07:14,570 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 4.859e+02 5.894e+02 7.109e+02 1.682e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 02:07:37,224 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0729, 1.3279, 1.6503, 1.0469, 2.4810, 3.3790, 3.0204, 3.5818], + device='cuda:2'), covar=tensor([0.1688, 0.3787, 0.3456, 0.2685, 0.0643, 0.0200, 0.0226, 0.0283], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0327, 0.0357, 0.0265, 0.0246, 0.0191, 0.0217, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 02:07:52,721 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4507, 1.4963, 1.7819, 1.7183, 2.4862, 2.2062, 2.6602, 1.0853], + device='cuda:2'), covar=tensor([0.2525, 0.4314, 0.2727, 0.1959, 0.1619, 0.2182, 0.1522, 0.4807], + device='cuda:2'), in_proj_covar=tensor([0.0544, 0.0656, 0.0726, 0.0495, 0.0623, 0.0537, 0.0666, 0.0559], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:08:10,525 INFO [train.py:903] (2/4) Epoch 24, batch 3150, loss[loss=0.2406, simple_loss=0.3147, pruned_loss=0.08326, over 19680.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.0613, over 3831236.96 frames. ], batch size: 55, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:08:36,015 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 02:08:39,774 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160217.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:08:56,814 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8000, 1.8992, 2.0666, 2.3626, 1.8101, 2.2609, 2.1255, 1.9140], + device='cuda:2'), covar=tensor([0.3986, 0.3539, 0.1957, 0.2167, 0.3736, 0.1967, 0.4702, 0.3268], + device='cuda:2'), in_proj_covar=tensor([0.0919, 0.0990, 0.0731, 0.0945, 0.0896, 0.0831, 0.0853, 0.0794], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 02:09:14,362 INFO [train.py:903] (2/4) Epoch 24, batch 3200, loss[loss=0.193, simple_loss=0.274, pruned_loss=0.056, over 19537.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2842, pruned_loss=0.06172, over 3825285.97 frames. ], batch size: 54, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:09:18,011 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6079, 4.2418, 2.8271, 3.7443, 1.1311, 4.1565, 4.0181, 4.1059], + device='cuda:2'), covar=tensor([0.0630, 0.0936, 0.1768, 0.0775, 0.3657, 0.0709, 0.0896, 0.1185], + device='cuda:2'), in_proj_covar=tensor([0.0512, 0.0413, 0.0498, 0.0346, 0.0403, 0.0437, 0.0431, 0.0464], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:09:19,347 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2445, 1.2978, 1.2404, 1.0180, 1.0196, 1.0602, 0.1249, 0.3245], + device='cuda:2'), covar=tensor([0.0849, 0.0852, 0.0532, 0.0653, 0.1630, 0.0831, 0.1407, 0.1389], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0356, 0.0361, 0.0383, 0.0461, 0.0391, 0.0338, 0.0345], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:09:20,052 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.134e+02 6.599e+02 8.700e+02 2.161e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-03 02:09:49,733 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:10:17,061 INFO [train.py:903] (2/4) Epoch 24, batch 3250, loss[loss=0.2209, simple_loss=0.3016, pruned_loss=0.07012, over 19685.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06229, over 3823405.41 frames. ], batch size: 60, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:10:56,755 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160325.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:11:04,779 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160332.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:11:21,818 INFO [train.py:903] (2/4) Epoch 24, batch 3300, loss[loss=0.2155, simple_loss=0.3017, pruned_loss=0.06468, over 19505.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06345, over 3803375.34 frames. ], batch size: 64, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:11:24,400 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 02:11:27,809 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.973e+02 5.834e+02 7.675e+02 1.997e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-03 02:11:46,529 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160364.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:15,761 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:18,249 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160389.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:23,803 INFO [train.py:903] (2/4) Epoch 24, batch 3350, loss[loss=0.1856, simple_loss=0.274, pruned_loss=0.04866, over 19261.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2866, pruned_loss=0.063, over 3811132.93 frames. ], batch size: 66, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:12:47,485 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0084, 5.0587, 5.8609, 5.8491, 2.0184, 5.5295, 4.6433, 5.5293], + device='cuda:2'), covar=tensor([0.1663, 0.0831, 0.0522, 0.0586, 0.6254, 0.0884, 0.0608, 0.1114], + device='cuda:2'), in_proj_covar=tensor([0.0787, 0.0753, 0.0959, 0.0839, 0.0842, 0.0731, 0.0572, 0.0891], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 02:13:26,806 INFO [train.py:903] (2/4) Epoch 24, batch 3400, loss[loss=0.1798, simple_loss=0.2555, pruned_loss=0.05202, over 19338.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06321, over 3804892.29 frames. ], batch size: 44, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:13:32,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.133e+02 6.647e+02 9.203e+02 1.938e+03, threshold=1.329e+03, percent-clipped=8.0 +2023-04-03 02:14:28,023 INFO [train.py:903] (2/4) Epoch 24, batch 3450, loss[loss=0.1955, simple_loss=0.2775, pruned_loss=0.05677, over 19838.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06248, over 3802289.18 frames. ], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:14:31,635 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 02:15:29,542 INFO [train.py:903] (2/4) Epoch 24, batch 3500, loss[loss=0.2323, simple_loss=0.3133, pruned_loss=0.07567, over 19650.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06249, over 3806967.93 frames. ], batch size: 58, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:15:38,053 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.685e+02 5.887e+02 7.904e+02 2.662e+03, threshold=1.177e+03, percent-clipped=4.0 +2023-04-03 02:16:26,915 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:33,577 INFO [train.py:903] (2/4) Epoch 24, batch 3550, loss[loss=0.2203, simple_loss=0.2997, pruned_loss=0.0704, over 19305.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.06189, over 3816626.79 frames. 
], batch size: 66, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:16:57,348 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160613.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:36,562 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:37,258 INFO [train.py:903] (2/4) Epoch 24, batch 3600, loss[loss=0.2351, simple_loss=0.3036, pruned_loss=0.08329, over 19781.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06181, over 3824766.45 frames. ], batch size: 54, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:17:44,411 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 4.811e+02 5.669e+02 7.209e+02 1.690e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 02:18:07,971 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160668.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:18:09,962 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160669.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:18:20,456 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1481, 1.9689, 1.7763, 2.1885, 1.7601, 1.8339, 1.7903, 2.0151], + device='cuda:2'), covar=tensor([0.1030, 0.1480, 0.1498, 0.0988, 0.1438, 0.0545, 0.1367, 0.0738], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0358, 0.0313, 0.0256, 0.0306, 0.0255, 0.0314, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:18:36,982 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1084, 1.9396, 1.9090, 2.7243, 1.9577, 2.4000, 2.4625, 2.2155], + device='cuda:2'), covar=tensor([0.0823, 0.0905, 0.1004, 0.0796, 0.0901, 0.0711, 0.0842, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0222, 0.0228, 0.0239, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 02:18:40,043 INFO [train.py:903] (2/4) Epoch 24, batch 3650, loss[loss=0.2329, simple_loss=0.3208, pruned_loss=0.07251, over 19536.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.06193, over 3820500.67 frames. ], batch size: 56, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:08,189 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1480, 2.0534, 1.9678, 1.7681, 1.5618, 1.6489, 0.9911, 1.2823], + device='cuda:2'), covar=tensor([0.0701, 0.0783, 0.0480, 0.0899, 0.1226, 0.1160, 0.1268, 0.1158], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0359, 0.0362, 0.0385, 0.0462, 0.0394, 0.0339, 0.0345], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:19:40,900 INFO [train.py:903] (2/4) Epoch 24, batch 3700, loss[loss=0.2346, simple_loss=0.3053, pruned_loss=0.08196, over 13531.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2863, pruned_loss=0.06246, over 3824485.51 frames. ], batch size: 136, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:49,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.616e+02 6.347e+02 7.690e+02 1.972e+03, threshold=1.269e+03, percent-clipped=6.0 +2023-04-03 02:20:22,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-03 02:20:30,330 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160784.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:20:44,497 INFO [train.py:903] (2/4) Epoch 24, batch 3750, loss[loss=0.2054, simple_loss=0.2852, pruned_loss=0.06287, over 19676.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2869, pruned_loss=0.06278, over 3828214.80 frames. ], batch size: 55, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:15,777 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-03 02:21:45,654 INFO [train.py:903] (2/4) Epoch 24, batch 3800, loss[loss=0.2029, simple_loss=0.2739, pruned_loss=0.06598, over 19786.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06228, over 3838091.71 frames. ], batch size: 48, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:53,459 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.186e+02 4.635e+02 5.250e+02 7.046e+02 1.734e+03, threshold=1.050e+03, percent-clipped=4.0 +2023-04-03 02:22:18,590 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 02:22:47,095 INFO [train.py:903] (2/4) Epoch 24, batch 3850, loss[loss=0.2228, simple_loss=0.3055, pruned_loss=0.07007, over 19495.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2851, pruned_loss=0.06238, over 3825559.98 frames. ], batch size: 64, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:22:55,995 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 02:23:18,527 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160919.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:23:48,688 INFO [train.py:903] (2/4) Epoch 24, batch 3900, loss[loss=0.1859, simple_loss=0.2636, pruned_loss=0.05417, over 19392.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2854, pruned_loss=0.06209, over 3816217.34 frames. ], batch size: 48, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:58,402 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 5.134e+02 6.381e+02 7.692e+02 1.884e+03, threshold=1.276e+03, percent-clipped=12.0 +2023-04-03 02:24:38,104 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:24:53,481 INFO [train.py:903] (2/4) Epoch 24, batch 3950, loss[loss=0.1962, simple_loss=0.2834, pruned_loss=0.05453, over 19652.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06129, over 3818896.19 frames. ], batch size: 55, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:24:57,052 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 02:25:41,894 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7806, 1.4178, 1.5594, 1.5289, 3.3518, 1.0275, 2.3772, 3.8231], + device='cuda:2'), covar=tensor([0.0481, 0.2817, 0.2944, 0.2010, 0.0699, 0.2681, 0.1446, 0.0217], + device='cuda:2'), in_proj_covar=tensor([0.0412, 0.0367, 0.0389, 0.0350, 0.0373, 0.0353, 0.0385, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:25:51,271 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161040.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:25:55,364 INFO [train.py:903] (2/4) Epoch 24, batch 4000, loss[loss=0.1866, simple_loss=0.2782, pruned_loss=0.04755, over 17984.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06085, over 3823979.51 frames. ], batch size: 83, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:26:03,420 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.651e+02 5.074e+02 6.327e+02 7.723e+02 1.762e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-03 02:26:21,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161065.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:26:41,804 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 02:26:57,369 INFO [train.py:903] (2/4) Epoch 24, batch 4050, loss[loss=0.1889, simple_loss=0.264, pruned_loss=0.05692, over 19489.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2858, pruned_loss=0.06219, over 3820950.89 frames. ], batch size: 49, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:57,726 INFO [train.py:903] (2/4) Epoch 24, batch 4100, loss[loss=0.2133, simple_loss=0.2976, pruned_loss=0.06453, over 18696.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2869, pruned_loss=0.06315, over 3828310.73 frames. ], batch size: 74, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:28:06,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.915e+02 6.129e+02 7.795e+02 1.333e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 02:28:31,091 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 02:29:00,654 INFO [train.py:903] (2/4) Epoch 24, batch 4150, loss[loss=0.2039, simple_loss=0.2952, pruned_loss=0.05632, over 19485.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2874, pruned_loss=0.06324, over 3813027.58 frames. ], batch size: 64, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:29:49,607 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5748, 2.2938, 1.8527, 1.5755, 2.0985, 1.4239, 1.3600, 1.9745], + device='cuda:2'), covar=tensor([0.1101, 0.0855, 0.1046, 0.0865, 0.0573, 0.1348, 0.0807, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0315, 0.0337, 0.0267, 0.0247, 0.0340, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:29:50,553 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:01,376 INFO [train.py:903] (2/4) Epoch 24, batch 4200, loss[loss=0.2012, simple_loss=0.2921, pruned_loss=0.05518, over 19564.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2875, pruned_loss=0.06305, over 3816963.43 frames. 
], batch size: 61, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:30:02,616 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 02:30:04,058 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3415, 1.3576, 1.2725, 1.1309, 1.1182, 1.1731, 0.3503, 0.6223], + device='cuda:2'), covar=tensor([0.0485, 0.0523, 0.0345, 0.0509, 0.0815, 0.0641, 0.1172, 0.0851], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0361, 0.0365, 0.0388, 0.0466, 0.0395, 0.0342, 0.0347], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:30:08,527 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:09,254 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.684e+02 6.110e+02 7.845e+02 2.290e+03, threshold=1.222e+03, percent-clipped=7.0 +2023-04-03 02:30:24,259 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:03,256 INFO [train.py:903] (2/4) Epoch 24, batch 4250, loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06853, over 19318.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2882, pruned_loss=0.06333, over 3810622.90 frames. ], batch size: 66, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:31:17,058 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 02:31:28,264 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 02:31:44,254 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:04,737 INFO [train.py:903] (2/4) Epoch 24, batch 4300, loss[loss=0.2098, simple_loss=0.298, pruned_loss=0.06079, over 19550.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2875, pruned_loss=0.0629, over 3814788.33 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:32:12,544 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.585e+02 5.768e+02 7.257e+02 2.214e+03, threshold=1.154e+03, percent-clipped=5.0 +2023-04-03 02:32:38,074 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2006, 1.3265, 1.6370, 1.1452, 2.4810, 3.3473, 3.0293, 3.5346], + device='cuda:2'), covar=tensor([0.1640, 0.3712, 0.3444, 0.2668, 0.0667, 0.0219, 0.0241, 0.0326], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0325, 0.0356, 0.0265, 0.0245, 0.0190, 0.0217, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 02:32:47,636 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:57,532 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 02:33:06,257 INFO [train.py:903] (2/4) Epoch 24, batch 4350, loss[loss=0.2344, simple_loss=0.3161, pruned_loss=0.0763, over 18839.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06172, over 3814473.32 frames. 
], batch size: 74, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:33:37,798 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:07,041 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:08,989 INFO [train.py:903] (2/4) Epoch 24, batch 4400, loss[loss=0.1998, simple_loss=0.2867, pruned_loss=0.05644, over 19524.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2862, pruned_loss=0.06227, over 3802379.29 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:34:15,585 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.009e+02 6.093e+02 7.233e+02 1.222e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 02:34:31,952 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 02:34:41,638 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 02:34:43,248 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3433, 1.3858, 1.5400, 1.4812, 1.7625, 1.8370, 1.8067, 0.6383], + device='cuda:2'), covar=tensor([0.2460, 0.4304, 0.2737, 0.1971, 0.1642, 0.2338, 0.1391, 0.4799], + device='cuda:2'), in_proj_covar=tensor([0.0540, 0.0651, 0.0725, 0.0493, 0.0623, 0.0535, 0.0661, 0.0556], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:34:50,975 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:35:10,541 INFO [train.py:903] (2/4) Epoch 24, batch 4450, loss[loss=0.1942, simple_loss=0.2865, pruned_loss=0.05091, over 19675.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2873, pruned_loss=0.06317, over 3779620.38 frames. ], batch size: 58, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:10,900 INFO [train.py:903] (2/4) Epoch 24, batch 4500, loss[loss=0.2006, simple_loss=0.2795, pruned_loss=0.06086, over 19330.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06335, over 3792843.49 frames. ], batch size: 66, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:17,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.067e+02 5.206e+02 6.185e+02 8.214e+02 2.130e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 02:36:53,397 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:10,282 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:12,191 INFO [train.py:903] (2/4) Epoch 24, batch 4550, loss[loss=0.1763, simple_loss=0.2591, pruned_loss=0.04677, over 19465.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2866, pruned_loss=0.06324, over 3796403.43 frames. ], batch size: 49, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:37:20,026 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-03 02:37:28,118 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4824, 1.6279, 1.8503, 1.7353, 2.6865, 2.2701, 2.8340, 1.3220], + device='cuda:2'), covar=tensor([0.2452, 0.4117, 0.2572, 0.1864, 0.1478, 0.2129, 0.1466, 0.4217], + device='cuda:2'), in_proj_covar=tensor([0.0544, 0.0655, 0.0731, 0.0496, 0.0628, 0.0539, 0.0666, 0.0559], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 02:37:44,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 02:38:01,518 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:38:15,064 INFO [train.py:903] (2/4) Epoch 24, batch 4600, loss[loss=0.1734, simple_loss=0.2588, pruned_loss=0.04397, over 19840.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2872, pruned_loss=0.06317, over 3803659.58 frames. ], batch size: 52, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:38:21,971 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.894e+02 5.872e+02 7.852e+02 1.807e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 02:38:33,811 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:38:45,540 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6171, 1.4388, 1.6100, 1.5563, 3.2134, 1.2585, 2.4413, 3.6699], + device='cuda:2'), covar=tensor([0.0522, 0.2727, 0.2770, 0.1901, 0.0691, 0.2350, 0.1261, 0.0239], + device='cuda:2'), in_proj_covar=tensor([0.0413, 0.0367, 0.0388, 0.0349, 0.0373, 0.0351, 0.0385, 0.0405], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:39:15,645 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:16,431 INFO [train.py:903] (2/4) Epoch 24, batch 4650, loss[loss=0.179, simple_loss=0.2586, pruned_loss=0.04975, over 19787.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2869, pruned_loss=0.06286, over 3808058.12 frames. 
], batch size: 49, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:39:22,562 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:29,473 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6331, 1.2342, 1.3020, 1.5025, 1.1296, 1.4333, 1.2998, 1.4955], + device='cuda:2'), covar=tensor([0.1270, 0.1276, 0.1657, 0.1079, 0.1390, 0.0637, 0.1579, 0.0855], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0358, 0.0313, 0.0257, 0.0307, 0.0255, 0.0316, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:39:32,905 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0519, 1.8754, 1.9849, 2.5363, 2.0818, 2.2199, 2.2088, 2.1280], + device='cuda:2'), covar=tensor([0.0710, 0.0868, 0.0783, 0.0647, 0.0895, 0.0683, 0.0802, 0.0620], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0221, 0.0226, 0.0238, 0.0223, 0.0211, 0.0188, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-03 02:39:33,561 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 02:39:33,927 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:44,908 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 02:39:53,578 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:40:19,299 INFO [train.py:903] (2/4) Epoch 24, batch 4700, loss[loss=0.239, simple_loss=0.3203, pruned_loss=0.07891, over 18610.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2866, pruned_loss=0.06253, over 3792890.89 frames. ], batch size: 74, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:40:26,427 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 5.147e+02 6.134e+02 7.606e+02 1.537e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-03 02:40:39,902 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 02:40:43,374 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:41:20,594 INFO [train.py:903] (2/4) Epoch 24, batch 4750, loss[loss=0.2013, simple_loss=0.2892, pruned_loss=0.05671, over 19697.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.287, pruned_loss=0.06292, over 3798375.01 frames. 
], batch size: 59, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:41:56,371 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0941, 5.1317, 5.9484, 5.9476, 1.9720, 5.5939, 4.7083, 5.6039], + device='cuda:2'), covar=tensor([0.1751, 0.0816, 0.0567, 0.0623, 0.6139, 0.0995, 0.0625, 0.1134], + device='cuda:2'), in_proj_covar=tensor([0.0792, 0.0757, 0.0960, 0.0842, 0.0844, 0.0729, 0.0576, 0.0892], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 02:41:57,360 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:09,935 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:23,972 INFO [train.py:903] (2/4) Epoch 24, batch 4800, loss[loss=0.2406, simple_loss=0.3203, pruned_loss=0.08042, over 19402.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2877, pruned_loss=0.06311, over 3810264.60 frames. ], batch size: 70, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:42:31,545 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.225e+02 6.101e+02 7.305e+02 1.695e+03, threshold=1.220e+03, percent-clipped=2.0 +2023-04-03 02:43:04,826 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:43:25,947 INFO [train.py:903] (2/4) Epoch 24, batch 4850, loss[loss=0.1799, simple_loss=0.269, pruned_loss=0.04537, over 19759.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2867, pruned_loss=0.06273, over 3810472.16 frames. ], batch size: 54, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:43:50,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 02:44:11,998 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 02:44:16,557 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 02:44:17,697 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 02:44:19,156 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161937.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:24,034 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-03 02:44:27,636 INFO [train.py:903] (2/4) Epoch 24, batch 4900, loss[loss=0.1781, simple_loss=0.2619, pruned_loss=0.04713, over 19714.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2863, pruned_loss=0.06224, over 3811677.18 frames. ], batch size: 51, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:44:27,660 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 02:44:33,871 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:34,701 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 5.435e+02 6.496e+02 8.047e+02 2.666e+03, threshold=1.299e+03, percent-clipped=6.0 +2023-04-03 02:44:47,020 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-03 02:44:53,298 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:55,532 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3166, 1.4825, 1.8363, 1.4321, 2.7186, 3.5184, 3.2951, 3.6962], + device='cuda:2'), covar=tensor([0.1580, 0.3604, 0.3240, 0.2505, 0.0687, 0.0276, 0.0201, 0.0309], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0326, 0.0357, 0.0265, 0.0246, 0.0191, 0.0218, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 02:45:05,670 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:12,702 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 02:45:23,784 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:28,867 INFO [train.py:903] (2/4) Epoch 24, batch 4950, loss[loss=0.2041, simple_loss=0.2882, pruned_loss=0.06003, over 19581.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2869, pruned_loss=0.06245, over 3812929.29 frames. ], batch size: 61, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:45:44,734 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8789, 1.6073, 1.4916, 1.8220, 1.5684, 1.5778, 1.4514, 1.7278], + device='cuda:2'), covar=tensor([0.1075, 0.1246, 0.1576, 0.1121, 0.1310, 0.0607, 0.1517, 0.0790], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0356, 0.0311, 0.0255, 0.0304, 0.0253, 0.0314, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:45:47,843 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 02:46:12,997 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 02:46:33,466 INFO [train.py:903] (2/4) Epoch 24, batch 5000, loss[loss=0.2084, simple_loss=0.2946, pruned_loss=0.06112, over 19591.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2873, pruned_loss=0.06235, over 3823456.38 frames. ], batch size: 52, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:46:41,315 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.589e+02 5.769e+02 7.322e+02 1.477e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-03 02:46:44,811 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 02:46:56,725 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 02:47:35,231 INFO [train.py:903] (2/4) Epoch 24, batch 5050, loss[loss=0.2045, simple_loss=0.2918, pruned_loss=0.05861, over 19722.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2862, pruned_loss=0.06183, over 3837200.52 frames. ], batch size: 63, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:47:36,683 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:14,960 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 02:48:25,512 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:37,731 INFO [train.py:903] (2/4) Epoch 24, batch 5100, loss[loss=0.226, simple_loss=0.3194, pruned_loss=0.06634, over 19769.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2866, pruned_loss=0.06219, over 3826786.07 frames. ], batch size: 63, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:48:44,637 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.540e+02 5.777e+02 7.463e+02 1.637e+03, threshold=1.155e+03, percent-clipped=6.0 +2023-04-03 02:48:51,591 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 02:48:55,184 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 02:48:55,603 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:58,701 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 02:49:18,978 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:39,643 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162193.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:40,375 INFO [train.py:903] (2/4) Epoch 24, batch 5150, loss[loss=0.1858, simple_loss=0.2595, pruned_loss=0.05608, over 18975.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2864, pruned_loss=0.06221, over 3812334.53 frames. ], batch size: 42, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:49:57,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 02:50:11,357 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:31,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 02:50:43,109 INFO [train.py:903] (2/4) Epoch 24, batch 5200, loss[loss=0.1939, simple_loss=0.2897, pruned_loss=0.04902, over 19760.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2858, pruned_loss=0.06163, over 3819292.70 frames. ], batch size: 63, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:50:50,170 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:51,041 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.774e+02 5.644e+02 7.681e+02 1.514e+03, threshold=1.129e+03, percent-clipped=4.0 +2023-04-03 02:50:59,696 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 02:51:44,096 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 02:51:44,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:51:46,282 INFO [train.py:903] (2/4) Epoch 24, batch 5250, loss[loss=0.2215, simple_loss=0.3073, pruned_loss=0.06782, over 19653.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06128, over 3815113.98 frames. 
], batch size: 55, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:50,081 INFO [train.py:903] (2/4) Epoch 24, batch 5300, loss[loss=0.2387, simple_loss=0.3204, pruned_loss=0.07855, over 18752.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06109, over 3829799.15 frames. ], batch size: 74, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:57,096 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.862e+02 5.825e+02 7.901e+02 2.284e+03, threshold=1.165e+03, percent-clipped=8.0 +2023-04-03 02:53:08,423 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 02:53:09,853 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3757, 3.9608, 2.6396, 3.5289, 0.7485, 3.9521, 3.8191, 3.8767], + device='cuda:2'), covar=tensor([0.0722, 0.1085, 0.1994, 0.0911, 0.4226, 0.0764, 0.0969, 0.1223], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0422, 0.0507, 0.0355, 0.0406, 0.0446, 0.0440, 0.0472], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 02:53:51,567 INFO [train.py:903] (2/4) Epoch 24, batch 5350, loss[loss=0.1769, simple_loss=0.2513, pruned_loss=0.05128, over 19023.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06114, over 3834534.93 frames. ], batch size: 42, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:54:28,322 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 02:54:39,209 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0686, 0.9888, 1.0009, 1.1612, 0.9211, 1.1439, 1.0816, 1.0590], + device='cuda:2'), covar=tensor([0.0734, 0.0841, 0.0898, 0.0518, 0.0751, 0.0678, 0.0705, 0.0631], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0225, 0.0212, 0.0189, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 02:54:48,009 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:54:54,723 INFO [train.py:903] (2/4) Epoch 24, batch 5400, loss[loss=0.2033, simple_loss=0.2861, pruned_loss=0.06025, over 19594.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.284, pruned_loss=0.06145, over 3834786.60 frames. ], batch size: 52, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:55:02,635 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.752e+02 4.703e+02 6.237e+02 7.666e+02 1.372e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 02:55:40,747 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. limit=5.0 +2023-04-03 02:55:51,453 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3333, 1.3468, 1.8962, 1.3418, 2.8502, 3.8108, 3.5825, 4.0609], + device='cuda:2'), covar=tensor([0.1567, 0.3729, 0.3125, 0.2451, 0.0550, 0.0191, 0.0195, 0.0253], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0325, 0.0356, 0.0265, 0.0244, 0.0190, 0.0216, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 02:55:56,870 INFO [train.py:903] (2/4) Epoch 24, batch 5450, loss[loss=0.2275, simple_loss=0.3046, pruned_loss=0.07523, over 18491.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2837, pruned_loss=0.06127, over 3844945.11 frames. 
], batch size: 84, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:56:41,313 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:00,252 INFO [train.py:903] (2/4) Epoch 24, batch 5500, loss[loss=0.1978, simple_loss=0.2857, pruned_loss=0.05498, over 19771.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06131, over 3854160.82 frames. ], batch size: 56, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:57:05,472 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:07,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.077e+02 6.464e+02 7.861e+02 1.317e+03, threshold=1.293e+03, percent-clipped=1.0 +2023-04-03 02:57:12,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:25,042 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 02:57:36,151 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:01,106 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:02,007 INFO [train.py:903] (2/4) Epoch 24, batch 5550, loss[loss=0.2225, simple_loss=0.3046, pruned_loss=0.07025, over 19726.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06134, over 3856041.11 frames. ], batch size: 51, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:58:08,788 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 02:58:59,597 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 02:59:02,960 INFO [train.py:903] (2/4) Epoch 24, batch 5600, loss[loss=0.2039, simple_loss=0.2829, pruned_loss=0.06246, over 19659.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06157, over 3848048.44 frames. ], batch size: 53, lr: 3.40e-03, grad_scale: 16.0 +2023-04-03 02:59:12,273 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 5.034e+02 5.949e+02 8.933e+02 2.100e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 02:59:43,936 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7447, 2.6755, 2.3943, 2.7566, 2.5053, 2.3743, 2.2905, 2.7732], + device='cuda:2'), covar=tensor([0.1035, 0.1524, 0.1464, 0.1165, 0.1483, 0.0534, 0.1448, 0.0648], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0359, 0.0313, 0.0257, 0.0305, 0.0255, 0.0316, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:00:07,504 INFO [train.py:903] (2/4) Epoch 24, batch 5650, loss[loss=0.1955, simple_loss=0.2695, pruned_loss=0.06074, over 19353.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2831, pruned_loss=0.06129, over 3855778.91 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:00:24,978 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:00:41,598 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. 
limit=2.0 +2023-04-03 03:00:55,010 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 03:01:09,549 INFO [train.py:903] (2/4) Epoch 24, batch 5700, loss[loss=0.2223, simple_loss=0.3038, pruned_loss=0.07042, over 19665.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2833, pruned_loss=0.06157, over 3854246.13 frames. ], batch size: 60, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:01:17,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 4.704e+02 5.740e+02 7.100e+02 1.656e+03, threshold=1.148e+03, percent-clipped=4.0 +2023-04-03 03:02:11,499 INFO [train.py:903] (2/4) Epoch 24, batch 5750, loss[loss=0.224, simple_loss=0.3, pruned_loss=0.074, over 17319.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.283, pruned_loss=0.06142, over 3844684.36 frames. ], batch size: 101, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:02:13,876 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 03:02:22,213 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 03:02:28,834 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 03:02:31,423 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:01,631 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 03:03:03,519 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:04,679 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7813, 1.1862, 1.4259, 1.2252, 2.4141, 1.0899, 2.0885, 2.7127], + device='cuda:2'), covar=tensor([0.0566, 0.2591, 0.2463, 0.1776, 0.0735, 0.2044, 0.1046, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0415, 0.0369, 0.0390, 0.0350, 0.0373, 0.0354, 0.0387, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:03:13,229 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2651, 1.2695, 1.4140, 1.4093, 1.6512, 1.6956, 1.6621, 0.6071], + device='cuda:2'), covar=tensor([0.2639, 0.4420, 0.2872, 0.2163, 0.1765, 0.2514, 0.1490, 0.5301], + device='cuda:2'), in_proj_covar=tensor([0.0545, 0.0658, 0.0734, 0.0498, 0.0630, 0.0545, 0.0670, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 03:03:13,911 INFO [train.py:903] (2/4) Epoch 24, batch 5800, loss[loss=0.2687, simple_loss=0.3425, pruned_loss=0.09741, over 18093.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2837, pruned_loss=0.06153, over 3844955.07 frames. 
], batch size: 83, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:03:15,386 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162845.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:22,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 4.857e+02 6.549e+02 8.249e+02 1.553e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-03 03:03:51,970 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:04:16,915 INFO [train.py:903] (2/4) Epoch 24, batch 5850, loss[loss=0.2039, simple_loss=0.2933, pruned_loss=0.05727, over 19329.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06244, over 3831315.22 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:04:49,657 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6161, 4.2256, 2.6719, 3.7110, 0.9911, 4.1636, 4.0406, 4.1206], + device='cuda:2'), covar=tensor([0.0601, 0.0937, 0.1976, 0.0880, 0.3896, 0.0750, 0.0938, 0.1102], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0417, 0.0501, 0.0351, 0.0401, 0.0440, 0.0434, 0.0466], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:05:20,439 INFO [train.py:903] (2/4) Epoch 24, batch 5900, loss[loss=0.1872, simple_loss=0.2737, pruned_loss=0.05034, over 19786.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2855, pruned_loss=0.06208, over 3846759.25 frames. ], batch size: 56, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:05:26,353 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 03:05:28,684 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 4.550e+02 5.410e+02 6.827e+02 1.573e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 03:05:43,760 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:05:44,439 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 03:05:44,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3616, 3.9676, 2.5669, 3.5321, 0.9239, 3.9131, 3.7779, 3.8441], + device='cuda:2'), covar=tensor([0.0714, 0.0990, 0.2068, 0.0876, 0.4038, 0.0773, 0.1005, 0.1249], + device='cuda:2'), in_proj_covar=tensor([0.0512, 0.0419, 0.0503, 0.0352, 0.0402, 0.0442, 0.0436, 0.0468], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:06:15,287 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162988.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:16,471 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:18,802 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3216, 2.2137, 2.0136, 1.9457, 1.8110, 1.8905, 0.5936, 1.3323], + device='cuda:2'), covar=tensor([0.0707, 0.0605, 0.0511, 0.0857, 0.1101, 0.0962, 0.1420, 0.1062], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0360, 0.0363, 0.0389, 0.0467, 0.0396, 0.0342, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 03:06:21,817 INFO [train.py:903] (2/4) Epoch 24, batch 5950, loss[loss=0.2306, simple_loss=0.3076, pruned_loss=0.07679, over 19653.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06224, over 3829606.56 frames. ], batch size: 55, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,799 INFO [train.py:903] (2/4) Epoch 24, batch 6000, loss[loss=0.2237, simple_loss=0.3107, pruned_loss=0.0684, over 19764.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2848, pruned_loss=0.06206, over 3820182.36 frames. ], batch size: 63, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,799 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 03:07:35,188 INFO [train.py:937] (2/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2679, pruned_loss=0.03436, over 944034.00 frames. +2023-04-03 03:07:35,189 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 03:07:43,475 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.276e+02 6.488e+02 8.005e+02 1.643e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-03 03:08:35,915 INFO [train.py:903] (2/4) Epoch 24, batch 6050, loss[loss=0.1819, simple_loss=0.2611, pruned_loss=0.05133, over 19598.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2831, pruned_loss=0.06156, over 3823882.24 frames. ], batch size: 50, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:08:53,248 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:09:37,877 INFO [train.py:903] (2/4) Epoch 24, batch 6100, loss[loss=0.2107, simple_loss=0.2914, pruned_loss=0.06504, over 19355.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2834, pruned_loss=0.06147, over 3830236.39 frames. 
], batch size: 66, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:09:42,780 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4898, 1.5071, 1.7660, 1.6703, 2.6161, 2.2681, 2.7158, 1.2717], + device='cuda:2'), covar=tensor([0.2486, 0.4460, 0.2728, 0.1935, 0.1513, 0.2186, 0.1479, 0.4382], + device='cuda:2'), in_proj_covar=tensor([0.0543, 0.0656, 0.0731, 0.0495, 0.0626, 0.0541, 0.0666, 0.0559], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 03:09:45,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 5.048e+02 5.892e+02 7.607e+02 1.565e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 03:09:52,218 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.81 vs. limit=5.0 +2023-04-03 03:10:34,793 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163189.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:10:40,155 INFO [train.py:903] (2/4) Epoch 24, batch 6150, loss[loss=0.219, simple_loss=0.2946, pruned_loss=0.07172, over 19859.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2834, pruned_loss=0.06152, over 3815149.25 frames. ], batch size: 52, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:10:52,479 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8521, 1.6444, 1.4912, 1.7789, 1.4871, 1.5822, 1.4860, 1.7474], + device='cuda:2'), covar=tensor([0.1087, 0.1289, 0.1523, 0.1032, 0.1289, 0.0585, 0.1508, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0356, 0.0312, 0.0254, 0.0302, 0.0254, 0.0314, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:11:10,647 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 03:11:28,565 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-03 03:11:29,654 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.40 vs. limit=5.0 +2023-04-03 03:11:43,668 INFO [train.py:903] (2/4) Epoch 24, batch 6200, loss[loss=0.2038, simple_loss=0.2893, pruned_loss=0.05912, over 19167.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2826, pruned_loss=0.06095, over 3816477.41 frames. 
], batch size: 69, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:44,110 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:11:51,417 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.538e+02 5.825e+02 7.342e+02 1.276e+03, threshold=1.165e+03, percent-clipped=3.0 +2023-04-03 03:12:00,549 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163258.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:04,000 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1061, 1.9120, 1.8430, 2.1554, 1.8985, 1.8447, 1.6739, 2.0631], + device='cuda:2'), covar=tensor([0.1047, 0.1481, 0.1400, 0.0997, 0.1345, 0.0539, 0.1545, 0.0701], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0357, 0.0312, 0.0254, 0.0303, 0.0254, 0.0315, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:12:14,189 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163269.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:38,708 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9905, 2.0725, 2.3208, 2.6198, 2.0015, 2.5305, 2.3313, 2.1340], + device='cuda:2'), covar=tensor([0.4284, 0.3872, 0.1914, 0.2503, 0.4062, 0.2160, 0.4921, 0.3460], + device='cuda:2'), in_proj_covar=tensor([0.0912, 0.0985, 0.0725, 0.0937, 0.0893, 0.0826, 0.0849, 0.0791], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 03:12:43,994 INFO [train.py:903] (2/4) Epoch 24, batch 6250, loss[loss=0.2146, simple_loss=0.2973, pruned_loss=0.06591, over 18844.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2831, pruned_loss=0.06136, over 3814989.77 frames. ], batch size: 74, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:12:56,428 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:13:11,308 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 03:13:45,525 INFO [train.py:903] (2/4) Epoch 24, batch 6300, loss[loss=0.1718, simple_loss=0.2559, pruned_loss=0.04384, over 19482.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2837, pruned_loss=0.06192, over 3815095.48 frames. ], batch size: 49, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:13:54,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.008e+02 6.887e+02 8.761e+02 2.377e+03, threshold=1.377e+03, percent-clipped=3.0 +2023-04-03 03:14:40,289 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.31 vs. limit=5.0 +2023-04-03 03:14:48,467 INFO [train.py:903] (2/4) Epoch 24, batch 6350, loss[loss=0.2159, simple_loss=0.2997, pruned_loss=0.06602, over 19539.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2833, pruned_loss=0.06175, over 3810975.98 frames. 
], batch size: 54, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:30,625 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1229, 1.8694, 1.9237, 2.8395, 1.9966, 2.4599, 2.5108, 2.1528], + device='cuda:2'), covar=tensor([0.0843, 0.0915, 0.1009, 0.0733, 0.0881, 0.0698, 0.0856, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0226, 0.0214, 0.0190, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 03:15:50,638 INFO [train.py:903] (2/4) Epoch 24, batch 6400, loss[loss=0.2103, simple_loss=0.2956, pruned_loss=0.0625, over 17416.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2847, pruned_loss=0.06193, over 3812056.76 frames. ], batch size: 101, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:59,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 4.869e+02 5.854e+02 7.378e+02 1.563e+03, threshold=1.171e+03, percent-clipped=2.0 +2023-04-03 03:16:00,357 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:16:01,974 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-03 03:16:52,288 INFO [train.py:903] (2/4) Epoch 24, batch 6450, loss[loss=0.2019, simple_loss=0.2807, pruned_loss=0.06156, over 19686.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06129, over 3821755.75 frames. ], batch size: 53, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:01,267 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-03 03:17:26,447 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4062, 4.0396, 2.6042, 3.6113, 0.7773, 3.9996, 3.9082, 3.9534], + device='cuda:2'), covar=tensor([0.0693, 0.1042, 0.1935, 0.0805, 0.4189, 0.0656, 0.0842, 0.1246], + device='cuda:2'), in_proj_covar=tensor([0.0512, 0.0418, 0.0502, 0.0352, 0.0402, 0.0443, 0.0436, 0.0467], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:17:34,426 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 03:17:53,149 INFO [train.py:903] (2/4) Epoch 24, batch 6500, loss[loss=0.1831, simple_loss=0.2514, pruned_loss=0.05736, over 14779.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06181, over 3813907.84 frames. ], batch size: 32, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:56,713 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 03:18:01,366 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.063e+02 4.704e+02 6.024e+02 8.376e+02 1.457e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 03:18:14,495 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163560.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:22,409 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:25,181 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=5.01 vs. 
limit=5.0 +2023-04-03 03:18:44,325 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:55,198 INFO [train.py:903] (2/4) Epoch 24, batch 6550, loss[loss=0.2105, simple_loss=0.2949, pruned_loss=0.06305, over 19360.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2849, pruned_loss=0.06196, over 3797516.15 frames. ], batch size: 66, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:19:05,574 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163602.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:19:25,613 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5721, 1.3471, 1.4389, 1.2066, 2.2140, 1.0042, 2.0360, 2.5346], + device='cuda:2'), covar=tensor([0.0741, 0.2622, 0.2875, 0.1816, 0.0913, 0.2202, 0.1149, 0.0440], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0369, 0.0391, 0.0349, 0.0374, 0.0352, 0.0386, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:19:57,506 INFO [train.py:903] (2/4) Epoch 24, batch 6600, loss[loss=0.2018, simple_loss=0.293, pruned_loss=0.05535, over 19699.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06176, over 3789185.05 frames. ], batch size: 59, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:20:05,518 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.011e+02 6.018e+02 7.633e+02 1.393e+03, threshold=1.204e+03, percent-clipped=8.0 +2023-04-03 03:20:25,830 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6322, 1.4601, 1.2525, 1.5720, 1.3489, 1.3152, 1.2960, 1.4802], + device='cuda:2'), covar=tensor([0.1299, 0.1558, 0.1990, 0.1357, 0.1627, 0.1029, 0.2031, 0.1065], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0360, 0.0315, 0.0256, 0.0306, 0.0256, 0.0316, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:20:57,662 INFO [train.py:903] (2/4) Epoch 24, batch 6650, loss[loss=0.1903, simple_loss=0.2776, pruned_loss=0.05152, over 18742.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.287, pruned_loss=0.06283, over 3799553.42 frames. ], batch size: 74, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:21:12,738 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6445, 1.5425, 1.5356, 2.2944, 1.6433, 2.0589, 1.9578, 1.8012], + device='cuda:2'), covar=tensor([0.0870, 0.0984, 0.1053, 0.0732, 0.0906, 0.0752, 0.0891, 0.0689], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0228, 0.0240, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 03:21:25,783 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163717.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:21:58,173 INFO [train.py:903] (2/4) Epoch 24, batch 6700, loss[loss=0.1735, simple_loss=0.2519, pruned_loss=0.04758, over 19397.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06283, over 3802117.78 frames. 
], batch size: 48, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:22:08,770 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.744e+02 6.010e+02 8.153e+02 1.593e+03, threshold=1.202e+03, percent-clipped=6.0 +2023-04-03 03:22:24,860 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5344, 1.5938, 1.8363, 1.7707, 2.7826, 2.3266, 2.9541, 1.2923], + device='cuda:2'), covar=tensor([0.2545, 0.4460, 0.2837, 0.1954, 0.1513, 0.2213, 0.1407, 0.4430], + device='cuda:2'), in_proj_covar=tensor([0.0542, 0.0658, 0.0734, 0.0495, 0.0627, 0.0541, 0.0667, 0.0558], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 03:22:57,595 INFO [train.py:903] (2/4) Epoch 24, batch 6750, loss[loss=0.1606, simple_loss=0.2375, pruned_loss=0.04182, over 19016.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.286, pruned_loss=0.06276, over 3796681.74 frames. ], batch size: 42, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:23:30,574 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:23:53,913 INFO [train.py:903] (2/4) Epoch 24, batch 6800, loss[loss=0.2067, simple_loss=0.2817, pruned_loss=0.06587, over 19666.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2869, pruned_loss=0.06302, over 3808548.47 frames. ], batch size: 53, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:23:58,831 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:24:03,016 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.890e+02 5.869e+02 7.347e+02 2.478e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 03:24:39,583 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 03:24:40,027 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 03:24:42,798 INFO [train.py:903] (2/4) Epoch 25, batch 0, loss[loss=0.1524, simple_loss=0.2331, pruned_loss=0.03585, over 19766.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2331, pruned_loss=0.03585, over 19766.00 frames. ], batch size: 47, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:24:42,798 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 03:24:54,396 INFO [train.py:937] (2/4) Epoch 25, validation: loss=0.1672, simple_loss=0.2675, pruned_loss=0.03346, over 944034.00 frames. +2023-04-03 03:24:54,397 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 03:25:06,953 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 03:25:12,144 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4224, 1.4442, 1.5863, 1.6365, 2.2650, 1.9956, 2.2193, 0.9577], + device='cuda:2'), covar=tensor([0.2712, 0.4725, 0.2989, 0.2190, 0.1642, 0.2525, 0.1702, 0.4926], + device='cuda:2'), in_proj_covar=tensor([0.0546, 0.0662, 0.0739, 0.0498, 0.0632, 0.0545, 0.0673, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 03:25:57,057 INFO [train.py:903] (2/4) Epoch 25, batch 50, loss[loss=0.2208, simple_loss=0.3034, pruned_loss=0.0691, over 19342.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2852, pruned_loss=0.06157, over 871308.23 frames. 
], batch size: 66, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:26:35,499 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.575e+02 5.589e+02 7.102e+02 2.434e+03, threshold=1.118e+03, percent-clipped=5.0 +2023-04-03 03:26:36,735 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 03:27:00,584 INFO [train.py:903] (2/4) Epoch 25, batch 100, loss[loss=0.2138, simple_loss=0.2967, pruned_loss=0.06547, over 13136.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.06162, over 1535276.02 frames. ], batch size: 136, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:27:03,101 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163973.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:15,428 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 03:27:34,442 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:42,634 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.12 vs. limit=5.0 +2023-04-03 03:28:05,197 INFO [train.py:903] (2/4) Epoch 25, batch 150, loss[loss=0.1576, simple_loss=0.2415, pruned_loss=0.03682, over 19593.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2841, pruned_loss=0.06145, over 2045368.87 frames. ], batch size: 50, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:28:42,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 5.520e+02 6.265e+02 7.455e+02 1.438e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-03 03:29:06,846 INFO [train.py:903] (2/4) Epoch 25, batch 200, loss[loss=0.1877, simple_loss=0.2749, pruned_loss=0.05024, over 19693.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2856, pruned_loss=0.06209, over 2442506.50 frames. ], batch size: 59, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:29:09,369 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 03:29:53,482 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:30:10,581 INFO [train.py:903] (2/4) Epoch 25, batch 250, loss[loss=0.1905, simple_loss=0.2689, pruned_loss=0.05603, over 19374.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2862, pruned_loss=0.06292, over 2755935.77 frames. 
], batch size: 47, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:30:10,857 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5787, 1.2938, 1.5486, 1.5109, 3.1120, 1.1909, 2.4601, 3.6029], + device='cuda:2'), covar=tensor([0.0623, 0.3187, 0.3029, 0.2021, 0.0852, 0.2625, 0.1400, 0.0301], + device='cuda:2'), in_proj_covar=tensor([0.0414, 0.0369, 0.0391, 0.0348, 0.0374, 0.0352, 0.0387, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:30:32,840 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1279, 2.0001, 1.9080, 1.7477, 1.5844, 1.7168, 0.5817, 1.0202], + device='cuda:2'), covar=tensor([0.0673, 0.0641, 0.0488, 0.0820, 0.1227, 0.0963, 0.1377, 0.1132], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0362, 0.0366, 0.0388, 0.0466, 0.0397, 0.0343, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 03:30:48,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 4.875e+02 6.095e+02 7.386e+02 2.001e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 03:31:13,870 INFO [train.py:903] (2/4) Epoch 25, batch 300, loss[loss=0.1858, simple_loss=0.2724, pruned_loss=0.04957, over 19762.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2854, pruned_loss=0.06273, over 2998219.80 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:31:16,557 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:32:18,088 INFO [train.py:903] (2/4) Epoch 25, batch 350, loss[loss=0.19, simple_loss=0.2761, pruned_loss=0.05192, over 19579.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2844, pruned_loss=0.06255, over 3184428.48 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:32:25,190 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 03:32:42,696 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3762, 1.3288, 1.7931, 1.4232, 2.6504, 3.7680, 3.4341, 3.9211], + device='cuda:2'), covar=tensor([0.1573, 0.3913, 0.3433, 0.2508, 0.0722, 0.0197, 0.0218, 0.0257], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0328, 0.0358, 0.0266, 0.0247, 0.0190, 0.0217, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 03:32:54,600 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 03:32:54,991 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.912e+02 5.440e+02 6.552e+02 9.332e+02 1.789e+03, threshold=1.310e+03, percent-clipped=12.0 +2023-04-03 03:33:20,726 INFO [train.py:903] (2/4) Epoch 25, batch 400, loss[loss=0.1926, simple_loss=0.2742, pruned_loss=0.05552, over 19679.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2846, pruned_loss=0.06197, over 3337055.71 frames. ], batch size: 53, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:24,823 INFO [train.py:903] (2/4) Epoch 25, batch 450, loss[loss=0.2339, simple_loss=0.3156, pruned_loss=0.07607, over 19293.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2842, pruned_loss=0.06166, over 3442833.55 frames. 
], batch size: 66, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:59,270 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 03:35:00,465 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 03:35:02,826 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 4.778e+02 5.577e+02 7.081e+02 1.680e+03, threshold=1.115e+03, percent-clipped=3.0 +2023-04-03 03:35:27,605 INFO [train.py:903] (2/4) Epoch 25, batch 500, loss[loss=0.247, simple_loss=0.3254, pruned_loss=0.08429, over 17222.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.06241, over 3506263.30 frames. ], batch size: 101, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:35:35,604 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:36:31,236 INFO [train.py:903] (2/4) Epoch 25, batch 550, loss[loss=0.1851, simple_loss=0.2636, pruned_loss=0.05325, over 19860.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2845, pruned_loss=0.06219, over 3584847.82 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:37:09,261 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.163e+02 6.218e+02 7.641e+02 1.675e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 03:37:09,455 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:37:14,154 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4446, 4.1304, 2.6536, 3.6290, 0.7645, 3.9995, 3.8675, 3.9467], + device='cuda:2'), covar=tensor([0.0618, 0.0829, 0.1792, 0.0813, 0.3860, 0.0684, 0.0927, 0.0921], + device='cuda:2'), in_proj_covar=tensor([0.0521, 0.0424, 0.0509, 0.0356, 0.0408, 0.0447, 0.0442, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:37:34,077 INFO [train.py:903] (2/4) Epoch 25, batch 600, loss[loss=0.2237, simple_loss=0.3055, pruned_loss=0.07092, over 18708.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2856, pruned_loss=0.06243, over 3618704.93 frames. ], batch size: 74, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:38:16,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 03:38:30,625 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:38:36,206 INFO [train.py:903] (2/4) Epoch 25, batch 650, loss[loss=0.2209, simple_loss=0.3006, pruned_loss=0.07064, over 19616.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.06255, over 3670243.63 frames. 
], batch size: 57, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:15,257 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.538e+02 5.913e+02 7.820e+02 1.600e+03, threshold=1.183e+03, percent-clipped=2.0 +2023-04-03 03:39:33,648 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:39:39,469 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6775, 1.5752, 1.5167, 2.1815, 1.4619, 1.9453, 1.8903, 1.7198], + device='cuda:2'), covar=tensor([0.0802, 0.0901, 0.1018, 0.0735, 0.0935, 0.0725, 0.0861, 0.0700], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0221, 0.0226, 0.0237, 0.0225, 0.0211, 0.0188, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-03 03:39:40,280 INFO [train.py:903] (2/4) Epoch 25, batch 700, loss[loss=0.2173, simple_loss=0.302, pruned_loss=0.06628, over 19674.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.0625, over 3699105.40 frames. ], batch size: 55, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:58,601 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:11,639 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:44,578 INFO [train.py:903] (2/4) Epoch 25, batch 750, loss[loss=0.1788, simple_loss=0.2587, pruned_loss=0.04941, over 19849.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2853, pruned_loss=0.06246, over 3731126.06 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:40:57,975 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:41:21,486 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.157e+02 6.753e+02 8.219e+02 1.587e+03, threshold=1.351e+03, percent-clipped=10.0 +2023-04-03 03:41:32,773 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-03 03:41:48,138 INFO [train.py:903] (2/4) Epoch 25, batch 800, loss[loss=0.2113, simple_loss=0.2822, pruned_loss=0.07017, over 19724.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2857, pruned_loss=0.06248, over 3764905.53 frames. ], batch size: 51, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:41:52,851 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164676.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:42:03,219 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 03:42:50,191 INFO [train.py:903] (2/4) Epoch 25, batch 850, loss[loss=0.2556, simple_loss=0.3314, pruned_loss=0.08989, over 19698.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.06189, over 3783273.26 frames. 
], batch size: 60, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:42:50,361 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:29,202 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 4.928e+02 6.003e+02 7.929e+02 1.446e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 03:43:43,444 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164764.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:44,371 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 03:43:52,639 INFO [train.py:903] (2/4) Epoch 25, batch 900, loss[loss=0.1794, simple_loss=0.2689, pruned_loss=0.04491, over 19682.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2863, pruned_loss=0.06235, over 3793966.97 frames. ], batch size: 53, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:29,969 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 03:44:38,706 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0438, 2.0982, 2.4182, 2.6742, 2.0056, 2.5404, 2.4286, 2.1888], + device='cuda:2'), covar=tensor([0.4654, 0.4380, 0.2064, 0.2694, 0.4586, 0.2517, 0.5059, 0.3633], + device='cuda:2'), in_proj_covar=tensor([0.0915, 0.0986, 0.0726, 0.0936, 0.0894, 0.0826, 0.0852, 0.0791], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 03:44:56,069 INFO [train.py:903] (2/4) Epoch 25, batch 950, loss[loss=0.2126, simple_loss=0.2848, pruned_loss=0.07014, over 19467.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.06282, over 3794326.17 frames. ], batch size: 49, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:57,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 03:44:57,644 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:15,725 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:28,770 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:32,822 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.681e+02 5.489e+02 6.922e+02 1.500e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-04-03 03:45:43,197 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 03:46:00,732 INFO [train.py:903] (2/4) Epoch 25, batch 1000, loss[loss=0.1751, simple_loss=0.2572, pruned_loss=0.04644, over 19613.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2859, pruned_loss=0.0623, over 3781373.42 frames. ], batch size: 50, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:46:19,890 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:53,184 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:55,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-03 03:47:03,440 INFO [train.py:903] (2/4) Epoch 25, batch 1050, loss[loss=0.2286, simple_loss=0.306, pruned_loss=0.07561, over 19573.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06241, over 3785529.98 frames. ], batch size: 61, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:47:13,148 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:27,171 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:36,139 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 03:47:41,873 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.678e+02 5.688e+02 6.996e+02 1.189e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 03:47:45,596 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7346, 1.1873, 1.4302, 1.5588, 3.3076, 1.1980, 2.3562, 3.7975], + device='cuda:2'), covar=tensor([0.0537, 0.3023, 0.3078, 0.1945, 0.0756, 0.2616, 0.1451, 0.0226], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0369, 0.0394, 0.0349, 0.0377, 0.0353, 0.0388, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:48:06,291 INFO [train.py:903] (2/4) Epoch 25, batch 1100, loss[loss=0.2206, simple_loss=0.297, pruned_loss=0.07211, over 19617.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2863, pruned_loss=0.06242, over 3793023.06 frames. ], batch size: 61, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:07,184 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165020.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:09,317 INFO [train.py:903] (2/4) Epoch 25, batch 1150, loss[loss=0.207, simple_loss=0.2938, pruned_loss=0.06006, over 19539.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2854, pruned_loss=0.06241, over 3807287.43 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:40,414 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:48,275 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.101e+02 6.369e+02 8.453e+02 1.568e+03, threshold=1.274e+03, percent-clipped=10.0 +2023-04-03 03:49:52,178 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:14,723 INFO [train.py:903] (2/4) Epoch 25, batch 1200, loss[loss=0.1951, simple_loss=0.2828, pruned_loss=0.05373, over 19524.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.06233, over 3815997.27 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:50:41,039 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:46,463 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 03:51:01,331 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:14,654 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:18,446 INFO [train.py:903] (2/4) Epoch 25, batch 1250, loss[loss=0.2205, simple_loss=0.2915, pruned_loss=0.07475, over 19352.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06207, over 3821542.89 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:51:34,122 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165135.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:41,196 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:51:43,131 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8065, 4.2945, 4.5000, 4.5176, 1.7554, 4.2561, 3.7062, 4.2182], + device='cuda:2'), covar=tensor([0.1634, 0.0849, 0.0609, 0.0638, 0.5954, 0.0981, 0.0674, 0.1123], + device='cuda:2'), in_proj_covar=tensor([0.0802, 0.0766, 0.0974, 0.0850, 0.0856, 0.0736, 0.0579, 0.0903], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 03:51:43,151 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:49,311 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3600, 1.5086, 1.8685, 1.4398, 2.3520, 2.7932, 2.6562, 2.9326], + device='cuda:2'), covar=tensor([0.1425, 0.3078, 0.2710, 0.2402, 0.0981, 0.0345, 0.0247, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0329, 0.0359, 0.0267, 0.0248, 0.0191, 0.0218, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 03:51:57,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 4.844e+02 6.322e+02 8.270e+02 1.695e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-03 03:52:21,097 INFO [train.py:903] (2/4) Epoch 25, batch 1300, loss[loss=0.1586, simple_loss=0.2401, pruned_loss=0.0385, over 19789.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06258, over 3827394.20 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:23,838 INFO [train.py:903] (2/4) Epoch 25, batch 1350, loss[loss=0.1969, simple_loss=0.287, pruned_loss=0.05341, over 19663.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2863, pruned_loss=0.06262, over 3821318.79 frames. ], batch size: 58, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:27,635 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:54:04,598 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.919e+02 6.162e+02 7.502e+02 1.875e+03, threshold=1.232e+03, percent-clipped=3.0 +2023-04-03 03:54:29,447 INFO [train.py:903] (2/4) Epoch 25, batch 1400, loss[loss=0.2231, simple_loss=0.3042, pruned_loss=0.07099, over 19672.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2855, pruned_loss=0.06172, over 3836160.11 frames. 
], batch size: 58, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:54:50,989 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4358, 1.2065, 1.5670, 1.3546, 3.0470, 1.1485, 2.2542, 3.4037], + device='cuda:2'), covar=tensor([0.0562, 0.2951, 0.2778, 0.1982, 0.0694, 0.2480, 0.1360, 0.0285], + device='cuda:2'), in_proj_covar=tensor([0.0413, 0.0367, 0.0391, 0.0348, 0.0373, 0.0350, 0.0386, 0.0407], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:54:53,480 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0691, 2.7725, 2.3293, 2.3649, 2.0150, 2.5476, 1.1804, 2.0895], + device='cuda:2'), covar=tensor([0.0665, 0.0640, 0.0577, 0.1003, 0.1184, 0.0977, 0.1371, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0359, 0.0363, 0.0388, 0.0465, 0.0394, 0.0341, 0.0347], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 03:55:05,151 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:18,674 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:19,103 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-03 03:55:32,133 INFO [train.py:903] (2/4) Epoch 25, batch 1450, loss[loss=0.2009, simple_loss=0.2757, pruned_loss=0.06304, over 19757.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06179, over 3835810.49 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:55:33,162 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 03:55:37,004 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:48,862 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:56:10,661 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.584e+02 6.100e+02 8.063e+02 1.924e+03, threshold=1.220e+03, percent-clipped=4.0 +2023-04-03 03:56:33,836 INFO [train.py:903] (2/4) Epoch 25, batch 1500, loss[loss=0.235, simple_loss=0.3233, pruned_loss=0.0734, over 18067.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2864, pruned_loss=0.06257, over 3827155.73 frames. ], batch size: 83, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:56:59,416 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165391.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:30,000 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:36,928 INFO [train.py:903] (2/4) Epoch 25, batch 1550, loss[loss=0.2263, simple_loss=0.3083, pruned_loss=0.0721, over 19737.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06161, over 3822427.65 frames. 
], batch size: 63, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:58:06,817 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2016, 1.8135, 1.5305, 1.2722, 1.6251, 1.2880, 1.1800, 1.6544], + device='cuda:2'), covar=tensor([0.0784, 0.0944, 0.1118, 0.0908, 0.0647, 0.1299, 0.0652, 0.0480], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0268, 0.0249, 0.0343, 0.0293, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 03:58:17,712 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.229e+02 4.957e+02 6.075e+02 6.924e+02 1.069e+03, threshold=1.215e+03, percent-clipped=0.0 +2023-04-03 03:58:37,746 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 03:58:42,890 INFO [train.py:903] (2/4) Epoch 25, batch 1600, loss[loss=0.194, simple_loss=0.2785, pruned_loss=0.05475, over 19772.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06134, over 3811355.55 frames. ], batch size: 56, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:58:51,619 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:58:59,593 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:06,519 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 03:59:21,470 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:44,250 INFO [train.py:903] (2/4) Epoch 25, batch 1650, loss[loss=0.216, simple_loss=0.2937, pruned_loss=0.0692, over 19595.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2839, pruned_loss=0.06128, over 3804479.68 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:00:23,738 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.584e+02 6.289e+02 7.621e+02 1.672e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-03 04:00:44,049 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:00:47,348 INFO [train.py:903] (2/4) Epoch 25, batch 1700, loss[loss=0.1826, simple_loss=0.2649, pruned_loss=0.05018, over 19846.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06121, over 3818759.06 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:22,707 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:01:29,480 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 04:01:49,445 INFO [train.py:903] (2/4) Epoch 25, batch 1750, loss[loss=0.1915, simple_loss=0.2808, pruned_loss=0.05112, over 19533.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.06174, over 3805311.19 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:53,780 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.32 vs. 
limit=5.0 +2023-04-03 04:02:29,207 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.689e+02 5.610e+02 7.383e+02 2.270e+03, threshold=1.122e+03, percent-clipped=4.0 +2023-04-03 04:02:53,379 INFO [train.py:903] (2/4) Epoch 25, batch 1800, loss[loss=0.2054, simple_loss=0.2987, pruned_loss=0.05608, over 19527.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06223, over 3803452.69 frames. ], batch size: 56, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:03:51,797 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 04:03:56,452 INFO [train.py:903] (2/4) Epoch 25, batch 1850, loss[loss=0.1765, simple_loss=0.2583, pruned_loss=0.04734, over 19669.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2858, pruned_loss=0.06226, over 3804022.11 frames. ], batch size: 53, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:04:28,975 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 04:04:37,055 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 4.826e+02 5.776e+02 6.973e+02 2.376e+03, threshold=1.155e+03, percent-clipped=4.0 +2023-04-03 04:04:56,244 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:05:00,676 INFO [train.py:903] (2/4) Epoch 25, batch 1900, loss[loss=0.1921, simple_loss=0.2781, pruned_loss=0.05305, over 19658.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2858, pruned_loss=0.06224, over 3798712.65 frames. ], batch size: 55, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:05:16,875 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 04:05:22,726 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 04:05:24,242 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0795, 5.1535, 5.9442, 5.9577, 1.8815, 5.6771, 4.6624, 5.5649], + device='cuda:2'), covar=tensor([0.1763, 0.0836, 0.0606, 0.0620, 0.6495, 0.0835, 0.0658, 0.1237], + device='cuda:2'), in_proj_covar=tensor([0.0809, 0.0770, 0.0979, 0.0852, 0.0855, 0.0740, 0.0581, 0.0907], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 04:05:48,084 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 04:06:02,684 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165821.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:06:03,532 INFO [train.py:903] (2/4) Epoch 25, batch 1950, loss[loss=0.2533, simple_loss=0.3338, pruned_loss=0.08637, over 17365.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06218, over 3799708.47 frames. 
], batch size: 101, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:06:20,379 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3521, 1.3224, 1.4956, 1.4957, 1.7527, 1.8695, 1.7706, 0.6106], + device='cuda:2'), covar=tensor([0.2523, 0.4327, 0.2814, 0.1994, 0.1706, 0.2351, 0.1491, 0.5009], + device='cuda:2'), in_proj_covar=tensor([0.0542, 0.0655, 0.0730, 0.0493, 0.0625, 0.0538, 0.0661, 0.0560], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:06:44,209 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.671e+02 6.009e+02 7.545e+02 1.239e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 04:06:46,983 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1068, 1.3002, 1.5067, 1.4265, 2.7085, 1.1286, 2.1426, 3.1098], + device='cuda:2'), covar=tensor([0.0585, 0.2887, 0.3005, 0.1907, 0.0785, 0.2534, 0.1330, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0415, 0.0370, 0.0393, 0.0350, 0.0375, 0.0354, 0.0388, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:06:48,309 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165856.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:07:08,059 INFO [train.py:903] (2/4) Epoch 25, batch 2000, loss[loss=0.198, simple_loss=0.2861, pruned_loss=0.05491, over 19605.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2859, pruned_loss=0.06208, over 3818912.09 frames. ], batch size: 57, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:07:20,289 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:00,823 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:07,742 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 04:08:12,468 INFO [train.py:903] (2/4) Epoch 25, batch 2050, loss[loss=0.2106, simple_loss=0.283, pruned_loss=0.06915, over 19596.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2838, pruned_loss=0.06119, over 3815193.83 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:08:27,917 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 04:08:27,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 04:08:38,777 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0077, 3.6539, 2.4791, 3.2721, 0.9467, 3.6312, 3.4836, 3.5690], + device='cuda:2'), covar=tensor([0.0812, 0.1103, 0.2073, 0.0936, 0.3823, 0.0752, 0.0985, 0.1285], + device='cuda:2'), in_proj_covar=tensor([0.0514, 0.0421, 0.0503, 0.0351, 0.0402, 0.0444, 0.0437, 0.0468], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:08:48,983 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 04:08:51,220 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 5.221e+02 6.289e+02 7.653e+02 1.251e+03, threshold=1.258e+03, percent-clipped=2.0 +2023-04-03 04:09:15,817 INFO [train.py:903] (2/4) Epoch 25, batch 2100, loss[loss=0.2323, simple_loss=0.3104, pruned_loss=0.07706, over 17466.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2834, pruned_loss=0.06118, over 3812783.84 frames. ], batch size: 101, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:09:44,638 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 04:10:08,522 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 04:10:19,017 INFO [train.py:903] (2/4) Epoch 25, batch 2150, loss[loss=0.2227, simple_loss=0.3067, pruned_loss=0.06937, over 17718.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2834, pruned_loss=0.06158, over 3825667.12 frames. ], batch size: 101, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:10:26,458 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6085, 1.7129, 1.9318, 1.9705, 1.5639, 1.9330, 1.9446, 1.8025], + device='cuda:2'), covar=tensor([0.4188, 0.3823, 0.2122, 0.2459, 0.4028, 0.2248, 0.5244, 0.3500], + device='cuda:2'), in_proj_covar=tensor([0.0917, 0.0992, 0.0729, 0.0937, 0.0895, 0.0829, 0.0855, 0.0793], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 04:10:27,628 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:10:48,698 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0834, 2.0610, 1.7947, 1.6443, 1.5185, 1.6786, 0.5535, 1.1218], + device='cuda:2'), covar=tensor([0.0664, 0.0622, 0.0561, 0.0894, 0.1246, 0.0993, 0.1485, 0.1128], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0359, 0.0363, 0.0387, 0.0465, 0.0392, 0.0341, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:10:58,801 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.055e+02 6.614e+02 9.413e+02 1.694e+03, threshold=1.323e+03, percent-clipped=9.0 +2023-04-03 04:11:21,693 INFO [train.py:903] (2/4) Epoch 25, batch 2200, loss[loss=0.1546, simple_loss=0.2406, pruned_loss=0.03434, over 19307.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2838, pruned_loss=0.0618, over 3831526.13 frames. 
], batch size: 44, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:11:22,024 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166072.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:11:38,572 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8651, 2.8428, 2.5427, 2.8656, 2.6643, 2.5280, 2.2620, 2.9179], + device='cuda:2'), covar=tensor([0.0878, 0.1346, 0.1280, 0.0997, 0.1291, 0.0443, 0.1389, 0.0532], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0355, 0.0311, 0.0254, 0.0301, 0.0252, 0.0313, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:12:02,237 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5395, 2.2845, 1.7118, 1.5794, 2.1404, 1.4145, 1.3342, 2.0143], + device='cuda:2'), covar=tensor([0.1079, 0.0826, 0.1142, 0.0828, 0.0552, 0.1282, 0.0800, 0.0475], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0267, 0.0249, 0.0341, 0.0293, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:12:14,466 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166112.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:12:26,094 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4208, 2.0421, 1.5381, 1.2879, 1.9417, 1.2072, 1.3974, 1.9570], + device='cuda:2'), covar=tensor([0.0981, 0.0785, 0.1198, 0.0944, 0.0568, 0.1386, 0.0708, 0.0430], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0317, 0.0339, 0.0266, 0.0248, 0.0340, 0.0292, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:12:26,901 INFO [train.py:903] (2/4) Epoch 25, batch 2250, loss[loss=0.2237, simple_loss=0.3089, pruned_loss=0.06922, over 19307.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2835, pruned_loss=0.06136, over 3832414.25 frames. 
], batch size: 66, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:12:34,248 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7491, 1.7650, 1.6858, 1.4732, 1.4662, 1.5051, 0.2506, 0.6789], + device='cuda:2'), covar=tensor([0.0672, 0.0633, 0.0394, 0.0651, 0.1161, 0.0713, 0.1361, 0.1161], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0360, 0.0364, 0.0389, 0.0466, 0.0393, 0.0343, 0.0347], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:13:04,902 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.777e+02 5.716e+02 7.657e+02 1.924e+03, threshold=1.143e+03, percent-clipped=2.0 +2023-04-03 04:13:17,465 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6768, 2.4390, 1.7922, 1.6568, 2.2329, 1.4320, 1.4786, 2.1426], + device='cuda:2'), covar=tensor([0.1100, 0.0855, 0.1220, 0.0907, 0.0563, 0.1374, 0.0788, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0317, 0.0340, 0.0267, 0.0248, 0.0341, 0.0292, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:13:21,695 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:21,862 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:31,174 INFO [train.py:903] (2/4) Epoch 25, batch 2300, loss[loss=0.1979, simple_loss=0.2716, pruned_loss=0.06209, over 19517.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.284, pruned_loss=0.06177, over 3839301.70 frames. ], batch size: 49, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:13:42,668 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 04:14:00,633 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3460, 1.1529, 1.2849, 1.8333, 1.5300, 1.2710, 1.4043, 1.2971], + device='cuda:2'), covar=tensor([0.0994, 0.1415, 0.1071, 0.0736, 0.1155, 0.1249, 0.1241, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0239, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 04:14:34,774 INFO [train.py:903] (2/4) Epoch 25, batch 2350, loss[loss=0.1726, simple_loss=0.2588, pruned_loss=0.04324, over 19733.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.284, pruned_loss=0.06165, over 3831726.88 frames. ], batch size: 51, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:14:42,147 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166227.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:14,913 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.104e+02 6.317e+02 8.219e+02 1.547e+03, threshold=1.263e+03, percent-clipped=3.0 +2023-04-03 04:15:17,146 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 04:15:24,577 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6442, 1.3478, 1.3140, 1.5475, 1.1710, 1.4266, 1.2448, 1.4807], + device='cuda:2'), covar=tensor([0.1120, 0.1089, 0.1570, 0.1048, 0.1276, 0.0634, 0.1656, 0.0837], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0354, 0.0311, 0.0253, 0.0301, 0.0252, 0.0312, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:15:34,618 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 04:15:38,204 INFO [train.py:903] (2/4) Epoch 25, batch 2400, loss[loss=0.2088, simple_loss=0.2996, pruned_loss=0.05906, over 19592.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06132, over 3839592.96 frames. ], batch size: 61, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:15:48,893 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166280.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:55,273 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166284.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:25,801 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:29,052 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0943, 5.5427, 2.9354, 4.7899, 1.3845, 5.7093, 5.5390, 5.7192], + device='cuda:2'), covar=tensor([0.0352, 0.0710, 0.1904, 0.0701, 0.3598, 0.0469, 0.0730, 0.0838], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0421, 0.0504, 0.0353, 0.0404, 0.0446, 0.0438, 0.0469], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:16:41,770 INFO [train.py:903] (2/4) Epoch 25, batch 2450, loss[loss=0.2496, simple_loss=0.326, pruned_loss=0.08662, over 17490.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06112, over 3839674.30 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:16:46,765 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:17:20,237 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.023e+02 4.903e+02 5.916e+02 7.490e+02 1.353e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-04-03 04:17:44,925 INFO [train.py:903] (2/4) Epoch 25, batch 2500, loss[loss=0.2126, simple_loss=0.2957, pruned_loss=0.06474, over 19548.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06138, over 3844374.30 frames. 
], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:17:52,019 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6893, 1.8325, 1.8431, 2.5205, 1.9255, 2.3173, 1.9158, 1.5861], + device='cuda:2'), covar=tensor([0.4879, 0.4384, 0.2786, 0.2667, 0.4371, 0.2420, 0.6200, 0.4986], + device='cuda:2'), in_proj_covar=tensor([0.0916, 0.0991, 0.0729, 0.0938, 0.0894, 0.0831, 0.0851, 0.0793], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 04:18:41,300 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166416.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:18:48,038 INFO [train.py:903] (2/4) Epoch 25, batch 2550, loss[loss=0.1786, simple_loss=0.2696, pruned_loss=0.04383, over 19764.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06251, over 3831606.44 frames. ], batch size: 56, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:19:21,989 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 04:19:28,548 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 5.112e+02 6.516e+02 8.109e+02 2.174e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-03 04:19:40,791 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9828, 1.9579, 1.8887, 1.6543, 1.5557, 1.6311, 0.4811, 0.8951], + device='cuda:2'), covar=tensor([0.0673, 0.0618, 0.0412, 0.0752, 0.1238, 0.0852, 0.1331, 0.1111], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0360, 0.0363, 0.0387, 0.0467, 0.0393, 0.0340, 0.0345], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:19:46,425 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 04:19:53,265 INFO [train.py:903] (2/4) Epoch 25, batch 2600, loss[loss=0.2203, simple_loss=0.2942, pruned_loss=0.07316, over 19662.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2867, pruned_loss=0.06255, over 3827552.95 frames. ], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:20:08,606 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166483.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:29,563 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:34,778 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. limit=2.0 +2023-04-03 04:20:40,172 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166508.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:41,204 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166509.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:58,297 INFO [train.py:903] (2/4) Epoch 25, batch 2650, loss[loss=0.2188, simple_loss=0.2853, pruned_loss=0.07615, over 19495.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2866, pruned_loss=0.06235, over 3826672.59 frames. 
], batch size: 49, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:21:05,499 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:10,269 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166531.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:21:12,599 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3374, 1.2714, 1.6494, 1.1330, 2.4157, 3.3746, 3.0704, 3.5782], + device='cuda:2'), covar=tensor([0.1484, 0.3862, 0.3470, 0.2623, 0.0638, 0.0203, 0.0222, 0.0273], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0328, 0.0359, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 04:21:17,630 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166536.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:19,744 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 04:21:32,650 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:38,181 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.612e+02 5.770e+02 6.756e+02 1.610e+03, threshold=1.154e+03, percent-clipped=1.0 +2023-04-03 04:21:49,308 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:22:03,050 INFO [train.py:903] (2/4) Epoch 25, batch 2700, loss[loss=0.2271, simple_loss=0.3047, pruned_loss=0.07477, over 19125.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06177, over 3825331.07 frames. ], batch size: 69, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:22:21,030 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-03 04:23:06,172 INFO [train.py:903] (2/4) Epoch 25, batch 2750, loss[loss=0.1926, simple_loss=0.2743, pruned_loss=0.05541, over 19730.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2854, pruned_loss=0.0618, over 3816969.31 frames. ], batch size: 45, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:23:08,940 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:23:45,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.014e+02 4.918e+02 6.109e+02 7.546e+02 1.552e+03, threshold=1.222e+03, percent-clipped=5.0 +2023-04-03 04:24:04,563 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:24:07,644 INFO [train.py:903] (2/4) Epoch 25, batch 2800, loss[loss=0.2209, simple_loss=0.2991, pruned_loss=0.0714, over 19657.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2843, pruned_loss=0.06156, over 3821856.81 frames. 
], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:24:43,350 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8477, 3.2986, 3.3354, 3.3519, 1.4630, 3.2264, 2.8359, 3.1134], + device='cuda:2'), covar=tensor([0.1749, 0.1071, 0.0893, 0.0926, 0.5779, 0.1147, 0.0810, 0.1403], + device='cuda:2'), in_proj_covar=tensor([0.0803, 0.0766, 0.0975, 0.0853, 0.0851, 0.0740, 0.0580, 0.0905], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 04:25:11,699 INFO [train.py:903] (2/4) Epoch 25, batch 2850, loss[loss=0.1923, simple_loss=0.2843, pruned_loss=0.05016, over 17283.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2846, pruned_loss=0.06187, over 3785562.21 frames. ], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:25:50,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.563e+02 5.950e+02 8.060e+02 1.987e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 04:25:50,648 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:11,853 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 04:26:14,209 INFO [train.py:903] (2/4) Epoch 25, batch 2900, loss[loss=0.1852, simple_loss=0.268, pruned_loss=0.05122, over 19476.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.06242, over 3808458.19 frames. ], batch size: 49, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:26:27,358 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166782.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:29,688 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:33,199 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166787.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:27:05,059 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166812.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:27:07,349 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9163, 1.9645, 2.2071, 2.5491, 1.9259, 2.4873, 2.1631, 2.0014], + device='cuda:2'), covar=tensor([0.4365, 0.4123, 0.2074, 0.2631, 0.4490, 0.2337, 0.5320, 0.3595], + device='cuda:2'), in_proj_covar=tensor([0.0918, 0.0990, 0.0729, 0.0938, 0.0894, 0.0829, 0.0853, 0.0793], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 04:27:17,223 INFO [train.py:903] (2/4) Epoch 25, batch 2950, loss[loss=0.2067, simple_loss=0.2843, pruned_loss=0.06454, over 19623.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2856, pruned_loss=0.06246, over 3817659.45 frames. 
], batch size: 50, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:27:44,187 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:27:56,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.674e+02 5.957e+02 7.713e+02 2.101e+03, threshold=1.191e+03, percent-clipped=6.0 +2023-04-03 04:28:20,033 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:28:21,101 INFO [train.py:903] (2/4) Epoch 25, batch 3000, loss[loss=0.1775, simple_loss=0.2473, pruned_loss=0.05387, over 19761.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06212, over 3814992.68 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:28:21,102 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 04:28:33,792 INFO [train.py:937] (2/4) Epoch 25, validation: loss=0.1677, simple_loss=0.2674, pruned_loss=0.034, over 944034.00 frames. +2023-04-03 04:28:33,797 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 04:28:35,118 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 04:28:44,927 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:28:55,153 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-03 04:29:01,379 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166893.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:17,269 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166905.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:25,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 04:29:38,485 INFO [train.py:903] (2/4) Epoch 25, batch 3050, loss[loss=0.1946, simple_loss=0.2788, pruned_loss=0.05516, over 19584.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.06244, over 3799509.51 frames. ], batch size: 52, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:09,942 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:17,431 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 4.942e+02 6.233e+02 8.295e+02 1.859e+03, threshold=1.247e+03, percent-clipped=9.0 +2023-04-03 04:30:23,339 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:39,885 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7161, 4.1360, 4.3682, 4.3793, 2.0623, 4.1131, 3.6680, 4.1376], + device='cuda:2'), covar=tensor([0.1639, 0.1347, 0.0598, 0.0680, 0.5270, 0.1078, 0.0608, 0.1008], + device='cuda:2'), in_proj_covar=tensor([0.0798, 0.0763, 0.0971, 0.0850, 0.0846, 0.0738, 0.0578, 0.0899], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 04:30:41,891 INFO [train.py:903] (2/4) Epoch 25, batch 3100, loss[loss=0.2055, simple_loss=0.2742, pruned_loss=0.06843, over 19632.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2864, pruned_loss=0.06318, over 3804831.46 frames. 
], batch size: 50, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:58,966 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:59,020 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1221, 1.7814, 1.4577, 1.2319, 1.6037, 1.1731, 1.0974, 1.5702], + device='cuda:2'), covar=tensor([0.0906, 0.0842, 0.1107, 0.0876, 0.0546, 0.1319, 0.0702, 0.0484], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0318, 0.0338, 0.0267, 0.0248, 0.0342, 0.0291, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:31:17,697 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1690, 1.3364, 1.9250, 1.4348, 3.1696, 4.6847, 4.4969, 5.0907], + device='cuda:2'), covar=tensor([0.1803, 0.4003, 0.3434, 0.2542, 0.0601, 0.0206, 0.0178, 0.0174], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0327, 0.0358, 0.0266, 0.0248, 0.0191, 0.0217, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 04:31:27,830 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:44,386 INFO [train.py:903] (2/4) Epoch 25, batch 3150, loss[loss=0.1642, simple_loss=0.2539, pruned_loss=0.0373, over 19713.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2858, pruned_loss=0.06286, over 3806900.27 frames. ], batch size: 51, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:32:08,054 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167040.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:08,795 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 04:32:24,692 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.039e+02 6.011e+02 8.459e+02 2.094e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-03 04:32:39,339 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167065.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:48,315 INFO [train.py:903] (2/4) Epoch 25, batch 3200, loss[loss=0.1918, simple_loss=0.2814, pruned_loss=0.05109, over 17681.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06285, over 3792376.84 frames. 
], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:32:52,065 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6594, 4.2658, 2.6606, 3.7475, 0.9751, 4.2258, 4.1050, 4.2062], + device='cuda:2'), covar=tensor([0.0629, 0.0947, 0.1993, 0.0879, 0.3864, 0.0633, 0.0836, 0.1035], + device='cuda:2'), in_proj_covar=tensor([0.0516, 0.0418, 0.0504, 0.0353, 0.0402, 0.0446, 0.0438, 0.0470], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:33:20,650 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:47,216 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:47,412 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9149, 2.0045, 2.2207, 2.4597, 1.9386, 2.4098, 2.2821, 2.0759], + device='cuda:2'), covar=tensor([0.4124, 0.3680, 0.1892, 0.2319, 0.3899, 0.2069, 0.4582, 0.3238], + device='cuda:2'), in_proj_covar=tensor([0.0924, 0.0998, 0.0734, 0.0945, 0.0901, 0.0835, 0.0858, 0.0800], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 04:33:52,601 INFO [train.py:903] (2/4) Epoch 25, batch 3250, loss[loss=0.1878, simple_loss=0.2796, pruned_loss=0.04804, over 19756.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.06194, over 3793464.51 frames. ], batch size: 54, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:33:52,930 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:57,483 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167126.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:57,762 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2364, 1.2883, 1.2716, 1.0526, 1.0764, 1.1140, 0.1298, 0.3990], + device='cuda:2'), covar=tensor([0.0683, 0.0661, 0.0446, 0.0579, 0.1310, 0.0637, 0.1397, 0.1198], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0360, 0.0364, 0.0388, 0.0467, 0.0394, 0.0343, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:34:33,308 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 4.897e+02 6.248e+02 7.764e+02 1.427e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 04:34:56,638 INFO [train.py:903] (2/4) Epoch 25, batch 3300, loss[loss=0.2033, simple_loss=0.2853, pruned_loss=0.06069, over 19598.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06122, over 3803346.43 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:34:56,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 04:35:15,539 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0851, 1.0469, 1.4882, 1.1630, 2.1998, 2.8899, 2.6571, 3.2454], + device='cuda:2'), covar=tensor([0.1942, 0.5546, 0.4839, 0.2882, 0.0856, 0.0361, 0.0378, 0.0410], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0328, 0.0359, 0.0268, 0.0250, 0.0192, 0.0219, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 04:35:47,885 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:50,103 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:59,021 INFO [train.py:903] (2/4) Epoch 25, batch 3350, loss[loss=0.2403, simple_loss=0.3256, pruned_loss=0.07749, over 19590.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06157, over 3809863.27 frames. ], batch size: 61, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:36:21,822 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:24,218 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167241.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:25,499 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167242.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:41,238 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.821e+02 5.770e+02 7.565e+02 1.496e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-04-03 04:36:53,556 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167264.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:56,896 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167267.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:02,319 INFO [train.py:903] (2/4) Epoch 25, batch 3400, loss[loss=0.214, simple_loss=0.2867, pruned_loss=0.07063, over 19708.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2845, pruned_loss=0.0615, over 3820985.79 frames. ], batch size: 51, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:37:26,128 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:28,374 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:38:08,145 INFO [train.py:903] (2/4) Epoch 25, batch 3450, loss[loss=0.1997, simple_loss=0.2839, pruned_loss=0.05773, over 19760.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06139, over 3826843.72 frames. ], batch size: 54, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:38:10,618 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-03 04:38:36,000 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1961, 1.2910, 1.2389, 1.0047, 1.1116, 1.0829, 0.0694, 0.3625], + device='cuda:2'), covar=tensor([0.0717, 0.0700, 0.0507, 0.0685, 0.1357, 0.0704, 0.1468, 0.1231], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0360, 0.0362, 0.0386, 0.0465, 0.0393, 0.0341, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:38:49,777 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.608e+02 5.631e+02 6.989e+02 1.333e+03, threshold=1.126e+03, percent-clipped=1.0 +2023-04-03 04:39:07,033 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1794, 2.0470, 2.0011, 1.8103, 1.6715, 1.8299, 0.6019, 1.1402], + device='cuda:2'), covar=tensor([0.0675, 0.0648, 0.0434, 0.0732, 0.1278, 0.0841, 0.1285, 0.1109], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0358, 0.0361, 0.0385, 0.0464, 0.0392, 0.0340, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 04:39:12,382 INFO [train.py:903] (2/4) Epoch 25, batch 3500, loss[loss=0.1911, simple_loss=0.2786, pruned_loss=0.05181, over 19757.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2859, pruned_loss=0.06207, over 3808779.36 frames. ], batch size: 54, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:39:42,143 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1951, 1.5909, 1.3103, 1.2146, 1.4995, 1.1534, 1.1760, 1.4537], + device='cuda:2'), covar=tensor([0.0796, 0.0757, 0.0796, 0.0678, 0.0482, 0.0963, 0.0549, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0318, 0.0336, 0.0266, 0.0248, 0.0342, 0.0291, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:39:56,160 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:15,428 INFO [train.py:903] (2/4) Epoch 25, batch 3550, loss[loss=0.221, simple_loss=0.2955, pruned_loss=0.07328, over 19712.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2862, pruned_loss=0.06226, over 3816485.05 frames. ], batch size: 63, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:40:51,402 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167450.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:55,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.129e+02 6.252e+02 7.981e+02 1.792e+03, threshold=1.250e+03, percent-clipped=6.0 +2023-04-03 04:41:06,589 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:11,106 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167466.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:13,897 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:17,994 INFO [train.py:903] (2/4) Epoch 25, batch 3600, loss[loss=0.2442, simple_loss=0.3363, pruned_loss=0.07607, over 19483.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06127, over 3813330.24 frames. 
], batch size: 64, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:41:44,859 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:50,903 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167497.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:01,240 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4533, 1.2388, 1.4869, 1.5911, 3.0437, 1.3510, 2.2719, 3.4733], + device='cuda:2'), covar=tensor([0.0514, 0.2952, 0.2999, 0.1854, 0.0695, 0.2322, 0.1298, 0.0260], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0349, 0.0377, 0.0353, 0.0391, 0.0411], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:42:20,841 INFO [train.py:903] (2/4) Epoch 25, batch 3650, loss[loss=0.216, simple_loss=0.2987, pruned_loss=0.06663, over 19523.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2839, pruned_loss=0.06113, over 3815440.29 frames. ], batch size: 54, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:42:21,242 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:36,704 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 04:43:00,372 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 5.264e+02 6.466e+02 8.043e+02 1.528e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-03 04:43:24,174 INFO [train.py:903] (2/4) Epoch 25, batch 3700, loss[loss=0.2555, simple_loss=0.3177, pruned_loss=0.09665, over 12923.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06185, over 3815179.11 frames. ], batch size: 136, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:43:31,617 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:43:36,267 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:44:28,030 INFO [train.py:903] (2/4) Epoch 25, batch 3750, loss[loss=0.1819, simple_loss=0.2554, pruned_loss=0.05425, over 19351.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2849, pruned_loss=0.06178, over 3818022.65 frames. 
], batch size: 47, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:44:57,783 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8926, 1.9984, 2.2508, 2.4695, 1.8385, 2.3217, 2.2240, 2.0207], + device='cuda:2'), covar=tensor([0.4208, 0.3737, 0.1962, 0.2303, 0.4025, 0.2125, 0.5116, 0.3415], + device='cuda:2'), in_proj_covar=tensor([0.0923, 0.0995, 0.0731, 0.0943, 0.0898, 0.0832, 0.0857, 0.0797], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 04:45:08,825 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.966e+02 5.285e+02 6.593e+02 8.150e+02 1.742e+03, threshold=1.319e+03, percent-clipped=4.0 +2023-04-03 04:45:19,723 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9960, 3.6508, 2.4196, 3.2597, 0.9888, 3.6354, 3.4860, 3.5537], + device='cuda:2'), covar=tensor([0.0942, 0.1164, 0.2254, 0.0916, 0.3801, 0.0769, 0.1142, 0.1310], + device='cuda:2'), in_proj_covar=tensor([0.0521, 0.0422, 0.0509, 0.0356, 0.0407, 0.0450, 0.0441, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:45:19,945 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:45:31,293 INFO [train.py:903] (2/4) Epoch 25, batch 3800, loss[loss=0.1888, simple_loss=0.268, pruned_loss=0.05483, over 19617.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06223, over 3822780.03 frames. ], batch size: 50, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:45:50,911 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:46:01,947 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 04:46:32,954 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 04:46:35,337 INFO [train.py:903] (2/4) Epoch 25, batch 3850, loss[loss=0.2492, simple_loss=0.3209, pruned_loss=0.08878, over 17179.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06147, over 3837605.50 frames. ], batch size: 101, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:46:54,839 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167737.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:47:16,484 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.991e+02 5.959e+02 7.597e+02 1.650e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 04:47:39,474 INFO [train.py:903] (2/4) Epoch 25, batch 3900, loss[loss=0.2252, simple_loss=0.3024, pruned_loss=0.07407, over 18757.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2833, pruned_loss=0.06055, over 3834673.38 frames. ], batch size: 74, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:47:55,785 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167785.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:06,189 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167794.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:43,216 INFO [train.py:903] (2/4) Epoch 25, batch 3950, loss[loss=0.2, simple_loss=0.2889, pruned_loss=0.05557, over 19664.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.06118, over 3821168.11 frames. 
], batch size: 58, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:48:45,699 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 04:48:56,957 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:01,638 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:13,781 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-03 04:49:25,130 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 4.848e+02 5.890e+02 7.638e+02 1.655e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 04:49:29,106 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:34,614 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:46,053 INFO [train.py:903] (2/4) Epoch 25, batch 4000, loss[loss=0.2509, simple_loss=0.3228, pruned_loss=0.08947, over 19665.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06129, over 3813301.08 frames. ], batch size: 60, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:50:32,152 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 04:50:33,727 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:50:50,806 INFO [train.py:903] (2/4) Epoch 25, batch 4050, loss[loss=0.167, simple_loss=0.2502, pruned_loss=0.04191, over 19775.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06156, over 3800376.91 frames. ], batch size: 47, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:51:32,266 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.057e+02 6.529e+02 8.113e+02 1.821e+03, threshold=1.306e+03, percent-clipped=7.0 +2023-04-03 04:51:52,925 INFO [train.py:903] (2/4) Epoch 25, batch 4100, loss[loss=0.2329, simple_loss=0.3124, pruned_loss=0.07675, over 19612.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2853, pruned_loss=0.06195, over 3809790.15 frames. ], batch size: 50, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:52:24,783 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 04:52:27,620 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 04:52:56,845 INFO [train.py:903] (2/4) Epoch 25, batch 4150, loss[loss=0.2137, simple_loss=0.2981, pruned_loss=0.06463, over 19548.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06231, over 3792279.75 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:53:36,890 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168053.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:53:39,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 5.393e+02 6.582e+02 8.080e+02 1.683e+03, threshold=1.316e+03, percent-clipped=2.0 +2023-04-03 04:53:59,617 INFO [train.py:903] (2/4) Epoch 25, batch 4200, loss[loss=0.2411, simple_loss=0.3152, pruned_loss=0.08343, over 19792.00 frames. 
], tot_loss[loss=0.2048, simple_loss=0.285, pruned_loss=0.06232, over 3798189.99 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:54:01,980 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 04:54:10,948 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168081.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:54:18,319 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 04:55:03,247 INFO [train.py:903] (2/4) Epoch 25, batch 4250, loss[loss=0.209, simple_loss=0.3008, pruned_loss=0.0586, over 19709.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.285, pruned_loss=0.06214, over 3811229.05 frames. ], batch size: 59, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:55:13,197 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:55:17,977 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 04:55:20,782 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.35 vs. limit=5.0 +2023-04-03 04:55:29,583 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 04:55:46,754 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.961e+02 6.439e+02 7.740e+02 2.119e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-03 04:55:59,596 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:07,289 INFO [train.py:903] (2/4) Epoch 25, batch 4300, loss[loss=0.2414, simple_loss=0.3118, pruned_loss=0.08549, over 13715.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.0624, over 3798707.95 frames. ], batch size: 135, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:56:15,589 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-03 04:56:29,202 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.0421, 1.1031, 1.3681, 1.2757, 2.5965, 1.0752, 2.0854, 2.9615], + device='cuda:2'), covar=tensor([0.0809, 0.3324, 0.3102, 0.2139, 0.0999, 0.2600, 0.1444, 0.0420], + device='cuda:2'), in_proj_covar=tensor([0.0419, 0.0375, 0.0395, 0.0351, 0.0379, 0.0354, 0.0393, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 04:56:30,383 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:37,450 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168196.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:42,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.99 vs. limit=5.0 +2023-04-03 04:56:57,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-03 04:57:00,316 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 04:57:10,768 INFO [train.py:903] (2/4) Epoch 25, batch 4350, loss[loss=0.1782, simple_loss=0.2546, pruned_loss=0.0509, over 19749.00 frames. 
], tot_loss[loss=0.2048, simple_loss=0.2852, pruned_loss=0.06218, over 3800297.56 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:57:38,637 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:47,819 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168251.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:49,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 04:57:53,365 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.774e+02 5.735e+02 7.211e+02 1.236e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 04:58:13,426 INFO [train.py:903] (2/4) Epoch 25, batch 4400, loss[loss=0.181, simple_loss=0.2657, pruned_loss=0.04819, over 19676.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.06205, over 3810957.14 frames. ], batch size: 53, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:58:40,138 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 04:58:50,370 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 04:59:16,256 INFO [train.py:903] (2/4) Epoch 25, batch 4450, loss[loss=0.2027, simple_loss=0.2945, pruned_loss=0.05549, over 19663.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.285, pruned_loss=0.06183, over 3815528.31 frames. ], batch size: 58, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:59:59,622 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.609e+02 5.804e+02 7.720e+02 1.927e+03, threshold=1.161e+03, percent-clipped=7.0 +2023-04-03 05:00:17,554 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 05:00:20,127 INFO [train.py:903] (2/4) Epoch 25, batch 4500, loss[loss=0.1803, simple_loss=0.272, pruned_loss=0.04433, over 19522.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2862, pruned_loss=0.06279, over 3803974.36 frames. ], batch size: 54, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:00:44,722 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 05:00:52,074 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:01:23,378 INFO [train.py:903] (2/4) Epoch 25, batch 4550, loss[loss=0.2139, simple_loss=0.2921, pruned_loss=0.0678, over 19667.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2849, pruned_loss=0.06207, over 3819223.07 frames. ], batch size: 60, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:01:34,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 05:01:59,969 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 05:02:02,790 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:08,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 4.803e+02 5.692e+02 6.651e+02 1.392e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 05:02:15,270 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1058, 1.2347, 1.7198, 1.1662, 2.5939, 3.5339, 3.2508, 3.7383], + device='cuda:2'), covar=tensor([0.1678, 0.3982, 0.3393, 0.2630, 0.0624, 0.0195, 0.0225, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0327, 0.0357, 0.0267, 0.0248, 0.0191, 0.0217, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:02:27,741 INFO [train.py:903] (2/4) Epoch 25, batch 4600, loss[loss=0.1849, simple_loss=0.2706, pruned_loss=0.04962, over 19607.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.06142, over 3834687.87 frames. ], batch size: 52, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:02:35,439 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168477.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:59,876 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9280, 4.5121, 2.7790, 3.8979, 1.0416, 4.4735, 4.3745, 4.4356], + device='cuda:2'), covar=tensor([0.0520, 0.0857, 0.1881, 0.0837, 0.3957, 0.0617, 0.0842, 0.1150], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0420, 0.0504, 0.0353, 0.0405, 0.0447, 0.0441, 0.0471], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:03:04,811 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168500.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:20,612 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:27,492 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 05:03:31,427 INFO [train.py:903] (2/4) Epoch 25, batch 4650, loss[loss=0.2146, simple_loss=0.292, pruned_loss=0.06855, over 19669.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.285, pruned_loss=0.0621, over 3830770.92 frames. ], batch size: 58, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:03:35,320 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:50,722 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 05:04:02,095 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 05:04:02,461 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1418, 1.1169, 1.6105, 1.0401, 2.3005, 3.0029, 2.7712, 3.3197], + device='cuda:2'), covar=tensor([0.1705, 0.5217, 0.4369, 0.2793, 0.0750, 0.0279, 0.0333, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0326, 0.0356, 0.0267, 0.0248, 0.0191, 0.0217, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:04:16,035 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.060e+02 6.015e+02 7.632e+02 1.453e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 05:04:34,756 INFO [train.py:903] (2/4) Epoch 25, batch 4700, loss[loss=0.1903, simple_loss=0.2837, pruned_loss=0.04844, over 19659.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06212, over 3825984.97 frames. ], batch size: 58, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:04:58,615 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 05:05:04,400 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:05:38,975 INFO [train.py:903] (2/4) Epoch 25, batch 4750, loss[loss=0.1705, simple_loss=0.2504, pruned_loss=0.04529, over 19385.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.285, pruned_loss=0.0622, over 3835127.37 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:06:06,418 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 05:06:09,930 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 05:06:22,550 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.029e+02 6.051e+02 7.124e+02 1.309e+03, threshold=1.210e+03, percent-clipped=1.0 +2023-04-03 05:06:40,996 INFO [train.py:903] (2/4) Epoch 25, batch 4800, loss[loss=0.2547, simple_loss=0.3283, pruned_loss=0.09056, over 19327.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.285, pruned_loss=0.06211, over 3839508.67 frames. ], batch size: 66, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:06:59,403 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1777, 2.0262, 1.8299, 2.1477, 2.0313, 1.8430, 1.6878, 2.0408], + device='cuda:2'), covar=tensor([0.0998, 0.1389, 0.1385, 0.1046, 0.1315, 0.0564, 0.1498, 0.0731], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0356, 0.0315, 0.0253, 0.0303, 0.0254, 0.0314, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:07:29,206 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168710.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:07:43,809 INFO [train.py:903] (2/4) Epoch 25, batch 4850, loss[loss=0.2049, simple_loss=0.2897, pruned_loss=0.06008, over 19526.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2843, pruned_loss=0.06164, over 3833006.35 frames. ], batch size: 56, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:08:09,418 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 05:08:29,303 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.536e+02 6.758e+02 9.275e+02 1.787e+03, threshold=1.352e+03, percent-clipped=12.0 +2023-04-03 05:08:30,560 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 05:08:36,453 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 05:08:36,488 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 05:08:43,852 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:08:47,092 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 05:08:48,234 INFO [train.py:903] (2/4) Epoch 25, batch 4900, loss[loss=0.1891, simple_loss=0.2788, pruned_loss=0.04973, over 19661.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0617, over 3832604.47 frames. ], batch size: 55, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:09:06,545 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 05:09:10,218 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1119, 0.9084, 1.0778, 1.4449, 1.0547, 0.9810, 1.0312, 1.0406], + device='cuda:2'), covar=tensor([0.1161, 0.1701, 0.1364, 0.0695, 0.1032, 0.1486, 0.1222, 0.1131], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0238, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 05:09:15,998 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:09:52,871 INFO [train.py:903] (2/4) Epoch 25, batch 4950, loss[loss=0.2183, simple_loss=0.3002, pruned_loss=0.06827, over 19666.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06176, over 3823996.19 frames. ], batch size: 55, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:10:04,481 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 05:10:30,180 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 05:10:36,920 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.693e+02 5.645e+02 7.417e+02 1.662e+03, threshold=1.129e+03, percent-clipped=1.0 +2023-04-03 05:10:49,048 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4322, 1.4899, 1.7935, 1.6576, 2.7097, 2.2897, 2.7773, 1.3526], + device='cuda:2'), covar=tensor([0.2670, 0.4707, 0.2995, 0.2080, 0.1629, 0.2275, 0.1642, 0.4695], + device='cuda:2'), in_proj_covar=tensor([0.0548, 0.0666, 0.0739, 0.0501, 0.0630, 0.0544, 0.0670, 0.0567], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 05:10:50,503 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 05:10:55,827 INFO [train.py:903] (2/4) Epoch 25, batch 5000, loss[loss=0.1499, simple_loss=0.2299, pruned_loss=0.03496, over 19753.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2852, pruned_loss=0.06195, over 3824825.48 frames. 
], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:11:02,566 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 05:11:13,720 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 05:11:58,384 INFO [train.py:903] (2/4) Epoch 25, batch 5050, loss[loss=0.2215, simple_loss=0.3047, pruned_loss=0.06913, over 19669.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2851, pruned_loss=0.06193, over 3813191.93 frames. ], batch size: 55, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:12:06,825 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0786, 2.1655, 2.4185, 2.7844, 2.1204, 2.6972, 2.4523, 2.2175], + device='cuda:2'), covar=tensor([0.4394, 0.4149, 0.1918, 0.2463, 0.4386, 0.2158, 0.4915, 0.3437], + device='cuda:2'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0941, 0.0896, 0.0833, 0.0851, 0.0796], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 05:12:18,663 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3461, 2.3731, 2.6394, 3.1209, 2.3732, 2.9552, 2.7192, 2.3670], + device='cuda:2'), covar=tensor([0.4307, 0.4345, 0.1930, 0.2667, 0.4668, 0.2328, 0.4707, 0.3500], + device='cuda:2'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0942, 0.0897, 0.0833, 0.0851, 0.0796], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 05:12:33,915 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 05:12:41,888 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.713e+02 5.534e+02 7.099e+02 1.364e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-04-03 05:12:55,408 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168966.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:02,135 INFO [train.py:903] (2/4) Epoch 25, batch 5100, loss[loss=0.2206, simple_loss=0.3037, pruned_loss=0.06875, over 18738.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2849, pruned_loss=0.06138, over 3825692.66 frames. ], batch size: 74, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:13:11,261 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 05:13:14,752 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 05:13:19,307 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168985.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:20,089 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-03 05:13:21,493 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168987.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:13:26,023 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:32,731 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6051, 1.4272, 1.4218, 2.1547, 1.7063, 1.9546, 2.0687, 1.6313], + device='cuda:2'), covar=tensor([0.0931, 0.1024, 0.1148, 0.0792, 0.0903, 0.0803, 0.0844, 0.0778], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0239, 0.0226, 0.0214, 0.0189, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 05:14:05,076 INFO [train.py:903] (2/4) Epoch 25, batch 5150, loss[loss=0.1606, simple_loss=0.2495, pruned_loss=0.03588, over 19845.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06088, over 3832951.27 frames. ], batch size: 52, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:14:16,409 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 05:14:24,673 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0877, 5.5209, 3.4194, 4.8979, 0.7982, 5.7406, 5.4971, 5.7305], + device='cuda:2'), covar=tensor([0.0408, 0.0869, 0.1567, 0.0743, 0.4391, 0.0479, 0.0807, 0.0978], + device='cuda:2'), in_proj_covar=tensor([0.0524, 0.0427, 0.0511, 0.0358, 0.0412, 0.0454, 0.0446, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:14:48,108 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.295e+02 6.704e+02 8.101e+02 2.101e+03, threshold=1.341e+03, percent-clipped=6.0 +2023-04-03 05:14:52,534 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:15:08,237 INFO [train.py:903] (2/4) Epoch 25, batch 5200, loss[loss=0.2018, simple_loss=0.288, pruned_loss=0.05776, over 19094.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2829, pruned_loss=0.06065, over 3841125.07 frames. ], batch size: 69, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:15:23,490 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 05:15:44,123 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169100.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:08,669 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 05:16:13,023 INFO [train.py:903] (2/4) Epoch 25, batch 5250, loss[loss=0.1729, simple_loss=0.2694, pruned_loss=0.0382, over 19675.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2821, pruned_loss=0.06016, over 3845687.14 frames. ], batch size: 53, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:16:27,842 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:56,981 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.211e+02 5.791e+02 7.204e+02 1.532e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 05:17:16,191 INFO [train.py:903] (2/4) Epoch 25, batch 5300, loss[loss=0.1887, simple_loss=0.2775, pruned_loss=0.0499, over 19689.00 frames. 
], tot_loss[loss=0.2019, simple_loss=0.283, pruned_loss=0.06037, over 3850942.34 frames. ], batch size: 59, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:17:22,417 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:17:34,462 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 05:18:18,931 INFO [train.py:903] (2/4) Epoch 25, batch 5350, loss[loss=0.2193, simple_loss=0.302, pruned_loss=0.0683, over 19472.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2832, pruned_loss=0.06075, over 3838162.72 frames. ], batch size: 64, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:18:56,420 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 05:19:04,353 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 4.992e+02 6.506e+02 8.048e+02 1.510e+03, threshold=1.301e+03, percent-clipped=6.0 +2023-04-03 05:19:07,159 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8154, 4.9322, 5.6114, 5.5846, 1.9909, 5.3156, 4.4360, 5.2835], + device='cuda:2'), covar=tensor([0.1746, 0.1164, 0.0581, 0.0680, 0.6474, 0.0904, 0.0673, 0.1147], + device='cuda:2'), in_proj_covar=tensor([0.0805, 0.0774, 0.0978, 0.0861, 0.0850, 0.0741, 0.0583, 0.0909], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 05:19:24,345 INFO [train.py:903] (2/4) Epoch 25, batch 5400, loss[loss=0.1756, simple_loss=0.261, pruned_loss=0.04507, over 19836.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2833, pruned_loss=0.06065, over 3824801.35 frames. ], batch size: 52, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:19:38,039 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.09 vs. limit=5.0 +2023-04-03 05:19:38,766 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6361, 2.6660, 2.1338, 2.7485, 2.6258, 2.2312, 2.1240, 2.4739], + device='cuda:2'), covar=tensor([0.0999, 0.1443, 0.1436, 0.0962, 0.1260, 0.0533, 0.1449, 0.0712], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0355, 0.0313, 0.0251, 0.0302, 0.0254, 0.0314, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:20:27,247 INFO [train.py:903] (2/4) Epoch 25, batch 5450, loss[loss=0.2596, simple_loss=0.3264, pruned_loss=0.09636, over 19684.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.06032, over 3838761.83 frames. ], batch size: 60, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:20:33,629 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-04-03 05:20:36,088 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-03 05:20:36,594 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:20:39,947 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169331.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:20:46,767 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2021, 2.0893, 2.0301, 1.8697, 1.6975, 1.8848, 0.9086, 1.3541], + device='cuda:2'), covar=tensor([0.0643, 0.0680, 0.0483, 0.0822, 0.1041, 0.0868, 0.1302, 0.1018], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0363, 0.0364, 0.0391, 0.0467, 0.0399, 0.0344, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 05:20:50,641 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 05:21:11,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.467e+02 5.289e+02 7.172e+02 1.661e+03, threshold=1.058e+03, percent-clipped=2.0 +2023-04-03 05:21:29,438 INFO [train.py:903] (2/4) Epoch 25, batch 5500, loss[loss=0.235, simple_loss=0.3055, pruned_loss=0.08227, over 19519.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2838, pruned_loss=0.06043, over 3839715.07 frames. ], batch size: 56, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:21:57,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 05:22:24,189 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 05:22:33,001 INFO [train.py:903] (2/4) Epoch 25, batch 5550, loss[loss=0.1926, simple_loss=0.285, pruned_loss=0.05011, over 17969.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2846, pruned_loss=0.06082, over 3836799.25 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:22:43,807 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 05:23:01,518 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:01,734 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:04,035 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169446.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:23:17,166 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 5.192e+02 6.082e+02 7.576e+02 1.216e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-03 05:23:33,237 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 05:23:36,948 INFO [train.py:903] (2/4) Epoch 25, batch 5600, loss[loss=0.1988, simple_loss=0.2881, pruned_loss=0.0547, over 19341.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.06075, over 3828932.56 frames. 
], batch size: 66, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:23:44,269 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:38,816 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:39,769 INFO [train.py:903] (2/4) Epoch 25, batch 5650, loss[loss=0.2364, simple_loss=0.3118, pruned_loss=0.08054, over 19563.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2842, pruned_loss=0.06065, over 3828935.69 frames. ], batch size: 61, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:25:24,921 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.961e+02 6.158e+02 8.288e+02 1.627e+03, threshold=1.232e+03, percent-clipped=5.0 +2023-04-03 05:25:27,543 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169559.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:25:30,456 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 05:25:37,646 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6633, 1.4912, 1.5366, 2.1268, 1.5742, 2.0382, 2.0447, 1.6887], + device='cuda:2'), covar=tensor([0.0865, 0.0960, 0.0989, 0.0751, 0.0854, 0.0717, 0.0774, 0.0701], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0225, 0.0238, 0.0225, 0.0212, 0.0188, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:2') +2023-04-03 05:25:43,204 INFO [train.py:903] (2/4) Epoch 25, batch 5700, loss[loss=0.1909, simple_loss=0.2627, pruned_loss=0.0596, over 19750.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2839, pruned_loss=0.06143, over 3807137.51 frames. ], batch size: 46, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:11,300 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:26:11,677 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 05:26:12,797 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-03 05:26:22,656 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-03 05:26:29,628 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9407, 2.0420, 2.2980, 2.5771, 2.0003, 2.4284, 2.3112, 2.0679], + device='cuda:2'), covar=tensor([0.4039, 0.3606, 0.1844, 0.2359, 0.3852, 0.2054, 0.4579, 0.3278], + device='cuda:2'), in_proj_covar=tensor([0.0920, 0.0995, 0.0730, 0.0942, 0.0898, 0.0833, 0.0853, 0.0795], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 05:26:47,655 INFO [train.py:903] (2/4) Epoch 25, batch 5750, loss[loss=0.2102, simple_loss=0.2926, pruned_loss=0.0639, over 17032.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.283, pruned_loss=0.06098, over 3797391.80 frames. ], batch size: 101, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:48,834 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 05:26:59,202 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-03 05:27:04,009 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 05:27:06,660 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:09,118 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2125, 1.2630, 1.6585, 1.2921, 2.7363, 3.6923, 3.3636, 3.8058], + device='cuda:2'), covar=tensor([0.1643, 0.3979, 0.3620, 0.2616, 0.0620, 0.0193, 0.0227, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0326, 0.0356, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:27:10,230 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:32,987 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.765e+02 5.968e+02 8.362e+02 1.575e+03, threshold=1.194e+03, percent-clipped=5.0 +2023-04-03 05:27:52,461 INFO [train.py:903] (2/4) Epoch 25, batch 5800, loss[loss=0.1617, simple_loss=0.2449, pruned_loss=0.03929, over 19760.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2833, pruned_loss=0.06106, over 3813726.13 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:28:20,527 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9866, 1.1606, 1.5968, 0.6577, 2.1010, 2.4801, 2.1423, 2.5777], + device='cuda:2'), covar=tensor([0.1561, 0.3828, 0.3374, 0.2705, 0.0604, 0.0278, 0.0344, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0326, 0.0357, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:28:27,917 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169700.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:28:30,493 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169702.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:28:55,613 INFO [train.py:903] (2/4) Epoch 25, batch 5850, loss[loss=0.1899, simple_loss=0.2786, pruned_loss=0.0506, over 19679.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2826, pruned_loss=0.06053, over 3826571.15 frames. 
], batch size: 60, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:29:00,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:02,811 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169727.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:29:08,737 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4429, 1.4592, 1.6683, 1.6684, 2.3310, 2.1708, 2.2817, 1.0498], + device='cuda:2'), covar=tensor([0.2640, 0.4579, 0.2796, 0.2057, 0.1535, 0.2238, 0.1534, 0.4663], + device='cuda:2'), in_proj_covar=tensor([0.0548, 0.0663, 0.0736, 0.0501, 0.0630, 0.0542, 0.0668, 0.0565], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 05:29:28,407 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169747.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:41,211 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.003e+02 6.037e+02 8.413e+02 1.989e+03, threshold=1.207e+03, percent-clipped=6.0 +2023-04-03 05:29:51,797 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:59,831 INFO [train.py:903] (2/4) Epoch 25, batch 5900, loss[loss=0.1742, simple_loss=0.2613, pruned_loss=0.04359, over 19576.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06159, over 3822072.73 frames. ], batch size: 52, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:30:04,541 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 05:30:27,861 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 05:30:42,274 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:30:56,641 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:04,213 INFO [train.py:903] (2/4) Epoch 25, batch 5950, loss[loss=0.1978, simple_loss=0.2852, pruned_loss=0.05518, over 19735.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06159, over 3832582.88 frames. ], batch size: 63, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:31:28,354 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169840.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:39,967 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:49,609 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.837e+02 6.128e+02 7.395e+02 1.765e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-03 05:32:09,283 INFO [train.py:903] (2/4) Epoch 25, batch 6000, loss[loss=0.2025, simple_loss=0.2675, pruned_loss=0.06881, over 19769.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.285, pruned_loss=0.06192, over 3825894.23 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:32:09,283 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 05:32:21,940 INFO [train.py:937] (2/4) Epoch 25, validation: loss=0.1675, simple_loss=0.2674, pruned_loss=0.03383, over 944034.00 frames. 
+2023-04-03 05:32:21,941 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 05:32:25,714 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:27,349 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.58 vs. limit=5.0 +2023-04-03 05:32:48,291 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:57,510 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5535, 1.2606, 1.1648, 1.3618, 1.1806, 1.2303, 1.0623, 1.3583], + device='cuda:2'), covar=tensor([0.1247, 0.1255, 0.1861, 0.1167, 0.1381, 0.0865, 0.2030, 0.1027], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0354, 0.0312, 0.0252, 0.0302, 0.0254, 0.0314, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:33:03,264 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2060, 2.0932, 1.8701, 2.1590, 2.0384, 1.8861, 1.6995, 2.0893], + device='cuda:2'), covar=tensor([0.1017, 0.1465, 0.1387, 0.1047, 0.1361, 0.0532, 0.1544, 0.0700], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0355, 0.0313, 0.0252, 0.0302, 0.0254, 0.0314, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:33:20,610 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:26,967 INFO [train.py:903] (2/4) Epoch 25, batch 6050, loss[loss=0.17, simple_loss=0.2476, pruned_loss=0.04623, over 19732.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2844, pruned_loss=0.06149, over 3830176.52 frames. ], batch size: 46, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:12,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.971e+02 6.200e+02 7.955e+02 1.563e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 05:34:30,151 INFO [train.py:903] (2/4) Epoch 25, batch 6100, loss[loss=0.1872, simple_loss=0.2748, pruned_loss=0.04983, over 19604.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06168, over 3816805.12 frames. ], batch size: 57, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:44,071 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:35:19,630 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 05:35:35,274 INFO [train.py:903] (2/4) Epoch 25, batch 6150, loss[loss=0.2191, simple_loss=0.3006, pruned_loss=0.06875, over 17467.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.285, pruned_loss=0.06173, over 3817610.11 frames. 
], batch size: 101, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:35:40,541 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4863, 1.5820, 1.7517, 1.7165, 2.2565, 2.2072, 2.3425, 0.9501], + device='cuda:2'), covar=tensor([0.2470, 0.4435, 0.2860, 0.1958, 0.1650, 0.2162, 0.1428, 0.4763], + device='cuda:2'), in_proj_covar=tensor([0.0546, 0.0663, 0.0737, 0.0500, 0.0630, 0.0541, 0.0666, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 05:36:07,472 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 05:36:22,372 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 4.967e+02 5.743e+02 7.363e+02 2.013e+03, threshold=1.149e+03, percent-clipped=2.0 +2023-04-03 05:36:28,742 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:36:40,354 INFO [train.py:903] (2/4) Epoch 25, batch 6200, loss[loss=0.2255, simple_loss=0.3047, pruned_loss=0.07312, over 17410.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.0613, over 3813094.46 frames. ], batch size: 101, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:37:04,430 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:13,770 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:26,526 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:39,252 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5331, 1.2150, 1.2709, 2.0797, 1.5272, 1.6393, 1.8177, 1.3490], + device='cuda:2'), covar=tensor([0.0929, 0.1203, 0.1247, 0.0781, 0.0920, 0.0963, 0.0925, 0.0929], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0225, 0.0238, 0.0225, 0.0214, 0.0189, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 05:37:43,646 INFO [train.py:903] (2/4) Epoch 25, batch 6250, loss[loss=0.2539, simple_loss=0.3171, pruned_loss=0.09535, over 12993.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.06154, over 3811276.95 frames. ], batch size: 135, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:38:16,038 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 05:38:16,216 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170148.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:38:29,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.199e+02 6.170e+02 7.836e+02 1.706e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 05:38:47,600 INFO [train.py:903] (2/4) Epoch 25, batch 6300, loss[loss=0.2155, simple_loss=0.2955, pruned_loss=0.06778, over 19671.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06148, over 3812137.11 frames. 
], batch size: 58, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:39:08,037 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0486, 1.9499, 1.8576, 1.6979, 1.5522, 1.7132, 0.6897, 1.1357], + device='cuda:2'), covar=tensor([0.0631, 0.0616, 0.0444, 0.0737, 0.1053, 0.0861, 0.1274, 0.0975], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0359, 0.0362, 0.0387, 0.0463, 0.0396, 0.0341, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 05:39:32,219 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170206.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:39:51,010 INFO [train.py:903] (2/4) Epoch 25, batch 6350, loss[loss=0.2265, simple_loss=0.3098, pruned_loss=0.07158, over 19789.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06089, over 3812749.09 frames. ], batch size: 56, lr: 3.25e-03, grad_scale: 4.0 +2023-04-03 05:39:53,870 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:36,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.417e+02 5.475e+02 7.140e+02 1.571e+03, threshold=1.095e+03, percent-clipped=3.0 +2023-04-03 05:40:42,957 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:54,609 INFO [train.py:903] (2/4) Epoch 25, batch 6400, loss[loss=0.1797, simple_loss=0.2533, pruned_loss=0.05308, over 19780.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2827, pruned_loss=0.06019, over 3828291.01 frames. ], batch size: 47, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:41:04,241 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7445, 2.5304, 2.2940, 2.7027, 2.3566, 2.4219, 2.2783, 2.7088], + device='cuda:2'), covar=tensor([0.0920, 0.1688, 0.1391, 0.1004, 0.1371, 0.0497, 0.1390, 0.0638], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0354, 0.0312, 0.0252, 0.0301, 0.0253, 0.0313, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:41:59,017 INFO [train.py:903] (2/4) Epoch 25, batch 6450, loss[loss=0.2233, simple_loss=0.3033, pruned_loss=0.07166, over 18743.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2826, pruned_loss=0.05979, over 3827949.49 frames. ], batch size: 74, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:42:32,624 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0966, 1.2426, 1.6487, 0.8968, 2.4006, 3.0799, 2.7917, 3.2892], + device='cuda:2'), covar=tensor([0.1661, 0.4068, 0.3617, 0.2831, 0.0628, 0.0242, 0.0266, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0269, 0.0251, 0.0193, 0.0219, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:42:40,340 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:42:44,565 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.688e+02 6.468e+02 8.318e+02 2.178e+03, threshold=1.294e+03, percent-clipped=13.0 +2023-04-03 05:42:45,698 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 05:43:03,215 INFO [train.py:903] (2/4) Epoch 25, batch 6500, loss[loss=0.2128, simple_loss=0.2821, pruned_loss=0.07176, over 19595.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.06063, over 3832487.71 frames. ], batch size: 50, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:43:08,909 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 05:43:12,753 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:43:48,148 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170407.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:44:06,260 INFO [train.py:903] (2/4) Epoch 25, batch 6550, loss[loss=0.1941, simple_loss=0.2728, pruned_loss=0.05775, over 19769.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2841, pruned_loss=0.06102, over 3837863.64 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:44:52,747 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.881e+02 6.158e+02 7.799e+02 1.457e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-03 05:44:57,804 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:09,734 INFO [train.py:903] (2/4) Epoch 25, batch 6600, loss[loss=0.2012, simple_loss=0.2878, pruned_loss=0.05729, over 19778.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06072, over 3826587.78 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:45:19,091 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:20,444 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:27,545 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4811, 1.4636, 1.5412, 1.6161, 1.8680, 1.9487, 1.8353, 0.6699], + device='cuda:2'), covar=tensor([0.2404, 0.4461, 0.2844, 0.1935, 0.1627, 0.2344, 0.1500, 0.4762], + device='cuda:2'), in_proj_covar=tensor([0.0547, 0.0664, 0.0738, 0.0498, 0.0628, 0.0542, 0.0667, 0.0564], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 05:45:28,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:51,466 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:09,051 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:13,013 INFO [train.py:903] (2/4) Epoch 25, batch 6650, loss[loss=0.1799, simple_loss=0.2656, pruned_loss=0.04706, over 19765.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06088, over 3834554.76 frames. 
], batch size: 54, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:46:13,331 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:41,278 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170544.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:58,415 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.827e+02 5.978e+02 7.974e+02 2.215e+03, threshold=1.196e+03, percent-clipped=6.0 +2023-04-03 05:47:16,981 INFO [train.py:903] (2/4) Epoch 25, batch 6700, loss[loss=0.1693, simple_loss=0.2474, pruned_loss=0.04555, over 19726.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06072, over 3820604.08 frames. ], batch size: 51, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:47:36,693 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:48:17,852 INFO [train.py:903] (2/4) Epoch 25, batch 6750, loss[loss=0.2192, simple_loss=0.3043, pruned_loss=0.06708, over 19549.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.06099, over 3831532.30 frames. ], batch size: 64, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:48:59,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.526e+02 5.801e+02 6.899e+02 1.670e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 05:49:15,797 INFO [train.py:903] (2/4) Epoch 25, batch 6800, loss[loss=0.1792, simple_loss=0.2633, pruned_loss=0.04753, over 19748.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.0604, over 3822413.95 frames. ], batch size: 51, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:50:02,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 05:50:03,828 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 05:50:07,359 INFO [train.py:903] (2/4) Epoch 26, batch 0, loss[loss=0.1719, simple_loss=0.2472, pruned_loss=0.04831, over 18140.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2472, pruned_loss=0.04831, over 18140.00 frames. ], batch size: 40, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:50:07,359 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 05:50:19,307 INFO [train.py:937] (2/4) Epoch 26, validation: loss=0.1673, simple_loss=0.2675, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 05:50:19,308 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 05:50:32,188 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 05:50:44,717 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9378, 4.5170, 2.7662, 3.9113, 0.8895, 4.4846, 4.3538, 4.4471], + device='cuda:2'), covar=tensor([0.0578, 0.0907, 0.2012, 0.0845, 0.4189, 0.0652, 0.0916, 0.1171], + device='cuda:2'), in_proj_covar=tensor([0.0520, 0.0424, 0.0510, 0.0357, 0.0409, 0.0450, 0.0445, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:51:06,032 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.69 vs. limit=5.0 +2023-04-03 05:51:20,627 INFO [train.py:903] (2/4) Epoch 26, batch 50, loss[loss=0.2017, simple_loss=0.2779, pruned_loss=0.06276, over 19793.00 frames. 
], tot_loss[loss=0.2018, simple_loss=0.2806, pruned_loss=0.06154, over 876116.30 frames. ], batch size: 49, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:51:28,231 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1117, 1.3177, 1.6580, 1.1622, 2.4832, 3.3274, 3.0493, 3.5158], + device='cuda:2'), covar=tensor([0.1828, 0.4167, 0.3619, 0.2794, 0.0708, 0.0222, 0.0261, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0328, 0.0358, 0.0268, 0.0250, 0.0191, 0.0218, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 05:51:30,191 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.268e+02 6.216e+02 7.845e+02 1.668e+03, threshold=1.243e+03, percent-clipped=9.0 +2023-04-03 05:51:54,950 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170778.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:51:55,740 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 05:52:04,576 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-03 05:52:21,931 INFO [train.py:903] (2/4) Epoch 26, batch 100, loss[loss=0.2222, simple_loss=0.307, pruned_loss=0.06869, over 18236.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.286, pruned_loss=0.06344, over 1522946.00 frames. ], batch size: 83, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:52:25,970 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:52:32,347 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 05:52:50,717 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:53:24,748 INFO [train.py:903] (2/4) Epoch 26, batch 150, loss[loss=0.2162, simple_loss=0.2957, pruned_loss=0.0684, over 13519.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2861, pruned_loss=0.0627, over 2029064.76 frames. ], batch size: 136, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:53:36,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 4.908e+02 6.470e+02 7.917e+02 1.560e+03, threshold=1.294e+03, percent-clipped=6.0 +2023-04-03 05:54:25,496 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 05:54:26,701 INFO [train.py:903] (2/4) Epoch 26, batch 200, loss[loss=0.1725, simple_loss=0.2486, pruned_loss=0.0482, over 19354.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2856, pruned_loss=0.06285, over 2427300.66 frames. ], batch size: 47, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:05,060 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170931.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:14,518 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:24,848 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170946.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:30,408 INFO [train.py:903] (2/4) Epoch 26, batch 250, loss[loss=0.1604, simple_loss=0.2416, pruned_loss=0.03961, over 19784.00 frames. 
], tot_loss[loss=0.2035, simple_loss=0.2837, pruned_loss=0.06172, over 2743591.54 frames. ], batch size: 47, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:31,162 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 05:55:39,633 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4161, 2.0624, 1.6114, 1.4630, 1.8928, 1.3635, 1.4222, 1.8920], + device='cuda:2'), covar=tensor([0.0948, 0.0908, 0.1177, 0.0876, 0.0673, 0.1348, 0.0693, 0.0458], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0336, 0.0270, 0.0249, 0.0342, 0.0294, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 05:55:42,602 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.848e+02 5.950e+02 8.014e+02 1.769e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 05:56:34,950 INFO [train.py:903] (2/4) Epoch 26, batch 300, loss[loss=0.1792, simple_loss=0.2603, pruned_loss=0.04901, over 19681.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2835, pruned_loss=0.06138, over 2992603.74 frames. ], batch size: 53, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:34,248 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171046.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:57:34,580 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-03 05:57:38,583 INFO [train.py:903] (2/4) Epoch 26, batch 350, loss[loss=0.1704, simple_loss=0.2513, pruned_loss=0.04478, over 19770.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2839, pruned_loss=0.06174, over 3179301.05 frames. ], batch size: 47, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:45,671 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:57:49,061 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 4.764e+02 6.020e+02 7.720e+02 1.602e+03, threshold=1.204e+03, percent-clipped=4.0 +2023-04-03 05:58:42,167 INFO [train.py:903] (2/4) Epoch 26, batch 400, loss[loss=0.2196, simple_loss=0.3009, pruned_loss=0.0692, over 19615.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06173, over 3318388.19 frames. ], batch size: 61, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:58:55,406 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171110.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:59:43,979 INFO [train.py:903] (2/4) Epoch 26, batch 450, loss[loss=0.2105, simple_loss=0.2982, pruned_loss=0.06146, over 19780.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2843, pruned_loss=0.06157, over 3435955.97 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:59:56,342 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.217e+02 6.577e+02 9.059e+02 2.566e+03, threshold=1.315e+03, percent-clipped=7.0 +2023-04-03 06:00:19,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 06:00:20,829 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 06:00:30,532 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1353, 1.8531, 1.7599, 1.9958, 1.7700, 1.7959, 1.6327, 2.0213], + device='cuda:2'), covar=tensor([0.0974, 0.1366, 0.1387, 0.1056, 0.1392, 0.0559, 0.1520, 0.0703], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0354, 0.0314, 0.0254, 0.0304, 0.0255, 0.0315, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:00:38,904 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:00:47,728 INFO [train.py:903] (2/4) Epoch 26, batch 500, loss[loss=0.2164, simple_loss=0.2956, pruned_loss=0.06861, over 19656.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06252, over 3528312.55 frames. ], batch size: 60, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:01:05,598 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:11,764 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:52,225 INFO [train.py:903] (2/4) Epoch 26, batch 550, loss[loss=0.2513, simple_loss=0.3358, pruned_loss=0.08345, over 19747.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.06209, over 3585110.52 frames. ], batch size: 63, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:03,094 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.798e+02 6.471e+02 7.842e+02 1.459e+03, threshold=1.294e+03, percent-clipped=3.0 +2023-04-03 06:02:12,067 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 06:02:44,061 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171290.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:02:55,675 INFO [train.py:903] (2/4) Epoch 26, batch 600, loss[loss=0.1976, simple_loss=0.2733, pruned_loss=0.06096, over 19613.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2847, pruned_loss=0.06208, over 3643749.98 frames. ], batch size: 50, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:58,486 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:31,082 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:36,404 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 06:03:57,550 INFO [train.py:903] (2/4) Epoch 26, batch 650, loss[loss=0.2017, simple_loss=0.289, pruned_loss=0.05718, over 19673.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.06247, over 3678955.65 frames. ], batch size: 58, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:04:09,344 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 4.759e+02 5.860e+02 7.918e+02 1.260e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-03 06:04:16,475 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:05:01,709 INFO [train.py:903] (2/4) Epoch 26, batch 700, loss[loss=0.2279, simple_loss=0.3027, pruned_loss=0.07652, over 17562.00 frames. 
], tot_loss[loss=0.2026, simple_loss=0.2832, pruned_loss=0.06098, over 3715337.74 frames. ], batch size: 101, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:05:10,661 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171405.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:08,106 INFO [train.py:903] (2/4) Epoch 26, batch 750, loss[loss=0.1645, simple_loss=0.2503, pruned_loss=0.03933, over 19522.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2837, pruned_loss=0.06117, over 3728587.02 frames. ], batch size: 54, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:06:11,909 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1511, 1.2803, 1.5003, 1.4068, 2.7747, 1.1289, 2.2599, 3.0799], + device='cuda:2'), covar=tensor([0.0609, 0.2984, 0.3012, 0.1942, 0.0766, 0.2397, 0.1182, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0420, 0.0374, 0.0394, 0.0354, 0.0379, 0.0354, 0.0392, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:06:12,984 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171454.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:18,667 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.123e+02 6.550e+02 8.686e+02 2.549e+03, threshold=1.310e+03, percent-clipped=11.0 +2023-04-03 06:07:12,529 INFO [train.py:903] (2/4) Epoch 26, batch 800, loss[loss=0.2472, simple_loss=0.3102, pruned_loss=0.09215, over 13599.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2832, pruned_loss=0.06115, over 3748638.26 frames. ], batch size: 136, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:07:25,345 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 06:07:32,328 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:14,612 INFO [train.py:903] (2/4) Epoch 26, batch 850, loss[loss=0.1861, simple_loss=0.2826, pruned_loss=0.04477, over 19533.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2839, pruned_loss=0.06148, over 3766838.08 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:08:24,829 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:25,743 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.689e+02 5.699e+02 7.261e+02 1.636e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-03 06:08:40,932 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:09:05,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 06:09:18,250 INFO [train.py:903] (2/4) Epoch 26, batch 900, loss[loss=0.2038, simple_loss=0.288, pruned_loss=0.05979, over 19512.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2847, pruned_loss=0.06202, over 3766005.22 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:17,079 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. limit=5.0 +2023-04-03 06:10:22,183 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 06:10:23,376 INFO [train.py:903] (2/4) Epoch 26, batch 950, loss[loss=0.2056, simple_loss=0.2855, pruned_loss=0.06283, over 19679.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.284, pruned_loss=0.06176, over 3768440.98 frames. ], batch size: 60, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:34,905 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.213e+02 6.211e+02 8.451e+02 1.981e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 06:10:37,597 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171661.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:10:45,921 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4944, 2.1403, 1.7278, 1.3448, 2.0690, 1.2792, 1.3134, 1.9789], + device='cuda:2'), covar=tensor([0.0893, 0.0711, 0.0878, 0.0994, 0.0472, 0.1273, 0.0699, 0.0400], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0318, 0.0337, 0.0270, 0.0250, 0.0343, 0.0295, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:10:51,832 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171673.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:00,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9064, 2.0369, 1.4523, 1.8777, 1.9028, 1.5260, 1.5119, 1.7816], + device='cuda:2'), covar=tensor([0.1295, 0.1520, 0.2030, 0.1358, 0.1496, 0.1004, 0.2042, 0.1071], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0354, 0.0314, 0.0254, 0.0305, 0.0254, 0.0315, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:11:09,439 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171686.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:27,624 INFO [train.py:903] (2/4) Epoch 26, batch 1000, loss[loss=0.2181, simple_loss=0.2968, pruned_loss=0.06971, over 19651.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2847, pruned_loss=0.0622, over 3773231.68 frames. ], batch size: 55, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:11:36,116 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171707.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:12:19,816 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 06:12:31,849 INFO [train.py:903] (2/4) Epoch 26, batch 1050, loss[loss=0.1707, simple_loss=0.2508, pruned_loss=0.04532, over 19427.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06222, over 3775183.37 frames. ], batch size: 48, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:12:42,471 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.465e+02 6.383e+02 7.807e+02 1.569e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-03 06:12:49,996 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-03 06:13:01,877 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 06:13:34,467 INFO [train.py:903] (2/4) Epoch 26, batch 1100, loss[loss=0.1831, simple_loss=0.2521, pruned_loss=0.05707, over 19766.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2848, pruned_loss=0.06204, over 3790110.23 frames. 
], batch size: 46, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:05,464 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:09,085 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171825.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:40,977 INFO [train.py:903] (2/4) Epoch 26, batch 1150, loss[loss=0.1714, simple_loss=0.2522, pruned_loss=0.04526, over 19752.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2846, pruned_loss=0.06198, over 3783425.20 frames. ], batch size: 51, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:41,488 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171850.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:54,273 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.298e+02 6.784e+02 8.276e+02 1.649e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-03 06:14:55,524 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:15:46,145 INFO [train.py:903] (2/4) Epoch 26, batch 1200, loss[loss=0.1711, simple_loss=0.2447, pruned_loss=0.0488, over 19719.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2851, pruned_loss=0.06204, over 3796777.38 frames. ], batch size: 46, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:12,779 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 06:16:22,261 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:29,762 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 06:16:48,466 INFO [train.py:903] (2/4) Epoch 26, batch 1250, loss[loss=0.2471, simple_loss=0.3294, pruned_loss=0.08242, over 19799.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2847, pruned_loss=0.06194, over 3813962.94 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:53,712 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:54,767 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171955.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:58,916 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.182e+02 6.196e+02 7.754e+02 1.405e+03, threshold=1.239e+03, percent-clipped=1.0 +2023-04-03 06:17:21,603 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:17:51,745 INFO [train.py:903] (2/4) Epoch 26, batch 1300, loss[loss=0.1691, simple_loss=0.2435, pruned_loss=0.04734, over 18581.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06129, over 3818231.03 frames. ], batch size: 41, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:56,742 INFO [train.py:903] (2/4) Epoch 26, batch 1350, loss[loss=0.2169, simple_loss=0.2943, pruned_loss=0.06977, over 19005.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.284, pruned_loss=0.06149, over 3824145.59 frames. 
], batch size: 75, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:59,455 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8448, 3.5115, 2.5128, 3.1115, 1.0891, 3.4611, 3.3375, 3.3938], + device='cuda:2'), covar=tensor([0.0891, 0.1021, 0.1970, 0.0936, 0.3502, 0.0852, 0.1031, 0.1404], + device='cuda:2'), in_proj_covar=tensor([0.0521, 0.0423, 0.0510, 0.0357, 0.0409, 0.0449, 0.0444, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:19:09,254 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-03 06:19:09,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.487e+02 6.725e+02 8.152e+02 1.378e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-03 06:19:16,128 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 06:19:25,571 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4764, 2.3190, 2.0994, 2.5122, 2.3141, 2.1548, 1.9773, 2.4508], + device='cuda:2'), covar=tensor([0.1095, 0.1754, 0.1523, 0.1324, 0.1481, 0.0565, 0.1525, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0357, 0.0318, 0.0256, 0.0308, 0.0256, 0.0318, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:19:34,006 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:01,381 INFO [train.py:903] (2/4) Epoch 26, batch 1400, loss[loss=0.2225, simple_loss=0.3027, pruned_loss=0.07113, over 19670.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06173, over 3828802.94 frames. ], batch size: 60, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:20:07,088 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:47,866 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172136.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:21:05,097 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 06:21:06,113 INFO [train.py:903] (2/4) Epoch 26, batch 1450, loss[loss=0.2101, simple_loss=0.2801, pruned_loss=0.07007, over 19430.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06094, over 3836123.94 frames. ], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:21:16,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 4.778e+02 6.066e+02 7.310e+02 2.231e+03, threshold=1.213e+03, percent-clipped=2.0 +2023-04-03 06:21:22,856 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4508, 1.4458, 1.6723, 1.6555, 2.2256, 2.1001, 2.3360, 0.9169], + device='cuda:2'), covar=tensor([0.2624, 0.4587, 0.2832, 0.2066, 0.1669, 0.2271, 0.1543, 0.5054], + device='cuda:2'), in_proj_covar=tensor([0.0548, 0.0662, 0.0740, 0.0500, 0.0630, 0.0541, 0.0667, 0.0565], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 06:22:09,963 INFO [train.py:903] (2/4) Epoch 26, batch 1500, loss[loss=0.1951, simple_loss=0.2619, pruned_loss=0.06419, over 19752.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2836, pruned_loss=0.06089, over 3835225.74 frames. 
], batch size: 46, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:22:33,622 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9022, 4.4498, 2.6372, 3.8314, 0.9322, 4.4727, 4.3428, 4.4407], + device='cuda:2'), covar=tensor([0.0546, 0.0955, 0.2113, 0.0891, 0.4058, 0.0602, 0.0924, 0.1102], + device='cuda:2'), in_proj_covar=tensor([0.0522, 0.0425, 0.0513, 0.0358, 0.0410, 0.0452, 0.0446, 0.0476], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:22:51,477 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172231.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:14,780 INFO [train.py:903] (2/4) Epoch 26, batch 1550, loss[loss=0.2153, simple_loss=0.2912, pruned_loss=0.06969, over 19844.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06083, over 3835029.79 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:23:23,367 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:26,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.787e+02 4.585e+02 5.725e+02 7.044e+02 1.122e+03, threshold=1.145e+03, percent-clipped=0.0 +2023-04-03 06:23:59,162 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8018, 1.1451, 1.4450, 1.4855, 3.1370, 1.2149, 2.6257, 3.7001], + device='cuda:2'), covar=tensor([0.0628, 0.3872, 0.3423, 0.2470, 0.1105, 0.3065, 0.1358, 0.0376], + device='cuda:2'), in_proj_covar=tensor([0.0420, 0.0374, 0.0395, 0.0354, 0.0380, 0.0354, 0.0393, 0.0414], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:24:17,371 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:24:18,409 INFO [train.py:903] (2/4) Epoch 26, batch 1600, loss[loss=0.1904, simple_loss=0.2717, pruned_loss=0.0546, over 19465.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06118, over 3827727.97 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:24:30,665 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:24:45,059 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 06:24:49,268 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0918, 3.3084, 1.8764, 2.0722, 2.9610, 1.6813, 1.4335, 2.3980], + device='cuda:2'), covar=tensor([0.1534, 0.0815, 0.1173, 0.0940, 0.0577, 0.1359, 0.1150, 0.0656], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0320, 0.0340, 0.0272, 0.0251, 0.0345, 0.0294, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:25:22,900 INFO [train.py:903] (2/4) Epoch 26, batch 1650, loss[loss=0.253, simple_loss=0.328, pruned_loss=0.08902, over 19603.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06173, over 3802414.00 frames. 
], batch size: 57, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:25:32,979 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 4.806e+02 6.116e+02 7.815e+02 1.931e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 06:26:24,993 INFO [train.py:903] (2/4) Epoch 26, batch 1700, loss[loss=0.1637, simple_loss=0.2531, pruned_loss=0.03719, over 19714.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.0619, over 3810309.69 frames. ], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:26:27,918 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 06:26:43,105 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172414.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:27:07,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 06:27:27,216 INFO [train.py:903] (2/4) Epoch 26, batch 1750, loss[loss=0.1911, simple_loss=0.2863, pruned_loss=0.04789, over 19765.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2851, pruned_loss=0.06169, over 3821959.18 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:27:39,752 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.883e+02 5.717e+02 7.034e+02 1.807e+03, threshold=1.143e+03, percent-clipped=3.0 +2023-04-03 06:27:52,738 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7056, 1.6369, 1.5937, 2.1962, 1.6682, 1.9012, 1.9750, 1.8051], + device='cuda:2'), covar=tensor([0.0859, 0.0934, 0.1044, 0.0798, 0.0904, 0.0803, 0.0882, 0.0681], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0226, 0.0214, 0.0190, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 06:28:06,464 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:28:09,159 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3254, 1.3103, 1.8061, 1.3482, 2.7976, 3.7318, 3.4652, 3.9619], + device='cuda:2'), covar=tensor([0.1543, 0.3866, 0.3291, 0.2480, 0.0588, 0.0187, 0.0210, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0270, 0.0252, 0.0193, 0.0219, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 06:28:26,586 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4183, 4.0356, 2.6081, 3.5477, 0.7668, 3.9708, 3.8949, 3.9128], + device='cuda:2'), covar=tensor([0.0601, 0.0927, 0.2041, 0.0896, 0.4016, 0.0720, 0.0873, 0.1183], + device='cuda:2'), in_proj_covar=tensor([0.0518, 0.0420, 0.0507, 0.0355, 0.0406, 0.0447, 0.0441, 0.0472], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:28:32,019 INFO [train.py:903] (2/4) Epoch 26, batch 1800, loss[loss=0.2108, simple_loss=0.28, pruned_loss=0.0708, over 19729.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.286, pruned_loss=0.06186, over 3825401.73 frames. ], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:31,743 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 06:29:36,215 INFO [train.py:903] (2/4) Epoch 26, batch 1850, loss[loss=0.2231, simple_loss=0.3067, pruned_loss=0.06972, over 19485.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2866, pruned_loss=0.06224, over 3825971.20 frames. ], batch size: 64, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:46,981 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.880e+02 5.794e+02 7.257e+02 1.575e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 06:30:11,358 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 06:30:20,692 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.7443, 5.2829, 3.0661, 4.6433, 1.4565, 5.2853, 5.1872, 5.2766], + device='cuda:2'), covar=tensor([0.0380, 0.0697, 0.1813, 0.0705, 0.3460, 0.0530, 0.0736, 0.1124], + device='cuda:2'), in_proj_covar=tensor([0.0521, 0.0423, 0.0511, 0.0357, 0.0407, 0.0450, 0.0443, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:30:34,188 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:30:39,847 INFO [train.py:903] (2/4) Epoch 26, batch 1900, loss[loss=0.2446, simple_loss=0.3268, pruned_loss=0.08118, over 19599.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2873, pruned_loss=0.06208, over 3828737.32 frames. ], batch size: 61, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:30:59,244 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 06:31:01,796 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:05,070 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 06:31:23,028 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9105, 5.0046, 5.7100, 5.7524, 1.8101, 5.3727, 4.5532, 5.3486], + device='cuda:2'), covar=tensor([0.1959, 0.0997, 0.0626, 0.0698, 0.7046, 0.1006, 0.0697, 0.1456], + device='cuda:2'), in_proj_covar=tensor([0.0806, 0.0774, 0.0978, 0.0859, 0.0852, 0.0745, 0.0584, 0.0906], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 06:31:30,015 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 06:31:36,117 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:44,206 INFO [train.py:903] (2/4) Epoch 26, batch 1950, loss[loss=0.1674, simple_loss=0.2566, pruned_loss=0.03906, over 19851.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2877, pruned_loss=0.06247, over 3807880.47 frames. 
], batch size: 52, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:31:57,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.105e+02 6.247e+02 7.487e+02 1.872e+03, threshold=1.249e+03, percent-clipped=7.0 +2023-04-03 06:32:12,282 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172670.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:43,437 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172695.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:48,412 INFO [train.py:903] (2/4) Epoch 26, batch 2000, loss[loss=0.2473, simple_loss=0.3208, pruned_loss=0.08688, over 19663.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2874, pruned_loss=0.0625, over 3806621.08 frames. ], batch size: 58, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:14,448 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 06:33:39,975 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.2246, 5.2907, 6.0691, 6.0792, 2.1441, 5.6706, 4.7883, 5.7424], + device='cuda:2'), covar=tensor([0.1787, 0.0748, 0.0601, 0.0617, 0.6286, 0.0829, 0.0634, 0.1187], + device='cuda:2'), in_proj_covar=tensor([0.0803, 0.0774, 0.0978, 0.0859, 0.0852, 0.0744, 0.0584, 0.0905], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 06:33:46,785 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 06:33:51,652 INFO [train.py:903] (2/4) Epoch 26, batch 2050, loss[loss=0.1956, simple_loss=0.2843, pruned_loss=0.05346, over 19694.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2867, pruned_loss=0.06261, over 3798710.85 frames. ], batch size: 59, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:55,260 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172752.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:34:03,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.217e+02 6.186e+02 8.512e+02 2.102e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 06:34:06,463 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 06:34:07,806 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 06:34:27,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 06:34:54,268 INFO [train.py:903] (2/4) Epoch 26, batch 2100, loss[loss=0.2123, simple_loss=0.2859, pruned_loss=0.06928, over 19628.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2866, pruned_loss=0.06274, over 3807454.32 frames. ], batch size: 50, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:25,172 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 06:35:47,316 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 06:35:56,681 INFO [train.py:903] (2/4) Epoch 26, batch 2150, loss[loss=0.2128, simple_loss=0.2931, pruned_loss=0.06625, over 19571.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2863, pruned_loss=0.06278, over 3803036.49 frames. 
], batch size: 61, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:58,259 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172851.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:02,870 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9312, 1.5470, 1.8352, 1.6841, 4.4484, 1.1918, 2.7490, 4.8325], + device='cuda:2'), covar=tensor([0.0541, 0.2909, 0.2856, 0.2138, 0.0786, 0.2665, 0.1388, 0.0195], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0371, 0.0392, 0.0351, 0.0378, 0.0352, 0.0389, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:36:10,067 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.604e+02 4.749e+02 6.121e+02 8.086e+02 1.727e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 06:36:14,795 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:21,394 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 06:36:31,107 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:59,981 INFO [train.py:903] (2/4) Epoch 26, batch 2200, loss[loss=0.191, simple_loss=0.2807, pruned_loss=0.05067, over 19673.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2868, pruned_loss=0.06276, over 3797694.00 frames. ], batch size: 53, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:38:04,012 INFO [train.py:903] (2/4) Epoch 26, batch 2250, loss[loss=0.1615, simple_loss=0.2447, pruned_loss=0.03915, over 19782.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.06227, over 3793417.51 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:38:16,963 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172960.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:38:17,987 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.806e+02 5.924e+02 7.729e+02 1.368e+03, threshold=1.185e+03, percent-clipped=2.0 +2023-04-03 06:38:51,880 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:39:07,874 INFO [train.py:903] (2/4) Epoch 26, batch 2300, loss[loss=0.2086, simple_loss=0.2954, pruned_loss=0.06094, over 19778.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06198, over 3798695.41 frames. ], batch size: 56, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:39:13,394 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:39:19,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 06:40:11,061 INFO [train.py:903] (2/4) Epoch 26, batch 2350, loss[loss=0.1792, simple_loss=0.2531, pruned_loss=0.05264, over 19728.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2842, pruned_loss=0.06153, over 3798890.78 frames. 
], batch size: 45, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:40:25,950 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.175e+02 6.518e+02 7.971e+02 2.695e+03, threshold=1.304e+03, percent-clipped=8.0 +2023-04-03 06:40:43,736 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173075.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:40:54,419 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 06:41:08,284 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:10,600 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 06:41:14,041 INFO [train.py:903] (2/4) Epoch 26, batch 2400, loss[loss=0.237, simple_loss=0.3137, pruned_loss=0.08011, over 17425.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06129, over 3804495.64 frames. ], batch size: 101, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:41:18,192 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:48,403 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0402, 1.3218, 1.6864, 1.1315, 2.4597, 3.5372, 3.2269, 3.7720], + device='cuda:2'), covar=tensor([0.1807, 0.3991, 0.3602, 0.2786, 0.0725, 0.0191, 0.0240, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0330, 0.0362, 0.0271, 0.0252, 0.0193, 0.0220, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 06:42:19,069 INFO [train.py:903] (2/4) Epoch 26, batch 2450, loss[loss=0.2333, simple_loss=0.319, pruned_loss=0.07384, over 18181.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06128, over 3814347.96 frames. ], batch size: 83, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:42:32,912 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.972e+02 5.973e+02 7.732e+02 1.743e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 06:42:41,738 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173168.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:22,670 INFO [train.py:903] (2/4) Epoch 26, batch 2500, loss[loss=0.1622, simple_loss=0.2387, pruned_loss=0.04283, over 19777.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2844, pruned_loss=0.0611, over 3827763.84 frames. 
], batch size: 47, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:43:24,434 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2899, 2.3587, 2.5837, 2.9253, 2.3250, 2.7959, 2.5973, 2.3647], + device='cuda:2'), covar=tensor([0.4147, 0.4080, 0.1820, 0.2660, 0.4405, 0.2286, 0.4629, 0.3252], + device='cuda:2'), in_proj_covar=tensor([0.0927, 0.1004, 0.0736, 0.0948, 0.0906, 0.0842, 0.0859, 0.0804], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 06:43:31,272 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:36,119 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173211.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:44:25,915 INFO [train.py:903] (2/4) Epoch 26, batch 2550, loss[loss=0.2095, simple_loss=0.2964, pruned_loss=0.06129, over 18230.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06119, over 3817422.94 frames. ], batch size: 83, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:44:40,258 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6130, 2.3232, 1.7055, 1.6072, 2.1586, 1.4326, 1.5932, 1.9530], + device='cuda:2'), covar=tensor([0.1064, 0.0776, 0.1094, 0.0831, 0.0589, 0.1235, 0.0730, 0.0595], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0317, 0.0337, 0.0269, 0.0248, 0.0341, 0.0291, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:44:40,981 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.733e+02 4.982e+02 5.984e+02 8.594e+02 2.255e+03, threshold=1.197e+03, percent-clipped=6.0 +2023-04-03 06:45:23,074 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 06:45:30,051 INFO [train.py:903] (2/4) Epoch 26, batch 2600, loss[loss=0.2141, simple_loss=0.2784, pruned_loss=0.07485, over 19400.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06135, over 3818877.65 frames. ], batch size: 48, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:45:59,689 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173322.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,382 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,478 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2766, 1.2205, 1.2611, 1.3201, 1.0415, 1.3231, 1.3595, 1.2747], + device='cuda:2'), covar=tensor([0.0952, 0.1057, 0.1086, 0.0708, 0.0872, 0.0894, 0.0855, 0.0848], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0226, 0.0228, 0.0242, 0.0228, 0.0214, 0.0190, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 06:46:10,010 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:35,576 INFO [train.py:903] (2/4) Epoch 26, batch 2650, loss[loss=0.1949, simple_loss=0.2836, pruned_loss=0.05308, over 19746.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.285, pruned_loss=0.06178, over 3823065.11 frames. 
], batch size: 63, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:46:41,873 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5740, 2.2514, 1.7393, 1.4971, 2.0819, 1.3762, 1.5431, 1.9844], + device='cuda:2'), covar=tensor([0.1077, 0.0901, 0.1115, 0.0851, 0.0617, 0.1332, 0.0714, 0.0536], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0316, 0.0336, 0.0269, 0.0248, 0.0340, 0.0290, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:46:43,090 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173356.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:45,497 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:49,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.033e+02 5.962e+02 7.363e+02 1.395e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 06:46:55,556 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 06:47:17,768 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:47:39,162 INFO [train.py:903] (2/4) Epoch 26, batch 2700, loss[loss=0.2343, simple_loss=0.3121, pruned_loss=0.07819, over 19128.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2853, pruned_loss=0.06189, over 3817570.53 frames. ], batch size: 69, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:00,257 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8382, 3.3067, 3.3409, 3.3569, 1.3825, 3.2151, 2.8186, 3.1200], + device='cuda:2'), covar=tensor([0.1811, 0.1064, 0.0879, 0.0995, 0.5709, 0.1064, 0.0845, 0.1376], + device='cuda:2'), in_proj_covar=tensor([0.0806, 0.0773, 0.0978, 0.0861, 0.0856, 0.0742, 0.0584, 0.0909], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 06:48:32,602 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3342, 2.3283, 2.5815, 2.9073, 2.2986, 2.7904, 2.5151, 2.3859], + device='cuda:2'), covar=tensor([0.4404, 0.4287, 0.1964, 0.2825, 0.4316, 0.2301, 0.5013, 0.3295], + device='cuda:2'), in_proj_covar=tensor([0.0928, 0.1004, 0.0736, 0.0949, 0.0908, 0.0841, 0.0859, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 06:48:39,710 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-03 06:48:41,211 INFO [train.py:903] (2/4) Epoch 26, batch 2750, loss[loss=0.2182, simple_loss=0.2998, pruned_loss=0.06828, over 18814.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2841, pruned_loss=0.06151, over 3825660.53 frames. 
], batch size: 74, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:54,808 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.847e+02 4.981e+02 6.163e+02 7.639e+02 1.916e+03, threshold=1.233e+03, percent-clipped=5.0 +2023-04-03 06:49:03,488 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:34,144 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:44,198 INFO [train.py:903] (2/4) Epoch 26, batch 2800, loss[loss=0.1699, simple_loss=0.2551, pruned_loss=0.04237, over 19851.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2836, pruned_loss=0.06134, over 3828876.36 frames. ], batch size: 52, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:49:52,993 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4871, 1.6031, 2.0144, 1.8375, 3.2424, 2.6602, 3.4194, 1.6481], + device='cuda:2'), covar=tensor([0.2679, 0.4664, 0.2902, 0.2020, 0.1562, 0.2134, 0.1580, 0.4456], + device='cuda:2'), in_proj_covar=tensor([0.0545, 0.0661, 0.0739, 0.0500, 0.0629, 0.0539, 0.0666, 0.0566], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 06:49:56,461 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173509.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:49:56,869 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-03 06:50:00,284 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:50:48,001 INFO [train.py:903] (2/4) Epoch 26, batch 2850, loss[loss=0.1913, simple_loss=0.2683, pruned_loss=0.05708, over 19749.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.06138, over 3823766.63 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:01,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.954e+02 6.202e+02 8.183e+02 1.932e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 06:51:23,163 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:51:51,093 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 06:51:52,160 INFO [train.py:903] (2/4) Epoch 26, batch 2900, loss[loss=0.1957, simple_loss=0.2839, pruned_loss=0.05373, over 19546.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06145, over 3822723.87 frames. 
], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:57,220 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173603.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:08,006 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2433, 2.2495, 2.5588, 2.8891, 2.2601, 2.7841, 2.5830, 2.3614], + device='cuda:2'), covar=tensor([0.4315, 0.4074, 0.1892, 0.2701, 0.4472, 0.2248, 0.4610, 0.3224], + device='cuda:2'), in_proj_covar=tensor([0.0928, 0.1003, 0.0736, 0.0946, 0.0907, 0.0840, 0.0859, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 06:52:27,677 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:34,774 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:56,771 INFO [train.py:903] (2/4) Epoch 26, batch 2950, loss[loss=0.1943, simple_loss=0.2633, pruned_loss=0.06269, over 19738.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2847, pruned_loss=0.06138, over 3827355.77 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:10,746 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.287e+02 6.719e+02 8.706e+02 2.181e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-03 06:53:23,946 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173671.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:53:59,609 INFO [train.py:903] (2/4) Epoch 26, batch 3000, loss[loss=0.2692, simple_loss=0.3338, pruned_loss=0.1023, over 13086.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06166, over 3813445.78 frames. ], batch size: 135, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:59,610 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 06:54:12,275 INFO [train.py:937] (2/4) Epoch 26, validation: loss=0.1681, simple_loss=0.2675, pruned_loss=0.03435, over 944034.00 frames. +2023-04-03 06:54:12,276 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 06:54:17,254 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 06:55:16,507 INFO [train.py:903] (2/4) Epoch 26, batch 3050, loss[loss=0.1873, simple_loss=0.2806, pruned_loss=0.04705, over 19529.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2849, pruned_loss=0.06189, over 3817324.25 frames. ], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:55:30,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 4.825e+02 5.950e+02 7.456e+02 1.374e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 06:56:02,600 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173786.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:56:20,053 INFO [train.py:903] (2/4) Epoch 26, batch 3100, loss[loss=0.1978, simple_loss=0.2778, pruned_loss=0.05888, over 19665.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.0612, over 3831862.39 frames. ], batch size: 53, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:56:32,206 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-04-03 06:56:49,579 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.7951, 1.2077, 1.5711, 0.5770, 1.9691, 2.4052, 2.1662, 2.6141], + device='cuda:2'), covar=tensor([0.1772, 0.4059, 0.3474, 0.2916, 0.0689, 0.0285, 0.0341, 0.0423], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0328, 0.0361, 0.0269, 0.0252, 0.0193, 0.0218, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 06:57:23,190 INFO [train.py:903] (2/4) Epoch 26, batch 3150, loss[loss=0.1723, simple_loss=0.2651, pruned_loss=0.03976, over 19467.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2852, pruned_loss=0.06184, over 3819388.77 frames. ], batch size: 64, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:23,600 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0503, 1.2959, 1.7177, 1.0482, 2.4322, 3.3469, 3.0557, 3.5744], + device='cuda:2'), covar=tensor([0.1717, 0.3935, 0.3381, 0.2683, 0.0656, 0.0203, 0.0221, 0.0294], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0328, 0.0361, 0.0269, 0.0252, 0.0193, 0.0218, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 06:57:26,865 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173853.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:57:37,140 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.081e+02 6.233e+02 7.406e+02 2.417e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 06:57:49,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 06:58:00,855 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 06:58:02,693 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8593, 4.4718, 2.8147, 3.8814, 0.9674, 4.4001, 4.2697, 4.3689], + device='cuda:2'), covar=tensor([0.0572, 0.0834, 0.1908, 0.0797, 0.4028, 0.0656, 0.0919, 0.1105], + device='cuda:2'), in_proj_covar=tensor([0.0525, 0.0426, 0.0513, 0.0359, 0.0411, 0.0451, 0.0448, 0.0476], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:58:05,096 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173883.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:58:25,322 INFO [train.py:903] (2/4) Epoch 26, batch 3200, loss[loss=0.222, simple_loss=0.2974, pruned_loss=0.07327, over 19133.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2859, pruned_loss=0.06173, over 3831463.53 frames. 
], batch size: 69, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:58:34,842 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1732, 2.0169, 1.7834, 2.0903, 1.8878, 1.7951, 1.6581, 2.0468], + device='cuda:2'), covar=tensor([0.0981, 0.1444, 0.1581, 0.1028, 0.1497, 0.0643, 0.1567, 0.0805], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0359, 0.0316, 0.0256, 0.0307, 0.0256, 0.0319, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:58:35,903 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173908.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:59:25,966 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6543, 1.2517, 1.5205, 1.6864, 3.1987, 1.2558, 2.3322, 3.6805], + device='cuda:2'), covar=tensor([0.0507, 0.3081, 0.2976, 0.1824, 0.0698, 0.2458, 0.1360, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0418, 0.0373, 0.0392, 0.0351, 0.0377, 0.0354, 0.0391, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 06:59:27,978 INFO [train.py:903] (2/4) Epoch 26, batch 3250, loss[loss=0.1888, simple_loss=0.2673, pruned_loss=0.05516, over 19841.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2854, pruned_loss=0.06152, over 3839634.97 frames. ], batch size: 52, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:59:42,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.716e+02 4.798e+02 6.185e+02 8.155e+02 2.789e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-03 06:59:52,430 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173968.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:00:01,800 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:00:31,306 INFO [train.py:903] (2/4) Epoch 26, batch 3300, loss[loss=0.2445, simple_loss=0.3162, pruned_loss=0.08643, over 17648.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2845, pruned_loss=0.06089, over 3832668.68 frames. ], batch size: 101, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 07:00:35,707 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 07:00:38,079 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9850, 1.5310, 1.5705, 1.8245, 1.4671, 1.6523, 1.4728, 1.8186], + device='cuda:2'), covar=tensor([0.1032, 0.1434, 0.1567, 0.1059, 0.1438, 0.0609, 0.1607, 0.0777], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0358, 0.0316, 0.0256, 0.0306, 0.0256, 0.0319, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:01:14,208 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174033.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:17,527 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:26,005 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:35,842 INFO [train.py:903] (2/4) Epoch 26, batch 3350, loss[loss=0.185, simple_loss=0.2663, pruned_loss=0.05185, over 19678.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.06046, over 3826349.01 frames. ], batch size: 53, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:01:49,561 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.999e+02 5.843e+02 7.881e+02 1.777e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 07:01:56,819 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:28,210 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:39,919 INFO [train.py:903] (2/4) Epoch 26, batch 3400, loss[loss=0.2403, simple_loss=0.3103, pruned_loss=0.08514, over 19612.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2856, pruned_loss=0.06146, over 3810774.60 frames. ], batch size: 61, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:15,050 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 07:03:41,130 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 07:03:42,278 INFO [train.py:903] (2/4) Epoch 26, batch 3450, loss[loss=0.191, simple_loss=0.2802, pruned_loss=0.05093, over 19700.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2857, pruned_loss=0.06164, over 3810258.10 frames. ], batch size: 59, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:53,276 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:03:57,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.666e+02 5.900e+02 7.449e+02 1.550e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-03 07:04:02,648 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9920, 1.5389, 1.9446, 1.6044, 4.5051, 1.0928, 2.5930, 4.9756], + device='cuda:2'), covar=tensor([0.0488, 0.3073, 0.2819, 0.2180, 0.0820, 0.2887, 0.1522, 0.0161], + device='cuda:2'), in_proj_covar=tensor([0.0422, 0.0376, 0.0395, 0.0353, 0.0382, 0.0357, 0.0394, 0.0414], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:04:47,408 INFO [train.py:903] (2/4) Epoch 26, batch 3500, loss[loss=0.192, simple_loss=0.2778, pruned_loss=0.05305, over 17597.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2853, pruned_loss=0.06146, over 3813345.50 frames. ], batch size: 101, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:05:18,075 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174224.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:05:49,655 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174249.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 07:05:50,402 INFO [train.py:903] (2/4) Epoch 26, batch 3550, loss[loss=0.2133, simple_loss=0.2961, pruned_loss=0.06525, over 18714.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2856, pruned_loss=0.06176, over 3819967.68 frames. 
], batch size: 74, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:03,183 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.676e+02 6.063e+02 7.972e+02 1.969e+03, threshold=1.213e+03, percent-clipped=7.0 +2023-04-03 07:06:12,990 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5315, 1.7269, 1.9875, 1.8302, 3.0837, 2.6574, 3.3614, 1.6420], + device='cuda:2'), covar=tensor([0.2557, 0.4163, 0.2728, 0.1878, 0.1551, 0.2042, 0.1636, 0.4311], + device='cuda:2'), in_proj_covar=tensor([0.0545, 0.0664, 0.0740, 0.0499, 0.0628, 0.0539, 0.0664, 0.0567], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 07:06:17,604 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0015, 1.2365, 1.7423, 0.9393, 2.2661, 3.0526, 2.7051, 3.2368], + device='cuda:2'), covar=tensor([0.1766, 0.4044, 0.3321, 0.2958, 0.0701, 0.0233, 0.0264, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0327, 0.0361, 0.0269, 0.0250, 0.0193, 0.0218, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 07:06:53,360 INFO [train.py:903] (2/4) Epoch 26, batch 3600, loss[loss=0.1754, simple_loss=0.2504, pruned_loss=0.05018, over 19796.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06181, over 3834751.30 frames. ], batch size: 49, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:54,913 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:05,328 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:54,119 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:57,124 INFO [train.py:903] (2/4) Epoch 26, batch 3650, loss[loss=0.2688, simple_loss=0.3336, pruned_loss=0.102, over 13398.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2864, pruned_loss=0.06207, over 3833132.67 frames. ], batch size: 136, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:08:12,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.064e+02 6.896e+02 8.623e+02 2.807e+03, threshold=1.379e+03, percent-clipped=9.0 +2023-04-03 07:08:26,929 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:28,386 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 07:08:32,713 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:35,178 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:02,324 INFO [train.py:903] (2/4) Epoch 26, batch 3700, loss[loss=0.211, simple_loss=0.2959, pruned_loss=0.06302, over 19755.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2859, pruned_loss=0.06171, over 3822980.41 frames. 
], batch size: 63, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:09:10,647 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:31,639 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1965, 2.9459, 2.1605, 2.6575, 1.0031, 2.9200, 2.8183, 2.8488], + device='cuda:2'), covar=tensor([0.1240, 0.1328, 0.2011, 0.1063, 0.3282, 0.0986, 0.1205, 0.1432], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0427, 0.0513, 0.0358, 0.0413, 0.0451, 0.0450, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:09:44,325 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8421, 4.3885, 2.8717, 3.8734, 1.0359, 4.3866, 4.2513, 4.3644], + device='cuda:2'), covar=tensor([0.0568, 0.1019, 0.1874, 0.0908, 0.4039, 0.0667, 0.0946, 0.1002], + device='cuda:2'), in_proj_covar=tensor([0.0526, 0.0427, 0.0513, 0.0358, 0.0412, 0.0450, 0.0449, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:10:06,913 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6860, 1.6470, 1.5642, 2.1567, 1.6023, 1.9987, 2.0130, 1.7823], + device='cuda:2'), covar=tensor([0.0793, 0.0840, 0.0954, 0.0739, 0.0852, 0.0719, 0.0805, 0.0648], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0240, 0.0224, 0.0212, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 07:10:07,622 INFO [train.py:903] (2/4) Epoch 26, batch 3750, loss[loss=0.213, simple_loss=0.2988, pruned_loss=0.06357, over 19618.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2861, pruned_loss=0.06166, over 3814182.33 frames. ], batch size: 57, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:10:20,549 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.722e+02 4.995e+02 5.757e+02 7.069e+02 1.518e+03, threshold=1.151e+03, percent-clipped=1.0 +2023-04-03 07:11:01,670 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:04,055 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:10,961 INFO [train.py:903] (2/4) Epoch 26, batch 3800, loss[loss=0.184, simple_loss=0.2564, pruned_loss=0.05581, over 19718.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06151, over 3825001.57 frames. ], batch size: 45, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:11:12,304 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:42,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 07:11:55,976 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 07:12:14,059 INFO [train.py:903] (2/4) Epoch 26, batch 3850, loss[loss=0.2748, simple_loss=0.3466, pruned_loss=0.1015, over 19306.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2855, pruned_loss=0.0622, over 3799336.26 frames. 
], batch size: 66, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:12:27,531 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 5.118e+02 6.382e+02 8.415e+02 1.605e+03, threshold=1.276e+03, percent-clipped=5.0 +2023-04-03 07:13:15,057 INFO [train.py:903] (2/4) Epoch 26, batch 3900, loss[loss=0.204, simple_loss=0.2846, pruned_loss=0.06176, over 18827.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.06243, over 3804972.96 frames. ], batch size: 74, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:13:36,337 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:13,853 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:20,750 INFO [train.py:903] (2/4) Epoch 26, batch 3950, loss[loss=0.1584, simple_loss=0.2464, pruned_loss=0.03518, over 19604.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2858, pruned_loss=0.06227, over 3808529.77 frames. ], batch size: 50, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:14:24,392 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 07:14:24,515 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:34,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.927e+02 6.085e+02 7.650e+02 1.385e+03, threshold=1.217e+03, percent-clipped=3.0 +2023-04-03 07:14:54,719 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-03 07:15:24,369 INFO [train.py:903] (2/4) Epoch 26, batch 4000, loss[loss=0.1817, simple_loss=0.2726, pruned_loss=0.0454, over 19675.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.062, over 3815547.96 frames. ], batch size: 58, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:15:54,773 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:13,006 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-03 07:16:15,460 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9103, 1.3802, 1.5298, 1.5916, 3.5252, 1.3463, 2.5341, 3.8936], + device='cuda:2'), covar=tensor([0.0434, 0.2923, 0.2977, 0.1994, 0.0592, 0.2426, 0.1323, 0.0219], + device='cuda:2'), in_proj_covar=tensor([0.0418, 0.0372, 0.0390, 0.0349, 0.0377, 0.0353, 0.0389, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:16:23,716 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6873, 1.4645, 1.5135, 2.4128, 1.8016, 1.9757, 2.2550, 1.6443], + device='cuda:2'), covar=tensor([0.0877, 0.1044, 0.1084, 0.0655, 0.0843, 0.0767, 0.0744, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0223, 0.0227, 0.0240, 0.0224, 0.0212, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 07:16:24,909 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:26,988 INFO [train.py:903] (2/4) Epoch 26, batch 4050, loss[loss=0.183, simple_loss=0.2737, pruned_loss=0.04616, over 19072.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2861, pruned_loss=0.06218, over 3817473.00 frames. ], batch size: 69, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:16:27,161 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:27,470 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:40,265 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:41,100 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 4.947e+02 6.251e+02 7.600e+02 1.203e+03, threshold=1.250e+03, percent-clipped=0.0 +2023-04-03 07:16:52,120 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:59,314 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174773.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:01,691 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174775.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:32,066 INFO [train.py:903] (2/4) Epoch 26, batch 4100, loss[loss=0.2406, simple_loss=0.3177, pruned_loss=0.08174, over 13837.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06141, over 3797101.91 frames. ], batch size: 136, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:17:50,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-03 07:18:07,598 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 07:18:38,181 INFO [train.py:903] (2/4) Epoch 26, batch 4150, loss[loss=0.213, simple_loss=0.3006, pruned_loss=0.06272, over 19687.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06114, over 3805424.41 frames. 
], batch size: 60, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:53,316 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.046e+02 6.484e+02 8.542e+02 2.236e+03, threshold=1.297e+03, percent-clipped=8.0 +2023-04-03 07:18:57,239 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:05,602 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:26,294 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 07:19:38,554 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:41,596 INFO [train.py:903] (2/4) Epoch 26, batch 4200, loss[loss=0.2172, simple_loss=0.2983, pruned_loss=0.06807, over 19267.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2851, pruned_loss=0.06159, over 3800289.09 frames. ], batch size: 66, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:19:42,874 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 07:19:46,914 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0745, 1.2888, 1.6121, 1.1828, 2.3058, 3.0030, 2.7619, 3.2363], + device='cuda:2'), covar=tensor([0.1512, 0.3350, 0.3015, 0.2285, 0.0611, 0.0227, 0.0247, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0329, 0.0362, 0.0271, 0.0252, 0.0194, 0.0219, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 07:20:44,730 INFO [train.py:903] (2/4) Epoch 26, batch 4250, loss[loss=0.2165, simple_loss=0.3075, pruned_loss=0.06275, over 17684.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2853, pruned_loss=0.06165, over 3800875.58 frames. ], batch size: 101, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:20:55,193 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 07:21:01,860 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.804e+02 5.687e+02 7.207e+02 1.586e+03, threshold=1.137e+03, percent-clipped=5.0 +2023-04-03 07:21:07,842 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 07:21:10,202 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6117, 1.4545, 1.5121, 2.2013, 1.5593, 1.9200, 2.0329, 1.6108], + device='cuda:2'), covar=tensor([0.0849, 0.0988, 0.1061, 0.0715, 0.0895, 0.0749, 0.0813, 0.0753], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 07:21:49,468 INFO [train.py:903] (2/4) Epoch 26, batch 4300, loss[loss=0.185, simple_loss=0.2707, pruned_loss=0.04962, over 19581.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2848, pruned_loss=0.06153, over 3794574.37 frames. 
], batch size: 52, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:22:11,533 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:19,623 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0710, 5.5312, 3.1137, 4.8036, 1.0046, 5.7096, 5.4851, 5.6445], + device='cuda:2'), covar=tensor([0.0378, 0.0831, 0.1790, 0.0691, 0.4165, 0.0499, 0.0737, 0.0898], + device='cuda:2'), in_proj_covar=tensor([0.0523, 0.0423, 0.0508, 0.0355, 0.0409, 0.0448, 0.0445, 0.0472], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:22:20,979 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:38,851 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 07:22:41,538 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:52,828 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:53,610 INFO [train.py:903] (2/4) Epoch 26, batch 4350, loss[loss=0.1689, simple_loss=0.2601, pruned_loss=0.03886, over 19687.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2839, pruned_loss=0.06116, over 3800042.84 frames. ], batch size: 59, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:23:08,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 4.980e+02 6.586e+02 7.974e+02 2.340e+03, threshold=1.317e+03, percent-clipped=8.0 +2023-04-03 07:23:14,388 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:23:56,998 INFO [train.py:903] (2/4) Epoch 26, batch 4400, loss[loss=0.1751, simple_loss=0.2618, pruned_loss=0.04422, over 19682.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06132, over 3816276.17 frames. ], batch size: 53, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:24:17,226 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 07:24:23,158 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:26,299 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 07:24:55,261 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:59,527 INFO [train.py:903] (2/4) Epoch 26, batch 4450, loss[loss=0.1891, simple_loss=0.2743, pruned_loss=0.0519, over 19681.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06125, over 3831460.07 frames. 
], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:25:08,163 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:25:14,971 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.830e+02 6.413e+02 8.619e+02 2.132e+03, threshold=1.283e+03, percent-clipped=8.0 +2023-04-03 07:25:41,753 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175182.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:02,757 INFO [train.py:903] (2/4) Epoch 26, batch 4500, loss[loss=0.1658, simple_loss=0.2512, pruned_loss=0.04015, over 19616.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06102, over 3821279.94 frames. ], batch size: 50, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:26:35,234 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 07:26:35,992 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:27:08,200 INFO [train.py:903] (2/4) Epoch 26, batch 4550, loss[loss=0.2214, simple_loss=0.3095, pruned_loss=0.06669, over 19677.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06099, over 3823446.03 frames. ], batch size: 53, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:27:15,040 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 07:27:23,518 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.834e+02 6.204e+02 7.660e+02 2.009e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-03 07:27:40,225 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 07:28:13,303 INFO [train.py:903] (2/4) Epoch 26, batch 4600, loss[loss=0.1619, simple_loss=0.241, pruned_loss=0.0414, over 19754.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.283, pruned_loss=0.06075, over 3822962.08 frames. ], batch size: 51, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:16,040 INFO [train.py:903] (2/4) Epoch 26, batch 4650, loss[loss=0.1845, simple_loss=0.2686, pruned_loss=0.05022, over 19386.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06075, over 3816577.40 frames. ], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:29,898 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 4.879e+02 5.869e+02 7.462e+02 1.692e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 07:29:31,126 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 07:29:44,521 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 07:30:03,973 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:17,580 INFO [train.py:903] (2/4) Epoch 26, batch 4700, loss[loss=0.2417, simple_loss=0.3281, pruned_loss=0.07759, over 19544.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06141, over 3827063.31 frames. 
], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:30:19,629 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9187, 2.0171, 2.3009, 2.5368, 1.9075, 2.4555, 2.3148, 2.1116], + device='cuda:2'), covar=tensor([0.4302, 0.4092, 0.2042, 0.2598, 0.4345, 0.2339, 0.4866, 0.3512], + device='cuda:2'), in_proj_covar=tensor([0.0928, 0.1004, 0.0737, 0.0951, 0.0908, 0.0843, 0.0858, 0.0803], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 07:30:33,531 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5691, 4.0894, 4.2853, 4.2692, 1.7708, 4.0403, 3.5065, 4.0372], + device='cuda:2'), covar=tensor([0.1763, 0.0902, 0.0660, 0.0783, 0.5853, 0.0910, 0.0752, 0.1196], + device='cuda:2'), in_proj_covar=tensor([0.0812, 0.0775, 0.0977, 0.0862, 0.0852, 0.0744, 0.0582, 0.0907], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 07:30:35,712 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:41,020 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 07:31:06,983 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:31:21,512 INFO [train.py:903] (2/4) Epoch 26, batch 4750, loss[loss=0.212, simple_loss=0.2991, pruned_loss=0.06239, over 19476.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2843, pruned_loss=0.06165, over 3818907.48 frames. ], batch size: 64, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:31:37,182 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.945e+02 6.030e+02 7.655e+02 2.128e+03, threshold=1.206e+03, percent-clipped=6.0 +2023-04-03 07:31:38,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175463.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:22,915 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:23,806 INFO [train.py:903] (2/4) Epoch 26, batch 4800, loss[loss=0.17, simple_loss=0.2588, pruned_loss=0.0406, over 19463.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2829, pruned_loss=0.06064, over 3824964.68 frames. ], batch size: 49, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:32:25,160 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:33:28,520 INFO [train.py:903] (2/4) Epoch 26, batch 4850, loss[loss=0.2695, simple_loss=0.3382, pruned_loss=0.1004, over 13202.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2838, pruned_loss=0.06113, over 3807181.78 frames. ], batch size: 135, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:33:42,549 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.553e+02 5.462e+02 6.461e+02 1.537e+03, threshold=1.092e+03, percent-clipped=2.0 +2023-04-03 07:33:49,724 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 07:33:51,023 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:34:12,013 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 07:34:17,972 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 07:34:17,993 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 07:34:27,155 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 07:34:30,608 INFO [train.py:903] (2/4) Epoch 26, batch 4900, loss[loss=0.2426, simple_loss=0.3236, pruned_loss=0.08077, over 19675.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.0618, over 3795822.72 frames. ], batch size: 58, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:34:46,816 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 07:34:50,753 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:35:31,950 INFO [train.py:903] (2/4) Epoch 26, batch 4950, loss[loss=0.2113, simple_loss=0.2904, pruned_loss=0.06606, over 19765.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06232, over 3806871.79 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:35:49,939 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.878e+02 6.108e+02 7.489e+02 1.803e+03, threshold=1.222e+03, percent-clipped=10.0 +2023-04-03 07:35:49,970 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 07:36:13,138 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 07:36:15,578 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175684.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:36:36,326 INFO [train.py:903] (2/4) Epoch 26, batch 5000, loss[loss=0.2111, simple_loss=0.2925, pruned_loss=0.06483, over 19324.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06196, over 3808560.79 frames. ], batch size: 66, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:36:46,077 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 07:36:56,364 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 07:37:10,902 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175728.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:16,381 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175732.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:39,577 INFO [train.py:903] (2/4) Epoch 26, batch 5050, loss[loss=0.191, simple_loss=0.2721, pruned_loss=0.05494, over 19590.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06134, over 3818487.29 frames. ], batch size: 52, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:37:41,475 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. 
limit=5.0 +2023-04-03 07:37:46,847 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:49,654 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.71 vs. limit=5.0 +2023-04-03 07:37:53,679 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.916e+02 5.717e+02 6.994e+02 1.273e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-04-03 07:38:14,292 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 07:38:39,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3916, 3.1315, 2.2668, 2.8304, 0.8541, 3.0947, 2.9817, 3.0207], + device='cuda:2'), covar=tensor([0.0947, 0.1246, 0.1947, 0.0991, 0.3602, 0.0926, 0.1139, 0.1376], + device='cuda:2'), in_proj_covar=tensor([0.0523, 0.0423, 0.0507, 0.0358, 0.0409, 0.0449, 0.0445, 0.0473], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:38:42,427 INFO [train.py:903] (2/4) Epoch 26, batch 5100, loss[loss=0.2039, simple_loss=0.2877, pruned_loss=0.06008, over 19694.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2839, pruned_loss=0.06099, over 3820972.83 frames. ], batch size: 53, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:38:49,483 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 07:38:49,808 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6435, 2.3666, 1.7609, 1.6072, 2.1873, 1.4130, 1.4754, 2.0441], + device='cuda:2'), covar=tensor([0.1050, 0.0816, 0.1072, 0.0842, 0.0538, 0.1300, 0.0767, 0.0484], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0319, 0.0337, 0.0271, 0.0250, 0.0344, 0.0291, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:38:52,888 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 07:38:57,443 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 07:39:30,283 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9984, 1.9195, 1.8752, 1.7021, 1.5010, 1.6336, 0.4502, 0.8951], + device='cuda:2'), covar=tensor([0.0639, 0.0651, 0.0432, 0.0696, 0.1288, 0.0831, 0.1334, 0.1126], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0361, 0.0366, 0.0389, 0.0469, 0.0396, 0.0344, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 07:39:37,133 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:42,002 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175847.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:45,150 INFO [train.py:903] (2/4) Epoch 26, batch 5150, loss[loss=0.2258, simple_loss=0.3044, pruned_loss=0.07358, over 19782.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06045, over 3825618.69 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:39:55,538 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 07:40:01,374 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.003e+02 6.332e+02 8.398e+02 1.509e+03, threshold=1.266e+03, percent-clipped=8.0 +2023-04-03 07:40:14,035 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:15,284 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:30,113 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 07:40:47,095 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:49,328 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:50,212 INFO [train.py:903] (2/4) Epoch 26, batch 5200, loss[loss=0.2057, simple_loss=0.2992, pruned_loss=0.05609, over 19583.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06054, over 3827661.41 frames. ], batch size: 61, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:40:54,075 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1213, 1.2854, 1.4856, 1.5598, 2.7662, 1.2328, 2.1468, 3.1203], + device='cuda:2'), covar=tensor([0.0527, 0.2692, 0.2824, 0.1643, 0.0689, 0.2195, 0.1229, 0.0300], + device='cuda:2'), in_proj_covar=tensor([0.0417, 0.0371, 0.0389, 0.0349, 0.0376, 0.0353, 0.0388, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:41:02,243 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 07:41:41,486 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:41:45,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 07:41:53,689 INFO [train.py:903] (2/4) Epoch 26, batch 5250, loss[loss=0.19, simple_loss=0.2781, pruned_loss=0.05092, over 19667.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06128, over 3817511.19 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:42:03,330 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:07,498 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.675e+02 5.849e+02 7.641e+02 1.436e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 07:42:08,966 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:11,360 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175965.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:25,588 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-03 07:42:54,939 INFO [train.py:903] (2/4) Epoch 26, batch 5300, loss[loss=0.1831, simple_loss=0.2625, pruned_loss=0.05186, over 19462.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2856, pruned_loss=0.06206, over 3827591.77 frames. 
], batch size: 49, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:43:08,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 07:43:09,635 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 07:43:57,546 INFO [train.py:903] (2/4) Epoch 26, batch 5350, loss[loss=0.21, simple_loss=0.2907, pruned_loss=0.06467, over 19786.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06201, over 3835933.34 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:44:14,891 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.809e+02 5.990e+02 7.353e+02 1.688e+03, threshold=1.198e+03, percent-clipped=9.0 +2023-04-03 07:44:27,822 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176072.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:44:30,129 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 07:45:03,487 INFO [train.py:903] (2/4) Epoch 26, batch 5400, loss[loss=0.2002, simple_loss=0.292, pruned_loss=0.05415, over 19542.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2849, pruned_loss=0.0615, over 3823095.39 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:45:07,685 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:36,764 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:37,876 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:08,008 INFO [train.py:903] (2/4) Epoch 26, batch 5450, loss[loss=0.1806, simple_loss=0.2661, pruned_loss=0.04751, over 19772.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06129, over 3831343.36 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:46:10,706 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:23,257 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.815e+02 4.882e+02 5.855e+02 6.912e+02 1.680e+03, threshold=1.171e+03, percent-clipped=1.0 +2023-04-03 07:46:55,121 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:11,319 INFO [train.py:903] (2/4) Epoch 26, batch 5500, loss[loss=0.1781, simple_loss=0.2633, pruned_loss=0.04647, over 19610.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.06073, over 3833149.02 frames. ], batch size: 50, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:47:28,814 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:30,665 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 07:47:37,934 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1148, 1.9733, 1.8031, 2.1324, 1.8949, 1.7496, 1.7157, 2.0045], + device='cuda:2'), covar=tensor([0.1039, 0.1412, 0.1448, 0.0999, 0.1327, 0.0592, 0.1498, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0255, 0.0304, 0.0256, 0.0318, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 07:48:02,311 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:06,825 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:14,667 INFO [train.py:903] (2/4) Epoch 26, batch 5550, loss[loss=0.1924, simple_loss=0.2642, pruned_loss=0.06029, over 17821.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06064, over 3819982.13 frames. ], batch size: 39, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:48:17,145 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 07:48:29,037 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:32,237 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.802e+02 5.930e+02 7.241e+02 1.738e+03, threshold=1.186e+03, percent-clipped=4.0 +2023-04-03 07:48:47,665 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:07,994 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 07:49:18,906 INFO [train.py:903] (2/4) Epoch 26, batch 5600, loss[loss=0.2181, simple_loss=0.2895, pruned_loss=0.07334, over 19862.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05986, over 3826717.73 frames. ], batch size: 52, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:49:28,224 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176307.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:45,691 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8536, 4.3239, 4.6044, 4.6210, 1.6934, 4.3436, 3.7653, 4.3067], + device='cuda:2'), covar=tensor([0.1762, 0.1051, 0.0670, 0.0703, 0.6377, 0.0947, 0.0758, 0.1195], + device='cuda:2'), in_proj_covar=tensor([0.0815, 0.0774, 0.0980, 0.0863, 0.0859, 0.0748, 0.0582, 0.0910], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 07:50:23,046 INFO [train.py:903] (2/4) Epoch 26, batch 5650, loss[loss=0.2091, simple_loss=0.2974, pruned_loss=0.06037, over 19780.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06108, over 3797371.10 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:50:26,193 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-04-03 07:50:32,646 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:50:39,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.381e+02 5.619e+02 7.137e+02 2.187e+03, threshold=1.124e+03, percent-clipped=4.0 +2023-04-03 07:51:02,584 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 07:51:24,805 INFO [train.py:903] (2/4) Epoch 26, batch 5700, loss[loss=0.1682, simple_loss=0.2452, pruned_loss=0.04557, over 18173.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.0614, over 3790379.74 frames. ], batch size: 40, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:51:52,425 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:19,197 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:22,364 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 07:52:26,807 INFO [train.py:903] (2/4) Epoch 26, batch 5750, loss[loss=0.2045, simple_loss=0.2946, pruned_loss=0.05719, over 19666.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06122, over 3800093.91 frames. ], batch size: 58, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:52:30,384 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 07:52:33,956 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 07:52:44,007 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 5.112e+02 6.171e+02 7.862e+02 1.795e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 07:52:50,871 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:09,523 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:28,523 INFO [train.py:903] (2/4) Epoch 26, batch 5800, loss[loss=0.2396, simple_loss=0.3124, pruned_loss=0.08337, over 19760.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06112, over 3802478.77 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:32,080 INFO [train.py:903] (2/4) Epoch 26, batch 5850, loss[loss=0.1904, simple_loss=0.2753, pruned_loss=0.05281, over 19672.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2847, pruned_loss=0.06116, over 3794990.32 frames. ], batch size: 60, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:48,252 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.827e+02 6.114e+02 8.553e+02 2.097e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 07:55:30,001 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 07:55:33,102 INFO [train.py:903] (2/4) Epoch 26, batch 5900, loss[loss=0.1966, simple_loss=0.2841, pruned_loss=0.05455, over 19665.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06108, over 3806303.38 frames. 
], batch size: 55, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:55:37,900 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:50,033 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:51,767 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 07:55:55,485 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:22,740 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:27,340 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:35,023 INFO [train.py:903] (2/4) Epoch 26, batch 5950, loss[loss=0.2279, simple_loss=0.308, pruned_loss=0.07388, over 19248.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06127, over 3823788.41 frames. ], batch size: 66, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:56:51,411 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.421e+02 5.079e+02 6.315e+02 7.489e+02 1.732e+03, threshold=1.263e+03, percent-clipped=4.0 +2023-04-03 07:57:12,939 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:18,397 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:38,452 INFO [train.py:903] (2/4) Epoch 26, batch 6000, loss[loss=0.1716, simple_loss=0.257, pruned_loss=0.04311, over 19727.00 frames. ], tot_loss[loss=0.202, simple_loss=0.283, pruned_loss=0.06049, over 3829573.76 frames. ], batch size: 47, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:57:38,452 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 07:57:51,368 INFO [train.py:937] (2/4) Epoch 26, validation: loss=0.1675, simple_loss=0.2672, pruned_loss=0.03393, over 944034.00 frames. +2023-04-03 07:57:51,369 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 07:57:55,554 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176703.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:16,486 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176719.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:36,014 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:44,243 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-03 07:58:54,249 INFO [train.py:903] (2/4) Epoch 26, batch 6050, loss[loss=0.252, simple_loss=0.3308, pruned_loss=0.08664, over 19785.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2832, pruned_loss=0.0606, over 3828766.01 frames. 
], batch size: 63, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:59:04,000 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3708, 2.2683, 2.0983, 2.0077, 1.7338, 1.9410, 0.6004, 1.3295], + device='cuda:2'), covar=tensor([0.0664, 0.0631, 0.0524, 0.0851, 0.1313, 0.0998, 0.1591, 0.1161], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0358, 0.0363, 0.0386, 0.0463, 0.0391, 0.0341, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 07:59:11,770 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 5.094e+02 6.239e+02 7.368e+02 1.384e+03, threshold=1.248e+03, percent-clipped=1.0 +2023-04-03 07:59:57,912 INFO [train.py:903] (2/4) Epoch 26, batch 6100, loss[loss=0.2105, simple_loss=0.2882, pruned_loss=0.06642, over 17646.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2825, pruned_loss=0.06017, over 3833159.77 frames. ], batch size: 101, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:00:33,092 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:01:00,302 INFO [train.py:903] (2/4) Epoch 26, batch 6150, loss[loss=0.1995, simple_loss=0.2966, pruned_loss=0.05125, over 19286.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06185, over 3810609.20 frames. ], batch size: 66, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:01:18,088 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 4.899e+02 5.851e+02 7.446e+02 2.190e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 08:01:21,484 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 08:01:25,369 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:02:01,466 INFO [train.py:903] (2/4) Epoch 26, batch 6200, loss[loss=0.1971, simple_loss=0.2847, pruned_loss=0.05482, over 19802.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2854, pruned_loss=0.06234, over 3816069.70 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:02:54,384 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:02,386 INFO [train.py:903] (2/4) Epoch 26, batch 6250, loss[loss=0.1807, simple_loss=0.263, pruned_loss=0.04919, over 19477.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2855, pruned_loss=0.06202, over 3818321.71 frames. ], batch size: 49, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:03:20,780 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.905e+02 4.899e+02 6.025e+02 7.517e+02 2.005e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 08:03:24,745 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4748, 2.1189, 1.6377, 1.5361, 1.9531, 1.4338, 1.4147, 1.8819], + device='cuda:2'), covar=tensor([0.1073, 0.0871, 0.1073, 0.0828, 0.0581, 0.1230, 0.0737, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0337, 0.0272, 0.0249, 0.0346, 0.0291, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:03:29,810 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-03 08:03:33,670 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:37,023 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8061, 1.2897, 1.6104, 1.7550, 3.3876, 1.2944, 2.3568, 3.8802], + device='cuda:2'), covar=tensor([0.0513, 0.3076, 0.2885, 0.1733, 0.0675, 0.2472, 0.1424, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0419, 0.0374, 0.0391, 0.0351, 0.0378, 0.0356, 0.0391, 0.0411], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:03:47,550 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:51,227 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:03,342 INFO [train.py:903] (2/4) Epoch 26, batch 6300, loss[loss=0.1973, simple_loss=0.2718, pruned_loss=0.06147, over 19468.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2859, pruned_loss=0.06239, over 3829644.63 frames. ], batch size: 49, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:04:03,957 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:23,758 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177015.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:37,549 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177027.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:56,744 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 08:05:06,344 INFO [train.py:903] (2/4) Epoch 26, batch 6350, loss[loss=0.2102, simple_loss=0.2965, pruned_loss=0.06197, over 19246.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2851, pruned_loss=0.06197, over 3832407.68 frames. ], batch size: 66, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:05:26,141 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.448e+02 5.061e+02 6.104e+02 7.524e+02 1.291e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 08:05:27,859 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5304, 2.0527, 1.5963, 1.5181, 1.9297, 1.3991, 1.4576, 1.8603], + device='cuda:2'), covar=tensor([0.1020, 0.0823, 0.0940, 0.0867, 0.0558, 0.1254, 0.0747, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0317, 0.0334, 0.0270, 0.0248, 0.0343, 0.0290, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:06:11,975 INFO [train.py:903] (2/4) Epoch 26, batch 6400, loss[loss=0.1892, simple_loss=0.2778, pruned_loss=0.05031, over 19534.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06107, over 3832724.35 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:06:14,742 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:38,229 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. 
limit=2.0 +2023-04-03 08:06:45,669 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:48,166 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:05,510 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:14,951 INFO [train.py:903] (2/4) Epoch 26, batch 6450, loss[loss=0.1497, simple_loss=0.2343, pruned_loss=0.03256, over 19299.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2831, pruned_loss=0.06044, over 3808619.78 frames. ], batch size: 44, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:07:33,744 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 4.651e+02 5.846e+02 7.696e+02 2.286e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-03 08:07:56,659 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 08:08:15,607 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177199.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:16,326 INFO [train.py:903] (2/4) Epoch 26, batch 6500, loss[loss=0.2174, simple_loss=0.301, pruned_loss=0.06694, over 19527.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06123, over 3823517.51 frames. ], batch size: 64, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:08:17,673 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 08:08:27,470 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3479, 2.4358, 2.6194, 3.2024, 2.4537, 3.0693, 2.7233, 2.4792], + device='cuda:2'), covar=tensor([0.4170, 0.3977, 0.1927, 0.2477, 0.4434, 0.2147, 0.4605, 0.3235], + device='cuda:2'), in_proj_covar=tensor([0.0926, 0.1002, 0.0735, 0.0948, 0.0904, 0.0841, 0.0855, 0.0801], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 08:08:38,042 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:49,188 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:09:20,907 INFO [train.py:903] (2/4) Epoch 26, batch 6550, loss[loss=0.2082, simple_loss=0.295, pruned_loss=0.06066, over 17704.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2854, pruned_loss=0.062, over 3806296.40 frames. ], batch size: 101, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:09:39,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.796e+02 6.270e+02 7.966e+02 1.683e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 08:10:25,199 INFO [train.py:903] (2/4) Epoch 26, batch 6600, loss[loss=0.2216, simple_loss=0.307, pruned_loss=0.06815, over 19711.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2858, pruned_loss=0.0619, over 3825742.52 frames. ], batch size: 63, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:10:34,182 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.87 vs. 
limit=5.0 +2023-04-03 08:11:02,493 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:27,589 INFO [train.py:903] (2/4) Epoch 26, batch 6650, loss[loss=0.1996, simple_loss=0.2709, pruned_loss=0.06415, over 19088.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2858, pruned_loss=0.06206, over 3801495.04 frames. ], batch size: 42, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:37,316 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:47,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.339e+02 5.694e+02 7.782e+02 1.307e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 08:12:11,017 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:28,602 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:30,471 INFO [train.py:903] (2/4) Epoch 26, batch 6700, loss[loss=0.2075, simple_loss=0.2891, pruned_loss=0.0629, over 18133.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06135, over 3807614.37 frames. ], batch size: 83, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:01,386 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177423.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:31,556 INFO [train.py:903] (2/4) Epoch 26, batch 6750, loss[loss=0.2136, simple_loss=0.2968, pruned_loss=0.06523, over 19455.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06161, over 3806595.54 frames. ], batch size: 64, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:48,506 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.721e+02 5.880e+02 7.244e+02 1.873e+03, threshold=1.176e+03, percent-clipped=5.0 +2023-04-03 08:13:55,573 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177471.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:57,775 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177473.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:14:00,217 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1335, 1.0342, 1.0885, 1.1966, 0.8904, 1.2039, 1.2330, 1.1243], + device='cuda:2'), covar=tensor([0.0908, 0.0970, 0.1028, 0.0663, 0.1003, 0.0844, 0.0831, 0.0769], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0224, 0.0228, 0.0240, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 08:14:28,156 INFO [train.py:903] (2/4) Epoch 26, batch 6800, loss[loss=0.2054, simple_loss=0.2888, pruned_loss=0.06094, over 18713.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06158, over 3806886.36 frames. 
], batch size: 74, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:14:46,366 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5915, 1.6878, 1.9471, 1.9512, 1.4889, 1.8733, 1.9530, 1.7843], + device='cuda:2'), covar=tensor([0.4225, 0.3887, 0.2039, 0.2343, 0.3854, 0.2242, 0.5196, 0.3498], + device='cuda:2'), in_proj_covar=tensor([0.0923, 0.1002, 0.0734, 0.0946, 0.0902, 0.0841, 0.0854, 0.0801], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 08:15:14,225 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 08:15:14,690 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 08:15:18,391 INFO [train.py:903] (2/4) Epoch 27, batch 0, loss[loss=0.2313, simple_loss=0.3171, pruned_loss=0.07268, over 19514.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3171, pruned_loss=0.07268, over 19514.00 frames. ], batch size: 64, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:15:18,392 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 08:15:30,269 INFO [train.py:937] (2/4) Epoch 27, validation: loss=0.1666, simple_loss=0.2668, pruned_loss=0.03317, over 944034.00 frames. +2023-04-03 08:15:30,270 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 08:15:42,950 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 08:16:15,177 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.975e+02 6.244e+02 7.696e+02 2.158e+03, threshold=1.249e+03, percent-clipped=8.0 +2023-04-03 08:16:24,554 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177571.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:33,692 INFO [train.py:903] (2/4) Epoch 27, batch 50, loss[loss=0.2101, simple_loss=0.2937, pruned_loss=0.06329, over 18135.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.06275, over 862191.57 frames. ], batch size: 84, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:16:44,185 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:44,239 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:46,390 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:06,234 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 08:17:15,801 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:35,980 INFO [train.py:903] (2/4) Epoch 27, batch 100, loss[loss=0.2668, simple_loss=0.3367, pruned_loss=0.09846, over 19727.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2846, pruned_loss=0.06162, over 1521769.01 frames. ], batch size: 63, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:17:47,466 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-03 08:17:53,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2478, 1.3036, 1.2561, 1.0730, 1.1232, 1.1196, 0.0905, 0.4185], + device='cuda:2'), covar=tensor([0.0774, 0.0727, 0.0509, 0.0672, 0.1458, 0.0774, 0.1446, 0.1229], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0360, 0.0363, 0.0387, 0.0467, 0.0392, 0.0342, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:18:23,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.210e+02 6.703e+02 8.227e+02 2.617e+03, threshold=1.341e+03, percent-clipped=11.0 +2023-04-03 08:18:39,597 INFO [train.py:903] (2/4) Epoch 27, batch 150, loss[loss=0.1875, simple_loss=0.2794, pruned_loss=0.04773, over 19761.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06175, over 2042947.61 frames. ], batch size: 54, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:18:44,581 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:18:57,585 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 08:19:40,074 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 08:19:42,214 INFO [train.py:903] (2/4) Epoch 27, batch 200, loss[loss=0.1618, simple_loss=0.2385, pruned_loss=0.04252, over 19377.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2851, pruned_loss=0.06123, over 2443717.39 frames. ], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:20:29,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 4.383e+02 5.368e+02 7.234e+02 1.640e+03, threshold=1.074e+03, percent-clipped=1.0 +2023-04-03 08:20:41,540 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.93 vs. limit=5.0 +2023-04-03 08:20:46,585 INFO [train.py:903] (2/4) Epoch 27, batch 250, loss[loss=0.2078, simple_loss=0.28, pruned_loss=0.06782, over 18619.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2844, pruned_loss=0.06048, over 2735558.17 frames. ], batch size: 41, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:21:09,833 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9277, 4.4003, 4.6732, 4.6841, 1.7461, 4.3663, 3.8143, 4.3954], + device='cuda:2'), covar=tensor([0.1755, 0.0789, 0.0636, 0.0661, 0.6302, 0.0972, 0.0681, 0.1170], + device='cuda:2'), in_proj_covar=tensor([0.0812, 0.0777, 0.0981, 0.0862, 0.0860, 0.0746, 0.0580, 0.0908], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 08:21:12,200 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:32,143 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:50,875 INFO [train.py:903] (2/4) Epoch 27, batch 300, loss[loss=0.2204, simple_loss=0.2984, pruned_loss=0.0712, over 13090.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2826, pruned_loss=0.06009, over 2971481.12 frames. 
], batch size: 136, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:22:08,573 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:10,843 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177844.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:36,374 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.100e+02 5.015e+02 6.251e+02 7.839e+02 1.329e+03, threshold=1.250e+03, percent-clipped=8.0 +2023-04-03 08:22:40,038 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177867.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:42,396 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:52,705 INFO [train.py:903] (2/4) Epoch 27, batch 350, loss[loss=0.2023, simple_loss=0.2865, pruned_loss=0.05903, over 19289.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2831, pruned_loss=0.06063, over 3167676.74 frames. ], batch size: 66, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:23:00,609 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 08:23:00,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3106, 3.8603, 3.9707, 3.9454, 1.5605, 3.7571, 3.2184, 3.7386], + device='cuda:2'), covar=tensor([0.1702, 0.0911, 0.0683, 0.0809, 0.5985, 0.1073, 0.0784, 0.1169], + device='cuda:2'), in_proj_covar=tensor([0.0810, 0.0777, 0.0979, 0.0861, 0.0858, 0.0745, 0.0579, 0.0909], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 08:23:40,518 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177915.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:23:56,878 INFO [train.py:903] (2/4) Epoch 27, batch 400, loss[loss=0.1999, simple_loss=0.2905, pruned_loss=0.05469, over 19601.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2823, pruned_loss=0.06027, over 3324655.55 frames. ], batch size: 61, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:24:43,708 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.795e+02 5.555e+02 6.674e+02 1.146e+03, threshold=1.111e+03, percent-clipped=0.0 +2023-04-03 08:24:57,946 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-03 08:24:58,343 INFO [train.py:903] (2/4) Epoch 27, batch 450, loss[loss=0.2049, simple_loss=0.2829, pruned_loss=0.06346, over 19732.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.283, pruned_loss=0.0608, over 3450806.71 frames. ], batch size: 51, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:25:39,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 08:25:40,990 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 08:25:59,705 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178026.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:02,521 INFO [train.py:903] (2/4) Epoch 27, batch 500, loss[loss=0.1639, simple_loss=0.248, pruned_loss=0.03986, over 19493.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2829, pruned_loss=0.0604, over 3536179.87 frames. 
], batch size: 49, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:26:06,472 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:06,573 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4190, 1.5124, 1.7442, 1.6765, 2.7181, 2.1992, 2.8986, 1.3545], + device='cuda:2'), covar=tensor([0.2692, 0.4617, 0.2984, 0.2099, 0.1472, 0.2375, 0.1341, 0.4645], + device='cuda:2'), in_proj_covar=tensor([0.0549, 0.0665, 0.0744, 0.0503, 0.0630, 0.0544, 0.0665, 0.0568], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:26:48,569 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 5.151e+02 6.325e+02 8.134e+02 1.856e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-03 08:26:51,479 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1012, 2.0023, 1.8164, 1.6279, 1.5865, 1.6624, 0.5546, 1.0423], + device='cuda:2'), covar=tensor([0.0641, 0.0657, 0.0520, 0.0900, 0.1203, 0.0942, 0.1406, 0.1154], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0390, 0.0472, 0.0395, 0.0346, 0.0347], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:27:07,125 INFO [train.py:903] (2/4) Epoch 27, batch 550, loss[loss=0.181, simple_loss=0.2715, pruned_loss=0.04521, over 19620.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.06004, over 3606447.05 frames. ], batch size: 57, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:27:15,672 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1552, 5.1979, 6.0111, 6.0378, 2.1157, 5.6512, 4.7957, 5.6738], + device='cuda:2'), covar=tensor([0.1782, 0.0813, 0.0569, 0.0657, 0.6398, 0.0820, 0.0629, 0.1197], + device='cuda:2'), in_proj_covar=tensor([0.0815, 0.0781, 0.0984, 0.0865, 0.0861, 0.0749, 0.0580, 0.0913], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 08:28:09,844 INFO [train.py:903] (2/4) Epoch 27, batch 600, loss[loss=0.2509, simple_loss=0.3182, pruned_loss=0.09182, over 13357.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2838, pruned_loss=0.06015, over 3660871.40 frames. ], batch size: 136, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:28:15,850 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3195, 3.8394, 3.9250, 3.9400, 1.5377, 3.7284, 3.2132, 3.6892], + device='cuda:2'), covar=tensor([0.1852, 0.0949, 0.0709, 0.0802, 0.6190, 0.0994, 0.0816, 0.1213], + device='cuda:2'), in_proj_covar=tensor([0.0818, 0.0781, 0.0987, 0.0868, 0.0864, 0.0751, 0.0583, 0.0916], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 08:28:25,314 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:26,321 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:48,730 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:54,533 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-03 08:28:55,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 5.259e+02 6.338e+02 7.640e+02 1.730e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-03 08:29:11,517 INFO [train.py:903] (2/4) Epoch 27, batch 650, loss[loss=0.176, simple_loss=0.262, pruned_loss=0.04501, over 19404.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.06003, over 3690947.28 frames. ], batch size: 48, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:29:18,991 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1812, 1.9661, 1.8120, 2.0813, 1.7717, 1.8264, 1.7347, 2.0597], + device='cuda:2'), covar=tensor([0.1064, 0.1397, 0.1590, 0.1080, 0.1482, 0.0578, 0.1570, 0.0740], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0360, 0.0320, 0.0258, 0.0309, 0.0259, 0.0323, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 08:30:11,954 INFO [train.py:903] (2/4) Epoch 27, batch 700, loss[loss=0.2142, simple_loss=0.3007, pruned_loss=0.06392, over 19609.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.284, pruned_loss=0.06023, over 3724787.40 frames. ], batch size: 57, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:30:49,379 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:30:58,536 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.694e+02 6.359e+02 8.615e+02 1.569e+03, threshold=1.272e+03, percent-clipped=7.0 +2023-04-03 08:31:11,166 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:16,241 INFO [train.py:903] (2/4) Epoch 27, batch 750, loss[loss=0.2645, simple_loss=0.3235, pruned_loss=0.1027, over 13928.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2846, pruned_loss=0.06079, over 3728833.86 frames. ], batch size: 136, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:31:26,143 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:56,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:32:17,795 INFO [train.py:903] (2/4) Epoch 27, batch 800, loss[loss=0.2054, simple_loss=0.2839, pruned_loss=0.06352, over 19693.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2852, pruned_loss=0.06141, over 3739188.90 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:32:31,960 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 08:33:04,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 4.845e+02 6.131e+02 7.065e+02 2.334e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 08:33:20,264 INFO [train.py:903] (2/4) Epoch 27, batch 850, loss[loss=0.2132, simple_loss=0.3013, pruned_loss=0.06256, over 19359.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2848, pruned_loss=0.06082, over 3762817.03 frames. ], batch size: 66, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:33:45,286 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:12,702 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-04-03 08:34:16,580 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:22,830 INFO [train.py:903] (2/4) Epoch 27, batch 900, loss[loss=0.2741, simple_loss=0.3353, pruned_loss=0.1065, over 19657.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2842, pruned_loss=0.06034, over 3784466.37 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:34:30,685 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-03 08:35:10,866 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.108e+02 4.416e+02 5.422e+02 6.593e+02 1.258e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-04-03 08:35:17,014 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178470.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:35:28,147 INFO [train.py:903] (2/4) Epoch 27, batch 950, loss[loss=0.2396, simple_loss=0.3151, pruned_loss=0.08204, over 17370.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2858, pruned_loss=0.06092, over 3791630.36 frames. ], batch size: 101, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:30,677 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 08:36:11,811 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:32,208 INFO [train.py:903] (2/4) Epoch 27, batch 1000, loss[loss=0.2269, simple_loss=0.2895, pruned_loss=0.08216, over 19783.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.285, pruned_loss=0.06064, over 3804745.08 frames. ], batch size: 49, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:36:34,986 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:45,510 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:06,218 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178555.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:19,298 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.971e+02 6.338e+02 8.822e+02 2.004e+03, threshold=1.268e+03, percent-clipped=12.0 +2023-04-03 08:37:25,257 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 08:37:35,825 INFO [train.py:903] (2/4) Epoch 27, batch 1050, loss[loss=0.1935, simple_loss=0.2822, pruned_loss=0.05238, over 19694.00 frames. ], tot_loss[loss=0.203, simple_loss=0.285, pruned_loss=0.06049, over 3814134.82 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:37:48,901 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5829, 1.7679, 2.0526, 1.7873, 2.5504, 2.9820, 2.8967, 3.1424], + device='cuda:2'), covar=tensor([0.1380, 0.3018, 0.2735, 0.2345, 0.1137, 0.0302, 0.0232, 0.0365], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0329, 0.0362, 0.0271, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 08:38:09,356 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 08:38:38,432 INFO [train.py:903] (2/4) Epoch 27, batch 1100, loss[loss=0.2326, simple_loss=0.3071, pruned_loss=0.07907, over 19576.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.284, pruned_loss=0.06019, over 3816800.22 frames. ], batch size: 61, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:38:50,330 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178637.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:39:24,226 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4710, 1.5584, 1.8425, 1.7018, 2.6443, 2.2718, 2.8448, 1.3042], + device='cuda:2'), covar=tensor([0.2691, 0.4655, 0.2825, 0.2093, 0.1680, 0.2382, 0.1527, 0.4824], + device='cuda:2'), in_proj_covar=tensor([0.0552, 0.0667, 0.0745, 0.0504, 0.0633, 0.0544, 0.0665, 0.0568], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:39:27,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.758e+02 5.833e+02 7.524e+02 1.653e+03, threshold=1.167e+03, percent-clipped=2.0 +2023-04-03 08:39:42,277 INFO [train.py:903] (2/4) Epoch 27, batch 1150, loss[loss=0.2059, simple_loss=0.2948, pruned_loss=0.05854, over 19667.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2847, pruned_loss=0.06089, over 3827820.80 frames. ], batch size: 58, lr: 3.05e-03, grad_scale: 4.0 +2023-04-03 08:40:25,314 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178712.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:40:30,182 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178715.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:40:47,630 INFO [train.py:903] (2/4) Epoch 27, batch 1200, loss[loss=0.2131, simple_loss=0.2987, pruned_loss=0.0637, over 19364.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06102, over 3834554.48 frames. ], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:41:04,878 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3100, 2.1973, 2.0488, 1.9134, 1.6956, 1.8541, 0.6643, 1.3115], + device='cuda:2'), covar=tensor([0.0667, 0.0691, 0.0553, 0.0994, 0.1310, 0.1142, 0.1537, 0.1176], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0367, 0.0370, 0.0394, 0.0474, 0.0398, 0.0348, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:41:18,721 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 08:41:38,088 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.626e+02 4.854e+02 6.177e+02 8.249e+02 1.593e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-03 08:41:53,224 INFO [train.py:903] (2/4) Epoch 27, batch 1250, loss[loss=0.182, simple_loss=0.269, pruned_loss=0.04752, over 19662.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2843, pruned_loss=0.06079, over 3817251.88 frames. 
], batch size: 55, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:42:03,115 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5035, 1.3315, 1.2987, 1.4624, 1.2979, 1.3145, 1.2635, 1.4303], + device='cuda:2'), covar=tensor([0.0887, 0.1298, 0.1247, 0.0801, 0.1128, 0.0531, 0.1294, 0.0674], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0361, 0.0319, 0.0258, 0.0309, 0.0258, 0.0323, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 08:42:05,346 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:40,228 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:47,799 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1267, 3.3398, 1.9763, 2.1712, 2.9517, 1.8508, 1.6160, 2.2067], + device='cuda:2'), covar=tensor([0.1433, 0.0719, 0.1148, 0.0896, 0.0609, 0.1273, 0.1017, 0.0794], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0321, 0.0337, 0.0273, 0.0250, 0.0346, 0.0294, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:42:56,587 INFO [train.py:903] (2/4) Epoch 27, batch 1300, loss[loss=0.2122, simple_loss=0.2915, pruned_loss=0.06646, over 17479.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2845, pruned_loss=0.06081, over 3817806.45 frames. ], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:43:01,013 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 08:43:34,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.47 vs. limit=5.0 +2023-04-03 08:43:44,692 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.004e+02 6.015e+02 7.494e+02 1.649e+03, threshold=1.203e+03, percent-clipped=2.0 +2023-04-03 08:43:58,889 INFO [train.py:903] (2/4) Epoch 27, batch 1350, loss[loss=0.2242, simple_loss=0.3021, pruned_loss=0.07314, over 19352.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06113, over 3822963.32 frames. ], batch size: 66, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:44:11,461 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4674, 2.1495, 1.6958, 1.3631, 1.9558, 1.3249, 1.3441, 1.9603], + device='cuda:2'), covar=tensor([0.1117, 0.0889, 0.1133, 0.1037, 0.0704, 0.1340, 0.0800, 0.0475], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0322, 0.0338, 0.0274, 0.0251, 0.0347, 0.0295, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:44:41,886 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178912.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:45:03,470 INFO [train.py:903] (2/4) Epoch 27, batch 1400, loss[loss=0.22, simple_loss=0.2991, pruned_loss=0.07043, over 17416.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06114, over 3815645.83 frames. 
], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:45:05,047 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:45:50,938 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.777e+02 6.195e+02 8.137e+02 1.607e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-03 08:46:03,721 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 08:46:04,756 INFO [train.py:903] (2/4) Epoch 27, batch 1450, loss[loss=0.1874, simple_loss=0.2651, pruned_loss=0.0548, over 19390.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06039, over 3828240.86 frames. ], batch size: 48, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:46:09,313 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:46:20,297 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-03 08:47:07,116 INFO [train.py:903] (2/4) Epoch 27, batch 1500, loss[loss=0.2447, simple_loss=0.3113, pruned_loss=0.08908, over 13070.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2833, pruned_loss=0.06063, over 3831664.65 frames. ], batch size: 135, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:47:42,372 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:47:45,652 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179059.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:47:54,377 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.273e+02 4.749e+02 5.669e+02 7.445e+02 1.551e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 08:48:08,111 INFO [train.py:903] (2/4) Epoch 27, batch 1550, loss[loss=0.2186, simple_loss=0.2981, pruned_loss=0.0696, over 18206.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06069, over 3832511.09 frames. ], batch size: 83, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:48:32,713 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:12,295 INFO [train.py:903] (2/4) Epoch 27, batch 1600, loss[loss=0.1868, simple_loss=0.2662, pruned_loss=0.05366, over 19732.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2829, pruned_loss=0.06022, over 3831118.35 frames. ], batch size: 51, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:49:18,231 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:37,313 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 08:49:59,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.994e+02 6.186e+02 7.700e+02 1.707e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 08:50:06,443 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179171.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:09,942 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179174.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:50:14,087 INFO [train.py:903] (2/4) Epoch 27, batch 1650, loss[loss=0.1823, simple_loss=0.2807, pruned_loss=0.04193, over 19560.00 frames. 
], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05983, over 3837893.86 frames. ], batch size: 61, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:50:23,755 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179185.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:55,295 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179210.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:17,035 INFO [train.py:903] (2/4) Epoch 27, batch 1700, loss[loss=0.183, simple_loss=0.2732, pruned_loss=0.04642, over 19481.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2836, pruned_loss=0.05991, over 3824790.68 frames. ], batch size: 49, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:51:41,279 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:52,279 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179256.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:51:58,273 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 08:52:04,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 5.162e+02 6.407e+02 7.875e+02 2.392e+03, threshold=1.281e+03, percent-clipped=6.0 +2023-04-03 08:52:18,191 INFO [train.py:903] (2/4) Epoch 27, batch 1750, loss[loss=0.1545, simple_loss=0.2397, pruned_loss=0.03467, over 19731.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05971, over 3819826.70 frames. ], batch size: 45, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:13,854 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2888, 2.9557, 2.4368, 2.4748, 2.0807, 2.6814, 1.1178, 2.2017], + device='cuda:2'), covar=tensor([0.0727, 0.0651, 0.0756, 0.1231, 0.1267, 0.1129, 0.1610, 0.1158], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0364, 0.0367, 0.0391, 0.0472, 0.0397, 0.0345, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 08:53:21,041 INFO [train.py:903] (2/4) Epoch 27, batch 1800, loss[loss=0.1677, simple_loss=0.2556, pruned_loss=0.03992, over 19859.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2835, pruned_loss=0.05995, over 3832353.27 frames. ], batch size: 52, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:21,458 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1195, 2.0106, 1.7811, 2.0979, 1.8649, 1.8092, 1.7297, 2.0230], + device='cuda:2'), covar=tensor([0.1087, 0.1426, 0.1522, 0.1125, 0.1381, 0.0579, 0.1509, 0.0777], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0359, 0.0318, 0.0256, 0.0307, 0.0257, 0.0320, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 08:53:51,622 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:08,230 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 4.924e+02 6.082e+02 7.444e+02 1.664e+03, threshold=1.216e+03, percent-clipped=4.0 +2023-04-03 08:54:15,374 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179371.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:54:21,557 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 08:54:23,182 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:23,936 INFO [train.py:903] (2/4) Epoch 27, batch 1850, loss[loss=0.1863, simple_loss=0.2736, pruned_loss=0.04948, over 19660.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.05968, over 3823024.89 frames. ], batch size: 53, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:54:25,270 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:00,562 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 08:55:21,089 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4902, 1.6225, 1.7475, 2.0306, 1.6495, 1.9198, 1.8199, 1.5984], + device='cuda:2'), covar=tensor([0.4829, 0.4197, 0.2796, 0.2702, 0.4034, 0.2494, 0.6407, 0.4977], + device='cuda:2'), in_proj_covar=tensor([0.0927, 0.1004, 0.0737, 0.0948, 0.0905, 0.0844, 0.0855, 0.0802], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 08:55:25,837 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179427.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:26,554 INFO [train.py:903] (2/4) Epoch 27, batch 1900, loss[loss=0.1729, simple_loss=0.2551, pruned_loss=0.0453, over 19619.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06057, over 3822781.40 frames. ], batch size: 50, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:55:26,927 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3766, 1.4545, 1.7876, 1.5655, 2.8642, 3.8524, 3.5264, 4.0361], + device='cuda:2'), covar=tensor([0.1489, 0.3624, 0.3249, 0.2373, 0.0601, 0.0174, 0.0209, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0270, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 08:55:29,283 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179430.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:55:45,932 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 08:55:49,564 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 08:55:56,562 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:56:00,911 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179455.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:56:10,096 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179462.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:56:14,505 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.488e+02 5.214e+02 6.120e+02 7.486e+02 1.853e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-03 08:56:15,776 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 08:56:28,547 INFO [train.py:903] (2/4) Epoch 27, batch 1950, loss[loss=0.1596, simple_loss=0.2382, pruned_loss=0.04047, over 19724.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.06045, over 3817566.99 frames. ], batch size: 46, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:02,015 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:57:13,667 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-03 08:57:31,829 INFO [train.py:903] (2/4) Epoch 27, batch 2000, loss[loss=0.221, simple_loss=0.2995, pruned_loss=0.07129, over 19317.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.061, over 3804303.95 frames. ], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:32,251 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179528.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:58:19,145 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.376e+02 6.611e+02 8.547e+02 2.231e+03, threshold=1.322e+03, percent-clipped=11.0 +2023-04-03 08:58:33,850 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 08:58:34,808 INFO [train.py:903] (2/4) Epoch 27, batch 2050, loss[loss=0.221, simple_loss=0.3, pruned_loss=0.07099, over 19491.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06147, over 3799528.53 frames. ], batch size: 64, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:58:53,485 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 08:58:54,675 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 08:59:14,227 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 08:59:37,292 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179627.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:59:37,904 INFO [train.py:903] (2/4) Epoch 27, batch 2100, loss[loss=0.2247, simple_loss=0.3124, pruned_loss=0.0685, over 19746.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2849, pruned_loss=0.06112, over 3804055.67 frames. ], batch size: 63, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:03,379 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8613, 1.7132, 1.7622, 2.2171, 1.8435, 1.9966, 1.9735, 1.8246], + device='cuda:2'), covar=tensor([0.0628, 0.0768, 0.0821, 0.0595, 0.0872, 0.0675, 0.0851, 0.0623], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0224, 0.0227, 0.0239, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 09:00:07,543 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-03 09:00:07,912 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179652.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:00:10,164 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179654.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:00:25,310 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.609e+02 5.792e+02 6.874e+02 1.495e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 09:00:28,904 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 09:00:39,310 INFO [train.py:903] (2/4) Epoch 27, batch 2150, loss[loss=0.1916, simple_loss=0.2815, pruned_loss=0.05087, over 19430.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.0611, over 3784687.78 frames. ], batch size: 70, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:42,472 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 09:01:37,664 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:01:43,433 INFO [train.py:903] (2/4) Epoch 27, batch 2200, loss[loss=0.209, simple_loss=0.2929, pruned_loss=0.06258, over 19683.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.0605, over 3789348.77 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:02:30,598 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.069e+02 5.235e+02 6.025e+02 7.394e+02 1.358e+03, threshold=1.205e+03, percent-clipped=1.0 +2023-04-03 09:02:46,337 INFO [train.py:903] (2/4) Epoch 27, batch 2250, loss[loss=0.1721, simple_loss=0.251, pruned_loss=0.04665, over 19393.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2844, pruned_loss=0.06126, over 3785985.34 frames. ], batch size: 47, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:03:21,840 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179806.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:03:33,138 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:03:50,160 INFO [train.py:903] (2/4) Epoch 27, batch 2300, loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04393, over 19675.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06093, over 3791406.22 frames. ], batch size: 60, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:04:02,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179838.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:04:03,049 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 09:04:38,194 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.173e+02 6.408e+02 8.011e+02 2.024e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 09:04:51,778 INFO [train.py:903] (2/4) Epoch 27, batch 2350, loss[loss=0.1842, simple_loss=0.2721, pruned_loss=0.04812, over 19644.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06117, over 3797781.68 frames. ], batch size: 55, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:05:34,695 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 09:05:45,224 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179921.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:05:48,557 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179924.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:05:50,584 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 09:05:53,787 INFO [train.py:903] (2/4) Epoch 27, batch 2400, loss[loss=0.1891, simple_loss=0.2737, pruned_loss=0.05232, over 19758.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06154, over 3803687.72 frames. ], batch size: 48, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:06:11,773 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179942.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:06:41,353 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 4.979e+02 6.417e+02 8.310e+02 2.182e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-03 09:06:57,093 INFO [train.py:903] (2/4) Epoch 27, batch 2450, loss[loss=0.2103, simple_loss=0.2928, pruned_loss=0.0639, over 19758.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06073, over 3814762.04 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:07:21,331 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:07:46,427 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 09:08:00,994 INFO [train.py:903] (2/4) Epoch 27, batch 2500, loss[loss=0.192, simple_loss=0.2803, pruned_loss=0.05181, over 19680.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06007, over 3810649.66 frames. ], batch size: 58, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:08:19,333 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:08:47,574 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3468, 1.6318, 2.1617, 1.6065, 3.1931, 4.7688, 4.6401, 5.2633], + device='cuda:2'), covar=tensor([0.1672, 0.3716, 0.3188, 0.2365, 0.0591, 0.0190, 0.0166, 0.0174], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0331, 0.0364, 0.0270, 0.0254, 0.0196, 0.0220, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 09:08:48,294 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 4.714e+02 5.802e+02 6.784e+02 1.958e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 09:09:02,327 INFO [train.py:903] (2/4) Epoch 27, batch 2550, loss[loss=0.1728, simple_loss=0.2484, pruned_loss=0.04858, over 19345.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.06042, over 3818143.70 frames. 
], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:09:03,703 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1293, 5.1954, 6.0166, 6.0268, 2.0905, 5.6943, 4.7616, 5.6753], + device='cuda:2'), covar=tensor([0.1776, 0.0798, 0.0517, 0.0593, 0.6193, 0.0844, 0.0650, 0.1025], + device='cuda:2'), in_proj_covar=tensor([0.0817, 0.0781, 0.0989, 0.0867, 0.0865, 0.0750, 0.0582, 0.0915], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 09:09:22,041 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:47,135 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180113.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:53,165 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7256, 2.4269, 1.8015, 1.6282, 2.1604, 1.4213, 1.7055, 2.0636], + device='cuda:2'), covar=tensor([0.1173, 0.0884, 0.1164, 0.0894, 0.0647, 0.1353, 0.0677, 0.0513], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0321, 0.0338, 0.0273, 0.0250, 0.0344, 0.0292, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:09:54,399 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180119.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:58,727 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 09:10:04,536 INFO [train.py:903] (2/4) Epoch 27, batch 2600, loss[loss=0.2333, simple_loss=0.3113, pruned_loss=0.07769, over 19654.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2849, pruned_loss=0.06066, over 3819074.37 frames. ], batch size: 55, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:10:15,105 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5190, 2.5738, 2.1172, 2.6619, 2.2882, 2.1619, 2.0731, 2.4097], + device='cuda:2'), covar=tensor([0.1044, 0.1481, 0.1460, 0.1033, 0.1421, 0.0533, 0.1444, 0.0706], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0359, 0.0317, 0.0258, 0.0307, 0.0256, 0.0321, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:10:42,780 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:10:52,196 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.258e+02 6.240e+02 7.647e+02 1.495e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-03 09:11:07,709 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180177.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:11:08,382 INFO [train.py:903] (2/4) Epoch 27, batch 2650, loss[loss=0.2383, simple_loss=0.3194, pruned_loss=0.0786, over 17933.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2851, pruned_loss=0.06085, over 3817182.39 frames. 
], batch size: 83, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:11:18,559 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2203, 5.6476, 3.1427, 4.8657, 1.1120, 5.8932, 5.6497, 5.8583], + device='cuda:2'), covar=tensor([0.0389, 0.0813, 0.1869, 0.0730, 0.3962, 0.0489, 0.0752, 0.1071], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0427, 0.0511, 0.0358, 0.0407, 0.0455, 0.0447, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:11:29,731 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 09:11:38,075 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180202.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:11:59,692 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 09:12:04,763 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:12:11,780 INFO [train.py:903] (2/4) Epoch 27, batch 2700, loss[loss=0.2656, simple_loss=0.339, pruned_loss=0.09611, over 19682.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2852, pruned_loss=0.06114, over 3814698.00 frames. ], batch size: 60, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:12:33,813 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 09:12:39,142 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2349, 2.2960, 2.5342, 3.0087, 2.3632, 2.8495, 2.5462, 2.2276], + device='cuda:2'), covar=tensor([0.4333, 0.4034, 0.1970, 0.2768, 0.4408, 0.2430, 0.4810, 0.3590], + device='cuda:2'), in_proj_covar=tensor([0.0934, 0.1012, 0.0742, 0.0954, 0.0912, 0.0852, 0.0859, 0.0809], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 09:13:00,980 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.233e+02 6.139e+02 8.955e+02 2.009e+03, threshold=1.228e+03, percent-clipped=9.0 +2023-04-03 09:13:02,195 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180268.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:13:08,010 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:13:11,337 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6194, 2.3275, 1.6339, 1.5966, 2.1029, 1.3138, 1.4919, 1.9084], + device='cuda:2'), covar=tensor([0.1117, 0.0796, 0.1195, 0.0846, 0.0643, 0.1361, 0.0761, 0.0556], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0323, 0.0341, 0.0275, 0.0253, 0.0347, 0.0295, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:13:13,275 INFO [train.py:903] (2/4) Epoch 27, batch 2750, loss[loss=0.154, simple_loss=0.2417, pruned_loss=0.03316, over 19416.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2853, pruned_loss=0.06104, over 3816628.32 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:13:20,961 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-03 09:13:22,797 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:14:15,234 INFO [train.py:903] (2/4) Epoch 27, batch 2800, loss[loss=0.1749, simple_loss=0.2518, pruned_loss=0.04903, over 19788.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2849, pruned_loss=0.06083, over 3805598.49 frames. ], batch size: 48, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:14:49,794 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:04,812 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.838e+02 6.554e+02 8.151e+02 1.805e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-03 09:15:08,524 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:18,461 INFO [train.py:903] (2/4) Epoch 27, batch 2850, loss[loss=0.2198, simple_loss=0.3125, pruned_loss=0.06358, over 19510.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2847, pruned_loss=0.06092, over 3809139.54 frames. ], batch size: 56, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:15:24,596 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 09:15:26,271 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180383.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:15:33,053 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:40,147 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:48,629 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180401.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:16:20,685 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 09:16:22,938 INFO [train.py:903] (2/4) Epoch 27, batch 2900, loss[loss=0.2067, simple_loss=0.2877, pruned_loss=0.06285, over 18767.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.0605, over 3808332.93 frames. ], batch size: 74, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:13,080 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.922e+02 6.355e+02 8.136e+02 1.738e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-04-03 09:17:15,665 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1026, 4.4794, 4.8173, 4.8135, 1.9202, 4.5187, 3.9379, 4.5416], + device='cuda:2'), covar=tensor([0.1633, 0.0799, 0.0539, 0.0609, 0.5999, 0.0905, 0.0667, 0.1033], + device='cuda:2'), in_proj_covar=tensor([0.0813, 0.0777, 0.0987, 0.0865, 0.0860, 0.0749, 0.0580, 0.0915], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 09:17:26,071 INFO [train.py:903] (2/4) Epoch 27, batch 2950, loss[loss=0.1847, simple_loss=0.2658, pruned_loss=0.05179, over 19737.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06033, over 3818576.08 frames. 
], batch size: 51, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:45,971 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2500, 1.4476, 1.9118, 1.5484, 3.0570, 4.4393, 4.2894, 4.8612], + device='cuda:2'), covar=tensor([0.1726, 0.3899, 0.3460, 0.2443, 0.0724, 0.0293, 0.0194, 0.0245], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0332, 0.0365, 0.0271, 0.0255, 0.0197, 0.0220, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 09:17:56,642 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:27,180 INFO [train.py:903] (2/4) Epoch 27, batch 3000, loss[loss=0.1835, simple_loss=0.2619, pruned_loss=0.05259, over 19581.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2833, pruned_loss=0.06055, over 3823474.68 frames. ], batch size: 52, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:18:27,181 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 09:18:39,763 INFO [train.py:937] (2/4) Epoch 27, validation: loss=0.1667, simple_loss=0.2664, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 09:18:39,764 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 09:18:41,337 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:43,413 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 09:19:11,493 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:29,347 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:30,280 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.587e+02 5.827e+02 7.595e+02 1.750e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 09:19:41,906 INFO [train.py:903] (2/4) Epoch 27, batch 3050, loss[loss=0.2281, simple_loss=0.3081, pruned_loss=0.07408, over 19681.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06033, over 3814690.26 frames. ], batch size: 60, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:19:54,660 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:20:44,777 INFO [train.py:903] (2/4) Epoch 27, batch 3100, loss[loss=0.1933, simple_loss=0.2771, pruned_loss=0.05477, over 19782.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05981, over 3817750.20 frames. 
], batch size: 56, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:20:56,855 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4606, 1.5155, 1.7082, 1.7261, 2.6710, 2.3082, 2.8448, 1.1783], + device='cuda:2'), covar=tensor([0.2617, 0.4555, 0.3002, 0.2015, 0.1624, 0.2228, 0.1491, 0.4979], + device='cuda:2'), in_proj_covar=tensor([0.0551, 0.0668, 0.0749, 0.0505, 0.0633, 0.0545, 0.0668, 0.0569], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 09:20:59,827 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180639.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:21:20,347 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180657.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:29,246 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180664.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:21:33,516 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.719e+02 5.890e+02 7.652e+02 1.677e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 09:21:46,250 INFO [train.py:903] (2/4) Epoch 27, batch 3150, loss[loss=0.1703, simple_loss=0.247, pruned_loss=0.04684, over 19324.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.05965, over 3822179.34 frames. ], batch size: 44, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:21:52,946 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:52,987 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:11,338 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:15,864 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 09:22:49,877 INFO [train.py:903] (2/4) Epoch 27, batch 3200, loss[loss=0.1797, simple_loss=0.2552, pruned_loss=0.05211, over 19373.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2821, pruned_loss=0.06007, over 3815460.50 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:16,607 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:28,917 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180759.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:39,761 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.255e+02 6.624e+02 9.042e+02 3.460e+03, threshold=1.325e+03, percent-clipped=12.0 +2023-04-03 09:23:51,467 INFO [train.py:903] (2/4) Epoch 27, batch 3250, loss[loss=0.2036, simple_loss=0.2935, pruned_loss=0.05689, over 19653.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.06016, over 3817010.00 frames. 
], batch size: 60, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:59,389 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:36,472 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180813.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:45,518 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4954, 4.1626, 3.2015, 3.4519, 1.8497, 4.0397, 3.9687, 4.0405], + device='cuda:2'), covar=tensor([0.0598, 0.0918, 0.1771, 0.1153, 0.3044, 0.0752, 0.0937, 0.1224], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0425, 0.0511, 0.0357, 0.0405, 0.0452, 0.0447, 0.0475], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:24:54,217 INFO [train.py:903] (2/4) Epoch 27, batch 3300, loss[loss=0.2756, simple_loss=0.3384, pruned_loss=0.1064, over 19680.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2839, pruned_loss=0.06122, over 3805768.63 frames. ], batch size: 58, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:25:00,024 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 09:25:07,331 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 09:25:13,341 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:25:43,904 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.181e+02 6.566e+02 8.708e+02 1.878e+03, threshold=1.313e+03, percent-clipped=7.0 +2023-04-03 09:25:56,122 INFO [train.py:903] (2/4) Epoch 27, batch 3350, loss[loss=0.2265, simple_loss=0.3088, pruned_loss=0.07213, over 19454.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2854, pruned_loss=0.06214, over 3804565.21 frames. ], batch size: 64, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:26:51,575 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7888, 3.2840, 3.3246, 3.3500, 1.3357, 3.1960, 2.7775, 3.1076], + device='cuda:2'), covar=tensor([0.1943, 0.1154, 0.0900, 0.0977, 0.6049, 0.1219, 0.0934, 0.1421], + device='cuda:2'), in_proj_covar=tensor([0.0822, 0.0783, 0.0997, 0.0875, 0.0865, 0.0756, 0.0585, 0.0923], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 09:27:00,218 INFO [train.py:903] (2/4) Epoch 27, batch 3400, loss[loss=0.1732, simple_loss=0.257, pruned_loss=0.04469, over 19611.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06165, over 3818775.01 frames. ], batch size: 50, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:27:05,055 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180932.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:12,003 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:44,015 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:50,459 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 4.823e+02 5.841e+02 8.287e+02 1.627e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 09:27:59,604 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. 
limit=2.0 +2023-04-03 09:28:02,298 INFO [train.py:903] (2/4) Epoch 27, batch 3450, loss[loss=0.2106, simple_loss=0.2996, pruned_loss=0.06081, over 19480.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2844, pruned_loss=0.06154, over 3825186.57 frames. ], batch size: 64, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:28:06,944 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 09:28:18,956 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-03 09:29:04,451 INFO [train.py:903] (2/4) Epoch 27, batch 3500, loss[loss=0.1759, simple_loss=0.2582, pruned_loss=0.04682, over 19401.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06124, over 3809314.07 frames. ], batch size: 48, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:29:28,500 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181047.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:29:53,844 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.177e+02 6.114e+02 8.226e+02 1.424e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 09:29:55,515 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181069.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:06,537 INFO [train.py:903] (2/4) Epoch 27, batch 3550, loss[loss=0.1874, simple_loss=0.2648, pruned_loss=0.05499, over 19771.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06113, over 3817741.80 frames. ], batch size: 48, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:30:27,222 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:27,464 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:31:09,199 INFO [train.py:903] (2/4) Epoch 27, batch 3600, loss[loss=0.21, simple_loss=0.2928, pruned_loss=0.0636, over 19681.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06032, over 3832571.68 frames. ], batch size: 53, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:32:00,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 4.881e+02 5.766e+02 7.270e+02 1.755e+03, threshold=1.153e+03, percent-clipped=5.0 +2023-04-03 09:32:12,505 INFO [train.py:903] (2/4) Epoch 27, batch 3650, loss[loss=0.1523, simple_loss=0.2292, pruned_loss=0.03776, over 19322.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2827, pruned_loss=0.06074, over 3818580.78 frames. 
], batch size: 44, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:32:24,261 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:32:33,453 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0058, 3.7069, 2.5614, 3.2974, 0.9517, 3.6807, 3.4971, 3.6139], + device='cuda:2'), covar=tensor([0.0830, 0.1150, 0.1953, 0.0952, 0.3854, 0.0827, 0.1023, 0.1251], + device='cuda:2'), in_proj_covar=tensor([0.0529, 0.0428, 0.0514, 0.0358, 0.0408, 0.0453, 0.0448, 0.0476], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:32:52,766 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:33:15,875 INFO [train.py:903] (2/4) Epoch 27, batch 3700, loss[loss=0.2009, simple_loss=0.2856, pruned_loss=0.05812, over 19491.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06114, over 3816549.19 frames. ], batch size: 64, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:06,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.357e+02 6.375e+02 8.389e+02 2.807e+03, threshold=1.275e+03, percent-clipped=10.0 +2023-04-03 09:34:17,477 INFO [train.py:903] (2/4) Epoch 27, batch 3750, loss[loss=0.229, simple_loss=0.3114, pruned_loss=0.07335, over 18842.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2857, pruned_loss=0.06218, over 3797982.81 frames. ], batch size: 74, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:40,898 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6475, 1.4803, 1.5104, 2.2139, 1.6423, 1.8552, 2.0214, 1.6629], + device='cuda:2'), covar=tensor([0.0865, 0.0927, 0.1033, 0.0715, 0.0806, 0.0749, 0.0798, 0.0752], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0222, 0.0225, 0.0239, 0.0224, 0.0211, 0.0186, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 09:34:48,341 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:34:49,700 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:04,222 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181315.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:20,122 INFO [train.py:903] (2/4) Epoch 27, batch 3800, loss[loss=0.1766, simple_loss=0.2598, pruned_loss=0.04672, over 19618.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06226, over 3794955.69 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:35:20,553 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181328.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:48,537 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 09:36:10,636 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.827e+02 5.403e+02 6.773e+02 8.761e+02 1.536e+03, threshold=1.355e+03, percent-clipped=8.0 +2023-04-03 09:36:22,055 INFO [train.py:903] (2/4) Epoch 27, batch 3850, loss[loss=0.2177, simple_loss=0.306, pruned_loss=0.0647, over 19727.00 frames. 
], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06209, over 3813952.62 frames. ], batch size: 63, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:36:36,837 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2366, 2.3350, 2.4895, 3.0770, 2.3083, 2.8545, 2.4823, 2.2995], + device='cuda:2'), covar=tensor([0.4571, 0.4258, 0.2154, 0.2720, 0.4704, 0.2468, 0.5416, 0.3596], + device='cuda:2'), in_proj_covar=tensor([0.0929, 0.1007, 0.0738, 0.0948, 0.0906, 0.0847, 0.0856, 0.0804], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 09:37:25,669 INFO [train.py:903] (2/4) Epoch 27, batch 3900, loss[loss=0.2216, simple_loss=0.2999, pruned_loss=0.07161, over 19558.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06097, over 3817859.75 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:13,015 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181465.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:38:17,127 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.394e+02 5.123e+02 6.226e+02 8.139e+02 1.802e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-03 09:38:28,445 INFO [train.py:903] (2/4) Epoch 27, batch 3950, loss[loss=0.2098, simple_loss=0.2846, pruned_loss=0.06754, over 19372.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06042, over 3809486.09 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:30,589 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 09:38:43,854 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181490.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:39:30,597 INFO [train.py:903] (2/4) Epoch 27, batch 4000, loss[loss=0.1815, simple_loss=0.2658, pruned_loss=0.04858, over 19665.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.061, over 3804914.24 frames. ], batch size: 53, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:39:55,717 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:08,384 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:14,679 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 09:40:21,509 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.331e+02 6.990e+02 9.796e+02 2.756e+03, threshold=1.398e+03, percent-clipped=11.0 +2023-04-03 09:40:32,613 INFO [train.py:903] (2/4) Epoch 27, batch 4050, loss[loss=0.1775, simple_loss=0.2614, pruned_loss=0.04675, over 19626.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06109, over 3811574.16 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:40:38,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:41:35,051 INFO [train.py:903] (2/4) Epoch 27, batch 4100, loss[loss=0.2103, simple_loss=0.2978, pruned_loss=0.06137, over 19069.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06073, over 3822798.28 frames. 
], batch size: 69, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:42:07,735 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 09:42:13,761 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:42:26,971 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 5.142e+02 6.045e+02 7.647e+02 1.406e+03, threshold=1.209e+03, percent-clipped=1.0 +2023-04-03 09:42:35,660 INFO [train.py:903] (2/4) Epoch 27, batch 4150, loss[loss=0.2042, simple_loss=0.2893, pruned_loss=0.05956, over 19538.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06124, over 3838427.03 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:07,920 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4540, 1.2952, 1.4745, 1.4989, 3.0466, 1.1264, 2.3538, 3.4462], + device='cuda:2'), covar=tensor([0.0545, 0.2924, 0.2977, 0.1862, 0.0678, 0.2510, 0.1309, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0420, 0.0375, 0.0392, 0.0350, 0.0381, 0.0354, 0.0390, 0.0413], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:43:21,562 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3616, 1.3767, 1.5344, 1.5148, 1.8817, 1.8519, 1.8434, 0.6155], + device='cuda:2'), covar=tensor([0.2517, 0.4590, 0.2829, 0.2013, 0.1606, 0.2439, 0.1396, 0.5242], + device='cuda:2'), in_proj_covar=tensor([0.0553, 0.0669, 0.0751, 0.0506, 0.0635, 0.0547, 0.0670, 0.0571], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 09:43:38,695 INFO [train.py:903] (2/4) Epoch 27, batch 4200, loss[loss=0.2027, simple_loss=0.2895, pruned_loss=0.05798, over 19777.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2843, pruned_loss=0.06066, over 3834167.54 frames. ], batch size: 56, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:42,087 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 09:43:55,232 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6545, 1.4195, 1.6645, 1.5949, 3.2394, 1.2042, 2.4792, 3.7041], + device='cuda:2'), covar=tensor([0.0480, 0.2991, 0.2763, 0.1878, 0.0682, 0.2477, 0.1204, 0.0226], + device='cuda:2'), in_proj_covar=tensor([0.0422, 0.0377, 0.0394, 0.0352, 0.0382, 0.0356, 0.0392, 0.0415], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:44:30,940 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.083e+02 6.482e+02 7.948e+02 1.533e+03, threshold=1.296e+03, percent-clipped=4.0 +2023-04-03 09:44:36,236 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:44:40,472 INFO [train.py:903] (2/4) Epoch 27, batch 4250, loss[loss=0.2334, simple_loss=0.3217, pruned_loss=0.07254, over 19494.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2823, pruned_loss=0.05994, over 3829338.44 frames. ], batch size: 64, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:44:55,284 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-03 09:45:08,386 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 09:45:43,727 INFO [train.py:903] (2/4) Epoch 27, batch 4300, loss[loss=0.2104, simple_loss=0.2921, pruned_loss=0.06435, over 19676.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2831, pruned_loss=0.06028, over 3827397.72 frames. ], batch size: 58, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:45:46,625 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7837, 4.3312, 2.7518, 3.8556, 1.0150, 4.3494, 4.1754, 4.2748], + device='cuda:2'), covar=tensor([0.0569, 0.0960, 0.1882, 0.0846, 0.3924, 0.0639, 0.0923, 0.1064], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0427, 0.0512, 0.0357, 0.0406, 0.0452, 0.0448, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:46:16,696 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6123, 4.1429, 4.3165, 4.2883, 1.8595, 4.0535, 3.5648, 4.0916], + device='cuda:2'), covar=tensor([0.1676, 0.0821, 0.0604, 0.0721, 0.5551, 0.0902, 0.0691, 0.1038], + device='cuda:2'), in_proj_covar=tensor([0.0823, 0.0784, 0.0994, 0.0870, 0.0864, 0.0755, 0.0588, 0.0922], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 09:46:36,878 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 4.917e+02 6.450e+02 8.224e+02 1.543e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-04-03 09:46:40,242 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 09:46:47,387 INFO [train.py:903] (2/4) Epoch 27, batch 4350, loss[loss=0.1847, simple_loss=0.2715, pruned_loss=0.04899, over 19665.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2821, pruned_loss=0.06001, over 3836200.38 frames. ], batch size: 53, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:46:59,931 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9659, 4.5637, 2.8383, 3.9412, 1.1782, 4.4980, 4.3604, 4.4517], + device='cuda:2'), covar=tensor([0.0556, 0.0892, 0.1896, 0.0814, 0.3768, 0.0664, 0.0925, 0.1324], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0428, 0.0513, 0.0358, 0.0408, 0.0454, 0.0450, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:47:05,490 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:47:25,499 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-03 09:47:49,878 INFO [train.py:903] (2/4) Epoch 27, batch 4400, loss[loss=0.2263, simple_loss=0.3108, pruned_loss=0.07093, over 19191.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.282, pruned_loss=0.06006, over 3832145.89 frames. ], batch size: 69, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:15,001 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 09:48:24,221 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 09:48:42,706 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.593e+02 4.919e+02 6.507e+02 9.105e+02 1.976e+03, threshold=1.301e+03, percent-clipped=10.0 +2023-04-03 09:48:52,995 INFO [train.py:903] (2/4) Epoch 27, batch 4450, loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.0636, over 19566.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2819, pruned_loss=0.06017, over 3845429.48 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:53,752 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 09:48:59,279 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2948, 2.4289, 2.5990, 3.1199, 2.3623, 2.9199, 2.5868, 2.4384], + device='cuda:2'), covar=tensor([0.4134, 0.4014, 0.1891, 0.2625, 0.4602, 0.2287, 0.4811, 0.3309], + device='cuda:2'), in_proj_covar=tensor([0.0930, 0.1006, 0.0739, 0.0948, 0.0907, 0.0848, 0.0855, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 09:49:16,016 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:30,772 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:37,359 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9994, 3.6395, 2.5802, 3.2529, 1.2194, 3.6544, 3.4815, 3.5590], + device='cuda:2'), covar=tensor([0.0863, 0.1268, 0.2078, 0.0975, 0.3750, 0.0812, 0.1116, 0.1347], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0427, 0.0513, 0.0357, 0.0407, 0.0453, 0.0449, 0.0477], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:49:56,883 INFO [train.py:903] (2/4) Epoch 27, batch 4500, loss[loss=0.184, simple_loss=0.2622, pruned_loss=0.05288, over 19843.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2825, pruned_loss=0.06054, over 3840827.46 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:49:59,823 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:31,949 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:49,963 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.059e+02 6.218e+02 7.785e+02 2.105e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 09:51:00,201 INFO [train.py:903] (2/4) Epoch 27, batch 4550, loss[loss=0.1972, simple_loss=0.28, pruned_loss=0.05724, over 19851.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06029, over 3837021.62 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:51:09,779 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 09:51:32,197 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 09:51:35,765 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5027, 2.1042, 1.5854, 1.4232, 1.9298, 1.3030, 1.3427, 1.9107], + device='cuda:2'), covar=tensor([0.1031, 0.0836, 0.1138, 0.0935, 0.0604, 0.1356, 0.0799, 0.0490], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0321, 0.0337, 0.0273, 0.0251, 0.0345, 0.0292, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:52:02,916 INFO [train.py:903] (2/4) Epoch 27, batch 4600, loss[loss=0.2143, simple_loss=0.2958, pruned_loss=0.06636, over 19771.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06057, over 3813065.92 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:52:54,752 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 4.830e+02 5.724e+02 7.323e+02 1.391e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 09:53:05,193 INFO [train.py:903] (2/4) Epoch 27, batch 4650, loss[loss=0.1826, simple_loss=0.2679, pruned_loss=0.0486, over 19763.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2839, pruned_loss=0.06048, over 3800563.46 frames. ], batch size: 51, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:53:22,613 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 09:53:34,166 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 09:53:57,775 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6082, 1.6775, 1.7902, 1.8484, 2.7299, 2.3883, 2.8931, 1.4456], + device='cuda:2'), covar=tensor([0.2453, 0.4316, 0.2890, 0.1943, 0.1439, 0.2121, 0.1364, 0.4367], + device='cuda:2'), in_proj_covar=tensor([0.0552, 0.0667, 0.0751, 0.0505, 0.0636, 0.0544, 0.0668, 0.0570], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 09:54:07,706 INFO [train.py:903] (2/4) Epoch 27, batch 4700, loss[loss=0.2096, simple_loss=0.2886, pruned_loss=0.06532, over 19586.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06058, over 3817264.66 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:54:30,891 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 09:54:51,535 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:54:59,127 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.994e+02 4.730e+02 5.895e+02 7.660e+02 1.174e+03, threshold=1.179e+03, percent-clipped=2.0 +2023-04-03 09:55:10,413 INFO [train.py:903] (2/4) Epoch 27, batch 4750, loss[loss=0.236, simple_loss=0.2937, pruned_loss=0.08917, over 19428.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2837, pruned_loss=0.0608, over 3817971.47 frames. ], batch size: 48, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:55:22,527 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182288.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:55:36,003 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:12,231 INFO [train.py:903] (2/4) Epoch 27, batch 4800, loss[loss=0.195, simple_loss=0.2847, pruned_loss=0.05268, over 19667.00 frames. 
], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06039, over 3822223.10 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:56:26,912 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182340.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:27,636 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-03 09:56:42,895 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2672, 2.1813, 1.9450, 2.1895, 2.0052, 1.8860, 1.7979, 2.2113], + device='cuda:2'), covar=tensor([0.1050, 0.1365, 0.1486, 0.1122, 0.1387, 0.0561, 0.1562, 0.0716], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0257, 0.0307, 0.0256, 0.0321, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 09:57:03,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.862e+02 5.780e+02 7.243e+02 1.108e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-03 09:57:13,597 INFO [train.py:903] (2/4) Epoch 27, batch 4850, loss[loss=0.2083, simple_loss=0.2903, pruned_loss=0.06311, over 19615.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.0601, over 3830760.71 frames. ], batch size: 57, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:57:36,905 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 09:57:58,332 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 09:57:58,644 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8111, 3.2986, 3.3551, 3.3555, 1.4152, 3.2211, 2.8040, 3.1390], + device='cuda:2'), covar=tensor([0.1891, 0.1056, 0.0875, 0.1000, 0.5696, 0.1060, 0.0873, 0.1395], + device='cuda:2'), in_proj_covar=tensor([0.0819, 0.0782, 0.0990, 0.0869, 0.0860, 0.0753, 0.0587, 0.0919], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 09:58:03,902 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 09:58:03,926 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 09:58:13,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 09:58:14,421 INFO [train.py:903] (2/4) Epoch 27, batch 4900, loss[loss=0.1915, simple_loss=0.2593, pruned_loss=0.06181, over 19745.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.283, pruned_loss=0.06013, over 3820277.43 frames. ], batch size: 45, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:58:34,882 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 09:58:50,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182455.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:59:07,219 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.786e+02 5.871e+02 7.388e+02 1.622e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 09:59:18,873 INFO [train.py:903] (2/4) Epoch 27, batch 4950, loss[loss=0.2281, simple_loss=0.2998, pruned_loss=0.07825, over 19690.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.05969, over 3826295.01 frames. 
], batch size: 53, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:59:36,558 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 10:00:00,876 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 10:00:21,850 INFO [train.py:903] (2/4) Epoch 27, batch 5000, loss[loss=0.1997, simple_loss=0.2846, pruned_loss=0.05744, over 19668.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2814, pruned_loss=0.05952, over 3826138.38 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:00:27,427 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182532.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:00:32,549 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 10:00:44,437 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 10:01:15,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.878e+02 5.976e+02 7.448e+02 1.686e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 10:01:25,327 INFO [train.py:903] (2/4) Epoch 27, batch 5050, loss[loss=0.1711, simple_loss=0.2589, pruned_loss=0.04161, over 19589.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2815, pruned_loss=0.05921, over 3816087.12 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:02,790 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 10:02:27,487 INFO [train.py:903] (2/4) Epoch 27, batch 5100, loss[loss=0.1963, simple_loss=0.2839, pruned_loss=0.05431, over 19302.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.0597, over 3796611.10 frames. ], batch size: 66, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:31,269 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8386, 3.3168, 3.4049, 3.3849, 1.3632, 3.2533, 2.8211, 3.2085], + device='cuda:2'), covar=tensor([0.1577, 0.0812, 0.0635, 0.0749, 0.5248, 0.0926, 0.0674, 0.1019], + device='cuda:2'), in_proj_covar=tensor([0.0811, 0.0777, 0.0982, 0.0864, 0.0854, 0.0746, 0.0582, 0.0912], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 10:02:37,768 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 10:02:41,996 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 10:02:46,620 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 10:02:46,775 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:03:19,757 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.110e+02 6.408e+02 8.268e+02 2.195e+03, threshold=1.282e+03, percent-clipped=9.0 +2023-04-03 10:03:30,281 INFO [train.py:903] (2/4) Epoch 27, batch 5150, loss[loss=0.1939, simple_loss=0.2702, pruned_loss=0.0588, over 19604.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2811, pruned_loss=0.05908, over 3807214.78 frames. 
], batch size: 50, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:03:44,254 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 10:04:12,469 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182711.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:21,110 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:04:24,663 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6847, 4.1406, 4.3088, 4.3095, 1.8111, 4.0857, 3.5874, 4.0512], + device='cuda:2'), covar=tensor([0.1608, 0.0890, 0.0704, 0.0743, 0.5787, 0.0975, 0.0717, 0.1206], + device='cuda:2'), in_proj_covar=tensor([0.0812, 0.0779, 0.0984, 0.0867, 0.0856, 0.0748, 0.0583, 0.0913], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 10:04:26,034 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3446, 2.0436, 1.6722, 1.4317, 1.8426, 1.3367, 1.3407, 1.8250], + device='cuda:2'), covar=tensor([0.1000, 0.0843, 0.1083, 0.0909, 0.0592, 0.1315, 0.0703, 0.0470], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0319, 0.0335, 0.0271, 0.0250, 0.0343, 0.0291, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:04:34,485 INFO [train.py:903] (2/4) Epoch 27, batch 5200, loss[loss=0.1907, simple_loss=0.2835, pruned_loss=0.04891, over 19694.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2806, pruned_loss=0.0591, over 3820838.24 frames. ], batch size: 53, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:04:45,084 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:50,575 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 10:05:11,627 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:05:19,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-03 10:05:28,497 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.726e+02 5.796e+02 7.282e+02 1.371e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-03 10:05:35,289 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 10:05:37,678 INFO [train.py:903] (2/4) Epoch 27, batch 5250, loss[loss=0.2057, simple_loss=0.2875, pruned_loss=0.06192, over 19759.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2807, pruned_loss=0.05883, over 3826413.51 frames. ], batch size: 63, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:05:47,605 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. 
limit=2.0 +2023-04-03 10:05:51,902 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0850, 1.9747, 1.9218, 1.7449, 1.6024, 1.6776, 0.6132, 1.1444], + device='cuda:2'), covar=tensor([0.0711, 0.0739, 0.0500, 0.0892, 0.1265, 0.1010, 0.1454, 0.1103], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0363, 0.0370, 0.0393, 0.0472, 0.0397, 0.0347, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 10:06:39,520 INFO [train.py:903] (2/4) Epoch 27, batch 5300, loss[loss=0.1879, simple_loss=0.2742, pruned_loss=0.05081, over 19760.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2815, pruned_loss=0.05915, over 3833208.24 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:06:57,240 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-03 10:06:57,461 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 10:06:59,522 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.04 vs. limit=5.0 +2023-04-03 10:07:34,150 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.440e+02 4.985e+02 5.848e+02 7.587e+02 2.195e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 10:07:37,812 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:39,980 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:42,166 INFO [train.py:903] (2/4) Epoch 27, batch 5350, loss[loss=0.1739, simple_loss=0.2538, pruned_loss=0.04694, over 19740.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05931, over 3828230.17 frames. ], batch size: 51, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:07:44,852 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:08:16,952 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 10:08:46,432 INFO [train.py:903] (2/4) Epoch 27, batch 5400, loss[loss=0.1979, simple_loss=0.2857, pruned_loss=0.0551, over 19598.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.05962, over 3832707.92 frames. ], batch size: 57, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:09:41,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.624e+02 4.932e+02 6.009e+02 7.341e+02 1.388e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 10:09:49,115 INFO [train.py:903] (2/4) Epoch 27, batch 5450, loss[loss=0.1744, simple_loss=0.2612, pruned_loss=0.04376, over 19384.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05988, over 3804728.69 frames. ], batch size: 48, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:10:04,451 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:35,606 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183014.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:40,367 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. 
limit=2.0 +2023-04-03 10:10:50,991 INFO [train.py:903] (2/4) Epoch 27, batch 5500, loss[loss=0.1939, simple_loss=0.2851, pruned_loss=0.05134, over 19682.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2838, pruned_loss=0.06011, over 3801875.65 frames. ], batch size: 53, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:05,254 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183039.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:11:13,898 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 10:11:33,638 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. limit=5.0 +2023-04-03 10:11:45,358 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.032e+02 6.184e+02 7.491e+02 1.557e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 10:11:52,114 INFO [train.py:903] (2/4) Epoch 27, batch 5550, loss[loss=0.2313, simple_loss=0.3082, pruned_loss=0.0772, over 18671.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.0604, over 3813602.21 frames. ], batch size: 74, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:52,640 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6353, 1.7408, 2.1872, 1.9190, 3.2016, 2.7127, 3.5898, 1.6434], + device='cuda:2'), covar=tensor([0.2596, 0.4454, 0.2826, 0.2006, 0.1574, 0.2186, 0.1513, 0.4631], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0672, 0.0758, 0.0509, 0.0638, 0.0548, 0.0673, 0.0575], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:11:57,902 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 10:12:46,713 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 10:12:56,193 INFO [train.py:903] (2/4) Epoch 27, batch 5600, loss[loss=0.2452, simple_loss=0.3197, pruned_loss=0.08541, over 19831.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.06013, over 3820389.34 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:13:48,492 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9895, 3.0239, 3.3121, 3.8846, 3.1099, 3.6565, 3.2864, 3.1186], + device='cuda:2'), covar=tensor([0.3416, 0.3204, 0.1493, 0.1888, 0.3237, 0.1691, 0.3872, 0.2647], + device='cuda:2'), in_proj_covar=tensor([0.0933, 0.1012, 0.0742, 0.0950, 0.0910, 0.0849, 0.0859, 0.0807], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:13:51,543 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.790e+02 6.069e+02 7.863e+02 1.689e+03, threshold=1.214e+03, percent-clipped=3.0 +2023-04-03 10:13:56,131 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6530, 2.7635, 2.3873, 2.7529, 2.6856, 2.3235, 2.2583, 2.6387], + device='cuda:2'), covar=tensor([0.0924, 0.1409, 0.1381, 0.0970, 0.1214, 0.0530, 0.1333, 0.0688], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0357, 0.0317, 0.0257, 0.0306, 0.0256, 0.0318, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:13:59,318 INFO [train.py:903] (2/4) Epoch 27, batch 5650, loss[loss=0.236, simple_loss=0.3158, pruned_loss=0.07816, over 18266.00 frames. 
], tot_loss[loss=0.2016, simple_loss=0.283, pruned_loss=0.06007, over 3808809.15 frames. ], batch size: 83, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:14:34,160 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9298, 2.7852, 2.0244, 2.0742, 1.9517, 2.3714, 1.0030, 2.0105], + device='cuda:2'), covar=tensor([0.0842, 0.0696, 0.0930, 0.1344, 0.1286, 0.1310, 0.1695, 0.1220], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0364, 0.0372, 0.0394, 0.0472, 0.0398, 0.0347, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 10:14:36,241 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4103, 1.4721, 1.7652, 1.6937, 2.3278, 2.0646, 2.4569, 1.0766], + device='cuda:2'), covar=tensor([0.2944, 0.5046, 0.3197, 0.2377, 0.1894, 0.2785, 0.1875, 0.5560], + device='cuda:2'), in_proj_covar=tensor([0.0555, 0.0672, 0.0758, 0.0510, 0.0639, 0.0548, 0.0673, 0.0577], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:14:44,841 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 10:14:49,511 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:14:56,372 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:01,003 INFO [train.py:903] (2/4) Epoch 27, batch 5700, loss[loss=0.1821, simple_loss=0.2556, pruned_loss=0.05428, over 19292.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.0594, over 3820025.90 frames. ], batch size: 44, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:15:25,128 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:54,498 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.103e+02 5.954e+02 7.593e+02 1.308e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-03 10:15:54,946 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:59,204 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 10:16:01,591 INFO [train.py:903] (2/4) Epoch 27, batch 5750, loss[loss=0.1879, simple_loss=0.2555, pruned_loss=0.06013, over 19117.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2832, pruned_loss=0.06025, over 3815188.13 frames. ], batch size: 42, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:16:08,286 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 10:16:12,848 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 10:17:05,411 INFO [train.py:903] (2/4) Epoch 27, batch 5800, loss[loss=0.1889, simple_loss=0.2729, pruned_loss=0.05247, over 19768.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.05999, over 3802550.81 frames. 
], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:17:12,679 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183333.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:19,552 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:59,582 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.364e+02 7.027e+02 9.052e+02 1.891e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-03 10:18:07,742 INFO [train.py:903] (2/4) Epoch 27, batch 5850, loss[loss=0.2089, simple_loss=0.296, pruned_loss=0.06092, over 19674.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06106, over 3813630.54 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:18:12,173 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 10:18:12,959 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9823, 2.0050, 1.6873, 2.0439, 1.7752, 1.7439, 1.7279, 1.9461], + device='cuda:2'), covar=tensor([0.1111, 0.1277, 0.1466, 0.0992, 0.1359, 0.0570, 0.1383, 0.0738], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0355, 0.0316, 0.0256, 0.0305, 0.0255, 0.0317, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:18:31,484 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 10:18:46,150 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9625, 2.1031, 2.2859, 2.6280, 2.0491, 2.5372, 2.2577, 2.0466], + device='cuda:2'), covar=tensor([0.4175, 0.3782, 0.1950, 0.2548, 0.4091, 0.2158, 0.4941, 0.3556], + device='cuda:2'), in_proj_covar=tensor([0.0930, 0.1008, 0.0740, 0.0946, 0.0907, 0.0847, 0.0858, 0.0803], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:19:07,143 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:19:08,993 INFO [train.py:903] (2/4) Epoch 27, batch 5900, loss[loss=0.191, simple_loss=0.2867, pruned_loss=0.04763, over 17421.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.284, pruned_loss=0.06139, over 3799825.68 frames. ], batch size: 101, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:19:09,040 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 10:19:32,075 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 10:20:03,305 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 4.868e+02 5.663e+02 7.388e+02 1.454e+03, threshold=1.133e+03, percent-clipped=1.0 +2023-04-03 10:20:10,242 INFO [train.py:903] (2/4) Epoch 27, batch 5950, loss[loss=0.1902, simple_loss=0.2809, pruned_loss=0.04971, over 19783.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2829, pruned_loss=0.06057, over 3816165.55 frames. ], batch size: 56, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,623 INFO [train.py:903] (2/4) Epoch 27, batch 6000, loss[loss=0.1913, simple_loss=0.2805, pruned_loss=0.05105, over 19542.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2836, pruned_loss=0.06106, over 3825643.09 frames. 
], batch size: 56, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,623 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 10:21:25,595 INFO [train.py:937] (2/4) Epoch 27, validation: loss=0.1675, simple_loss=0.2669, pruned_loss=0.03401, over 944034.00 frames. +2023-04-03 10:21:25,596 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 10:21:39,352 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-03 10:22:22,440 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.376e+02 6.105e+02 7.773e+02 1.848e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 10:22:28,412 INFO [train.py:903] (2/4) Epoch 27, batch 6050, loss[loss=0.1816, simple_loss=0.2615, pruned_loss=0.05082, over 19664.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.06159, over 3822570.12 frames. ], batch size: 53, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:22:43,127 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183589.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:50,960 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:55,500 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5202, 2.1241, 1.6548, 1.5532, 2.0127, 1.3760, 1.3357, 1.8294], + device='cuda:2'), covar=tensor([0.1149, 0.0888, 0.1135, 0.0889, 0.0598, 0.1390, 0.0871, 0.0622], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0319, 0.0336, 0.0273, 0.0250, 0.0344, 0.0292, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:23:08,791 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-03 10:23:14,134 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:20,844 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183620.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:29,937 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-03 10:23:31,597 INFO [train.py:903] (2/4) Epoch 27, batch 6100, loss[loss=0.1976, simple_loss=0.2829, pruned_loss=0.05613, over 19471.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2845, pruned_loss=0.06154, over 3818966.63 frames. 
], batch size: 49, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:23:37,187 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:24:00,179 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9846, 2.0677, 2.3295, 2.5140, 1.9088, 2.3959, 2.2944, 2.1678], + device='cuda:2'), covar=tensor([0.4449, 0.3857, 0.1946, 0.2454, 0.4187, 0.2312, 0.5242, 0.3422], + device='cuda:2'), in_proj_covar=tensor([0.0935, 0.1011, 0.0743, 0.0950, 0.0912, 0.0850, 0.0862, 0.0805], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:24:27,766 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.311e+02 5.176e+02 6.035e+02 7.763e+02 1.396e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-03 10:24:28,042 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2406, 5.6825, 3.3905, 5.0370, 0.9414, 5.9120, 5.6309, 5.8784], + device='cuda:2'), covar=tensor([0.0406, 0.0926, 0.1630, 0.0759, 0.4313, 0.0498, 0.0742, 0.0890], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0428, 0.0516, 0.0357, 0.0410, 0.0454, 0.0451, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:24:33,563 INFO [train.py:903] (2/4) Epoch 27, batch 6150, loss[loss=0.2218, simple_loss=0.3023, pruned_loss=0.07064, over 19706.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06139, over 3816911.56 frames. ], batch size: 63, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:24:39,635 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1079, 3.7851, 2.9655, 3.3209, 1.6183, 3.7094, 3.5904, 3.7040], + device='cuda:2'), covar=tensor([0.0869, 0.1110, 0.1831, 0.0914, 0.3242, 0.0819, 0.1081, 0.1459], + device='cuda:2'), in_proj_covar=tensor([0.0529, 0.0429, 0.0517, 0.0357, 0.0411, 0.0455, 0.0452, 0.0480], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:24:53,533 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6212, 1.4350, 1.5911, 1.7411, 3.2052, 1.3174, 2.4754, 3.7168], + device='cuda:2'), covar=tensor([0.0496, 0.2855, 0.3027, 0.1755, 0.0652, 0.2425, 0.1269, 0.0215], + device='cuda:2'), in_proj_covar=tensor([0.0425, 0.0379, 0.0397, 0.0353, 0.0385, 0.0358, 0.0396, 0.0416], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:25:01,300 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 10:25:35,000 INFO [train.py:903] (2/4) Epoch 27, batch 6200, loss[loss=0.2209, simple_loss=0.3003, pruned_loss=0.07073, over 19575.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.06077, over 3821813.11 frames. 
], batch size: 52, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:26:08,947 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1255, 5.5680, 3.3289, 4.9157, 1.2006, 5.7428, 5.4827, 5.7124], + device='cuda:2'), covar=tensor([0.0377, 0.0824, 0.1740, 0.0702, 0.4059, 0.0482, 0.0795, 0.1063], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0427, 0.0517, 0.0357, 0.0409, 0.0453, 0.0451, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:26:28,525 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183770.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:26:31,934 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.839e+02 5.826e+02 7.637e+02 1.855e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 10:26:37,745 INFO [train.py:903] (2/4) Epoch 27, batch 6250, loss[loss=0.1751, simple_loss=0.253, pruned_loss=0.04853, over 19402.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2841, pruned_loss=0.06054, over 3808285.00 frames. ], batch size: 48, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:27:08,109 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 10:27:40,819 INFO [train.py:903] (2/4) Epoch 27, batch 6300, loss[loss=0.2376, simple_loss=0.3185, pruned_loss=0.07837, over 19723.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06146, over 3810166.99 frames. ], batch size: 63, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:20,311 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:28:36,678 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.928e+02 5.882e+02 7.199e+02 1.705e+03, threshold=1.176e+03, percent-clipped=2.0 +2023-04-03 10:28:43,616 INFO [train.py:903] (2/4) Epoch 27, batch 6350, loss[loss=0.2019, simple_loss=0.2917, pruned_loss=0.05608, over 19678.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06117, over 3811475.07 frames. ], batch size: 59, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:51,892 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:29:18,583 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0888, 2.0384, 1.8078, 2.1268, 1.8930, 1.7824, 1.7383, 2.0359], + device='cuda:2'), covar=tensor([0.1125, 0.1479, 0.1542, 0.1157, 0.1477, 0.0617, 0.1523, 0.0758], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0358, 0.0317, 0.0256, 0.0305, 0.0256, 0.0320, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:29:43,984 INFO [train.py:903] (2/4) Epoch 27, batch 6400, loss[loss=0.1992, simple_loss=0.2865, pruned_loss=0.0559, over 19618.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2847, pruned_loss=0.06106, over 3814096.55 frames. 
], batch size: 57, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:30:39,969 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.757e+02 6.035e+02 7.575e+02 1.901e+03, threshold=1.207e+03, percent-clipped=7.0 +2023-04-03 10:30:43,701 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:30:45,873 INFO [train.py:903] (2/4) Epoch 27, batch 6450, loss[loss=0.2271, simple_loss=0.2979, pruned_loss=0.07818, over 19839.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2844, pruned_loss=0.06097, over 3825865.44 frames. ], batch size: 52, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:28,486 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 10:31:31,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 10:31:50,533 INFO [train.py:903] (2/4) Epoch 27, batch 6500, loss[loss=0.2103, simple_loss=0.2924, pruned_loss=0.06412, over 18599.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2854, pruned_loss=0.061, over 3832749.27 frames. ], batch size: 74, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:52,979 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 10:32:16,626 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 10:32:40,179 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:32:46,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.542e+02 5.565e+02 7.286e+02 1.442e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-04-03 10:32:53,090 INFO [train.py:903] (2/4) Epoch 27, batch 6550, loss[loss=0.2141, simple_loss=0.2959, pruned_loss=0.06611, over 19667.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2852, pruned_loss=0.0611, over 3833882.17 frames. ], batch size: 58, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:33:08,652 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:33:55,011 INFO [train.py:903] (2/4) Epoch 27, batch 6600, loss[loss=0.2032, simple_loss=0.2894, pruned_loss=0.05851, over 19587.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2843, pruned_loss=0.06073, over 3843334.52 frames. ], batch size: 57, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:34:11,894 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:43,151 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:50,798 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.027e+02 4.833e+02 5.798e+02 7.178e+02 1.551e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 10:34:58,097 INFO [train.py:903] (2/4) Epoch 27, batch 6650, loss[loss=0.1733, simple_loss=0.252, pruned_loss=0.04725, over 19730.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06018, over 3838941.27 frames. 
], batch size: 45, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:35:30,809 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184204.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:35:59,892 INFO [train.py:903] (2/4) Epoch 27, batch 6700, loss[loss=0.1565, simple_loss=0.2384, pruned_loss=0.0373, over 19615.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06045, over 3826448.88 frames. ], batch size: 50, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:36:52,214 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.122e+02 6.084e+02 8.596e+02 2.606e+03, threshold=1.217e+03, percent-clipped=9.0 +2023-04-03 10:36:57,985 INFO [train.py:903] (2/4) Epoch 27, batch 6750, loss[loss=0.1829, simple_loss=0.263, pruned_loss=0.05144, over 19681.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.06067, over 3829160.76 frames. ], batch size: 53, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:37:00,437 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 10:37:31,973 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5124, 2.5415, 2.2229, 2.6492, 2.3563, 2.2671, 2.0303, 2.5320], + device='cuda:2'), covar=tensor([0.1066, 0.1590, 0.1610, 0.1216, 0.1545, 0.0605, 0.1605, 0.0778], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0358, 0.0317, 0.0257, 0.0306, 0.0257, 0.0320, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:37:44,344 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:37:54,075 INFO [train.py:903] (2/4) Epoch 27, batch 6800, loss[loss=0.2424, simple_loss=0.3198, pruned_loss=0.08251, over 19280.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.06042, over 3838155.57 frames. ], batch size: 70, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:38:15,917 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:38:20,855 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 10:38:39,723 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 10:38:40,180 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 10:38:43,611 INFO [train.py:903] (2/4) Epoch 28, batch 0, loss[loss=0.2165, simple_loss=0.2848, pruned_loss=0.07407, over 19617.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2848, pruned_loss=0.07407, over 19617.00 frames. ], batch size: 50, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:38:43,611 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 10:38:54,486 INFO [train.py:937] (2/4) Epoch 28, validation: loss=0.1665, simple_loss=0.2666, pruned_loss=0.03316, over 944034.00 frames. +2023-04-03 10:38:54,488 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 10:39:08,336 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-03 10:39:14,446 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:15,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.715e+02 5.190e+02 6.304e+02 8.212e+02 1.288e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-03 10:39:21,688 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:42,075 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4272, 1.4866, 1.7414, 1.6540, 2.4317, 2.0742, 2.5110, 1.1410], + device='cuda:2'), covar=tensor([0.2921, 0.4880, 0.3105, 0.2268, 0.1712, 0.2572, 0.1610, 0.5270], + device='cuda:2'), in_proj_covar=tensor([0.0555, 0.0670, 0.0753, 0.0507, 0.0639, 0.0546, 0.0670, 0.0573], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 10:39:57,744 INFO [train.py:903] (2/4) Epoch 28, batch 50, loss[loss=0.1891, simple_loss=0.2793, pruned_loss=0.04946, over 19565.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05956, over 857329.35 frames. ], batch size: 61, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:40:04,875 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:40:32,266 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 10:40:58,085 INFO [train.py:903] (2/4) Epoch 28, batch 100, loss[loss=0.1496, simple_loss=0.2288, pruned_loss=0.03518, over 19731.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2816, pruned_loss=0.05832, over 1522985.95 frames. ], batch size: 46, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:41:08,329 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 10:41:18,602 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 4.572e+02 5.776e+02 7.316e+02 1.195e+03, threshold=1.155e+03, percent-clipped=0.0 +2023-04-03 10:41:56,054 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:41:58,109 INFO [train.py:903] (2/4) Epoch 28, batch 150, loss[loss=0.2233, simple_loss=0.3024, pruned_loss=0.07215, over 19594.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05972, over 2026771.43 frames. ], batch size: 61, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:42:21,074 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2685, 3.4458, 2.0259, 2.2411, 3.1568, 1.8273, 1.7388, 2.4156], + device='cuda:2'), covar=tensor([0.1398, 0.0644, 0.1124, 0.0901, 0.0562, 0.1298, 0.1013, 0.0664], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0317, 0.0334, 0.0270, 0.0248, 0.0341, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:42:24,592 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:42:57,425 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-03 10:42:58,595 INFO [train.py:903] (2/4) Epoch 28, batch 200, loss[loss=0.1617, simple_loss=0.2431, pruned_loss=0.04009, over 19375.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05847, over 2432890.35 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:43:19,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 4.969e+02 6.258e+02 7.516e+02 2.266e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 10:43:22,302 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184575.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:51,823 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5711, 1.2698, 1.4610, 1.2679, 2.2407, 1.1565, 2.2314, 2.5682], + device='cuda:2'), covar=tensor([0.0743, 0.2814, 0.2894, 0.1719, 0.0911, 0.2024, 0.0980, 0.0451], + device='cuda:2'), in_proj_covar=tensor([0.0427, 0.0380, 0.0398, 0.0354, 0.0385, 0.0359, 0.0397, 0.0417], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:43:51,908 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:59,859 INFO [train.py:903] (2/4) Epoch 28, batch 250, loss[loss=0.2264, simple_loss=0.3018, pruned_loss=0.07554, over 19585.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2826, pruned_loss=0.05978, over 2739608.66 frames. ], batch size: 52, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:01,664 INFO [train.py:903] (2/4) Epoch 28, batch 300, loss[loss=0.2305, simple_loss=0.3052, pruned_loss=0.07784, over 19304.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.05964, over 2982323.31 frames. ], batch size: 66, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:22,243 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.936e+02 6.396e+02 8.049e+02 1.564e+03, threshold=1.279e+03, percent-clipped=7.0 +2023-04-03 10:46:02,789 INFO [train.py:903] (2/4) Epoch 28, batch 350, loss[loss=0.2021, simple_loss=0.2892, pruned_loss=0.05755, over 19583.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06023, over 3156172.89 frames. ], batch size: 57, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:46:06,328 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:46:20,191 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184721.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:46:39,085 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1758, 2.8477, 2.3326, 2.2595, 2.0486, 2.5415, 1.0496, 2.1033], + device='cuda:2'), covar=tensor([0.0691, 0.0689, 0.0701, 0.1197, 0.1241, 0.1138, 0.1523, 0.1110], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0364, 0.0370, 0.0393, 0.0471, 0.0395, 0.0346, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 10:47:04,881 INFO [train.py:903] (2/4) Epoch 28, batch 400, loss[loss=0.2163, simple_loss=0.3009, pruned_loss=0.06589, over 18761.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.283, pruned_loss=0.06021, over 3308597.40 frames. 
], batch size: 74, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:47:24,995 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.910e+02 5.909e+02 7.518e+02 1.907e+03, threshold=1.182e+03, percent-clipped=3.0 +2023-04-03 10:47:38,218 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:05,133 INFO [train.py:903] (2/4) Epoch 28, batch 450, loss[loss=0.2455, simple_loss=0.3159, pruned_loss=0.08753, over 19682.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05989, over 3433654.62 frames. ], batch size: 60, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:48:07,881 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184808.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:38,479 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 10:48:39,506 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 10:48:43,515 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:56,829 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:49:03,928 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9549, 1.9406, 1.9069, 1.8266, 1.6422, 1.8055, 1.0157, 1.4162], + device='cuda:2'), covar=tensor([0.0588, 0.0631, 0.0443, 0.0665, 0.0943, 0.0838, 0.1347, 0.0890], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0365, 0.0371, 0.0395, 0.0474, 0.0397, 0.0348, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 10:49:06,883 INFO [train.py:903] (2/4) Epoch 28, batch 500, loss[loss=0.2111, simple_loss=0.2948, pruned_loss=0.06368, over 19677.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2823, pruned_loss=0.05982, over 3507210.37 frames. ], batch size: 53, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:49:28,422 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.109e+02 6.404e+02 8.256e+02 1.456e+03, threshold=1.281e+03, percent-clipped=5.0 +2023-04-03 10:49:41,505 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:50:09,142 INFO [train.py:903] (2/4) Epoch 28, batch 550, loss[loss=0.2185, simple_loss=0.3013, pruned_loss=0.06788, over 19480.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2825, pruned_loss=0.0598, over 3569542.62 frames. ], batch size: 64, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:11,444 INFO [train.py:903] (2/4) Epoch 28, batch 600, loss[loss=0.2222, simple_loss=0.2794, pruned_loss=0.08253, over 19261.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2815, pruned_loss=0.05971, over 3620378.40 frames. 
], batch size: 44, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:12,935 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184957.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:19,913 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:31,269 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.007e+02 6.276e+02 8.222e+02 1.849e+03, threshold=1.255e+03, percent-clipped=3.0 +2023-04-03 10:51:50,599 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 10:52:14,353 INFO [train.py:903] (2/4) Epoch 28, batch 650, loss[loss=0.1977, simple_loss=0.2896, pruned_loss=0.05287, over 19496.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2812, pruned_loss=0.05938, over 3670306.50 frames. ], batch size: 64, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:16,112 INFO [train.py:903] (2/4) Epoch 28, batch 700, loss[loss=0.2037, simple_loss=0.2878, pruned_loss=0.05979, over 18428.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2809, pruned_loss=0.05889, over 3702077.04 frames. ], batch size: 84, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:31,659 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2307, 1.8474, 1.9730, 3.0973, 2.1559, 2.4223, 2.4999, 2.1402], + device='cuda:2'), covar=tensor([0.0861, 0.0985, 0.0998, 0.0663, 0.0816, 0.0780, 0.0861, 0.0752], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0226, 0.0228, 0.0240, 0.0227, 0.0215, 0.0189, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 10:53:38,021 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 4.589e+02 5.550e+02 7.126e+02 1.317e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-03 10:53:45,193 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:01,398 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:19,791 INFO [train.py:903] (2/4) Epoch 28, batch 750, loss[loss=0.3058, simple_loss=0.3509, pruned_loss=0.1304, over 13504.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.0599, over 3726576.62 frames. ], batch size: 136, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:54:34,020 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:10,728 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185147.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:20,946 INFO [train.py:903] (2/4) Epoch 28, batch 800, loss[loss=0.1664, simple_loss=0.2488, pruned_loss=0.04201, over 19755.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2821, pruned_loss=0.05964, over 3758818.14 frames. ], batch size: 46, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:55:34,896 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 10:55:41,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.493e+02 6.558e+02 7.858e+02 2.224e+03, threshold=1.312e+03, percent-clipped=8.0 +2023-04-03 10:55:53,708 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0766, 1.3469, 1.6676, 0.9808, 2.3937, 3.0551, 2.7758, 3.2541], + device='cuda:2'), covar=tensor([0.1694, 0.3882, 0.3515, 0.2850, 0.0629, 0.0241, 0.0274, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0271, 0.0253, 0.0195, 0.0219, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 10:56:24,254 INFO [train.py:903] (2/4) Epoch 28, batch 850, loss[loss=0.1769, simple_loss=0.2555, pruned_loss=0.04916, over 19394.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2814, pruned_loss=0.05939, over 3783210.02 frames. ], batch size: 48, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:56:39,427 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:56:51,611 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185228.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:11,574 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:15,610 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 10:57:24,923 INFO [train.py:903] (2/4) Epoch 28, batch 900, loss[loss=0.1985, simple_loss=0.2869, pruned_loss=0.05508, over 19601.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2819, pruned_loss=0.05998, over 3797666.25 frames. ], batch size: 57, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:57:47,703 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 4.798e+02 5.781e+02 7.336e+02 1.381e+03, threshold=1.156e+03, percent-clipped=1.0 +2023-04-03 10:58:05,475 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9802, 1.5936, 1.8722, 2.8007, 1.8691, 2.1462, 2.2041, 2.0528], + device='cuda:2'), covar=tensor([0.0958, 0.1155, 0.1115, 0.0857, 0.1039, 0.0898, 0.0978, 0.0807], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0225, 0.0227, 0.0240, 0.0228, 0.0215, 0.0189, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 10:58:21,551 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:58:28,121 INFO [train.py:903] (2/4) Epoch 28, batch 950, loss[loss=0.1901, simple_loss=0.2705, pruned_loss=0.05489, over 19679.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2825, pruned_loss=0.06003, over 3793632.59 frames. ], batch size: 53, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:58:29,315 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 10:58:57,753 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8682, 4.4514, 2.9419, 3.9504, 1.0162, 4.4256, 4.2927, 4.3515], + device='cuda:2'), covar=tensor([0.0578, 0.0993, 0.1892, 0.0850, 0.4030, 0.0613, 0.0907, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0526, 0.0425, 0.0516, 0.0358, 0.0408, 0.0455, 0.0450, 0.0479], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 10:59:15,262 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185343.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:59:32,238 INFO [train.py:903] (2/4) Epoch 28, batch 1000, loss[loss=0.2049, simple_loss=0.2938, pruned_loss=0.05799, over 19505.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.0601, over 3793213.64 frames. ], batch size: 64, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:59:53,732 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 4.891e+02 5.853e+02 7.878e+02 2.572e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-03 11:00:23,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 11:00:34,759 INFO [train.py:903] (2/4) Epoch 28, batch 1050, loss[loss=0.2238, simple_loss=0.3039, pruned_loss=0.07192, over 19725.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.0602, over 3805447.92 frames. ], batch size: 63, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:00:47,030 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:00:53,764 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:01:02,625 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 11:01:35,153 INFO [train.py:903] (2/4) Epoch 28, batch 1100, loss[loss=0.243, simple_loss=0.3166, pruned_loss=0.08464, over 19615.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06095, over 3808122.48 frames. ], batch size: 61, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:01:57,232 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.947e+02 6.329e+02 8.288e+02 1.903e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-03 11:02:08,281 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 11:02:12,994 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:18,720 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185491.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:35,132 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2936, 2.2068, 1.8546, 1.7693, 1.6753, 1.7437, 0.6452, 1.1840], + device='cuda:2'), covar=tensor([0.0697, 0.0681, 0.0664, 0.0994, 0.1301, 0.1046, 0.1488, 0.1260], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0364, 0.0371, 0.0393, 0.0471, 0.0395, 0.0346, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:02:35,894 INFO [train.py:903] (2/4) Epoch 28, batch 1150, loss[loss=0.1685, simple_loss=0.2434, pruned_loss=0.04679, over 19375.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06132, over 3813227.97 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:03:01,904 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-03 11:03:15,733 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:03:40,783 INFO [train.py:903] (2/4) Epoch 28, batch 1200, loss[loss=0.1951, simple_loss=0.2848, pruned_loss=0.05269, over 19613.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2827, pruned_loss=0.06034, over 3830389.30 frames. ], batch size: 57, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:04:01,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.383e+02 6.799e+02 8.653e+02 1.626e+03, threshold=1.360e+03, percent-clipped=3.0 +2023-04-03 11:04:11,799 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 11:04:34,008 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:04:41,622 INFO [train.py:903] (2/4) Epoch 28, batch 1250, loss[loss=0.2369, simple_loss=0.3107, pruned_loss=0.08153, over 17414.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2825, pruned_loss=0.06033, over 3835165.10 frames. ], batch size: 101, lr: 2.94e-03, grad_scale: 16.0 +2023-04-03 11:04:42,017 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185606.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:03,708 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:44,897 INFO [train.py:903] (2/4) Epoch 28, batch 1300, loss[loss=0.2438, simple_loss=0.3082, pruned_loss=0.08977, over 13446.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06031, over 3835184.98 frames. ], batch size: 136, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:06:04,652 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185672.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:06,498 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 4.542e+02 5.530e+02 7.592e+02 1.164e+03, threshold=1.106e+03, percent-clipped=0.0 +2023-04-03 11:06:27,830 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5609, 1.6919, 2.1767, 1.9391, 3.0204, 2.4150, 3.2503, 1.6806], + device='cuda:2'), covar=tensor([0.2835, 0.4840, 0.3006, 0.2171, 0.1802, 0.2598, 0.1847, 0.4791], + device='cuda:2'), in_proj_covar=tensor([0.0552, 0.0668, 0.0752, 0.0506, 0.0633, 0.0544, 0.0667, 0.0572], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:06:35,915 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185697.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:46,280 INFO [train.py:903] (2/4) Epoch 28, batch 1350, loss[loss=0.1743, simple_loss=0.2517, pruned_loss=0.04843, over 19710.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2824, pruned_loss=0.06005, over 3810224.55 frames. ], batch size: 45, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:07:00,503 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. 
limit=2.0 +2023-04-03 11:07:48,963 INFO [train.py:903] (2/4) Epoch 28, batch 1400, loss[loss=0.1845, simple_loss=0.2557, pruned_loss=0.05671, over 19037.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.05975, over 3824160.76 frames. ], batch size: 42, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:08:02,957 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4664, 1.5357, 1.8468, 1.7143, 2.6619, 2.3222, 2.7251, 1.4118], + device='cuda:2'), covar=tensor([0.2553, 0.4448, 0.2792, 0.2037, 0.1645, 0.2241, 0.1659, 0.4515], + device='cuda:2'), in_proj_covar=tensor([0.0553, 0.0669, 0.0754, 0.0507, 0.0634, 0.0546, 0.0669, 0.0572], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:08:11,886 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.444e+02 4.899e+02 6.106e+02 7.699e+02 1.518e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 11:08:15,756 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185777.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:19,612 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 11:08:35,157 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:49,357 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 11:08:51,539 INFO [train.py:903] (2/4) Epoch 28, batch 1450, loss[loss=0.2743, simple_loss=0.3477, pruned_loss=0.1004, over 19673.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2829, pruned_loss=0.06048, over 3832552.30 frames. ], batch size: 60, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:09:05,599 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185817.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:09:06,970 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:20,231 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:27,188 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4394, 1.4363, 1.3606, 1.8215, 1.3518, 1.7285, 1.7016, 1.5300], + device='cuda:2'), covar=tensor([0.0888, 0.0886, 0.1054, 0.0637, 0.0887, 0.0752, 0.0785, 0.0727], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0225, 0.0228, 0.0240, 0.0228, 0.0215, 0.0189, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 11:09:54,223 INFO [train.py:903] (2/4) Epoch 28, batch 1500, loss[loss=0.2082, simple_loss=0.298, pruned_loss=0.05922, over 19776.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2833, pruned_loss=0.06087, over 3828239.85 frames. 
], batch size: 56, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:10:02,015 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:15,972 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 5.017e+02 6.227e+02 8.666e+02 1.816e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-03 11:10:33,546 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185887.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:56,454 INFO [train.py:903] (2/4) Epoch 28, batch 1550, loss[loss=0.2009, simple_loss=0.2863, pruned_loss=0.05778, over 18836.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06036, over 3829827.04 frames. ], batch size: 74, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:11:34,122 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8550, 1.3467, 1.0807, 1.0045, 1.1784, 1.0306, 0.9158, 1.2785], + device='cuda:2'), covar=tensor([0.0724, 0.0957, 0.1139, 0.0801, 0.0570, 0.1420, 0.0713, 0.0562], + device='cuda:2'), in_proj_covar=tensor([0.0302, 0.0319, 0.0339, 0.0272, 0.0251, 0.0345, 0.0293, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:11:34,208 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6155, 1.7298, 1.9960, 1.9632, 1.4724, 1.9348, 1.9711, 1.8321], + device='cuda:2'), covar=tensor([0.4221, 0.3859, 0.2156, 0.2528, 0.3998, 0.2323, 0.5284, 0.3630], + device='cuda:2'), in_proj_covar=tensor([0.0937, 0.1012, 0.0742, 0.0950, 0.0913, 0.0851, 0.0858, 0.0809], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 11:11:45,491 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:11:58,299 INFO [train.py:903] (2/4) Epoch 28, batch 1600, loss[loss=0.257, simple_loss=0.3305, pruned_loss=0.09178, over 19267.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06132, over 3817689.77 frames. ], batch size: 66, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:12:20,822 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 11:12:23,179 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.773e+02 5.899e+02 6.974e+02 1.687e+03, threshold=1.180e+03, percent-clipped=2.0 +2023-04-03 11:12:31,006 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 11:13:03,469 INFO [train.py:903] (2/4) Epoch 28, batch 1650, loss[loss=0.1947, simple_loss=0.2671, pruned_loss=0.0611, over 19780.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2833, pruned_loss=0.0611, over 3792876.45 frames. 
], batch size: 48, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:13:44,518 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.5790, 4.1824, 2.7518, 3.7133, 1.3515, 4.1719, 4.0237, 4.1635], + device='cuda:2'), covar=tensor([0.0626, 0.0996, 0.1965, 0.0829, 0.3587, 0.0692, 0.1039, 0.1155], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0427, 0.0516, 0.0360, 0.0411, 0.0458, 0.0452, 0.0480], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:14:07,008 INFO [train.py:903] (2/4) Epoch 28, batch 1700, loss[loss=0.2281, simple_loss=0.2999, pruned_loss=0.07812, over 13063.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2831, pruned_loss=0.06088, over 3786270.61 frames. ], batch size: 136, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:14:08,637 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6276, 2.4067, 1.7737, 1.6475, 2.1892, 1.4694, 1.4510, 2.0921], + device='cuda:2'), covar=tensor([0.1124, 0.0785, 0.1222, 0.0928, 0.0599, 0.1444, 0.0873, 0.0537], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0320, 0.0340, 0.0273, 0.0251, 0.0347, 0.0293, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:14:29,615 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.751e+02 5.670e+02 7.170e+02 1.723e+03, threshold=1.134e+03, percent-clipped=7.0 +2023-04-03 11:14:36,562 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9463, 1.1980, 1.5505, 0.6183, 1.9853, 2.4402, 2.1658, 2.6052], + device='cuda:2'), covar=tensor([0.1632, 0.3959, 0.3510, 0.2929, 0.0668, 0.0298, 0.0341, 0.0397], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0331, 0.0365, 0.0271, 0.0255, 0.0196, 0.0219, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 11:14:44,423 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 11:14:58,930 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6040, 1.7802, 2.0991, 1.8605, 3.1749, 2.5768, 3.5381, 1.8427], + device='cuda:2'), covar=tensor([0.2625, 0.4490, 0.2827, 0.1951, 0.1594, 0.2237, 0.1537, 0.4215], + device='cuda:2'), in_proj_covar=tensor([0.0552, 0.0668, 0.0752, 0.0506, 0.0635, 0.0545, 0.0667, 0.0571], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:15:08,875 INFO [train.py:903] (2/4) Epoch 28, batch 1750, loss[loss=0.2381, simple_loss=0.3173, pruned_loss=0.07945, over 18254.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2828, pruned_loss=0.06073, over 3804018.01 frames. ], batch size: 83, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:15:24,181 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:15:28,461 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:16:11,532 INFO [train.py:903] (2/4) Epoch 28, batch 1800, loss[loss=0.2167, simple_loss=0.3029, pruned_loss=0.06527, over 19307.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2823, pruned_loss=0.06013, over 3812209.93 frames. 
], batch size: 66, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:16:13,040 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0650, 5.1576, 5.9134, 5.9244, 2.0033, 5.6102, 4.7582, 5.6039], + device='cuda:2'), covar=tensor([0.1767, 0.0813, 0.0617, 0.0667, 0.6581, 0.0792, 0.0660, 0.1223], + device='cuda:2'), in_proj_covar=tensor([0.0821, 0.0784, 0.0996, 0.0872, 0.0863, 0.0763, 0.0589, 0.0921], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 11:16:18,387 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186161.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:16:36,644 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.184e+02 6.048e+02 8.514e+02 1.613e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 11:16:38,092 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186176.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:08,568 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 11:17:09,058 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186201.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:15,224 INFO [train.py:903] (2/4) Epoch 28, batch 1850, loss[loss=0.1601, simple_loss=0.2506, pruned_loss=0.0348, over 19342.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2829, pruned_loss=0.06019, over 3825706.12 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:17:38,789 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5319, 2.5119, 2.1719, 2.6225, 2.4137, 2.1277, 2.0461, 2.5016], + device='cuda:2'), covar=tensor([0.1066, 0.1628, 0.1578, 0.1188, 0.1543, 0.0585, 0.1524, 0.0716], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0363, 0.0322, 0.0259, 0.0310, 0.0259, 0.0324, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 11:17:39,963 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:46,434 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 11:17:52,424 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186236.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:18:17,545 INFO [train.py:903] (2/4) Epoch 28, batch 1900, loss[loss=0.2227, simple_loss=0.3024, pruned_loss=0.07153, over 19317.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06043, over 3813950.88 frames. ], batch size: 66, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:18:33,620 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-03 11:18:36,317 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5537, 4.1236, 4.3156, 4.3162, 1.6667, 4.0895, 3.5806, 4.0913], + device='cuda:2'), covar=tensor([0.1851, 0.0874, 0.0644, 0.0754, 0.5949, 0.0944, 0.0712, 0.1041], + device='cuda:2'), in_proj_covar=tensor([0.0822, 0.0785, 0.0997, 0.0872, 0.0863, 0.0762, 0.0589, 0.0923], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 11:18:38,418 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 11:18:39,532 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.824e+02 4.995e+02 5.845e+02 7.128e+02 1.193e+03, threshold=1.169e+03, percent-clipped=0.0 +2023-04-03 11:18:41,953 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186276.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:18:42,936 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:19:02,982 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 11:19:05,652 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.8079, 1.3353, 1.5569, 1.7240, 3.3893, 1.2666, 2.4133, 3.8624], + device='cuda:2'), covar=tensor([0.0542, 0.3003, 0.3060, 0.1795, 0.0700, 0.2575, 0.1391, 0.0219], + device='cuda:2'), in_proj_covar=tensor([0.0421, 0.0377, 0.0397, 0.0352, 0.0382, 0.0357, 0.0393, 0.0417], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:19:19,223 INFO [train.py:903] (2/4) Epoch 28, batch 1950, loss[loss=0.2049, simple_loss=0.2892, pruned_loss=0.06031, over 19280.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2833, pruned_loss=0.06032, over 3818156.86 frames. ], batch size: 66, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:20,370 INFO [train.py:903] (2/4) Epoch 28, batch 2000, loss[loss=0.1984, simple_loss=0.276, pruned_loss=0.06045, over 19473.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06009, over 3815929.40 frames. ], batch size: 49, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:31,030 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6577, 1.7576, 2.0292, 1.9711, 1.5236, 1.9544, 2.0066, 1.8754], + device='cuda:2'), covar=tensor([0.4039, 0.3578, 0.1938, 0.2273, 0.3806, 0.2167, 0.5083, 0.3330], + device='cuda:2'), in_proj_covar=tensor([0.0937, 0.1013, 0.0742, 0.0951, 0.0913, 0.0853, 0.0860, 0.0808], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 11:20:45,117 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.060e+02 6.550e+02 8.587e+02 3.446e+03, threshold=1.310e+03, percent-clipped=8.0 +2023-04-03 11:21:19,994 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 11:21:24,405 INFO [train.py:903] (2/4) Epoch 28, batch 2050, loss[loss=0.1958, simple_loss=0.2861, pruned_loss=0.05275, over 19678.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.0597, over 3819351.27 frames. 
], batch size: 58, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:21:41,345 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 11:21:42,550 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 11:21:52,127 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0921, 2.0338, 1.9430, 1.7491, 1.5661, 1.6730, 0.6473, 1.0876], + device='cuda:2'), covar=tensor([0.0734, 0.0689, 0.0510, 0.0862, 0.1327, 0.1014, 0.1386, 0.1146], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0365, 0.0371, 0.0393, 0.0471, 0.0397, 0.0347, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:22:02,208 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 11:22:04,652 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:25,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7437, 1.7622, 1.6362, 1.4243, 1.4461, 1.4367, 0.3152, 0.6857], + device='cuda:2'), covar=tensor([0.0734, 0.0683, 0.0508, 0.0756, 0.1255, 0.0869, 0.1376, 0.1247], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0364, 0.0370, 0.0393, 0.0470, 0.0397, 0.0346, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:22:26,609 INFO [train.py:903] (2/4) Epoch 28, batch 2100, loss[loss=0.1934, simple_loss=0.2687, pruned_loss=0.05907, over 19609.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2822, pruned_loss=0.05923, over 3834717.39 frames. ], batch size: 50, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:22:35,104 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:49,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.089e+02 6.089e+02 7.390e+02 1.324e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 11:22:58,697 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 11:23:11,846 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:13,782 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:21,595 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 11:23:25,164 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3305, 3.0378, 2.1940, 2.7311, 0.8622, 3.0659, 2.8814, 3.0013], + device='cuda:2'), covar=tensor([0.1161, 0.1442, 0.2139, 0.1092, 0.3754, 0.0983, 0.1258, 0.1481], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0430, 0.0516, 0.0360, 0.0410, 0.0457, 0.0452, 0.0481], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:23:29,671 INFO [train.py:903] (2/4) Epoch 28, batch 2150, loss[loss=0.179, simple_loss=0.2638, pruned_loss=0.0471, over 19472.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2824, pruned_loss=0.05939, over 3829277.02 frames. 
], batch size: 49, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:23:33,605 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9680, 2.0736, 2.3263, 2.5776, 1.9773, 2.4390, 2.2555, 2.1128], + device='cuda:2'), covar=tensor([0.4285, 0.4083, 0.2004, 0.2553, 0.4325, 0.2405, 0.5129, 0.3528], + device='cuda:2'), in_proj_covar=tensor([0.0939, 0.1016, 0.0745, 0.0952, 0.0915, 0.0854, 0.0862, 0.0810], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 11:23:42,620 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:46,784 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186520.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:00,655 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0339, 2.0929, 2.3196, 2.6634, 2.0436, 2.5073, 2.2930, 2.0839], + device='cuda:2'), covar=tensor([0.4349, 0.4205, 0.1985, 0.2495, 0.4411, 0.2334, 0.5101, 0.3603], + device='cuda:2'), in_proj_covar=tensor([0.0941, 0.1018, 0.0746, 0.0954, 0.0917, 0.0856, 0.0865, 0.0812], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 11:24:02,609 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186532.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:24:06,080 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:30,573 INFO [train.py:903] (2/4) Epoch 28, batch 2200, loss[loss=0.1792, simple_loss=0.2745, pruned_loss=0.04195, over 19787.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06013, over 3831319.05 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:24:32,118 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186557.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:32,222 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186557.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:24:55,768 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 4.864e+02 5.746e+02 7.619e+02 1.717e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-04-03 11:24:58,680 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:09,453 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:13,495 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.29 vs. limit=5.0 +2023-04-03 11:25:32,849 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:34,935 INFO [train.py:903] (2/4) Epoch 28, batch 2250, loss[loss=0.1676, simple_loss=0.25, pruned_loss=0.04258, over 19757.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05964, over 3838087.27 frames. 
], batch size: 46, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:25:54,491 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186621.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:12,640 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186635.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:39,392 INFO [train.py:903] (2/4) Epoch 28, batch 2300, loss[loss=0.1755, simple_loss=0.2597, pruned_loss=0.04566, over 19611.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2831, pruned_loss=0.05999, over 3831119.48 frames. ], batch size: 50, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:26:43,097 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7302, 1.2915, 1.5103, 1.5476, 3.3187, 1.1070, 2.5296, 3.8025], + device='cuda:2'), covar=tensor([0.0527, 0.3019, 0.3109, 0.1934, 0.0727, 0.2740, 0.1359, 0.0227], + device='cuda:2'), in_proj_covar=tensor([0.0422, 0.0376, 0.0395, 0.0352, 0.0382, 0.0356, 0.0393, 0.0416], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:26:55,470 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 11:27:02,266 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 4.896e+02 5.750e+02 7.152e+02 2.246e+03, threshold=1.150e+03, percent-clipped=6.0 +2023-04-03 11:27:42,249 INFO [train.py:903] (2/4) Epoch 28, batch 2350, loss[loss=0.2021, simple_loss=0.2863, pruned_loss=0.05897, over 19546.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.05986, over 3829779.61 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:28:07,869 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:20,552 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:22,866 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0641, 1.2107, 1.6479, 0.9949, 2.3234, 3.0920, 2.8046, 3.2878], + device='cuda:2'), covar=tensor([0.1728, 0.4129, 0.3599, 0.2819, 0.0652, 0.0229, 0.0252, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0333, 0.0366, 0.0272, 0.0257, 0.0197, 0.0220, 0.0280], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 11:28:26,054 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 11:28:42,208 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 11:28:44,645 INFO [train.py:903] (2/4) Epoch 28, batch 2400, loss[loss=0.1524, simple_loss=0.2396, pruned_loss=0.03262, over 19304.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06029, over 3820706.07 frames. ], batch size: 44, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:28:50,166 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-04-03 11:29:05,680 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.0808, 5.5259, 3.1436, 4.8958, 0.9679, 5.7018, 5.4658, 5.6936], + device='cuda:2'), covar=tensor([0.0350, 0.0834, 0.1769, 0.0735, 0.4108, 0.0504, 0.0805, 0.0963], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0432, 0.0519, 0.0362, 0.0412, 0.0461, 0.0455, 0.0484], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:29:08,814 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 4.439e+02 5.729e+02 7.466e+02 1.887e+03, threshold=1.146e+03, percent-clipped=9.0 +2023-04-03 11:29:20,262 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:29:29,326 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3045, 3.0247, 2.1535, 2.7170, 0.8939, 2.9952, 2.8944, 2.9689], + device='cuda:2'), covar=tensor([0.1216, 0.1352, 0.2186, 0.1150, 0.3766, 0.1069, 0.1236, 0.1612], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0432, 0.0518, 0.0362, 0.0413, 0.0461, 0.0454, 0.0484], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:29:47,721 INFO [train.py:903] (2/4) Epoch 28, batch 2450, loss[loss=0.221, simple_loss=0.2938, pruned_loss=0.07407, over 13461.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.284, pruned_loss=0.0603, over 3813724.05 frames. ], batch size: 136, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:22,262 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:26,698 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:50,784 INFO [train.py:903] (2/4) Epoch 28, batch 2500, loss[loss=0.213, simple_loss=0.2882, pruned_loss=0.0689, over 19747.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2837, pruned_loss=0.05997, over 3816449.92 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:54,446 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:54,554 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:15,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.341e+02 4.860e+02 5.866e+02 7.110e+02 2.029e+03, threshold=1.173e+03, percent-clipped=7.0 +2023-04-03 11:31:19,827 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186879.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:35,092 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186891.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:45,086 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:49,221 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:54,935 INFO [train.py:903] (2/4) Epoch 28, batch 2550, loss[loss=0.191, simple_loss=0.275, pruned_loss=0.05344, over 19732.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2823, pruned_loss=0.05926, over 3824454.16 frames. 
], batch size: 51, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:32:06,731 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186916.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,509 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,649 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:44,467 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7509, 1.1343, 1.4187, 1.3948, 3.1305, 1.1675, 2.6077, 3.6652], + device='cuda:2'), covar=tensor([0.0666, 0.3721, 0.3581, 0.2531, 0.1148, 0.3133, 0.1437, 0.0379], + device='cuda:2'), in_proj_covar=tensor([0.0421, 0.0377, 0.0396, 0.0352, 0.0382, 0.0357, 0.0394, 0.0415], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:32:47,702 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186948.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:50,879 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 11:32:52,316 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:56,643 INFO [train.py:903] (2/4) Epoch 28, batch 2600, loss[loss=0.1846, simple_loss=0.272, pruned_loss=0.04858, over 19659.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05988, over 3828754.45 frames. ], batch size: 55, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:33:08,791 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6201, 1.7566, 2.0964, 1.9301, 3.1656, 2.7079, 3.4943, 1.5877], + device='cuda:2'), covar=tensor([0.2564, 0.4370, 0.2900, 0.1853, 0.1456, 0.2077, 0.1499, 0.4544], + device='cuda:2'), in_proj_covar=tensor([0.0553, 0.0669, 0.0755, 0.0507, 0.0633, 0.0545, 0.0667, 0.0571], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:33:20,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.127e+02 5.898e+02 7.788e+02 1.720e+03, threshold=1.180e+03, percent-clipped=7.0 +2023-04-03 11:33:43,112 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186992.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:45,250 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:59,861 INFO [train.py:903] (2/4) Epoch 28, batch 2650, loss[loss=0.2048, simple_loss=0.2778, pruned_loss=0.0659, over 19769.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.283, pruned_loss=0.05941, over 3828525.12 frames. ], batch size: 45, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:34:12,833 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:13,992 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:22,406 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 11:34:46,310 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187043.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:47,554 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:02,646 INFO [train.py:903] (2/4) Epoch 28, batch 2700, loss[loss=0.1723, simple_loss=0.2529, pruned_loss=0.04583, over 19759.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2837, pruned_loss=0.05998, over 3835758.16 frames. ], batch size: 45, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:35:02,918 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:13,306 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:22,297 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187071.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:26,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 4.685e+02 5.845e+02 7.450e+02 1.306e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 11:36:06,490 INFO [train.py:903] (2/4) Epoch 28, batch 2750, loss[loss=0.2344, simple_loss=0.3147, pruned_loss=0.077, over 19128.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05986, over 3825818.03 frames. ], batch size: 69, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:06,454 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:08,435 INFO [train.py:903] (2/4) Epoch 28, batch 2800, loss[loss=0.2642, simple_loss=0.3222, pruned_loss=0.1031, over 12710.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05958, over 3819409.41 frames. ], batch size: 135, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:31,619 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.065e+02 4.906e+02 5.640e+02 7.444e+02 1.445e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-04-03 11:37:36,931 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:46,976 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187186.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:58,605 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0598, 3.7168, 2.5134, 3.2527, 0.8009, 3.6640, 3.5485, 3.5803], + device='cuda:2'), covar=tensor([0.0759, 0.1025, 0.1890, 0.0942, 0.3878, 0.0776, 0.1001, 0.1159], + device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0430, 0.0515, 0.0360, 0.0410, 0.0457, 0.0452, 0.0482], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:38:05,775 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187202.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:10,174 INFO [train.py:903] (2/4) Epoch 28, batch 2850, loss[loss=0.2361, simple_loss=0.3042, pruned_loss=0.08399, over 19581.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.05959, over 3817137.08 frames. 
], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:38:13,771 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187208.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:44,957 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187233.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:05,591 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187250.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:11,674 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 11:39:12,689 INFO [train.py:903] (2/4) Epoch 28, batch 2900, loss[loss=0.2168, simple_loss=0.3013, pruned_loss=0.06613, over 13176.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2828, pruned_loss=0.06015, over 3804131.42 frames. ], batch size: 136, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:39:32,609 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:33,465 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:35,532 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.012e+02 6.172e+02 7.442e+02 2.226e+03, threshold=1.234e+03, percent-clipped=8.0 +2023-04-03 11:39:35,995 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:02,566 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:06,108 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187300.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:13,523 INFO [train.py:903] (2/4) Epoch 28, batch 2950, loss[loss=0.271, simple_loss=0.3332, pruned_loss=0.1044, over 13365.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06035, over 3817823.33 frames. 
], batch size: 136, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:40:26,537 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187317.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:28,968 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:35,749 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:00,020 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187344.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:11,801 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1312, 1.3186, 1.7031, 1.3585, 2.7775, 3.7988, 3.4605, 4.0316], + device='cuda:2'), covar=tensor([0.1715, 0.3921, 0.3531, 0.2590, 0.0655, 0.0203, 0.0220, 0.0261], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0270, 0.0255, 0.0196, 0.0219, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 11:41:12,963 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8860, 1.5226, 2.0271, 1.7310, 4.4295, 1.1944, 2.6931, 4.8991], + device='cuda:2'), covar=tensor([0.0462, 0.2938, 0.2705, 0.2020, 0.0764, 0.2772, 0.1458, 0.0165], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0381, 0.0400, 0.0355, 0.0385, 0.0360, 0.0399, 0.0419], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:41:13,834 INFO [train.py:903] (2/4) Epoch 28, batch 3000, loss[loss=0.1886, simple_loss=0.278, pruned_loss=0.0496, over 19791.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2837, pruned_loss=0.06109, over 3812714.74 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:41:13,834 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 11:41:26,720 INFO [train.py:937] (2/4) Epoch 28, validation: loss=0.1673, simple_loss=0.2667, pruned_loss=0.03394, over 944034.00 frames. +2023-04-03 11:41:26,721 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 11:41:29,156 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-03 11:41:49,228 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.693e+02 4.898e+02 6.411e+02 7.995e+02 1.373e+03, threshold=1.282e+03, percent-clipped=5.0 +2023-04-03 11:41:55,457 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6253, 1.7305, 1.9791, 1.9229, 1.4942, 1.8683, 1.9609, 1.8491], + device='cuda:2'), covar=tensor([0.4303, 0.3767, 0.2108, 0.2541, 0.4000, 0.2415, 0.5443, 0.3637], + device='cuda:2'), in_proj_covar=tensor([0.0935, 0.1012, 0.0741, 0.0948, 0.0910, 0.0853, 0.0860, 0.0808], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 11:42:05,136 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:06,450 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:08,766 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:21,159 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:27,776 INFO [train.py:903] (2/4) Epoch 28, batch 3050, loss[loss=0.2261, simple_loss=0.3045, pruned_loss=0.07387, over 19609.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.06112, over 3801985.22 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:42:33,236 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-03 11:43:13,205 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:29,481 INFO [train.py:903] (2/4) Epoch 28, batch 3100, loss[loss=0.1953, simple_loss=0.2843, pruned_loss=0.05311, over 19688.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06077, over 3809587.70 frames. ], batch size: 53, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:43,943 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:48,542 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3686, 3.5548, 2.1144, 2.1665, 3.1731, 1.7787, 1.7358, 2.5394], + device='cuda:2'), covar=tensor([0.1242, 0.0601, 0.1137, 0.0912, 0.0571, 0.1303, 0.0978, 0.0618], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0318, 0.0340, 0.0272, 0.0252, 0.0345, 0.0293, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:43:54,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.792e+02 5.797e+02 7.675e+02 1.223e+03, threshold=1.159e+03, percent-clipped=0.0 +2023-04-03 11:44:27,036 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187502.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:32,168 INFO [train.py:903] (2/4) Epoch 28, batch 3150, loss[loss=0.2455, simple_loss=0.3225, pruned_loss=0.0842, over 19714.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.06043, over 3819625.46 frames. 
], batch size: 63, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:44:44,967 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:54,653 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 11:45:01,727 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:10,627 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:15,248 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:35,287 INFO [train.py:903] (2/4) Epoch 28, batch 3200, loss[loss=0.2025, simple_loss=0.2832, pruned_loss=0.06092, over 19345.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06063, over 3801748.02 frames. ], batch size: 66, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:45:55,534 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:57,397 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.914e+02 6.228e+02 8.001e+02 2.182e+03, threshold=1.246e+03, percent-clipped=10.0 +2023-04-03 11:46:15,075 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 11:46:27,899 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187598.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:46:36,795 INFO [train.py:903] (2/4) Epoch 28, batch 3250, loss[loss=0.187, simple_loss=0.2741, pruned_loss=0.04991, over 19375.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06086, over 3809119.00 frames. ], batch size: 70, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:46:54,668 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 11:47:23,896 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187644.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:47:37,437 INFO [train.py:903] (2/4) Epoch 28, batch 3300, loss[loss=0.2029, simple_loss=0.2893, pruned_loss=0.05823, over 19666.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06132, over 3805280.52 frames. ], batch size: 58, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:47:37,467 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 11:47:54,254 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:48:01,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.770e+02 6.093e+02 7.830e+02 1.620e+03, threshold=1.219e+03, percent-clipped=4.0 +2023-04-03 11:48:08,549 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 11:48:40,832 INFO [train.py:903] (2/4) Epoch 28, batch 3350, loss[loss=0.1859, simple_loss=0.2763, pruned_loss=0.04775, over 17411.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06098, over 3806343.79 frames. 
], batch size: 101, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:48:54,793 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1274, 1.6468, 1.8158, 2.7508, 2.0054, 2.2226, 2.2819, 1.8965], + device='cuda:2'), covar=tensor([0.0858, 0.1014, 0.1054, 0.0738, 0.0926, 0.0827, 0.0905, 0.0772], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0241, 0.0227, 0.0215, 0.0188, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 11:49:14,742 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:49:42,968 INFO [train.py:903] (2/4) Epoch 28, batch 3400, loss[loss=0.1979, simple_loss=0.2807, pruned_loss=0.05752, over 19738.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2843, pruned_loss=0.06144, over 3801201.39 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:49:45,590 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:02,044 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:07,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.801e+02 5.865e+02 7.575e+02 1.695e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-03 11:50:16,892 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:34,176 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:46,603 INFO [train.py:903] (2/4) Epoch 28, batch 3450, loss[loss=0.2228, simple_loss=0.3076, pruned_loss=0.069, over 19689.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2823, pruned_loss=0.06028, over 3809950.20 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:50:47,823 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 11:50:52,913 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4041, 4.0508, 2.6033, 3.5679, 0.9306, 3.9538, 3.8230, 3.9288], + device='cuda:2'), covar=tensor([0.0617, 0.0942, 0.1950, 0.0863, 0.3815, 0.0722, 0.0973, 0.1232], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0429, 0.0514, 0.0359, 0.0409, 0.0455, 0.0451, 0.0483], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:51:41,400 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:51:49,300 INFO [train.py:903] (2/4) Epoch 28, batch 3500, loss[loss=0.2017, simple_loss=0.2891, pruned_loss=0.05713, over 19751.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2817, pruned_loss=0.05975, over 3822838.39 frames. 
], batch size: 54, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:52:12,006 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:13,419 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2346, 1.1861, 1.2279, 1.3563, 0.9188, 1.3140, 1.2804, 1.2892], + device='cuda:2'), covar=tensor([0.0978, 0.1058, 0.1108, 0.0670, 0.0986, 0.0936, 0.0862, 0.0825], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0222, 0.0227, 0.0239, 0.0224, 0.0213, 0.0187, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 11:52:14,012 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.602e+02 4.936e+02 6.004e+02 7.154e+02 1.224e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 11:52:20,881 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:26,771 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:50,780 INFO [train.py:903] (2/4) Epoch 28, batch 3550, loss[loss=0.1706, simple_loss=0.2567, pruned_loss=0.04223, over 19738.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.05973, over 3835071.96 frames. ], batch size: 51, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:53:52,897 INFO [train.py:903] (2/4) Epoch 28, batch 3600, loss[loss=0.2231, simple_loss=0.3047, pruned_loss=0.07075, over 19794.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2816, pruned_loss=0.05972, over 3830950.49 frames. ], batch size: 56, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:53:57,153 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-03 11:54:17,586 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.904e+02 5.870e+02 7.567e+02 1.667e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 11:54:34,395 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:42,625 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:48,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:56,065 INFO [train.py:903] (2/4) Epoch 28, batch 3650, loss[loss=0.2075, simple_loss=0.2824, pruned_loss=0.06628, over 19542.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2818, pruned_loss=0.05929, over 3839533.93 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:55:49,207 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-03 11:55:53,660 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1451, 1.8102, 1.4526, 1.2294, 1.6102, 1.2100, 1.2090, 1.6675], + device='cuda:2'), covar=tensor([0.0897, 0.0868, 0.1229, 0.0938, 0.0687, 0.1393, 0.0678, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0306, 0.0322, 0.0345, 0.0274, 0.0254, 0.0350, 0.0296, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 11:55:57,944 INFO [train.py:903] (2/4) Epoch 28, batch 3700, loss[loss=0.1992, simple_loss=0.2881, pruned_loss=0.05519, over 19529.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2843, pruned_loss=0.06075, over 3816530.49 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:56:23,997 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.013e+02 6.013e+02 7.793e+02 2.143e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 11:56:59,856 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:57:00,596 INFO [train.py:903] (2/4) Epoch 28, batch 3750, loss[loss=0.2034, simple_loss=0.2925, pruned_loss=0.05716, over 19622.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06058, over 3827979.58 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:57:19,352 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3180, 3.0339, 2.4770, 2.4446, 2.1235, 2.6405, 0.9961, 2.2396], + device='cuda:2'), covar=tensor([0.0655, 0.0591, 0.0689, 0.1088, 0.1095, 0.1050, 0.1470, 0.1107], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0367, 0.0374, 0.0396, 0.0475, 0.0401, 0.0348, 0.0353], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 11:57:32,277 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188130.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:57:55,861 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3555, 3.8650, 3.9463, 3.9841, 1.6078, 3.7776, 3.3241, 3.7193], + device='cuda:2'), covar=tensor([0.1632, 0.0974, 0.0707, 0.0753, 0.5984, 0.1118, 0.0737, 0.1145], + device='cuda:2'), in_proj_covar=tensor([0.0829, 0.0791, 0.1006, 0.0880, 0.0870, 0.0771, 0.0595, 0.0933], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 11:58:03,799 INFO [train.py:903] (2/4) Epoch 28, batch 3800, loss[loss=0.1596, simple_loss=0.2337, pruned_loss=0.04274, over 19741.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2827, pruned_loss=0.0602, over 3823923.04 frames. ], batch size: 45, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:58:30,500 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.177e+02 6.017e+02 8.386e+02 1.721e+03, threshold=1.203e+03, percent-clipped=7.0 +2023-04-03 11:58:33,995 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 11:59:06,274 INFO [train.py:903] (2/4) Epoch 28, batch 3850, loss[loss=0.2014, simple_loss=0.287, pruned_loss=0.05786, over 19765.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.06033, over 3827798.55 frames. 
], batch size: 54, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:59:56,100 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:05,171 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188252.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:09,182 INFO [train.py:903] (2/4) Epoch 28, batch 3900, loss[loss=0.2323, simple_loss=0.3092, pruned_loss=0.07768, over 19513.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05948, over 3837527.17 frames. ], batch size: 64, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:00:09,614 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:26,401 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188270.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:29,479 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.3372, 5.6865, 3.1671, 4.9910, 1.1746, 5.8733, 5.7639, 5.8746], + device='cuda:2'), covar=tensor([0.0355, 0.0738, 0.1876, 0.0757, 0.3940, 0.0533, 0.0712, 0.0903], + device='cuda:2'), in_proj_covar=tensor([0.0531, 0.0431, 0.0518, 0.0363, 0.0412, 0.0458, 0.0454, 0.0486], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:00:34,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.053e+02 6.264e+02 8.116e+02 1.975e+03, threshold=1.253e+03, percent-clipped=4.0 +2023-04-03 12:00:34,500 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:39,135 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188281.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:04,080 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:09,692 INFO [train.py:903] (2/4) Epoch 28, batch 3950, loss[loss=0.1891, simple_loss=0.268, pruned_loss=0.05509, over 19729.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05968, over 3840631.74 frames. ], batch size: 51, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:01:15,484 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 12:01:37,966 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0046, 2.1003, 2.3754, 2.6717, 1.9882, 2.5116, 2.3124, 2.1126], + device='cuda:2'), covar=tensor([0.4351, 0.4382, 0.2098, 0.2608, 0.4438, 0.2479, 0.5126, 0.3657], + device='cuda:2'), in_proj_covar=tensor([0.0940, 0.1016, 0.0744, 0.0953, 0.0914, 0.0857, 0.0862, 0.0810], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:02:03,054 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7026, 2.4337, 1.8332, 1.6722, 2.1981, 1.6146, 1.6037, 2.1086], + device='cuda:2'), covar=tensor([0.1105, 0.0854, 0.1128, 0.0885, 0.0571, 0.1210, 0.0806, 0.0501], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0342, 0.0272, 0.0252, 0.0345, 0.0293, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:02:07,464 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-03 12:02:08,272 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1027, 2.0317, 1.9341, 1.7569, 1.6326, 1.7017, 0.5683, 1.0665], + device='cuda:2'), covar=tensor([0.0713, 0.0684, 0.0519, 0.0938, 0.1255, 0.1014, 0.1503, 0.1239], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0365, 0.0372, 0.0392, 0.0473, 0.0397, 0.0346, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 12:02:12,181 INFO [train.py:903] (2/4) Epoch 28, batch 4000, loss[loss=0.2002, simple_loss=0.2782, pruned_loss=0.06113, over 19788.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.05972, over 3824656.07 frames. ], batch size: 56, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:02:38,283 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.378e+02 4.980e+02 6.272e+02 8.090e+02 1.579e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 12:02:59,884 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 12:03:14,921 INFO [train.py:903] (2/4) Epoch 28, batch 4050, loss[loss=0.2292, simple_loss=0.3207, pruned_loss=0.06886, over 19542.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05929, over 3822343.02 frames. 
], batch size: 56, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:03:41,456 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5647, 4.1398, 4.2774, 4.2880, 1.7933, 4.0815, 3.5258, 4.0327], + device='cuda:2'), covar=tensor([0.1796, 0.0862, 0.0673, 0.0733, 0.5748, 0.0963, 0.0742, 0.1079], + device='cuda:2'), in_proj_covar=tensor([0.0832, 0.0795, 0.1009, 0.0883, 0.0873, 0.0771, 0.0596, 0.0934], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 12:03:58,487 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.6498, 2.7896, 2.3327, 2.8052, 2.6062, 2.2313, 2.2091, 2.6303], + device='cuda:2'), covar=tensor([0.0950, 0.1387, 0.1422, 0.1032, 0.1292, 0.0557, 0.1525, 0.0661], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0359, 0.0319, 0.0257, 0.0306, 0.0257, 0.0321, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:04:17,054 INFO [train.py:903] (2/4) Epoch 28, batch 4100, loss[loss=0.258, simple_loss=0.3291, pruned_loss=0.09346, over 19532.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2826, pruned_loss=0.05954, over 3830037.39 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:25,838 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9210, 2.0149, 2.2840, 2.5461, 1.9507, 2.4366, 2.2550, 2.0403], + device='cuda:2'), covar=tensor([0.4446, 0.4190, 0.2073, 0.2605, 0.4459, 0.2373, 0.5191, 0.3617], + device='cuda:2'), in_proj_covar=tensor([0.0940, 0.1018, 0.0745, 0.0953, 0.0916, 0.0858, 0.0864, 0.0811], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:04:43,427 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.116e+02 6.170e+02 8.694e+02 1.686e+03, threshold=1.234e+03, percent-clipped=5.0 +2023-04-03 12:04:54,660 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 12:05:11,284 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:05:19,168 INFO [train.py:903] (2/4) Epoch 28, batch 4150, loss[loss=0.1898, simple_loss=0.279, pruned_loss=0.05034, over 18888.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.05931, over 3810380.21 frames. ], batch size: 74, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:06:21,501 INFO [train.py:903] (2/4) Epoch 28, batch 4200, loss[loss=0.2164, simple_loss=0.3097, pruned_loss=0.06156, over 19606.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.05967, over 3815333.53 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:06:24,953 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 12:06:46,554 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 4.775e+02 5.885e+02 7.540e+02 1.202e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-03 12:07:22,476 INFO [train.py:903] (2/4) Epoch 28, batch 4250, loss[loss=0.1887, simple_loss=0.2759, pruned_loss=0.05071, over 19669.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.05982, over 3816009.83 frames. 
], batch size: 59, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:07:33,772 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 12:07:42,258 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6957, 1.4656, 1.5536, 2.3428, 1.6866, 2.0166, 2.0071, 1.6904], + device='cuda:2'), covar=tensor([0.0863, 0.0969, 0.1017, 0.0638, 0.0859, 0.0763, 0.0832, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0214, 0.0188, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 12:07:44,074 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 12:08:10,324 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:08:24,251 INFO [train.py:903] (2/4) Epoch 28, batch 4300, loss[loss=0.1996, simple_loss=0.2887, pruned_loss=0.05522, over 19495.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2832, pruned_loss=0.06027, over 3804836.43 frames. ], batch size: 64, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:08:51,759 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.988e+02 4.883e+02 5.889e+02 8.258e+02 1.553e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-03 12:09:15,649 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 12:09:26,306 INFO [train.py:903] (2/4) Epoch 28, batch 4350, loss[loss=0.211, simple_loss=0.3002, pruned_loss=0.06086, over 18095.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2821, pruned_loss=0.05991, over 3814617.68 frames. ], batch size: 83, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:10:17,293 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7956, 2.0635, 2.2772, 2.0894, 3.4801, 2.9962, 3.6811, 1.8352], + device='cuda:2'), covar=tensor([0.2396, 0.3968, 0.2715, 0.1807, 0.1332, 0.1930, 0.1438, 0.4062], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0670, 0.0757, 0.0510, 0.0636, 0.0548, 0.0671, 0.0574], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:10:30,431 INFO [train.py:903] (2/4) Epoch 28, batch 4400, loss[loss=0.1567, simple_loss=0.2385, pruned_loss=0.0375, over 19733.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2815, pruned_loss=0.05925, over 3820563.97 frames. ], batch size: 47, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:10:35,416 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:10:52,231 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 12:10:57,774 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.797e+02 5.749e+02 7.341e+02 1.454e+03, threshold=1.150e+03, percent-clipped=2.0 +2023-04-03 12:11:02,397 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 12:11:26,489 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3270, 1.3993, 1.6782, 1.5465, 2.0978, 1.8299, 2.1127, 0.8970], + device='cuda:2'), covar=tensor([0.3169, 0.5164, 0.3129, 0.2659, 0.1965, 0.3015, 0.1831, 0.5863], + device='cuda:2'), in_proj_covar=tensor([0.0557, 0.0672, 0.0758, 0.0510, 0.0637, 0.0548, 0.0671, 0.0575], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:11:31,837 INFO [train.py:903] (2/4) Epoch 28, batch 4450, loss[loss=0.1847, simple_loss=0.2704, pruned_loss=0.04955, over 19751.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06033, over 3822615.89 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:19,635 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:12:36,102 INFO [train.py:903] (2/4) Epoch 28, batch 4500, loss[loss=0.1673, simple_loss=0.253, pruned_loss=0.04078, over 16060.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.06, over 3820976.38 frames. ], batch size: 35, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:39,334 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 12:13:04,414 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.536e+02 5.549e+02 7.562e+02 1.666e+03, threshold=1.110e+03, percent-clipped=5.0 +2023-04-03 12:13:38,242 INFO [train.py:903] (2/4) Epoch 28, batch 4550, loss[loss=0.1894, simple_loss=0.2795, pruned_loss=0.04964, over 18052.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05988, over 3821159.57 frames. ], batch size: 83, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:46,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 12:14:12,241 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 12:14:27,688 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-03 12:14:29,498 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:31,884 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:39,529 INFO [train.py:903] (2/4) Epoch 28, batch 4600, loss[loss=0.1849, simple_loss=0.2731, pruned_loss=0.04838, over 19712.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2823, pruned_loss=0.05985, over 3813103.94 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:14:43,221 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:15:07,936 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.624e+02 5.779e+02 7.629e+02 1.899e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 12:15:43,036 INFO [train.py:903] (2/4) Epoch 28, batch 4650, loss[loss=0.1713, simple_loss=0.2546, pruned_loss=0.04396, over 19742.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.06047, over 3807554.46 frames. 
], batch size: 51, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:15:56,021 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:01,315 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 12:16:12,616 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 12:16:26,525 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,714 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:44,455 INFO [train.py:903] (2/4) Epoch 28, batch 4700, loss[loss=0.2457, simple_loss=0.3175, pruned_loss=0.08695, over 19671.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.06045, over 3802859.89 frames. ], batch size: 58, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:17:07,728 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 12:17:10,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.848e+02 5.842e+02 7.352e+02 2.015e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-03 12:17:17,598 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:36,207 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:37,515 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.7209, 1.7580, 1.7165, 1.4426, 1.4565, 1.4754, 0.2813, 0.7349], + device='cuda:2'), covar=tensor([0.0777, 0.0746, 0.0498, 0.0844, 0.1450, 0.0929, 0.1553, 0.1342], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0367, 0.0372, 0.0396, 0.0475, 0.0399, 0.0348, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 12:17:46,794 INFO [train.py:903] (2/4) Epoch 28, batch 4750, loss[loss=0.2098, simple_loss=0.2982, pruned_loss=0.06065, over 18083.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06034, over 3788585.34 frames. ], batch size: 83, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:18:47,828 INFO [train.py:903] (2/4) Epoch 28, batch 4800, loss[loss=0.2285, simple_loss=0.3174, pruned_loss=0.06979, over 19572.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06046, over 3789132.10 frames. 
], batch size: 61, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:19:16,003 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 5.029e+02 6.156e+02 7.557e+02 1.439e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-03 12:19:44,212 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7851, 3.2691, 3.3210, 3.3528, 1.4205, 3.2170, 2.7702, 3.0974], + device='cuda:2'), covar=tensor([0.1905, 0.1238, 0.0875, 0.1003, 0.5577, 0.1144, 0.0933, 0.1384], + device='cuda:2'), in_proj_covar=tensor([0.0821, 0.0787, 0.0997, 0.0875, 0.0864, 0.0761, 0.0588, 0.0924], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 12:19:50,825 INFO [train.py:903] (2/4) Epoch 28, batch 4850, loss[loss=0.1837, simple_loss=0.2731, pruned_loss=0.04712, over 19537.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2833, pruned_loss=0.05989, over 3807958.92 frames. ], batch size: 56, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:20:01,588 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:12,582 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 12:20:32,967 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:33,812 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 12:20:39,534 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 12:20:40,701 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 12:20:51,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 12:20:53,318 INFO [train.py:903] (2/4) Epoch 28, batch 4900, loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04774, over 19771.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05973, over 3812358.06 frames. ], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:21:05,671 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189266.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:10,803 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-03 12:21:19,667 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.742e+02 6.083e+02 7.635e+02 1.565e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-04-03 12:21:20,148 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3713, 1.4496, 1.6778, 1.6083, 2.2396, 2.0229, 2.3707, 0.8712], + device='cuda:2'), covar=tensor([0.2620, 0.4497, 0.2821, 0.2071, 0.1630, 0.2406, 0.1459, 0.5198], + device='cuda:2'), in_proj_covar=tensor([0.0557, 0.0674, 0.0759, 0.0510, 0.0638, 0.0550, 0.0674, 0.0576], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:21:36,046 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:38,180 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189293.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:53,518 INFO [train.py:903] (2/4) Epoch 28, batch 4950, loss[loss=0.2127, simple_loss=0.2982, pruned_loss=0.06365, over 19685.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.06008, over 3810407.41 frames. ], batch size: 55, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:22:10,594 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 12:22:34,612 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 12:22:56,070 INFO [train.py:903] (2/4) Epoch 28, batch 5000, loss[loss=0.2251, simple_loss=0.3082, pruned_loss=0.07093, over 18998.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06084, over 3828334.16 frames. ], batch size: 69, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:22:57,318 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6040, 2.2245, 1.5357, 1.5113, 2.1097, 1.2600, 1.5541, 1.9480], + device='cuda:2'), covar=tensor([0.1098, 0.0950, 0.1156, 0.0830, 0.0576, 0.1403, 0.0735, 0.0553], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0342, 0.0273, 0.0253, 0.0344, 0.0294, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:23:04,854 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 12:23:08,803 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189365.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:15,684 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 12:23:24,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.003e+02 6.019e+02 8.012e+02 1.544e+03, threshold=1.204e+03, percent-clipped=7.0 +2023-04-03 12:23:42,170 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:59,706 INFO [train.py:903] (2/4) Epoch 28, batch 5050, loss[loss=0.2075, simple_loss=0.2962, pruned_loss=0.05943, over 19689.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.06067, over 3841944.63 frames. 
], batch size: 60, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:24:00,207 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:02,597 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:24,215 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:33,179 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 12:24:44,051 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189441.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:52,468 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-03 12:25:01,203 INFO [train.py:903] (2/4) Epoch 28, batch 5100, loss[loss=0.1914, simple_loss=0.2783, pruned_loss=0.05221, over 19666.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2848, pruned_loss=0.06103, over 3822398.85 frames. ], batch size: 60, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:25:10,105 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 12:25:12,427 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 12:25:19,117 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 12:25:27,912 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.191e+02 6.540e+02 8.236e+02 1.634e+03, threshold=1.308e+03, percent-clipped=9.0 +2023-04-03 12:26:01,393 INFO [train.py:903] (2/4) Epoch 28, batch 5150, loss[loss=0.2523, simple_loss=0.3243, pruned_loss=0.09014, over 18741.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.285, pruned_loss=0.06132, over 3818980.52 frames. ], batch size: 74, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:26:02,894 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:11,418 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 12:26:18,119 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:44,858 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:46,844 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 12:27:00,763 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3226, 1.3636, 1.5869, 1.4934, 1.8095, 1.8378, 1.8110, 0.5456], + device='cuda:2'), covar=tensor([0.2876, 0.4929, 0.3043, 0.2342, 0.1849, 0.2808, 0.1642, 0.5888], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0674, 0.0758, 0.0509, 0.0637, 0.0548, 0.0672, 0.0575], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:27:02,655 INFO [train.py:903] (2/4) Epoch 28, batch 5200, loss[loss=0.1907, simple_loss=0.26, pruned_loss=0.06071, over 19749.00 frames. 
], tot_loss[loss=0.2046, simple_loss=0.2857, pruned_loss=0.06179, over 3815077.17 frames. ], batch size: 46, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:27:03,038 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189556.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:27:17,229 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 12:27:30,437 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 4.850e+02 5.941e+02 7.550e+02 1.552e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-03 12:28:01,610 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 12:28:04,868 INFO [train.py:903] (2/4) Epoch 28, batch 5250, loss[loss=0.2084, simple_loss=0.2901, pruned_loss=0.06333, over 19268.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.285, pruned_loss=0.06128, over 3827516.24 frames. ], batch size: 66, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:28:09,619 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:28:41,502 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0475, 5.1708, 5.8673, 5.8722, 2.0257, 5.6114, 4.6747, 5.5580], + device='cuda:2'), covar=tensor([0.1724, 0.0835, 0.0579, 0.0631, 0.6140, 0.0956, 0.0662, 0.1101], + device='cuda:2'), in_proj_covar=tensor([0.0816, 0.0783, 0.0993, 0.0872, 0.0862, 0.0758, 0.0584, 0.0924], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 12:29:04,828 INFO [train.py:903] (2/4) Epoch 28, batch 5300, loss[loss=0.1979, simple_loss=0.287, pruned_loss=0.05438, over 19549.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06141, over 3830698.33 frames. ], batch size: 54, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:29:08,424 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189658.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:12,996 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:14,964 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189664.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:16,078 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.0638, 1.5990, 1.3193, 0.9715, 1.5041, 0.9905, 1.0155, 1.6021], + device='cuda:2'), covar=tensor([0.0813, 0.0856, 0.0966, 0.1020, 0.0547, 0.1353, 0.0730, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0319, 0.0342, 0.0273, 0.0253, 0.0345, 0.0295, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:29:21,484 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-03 12:29:31,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.811e+02 6.188e+02 7.440e+02 1.460e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-03 12:29:44,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:46,231 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189689.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:05,520 INFO [train.py:903] (2/4) Epoch 28, batch 5350, loss[loss=0.166, simple_loss=0.241, pruned_loss=0.04555, over 19780.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2849, pruned_loss=0.06123, over 3841749.72 frames. ], batch size: 47, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:30:09,136 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:20,560 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-03 12:30:25,520 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:29,875 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:38,356 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 12:30:48,723 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3156, 1.3769, 1.7592, 1.3526, 2.7010, 3.7695, 3.4465, 3.9941], + device='cuda:2'), covar=tensor([0.1531, 0.3885, 0.3421, 0.2684, 0.0684, 0.0186, 0.0219, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0334, 0.0366, 0.0274, 0.0257, 0.0199, 0.0221, 0.0280], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 12:30:54,241 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:04,990 INFO [train.py:903] (2/4) Epoch 28, batch 5400, loss[loss=0.1812, simple_loss=0.2612, pruned_loss=0.05058, over 19827.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06056, over 3838918.23 frames. 
], batch size: 49, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:31:08,700 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1746, 1.7112, 1.4079, 1.0132, 1.5918, 1.0414, 1.1913, 1.6905], + device='cuda:2'), covar=tensor([0.0873, 0.0834, 0.1076, 0.1087, 0.0618, 0.1481, 0.0672, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0321, 0.0345, 0.0275, 0.0256, 0.0348, 0.0297, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:31:15,145 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:33,363 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 4.839e+02 5.999e+02 7.745e+02 2.007e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 12:31:44,914 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:56,155 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:07,665 INFO [train.py:903] (2/4) Epoch 28, batch 5450, loss[loss=0.1934, simple_loss=0.276, pruned_loss=0.0554, over 19277.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.0605, over 3838491.21 frames. ], batch size: 66, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:32:15,129 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189812.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:19,037 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-03 12:32:25,875 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.11 vs. limit=5.0 +2023-04-03 12:32:27,702 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:28,069 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-03 12:32:30,051 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189824.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:45,824 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:08,051 INFO [train.py:903] (2/4) Epoch 28, batch 5500, loss[loss=0.2075, simple_loss=0.2947, pruned_loss=0.06019, over 18320.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.0604, over 3832574.78 frames. ], batch size: 83, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:33:17,142 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:23,050 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 12:33:24,296 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-03 12:33:30,512 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 12:33:35,034 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 4.755e+02 5.876e+02 8.325e+02 1.733e+03, threshold=1.175e+03, percent-clipped=4.0 +2023-04-03 12:34:10,042 INFO [train.py:903] (2/4) Epoch 28, batch 5550, loss[loss=0.2366, simple_loss=0.3152, pruned_loss=0.07901, over 18673.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.0606, over 3827330.63 frames. ], batch size: 74, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:34:17,013 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 12:34:49,642 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5516, 1.3824, 1.4505, 1.8986, 1.4188, 1.7687, 1.8305, 1.4919], + device='cuda:2'), covar=tensor([0.0896, 0.0994, 0.1052, 0.0777, 0.0930, 0.0793, 0.0861, 0.0804], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0224, 0.0229, 0.0241, 0.0226, 0.0215, 0.0189, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 12:35:04,795 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6205, 1.4303, 1.4921, 1.8802, 1.3688, 1.9006, 1.8366, 1.6151], + device='cuda:2'), covar=tensor([0.0859, 0.0944, 0.1004, 0.0756, 0.0928, 0.0728, 0.0873, 0.0737], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0224, 0.0229, 0.0241, 0.0226, 0.0215, 0.0189, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 12:35:06,754 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 12:35:11,470 INFO [train.py:903] (2/4) Epoch 28, batch 5600, loss[loss=0.1621, simple_loss=0.2392, pruned_loss=0.04246, over 19730.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05996, over 3822646.18 frames. ], batch size: 45, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:35:40,028 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.597e+02 5.831e+02 7.652e+02 2.230e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 12:35:40,335 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:35:44,637 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:11,943 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190002.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:17,383 INFO [train.py:903] (2/4) Epoch 28, batch 5650, loss[loss=0.216, simple_loss=0.2952, pruned_loss=0.06838, over 19621.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05937, over 3831057.47 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:36:17,838 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:04,788 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 12:37:18,834 INFO [train.py:903] (2/4) Epoch 28, batch 5700, loss[loss=0.193, simple_loss=0.2848, pruned_loss=0.05061, over 19654.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2817, pruned_loss=0.05962, over 3814270.57 frames. 
], batch size: 58, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:37:31,559 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190066.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:45,568 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.283e+02 6.430e+02 7.892e+02 1.365e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-04-03 12:37:49,394 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190080.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:02,443 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,057 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,822 INFO [train.py:903] (2/4) Epoch 28, batch 5750, loss[loss=0.1735, simple_loss=0.2514, pruned_loss=0.04777, over 19773.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2813, pruned_loss=0.05919, over 3828334.64 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:38:24,219 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 12:38:32,498 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 12:38:32,781 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5823, 1.2123, 1.3664, 1.2554, 2.2626, 1.0864, 2.2010, 2.5347], + device='cuda:2'), covar=tensor([0.0684, 0.2784, 0.2967, 0.1727, 0.0802, 0.2116, 0.1064, 0.0458], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0379, 0.0398, 0.0354, 0.0382, 0.0358, 0.0398, 0.0419], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:38:35,190 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:37,062 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 12:39:03,305 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4215, 1.4767, 1.7158, 1.6229, 2.1962, 2.0507, 2.3521, 0.9986], + device='cuda:2'), covar=tensor([0.2601, 0.4460, 0.2666, 0.2122, 0.1707, 0.2393, 0.1477, 0.4864], + device='cuda:2'), in_proj_covar=tensor([0.0558, 0.0676, 0.0761, 0.0512, 0.0640, 0.0551, 0.0674, 0.0579], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:39:23,778 INFO [train.py:903] (2/4) Epoch 28, batch 5800, loss[loss=0.2237, simple_loss=0.3067, pruned_loss=0.0703, over 19654.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2822, pruned_loss=0.05954, over 3831111.48 frames. 
], batch size: 58, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:39:25,251 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190157.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:39:41,866 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190169.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:39:52,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.681e+02 5.738e+02 7.022e+02 1.341e+03, threshold=1.148e+03, percent-clipped=1.0 +2023-04-03 12:39:56,696 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:26,139 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190205.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:27,625 INFO [train.py:903] (2/4) Epoch 28, batch 5850, loss[loss=0.1829, simple_loss=0.2593, pruned_loss=0.0532, over 19395.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.282, pruned_loss=0.0597, over 3820347.68 frames. ], batch size: 47, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:02,570 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:29,991 INFO [train.py:903] (2/4) Epoch 28, batch 5900, loss[loss=0.189, simple_loss=0.2739, pruned_loss=0.05202, over 19746.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05928, over 3821286.14 frames. ], batch size: 63, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:32,315 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 12:41:33,973 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190259.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:45,071 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9206, 4.6072, 3.2881, 3.9905, 1.9288, 4.4793, 4.3716, 4.4996], + device='cuda:2'), covar=tensor([0.0466, 0.0927, 0.1816, 0.0834, 0.2950, 0.0663, 0.0915, 0.1155], + device='cuda:2'), in_proj_covar=tensor([0.0531, 0.0432, 0.0520, 0.0362, 0.0411, 0.0459, 0.0452, 0.0484], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:41:54,170 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 12:41:56,485 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.583e+02 5.674e+02 7.520e+02 1.844e+03, threshold=1.135e+03, percent-clipped=9.0 +2023-04-03 12:42:24,949 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5965, 4.1603, 4.2847, 4.2902, 1.7447, 4.0675, 3.5619, 4.0492], + device='cuda:2'), covar=tensor([0.1745, 0.0865, 0.0666, 0.0760, 0.6025, 0.1067, 0.0706, 0.1152], + device='cuda:2'), in_proj_covar=tensor([0.0822, 0.0786, 0.0998, 0.0878, 0.0865, 0.0763, 0.0588, 0.0928], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 12:42:32,647 INFO [train.py:903] (2/4) Epoch 28, batch 5950, loss[loss=0.1979, simple_loss=0.2834, pruned_loss=0.05623, over 19758.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.282, pruned_loss=0.05891, over 3823069.06 frames. 
], batch size: 54, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:43:08,018 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190334.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:43:34,395 INFO [train.py:903] (2/4) Epoch 28, batch 6000, loss[loss=0.1672, simple_loss=0.2516, pruned_loss=0.04145, over 19793.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2823, pruned_loss=0.05901, over 3822397.25 frames. ], batch size: 49, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:43:34,396 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 12:43:47,439 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.9974, 1.3498, 1.5294, 1.5751, 2.6163, 1.1571, 2.2348, 3.0213], + device='cuda:2'), covar=tensor([0.0592, 0.3183, 0.3261, 0.1917, 0.0789, 0.2577, 0.1166, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0378, 0.0398, 0.0354, 0.0383, 0.0358, 0.0398, 0.0419], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:43:48,499 INFO [train.py:937] (2/4) Epoch 28, validation: loss=0.1668, simple_loss=0.2663, pruned_loss=0.03368, over 944034.00 frames. +2023-04-03 12:43:48,500 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 12:44:08,738 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:15,158 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 4.800e+02 5.909e+02 8.119e+02 1.607e+03, threshold=1.182e+03, percent-clipped=4.0 +2023-04-03 12:44:41,357 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:50,436 INFO [train.py:903] (2/4) Epoch 28, batch 6050, loss[loss=0.2178, simple_loss=0.298, pruned_loss=0.06878, over 19538.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.281, pruned_loss=0.05863, over 3825038.75 frames. ], batch size: 61, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:04,559 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:28,503 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190437.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:50,722 INFO [train.py:903] (2/4) Epoch 28, batch 6100, loss[loss=0.2431, simple_loss=0.3193, pruned_loss=0.08344, over 19332.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05844, over 3826566.96 frames. 
], batch size: 66, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:57,883 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190461.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:59,861 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:17,943 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 4.836e+02 5.950e+02 7.277e+02 1.439e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 12:46:28,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:46,948 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190501.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:46:53,520 INFO [train.py:903] (2/4) Epoch 28, batch 6150, loss[loss=0.2514, simple_loss=0.3243, pruned_loss=0.0892, over 13480.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05933, over 3818983.71 frames. ], batch size: 136, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:47:02,055 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:47:23,090 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 12:47:56,356 INFO [train.py:903] (2/4) Epoch 28, batch 6200, loss[loss=0.1719, simple_loss=0.2532, pruned_loss=0.04525, over 19357.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2828, pruned_loss=0.05956, over 3815516.17 frames. ], batch size: 47, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:48:23,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.897e+02 5.800e+02 7.277e+02 1.783e+03, threshold=1.160e+03, percent-clipped=5.0 +2023-04-03 12:48:59,515 INFO [train.py:903] (2/4) Epoch 28, batch 6250, loss[loss=0.2078, simple_loss=0.2935, pruned_loss=0.06107, over 19593.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2828, pruned_loss=0.05937, over 3810604.89 frames. ], batch size: 61, lr: 2.90e-03, grad_scale: 16.0 +2023-04-03 12:49:03,311 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190609.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:49:11,015 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190616.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:49:26,303 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:27,623 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190628.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:31,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 12:50:00,482 INFO [train.py:903] (2/4) Epoch 28, batch 6300, loss[loss=0.1598, simple_loss=0.2376, pruned_loss=0.041, over 19316.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2835, pruned_loss=0.06, over 3811926.21 frames. 
], batch size: 44, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:50:28,874 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:50:29,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.803e+02 6.276e+02 7.910e+02 2.564e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 12:50:37,142 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3335, 1.8371, 2.1349, 3.0939, 2.0331, 2.4708, 2.5205, 2.2237], + device='cuda:2'), covar=tensor([0.0784, 0.0958, 0.0921, 0.0680, 0.0881, 0.0787, 0.0898, 0.0669], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0224, 0.0231, 0.0242, 0.0228, 0.0216, 0.0190, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 12:50:45,492 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6461, 1.7414, 1.9639, 1.8883, 1.5075, 1.8637, 1.9211, 1.8239], + device='cuda:2'), covar=tensor([0.4241, 0.3805, 0.2166, 0.2531, 0.3885, 0.2346, 0.5453, 0.3641], + device='cuda:2'), in_proj_covar=tensor([0.0938, 0.1016, 0.0745, 0.0953, 0.0913, 0.0855, 0.0859, 0.0809], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:50:47,587 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:51:04,099 INFO [train.py:903] (2/4) Epoch 28, batch 6350, loss[loss=0.1793, simple_loss=0.2724, pruned_loss=0.0431, over 19618.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2828, pruned_loss=0.05937, over 3823454.64 frames. ], batch size: 57, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:06,525 INFO [train.py:903] (2/4) Epoch 28, batch 6400, loss[loss=0.1895, simple_loss=0.2675, pruned_loss=0.05572, over 19843.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.282, pruned_loss=0.05909, over 3822117.93 frames. ], batch size: 52, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:13,583 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:52:36,095 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.755e+02 6.032e+02 8.092e+02 1.400e+03, threshold=1.206e+03, percent-clipped=2.0 +2023-04-03 12:52:52,725 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:53:05,589 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9783, 1.6740, 1.5756, 1.8416, 1.4642, 1.6344, 1.5050, 1.7506], + device='cuda:2'), covar=tensor([0.1043, 0.1345, 0.1517, 0.1118, 0.1449, 0.0576, 0.1630, 0.0763], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0359, 0.0317, 0.0257, 0.0305, 0.0254, 0.0321, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:53:08,674 INFO [train.py:903] (2/4) Epoch 28, batch 6450, loss[loss=0.1959, simple_loss=0.2836, pruned_loss=0.05414, over 19689.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.281, pruned_loss=0.05882, over 3815823.68 frames. ], batch size: 59, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:53:55,134 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 12:53:55,697 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4612, 1.4937, 1.7182, 1.6919, 2.5493, 2.1527, 2.8441, 1.0910], + device='cuda:2'), covar=tensor([0.2652, 0.4621, 0.2959, 0.2100, 0.1585, 0.2407, 0.1403, 0.5053], + device='cuda:2'), in_proj_covar=tensor([0.0555, 0.0672, 0.0758, 0.0511, 0.0638, 0.0548, 0.0672, 0.0576], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:54:10,281 INFO [train.py:903] (2/4) Epoch 28, batch 6500, loss[loss=0.1832, simple_loss=0.276, pruned_loss=0.04518, over 19732.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2814, pruned_loss=0.0587, over 3820209.84 frames. ], batch size: 63, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:54:14,078 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3619, 1.3660, 1.5519, 1.5574, 2.1649, 2.0332, 2.3432, 0.9571], + device='cuda:2'), covar=tensor([0.2561, 0.4499, 0.2867, 0.2072, 0.1608, 0.2245, 0.1403, 0.4915], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0673, 0.0759, 0.0511, 0.0639, 0.0549, 0.0673, 0.0577], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 12:54:17,918 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 12:54:29,617 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1792, 5.1802, 5.9482, 5.9884, 2.1305, 5.6310, 4.7936, 5.6313], + device='cuda:2'), covar=tensor([0.1678, 0.0841, 0.0584, 0.0586, 0.6115, 0.0867, 0.0607, 0.1079], + device='cuda:2'), in_proj_covar=tensor([0.0827, 0.0792, 0.1002, 0.0880, 0.0867, 0.0767, 0.0591, 0.0932], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 12:54:32,033 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190872.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:54:36,588 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:40,957 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.758e+02 5.991e+02 7.680e+02 1.999e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-03 12:54:47,169 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:55:01,677 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:55:11,661 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9709, 1.7290, 1.5475, 1.8277, 1.4969, 1.6281, 1.4673, 1.8304], + device='cuda:2'), covar=tensor([0.1059, 0.1259, 0.1724, 0.1195, 0.1468, 0.0620, 0.1675, 0.0802], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0358, 0.0316, 0.0256, 0.0303, 0.0253, 0.0319, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 12:55:13,651 INFO [train.py:903] (2/4) Epoch 28, batch 6550, loss[loss=0.2186, simple_loss=0.2884, pruned_loss=0.0744, over 19740.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2808, pruned_loss=0.05852, over 3829719.60 frames. 
], batch size: 46, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:55:18,347 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:14,197 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190953.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:56:17,092 INFO [train.py:903] (2/4) Epoch 28, batch 6600, loss[loss=0.1853, simple_loss=0.2649, pruned_loss=0.0528, over 19381.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2805, pruned_loss=0.05876, over 3814043.12 frames. ], batch size: 47, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:56:35,873 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190971.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:48,225 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.870e+02 6.050e+02 8.150e+02 2.542e+03, threshold=1.210e+03, percent-clipped=13.0 +2023-04-03 12:57:19,915 INFO [train.py:903] (2/4) Epoch 28, batch 6650, loss[loss=0.2293, simple_loss=0.2918, pruned_loss=0.08343, over 19786.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2812, pruned_loss=0.05934, over 3816486.28 frames. ], batch size: 49, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:57:27,632 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-03 12:57:59,356 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:14,528 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:22,334 INFO [train.py:903] (2/4) Epoch 28, batch 6700, loss[loss=0.1715, simple_loss=0.2526, pruned_loss=0.04521, over 19492.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05944, over 3804845.90 frames. ], batch size: 49, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:58:38,458 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191068.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:58:45,555 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191074.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:52,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.245e+02 6.198e+02 8.145e+02 2.088e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 12:58:58,995 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:59,064 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:59:05,705 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1698, 2.0756, 1.8703, 1.7746, 1.5567, 1.7176, 0.5986, 1.1875], + device='cuda:2'), covar=tensor([0.0683, 0.0741, 0.0613, 0.0977, 0.1377, 0.1209, 0.1495, 0.1251], + device='cuda:2'), in_proj_covar=tensor([0.0370, 0.0367, 0.0368, 0.0394, 0.0471, 0.0401, 0.0347, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 12:59:21,831 INFO [train.py:903] (2/4) Epoch 28, batch 6750, loss[loss=0.2183, simple_loss=0.2943, pruned_loss=0.07115, over 19854.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2821, pruned_loss=0.06002, over 3806777.34 frames. 
], batch size: 52, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:59:51,584 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:08,108 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3422, 1.9890, 1.9374, 2.1419, 1.8264, 1.8331, 1.7658, 2.1448], + device='cuda:2'), covar=tensor([0.0918, 0.1422, 0.1417, 0.1083, 0.1491, 0.0561, 0.1583, 0.0686], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0359, 0.0317, 0.0257, 0.0305, 0.0255, 0.0320, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:00:11,317 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:13,748 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:17,983 INFO [train.py:903] (2/4) Epoch 28, batch 6800, loss[loss=0.213, simple_loss=0.2933, pruned_loss=0.06634, over 19289.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2811, pruned_loss=0.05957, over 3797737.17 frames. ], batch size: 66, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 13:00:19,470 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:45,261 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.099e+02 6.292e+02 8.518e+02 1.501e+03, threshold=1.258e+03, percent-clipped=3.0 +2023-04-03 13:01:04,593 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 13:01:05,699 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 13:01:08,835 INFO [train.py:903] (2/4) Epoch 29, batch 0, loss[loss=0.1942, simple_loss=0.2834, pruned_loss=0.05252, over 19057.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2834, pruned_loss=0.05252, over 19057.00 frames. ], batch size: 69, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:01:08,835 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 13:01:20,504 INFO [train.py:937] (2/4) Epoch 29, validation: loss=0.1669, simple_loss=0.2669, pruned_loss=0.03339, over 944034.00 frames. +2023-04-03 13:01:20,505 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 13:01:31,748 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 13:01:43,671 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191203.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:02:01,249 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-03 13:02:19,663 INFO [train.py:903] (2/4) Epoch 29, batch 50, loss[loss=0.1684, simple_loss=0.2565, pruned_loss=0.04012, over 19661.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2807, pruned_loss=0.05882, over 874939.37 frames. ], batch size: 53, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:02:55,000 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-03 13:03:00,997 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2311, 1.3105, 1.2767, 1.0975, 1.1499, 1.1314, 0.0721, 0.3925], + device='cuda:2'), covar=tensor([0.0805, 0.0739, 0.0488, 0.0672, 0.1412, 0.0742, 0.1449, 0.1311], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0364, 0.0365, 0.0391, 0.0469, 0.0397, 0.0345, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 13:03:15,129 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.210e+02 6.215e+02 8.429e+02 1.754e+03, threshold=1.243e+03, percent-clipped=6.0 +2023-04-03 13:03:18,589 INFO [train.py:903] (2/4) Epoch 29, batch 100, loss[loss=0.1715, simple_loss=0.2519, pruned_loss=0.04558, over 19786.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2799, pruned_loss=0.05848, over 1538129.41 frames. ], batch size: 48, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:03:33,067 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 13:04:07,339 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191324.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:04:19,665 INFO [train.py:903] (2/4) Epoch 29, batch 150, loss[loss=0.1796, simple_loss=0.2585, pruned_loss=0.05037, over 19405.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.281, pruned_loss=0.05898, over 2051385.29 frames. ], batch size: 48, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:04:29,084 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191342.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:04:36,960 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191349.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:04:59,858 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:05,661 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4645, 1.5002, 1.3910, 1.2515, 1.2221, 1.2683, 0.2423, 0.6106], + device='cuda:2'), covar=tensor([0.0675, 0.0599, 0.0438, 0.0617, 0.1191, 0.0674, 0.1246, 0.1098], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0365, 0.0366, 0.0392, 0.0470, 0.0398, 0.0345, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 13:05:07,810 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7407, 4.1139, 4.5842, 4.6077, 2.0039, 4.2807, 3.7212, 4.0249], + device='cuda:2'), covar=tensor([0.2584, 0.1426, 0.0898, 0.1141, 0.7198, 0.2109, 0.1160, 0.1918], + device='cuda:2'), in_proj_covar=tensor([0.0825, 0.0786, 0.0997, 0.0880, 0.0865, 0.0765, 0.0590, 0.0933], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 13:05:15,367 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.272e+02 6.303e+02 7.742e+02 1.475e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-03 13:05:15,428 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-03 13:05:18,076 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:18,901 INFO [train.py:903] (2/4) Epoch 29, batch 200, loss[loss=0.2243, simple_loss=0.3095, pruned_loss=0.06956, over 19671.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2817, pruned_loss=0.05981, over 2430277.77 frames. ], batch size: 58, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:05:48,182 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:13,740 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191430.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:18,509 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:20,059 INFO [train.py:903] (2/4) Epoch 29, batch 250, loss[loss=0.1946, simple_loss=0.2778, pruned_loss=0.05573, over 19529.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.279, pruned_loss=0.05876, over 2758071.46 frames. ], batch size: 56, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:11,547 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4795, 1.4102, 1.4321, 1.8913, 1.4206, 1.7012, 1.6433, 1.5614], + device='cuda:2'), covar=tensor([0.0881, 0.0931, 0.1037, 0.0622, 0.0835, 0.0766, 0.0855, 0.0732], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0225, 0.0230, 0.0241, 0.0228, 0.0217, 0.0190, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 13:07:16,619 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.311e+02 6.503e+02 8.385e+02 2.446e+03, threshold=1.301e+03, percent-clipped=7.0 +2023-04-03 13:07:20,117 INFO [train.py:903] (2/4) Epoch 29, batch 300, loss[loss=0.2008, simple_loss=0.2785, pruned_loss=0.06157, over 19859.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2796, pruned_loss=0.05893, over 2992409.11 frames. ], batch size: 52, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:33,134 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:07:57,198 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-03 13:08:19,683 INFO [train.py:903] (2/4) Epoch 29, batch 350, loss[loss=0.1993, simple_loss=0.2857, pruned_loss=0.05645, over 19678.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2811, pruned_loss=0.05954, over 3182078.53 frames. ], batch size: 58, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:08:27,196 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 13:08:33,083 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191545.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:35,155 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:09:02,262 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. 
limit=5.0 +2023-04-03 13:09:16,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.772e+02 5.697e+02 6.938e+02 1.378e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 13:09:19,970 INFO [train.py:903] (2/4) Epoch 29, batch 400, loss[loss=0.2227, simple_loss=0.2982, pruned_loss=0.07362, over 18260.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2799, pruned_loss=0.05926, over 3318389.25 frames. ], batch size: 83, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:09:26,912 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1594, 2.2550, 2.5495, 2.9767, 2.1741, 2.7272, 2.5629, 2.3113], + device='cuda:2'), covar=tensor([0.4494, 0.4222, 0.1911, 0.2670, 0.4732, 0.2479, 0.4841, 0.3424], + device='cuda:2'), in_proj_covar=tensor([0.0941, 0.1020, 0.0746, 0.0955, 0.0918, 0.0859, 0.0863, 0.0810], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 13:09:50,399 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191609.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:19,940 INFO [train.py:903] (2/4) Epoch 29, batch 450, loss[loss=0.2539, simple_loss=0.3224, pruned_loss=0.09266, over 18170.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2809, pruned_loss=0.05952, over 3425385.82 frames. ], batch size: 83, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:10:55,267 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:57,255 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 13:10:58,393 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 13:11:17,385 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.702e+02 5.973e+02 7.083e+02 1.398e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 13:11:20,347 INFO [train.py:903] (2/4) Epoch 29, batch 500, loss[loss=0.2218, simple_loss=0.3026, pruned_loss=0.0705, over 19774.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2808, pruned_loss=0.05929, over 3524063.72 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:11:27,828 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 13:12:13,382 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:12:21,067 INFO [train.py:903] (2/4) Epoch 29, batch 550, loss[loss=0.2246, simple_loss=0.3093, pruned_loss=0.07, over 17344.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2821, pruned_loss=0.0599, over 3580082.74 frames. ], batch size: 101, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:19,218 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.419e+02 6.471e+02 8.968e+02 1.855e+03, threshold=1.294e+03, percent-clipped=10.0 +2023-04-03 13:13:22,374 INFO [train.py:903] (2/4) Epoch 29, batch 600, loss[loss=0.1911, simple_loss=0.2681, pruned_loss=0.05707, over 19623.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05984, over 3638894.44 frames. 
], batch size: 50, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:42,666 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:02,323 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 13:14:13,332 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191826.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:22,139 INFO [train.py:903] (2/4) Epoch 29, batch 650, loss[loss=0.1815, simple_loss=0.2577, pruned_loss=0.05262, over 19745.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2812, pruned_loss=0.05955, over 3692884.42 frames. ], batch size: 45, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:14:31,507 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:01,021 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:18,381 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.883e+02 6.286e+02 7.850e+02 2.365e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-03 13:15:21,594 INFO [train.py:903] (2/4) Epoch 29, batch 700, loss[loss=0.184, simple_loss=0.2784, pruned_loss=0.04479, over 19776.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2827, pruned_loss=0.06017, over 3710843.23 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:15:29,669 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191890.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:36,422 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2302, 1.2818, 1.2160, 1.0332, 1.0100, 1.0274, 0.1566, 0.3584], + device='cuda:2'), covar=tensor([0.0965, 0.0901, 0.0609, 0.0773, 0.1813, 0.0985, 0.1696, 0.1599], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0367, 0.0369, 0.0393, 0.0472, 0.0399, 0.0346, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 13:16:03,790 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191918.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:23,390 INFO [train.py:903] (2/4) Epoch 29, batch 750, loss[loss=0.1971, simple_loss=0.2669, pruned_loss=0.06363, over 19756.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05988, over 3740934.18 frames. 
], batch size: 45, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:16:29,930 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8757, 1.9809, 2.2565, 2.4182, 1.8207, 2.3400, 2.2766, 2.1154], + device='cuda:2'), covar=tensor([0.4285, 0.3962, 0.2076, 0.2386, 0.4144, 0.2188, 0.5206, 0.3472], + device='cuda:2'), in_proj_covar=tensor([0.0944, 0.1021, 0.0748, 0.0956, 0.0920, 0.0860, 0.0867, 0.0811], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 13:16:34,327 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:17:21,354 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.754e+02 5.591e+02 7.301e+02 1.189e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-03 13:17:23,700 INFO [train.py:903] (2/4) Epoch 29, batch 800, loss[loss=0.1864, simple_loss=0.269, pruned_loss=0.05188, over 19683.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06075, over 3750563.26 frames. ], batch size: 53, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:17:36,668 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 13:17:38,134 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.3907, 3.9934, 2.5946, 3.5113, 0.8930, 3.9559, 3.8394, 3.9344], + device='cuda:2'), covar=tensor([0.0662, 0.1024, 0.2027, 0.0936, 0.4059, 0.0823, 0.1001, 0.1202], + device='cuda:2'), in_proj_covar=tensor([0.0530, 0.0431, 0.0518, 0.0360, 0.0410, 0.0457, 0.0454, 0.0483], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:17:51,890 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.7595, 1.3391, 1.5325, 1.8362, 3.3205, 1.4623, 2.5516, 3.8055], + device='cuda:2'), covar=tensor([0.0501, 0.2984, 0.3084, 0.1763, 0.0662, 0.2350, 0.1249, 0.0230], + device='cuda:2'), in_proj_covar=tensor([0.0425, 0.0380, 0.0400, 0.0357, 0.0385, 0.0360, 0.0400, 0.0420], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:18:26,428 INFO [train.py:903] (2/4) Epoch 29, batch 850, loss[loss=0.1604, simple_loss=0.2452, pruned_loss=0.03775, over 19692.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2829, pruned_loss=0.05989, over 3769389.35 frames. ], batch size: 53, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:17,575 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 13:19:23,125 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 4.771e+02 5.573e+02 7.809e+02 2.111e+03, threshold=1.115e+03, percent-clipped=7.0 +2023-04-03 13:19:25,394 INFO [train.py:903] (2/4) Epoch 29, batch 900, loss[loss=0.227, simple_loss=0.3024, pruned_loss=0.07575, over 18037.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.06032, over 3788328.41 frames. ], batch size: 83, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:43,208 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:19:53,201 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-03 13:20:09,370 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192120.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:10,724 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9679, 2.0760, 2.3079, 2.6206, 2.0586, 2.4850, 2.2629, 2.1257], + device='cuda:2'), covar=tensor([0.4185, 0.3894, 0.1976, 0.2453, 0.4134, 0.2223, 0.5054, 0.3302], + device='cuda:2'), in_proj_covar=tensor([0.0949, 0.1027, 0.0753, 0.0962, 0.0923, 0.0864, 0.0870, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 13:20:12,939 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192123.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:26,582 INFO [train.py:903] (2/4) Epoch 29, batch 950, loss[loss=0.2164, simple_loss=0.3036, pruned_loss=0.06457, over 19648.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2823, pruned_loss=0.0595, over 3804682.05 frames. ], batch size: 60, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:20:30,182 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 13:21:13,696 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:21:24,650 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.272e+02 6.130e+02 7.632e+02 1.213e+03, threshold=1.226e+03, percent-clipped=2.0 +2023-04-03 13:21:27,730 INFO [train.py:903] (2/4) Epoch 29, batch 1000, loss[loss=0.1986, simple_loss=0.2932, pruned_loss=0.05203, over 19703.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2821, pruned_loss=0.05908, over 3806039.93 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:21,213 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4119, 2.0152, 1.5785, 1.4184, 1.8064, 1.3622, 1.3207, 1.7866], + device='cuda:2'), covar=tensor([0.0940, 0.0846, 0.1048, 0.0929, 0.0645, 0.1286, 0.0679, 0.0482], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0318, 0.0342, 0.0273, 0.0252, 0.0345, 0.0293, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:22:23,177 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 13:22:27,655 INFO [train.py:903] (2/4) Epoch 29, batch 1050, loss[loss=0.1948, simple_loss=0.2817, pruned_loss=0.05391, over 19684.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2828, pruned_loss=0.05907, over 3815399.04 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:31,131 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192237.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:34,320 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:55,084 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 13:23:02,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 13:23:25,657 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 4.755e+02 5.709e+02 7.263e+02 1.610e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 13:23:28,076 INFO [train.py:903] (2/4) Epoch 29, batch 1100, loss[loss=0.2058, simple_loss=0.2914, pruned_loss=0.06008, over 19791.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2823, pruned_loss=0.05865, over 3818860.44 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:29,304 INFO [train.py:903] (2/4) Epoch 29, batch 1150, loss[loss=0.1662, simple_loss=0.2424, pruned_loss=0.04501, over 19744.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2829, pruned_loss=0.05893, over 3822901.34 frames. ], batch size: 46, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:36,037 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0787, 1.7712, 1.9892, 2.8879, 2.0814, 2.3771, 2.4940, 2.1687], + device='cuda:2'), covar=tensor([0.0810, 0.0918, 0.0958, 0.0661, 0.0827, 0.0755, 0.0843, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0224, 0.0230, 0.0241, 0.0228, 0.0216, 0.0189, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 13:25:27,744 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 4.839e+02 5.803e+02 7.271e+02 1.538e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 13:25:30,848 INFO [train.py:903] (2/4) Epoch 29, batch 1200, loss[loss=0.2159, simple_loss=0.2984, pruned_loss=0.06667, over 18706.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2826, pruned_loss=0.05861, over 3836099.71 frames. ], batch size: 74, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:01,116 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:01,924 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 13:26:10,313 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:30,221 INFO [train.py:903] (2/4) Epoch 29, batch 1250, loss[loss=0.1786, simple_loss=0.2687, pruned_loss=0.04423, over 19543.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05888, over 3835555.37 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:50,100 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192451.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:07,165 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:28,114 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.043e+02 5.075e+02 6.202e+02 7.494e+02 1.680e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 13:27:30,148 INFO [train.py:903] (2/4) Epoch 29, batch 1300, loss[loss=0.1769, simple_loss=0.2609, pruned_loss=0.04651, over 19600.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2815, pruned_loss=0.05887, over 3832030.94 frames. ], batch size: 57, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:28:07,554 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-03 13:28:10,144 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:28:30,906 INFO [train.py:903] (2/4) Epoch 29, batch 1350, loss[loss=0.1808, simple_loss=0.2637, pruned_loss=0.04894, over 19780.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.05933, over 3828410.49 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:25,901 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192579.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:27,890 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:28,924 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.533e+02 7.774e+02 1.028e+03 2.542e+03, threshold=1.555e+03, percent-clipped=13.0 +2023-04-03 13:29:31,166 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:32,191 INFO [train.py:903] (2/4) Epoch 29, batch 1400, loss[loss=0.1924, simple_loss=0.2628, pruned_loss=0.06102, over 18634.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2809, pruned_loss=0.05907, over 3825295.29 frames. ], batch size: 41, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:40,670 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1746, 1.9774, 1.7958, 2.0866, 1.8266, 1.7691, 1.6766, 2.0131], + device='cuda:2'), covar=tensor([0.1048, 0.1378, 0.1526, 0.1060, 0.1400, 0.0593, 0.1569, 0.0745], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0361, 0.0320, 0.0259, 0.0307, 0.0257, 0.0323, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 13:30:30,464 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:32,441 INFO [train.py:903] (2/4) Epoch 29, batch 1450, loss[loss=0.1889, simple_loss=0.2652, pruned_loss=0.05633, over 19464.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2812, pruned_loss=0.05892, over 3811291.67 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:30:32,475 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 13:30:32,756 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:48,514 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.15 vs. limit=5.0 +2023-04-03 13:31:03,981 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-03 13:31:10,952 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5982, 4.0977, 4.2932, 4.2865, 1.6566, 4.0565, 3.4942, 4.0501], + device='cuda:2'), covar=tensor([0.1747, 0.0862, 0.0653, 0.0718, 0.6179, 0.0915, 0.0729, 0.1128], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0785, 0.0995, 0.0875, 0.0866, 0.0765, 0.0588, 0.0929], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 13:31:29,874 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.834e+02 5.734e+02 7.377e+02 1.406e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 13:31:32,942 INFO [train.py:903] (2/4) Epoch 29, batch 1500, loss[loss=0.1841, simple_loss=0.2722, pruned_loss=0.048, over 19755.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05948, over 3810511.41 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:31:41,359 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192691.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:47,441 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192696.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:50,546 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:32,341 INFO [train.py:903] (2/4) Epoch 29, batch 1550, loss[loss=0.2098, simple_loss=0.2818, pruned_loss=0.06892, over 19355.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.28, pruned_loss=0.0584, over 3828729.06 frames. ], batch size: 47, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:32:32,692 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3378, 1.2236, 1.7224, 1.3932, 2.6632, 3.6460, 3.3929, 3.8875], + device='cuda:2'), covar=tensor([0.1664, 0.4170, 0.3619, 0.2607, 0.0643, 0.0207, 0.0207, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0334, 0.0367, 0.0273, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 13:32:56,558 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:57,533 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:05,624 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:07,251 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.38 vs. limit=5.0 +2023-04-03 13:33:13,966 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 13:33:31,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 5.130e+02 6.287e+02 7.695e+02 1.691e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-03 13:33:34,856 INFO [train.py:903] (2/4) Epoch 29, batch 1600, loss[loss=0.1904, simple_loss=0.2799, pruned_loss=0.05048, over 19610.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2793, pruned_loss=0.05772, over 3831447.36 frames. 
], batch size: 57, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:33:47,560 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:57,595 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 13:34:33,311 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4380, 1.6317, 2.0960, 1.8503, 3.2239, 4.1266, 4.0035, 4.4945], + device='cuda:2'), covar=tensor([0.1677, 0.3751, 0.3304, 0.2363, 0.0697, 0.0310, 0.0188, 0.0261], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0334, 0.0366, 0.0273, 0.0256, 0.0198, 0.0220, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 13:34:34,142 INFO [train.py:903] (2/4) Epoch 29, batch 1650, loss[loss=0.1973, simple_loss=0.2886, pruned_loss=0.05297, over 17328.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2802, pruned_loss=0.05844, over 3829131.81 frames. ], batch size: 101, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:34:35,764 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:34:51,828 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192849.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:35:04,840 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:15,840 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:23,825 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:30,386 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 4.944e+02 5.872e+02 7.824e+02 1.796e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 13:35:32,717 INFO [train.py:903] (2/4) Epoch 29, batch 1700, loss[loss=0.2242, simple_loss=0.3028, pruned_loss=0.07282, over 19596.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2814, pruned_loss=0.05963, over 3833922.33 frames. ], batch size: 57, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:35:38,381 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:01,160 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6583, 3.3703, 2.5700, 2.9605, 1.4047, 3.2889, 3.1750, 3.2820], + device='cuda:2'), covar=tensor([0.0910, 0.1107, 0.1877, 0.0936, 0.2984, 0.0891, 0.1100, 0.1357], + device='cuda:2'), in_proj_covar=tensor([0.0527, 0.0427, 0.0515, 0.0357, 0.0407, 0.0454, 0.0449, 0.0478], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:36:04,831 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:08,239 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:13,383 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-03 13:36:32,303 INFO [train.py:903] (2/4) Epoch 29, batch 1750, loss[loss=0.1699, simple_loss=0.2559, pruned_loss=0.04199, over 19660.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2817, pruned_loss=0.05991, over 3821981.55 frames. ], batch size: 53, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:36:55,813 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:58,007 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:24,852 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:25,783 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:27,176 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:31,715 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 4.741e+02 6.117e+02 7.797e+02 1.758e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 13:37:34,998 INFO [train.py:903] (2/4) Epoch 29, batch 1800, loss[loss=0.1971, simple_loss=0.2854, pruned_loss=0.05435, over 19662.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2817, pruned_loss=0.05982, over 3824456.25 frames. ], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:29,366 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 13:38:33,981 INFO [train.py:903] (2/4) Epoch 29, batch 1850, loss[loss=0.1841, simple_loss=0.2644, pruned_loss=0.05187, over 19401.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2814, pruned_loss=0.0595, over 3820162.72 frames. ], batch size: 48, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:35,292 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:04,666 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193059.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:05,539 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 13:39:32,459 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.498e+02 4.794e+02 5.817e+02 7.567e+02 2.266e+03, threshold=1.163e+03, percent-clipped=4.0 +2023-04-03 13:39:34,765 INFO [train.py:903] (2/4) Epoch 29, batch 1900, loss[loss=0.2156, simple_loss=0.2998, pruned_loss=0.0657, over 19540.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2819, pruned_loss=0.05949, over 3815447.32 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:39:45,979 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:49,061 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 13:39:51,212 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:55,328 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 13:40:18,685 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 13:40:25,418 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:33,594 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:35,432 INFO [train.py:903] (2/4) Epoch 29, batch 1950, loss[loss=0.2181, simple_loss=0.3001, pruned_loss=0.06811, over 19764.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05974, over 3813574.85 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:40:56,441 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:56,480 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:04,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:15,375 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:34,351 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.40 vs. limit=2.0 +2023-04-03 13:41:34,710 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.104e+02 4.788e+02 5.811e+02 7.006e+02 1.429e+03, threshold=1.162e+03, percent-clipped=3.0 +2023-04-03 13:41:37,846 INFO [train.py:903] (2/4) Epoch 29, batch 2000, loss[loss=0.206, simple_loss=0.2903, pruned_loss=0.06087, over 18848.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05949, over 3806125.74 frames. ], batch size: 74, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:41:46,070 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:48,029 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193193.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:42:10,549 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:31,346 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 13:42:31,630 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193229.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:37,086 INFO [train.py:903] (2/4) Epoch 29, batch 2050, loss[loss=0.1933, simple_loss=0.2823, pruned_loss=0.05214, over 19778.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2827, pruned_loss=0.05955, over 3810783.67 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:42:50,735 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 13:42:50,764 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-03 13:43:00,919 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6965, 1.5384, 1.2577, 1.6251, 1.3956, 1.3032, 1.2922, 1.4920], + device='cuda:2'), covar=tensor([0.1251, 0.1457, 0.1996, 0.1206, 0.1487, 0.1088, 0.1954, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0363, 0.0321, 0.0260, 0.0310, 0.0259, 0.0324, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 13:43:13,518 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 13:43:34,586 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.912e+02 6.403e+02 8.614e+02 2.001e+03, threshold=1.281e+03, percent-clipped=8.0 +2023-04-03 13:43:36,905 INFO [train.py:903] (2/4) Epoch 29, batch 2100, loss[loss=0.1859, simple_loss=0.2765, pruned_loss=0.04766, over 19539.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2829, pruned_loss=0.05961, over 3819616.27 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:43:59,195 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:44:07,586 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 13:44:07,940 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193308.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:44:25,615 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 13:44:37,260 INFO [train.py:903] (2/4) Epoch 29, batch 2150, loss[loss=0.1856, simple_loss=0.2755, pruned_loss=0.04787, over 19727.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2825, pruned_loss=0.05957, over 3807867.26 frames. ], batch size: 63, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:44:56,823 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193349.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:25,842 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:35,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.242e+02 6.391e+02 8.913e+02 2.261e+03, threshold=1.278e+03, percent-clipped=9.0 +2023-04-03 13:45:37,409 INFO [train.py:903] (2/4) Epoch 29, batch 2200, loss[loss=0.1827, simple_loss=0.2723, pruned_loss=0.04653, over 19780.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05949, over 3820615.47 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:00,712 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:04,416 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:36,067 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:39,105 INFO [train.py:903] (2/4) Epoch 29, batch 2250, loss[loss=0.1997, simple_loss=0.2858, pruned_loss=0.05681, over 19669.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2812, pruned_loss=0.05906, over 3825638.84 frames. 
], batch size: 58, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:45,267 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:15,246 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5800, 1.6872, 1.8410, 1.8496, 2.5804, 2.3075, 2.6455, 1.1300], + device='cuda:2'), covar=tensor([0.2474, 0.4387, 0.2861, 0.1974, 0.1444, 0.2221, 0.1435, 0.4732], + device='cuda:2'), in_proj_covar=tensor([0.0554, 0.0672, 0.0761, 0.0511, 0.0637, 0.0548, 0.0668, 0.0574], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 13:47:20,675 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:25,807 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193472.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:47:36,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 4.884e+02 5.894e+02 7.303e+02 1.661e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 13:47:38,737 INFO [train.py:903] (2/4) Epoch 29, batch 2300, loss[loss=0.1883, simple_loss=0.2743, pruned_loss=0.05115, over 19532.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2805, pruned_loss=0.05877, over 3830705.40 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:47:50,437 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:53,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 13:48:21,002 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193518.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:48:39,107 INFO [train.py:903] (2/4) Epoch 29, batch 2350, loss[loss=0.2265, simple_loss=0.3078, pruned_loss=0.07259, over 13098.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2813, pruned_loss=0.05922, over 3796593.01 frames. ], batch size: 136, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:16,338 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193564.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:49:20,444 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 13:49:27,320 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:49:33,404 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5914, 1.6230, 1.8261, 1.8344, 2.5454, 2.3120, 2.7307, 1.1974], + device='cuda:2'), covar=tensor([0.2436, 0.4142, 0.2689, 0.1899, 0.1590, 0.2155, 0.1519, 0.4583], + device='cuda:2'), in_proj_covar=tensor([0.0554, 0.0671, 0.0760, 0.0510, 0.0636, 0.0548, 0.0668, 0.0573], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 13:49:36,480 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-03 13:49:37,594 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.546e+02 5.901e+02 7.828e+02 1.716e+03, threshold=1.180e+03, percent-clipped=11.0 +2023-04-03 13:49:40,757 INFO [train.py:903] (2/4) Epoch 29, batch 2400, loss[loss=0.1808, simple_loss=0.2624, pruned_loss=0.0496, over 19393.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05963, over 3804309.15 frames. ], batch size: 47, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:47,425 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193589.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:50:36,643 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 13:50:40,454 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:50:41,249 INFO [train.py:903] (2/4) Epoch 29, batch 2450, loss[loss=0.2086, simple_loss=0.2861, pruned_loss=0.06551, over 19625.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06009, over 3804040.42 frames. ], batch size: 50, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:50:54,654 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:39,133 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.882e+02 5.957e+02 8.043e+02 1.393e+03, threshold=1.191e+03, percent-clipped=5.0 +2023-04-03 13:51:41,316 INFO [train.py:903] (2/4) Epoch 29, batch 2500, loss[loss=0.1975, simple_loss=0.2782, pruned_loss=0.05842, over 19732.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05954, over 3807917.57 frames. ], batch size: 51, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:51:45,939 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193688.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:47,057 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5874, 2.2488, 1.7286, 1.5155, 2.0864, 1.4376, 1.3673, 1.9895], + device='cuda:2'), covar=tensor([0.1133, 0.0775, 0.0994, 0.0923, 0.0575, 0.1249, 0.0860, 0.0579], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0341, 0.0273, 0.0253, 0.0346, 0.0293, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:51:54,121 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 13:52:17,913 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 13:52:40,706 INFO [train.py:903] (2/4) Epoch 29, batch 2550, loss[loss=0.1987, simple_loss=0.2841, pruned_loss=0.05663, over 19619.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2824, pruned_loss=0.05939, over 3798540.44 frames. ], batch size: 57, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:53:06,575 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-03 13:53:15,363 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:30,065 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:35,070 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 13:53:38,228 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. limit=5.0 +2023-04-03 13:53:40,999 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 5.186e+02 6.141e+02 8.140e+02 1.584e+03, threshold=1.228e+03, percent-clipped=10.0 +2023-04-03 13:53:41,167 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:42,121 INFO [train.py:903] (2/4) Epoch 29, batch 2600, loss[loss=0.1897, simple_loss=0.262, pruned_loss=0.05873, over 17345.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05926, over 3815287.27 frames. ], batch size: 38, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:02,325 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193799.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:54:22,066 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193816.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:54:44,188 INFO [train.py:903] (2/4) Epoch 29, batch 2650, loss[loss=0.2001, simple_loss=0.285, pruned_loss=0.05764, over 19513.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2812, pruned_loss=0.05874, over 3822170.02 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:44,613 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8988, 1.3435, 1.0771, 0.9932, 1.1905, 1.0396, 0.9658, 1.2750], + device='cuda:2'), covar=tensor([0.0661, 0.0894, 0.1196, 0.0820, 0.0613, 0.1334, 0.0629, 0.0542], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0317, 0.0339, 0.0271, 0.0251, 0.0344, 0.0290, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:54:47,974 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5943, 1.2853, 1.3982, 1.1946, 2.2686, 1.0570, 2.1217, 2.5748], + device='cuda:2'), covar=tensor([0.0777, 0.2798, 0.2912, 0.1839, 0.0874, 0.2191, 0.1105, 0.0485], + device='cuda:2'), in_proj_covar=tensor([0.0423, 0.0378, 0.0398, 0.0353, 0.0381, 0.0358, 0.0395, 0.0417], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:54:53,414 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.7559, 3.0492, 3.2490, 3.2449, 1.7115, 3.0394, 2.7411, 3.0717], + device='cuda:2'), covar=tensor([0.1535, 0.2881, 0.0684, 0.0841, 0.4688, 0.1552, 0.0669, 0.1030], + device='cuda:2'), in_proj_covar=tensor([0.0825, 0.0792, 0.1002, 0.0883, 0.0872, 0.0766, 0.0592, 0.0929], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 13:54:59,846 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 13:55:18,920 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-03 13:55:43,539 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 4.600e+02 5.538e+02 7.187e+02 1.315e+03, threshold=1.108e+03, percent-clipped=1.0 +2023-04-03 13:55:44,725 INFO [train.py:903] (2/4) Epoch 29, batch 2700, loss[loss=0.2403, simple_loss=0.3211, pruned_loss=0.07971, over 19289.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05936, over 3820954.75 frames. ], batch size: 66, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:55:48,422 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3264, 2.3513, 2.1962, 2.6164, 2.1457, 2.0711, 2.0804, 2.3290], + device='cuda:2'), covar=tensor([0.1188, 0.1744, 0.1570, 0.1170, 0.1588, 0.0632, 0.1521, 0.0820], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0361, 0.0321, 0.0258, 0.0308, 0.0258, 0.0323, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 13:56:00,592 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:05,905 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193902.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:42,142 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193931.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:56:45,192 INFO [train.py:903] (2/4) Epoch 29, batch 2750, loss[loss=0.1869, simple_loss=0.2608, pruned_loss=0.05651, over 19859.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05943, over 3830970.67 frames. ], batch size: 52, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:56:58,056 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:27,666 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8652, 4.4278, 2.8486, 3.8834, 1.2804, 4.3530, 4.2800, 4.3521], + device='cuda:2'), covar=tensor([0.0545, 0.0988, 0.1965, 0.0895, 0.3836, 0.0654, 0.0938, 0.1024], + device='cuda:2'), in_proj_covar=tensor([0.0535, 0.0432, 0.0523, 0.0359, 0.0414, 0.0460, 0.0455, 0.0487], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:57:27,871 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193969.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:36,660 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:44,120 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.680e+02 5.858e+02 7.532e+02 1.982e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-03 13:57:45,311 INFO [train.py:903] (2/4) Epoch 29, batch 2800, loss[loss=0.16, simple_loss=0.2462, pruned_loss=0.03693, over 19384.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.283, pruned_loss=0.05984, over 3833349.90 frames. ], batch size: 48, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:26,659 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:58:48,021 INFO [train.py:903] (2/4) Epoch 29, batch 2850, loss[loss=0.2007, simple_loss=0.2764, pruned_loss=0.06252, over 19738.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.0593, over 3824428.52 frames. 
], batch size: 47, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:58,378 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:01,785 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3188, 1.9838, 1.5615, 1.1504, 1.9882, 1.1196, 1.1033, 1.8619], + device='cuda:2'), covar=tensor([0.1123, 0.0861, 0.1140, 0.1148, 0.0602, 0.1423, 0.0907, 0.0514], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0342, 0.0273, 0.0253, 0.0345, 0.0292, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 13:59:45,131 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 13:59:47,409 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.652e+02 5.661e+02 7.187e+02 2.087e+03, threshold=1.132e+03, percent-clipped=4.0 +2023-04-03 13:59:48,549 INFO [train.py:903] (2/4) Epoch 29, batch 2900, loss[loss=0.1911, simple_loss=0.2737, pruned_loss=0.05425, over 19829.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05935, over 3820783.59 frames. ], batch size: 52, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:59:55,304 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:57,642 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:00:47,046 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4600, 1.3058, 1.2623, 1.4985, 1.2207, 1.2982, 1.2574, 1.4266], + device='cuda:2'), covar=tensor([0.0950, 0.1271, 0.1227, 0.0772, 0.1175, 0.0550, 0.1242, 0.0652], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0362, 0.0322, 0.0260, 0.0310, 0.0260, 0.0325, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 14:00:47,871 INFO [train.py:903] (2/4) Epoch 29, batch 2950, loss[loss=0.2487, simple_loss=0.3177, pruned_loss=0.08984, over 13526.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.05971, over 3830050.79 frames. ], batch size: 136, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:13,338 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:31,687 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-03 14:01:42,877 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:46,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 4.745e+02 5.839e+02 7.587e+02 1.918e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-04-03 14:01:48,092 INFO [train.py:903] (2/4) Epoch 29, batch 3000, loss[loss=0.2144, simple_loss=0.293, pruned_loss=0.06786, over 19703.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2842, pruned_loss=0.06022, over 3828424.65 frames. ], batch size: 60, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:48,093 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 14:02:01,081 INFO [train.py:937] (2/4) Epoch 29, validation: loss=0.1668, simple_loss=0.2661, pruned_loss=0.03375, over 944034.00 frames. 
+2023-04-03 14:02:01,082 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 14:02:02,355 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 14:02:05,067 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194187.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:02:36,803 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194212.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:03:01,746 INFO [train.py:903] (2/4) Epoch 29, batch 3050, loss[loss=0.181, simple_loss=0.2626, pruned_loss=0.04965, over 19785.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2827, pruned_loss=0.05933, over 3833152.60 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:03:17,763 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194246.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:04:02,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 5.546e+02 6.449e+02 7.988e+02 2.570e+03, threshold=1.290e+03, percent-clipped=8.0 +2023-04-03 14:04:03,997 INFO [train.py:903] (2/4) Epoch 29, batch 3100, loss[loss=0.1935, simple_loss=0.2855, pruned_loss=0.05075, over 19667.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06005, over 3827507.74 frames. ], batch size: 55, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:04:33,736 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-03 14:04:41,066 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3577, 1.3762, 1.5209, 1.5095, 1.7480, 1.8340, 1.7859, 0.6623], + device='cuda:2'), covar=tensor([0.2640, 0.4584, 0.2857, 0.2069, 0.1845, 0.2500, 0.1602, 0.5244], + device='cuda:2'), in_proj_covar=tensor([0.0557, 0.0674, 0.0762, 0.0511, 0.0638, 0.0549, 0.0672, 0.0576], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:05:04,147 INFO [train.py:903] (2/4) Epoch 29, batch 3150, loss[loss=0.2411, simple_loss=0.3291, pruned_loss=0.07657, over 19705.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.0601, over 3821738.34 frames. ], batch size: 59, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:06,667 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:07,815 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3738, 1.4584, 1.6361, 1.5763, 2.2780, 2.0511, 2.3303, 1.0067], + device='cuda:2'), covar=tensor([0.2669, 0.4614, 0.2849, 0.2103, 0.1592, 0.2307, 0.1476, 0.4898], + device='cuda:2'), in_proj_covar=tensor([0.0557, 0.0674, 0.0762, 0.0511, 0.0638, 0.0549, 0.0672, 0.0576], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:05:19,991 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:27,482 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 14:05:36,331 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:41,124 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-03 14:05:49,246 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194371.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:51,746 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:59,659 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194380.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:06:02,722 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.902e+02 5.790e+02 7.151e+02 3.080e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-03 14:06:03,882 INFO [train.py:903] (2/4) Epoch 29, batch 3200, loss[loss=0.2301, simple_loss=0.312, pruned_loss=0.07412, over 19313.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2841, pruned_loss=0.06085, over 3793751.20 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:04,910 INFO [train.py:903] (2/4) Epoch 29, batch 3250, loss[loss=0.1845, simple_loss=0.2648, pruned_loss=0.05212, over 19867.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06031, over 3797206.96 frames. ], batch size: 52, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:05,057 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194434.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:07:29,393 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9684, 2.0196, 2.3677, 2.5552, 1.9571, 2.5237, 2.3689, 2.1096], + device='cuda:2'), covar=tensor([0.4395, 0.4112, 0.1971, 0.2631, 0.4422, 0.2362, 0.5091, 0.3618], + device='cuda:2'), in_proj_covar=tensor([0.0944, 0.1023, 0.0747, 0.0956, 0.0920, 0.0862, 0.0862, 0.0813], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:08:04,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.916e+02 6.594e+02 9.076e+02 2.390e+03, threshold=1.319e+03, percent-clipped=9.0 +2023-04-03 14:08:05,467 INFO [train.py:903] (2/4) Epoch 29, batch 3300, loss[loss=0.189, simple_loss=0.276, pruned_loss=0.051, over 19537.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2832, pruned_loss=0.05967, over 3813911.62 frames. ], batch size: 54, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:08:09,431 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 14:09:07,372 INFO [train.py:903] (2/4) Epoch 29, batch 3350, loss[loss=0.2367, simple_loss=0.3184, pruned_loss=0.07747, over 19672.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.05977, over 3808166.57 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:09:24,568 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:10:06,126 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.166e+02 6.518e+02 8.363e+02 1.379e+03, threshold=1.304e+03, percent-clipped=1.0 +2023-04-03 14:10:07,289 INFO [train.py:903] (2/4) Epoch 29, batch 3400, loss[loss=0.1638, simple_loss=0.2426, pruned_loss=0.04247, over 19404.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2839, pruned_loss=0.06037, over 3811761.23 frames. 
], batch size: 48, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:10:48,248 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194617.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:07,040 INFO [train.py:903] (2/4) Epoch 29, batch 3450, loss[loss=0.2428, simple_loss=0.3224, pruned_loss=0.08158, over 19667.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2843, pruned_loss=0.06072, over 3809777.31 frames. ], batch size: 60, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:11:09,261 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 14:11:17,038 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:29,957 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:38,587 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8211, 1.8140, 1.9985, 1.9457, 2.6384, 2.3493, 2.7142, 1.7083], + device='cuda:2'), covar=tensor([0.2016, 0.3524, 0.2342, 0.1677, 0.1293, 0.1948, 0.1285, 0.4051], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0674, 0.0762, 0.0511, 0.0636, 0.0547, 0.0671, 0.0575], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:11:39,792 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3475, 2.0063, 1.5397, 1.4055, 1.8538, 1.2940, 1.2962, 1.8282], + device='cuda:2'), covar=tensor([0.1062, 0.0846, 0.1183, 0.0955, 0.0584, 0.1398, 0.0733, 0.0494], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0320, 0.0343, 0.0273, 0.0253, 0.0347, 0.0290, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:12:02,514 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194680.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:05,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.627e+02 5.712e+02 7.363e+02 1.612e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 14:12:06,671 INFO [train.py:903] (2/4) Epoch 29, batch 3500, loss[loss=0.2322, simple_loss=0.3086, pruned_loss=0.07787, over 19287.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.05999, over 3819049.57 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:12:11,447 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0768, 1.9493, 1.8110, 2.0366, 1.7772, 1.7603, 1.6516, 1.9464], + device='cuda:2'), covar=tensor([0.1114, 0.1505, 0.1543, 0.1120, 0.1451, 0.0615, 0.1596, 0.0763], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0362, 0.0322, 0.0260, 0.0310, 0.0260, 0.0324, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 14:12:43,652 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194715.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:54,406 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194724.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:13:06,876 INFO [train.py:903] (2/4) Epoch 29, batch 3550, loss[loss=0.2274, simple_loss=0.3054, pruned_loss=0.07468, over 19603.00 frames. 
], tot_loss[loss=0.2014, simple_loss=0.2831, pruned_loss=0.05987, over 3815914.13 frames. ], batch size: 57, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:13:49,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. limit=5.0 +2023-04-03 14:14:05,415 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.1774, 5.6247, 3.2924, 4.9064, 1.0477, 5.7936, 5.5634, 5.7403], + device='cuda:2'), covar=tensor([0.0334, 0.0759, 0.1634, 0.0698, 0.4135, 0.0478, 0.0791, 0.1002], + device='cuda:2'), in_proj_covar=tensor([0.0529, 0.0426, 0.0516, 0.0355, 0.0409, 0.0456, 0.0451, 0.0482], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:14:06,359 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.806e+02 5.826e+02 6.989e+02 1.690e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 14:14:07,572 INFO [train.py:903] (2/4) Epoch 29, batch 3600, loss[loss=0.1508, simple_loss=0.2321, pruned_loss=0.03473, over 19076.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05968, over 3809696.76 frames. ], batch size: 42, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:20,130 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:14:33,795 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194805.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,733 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,774 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:06,259 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 14:15:06,688 INFO [train.py:903] (2/4) Epoch 29, batch 3650, loss[loss=0.2285, simple_loss=0.3048, pruned_loss=0.07609, over 19537.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2835, pruned_loss=0.06004, over 3820601.93 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:15:12,780 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194839.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:15:45,380 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:16:05,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.663e+02 5.135e+02 6.261e+02 7.520e+02 2.080e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 14:16:06,488 INFO [train.py:903] (2/4) Epoch 29, batch 3700, loss[loss=0.2338, simple_loss=0.312, pruned_loss=0.07778, over 19529.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2838, pruned_loss=0.06006, over 3826868.75 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:17:08,169 INFO [train.py:903] (2/4) Epoch 29, batch 3750, loss[loss=0.1932, simple_loss=0.2761, pruned_loss=0.05519, over 18188.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06026, over 3832491.56 frames. 
], batch size: 83, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:06,324 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 4.741e+02 6.225e+02 7.602e+02 2.236e+03, threshold=1.245e+03, percent-clipped=10.0 +2023-04-03 14:18:07,440 INFO [train.py:903] (2/4) Epoch 29, batch 3800, loss[loss=0.2, simple_loss=0.2874, pruned_loss=0.05634, over 19661.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2831, pruned_loss=0.05985, over 3833947.02 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:21,200 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:18:34,667 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 14:19:01,228 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195029.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:06,523 INFO [train.py:903] (2/4) Epoch 29, batch 3850, loss[loss=0.2513, simple_loss=0.3254, pruned_loss=0.08859, over 18083.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2842, pruned_loss=0.06041, over 3831117.78 frames. ], batch size: 83, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:19:27,237 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195051.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:41,476 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195062.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:19:44,111 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 14:19:56,969 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195076.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:06,400 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.743e+02 5.822e+02 7.676e+02 1.767e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-04-03 14:20:06,419 INFO [train.py:903] (2/4) Epoch 29, batch 3900, loss[loss=0.1766, simple_loss=0.2543, pruned_loss=0.04946, over 19775.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.284, pruned_loss=0.06023, over 3834929.47 frames. 
], batch size: 47, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:20:08,998 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:13,145 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:21,728 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195095.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:20:39,863 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:39,904 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:49,062 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2653, 2.1444, 2.0693, 1.9682, 1.7399, 1.8705, 0.7235, 1.2544], + device='cuda:2'), covar=tensor([0.0665, 0.0685, 0.0511, 0.0827, 0.1284, 0.0947, 0.1468, 0.1170], + device='cuda:2'), in_proj_covar=tensor([0.0371, 0.0368, 0.0374, 0.0397, 0.0476, 0.0403, 0.0349, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 14:20:50,144 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195120.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:21:07,593 INFO [train.py:903] (2/4) Epoch 29, batch 3950, loss[loss=0.1788, simple_loss=0.2574, pruned_loss=0.05008, over 19755.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06012, over 3830137.89 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:21:12,681 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 14:21:55,813 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4300, 2.0707, 1.6129, 1.4207, 1.8631, 1.3065, 1.5486, 1.9295], + device='cuda:2'), covar=tensor([0.0904, 0.0862, 0.1184, 0.0901, 0.0589, 0.1391, 0.0568, 0.0457], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0320, 0.0343, 0.0273, 0.0253, 0.0346, 0.0290, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:22:08,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.632e+02 5.828e+02 7.591e+02 1.503e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 14:22:08,303 INFO [train.py:903] (2/4) Epoch 29, batch 4000, loss[loss=0.2211, simple_loss=0.3012, pruned_loss=0.0705, over 17581.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2835, pruned_loss=0.06006, over 3822082.05 frames. ], batch size: 101, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:22:38,720 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:22:52,711 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 14:23:07,863 INFO [train.py:903] (2/4) Epoch 29, batch 4050, loss[loss=0.2418, simple_loss=0.3111, pruned_loss=0.08627, over 19453.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.06, over 3823628.85 frames. 
], batch size: 64, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:08,396 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.197e+02 5.118e+02 6.147e+02 8.377e+02 1.783e+03, threshold=1.229e+03, percent-clipped=5.0 +2023-04-03 14:24:08,418 INFO [train.py:903] (2/4) Epoch 29, batch 4100, loss[loss=0.2193, simple_loss=0.2964, pruned_loss=0.07105, over 18452.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05964, over 3833091.49 frames. ], batch size: 84, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:41,887 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 14:24:57,137 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:08,838 INFO [train.py:903] (2/4) Epoch 29, batch 4150, loss[loss=0.1865, simple_loss=0.2787, pruned_loss=0.04714, over 19363.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2825, pruned_loss=0.05983, over 3821933.11 frames. ], batch size: 70, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:25:49,092 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:56,330 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:10,052 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 4.925e+02 6.547e+02 8.865e+02 3.200e+03, threshold=1.309e+03, percent-clipped=9.0 +2023-04-03 14:26:10,070 INFO [train.py:903] (2/4) Epoch 29, batch 4200, loss[loss=0.1896, simple_loss=0.2736, pruned_loss=0.05278, over 19543.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2819, pruned_loss=0.05948, over 3800991.35 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:26:11,847 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-03 14:26:13,447 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 14:26:19,450 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:35,632 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195406.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:26:40,099 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:07,662 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:08,632 INFO [train.py:903] (2/4) Epoch 29, batch 4250, loss[loss=0.1902, simple_loss=0.2748, pruned_loss=0.05282, over 19597.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.06006, over 3801798.39 frames. ], batch size: 52, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:27:22,568 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 14:27:31,307 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 14:27:32,633 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-03 14:28:08,326 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 4.660e+02 5.852e+02 7.750e+02 1.648e+03, threshold=1.170e+03, percent-clipped=3.0 +2023-04-03 14:28:08,345 INFO [train.py:903] (2/4) Epoch 29, batch 4300, loss[loss=0.1824, simple_loss=0.2701, pruned_loss=0.04731, over 19757.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.0602, over 3799910.63 frames. ], batch size: 54, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:28:08,829 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8913, 1.9905, 2.2678, 2.5729, 1.9626, 2.4204, 2.2296, 2.0501], + device='cuda:2'), covar=tensor([0.4657, 0.4386, 0.2115, 0.2680, 0.4561, 0.2584, 0.5494, 0.3761], + device='cuda:2'), in_proj_covar=tensor([0.0948, 0.1030, 0.0751, 0.0963, 0.0926, 0.0867, 0.0867, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:28:14,118 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195488.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:28:54,530 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195521.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:29:01,476 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 14:29:09,900 INFO [train.py:903] (2/4) Epoch 29, batch 4350, loss[loss=0.2092, simple_loss=0.2751, pruned_loss=0.07166, over 19301.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.284, pruned_loss=0.06025, over 3808499.72 frames. ], batch size: 44, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:29:28,074 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:05,585 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:10,384 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 4.994e+02 6.103e+02 7.630e+02 1.747e+03, threshold=1.221e+03, percent-clipped=1.0 +2023-04-03 14:30:10,402 INFO [train.py:903] (2/4) Epoch 29, batch 4400, loss[loss=0.2145, simple_loss=0.3047, pruned_loss=0.06219, over 19667.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2852, pruned_loss=0.06104, over 3784014.17 frames. ], batch size: 55, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:30:14,280 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4529, 1.5316, 1.8533, 1.7091, 2.8023, 2.2587, 3.0061, 1.4005], + device='cuda:2'), covar=tensor([0.2691, 0.4570, 0.2904, 0.2029, 0.1503, 0.2343, 0.1420, 0.4635], + device='cuda:2'), in_proj_covar=tensor([0.0558, 0.0677, 0.0766, 0.0514, 0.0639, 0.0551, 0.0674, 0.0578], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:30:29,896 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 14:30:35,596 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:38,550 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-03 14:31:09,644 INFO [train.py:903] (2/4) Epoch 29, batch 4450, loss[loss=0.194, simple_loss=0.277, pruned_loss=0.05546, over 19757.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06031, over 3794341.62 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:32:08,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.083e+02 5.988e+02 7.824e+02 1.664e+03, threshold=1.198e+03, percent-clipped=3.0 +2023-04-03 14:32:08,790 INFO [train.py:903] (2/4) Epoch 29, batch 4500, loss[loss=0.2, simple_loss=0.2792, pruned_loss=0.06036, over 19533.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2839, pruned_loss=0.06006, over 3814423.37 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:10,063 INFO [train.py:903] (2/4) Epoch 29, batch 4550, loss[loss=0.1978, simple_loss=0.2754, pruned_loss=0.0601, over 19577.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2843, pruned_loss=0.06037, over 3820921.70 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:15,615 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 14:33:21,619 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:33,982 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:39,333 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 14:33:51,784 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195769.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:34:01,495 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195777.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:34:08,706 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.047e+02 5.905e+02 8.200e+02 1.793e+03, threshold=1.181e+03, percent-clipped=7.0 +2023-04-03 14:34:08,723 INFO [train.py:903] (2/4) Epoch 29, batch 4600, loss[loss=0.2607, simple_loss=0.3347, pruned_loss=0.09336, over 12805.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2845, pruned_loss=0.06065, over 3826646.78 frames. ], batch size: 135, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:34:09,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=4.44 vs. 
limit=5.0 +2023-04-03 14:34:22,033 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6907, 1.6021, 1.5382, 2.1929, 1.5740, 1.9254, 1.8646, 1.7751], + device='cuda:2'), covar=tensor([0.0853, 0.0943, 0.1062, 0.0701, 0.0946, 0.0793, 0.0939, 0.0706], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0225, 0.0229, 0.0241, 0.0227, 0.0214, 0.0189, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 14:34:31,293 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195802.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:34:33,628 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:05,208 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:10,614 INFO [train.py:903] (2/4) Epoch 29, batch 4650, loss[loss=0.259, simple_loss=0.3142, pruned_loss=0.1019, over 13201.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2855, pruned_loss=0.06125, over 3825416.37 frames. ], batch size: 136, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:35:22,175 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 14:35:29,353 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 14:35:33,355 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 14:35:44,564 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-03 14:35:54,059 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:36:10,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.073e+02 5.115e+02 6.253e+02 8.443e+02 2.371e+03, threshold=1.251e+03, percent-clipped=7.0 +2023-04-03 14:36:10,297 INFO [train.py:903] (2/4) Epoch 29, batch 4700, loss[loss=0.2035, simple_loss=0.293, pruned_loss=0.057, over 19671.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06025, over 3828230.82 frames. ], batch size: 58, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:36:29,170 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 14:37:11,415 INFO [train.py:903] (2/4) Epoch 29, batch 4750, loss[loss=0.1922, simple_loss=0.2625, pruned_loss=0.06094, over 19761.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2819, pruned_loss=0.05951, over 3841640.05 frames. ], batch size: 47, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:38:11,665 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.980e+02 6.350e+02 8.555e+02 2.103e+03, threshold=1.270e+03, percent-clipped=3.0 +2023-04-03 14:38:11,683 INFO [train.py:903] (2/4) Epoch 29, batch 4800, loss[loss=0.1987, simple_loss=0.2794, pruned_loss=0.059, over 19609.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2822, pruned_loss=0.05989, over 3835846.27 frames. ], batch size: 50, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:14,512 INFO [train.py:903] (2/4) Epoch 29, batch 4850, loss[loss=0.2141, simple_loss=0.2923, pruned_loss=0.0679, over 17465.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05983, over 3818423.29 frames. 
], batch size: 101, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:37,047 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 14:39:55,381 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 14:40:01,768 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 14:40:02,672 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 14:40:11,417 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 14:40:13,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 4.641e+02 5.767e+02 7.663e+02 1.512e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-03 14:40:13,845 INFO [train.py:903] (2/4) Epoch 29, batch 4900, loss[loss=0.1937, simple_loss=0.2866, pruned_loss=0.05038, over 19656.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2817, pruned_loss=0.05951, over 3834818.43 frames. ], batch size: 55, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:40:32,802 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 14:41:03,510 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=196125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:14,060 INFO [train.py:903] (2/4) Epoch 29, batch 4950, loss[loss=0.1963, simple_loss=0.2673, pruned_loss=0.06262, over 19632.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2817, pruned_loss=0.05954, over 3844912.90 frames. ], batch size: 50, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:41:24,599 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2499, 2.3826, 2.5545, 3.1271, 2.4409, 3.0393, 2.5758, 2.2855], + device='cuda:2'), covar=tensor([0.4628, 0.4386, 0.2029, 0.2746, 0.4686, 0.2351, 0.5299, 0.3621], + device='cuda:2'), in_proj_covar=tensor([0.0947, 0.1028, 0.0751, 0.0961, 0.0927, 0.0865, 0.0866, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:41:31,265 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-03 14:41:33,804 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=196150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:49,108 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1233, 2.8147, 2.3994, 2.4314, 2.1172, 2.5721, 1.2340, 2.0548], + device='cuda:2'), covar=tensor([0.0749, 0.0700, 0.0708, 0.1186, 0.1168, 0.1134, 0.1452, 0.1215], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0474, 0.0398, 0.0347, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 14:41:52,360 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9434, 2.0756, 2.2797, 2.5767, 1.9995, 2.5322, 2.2510, 2.1132], + device='cuda:2'), covar=tensor([0.4194, 0.3929, 0.1984, 0.2414, 0.3997, 0.2183, 0.5104, 0.3389], + device='cuda:2'), in_proj_covar=tensor([0.0946, 0.1027, 0.0751, 0.0960, 0.0925, 0.0864, 0.0865, 0.0814], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:41:54,194 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 14:42:14,324 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.427e+02 4.864e+02 6.197e+02 7.725e+02 1.739e+03, threshold=1.239e+03, percent-clipped=10.0 +2023-04-03 14:42:14,342 INFO [train.py:903] (2/4) Epoch 29, batch 5000, loss[loss=0.2017, simple_loss=0.2798, pruned_loss=0.06178, over 19654.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06025, over 3837084.64 frames. ], batch size: 55, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:42:23,520 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 14:42:35,055 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 14:43:14,845 INFO [train.py:903] (2/4) Epoch 29, batch 5050, loss[loss=0.1922, simple_loss=0.2788, pruned_loss=0.05285, over 19676.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2823, pruned_loss=0.06004, over 3822434.20 frames. ], batch size: 59, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:43:49,705 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 14:44:15,611 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 4.774e+02 5.782e+02 7.176e+02 1.455e+03, threshold=1.156e+03, percent-clipped=5.0 +2023-04-03 14:44:15,629 INFO [train.py:903] (2/4) Epoch 29, batch 5100, loss[loss=0.2138, simple_loss=0.2981, pruned_loss=0.06473, over 19360.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05944, over 3818881.33 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:44:24,663 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 14:44:27,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 14:44:32,506 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 14:45:16,434 INFO [train.py:903] (2/4) Epoch 29, batch 5150, loss[loss=0.2328, simple_loss=0.3183, pruned_loss=0.07371, over 19608.00 frames. 
], tot_loss[loss=0.201, simple_loss=0.2825, pruned_loss=0.0598, over 3807419.98 frames. ], batch size: 57, lr: 2.81e-03, grad_scale: 4.0 +2023-04-03 14:45:17,952 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2413, 1.3267, 1.2741, 1.0853, 1.1443, 1.0925, 0.0962, 0.3746], + device='cuda:2'), covar=tensor([0.0768, 0.0691, 0.0496, 0.0655, 0.1358, 0.0723, 0.1440, 0.1246], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0473, 0.0398, 0.0347, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 14:45:23,038 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:45:26,216 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 14:45:59,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 14:46:10,259 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:46:15,848 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0530, 2.1313, 2.3423, 2.6411, 2.0509, 2.5743, 2.2569, 2.0754], + device='cuda:2'), covar=tensor([0.4421, 0.4098, 0.2055, 0.2549, 0.4370, 0.2259, 0.5335, 0.3611], + device='cuda:2'), in_proj_covar=tensor([0.0945, 0.1023, 0.0748, 0.0958, 0.0923, 0.0862, 0.0863, 0.0812], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:46:17,531 INFO [train.py:903] (2/4) Epoch 29, batch 5200, loss[loss=0.2217, simple_loss=0.3114, pruned_loss=0.06603, over 19682.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.05997, over 3820248.49 frames. ], batch size: 59, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:46:18,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 5.187e+02 6.337e+02 8.295e+02 1.863e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 14:46:28,766 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 14:47:12,666 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 14:47:19,206 INFO [train.py:903] (2/4) Epoch 29, batch 5250, loss[loss=0.2122, simple_loss=0.2921, pruned_loss=0.06619, over 19507.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05909, over 3828361.70 frames. 
], batch size: 64, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:47:43,299 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9908, 2.0902, 2.4036, 2.6537, 2.0886, 2.6048, 2.3339, 2.1217], + device='cuda:2'), covar=tensor([0.4477, 0.4225, 0.2000, 0.2597, 0.4345, 0.2366, 0.5166, 0.3641], + device='cuda:2'), in_proj_covar=tensor([0.0949, 0.1026, 0.0751, 0.0961, 0.0926, 0.0864, 0.0866, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:48:07,770 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9766, 2.0744, 2.3550, 2.5753, 2.0273, 2.5006, 2.3018, 2.0598], + device='cuda:2'), covar=tensor([0.4315, 0.4058, 0.1954, 0.2662, 0.4119, 0.2285, 0.4833, 0.3561], + device='cuda:2'), in_proj_covar=tensor([0.0948, 0.1025, 0.0750, 0.0960, 0.0925, 0.0864, 0.0865, 0.0814], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 14:48:20,444 INFO [train.py:903] (2/4) Epoch 29, batch 5300, loss[loss=0.2077, simple_loss=0.301, pruned_loss=0.0572, over 19687.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.0591, over 3819160.73 frames. ], batch size: 59, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:48:21,522 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.792e+02 5.709e+02 7.130e+02 1.478e+03, threshold=1.142e+03, percent-clipped=2.0 +2023-04-03 14:48:35,856 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 14:49:20,274 INFO [train.py:903] (2/4) Epoch 29, batch 5350, loss[loss=0.163, simple_loss=0.2461, pruned_loss=0.03996, over 19485.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.05906, over 3820916.40 frames. ], batch size: 49, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:49:51,997 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 14:50:09,924 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0099, 3.6803, 2.3483, 3.2842, 0.9569, 3.6582, 3.5243, 3.6331], + device='cuda:2'), covar=tensor([0.0889, 0.1202, 0.2285, 0.0966, 0.3973, 0.0808, 0.1031, 0.1460], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0432, 0.0518, 0.0357, 0.0409, 0.0457, 0.0452, 0.0487], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:50:21,627 INFO [train.py:903] (2/4) Epoch 29, batch 5400, loss[loss=0.1785, simple_loss=0.2545, pruned_loss=0.05125, over 19739.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.0594, over 3823753.26 frames. 
], batch size: 45, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:50:22,762 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.834e+02 5.946e+02 7.840e+02 1.782e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-03 14:50:36,268 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:50:59,724 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0771, 3.2830, 1.8882, 2.0652, 2.8834, 1.6488, 1.5438, 2.2682], + device='cuda:2'), covar=tensor([0.1418, 0.0645, 0.1168, 0.0901, 0.0642, 0.1402, 0.1042, 0.0717], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0324, 0.0346, 0.0277, 0.0255, 0.0350, 0.0294, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:51:22,491 INFO [train.py:903] (2/4) Epoch 29, batch 5450, loss[loss=0.1957, simple_loss=0.2858, pruned_loss=0.05277, over 19546.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05913, over 3833357.18 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:51:34,847 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9188, 5.0167, 5.7487, 5.7645, 2.0180, 5.4252, 4.6027, 5.4311], + device='cuda:2'), covar=tensor([0.1894, 0.0964, 0.0635, 0.0676, 0.6550, 0.0919, 0.0700, 0.1229], + device='cuda:2'), in_proj_covar=tensor([0.0829, 0.0802, 0.1013, 0.0886, 0.0879, 0.0774, 0.0596, 0.0938], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 14:51:37,075 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1600, 2.0904, 1.9951, 1.8588, 1.7709, 1.7985, 0.6918, 1.1381], + device='cuda:2'), covar=tensor([0.0678, 0.0618, 0.0494, 0.0820, 0.1173, 0.0884, 0.1359, 0.1147], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0365, 0.0370, 0.0394, 0.0474, 0.0400, 0.0348, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 14:51:41,456 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.8692, 4.4868, 2.6351, 3.8118, 0.9702, 4.4299, 4.2699, 4.2829], + device='cuda:2'), covar=tensor([0.0571, 0.0889, 0.1997, 0.0860, 0.3910, 0.0587, 0.0894, 0.1133], + device='cuda:2'), in_proj_covar=tensor([0.0537, 0.0435, 0.0522, 0.0360, 0.0413, 0.0460, 0.0456, 0.0491], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 14:52:21,549 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:52:22,558 INFO [train.py:903] (2/4) Epoch 29, batch 5500, loss[loss=0.2212, simple_loss=0.3026, pruned_loss=0.06985, over 19291.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2819, pruned_loss=0.05896, over 3840284.75 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:52:23,692 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 4.767e+02 5.823e+02 7.066e+02 1.237e+03, threshold=1.165e+03, percent-clipped=1.0 +2023-04-03 14:52:46,218 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 14:53:08,298 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:53:22,920 INFO [train.py:903] (2/4) Epoch 29, batch 5550, loss[loss=0.1925, simple_loss=0.2794, pruned_loss=0.05286, over 19784.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.05911, over 3838944.29 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:53:30,039 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 14:53:40,089 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.25 vs. limit=5.0 +2023-04-03 14:54:18,738 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 14:54:22,916 INFO [train.py:903] (2/4) Epoch 29, batch 5600, loss[loss=0.2088, simple_loss=0.2953, pruned_loss=0.06114, over 18158.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2817, pruned_loss=0.05918, over 3841896.40 frames. ], batch size: 83, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:54:24,076 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.937e+02 5.977e+02 7.468e+02 1.263e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 14:54:39,794 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:54:40,951 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:55:24,436 INFO [train.py:903] (2/4) Epoch 29, batch 5650, loss[loss=0.1887, simple_loss=0.262, pruned_loss=0.05776, over 19788.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2816, pruned_loss=0.05923, over 3827708.73 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:55:28,136 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,107 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,954 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 14:56:24,243 INFO [train.py:903] (2/4) Epoch 29, batch 5700, loss[loss=0.2361, simple_loss=0.3136, pruned_loss=0.07931, over 19469.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05955, over 3832887.44 frames. ], batch size: 64, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:56:25,388 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.159e+02 6.169e+02 7.777e+02 1.148e+03, threshold=1.234e+03, percent-clipped=0.0 +2023-04-03 14:56:41,809 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:46,521 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 14:57:24,047 INFO [train.py:903] (2/4) Epoch 29, batch 5750, loss[loss=0.1978, simple_loss=0.2771, pruned_loss=0.05927, over 19496.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2819, pruned_loss=0.05947, over 3829610.82 frames. ], batch size: 49, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:57:25,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 14:57:30,931 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:57:31,933 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 14:57:38,012 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 14:58:24,228 INFO [train.py:903] (2/4) Epoch 29, batch 5800, loss[loss=0.1907, simple_loss=0.2762, pruned_loss=0.05263, over 19669.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2814, pruned_loss=0.05932, over 3828593.04 frames. ], batch size: 53, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:58:25,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.987e+02 6.141e+02 8.368e+02 1.662e+03, threshold=1.228e+03, percent-clipped=5.0 +2023-04-03 14:59:24,878 INFO [train.py:903] (2/4) Epoch 29, batch 5850, loss[loss=0.1515, simple_loss=0.2321, pruned_loss=0.03542, over 19738.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2809, pruned_loss=0.05933, over 3814855.11 frames. ], batch size: 45, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 14:59:27,414 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 14:59:49,586 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197054.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:59:50,543 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:19,088 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197079.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:24,409 INFO [train.py:903] (2/4) Epoch 29, batch 5900, loss[loss=0.1796, simple_loss=0.2718, pruned_loss=0.0437, over 19668.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2815, pruned_loss=0.05946, over 3830442.55 frames. ], batch size: 59, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:00:25,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.70 vs. limit=5.0 +2023-04-03 15:00:25,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.889e+02 5.905e+02 7.501e+02 2.168e+03, threshold=1.181e+03, percent-clipped=6.0 +2023-04-03 15:00:26,684 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 15:00:35,853 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:46,559 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-03 15:00:48,772 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.1325, 3.7959, 2.9284, 3.3698, 1.5260, 3.7379, 3.6219, 3.7353], + device='cuda:2'), covar=tensor([0.0827, 0.1069, 0.1901, 0.0936, 0.3182, 0.0799, 0.1060, 0.1435], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0433, 0.0520, 0.0358, 0.0409, 0.0459, 0.0454, 0.0487], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:01:06,448 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:24,665 INFO [train.py:903] (2/4) Epoch 29, batch 5950, loss[loss=0.1723, simple_loss=0.265, pruned_loss=0.03984, over 19840.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2813, pruned_loss=0.05964, over 3820946.13 frames. ], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:01:32,740 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:59,494 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.4410, 1.3919, 1.6269, 1.5488, 3.0397, 1.2013, 2.3711, 3.5000], + device='cuda:2'), covar=tensor([0.0567, 0.2937, 0.2837, 0.1908, 0.0757, 0.2518, 0.1319, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0427, 0.0383, 0.0402, 0.0357, 0.0385, 0.0360, 0.0400, 0.0421], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:02:18,325 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2244, 1.2737, 1.2664, 1.0787, 1.1130, 1.1073, 0.0937, 0.3530], + device='cuda:2'), covar=tensor([0.0892, 0.0793, 0.0547, 0.0700, 0.1454, 0.0793, 0.1546, 0.1378], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0363, 0.0369, 0.0393, 0.0471, 0.0400, 0.0347, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 15:02:24,687 INFO [train.py:903] (2/4) Epoch 29, batch 6000, loss[loss=0.1986, simple_loss=0.2828, pruned_loss=0.05725, over 19539.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2818, pruned_loss=0.05987, over 3817188.82 frames. ], batch size: 56, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:02:24,687 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 15:02:43,153 INFO [train.py:937] (2/4) Epoch 29, validation: loss=0.167, simple_loss=0.2662, pruned_loss=0.03392, over 944034.00 frames. +2023-04-03 15:02:43,154 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 15:02:44,362 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 4.704e+02 5.833e+02 7.372e+02 1.660e+03, threshold=1.167e+03, percent-clipped=5.0 +2023-04-03 15:03:23,050 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:03:25,881 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-03 15:03:44,117 INFO [train.py:903] (2/4) Epoch 29, batch 6050, loss[loss=0.2034, simple_loss=0.286, pruned_loss=0.06041, over 19600.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.05999, over 3814972.63 frames. 
], batch size: 57, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:03:55,200 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:11,916 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:45,073 INFO [train.py:903] (2/4) Epoch 29, batch 6100, loss[loss=0.1744, simple_loss=0.26, pruned_loss=0.04434, over 19681.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.06003, over 3812704.19 frames. ], batch size: 53, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:04:46,195 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 5.045e+02 6.568e+02 8.070e+02 1.422e+03, threshold=1.314e+03, percent-clipped=5.0 +2023-04-03 15:05:17,416 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:42,435 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:45,466 INFO [train.py:903] (2/4) Epoch 29, batch 6150, loss[loss=0.1678, simple_loss=0.2557, pruned_loss=0.03989, over 19598.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.282, pruned_loss=0.05973, over 3817154.82 frames. ], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:05:48,234 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:13,318 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 15:06:14,652 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:30,288 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 15:06:46,116 INFO [train.py:903] (2/4) Epoch 29, batch 6200, loss[loss=0.2149, simple_loss=0.3039, pruned_loss=0.06293, over 19532.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2829, pruned_loss=0.06018, over 3809082.62 frames. ], batch size: 56, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:06:47,112 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.150e+02 5.975e+02 7.505e+02 2.002e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-03 15:07:45,741 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197433.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:07:46,600 INFO [train.py:903] (2/4) Epoch 29, batch 6250, loss[loss=0.2308, simple_loss=0.3096, pruned_loss=0.07596, over 19474.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06034, over 3810412.64 frames. ], batch size: 64, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:08:18,014 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 15:08:48,228 INFO [train.py:903] (2/4) Epoch 29, batch 6300, loss[loss=0.1622, simple_loss=0.2399, pruned_loss=0.04227, over 19760.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05962, over 3816630.17 frames. 
], batch size: 46, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:08:50,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.620e+02 5.575e+02 6.491e+02 1.508e+03, threshold=1.115e+03, percent-clipped=2.0 +2023-04-03 15:09:21,981 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:49,202 INFO [train.py:903] (2/4) Epoch 29, batch 6350, loss[loss=0.1922, simple_loss=0.2856, pruned_loss=0.04938, over 19538.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2825, pruned_loss=0.05944, over 3822235.59 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:09:49,491 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197534.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:52,899 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:14,130 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-03 15:10:23,539 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197563.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:33,869 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 15:10:49,098 INFO [train.py:903] (2/4) Epoch 29, batch 6400, loss[loss=0.2039, simple_loss=0.2874, pruned_loss=0.06021, over 19611.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.282, pruned_loss=0.05917, over 3819823.99 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:10:52,229 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.287e+02 5.708e+02 7.339e+02 1.464e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-04-03 15:10:53,782 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:23,640 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197612.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:25,910 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:49,896 INFO [train.py:903] (2/4) Epoch 29, batch 6450, loss[loss=0.2011, simple_loss=0.2794, pruned_loss=0.06139, over 19574.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05934, over 3828341.98 frames. ], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:11:56,888 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:12:27,643 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3805, 3.1130, 2.2207, 2.8067, 0.7471, 3.0633, 2.9412, 3.0488], + device='cuda:2'), covar=tensor([0.0993, 0.1263, 0.2029, 0.1092, 0.3900, 0.0979, 0.1198, 0.1273], + device='cuda:2'), in_proj_covar=tensor([0.0532, 0.0431, 0.0519, 0.0356, 0.0409, 0.0458, 0.0451, 0.0485], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:12:37,429 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 15:12:37,704 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2900, 3.8363, 3.9339, 3.9573, 1.5990, 3.7457, 3.2517, 3.6813], + device='cuda:2'), covar=tensor([0.1825, 0.1065, 0.0752, 0.0815, 0.6086, 0.1040, 0.0828, 0.1297], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0793, 0.1001, 0.0876, 0.0868, 0.0766, 0.0590, 0.0932], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 15:12:49,913 INFO [train.py:903] (2/4) Epoch 29, batch 6500, loss[loss=0.1614, simple_loss=0.242, pruned_loss=0.04034, over 19287.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05936, over 3834409.70 frames. ], batch size: 44, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:12:52,112 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 4.970e+02 6.097e+02 8.194e+02 1.467e+03, threshold=1.219e+03, percent-clipped=8.0 +2023-04-03 15:12:58,634 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 15:13:50,299 INFO [train.py:903] (2/4) Epoch 29, batch 6550, loss[loss=0.2486, simple_loss=0.335, pruned_loss=0.08111, over 19659.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.282, pruned_loss=0.0592, over 3823554.15 frames. ], batch size: 60, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:14:24,670 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.8173, 1.2515, 1.5178, 0.5229, 1.8773, 2.4577, 2.1062, 2.6084], + device='cuda:2'), covar=tensor([0.1781, 0.3992, 0.3722, 0.3207, 0.0732, 0.0287, 0.0368, 0.0381], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0275, 0.0256, 0.0199, 0.0221, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 15:14:42,384 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197777.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:14:50,648 INFO [train.py:903] (2/4) Epoch 29, batch 6600, loss[loss=0.2049, simple_loss=0.2873, pruned_loss=0.06123, over 19493.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05977, over 3807084.80 frames. ], batch size: 64, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:14:54,072 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.758e+02 5.997e+02 7.159e+02 2.116e+03, threshold=1.199e+03, percent-clipped=4.0 +2023-04-03 15:15:51,171 INFO [train.py:903] (2/4) Epoch 29, batch 6650, loss[loss=0.2064, simple_loss=0.2904, pruned_loss=0.06122, over 19597.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2837, pruned_loss=0.06026, over 3791669.29 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:45,532 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:16:52,897 INFO [train.py:903] (2/4) Epoch 29, batch 6700, loss[loss=0.2182, simple_loss=0.302, pruned_loss=0.06723, over 17346.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06064, over 3784524.49 frames. 
], batch size: 101, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:56,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.093e+02 5.944e+02 7.634e+02 1.783e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-03 15:17:02,427 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197892.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:17:19,980 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197907.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:23,575 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:50,653 INFO [train.py:903] (2/4) Epoch 29, batch 6750, loss[loss=0.2287, simple_loss=0.304, pruned_loss=0.07674, over 13282.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2824, pruned_loss=0.05994, over 3785914.95 frames. ], batch size: 137, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:18:45,945 INFO [train.py:903] (2/4) Epoch 29, batch 6800, loss[loss=0.1768, simple_loss=0.2728, pruned_loss=0.04044, over 19690.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05967, over 3798992.08 frames. ], batch size: 59, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:18:49,168 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.543e+02 4.853e+02 6.089e+02 7.661e+02 1.249e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 15:18:56,378 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:19:33,505 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 15:19:33,942 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 15:19:37,015 INFO [train.py:903] (2/4) Epoch 30, batch 0, loss[loss=0.2346, simple_loss=0.3165, pruned_loss=0.07639, over 19693.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3165, pruned_loss=0.07639, over 19693.00 frames. ], batch size: 58, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:19:37,016 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 15:19:49,318 INFO [train.py:937] (2/4) Epoch 30, validation: loss=0.167, simple_loss=0.2667, pruned_loss=0.03362, over 944034.00 frames. +2023-04-03 15:19:49,319 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 15:20:02,486 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198022.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:20:03,308 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 15:20:35,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 15:20:51,417 INFO [train.py:903] (2/4) Epoch 30, batch 50, loss[loss=0.2118, simple_loss=0.2899, pruned_loss=0.06682, over 19523.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2802, pruned_loss=0.05824, over 865328.56 frames. ], batch size: 56, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:21:13,156 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. 
limit=2.0 +2023-04-03 15:21:20,382 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 4.959e+02 5.838e+02 7.711e+02 1.808e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 15:21:26,251 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 15:21:32,474 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 15:21:36,615 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198099.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:21:52,088 INFO [train.py:903] (2/4) Epoch 30, batch 100, loss[loss=0.1674, simple_loss=0.2511, pruned_loss=0.04179, over 19775.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2801, pruned_loss=0.05795, over 1533689.56 frames. ], batch size: 47, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:22:04,481 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 15:22:04,742 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.6662, 4.8516, 5.4903, 5.5005, 2.0649, 5.1657, 4.4233, 5.1871], + device='cuda:2'), covar=tensor([0.1713, 0.1129, 0.0601, 0.0654, 0.6363, 0.0943, 0.0646, 0.1185], + device='cuda:2'), in_proj_covar=tensor([0.0823, 0.0797, 0.1003, 0.0881, 0.0872, 0.0769, 0.0592, 0.0934], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 15:22:31,187 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6023, 1.7307, 2.2051, 1.9504, 2.9183, 2.5301, 3.1296, 1.6992], + device='cuda:2'), covar=tensor([0.2889, 0.4859, 0.3083, 0.2214, 0.1972, 0.2577, 0.2012, 0.4894], + device='cuda:2'), in_proj_covar=tensor([0.0561, 0.0682, 0.0770, 0.0517, 0.0642, 0.0553, 0.0675, 0.0579], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 15:22:37,706 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198148.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:22:54,432 INFO [train.py:903] (2/4) Epoch 30, batch 150, loss[loss=0.1868, simple_loss=0.2806, pruned_loss=0.04646, over 19794.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.279, pruned_loss=0.05733, over 2034513.95 frames. ], batch size: 56, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:07,449 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198173.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:23:10,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-03 15:23:25,376 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 4.863e+02 5.858e+02 7.546e+02 1.579e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-03 15:23:52,143 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 15:23:55,536 INFO [train.py:903] (2/4) Epoch 30, batch 200, loss[loss=0.2522, simple_loss=0.3195, pruned_loss=0.09246, over 12849.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2807, pruned_loss=0.05849, over 2409600.55 frames. 
], batch size: 136, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:24:42,385 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:47,683 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:57,526 INFO [train.py:903] (2/4) Epoch 30, batch 250, loss[loss=0.2222, simple_loss=0.3171, pruned_loss=0.06368, over 19514.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2816, pruned_loss=0.05925, over 2728212.19 frames. ], batch size: 64, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:25:13,962 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:19,540 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198278.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:29,213 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.793e+02 5.720e+02 7.294e+02 1.524e+03, threshold=1.144e+03, percent-clipped=6.0 +2023-04-03 15:25:49,116 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:26:01,276 INFO [train.py:903] (2/4) Epoch 30, batch 300, loss[loss=0.1726, simple_loss=0.2608, pruned_loss=0.04223, over 19787.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2812, pruned_loss=0.05872, over 2968538.84 frames. ], batch size: 49, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:03,945 INFO [train.py:903] (2/4) Epoch 30, batch 350, loss[loss=0.188, simple_loss=0.278, pruned_loss=0.04899, over 19155.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2809, pruned_loss=0.05872, over 3170584.69 frames. ], batch size: 69, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:06,263 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 15:27:12,152 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:27:33,410 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.876e+02 6.987e+02 8.997e+02 2.429e+03, threshold=1.397e+03, percent-clipped=9.0 +2023-04-03 15:28:04,750 INFO [train.py:903] (2/4) Epoch 30, batch 400, loss[loss=0.2188, simple_loss=0.3059, pruned_loss=0.06588, over 19674.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05972, over 3326583.49 frames. ], batch size: 59, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:28:44,694 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:29:06,536 INFO [train.py:903] (2/4) Epoch 30, batch 450, loss[loss=0.1856, simple_loss=0.2796, pruned_loss=0.04576, over 19716.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2814, pruned_loss=0.05846, over 3447292.20 frames. ], batch size: 63, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:29:08,583 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-03 15:29:38,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.102e+02 6.085e+02 7.779e+02 1.785e+03, threshold=1.217e+03, percent-clipped=4.0 +2023-04-03 15:29:40,853 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149 from training. 
Duration: 25.285 +2023-04-03 15:29:42,041 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 15:29:54,120 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6242, 1.7438, 1.9968, 2.0173, 1.5165, 1.9480, 1.9613, 1.8704], + device='cuda:2'), covar=tensor([0.4299, 0.4068, 0.2120, 0.2621, 0.4288, 0.2435, 0.5433, 0.3600], + device='cuda:2'), in_proj_covar=tensor([0.0946, 0.1028, 0.0748, 0.0959, 0.0925, 0.0863, 0.0864, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 15:30:08,497 INFO [train.py:903] (2/4) Epoch 30, batch 500, loss[loss=0.1774, simple_loss=0.2585, pruned_loss=0.04819, over 19613.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2816, pruned_loss=0.05908, over 3529492.80 frames. ], batch size: 50, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:30:11,085 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0626, 1.6532, 1.8558, 1.8614, 4.5734, 1.1492, 2.6606, 5.0733], + device='cuda:2'), covar=tensor([0.0419, 0.2877, 0.2909, 0.1970, 0.0770, 0.2897, 0.1487, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0383, 0.0402, 0.0356, 0.0387, 0.0360, 0.0401, 0.0424], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:30:16,774 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:06,275 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:11,508 INFO [train.py:903] (2/4) Epoch 30, batch 550, loss[loss=0.2195, simple_loss=0.3006, pruned_loss=0.06923, over 19303.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05972, over 3595339.98 frames. ], batch size: 66, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:31:40,933 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.928e+02 6.417e+02 8.667e+02 2.852e+03, threshold=1.283e+03, percent-clipped=10.0 +2023-04-03 15:31:47,797 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8191, 1.7329, 1.7600, 2.3593, 1.7840, 2.0709, 2.0321, 1.9298], + device='cuda:2'), covar=tensor([0.0798, 0.0842, 0.0906, 0.0695, 0.0825, 0.0751, 0.0853, 0.0643], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0226, 0.0215, 0.0188, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 15:31:56,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 15:32:13,286 INFO [train.py:903] (2/4) Epoch 30, batch 600, loss[loss=0.2578, simple_loss=0.3255, pruned_loss=0.09512, over 13465.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2831, pruned_loss=0.05957, over 3650092.81 frames. 
], batch size: 135, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:32:27,220 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3494, 1.0578, 1.2728, 2.1755, 1.5529, 1.3295, 1.5588, 1.2969], + device='cuda:2'), covar=tensor([0.1240, 0.1739, 0.1426, 0.0849, 0.1159, 0.1468, 0.1323, 0.1128], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0226, 0.0215, 0.0188, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 15:32:28,384 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198625.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:38,412 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:52,849 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 15:33:01,379 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198650.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:33:14,979 INFO [train.py:903] (2/4) Epoch 30, batch 650, loss[loss=0.1961, simple_loss=0.2861, pruned_loss=0.05309, over 18777.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2832, pruned_loss=0.05958, over 3680948.27 frames. ], batch size: 74, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:33:46,600 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.000e+02 5.780e+02 7.067e+02 2.104e+03, threshold=1.156e+03, percent-clipped=2.0 +2023-04-03 15:34:00,608 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4355, 2.1010, 1.6345, 1.4428, 1.9697, 1.3060, 1.3580, 1.8910], + device='cuda:2'), covar=tensor([0.1146, 0.1001, 0.1159, 0.0944, 0.0543, 0.1468, 0.0853, 0.0547], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0321, 0.0346, 0.0276, 0.0253, 0.0349, 0.0292, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:34:02,333 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 15:34:16,542 INFO [train.py:903] (2/4) Epoch 30, batch 700, loss[loss=0.1869, simple_loss=0.2705, pruned_loss=0.05167, over 19767.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05912, over 3716583.22 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:18,679 INFO [train.py:903] (2/4) Epoch 30, batch 750, loss[loss=0.2449, simple_loss=0.3103, pruned_loss=0.08978, over 19591.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05917, over 3735463.91 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:51,148 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.908e+02 5.891e+02 7.920e+02 1.978e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 15:36:23,532 INFO [train.py:903] (2/4) Epoch 30, batch 800, loss[loss=0.1741, simple_loss=0.2619, pruned_loss=0.04321, over 19773.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.281, pruned_loss=0.0584, over 3771855.88 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:36:27,288 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:36:39,870 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 15:36:56,815 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198839.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:25,165 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:26,183 INFO [train.py:903] (2/4) Epoch 30, batch 850, loss[loss=0.1809, simple_loss=0.2698, pruned_loss=0.04594, over 19762.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2817, pruned_loss=0.05862, over 3766879.92 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:37:35,527 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198870.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:55,569 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 4.755e+02 5.781e+02 7.125e+02 1.514e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 15:38:13,702 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:38:20,318 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 15:38:26,177 INFO [train.py:903] (2/4) Epoch 30, batch 900, loss[loss=0.1943, simple_loss=0.2789, pruned_loss=0.05487, over 19594.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2814, pruned_loss=0.05882, over 3789617.49 frames. ], batch size: 61, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:07,706 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:27,833 INFO [train.py:903] (2/4) Epoch 30, batch 950, loss[loss=0.2279, simple_loss=0.3132, pruned_loss=0.07128, over 18191.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2821, pruned_loss=0.05881, over 3794412.37 frames. ], batch size: 83, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:32,350 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 15:39:47,090 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:48,122 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:59,676 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.052e+02 6.305e+02 7.870e+02 1.588e+03, threshold=1.261e+03, percent-clipped=4.0 +2023-04-03 15:40:30,254 INFO [train.py:903] (2/4) Epoch 30, batch 1000, loss[loss=0.2227, simple_loss=0.291, pruned_loss=0.07724, over 19613.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2823, pruned_loss=0.05928, over 3792655.20 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:40:34,412 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-03 15:41:23,980 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 15:41:33,186 INFO [train.py:903] (2/4) Epoch 30, batch 1050, loss[loss=0.1695, simple_loss=0.2492, pruned_loss=0.04492, over 19752.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2829, pruned_loss=0.05972, over 3793533.63 frames. 
], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:42:03,436 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.270e+02 4.631e+02 5.961e+02 7.864e+02 1.953e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 15:42:04,614 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 15:42:11,036 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:42:35,185 INFO [train.py:903] (2/4) Epoch 30, batch 1100, loss[loss=0.1684, simple_loss=0.248, pruned_loss=0.04443, over 19363.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05975, over 3796622.81 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:43:38,430 INFO [train.py:903] (2/4) Epoch 30, batch 1150, loss[loss=0.1598, simple_loss=0.2399, pruned_loss=0.03989, over 19746.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05982, over 3788261.63 frames. ], batch size: 46, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:10,292 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.110e+02 5.915e+02 7.639e+02 1.372e+03, threshold=1.183e+03, percent-clipped=6.0 +2023-04-03 15:44:18,790 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:44:28,835 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5979, 2.2962, 1.8148, 1.6150, 2.1179, 1.5111, 1.4580, 2.0874], + device='cuda:2'), covar=tensor([0.1092, 0.0864, 0.1069, 0.0914, 0.0624, 0.1334, 0.0792, 0.0517], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0319, 0.0343, 0.0274, 0.0252, 0.0346, 0.0290, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:44:41,424 INFO [train.py:903] (2/4) Epoch 30, batch 1200, loss[loss=0.1867, simple_loss=0.2712, pruned_loss=0.05111, over 19747.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2829, pruned_loss=0.05988, over 3796975.81 frames. ], batch size: 51, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:43,968 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:06,854 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:14,403 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 15:45:22,169 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:38,727 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:44,962 INFO [train.py:903] (2/4) Epoch 30, batch 1250, loss[loss=0.2113, simple_loss=0.2966, pruned_loss=0.06303, over 19603.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2834, pruned_loss=0.0601, over 3782221.92 frames. 
], batch size: 61, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:45:54,333 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7749, 4.3348, 2.7845, 3.7843, 0.9792, 4.3551, 4.1795, 4.3043], + device='cuda:2'), covar=tensor([0.0585, 0.0933, 0.1790, 0.0838, 0.3857, 0.0636, 0.0955, 0.1139], + device='cuda:2'), in_proj_covar=tensor([0.0534, 0.0432, 0.0520, 0.0359, 0.0408, 0.0459, 0.0453, 0.0488], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:46:14,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.004e+02 5.991e+02 7.632e+02 1.398e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-03 15:46:17,137 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:46:46,510 INFO [train.py:903] (2/4) Epoch 30, batch 1300, loss[loss=0.2189, simple_loss=0.3021, pruned_loss=0.06788, over 19311.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05985, over 3803725.69 frames. ], batch size: 66, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:47:07,616 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:32,057 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:46,101 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199360.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:47,892 INFO [train.py:903] (2/4) Epoch 30, batch 1350, loss[loss=0.2308, simple_loss=0.3226, pruned_loss=0.0695, over 19676.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05979, over 3803096.38 frames. ], batch size: 60, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:03,209 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:20,886 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 4.672e+02 5.739e+02 7.239e+02 1.592e+03, threshold=1.148e+03, percent-clipped=7.0 +2023-04-03 15:48:40,978 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199404.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:50,926 INFO [train.py:903] (2/4) Epoch 30, batch 1400, loss[loss=0.1857, simple_loss=0.2744, pruned_loss=0.04849, over 18478.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.0597, over 3798372.19 frames. ], batch size: 84, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:58,972 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199418.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:49:03,143 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 15:49:53,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-03 15:49:55,031 INFO [train.py:903] (2/4) Epoch 30, batch 1450, loss[loss=0.2428, simple_loss=0.3176, pruned_loss=0.08402, over 19383.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05988, over 3803256.87 frames. ], batch size: 70, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:49:56,190 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 15:50:11,576 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199476.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:50:25,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.040e+02 6.121e+02 7.927e+02 1.972e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 15:50:56,163 INFO [train.py:903] (2/4) Epoch 30, batch 1500, loss[loss=0.1883, simple_loss=0.2698, pruned_loss=0.05341, over 19617.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06051, over 3794817.28 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:50:56,437 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199512.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:51:28,292 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:51:46,271 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9282, 1.5356, 1.5958, 1.5824, 3.5573, 1.2148, 2.3865, 3.9594], + device='cuda:2'), covar=tensor([0.0462, 0.2742, 0.2900, 0.1978, 0.0654, 0.2623, 0.1455, 0.0221], + device='cuda:2'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0388, 0.0362, 0.0403, 0.0424], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 15:51:47,418 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199553.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:51:58,250 INFO [train.py:903] (2/4) Epoch 30, batch 1550, loss[loss=0.2249, simple_loss=0.3056, pruned_loss=0.07214, over 19528.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.06092, over 3797499.97 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:52:28,271 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:52:31,299 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.717e+02 5.804e+02 6.903e+02 1.639e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 15:52:45,777 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1834, 2.8180, 2.1739, 2.2582, 1.9289, 2.4020, 1.1283, 2.0239], + device='cuda:2'), covar=tensor([0.0792, 0.0706, 0.0872, 0.1356, 0.1393, 0.1332, 0.1652, 0.1272], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0395, 0.0472, 0.0398, 0.0347, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 15:52:59,363 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:01,357 INFO [train.py:903] (2/4) Epoch 30, batch 1600, loss[loss=0.1632, simple_loss=0.2462, pruned_loss=0.04008, over 19739.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.06005, over 3803425.14 frames. ], batch size: 46, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:53:06,428 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:26,407 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-03 15:53:37,688 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:53,196 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:01,284 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199660.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:03,852 INFO [train.py:903] (2/4) Epoch 30, batch 1650, loss[loss=0.2372, simple_loss=0.3111, pruned_loss=0.08159, over 19354.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.282, pruned_loss=0.05948, over 3816254.73 frames. ], batch size: 70, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:54:32,758 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199685.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:35,821 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 4.724e+02 5.858e+02 7.730e+02 3.208e+03, threshold=1.172e+03, percent-clipped=5.0 +2023-04-03 15:55:06,073 INFO [train.py:903] (2/4) Epoch 30, batch 1700, loss[loss=0.175, simple_loss=0.262, pruned_loss=0.04401, over 19770.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05901, over 3804111.32 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:55:24,093 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.2642, 1.3230, 1.2226, 1.0544, 1.1230, 1.0754, 0.0943, 0.4747], + device='cuda:2'), covar=tensor([0.0780, 0.0718, 0.0549, 0.0700, 0.1439, 0.0798, 0.1491, 0.1278], + device='cuda:2'), in_proj_covar=tensor([0.0371, 0.0366, 0.0373, 0.0398, 0.0476, 0.0402, 0.0350, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 15:55:40,547 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 15:55:46,477 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 15:56:09,061 INFO [train.py:903] (2/4) Epoch 30, batch 1750, loss[loss=0.1753, simple_loss=0.2605, pruned_loss=0.04506, over 19683.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05859, over 3820988.47 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:56:09,243 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199762.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:56:42,925 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.893e+02 6.270e+02 7.375e+02 1.627e+03, threshold=1.254e+03, percent-clipped=1.0 +2023-04-03 15:57:00,416 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3629, 3.8173, 3.9334, 3.9499, 1.5241, 3.7403, 3.2724, 3.6923], + device='cuda:2'), covar=tensor([0.1750, 0.1072, 0.0718, 0.0836, 0.6383, 0.1131, 0.0787, 0.1218], + device='cuda:2'), in_proj_covar=tensor([0.0829, 0.0803, 0.1015, 0.0891, 0.0877, 0.0776, 0.0600, 0.0943], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 15:57:11,428 INFO [train.py:903] (2/4) Epoch 30, batch 1800, loss[loss=0.1886, simple_loss=0.2747, pruned_loss=0.05132, over 19664.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2811, pruned_loss=0.05877, over 3809815.25 frames. 
], batch size: 58, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:57:21,129 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199820.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:06,475 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199856.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:58:08,440 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 15:58:13,035 INFO [train.py:903] (2/4) Epoch 30, batch 1850, loss[loss=0.1784, simple_loss=0.2611, pruned_loss=0.04786, over 19480.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05858, over 3804097.41 frames. ], batch size: 49, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:58:15,386 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9185, 4.3769, 4.6367, 4.6265, 1.8198, 4.3586, 3.7855, 4.3514], + device='cuda:2'), covar=tensor([0.1692, 0.1032, 0.0630, 0.0696, 0.6203, 0.1172, 0.0728, 0.1123], + device='cuda:2'), in_proj_covar=tensor([0.0831, 0.0806, 0.1020, 0.0894, 0.0881, 0.0779, 0.0603, 0.0948], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 15:58:32,561 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199877.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:46,820 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.043e+02 6.338e+02 8.325e+02 2.069e+03, threshold=1.268e+03, percent-clipped=7.0 +2023-04-03 15:58:46,872 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 15:58:57,090 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:59:12,883 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:15,989 INFO [train.py:903] (2/4) Epoch 30, batch 1900, loss[loss=0.174, simple_loss=0.2627, pruned_loss=0.04267, over 19604.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2805, pruned_loss=0.05862, over 3820641.50 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:59:33,272 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 15:59:33,576 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199927.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:38,042 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 15:59:42,764 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:43,851 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199935.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:00:03,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 16:00:17,079 INFO [train.py:903] (2/4) Epoch 30, batch 1950, loss[loss=0.2086, simple_loss=0.288, pruned_loss=0.06465, over 19729.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2801, pruned_loss=0.05823, over 3812841.00 frames. 
], batch size: 51, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 16:00:29,632 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199971.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:00:51,087 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 5.211e+02 6.525e+02 7.685e+02 1.771e+03, threshold=1.305e+03, percent-clipped=2.0 +2023-04-03 16:01:21,188 INFO [train.py:903] (2/4) Epoch 30, batch 2000, loss[loss=0.1665, simple_loss=0.2424, pruned_loss=0.04527, over 19757.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2804, pruned_loss=0.05788, over 3810087.91 frames. ], batch size: 46, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:01:22,689 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200012.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:02:21,496 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 16:02:23,758 INFO [train.py:903] (2/4) Epoch 30, batch 2050, loss[loss=0.1886, simple_loss=0.2582, pruned_loss=0.05953, over 19744.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2819, pruned_loss=0.05898, over 3820882.78 frames. ], batch size: 45, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:02:43,246 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 16:02:43,278 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 16:02:50,510 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3765, 2.3491, 2.7127, 3.0975, 2.3331, 2.8876, 2.6775, 2.4861], + device='cuda:2'), covar=tensor([0.4420, 0.4446, 0.1964, 0.2769, 0.4783, 0.2540, 0.5036, 0.3539], + device='cuda:2'), in_proj_covar=tensor([0.0947, 0.1032, 0.0750, 0.0959, 0.0926, 0.0866, 0.0867, 0.0815], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:02:57,798 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 4.986e+02 6.252e+02 8.206e+02 1.738e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 16:03:03,577 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 16:03:11,191 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-03 16:03:26,460 INFO [train.py:903] (2/4) Epoch 30, batch 2100, loss[loss=0.2101, simple_loss=0.288, pruned_loss=0.06608, over 19521.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2811, pruned_loss=0.05885, over 3818132.18 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:03:52,972 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200133.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:03:57,203 WARNING [train.py:1073] (2/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 16:04:18,962 WARNING [train.py:1073] (2/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 16:04:24,922 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:04:29,062 INFO [train.py:903] (2/4) Epoch 30, batch 2150, loss[loss=0.2335, simple_loss=0.3055, pruned_loss=0.08072, over 12901.00 frames. 
], tot_loss[loss=0.2, simple_loss=0.2816, pruned_loss=0.05926, over 3792743.28 frames. ], batch size: 135, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:05:02,779 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.819e+02 5.957e+02 8.119e+02 2.108e+03, threshold=1.191e+03, percent-clipped=2.0 +2023-04-03 16:05:05,537 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:10,723 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200195.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:05:30,906 INFO [train.py:903] (2/4) Epoch 30, batch 2200, loss[loss=0.1608, simple_loss=0.2354, pruned_loss=0.04306, over 18682.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05945, over 3797976.52 frames. ], batch size: 41, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:05:37,218 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:39,454 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9553, 4.4313, 4.6966, 4.6912, 1.7656, 4.3993, 3.8220, 4.4200], + device='cuda:2'), covar=tensor([0.1777, 0.0869, 0.0640, 0.0712, 0.6306, 0.0986, 0.0710, 0.1137], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0798, 0.1011, 0.0885, 0.0872, 0.0772, 0.0597, 0.0936], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 16:05:50,127 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200227.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:06:20,843 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200252.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:33,034 INFO [train.py:903] (2/4) Epoch 30, batch 2250, loss[loss=0.2023, simple_loss=0.2869, pruned_loss=0.05884, over 19472.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2816, pruned_loss=0.05976, over 3806911.12 frames. ], batch size: 64, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:06:41,201 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:44,368 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:08,303 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.298e+02 6.530e+02 8.599e+02 1.543e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-03 16:07:12,109 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200293.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:07:17,566 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:35,304 INFO [train.py:903] (2/4) Epoch 30, batch 2300, loss[loss=0.1743, simple_loss=0.2643, pruned_loss=0.04214, over 19528.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.06015, over 3817016.90 frames. ], batch size: 54, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:07:51,115 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 16:08:18,938 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-04-03 16:08:37,756 INFO [train.py:903] (2/4) Epoch 30, batch 2350, loss[loss=0.1932, simple_loss=0.2812, pruned_loss=0.05262, over 19685.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06053, over 3810722.84 frames. ], batch size: 60, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:08:47,325 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-03 16:09:07,684 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:09:12,865 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.379e+02 4.836e+02 5.833e+02 7.163e+02 1.475e+03, threshold=1.167e+03, percent-clipped=1.0 +2023-04-03 16:09:21,106 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 16:09:34,069 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.6694, 1.3843, 1.5755, 1.4340, 3.2298, 1.1250, 2.4041, 3.6586], + device='cuda:2'), covar=tensor([0.0492, 0.2918, 0.3069, 0.2089, 0.0724, 0.2681, 0.1375, 0.0265], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0383, 0.0404, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:09:38,150 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 16:09:40,564 INFO [train.py:903] (2/4) Epoch 30, batch 2400, loss[loss=0.1842, simple_loss=0.2789, pruned_loss=0.04473, over 19657.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06029, over 3801319.42 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:10:24,209 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.9363, 1.4519, 1.6881, 1.4537, 3.5206, 1.2462, 2.5109, 3.9666], + device='cuda:2'), covar=tensor([0.0469, 0.2873, 0.2885, 0.2123, 0.0677, 0.2593, 0.1439, 0.0213], + device='cuda:2'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:10:43,362 INFO [train.py:903] (2/4) Epoch 30, batch 2450, loss[loss=0.1787, simple_loss=0.2671, pruned_loss=0.04516, over 19672.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05963, over 3809664.88 frames. ], batch size: 53, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:11:19,107 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.121e+02 5.946e+02 7.814e+02 2.121e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-03 16:11:27,565 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3012, 3.8153, 3.9371, 3.9357, 1.5674, 3.7615, 3.2582, 3.6951], + device='cuda:2'), covar=tensor([0.1761, 0.1013, 0.0703, 0.0864, 0.6148, 0.1130, 0.0822, 0.1237], + device='cuda:2'), in_proj_covar=tensor([0.0819, 0.0795, 0.1006, 0.0883, 0.0868, 0.0770, 0.0593, 0.0935], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 16:11:46,476 INFO [train.py:903] (2/4) Epoch 30, batch 2500, loss[loss=0.1859, simple_loss=0.2679, pruned_loss=0.05196, over 19467.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2819, pruned_loss=0.05946, over 3803896.71 frames. 
], batch size: 49, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:12:20,493 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200539.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:12:48,794 INFO [train.py:903] (2/4) Epoch 30, batch 2550, loss[loss=0.2009, simple_loss=0.2909, pruned_loss=0.05547, over 19779.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.0597, over 3815871.26 frames. ], batch size: 56, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:13:23,055 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 4.923e+02 6.163e+02 7.973e+02 2.573e+03, threshold=1.233e+03, percent-clipped=12.0 +2023-04-03 16:13:41,800 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0794, 3.7398, 2.6108, 3.3290, 1.0136, 3.6831, 3.6180, 3.6288], + device='cuda:2'), covar=tensor([0.0738, 0.1046, 0.1884, 0.0946, 0.3660, 0.0751, 0.0988, 0.1164], + device='cuda:2'), in_proj_covar=tensor([0.0533, 0.0431, 0.0517, 0.0358, 0.0409, 0.0458, 0.0449, 0.0487], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:13:47,181 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 16:13:51,619 INFO [train.py:903] (2/4) Epoch 30, batch 2600, loss[loss=0.2127, simple_loss=0.2938, pruned_loss=0.06583, over 19479.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2812, pruned_loss=0.05886, over 3823184.30 frames. ], batch size: 64, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:28,356 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:29,747 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:44,531 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200654.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:14:54,165 INFO [train.py:903] (2/4) Epoch 30, batch 2650, loss[loss=0.1871, simple_loss=0.2663, pruned_loss=0.05394, over 19800.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2809, pruned_loss=0.05852, over 3825078.85 frames. ], batch size: 49, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:55,550 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200663.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:00,435 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200667.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:12,727 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.9857, 1.8285, 2.0743, 1.8219, 4.4836, 1.3054, 2.6739, 4.9002], + device='cuda:2'), covar=tensor([0.0430, 0.2737, 0.2596, 0.1942, 0.0719, 0.2589, 0.1488, 0.0156], + device='cuda:2'), in_proj_covar=tensor([0.0425, 0.0382, 0.0402, 0.0355, 0.0386, 0.0361, 0.0402, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:15:15,805 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 16:15:28,266 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 4.881e+02 6.412e+02 8.116e+02 1.737e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 16:15:55,158 INFO [train.py:903] (2/4) Epoch 30, batch 2700, loss[loss=0.1842, simple_loss=0.2654, pruned_loss=0.05148, over 19872.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2808, pruned_loss=0.05865, over 3821183.82 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:16:34,930 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 16:16:51,805 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:16:59,614 INFO [train.py:903] (2/4) Epoch 30, batch 2750, loss[loss=0.1827, simple_loss=0.2596, pruned_loss=0.0529, over 19737.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2798, pruned_loss=0.05797, over 3820622.60 frames. ], batch size: 46, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:17:34,086 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.266e+02 4.594e+02 5.767e+02 6.844e+02 1.269e+03, threshold=1.153e+03, percent-clipped=0.0 +2023-04-03 16:18:02,575 INFO [train.py:903] (2/4) Epoch 30, batch 2800, loss[loss=0.184, simple_loss=0.2621, pruned_loss=0.05298, over 19412.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2803, pruned_loss=0.05807, over 3828352.34 frames. ], batch size: 48, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:05,168 INFO [train.py:903] (2/4) Epoch 30, batch 2850, loss[loss=0.1783, simple_loss=0.2653, pruned_loss=0.04562, over 19670.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2808, pruned_loss=0.05855, over 3821738.91 frames. ], batch size: 53, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:39,289 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.865e+02 5.683e+02 7.765e+02 1.857e+03, threshold=1.137e+03, percent-clipped=6.0 +2023-04-03 16:20:04,719 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200910.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:20:06,535 INFO [train.py:903] (2/4) Epoch 30, batch 2900, loss[loss=0.2104, simple_loss=0.2869, pruned_loss=0.06693, over 19598.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2812, pruned_loss=0.05879, over 3819018.11 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:20:07,770 WARNING [train.py:1073] (2/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 16:20:36,333 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200935.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:21:00,112 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3548, 2.0261, 1.6023, 1.3741, 1.8520, 1.3352, 1.2751, 1.8670], + device='cuda:2'), covar=tensor([0.0992, 0.0875, 0.1145, 0.0951, 0.0598, 0.1392, 0.0781, 0.0488], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0321, 0.0345, 0.0277, 0.0255, 0.0350, 0.0291, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:21:08,765 INFO [train.py:903] (2/4) Epoch 30, batch 2950, loss[loss=0.2047, simple_loss=0.2958, pruned_loss=0.05674, over 19564.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.281, pruned_loss=0.05869, over 3821370.01 frames. 
], batch size: 61, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:21:44,100 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.844e+02 6.179e+02 7.399e+02 1.416e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-03 16:22:06,367 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:11,911 INFO [train.py:903] (2/4) Epoch 30, batch 3000, loss[loss=0.1718, simple_loss=0.2558, pruned_loss=0.04387, over 19729.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2813, pruned_loss=0.05896, over 3808287.74 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:22:11,912 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 16:22:26,196 INFO [train.py:937] (2/4) Epoch 30, validation: loss=0.1666, simple_loss=0.266, pruned_loss=0.03357, over 944034.00 frames. +2023-04-03 16:22:26,198 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 16:22:26,684 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201012.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:32,383 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 16:22:57,375 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:23:27,830 INFO [train.py:903] (2/4) Epoch 30, batch 3050, loss[loss=0.1913, simple_loss=0.2808, pruned_loss=0.0509, over 19655.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05923, over 3809209.33 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:23:57,942 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([0.9446, 1.3404, 1.7366, 0.6170, 2.0862, 2.4820, 2.2065, 2.6023], + device='cuda:2'), covar=tensor([0.1633, 0.3860, 0.3283, 0.2881, 0.0635, 0.0300, 0.0351, 0.0440], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0257, 0.0200, 0.0222, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 16:24:02,298 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:24:03,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.896e+02 5.871e+02 7.483e+02 2.064e+03, threshold=1.174e+03, percent-clipped=5.0 +2023-04-03 16:24:32,129 INFO [train.py:903] (2/4) Epoch 30, batch 3100, loss[loss=0.2015, simple_loss=0.2851, pruned_loss=0.05892, over 19554.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2806, pruned_loss=0.0587, over 3802428.14 frames. ], batch size: 61, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:24:43,664 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:25:03,520 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 16:25:33,516 INFO [train.py:903] (2/4) Epoch 30, batch 3150, loss[loss=0.164, simple_loss=0.2377, pruned_loss=0.04514, over 19334.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2813, pruned_loss=0.05938, over 3798348.70 frames. ], batch size: 47, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:26:02,516 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 16:26:10,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 4.625e+02 5.752e+02 7.402e+02 1.953e+03, threshold=1.150e+03, percent-clipped=5.0 +2023-04-03 16:26:16,507 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.5238, 2.4693, 2.2309, 2.5266, 2.3730, 2.0367, 2.0295, 2.4537], + device='cuda:2'), covar=tensor([0.0952, 0.1493, 0.1360, 0.1035, 0.1390, 0.0590, 0.1470, 0.0662], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0358, 0.0319, 0.0258, 0.0309, 0.0260, 0.0324, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-04-03 16:26:35,936 INFO [train.py:903] (2/4) Epoch 30, batch 3200, loss[loss=0.2222, simple_loss=0.2993, pruned_loss=0.07256, over 17487.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.281, pruned_loss=0.05881, over 3807738.41 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:26:38,603 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0685, 1.5849, 1.8511, 2.8720, 2.0614, 2.0974, 2.2920, 1.8589], + device='cuda:2'), covar=tensor([0.0906, 0.1151, 0.1091, 0.0781, 0.0929, 0.0955, 0.0991, 0.0863], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0227, 0.0216, 0.0187, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 16:27:03,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 16:27:26,766 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5927, 1.7358, 2.0388, 1.8907, 3.1361, 2.6460, 3.3866, 1.7594], + device='cuda:2'), covar=tensor([0.2691, 0.4516, 0.2994, 0.2020, 0.1604, 0.2220, 0.1650, 0.4520], + device='cuda:2'), in_proj_covar=tensor([0.0557, 0.0679, 0.0766, 0.0514, 0.0637, 0.0549, 0.0672, 0.0580], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:27:40,090 INFO [train.py:903] (2/4) Epoch 30, batch 3250, loss[loss=0.1997, simple_loss=0.2878, pruned_loss=0.0558, over 19696.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2807, pruned_loss=0.05855, over 3816932.03 frames. ], batch size: 58, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:51,494 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:14,883 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.466e+02 4.729e+02 5.856e+02 7.119e+02 1.424e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 16:28:16,412 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:42,756 INFO [train.py:903] (2/4) Epoch 30, batch 3300, loss[loss=0.1819, simple_loss=0.2684, pruned_loss=0.04765, over 19730.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2811, pruned_loss=0.05868, over 3819314.64 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:28:49,704 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 16:28:51,261 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4024, 3.6771, 2.1413, 2.3279, 3.2777, 2.0250, 1.6092, 2.6477], + device='cuda:2'), covar=tensor([0.1298, 0.0532, 0.1151, 0.0889, 0.0476, 0.1254, 0.1033, 0.0577], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0319, 0.0343, 0.0276, 0.0253, 0.0348, 0.0290, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:29:22,254 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 16:29:44,709 INFO [train.py:903] (2/4) Epoch 30, batch 3350, loss[loss=0.18, simple_loss=0.2756, pruned_loss=0.0422, over 18737.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.0582, over 3827364.08 frames. ], batch size: 74, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:30:05,903 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:21,786 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.184e+02 4.772e+02 5.759e+02 7.012e+02 1.305e+03, threshold=1.152e+03, percent-clipped=2.0 +2023-04-03 16:30:37,279 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:48,355 INFO [train.py:903] (2/4) Epoch 30, batch 3400, loss[loss=0.1936, simple_loss=0.2693, pruned_loss=0.05893, over 19698.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2805, pruned_loss=0.05825, over 3826015.04 frames. ], batch size: 53, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:16,056 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:31:51,795 INFO [train.py:903] (2/4) Epoch 30, batch 3450, loss[loss=0.2188, simple_loss=0.2997, pruned_loss=0.06893, over 17264.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2797, pruned_loss=0.05763, over 3829199.02 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:57,371 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 16:32:27,320 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.269e+02 6.567e+02 8.372e+02 2.121e+03, threshold=1.313e+03, percent-clipped=9.0 +2023-04-03 16:32:29,864 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.2146, 2.8509, 2.3701, 2.2567, 2.0458, 2.5538, 1.0476, 2.1541], + device='cuda:2'), covar=tensor([0.0739, 0.0714, 0.0695, 0.1215, 0.1291, 0.1169, 0.1520, 0.1155], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0366, 0.0371, 0.0396, 0.0474, 0.0401, 0.0349, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 16:32:55,662 INFO [train.py:903] (2/4) Epoch 30, batch 3500, loss[loss=0.2128, simple_loss=0.2989, pruned_loss=0.06338, over 17470.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2801, pruned_loss=0.05784, over 3817040.32 frames. 
], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:33:41,231 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:33:58,204 INFO [train.py:903] (2/4) Epoch 30, batch 3550, loss[loss=0.2014, simple_loss=0.2877, pruned_loss=0.05752, over 19778.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2803, pruned_loss=0.05841, over 3808557.40 frames. ], batch size: 56, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:34:35,156 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.645e+02 6.006e+02 7.208e+02 1.141e+03, threshold=1.201e+03, percent-clipped=0.0 +2023-04-03 16:34:37,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.38 vs. limit=2.0 +2023-04-03 16:35:01,953 INFO [train.py:903] (2/4) Epoch 30, batch 3600, loss[loss=0.1938, simple_loss=0.282, pruned_loss=0.05282, over 18211.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05893, over 3816565.46 frames. ], batch size: 84, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:35:06,946 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201615.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:35:14,942 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4748, 1.5335, 1.7113, 1.6733, 2.2314, 2.0951, 2.3784, 0.9690], + device='cuda:2'), covar=tensor([0.2468, 0.4381, 0.2736, 0.1948, 0.1579, 0.2290, 0.1396, 0.4888], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0677, 0.0764, 0.0514, 0.0637, 0.0551, 0.0670, 0.0579], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 16:35:32,763 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:04,974 INFO [train.py:903] (2/4) Epoch 30, batch 3650, loss[loss=0.2331, simple_loss=0.3085, pruned_loss=0.07881, over 17429.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.0597, over 3822710.25 frames. ], batch size: 102, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:36:05,318 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:20,502 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.3371, 3.8526, 3.9708, 3.9770, 1.7349, 3.7944, 3.2566, 3.7430], + device='cuda:2'), covar=tensor([0.1683, 0.0937, 0.0737, 0.0790, 0.5781, 0.1035, 0.0827, 0.1156], + device='cuda:2'), in_proj_covar=tensor([0.0824, 0.0798, 0.1008, 0.0886, 0.0874, 0.0775, 0.0596, 0.0942], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 16:36:27,549 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201679.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:42,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 4.844e+02 5.974e+02 7.556e+02 1.962e+03, threshold=1.195e+03, percent-clipped=4.0 +2023-04-03 16:37:09,554 INFO [train.py:903] (2/4) Epoch 30, batch 3700, loss[loss=0.3025, simple_loss=0.3511, pruned_loss=0.127, over 13050.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2813, pruned_loss=0.05905, over 3821908.80 frames. 
], batch size: 136, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:37:16,821 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.1586, 1.3130, 1.5782, 1.3900, 2.7692, 1.1822, 2.2608, 3.1381], + device='cuda:2'), covar=tensor([0.0531, 0.2825, 0.2780, 0.1906, 0.0738, 0.2314, 0.1114, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0431, 0.0386, 0.0404, 0.0358, 0.0390, 0.0364, 0.0405, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:37:20,707 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 16:37:23,578 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201724.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:31,425 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201730.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:57,751 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201751.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:38:11,314 INFO [train.py:903] (2/4) Epoch 30, batch 3750, loss[loss=0.1813, simple_loss=0.2626, pruned_loss=0.05005, over 19565.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2809, pruned_loss=0.05878, over 3830547.14 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:38:47,455 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.921e+02 6.534e+02 8.381e+02 2.079e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 16:39:04,399 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:14,011 INFO [train.py:903] (2/4) Epoch 30, batch 3800, loss[loss=0.2098, simple_loss=0.2869, pruned_loss=0.0663, over 19589.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2804, pruned_loss=0.05848, over 3831879.38 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:39:35,692 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:41,270 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 16:39:41,684 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3108, 2.1998, 2.1350, 2.0000, 1.8455, 1.9500, 0.8343, 1.3032], + device='cuda:2'), covar=tensor([0.0730, 0.0679, 0.0542, 0.0913, 0.1238, 0.1029, 0.1445, 0.1217], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0365, 0.0371, 0.0395, 0.0473, 0.0401, 0.0348, 0.0351], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 16:39:46,606 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-03 16:40:15,946 INFO [train.py:903] (2/4) Epoch 30, batch 3850, loss[loss=0.1919, simple_loss=0.2751, pruned_loss=0.05432, over 19082.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2805, pruned_loss=0.059, over 3821041.04 frames. 
], batch size: 69, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:40:51,663 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.977e+02 6.719e+02 8.916e+02 2.147e+03, threshold=1.344e+03, percent-clipped=8.0 +2023-04-03 16:41:00,221 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6580, 1.7391, 2.0136, 1.9350, 1.4736, 1.9198, 1.9785, 1.8261], + device='cuda:2'), covar=tensor([0.4412, 0.3994, 0.2155, 0.2580, 0.4122, 0.2419, 0.5678, 0.3734], + device='cuda:2'), in_proj_covar=tensor([0.0956, 0.1039, 0.0757, 0.0966, 0.0934, 0.0873, 0.0872, 0.0821], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:41:18,277 INFO [train.py:903] (2/4) Epoch 30, batch 3900, loss[loss=0.2433, simple_loss=0.3175, pruned_loss=0.08451, over 13587.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2819, pruned_loss=0.05969, over 3825326.60 frames. ], batch size: 138, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:10,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3003, 2.3420, 2.6016, 3.0334, 2.3505, 3.0106, 2.4047, 2.2956], + device='cuda:2'), covar=tensor([0.4619, 0.4137, 0.2133, 0.2617, 0.4400, 0.2188, 0.5930, 0.3626], + device='cuda:2'), in_proj_covar=tensor([0.0956, 0.1039, 0.0756, 0.0965, 0.0932, 0.0874, 0.0872, 0.0821], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:42:20,923 INFO [train.py:903] (2/4) Epoch 30, batch 3950, loss[loss=0.1882, simple_loss=0.2785, pruned_loss=0.04898, over 19524.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2808, pruned_loss=0.0589, over 3830657.15 frames. ], batch size: 54, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,976 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 16:42:51,089 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:42:56,914 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 4.814e+02 5.725e+02 7.208e+02 1.816e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 16:43:11,428 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 16:43:16,606 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:18,103 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:19,156 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:23,700 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:24,455 INFO [train.py:903] (2/4) Epoch 30, batch 4000, loss[loss=0.177, simple_loss=0.2603, pruned_loss=0.04682, over 19389.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2808, pruned_loss=0.05894, over 3840014.65 frames. 
], batch size: 48, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:43:38,595 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:42,230 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.3804, 1.9695, 1.5850, 1.3811, 1.7917, 1.3142, 1.2764, 1.8007], + device='cuda:2'), covar=tensor([0.0921, 0.0833, 0.1100, 0.0864, 0.0636, 0.1339, 0.0717, 0.0491], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0322, 0.0345, 0.0277, 0.0255, 0.0350, 0.0292, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:43:47,508 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5841, 1.6366, 1.9555, 1.8622, 2.6404, 2.2842, 2.7973, 1.2235], + device='cuda:2'), covar=tensor([0.2704, 0.4736, 0.2976, 0.2112, 0.1719, 0.2465, 0.1677, 0.5107], + device='cuda:2'), in_proj_covar=tensor([0.0556, 0.0677, 0.0764, 0.0514, 0.0637, 0.0551, 0.0670, 0.0579], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 16:43:49,648 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202032.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:44:08,199 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 16:44:27,677 INFO [train.py:903] (2/4) Epoch 30, batch 4050, loss[loss=0.2081, simple_loss=0.2952, pruned_loss=0.06054, over 19382.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2803, pruned_loss=0.05847, over 3841083.39 frames. ], batch size: 70, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:44:34,768 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:02,986 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.674e+02 6.115e+02 7.260e+02 2.667e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 16:45:30,406 INFO [train.py:903] (2/4) Epoch 30, batch 4100, loss[loss=0.2185, simple_loss=0.2983, pruned_loss=0.06932, over 19599.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2801, pruned_loss=0.05826, over 3855216.24 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:45:42,195 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:47,983 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.1744, 5.1667, 6.0251, 6.0413, 2.0974, 5.7324, 4.7849, 5.7175], + device='cuda:2'), covar=tensor([0.1767, 0.0869, 0.0581, 0.0593, 0.6430, 0.0854, 0.0642, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0826, 0.0803, 0.1014, 0.0893, 0.0877, 0.0777, 0.0600, 0.0944], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 16:45:48,423 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 16:46:03,035 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202138.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:04,971 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-03 16:46:32,899 INFO [train.py:903] (2/4) Epoch 30, batch 4150, loss[loss=0.2117, simple_loss=0.2968, pruned_loss=0.06329, over 19767.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2806, pruned_loss=0.05825, over 3852101.41 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:46:45,117 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 16:46:59,293 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202183.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:47:08,855 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.823e+02 5.653e+02 7.013e+02 1.196e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-03 16:47:27,784 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0611, 5.0297, 5.8701, 5.8849, 2.1154, 5.5793, 4.6094, 5.5071], + device='cuda:2'), covar=tensor([0.1850, 0.1016, 0.0606, 0.0661, 0.6276, 0.0799, 0.0680, 0.1285], + device='cuda:2'), in_proj_covar=tensor([0.0829, 0.0805, 0.1015, 0.0894, 0.0878, 0.0778, 0.0601, 0.0947], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 16:47:34,528 INFO [train.py:903] (2/4) Epoch 30, batch 4200, loss[loss=0.1968, simple_loss=0.2856, pruned_loss=0.05399, over 19547.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2801, pruned_loss=0.05814, over 3848916.31 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:47:37,803 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 16:48:35,832 INFO [train.py:903] (2/4) Epoch 30, batch 4250, loss[loss=0.2067, simple_loss=0.2933, pruned_loss=0.05999, over 19507.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05942, over 3831981.54 frames. ], batch size: 64, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:48:52,010 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 16:48:57,125 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.1984, 1.9067, 2.0183, 2.9427, 1.8960, 2.3962, 2.3768, 2.1748], + device='cuda:2'), covar=tensor([0.0769, 0.0870, 0.0913, 0.0672, 0.0919, 0.0752, 0.0880, 0.0657], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0216, 0.0187, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 16:49:03,287 WARNING [train.py:1073] (2/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 16:49:12,500 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.334e+02 6.771e+02 9.277e+02 2.113e+03, threshold=1.354e+03, percent-clipped=7.0 +2023-04-03 16:49:38,448 INFO [train.py:903] (2/4) Epoch 30, batch 4300, loss[loss=0.1958, simple_loss=0.285, pruned_loss=0.05333, over 19504.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2809, pruned_loss=0.05877, over 3828703.04 frames. ], batch size: 64, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:50:28,706 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:50:34,362 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-03 16:50:41,238 INFO [train.py:903] (2/4) Epoch 30, batch 4350, loss[loss=0.2251, simple_loss=0.3089, pruned_loss=0.07067, over 19655.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2808, pruned_loss=0.05868, over 3838418.38 frames. ], batch size: 53, lr: 2.72e-03, grad_scale: 4.0 +2023-04-03 16:50:59,702 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:05,037 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.4277, 4.0263, 2.5611, 3.5298, 1.1340, 4.0091, 3.8918, 3.9593], + device='cuda:2'), covar=tensor([0.0665, 0.1044, 0.2117, 0.0996, 0.3748, 0.0714, 0.0998, 0.1171], + device='cuda:2'), in_proj_covar=tensor([0.0541, 0.0440, 0.0525, 0.0365, 0.0416, 0.0464, 0.0459, 0.0495], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 16:51:18,778 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.712e+02 5.570e+02 7.585e+02 1.545e+03, threshold=1.114e+03, percent-clipped=3.0 +2023-04-03 16:51:21,659 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:31,125 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:43,047 INFO [train.py:903] (2/4) Epoch 30, batch 4400, loss[loss=0.2125, simple_loss=0.2975, pruned_loss=0.06371, over 19598.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2811, pruned_loss=0.05905, over 3847221.28 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:51:52,572 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:08,212 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 16:52:17,009 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 16:52:17,410 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:46,171 INFO [train.py:903] (2/4) Epoch 30, batch 4450, loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05942, over 19316.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2815, pruned_loss=0.05942, over 3841777.26 frames. ], batch size: 66, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:52:48,952 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:52,449 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:58,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.54 vs. 
limit=5.0 +2023-04-03 16:53:23,819 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 4.882e+02 5.921e+02 7.342e+02 1.295e+03, threshold=1.184e+03, percent-clipped=2.0 +2023-04-03 16:53:39,171 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4624, 1.4842, 1.7520, 1.6894, 2.6279, 2.2173, 2.8877, 1.2304], + device='cuda:2'), covar=tensor([0.2685, 0.4804, 0.2977, 0.2108, 0.1632, 0.2402, 0.1488, 0.4963], + device='cuda:2'), in_proj_covar=tensor([0.0561, 0.0682, 0.0769, 0.0519, 0.0640, 0.0555, 0.0673, 0.0583], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:53:48,733 INFO [train.py:903] (2/4) Epoch 30, batch 4500, loss[loss=0.2069, simple_loss=0.2903, pruned_loss=0.06177, over 19424.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2808, pruned_loss=0.05917, over 3842815.08 frames. ], batch size: 70, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:54:50,692 INFO [train.py:903] (2/4) Epoch 30, batch 4550, loss[loss=0.2319, simple_loss=0.3092, pruned_loss=0.07731, over 17278.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2813, pruned_loss=0.05927, over 3845076.49 frames. ], batch size: 101, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:55:01,223 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 16:55:25,946 WARNING [train.py:1073] (2/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 16:55:27,103 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 5.006e+02 6.254e+02 8.173e+02 1.576e+03, threshold=1.251e+03, percent-clipped=3.0 +2023-04-03 16:55:52,915 INFO [train.py:903] (2/4) Epoch 30, batch 4600, loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06472, over 19687.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.0604, over 3843390.62 frames. ], batch size: 53, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:56:54,624 INFO [train.py:903] (2/4) Epoch 30, batch 4650, loss[loss=0.1768, simple_loss=0.2554, pruned_loss=0.0491, over 19787.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.05997, over 3830278.78 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:57:12,746 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 16:57:22,337 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2459, 2.3480, 2.5059, 2.9860, 2.3521, 2.9028, 2.4807, 2.3102], + device='cuda:2'), covar=tensor([0.4367, 0.4732, 0.2108, 0.2895, 0.4870, 0.2490, 0.5261, 0.3579], + device='cuda:2'), in_proj_covar=tensor([0.0953, 0.1036, 0.0753, 0.0964, 0.0930, 0.0871, 0.0868, 0.0820], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 16:57:23,003 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 16:57:31,777 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.029e+02 5.947e+02 7.618e+02 1.686e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-03 16:57:55,570 INFO [train.py:903] (2/4) Epoch 30, batch 4700, loss[loss=0.2273, simple_loss=0.3114, pruned_loss=0.07161, over 18116.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2844, pruned_loss=0.06031, over 3828658.61 frames. 
], batch size: 83, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:58:10,551 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:19,481 WARNING [train.py:1073] (2/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 16:58:41,337 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:59,227 INFO [train.py:903] (2/4) Epoch 30, batch 4750, loss[loss=0.1971, simple_loss=0.2811, pruned_loss=0.05658, over 17287.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06011, over 3819732.95 frames. ], batch size: 101, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:59:35,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.055e+02 5.959e+02 7.636e+02 1.705e+03, threshold=1.192e+03, percent-clipped=6.0 +2023-04-03 17:00:01,993 INFO [train.py:903] (2/4) Epoch 30, batch 4800, loss[loss=0.1942, simple_loss=0.2865, pruned_loss=0.05094, over 19631.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05994, over 3813721.56 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:00:08,684 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-03 17:00:19,798 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 17:00:47,867 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202848.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:01:03,245 INFO [train.py:903] (2/4) Epoch 30, batch 4850, loss[loss=0.1566, simple_loss=0.2402, pruned_loss=0.03654, over 19738.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05974, over 3816180.73 frames. ], batch size: 46, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:01:27,949 WARNING [train.py:1073] (2/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 17:01:34,025 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5018, 1.3991, 1.4611, 1.8165, 1.5226, 1.6069, 1.5836, 1.5311], + device='cuda:2'), covar=tensor([0.0721, 0.0764, 0.0822, 0.0602, 0.0937, 0.0747, 0.0858, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0240, 0.0227, 0.0216, 0.0188, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 17:01:41,381 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.041e+02 6.050e+02 7.335e+02 2.031e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:01:47,276 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 17:01:53,233 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 17:01:55,297 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-03 17:02:02,743 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.9380, 2.0662, 2.3307, 2.5504, 1.9811, 2.4071, 2.3072, 2.1287], + device='cuda:2'), covar=tensor([0.4314, 0.3917, 0.1999, 0.2550, 0.4323, 0.2369, 0.5055, 0.3444], + device='cuda:2'), in_proj_covar=tensor([0.0950, 0.1035, 0.0753, 0.0961, 0.0929, 0.0870, 0.0867, 0.0817], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:02:03,487 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 17:02:05,915 INFO [train.py:903] (2/4) Epoch 30, batch 4900, loss[loss=0.2099, simple_loss=0.2982, pruned_loss=0.06076, over 19651.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.05962, over 3816656.31 frames. ], batch size: 60, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:02:25,021 WARNING [train.py:1073] (2/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 17:03:07,820 INFO [train.py:903] (2/4) Epoch 30, batch 4950, loss[loss=0.262, simple_loss=0.3304, pruned_loss=0.09679, over 19667.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06027, over 3811931.82 frames. ], batch size: 58, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:03:22,390 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 17:03:27,595 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 17:03:43,835 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.771e+02 6.104e+02 8.250e+02 2.222e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 17:03:46,234 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 17:04:09,796 INFO [train.py:903] (2/4) Epoch 30, batch 5000, loss[loss=0.1772, simple_loss=0.2613, pruned_loss=0.04651, over 19632.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06027, over 3809945.92 frames. ], batch size: 50, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:04:16,548 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 17:04:28,740 WARNING [train.py:1073] (2/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 17:05:02,472 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:09,009 INFO [train.py:903] (2/4) Epoch 30, batch 5050, loss[loss=0.2722, simple_loss=0.346, pruned_loss=0.09916, over 13628.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06057, over 3808230.00 frames. ], batch size: 136, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:05:18,183 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7104, 4.2583, 2.7040, 3.6983, 1.1287, 4.2206, 4.1058, 4.1518], + device='cuda:2'), covar=tensor([0.0601, 0.1026, 0.2100, 0.1024, 0.3829, 0.0757, 0.1007, 0.1164], + device='cuda:2'), in_proj_covar=tensor([0.0538, 0.0437, 0.0520, 0.0361, 0.0413, 0.0461, 0.0454, 0.0491], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:05:45,267 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 17:05:46,420 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.511e+02 5.411e+02 6.959e+02 2.884e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 17:05:59,235 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:59,653 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-03 17:06:10,226 INFO [train.py:903] (2/4) Epoch 30, batch 5100, loss[loss=0.1751, simple_loss=0.263, pruned_loss=0.04357, over 18248.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2835, pruned_loss=0.06004, over 3822949.89 frames. ], batch size: 83, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:06:23,804 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 17:06:27,361 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 17:06:28,822 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5883, 1.5332, 1.5815, 1.8887, 1.4719, 1.7009, 1.6304, 1.7101], + device='cuda:2'), covar=tensor([0.0806, 0.0869, 0.0927, 0.0620, 0.0771, 0.0799, 0.0841, 0.0672], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0226, 0.0216, 0.0187, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 17:06:30,798 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 17:07:12,618 INFO [train.py:903] (2/4) Epoch 30, batch 5150, loss[loss=0.206, simple_loss=0.2906, pruned_loss=0.06073, over 19544.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05965, over 3808765.31 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:07:27,936 WARNING [train.py:1073] (2/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 17:07:49,458 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.115e+02 6.435e+02 8.085e+02 2.061e+03, threshold=1.287e+03, percent-clipped=7.0 +2023-04-03 17:07:49,628 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203192.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:08:01,715 WARNING [train.py:1073] (2/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 17:08:15,003 INFO [train.py:903] (2/4) Epoch 30, batch 5200, loss[loss=0.2552, simple_loss=0.3272, pruned_loss=0.09159, over 19700.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2814, pruned_loss=0.05939, over 3815657.13 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:08:30,111 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 17:09:14,167 WARNING [train.py:1073] (2/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 17:09:16,391 INFO [train.py:903] (2/4) Epoch 30, batch 5250, loss[loss=0.2404, simple_loss=0.3198, pruned_loss=0.08046, over 19321.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2816, pruned_loss=0.05917, over 3808943.21 frames. 
], batch size: 66, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:09:53,738 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.978e+02 4.929e+02 6.276e+02 8.119e+02 2.486e+03, threshold=1.255e+03, percent-clipped=4.0 +2023-04-03 17:10:12,254 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203307.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:10:17,557 INFO [train.py:903] (2/4) Epoch 30, batch 5300, loss[loss=0.2237, simple_loss=0.3103, pruned_loss=0.06854, over 19789.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05959, over 3803555.78 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:10:36,826 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 17:11:02,677 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([6.2286, 5.6364, 3.0563, 4.9551, 1.1337, 5.8561, 5.6810, 5.8620], + device='cuda:2'), covar=tensor([0.0358, 0.0759, 0.2077, 0.0779, 0.4078, 0.0472, 0.0770, 0.0875], + device='cuda:2'), in_proj_covar=tensor([0.0537, 0.0436, 0.0519, 0.0361, 0.0412, 0.0461, 0.0453, 0.0490], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:11:18,759 INFO [train.py:903] (2/4) Epoch 30, batch 5350, loss[loss=0.1946, simple_loss=0.2843, pruned_loss=0.05248, over 18677.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2824, pruned_loss=0.05923, over 3816376.55 frames. ], batch size: 74, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:11:53,881 WARNING [train.py:1073] (2/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 17:11:56,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 5.524e+02 6.450e+02 8.325e+02 1.910e+03, threshold=1.290e+03, percent-clipped=5.0 +2023-04-03 17:12:05,337 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:12:20,979 INFO [train.py:903] (2/4) Epoch 30, batch 5400, loss[loss=0.2159, simple_loss=0.2945, pruned_loss=0.06868, over 19579.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06053, over 3799057.55 frames. ], batch size: 52, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:02,974 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203447.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:20,915 INFO [train.py:903] (2/4) Epoch 30, batch 5450, loss[loss=0.2034, simple_loss=0.2875, pruned_loss=0.05965, over 19618.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06018, over 3816116.69 frames. 
], batch size: 50, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:21,414 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.2762, 2.2569, 2.6007, 2.9997, 2.2624, 2.7895, 2.5699, 2.3663], + device='cuda:2'), covar=tensor([0.4430, 0.4399, 0.1984, 0.2861, 0.4942, 0.2501, 0.5142, 0.3554], + device='cuda:2'), in_proj_covar=tensor([0.0954, 0.1036, 0.0753, 0.0963, 0.0930, 0.0869, 0.0867, 0.0819], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:13:50,566 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:59,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.910e+02 6.531e+02 8.117e+02 1.984e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-03 17:14:10,096 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.5416, 1.3382, 1.9371, 1.7293, 3.0738, 4.4240, 4.3019, 4.9231], + device='cuda:2'), covar=tensor([0.1616, 0.4181, 0.3624, 0.2518, 0.0704, 0.0239, 0.0202, 0.0210], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0336, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0283], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 17:14:23,410 INFO [train.py:903] (2/4) Epoch 30, batch 5500, loss[loss=0.2204, simple_loss=0.3031, pruned_loss=0.06884, over 19652.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2831, pruned_loss=0.05983, over 3810898.98 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:14:26,959 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:14:48,309 WARNING [train.py:1073] (2/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 17:15:24,331 INFO [train.py:903] (2/4) Epoch 30, batch 5550, loss[loss=0.2006, simple_loss=0.2888, pruned_loss=0.05618, over 19663.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05989, over 3815274.65 frames. ], batch size: 55, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:15:24,639 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203562.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:15:25,807 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203563.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:15:31,862 WARNING [train.py:1073] (2/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-03 17:15:56,770 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203588.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:16:01,700 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 4.952e+02 6.311e+02 7.819e+02 2.120e+03, threshold=1.262e+03, percent-clipped=2.0 +2023-04-03 17:16:03,245 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8318, 1.5646, 1.6430, 2.3608, 1.7568, 2.0432, 2.0858, 1.8447], + device='cuda:2'), covar=tensor([0.0803, 0.0953, 0.0974, 0.0621, 0.0837, 0.0774, 0.0840, 0.0699], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0238, 0.0226, 0.0215, 0.0187, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 17:16:20,042 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([3.5742, 4.1312, 4.3101, 4.3114, 1.7886, 4.0935, 3.5690, 4.0714], + device='cuda:2'), covar=tensor([0.1842, 0.0908, 0.0667, 0.0809, 0.6130, 0.0916, 0.0740, 0.1173], + device='cuda:2'), in_proj_covar=tensor([0.0830, 0.0807, 0.1016, 0.0892, 0.0879, 0.0779, 0.0600, 0.0947], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 17:16:22,134 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 17:16:27,765 INFO [train.py:903] (2/4) Epoch 30, batch 5600, loss[loss=0.2265, simple_loss=0.3081, pruned_loss=0.07242, over 18719.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2837, pruned_loss=0.05999, over 3813962.45 frames. ], batch size: 74, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:16:48,443 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 17:17:02,903 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.3425, 2.3256, 2.6549, 3.1489, 2.4457, 2.9898, 2.6632, 2.3926], + device='cuda:2'), covar=tensor([0.4549, 0.4284, 0.1985, 0.2776, 0.4467, 0.2438, 0.5274, 0.3636], + device='cuda:2'), in_proj_covar=tensor([0.0955, 0.1039, 0.0755, 0.0966, 0.0933, 0.0871, 0.0869, 0.0821], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:17:29,524 INFO [train.py:903] (2/4) Epoch 30, batch 5650, loss[loss=0.1947, simple_loss=0.2837, pruned_loss=0.05283, over 19658.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05918, over 3826541.89 frames. ], batch size: 55, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:18:06,339 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.913e+02 5.869e+02 7.597e+02 1.607e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 17:18:18,656 WARNING [train.py:1073] (2/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 17:18:29,973 INFO [train.py:903] (2/4) Epoch 30, batch 5700, loss[loss=0.2242, simple_loss=0.3025, pruned_loss=0.07294, over 19681.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.283, pruned_loss=0.05945, over 3819749.87 frames. 
], batch size: 60, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:03,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4928, 1.5175, 1.7754, 1.7604, 2.7233, 2.3492, 2.8716, 1.3077], + device='cuda:2'), covar=tensor([0.2745, 0.4781, 0.3024, 0.2137, 0.1621, 0.2312, 0.1625, 0.5026], + device='cuda:2'), in_proj_covar=tensor([0.0562, 0.0684, 0.0769, 0.0518, 0.0641, 0.0555, 0.0674, 0.0584], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:19:31,947 INFO [train.py:903] (2/4) Epoch 30, batch 5750, loss[loss=0.1993, simple_loss=0.2822, pruned_loss=0.05816, over 19291.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05915, over 3815656.16 frames. ], batch size: 66, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:35,369 WARNING [train.py:1073] (2/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 17:19:35,647 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:44,378 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:45,195 WARNING [train.py:1073] (2/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 17:19:51,030 WARNING [train.py:1073] (2/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 17:20:00,633 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:09,169 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.021e+02 5.985e+02 7.897e+02 1.857e+03, threshold=1.197e+03, percent-clipped=7.0 +2023-04-03 17:20:15,351 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:35,340 INFO [train.py:903] (2/4) Epoch 30, batch 5800, loss[loss=0.1664, simple_loss=0.2421, pruned_loss=0.04536, over 18701.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05928, over 3814844.16 frames. ], batch size: 41, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:20:38,228 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 17:20:43,833 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:55,516 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4765, 2.0533, 1.6223, 1.3364, 1.9374, 1.1363, 1.2460, 1.8918], + device='cuda:2'), covar=tensor([0.1120, 0.0932, 0.1163, 0.1033, 0.0668, 0.1583, 0.0918, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0324, 0.0347, 0.0279, 0.0257, 0.0351, 0.0293, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:20:56,441 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:13,740 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:37,911 INFO [train.py:903] (2/4) Epoch 30, batch 5850, loss[loss=0.231, simple_loss=0.3019, pruned_loss=0.0801, over 19629.00 frames. 
], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05987, over 3814961.05 frames. ], batch size: 50, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:21:41,696 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:22:15,808 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.919e+02 5.824e+02 7.977e+02 1.569e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 17:22:39,480 INFO [train.py:903] (2/4) Epoch 30, batch 5900, loss[loss=0.1948, simple_loss=0.2813, pruned_loss=0.05421, over 19545.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2818, pruned_loss=0.05947, over 3825629.28 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:22:42,674 WARNING [train.py:1073] (2/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 17:22:47,260 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203918.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:23:06,121 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 17:23:19,592 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:23:25,690 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8279, 1.9537, 2.2069, 2.3089, 1.8232, 2.2688, 2.1889, 2.0581], + device='cuda:2'), covar=tensor([0.4309, 0.4029, 0.2009, 0.2552, 0.4184, 0.2361, 0.5060, 0.3497], + device='cuda:2'), in_proj_covar=tensor([0.0952, 0.1036, 0.0753, 0.0965, 0.0931, 0.0870, 0.0865, 0.0818], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:23:40,675 INFO [train.py:903] (2/4) Epoch 30, batch 5950, loss[loss=0.1912, simple_loss=0.2773, pruned_loss=0.05258, over 18790.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.0594, over 3816235.60 frames. ], batch size: 74, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:18,146 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.092e+02 6.183e+02 7.281e+02 1.501e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-03 17:24:45,152 INFO [train.py:903] (2/4) Epoch 30, batch 6000, loss[loss=0.2087, simple_loss=0.2934, pruned_loss=0.06206, over 19341.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2808, pruned_loss=0.05885, over 3826981.32 frames. ], batch size: 70, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:45,153 INFO [train.py:928] (2/4) Computing validation loss +2023-04-03 17:24:58,734 INFO [train.py:937] (2/4) Epoch 30, validation: loss=0.167, simple_loss=0.2658, pruned_loss=0.03407, over 944034.00 frames. +2023-04-03 17:24:58,735 INFO [train.py:938] (2/4) Maximum memory allocated so far is 18869MB +2023-04-03 17:26:02,075 INFO [train.py:903] (2/4) Epoch 30, batch 6050, loss[loss=0.1664, simple_loss=0.2464, pruned_loss=0.04323, over 19384.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2809, pruned_loss=0.05885, over 3831955.51 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:26:38,224 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 5.054e+02 6.352e+02 7.854e+02 1.582e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-03 17:26:40,400 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-04-03 17:26:42,133 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:00,747 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:04,114 INFO [train.py:903] (2/4) Epoch 30, batch 6100, loss[loss=0.219, simple_loss=0.3034, pruned_loss=0.06725, over 19520.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2812, pruned_loss=0.05916, over 3828165.56 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:27:23,317 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:04,388 INFO [train.py:903] (2/4) Epoch 30, batch 6150, loss[loss=0.1856, simple_loss=0.2664, pruned_loss=0.05238, over 19787.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2816, pruned_loss=0.05942, over 3831193.50 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:28:04,787 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.1767, 1.3326, 1.7175, 1.1700, 2.4710, 3.3521, 3.0162, 3.5533], + device='cuda:2'), covar=tensor([0.1683, 0.4057, 0.3555, 0.2829, 0.0702, 0.0222, 0.0254, 0.0324], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0336, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0282], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-04-03 17:28:29,856 INFO [zipformer.py:1188] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:34,970 WARNING [train.py:1073] (2/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 17:28:42,857 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.061e+02 6.485e+02 7.945e+02 1.594e+03, threshold=1.297e+03, percent-clipped=2.0 +2023-04-03 17:28:52,807 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204200.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:03,795 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:07,246 INFO [train.py:903] (2/4) Epoch 30, batch 6200, loss[loss=0.244, simple_loss=0.3151, pruned_loss=0.08647, over 13504.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05947, over 3812138.32 frames. 
], batch size: 136, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:29:24,438 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:25,574 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:29,046 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.4162, 2.4081, 2.1195, 2.0113, 1.9406, 2.2671, 1.5655, 1.8727], + device='cuda:2'), covar=tensor([0.0658, 0.0704, 0.0552, 0.0894, 0.0927, 0.0953, 0.1159, 0.0921], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0369, 0.0372, 0.0397, 0.0476, 0.0402, 0.0349, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 17:29:45,908 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:30:09,327 INFO [train.py:903] (2/4) Epoch 30, batch 6250, loss[loss=0.2194, simple_loss=0.2972, pruned_loss=0.07077, over 19663.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05942, over 3810738.42 frames. ], batch size: 60, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:30:10,640 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204262.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:30:18,767 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-03 17:30:38,580 WARNING [train.py:1073] (2/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 17:30:46,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.004e+02 5.938e+02 7.424e+02 1.100e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-03 17:31:10,305 INFO [train.py:903] (2/4) Epoch 30, batch 6300, loss[loss=0.2746, simple_loss=0.3384, pruned_loss=0.1054, over 18105.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2826, pruned_loss=0.05984, over 3795761.77 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:31:24,170 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:32:11,325 INFO [train.py:903] (2/4) Epoch 30, batch 6350, loss[loss=0.1846, simple_loss=0.2767, pruned_loss=0.04628, over 19651.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05958, over 3801761.65 frames. 
], batch size: 55, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:32:30,606 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204377.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:32:50,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.728e+02 6.049e+02 7.442e+02 1.533e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:33:07,743 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.8746, 2.7423, 2.1940, 2.1972, 1.9632, 2.4292, 1.1755, 2.0273], + device='cuda:2'), covar=tensor([0.0734, 0.0662, 0.0725, 0.1207, 0.1179, 0.1116, 0.1522, 0.1108], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0366, 0.0370, 0.0394, 0.0472, 0.0398, 0.0347, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 17:33:12,661 INFO [train.py:903] (2/4) Epoch 30, batch 6400, loss[loss=0.2234, simple_loss=0.3, pruned_loss=0.07333, over 19659.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05958, over 3803509.48 frames. ], batch size: 53, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:33:45,677 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:33:45,837 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([5.0882, 5.1954, 5.9328, 5.9361, 2.0918, 5.5978, 4.7192, 5.6287], + device='cuda:2'), covar=tensor([0.1888, 0.0849, 0.0553, 0.0685, 0.6484, 0.0924, 0.0681, 0.1141], + device='cuda:2'), in_proj_covar=tensor([0.0825, 0.0803, 0.1009, 0.0883, 0.0872, 0.0774, 0.0597, 0.0940], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-04-03 17:34:00,732 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([2.0042, 1.9680, 1.9098, 1.7360, 1.5730, 1.7324, 0.4819, 0.9450], + device='cuda:2'), covar=tensor([0.0704, 0.0726, 0.0446, 0.0748, 0.1325, 0.0857, 0.1432, 0.1194], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0367, 0.0370, 0.0395, 0.0474, 0.0399, 0.0349, 0.0350], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-04-03 17:34:13,537 INFO [train.py:903] (2/4) Epoch 30, batch 6450, loss[loss=0.1627, simple_loss=0.2469, pruned_loss=0.03927, over 19379.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2818, pruned_loss=0.05905, over 3820583.43 frames. ], batch size: 47, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:34:35,895 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:51,303 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 4.981e+02 6.276e+02 8.010e+02 2.155e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 17:34:54,966 WARNING [train.py:1073] (2/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 17:34:59,498 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:08,072 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:16,832 INFO [train.py:903] (2/4) Epoch 30, batch 6500, loss[loss=0.2012, simple_loss=0.283, pruned_loss=0.05972, over 19681.00 frames. 
], tot_loss[loss=0.1989, simple_loss=0.281, pruned_loss=0.05842, over 3812492.46 frames. ], batch size: 53, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:35:19,135 WARNING [train.py:1073] (2/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 17:35:30,921 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204524.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:31,809 INFO [zipformer.py:1188] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:09,146 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:11,588 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.8219, 1.6872, 1.7773, 2.2662, 1.6760, 2.0708, 1.9777, 1.8887], + device='cuda:2'), covar=tensor([0.0823, 0.0865, 0.0916, 0.0605, 0.0864, 0.0769, 0.0838, 0.0674], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0221, 0.0227, 0.0238, 0.0226, 0.0215, 0.0186, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:2') +2023-04-03 17:36:18,057 INFO [train.py:903] (2/4) Epoch 30, batch 6550, loss[loss=0.2174, simple_loss=0.3029, pruned_loss=0.06596, over 17940.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05782, over 3824201.61 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:36:28,069 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.0779, 3.7275, 2.6619, 3.3251, 0.9208, 3.7102, 3.5715, 3.6704], + device='cuda:2'), covar=tensor([0.0706, 0.0988, 0.1806, 0.0980, 0.3688, 0.0772, 0.0989, 0.1125], + device='cuda:2'), in_proj_covar=tensor([0.0535, 0.0434, 0.0517, 0.0358, 0.0407, 0.0457, 0.0451, 0.0486], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:36:40,537 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:56,887 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.288e+02 6.916e+02 9.470e+02 2.608e+03, threshold=1.383e+03, percent-clipped=11.0 +2023-04-03 17:37:11,460 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:37:20,113 INFO [train.py:903] (2/4) Epoch 30, batch 6600, loss[loss=0.1846, simple_loss=0.2728, pruned_loss=0.04826, over 19530.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2797, pruned_loss=0.05733, over 3821586.09 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:37:46,981 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204633.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:37:54,610 INFO [zipformer.py:1188] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204640.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:38:17,186 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204658.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:38:21,420 INFO [train.py:903] (2/4) Epoch 30, batch 6650, loss[loss=0.1943, simple_loss=0.2758, pruned_loss=0.05643, over 19761.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2803, pruned_loss=0.05777, over 3812309.34 frames. 
], batch size: 54, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:38:59,519 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 4.923e+02 6.497e+02 8.594e+02 3.232e+03, threshold=1.299e+03, percent-clipped=5.0 +2023-04-03 17:39:24,424 INFO [train.py:903] (2/4) Epoch 30, batch 6700, loss[loss=0.2401, simple_loss=0.324, pruned_loss=0.07812, over 19711.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2814, pruned_loss=0.05822, over 3812983.28 frames. ], batch size: 63, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:39:42,313 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([4.7955, 1.7714, 1.9260, 1.8821, 4.3718, 1.4391, 2.5354, 4.8297], + device='cuda:2'), covar=tensor([0.0516, 0.2822, 0.2883, 0.1943, 0.0730, 0.2557, 0.1649, 0.0161], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0381, 0.0402, 0.0357, 0.0386, 0.0361, 0.0401, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:40:21,758 INFO [train.py:903] (2/4) Epoch 30, batch 6750, loss[loss=0.2627, simple_loss=0.3359, pruned_loss=0.09474, over 18797.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2809, pruned_loss=0.05813, over 3824455.58 frames. ], batch size: 74, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:41,886 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.4250, 2.0780, 1.6161, 1.4852, 1.8594, 1.3807, 1.4444, 1.8465], + device='cuda:2'), covar=tensor([0.0967, 0.0761, 0.1100, 0.0869, 0.0577, 0.1249, 0.0626, 0.0481], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0324, 0.0346, 0.0278, 0.0256, 0.0351, 0.0293, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-04-03 17:40:46,282 INFO [zipformer.py:2441] (2/4) attn_weights_entropy = tensor([1.6658, 1.7545, 2.0010, 2.0173, 1.6039, 2.0160, 1.9611, 1.8552], + device='cuda:2'), covar=tensor([0.4207, 0.3809, 0.2050, 0.2512, 0.3926, 0.2257, 0.5456, 0.3546], + device='cuda:2'), in_proj_covar=tensor([0.0955, 0.1039, 0.0754, 0.0967, 0.0935, 0.0871, 0.0869, 0.0821], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-04-03 17:40:58,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.849e+02 6.262e+02 9.905e+02 2.863e+03, threshold=1.252e+03, percent-clipped=11.0 +2023-04-03 17:41:17,890 INFO [zipformer.py:1188] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:19,750 INFO [train.py:903] (2/4) Epoch 30, batch 6800, loss[loss=0.2148, simple_loss=0.2994, pruned_loss=0.06514, over 19408.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2821, pruned_loss=0.05895, over 3820932.07 frames. ], batch size: 70, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:41:45,204 INFO [zipformer.py:1188] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:50,102 INFO [train.py:1171] (2/4) Done! 
diff --git a/log/log-train-2023-03-31-18-51-54-3 b/log/log-train-2023-03-31-18-51-54-3 new file mode 100644 index 0000000000000000000000000000000000000000..485c61af9fb329663254c10269475be8eeebf33b --- /dev/null +++ b/log/log-train-2023-03-31-18-51-54-3 @@ -0,0 +1,25303 @@ +2023-03-31 18:51:54,787 INFO [train.py:975] (3/4) Training started +2023-03-31 18:51:54,787 INFO [train.py:985] (3/4) Device: cuda:3 +2023-03-31 18:51:54,825 INFO [train.py:994] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r2n03', 'IP address': '10.1.2.3'}, 'world_size': 4, 'master_port': 54321, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 10, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-31 18:51:54,826 INFO [train.py:996] (3/4) About to create model +2023-03-31 18:51:55,665 INFO [zipformer.py:405] (3/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-03-31 18:51:55,676 INFO [train.py:1000] (3/4) Number of model parameters: 20697573 +2023-03-31 18:52:03,011 INFO [train.py:1019] (3/4) Using DDP +2023-03-31 18:52:03,648 INFO [asr_datamodule.py:429] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts, combined with their reverberated versions +2023-03-31 18:52:03,689 INFO [asr_datamodule.py:224] (3/4) Enable MUSAN +2023-03-31 18:52:03,689 INFO [asr_datamodule.py:225] (3/4) About to get Musan cuts +2023-03-31 18:52:05,941 INFO [asr_datamodule.py:249] (3/4) Enable SpecAugment +2023-03-31 18:52:05,941 INFO [asr_datamodule.py:250] (3/4) Time warp factor: 80 +2023-03-31 18:52:05,941 INFO [asr_datamodule.py:260] (3/4) Num frame mask: 10 +2023-03-31 18:52:05,941 INFO [asr_datamodule.py:273] (3/4) About to create train dataset +2023-03-31 18:52:05,941 INFO [asr_datamodule.py:300] (3/4) Using DynamicBucketingSampler. +2023-03-31 18:52:08,275 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:08,739 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:09,020 INFO [asr_datamodule.py:315] (3/4) About to create train dataloader +2023-03-31 18:52:09,021 INFO [asr_datamodule.py:440] (3/4) About to get dev-clean cuts +2023-03-31 18:52:09,022 INFO [asr_datamodule.py:447] (3/4) About to get dev-other cuts +2023-03-31 18:52:09,023 INFO [asr_datamodule.py:346] (3/4) About to create dev dataset +2023-03-31 18:52:09,471 INFO [asr_datamodule.py:363] (3/4) About to create dev dataloader +2023-03-31 18:52:23,899 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 18:52:24,322 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 18:52:36,261 INFO [train.py:903] (3/4) Epoch 1, batch 0, loss[loss=7.077, simple_loss=6.406, pruned_loss=6.699, over 19303.00 frames. ], tot_loss[loss=7.077, simple_loss=6.406, pruned_loss=6.699, over 19303.00 frames. ], batch size: 44, lr: 2.50e-02, grad_scale: 2.0 +2023-03-31 18:52:36,261 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 18:52:49,135 INFO [train.py:937] (3/4) Epoch 1, validation: loss=6.888, simple_loss=6.229, pruned_loss=6.575, over 944034.00 frames. +2023-03-31 18:52:49,136 INFO [train.py:938] (3/4) Maximum memory allocated so far is 11586MB +2023-03-31 18:53:02,997 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 18:53:58,980 INFO [train.py:903] (3/4) Epoch 1, batch 50, loss[loss=1.355, simple_loss=1.199, pruned_loss=1.39, over 19320.00 frames. ], tot_loss[loss=2.158, simple_loss=1.949, pruned_loss=1.999, over 874003.52 frames. 
], batch size: 70, lr: 2.75e-02, grad_scale: 0.125 +2023-03-31 18:54:00,856 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 18:54:25,976 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1992, 4.1942, 4.1928, 4.1986, 4.1974, 4.1933, 4.1975, 4.1975], + device='cuda:3'), covar=tensor([0.0069, 0.0043, 0.0079, 0.0073, 0.0193, 0.0106, 0.0119, 0.0103], + device='cuda:3'), in_proj_covar=tensor([0.0013, 0.0013, 0.0013, 0.0014, 0.0014, 0.0013, 0.0013, 0.0013], + device='cuda:3'), out_proj_covar=tensor([8.6617e-06, 8.9851e-06, 8.8598e-06, 8.8768e-06, 8.9125e-06, 8.9549e-06, + 8.8366e-06, 8.7795e-06], device='cuda:3') +2023-03-31 18:54:36,611 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:54:41,879 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 18:54:47,823 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=4.44 vs. limit=2.0 +2023-03-31 18:54:59,744 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=42.44 vs. limit=5.0 +2023-03-31 18:55:06,318 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=17.55 vs. limit=2.0 +2023-03-31 18:55:11,352 INFO [train.py:903] (3/4) Epoch 1, batch 100, loss[loss=1.141, simple_loss=0.9946, pruned_loss=1.181, over 13389.00 frames. ], tot_loss[loss=1.632, simple_loss=1.453, pruned_loss=1.614, over 1525776.39 frames. ], batch size: 136, lr: 3.00e-02, grad_scale: 0.25 +2023-03-31 18:55:11,617 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 18:55:17,835 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.479e+01 1.678e+02 3.237e+02 1.260e+03 8.630e+04, threshold=6.475e+02, percent-clipped=0.0 +2023-03-31 18:55:25,893 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 18:55:43,476 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=6.18 vs. limit=2.0 +2023-03-31 18:56:20,076 INFO [train.py:903] (3/4) Epoch 1, batch 150, loss[loss=1.034, simple_loss=0.882, pruned_loss=1.101, over 19442.00 frames. ], tot_loss[loss=1.391, simple_loss=1.223, pruned_loss=1.423, over 2036100.23 frames. ], batch size: 70, lr: 3.25e-02, grad_scale: 0.25 +2023-03-31 18:57:32,422 INFO [train.py:903] (3/4) Epoch 1, batch 200, loss[loss=1.032, simple_loss=0.8756, pruned_loss=1.053, over 19610.00 frames. ], tot_loss[loss=1.255, simple_loss=1.093, pruned_loss=1.291, over 2435973.89 frames. ], batch size: 57, lr: 3.50e-02, grad_scale: 0.5 +2023-03-31 18:57:33,129 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 18:57:39,439 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.067e+01 1.186e+02 1.653e+02 2.090e+02 5.158e+02, threshold=3.307e+02, percent-clipped=0.0 +2023-03-31 18:57:48,568 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-31 18:58:43,211 INFO [train.py:903] (3/4) Epoch 1, batch 250, loss[loss=1.004, simple_loss=0.8457, pruned_loss=0.9909, over 19586.00 frames. ], tot_loss[loss=1.167, simple_loss=1.009, pruned_loss=1.192, over 2749483.49 frames. 
], batch size: 61, lr: 3.75e-02, grad_scale: 0.5 +2023-03-31 18:59:51,889 INFO [train.py:903] (3/4) Epoch 1, batch 300, loss[loss=0.9671, simple_loss=0.8077, pruned_loss=0.9349, over 19679.00 frames. ], tot_loss[loss=1.102, simple_loss=0.946, pruned_loss=1.114, over 2992759.98 frames. ], batch size: 58, lr: 4.00e-02, grad_scale: 1.0 +2023-03-31 18:59:56,691 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.717e+01 1.166e+02 1.521e+02 1.991e+02 3.277e+02, threshold=3.043e+02, percent-clipped=0.0 +2023-03-31 18:59:58,411 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=306.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:00:09,467 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=314.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:00:49,610 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.11 vs. limit=2.0 +2023-03-31 19:00:58,531 INFO [train.py:903] (3/4) Epoch 1, batch 350, loss[loss=0.9795, simple_loss=0.8117, pruned_loss=0.9264, over 19346.00 frames. ], tot_loss[loss=1.056, simple_loss=0.8993, pruned_loss=1.054, over 3193706.32 frames. ], batch size: 66, lr: 4.25e-02, grad_scale: 1.0 +2023-03-31 19:01:05,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 19:01:47,854 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4088, 4.0422, 4.2233, 4.2358, 4.4600, 4.4360, 4.4329, 4.4024], + device='cuda:3'), covar=tensor([0.0124, 0.0192, 0.0141, 0.0187, 0.0123, 0.0095, 0.0107, 0.0117], + device='cuda:3'), in_proj_covar=tensor([0.0013, 0.0014, 0.0013, 0.0014, 0.0014, 0.0013, 0.0014, 0.0013], + device='cuda:3'), out_proj_covar=tensor([8.6472e-06, 9.1172e-06, 8.9155e-06, 8.9413e-06, 8.7632e-06, 8.8641e-06, + 8.7775e-06, 8.7957e-06], device='cuda:3') +2023-03-31 19:02:08,273 INFO [train.py:903] (3/4) Epoch 1, batch 400, loss[loss=0.9656, simple_loss=0.7948, pruned_loss=0.8932, over 19545.00 frames. ], tot_loss[loss=1.026, simple_loss=0.8681, pruned_loss=1.008, over 3321103.40 frames. ], batch size: 54, lr: 4.50e-02, grad_scale: 2.0 +2023-03-31 19:02:13,391 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.278e+02 1.546e+02 1.978e+02 5.474e+02, threshold=3.091e+02, percent-clipped=7.0 +2023-03-31 19:02:13,799 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=405.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:02:33,793 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=421.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:03:05,099 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=445.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:03:12,885 INFO [train.py:903] (3/4) Epoch 1, batch 450, loss[loss=0.9599, simple_loss=0.7894, pruned_loss=0.8566, over 19608.00 frames. ], tot_loss[loss=0.9995, simple_loss=0.8405, pruned_loss=0.9638, over 3444940.34 frames. ], batch size: 61, lr: 4.75e-02, grad_scale: 2.0 +2023-03-31 19:03:41,214 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=4.25 vs. limit=2.0 +2023-03-31 19:03:51,035 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 19:03:51,734 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-03-31 19:04:19,396 INFO [train.py:903] (3/4) Epoch 1, batch 500, loss[loss=0.8779, simple_loss=0.7256, pruned_loss=0.7473, over 19395.00 frames. ], tot_loss[loss=0.9832, simple_loss=0.8237, pruned_loss=0.9259, over 3522392.64 frames. ], batch size: 48, lr: 4.99e-02, grad_scale: 2.0 +2023-03-31 19:04:25,207 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.386e+02 1.860e+02 2.529e+02 4.736e+02, threshold=3.719e+02, percent-clipped=12.0 +2023-03-31 19:05:27,838 INFO [train.py:903] (3/4) Epoch 1, batch 550, loss[loss=0.8769, simple_loss=0.7334, pruned_loss=0.7027, over 19672.00 frames. ], tot_loss[loss=0.9673, simple_loss=0.8088, pruned_loss=0.8861, over 3587717.64 frames. ], batch size: 53, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:05:41,522 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=560.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:04,508 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=580.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:12,408 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=586.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:06:32,557 INFO [train.py:903] (3/4) Epoch 1, batch 600, loss[loss=0.8956, simple_loss=0.7518, pruned_loss=0.6932, over 19720.00 frames. ], tot_loss[loss=0.9435, simple_loss=0.7891, pruned_loss=0.8385, over 3644417.99 frames. ], batch size: 63, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:06:36,898 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 2.910e+02 4.086e+02 6.136e+02 1.097e+03, threshold=8.173e+02, percent-clipped=60.0 +2023-03-31 19:06:41,681 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=608.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:06:43,962 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=2.18 vs. limit=2.0 +2023-03-31 19:06:47,614 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=3.69 vs. limit=2.0 +2023-03-31 19:07:11,460 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 19:07:38,089 INFO [train.py:903] (3/4) Epoch 1, batch 650, loss[loss=0.7179, simple_loss=0.6089, pruned_loss=0.5294, over 19352.00 frames. ], tot_loss[loss=0.9193, simple_loss=0.7701, pruned_loss=0.7918, over 3680038.14 frames. ], batch size: 47, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:07:47,307 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=658.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:08:13,494 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=677.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:08:41,843 INFO [train.py:903] (3/4) Epoch 1, batch 700, loss[loss=0.7126, simple_loss=0.6074, pruned_loss=0.5094, over 15511.00 frames. ], tot_loss[loss=0.8901, simple_loss=0.7477, pruned_loss=0.7424, over 3718904.32 frames. 
], batch size: 34, lr: 4.98e-02, grad_scale: 2.0 +2023-03-31 19:08:43,434 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=702.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:08:46,601 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.173e+02 6.580e+02 8.914e+02 3.039e+03, threshold=1.316e+03, percent-clipped=29.0 +2023-03-31 19:09:43,565 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=749.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:09:45,325 INFO [train.py:903] (3/4) Epoch 1, batch 750, loss[loss=0.8078, simple_loss=0.6888, pruned_loss=0.5669, over 19651.00 frames. ], tot_loss[loss=0.8617, simple_loss=0.7266, pruned_loss=0.6956, over 3747947.31 frames. ], batch size: 58, lr: 4.97e-02, grad_scale: 2.0 +2023-03-31 19:10:14,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=773.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:10:48,968 INFO [train.py:903] (3/4) Epoch 1, batch 800, loss[loss=0.7224, simple_loss=0.6229, pruned_loss=0.487, over 19846.00 frames. ], tot_loss[loss=0.8357, simple_loss=0.7072, pruned_loss=0.6544, over 3765221.58 frames. ], batch size: 52, lr: 4.97e-02, grad_scale: 4.0 +2023-03-31 19:10:53,085 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.827e+02 7.991e+02 1.030e+03 2.888e+03, threshold=1.598e+03, percent-clipped=14.0 +2023-03-31 19:11:01,619 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 19:11:08,448 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=816.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:11:23,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9111, 0.8740, 1.0417, 0.9486, 0.8724, 0.8623, 0.5636, 0.7401], + device='cuda:3'), covar=tensor([0.5532, 0.6191, 0.5872, 0.8007, 0.7043, 0.7320, 0.9413, 0.6373], + device='cuda:3'), in_proj_covar=tensor([0.0056, 0.0059, 0.0061, 0.0067, 0.0064, 0.0063, 0.0071, 0.0057], + device='cuda:3'), out_proj_covar=tensor([4.1324e-05, 3.7400e-05, 3.8572e-05, 4.7434e-05, 4.3410e-05, 3.9747e-05, + 5.0992e-05, 3.7452e-05], device='cuda:3') +2023-03-31 19:11:40,315 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=841.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:11:52,170 INFO [train.py:903] (3/4) Epoch 1, batch 850, loss[loss=0.7706, simple_loss=0.6636, pruned_loss=0.5139, over 19735.00 frames. ], tot_loss[loss=0.8073, simple_loss=0.6859, pruned_loss=0.6137, over 3780006.56 frames. ], batch size: 63, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:10,443 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=864.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:12:12,289 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=865.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:12:15,720 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=5.18 vs. limit=5.0 +2023-03-31 19:12:43,340 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-03-31 19:12:51,333 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2825, 1.5032, 2.5942, 2.0387, 2.1583, 2.5438, 2.5565, 2.5166], + device='cuda:3'), covar=tensor([0.4292, 0.7630, 0.3867, 0.4796, 0.3235, 0.2659, 0.3149, 0.3112], + device='cuda:3'), in_proj_covar=tensor([0.0061, 0.0065, 0.0058, 0.0064, 0.0061, 0.0053, 0.0062, 0.0054], + device='cuda:3'), out_proj_covar=tensor([3.9114e-05, 4.7181e-05, 3.5075e-05, 4.5042e-05, 3.7678e-05, 3.0497e-05, + 3.8307e-05, 3.4056e-05], device='cuda:3') +2023-03-31 19:12:54,612 INFO [train.py:903] (3/4) Epoch 1, batch 900, loss[loss=0.623, simple_loss=0.5409, pruned_loss=0.4035, over 19099.00 frames. ], tot_loss[loss=0.7853, simple_loss=0.6698, pruned_loss=0.5801, over 3803596.91 frames. ], batch size: 42, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:12:59,594 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 6.072e+02 7.456e+02 9.579e+02 1.181e+04, threshold=1.491e+03, percent-clipped=3.0 +2023-03-31 19:13:21,569 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=924.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:13:30,349 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=930.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:13:53,705 INFO [train.py:903] (3/4) Epoch 1, batch 950, loss[loss=0.6101, simple_loss=0.5342, pruned_loss=0.3842, over 19763.00 frames. ], tot_loss[loss=0.7656, simple_loss=0.6553, pruned_loss=0.5508, over 3805964.47 frames. ], batch size: 46, lr: 4.96e-02, grad_scale: 4.0 +2023-03-31 19:13:53,739 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 19:13:56,064 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=952.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:14:51,779 INFO [train.py:903] (3/4) Epoch 1, batch 1000, loss[loss=0.6795, simple_loss=0.5952, pruned_loss=0.4237, over 19597.00 frames. ], tot_loss[loss=0.7491, simple_loss=0.6435, pruned_loss=0.5257, over 3813667.56 frames. ], batch size: 52, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:14:56,987 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 5.980e+02 7.509e+02 1.052e+03 2.029e+03, threshold=1.502e+03, percent-clipped=4.0 +2023-03-31 19:15:25,818 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1029.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:15:38,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1039.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:41,715 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 19:15:45,014 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1045.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:15:52,657 INFO [train.py:903] (3/4) Epoch 1, batch 1050, loss[loss=0.6339, simple_loss=0.5601, pruned_loss=0.3856, over 19730.00 frames. ], tot_loss[loss=0.7304, simple_loss=0.6305, pruned_loss=0.4999, over 3818770.20 frames. 
], batch size: 51, lr: 4.95e-02, grad_scale: 4.0 +2023-03-31 19:15:56,697 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1054.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:12,569 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1067.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:16:20,721 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 19:16:53,091 INFO [train.py:903] (3/4) Epoch 1, batch 1100, loss[loss=0.7131, simple_loss=0.6114, pruned_loss=0.4534, over 17940.00 frames. ], tot_loss[loss=0.7157, simple_loss=0.6198, pruned_loss=0.4793, over 3830342.97 frames. ], batch size: 83, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:16:57,396 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.036e+02 7.117e+02 8.563e+02 1.068e+03 2.368e+03, threshold=1.713e+03, percent-clipped=4.0 +2023-03-31 19:17:14,922 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1120.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:17:45,068 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1145.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:17:51,773 INFO [train.py:903] (3/4) Epoch 1, batch 1150, loss[loss=0.658, simple_loss=0.5725, pruned_loss=0.4048, over 19480.00 frames. ], tot_loss[loss=0.7004, simple_loss=0.6089, pruned_loss=0.4596, over 3826876.39 frames. ], batch size: 49, lr: 4.94e-02, grad_scale: 4.0 +2023-03-31 19:18:13,023 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1171.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:18:47,606 INFO [train.py:903] (3/4) Epoch 1, batch 1200, loss[loss=0.6695, simple_loss=0.5957, pruned_loss=0.3945, over 19714.00 frames. ], tot_loss[loss=0.6864, simple_loss=0.5983, pruned_loss=0.4427, over 3827428.76 frames. ], batch size: 63, lr: 4.93e-02, grad_scale: 8.0 +2023-03-31 19:18:52,216 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 7.433e+02 9.314e+02 1.239e+03 3.000e+03, threshold=1.863e+03, percent-clipped=16.0 +2023-03-31 19:18:56,109 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1209.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:19:16,395 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 19:19:42,385 INFO [train.py:903] (3/4) Epoch 1, batch 1250, loss[loss=0.5681, simple_loss=0.505, pruned_loss=0.3334, over 19315.00 frames. ], tot_loss[loss=0.6725, simple_loss=0.5889, pruned_loss=0.4256, over 3830534.52 frames. ], batch size: 44, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:32,325 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1295.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:20:39,090 INFO [train.py:903] (3/4) Epoch 1, batch 1300, loss[loss=0.7669, simple_loss=0.6349, pruned_loss=0.4938, over 19688.00 frames. ], tot_loss[loss=0.6656, simple_loss=0.584, pruned_loss=0.4151, over 3814461.74 frames. 
], batch size: 60, lr: 4.92e-02, grad_scale: 8.0 +2023-03-31 19:20:39,475 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1301.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:20:43,725 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+02 7.699e+02 1.048e+03 1.379e+03 4.741e+03, threshold=2.097e+03, percent-clipped=13.0 +2023-03-31 19:21:00,035 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1320.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:03,692 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1323.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:21:04,879 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1324.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:06,836 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1326.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:33,403 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1348.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:21:35,927 INFO [train.py:903] (3/4) Epoch 1, batch 1350, loss[loss=0.5934, simple_loss=0.528, pruned_loss=0.3443, over 19673.00 frames. ], tot_loss[loss=0.6528, simple_loss=0.5751, pruned_loss=0.4009, over 3805375.39 frames. ], batch size: 53, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:31,180 INFO [train.py:903] (3/4) Epoch 1, batch 1400, loss[loss=0.5378, simple_loss=0.5005, pruned_loss=0.291, over 19767.00 frames. ], tot_loss[loss=0.6393, simple_loss=0.5662, pruned_loss=0.3864, over 3815595.34 frames. ], batch size: 54, lr: 4.91e-02, grad_scale: 8.0 +2023-03-31 19:22:35,196 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.620e+02 9.515e+02 1.230e+03 4.278e+03, threshold=1.903e+03, percent-clipped=3.0 +2023-03-31 19:23:24,308 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-03-31 19:23:25,237 INFO [train.py:903] (3/4) Epoch 1, batch 1450, loss[loss=0.5174, simple_loss=0.481, pruned_loss=0.28, over 19593.00 frames. ], tot_loss[loss=0.6269, simple_loss=0.5571, pruned_loss=0.3741, over 3805106.09 frames. ], batch size: 52, lr: 4.90e-02, grad_scale: 8.0 +2023-03-31 19:23:26,469 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1452.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:24:18,027 INFO [train.py:903] (3/4) Epoch 1, batch 1500, loss[loss=0.548, simple_loss=0.5053, pruned_loss=0.2996, over 19604.00 frames. ], tot_loss[loss=0.6201, simple_loss=0.5528, pruned_loss=0.3657, over 3804530.53 frames. 
], batch size: 50, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:24:23,061 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+02 9.104e+02 1.060e+03 1.370e+03 5.981e+03, threshold=2.119e+03, percent-clipped=12.0 +2023-03-31 19:24:35,378 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1515.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:24:52,460 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0535, 1.5494, 1.9822, 1.8383, 1.1566, 1.7706, 0.7405, 1.2794], + device='cuda:3'), covar=tensor([0.1695, 0.2352, 0.2132, 0.3198, 0.3904, 0.2827, 0.7146, 0.3230], + device='cuda:3'), in_proj_covar=tensor([0.0087, 0.0082, 0.0094, 0.0102, 0.0104, 0.0093, 0.0138, 0.0103], + device='cuda:3'), out_proj_covar=tensor([5.9787e-05, 4.9858e-05, 6.0910e-05, 7.3433e-05, 7.5147e-05, 6.2697e-05, + 1.0091e-04, 7.3868e-05], device='cuda:3') +2023-03-31 19:25:14,429 INFO [train.py:903] (3/4) Epoch 1, batch 1550, loss[loss=0.6387, simple_loss=0.5659, pruned_loss=0.3663, over 19535.00 frames. ], tot_loss[loss=0.6103, simple_loss=0.5468, pruned_loss=0.3552, over 3817228.39 frames. ], batch size: 54, lr: 4.89e-02, grad_scale: 8.0 +2023-03-31 19:25:24,673 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.14 vs. limit=2.0 +2023-03-31 19:25:43,604 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1580.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:25:47,367 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1584.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:26:05,886 INFO [train.py:903] (3/4) Epoch 1, batch 1600, loss[loss=0.5786, simple_loss=0.5347, pruned_loss=0.3144, over 19679.00 frames. ], tot_loss[loss=0.603, simple_loss=0.5419, pruned_loss=0.3475, over 3814099.19 frames. ], batch size: 59, lr: 4.88e-02, grad_scale: 8.0 +2023-03-31 19:26:10,808 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.811e+02 9.198e+02 1.152e+03 1.497e+03 2.578e+03, threshold=2.303e+03, percent-clipped=3.0 +2023-03-31 19:26:11,232 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1605.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:25,452 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-03-31 19:26:35,468 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1629.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:36,305 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1630.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:26:57,907 INFO [train.py:903] (3/4) Epoch 1, batch 1650, loss[loss=0.6326, simple_loss=0.5755, pruned_loss=0.3496, over 19671.00 frames. ], tot_loss[loss=0.5958, simple_loss=0.5378, pruned_loss=0.3397, over 3816120.20 frames. ], batch size: 60, lr: 4.87e-02, grad_scale: 8.0 +2023-03-31 19:27:52,335 INFO [train.py:903] (3/4) Epoch 1, batch 1700, loss[loss=0.5354, simple_loss=0.494, pruned_loss=0.2905, over 19522.00 frames. ], tot_loss[loss=0.5899, simple_loss=0.5349, pruned_loss=0.333, over 3827695.72 frames. ], batch size: 54, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:27:56,182 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.719e+02 9.402e+02 1.223e+03 1.535e+03 2.582e+03, threshold=2.447e+03, percent-clipped=3.0 +2023-03-31 19:28:26,633 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-03-31 19:28:33,164 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8223, 1.5478, 1.6529, 3.0672, 3.9571, 2.2233, 3.0576, 3.4278], + device='cuda:3'), covar=tensor([0.0472, 0.4525, 0.6759, 0.2284, 0.0764, 0.6087, 0.1176, 0.1415], + device='cuda:3'), in_proj_covar=tensor([0.0091, 0.0143, 0.0183, 0.0129, 0.0129, 0.0220, 0.0125, 0.0115], + device='cuda:3'), out_proj_covar=tensor([5.2270e-05, 9.9517e-05, 1.2725e-04, 9.1926e-05, 7.6963e-05, 1.4293e-04, + 8.1368e-05, 7.6814e-05], device='cuda:3') +2023-03-31 19:28:46,832 INFO [train.py:903] (3/4) Epoch 1, batch 1750, loss[loss=0.4959, simple_loss=0.4671, pruned_loss=0.2628, over 19726.00 frames. ], tot_loss[loss=0.5817, simple_loss=0.5301, pruned_loss=0.3252, over 3827032.20 frames. ], batch size: 46, lr: 4.86e-02, grad_scale: 8.0 +2023-03-31 19:29:38,259 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1796.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:29:43,455 INFO [train.py:903] (3/4) Epoch 1, batch 1800, loss[loss=0.5446, simple_loss=0.5126, pruned_loss=0.2887, over 19699.00 frames. ], tot_loss[loss=0.5757, simple_loss=0.5257, pruned_loss=0.3199, over 3822081.22 frames. ], batch size: 59, lr: 4.85e-02, grad_scale: 8.0 +2023-03-31 19:29:47,626 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.927e+02 9.266e+02 1.209e+03 1.539e+03 2.564e+03, threshold=2.418e+03, percent-clipped=2.0 +2023-03-31 19:30:36,181 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 19:30:40,566 INFO [train.py:903] (3/4) Epoch 1, batch 1850, loss[loss=0.5217, simple_loss=0.4942, pruned_loss=0.2746, over 19521.00 frames. ], tot_loss[loss=0.5701, simple_loss=0.5229, pruned_loss=0.3143, over 3832692.97 frames. ], batch size: 54, lr: 4.84e-02, grad_scale: 8.0 +2023-03-31 19:30:45,775 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1856.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:31:08,136 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1875.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:12,894 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-03-31 19:31:20,292 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1886.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:22,097 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1888.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:31:36,176 INFO [train.py:903] (3/4) Epoch 1, batch 1900, loss[loss=0.4852, simple_loss=0.4853, pruned_loss=0.2413, over 19688.00 frames. ], tot_loss[loss=0.5637, simple_loss=0.519, pruned_loss=0.3087, over 3810251.14 frames. ], batch size: 60, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:31:40,292 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 9.078e+02 1.104e+03 1.499e+03 2.754e+03, threshold=2.207e+03, percent-clipped=2.0 +2023-03-31 19:31:47,483 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1911.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:47,504 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1911.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:31:52,293 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-03-31 19:31:57,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 19:32:06,359 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:32:19,677 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 19:32:31,502 INFO [train.py:903] (3/4) Epoch 1, batch 1950, loss[loss=0.5276, simple_loss=0.5077, pruned_loss=0.2735, over 19779.00 frames. ], tot_loss[loss=0.5583, simple_loss=0.5154, pruned_loss=0.3042, over 3801877.59 frames. ], batch size: 56, lr: 4.83e-02, grad_scale: 8.0 +2023-03-31 19:32:57,190 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1973.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:33:29,169 INFO [train.py:903] (3/4) Epoch 1, batch 2000, loss[loss=0.5919, simple_loss=0.5391, pruned_loss=0.3223, over 17434.00 frames. ], tot_loss[loss=0.5554, simple_loss=0.5146, pruned_loss=0.3009, over 3795301.12 frames. ], batch size: 101, lr: 4.82e-02, grad_scale: 8.0 +2023-03-31 19:33:33,541 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.917e+02 1.007e+03 1.260e+03 1.703e+03 3.255e+03, threshold=2.521e+03, percent-clipped=11.0 +2023-03-31 19:34:04,902 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1658, 1.5290, 1.9460, 1.3531, 1.9862, 2.8498, 2.4229, 1.8742], + device='cuda:3'), covar=tensor([0.2902, 0.1803, 0.2877, 0.3281, 0.2211, 0.0529, 0.1367, 0.2707], + device='cuda:3'), in_proj_covar=tensor([0.0094, 0.0074, 0.0095, 0.0106, 0.0106, 0.0053, 0.0081, 0.0108], + device='cuda:3'), out_proj_covar=tensor([6.2455e-05, 4.8035e-05, 6.3418e-05, 7.3631e-05, 7.2993e-05, 3.0281e-05, + 5.5912e-05, 7.1019e-05], device='cuda:3') +2023-03-31 19:34:18,983 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2043.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:34:25,201 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 19:34:27,597 INFO [train.py:903] (3/4) Epoch 1, batch 2050, loss[loss=0.5009, simple_loss=0.4922, pruned_loss=0.2548, over 19671.00 frames. ], tot_loss[loss=0.5471, simple_loss=0.5104, pruned_loss=0.294, over 3793023.90 frames. ], batch size: 58, lr: 4.81e-02, grad_scale: 16.0 +2023-03-31 19:34:43,698 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 19:34:43,733 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 19:34:54,258 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8024, 2.0519, 2.7261, 1.6033, 2.2401, 3.9762, 3.5296, 3.7114], + device='cuda:3'), covar=tensor([0.2452, 0.2058, 0.1234, 0.2859, 0.1250, 0.0138, 0.0309, 0.0323], + device='cuda:3'), in_proj_covar=tensor([0.0127, 0.0109, 0.0095, 0.0128, 0.0094, 0.0061, 0.0077, 0.0066], + device='cuda:3'), out_proj_covar=tensor([8.1477e-05, 7.0637e-05, 5.8991e-05, 8.1920e-05, 5.9912e-05, 3.1654e-05, + 4.1392e-05, 3.4848e-05], device='cuda:3') +2023-03-31 19:35:06,985 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-03-31 19:35:12,786 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2088.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:35:27,385 INFO [train.py:903] (3/4) Epoch 1, batch 2100, loss[loss=0.5812, simple_loss=0.5395, pruned_loss=0.3114, over 19514.00 frames. ], tot_loss[loss=0.5382, simple_loss=0.5056, pruned_loss=0.2871, over 3798742.34 frames. ], batch size: 56, lr: 4.80e-02, grad_scale: 16.0 +2023-03-31 19:35:31,661 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+02 9.211e+02 1.091e+03 1.524e+03 2.851e+03, threshold=2.182e+03, percent-clipped=6.0 +2023-03-31 19:35:49,309 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8165, 2.1435, 2.8044, 1.6266, 2.1407, 4.0233, 4.0525, 3.6483], + device='cuda:3'), covar=tensor([0.2652, 0.2220, 0.1281, 0.2987, 0.1498, 0.0242, 0.0245, 0.0490], + device='cuda:3'), in_proj_covar=tensor([0.0137, 0.0118, 0.0102, 0.0137, 0.0103, 0.0065, 0.0081, 0.0070], + device='cuda:3'), out_proj_covar=tensor([8.8146e-05, 7.6440e-05, 6.3669e-05, 8.8146e-05, 6.5860e-05, 3.3896e-05, + 4.3853e-05, 3.6932e-05], device='cuda:3') +2023-03-31 19:35:56,700 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 19:36:17,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 19:36:24,969 INFO [train.py:903] (3/4) Epoch 1, batch 2150, loss[loss=0.4576, simple_loss=0.4535, pruned_loss=0.2309, over 19620.00 frames. ], tot_loss[loss=0.5298, simple_loss=0.5012, pruned_loss=0.2805, over 3803119.06 frames. ], batch size: 50, lr: 4.79e-02, grad_scale: 16.0 +2023-03-31 19:36:45,574 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2167.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:37:13,945 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2192.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:37:19,351 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6498, 3.1804, 2.0092, 2.7386, 1.4826, 3.4738, 2.9719, 3.1045], + device='cuda:3'), covar=tensor([0.0695, 0.1382, 0.2956, 0.0873, 0.3014, 0.0551, 0.0895, 0.0756], + device='cuda:3'), in_proj_covar=tensor([0.0150, 0.0191, 0.0190, 0.0131, 0.0206, 0.0116, 0.0130, 0.0118], + device='cuda:3'), out_proj_covar=tensor([1.2349e-04, 1.4822e-04, 1.2768e-04, 9.5103e-05, 1.4342e-04, 8.3670e-05, + 9.3502e-05, 8.8158e-05], device='cuda:3') +2023-03-31 19:37:24,837 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:37:25,819 INFO [train.py:903] (3/4) Epoch 1, batch 2200, loss[loss=0.4543, simple_loss=0.4589, pruned_loss=0.2248, over 19671.00 frames. ], tot_loss[loss=0.5223, simple_loss=0.4969, pruned_loss=0.2749, over 3818536.49 frames. 
], batch size: 53, lr: 4.78e-02, grad_scale: 16.0 +2023-03-31 19:37:31,694 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.347e+02 9.332e+02 1.145e+03 1.435e+03 3.303e+03, threshold=2.290e+03, percent-clipped=7.0 +2023-03-31 19:37:49,524 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2219.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:03,609 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:38:05,862 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0741, 3.4956, 1.9345, 3.2460, 1.4866, 3.8295, 3.4263, 3.4624], + device='cuda:3'), covar=tensor([0.0479, 0.0978, 0.2569, 0.0571, 0.2548, 0.0400, 0.0657, 0.0736], + device='cuda:3'), in_proj_covar=tensor([0.0153, 0.0188, 0.0193, 0.0132, 0.0205, 0.0118, 0.0129, 0.0118], + device='cuda:3'), out_proj_covar=tensor([1.2445e-04, 1.4557e-04, 1.2955e-04, 9.6870e-05, 1.4336e-04, 8.6310e-05, + 9.2721e-05, 8.8126e-05], device='cuda:3') +2023-03-31 19:38:27,463 INFO [train.py:903] (3/4) Epoch 1, batch 2250, loss[loss=0.4659, simple_loss=0.4789, pruned_loss=0.2265, over 19699.00 frames. ], tot_loss[loss=0.518, simple_loss=0.4945, pruned_loss=0.2715, over 3821303.84 frames. ], batch size: 59, lr: 4.77e-02, grad_scale: 16.0 +2023-03-31 19:39:24,144 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2299.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:39:25,913 INFO [train.py:903] (3/4) Epoch 1, batch 2300, loss[loss=0.5031, simple_loss=0.495, pruned_loss=0.2556, over 19301.00 frames. ], tot_loss[loss=0.5142, simple_loss=0.4923, pruned_loss=0.2687, over 3805063.08 frames. ], batch size: 66, lr: 4.77e-02, grad_scale: 8.0 +2023-03-31 19:39:31,322 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+02 9.458e+02 1.205e+03 1.557e+03 3.326e+03, threshold=2.410e+03, percent-clipped=10.0 +2023-03-31 19:39:39,152 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-03-31 19:39:41,702 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2315.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:39:54,010 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2324.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:04,986 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2334.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:16,272 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-31 19:40:17,288 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2344.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:40:20,648 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2347.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:40:24,837 INFO [train.py:903] (3/4) Epoch 1, batch 2350, loss[loss=0.4928, simple_loss=0.4908, pruned_loss=0.2474, over 19735.00 frames. ], tot_loss[loss=0.5107, simple_loss=0.4902, pruned_loss=0.266, over 3809235.26 frames. 
], batch size: 63, lr: 4.76e-02, grad_scale: 8.0 +2023-03-31 19:40:33,273 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2358.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:40:48,340 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2369.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:41:07,209 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 19:41:23,353 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 19:41:26,487 INFO [train.py:903] (3/4) Epoch 1, batch 2400, loss[loss=0.4212, simple_loss=0.4217, pruned_loss=0.2104, over 19731.00 frames. ], tot_loss[loss=0.5035, simple_loss=0.486, pruned_loss=0.2609, over 3820760.20 frames. ], batch size: 45, lr: 4.75e-02, grad_scale: 8.0 +2023-03-31 19:41:33,163 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+02 9.458e+02 1.226e+03 1.613e+03 2.603e+03, threshold=2.451e+03, percent-clipped=4.0 +2023-03-31 19:42:26,145 INFO [train.py:903] (3/4) Epoch 1, batch 2450, loss[loss=0.4952, simple_loss=0.4916, pruned_loss=0.2494, over 19513.00 frames. ], tot_loss[loss=0.4993, simple_loss=0.4838, pruned_loss=0.2577, over 3818890.36 frames. ], batch size: 56, lr: 4.74e-02, grad_scale: 8.0 +2023-03-31 19:43:24,714 INFO [train.py:903] (3/4) Epoch 1, batch 2500, loss[loss=0.4905, simple_loss=0.4878, pruned_loss=0.2466, over 18859.00 frames. ], tot_loss[loss=0.4937, simple_loss=0.4805, pruned_loss=0.2537, over 3822051.45 frames. ], batch size: 74, lr: 4.73e-02, grad_scale: 8.0 +2023-03-31 19:43:30,998 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.049e+02 1.082e+03 1.390e+03 1.742e+03 4.873e+03, threshold=2.779e+03, percent-clipped=5.0 +2023-03-31 19:43:37,153 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. limit=2.0 +2023-03-31 19:44:22,072 INFO [train.py:903] (3/4) Epoch 1, batch 2550, loss[loss=0.436, simple_loss=0.4401, pruned_loss=0.2159, over 19864.00 frames. ], tot_loss[loss=0.4931, simple_loss=0.48, pruned_loss=0.2532, over 3810120.13 frames. ], batch size: 52, lr: 4.72e-02, grad_scale: 8.0 +2023-03-31 19:44:47,236 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2571.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:44:51,869 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-03-31 19:45:09,096 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2590.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:14,145 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 19:45:15,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2596.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:21,672 INFO [train.py:903] (3/4) Epoch 1, batch 2600, loss[loss=0.466, simple_loss=0.4737, pruned_loss=0.2291, over 19739.00 frames. ], tot_loss[loss=0.4901, simple_loss=0.4786, pruned_loss=0.2509, over 3800060.88 frames. 
], batch size: 63, lr: 4.71e-02, grad_scale: 8.0 +2023-03-31 19:45:25,701 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2603.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:45:28,247 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+02 9.154e+02 1.259e+03 1.710e+03 2.682e+03, threshold=2.519e+03, percent-clipped=0.0 +2023-03-31 19:45:39,484 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2615.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:45:44,378 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.40 vs. limit=5.0 +2023-03-31 19:45:46,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2522, 1.0848, 1.2938, 1.4340, 1.9845, 0.9427, 2.0066, 2.1586], + device='cuda:3'), covar=tensor([0.0419, 0.2541, 0.3026, 0.2109, 0.0642, 0.3217, 0.0947, 0.0692], + device='cuda:3'), in_proj_covar=tensor([0.0115, 0.0198, 0.0216, 0.0205, 0.0148, 0.0288, 0.0183, 0.0155], + device='cuda:3'), out_proj_covar=tensor([8.3711e-05, 1.4422e-04, 1.5926e-04, 1.6019e-04, 1.0830e-04, 1.9321e-04, + 1.4648e-04, 1.1630e-04], device='cuda:3') +2023-03-31 19:45:55,231 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2628.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:46:00,887 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-03-31 19:46:22,935 INFO [train.py:903] (3/4) Epoch 1, batch 2650, loss[loss=0.5801, simple_loss=0.5287, pruned_loss=0.3157, over 13589.00 frames. ], tot_loss[loss=0.4846, simple_loss=0.4756, pruned_loss=0.2469, over 3808439.78 frames. ], batch size: 136, lr: 4.70e-02, grad_scale: 8.0 +2023-03-31 19:46:28,025 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-31 19:46:39,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 19:47:23,183 INFO [train.py:903] (3/4) Epoch 1, batch 2700, loss[loss=0.4772, simple_loss=0.4804, pruned_loss=0.237, over 18710.00 frames. ], tot_loss[loss=0.4811, simple_loss=0.4736, pruned_loss=0.2443, over 3812619.74 frames. ], batch size: 74, lr: 4.69e-02, grad_scale: 8.0 +2023-03-31 19:47:24,589 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2702.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:47:25,667 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2703.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:47:29,727 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+02 8.490e+02 1.133e+03 1.436e+03 3.154e+03, threshold=2.267e+03, percent-clipped=3.0 +2023-03-31 19:48:24,813 INFO [train.py:903] (3/4) Epoch 1, batch 2750, loss[loss=0.5299, simple_loss=0.5019, pruned_loss=0.279, over 13256.00 frames. ], tot_loss[loss=0.4769, simple_loss=0.471, pruned_loss=0.2414, over 3813471.07 frames. ], batch size: 136, lr: 4.68e-02, grad_scale: 8.0 +2023-03-31 19:49:25,709 INFO [train.py:903] (3/4) Epoch 1, batch 2800, loss[loss=0.4942, simple_loss=0.499, pruned_loss=0.2447, over 19613.00 frames. ], tot_loss[loss=0.473, simple_loss=0.4683, pruned_loss=0.2389, over 3806723.15 frames. 
], batch size: 57, lr: 4.67e-02, grad_scale: 8.0 +2023-03-31 19:49:31,047 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 1.002e+03 1.265e+03 1.511e+03 4.462e+03, threshold=2.529e+03, percent-clipped=7.0 +2023-03-31 19:49:45,708 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2817.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:50:15,447 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2842.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:50:26,202 INFO [train.py:903] (3/4) Epoch 1, batch 2850, loss[loss=0.4004, simple_loss=0.4269, pruned_loss=0.1869, over 19664.00 frames. ], tot_loss[loss=0.4711, simple_loss=0.4672, pruned_loss=0.2375, over 3810657.18 frames. ], batch size: 55, lr: 4.66e-02, grad_scale: 8.0 +2023-03-31 19:51:16,440 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-03-31 19:51:22,285 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 19:51:25,262 INFO [train.py:903] (3/4) Epoch 1, batch 2900, loss[loss=0.4738, simple_loss=0.445, pruned_loss=0.2512, over 19770.00 frames. ], tot_loss[loss=0.4688, simple_loss=0.4658, pruned_loss=0.2359, over 3799133.40 frames. ], batch size: 47, lr: 4.65e-02, grad_scale: 8.0 +2023-03-31 19:51:30,476 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.237e+02 1.045e+03 1.349e+03 1.754e+03 3.463e+03, threshold=2.699e+03, percent-clipped=4.0 +2023-03-31 19:52:25,021 INFO [train.py:903] (3/4) Epoch 1, batch 2950, loss[loss=0.5112, simple_loss=0.4994, pruned_loss=0.2614, over 18207.00 frames. ], tot_loss[loss=0.4671, simple_loss=0.465, pruned_loss=0.2346, over 3797566.85 frames. ], batch size: 83, lr: 4.64e-02, grad_scale: 8.0 +2023-03-31 19:52:30,839 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2955.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:53:02,076 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0237, 1.6132, 2.4952, 1.5123, 2.4328, 4.5148, 3.3487, 2.5994], + device='cuda:3'), covar=tensor([0.2997, 0.1720, 0.2164, 0.2782, 0.1448, 0.0171, 0.1126, 0.1610], + device='cuda:3'), in_proj_covar=tensor([0.0160, 0.0126, 0.0146, 0.0173, 0.0159, 0.0078, 0.0143, 0.0158], + device='cuda:3'), out_proj_covar=tensor([1.0920e-04, 8.5020e-05, 1.0301e-04, 1.2028e-04, 1.0772e-04, 5.1987e-05, + 9.6555e-05, 1.0380e-04], device='cuda:3') +2023-03-31 19:53:26,136 INFO [train.py:903] (3/4) Epoch 1, batch 3000, loss[loss=0.4323, simple_loss=0.443, pruned_loss=0.2108, over 19713.00 frames. ], tot_loss[loss=0.4628, simple_loss=0.4625, pruned_loss=0.2316, over 3801473.12 frames. ], batch size: 51, lr: 4.63e-02, grad_scale: 8.0 +2023-03-31 19:53:26,137 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 19:53:38,705 INFO [train.py:937] (3/4) Epoch 1, validation: loss=0.3995, simple_loss=0.4801, pruned_loss=0.1594, over 944034.00 frames. +2023-03-31 19:53:38,705 INFO [train.py:938] (3/4) Maximum memory allocated so far is 15470MB +2023-03-31 19:53:43,180 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-03-31 19:53:45,670 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 9.060e+02 1.151e+03 1.550e+03 2.691e+03, threshold=2.303e+03, percent-clipped=0.0 +2023-03-31 19:53:56,937 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8757, 2.0647, 2.0163, 3.0921, 4.3899, 1.3692, 2.4858, 4.1978], + device='cuda:3'), covar=tensor([0.0254, 0.2358, 0.2864, 0.1656, 0.0291, 0.3593, 0.1022, 0.0323], + device='cuda:3'), in_proj_covar=tensor([0.0138, 0.0218, 0.0229, 0.0231, 0.0164, 0.0309, 0.0202, 0.0169], + device='cuda:3'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 19:54:23,283 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3037.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:35,539 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3047.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:54:40,057 INFO [train.py:903] (3/4) Epoch 1, batch 3050, loss[loss=0.4171, simple_loss=0.4193, pruned_loss=0.2075, over 19728.00 frames. ], tot_loss[loss=0.459, simple_loss=0.4603, pruned_loss=0.2288, over 3814212.25 frames. ], batch size: 46, lr: 4.62e-02, grad_scale: 8.0 +2023-03-31 19:54:46,090 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-31 19:55:07,353 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3073.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 19:55:36,294 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3098.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 19:55:39,210 INFO [train.py:903] (3/4) Epoch 1, batch 3100, loss[loss=0.497, simple_loss=0.4809, pruned_loss=0.2565, over 19658.00 frames. ], tot_loss[loss=0.4613, simple_loss=0.4615, pruned_loss=0.2305, over 3820186.25 frames. ], batch size: 53, lr: 4.61e-02, grad_scale: 8.0 +2023-03-31 19:55:45,836 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+02 1.021e+03 1.362e+03 1.815e+03 5.785e+03, threshold=2.723e+03, percent-clipped=14.0 +2023-03-31 19:56:41,857 INFO [train.py:903] (3/4) Epoch 1, batch 3150, loss[loss=0.4683, simple_loss=0.4724, pruned_loss=0.2321, over 18723.00 frames. ], tot_loss[loss=0.4583, simple_loss=0.4596, pruned_loss=0.2285, over 3816716.02 frames. ], batch size: 74, lr: 4.60e-02, grad_scale: 8.0 +2023-03-31 19:56:54,540 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3162.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:03,670 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4357, 0.9079, 1.0135, 1.1918, 1.3666, 1.5957, 1.3953, 1.4780], + device='cuda:3'), covar=tensor([0.1086, 0.2613, 0.2398, 0.1675, 0.2570, 0.1006, 0.1502, 0.1212], + device='cuda:3'), in_proj_covar=tensor([0.0119, 0.0160, 0.0174, 0.0145, 0.0200, 0.0133, 0.0143, 0.0133], + device='cuda:3'), out_proj_covar=tensor([8.8973e-05, 1.1752e-04, 1.2172e-04, 1.0654e-04, 1.4492e-04, 9.5291e-05, + 1.0151e-04, 9.6116e-05], device='cuda:3') +2023-03-31 19:57:08,973 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-03-31 19:57:24,065 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3186.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:57:31,522 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7914, 1.2726, 1.2453, 1.6260, 1.8342, 1.6934, 1.8321, 1.5687], + device='cuda:3'), covar=tensor([0.1494, 0.2530, 0.2651, 0.2335, 0.2963, 0.2584, 0.3346, 0.1732], + device='cuda:3'), in_proj_covar=tensor([0.0200, 0.0254, 0.0252, 0.0269, 0.0353, 0.0249, 0.0323, 0.0219], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 19:57:40,816 INFO [train.py:903] (3/4) Epoch 1, batch 3200, loss[loss=0.4037, simple_loss=0.4277, pruned_loss=0.1898, over 19765.00 frames. ], tot_loss[loss=0.4564, simple_loss=0.4588, pruned_loss=0.227, over 3807910.90 frames. ], batch size: 56, lr: 4.59e-02, grad_scale: 8.0 +2023-03-31 19:57:46,449 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+02 9.158e+02 1.127e+03 1.418e+03 2.574e+03, threshold=2.253e+03, percent-clipped=0.0 +2023-03-31 19:58:32,927 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-31 19:58:41,766 INFO [train.py:903] (3/4) Epoch 1, batch 3250, loss[loss=0.5085, simple_loss=0.5016, pruned_loss=0.2577, over 19704.00 frames. ], tot_loss[loss=0.4554, simple_loss=0.4584, pruned_loss=0.2263, over 3798699.47 frames. ], batch size: 63, lr: 4.58e-02, grad_scale: 8.0 +2023-03-31 19:59:40,390 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3299.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:42,437 INFO [train.py:903] (3/4) Epoch 1, batch 3300, loss[loss=0.3753, simple_loss=0.4036, pruned_loss=0.1735, over 19499.00 frames. ], tot_loss[loss=0.451, simple_loss=0.4555, pruned_loss=0.2233, over 3799373.19 frames. ], batch size: 49, lr: 4.57e-02, grad_scale: 8.0 +2023-03-31 19:59:43,737 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3301.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 19:59:48,774 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+02 9.991e+02 1.183e+03 1.562e+03 4.237e+03, threshold=2.366e+03, percent-clipped=7.0 +2023-03-31 19:59:48,809 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 19:59:49,125 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3306.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:00:29,865 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.89 vs. limit=2.0 +2023-03-31 20:00:43,748 INFO [train.py:903] (3/4) Epoch 1, batch 3350, loss[loss=0.4988, simple_loss=0.4872, pruned_loss=0.2552, over 19353.00 frames. ], tot_loss[loss=0.4509, simple_loss=0.4561, pruned_loss=0.2228, over 3801688.69 frames. 
], batch size: 66, lr: 4.56e-02, grad_scale: 8.0 +2023-03-31 20:01:16,587 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.3750, 4.8479, 3.1252, 4.5286, 1.6523, 5.3666, 4.7548, 5.2739], + device='cuda:3'), covar=tensor([0.0451, 0.0950, 0.2123, 0.0486, 0.3187, 0.0470, 0.0529, 0.0441], + device='cuda:3'), in_proj_covar=tensor([0.0197, 0.0220, 0.0242, 0.0182, 0.0255, 0.0169, 0.0146, 0.0147], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:3') +2023-03-31 20:01:22,261 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3381.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:01:46,120 INFO [train.py:903] (3/4) Epoch 1, batch 3400, loss[loss=0.466, simple_loss=0.449, pruned_loss=0.2415, over 19581.00 frames. ], tot_loss[loss=0.4488, simple_loss=0.4545, pruned_loss=0.2216, over 3795050.85 frames. ], batch size: 52, lr: 4.55e-02, grad_scale: 8.0 +2023-03-31 20:01:52,574 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-31 20:01:52,898 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.525e+02 9.967e+02 1.253e+03 1.611e+03 4.007e+03, threshold=2.507e+03, percent-clipped=3.0 +2023-03-31 20:02:02,173 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3414.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:07,709 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3418.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:02:38,951 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3443.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:02:48,483 INFO [train.py:903] (3/4) Epoch 1, batch 3450, loss[loss=0.404, simple_loss=0.4256, pruned_loss=0.1912, over 19728.00 frames. ], tot_loss[loss=0.4445, simple_loss=0.4511, pruned_loss=0.219, over 3809038.87 frames. ], batch size: 47, lr: 4.54e-02, grad_scale: 8.0 +2023-03-31 20:02:50,770 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 20:03:43,984 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:03:50,182 INFO [train.py:903] (3/4) Epoch 1, batch 3500, loss[loss=0.4473, simple_loss=0.4699, pruned_loss=0.2124, over 19677.00 frames. ], tot_loss[loss=0.4433, simple_loss=0.4503, pruned_loss=0.2181, over 3810675.17 frames. 
], batch size: 59, lr: 4.53e-02, grad_scale: 8.0 +2023-03-31 20:03:56,683 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+02 9.703e+02 1.213e+03 1.703e+03 9.610e+03, threshold=2.427e+03, percent-clipped=9.0 +2023-03-31 20:04:16,296 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9988, 2.1675, 2.4133, 1.9159, 2.0936, 1.3075, 1.4820, 2.0989], + device='cuda:3'), covar=tensor([0.1272, 0.0556, 0.0425, 0.1002, 0.0930, 0.1181, 0.1606, 0.0845], + device='cuda:3'), in_proj_covar=tensor([0.0226, 0.0141, 0.0143, 0.0177, 0.0136, 0.0190, 0.0211, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:04:24,132 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2369, 2.9464, 3.0982, 2.5469, 2.5409, 0.9832, 1.0707, 2.0154], + device='cuda:3'), covar=tensor([0.2053, 0.0742, 0.0480, 0.1163, 0.1497, 0.2000, 0.3079, 0.1634], + device='cuda:3'), in_proj_covar=tensor([0.0225, 0.0140, 0.0143, 0.0177, 0.0135, 0.0191, 0.0212, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:04:52,066 INFO [train.py:903] (3/4) Epoch 1, batch 3550, loss[loss=0.4017, simple_loss=0.4089, pruned_loss=0.1972, over 19753.00 frames. ], tot_loss[loss=0.4446, simple_loss=0.4515, pruned_loss=0.2189, over 3805643.68 frames. ], batch size: 45, lr: 4.51e-02, grad_scale: 8.0 +2023-03-31 20:05:00,231 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3557.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:05:07,871 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3564.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:05:31,015 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3582.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:05:33,604 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-03-31 20:05:47,590 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-31 20:05:53,943 INFO [train.py:903] (3/4) Epoch 1, batch 3600, loss[loss=0.4518, simple_loss=0.4622, pruned_loss=0.2207, over 19531.00 frames. ], tot_loss[loss=0.4595, simple_loss=0.4606, pruned_loss=0.2292, over 3807128.78 frames. ], batch size: 54, lr: 4.50e-02, grad_scale: 8.0 +2023-03-31 20:06:00,965 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+02 9.459e+02 1.417e+03 1.964e+03 2.103e+04, threshold=2.834e+03, percent-clipped=17.0 +2023-03-31 20:06:55,033 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3650.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:06:55,794 INFO [train.py:903] (3/4) Epoch 1, batch 3650, loss[loss=0.5684, simple_loss=0.5259, pruned_loss=0.3055, over 19344.00 frames. ], tot_loss[loss=0.4575, simple_loss=0.46, pruned_loss=0.2275, over 3817666.48 frames. 
], batch size: 66, lr: 4.49e-02, grad_scale: 8.0 +2023-03-31 20:07:18,765 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3670.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:49,511 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3694.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:07:50,860 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3695.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:07:56,857 INFO [train.py:903] (3/4) Epoch 1, batch 3700, loss[loss=0.4817, simple_loss=0.4819, pruned_loss=0.2408, over 19654.00 frames. ], tot_loss[loss=0.4602, simple_loss=0.4618, pruned_loss=0.2293, over 3832395.91 frames. ], batch size: 58, lr: 4.48e-02, grad_scale: 8.0 +2023-03-31 20:08:01,726 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1521, 3.7532, 4.6521, 4.3360, 2.4872, 4.1924, 3.9122, 4.1897], + device='cuda:3'), covar=tensor([0.0219, 0.0442, 0.0224, 0.0167, 0.2195, 0.0176, 0.0285, 0.0433], + device='cuda:3'), in_proj_covar=tensor([0.0136, 0.0179, 0.0217, 0.0148, 0.0312, 0.0124, 0.0170, 0.0199], + device='cuda:3'), out_proj_covar=tensor([9.5508e-05, 1.2382e-04, 1.4064e-04, 9.0595e-05, 1.7691e-04, 8.3968e-05, + 1.1319e-04, 1.2267e-04], device='cuda:3') +2023-03-31 20:08:05,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.255e+02 1.022e+03 1.666e+03 2.666e+03 1.441e+04, threshold=3.331e+03, percent-clipped=22.0 +2023-03-31 20:09:01,160 INFO [train.py:903] (3/4) Epoch 1, batch 3750, loss[loss=0.4791, simple_loss=0.4914, pruned_loss=0.2334, over 19400.00 frames. ], tot_loss[loss=0.4572, simple_loss=0.4604, pruned_loss=0.227, over 3822081.50 frames. ], batch size: 70, lr: 4.47e-02, grad_scale: 8.0 +2023-03-31 20:09:02,765 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3752.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:09:03,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2927, 1.4333, 1.5108, 1.2295, 1.3350, 0.7307, 0.5908, 1.6228], + device='cuda:3'), covar=tensor([0.1512, 0.0794, 0.0963, 0.1345, 0.1169, 0.1971, 0.2440, 0.1233], + device='cuda:3'), in_proj_covar=tensor([0.0232, 0.0153, 0.0154, 0.0192, 0.0142, 0.0212, 0.0229, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:09:19,438 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3765.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:09:34,307 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3777.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:10:05,359 INFO [train.py:903] (3/4) Epoch 1, batch 3800, loss[loss=0.4516, simple_loss=0.452, pruned_loss=0.2256, over 19838.00 frames. ], tot_loss[loss=0.4528, simple_loss=0.4577, pruned_loss=0.2239, over 3823407.22 frames. ], batch size: 52, lr: 4.46e-02, grad_scale: 8.0 +2023-03-31 20:10:12,576 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.844e+02 1.035e+03 1.394e+03 1.973e+03 4.112e+03, threshold=2.788e+03, percent-clipped=1.0 +2023-03-31 20:10:13,349 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-31 20:10:41,878 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-03-31 20:11:08,752 INFO [train.py:903] (3/4) Epoch 1, batch 3850, loss[loss=0.5392, simple_loss=0.519, pruned_loss=0.2797, over 13564.00 frames. ], tot_loss[loss=0.4503, simple_loss=0.4562, pruned_loss=0.2222, over 3819650.53 frames. ], batch size: 136, lr: 4.45e-02, grad_scale: 8.0 +2023-03-31 20:12:13,118 INFO [train.py:903] (3/4) Epoch 1, batch 3900, loss[loss=0.4532, simple_loss=0.46, pruned_loss=0.2232, over 18794.00 frames. ], tot_loss[loss=0.4463, simple_loss=0.4536, pruned_loss=0.2195, over 3831982.65 frames. ], batch size: 74, lr: 4.44e-02, grad_scale: 8.0 +2023-03-31 20:12:17,965 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9181, 1.3678, 1.6597, 1.1008, 2.5556, 3.1863, 3.0343, 3.0571], + device='cuda:3'), covar=tensor([0.1710, 0.2551, 0.1965, 0.2992, 0.0665, 0.0196, 0.0251, 0.0317], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0258, 0.0260, 0.0287, 0.0197, 0.0130, 0.0151, 0.0128], + device='cuda:3'), out_proj_covar=tensor([2.0124e-04, 1.8870e-04, 1.9291e-04, 2.1106e-04, 1.6942e-04, 9.0144e-05, + 1.1416e-04, 9.9066e-05], device='cuda:3') +2023-03-31 20:12:22,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.917e+02 1.152e+03 1.441e+03 1.935e+03 3.736e+03, threshold=2.883e+03, percent-clipped=2.0 +2023-03-31 20:12:22,948 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-31 20:12:23,361 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3908.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:13:08,746 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:13:11,155 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6985, 2.8633, 3.0551, 2.8540, 0.9436, 2.7169, 2.4524, 2.7103], + device='cuda:3'), covar=tensor([0.0358, 0.0484, 0.0519, 0.0386, 0.3057, 0.0294, 0.0558, 0.0891], + device='cuda:3'), in_proj_covar=tensor([0.0138, 0.0175, 0.0215, 0.0152, 0.0313, 0.0116, 0.0171, 0.0208], + device='cuda:3'), out_proj_covar=tensor([9.5347e-05, 1.2038e-04, 1.4004e-04, 9.3064e-05, 1.7663e-04, 8.0132e-05, + 1.1329e-04, 1.2646e-04], device='cuda:3') +2023-03-31 20:13:18,226 INFO [train.py:903] (3/4) Epoch 1, batch 3950, loss[loss=0.4506, simple_loss=0.478, pruned_loss=0.2116, over 18848.00 frames. ], tot_loss[loss=0.4501, simple_loss=0.4554, pruned_loss=0.2224, over 3818411.27 frames. ], batch size: 74, lr: 4.43e-02, grad_scale: 8.0 +2023-03-31 20:13:24,005 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 20:14:04,514 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 20:14:24,018 INFO [train.py:903] (3/4) Epoch 1, batch 4000, loss[loss=0.3673, simple_loss=0.3899, pruned_loss=0.1724, over 15078.00 frames. ], tot_loss[loss=0.4441, simple_loss=0.4514, pruned_loss=0.2184, over 3806991.61 frames. 
], batch size: 33, lr: 4.42e-02, grad_scale: 8.0 +2023-03-31 20:14:26,763 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4328, 1.1325, 1.2442, 1.4639, 2.1764, 1.1293, 2.0243, 2.1709], + device='cuda:3'), covar=tensor([0.0431, 0.2297, 0.2640, 0.1670, 0.0474, 0.2312, 0.0781, 0.0561], + device='cuda:3'), in_proj_covar=tensor([0.0164, 0.0253, 0.0250, 0.0255, 0.0184, 0.0317, 0.0219, 0.0189], + device='cuda:3'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:14:30,932 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.942e+02 1.053e+03 1.358e+03 1.948e+03 9.883e+03, threshold=2.717e+03, percent-clipped=12.0 +2023-03-31 20:14:49,347 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4021.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:14:52,460 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4023.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:11,867 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4038.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:12,800 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 20:15:22,664 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4046.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:15:28,264 INFO [train.py:903] (3/4) Epoch 1, batch 4050, loss[loss=0.4455, simple_loss=0.4575, pruned_loss=0.2167, over 19738.00 frames. ], tot_loss[loss=0.4434, simple_loss=0.451, pruned_loss=0.2179, over 3805387.31 frames. ], batch size: 63, lr: 4.41e-02, grad_scale: 8.0 +2023-03-31 20:16:32,907 INFO [train.py:903] (3/4) Epoch 1, batch 4100, loss[loss=0.3507, simple_loss=0.3786, pruned_loss=0.1614, over 19754.00 frames. ], tot_loss[loss=0.439, simple_loss=0.4477, pruned_loss=0.2151, over 3802247.97 frames. ], batch size: 46, lr: 4.40e-02, grad_scale: 8.0 +2023-03-31 20:16:41,801 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.649e+02 1.198e+03 1.458e+03 1.833e+03 3.490e+03, threshold=2.915e+03, percent-clipped=3.0 +2023-03-31 20:17:08,516 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 20:17:38,859 INFO [train.py:903] (3/4) Epoch 1, batch 4150, loss[loss=0.5073, simple_loss=0.4876, pruned_loss=0.2635, over 13562.00 frames. ], tot_loss[loss=0.4355, simple_loss=0.4456, pruned_loss=0.2127, over 3806231.33 frames. ], batch size: 136, lr: 4.39e-02, grad_scale: 8.0 +2023-03-31 20:17:41,668 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4153.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:17:49,797 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4159.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:18:44,275 INFO [train.py:903] (3/4) Epoch 1, batch 4200, loss[loss=0.4557, simple_loss=0.4723, pruned_loss=0.2196, over 19743.00 frames. ], tot_loss[loss=0.4336, simple_loss=0.4447, pruned_loss=0.2112, over 3815992.70 frames. ], batch size: 63, lr: 4.38e-02, grad_scale: 8.0 +2023-03-31 20:18:46,651 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-03-31 20:18:51,465 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.919e+02 1.098e+03 1.489e+03 3.268e+03, threshold=2.196e+03, percent-clipped=3.0 +2023-03-31 20:19:01,556 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-31 20:19:47,736 INFO [train.py:903] (3/4) Epoch 1, batch 4250, loss[loss=0.5242, simple_loss=0.5089, pruned_loss=0.2698, over 19732.00 frames. ], tot_loss[loss=0.4358, simple_loss=0.4462, pruned_loss=0.2127, over 3816797.43 frames. ], batch size: 63, lr: 4.36e-02, grad_scale: 8.0 +2023-03-31 20:20:02,065 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 20:20:15,223 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 20:20:25,113 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4279.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:20:36,686 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:20:52,970 INFO [train.py:903] (3/4) Epoch 1, batch 4300, loss[loss=0.3724, simple_loss=0.4014, pruned_loss=0.1717, over 19586.00 frames. ], tot_loss[loss=0.4341, simple_loss=0.4449, pruned_loss=0.2116, over 3826429.28 frames. ], batch size: 52, lr: 4.35e-02, grad_scale: 8.0 +2023-03-31 20:20:57,843 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4304.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:21:01,264 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4306.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:21:02,055 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.021e+02 1.171e+03 1.478e+03 2.100e+03 3.660e+03, threshold=2.957e+03, percent-clipped=20.0 +2023-03-31 20:21:44,198 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7471, 3.4161, 2.2393, 3.1828, 1.4185, 3.3734, 3.0578, 3.0665], + device='cuda:3'), covar=tensor([0.0638, 0.1200, 0.2062, 0.0613, 0.3129, 0.0794, 0.0572, 0.0847], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0231, 0.0256, 0.0205, 0.0273, 0.0207, 0.0155, 0.0171], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:3') +2023-03-31 20:21:46,494 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 20:21:59,415 INFO [train.py:903] (3/4) Epoch 1, batch 4350, loss[loss=0.4379, simple_loss=0.4468, pruned_loss=0.2145, over 19682.00 frames. ], tot_loss[loss=0.4316, simple_loss=0.4436, pruned_loss=0.2097, over 3834136.49 frames. ], batch size: 58, lr: 4.34e-02, grad_scale: 8.0 +2023-03-31 20:22:23,983 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.59 vs. limit=5.0 +2023-03-31 20:23:03,133 INFO [train.py:903] (3/4) Epoch 1, batch 4400, loss[loss=0.4151, simple_loss=0.4287, pruned_loss=0.2007, over 19621.00 frames. ], tot_loss[loss=0.4275, simple_loss=0.441, pruned_loss=0.2071, over 3846217.39 frames. 
], batch size: 50, lr: 4.33e-02, grad_scale: 8.0 +2023-03-31 20:23:05,873 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4403.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:23:11,485 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.454e+02 9.391e+02 1.114e+03 1.514e+03 3.216e+03, threshold=2.228e+03, percent-clipped=1.0 +2023-03-31 20:23:14,566 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4409.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:23:29,276 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 20:23:39,404 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 20:23:46,934 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4434.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:23:50,503 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8610, 1.2293, 1.5997, 1.1610, 2.5087, 3.3376, 2.9669, 3.1636], + device='cuda:3'), covar=tensor([0.1767, 0.3135, 0.2442, 0.2797, 0.0704, 0.0160, 0.0258, 0.0207], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0268, 0.0278, 0.0297, 0.0203, 0.0126, 0.0153, 0.0117], + device='cuda:3'), out_proj_covar=tensor([2.1958e-04, 2.0524e-04, 2.1240e-04, 2.2676e-04, 1.8258e-04, 9.1234e-05, + 1.2155e-04, 9.7951e-05], device='cuda:3') +2023-03-31 20:24:04,974 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3956, 2.3672, 1.6425, 2.7726, 1.9345, 2.4326, 1.9434, 1.8206], + device='cuda:3'), covar=tensor([0.1161, 0.0850, 0.1044, 0.0923, 0.1477, 0.0778, 0.2042, 0.1161], + device='cuda:3'), in_proj_covar=tensor([0.0112, 0.0097, 0.0127, 0.0141, 0.0147, 0.0085, 0.0146, 0.0121], + device='cuda:3'), out_proj_covar=tensor([7.9044e-05, 6.7667e-05, 8.3553e-05, 9.2726e-05, 9.3568e-05, 5.2100e-05, + 1.0632e-04, 8.2738e-05], device='cuda:3') +2023-03-31 20:24:06,852 INFO [train.py:903] (3/4) Epoch 1, batch 4450, loss[loss=0.377, simple_loss=0.3959, pruned_loss=0.1791, over 19466.00 frames. ], tot_loss[loss=0.427, simple_loss=0.4404, pruned_loss=0.2068, over 3830011.39 frames. ], batch size: 49, lr: 4.32e-02, grad_scale: 8.0 +2023-03-31 20:25:00,175 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-31 20:25:09,796 INFO [train.py:903] (3/4) Epoch 1, batch 4500, loss[loss=0.4654, simple_loss=0.4729, pruned_loss=0.229, over 17358.00 frames. ], tot_loss[loss=0.4256, simple_loss=0.4395, pruned_loss=0.2058, over 3825692.41 frames. 
], batch size: 101, lr: 4.31e-02, grad_scale: 8.0 +2023-03-31 20:25:12,511 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4503.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:25:18,130 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.500e+02 1.049e+03 1.357e+03 1.620e+03 3.962e+03, threshold=2.713e+03, percent-clipped=8.0 +2023-03-31 20:25:31,639 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9759, 1.1511, 2.1793, 1.3018, 2.9090, 3.4130, 3.4486, 2.3913], + device='cuda:3'), covar=tensor([0.1840, 0.1843, 0.1516, 0.1772, 0.1055, 0.0534, 0.1029, 0.1451], + device='cuda:3'), in_proj_covar=tensor([0.0228, 0.0220, 0.0215, 0.0239, 0.0239, 0.0179, 0.0248, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:26:14,040 INFO [train.py:903] (3/4) Epoch 1, batch 4550, loss[loss=0.3664, simple_loss=0.3946, pruned_loss=0.1691, over 19752.00 frames. ], tot_loss[loss=0.4221, simple_loss=0.4373, pruned_loss=0.2034, over 3814276.75 frames. ], batch size: 46, lr: 4.30e-02, grad_scale: 8.0 +2023-03-31 20:26:24,212 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 20:26:47,301 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 20:27:16,034 INFO [train.py:903] (3/4) Epoch 1, batch 4600, loss[loss=0.4272, simple_loss=0.4442, pruned_loss=0.2051, over 18236.00 frames. ], tot_loss[loss=0.4218, simple_loss=0.4369, pruned_loss=0.2034, over 3821479.90 frames. ], batch size: 84, lr: 4.29e-02, grad_scale: 4.0 +2023-03-31 20:27:24,059 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.210e+02 9.691e+02 1.279e+03 1.723e+03 8.130e+03, threshold=2.557e+03, percent-clipped=7.0 +2023-03-31 20:27:36,635 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4618.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:17,136 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4650.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:18,168 INFO [train.py:903] (3/4) Epoch 1, batch 4650, loss[loss=0.5297, simple_loss=0.5122, pruned_loss=0.2736, over 13040.00 frames. ], tot_loss[loss=0.4211, simple_loss=0.4366, pruned_loss=0.2028, over 3821080.93 frames. ], batch size: 136, lr: 4.28e-02, grad_scale: 4.0 +2023-03-31 20:28:27,866 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4659.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:28:34,636 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 20:28:44,787 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 20:28:58,636 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4684.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:29:18,957 INFO [train.py:903] (3/4) Epoch 1, batch 4700, loss[loss=0.4136, simple_loss=0.4476, pruned_loss=0.1898, over 19666.00 frames. ], tot_loss[loss=0.4194, simple_loss=0.4357, pruned_loss=0.2015, over 3825140.98 frames. 
], batch size: 55, lr: 4.27e-02, grad_scale: 4.0 +2023-03-31 20:29:21,846 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9736, 1.2687, 1.1690, 1.7903, 1.5671, 2.0137, 1.8596, 1.9420], + device='cuda:3'), covar=tensor([0.0824, 0.1789, 0.1953, 0.1563, 0.2178, 0.1147, 0.1887, 0.0933], + device='cuda:3'), in_proj_covar=tensor([0.0235, 0.0302, 0.0301, 0.0328, 0.0395, 0.0280, 0.0355, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 20:29:28,007 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+02 9.658e+02 1.202e+03 1.526e+03 2.859e+03, threshold=2.405e+03, percent-clipped=1.0 +2023-03-31 20:29:39,216 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 20:30:21,804 INFO [train.py:903] (3/4) Epoch 1, batch 4750, loss[loss=0.5142, simple_loss=0.494, pruned_loss=0.2672, over 13861.00 frames. ], tot_loss[loss=0.4171, simple_loss=0.4342, pruned_loss=0.2, over 3835963.16 frames. ], batch size: 135, lr: 4.26e-02, grad_scale: 4.0 +2023-03-31 20:30:39,345 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4765.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:31:18,393 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6875, 3.7415, 4.2463, 3.9432, 1.3755, 3.4972, 3.3933, 3.6619], + device='cuda:3'), covar=tensor([0.0273, 0.0358, 0.0284, 0.0211, 0.2799, 0.0257, 0.0336, 0.0691], + device='cuda:3'), in_proj_covar=tensor([0.0166, 0.0211, 0.0252, 0.0181, 0.0344, 0.0131, 0.0186, 0.0261], + device='cuda:3'), out_proj_covar=tensor([1.1039e-04, 1.4284e-04, 1.6781e-04, 1.1135e-04, 1.9166e-04, 8.4401e-05, + 1.2011e-04, 1.5564e-04], device='cuda:3') +2023-03-31 20:31:23,850 INFO [train.py:903] (3/4) Epoch 1, batch 4800, loss[loss=0.4281, simple_loss=0.445, pruned_loss=0.2056, over 19530.00 frames. ], tot_loss[loss=0.419, simple_loss=0.4355, pruned_loss=0.2012, over 3819027.62 frames. ], batch size: 54, lr: 4.25e-02, grad_scale: 8.0 +2023-03-31 20:31:32,988 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.504e+02 1.038e+03 1.224e+03 1.522e+03 3.175e+03, threshold=2.447e+03, percent-clipped=5.0 +2023-03-31 20:32:25,501 INFO [train.py:903] (3/4) Epoch 1, batch 4850, loss[loss=0.4042, simple_loss=0.4363, pruned_loss=0.186, over 19554.00 frames. ], tot_loss[loss=0.4199, simple_loss=0.4364, pruned_loss=0.2017, over 3817384.57 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 8.0 +2023-03-31 20:32:46,764 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 20:32:55,280 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4874.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:06,423 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-03-31 20:33:12,764 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 20:33:12,785 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 20:33:23,803 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-03-31 20:33:25,335 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4899.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:33:27,238 INFO [train.py:903] (3/4) Epoch 1, batch 4900, loss[loss=0.4417, simple_loss=0.4547, pruned_loss=0.2143, over 18819.00 frames. ], tot_loss[loss=0.4165, simple_loss=0.4341, pruned_loss=0.1994, over 3819940.15 frames. ], batch size: 74, lr: 4.23e-02, grad_scale: 8.0 +2023-03-31 20:33:37,034 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.319e+02 9.845e+02 1.167e+03 1.485e+03 2.856e+03, threshold=2.333e+03, percent-clipped=2.0 +2023-03-31 20:33:44,810 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 20:34:29,502 INFO [train.py:903] (3/4) Epoch 1, batch 4950, loss[loss=0.4733, simple_loss=0.4596, pruned_loss=0.2435, over 19028.00 frames. ], tot_loss[loss=0.4152, simple_loss=0.4335, pruned_loss=0.1985, over 3817645.17 frames. ], batch size: 42, lr: 4.21e-02, grad_scale: 8.0 +2023-03-31 20:34:38,656 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3520, 1.3868, 1.1565, 1.3785, 1.2458, 1.3829, 1.2002, 1.4433], + device='cuda:3'), covar=tensor([0.0791, 0.1361, 0.1399, 0.0965, 0.1608, 0.0728, 0.1367, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0205, 0.0307, 0.0262, 0.0224, 0.0293, 0.0219, 0.0247, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:34:40,548 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 20:35:05,873 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 20:35:31,813 INFO [train.py:903] (3/4) Epoch 1, batch 5000, loss[loss=0.3701, simple_loss=0.4075, pruned_loss=0.1663, over 19662.00 frames. ], tot_loss[loss=0.4154, simple_loss=0.4338, pruned_loss=0.1985, over 3819532.07 frames. ], batch size: 55, lr: 4.20e-02, grad_scale: 8.0 +2023-03-31 20:35:35,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-03-31 20:35:40,124 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.692e+02 8.720e+02 1.063e+03 1.451e+03 3.452e+03, threshold=2.125e+03, percent-clipped=4.0 +2023-03-31 20:35:46,914 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 20:35:56,112 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5021.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:27,134 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5046.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:36:33,266 INFO [train.py:903] (3/4) Epoch 1, batch 5050, loss[loss=0.4106, simple_loss=0.4416, pruned_loss=0.1898, over 19076.00 frames. ], tot_loss[loss=0.415, simple_loss=0.4337, pruned_loss=0.1982, over 3809980.30 frames. ], batch size: 69, lr: 4.19e-02, grad_scale: 8.0 +2023-03-31 20:37:02,501 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 20:37:34,520 INFO [train.py:903] (3/4) Epoch 1, batch 5100, loss[loss=0.4575, simple_loss=0.4594, pruned_loss=0.2278, over 18241.00 frames. ], tot_loss[loss=0.4145, simple_loss=0.4333, pruned_loss=0.1979, over 3818416.74 frames. 
], batch size: 83, lr: 4.18e-02, grad_scale: 8.0 +2023-03-31 20:37:39,778 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 20:37:43,121 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+02 1.065e+03 1.254e+03 1.490e+03 3.647e+03, threshold=2.509e+03, percent-clipped=6.0 +2023-03-31 20:37:43,168 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 20:37:47,677 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 20:38:34,795 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1210, 1.2025, 0.9722, 1.0665, 0.9196, 1.0692, 0.9494, 1.1616], + device='cuda:3'), covar=tensor([0.1363, 0.1731, 0.2029, 0.1428, 0.2065, 0.1125, 0.1662, 0.1138], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0313, 0.0262, 0.0226, 0.0291, 0.0214, 0.0245, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:38:36,972 INFO [train.py:903] (3/4) Epoch 1, batch 5150, loss[loss=0.4098, simple_loss=0.4415, pruned_loss=0.1891, over 19567.00 frames. ], tot_loss[loss=0.4145, simple_loss=0.433, pruned_loss=0.198, over 3812774.18 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 8.0 +2023-03-31 20:38:45,548 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5527, 4.1655, 2.1219, 3.6772, 1.1275, 4.0342, 3.5751, 3.7442], + device='cuda:3'), covar=tensor([0.0605, 0.1080, 0.2365, 0.0662, 0.3634, 0.0641, 0.0657, 0.0622], + device='cuda:3'), in_proj_covar=tensor([0.0238, 0.0238, 0.0269, 0.0224, 0.0289, 0.0226, 0.0167, 0.0178], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:3') +2023-03-31 20:38:46,435 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 20:39:20,328 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 20:39:37,969 INFO [train.py:903] (3/4) Epoch 1, batch 5200, loss[loss=0.4184, simple_loss=0.4451, pruned_loss=0.1958, over 19517.00 frames. ], tot_loss[loss=0.416, simple_loss=0.4336, pruned_loss=0.1992, over 3814801.00 frames. ], batch size: 56, lr: 4.16e-02, grad_scale: 8.0 +2023-03-31 20:39:45,881 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+02 1.028e+03 1.252e+03 1.630e+03 4.880e+03, threshold=2.504e+03, percent-clipped=1.0 +2023-03-31 20:39:50,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 20:40:08,859 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0872, 1.3703, 2.0750, 1.5844, 2.9304, 4.6446, 4.3042, 5.0426], + device='cuda:3'), covar=tensor([0.1839, 0.3023, 0.2361, 0.2570, 0.0618, 0.0124, 0.0145, 0.0089], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0280, 0.0299, 0.0307, 0.0203, 0.0123, 0.0173, 0.0127], + device='cuda:3'), out_proj_covar=tensor([2.4248e-04, 2.2567e-04, 2.3750e-04, 2.4721e-04, 1.8835e-04, 9.8263e-05, + 1.4135e-04, 1.1334e-04], device='cuda:3') +2023-03-31 20:40:32,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-03-31 20:40:39,287 INFO [train.py:903] (3/4) Epoch 1, batch 5250, loss[loss=0.3402, simple_loss=0.3721, pruned_loss=0.1542, over 19473.00 frames. ], tot_loss[loss=0.4112, simple_loss=0.4309, pruned_loss=0.1958, over 3822795.69 frames. ], batch size: 49, lr: 4.15e-02, grad_scale: 8.0 +2023-03-31 20:40:42,832 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0143, 4.4536, 5.7239, 5.4948, 1.4754, 5.1421, 4.7287, 4.9699], + device='cuda:3'), covar=tensor([0.0172, 0.0354, 0.0273, 0.0151, 0.3180, 0.0133, 0.0262, 0.0650], + device='cuda:3'), in_proj_covar=tensor([0.0172, 0.0217, 0.0267, 0.0190, 0.0358, 0.0134, 0.0196, 0.0277], + device='cuda:3'), out_proj_covar=tensor([1.1452e-04, 1.4342e-04, 1.7712e-04, 1.1366e-04, 1.9788e-04, 8.7248e-05, + 1.2384e-04, 1.6197e-04], device='cuda:3') +2023-03-31 20:41:17,744 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7302, 1.2350, 1.1464, 1.6020, 1.4303, 1.7274, 1.8689, 1.6517], + device='cuda:3'), covar=tensor([0.0985, 0.1627, 0.1789, 0.1549, 0.2145, 0.1324, 0.1880, 0.1082], + device='cuda:3'), in_proj_covar=tensor([0.0239, 0.0296, 0.0297, 0.0322, 0.0387, 0.0272, 0.0358, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 20:41:39,595 INFO [train.py:903] (3/4) Epoch 1, batch 5300, loss[loss=0.3871, simple_loss=0.4185, pruned_loss=0.1778, over 19784.00 frames. ], tot_loss[loss=0.4115, simple_loss=0.4308, pruned_loss=0.196, over 3824272.50 frames. ], batch size: 56, lr: 4.14e-02, grad_scale: 8.0 +2023-03-31 20:41:48,686 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.962e+02 9.394e+02 1.191e+03 1.647e+03 4.206e+03, threshold=2.383e+03, percent-clipped=5.0 +2023-03-31 20:41:53,356 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-03-31 20:42:41,818 INFO [train.py:903] (3/4) Epoch 1, batch 5350, loss[loss=0.3897, simple_loss=0.4241, pruned_loss=0.1777, over 19693.00 frames. ], tot_loss[loss=0.4088, simple_loss=0.4288, pruned_loss=0.1944, over 3834310.18 frames. ], batch size: 63, lr: 4.13e-02, grad_scale: 8.0 +2023-03-31 20:43:13,710 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 20:43:43,593 INFO [train.py:903] (3/4) Epoch 1, batch 5400, loss[loss=0.3595, simple_loss=0.4037, pruned_loss=0.1576, over 19533.00 frames. ], tot_loss[loss=0.4051, simple_loss=0.4264, pruned_loss=0.1919, over 3841058.89 frames. ], batch size: 56, lr: 4.12e-02, grad_scale: 8.0 +2023-03-31 20:43:51,076 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.073e+02 9.364e+02 1.084e+03 1.611e+03 4.795e+03, threshold=2.168e+03, percent-clipped=7.0 +2023-03-31 20:44:28,519 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-31 20:44:44,653 INFO [train.py:903] (3/4) Epoch 1, batch 5450, loss[loss=0.4572, simple_loss=0.4651, pruned_loss=0.2247, over 19681.00 frames. ], tot_loss[loss=0.4057, simple_loss=0.4263, pruned_loss=0.1926, over 3838458.15 frames. 
], batch size: 59, lr: 4.11e-02, grad_scale: 8.0 +2023-03-31 20:44:46,096 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1585, 2.8531, 1.7206, 2.6774, 0.9357, 2.8982, 2.5974, 2.7001], + device='cuda:3'), covar=tensor([0.0824, 0.1206, 0.2286, 0.0798, 0.3366, 0.0968, 0.0662, 0.0797], + device='cuda:3'), in_proj_covar=tensor([0.0236, 0.0240, 0.0279, 0.0225, 0.0292, 0.0231, 0.0170, 0.0180], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:3') +2023-03-31 20:45:46,525 INFO [train.py:903] (3/4) Epoch 1, batch 5500, loss[loss=0.524, simple_loss=0.5066, pruned_loss=0.2707, over 13203.00 frames. ], tot_loss[loss=0.407, simple_loss=0.4276, pruned_loss=0.1932, over 3835304.93 frames. ], batch size: 135, lr: 4.10e-02, grad_scale: 8.0 +2023-03-31 20:45:54,009 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+02 9.504e+02 1.107e+03 1.412e+03 4.004e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 20:46:08,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 20:46:30,122 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3518, 1.5315, 1.0127, 1.6081, 1.5813, 1.7257, 1.5350, 1.8238], + device='cuda:3'), covar=tensor([0.1081, 0.1911, 0.1850, 0.1152, 0.1738, 0.0735, 0.1209, 0.0748], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0318, 0.0263, 0.0227, 0.0292, 0.0223, 0.0243, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:46:46,622 INFO [train.py:903] (3/4) Epoch 1, batch 5550, loss[loss=0.4217, simple_loss=0.4463, pruned_loss=0.1986, over 19350.00 frames. ], tot_loss[loss=0.4055, simple_loss=0.4264, pruned_loss=0.1923, over 3839187.64 frames. ], batch size: 66, lr: 4.09e-02, grad_scale: 8.0 +2023-03-31 20:46:53,475 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-03-31 20:47:42,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 20:47:47,486 INFO [train.py:903] (3/4) Epoch 1, batch 5600, loss[loss=0.4692, simple_loss=0.466, pruned_loss=0.2362, over 17782.00 frames. ], tot_loss[loss=0.4027, simple_loss=0.4245, pruned_loss=0.1905, over 3852812.35 frames. ], batch size: 101, lr: 4.08e-02, grad_scale: 8.0 +2023-03-31 20:47:56,556 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.862e+02 1.009e+03 1.185e+03 1.400e+03 2.216e+03, threshold=2.370e+03, percent-clipped=2.0 +2023-03-31 20:48:48,828 INFO [train.py:903] (3/4) Epoch 1, batch 5650, loss[loss=0.3272, simple_loss=0.3568, pruned_loss=0.1488, over 19313.00 frames. ], tot_loss[loss=0.4045, simple_loss=0.4253, pruned_loss=0.1918, over 3848895.43 frames. 
], batch size: 44, lr: 4.07e-02, grad_scale: 8.0 +2023-03-31 20:49:09,595 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1628, 3.9034, 2.7234, 3.3927, 1.8217, 3.5240, 3.3365, 3.4802], + device='cuda:3'), covar=tensor([0.0577, 0.0877, 0.1849, 0.0745, 0.2935, 0.0931, 0.0653, 0.0695], + device='cuda:3'), in_proj_covar=tensor([0.0237, 0.0231, 0.0277, 0.0224, 0.0291, 0.0225, 0.0170, 0.0183], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:3') +2023-03-31 20:49:09,666 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5668.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 20:49:32,790 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 20:49:49,711 INFO [train.py:903] (3/4) Epoch 1, batch 5700, loss[loss=0.4137, simple_loss=0.4363, pruned_loss=0.1955, over 19649.00 frames. ], tot_loss[loss=0.4069, simple_loss=0.4264, pruned_loss=0.1937, over 3842715.70 frames. ], batch size: 60, lr: 4.06e-02, grad_scale: 8.0 +2023-03-31 20:49:57,493 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.193e+02 1.084e+03 1.385e+03 1.754e+03 4.325e+03, threshold=2.770e+03, percent-clipped=14.0 +2023-03-31 20:50:51,575 INFO [train.py:903] (3/4) Epoch 1, batch 5750, loss[loss=0.3466, simple_loss=0.3805, pruned_loss=0.1563, over 19755.00 frames. ], tot_loss[loss=0.4043, simple_loss=0.4249, pruned_loss=0.1918, over 3848497.14 frames. ], batch size: 46, lr: 4.05e-02, grad_scale: 8.0 +2023-03-31 20:50:51,594 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 20:50:59,658 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 20:51:05,170 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 20:51:18,557 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5773.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:51:52,607 INFO [train.py:903] (3/4) Epoch 1, batch 5800, loss[loss=0.4147, simple_loss=0.445, pruned_loss=0.1922, over 19615.00 frames. ], tot_loss[loss=0.4046, simple_loss=0.4255, pruned_loss=0.1918, over 3838484.89 frames. 
], batch size: 57, lr: 4.04e-02, grad_scale: 8.0 +2023-03-31 20:52:02,181 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.310e+02 8.969e+02 1.169e+03 1.352e+03 2.735e+03, threshold=2.337e+03, percent-clipped=0.0 +2023-03-31 20:52:02,647 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1417, 1.2154, 1.8097, 1.3638, 2.6279, 2.5828, 2.6389, 1.8300], + device='cuda:3'), covar=tensor([0.1599, 0.1732, 0.1233, 0.1616, 0.0707, 0.0633, 0.0864, 0.1326], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0269, 0.0254, 0.0284, 0.0296, 0.0231, 0.0317, 0.0290], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:52:25,497 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0051, 2.0684, 2.0558, 2.6907, 4.6283, 1.0363, 2.2130, 4.0742], + device='cuda:3'), covar=tensor([0.0222, 0.2330, 0.2628, 0.1549, 0.0287, 0.2753, 0.1287, 0.0366], + device='cuda:3'), in_proj_covar=tensor([0.0188, 0.0284, 0.0271, 0.0263, 0.0214, 0.0319, 0.0236, 0.0219], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 20:52:53,460 INFO [train.py:903] (3/4) Epoch 1, batch 5850, loss[loss=0.3526, simple_loss=0.3773, pruned_loss=0.164, over 19758.00 frames. ], tot_loss[loss=0.4059, simple_loss=0.4263, pruned_loss=0.1928, over 3830433.81 frames. ], batch size: 45, lr: 4.03e-02, grad_scale: 8.0 +2023-03-31 20:53:23,813 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=5876.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:53:55,088 INFO [train.py:903] (3/4) Epoch 1, batch 5900, loss[loss=0.4274, simple_loss=0.4528, pruned_loss=0.201, over 19679.00 frames. ], tot_loss[loss=0.4023, simple_loss=0.4236, pruned_loss=0.1905, over 3823595.74 frames. ], batch size: 58, lr: 4.02e-02, grad_scale: 8.0 +2023-03-31 20:53:58,588 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 20:54:03,122 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.633e+02 8.668e+02 1.127e+03 1.397e+03 3.736e+03, threshold=2.255e+03, percent-clipped=4.0 +2023-03-31 20:54:21,183 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 20:54:55,351 INFO [train.py:903] (3/4) Epoch 1, batch 5950, loss[loss=0.3684, simple_loss=0.3975, pruned_loss=0.1697, over 19497.00 frames. ], tot_loss[loss=0.3997, simple_loss=0.4217, pruned_loss=0.1889, over 3826874.76 frames. ], batch size: 49, lr: 4.01e-02, grad_scale: 8.0 +2023-03-31 20:55:02,437 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9278, 1.8206, 1.7824, 2.5209, 1.9895, 2.5499, 1.7596, 1.4414], + device='cuda:3'), covar=tensor([0.1060, 0.0847, 0.0593, 0.0639, 0.1058, 0.0429, 0.1595, 0.1217], + device='cuda:3'), in_proj_covar=tensor([0.0183, 0.0161, 0.0182, 0.0230, 0.0227, 0.0128, 0.0250, 0.0194], + device='cuda:3'), out_proj_covar=tensor([1.3641e-04, 1.1881e-04, 1.2153e-04, 1.5438e-04, 1.4831e-04, 8.7549e-05, + 1.7839e-04, 1.3483e-04], device='cuda:3') +2023-03-31 20:55:11,945 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-31 20:55:57,492 INFO [train.py:903] (3/4) Epoch 1, batch 6000, loss[loss=0.4021, simple_loss=0.4281, pruned_loss=0.1881, over 17173.00 frames. 
], tot_loss[loss=0.4013, simple_loss=0.4226, pruned_loss=0.19, over 3814819.82 frames. ], batch size: 101, lr: 4.00e-02, grad_scale: 8.0 +2023-03-31 20:55:57,492 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 20:56:10,585 INFO [train.py:937] (3/4) Epoch 1, validation: loss=0.2784, simple_loss=0.3626, pruned_loss=0.09714, over 944034.00 frames. +2023-03-31 20:56:10,586 INFO [train.py:938] (3/4) Maximum memory allocated so far is 17850MB +2023-03-31 20:56:19,577 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.849e+02 9.012e+02 1.240e+03 1.620e+03 2.952e+03, threshold=2.480e+03, percent-clipped=5.0 +2023-03-31 20:56:19,903 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:56:24,078 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6012.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:57:10,738 INFO [train.py:903] (3/4) Epoch 1, batch 6050, loss[loss=0.511, simple_loss=0.4962, pruned_loss=0.2629, over 18733.00 frames. ], tot_loss[loss=0.4043, simple_loss=0.4246, pruned_loss=0.192, over 3824837.91 frames. ], batch size: 74, lr: 3.99e-02, grad_scale: 8.0 +2023-03-31 20:57:24,367 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:57:40,479 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-03-31 20:58:12,899 INFO [train.py:903] (3/4) Epoch 1, batch 6100, loss[loss=0.36, simple_loss=0.3876, pruned_loss=0.1662, over 19787.00 frames. ], tot_loss[loss=0.4013, simple_loss=0.4226, pruned_loss=0.19, over 3824367.90 frames. ], batch size: 48, lr: 3.98e-02, grad_scale: 8.0 +2023-03-31 20:58:20,963 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+02 9.509e+02 1.169e+03 1.489e+03 2.977e+03, threshold=2.338e+03, percent-clipped=4.0 +2023-03-31 20:58:32,477 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6117.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:58:44,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6127.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 20:59:13,361 INFO [train.py:903] (3/4) Epoch 1, batch 6150, loss[loss=0.4424, simple_loss=0.4481, pruned_loss=0.2184, over 13825.00 frames. ], tot_loss[loss=0.4003, simple_loss=0.422, pruned_loss=0.1893, over 3811244.75 frames. ], batch size: 137, lr: 3.97e-02, grad_scale: 8.0 +2023-03-31 20:59:16,562 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6153.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 20:59:42,678 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 20:59:58,501 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6188.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:13,783 INFO [train.py:903] (3/4) Epoch 1, batch 6200, loss[loss=0.3569, simple_loss=0.404, pruned_loss=0.155, over 19652.00 frames. ], tot_loss[loss=0.4003, simple_loss=0.4218, pruned_loss=0.1894, over 3816237.82 frames. 
], batch size: 55, lr: 3.96e-02, grad_scale: 8.0 +2023-03-31 21:00:22,728 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 9.585e+02 1.181e+03 1.511e+03 2.920e+03, threshold=2.362e+03, percent-clipped=2.0 +2023-03-31 21:00:38,384 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:39,683 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:49,059 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6229.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:00:53,421 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6232.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:01:16,025 INFO [train.py:903] (3/4) Epoch 1, batch 6250, loss[loss=0.3691, simple_loss=0.4115, pruned_loss=0.1633, over 19658.00 frames. ], tot_loss[loss=0.3973, simple_loss=0.4201, pruned_loss=0.1872, over 3814354.95 frames. ], batch size: 55, lr: 3.95e-02, grad_scale: 8.0 +2023-03-31 21:01:19,936 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6651, 1.3844, 1.2105, 1.5786, 1.3748, 1.5030, 1.3620, 1.6914], + device='cuda:3'), covar=tensor([0.0927, 0.1920, 0.1555, 0.1211, 0.1641, 0.0963, 0.1505, 0.0830], + device='cuda:3'), in_proj_covar=tensor([0.0223, 0.0342, 0.0272, 0.0236, 0.0296, 0.0229, 0.0266, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:01:46,723 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 21:01:56,504 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6284.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:02:18,542 INFO [train.py:903] (3/4) Epoch 1, batch 6300, loss[loss=0.3345, simple_loss=0.3696, pruned_loss=0.1497, over 19400.00 frames. ], tot_loss[loss=0.3918, simple_loss=0.416, pruned_loss=0.1837, over 3821989.57 frames. 
], batch size: 47, lr: 3.94e-02, grad_scale: 8.0 +2023-03-31 21:02:20,118 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8237, 1.1747, 1.1680, 1.8890, 1.7918, 1.8340, 1.9206, 1.8075], + device='cuda:3'), covar=tensor([0.1073, 0.2218, 0.1992, 0.1572, 0.2035, 0.1379, 0.1911, 0.1054], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0304, 0.0301, 0.0325, 0.0392, 0.0277, 0.0344, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 21:02:26,555 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.984e+02 8.812e+02 1.125e+03 1.363e+03 2.149e+03, threshold=2.249e+03, percent-clipped=0.0 +2023-03-31 21:02:58,304 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6335.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:00,493 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2929, 1.3589, 1.5386, 2.0131, 2.8829, 1.2359, 1.8315, 2.7713], + device='cuda:3'), covar=tensor([0.0308, 0.2731, 0.2736, 0.1729, 0.0418, 0.2502, 0.1167, 0.0483], + device='cuda:3'), in_proj_covar=tensor([0.0190, 0.0285, 0.0272, 0.0267, 0.0220, 0.0318, 0.0241, 0.0227], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:03:08,921 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3234, 1.3630, 1.0877, 1.4305, 1.2828, 1.4318, 1.3214, 1.4722], + device='cuda:3'), covar=tensor([0.1038, 0.1779, 0.1434, 0.1086, 0.1735, 0.0719, 0.1417, 0.0808], + device='cuda:3'), in_proj_covar=tensor([0.0233, 0.0350, 0.0267, 0.0237, 0.0308, 0.0234, 0.0269, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:03:17,632 INFO [train.py:903] (3/4) Epoch 1, batch 6350, loss[loss=0.3828, simple_loss=0.4246, pruned_loss=0.1704, over 19673.00 frames. ], tot_loss[loss=0.3941, simple_loss=0.4174, pruned_loss=0.1854, over 3825088.47 frames. ], batch size: 60, lr: 3.93e-02, grad_scale: 8.0 +2023-03-31 21:03:18,874 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6352.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:03:58,112 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6383.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:04:00,298 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0524, 1.1788, 0.8796, 1.1149, 0.9065, 1.1327, 0.8987, 1.0655], + device='cuda:3'), covar=tensor([0.1247, 0.1555, 0.1849, 0.1172, 0.1661, 0.0815, 0.1662, 0.0939], + device='cuda:3'), in_proj_covar=tensor([0.0235, 0.0351, 0.0270, 0.0239, 0.0307, 0.0236, 0.0273, 0.0212], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:04:07,474 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-31 21:04:08,228 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0020, 0.9767, 1.6674, 1.1625, 2.1930, 2.2063, 2.4273, 1.3984], + device='cuda:3'), covar=tensor([0.1524, 0.1923, 0.1134, 0.1545, 0.0713, 0.0756, 0.0762, 0.1359], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0299, 0.0282, 0.0301, 0.0318, 0.0262, 0.0353, 0.0314], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:04:18,895 INFO [train.py:903] (3/4) Epoch 1, batch 6400, loss[loss=0.4386, simple_loss=0.455, pruned_loss=0.2111, over 19272.00 frames. ], tot_loss[loss=0.3944, simple_loss=0.4186, pruned_loss=0.1851, over 3818808.15 frames. ], batch size: 66, lr: 3.92e-02, grad_scale: 8.0 +2023-03-31 21:04:24,491 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6405.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:04:27,870 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.880e+02 9.359e+02 1.206e+03 1.547e+03 5.333e+03, threshold=2.412e+03, percent-clipped=7.0 +2023-03-31 21:04:28,298 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6408.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:05:19,064 INFO [train.py:903] (3/4) Epoch 1, batch 6450, loss[loss=0.4477, simple_loss=0.4663, pruned_loss=0.2145, over 19757.00 frames. ], tot_loss[loss=0.3925, simple_loss=0.4173, pruned_loss=0.1838, over 3819062.82 frames. ], batch size: 63, lr: 3.91e-02, grad_scale: 8.0 +2023-03-31 21:05:23,287 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-31 21:05:39,909 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:05:55,637 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6480.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:01,942 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6485.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:05,061 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 21:06:05,473 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6488.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:15,204 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6497.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:21,207 INFO [train.py:903] (3/4) Epoch 1, batch 6500, loss[loss=0.4721, simple_loss=0.4585, pruned_loss=0.2429, over 19866.00 frames. ], tot_loss[loss=0.392, simple_loss=0.4169, pruned_loss=0.1835, over 3820790.86 frames. ], batch size: 52, lr: 3.90e-02, grad_scale: 8.0 +2023-03-31 21:06:25,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-03-31 21:06:28,780 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+02 9.704e+02 1.201e+03 1.443e+03 2.205e+03, threshold=2.402e+03, percent-clipped=0.0 +2023-03-31 21:06:35,156 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6513.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:43,913 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6520.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:06:58,411 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:21,802 INFO [train.py:903] (3/4) Epoch 1, batch 6550, loss[loss=0.4512, simple_loss=0.4533, pruned_loss=0.2245, over 13473.00 frames. ], tot_loss[loss=0.3946, simple_loss=0.4188, pruned_loss=0.1852, over 3813824.29 frames. ], batch size: 135, lr: 3.89e-02, grad_scale: 8.0 +2023-03-31 21:07:39,353 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6565.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:07:48,423 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6573.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:10,379 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6591.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:22,326 INFO [train.py:903] (3/4) Epoch 1, batch 6600, loss[loss=0.4163, simple_loss=0.4509, pruned_loss=0.1909, over 19474.00 frames. ], tot_loss[loss=0.3946, simple_loss=0.4195, pruned_loss=0.1849, over 3822363.20 frames. ], batch size: 64, lr: 3.89e-02, grad_scale: 16.0 +2023-03-31 21:08:31,045 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.089e+02 8.736e+02 1.082e+03 1.231e+03 3.386e+03, threshold=2.164e+03, percent-clipped=2.0 +2023-03-31 21:08:36,148 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6612.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:39,570 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5294, 1.5454, 1.5536, 2.1666, 3.1914, 1.0517, 1.9741, 3.2127], + device='cuda:3'), covar=tensor([0.0314, 0.2714, 0.2943, 0.1474, 0.0437, 0.2710, 0.1217, 0.0389], + device='cuda:3'), in_proj_covar=tensor([0.0191, 0.0288, 0.0277, 0.0270, 0.0222, 0.0317, 0.0243, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:08:40,744 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6616.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:08:56,079 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6628.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:19,949 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:20,015 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6647.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:09:24,251 INFO [train.py:903] (3/4) Epoch 1, batch 6650, loss[loss=0.3407, simple_loss=0.3706, pruned_loss=0.1554, over 19753.00 frames. ], tot_loss[loss=0.3909, simple_loss=0.417, pruned_loss=0.1824, over 3825994.16 frames. 
], batch size: 45, lr: 3.88e-02, grad_scale: 4.0 +2023-03-31 21:09:59,912 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6680.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:09,254 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6688.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:10:23,707 INFO [train.py:903] (3/4) Epoch 1, batch 6700, loss[loss=0.4066, simple_loss=0.4352, pruned_loss=0.189, over 19676.00 frames. ], tot_loss[loss=0.3909, simple_loss=0.4167, pruned_loss=0.1825, over 3842432.06 frames. ], batch size: 58, lr: 3.87e-02, grad_scale: 4.0 +2023-03-31 21:10:35,447 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+02 8.354e+02 1.101e+03 1.693e+03 1.016e+04, threshold=2.202e+03, percent-clipped=16.0 +2023-03-31 21:10:51,620 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6723.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:15,021 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6743.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:20,797 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6748.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:11:23,805 INFO [train.py:903] (3/4) Epoch 1, batch 6750, loss[loss=0.4068, simple_loss=0.4389, pruned_loss=0.1873, over 19785.00 frames. ], tot_loss[loss=0.389, simple_loss=0.4152, pruned_loss=0.1814, over 3839690.94 frames. ], batch size: 56, lr: 3.86e-02, grad_scale: 4.0 +2023-03-31 21:11:51,653 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:18,954 INFO [train.py:903] (3/4) Epoch 1, batch 6800, loss[loss=0.4219, simple_loss=0.4485, pruned_loss=0.1976, over 19789.00 frames. ], tot_loss[loss=0.3877, simple_loss=0.4139, pruned_loss=0.1807, over 3837967.15 frames. ], batch size: 56, lr: 3.85e-02, grad_scale: 8.0 +2023-03-31 21:12:19,295 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6801.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:12:28,859 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.487e+02 9.125e+02 1.072e+03 1.412e+03 3.162e+03, threshold=2.143e+03, percent-clipped=4.0 +2023-03-31 21:12:43,734 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6824.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:03,112 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 21:13:04,135 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 21:13:06,750 INFO [train.py:903] (3/4) Epoch 2, batch 0, loss[loss=0.4173, simple_loss=0.4129, pruned_loss=0.2109, over 19394.00 frames. ], tot_loss[loss=0.4173, simple_loss=0.4129, pruned_loss=0.2109, over 19394.00 frames. 
], batch size: 47, lr: 3.77e-02, grad_scale: 8.0 +2023-03-31 21:13:06,751 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 21:13:16,045 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1283, 1.4364, 1.2343, 1.1810, 1.2767, 0.7777, 0.7814, 1.3436], + device='cuda:3'), covar=tensor([0.1069, 0.0516, 0.1056, 0.0815, 0.0828, 0.1405, 0.1369, 0.0629], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0165, 0.0240, 0.0248, 0.0173, 0.0275, 0.0263, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:13:18,413 INFO [train.py:937] (3/4) Epoch 2, validation: loss=0.2802, simple_loss=0.3637, pruned_loss=0.09835, over 944034.00 frames. +2023-03-31 21:13:18,415 INFO [train.py:938] (3/4) Maximum memory allocated so far is 17850MB +2023-03-31 21:13:18,572 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6829.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:13:28,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 21:14:06,591 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6868.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:20,893 INFO [train.py:903] (3/4) Epoch 2, batch 50, loss[loss=0.459, simple_loss=0.463, pruned_loss=0.2276, over 19672.00 frames. ], tot_loss[loss=0.3856, simple_loss=0.4121, pruned_loss=0.1796, over 866670.85 frames. ], batch size: 59, lr: 3.76e-02, grad_scale: 8.0 +2023-03-31 21:14:21,609 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-31 21:14:37,451 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6893.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:41,583 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6896.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:14:43,759 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.5238, 5.0981, 3.0291, 4.6307, 2.0066, 5.2525, 4.9111, 5.0963], + device='cuda:3'), covar=tensor([0.0380, 0.0774, 0.1800, 0.0492, 0.2956, 0.0602, 0.0413, 0.0453], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0242, 0.0279, 0.0236, 0.0304, 0.0235, 0.0182, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 21:14:49,464 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6903.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:14:54,417 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 21:14:57,929 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.488e+02 9.076e+02 1.150e+03 1.515e+03 2.802e+03, threshold=2.301e+03, percent-clipped=3.0 +2023-03-31 21:15:19,073 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6927.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,161 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6928.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:20,722 INFO [train.py:903] (3/4) Epoch 2, batch 100, loss[loss=0.4472, simple_loss=0.4695, pruned_loss=0.2125, over 19297.00 frames. ], tot_loss[loss=0.3893, simple_loss=0.4142, pruned_loss=0.1822, over 1530710.39 frames. 
], batch size: 66, lr: 3.75e-02, grad_scale: 8.0 +2023-03-31 21:15:30,198 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6936.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:32,200 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 21:15:33,673 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6939.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,467 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:15:39,538 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6944.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:01,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6961.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:10,937 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6969.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:15,271 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3858, 0.9155, 1.3150, 1.2592, 2.1813, 1.0417, 1.7694, 1.9858], + device='cuda:3'), covar=tensor([0.0439, 0.2663, 0.2547, 0.1614, 0.0536, 0.1994, 0.0953, 0.0637], + device='cuda:3'), in_proj_covar=tensor([0.0189, 0.0286, 0.0273, 0.0265, 0.0224, 0.0315, 0.0242, 0.0231], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:16:23,040 INFO [train.py:903] (3/4) Epoch 2, batch 150, loss[loss=0.4502, simple_loss=0.458, pruned_loss=0.2212, over 19687.00 frames. ], tot_loss[loss=0.3877, simple_loss=0.4137, pruned_loss=0.1809, over 2037730.72 frames. ], batch size: 59, lr: 3.74e-02, grad_scale: 4.0 +2023-03-31 21:16:38,556 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6991.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:16:49,270 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6999.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:03,027 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.870e+02 9.855e+02 1.288e+03 4.108e+03, threshold=1.971e+03, percent-clipped=4.0 +2023-03-31 21:17:19,729 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7024.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:17:24,043 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 21:17:25,199 INFO [train.py:903] (3/4) Epoch 2, batch 200, loss[loss=0.436, simple_loss=0.4566, pruned_loss=0.2077, over 19075.00 frames. ], tot_loss[loss=0.3865, simple_loss=0.4133, pruned_loss=0.1798, over 2420734.94 frames. ], batch size: 69, lr: 3.73e-02, grad_scale: 4.0 +2023-03-31 21:17:41,272 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-31 21:18:11,622 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7066.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:18:29,124 INFO [train.py:903] (3/4) Epoch 2, batch 250, loss[loss=0.4169, simple_loss=0.4216, pruned_loss=0.2061, over 19476.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.4125, pruned_loss=0.1791, over 2736844.66 frames. 
], batch size: 49, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:19:03,872 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:19:09,458 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 7.747e+02 9.740e+02 1.157e+03 2.695e+03, threshold=1.948e+03, percent-clipped=1.0 +2023-03-31 21:19:33,825 INFO [train.py:903] (3/4) Epoch 2, batch 300, loss[loss=0.4502, simple_loss=0.4594, pruned_loss=0.2205, over 19785.00 frames. ], tot_loss[loss=0.3801, simple_loss=0.4093, pruned_loss=0.1755, over 2979747.81 frames. ], batch size: 56, lr: 3.72e-02, grad_scale: 4.0 +2023-03-31 21:20:15,178 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5535, 1.4816, 1.2091, 1.5666, 1.7563, 1.7210, 1.4072, 1.7338], + device='cuda:3'), covar=tensor([0.0945, 0.1849, 0.1577, 0.1232, 0.1422, 0.0587, 0.1357, 0.0721], + device='cuda:3'), in_proj_covar=tensor([0.0237, 0.0368, 0.0283, 0.0247, 0.0314, 0.0245, 0.0271, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:20:35,441 INFO [train.py:903] (3/4) Epoch 2, batch 350, loss[loss=0.3833, simple_loss=0.4092, pruned_loss=0.1787, over 19782.00 frames. ], tot_loss[loss=0.382, simple_loss=0.4104, pruned_loss=0.1768, over 3179632.54 frames. ], batch size: 56, lr: 3.71e-02, grad_scale: 4.0 +2023-03-31 21:20:37,924 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7181.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:20:39,487 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-31 21:20:39,881 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 21:20:40,463 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-31 21:20:55,887 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7195.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:01,703 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7200.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:15,789 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 9.781e+02 1.245e+03 1.512e+03 3.081e+03, threshold=2.489e+03, percent-clipped=8.0 +2023-03-31 21:21:26,543 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:32,339 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7225.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:21:37,131 INFO [train.py:903] (3/4) Epoch 2, batch 400, loss[loss=0.5256, simple_loss=0.5042, pruned_loss=0.2735, over 18174.00 frames. ], tot_loss[loss=0.3846, simple_loss=0.4122, pruned_loss=0.1785, over 3319514.48 frames. 
], batch size: 83, lr: 3.70e-02, grad_scale: 8.0 +2023-03-31 21:21:37,588 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4377, 2.3143, 1.7167, 1.8049, 1.7993, 1.0248, 1.0098, 1.7947], + device='cuda:3'), covar=tensor([0.1070, 0.0383, 0.0934, 0.0654, 0.0724, 0.1316, 0.1166, 0.0618], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0164, 0.0245, 0.0239, 0.0174, 0.0270, 0.0256, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:21:51,122 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7240.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:22:30,123 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7271.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:22:40,128 INFO [train.py:903] (3/4) Epoch 2, batch 450, loss[loss=0.3501, simple_loss=0.401, pruned_loss=0.1497, over 19786.00 frames. ], tot_loss[loss=0.3831, simple_loss=0.4104, pruned_loss=0.1779, over 3438466.19 frames. ], batch size: 56, lr: 3.69e-02, grad_scale: 8.0 +2023-03-31 21:22:58,507 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:23:14,155 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 21:23:15,293 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 21:23:19,470 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.877e+02 8.763e+02 1.192e+03 1.491e+03 2.950e+03, threshold=2.384e+03, percent-clipped=4.0 +2023-03-31 21:23:43,126 INFO [train.py:903] (3/4) Epoch 2, batch 500, loss[loss=0.4153, simple_loss=0.4385, pruned_loss=0.1961, over 19326.00 frames. ], tot_loss[loss=0.3797, simple_loss=0.4078, pruned_loss=0.1758, over 3534038.61 frames. ], batch size: 66, lr: 3.68e-02, grad_scale: 8.0 +2023-03-31 21:24:14,696 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7355.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:24:23,792 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7362.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:37,512 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8752, 3.7589, 4.3775, 4.3129, 1.4710, 3.7663, 3.6683, 3.7523], + device='cuda:3'), covar=tensor([0.0280, 0.0561, 0.0450, 0.0228, 0.3129, 0.0226, 0.0338, 0.0923], + device='cuda:3'), in_proj_covar=tensor([0.0216, 0.0255, 0.0323, 0.0218, 0.0396, 0.0155, 0.0231, 0.0332], + device='cuda:3'), out_proj_covar=tensor([1.3372e-04, 1.5776e-04, 2.0237e-04, 1.2543e-04, 2.1244e-04, 9.9618e-05, + 1.3496e-04, 1.8547e-04], device='cuda:3') +2023-03-31 21:24:45,285 INFO [train.py:903] (3/4) Epoch 2, batch 550, loss[loss=0.317, simple_loss=0.361, pruned_loss=0.1365, over 19615.00 frames. ], tot_loss[loss=0.3807, simple_loss=0.4091, pruned_loss=0.1762, over 3594453.31 frames. 
], batch size: 50, lr: 3.67e-02, grad_scale: 8.0 +2023-03-31 21:24:53,947 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7386.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:24:55,120 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7387.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:25,335 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7410.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:25:26,118 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+02 9.206e+02 1.127e+03 1.377e+03 2.659e+03, threshold=2.254e+03, percent-clipped=2.0 +2023-03-31 21:25:36,563 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7799, 1.1234, 1.0493, 1.6953, 1.4071, 1.8331, 1.6925, 1.5609], + device='cuda:3'), covar=tensor([0.1003, 0.1643, 0.1816, 0.1513, 0.1922, 0.1103, 0.1621, 0.1144], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0309, 0.0304, 0.0329, 0.0378, 0.0276, 0.0343, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 21:25:43,322 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:25:47,657 INFO [train.py:903] (3/4) Epoch 2, batch 600, loss[loss=0.3452, simple_loss=0.3808, pruned_loss=0.1548, over 19835.00 frames. ], tot_loss[loss=0.3822, simple_loss=0.4106, pruned_loss=0.1769, over 3649469.69 frames. ], batch size: 52, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:26:29,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 21:26:50,581 INFO [train.py:903] (3/4) Epoch 2, batch 650, loss[loss=0.3196, simple_loss=0.3612, pruned_loss=0.139, over 19776.00 frames. ], tot_loss[loss=0.38, simple_loss=0.4094, pruned_loss=0.1754, over 3695484.94 frames. ], batch size: 48, lr: 3.66e-02, grad_scale: 8.0 +2023-03-31 21:27:30,830 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 8.519e+02 1.041e+03 1.431e+03 3.840e+03, threshold=2.082e+03, percent-clipped=3.0 +2023-03-31 21:27:49,412 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7525.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:27:49,645 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7525.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 21:27:53,960 INFO [train.py:903] (3/4) Epoch 2, batch 700, loss[loss=0.3297, simple_loss=0.3802, pruned_loss=0.1396, over 19855.00 frames. ], tot_loss[loss=0.3814, simple_loss=0.4103, pruned_loss=0.1763, over 3725621.49 frames. ], batch size: 52, lr: 3.65e-02, grad_scale: 8.0 +2023-03-31 21:28:56,971 INFO [train.py:903] (3/4) Epoch 2, batch 750, loss[loss=0.336, simple_loss=0.3715, pruned_loss=0.1503, over 19717.00 frames. ], tot_loss[loss=0.382, simple_loss=0.4105, pruned_loss=0.1767, over 3752740.02 frames. 
], batch size: 45, lr: 3.64e-02, grad_scale: 8.0 +2023-03-31 21:29:24,871 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8780, 4.3896, 5.6575, 5.3985, 1.7649, 5.1474, 4.6216, 4.7584], + device='cuda:3'), covar=tensor([0.0255, 0.0502, 0.0336, 0.0196, 0.3306, 0.0158, 0.0281, 0.0984], + device='cuda:3'), in_proj_covar=tensor([0.0226, 0.0261, 0.0330, 0.0224, 0.0398, 0.0162, 0.0234, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 21:29:35,964 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.266e+02 8.676e+02 1.032e+03 1.220e+03 3.020e+03, threshold=2.064e+03, percent-clipped=5.0 +2023-03-31 21:29:36,446 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7611.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:29:55,639 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7341, 1.5375, 1.2996, 1.9006, 1.3702, 1.6242, 1.6051, 2.0160], + device='cuda:3'), covar=tensor([0.0875, 0.1940, 0.1535, 0.1173, 0.1787, 0.0723, 0.1270, 0.0560], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0370, 0.0290, 0.0253, 0.0327, 0.0259, 0.0285, 0.0215], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:29:58,618 INFO [train.py:903] (3/4) Epoch 2, batch 800, loss[loss=0.2923, simple_loss=0.3382, pruned_loss=0.1231, over 19420.00 frames. ], tot_loss[loss=0.38, simple_loss=0.4097, pruned_loss=0.1752, over 3778506.89 frames. ], batch size: 48, lr: 3.63e-02, grad_scale: 8.0 +2023-03-31 21:30:01,397 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5842, 1.1754, 1.0079, 1.6388, 1.1035, 1.5891, 1.5799, 1.3572], + device='cuda:3'), covar=tensor([0.1099, 0.1652, 0.2028, 0.1189, 0.1867, 0.1292, 0.1553, 0.1063], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0303, 0.0311, 0.0329, 0.0378, 0.0274, 0.0340, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 21:30:08,340 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7636.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:30:09,191 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7637.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:11,716 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0970, 1.1889, 2.3010, 1.3001, 3.0514, 3.5116, 3.7047, 1.9009], + device='cuda:3'), covar=tensor([0.1449, 0.1951, 0.1334, 0.1439, 0.0965, 0.0660, 0.1155, 0.1856], + device='cuda:3'), in_proj_covar=tensor([0.0314, 0.0321, 0.0304, 0.0314, 0.0352, 0.0286, 0.0408, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 21:30:12,839 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7640.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,289 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:15,427 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7642.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:30:16,182 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-03-31 21:30:46,842 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7667.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:31:01,249 INFO [train.py:903] (3/4) Epoch 2, batch 850, loss[loss=0.3233, simple_loss=0.3818, pruned_loss=0.1324, over 19662.00 frames. ], tot_loss[loss=0.3791, simple_loss=0.4093, pruned_loss=0.1744, over 3792890.80 frames. ], batch size: 60, lr: 3.62e-02, grad_scale: 8.0 +2023-03-31 21:31:41,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+02 9.016e+02 1.057e+03 1.450e+03 5.160e+03, threshold=2.114e+03, percent-clipped=6.0 +2023-03-31 21:31:56,582 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 21:32:02,491 INFO [train.py:903] (3/4) Epoch 2, batch 900, loss[loss=0.4582, simple_loss=0.4662, pruned_loss=0.2251, over 19421.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.4096, pruned_loss=0.1754, over 3807434.63 frames. ], batch size: 70, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:32:32,765 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7752.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:43,849 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:32:53,091 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7769.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:33:06,313 INFO [train.py:903] (3/4) Epoch 2, batch 950, loss[loss=0.3565, simple_loss=0.3804, pruned_loss=0.1663, over 19783.00 frames. ], tot_loss[loss=0.382, simple_loss=0.4107, pruned_loss=0.1767, over 3806376.64 frames. ], batch size: 48, lr: 3.61e-02, grad_scale: 4.0 +2023-03-31 21:33:09,127 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7781.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:33:10,165 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0233, 1.2765, 1.2924, 2.1041, 1.5942, 2.0052, 2.0946, 1.7940], + device='cuda:3'), covar=tensor([0.0795, 0.1645, 0.1752, 0.1202, 0.1743, 0.1079, 0.1360, 0.0945], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0305, 0.0304, 0.0329, 0.0377, 0.0272, 0.0338, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 21:33:10,898 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 21:33:24,393 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-31 21:33:40,022 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7806.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 21:33:46,539 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.660e+02 8.698e+02 1.089e+03 1.494e+03 2.916e+03, threshold=2.178e+03, percent-clipped=6.0 +2023-03-31 21:33:58,938 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7820.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:34:07,586 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6638, 4.2300, 2.4737, 3.8680, 1.2334, 4.0494, 3.7953, 3.9796], + device='cuda:3'), covar=tensor([0.0621, 0.1190, 0.2114, 0.0653, 0.3653, 0.0865, 0.0749, 0.0605], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0263, 0.0294, 0.0244, 0.0310, 0.0249, 0.0198, 0.0216], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 21:34:09,637 INFO [train.py:903] (3/4) Epoch 2, batch 1000, loss[loss=0.3106, simple_loss=0.3475, pruned_loss=0.1368, over 17381.00 frames. ], tot_loss[loss=0.3801, simple_loss=0.4091, pruned_loss=0.1755, over 3800221.92 frames. ], batch size: 38, lr: 3.60e-02, grad_scale: 4.0 +2023-03-31 21:35:04,945 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 21:35:11,840 INFO [train.py:903] (3/4) Epoch 2, batch 1050, loss[loss=0.4215, simple_loss=0.4428, pruned_loss=0.2001, over 19590.00 frames. ], tot_loss[loss=0.3786, simple_loss=0.4082, pruned_loss=0.1745, over 3811575.54 frames. ], batch size: 61, lr: 3.59e-02, grad_scale: 4.0 +2023-03-31 21:35:18,873 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7884.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:31,653 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0640, 1.8100, 1.8420, 2.9521, 2.1156, 3.1795, 2.1826, 1.7119], + device='cuda:3'), covar=tensor([0.1089, 0.0865, 0.0598, 0.0450, 0.0985, 0.0243, 0.1158, 0.0988], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0233, 0.0254, 0.0322, 0.0324, 0.0171, 0.0346, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:35:33,753 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7896.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:35:46,820 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-03-31 21:35:53,415 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+02 8.878e+02 9.952e+02 1.228e+03 3.126e+03, threshold=1.990e+03, percent-clipped=5.0 +2023-03-31 21:35:56,209 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4602, 0.9740, 1.3893, 1.3479, 2.2093, 1.1077, 1.8661, 2.1429], + device='cuda:3'), covar=tensor([0.0476, 0.2719, 0.2340, 0.1534, 0.0496, 0.1825, 0.0801, 0.0609], + device='cuda:3'), in_proj_covar=tensor([0.0206, 0.0294, 0.0276, 0.0270, 0.0233, 0.0311, 0.0254, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:36:05,137 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7921.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:36:14,159 INFO [train.py:903] (3/4) Epoch 2, batch 1100, loss[loss=0.3416, simple_loss=0.3868, pruned_loss=0.1482, over 19532.00 frames. ], tot_loss[loss=0.3788, simple_loss=0.4087, pruned_loss=0.1745, over 3825707.18 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 4.0 +2023-03-31 21:37:16,171 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4786, 0.7951, 1.2288, 0.7165, 2.5050, 2.5359, 2.3335, 2.7061], + device='cuda:3'), covar=tensor([0.2149, 0.5067, 0.4146, 0.3313, 0.0554, 0.0329, 0.0579, 0.0291], + device='cuda:3'), in_proj_covar=tensor([0.0298, 0.0282, 0.0325, 0.0295, 0.0197, 0.0109, 0.0185, 0.0114], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 21:37:16,834 INFO [train.py:903] (3/4) Epoch 2, batch 1150, loss[loss=0.5199, simple_loss=0.4986, pruned_loss=0.2706, over 19561.00 frames. ], tot_loss[loss=0.3808, simple_loss=0.4102, pruned_loss=0.1757, over 3822894.69 frames. ], batch size: 61, lr: 3.57e-02, grad_scale: 4.0 +2023-03-31 21:37:26,764 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7986.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:54,512 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8008.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:37:58,746 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+02 8.681e+02 1.035e+03 1.273e+03 2.854e+03, threshold=2.070e+03, percent-clipped=5.0 +2023-03-31 21:38:21,515 INFO [train.py:903] (3/4) Epoch 2, batch 1200, loss[loss=0.3931, simple_loss=0.4215, pruned_loss=0.1823, over 19485.00 frames. ], tot_loss[loss=0.3776, simple_loss=0.4083, pruned_loss=0.1735, over 3840310.24 frames. ], batch size: 64, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:38:26,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8033.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:38:35,980 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-31 21:38:52,456 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-03-31 21:39:22,161 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-31 21:39:22,669 INFO [train.py:903] (3/4) Epoch 2, batch 1250, loss[loss=0.3593, simple_loss=0.393, pruned_loss=0.1628, over 19681.00 frames. ], tot_loss[loss=0.3796, simple_loss=0.4098, pruned_loss=0.1747, over 3853310.41 frames. 
], batch size: 53, lr: 3.56e-02, grad_scale: 8.0 +2023-03-31 21:39:35,508 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7722, 3.6852, 3.9155, 4.0320, 1.5044, 3.7296, 3.4378, 3.1589], + device='cuda:3'), covar=tensor([0.0473, 0.0764, 0.0877, 0.0490, 0.3704, 0.0425, 0.0585, 0.1580], + device='cuda:3'), in_proj_covar=tensor([0.0227, 0.0248, 0.0327, 0.0225, 0.0383, 0.0157, 0.0227, 0.0342], + device='cuda:3'), out_proj_covar=tensor([1.3801e-04, 1.5211e-04, 2.0109e-04, 1.2689e-04, 2.0383e-04, 9.8941e-05, + 1.2860e-04, 1.8633e-04], device='cuda:3') +2023-03-31 21:39:51,765 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8101.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:39:57,187 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:40:05,139 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.392e+02 1.041e+03 1.254e+03 3.427e+03, threshold=2.083e+03, percent-clipped=3.0 +2023-03-31 21:40:25,464 INFO [train.py:903] (3/4) Epoch 2, batch 1300, loss[loss=0.3172, simple_loss=0.3636, pruned_loss=0.1354, over 19763.00 frames. ], tot_loss[loss=0.377, simple_loss=0.4078, pruned_loss=0.1731, over 3847918.91 frames. ], batch size: 45, lr: 3.55e-02, grad_scale: 8.0 +2023-03-31 21:40:40,991 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8140.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:40:47,742 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-31 21:40:49,843 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4922, 2.3925, 1.7983, 1.8659, 2.0357, 1.0856, 0.7683, 1.5422], + device='cuda:3'), covar=tensor([0.1136, 0.0407, 0.0948, 0.0555, 0.0802, 0.1416, 0.1255, 0.0822], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0168, 0.0251, 0.0231, 0.0171, 0.0274, 0.0255, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:41:09,973 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8164.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:11,430 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8165.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:41:28,089 INFO [train.py:903] (3/4) Epoch 2, batch 1350, loss[loss=0.2886, simple_loss=0.3368, pruned_loss=0.1202, over 19392.00 frames. ], tot_loss[loss=0.3798, simple_loss=0.4098, pruned_loss=0.1749, over 3827396.15 frames. ], batch size: 47, lr: 3.54e-02, grad_scale: 8.0 +2023-03-31 21:42:08,820 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.809e+02 9.088e+02 1.109e+03 1.527e+03 2.312e+03, threshold=2.218e+03, percent-clipped=6.0 +2023-03-31 21:42:20,861 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:42:30,891 INFO [train.py:903] (3/4) Epoch 2, batch 1400, loss[loss=0.393, simple_loss=0.4269, pruned_loss=0.1796, over 19073.00 frames. ], tot_loss[loss=0.3785, simple_loss=0.4087, pruned_loss=0.1742, over 3809277.00 frames. ], batch size: 69, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:32,692 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-03-31 21:43:33,840 INFO [train.py:903] (3/4) Epoch 2, batch 1450, loss[loss=0.3796, simple_loss=0.4139, pruned_loss=0.1726, over 19681.00 frames. ], tot_loss[loss=0.3765, simple_loss=0.4077, pruned_loss=0.1727, over 3820109.80 frames. ], batch size: 59, lr: 3.53e-02, grad_scale: 8.0 +2023-03-31 21:43:34,161 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8279.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:43:39,548 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8283.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:44:15,432 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+02 8.724e+02 1.078e+03 1.353e+03 2.729e+03, threshold=2.156e+03, percent-clipped=3.0 +2023-03-31 21:44:34,914 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8798, 1.2502, 1.3649, 1.7278, 2.5426, 1.2137, 1.9234, 2.5856], + device='cuda:3'), covar=tensor([0.0500, 0.2747, 0.2757, 0.1674, 0.0548, 0.2205, 0.1057, 0.0525], + device='cuda:3'), in_proj_covar=tensor([0.0221, 0.0301, 0.0285, 0.0276, 0.0245, 0.0318, 0.0258, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:44:35,647 INFO [train.py:903] (3/4) Epoch 2, batch 1500, loss[loss=0.4194, simple_loss=0.4209, pruned_loss=0.209, over 19829.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4086, pruned_loss=0.1735, over 3818912.60 frames. ], batch size: 52, lr: 3.52e-02, grad_scale: 8.0 +2023-03-31 21:45:10,940 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8357.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:45:38,215 INFO [train.py:903] (3/4) Epoch 2, batch 1550, loss[loss=0.3737, simple_loss=0.399, pruned_loss=0.1742, over 19595.00 frames. ], tot_loss[loss=0.3794, simple_loss=0.4092, pruned_loss=0.1748, over 3826692.41 frames. ], batch size: 50, lr: 3.51e-02, grad_scale: 8.0 +2023-03-31 21:45:43,354 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8382.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:46:19,241 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.712e+02 9.525e+02 1.175e+03 1.582e+03 3.285e+03, threshold=2.351e+03, percent-clipped=5.0 +2023-03-31 21:46:41,074 INFO [train.py:903] (3/4) Epoch 2, batch 1600, loss[loss=0.3237, simple_loss=0.3647, pruned_loss=0.1413, over 19425.00 frames. ], tot_loss[loss=0.3763, simple_loss=0.4072, pruned_loss=0.1728, over 3836434.15 frames. ], batch size: 48, lr: 3.50e-02, grad_scale: 8.0 +2023-03-31 21:47:00,247 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1424, 2.6974, 1.8126, 2.3772, 2.0078, 1.7866, 0.3493, 2.0155], + device='cuda:3'), covar=tensor([0.0583, 0.0488, 0.0442, 0.0497, 0.0834, 0.0988, 0.1504, 0.0961], + device='cuda:3'), in_proj_covar=tensor([0.0195, 0.0202, 0.0192, 0.0230, 0.0270, 0.0239, 0.0244, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:47:02,107 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-03-31 21:47:39,685 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:47:42,847 INFO [train.py:903] (3/4) Epoch 2, batch 1650, loss[loss=0.4384, simple_loss=0.4508, pruned_loss=0.213, over 19576.00 frames. ], tot_loss[loss=0.3755, simple_loss=0.4069, pruned_loss=0.1721, over 3836091.26 frames. ], batch size: 61, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:10,082 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:48:23,196 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.971e+02 8.791e+02 1.048e+03 1.403e+03 4.696e+03, threshold=2.096e+03, percent-clipped=2.0 +2023-03-31 21:48:44,128 INFO [train.py:903] (3/4) Epoch 2, batch 1700, loss[loss=0.3149, simple_loss=0.3516, pruned_loss=0.1391, over 19070.00 frames. ], tot_loss[loss=0.376, simple_loss=0.4068, pruned_loss=0.1726, over 3841664.44 frames. ], batch size: 42, lr: 3.49e-02, grad_scale: 8.0 +2023-03-31 21:48:52,096 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8535.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,172 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8560.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:49:23,926 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-03-31 21:49:46,288 INFO [train.py:903] (3/4) Epoch 2, batch 1750, loss[loss=0.3733, simple_loss=0.4144, pruned_loss=0.1661, over 19593.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.4063, pruned_loss=0.172, over 3841909.39 frames. ], batch size: 61, lr: 3.48e-02, grad_scale: 8.0 +2023-03-31 21:50:27,274 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.711e+02 8.589e+02 1.062e+03 1.367e+03 2.706e+03, threshold=2.124e+03, percent-clipped=6.0 +2023-03-31 21:50:47,026 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8627.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:50:48,939 INFO [train.py:903] (3/4) Epoch 2, batch 1800, loss[loss=0.3533, simple_loss=0.3675, pruned_loss=0.1695, over 19711.00 frames. ], tot_loss[loss=0.3736, simple_loss=0.4054, pruned_loss=0.1709, over 3842848.58 frames. ], batch size: 46, lr: 3.47e-02, grad_scale: 8.0 +2023-03-31 21:51:48,931 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-03-31 21:51:52,354 INFO [train.py:903] (3/4) Epoch 2, batch 1850, loss[loss=0.3588, simple_loss=0.4053, pruned_loss=0.1562, over 19709.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.4064, pruned_loss=0.1713, over 3830969.14 frames. ], batch size: 59, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:51:59,655 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7992, 1.3034, 1.1546, 1.6644, 1.5261, 2.0382, 2.1311, 2.1700], + device='cuda:3'), covar=tensor([0.0984, 0.1558, 0.1860, 0.1627, 0.1918, 0.0986, 0.1375, 0.0771], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0296, 0.0302, 0.0320, 0.0364, 0.0264, 0.0334, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 21:52:26,732 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-03-31 21:52:32,409 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.683e+02 8.545e+02 1.022e+03 1.402e+03 2.945e+03, threshold=2.044e+03, percent-clipped=4.0 +2023-03-31 21:52:53,431 INFO [train.py:903] (3/4) Epoch 2, batch 1900, loss[loss=0.3865, simple_loss=0.4006, pruned_loss=0.1862, over 19619.00 frames. ], tot_loss[loss=0.3749, simple_loss=0.4068, pruned_loss=0.1715, over 3833770.36 frames. ], batch size: 50, lr: 3.46e-02, grad_scale: 8.0 +2023-03-31 21:52:53,873 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7155, 3.6950, 2.0826, 2.5081, 3.0110, 1.2657, 1.1765, 1.6536], + device='cuda:3'), covar=tensor([0.1799, 0.0366, 0.1202, 0.0765, 0.0790, 0.1656, 0.1534, 0.1341], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0177, 0.0266, 0.0247, 0.0174, 0.0291, 0.0266, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 21:53:10,060 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:13,049 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-03-31 21:53:18,232 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9497, 1.7015, 1.9477, 1.9141, 2.8868, 3.7151, 3.7604, 3.9649], + device='cuda:3'), covar=tensor([0.1440, 0.2374, 0.2541, 0.1744, 0.0523, 0.0180, 0.0192, 0.0200], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0286, 0.0335, 0.0298, 0.0201, 0.0111, 0.0193, 0.0118], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 21:53:19,070 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-03-31 21:53:20,894 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 21:53:44,527 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-03-31 21:53:56,652 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8778.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:53:57,544 INFO [train.py:903] (3/4) Epoch 2, batch 1950, loss[loss=0.3827, simple_loss=0.411, pruned_loss=0.1772, over 19663.00 frames. ], tot_loss[loss=0.3725, simple_loss=0.405, pruned_loss=0.17, over 3833004.13 frames. ], batch size: 53, lr: 3.45e-02, grad_scale: 8.0 +2023-03-31 21:54:38,567 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.007e+02 8.441e+02 1.013e+03 1.280e+03 2.038e+03, threshold=2.026e+03, percent-clipped=0.0 +2023-03-31 21:55:00,982 INFO [train.py:903] (3/4) Epoch 2, batch 2000, loss[loss=0.3599, simple_loss=0.4056, pruned_loss=0.1571, over 19664.00 frames. ], tot_loss[loss=0.3724, simple_loss=0.4049, pruned_loss=0.1699, over 3833245.45 frames. ], batch size: 55, lr: 3.44e-02, grad_scale: 8.0 +2023-03-31 21:55:24,882 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-31 21:56:00,575 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-03-31 21:56:01,581 INFO [train.py:903] (3/4) Epoch 2, batch 2050, loss[loss=0.4867, simple_loss=0.474, pruned_loss=0.2497, over 18281.00 frames. 
], tot_loss[loss=0.3731, simple_loss=0.405, pruned_loss=0.1706, over 3842592.34 frames. ], batch size: 83, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:56:19,563 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-03-31 21:56:20,280 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.93 vs. limit=2.0 +2023-03-31 21:56:20,771 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-03-31 21:56:41,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-03-31 21:56:44,119 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.978e+02 1.029e+03 1.194e+03 1.427e+03 4.040e+03, threshold=2.389e+03, percent-clipped=7.0 +2023-03-31 21:57:05,506 INFO [train.py:903] (3/4) Epoch 2, batch 2100, loss[loss=0.3238, simple_loss=0.3709, pruned_loss=0.1384, over 19695.00 frames. ], tot_loss[loss=0.3725, simple_loss=0.4045, pruned_loss=0.1703, over 3826005.90 frames. ], batch size: 53, lr: 3.43e-02, grad_scale: 8.0 +2023-03-31 21:57:25,762 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8945.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:57:36,777 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-03-31 21:57:59,444 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-03-31 21:58:07,606 INFO [train.py:903] (3/4) Epoch 2, batch 2150, loss[loss=0.369, simple_loss=0.4095, pruned_loss=0.1643, over 19759.00 frames. ], tot_loss[loss=0.3707, simple_loss=0.4031, pruned_loss=0.1691, over 3817407.76 frames. ], batch size: 56, lr: 3.42e-02, grad_scale: 8.0 +2023-03-31 21:58:32,426 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8998.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:40,186 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9004.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:58:49,979 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+02 7.392e+02 9.171e+02 1.181e+03 2.165e+03, threshold=1.834e+03, percent-clipped=0.0 +2023-03-31 21:59:04,449 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9023.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 21:59:11,992 INFO [train.py:903] (3/4) Epoch 2, batch 2200, loss[loss=0.3652, simple_loss=0.4063, pruned_loss=0.162, over 17419.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.402, pruned_loss=0.1677, over 3811323.24 frames. ], batch size: 101, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 21:59:14,478 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9031.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:00:13,982 INFO [train.py:903] (3/4) Epoch 2, batch 2250, loss[loss=0.3809, simple_loss=0.4151, pruned_loss=0.1734, over 19756.00 frames. ], tot_loss[loss=0.3675, simple_loss=0.4014, pruned_loss=0.1669, over 3813662.23 frames. 
], batch size: 63, lr: 3.41e-02, grad_scale: 8.0 +2023-03-31 22:00:56,144 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.993e+02 8.727e+02 9.943e+02 1.293e+03 2.077e+03, threshold=1.989e+03, percent-clipped=4.0 +2023-03-31 22:01:08,152 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9122.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:01:17,886 INFO [train.py:903] (3/4) Epoch 2, batch 2300, loss[loss=0.31, simple_loss=0.3617, pruned_loss=0.1291, over 19685.00 frames. ], tot_loss[loss=0.365, simple_loss=0.3994, pruned_loss=0.1654, over 3818363.28 frames. ], batch size: 53, lr: 3.40e-02, grad_scale: 8.0 +2023-03-31 22:01:30,558 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-03-31 22:02:19,096 INFO [train.py:903] (3/4) Epoch 2, batch 2350, loss[loss=0.331, simple_loss=0.3639, pruned_loss=0.1491, over 19779.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.3993, pruned_loss=0.1656, over 3801445.00 frames. ], batch size: 48, lr: 3.39e-02, grad_scale: 8.0 +2023-03-31 22:03:00,732 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.988e+02 9.117e+02 1.090e+03 1.432e+03 2.529e+03, threshold=2.180e+03, percent-clipped=5.0 +2023-03-31 22:03:00,817 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-03-31 22:03:19,484 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-03-31 22:03:19,783 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1631, 2.7046, 1.9458, 2.7214, 2.0664, 2.2421, 0.4142, 2.1833], + device='cuda:3'), covar=tensor([0.0486, 0.0474, 0.0350, 0.0431, 0.0786, 0.0708, 0.1328, 0.0860], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0213, 0.0208, 0.0252, 0.0296, 0.0261, 0.0262, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:03:22,887 INFO [train.py:903] (3/4) Epoch 2, batch 2400, loss[loss=0.35, simple_loss=0.3716, pruned_loss=0.1642, over 16040.00 frames. ], tot_loss[loss=0.3675, simple_loss=0.4009, pruned_loss=0.1671, over 3796688.28 frames. ], batch size: 35, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:03:32,318 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9237.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:03:44,348 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.77 vs. limit=5.0 +2023-03-31 22:04:21,301 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1131, 1.3039, 2.1806, 1.4103, 2.9046, 3.1771, 3.1437, 1.8918], + device='cuda:3'), covar=tensor([0.1372, 0.1820, 0.1140, 0.1343, 0.0919, 0.0697, 0.1263, 0.1635], + device='cuda:3'), in_proj_covar=tensor([0.0334, 0.0360, 0.0333, 0.0341, 0.0387, 0.0305, 0.0454, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:04:24,304 INFO [train.py:903] (3/4) Epoch 2, batch 2450, loss[loss=0.3151, simple_loss=0.3587, pruned_loss=0.1357, over 19750.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.4017, pruned_loss=0.1679, over 3787199.90 frames. 
], batch size: 51, lr: 3.38e-02, grad_scale: 8.0 +2023-03-31 22:04:38,168 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9289.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:04:38,394 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0456, 1.2629, 1.8507, 1.7122, 2.9720, 4.4236, 4.6362, 4.8636], + device='cuda:3'), covar=tensor([0.1359, 0.2494, 0.2419, 0.1952, 0.0484, 0.0124, 0.0107, 0.0091], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0284, 0.0322, 0.0293, 0.0200, 0.0109, 0.0185, 0.0115], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 22:05:06,391 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 8.791e+02 1.096e+03 1.484e+03 3.289e+03, threshold=2.192e+03, percent-clipped=7.0 +2023-03-31 22:05:27,995 INFO [train.py:903] (3/4) Epoch 2, batch 2500, loss[loss=0.3862, simple_loss=0.4203, pruned_loss=0.176, over 19680.00 frames. ], tot_loss[loss=0.3664, simple_loss=0.4006, pruned_loss=0.1661, over 3805549.83 frames. ], batch size: 58, lr: 3.37e-02, grad_scale: 8.0 +2023-03-31 22:05:52,450 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9348.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:25,085 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9375.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:06:29,686 INFO [train.py:903] (3/4) Epoch 2, batch 2550, loss[loss=0.3701, simple_loss=0.4093, pruned_loss=0.1654, over 19613.00 frames. ], tot_loss[loss=0.3675, simple_loss=0.4014, pruned_loss=0.1669, over 3799750.72 frames. ], batch size: 57, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:06:54,757 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5416, 3.8409, 4.0885, 3.9991, 1.4371, 3.6276, 3.3019, 3.5671], + device='cuda:3'), covar=tensor([0.0324, 0.0393, 0.0397, 0.0228, 0.2751, 0.0234, 0.0315, 0.0855], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0273, 0.0363, 0.0252, 0.0410, 0.0172, 0.0247, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 22:07:01,818 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9404.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:07:11,545 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+02 8.134e+02 9.492e+02 1.275e+03 2.544e+03, threshold=1.898e+03, percent-clipped=3.0 +2023-03-31 22:07:18,176 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-31 22:07:25,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-03-31 22:07:33,275 INFO [train.py:903] (3/4) Epoch 2, batch 2600, loss[loss=0.3499, simple_loss=0.3977, pruned_loss=0.151, over 19766.00 frames. ], tot_loss[loss=0.3665, simple_loss=0.4004, pruned_loss=0.1663, over 3799818.01 frames. ], batch size: 56, lr: 3.36e-02, grad_scale: 8.0 +2023-03-31 22:07:36,955 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:14,545 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9463.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:23,778 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.19 vs. 
limit=2.0 +2023-03-31 22:08:34,690 INFO [train.py:903] (3/4) Epoch 2, batch 2650, loss[loss=0.3624, simple_loss=0.4008, pruned_loss=0.162, over 19664.00 frames. ], tot_loss[loss=0.3672, simple_loss=0.4007, pruned_loss=0.1669, over 3797857.30 frames. ], batch size: 53, lr: 3.35e-02, grad_scale: 8.0 +2023-03-31 22:08:49,063 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9490.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:52,759 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9493.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:08:53,571 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-03-31 22:09:16,685 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.258e+02 8.887e+02 1.023e+03 1.383e+03 3.476e+03, threshold=2.047e+03, percent-clipped=7.0 +2023-03-31 22:09:24,065 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9518.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:09:36,956 INFO [train.py:903] (3/4) Epoch 2, batch 2700, loss[loss=0.3468, simple_loss=0.3697, pruned_loss=0.1619, over 19070.00 frames. ], tot_loss[loss=0.366, simple_loss=0.3998, pruned_loss=0.1661, over 3808844.48 frames. ], batch size: 42, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:09:43,946 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5175, 3.7606, 4.0487, 3.8905, 1.3732, 3.6004, 3.2408, 3.5529], + device='cuda:3'), covar=tensor([0.0365, 0.0503, 0.0440, 0.0285, 0.3095, 0.0236, 0.0369, 0.0871], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0273, 0.0361, 0.0256, 0.0414, 0.0175, 0.0249, 0.0367], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 22:10:39,551 INFO [train.py:903] (3/4) Epoch 2, batch 2750, loss[loss=0.3817, simple_loss=0.4162, pruned_loss=0.1736, over 17543.00 frames. ], tot_loss[loss=0.3651, simple_loss=0.3997, pruned_loss=0.1653, over 3815945.62 frames. 
], batch size: 101, lr: 3.34e-02, grad_scale: 8.0 +2023-03-31 22:10:49,084 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1788, 0.8052, 1.1488, 0.4420, 2.3517, 2.0089, 1.7867, 2.1793], + device='cuda:3'), covar=tensor([0.1796, 0.3821, 0.3549, 0.3023, 0.0443, 0.0235, 0.0461, 0.0239], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0282, 0.0319, 0.0291, 0.0199, 0.0107, 0.0185, 0.0114], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 22:11:01,855 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5959, 0.9631, 1.1890, 1.8196, 1.6186, 1.5924, 2.0008, 1.4839], + device='cuda:3'), covar=tensor([0.1397, 0.2508, 0.2207, 0.1531, 0.1881, 0.1766, 0.1620, 0.1441], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0310, 0.0309, 0.0327, 0.0364, 0.0275, 0.0342, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 22:11:08,842 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9602.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:11:20,989 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.046e+02 9.047e+02 1.065e+03 1.297e+03 2.590e+03, threshold=2.130e+03, percent-clipped=3.0 +2023-03-31 22:11:43,321 INFO [train.py:903] (3/4) Epoch 2, batch 2800, loss[loss=0.4547, simple_loss=0.4562, pruned_loss=0.2266, over 13969.00 frames. ], tot_loss[loss=0.364, simple_loss=0.3992, pruned_loss=0.1644, over 3812090.15 frames. ], batch size: 137, lr: 3.33e-02, grad_scale: 8.0 +2023-03-31 22:12:20,976 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9660.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:41,317 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9676.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:12:44,618 INFO [train.py:903] (3/4) Epoch 2, batch 2850, loss[loss=0.3552, simple_loss=0.4002, pruned_loss=0.1551, over 19544.00 frames. ], tot_loss[loss=0.3645, simple_loss=0.3997, pruned_loss=0.1647, over 3818683.37 frames. ], batch size: 54, lr: 3.32e-02, grad_scale: 8.0 +2023-03-31 22:12:51,774 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9685.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:26,582 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.408e+02 8.898e+02 1.125e+03 1.384e+03 2.599e+03, threshold=2.251e+03, percent-clipped=6.0 +2023-03-31 22:13:35,213 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:13:43,488 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6580, 3.8134, 4.1422, 3.9959, 1.5348, 3.6484, 3.3383, 3.5947], + device='cuda:3'), covar=tensor([0.0357, 0.0463, 0.0370, 0.0271, 0.2854, 0.0209, 0.0411, 0.0932], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0281, 0.0375, 0.0263, 0.0421, 0.0183, 0.0260, 0.0379], + device='cuda:3'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 22:13:45,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-03-31 22:13:46,851 INFO [train.py:903] (3/4) Epoch 2, batch 2900, loss[loss=0.3653, simple_loss=0.4089, pruned_loss=0.1609, over 18748.00 frames. 
], tot_loss[loss=0.3641, simple_loss=0.3994, pruned_loss=0.1644, over 3816457.70 frames. ], batch size: 74, lr: 3.31e-02, grad_scale: 16.0 +2023-03-31 22:14:06,836 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9744.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:10,305 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9746.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:23,242 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3018, 1.1209, 1.0406, 1.4143, 1.1669, 1.2952, 1.3406, 1.2684], + device='cuda:3'), covar=tensor([0.0984, 0.1539, 0.1788, 0.1033, 0.1442, 0.1297, 0.1215, 0.1085], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0307, 0.0303, 0.0332, 0.0361, 0.0273, 0.0331, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 22:14:27,853 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9761.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:39,311 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:45,623 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9776.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:14:49,545 INFO [train.py:903] (3/4) Epoch 2, batch 2950, loss[loss=0.3927, simple_loss=0.4124, pruned_loss=0.1865, over 19841.00 frames. ], tot_loss[loss=0.3658, simple_loss=0.4005, pruned_loss=0.1656, over 3813782.10 frames. ], batch size: 52, lr: 3.31e-02, grad_scale: 8.0 +2023-03-31 22:14:52,369 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-31 22:15:31,704 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+02 8.475e+02 1.131e+03 1.413e+03 3.215e+03, threshold=2.262e+03, percent-clipped=4.0 +2023-03-31 22:15:53,078 INFO [train.py:903] (3/4) Epoch 2, batch 3000, loss[loss=0.3925, simple_loss=0.4184, pruned_loss=0.1833, over 18708.00 frames. ], tot_loss[loss=0.365, simple_loss=0.3997, pruned_loss=0.1651, over 3809602.00 frames. ], batch size: 74, lr: 3.30e-02, grad_scale: 4.0 +2023-03-31 22:15:53,078 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 22:16:06,232 INFO [train.py:937] (3/4) Epoch 2, validation: loss=0.2513, simple_loss=0.3423, pruned_loss=0.08019, over 944034.00 frames. +2023-03-31 22:16:06,233 INFO [train.py:938] (3/4) Maximum memory allocated so far is 17850MB +2023-03-31 22:16:12,130 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-03-31 22:17:08,084 INFO [train.py:903] (3/4) Epoch 2, batch 3050, loss[loss=0.355, simple_loss=0.3807, pruned_loss=0.1646, over 19476.00 frames. ], tot_loss[loss=0.3661, simple_loss=0.4005, pruned_loss=0.1659, over 3811836.39 frames. ], batch size: 49, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:17:22,810 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9891.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:17:50,736 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 8.806e+02 1.158e+03 1.402e+03 3.282e+03, threshold=2.315e+03, percent-clipped=2.0 +2023-03-31 22:18:10,044 INFO [train.py:903] (3/4) Epoch 2, batch 3100, loss[loss=0.3902, simple_loss=0.4037, pruned_loss=0.1884, over 19630.00 frames. 
], tot_loss[loss=0.3656, simple_loss=0.4, pruned_loss=0.1656, over 3812931.28 frames. ], batch size: 50, lr: 3.29e-02, grad_scale: 4.0 +2023-03-31 22:18:29,705 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9946.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:19:11,239 INFO [train.py:903] (3/4) Epoch 2, batch 3150, loss[loss=0.2728, simple_loss=0.3283, pruned_loss=0.1087, over 19750.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.3982, pruned_loss=0.1643, over 3819225.61 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 4.0 +2023-03-31 22:19:42,630 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-03-31 22:19:56,512 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 7.345e+02 9.404e+02 1.247e+03 3.615e+03, threshold=1.881e+03, percent-clipped=3.0 +2023-03-31 22:20:03,838 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10020.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:15,210 INFO [train.py:903] (3/4) Epoch 2, batch 3200, loss[loss=0.3325, simple_loss=0.3616, pruned_loss=0.1517, over 19310.00 frames. ], tot_loss[loss=0.364, simple_loss=0.3987, pruned_loss=0.1647, over 3802337.15 frames. ], batch size: 44, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:20:50,858 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10057.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:20:55,735 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:21:19,936 INFO [train.py:903] (3/4) Epoch 2, batch 3250, loss[loss=0.4401, simple_loss=0.4604, pruned_loss=0.2099, over 19530.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.3989, pruned_loss=0.1644, over 3783810.68 frames. ], batch size: 56, lr: 3.27e-02, grad_scale: 8.0 +2023-03-31 22:21:21,296 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10080.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:21:50,581 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10105.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:02,472 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+02 8.486e+02 1.037e+03 1.291e+03 3.604e+03, threshold=2.074e+03, percent-clipped=6.0 +2023-03-31 22:22:20,716 INFO [train.py:903] (3/4) Epoch 2, batch 3300, loss[loss=0.3674, simple_loss=0.4113, pruned_loss=0.1618, over 19759.00 frames. ], tot_loss[loss=0.3614, simple_loss=0.3968, pruned_loss=0.163, over 3802563.09 frames. ], batch size: 63, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:22:25,215 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-03-31 22:22:27,933 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10135.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:22:42,649 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10147.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:15,000 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10172.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:23:22,725 INFO [train.py:903] (3/4) Epoch 2, batch 3350, loss[loss=0.5118, simple_loss=0.487, pruned_loss=0.2683, over 13098.00 frames. ], tot_loss[loss=0.3623, simple_loss=0.3975, pruned_loss=0.1635, over 3783768.60 frames. 
], batch size: 136, lr: 3.26e-02, grad_scale: 8.0 +2023-03-31 22:24:07,629 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.516e+02 8.361e+02 9.909e+02 1.198e+03 2.844e+03, threshold=1.982e+03, percent-clipped=3.0 +2023-03-31 22:24:14,935 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10220.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:24:17,827 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-31 22:24:25,945 INFO [train.py:903] (3/4) Epoch 2, batch 3400, loss[loss=0.333, simple_loss=0.3853, pruned_loss=0.1404, over 19672.00 frames. ], tot_loss[loss=0.3614, simple_loss=0.3972, pruned_loss=0.1628, over 3794245.68 frames. ], batch size: 60, lr: 3.25e-02, grad_scale: 8.0 +2023-03-31 22:25:29,368 INFO [train.py:903] (3/4) Epoch 2, batch 3450, loss[loss=0.4057, simple_loss=0.4348, pruned_loss=0.1883, over 19653.00 frames. ], tot_loss[loss=0.3623, simple_loss=0.397, pruned_loss=0.1638, over 3802648.19 frames. ], batch size: 55, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:25:32,660 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-03-31 22:26:13,347 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+02 9.409e+02 1.166e+03 1.453e+03 2.796e+03, threshold=2.333e+03, percent-clipped=9.0 +2023-03-31 22:26:17,058 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10317.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:26:31,778 INFO [train.py:903] (3/4) Epoch 2, batch 3500, loss[loss=0.361, simple_loss=0.3938, pruned_loss=0.1641, over 19767.00 frames. ], tot_loss[loss=0.362, simple_loss=0.3968, pruned_loss=0.1636, over 3810362.15 frames. ], batch size: 54, lr: 3.24e-02, grad_scale: 4.0 +2023-03-31 22:26:47,010 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10342.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:27:28,357 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4593, 2.8194, 2.1237, 2.4962, 2.4444, 1.6564, 0.7552, 2.2385], + device='cuda:3'), covar=tensor([0.0340, 0.0326, 0.0316, 0.0333, 0.0513, 0.0710, 0.0864, 0.0665], + device='cuda:3'), in_proj_covar=tensor([0.0206, 0.0216, 0.0212, 0.0241, 0.0287, 0.0256, 0.0251, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:27:33,749 INFO [train.py:903] (3/4) Epoch 2, batch 3550, loss[loss=0.3482, simple_loss=0.3884, pruned_loss=0.154, over 19664.00 frames. ], tot_loss[loss=0.3613, simple_loss=0.3963, pruned_loss=0.1631, over 3816758.41 frames. 
], batch size: 53, lr: 3.23e-02, grad_scale: 4.0 +2023-03-31 22:27:49,478 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10391.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:04,139 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10401.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:20,156 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+02 7.776e+02 1.013e+03 1.369e+03 3.978e+03, threshold=2.027e+03, percent-clipped=2.0 +2023-03-31 22:28:21,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10416.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:30,605 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10424.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:28:35,060 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10427.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:28:37,136 INFO [train.py:903] (3/4) Epoch 2, batch 3600, loss[loss=0.2907, simple_loss=0.3413, pruned_loss=0.1201, over 19768.00 frames. ], tot_loss[loss=0.3621, simple_loss=0.397, pruned_loss=0.1636, over 3813655.05 frames. ], batch size: 46, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:32,600 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10472.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:37,372 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10476.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:29:41,349 INFO [train.py:903] (3/4) Epoch 2, batch 3650, loss[loss=0.3323, simple_loss=0.3679, pruned_loss=0.1483, over 19786.00 frames. ], tot_loss[loss=0.3612, simple_loss=0.3968, pruned_loss=0.1628, over 3812569.93 frames. ], batch size: 48, lr: 3.22e-02, grad_scale: 8.0 +2023-03-31 22:29:59,891 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5040, 1.4878, 1.1027, 1.7855, 1.5035, 1.3979, 1.4497, 1.3997], + device='cuda:3'), covar=tensor([0.0992, 0.1875, 0.1555, 0.0920, 0.1337, 0.0760, 0.1115, 0.0871], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0386, 0.0291, 0.0259, 0.0320, 0.0263, 0.0286, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:30:09,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10501.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:26,011 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+02 8.454e+02 1.072e+03 1.396e+03 2.688e+03, threshold=2.143e+03, percent-clipped=6.0 +2023-03-31 22:30:27,514 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10516.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:30:45,121 INFO [train.py:903] (3/4) Epoch 2, batch 3700, loss[loss=0.2929, simple_loss=0.3353, pruned_loss=0.1252, over 19345.00 frames. ], tot_loss[loss=0.3625, simple_loss=0.398, pruned_loss=0.1635, over 3811406.93 frames. 
], batch size: 44, lr: 3.21e-02, grad_scale: 8.0 +2023-03-31 22:30:50,130 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1177, 3.7934, 2.0520, 3.4606, 1.0570, 3.3964, 3.2698, 3.6289], + device='cuda:3'), covar=tensor([0.0779, 0.1145, 0.2237, 0.0722, 0.3618, 0.1078, 0.0814, 0.0743], + device='cuda:3'), in_proj_covar=tensor([0.0288, 0.0284, 0.0312, 0.0254, 0.0324, 0.0273, 0.0214, 0.0245], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:30:57,292 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10539.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:31:47,571 INFO [train.py:903] (3/4) Epoch 2, batch 3750, loss[loss=0.383, simple_loss=0.4243, pruned_loss=0.1709, over 19510.00 frames. ], tot_loss[loss=0.3608, simple_loss=0.3968, pruned_loss=0.1624, over 3815087.21 frames. ], batch size: 64, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:32:33,530 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+02 8.986e+02 1.057e+03 1.326e+03 2.585e+03, threshold=2.114e+03, percent-clipped=3.0 +2023-03-31 22:32:50,180 INFO [train.py:903] (3/4) Epoch 2, batch 3800, loss[loss=0.4662, simple_loss=0.4735, pruned_loss=0.2295, over 19608.00 frames. ], tot_loss[loss=0.36, simple_loss=0.3961, pruned_loss=0.162, over 3825111.73 frames. ], batch size: 57, lr: 3.20e-02, grad_scale: 8.0 +2023-03-31 22:33:23,964 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-03-31 22:33:51,759 INFO [train.py:903] (3/4) Epoch 2, batch 3850, loss[loss=0.404, simple_loss=0.4304, pruned_loss=0.1888, over 19792.00 frames. ], tot_loss[loss=0.359, simple_loss=0.3954, pruned_loss=0.1613, over 3819871.52 frames. ], batch size: 56, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:34:15,325 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-31 22:34:36,685 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5425, 4.0974, 2.3319, 3.6499, 1.2761, 3.9159, 3.7851, 3.8031], + device='cuda:3'), covar=tensor([0.0610, 0.1194, 0.2108, 0.0691, 0.3438, 0.0930, 0.0536, 0.0648], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0275, 0.0305, 0.0249, 0.0316, 0.0270, 0.0208, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:34:37,535 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.106e+02 8.604e+02 1.077e+03 1.442e+03 2.985e+03, threshold=2.155e+03, percent-clipped=6.0 +2023-03-31 22:34:56,858 INFO [train.py:903] (3/4) Epoch 2, batch 3900, loss[loss=0.3817, simple_loss=0.4192, pruned_loss=0.1721, over 19500.00 frames. ], tot_loss[loss=0.3566, simple_loss=0.3935, pruned_loss=0.1598, over 3834178.56 frames. 
], batch size: 64, lr: 3.19e-02, grad_scale: 8.0 +2023-03-31 22:35:37,427 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10762.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:49,464 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10771.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:50,889 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10772.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:35:58,743 INFO [train.py:903] (3/4) Epoch 2, batch 3950, loss[loss=0.3157, simple_loss=0.3657, pruned_loss=0.1329, over 19726.00 frames. ], tot_loss[loss=0.357, simple_loss=0.3938, pruned_loss=0.1601, over 3833975.23 frames. ], batch size: 51, lr: 3.18e-02, grad_scale: 8.0 +2023-03-31 22:36:00,418 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3304, 1.2323, 1.7800, 1.3850, 2.5996, 2.5233, 2.8330, 1.1995], + device='cuda:3'), covar=tensor([0.1423, 0.2120, 0.1384, 0.1533, 0.0855, 0.0824, 0.0884, 0.1886], + device='cuda:3'), in_proj_covar=tensor([0.0351, 0.0378, 0.0352, 0.0350, 0.0410, 0.0331, 0.0483, 0.0368], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:36:04,589 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-03-31 22:36:15,286 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10792.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:18,966 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10795.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:36:21,148 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10797.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:44,827 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.316e+02 7.972e+02 1.008e+03 1.220e+03 2.629e+03, threshold=2.016e+03, percent-clipped=1.0 +2023-03-31 22:36:46,287 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10816.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:36:51,386 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10820.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:37:01,758 INFO [train.py:903] (3/4) Epoch 2, batch 4000, loss[loss=0.3045, simple_loss=0.3463, pruned_loss=0.1313, over 19750.00 frames. ], tot_loss[loss=0.3564, simple_loss=0.3938, pruned_loss=0.1595, over 3808843.50 frames. ], batch size: 45, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:37:49,238 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-03-31 22:38:04,671 INFO [train.py:903] (3/4) Epoch 2, batch 4050, loss[loss=0.3065, simple_loss=0.3539, pruned_loss=0.1295, over 19757.00 frames. ], tot_loss[loss=0.3564, simple_loss=0.3936, pruned_loss=0.1596, over 3805946.52 frames. 
], batch size: 47, lr: 3.17e-02, grad_scale: 8.0 +2023-03-31 22:38:15,786 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10886.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:38:35,390 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10903.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:38:49,943 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.194e+02 9.186e+02 1.101e+03 1.338e+03 4.215e+03, threshold=2.201e+03, percent-clipped=7.0 +2023-03-31 22:39:10,595 INFO [train.py:903] (3/4) Epoch 2, batch 4100, loss[loss=0.3647, simple_loss=0.4117, pruned_loss=0.1588, over 19549.00 frames. ], tot_loss[loss=0.355, simple_loss=0.3926, pruned_loss=0.1587, over 3806827.38 frames. ], batch size: 56, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:39:13,297 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10931.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:39:45,322 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-03-31 22:40:13,114 INFO [train.py:903] (3/4) Epoch 2, batch 4150, loss[loss=0.3907, simple_loss=0.4269, pruned_loss=0.1772, over 19594.00 frames. ], tot_loss[loss=0.3555, simple_loss=0.3932, pruned_loss=0.1589, over 3806801.07 frames. ], batch size: 57, lr: 3.16e-02, grad_scale: 8.0 +2023-03-31 22:40:59,345 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 7.731e+02 1.007e+03 1.258e+03 2.097e+03, threshold=2.015e+03, percent-clipped=0.0 +2023-03-31 22:41:15,492 INFO [train.py:903] (3/4) Epoch 2, batch 4200, loss[loss=0.3795, simple_loss=0.4168, pruned_loss=0.1711, over 19558.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.3924, pruned_loss=0.1585, over 3810388.26 frames. ], batch size: 61, lr: 3.15e-02, grad_scale: 8.0 +2023-03-31 22:41:18,936 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-03-31 22:42:18,014 INFO [train.py:903] (3/4) Epoch 2, batch 4250, loss[loss=0.4823, simple_loss=0.4785, pruned_loss=0.243, over 18155.00 frames. ], tot_loss[loss=0.3555, simple_loss=0.3927, pruned_loss=0.1591, over 3828809.69 frames. ], batch size: 83, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:42:35,133 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-03-31 22:42:45,446 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-03-31 22:42:52,703 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11106.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:03,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.611e+02 8.485e+02 1.107e+03 1.406e+03 3.284e+03, threshold=2.214e+03, percent-clipped=7.0 +2023-03-31 22:43:21,881 INFO [train.py:903] (3/4) Epoch 2, batch 4300, loss[loss=0.3368, simple_loss=0.3809, pruned_loss=0.1464, over 19588.00 frames. ], tot_loss[loss=0.3539, simple_loss=0.3921, pruned_loss=0.1578, over 3828420.88 frames. 
], batch size: 52, lr: 3.14e-02, grad_scale: 8.0 +2023-03-31 22:43:30,408 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11136.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:43:37,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11142.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:09,048 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11167.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:16,377 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-03-31 22:44:23,252 INFO [train.py:903] (3/4) Epoch 2, batch 4350, loss[loss=0.338, simple_loss=0.3846, pruned_loss=0.1457, over 17236.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.3929, pruned_loss=0.1579, over 3818565.40 frames. ], batch size: 101, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:44:32,898 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11187.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:44:39,318 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11192.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:04,764 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11212.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:09,090 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.029e+02 7.841e+02 9.559e+02 1.160e+03 2.939e+03, threshold=1.912e+03, percent-clipped=2.0 +2023-03-31 22:45:15,643 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11221.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:45:24,374 INFO [train.py:903] (3/4) Epoch 2, batch 4400, loss[loss=0.3465, simple_loss=0.3791, pruned_loss=0.1569, over 19831.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.3916, pruned_loss=0.1571, over 3825176.46 frames. ], batch size: 52, lr: 3.13e-02, grad_scale: 8.0 +2023-03-31 22:45:46,922 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-31 22:45:48,415 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11247.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:45:50,514 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-03-31 22:45:54,212 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11251.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:46:00,706 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-03-31 22:46:04,600 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7038, 1.3937, 1.6133, 2.1751, 3.2659, 1.3287, 2.0781, 3.3400], + device='cuda:3'), covar=tensor([0.0258, 0.2119, 0.2100, 0.1306, 0.0357, 0.1933, 0.1044, 0.0319], + device='cuda:3'), in_proj_covar=tensor([0.0224, 0.0294, 0.0279, 0.0274, 0.0252, 0.0320, 0.0258, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:46:27,496 INFO [train.py:903] (3/4) Epoch 2, batch 4450, loss[loss=0.3118, simple_loss=0.3547, pruned_loss=0.1345, over 19355.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.3915, pruned_loss=0.1572, over 3828704.77 frames. 
], batch size: 47, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:46:31,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6463, 1.1375, 1.0512, 1.7204, 1.1879, 1.7166, 1.7134, 1.5250], + device='cuda:3'), covar=tensor([0.0816, 0.1419, 0.1513, 0.1126, 0.1442, 0.0976, 0.1103, 0.0857], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0305, 0.0295, 0.0330, 0.0353, 0.0267, 0.0318, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 22:47:14,190 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+02 8.820e+02 1.081e+03 1.372e+03 2.333e+03, threshold=2.162e+03, percent-clipped=5.0 +2023-03-31 22:47:18,411 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-03-31 22:47:31,605 INFO [train.py:903] (3/4) Epoch 2, batch 4500, loss[loss=0.3509, simple_loss=0.3829, pruned_loss=0.1594, over 19590.00 frames. ], tot_loss[loss=0.3525, simple_loss=0.3909, pruned_loss=0.157, over 3827258.33 frames. ], batch size: 52, lr: 3.12e-02, grad_scale: 8.0 +2023-03-31 22:48:12,963 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11362.0, num_to_drop=1, layers_to_drop={1} +2023-03-31 22:48:26,005 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7782, 1.2039, 1.1351, 1.6956, 1.2881, 1.5522, 1.6593, 1.6043], + device='cuda:3'), covar=tensor([0.0775, 0.1444, 0.1603, 0.1119, 0.1497, 0.1212, 0.1387, 0.0835], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0308, 0.0297, 0.0328, 0.0354, 0.0270, 0.0324, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-03-31 22:48:26,046 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1117, 1.8001, 1.4451, 1.4938, 1.6324, 0.8961, 0.6122, 1.6010], + device='cuda:3'), covar=tensor([0.1086, 0.0647, 0.1190, 0.0602, 0.0650, 0.1593, 0.1189, 0.0566], + device='cuda:3'), in_proj_covar=tensor([0.0291, 0.0203, 0.0293, 0.0253, 0.0201, 0.0297, 0.0271, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 22:48:34,553 INFO [train.py:903] (3/4) Epoch 2, batch 4550, loss[loss=0.403, simple_loss=0.4344, pruned_loss=0.1858, over 19565.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3916, pruned_loss=0.1576, over 3832077.92 frames. ], batch size: 61, lr: 3.11e-02, grad_scale: 8.0 +2023-03-31 22:48:45,435 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-03-31 22:49:08,229 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-03-31 22:49:21,598 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.316e+02 7.788e+02 1.003e+03 1.220e+03 2.356e+03, threshold=2.005e+03, percent-clipped=1.0 +2023-03-31 22:49:37,109 INFO [train.py:903] (3/4) Epoch 2, batch 4600, loss[loss=0.3514, simple_loss=0.3937, pruned_loss=0.1546, over 19096.00 frames. ], tot_loss[loss=0.3527, simple_loss=0.3909, pruned_loss=0.1573, over 3830515.44 frames. 
], batch size: 69, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:49:49,978 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3982, 2.2245, 1.7306, 1.9249, 1.5139, 1.6211, 0.3532, 1.2246], + device='cuda:3'), covar=tensor([0.0371, 0.0298, 0.0242, 0.0293, 0.0690, 0.0478, 0.0749, 0.0578], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0222, 0.0219, 0.0247, 0.0302, 0.0261, 0.0252, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:50:37,638 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11477.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:50:39,641 INFO [train.py:903] (3/4) Epoch 2, batch 4650, loss[loss=0.3124, simple_loss=0.3624, pruned_loss=0.1312, over 19859.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3905, pruned_loss=0.157, over 3829129.81 frames. ], batch size: 52, lr: 3.10e-02, grad_scale: 8.0 +2023-03-31 22:50:45,018 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8733, 1.7942, 1.7592, 2.6509, 1.6898, 2.5354, 1.9965, 1.6860], + device='cuda:3'), covar=tensor([0.0890, 0.0743, 0.0442, 0.0366, 0.0873, 0.0246, 0.0922, 0.0763], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0325, 0.0343, 0.0450, 0.0411, 0.0244, 0.0451, 0.0357], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:50:56,512 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-31 22:50:59,956 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-03-31 22:51:09,665 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11502.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:10,458 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-03-31 22:51:15,630 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11507.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:20,436 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5088, 2.2381, 1.5639, 1.6844, 1.8711, 0.9379, 1.1193, 1.7497], + device='cuda:3'), covar=tensor([0.1206, 0.0473, 0.1394, 0.0690, 0.0753, 0.1505, 0.1031, 0.0668], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0199, 0.0300, 0.0247, 0.0203, 0.0300, 0.0267, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 22:51:26,521 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.039e+02 8.212e+02 1.126e+03 1.409e+03 2.689e+03, threshold=2.252e+03, percent-clipped=6.0 +2023-03-31 22:51:42,941 INFO [train.py:903] (3/4) Epoch 2, batch 4700, loss[loss=0.3231, simple_loss=0.3765, pruned_loss=0.1349, over 19661.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3899, pruned_loss=0.1565, over 3825855.99 frames. ], batch size: 58, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:51:47,991 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:51:49,465 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-31 22:51:52,287 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11536.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:05,035 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-03-31 22:52:08,737 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11550.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:52:15,832 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8082, 3.5834, 2.1672, 2.6621, 2.8183, 1.4832, 1.4819, 1.5494], + device='cuda:3'), covar=tensor([0.1519, 0.0388, 0.1315, 0.0604, 0.0868, 0.1518, 0.1187, 0.1188], + device='cuda:3'), in_proj_covar=tensor([0.0286, 0.0199, 0.0298, 0.0247, 0.0200, 0.0296, 0.0266, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 22:52:46,172 INFO [train.py:903] (3/4) Epoch 2, batch 4750, loss[loss=0.3551, simple_loss=0.3989, pruned_loss=0.1557, over 18791.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3903, pruned_loss=0.1565, over 3827636.00 frames. ], batch size: 74, lr: 3.09e-02, grad_scale: 8.0 +2023-03-31 22:53:32,504 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+02 7.433e+02 1.012e+03 1.332e+03 3.283e+03, threshold=2.025e+03, percent-clipped=2.0 +2023-03-31 22:53:35,213 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11618.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:53:47,427 INFO [train.py:903] (3/4) Epoch 2, batch 4800, loss[loss=0.3172, simple_loss=0.3597, pruned_loss=0.1373, over 19612.00 frames. ], tot_loss[loss=0.3511, simple_loss=0.3895, pruned_loss=0.1564, over 3811629.35 frames. ], batch size: 50, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:53:47,662 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0224, 5.3381, 2.8151, 4.7132, 1.5663, 5.3466, 5.2320, 5.5082], + device='cuda:3'), covar=tensor([0.0402, 0.0894, 0.2133, 0.0626, 0.3355, 0.0923, 0.0551, 0.0531], + device='cuda:3'), in_proj_covar=tensor([0.0291, 0.0287, 0.0315, 0.0260, 0.0326, 0.0284, 0.0223, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:54:04,672 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11643.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 22:54:13,893 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9767, 1.9051, 2.0184, 2.4649, 2.3062, 2.8557, 3.1191, 2.8991], + device='cuda:3'), covar=tensor([0.0620, 0.1208, 0.1370, 0.1371, 0.1367, 0.0846, 0.1164, 0.0633], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0307, 0.0291, 0.0322, 0.0347, 0.0263, 0.0320, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:3') +2023-03-31 22:54:15,091 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11651.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:19,410 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:54:49,932 INFO [train.py:903] (3/4) Epoch 2, batch 4850, loss[loss=0.3233, simple_loss=0.3539, pruned_loss=0.1463, over 19747.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3885, pruned_loss=0.1549, over 3830495.47 frames. 
], batch size: 46, lr: 3.08e-02, grad_scale: 8.0 +2023-03-31 22:55:14,991 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-03-31 22:55:34,591 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-03-31 22:55:36,946 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.585e+02 1.008e+03 1.288e+03 2.592e+03, threshold=2.016e+03, percent-clipped=4.0 +2023-03-31 22:55:38,504 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:55:40,628 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-03-31 22:55:41,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-03-31 22:55:51,135 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-03-31 22:55:53,217 INFO [train.py:903] (3/4) Epoch 2, batch 4900, loss[loss=0.3603, simple_loss=0.4086, pruned_loss=0.156, over 19694.00 frames. ], tot_loss[loss=0.3505, simple_loss=0.3893, pruned_loss=0.1558, over 3807663.09 frames. ], batch size: 59, lr: 3.07e-02, grad_scale: 8.0 +2023-03-31 22:56:04,446 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2424, 3.7424, 2.1802, 3.5018, 1.2645, 3.7189, 3.4459, 3.6552], + device='cuda:3'), covar=tensor([0.0625, 0.1382, 0.1997, 0.0669, 0.3418, 0.0801, 0.0562, 0.0690], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0298, 0.0316, 0.0266, 0.0332, 0.0286, 0.0225, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 22:56:13,250 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-03-31 22:56:55,835 INFO [train.py:903] (3/4) Epoch 2, batch 4950, loss[loss=0.3595, simple_loss=0.3993, pruned_loss=0.1599, over 19519.00 frames. ], tot_loss[loss=0.3506, simple_loss=0.3894, pruned_loss=0.1558, over 3815388.13 frames. ], batch size: 54, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:57:12,057 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-03-31 22:57:37,549 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-03-31 22:57:41,842 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+02 9.257e+02 1.136e+03 1.412e+03 3.441e+03, threshold=2.272e+03, percent-clipped=4.0 +2023-03-31 22:57:52,801 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-31 22:57:57,810 INFO [train.py:903] (3/4) Epoch 2, batch 5000, loss[loss=0.3843, simple_loss=0.4141, pruned_loss=0.1772, over 19668.00 frames. ], tot_loss[loss=0.3505, simple_loss=0.3895, pruned_loss=0.1558, over 3826466.35 frames. ], batch size: 58, lr: 3.06e-02, grad_scale: 8.0 +2023-03-31 22:58:04,651 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:58:07,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-03-31 22:58:08,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4073, 1.2218, 1.5389, 1.8113, 3.0873, 1.1948, 2.0294, 2.9430], + device='cuda:3'), covar=tensor([0.0340, 0.2441, 0.2215, 0.1503, 0.0374, 0.2059, 0.1071, 0.0461], + device='cuda:3'), in_proj_covar=tensor([0.0229, 0.0298, 0.0281, 0.0276, 0.0258, 0.0313, 0.0257, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 22:58:16,655 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-03-31 22:58:21,712 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5594, 1.1086, 1.3739, 1.0580, 2.6118, 3.2697, 3.2690, 3.5300], + device='cuda:3'), covar=tensor([0.1514, 0.2905, 0.2849, 0.2160, 0.0506, 0.0138, 0.0227, 0.0124], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0278, 0.0315, 0.0276, 0.0195, 0.0105, 0.0192, 0.0109], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-03-31 22:58:42,527 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11864.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:00,551 INFO [train.py:903] (3/4) Epoch 2, batch 5050, loss[loss=0.3276, simple_loss=0.3788, pruned_loss=0.1382, over 19531.00 frames. ], tot_loss[loss=0.3522, simple_loss=0.3909, pruned_loss=0.1568, over 3828414.67 frames. ], batch size: 54, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 22:59:19,020 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11894.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:36,721 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 22:59:38,652 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-03-31 22:59:48,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 8.836e+02 1.119e+03 1.460e+03 3.605e+03, threshold=2.237e+03, percent-clipped=7.0 +2023-03-31 23:00:03,346 INFO [train.py:903] (3/4) Epoch 2, batch 5100, loss[loss=0.3543, simple_loss=0.3989, pruned_loss=0.1549, over 17942.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.3915, pruned_loss=0.1571, over 3825504.91 frames. ], batch size: 83, lr: 3.05e-02, grad_scale: 8.0 +2023-03-31 23:00:08,071 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11932.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:16,225 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11938.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:00:19,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-03-31 23:00:22,851 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-03-31 23:00:26,270 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-03-31 23:00:50,344 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11966.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:07,396 INFO [train.py:903] (3/4) Epoch 2, batch 5150, loss[loss=0.4319, simple_loss=0.4464, pruned_loss=0.2087, over 19506.00 frames. 
], tot_loss[loss=0.3514, simple_loss=0.3902, pruned_loss=0.1563, over 3820570.76 frames. ], batch size: 64, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:01:20,920 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:01:29,902 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11997.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:36,674 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12002.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:44,903 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:01:54,417 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 8.254e+02 9.989e+02 1.287e+03 2.673e+03, threshold=1.998e+03, percent-clipped=4.0 +2023-03-31 23:01:56,795 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:02:10,377 INFO [train.py:903] (3/4) Epoch 2, batch 5200, loss[loss=0.3255, simple_loss=0.3588, pruned_loss=0.1461, over 19727.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3905, pruned_loss=0.1564, over 3820552.12 frames. ], batch size: 47, lr: 3.04e-02, grad_scale: 8.0 +2023-03-31 23:02:25,403 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-03-31 23:02:52,155 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12061.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:11,235 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-03-31 23:03:13,776 INFO [train.py:903] (3/4) Epoch 2, batch 5250, loss[loss=0.2782, simple_loss=0.3271, pruned_loss=0.1146, over 19761.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3902, pruned_loss=0.1563, over 3819706.90 frames. ], batch size: 47, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:03:32,695 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12094.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:55,623 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12112.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:03:59,697 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.115e+02 8.666e+02 1.057e+03 1.421e+03 4.195e+03, threshold=2.115e+03, percent-clipped=5.0 +2023-03-31 23:04:02,192 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7290, 1.3234, 1.2772, 1.8147, 1.5824, 1.5051, 1.3958, 1.6012], + device='cuda:3'), covar=tensor([0.0754, 0.1607, 0.1293, 0.0868, 0.1117, 0.0591, 0.0950, 0.0664], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0378, 0.0289, 0.0259, 0.0325, 0.0266, 0.0275, 0.0231], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:04:14,787 INFO [train.py:903] (3/4) Epoch 2, batch 5300, loss[loss=0.3498, simple_loss=0.3951, pruned_loss=0.1522, over 18770.00 frames. ], tot_loss[loss=0.3532, simple_loss=0.3915, pruned_loss=0.1575, over 3815446.59 frames. ], batch size: 74, lr: 3.03e-02, grad_scale: 8.0 +2023-03-31 23:04:35,591 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-03-31 23:05:13,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12176.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:15,938 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12178.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:05:16,931 INFO [train.py:903] (3/4) Epoch 2, batch 5350, loss[loss=0.3479, simple_loss=0.3951, pruned_loss=0.1503, over 19671.00 frames. ], tot_loss[loss=0.3521, simple_loss=0.3903, pruned_loss=0.1569, over 3832045.39 frames. ], batch size: 58, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:05:45,073 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1403, 1.1956, 1.8030, 1.3880, 2.8070, 2.7553, 2.8759, 1.3123], + device='cuda:3'), covar=tensor([0.1296, 0.2077, 0.1152, 0.1232, 0.0679, 0.0663, 0.0871, 0.1682], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0388, 0.0349, 0.0350, 0.0420, 0.0339, 0.0493, 0.0373], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:05:53,531 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-03-31 23:05:53,660 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12208.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:06:03,420 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 9.122e+02 1.169e+03 1.506e+03 3.477e+03, threshold=2.338e+03, percent-clipped=13.0 +2023-03-31 23:06:20,773 INFO [train.py:903] (3/4) Epoch 2, batch 5400, loss[loss=0.3777, simple_loss=0.4111, pruned_loss=0.1721, over 18751.00 frames. ], tot_loss[loss=0.3519, simple_loss=0.3903, pruned_loss=0.1567, over 3824508.44 frames. ], batch size: 74, lr: 3.02e-02, grad_scale: 8.0 +2023-03-31 23:07:06,480 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12265.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:23,454 INFO [train.py:903] (3/4) Epoch 2, batch 5450, loss[loss=0.439, simple_loss=0.4497, pruned_loss=0.2141, over 17540.00 frames. ], tot_loss[loss=0.3507, simple_loss=0.3898, pruned_loss=0.1558, over 3821010.33 frames. ], batch size: 101, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:07:27,096 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12282.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:36,432 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12290.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:07:40,664 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12293.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:01,536 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12310.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:08,448 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 7.539e+02 9.464e+02 1.137e+03 1.898e+03, threshold=1.893e+03, percent-clipped=0.0 +2023-03-31 23:08:17,866 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12323.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:23,997 INFO [train.py:903] (3/4) Epoch 2, batch 5500, loss[loss=0.369, simple_loss=0.4081, pruned_loss=0.1649, over 19533.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3908, pruned_loss=0.1562, over 3810210.99 frames. 
], batch size: 64, lr: 3.01e-02, grad_scale: 8.0 +2023-03-31 23:08:47,643 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12346.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:08:49,882 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-03-31 23:09:14,235 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12368.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:28,529 INFO [train.py:903] (3/4) Epoch 2, batch 5550, loss[loss=0.3696, simple_loss=0.414, pruned_loss=0.1626, over 19374.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3885, pruned_loss=0.1541, over 3826612.04 frames. ], batch size: 66, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:09:36,189 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-03-31 23:09:46,133 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12393.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:09:50,671 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12397.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:14,308 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+02 8.577e+02 1.018e+03 1.198e+03 2.956e+03, threshold=2.037e+03, percent-clipped=3.0 +2023-03-31 23:10:25,914 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-03-31 23:10:27,088 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12425.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:31,331 INFO [train.py:903] (3/4) Epoch 2, batch 5600, loss[loss=0.3203, simple_loss=0.3534, pruned_loss=0.1435, over 19498.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3876, pruned_loss=0.1531, over 3832137.14 frames. ], batch size: 49, lr: 3.00e-02, grad_scale: 8.0 +2023-03-31 23:10:35,306 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:10:41,880 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12438.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:06,511 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12457.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:10,906 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1645, 1.7732, 1.4514, 1.2075, 1.5955, 0.7977, 0.8404, 1.5643], + device='cuda:3'), covar=tensor([0.0702, 0.0429, 0.0817, 0.0507, 0.0459, 0.1106, 0.0737, 0.0422], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0210, 0.0301, 0.0249, 0.0209, 0.0297, 0.0271, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 23:11:12,054 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12461.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:16,301 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12464.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:11:33,650 INFO [train.py:903] (3/4) Epoch 2, batch 5650, loss[loss=0.3308, simple_loss=0.3835, pruned_loss=0.139, over 19675.00 frames. ], tot_loss[loss=0.3473, simple_loss=0.3876, pruned_loss=0.1535, over 3826696.32 frames. 
], batch size: 55, lr: 2.99e-02, grad_scale: 8.0 +2023-03-31 23:12:19,594 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.393e+02 8.152e+02 1.040e+03 1.340e+03 2.595e+03, threshold=2.080e+03, percent-clipped=4.0 +2023-03-31 23:12:21,931 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-03-31 23:12:35,557 INFO [train.py:903] (3/4) Epoch 2, batch 5700, loss[loss=0.3048, simple_loss=0.3457, pruned_loss=0.132, over 19344.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3881, pruned_loss=0.1537, over 3839238.94 frames. ], batch size: 47, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:02,450 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:06,767 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0261, 4.8235, 5.7567, 5.5857, 1.9759, 5.1345, 4.8601, 5.1420], + device='cuda:3'), covar=tensor([0.0382, 0.0429, 0.0385, 0.0211, 0.2964, 0.0158, 0.0245, 0.0781], + device='cuda:3'), in_proj_covar=tensor([0.0310, 0.0297, 0.0408, 0.0305, 0.0448, 0.0201, 0.0279, 0.0408], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 23:13:06,850 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12553.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:32,367 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12574.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:37,668 INFO [train.py:903] (3/4) Epoch 2, batch 5750, loss[loss=0.3765, simple_loss=0.4266, pruned_loss=0.1632, over 19663.00 frames. ], tot_loss[loss=0.3462, simple_loss=0.3872, pruned_loss=0.1526, over 3841287.04 frames. ], batch size: 58, lr: 2.98e-02, grad_scale: 8.0 +2023-03-31 23:13:39,172 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12579.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:13:42,220 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-03-31 23:13:50,271 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-03-31 23:13:56,874 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-03-31 23:14:10,379 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:14:23,858 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+02 8.083e+02 9.516e+02 1.322e+03 3.330e+03, threshold=1.903e+03, percent-clipped=5.0 +2023-03-31 23:14:40,968 INFO [train.py:903] (3/4) Epoch 2, batch 5800, loss[loss=0.4082, simple_loss=0.4371, pruned_loss=0.1897, over 19539.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3876, pruned_loss=0.1529, over 3821426.59 frames. ], batch size: 56, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:10,932 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12653.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:42,714 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12678.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:15:43,517 INFO [train.py:903] (3/4) Epoch 2, batch 5850, loss[loss=0.2995, simple_loss=0.3399, pruned_loss=0.1295, over 19100.00 frames. 
], tot_loss[loss=0.3475, simple_loss=0.388, pruned_loss=0.1536, over 3813977.74 frames. ], batch size: 42, lr: 2.97e-02, grad_scale: 8.0 +2023-03-31 23:15:44,004 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9726, 0.9533, 1.5998, 1.1061, 2.3548, 2.3073, 2.6035, 1.0032], + device='cuda:3'), covar=tensor([0.1681, 0.2332, 0.1317, 0.1637, 0.0920, 0.0977, 0.1018, 0.2008], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0396, 0.0363, 0.0363, 0.0438, 0.0349, 0.0510, 0.0382], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:15:47,153 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12681.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:18,364 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12706.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:30,351 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.479e+02 7.980e+02 9.827e+02 1.217e+03 2.781e+03, threshold=1.965e+03, percent-clipped=6.0 +2023-03-31 23:16:32,017 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12717.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:16:46,108 INFO [train.py:903] (3/4) Epoch 2, batch 5900, loss[loss=0.3428, simple_loss=0.3877, pruned_loss=0.149, over 19604.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3884, pruned_loss=0.1536, over 3821992.16 frames. ], batch size: 57, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:16:49,441 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-03-31 23:17:03,733 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12742.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:17:11,479 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-03-31 23:17:41,839 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.66 vs. limit=5.0 +2023-03-31 23:17:49,031 INFO [train.py:903] (3/4) Epoch 2, batch 5950, loss[loss=0.327, simple_loss=0.3829, pruned_loss=0.1356, over 19615.00 frames. ], tot_loss[loss=0.3465, simple_loss=0.3872, pruned_loss=0.1529, over 3823631.25 frames. ], batch size: 57, lr: 2.96e-02, grad_scale: 8.0 +2023-03-31 23:17:52,884 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12781.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:25,408 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12808.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:26,826 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12809.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:18:34,471 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+02 9.090e+02 1.139e+03 1.452e+03 3.383e+03, threshold=2.279e+03, percent-clipped=8.0 +2023-03-31 23:18:51,783 INFO [train.py:903] (3/4) Epoch 2, batch 6000, loss[loss=0.2916, simple_loss=0.3401, pruned_loss=0.1216, over 19461.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3876, pruned_loss=0.154, over 3809237.54 frames. ], batch size: 49, lr: 2.95e-02, grad_scale: 8.0 +2023-03-31 23:18:51,784 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 23:19:06,012 INFO [train.py:937] (3/4) Epoch 2, validation: loss=0.246, simple_loss=0.337, pruned_loss=0.07745, over 944034.00 frames. 
+2023-03-31 23:19:06,013 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18251MB +2023-03-31 23:19:13,368 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12834.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:20:08,252 INFO [train.py:903] (3/4) Epoch 2, batch 6050, loss[loss=0.3127, simple_loss=0.3696, pruned_loss=0.1279, over 19596.00 frames. ], tot_loss[loss=0.3476, simple_loss=0.387, pruned_loss=0.1541, over 3807929.15 frames. ], batch size: 52, lr: 2.95e-02, grad_scale: 4.0 +2023-03-31 23:20:52,510 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6465, 1.6517, 1.5097, 2.2826, 1.6284, 1.9499, 1.8855, 1.4222], + device='cuda:3'), covar=tensor([0.0813, 0.0649, 0.0444, 0.0333, 0.0691, 0.0272, 0.0832, 0.0724], + device='cuda:3'), in_proj_covar=tensor([0.0375, 0.0347, 0.0361, 0.0476, 0.0431, 0.0268, 0.0461, 0.0367], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:20:56,515 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+02 7.840e+02 9.937e+02 1.323e+03 8.220e+03, threshold=1.987e+03, percent-clipped=9.0 +2023-03-31 23:21:03,491 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12923.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:21:10,256 INFO [train.py:903] (3/4) Epoch 2, batch 6100, loss[loss=0.2977, simple_loss=0.3485, pruned_loss=0.1235, over 19605.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3864, pruned_loss=0.1532, over 3816647.00 frames. ], batch size: 50, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:21:24,588 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-31 23:21:30,965 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6330, 1.5868, 1.6016, 2.2709, 1.6651, 1.9352, 1.8877, 1.5109], + device='cuda:3'), covar=tensor([0.0811, 0.0662, 0.0426, 0.0318, 0.0647, 0.0274, 0.0813, 0.0669], + device='cuda:3'), in_proj_covar=tensor([0.0377, 0.0348, 0.0362, 0.0476, 0.0431, 0.0269, 0.0463, 0.0364], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:22:11,868 INFO [train.py:903] (3/4) Epoch 2, batch 6150, loss[loss=0.3516, simple_loss=0.3796, pruned_loss=0.1619, over 19212.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3886, pruned_loss=0.1554, over 3811312.60 frames. ], batch size: 42, lr: 2.94e-02, grad_scale: 4.0 +2023-03-31 23:22:14,912 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-31 23:22:42,902 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-03-31 23:23:00,757 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+02 7.749e+02 1.029e+03 1.287e+03 3.235e+03, threshold=2.059e+03, percent-clipped=7.0 +2023-03-31 23:23:13,338 INFO [train.py:903] (3/4) Epoch 2, batch 6200, loss[loss=0.3004, simple_loss=0.3463, pruned_loss=0.1273, over 18551.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3868, pruned_loss=0.1541, over 3811288.81 frames. ], batch size: 41, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:15,396 INFO [train.py:903] (3/4) Epoch 2, batch 6250, loss[loss=0.4301, simple_loss=0.4421, pruned_loss=0.209, over 13262.00 frames. ], tot_loss[loss=0.3485, simple_loss=0.3879, pruned_loss=0.1545, over 3807984.32 frames. 
], batch size: 136, lr: 2.93e-02, grad_scale: 4.0 +2023-03-31 23:24:47,227 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-03-31 23:25:04,357 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.358e+02 8.552e+02 1.023e+03 1.333e+03 3.705e+03, threshold=2.046e+03, percent-clipped=2.0 +2023-03-31 23:25:13,052 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13125.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:25:17,707 INFO [train.py:903] (3/4) Epoch 2, batch 6300, loss[loss=0.2935, simple_loss=0.3458, pruned_loss=0.1206, over 19735.00 frames. ], tot_loss[loss=0.3486, simple_loss=0.3885, pruned_loss=0.1543, over 3813204.98 frames. ], batch size: 51, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:19,879 INFO [train.py:903] (3/4) Epoch 2, batch 6350, loss[loss=0.3244, simple_loss=0.3758, pruned_loss=0.1366, over 19664.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.388, pruned_loss=0.1534, over 3824768.46 frames. ], batch size: 53, lr: 2.92e-02, grad_scale: 4.0 +2023-03-31 23:26:20,353 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13179.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:26:49,668 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13204.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:27:06,691 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.673e+02 8.539e+02 1.071e+03 1.407e+03 4.202e+03, threshold=2.141e+03, percent-clipped=6.0 +2023-03-31 23:27:19,362 INFO [train.py:903] (3/4) Epoch 2, batch 6400, loss[loss=0.3714, simple_loss=0.4077, pruned_loss=0.1675, over 19531.00 frames. ], tot_loss[loss=0.3484, simple_loss=0.3888, pruned_loss=0.154, over 3810161.37 frames. ], batch size: 54, lr: 2.92e-02, grad_scale: 8.0 +2023-03-31 23:27:33,753 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13240.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:28:22,169 INFO [train.py:903] (3/4) Epoch 2, batch 6450, loss[loss=0.3334, simple_loss=0.3838, pruned_loss=0.1415, over 17975.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.3888, pruned_loss=0.1537, over 3808236.33 frames. ], batch size: 83, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:09,608 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-03-31 23:29:10,633 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+02 7.874e+02 9.907e+02 1.203e+03 2.411e+03, threshold=1.981e+03, percent-clipped=4.0 +2023-03-31 23:29:24,102 INFO [train.py:903] (3/4) Epoch 2, batch 6500, loss[loss=0.2989, simple_loss=0.3427, pruned_loss=0.1276, over 19758.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3878, pruned_loss=0.153, over 3807694.78 frames. ], batch size: 46, lr: 2.91e-02, grad_scale: 8.0 +2023-03-31 23:29:30,885 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-03-31 23:30:26,693 INFO [train.py:903] (3/4) Epoch 2, batch 6550, loss[loss=0.3412, simple_loss=0.3886, pruned_loss=0.1468, over 19776.00 frames. ], tot_loss[loss=0.3459, simple_loss=0.387, pruned_loss=0.1524, over 3820393.86 frames. 
], batch size: 56, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:31:14,491 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+02 7.924e+02 9.507e+02 1.218e+03 2.525e+03, threshold=1.901e+03, percent-clipped=3.0 +2023-03-31 23:31:27,083 INFO [train.py:903] (3/4) Epoch 2, batch 6600, loss[loss=0.384, simple_loss=0.4257, pruned_loss=0.1711, over 18136.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3866, pruned_loss=0.1521, over 3821605.32 frames. ], batch size: 83, lr: 2.90e-02, grad_scale: 8.0 +2023-03-31 23:32:29,095 INFO [train.py:903] (3/4) Epoch 2, batch 6650, loss[loss=0.4002, simple_loss=0.434, pruned_loss=0.1832, over 19621.00 frames. ], tot_loss[loss=0.3467, simple_loss=0.3868, pruned_loss=0.1533, over 3822582.79 frames. ], batch size: 57, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:32:51,548 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13496.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:17,438 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.558e+02 9.096e+02 1.181e+03 1.471e+03 3.411e+03, threshold=2.361e+03, percent-clipped=10.0 +2023-03-31 23:33:21,194 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13521.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:33:29,651 INFO [train.py:903] (3/4) Epoch 2, batch 6700, loss[loss=0.4083, simple_loss=0.4347, pruned_loss=0.191, over 18153.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3866, pruned_loss=0.1536, over 3805182.59 frames. ], batch size: 83, lr: 2.89e-02, grad_scale: 8.0 +2023-03-31 23:34:27,762 INFO [train.py:903] (3/4) Epoch 2, batch 6750, loss[loss=0.3641, simple_loss=0.3846, pruned_loss=0.1718, over 19404.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3878, pruned_loss=0.1544, over 3796984.28 frames. ], batch size: 48, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:35:12,671 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.052e+02 8.174e+02 9.996e+02 1.293e+03 2.664e+03, threshold=1.999e+03, percent-clipped=1.0 +2023-03-31 23:35:25,230 INFO [train.py:903] (3/4) Epoch 2, batch 6800, loss[loss=0.3364, simple_loss=0.3907, pruned_loss=0.1411, over 19531.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3868, pruned_loss=0.1535, over 3799474.06 frames. ], batch size: 54, lr: 2.88e-02, grad_scale: 8.0 +2023-03-31 23:36:09,779 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-03-31 23:36:10,968 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-03-31 23:36:13,728 INFO [train.py:903] (3/4) Epoch 3, batch 0, loss[loss=0.389, simple_loss=0.4155, pruned_loss=0.1812, over 18205.00 frames. ], tot_loss[loss=0.389, simple_loss=0.4155, pruned_loss=0.1812, over 18205.00 frames. ], batch size: 83, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:36:13,728 INFO [train.py:928] (3/4) Computing validation loss +2023-03-31 23:36:24,494 INFO [train.py:937] (3/4) Epoch 3, validation: loss=0.241, simple_loss=0.3346, pruned_loss=0.07374, over 944034.00 frames. +2023-03-31 23:36:24,495 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18419MB +2023-03-31 23:36:37,412 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-03-31 23:37:25,523 INFO [train.py:903] (3/4) Epoch 3, batch 50, loss[loss=0.3678, simple_loss=0.4036, pruned_loss=0.166, over 19426.00 frames. 
], tot_loss[loss=0.3432, simple_loss=0.3852, pruned_loss=0.1506, over 866559.39 frames. ], batch size: 70, lr: 2.73e-02, grad_scale: 8.0 +2023-03-31 23:37:38,309 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 7.787e+02 9.326e+02 1.115e+03 3.182e+03, threshold=1.865e+03, percent-clipped=5.0 +2023-03-31 23:37:58,965 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-03-31 23:38:24,031 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13755.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:38:25,865 INFO [train.py:903] (3/4) Epoch 3, batch 100, loss[loss=0.3872, simple_loss=0.425, pruned_loss=0.1748, over 19596.00 frames. ], tot_loss[loss=0.339, simple_loss=0.3813, pruned_loss=0.1484, over 1505485.58 frames. ], batch size: 57, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:38:33,262 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3416, 1.2456, 2.3287, 1.7299, 3.1306, 3.0798, 3.4312, 1.8212], + device='cuda:3'), covar=tensor([0.1365, 0.2091, 0.1108, 0.1177, 0.0930, 0.0843, 0.1097, 0.1837], + device='cuda:3'), in_proj_covar=tensor([0.0380, 0.0420, 0.0380, 0.0377, 0.0447, 0.0365, 0.0530, 0.0391], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:38:35,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-03-31 23:39:21,045 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7879, 1.7513, 2.0621, 2.3621, 1.9993, 2.1529, 2.1568, 2.7246], + device='cuda:3'), covar=tensor([0.0601, 0.2034, 0.1300, 0.0939, 0.1414, 0.0554, 0.1012, 0.0547], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0383, 0.0292, 0.0259, 0.0324, 0.0271, 0.0279, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:39:27,419 INFO [train.py:903] (3/4) Epoch 3, batch 150, loss[loss=0.3367, simple_loss=0.3952, pruned_loss=0.1391, over 19669.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3821, pruned_loss=0.1495, over 2022685.54 frames. ], batch size: 58, lr: 2.72e-02, grad_scale: 8.0 +2023-03-31 23:39:40,058 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+02 7.521e+02 1.009e+03 1.351e+03 3.530e+03, threshold=2.018e+03, percent-clipped=10.0 +2023-03-31 23:40:28,871 INFO [train.py:903] (3/4) Epoch 3, batch 200, loss[loss=0.3394, simple_loss=0.3644, pruned_loss=0.1573, over 19786.00 frames. ], tot_loss[loss=0.342, simple_loss=0.384, pruned_loss=0.15, over 2433702.94 frames. ], batch size: 48, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:40:28,910 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-03-31 23:40:37,961 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6962, 1.3296, 1.5264, 1.8841, 1.7881, 1.6120, 1.5913, 1.9062], + device='cuda:3'), covar=tensor([0.0881, 0.1997, 0.1343, 0.0973, 0.1284, 0.0637, 0.1098, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0389, 0.0290, 0.0260, 0.0326, 0.0273, 0.0279, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:41:28,994 INFO [train.py:903] (3/4) Epoch 3, batch 250, loss[loss=0.3335, simple_loss=0.3863, pruned_loss=0.1403, over 19602.00 frames. 
], tot_loss[loss=0.3405, simple_loss=0.3833, pruned_loss=0.1489, over 2745693.77 frames. ], batch size: 61, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:41:44,247 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+02 8.729e+02 1.056e+03 1.304e+03 3.760e+03, threshold=2.113e+03, percent-clipped=6.0 +2023-03-31 23:42:19,866 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5531, 4.1379, 2.4011, 3.7672, 1.3317, 3.7743, 3.7355, 3.9879], + device='cuda:3'), covar=tensor([0.0513, 0.0960, 0.1891, 0.0601, 0.3396, 0.0922, 0.0650, 0.0735], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0284, 0.0316, 0.0260, 0.0338, 0.0277, 0.0225, 0.0256], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 23:42:33,069 INFO [train.py:903] (3/4) Epoch 3, batch 300, loss[loss=0.2857, simple_loss=0.3552, pruned_loss=0.1082, over 19661.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3826, pruned_loss=0.1478, over 2997961.16 frames. ], batch size: 58, lr: 2.71e-02, grad_scale: 8.0 +2023-03-31 23:43:34,498 INFO [train.py:903] (3/4) Epoch 3, batch 350, loss[loss=0.3459, simple_loss=0.3913, pruned_loss=0.1503, over 19674.00 frames. ], tot_loss[loss=0.338, simple_loss=0.382, pruned_loss=0.147, over 3187564.45 frames. ], batch size: 55, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:43:40,035 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-03-31 23:43:46,962 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 7.628e+02 9.853e+02 1.217e+03 3.369e+03, threshold=1.971e+03, percent-clipped=3.0 +2023-03-31 23:43:50,258 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-31 23:44:22,035 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8222, 1.1241, 1.3697, 1.6294, 2.5995, 1.1625, 1.6397, 2.5744], + device='cuda:3'), covar=tensor([0.0414, 0.2530, 0.2304, 0.1355, 0.0461, 0.2007, 0.1164, 0.0499], + device='cuda:3'), in_proj_covar=tensor([0.0239, 0.0310, 0.0293, 0.0277, 0.0275, 0.0324, 0.0271, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:44:34,931 INFO [train.py:903] (3/4) Epoch 3, batch 400, loss[loss=0.3475, simple_loss=0.3951, pruned_loss=0.1499, over 19678.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3833, pruned_loss=0.1485, over 3324038.85 frames. ], batch size: 58, lr: 2.70e-02, grad_scale: 8.0 +2023-03-31 23:45:13,841 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4307, 2.2196, 1.5101, 1.9250, 1.5108, 1.5665, 0.1500, 1.2748], + device='cuda:3'), covar=tensor([0.0211, 0.0237, 0.0197, 0.0219, 0.0515, 0.0370, 0.0635, 0.0396], + device='cuda:3'), in_proj_covar=tensor([0.0229, 0.0237, 0.0231, 0.0247, 0.0313, 0.0261, 0.0254, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 23:45:17,180 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14090.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:27,299 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14099.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:45:36,308 INFO [train.py:903] (3/4) Epoch 3, batch 450, loss[loss=0.3064, simple_loss=0.3503, pruned_loss=0.1313, over 19392.00 frames. 
], tot_loss[loss=0.3395, simple_loss=0.3828, pruned_loss=0.1481, over 3445754.60 frames. ], batch size: 47, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:45:52,381 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+02 8.213e+02 1.015e+03 1.206e+03 3.609e+03, threshold=2.029e+03, percent-clipped=6.0 +2023-03-31 23:45:55,996 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14121.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:46:10,159 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-03-31 23:46:11,136 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-03-31 23:46:14,941 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14138.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:46:24,025 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1289, 1.0591, 2.0150, 1.3939, 2.6594, 2.4375, 2.9519, 1.0776], + device='cuda:3'), covar=tensor([0.1453, 0.2314, 0.1205, 0.1221, 0.0899, 0.0944, 0.1085, 0.2025], + device='cuda:3'), in_proj_covar=tensor([0.0385, 0.0423, 0.0388, 0.0373, 0.0456, 0.0369, 0.0531, 0.0391], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:46:38,893 INFO [train.py:903] (3/4) Epoch 3, batch 500, loss[loss=0.2645, simple_loss=0.3172, pruned_loss=0.1059, over 19757.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3822, pruned_loss=0.1481, over 3526096.12 frames. ], batch size: 47, lr: 2.69e-02, grad_scale: 8.0 +2023-03-31 23:47:38,987 INFO [train.py:903] (3/4) Epoch 3, batch 550, loss[loss=0.4224, simple_loss=0.4362, pruned_loss=0.2043, over 13695.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3832, pruned_loss=0.149, over 3577877.44 frames. ], batch size: 136, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:47:47,487 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14214.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:47:51,335 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+02 8.063e+02 9.949e+02 1.307e+03 2.222e+03, threshold=1.990e+03, percent-clipped=3.0 +2023-03-31 23:48:38,811 INFO [train.py:903] (3/4) Epoch 3, batch 600, loss[loss=0.29, simple_loss=0.3408, pruned_loss=0.1196, over 19773.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3833, pruned_loss=0.149, over 3634973.91 frames. ], batch size: 47, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:16,711 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-03-31 23:49:39,184 INFO [train.py:903] (3/4) Epoch 3, batch 650, loss[loss=0.3099, simple_loss=0.3537, pruned_loss=0.133, over 19457.00 frames. ], tot_loss[loss=0.341, simple_loss=0.3835, pruned_loss=0.1492, over 3676830.84 frames. 
], batch size: 49, lr: 2.68e-02, grad_scale: 8.0 +2023-03-31 23:49:54,627 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.103e+02 8.257e+02 1.098e+03 1.322e+03 3.191e+03, threshold=2.197e+03, percent-clipped=10.0 +2023-03-31 23:50:26,638 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3435, 2.5164, 1.9236, 2.3674, 2.1382, 1.5993, 0.2776, 2.1507], + device='cuda:3'), covar=tensor([0.0202, 0.0267, 0.0254, 0.0330, 0.0429, 0.0517, 0.0716, 0.0479], + device='cuda:3'), in_proj_covar=tensor([0.0231, 0.0238, 0.0232, 0.0251, 0.0315, 0.0268, 0.0257, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-03-31 23:50:41,496 INFO [train.py:903] (3/4) Epoch 3, batch 700, loss[loss=0.405, simple_loss=0.4215, pruned_loss=0.1942, over 12913.00 frames. ], tot_loss[loss=0.3421, simple_loss=0.3844, pruned_loss=0.1499, over 3707227.44 frames. ], batch size: 136, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:43,778 INFO [train.py:903] (3/4) Epoch 3, batch 750, loss[loss=0.2948, simple_loss=0.3546, pruned_loss=0.1174, over 19599.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3827, pruned_loss=0.1479, over 3745059.73 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 8.0 +2023-03-31 23:51:56,450 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.798e+02 9.551e+02 1.191e+03 2.807e+03, threshold=1.910e+03, percent-clipped=6.0 +2023-03-31 23:52:11,661 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14431.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:14,913 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14434.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:44,756 INFO [train.py:903] (3/4) Epoch 3, batch 800, loss[loss=0.4176, simple_loss=0.4256, pruned_loss=0.2048, over 13679.00 frames. ], tot_loss[loss=0.3398, simple_loss=0.3833, pruned_loss=0.1481, over 3764077.04 frames. ], batch size: 136, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:52:53,880 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14465.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:52:54,914 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-03-31 23:52:59,823 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14470.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:15,263 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14482.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:53:31,025 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14495.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:42,714 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14505.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:53:44,738 INFO [train.py:903] (3/4) Epoch 3, batch 850, loss[loss=0.3158, simple_loss=0.3549, pruned_loss=0.1384, over 19341.00 frames. ], tot_loss[loss=0.3411, simple_loss=0.3843, pruned_loss=0.1489, over 3783954.59 frames. 
], batch size: 47, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:53:58,372 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.292e+02 8.633e+02 1.105e+03 1.534e+03 3.114e+03, threshold=2.210e+03, percent-clipped=11.0 +2023-03-31 23:54:32,133 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-03-31 23:54:35,856 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14549.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:54:45,560 INFO [train.py:903] (3/4) Epoch 3, batch 900, loss[loss=0.3496, simple_loss=0.4003, pruned_loss=0.1494, over 19687.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.385, pruned_loss=0.1492, over 3792156.28 frames. ], batch size: 59, lr: 2.66e-02, grad_scale: 8.0 +2023-03-31 23:55:03,507 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9425, 4.2993, 4.6334, 4.5877, 1.5684, 4.1728, 3.7935, 4.0890], + device='cuda:3'), covar=tensor([0.0500, 0.0384, 0.0391, 0.0266, 0.3025, 0.0209, 0.0314, 0.0898], + device='cuda:3'), in_proj_covar=tensor([0.0345, 0.0311, 0.0436, 0.0325, 0.0467, 0.0223, 0.0289, 0.0431], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-03-31 23:55:15,260 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14580.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:55:34,857 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14597.0, num_to_drop=1, layers_to_drop={0} +2023-03-31 23:55:46,732 INFO [train.py:903] (3/4) Epoch 3, batch 950, loss[loss=0.3525, simple_loss=0.3881, pruned_loss=0.1584, over 17398.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.3827, pruned_loss=0.1475, over 3810832.89 frames. ], batch size: 101, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:55:46,740 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-03-31 23:56:00,994 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+02 7.381e+02 9.246e+02 1.263e+03 4.500e+03, threshold=1.849e+03, percent-clipped=5.0 +2023-03-31 23:56:21,988 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2155, 1.8448, 1.3932, 1.2461, 1.6839, 1.0294, 0.8658, 1.4613], + device='cuda:3'), covar=tensor([0.0675, 0.0394, 0.0918, 0.0480, 0.0358, 0.1038, 0.0659, 0.0425], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0217, 0.0313, 0.0255, 0.0210, 0.0321, 0.0277, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-03-31 23:56:46,894 INFO [train.py:903] (3/4) Epoch 3, batch 1000, loss[loss=0.3327, simple_loss=0.3873, pruned_loss=0.1391, over 19687.00 frames. ], tot_loss[loss=0.3383, simple_loss=0.3819, pruned_loss=0.1473, over 3831677.95 frames. 
], batch size: 60, lr: 2.65e-02, grad_scale: 4.0 +2023-03-31 23:57:10,686 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6087, 2.5803, 2.2988, 3.6983, 2.6884, 4.3534, 3.8297, 2.4974], + device='cuda:3'), covar=tensor([0.0963, 0.0662, 0.0391, 0.0436, 0.0823, 0.0130, 0.0499, 0.0536], + device='cuda:3'), in_proj_covar=tensor([0.0415, 0.0390, 0.0397, 0.0517, 0.0468, 0.0301, 0.0489, 0.0400], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:57:13,855 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9805, 2.0380, 2.1548, 2.6730, 4.4477, 1.3307, 2.1109, 4.3592], + device='cuda:3'), covar=tensor([0.0184, 0.2092, 0.1920, 0.1223, 0.0315, 0.1979, 0.1083, 0.0305], + device='cuda:3'), in_proj_covar=tensor([0.0234, 0.0301, 0.0292, 0.0278, 0.0273, 0.0318, 0.0262, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-03-31 23:57:38,689 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-03-31 23:57:47,680 INFO [train.py:903] (3/4) Epoch 3, batch 1050, loss[loss=0.3981, simple_loss=0.4333, pruned_loss=0.1815, over 17425.00 frames. ], tot_loss[loss=0.3357, simple_loss=0.3795, pruned_loss=0.146, over 3814030.19 frames. ], batch size: 101, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:01,055 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.305e+02 8.951e+02 1.118e+03 2.421e+03, threshold=1.790e+03, percent-clipped=2.0 +2023-03-31 23:58:17,603 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-03-31 23:58:48,370 INFO [train.py:903] (3/4) Epoch 3, batch 1100, loss[loss=0.2895, simple_loss=0.3366, pruned_loss=0.1212, over 19131.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.3781, pruned_loss=0.1444, over 3826678.08 frames. ], batch size: 42, lr: 2.64e-02, grad_scale: 4.0 +2023-03-31 23:58:49,837 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14758.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:10,923 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14775.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:47,760 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14805.0, num_to_drop=0, layers_to_drop=set() +2023-03-31 23:59:49,456 INFO [train.py:903] (3/4) Epoch 3, batch 1150, loss[loss=0.4636, simple_loss=0.4653, pruned_loss=0.231, over 14015.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3792, pruned_loss=0.1451, over 3821628.53 frames. ], batch size: 138, lr: 2.64e-02, grad_scale: 4.0 +2023-04-01 00:00:03,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+02 7.465e+02 1.021e+03 1.238e+03 3.548e+03, threshold=2.043e+03, percent-clipped=7.0 +2023-04-01 00:00:16,807 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:23,708 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:39,270 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-04-01 00:00:40,937 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:00:45,734 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:00:49,931 INFO [train.py:903] (3/4) Epoch 3, batch 1200, loss[loss=0.2667, simple_loss=0.3326, pruned_loss=0.1005, over 19673.00 frames. ], tot_loss[loss=0.3344, simple_loss=0.3792, pruned_loss=0.1448, over 3829703.78 frames. ], batch size: 53, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:00:55,823 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:08,125 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5825, 3.9653, 4.2573, 4.2514, 1.5687, 3.8612, 3.4775, 3.7625], + device='cuda:3'), covar=tensor([0.0516, 0.0526, 0.0405, 0.0305, 0.3063, 0.0246, 0.0359, 0.0836], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0318, 0.0445, 0.0339, 0.0473, 0.0233, 0.0299, 0.0438], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 00:01:15,100 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14878.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 00:01:17,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 00:01:31,313 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14890.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:01:51,484 INFO [train.py:903] (3/4) Epoch 3, batch 1250, loss[loss=0.4484, simple_loss=0.4467, pruned_loss=0.225, over 13071.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3799, pruned_loss=0.1455, over 3826487.00 frames. ], batch size: 136, lr: 2.63e-02, grad_scale: 8.0 +2023-04-01 00:02:05,937 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+02 7.701e+02 1.002e+03 1.250e+03 2.941e+03, threshold=2.004e+03, percent-clipped=3.0 +2023-04-01 00:02:13,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2426, 2.1179, 1.9276, 3.1621, 2.3510, 3.8367, 3.2058, 1.7661], + device='cuda:3'), covar=tensor([0.1079, 0.0763, 0.0486, 0.0571, 0.0996, 0.0195, 0.0735, 0.0812], + device='cuda:3'), in_proj_covar=tensor([0.0427, 0.0397, 0.0406, 0.0531, 0.0477, 0.0306, 0.0505, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:02:53,118 INFO [train.py:903] (3/4) Epoch 3, batch 1300, loss[loss=0.3147, simple_loss=0.3614, pruned_loss=0.1341, over 19717.00 frames. ], tot_loss[loss=0.3348, simple_loss=0.3791, pruned_loss=0.1452, over 3821235.72 frames. ], batch size: 51, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:02,107 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:03:54,924 INFO [train.py:903] (3/4) Epoch 3, batch 1350, loss[loss=0.3546, simple_loss=0.397, pruned_loss=0.156, over 19575.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3803, pruned_loss=0.1463, over 3824264.76 frames. 
], batch size: 61, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:03:59,605 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0883, 1.9669, 1.7603, 3.0334, 2.0193, 3.1728, 2.6063, 1.7175], + device='cuda:3'), covar=tensor([0.0881, 0.0693, 0.0443, 0.0404, 0.0765, 0.0178, 0.0714, 0.0672], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0399, 0.0405, 0.0523, 0.0467, 0.0305, 0.0497, 0.0405], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:04:10,657 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+02 8.351e+02 9.883e+02 1.225e+03 3.360e+03, threshold=1.977e+03, percent-clipped=2.0 +2023-04-01 00:04:57,772 INFO [train.py:903] (3/4) Epoch 3, batch 1400, loss[loss=0.422, simple_loss=0.4315, pruned_loss=0.2063, over 19649.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3797, pruned_loss=0.1466, over 3811495.06 frames. ], batch size: 58, lr: 2.62e-02, grad_scale: 8.0 +2023-04-01 00:05:29,115 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7299, 1.5846, 1.5892, 1.8422, 3.3510, 1.0986, 1.8805, 3.3861], + device='cuda:3'), covar=tensor([0.0279, 0.2087, 0.2093, 0.1305, 0.0354, 0.1998, 0.1158, 0.0332], + device='cuda:3'), in_proj_covar=tensor([0.0243, 0.0303, 0.0296, 0.0282, 0.0282, 0.0324, 0.0266, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:05:53,869 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15102.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:05:59,104 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 00:06:00,254 INFO [train.py:903] (3/4) Epoch 3, batch 1450, loss[loss=0.3407, simple_loss=0.3867, pruned_loss=0.1474, over 19611.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3782, pruned_loss=0.1448, over 3828582.32 frames. ], batch size: 57, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:06:00,482 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8157, 3.1490, 3.3185, 3.4325, 1.3415, 3.1725, 2.9478, 2.6199], + device='cuda:3'), covar=tensor([0.1239, 0.1134, 0.1128, 0.0881, 0.4542, 0.0708, 0.0752, 0.2074], + device='cuda:3'), in_proj_covar=tensor([0.0347, 0.0312, 0.0442, 0.0333, 0.0461, 0.0228, 0.0292, 0.0425], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 00:06:13,781 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+02 7.923e+02 9.351e+02 1.150e+03 2.880e+03, threshold=1.870e+03, percent-clipped=3.0 +2023-04-01 00:06:49,295 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:06:51,634 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1799, 1.1428, 2.0055, 1.3617, 2.3948, 2.2440, 2.6107, 0.9164], + device='cuda:3'), covar=tensor([0.1330, 0.2145, 0.1109, 0.1238, 0.0913, 0.0968, 0.1069, 0.1996], + device='cuda:3'), in_proj_covar=tensor([0.0401, 0.0440, 0.0406, 0.0386, 0.0475, 0.0380, 0.0557, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:07:01,544 INFO [train.py:903] (3/4) Epoch 3, batch 1500, loss[loss=0.3178, simple_loss=0.3748, pruned_loss=0.1304, over 19754.00 frames. 
], tot_loss[loss=0.3331, simple_loss=0.378, pruned_loss=0.1441, over 3833796.81 frames. ], batch size: 54, lr: 2.61e-02, grad_scale: 8.0 +2023-04-01 00:07:20,447 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15171.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:03,673 INFO [train.py:903] (3/4) Epoch 3, batch 1550, loss[loss=0.2894, simple_loss=0.3518, pruned_loss=0.1135, over 19644.00 frames. ], tot_loss[loss=0.3316, simple_loss=0.3768, pruned_loss=0.1432, over 3838329.16 frames. ], batch size: 53, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:08:12,819 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:18,419 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:20,279 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.825e+02 9.432e+02 1.149e+03 3.008e+03, threshold=1.886e+03, percent-clipped=3.0 +2023-04-01 00:08:21,917 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:52,281 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:08:52,350 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1074, 1.0337, 1.8243, 1.1782, 2.2904, 2.1573, 2.4299, 0.8963], + device='cuda:3'), covar=tensor([0.1366, 0.2224, 0.1106, 0.1307, 0.0918, 0.0996, 0.1077, 0.2014], + device='cuda:3'), in_proj_covar=tensor([0.0398, 0.0439, 0.0402, 0.0386, 0.0475, 0.0384, 0.0560, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:09:08,812 INFO [train.py:903] (3/4) Epoch 3, batch 1600, loss[loss=0.2937, simple_loss=0.3425, pruned_loss=0.1224, over 19754.00 frames. ], tot_loss[loss=0.3308, simple_loss=0.3765, pruned_loss=0.1426, over 3844567.05 frames. ], batch size: 45, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:09:32,820 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 00:09:38,930 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5105, 1.0905, 1.4294, 0.9621, 2.8704, 3.4594, 3.2581, 3.6454], + device='cuda:3'), covar=tensor([0.1466, 0.2908, 0.2924, 0.2294, 0.0392, 0.0109, 0.0224, 0.0117], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0282, 0.0329, 0.0281, 0.0193, 0.0103, 0.0199, 0.0116], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 00:10:02,522 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8953, 3.5136, 2.1868, 3.2363, 1.3134, 3.3719, 3.2337, 3.3311], + device='cuda:3'), covar=tensor([0.0664, 0.1089, 0.1869, 0.0733, 0.3361, 0.0953, 0.0674, 0.0918], + device='cuda:3'), in_proj_covar=tensor([0.0311, 0.0300, 0.0329, 0.0275, 0.0352, 0.0294, 0.0241, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:10:10,177 INFO [train.py:903] (3/4) Epoch 3, batch 1650, loss[loss=0.3904, simple_loss=0.4255, pruned_loss=0.1777, over 19592.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3754, pruned_loss=0.1418, over 3849964.39 frames. 
], batch size: 61, lr: 2.60e-02, grad_scale: 8.0 +2023-04-01 00:10:24,981 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+02 8.096e+02 9.310e+02 1.117e+03 2.889e+03, threshold=1.862e+03, percent-clipped=6.0 +2023-04-01 00:10:25,649 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-04-01 00:11:11,888 INFO [train.py:903] (3/4) Epoch 3, batch 1700, loss[loss=0.3288, simple_loss=0.3792, pruned_loss=0.1393, over 17495.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.3754, pruned_loss=0.1422, over 3839645.34 frames. ], batch size: 101, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:11:50,336 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 00:12:04,725 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-01 00:12:13,187 INFO [train.py:903] (3/4) Epoch 3, batch 1750, loss[loss=0.2944, simple_loss=0.358, pruned_loss=0.1154, over 19768.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3754, pruned_loss=0.1418, over 3844451.99 frames. ], batch size: 54, lr: 2.59e-02, grad_scale: 8.0 +2023-04-01 00:12:30,184 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+02 8.359e+02 1.065e+03 1.276e+03 4.198e+03, threshold=2.129e+03, percent-clipped=6.0 +2023-04-01 00:13:17,309 INFO [train.py:903] (3/4) Epoch 3, batch 1800, loss[loss=0.3146, simple_loss=0.3699, pruned_loss=0.1297, over 18722.00 frames. ], tot_loss[loss=0.3289, simple_loss=0.3748, pruned_loss=0.1415, over 3839124.53 frames. ], batch size: 74, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:13:37,281 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15473.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:07,915 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:14:14,382 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 00:14:17,663 INFO [train.py:903] (3/4) Epoch 3, batch 1850, loss[loss=0.3139, simple_loss=0.3724, pruned_loss=0.1276, over 19655.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3741, pruned_loss=0.1405, over 3818533.80 frames. ], batch size: 58, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:14:23,557 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2482, 1.1446, 1.9589, 1.4090, 2.7250, 2.4212, 3.0551, 1.0564], + device='cuda:3'), covar=tensor([0.1379, 0.2232, 0.1160, 0.1246, 0.0839, 0.0954, 0.0864, 0.2074], + device='cuda:3'), in_proj_covar=tensor([0.0392, 0.0429, 0.0397, 0.0376, 0.0464, 0.0378, 0.0543, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:14:32,125 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+02 7.448e+02 9.407e+02 1.169e+03 2.273e+03, threshold=1.881e+03, percent-clipped=1.0 +2023-04-01 00:14:50,377 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 00:15:18,059 INFO [train.py:903] (3/4) Epoch 3, batch 1900, loss[loss=0.3989, simple_loss=0.4211, pruned_loss=0.1883, over 19645.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3762, pruned_loss=0.142, over 3820025.84 frames. 
], batch size: 55, lr: 2.58e-02, grad_scale: 8.0 +2023-04-01 00:15:18,213 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:15:18,436 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2387, 1.2107, 0.9631, 1.2029, 1.0939, 1.1632, 0.9850, 1.1089], + device='cuda:3'), covar=tensor([0.0815, 0.1010, 0.1299, 0.0688, 0.0876, 0.0529, 0.0933, 0.0674], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0386, 0.0293, 0.0260, 0.0322, 0.0267, 0.0281, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:15:36,177 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 00:15:41,726 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 00:16:04,626 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 00:16:19,661 INFO [train.py:903] (3/4) Epoch 3, batch 1950, loss[loss=0.3838, simple_loss=0.4162, pruned_loss=0.1757, over 19379.00 frames. ], tot_loss[loss=0.331, simple_loss=0.3769, pruned_loss=0.1426, over 3818247.03 frames. ], batch size: 70, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:16:36,884 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+02 7.789e+02 9.617e+02 1.296e+03 2.448e+03, threshold=1.923e+03, percent-clipped=3.0 +2023-04-01 00:16:54,042 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5948, 1.3566, 1.2396, 1.5985, 1.3851, 1.3937, 1.2209, 1.5922], + device='cuda:3'), covar=tensor([0.0928, 0.1465, 0.1475, 0.0897, 0.1127, 0.0638, 0.1092, 0.0701], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0386, 0.0289, 0.0255, 0.0322, 0.0263, 0.0279, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:16:54,064 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1296, 1.9550, 1.4589, 1.3796, 1.7171, 0.8873, 0.8172, 1.6975], + device='cuda:3'), covar=tensor([0.0804, 0.0469, 0.0979, 0.0525, 0.0486, 0.1348, 0.0787, 0.0386], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0228, 0.0309, 0.0254, 0.0220, 0.0306, 0.0278, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:17:10,258 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-04-01 00:17:22,860 INFO [train.py:903] (3/4) Epoch 3, batch 2000, loss[loss=0.2815, simple_loss=0.3365, pruned_loss=0.1133, over 19625.00 frames. ], tot_loss[loss=0.3284, simple_loss=0.3749, pruned_loss=0.1409, over 3832165.26 frames. ], batch size: 50, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:17:41,421 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:18:20,116 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 00:18:23,593 INFO [train.py:903] (3/4) Epoch 3, batch 2050, loss[loss=0.3014, simple_loss=0.3647, pruned_loss=0.119, over 19272.00 frames. ], tot_loss[loss=0.3284, simple_loss=0.3749, pruned_loss=0.141, over 3823326.21 frames. 
], batch size: 66, lr: 2.57e-02, grad_scale: 8.0 +2023-04-01 00:18:38,231 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+02 7.410e+02 9.269e+02 1.172e+03 2.915e+03, threshold=1.854e+03, percent-clipped=8.0 +2023-04-01 00:18:38,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 00:18:39,612 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 00:18:58,658 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 00:19:06,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0397, 1.7515, 1.2724, 1.2390, 1.4962, 0.8165, 0.6894, 1.4817], + device='cuda:3'), covar=tensor([0.0590, 0.0388, 0.0944, 0.0483, 0.0412, 0.1137, 0.0767, 0.0347], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0223, 0.0312, 0.0255, 0.0214, 0.0308, 0.0276, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:19:25,007 INFO [train.py:903] (3/4) Epoch 3, batch 2100, loss[loss=0.3012, simple_loss=0.3607, pruned_loss=0.1209, over 19527.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3765, pruned_loss=0.1421, over 3807187.53 frames. ], batch size: 54, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:19:52,226 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 00:20:13,818 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 00:20:20,264 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5879, 2.4442, 1.5892, 1.8012, 2.0442, 1.0779, 1.1922, 1.5379], + device='cuda:3'), covar=tensor([0.0965, 0.0411, 0.0915, 0.0597, 0.0521, 0.1254, 0.0934, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0224, 0.0315, 0.0258, 0.0215, 0.0308, 0.0278, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:20:25,858 INFO [train.py:903] (3/4) Epoch 3, batch 2150, loss[loss=0.3557, simple_loss=0.3876, pruned_loss=0.1619, over 19724.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3751, pruned_loss=0.141, over 3814864.98 frames. ], batch size: 51, lr: 2.56e-02, grad_scale: 8.0 +2023-04-01 00:20:42,353 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 7.356e+02 9.022e+02 1.284e+03 2.686e+03, threshold=1.804e+03, percent-clipped=4.0 +2023-04-01 00:21:28,857 INFO [train.py:903] (3/4) Epoch 3, batch 2200, loss[loss=0.3176, simple_loss=0.376, pruned_loss=0.1296, over 19669.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3743, pruned_loss=0.1405, over 3817072.29 frames. ], batch size: 58, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:23,678 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:22:30,393 INFO [train.py:903] (3/4) Epoch 3, batch 2250, loss[loss=0.287, simple_loss=0.3527, pruned_loss=0.1107, over 18761.00 frames. ], tot_loss[loss=0.3284, simple_loss=0.3749, pruned_loss=0.1409, over 3808481.29 frames. 
], batch size: 74, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:22:35,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9509, 1.4287, 1.2243, 1.8795, 1.5369, 2.0942, 2.1427, 2.0420], + device='cuda:3'), covar=tensor([0.0734, 0.1188, 0.1307, 0.1179, 0.1342, 0.0773, 0.1079, 0.0635], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0289, 0.0276, 0.0314, 0.0325, 0.0262, 0.0293, 0.0256], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 00:22:44,789 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+02 7.012e+02 9.226e+02 1.146e+03 2.721e+03, threshold=1.845e+03, percent-clipped=4.0 +2023-04-01 00:22:51,992 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7042, 4.2067, 2.4187, 3.7307, 1.2501, 3.9065, 3.7154, 4.0549], + device='cuda:3'), covar=tensor([0.0568, 0.1408, 0.2186, 0.0735, 0.4261, 0.1015, 0.0819, 0.0785], + device='cuda:3'), in_proj_covar=tensor([0.0309, 0.0297, 0.0331, 0.0270, 0.0342, 0.0288, 0.0238, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:22:55,698 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:00,327 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8422, 1.7500, 1.4792, 1.7283, 1.6408, 1.6849, 1.3845, 1.7941], + device='cuda:3'), covar=tensor([0.0779, 0.1462, 0.1288, 0.1096, 0.1198, 0.0538, 0.0999, 0.0659], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0380, 0.0280, 0.0255, 0.0314, 0.0261, 0.0270, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:23:08,945 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7812, 1.8737, 1.2286, 1.3543, 1.3235, 1.3830, 0.0493, 0.5373], + device='cuda:3'), covar=tensor([0.0213, 0.0193, 0.0149, 0.0155, 0.0431, 0.0211, 0.0382, 0.0364], + device='cuda:3'), in_proj_covar=tensor([0.0238, 0.0234, 0.0237, 0.0257, 0.0314, 0.0256, 0.0249, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:23:26,961 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:23:31,839 INFO [train.py:903] (3/4) Epoch 3, batch 2300, loss[loss=0.3676, simple_loss=0.4049, pruned_loss=0.1651, over 19541.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3746, pruned_loss=0.141, over 3802829.92 frames. ], batch size: 56, lr: 2.55e-02, grad_scale: 8.0 +2023-04-01 00:23:44,531 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 00:23:44,947 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6345, 1.4856, 1.3242, 1.5120, 1.3017, 1.4374, 1.2512, 1.5399], + device='cuda:3'), covar=tensor([0.0826, 0.1180, 0.1159, 0.0847, 0.1055, 0.0556, 0.0944, 0.0625], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0385, 0.0284, 0.0255, 0.0320, 0.0261, 0.0269, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:23:47,093 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2300, 1.1736, 1.8917, 1.3616, 2.6996, 2.5788, 2.8214, 1.1297], + device='cuda:3'), covar=tensor([0.1337, 0.2235, 0.1197, 0.1177, 0.0847, 0.0881, 0.1137, 0.1999], + device='cuda:3'), in_proj_covar=tensor([0.0398, 0.0437, 0.0397, 0.0378, 0.0466, 0.0389, 0.0555, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:24:02,632 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 00:24:33,433 INFO [train.py:903] (3/4) Epoch 3, batch 2350, loss[loss=0.3813, simple_loss=0.4215, pruned_loss=0.1705, over 19683.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3725, pruned_loss=0.1392, over 3815381.29 frames. ], batch size: 60, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:24:48,808 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 7.519e+02 9.138e+02 1.115e+03 3.205e+03, threshold=1.828e+03, percent-clipped=8.0 +2023-04-01 00:24:49,961 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:25:15,424 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 00:25:17,324 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 00:25:31,112 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 00:25:34,313 INFO [train.py:903] (3/4) Epoch 3, batch 2400, loss[loss=0.3302, simple_loss=0.3815, pruned_loss=0.1395, over 18817.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3752, pruned_loss=0.1411, over 3822376.52 frames. ], batch size: 74, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:33,065 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:26:37,321 INFO [train.py:903] (3/4) Epoch 3, batch 2450, loss[loss=0.3491, simple_loss=0.3967, pruned_loss=0.1508, over 19601.00 frames. ], tot_loss[loss=0.3291, simple_loss=0.3754, pruned_loss=0.1414, over 3813041.67 frames. 
], batch size: 57, lr: 2.54e-02, grad_scale: 8.0 +2023-04-01 00:26:51,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+02 8.374e+02 9.822e+02 1.305e+03 3.634e+03, threshold=1.964e+03, percent-clipped=9.0 +2023-04-01 00:27:07,886 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0990, 2.9966, 1.6920, 2.2140, 1.7709, 1.7454, 0.2951, 2.1255], + device='cuda:3'), covar=tensor([0.0260, 0.0220, 0.0249, 0.0274, 0.0448, 0.0439, 0.0630, 0.0440], + device='cuda:3'), in_proj_covar=tensor([0.0244, 0.0240, 0.0242, 0.0265, 0.0320, 0.0266, 0.0259, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:27:38,194 INFO [train.py:903] (3/4) Epoch 3, batch 2500, loss[loss=0.3364, simple_loss=0.3714, pruned_loss=0.1506, over 19615.00 frames. ], tot_loss[loss=0.3273, simple_loss=0.374, pruned_loss=0.1403, over 3823051.30 frames. ], batch size: 50, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:27:50,922 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:28:40,057 INFO [train.py:903] (3/4) Epoch 3, batch 2550, loss[loss=0.4258, simple_loss=0.443, pruned_loss=0.2043, over 13259.00 frames. ], tot_loss[loss=0.3286, simple_loss=0.3749, pruned_loss=0.1411, over 3803517.65 frames. ], batch size: 135, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:28:56,248 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+02 7.641e+02 9.209e+02 1.283e+03 2.881e+03, threshold=1.842e+03, percent-clipped=1.0 +2023-04-01 00:29:28,552 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16245.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:29:36,516 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 00:29:37,933 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2998, 3.8017, 3.9474, 3.9249, 1.3245, 3.5174, 3.3089, 3.5009], + device='cuda:3'), covar=tensor([0.0851, 0.0649, 0.0596, 0.0429, 0.3428, 0.0333, 0.0444, 0.1174], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0332, 0.0450, 0.0345, 0.0469, 0.0241, 0.0295, 0.0434], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 00:29:44,015 INFO [train.py:903] (3/4) Epoch 3, batch 2600, loss[loss=0.3237, simple_loss=0.376, pruned_loss=0.1357, over 19648.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3764, pruned_loss=0.1425, over 3806612.61 frames. ], batch size: 55, lr: 2.53e-02, grad_scale: 8.0 +2023-04-01 00:30:46,335 INFO [train.py:903] (3/4) Epoch 3, batch 2650, loss[loss=0.3282, simple_loss=0.3863, pruned_loss=0.1351, over 19501.00 frames. ], tot_loss[loss=0.3289, simple_loss=0.3753, pruned_loss=0.1412, over 3809879.57 frames. ], batch size: 64, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:00,229 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+02 7.570e+02 8.863e+02 1.283e+03 4.568e+03, threshold=1.773e+03, percent-clipped=9.0 +2023-04-01 00:31:06,929 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 00:31:47,149 INFO [train.py:903] (3/4) Epoch 3, batch 2700, loss[loss=0.2655, simple_loss=0.3327, pruned_loss=0.0992, over 19736.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3732, pruned_loss=0.1397, over 3809467.77 frames. 
], batch size: 51, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:31:51,958 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:31:54,894 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:01,321 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-04-01 00:32:13,166 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9132, 4.1001, 4.4828, 4.4567, 1.4437, 4.0503, 3.7116, 3.9628], + device='cuda:3'), covar=tensor([0.0502, 0.0495, 0.0396, 0.0248, 0.3135, 0.0232, 0.0360, 0.0860], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0337, 0.0463, 0.0349, 0.0476, 0.0243, 0.0300, 0.0449], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 00:32:26,173 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16389.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:32:47,349 INFO [train.py:903] (3/4) Epoch 3, batch 2750, loss[loss=0.3286, simple_loss=0.3771, pruned_loss=0.14, over 19614.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3734, pruned_loss=0.1395, over 3816517.09 frames. ], batch size: 57, lr: 2.52e-02, grad_scale: 8.0 +2023-04-01 00:33:01,694 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.986e+02 7.672e+02 9.838e+02 1.209e+03 2.463e+03, threshold=1.968e+03, percent-clipped=5.0 +2023-04-01 00:33:05,406 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7757, 1.2949, 1.2740, 1.8878, 1.4695, 1.9079, 2.1325, 1.6836], + device='cuda:3'), covar=tensor([0.0843, 0.1327, 0.1437, 0.1216, 0.1350, 0.0906, 0.1020, 0.0860], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0287, 0.0273, 0.0312, 0.0317, 0.0263, 0.0292, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 00:33:35,977 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:33:46,966 INFO [train.py:903] (3/4) Epoch 3, batch 2800, loss[loss=0.2832, simple_loss=0.3299, pruned_loss=0.1183, over 17359.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3747, pruned_loss=0.1409, over 3808891.76 frames. ], batch size: 38, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:13,570 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:34:48,738 INFO [train.py:903] (3/4) Epoch 3, batch 2850, loss[loss=0.3912, simple_loss=0.4151, pruned_loss=0.1837, over 13087.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3752, pruned_loss=0.1413, over 3805509.12 frames. ], batch size: 137, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:34:54,279 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16511.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:35:03,282 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+02 7.862e+02 1.093e+03 1.433e+03 3.382e+03, threshold=2.185e+03, percent-clipped=3.0 +2023-04-01 00:35:47,689 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-01 00:35:49,902 INFO [train.py:903] (3/4) Epoch 3, batch 2900, loss[loss=0.2779, simple_loss=0.3291, pruned_loss=0.1134, over 19756.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3755, pruned_loss=0.1416, over 3806942.06 frames. ], batch size: 47, lr: 2.51e-02, grad_scale: 8.0 +2023-04-01 00:35:52,752 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9651, 2.0299, 1.5179, 1.5618, 1.4318, 1.5839, 0.1948, 1.0670], + device='cuda:3'), covar=tensor([0.0240, 0.0201, 0.0145, 0.0185, 0.0404, 0.0249, 0.0475, 0.0357], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0244, 0.0242, 0.0259, 0.0318, 0.0275, 0.0258, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:35:55,896 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0154, 2.0259, 1.9685, 2.5118, 4.6119, 1.4557, 2.2448, 4.2817], + device='cuda:3'), covar=tensor([0.0168, 0.1981, 0.1808, 0.1124, 0.0247, 0.1816, 0.1064, 0.0311], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0307, 0.0296, 0.0278, 0.0286, 0.0320, 0.0272, 0.0283], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:35:57,090 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16562.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:36:37,491 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8830, 1.9794, 1.7321, 3.0013, 2.1555, 3.0746, 2.7270, 1.8215], + device='cuda:3'), covar=tensor([0.1166, 0.0772, 0.0508, 0.0471, 0.0920, 0.0249, 0.0762, 0.0777], + device='cuda:3'), in_proj_covar=tensor([0.0450, 0.0419, 0.0424, 0.0555, 0.0500, 0.0332, 0.0513, 0.0422], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:36:51,764 INFO [train.py:903] (3/4) Epoch 3, batch 2950, loss[loss=0.3393, simple_loss=0.3967, pruned_loss=0.141, over 19691.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.376, pruned_loss=0.1419, over 3807801.67 frames. ], batch size: 59, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:02,792 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16616.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:07,338 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 7.271e+02 9.405e+02 1.170e+03 2.853e+03, threshold=1.881e+03, percent-clipped=4.0 +2023-04-01 00:37:15,876 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:33,996 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:37:43,775 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9793, 0.8927, 1.2608, 1.0899, 1.7181, 1.5049, 1.8808, 0.8249], + device='cuda:3'), covar=tensor([0.1182, 0.1886, 0.1023, 0.1153, 0.0712, 0.0886, 0.0713, 0.1748], + device='cuda:3'), in_proj_covar=tensor([0.0403, 0.0438, 0.0402, 0.0384, 0.0484, 0.0389, 0.0562, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:37:52,200 INFO [train.py:903] (3/4) Epoch 3, batch 3000, loss[loss=0.2683, simple_loss=0.3282, pruned_loss=0.1042, over 19487.00 frames. 
], tot_loss[loss=0.3274, simple_loss=0.3742, pruned_loss=0.1403, over 3822627.72 frames. ], batch size: 49, lr: 2.50e-02, grad_scale: 16.0 +2023-04-01 00:37:52,200 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 00:38:05,259 INFO [train.py:937] (3/4) Epoch 3, validation: loss=0.231, simple_loss=0.3246, pruned_loss=0.06867, over 944034.00 frames. +2023-04-01 00:38:05,260 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18419MB +2023-04-01 00:38:08,702 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 00:39:07,683 INFO [train.py:903] (3/4) Epoch 3, batch 3050, loss[loss=0.354, simple_loss=0.4009, pruned_loss=0.1536, over 19134.00 frames. ], tot_loss[loss=0.328, simple_loss=0.3746, pruned_loss=0.1408, over 3812955.80 frames. ], batch size: 69, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:39:22,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+02 7.860e+02 1.014e+03 1.267e+03 1.851e+03, threshold=2.027e+03, percent-clipped=0.0 +2023-04-01 00:39:34,424 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5308, 1.1428, 1.6907, 1.2777, 2.6312, 3.3640, 3.2059, 3.6058], + device='cuda:3'), covar=tensor([0.1327, 0.2594, 0.2427, 0.1884, 0.0423, 0.0123, 0.0203, 0.0092], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0277, 0.0329, 0.0271, 0.0195, 0.0106, 0.0197, 0.0111], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 00:39:38,697 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16733.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:39:40,026 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:08,118 INFO [train.py:903] (3/4) Epoch 3, batch 3100, loss[loss=0.3103, simple_loss=0.3541, pruned_loss=0.1333, over 19370.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3728, pruned_loss=0.1388, over 3825892.34 frames. ], batch size: 47, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:40:11,507 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:40:34,232 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 00:40:47,274 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-01 00:41:08,266 INFO [train.py:903] (3/4) Epoch 3, batch 3150, loss[loss=0.3421, simple_loss=0.3815, pruned_loss=0.1514, over 19592.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3716, pruned_loss=0.1385, over 3830013.69 frames. ], batch size: 52, lr: 2.49e-02, grad_scale: 16.0 +2023-04-01 00:41:21,856 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16818.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:22,529 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.096e+02 7.784e+02 9.820e+02 1.270e+03 2.923e+03, threshold=1.964e+03, percent-clipped=2.0 +2023-04-01 00:41:31,798 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 00:41:37,158 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-04-01 00:41:51,861 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:41:57,391 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:07,316 INFO [train.py:903] (3/4) Epoch 3, batch 3200, loss[loss=0.3116, simple_loss=0.3595, pruned_loss=0.1319, over 19723.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3726, pruned_loss=0.1389, over 3814103.57 frames. ], batch size: 51, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:42:39,216 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16882.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:42:43,621 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16886.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:43:07,061 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-01 00:43:08,568 INFO [train.py:903] (3/4) Epoch 3, batch 3250, loss[loss=0.3875, simple_loss=0.4208, pruned_loss=0.1771, over 18666.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3733, pruned_loss=0.1401, over 3807241.74 frames. ], batch size: 74, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:43:09,806 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:43:24,371 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+02 8.507e+02 1.021e+03 1.288e+03 2.328e+03, threshold=2.042e+03, percent-clipped=1.0 +2023-04-01 00:43:29,325 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:44:09,582 INFO [train.py:903] (3/4) Epoch 3, batch 3300, loss[loss=0.2728, simple_loss=0.3218, pruned_loss=0.1119, over 19777.00 frames. ], tot_loss[loss=0.3253, simple_loss=0.3721, pruned_loss=0.1392, over 3810580.31 frames. ], batch size: 46, lr: 2.48e-02, grad_scale: 8.0 +2023-04-01 00:44:16,152 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 00:44:29,259 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6192, 2.9516, 2.0625, 2.3026, 2.2761, 1.9964, 0.4330, 2.1950], + device='cuda:3'), covar=tensor([0.0176, 0.0203, 0.0240, 0.0257, 0.0398, 0.0358, 0.0527, 0.0368], + device='cuda:3'), in_proj_covar=tensor([0.0244, 0.0240, 0.0233, 0.0257, 0.0311, 0.0257, 0.0245, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:45:10,101 INFO [train.py:903] (3/4) Epoch 3, batch 3350, loss[loss=0.3214, simple_loss=0.3845, pruned_loss=0.1292, over 19576.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3728, pruned_loss=0.1392, over 3812843.48 frames. 
], batch size: 61, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:45:24,559 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+02 7.847e+02 9.419e+02 1.175e+03 3.710e+03, threshold=1.884e+03, percent-clipped=5.0 +2023-04-01 00:45:47,671 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1486, 1.1189, 1.7372, 1.3152, 2.3296, 2.0683, 2.5280, 0.9566], + device='cuda:3'), covar=tensor([0.1473, 0.2380, 0.1168, 0.1310, 0.0903, 0.1115, 0.1072, 0.2103], + device='cuda:3'), in_proj_covar=tensor([0.0405, 0.0445, 0.0408, 0.0385, 0.0490, 0.0389, 0.0566, 0.0408], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:46:10,041 INFO [train.py:903] (3/4) Epoch 3, batch 3400, loss[loss=0.324, simple_loss=0.3796, pruned_loss=0.1342, over 19707.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3739, pruned_loss=0.14, over 3811140.30 frames. ], batch size: 59, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:08,457 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:47:12,304 INFO [train.py:903] (3/4) Epoch 3, batch 3450, loss[loss=0.3249, simple_loss=0.3692, pruned_loss=0.1403, over 19849.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3736, pruned_loss=0.1398, over 3808426.04 frames. ], batch size: 52, lr: 2.47e-02, grad_scale: 8.0 +2023-04-01 00:47:14,332 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 00:47:24,041 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5954, 2.3956, 2.2983, 3.3322, 2.3915, 3.9013, 3.4276, 2.1799], + device='cuda:3'), covar=tensor([0.1152, 0.0839, 0.0510, 0.0538, 0.1007, 0.0224, 0.0762, 0.0786], + device='cuda:3'), in_proj_covar=tensor([0.0459, 0.0435, 0.0433, 0.0576, 0.0512, 0.0343, 0.0536, 0.0430], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:47:28,177 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+02 8.544e+02 1.014e+03 1.278e+03 1.988e+03, threshold=2.028e+03, percent-clipped=3.0 +2023-04-01 00:47:39,689 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:48:12,433 INFO [train.py:903] (3/4) Epoch 3, batch 3500, loss[loss=0.3384, simple_loss=0.3887, pruned_loss=0.1441, over 19574.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3755, pruned_loss=0.142, over 3801734.62 frames. ], batch size: 61, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,364 INFO [train.py:903] (3/4) Epoch 3, batch 3550, loss[loss=0.2947, simple_loss=0.3415, pruned_loss=0.124, over 19400.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3746, pruned_loss=0.1413, over 3805596.67 frames. 
], batch size: 47, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:49:12,668 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:49:26,830 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.377e+02 8.429e+02 1.068e+03 1.302e+03 2.755e+03, threshold=2.137e+03, percent-clipped=4.0 +2023-04-01 00:49:40,883 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17230.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:50:11,404 INFO [train.py:903] (3/4) Epoch 3, batch 3600, loss[loss=0.3031, simple_loss=0.344, pruned_loss=0.1311, over 19357.00 frames. ], tot_loss[loss=0.3296, simple_loss=0.3755, pruned_loss=0.1418, over 3799521.28 frames. ], batch size: 47, lr: 2.46e-02, grad_scale: 8.0 +2023-04-01 00:50:24,843 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:50:42,786 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6118, 1.3340, 1.2124, 1.5918, 1.3929, 1.3862, 1.2308, 1.6436], + device='cuda:3'), covar=tensor([0.0938, 0.1372, 0.1476, 0.0980, 0.1168, 0.0681, 0.1175, 0.0758], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0379, 0.0286, 0.0253, 0.0320, 0.0265, 0.0277, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 00:51:11,853 INFO [train.py:903] (3/4) Epoch 3, batch 3650, loss[loss=0.2712, simple_loss=0.3235, pruned_loss=0.1095, over 19759.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3741, pruned_loss=0.1405, over 3804177.26 frames. ], batch size: 46, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:51:27,519 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.083e+02 7.894e+02 9.345e+02 1.119e+03 1.949e+03, threshold=1.869e+03, percent-clipped=0.0 +2023-04-01 00:51:57,303 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17345.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:52:12,710 INFO [train.py:903] (3/4) Epoch 3, batch 3700, loss[loss=0.2746, simple_loss=0.3174, pruned_loss=0.1159, over 19755.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3732, pruned_loss=0.1403, over 3813818.37 frames. ], batch size: 45, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:52:18,213 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5449, 4.0815, 2.4452, 3.7280, 1.1368, 3.9545, 3.7480, 3.9163], + device='cuda:3'), covar=tensor([0.0516, 0.1186, 0.1955, 0.0586, 0.3856, 0.0725, 0.0590, 0.0795], + device='cuda:3'), in_proj_covar=tensor([0.0320, 0.0299, 0.0336, 0.0276, 0.0350, 0.0298, 0.0248, 0.0291], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:52:32,171 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. 
limit=2.0 +2023-04-01 00:52:39,373 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:52:41,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5201, 1.1964, 1.5777, 0.8765, 2.6221, 2.8813, 2.7373, 3.0648], + device='cuda:3'), covar=tensor([0.1128, 0.2413, 0.2325, 0.1899, 0.0326, 0.0129, 0.0231, 0.0115], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0275, 0.0324, 0.0269, 0.0191, 0.0107, 0.0199, 0.0113], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 00:52:42,863 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:53:05,594 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17401.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:53:12,847 INFO [train.py:903] (3/4) Epoch 3, batch 3750, loss[loss=0.3122, simple_loss=0.3713, pruned_loss=0.1265, over 19609.00 frames. ], tot_loss[loss=0.3272, simple_loss=0.3732, pruned_loss=0.1406, over 3832389.58 frames. ], batch size: 57, lr: 2.45e-02, grad_scale: 8.0 +2023-04-01 00:53:27,567 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+02 9.192e+02 1.060e+03 1.489e+03 3.397e+03, threshold=2.120e+03, percent-clipped=7.0 +2023-04-01 00:54:12,635 INFO [train.py:903] (3/4) Epoch 3, batch 3800, loss[loss=0.2893, simple_loss=0.3492, pruned_loss=0.1147, over 19840.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3721, pruned_loss=0.1395, over 3844601.34 frames. ], batch size: 52, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:54:23,098 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6681, 1.3619, 2.0137, 1.7137, 2.8218, 2.5917, 2.9024, 1.7160], + device='cuda:3'), covar=tensor([0.1062, 0.2018, 0.1092, 0.0982, 0.0656, 0.0761, 0.0777, 0.1511], + device='cuda:3'), in_proj_covar=tensor([0.0409, 0.0450, 0.0411, 0.0382, 0.0485, 0.0397, 0.0573, 0.0401], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 00:54:45,633 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 00:55:11,732 INFO [train.py:903] (3/4) Epoch 3, batch 3850, loss[loss=0.3907, simple_loss=0.4231, pruned_loss=0.1791, over 13198.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3733, pruned_loss=0.1402, over 3833709.29 frames. 
], batch size: 137, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:55:28,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+02 7.944e+02 9.720e+02 1.209e+03 3.103e+03, threshold=1.944e+03, percent-clipped=2.0 +2023-04-01 00:55:40,055 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3333, 1.0664, 1.0925, 1.5900, 1.3050, 1.4384, 1.7597, 1.3538], + device='cuda:3'), covar=tensor([0.1076, 0.1456, 0.1409, 0.1089, 0.1203, 0.1169, 0.1013, 0.0933], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0280, 0.0272, 0.0313, 0.0319, 0.0258, 0.0285, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 00:56:04,059 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17551.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:56:05,718 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.53 vs. limit=5.0 +2023-04-01 00:56:12,763 INFO [train.py:903] (3/4) Epoch 3, batch 3900, loss[loss=0.2805, simple_loss=0.3412, pruned_loss=0.1099, over 19764.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3733, pruned_loss=0.1401, over 3825355.44 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:05,251 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17601.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:57:11,566 INFO [train.py:903] (3/4) Epoch 3, batch 3950, loss[loss=0.3431, simple_loss=0.39, pruned_loss=0.1481, over 19519.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3739, pruned_loss=0.1402, over 3827621.92 frames. ], batch size: 64, lr: 2.44e-02, grad_scale: 8.0 +2023-04-01 00:57:18,165 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 00:57:27,261 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+02 7.211e+02 9.089e+02 1.148e+03 2.193e+03, threshold=1.818e+03, percent-clipped=2.0 +2023-04-01 00:57:34,470 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17626.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 00:57:51,670 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:12,126 INFO [train.py:903] (3/4) Epoch 3, batch 4000, loss[loss=0.2803, simple_loss=0.3365, pruned_loss=0.112, over 19471.00 frames. ], tot_loss[loss=0.3233, simple_loss=0.3706, pruned_loss=0.138, over 3834079.64 frames. ], batch size: 49, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:58:20,416 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:22,675 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:58:50,785 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4209, 1.0965, 1.4468, 1.0257, 2.5639, 3.0875, 3.0636, 3.3605], + device='cuda:3'), covar=tensor([0.1326, 0.2602, 0.2685, 0.1945, 0.0410, 0.0118, 0.0210, 0.0100], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0279, 0.0323, 0.0272, 0.0195, 0.0107, 0.0201, 0.0113], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 00:58:59,484 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 00:59:11,487 INFO [train.py:903] (3/4) Epoch 3, batch 4050, loss[loss=0.3921, simple_loss=0.4201, pruned_loss=0.182, over 13854.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3715, pruned_loss=0.1381, over 3815990.96 frames. ], batch size: 136, lr: 2.43e-02, grad_scale: 8.0 +2023-04-01 00:59:25,594 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:28,914 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+02 7.398e+02 9.459e+02 1.250e+03 4.446e+03, threshold=1.892e+03, percent-clipped=10.0 +2023-04-01 00:59:32,714 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 00:59:37,205 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1585, 2.8607, 2.0572, 2.7279, 1.0576, 2.7863, 2.5901, 2.7340], + device='cuda:3'), covar=tensor([0.0940, 0.1429, 0.1982, 0.0859, 0.3556, 0.1080, 0.0819, 0.1085], + device='cuda:3'), in_proj_covar=tensor([0.0319, 0.0296, 0.0336, 0.0273, 0.0349, 0.0297, 0.0243, 0.0286], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 00:59:57,742 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17745.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:00:01,032 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17748.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:00:12,214 INFO [train.py:903] (3/4) Epoch 3, batch 4100, loss[loss=0.3553, simple_loss=0.3874, pruned_loss=0.1616, over 19658.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3728, pruned_loss=0.139, over 3820766.75 frames. ], batch size: 60, lr: 2.43e-02, grad_scale: 4.0 +2023-04-01 01:00:48,445 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 01:01:11,682 INFO [train.py:903] (3/4) Epoch 3, batch 4150, loss[loss=0.2855, simple_loss=0.3288, pruned_loss=0.1211, over 19719.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3714, pruned_loss=0.138, over 3827370.19 frames. ], batch size: 45, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:01:28,531 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+02 7.784e+02 9.706e+02 1.186e+03 3.618e+03, threshold=1.941e+03, percent-clipped=3.0 +2023-04-01 01:01:49,977 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:02:10,744 INFO [train.py:903] (3/4) Epoch 3, batch 4200, loss[loss=0.3755, simple_loss=0.4054, pruned_loss=0.1728, over 19775.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3709, pruned_loss=0.1373, over 3823372.14 frames. ], batch size: 56, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:02:14,260 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17860.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:02:14,914 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 01:03:09,269 INFO [train.py:903] (3/4) Epoch 3, batch 4250, loss[loss=0.3048, simple_loss=0.3428, pruned_loss=0.1334, over 19769.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3721, pruned_loss=0.1383, over 3823045.43 frames. 
], batch size: 47, lr: 2.42e-02, grad_scale: 4.0 +2023-04-01 01:03:26,776 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.782e+02 8.334e+02 9.808e+02 1.259e+03 2.577e+03, threshold=1.962e+03, percent-clipped=5.0 +2023-04-01 01:03:26,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 01:03:28,299 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:03:37,999 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 01:03:57,696 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:04:08,380 INFO [train.py:903] (3/4) Epoch 3, batch 4300, loss[loss=0.3549, simple_loss=0.4018, pruned_loss=0.1541, over 17477.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3721, pruned_loss=0.1383, over 3830969.44 frames. ], batch size: 101, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:04:35,078 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:05:06,032 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 01:05:10,739 INFO [train.py:903] (3/4) Epoch 3, batch 4350, loss[loss=0.3184, simple_loss=0.3808, pruned_loss=0.128, over 19687.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3709, pruned_loss=0.1378, over 3834125.97 frames. ], batch size: 59, lr: 2.41e-02, grad_scale: 4.0 +2023-04-01 01:05:27,030 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+02 7.530e+02 9.485e+02 1.282e+03 2.824e+03, threshold=1.897e+03, percent-clipped=4.0 +2023-04-01 01:06:11,119 INFO [train.py:903] (3/4) Epoch 3, batch 4400, loss[loss=0.3617, simple_loss=0.4051, pruned_loss=0.1591, over 19606.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3714, pruned_loss=0.1381, over 3821624.49 frames. ], batch size: 57, lr: 2.41e-02, grad_scale: 8.0 +2023-04-01 01:06:17,200 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:32,865 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 01:06:43,600 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 01:06:51,966 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.97 vs. limit=5.0 +2023-04-01 01:06:53,814 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18092.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:06:57,486 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:10,941 INFO [train.py:903] (3/4) Epoch 3, batch 4450, loss[loss=0.3285, simple_loss=0.3748, pruned_loss=0.1411, over 18741.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3701, pruned_loss=0.1368, over 3824753.64 frames. 
], batch size: 74, lr: 2.40e-02, grad_scale: 8.0 +2023-04-01 01:07:21,294 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18116.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:07:27,544 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:07:28,238 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.891e+02 7.625e+02 9.248e+02 1.171e+03 2.408e+03, threshold=1.850e+03, percent-clipped=4.0 +2023-04-01 01:07:53,390 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18141.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:08:11,164 INFO [train.py:903] (3/4) Epoch 3, batch 4500, loss[loss=0.3336, simple_loss=0.3653, pruned_loss=0.151, over 19766.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3713, pruned_loss=0.1377, over 3829294.80 frames. ], batch size: 45, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:08:31,582 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0447, 1.7624, 2.0937, 2.5506, 4.4726, 1.5646, 2.3693, 4.4448], + device='cuda:3'), covar=tensor([0.0183, 0.2032, 0.1897, 0.1150, 0.0332, 0.1770, 0.1023, 0.0286], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0304, 0.0295, 0.0279, 0.0285, 0.0321, 0.0272, 0.0283], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:08:37,477 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:54,083 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18192.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:08:55,175 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:11,759 INFO [train.py:903] (3/4) Epoch 3, batch 4550, loss[loss=0.2622, simple_loss=0.3259, pruned_loss=0.09921, over 19606.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3709, pruned_loss=0.1378, over 3822761.50 frames. ], batch size: 50, lr: 2.40e-02, grad_scale: 4.0 +2023-04-01 01:09:12,106 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:09:18,481 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 01:09:25,766 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 01:09:29,435 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.087e+02 7.616e+02 9.596e+02 1.201e+03 2.125e+03, threshold=1.919e+03, percent-clipped=4.0 +2023-04-01 01:09:42,206 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 01:09:48,932 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2244, 1.2278, 1.8147, 1.3171, 2.3258, 2.1556, 2.5244, 0.9597], + device='cuda:3'), covar=tensor([0.1409, 0.2266, 0.1167, 0.1340, 0.0926, 0.1109, 0.0970, 0.2033], + device='cuda:3'), in_proj_covar=tensor([0.0411, 0.0459, 0.0416, 0.0389, 0.0502, 0.0404, 0.0581, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:10:07,226 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-01 01:10:11,980 INFO [train.py:903] (3/4) Epoch 3, batch 4600, loss[loss=0.3138, simple_loss=0.3609, pruned_loss=0.1334, over 19475.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3724, pruned_loss=0.1393, over 3813503.92 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:11,635 INFO [train.py:903] (3/4) Epoch 3, batch 4650, loss[loss=0.348, simple_loss=0.3969, pruned_loss=0.1496, over 17176.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3708, pruned_loss=0.1382, over 3822846.97 frames. ], batch size: 101, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:11:27,781 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 01:11:28,861 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+02 7.851e+02 9.796e+02 1.308e+03 3.825e+03, threshold=1.959e+03, percent-clipped=6.0 +2023-04-01 01:11:29,088 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:11:38,751 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 01:12:10,878 INFO [train.py:903] (3/4) Epoch 3, batch 4700, loss[loss=0.2881, simple_loss=0.3338, pruned_loss=0.1212, over 19781.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3707, pruned_loss=0.1383, over 3813016.37 frames. ], batch size: 47, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:12:33,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 01:12:54,111 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9619, 1.9787, 1.4190, 1.3127, 1.2585, 1.5820, 0.2371, 0.8193], + device='cuda:3'), covar=tensor([0.0273, 0.0270, 0.0217, 0.0283, 0.0578, 0.0314, 0.0511, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0246, 0.0246, 0.0244, 0.0266, 0.0322, 0.0257, 0.0251, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:12:59,735 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6491, 1.2455, 1.6585, 2.0381, 3.2794, 1.3408, 1.9857, 3.3226], + device='cuda:3'), covar=tensor([0.0307, 0.2353, 0.2136, 0.1233, 0.0429, 0.1954, 0.1166, 0.0377], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0299, 0.0296, 0.0273, 0.0280, 0.0316, 0.0266, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:13:13,560 INFO [train.py:903] (3/4) Epoch 3, batch 4750, loss[loss=0.3566, simple_loss=0.4026, pruned_loss=0.1553, over 19707.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3709, pruned_loss=0.138, over 3814386.55 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 4.0 +2023-04-01 01:13:19,797 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. 
limit=2.0 +2023-04-01 01:13:22,628 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1446, 1.2022, 1.9552, 1.3047, 2.3622, 2.3305, 2.6310, 0.9672], + device='cuda:3'), covar=tensor([0.1578, 0.2517, 0.1209, 0.1431, 0.1163, 0.1164, 0.1190, 0.2322], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0464, 0.0420, 0.0395, 0.0505, 0.0403, 0.0582, 0.0417], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:13:30,998 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 01:13:31,290 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.455e+02 7.810e+02 9.309e+02 1.222e+03 2.382e+03, threshold=1.862e+03, percent-clipped=4.0 +2023-04-01 01:13:32,812 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6853, 1.2302, 1.6265, 1.9892, 3.3119, 1.6508, 2.2416, 3.3352], + device='cuda:3'), covar=tensor([0.0335, 0.2739, 0.2287, 0.1472, 0.0462, 0.2105, 0.0976, 0.0462], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0301, 0.0297, 0.0274, 0.0281, 0.0320, 0.0271, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:13:44,297 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18433.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:13:48,013 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-04-01 01:13:48,785 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:15,105 INFO [train.py:903] (3/4) Epoch 3, batch 4800, loss[loss=0.28, simple_loss=0.3225, pruned_loss=0.1188, over 19760.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3721, pruned_loss=0.1383, over 3802030.85 frames. ], batch size: 47, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:14:16,617 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:22,308 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18463.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:14:53,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18488.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:16,099 INFO [train.py:903] (3/4) Epoch 3, batch 4850, loss[loss=0.4558, simple_loss=0.4543, pruned_loss=0.2287, over 13075.00 frames. ], tot_loss[loss=0.3227, simple_loss=0.3709, pruned_loss=0.1373, over 3797060.48 frames. ], batch size: 135, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:15:17,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:34,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+02 7.295e+02 9.130e+02 1.063e+03 1.681e+03, threshold=1.826e+03, percent-clipped=0.0 +2023-04-01 01:15:38,190 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-01 01:15:40,625 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18526.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:15:52,403 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:15:53,582 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18537.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:16:00,114 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 01:16:04,601 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 01:16:05,774 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 01:16:14,592 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 01:16:15,641 INFO [train.py:903] (3/4) Epoch 3, batch 4900, loss[loss=0.3823, simple_loss=0.416, pruned_loss=0.1742, over 19717.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3693, pruned_loss=0.1362, over 3803579.36 frames. ], batch size: 59, lr: 2.38e-02, grad_scale: 8.0 +2023-04-01 01:16:34,644 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 01:17:14,805 INFO [train.py:903] (3/4) Epoch 3, batch 4950, loss[loss=0.3766, simple_loss=0.4076, pruned_loss=0.1728, over 18098.00 frames. ], tot_loss[loss=0.3233, simple_loss=0.3711, pruned_loss=0.1377, over 3797598.19 frames. ], batch size: 83, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:17:30,524 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 01:17:34,978 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.825e+02 8.688e+02 1.059e+03 1.337e+03 3.400e+03, threshold=2.119e+03, percent-clipped=10.0 +2023-04-01 01:17:52,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 01:17:54,768 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3775, 0.9254, 1.2149, 1.2443, 2.0697, 1.0854, 1.6480, 1.9926], + device='cuda:3'), covar=tensor([0.0583, 0.2721, 0.2521, 0.1492, 0.0689, 0.1955, 0.1038, 0.0714], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0303, 0.0298, 0.0276, 0.0285, 0.0319, 0.0271, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:18:09,845 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18651.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:10,978 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:18:17,088 INFO [train.py:903] (3/4) Epoch 3, batch 5000, loss[loss=0.3439, simple_loss=0.4003, pruned_loss=0.1437, over 19415.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3713, pruned_loss=0.1376, over 3799562.16 frames. ], batch size: 70, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:18:21,588 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 01:18:32,449 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 01:18:59,281 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:00,356 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18693.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:16,428 INFO [train.py:903] (3/4) Epoch 3, batch 5050, loss[loss=0.3245, simple_loss=0.3699, pruned_loss=0.1395, over 19540.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3714, pruned_loss=0.1378, over 3810924.89 frames. ], batch size: 54, lr: 2.37e-02, grad_scale: 4.0 +2023-04-01 01:19:29,230 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:19:35,293 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.108e+02 7.776e+02 1.028e+03 1.229e+03 3.550e+03, threshold=2.057e+03, percent-clipped=2.0 +2023-04-01 01:19:48,441 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 01:20:09,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0779, 5.3600, 3.1452, 4.7200, 1.4060, 5.1888, 5.2109, 5.5203], + device='cuda:3'), covar=tensor([0.0421, 0.1036, 0.1585, 0.0665, 0.3597, 0.0588, 0.0544, 0.0580], + device='cuda:3'), in_proj_covar=tensor([0.0312, 0.0293, 0.0333, 0.0278, 0.0343, 0.0290, 0.0247, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:20:16,938 INFO [train.py:903] (3/4) Epoch 3, batch 5100, loss[loss=0.2851, simple_loss=0.3425, pruned_loss=0.1138, over 19609.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3706, pruned_loss=0.137, over 3815406.47 frames. ], batch size: 50, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:20:24,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 01:20:27,864 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 01:20:32,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 01:20:36,148 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8600, 4.1578, 4.5044, 4.4361, 1.4097, 4.0554, 3.7565, 3.9796], + device='cuda:3'), covar=tensor([0.0614, 0.0512, 0.0412, 0.0325, 0.3343, 0.0276, 0.0376, 0.0945], + device='cuda:3'), in_proj_covar=tensor([0.0391, 0.0361, 0.0495, 0.0385, 0.0496, 0.0261, 0.0324, 0.0469], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 01:21:01,142 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9173, 1.9734, 1.6854, 2.8402, 1.8999, 2.9577, 2.6254, 1.8017], + device='cuda:3'), covar=tensor([0.1209, 0.0878, 0.0628, 0.0521, 0.1040, 0.0260, 0.0898, 0.0855], + device='cuda:3'), in_proj_covar=tensor([0.0493, 0.0457, 0.0461, 0.0617, 0.0535, 0.0372, 0.0558, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:21:19,959 INFO [train.py:903] (3/4) Epoch 3, batch 5150, loss[loss=0.3329, simple_loss=0.3898, pruned_loss=0.138, over 19490.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.369, pruned_loss=0.1352, over 3822929.17 frames. 
], batch size: 64, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:21:22,518 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9073, 1.7827, 1.7303, 2.5244, 2.0275, 2.5122, 2.1716, 1.5717], + device='cuda:3'), covar=tensor([0.0915, 0.0769, 0.0501, 0.0394, 0.0730, 0.0250, 0.0725, 0.0747], + device='cuda:3'), in_proj_covar=tensor([0.0488, 0.0458, 0.0460, 0.0615, 0.0534, 0.0375, 0.0559, 0.0456], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:21:31,907 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 01:21:40,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.924e+02 8.047e+02 1.080e+03 1.932e+03, threshold=1.609e+03, percent-clipped=0.0 +2023-04-01 01:22:06,986 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 01:22:14,770 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18852.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:22:20,358 INFO [train.py:903] (3/4) Epoch 3, batch 5200, loss[loss=0.4261, simple_loss=0.4341, pruned_loss=0.209, over 13480.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3678, pruned_loss=0.1338, over 3816501.27 frames. ], batch size: 136, lr: 2.36e-02, grad_scale: 8.0 +2023-04-01 01:22:37,384 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 01:22:37,518 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18870.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:23:21,232 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 01:23:23,236 INFO [train.py:903] (3/4) Epoch 3, batch 5250, loss[loss=0.2733, simple_loss=0.3343, pruned_loss=0.1061, over 19673.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3666, pruned_loss=0.1335, over 3812761.98 frames. ], batch size: 53, lr: 2.36e-02, grad_scale: 4.0 +2023-04-01 01:23:23,700 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:24,877 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:42,381 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+02 7.388e+02 9.793e+02 1.246e+03 4.620e+03, threshold=1.959e+03, percent-clipped=9.0 +2023-04-01 01:23:45,180 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 01:23:53,545 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:23:54,590 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:12,783 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:22,206 INFO [train.py:903] (3/4) Epoch 3, batch 5300, loss[loss=0.3663, simple_loss=0.399, pruned_loss=0.1668, over 13532.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3678, pruned_loss=0.135, over 3805104.44 frames. 
], batch size: 136, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:24:35,242 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:24:39,531 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 01:24:56,497 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18985.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:25:22,328 INFO [train.py:903] (3/4) Epoch 3, batch 5350, loss[loss=0.2863, simple_loss=0.3404, pruned_loss=0.1161, over 19615.00 frames. ], tot_loss[loss=0.3201, simple_loss=0.3686, pruned_loss=0.1358, over 3796551.57 frames. ], batch size: 50, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:25:28,792 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 01:25:39,625 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8969, 2.1090, 2.0397, 2.5342, 4.3898, 1.7119, 2.1747, 4.2359], + device='cuda:3'), covar=tensor([0.0216, 0.2008, 0.2209, 0.1301, 0.0346, 0.1838, 0.1143, 0.0310], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0307, 0.0303, 0.0279, 0.0292, 0.0322, 0.0275, 0.0287], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:25:42,787 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+02 7.837e+02 1.011e+03 1.314e+03 3.062e+03, threshold=2.023e+03, percent-clipped=6.0 +2023-04-01 01:25:52,475 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2894, 2.3017, 1.5042, 1.5558, 1.8154, 0.9797, 1.1289, 1.8978], + device='cuda:3'), covar=tensor([0.0957, 0.0432, 0.1023, 0.0564, 0.0659, 0.1242, 0.0833, 0.0419], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0238, 0.0311, 0.0253, 0.0219, 0.0309, 0.0272, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:25:55,459 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 01:25:56,694 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19036.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:26:20,416 INFO [train.py:903] (3/4) Epoch 3, batch 5400, loss[loss=0.3523, simple_loss=0.395, pruned_loss=0.1548, over 19636.00 frames. ], tot_loss[loss=0.3201, simple_loss=0.3688, pruned_loss=0.1357, over 3805680.88 frames. 
], batch size: 57, lr: 2.35e-02, grad_scale: 4.0 +2023-04-01 01:26:22,879 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1501, 1.1259, 1.6690, 1.3389, 2.3110, 2.1908, 2.3717, 0.8011], + device='cuda:3'), covar=tensor([0.1599, 0.2527, 0.1288, 0.1383, 0.0914, 0.1079, 0.1011, 0.2208], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0465, 0.0424, 0.0393, 0.0498, 0.0406, 0.0575, 0.0415], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:26:27,106 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3959, 0.9532, 1.4224, 1.3273, 2.2047, 1.1348, 1.8855, 2.0925], + device='cuda:3'), covar=tensor([0.0546, 0.2332, 0.1913, 0.1159, 0.0538, 0.1497, 0.0701, 0.0589], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0310, 0.0303, 0.0279, 0.0293, 0.0322, 0.0278, 0.0288], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:26:39,647 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 01:27:22,187 INFO [train.py:903] (3/4) Epoch 3, batch 5450, loss[loss=0.3479, simple_loss=0.3882, pruned_loss=0.1538, over 13683.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.3687, pruned_loss=0.1359, over 3796627.33 frames. ], batch size: 137, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:27:39,366 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5111, 1.2193, 1.1200, 1.7411, 1.3609, 1.5630, 1.7002, 1.5190], + device='cuda:3'), covar=tensor([0.0783, 0.1147, 0.1270, 0.0902, 0.1005, 0.0902, 0.0926, 0.0725], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0280, 0.0267, 0.0307, 0.0309, 0.0257, 0.0281, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 01:27:41,224 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+02 7.941e+02 9.480e+02 1.159e+03 2.761e+03, threshold=1.896e+03, percent-clipped=1.0 +2023-04-01 01:28:10,034 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19147.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:28:14,880 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:28:21,123 INFO [train.py:903] (3/4) Epoch 3, batch 5500, loss[loss=0.3351, simple_loss=0.3896, pruned_loss=0.1403, over 19528.00 frames. ], tot_loss[loss=0.3209, simple_loss=0.3696, pruned_loss=0.1361, over 3799781.48 frames. 
], batch size: 56, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:28:31,565 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0745, 1.5659, 1.9429, 1.5991, 2.8811, 4.3002, 4.2755, 4.6899], + device='cuda:3'), covar=tensor([0.0945, 0.2236, 0.2166, 0.1532, 0.0407, 0.0090, 0.0145, 0.0071], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0279, 0.0323, 0.0263, 0.0197, 0.0108, 0.0200, 0.0117], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 01:28:33,448 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2641, 1.3542, 1.0756, 1.0323, 0.9726, 1.1409, 0.2473, 0.6170], + device='cuda:3'), covar=tensor([0.0185, 0.0155, 0.0109, 0.0132, 0.0282, 0.0176, 0.0348, 0.0282], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0246, 0.0250, 0.0268, 0.0328, 0.0262, 0.0260, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:28:34,650 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9410, 1.8392, 1.7034, 2.7300, 1.7423, 2.7251, 2.5347, 1.5742], + device='cuda:3'), covar=tensor([0.1037, 0.0820, 0.0490, 0.0499, 0.0991, 0.0287, 0.0793, 0.0821], + device='cuda:3'), in_proj_covar=tensor([0.0498, 0.0467, 0.0469, 0.0624, 0.0543, 0.0385, 0.0567, 0.0463], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:28:45,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 01:29:20,073 INFO [train.py:903] (3/4) Epoch 3, batch 5550, loss[loss=0.3576, simple_loss=0.3935, pruned_loss=0.1608, over 17502.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.37, pruned_loss=0.1366, over 3784847.07 frames. ], batch size: 101, lr: 2.34e-02, grad_scale: 4.0 +2023-04-01 01:29:27,432 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 01:29:41,395 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:29:42,151 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 7.947e+02 9.800e+02 1.342e+03 2.993e+03, threshold=1.960e+03, percent-clipped=6.0 +2023-04-01 01:29:55,073 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6387, 3.9270, 2.6060, 3.3227, 3.2997, 2.2893, 2.1634, 2.3561], + device='cuda:3'), covar=tensor([0.0874, 0.0265, 0.0729, 0.0445, 0.0553, 0.0902, 0.0669, 0.0602], + device='cuda:3'), in_proj_covar=tensor([0.0284, 0.0240, 0.0313, 0.0261, 0.0221, 0.0312, 0.0279, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:30:02,031 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19241.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:30:10,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:30:15,626 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 01:30:21,457 INFO [train.py:903] (3/4) Epoch 3, batch 5600, loss[loss=0.2865, simple_loss=0.3479, pruned_loss=0.1125, over 19764.00 frames. 
], tot_loss[loss=0.3189, simple_loss=0.3676, pruned_loss=0.1351, over 3785396.90 frames. ], batch size: 54, lr: 2.34e-02, grad_scale: 8.0 +2023-04-01 01:30:26,196 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 01:30:33,704 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19266.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:31:04,003 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:31:22,075 INFO [train.py:903] (3/4) Epoch 3, batch 5650, loss[loss=0.3706, simple_loss=0.411, pruned_loss=0.1651, over 19670.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3684, pruned_loss=0.1354, over 3797997.39 frames. ], batch size: 58, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:31:29,265 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6218, 1.8659, 1.8749, 2.9306, 2.3806, 2.3478, 2.1137, 2.8037], + device='cuda:3'), covar=tensor([0.0621, 0.1589, 0.1089, 0.0590, 0.0990, 0.0404, 0.0749, 0.0472], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0375, 0.0282, 0.0250, 0.0308, 0.0261, 0.0267, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:31:41,039 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.993e+02 7.403e+02 9.102e+02 1.185e+03 3.385e+03, threshold=1.820e+03, percent-clipped=3.0 +2023-04-01 01:32:09,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 01:32:21,815 INFO [train.py:903] (3/4) Epoch 3, batch 5700, loss[loss=0.3269, simple_loss=0.3651, pruned_loss=0.1444, over 19749.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3692, pruned_loss=0.1363, over 3798756.65 frames. ], batch size: 47, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:33:22,522 INFO [train.py:903] (3/4) Epoch 3, batch 5750, loss[loss=0.298, simple_loss=0.356, pruned_loss=0.12, over 19771.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3667, pruned_loss=0.1339, over 3815195.32 frames. ], batch size: 54, lr: 2.33e-02, grad_scale: 8.0 +2023-04-01 01:33:22,853 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:22,920 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:25,751 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 01:33:30,125 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:33:34,052 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 01:33:39,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 01:33:43,561 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.525e+02 9.538e+02 1.231e+03 3.556e+03, threshold=1.908e+03, percent-clipped=6.0 +2023-04-01 01:33:49,419 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1588, 1.1278, 1.5498, 1.3242, 2.2519, 1.8533, 2.2771, 0.9070], + device='cuda:3'), covar=tensor([0.1496, 0.2487, 0.1322, 0.1292, 0.0911, 0.1235, 0.0974, 0.2239], + device='cuda:3'), in_proj_covar=tensor([0.0409, 0.0462, 0.0416, 0.0388, 0.0502, 0.0406, 0.0569, 0.0414], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 01:33:53,867 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:34:22,893 INFO [train.py:903] (3/4) Epoch 3, batch 5800, loss[loss=0.3407, simple_loss=0.3886, pruned_loss=0.1464, over 18254.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.3677, pruned_loss=0.1347, over 3808640.25 frames. ], batch size: 83, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:03,997 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19491.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:35:07,671 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-01 01:35:23,142 INFO [train.py:903] (3/4) Epoch 3, batch 5850, loss[loss=0.301, simple_loss=0.3564, pruned_loss=0.1228, over 19747.00 frames. ], tot_loss[loss=0.32, simple_loss=0.369, pruned_loss=0.1355, over 3801862.26 frames. ], batch size: 63, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:35:43,379 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+02 8.354e+02 1.035e+03 1.319e+03 5.609e+03, threshold=2.070e+03, percent-clipped=8.0 +2023-04-01 01:35:54,724 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1008, 2.1113, 1.3616, 1.5656, 1.3774, 1.7471, 0.1936, 0.8077], + device='cuda:3'), covar=tensor([0.0198, 0.0196, 0.0166, 0.0195, 0.0469, 0.0211, 0.0407, 0.0351], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0250, 0.0251, 0.0278, 0.0332, 0.0264, 0.0257, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:36:23,588 INFO [train.py:903] (3/4) Epoch 3, batch 5900, loss[loss=0.3061, simple_loss=0.3706, pruned_loss=0.1208, over 19526.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3683, pruned_loss=0.1347, over 3811999.38 frames. ], batch size: 56, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:36:26,968 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 01:36:46,206 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 01:37:21,765 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19606.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:37:22,517 INFO [train.py:903] (3/4) Epoch 3, batch 5950, loss[loss=0.3539, simple_loss=0.398, pruned_loss=0.1549, over 18742.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.3696, pruned_loss=0.1354, over 3812894.43 frames. 
], batch size: 74, lr: 2.32e-02, grad_scale: 4.0 +2023-04-01 01:37:45,644 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.894e+02 9.030e+02 1.163e+03 3.004e+03, threshold=1.806e+03, percent-clipped=5.0 +2023-04-01 01:37:59,467 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:38:24,423 INFO [train.py:903] (3/4) Epoch 3, batch 6000, loss[loss=0.2871, simple_loss=0.3321, pruned_loss=0.121, over 19304.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3677, pruned_loss=0.1339, over 3821622.16 frames. ], batch size: 44, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:38:24,424 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 01:38:34,251 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3772, 1.1855, 1.4498, 1.0201, 2.3253, 2.6771, 2.6395, 2.8815], + device='cuda:3'), covar=tensor([0.1264, 0.2651, 0.2632, 0.2101, 0.0436, 0.0185, 0.0257, 0.0149], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0274, 0.0316, 0.0264, 0.0196, 0.0107, 0.0197, 0.0119], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 01:38:37,335 INFO [train.py:937] (3/4) Epoch 3, validation: loss=0.2218, simple_loss=0.3182, pruned_loss=0.06273, over 944034.00 frames. +2023-04-01 01:38:37,336 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18419MB +2023-04-01 01:38:42,421 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-04-01 01:38:45,556 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:15,740 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:39:37,223 INFO [train.py:903] (3/4) Epoch 3, batch 6050, loss[loss=0.2937, simple_loss=0.3457, pruned_loss=0.1209, over 19300.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3688, pruned_loss=0.135, over 3818755.54 frames. ], batch size: 44, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:39:59,307 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+02 7.768e+02 9.451e+02 1.306e+03 2.772e+03, threshold=1.890e+03, percent-clipped=6.0 +2023-04-01 01:40:37,800 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:40:38,857 INFO [train.py:903] (3/4) Epoch 3, batch 6100, loss[loss=0.2756, simple_loss=0.3442, pruned_loss=0.1035, over 19677.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3677, pruned_loss=0.1343, over 3812058.12 frames. ], batch size: 59, lr: 2.31e-02, grad_scale: 8.0 +2023-04-01 01:41:38,885 INFO [train.py:903] (3/4) Epoch 3, batch 6150, loss[loss=0.3051, simple_loss=0.3625, pruned_loss=0.1239, over 19598.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3682, pruned_loss=0.1345, over 3830043.41 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:01,436 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+02 8.602e+02 1.123e+03 1.510e+03 2.312e+03, threshold=2.246e+03, percent-clipped=7.0 +2023-04-01 01:42:04,691 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-01 01:42:38,570 INFO [train.py:903] (3/4) Epoch 3, batch 6200, loss[loss=0.3639, simple_loss=0.4063, pruned_loss=0.1607, over 19320.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3689, pruned_loss=0.1344, over 3832979.30 frames. ], batch size: 66, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:42:46,198 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19862.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 01:42:57,096 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:43:10,824 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 01:43:15,884 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19887.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 01:43:39,796 INFO [train.py:903] (3/4) Epoch 3, batch 6250, loss[loss=0.3666, simple_loss=0.4101, pruned_loss=0.1615, over 19117.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3676, pruned_loss=0.1338, over 3838759.11 frames. ], batch size: 69, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:01,769 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+02 7.100e+02 9.208e+02 1.133e+03 3.490e+03, threshold=1.842e+03, percent-clipped=3.0 +2023-04-01 01:44:09,646 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 01:44:19,060 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9392, 5.2134, 3.1485, 4.7897, 1.4494, 5.2321, 5.1455, 5.5171], + device='cuda:3'), covar=tensor([0.0396, 0.0899, 0.1715, 0.0509, 0.3689, 0.0692, 0.0502, 0.0656], + device='cuda:3'), in_proj_covar=tensor([0.0320, 0.0293, 0.0339, 0.0270, 0.0339, 0.0288, 0.0244, 0.0288], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:44:38,572 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9374, 4.9416, 5.8169, 5.7450, 1.6526, 5.3940, 4.6260, 5.2065], + device='cuda:3'), covar=tensor([0.0570, 0.0446, 0.0379, 0.0227, 0.3801, 0.0199, 0.0411, 0.0806], + device='cuda:3'), in_proj_covar=tensor([0.0401, 0.0365, 0.0494, 0.0375, 0.0501, 0.0266, 0.0332, 0.0469], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 01:44:40,500 INFO [train.py:903] (3/4) Epoch 3, batch 6300, loss[loss=0.3066, simple_loss=0.3597, pruned_loss=0.1267, over 19851.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3665, pruned_loss=0.1332, over 3843576.24 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 8.0 +2023-04-01 01:44:47,590 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9227, 1.5707, 1.7279, 2.1372, 1.8376, 2.0178, 2.2044, 2.0158], + device='cuda:3'), covar=tensor([0.0591, 0.0884, 0.0862, 0.0738, 0.0873, 0.0683, 0.0788, 0.0515], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0280, 0.0268, 0.0308, 0.0308, 0.0252, 0.0274, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 01:45:07,280 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19980.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:45:42,109 INFO [train.py:903] (3/4) Epoch 3, batch 6350, loss[loss=0.3183, simple_loss=0.3765, pruned_loss=0.13, over 19570.00 frames. 
], tot_loss[loss=0.3176, simple_loss=0.3671, pruned_loss=0.134, over 3819543.24 frames. ], batch size: 61, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:46:03,312 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.926e+02 7.424e+02 9.293e+02 1.158e+03 2.968e+03, threshold=1.859e+03, percent-clipped=5.0 +2023-04-01 01:46:20,204 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20038.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:46:42,149 INFO [train.py:903] (3/4) Epoch 3, batch 6400, loss[loss=0.2842, simple_loss=0.3428, pruned_loss=0.1128, over 19532.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3686, pruned_loss=0.1349, over 3818120.40 frames. ], batch size: 54, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:09,212 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3571, 2.1931, 1.4470, 1.7094, 2.0533, 1.1124, 1.0199, 1.5866], + device='cuda:3'), covar=tensor([0.0815, 0.0515, 0.1042, 0.0438, 0.0407, 0.1089, 0.0806, 0.0497], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0245, 0.0320, 0.0251, 0.0218, 0.0312, 0.0282, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 01:47:29,327 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:47:43,491 INFO [train.py:903] (3/4) Epoch 3, batch 6450, loss[loss=0.3127, simple_loss=0.3627, pruned_loss=0.1314, over 19776.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3691, pruned_loss=0.1351, over 3820725.73 frames. ], batch size: 54, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:47:43,814 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:05,562 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.028e+02 7.833e+02 9.380e+02 1.145e+03 2.427e+03, threshold=1.876e+03, percent-clipped=3.0 +2023-04-01 01:48:08,323 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:12,605 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:29,095 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 01:48:39,377 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:48:44,548 INFO [train.py:903] (3/4) Epoch 3, batch 6500, loss[loss=0.3153, simple_loss=0.3606, pruned_loss=0.135, over 19582.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3681, pruned_loss=0.1345, over 3824309.29 frames. ], batch size: 52, lr: 2.29e-02, grad_scale: 8.0 +2023-04-01 01:48:52,306 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 01:49:45,349 INFO [train.py:903] (3/4) Epoch 3, batch 6550, loss[loss=0.3496, simple_loss=0.3944, pruned_loss=0.1524, over 19671.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.368, pruned_loss=0.1337, over 3825586.05 frames. 
], batch size: 55, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:50:03,604 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:50:07,447 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.125e+02 7.085e+02 9.596e+02 1.224e+03 2.377e+03, threshold=1.919e+03, percent-clipped=5.0 +2023-04-01 01:50:28,031 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2208, 2.7747, 1.8963, 2.1461, 1.6335, 2.2101, 0.4576, 2.1986], + device='cuda:3'), covar=tensor([0.0275, 0.0217, 0.0239, 0.0354, 0.0501, 0.0323, 0.0630, 0.0370], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0255, 0.0254, 0.0277, 0.0337, 0.0270, 0.0259, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:50:46,685 INFO [train.py:903] (3/4) Epoch 3, batch 6600, loss[loss=0.3448, simple_loss=0.3913, pruned_loss=0.1491, over 19482.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3668, pruned_loss=0.1327, over 3837507.37 frames. ], batch size: 64, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:51:47,896 INFO [train.py:903] (3/4) Epoch 3, batch 6650, loss[loss=0.3002, simple_loss=0.3538, pruned_loss=0.1233, over 17172.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3678, pruned_loss=0.1336, over 3831018.85 frames. ], batch size: 101, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:52:10,192 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+02 7.672e+02 8.992e+02 1.144e+03 3.524e+03, threshold=1.798e+03, percent-clipped=4.0 +2023-04-01 01:52:42,306 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:52:47,437 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7212, 3.8258, 4.2404, 4.1914, 2.4314, 3.6850, 3.5102, 3.8697], + device='cuda:3'), covar=tensor([0.0679, 0.1053, 0.0476, 0.0344, 0.2539, 0.0319, 0.0391, 0.0857], + device='cuda:3'), in_proj_covar=tensor([0.0416, 0.0368, 0.0504, 0.0388, 0.0514, 0.0274, 0.0330, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 01:52:49,541 INFO [train.py:903] (3/4) Epoch 3, batch 6700, loss[loss=0.2292, simple_loss=0.2991, pruned_loss=0.07963, over 19505.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3666, pruned_loss=0.1328, over 3834984.42 frames. ], batch size: 49, lr: 2.28e-02, grad_scale: 8.0 +2023-04-01 01:53:11,826 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:17,990 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:45,269 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:53:46,032 INFO [train.py:903] (3/4) Epoch 3, batch 6750, loss[loss=0.3543, simple_loss=0.3932, pruned_loss=0.1577, over 13457.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3666, pruned_loss=0.1333, over 3830594.14 frames. 
], batch size: 136, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:54:05,360 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+02 7.667e+02 1.002e+03 1.269e+03 2.908e+03, threshold=2.004e+03, percent-clipped=6.0 +2023-04-01 01:54:17,342 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:34,793 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:54:41,604 INFO [train.py:903] (3/4) Epoch 3, batch 6800, loss[loss=0.2975, simple_loss=0.3552, pruned_loss=0.1199, over 19625.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.3661, pruned_loss=0.1326, over 3818789.69 frames. ], batch size: 50, lr: 2.27e-02, grad_scale: 8.0 +2023-04-01 01:55:00,655 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:55:25,479 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 01:55:26,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 01:55:29,039 INFO [train.py:903] (3/4) Epoch 4, batch 0, loss[loss=0.3367, simple_loss=0.3956, pruned_loss=0.1389, over 19389.00 frames. ], tot_loss[loss=0.3367, simple_loss=0.3956, pruned_loss=0.1389, over 19389.00 frames. ], batch size: 70, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:55:29,039 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 01:55:40,519 INFO [train.py:937] (3/4) Epoch 4, validation: loss=0.2245, simple_loss=0.3205, pruned_loss=0.06426, over 944034.00 frames. +2023-04-01 01:55:40,520 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18419MB +2023-04-01 01:55:53,638 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 01:55:55,196 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20497.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:56:27,871 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.855e+02 8.790e+02 1.166e+03 2.960e+03, threshold=1.758e+03, percent-clipped=3.0 +2023-04-01 01:56:41,015 INFO [train.py:903] (3/4) Epoch 4, batch 50, loss[loss=0.3065, simple_loss=0.37, pruned_loss=0.1215, over 19642.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3701, pruned_loss=0.1359, over 865177.47 frames. ], batch size: 60, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:14,499 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-01 01:57:15,816 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:16,031 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20566.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:32,391 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0670, 1.0032, 1.3953, 0.5335, 2.4354, 2.4276, 2.2485, 2.5262], + device='cuda:3'), covar=tensor([0.1231, 0.2599, 0.2544, 0.2001, 0.0285, 0.0133, 0.0283, 0.0165], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0273, 0.0316, 0.0261, 0.0187, 0.0104, 0.0194, 0.0116], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 01:57:40,112 INFO [train.py:903] (3/4) Epoch 4, batch 100, loss[loss=0.3389, simple_loss=0.3888, pruned_loss=0.1445, over 19741.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3659, pruned_loss=0.1343, over 1520714.54 frames. ], batch size: 63, lr: 2.12e-02, grad_scale: 8.0 +2023-04-01 01:57:46,163 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20589.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:57:52,779 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 01:58:05,929 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:58:16,462 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4353, 4.0296, 2.4359, 3.6570, 1.2790, 3.5030, 3.5203, 3.6321], + device='cuda:3'), covar=tensor([0.0595, 0.1036, 0.1994, 0.0669, 0.3783, 0.1023, 0.0756, 0.1136], + device='cuda:3'), in_proj_covar=tensor([0.0323, 0.0297, 0.0336, 0.0267, 0.0345, 0.0290, 0.0248, 0.0288], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 01:58:29,320 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 7.763e+02 9.275e+02 1.175e+03 2.763e+03, threshold=1.855e+03, percent-clipped=7.0 +2023-04-01 01:58:41,720 INFO [train.py:903] (3/4) Epoch 4, batch 150, loss[loss=0.2743, simple_loss=0.3372, pruned_loss=0.1057, over 19593.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3634, pruned_loss=0.1307, over 2035614.70 frames. ], batch size: 52, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:03,928 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-01 01:59:35,941 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20681.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 01:59:40,165 INFO [train.py:903] (3/4) Epoch 4, batch 200, loss[loss=0.2877, simple_loss=0.3375, pruned_loss=0.1189, over 19782.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3628, pruned_loss=0.1302, over 2437284.51 frames. ], batch size: 48, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 01:59:41,292 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 01:59:54,697 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-01 02:00:20,392 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-01 02:00:28,682 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 7.097e+02 9.184e+02 1.257e+03 2.857e+03, threshold=1.837e+03, percent-clipped=5.0 +2023-04-01 02:00:39,432 INFO [train.py:903] (3/4) Epoch 4, batch 250, loss[loss=0.2642, simple_loss=0.3317, pruned_loss=0.09833, over 19661.00 frames. ], tot_loss[loss=0.3081, simple_loss=0.3599, pruned_loss=0.1281, over 2766927.36 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:00:45,040 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1981, 2.1399, 1.8552, 3.5259, 2.1595, 3.5578, 3.4263, 1.8818], + device='cuda:3'), covar=tensor([0.1406, 0.1000, 0.0568, 0.0633, 0.1296, 0.0309, 0.0848, 0.0957], + device='cuda:3'), in_proj_covar=tensor([0.0513, 0.0483, 0.0478, 0.0645, 0.0550, 0.0399, 0.0578, 0.0481], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:00:57,788 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:02,449 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:20,560 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7577, 1.4884, 1.9317, 1.6760, 3.3260, 4.5521, 4.6539, 5.1701], + device='cuda:3'), covar=tensor([0.1367, 0.2810, 0.2594, 0.1846, 0.0354, 0.0115, 0.0138, 0.0055], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0279, 0.0323, 0.0265, 0.0194, 0.0109, 0.0200, 0.0120], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 02:01:24,172 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1214, 1.3356, 2.1225, 1.5535, 2.9294, 2.6727, 3.1955, 1.3949], + device='cuda:3'), covar=tensor([0.1789, 0.2693, 0.1439, 0.1470, 0.1256, 0.1243, 0.1573, 0.2609], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0469, 0.0436, 0.0397, 0.0513, 0.0414, 0.0591, 0.0420], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:01:32,693 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:34,839 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:01:41,110 INFO [train.py:903] (3/4) Epoch 4, batch 300, loss[loss=0.3697, simple_loss=0.403, pruned_loss=0.1682, over 19315.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3593, pruned_loss=0.127, over 3005273.85 frames. 
], batch size: 66, lr: 2.11e-02, grad_scale: 8.0 +2023-04-01 02:02:25,225 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:29,140 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 7.559e+02 9.012e+02 1.206e+03 2.235e+03, threshold=1.802e+03, percent-clipped=6.0 +2023-04-01 02:02:30,545 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7344, 1.5558, 2.0794, 2.5322, 2.3110, 2.6907, 2.2281, 3.0355], + device='cuda:3'), covar=tensor([0.0697, 0.2008, 0.1188, 0.0818, 0.1228, 0.0374, 0.0824, 0.0418], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0381, 0.0278, 0.0243, 0.0312, 0.0259, 0.0267, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:02:40,183 INFO [train.py:903] (3/4) Epoch 4, batch 350, loss[loss=0.2889, simple_loss=0.3516, pruned_loss=0.1131, over 19668.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3618, pruned_loss=0.1289, over 3178525.23 frames. ], batch size: 58, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:02:45,656 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 02:02:52,661 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:02:54,938 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20847.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:00,487 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0611, 4.8359, 5.7660, 5.7624, 1.7306, 5.2606, 4.6550, 5.2110], + device='cuda:3'), covar=tensor([0.0620, 0.0457, 0.0342, 0.0237, 0.3478, 0.0176, 0.0330, 0.0747], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0374, 0.0508, 0.0390, 0.0513, 0.0278, 0.0333, 0.0483], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:03:03,958 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0957, 5.4210, 3.1831, 4.7296, 1.3024, 5.1491, 5.1417, 5.4605], + device='cuda:3'), covar=tensor([0.0404, 0.0900, 0.1828, 0.0617, 0.4001, 0.0724, 0.0493, 0.0545], + device='cuda:3'), in_proj_covar=tensor([0.0334, 0.0304, 0.0346, 0.0280, 0.0351, 0.0295, 0.0252, 0.0296], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:03:18,431 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20865.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:23,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:03:40,718 INFO [train.py:903] (3/4) Epoch 4, batch 400, loss[loss=0.2942, simple_loss=0.3578, pruned_loss=0.1153, over 19779.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.362, pruned_loss=0.1287, over 3330595.31 frames. 
], batch size: 56, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:03:52,334 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:27,878 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+02 7.529e+02 9.870e+02 1.265e+03 2.610e+03, threshold=1.974e+03, percent-clipped=3.0 +2023-04-01 02:04:36,624 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.85 vs. limit=5.0 +2023-04-01 02:04:39,083 INFO [train.py:903] (3/4) Epoch 4, batch 450, loss[loss=0.2979, simple_loss=0.3612, pruned_loss=0.1172, over 19787.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3618, pruned_loss=0.1286, over 3452323.14 frames. ], batch size: 56, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:04:41,821 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:04:58,391 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:12,735 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 02:05:13,080 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:05:13,941 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 02:05:40,645 INFO [train.py:903] (3/4) Epoch 4, batch 500, loss[loss=0.2934, simple_loss=0.3391, pruned_loss=0.1238, over 19488.00 frames. ], tot_loss[loss=0.3094, simple_loss=0.3617, pruned_loss=0.1286, over 3547205.25 frames. ], batch size: 49, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:06:27,515 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.944e+02 7.370e+02 9.144e+02 1.191e+03 3.185e+03, threshold=1.829e+03, percent-clipped=4.0 +2023-04-01 02:06:40,264 INFO [train.py:903] (3/4) Epoch 4, batch 550, loss[loss=0.3418, simple_loss=0.3819, pruned_loss=0.1508, over 19519.00 frames. ], tot_loss[loss=0.3104, simple_loss=0.3625, pruned_loss=0.1292, over 3610763.03 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 8.0 +2023-04-01 02:07:17,121 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:23,287 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:07:37,589 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9492, 3.6499, 2.2313, 3.3632, 1.2685, 3.3208, 3.1438, 3.3868], + device='cuda:3'), covar=tensor([0.0611, 0.1024, 0.2024, 0.0695, 0.3342, 0.0927, 0.0770, 0.0897], + device='cuda:3'), in_proj_covar=tensor([0.0321, 0.0295, 0.0338, 0.0273, 0.0339, 0.0292, 0.0251, 0.0288], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:07:39,554 INFO [train.py:903] (3/4) Epoch 4, batch 600, loss[loss=0.3104, simple_loss=0.3628, pruned_loss=0.129, over 17499.00 frames. ], tot_loss[loss=0.3117, simple_loss=0.3635, pruned_loss=0.13, over 3655005.95 frames. 
], batch size: 101, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:15,013 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2470, 1.5412, 1.4642, 2.5025, 1.9320, 2.2652, 2.6083, 2.2636], + device='cuda:3'), covar=tensor([0.0775, 0.1186, 0.1389, 0.1071, 0.1158, 0.0777, 0.0967, 0.0721], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0272, 0.0267, 0.0296, 0.0300, 0.0249, 0.0263, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 02:08:19,258 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 02:08:23,067 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21121.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:27,139 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.199e+02 8.023e+02 1.001e+03 1.305e+03 2.804e+03, threshold=2.003e+03, percent-clipped=3.0 +2023-04-01 02:08:39,444 INFO [train.py:903] (3/4) Epoch 4, batch 650, loss[loss=0.2643, simple_loss=0.3195, pruned_loss=0.1046, over 19761.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3626, pruned_loss=0.1294, over 3691067.52 frames. ], batch size: 46, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:08:40,635 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:08:53,046 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:00,264 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:16,937 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5657, 1.5748, 1.9949, 2.4444, 4.2225, 1.1267, 2.2395, 4.2863], + device='cuda:3'), covar=tensor([0.0292, 0.2277, 0.1918, 0.1240, 0.0405, 0.2236, 0.1225, 0.0288], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0303, 0.0299, 0.0277, 0.0294, 0.0318, 0.0282, 0.0284], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:09:29,119 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21176.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:09:38,634 INFO [train.py:903] (3/4) Epoch 4, batch 700, loss[loss=0.3994, simple_loss=0.4211, pruned_loss=0.1888, over 17492.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3625, pruned_loss=0.1295, over 3723998.89 frames. ], batch size: 101, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:26,834 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 7.118e+02 8.929e+02 1.162e+03 2.438e+03, threshold=1.786e+03, percent-clipped=3.0 +2023-04-01 02:10:40,520 INFO [train.py:903] (3/4) Epoch 4, batch 750, loss[loss=0.3089, simple_loss=0.3713, pruned_loss=0.1233, over 19747.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.362, pruned_loss=0.1289, over 3756354.65 frames. 
], batch size: 63, lr: 2.09e-02, grad_scale: 8.0 +2023-04-01 02:10:44,254 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7439, 1.3854, 1.2277, 1.5292, 1.4562, 1.4468, 1.2743, 1.5467], + device='cuda:3'), covar=tensor([0.0786, 0.1346, 0.1285, 0.0797, 0.0979, 0.0537, 0.0915, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0374, 0.0279, 0.0247, 0.0313, 0.0259, 0.0262, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:10:48,899 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4085, 1.3830, 2.2184, 1.6902, 3.0719, 3.0144, 3.5205, 1.4042], + device='cuda:3'), covar=tensor([0.1490, 0.2478, 0.1357, 0.1233, 0.1033, 0.1007, 0.1220, 0.2339], + device='cuda:3'), in_proj_covar=tensor([0.0424, 0.0476, 0.0438, 0.0396, 0.0517, 0.0414, 0.0597, 0.0425], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:11:40,042 INFO [train.py:903] (3/4) Epoch 4, batch 800, loss[loss=0.3172, simple_loss=0.3664, pruned_loss=0.134, over 19599.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.3613, pruned_loss=0.1282, over 3782244.90 frames. ], batch size: 50, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:11:56,118 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 02:12:25,403 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:30,558 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.379e+02 8.113e+02 1.001e+03 1.207e+03 2.017e+03, threshold=2.002e+03, percent-clipped=2.0 +2023-04-01 02:12:35,470 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:41,604 INFO [train.py:903] (3/4) Epoch 4, batch 850, loss[loss=0.2654, simple_loss=0.3305, pruned_loss=0.1002, over 19663.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.362, pruned_loss=0.1284, over 3788890.09 frames. ], batch size: 53, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:12:47,672 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0985, 1.9846, 1.6300, 1.6402, 1.4655, 1.6836, 0.2024, 0.8337], + device='cuda:3'), covar=tensor([0.0199, 0.0182, 0.0140, 0.0180, 0.0430, 0.0199, 0.0413, 0.0380], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0256, 0.0259, 0.0288, 0.0342, 0.0273, 0.0264, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:12:54,289 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:12:54,393 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:13:30,946 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 02:13:39,981 INFO [train.py:903] (3/4) Epoch 4, batch 900, loss[loss=0.2951, simple_loss=0.3497, pruned_loss=0.1202, over 19607.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3623, pruned_loss=0.1286, over 3798534.92 frames. 
], batch size: 52, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:14,506 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:14:28,710 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+02 7.656e+02 9.192e+02 1.106e+03 2.022e+03, threshold=1.838e+03, percent-clipped=1.0 +2023-04-01 02:14:40,644 INFO [train.py:903] (3/4) Epoch 4, batch 950, loss[loss=0.2954, simple_loss=0.3572, pruned_loss=0.1168, over 18594.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3621, pruned_loss=0.1286, over 3804484.22 frames. ], batch size: 74, lr: 2.08e-02, grad_scale: 8.0 +2023-04-01 02:14:43,013 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 02:14:43,648 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-01 02:15:35,418 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:15:37,652 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6076, 4.1540, 2.3446, 3.7474, 1.2009, 3.6983, 3.7719, 3.8275], + device='cuda:3'), covar=tensor([0.0553, 0.0917, 0.1939, 0.0681, 0.3901, 0.0969, 0.0666, 0.0857], + device='cuda:3'), in_proj_covar=tensor([0.0320, 0.0290, 0.0337, 0.0269, 0.0339, 0.0293, 0.0253, 0.0283], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:15:40,656 INFO [train.py:903] (3/4) Epoch 4, batch 1000, loss[loss=0.328, simple_loss=0.3788, pruned_loss=0.1386, over 19671.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3621, pruned_loss=0.1287, over 3811609.43 frames. ], batch size: 58, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:28,019 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4533, 1.3742, 1.0902, 1.2988, 1.2203, 1.2850, 1.0513, 1.3313], + device='cuda:3'), covar=tensor([0.0797, 0.0959, 0.1177, 0.0814, 0.0939, 0.0493, 0.1033, 0.0653], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0370, 0.0277, 0.0249, 0.0311, 0.0258, 0.0265, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:16:29,779 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+02 7.436e+02 9.242e+02 1.292e+03 2.692e+03, threshold=1.848e+03, percent-clipped=7.0 +2023-04-01 02:16:33,304 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 02:16:33,634 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:16:35,886 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.6568, 0.8676, 0.6668, 0.6805, 0.8169, 0.6309, 0.4117, 0.8467], + device='cuda:3'), covar=tensor([0.0382, 0.0410, 0.0629, 0.0339, 0.0271, 0.0698, 0.0498, 0.0297], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0252, 0.0315, 0.0248, 0.0217, 0.0314, 0.0279, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:16:40,202 INFO [train.py:903] (3/4) Epoch 4, batch 1050, loss[loss=0.3298, simple_loss=0.376, pruned_loss=0.1418, over 19794.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3624, pruned_loss=0.1294, over 3810504.97 frames. 
], batch size: 56, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:16:57,650 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4454, 2.4279, 1.7227, 1.6702, 2.2181, 1.0425, 1.1838, 1.7710], + device='cuda:3'), covar=tensor([0.0889, 0.0516, 0.0952, 0.0557, 0.0498, 0.1265, 0.0779, 0.0515], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0249, 0.0313, 0.0244, 0.0216, 0.0311, 0.0275, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:17:14,226 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 02:17:30,946 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.48 vs. limit=5.0 +2023-04-01 02:17:34,814 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9965, 2.0950, 1.8732, 2.9135, 1.9355, 3.1233, 2.6416, 1.7880], + device='cuda:3'), covar=tensor([0.1356, 0.0954, 0.0604, 0.0645, 0.1191, 0.0308, 0.1014, 0.0972], + device='cuda:3'), in_proj_covar=tensor([0.0531, 0.0503, 0.0492, 0.0666, 0.0569, 0.0418, 0.0590, 0.0490], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:17:40,184 INFO [train.py:903] (3/4) Epoch 4, batch 1100, loss[loss=0.25, simple_loss=0.3048, pruned_loss=0.09759, over 19288.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3615, pruned_loss=0.1288, over 3826648.16 frames. ], batch size: 44, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:17:52,925 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:15,016 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:30,558 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+02 7.921e+02 9.622e+02 1.275e+03 2.981e+03, threshold=1.924e+03, percent-clipped=6.0 +2023-04-01 02:18:36,432 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21631.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:18:43,701 INFO [train.py:903] (3/4) Epoch 4, batch 1150, loss[loss=0.2695, simple_loss=0.3431, pruned_loss=0.09798, over 19668.00 frames. ], tot_loss[loss=0.3081, simple_loss=0.3601, pruned_loss=0.128, over 3837053.00 frames. ], batch size: 53, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:31,573 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:19:45,549 INFO [train.py:903] (3/4) Epoch 4, batch 1200, loss[loss=0.3406, simple_loss=0.3857, pruned_loss=0.1478, over 17520.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3608, pruned_loss=0.1282, over 3829487.50 frames. ], batch size: 100, lr: 2.07e-02, grad_scale: 8.0 +2023-04-01 02:19:51,428 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:20:14,904 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 02:20:18,749 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. 
limit=5.0 +2023-04-01 02:20:35,528 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+02 7.515e+02 9.038e+02 1.111e+03 2.454e+03, threshold=1.808e+03, percent-clipped=2.0 +2023-04-01 02:20:45,432 INFO [train.py:903] (3/4) Epoch 4, batch 1250, loss[loss=0.3471, simple_loss=0.391, pruned_loss=0.1516, over 19692.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3597, pruned_loss=0.127, over 3836354.46 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:39,848 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:44,775 INFO [train.py:903] (3/4) Epoch 4, batch 1300, loss[loss=0.277, simple_loss=0.344, pruned_loss=0.105, over 19697.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3607, pruned_loss=0.1273, over 3838910.80 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:21:45,180 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:21:49,643 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:10,090 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:15,829 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:22:34,649 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 7.410e+02 9.034e+02 1.121e+03 1.849e+03, threshold=1.807e+03, percent-clipped=1.0 +2023-04-01 02:22:45,117 INFO [train.py:903] (3/4) Epoch 4, batch 1350, loss[loss=0.3459, simple_loss=0.3677, pruned_loss=0.1621, over 18599.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3606, pruned_loss=0.1277, over 3836196.67 frames. ], batch size: 41, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:23:04,096 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:05,052 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4433, 2.7979, 3.0221, 2.9892, 1.0293, 2.7124, 2.6303, 2.4401], + device='cuda:3'), covar=tensor([0.1635, 0.1115, 0.0978, 0.0986, 0.4697, 0.0813, 0.0768, 0.1850], + device='cuda:3'), in_proj_covar=tensor([0.0432, 0.0377, 0.0512, 0.0397, 0.0525, 0.0284, 0.0338, 0.0485], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:23:23,912 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:34,825 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:23:45,417 INFO [train.py:903] (3/4) Epoch 4, batch 1400, loss[loss=0.3026, simple_loss=0.3581, pruned_loss=0.1236, over 19588.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3617, pruned_loss=0.1288, over 3816158.90 frames. ], batch size: 52, lr: 2.06e-02, grad_scale: 8.0 +2023-04-01 02:24:35,708 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.358e+02 7.364e+02 9.517e+02 1.310e+03 2.254e+03, threshold=1.903e+03, percent-clipped=6.0 +2023-04-01 02:24:42,742 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-01 02:24:46,194 INFO [train.py:903] (3/4) Epoch 4, batch 1450, loss[loss=0.3347, simple_loss=0.3821, pruned_loss=0.1437, over 19784.00 frames. ], tot_loss[loss=0.3092, simple_loss=0.3616, pruned_loss=0.1284, over 3807640.54 frames. ], batch size: 56, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:24:48,760 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7476, 4.7784, 5.5635, 5.4754, 1.7872, 5.0362, 4.5306, 4.8778], + device='cuda:3'), covar=tensor([0.0685, 0.0534, 0.0349, 0.0235, 0.3591, 0.0247, 0.0333, 0.0840], + device='cuda:3'), in_proj_covar=tensor([0.0436, 0.0383, 0.0513, 0.0398, 0.0529, 0.0288, 0.0339, 0.0496], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:25:13,225 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:34,247 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:25:45,920 INFO [train.py:903] (3/4) Epoch 4, batch 1500, loss[loss=0.3979, simple_loss=0.424, pruned_loss=0.1859, over 18712.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3627, pruned_loss=0.1295, over 3815130.97 frames. ], batch size: 74, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:25:46,716 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-01 02:25:49,651 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:26:18,127 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2050, 1.2028, 1.9439, 1.4139, 2.6991, 2.4386, 2.7856, 1.1553], + device='cuda:3'), covar=tensor([0.1717, 0.2908, 0.1459, 0.1404, 0.1081, 0.1207, 0.1274, 0.2601], + device='cuda:3'), in_proj_covar=tensor([0.0424, 0.0477, 0.0444, 0.0404, 0.0521, 0.0417, 0.0604, 0.0429], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:26:36,549 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+02 6.805e+02 9.241e+02 1.170e+03 2.581e+03, threshold=1.848e+03, percent-clipped=2.0 +2023-04-01 02:26:47,114 INFO [train.py:903] (3/4) Epoch 4, batch 1550, loss[loss=0.2902, simple_loss=0.3507, pruned_loss=0.1149, over 19532.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.361, pruned_loss=0.1278, over 3805740.83 frames. 
], batch size: 54, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:01,639 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:19,413 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:30,471 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:32,796 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:42,174 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3047, 2.1399, 2.0274, 3.3133, 2.1338, 3.7200, 3.1460, 1.8765], + device='cuda:3'), covar=tensor([0.1623, 0.1238, 0.0658, 0.0793, 0.1536, 0.0340, 0.1108, 0.1126], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0506, 0.0495, 0.0670, 0.0574, 0.0425, 0.0594, 0.0501], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:27:49,225 INFO [train.py:903] (3/4) Epoch 4, batch 1600, loss[loss=0.3121, simple_loss=0.3675, pruned_loss=0.1284, over 18168.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3618, pruned_loss=0.1281, over 3819451.33 frames. ], batch size: 83, lr: 2.05e-02, grad_scale: 8.0 +2023-04-01 02:27:50,800 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22086.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:27:55,240 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:10,563 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 02:28:37,657 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:28:39,603 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+02 8.565e+02 1.081e+03 1.346e+03 3.673e+03, threshold=2.162e+03, percent-clipped=6.0 +2023-04-01 02:28:44,656 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0280, 1.5615, 1.5133, 1.8685, 1.7937, 1.7821, 1.6044, 1.8774], + device='cuda:3'), covar=tensor([0.0724, 0.1544, 0.1181, 0.0927, 0.1036, 0.0448, 0.0891, 0.0565], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0368, 0.0280, 0.0245, 0.0301, 0.0251, 0.0261, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:28:48,850 INFO [train.py:903] (3/4) Epoch 4, batch 1650, loss[loss=0.294, simple_loss=0.3557, pruned_loss=0.1161, over 19771.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3611, pruned_loss=0.1278, over 3822343.82 frames. 
], batch size: 56, lr: 2.05e-02, grad_scale: 4.0 +2023-04-01 02:29:04,269 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22148.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:29:40,675 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7953, 3.1116, 3.2184, 3.2160, 1.1330, 2.9410, 2.7909, 2.8696], + device='cuda:3'), covar=tensor([0.0928, 0.0582, 0.0626, 0.0526, 0.3397, 0.0406, 0.0511, 0.1187], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0372, 0.0510, 0.0395, 0.0513, 0.0289, 0.0335, 0.0484], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:29:49,263 INFO [train.py:903] (3/4) Epoch 4, batch 1700, loss[loss=0.3003, simple_loss=0.3596, pruned_loss=0.1205, over 19613.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3595, pruned_loss=0.1264, over 3825363.01 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:29:52,348 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 02:30:00,422 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22194.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:23,334 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22212.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:27,873 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 02:30:40,334 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.920e+02 6.276e+02 7.753e+02 9.050e+02 1.909e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 02:30:46,032 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9934, 1.1119, 1.3949, 0.5116, 2.2719, 2.3895, 2.2047, 2.4948], + device='cuda:3'), covar=tensor([0.1145, 0.2587, 0.2549, 0.1909, 0.0370, 0.0147, 0.0349, 0.0157], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0276, 0.0320, 0.0259, 0.0192, 0.0109, 0.0198, 0.0121], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 02:30:48,089 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22233.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:30:49,931 INFO [train.py:903] (3/4) Epoch 4, batch 1750, loss[loss=0.3218, simple_loss=0.3677, pruned_loss=0.1379, over 19746.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3604, pruned_loss=0.1269, over 3819949.42 frames. ], batch size: 51, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:30:57,742 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:15,614 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22255.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:31:52,437 INFO [train.py:903] (3/4) Epoch 4, batch 1800, loss[loss=0.3264, simple_loss=0.3718, pruned_loss=0.1406, over 19624.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3589, pruned_loss=0.1258, over 3816028.68 frames. 
], batch size: 50, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:32:43,155 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+02 7.313e+02 8.961e+02 1.128e+03 3.443e+03, threshold=1.792e+03, percent-clipped=8.0 +2023-04-01 02:32:43,403 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:44,448 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:46,336 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 02:32:48,651 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22332.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:32:52,008 INFO [train.py:903] (3/4) Epoch 4, batch 1850, loss[loss=0.264, simple_loss=0.3114, pruned_loss=0.1083, over 19720.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3604, pruned_loss=0.127, over 3823160.77 frames. ], batch size: 45, lr: 2.04e-02, grad_scale: 4.0 +2023-04-01 02:33:04,854 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22346.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:13,922 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:24,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 02:33:36,207 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:33:51,852 INFO [train.py:903] (3/4) Epoch 4, batch 1900, loss[loss=0.3079, simple_loss=0.3707, pruned_loss=0.1226, over 19588.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3594, pruned_loss=0.126, over 3834765.94 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:34:09,608 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 02:34:14,790 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 02:34:15,067 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:34:39,427 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 02:34:42,858 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.044e+02 7.549e+02 9.520e+02 1.192e+03 3.384e+03, threshold=1.904e+03, percent-clipped=5.0 +2023-04-01 02:34:51,934 INFO [train.py:903] (3/4) Epoch 4, batch 1950, loss[loss=0.3009, simple_loss=0.3622, pruned_loss=0.1198, over 18173.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3605, pruned_loss=0.1269, over 3812129.60 frames. ], batch size: 83, lr: 2.03e-02, grad_scale: 4.0 +2023-04-01 02:35:08,586 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22447.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:35:53,486 INFO [train.py:903] (3/4) Epoch 4, batch 2000, loss[loss=0.2953, simple_loss=0.3577, pruned_loss=0.1164, over 19663.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3598, pruned_loss=0.1263, over 3812959.25 frames. 
], batch size: 55, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:01,833 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22492.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:06,665 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:38,284 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:36:45,511 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+02 6.735e+02 8.799e+02 1.102e+03 2.294e+03, threshold=1.760e+03, percent-clipped=1.0 +2023-04-01 02:36:46,717 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 02:36:47,091 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1699, 1.1168, 1.4352, 0.4501, 2.4596, 2.4588, 2.2006, 2.5305], + device='cuda:3'), covar=tensor([0.1032, 0.2636, 0.2441, 0.1854, 0.0287, 0.0131, 0.0299, 0.0156], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0280, 0.0323, 0.0260, 0.0192, 0.0110, 0.0201, 0.0124], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 02:36:54,495 INFO [train.py:903] (3/4) Epoch 4, batch 2050, loss[loss=0.3192, simple_loss=0.3666, pruned_loss=0.1359, over 19579.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3583, pruned_loss=0.1251, over 3814050.74 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:36:58,174 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:04,886 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 02:37:06,034 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 02:37:20,839 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2198, 1.2858, 1.4254, 1.5607, 2.7408, 1.1917, 1.9091, 2.9324], + device='cuda:3'), covar=tensor([0.0416, 0.2450, 0.2336, 0.1503, 0.0526, 0.2061, 0.1132, 0.0418], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0308, 0.0304, 0.0278, 0.0292, 0.0320, 0.0283, 0.0289], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:37:27,446 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 02:37:45,380 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22577.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:52,339 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:37:54,200 INFO [train.py:903] (3/4) Epoch 4, batch 2100, loss[loss=0.2692, simple_loss=0.3228, pruned_loss=0.1078, over 19493.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3579, pruned_loss=0.125, over 3818887.11 frames. ], batch size: 49, lr: 2.03e-02, grad_scale: 8.0 +2023-04-01 02:38:10,692 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:21,647 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-01 02:38:21,979 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:23,363 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:38:42,454 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 02:38:44,803 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.772e+02 6.954e+02 8.861e+02 1.126e+03 2.028e+03, threshold=1.772e+03, percent-clipped=3.0 +2023-04-01 02:38:53,763 INFO [train.py:903] (3/4) Epoch 4, batch 2150, loss[loss=0.2566, simple_loss=0.3121, pruned_loss=0.1006, over 19729.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3571, pruned_loss=0.1247, over 3830836.17 frames. ], batch size: 46, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:39:17,774 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22653.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:22,236 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22657.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:39:56,339 INFO [train.py:903] (3/4) Epoch 4, batch 2200, loss[loss=0.2972, simple_loss=0.3649, pruned_loss=0.1147, over 19795.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3576, pruned_loss=0.1244, over 3846166.97 frames. ], batch size: 56, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:40:05,453 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:17,997 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:30,190 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:47,466 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+02 6.866e+02 8.417e+02 1.098e+03 2.160e+03, threshold=1.683e+03, percent-clipped=4.0 +2023-04-01 02:40:49,085 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:40:56,702 INFO [train.py:903] (3/4) Epoch 4, batch 2250, loss[loss=0.277, simple_loss=0.3443, pruned_loss=0.1049, over 19697.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3573, pruned_loss=0.1247, over 3837547.45 frames. 
], batch size: 59, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:41:10,273 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:41:44,873 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6414, 4.0979, 4.3248, 4.2489, 1.3763, 3.9337, 3.6037, 3.8416], + device='cuda:3'), covar=tensor([0.0819, 0.0527, 0.0482, 0.0385, 0.3853, 0.0306, 0.0466, 0.0972], + device='cuda:3'), in_proj_covar=tensor([0.0430, 0.0386, 0.0520, 0.0407, 0.0525, 0.0291, 0.0350, 0.0492], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:41:46,038 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3205, 1.8816, 2.0394, 2.3812, 2.1589, 2.1562, 2.1429, 2.3331], + device='cuda:3'), covar=tensor([0.0580, 0.1517, 0.0915, 0.0610, 0.0865, 0.0356, 0.0624, 0.0443], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0377, 0.0280, 0.0248, 0.0309, 0.0256, 0.0271, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:41:56,362 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3923, 1.2145, 1.8465, 1.5215, 3.0820, 2.7349, 3.5039, 1.4000], + device='cuda:3'), covar=tensor([0.1553, 0.2602, 0.1564, 0.1291, 0.0965, 0.1156, 0.1111, 0.2373], + device='cuda:3'), in_proj_covar=tensor([0.0431, 0.0481, 0.0451, 0.0402, 0.0524, 0.0423, 0.0606, 0.0431], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 02:41:57,018 INFO [train.py:903] (3/4) Epoch 4, batch 2300, loss[loss=0.2758, simple_loss=0.3259, pruned_loss=0.1128, over 19732.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3562, pruned_loss=0.1241, over 3841780.56 frames. ], batch size: 51, lr: 2.02e-02, grad_scale: 8.0 +2023-04-01 02:42:09,274 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 02:42:31,195 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22812.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:42:47,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+02 7.223e+02 9.387e+02 1.200e+03 1.860e+03, threshold=1.877e+03, percent-clipped=8.0 +2023-04-01 02:42:56,885 INFO [train.py:903] (3/4) Epoch 4, batch 2350, loss[loss=0.3182, simple_loss=0.3679, pruned_loss=0.1342, over 19538.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3558, pruned_loss=0.1237, over 3838228.45 frames. ], batch size: 56, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:43:06,749 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:31,058 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:32,288 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:43:38,745 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 02:43:54,383 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-01 02:43:57,673 INFO [train.py:903] (3/4) Epoch 4, batch 2400, loss[loss=0.4174, simple_loss=0.4381, pruned_loss=0.1983, over 17644.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3552, pruned_loss=0.1232, over 3836857.68 frames. ], batch size: 101, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:44:03,331 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:27,700 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:37,903 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9229, 4.2716, 4.5821, 4.5013, 1.4790, 4.1078, 3.7190, 4.1326], + device='cuda:3'), covar=tensor([0.0706, 0.0549, 0.0438, 0.0328, 0.3712, 0.0278, 0.0431, 0.0816], + device='cuda:3'), in_proj_covar=tensor([0.0430, 0.0386, 0.0515, 0.0404, 0.0522, 0.0290, 0.0341, 0.0487], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 02:44:49,611 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+02 7.425e+02 9.466e+02 1.182e+03 3.064e+03, threshold=1.893e+03, percent-clipped=2.0 +2023-04-01 02:44:58,891 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:44:59,531 INFO [train.py:903] (3/4) Epoch 4, batch 2450, loss[loss=0.3476, simple_loss=0.385, pruned_loss=0.1551, over 19538.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.356, pruned_loss=0.1238, over 3824141.82 frames. ], batch size: 54, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:45:14,097 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:28,847 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-04-01 02:45:40,912 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:44,367 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:45:57,746 INFO [train.py:903] (3/4) Epoch 4, batch 2500, loss[loss=0.3847, simple_loss=0.4133, pruned_loss=0.178, over 19363.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3572, pruned_loss=0.1247, over 3822868.02 frames. ], batch size: 66, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:46:09,341 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22995.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:16,256 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:46:48,542 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 7.308e+02 8.619e+02 1.083e+03 2.930e+03, threshold=1.724e+03, percent-clipped=3.0 +2023-04-01 02:46:57,732 INFO [train.py:903] (3/4) Epoch 4, batch 2550, loss[loss=0.3103, simple_loss=0.3681, pruned_loss=0.1263, over 17790.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3579, pruned_loss=0.125, over 3821812.11 frames. ], batch size: 101, lr: 2.01e-02, grad_scale: 8.0 +2023-04-01 02:47:49,710 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-01 02:47:57,385 INFO [train.py:903] (3/4) Epoch 4, batch 2600, loss[loss=0.3202, simple_loss=0.3778, pruned_loss=0.1313, over 19776.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.3567, pruned_loss=0.1245, over 3814123.94 frames. ], batch size: 56, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:48:33,782 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23116.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:36,146 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:46,614 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:48:48,299 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+02 6.946e+02 8.555e+02 1.074e+03 2.756e+03, threshold=1.711e+03, percent-clipped=6.0 +2023-04-01 02:48:58,591 INFO [train.py:903] (3/4) Epoch 4, batch 2650, loss[loss=0.256, simple_loss=0.3126, pruned_loss=0.09965, over 19054.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3561, pruned_loss=0.1242, over 3805326.33 frames. ], batch size: 42, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:49:08,318 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:17,083 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 02:49:22,944 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:49:58,046 INFO [train.py:903] (3/4) Epoch 4, batch 2700, loss[loss=0.2963, simple_loss=0.3541, pruned_loss=0.1193, over 19535.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3563, pruned_loss=0.1239, over 3822939.65 frames. ], batch size: 54, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:00,479 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23187.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:50:48,020 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.628e+02 7.002e+02 8.947e+02 1.091e+03 2.361e+03, threshold=1.789e+03, percent-clipped=7.0 +2023-04-01 02:50:57,147 INFO [train.py:903] (3/4) Epoch 4, batch 2750, loss[loss=0.3285, simple_loss=0.3788, pruned_loss=0.139, over 17421.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3572, pruned_loss=0.1248, over 3824808.66 frames. ], batch size: 101, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:50:57,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:11,506 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:41,063 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:51:56,453 INFO [train.py:903] (3/4) Epoch 4, batch 2800, loss[loss=0.3096, simple_loss=0.3737, pruned_loss=0.1228, over 19293.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3595, pruned_loss=0.1262, over 3804264.37 frames. 
], batch size: 66, lr: 2.00e-02, grad_scale: 8.0 +2023-04-01 02:52:17,021 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:52:45,202 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+02 7.909e+02 1.044e+03 1.347e+03 2.323e+03, threshold=2.087e+03, percent-clipped=7.0 +2023-04-01 02:52:56,810 INFO [train.py:903] (3/4) Epoch 4, batch 2850, loss[loss=0.3342, simple_loss=0.39, pruned_loss=0.1392, over 19643.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3606, pruned_loss=0.1268, over 3818153.22 frames. ], batch size: 55, lr: 1.99e-02, grad_scale: 8.0 +2023-04-01 02:53:03,168 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 02:53:41,875 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23372.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:53:44,370 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 02:53:56,271 INFO [train.py:903] (3/4) Epoch 4, batch 2900, loss[loss=0.3058, simple_loss=0.3695, pruned_loss=0.1211, over 19604.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3598, pruned_loss=0.1261, over 3833611.97 frames. ], batch size: 57, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:53:56,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 02:54:10,019 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:54:45,629 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 7.852e+02 1.023e+03 1.284e+03 2.319e+03, threshold=2.047e+03, percent-clipped=2.0 +2023-04-01 02:54:53,631 INFO [train.py:903] (3/4) Epoch 4, batch 2950, loss[loss=0.3373, simple_loss=0.3801, pruned_loss=0.1472, over 19669.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3596, pruned_loss=0.1264, over 3820793.53 frames. ], batch size: 60, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:20,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8899, 2.0572, 2.0365, 2.4251, 4.3340, 1.4444, 2.4284, 4.4050], + device='cuda:3'), covar=tensor([0.0243, 0.2241, 0.2159, 0.1265, 0.0433, 0.2096, 0.1212, 0.0277], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0304, 0.0304, 0.0276, 0.0297, 0.0319, 0.0280, 0.0286], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 02:55:35,117 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:55:45,360 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4512, 4.0285, 2.4766, 3.5803, 1.3211, 3.5274, 3.5855, 3.8627], + device='cuda:3'), covar=tensor([0.0612, 0.0934, 0.1888, 0.0793, 0.3496, 0.1095, 0.0758, 0.0820], + device='cuda:3'), in_proj_covar=tensor([0.0338, 0.0301, 0.0350, 0.0277, 0.0347, 0.0302, 0.0255, 0.0294], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:55:52,230 INFO [train.py:903] (3/4) Epoch 4, batch 3000, loss[loss=0.3254, simple_loss=0.3637, pruned_loss=0.1436, over 19832.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3587, pruned_loss=0.1251, over 3821538.32 frames. 
], batch size: 52, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:55:52,230 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 02:56:05,134 INFO [train.py:937] (3/4) Epoch 4, validation: loss=0.2145, simple_loss=0.3118, pruned_loss=0.05862, over 944034.00 frames. +2023-04-01 02:56:05,135 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 02:56:09,824 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 02:56:32,408 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,227 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23527.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:56:56,888 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+02 6.234e+02 7.977e+02 1.046e+03 2.333e+03, threshold=1.595e+03, percent-clipped=2.0 +2023-04-01 02:57:05,045 INFO [train.py:903] (3/4) Epoch 4, batch 3050, loss[loss=0.2301, simple_loss=0.293, pruned_loss=0.08365, over 19388.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3572, pruned_loss=0.1241, over 3829272.95 frames. ], batch size: 48, lr: 1.99e-02, grad_scale: 4.0 +2023-04-01 02:57:26,952 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23552.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:33,702 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:57:58,299 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:04,099 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:06,131 INFO [train.py:903] (3/4) Epoch 4, batch 3100, loss[loss=0.2917, simple_loss=0.3489, pruned_loss=0.1173, over 19577.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3576, pruned_loss=0.1247, over 3815884.24 frames. ], batch size: 52, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:58:06,491 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:13,283 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 02:58:55,944 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.915e+02 8.546e+02 1.092e+03 2.878e+03, threshold=1.709e+03, percent-clipped=7.0 +2023-04-01 02:59:01,876 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8444, 4.3996, 2.6657, 3.9773, 1.2317, 4.1592, 4.0264, 4.1937], + device='cuda:3'), covar=tensor([0.0523, 0.1016, 0.1963, 0.0686, 0.4039, 0.0888, 0.0714, 0.0774], + device='cuda:3'), in_proj_covar=tensor([0.0337, 0.0297, 0.0346, 0.0276, 0.0344, 0.0294, 0.0257, 0.0291], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 02:59:03,926 INFO [train.py:903] (3/4) Epoch 4, batch 3150, loss[loss=0.2641, simple_loss=0.3277, pruned_loss=0.1002, over 19781.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3578, pruned_loss=0.1247, over 3828367.44 frames. 
], batch size: 47, lr: 1.98e-02, grad_scale: 4.0 +2023-04-01 02:59:28,014 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 03:00:02,540 INFO [train.py:903] (3/4) Epoch 4, batch 3200, loss[loss=0.2423, simple_loss=0.3036, pruned_loss=0.09051, over 19776.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3585, pruned_loss=0.1254, over 3810065.15 frames. ], batch size: 46, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:00:13,229 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23694.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:29,411 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:00:44,988 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4595, 1.1501, 1.0413, 1.3078, 1.2387, 1.2822, 1.1052, 1.2334], + device='cuda:3'), covar=tensor([0.0911, 0.1153, 0.1270, 0.0828, 0.0841, 0.0528, 0.0987, 0.0721], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0376, 0.0279, 0.0246, 0.0308, 0.0259, 0.0270, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:00:53,667 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+02 7.379e+02 9.197e+02 1.143e+03 1.957e+03, threshold=1.839e+03, percent-clipped=5.0 +2023-04-01 03:01:02,112 INFO [train.py:903] (3/4) Epoch 4, batch 3250, loss[loss=0.2651, simple_loss=0.3406, pruned_loss=0.09482, over 19689.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3595, pruned_loss=0.126, over 3795494.75 frames. ], batch size: 59, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:01:51,726 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5120, 1.2486, 1.9129, 1.4813, 2.7216, 4.4415, 4.4156, 4.8673], + device='cuda:3'), covar=tensor([0.1408, 0.2810, 0.2704, 0.1801, 0.0483, 0.0077, 0.0124, 0.0063], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0278, 0.0318, 0.0258, 0.0188, 0.0105, 0.0201, 0.0125], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 03:02:01,902 INFO [train.py:903] (3/4) Epoch 4, batch 3300, loss[loss=0.2521, simple_loss=0.3176, pruned_loss=0.09335, over 19359.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3582, pruned_loss=0.1255, over 3802681.57 frames. ], batch size: 47, lr: 1.98e-02, grad_scale: 8.0 +2023-04-01 03:02:04,051 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 03:02:30,682 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8738, 4.2393, 4.6443, 4.4889, 1.4524, 4.1931, 3.6379, 4.1970], + device='cuda:3'), covar=tensor([0.0894, 0.0546, 0.0455, 0.0373, 0.4303, 0.0363, 0.0457, 0.0930], + device='cuda:3'), in_proj_covar=tensor([0.0449, 0.0394, 0.0537, 0.0419, 0.0539, 0.0304, 0.0347, 0.0500], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 03:02:54,105 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 7.772e+02 9.614e+02 1.210e+03 2.492e+03, threshold=1.923e+03, percent-clipped=5.0 +2023-04-01 03:03:02,121 INFO [train.py:903] (3/4) Epoch 4, batch 3350, loss[loss=0.3382, simple_loss=0.3829, pruned_loss=0.1467, over 19486.00 frames. 
], tot_loss[loss=0.3045, simple_loss=0.3579, pruned_loss=0.1255, over 3809547.09 frames. ], batch size: 64, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:03:09,290 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:19,211 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:03:40,414 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:04:01,844 INFO [train.py:903] (3/4) Epoch 4, batch 3400, loss[loss=0.3817, simple_loss=0.4122, pruned_loss=0.1756, over 17557.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3562, pruned_loss=0.1242, over 3816026.91 frames. ], batch size: 101, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:04:51,950 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6352, 1.4357, 1.2128, 1.6800, 1.4022, 1.5425, 1.4391, 1.4192], + device='cuda:3'), covar=tensor([0.0850, 0.1260, 0.1244, 0.0712, 0.0948, 0.0481, 0.0865, 0.0702], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0372, 0.0281, 0.0245, 0.0313, 0.0259, 0.0272, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:04:53,667 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.081e+02 7.350e+02 9.318e+02 1.202e+03 2.145e+03, threshold=1.864e+03, percent-clipped=3.0 +2023-04-01 03:05:01,736 INFO [train.py:903] (3/4) Epoch 4, batch 3450, loss[loss=0.2684, simple_loss=0.3289, pruned_loss=0.104, over 19845.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3555, pruned_loss=0.1236, over 3814549.27 frames. ], batch size: 52, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:05:01,760 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 03:05:22,667 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23950.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:36,157 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:39,449 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:05:50,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:04,981 INFO [train.py:903] (3/4) Epoch 4, batch 3500, loss[loss=0.2361, simple_loss=0.3023, pruned_loss=0.08497, over 19293.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3568, pruned_loss=0.1248, over 3802875.96 frames. ], batch size: 44, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:06:07,801 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:06:58,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+02 7.196e+02 8.612e+02 1.120e+03 2.630e+03, threshold=1.722e+03, percent-clipped=5.0 +2023-04-01 03:07:06,277 INFO [train.py:903] (3/4) Epoch 4, batch 3550, loss[loss=0.262, simple_loss=0.3182, pruned_loss=0.1029, over 19740.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.355, pruned_loss=0.1231, over 3820313.38 frames. 
], batch size: 46, lr: 1.97e-02, grad_scale: 8.0 +2023-04-01 03:07:28,455 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:07:45,474 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1514, 5.4942, 3.0248, 4.9617, 1.7182, 5.4989, 5.3405, 5.5102], + device='cuda:3'), covar=tensor([0.0419, 0.0913, 0.1715, 0.0499, 0.3247, 0.0640, 0.0612, 0.0546], + device='cuda:3'), in_proj_covar=tensor([0.0333, 0.0297, 0.0345, 0.0280, 0.0350, 0.0299, 0.0263, 0.0295], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:07:57,066 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.41 vs. limit=5.0 +2023-04-01 03:08:05,263 INFO [train.py:903] (3/4) Epoch 4, batch 3600, loss[loss=0.3715, simple_loss=0.412, pruned_loss=0.1655, over 19284.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3568, pruned_loss=0.1242, over 3810966.37 frames. ], batch size: 66, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:08:56,916 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+02 7.148e+02 8.733e+02 1.077e+03 2.339e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 03:09:04,858 INFO [train.py:903] (3/4) Epoch 4, batch 3650, loss[loss=0.2346, simple_loss=0.3128, pruned_loss=0.0782, over 19583.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3566, pruned_loss=0.1241, over 3817114.97 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 8.0 +2023-04-01 03:09:34,931 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2852, 2.2763, 2.0995, 3.4663, 2.3497, 3.7158, 3.5521, 2.0571], + device='cuda:3'), covar=tensor([0.1688, 0.1210, 0.0677, 0.0770, 0.1454, 0.0351, 0.0910, 0.1024], + device='cuda:3'), in_proj_covar=tensor([0.0563, 0.0541, 0.0512, 0.0707, 0.0601, 0.0456, 0.0611, 0.0522], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:10:05,498 INFO [train.py:903] (3/4) Epoch 4, batch 3700, loss[loss=0.2961, simple_loss=0.3319, pruned_loss=0.1302, over 19764.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3583, pruned_loss=0.1254, over 3816880.50 frames. 
], batch size: 45, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:10:16,642 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2177, 1.2556, 1.8326, 1.3609, 2.6091, 2.1830, 2.7232, 1.0085], + device='cuda:3'), covar=tensor([0.1515, 0.2367, 0.1256, 0.1270, 0.0916, 0.1143, 0.1046, 0.2221], + device='cuda:3'), in_proj_covar=tensor([0.0434, 0.0481, 0.0451, 0.0398, 0.0525, 0.0423, 0.0602, 0.0434], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:10:38,062 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9403, 1.9232, 1.7720, 2.8645, 1.9235, 2.7304, 2.5885, 1.7883], + device='cuda:3'), covar=tensor([0.1525, 0.1151, 0.0702, 0.0652, 0.1352, 0.0427, 0.1048, 0.1069], + device='cuda:3'), in_proj_covar=tensor([0.0567, 0.0536, 0.0512, 0.0700, 0.0601, 0.0457, 0.0610, 0.0521], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:10:48,459 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:10:58,950 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.207e+02 9.640e+02 1.134e+03 2.323e+03, threshold=1.928e+03, percent-clipped=6.0 +2023-04-01 03:11:07,277 INFO [train.py:903] (3/4) Epoch 4, batch 3750, loss[loss=0.3177, simple_loss=0.3734, pruned_loss=0.1311, over 19789.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3583, pruned_loss=0.1256, over 3816205.74 frames. ], batch size: 56, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:11:20,277 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:12:07,328 INFO [train.py:903] (3/4) Epoch 4, batch 3800, loss[loss=0.2836, simple_loss=0.3552, pruned_loss=0.106, over 19603.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3575, pruned_loss=0.1251, over 3818088.15 frames. ], batch size: 57, lr: 1.96e-02, grad_scale: 4.0 +2023-04-01 03:12:38,499 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 03:12:47,446 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2607, 1.1973, 1.5514, 1.2562, 2.1041, 1.9485, 2.2193, 0.8886], + device='cuda:3'), covar=tensor([0.1570, 0.2604, 0.1426, 0.1421, 0.1058, 0.1324, 0.1073, 0.2335], + device='cuda:3'), in_proj_covar=tensor([0.0433, 0.0484, 0.0454, 0.0401, 0.0525, 0.0426, 0.0608, 0.0435], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:12:53,008 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.73 vs. limit=5.0 +2023-04-01 03:13:00,201 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.239e+02 7.507e+02 9.076e+02 1.248e+03 3.254e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-04-01 03:13:07,208 INFO [train.py:903] (3/4) Epoch 4, batch 3850, loss[loss=0.2486, simple_loss=0.3261, pruned_loss=0.08556, over 19662.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3568, pruned_loss=0.1242, over 3816102.80 frames. 
], batch size: 58, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:13:38,878 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6728, 1.4275, 1.4702, 1.9232, 1.7215, 1.7196, 1.5103, 1.7159], + device='cuda:3'), covar=tensor([0.0849, 0.1411, 0.1176, 0.0758, 0.0957, 0.0427, 0.0882, 0.0606], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0373, 0.0282, 0.0249, 0.0309, 0.0254, 0.0272, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:14:06,605 INFO [train.py:903] (3/4) Epoch 4, batch 3900, loss[loss=0.2991, simple_loss=0.3609, pruned_loss=0.1187, over 19681.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3572, pruned_loss=0.1239, over 3824854.38 frames. ], batch size: 58, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:14:11,569 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-04-01 03:14:25,952 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:14:27,418 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1213, 1.1327, 1.4005, 1.2321, 1.6875, 1.7297, 1.8480, 0.4388], + device='cuda:3'), covar=tensor([0.1565, 0.2442, 0.1322, 0.1403, 0.1045, 0.1382, 0.1019, 0.2373], + device='cuda:3'), in_proj_covar=tensor([0.0439, 0.0487, 0.0460, 0.0407, 0.0531, 0.0428, 0.0617, 0.0437], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:15:00,806 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+02 8.238e+02 9.729e+02 1.230e+03 4.971e+03, threshold=1.946e+03, percent-clipped=9.0 +2023-04-01 03:15:09,421 INFO [train.py:903] (3/4) Epoch 4, batch 3950, loss[loss=0.2707, simple_loss=0.3315, pruned_loss=0.1049, over 19750.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3568, pruned_loss=0.1236, over 3835884.19 frames. ], batch size: 51, lr: 1.95e-02, grad_scale: 4.0 +2023-04-01 03:15:17,175 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 03:16:10,710 INFO [train.py:903] (3/4) Epoch 4, batch 4000, loss[loss=0.2929, simple_loss=0.3493, pruned_loss=0.1183, over 19576.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3571, pruned_loss=0.1234, over 3837695.12 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:16:45,854 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24514.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:16:58,859 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-01 03:17:03,419 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.288e+02 6.850e+02 8.525e+02 1.041e+03 2.187e+03, threshold=1.705e+03, percent-clipped=1.0 +2023-04-01 03:17:08,329 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.7039, 0.9048, 0.6919, 0.6885, 0.8466, 0.5338, 0.5053, 0.8500], + device='cuda:3'), covar=tensor([0.0319, 0.0362, 0.0602, 0.0304, 0.0284, 0.0715, 0.0429, 0.0291], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0249, 0.0312, 0.0241, 0.0217, 0.0308, 0.0278, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:17:09,918 INFO [train.py:903] (3/4) Epoch 4, batch 4050, loss[loss=0.2806, simple_loss=0.3343, pruned_loss=0.1135, over 19383.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3566, pruned_loss=0.1235, over 3826264.06 frames. ], batch size: 48, lr: 1.95e-02, grad_scale: 8.0 +2023-04-01 03:17:47,527 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24565.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:17:47,602 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5544, 1.4370, 1.7169, 1.6776, 3.2019, 4.4343, 4.5057, 4.8537], + device='cuda:3'), covar=tensor([0.1326, 0.2636, 0.2749, 0.1603, 0.0352, 0.0119, 0.0131, 0.0054], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0283, 0.0325, 0.0260, 0.0192, 0.0110, 0.0205, 0.0126], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 03:18:10,059 INFO [train.py:903] (3/4) Epoch 4, batch 4100, loss[loss=0.3281, simple_loss=0.3844, pruned_loss=0.1359, over 19513.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3566, pruned_loss=0.1231, over 3822502.68 frames. ], batch size: 70, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:18:12,590 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:18:49,628 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 03:19:04,404 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+02 6.813e+02 8.989e+02 1.047e+03 2.179e+03, threshold=1.798e+03, percent-clipped=2.0 +2023-04-01 03:19:12,697 INFO [train.py:903] (3/4) Epoch 4, batch 4150, loss[loss=0.3519, simple_loss=0.4026, pruned_loss=0.1506, over 19683.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3536, pruned_loss=0.1212, over 3832170.56 frames. ], batch size: 59, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:19:19,920 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9611, 1.9764, 1.5270, 1.4927, 1.3547, 1.4887, 0.3673, 0.8930], + device='cuda:3'), covar=tensor([0.0213, 0.0195, 0.0163, 0.0215, 0.0453, 0.0266, 0.0431, 0.0389], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0270, 0.0263, 0.0288, 0.0354, 0.0277, 0.0271, 0.0282], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:20:13,433 INFO [train.py:903] (3/4) Epoch 4, batch 4200, loss[loss=0.2929, simple_loss=0.3519, pruned_loss=0.117, over 19768.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3538, pruned_loss=0.1211, over 3831094.04 frames. 
], batch size: 63, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:20:19,920 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 03:20:31,296 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8543, 1.8452, 1.7785, 2.7998, 1.6220, 2.3803, 2.3866, 1.7054], + device='cuda:3'), covar=tensor([0.1478, 0.1108, 0.0672, 0.0622, 0.1465, 0.0494, 0.1118, 0.1110], + device='cuda:3'), in_proj_covar=tensor([0.0566, 0.0540, 0.0514, 0.0707, 0.0607, 0.0462, 0.0615, 0.0527], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:20:33,582 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2585, 2.2383, 1.5684, 1.6996, 2.0919, 0.9791, 1.0392, 1.6468], + device='cuda:3'), covar=tensor([0.0783, 0.0459, 0.0860, 0.0415, 0.0412, 0.1066, 0.0670, 0.0387], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0258, 0.0321, 0.0249, 0.0219, 0.0315, 0.0286, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:21:05,885 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 7.220e+02 8.850e+02 1.090e+03 2.101e+03, threshold=1.770e+03, percent-clipped=3.0 +2023-04-01 03:21:12,814 INFO [train.py:903] (3/4) Epoch 4, batch 4250, loss[loss=0.2928, simple_loss=0.3459, pruned_loss=0.1198, over 19616.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3546, pruned_loss=0.1217, over 3814275.75 frames. ], batch size: 50, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:21:29,767 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 03:21:41,556 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 03:21:56,492 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:10,842 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24783.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:12,642 INFO [train.py:903] (3/4) Epoch 4, batch 4300, loss[loss=0.3007, simple_loss=0.3658, pruned_loss=0.1178, over 19658.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3546, pruned_loss=0.1215, over 3836278.37 frames. 
], batch size: 60, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:22:26,945 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:22:31,625 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1432, 2.6841, 1.8862, 2.2823, 1.8777, 2.0723, 0.6980, 2.0484], + device='cuda:3'), covar=tensor([0.0245, 0.0227, 0.0247, 0.0371, 0.0464, 0.0429, 0.0576, 0.0438], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0272, 0.0264, 0.0288, 0.0353, 0.0278, 0.0269, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:22:54,057 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4107, 2.3576, 2.1135, 3.6933, 2.3278, 3.8404, 3.3756, 2.0582], + device='cuda:3'), covar=tensor([0.1673, 0.1203, 0.0684, 0.0724, 0.1527, 0.0328, 0.1031, 0.1079], + device='cuda:3'), in_proj_covar=tensor([0.0568, 0.0544, 0.0512, 0.0706, 0.0605, 0.0458, 0.0618, 0.0525], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:23:06,670 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.009e+02 7.093e+02 8.869e+02 1.163e+03 2.104e+03, threshold=1.774e+03, percent-clipped=1.0 +2023-04-01 03:23:08,952 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 03:23:14,538 INFO [train.py:903] (3/4) Epoch 4, batch 4350, loss[loss=0.2651, simple_loss=0.3231, pruned_loss=0.1035, over 19727.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3538, pruned_loss=0.1212, over 3831375.45 frames. ], batch size: 45, lr: 1.94e-02, grad_scale: 8.0 +2023-04-01 03:23:21,138 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0951, 1.0823, 1.3486, 1.2057, 1.6931, 1.7061, 1.9149, 0.4450], + device='cuda:3'), covar=tensor([0.1691, 0.2748, 0.1470, 0.1466, 0.1115, 0.1477, 0.1042, 0.2641], + device='cuda:3'), in_proj_covar=tensor([0.0433, 0.0481, 0.0452, 0.0405, 0.0525, 0.0421, 0.0603, 0.0430], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:23:52,227 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:15,086 INFO [train.py:903] (3/4) Epoch 4, batch 4400, loss[loss=0.2731, simple_loss=0.3327, pruned_loss=0.1068, over 19384.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3546, pruned_loss=0.1216, over 3806477.08 frames. ], batch size: 48, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:24:21,307 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 03:24:40,901 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 03:24:44,304 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:24:50,761 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-01 03:25:09,651 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.331e+02 8.078e+02 9.889e+02 1.280e+03 3.768e+03, threshold=1.978e+03, percent-clipped=10.0 +2023-04-01 03:25:12,026 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:25:16,506 INFO [train.py:903] (3/4) Epoch 4, batch 4450, loss[loss=0.2636, simple_loss=0.334, pruned_loss=0.09656, over 19775.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3546, pruned_loss=0.122, over 3806643.90 frames. ], batch size: 54, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:17,094 INFO [train.py:903] (3/4) Epoch 4, batch 4500, loss[loss=0.2536, simple_loss=0.3225, pruned_loss=0.09241, over 19593.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3538, pruned_loss=0.1211, over 3814941.64 frames. ], batch size: 50, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:26:21,826 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0472, 1.9995, 1.8970, 2.8723, 2.0511, 2.8043, 2.5514, 1.8795], + device='cuda:3'), covar=tensor([0.1420, 0.1126, 0.0677, 0.0718, 0.1278, 0.0435, 0.1095, 0.1059], + device='cuda:3'), in_proj_covar=tensor([0.0573, 0.0552, 0.0518, 0.0716, 0.0608, 0.0462, 0.0625, 0.0533], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:26:52,456 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:04,658 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:11,028 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.968e+02 6.473e+02 7.865e+02 1.057e+03 2.211e+03, threshold=1.573e+03, percent-clipped=1.0 +2023-04-01 03:27:18,537 INFO [train.py:903] (3/4) Epoch 4, batch 4550, loss[loss=0.2527, simple_loss=0.3117, pruned_loss=0.09682, over 18635.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3543, pruned_loss=0.1218, over 3812666.55 frames. ], batch size: 41, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:27:27,108 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 03:27:31,925 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:27:50,568 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 03:27:54,650 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 03:28:18,970 INFO [train.py:903] (3/4) Epoch 4, batch 4600, loss[loss=0.2816, simple_loss=0.3339, pruned_loss=0.1147, over 19414.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3545, pruned_loss=0.1219, over 3809579.02 frames. ], batch size: 48, lr: 1.93e-02, grad_scale: 8.0 +2023-04-01 03:29:10,701 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:29:12,893 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+02 7.418e+02 9.211e+02 1.176e+03 2.853e+03, threshold=1.842e+03, percent-clipped=7.0 +2023-04-01 03:29:20,334 INFO [train.py:903] (3/4) Epoch 4, batch 4650, loss[loss=0.3158, simple_loss=0.3664, pruned_loss=0.1326, over 19772.00 frames. 
], tot_loss[loss=0.3001, simple_loss=0.355, pruned_loss=0.1226, over 3815759.28 frames. ], batch size: 54, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:29:37,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 03:29:46,700 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 03:30:19,358 INFO [train.py:903] (3/4) Epoch 4, batch 4700, loss[loss=0.3913, simple_loss=0.425, pruned_loss=0.1788, over 19631.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3552, pruned_loss=0.1231, over 3823825.88 frames. ], batch size: 61, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:30:42,828 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 03:30:47,461 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7536, 4.1339, 4.4268, 4.3959, 1.4303, 3.8488, 3.5023, 4.0085], + device='cuda:3'), covar=tensor([0.0964, 0.0581, 0.0521, 0.0411, 0.4100, 0.0380, 0.0521, 0.1017], + device='cuda:3'), in_proj_covar=tensor([0.0456, 0.0397, 0.0533, 0.0423, 0.0528, 0.0307, 0.0342, 0.0502], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 03:30:50,633 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:13,782 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+02 7.587e+02 9.394e+02 1.259e+03 3.233e+03, threshold=1.879e+03, percent-clipped=11.0 +2023-04-01 03:31:21,436 INFO [train.py:903] (3/4) Epoch 4, batch 4750, loss[loss=0.2938, simple_loss=0.3481, pruned_loss=0.1198, over 19762.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3557, pruned_loss=0.1234, over 3827811.71 frames. ], batch size: 54, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:31:24,252 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2023, 1.3120, 1.8768, 1.5009, 2.5644, 2.2283, 2.7604, 0.9556], + device='cuda:3'), covar=tensor([0.1690, 0.2655, 0.1450, 0.1309, 0.1165, 0.1262, 0.1247, 0.2458], + device='cuda:3'), in_proj_covar=tensor([0.0437, 0.0480, 0.0454, 0.0404, 0.0529, 0.0426, 0.0604, 0.0427], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:31:30,726 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25242.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:31:39,441 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6954, 1.2196, 1.2329, 1.6059, 1.4208, 1.5253, 1.3821, 1.4859], + device='cuda:3'), covar=tensor([0.0825, 0.1294, 0.1228, 0.0843, 0.1004, 0.0485, 0.0867, 0.0644], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0376, 0.0284, 0.0250, 0.0312, 0.0262, 0.0275, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:31:44,150 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 03:32:16,226 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:21,434 INFO [train.py:903] (3/4) Epoch 4, batch 4800, loss[loss=0.3215, simple_loss=0.3727, pruned_loss=0.1352, over 13418.00 frames. 
], tot_loss[loss=0.2993, simple_loss=0.3541, pruned_loss=0.1222, over 3812809.87 frames. ], batch size: 136, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:32:27,815 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 03:32:41,282 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:32:44,310 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:09,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:11,914 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:33:13,791 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+02 7.095e+02 8.715e+02 1.261e+03 2.828e+03, threshold=1.743e+03, percent-clipped=4.0 +2023-04-01 03:33:21,567 INFO [train.py:903] (3/4) Epoch 4, batch 4850, loss[loss=0.2577, simple_loss=0.3194, pruned_loss=0.09797, over 19843.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3535, pruned_loss=0.1218, over 3819278.80 frames. ], batch size: 52, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:33:28,356 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3716, 2.2370, 1.6579, 1.6998, 1.6483, 1.7068, 0.3467, 1.2135], + device='cuda:3'), covar=tensor([0.0235, 0.0229, 0.0202, 0.0290, 0.0498, 0.0327, 0.0545, 0.0416], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0271, 0.0266, 0.0289, 0.0355, 0.0285, 0.0272, 0.0280], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:33:45,829 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 03:33:48,283 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:34:04,854 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 03:34:11,327 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 03:34:12,496 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 03:34:21,579 INFO [train.py:903] (3/4) Epoch 4, batch 4900, loss[loss=0.3273, simple_loss=0.3757, pruned_loss=0.1395, over 18273.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3535, pruned_loss=0.1217, over 3821983.42 frames. ], batch size: 84, lr: 1.92e-02, grad_scale: 8.0 +2023-04-01 03:34:21,587 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. 
Duration: 25.3818125 +2023-04-01 03:34:34,900 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8135, 1.2960, 2.1410, 1.6756, 2.9250, 4.4755, 4.5352, 4.9119], + device='cuda:3'), covar=tensor([0.1168, 0.2631, 0.2293, 0.1578, 0.0416, 0.0117, 0.0116, 0.0058], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0278, 0.0317, 0.0257, 0.0195, 0.0113, 0.0201, 0.0126], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 03:34:41,705 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 03:34:43,939 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4731, 4.0384, 2.3502, 3.6294, 0.9874, 3.5610, 3.6122, 3.8409], + device='cuda:3'), covar=tensor([0.0582, 0.1162, 0.2110, 0.0704, 0.3956, 0.1043, 0.0723, 0.0875], + device='cuda:3'), in_proj_covar=tensor([0.0334, 0.0294, 0.0354, 0.0278, 0.0349, 0.0301, 0.0269, 0.0300], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:35:16,121 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.171e+02 7.094e+02 8.660e+02 1.069e+03 1.655e+03, threshold=1.732e+03, percent-clipped=0.0 +2023-04-01 03:35:23,611 INFO [train.py:903] (3/4) Epoch 4, batch 4950, loss[loss=0.2675, simple_loss=0.3264, pruned_loss=0.1043, over 19612.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3546, pruned_loss=0.1217, over 3815342.88 frames. ], batch size: 50, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:35:37,195 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25446.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:35:41,246 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 03:36:04,441 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 03:36:04,788 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7982, 1.3223, 1.2661, 2.0964, 1.6521, 1.8926, 2.0549, 1.7320], + device='cuda:3'), covar=tensor([0.0680, 0.0999, 0.1081, 0.0758, 0.0833, 0.0686, 0.0827, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0244, 0.0262, 0.0254, 0.0288, 0.0286, 0.0244, 0.0257, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 03:36:09,084 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:24,197 INFO [train.py:903] (3/4) Epoch 4, batch 5000, loss[loss=0.3126, simple_loss=0.3451, pruned_loss=0.14, over 19778.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3557, pruned_loss=0.1224, over 3801132.07 frames. ], batch size: 48, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:36:33,161 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 03:36:40,138 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25498.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:36:44,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 03:37:11,000 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25523.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:37:17,559 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.823e+02 8.824e+02 1.078e+03 2.588e+03, threshold=1.765e+03, percent-clipped=9.0 +2023-04-01 03:37:24,344 INFO [train.py:903] (3/4) Epoch 4, batch 5050, loss[loss=0.3032, simple_loss=0.3674, pruned_loss=0.1195, over 19547.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3555, pruned_loss=0.1224, over 3809851.28 frames. ], batch size: 56, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:02,119 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 03:38:06,290 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-01 03:38:21,775 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:38:25,844 INFO [train.py:903] (3/4) Epoch 4, batch 5100, loss[loss=0.3394, simple_loss=0.3915, pruned_loss=0.1436, over 19073.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3543, pruned_loss=0.1216, over 3809714.35 frames. ], batch size: 69, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:38:37,692 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 03:38:40,928 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 03:38:44,278 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 03:38:52,402 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:39:19,438 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.903e+02 6.354e+02 8.688e+02 1.168e+03 2.387e+03, threshold=1.738e+03, percent-clipped=4.0 +2023-04-01 03:39:27,220 INFO [train.py:903] (3/4) Epoch 4, batch 5150, loss[loss=0.2419, simple_loss=0.3, pruned_loss=0.09194, over 19783.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3538, pruned_loss=0.1221, over 3802487.39 frames. ], batch size: 48, lr: 1.91e-02, grad_scale: 8.0 +2023-04-01 03:39:39,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 03:39:55,239 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8187, 1.8700, 1.5862, 2.8382, 2.0200, 2.7306, 2.3133, 1.2187], + device='cuda:3'), covar=tensor([0.2067, 0.1664, 0.1229, 0.0956, 0.1671, 0.0625, 0.1871, 0.2192], + device='cuda:3'), in_proj_covar=tensor([0.0567, 0.0549, 0.0515, 0.0708, 0.0613, 0.0469, 0.0622, 0.0525], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:39:58,724 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3077, 2.8767, 2.1141, 2.3169, 2.0246, 2.2768, 0.6485, 2.3965], + device='cuda:3'), covar=tensor([0.0240, 0.0244, 0.0233, 0.0315, 0.0442, 0.0338, 0.0535, 0.0359], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0274, 0.0265, 0.0295, 0.0355, 0.0283, 0.0274, 0.0280], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:39:59,816 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9473, 1.1998, 1.3522, 1.4357, 2.5333, 1.0216, 1.7939, 2.6008], + device='cuda:3'), covar=tensor([0.0481, 0.2444, 0.2243, 0.1472, 0.0604, 0.2097, 0.1101, 0.0460], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0308, 0.0306, 0.0280, 0.0304, 0.0316, 0.0285, 0.0291], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 03:40:13,785 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 03:40:28,111 INFO [train.py:903] (3/4) Epoch 4, batch 5200, loss[loss=0.267, simple_loss=0.3286, pruned_loss=0.1027, over 19678.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3538, pruned_loss=0.1216, over 3805857.45 frames. ], batch size: 53, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:40:29,757 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2143, 1.1819, 1.9398, 1.4485, 2.8978, 2.5333, 3.1455, 1.2491], + device='cuda:3'), covar=tensor([0.1976, 0.3019, 0.1587, 0.1492, 0.1252, 0.1434, 0.1535, 0.2846], + device='cuda:3'), in_proj_covar=tensor([0.0453, 0.0499, 0.0462, 0.0408, 0.0539, 0.0443, 0.0620, 0.0437], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:40:42,797 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 03:41:20,992 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:41:21,773 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 7.304e+02 9.145e+02 1.165e+03 2.884e+03, threshold=1.829e+03, percent-clipped=6.0 +2023-04-01 03:41:25,403 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 03:41:28,710 INFO [train.py:903] (3/4) Epoch 4, batch 5250, loss[loss=0.3306, simple_loss=0.3784, pruned_loss=0.1414, over 19472.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.354, pruned_loss=0.1214, over 3815596.44 frames. ], batch size: 49, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:41:44,112 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. 
limit=2.0 +2023-04-01 03:41:52,257 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:42:30,028 INFO [train.py:903] (3/4) Epoch 4, batch 5300, loss[loss=0.359, simple_loss=0.4001, pruned_loss=0.159, over 19728.00 frames. ], tot_loss[loss=0.2974, simple_loss=0.3533, pruned_loss=0.1207, over 3811048.99 frames. ], batch size: 63, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:42:36,786 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25790.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:42:48,922 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 03:43:23,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 7.857e+02 9.756e+02 1.201e+03 3.803e+03, threshold=1.951e+03, percent-clipped=8.0 +2023-04-01 03:43:31,862 INFO [train.py:903] (3/4) Epoch 4, batch 5350, loss[loss=0.3617, simple_loss=0.4106, pruned_loss=0.1565, over 19646.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3553, pruned_loss=0.1227, over 3816268.65 frames. ], batch size: 60, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:04,176 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 03:44:20,736 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0245, 4.9108, 5.8592, 5.7113, 1.8610, 5.4013, 4.7567, 5.2407], + device='cuda:3'), covar=tensor([0.0746, 0.0516, 0.0366, 0.0307, 0.3562, 0.0216, 0.0365, 0.0824], + device='cuda:3'), in_proj_covar=tensor([0.0461, 0.0404, 0.0540, 0.0438, 0.0533, 0.0317, 0.0359, 0.0514], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 03:44:21,925 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25876.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:44:32,191 INFO [train.py:903] (3/4) Epoch 4, batch 5400, loss[loss=0.2578, simple_loss=0.333, pruned_loss=0.09128, over 19691.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3562, pruned_loss=0.1234, over 3817803.99 frames. ], batch size: 59, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:44:56,679 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:45:26,555 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.180e+02 6.939e+02 8.636e+02 1.056e+03 2.577e+03, threshold=1.727e+03, percent-clipped=2.0 +2023-04-01 03:45:33,338 INFO [train.py:903] (3/4) Epoch 4, batch 5450, loss[loss=0.3451, simple_loss=0.3936, pruned_loss=0.1483, over 19682.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.355, pruned_loss=0.1226, over 3815171.73 frames. ], batch size: 59, lr: 1.90e-02, grad_scale: 8.0 +2023-04-01 03:46:34,635 INFO [train.py:903] (3/4) Epoch 4, batch 5500, loss[loss=0.2991, simple_loss=0.3618, pruned_loss=0.1182, over 19475.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3548, pruned_loss=0.1221, over 3815880.12 frames. ], batch size: 64, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:46:58,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 03:47:31,760 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+02 7.568e+02 9.013e+02 1.115e+03 1.816e+03, threshold=1.803e+03, percent-clipped=1.0 +2023-04-01 03:47:37,485 INFO [train.py:903] (3/4) Epoch 4, batch 5550, loss[loss=0.287, simple_loss=0.3384, pruned_loss=0.1178, over 19479.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3538, pruned_loss=0.1213, over 3815811.37 frames. ], batch size: 49, lr: 1.89e-02, grad_scale: 4.0 +2023-04-01 03:47:45,468 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 03:48:33,397 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 03:48:38,931 INFO [train.py:903] (3/4) Epoch 4, batch 5600, loss[loss=0.3724, simple_loss=0.3979, pruned_loss=0.1735, over 13093.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3528, pruned_loss=0.1206, over 3812410.19 frames. ], batch size: 135, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:49:22,555 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:49:34,176 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+02 7.669e+02 9.359e+02 1.114e+03 3.409e+03, threshold=1.872e+03, percent-clipped=3.0 +2023-04-01 03:49:40,070 INFO [train.py:903] (3/4) Epoch 4, batch 5650, loss[loss=0.2432, simple_loss=0.3055, pruned_loss=0.09043, over 19499.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.352, pruned_loss=0.1203, over 3823185.63 frames. ], batch size: 49, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:12,585 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26161.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:50:25,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 03:50:40,052 INFO [train.py:903] (3/4) Epoch 4, batch 5700, loss[loss=0.3051, simple_loss=0.3586, pruned_loss=0.1257, over 19671.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.352, pruned_loss=0.1201, over 3824993.32 frames. ], batch size: 55, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:50:42,138 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:50:42,238 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26186.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:23,043 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26220.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:51:35,334 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.235e+02 7.760e+02 9.507e+02 1.157e+03 2.773e+03, threshold=1.901e+03, percent-clipped=5.0 +2023-04-01 03:51:39,943 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 03:51:40,952 INFO [train.py:903] (3/4) Epoch 4, batch 5750, loss[loss=0.2954, simple_loss=0.3692, pruned_loss=0.1108, over 19089.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3526, pruned_loss=0.1207, over 3802027.16 frames. ], batch size: 69, lr: 1.89e-02, grad_scale: 8.0 +2023-04-01 03:51:48,676 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-01 03:51:52,962 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 03:52:21,192 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:52:40,293 INFO [train.py:903] (3/4) Epoch 4, batch 5800, loss[loss=0.3028, simple_loss=0.3646, pruned_loss=0.1205, over 19680.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3535, pruned_loss=0.1216, over 3792716.26 frames. ], batch size: 59, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:53:36,643 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.787e+02 8.542e+02 1.114e+03 2.576e+03, threshold=1.708e+03, percent-clipped=4.0 +2023-04-01 03:53:41,252 INFO [train.py:903] (3/4) Epoch 4, batch 5850, loss[loss=0.2672, simple_loss=0.3416, pruned_loss=0.09639, over 19734.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3541, pruned_loss=0.1219, over 3796234.05 frames. ], batch size: 63, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:53:41,613 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26335.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:54:40,815 INFO [train.py:903] (3/4) Epoch 4, batch 5900, loss[loss=0.3462, simple_loss=0.3847, pruned_loss=0.1538, over 13361.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.353, pruned_loss=0.1213, over 3802696.44 frames. ], batch size: 136, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:54:41,863 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 03:55:03,848 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 03:55:37,699 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+02 6.673e+02 8.574e+02 1.115e+03 3.080e+03, threshold=1.715e+03, percent-clipped=4.0 +2023-04-01 03:55:39,780 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26432.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:55:43,001 INFO [train.py:903] (3/4) Epoch 4, batch 5950, loss[loss=0.3308, simple_loss=0.3791, pruned_loss=0.1412, over 19355.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3542, pruned_loss=0.122, over 3793599.04 frames. 
], batch size: 66, lr: 1.88e-02, grad_scale: 4.0 +2023-04-01 03:55:44,118 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2485, 3.6428, 3.9735, 4.0988, 1.4732, 3.7310, 3.3268, 3.1740], + device='cuda:3'), covar=tensor([0.1833, 0.1313, 0.0987, 0.0865, 0.5306, 0.0736, 0.0850, 0.2058], + device='cuda:3'), in_proj_covar=tensor([0.0470, 0.0419, 0.0547, 0.0442, 0.0539, 0.0316, 0.0360, 0.0522], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 03:56:15,420 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4313, 2.3929, 1.8728, 2.0287, 1.7854, 2.1484, 1.1569, 2.0423], + device='cuda:3'), covar=tensor([0.0187, 0.0255, 0.0205, 0.0243, 0.0358, 0.0268, 0.0408, 0.0313], + device='cuda:3'), in_proj_covar=tensor([0.0271, 0.0272, 0.0264, 0.0294, 0.0351, 0.0277, 0.0268, 0.0282], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 03:56:17,511 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:56:43,852 INFO [train.py:903] (3/4) Epoch 4, batch 6000, loss[loss=0.2413, simple_loss=0.3003, pruned_loss=0.09113, over 19107.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3538, pruned_loss=0.1216, over 3794097.01 frames. ], batch size: 42, lr: 1.88e-02, grad_scale: 8.0 +2023-04-01 03:56:43,852 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 03:56:57,348 INFO [train.py:937] (3/4) Epoch 4, validation: loss=0.2103, simple_loss=0.3081, pruned_loss=0.05622, over 944034.00 frames. +2023-04-01 03:56:57,349 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 03:57:52,005 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:57:52,999 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+02 7.085e+02 8.722e+02 1.145e+03 2.334e+03, threshold=1.744e+03, percent-clipped=4.0 +2023-04-01 03:57:57,465 INFO [train.py:903] (3/4) Epoch 4, batch 6050, loss[loss=0.3192, simple_loss=0.3702, pruned_loss=0.1341, over 19555.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3536, pruned_loss=0.1216, over 3800610.13 frames. ], batch size: 61, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:58:18,096 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1905, 2.0875, 1.8477, 3.1557, 2.0939, 3.5849, 3.0345, 1.9234], + device='cuda:3'), covar=tensor([0.1895, 0.1439, 0.0770, 0.1008, 0.1779, 0.0405, 0.1295, 0.1284], + device='cuda:3'), in_proj_covar=tensor([0.0588, 0.0571, 0.0531, 0.0730, 0.0630, 0.0489, 0.0637, 0.0542], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 03:58:49,976 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:58:57,312 INFO [train.py:903] (3/4) Epoch 4, batch 6100, loss[loss=0.2842, simple_loss=0.35, pruned_loss=0.1092, over 19759.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3527, pruned_loss=0.1207, over 3819683.98 frames. 
], batch size: 54, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 03:59:04,787 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26591.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 03:59:29,876 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 03:59:34,622 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26616.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 03:59:51,598 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 6.768e+02 7.963e+02 1.032e+03 2.370e+03, threshold=1.593e+03, percent-clipped=5.0 +2023-04-01 03:59:56,257 INFO [train.py:903] (3/4) Epoch 4, batch 6150, loss[loss=0.3342, simple_loss=0.3894, pruned_loss=0.1395, over 19704.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3535, pruned_loss=0.1214, over 3811172.03 frames. ], batch size: 59, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:00:08,599 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:00:23,789 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 04:00:54,794 INFO [train.py:903] (3/4) Epoch 4, batch 6200, loss[loss=0.3103, simple_loss=0.3744, pruned_loss=0.123, over 19663.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3541, pruned_loss=0.1225, over 3806949.32 frames. ], batch size: 58, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:01:45,218 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26727.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:01:51,166 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+02 7.340e+02 8.907e+02 1.200e+03 2.630e+03, threshold=1.781e+03, percent-clipped=5.0 +2023-04-01 04:01:55,636 INFO [train.py:903] (3/4) Epoch 4, batch 6250, loss[loss=0.3175, simple_loss=0.3742, pruned_loss=0.1304, over 18713.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3547, pruned_loss=0.1226, over 3815130.42 frames. ], batch size: 74, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:02:10,574 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8162, 1.8881, 1.7552, 2.6565, 1.8987, 2.7218, 2.3952, 1.6597], + device='cuda:3'), covar=tensor([0.1661, 0.1300, 0.0816, 0.0778, 0.1430, 0.0460, 0.1448, 0.1394], + device='cuda:3'), in_proj_covar=tensor([0.0581, 0.0565, 0.0529, 0.0725, 0.0625, 0.0485, 0.0633, 0.0538], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:02:24,773 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-01 04:02:42,400 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8136, 4.0574, 4.4354, 4.4490, 1.7299, 4.0398, 3.7412, 3.9818], + device='cuda:3'), covar=tensor([0.0747, 0.0678, 0.0464, 0.0327, 0.3358, 0.0341, 0.0393, 0.0837], + device='cuda:3'), in_proj_covar=tensor([0.0463, 0.0410, 0.0554, 0.0441, 0.0536, 0.0316, 0.0366, 0.0519], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 04:02:44,567 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:02:49,364 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 04:02:55,313 INFO [train.py:903] (3/4) Epoch 4, batch 6300, loss[loss=0.3307, simple_loss=0.3675, pruned_loss=0.147, over 19853.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3551, pruned_loss=0.1226, over 3808652.19 frames. ], batch size: 52, lr: 1.87e-02, grad_scale: 8.0 +2023-04-01 04:03:49,870 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.898e+02 7.473e+02 9.358e+02 1.215e+03 3.413e+03, threshold=1.872e+03, percent-clipped=4.0 +2023-04-01 04:03:54,565 INFO [train.py:903] (3/4) Epoch 4, batch 6350, loss[loss=0.293, simple_loss=0.3633, pruned_loss=0.1113, over 19663.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3544, pruned_loss=0.1222, over 3807599.31 frames. ], batch size: 53, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:03:55,024 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:26,067 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:04:37,816 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 04:04:45,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0791, 3.1761, 3.4998, 3.4991, 1.9255, 3.1407, 3.0459, 3.1768], + device='cuda:3'), covar=tensor([0.0904, 0.1610, 0.0570, 0.0452, 0.2977, 0.0631, 0.0452, 0.0936], + device='cuda:3'), in_proj_covar=tensor([0.0468, 0.0416, 0.0559, 0.0442, 0.0540, 0.0319, 0.0367, 0.0524], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 04:04:55,432 INFO [train.py:903] (3/4) Epoch 4, batch 6400, loss[loss=0.2838, simple_loss=0.3508, pruned_loss=0.1084, over 19627.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3535, pruned_loss=0.1213, over 3808994.97 frames. 
], batch size: 57, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:05:05,497 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26891.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:16,543 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:41,172 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26922.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:05:45,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:05:52,842 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+02 6.991e+02 8.678e+02 1.023e+03 2.915e+03, threshold=1.736e+03, percent-clipped=2.0 +2023-04-01 04:05:58,084 INFO [train.py:903] (3/4) Epoch 4, batch 6450, loss[loss=0.3003, simple_loss=0.36, pruned_loss=0.1203, over 19667.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3548, pruned_loss=0.1217, over 3809762.85 frames. ], batch size: 55, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:06:37,698 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:40,563 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 04:06:56,681 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:06:58,607 INFO [train.py:903] (3/4) Epoch 4, batch 6500, loss[loss=0.2665, simple_loss=0.3282, pruned_loss=0.1024, over 19469.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3556, pruned_loss=0.1222, over 3813783.85 frames. ], batch size: 49, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:07:03,008 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 04:07:25,287 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27008.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:07:53,581 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+02 7.867e+02 9.982e+02 1.245e+03 2.621e+03, threshold=1.996e+03, percent-clipped=6.0 +2023-04-01 04:07:55,893 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5868, 1.3577, 1.3695, 1.6316, 1.6101, 1.4121, 1.3365, 1.5578], + device='cuda:3'), covar=tensor([0.0629, 0.1071, 0.0995, 0.0624, 0.0758, 0.0417, 0.0841, 0.0532], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0369, 0.0279, 0.0240, 0.0305, 0.0254, 0.0272, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 04:07:57,612 INFO [train.py:903] (3/4) Epoch 4, batch 6550, loss[loss=0.2972, simple_loss=0.3501, pruned_loss=0.1221, over 19669.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3572, pruned_loss=0.1239, over 3817696.90 frames. ], batch size: 53, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:08:57,153 INFO [train.py:903] (3/4) Epoch 4, batch 6600, loss[loss=0.2618, simple_loss=0.336, pruned_loss=0.09376, over 19674.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3562, pruned_loss=0.1229, over 3825778.74 frames. 
], batch size: 55, lr: 1.86e-02, grad_scale: 8.0 +2023-04-01 04:09:53,360 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+02 7.635e+02 9.605e+02 1.185e+03 2.942e+03, threshold=1.921e+03, percent-clipped=6.0 +2023-04-01 04:09:58,813 INFO [train.py:903] (3/4) Epoch 4, batch 6650, loss[loss=0.2978, simple_loss=0.3607, pruned_loss=0.1175, over 19663.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3553, pruned_loss=0.1223, over 3828827.81 frames. ], batch size: 55, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:10:03,557 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9787, 2.0004, 1.8424, 2.8186, 1.9768, 2.6627, 2.5277, 1.8158], + device='cuda:3'), covar=tensor([0.1619, 0.1285, 0.0711, 0.0751, 0.1425, 0.0508, 0.1254, 0.1178], + device='cuda:3'), in_proj_covar=tensor([0.0589, 0.0575, 0.0531, 0.0731, 0.0628, 0.0486, 0.0644, 0.0549], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:10:13,164 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:35,302 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:39,786 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.94 vs. limit=5.0 +2023-04-01 04:10:43,566 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:10:58,823 INFO [train.py:903] (3/4) Epoch 4, batch 6700, loss[loss=0.3074, simple_loss=0.362, pruned_loss=0.1264, over 19663.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.355, pruned_loss=0.1224, over 3834563.63 frames. ], batch size: 58, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:11:52,642 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.097e+02 7.210e+02 9.176e+02 1.266e+03 4.477e+03, threshold=1.835e+03, percent-clipped=7.0 +2023-04-01 04:11:57,028 INFO [train.py:903] (3/4) Epoch 4, batch 6750, loss[loss=0.3078, simple_loss=0.3684, pruned_loss=0.1236, over 19660.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3546, pruned_loss=0.1219, over 3829850.46 frames. 
], batch size: 55, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:12:31,970 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27266.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:12:50,003 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7687, 3.1368, 3.2574, 3.2081, 1.0667, 2.9702, 2.7095, 2.9091], + device='cuda:3'), covar=tensor([0.1049, 0.0644, 0.0665, 0.0582, 0.3568, 0.0451, 0.0566, 0.1271], + device='cuda:3'), in_proj_covar=tensor([0.0472, 0.0416, 0.0562, 0.0445, 0.0543, 0.0321, 0.0367, 0.0525], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 04:12:52,218 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6976, 1.3088, 1.1684, 1.6083, 1.4312, 1.3623, 1.2582, 1.5430], + device='cuda:3'), covar=tensor([0.0759, 0.1272, 0.1288, 0.0809, 0.0981, 0.0591, 0.0997, 0.0648], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0365, 0.0277, 0.0240, 0.0301, 0.0253, 0.0267, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 04:12:52,952 INFO [train.py:903] (3/4) Epoch 4, batch 6800, loss[loss=0.3252, simple_loss=0.3744, pruned_loss=0.138, over 19687.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3543, pruned_loss=0.1223, over 3826985.02 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 8.0 +2023-04-01 04:13:36,970 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 04:13:37,407 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 04:13:40,505 INFO [train.py:903] (3/4) Epoch 5, batch 0, loss[loss=0.3108, simple_loss=0.3551, pruned_loss=0.1333, over 19505.00 frames. ], tot_loss[loss=0.3108, simple_loss=0.3551, pruned_loss=0.1333, over 19505.00 frames. ], batch size: 49, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:13:40,505 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 04:13:52,272 INFO [train.py:937] (3/4) Epoch 5, validation: loss=0.2121, simple_loss=0.3102, pruned_loss=0.05704, over 944034.00 frames. +2023-04-01 04:13:52,273 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 04:13:52,410 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:14:04,621 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 04:14:16,053 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.218e+02 7.861e+02 9.859e+02 1.236e+03 2.711e+03, threshold=1.972e+03, percent-clipped=3.0 +2023-04-01 04:14:52,468 INFO [train.py:903] (3/4) Epoch 5, batch 50, loss[loss=0.2716, simple_loss=0.3346, pruned_loss=0.1043, over 19539.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3574, pruned_loss=0.1236, over 864619.01 frames. ], batch size: 54, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:15:15,069 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27381.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:15:26,079 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 04:15:53,859 INFO [train.py:903] (3/4) Epoch 5, batch 100, loss[loss=0.2454, simple_loss=0.3128, pruned_loss=0.08898, over 19807.00 frames. 
], tot_loss[loss=0.2969, simple_loss=0.3547, pruned_loss=0.1196, over 1524617.15 frames. ], batch size: 49, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:16:05,289 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 04:16:11,155 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:16:15,198 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.953e+02 8.679e+02 1.081e+03 2.199e+03, threshold=1.736e+03, percent-clipped=1.0 +2023-04-01 04:16:53,739 INFO [train.py:903] (3/4) Epoch 5, batch 150, loss[loss=0.3243, simple_loss=0.3759, pruned_loss=0.1363, over 19316.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.354, pruned_loss=0.1198, over 2040066.00 frames. ], batch size: 66, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:52,401 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:17:54,451 INFO [train.py:903] (3/4) Epoch 5, batch 200, loss[loss=0.3072, simple_loss=0.3604, pruned_loss=0.127, over 19728.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3509, pruned_loss=0.1179, over 2425014.69 frames. ], batch size: 51, lr: 1.72e-02, grad_scale: 8.0 +2023-04-01 04:17:54,461 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 04:18:19,379 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+02 6.870e+02 8.382e+02 1.064e+03 2.606e+03, threshold=1.676e+03, percent-clipped=3.0 +2023-04-01 04:18:56,504 INFO [train.py:903] (3/4) Epoch 5, batch 250, loss[loss=0.3033, simple_loss=0.3626, pruned_loss=0.122, over 19532.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3483, pruned_loss=0.1159, over 2744980.59 frames. ], batch size: 56, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:19:58,942 INFO [train.py:903] (3/4) Epoch 5, batch 300, loss[loss=0.2813, simple_loss=0.3399, pruned_loss=0.1113, over 19664.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3492, pruned_loss=0.1167, over 2989220.27 frames. ], batch size: 55, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:20:15,422 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:20:22,960 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+02 6.581e+02 8.607e+02 1.103e+03 1.922e+03, threshold=1.721e+03, percent-clipped=6.0 +2023-04-01 04:20:28,921 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27637.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:21:01,017 INFO [train.py:903] (3/4) Epoch 5, batch 350, loss[loss=0.354, simple_loss=0.4054, pruned_loss=0.1513, over 18796.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.3497, pruned_loss=0.1179, over 3175625.83 frames. ], batch size: 74, lr: 1.71e-02, grad_scale: 4.0 +2023-04-01 04:21:01,426 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27662.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:21:07,171 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 04:21:26,428 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:21:58,795 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:22:02,939 INFO [train.py:903] (3/4) Epoch 5, batch 400, loss[loss=0.2491, simple_loss=0.3293, pruned_loss=0.08441, over 19566.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3488, pruned_loss=0.1171, over 3327882.47 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:22:26,130 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1282, 2.1926, 1.9089, 3.2084, 2.1578, 3.5238, 2.9403, 2.0781], + device='cuda:3'), covar=tensor([0.2028, 0.1577, 0.0811, 0.1024, 0.1951, 0.0474, 0.1430, 0.1247], + device='cuda:3'), in_proj_covar=tensor([0.0595, 0.0583, 0.0534, 0.0742, 0.0638, 0.0499, 0.0647, 0.0548], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:22:27,973 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.948e+02 7.305e+02 9.017e+02 1.065e+03 1.815e+03, threshold=1.803e+03, percent-clipped=3.0 +2023-04-01 04:22:28,320 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27732.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:22:43,460 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2612, 1.2807, 1.6207, 0.9032, 2.4398, 2.7763, 2.6693, 2.9179], + device='cuda:3'), covar=tensor([0.1308, 0.2674, 0.2609, 0.2121, 0.0469, 0.0220, 0.0251, 0.0163], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0281, 0.0319, 0.0259, 0.0197, 0.0115, 0.0206, 0.0135], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:23:04,214 INFO [train.py:903] (3/4) Epoch 5, batch 450, loss[loss=0.3029, simple_loss=0.3634, pruned_loss=0.1212, over 19337.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3484, pruned_loss=0.1163, over 3444199.75 frames. ], batch size: 70, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:23:45,999 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 04:23:47,128 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 04:24:07,106 INFO [train.py:903] (3/4) Epoch 5, batch 500, loss[loss=0.2807, simple_loss=0.337, pruned_loss=0.1123, over 18649.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3484, pruned_loss=0.1159, over 3530726.48 frames. ], batch size: 41, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:24:31,790 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+02 6.150e+02 8.318e+02 1.057e+03 1.987e+03, threshold=1.664e+03, percent-clipped=1.0 +2023-04-01 04:25:11,324 INFO [train.py:903] (3/4) Epoch 5, batch 550, loss[loss=0.3549, simple_loss=0.4085, pruned_loss=0.1507, over 19686.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.349, pruned_loss=0.116, over 3602974.23 frames. 
], batch size: 59, lr: 1.71e-02, grad_scale: 8.0 +2023-04-01 04:25:35,647 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:25:56,960 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7179, 4.1863, 2.5526, 3.6392, 0.8576, 3.8130, 3.8568, 3.9345], + device='cuda:3'), covar=tensor([0.0559, 0.1055, 0.1852, 0.0775, 0.4158, 0.0793, 0.0726, 0.0796], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0305, 0.0354, 0.0276, 0.0345, 0.0294, 0.0270, 0.0301], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:26:07,957 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:26:13,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1338, 1.0967, 1.3419, 0.5387, 2.3671, 2.3581, 2.1365, 2.4693], + device='cuda:3'), covar=tensor([0.1311, 0.3001, 0.2924, 0.2162, 0.0365, 0.0180, 0.0355, 0.0201], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0280, 0.0316, 0.0255, 0.0195, 0.0114, 0.0204, 0.0134], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:26:14,567 INFO [train.py:903] (3/4) Epoch 5, batch 600, loss[loss=0.3294, simple_loss=0.3808, pruned_loss=0.139, over 19304.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3487, pruned_loss=0.1157, over 3657565.73 frames. ], batch size: 66, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:26:38,768 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.595e+02 8.388e+02 1.023e+03 2.578e+03, threshold=1.678e+03, percent-clipped=3.0 +2023-04-01 04:27:02,854 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 04:27:12,903 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-01 04:27:17,953 INFO [train.py:903] (3/4) Epoch 5, batch 650, loss[loss=0.2655, simple_loss=0.3266, pruned_loss=0.1021, over 19730.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3487, pruned_loss=0.1165, over 3698594.85 frames. ], batch size: 51, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:27:22,971 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4040, 2.4349, 1.5508, 1.4405, 2.2157, 1.0573, 1.2297, 1.5720], + device='cuda:3'), covar=tensor([0.0755, 0.0412, 0.0823, 0.0537, 0.0343, 0.1032, 0.0625, 0.0439], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0254, 0.0305, 0.0234, 0.0215, 0.0309, 0.0271, 0.0244], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 04:27:59,453 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=5.01 vs. limit=5.0 +2023-04-01 04:28:20,084 INFO [train.py:903] (3/4) Epoch 5, batch 700, loss[loss=0.3683, simple_loss=0.3958, pruned_loss=0.1704, over 13463.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3488, pruned_loss=0.1165, over 3714978.09 frames. 
], batch size: 136, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:28:47,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 7.486e+02 9.333e+02 1.140e+03 2.488e+03, threshold=1.867e+03, percent-clipped=5.0 +2023-04-01 04:29:08,508 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4171, 2.2307, 1.8350, 1.7099, 1.4843, 1.7718, 0.3940, 1.4231], + device='cuda:3'), covar=tensor([0.0271, 0.0230, 0.0182, 0.0302, 0.0505, 0.0278, 0.0483, 0.0387], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0278, 0.0273, 0.0301, 0.0360, 0.0283, 0.0274, 0.0282], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:29:25,957 INFO [train.py:903] (3/4) Epoch 5, batch 750, loss[loss=0.3251, simple_loss=0.3627, pruned_loss=0.1437, over 19587.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3497, pruned_loss=0.1172, over 3743397.49 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:29:43,608 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28076.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:30:27,891 INFO [train.py:903] (3/4) Epoch 5, batch 800, loss[loss=0.3127, simple_loss=0.3683, pruned_loss=0.1286, over 19599.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3495, pruned_loss=0.1175, over 3747519.95 frames. ], batch size: 61, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:30:48,164 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 04:30:52,981 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.190e+02 6.427e+02 8.567e+02 1.032e+03 2.729e+03, threshold=1.713e+03, percent-clipped=3.0 +2023-04-01 04:31:32,204 INFO [train.py:903] (3/4) Epoch 5, batch 850, loss[loss=0.3072, simple_loss=0.3578, pruned_loss=0.1283, over 19533.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3474, pruned_loss=0.1161, over 3752805.58 frames. ], batch size: 54, lr: 1.70e-02, grad_scale: 8.0 +2023-04-01 04:32:03,333 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 04:32:08,670 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28191.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:32:14,506 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7481, 1.4445, 1.8654, 1.5164, 3.0623, 4.4961, 4.7504, 5.0691], + device='cuda:3'), covar=tensor([0.1215, 0.2533, 0.2383, 0.1664, 0.0384, 0.0137, 0.0124, 0.0056], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0279, 0.0314, 0.0254, 0.0196, 0.0115, 0.0204, 0.0133], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:3') +2023-04-01 04:32:29,187 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 04:32:33,626 INFO [train.py:903] (3/4) Epoch 5, batch 900, loss[loss=0.2157, simple_loss=0.2845, pruned_loss=0.07343, over 19732.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3486, pruned_loss=0.117, over 3754461.57 frames. 
], batch size: 45, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:32:59,333 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+02 6.938e+02 8.196e+02 1.125e+03 2.658e+03, threshold=1.639e+03, percent-clipped=4.0 +2023-04-01 04:33:17,402 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:33:22,480 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-01 04:33:36,690 INFO [train.py:903] (3/4) Epoch 5, batch 950, loss[loss=0.2512, simple_loss=0.3246, pruned_loss=0.08896, over 19718.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3468, pruned_loss=0.1156, over 3776185.02 frames. ], batch size: 63, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:33:42,368 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 04:33:49,441 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28273.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:34:28,485 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28305.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:34:36,191 INFO [train.py:903] (3/4) Epoch 5, batch 1000, loss[loss=0.2281, simple_loss=0.2953, pruned_loss=0.08043, over 19738.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3459, pruned_loss=0.1148, over 3785400.45 frames. ], batch size: 45, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:34:59,300 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 7.143e+02 8.888e+02 1.138e+03 2.880e+03, threshold=1.778e+03, percent-clipped=9.0 +2023-04-01 04:35:30,270 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 04:35:36,617 INFO [train.py:903] (3/4) Epoch 5, batch 1050, loss[loss=0.3527, simple_loss=0.3887, pruned_loss=0.1583, over 19628.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3478, pruned_loss=0.1162, over 3782317.63 frames. ], batch size: 57, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:36:09,495 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 04:36:36,348 INFO [train.py:903] (3/4) Epoch 5, batch 1100, loss[loss=0.3007, simple_loss=0.3659, pruned_loss=0.1177, over 19775.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3475, pruned_loss=0.1159, over 3789679.99 frames. ], batch size: 56, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:01,573 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+02 6.917e+02 8.996e+02 1.190e+03 3.192e+03, threshold=1.799e+03, percent-clipped=6.0 +2023-04-01 04:37:19,648 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28447.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:37:37,665 INFO [train.py:903] (3/4) Epoch 5, batch 1150, loss[loss=0.269, simple_loss=0.3224, pruned_loss=0.1078, over 17745.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3468, pruned_loss=0.1153, over 3807223.33 frames. 
], batch size: 39, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:37:50,296 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28472.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:38:11,055 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1976, 1.0833, 1.0769, 1.4466, 1.1863, 1.2642, 1.3590, 1.2087], + device='cuda:3'), covar=tensor([0.0878, 0.1107, 0.1148, 0.0681, 0.0851, 0.0854, 0.0846, 0.0797], + device='cuda:3'), in_proj_covar=tensor([0.0232, 0.0254, 0.0245, 0.0279, 0.0280, 0.0237, 0.0242, 0.0229], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 04:38:29,212 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:36,139 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:38:38,084 INFO [train.py:903] (3/4) Epoch 5, batch 1200, loss[loss=0.2809, simple_loss=0.3482, pruned_loss=0.1068, over 19606.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3471, pruned_loss=0.1152, over 3811512.11 frames. ], batch size: 57, lr: 1.69e-02, grad_scale: 8.0 +2023-04-01 04:39:01,764 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.666e+02 7.999e+02 1.012e+03 1.920e+03, threshold=1.600e+03, percent-clipped=0.0 +2023-04-01 04:39:12,212 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 04:39:37,128 INFO [train.py:903] (3/4) Epoch 5, batch 1250, loss[loss=0.3317, simple_loss=0.3832, pruned_loss=0.1401, over 19590.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.3477, pruned_loss=0.116, over 3823932.43 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:13,035 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28591.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:40:37,857 INFO [train.py:903] (3/4) Epoch 5, batch 1300, loss[loss=0.2364, simple_loss=0.297, pruned_loss=0.08788, over 19775.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3469, pruned_loss=0.1156, over 3816351.16 frames. ], batch size: 47, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:40:43,796 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28617.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:41:03,193 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.211e+02 7.072e+02 8.596e+02 1.219e+03 1.879e+03, threshold=1.719e+03, percent-clipped=8.0 +2023-04-01 04:41:11,362 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4885, 3.2055, 2.0340, 2.5612, 2.2992, 2.5013, 0.8495, 2.5256], + device='cuda:3'), covar=tensor([0.0270, 0.0218, 0.0289, 0.0323, 0.0384, 0.0338, 0.0546, 0.0371], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0277, 0.0275, 0.0296, 0.0360, 0.0277, 0.0273, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:41:23,371 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:41:39,846 INFO [train.py:903] (3/4) Epoch 5, batch 1350, loss[loss=0.3165, simple_loss=0.3585, pruned_loss=0.1372, over 19593.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3463, pruned_loss=0.1147, over 3820322.66 frames. 
], batch size: 52, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:42:34,081 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28706.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:42:40,552 INFO [train.py:903] (3/4) Epoch 5, batch 1400, loss[loss=0.3196, simple_loss=0.3752, pruned_loss=0.132, over 19616.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3452, pruned_loss=0.1145, over 3822078.22 frames. ], batch size: 57, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:01,034 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0486, 4.9953, 5.9904, 5.8525, 1.8635, 5.4745, 4.9142, 5.3725], + device='cuda:3'), covar=tensor([0.0794, 0.0547, 0.0378, 0.0306, 0.3759, 0.0257, 0.0398, 0.0808], + device='cuda:3'), in_proj_covar=tensor([0.0495, 0.0445, 0.0587, 0.0466, 0.0564, 0.0337, 0.0379, 0.0543], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 04:43:04,142 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 6.792e+02 9.169e+02 1.129e+03 1.829e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-04-01 04:43:04,528 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28732.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:43:40,832 INFO [train.py:903] (3/4) Epoch 5, batch 1450, loss[loss=0.269, simple_loss=0.3414, pruned_loss=0.09829, over 19688.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3458, pruned_loss=0.1144, over 3826072.08 frames. ], batch size: 59, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:43:43,195 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 04:43:43,506 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28764.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:44:41,295 INFO [train.py:903] (3/4) Epoch 5, batch 1500, loss[loss=0.289, simple_loss=0.3445, pruned_loss=0.1168, over 19600.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3464, pruned_loss=0.1144, over 3813306.00 frames. ], batch size: 50, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:45:06,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.912e+02 8.562e+02 1.022e+03 2.509e+03, threshold=1.712e+03, percent-clipped=1.0 +2023-04-01 04:45:24,303 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:31,741 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:45:32,290 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 04:45:42,354 INFO [train.py:903] (3/4) Epoch 5, batch 1550, loss[loss=0.2861, simple_loss=0.3414, pruned_loss=0.1154, over 19467.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3464, pruned_loss=0.1151, over 3794745.23 frames. ], batch size: 49, lr: 1.68e-02, grad_scale: 8.0 +2023-04-01 04:46:02,294 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28879.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:46:41,467 INFO [train.py:903] (3/4) Epoch 5, batch 1600, loss[loss=0.3774, simple_loss=0.4008, pruned_loss=0.177, over 13647.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.347, pruned_loss=0.1153, over 3786308.33 frames. 
], batch size: 136, lr: 1.67e-02, grad_scale: 8.0 +2023-04-01 04:47:01,752 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9744, 2.0257, 1.9170, 2.8618, 1.8063, 2.6600, 2.6035, 1.8919], + device='cuda:3'), covar=tensor([0.1742, 0.1352, 0.0726, 0.0833, 0.1727, 0.0564, 0.1274, 0.1221], + device='cuda:3'), in_proj_covar=tensor([0.0605, 0.0591, 0.0544, 0.0751, 0.0651, 0.0507, 0.0656, 0.0561], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:47:04,538 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+02 6.984e+02 8.420e+02 1.102e+03 2.946e+03, threshold=1.684e+03, percent-clipped=4.0 +2023-04-01 04:47:04,566 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 04:47:41,582 INFO [train.py:903] (3/4) Epoch 5, batch 1650, loss[loss=0.3141, simple_loss=0.3764, pruned_loss=0.1259, over 18729.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3464, pruned_loss=0.1149, over 3792307.03 frames. ], batch size: 74, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:47:42,054 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28962.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:43,184 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28963.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:47:49,877 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:12,492 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28987.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:14,445 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28988.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 04:48:42,352 INFO [train.py:903] (3/4) Epoch 5, batch 1700, loss[loss=0.3026, simple_loss=0.3602, pruned_loss=0.1225, over 19525.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3462, pruned_loss=0.1149, over 3790845.95 frames. ], batch size: 64, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:48:43,875 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29013.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 04:48:52,758 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:48:52,890 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29020.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:08,764 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+02 6.699e+02 8.227e+02 1.083e+03 2.721e+03, threshold=1.645e+03, percent-clipped=4.0 +2023-04-01 04:49:16,993 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3283, 2.1823, 2.0678, 3.3234, 2.1587, 3.5590, 3.1710, 2.0237], + device='cuda:3'), covar=tensor([0.1836, 0.1487, 0.0750, 0.0917, 0.1860, 0.0461, 0.1266, 0.1303], + device='cuda:3'), in_proj_covar=tensor([0.0599, 0.0589, 0.0536, 0.0743, 0.0645, 0.0502, 0.0657, 0.0557], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:49:21,046 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 04:49:22,488 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:49:36,773 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.34 vs. limit=5.0 +2023-04-01 04:49:40,703 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8757, 1.8762, 1.9000, 2.5226, 1.6013, 2.3577, 2.3877, 1.7705], + device='cuda:3'), covar=tensor([0.1607, 0.1272, 0.0717, 0.0685, 0.1438, 0.0535, 0.1355, 0.1274], + device='cuda:3'), in_proj_covar=tensor([0.0594, 0.0584, 0.0531, 0.0737, 0.0640, 0.0498, 0.0651, 0.0554], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:49:42,475 INFO [train.py:903] (3/4) Epoch 5, batch 1750, loss[loss=0.2999, simple_loss=0.3645, pruned_loss=0.1177, over 19650.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.346, pruned_loss=0.1146, over 3791870.26 frames. ], batch size: 58, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:26,343 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-04-01 04:50:43,927 INFO [train.py:903] (3/4) Epoch 5, batch 1800, loss[loss=0.2746, simple_loss=0.3275, pruned_loss=0.1108, over 19748.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3457, pruned_loss=0.1143, over 3798664.38 frames. ], batch size: 48, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:50:59,221 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0532, 1.0743, 1.5482, 0.5666, 2.4575, 2.3998, 2.1037, 2.4949], + device='cuda:3'), covar=tensor([0.1276, 0.2862, 0.2621, 0.1968, 0.0308, 0.0183, 0.0349, 0.0193], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0278, 0.0314, 0.0253, 0.0197, 0.0115, 0.0205, 0.0136], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:51:07,718 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.740e+02 6.050e+02 7.849e+02 1.011e+03 2.328e+03, threshold=1.570e+03, percent-clipped=7.0 +2023-04-01 04:51:39,753 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 04:51:44,194 INFO [train.py:903] (3/4) Epoch 5, batch 1850, loss[loss=0.3217, simple_loss=0.3759, pruned_loss=0.1337, over 19777.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3444, pruned_loss=0.1132, over 3805956.92 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:51:52,844 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-01 04:52:17,655 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 04:52:43,729 INFO [train.py:903] (3/4) Epoch 5, batch 1900, loss[loss=0.2773, simple_loss=0.3346, pruned_loss=0.11, over 19660.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3454, pruned_loss=0.1136, over 3802024.91 frames. 
], batch size: 60, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:52:53,176 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29219.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:52:59,218 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:01,719 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:02,418 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 04:53:07,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 04:53:11,316 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+02 6.988e+02 9.073e+02 1.156e+03 1.890e+03, threshold=1.815e+03, percent-clipped=5.0 +2023-04-01 04:53:23,912 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:28,917 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 04:53:30,453 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:35,744 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:53:44,960 INFO [train.py:903] (3/4) Epoch 5, batch 1950, loss[loss=0.2365, simple_loss=0.3077, pruned_loss=0.08264, over 19663.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3467, pruned_loss=0.1147, over 3800754.31 frames. ], batch size: 53, lr: 1.67e-02, grad_scale: 4.0 +2023-04-01 04:54:46,876 INFO [train.py:903] (3/4) Epoch 5, batch 2000, loss[loss=0.2836, simple_loss=0.3455, pruned_loss=0.1108, over 19669.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3461, pruned_loss=0.114, over 3806824.32 frames. ], batch size: 60, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:54:56,674 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.00 vs. limit=5.0 +2023-04-01 04:55:10,280 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.142e+02 7.102e+02 9.141e+02 1.135e+03 3.050e+03, threshold=1.828e+03, percent-clipped=2.0 +2023-04-01 04:55:16,951 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:41,133 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2122, 1.1421, 1.4834, 0.9909, 2.6959, 3.3165, 3.2327, 3.5610], + device='cuda:3'), covar=tensor([0.1336, 0.2926, 0.2867, 0.2059, 0.0439, 0.0153, 0.0200, 0.0125], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0279, 0.0314, 0.0256, 0.0196, 0.0114, 0.0204, 0.0135], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 04:55:43,163 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 04:55:46,567 INFO [train.py:903] (3/4) Epoch 5, batch 2050, loss[loss=0.2989, simple_loss=0.3637, pruned_loss=0.117, over 19531.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3456, pruned_loss=0.114, over 3816514.80 frames. 
], batch size: 54, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:55:49,013 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:55:59,871 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 04:56:00,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 04:56:12,693 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1916, 1.2991, 1.6689, 1.2910, 2.4267, 2.1789, 2.5361, 0.9497], + device='cuda:3'), covar=tensor([0.1689, 0.2675, 0.1438, 0.1417, 0.1018, 0.1252, 0.1103, 0.2491], + device='cuda:3'), in_proj_covar=tensor([0.0438, 0.0497, 0.0469, 0.0403, 0.0533, 0.0431, 0.0616, 0.0433], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:56:16,134 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 04:56:23,070 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 04:56:30,418 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5802, 1.6522, 1.5551, 2.2355, 1.4323, 1.8225, 1.9733, 1.5851], + device='cuda:3'), covar=tensor([0.1705, 0.1275, 0.0808, 0.0657, 0.1416, 0.0641, 0.1477, 0.1333], + device='cuda:3'), in_proj_covar=tensor([0.0607, 0.0597, 0.0543, 0.0750, 0.0652, 0.0509, 0.0656, 0.0559], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:56:37,212 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3520, 2.3957, 2.1954, 3.4669, 2.2335, 3.5237, 3.4653, 2.5830], + device='cuda:3'), covar=tensor([0.1947, 0.1418, 0.0743, 0.0967, 0.1988, 0.0543, 0.1191, 0.1096], + device='cuda:3'), in_proj_covar=tensor([0.0606, 0.0596, 0.0541, 0.0750, 0.0652, 0.0509, 0.0656, 0.0559], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 04:56:45,829 INFO [train.py:903] (3/4) Epoch 5, batch 2100, loss[loss=0.2885, simple_loss=0.3458, pruned_loss=0.1156, over 19673.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3443, pruned_loss=0.1134, over 3832188.73 frames. ], batch size: 53, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:57:12,304 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 7.145e+02 9.347e+02 1.270e+03 4.921e+03, threshold=1.869e+03, percent-clipped=10.0 +2023-04-01 04:57:13,529 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 04:57:15,696 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 04:57:34,270 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 04:57:46,676 INFO [train.py:903] (3/4) Epoch 5, batch 2150, loss[loss=0.2805, simple_loss=0.3245, pruned_loss=0.1182, over 19297.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3453, pruned_loss=0.1146, over 3820237.61 frames. 
], batch size: 44, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:58:08,905 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29479.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 04:58:48,388 INFO [train.py:903] (3/4) Epoch 5, batch 2200, loss[loss=0.2654, simple_loss=0.3212, pruned_loss=0.1047, over 19812.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3454, pruned_loss=0.1145, over 3811790.98 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:11,935 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+02 6.760e+02 8.285e+02 1.117e+03 1.782e+03, threshold=1.657e+03, percent-clipped=0.0 +2023-04-01 04:59:47,494 INFO [train.py:903] (3/4) Epoch 5, batch 2250, loss[loss=0.2727, simple_loss=0.3356, pruned_loss=0.1049, over 19568.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3454, pruned_loss=0.1147, over 3814424.48 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 04:59:55,558 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:28,223 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:32,406 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:00:48,025 INFO [train.py:903] (3/4) Epoch 5, batch 2300, loss[loss=0.3065, simple_loss=0.3603, pruned_loss=0.1263, over 19298.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3457, pruned_loss=0.1144, over 3815983.01 frames. ], batch size: 66, lr: 1.66e-02, grad_scale: 8.0 +2023-04-01 05:00:56,357 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29619.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:01:03,672 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 05:01:15,152 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.694e+02 6.380e+02 7.654e+02 9.572e+02 1.859e+03, threshold=1.531e+03, percent-clipped=2.0 +2023-04-01 05:01:15,583 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1902, 2.1768, 1.5657, 1.4439, 1.9673, 1.0154, 1.2389, 1.6303], + device='cuda:3'), covar=tensor([0.0626, 0.0427, 0.0834, 0.0428, 0.0367, 0.1006, 0.0531, 0.0364], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0263, 0.0310, 0.0235, 0.0213, 0.0307, 0.0282, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:01:48,836 INFO [train.py:903] (3/4) Epoch 5, batch 2350, loss[loss=0.3331, simple_loss=0.3793, pruned_loss=0.1434, over 19610.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3451, pruned_loss=0.1137, over 3818319.58 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:11,376 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29680.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:02:28,866 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 05:02:37,413 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 05:02:45,270 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-01 05:02:49,428 INFO [train.py:903] (3/4) Epoch 5, batch 2400, loss[loss=0.3731, simple_loss=0.4017, pruned_loss=0.1723, over 12816.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3454, pruned_loss=0.1138, over 3820660.03 frames. ], batch size: 136, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:02:50,600 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:12,595 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.376e+02 6.843e+02 7.889e+02 1.103e+03 3.246e+03, threshold=1.578e+03, percent-clipped=5.0 +2023-04-01 05:03:15,307 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:46,799 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:03:48,490 INFO [train.py:903] (3/4) Epoch 5, batch 2450, loss[loss=0.3472, simple_loss=0.3876, pruned_loss=0.1534, over 19307.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3429, pruned_loss=0.1124, over 3834351.25 frames. ], batch size: 66, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:48,601 INFO [train.py:903] (3/4) Epoch 5, batch 2500, loss[loss=0.2871, simple_loss=0.3485, pruned_loss=0.1129, over 18847.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3439, pruned_loss=0.113, over 3807996.85 frames. ], batch size: 74, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:04:57,792 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:05:14,588 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+02 6.790e+02 8.500e+02 1.086e+03 2.138e+03, threshold=1.700e+03, percent-clipped=3.0 +2023-04-01 05:05:30,623 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2696, 0.8461, 1.0220, 2.0467, 1.5173, 1.2337, 1.9670, 1.3391], + device='cuda:3'), covar=tensor([0.1247, 0.2028, 0.1607, 0.1028, 0.1220, 0.1631, 0.1162, 0.1184], + device='cuda:3'), in_proj_covar=tensor([0.0232, 0.0253, 0.0244, 0.0281, 0.0274, 0.0231, 0.0238, 0.0228], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:05:48,266 INFO [train.py:903] (3/4) Epoch 5, batch 2550, loss[loss=0.2822, simple_loss=0.3392, pruned_loss=0.1126, over 19397.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3438, pruned_loss=0.1131, over 3814872.40 frames. ], batch size: 48, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:05,776 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 05:06:16,590 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.42 vs. limit=5.0 +2023-04-01 05:06:40,384 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 05:06:48,581 INFO [train.py:903] (3/4) Epoch 5, batch 2600, loss[loss=0.3197, simple_loss=0.3689, pruned_loss=0.1353, over 12836.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3442, pruned_loss=0.1133, over 3805812.01 frames. 
], batch size: 137, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:06:50,696 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:07:05,059 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8933, 1.8594, 1.9371, 2.9515, 1.8832, 2.7479, 2.6095, 1.8543], + device='cuda:3'), covar=tensor([0.1919, 0.1613, 0.0797, 0.0774, 0.1775, 0.0609, 0.1428, 0.1427], + device='cuda:3'), in_proj_covar=tensor([0.0607, 0.0597, 0.0541, 0.0755, 0.0646, 0.0516, 0.0654, 0.0563], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 05:07:13,515 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.882e+02 9.175e+02 1.239e+03 1.828e+03, threshold=1.835e+03, percent-clipped=5.0 +2023-04-01 05:07:50,212 INFO [train.py:903] (3/4) Epoch 5, batch 2650, loss[loss=0.3582, simple_loss=0.3979, pruned_loss=0.1593, over 19639.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3449, pruned_loss=0.1134, over 3820471.10 frames. ], batch size: 60, lr: 1.65e-02, grad_scale: 8.0 +2023-04-01 05:07:58,283 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:08,343 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 05:08:29,828 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:08:48,839 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3670, 1.9357, 1.4276, 1.5491, 1.8016, 1.1794, 1.2556, 1.6355], + device='cuda:3'), covar=tensor([0.0616, 0.0396, 0.0614, 0.0376, 0.0285, 0.0756, 0.0490, 0.0291], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0266, 0.0313, 0.0237, 0.0221, 0.0310, 0.0288, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:08:50,698 INFO [train.py:903] (3/4) Epoch 5, batch 2700, loss[loss=0.2955, simple_loss=0.364, pruned_loss=0.1135, over 19686.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3439, pruned_loss=0.1122, over 3824758.23 frames. ], batch size: 59, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:09:04,701 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30024.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:07,346 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-01 05:09:10,274 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:09:17,305 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.255e+02 6.674e+02 8.375e+02 9.964e+02 1.932e+03, threshold=1.675e+03, percent-clipped=1.0 +2023-04-01 05:09:42,500 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8600, 1.8417, 1.8334, 2.8244, 1.9825, 2.8289, 2.5703, 1.8270], + device='cuda:3'), covar=tensor([0.1970, 0.1585, 0.0862, 0.0877, 0.1711, 0.0564, 0.1483, 0.1438], + device='cuda:3'), in_proj_covar=tensor([0.0607, 0.0597, 0.0544, 0.0757, 0.0653, 0.0519, 0.0663, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 05:09:49,950 INFO [train.py:903] (3/4) Epoch 5, batch 2750, loss[loss=0.3349, simple_loss=0.388, pruned_loss=0.1409, over 18827.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3446, pruned_loss=0.113, over 3809956.62 frames. ], batch size: 74, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:10:50,795 INFO [train.py:903] (3/4) Epoch 5, batch 2800, loss[loss=0.2431, simple_loss=0.2975, pruned_loss=0.09435, over 19750.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3443, pruned_loss=0.1125, over 3820644.24 frames. ], batch size: 47, lr: 1.64e-02, grad_scale: 8.0 +2023-04-01 05:11:17,048 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.279e+02 7.041e+02 9.072e+02 1.191e+03 2.188e+03, threshold=1.814e+03, percent-clipped=6.0 +2023-04-01 05:11:22,788 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30139.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:27,366 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:11:51,984 INFO [train.py:903] (3/4) Epoch 5, batch 2850, loss[loss=0.3138, simple_loss=0.3753, pruned_loss=0.1262, over 19601.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3464, pruned_loss=0.1137, over 3822627.49 frames. ], batch size: 61, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:11:54,315 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30164.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:12:15,909 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9502, 1.5387, 1.4811, 2.0133, 1.5768, 2.1178, 2.2507, 2.0980], + device='cuda:3'), covar=tensor([0.0698, 0.0983, 0.1036, 0.0991, 0.1023, 0.0757, 0.0819, 0.0599], + device='cuda:3'), in_proj_covar=tensor([0.0237, 0.0259, 0.0249, 0.0288, 0.0282, 0.0239, 0.0242, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:12:46,623 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2216, 2.1859, 1.5752, 1.3323, 1.9500, 1.0645, 1.1257, 1.8120], + device='cuda:3'), covar=tensor([0.0685, 0.0442, 0.0824, 0.0552, 0.0367, 0.1064, 0.0635, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0265, 0.0310, 0.0235, 0.0219, 0.0310, 0.0286, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:12:51,658 INFO [train.py:903] (3/4) Epoch 5, batch 2900, loss[loss=0.2837, simple_loss=0.3472, pruned_loss=0.1101, over 19766.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3443, pruned_loss=0.1124, over 3836588.96 frames. 
], batch size: 54, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:12:51,680 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 05:13:09,293 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:13:20,147 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.023e+02 7.237e+02 9.091e+02 1.153e+03 2.755e+03, threshold=1.818e+03, percent-clipped=7.0 +2023-04-01 05:13:28,928 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7239, 1.4269, 1.3958, 1.9893, 1.4491, 1.9980, 2.0885, 1.9939], + device='cuda:3'), covar=tensor([0.0808, 0.1060, 0.1106, 0.0920, 0.1058, 0.0778, 0.0842, 0.0650], + device='cuda:3'), in_proj_covar=tensor([0.0238, 0.0259, 0.0248, 0.0286, 0.0280, 0.0237, 0.0242, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:13:51,431 INFO [train.py:903] (3/4) Epoch 5, batch 2950, loss[loss=0.2587, simple_loss=0.3153, pruned_loss=0.101, over 19754.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3445, pruned_loss=0.112, over 3837619.02 frames. ], batch size: 47, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:12,815 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:18,231 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30284.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:29,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7617, 1.4528, 1.3169, 2.0326, 1.3990, 2.1257, 2.0884, 2.0234], + device='cuda:3'), covar=tensor([0.0847, 0.1136, 0.1165, 0.0997, 0.1176, 0.0688, 0.0880, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0236, 0.0258, 0.0247, 0.0282, 0.0279, 0.0235, 0.0240, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:14:46,942 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30309.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:14:50,602 INFO [train.py:903] (3/4) Epoch 5, batch 3000, loss[loss=0.2998, simple_loss=0.3618, pruned_loss=0.1189, over 19674.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3451, pruned_loss=0.1123, over 3842551.62 frames. ], batch size: 59, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:14:50,602 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 05:15:03,126 INFO [train.py:937] (3/4) Epoch 5, validation: loss=0.2047, simple_loss=0.3034, pruned_loss=0.05296, over 944034.00 frames. +2023-04-01 05:15:03,127 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 05:15:05,713 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 05:15:27,705 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.77 vs. limit=5.0 +2023-04-01 05:15:33,556 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 7.155e+02 8.736e+02 1.085e+03 2.346e+03, threshold=1.747e+03, percent-clipped=4.0 +2023-04-01 05:16:05,815 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.43 vs. 
limit=5.0 +2023-04-01 05:16:06,358 INFO [train.py:903] (3/4) Epoch 5, batch 3050, loss[loss=0.237, simple_loss=0.3012, pruned_loss=0.0864, over 19708.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3444, pruned_loss=0.1121, over 3834619.85 frames. ], batch size: 45, lr: 1.64e-02, grad_scale: 4.0 +2023-04-01 05:16:07,006 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-01 05:16:26,394 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:16:45,476 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30395.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:07,569 INFO [train.py:903] (3/4) Epoch 5, batch 3100, loss[loss=0.287, simple_loss=0.3374, pruned_loss=0.1183, over 19490.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.344, pruned_loss=0.1118, over 3828324.68 frames. ], batch size: 49, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:17:17,194 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30420.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:17:26,216 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0170, 1.3737, 1.6023, 2.0670, 1.7487, 1.7666, 1.8622, 1.9890], + device='cuda:3'), covar=tensor([0.0779, 0.1601, 0.1217, 0.0754, 0.1067, 0.0430, 0.0845, 0.0537], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0368, 0.0285, 0.0241, 0.0302, 0.0247, 0.0269, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:17:33,697 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+02 6.847e+02 8.274e+02 1.001e+03 3.134e+03, threshold=1.655e+03, percent-clipped=2.0 +2023-04-01 05:17:43,912 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2827, 1.1277, 1.2421, 1.4510, 2.1248, 1.0927, 1.6617, 2.1549], + device='cuda:3'), covar=tensor([0.0393, 0.1717, 0.1695, 0.1010, 0.0482, 0.1499, 0.1393, 0.0394], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0308, 0.0308, 0.0286, 0.0304, 0.0314, 0.0287, 0.0302], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:18:06,702 INFO [train.py:903] (3/4) Epoch 5, batch 3150, loss[loss=0.281, simple_loss=0.3466, pruned_loss=0.1077, over 19532.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3441, pruned_loss=0.1121, over 3830410.49 frames. ], batch size: 56, lr: 1.63e-02, grad_scale: 4.0 +2023-04-01 05:18:34,303 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 05:18:37,217 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30487.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:19:06,273 INFO [train.py:903] (3/4) Epoch 5, batch 3200, loss[loss=0.263, simple_loss=0.3183, pruned_loss=0.1038, over 19759.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3454, pruned_loss=0.113, over 3833822.70 frames. 
], batch size: 46, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:19:35,545 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 7.005e+02 8.588e+02 1.128e+03 3.335e+03, threshold=1.718e+03, percent-clipped=13.0 +2023-04-01 05:19:35,975 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30535.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:04,489 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:06,323 INFO [train.py:903] (3/4) Epoch 5, batch 3250, loss[loss=0.2662, simple_loss=0.3326, pruned_loss=0.09985, over 19485.00 frames. ], tot_loss[loss=0.285, simple_loss=0.345, pruned_loss=0.1125, over 3840230.11 frames. ], batch size: 49, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:20:20,424 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:20:55,355 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:21:08,678 INFO [train.py:903] (3/4) Epoch 5, batch 3300, loss[loss=0.2732, simple_loss=0.3302, pruned_loss=0.1081, over 19764.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3456, pruned_loss=0.1125, over 3849487.54 frames. ], batch size: 47, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:21:16,490 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 05:21:29,548 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.26 vs. limit=5.0 +2023-04-01 05:21:35,516 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 6.699e+02 7.755e+02 9.198e+02 2.155e+03, threshold=1.551e+03, percent-clipped=1.0 +2023-04-01 05:22:09,095 INFO [train.py:903] (3/4) Epoch 5, batch 3350, loss[loss=0.3032, simple_loss=0.3657, pruned_loss=0.1204, over 19538.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3455, pruned_loss=0.1126, over 3832157.00 frames. ], batch size: 56, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:22:21,653 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:22:39,147 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:22:49,175 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8782, 3.5743, 2.3804, 3.2729, 1.0347, 3.2375, 3.1050, 3.2831], + device='cuda:3'), covar=tensor([0.0876, 0.1367, 0.2012, 0.0755, 0.3797, 0.0989, 0.0928, 0.0998], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0315, 0.0366, 0.0288, 0.0355, 0.0303, 0.0282, 0.0316], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:23:09,774 INFO [train.py:903] (3/4) Epoch 5, batch 3400, loss[loss=0.2867, simple_loss=0.3262, pruned_loss=0.1236, over 19736.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3443, pruned_loss=0.1122, over 3828626.16 frames. 
], batch size: 46, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:23:22,327 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:23:39,932 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+02 6.893e+02 8.724e+02 1.073e+03 2.213e+03, threshold=1.745e+03, percent-clipped=7.0 +2023-04-01 05:23:57,768 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2303, 1.0575, 1.0190, 1.4093, 1.2353, 1.2717, 1.3983, 1.1784], + device='cuda:3'), covar=tensor([0.1030, 0.1268, 0.1426, 0.0760, 0.0920, 0.0957, 0.0936, 0.0912], + device='cuda:3'), in_proj_covar=tensor([0.0237, 0.0256, 0.0251, 0.0282, 0.0276, 0.0232, 0.0237, 0.0228], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:24:11,824 INFO [train.py:903] (3/4) Epoch 5, batch 3450, loss[loss=0.2314, simple_loss=0.2999, pruned_loss=0.08145, over 19399.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3433, pruned_loss=0.1115, over 3825607.37 frames. ], batch size: 48, lr: 1.63e-02, grad_scale: 8.0 +2023-04-01 05:24:15,168 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 05:24:27,457 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0006, 1.4306, 1.5472, 1.9170, 1.8308, 1.8181, 1.6924, 1.9379], + device='cuda:3'), covar=tensor([0.0718, 0.1392, 0.1238, 0.0785, 0.1004, 0.0416, 0.0813, 0.0512], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0358, 0.0278, 0.0235, 0.0301, 0.0242, 0.0265, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:24:52,628 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2881, 2.8487, 2.0841, 2.2231, 1.8590, 2.4135, 0.7177, 2.1533], + device='cuda:3'), covar=tensor([0.0257, 0.0248, 0.0236, 0.0387, 0.0496, 0.0354, 0.0550, 0.0440], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0280, 0.0279, 0.0302, 0.0370, 0.0293, 0.0276, 0.0287], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:25:13,755 INFO [train.py:903] (3/4) Epoch 5, batch 3500, loss[loss=0.2737, simple_loss=0.3404, pruned_loss=0.1036, over 18139.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3435, pruned_loss=0.1116, over 3824815.15 frames. ], batch size: 83, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:25:39,221 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 7.015e+02 8.195e+02 1.097e+03 2.546e+03, threshold=1.639e+03, percent-clipped=5.0 +2023-04-01 05:25:41,891 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:09,323 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30858.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:26:13,374 INFO [train.py:903] (3/4) Epoch 5, batch 3550, loss[loss=0.2861, simple_loss=0.3554, pruned_loss=0.1084, over 19776.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3456, pruned_loss=0.113, over 3821435.54 frames. 
], batch size: 56, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:26:15,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5190, 2.5178, 1.7385, 1.5534, 2.2084, 1.3229, 1.2566, 1.6298], + device='cuda:3'), covar=tensor([0.0647, 0.0308, 0.0615, 0.0432, 0.0320, 0.0688, 0.0538, 0.0351], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0271, 0.0316, 0.0244, 0.0223, 0.0313, 0.0288, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:26:38,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:13,894 INFO [train.py:903] (3/4) Epoch 5, batch 3600, loss[loss=0.3329, simple_loss=0.3812, pruned_loss=0.1423, over 19810.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3438, pruned_loss=0.1117, over 3839143.14 frames. ], batch size: 56, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:27:24,165 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30921.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:43,234 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+02 6.769e+02 8.289e+02 1.066e+03 2.218e+03, threshold=1.658e+03, percent-clipped=4.0 +2023-04-01 05:27:51,226 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:27:52,206 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:02,213 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:13,484 INFO [train.py:903] (3/4) Epoch 5, batch 3650, loss[loss=0.2856, simple_loss=0.3543, pruned_loss=0.1084, over 19669.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3433, pruned_loss=0.1116, over 3832510.62 frames. ], batch size: 58, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:28:20,070 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:28:45,370 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1930, 1.3214, 1.1018, 1.0125, 0.9706, 1.1347, 0.1028, 0.3928], + device='cuda:3'), covar=tensor([0.0254, 0.0246, 0.0159, 0.0181, 0.0500, 0.0195, 0.0466, 0.0423], + device='cuda:3'), in_proj_covar=tensor([0.0286, 0.0284, 0.0287, 0.0309, 0.0376, 0.0298, 0.0280, 0.0293], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:29:02,305 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1655, 2.1250, 1.6237, 1.4329, 1.9435, 1.0463, 1.0714, 1.6325], + device='cuda:3'), covar=tensor([0.0715, 0.0498, 0.0875, 0.0500, 0.0406, 0.1039, 0.0585, 0.0388], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0274, 0.0317, 0.0243, 0.0225, 0.0314, 0.0286, 0.0256], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:29:12,118 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-01 05:29:14,322 INFO [train.py:903] (3/4) Epoch 5, batch 3700, loss[loss=0.2593, simple_loss=0.3143, pruned_loss=0.1022, over 19780.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3443, pruned_loss=0.112, over 3839210.87 frames. 
], batch size: 48, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:29:21,014 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:29:34,019 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31029.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:29:40,640 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+02 7.099e+02 8.962e+02 1.140e+03 3.223e+03, threshold=1.792e+03, percent-clipped=9.0 +2023-04-01 05:30:15,306 INFO [train.py:903] (3/4) Epoch 5, batch 3750, loss[loss=0.331, simple_loss=0.3746, pruned_loss=0.1437, over 13360.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3455, pruned_loss=0.1133, over 3836681.75 frames. ], batch size: 135, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:30:53,648 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:15,740 INFO [train.py:903] (3/4) Epoch 5, batch 3800, loss[loss=0.2837, simple_loss=0.3507, pruned_loss=0.1084, over 19666.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3455, pruned_loss=0.1133, over 3840849.93 frames. ], batch size: 55, lr: 1.62e-02, grad_scale: 8.0 +2023-04-01 05:31:22,823 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:25,989 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4504, 1.8084, 1.9955, 2.6188, 2.4098, 2.1082, 1.9158, 2.5886], + device='cuda:3'), covar=tensor([0.0690, 0.1529, 0.1166, 0.0665, 0.0941, 0.0418, 0.0910, 0.0450], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0359, 0.0279, 0.0231, 0.0299, 0.0240, 0.0265, 0.0225], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:31:40,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:31:44,606 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.073e+02 5.856e+02 8.467e+02 1.115e+03 2.554e+03, threshold=1.693e+03, percent-clipped=5.0 +2023-04-01 05:31:49,119 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 05:32:15,363 INFO [train.py:903] (3/4) Epoch 5, batch 3850, loss[loss=0.2976, simple_loss=0.3631, pruned_loss=0.116, over 19669.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3455, pruned_loss=0.1136, over 3825325.23 frames. ], batch size: 60, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:32:37,689 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-01 05:33:19,169 INFO [train.py:903] (3/4) Epoch 5, batch 3900, loss[loss=0.3215, simple_loss=0.3713, pruned_loss=0.1358, over 19597.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3459, pruned_loss=0.1138, over 3819849.02 frames. 
], batch size: 61, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:33:45,289 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.653e+02 6.709e+02 8.069e+02 1.055e+03 2.198e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 05:34:10,691 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1798, 3.7315, 2.2346, 2.3767, 3.3491, 1.8309, 1.1893, 1.9246], + device='cuda:3'), covar=tensor([0.0910, 0.0335, 0.0751, 0.0529, 0.0406, 0.0832, 0.0800, 0.0589], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0270, 0.0313, 0.0233, 0.0219, 0.0306, 0.0279, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:34:18,968 INFO [train.py:903] (3/4) Epoch 5, batch 3950, loss[loss=0.2788, simple_loss=0.3386, pruned_loss=0.1095, over 19651.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3452, pruned_loss=0.1134, over 3821128.68 frames. ], batch size: 53, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:34:22,383 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:34:24,551 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 05:34:47,812 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:00,481 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31296.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:35:18,261 INFO [train.py:903] (3/4) Epoch 5, batch 4000, loss[loss=0.3037, simple_loss=0.3681, pruned_loss=0.1197, over 18736.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3455, pruned_loss=0.1138, over 3814309.66 frames. ], batch size: 74, lr: 1.61e-02, grad_scale: 8.0 +2023-04-01 05:35:48,954 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+02 7.061e+02 8.767e+02 1.081e+03 2.366e+03, threshold=1.753e+03, percent-clipped=7.0 +2023-04-01 05:36:06,101 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 05:36:18,327 INFO [train.py:903] (3/4) Epoch 5, batch 4050, loss[loss=0.2838, simple_loss=0.3526, pruned_loss=0.1075, over 19614.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3449, pruned_loss=0.1134, over 3821271.22 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:36:20,397 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 05:36:33,372 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31373.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:36:42,206 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:36:51,301 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:07,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:18,623 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:19,400 INFO [train.py:903] (3/4) Epoch 5, batch 4100, loss[loss=0.2454, simple_loss=0.3147, pruned_loss=0.08804, over 19734.00 frames. 
], tot_loss[loss=0.2854, simple_loss=0.3446, pruned_loss=0.1132, over 3824857.07 frames. ], batch size: 51, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:37:20,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:37,698 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:37:48,916 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.964e+02 7.397e+02 9.605e+02 3.908e+03, threshold=1.479e+03, percent-clipped=5.0 +2023-04-01 05:37:49,658 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 05:37:53,720 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 05:38:20,925 INFO [train.py:903] (3/4) Epoch 5, batch 4150, loss[loss=0.2728, simple_loss=0.327, pruned_loss=0.1092, over 19711.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3442, pruned_loss=0.1125, over 3822454.81 frames. ], batch size: 46, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:38:51,006 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31488.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:39:16,906 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31509.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:39:20,064 INFO [train.py:903] (3/4) Epoch 5, batch 4200, loss[loss=0.2879, simple_loss=0.3511, pruned_loss=0.1124, over 18000.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3446, pruned_loss=0.1129, over 3816512.50 frames. ], batch size: 83, lr: 1.61e-02, grad_scale: 4.0 +2023-04-01 05:39:23,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 05:39:50,934 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.017e+02 6.856e+02 8.101e+02 1.045e+03 3.023e+03, threshold=1.620e+03, percent-clipped=6.0 +2023-04-01 05:40:19,245 INFO [train.py:903] (3/4) Epoch 5, batch 4250, loss[loss=0.2057, simple_loss=0.2765, pruned_loss=0.0674, over 19746.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3431, pruned_loss=0.1117, over 3823749.46 frames. ], batch size: 46, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:40:21,995 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3730, 1.0805, 1.0603, 1.5685, 1.1308, 1.5046, 1.6948, 1.4962], + device='cuda:3'), covar=tensor([0.0882, 0.1169, 0.1291, 0.0941, 0.1069, 0.0845, 0.0952, 0.0722], + device='cuda:3'), in_proj_covar=tensor([0.0231, 0.0253, 0.0245, 0.0280, 0.0274, 0.0234, 0.0237, 0.0223], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:40:35,456 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 05:40:46,345 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 05:41:20,812 INFO [train.py:903] (3/4) Epoch 5, batch 4300, loss[loss=0.2385, simple_loss=0.3007, pruned_loss=0.08819, over 16083.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3431, pruned_loss=0.1118, over 3815727.31 frames. 
], batch size: 35, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:41:42,005 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7830, 1.0800, 1.1586, 2.0003, 1.7065, 1.6374, 1.9078, 1.7780], + device='cuda:3'), covar=tensor([0.0735, 0.1257, 0.1163, 0.0903, 0.0957, 0.0873, 0.0895, 0.0726], + device='cuda:3'), in_proj_covar=tensor([0.0229, 0.0251, 0.0245, 0.0277, 0.0271, 0.0232, 0.0233, 0.0221], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 05:41:51,068 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:41:51,770 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 7.144e+02 8.425e+02 1.091e+03 2.021e+03, threshold=1.685e+03, percent-clipped=5.0 +2023-04-01 05:42:13,884 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 05:42:18,615 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:21,709 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:22,335 INFO [train.py:903] (3/4) Epoch 5, batch 4350, loss[loss=0.333, simple_loss=0.3758, pruned_loss=0.1452, over 19643.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3442, pruned_loss=0.1128, over 3812333.94 frames. ], batch size: 58, lr: 1.60e-02, grad_scale: 4.0 +2023-04-01 05:42:28,428 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:45,966 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:42:57,836 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:22,084 INFO [train.py:903] (3/4) Epoch 5, batch 4400, loss[loss=0.3596, simple_loss=0.3922, pruned_loss=0.1635, over 13689.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3455, pruned_loss=0.1137, over 3798357.01 frames. ], batch size: 136, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:43:33,985 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:43:44,926 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 05:43:53,319 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.183e+02 7.191e+02 8.879e+02 1.082e+03 1.961e+03, threshold=1.776e+03, percent-clipped=1.0 +2023-04-01 05:43:55,412 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-01 05:44:00,347 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8074, 1.9607, 2.3309, 2.2351, 3.0003, 3.5272, 3.6316, 3.7556], + device='cuda:3'), covar=tensor([0.1174, 0.2116, 0.2056, 0.1370, 0.0685, 0.0210, 0.0151, 0.0119], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0279, 0.0315, 0.0251, 0.0198, 0.0119, 0.0203, 0.0138], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:44:02,740 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31744.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:44:20,764 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2832, 1.3002, 1.5413, 1.2425, 2.7211, 3.6043, 3.5963, 3.9144], + device='cuda:3'), covar=tensor([0.1364, 0.2870, 0.2838, 0.1821, 0.0451, 0.0109, 0.0179, 0.0103], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0282, 0.0318, 0.0253, 0.0200, 0.0120, 0.0204, 0.0139], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:44:22,687 INFO [train.py:903] (3/4) Epoch 5, batch 4450, loss[loss=0.222, simple_loss=0.2991, pruned_loss=0.07248, over 19845.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3442, pruned_loss=0.1128, over 3807041.93 frames. ], batch size: 52, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:44:30,939 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31769.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:44:32,881 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:16,703 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:45:23,179 INFO [train.py:903] (3/4) Epoch 5, batch 4500, loss[loss=0.2583, simple_loss=0.332, pruned_loss=0.09228, over 19540.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3445, pruned_loss=0.1128, over 3815578.00 frames. ], batch size: 54, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:45:53,689 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.229e+02 6.666e+02 8.776e+02 1.149e+03 2.550e+03, threshold=1.755e+03, percent-clipped=7.0 +2023-04-01 05:45:54,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9063, 2.5808, 1.7052, 1.8010, 1.7668, 1.8164, 0.5694, 2.0309], + device='cuda:3'), covar=tensor([0.0375, 0.0372, 0.0451, 0.0620, 0.0703, 0.0613, 0.0733, 0.0583], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0280, 0.0279, 0.0301, 0.0374, 0.0293, 0.0273, 0.0289], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:45:58,554 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:46:12,840 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31853.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 05:46:24,486 INFO [train.py:903] (3/4) Epoch 5, batch 4550, loss[loss=0.2287, simple_loss=0.2932, pruned_loss=0.08203, over 19394.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3438, pruned_loss=0.112, over 3818045.65 frames. 
], batch size: 48, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:46:30,251 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 05:46:51,959 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 05:46:52,276 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:46:54,762 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1002, 1.9976, 1.9209, 3.2164, 2.1082, 3.0780, 3.0184, 1.8070], + device='cuda:3'), covar=tensor([0.2332, 0.1997, 0.0933, 0.1166, 0.2303, 0.0744, 0.1596, 0.1683], + device='cuda:3'), in_proj_covar=tensor([0.0626, 0.0616, 0.0555, 0.0771, 0.0661, 0.0531, 0.0682, 0.0585], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 05:47:06,472 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31896.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:25,656 INFO [train.py:903] (3/4) Epoch 5, batch 4600, loss[loss=0.3564, simple_loss=0.394, pruned_loss=0.1594, over 13477.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3432, pruned_loss=0.1116, over 3807636.31 frames. ], batch size: 135, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:47:50,399 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31933.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:47:55,744 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 6.775e+02 7.918e+02 9.900e+02 3.222e+03, threshold=1.584e+03, percent-clipped=3.0 +2023-04-01 05:48:25,675 INFO [train.py:903] (3/4) Epoch 5, batch 4650, loss[loss=0.4524, simple_loss=0.4554, pruned_loss=0.2247, over 13395.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3436, pruned_loss=0.112, over 3806152.97 frames. ], batch size: 136, lr: 1.60e-02, grad_scale: 8.0 +2023-04-01 05:48:32,757 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31968.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:48:32,827 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31968.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:48:41,063 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 05:48:43,626 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5111, 1.5196, 1.6368, 2.3504, 1.9173, 2.1832, 2.6609, 2.1566], + device='cuda:3'), covar=tensor([0.0717, 0.1303, 0.1206, 0.1122, 0.1068, 0.0874, 0.0920, 0.0777], + device='cuda:3'), in_proj_covar=tensor([0.0231, 0.0257, 0.0248, 0.0282, 0.0273, 0.0235, 0.0236, 0.0226], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 05:48:52,753 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 05:49:00,905 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0386, 2.0890, 2.1013, 2.9941, 2.4407, 2.8065, 2.6612, 2.0637], + device='cuda:3'), covar=tensor([0.1828, 0.1320, 0.0716, 0.0747, 0.1326, 0.0468, 0.1151, 0.1169], + device='cuda:3'), in_proj_covar=tensor([0.0626, 0.0613, 0.0552, 0.0768, 0.0656, 0.0529, 0.0679, 0.0581], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 05:49:25,341 INFO [train.py:903] (3/4) Epoch 5, batch 4700, loss[loss=0.3595, simple_loss=0.3989, pruned_loss=0.16, over 13045.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.343, pruned_loss=0.1119, over 3804069.84 frames. ], batch size: 136, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:49:37,012 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-01 05:49:48,442 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 05:49:56,507 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.386e+02 6.540e+02 8.637e+02 1.061e+03 2.519e+03, threshold=1.727e+03, percent-clipped=5.0 +2023-04-01 05:50:27,325 INFO [train.py:903] (3/4) Epoch 5, batch 4750, loss[loss=0.2739, simple_loss=0.3207, pruned_loss=0.1135, over 19320.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3413, pruned_loss=0.111, over 3822272.35 frames. ], batch size: 44, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:50:32,837 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:50:34,227 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8655, 1.3152, 0.9981, 0.9865, 1.2162, 0.8529, 0.7240, 1.2640], + device='cuda:3'), covar=tensor([0.0522, 0.0565, 0.0923, 0.0432, 0.0373, 0.1029, 0.0537, 0.0372], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0264, 0.0312, 0.0234, 0.0215, 0.0309, 0.0281, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 05:51:28,013 INFO [train.py:903] (3/4) Epoch 5, batch 4800, loss[loss=0.2692, simple_loss=0.3348, pruned_loss=0.1018, over 19614.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3431, pruned_loss=0.1115, over 3834890.28 frames. 
], batch size: 61, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:51:57,642 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+02 7.051e+02 8.763e+02 1.042e+03 3.094e+03, threshold=1.753e+03, percent-clipped=4.0 +2023-04-01 05:52:01,617 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1541, 1.3733, 1.6837, 1.2695, 2.8626, 3.5162, 3.3997, 3.7312], + device='cuda:3'), covar=tensor([0.1594, 0.2798, 0.2885, 0.1958, 0.0442, 0.0248, 0.0200, 0.0119], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0280, 0.0317, 0.0251, 0.0197, 0.0117, 0.0204, 0.0138], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:52:04,804 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32142.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:14,250 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:27,937 INFO [train.py:903] (3/4) Epoch 5, batch 4850, loss[loss=0.2566, simple_loss=0.3364, pruned_loss=0.08841, over 19645.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3427, pruned_loss=0.1112, over 3832797.83 frames. ], batch size: 58, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:52:34,183 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32167.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:51,467 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:52:54,109 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 05:52:57,426 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:53:12,908 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 05:53:17,713 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 05:53:18,687 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 05:53:21,340 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8067, 4.1762, 4.4732, 4.4216, 1.7776, 4.0467, 3.6709, 4.0561], + device='cuda:3'), covar=tensor([0.0845, 0.0722, 0.0472, 0.0399, 0.3897, 0.0436, 0.0498, 0.0966], + device='cuda:3'), in_proj_covar=tensor([0.0509, 0.0454, 0.0591, 0.0499, 0.0578, 0.0362, 0.0392, 0.0558], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 05:53:26,880 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 05:53:27,951 INFO [train.py:903] (3/4) Epoch 5, batch 4900, loss[loss=0.357, simple_loss=0.3963, pruned_loss=0.1588, over 13766.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.343, pruned_loss=0.1114, over 3816532.32 frames. ], batch size: 136, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:53:45,222 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32224.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:53:48,942 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 05:53:59,194 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.307e+02 6.585e+02 7.912e+02 1.039e+03 2.328e+03, threshold=1.582e+03, percent-clipped=1.0 +2023-04-01 05:54:02,583 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32240.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:13,283 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32249.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 05:54:16,862 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-01 05:54:29,206 INFO [train.py:903] (3/4) Epoch 5, batch 4950, loss[loss=0.2309, simple_loss=0.3016, pruned_loss=0.08012, over 19479.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3437, pruned_loss=0.1119, over 3795721.41 frames. ], batch size: 49, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:54:33,932 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:48,159 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32277.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:54:49,209 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 05:55:12,820 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 05:55:15,577 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:30,487 INFO [train.py:903] (3/4) Epoch 5, batch 5000, loss[loss=0.3172, simple_loss=0.3664, pruned_loss=0.1339, over 19682.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3441, pruned_loss=0.1126, over 3795590.68 frames. ], batch size: 59, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:55:30,633 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:55:40,423 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 05:55:50,530 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 05:55:58,320 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+02 7.030e+02 8.789e+02 1.165e+03 2.289e+03, threshold=1.758e+03, percent-clipped=7.0 +2023-04-01 05:56:21,778 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:56:29,553 INFO [train.py:903] (3/4) Epoch 5, batch 5050, loss[loss=0.2747, simple_loss=0.3452, pruned_loss=0.1021, over 18208.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3451, pruned_loss=0.1131, over 3791013.39 frames. ], batch size: 83, lr: 1.59e-02, grad_scale: 8.0 +2023-04-01 05:57:05,113 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 05:57:07,770 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:57:30,263 INFO [train.py:903] (3/4) Epoch 5, batch 5100, loss[loss=0.3014, simple_loss=0.3636, pruned_loss=0.1196, over 19663.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3444, pruned_loss=0.1127, over 3807483.05 frames. 
], batch size: 60, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:57:40,583 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 05:57:44,922 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 05:57:51,244 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 05:57:51,520 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:01,497 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4787, 1.1581, 1.5895, 1.2151, 2.7026, 3.6269, 3.5213, 3.8646], + device='cuda:3'), covar=tensor([0.1336, 0.3049, 0.2890, 0.1905, 0.0443, 0.0131, 0.0191, 0.0111], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0279, 0.0316, 0.0250, 0.0195, 0.0115, 0.0204, 0.0137], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 05:58:02,218 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 6.879e+02 8.267e+02 1.044e+03 2.791e+03, threshold=1.653e+03, percent-clipped=3.0 +2023-04-01 05:58:02,655 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32437.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:20,131 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9211, 1.6460, 1.6372, 2.2430, 1.7143, 2.2198, 2.1562, 2.0886], + device='cuda:3'), covar=tensor([0.0769, 0.0944, 0.0994, 0.0878, 0.0945, 0.0721, 0.0884, 0.0617], + device='cuda:3'), in_proj_covar=tensor([0.0232, 0.0248, 0.0244, 0.0276, 0.0267, 0.0232, 0.0230, 0.0223], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 05:58:32,011 INFO [train.py:903] (3/4) Epoch 5, batch 5150, loss[loss=0.3529, simple_loss=0.3955, pruned_loss=0.1551, over 19530.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3431, pruned_loss=0.1117, over 3808632.90 frames. ], batch size: 54, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:58:32,383 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32462.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:58:45,277 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 05:59:19,201 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 05:59:33,452 INFO [train.py:903] (3/4) Epoch 5, batch 5200, loss[loss=0.2672, simple_loss=0.3378, pruned_loss=0.09831, over 19777.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.342, pruned_loss=0.1108, over 3803868.81 frames. ], batch size: 56, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 05:59:43,942 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 05:59:45,955 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-01 06:00:02,762 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 6.175e+02 7.903e+02 1.065e+03 1.799e+03, threshold=1.581e+03, percent-clipped=1.0 +2023-04-01 06:00:15,194 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:26,342 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:00:29,355 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 06:00:32,702 INFO [train.py:903] (3/4) Epoch 5, batch 5250, loss[loss=0.3066, simple_loss=0.3674, pruned_loss=0.1229, over 18104.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3426, pruned_loss=0.1109, over 3815265.13 frames. ], batch size: 83, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:00:55,249 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,207 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:01:32,915 INFO [train.py:903] (3/4) Epoch 5, batch 5300, loss[loss=0.3231, simple_loss=0.3697, pruned_loss=0.1383, over 19679.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3416, pruned_loss=0.1098, over 3827899.10 frames. ], batch size: 53, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:01:41,079 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1765, 1.2071, 1.8402, 1.4263, 2.5847, 2.1618, 2.8314, 1.0994], + device='cuda:3'), covar=tensor([0.1948, 0.3349, 0.1667, 0.1584, 0.1268, 0.1609, 0.1336, 0.2968], + device='cuda:3'), in_proj_covar=tensor([0.0446, 0.0507, 0.0480, 0.0407, 0.0555, 0.0446, 0.0625, 0.0442], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:01:51,391 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 06:02:04,555 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32636.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:05,308 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 6.583e+02 8.041e+02 1.027e+03 2.106e+03, threshold=1.608e+03, percent-clipped=4.0 +2023-04-01 06:02:17,829 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32648.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:02:34,326 INFO [train.py:903] (3/4) Epoch 5, batch 5350, loss[loss=0.2798, simple_loss=0.3307, pruned_loss=0.1144, over 19714.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3427, pruned_loss=0.1107, over 3807682.11 frames. 
], batch size: 51, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:02:36,861 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2566, 1.2689, 1.8119, 1.4978, 2.5149, 2.2784, 2.6489, 0.8981], + device='cuda:3'), covar=tensor([0.1624, 0.2794, 0.1431, 0.1327, 0.1084, 0.1251, 0.1283, 0.2655], + device='cuda:3'), in_proj_covar=tensor([0.0441, 0.0502, 0.0475, 0.0403, 0.0549, 0.0439, 0.0621, 0.0437], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:02:44,362 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5029, 1.1046, 1.3157, 1.3424, 2.1128, 0.9949, 1.8936, 2.1887], + device='cuda:3'), covar=tensor([0.0558, 0.2420, 0.2247, 0.1348, 0.0799, 0.1758, 0.0880, 0.0564], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0314, 0.0316, 0.0286, 0.0306, 0.0316, 0.0289, 0.0306], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:02:48,913 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:00,322 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32683.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:06,684 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 06:03:31,063 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:03:35,126 INFO [train.py:903] (3/4) Epoch 5, batch 5400, loss[loss=0.2889, simple_loss=0.3477, pruned_loss=0.1151, over 19582.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3423, pruned_loss=0.1107, over 3814236.86 frames. ], batch size: 52, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:03:48,191 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.79 vs. limit=5.0 +2023-04-01 06:04:03,210 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.192e+02 6.673e+02 8.431e+02 1.064e+03 2.658e+03, threshold=1.686e+03, percent-clipped=8.0 +2023-04-01 06:04:21,185 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9951, 2.0716, 1.9476, 3.0134, 1.9369, 2.8927, 2.6766, 1.9070], + device='cuda:3'), covar=tensor([0.2070, 0.1619, 0.0839, 0.0929, 0.1997, 0.0636, 0.1482, 0.1403], + device='cuda:3'), in_proj_covar=tensor([0.0628, 0.0621, 0.0554, 0.0777, 0.0659, 0.0541, 0.0678, 0.0586], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:04:34,474 INFO [train.py:903] (3/4) Epoch 5, batch 5450, loss[loss=0.2614, simple_loss=0.33, pruned_loss=0.09641, over 19799.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3427, pruned_loss=0.1106, over 3811903.27 frames. ], batch size: 56, lr: 1.58e-02, grad_scale: 8.0 +2023-04-01 06:04:47,441 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.58 vs. limit=5.0 +2023-04-01 06:05:26,452 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.58 vs. limit=5.0 +2023-04-01 06:05:34,680 INFO [train.py:903] (3/4) Epoch 5, batch 5500, loss[loss=0.353, simple_loss=0.4024, pruned_loss=0.1518, over 19734.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3433, pruned_loss=0.111, over 3806154.06 frames. 
], batch size: 63, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:05:56,705 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 06:06:05,439 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+02 6.283e+02 7.782e+02 1.000e+03 2.107e+03, threshold=1.556e+03, percent-clipped=4.0 +2023-04-01 06:06:32,678 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7824, 1.7873, 1.7762, 2.8104, 1.7828, 2.6861, 2.4431, 1.7364], + device='cuda:3'), covar=tensor([0.2023, 0.1701, 0.0898, 0.0898, 0.1942, 0.0623, 0.1610, 0.1574], + device='cuda:3'), in_proj_covar=tensor([0.0629, 0.0621, 0.0555, 0.0781, 0.0667, 0.0544, 0.0681, 0.0586], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:06:34,472 INFO [train.py:903] (3/4) Epoch 5, batch 5550, loss[loss=0.3573, simple_loss=0.3925, pruned_loss=0.1611, over 19314.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3441, pruned_loss=0.1117, over 3806834.54 frames. ], batch size: 66, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:06:40,852 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 06:07:08,641 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2368, 1.3926, 1.9168, 1.4385, 2.5519, 2.1653, 2.7274, 0.9945], + device='cuda:3'), covar=tensor([0.1758, 0.2931, 0.1446, 0.1433, 0.1129, 0.1406, 0.1131, 0.2815], + device='cuda:3'), in_proj_covar=tensor([0.0446, 0.0505, 0.0480, 0.0407, 0.0553, 0.0445, 0.0628, 0.0445], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:07:27,160 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4766, 3.9197, 4.0649, 4.0644, 1.4234, 3.7240, 3.3273, 3.6689], + device='cuda:3'), covar=tensor([0.0975, 0.0613, 0.0579, 0.0470, 0.4152, 0.0431, 0.0574, 0.1105], + device='cuda:3'), in_proj_covar=tensor([0.0511, 0.0449, 0.0600, 0.0496, 0.0575, 0.0365, 0.0388, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 06:07:29,977 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 06:07:36,761 INFO [train.py:903] (3/4) Epoch 5, batch 5600, loss[loss=0.3085, simple_loss=0.361, pruned_loss=0.1279, over 19511.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3435, pruned_loss=0.111, over 3815734.10 frames. ], batch size: 64, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:08:06,606 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 7.188e+02 9.159e+02 1.163e+03 2.158e+03, threshold=1.832e+03, percent-clipped=9.0 +2023-04-01 06:08:31,824 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5915, 1.1857, 1.4699, 1.8993, 1.6205, 2.0270, 2.2576, 2.0529], + device='cuda:3'), covar=tensor([0.0883, 0.1093, 0.1048, 0.0945, 0.0892, 0.0701, 0.0769, 0.0571], + device='cuda:3'), in_proj_covar=tensor([0.0232, 0.0249, 0.0245, 0.0274, 0.0267, 0.0232, 0.0232, 0.0223], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 06:08:38,297 INFO [train.py:903] (3/4) Epoch 5, batch 5650, loss[loss=0.2826, simple_loss=0.3475, pruned_loss=0.1089, over 19319.00 frames. 
], tot_loss[loss=0.2813, simple_loss=0.3423, pruned_loss=0.1102, over 3824000.56 frames. ], batch size: 66, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:09:04,640 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:09:24,864 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 06:09:38,280 INFO [train.py:903] (3/4) Epoch 5, batch 5700, loss[loss=0.2619, simple_loss=0.3345, pruned_loss=0.09466, over 19743.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.343, pruned_loss=0.1111, over 3832158.72 frames. ], batch size: 63, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:10:09,259 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.491e+02 7.117e+02 8.783e+02 1.086e+03 2.576e+03, threshold=1.757e+03, percent-clipped=4.0 +2023-04-01 06:10:21,039 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2323, 1.1181, 1.3390, 1.3050, 1.8635, 1.8564, 1.8830, 0.4375], + device='cuda:3'), covar=tensor([0.1706, 0.2894, 0.1586, 0.1423, 0.1004, 0.1445, 0.1018, 0.2845], + device='cuda:3'), in_proj_covar=tensor([0.0440, 0.0495, 0.0475, 0.0401, 0.0544, 0.0440, 0.0614, 0.0436], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:10:38,545 INFO [train.py:903] (3/4) Epoch 5, batch 5750, loss[loss=0.2389, simple_loss=0.3095, pruned_loss=0.08414, over 19633.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.342, pruned_loss=0.1106, over 3847257.03 frames. ], batch size: 50, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:10:39,675 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 06:10:47,555 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 06:10:52,568 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 06:11:09,100 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33087.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:11:18,384 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4894, 3.8457, 4.0517, 3.9770, 1.6779, 3.7382, 3.3993, 3.6938], + device='cuda:3'), covar=tensor([0.0840, 0.0572, 0.0506, 0.0498, 0.3438, 0.0380, 0.0445, 0.1013], + device='cuda:3'), in_proj_covar=tensor([0.0514, 0.0448, 0.0602, 0.0500, 0.0577, 0.0366, 0.0388, 0.0566], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 06:11:30,086 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 06:11:40,193 INFO [train.py:903] (3/4) Epoch 5, batch 5800, loss[loss=0.2709, simple_loss=0.3219, pruned_loss=0.1099, over 19743.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3422, pruned_loss=0.1104, over 3840497.09 frames. ], batch size: 51, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:08,910 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+02 7.070e+02 8.520e+02 1.129e+03 2.712e+03, threshold=1.704e+03, percent-clipped=8.0 +2023-04-01 06:12:40,681 INFO [train.py:903] (3/4) Epoch 5, batch 5850, loss[loss=0.2981, simple_loss=0.3627, pruned_loss=0.1168, over 19478.00 frames. 
], tot_loss[loss=0.2841, simple_loss=0.344, pruned_loss=0.1122, over 3833954.37 frames. ], batch size: 64, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:12:53,496 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:13:13,036 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1894, 1.3142, 1.1254, 1.0120, 1.0114, 1.0751, 0.0155, 0.3560], + device='cuda:3'), covar=tensor([0.0315, 0.0303, 0.0175, 0.0222, 0.0587, 0.0245, 0.0489, 0.0463], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0283, 0.0282, 0.0298, 0.0370, 0.0292, 0.0274, 0.0291], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 06:13:40,940 INFO [train.py:903] (3/4) Epoch 5, batch 5900, loss[loss=0.2247, simple_loss=0.2899, pruned_loss=0.07978, over 19410.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3444, pruned_loss=0.1122, over 3824981.76 frames. ], batch size: 48, lr: 1.57e-02, grad_scale: 8.0 +2023-04-01 06:13:43,338 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 06:14:04,514 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 06:14:11,990 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.345e+02 7.066e+02 8.762e+02 1.130e+03 2.300e+03, threshold=1.752e+03, percent-clipped=6.0 +2023-04-01 06:14:18,954 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3650, 2.0966, 1.7110, 1.3382, 2.0104, 1.0202, 1.1644, 1.6819], + device='cuda:3'), covar=tensor([0.0577, 0.0453, 0.0672, 0.0536, 0.0307, 0.0981, 0.0585, 0.0296], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0263, 0.0307, 0.0236, 0.0216, 0.0305, 0.0283, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:14:41,682 INFO [train.py:903] (3/4) Epoch 5, batch 5950, loss[loss=0.2623, simple_loss=0.3365, pruned_loss=0.09404, over 19763.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3428, pruned_loss=0.1109, over 3828682.47 frames. ], batch size: 63, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:14:44,252 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 06:15:43,948 INFO [train.py:903] (3/4) Epoch 5, batch 6000, loss[loss=0.2644, simple_loss=0.3285, pruned_loss=0.1002, over 19691.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3415, pruned_loss=0.1104, over 3830889.25 frames. ], batch size: 53, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:15:43,948 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 06:15:56,873 INFO [train.py:937] (3/4) Epoch 5, validation: loss=0.203, simple_loss=0.3017, pruned_loss=0.05213, over 944034.00 frames. 
+2023-04-01 06:15:56,874 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 06:16:05,083 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2711, 1.4154, 2.0617, 1.5852, 3.2091, 2.5296, 3.5702, 1.6227], + device='cuda:3'), covar=tensor([0.1981, 0.3321, 0.1801, 0.1421, 0.1220, 0.1538, 0.1387, 0.2716], + device='cuda:3'), in_proj_covar=tensor([0.0448, 0.0507, 0.0484, 0.0410, 0.0560, 0.0447, 0.0623, 0.0442], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:16:18,146 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:16:28,853 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.050e+02 6.893e+02 8.503e+02 1.056e+03 1.945e+03, threshold=1.701e+03, percent-clipped=4.0 +2023-04-01 06:16:37,619 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 06:16:59,411 INFO [train.py:903] (3/4) Epoch 5, batch 6050, loss[loss=0.261, simple_loss=0.3301, pruned_loss=0.09597, over 19662.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3413, pruned_loss=0.1105, over 3822726.80 frames. ], batch size: 58, lr: 1.56e-02, grad_scale: 16.0 +2023-04-01 06:18:00,558 INFO [train.py:903] (3/4) Epoch 5, batch 6100, loss[loss=0.3357, simple_loss=0.3825, pruned_loss=0.1444, over 19735.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3411, pruned_loss=0.1108, over 3818443.34 frames. ], batch size: 63, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:18:23,051 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33431.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:18:32,605 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.262e+02 7.464e+02 9.853e+02 2.581e+03, threshold=1.493e+03, percent-clipped=2.0 +2023-04-01 06:18:38,640 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 06:18:39,234 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:19:00,962 INFO [train.py:903] (3/4) Epoch 5, batch 6150, loss[loss=0.2646, simple_loss=0.3272, pruned_loss=0.101, over 19598.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3422, pruned_loss=0.1111, over 3825662.66 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:19:29,652 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 06:19:35,840 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:01,185 INFO [train.py:903] (3/4) Epoch 5, batch 6200, loss[loss=0.2861, simple_loss=0.3469, pruned_loss=0.1127, over 19680.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3413, pruned_loss=0.1108, over 3814995.11 frames. 
], batch size: 53, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:20:08,934 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:20:34,185 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.213e+02 6.728e+02 8.677e+02 1.165e+03 2.777e+03, threshold=1.735e+03, percent-clipped=13.0 +2023-04-01 06:20:43,514 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:21:03,397 INFO [train.py:903] (3/4) Epoch 5, batch 6250, loss[loss=0.228, simple_loss=0.2961, pruned_loss=0.07992, over 19771.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3394, pruned_loss=0.1094, over 3810674.31 frames. ], batch size: 48, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:21:31,254 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 06:22:03,934 INFO [train.py:903] (3/4) Epoch 5, batch 6300, loss[loss=0.2353, simple_loss=0.305, pruned_loss=0.08279, over 19494.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3402, pruned_loss=0.11, over 3829945.83 frames. ], batch size: 49, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:22:28,415 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:22:35,562 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 6.435e+02 8.030e+02 9.840e+02 2.632e+03, threshold=1.606e+03, percent-clipped=3.0 +2023-04-01 06:22:47,560 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.62 vs. limit=5.0 +2023-04-01 06:23:04,055 INFO [train.py:903] (3/4) Epoch 5, batch 6350, loss[loss=0.263, simple_loss=0.3168, pruned_loss=0.1046, over 19029.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3412, pruned_loss=0.1104, over 3831558.32 frames. ], batch size: 42, lr: 1.56e-02, grad_scale: 8.0 +2023-04-01 06:23:45,912 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:23:50,609 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:05,038 INFO [train.py:903] (3/4) Epoch 5, batch 6400, loss[loss=0.3198, simple_loss=0.3936, pruned_loss=0.123, over 19691.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3423, pruned_loss=0.1108, over 3822862.96 frames. 
], batch size: 59, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:24:20,335 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33724.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:24:37,367 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.704e+02 6.874e+02 8.420e+02 1.031e+03 3.616e+03, threshold=1.684e+03, percent-clipped=3.0 +2023-04-01 06:25:00,331 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3290, 2.3180, 1.8483, 1.2337, 2.3248, 0.9845, 1.0253, 1.6387], + device='cuda:3'), covar=tensor([0.0759, 0.0512, 0.0640, 0.0678, 0.0370, 0.0992, 0.0689, 0.0414], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0269, 0.0312, 0.0237, 0.0225, 0.0304, 0.0284, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:25:05,847 INFO [train.py:903] (3/4) Epoch 5, batch 6450, loss[loss=0.3407, simple_loss=0.3804, pruned_loss=0.1505, over 19269.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3409, pruned_loss=0.1098, over 3837562.21 frames. ], batch size: 66, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:25:48,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 06:25:54,908 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:25:59,532 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. limit=5.0 +2023-04-01 06:26:06,361 INFO [train.py:903] (3/4) Epoch 5, batch 6500, loss[loss=0.2379, simple_loss=0.3191, pruned_loss=0.07837, over 19623.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3422, pruned_loss=0.1106, over 3826899.43 frames. ], batch size: 57, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:26:12,177 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 06:26:24,764 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:32,488 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33834.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:26:36,708 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.680e+02 8.171e+02 1.132e+03 2.519e+03, threshold=1.634e+03, percent-clipped=6.0 +2023-04-01 06:27:07,200 INFO [train.py:903] (3/4) Epoch 5, batch 6550, loss[loss=0.2682, simple_loss=0.3423, pruned_loss=0.09703, over 19658.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3417, pruned_loss=0.1102, over 3817206.34 frames. ], batch size: 59, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:27:38,517 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33888.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:06,990 INFO [train.py:903] (3/4) Epoch 5, batch 6600, loss[loss=0.2672, simple_loss=0.3355, pruned_loss=0.09948, over 19780.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3418, pruned_loss=0.11, over 3803857.44 frames. 
], batch size: 56, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:28:08,467 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33913.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:28:40,230 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.407e+02 7.502e+02 9.137e+02 1.060e+03 2.817e+03, threshold=1.827e+03, percent-clipped=6.0 +2023-04-01 06:28:52,899 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33949.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:01,860 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:29:09,038 INFO [train.py:903] (3/4) Epoch 5, batch 6650, loss[loss=0.2906, simple_loss=0.3667, pruned_loss=0.1073, over 19739.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3399, pruned_loss=0.1088, over 3817839.66 frames. ], batch size: 63, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:10,061 INFO [train.py:903] (3/4) Epoch 5, batch 6700, loss[loss=0.2281, simple_loss=0.2927, pruned_loss=0.08178, over 19797.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3417, pruned_loss=0.1106, over 3814760.08 frames. ], batch size: 48, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:30:25,148 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 06:30:40,326 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 7.257e+02 9.131e+02 1.100e+03 2.314e+03, threshold=1.826e+03, percent-clipped=7.0 +2023-04-01 06:30:40,478 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:30:42,851 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.49 vs. limit=5.0 +2023-04-01 06:31:06,262 INFO [train.py:903] (3/4) Epoch 5, batch 6750, loss[loss=0.3109, simple_loss=0.3668, pruned_loss=0.1275, over 19533.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3414, pruned_loss=0.1105, over 3824222.64 frames. ], batch size: 56, lr: 1.55e-02, grad_scale: 4.0 +2023-04-01 06:31:06,532 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:31:22,477 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4369, 2.3808, 1.6823, 1.5563, 2.3081, 1.0878, 1.1680, 1.6649], + device='cuda:3'), covar=tensor([0.0709, 0.0405, 0.0735, 0.0531, 0.0303, 0.0941, 0.0628, 0.0430], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0269, 0.0312, 0.0240, 0.0223, 0.0304, 0.0287, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:32:02,677 INFO [train.py:903] (3/4) Epoch 5, batch 6800, loss[loss=0.3152, simple_loss=0.3632, pruned_loss=0.1335, over 19614.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3409, pruned_loss=0.1103, over 3817476.03 frames. ], batch size: 50, lr: 1.55e-02, grad_scale: 8.0 +2023-04-01 06:32:30,647 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.271e+02 6.317e+02 7.808e+02 9.230e+02 1.582e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-04-01 06:32:47,562 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 06:32:48,597 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. 
Duration: 26.6166875 +2023-04-01 06:32:50,734 INFO [train.py:903] (3/4) Epoch 6, batch 0, loss[loss=0.3268, simple_loss=0.3786, pruned_loss=0.1375, over 19659.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3786, pruned_loss=0.1375, over 19659.00 frames. ], batch size: 58, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:32:50,734 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 06:33:02,090 INFO [train.py:937] (3/4) Epoch 6, validation: loss=0.2022, simple_loss=0.3015, pruned_loss=0.05149, over 944034.00 frames. +2023-04-01 06:33:02,091 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 06:33:15,298 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 06:33:20,354 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:33:30,313 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:03,531 INFO [train.py:903] (3/4) Epoch 6, batch 50, loss[loss=0.268, simple_loss=0.3273, pruned_loss=0.1044, over 19458.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3401, pruned_loss=0.1077, over 875423.28 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:34:15,334 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 06:34:22,176 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34205.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:34:29,751 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 06:34:40,712 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 06:34:54,295 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34230.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:35:05,215 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.721e+02 5.756e+02 7.149e+02 1.025e+03 3.166e+03, threshold=1.430e+03, percent-clipped=7.0 +2023-04-01 06:35:06,288 INFO [train.py:903] (3/4) Epoch 6, batch 100, loss[loss=0.2804, simple_loss=0.3517, pruned_loss=0.1045, over 19666.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3358, pruned_loss=0.1047, over 1542797.87 frames. ], batch size: 58, lr: 1.44e-02, grad_scale: 8.0 +2023-04-01 06:35:18,583 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 06:35:20,113 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4823, 1.1202, 1.1822, 1.3395, 2.1405, 0.9924, 1.6863, 2.2018], + device='cuda:3'), covar=tensor([0.0561, 0.2340, 0.2403, 0.1305, 0.0727, 0.1916, 0.0989, 0.0504], + device='cuda:3'), in_proj_covar=tensor([0.0286, 0.0311, 0.0313, 0.0286, 0.0307, 0.0316, 0.0284, 0.0301], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:36:06,449 INFO [train.py:903] (3/4) Epoch 6, batch 150, loss[loss=0.2439, simple_loss=0.3195, pruned_loss=0.0842, over 19656.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3377, pruned_loss=0.1062, over 2059303.60 frames. 
], batch size: 55, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:36:19,472 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:37:08,895 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.164e+02 6.478e+02 8.283e+02 9.993e+02 1.951e+03, threshold=1.657e+03, percent-clipped=7.0 +2023-04-01 06:37:08,914 INFO [train.py:903] (3/4) Epoch 6, batch 200, loss[loss=0.3262, simple_loss=0.3925, pruned_loss=0.1299, over 19357.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3378, pruned_loss=0.1064, over 2444450.95 frames. ], batch size: 70, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:37:08,921 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 06:38:12,100 INFO [train.py:903] (3/4) Epoch 6, batch 250, loss[loss=0.2733, simple_loss=0.3451, pruned_loss=0.1008, over 19302.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3383, pruned_loss=0.107, over 2760346.10 frames. ], batch size: 66, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:38:33,065 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:37,984 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:38:44,900 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:08,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:39:14,113 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 6.949e+02 8.663e+02 1.115e+03 2.860e+03, threshold=1.733e+03, percent-clipped=3.0 +2023-04-01 06:39:14,131 INFO [train.py:903] (3/4) Epoch 6, batch 300, loss[loss=0.2286, simple_loss=0.2956, pruned_loss=0.08084, over 19362.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3379, pruned_loss=0.1065, over 3004723.70 frames. ], batch size: 47, lr: 1.44e-02, grad_scale: 4.0 +2023-04-01 06:40:17,202 INFO [train.py:903] (3/4) Epoch 6, batch 350, loss[loss=0.2715, simple_loss=0.345, pruned_loss=0.09906, over 19527.00 frames. ], tot_loss[loss=0.2745, simple_loss=0.3375, pruned_loss=0.1058, over 3191399.74 frames. ], batch size: 56, lr: 1.43e-02, grad_scale: 4.0 +2023-04-01 06:40:22,957 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 06:40:37,843 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:40:45,481 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 06:40:55,534 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:41:02,905 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 06:41:18,579 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+02 6.755e+02 8.258e+02 9.979e+02 1.871e+03, threshold=1.652e+03, percent-clipped=1.0 +2023-04-01 06:41:18,598 INFO [train.py:903] (3/4) Epoch 6, batch 400, loss[loss=0.2746, simple_loss=0.3405, pruned_loss=0.1043, over 19610.00 frames. 
], tot_loss[loss=0.2739, simple_loss=0.3366, pruned_loss=0.1056, over 3324348.71 frames. ], batch size: 61, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:41:51,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9912, 1.4906, 1.5877, 2.1289, 1.9167, 1.7996, 1.7657, 1.8696], + device='cuda:3'), covar=tensor([0.0773, 0.1471, 0.1275, 0.0737, 0.1041, 0.0441, 0.0881, 0.0586], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0362, 0.0287, 0.0235, 0.0301, 0.0245, 0.0272, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:42:20,648 INFO [train.py:903] (3/4) Epoch 6, batch 450, loss[loss=0.2485, simple_loss=0.3194, pruned_loss=0.08874, over 19716.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3388, pruned_loss=0.1072, over 3438131.61 frames. ], batch size: 51, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:42:49,006 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34611.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:42:54,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 06:42:55,454 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 06:43:01,473 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:23,826 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.432e+02 6.970e+02 8.269e+02 1.071e+03 2.551e+03, threshold=1.654e+03, percent-clipped=6.0 +2023-04-01 06:43:23,844 INFO [train.py:903] (3/4) Epoch 6, batch 500, loss[loss=0.3231, simple_loss=0.38, pruned_loss=0.1331, over 19793.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3383, pruned_loss=0.107, over 3532393.60 frames. ], batch size: 56, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:43:25,159 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34640.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:29,871 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:43:29,995 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0401, 3.6415, 1.9826, 2.1354, 3.2075, 1.6008, 1.2716, 1.9841], + device='cuda:3'), covar=tensor([0.0815, 0.0305, 0.0770, 0.0531, 0.0299, 0.0852, 0.0756, 0.0482], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0271, 0.0314, 0.0238, 0.0227, 0.0305, 0.0286, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:43:35,808 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9147, 4.3647, 4.6102, 4.4859, 1.6288, 4.1147, 3.7032, 4.1918], + device='cuda:3'), covar=tensor([0.0916, 0.0497, 0.0390, 0.0431, 0.4086, 0.0356, 0.0464, 0.0872], + device='cuda:3'), in_proj_covar=tensor([0.0532, 0.0464, 0.0610, 0.0510, 0.0598, 0.0381, 0.0390, 0.0576], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 06:44:03,447 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:27,125 INFO [train.py:903] (3/4) Epoch 6, batch 550, loss[loss=0.2121, simple_loss=0.2793, pruned_loss=0.07244, over 19765.00 frames. 
], tot_loss[loss=0.275, simple_loss=0.3374, pruned_loss=0.1063, over 3589986.27 frames. ], batch size: 46, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:44:37,142 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:44:42,301 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-01 06:45:17,137 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34728.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:45:31,818 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.620e+02 6.191e+02 8.093e+02 9.820e+02 1.880e+03, threshold=1.619e+03, percent-clipped=2.0 +2023-04-01 06:45:31,837 INFO [train.py:903] (3/4) Epoch 6, batch 600, loss[loss=0.2865, simple_loss=0.343, pruned_loss=0.115, over 19776.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3373, pruned_loss=0.106, over 3646298.08 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:13,306 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 06:46:20,296 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34777.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:46:35,493 INFO [train.py:903] (3/4) Epoch 6, batch 650, loss[loss=0.228, simple_loss=0.2933, pruned_loss=0.08135, over 19613.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3376, pruned_loss=0.1066, over 3698366.41 frames. ], batch size: 50, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:46:51,809 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34802.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:47:38,641 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 6.280e+02 8.587e+02 1.153e+03 3.497e+03, threshold=1.717e+03, percent-clipped=9.0 +2023-04-01 06:47:38,661 INFO [train.py:903] (3/4) Epoch 6, batch 700, loss[loss=0.3804, simple_loss=0.4038, pruned_loss=0.1786, over 13354.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3376, pruned_loss=0.1066, over 3716810.34 frames. ], batch size: 136, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:47:51,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.7746, 5.1343, 2.5779, 4.5140, 1.1512, 4.8040, 4.9415, 5.1986], + device='cuda:3'), covar=tensor([0.0453, 0.0900, 0.2149, 0.0597, 0.4111, 0.0617, 0.0628, 0.0797], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0315, 0.0376, 0.0290, 0.0360, 0.0310, 0.0289, 0.0322], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 06:47:54,283 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3956, 2.9990, 2.0706, 2.0659, 2.2826, 2.3928, 0.7668, 2.1805], + device='cuda:3'), covar=tensor([0.0249, 0.0239, 0.0327, 0.0437, 0.0402, 0.0443, 0.0614, 0.0476], + device='cuda:3'), in_proj_covar=tensor([0.0284, 0.0288, 0.0292, 0.0306, 0.0374, 0.0295, 0.0286, 0.0296], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 06:48:27,772 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:48:43,534 INFO [train.py:903] (3/4) Epoch 6, batch 750, loss[loss=0.3174, simple_loss=0.371, pruned_loss=0.1319, over 19794.00 frames. 
], tot_loss[loss=0.2775, simple_loss=0.3399, pruned_loss=0.1076, over 3750829.71 frames. ], batch size: 63, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:48:47,836 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9677, 1.9979, 1.9447, 2.8705, 1.9668, 2.6859, 2.5804, 1.8458], + device='cuda:3'), covar=tensor([0.2150, 0.1651, 0.0861, 0.0995, 0.1971, 0.0697, 0.1622, 0.1556], + device='cuda:3'), in_proj_covar=tensor([0.0652, 0.0638, 0.0570, 0.0800, 0.0681, 0.0557, 0.0701, 0.0605], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 06:49:00,092 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:49:45,088 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.784e+02 6.254e+02 7.861e+02 1.094e+03 2.828e+03, threshold=1.572e+03, percent-clipped=5.0 +2023-04-01 06:49:45,108 INFO [train.py:903] (3/4) Epoch 6, batch 800, loss[loss=0.3084, simple_loss=0.3674, pruned_loss=0.1247, over 19340.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3385, pruned_loss=0.1072, over 3767472.34 frames. ], batch size: 66, lr: 1.43e-02, grad_scale: 8.0 +2023-04-01 06:50:02,467 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 06:50:03,738 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:37,442 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2016, 1.3615, 1.3818, 1.7686, 2.7546, 1.0784, 2.0899, 3.0333], + device='cuda:3'), covar=tensor([0.0437, 0.2441, 0.2286, 0.1238, 0.0641, 0.2148, 0.1064, 0.0392], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0317, 0.0317, 0.0287, 0.0313, 0.0315, 0.0289, 0.0307], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:50:41,648 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34984.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:46,241 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34988.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:50:48,468 INFO [train.py:903] (3/4) Epoch 6, batch 850, loss[loss=0.3071, simple_loss=0.3694, pruned_loss=0.1224, over 19649.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3382, pruned_loss=0.107, over 3775821.32 frames. ], batch size: 58, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:50:57,232 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 06:51:37,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2197, 2.8094, 2.1261, 2.1579, 2.1115, 2.4022, 0.5696, 1.9972], + device='cuda:3'), covar=tensor([0.0292, 0.0289, 0.0317, 0.0425, 0.0467, 0.0402, 0.0618, 0.0532], + device='cuda:3'), in_proj_covar=tensor([0.0288, 0.0289, 0.0289, 0.0308, 0.0376, 0.0295, 0.0285, 0.0298], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 06:51:42,524 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-04-01 06:51:49,537 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 6.223e+02 7.935e+02 9.772e+02 2.166e+03, threshold=1.587e+03, percent-clipped=2.0 +2023-04-01 06:51:49,557 INFO [train.py:903] (3/4) Epoch 6, batch 900, loss[loss=0.2873, simple_loss=0.3641, pruned_loss=0.1052, over 19310.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3378, pruned_loss=0.1067, over 3800593.25 frames. ], batch size: 66, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:28,250 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:52:30,300 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35072.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:52:51,734 INFO [train.py:903] (3/4) Epoch 6, batch 950, loss[loss=0.3078, simple_loss=0.363, pruned_loss=0.1263, over 19686.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3382, pruned_loss=0.1069, over 3812557.40 frames. ], batch size: 59, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:52:57,573 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 06:53:04,407 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:09,177 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:18,122 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:53:37,153 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0178, 1.4386, 1.4741, 1.5107, 2.6819, 0.9981, 1.8563, 2.7871], + device='cuda:3'), covar=tensor([0.0340, 0.1742, 0.1748, 0.1115, 0.0505, 0.1866, 0.0954, 0.0314], + device='cuda:3'), in_proj_covar=tensor([0.0289, 0.0311, 0.0311, 0.0283, 0.0307, 0.0314, 0.0286, 0.0304], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 06:53:55,220 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 7.194e+02 8.589e+02 1.083e+03 2.096e+03, threshold=1.718e+03, percent-clipped=5.0 +2023-04-01 06:53:55,239 INFO [train.py:903] (3/4) Epoch 6, batch 1000, loss[loss=0.2737, simple_loss=0.3293, pruned_loss=0.109, over 19600.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3384, pruned_loss=0.1075, over 3799841.41 frames. ], batch size: 50, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:54:48,383 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 06:54:52,216 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35187.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 06:54:55,181 INFO [train.py:903] (3/4) Epoch 6, batch 1050, loss[loss=0.2506, simple_loss=0.3198, pruned_loss=0.0907, over 19670.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3385, pruned_loss=0.1077, over 3811402.15 frames. ], batch size: 53, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:55:30,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-01 06:55:57,171 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.102e+02 7.066e+02 8.619e+02 1.238e+03 3.302e+03, threshold=1.724e+03, percent-clipped=8.0 +2023-04-01 06:55:57,191 INFO [train.py:903] (3/4) Epoch 6, batch 1100, loss[loss=0.2732, simple_loss=0.3401, pruned_loss=0.1032, over 19409.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3395, pruned_loss=0.1085, over 3812466.36 frames. ], batch size: 70, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:56:59,591 INFO [train.py:903] (3/4) Epoch 6, batch 1150, loss[loss=0.2848, simple_loss=0.3529, pruned_loss=0.1084, over 19301.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3386, pruned_loss=0.1081, over 3805150.21 frames. ], batch size: 66, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:57:45,502 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:04,200 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.124e+02 7.469e+02 9.050e+02 1.884e+03, threshold=1.494e+03, percent-clipped=1.0 +2023-04-01 06:58:04,219 INFO [train.py:903] (3/4) Epoch 6, batch 1200, loss[loss=0.2611, simple_loss=0.3253, pruned_loss=0.09845, over 19847.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3389, pruned_loss=0.1084, over 3804440.39 frames. ], batch size: 52, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:58:17,345 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:22,860 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9239, 3.5308, 2.3629, 3.2695, 1.0032, 3.2220, 3.2437, 3.2960], + device='cuda:3'), covar=tensor([0.0777, 0.1087, 0.1908, 0.0726, 0.3519, 0.0915, 0.0782, 0.0940], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0318, 0.0380, 0.0294, 0.0357, 0.0313, 0.0294, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 06:58:23,077 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:27,684 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:32,232 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35363.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 06:58:36,592 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 06:58:55,126 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35380.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:58:59,609 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:05,735 INFO [train.py:903] (3/4) Epoch 6, batch 1250, loss[loss=0.3282, simple_loss=0.3754, pruned_loss=0.1405, over 19765.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3383, pruned_loss=0.1081, over 3821542.63 frames. ], batch size: 63, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 06:59:09,344 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 06:59:26,319 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-04-01 07:00:08,110 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+02 6.685e+02 8.523e+02 1.077e+03 2.432e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-04-01 07:00:08,129 INFO [train.py:903] (3/4) Epoch 6, batch 1300, loss[loss=0.2525, simple_loss=0.331, pruned_loss=0.08701, over 19539.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3377, pruned_loss=0.1077, over 3815650.20 frames. ], batch size: 56, lr: 1.42e-02, grad_scale: 8.0 +2023-04-01 07:00:12,118 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35443.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:00:26,240 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35454.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:00:43,939 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35468.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:01:11,034 INFO [train.py:903] (3/4) Epoch 6, batch 1350, loss[loss=0.2755, simple_loss=0.3335, pruned_loss=0.1087, over 19746.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3375, pruned_loss=0.1074, over 3816817.85 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:01:26,078 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5559, 1.0798, 1.4250, 1.5718, 2.8829, 1.1193, 2.1312, 3.0979], + device='cuda:3'), covar=tensor([0.0475, 0.3122, 0.2770, 0.1720, 0.0867, 0.2499, 0.1273, 0.0554], + device='cuda:3'), in_proj_covar=tensor([0.0291, 0.0313, 0.0314, 0.0284, 0.0309, 0.0312, 0.0284, 0.0305], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:01:30,441 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9457, 2.1627, 2.2800, 2.2580, 1.0555, 2.0962, 1.9058, 2.0648], + device='cuda:3'), covar=tensor([0.0917, 0.1254, 0.0562, 0.0542, 0.2684, 0.0625, 0.0494, 0.0901], + device='cuda:3'), in_proj_covar=tensor([0.0532, 0.0463, 0.0618, 0.0507, 0.0592, 0.0378, 0.0396, 0.0578], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 07:01:30,555 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9970, 0.7932, 0.7288, 0.9539, 0.8166, 0.8967, 0.7020, 0.8853], + device='cuda:3'), covar=tensor([0.0635, 0.0909, 0.0994, 0.0556, 0.0731, 0.0372, 0.0810, 0.0491], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0359, 0.0285, 0.0233, 0.0300, 0.0239, 0.0267, 0.0229], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:02:13,028 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+02 6.783e+02 8.495e+02 1.071e+03 2.340e+03, threshold=1.699e+03, percent-clipped=3.0 +2023-04-01 07:02:13,047 INFO [train.py:903] (3/4) Epoch 6, batch 1400, loss[loss=0.2533, simple_loss=0.31, pruned_loss=0.09828, over 19763.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3376, pruned_loss=0.1077, over 3805915.30 frames. 
], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:02:22,780 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:02:47,194 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:03:13,362 INFO [train.py:903] (3/4) Epoch 6, batch 1450, loss[loss=0.2926, simple_loss=0.361, pruned_loss=0.1121, over 19777.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3385, pruned_loss=0.1082, over 3811814.91 frames. ], batch size: 56, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:03:13,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 07:04:15,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+02 6.609e+02 8.520e+02 1.101e+03 2.891e+03, threshold=1.704e+03, percent-clipped=3.0 +2023-04-01 07:04:15,942 INFO [train.py:903] (3/4) Epoch 6, batch 1500, loss[loss=0.3247, simple_loss=0.3715, pruned_loss=0.139, over 19757.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3383, pruned_loss=0.1078, over 3817796.33 frames. ], batch size: 63, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:04:24,661 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0492, 2.7039, 1.7109, 1.9005, 1.7399, 2.2820, 0.5529, 1.8352], + device='cuda:3'), covar=tensor([0.0281, 0.0321, 0.0349, 0.0501, 0.0562, 0.0469, 0.0659, 0.0592], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0296, 0.0292, 0.0312, 0.0381, 0.0299, 0.0286, 0.0299], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:04:43,320 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3343, 1.4251, 1.4007, 1.4368, 2.9179, 1.0231, 1.9714, 3.0008], + device='cuda:3'), covar=tensor([0.0374, 0.2239, 0.2301, 0.1479, 0.0568, 0.2109, 0.1156, 0.0407], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0313, 0.0316, 0.0285, 0.0310, 0.0311, 0.0286, 0.0304], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:05:17,134 INFO [train.py:903] (3/4) Epoch 6, batch 1550, loss[loss=0.3385, simple_loss=0.3742, pruned_loss=0.1514, over 19764.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3403, pruned_loss=0.1096, over 3802571.45 frames. 
], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:05:18,540 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0313, 1.8585, 1.4456, 1.1873, 1.7408, 1.0185, 0.9853, 1.5561], + device='cuda:3'), covar=tensor([0.0584, 0.0481, 0.0728, 0.0518, 0.0303, 0.0897, 0.0545, 0.0306], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0273, 0.0314, 0.0239, 0.0227, 0.0308, 0.0284, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:05:39,897 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35707.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:06:02,414 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5953, 1.2935, 1.3119, 2.0960, 1.6544, 2.0571, 2.1472, 1.8242], + device='cuda:3'), covar=tensor([0.0842, 0.1071, 0.1107, 0.0849, 0.0962, 0.0674, 0.0741, 0.0686], + device='cuda:3'), in_proj_covar=tensor([0.0229, 0.0248, 0.0240, 0.0271, 0.0264, 0.0232, 0.0224, 0.0219], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 07:06:09,043 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9139, 2.0619, 1.8269, 1.9118, 4.4840, 0.9925, 2.1473, 4.1028], + device='cuda:3'), covar=tensor([0.0270, 0.2148, 0.2283, 0.1436, 0.0469, 0.2387, 0.1298, 0.0360], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0312, 0.0315, 0.0286, 0.0309, 0.0311, 0.0285, 0.0304], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:06:16,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:06:22,435 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.104e+02 6.450e+02 9.026e+02 1.093e+03 2.835e+03, threshold=1.805e+03, percent-clipped=5.0 +2023-04-01 07:06:22,459 INFO [train.py:903] (3/4) Epoch 6, batch 1600, loss[loss=0.2441, simple_loss=0.3114, pruned_loss=0.08845, over 19480.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3394, pruned_loss=0.1086, over 3820895.05 frames. ], batch size: 49, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:06:44,235 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 07:07:23,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:07:24,378 INFO [train.py:903] (3/4) Epoch 6, batch 1650, loss[loss=0.2718, simple_loss=0.3435, pruned_loss=0.1, over 19690.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3388, pruned_loss=0.1081, over 3815638.16 frames. 
], batch size: 59, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:05,730 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35822.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:08:09,320 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:13,941 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35829.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:23,387 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7102, 1.7636, 1.5318, 1.3144, 1.2335, 1.5045, 0.1258, 0.6329], + device='cuda:3'), covar=tensor([0.0280, 0.0280, 0.0159, 0.0254, 0.0638, 0.0251, 0.0490, 0.0478], + device='cuda:3'), in_proj_covar=tensor([0.0290, 0.0297, 0.0291, 0.0311, 0.0382, 0.0300, 0.0286, 0.0301], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:08:27,429 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.226e+02 6.593e+02 7.720e+02 9.793e+02 2.227e+03, threshold=1.544e+03, percent-clipped=1.0 +2023-04-01 07:08:27,448 INFO [train.py:903] (3/4) Epoch 6, batch 1700, loss[loss=0.304, simple_loss=0.3532, pruned_loss=0.1273, over 19523.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3381, pruned_loss=0.1072, over 3836220.09 frames. ], batch size: 54, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:08:38,917 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35850.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:40,015 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35851.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:08:54,021 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 07:09:06,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 07:09:18,234 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0625, 2.8448, 1.9417, 2.0086, 1.7984, 2.3504, 0.7551, 1.9287], + device='cuda:3'), covar=tensor([0.0251, 0.0255, 0.0263, 0.0387, 0.0508, 0.0402, 0.0564, 0.0482], + device='cuda:3'), in_proj_covar=tensor([0.0288, 0.0295, 0.0290, 0.0309, 0.0379, 0.0299, 0.0282, 0.0298], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:09:29,413 INFO [train.py:903] (3/4) Epoch 6, batch 1750, loss[loss=0.2682, simple_loss=0.3295, pruned_loss=0.1034, over 19404.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.337, pruned_loss=0.1061, over 3837239.55 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:09:31,982 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35892.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:09:56,884 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.48 vs. limit=5.0 +2023-04-01 07:10:33,851 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+02 6.695e+02 8.372e+02 1.116e+03 2.634e+03, threshold=1.674e+03, percent-clipped=7.0 +2023-04-01 07:10:33,872 INFO [train.py:903] (3/4) Epoch 6, batch 1800, loss[loss=0.2619, simple_loss=0.3359, pruned_loss=0.09395, over 19774.00 frames. ], tot_loss[loss=0.2748, simple_loss=0.3373, pruned_loss=0.1062, over 3830313.27 frames. 
], batch size: 54, lr: 1.41e-02, grad_scale: 8.0 +2023-04-01 07:10:36,673 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9325, 1.2645, 0.9620, 0.9630, 1.1217, 0.9155, 0.8380, 1.2357], + device='cuda:3'), covar=tensor([0.0391, 0.0505, 0.0843, 0.0401, 0.0336, 0.0790, 0.0445, 0.0307], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0276, 0.0315, 0.0241, 0.0228, 0.0307, 0.0281, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:11:11,642 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8619, 1.9415, 1.8691, 2.8139, 1.8381, 2.6619, 2.6436, 1.8522], + device='cuda:3'), covar=tensor([0.2552, 0.1907, 0.0990, 0.1126, 0.2275, 0.0805, 0.1823, 0.1812], + device='cuda:3'), in_proj_covar=tensor([0.0663, 0.0647, 0.0579, 0.0805, 0.0685, 0.0572, 0.0701, 0.0610], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 07:11:31,916 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 07:11:36,739 INFO [train.py:903] (3/4) Epoch 6, batch 1850, loss[loss=0.2637, simple_loss=0.3114, pruned_loss=0.108, over 19744.00 frames. ], tot_loss[loss=0.2741, simple_loss=0.3368, pruned_loss=0.1057, over 3831835.02 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:11:59,170 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36007.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:12:11,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 07:12:40,877 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.024e+02 7.330e+02 9.053e+02 1.086e+03 1.723e+03, threshold=1.811e+03, percent-clipped=2.0 +2023-04-01 07:12:40,895 INFO [train.py:903] (3/4) Epoch 6, batch 1900, loss[loss=0.2465, simple_loss=0.3122, pruned_loss=0.09035, over 19340.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3374, pruned_loss=0.106, over 3829745.66 frames. ], batch size: 47, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:12:57,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 07:13:04,075 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 07:13:26,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5574, 2.2594, 1.5926, 1.6001, 2.0107, 1.3317, 1.3359, 1.7704], + device='cuda:3'), covar=tensor([0.0713, 0.0533, 0.0843, 0.0536, 0.0423, 0.0882, 0.0561, 0.0409], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0278, 0.0311, 0.0240, 0.0230, 0.0308, 0.0284, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:13:27,614 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 07:13:29,155 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36078.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:13:42,462 INFO [train.py:903] (3/4) Epoch 6, batch 1950, loss[loss=0.2411, simple_loss=0.2991, pruned_loss=0.09152, over 19376.00 frames. ], tot_loss[loss=0.2748, simple_loss=0.3373, pruned_loss=0.1062, over 3826636.99 frames. 
], batch size: 47, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:14:00,226 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36103.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:14:04,978 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:35,250 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,151 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36133.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:14:36,381 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8571, 1.4476, 1.4808, 1.7784, 1.6140, 1.6454, 1.5225, 1.5975], + device='cuda:3'), covar=tensor([0.0772, 0.1353, 0.1213, 0.0801, 0.1068, 0.0412, 0.0904, 0.0620], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0364, 0.0287, 0.0238, 0.0307, 0.0245, 0.0272, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:14:45,005 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.894e+02 6.352e+02 7.868e+02 9.806e+02 1.510e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-04-01 07:14:45,023 INFO [train.py:903] (3/4) Epoch 6, batch 2000, loss[loss=0.2086, simple_loss=0.2753, pruned_loss=0.07094, over 19309.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3368, pruned_loss=0.1058, over 3825034.53 frames. ], batch size: 44, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:24,944 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36173.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:15:30,618 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8630, 1.9210, 1.7572, 2.8038, 1.9005, 2.6130, 2.4262, 1.7406], + device='cuda:3'), covar=tensor([0.2435, 0.1899, 0.1070, 0.1064, 0.2191, 0.0784, 0.1968, 0.1902], + device='cuda:3'), in_proj_covar=tensor([0.0663, 0.0650, 0.0578, 0.0808, 0.0685, 0.0571, 0.0702, 0.0609], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 07:15:42,599 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 07:15:42,939 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5852, 1.4240, 1.3489, 2.0139, 1.7925, 2.0971, 2.1329, 1.7910], + device='cuda:3'), covar=tensor([0.0750, 0.0892, 0.1010, 0.0888, 0.0815, 0.0610, 0.0842, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0226, 0.0244, 0.0236, 0.0272, 0.0262, 0.0225, 0.0224, 0.0216], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 07:15:46,070 INFO [train.py:903] (3/4) Epoch 6, batch 2050, loss[loss=0.2726, simple_loss=0.3341, pruned_loss=0.1056, over 19764.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3357, pruned_loss=0.1047, over 3834879.26 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:15:57,095 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36199.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:16:00,523 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. 
Duration: 0.95 +2023-04-01 07:16:03,078 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 07:16:22,976 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 07:16:47,774 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.690e+02 8.798e+02 1.173e+03 2.442e+03, threshold=1.760e+03, percent-clipped=12.0 +2023-04-01 07:16:47,793 INFO [train.py:903] (3/4) Epoch 6, batch 2100, loss[loss=0.2472, simple_loss=0.3286, pruned_loss=0.08287, over 19620.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3357, pruned_loss=0.1046, over 3836246.76 frames. ], batch size: 57, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:16:53,764 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9865, 1.7741, 1.5672, 1.9502, 2.0046, 1.7500, 1.6551, 1.8656], + device='cuda:3'), covar=tensor([0.0719, 0.1292, 0.1213, 0.0740, 0.0896, 0.0453, 0.0925, 0.0588], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0363, 0.0287, 0.0237, 0.0304, 0.0244, 0.0270, 0.0231], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:16:57,879 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36248.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:17,867 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36263.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:18,600 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 07:17:39,982 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 07:17:49,931 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:49,971 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:17:51,847 INFO [train.py:903] (3/4) Epoch 6, batch 2150, loss[loss=0.301, simple_loss=0.3663, pruned_loss=0.1179, over 19778.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3357, pruned_loss=0.1047, over 3824807.31 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 16.0 +2023-04-01 07:18:53,907 INFO [train.py:903] (3/4) Epoch 6, batch 2200, loss[loss=0.2813, simple_loss=0.3394, pruned_loss=0.1117, over 19846.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3359, pruned_loss=0.1048, over 3825633.30 frames. ], batch size: 52, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:18:55,064 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 6.300e+02 8.031e+02 1.073e+03 2.013e+03, threshold=1.606e+03, percent-clipped=1.0 +2023-04-01 07:19:57,018 INFO [train.py:903] (3/4) Epoch 6, batch 2250, loss[loss=0.3108, simple_loss=0.3525, pruned_loss=0.1345, over 13333.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3365, pruned_loss=0.1054, over 3807965.20 frames. ], batch size: 136, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:58,359 INFO [train.py:903] (3/4) Epoch 6, batch 2300, loss[loss=0.2867, simple_loss=0.3506, pruned_loss=0.1114, over 19321.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3363, pruned_loss=0.1051, over 3814313.29 frames. 
], batch size: 66, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:20:59,554 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.730e+02 6.689e+02 7.482e+02 9.822e+02 1.768e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-04-01 07:21:14,335 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 07:22:00,470 INFO [train.py:903] (3/4) Epoch 6, batch 2350, loss[loss=0.2539, simple_loss=0.3153, pruned_loss=0.09625, over 19715.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3346, pruned_loss=0.1041, over 3800348.80 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 8.0 +2023-04-01 07:22:20,239 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36504.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:22:24,766 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-01 07:22:43,441 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 07:22:49,437 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:00,583 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 07:23:02,882 INFO [train.py:903] (3/4) Epoch 6, batch 2400, loss[loss=0.2918, simple_loss=0.3556, pruned_loss=0.114, over 19777.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3346, pruned_loss=0.1039, over 3794108.26 frames. ], batch size: 56, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:23:04,017 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.742e+02 5.696e+02 7.249e+02 9.157e+02 1.479e+03, threshold=1.450e+03, percent-clipped=0.0 +2023-04-01 07:23:07,407 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:08,806 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36544.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:39,050 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36569.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:23:48,693 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-01 07:24:06,981 INFO [train.py:903] (3/4) Epoch 6, batch 2450, loss[loss=0.2506, simple_loss=0.3203, pruned_loss=0.0904, over 19680.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3345, pruned_loss=0.1037, over 3807127.53 frames. ], batch size: 53, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:24:46,707 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:51,078 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36626.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:24:56,983 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:25:07,903 INFO [train.py:903] (3/4) Epoch 6, batch 2500, loss[loss=0.2589, simple_loss=0.3289, pruned_loss=0.09443, over 19704.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3342, pruned_loss=0.1038, over 3819525.67 frames. 
], batch size: 59, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:25:09,076 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.265e+02 8.006e+02 9.274e+02 1.564e+03, threshold=1.601e+03, percent-clipped=1.0 +2023-04-01 07:25:30,341 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:26:09,577 INFO [train.py:903] (3/4) Epoch 6, batch 2550, loss[loss=0.2924, simple_loss=0.3543, pruned_loss=0.1153, over 19534.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3345, pruned_loss=0.1041, over 3814166.14 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:26:44,768 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36718.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:26:47,042 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4907, 1.5497, 2.1196, 2.7310, 2.2352, 2.2933, 2.1497, 2.6538], + device='cuda:3'), covar=tensor([0.0777, 0.1930, 0.1190, 0.0696, 0.1092, 0.0393, 0.0843, 0.0523], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0365, 0.0287, 0.0234, 0.0303, 0.0240, 0.0269, 0.0229], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:27:05,086 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 07:27:10,912 INFO [train.py:903] (3/4) Epoch 6, batch 2600, loss[loss=0.2926, simple_loss=0.3534, pruned_loss=0.1159, over 18231.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3356, pruned_loss=0.1047, over 3821019.64 frames. ], batch size: 83, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:27:12,686 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.492e+02 8.253e+02 1.085e+03 2.742e+03, threshold=1.651e+03, percent-clipped=10.0 +2023-04-01 07:28:14,514 INFO [train.py:903] (3/4) Epoch 6, batch 2650, loss[loss=0.2223, simple_loss=0.2939, pruned_loss=0.07538, over 19583.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3342, pruned_loss=0.1038, over 3826816.62 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:28:19,433 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1573, 2.1132, 2.2824, 3.4756, 2.2162, 3.5994, 3.2778, 2.0241], + device='cuda:3'), covar=tensor([0.2708, 0.2078, 0.0989, 0.1243, 0.2487, 0.0728, 0.1794, 0.1883], + device='cuda:3'), in_proj_covar=tensor([0.0659, 0.0650, 0.0573, 0.0801, 0.0679, 0.0567, 0.0694, 0.0608], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 07:28:21,697 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9300, 1.3470, 0.9998, 0.9540, 1.1897, 0.8853, 0.6923, 1.2473], + device='cuda:3'), covar=tensor([0.0455, 0.0532, 0.0880, 0.0470, 0.0375, 0.1017, 0.0614, 0.0328], + device='cuda:3'), in_proj_covar=tensor([0.0283, 0.0277, 0.0309, 0.0239, 0.0226, 0.0311, 0.0281, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:28:37,581 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 07:28:55,412 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36823.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:29:16,462 INFO [train.py:903] (3/4) Epoch 6, batch 2700, loss[loss=0.2779, simple_loss=0.3448, pruned_loss=0.1054, over 19771.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3334, pruned_loss=0.1027, over 3839364.91 frames. ], batch size: 56, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:29:17,597 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.977e+02 6.342e+02 7.427e+02 9.812e+02 2.890e+03, threshold=1.485e+03, percent-clipped=2.0 +2023-04-01 07:29:30,673 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2640, 3.8233, 2.3526, 3.4545, 1.1502, 3.4992, 3.5585, 3.5836], + device='cuda:3'), covar=tensor([0.0746, 0.1213, 0.2067, 0.0739, 0.3564, 0.0934, 0.0741, 0.0995], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0314, 0.0376, 0.0289, 0.0354, 0.0313, 0.0293, 0.0326], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:30:18,803 INFO [train.py:903] (3/4) Epoch 6, batch 2750, loss[loss=0.233, simple_loss=0.2941, pruned_loss=0.08592, over 19747.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.334, pruned_loss=0.1037, over 3808523.06 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:30:51,740 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0265, 3.3177, 1.9061, 2.0643, 3.0066, 1.5779, 1.2362, 1.8705], + device='cuda:3'), covar=tensor([0.0904, 0.0400, 0.0784, 0.0571, 0.0342, 0.0992, 0.0803, 0.0596], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0276, 0.0307, 0.0237, 0.0224, 0.0306, 0.0279, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:30:51,760 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36914.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:09,604 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:22,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:31:23,033 INFO [train.py:903] (3/4) Epoch 6, batch 2800, loss[loss=0.2494, simple_loss=0.3289, pruned_loss=0.08495, over 19284.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3338, pruned_loss=0.1036, over 3810568.60 frames. 
], batch size: 66, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:31:24,217 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+02 7.168e+02 8.701e+02 1.243e+03 3.330e+03, threshold=1.740e+03, percent-clipped=17.0 +2023-04-01 07:31:43,504 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5823, 1.5285, 1.3839, 1.5170, 3.0760, 1.0416, 1.9904, 3.2121], + device='cuda:3'), covar=tensor([0.0335, 0.2300, 0.2472, 0.1641, 0.0613, 0.2397, 0.1381, 0.0368], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0317, 0.0319, 0.0296, 0.0312, 0.0317, 0.0293, 0.0308], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:31:56,917 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:00,392 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36970.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:02,129 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.45 vs. limit=5.0 +2023-04-01 07:32:06,249 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:08,545 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:32:26,411 INFO [train.py:903] (3/4) Epoch 6, batch 2850, loss[loss=0.2214, simple_loss=0.2968, pruned_loss=0.07298, over 19728.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3342, pruned_loss=0.1037, over 3813726.31 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 8.0 +2023-04-01 07:33:05,605 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:33:28,652 INFO [train.py:903] (3/4) Epoch 6, batch 2900, loss[loss=0.2606, simple_loss=0.3342, pruned_loss=0.09354, over 19542.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3341, pruned_loss=0.1043, over 3798418.55 frames. ], batch size: 56, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:33:28,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 07:33:29,876 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.105e+02 7.947e+02 1.025e+03 2.308e+03, threshold=1.589e+03, percent-clipped=2.0 +2023-04-01 07:33:55,110 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37062.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:34:20,477 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:24,058 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37085.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:28,768 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37089.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:34:29,562 INFO [train.py:903] (3/4) Epoch 6, batch 2950, loss[loss=0.24, simple_loss=0.3169, pruned_loss=0.08153, over 19685.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3338, pruned_loss=0.1038, over 3807394.35 frames. ], batch size: 53, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,157 INFO [train.py:903] (3/4) Epoch 6, batch 3000, loss[loss=0.3201, simple_loss=0.3708, pruned_loss=0.1347, over 13507.00 frames. 
], tot_loss[loss=0.273, simple_loss=0.3355, pruned_loss=0.1052, over 3794894.48 frames. ], batch size: 135, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:35:31,158 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 07:35:43,638 INFO [train.py:937] (3/4) Epoch 6, validation: loss=0.1968, simple_loss=0.2962, pruned_loss=0.04867, over 944034.00 frames. +2023-04-01 07:35:43,639 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 07:35:44,844 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.497e+02 6.001e+02 7.289e+02 9.626e+02 1.809e+03, threshold=1.458e+03, percent-clipped=5.0 +2023-04-01 07:35:48,618 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 07:36:18,750 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37167.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:36:29,238 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4545, 1.2130, 1.6261, 1.3008, 2.6736, 3.6234, 3.3565, 3.7847], + device='cuda:3'), covar=tensor([0.1359, 0.3103, 0.2891, 0.1828, 0.0450, 0.0131, 0.0211, 0.0126], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0283, 0.0316, 0.0249, 0.0201, 0.0125, 0.0202, 0.0153], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:36:30,324 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37177.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:36:45,871 INFO [train.py:903] (3/4) Epoch 6, batch 3050, loss[loss=0.2411, simple_loss=0.3069, pruned_loss=0.08767, over 19402.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3351, pruned_loss=0.1051, over 3793482.04 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:48,504 INFO [train.py:903] (3/4) Epoch 6, batch 3100, loss[loss=0.2341, simple_loss=0.2955, pruned_loss=0.08639, over 19772.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3349, pruned_loss=0.1049, over 3802126.63 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:37:49,789 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 6.699e+02 8.375e+02 1.038e+03 2.239e+03, threshold=1.675e+03, percent-clipped=7.0 +2023-04-01 07:38:08,564 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9651, 5.3264, 2.7875, 4.7110, 1.2276, 5.1352, 5.2574, 5.2654], + device='cuda:3'), covar=tensor([0.0422, 0.0869, 0.1923, 0.0561, 0.3919, 0.0605, 0.0492, 0.0727], + device='cuda:3'), in_proj_covar=tensor([0.0362, 0.0322, 0.0380, 0.0292, 0.0357, 0.0317, 0.0296, 0.0331], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 07:38:28,951 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37273.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:42,259 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37282.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 07:38:46,571 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37286.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:38:50,733 INFO [train.py:903] (3/4) Epoch 6, batch 3150, loss[loss=0.2674, simple_loss=0.3321, pruned_loss=0.1014, over 18761.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.3355, pruned_loss=0.1052, over 3805342.50 frames. 
], batch size: 74, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:38:52,204 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4903, 1.0678, 1.2243, 1.2609, 2.1377, 0.8447, 1.7894, 2.1972], + device='cuda:3'), covar=tensor([0.0570, 0.2560, 0.2548, 0.1370, 0.0775, 0.2004, 0.1001, 0.0564], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0316, 0.0315, 0.0290, 0.0310, 0.0312, 0.0288, 0.0308], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:39:13,511 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 07:39:27,508 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37320.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:49,090 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:51,049 INFO [train.py:903] (3/4) Epoch 6, batch 3200, loss[loss=0.2896, simple_loss=0.3549, pruned_loss=0.1122, over 19715.00 frames. ], tot_loss[loss=0.275, simple_loss=0.3374, pruned_loss=0.1063, over 3802392.32 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 8.0 +2023-04-01 07:39:52,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.326e+02 6.253e+02 8.171e+02 9.916e+02 1.975e+03, threshold=1.634e+03, percent-clipped=4.0 +2023-04-01 07:39:52,632 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:39:58,108 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37345.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:20,457 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,439 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:24,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:29,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37370.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:42,615 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:50,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37388.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:40:52,606 INFO [train.py:903] (3/4) Epoch 6, batch 3250, loss[loss=0.2315, simple_loss=0.3018, pruned_loss=0.08054, over 19761.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.337, pruned_loss=0.1067, over 3794677.71 frames. ], batch size: 51, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:45,891 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37433.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:41:48,059 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:41:53,529 INFO [train.py:903] (3/4) Epoch 6, batch 3300, loss[loss=0.2602, simple_loss=0.3397, pruned_loss=0.09042, over 19697.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3366, pruned_loss=0.1059, over 3808969.42 frames. 
], batch size: 59, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:41:57,411 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 6.108e+02 8.102e+02 1.002e+03 3.053e+03, threshold=1.620e+03, percent-clipped=3.0 +2023-04-01 07:42:01,111 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 07:42:17,161 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37458.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:42:44,807 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:42:56,289 INFO [train.py:903] (3/4) Epoch 6, batch 3350, loss[loss=0.282, simple_loss=0.3417, pruned_loss=0.1112, over 19791.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3376, pruned_loss=0.1066, over 3820446.07 frames. ], batch size: 56, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:43:04,921 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8308, 4.4227, 2.6326, 3.8896, 1.1065, 3.9940, 4.0455, 4.2715], + device='cuda:3'), covar=tensor([0.0526, 0.0984, 0.1960, 0.0688, 0.4056, 0.0995, 0.0721, 0.0802], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0324, 0.0385, 0.0296, 0.0363, 0.0320, 0.0300, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 07:43:49,279 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0695, 2.0577, 1.7363, 1.7627, 1.6276, 1.8419, 0.9073, 1.5242], + device='cuda:3'), covar=tensor([0.0208, 0.0274, 0.0203, 0.0238, 0.0386, 0.0305, 0.0482, 0.0366], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0298, 0.0297, 0.0315, 0.0387, 0.0309, 0.0283, 0.0302], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:43:56,139 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37538.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:43:57,994 INFO [train.py:903] (3/4) Epoch 6, batch 3400, loss[loss=0.2883, simple_loss=0.3472, pruned_loss=0.1147, over 18194.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3363, pruned_loss=0.1055, over 3818482.13 frames. ], batch size: 83, lr: 1.38e-02, grad_scale: 4.0 +2023-04-01 07:44:00,243 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 6.377e+02 8.364e+02 1.096e+03 2.128e+03, threshold=1.673e+03, percent-clipped=5.0 +2023-04-01 07:44:27,169 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37563.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 07:44:59,262 INFO [train.py:903] (3/4) Epoch 6, batch 3450, loss[loss=0.2857, simple_loss=0.3518, pruned_loss=0.1098, over 18258.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3365, pruned_loss=0.1057, over 3822651.02 frames. ], batch size: 83, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:45:07,191 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 07:45:49,573 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37630.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:02,066 INFO [train.py:903] (3/4) Epoch 6, batch 3500, loss[loss=0.2932, simple_loss=0.356, pruned_loss=0.1152, over 18780.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3342, pruned_loss=0.1042, over 3825781.81 frames. 
], batch size: 74, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:46:04,578 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.340e+02 8.060e+02 1.060e+03 3.220e+03, threshold=1.612e+03, percent-clipped=3.0 +2023-04-01 07:46:07,151 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.3414, 4.7938, 2.9211, 4.2749, 1.5503, 4.2604, 4.5433, 4.7405], + device='cuda:3'), covar=tensor([0.0492, 0.0992, 0.1996, 0.0673, 0.3786, 0.0814, 0.0724, 0.0811], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0323, 0.0383, 0.0293, 0.0360, 0.0318, 0.0297, 0.0331], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 07:46:07,358 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37644.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:46:38,300 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:04,850 INFO [train.py:903] (3/4) Epoch 6, batch 3550, loss[loss=0.342, simple_loss=0.4054, pruned_loss=0.1393, over 19595.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3346, pruned_loss=0.1051, over 3829661.49 frames. ], batch size: 61, lr: 1.37e-02, grad_scale: 4.0 +2023-04-01 07:47:07,128 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37691.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:34,916 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37716.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:49,120 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:47:57,202 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2168, 1.2231, 1.5582, 0.9220, 2.4120, 2.9227, 2.6236, 3.0141], + device='cuda:3'), covar=tensor([0.1482, 0.3051, 0.2962, 0.2207, 0.0501, 0.0197, 0.0287, 0.0187], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0279, 0.0314, 0.0246, 0.0200, 0.0125, 0.0202, 0.0153], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:48:01,740 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6091, 1.6819, 1.3081, 1.1935, 1.1429, 1.3802, 0.1850, 0.7079], + device='cuda:3'), covar=tensor([0.0265, 0.0251, 0.0186, 0.0245, 0.0515, 0.0261, 0.0474, 0.0425], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0298, 0.0296, 0.0314, 0.0384, 0.0309, 0.0282, 0.0301], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:48:03,009 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:05,908 INFO [train.py:903] (3/4) Epoch 6, batch 3600, loss[loss=0.3299, simple_loss=0.3864, pruned_loss=0.1367, over 18243.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3342, pruned_loss=0.1049, over 3816277.20 frames. 
], batch size: 84, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:48:08,222 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.206e+02 7.040e+02 8.572e+02 1.227e+03 4.209e+03, threshold=1.714e+03, percent-clipped=12.0 +2023-04-01 07:48:12,119 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:33,210 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:48:35,748 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 07:48:41,991 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5484, 1.5714, 1.6483, 2.1281, 1.2881, 1.6906, 1.9772, 1.6359], + device='cuda:3'), covar=tensor([0.2313, 0.1829, 0.1024, 0.0973, 0.2090, 0.0946, 0.2147, 0.1731], + device='cuda:3'), in_proj_covar=tensor([0.0669, 0.0664, 0.0578, 0.0820, 0.0693, 0.0583, 0.0705, 0.0617], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 07:49:08,249 INFO [train.py:903] (3/4) Epoch 6, batch 3650, loss[loss=0.2741, simple_loss=0.3408, pruned_loss=0.1037, over 18772.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.335, pruned_loss=0.1056, over 3782035.55 frames. ], batch size: 74, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:49:49,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4040, 1.3217, 1.7292, 1.5256, 3.1193, 4.4811, 4.4981, 4.8559], + device='cuda:3'), covar=tensor([0.1459, 0.2997, 0.2931, 0.1796, 0.0408, 0.0105, 0.0136, 0.0074], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0281, 0.0314, 0.0247, 0.0200, 0.0125, 0.0202, 0.0154], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 07:50:10,246 INFO [train.py:903] (3/4) Epoch 6, batch 3700, loss[loss=0.2654, simple_loss=0.336, pruned_loss=0.09741, over 19530.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3338, pruned_loss=0.1047, over 3784408.24 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:50:11,766 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:50:12,518 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 5.969e+02 7.272e+02 1.002e+03 1.787e+03, threshold=1.454e+03, percent-clipped=1.0 +2023-04-01 07:50:21,877 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4621, 1.0890, 1.3121, 1.1805, 2.1743, 0.8824, 1.6597, 2.1371], + device='cuda:3'), covar=tensor([0.0589, 0.2500, 0.2319, 0.1387, 0.0733, 0.1959, 0.0977, 0.0592], + device='cuda:3'), in_proj_covar=tensor([0.0299, 0.0317, 0.0316, 0.0291, 0.0314, 0.0315, 0.0291, 0.0308], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:50:48,321 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0641, 2.2059, 2.1702, 3.3035, 2.1149, 3.3230, 3.0165, 1.9738], + device='cuda:3'), covar=tensor([0.2627, 0.1933, 0.0914, 0.1273, 0.2596, 0.0717, 0.1813, 0.1754], + device='cuda:3'), in_proj_covar=tensor([0.0664, 0.0658, 0.0576, 0.0811, 0.0689, 0.0579, 0.0699, 0.0610], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 07:51:13,476 INFO [train.py:903] (3/4) Epoch 6, batch 3750, loss[loss=0.2194, simple_loss=0.2855, pruned_loss=0.07664, over 18997.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3328, pruned_loss=0.1038, over 3786279.85 frames. ], batch size: 42, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:16,179 INFO [train.py:903] (3/4) Epoch 6, batch 3800, loss[loss=0.2526, simple_loss=0.316, pruned_loss=0.09464, over 19381.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3324, pruned_loss=0.1035, over 3803842.95 frames. ], batch size: 47, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:52:18,421 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.762e+02 7.572e+02 9.178e+02 2.007e+03, threshold=1.514e+03, percent-clipped=4.0 +2023-04-01 07:52:47,826 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 07:53:17,777 INFO [train.py:903] (3/4) Epoch 6, batch 3850, loss[loss=0.2254, simple_loss=0.3018, pruned_loss=0.07452, over 19600.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3342, pruned_loss=0.1042, over 3804185.18 frames. ], batch size: 57, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:53:25,815 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37997.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:53:32,710 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38001.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:03,092 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:54:20,062 INFO [train.py:903] (3/4) Epoch 6, batch 3900, loss[loss=0.2864, simple_loss=0.3444, pruned_loss=0.1142, over 19634.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3347, pruned_loss=0.1047, over 3819881.53 frames. 
], batch size: 60, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:54:22,364 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.032e+02 6.773e+02 7.927e+02 9.711e+02 2.220e+03, threshold=1.585e+03, percent-clipped=5.0 +2023-04-01 07:55:08,056 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8479, 4.4635, 2.5248, 3.9340, 1.0508, 4.1057, 4.1802, 4.3455], + device='cuda:3'), covar=tensor([0.0587, 0.1058, 0.1910, 0.0668, 0.3843, 0.0656, 0.0595, 0.0728], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0328, 0.0387, 0.0297, 0.0362, 0.0322, 0.0302, 0.0336], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 07:55:24,003 INFO [train.py:903] (3/4) Epoch 6, batch 3950, loss[loss=0.318, simple_loss=0.3711, pruned_loss=0.1324, over 19481.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3329, pruned_loss=0.103, over 3831315.63 frames. ], batch size: 64, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:55:27,716 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 07:55:33,029 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:03,036 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 07:56:25,972 INFO [train.py:903] (3/4) Epoch 6, batch 4000, loss[loss=0.205, simple_loss=0.2725, pruned_loss=0.06875, over 19753.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3328, pruned_loss=0.1031, over 3824613.01 frames. ], batch size: 46, lr: 1.37e-02, grad_scale: 8.0 +2023-04-01 07:56:28,273 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 6.005e+02 7.523e+02 9.814e+02 1.567e+03, threshold=1.505e+03, percent-clipped=0.0 +2023-04-01 07:57:10,236 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 07:57:18,207 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3000, 3.8809, 2.2985, 3.5300, 0.9060, 3.4828, 3.5801, 3.7255], + device='cuda:3'), covar=tensor([0.0675, 0.1204, 0.2154, 0.0782, 0.4056, 0.0837, 0.0813, 0.0883], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0327, 0.0385, 0.0298, 0.0360, 0.0321, 0.0303, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 07:57:24,172 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0890, 3.6980, 2.0157, 2.0099, 3.1773, 1.5794, 1.1171, 2.0216], + device='cuda:3'), covar=tensor([0.0861, 0.0277, 0.0746, 0.0570, 0.0283, 0.0869, 0.0841, 0.0531], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0279, 0.0314, 0.0240, 0.0221, 0.0314, 0.0283, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 07:57:25,997 INFO [train.py:903] (3/4) Epoch 6, batch 4050, loss[loss=0.2665, simple_loss=0.3122, pruned_loss=0.1104, over 18523.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3335, pruned_loss=0.1033, over 3823315.72 frames. 
], batch size: 41, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:57:36,493 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7939, 4.2365, 4.4330, 4.3859, 1.5401, 4.0014, 3.6815, 4.0411], + device='cuda:3'), covar=tensor([0.0954, 0.0514, 0.0496, 0.0449, 0.4572, 0.0412, 0.0504, 0.1019], + device='cuda:3'), in_proj_covar=tensor([0.0565, 0.0485, 0.0671, 0.0543, 0.0623, 0.0401, 0.0420, 0.0616], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 07:58:27,529 INFO [train.py:903] (3/4) Epoch 6, batch 4100, loss[loss=0.2689, simple_loss=0.3281, pruned_loss=0.1049, over 19381.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3329, pruned_loss=0.1029, over 3834486.75 frames. ], batch size: 48, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 07:58:30,590 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.486e+02 7.817e+02 9.790e+02 2.532e+03, threshold=1.563e+03, percent-clipped=8.0 +2023-04-01 07:59:04,837 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 07:59:30,607 INFO [train.py:903] (3/4) Epoch 6, batch 4150, loss[loss=0.2656, simple_loss=0.3334, pruned_loss=0.09886, over 19759.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3329, pruned_loss=0.1029, over 3838956.96 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:34,880 INFO [train.py:903] (3/4) Epoch 6, batch 4200, loss[loss=0.2525, simple_loss=0.3148, pruned_loss=0.09511, over 19751.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3336, pruned_loss=0.1036, over 3835259.62 frames. ], batch size: 51, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:00:36,333 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38341.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:00:37,309 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.104e+02 7.462e+02 9.650e+02 2.123e+03, threshold=1.492e+03, percent-clipped=5.0 +2023-04-01 08:00:39,356 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 08:01:21,329 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38378.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:01:30,688 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5058, 1.1420, 1.3421, 1.1504, 2.1706, 0.8877, 1.7512, 2.1956], + device='cuda:3'), covar=tensor([0.0545, 0.2410, 0.2251, 0.1424, 0.0705, 0.1975, 0.0944, 0.0562], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0320, 0.0323, 0.0297, 0.0320, 0.0317, 0.0293, 0.0313], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:01:34,728 INFO [train.py:903] (3/4) Epoch 6, batch 4250, loss[loss=0.3281, simple_loss=0.3712, pruned_loss=0.1425, over 19780.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3334, pruned_loss=0.1037, over 3838495.76 frames. ], batch size: 56, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:01:48,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 08:02:01,564 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 08:02:03,924 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6097, 4.1900, 2.6159, 3.6897, 1.0703, 3.7435, 3.8190, 3.8996], + device='cuda:3'), covar=tensor([0.0589, 0.1049, 0.1901, 0.0716, 0.3847, 0.0829, 0.0687, 0.0815], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0323, 0.0382, 0.0294, 0.0361, 0.0321, 0.0304, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 08:02:34,847 INFO [train.py:903] (3/4) Epoch 6, batch 4300, loss[loss=0.2878, simple_loss=0.3411, pruned_loss=0.1172, over 13293.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3362, pruned_loss=0.1054, over 3814773.01 frames. ], batch size: 136, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:02:37,124 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+02 6.582e+02 8.580e+02 1.078e+03 2.349e+03, threshold=1.716e+03, percent-clipped=8.0 +2023-04-01 08:02:55,998 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38456.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:02:58,139 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0701, 3.6602, 2.0490, 1.7890, 3.1446, 1.6401, 1.3216, 1.9979], + device='cuda:3'), covar=tensor([0.0673, 0.0244, 0.0571, 0.0502, 0.0347, 0.0683, 0.0644, 0.0435], + device='cuda:3'), in_proj_covar=tensor([0.0284, 0.0284, 0.0319, 0.0245, 0.0229, 0.0318, 0.0289, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:03:27,667 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 08:03:35,904 INFO [train.py:903] (3/4) Epoch 6, batch 4350, loss[loss=0.1979, simple_loss=0.2735, pruned_loss=0.06115, over 19782.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.336, pruned_loss=0.1056, over 3822629.10 frames. ], batch size: 47, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:04,774 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-01 08:04:40,411 INFO [train.py:903] (3/4) Epoch 6, batch 4400, loss[loss=0.2784, simple_loss=0.3451, pruned_loss=0.1059, over 19617.00 frames. ], tot_loss[loss=0.2737, simple_loss=0.3361, pruned_loss=0.1056, over 3813247.32 frames. ], batch size: 50, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:04:42,534 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+02 6.588e+02 8.122e+02 1.160e+03 2.348e+03, threshold=1.624e+03, percent-clipped=4.0 +2023-04-01 08:05:02,028 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7233, 1.7827, 1.7799, 2.5644, 1.6326, 2.1662, 2.2425, 1.7236], + device='cuda:3'), covar=tensor([0.2231, 0.1779, 0.0982, 0.0910, 0.2089, 0.0849, 0.1929, 0.1740], + device='cuda:3'), in_proj_covar=tensor([0.0671, 0.0672, 0.0584, 0.0814, 0.0698, 0.0585, 0.0714, 0.0618], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:05:05,908 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 08:05:13,686 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 08:05:40,417 INFO [train.py:903] (3/4) Epoch 6, batch 4450, loss[loss=0.2352, simple_loss=0.303, pruned_loss=0.0837, over 19743.00 frames. 
], tot_loss[loss=0.2737, simple_loss=0.3361, pruned_loss=0.1056, over 3822662.57 frames. ], batch size: 51, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:15,731 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9056, 1.7778, 1.9481, 2.2245, 4.4381, 1.3753, 2.1945, 4.4220], + device='cuda:3'), covar=tensor([0.0276, 0.2226, 0.2245, 0.1300, 0.0488, 0.2140, 0.1284, 0.0273], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0316, 0.0318, 0.0292, 0.0317, 0.0312, 0.0290, 0.0309], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:06:42,148 INFO [train.py:903] (3/4) Epoch 6, batch 4500, loss[loss=0.3086, simple_loss=0.3723, pruned_loss=0.1225, over 19659.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3345, pruned_loss=0.1042, over 3829976.31 frames. ], batch size: 58, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:06:44,515 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 6.139e+02 7.598e+02 9.442e+02 2.713e+03, threshold=1.520e+03, percent-clipped=3.0 +2023-04-01 08:07:42,587 INFO [train.py:903] (3/4) Epoch 6, batch 4550, loss[loss=0.3502, simple_loss=0.3894, pruned_loss=0.1555, over 13641.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3345, pruned_loss=0.1041, over 3802452.99 frames. ], batch size: 136, lr: 1.36e-02, grad_scale: 8.0 +2023-04-01 08:07:53,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 08:08:10,869 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38712.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:17,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 08:08:22,020 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:41,576 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:08:45,375 INFO [train.py:903] (3/4) Epoch 6, batch 4600, loss[loss=0.2433, simple_loss=0.3057, pruned_loss=0.09048, over 19724.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3344, pruned_loss=0.104, over 3799599.13 frames. ], batch size: 51, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:08:47,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 6.588e+02 8.068e+02 1.040e+03 1.807e+03, threshold=1.614e+03, percent-clipped=3.0 +2023-04-01 08:09:02,884 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-01 08:09:45,875 INFO [train.py:903] (3/4) Epoch 6, batch 4650, loss[loss=0.3004, simple_loss=0.3474, pruned_loss=0.1267, over 19735.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3346, pruned_loss=0.1035, over 3805182.12 frames. ], batch size: 51, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:01,546 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. 
Duration: 0.92 +2023-04-01 08:10:05,092 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2460, 2.9313, 2.1368, 2.7456, 1.1586, 2.7587, 2.6271, 2.7216], + device='cuda:3'), covar=tensor([0.1139, 0.1384, 0.2066, 0.0863, 0.3477, 0.1154, 0.1037, 0.1280], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0323, 0.0384, 0.0290, 0.0357, 0.0318, 0.0298, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 08:10:11,691 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 08:10:42,127 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9405, 1.5110, 1.6994, 2.1444, 1.7695, 1.8107, 1.7011, 1.8874], + device='cuda:3'), covar=tensor([0.0863, 0.1597, 0.1256, 0.0822, 0.1144, 0.0460, 0.1012, 0.0639], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0358, 0.0285, 0.0235, 0.0301, 0.0241, 0.0271, 0.0229], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:10:43,249 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1125, 1.2709, 1.5683, 0.9430, 2.5144, 3.3171, 3.1207, 3.5325], + device='cuda:3'), covar=tensor([0.1485, 0.2813, 0.2785, 0.1992, 0.0409, 0.0149, 0.0201, 0.0143], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0288, 0.0321, 0.0251, 0.0203, 0.0130, 0.0205, 0.0156], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:10:43,262 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38837.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:46,234 INFO [train.py:903] (3/4) Epoch 6, batch 4700, loss[loss=0.3303, simple_loss=0.3768, pruned_loss=0.1419, over 19771.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3352, pruned_loss=0.1035, over 3808089.52 frames. ], batch size: 54, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:10:47,745 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:10:48,619 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.118e+02 6.701e+02 8.528e+02 1.085e+03 2.106e+03, threshold=1.706e+03, percent-clipped=3.0 +2023-04-01 08:10:52,862 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 08:11:00,086 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8447, 1.9104, 2.2180, 2.1521, 2.9514, 3.4914, 3.5546, 3.8572], + device='cuda:3'), covar=tensor([0.1490, 0.3274, 0.3142, 0.1799, 0.1095, 0.0479, 0.0271, 0.0175], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0290, 0.0323, 0.0253, 0.0204, 0.0131, 0.0206, 0.0156], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:11:07,237 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 08:11:46,444 INFO [train.py:903] (3/4) Epoch 6, batch 4750, loss[loss=0.3589, simple_loss=0.3885, pruned_loss=0.1647, over 13123.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3338, pruned_loss=0.1033, over 3807776.02 frames. 
], batch size: 135, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:47,942 INFO [train.py:903] (3/4) Epoch 6, batch 4800, loss[loss=0.3569, simple_loss=0.3974, pruned_loss=0.1582, over 18853.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3355, pruned_loss=0.1048, over 3814070.43 frames. ], batch size: 74, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:12:52,285 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 6.641e+02 7.688e+02 1.063e+03 2.770e+03, threshold=1.538e+03, percent-clipped=4.0 +2023-04-01 08:13:49,882 INFO [train.py:903] (3/4) Epoch 6, batch 4850, loss[loss=0.2589, simple_loss=0.3268, pruned_loss=0.09551, over 19592.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3344, pruned_loss=0.104, over 3832492.44 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:10,477 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5676, 1.4723, 1.3494, 1.6473, 1.5976, 1.5486, 1.3860, 1.6046], + device='cuda:3'), covar=tensor([0.0744, 0.1149, 0.1092, 0.0669, 0.0875, 0.0422, 0.0902, 0.0558], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0365, 0.0292, 0.0241, 0.0307, 0.0247, 0.0276, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:14:14,747 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 08:14:36,520 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 08:14:42,289 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 08:14:42,317 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 08:14:51,531 INFO [train.py:903] (3/4) Epoch 6, batch 4900, loss[loss=0.2729, simple_loss=0.3411, pruned_loss=0.1024, over 19722.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3347, pruned_loss=0.1039, over 3836131.31 frames. ], batch size: 51, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:14:51,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 08:14:55,128 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 6.820e+02 8.455e+02 1.082e+03 3.554e+03, threshold=1.691e+03, percent-clipped=3.0 +2023-04-01 08:15:00,044 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3524, 3.9001, 2.4816, 3.5040, 1.1272, 3.3027, 3.5537, 3.7123], + device='cuda:3'), covar=tensor([0.0587, 0.1038, 0.1983, 0.0786, 0.3890, 0.1134, 0.0806, 0.0924], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0317, 0.0378, 0.0290, 0.0355, 0.0316, 0.0296, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 08:15:12,461 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 08:15:36,705 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-01 08:15:38,508 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1594, 5.4503, 2.8582, 4.7202, 1.2294, 5.2193, 5.3114, 5.5776], + device='cuda:3'), covar=tensor([0.0395, 0.1045, 0.1960, 0.0639, 0.4055, 0.0619, 0.0678, 0.0724], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0318, 0.0380, 0.0291, 0.0357, 0.0316, 0.0297, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 08:15:52,288 INFO [train.py:903] (3/4) Epoch 6, batch 4950, loss[loss=0.3279, simple_loss=0.3806, pruned_loss=0.1376, over 19491.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3343, pruned_loss=0.1037, over 3833972.41 frames. ], batch size: 64, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:15:56,167 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:11,566 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 08:16:27,973 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39118.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:16:34,793 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 08:16:54,839 INFO [train.py:903] (3/4) Epoch 6, batch 5000, loss[loss=0.1927, simple_loss=0.261, pruned_loss=0.06218, over 19723.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.334, pruned_loss=0.1035, over 3842063.66 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:16:59,188 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.473e+02 6.699e+02 7.802e+02 1.019e+03 2.317e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 08:17:05,624 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 08:17:14,765 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 08:17:19,804 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9396, 4.2825, 4.6680, 4.5630, 1.5442, 4.2682, 3.8513, 4.2112], + device='cuda:3'), covar=tensor([0.1071, 0.0685, 0.0467, 0.0417, 0.4628, 0.0430, 0.0469, 0.0983], + device='cuda:3'), in_proj_covar=tensor([0.0565, 0.0481, 0.0659, 0.0544, 0.0616, 0.0404, 0.0411, 0.0606], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 08:17:51,519 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:17:57,105 INFO [train.py:903] (3/4) Epoch 6, batch 5050, loss[loss=0.2571, simple_loss=0.3378, pruned_loss=0.0882, over 19539.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3342, pruned_loss=0.1037, over 3816217.60 frames. ], batch size: 56, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:18:12,172 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39203.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:18:29,777 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 08:18:43,123 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-01 08:18:53,228 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2570, 1.2042, 1.5789, 1.1317, 2.6836, 3.4807, 3.3256, 3.7022], + device='cuda:3'), covar=tensor([0.1434, 0.3115, 0.2748, 0.1894, 0.0391, 0.0132, 0.0197, 0.0133], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0286, 0.0318, 0.0248, 0.0202, 0.0129, 0.0203, 0.0154], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:18:57,176 INFO [train.py:903] (3/4) Epoch 6, batch 5100, loss[loss=0.3236, simple_loss=0.3745, pruned_loss=0.1364, over 19536.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3333, pruned_loss=0.1029, over 3826191.33 frames. ], batch size: 56, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:19:00,439 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.152e+02 6.811e+02 8.395e+02 1.062e+03 1.934e+03, threshold=1.679e+03, percent-clipped=3.0 +2023-04-01 08:19:05,509 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 08:19:08,954 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 08:19:13,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 08:19:17,188 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.16 vs. limit=5.0 +2023-04-01 08:19:38,200 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 08:19:57,716 INFO [train.py:903] (3/4) Epoch 6, batch 5150, loss[loss=0.2437, simple_loss=0.3165, pruned_loss=0.08545, over 19571.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3324, pruned_loss=0.1024, over 3828764.81 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2023-04-01 08:20:07,874 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.78 vs. limit=5.0 +2023-04-01 08:20:09,380 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 08:20:11,764 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39300.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:13,884 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:20:43,355 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 08:21:00,829 INFO [train.py:903] (3/4) Epoch 6, batch 5200, loss[loss=0.255, simple_loss=0.3238, pruned_loss=0.09312, over 19694.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3341, pruned_loss=0.1034, over 3819256.28 frames. ], batch size: 53, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:21:04,235 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.495e+02 6.191e+02 7.598e+02 1.014e+03 2.218e+03, threshold=1.520e+03, percent-clipped=2.0 +2023-04-01 08:21:14,291 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 08:21:56,503 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-01 08:22:01,129 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0346, 5.4713, 2.9045, 4.6447, 1.3067, 5.2905, 5.2280, 5.6129], + device='cuda:3'), covar=tensor([0.0395, 0.0914, 0.1774, 0.0606, 0.3896, 0.0590, 0.0511, 0.0603], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0319, 0.0378, 0.0288, 0.0356, 0.0314, 0.0298, 0.0327], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:22:03,020 INFO [train.py:903] (3/4) Epoch 6, batch 5250, loss[loss=0.2625, simple_loss=0.3242, pruned_loss=0.1004, over 19727.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3343, pruned_loss=0.1034, over 3830871.60 frames. ], batch size: 51, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:22:48,197 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4239, 2.3629, 1.6175, 1.4138, 2.2797, 1.1062, 1.0579, 1.5110], + device='cuda:3'), covar=tensor([0.0971, 0.0595, 0.0933, 0.0665, 0.0373, 0.1117, 0.0811, 0.0500], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0282, 0.0317, 0.0235, 0.0226, 0.0309, 0.0283, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:23:05,661 INFO [train.py:903] (3/4) Epoch 6, batch 5300, loss[loss=0.2875, simple_loss=0.354, pruned_loss=0.1105, over 19502.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3334, pruned_loss=0.1024, over 3835088.67 frames. ], batch size: 64, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:23:10,376 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.313e+02 6.384e+02 7.596e+02 9.799e+02 2.153e+03, threshold=1.519e+03, percent-clipped=7.0 +2023-04-01 08:23:22,414 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 08:23:56,538 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:24:06,733 INFO [train.py:903] (3/4) Epoch 6, batch 5350, loss[loss=0.37, simple_loss=0.3922, pruned_loss=0.1739, over 12961.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3331, pruned_loss=0.1026, over 3838488.26 frames. ], batch size: 136, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:24:27,511 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6270, 3.9734, 4.1452, 4.1534, 1.4837, 3.8695, 3.4912, 3.7784], + device='cuda:3'), covar=tensor([0.0971, 0.0661, 0.0578, 0.0488, 0.4131, 0.0450, 0.0519, 0.1101], + device='cuda:3'), in_proj_covar=tensor([0.0564, 0.0477, 0.0660, 0.0538, 0.0613, 0.0404, 0.0413, 0.0610], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 08:24:41,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 08:25:05,950 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.82 vs. limit=5.0 +2023-04-01 08:25:07,430 INFO [train.py:903] (3/4) Epoch 6, batch 5400, loss[loss=0.262, simple_loss=0.3296, pruned_loss=0.0972, over 19675.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3321, pruned_loss=0.1019, over 3839277.72 frames. 
], batch size: 53, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:25:15,189 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.754e+02 6.102e+02 7.285e+02 1.015e+03 2.320e+03, threshold=1.457e+03, percent-clipped=6.0 +2023-04-01 08:25:19,786 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39547.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:25:30,292 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39556.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:01,275 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:26:09,084 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0997, 1.2115, 1.5749, 1.0680, 2.6935, 3.2749, 3.0541, 3.4792], + device='cuda:3'), covar=tensor([0.1616, 0.3325, 0.3035, 0.2057, 0.0400, 0.0174, 0.0218, 0.0148], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0283, 0.0315, 0.0245, 0.0198, 0.0129, 0.0201, 0.0151], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:26:13,204 INFO [train.py:903] (3/4) Epoch 6, batch 5450, loss[loss=0.3302, simple_loss=0.3812, pruned_loss=0.1396, over 13249.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3318, pruned_loss=0.1017, over 3816874.25 frames. ], batch size: 135, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:26:28,353 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1916, 1.1830, 1.5216, 1.3418, 2.2030, 1.9437, 2.3171, 0.7995], + device='cuda:3'), covar=tensor([0.1839, 0.3226, 0.1830, 0.1529, 0.1215, 0.1586, 0.1226, 0.3024], + device='cuda:3'), in_proj_covar=tensor([0.0444, 0.0518, 0.0501, 0.0406, 0.0566, 0.0448, 0.0630, 0.0454], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:27:13,644 INFO [train.py:903] (3/4) Epoch 6, batch 5500, loss[loss=0.2666, simple_loss=0.3353, pruned_loss=0.09892, over 19593.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3325, pruned_loss=0.1025, over 3827248.11 frames. ], batch size: 61, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:27:18,175 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.238e+02 6.796e+02 8.775e+02 1.086e+03 2.226e+03, threshold=1.755e+03, percent-clipped=13.0 +2023-04-01 08:27:20,739 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:27:33,896 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39657.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:27:36,853 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-01 08:27:40,476 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39662.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:27:58,083 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4186, 2.0990, 1.8042, 1.7760, 1.5321, 1.8248, 0.3524, 1.1782], + device='cuda:3'), covar=tensor([0.0238, 0.0276, 0.0242, 0.0359, 0.0565, 0.0385, 0.0609, 0.0509], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0298, 0.0299, 0.0314, 0.0384, 0.0310, 0.0286, 0.0301], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:28:02,691 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.7942, 1.3471, 1.0314, 0.9116, 1.1906, 0.8956, 0.6593, 1.2520], + device='cuda:3'), covar=tensor([0.0536, 0.0604, 0.0858, 0.0488, 0.0350, 0.0926, 0.0591, 0.0308], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0276, 0.0309, 0.0233, 0.0219, 0.0304, 0.0282, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:28:14,677 INFO [train.py:903] (3/4) Epoch 6, batch 5550, loss[loss=0.2954, simple_loss=0.3464, pruned_loss=0.1222, over 19596.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3324, pruned_loss=0.1027, over 3836765.44 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 4.0 +2023-04-01 08:28:21,827 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 08:29:11,949 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 08:29:15,353 INFO [train.py:903] (3/4) Epoch 6, batch 5600, loss[loss=0.206, simple_loss=0.2808, pruned_loss=0.06558, over 19809.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.332, pruned_loss=0.1025, over 3838482.61 frames. 
], batch size: 49, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:29:20,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 6.221e+02 7.656e+02 9.358e+02 1.388e+03, threshold=1.531e+03, percent-clipped=0.0 +2023-04-01 08:29:43,047 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:29:51,151 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1407, 1.1699, 1.6265, 1.3092, 2.7511, 2.0348, 2.8530, 1.0610], + device='cuda:3'), covar=tensor([0.2034, 0.3414, 0.1928, 0.1614, 0.1261, 0.1719, 0.1376, 0.3165], + device='cuda:3'), in_proj_covar=tensor([0.0441, 0.0518, 0.0503, 0.0406, 0.0565, 0.0447, 0.0628, 0.0454], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:30:01,391 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7890, 1.8696, 1.8385, 2.2031, 4.2742, 1.1915, 2.2356, 4.3480], + device='cuda:3'), covar=tensor([0.0294, 0.2256, 0.2346, 0.1270, 0.0566, 0.2296, 0.1231, 0.0268], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0317, 0.0319, 0.0295, 0.0319, 0.0317, 0.0292, 0.0311], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:30:12,402 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:30:19,676 INFO [train.py:903] (3/4) Epoch 6, batch 5650, loss[loss=0.2954, simple_loss=0.3578, pruned_loss=0.1165, over 19783.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3325, pruned_loss=0.1025, over 3844865.56 frames. ], batch size: 56, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:02,178 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:08,826 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 08:31:13,807 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39833.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:31:21,779 INFO [train.py:903] (3/4) Epoch 6, batch 5700, loss[loss=0.2589, simple_loss=0.3307, pruned_loss=0.09358, over 19287.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3328, pruned_loss=0.1024, over 3844263.31 frames. ], batch size: 66, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:31:26,538 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.066e+02 6.807e+02 8.639e+02 1.032e+03 2.369e+03, threshold=1.728e+03, percent-clipped=2.0 +2023-04-01 08:32:05,581 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 08:32:21,661 INFO [train.py:903] (3/4) Epoch 6, batch 5750, loss[loss=0.2987, simple_loss=0.3468, pruned_loss=0.1253, over 19612.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3332, pruned_loss=0.1026, over 3842556.93 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 8.0 +2023-04-01 08:32:23,999 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 08:32:30,900 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 08:32:35,628 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 08:32:36,987 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7647, 1.7844, 1.9008, 2.5338, 1.6137, 2.2639, 2.3640, 1.8052], + device='cuda:3'), covar=tensor([0.2572, 0.2123, 0.1079, 0.1117, 0.2393, 0.0943, 0.2102, 0.1928], + device='cuda:3'), in_proj_covar=tensor([0.0690, 0.0684, 0.0588, 0.0834, 0.0714, 0.0598, 0.0725, 0.0629], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:32:58,465 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:23,258 INFO [train.py:903] (3/4) Epoch 6, batch 5800, loss[loss=0.3003, simple_loss=0.3598, pruned_loss=0.1204, over 19608.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3325, pruned_loss=0.1018, over 3852544.64 frames. ], batch size: 57, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:33:23,642 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:28,244 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39943.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:33:29,036 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+02 6.519e+02 7.840e+02 9.394e+02 2.455e+03, threshold=1.568e+03, percent-clipped=4.0 +2023-04-01 08:34:26,987 INFO [train.py:903] (3/4) Epoch 6, batch 5850, loss[loss=0.2096, simple_loss=0.2854, pruned_loss=0.06693, over 19376.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.332, pruned_loss=0.1012, over 3852366.78 frames. ], batch size: 48, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:34:40,718 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40001.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:34:59,424 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:29,013 INFO [train.py:903] (3/4) Epoch 6, batch 5900, loss[loss=0.2555, simple_loss=0.3184, pruned_loss=0.09627, over 19357.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3337, pruned_loss=0.1023, over 3853802.00 frames. ], batch size: 48, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:35:31,459 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 08:35:31,831 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:35:33,716 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.177e+02 6.123e+02 7.695e+02 9.772e+02 2.844e+03, threshold=1.539e+03, percent-clipped=4.0 +2023-04-01 08:35:53,051 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 08:36:30,104 INFO [train.py:903] (3/4) Epoch 6, batch 5950, loss[loss=0.2685, simple_loss=0.3419, pruned_loss=0.09759, over 19770.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3333, pruned_loss=0.1018, over 3853722.52 frames. ], batch size: 54, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:03,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40116.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:37:07,975 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. 
limit=2.0 +2023-04-01 08:37:15,813 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9193, 4.3030, 4.6751, 4.6485, 1.5643, 4.2484, 3.7481, 4.2483], + device='cuda:3'), covar=tensor([0.1238, 0.0658, 0.0520, 0.0473, 0.4650, 0.0428, 0.0572, 0.1075], + device='cuda:3'), in_proj_covar=tensor([0.0565, 0.0481, 0.0658, 0.0542, 0.0614, 0.0408, 0.0417, 0.0605], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 08:37:18,155 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40129.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:37:30,832 INFO [train.py:903] (3/4) Epoch 6, batch 6000, loss[loss=0.2546, simple_loss=0.3313, pruned_loss=0.08899, over 19691.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3338, pruned_loss=0.1029, over 3835530.98 frames. ], batch size: 53, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:37:30,832 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 08:37:43,218 INFO [train.py:937] (3/4) Epoch 6, validation: loss=0.1955, simple_loss=0.2951, pruned_loss=0.04789, over 944034.00 frames. +2023-04-01 08:37:43,220 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 08:37:47,771 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 6.298e+02 7.514e+02 9.544e+02 1.960e+03, threshold=1.503e+03, percent-clipped=1.0 +2023-04-01 08:38:29,639 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40177.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:38:44,848 INFO [train.py:903] (3/4) Epoch 6, batch 6050, loss[loss=0.2521, simple_loss=0.3074, pruned_loss=0.0984, over 19763.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3337, pruned_loss=0.1029, over 3833436.86 frames. ], batch size: 47, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:38:53,736 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:14,887 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:24,443 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:25,615 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4278, 1.1587, 1.7073, 1.2012, 2.7615, 3.4670, 3.2685, 3.6892], + device='cuda:3'), covar=tensor([0.1376, 0.3114, 0.2688, 0.1927, 0.0381, 0.0117, 0.0197, 0.0128], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0285, 0.0315, 0.0247, 0.0205, 0.0131, 0.0202, 0.0156], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:39:43,619 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40236.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:39:48,075 INFO [train.py:903] (3/4) Epoch 6, batch 6100, loss[loss=0.2442, simple_loss=0.3075, pruned_loss=0.09042, over 19596.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3336, pruned_loss=0.1029, over 3827950.80 frames. 
], batch size: 52, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:39:54,264 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 6.367e+02 7.728e+02 1.144e+03 2.582e+03, threshold=1.546e+03, percent-clipped=10.0 +2023-04-01 08:39:54,685 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40244.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:21,634 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2267, 2.2652, 1.6110, 1.3165, 2.1326, 1.1054, 1.0524, 1.6951], + device='cuda:3'), covar=tensor([0.0829, 0.0495, 0.0919, 0.0607, 0.0368, 0.1026, 0.0708, 0.0396], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0280, 0.0317, 0.0235, 0.0225, 0.0307, 0.0284, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:40:51,267 INFO [train.py:903] (3/4) Epoch 6, batch 6150, loss[loss=0.313, simple_loss=0.3692, pruned_loss=0.1284, over 17314.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3338, pruned_loss=0.103, over 3828279.41 frames. ], batch size: 101, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:40:51,571 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40290.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:40:54,085 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40292.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:41:19,452 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 08:41:52,310 INFO [train.py:903] (3/4) Epoch 6, batch 6200, loss[loss=0.2671, simple_loss=0.3188, pruned_loss=0.1077, over 19384.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3326, pruned_loss=0.1023, over 3839801.12 frames. ], batch size: 48, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:41:57,223 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.101e+02 6.810e+02 8.218e+02 1.008e+03 2.334e+03, threshold=1.644e+03, percent-clipped=5.0 +2023-04-01 08:42:01,860 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:42:15,832 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4839, 1.1906, 1.1339, 1.3127, 1.1062, 1.2774, 1.0810, 1.3474], + device='cuda:3'), covar=tensor([0.0918, 0.1153, 0.1397, 0.0866, 0.0998, 0.0584, 0.1206, 0.0715], + device='cuda:3'), in_proj_covar=tensor([0.0245, 0.0356, 0.0284, 0.0236, 0.0302, 0.0246, 0.0274, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:42:33,807 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40372.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 08:42:54,280 INFO [train.py:903] (3/4) Epoch 6, batch 6250, loss[loss=0.3249, simple_loss=0.3726, pruned_loss=0.1386, over 17372.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3324, pruned_loss=0.1016, over 3847923.47 frames. 
], batch size: 100, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:43:04,720 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 08:43:05,883 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1815, 1.5598, 1.7125, 1.9296, 1.7542, 1.8300, 1.5968, 1.9783], + device='cuda:3'), covar=tensor([0.0729, 0.1501, 0.1231, 0.0973, 0.1239, 0.0460, 0.1093, 0.0609], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0361, 0.0286, 0.0239, 0.0305, 0.0247, 0.0276, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:43:25,233 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2289, 1.3795, 1.1242, 1.1040, 0.9963, 1.1855, 0.0343, 0.3907], + device='cuda:3'), covar=tensor([0.0325, 0.0307, 0.0197, 0.0243, 0.0670, 0.0262, 0.0566, 0.0493], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0299, 0.0295, 0.0313, 0.0385, 0.0310, 0.0283, 0.0297], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 08:43:27,265 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 08:43:58,137 INFO [train.py:903] (3/4) Epoch 6, batch 6300, loss[loss=0.2974, simple_loss=0.3567, pruned_loss=0.1191, over 18875.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3328, pruned_loss=0.1022, over 3828158.51 frames. ], batch size: 74, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:44:03,776 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.389e+02 8.265e+02 1.061e+03 2.633e+03, threshold=1.653e+03, percent-clipped=7.0 +2023-04-01 08:45:00,754 INFO [train.py:903] (3/4) Epoch 6, batch 6350, loss[loss=0.2639, simple_loss=0.3356, pruned_loss=0.09611, over 18859.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3318, pruned_loss=0.1013, over 3821165.44 frames. ], batch size: 74, lr: 1.33e-02, grad_scale: 8.0 +2023-04-01 08:45:12,837 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:24,299 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:45:33,428 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8661, 1.7653, 2.2040, 2.7782, 2.6356, 2.4481, 2.2682, 3.0630], + device='cuda:3'), covar=tensor([0.0652, 0.1725, 0.1056, 0.0705, 0.0956, 0.0382, 0.0916, 0.0388], + device='cuda:3'), in_proj_covar=tensor([0.0246, 0.0362, 0.0285, 0.0239, 0.0301, 0.0246, 0.0275, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:45:33,996 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 08:45:44,982 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40525.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:02,599 INFO [train.py:903] (3/4) Epoch 6, batch 6400, loss[loss=0.3116, simple_loss=0.36, pruned_loss=0.1317, over 13356.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3332, pruned_loss=0.1027, over 3807533.53 frames. 
], batch size: 135, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:46:07,241 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 6.528e+02 8.039e+02 1.077e+03 1.980e+03, threshold=1.608e+03, percent-clipped=3.0 +2023-04-01 08:46:12,206 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:15,844 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 08:46:23,192 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40557.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:44,867 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:46:52,963 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40580.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:47:04,379 INFO [train.py:903] (3/4) Epoch 6, batch 6450, loss[loss=0.2053, simple_loss=0.2771, pruned_loss=0.06675, over 19745.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3326, pruned_loss=0.102, over 3816032.43 frames. ], batch size: 47, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:47:50,031 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 08:47:59,685 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:07,614 INFO [train.py:903] (3/4) Epoch 6, batch 6500, loss[loss=0.2381, simple_loss=0.3163, pruned_loss=0.07997, over 19781.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3324, pruned_loss=0.102, over 3813475.52 frames. ], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:48:13,022 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 6.039e+02 7.833e+02 1.001e+03 2.233e+03, threshold=1.567e+03, percent-clipped=5.0 +2023-04-01 08:48:15,449 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 08:48:44,346 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40669.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:47,957 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:48:54,075 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.03 vs. limit=5.0 +2023-04-01 08:49:12,715 INFO [train.py:903] (3/4) Epoch 6, batch 6550, loss[loss=0.2348, simple_loss=0.3034, pruned_loss=0.08314, over 19637.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3335, pruned_loss=0.1032, over 3799378.95 frames. ], batch size: 50, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:49:15,178 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:18,806 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:49:55,779 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 08:50:15,126 INFO [train.py:903] (3/4) Epoch 6, batch 6600, loss[loss=0.3052, simple_loss=0.3566, pruned_loss=0.1269, over 19841.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3326, pruned_loss=0.1026, over 3817058.88 frames. 
], batch size: 52, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:50:19,765 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+02 6.030e+02 7.716e+02 9.913e+02 2.888e+03, threshold=1.543e+03, percent-clipped=3.0 +2023-04-01 08:50:26,143 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40749.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:50:58,489 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-01 08:51:17,592 INFO [train.py:903] (3/4) Epoch 6, batch 6650, loss[loss=0.2784, simple_loss=0.3307, pruned_loss=0.1131, over 19786.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3331, pruned_loss=0.1029, over 3812268.39 frames. ], batch size: 47, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:51:19,261 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9002, 1.9834, 1.9958, 2.8031, 1.9106, 2.6353, 2.5360, 1.8208], + device='cuda:3'), covar=tensor([0.2592, 0.2053, 0.1035, 0.1279, 0.2382, 0.0880, 0.2041, 0.1897], + device='cuda:3'), in_proj_covar=tensor([0.0682, 0.0685, 0.0589, 0.0835, 0.0709, 0.0593, 0.0727, 0.0630], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:51:36,072 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0511, 1.0968, 1.2557, 1.4579, 2.6594, 0.8504, 1.7360, 2.6533], + device='cuda:3'), covar=tensor([0.0485, 0.2668, 0.2747, 0.1479, 0.0692, 0.2424, 0.1271, 0.0517], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0316, 0.0321, 0.0296, 0.0319, 0.0316, 0.0292, 0.0314], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:51:40,543 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40807.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:52:19,368 INFO [train.py:903] (3/4) Epoch 6, batch 6700, loss[loss=0.2728, simple_loss=0.3436, pruned_loss=0.1009, over 19795.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3335, pruned_loss=0.1035, over 3823689.72 frames. ], batch size: 56, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:52:24,134 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.981e+02 6.315e+02 8.385e+02 9.952e+02 2.559e+03, threshold=1.677e+03, percent-clipped=5.0 +2023-04-01 08:52:39,404 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40854.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:13,390 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40883.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:53:21,366 INFO [train.py:903] (3/4) Epoch 6, batch 6750, loss[loss=0.2777, simple_loss=0.3368, pruned_loss=0.1093, over 18083.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3329, pruned_loss=0.1032, over 3798378.78 frames. 
], batch size: 83, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:53:42,240 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8933, 1.5669, 1.5441, 1.9289, 1.8872, 1.7581, 1.6517, 1.8803], + device='cuda:3'), covar=tensor([0.0967, 0.1658, 0.1382, 0.0958, 0.1172, 0.0462, 0.1072, 0.0662], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0360, 0.0285, 0.0237, 0.0301, 0.0243, 0.0271, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:53:47,612 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3129, 1.2943, 1.3091, 1.4328, 2.8662, 0.9117, 2.1788, 3.1004], + device='cuda:3'), covar=tensor([0.0395, 0.2430, 0.2521, 0.1531, 0.0643, 0.2397, 0.1096, 0.0348], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0312, 0.0315, 0.0293, 0.0316, 0.0311, 0.0289, 0.0310], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:53:53,156 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:04,525 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:17,526 INFO [train.py:903] (3/4) Epoch 6, batch 6800, loss[loss=0.271, simple_loss=0.3369, pruned_loss=0.1026, over 19610.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3326, pruned_loss=0.1031, over 3783063.11 frames. ], batch size: 57, lr: 1.32e-02, grad_scale: 8.0 +2023-04-01 08:54:23,019 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.879e+02 7.609e+02 1.019e+03 2.150e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 08:54:31,411 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:54:33,681 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40953.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:04,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 08:55:04,906 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 08:55:08,786 INFO [train.py:903] (3/4) Epoch 7, batch 0, loss[loss=0.2661, simple_loss=0.3276, pruned_loss=0.1024, over 18110.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3276, pruned_loss=0.1024, over 18110.00 frames. ], batch size: 83, lr: 1.24e-02, grad_scale: 8.0 +2023-04-01 08:55:08,787 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 08:55:20,406 INFO [train.py:937] (3/4) Epoch 7, validation: loss=0.1957, simple_loss=0.2957, pruned_loss=0.04779, over 944034.00 frames. +2023-04-01 08:55:20,407 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 08:55:21,992 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:30,919 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:55:32,932 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 08:55:40,165 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6732, 1.7014, 1.8220, 2.4092, 1.5851, 2.1287, 2.2817, 1.7288], + device='cuda:3'), covar=tensor([0.2113, 0.1760, 0.0900, 0.0907, 0.1944, 0.0758, 0.1708, 0.1654], + device='cuda:3'), in_proj_covar=tensor([0.0682, 0.0675, 0.0584, 0.0829, 0.0702, 0.0591, 0.0726, 0.0625], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 08:56:04,695 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:16,007 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:19,148 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41015.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:22,009 INFO [train.py:903] (3/4) Epoch 7, batch 50, loss[loss=0.3228, simple_loss=0.3757, pruned_loss=0.1349, over 19759.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3272, pruned_loss=0.09575, over 872368.61 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:56:36,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41030.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:56:51,947 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.786e+02 6.089e+02 7.435e+02 1.027e+03 3.072e+03, threshold=1.487e+03, percent-clipped=7.0 +2023-04-01 08:56:56,507 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 08:57:17,985 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:18,119 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:57:23,453 INFO [train.py:903] (3/4) Epoch 7, batch 100, loss[loss=0.2567, simple_loss=0.3139, pruned_loss=0.09978, over 19741.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3307, pruned_loss=0.09973, over 1526881.21 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:57:34,743 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 08:57:41,763 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4081, 2.1650, 1.6188, 1.5010, 2.1415, 1.2414, 1.3629, 1.7414], + device='cuda:3'), covar=tensor([0.0700, 0.0531, 0.0865, 0.0558, 0.0342, 0.0928, 0.0531, 0.0363], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0279, 0.0313, 0.0237, 0.0226, 0.0308, 0.0282, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 08:57:46,202 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41088.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:24,484 INFO [train.py:903] (3/4) Epoch 7, batch 150, loss[loss=0.3033, simple_loss=0.3476, pruned_loss=0.1294, over 13341.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3283, pruned_loss=0.0984, over 2038139.81 frames. 
], batch size: 137, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:58:26,830 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41120.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:36,400 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 08:58:56,748 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.181e+02 6.219e+02 8.190e+02 1.094e+03 2.901e+03, threshold=1.638e+03, percent-clipped=4.0 +2023-04-01 08:59:22,651 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 08:59:23,717 INFO [train.py:903] (3/4) Epoch 7, batch 200, loss[loss=0.286, simple_loss=0.3574, pruned_loss=0.1073, over 17440.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3302, pruned_loss=0.1006, over 2428710.03 frames. ], batch size: 101, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 08:59:44,428 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-01 09:00:27,762 INFO [train.py:903] (3/4) Epoch 7, batch 250, loss[loss=0.2334, simple_loss=0.2985, pruned_loss=0.08419, over 18178.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3307, pruned_loss=0.1013, over 2735295.66 frames. ], batch size: 40, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:00:38,045 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:40,035 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:00:59,441 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.194e+02 6.866e+02 8.894e+02 1.080e+03 3.290e+03, threshold=1.779e+03, percent-clipped=6.0 +2023-04-01 09:01:06,554 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:06,849 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. limit=5.0 +2023-04-01 09:01:23,879 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:01:30,754 INFO [train.py:903] (3/4) Epoch 7, batch 300, loss[loss=0.2312, simple_loss=0.3027, pruned_loss=0.07986, over 19584.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3309, pruned_loss=0.1014, over 2982663.54 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:31,484 INFO [train.py:903] (3/4) Epoch 7, batch 350, loss[loss=0.3065, simple_loss=0.3616, pruned_loss=0.1257, over 19588.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3299, pruned_loss=0.1006, over 3179759.22 frames. ], batch size: 61, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:02:33,986 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 09:03:02,035 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:04,944 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.797e+02 7.460e+02 9.435e+02 2.818e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-04-01 09:03:22,758 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41359.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:32,954 INFO [train.py:903] (3/4) Epoch 7, batch 400, loss[loss=0.286, simple_loss=0.3591, pruned_loss=0.1065, over 17356.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.33, pruned_loss=0.1004, over 3323086.70 frames. ], batch size: 101, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:03:44,421 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41377.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:03:54,273 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41384.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:20,763 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:23,209 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:04:29,177 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8578, 0.8975, 1.0798, 0.9428, 1.3179, 1.2810, 1.3585, 0.5245], + device='cuda:3'), covar=tensor([0.1229, 0.2150, 0.1240, 0.0973, 0.0831, 0.1111, 0.0794, 0.2145], + device='cuda:3'), in_proj_covar=tensor([0.0452, 0.0523, 0.0510, 0.0409, 0.0561, 0.0455, 0.0631, 0.0456], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 09:04:34,727 INFO [train.py:903] (3/4) Epoch 7, batch 450, loss[loss=0.2664, simple_loss=0.3392, pruned_loss=0.09682, over 18192.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3288, pruned_loss=0.09941, over 3438988.87 frames. ], batch size: 83, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:05:02,679 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 09:05:03,850 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 09:05:06,095 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 5.824e+02 7.719e+02 9.807e+02 3.448e+03, threshold=1.544e+03, percent-clipped=7.0 +2023-04-01 09:05:31,861 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:05:37,398 INFO [train.py:903] (3/4) Epoch 7, batch 500, loss[loss=0.2496, simple_loss=0.3259, pruned_loss=0.0866, over 17449.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.331, pruned_loss=0.1007, over 3521688.80 frames. ], batch size: 101, lr: 1.23e-02, grad_scale: 16.0 +2023-04-01 09:05:44,925 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41474.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:06:38,626 INFO [train.py:903] (3/4) Epoch 7, batch 550, loss[loss=0.256, simple_loss=0.3241, pruned_loss=0.09396, over 19536.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3316, pruned_loss=0.1006, over 3592642.43 frames. 
], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:06:43,743 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41522.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:07:09,851 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 6.032e+02 7.411e+02 9.275e+02 1.625e+03, threshold=1.482e+03, percent-clipped=1.0 +2023-04-01 09:07:37,922 INFO [train.py:903] (3/4) Epoch 7, batch 600, loss[loss=0.2798, simple_loss=0.348, pruned_loss=0.1058, over 19600.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3325, pruned_loss=0.1014, over 3641256.42 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:07:50,883 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:13,417 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 09:08:16,237 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:08:39,321 INFO [train.py:903] (3/4) Epoch 7, batch 650, loss[loss=0.2056, simple_loss=0.2879, pruned_loss=0.06167, over 19585.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3315, pruned_loss=0.1009, over 3684570.28 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:08:45,302 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41623.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:00,446 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41633.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:13,511 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 5.719e+02 7.418e+02 1.065e+03 4.334e+03, threshold=1.484e+03, percent-clipped=7.0 +2023-04-01 09:09:28,650 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41658.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:09:40,773 INFO [train.py:903] (3/4) Epoch 7, batch 700, loss[loss=0.2556, simple_loss=0.3267, pruned_loss=0.09223, over 19537.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3323, pruned_loss=0.1011, over 3722284.27 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-04-01 09:09:48,605 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:10:30,227 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 09:10:45,013 INFO [train.py:903] (3/4) Epoch 7, batch 750, loss[loss=0.2412, simple_loss=0.3021, pruned_loss=0.09013, over 19747.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.331, pruned_loss=0.1001, over 3753760.19 frames. 
], batch size: 46, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:10:47,658 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9250, 3.5852, 2.1850, 3.2770, 1.1073, 3.3870, 3.2380, 3.3683], + device='cuda:3'), covar=tensor([0.0735, 0.1088, 0.2123, 0.0784, 0.3695, 0.0741, 0.0807, 0.0947], + device='cuda:3'), in_proj_covar=tensor([0.0378, 0.0334, 0.0378, 0.0300, 0.0356, 0.0312, 0.0304, 0.0338], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 09:10:59,392 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:15,966 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.751e+02 6.886e+02 8.721e+02 1.519e+03, threshold=1.377e+03, percent-clipped=2.0 +2023-04-01 09:11:31,835 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:11:46,757 INFO [train.py:903] (3/4) Epoch 7, batch 800, loss[loss=0.2451, simple_loss=0.3238, pruned_loss=0.0832, over 19777.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3298, pruned_loss=0.09933, over 3778520.07 frames. ], batch size: 56, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:11:56,190 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 09:11:58,727 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:31,084 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:12:48,119 INFO [train.py:903] (3/4) Epoch 7, batch 850, loss[loss=0.2795, simple_loss=0.3374, pruned_loss=0.1107, over 19846.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3307, pruned_loss=0.09972, over 3791770.06 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:13:11,214 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41835.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:23,186 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.000e+02 6.514e+02 8.083e+02 9.646e+02 1.896e+03, threshold=1.617e+03, percent-clipped=5.0 +2023-04-01 09:13:38,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 09:13:40,992 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41860.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:13:50,191 INFO [train.py:903] (3/4) Epoch 7, batch 900, loss[loss=0.2414, simple_loss=0.3248, pruned_loss=0.07904, over 19556.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3292, pruned_loss=0.09842, over 3813760.79 frames. 
], batch size: 56, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:14:49,752 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0349, 1.1430, 1.3191, 1.4182, 2.6515, 0.9628, 2.1173, 2.7030], + device='cuda:3'), covar=tensor([0.0430, 0.2635, 0.2477, 0.1603, 0.0636, 0.2251, 0.0910, 0.0471], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0320, 0.0323, 0.0299, 0.0324, 0.0317, 0.0300, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:14:51,584 INFO [train.py:903] (3/4) Epoch 7, batch 950, loss[loss=0.3613, simple_loss=0.4084, pruned_loss=0.1571, over 19574.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3284, pruned_loss=0.09827, over 3824181.24 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:14:56,063 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 09:15:01,469 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:15:26,245 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.318e+02 6.505e+02 7.519e+02 9.487e+02 1.757e+03, threshold=1.504e+03, percent-clipped=1.0 +2023-04-01 09:15:54,009 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41966.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 09:15:55,890 INFO [train.py:903] (3/4) Epoch 7, batch 1000, loss[loss=0.2758, simple_loss=0.3447, pruned_loss=0.1035, over 19284.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3288, pruned_loss=0.09846, over 3812607.78 frames. ], batch size: 66, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:16:26,040 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-01 09:16:26,564 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2140, 2.0844, 1.7360, 1.5900, 1.5322, 1.6751, 0.2305, 1.0004], + device='cuda:3'), covar=tensor([0.0240, 0.0261, 0.0206, 0.0348, 0.0572, 0.0332, 0.0610, 0.0462], + device='cuda:3'), in_proj_covar=tensor([0.0298, 0.0306, 0.0297, 0.0324, 0.0391, 0.0317, 0.0290, 0.0305], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 09:16:48,569 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 09:16:56,960 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:16:59,085 INFO [train.py:903] (3/4) Epoch 7, batch 1050, loss[loss=0.2458, simple_loss=0.3198, pruned_loss=0.08592, over 19671.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3309, pruned_loss=0.1002, over 3823863.30 frames. ], batch size: 53, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:17:29,376 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 09:17:33,748 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+02 5.894e+02 7.129e+02 9.584e+02 2.561e+03, threshold=1.426e+03, percent-clipped=5.0 +2023-04-01 09:18:00,563 INFO [train.py:903] (3/4) Epoch 7, batch 1100, loss[loss=0.3934, simple_loss=0.4259, pruned_loss=0.1804, over 19697.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3297, pruned_loss=0.09955, over 3833734.83 frames. 
], batch size: 59, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:03,370 INFO [train.py:903] (3/4) Epoch 7, batch 1150, loss[loss=0.2457, simple_loss=0.307, pruned_loss=0.09221, over 19442.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3286, pruned_loss=0.09888, over 3834517.93 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 4.0 +2023-04-01 09:19:21,146 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:19:33,492 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.5885, 0.8468, 0.6465, 0.6390, 0.7991, 0.5749, 0.6208, 0.7339], + device='cuda:3'), covar=tensor([0.0307, 0.0399, 0.0577, 0.0324, 0.0270, 0.0619, 0.0350, 0.0263], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0281, 0.0313, 0.0237, 0.0226, 0.0308, 0.0289, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:19:37,737 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.892e+02 7.369e+02 1.011e+03 1.805e+03, threshold=1.474e+03, percent-clipped=4.0 +2023-04-01 09:19:51,767 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5447, 2.3899, 1.6798, 1.5560, 2.2601, 1.3193, 1.3577, 1.7526], + device='cuda:3'), covar=tensor([0.0638, 0.0440, 0.0693, 0.0524, 0.0318, 0.0805, 0.0589, 0.0380], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0280, 0.0313, 0.0237, 0.0225, 0.0308, 0.0287, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:20:05,772 INFO [train.py:903] (3/4) Epoch 7, batch 1200, loss[loss=0.2707, simple_loss=0.3371, pruned_loss=0.1021, over 19542.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3298, pruned_loss=0.09969, over 3827125.70 frames. ], batch size: 56, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:20:30,581 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 09:21:08,030 INFO [train.py:903] (3/4) Epoch 7, batch 1250, loss[loss=0.2695, simple_loss=0.3319, pruned_loss=0.1036, over 19765.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.33, pruned_loss=0.1002, over 3811668.20 frames. ], batch size: 54, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:21:43,462 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 6.237e+02 7.824e+02 1.034e+03 2.254e+03, threshold=1.565e+03, percent-clipped=6.0 +2023-04-01 09:22:09,630 INFO [train.py:903] (3/4) Epoch 7, batch 1300, loss[loss=0.2218, simple_loss=0.2868, pruned_loss=0.07842, over 19756.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.329, pruned_loss=0.09937, over 3824146.00 frames. ], batch size: 47, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:22:09,798 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:23:02,530 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42310.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:23:12,310 INFO [train.py:903] (3/4) Epoch 7, batch 1350, loss[loss=0.2965, simple_loss=0.3598, pruned_loss=0.1166, over 18786.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3288, pruned_loss=0.09892, over 3840676.58 frames. 
], batch size: 74, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:23:47,360 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.606e+02 6.578e+02 7.819e+02 1.024e+03 2.032e+03, threshold=1.564e+03, percent-clipped=3.0 +2023-04-01 09:23:50,218 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8905, 1.3660, 1.4453, 2.0961, 1.6173, 2.1659, 2.0437, 1.8851], + device='cuda:3'), covar=tensor([0.0697, 0.0966, 0.0994, 0.0851, 0.0856, 0.0618, 0.0812, 0.0607], + device='cuda:3'), in_proj_covar=tensor([0.0223, 0.0236, 0.0233, 0.0265, 0.0256, 0.0219, 0.0216, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 09:24:15,697 INFO [train.py:903] (3/4) Epoch 7, batch 1400, loss[loss=0.2312, simple_loss=0.2921, pruned_loss=0.08518, over 19409.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3276, pruned_loss=0.09794, over 3847704.61 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 8.0 +2023-04-01 09:24:16,606 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 09:24:34,429 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:39,201 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42387.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:50,674 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:24:52,468 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 09:25:10,403 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:25:12,408 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 09:25:17,935 INFO [train.py:903] (3/4) Epoch 7, batch 1450, loss[loss=0.2848, simple_loss=0.3497, pruned_loss=0.1099, over 19467.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3262, pruned_loss=0.09697, over 3850506.01 frames. 
], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:25:26,432 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42425.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:25:31,023 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0839, 1.1578, 1.5352, 0.8011, 2.4003, 2.9489, 2.6998, 3.1372], + device='cuda:3'), covar=tensor([0.1520, 0.3161, 0.2863, 0.2192, 0.0434, 0.0206, 0.0266, 0.0174], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0282, 0.0310, 0.0244, 0.0200, 0.0134, 0.0201, 0.0159], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 09:25:53,250 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.957e+02 5.854e+02 7.466e+02 9.791e+02 2.115e+03, threshold=1.493e+03, percent-clipped=4.0 +2023-04-01 09:26:18,636 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2827, 3.8685, 2.5962, 3.4974, 1.4990, 3.5052, 3.5192, 3.6785], + device='cuda:3'), covar=tensor([0.0689, 0.1082, 0.1745, 0.0647, 0.2925, 0.0781, 0.0681, 0.0984], + device='cuda:3'), in_proj_covar=tensor([0.0371, 0.0329, 0.0381, 0.0290, 0.0353, 0.0313, 0.0297, 0.0329], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 09:26:19,589 INFO [train.py:903] (3/4) Epoch 7, batch 1500, loss[loss=0.3009, simple_loss=0.366, pruned_loss=0.1179, over 19079.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3262, pruned_loss=0.09686, over 3849928.90 frames. ], batch size: 69, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:20,485 INFO [train.py:903] (3/4) Epoch 7, batch 1550, loss[loss=0.2872, simple_loss=0.3399, pruned_loss=0.1173, over 19574.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3278, pruned_loss=0.09841, over 3841319.56 frames. ], batch size: 52, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:27:29,126 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:27:55,724 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 6.427e+02 8.071e+02 9.919e+02 2.182e+03, threshold=1.614e+03, percent-clipped=7.0 +2023-04-01 09:28:23,010 INFO [train.py:903] (3/4) Epoch 7, batch 1600, loss[loss=0.2718, simple_loss=0.3417, pruned_loss=0.1009, over 19536.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3295, pruned_loss=0.0996, over 3834713.63 frames. ], batch size: 56, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:28:41,274 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 09:29:24,655 INFO [train.py:903] (3/4) Epoch 7, batch 1650, loss[loss=0.2558, simple_loss=0.3234, pruned_loss=0.09405, over 19840.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3281, pruned_loss=0.09887, over 3837546.71 frames. 
], batch size: 52, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:29:50,171 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:29:59,743 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.059e+02 6.175e+02 7.945e+02 9.824e+02 2.630e+03, threshold=1.589e+03, percent-clipped=4.0 +2023-04-01 09:30:23,159 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:26,330 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:30:26,438 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1530, 2.1017, 1.5256, 1.5218, 1.3504, 1.6456, 0.4063, 1.0470], + device='cuda:3'), covar=tensor([0.0268, 0.0293, 0.0275, 0.0399, 0.0646, 0.0417, 0.0616, 0.0540], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0308, 0.0303, 0.0327, 0.0396, 0.0323, 0.0290, 0.0306], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 09:30:27,189 INFO [train.py:903] (3/4) Epoch 7, batch 1700, loss[loss=0.2914, simple_loss=0.3555, pruned_loss=0.1136, over 19530.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3276, pruned_loss=0.09839, over 3835582.27 frames. ], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:30:42,167 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 09:30:43,931 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42681.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:31:02,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 09:31:15,455 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42706.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 09:31:29,060 INFO [train.py:903] (3/4) Epoch 7, batch 1750, loss[loss=0.2854, simple_loss=0.3537, pruned_loss=0.1086, over 19088.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3285, pruned_loss=0.09887, over 3844309.88 frames. ], batch size: 69, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:31:59,547 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:32:05,278 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.682e+02 7.177e+02 9.179e+02 1.731e+03, threshold=1.435e+03, percent-clipped=1.0 +2023-04-01 09:32:33,263 INFO [train.py:903] (3/4) Epoch 7, batch 1800, loss[loss=0.277, simple_loss=0.3442, pruned_loss=0.105, over 18827.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3286, pruned_loss=0.09922, over 3831760.99 frames. ], batch size: 74, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:33:27,619 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 09:33:35,067 INFO [train.py:903] (3/4) Epoch 7, batch 1850, loss[loss=0.3592, simple_loss=0.3984, pruned_loss=0.16, over 19328.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3285, pruned_loss=0.09907, over 3834955.38 frames. ], batch size: 66, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:04,823 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 09:34:09,013 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 6.596e+02 7.909e+02 1.066e+03 2.536e+03, threshold=1.582e+03, percent-clipped=10.0 +2023-04-01 09:34:22,471 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:37,124 INFO [train.py:903] (3/4) Epoch 7, batch 1900, loss[loss=0.2427, simple_loss=0.3051, pruned_loss=0.09021, over 19389.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3287, pruned_loss=0.09897, over 3821520.77 frames. ], batch size: 48, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:34:37,304 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:48,040 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:34:51,102 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 09:34:55,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 09:35:14,078 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:35:21,940 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 09:35:38,557 INFO [train.py:903] (3/4) Epoch 7, batch 1950, loss[loss=0.2999, simple_loss=0.3564, pruned_loss=0.1217, over 17365.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3305, pruned_loss=0.1, over 3812459.48 frames. ], batch size: 101, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:36:15,250 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+02 6.682e+02 8.244e+02 9.665e+02 2.689e+03, threshold=1.649e+03, percent-clipped=3.0 +2023-04-01 09:36:23,015 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.81 vs. limit=5.0 +2023-04-01 09:36:41,126 INFO [train.py:903] (3/4) Epoch 7, batch 2000, loss[loss=0.289, simple_loss=0.3499, pruned_loss=0.114, over 19480.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3303, pruned_loss=0.09948, over 3812896.17 frames. ], batch size: 64, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:00,261 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:35,138 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:37:36,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 09:37:43,658 INFO [train.py:903] (3/4) Epoch 7, batch 2050, loss[loss=0.2782, simple_loss=0.3469, pruned_loss=0.1048, over 19781.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3308, pruned_loss=0.1002, over 3803729.23 frames. ], batch size: 54, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:37:56,213 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 09:37:57,395 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-01 09:38:17,409 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.956e+02 6.149e+02 7.669e+02 9.586e+02 2.177e+03, threshold=1.534e+03, percent-clipped=1.0 +2023-04-01 09:38:18,641 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 09:38:38,206 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4221, 1.4861, 1.9119, 2.4352, 1.8495, 2.2326, 2.5430, 2.2709], + device='cuda:3'), covar=tensor([0.0753, 0.1207, 0.1085, 0.1101, 0.1080, 0.0928, 0.0937, 0.0748], + device='cuda:3'), in_proj_covar=tensor([0.0223, 0.0236, 0.0231, 0.0263, 0.0254, 0.0220, 0.0215, 0.0210], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 09:38:46,691 INFO [train.py:903] (3/4) Epoch 7, batch 2100, loss[loss=0.2616, simple_loss=0.3337, pruned_loss=0.0947, over 19777.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3288, pruned_loss=0.09885, over 3811216.35 frames. ], batch size: 56, lr: 1.21e-02, grad_scale: 8.0 +2023-04-01 09:38:47,710 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 09:39:13,061 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 09:39:28,856 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7771, 1.2241, 1.4061, 2.0680, 1.6368, 1.9138, 2.1600, 1.6131], + device='cuda:3'), covar=tensor([0.0757, 0.1030, 0.1068, 0.0819, 0.0854, 0.0711, 0.0724, 0.0688], + device='cuda:3'), in_proj_covar=tensor([0.0225, 0.0239, 0.0235, 0.0265, 0.0256, 0.0221, 0.0218, 0.0213], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 09:39:35,510 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 09:39:41,536 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:39:48,067 INFO [train.py:903] (3/4) Epoch 7, batch 2150, loss[loss=0.2388, simple_loss=0.2991, pruned_loss=0.08923, over 19419.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3284, pruned_loss=0.09902, over 3801983.11 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:39:57,668 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:13,322 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:40:23,158 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 6.527e+02 7.673e+02 9.951e+02 2.226e+03, threshold=1.535e+03, percent-clipped=3.0 +2023-04-01 09:40:49,521 INFO [train.py:903] (3/4) Epoch 7, batch 2200, loss[loss=0.2694, simple_loss=0.3411, pruned_loss=0.09881, over 18784.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3287, pruned_loss=0.09878, over 3815674.55 frames. ], batch size: 74, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:53,419 INFO [train.py:903] (3/4) Epoch 7, batch 2250, loss[loss=0.274, simple_loss=0.3508, pruned_loss=0.09865, over 19639.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3295, pruned_loss=0.09961, over 3813482.01 frames. 
], batch size: 60, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:41:57,907 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:19,766 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:21,867 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43241.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:27,691 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+02 6.206e+02 7.577e+02 9.251e+02 2.641e+03, threshold=1.515e+03, percent-clipped=5.0 +2023-04-01 09:42:52,532 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:42:56,674 INFO [train.py:903] (3/4) Epoch 7, batch 2300, loss[loss=0.2484, simple_loss=0.3214, pruned_loss=0.08772, over 19537.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3281, pruned_loss=0.09887, over 3820291.26 frames. ], batch size: 54, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:43:03,293 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 09:43:10,466 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 09:43:59,264 INFO [train.py:903] (3/4) Epoch 7, batch 2350, loss[loss=0.301, simple_loss=0.3598, pruned_loss=0.1211, over 18754.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3284, pruned_loss=0.09876, over 3809552.62 frames. ], batch size: 74, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:44:22,139 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:32,288 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2563, 1.2721, 1.3970, 1.6192, 2.8091, 0.9972, 1.9305, 3.1072], + device='cuda:3'), covar=tensor([0.0458, 0.2522, 0.2512, 0.1549, 0.0716, 0.2357, 0.1213, 0.0366], + device='cuda:3'), in_proj_covar=tensor([0.0308, 0.0318, 0.0326, 0.0297, 0.0323, 0.0319, 0.0302, 0.0319], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:44:34,240 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 6.151e+02 7.487e+02 9.533e+02 1.563e+03, threshold=1.497e+03, percent-clipped=2.0 +2023-04-01 09:44:43,158 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 09:44:46,762 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43356.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:44:59,569 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 09:45:00,595 INFO [train.py:903] (3/4) Epoch 7, batch 2400, loss[loss=0.2603, simple_loss=0.318, pruned_loss=0.1013, over 19424.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3282, pruned_loss=0.09879, over 3812031.48 frames. 
], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:45:20,057 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:45:49,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43407.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:46:04,420 INFO [train.py:903] (3/4) Epoch 7, batch 2450, loss[loss=0.2224, simple_loss=0.2983, pruned_loss=0.07328, over 19731.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3285, pruned_loss=0.09846, over 3811837.19 frames. ], batch size: 51, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:46:38,155 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.802e+02 7.669e+02 8.855e+02 2.284e+03, threshold=1.534e+03, percent-clipped=5.0 +2023-04-01 09:47:06,558 INFO [train.py:903] (3/4) Epoch 7, batch 2500, loss[loss=0.2496, simple_loss=0.3225, pruned_loss=0.08836, over 19766.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3287, pruned_loss=0.09873, over 3806261.91 frames. ], batch size: 54, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:47:53,176 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.96 vs. limit=5.0 +2023-04-01 09:48:09,517 INFO [train.py:903] (3/4) Epoch 7, batch 2550, loss[loss=0.266, simple_loss=0.3242, pruned_loss=0.1039, over 19387.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3282, pruned_loss=0.09812, over 3810595.48 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:48:25,886 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:48:44,536 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.143e+02 6.070e+02 7.288e+02 8.830e+02 1.707e+03, threshold=1.458e+03, percent-clipped=2.0 +2023-04-01 09:49:02,680 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7080, 3.1761, 3.2403, 3.2423, 1.1653, 2.9941, 2.6769, 2.9463], + device='cuda:3'), covar=tensor([0.1406, 0.0781, 0.0736, 0.0710, 0.4631, 0.0726, 0.0724, 0.1303], + device='cuda:3'), in_proj_covar=tensor([0.0579, 0.0505, 0.0681, 0.0560, 0.0644, 0.0431, 0.0433, 0.0633], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 09:49:05,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 09:49:10,238 INFO [train.py:903] (3/4) Epoch 7, batch 2600, loss[loss=0.2905, simple_loss=0.35, pruned_loss=0.1155, over 18754.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3293, pruned_loss=0.0988, over 3813721.67 frames. ], batch size: 74, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:49:41,324 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:49:51,827 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.69 vs. 
limit=5.0 +2023-04-01 09:50:02,800 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8337, 3.5324, 2.1796, 3.1252, 1.0481, 2.9991, 3.2119, 3.3758], + device='cuda:3'), covar=tensor([0.0595, 0.0915, 0.1964, 0.0694, 0.3447, 0.0863, 0.0681, 0.0731], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0336, 0.0394, 0.0300, 0.0365, 0.0325, 0.0309, 0.0345], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 09:50:06,464 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43612.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:09,209 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-01 09:50:12,284 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:12,994 INFO [train.py:903] (3/4) Epoch 7, batch 2650, loss[loss=0.243, simple_loss=0.3107, pruned_loss=0.08768, over 19748.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3282, pruned_loss=0.09807, over 3816501.83 frames. ], batch size: 51, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:50:23,872 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-01 09:50:35,408 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 09:50:38,365 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43637.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:50:49,495 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 6.243e+02 7.316e+02 9.618e+02 1.411e+03, threshold=1.463e+03, percent-clipped=0.0 +2023-04-01 09:51:16,493 INFO [train.py:903] (3/4) Epoch 7, batch 2700, loss[loss=0.2284, simple_loss=0.2977, pruned_loss=0.0795, over 19397.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.327, pruned_loss=0.09769, over 3818580.28 frames. ], batch size: 48, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:51:56,257 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.05 vs. limit=5.0 +2023-04-01 09:52:03,511 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9507, 1.2615, 1.4587, 1.5234, 2.6084, 1.0528, 1.9045, 2.7559], + device='cuda:3'), covar=tensor([0.0478, 0.2470, 0.2370, 0.1515, 0.0719, 0.2275, 0.1176, 0.0439], + device='cuda:3'), in_proj_covar=tensor([0.0308, 0.0316, 0.0325, 0.0294, 0.0323, 0.0317, 0.0300, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:52:19,230 INFO [train.py:903] (3/4) Epoch 7, batch 2750, loss[loss=0.2967, simple_loss=0.3462, pruned_loss=0.1236, over 13829.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3269, pruned_loss=0.09777, over 3812310.69 frames. ], batch size: 137, lr: 1.20e-02, grad_scale: 4.0 +2023-04-01 09:52:55,475 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.869e+02 8.299e+02 1.091e+03 2.331e+03, threshold=1.660e+03, percent-clipped=8.0 +2023-04-01 09:53:20,556 INFO [train.py:903] (3/4) Epoch 7, batch 2800, loss[loss=0.3116, simple_loss=0.3694, pruned_loss=0.1269, over 19568.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3274, pruned_loss=0.098, over 3815695.43 frames. 
], batch size: 61, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:07,596 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:54:22,952 INFO [train.py:903] (3/4) Epoch 7, batch 2850, loss[loss=0.2469, simple_loss=0.3118, pruned_loss=0.09096, over 19746.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3276, pruned_loss=0.09844, over 3824441.08 frames. ], batch size: 51, lr: 1.20e-02, grad_scale: 8.0 +2023-04-01 09:54:59,106 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.316e+02 8.520e+02 1.005e+03 1.613e+03, threshold=1.704e+03, percent-clipped=0.0 +2023-04-01 09:55:25,666 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 09:55:26,752 INFO [train.py:903] (3/4) Epoch 7, batch 2900, loss[loss=0.2307, simple_loss=0.3113, pruned_loss=0.07509, over 19521.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3279, pruned_loss=0.09818, over 3832154.51 frames. ], batch size: 54, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:55:36,426 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:56:28,084 INFO [train.py:903] (3/4) Epoch 7, batch 2950, loss[loss=0.3309, simple_loss=0.3777, pruned_loss=0.1421, over 17318.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.327, pruned_loss=0.09759, over 3838298.68 frames. ], batch size: 101, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:56:39,834 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:57:04,993 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.923e+02 7.218e+02 9.278e+02 2.092e+03, threshold=1.444e+03, percent-clipped=1.0 +2023-04-01 09:57:25,768 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 09:57:30,518 INFO [train.py:903] (3/4) Epoch 7, batch 3000, loss[loss=0.2256, simple_loss=0.3104, pruned_loss=0.07037, over 19665.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3258, pruned_loss=0.09676, over 3845105.80 frames. ], batch size: 55, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:57:30,518 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 09:57:43,097 INFO [train.py:937] (3/4) Epoch 7, validation: loss=0.1917, simple_loss=0.2919, pruned_loss=0.04574, over 944034.00 frames. +2023-04-01 09:57:43,098 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 09:57:43,657 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7607, 1.8400, 1.8984, 2.6049, 1.6526, 2.2966, 2.3947, 1.8900], + device='cuda:3'), covar=tensor([0.2538, 0.2294, 0.1086, 0.1049, 0.2254, 0.0924, 0.2222, 0.1945], + device='cuda:3'), in_proj_covar=tensor([0.0700, 0.0704, 0.0596, 0.0834, 0.0719, 0.0613, 0.0733, 0.0639], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 09:57:49,866 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 09:57:54,981 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:57:58,832 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7791, 1.3949, 1.3444, 1.7913, 1.5782, 1.5480, 1.5219, 1.7195], + device='cuda:3'), covar=tensor([0.0883, 0.1569, 0.1420, 0.0905, 0.1141, 0.0538, 0.0997, 0.0640], + device='cuda:3'), in_proj_covar=tensor([0.0244, 0.0358, 0.0286, 0.0236, 0.0298, 0.0247, 0.0268, 0.0231], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 09:58:13,816 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 09:58:18,950 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8808, 1.9418, 1.9428, 2.9026, 1.8382, 2.6120, 2.5572, 1.9205], + device='cuda:3'), covar=tensor([0.2727, 0.2293, 0.1144, 0.1213, 0.2601, 0.0966, 0.2167, 0.1973], + device='cuda:3'), in_proj_covar=tensor([0.0694, 0.0701, 0.0594, 0.0834, 0.0716, 0.0611, 0.0730, 0.0637], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 09:58:31,227 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9750, 1.0315, 1.4236, 0.5405, 2.2398, 2.4031, 2.1617, 2.6173], + device='cuda:3'), covar=tensor([0.1474, 0.3395, 0.2907, 0.2212, 0.0440, 0.0233, 0.0383, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0284, 0.0313, 0.0245, 0.0203, 0.0133, 0.0204, 0.0162], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 09:58:46,650 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2275, 1.3078, 1.8193, 1.4426, 2.6906, 2.1410, 2.7308, 1.0728], + device='cuda:3'), covar=tensor([0.1990, 0.3426, 0.1855, 0.1585, 0.1236, 0.1680, 0.1390, 0.3161], + device='cuda:3'), in_proj_covar=tensor([0.0449, 0.0519, 0.0515, 0.0405, 0.0557, 0.0451, 0.0624, 0.0453], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 09:58:47,331 INFO [train.py:903] (3/4) Epoch 7, batch 3050, loss[loss=0.238, simple_loss=0.3133, pruned_loss=0.08132, over 19275.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3267, pruned_loss=0.09722, over 3848254.71 frames. ], batch size: 66, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 09:59:24,084 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+02 5.923e+02 7.306e+02 8.819e+02 1.422e+03, threshold=1.461e+03, percent-clipped=0.0 +2023-04-01 09:59:50,239 INFO [train.py:903] (3/4) Epoch 7, batch 3100, loss[loss=0.2911, simple_loss=0.3524, pruned_loss=0.1149, over 19582.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3259, pruned_loss=0.09647, over 3851087.75 frames. ], batch size: 61, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:00:50,926 INFO [train.py:903] (3/4) Epoch 7, batch 3150, loss[loss=0.2379, simple_loss=0.2958, pruned_loss=0.08997, over 19486.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3266, pruned_loss=0.09741, over 3844128.53 frames. 
], batch size: 49, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:00:53,389 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0829, 1.1721, 1.5959, 0.8917, 2.3774, 2.9723, 2.7298, 3.1530], + device='cuda:3'), covar=tensor([0.1484, 0.3176, 0.2771, 0.2191, 0.0460, 0.0191, 0.0272, 0.0179], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0286, 0.0316, 0.0250, 0.0205, 0.0134, 0.0207, 0.0163], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 10:00:56,512 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2083, 1.1821, 1.8529, 1.5964, 2.8967, 4.1959, 4.1882, 4.6340], + device='cuda:3'), covar=tensor([0.1592, 0.3309, 0.2832, 0.1808, 0.0510, 0.0194, 0.0182, 0.0107], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0286, 0.0316, 0.0250, 0.0205, 0.0134, 0.0207, 0.0163], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 10:01:18,567 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 10:01:26,087 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.470e+02 8.010e+02 1.018e+03 2.357e+03, threshold=1.602e+03, percent-clipped=4.0 +2023-04-01 10:01:29,402 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:01:51,333 INFO [train.py:903] (3/4) Epoch 7, batch 3200, loss[loss=0.2544, simple_loss=0.3289, pruned_loss=0.08991, over 19523.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3266, pruned_loss=0.09736, over 3842384.17 frames. ], batch size: 54, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:02:51,383 INFO [train.py:903] (3/4) Epoch 7, batch 3250, loss[loss=0.2707, simple_loss=0.3418, pruned_loss=0.09981, over 19601.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3288, pruned_loss=0.09852, over 3834080.44 frames. ], batch size: 57, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:05,873 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 10:03:25,181 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-01 10:03:27,995 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 6.408e+02 8.261e+02 1.024e+03 1.757e+03, threshold=1.652e+03, percent-clipped=4.0 +2023-04-01 10:03:28,442 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44247.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:49,224 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44264.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:03:53,343 INFO [train.py:903] (3/4) Epoch 7, batch 3300, loss[loss=0.2471, simple_loss=0.3185, pruned_loss=0.08788, over 19600.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.328, pruned_loss=0.09843, over 3821227.54 frames. ], batch size: 52, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:03:59,186 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:00,238 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-01 10:04:00,641 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44272.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:04:57,179 INFO [train.py:903] (3/4) Epoch 7, batch 3350, loss[loss=0.2585, simple_loss=0.3276, pruned_loss=0.09476, over 19661.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3285, pruned_loss=0.09876, over 3829074.28 frames. ], batch size: 55, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:05:01,676 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44321.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:05:32,247 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 6.124e+02 7.865e+02 1.014e+03 2.362e+03, threshold=1.573e+03, percent-clipped=3.0 +2023-04-01 10:05:58,464 INFO [train.py:903] (3/4) Epoch 7, batch 3400, loss[loss=0.2956, simple_loss=0.3604, pruned_loss=0.1154, over 19478.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3274, pruned_loss=0.09817, over 3831533.59 frames. ], batch size: 64, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:06:20,848 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:01,357 INFO [train.py:903] (3/4) Epoch 7, batch 3450, loss[loss=0.1982, simple_loss=0.2683, pruned_loss=0.06408, over 18783.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3269, pruned_loss=0.09798, over 3830003.01 frames. ], batch size: 41, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:07:06,915 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 10:07:08,515 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2308, 1.2409, 1.6311, 1.4593, 2.4264, 2.0681, 2.6008, 1.0555], + device='cuda:3'), covar=tensor([0.2207, 0.3771, 0.2040, 0.1796, 0.1335, 0.1837, 0.1399, 0.3188], + device='cuda:3'), in_proj_covar=tensor([0.0458, 0.0529, 0.0522, 0.0414, 0.0572, 0.0463, 0.0638, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 10:07:25,050 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:07:39,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 5.624e+02 7.209e+02 9.228e+02 1.461e+03, threshold=1.442e+03, percent-clipped=0.0 +2023-04-01 10:08:03,657 INFO [train.py:903] (3/4) Epoch 7, batch 3500, loss[loss=0.2012, simple_loss=0.2691, pruned_loss=0.06665, over 19755.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.328, pruned_loss=0.09836, over 3828871.90 frames. ], batch size: 46, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:07,866 INFO [train.py:903] (3/4) Epoch 7, batch 3550, loss[loss=0.286, simple_loss=0.3538, pruned_loss=0.1091, over 19602.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3299, pruned_loss=0.09948, over 3820967.22 frames. ], batch size: 61, lr: 1.19e-02, grad_scale: 4.0 +2023-04-01 10:09:10,474 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:37,863 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. 
limit=2.0 +2023-04-01 10:09:40,619 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:40,732 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44545.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:09:43,701 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 5.736e+02 7.487e+02 9.426e+02 2.431e+03, threshold=1.497e+03, percent-clipped=5.0 +2023-04-01 10:10:10,018 INFO [train.py:903] (3/4) Epoch 7, batch 3600, loss[loss=0.3166, simple_loss=0.3664, pruned_loss=0.1334, over 13405.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3301, pruned_loss=0.1001, over 3804605.13 frames. ], batch size: 136, lr: 1.19e-02, grad_scale: 8.0 +2023-04-01 10:10:10,278 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:12,085 INFO [train.py:903] (3/4) Epoch 7, batch 3650, loss[loss=0.2465, simple_loss=0.3221, pruned_loss=0.08542, over 19658.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.33, pruned_loss=0.1001, over 3795291.33 frames. ], batch size: 60, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:11:16,252 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.76 vs. limit=5.0 +2023-04-01 10:11:43,265 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:11:49,789 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 6.222e+02 7.769e+02 9.647e+02 2.431e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:12:12,918 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:13,723 INFO [train.py:903] (3/4) Epoch 7, batch 3700, loss[loss=0.2316, simple_loss=0.3107, pruned_loss=0.07622, over 19736.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.329, pruned_loss=0.0994, over 3803788.21 frames. ], batch size: 63, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:12:45,621 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44692.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:12:59,043 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7622, 1.6319, 1.2833, 1.7199, 1.7989, 1.4622, 1.3383, 1.5828], + device='cuda:3'), covar=tensor([0.0965, 0.1671, 0.1796, 0.1083, 0.1444, 0.1048, 0.1439, 0.1020], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0358, 0.0288, 0.0236, 0.0303, 0.0244, 0.0270, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 10:13:16,193 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:13:16,967 INFO [train.py:903] (3/4) Epoch 7, batch 3750, loss[loss=0.2613, simple_loss=0.3359, pruned_loss=0.09341, over 19651.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3298, pruned_loss=0.09987, over 3802710.45 frames. ], batch size: 55, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:13:53,509 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+02 6.149e+02 7.382e+02 9.468e+02 1.650e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-04-01 10:14:18,149 INFO [train.py:903] (3/4) Epoch 7, batch 3800, loss[loss=0.2246, simple_loss=0.2921, pruned_loss=0.0786, over 19787.00 frames. 
], tot_loss[loss=0.2629, simple_loss=0.3281, pruned_loss=0.0989, over 3813545.40 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:14:50,705 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 10:15:19,313 INFO [train.py:903] (3/4) Epoch 7, batch 3850, loss[loss=0.2735, simple_loss=0.3477, pruned_loss=0.09967, over 19377.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3284, pruned_loss=0.09857, over 3812248.17 frames. ], batch size: 70, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:15:57,068 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 6.658e+02 8.285e+02 1.095e+03 3.075e+03, threshold=1.657e+03, percent-clipped=10.0 +2023-04-01 10:16:20,985 INFO [train.py:903] (3/4) Epoch 7, batch 3900, loss[loss=0.291, simple_loss=0.3499, pruned_loss=0.116, over 19786.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3276, pruned_loss=0.09841, over 3813636.73 frames. ], batch size: 56, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:16:48,347 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44889.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:15,639 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:17:24,388 INFO [train.py:903] (3/4) Epoch 7, batch 3950, loss[loss=0.257, simple_loss=0.3157, pruned_loss=0.09917, over 19723.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3272, pruned_loss=0.09791, over 3816449.26 frames. ], batch size: 51, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:17:29,135 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 10:18:00,740 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+02 5.655e+02 7.377e+02 9.527e+02 2.304e+03, threshold=1.475e+03, percent-clipped=3.0 +2023-04-01 10:18:26,675 INFO [train.py:903] (3/4) Epoch 7, batch 4000, loss[loss=0.2598, simple_loss=0.3323, pruned_loss=0.09364, over 19579.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3278, pruned_loss=0.09793, over 3818841.39 frames. ], batch size: 61, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:11,998 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45004.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:19:15,206 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 10:19:22,027 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 10:19:27,786 INFO [train.py:903] (3/4) Epoch 7, batch 4050, loss[loss=0.2621, simple_loss=0.3312, pruned_loss=0.09656, over 18336.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3274, pruned_loss=0.09732, over 3819761.71 frames. ], batch size: 84, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:19:39,412 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:05,076 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 6.019e+02 6.941e+02 8.524e+02 1.564e+03, threshold=1.388e+03, percent-clipped=1.0 +2023-04-01 10:20:28,972 INFO [train.py:903] (3/4) Epoch 7, batch 4100, loss[loss=0.2829, simple_loss=0.3503, pruned_loss=0.1077, over 17284.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3268, pruned_loss=0.09724, over 3826195.67 frames. 
], batch size: 101, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:20:36,306 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:20:54,203 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8476, 4.2317, 4.5241, 4.4797, 1.7518, 4.1558, 3.7131, 4.1159], + device='cuda:3'), covar=tensor([0.1108, 0.0699, 0.0505, 0.0458, 0.4229, 0.0468, 0.0526, 0.1025], + device='cuda:3'), in_proj_covar=tensor([0.0589, 0.0512, 0.0696, 0.0570, 0.0647, 0.0437, 0.0441, 0.0650], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 10:21:05,613 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 10:21:07,164 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45099.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:21:30,596 INFO [train.py:903] (3/4) Epoch 7, batch 4150, loss[loss=0.2188, simple_loss=0.2826, pruned_loss=0.0775, over 19751.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3275, pruned_loss=0.09816, over 3827442.65 frames. ], batch size: 46, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:21:41,539 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 10:22:07,171 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 6.430e+02 8.235e+02 1.004e+03 2.607e+03, threshold=1.647e+03, percent-clipped=6.0 +2023-04-01 10:22:32,968 INFO [train.py:903] (3/4) Epoch 7, batch 4200, loss[loss=0.2421, simple_loss=0.3039, pruned_loss=0.09014, over 19629.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3271, pruned_loss=0.09806, over 3817766.28 frames. ], batch size: 50, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:22:38,329 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 10:23:33,351 INFO [train.py:903] (3/4) Epoch 7, batch 4250, loss[loss=0.2138, simple_loss=0.2808, pruned_loss=0.07338, over 19089.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3268, pruned_loss=0.09784, over 3816865.81 frames. ], batch size: 42, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:23:49,621 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 10:23:53,861 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=2.00 vs. limit=2.0 +2023-04-01 10:24:01,333 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 10:24:12,489 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.934e+02 6.110e+02 8.094e+02 1.072e+03 2.309e+03, threshold=1.619e+03, percent-clipped=5.0 +2023-04-01 10:24:27,363 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:24:37,358 INFO [train.py:903] (3/4) Epoch 7, batch 4300, loss[loss=0.2615, simple_loss=0.3321, pruned_loss=0.09546, over 19530.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3269, pruned_loss=0.09743, over 3821595.67 frames. 
], batch size: 54, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:24:57,141 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45283.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:00,280 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45285.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:03,710 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4479, 1.1578, 1.0969, 1.3839, 1.1870, 1.2870, 1.1918, 1.3470], + device='cuda:3'), covar=tensor([0.0956, 0.1213, 0.1407, 0.0868, 0.0955, 0.0545, 0.1054, 0.0691], + device='cuda:3'), in_proj_covar=tensor([0.0244, 0.0350, 0.0282, 0.0234, 0.0297, 0.0241, 0.0265, 0.0228], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 10:25:27,074 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:25:32,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 10:25:39,641 INFO [train.py:903] (3/4) Epoch 7, batch 4350, loss[loss=0.2919, simple_loss=0.3508, pruned_loss=0.1165, over 19405.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3274, pruned_loss=0.09794, over 3823411.32 frames. ], batch size: 70, lr: 1.18e-02, grad_scale: 8.0 +2023-04-01 10:26:16,052 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 6.479e+02 7.493e+02 1.002e+03 2.532e+03, threshold=1.499e+03, percent-clipped=5.0 +2023-04-01 10:26:34,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2787, 3.0798, 1.8038, 2.3796, 1.7905, 2.3234, 0.8057, 2.1093], + device='cuda:3'), covar=tensor([0.0413, 0.0400, 0.0497, 0.0652, 0.0835, 0.0801, 0.0845, 0.0750], + device='cuda:3'), in_proj_covar=tensor([0.0310, 0.0313, 0.0302, 0.0333, 0.0408, 0.0325, 0.0289, 0.0311], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 10:26:39,945 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45366.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:26:42,860 INFO [train.py:903] (3/4) Epoch 7, batch 4400, loss[loss=0.2339, simple_loss=0.3054, pruned_loss=0.08117, over 19689.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3284, pruned_loss=0.09866, over 3817085.43 frames. ], batch size: 53, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:26:48,513 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-01 10:27:06,062 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 10:27:14,901 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 10:27:44,672 INFO [train.py:903] (3/4) Epoch 7, batch 4450, loss[loss=0.2199, simple_loss=0.2905, pruned_loss=0.07465, over 19425.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3272, pruned_loss=0.0979, over 3829022.00 frames. 
], batch size: 48, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:27:44,841 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:16,084 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:28:22,922 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.630e+02 6.151e+02 7.769e+02 9.586e+02 4.695e+03, threshold=1.554e+03, percent-clipped=4.0 +2023-04-01 10:28:46,110 INFO [train.py:903] (3/4) Epoch 7, batch 4500, loss[loss=0.304, simple_loss=0.3649, pruned_loss=0.1216, over 19662.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3273, pruned_loss=0.09811, over 3823307.71 frames. ], batch size: 60, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:29:09,233 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-01 10:29:48,542 INFO [train.py:903] (3/4) Epoch 7, batch 4550, loss[loss=0.2995, simple_loss=0.3579, pruned_loss=0.1205, over 19050.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3264, pruned_loss=0.09776, over 3841467.89 frames. ], batch size: 75, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:30:00,876 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 10:30:09,149 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:23,718 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 10:30:27,041 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.783e+02 6.251e+02 7.662e+02 9.726e+02 2.453e+03, threshold=1.532e+03, percent-clipped=6.0 +2023-04-01 10:30:38,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:30:51,041 INFO [train.py:903] (3/4) Epoch 7, batch 4600, loss[loss=0.2429, simple_loss=0.3173, pruned_loss=0.08426, over 19657.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3276, pruned_loss=0.0985, over 3824957.03 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:31:54,318 INFO [train.py:903] (3/4) Epoch 7, batch 4650, loss[loss=0.2182, simple_loss=0.2926, pruned_loss=0.07193, over 19396.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3265, pruned_loss=0.09703, over 3831204.08 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:32:08,739 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3605, 1.4800, 2.0474, 1.5407, 3.2543, 2.7363, 3.5292, 1.4656], + device='cuda:3'), covar=tensor([0.1967, 0.3488, 0.1998, 0.1603, 0.1331, 0.1536, 0.1401, 0.3130], + device='cuda:3'), in_proj_covar=tensor([0.0451, 0.0530, 0.0517, 0.0412, 0.0564, 0.0458, 0.0625, 0.0453], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 10:32:11,395 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-01 10:32:11,865 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 10:32:24,512 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 10:32:33,274 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.859e+02 7.403e+02 8.847e+02 2.429e+03, threshold=1.481e+03, percent-clipped=2.0 +2023-04-01 10:32:55,952 INFO [train.py:903] (3/4) Epoch 7, batch 4700, loss[loss=0.2043, simple_loss=0.2784, pruned_loss=0.06515, over 19775.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3269, pruned_loss=0.09738, over 3810246.06 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:33:14,950 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4239, 1.5521, 2.0541, 1.6294, 3.2281, 2.8288, 3.5037, 1.4577], + device='cuda:3'), covar=tensor([0.1934, 0.3265, 0.2015, 0.1532, 0.1398, 0.1458, 0.1406, 0.3024], + device='cuda:3'), in_proj_covar=tensor([0.0460, 0.0537, 0.0525, 0.0418, 0.0573, 0.0465, 0.0633, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 10:33:18,900 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 10:33:47,817 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45710.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:33:51,389 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2345, 1.3226, 1.7933, 1.7127, 3.0165, 4.5824, 4.5635, 5.0482], + device='cuda:3'), covar=tensor([0.1504, 0.2948, 0.2746, 0.1729, 0.0466, 0.0163, 0.0154, 0.0087], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0285, 0.0316, 0.0246, 0.0205, 0.0135, 0.0204, 0.0166], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 10:33:58,624 INFO [train.py:903] (3/4) Epoch 7, batch 4750, loss[loss=0.3292, simple_loss=0.3831, pruned_loss=0.1377, over 19736.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3273, pruned_loss=0.09777, over 3810044.75 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:34:01,234 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:34:36,017 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 6.576e+02 8.008e+02 9.322e+02 2.457e+03, threshold=1.602e+03, percent-clipped=6.0 +2023-04-01 10:35:01,472 INFO [train.py:903] (3/4) Epoch 7, batch 4800, loss[loss=0.282, simple_loss=0.3414, pruned_loss=0.1114, over 19682.00 frames. ], tot_loss[loss=0.261, simple_loss=0.327, pruned_loss=0.0975, over 3807302.08 frames. ], batch size: 53, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:35:19,785 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 10:35:26,502 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:29,646 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,127 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:35:58,157 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:02,102 INFO [train.py:903] (3/4) Epoch 7, batch 4850, loss[loss=0.2598, simple_loss=0.3253, pruned_loss=0.09721, over 19533.00 frames. 
], tot_loss[loss=0.2623, simple_loss=0.3279, pruned_loss=0.09832, over 3807979.00 frames. ], batch size: 56, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:36:10,624 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45825.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 10:36:25,112 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 10:36:27,424 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:36:38,173 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 10:36:40,693 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.989e+02 6.823e+02 8.718e+02 1.115e+03 2.265e+03, threshold=1.744e+03, percent-clipped=6.0 +2023-04-01 10:36:43,213 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6624, 1.3494, 1.4263, 1.9317, 1.3575, 1.8156, 1.8441, 1.8303], + device='cuda:3'), covar=tensor([0.0762, 0.1014, 0.1009, 0.0859, 0.0970, 0.0711, 0.0917, 0.0620], + device='cuda:3'), in_proj_covar=tensor([0.0219, 0.0238, 0.0233, 0.0265, 0.0254, 0.0220, 0.0219, 0.0215], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:3') +2023-04-01 10:36:48,782 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 10:36:54,244 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 10:36:54,266 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 10:37:03,557 INFO [train.py:903] (3/4) Epoch 7, batch 4900, loss[loss=0.2243, simple_loss=0.2924, pruned_loss=0.0781, over 19392.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3275, pruned_loss=0.09812, over 3819349.28 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:37:04,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 10:37:14,481 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:37:19,166 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-01 10:37:26,126 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 10:37:31,690 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-01 10:38:05,107 INFO [train.py:903] (3/4) Epoch 7, batch 4950, loss[loss=0.3168, simple_loss=0.3704, pruned_loss=0.1316, over 17343.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3267, pruned_loss=0.09709, over 3809831.01 frames. ], batch size: 101, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:38:24,761 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 10:38:44,269 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+02 6.277e+02 7.738e+02 9.356e+02 2.304e+03, threshold=1.548e+03, percent-clipped=1.0 +2023-04-01 10:38:47,738 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-01 10:39:09,684 INFO [train.py:903] (3/4) Epoch 7, batch 5000, loss[loss=0.2329, simple_loss=0.3108, pruned_loss=0.07757, over 19597.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.327, pruned_loss=0.09717, over 3820765.71 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:39:20,561 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 10:39:30,931 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 10:40:12,335 INFO [train.py:903] (3/4) Epoch 7, batch 5050, loss[loss=0.2425, simple_loss=0.3158, pruned_loss=0.08463, over 19763.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3267, pruned_loss=0.09677, over 3816265.52 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 8.0 +2023-04-01 10:40:49,620 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 10:40:51,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 6.208e+02 7.894e+02 9.516e+02 2.052e+03, threshold=1.579e+03, percent-clipped=3.0 +2023-04-01 10:40:58,131 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 10:41:03,344 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5435, 1.5950, 1.6615, 2.0518, 1.3396, 1.6723, 2.0865, 1.5687], + device='cuda:3'), covar=tensor([0.2307, 0.1681, 0.1051, 0.1010, 0.1898, 0.0998, 0.2188, 0.1863], + device='cuda:3'), in_proj_covar=tensor([0.0710, 0.0712, 0.0604, 0.0854, 0.0724, 0.0628, 0.0746, 0.0651], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 10:41:08,568 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46064.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:41:13,139 INFO [train.py:903] (3/4) Epoch 7, batch 5100, loss[loss=0.2283, simple_loss=0.3036, pruned_loss=0.07646, over 19656.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3267, pruned_loss=0.097, over 3808652.05 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:41:24,899 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 10:41:28,149 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 10:41:28,568 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46081.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:41:32,522 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 10:42:00,060 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46106.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 10:42:13,344 INFO [train.py:903] (3/4) Epoch 7, batch 5150, loss[loss=0.2401, simple_loss=0.2987, pruned_loss=0.09072, over 19740.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3264, pruned_loss=0.09683, over 3810571.53 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 4.0 +2023-04-01 10:42:26,334 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 10:42:37,815 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46136.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:42:53,640 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.315e+02 6.102e+02 7.228e+02 8.853e+02 1.806e+03, threshold=1.446e+03, percent-clipped=2.0 +2023-04-01 10:43:00,542 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 10:43:15,665 INFO [train.py:903] (3/4) Epoch 7, batch 5200, loss[loss=0.2964, simple_loss=0.3719, pruned_loss=0.1105, over 18311.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3265, pruned_loss=0.09678, over 3808288.67 frames. ], batch size: 83, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:43:30,896 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46179.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:43:31,689 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 10:44:18,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 10:44:19,700 INFO [train.py:903] (3/4) Epoch 7, batch 5250, loss[loss=0.2882, simple_loss=0.353, pruned_loss=0.1117, over 19392.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3271, pruned_loss=0.09675, over 3813935.87 frames. ], batch size: 70, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:44:23,438 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:44:32,721 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3726, 1.5051, 1.8113, 1.3805, 2.4027, 2.6481, 2.6089, 2.8230], + device='cuda:3'), covar=tensor([0.1204, 0.2245, 0.2105, 0.1880, 0.0752, 0.0577, 0.0222, 0.0204], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0284, 0.0316, 0.0245, 0.0203, 0.0133, 0.0202, 0.0165], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 10:44:58,428 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.945e+02 5.998e+02 7.245e+02 8.826e+02 1.462e+03, threshold=1.449e+03, percent-clipped=1.0 +2023-04-01 10:45:00,951 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:45:21,290 INFO [train.py:903] (3/4) Epoch 7, batch 5300, loss[loss=0.255, simple_loss=0.3219, pruned_loss=0.09405, over 19470.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3275, pruned_loss=0.0969, over 3811466.93 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:45:39,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 10:45:54,314 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46294.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:46:22,054 INFO [train.py:903] (3/4) Epoch 7, batch 5350, loss[loss=0.2377, simple_loss=0.2983, pruned_loss=0.08857, over 19726.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.328, pruned_loss=0.09713, over 3825308.93 frames. 
], batch size: 45, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:46:47,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:47:00,316 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 10:47:03,769 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.681e+02 7.157e+02 9.578e+02 3.754e+03, threshold=1.431e+03, percent-clipped=4.0 +2023-04-01 10:47:26,532 INFO [train.py:903] (3/4) Epoch 7, batch 5400, loss[loss=0.2042, simple_loss=0.276, pruned_loss=0.06625, over 19724.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3264, pruned_loss=0.09643, over 3822885.16 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:47:58,983 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2195, 1.1859, 1.3935, 1.2948, 1.7833, 1.7808, 1.9486, 0.5688], + device='cuda:3'), covar=tensor([0.1830, 0.2947, 0.1705, 0.1585, 0.1077, 0.1608, 0.0959, 0.2841], + device='cuda:3'), in_proj_covar=tensor([0.0454, 0.0530, 0.0521, 0.0414, 0.0569, 0.0459, 0.0636, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 10:48:29,980 INFO [train.py:903] (3/4) Epoch 7, batch 5450, loss[loss=0.2838, simple_loss=0.347, pruned_loss=0.1103, over 19778.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3274, pruned_loss=0.09754, over 3824012.16 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:48:49,860 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:10,731 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+02 7.102e+02 8.361e+02 1.036e+03 2.875e+03, threshold=1.672e+03, percent-clipped=7.0 +2023-04-01 10:49:23,157 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46460.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:49:31,707 INFO [train.py:903] (3/4) Epoch 7, batch 5500, loss[loss=0.2664, simple_loss=0.337, pruned_loss=0.0979, over 19550.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3276, pruned_loss=0.09757, over 3832371.91 frames. ], batch size: 61, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:49:58,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 10:50:20,878 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:50:33,173 INFO [train.py:903] (3/4) Epoch 7, batch 5550, loss[loss=0.2473, simple_loss=0.3213, pruned_loss=0.08661, over 19687.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3273, pruned_loss=0.09668, over 3832189.05 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 4.0 +2023-04-01 10:50:43,598 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 10:50:51,465 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:51:15,056 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.541e+02 5.815e+02 7.044e+02 9.146e+02 3.032e+03, threshold=1.409e+03, percent-clipped=3.0 +2023-04-01 10:51:21,515 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. 
limit=2.0 +2023-04-01 10:51:30,166 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6862, 1.3082, 1.4139, 1.7597, 3.1530, 1.0979, 2.2483, 3.3673], + device='cuda:3'), covar=tensor([0.0372, 0.2638, 0.2689, 0.1558, 0.0705, 0.2451, 0.1137, 0.0362], + device='cuda:3'), in_proj_covar=tensor([0.0315, 0.0319, 0.0329, 0.0299, 0.0328, 0.0322, 0.0297, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 10:51:31,092 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 10:51:35,755 INFO [train.py:903] (3/4) Epoch 7, batch 5600, loss[loss=0.2489, simple_loss=0.3198, pruned_loss=0.08901, over 19656.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3278, pruned_loss=0.09688, over 3829846.04 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:06,750 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46592.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:32,980 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:39,566 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46617.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:52:40,323 INFO [train.py:903] (3/4) Epoch 7, batch 5650, loss[loss=0.212, simple_loss=0.2818, pruned_loss=0.07115, over 19468.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3275, pruned_loss=0.09652, over 3832297.56 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:52:42,901 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:03,553 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46638.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:53:12,134 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 10:53:20,361 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.976e+02 7.674e+02 9.559e+02 1.706e+03, threshold=1.535e+03, percent-clipped=4.0 +2023-04-01 10:53:29,236 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 10:53:42,092 INFO [train.py:903] (3/4) Epoch 7, batch 5700, loss[loss=0.303, simple_loss=0.3691, pruned_loss=0.1185, over 19672.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3256, pruned_loss=0.09547, over 3827280.29 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:53:58,471 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4852, 1.0728, 1.2723, 1.1114, 2.1222, 0.8629, 1.7514, 2.2462], + device='cuda:3'), covar=tensor([0.0635, 0.2532, 0.2424, 0.1591, 0.0868, 0.2097, 0.0987, 0.0505], + device='cuda:3'), in_proj_covar=tensor([0.0314, 0.0316, 0.0325, 0.0297, 0.0326, 0.0321, 0.0296, 0.0315], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 10:54:43,277 INFO [train.py:903] (3/4) Epoch 7, batch 5750, loss[loss=0.2439, simple_loss=0.3244, pruned_loss=0.08171, over 19652.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.325, pruned_loss=0.09529, over 3822899.76 frames. 
], batch size: 58, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:54:45,649 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 10:54:55,172 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 10:54:59,793 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 10:55:01,307 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 10:55:25,496 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+02 6.148e+02 7.021e+02 8.191e+02 1.564e+03, threshold=1.404e+03, percent-clipped=1.0 +2023-04-01 10:55:28,135 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:55:45,400 INFO [train.py:903] (3/4) Epoch 7, batch 5800, loss[loss=0.2534, simple_loss=0.3257, pruned_loss=0.09058, over 19539.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3243, pruned_loss=0.09512, over 3811809.15 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:55:47,400 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 10:56:49,194 INFO [train.py:903] (3/4) Epoch 7, batch 5850, loss[loss=0.2641, simple_loss=0.3252, pruned_loss=0.1015, over 19604.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3238, pruned_loss=0.09498, over 3812214.92 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:02,093 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:57:09,250 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2786, 1.0714, 1.2056, 1.3804, 0.8856, 1.3808, 1.3646, 1.2484], + device='cuda:3'), covar=tensor([0.0869, 0.1157, 0.1111, 0.0708, 0.0989, 0.0832, 0.0822, 0.0775], + device='cuda:3'), in_proj_covar=tensor([0.0218, 0.0237, 0.0233, 0.0263, 0.0256, 0.0218, 0.0213, 0.0212], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 10:57:09,591 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-01 10:57:26,224 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0463, 2.2774, 2.4101, 2.4108, 1.0793, 2.2133, 2.0601, 2.1946], + device='cuda:3'), covar=tensor([0.1086, 0.1991, 0.0618, 0.0638, 0.3468, 0.0882, 0.0577, 0.1000], + device='cuda:3'), in_proj_covar=tensor([0.0579, 0.0508, 0.0694, 0.0571, 0.0640, 0.0442, 0.0434, 0.0649], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 10:57:29,389 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.374e+02 9.303e+02 2.879e+03, threshold=1.475e+03, percent-clipped=6.0 +2023-04-01 10:57:51,519 INFO [train.py:903] (3/4) Epoch 7, batch 5900, loss[loss=0.3024, simple_loss=0.3596, pruned_loss=0.1226, over 17272.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3257, pruned_loss=0.09633, over 3800687.10 frames. ], batch size: 101, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:57:57,275 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. 
Duration: 25.2444375 +2023-04-01 10:58:16,952 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 10:58:52,939 INFO [train.py:903] (3/4) Epoch 7, batch 5950, loss[loss=0.2575, simple_loss=0.3286, pruned_loss=0.09322, over 19357.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3264, pruned_loss=0.09674, over 3789189.30 frames. ], batch size: 70, lr: 1.16e-02, grad_scale: 8.0 +2023-04-01 10:59:05,782 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8679, 4.3524, 4.7016, 4.6163, 1.7111, 4.3286, 3.8050, 4.2561], + device='cuda:3'), covar=tensor([0.1172, 0.0683, 0.0505, 0.0458, 0.4637, 0.0540, 0.0536, 0.1130], + device='cuda:3'), in_proj_covar=tensor([0.0584, 0.0514, 0.0695, 0.0574, 0.0648, 0.0446, 0.0440, 0.0654], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 10:59:34,053 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.644e+02 7.805e+02 9.690e+02 1.794e+03, threshold=1.561e+03, percent-clipped=4.0 +2023-04-01 10:59:40,942 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:48,569 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 10:59:52,804 INFO [train.py:903] (3/4) Epoch 7, batch 6000, loss[loss=0.2287, simple_loss=0.2952, pruned_loss=0.08107, over 19394.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3256, pruned_loss=0.09634, over 3786457.69 frames. ], batch size: 48, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 10:59:52,805 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 11:00:03,700 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9857, 1.1899, 1.3072, 1.4468, 2.5524, 0.9095, 2.0004, 2.7919], + device='cuda:3'), covar=tensor([0.0407, 0.2725, 0.2817, 0.1820, 0.0660, 0.2563, 0.0964, 0.0373], + device='cuda:3'), in_proj_covar=tensor([0.0312, 0.0317, 0.0324, 0.0297, 0.0326, 0.0321, 0.0296, 0.0315], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:00:05,284 INFO [train.py:937] (3/4) Epoch 7, validation: loss=0.1903, simple_loss=0.2902, pruned_loss=0.04516, over 944034.00 frames. +2023-04-01 11:00:05,284 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 11:00:41,388 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1521, 3.5905, 2.0288, 2.1114, 3.1763, 1.8012, 1.1551, 1.9034], + device='cuda:3'), covar=tensor([0.0947, 0.0410, 0.0837, 0.0580, 0.0324, 0.0870, 0.0881, 0.0543], + device='cuda:3'), in_proj_covar=tensor([0.0284, 0.0283, 0.0316, 0.0238, 0.0225, 0.0309, 0.0286, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:00:57,569 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47009.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:09,016 INFO [train.py:903] (3/4) Epoch 7, batch 6050, loss[loss=0.201, simple_loss=0.273, pruned_loss=0.06453, over 19303.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3246, pruned_loss=0.09567, over 3785162.88 frames. 
], batch size: 44, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:01:30,905 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:01:49,944 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.317e+02 6.018e+02 7.577e+02 1.005e+03 2.434e+03, threshold=1.515e+03, percent-clipped=7.0 +2023-04-01 11:02:13,573 INFO [train.py:903] (3/4) Epoch 7, batch 6100, loss[loss=0.2737, simple_loss=0.3355, pruned_loss=0.106, over 19584.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3245, pruned_loss=0.09551, over 3788822.62 frames. ], batch size: 61, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:02:15,643 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 11:02:18,752 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:02:26,705 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:03:15,748 INFO [train.py:903] (3/4) Epoch 7, batch 6150, loss[loss=0.2711, simple_loss=0.3428, pruned_loss=0.09971, over 18154.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3243, pruned_loss=0.09504, over 3800621.67 frames. ], batch size: 83, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:03:16,150 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1185, 3.6296, 2.1518, 2.0643, 2.9997, 1.7694, 1.1834, 1.9974], + device='cuda:3'), covar=tensor([0.0887, 0.0327, 0.0694, 0.0559, 0.0412, 0.0834, 0.0848, 0.0538], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0284, 0.0314, 0.0239, 0.0226, 0.0312, 0.0288, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:03:41,641 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 11:03:56,565 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.986e+02 7.602e+02 9.492e+02 2.168e+03, threshold=1.520e+03, percent-clipped=5.0 +2023-04-01 11:04:15,665 INFO [train.py:903] (3/4) Epoch 7, batch 6200, loss[loss=0.2457, simple_loss=0.3057, pruned_loss=0.09281, over 19400.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3253, pruned_loss=0.09576, over 3811272.05 frames. ], batch size: 48, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:04:20,365 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:04:38,672 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2056, 1.2433, 1.6300, 1.4058, 2.3128, 1.9769, 2.3881, 0.7951], + device='cuda:3'), covar=tensor([0.1901, 0.3302, 0.1730, 0.1583, 0.1058, 0.1597, 0.1134, 0.3058], + device='cuda:3'), in_proj_covar=tensor([0.0451, 0.0536, 0.0527, 0.0421, 0.0570, 0.0465, 0.0633, 0.0463], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:05:17,526 INFO [train.py:903] (3/4) Epoch 7, batch 6250, loss[loss=0.251, simple_loss=0.3272, pruned_loss=0.0874, over 19668.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3257, pruned_loss=0.09563, over 3810782.29 frames. ], batch size: 53, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:05:47,059 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-01 11:05:57,377 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 6.379e+02 7.672e+02 9.726e+02 2.182e+03, threshold=1.534e+03, percent-clipped=2.0 +2023-04-01 11:06:08,352 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:06:19,225 INFO [train.py:903] (3/4) Epoch 7, batch 6300, loss[loss=0.2623, simple_loss=0.3371, pruned_loss=0.09379, over 19539.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3269, pruned_loss=0.09638, over 3819449.99 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:06:43,162 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:06:49,173 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2510, 1.3321, 1.6877, 1.4661, 2.4428, 2.0993, 2.6289, 1.0490], + device='cuda:3'), covar=tensor([0.1967, 0.3433, 0.1890, 0.1642, 0.1341, 0.1721, 0.1460, 0.3090], + device='cuda:3'), in_proj_covar=tensor([0.0450, 0.0538, 0.0527, 0.0420, 0.0571, 0.0463, 0.0630, 0.0461], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:07:21,231 INFO [train.py:903] (3/4) Epoch 7, batch 6350, loss[loss=0.2701, simple_loss=0.3448, pruned_loss=0.09766, over 19702.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3264, pruned_loss=0.09628, over 3826622.54 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:07:26,539 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-01 11:07:33,241 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:07:42,294 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47335.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:02,956 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 5.885e+02 7.911e+02 1.072e+03 3.322e+03, threshold=1.582e+03, percent-clipped=7.0 +2023-04-01 11:08:05,624 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:13,943 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47360.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:08:22,753 INFO [train.py:903] (3/4) Epoch 7, batch 6400, loss[loss=0.2596, simple_loss=0.3329, pruned_loss=0.09313, over 19617.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3269, pruned_loss=0.09654, over 3822292.70 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:09:00,242 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47397.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:09:25,579 INFO [train.py:903] (3/4) Epoch 7, batch 6450, loss[loss=0.2115, simple_loss=0.2807, pruned_loss=0.07119, over 19751.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3269, pruned_loss=0.0961, over 3809683.13 frames. ], batch size: 45, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:05,688 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 6.283e+02 7.588e+02 1.008e+03 1.535e+03, threshold=1.518e+03, percent-clipped=0.0 +2023-04-01 11:10:08,042 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-01 11:10:26,631 INFO [train.py:903] (3/4) Epoch 7, batch 6500, loss[loss=0.3641, simple_loss=0.3972, pruned_loss=0.1655, over 13450.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3258, pruned_loss=0.09552, over 3817426.80 frames. ], batch size: 135, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:10:29,888 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 11:11:27,816 INFO [train.py:903] (3/4) Epoch 7, batch 6550, loss[loss=0.2678, simple_loss=0.3171, pruned_loss=0.1092, over 18592.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3257, pruned_loss=0.09588, over 3821423.48 frames. ], batch size: 41, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:11:39,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7246, 1.3261, 1.4979, 1.6092, 3.2051, 1.0792, 2.2351, 3.4376], + device='cuda:3'), covar=tensor([0.0416, 0.2588, 0.2549, 0.1639, 0.0641, 0.2513, 0.1201, 0.0365], + device='cuda:3'), in_proj_covar=tensor([0.0315, 0.0317, 0.0322, 0.0298, 0.0324, 0.0320, 0.0297, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:11:58,295 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:12:06,377 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7101, 1.7595, 1.8533, 2.5554, 1.6704, 2.2069, 2.2915, 1.8362], + device='cuda:3'), covar=tensor([0.2671, 0.2301, 0.1167, 0.1140, 0.2434, 0.1072, 0.2399, 0.2071], + device='cuda:3'), in_proj_covar=tensor([0.0714, 0.0721, 0.0605, 0.0852, 0.0730, 0.0632, 0.0746, 0.0655], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:12:10,157 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+02 6.347e+02 8.071e+02 1.082e+03 2.174e+03, threshold=1.614e+03, percent-clipped=4.0 +2023-04-01 11:12:16,324 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4612, 1.2377, 1.1311, 1.3679, 1.1373, 1.2763, 1.1469, 1.2938], + device='cuda:3'), covar=tensor([0.0973, 0.1193, 0.1381, 0.0831, 0.1083, 0.0565, 0.1140, 0.0790], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0358, 0.0288, 0.0239, 0.0304, 0.0243, 0.0273, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:12:29,849 INFO [train.py:903] (3/4) Epoch 7, batch 6600, loss[loss=0.2263, simple_loss=0.3044, pruned_loss=0.07409, over 19668.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3268, pruned_loss=0.09661, over 3819017.16 frames. ], batch size: 53, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:12:30,269 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:13,417 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:13:31,701 INFO [train.py:903] (3/4) Epoch 7, batch 6650, loss[loss=0.2296, simple_loss=0.2971, pruned_loss=0.0811, over 19481.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.326, pruned_loss=0.09645, over 3823469.01 frames. 
], batch size: 49, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:13:37,027 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0083, 1.7083, 1.5892, 2.1422, 1.7916, 1.8249, 1.6342, 1.9863], + device='cuda:3'), covar=tensor([0.0916, 0.1695, 0.1423, 0.0963, 0.1374, 0.0481, 0.1146, 0.0642], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0359, 0.0289, 0.0240, 0.0305, 0.0245, 0.0275, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:14:11,833 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 6.649e+02 7.986e+02 1.008e+03 1.623e+03, threshold=1.597e+03, percent-clipped=0.0 +2023-04-01 11:14:31,907 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-01 11:14:32,253 INFO [train.py:903] (3/4) Epoch 7, batch 6700, loss[loss=0.2079, simple_loss=0.2766, pruned_loss=0.06961, over 19720.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3251, pruned_loss=0.09623, over 3822541.72 frames. ], batch size: 45, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:21,500 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9190, 4.3129, 4.6473, 4.6523, 1.4819, 4.3377, 3.8017, 4.2115], + device='cuda:3'), covar=tensor([0.1249, 0.0699, 0.0589, 0.0494, 0.5040, 0.0401, 0.0592, 0.1142], + device='cuda:3'), in_proj_covar=tensor([0.0589, 0.0510, 0.0699, 0.0580, 0.0643, 0.0446, 0.0443, 0.0650], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 11:15:30,804 INFO [train.py:903] (3/4) Epoch 7, batch 6750, loss[loss=0.3119, simple_loss=0.3524, pruned_loss=0.1357, over 19726.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3239, pruned_loss=0.09555, over 3813703.43 frames. ], batch size: 51, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:15:31,133 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:15:58,011 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47741.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:09,116 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.932e+02 5.831e+02 7.200e+02 8.445e+02 1.747e+03, threshold=1.440e+03, percent-clipped=3.0 +2023-04-01 11:16:13,239 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.42 vs. limit=5.0 +2023-04-01 11:16:15,241 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:16:28,688 INFO [train.py:903] (3/4) Epoch 7, batch 6800, loss[loss=0.2421, simple_loss=0.3183, pruned_loss=0.08288, over 19498.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.324, pruned_loss=0.09516, over 3816739.93 frames. ], batch size: 64, lr: 1.15e-02, grad_scale: 8.0 +2023-04-01 11:17:14,509 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 11:17:15,587 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 11:17:18,393 INFO [train.py:903] (3/4) Epoch 8, batch 0, loss[loss=0.2208, simple_loss=0.2918, pruned_loss=0.07491, over 19834.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2918, pruned_loss=0.07491, over 19834.00 frames. 
], batch size: 52, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:17:18,394 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 11:17:30,978 INFO [train.py:937] (3/4) Epoch 8, validation: loss=0.1916, simple_loss=0.2915, pruned_loss=0.0458, over 944034.00 frames. +2023-04-01 11:17:30,979 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 11:17:41,962 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 11:17:43,324 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47806.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:07,512 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47827.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:18:32,756 INFO [train.py:903] (3/4) Epoch 8, batch 50, loss[loss=0.2007, simple_loss=0.2809, pruned_loss=0.06027, over 19742.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3222, pruned_loss=0.09273, over 861178.19 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:18:38,613 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 5.925e+02 7.453e+02 9.478e+02 2.348e+03, threshold=1.491e+03, percent-clipped=8.0 +2023-04-01 11:18:44,831 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:18:52,168 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.85 vs. limit=5.0 +2023-04-01 11:19:06,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 11:19:32,493 INFO [train.py:903] (3/4) Epoch 8, batch 100, loss[loss=0.2381, simple_loss=0.3008, pruned_loss=0.0877, over 19779.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3231, pruned_loss=0.09345, over 1529607.93 frames. ], batch size: 47, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:19:42,616 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 11:20:32,672 INFO [train.py:903] (3/4) Epoch 8, batch 150, loss[loss=0.2516, simple_loss=0.3301, pruned_loss=0.08652, over 18712.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3228, pruned_loss=0.09389, over 2050463.92 frames. ], batch size: 74, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:20:38,416 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.629e+02 6.208e+02 7.626e+02 9.440e+02 2.273e+03, threshold=1.525e+03, percent-clipped=3.0 +2023-04-01 11:20:38,917 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2187, 2.9202, 1.9619, 2.0020, 1.8447, 2.4139, 0.6849, 2.0908], + device='cuda:3'), covar=tensor([0.0351, 0.0307, 0.0411, 0.0557, 0.0608, 0.0526, 0.0723, 0.0603], + device='cuda:3'), in_proj_covar=tensor([0.0311, 0.0314, 0.0307, 0.0325, 0.0398, 0.0322, 0.0285, 0.0311], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:21:07,734 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:21:29,238 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 11:21:32,698 INFO [train.py:903] (3/4) Epoch 8, batch 200, loss[loss=0.2641, simple_loss=0.332, pruned_loss=0.09815, over 19691.00 frames. 
], tot_loss[loss=0.2539, simple_loss=0.3219, pruned_loss=0.09291, over 2444428.72 frames. ], batch size: 58, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:21:36,577 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:22:15,569 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2336, 1.2307, 1.6343, 1.2172, 2.7936, 3.5292, 3.3229, 3.7542], + device='cuda:3'), covar=tensor([0.1448, 0.3124, 0.2901, 0.2027, 0.0458, 0.0171, 0.0213, 0.0141], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0284, 0.0317, 0.0247, 0.0208, 0.0136, 0.0204, 0.0172], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:22:35,823 INFO [train.py:903] (3/4) Epoch 8, batch 250, loss[loss=0.2343, simple_loss=0.3124, pruned_loss=0.07807, over 19656.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.323, pruned_loss=0.09385, over 2744417.64 frames. ], batch size: 60, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:22:42,343 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 6.073e+02 7.344e+02 8.973e+02 2.163e+03, threshold=1.469e+03, percent-clipped=4.0 +2023-04-01 11:23:30,381 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48090.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:31,782 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2932, 1.3191, 1.5775, 1.4879, 2.1592, 2.1363, 2.3256, 0.7378], + device='cuda:3'), covar=tensor([0.1849, 0.3316, 0.1881, 0.1512, 0.1241, 0.1632, 0.1111, 0.3248], + device='cuda:3'), in_proj_covar=tensor([0.0455, 0.0533, 0.0530, 0.0416, 0.0572, 0.0467, 0.0624, 0.0459], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:23:36,834 INFO [train.py:903] (3/4) Epoch 8, batch 300, loss[loss=0.2649, simple_loss=0.3277, pruned_loss=0.1011, over 18695.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3242, pruned_loss=0.09489, over 2981124.51 frames. 
], batch size: 74, lr: 1.08e-02, grad_scale: 8.0 +2023-04-01 11:23:41,649 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:55,944 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48112.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:23:59,582 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2345, 1.3114, 1.6968, 1.4643, 2.4698, 2.3312, 2.6426, 0.9561], + device='cuda:3'), covar=tensor([0.1953, 0.3449, 0.1937, 0.1514, 0.1377, 0.1558, 0.1456, 0.3288], + device='cuda:3'), in_proj_covar=tensor([0.0458, 0.0536, 0.0532, 0.0418, 0.0575, 0.0468, 0.0629, 0.0461], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:24:14,361 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8157, 1.8629, 1.9135, 2.6948, 1.7705, 2.4192, 2.3206, 1.7348], + device='cuda:3'), covar=tensor([0.2720, 0.2231, 0.1136, 0.1186, 0.2509, 0.1016, 0.2426, 0.2165], + device='cuda:3'), in_proj_covar=tensor([0.0721, 0.0723, 0.0607, 0.0852, 0.0734, 0.0632, 0.0748, 0.0657], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:24:27,823 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:37,683 INFO [train.py:903] (3/4) Epoch 8, batch 350, loss[loss=0.208, simple_loss=0.2772, pruned_loss=0.06943, over 19406.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3227, pruned_loss=0.09429, over 3182747.90 frames. ], batch size: 48, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:24:39,944 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 11:24:42,120 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:24:43,059 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.994e+02 6.192e+02 7.149e+02 9.949e+02 1.629e+03, threshold=1.430e+03, percent-clipped=6.0 +2023-04-01 11:25:09,480 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48171.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:25:22,169 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8168, 1.2974, 1.3845, 2.1231, 1.5901, 1.7542, 1.9890, 1.6831], + device='cuda:3'), covar=tensor([0.0811, 0.1323, 0.1200, 0.0904, 0.0907, 0.0927, 0.1035, 0.0847], + device='cuda:3'), in_proj_covar=tensor([0.0217, 0.0235, 0.0232, 0.0262, 0.0246, 0.0217, 0.0211, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 11:25:33,669 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4685, 1.2290, 1.7561, 1.2111, 2.8718, 3.5987, 3.4383, 3.8755], + device='cuda:3'), covar=tensor([0.1347, 0.3285, 0.2841, 0.1974, 0.0413, 0.0172, 0.0197, 0.0145], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0284, 0.0313, 0.0244, 0.0207, 0.0136, 0.0202, 0.0171], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:25:37,717 INFO [train.py:903] (3/4) Epoch 8, batch 400, loss[loss=0.2785, simple_loss=0.3553, pruned_loss=0.1008, over 19104.00 frames. 
], tot_loss[loss=0.2544, simple_loss=0.3216, pruned_loss=0.09359, over 3328440.38 frames. ], batch size: 69, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:25:57,727 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3992, 2.1863, 1.7402, 1.6262, 1.4636, 1.6904, 0.4939, 1.2557], + device='cuda:3'), covar=tensor([0.0286, 0.0323, 0.0283, 0.0446, 0.0673, 0.0451, 0.0641, 0.0533], + device='cuda:3'), in_proj_covar=tensor([0.0309, 0.0311, 0.0306, 0.0324, 0.0397, 0.0318, 0.0285, 0.0308], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:26:03,330 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9334, 5.3311, 2.9381, 4.6798, 0.9427, 5.0511, 5.1492, 5.4003], + device='cuda:3'), covar=tensor([0.0449, 0.0890, 0.1673, 0.0533, 0.4088, 0.0487, 0.0599, 0.0671], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0340, 0.0395, 0.0297, 0.0364, 0.0322, 0.0316, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 11:26:03,485 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:26:13,177 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8218, 1.9710, 2.0015, 2.7913, 1.8866, 2.5451, 2.5448, 1.9086], + device='cuda:3'), covar=tensor([0.2897, 0.2355, 0.1128, 0.1370, 0.2678, 0.1105, 0.2425, 0.2063], + device='cuda:3'), in_proj_covar=tensor([0.0726, 0.0730, 0.0612, 0.0861, 0.0737, 0.0638, 0.0756, 0.0660], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:26:14,354 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0080, 2.1477, 2.1112, 3.1622, 2.2897, 3.3558, 2.9479, 2.1686], + device='cuda:3'), covar=tensor([0.2924, 0.2260, 0.1134, 0.1519, 0.2691, 0.0905, 0.2164, 0.1879], + device='cuda:3'), in_proj_covar=tensor([0.0726, 0.0730, 0.0612, 0.0861, 0.0737, 0.0638, 0.0756, 0.0660], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:26:21,225 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48230.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:26:38,937 INFO [train.py:903] (3/4) Epoch 8, batch 450, loss[loss=0.2239, simple_loss=0.2951, pruned_loss=0.07637, over 19777.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3213, pruned_loss=0.09304, over 3435343.15 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:26:45,636 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.626e+02 7.034e+02 8.568e+02 1.629e+03, threshold=1.407e+03, percent-clipped=1.0 +2023-04-01 11:27:03,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48265.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:27:11,113 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 11:27:12,252 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 11:27:27,229 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48286.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:27:41,081 INFO [train.py:903] (3/4) Epoch 8, batch 500, loss[loss=0.2317, simple_loss=0.2987, pruned_loss=0.08242, over 19622.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3218, pruned_loss=0.09345, over 3514613.50 frames. ], batch size: 50, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:37,114 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48342.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:28:41,346 INFO [train.py:903] (3/4) Epoch 8, batch 550, loss[loss=0.2378, simple_loss=0.3003, pruned_loss=0.08761, over 19394.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3204, pruned_loss=0.09249, over 3591974.37 frames. ], batch size: 48, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:28:47,050 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.262e+02 6.228e+02 7.563e+02 9.337e+02 1.593e+03, threshold=1.513e+03, percent-clipped=3.0 +2023-04-01 11:28:47,316 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8983, 4.3711, 2.7290, 3.8324, 1.1551, 4.1544, 4.1316, 4.2653], + device='cuda:3'), covar=tensor([0.0551, 0.1134, 0.1844, 0.0741, 0.3610, 0.0655, 0.0604, 0.0787], + device='cuda:3'), in_proj_covar=tensor([0.0393, 0.0344, 0.0398, 0.0302, 0.0366, 0.0325, 0.0318, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 11:29:23,958 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1991, 1.3308, 1.1394, 0.9371, 1.0427, 1.0846, 0.0403, 0.3179], + device='cuda:3'), covar=tensor([0.0388, 0.0434, 0.0252, 0.0363, 0.0806, 0.0327, 0.0645, 0.0644], + device='cuda:3'), in_proj_covar=tensor([0.0308, 0.0313, 0.0309, 0.0328, 0.0401, 0.0321, 0.0289, 0.0312], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:29:42,861 INFO [train.py:903] (3/4) Epoch 8, batch 600, loss[loss=0.2922, simple_loss=0.3519, pruned_loss=0.1162, over 19783.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3198, pruned_loss=0.09217, over 3654967.62 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:30:05,020 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:26,080 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 11:30:31,029 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:39,411 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:30:44,670 INFO [train.py:903] (3/4) Epoch 8, batch 650, loss[loss=0.2461, simple_loss=0.3221, pruned_loss=0.08511, over 19507.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3204, pruned_loss=0.09232, over 3698724.77 frames. 
], batch size: 56, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:30:50,369 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.061e+02 6.183e+02 7.491e+02 9.829e+02 2.830e+03, threshold=1.498e+03, percent-clipped=3.0 +2023-04-01 11:31:17,364 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48471.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:31:45,461 INFO [train.py:903] (3/4) Epoch 8, batch 700, loss[loss=0.3213, simple_loss=0.374, pruned_loss=0.1343, over 13315.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3208, pruned_loss=0.09284, over 3718906.87 frames. ], batch size: 135, lr: 1.07e-02, grad_scale: 16.0 +2023-04-01 11:31:45,799 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48496.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:17,670 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48521.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:43,992 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48542.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:32:48,872 INFO [train.py:903] (3/4) Epoch 8, batch 750, loss[loss=0.2574, simple_loss=0.3253, pruned_loss=0.09473, over 19523.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3214, pruned_loss=0.09325, over 3751301.86 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:32:49,333 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48546.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:52,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48549.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:32:55,668 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.054e+02 7.636e+02 9.380e+02 1.990e+03, threshold=1.527e+03, percent-clipped=3.0 +2023-04-01 11:33:13,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48567.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:33:21,448 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48574.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:33:37,263 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1836, 1.2964, 1.6539, 1.2409, 2.6517, 3.3546, 3.1517, 3.6186], + device='cuda:3'), covar=tensor([0.1544, 0.3165, 0.2867, 0.2050, 0.0513, 0.0226, 0.0219, 0.0162], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0285, 0.0316, 0.0246, 0.0209, 0.0135, 0.0202, 0.0172], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:33:38,455 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5165, 1.8201, 1.9700, 2.5928, 2.2763, 2.2417, 2.0325, 2.5367], + device='cuda:3'), covar=tensor([0.0833, 0.1950, 0.1355, 0.0871, 0.1296, 0.0470, 0.1126, 0.0596], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0358, 0.0285, 0.0238, 0.0303, 0.0243, 0.0273, 0.0231], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:33:49,551 INFO [train.py:903] (3/4) Epoch 8, batch 800, loss[loss=0.2402, simple_loss=0.3023, pruned_loss=0.08906, over 19601.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3232, pruned_loss=0.09451, over 3769814.46 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:02,660 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 11:34:34,446 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8035, 4.3183, 2.4270, 3.7786, 1.2353, 3.9850, 3.9999, 4.2366], + device='cuda:3'), covar=tensor([0.0639, 0.1173, 0.2234, 0.0764, 0.3773, 0.0803, 0.0754, 0.0932], + device='cuda:3'), in_proj_covar=tensor([0.0394, 0.0344, 0.0401, 0.0302, 0.0366, 0.0328, 0.0318, 0.0355], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 11:34:51,087 INFO [train.py:903] (3/4) Epoch 8, batch 850, loss[loss=0.2933, simple_loss=0.3458, pruned_loss=0.1204, over 12857.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3232, pruned_loss=0.09453, over 3770418.73 frames. ], batch size: 135, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:34:57,942 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 5.922e+02 7.936e+02 9.993e+02 1.897e+03, threshold=1.587e+03, percent-clipped=5.0 +2023-04-01 11:35:39,758 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 11:35:39,901 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48686.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:35:43,344 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48689.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:35:50,882 INFO [train.py:903] (3/4) Epoch 8, batch 900, loss[loss=0.2595, simple_loss=0.3343, pruned_loss=0.09237, over 19525.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3238, pruned_loss=0.09482, over 3756761.20 frames. ], batch size: 56, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:35,812 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0918, 1.1760, 1.0092, 0.8129, 0.9059, 0.9512, 0.0599, 0.3438], + device='cuda:3'), covar=tensor([0.0295, 0.0308, 0.0198, 0.0279, 0.0545, 0.0265, 0.0581, 0.0466], + device='cuda:3'), in_proj_covar=tensor([0.0312, 0.0313, 0.0311, 0.0329, 0.0402, 0.0324, 0.0291, 0.0311], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:36:54,666 INFO [train.py:903] (3/4) Epoch 8, batch 950, loss[loss=0.2455, simple_loss=0.322, pruned_loss=0.08448, over 19667.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09427, over 3772431.26 frames. ], batch size: 58, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:36:56,558 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 11:37:03,057 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.996e+02 5.931e+02 7.048e+02 8.289e+02 1.665e+03, threshold=1.410e+03, percent-clipped=1.0 +2023-04-01 11:37:11,362 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48759.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:42,413 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:37:57,228 INFO [train.py:903] (3/4) Epoch 8, batch 1000, loss[loss=0.2396, simple_loss=0.3053, pruned_loss=0.08694, over 19659.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3209, pruned_loss=0.0926, over 3791011.99 frames. 
], batch size: 53, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:38:03,560 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:08,276 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48805.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:41,025 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48830.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:50,181 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 11:38:51,478 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48839.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:38:59,277 INFO [train.py:903] (3/4) Epoch 8, batch 1050, loss[loss=0.2891, simple_loss=0.3532, pruned_loss=0.1125, over 19603.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3215, pruned_loss=0.09309, over 3799384.15 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:39:06,227 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.500e+02 6.561e+02 8.180e+02 1.521e+03, threshold=1.312e+03, percent-clipped=1.0 +2023-04-01 11:39:11,120 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:31,755 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 11:39:34,235 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:39:45,603 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8650, 4.2363, 4.4890, 4.4733, 1.4389, 4.1868, 3.5876, 4.1045], + device='cuda:3'), covar=tensor([0.1180, 0.0699, 0.0544, 0.0496, 0.4848, 0.0495, 0.0602, 0.1069], + device='cuda:3'), in_proj_covar=tensor([0.0584, 0.0514, 0.0691, 0.0581, 0.0643, 0.0439, 0.0442, 0.0642], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 11:39:45,753 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2585, 2.1234, 1.7479, 1.7060, 1.4805, 1.7498, 0.5614, 1.0895], + device='cuda:3'), covar=tensor([0.0306, 0.0337, 0.0250, 0.0414, 0.0693, 0.0429, 0.0629, 0.0609], + device='cuda:3'), in_proj_covar=tensor([0.0311, 0.0312, 0.0309, 0.0329, 0.0401, 0.0320, 0.0289, 0.0313], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 11:39:58,991 INFO [train.py:903] (3/4) Epoch 8, batch 1100, loss[loss=0.1996, simple_loss=0.2755, pruned_loss=0.06189, over 19734.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3217, pruned_loss=0.09343, over 3804189.25 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:40:04,002 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48900.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:41:00,124 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48945.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 11:41:00,833 INFO [train.py:903] (3/4) Epoch 8, batch 1150, loss[loss=0.3345, simple_loss=0.381, pruned_loss=0.144, over 13633.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3222, pruned_loss=0.09352, over 3797150.98 frames. 
], batch size: 136, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:41:09,120 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.943e+02 6.952e+02 8.882e+02 1.618e+03, threshold=1.390e+03, percent-clipped=5.0 +2023-04-01 11:41:30,913 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:42:04,113 INFO [train.py:903] (3/4) Epoch 8, batch 1200, loss[loss=0.2381, simple_loss=0.3116, pruned_loss=0.08236, over 19502.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3217, pruned_loss=0.09311, over 3803188.81 frames. ], batch size: 64, lr: 1.07e-02, grad_scale: 8.0 +2023-04-01 11:42:32,672 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 11:43:05,009 INFO [train.py:903] (3/4) Epoch 8, batch 1250, loss[loss=0.2216, simple_loss=0.2862, pruned_loss=0.07845, over 19730.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3215, pruned_loss=0.09339, over 3811794.07 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:43:11,745 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.529e+02 6.811e+02 8.521e+02 1.008e+03 2.064e+03, threshold=1.704e+03, percent-clipped=4.0 +2023-04-01 11:43:18,050 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:43:39,591 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 11:43:50,337 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:44:06,035 INFO [train.py:903] (3/4) Epoch 8, batch 1300, loss[loss=0.2751, simple_loss=0.3447, pruned_loss=0.1027, over 19349.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3217, pruned_loss=0.09312, over 3827370.62 frames. ], batch size: 70, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:44:49,033 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49130.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:07,322 INFO [train.py:903] (3/4) Epoch 8, batch 1350, loss[loss=0.2529, simple_loss=0.324, pruned_loss=0.09094, over 19449.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3224, pruned_loss=0.09388, over 3842093.94 frames. ], batch size: 64, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:45:08,146 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-01 11:45:16,536 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.835e+02 7.092e+02 8.908e+02 2.388e+03, threshold=1.418e+03, percent-clipped=3.0 +2023-04-01 11:45:21,321 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:22,444 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:41,563 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4160, 2.2988, 1.5421, 1.5315, 2.0817, 1.2003, 1.1717, 1.7101], + device='cuda:3'), covar=tensor([0.0889, 0.0502, 0.0921, 0.0605, 0.0421, 0.0968, 0.0684, 0.0442], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0290, 0.0315, 0.0243, 0.0232, 0.0309, 0.0284, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:45:42,695 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6630, 1.1684, 1.4556, 1.5643, 3.2145, 1.1597, 2.0655, 3.3994], + device='cuda:3'), covar=tensor([0.0425, 0.2599, 0.2555, 0.1619, 0.0620, 0.2261, 0.1313, 0.0343], + device='cuda:3'), in_proj_covar=tensor([0.0313, 0.0317, 0.0325, 0.0298, 0.0325, 0.0319, 0.0297, 0.0321], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:45:52,074 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49181.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:45:54,143 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49183.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:46:10,832 INFO [train.py:903] (3/4) Epoch 8, batch 1400, loss[loss=0.2083, simple_loss=0.2775, pruned_loss=0.06955, over 19792.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3211, pruned_loss=0.09284, over 3830824.36 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:46:17,523 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49200.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:47:12,311 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 11:47:13,439 INFO [train.py:903] (3/4) Epoch 8, batch 1450, loss[loss=0.2911, simple_loss=0.3602, pruned_loss=0.111, over 17313.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3213, pruned_loss=0.09338, over 3811195.55 frames. ], batch size: 101, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:47:19,909 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 6.215e+02 8.146e+02 9.729e+02 2.293e+03, threshold=1.629e+03, percent-clipped=2.0 +2023-04-01 11:47:22,668 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:14,512 INFO [train.py:903] (3/4) Epoch 8, batch 1500, loss[loss=0.2532, simple_loss=0.3131, pruned_loss=0.09665, over 19375.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3208, pruned_loss=0.09261, over 3806014.53 frames. 
], batch size: 47, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:48:16,763 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49298.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:48:38,601 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49315.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:49:05,504 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8377, 1.4928, 1.3589, 1.7162, 1.6745, 1.3241, 1.2106, 1.6301], + device='cuda:3'), covar=tensor([0.0924, 0.1483, 0.1470, 0.1031, 0.1199, 0.0785, 0.1397, 0.0752], + device='cuda:3'), in_proj_covar=tensor([0.0241, 0.0346, 0.0278, 0.0232, 0.0290, 0.0238, 0.0266, 0.0225], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 11:49:14,520 INFO [train.py:903] (3/4) Epoch 8, batch 1550, loss[loss=0.2375, simple_loss=0.3105, pruned_loss=0.08219, over 19771.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3213, pruned_loss=0.09261, over 3818923.12 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:49:23,152 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+02 6.392e+02 7.844e+02 9.464e+02 1.840e+03, threshold=1.569e+03, percent-clipped=1.0 +2023-04-01 11:49:42,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3394, 3.9389, 2.5540, 3.5531, 1.1419, 3.6274, 3.6854, 3.8025], + device='cuda:3'), covar=tensor([0.0613, 0.1066, 0.1964, 0.0703, 0.3734, 0.0814, 0.0741, 0.0888], + device='cuda:3'), in_proj_covar=tensor([0.0393, 0.0341, 0.0399, 0.0298, 0.0368, 0.0326, 0.0318, 0.0355], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 11:50:17,463 INFO [train.py:903] (3/4) Epoch 8, batch 1600, loss[loss=0.2289, simple_loss=0.2937, pruned_loss=0.08209, over 19809.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3217, pruned_loss=0.09294, over 3821125.97 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:50:25,527 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7678, 4.1817, 4.5005, 4.4523, 1.8581, 4.1828, 3.6209, 4.1066], + device='cuda:3'), covar=tensor([0.1113, 0.0774, 0.0525, 0.0488, 0.4289, 0.0486, 0.0586, 0.1008], + device='cuda:3'), in_proj_covar=tensor([0.0600, 0.0533, 0.0717, 0.0598, 0.0667, 0.0458, 0.0454, 0.0663], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 11:50:38,465 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 11:51:12,154 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:51:20,003 INFO [train.py:903] (3/4) Epoch 8, batch 1650, loss[loss=0.2999, simple_loss=0.3599, pruned_loss=0.1199, over 18166.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3224, pruned_loss=0.09334, over 3818882.94 frames. 
], batch size: 83, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:51:24,778 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49450.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:51:26,748 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 6.230e+02 7.478e+02 9.229e+02 3.510e+03, threshold=1.496e+03, percent-clipped=3.0 +2023-04-01 11:52:21,801 INFO [train.py:903] (3/4) Epoch 8, batch 1700, loss[loss=0.2559, simple_loss=0.3099, pruned_loss=0.101, over 19768.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.321, pruned_loss=0.09234, over 3827985.70 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:02,521 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 11:53:23,396 INFO [train.py:903] (3/4) Epoch 8, batch 1750, loss[loss=0.2332, simple_loss=0.3008, pruned_loss=0.08283, over 19394.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3223, pruned_loss=0.09315, over 3824647.28 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:53:31,458 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.581e+02 6.147e+02 7.390e+02 1.012e+03 1.809e+03, threshold=1.478e+03, percent-clipped=6.0 +2023-04-01 11:53:35,079 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49554.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:56,608 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49571.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:53:58,160 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-01 11:54:06,059 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49579.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:09,487 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3011, 3.8938, 2.5456, 3.4760, 1.1442, 3.5031, 3.5505, 3.7090], + device='cuda:3'), covar=tensor([0.0769, 0.1123, 0.2103, 0.0873, 0.4062, 0.0983, 0.0931, 0.1031], + device='cuda:3'), in_proj_covar=tensor([0.0396, 0.0343, 0.0401, 0.0299, 0.0366, 0.0325, 0.0316, 0.0356], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 11:54:27,937 INFO [train.py:903] (3/4) Epoch 8, batch 1800, loss[loss=0.2255, simple_loss=0.3059, pruned_loss=0.0725, over 18269.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.322, pruned_loss=0.09269, over 3817562.04 frames. ], batch size: 83, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:54:28,357 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:31,445 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49598.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:54:51,842 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7790, 4.1836, 4.4868, 4.4576, 1.8960, 4.1347, 3.6115, 4.0893], + device='cuda:3'), covar=tensor([0.1133, 0.0931, 0.0503, 0.0468, 0.4434, 0.0585, 0.0612, 0.1027], + device='cuda:3'), in_proj_covar=tensor([0.0606, 0.0533, 0.0718, 0.0602, 0.0669, 0.0461, 0.0454, 0.0665], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 11:55:25,416 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-01 11:55:26,233 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.44 vs. limit=5.0 +2023-04-01 11:55:30,988 INFO [train.py:903] (3/4) Epoch 8, batch 1850, loss[loss=0.2361, simple_loss=0.3145, pruned_loss=0.07883, over 19766.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3216, pruned_loss=0.09271, over 3803262.74 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:55:38,000 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.845e+02 7.519e+02 8.649e+02 2.522e+03, threshold=1.504e+03, percent-clipped=4.0 +2023-04-01 11:56:02,474 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 11:56:31,217 INFO [train.py:903] (3/4) Epoch 8, batch 1900, loss[loss=0.2758, simple_loss=0.353, pruned_loss=0.09927, over 19607.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3233, pruned_loss=0.09436, over 3805185.93 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:56:33,998 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3298, 1.4846, 1.7006, 1.5849, 2.6131, 2.2330, 2.7660, 0.9960], + device='cuda:3'), covar=tensor([0.1860, 0.3208, 0.1898, 0.1512, 0.1150, 0.1594, 0.1187, 0.3069], + device='cuda:3'), in_proj_covar=tensor([0.0457, 0.0535, 0.0532, 0.0415, 0.0571, 0.0467, 0.0627, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 11:56:48,645 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 11:56:52,394 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49713.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:56:54,388 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 11:57:18,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 11:57:33,017 INFO [train.py:903] (3/4) Epoch 8, batch 1950, loss[loss=0.2773, simple_loss=0.3486, pruned_loss=0.103, over 19315.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3225, pruned_loss=0.0935, over 3803030.46 frames. ], batch size: 66, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:57:40,107 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.492e+02 5.402e+02 6.624e+02 8.916e+02 2.925e+03, threshold=1.325e+03, percent-clipped=4.0 +2023-04-01 11:58:20,355 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:20,517 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 11:58:32,991 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49794.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 11:58:35,910 INFO [train.py:903] (3/4) Epoch 8, batch 2000, loss[loss=0.2399, simple_loss=0.3155, pruned_loss=0.08209, over 18135.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3229, pruned_loss=0.09348, over 3823311.05 frames. ], batch size: 83, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:34,101 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.94 vs. 
limit=5.0 +2023-04-01 11:59:35,611 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 11:59:38,067 INFO [train.py:903] (3/4) Epoch 8, batch 2050, loss[loss=0.2364, simple_loss=0.3122, pruned_loss=0.08031, over 19674.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.324, pruned_loss=0.09388, over 3821523.75 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 11:59:45,925 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.411e+02 7.156e+02 9.098e+02 3.444e+03, threshold=1.431e+03, percent-clipped=9.0 +2023-04-01 11:59:53,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 11:59:55,109 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 12:00:17,465 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 12:00:39,978 INFO [train.py:903] (3/4) Epoch 8, batch 2100, loss[loss=0.3198, simple_loss=0.3622, pruned_loss=0.1387, over 12910.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3244, pruned_loss=0.09418, over 3820167.30 frames. ], batch size: 135, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:00:43,786 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:00:55,216 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49909.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:01:10,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 12:01:32,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 12:01:40,057 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9272, 2.0111, 2.0895, 2.9115, 1.8386, 2.5661, 2.6757, 1.9614], + device='cuda:3'), covar=tensor([0.2983, 0.2465, 0.1173, 0.1456, 0.2896, 0.1178, 0.2439, 0.2189], + device='cuda:3'), in_proj_covar=tensor([0.0724, 0.0734, 0.0612, 0.0859, 0.0731, 0.0636, 0.0760, 0.0658], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 12:01:41,833 INFO [train.py:903] (3/4) Epoch 8, batch 2150, loss[loss=0.2166, simple_loss=0.2885, pruned_loss=0.07239, over 19591.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3244, pruned_loss=0.09423, over 3818387.76 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 8.0 +2023-04-01 12:01:48,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.004e+02 5.997e+02 7.086e+02 8.659e+02 2.224e+03, threshold=1.417e+03, percent-clipped=8.0 +2023-04-01 12:01:49,703 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0312, 2.0379, 1.6656, 1.5576, 1.4513, 1.6512, 0.3541, 0.8565], + device='cuda:3'), covar=tensor([0.0292, 0.0285, 0.0218, 0.0329, 0.0641, 0.0358, 0.0592, 0.0556], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0307, 0.0308, 0.0327, 0.0397, 0.0319, 0.0288, 0.0306], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 12:02:06,582 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2849, 1.3696, 1.7023, 1.4888, 2.4818, 2.2935, 2.5529, 0.9695], + device='cuda:3'), covar=tensor([0.1838, 0.3216, 0.1900, 0.1505, 0.1088, 0.1439, 0.1195, 0.2947], + device='cuda:3'), in_proj_covar=tensor([0.0454, 0.0530, 0.0526, 0.0411, 0.0567, 0.0461, 0.0628, 0.0459], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 12:02:12,069 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:41,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:02:43,587 INFO [train.py:903] (3/4) Epoch 8, batch 2200, loss[loss=0.2407, simple_loss=0.3127, pruned_loss=0.08436, over 19755.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3246, pruned_loss=0.09457, over 3806462.62 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:48,613 INFO [train.py:903] (3/4) Epoch 8, batch 2250, loss[loss=0.2341, simple_loss=0.2971, pruned_loss=0.08552, over 19794.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3253, pruned_loss=0.09492, over 3819770.54 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:03:51,223 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1252, 5.5011, 2.9400, 4.8580, 1.1840, 5.3156, 5.4088, 5.5625], + device='cuda:3'), covar=tensor([0.0374, 0.0812, 0.1825, 0.0608, 0.3949, 0.0661, 0.0553, 0.0911], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0338, 0.0398, 0.0294, 0.0360, 0.0323, 0.0312, 0.0353], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 12:03:55,466 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+02 6.238e+02 7.806e+02 1.014e+03 2.092e+03, threshold=1.561e+03, percent-clipped=8.0 +2023-04-01 12:03:58,802 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2725, 3.8425, 2.4529, 3.4797, 1.3326, 3.5384, 3.6004, 3.6765], + device='cuda:3'), covar=tensor([0.0676, 0.1139, 0.2130, 0.0740, 0.3536, 0.0921, 0.0745, 0.1047], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0339, 0.0398, 0.0294, 0.0360, 0.0323, 0.0313, 0.0353], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 12:04:51,010 INFO [train.py:903] (3/4) Epoch 8, batch 2300, loss[loss=0.2246, simple_loss=0.3058, pruned_loss=0.07176, over 19535.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3252, pruned_loss=0.09476, over 3805349.94 frames. 
], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:03,056 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2859, 0.9256, 1.1628, 2.0568, 1.6773, 1.1425, 1.8291, 1.2328], + device='cuda:3'), covar=tensor([0.1241, 0.1930, 0.1511, 0.1043, 0.1072, 0.1590, 0.1160, 0.1247], + device='cuda:3'), in_proj_covar=tensor([0.0217, 0.0232, 0.0229, 0.0258, 0.0245, 0.0217, 0.0208, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 12:05:05,019 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 12:05:11,322 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2475, 2.9546, 2.1650, 2.1623, 1.8220, 2.4983, 0.7865, 1.9928], + device='cuda:3'), covar=tensor([0.0313, 0.0330, 0.0366, 0.0615, 0.0725, 0.0571, 0.0763, 0.0648], + device='cuda:3'), in_proj_covar=tensor([0.0312, 0.0312, 0.0312, 0.0328, 0.0403, 0.0324, 0.0293, 0.0310], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 12:05:31,277 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:37,140 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2888, 1.4109, 1.9566, 1.6142, 2.9628, 4.6168, 4.5822, 4.9797], + device='cuda:3'), covar=tensor([0.1485, 0.3082, 0.2709, 0.1841, 0.0485, 0.0142, 0.0152, 0.0088], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0284, 0.0312, 0.0247, 0.0208, 0.0135, 0.0204, 0.0171], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 12:05:52,929 INFO [train.py:903] (3/4) Epoch 8, batch 2350, loss[loss=0.2518, simple_loss=0.3248, pruned_loss=0.08942, over 17380.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.324, pruned_loss=0.09394, over 3810719.14 frames. ], batch size: 101, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:05:53,263 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50146.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:05:59,868 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.938e+02 7.771e+02 9.106e+02 1.869e+03, threshold=1.554e+03, percent-clipped=2.0 +2023-04-01 12:06:03,818 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:18,093 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50165.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:06:36,731 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:06:38,694 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 12:06:47,997 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50190.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:06:54,684 INFO [train.py:903] (3/4) Epoch 8, batch 2400, loss[loss=0.2596, simple_loss=0.3227, pruned_loss=0.09823, over 19736.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3236, pruned_loss=0.09319, over 3824262.52 frames. 
], batch size: 63, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:06:54,690 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 12:07:20,948 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50215.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:07:54,905 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:07:58,092 INFO [train.py:903] (3/4) Epoch 8, batch 2450, loss[loss=0.223, simple_loss=0.2955, pruned_loss=0.07523, over 19853.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3233, pruned_loss=0.09346, over 3812428.15 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:08:05,246 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.846e+02 6.354e+02 7.339e+02 9.160e+02 2.255e+03, threshold=1.468e+03, percent-clipped=3.0 +2023-04-01 12:09:00,867 INFO [train.py:903] (3/4) Epoch 8, batch 2500, loss[loss=0.3078, simple_loss=0.3667, pruned_loss=0.1245, over 18816.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3234, pruned_loss=0.09355, over 3809628.63 frames. ], batch size: 74, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:03,021 INFO [train.py:903] (3/4) Epoch 8, batch 2550, loss[loss=0.2596, simple_loss=0.3363, pruned_loss=0.09142, over 19734.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3229, pruned_loss=0.09303, over 3808662.65 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:10:09,520 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.479e+02 6.803e+02 8.076e+02 1.672e+03, threshold=1.361e+03, percent-clipped=2.0 +2023-04-01 12:10:39,026 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-01 12:10:59,277 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 12:11:05,094 INFO [train.py:903] (3/4) Epoch 8, batch 2600, loss[loss=0.2042, simple_loss=0.2725, pruned_loss=0.06796, over 19293.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3231, pruned_loss=0.0934, over 3821546.85 frames. ], batch size: 44, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:11:45,197 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 12:12:09,318 INFO [train.py:903] (3/4) Epoch 8, batch 2650, loss[loss=0.2075, simple_loss=0.2772, pruned_loss=0.06896, over 19799.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3221, pruned_loss=0.09288, over 3822465.88 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:12:15,976 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.098e+02 6.836e+02 8.198e+02 1.046e+03 1.620e+03, threshold=1.640e+03, percent-clipped=8.0 +2023-04-01 12:12:27,568 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 12:13:04,663 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:11,332 INFO [train.py:903] (3/4) Epoch 8, batch 2700, loss[loss=0.2635, simple_loss=0.3189, pruned_loss=0.104, over 18594.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3225, pruned_loss=0.09334, over 3818908.51 frames. 
], batch size: 41, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:13:16,521 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50499.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:13:47,859 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:14:15,291 INFO [train.py:903] (3/4) Epoch 8, batch 2750, loss[loss=0.275, simple_loss=0.3349, pruned_loss=0.1075, over 19669.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3224, pruned_loss=0.09355, over 3836363.12 frames. ], batch size: 58, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:14:23,786 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 5.912e+02 7.181e+02 9.047e+02 1.864e+03, threshold=1.436e+03, percent-clipped=1.0 +2023-04-01 12:14:30,883 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50559.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:14:37,797 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50564.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:15:18,012 INFO [train.py:903] (3/4) Epoch 8, batch 2800, loss[loss=0.2132, simple_loss=0.2914, pruned_loss=0.0675, over 19587.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3216, pruned_loss=0.09282, over 3844056.59 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:15:29,739 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50605.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:16:22,000 INFO [train.py:903] (3/4) Epoch 8, batch 2850, loss[loss=0.2137, simple_loss=0.2818, pruned_loss=0.07282, over 19624.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3204, pruned_loss=0.09236, over 3835853.25 frames. ], batch size: 50, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:16:31,184 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.810e+02 5.617e+02 7.073e+02 8.787e+02 1.544e+03, threshold=1.415e+03, percent-clipped=2.0 +2023-04-01 12:16:57,660 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50674.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:17:25,977 INFO [train.py:903] (3/4) Epoch 8, batch 2900, loss[loss=0.2999, simple_loss=0.3539, pruned_loss=0.1229, over 19540.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3208, pruned_loss=0.09234, over 3833467.87 frames. ], batch size: 56, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:17:26,041 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 12:18:29,363 INFO [train.py:903] (3/4) Epoch 8, batch 2950, loss[loss=0.1871, simple_loss=0.2679, pruned_loss=0.0531, over 19717.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.321, pruned_loss=0.09205, over 3835410.90 frames. ], batch size: 45, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:18:37,518 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 6.074e+02 7.951e+02 1.027e+03 2.467e+03, threshold=1.590e+03, percent-clipped=7.0 +2023-04-01 12:19:31,423 INFO [train.py:903] (3/4) Epoch 8, batch 3000, loss[loss=0.3039, simple_loss=0.3578, pruned_loss=0.125, over 18279.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3222, pruned_loss=0.0935, over 3811301.21 frames. 
], batch size: 83, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:19:31,424 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 12:19:44,058 INFO [train.py:937] (3/4) Epoch 8, validation: loss=0.1875, simple_loss=0.2879, pruned_loss=0.04358, over 944034.00 frames. +2023-04-01 12:19:44,059 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 12:19:46,436 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 12:19:47,996 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9791, 1.7602, 1.5622, 1.9589, 1.9003, 1.7778, 1.4935, 1.8648], + device='cuda:3'), covar=tensor([0.0857, 0.1349, 0.1318, 0.0897, 0.0993, 0.0482, 0.1147, 0.0585], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0355, 0.0286, 0.0241, 0.0297, 0.0239, 0.0272, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 12:19:49,169 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50800.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:20:21,829 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50826.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:20:45,719 INFO [train.py:903] (3/4) Epoch 8, batch 3050, loss[loss=0.2367, simple_loss=0.3172, pruned_loss=0.0781, over 18630.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3235, pruned_loss=0.09424, over 3820975.31 frames. ], batch size: 74, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:20:55,179 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 5.734e+02 7.199e+02 9.163e+02 1.650e+03, threshold=1.440e+03, percent-clipped=2.0 +2023-04-01 12:21:05,857 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3503, 1.2596, 1.6225, 1.4251, 3.1912, 2.8641, 3.3298, 1.2772], + device='cuda:3'), covar=tensor([0.1863, 0.3316, 0.2115, 0.1501, 0.1180, 0.1305, 0.1359, 0.3083], + device='cuda:3'), in_proj_covar=tensor([0.0469, 0.0540, 0.0538, 0.0422, 0.0577, 0.0472, 0.0638, 0.0469], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 12:21:06,915 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50861.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:36,839 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50886.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:21:49,959 INFO [train.py:903] (3/4) Epoch 8, batch 3100, loss[loss=0.2549, simple_loss=0.3267, pruned_loss=0.09157, over 19512.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3233, pruned_loss=0.09396, over 3831143.51 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:22:01,846 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 12:22:04,805 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50908.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:22:32,040 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50930.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:22:52,356 INFO [train.py:903] (3/4) Epoch 8, batch 3150, loss[loss=0.2047, simple_loss=0.2685, pruned_loss=0.07043, over 19342.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3237, pruned_loss=0.09414, over 3815577.85 frames. 
], batch size: 44, lr: 1.05e-02, grad_scale: 8.0 +2023-04-01 12:23:00,491 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 5.915e+02 7.023e+02 8.955e+02 1.571e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-01 12:23:03,162 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50955.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:23:14,726 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50964.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:23:20,357 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 12:23:54,308 INFO [train.py:903] (3/4) Epoch 8, batch 3200, loss[loss=0.2581, simple_loss=0.3228, pruned_loss=0.09669, over 17972.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3245, pruned_loss=0.09412, over 3828679.05 frames. ], batch size: 83, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:24:30,164 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51023.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:24:57,557 INFO [train.py:903] (3/4) Epoch 8, batch 3250, loss[loss=0.2352, simple_loss=0.2961, pruned_loss=0.08715, over 19739.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.323, pruned_loss=0.09341, over 3831126.02 frames. ], batch size: 46, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:25:05,775 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 6.087e+02 7.883e+02 9.942e+02 3.174e+03, threshold=1.577e+03, percent-clipped=7.0 +2023-04-01 12:25:40,485 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:26:00,812 INFO [train.py:903] (3/4) Epoch 8, batch 3300, loss[loss=0.2499, simple_loss=0.3266, pruned_loss=0.08654, over 19486.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3232, pruned_loss=0.09366, over 3825524.33 frames. ], batch size: 64, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:26:08,880 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 12:27:02,450 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51144.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:27:04,793 INFO [train.py:903] (3/4) Epoch 8, batch 3350, loss[loss=0.2292, simple_loss=0.3042, pruned_loss=0.07709, over 19687.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3241, pruned_loss=0.09426, over 3822716.99 frames. 
], batch size: 53, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:27:12,718 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.942e+02 7.377e+02 9.279e+02 2.136e+03, threshold=1.475e+03, percent-clipped=2.0 +2023-04-01 12:27:34,026 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51170.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:27:47,826 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2103, 1.3246, 1.1879, 1.0492, 1.0672, 1.0496, 0.0398, 0.3863], + device='cuda:3'), covar=tensor([0.0336, 0.0359, 0.0224, 0.0271, 0.0773, 0.0301, 0.0644, 0.0583], + device='cuda:3'), in_proj_covar=tensor([0.0321, 0.0318, 0.0315, 0.0331, 0.0408, 0.0333, 0.0294, 0.0314], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 12:28:00,625 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51190.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:28:07,186 INFO [train.py:903] (3/4) Epoch 8, batch 3400, loss[loss=0.3194, simple_loss=0.3653, pruned_loss=0.1367, over 13235.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3235, pruned_loss=0.09408, over 3818489.49 frames. ], batch size: 136, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:05,386 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 12:29:10,668 INFO [train.py:903] (3/4) Epoch 8, batch 3450, loss[loss=0.2476, simple_loss=0.318, pruned_loss=0.08866, over 19667.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.322, pruned_loss=0.09286, over 3832422.56 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:29:12,351 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-01 12:29:16,192 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 12:29:18,574 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+02 6.280e+02 7.477e+02 9.686e+02 1.820e+03, threshold=1.495e+03, percent-clipped=3.0 +2023-04-01 12:29:27,099 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:29:52,061 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51279.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:00,108 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51285.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:30:12,377 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 12:30:12,716 INFO [train.py:903] (3/4) Epoch 8, batch 3500, loss[loss=0.319, simple_loss=0.3722, pruned_loss=0.1329, over 19332.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3226, pruned_loss=0.09365, over 3799123.73 frames. ], batch size: 66, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:30:25,524 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51304.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:30,163 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51308.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:30:31,845 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. 
limit=2.0 +2023-04-01 12:31:19,057 INFO [train.py:903] (3/4) Epoch 8, batch 3550, loss[loss=0.2934, simple_loss=0.3486, pruned_loss=0.1191, over 19664.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3225, pruned_loss=0.09324, over 3809304.62 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:31:27,383 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.209e+02 6.396e+02 8.561e+02 1.899e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-01 12:31:47,285 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9096, 1.9391, 2.0648, 2.8425, 1.8257, 2.6013, 2.5487, 1.8941], + device='cuda:3'), covar=tensor([0.3081, 0.2471, 0.1135, 0.1447, 0.2893, 0.1116, 0.2448, 0.2204], + device='cuda:3'), in_proj_covar=tensor([0.0726, 0.0734, 0.0612, 0.0858, 0.0731, 0.0641, 0.0749, 0.0662], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 12:32:21,072 INFO [train.py:903] (3/4) Epoch 8, batch 3600, loss[loss=0.2947, simple_loss=0.3577, pruned_loss=0.1158, over 17193.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3227, pruned_loss=0.09345, over 3815202.16 frames. ], batch size: 101, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:32:26,096 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51400.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:38,822 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:55,884 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:32:56,816 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:33:22,342 INFO [train.py:903] (3/4) Epoch 8, batch 3650, loss[loss=0.2954, simple_loss=0.3522, pruned_loss=0.1193, over 19570.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3233, pruned_loss=0.09369, over 3809807.61 frames. ], batch size: 61, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:33:31,510 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 6.415e+02 7.779e+02 9.918e+02 2.619e+03, threshold=1.556e+03, percent-clipped=14.0 +2023-04-01 12:34:24,456 INFO [train.py:903] (3/4) Epoch 8, batch 3700, loss[loss=0.2439, simple_loss=0.328, pruned_loss=0.07995, over 18796.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3233, pruned_loss=0.0936, over 3812293.52 frames. 
], batch size: 74, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:34:46,878 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:34:49,322 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51515.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:12,270 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51534.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:35:18,982 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51539.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:20,156 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51540.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:35:22,213 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51541.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:35:28,602 INFO [train.py:903] (3/4) Epoch 8, batch 3750, loss[loss=0.2456, simple_loss=0.3251, pruned_loss=0.08308, over 18050.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3227, pruned_loss=0.09287, over 3816174.53 frames. ], batch size: 83, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:35:36,648 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.985e+02 5.907e+02 7.282e+02 9.270e+02 2.268e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:35:52,365 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51566.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:36:30,186 INFO [train.py:903] (3/4) Epoch 8, batch 3800, loss[loss=0.2144, simple_loss=0.2856, pruned_loss=0.07158, over 19829.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3218, pruned_loss=0.09234, over 3831364.90 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:02,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 12:37:31,373 INFO [train.py:903] (3/4) Epoch 8, batch 3850, loss[loss=0.3165, simple_loss=0.3608, pruned_loss=0.1361, over 13287.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3215, pruned_loss=0.09246, over 3826946.98 frames. ], batch size: 136, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:37:35,041 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51649.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:37:40,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.568e+02 6.155e+02 7.716e+02 1.023e+03 2.199e+03, threshold=1.543e+03, percent-clipped=8.0 +2023-04-01 12:38:13,479 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51679.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:38:33,777 INFO [train.py:903] (3/4) Epoch 8, batch 3900, loss[loss=0.2974, simple_loss=0.3548, pruned_loss=0.12, over 13190.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3209, pruned_loss=0.09183, over 3829126.69 frames. ], batch size: 136, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:38:46,001 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51704.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:33,761 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51744.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:37,153 INFO [train.py:903] (3/4) Epoch 8, batch 3950, loss[loss=0.2502, simple_loss=0.3146, pruned_loss=0.09286, over 19835.00 frames. 
], tot_loss[loss=0.2519, simple_loss=0.3201, pruned_loss=0.09184, over 3827554.08 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:39:41,712 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 12:39:45,226 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+02 5.815e+02 7.280e+02 9.203e+02 2.422e+03, threshold=1.456e+03, percent-clipped=4.0 +2023-04-01 12:39:46,690 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:39:59,579 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51765.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:37,996 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51795.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:40:38,724 INFO [train.py:903] (3/4) Epoch 8, batch 4000, loss[loss=0.2246, simple_loss=0.286, pruned_loss=0.08165, over 19727.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3195, pruned_loss=0.09169, over 3824096.44 frames. ], batch size: 46, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:09,883 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51820.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:27,824 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 12:41:41,697 INFO [train.py:903] (3/4) Epoch 8, batch 4050, loss[loss=0.2638, simple_loss=0.3369, pruned_loss=0.09538, over 18755.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3192, pruned_loss=0.09107, over 3829237.44 frames. ], batch size: 74, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:41:50,762 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.576e+02 5.742e+02 7.614e+02 9.901e+02 2.045e+03, threshold=1.523e+03, percent-clipped=5.0 +2023-04-01 12:41:56,460 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:41:59,731 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51859.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:11,969 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51869.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:42:43,874 INFO [train.py:903] (3/4) Epoch 8, batch 4100, loss[loss=0.256, simple_loss=0.3219, pruned_loss=0.09506, over 19525.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3184, pruned_loss=0.09062, over 3836181.99 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-04-01 12:42:56,252 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51905.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:43:21,454 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 12:43:26,474 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51930.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 12:43:39,203 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-01 12:43:47,787 INFO [train.py:903] (3/4) Epoch 8, batch 4150, loss[loss=0.2369, simple_loss=0.3152, pruned_loss=0.07934, over 19616.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3191, pruned_loss=0.09109, over 3828599.97 frames. 
], batch size: 61, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:43:56,803 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.129e+02 6.343e+02 7.798e+02 9.790e+02 2.215e+03, threshold=1.560e+03, percent-clipped=4.0 +2023-04-01 12:44:19,707 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:44:50,672 INFO [train.py:903] (3/4) Epoch 8, batch 4200, loss[loss=0.2626, simple_loss=0.3307, pruned_loss=0.09723, over 19499.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3195, pruned_loss=0.09138, over 3819427.85 frames. ], batch size: 64, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:44:57,654 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 12:45:04,800 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1810, 4.2827, 4.7501, 4.7013, 2.6929, 4.3976, 4.0452, 4.4321], + device='cuda:3'), covar=tensor([0.0946, 0.1936, 0.0434, 0.0461, 0.3339, 0.0527, 0.0437, 0.0800], + device='cuda:3'), in_proj_covar=tensor([0.0600, 0.0526, 0.0712, 0.0601, 0.0666, 0.0461, 0.0450, 0.0661], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 12:45:53,324 INFO [train.py:903] (3/4) Epoch 8, batch 4250, loss[loss=0.2319, simple_loss=0.3162, pruned_loss=0.07378, over 19550.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3205, pruned_loss=0.09144, over 3832911.08 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:45:57,238 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1444, 1.8169, 1.3658, 1.1919, 1.6121, 0.9873, 1.0920, 1.7131], + device='cuda:3'), covar=tensor([0.0629, 0.0533, 0.0886, 0.0595, 0.0384, 0.1013, 0.0546, 0.0314], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0290, 0.0314, 0.0241, 0.0226, 0.0308, 0.0284, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 12:46:01,321 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 5.455e+02 6.472e+02 8.916e+02 2.597e+03, threshold=1.294e+03, percent-clipped=4.0 +2023-04-01 12:46:08,494 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:46:11,508 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 12:46:18,993 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 12:46:21,912 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 12:46:55,487 INFO [train.py:903] (3/4) Epoch 8, batch 4300, loss[loss=0.2447, simple_loss=0.3149, pruned_loss=0.08724, over 19626.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3194, pruned_loss=0.09064, over 3836145.90 frames. 
], batch size: 50, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:47:15,125 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52109.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:22,353 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:31,689 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5895, 1.9695, 2.3981, 2.5819, 2.2780, 2.2852, 2.2911, 2.5459], + device='cuda:3'), covar=tensor([0.0657, 0.1513, 0.0912, 0.0795, 0.1051, 0.0382, 0.0801, 0.0478], + device='cuda:3'), in_proj_covar=tensor([0.0247, 0.0350, 0.0282, 0.0234, 0.0295, 0.0238, 0.0267, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 12:47:34,156 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:47:50,787 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 12:47:53,339 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:00,441 INFO [train.py:903] (3/4) Epoch 8, batch 4350, loss[loss=0.2315, simple_loss=0.2963, pruned_loss=0.08339, over 19387.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3194, pruned_loss=0.09071, over 3823020.87 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:48:06,452 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52150.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:48:09,411 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.672e+02 7.291e+02 9.101e+02 1.997e+03, threshold=1.458e+03, percent-clipped=8.0 +2023-04-01 12:48:48,791 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 12:49:03,039 INFO [train.py:903] (3/4) Epoch 8, batch 4400, loss[loss=0.2423, simple_loss=0.3078, pruned_loss=0.08842, over 19695.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3184, pruned_loss=0.08972, over 3839684.29 frames. ], batch size: 53, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:49:21,182 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52211.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:26,762 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 12:49:38,219 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 12:49:38,563 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:49:43,333 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:01,068 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-01 12:50:05,247 INFO [train.py:903] (3/4) Epoch 8, batch 4450, loss[loss=0.2502, simple_loss=0.3231, pruned_loss=0.08867, over 19754.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3199, pruned_loss=0.09094, over 3831205.49 frames. 
], batch size: 63, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:50:13,311 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.880e+02 7.086e+02 8.839e+02 1.936e+03, threshold=1.417e+03, percent-clipped=3.0 +2023-04-01 12:50:13,759 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:50:14,760 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0314, 1.8902, 1.9531, 2.2430, 2.1917, 1.9170, 2.0439, 2.0939], + device='cuda:3'), covar=tensor([0.0730, 0.1298, 0.0980, 0.0629, 0.0811, 0.0412, 0.0782, 0.0508], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0351, 0.0284, 0.0233, 0.0298, 0.0241, 0.0270, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 12:51:06,390 INFO [train.py:903] (3/4) Epoch 8, batch 4500, loss[loss=0.3025, simple_loss=0.3593, pruned_loss=0.1228, over 18800.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3214, pruned_loss=0.0917, over 3825991.00 frames. ], batch size: 74, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:51:50,145 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52330.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:52:10,352 INFO [train.py:903] (3/4) Epoch 8, batch 4550, loss[loss=0.2656, simple_loss=0.3379, pruned_loss=0.09669, over 19663.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.321, pruned_loss=0.09144, over 3815897.95 frames. ], batch size: 58, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:52:10,749 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7528, 1.3777, 1.2758, 1.6252, 1.4784, 1.5187, 1.3263, 1.5632], + device='cuda:3'), covar=tensor([0.0866, 0.1351, 0.1316, 0.0856, 0.1027, 0.0511, 0.1088, 0.0670], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0351, 0.0286, 0.0234, 0.0297, 0.0241, 0.0270, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 12:52:18,694 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.922e+02 7.010e+02 8.869e+02 1.679e+03, threshold=1.402e+03, percent-clipped=2.0 +2023-04-01 12:52:18,730 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 12:52:27,451 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 12:52:41,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 12:53:11,441 INFO [train.py:903] (3/4) Epoch 8, batch 4600, loss[loss=0.2813, simple_loss=0.3499, pruned_loss=0.1063, over 18690.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3222, pruned_loss=0.09247, over 3811273.26 frames. 
], batch size: 74, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:53:18,628 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:53:19,886 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2256, 1.3116, 1.1812, 1.0118, 1.0048, 1.0354, 0.0273, 0.3587], + device='cuda:3'), covar=tensor([0.0354, 0.0349, 0.0217, 0.0314, 0.0772, 0.0292, 0.0699, 0.0591], + device='cuda:3'), in_proj_covar=tensor([0.0322, 0.0321, 0.0316, 0.0337, 0.0413, 0.0331, 0.0301, 0.0317], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 12:54:12,886 INFO [train.py:903] (3/4) Epoch 8, batch 4650, loss[loss=0.3253, simple_loss=0.3771, pruned_loss=0.1367, over 13882.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3212, pruned_loss=0.09186, over 3823372.27 frames. ], batch size: 135, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:54:21,262 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 5.664e+02 6.903e+02 8.285e+02 1.576e+03, threshold=1.381e+03, percent-clipped=2.0 +2023-04-01 12:54:26,501 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8514, 1.8904, 1.9813, 2.4335, 1.6767, 2.2134, 2.3250, 1.9354], + device='cuda:3'), covar=tensor([0.2807, 0.2154, 0.1181, 0.1296, 0.2486, 0.1151, 0.2697, 0.2067], + device='cuda:3'), in_proj_covar=tensor([0.0722, 0.0734, 0.0607, 0.0861, 0.0726, 0.0636, 0.0754, 0.0659], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 12:54:30,512 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 12:54:42,678 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 12:54:56,945 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:15,249 INFO [train.py:903] (3/4) Epoch 8, batch 4700, loss[loss=0.2028, simple_loss=0.2733, pruned_loss=0.0661, over 19270.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3212, pruned_loss=0.09183, over 3820790.26 frames. ], batch size: 44, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:55:27,909 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52505.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:55:28,210 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.68 vs. limit=5.0 +2023-04-01 12:55:39,741 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 12:55:41,526 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 12:55:43,531 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:18,361 INFO [train.py:903] (3/4) Epoch 8, batch 4750, loss[loss=0.2589, simple_loss=0.3406, pruned_loss=0.08856, over 19541.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3212, pruned_loss=0.09168, over 3824044.56 frames. 
], batch size: 54, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:56:29,688 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.555e+02 6.316e+02 7.348e+02 9.529e+02 1.491e+03, threshold=1.470e+03, percent-clipped=3.0 +2023-04-01 12:56:31,064 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:56:52,284 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52573.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 12:57:22,250 INFO [train.py:903] (3/4) Epoch 8, batch 4800, loss[loss=0.2219, simple_loss=0.2836, pruned_loss=0.08009, over 19728.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3209, pruned_loss=0.09213, over 3826476.79 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:22,769 INFO [train.py:903] (3/4) Epoch 8, batch 4850, loss[loss=0.234, simple_loss=0.3021, pruned_loss=0.08297, over 19469.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3197, pruned_loss=0.09143, over 3826450.83 frames. ], batch size: 49, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:58:32,081 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.021e+02 7.604e+02 9.872e+02 2.114e+03, threshold=1.521e+03, percent-clipped=8.0 +2023-04-01 12:58:45,976 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 12:58:52,949 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52670.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:58:58,365 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 12:59:08,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 12:59:14,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 12:59:14,127 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 12:59:23,225 INFO [train.py:903] (3/4) Epoch 8, batch 4900, loss[loss=0.2864, simple_loss=0.3512, pruned_loss=0.1108, over 18159.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3194, pruned_loss=0.09098, over 3824671.03 frames. ], batch size: 83, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 12:59:24,407 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 12:59:44,292 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 12:59:49,955 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8036, 4.4020, 2.5797, 3.9548, 1.1213, 4.0234, 4.1372, 4.2107], + device='cuda:3'), covar=tensor([0.0583, 0.1069, 0.1969, 0.0759, 0.3790, 0.0745, 0.0714, 0.0841], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0349, 0.0407, 0.0302, 0.0367, 0.0333, 0.0327, 0.0364], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:00:22,661 INFO [train.py:903] (3/4) Epoch 8, batch 4950, loss[loss=0.3017, simple_loss=0.3585, pruned_loss=0.1225, over 13132.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.321, pruned_loss=0.09212, over 3819884.60 frames. 
], batch size: 136, lr: 1.03e-02, grad_scale: 8.0 +2023-04-01 13:00:25,766 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2032, 3.8143, 2.5482, 3.4707, 1.0277, 3.5229, 3.5789, 3.7028], + device='cuda:3'), covar=tensor([0.0691, 0.1027, 0.1952, 0.0728, 0.3917, 0.0871, 0.0753, 0.1029], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0346, 0.0406, 0.0300, 0.0365, 0.0331, 0.0325, 0.0362], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:00:35,702 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 6.374e+02 8.178e+02 1.048e+03 2.702e+03, threshold=1.636e+03, percent-clipped=11.0 +2023-04-01 13:00:40,340 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 13:00:53,324 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:00:56,957 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:04,931 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 13:01:17,721 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:26,623 INFO [train.py:903] (3/4) Epoch 8, batch 5000, loss[loss=0.2499, simple_loss=0.3239, pruned_loss=0.08799, over 19764.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3223, pruned_loss=0.09257, over 3826205.65 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:01:29,351 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:01:35,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 13:01:42,313 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-01 13:01:47,281 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 13:01:51,287 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7161, 3.1429, 3.2247, 3.1986, 1.2028, 3.0176, 2.7135, 2.9607], + device='cuda:3'), covar=tensor([0.1345, 0.0815, 0.0784, 0.0841, 0.4517, 0.0716, 0.0709, 0.1263], + device='cuda:3'), in_proj_covar=tensor([0.0597, 0.0524, 0.0715, 0.0601, 0.0666, 0.0464, 0.0449, 0.0661], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 13:02:28,532 INFO [train.py:903] (3/4) Epoch 8, batch 5050, loss[loss=0.2546, simple_loss=0.3359, pruned_loss=0.08664, over 19527.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3217, pruned_loss=0.09203, over 3829073.44 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:02:39,034 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 5.648e+02 7.062e+02 8.811e+02 1.795e+03, threshold=1.412e+03, percent-clipped=2.0 +2023-04-01 13:02:51,282 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.78 vs. limit=5.0 +2023-04-01 13:03:05,006 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-01 13:03:15,919 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-01 13:03:30,516 INFO [train.py:903] (3/4) Epoch 8, batch 5100, loss[loss=0.2976, simple_loss=0.3548, pruned_loss=0.1202, over 19077.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3219, pruned_loss=0.09244, over 3837296.43 frames. ], batch size: 69, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:03:36,923 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 13:03:41,012 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 13:03:45,419 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 13:03:50,847 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 13:03:59,150 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52917.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:04:09,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:32,217 INFO [train.py:903] (3/4) Epoch 8, batch 5150, loss[loss=0.2559, simple_loss=0.3221, pruned_loss=0.09488, over 19588.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3217, pruned_loss=0.09252, over 3822706.26 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 4.0 +2023-04-01 13:04:41,050 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:04:45,912 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.391e+02 6.060e+02 7.983e+02 1.041e+03 2.368e+03, threshold=1.597e+03, percent-clipped=6.0 +2023-04-01 13:04:47,183 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:04:59,549 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 13:05:19,599 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:05:27,526 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1808, 1.7850, 1.7624, 2.0287, 1.9493, 1.9434, 1.7123, 1.9859], + device='cuda:3'), covar=tensor([0.0840, 0.1441, 0.1234, 0.0958, 0.1145, 0.0446, 0.1086, 0.0599], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0355, 0.0289, 0.0240, 0.0301, 0.0242, 0.0274, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 13:05:37,043 INFO [train.py:903] (3/4) Epoch 8, batch 5200, loss[loss=0.292, simple_loss=0.3666, pruned_loss=0.1087, over 19531.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3225, pruned_loss=0.09288, over 3817431.47 frames. ], batch size: 56, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:05:51,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. 
Duration: 28.638875 +2023-04-01 13:05:54,952 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8076, 4.3059, 2.5349, 3.8109, 1.3180, 4.0526, 4.1039, 4.1886], + device='cuda:3'), covar=tensor([0.0590, 0.1092, 0.2063, 0.0779, 0.3543, 0.0798, 0.0732, 0.0963], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0340, 0.0400, 0.0296, 0.0356, 0.0326, 0.0320, 0.0356], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:06:21,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53032.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:06:36,721 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 13:06:38,343 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53045.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:06:39,057 INFO [train.py:903] (3/4) Epoch 8, batch 5250, loss[loss=0.2754, simple_loss=0.3297, pruned_loss=0.1106, over 19851.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3217, pruned_loss=0.09225, over 3825361.85 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:06:49,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.973e+02 7.081e+02 8.822e+02 3.028e+03, threshold=1.416e+03, percent-clipped=2.0 +2023-04-01 13:06:51,073 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 13:07:08,274 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:13,810 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53074.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:07:39,282 INFO [train.py:903] (3/4) Epoch 8, batch 5300, loss[loss=0.2502, simple_loss=0.3292, pruned_loss=0.08555, over 19106.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.322, pruned_loss=0.09257, over 3810368.28 frames. ], batch size: 69, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:07:57,461 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 13:08:03,088 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:08:15,992 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7650, 1.4517, 1.3820, 1.5917, 1.7300, 1.3867, 1.3953, 1.5459], + device='cuda:3'), covar=tensor([0.0978, 0.1683, 0.1567, 0.1219, 0.1317, 0.0894, 0.1413, 0.0873], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0354, 0.0286, 0.0238, 0.0298, 0.0241, 0.0273, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 13:08:41,250 INFO [train.py:903] (3/4) Epoch 8, batch 5350, loss[loss=0.2487, simple_loss=0.3225, pruned_loss=0.08743, over 19625.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.321, pruned_loss=0.09186, over 3823535.58 frames. ], batch size: 57, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:08:52,777 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 6.176e+02 7.478e+02 9.376e+02 1.338e+03, threshold=1.496e+03, percent-clipped=0.0 +2023-04-01 13:09:18,558 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-01 13:09:44,017 INFO [train.py:903] (3/4) Epoch 8, batch 5400, loss[loss=0.2258, simple_loss=0.3002, pruned_loss=0.07568, over 19627.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3209, pruned_loss=0.09158, over 3827691.66 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:24,915 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:10:47,203 INFO [train.py:903] (3/4) Epoch 8, batch 5450, loss[loss=0.2384, simple_loss=0.3059, pruned_loss=0.08541, over 19419.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3221, pruned_loss=0.09196, over 3839183.88 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:10:57,345 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.768e+02 7.150e+02 9.218e+02 2.127e+03, threshold=1.430e+03, percent-clipped=3.0 +2023-04-01 13:11:02,407 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53259.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:11:40,444 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53288.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:11:45,052 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 13:11:48,914 INFO [train.py:903] (3/4) Epoch 8, batch 5500, loss[loss=0.264, simple_loss=0.3136, pruned_loss=0.1072, over 19799.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3207, pruned_loss=0.09161, over 3828451.07 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:11,209 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53313.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:12:16,579 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 13:12:23,266 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:35,605 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53333.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:12:50,184 INFO [train.py:903] (3/4) Epoch 8, batch 5550, loss[loss=0.2301, simple_loss=0.2981, pruned_loss=0.08107, over 19581.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3222, pruned_loss=0.09225, over 3830679.23 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:12:59,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 13:13:03,077 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 6.394e+02 7.821e+02 9.803e+02 2.197e+03, threshold=1.564e+03, percent-clipped=2.0 +2023-04-01 13:13:40,394 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:13:50,361 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 13:13:53,936 INFO [train.py:903] (3/4) Epoch 8, batch 5600, loss[loss=0.2279, simple_loss=0.3101, pruned_loss=0.07285, over 19702.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3207, pruned_loss=0.09165, over 3837611.12 frames. 
], batch size: 59, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:14:21,449 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:14:24,159 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9948, 1.9444, 2.1170, 2.7777, 1.8580, 2.5790, 2.4984, 2.0140], + device='cuda:3'), covar=tensor([0.3022, 0.2634, 0.1285, 0.1436, 0.2907, 0.1188, 0.2771, 0.2237], + device='cuda:3'), in_proj_covar=tensor([0.0729, 0.0741, 0.0611, 0.0863, 0.0735, 0.0643, 0.0763, 0.0660], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:14:28,532 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8439, 1.4689, 1.4859, 1.8704, 3.3333, 1.2670, 2.1784, 3.5957], + device='cuda:3'), covar=tensor([0.0374, 0.2322, 0.2453, 0.1332, 0.0587, 0.1940, 0.1202, 0.0278], + device='cuda:3'), in_proj_covar=tensor([0.0326, 0.0325, 0.0336, 0.0301, 0.0328, 0.0317, 0.0306, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 13:14:57,497 INFO [train.py:903] (3/4) Epoch 8, batch 5650, loss[loss=0.1952, simple_loss=0.2714, pruned_loss=0.0595, over 19381.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3205, pruned_loss=0.09109, over 3837255.32 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:15:07,837 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.837e+02 5.961e+02 7.306e+02 9.270e+02 2.985e+03, threshold=1.461e+03, percent-clipped=1.0 +2023-04-01 13:15:45,797 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 13:15:46,280 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:15:58,772 INFO [train.py:903] (3/4) Epoch 8, batch 5700, loss[loss=0.2158, simple_loss=0.2781, pruned_loss=0.07678, over 19748.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3186, pruned_loss=0.09065, over 3833905.16 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:16:15,430 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:44,944 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:16:59,703 INFO [train.py:903] (3/4) Epoch 8, batch 5750, loss[loss=0.339, simple_loss=0.388, pruned_loss=0.145, over 18042.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3187, pruned_loss=0.09022, over 3847509.51 frames. ], batch size: 83, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:17:00,955 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 13:17:10,453 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 13:17:11,664 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 5.699e+02 6.647e+02 8.184e+02 1.829e+03, threshold=1.329e+03, percent-clipped=1.0 +2023-04-01 13:17:15,870 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 13:17:19,443 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6405, 1.4681, 1.4814, 2.0604, 1.5723, 2.0003, 2.0105, 1.7594], + device='cuda:3'), covar=tensor([0.0834, 0.0924, 0.0960, 0.0843, 0.0924, 0.0654, 0.0933, 0.0639], + device='cuda:3'), in_proj_covar=tensor([0.0215, 0.0228, 0.0225, 0.0255, 0.0242, 0.0211, 0.0206, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 13:18:02,116 INFO [train.py:903] (3/4) Epoch 8, batch 5800, loss[loss=0.215, simple_loss=0.2821, pruned_loss=0.07396, over 19789.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3186, pruned_loss=0.09017, over 3845053.93 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:18:12,762 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:04,521 INFO [train.py:903] (3/4) Epoch 8, batch 5850, loss[loss=0.2608, simple_loss=0.3302, pruned_loss=0.09565, over 18788.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3179, pruned_loss=0.0894, over 3849993.84 frames. ], batch size: 74, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:19:15,051 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.371e+02 7.746e+02 9.220e+02 2.993e+03, threshold=1.549e+03, percent-clipped=10.0 +2023-04-01 13:19:27,647 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:19:42,003 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:05,079 INFO [train.py:903] (3/4) Epoch 8, batch 5900, loss[loss=0.248, simple_loss=0.3268, pruned_loss=0.08454, over 19463.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3192, pruned_loss=0.09078, over 3831967.07 frames. ], batch size: 64, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:20:09,611 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 13:20:30,262 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 13:20:34,009 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53718.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:20:47,028 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53729.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:21:06,831 INFO [train.py:903] (3/4) Epoch 8, batch 5950, loss[loss=0.1965, simple_loss=0.2728, pruned_loss=0.06013, over 19363.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3195, pruned_loss=0.09118, over 3825200.35 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:21:19,042 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 5.886e+02 7.195e+02 1.025e+03 2.007e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-04-01 13:21:33,094 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 13:21:47,825 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-04-01 13:21:50,846 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:00,165 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53789.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:04,758 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53792.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:22:09,975 INFO [train.py:903] (3/4) Epoch 8, batch 6000, loss[loss=0.2177, simple_loss=0.2881, pruned_loss=0.07368, over 19792.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3176, pruned_loss=0.08974, over 3822619.10 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:22:09,975 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 13:22:22,635 INFO [train.py:937] (3/4) Epoch 8, validation: loss=0.1864, simple_loss=0.2865, pruned_loss=0.04314, over 944034.00 frames. +2023-04-01 13:22:22,636 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 13:22:29,222 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1310, 1.2932, 1.7926, 1.2673, 2.8403, 3.8433, 3.5670, 4.0374], + device='cuda:3'), covar=tensor([0.1510, 0.3229, 0.2829, 0.1907, 0.0427, 0.0125, 0.0179, 0.0139], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0289, 0.0317, 0.0246, 0.0208, 0.0140, 0.0205, 0.0175], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 13:22:48,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53814.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:24,824 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53844.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:23:26,630 INFO [train.py:903] (3/4) Epoch 8, batch 6050, loss[loss=0.3216, simple_loss=0.3643, pruned_loss=0.1395, over 13366.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3179, pruned_loss=0.08971, over 3823237.17 frames. ], batch size: 136, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:23:39,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.583e+02 7.013e+02 9.917e+02 2.418e+03, threshold=1.403e+03, percent-clipped=8.0 +2023-04-01 13:24:11,899 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3287, 1.4406, 1.9265, 1.6319, 2.5579, 2.1468, 2.7180, 1.1945], + device='cuda:3'), covar=tensor([0.2173, 0.3646, 0.2091, 0.1733, 0.1448, 0.1889, 0.1538, 0.3437], + device='cuda:3'), in_proj_covar=tensor([0.0465, 0.0539, 0.0546, 0.0416, 0.0574, 0.0472, 0.0635, 0.0472], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:24:18,426 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1537, 5.5053, 3.1110, 4.7702, 0.9160, 5.2537, 5.3997, 5.5641], + device='cuda:3'), covar=tensor([0.0400, 0.0742, 0.1635, 0.0629, 0.4144, 0.0636, 0.0706, 0.0751], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0345, 0.0408, 0.0305, 0.0370, 0.0334, 0.0327, 0.0364], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:24:30,152 INFO [train.py:903] (3/4) Epoch 8, batch 6100, loss[loss=0.2106, simple_loss=0.2761, pruned_loss=0.07256, over 19731.00 frames. 
], tot_loss[loss=0.2483, simple_loss=0.3173, pruned_loss=0.08963, over 3830599.98 frames. ], batch size: 45, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:24:48,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1674, 1.1979, 1.0849, 0.9337, 0.9664, 1.0465, 0.0710, 0.3364], + device='cuda:3'), covar=tensor([0.0394, 0.0369, 0.0252, 0.0316, 0.0736, 0.0307, 0.0673, 0.0628], + device='cuda:3'), in_proj_covar=tensor([0.0324, 0.0318, 0.0318, 0.0334, 0.0411, 0.0336, 0.0294, 0.0316], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 13:25:07,691 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-01 13:25:22,434 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2205, 2.2564, 2.2046, 3.2341, 2.0977, 3.1890, 2.8419, 2.1028], + device='cuda:3'), covar=tensor([0.3136, 0.2478, 0.1191, 0.1540, 0.3158, 0.1081, 0.2623, 0.2170], + device='cuda:3'), in_proj_covar=tensor([0.0720, 0.0735, 0.0609, 0.0856, 0.0735, 0.0642, 0.0758, 0.0658], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:25:31,558 INFO [train.py:903] (3/4) Epoch 8, batch 6150, loss[loss=0.254, simple_loss=0.3251, pruned_loss=0.09141, over 19300.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.318, pruned_loss=0.08963, over 3826194.99 frames. ], batch size: 66, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:25:42,179 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.923e+02 5.703e+02 7.303e+02 8.849e+02 1.874e+03, threshold=1.461e+03, percent-clipped=4.0 +2023-04-01 13:25:56,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 13:26:07,957 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53974.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:26:12,673 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0148, 1.7000, 2.0558, 1.9462, 4.4432, 0.8502, 2.4044, 4.7661], + device='cuda:3'), covar=tensor([0.0315, 0.2485, 0.2355, 0.1709, 0.0719, 0.2829, 0.1308, 0.0211], + device='cuda:3'), in_proj_covar=tensor([0.0326, 0.0327, 0.0337, 0.0307, 0.0332, 0.0322, 0.0315, 0.0327], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 13:26:33,376 INFO [train.py:903] (3/4) Epoch 8, batch 6200, loss[loss=0.2191, simple_loss=0.2999, pruned_loss=0.06916, over 19680.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3186, pruned_loss=0.08981, over 3836141.61 frames. ], batch size: 53, lr: 1.02e-02, grad_scale: 8.0 +2023-04-01 13:26:38,487 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:25,521 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54037.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:37,603 INFO [train.py:903] (3/4) Epoch 8, batch 6250, loss[loss=0.2493, simple_loss=0.3252, pruned_loss=0.08667, over 19650.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3184, pruned_loss=0.08969, over 3831473.77 frames. 
], batch size: 53, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:27:41,238 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:27:49,336 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.888e+02 5.723e+02 7.068e+02 9.572e+02 2.133e+03, threshold=1.414e+03, percent-clipped=6.0 +2023-04-01 13:27:58,062 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:04,596 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 13:28:10,774 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:28:40,890 INFO [train.py:903] (3/4) Epoch 8, batch 6300, loss[loss=0.2307, simple_loss=0.2951, pruned_loss=0.0831, over 19473.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3192, pruned_loss=0.09028, over 3827998.46 frames. ], batch size: 49, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:28:45,605 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54100.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:05,554 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-01 13:29:16,191 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:29:41,764 INFO [train.py:903] (3/4) Epoch 8, batch 6350, loss[loss=0.219, simple_loss=0.2927, pruned_loss=0.0727, over 19624.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3207, pruned_loss=0.09097, over 3833282.56 frames. 
], batch size: 50, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:29:52,050 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.072e+02 6.055e+02 7.520e+02 8.667e+02 2.456e+03, threshold=1.504e+03, percent-clipped=3.0 +2023-04-01 13:30:03,340 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1643, 1.2413, 1.8944, 1.4043, 2.5485, 2.1356, 2.7422, 1.0494], + device='cuda:3'), covar=tensor([0.2253, 0.3815, 0.2027, 0.1739, 0.1390, 0.1806, 0.1501, 0.3554], + device='cuda:3'), in_proj_covar=tensor([0.0472, 0.0548, 0.0552, 0.0421, 0.0579, 0.0475, 0.0642, 0.0476], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:30:21,619 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1784, 1.1374, 1.4206, 1.1503, 2.4617, 3.1251, 2.9420, 3.3757], + device='cuda:3'), covar=tensor([0.1716, 0.4338, 0.3927, 0.2237, 0.0611, 0.0253, 0.0309, 0.0230], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0287, 0.0317, 0.0247, 0.0209, 0.0140, 0.0206, 0.0175], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 13:30:36,876 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2437, 1.3647, 1.8527, 1.4735, 2.5778, 2.2025, 2.7203, 0.9816], + device='cuda:3'), covar=tensor([0.2050, 0.3520, 0.1952, 0.1637, 0.1365, 0.1705, 0.1385, 0.3487], + device='cuda:3'), in_proj_covar=tensor([0.0469, 0.0546, 0.0552, 0.0421, 0.0578, 0.0475, 0.0640, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:30:43,433 INFO [train.py:903] (3/4) Epoch 8, batch 6400, loss[loss=0.2397, simple_loss=0.3165, pruned_loss=0.08141, over 19628.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3204, pruned_loss=0.09084, over 3821472.12 frames. ], batch size: 57, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:45,992 INFO [train.py:903] (3/4) Epoch 8, batch 6450, loss[loss=0.2251, simple_loss=0.3063, pruned_loss=0.07192, over 19326.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3184, pruned_loss=0.08975, over 3819177.52 frames. ], batch size: 66, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:31:54,381 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-01 13:31:54,410 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.92 vs. limit=5.0 +2023-04-01 13:31:55,091 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7148, 4.1916, 2.5916, 3.8231, 1.1170, 3.9706, 4.0046, 4.2017], + device='cuda:3'), covar=tensor([0.0598, 0.1168, 0.2118, 0.0752, 0.4120, 0.0822, 0.0744, 0.1029], + device='cuda:3'), in_proj_covar=tensor([0.0404, 0.0345, 0.0408, 0.0302, 0.0370, 0.0331, 0.0327, 0.0361], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:31:58,337 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 5.571e+02 6.761e+02 8.500e+02 1.702e+03, threshold=1.352e+03, percent-clipped=3.0 +2023-04-01 13:32:30,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 13:32:48,752 INFO [train.py:903] (3/4) Epoch 8, batch 6500, loss[loss=0.2308, simple_loss=0.3115, pruned_loss=0.07501, over 19717.00 frames. 
], tot_loss[loss=0.251, simple_loss=0.3201, pruned_loss=0.09093, over 3809937.56 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:32:54,456 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 13:33:50,188 INFO [train.py:903] (3/4) Epoch 8, batch 6550, loss[loss=0.2338, simple_loss=0.3046, pruned_loss=0.08153, over 19870.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3208, pruned_loss=0.0916, over 3805462.29 frames. ], batch size: 52, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:34:00,244 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 13:34:00,552 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+02 6.329e+02 8.151e+02 1.084e+03 2.341e+03, threshold=1.630e+03, percent-clipped=12.0 +2023-04-01 13:34:13,780 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-01 13:34:51,146 INFO [train.py:903] (3/4) Epoch 8, batch 6600, loss[loss=0.2059, simple_loss=0.2741, pruned_loss=0.06887, over 19726.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3205, pruned_loss=0.09117, over 3802124.77 frames. ], batch size: 46, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:35:53,846 INFO [train.py:903] (3/4) Epoch 8, batch 6650, loss[loss=0.2191, simple_loss=0.2929, pruned_loss=0.0726, over 19609.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.321, pruned_loss=0.09132, over 3809071.41 frames. ], batch size: 50, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:36:04,916 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.746e+02 5.865e+02 7.808e+02 1.010e+03 1.907e+03, threshold=1.562e+03, percent-clipped=2.0 +2023-04-01 13:36:55,565 INFO [train.py:903] (3/4) Epoch 8, batch 6700, loss[loss=0.2144, simple_loss=0.2855, pruned_loss=0.07164, over 19767.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3194, pruned_loss=0.09022, over 3816991.53 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:37:14,058 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 13:37:52,326 INFO [train.py:903] (3/4) Epoch 8, batch 6750, loss[loss=0.2739, simple_loss=0.3414, pruned_loss=0.1032, over 19505.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3192, pruned_loss=0.08988, over 3824390.47 frames. ], batch size: 64, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:38:03,620 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.992e+02 6.380e+02 7.224e+02 9.353e+02 2.017e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 13:38:50,509 INFO [train.py:903] (3/4) Epoch 8, batch 6800, loss[loss=0.2586, simple_loss=0.3334, pruned_loss=0.09184, over 19546.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3191, pruned_loss=0.08991, over 3818280.63 frames. ], batch size: 56, lr: 1.01e-02, grad_scale: 8.0 +2023-04-01 13:39:34,066 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 13:39:35,158 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 13:39:38,465 INFO [train.py:903] (3/4) Epoch 9, batch 0, loss[loss=0.2179, simple_loss=0.3023, pruned_loss=0.06675, over 19351.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.3023, pruned_loss=0.06675, over 19351.00 frames. 
], batch size: 70, lr: 9.56e-03, grad_scale: 8.0 +2023-04-01 13:39:38,465 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 13:39:49,511 INFO [train.py:937] (3/4) Epoch 9, validation: loss=0.1866, simple_loss=0.2872, pruned_loss=0.04294, over 944034.00 frames. +2023-04-01 13:39:49,512 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 13:40:03,819 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 13:40:28,247 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.879e+02 5.500e+02 7.208e+02 8.930e+02 1.459e+03, threshold=1.442e+03, percent-clipped=1.0 +2023-04-01 13:40:51,457 INFO [train.py:903] (3/4) Epoch 9, batch 50, loss[loss=0.2658, simple_loss=0.3314, pruned_loss=0.1001, over 19652.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3225, pruned_loss=0.09217, over 866084.89 frames. ], batch size: 55, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:41:02,967 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54682.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:41:26,440 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 13:41:53,038 INFO [train.py:903] (3/4) Epoch 9, batch 100, loss[loss=0.299, simple_loss=0.3502, pruned_loss=0.1239, over 13181.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3201, pruned_loss=0.09164, over 1515338.42 frames. ], batch size: 136, lr: 9.55e-03, grad_scale: 8.0 +2023-04-01 13:42:05,417 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 13:42:18,629 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3235, 1.3545, 1.5647, 1.5103, 2.3134, 2.0674, 2.3253, 0.9221], + device='cuda:3'), covar=tensor([0.2076, 0.3727, 0.2216, 0.1658, 0.1271, 0.1767, 0.1277, 0.3375], + device='cuda:3'), in_proj_covar=tensor([0.0472, 0.0550, 0.0556, 0.0423, 0.0580, 0.0475, 0.0643, 0.0476], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:42:31,166 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.564e+02 7.190e+02 9.001e+02 2.877e+03, threshold=1.438e+03, percent-clipped=2.0 +2023-04-01 13:42:53,296 INFO [train.py:903] (3/4) Epoch 9, batch 150, loss[loss=0.2749, simple_loss=0.3468, pruned_loss=0.1015, over 19664.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3186, pruned_loss=0.08998, over 2026082.66 frames. ], batch size: 58, lr: 9.54e-03, grad_scale: 16.0 +2023-04-01 13:43:05,260 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3086, 1.4650, 1.7964, 1.5444, 2.6520, 2.2365, 2.7598, 1.0530], + device='cuda:3'), covar=tensor([0.2037, 0.3446, 0.2026, 0.1598, 0.1261, 0.1679, 0.1233, 0.3323], + device='cuda:3'), in_proj_covar=tensor([0.0474, 0.0549, 0.0556, 0.0424, 0.0583, 0.0477, 0.0642, 0.0479], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:43:53,897 INFO [train.py:903] (3/4) Epoch 9, batch 200, loss[loss=0.2354, simple_loss=0.3077, pruned_loss=0.08156, over 19515.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3189, pruned_loss=0.09018, over 2425898.73 frames. ], batch size: 54, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:43:56,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-01 13:43:57,440 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0279, 5.3346, 3.0235, 4.7528, 1.1185, 5.2389, 5.2675, 5.4763], + device='cuda:3'), covar=tensor([0.0411, 0.0850, 0.1736, 0.0580, 0.3960, 0.0585, 0.0615, 0.0643], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0339, 0.0406, 0.0300, 0.0365, 0.0330, 0.0326, 0.0358], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 13:44:36,458 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.883e+02 7.738e+02 9.204e+02 1.688e+03, threshold=1.548e+03, percent-clipped=2.0 +2023-04-01 13:44:57,178 INFO [train.py:903] (3/4) Epoch 9, batch 250, loss[loss=0.2927, simple_loss=0.3538, pruned_loss=0.1158, over 19056.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3198, pruned_loss=0.09127, over 2723575.79 frames. ], batch size: 69, lr: 9.54e-03, grad_scale: 8.0 +2023-04-01 13:45:06,583 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=54880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:45:39,341 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1565, 3.6554, 2.1370, 1.9374, 3.2217, 1.6868, 1.3878, 2.0973], + device='cuda:3'), covar=tensor([0.1007, 0.0376, 0.0773, 0.0721, 0.0415, 0.0989, 0.0881, 0.0574], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0290, 0.0321, 0.0241, 0.0228, 0.0310, 0.0287, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 13:45:57,840 INFO [train.py:903] (3/4) Epoch 9, batch 300, loss[loss=0.3085, simple_loss=0.3613, pruned_loss=0.1279, over 13085.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3191, pruned_loss=0.09143, over 2963446.27 frames. ], batch size: 135, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:46:39,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 5.660e+02 7.032e+02 9.457e+02 2.087e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-04-01 13:47:01,341 INFO [train.py:903] (3/4) Epoch 9, batch 350, loss[loss=0.2666, simple_loss=0.3285, pruned_loss=0.1023, over 18150.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.319, pruned_loss=0.0908, over 3162766.09 frames. ], batch size: 83, lr: 9.53e-03, grad_scale: 8.0 +2023-04-01 13:47:07,268 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 13:48:03,062 INFO [train.py:903] (3/4) Epoch 9, batch 400, loss[loss=0.2248, simple_loss=0.3123, pruned_loss=0.06862, over 19643.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3188, pruned_loss=0.09025, over 3322451.49 frames. ], batch size: 59, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:48:06,566 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55026.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:48:38,448 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-01 13:48:44,520 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.195e+02 5.719e+02 7.898e+02 1.018e+03 2.327e+03, threshold=1.580e+03, percent-clipped=4.0 +2023-04-01 13:49:04,315 INFO [train.py:903] (3/4) Epoch 9, batch 450, loss[loss=0.2397, simple_loss=0.302, pruned_loss=0.08868, over 19404.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3176, pruned_loss=0.08922, over 3439373.35 frames. 
], batch size: 48, lr: 9.52e-03, grad_scale: 8.0 +2023-04-01 13:49:42,327 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 13:49:43,518 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 13:50:00,657 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6495, 1.7014, 1.6210, 2.6124, 1.6730, 2.3819, 2.0393, 1.3468], + device='cuda:3'), covar=tensor([0.3889, 0.3249, 0.2060, 0.1836, 0.3314, 0.1483, 0.3968, 0.3726], + device='cuda:3'), in_proj_covar=tensor([0.0741, 0.0750, 0.0622, 0.0870, 0.0750, 0.0657, 0.0771, 0.0675], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:50:07,769 INFO [train.py:903] (3/4) Epoch 9, batch 500, loss[loss=0.2445, simple_loss=0.3144, pruned_loss=0.0873, over 19849.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.318, pruned_loss=0.08941, over 3529250.04 frames. ], batch size: 52, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:50:30,917 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55141.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:50:47,935 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.663e+02 6.183e+02 7.261e+02 8.870e+02 1.589e+03, threshold=1.452e+03, percent-clipped=1.0 +2023-04-01 13:51:00,606 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9498, 3.3690, 3.6936, 3.7180, 1.3140, 3.2927, 3.0978, 3.1766], + device='cuda:3'), covar=tensor([0.2209, 0.1590, 0.1244, 0.1283, 0.6580, 0.1339, 0.1136, 0.2319], + device='cuda:3'), in_proj_covar=tensor([0.0617, 0.0545, 0.0733, 0.0613, 0.0667, 0.0475, 0.0462, 0.0671], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 13:51:01,581 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55166.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:51:04,360 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 13:51:10,562 INFO [train.py:903] (3/4) Epoch 9, batch 550, loss[loss=0.2544, simple_loss=0.3231, pruned_loss=0.09287, over 18819.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3186, pruned_loss=0.08967, over 3588112.01 frames. ], batch size: 74, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:14,553 INFO [train.py:903] (3/4) Epoch 9, batch 600, loss[loss=0.2276, simple_loss=0.3024, pruned_loss=0.07645, over 19853.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3187, pruned_loss=0.08991, over 3643681.71 frames. 
], batch size: 52, lr: 9.51e-03, grad_scale: 8.0 +2023-04-01 13:52:15,869 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55224.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:21,730 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:52:47,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1119, 1.1381, 1.5278, 1.0711, 2.5257, 3.4782, 3.2719, 3.6966], + device='cuda:3'), covar=tensor([0.1528, 0.3408, 0.3211, 0.2150, 0.0529, 0.0162, 0.0233, 0.0171], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0290, 0.0319, 0.0250, 0.0211, 0.0142, 0.0208, 0.0177], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 13:52:55,192 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.838e+02 6.783e+02 8.586e+02 3.812e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 13:52:57,873 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5354, 4.0564, 4.2206, 4.1538, 1.6259, 3.8558, 3.4393, 3.9060], + device='cuda:3'), covar=tensor([0.1151, 0.0645, 0.0482, 0.0547, 0.4203, 0.0596, 0.0601, 0.0926], + device='cuda:3'), in_proj_covar=tensor([0.0619, 0.0543, 0.0732, 0.0613, 0.0668, 0.0476, 0.0462, 0.0671], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 13:52:58,743 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 13:53:13,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1741, 1.2445, 1.1209, 0.9499, 1.0384, 1.0533, 0.0461, 0.2835], + device='cuda:3'), covar=tensor([0.0381, 0.0372, 0.0231, 0.0327, 0.0734, 0.0316, 0.0696, 0.0644], + device='cuda:3'), in_proj_covar=tensor([0.0319, 0.0313, 0.0313, 0.0333, 0.0408, 0.0334, 0.0296, 0.0315], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 13:53:16,398 INFO [train.py:903] (3/4) Epoch 9, batch 650, loss[loss=0.2259, simple_loss=0.3036, pruned_loss=0.07408, over 19667.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3186, pruned_loss=0.08953, over 3696042.95 frames. ], batch size: 58, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:19,295 INFO [train.py:903] (3/4) Epoch 9, batch 700, loss[loss=0.2661, simple_loss=0.339, pruned_loss=0.09657, over 19686.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3196, pruned_loss=0.09036, over 3712913.47 frames. ], batch size: 59, lr: 9.50e-03, grad_scale: 4.0 +2023-04-01 13:54:41,861 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55339.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:55:02,468 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.553e+02 6.808e+02 9.255e+02 2.546e+03, threshold=1.362e+03, percent-clipped=4.0 +2023-04-01 13:55:22,969 INFO [train.py:903] (3/4) Epoch 9, batch 750, loss[loss=0.2415, simple_loss=0.3196, pruned_loss=0.08176, over 19522.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3198, pruned_loss=0.09031, over 3733874.16 frames. 
], batch size: 54, lr: 9.49e-03, grad_scale: 4.0 +2023-04-01 13:55:53,603 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55397.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 13:56:25,994 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55422.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 13:56:26,680 INFO [train.py:903] (3/4) Epoch 9, batch 800, loss[loss=0.1961, simple_loss=0.274, pruned_loss=0.05911, over 19469.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3201, pruned_loss=0.09064, over 3746006.45 frames. ], batch size: 49, lr: 9.49e-03, grad_scale: 8.0 +2023-04-01 13:56:41,969 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 13:57:07,881 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.631e+02 7.179e+02 9.586e+02 1.610e+03, threshold=1.436e+03, percent-clipped=4.0 +2023-04-01 13:57:28,778 INFO [train.py:903] (3/4) Epoch 9, batch 850, loss[loss=0.2405, simple_loss=0.3083, pruned_loss=0.08639, over 19687.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3182, pruned_loss=0.08947, over 3758322.23 frames. ], batch size: 53, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:58:10,388 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1087, 2.0838, 2.2185, 2.8002, 2.2281, 2.6712, 2.5121, 2.0246], + device='cuda:3'), covar=tensor([0.2364, 0.1888, 0.0954, 0.1088, 0.2030, 0.0830, 0.1880, 0.1675], + device='cuda:3'), in_proj_covar=tensor([0.0743, 0.0756, 0.0627, 0.0872, 0.0751, 0.0658, 0.0774, 0.0677], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 13:58:14,371 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:58:22,708 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 13:58:29,596 INFO [train.py:903] (3/4) Epoch 9, batch 900, loss[loss=0.3169, simple_loss=0.3652, pruned_loss=0.1343, over 13211.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3183, pruned_loss=0.09041, over 3756602.09 frames. ], batch size: 136, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:12,476 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.962e+02 5.974e+02 7.143e+02 8.825e+02 2.413e+03, threshold=1.429e+03, percent-clipped=6.0 +2023-04-01 13:59:32,016 INFO [train.py:903] (3/4) Epoch 9, batch 950, loss[loss=0.2864, simple_loss=0.3357, pruned_loss=0.1186, over 19386.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3191, pruned_loss=0.09076, over 3761058.52 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 8.0 +2023-04-01 13:59:32,192 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 13:59:37,616 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 14:00:01,053 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55595.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:32,390 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:00:35,415 INFO [train.py:903] (3/4) Epoch 9, batch 1000, loss[loss=0.2572, simple_loss=0.3287, pruned_loss=0.09285, over 19405.00 frames. 
], tot_loss[loss=0.2493, simple_loss=0.3187, pruned_loss=0.08993, over 3782837.64 frames. ], batch size: 70, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:00:38,011 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55625.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:01:18,080 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.465e+02 5.596e+02 6.834e+02 8.838e+02 1.578e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 14:01:29,784 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 14:01:39,291 INFO [train.py:903] (3/4) Epoch 9, batch 1050, loss[loss=0.2344, simple_loss=0.3103, pruned_loss=0.0792, over 19526.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3178, pruned_loss=0.08906, over 3800994.60 frames. ], batch size: 56, lr: 9.47e-03, grad_scale: 8.0 +2023-04-01 14:01:57,386 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:02:10,953 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 14:02:42,620 INFO [train.py:903] (3/4) Epoch 9, batch 1100, loss[loss=0.2437, simple_loss=0.3183, pruned_loss=0.08452, over 18705.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3181, pruned_loss=0.08941, over 3798812.33 frames. ], batch size: 74, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:03:25,905 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 6.007e+02 7.412e+02 9.315e+02 2.515e+03, threshold=1.482e+03, percent-clipped=6.0 +2023-04-01 14:03:28,849 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-01 14:03:36,831 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55766.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:03:43,631 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2029, 1.7919, 1.4503, 1.2321, 1.6551, 1.1515, 1.2138, 1.6473], + device='cuda:3'), covar=tensor([0.0589, 0.0648, 0.0856, 0.0552, 0.0413, 0.0976, 0.0461, 0.0350], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0290, 0.0319, 0.0237, 0.0228, 0.0313, 0.0285, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 14:03:45,585 INFO [train.py:903] (3/4) Epoch 9, batch 1150, loss[loss=0.2395, simple_loss=0.3182, pruned_loss=0.08039, over 19788.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3186, pruned_loss=0.08988, over 3805116.55 frames. ], batch size: 56, lr: 9.46e-03, grad_scale: 8.0 +2023-04-01 14:03:46,128 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6449, 1.7366, 1.8916, 2.1425, 1.3849, 1.8816, 2.1384, 1.7542], + device='cuda:3'), covar=tensor([0.3068, 0.2366, 0.1194, 0.1359, 0.2658, 0.1188, 0.2872, 0.2226], + device='cuda:3'), in_proj_covar=tensor([0.0744, 0.0751, 0.0623, 0.0871, 0.0748, 0.0657, 0.0767, 0.0675], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 14:04:14,222 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-01 14:04:27,005 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8202, 2.1775, 2.3563, 2.6740, 2.6434, 2.3448, 2.2957, 3.0550], + device='cuda:3'), covar=tensor([0.0727, 0.1652, 0.1218, 0.0939, 0.1094, 0.0486, 0.1037, 0.0476], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0353, 0.0291, 0.0240, 0.0298, 0.0244, 0.0271, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 14:04:50,173 INFO [train.py:903] (3/4) Epoch 9, batch 1200, loss[loss=0.2949, simple_loss=0.3506, pruned_loss=0.1196, over 14199.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3173, pruned_loss=0.08935, over 3795339.82 frames. ], batch size: 137, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:05:13,340 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3696, 1.2102, 1.3362, 1.5457, 2.9046, 1.0449, 2.1310, 3.2004], + device='cuda:3'), covar=tensor([0.0408, 0.2575, 0.2615, 0.1625, 0.0710, 0.2343, 0.1126, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0325, 0.0326, 0.0336, 0.0303, 0.0331, 0.0321, 0.0311, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 14:05:18,980 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 14:05:31,642 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.284e+02 5.926e+02 7.645e+02 1.010e+03 3.329e+03, threshold=1.529e+03, percent-clipped=6.0 +2023-04-01 14:05:44,772 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:05:53,922 INFO [train.py:903] (3/4) Epoch 9, batch 1250, loss[loss=0.241, simple_loss=0.3097, pruned_loss=0.08613, over 19470.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3183, pruned_loss=0.08999, over 3797813.26 frames. ], batch size: 49, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:06:03,217 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55881.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:35,371 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55906.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:06:55,962 INFO [train.py:903] (3/4) Epoch 9, batch 1300, loss[loss=0.2378, simple_loss=0.3151, pruned_loss=0.08024, over 19789.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3182, pruned_loss=0.08986, over 3805218.71 frames. 
], batch size: 56, lr: 9.45e-03, grad_scale: 8.0 +2023-04-01 14:07:00,756 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1768, 3.6473, 3.7699, 3.7733, 1.5423, 3.4875, 3.1566, 3.4608], + device='cuda:3'), covar=tensor([0.1290, 0.0964, 0.0646, 0.0598, 0.4425, 0.0704, 0.0630, 0.1086], + device='cuda:3'), in_proj_covar=tensor([0.0620, 0.0544, 0.0738, 0.0618, 0.0675, 0.0481, 0.0466, 0.0678], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 14:07:20,259 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1670, 5.4125, 3.1473, 4.7941, 1.0148, 5.3398, 5.3332, 5.4583], + device='cuda:3'), covar=tensor([0.0415, 0.0824, 0.1650, 0.0612, 0.3916, 0.0637, 0.0634, 0.0865], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0341, 0.0413, 0.0306, 0.0369, 0.0338, 0.0332, 0.0363], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 14:07:24,033 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:39,615 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.793e+02 6.909e+02 8.321e+02 2.022e+03, threshold=1.382e+03, percent-clipped=2.0 +2023-04-01 14:07:54,311 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:07:58,594 INFO [train.py:903] (3/4) Epoch 9, batch 1350, loss[loss=0.2323, simple_loss=0.3121, pruned_loss=0.07627, over 19535.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3191, pruned_loss=0.08999, over 3812010.81 frames. ], batch size: 56, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:08:50,497 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-01 14:09:02,600 INFO [train.py:903] (3/4) Epoch 9, batch 1400, loss[loss=0.2399, simple_loss=0.3216, pruned_loss=0.07913, over 19594.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3183, pruned_loss=0.08956, over 3810600.79 frames. ], batch size: 61, lr: 9.44e-03, grad_scale: 8.0 +2023-04-01 14:09:47,082 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.536e+02 7.314e+02 8.995e+02 2.483e+03, threshold=1.463e+03, percent-clipped=9.0 +2023-04-01 14:10:07,189 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 14:10:08,160 INFO [train.py:903] (3/4) Epoch 9, batch 1450, loss[loss=0.2983, simple_loss=0.3618, pruned_loss=0.1174, over 18043.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3176, pruned_loss=0.08971, over 3803888.96 frames. 
], batch size: 83, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:10:34,605 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7916, 3.2495, 3.3095, 3.2897, 1.3173, 3.0807, 2.7522, 3.0370], + device='cuda:3'), covar=tensor([0.1381, 0.0803, 0.0744, 0.0768, 0.4503, 0.0750, 0.0788, 0.1227], + device='cuda:3'), in_proj_covar=tensor([0.0615, 0.0543, 0.0739, 0.0621, 0.0680, 0.0486, 0.0467, 0.0675], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 14:10:55,222 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56110.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:11:11,141 INFO [train.py:903] (3/4) Epoch 9, batch 1500, loss[loss=0.311, simple_loss=0.3611, pruned_loss=0.1305, over 17136.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3164, pruned_loss=0.08872, over 3803494.13 frames. ], batch size: 101, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:11:52,399 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.776e+02 6.008e+02 7.164e+02 9.066e+02 2.093e+03, threshold=1.433e+03, percent-clipped=3.0 +2023-04-01 14:12:01,130 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56163.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:12:12,299 INFO [train.py:903] (3/4) Epoch 9, batch 1550, loss[loss=0.2501, simple_loss=0.318, pruned_loss=0.09108, over 19593.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3163, pruned_loss=0.08895, over 3812780.05 frames. ], batch size: 52, lr: 9.43e-03, grad_scale: 8.0 +2023-04-01 14:12:33,677 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 14:12:37,078 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.99 vs. limit=5.0 +2023-04-01 14:12:59,836 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:15,311 INFO [train.py:903] (3/4) Epoch 9, batch 1600, loss[loss=0.2646, simple_loss=0.3284, pruned_loss=0.1004, over 19649.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3161, pruned_loss=0.08832, over 3814821.17 frames. ], batch size: 60, lr: 9.42e-03, grad_scale: 8.0 +2023-04-01 14:13:18,955 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56225.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:13:41,628 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 14:13:59,883 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.781e+02 6.769e+02 8.617e+02 2.222e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-01 14:14:09,534 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 14:14:19,008 INFO [train.py:903] (3/4) Epoch 9, batch 1650, loss[loss=0.2871, simple_loss=0.345, pruned_loss=0.1146, over 17160.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3159, pruned_loss=0.08802, over 3820964.52 frames. 
], batch size: 101, lr: 9.42e-03, grad_scale: 4.0 +2023-04-01 14:14:27,609 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7018, 3.1608, 3.2296, 3.2633, 1.1713, 3.0768, 2.7226, 2.9463], + device='cuda:3'), covar=tensor([0.1451, 0.0956, 0.0803, 0.0746, 0.4669, 0.0741, 0.0789, 0.1288], + device='cuda:3'), in_proj_covar=tensor([0.0609, 0.0536, 0.0732, 0.0611, 0.0676, 0.0478, 0.0456, 0.0670], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 14:15:22,544 INFO [train.py:903] (3/4) Epoch 9, batch 1700, loss[loss=0.2596, simple_loss=0.3333, pruned_loss=0.093, over 19564.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3158, pruned_loss=0.08774, over 3818809.91 frames. ], batch size: 61, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:15:25,221 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:16:02,358 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 14:16:05,919 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.619e+02 6.768e+02 8.904e+02 2.101e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-04-01 14:16:24,618 INFO [train.py:903] (3/4) Epoch 9, batch 1750, loss[loss=0.2584, simple_loss=0.3282, pruned_loss=0.09426, over 19476.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3158, pruned_loss=0.08781, over 3833092.24 frames. ], batch size: 64, lr: 9.41e-03, grad_scale: 4.0 +2023-04-01 14:17:26,702 INFO [train.py:903] (3/4) Epoch 9, batch 1800, loss[loss=0.1947, simple_loss=0.2697, pruned_loss=0.05984, over 19751.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3163, pruned_loss=0.08827, over 3830301.14 frames. ], batch size: 46, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:17:29,849 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 14:18:09,954 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.839e+02 7.002e+02 8.564e+02 1.629e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 14:18:13,456 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9333, 5.2721, 2.9864, 4.5874, 1.3666, 5.1498, 5.2328, 5.4641], + device='cuda:3'), covar=tensor([0.0453, 0.0987, 0.1929, 0.0683, 0.3809, 0.0587, 0.0582, 0.0800], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0342, 0.0411, 0.0305, 0.0371, 0.0338, 0.0332, 0.0364], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 14:18:25,590 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 14:18:29,984 INFO [train.py:903] (3/4) Epoch 9, batch 1850, loss[loss=0.2149, simple_loss=0.2832, pruned_loss=0.07327, over 19767.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3168, pruned_loss=0.08808, over 3828442.65 frames. ], batch size: 47, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:18:40,532 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:02,314 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-01 14:19:11,108 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:12,041 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:19:32,991 INFO [train.py:903] (3/4) Epoch 9, batch 1900, loss[loss=0.2386, simple_loss=0.3094, pruned_loss=0.08394, over 19741.00 frames. ], tot_loss[loss=0.248, simple_loss=0.318, pruned_loss=0.08901, over 3833325.43 frames. ], batch size: 46, lr: 9.40e-03, grad_scale: 4.0 +2023-04-01 14:19:48,436 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 14:19:54,886 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 14:19:55,175 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56541.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 14:20:16,634 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.665e+02 6.760e+02 8.403e+02 1.758e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-04-01 14:20:18,933 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 14:20:35,966 INFO [train.py:903] (3/4) Epoch 9, batch 1950, loss[loss=0.2091, simple_loss=0.2818, pruned_loss=0.06817, over 19730.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3187, pruned_loss=0.08919, over 3820859.99 frames. ], batch size: 46, lr: 9.39e-03, grad_scale: 4.0 +2023-04-01 14:20:46,753 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:19,002 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:38,531 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:21:38,885 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-01 14:21:39,347 INFO [train.py:903] (3/4) Epoch 9, batch 2000, loss[loss=0.2454, simple_loss=0.3236, pruned_loss=0.08364, over 19615.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3186, pruned_loss=0.08912, over 3822914.93 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 8.0 +2023-04-01 14:22:22,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.506e+02 7.081e+02 9.090e+02 3.144e+03, threshold=1.416e+03, percent-clipped=7.0 +2023-04-01 14:22:36,096 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 14:22:42,524 INFO [train.py:903] (3/4) Epoch 9, batch 2050, loss[loss=0.2343, simple_loss=0.311, pruned_loss=0.07882, over 19662.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3203, pruned_loss=0.09047, over 3832823.33 frames. ], batch size: 60, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:22:56,335 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 14:22:57,534 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 14:23:19,228 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 14:23:43,572 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3968, 1.2536, 1.2525, 1.8501, 1.4243, 1.6991, 1.8418, 1.5274], + device='cuda:3'), covar=tensor([0.0827, 0.1060, 0.1077, 0.0657, 0.0842, 0.0729, 0.0722, 0.0697], + device='cuda:3'), in_proj_covar=tensor([0.0216, 0.0232, 0.0228, 0.0256, 0.0243, 0.0216, 0.0205, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 14:23:44,278 INFO [train.py:903] (3/4) Epoch 9, batch 2100, loss[loss=0.2815, simple_loss=0.3401, pruned_loss=0.1115, over 19657.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3189, pruned_loss=0.08974, over 3833023.93 frames. ], batch size: 60, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:24:12,065 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 14:24:29,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 5.573e+02 6.906e+02 8.990e+02 1.566e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-01 14:24:35,334 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 14:24:48,310 INFO [train.py:903] (3/4) Epoch 9, batch 2150, loss[loss=0.2302, simple_loss=0.2881, pruned_loss=0.08616, over 19308.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3176, pruned_loss=0.08916, over 3832440.14 frames. ], batch size: 44, lr: 9.38e-03, grad_scale: 8.0 +2023-04-01 14:25:48,770 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56821.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:25:50,905 INFO [train.py:903] (3/4) Epoch 9, batch 2200, loss[loss=0.2603, simple_loss=0.3215, pruned_loss=0.09957, over 19319.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.317, pruned_loss=0.08888, over 3834490.71 frames. ], batch size: 66, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:26:36,144 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 5.854e+02 7.300e+02 9.690e+02 2.298e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 14:26:57,253 INFO [train.py:903] (3/4) Epoch 9, batch 2250, loss[loss=0.3095, simple_loss=0.3679, pruned_loss=0.1256, over 19344.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3171, pruned_loss=0.08854, over 3828455.56 frames. ], batch size: 66, lr: 9.37e-03, grad_scale: 8.0 +2023-04-01 14:27:03,720 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56878.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:27:11,762 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56885.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:27:34,970 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56903.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:00,639 INFO [train.py:903] (3/4) Epoch 9, batch 2300, loss[loss=0.2828, simple_loss=0.3466, pruned_loss=0.1095, over 19445.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3168, pruned_loss=0.08865, over 3824328.43 frames. ], batch size: 62, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:28:05,853 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:28:13,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 14:28:46,771 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.809e+02 7.207e+02 9.233e+02 1.673e+03, threshold=1.441e+03, percent-clipped=4.0 +2023-04-01 14:28:50,682 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3087, 1.2030, 1.8708, 1.5112, 2.9727, 4.4129, 4.3800, 4.9148], + device='cuda:3'), covar=tensor([0.1598, 0.3465, 0.3005, 0.1938, 0.0489, 0.0176, 0.0157, 0.0104], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0289, 0.0319, 0.0247, 0.0208, 0.0144, 0.0204, 0.0178], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 14:29:04,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3795, 2.1541, 1.9038, 1.7777, 1.6322, 1.8422, 0.4434, 1.1812], + device='cuda:3'), covar=tensor([0.0316, 0.0385, 0.0301, 0.0438, 0.0678, 0.0504, 0.0692, 0.0607], + device='cuda:3'), in_proj_covar=tensor([0.0324, 0.0323, 0.0319, 0.0337, 0.0416, 0.0337, 0.0296, 0.0319], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 14:29:05,081 INFO [train.py:903] (3/4) Epoch 9, batch 2350, loss[loss=0.3056, simple_loss=0.3555, pruned_loss=0.1278, over 19757.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3163, pruned_loss=0.08808, over 3832702.57 frames. ], batch size: 51, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:29:40,023 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57000.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:29:46,434 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 14:29:53,592 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57011.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:29:55,738 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57013.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:30:01,354 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 14:30:07,034 INFO [train.py:903] (3/4) Epoch 9, batch 2400, loss[loss=0.2313, simple_loss=0.2984, pruned_loss=0.08206, over 19608.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3185, pruned_loss=0.0897, over 3828050.97 frames. ], batch size: 50, lr: 9.36e-03, grad_scale: 8.0 +2023-04-01 14:30:51,622 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.434e+02 5.803e+02 7.402e+02 8.988e+02 1.700e+03, threshold=1.480e+03, percent-clipped=2.0 +2023-04-01 14:31:11,536 INFO [train.py:903] (3/4) Epoch 9, batch 2450, loss[loss=0.2008, simple_loss=0.2743, pruned_loss=0.06363, over 19381.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3179, pruned_loss=0.0888, over 3833083.15 frames. ], batch size: 47, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:32:15,754 INFO [train.py:903] (3/4) Epoch 9, batch 2500, loss[loss=0.2391, simple_loss=0.3182, pruned_loss=0.07998, over 19671.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3172, pruned_loss=0.08787, over 3835971.93 frames. 
], batch size: 59, lr: 9.35e-03, grad_scale: 8.0 +2023-04-01 14:33:00,912 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.325e+02 6.954e+02 9.918e+02 1.981e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 14:33:09,583 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57165.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:33:19,749 INFO [train.py:903] (3/4) Epoch 9, batch 2550, loss[loss=0.3383, simple_loss=0.3767, pruned_loss=0.15, over 13700.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3178, pruned_loss=0.08831, over 3825290.15 frames. ], batch size: 136, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:33:45,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7394, 3.1968, 3.2318, 3.2511, 1.1832, 3.1042, 2.6817, 2.9494], + device='cuda:3'), covar=tensor([0.1417, 0.0903, 0.0797, 0.0730, 0.4541, 0.0702, 0.0808, 0.1315], + device='cuda:3'), in_proj_covar=tensor([0.0625, 0.0544, 0.0742, 0.0618, 0.0687, 0.0488, 0.0468, 0.0689], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 14:33:59,358 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57204.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:13,772 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57215.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:34:15,581 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 14:34:22,488 INFO [train.py:903] (3/4) Epoch 9, batch 2600, loss[loss=0.2521, simple_loss=0.3264, pruned_loss=0.08892, over 19496.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3164, pruned_loss=0.08755, over 3833247.76 frames. ], batch size: 64, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:05,293 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57256.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:35:07,156 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.816e+02 6.932e+02 7.774e+02 1.592e+03, threshold=1.386e+03, percent-clipped=3.0 +2023-04-01 14:35:07,957 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.37 vs. limit=5.0 +2023-04-01 14:35:12,155 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57262.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:22,436 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57270.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:25,691 INFO [train.py:903] (3/4) Epoch 9, batch 2650, loss[loss=0.2755, simple_loss=0.3531, pruned_loss=0.09889, over 19383.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3166, pruned_loss=0.08755, over 3835955.82 frames. ], batch size: 70, lr: 9.34e-03, grad_scale: 8.0 +2023-04-01 14:35:36,476 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57280.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:35:37,682 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57281.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 14:35:45,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-01 14:35:51,999 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5935, 4.1585, 2.5741, 3.6554, 0.9988, 3.8127, 3.9519, 3.9914], + device='cuda:3'), covar=tensor([0.0625, 0.1003, 0.2007, 0.0735, 0.3939, 0.0804, 0.0695, 0.1009], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0343, 0.0411, 0.0301, 0.0367, 0.0337, 0.0327, 0.0364], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 14:36:31,184 INFO [train.py:903] (3/4) Epoch 9, batch 2700, loss[loss=0.2741, simple_loss=0.3422, pruned_loss=0.103, over 19698.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3173, pruned_loss=0.08805, over 3823193.26 frames. ], batch size: 59, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:12,087 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57355.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:14,489 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57357.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:37:15,518 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.587e+02 5.592e+02 7.209e+02 8.892e+02 3.755e+03, threshold=1.442e+03, percent-clipped=7.0 +2023-04-01 14:37:33,717 INFO [train.py:903] (3/4) Epoch 9, batch 2750, loss[loss=0.189, simple_loss=0.267, pruned_loss=0.05553, over 19476.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3169, pruned_loss=0.08794, over 3825885.85 frames. ], batch size: 49, lr: 9.33e-03, grad_scale: 8.0 +2023-04-01 14:37:49,327 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:30,635 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57416.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:38:38,519 INFO [train.py:903] (3/4) Epoch 9, batch 2800, loss[loss=0.2268, simple_loss=0.3135, pruned_loss=0.07003, over 19617.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3183, pruned_loss=0.08873, over 3811140.83 frames. ], batch size: 57, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:23,166 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.525e+02 6.599e+02 8.446e+02 1.316e+03, threshold=1.320e+03, percent-clipped=0.0 +2023-04-01 14:39:38,408 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57470.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:41,763 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:42,549 INFO [train.py:903] (3/4) Epoch 9, batch 2850, loss[loss=0.2216, simple_loss=0.3007, pruned_loss=0.07123, over 19668.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3177, pruned_loss=0.08811, over 3820588.88 frames. 
], batch size: 53, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:39:51,277 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57480.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:39:55,840 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0623, 1.1603, 1.3386, 1.4365, 2.6206, 0.9912, 1.8661, 2.8203], + device='cuda:3'), covar=tensor([0.0483, 0.2632, 0.2704, 0.1566, 0.0798, 0.2376, 0.1191, 0.0390], + device='cuda:3'), in_proj_covar=tensor([0.0333, 0.0325, 0.0338, 0.0303, 0.0335, 0.0323, 0.0309, 0.0331], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 14:40:33,706 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57513.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:40:41,911 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 14:40:46,555 INFO [train.py:903] (3/4) Epoch 9, batch 2900, loss[loss=0.2543, simple_loss=0.3297, pruned_loss=0.08947, over 18751.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3181, pruned_loss=0.08876, over 3809908.39 frames. ], batch size: 74, lr: 9.32e-03, grad_scale: 8.0 +2023-04-01 14:41:03,516 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57536.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:16,958 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57548.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:30,782 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.787e+02 5.829e+02 7.475e+02 8.920e+02 2.516e+03, threshold=1.495e+03, percent-clipped=6.0 +2023-04-01 14:41:32,227 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57559.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:35,772 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57561.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:41:49,414 INFO [train.py:903] (3/4) Epoch 9, batch 2950, loss[loss=0.2018, simple_loss=0.2677, pruned_loss=0.06792, over 19391.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3164, pruned_loss=0.08788, over 3818155.53 frames. ], batch size: 47, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:32,126 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:42:53,537 INFO [train.py:903] (3/4) Epoch 9, batch 3000, loss[loss=0.2657, simple_loss=0.3421, pruned_loss=0.09467, over 19759.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3161, pruned_loss=0.08738, over 3825000.47 frames. ], batch size: 63, lr: 9.31e-03, grad_scale: 8.0 +2023-04-01 14:42:53,537 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 14:43:06,205 INFO [train.py:937] (3/4) Epoch 9, validation: loss=0.1831, simple_loss=0.2838, pruned_loss=0.04122, over 944034.00 frames. +2023-04-01 14:43:06,206 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 14:43:08,504 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 14:43:29,259 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:43:50,440 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.061e+02 7.914e+02 9.800e+02 2.087e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-04-01 14:43:56,590 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:00,184 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:44:08,758 INFO [train.py:903] (3/4) Epoch 9, batch 3050, loss[loss=0.2974, simple_loss=0.3465, pruned_loss=0.1241, over 19662.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.317, pruned_loss=0.0882, over 3821280.03 frames. ], batch size: 53, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:44:10,184 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:09,158 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:11,162 INFO [train.py:903] (3/4) Epoch 9, batch 3100, loss[loss=0.2592, simple_loss=0.3282, pruned_loss=0.09514, over 19284.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3156, pruned_loss=0.08778, over 3834048.23 frames. ], batch size: 66, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:45:16,055 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:19,266 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57728.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:26,180 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2022, 1.2803, 1.6729, 1.3643, 2.5493, 1.9532, 2.5542, 1.0082], + device='cuda:3'), covar=tensor([0.2161, 0.3693, 0.2097, 0.1682, 0.1188, 0.1878, 0.1303, 0.3428], + device='cuda:3'), in_proj_covar=tensor([0.0467, 0.0542, 0.0552, 0.0418, 0.0574, 0.0469, 0.0633, 0.0470], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 14:45:47,443 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:47,547 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57751.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:49,760 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:45:54,924 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.860e+02 5.906e+02 7.408e+02 1.029e+03 2.368e+03, threshold=1.482e+03, percent-clipped=3.0 +2023-04-01 14:45:57,418 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:46:14,147 INFO [train.py:903] (3/4) Epoch 9, batch 3150, loss[loss=0.2339, simple_loss=0.3092, pruned_loss=0.07934, over 19537.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3157, pruned_loss=0.08776, over 3831422.77 frames. ], batch size: 54, lr: 9.30e-03, grad_scale: 8.0 +2023-04-01 14:46:42,121 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-01 14:47:14,704 INFO [train.py:903] (3/4) Epoch 9, batch 3200, loss[loss=0.2434, simple_loss=0.3232, pruned_loss=0.08175, over 19658.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3155, pruned_loss=0.08777, over 3827674.56 frames. ], batch size: 58, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:47:16,010 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57824.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:55,932 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:47:56,960 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.791e+02 7.100e+02 9.261e+02 4.038e+03, threshold=1.420e+03, percent-clipped=7.0 +2023-04-01 14:48:09,922 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 14:48:14,917 INFO [train.py:903] (3/4) Epoch 9, batch 3250, loss[loss=0.2103, simple_loss=0.2815, pruned_loss=0.06956, over 19735.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3151, pruned_loss=0.08766, over 3828956.99 frames. ], batch size: 46, lr: 9.29e-03, grad_scale: 8.0 +2023-04-01 14:48:18,532 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:12,207 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57919.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:16,424 INFO [train.py:903] (3/4) Epoch 9, batch 3300, loss[loss=0.1721, simple_loss=0.2455, pruned_loss=0.04937, over 19741.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3153, pruned_loss=0.08757, over 3827198.42 frames. ], batch size: 46, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:49:23,984 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 14:49:26,382 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57930.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:36,424 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57939.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:43,142 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:56,596 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57955.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:49:59,470 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.470e+02 6.783e+02 8.234e+02 1.579e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 14:50:17,152 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57972.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:17,932 INFO [train.py:903] (3/4) Epoch 9, batch 3350, loss[loss=0.2347, simple_loss=0.3042, pruned_loss=0.08263, over 19622.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3146, pruned_loss=0.08721, over 3838824.11 frames. 
], batch size: 50, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:50:22,849 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57977.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:50:54,297 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58002.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:51:20,066 INFO [train.py:903] (3/4) Epoch 9, batch 3400, loss[loss=0.2928, simple_loss=0.3515, pruned_loss=0.117, over 13437.00 frames. ], tot_loss[loss=0.245, simple_loss=0.315, pruned_loss=0.0875, over 3817777.19 frames. ], batch size: 136, lr: 9.28e-03, grad_scale: 8.0 +2023-04-01 14:51:35,099 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-01 14:52:02,956 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.309e+02 6.592e+02 8.930e+02 1.711e+03, threshold=1.318e+03, percent-clipped=4.0 +2023-04-01 14:52:20,120 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7521, 1.3961, 1.3885, 2.3078, 1.8364, 2.1466, 2.2022, 1.8355], + device='cuda:3'), covar=tensor([0.0825, 0.1049, 0.1109, 0.0806, 0.0877, 0.0705, 0.0889, 0.0683], + device='cuda:3'), in_proj_covar=tensor([0.0217, 0.0232, 0.0229, 0.0256, 0.0243, 0.0216, 0.0204, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 14:52:20,806 INFO [train.py:903] (3/4) Epoch 9, batch 3450, loss[loss=0.2451, simple_loss=0.3002, pruned_loss=0.095, over 19738.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3149, pruned_loss=0.08762, over 3815596.13 frames. ], batch size: 46, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:52:25,128 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 14:52:48,554 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58095.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:22,372 INFO [train.py:903] (3/4) Epoch 9, batch 3500, loss[loss=0.2765, simple_loss=0.3404, pruned_loss=0.1063, over 19671.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3149, pruned_loss=0.08707, over 3830729.28 frames. ], batch size: 55, lr: 9.27e-03, grad_scale: 8.0 +2023-04-01 14:53:25,163 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6944, 4.1484, 4.3699, 4.3559, 1.4133, 4.0042, 3.5749, 4.0116], + device='cuda:3'), covar=tensor([0.1225, 0.0787, 0.0560, 0.0525, 0.5327, 0.0622, 0.0595, 0.1043], + device='cuda:3'), in_proj_covar=tensor([0.0624, 0.0550, 0.0750, 0.0620, 0.0686, 0.0491, 0.0467, 0.0685], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 14:53:33,947 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58131.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:53:36,482 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-04-01 14:54:03,483 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58156.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:05,471 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.015e+02 7.407e+02 9.414e+02 2.837e+03, threshold=1.481e+03, percent-clipped=3.0 +2023-04-01 14:54:06,988 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58159.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:08,258 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3277, 1.3787, 1.7473, 1.5359, 2.5329, 2.0744, 2.5793, 1.0010], + device='cuda:3'), covar=tensor([0.1993, 0.3333, 0.1972, 0.1562, 0.1279, 0.1757, 0.1423, 0.3304], + device='cuda:3'), in_proj_covar=tensor([0.0468, 0.0548, 0.0558, 0.0422, 0.0580, 0.0473, 0.0638, 0.0474], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 14:54:10,345 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:54:24,270 INFO [train.py:903] (3/4) Epoch 9, batch 3550, loss[loss=0.2096, simple_loss=0.2901, pruned_loss=0.06457, over 19748.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.315, pruned_loss=0.08738, over 3822340.09 frames. ], batch size: 51, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:54:37,431 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1616, 1.2810, 1.1114, 0.9437, 0.9915, 1.0041, 0.0665, 0.2713], + device='cuda:3'), covar=tensor([0.0564, 0.0562, 0.0349, 0.0426, 0.1062, 0.0467, 0.0898, 0.0945], + device='cuda:3'), in_proj_covar=tensor([0.0318, 0.0322, 0.0319, 0.0337, 0.0409, 0.0334, 0.0299, 0.0316], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 14:54:50,516 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:09,229 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:20,399 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:55:24,265 INFO [train.py:903] (3/4) Epoch 9, batch 3600, loss[loss=0.2571, simple_loss=0.3337, pruned_loss=0.09022, over 18433.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3149, pruned_loss=0.08742, over 3811541.97 frames. ], batch size: 83, lr: 9.26e-03, grad_scale: 8.0 +2023-04-01 14:55:30,394 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58228.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:01,032 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58253.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 14:56:06,162 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.487e+02 6.500e+02 7.997e+02 2.924e+03, threshold=1.300e+03, percent-clipped=2.0 +2023-04-01 14:56:23,769 INFO [train.py:903] (3/4) Epoch 9, batch 3650, loss[loss=0.2661, simple_loss=0.3384, pruned_loss=0.09692, over 19617.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3145, pruned_loss=0.08722, over 3823021.60 frames. 
], batch size: 57, lr: 9.26e-03, grad_scale: 16.0 +2023-04-01 14:57:24,455 INFO [train.py:903] (3/4) Epoch 9, batch 3700, loss[loss=0.1981, simple_loss=0.2699, pruned_loss=0.06321, over 19744.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3142, pruned_loss=0.08683, over 3836945.50 frames. ], batch size: 47, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:57:59,836 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 14:58:07,747 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 6.060e+02 7.736e+02 9.843e+02 2.060e+03, threshold=1.547e+03, percent-clipped=9.0 +2023-04-01 14:58:23,949 INFO [train.py:903] (3/4) Epoch 9, batch 3750, loss[loss=0.2798, simple_loss=0.3456, pruned_loss=0.107, over 19636.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3148, pruned_loss=0.08721, over 3836347.02 frames. ], batch size: 61, lr: 9.25e-03, grad_scale: 8.0 +2023-04-01 14:59:24,634 INFO [train.py:903] (3/4) Epoch 9, batch 3800, loss[loss=0.2289, simple_loss=0.3147, pruned_loss=0.07161, over 19331.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3143, pruned_loss=0.08686, over 3826500.67 frames. ], batch size: 66, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 14:59:54,363 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 15:00:08,585 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 5.230e+02 6.601e+02 8.580e+02 1.875e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-04-01 15:00:11,112 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4257, 1.4102, 1.7816, 1.5665, 2.5911, 2.1689, 2.6847, 1.2873], + device='cuda:3'), covar=tensor([0.1982, 0.3515, 0.2044, 0.1605, 0.1207, 0.1733, 0.1228, 0.3235], + device='cuda:3'), in_proj_covar=tensor([0.0475, 0.0554, 0.0568, 0.0426, 0.0584, 0.0480, 0.0643, 0.0477], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 15:00:17,578 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58466.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:00:24,957 INFO [train.py:903] (3/4) Epoch 9, batch 3850, loss[loss=0.2326, simple_loss=0.3007, pruned_loss=0.08226, over 16882.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3143, pruned_loss=0.08683, over 3826574.89 frames. 
], batch size: 37, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:00:46,698 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:02,734 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:06,100 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:01:17,420 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2925, 1.3596, 1.8739, 1.3018, 2.6539, 3.4570, 3.2485, 3.6402], + device='cuda:3'), covar=tensor([0.1383, 0.3047, 0.2644, 0.1925, 0.0509, 0.0172, 0.0200, 0.0182], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0286, 0.0317, 0.0247, 0.0209, 0.0144, 0.0204, 0.0182], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:01:25,307 INFO [train.py:903] (3/4) Epoch 9, batch 3900, loss[loss=0.2374, simple_loss=0.3075, pruned_loss=0.08362, over 19601.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3141, pruned_loss=0.08677, over 3814166.01 frames. ], batch size: 50, lr: 9.24e-03, grad_scale: 8.0 +2023-04-01 15:02:08,184 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+02 6.109e+02 7.308e+02 8.933e+02 2.200e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 15:02:26,007 INFO [train.py:903] (3/4) Epoch 9, batch 3950, loss[loss=0.2427, simple_loss=0.3163, pruned_loss=0.0846, over 19673.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3136, pruned_loss=0.0869, over 3809638.13 frames. ], batch size: 59, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:02:28,075 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 15:02:51,967 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8929, 1.5886, 1.4999, 1.9629, 1.7375, 1.5939, 1.6439, 1.7962], + device='cuda:3'), covar=tensor([0.0864, 0.1362, 0.1383, 0.0820, 0.1077, 0.0505, 0.1002, 0.0604], + device='cuda:3'), in_proj_covar=tensor([0.0248, 0.0351, 0.0290, 0.0239, 0.0298, 0.0242, 0.0270, 0.0232], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:03:21,986 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58618.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:25,477 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:03:28,120 INFO [train.py:903] (3/4) Epoch 9, batch 4000, loss[loss=0.2721, simple_loss=0.3343, pruned_loss=0.1049, over 19483.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3149, pruned_loss=0.08728, over 3811593.61 frames. ], batch size: 64, lr: 9.23e-03, grad_scale: 8.0 +2023-04-01 15:04:10,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.392e+02 5.642e+02 6.845e+02 8.578e+02 2.087e+03, threshold=1.369e+03, percent-clipped=3.0 +2023-04-01 15:04:10,786 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 15:04:27,015 INFO [train.py:903] (3/4) Epoch 9, batch 4050, loss[loss=0.2273, simple_loss=0.2987, pruned_loss=0.07793, over 19533.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3159, pruned_loss=0.08767, over 3822839.51 frames. 
], batch size: 56, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:04:29,190 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-01 15:04:59,250 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58699.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:05:28,159 INFO [train.py:903] (3/4) Epoch 9, batch 4100, loss[loss=0.2722, simple_loss=0.3462, pruned_loss=0.09911, over 19769.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3156, pruned_loss=0.08723, over 3815945.64 frames. ], batch size: 56, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:03,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 15:06:11,181 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 5.572e+02 6.953e+02 8.904e+02 1.888e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-04-01 15:06:28,248 INFO [train.py:903] (3/4) Epoch 9, batch 4150, loss[loss=0.1853, simple_loss=0.2611, pruned_loss=0.05478, over 15282.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3144, pruned_loss=0.08648, over 3817153.73 frames. ], batch size: 33, lr: 9.22e-03, grad_scale: 8.0 +2023-04-01 15:06:46,514 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-04-01 15:06:54,411 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58793.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:07:30,859 INFO [train.py:903] (3/4) Epoch 9, batch 4200, loss[loss=0.2382, simple_loss=0.3125, pruned_loss=0.08196, over 19622.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3143, pruned_loss=0.08672, over 3804099.43 frames. ], batch size: 57, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:07:34,199 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 15:07:49,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4563, 2.2945, 1.9435, 1.8677, 1.8006, 1.9034, 0.5633, 1.2340], + device='cuda:3'), covar=tensor([0.0337, 0.0378, 0.0306, 0.0438, 0.0670, 0.0546, 0.0745, 0.0632], + device='cuda:3'), in_proj_covar=tensor([0.0321, 0.0322, 0.0321, 0.0336, 0.0412, 0.0336, 0.0299, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:08:14,670 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.798e+02 5.991e+02 7.229e+02 9.145e+02 1.621e+03, threshold=1.446e+03, percent-clipped=3.0 +2023-04-01 15:08:32,532 INFO [train.py:903] (3/4) Epoch 9, batch 4250, loss[loss=0.242, simple_loss=0.3178, pruned_loss=0.08308, over 19665.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.3134, pruned_loss=0.08624, over 3803987.89 frames. ], batch size: 55, lr: 9.21e-03, grad_scale: 8.0 +2023-04-01 15:08:34,189 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:37,530 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:08:48,664 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 15:08:59,999 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 15:09:03,851 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:08,272 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:09:32,781 INFO [train.py:903] (3/4) Epoch 9, batch 4300, loss[loss=0.223, simple_loss=0.2938, pruned_loss=0.07608, over 19412.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3146, pruned_loss=0.08702, over 3814048.19 frames. ], batch size: 48, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:09:34,153 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58924.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:10:17,293 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.584e+02 7.321e+02 9.114e+02 2.155e+03, threshold=1.464e+03, percent-clipped=5.0 +2023-04-01 15:10:17,710 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0432, 2.0305, 1.7602, 1.6077, 1.4592, 1.7048, 0.5010, 1.1283], + device='cuda:3'), covar=tensor([0.0313, 0.0368, 0.0254, 0.0387, 0.0781, 0.0432, 0.0718, 0.0605], + device='cuda:3'), in_proj_covar=tensor([0.0321, 0.0322, 0.0321, 0.0335, 0.0414, 0.0338, 0.0299, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:10:26,449 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 15:10:33,821 INFO [train.py:903] (3/4) Epoch 9, batch 4350, loss[loss=0.2778, simple_loss=0.3448, pruned_loss=0.1054, over 19617.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3165, pruned_loss=0.08778, over 3818935.40 frames. ], batch size: 61, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:10:43,767 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 15:11:10,136 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7410, 4.2563, 2.6335, 3.8395, 1.1337, 4.0427, 3.9849, 4.1673], + device='cuda:3'), covar=tensor([0.0613, 0.1032, 0.2124, 0.0757, 0.3943, 0.0783, 0.0777, 0.1044], + device='cuda:3'), in_proj_covar=tensor([0.0410, 0.0350, 0.0415, 0.0311, 0.0369, 0.0344, 0.0335, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 15:11:34,342 INFO [train.py:903] (3/4) Epoch 9, batch 4400, loss[loss=0.2321, simple_loss=0.3088, pruned_loss=0.07769, over 19857.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3166, pruned_loss=0.08816, over 3803172.94 frames. ], batch size: 52, lr: 9.20e-03, grad_scale: 8.0 +2023-04-01 15:11:58,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 15:11:58,800 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:12:08,636 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 15:12:18,356 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.626e+02 6.682e+02 1.014e+03 2.125e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-01 15:12:35,838 INFO [train.py:903] (3/4) Epoch 9, batch 4450, loss[loss=0.2443, simple_loss=0.3102, pruned_loss=0.08918, over 19849.00 frames. 
], tot_loss[loss=0.248, simple_loss=0.3182, pruned_loss=0.08893, over 3804498.22 frames. ], batch size: 52, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:05,749 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 15:13:36,621 INFO [train.py:903] (3/4) Epoch 9, batch 4500, loss[loss=0.2377, simple_loss=0.3154, pruned_loss=0.08002, over 19650.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3175, pruned_loss=0.08829, over 3804715.96 frames. ], batch size: 58, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:13:54,300 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59137.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:17,431 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 15:14:20,650 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59158.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:14:21,456 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.665e+02 5.638e+02 7.209e+02 9.049e+02 2.430e+03, threshold=1.442e+03, percent-clipped=5.0 +2023-04-01 15:14:38,161 INFO [train.py:903] (3/4) Epoch 9, batch 4550, loss[loss=0.2587, simple_loss=0.3376, pruned_loss=0.08996, over 19490.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3173, pruned_loss=0.08783, over 3804188.39 frames. ], batch size: 64, lr: 9.19e-03, grad_scale: 8.0 +2023-04-01 15:14:46,062 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 15:14:49,767 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59182.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:15:11,262 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 15:15:39,256 INFO [train.py:903] (3/4) Epoch 9, batch 4600, loss[loss=0.2132, simple_loss=0.2895, pruned_loss=0.06843, over 19487.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.316, pruned_loss=0.08692, over 3802651.93 frames. ], batch size: 49, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:01,384 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 15:16:15,387 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:24,167 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 5.555e+02 6.855e+02 8.371e+02 1.742e+03, threshold=1.371e+03, percent-clipped=2.0 +2023-04-01 15:16:33,268 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8917, 1.1621, 1.4540, 0.5954, 2.1205, 2.4466, 2.1163, 2.5837], + device='cuda:3'), covar=tensor([0.1417, 0.3257, 0.2918, 0.2238, 0.0472, 0.0218, 0.0333, 0.0262], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0290, 0.0321, 0.0248, 0.0214, 0.0147, 0.0206, 0.0185], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:16:35,112 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59268.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:16:40,777 INFO [train.py:903] (3/4) Epoch 9, batch 4650, loss[loss=0.2666, simple_loss=0.3305, pruned_loss=0.1014, over 19608.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3159, pruned_loss=0.08709, over 3812462.96 frames. 
], batch size: 57, lr: 9.18e-03, grad_scale: 8.0 +2023-04-01 15:16:56,380 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 15:17:09,148 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 15:17:41,932 INFO [train.py:903] (3/4) Epoch 9, batch 4700, loss[loss=0.276, simple_loss=0.3437, pruned_loss=0.1042, over 19603.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3164, pruned_loss=0.08725, over 3817754.96 frames. ], batch size: 61, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:03,982 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 15:18:13,129 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7316, 1.2262, 1.3014, 2.1931, 1.7502, 1.9230, 2.0715, 1.6523], + device='cuda:3'), covar=tensor([0.0891, 0.1253, 0.1208, 0.0911, 0.0893, 0.0878, 0.0867, 0.0812], + device='cuda:3'), in_proj_covar=tensor([0.0216, 0.0230, 0.0228, 0.0257, 0.0240, 0.0213, 0.0202, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 15:18:25,865 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.586e+02 6.716e+02 8.168e+02 1.499e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-01 15:18:32,960 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3104, 1.1776, 1.4113, 1.5306, 2.8367, 0.9894, 2.1380, 3.0508], + device='cuda:3'), covar=tensor([0.0486, 0.2711, 0.2602, 0.1627, 0.0820, 0.2443, 0.1155, 0.0370], + device='cuda:3'), in_proj_covar=tensor([0.0338, 0.0329, 0.0338, 0.0311, 0.0343, 0.0326, 0.0315, 0.0338], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:18:41,884 INFO [train.py:903] (3/4) Epoch 9, batch 4750, loss[loss=0.2509, simple_loss=0.325, pruned_loss=0.08841, over 19520.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3167, pruned_loss=0.08715, over 3820414.40 frames. ], batch size: 56, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:18:54,772 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59383.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:32,478 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:19:43,257 INFO [train.py:903] (3/4) Epoch 9, batch 4800, loss[loss=0.2531, simple_loss=0.3287, pruned_loss=0.08879, over 19446.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3161, pruned_loss=0.08726, over 3819989.88 frames. 
], batch size: 64, lr: 9.17e-03, grad_scale: 8.0 +2023-04-01 15:19:58,761 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0499, 3.4554, 1.9446, 2.1312, 2.9903, 1.5711, 1.3956, 1.9472], + device='cuda:3'), covar=tensor([0.1050, 0.0367, 0.0917, 0.0593, 0.0444, 0.1001, 0.0818, 0.0649], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0293, 0.0319, 0.0242, 0.0230, 0.0315, 0.0287, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:20:03,388 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59439.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:20:27,561 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.762e+02 6.893e+02 8.818e+02 1.836e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-01 15:20:43,944 INFO [train.py:903] (3/4) Epoch 9, batch 4850, loss[loss=0.2208, simple_loss=0.303, pruned_loss=0.0693, over 19537.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3167, pruned_loss=0.08768, over 3836946.98 frames. ], batch size: 54, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:20:45,436 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5955, 1.3475, 1.2824, 2.0714, 1.6346, 1.9034, 1.9221, 1.6464], + device='cuda:3'), covar=tensor([0.0830, 0.0979, 0.1110, 0.0773, 0.0843, 0.0661, 0.0837, 0.0659], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0229, 0.0225, 0.0254, 0.0237, 0.0212, 0.0199, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 15:21:08,912 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 15:21:27,104 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59508.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:28,999 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 15:21:35,283 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 15:21:36,464 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 15:21:42,268 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:45,374 INFO [train.py:903] (3/4) Epoch 9, batch 4900, loss[loss=0.2675, simple_loss=0.3374, pruned_loss=0.09879, over 19771.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3164, pruned_loss=0.0874, over 3827576.33 frames. ], batch size: 54, lr: 9.16e-03, grad_scale: 8.0 +2023-04-01 15:21:46,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 15:21:49,055 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59526.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:21:56,449 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-01 15:21:56,973 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59533.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:22:05,288 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 15:22:29,456 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 5.274e+02 6.528e+02 8.023e+02 2.606e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-01 15:22:45,478 INFO [train.py:903] (3/4) Epoch 9, batch 4950, loss[loss=0.3285, simple_loss=0.3716, pruned_loss=0.1427, over 13668.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3171, pruned_loss=0.08801, over 3821125.19 frames. ], batch size: 136, lr: 9.15e-03, grad_scale: 8.0 +2023-04-01 15:23:01,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 15:23:24,576 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 15:23:44,261 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9491, 1.3699, 1.0524, 0.9699, 1.1730, 0.9608, 0.9099, 1.2243], + device='cuda:3'), covar=tensor([0.0456, 0.0694, 0.1018, 0.0563, 0.0444, 0.1063, 0.0567, 0.0400], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0291, 0.0318, 0.0241, 0.0230, 0.0314, 0.0286, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:23:46,152 INFO [train.py:903] (3/4) Epoch 9, batch 5000, loss[loss=0.2091, simple_loss=0.2828, pruned_loss=0.06764, over 19685.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3161, pruned_loss=0.08766, over 3818483.06 frames. ], batch size: 53, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:23:53,579 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 15:24:04,769 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 15:24:06,363 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:08,533 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59641.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:30,504 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.818e+02 6.890e+02 9.185e+02 2.943e+03, threshold=1.378e+03, percent-clipped=3.0 +2023-04-01 15:24:35,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59664.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:24:46,096 INFO [train.py:903] (3/4) Epoch 9, batch 5050, loss[loss=0.2074, simple_loss=0.2736, pruned_loss=0.07057, over 18182.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3151, pruned_loss=0.08696, over 3813656.98 frames. ], batch size: 40, lr: 9.15e-03, grad_scale: 4.0 +2023-04-01 15:25:21,704 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 15:25:47,328 INFO [train.py:903] (3/4) Epoch 9, batch 5100, loss[loss=0.2622, simple_loss=0.3284, pruned_loss=0.09803, over 19596.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3165, pruned_loss=0.08788, over 3804835.66 frames. ], batch size: 61, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:25:56,484 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 15:25:59,757 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. 
Duration: 25.035 +2023-04-01 15:26:01,393 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9881, 1.4771, 1.1861, 0.9639, 1.3226, 0.9521, 0.9317, 1.3413], + device='cuda:3'), covar=tensor([0.0485, 0.0436, 0.0653, 0.0478, 0.0278, 0.0764, 0.0399, 0.0267], + device='cuda:3'), in_proj_covar=tensor([0.0289, 0.0294, 0.0321, 0.0242, 0.0231, 0.0317, 0.0288, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:26:05,153 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 15:26:33,072 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.640e+02 5.962e+02 7.316e+02 9.170e+02 1.645e+03, threshold=1.463e+03, percent-clipped=5.0 +2023-04-01 15:26:47,506 INFO [train.py:903] (3/4) Epoch 9, batch 5150, loss[loss=0.2864, simple_loss=0.3463, pruned_loss=0.1133, over 19589.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3161, pruned_loss=0.08775, over 3805336.80 frames. ], batch size: 61, lr: 9.14e-03, grad_scale: 4.0 +2023-04-01 15:26:58,279 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 15:27:32,234 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 15:27:49,839 INFO [train.py:903] (3/4) Epoch 9, batch 5200, loss[loss=0.2262, simple_loss=0.2952, pruned_loss=0.07861, over 17317.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3159, pruned_loss=0.08781, over 3800040.59 frames. ], batch size: 38, lr: 9.14e-03, grad_scale: 8.0 +2023-04-01 15:27:56,720 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9061, 2.0636, 2.0699, 2.0035, 4.4011, 0.9586, 2.3962, 4.5167], + device='cuda:3'), covar=tensor([0.0327, 0.2183, 0.2164, 0.1490, 0.0640, 0.2607, 0.1277, 0.0217], + device='cuda:3'), in_proj_covar=tensor([0.0338, 0.0328, 0.0338, 0.0310, 0.0338, 0.0324, 0.0316, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:27:59,732 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 15:28:34,608 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 5.545e+02 6.726e+02 8.723e+02 1.623e+03, threshold=1.345e+03, percent-clipped=2.0 +2023-04-01 15:28:39,342 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59864.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:41,613 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 15:28:41,851 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59866.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:28:50,360 INFO [train.py:903] (3/4) Epoch 9, batch 5250, loss[loss=0.2607, simple_loss=0.3308, pruned_loss=0.09528, over 18367.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3153, pruned_loss=0.08759, over 3794856.99 frames. 
], batch size: 84, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:29:19,332 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:31,306 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:39,003 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8338, 1.7784, 1.8729, 1.6778, 4.2710, 0.8399, 2.4620, 4.4800], + device='cuda:3'), covar=tensor([0.0309, 0.2248, 0.2379, 0.1625, 0.0669, 0.2617, 0.1198, 0.0232], + device='cuda:3'), in_proj_covar=tensor([0.0335, 0.0327, 0.0338, 0.0310, 0.0336, 0.0323, 0.0314, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:29:50,053 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59922.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:29:50,796 INFO [train.py:903] (3/4) Epoch 9, batch 5300, loss[loss=0.2693, simple_loss=0.3407, pruned_loss=0.09894, over 18275.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3147, pruned_loss=0.087, over 3798652.13 frames. ], batch size: 83, lr: 9.13e-03, grad_scale: 8.0 +2023-04-01 15:30:04,495 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 15:30:09,128 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7549, 4.2994, 2.7685, 3.7853, 1.1207, 4.0308, 4.0616, 4.2228], + device='cuda:3'), covar=tensor([0.0603, 0.1209, 0.1831, 0.0727, 0.3850, 0.0667, 0.0704, 0.0999], + device='cuda:3'), in_proj_covar=tensor([0.0410, 0.0347, 0.0412, 0.0307, 0.0369, 0.0341, 0.0332, 0.0371], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 15:30:36,750 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.525e+02 7.204e+02 8.995e+02 2.228e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-01 15:30:39,259 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7227, 2.2083, 2.3784, 2.8079, 2.3515, 2.3764, 2.2905, 2.7876], + device='cuda:3'), covar=tensor([0.0747, 0.1611, 0.1114, 0.0853, 0.1185, 0.0414, 0.0944, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0349, 0.0289, 0.0237, 0.0295, 0.0240, 0.0275, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:30:50,971 INFO [train.py:903] (3/4) Epoch 9, batch 5350, loss[loss=0.212, simple_loss=0.2782, pruned_loss=0.07296, over 19113.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3141, pruned_loss=0.08669, over 3803958.83 frames. 
], batch size: 42, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:30:59,192 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:31:03,381 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0345, 1.1358, 1.4103, 1.5462, 2.6232, 1.0775, 1.9889, 2.7655], + device='cuda:3'), covar=tensor([0.0534, 0.2699, 0.2583, 0.1498, 0.0771, 0.2184, 0.1115, 0.0415], + device='cuda:3'), in_proj_covar=tensor([0.0334, 0.0326, 0.0336, 0.0309, 0.0334, 0.0321, 0.0315, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:31:24,014 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 15:31:52,157 INFO [train.py:903] (3/4) Epoch 9, batch 5400, loss[loss=0.315, simple_loss=0.363, pruned_loss=0.1335, over 13428.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3147, pruned_loss=0.08732, over 3800219.07 frames. ], batch size: 136, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:32:35,001 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60058.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:36,997 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.978e+02 5.587e+02 7.112e+02 9.365e+02 1.948e+03, threshold=1.422e+03, percent-clipped=3.0 +2023-04-01 15:32:50,005 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60070.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:32:53,196 INFO [train.py:903] (3/4) Epoch 9, batch 5450, loss[loss=0.2335, simple_loss=0.3094, pruned_loss=0.07883, over 19580.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3145, pruned_loss=0.08711, over 3795595.98 frames. ], batch size: 52, lr: 9.12e-03, grad_scale: 8.0 +2023-04-01 15:33:06,639 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60083.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:21,789 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60096.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:33:34,846 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6594, 1.4574, 1.3726, 2.0457, 1.6969, 2.1077, 2.0927, 1.8558], + device='cuda:3'), covar=tensor([0.0775, 0.0953, 0.1089, 0.0887, 0.0887, 0.0646, 0.0760, 0.0622], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0229, 0.0228, 0.0253, 0.0241, 0.0213, 0.0199, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 15:33:55,434 INFO [train.py:903] (3/4) Epoch 9, batch 5500, loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05973, over 19730.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3163, pruned_loss=0.08772, over 3788786.66 frames. ], batch size: 46, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:34:17,037 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 15:34:40,128 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.485e+02 6.858e+02 8.862e+02 1.983e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-04-01 15:34:46,364 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-01 15:34:56,042 INFO [train.py:903] (3/4) Epoch 9, batch 5550, loss[loss=0.2211, simple_loss=0.3002, pruned_loss=0.07101, over 19851.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3171, pruned_loss=0.08847, over 3791051.31 frames. ], batch size: 52, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:35:01,728 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 15:35:28,633 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8690, 1.8431, 1.8456, 1.8221, 4.3609, 0.9092, 2.3236, 4.5296], + device='cuda:3'), covar=tensor([0.0360, 0.2296, 0.2400, 0.1586, 0.0651, 0.2638, 0.1331, 0.0228], + device='cuda:3'), in_proj_covar=tensor([0.0336, 0.0327, 0.0336, 0.0309, 0.0336, 0.0322, 0.0314, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:35:42,135 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60210.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:48,914 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60216.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:35:49,879 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 15:35:57,518 INFO [train.py:903] (3/4) Epoch 9, batch 5600, loss[loss=0.2086, simple_loss=0.2825, pruned_loss=0.06737, over 14633.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3157, pruned_loss=0.08775, over 3797079.40 frames. ], batch size: 32, lr: 9.11e-03, grad_scale: 8.0 +2023-04-01 15:36:13,020 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60235.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:28,798 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 15:36:31,694 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60251.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:41,625 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.287e+02 5.903e+02 7.092e+02 9.595e+02 1.671e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 15:36:42,028 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60260.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:36:58,108 INFO [train.py:903] (3/4) Epoch 9, batch 5650, loss[loss=0.2622, simple_loss=0.3332, pruned_loss=0.09558, over 19085.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3166, pruned_loss=0.08814, over 3800930.78 frames. ], batch size: 69, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:37:45,065 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 15:38:00,339 INFO [train.py:903] (3/4) Epoch 9, batch 5700, loss[loss=0.2393, simple_loss=0.316, pruned_loss=0.08135, over 19669.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3167, pruned_loss=0.08813, over 3804760.16 frames. 
], batch size: 60, lr: 9.10e-03, grad_scale: 8.0 +2023-04-01 15:38:02,961 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:45,030 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.631e+02 6.670e+02 7.834e+02 2.342e+03, threshold=1.334e+03, percent-clipped=2.0 +2023-04-01 15:38:52,992 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60366.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:38:59,662 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 15:39:00,756 INFO [train.py:903] (3/4) Epoch 9, batch 5750, loss[loss=0.2091, simple_loss=0.2788, pruned_loss=0.06967, over 19793.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3153, pruned_loss=0.08721, over 3813482.47 frames. ], batch size: 48, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:39:07,415 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 15:39:12,820 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 15:39:36,979 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60402.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:39:50,842 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60414.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:00,906 INFO [train.py:903] (3/4) Epoch 9, batch 5800, loss[loss=0.205, simple_loss=0.2735, pruned_loss=0.06824, over 19766.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3148, pruned_loss=0.0869, over 3818899.18 frames. ], batch size: 47, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:40:06,307 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60427.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:12,306 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 15:40:22,428 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:40:45,425 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.333e+02 6.303e+02 7.771e+02 9.979e+02 2.257e+03, threshold=1.554e+03, percent-clipped=12.0 +2023-04-01 15:41:01,368 INFO [train.py:903] (3/4) Epoch 9, batch 5850, loss[loss=0.2327, simple_loss=0.3097, pruned_loss=0.07786, over 19782.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.315, pruned_loss=0.08704, over 3826535.53 frames. ], batch size: 56, lr: 9.09e-03, grad_scale: 8.0 +2023-04-01 15:41:56,054 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60517.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:02,429 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 15:42:03,584 INFO [train.py:903] (3/4) Epoch 9, batch 5900, loss[loss=0.2149, simple_loss=0.2974, pruned_loss=0.06622, over 19539.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3147, pruned_loss=0.08671, over 3804852.22 frames. 
], batch size: 54, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:42:10,690 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60529.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:22,838 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 15:42:25,393 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60542.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:42,841 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60555.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:42:46,125 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3236, 0.8752, 1.0097, 2.2426, 1.3880, 1.2295, 1.7751, 1.2499], + device='cuda:3'), covar=tensor([0.1291, 0.2095, 0.1603, 0.0968, 0.1363, 0.1649, 0.1304, 0.1261], + device='cuda:3'), in_proj_covar=tensor([0.0218, 0.0232, 0.0234, 0.0259, 0.0247, 0.0218, 0.0204, 0.0210], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 15:42:47,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.460e+02 7.799e+02 9.723e+02 2.713e+03, threshold=1.560e+03, percent-clipped=1.0 +2023-04-01 15:42:48,019 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:03,513 INFO [train.py:903] (3/4) Epoch 9, batch 5950, loss[loss=0.2836, simple_loss=0.342, pruned_loss=0.1126, over 18405.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3157, pruned_loss=0.08772, over 3805114.56 frames. ], batch size: 84, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:43:10,774 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4639, 2.2950, 1.7593, 1.3138, 2.1494, 1.1803, 1.2248, 1.9574], + device='cuda:3'), covar=tensor([0.0751, 0.0510, 0.0791, 0.0638, 0.0375, 0.1041, 0.0666, 0.0363], + device='cuda:3'), in_proj_covar=tensor([0.0290, 0.0298, 0.0326, 0.0245, 0.0233, 0.0324, 0.0292, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:43:13,154 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60581.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:43:20,764 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3404, 1.4634, 2.0082, 1.8065, 3.0181, 4.8227, 4.5913, 5.1202], + device='cuda:3'), covar=tensor([0.1513, 0.3081, 0.2721, 0.1672, 0.0438, 0.0110, 0.0135, 0.0070], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0288, 0.0317, 0.0245, 0.0209, 0.0145, 0.0202, 0.0183], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:43:45,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:03,653 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:44:04,412 INFO [train.py:903] (3/4) Epoch 9, batch 6000, loss[loss=0.2809, simple_loss=0.3493, pruned_loss=0.1063, over 19731.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3163, pruned_loss=0.08809, over 3810197.81 frames. 
], batch size: 63, lr: 9.08e-03, grad_scale: 8.0 +2023-04-01 15:44:04,412 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 15:44:16,868 INFO [train.py:937] (3/4) Epoch 9, validation: loss=0.1828, simple_loss=0.2835, pruned_loss=0.04105, over 944034.00 frames. +2023-04-01 15:44:16,869 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 15:44:47,016 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:45:02,309 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.785e+02 7.329e+02 9.590e+02 1.552e+03, threshold=1.466e+03, percent-clipped=0.0 +2023-04-01 15:45:17,221 INFO [train.py:903] (3/4) Epoch 9, batch 6050, loss[loss=0.2396, simple_loss=0.31, pruned_loss=0.08462, over 19590.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3148, pruned_loss=0.08674, over 3818582.03 frames. ], batch size: 52, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:45:20,651 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:45:41,565 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-01 15:46:18,269 INFO [train.py:903] (3/4) Epoch 9, batch 6100, loss[loss=0.2761, simple_loss=0.3464, pruned_loss=0.1029, over 19689.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3142, pruned_loss=0.08595, over 3829532.43 frames. ], batch size: 59, lr: 9.07e-03, grad_scale: 8.0 +2023-04-01 15:46:34,606 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60736.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:03,006 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.339e+02 6.367e+02 8.531e+02 1.806e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-01 15:47:18,986 INFO [train.py:903] (3/4) Epoch 9, batch 6150, loss[loss=0.2212, simple_loss=0.289, pruned_loss=0.07666, over 19812.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3138, pruned_loss=0.0863, over 3828019.05 frames. ], batch size: 49, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:47:19,397 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60773.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:33,287 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60785.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:44,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 15:47:48,639 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:47:48,670 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60798.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:03,522 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:04,585 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60811.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:18,465 INFO [train.py:903] (3/4) Epoch 9, batch 6200, loss[loss=0.2605, simple_loss=0.3262, pruned_loss=0.09735, over 19542.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3142, pruned_loss=0.08665, over 3816282.40 frames. 
], batch size: 56, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:48:19,773 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:48:34,270 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60836.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:49:03,643 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.706e+02 5.965e+02 7.614e+02 9.330e+02 2.107e+03, threshold=1.523e+03, percent-clipped=6.0 +2023-04-01 15:49:19,598 INFO [train.py:903] (3/4) Epoch 9, batch 6250, loss[loss=0.2712, simple_loss=0.3406, pruned_loss=0.1009, over 19603.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3147, pruned_loss=0.08657, over 3823123.04 frames. ], batch size: 61, lr: 9.06e-03, grad_scale: 8.0 +2023-04-01 15:49:49,664 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 15:50:20,302 INFO [train.py:903] (3/4) Epoch 9, batch 6300, loss[loss=0.2437, simple_loss=0.3235, pruned_loss=0.08196, over 19748.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3151, pruned_loss=0.08738, over 3794219.71 frames. ], batch size: 63, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:50:31,290 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:01,601 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60956.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:51:05,845 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.456e+02 6.999e+02 8.896e+02 1.665e+03, threshold=1.400e+03, percent-clipped=1.0 +2023-04-01 15:51:21,558 INFO [train.py:903] (3/4) Epoch 9, batch 6350, loss[loss=0.2485, simple_loss=0.3309, pruned_loss=0.08309, over 19294.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3154, pruned_loss=0.08739, over 3795087.61 frames. ], batch size: 66, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:51:59,711 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-01 15:52:08,288 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9474, 2.0225, 2.1162, 2.7454, 1.8769, 2.5804, 2.5881, 2.1405], + device='cuda:3'), covar=tensor([0.3273, 0.2774, 0.1337, 0.1557, 0.3167, 0.1273, 0.2903, 0.2328], + device='cuda:3'), in_proj_covar=tensor([0.0755, 0.0767, 0.0629, 0.0873, 0.0754, 0.0674, 0.0783, 0.0690], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 15:52:22,395 INFO [train.py:903] (3/4) Epoch 9, batch 6400, loss[loss=0.2388, simple_loss=0.3126, pruned_loss=0.0825, over 19451.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3149, pruned_loss=0.08724, over 3801226.76 frames. 
], batch size: 64, lr: 9.05e-03, grad_scale: 8.0 +2023-04-01 15:52:25,007 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3042, 2.2921, 2.3771, 3.2815, 2.1631, 3.2505, 2.9309, 2.3710], + device='cuda:3'), covar=tensor([0.3471, 0.2948, 0.1368, 0.1776, 0.3703, 0.1318, 0.2979, 0.2360], + device='cuda:3'), in_proj_covar=tensor([0.0754, 0.0766, 0.0628, 0.0872, 0.0752, 0.0673, 0.0782, 0.0688], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 15:53:07,168 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 6.196e+02 7.384e+02 8.672e+02 1.804e+03, threshold=1.477e+03, percent-clipped=4.0 +2023-04-01 15:53:22,487 INFO [train.py:903] (3/4) Epoch 9, batch 6450, loss[loss=0.288, simple_loss=0.3478, pruned_loss=0.1141, over 18706.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3154, pruned_loss=0.08739, over 3800220.43 frames. ], batch size: 74, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:53:30,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:31,426 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61080.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:53:46,184 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 15:54:05,090 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 15:54:22,077 INFO [train.py:903] (3/4) Epoch 9, batch 6500, loss[loss=0.229, simple_loss=0.2989, pruned_loss=0.07958, over 19848.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3164, pruned_loss=0.08807, over 3814004.36 frames. ], batch size: 52, lr: 9.04e-03, grad_scale: 8.0 +2023-04-01 15:54:27,554 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 15:54:44,277 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1730, 2.0977, 1.7204, 1.5555, 1.5457, 1.6249, 0.2950, 0.9693], + device='cuda:3'), covar=tensor([0.0317, 0.0366, 0.0293, 0.0499, 0.0788, 0.0505, 0.0803, 0.0656], + device='cuda:3'), in_proj_covar=tensor([0.0320, 0.0316, 0.0318, 0.0334, 0.0411, 0.0334, 0.0295, 0.0310], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 15:55:06,066 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 6.706e+02 8.302e+02 1.012e+03 2.679e+03, threshold=1.660e+03, percent-clipped=7.0 +2023-04-01 15:55:21,994 INFO [train.py:903] (3/4) Epoch 9, batch 6550, loss[loss=0.2314, simple_loss=0.3025, pruned_loss=0.08016, over 19604.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3154, pruned_loss=0.08714, over 3832625.48 frames. 
], batch size: 50, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:55:45,494 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2909, 3.7810, 3.8487, 3.8712, 1.4603, 3.6410, 3.1900, 3.5425], + device='cuda:3'), covar=tensor([0.1317, 0.0781, 0.0649, 0.0627, 0.5047, 0.0678, 0.0673, 0.1129], + device='cuda:3'), in_proj_covar=tensor([0.0624, 0.0558, 0.0746, 0.0627, 0.0689, 0.0496, 0.0459, 0.0687], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 15:55:50,205 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61195.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 15:56:24,271 INFO [train.py:903] (3/4) Epoch 9, batch 6600, loss[loss=0.1934, simple_loss=0.2667, pruned_loss=0.06, over 19785.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3149, pruned_loss=0.08646, over 3831973.45 frames. ], batch size: 48, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:56:57,860 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5406, 1.3992, 1.3791, 1.7523, 1.4150, 1.7657, 1.7770, 1.6047], + device='cuda:3'), covar=tensor([0.0800, 0.0974, 0.1030, 0.0682, 0.0838, 0.0741, 0.0783, 0.0709], + device='cuda:3'), in_proj_covar=tensor([0.0216, 0.0230, 0.0231, 0.0257, 0.0246, 0.0217, 0.0202, 0.0210], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 15:57:09,254 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 5.654e+02 6.784e+02 8.392e+02 1.741e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 15:57:24,975 INFO [train.py:903] (3/4) Epoch 9, batch 6650, loss[loss=0.2269, simple_loss=0.2987, pruned_loss=0.07756, over 19750.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3147, pruned_loss=0.08648, over 3838898.76 frames. ], batch size: 48, lr: 9.03e-03, grad_scale: 8.0 +2023-04-01 15:57:50,974 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2646, 1.3850, 1.8871, 1.5806, 2.6158, 2.3487, 2.8486, 1.1128], + device='cuda:3'), covar=tensor([0.2083, 0.3524, 0.1982, 0.1565, 0.1321, 0.1667, 0.1312, 0.3402], + device='cuda:3'), in_proj_covar=tensor([0.0476, 0.0554, 0.0570, 0.0427, 0.0586, 0.0479, 0.0639, 0.0479], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 15:58:06,963 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9136, 1.6050, 1.4627, 2.0984, 1.8689, 1.6311, 1.5143, 1.7758], + device='cuda:3'), covar=tensor([0.0917, 0.1581, 0.1366, 0.0816, 0.1104, 0.0537, 0.1190, 0.0680], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0354, 0.0293, 0.0239, 0.0296, 0.0244, 0.0275, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:58:11,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0871, 1.6581, 1.5855, 2.0911, 1.8744, 1.7828, 1.5770, 1.8984], + device='cuda:3'), covar=tensor([0.0821, 0.1479, 0.1365, 0.0859, 0.1040, 0.0483, 0.1121, 0.0614], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0354, 0.0292, 0.0239, 0.0296, 0.0244, 0.0275, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 15:58:25,585 INFO [train.py:903] (3/4) Epoch 9, batch 6700, loss[loss=0.3094, simple_loss=0.3601, pruned_loss=0.1294, over 19616.00 frames. 
], tot_loss[loss=0.2449, simple_loss=0.3156, pruned_loss=0.08707, over 3839877.02 frames. ], batch size: 57, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:08,506 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.683e+02 7.501e+02 9.618e+02 2.603e+03, threshold=1.500e+03, percent-clipped=7.0 +2023-04-01 15:59:23,017 INFO [train.py:903] (3/4) Epoch 9, batch 6750, loss[loss=0.2082, simple_loss=0.2917, pruned_loss=0.06235, over 19758.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3147, pruned_loss=0.08682, over 3841418.06 frames. ], batch size: 54, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 15:59:44,572 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8464, 1.9636, 2.0552, 2.7154, 1.8560, 2.6382, 2.4936, 1.9924], + device='cuda:3'), covar=tensor([0.3333, 0.2625, 0.1278, 0.1631, 0.3180, 0.1236, 0.2858, 0.2328], + device='cuda:3'), in_proj_covar=tensor([0.0762, 0.0774, 0.0632, 0.0878, 0.0760, 0.0676, 0.0784, 0.0690], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 15:59:47,297 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-01 16:00:18,968 INFO [train.py:903] (3/4) Epoch 9, batch 6800, loss[loss=0.2131, simple_loss=0.2966, pruned_loss=0.0648, over 19766.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3144, pruned_loss=0.08641, over 3849120.43 frames. ], batch size: 54, lr: 9.02e-03, grad_scale: 8.0 +2023-04-01 16:00:19,093 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61423.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:03,176 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 16:01:04,245 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 16:01:06,621 INFO [train.py:903] (3/4) Epoch 10, batch 0, loss[loss=0.2698, simple_loss=0.3386, pruned_loss=0.1005, over 19599.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3386, pruned_loss=0.1005, over 19599.00 frames. ], batch size: 61, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:01:06,621 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 16:01:17,503 INFO [train.py:937] (3/4) Epoch 10, validation: loss=0.1825, simple_loss=0.2836, pruned_loss=0.04072, over 944034.00 frames. +2023-04-01 16:01:17,504 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 16:01:17,992 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61451.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:01:21,304 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3740, 2.2503, 2.3590, 3.3039, 2.1378, 3.3326, 2.9202, 2.2290], + device='cuda:3'), covar=tensor([0.3500, 0.3022, 0.1434, 0.1723, 0.3667, 0.1279, 0.2919, 0.2533], + device='cuda:3'), in_proj_covar=tensor([0.0759, 0.0770, 0.0630, 0.0873, 0.0757, 0.0673, 0.0781, 0.0688], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 16:01:27,607 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.689e+02 6.760e+02 8.116e+02 1.440e+03, threshold=1.352e+03, percent-clipped=0.0 +2023-04-01 16:01:29,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 16:01:48,088 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:02:17,430 INFO [train.py:903] (3/4) Epoch 10, batch 50, loss[loss=0.2356, simple_loss=0.3092, pruned_loss=0.08105, over 19721.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3135, pruned_loss=0.08455, over 857879.49 frames. ], batch size: 63, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:02:50,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 16:02:56,286 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9232, 3.4118, 2.0216, 2.1550, 3.0368, 1.7119, 1.3131, 1.8622], + device='cuda:3'), covar=tensor([0.1107, 0.0566, 0.0923, 0.0646, 0.0422, 0.1078, 0.0926, 0.0738], + device='cuda:3'), in_proj_covar=tensor([0.0290, 0.0300, 0.0324, 0.0245, 0.0234, 0.0326, 0.0289, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:03:03,254 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61538.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:03:18,485 INFO [train.py:903] (3/4) Epoch 10, batch 100, loss[loss=0.2231, simple_loss=0.3073, pruned_loss=0.06939, over 19672.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3157, pruned_loss=0.08681, over 1516446.05 frames. ], batch size: 58, lr: 8.57e-03, grad_scale: 8.0 +2023-04-01 16:03:24,171 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 16:03:29,311 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.995e+02 6.266e+02 7.755e+02 9.384e+02 2.029e+03, threshold=1.551e+03, percent-clipped=6.0 +2023-04-01 16:04:19,612 INFO [train.py:903] (3/4) Epoch 10, batch 150, loss[loss=0.201, simple_loss=0.2699, pruned_loss=0.06604, over 19797.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3139, pruned_loss=0.08617, over 2020961.37 frames. ], batch size: 48, lr: 8.56e-03, grad_scale: 16.0 +2023-04-01 16:05:12,386 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 16:05:20,102 INFO [train.py:903] (3/4) Epoch 10, batch 200, loss[loss=0.2379, simple_loss=0.3123, pruned_loss=0.08177, over 19582.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3145, pruned_loss=0.08608, over 2431353.49 frames. ], batch size: 52, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:05:32,341 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.325e+02 6.934e+02 9.117e+02 1.602e+03, threshold=1.387e+03, percent-clipped=3.0 +2023-04-01 16:05:34,707 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6744, 2.2888, 2.1005, 2.7570, 2.6422, 2.4025, 2.2524, 2.5053], + device='cuda:3'), covar=tensor([0.0754, 0.1533, 0.1223, 0.0878, 0.1060, 0.0396, 0.0932, 0.0547], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0354, 0.0289, 0.0238, 0.0297, 0.0243, 0.0274, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:05:35,095 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-01 16:05:45,453 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9289, 4.3768, 4.6541, 4.6378, 2.0119, 4.3084, 3.7773, 4.2960], + device='cuda:3'), covar=tensor([0.1313, 0.0794, 0.0512, 0.0514, 0.4621, 0.0667, 0.0608, 0.1020], + device='cuda:3'), in_proj_covar=tensor([0.0632, 0.0563, 0.0751, 0.0633, 0.0694, 0.0506, 0.0465, 0.0701], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 16:06:20,989 INFO [train.py:903] (3/4) Epoch 10, batch 250, loss[loss=0.2281, simple_loss=0.3067, pruned_loss=0.07478, over 17390.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.3146, pruned_loss=0.08604, over 2743325.86 frames. ], batch size: 101, lr: 8.56e-03, grad_scale: 8.0 +2023-04-01 16:07:19,589 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:07:20,496 INFO [train.py:903] (3/4) Epoch 10, batch 300, loss[loss=0.3128, simple_loss=0.3622, pruned_loss=0.1317, over 13685.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3152, pruned_loss=0.0868, over 2975757.47 frames. ], batch size: 136, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:07:32,774 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.563e+02 6.785e+02 8.281e+02 1.821e+03, threshold=1.357e+03, percent-clipped=1.0 +2023-04-01 16:07:33,036 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:12,970 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61794.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:08:20,599 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 16:08:21,871 INFO [train.py:903] (3/4) Epoch 10, batch 350, loss[loss=0.2555, simple_loss=0.3257, pruned_loss=0.09267, over 19541.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3147, pruned_loss=0.08651, over 3156292.03 frames. ], batch size: 56, lr: 8.55e-03, grad_scale: 8.0 +2023-04-01 16:08:44,880 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61819.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:09:23,123 INFO [train.py:903] (3/4) Epoch 10, batch 400, loss[loss=0.2706, simple_loss=0.3425, pruned_loss=0.09935, over 19679.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3158, pruned_loss=0.08769, over 3301553.93 frames. ], batch size: 53, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:09:36,144 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+02 5.438e+02 6.846e+02 8.745e+02 2.106e+03, threshold=1.369e+03, percent-clipped=7.0 +2023-04-01 16:10:26,713 INFO [train.py:903] (3/4) Epoch 10, batch 450, loss[loss=0.2435, simple_loss=0.321, pruned_loss=0.08302, over 19083.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3156, pruned_loss=0.08719, over 3400091.60 frames. ], batch size: 69, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:10:29,711 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-01 16:10:50,935 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 16:10:50,968 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-01 16:11:05,207 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61932.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:11:19,197 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2315, 4.2913, 4.7788, 4.7651, 2.5158, 4.4742, 4.0703, 4.4897], + device='cuda:3'), covar=tensor([0.1030, 0.2606, 0.0483, 0.0481, 0.3890, 0.0581, 0.0475, 0.0858], + device='cuda:3'), in_proj_covar=tensor([0.0624, 0.0559, 0.0743, 0.0631, 0.0687, 0.0500, 0.0460, 0.0690], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 16:11:27,991 INFO [train.py:903] (3/4) Epoch 10, batch 500, loss[loss=0.2707, simple_loss=0.3347, pruned_loss=0.1033, over 19675.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3157, pruned_loss=0.08691, over 3504379.24 frames. ], batch size: 53, lr: 8.54e-03, grad_scale: 8.0 +2023-04-01 16:11:30,098 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-01 16:11:39,884 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.473e+02 6.472e+02 7.882e+02 1.512e+03, threshold=1.294e+03, percent-clipped=2.0 +2023-04-01 16:12:30,820 INFO [train.py:903] (3/4) Epoch 10, batch 550, loss[loss=0.2442, simple_loss=0.3182, pruned_loss=0.08513, over 19469.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3153, pruned_loss=0.08664, over 3583186.40 frames. ], batch size: 49, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:32,118 INFO [train.py:903] (3/4) Epoch 10, batch 600, loss[loss=0.2078, simple_loss=0.287, pruned_loss=0.06432, over 19766.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3128, pruned_loss=0.08498, over 3632954.71 frames. ], batch size: 54, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:13:42,211 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-01 16:13:46,037 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.499e+02 5.366e+02 7.096e+02 8.541e+02 1.663e+03, threshold=1.419e+03, percent-clipped=2.0 +2023-04-01 16:14:11,915 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 16:14:26,439 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:14:35,311 INFO [train.py:903] (3/4) Epoch 10, batch 650, loss[loss=0.2609, simple_loss=0.3284, pruned_loss=0.09674, over 17382.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3121, pruned_loss=0.08484, over 3674913.58 frames. ], batch size: 101, lr: 8.53e-03, grad_scale: 8.0 +2023-04-01 16:14:40,071 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62105.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:24,628 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62140.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:15:39,370 INFO [train.py:903] (3/4) Epoch 10, batch 700, loss[loss=0.2402, simple_loss=0.314, pruned_loss=0.08314, over 19299.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3123, pruned_loss=0.08456, over 3713313.03 frames. 
], batch size: 66, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:15:51,155 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.410e+02 5.986e+02 7.007e+02 9.230e+02 2.462e+03, threshold=1.401e+03, percent-clipped=6.0 +2023-04-01 16:15:51,679 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2237, 1.2805, 1.6081, 1.5030, 2.3376, 2.0330, 2.3750, 0.8573], + device='cuda:3'), covar=tensor([0.2348, 0.4098, 0.2367, 0.1851, 0.1435, 0.2060, 0.1401, 0.3871], + device='cuda:3'), in_proj_covar=tensor([0.0476, 0.0557, 0.0575, 0.0426, 0.0585, 0.0480, 0.0639, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 16:16:41,947 INFO [train.py:903] (3/4) Epoch 10, batch 750, loss[loss=0.2198, simple_loss=0.284, pruned_loss=0.07777, over 18705.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3122, pruned_loss=0.08505, over 3732674.37 frames. ], batch size: 41, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:16:51,086 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:04,546 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62220.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:17:11,246 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3906, 1.3244, 1.4573, 1.5775, 2.8948, 1.0610, 2.1195, 3.2365], + device='cuda:3'), covar=tensor([0.0466, 0.2607, 0.2583, 0.1587, 0.0783, 0.2373, 0.1143, 0.0309], + device='cuda:3'), in_proj_covar=tensor([0.0338, 0.0326, 0.0337, 0.0311, 0.0338, 0.0322, 0.0317, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:17:35,223 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.67 vs. limit=5.0 +2023-04-01 16:17:42,578 INFO [train.py:903] (3/4) Epoch 10, batch 800, loss[loss=0.2409, simple_loss=0.3241, pruned_loss=0.07887, over 19617.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3129, pruned_loss=0.08573, over 3749362.37 frames. ], batch size: 50, lr: 8.52e-03, grad_scale: 8.0 +2023-04-01 16:17:54,856 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.237e+02 6.930e+02 8.487e+02 1.526e+03, threshold=1.386e+03, percent-clipped=2.0 +2023-04-01 16:17:58,879 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 16:18:15,001 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:32,358 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:18:43,719 INFO [train.py:903] (3/4) Epoch 10, batch 850, loss[loss=0.2372, simple_loss=0.3127, pruned_loss=0.08084, over 19735.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3125, pruned_loss=0.08525, over 3777182.60 frames. ], batch size: 51, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:37,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 16:19:45,727 INFO [train.py:903] (3/4) Epoch 10, batch 900, loss[loss=0.2269, simple_loss=0.3063, pruned_loss=0.07379, over 17579.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3133, pruned_loss=0.08572, over 3780581.46 frames. 
], batch size: 101, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:19:59,162 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 6.023e+02 7.076e+02 9.770e+02 2.916e+03, threshold=1.415e+03, percent-clipped=7.0 +2023-04-01 16:20:35,903 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:20:48,986 INFO [train.py:903] (3/4) Epoch 10, batch 950, loss[loss=0.2601, simple_loss=0.3363, pruned_loss=0.092, over 19701.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.313, pruned_loss=0.08526, over 3787537.37 frames. ], batch size: 59, lr: 8.51e-03, grad_scale: 8.0 +2023-04-01 16:20:50,159 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 16:20:58,685 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62409.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:21:50,240 INFO [train.py:903] (3/4) Epoch 10, batch 1000, loss[loss=0.2581, simple_loss=0.3324, pruned_loss=0.09194, over 19678.00 frames. ], tot_loss[loss=0.242, simple_loss=0.313, pruned_loss=0.08552, over 3799239.78 frames. ], batch size: 60, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:01,598 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 5.469e+02 6.659e+02 8.311e+02 1.987e+03, threshold=1.332e+03, percent-clipped=4.0 +2023-04-01 16:22:07,629 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:21,508 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62476.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:31,382 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62484.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:38,713 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:22:43,037 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 16:22:51,163 INFO [train.py:903] (3/4) Epoch 10, batch 1050, loss[loss=0.2544, simple_loss=0.3322, pruned_loss=0.08828, over 17959.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3118, pruned_loss=0.08439, over 3818017.16 frames. ], batch size: 83, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:22:51,565 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62501.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:23:23,743 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 16:23:54,503 INFO [train.py:903] (3/4) Epoch 10, batch 1100, loss[loss=0.293, simple_loss=0.3625, pruned_loss=0.1117, over 19072.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3119, pruned_loss=0.0846, over 3814520.95 frames. 
], batch size: 69, lr: 8.50e-03, grad_scale: 8.0 +2023-04-01 16:24:07,765 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.569e+02 6.927e+02 9.101e+02 1.941e+03, threshold=1.385e+03, percent-clipped=3.0 +2023-04-01 16:24:10,211 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62563.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:54,321 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62599.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:24:56,396 INFO [train.py:903] (3/4) Epoch 10, batch 1150, loss[loss=0.2702, simple_loss=0.332, pruned_loss=0.1042, over 19553.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3111, pruned_loss=0.08429, over 3828967.01 frames. ], batch size: 56, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:24:59,699 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:19,247 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2253, 2.0780, 1.8388, 1.6401, 1.4609, 1.7023, 0.3764, 1.0397], + device='cuda:3'), covar=tensor([0.0327, 0.0364, 0.0264, 0.0424, 0.0836, 0.0499, 0.0772, 0.0646], + device='cuda:3'), in_proj_covar=tensor([0.0325, 0.0321, 0.0325, 0.0338, 0.0411, 0.0335, 0.0302, 0.0315], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 16:25:21,134 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7927, 4.3628, 2.7331, 3.8964, 1.2346, 4.0244, 4.0901, 4.1642], + device='cuda:3'), covar=tensor([0.0536, 0.0933, 0.1764, 0.0684, 0.3444, 0.0690, 0.0689, 0.0794], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0352, 0.0422, 0.0309, 0.0371, 0.0346, 0.0342, 0.0373], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 16:25:37,691 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:53,855 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:25:56,072 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3060, 3.0263, 2.3000, 2.8334, 1.1258, 2.8818, 2.8229, 2.8900], + device='cuda:3'), covar=tensor([0.1083, 0.1334, 0.1796, 0.0855, 0.3088, 0.0936, 0.0945, 0.1270], + device='cuda:3'), in_proj_covar=tensor([0.0411, 0.0350, 0.0420, 0.0308, 0.0369, 0.0344, 0.0339, 0.0370], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 16:25:58,136 INFO [train.py:903] (3/4) Epoch 10, batch 1200, loss[loss=0.2121, simple_loss=0.2784, pruned_loss=0.07292, over 19757.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3106, pruned_loss=0.08414, over 3837074.43 frames. ], batch size: 45, lr: 8.49e-03, grad_scale: 8.0 +2023-04-01 16:26:09,504 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.581e+02 6.902e+02 9.588e+02 2.703e+03, threshold=1.380e+03, percent-clipped=8.0 +2023-04-01 16:26:24,966 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62672.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:32,529 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-01 16:26:40,542 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62684.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:26:59,720 INFO [train.py:903] (3/4) Epoch 10, batch 1250, loss[loss=0.2853, simple_loss=0.3459, pruned_loss=0.1123, over 19359.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3107, pruned_loss=0.08409, over 3836221.01 frames. ], batch size: 66, lr: 8.49e-03, grad_scale: 4.0 +2023-04-01 16:28:00,865 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:01,682 INFO [train.py:903] (3/4) Epoch 10, batch 1300, loss[loss=0.2504, simple_loss=0.3218, pruned_loss=0.08945, over 19378.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3099, pruned_loss=0.08401, over 3831405.74 frames. ], batch size: 70, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:28:05,058 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62753.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:28:16,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.064e+02 6.596e+02 8.862e+02 1.920e+03, threshold=1.319e+03, percent-clipped=1.0 +2023-04-01 16:28:27,818 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4345, 2.2106, 1.6138, 1.5717, 2.1379, 1.1748, 1.3348, 1.6714], + device='cuda:3'), covar=tensor([0.0821, 0.0682, 0.0899, 0.0571, 0.0446, 0.1090, 0.0593, 0.0424], + device='cuda:3'), in_proj_covar=tensor([0.0286, 0.0298, 0.0323, 0.0241, 0.0233, 0.0324, 0.0286, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:28:51,840 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:29:05,051 INFO [train.py:903] (3/4) Epoch 10, batch 1350, loss[loss=0.2471, simple_loss=0.3263, pruned_loss=0.08398, over 19042.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3112, pruned_loss=0.08456, over 3825639.39 frames. ], batch size: 69, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:29:16,161 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-01 16:29:34,259 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8289, 1.4809, 1.6123, 1.5387, 3.2875, 0.9591, 2.2438, 3.7256], + device='cuda:3'), covar=tensor([0.0390, 0.2301, 0.2357, 0.1666, 0.0736, 0.2444, 0.1212, 0.0246], + device='cuda:3'), in_proj_covar=tensor([0.0343, 0.0326, 0.0342, 0.0313, 0.0339, 0.0324, 0.0320, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:30:07,946 INFO [train.py:903] (3/4) Epoch 10, batch 1400, loss[loss=0.2494, simple_loss=0.3305, pruned_loss=0.08412, over 19542.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3105, pruned_loss=0.08385, over 3834147.68 frames. 
], batch size: 56, lr: 8.48e-03, grad_scale: 4.0 +2023-04-01 16:30:13,130 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62855.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:20,949 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.591e+02 6.749e+02 8.217e+02 1.554e+03, threshold=1.350e+03, percent-clipped=4.0 +2023-04-01 16:30:27,972 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:30:44,902 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62880.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:31:07,150 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 16:31:09,396 INFO [train.py:903] (3/4) Epoch 10, batch 1450, loss[loss=0.2481, simple_loss=0.3226, pruned_loss=0.08681, over 19396.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3122, pruned_loss=0.08525, over 3798792.86 frames. ], batch size: 70, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:31:16,534 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62907.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:07,386 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:32:11,884 INFO [train.py:903] (3/4) Epoch 10, batch 1500, loss[loss=0.2353, simple_loss=0.3166, pruned_loss=0.077, over 19625.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3116, pruned_loss=0.08471, over 3815329.73 frames. ], batch size: 57, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:32:27,777 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 5.675e+02 6.883e+02 8.252e+02 2.690e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-01 16:33:08,486 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.84 vs. limit=5.0 +2023-04-01 16:33:17,112 INFO [train.py:903] (3/4) Epoch 10, batch 1550, loss[loss=0.209, simple_loss=0.2894, pruned_loss=0.06427, over 18611.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3118, pruned_loss=0.08469, over 3817905.34 frames. 
], batch size: 41, lr: 8.47e-03, grad_scale: 4.0 +2023-04-01 16:33:23,579 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:33,747 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5449, 1.1160, 1.3158, 1.2176, 2.1712, 0.8748, 1.8809, 2.3274], + device='cuda:3'), covar=tensor([0.0653, 0.2574, 0.2593, 0.1473, 0.0860, 0.2046, 0.0957, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0324, 0.0338, 0.0311, 0.0338, 0.0322, 0.0318, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:33:42,922 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:49,634 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:33:53,315 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:20,243 INFO [train.py:903] (3/4) Epoch 10, batch 1600, loss[loss=0.2567, simple_loss=0.3284, pruned_loss=0.09252, over 19675.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3122, pruned_loss=0.08457, over 3818638.27 frames. ], batch size: 58, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:34:33,008 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.609e+02 5.367e+02 6.713e+02 8.573e+02 1.582e+03, threshold=1.343e+03, percent-clipped=2.0 +2023-04-01 16:34:33,419 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:34:42,301 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 16:35:21,340 INFO [train.py:903] (3/4) Epoch 10, batch 1650, loss[loss=0.206, simple_loss=0.2828, pruned_loss=0.06459, over 19488.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.312, pruned_loss=0.0848, over 3814964.79 frames. ], batch size: 49, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:35:51,917 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:05,479 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:14,719 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63143.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:21,878 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63149.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:36:23,854 INFO [train.py:903] (3/4) Epoch 10, batch 1700, loss[loss=0.2441, simple_loss=0.3161, pruned_loss=0.08608, over 19530.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3122, pruned_loss=0.08476, over 3823285.25 frames. 
], batch size: 54, lr: 8.46e-03, grad_scale: 8.0 +2023-04-01 16:36:38,454 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.819e+02 7.178e+02 9.040e+02 2.117e+03, threshold=1.436e+03, percent-clipped=7.0 +2023-04-01 16:36:54,037 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5322, 1.9401, 2.1202, 2.1655, 3.2763, 1.7277, 2.7922, 3.3496], + device='cuda:3'), covar=tensor([0.0379, 0.1825, 0.1830, 0.1243, 0.0498, 0.1876, 0.1169, 0.0281], + device='cuda:3'), in_proj_covar=tensor([0.0340, 0.0325, 0.0339, 0.0311, 0.0338, 0.0324, 0.0319, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:37:04,089 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 16:37:06,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6694, 1.7822, 1.5610, 2.5949, 1.7189, 2.3885, 2.0102, 1.3407], + device='cuda:3'), covar=tensor([0.4166, 0.3489, 0.2362, 0.2109, 0.3660, 0.1723, 0.4577, 0.4241], + device='cuda:3'), in_proj_covar=tensor([0.0771, 0.0780, 0.0634, 0.0880, 0.0758, 0.0678, 0.0779, 0.0691], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 16:37:28,659 INFO [train.py:903] (3/4) Epoch 10, batch 1750, loss[loss=0.2184, simple_loss=0.3045, pruned_loss=0.06618, over 19475.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3128, pruned_loss=0.08512, over 3822243.79 frames. ], batch size: 64, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:37:36,150 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4228, 2.3272, 1.9006, 1.9149, 1.7524, 1.9405, 0.7885, 1.4450], + device='cuda:3'), covar=tensor([0.0372, 0.0375, 0.0319, 0.0462, 0.0709, 0.0529, 0.0756, 0.0631], + device='cuda:3'), in_proj_covar=tensor([0.0326, 0.0322, 0.0326, 0.0338, 0.0413, 0.0338, 0.0303, 0.0318], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 16:38:14,973 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:38:30,877 INFO [train.py:903] (3/4) Epoch 10, batch 1800, loss[loss=0.278, simple_loss=0.333, pruned_loss=0.1115, over 19773.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3132, pruned_loss=0.0854, over 3818613.61 frames. 
], batch size: 54, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:38:44,560 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.870e+02 7.659e+02 9.293e+02 2.596e+03, threshold=1.532e+03, percent-clipped=8.0 +2023-04-01 16:39:03,325 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:16,014 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4807, 1.8204, 2.0266, 1.9776, 3.1932, 1.5029, 2.6553, 3.3124], + device='cuda:3'), covar=tensor([0.0472, 0.2107, 0.2041, 0.1449, 0.0598, 0.2123, 0.1573, 0.0352], + device='cuda:3'), in_proj_covar=tensor([0.0343, 0.0327, 0.0341, 0.0315, 0.0340, 0.0325, 0.0322, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:39:30,790 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 16:39:32,967 INFO [train.py:903] (3/4) Epoch 10, batch 1850, loss[loss=0.2308, simple_loss=0.3029, pruned_loss=0.07937, over 19629.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3132, pruned_loss=0.08548, over 3825945.69 frames. ], batch size: 50, lr: 8.45e-03, grad_scale: 4.0 +2023-04-01 16:39:35,776 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:39:54,136 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:10,162 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 16:40:26,771 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:40:35,821 INFO [train.py:903] (3/4) Epoch 10, batch 1900, loss[loss=0.2523, simple_loss=0.3301, pruned_loss=0.08721, over 19490.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3122, pruned_loss=0.08447, over 3831889.70 frames. ], batch size: 64, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:40:45,541 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 16:40:52,656 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 5.562e+02 7.038e+02 8.618e+02 1.834e+03, threshold=1.408e+03, percent-clipped=3.0 +2023-04-01 16:40:55,973 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 16:41:02,768 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 16:41:24,106 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3792, 1.1632, 1.3499, 1.3487, 2.9701, 0.8081, 2.0507, 3.1716], + device='cuda:3'), covar=tensor([0.0459, 0.2534, 0.2669, 0.1691, 0.0715, 0.2427, 0.1228, 0.0327], + device='cuda:3'), in_proj_covar=tensor([0.0344, 0.0328, 0.0345, 0.0314, 0.0343, 0.0325, 0.0323, 0.0346], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:41:24,920 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-01 16:41:37,235 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63399.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:41:40,410 INFO [train.py:903] (3/4) Epoch 10, batch 1950, loss[loss=0.2221, simple_loss=0.2972, pruned_loss=0.07349, over 19495.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3133, pruned_loss=0.0856, over 3819263.92 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 4.0 +2023-04-01 16:42:10,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63424.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:27,987 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:31,462 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:42:44,764 INFO [train.py:903] (3/4) Epoch 10, batch 2000, loss[loss=0.2101, simple_loss=0.2866, pruned_loss=0.06681, over 19710.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3131, pruned_loss=0.08486, over 3834479.03 frames. ], batch size: 51, lr: 8.44e-03, grad_scale: 8.0 +2023-04-01 16:42:58,757 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.14 vs. limit=5.0 +2023-04-01 16:43:00,248 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.731e+02 5.221e+02 6.641e+02 8.776e+02 2.044e+03, threshold=1.328e+03, percent-clipped=3.0 +2023-04-01 16:43:36,551 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4478, 1.4604, 1.8918, 1.7063, 3.0932, 2.8430, 3.4540, 1.4338], + device='cuda:3'), covar=tensor([0.2065, 0.3607, 0.2163, 0.1562, 0.1403, 0.1510, 0.1469, 0.3344], + device='cuda:3'), in_proj_covar=tensor([0.0475, 0.0555, 0.0577, 0.0425, 0.0582, 0.0475, 0.0637, 0.0481], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 16:43:43,958 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 16:43:47,388 INFO [train.py:903] (3/4) Epoch 10, batch 2050, loss[loss=0.2701, simple_loss=0.3385, pruned_loss=0.1009, over 19524.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3119, pruned_loss=0.08406, over 3833319.37 frames. ], batch size: 54, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:43:53,589 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:03,926 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 16:44:05,967 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 16:44:26,408 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:44:28,410 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 16:44:35,229 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. 
limit=2.0 +2023-04-01 16:44:35,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5951, 4.1027, 2.6248, 3.7253, 1.1946, 3.8849, 3.9055, 4.0582], + device='cuda:3'), covar=tensor([0.0599, 0.1037, 0.1833, 0.0752, 0.3533, 0.0734, 0.0648, 0.0920], + device='cuda:3'), in_proj_covar=tensor([0.0416, 0.0353, 0.0421, 0.0311, 0.0372, 0.0351, 0.0344, 0.0374], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 16:44:50,578 INFO [train.py:903] (3/4) Epoch 10, batch 2100, loss[loss=0.2688, simple_loss=0.3412, pruned_loss=0.09818, over 19523.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.311, pruned_loss=0.0839, over 3835015.41 frames. ], batch size: 54, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:45:06,403 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.645e+02 7.348e+02 9.688e+02 2.351e+03, threshold=1.470e+03, percent-clipped=4.0 +2023-04-01 16:45:22,180 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1183, 1.1815, 1.6384, 1.0178, 2.6679, 3.3222, 3.0818, 3.5312], + device='cuda:3'), covar=tensor([0.1539, 0.3304, 0.2922, 0.2120, 0.0436, 0.0146, 0.0210, 0.0171], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0249, 0.0212, 0.0151, 0.0205, 0.0188], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 16:45:26,358 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 16:45:32,506 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63583.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:33,940 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3674, 2.1601, 1.9445, 1.7758, 1.5471, 1.8423, 0.3647, 1.1600], + device='cuda:3'), covar=tensor([0.0338, 0.0388, 0.0296, 0.0456, 0.0807, 0.0533, 0.0860, 0.0698], + device='cuda:3'), in_proj_covar=tensor([0.0330, 0.0328, 0.0329, 0.0343, 0.0420, 0.0342, 0.0306, 0.0323], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 16:45:37,342 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:45:46,572 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 16:45:54,834 INFO [train.py:903] (3/4) Epoch 10, batch 2150, loss[loss=0.239, simple_loss=0.3159, pruned_loss=0.08108, over 19418.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3107, pruned_loss=0.0838, over 3843914.24 frames. ], batch size: 70, lr: 8.43e-03, grad_scale: 8.0 +2023-04-01 16:46:00,093 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63604.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:46:58,774 INFO [train.py:903] (3/4) Epoch 10, batch 2200, loss[loss=0.2037, simple_loss=0.2937, pruned_loss=0.05688, over 19696.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3109, pruned_loss=0.08396, over 3834025.58 frames. 
], batch size: 59, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:47:13,909 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.200e+02 5.616e+02 6.875e+02 8.680e+02 1.983e+03, threshold=1.375e+03, percent-clipped=4.0 +2023-04-01 16:48:00,213 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:48:03,398 INFO [train.py:903] (3/4) Epoch 10, batch 2250, loss[loss=0.2211, simple_loss=0.2926, pruned_loss=0.07478, over 19485.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3113, pruned_loss=0.08423, over 3818382.43 frames. ], batch size: 49, lr: 8.42e-03, grad_scale: 8.0 +2023-04-01 16:49:04,922 INFO [train.py:903] (3/4) Epoch 10, batch 2300, loss[loss=0.2126, simple_loss=0.2912, pruned_loss=0.06693, over 19824.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3138, pruned_loss=0.08627, over 3790429.29 frames. ], batch size: 52, lr: 8.42e-03, grad_scale: 4.0 +2023-04-01 16:49:18,654 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 16:49:23,048 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.837e+02 6.965e+02 8.642e+02 2.205e+03, threshold=1.393e+03, percent-clipped=3.0 +2023-04-01 16:49:45,522 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63782.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:49:47,898 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63784.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:09,266 INFO [train.py:903] (3/4) Epoch 10, batch 2350, loss[loss=0.2352, simple_loss=0.311, pruned_loss=0.07964, over 19669.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3124, pruned_loss=0.08521, over 3802289.30 frames. ], batch size: 58, lr: 8.41e-03, grad_scale: 4.0 +2023-04-01 16:50:44,333 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:50:50,884 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 16:51:10,027 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 16:51:11,957 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 16:51:13,627 INFO [train.py:903] (3/4) Epoch 10, batch 2400, loss[loss=0.2432, simple_loss=0.3092, pruned_loss=0.08857, over 19405.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.313, pruned_loss=0.08581, over 3797621.68 frames. 
], batch size: 48, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:51:29,410 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63863.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:51:30,163 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.763e+02 5.915e+02 7.022e+02 9.244e+02 2.907e+03, threshold=1.404e+03, percent-clipped=10.0 +2023-04-01 16:51:55,363 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9366, 2.0079, 2.1239, 2.7547, 2.0452, 2.6806, 2.4216, 2.0566], + device='cuda:3'), covar=tensor([0.3160, 0.2569, 0.1281, 0.1495, 0.2838, 0.1185, 0.2841, 0.2241], + device='cuda:3'), in_proj_covar=tensor([0.0772, 0.0780, 0.0638, 0.0882, 0.0763, 0.0683, 0.0781, 0.0695], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 16:52:03,547 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-01 16:52:12,812 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63897.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:15,964 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63899.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:17,969 INFO [train.py:903] (3/4) Epoch 10, batch 2450, loss[loss=0.2785, simple_loss=0.335, pruned_loss=0.111, over 19583.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3127, pruned_loss=0.08506, over 3805940.64 frames. ], batch size: 52, lr: 8.41e-03, grad_scale: 8.0 +2023-04-01 16:52:55,569 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:52:59,305 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63934.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:16,602 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63948.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:19,964 INFO [train.py:903] (3/4) Epoch 10, batch 2500, loss[loss=0.2509, simple_loss=0.3208, pruned_loss=0.09055, over 19702.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3129, pruned_loss=0.08525, over 3796536.47 frames. ], batch size: 59, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:53:24,612 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:53:35,919 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.566e+02 5.261e+02 6.826e+02 9.233e+02 1.687e+03, threshold=1.365e+03, percent-clipped=6.0 +2023-04-01 16:53:56,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63979.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:54:23,059 INFO [train.py:903] (3/4) Epoch 10, batch 2550, loss[loss=0.2785, simple_loss=0.3373, pruned_loss=0.1099, over 19757.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3117, pruned_loss=0.08458, over 3803813.37 frames. ], batch size: 63, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:13,554 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:15,684 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-01 16:55:20,883 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:26,452 INFO [train.py:903] (3/4) Epoch 10, batch 2600, loss[loss=0.216, simple_loss=0.2824, pruned_loss=0.07485, over 19723.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.312, pruned_loss=0.08469, over 3802134.81 frames. ], batch size: 46, lr: 8.40e-03, grad_scale: 8.0 +2023-04-01 16:55:41,464 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64063.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:55:42,246 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.625e+02 5.188e+02 6.411e+02 7.864e+02 1.888e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 16:56:27,597 INFO [train.py:903] (3/4) Epoch 10, batch 2650, loss[loss=0.2355, simple_loss=0.2959, pruned_loss=0.08754, over 19474.00 frames. ], tot_loss[loss=0.241, simple_loss=0.312, pruned_loss=0.085, over 3816391.81 frames. ], batch size: 49, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:56:45,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 16:57:18,005 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3602, 1.7280, 1.6936, 2.6656, 2.1726, 2.4076, 2.5517, 2.4555], + device='cuda:3'), covar=tensor([0.0749, 0.1017, 0.1047, 0.0988, 0.0975, 0.0725, 0.0888, 0.0634], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0254, 0.0240, 0.0213, 0.0201, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 16:57:29,272 INFO [train.py:903] (3/4) Epoch 10, batch 2700, loss[loss=0.2668, simple_loss=0.3396, pruned_loss=0.09701, over 19687.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3113, pruned_loss=0.08417, over 3814869.46 frames. 
], batch size: 60, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:57:29,582 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9555, 4.8986, 5.7826, 5.7797, 1.5667, 5.5118, 4.7050, 5.3594], + device='cuda:3'), covar=tensor([0.1285, 0.0861, 0.0512, 0.0428, 0.5830, 0.0469, 0.0514, 0.1070], + device='cuda:3'), in_proj_covar=tensor([0.0642, 0.0581, 0.0769, 0.0641, 0.0715, 0.0519, 0.0477, 0.0709], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 16:57:32,849 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64153.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:34,005 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0718, 1.7531, 2.1352, 1.7114, 4.6179, 0.8370, 2.4970, 4.8500], + device='cuda:3'), covar=tensor([0.0282, 0.2355, 0.2167, 0.1628, 0.0603, 0.2557, 0.1217, 0.0175], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0328, 0.0342, 0.0311, 0.0336, 0.0323, 0.0319, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:57:35,356 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64155.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:57:45,495 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.891e+02 6.747e+02 8.779e+02 3.257e+03, threshold=1.349e+03, percent-clipped=11.0 +2023-04-01 16:57:56,914 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64172.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:05,001 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64178.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:07,365 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:58:33,083 INFO [train.py:903] (3/4) Epoch 10, batch 2750, loss[loss=0.1735, simple_loss=0.2533, pruned_loss=0.04685, over 19376.00 frames. ], tot_loss[loss=0.239, simple_loss=0.311, pruned_loss=0.08353, over 3806671.12 frames. ], batch size: 47, lr: 8.39e-03, grad_scale: 8.0 +2023-04-01 16:58:41,443 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64207.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,117 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 16:59:36,901 INFO [train.py:903] (3/4) Epoch 10, batch 2800, loss[loss=0.2516, simple_loss=0.3075, pruned_loss=0.09786, over 19395.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3119, pruned_loss=0.08433, over 3809854.39 frames. 
], batch size: 48, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 16:59:43,002 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9745, 1.8285, 2.0362, 1.7819, 4.3799, 0.8504, 2.2267, 4.6298], + device='cuda:3'), covar=tensor([0.0285, 0.2350, 0.2280, 0.1675, 0.0694, 0.2669, 0.1486, 0.0211], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0327, 0.0342, 0.0313, 0.0339, 0.0326, 0.0319, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 16:59:45,217 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9215, 3.5072, 2.4794, 3.2200, 0.9534, 3.3242, 3.2838, 3.3899], + device='cuda:3'), covar=tensor([0.0773, 0.1194, 0.2018, 0.0808, 0.3866, 0.0894, 0.0848, 0.1103], + device='cuda:3'), in_proj_covar=tensor([0.0409, 0.0353, 0.0415, 0.0309, 0.0368, 0.0343, 0.0341, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 16:59:52,865 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.243e+02 6.695e+02 7.923e+02 2.040e+03, threshold=1.339e+03, percent-clipped=2.0 +2023-04-01 17:00:10,660 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:21,841 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2186, 1.2500, 1.7317, 1.2213, 2.6763, 3.5516, 3.2817, 3.7764], + device='cuda:3'), covar=tensor([0.1481, 0.3256, 0.2804, 0.1977, 0.0453, 0.0162, 0.0192, 0.0159], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0291, 0.0320, 0.0249, 0.0211, 0.0149, 0.0203, 0.0190], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:00:21,853 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64287.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:00:40,327 INFO [train.py:903] (3/4) Epoch 10, batch 2850, loss[loss=0.2149, simple_loss=0.2836, pruned_loss=0.07306, over 19760.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3106, pruned_loss=0.08334, over 3816456.93 frames. ], batch size: 47, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:00:41,921 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:03,219 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:06,431 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:12,428 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:35,607 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:39,746 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 17:01:43,205 INFO [train.py:903] (3/4) Epoch 10, batch 2900, loss[loss=0.2345, simple_loss=0.3061, pruned_loss=0.08144, over 19663.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3109, pruned_loss=0.08362, over 3816084.06 frames. 
], batch size: 53, lr: 8.38e-03, grad_scale: 8.0 +2023-04-01 17:01:58,188 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64363.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:01:58,958 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 6.015e+02 7.349e+02 1.031e+03 2.008e+03, threshold=1.470e+03, percent-clipped=12.0 +2023-04-01 17:02:26,637 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64385.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:28,051 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2091, 1.9755, 1.4344, 1.3032, 1.8594, 1.1354, 1.1455, 1.6142], + device='cuda:3'), covar=tensor([0.0915, 0.0623, 0.0979, 0.0655, 0.0436, 0.1161, 0.0650, 0.0422], + device='cuda:3'), in_proj_covar=tensor([0.0288, 0.0296, 0.0323, 0.0242, 0.0231, 0.0322, 0.0288, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 17:02:36,423 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64393.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:02:44,649 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8439, 1.6913, 1.4812, 1.9644, 1.9470, 1.6699, 1.4989, 1.7856], + device='cuda:3'), covar=tensor([0.0876, 0.1434, 0.1363, 0.0910, 0.1064, 0.0529, 0.1188, 0.0630], + device='cuda:3'), in_proj_covar=tensor([0.0249, 0.0349, 0.0288, 0.0237, 0.0296, 0.0243, 0.0273, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 17:02:46,685 INFO [train.py:903] (3/4) Epoch 10, batch 2950, loss[loss=0.2402, simple_loss=0.3161, pruned_loss=0.08217, over 19852.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3116, pruned_loss=0.08361, over 3825141.12 frames. ], batch size: 52, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,250 INFO [train.py:903] (3/4) Epoch 10, batch 3000, loss[loss=0.2037, simple_loss=0.2882, pruned_loss=0.05955, over 19778.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3099, pruned_loss=0.08278, over 3834571.47 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:03:48,250 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 17:04:00,864 INFO [train.py:937] (3/4) Epoch 10, validation: loss=0.1811, simple_loss=0.2816, pruned_loss=0.04036, over 944034.00 frames. +2023-04-01 17:04:00,865 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 17:04:04,327 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-01 17:04:18,120 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 5.533e+02 6.661e+02 8.174e+02 1.809e+03, threshold=1.332e+03, percent-clipped=2.0 +2023-04-01 17:04:30,052 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5630, 1.3565, 1.3523, 1.9602, 1.5246, 1.7385, 1.8318, 1.6509], + device='cuda:3'), covar=tensor([0.0848, 0.1078, 0.1052, 0.0776, 0.0821, 0.0834, 0.0922, 0.0738], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0226, 0.0224, 0.0252, 0.0237, 0.0215, 0.0201, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 17:04:41,339 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64483.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:03,239 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64500.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:05:04,026 INFO [train.py:903] (3/4) Epoch 10, batch 3050, loss[loss=0.1981, simple_loss=0.277, pruned_loss=0.05961, over 19736.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3122, pruned_loss=0.08411, over 3832564.59 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:05:57,723 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64543.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:06,594 INFO [train.py:903] (3/4) Epoch 10, batch 3100, loss[loss=0.2119, simple_loss=0.2772, pruned_loss=0.07334, over 19750.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.313, pruned_loss=0.08459, over 3825125.67 frames. ], batch size: 45, lr: 8.37e-03, grad_scale: 8.0 +2023-04-01 17:06:22,864 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.961e+02 7.216e+02 8.690e+02 2.208e+03, threshold=1.443e+03, percent-clipped=3.0 +2023-04-01 17:06:28,012 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64568.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:06:40,873 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64578.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:00,334 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5216, 1.2776, 1.7563, 1.5973, 2.9244, 4.8145, 4.6078, 5.0960], + device='cuda:3'), covar=tensor([0.1573, 0.4204, 0.3796, 0.1998, 0.0570, 0.0157, 0.0164, 0.0116], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0292, 0.0319, 0.0248, 0.0211, 0.0150, 0.0202, 0.0190], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:07:01,272 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:10,477 INFO [train.py:903] (3/4) Epoch 10, batch 3150, loss[loss=0.2546, simple_loss=0.3262, pruned_loss=0.09151, over 19699.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3138, pruned_loss=0.08498, over 3831302.20 frames. 
], batch size: 60, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:07:13,023 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:17,543 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64607.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:07:39,761 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 17:07:57,640 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3009, 1.6695, 1.7239, 2.7245, 2.3070, 2.3263, 2.5686, 2.2778], + device='cuda:3'), covar=tensor([0.0758, 0.0983, 0.1021, 0.0831, 0.0767, 0.0768, 0.0801, 0.0659], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0226, 0.0225, 0.0253, 0.0239, 0.0216, 0.0200, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 17:08:10,679 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64649.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:12,598 INFO [train.py:903] (3/4) Epoch 10, batch 3200, loss[loss=0.28, simple_loss=0.34, pruned_loss=0.11, over 13333.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.312, pruned_loss=0.08361, over 3830012.22 frames. ], batch size: 136, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:08:30,175 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.483e+02 6.754e+02 8.204e+02 1.644e+03, threshold=1.351e+03, percent-clipped=2.0 +2023-04-01 17:08:32,822 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64666.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:08:43,788 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64674.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:02,728 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4714, 2.1050, 2.1364, 2.9784, 2.1429, 2.6723, 2.6343, 2.6815], + device='cuda:3'), covar=tensor([0.0683, 0.0807, 0.0823, 0.0694, 0.0875, 0.0642, 0.0776, 0.0538], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0224, 0.0223, 0.0251, 0.0236, 0.0213, 0.0199, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 17:09:16,024 INFO [train.py:903] (3/4) Epoch 10, batch 3250, loss[loss=0.2485, simple_loss=0.3217, pruned_loss=0.08766, over 18226.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3109, pruned_loss=0.08319, over 3841110.90 frames. ], batch size: 83, lr: 8.36e-03, grad_scale: 8.0 +2023-04-01 17:09:24,060 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64707.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:26,628 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:09:42,833 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:20,328 INFO [train.py:903] (3/4) Epoch 10, batch 3300, loss[loss=0.225, simple_loss=0.3028, pruned_loss=0.07363, over 19605.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3109, pruned_loss=0.08349, over 3839650.83 frames. 
], batch size: 57, lr: 8.35e-03, grad_scale: 8.0 +2023-04-01 17:10:26,741 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:28,761 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 17:10:35,540 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:37,431 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.384e+02 6.605e+02 8.281e+02 2.311e+03, threshold=1.321e+03, percent-clipped=9.0 +2023-04-01 17:10:43,939 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:10:57,711 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64781.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:23,421 INFO [train.py:903] (3/4) Epoch 10, batch 3350, loss[loss=0.2975, simple_loss=0.3579, pruned_loss=0.1186, over 19601.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3115, pruned_loss=0.08416, over 3820532.13 frames. ], batch size: 57, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:11:49,545 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:11:55,096 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64827.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:12:24,011 INFO [train.py:903] (3/4) Epoch 10, batch 3400, loss[loss=0.2882, simple_loss=0.3476, pruned_loss=0.1144, over 19612.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3116, pruned_loss=0.08461, over 3829377.16 frames. ], batch size: 57, lr: 8.35e-03, grad_scale: 4.0 +2023-04-01 17:12:42,253 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.893e+02 5.724e+02 7.342e+02 8.665e+02 1.913e+03, threshold=1.468e+03, percent-clipped=6.0 +2023-04-01 17:13:27,816 INFO [train.py:903] (3/4) Epoch 10, batch 3450, loss[loss=0.2627, simple_loss=0.3339, pruned_loss=0.09579, over 19847.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3109, pruned_loss=0.08419, over 3833070.96 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:13:35,084 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 17:14:05,176 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2129, 2.2379, 2.2996, 3.3378, 2.3125, 3.2303, 2.8001, 2.2158], + device='cuda:3'), covar=tensor([0.3426, 0.2982, 0.1380, 0.1712, 0.3371, 0.1325, 0.3038, 0.2408], + device='cuda:3'), in_proj_covar=tensor([0.0769, 0.0779, 0.0633, 0.0879, 0.0762, 0.0682, 0.0776, 0.0691], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:14:20,591 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64942.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:30,655 INFO [train.py:903] (3/4) Epoch 10, batch 3500, loss[loss=0.2122, simple_loss=0.28, pruned_loss=0.07222, over 19297.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.31, pruned_loss=0.08389, over 3825663.61 frames. 
], batch size: 44, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:14:31,770 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64951.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:14:48,653 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.495e+02 5.616e+02 6.822e+02 8.996e+02 1.764e+03, threshold=1.364e+03, percent-clipped=1.0 +2023-04-01 17:14:49,108 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:19,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:25,305 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64994.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:15:27,106 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-01 17:15:34,048 INFO [train.py:903] (3/4) Epoch 10, batch 3550, loss[loss=0.286, simple_loss=0.3441, pruned_loss=0.114, over 18248.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3099, pruned_loss=0.08376, over 3818583.03 frames. ], batch size: 83, lr: 8.34e-03, grad_scale: 4.0 +2023-04-01 17:15:44,363 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:16,119 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3335, 1.3606, 1.7454, 1.4293, 2.9897, 4.3788, 4.2895, 4.8623], + device='cuda:3'), covar=tensor([0.1521, 0.3327, 0.3104, 0.2034, 0.0511, 0.0154, 0.0172, 0.0121], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0290, 0.0318, 0.0246, 0.0211, 0.0149, 0.0202, 0.0190], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:16:34,667 INFO [train.py:903] (3/4) Epoch 10, batch 3600, loss[loss=0.3005, simple_loss=0.3591, pruned_loss=0.121, over 19780.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.311, pruned_loss=0.08413, over 3821218.32 frames. ], batch size: 56, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:16:37,753 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-01 17:16:52,000 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.791e+02 6.813e+02 8.568e+02 1.743e+03, threshold=1.363e+03, percent-clipped=4.0 +2023-04-01 17:16:52,169 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65065.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:16:53,539 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:02,295 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:08,952 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:20,310 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. limit=5.0 +2023-04-01 17:17:35,955 INFO [train.py:903] (3/4) Epoch 10, batch 3650, loss[loss=0.286, simple_loss=0.3477, pruned_loss=0.1121, over 19664.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3113, pruned_loss=0.08436, over 3819014.19 frames. 
], batch size: 60, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:17:38,648 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65103.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:42,761 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65107.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:17:45,377 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2012, 3.6721, 2.0367, 2.2224, 3.2129, 1.8253, 1.4193, 2.2201], + device='cuda:3'), covar=tensor([0.1038, 0.0368, 0.1009, 0.0641, 0.0464, 0.1038, 0.0919, 0.0608], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0295, 0.0321, 0.0243, 0.0232, 0.0319, 0.0287, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 17:17:51,937 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65114.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:05,475 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65125.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:18:36,264 INFO [train.py:903] (3/4) Epoch 10, batch 3700, loss[loss=0.2146, simple_loss=0.2962, pruned_loss=0.0665, over 18073.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3117, pruned_loss=0.08431, over 3817807.05 frames. ], batch size: 83, lr: 8.33e-03, grad_scale: 8.0 +2023-04-01 17:18:53,968 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 5.629e+02 7.088e+02 8.754e+02 1.818e+03, threshold=1.418e+03, percent-clipped=5.0 +2023-04-01 17:19:12,228 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:34,116 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65198.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:19:37,048 INFO [train.py:903] (3/4) Epoch 10, batch 3750, loss[loss=0.2463, simple_loss=0.3187, pruned_loss=0.0869, over 19781.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3128, pruned_loss=0.08531, over 3828983.62 frames. ], batch size: 56, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:02,515 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65222.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:03,636 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65223.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:10,553 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:20:36,852 INFO [train.py:903] (3/4) Epoch 10, batch 3800, loss[loss=0.2584, simple_loss=0.3321, pruned_loss=0.09235, over 19543.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3135, pruned_loss=0.08557, over 3819133.46 frames. ], batch size: 56, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:20:54,005 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.166e+02 5.620e+02 6.620e+02 8.005e+02 1.692e+03, threshold=1.324e+03, percent-clipped=4.0 +2023-04-01 17:21:06,343 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 17:21:37,286 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-01 17:21:37,759 INFO [train.py:903] (3/4) Epoch 10, batch 3850, loss[loss=0.2599, simple_loss=0.3253, pruned_loss=0.09725, over 19523.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3132, pruned_loss=0.08517, over 3831168.86 frames. ], batch size: 54, lr: 8.32e-03, grad_scale: 8.0 +2023-04-01 17:21:40,425 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65303.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:03,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65322.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:21,622 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:33,979 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:22:37,971 INFO [train.py:903] (3/4) Epoch 10, batch 3900, loss[loss=0.2556, simple_loss=0.3293, pruned_loss=0.09093, over 18749.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3118, pruned_loss=0.0841, over 3839025.74 frames. ], batch size: 74, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:22:44,184 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 17:22:55,831 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.027e+02 5.653e+02 7.180e+02 9.093e+02 1.633e+03, threshold=1.436e+03, percent-clipped=2.0 +2023-04-01 17:23:15,720 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65381.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:40,379 INFO [train.py:903] (3/4) Epoch 10, batch 3950, loss[loss=0.2323, simple_loss=0.2903, pruned_loss=0.08718, over 19384.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3122, pruned_loss=0.08453, over 3833621.99 frames. ], batch size: 48, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:23:40,411 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 17:23:46,504 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65406.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:23:53,339 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4411, 1.2451, 1.3265, 1.8288, 1.4571, 1.6323, 1.7647, 1.4764], + device='cuda:3'), covar=tensor([0.0871, 0.0998, 0.1035, 0.0694, 0.0813, 0.0705, 0.0750, 0.0727], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0227, 0.0226, 0.0252, 0.0242, 0.0216, 0.0202, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 17:23:58,955 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:23,321 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65436.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:41,330 INFO [train.py:903] (3/4) Epoch 10, batch 4000, loss[loss=0.2179, simple_loss=0.2761, pruned_loss=0.07987, over 19354.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3112, pruned_loss=0.08423, over 3832929.77 frames. 
], batch size: 47, lr: 8.31e-03, grad_scale: 8.0 +2023-04-01 17:24:44,060 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65453.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:54,595 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65461.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:24:58,711 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.016e+02 5.448e+02 7.265e+02 9.508e+02 1.942e+03, threshold=1.453e+03, percent-clipped=2.0 +2023-04-01 17:25:14,524 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:23,062 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 17:25:23,371 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65485.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:41,796 INFO [train.py:903] (3/4) Epoch 10, batch 4050, loss[loss=0.2443, simple_loss=0.3143, pruned_loss=0.08714, over 19581.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3116, pruned_loss=0.08446, over 3837434.52 frames. ], batch size: 52, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:25:45,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65503.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:25:53,336 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65510.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:19,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:26:40,723 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65550.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:26:41,427 INFO [train.py:903] (3/4) Epoch 10, batch 4100, loss[loss=0.2386, simple_loss=0.2978, pruned_loss=0.08967, over 19730.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3113, pruned_loss=0.08452, over 3823243.99 frames. ], batch size: 46, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:26:59,039 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.925e+02 7.032e+02 8.463e+02 2.911e+03, threshold=1.406e+03, percent-clipped=6.0 +2023-04-01 17:27:04,283 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3300, 2.3000, 2.4472, 3.3900, 2.2849, 3.2965, 2.8919, 2.2784], + device='cuda:3'), covar=tensor([0.3661, 0.3178, 0.1444, 0.1878, 0.3776, 0.1390, 0.3175, 0.2664], + device='cuda:3'), in_proj_covar=tensor([0.0776, 0.0789, 0.0641, 0.0884, 0.0771, 0.0691, 0.0787, 0.0700], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:27:11,840 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-01 17:27:23,829 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2483, 1.2333, 1.7463, 1.1457, 2.5720, 3.3382, 3.0649, 3.5837], + device='cuda:3'), covar=tensor([0.1408, 0.3316, 0.2803, 0.2018, 0.0426, 0.0171, 0.0213, 0.0167], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0291, 0.0317, 0.0245, 0.0212, 0.0149, 0.0203, 0.0191], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:27:42,542 INFO [train.py:903] (3/4) Epoch 10, batch 4150, loss[loss=0.271, simple_loss=0.3439, pruned_loss=0.09903, over 17980.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.311, pruned_loss=0.08394, over 3830747.82 frames. ], batch size: 83, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:39,193 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65647.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:28:43,359 INFO [train.py:903] (3/4) Epoch 10, batch 4200, loss[loss=0.2829, simple_loss=0.3515, pruned_loss=0.1071, over 19583.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3113, pruned_loss=0.08405, over 3815583.34 frames. ], batch size: 61, lr: 8.30e-03, grad_scale: 8.0 +2023-04-01 17:28:43,399 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 17:28:59,406 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.075e+02 5.851e+02 6.725e+02 9.221e+02 2.199e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-01 17:29:42,451 INFO [train.py:903] (3/4) Epoch 10, batch 4250, loss[loss=0.2481, simple_loss=0.3286, pruned_loss=0.08381, over 18663.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3126, pruned_loss=0.08487, over 3813502.09 frames. ], batch size: 74, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:29:54,057 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:29:55,743 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 17:30:06,598 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 17:30:22,866 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65734.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:30:40,307 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7154, 1.7966, 1.9860, 2.4168, 1.5719, 2.0764, 2.2477, 1.8812], + device='cuda:3'), covar=tensor([0.3208, 0.2709, 0.1367, 0.1561, 0.2965, 0.1479, 0.3136, 0.2440], + device='cuda:3'), in_proj_covar=tensor([0.0770, 0.0788, 0.0637, 0.0882, 0.0766, 0.0691, 0.0779, 0.0698], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:30:43,321 INFO [train.py:903] (3/4) Epoch 10, batch 4300, loss[loss=0.2573, simple_loss=0.3309, pruned_loss=0.09186, over 19217.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3111, pruned_loss=0.08408, over 3826286.32 frames. 
], batch size: 69, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:30:56,174 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65762.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:00,064 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 5.358e+02 7.223e+02 8.854e+02 2.636e+03, threshold=1.445e+03, percent-clipped=3.0 +2023-04-01 17:31:17,207 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-01 17:31:27,984 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:31:35,313 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 17:31:43,144 INFO [train.py:903] (3/4) Epoch 10, batch 4350, loss[loss=0.2692, simple_loss=0.3418, pruned_loss=0.09831, over 18750.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3114, pruned_loss=0.08423, over 3829341.40 frames. ], batch size: 74, lr: 8.29e-03, grad_scale: 8.0 +2023-04-01 17:31:58,661 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:32:33,598 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-01 17:32:44,687 INFO [train.py:903] (3/4) Epoch 10, batch 4400, loss[loss=0.2553, simple_loss=0.322, pruned_loss=0.09426, over 19842.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3119, pruned_loss=0.08422, over 3834043.56 frames. ], batch size: 52, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:32:50,612 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65856.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:33:00,116 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.748e+02 7.554e+02 9.760e+02 1.805e+03, threshold=1.511e+03, percent-clipped=4.0 +2023-04-01 17:33:10,942 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 17:33:19,854 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 17:33:36,075 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65894.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:33:44,057 INFO [train.py:903] (3/4) Epoch 10, batch 4450, loss[loss=0.1955, simple_loss=0.2689, pruned_loss=0.06106, over 19743.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3127, pruned_loss=0.08467, over 3837722.93 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:34:45,020 INFO [train.py:903] (3/4) Epoch 10, batch 4500, loss[loss=0.2002, simple_loss=0.2713, pruned_loss=0.06453, over 19759.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3127, pruned_loss=0.08473, over 3823113.18 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 8.0 +2023-04-01 17:35:01,306 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.376e+02 6.626e+02 8.325e+02 1.832e+03, threshold=1.325e+03, percent-clipped=3.0 +2023-04-01 17:35:47,213 INFO [train.py:903] (3/4) Epoch 10, batch 4550, loss[loss=0.1899, simple_loss=0.2699, pruned_loss=0.05492, over 19747.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.311, pruned_loss=0.08373, over 3811128.24 frames. 
], batch size: 45, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:35:56,025 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 17:35:56,362 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66009.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:36:03,141 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 17:36:07,021 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:17,581 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 17:36:36,238 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66043.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:36:45,870 INFO [train.py:903] (3/4) Epoch 10, batch 4600, loss[loss=0.3028, simple_loss=0.3699, pruned_loss=0.1178, over 19603.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3124, pruned_loss=0.08447, over 3809744.13 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:36:55,845 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8562, 1.6503, 1.6282, 2.0356, 1.5730, 2.2896, 2.0683, 2.0015], + device='cuda:3'), covar=tensor([0.0704, 0.0847, 0.0923, 0.0820, 0.0880, 0.0592, 0.0821, 0.0562], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0227, 0.0224, 0.0253, 0.0242, 0.0216, 0.0201, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 17:37:00,875 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 5.689e+02 6.858e+02 9.462e+02 1.667e+03, threshold=1.372e+03, percent-clipped=8.0 +2023-04-01 17:37:43,790 INFO [train.py:903] (3/4) Epoch 10, batch 4650, loss[loss=0.2648, simple_loss=0.3315, pruned_loss=0.09912, over 17441.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3132, pruned_loss=0.08483, over 3806607.60 frames. ], batch size: 101, lr: 8.27e-03, grad_scale: 8.0 +2023-04-01 17:38:00,794 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 17:38:10,785 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 17:38:44,405 INFO [train.py:903] (3/4) Epoch 10, batch 4700, loss[loss=0.2415, simple_loss=0.3142, pruned_loss=0.08441, over 19749.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3126, pruned_loss=0.08463, over 3804424.27 frames. ], batch size: 51, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:38:57,803 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:39:00,970 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.007e+02 6.063e+02 7.892e+02 1.039e+03 2.104e+03, threshold=1.578e+03, percent-clipped=6.0 +2023-04-01 17:39:05,610 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-01 17:39:17,525 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66178.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:39:43,296 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66200.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:39:44,102 INFO [train.py:903] (3/4) Epoch 10, batch 4750, loss[loss=0.2194, simple_loss=0.2865, pruned_loss=0.07614, over 19389.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3133, pruned_loss=0.08542, over 3805098.16 frames. ], batch size: 47, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:39:50,415 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2834, 1.4537, 1.8007, 1.5342, 2.6915, 2.2498, 2.8980, 1.1117], + device='cuda:3'), covar=tensor([0.2046, 0.3500, 0.2108, 0.1623, 0.1310, 0.1705, 0.1365, 0.3471], + device='cuda:3'), in_proj_covar=tensor([0.0478, 0.0563, 0.0586, 0.0430, 0.0587, 0.0486, 0.0646, 0.0483], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:39:53,516 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0995, 5.3824, 2.9395, 4.7142, 1.1171, 5.3476, 5.4278, 5.5264], + device='cuda:3'), covar=tensor([0.0358, 0.0836, 0.1727, 0.0609, 0.3713, 0.0571, 0.0555, 0.0818], + device='cuda:3'), in_proj_covar=tensor([0.0421, 0.0358, 0.0422, 0.0314, 0.0372, 0.0352, 0.0347, 0.0380], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 17:40:43,461 INFO [train.py:903] (3/4) Epoch 10, batch 4800, loss[loss=0.2871, simple_loss=0.3539, pruned_loss=0.1102, over 18855.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3125, pruned_loss=0.08422, over 3822311.41 frames. ], batch size: 74, lr: 8.26e-03, grad_scale: 8.0 +2023-04-01 17:41:01,217 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.914e+02 7.080e+02 8.206e+02 1.527e+03, threshold=1.416e+03, percent-clipped=0.0 +2023-04-01 17:41:01,675 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66265.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:41:15,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66276.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:41:32,185 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66290.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:41:43,187 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 17:41:44,778 INFO [train.py:903] (3/4) Epoch 10, batch 4850, loss[loss=0.2294, simple_loss=0.3065, pruned_loss=0.07614, over 19529.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.312, pruned_loss=0.08395, over 3823173.54 frames. ], batch size: 56, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:42:02,638 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66315.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:42:10,354 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 17:42:30,026 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 17:42:35,766 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. 
Duration: 25.035 +2023-04-01 17:42:35,792 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 17:42:45,285 INFO [train.py:903] (3/4) Epoch 10, batch 4900, loss[loss=0.2079, simple_loss=0.2798, pruned_loss=0.06795, over 19751.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3111, pruned_loss=0.08344, over 3830335.53 frames. ], batch size: 46, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:42:45,302 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 17:43:01,824 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.967e+02 7.269e+02 9.008e+02 2.184e+03, threshold=1.454e+03, percent-clipped=11.0 +2023-04-01 17:43:03,041 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 17:43:22,101 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5230, 2.1640, 1.6632, 1.5951, 2.0820, 1.2269, 1.4749, 1.8260], + device='cuda:3'), covar=tensor([0.0751, 0.0573, 0.0752, 0.0498, 0.0424, 0.0942, 0.0517, 0.0348], + device='cuda:3'), in_proj_covar=tensor([0.0284, 0.0294, 0.0321, 0.0240, 0.0233, 0.0314, 0.0285, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 17:43:45,943 INFO [train.py:903] (3/4) Epoch 10, batch 4950, loss[loss=0.2522, simple_loss=0.3251, pruned_loss=0.08963, over 19671.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3116, pruned_loss=0.08337, over 3815594.14 frames. ], batch size: 58, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:02,373 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 17:44:05,931 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9088, 2.0002, 2.1652, 2.7383, 1.8038, 2.6271, 2.4727, 2.0205], + device='cuda:3'), covar=tensor([0.3440, 0.2957, 0.1408, 0.1659, 0.3396, 0.1387, 0.3218, 0.2649], + device='cuda:3'), in_proj_covar=tensor([0.0771, 0.0786, 0.0637, 0.0884, 0.0763, 0.0688, 0.0777, 0.0698], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:44:26,188 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 17:44:44,954 INFO [train.py:903] (3/4) Epoch 10, batch 5000, loss[loss=0.2901, simple_loss=0.3571, pruned_loss=0.1115, over 19466.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3125, pruned_loss=0.08418, over 3816389.25 frames. ], batch size: 64, lr: 8.25e-03, grad_scale: 8.0 +2023-04-01 17:44:56,400 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 17:45:01,937 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.419e+02 6.464e+02 8.305e+02 1.628e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 17:45:06,368 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-01 17:45:45,182 INFO [train.py:903] (3/4) Epoch 10, batch 5050, loss[loss=0.2215, simple_loss=0.2888, pruned_loss=0.07712, over 19405.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3125, pruned_loss=0.08431, over 3820985.12 frames. 
], batch size: 47, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:45:51,100 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:46:10,887 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66522.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:46:19,336 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 17:46:31,393 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66539.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:46:44,533 INFO [train.py:903] (3/4) Epoch 10, batch 5100, loss[loss=0.2, simple_loss=0.2662, pruned_loss=0.06691, over 19730.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3125, pruned_loss=0.08437, over 3827858.37 frames. ], batch size: 46, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:46:57,232 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 17:46:59,522 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 17:47:01,862 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.567e+02 5.864e+02 7.142e+02 9.030e+02 2.803e+03, threshold=1.428e+03, percent-clipped=6.0 +2023-04-01 17:47:04,021 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 17:47:09,004 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66571.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:47:21,909 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9278, 1.9607, 2.0463, 2.7565, 1.8399, 2.5227, 2.4813, 2.0201], + device='cuda:3'), covar=tensor([0.3135, 0.2481, 0.1345, 0.1616, 0.2933, 0.1258, 0.2912, 0.2395], + device='cuda:3'), in_proj_covar=tensor([0.0776, 0.0787, 0.0641, 0.0887, 0.0767, 0.0691, 0.0778, 0.0702], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:47:26,367 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 17:47:39,566 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66596.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:47:45,735 INFO [train.py:903] (3/4) Epoch 10, batch 5150, loss[loss=0.2489, simple_loss=0.3217, pruned_loss=0.08807, over 19515.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.313, pruned_loss=0.08441, over 3827942.63 frames. ], batch size: 64, lr: 8.24e-03, grad_scale: 8.0 +2023-04-01 17:47:56,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-01 17:48:09,060 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66620.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:10,434 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66621.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:48:29,350 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66637.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:48:33,113 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-01 17:48:45,642 INFO [train.py:903] (3/4) Epoch 10, batch 5200, loss[loss=0.2659, simple_loss=0.3337, pruned_loss=0.09901, over 18451.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3121, pruned_loss=0.0841, over 3824336.81 frames. ], batch size: 84, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:49:00,693 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 17:49:02,887 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.107e+02 5.688e+02 7.032e+02 8.430e+02 1.656e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-04-01 17:49:44,615 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 17:49:46,898 INFO [train.py:903] (3/4) Epoch 10, batch 5250, loss[loss=0.2056, simple_loss=0.2833, pruned_loss=0.06393, over 19473.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3122, pruned_loss=0.08456, over 3827508.41 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:50:28,115 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66735.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:50:45,010 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66749.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:50:47,102 INFO [train.py:903] (3/4) Epoch 10, batch 5300, loss[loss=0.2452, simple_loss=0.3248, pruned_loss=0.08274, over 19526.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3134, pruned_loss=0.08528, over 3830063.35 frames. ], batch size: 54, lr: 8.23e-03, grad_scale: 8.0 +2023-04-01 17:51:03,224 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 17:51:04,359 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.985e+02 7.858e+02 1.069e+03 1.957e+03, threshold=1.572e+03, percent-clipped=7.0 +2023-04-01 17:51:37,422 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4763, 1.1560, 1.4880, 1.2830, 2.6778, 3.6430, 3.3479, 3.8593], + device='cuda:3'), covar=tensor([0.1543, 0.4532, 0.4043, 0.2204, 0.0562, 0.0188, 0.0264, 0.0201], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0290, 0.0317, 0.0246, 0.0209, 0.0150, 0.0202, 0.0193], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:51:47,695 INFO [train.py:903] (3/4) Epoch 10, batch 5350, loss[loss=0.2683, simple_loss=0.3351, pruned_loss=0.1008, over 19361.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3136, pruned_loss=0.08533, over 3822725.62 frames. ], batch size: 70, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:52:19,370 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 17:52:46,938 INFO [train.py:903] (3/4) Epoch 10, batch 5400, loss[loss=0.226, simple_loss=0.2927, pruned_loss=0.07965, over 19415.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3123, pruned_loss=0.08444, over 3821649.92 frames. 
], batch size: 48, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:05,681 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.633e+02 6.871e+02 8.512e+02 1.525e+03, threshold=1.374e+03, percent-clipped=0.0 +2023-04-01 17:53:18,042 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66877.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:53:26,402 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66883.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:53:37,747 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66893.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:53:47,838 INFO [train.py:903] (3/4) Epoch 10, batch 5450, loss[loss=0.2667, simple_loss=0.3328, pruned_loss=0.1003, over 13266.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3133, pruned_loss=0.08537, over 3815621.90 frames. ], batch size: 135, lr: 8.22e-03, grad_scale: 8.0 +2023-04-01 17:53:49,487 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66902.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:54:08,173 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66918.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:54:47,612 INFO [train.py:903] (3/4) Epoch 10, batch 5500, loss[loss=0.2758, simple_loss=0.3396, pruned_loss=0.106, over 19544.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3128, pruned_loss=0.08527, over 3814100.40 frames. ], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:06,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.851e+02 7.428e+02 9.674e+02 2.066e+03, threshold=1.486e+03, percent-clipped=6.0 +2023-04-01 17:55:10,681 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 17:55:16,769 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66975.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:36,728 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66991.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:55:44,724 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66998.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 17:55:48,016 INFO [train.py:903] (3/4) Epoch 10, batch 5550, loss[loss=0.2108, simple_loss=0.294, pruned_loss=0.0638, over 19782.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3121, pruned_loss=0.08475, over 3812744.08 frames. ], batch size: 54, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:55:54,311 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-01 17:56:06,556 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:56:20,719 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0973, 2.0569, 1.7437, 1.6076, 1.2653, 1.5274, 0.4906, 1.0253], + device='cuda:3'), covar=tensor([0.0680, 0.0594, 0.0455, 0.0672, 0.1298, 0.0845, 0.1064, 0.1008], + device='cuda:3'), in_proj_covar=tensor([0.0330, 0.0328, 0.0329, 0.0349, 0.0418, 0.0348, 0.0305, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:56:42,313 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-01 17:56:47,796 INFO [train.py:903] (3/4) Epoch 10, batch 5600, loss[loss=0.2417, simple_loss=0.317, pruned_loss=0.08317, over 19790.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3116, pruned_loss=0.08458, over 3816505.80 frames. ], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:06,482 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 5.918e+02 7.804e+02 1.015e+03 2.269e+03, threshold=1.561e+03, percent-clipped=7.0 +2023-04-01 17:57:38,372 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67093.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:57:47,808 INFO [train.py:903] (3/4) Epoch 10, batch 5650, loss[loss=0.2452, simple_loss=0.3191, pruned_loss=0.08567, over 19787.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3114, pruned_loss=0.0846, over 3808301.53 frames. ], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-04-01 17:57:48,108 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 17:58:33,180 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 17:58:47,423 INFO [train.py:903] (3/4) Epoch 10, batch 5700, loss[loss=0.2632, simple_loss=0.3343, pruned_loss=0.09603, over 19299.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3117, pruned_loss=0.08483, over 3810396.53 frames. ], batch size: 66, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:05,253 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 6.145e+02 7.592e+02 1.064e+03 2.520e+03, threshold=1.518e+03, percent-clipped=7.0 +2023-04-01 17:59:06,732 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3487, 1.2143, 1.6414, 1.3064, 2.8882, 3.8086, 3.5137, 3.9370], + device='cuda:3'), covar=tensor([0.1393, 0.3278, 0.2828, 0.1915, 0.0413, 0.0143, 0.0177, 0.0180], + device='cuda:3'), in_proj_covar=tensor([0.0251, 0.0290, 0.0313, 0.0245, 0.0206, 0.0150, 0.0202, 0.0193], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 17:59:47,166 INFO [train.py:903] (3/4) Epoch 10, batch 5750, loss[loss=0.3161, simple_loss=0.3649, pruned_loss=0.1337, over 13235.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3116, pruned_loss=0.0847, over 3809264.69 frames. ], batch size: 136, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 17:59:48,358 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-01 17:59:48,859 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8118, 1.8682, 2.0120, 2.6075, 1.8493, 2.4575, 2.3441, 1.9208], + device='cuda:3'), covar=tensor([0.3471, 0.2872, 0.1382, 0.1603, 0.3007, 0.1338, 0.3283, 0.2555], + device='cuda:3'), in_proj_covar=tensor([0.0773, 0.0785, 0.0639, 0.0885, 0.0762, 0.0690, 0.0773, 0.0697], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 17:59:57,164 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8004, 1.2965, 1.3812, 1.7232, 3.3825, 1.0213, 2.3102, 3.6022], + device='cuda:3'), covar=tensor([0.0406, 0.2680, 0.2926, 0.1656, 0.0700, 0.2628, 0.1265, 0.0302], + device='cuda:3'), in_proj_covar=tensor([0.0348, 0.0334, 0.0348, 0.0315, 0.0344, 0.0329, 0.0320, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 17:59:57,209 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67208.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 17:59:57,978 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 18:00:02,613 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 18:00:47,882 INFO [train.py:903] (3/4) Epoch 10, batch 5800, loss[loss=0.1823, simple_loss=0.2615, pruned_loss=0.05159, over 19602.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3114, pruned_loss=0.08457, over 3812113.53 frames. ], batch size: 50, lr: 8.20e-03, grad_scale: 8.0 +2023-04-01 18:00:51,541 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67254.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:01:06,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.662e+02 5.628e+02 6.923e+02 8.923e+02 2.275e+03, threshold=1.385e+03, percent-clipped=6.0 +2023-04-01 18:01:20,739 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67279.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:01:47,314 INFO [train.py:903] (3/4) Epoch 10, batch 5850, loss[loss=0.2581, simple_loss=0.3246, pruned_loss=0.09582, over 19773.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3115, pruned_loss=0.08516, over 3805738.74 frames. ], batch size: 56, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:01:50,841 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6346, 1.3038, 1.3702, 2.0140, 1.6071, 2.0246, 2.0009, 1.7870], + device='cuda:3'), covar=tensor([0.0762, 0.0981, 0.1045, 0.0839, 0.0805, 0.0647, 0.0808, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0226, 0.0225, 0.0251, 0.0238, 0.0214, 0.0198, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 18:02:09,370 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:02:10,106 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.92 vs. limit=5.0 +2023-04-01 18:02:48,429 INFO [train.py:903] (3/4) Epoch 10, batch 5900, loss[loss=0.2272, simple_loss=0.3073, pruned_loss=0.07359, over 19531.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3121, pruned_loss=0.08502, over 3807952.61 frames. 
], batch size: 54, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:02:52,959 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 18:03:05,151 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.688e+02 6.588e+02 8.588e+02 1.646e+03, threshold=1.318e+03, percent-clipped=2.0 +2023-04-01 18:03:11,567 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 18:03:47,174 INFO [train.py:903] (3/4) Epoch 10, batch 5950, loss[loss=0.2424, simple_loss=0.3211, pruned_loss=0.08179, over 18640.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3118, pruned_loss=0.0846, over 3814740.35 frames. ], batch size: 74, lr: 8.19e-03, grad_scale: 8.0 +2023-04-01 18:04:02,524 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2402, 1.2706, 1.4386, 1.3231, 2.7647, 0.9943, 2.1188, 3.0752], + device='cuda:3'), covar=tensor([0.0581, 0.2616, 0.2671, 0.1954, 0.0874, 0.2542, 0.1272, 0.0377], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0333, 0.0350, 0.0316, 0.0345, 0.0331, 0.0324, 0.0345], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:04:26,278 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67434.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:38,311 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67445.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:04:45,818 INFO [train.py:903] (3/4) Epoch 10, batch 6000, loss[loss=0.2106, simple_loss=0.2794, pruned_loss=0.07093, over 19772.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3102, pruned_loss=0.08379, over 3828286.99 frames. ], batch size: 47, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:04:45,818 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 18:04:58,247 INFO [train.py:937] (3/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2805, pruned_loss=0.03952, over 944034.00 frames. +2023-04-01 18:04:58,248 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 18:05:15,107 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67464.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:05:17,959 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 5.414e+02 6.867e+02 8.657e+02 1.897e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 18:05:44,654 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67489.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:05:59,558 INFO [train.py:903] (3/4) Epoch 10, batch 6050, loss[loss=0.2328, simple_loss=0.315, pruned_loss=0.0753, over 19545.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.3109, pruned_loss=0.08445, over 3832445.15 frames. ], batch size: 56, lr: 8.18e-03, grad_scale: 8.0 +2023-04-01 18:06:35,116 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:06:59,164 INFO [train.py:903] (3/4) Epoch 10, batch 6100, loss[loss=0.2497, simple_loss=0.3248, pruned_loss=0.08731, over 19785.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3119, pruned_loss=0.08508, over 3804006.84 frames. 
], batch size: 56, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:07:10,573 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67560.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:07:19,955 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 5.925e+02 6.739e+02 8.410e+02 2.337e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-01 18:07:59,366 INFO [train.py:903] (3/4) Epoch 10, batch 6150, loss[loss=0.2166, simple_loss=0.2811, pruned_loss=0.07602, over 19762.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.311, pruned_loss=0.08481, over 3803528.95 frames. ], batch size: 46, lr: 8.18e-03, grad_scale: 4.0 +2023-04-01 18:08:28,207 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 18:08:33,638 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 18:08:36,627 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0293, 4.4478, 4.7645, 4.7276, 1.6000, 4.3901, 3.8424, 4.3737], + device='cuda:3'), covar=tensor([0.1222, 0.0687, 0.0496, 0.0485, 0.5297, 0.0635, 0.0564, 0.0983], + device='cuda:3'), in_proj_covar=tensor([0.0650, 0.0583, 0.0767, 0.0649, 0.0708, 0.0531, 0.0474, 0.0708], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 18:08:54,458 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5556, 3.0818, 2.0566, 2.0589, 2.1759, 2.5027, 1.0102, 2.2084], + device='cuda:3'), covar=tensor([0.0378, 0.0440, 0.0535, 0.0829, 0.0739, 0.0805, 0.0947, 0.0771], + device='cuda:3'), in_proj_covar=tensor([0.0331, 0.0329, 0.0326, 0.0348, 0.0420, 0.0346, 0.0306, 0.0321], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 18:08:59,752 INFO [train.py:903] (3/4) Epoch 10, batch 6200, loss[loss=0.2004, simple_loss=0.2803, pruned_loss=0.0603, over 19682.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3094, pruned_loss=0.08375, over 3806256.14 frames. ], batch size: 53, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:09:20,096 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.059e+02 6.776e+02 9.553e+02 2.024e+03, threshold=1.355e+03, percent-clipped=7.0 +2023-04-01 18:09:46,814 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67690.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:50,146 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3033, 1.5874, 2.0437, 1.5673, 3.1113, 2.5257, 3.4884, 1.4576], + device='cuda:3'), covar=tensor([0.2256, 0.3750, 0.2208, 0.1738, 0.1435, 0.1845, 0.1477, 0.3604], + device='cuda:3'), in_proj_covar=tensor([0.0480, 0.0566, 0.0588, 0.0431, 0.0589, 0.0486, 0.0645, 0.0487], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:09:56,468 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:09:59,568 INFO [train.py:903] (3/4) Epoch 10, batch 6250, loss[loss=0.2457, simple_loss=0.3129, pruned_loss=0.08922, over 19470.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3103, pruned_loss=0.08403, over 3810383.29 frames. 
], batch size: 64, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:10:09,918 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67710.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:15,785 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67715.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:10:30,842 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 18:10:59,326 INFO [train.py:903] (3/4) Epoch 10, batch 6300, loss[loss=0.2972, simple_loss=0.3502, pruned_loss=0.1221, over 13386.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3101, pruned_loss=0.08358, over 3816907.00 frames. ], batch size: 135, lr: 8.17e-03, grad_scale: 4.0 +2023-04-01 18:11:19,321 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 5.371e+02 6.798e+02 8.698e+02 1.915e+03, threshold=1.360e+03, percent-clipped=7.0 +2023-04-01 18:11:58,678 INFO [train.py:903] (3/4) Epoch 10, batch 6350, loss[loss=0.2733, simple_loss=0.346, pruned_loss=0.1003, over 19763.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3108, pruned_loss=0.08379, over 3824066.18 frames. ], batch size: 54, lr: 8.16e-03, grad_scale: 4.0 +2023-04-01 18:12:15,097 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-01 18:12:16,884 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67816.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:47,264 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:12:58,576 INFO [train.py:903] (3/4) Epoch 10, batch 6400, loss[loss=0.2284, simple_loss=0.2866, pruned_loss=0.08508, over 19760.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3117, pruned_loss=0.08384, over 3829463.17 frames. ], batch size: 47, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:13:07,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5707, 1.6570, 1.7803, 2.0858, 1.4221, 1.8029, 2.0049, 1.7001], + device='cuda:3'), covar=tensor([0.3206, 0.2621, 0.1405, 0.1564, 0.2860, 0.1385, 0.3301, 0.2411], + device='cuda:3'), in_proj_covar=tensor([0.0775, 0.0790, 0.0641, 0.0884, 0.0768, 0.0690, 0.0779, 0.0700], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:13:08,903 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.46 vs. limit=5.0 +2023-04-01 18:13:18,757 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.283e+02 6.601e+02 9.298e+02 1.582e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-01 18:13:26,981 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67875.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:13:59,328 INFO [train.py:903] (3/4) Epoch 10, batch 6450, loss[loss=0.2831, simple_loss=0.3435, pruned_loss=0.1114, over 19573.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3111, pruned_loss=0.08318, over 3827391.54 frames. 
], batch size: 61, lr: 8.16e-03, grad_scale: 8.0 +2023-04-01 18:14:12,165 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8547, 1.4506, 1.7985, 1.7020, 4.2429, 1.0818, 2.2173, 4.4128], + device='cuda:3'), covar=tensor([0.0364, 0.2879, 0.2565, 0.1826, 0.0774, 0.2757, 0.1486, 0.0249], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0333, 0.0346, 0.0313, 0.0342, 0.0329, 0.0323, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:14:42,675 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 18:14:59,310 INFO [train.py:903] (3/4) Epoch 10, batch 6500, loss[loss=0.229, simple_loss=0.3111, pruned_loss=0.07347, over 19673.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3103, pruned_loss=0.08267, over 3826539.54 frames. ], batch size: 59, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:15:05,079 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 18:15:19,446 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.752e+02 5.758e+02 7.165e+02 9.309e+02 2.290e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 18:15:29,997 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7104, 1.5259, 1.4633, 2.1770, 1.5540, 1.9822, 2.0767, 1.9050], + device='cuda:3'), covar=tensor([0.0762, 0.0913, 0.1018, 0.0717, 0.0887, 0.0674, 0.0783, 0.0613], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0228, 0.0226, 0.0253, 0.0238, 0.0214, 0.0198, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 18:15:47,574 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:15:58,447 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67999.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:16:01,748 INFO [train.py:903] (3/4) Epoch 10, batch 6550, loss[loss=0.2568, simple_loss=0.3284, pruned_loss=0.09259, over 18277.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.311, pruned_loss=0.08315, over 3826542.71 frames. ], batch size: 84, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:16:03,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7422, 1.5831, 1.3578, 1.7636, 1.5787, 1.3322, 1.3025, 1.6456], + device='cuda:3'), covar=tensor([0.1017, 0.1413, 0.1557, 0.0989, 0.1258, 0.0773, 0.1490, 0.0773], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0355, 0.0293, 0.0242, 0.0297, 0.0245, 0.0279, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:16:51,313 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:16:51,812 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 18:17:02,074 INFO [train.py:903] (3/4) Epoch 10, batch 6600, loss[loss=0.2319, simple_loss=0.3127, pruned_loss=0.07554, over 19541.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3107, pruned_loss=0.0829, over 3818701.78 frames. 
], batch size: 54, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:17:05,670 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68054.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:17:23,324 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.670e+02 5.913e+02 7.324e+02 8.709e+02 1.479e+03, threshold=1.465e+03, percent-clipped=1.0 +2023-04-01 18:17:46,987 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2845, 1.2666, 1.4144, 1.3710, 1.7812, 1.8619, 1.8961, 0.6165], + device='cuda:3'), covar=tensor([0.2076, 0.3700, 0.2179, 0.1684, 0.1330, 0.1900, 0.1114, 0.3624], + device='cuda:3'), in_proj_covar=tensor([0.0483, 0.0569, 0.0590, 0.0432, 0.0590, 0.0488, 0.0646, 0.0488], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:18:02,823 INFO [train.py:903] (3/4) Epoch 10, batch 6650, loss[loss=0.2434, simple_loss=0.3224, pruned_loss=0.08219, over 19684.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3105, pruned_loss=0.08308, over 3813939.42 frames. ], batch size: 59, lr: 8.15e-03, grad_scale: 8.0 +2023-04-01 18:18:07,759 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68105.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:19:02,660 INFO [train.py:903] (3/4) Epoch 10, batch 6700, loss[loss=0.2263, simple_loss=0.3082, pruned_loss=0.07218, over 19660.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3102, pruned_loss=0.08308, over 3820506.68 frames. ], batch size: 58, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:19:09,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:19:22,651 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.762e+02 5.919e+02 7.224e+02 9.287e+02 2.274e+03, threshold=1.445e+03, percent-clipped=4.0 +2023-04-01 18:19:24,180 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:00,034 INFO [train.py:903] (3/4) Epoch 10, batch 6750, loss[loss=0.2008, simple_loss=0.2733, pruned_loss=0.06415, over 19062.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3096, pruned_loss=0.08285, over 3820499.66 frames. ], batch size: 42, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:20:07,079 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-01 18:20:17,458 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3328, 3.9335, 2.5292, 3.4760, 1.1964, 3.6293, 3.7451, 3.7729], + device='cuda:3'), covar=tensor([0.0658, 0.0997, 0.1857, 0.0815, 0.3415, 0.0855, 0.0775, 0.0963], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0361, 0.0425, 0.0316, 0.0376, 0.0357, 0.0348, 0.0379], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 18:20:51,854 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:20:56,748 INFO [train.py:903] (3/4) Epoch 10, batch 6800, loss[loss=0.1819, simple_loss=0.2567, pruned_loss=0.05355, over 19765.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3098, pruned_loss=0.08301, over 3822664.87 frames. 
], batch size: 47, lr: 8.14e-03, grad_scale: 8.0 +2023-04-01 18:21:15,043 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.210e+02 6.150e+02 7.610e+02 9.082e+02 1.904e+03, threshold=1.522e+03, percent-clipped=4.0 +2023-04-01 18:21:18,283 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:21:41,179 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 18:21:41,644 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 18:21:44,653 INFO [train.py:903] (3/4) Epoch 11, batch 0, loss[loss=0.1989, simple_loss=0.2841, pruned_loss=0.05682, over 19666.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2841, pruned_loss=0.05682, over 19666.00 frames. ], batch size: 55, lr: 7.77e-03, grad_scale: 8.0 +2023-04-01 18:21:44,654 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 18:21:56,757 INFO [train.py:937] (3/4) Epoch 11, validation: loss=0.181, simple_loss=0.2818, pruned_loss=0.04012, over 944034.00 frames. +2023-04-01 18:21:56,758 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 18:22:09,357 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 18:22:22,222 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-01 18:22:36,682 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8389, 1.6550, 1.5265, 1.8193, 1.7013, 1.6731, 1.4961, 1.7835], + device='cuda:3'), covar=tensor([0.0916, 0.1416, 0.1333, 0.0973, 0.1156, 0.0520, 0.1188, 0.0684], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0355, 0.0290, 0.0240, 0.0297, 0.0246, 0.0277, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:22:57,941 INFO [train.py:903] (3/4) Epoch 11, batch 50, loss[loss=0.1856, simple_loss=0.2684, pruned_loss=0.05139, over 19774.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3086, pruned_loss=0.08103, over 861265.24 frames. ], batch size: 46, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:23:15,262 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:23:35,567 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 18:23:46,885 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.915e+02 5.747e+02 7.027e+02 9.557e+02 1.564e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 18:24:00,207 INFO [train.py:903] (3/4) Epoch 11, batch 100, loss[loss=0.1995, simple_loss=0.2703, pruned_loss=0.06433, over 19818.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3037, pruned_loss=0.07843, over 1522644.01 frames. ], batch size: 49, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:24:13,685 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. 
Duration: 29.1166875 +2023-04-01 18:24:32,241 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5634, 1.6318, 1.8319, 2.1054, 1.3490, 1.8169, 2.0231, 1.7184], + device='cuda:3'), covar=tensor([0.3218, 0.2704, 0.1421, 0.1600, 0.2914, 0.1476, 0.3505, 0.2569], + device='cuda:3'), in_proj_covar=tensor([0.0773, 0.0789, 0.0641, 0.0880, 0.0771, 0.0695, 0.0779, 0.0696], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:24:42,555 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68413.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:47,972 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:24:57,054 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68425.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:01,044 INFO [train.py:903] (3/4) Epoch 11, batch 150, loss[loss=0.2297, simple_loss=0.3009, pruned_loss=0.07928, over 19610.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3082, pruned_loss=0.08145, over 2048022.82 frames. ], batch size: 50, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:25:11,956 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68438.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:25,314 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68449.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:25:26,576 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68450.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:36,631 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68458.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:25:47,780 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.281e+02 6.719e+02 8.986e+02 1.619e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-01 18:26:00,681 INFO [train.py:903] (3/4) Epoch 11, batch 200, loss[loss=0.2501, simple_loss=0.3221, pruned_loss=0.089, over 19734.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3104, pruned_loss=0.08302, over 2438066.85 frames. ], batch size: 63, lr: 7.76e-03, grad_scale: 8.0 +2023-04-01 18:26:02,034 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 18:27:03,595 INFO [train.py:903] (3/4) Epoch 11, batch 250, loss[loss=0.1965, simple_loss=0.2768, pruned_loss=0.05806, over 19618.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3099, pruned_loss=0.08306, over 2725421.42 frames. 
], batch size: 50, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:27:46,871 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68564.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:27:50,251 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4162, 1.5388, 2.0172, 1.9042, 3.1228, 4.0377, 3.9961, 4.3588], + device='cuda:3'), covar=tensor([0.1571, 0.3231, 0.2840, 0.1773, 0.0552, 0.0260, 0.0174, 0.0171], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0295, 0.0321, 0.0251, 0.0212, 0.0154, 0.0204, 0.0193], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 18:27:51,031 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.001e+02 5.512e+02 6.613e+02 8.406e+02 1.798e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 18:28:07,068 INFO [train.py:903] (3/4) Epoch 11, batch 300, loss[loss=0.2384, simple_loss=0.3137, pruned_loss=0.08156, over 19522.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3099, pruned_loss=0.08293, over 2974967.78 frames. ], batch size: 56, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:28:27,756 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68596.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:28:30,584 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 18:29:09,811 INFO [train.py:903] (3/4) Epoch 11, batch 350, loss[loss=0.181, simple_loss=0.2616, pruned_loss=0.05024, over 19754.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3092, pruned_loss=0.08239, over 3169358.91 frames. ], batch size: 46, lr: 7.75e-03, grad_scale: 8.0 +2023-04-01 18:29:16,649 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 18:29:40,788 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3567, 2.1466, 1.6518, 1.4512, 2.0644, 1.1765, 1.2390, 1.7515], + device='cuda:3'), covar=tensor([0.0919, 0.0644, 0.0870, 0.0642, 0.0425, 0.1028, 0.0662, 0.0417], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0294, 0.0325, 0.0243, 0.0232, 0.0316, 0.0285, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:29:57,402 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.409e+02 6.235e+02 7.842e+02 1.948e+03, threshold=1.247e+03, percent-clipped=7.0 +2023-04-01 18:30:09,872 INFO [train.py:903] (3/4) Epoch 11, batch 400, loss[loss=0.2008, simple_loss=0.2754, pruned_loss=0.06308, over 19398.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3098, pruned_loss=0.08302, over 3302588.91 frames. 
], batch size: 47, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:30:10,350 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8706, 1.3179, 1.0462, 0.9912, 1.1604, 0.9542, 0.9854, 1.1982], + device='cuda:3'), covar=tensor([0.0565, 0.0785, 0.1074, 0.0559, 0.0493, 0.1183, 0.0508, 0.0430], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0294, 0.0326, 0.0244, 0.0233, 0.0317, 0.0286, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:30:40,470 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68703.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:30:54,278 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68714.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:11,172 INFO [train.py:903] (3/4) Epoch 11, batch 450, loss[loss=0.2337, simple_loss=0.3134, pruned_loss=0.07698, over 19600.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3075, pruned_loss=0.08198, over 3426568.44 frames. ], batch size: 61, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:31:25,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68739.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:49,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 18:31:50,646 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 18:31:51,685 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:31:59,523 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.180e+02 6.491e+02 8.751e+02 1.660e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-01 18:32:13,193 INFO [train.py:903] (3/4) Epoch 11, batch 500, loss[loss=0.1843, simple_loss=0.2627, pruned_loss=0.05295, over 19594.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3079, pruned_loss=0.08183, over 3516610.95 frames. ], batch size: 50, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:32:31,353 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4604, 1.5329, 1.8877, 1.6679, 2.7214, 2.1560, 2.8176, 1.4385], + device='cuda:3'), covar=tensor([0.1935, 0.3282, 0.1973, 0.1596, 0.1298, 0.1765, 0.1355, 0.3255], + device='cuda:3'), in_proj_covar=tensor([0.0479, 0.0565, 0.0588, 0.0432, 0.0585, 0.0485, 0.0644, 0.0485], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:33:02,552 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.89 vs. 
limit=5.0 +2023-04-01 18:33:05,570 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68820.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:33:11,248 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5473, 1.4487, 1.4118, 1.5821, 3.0729, 1.1109, 2.0452, 3.3718], + device='cuda:3'), covar=tensor([0.0484, 0.2450, 0.2651, 0.1676, 0.0767, 0.2418, 0.1379, 0.0304], + device='cuda:3'), in_proj_covar=tensor([0.0354, 0.0336, 0.0351, 0.0319, 0.0347, 0.0335, 0.0331, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:33:17,990 INFO [train.py:903] (3/4) Epoch 11, batch 550, loss[loss=0.2259, simple_loss=0.3014, pruned_loss=0.07515, over 19832.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3091, pruned_loss=0.08264, over 3580287.89 frames. ], batch size: 52, lr: 7.74e-03, grad_scale: 8.0 +2023-04-01 18:33:36,713 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68845.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:34:06,690 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.206e+02 6.523e+02 8.559e+02 1.532e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 18:34:18,091 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68876.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:34:21,109 INFO [train.py:903] (3/4) Epoch 11, batch 600, loss[loss=0.287, simple_loss=0.3358, pruned_loss=0.1191, over 13759.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3079, pruned_loss=0.08152, over 3632801.55 frames. ], batch size: 135, lr: 7.73e-03, grad_scale: 8.0 +2023-04-01 18:35:07,353 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 18:35:23,584 INFO [train.py:903] (3/4) Epoch 11, batch 650, loss[loss=0.1883, simple_loss=0.2662, pruned_loss=0.05513, over 19059.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3085, pruned_loss=0.08185, over 3687390.04 frames. ], batch size: 42, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:35:36,884 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:36:11,531 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0136, 1.7819, 2.1730, 1.8966, 4.3710, 1.1738, 2.5735, 4.7730], + device='cuda:3'), covar=tensor([0.0370, 0.2700, 0.2460, 0.1693, 0.0777, 0.2644, 0.1186, 0.0192], + device='cuda:3'), in_proj_covar=tensor([0.0353, 0.0336, 0.0351, 0.0318, 0.0346, 0.0332, 0.0330, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:36:14,706 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.863e+02 5.561e+02 7.157e+02 8.945e+02 2.089e+03, threshold=1.431e+03, percent-clipped=8.0 +2023-04-01 18:36:26,509 INFO [train.py:903] (3/4) Epoch 11, batch 700, loss[loss=0.2482, simple_loss=0.3262, pruned_loss=0.08511, over 19742.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3082, pruned_loss=0.08144, over 3727282.84 frames. ], batch size: 63, lr: 7.73e-03, grad_scale: 4.0 +2023-04-01 18:36:41,605 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.39 vs. limit=5.0 +2023-04-01 18:37:30,478 INFO [train.py:903] (3/4) Epoch 11, batch 750, loss[loss=0.2581, simple_loss=0.329, pruned_loss=0.09362, over 19299.00 frames. 
], tot_loss[loss=0.2361, simple_loss=0.3087, pruned_loss=0.08172, over 3757341.08 frames. ], batch size: 66, lr: 7.72e-03, grad_scale: 4.0 +2023-04-01 18:37:44,831 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0209, 1.9418, 1.7264, 1.5531, 1.3025, 1.5297, 0.4419, 0.8369], + device='cuda:3'), covar=tensor([0.0411, 0.0422, 0.0309, 0.0515, 0.0981, 0.0605, 0.0849, 0.0763], + device='cuda:3'), in_proj_covar=tensor([0.0333, 0.0329, 0.0330, 0.0352, 0.0425, 0.0349, 0.0309, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 18:37:52,539 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69047.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:01,898 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:10,049 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69061.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:38:20,856 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.289e+02 6.423e+02 8.063e+02 1.861e+03, threshold=1.285e+03, percent-clipped=2.0 +2023-04-01 18:38:33,460 INFO [train.py:903] (3/4) Epoch 11, batch 800, loss[loss=0.2683, simple_loss=0.3315, pruned_loss=0.1026, over 19601.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3079, pruned_loss=0.0814, over 3778570.43 frames. ], batch size: 61, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:38:49,967 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 18:39:34,220 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5618, 4.0885, 2.4857, 3.6766, 0.9946, 3.8732, 3.8404, 3.9853], + device='cuda:3'), covar=tensor([0.0579, 0.1070, 0.2101, 0.0779, 0.3876, 0.0735, 0.0763, 0.0979], + device='cuda:3'), in_proj_covar=tensor([0.0421, 0.0362, 0.0429, 0.0315, 0.0373, 0.0361, 0.0349, 0.0382], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 18:39:35,201 INFO [train.py:903] (3/4) Epoch 11, batch 850, loss[loss=0.2256, simple_loss=0.3056, pruned_loss=0.07281, over 19615.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3086, pruned_loss=0.08157, over 3805211.46 frames. ], batch size: 57, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:39:39,344 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69132.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:39:41,078 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-01 18:40:04,946 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7378, 1.8042, 1.9764, 2.4613, 1.5778, 2.2376, 2.2201, 1.8587], + device='cuda:3'), covar=tensor([0.3352, 0.2772, 0.1392, 0.1639, 0.3178, 0.1427, 0.3204, 0.2568], + device='cuda:3'), in_proj_covar=tensor([0.0779, 0.0795, 0.0641, 0.0886, 0.0771, 0.0698, 0.0780, 0.0704], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 18:40:09,379 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2315, 2.9275, 2.0062, 2.6754, 0.7045, 2.8565, 2.7708, 2.8503], + device='cuda:3'), covar=tensor([0.1061, 0.1306, 0.2143, 0.0915, 0.3751, 0.1051, 0.0916, 0.1399], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0359, 0.0427, 0.0314, 0.0371, 0.0360, 0.0348, 0.0380], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 18:40:12,647 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69157.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:18,350 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:26,036 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.808e+02 7.359e+02 9.451e+02 2.011e+03, threshold=1.472e+03, percent-clipped=12.0 +2023-04-01 18:40:32,113 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 18:40:38,101 INFO [train.py:903] (3/4) Epoch 11, batch 900, loss[loss=0.2132, simple_loss=0.2904, pruned_loss=0.06796, over 19662.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3081, pruned_loss=0.08116, over 3812698.75 frames. ], batch size: 53, lr: 7.72e-03, grad_scale: 8.0 +2023-04-01 18:40:39,673 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69180.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:40:48,343 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69186.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:41:28,530 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 18:41:42,669 INFO [train.py:903] (3/4) Epoch 11, batch 950, loss[loss=0.2099, simple_loss=0.2915, pruned_loss=0.06415, over 19680.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3086, pruned_loss=0.08121, over 3816304.22 frames. ], batch size: 53, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:41:49,490 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 18:42:01,319 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69243.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:42:11,247 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-01 18:42:24,851 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69263.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:42:33,739 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.204e+02 6.315e+02 7.601e+02 2.315e+03, threshold=1.263e+03, percent-clipped=1.0 +2023-04-01 18:42:47,233 INFO [train.py:903] (3/4) Epoch 11, batch 1000, loss[loss=0.2888, simple_loss=0.3441, pruned_loss=0.1168, over 13571.00 frames. 
], tot_loss[loss=0.2361, simple_loss=0.3087, pruned_loss=0.08168, over 3810817.55 frames. ], batch size: 136, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:01,215 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:27,049 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:43,482 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 18:43:48,901 INFO [train.py:903] (3/4) Epoch 11, batch 1050, loss[loss=0.2346, simple_loss=0.3116, pruned_loss=0.07875, over 19659.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3083, pruned_loss=0.08131, over 3806111.69 frames. ], batch size: 55, lr: 7.71e-03, grad_scale: 8.0 +2023-04-01 18:43:57,031 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69336.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,174 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3820, 3.9574, 2.4846, 3.5399, 1.1465, 3.7272, 3.7161, 3.7658], + device='cuda:3'), covar=tensor([0.0623, 0.0905, 0.2007, 0.0811, 0.3766, 0.0892, 0.0829, 0.1113], + device='cuda:3'), in_proj_covar=tensor([0.0423, 0.0361, 0.0428, 0.0312, 0.0375, 0.0361, 0.0350, 0.0383], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 18:43:59,263 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69338.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:43:59,385 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9208, 1.9479, 1.6992, 1.4335, 1.2806, 1.5365, 0.3980, 0.8564], + device='cuda:3'), covar=tensor([0.0636, 0.0580, 0.0395, 0.0665, 0.1197, 0.0712, 0.0954, 0.0940], + device='cuda:3'), in_proj_covar=tensor([0.0336, 0.0329, 0.0331, 0.0352, 0.0425, 0.0348, 0.0309, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 18:44:02,815 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2554, 3.6999, 2.2553, 2.2521, 3.4300, 2.0462, 1.5264, 1.9955], + device='cuda:3'), covar=tensor([0.1039, 0.0426, 0.0829, 0.0724, 0.0364, 0.0943, 0.0845, 0.0642], + device='cuda:3'), in_proj_covar=tensor([0.0285, 0.0301, 0.0328, 0.0246, 0.0237, 0.0318, 0.0289, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:44:24,568 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 18:44:38,149 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 5.222e+02 6.475e+02 7.672e+02 1.315e+03, threshold=1.295e+03, percent-clipped=1.0 +2023-04-01 18:44:49,372 INFO [train.py:903] (3/4) Epoch 11, batch 1100, loss[loss=0.2166, simple_loss=0.305, pruned_loss=0.06414, over 19769.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3085, pruned_loss=0.08147, over 3806447.22 frames. 
], batch size: 54, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:44:57,866 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:24,084 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69405.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:27,992 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2511, 1.3403, 1.2273, 1.0260, 1.0791, 1.1139, 0.0482, 0.3592], + device='cuda:3'), covar=tensor([0.0477, 0.0424, 0.0290, 0.0379, 0.0876, 0.0372, 0.0770, 0.0702], + device='cuda:3'), in_proj_covar=tensor([0.0333, 0.0327, 0.0330, 0.0350, 0.0423, 0.0346, 0.0307, 0.0322], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 18:45:32,698 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69412.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:39,614 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69418.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:45:53,079 INFO [train.py:903] (3/4) Epoch 11, batch 1150, loss[loss=0.2193, simple_loss=0.296, pruned_loss=0.07131, over 19671.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3102, pruned_loss=0.08238, over 3817715.22 frames. ], batch size: 55, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:46:11,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69443.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:46:41,981 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.885e+02 7.181e+02 8.495e+02 1.651e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-01 18:46:55,767 INFO [train.py:903] (3/4) Epoch 11, batch 1200, loss[loss=0.3033, simple_loss=0.3536, pruned_loss=0.1265, over 13262.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.31, pruned_loss=0.08216, over 3818457.35 frames. ], batch size: 136, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:46:58,595 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.89 vs. limit=5.0 +2023-04-01 18:47:01,621 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7005, 4.2059, 4.4230, 4.4091, 1.4104, 4.1244, 3.5718, 4.1133], + device='cuda:3'), covar=tensor([0.1428, 0.0784, 0.0530, 0.0576, 0.5493, 0.0559, 0.0644, 0.1036], + device='cuda:3'), in_proj_covar=tensor([0.0662, 0.0594, 0.0783, 0.0668, 0.0723, 0.0541, 0.0486, 0.0725], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 18:47:27,277 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 18:47:48,950 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:53,431 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:47:59,004 INFO [train.py:903] (3/4) Epoch 11, batch 1250, loss[loss=0.2593, simple_loss=0.3329, pruned_loss=0.09286, over 19727.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3104, pruned_loss=0.08233, over 3805716.62 frames. 
], batch size: 63, lr: 7.70e-03, grad_scale: 8.0 +2023-04-01 18:48:00,223 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69530.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:48:49,514 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.575e+02 6.899e+02 8.428e+02 1.860e+03, threshold=1.380e+03, percent-clipped=3.0 +2023-04-01 18:49:00,826 INFO [train.py:903] (3/4) Epoch 11, batch 1300, loss[loss=0.2133, simple_loss=0.303, pruned_loss=0.06181, over 18825.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3104, pruned_loss=0.08236, over 3827113.22 frames. ], batch size: 74, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:49:10,211 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:49:38,641 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69607.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:50:04,326 INFO [train.py:903] (3/4) Epoch 11, batch 1350, loss[loss=0.2338, simple_loss=0.3055, pruned_loss=0.08107, over 17273.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3109, pruned_loss=0.08226, over 3809965.96 frames. ], batch size: 101, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:50:13,535 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:19,167 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:26,772 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:50:30,832 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-01 18:50:54,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.709e+02 6.895e+02 8.476e+02 1.895e+03, threshold=1.379e+03, percent-clipped=5.0 +2023-04-01 18:51:08,229 INFO [train.py:903] (3/4) Epoch 11, batch 1400, loss[loss=0.213, simple_loss=0.2743, pruned_loss=0.07587, over 19322.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3094, pruned_loss=0.08171, over 3817437.17 frames. ], batch size: 44, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:51:12,914 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69682.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:51:24,977 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69692.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:51:36,551 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69702.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:03,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:52:09,959 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 18:52:10,948 INFO [train.py:903] (3/4) Epoch 11, batch 1450, loss[loss=0.2761, simple_loss=0.3249, pruned_loss=0.1136, over 19744.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3102, pruned_loss=0.08223, over 3822748.70 frames. 
], batch size: 51, lr: 7.69e-03, grad_scale: 8.0 +2023-04-01 18:52:12,413 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69730.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:36,751 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:52:44,834 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69756.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:01,499 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.344e+02 6.531e+02 8.357e+02 2.062e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 18:53:10,372 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:13,430 INFO [train.py:903] (3/4) Epoch 11, batch 1500, loss[loss=0.2225, simple_loss=0.2986, pruned_loss=0.07316, over 19590.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3094, pruned_loss=0.08194, over 3835202.19 frames. ], batch size: 61, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:53:36,797 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:53:43,105 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69801.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:54:15,941 INFO [train.py:903] (3/4) Epoch 11, batch 1550, loss[loss=0.2006, simple_loss=0.2675, pruned_loss=0.0669, over 19755.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.309, pruned_loss=0.08187, over 3827914.52 frames. ], batch size: 46, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:54:38,378 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69845.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:07,633 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.582e+02 5.814e+02 6.999e+02 8.951e+02 2.972e+03, threshold=1.400e+03, percent-clipped=5.0 +2023-04-01 18:55:09,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69871.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:20,439 INFO [train.py:903] (3/4) Epoch 11, batch 1600, loss[loss=0.1882, simple_loss=0.2611, pruned_loss=0.05763, over 19324.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3085, pruned_loss=0.08185, over 3829945.40 frames. ], batch size: 44, lr: 7.68e-03, grad_scale: 8.0 +2023-04-01 18:55:38,103 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-01 18:55:41,190 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69895.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:55:46,716 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 18:55:48,146 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69901.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:12,826 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69920.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:20,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69926.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:56:23,826 INFO [train.py:903] (3/4) Epoch 11, batch 1650, loss[loss=0.2283, simple_loss=0.2934, pruned_loss=0.08156, over 19380.00 frames. 
], tot_loss[loss=0.2354, simple_loss=0.3079, pruned_loss=0.08138, over 3827284.69 frames. ], batch size: 47, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:56:33,416 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1625, 1.7591, 1.5954, 2.1520, 1.9776, 1.7886, 1.5683, 1.9096], + device='cuda:3'), covar=tensor([0.0828, 0.1514, 0.1414, 0.0842, 0.1178, 0.0484, 0.1316, 0.0645], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0345, 0.0286, 0.0236, 0.0295, 0.0241, 0.0275, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 18:57:00,776 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69958.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:16,657 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 5.294e+02 6.504e+02 8.314e+02 1.576e+03, threshold=1.301e+03, percent-clipped=2.0 +2023-04-01 18:57:26,345 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69978.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:57:27,010 INFO [train.py:903] (3/4) Epoch 11, batch 1700, loss[loss=0.2069, simple_loss=0.2771, pruned_loss=0.06832, over 19335.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3083, pruned_loss=0.08149, over 3825039.85 frames. ], batch size: 44, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:57:32,087 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:57:59,102 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70003.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 18:58:03,177 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:09,483 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 18:58:29,307 INFO [train.py:903] (3/4) Epoch 11, batch 1750, loss[loss=0.2624, simple_loss=0.3343, pruned_loss=0.09527, over 19669.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3093, pruned_loss=0.08232, over 3820448.84 frames. ], batch size: 58, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 18:58:31,987 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:58:40,221 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70036.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 18:59:02,458 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70053.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:11,019 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-01 18:59:22,247 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.188e+02 6.480e+02 8.127e+02 9.904e+02 1.581e+03, threshold=1.625e+03, percent-clipped=5.0 +2023-04-01 18:59:34,189 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70078.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 18:59:34,847 INFO [train.py:903] (3/4) Epoch 11, batch 1800, loss[loss=0.2241, simple_loss=0.3009, pruned_loss=0.07371, over 19389.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3103, pruned_loss=0.0827, over 3815713.35 frames. 
], batch size: 48, lr: 7.67e-03, grad_scale: 8.0 +2023-04-01 19:00:02,666 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70101.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:08,716 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-01 19:00:34,705 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:35,503 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 19:00:36,753 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70127.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:37,763 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70128.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:00:38,700 INFO [train.py:903] (3/4) Epoch 11, batch 1850, loss[loss=0.2589, simple_loss=0.3342, pruned_loss=0.0918, over 19539.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3098, pruned_loss=0.08213, over 3814332.93 frames. ], batch size: 56, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:05,172 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70151.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 19:01:06,370 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70152.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:01:13,693 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 19:01:30,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.171e+02 6.551e+02 7.968e+02 1.780e+03, threshold=1.310e+03, percent-clipped=1.0 +2023-04-01 19:01:41,532 INFO [train.py:903] (3/4) Epoch 11, batch 1900, loss[loss=0.2152, simple_loss=0.2972, pruned_loss=0.06661, over 19666.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3094, pruned_loss=0.08164, over 3821413.21 frames. ], batch size: 53, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:01:57,150 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 19:02:04,776 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 19:02:28,859 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 19:02:42,952 INFO [train.py:903] (3/4) Epoch 11, batch 1950, loss[loss=0.2498, simple_loss=0.3244, pruned_loss=0.08765, over 18926.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3097, pruned_loss=0.08205, over 3822741.33 frames. ], batch size: 74, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:03:35,381 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.987e+02 6.114e+02 7.804e+02 3.131e+03, threshold=1.223e+03, percent-clipped=9.0 +2023-04-01 19:03:37,258 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-04-01 19:03:43,676 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3894, 2.2513, 1.7911, 1.4530, 2.0881, 1.3223, 1.2786, 1.9376], + device='cuda:3'), covar=tensor([0.0784, 0.0565, 0.0801, 0.0722, 0.0402, 0.0997, 0.0627, 0.0334], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0293, 0.0321, 0.0242, 0.0231, 0.0314, 0.0284, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:03:45,798 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70278.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:03:46,664 INFO [train.py:903] (3/4) Epoch 11, batch 2000, loss[loss=0.2514, simple_loss=0.3235, pruned_loss=0.08971, over 19375.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.31, pruned_loss=0.08223, over 3813582.94 frames. ], batch size: 70, lr: 7.66e-03, grad_scale: 8.0 +2023-04-01 19:04:48,767 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 19:04:51,058 INFO [train.py:903] (3/4) Epoch 11, batch 2050, loss[loss=0.2657, simple_loss=0.3329, pruned_loss=0.09923, over 18816.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3098, pruned_loss=0.0818, over 3818280.82 frames. ], batch size: 74, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:05:07,049 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 19:05:08,148 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 19:05:27,796 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 19:05:43,673 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.481e+02 7.126e+02 9.531e+02 2.002e+03, threshold=1.425e+03, percent-clipped=10.0 +2023-04-01 19:05:54,410 INFO [train.py:903] (3/4) Epoch 11, batch 2100, loss[loss=0.3509, simple_loss=0.3838, pruned_loss=0.159, over 13747.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3107, pruned_loss=0.08271, over 3816355.40 frames. ], batch size: 136, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:06:24,811 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 19:06:30,870 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70407.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:06:48,465 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 19:06:57,561 INFO [train.py:903] (3/4) Epoch 11, batch 2150, loss[loss=0.1956, simple_loss=0.2691, pruned_loss=0.06107, over 19773.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3106, pruned_loss=0.08262, over 3819095.08 frames. 
], batch size: 47, lr: 7.65e-03, grad_scale: 8.0 +2023-04-01 19:07:01,543 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70432.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 19:07:51,367 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.585e+02 6.733e+02 8.759e+02 1.859e+03, threshold=1.347e+03, percent-clipped=4.0 +2023-04-01 19:07:52,877 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70472.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:08:02,119 INFO [train.py:903] (3/4) Epoch 11, batch 2200, loss[loss=0.2468, simple_loss=0.3213, pruned_loss=0.08618, over 19532.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3099, pruned_loss=0.08216, over 3818970.30 frames. ], batch size: 54, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:09:06,388 INFO [train.py:903] (3/4) Epoch 11, batch 2250, loss[loss=0.2381, simple_loss=0.3135, pruned_loss=0.08135, over 19558.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3096, pruned_loss=0.08186, over 3814250.55 frames. ], batch size: 61, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:01,000 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.990e+02 6.839e+02 8.349e+02 1.575e+03, threshold=1.368e+03, percent-clipped=2.0 +2023-04-01 19:10:10,416 INFO [train.py:903] (3/4) Epoch 11, batch 2300, loss[loss=0.23, simple_loss=0.3044, pruned_loss=0.07776, over 19660.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3099, pruned_loss=0.08185, over 3819304.37 frames. ], batch size: 55, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:10:19,835 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:10:23,148 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 19:11:05,025 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70622.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:11:13,191 INFO [train.py:903] (3/4) Epoch 11, batch 2350, loss[loss=0.2329, simple_loss=0.3085, pruned_loss=0.07868, over 19543.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3078, pruned_loss=0.08047, over 3819510.73 frames. ], batch size: 56, lr: 7.64e-03, grad_scale: 4.0 +2023-04-01 19:11:27,849 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9922, 2.0902, 2.3144, 2.9608, 1.9373, 2.6758, 2.6583, 2.0914], + device='cuda:3'), covar=tensor([0.3666, 0.3203, 0.1375, 0.1721, 0.3511, 0.1488, 0.3073, 0.2655], + device='cuda:3'), in_proj_covar=tensor([0.0788, 0.0799, 0.0644, 0.0887, 0.0776, 0.0694, 0.0779, 0.0706], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 19:11:56,876 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 19:12:06,972 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.270e+02 6.524e+02 8.351e+02 2.247e+03, threshold=1.305e+03, percent-clipped=5.0 +2023-04-01 19:12:12,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 19:12:16,797 INFO [train.py:903] (3/4) Epoch 11, batch 2400, loss[loss=0.204, simple_loss=0.2839, pruned_loss=0.06199, over 19483.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3064, pruned_loss=0.07972, over 3819924.43 frames. 
], batch size: 49, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:12:58,761 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5964, 1.6727, 2.0329, 1.6433, 2.5871, 3.0024, 2.8731, 3.1262], + device='cuda:3'), covar=tensor([0.1289, 0.2801, 0.2411, 0.2022, 0.1055, 0.0353, 0.0210, 0.0251], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0295, 0.0320, 0.0251, 0.0215, 0.0155, 0.0205, 0.0196], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 19:13:20,247 INFO [train.py:903] (3/4) Epoch 11, batch 2450, loss[loss=0.2486, simple_loss=0.3287, pruned_loss=0.08423, over 19495.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3088, pruned_loss=0.08119, over 3811932.51 frames. ], batch size: 64, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:13:32,426 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70737.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:13:44,507 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8710, 1.5982, 1.5764, 1.9129, 1.7509, 1.6604, 1.6068, 1.8160], + device='cuda:3'), covar=tensor([0.0851, 0.1295, 0.1228, 0.0776, 0.1026, 0.0477, 0.1127, 0.0608], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0347, 0.0287, 0.0238, 0.0296, 0.0242, 0.0276, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:14:14,278 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.717e+02 6.752e+02 8.721e+02 4.215e+03, threshold=1.350e+03, percent-clipped=8.0 +2023-04-01 19:14:25,713 INFO [train.py:903] (3/4) Epoch 11, batch 2500, loss[loss=0.2218, simple_loss=0.2857, pruned_loss=0.07898, over 19485.00 frames. ], tot_loss[loss=0.236, simple_loss=0.309, pruned_loss=0.08147, over 3810142.12 frames. ], batch size: 49, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:27,560 INFO [train.py:903] (3/4) Epoch 11, batch 2550, loss[loss=0.2457, simple_loss=0.3163, pruned_loss=0.08749, over 19656.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3096, pruned_loss=0.08171, over 3813509.52 frames. ], batch size: 60, lr: 7.63e-03, grad_scale: 8.0 +2023-04-01 19:15:46,682 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70843.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:17,534 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70868.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:16:20,506 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.586e+02 5.428e+02 6.511e+02 7.969e+02 1.423e+03, threshold=1.302e+03, percent-clipped=4.0 +2023-04-01 19:16:26,492 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 19:16:30,128 INFO [train.py:903] (3/4) Epoch 11, batch 2600, loss[loss=0.2043, simple_loss=0.2747, pruned_loss=0.06697, over 19061.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.309, pruned_loss=0.08165, over 3815994.61 frames. ], batch size: 42, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:34,823 INFO [train.py:903] (3/4) Epoch 11, batch 2650, loss[loss=0.2297, simple_loss=0.311, pruned_loss=0.0742, over 19675.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3086, pruned_loss=0.08165, over 3807427.35 frames. 
], batch size: 58, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:17:56,637 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 19:18:28,065 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 5.322e+02 6.264e+02 9.085e+02 2.401e+03, threshold=1.253e+03, percent-clipped=10.0 +2023-04-01 19:18:39,374 INFO [train.py:903] (3/4) Epoch 11, batch 2700, loss[loss=0.2025, simple_loss=0.2903, pruned_loss=0.0573, over 19355.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3079, pruned_loss=0.08102, over 3822487.18 frames. ], batch size: 70, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:18:56,261 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70993.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:18,237 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:30,052 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71018.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:19:43,444 INFO [train.py:903] (3/4) Epoch 11, batch 2750, loss[loss=0.1922, simple_loss=0.2726, pruned_loss=0.05588, over 19468.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3078, pruned_loss=0.08096, over 3811339.78 frames. ], batch size: 49, lr: 7.62e-03, grad_scale: 8.0 +2023-04-01 19:20:08,271 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71048.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:20:37,009 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.301e+02 6.565e+02 7.951e+02 1.458e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 19:20:45,875 INFO [train.py:903] (3/4) Epoch 11, batch 2800, loss[loss=0.2491, simple_loss=0.3207, pruned_loss=0.08875, over 19722.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3089, pruned_loss=0.08158, over 3809780.69 frames. ], batch size: 51, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:21:29,710 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-01 19:21:51,261 INFO [train.py:903] (3/4) Epoch 11, batch 2850, loss[loss=0.2367, simple_loss=0.3138, pruned_loss=0.07976, over 19514.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3091, pruned_loss=0.08139, over 3817723.87 frames. ], batch size: 64, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:22:45,074 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.466e+02 6.713e+02 8.640e+02 2.236e+03, threshold=1.343e+03, percent-clipped=7.0 +2023-04-01 19:22:54,322 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 19:22:55,470 INFO [train.py:903] (3/4) Epoch 11, batch 2900, loss[loss=0.226, simple_loss=0.3015, pruned_loss=0.07527, over 19783.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.309, pruned_loss=0.08136, over 3826420.01 frames. ], batch size: 54, lr: 7.61e-03, grad_scale: 8.0 +2023-04-01 19:23:03,498 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. 
limit=5.0 +2023-04-01 19:23:09,064 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9997, 3.6066, 2.4908, 3.2953, 0.9817, 3.3873, 3.3813, 3.4759], + device='cuda:3'), covar=tensor([0.0716, 0.1091, 0.1888, 0.0864, 0.3827, 0.0892, 0.0847, 0.1065], + device='cuda:3'), in_proj_covar=tensor([0.0431, 0.0366, 0.0440, 0.0321, 0.0382, 0.0368, 0.0356, 0.0393], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:24:00,112 INFO [train.py:903] (3/4) Epoch 11, batch 2950, loss[loss=0.2586, simple_loss=0.3273, pruned_loss=0.09498, over 19568.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3074, pruned_loss=0.08051, over 3842722.78 frames. ], batch size: 61, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:24:53,527 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.471e+02 6.960e+02 8.532e+02 1.699e+03, threshold=1.392e+03, percent-clipped=7.0 +2023-04-01 19:25:02,841 INFO [train.py:903] (3/4) Epoch 11, batch 3000, loss[loss=0.2284, simple_loss=0.2919, pruned_loss=0.08241, over 19030.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3081, pruned_loss=0.08092, over 3833916.42 frames. ], batch size: 42, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:25:02,841 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 19:25:16,077 INFO [train.py:937] (3/4) Epoch 11, validation: loss=0.1785, simple_loss=0.2793, pruned_loss=0.0389, over 944034.00 frames. +2023-04-01 19:25:16,079 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 19:25:20,545 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 19:25:26,652 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2490, 1.3064, 1.2113, 1.0284, 1.0405, 1.1360, 0.0326, 0.3827], + device='cuda:3'), covar=tensor([0.0451, 0.0479, 0.0295, 0.0373, 0.0969, 0.0423, 0.0867, 0.0766], + device='cuda:3'), in_proj_covar=tensor([0.0342, 0.0335, 0.0335, 0.0356, 0.0429, 0.0358, 0.0312, 0.0326], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 19:25:51,487 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2086, 2.1462, 2.3543, 3.3170, 2.1811, 3.1288, 2.8352, 2.2464], + device='cuda:3'), covar=tensor([0.3574, 0.2999, 0.1426, 0.1807, 0.3656, 0.1425, 0.3053, 0.2541], + device='cuda:3'), in_proj_covar=tensor([0.0789, 0.0803, 0.0647, 0.0890, 0.0781, 0.0701, 0.0784, 0.0705], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 19:26:01,967 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6551, 1.3325, 1.3694, 2.0288, 1.6835, 1.9905, 2.0940, 1.7713], + device='cuda:3'), covar=tensor([0.0808, 0.1023, 0.1076, 0.0834, 0.0857, 0.0661, 0.0803, 0.0666], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0226, 0.0223, 0.0250, 0.0236, 0.0215, 0.0197, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 19:26:20,107 INFO [train.py:903] (3/4) Epoch 11, batch 3050, loss[loss=0.2285, simple_loss=0.3073, pruned_loss=0.07482, over 19686.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3076, pruned_loss=0.08073, over 3828657.47 frames. 
], batch size: 59, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:26:51,684 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:27:07,121 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8399, 1.9351, 2.1233, 2.6418, 1.8285, 2.5183, 2.3669, 1.9532], + device='cuda:3'), covar=tensor([0.3473, 0.2912, 0.1366, 0.1698, 0.3310, 0.1429, 0.3290, 0.2613], + device='cuda:3'), in_proj_covar=tensor([0.0786, 0.0802, 0.0645, 0.0888, 0.0777, 0.0699, 0.0783, 0.0703], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 19:27:13,588 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.484e+02 7.005e+02 9.170e+02 2.820e+03, threshold=1.401e+03, percent-clipped=8.0 +2023-04-01 19:27:22,999 INFO [train.py:903] (3/4) Epoch 11, batch 3100, loss[loss=0.2858, simple_loss=0.3405, pruned_loss=0.1155, over 13264.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3092, pruned_loss=0.08158, over 3811927.55 frames. ], batch size: 136, lr: 7.60e-03, grad_scale: 8.0 +2023-04-01 19:27:27,833 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1889, 1.6587, 1.7798, 2.6336, 2.0904, 2.5680, 2.5809, 2.2765], + device='cuda:3'), covar=tensor([0.0735, 0.0927, 0.0991, 0.0878, 0.0861, 0.0616, 0.0871, 0.0639], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0228, 0.0224, 0.0251, 0.0238, 0.0215, 0.0198, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 19:27:40,191 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71392.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:28:26,276 INFO [train.py:903] (3/4) Epoch 11, batch 3150, loss[loss=0.2262, simple_loss=0.2873, pruned_loss=0.08257, over 19056.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3096, pruned_loss=0.08217, over 3808658.25 frames. ], batch size: 42, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:28:55,925 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 19:28:57,864 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-01 19:29:17,641 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:29:19,592 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 5.450e+02 6.508e+02 8.826e+02 1.534e+03, threshold=1.302e+03, percent-clipped=1.0 +2023-04-01 19:29:31,374 INFO [train.py:903] (3/4) Epoch 11, batch 3200, loss[loss=0.2018, simple_loss=0.2751, pruned_loss=0.06429, over 19785.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3082, pruned_loss=0.08125, over 3808380.40 frames. ], batch size: 47, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:05,880 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71507.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:30:32,440 INFO [train.py:903] (3/4) Epoch 11, batch 3250, loss[loss=0.2928, simple_loss=0.3529, pruned_loss=0.1164, over 19529.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3094, pruned_loss=0.08214, over 3808016.65 frames. 
], batch size: 54, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:30:35,032 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2404, 1.3992, 1.8164, 1.4416, 2.6647, 2.1627, 2.6742, 1.0459], + device='cuda:3'), covar=tensor([0.2259, 0.3841, 0.2194, 0.1761, 0.1337, 0.1850, 0.1497, 0.3787], + device='cuda:3'), in_proj_covar=tensor([0.0490, 0.0575, 0.0601, 0.0441, 0.0595, 0.0493, 0.0649, 0.0497], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 19:31:26,708 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.701e+02 5.157e+02 6.415e+02 8.641e+02 1.397e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-04-01 19:31:36,054 INFO [train.py:903] (3/4) Epoch 11, batch 3300, loss[loss=0.2574, simple_loss=0.3211, pruned_loss=0.09686, over 19650.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3099, pruned_loss=0.08255, over 3800692.80 frames. ], batch size: 60, lr: 7.59e-03, grad_scale: 8.0 +2023-04-01 19:31:41,724 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 19:32:32,427 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-01 19:32:39,937 INFO [train.py:903] (3/4) Epoch 11, batch 3350, loss[loss=0.2531, simple_loss=0.3277, pruned_loss=0.08932, over 19324.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3107, pruned_loss=0.08281, over 3811064.82 frames. ], batch size: 70, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:32:47,341 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:33:34,251 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.202e+02 6.815e+02 8.173e+02 2.322e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 19:33:44,446 INFO [train.py:903] (3/4) Epoch 11, batch 3400, loss[loss=0.2064, simple_loss=0.2777, pruned_loss=0.06757, over 19306.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.31, pruned_loss=0.08207, over 3822045.58 frames. ], batch size: 44, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:40,425 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8272, 1.2019, 0.9383, 0.8884, 1.0799, 0.8978, 0.8730, 1.1302], + device='cuda:3'), covar=tensor([0.0535, 0.0700, 0.0932, 0.0552, 0.0445, 0.0971, 0.0518, 0.0400], + device='cuda:3'), in_proj_covar=tensor([0.0291, 0.0301, 0.0329, 0.0250, 0.0235, 0.0321, 0.0289, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:34:44,138 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71725.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:34:48,505 INFO [train.py:903] (3/4) Epoch 11, batch 3450, loss[loss=0.2226, simple_loss=0.3033, pruned_loss=0.07097, over 17302.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3109, pruned_loss=0.08266, over 3812894.68 frames. ], batch size: 101, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:34:52,161 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-01 19:35:16,030 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:33,077 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71763.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:35:42,113 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.543e+02 5.649e+02 6.835e+02 8.667e+02 1.565e+03, threshold=1.367e+03, percent-clipped=4.0 +2023-04-01 19:35:52,284 INFO [train.py:903] (3/4) Epoch 11, batch 3500, loss[loss=0.2494, simple_loss=0.3373, pruned_loss=0.08073, over 19664.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3112, pruned_loss=0.08259, over 3815899.90 frames. ], batch size: 58, lr: 7.58e-03, grad_scale: 8.0 +2023-04-01 19:36:04,530 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:36:33,912 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8252, 1.8053, 2.0170, 2.4414, 1.7686, 2.4150, 2.1872, 1.9049], + device='cuda:3'), covar=tensor([0.3308, 0.2878, 0.1533, 0.1548, 0.2913, 0.1334, 0.3633, 0.2736], + device='cuda:3'), in_proj_covar=tensor([0.0787, 0.0805, 0.0648, 0.0891, 0.0779, 0.0703, 0.0785, 0.0709], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 19:36:56,412 INFO [train.py:903] (3/4) Epoch 11, batch 3550, loss[loss=0.1946, simple_loss=0.2856, pruned_loss=0.05182, over 19695.00 frames. ], tot_loss[loss=0.237, simple_loss=0.31, pruned_loss=0.08199, over 3805516.69 frames. ], batch size: 53, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:37:00,714 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.84 vs. limit=5.0 +2023-04-01 19:37:49,026 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.129e+02 6.236e+02 8.069e+02 1.994e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-01 19:37:58,900 INFO [train.py:903] (3/4) Epoch 11, batch 3600, loss[loss=0.3396, simple_loss=0.3793, pruned_loss=0.1499, over 19702.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3102, pruned_loss=0.08264, over 3804245.40 frames. ], batch size: 59, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:03,005 INFO [train.py:903] (3/4) Epoch 11, batch 3650, loss[loss=0.1883, simple_loss=0.2701, pruned_loss=0.05324, over 19611.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3101, pruned_loss=0.0824, over 3802783.54 frames. ], batch size: 50, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:39:13,492 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:39:55,936 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 5.422e+02 6.726e+02 7.997e+02 1.955e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-04-01 19:40:05,239 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:40:06,256 INFO [train.py:903] (3/4) Epoch 11, batch 3700, loss[loss=0.1885, simple_loss=0.2682, pruned_loss=0.05439, over 19754.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3096, pruned_loss=0.08225, over 3808130.34 frames. ], batch size: 54, lr: 7.57e-03, grad_scale: 8.0 +2023-04-01 19:41:02,232 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. 
limit=2.0 +2023-04-01 19:41:11,922 INFO [train.py:903] (3/4) Epoch 11, batch 3750, loss[loss=0.2146, simple_loss=0.2811, pruned_loss=0.07402, over 19396.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3091, pruned_loss=0.08197, over 3815631.67 frames. ], batch size: 48, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:06,342 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.116e+02 5.141e+02 6.053e+02 7.322e+02 1.150e+03, threshold=1.211e+03, percent-clipped=0.0 +2023-04-01 19:42:17,092 INFO [train.py:903] (3/4) Epoch 11, batch 3800, loss[loss=0.2515, simple_loss=0.325, pruned_loss=0.08895, over 19527.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3088, pruned_loss=0.08198, over 3814220.42 frames. ], batch size: 54, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:42:22,597 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-01 19:42:35,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:42:46,960 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 19:43:21,150 INFO [train.py:903] (3/4) Epoch 11, batch 3850, loss[loss=0.254, simple_loss=0.3318, pruned_loss=0.08813, over 19543.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3093, pruned_loss=0.08186, over 3821603.62 frames. ], batch size: 56, lr: 7.56e-03, grad_scale: 8.0 +2023-04-01 19:43:45,136 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:44:16,373 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 5.600e+02 6.929e+02 8.545e+02 2.074e+03, threshold=1.386e+03, percent-clipped=5.0 +2023-04-01 19:44:25,938 INFO [train.py:903] (3/4) Epoch 11, batch 3900, loss[loss=0.2818, simple_loss=0.3523, pruned_loss=0.1057, over 19448.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.308, pruned_loss=0.0812, over 3824311.47 frames. ], batch size: 64, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:04,323 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0674, 1.4216, 2.0972, 1.7455, 3.1806, 4.6771, 4.5650, 5.0794], + device='cuda:3'), covar=tensor([0.1630, 0.3234, 0.2750, 0.1812, 0.0436, 0.0140, 0.0141, 0.0106], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0294, 0.0321, 0.0251, 0.0213, 0.0154, 0.0206, 0.0199], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 19:45:30,663 INFO [train.py:903] (3/4) Epoch 11, batch 3950, loss[loss=0.2953, simple_loss=0.3554, pruned_loss=0.1176, over 19680.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3077, pruned_loss=0.08084, over 3830783.45 frames. ], batch size: 58, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:45:31,939 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-01 19:46:23,817 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 5.760e+02 7.487e+02 9.416e+02 2.541e+03, threshold=1.497e+03, percent-clipped=9.0 +2023-04-01 19:46:34,098 INFO [train.py:903] (3/4) Epoch 11, batch 4000, loss[loss=0.2742, simple_loss=0.33, pruned_loss=0.1092, over 19678.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3097, pruned_loss=0.0821, over 3818250.95 frames. 
], batch size: 60, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:46:36,456 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72281.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:47:22,644 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 19:47:37,789 INFO [train.py:903] (3/4) Epoch 11, batch 4050, loss[loss=0.2514, simple_loss=0.3281, pruned_loss=0.08732, over 19534.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3088, pruned_loss=0.08129, over 3816326.23 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 8.0 +2023-04-01 19:47:57,886 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72344.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:05,163 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:33,044 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.561e+02 4.924e+02 6.616e+02 8.581e+02 2.031e+03, threshold=1.323e+03, percent-clipped=3.0 +2023-04-01 19:48:37,092 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:48:43,657 INFO [train.py:903] (3/4) Epoch 11, batch 4100, loss[loss=0.2562, simple_loss=0.3324, pruned_loss=0.08998, over 19305.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3092, pruned_loss=0.08139, over 3806614.10 frames. ], batch size: 66, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:49:05,298 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72396.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:49:18,919 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 19:49:48,308 INFO [train.py:903] (3/4) Epoch 11, batch 4150, loss[loss=0.2986, simple_loss=0.3495, pruned_loss=0.1239, over 13769.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3071, pruned_loss=0.08009, over 3815109.30 frames. 
], batch size: 136, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:50:01,691 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2391, 3.7651, 3.8697, 3.8714, 1.5034, 3.6584, 3.1859, 3.5856], + device='cuda:3'), covar=tensor([0.1448, 0.0791, 0.0629, 0.0690, 0.4885, 0.0687, 0.0661, 0.1171], + device='cuda:3'), in_proj_covar=tensor([0.0665, 0.0601, 0.0793, 0.0674, 0.0723, 0.0550, 0.0485, 0.0733], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 19:50:42,219 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.902e+02 6.015e+02 8.099e+02 1.569e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-01 19:50:42,505 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.8380, 5.1925, 2.9916, 4.5416, 1.2458, 5.1753, 5.1951, 5.3370], + device='cuda:3'), covar=tensor([0.0401, 0.0913, 0.1870, 0.0642, 0.3720, 0.0566, 0.0649, 0.0817], + device='cuda:3'), in_proj_covar=tensor([0.0433, 0.0370, 0.0440, 0.0320, 0.0379, 0.0373, 0.0358, 0.0393], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:50:47,126 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9600, 4.3571, 4.6640, 4.6521, 1.8322, 4.2985, 3.8035, 4.3261], + device='cuda:3'), covar=tensor([0.1390, 0.0842, 0.0515, 0.0563, 0.4872, 0.0704, 0.0576, 0.1023], + device='cuda:3'), in_proj_covar=tensor([0.0668, 0.0602, 0.0795, 0.0676, 0.0724, 0.0552, 0.0485, 0.0734], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 19:50:51,449 INFO [train.py:903] (3/4) Epoch 11, batch 4200, loss[loss=0.2152, simple_loss=0.2848, pruned_loss=0.07277, over 19801.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3059, pruned_loss=0.07954, over 3811659.63 frames. ], batch size: 48, lr: 7.54e-03, grad_scale: 16.0 +2023-04-01 19:50:57,210 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 19:51:07,242 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72491.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:51:56,566 INFO [train.py:903] (3/4) Epoch 11, batch 4250, loss[loss=0.2781, simple_loss=0.3416, pruned_loss=0.1073, over 19585.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3063, pruned_loss=0.0794, over 3829435.89 frames. ], batch size: 61, lr: 7.54e-03, grad_scale: 8.0 +2023-04-01 19:52:17,367 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 19:52:28,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-01 19:52:33,232 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5791, 1.1297, 1.3435, 1.2221, 2.1893, 0.9653, 1.9586, 2.3341], + device='cuda:3'), covar=tensor([0.0644, 0.2606, 0.2554, 0.1492, 0.0837, 0.1943, 0.0948, 0.0509], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0339, 0.0352, 0.0322, 0.0348, 0.0333, 0.0336, 0.0354], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:52:51,987 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.433e+02 6.415e+02 7.675e+02 1.468e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-01 19:53:01,646 INFO [train.py:903] (3/4) Epoch 11, batch 4300, loss[loss=0.2026, simple_loss=0.2776, pruned_loss=0.06383, over 19752.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3055, pruned_loss=0.07896, over 3828494.67 frames. ], batch size: 45, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:53:36,425 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72606.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:00,227 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 19:54:04,816 INFO [train.py:903] (3/4) Epoch 11, batch 4350, loss[loss=0.2581, simple_loss=0.3242, pruned_loss=0.09598, over 19343.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3072, pruned_loss=0.08008, over 3823637.91 frames. ], batch size: 66, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:54:34,068 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:54:58,904 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.206e+02 6.524e+02 8.372e+02 1.509e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-01 19:55:06,355 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72677.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:08,342 INFO [train.py:903] (3/4) Epoch 11, batch 4400, loss[loss=0.2302, simple_loss=0.3084, pruned_loss=0.07604, over 18035.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3074, pruned_loss=0.08022, over 3815566.80 frames. ], batch size: 83, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:55:20,145 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:55:33,215 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6947, 1.3904, 1.5151, 1.4863, 3.1775, 0.9661, 2.2779, 3.6028], + device='cuda:3'), covar=tensor([0.0399, 0.2671, 0.2618, 0.1816, 0.0679, 0.2650, 0.1261, 0.0257], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0337, 0.0351, 0.0321, 0.0344, 0.0331, 0.0335, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:55:37,549 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 19:55:47,769 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 19:56:12,261 INFO [train.py:903] (3/4) Epoch 11, batch 4450, loss[loss=0.2467, simple_loss=0.3242, pruned_loss=0.08463, over 19681.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3068, pruned_loss=0.07994, over 3817417.84 frames. 
], batch size: 59, lr: 7.53e-03, grad_scale: 8.0 +2023-04-01 19:57:06,050 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.681e+02 5.500e+02 6.922e+02 8.863e+02 1.965e+03, threshold=1.384e+03, percent-clipped=10.0 +2023-04-01 19:57:14,277 INFO [train.py:903] (3/4) Epoch 11, batch 4500, loss[loss=0.2303, simple_loss=0.312, pruned_loss=0.07429, over 19431.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3082, pruned_loss=0.08121, over 3788983.22 frames. ], batch size: 70, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:57:46,499 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72803.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:58:19,535 INFO [train.py:903] (3/4) Epoch 11, batch 4550, loss[loss=0.2246, simple_loss=0.2879, pruned_loss=0.08065, over 19373.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3075, pruned_loss=0.08101, over 3798305.16 frames. ], batch size: 47, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:58:28,663 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 19:58:33,821 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9105, 4.4131, 3.0115, 3.9639, 1.2602, 4.1946, 4.2584, 4.4321], + device='cuda:3'), covar=tensor([0.0548, 0.1029, 0.1701, 0.0703, 0.3711, 0.0747, 0.0706, 0.0976], + device='cuda:3'), in_proj_covar=tensor([0.0432, 0.0366, 0.0436, 0.0319, 0.0378, 0.0374, 0.0359, 0.0392], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 19:58:53,316 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 19:59:02,258 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72862.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 19:59:11,645 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 19:59:15,577 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.895e+02 7.318e+02 9.053e+02 1.617e+03, threshold=1.464e+03, percent-clipped=1.0 +2023-04-01 19:59:23,898 INFO [train.py:903] (3/4) Epoch 11, batch 4600, loss[loss=0.2251, simple_loss=0.3049, pruned_loss=0.07262, over 19677.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3074, pruned_loss=0.08052, over 3803772.03 frames. ], batch size: 58, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 19:59:34,635 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72887.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:04,228 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 20:00:06,142 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72912.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:00:27,758 INFO [train.py:903] (3/4) Epoch 11, batch 4650, loss[loss=0.2651, simple_loss=0.3369, pruned_loss=0.09665, over 19780.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.308, pruned_loss=0.08067, over 3816021.48 frames. ], batch size: 63, lr: 7.52e-03, grad_scale: 8.0 +2023-04-01 20:00:46,742 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 20:00:57,786 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-01 20:01:22,036 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.839e+02 5.234e+02 6.593e+02 8.122e+02 1.324e+03, threshold=1.319e+03, percent-clipped=0.0 +2023-04-01 20:01:30,179 INFO [train.py:903] (3/4) Epoch 11, batch 4700, loss[loss=0.2332, simple_loss=0.3115, pruned_loss=0.07745, over 18843.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3087, pruned_loss=0.0816, over 3810050.04 frames. ], batch size: 74, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:01:34,994 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72982.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:01:55,752 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 20:02:34,858 INFO [train.py:903] (3/4) Epoch 11, batch 4750, loss[loss=0.2401, simple_loss=0.3202, pruned_loss=0.08001, over 19510.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3093, pruned_loss=0.08206, over 3807208.88 frames. ], batch size: 64, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:12,008 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:03:28,793 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.728e+02 6.958e+02 8.518e+02 2.013e+03, threshold=1.392e+03, percent-clipped=2.0 +2023-04-01 20:03:37,835 INFO [train.py:903] (3/4) Epoch 11, batch 4800, loss[loss=0.1839, simple_loss=0.2625, pruned_loss=0.05264, over 19768.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3079, pruned_loss=0.0812, over 3816358.86 frames. ], batch size: 47, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:03:38,480 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 20:03:45,156 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73084.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:04:18,448 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1302, 5.4338, 3.0359, 4.8452, 1.2660, 5.5896, 5.5078, 5.6578], + device='cuda:3'), covar=tensor([0.0364, 0.0783, 0.1677, 0.0541, 0.3595, 0.0468, 0.0594, 0.0662], + device='cuda:3'), in_proj_covar=tensor([0.0430, 0.0364, 0.0433, 0.0315, 0.0376, 0.0370, 0.0356, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:04:26,318 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2122, 1.2361, 1.5894, 1.3857, 2.1873, 1.8523, 2.1608, 0.8831], + device='cuda:3'), covar=tensor([0.2412, 0.4247, 0.2450, 0.1992, 0.1498, 0.2264, 0.1495, 0.4001], + device='cuda:3'), in_proj_covar=tensor([0.0490, 0.0577, 0.0601, 0.0438, 0.0591, 0.0493, 0.0645, 0.0497], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 20:04:40,996 INFO [train.py:903] (3/4) Epoch 11, batch 4850, loss[loss=0.2213, simple_loss=0.3017, pruned_loss=0.0704, over 19535.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3077, pruned_loss=0.08087, over 3814059.16 frames. ], batch size: 54, lr: 7.51e-03, grad_scale: 8.0 +2023-04-01 20:05:01,955 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 20:05:22,855 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-01 20:05:29,940 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 20:05:29,972 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 20:05:35,782 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.053e+02 5.592e+02 6.791e+02 8.044e+02 1.982e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 20:05:39,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 20:05:44,088 INFO [train.py:903] (3/4) Epoch 11, batch 4900, loss[loss=0.2338, simple_loss=0.3225, pruned_loss=0.07253, over 19663.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3084, pruned_loss=0.08157, over 3812230.42 frames. ], batch size: 59, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:05:57,049 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3445, 1.0688, 1.3754, 1.4783, 2.8173, 1.0551, 2.0385, 3.1424], + device='cuda:3'), covar=tensor([0.0505, 0.3022, 0.2965, 0.1819, 0.0829, 0.2567, 0.1332, 0.0378], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0337, 0.0352, 0.0321, 0.0344, 0.0332, 0.0331, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:06:01,217 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-01 20:06:48,395 INFO [train.py:903] (3/4) Epoch 11, batch 4950, loss[loss=0.2411, simple_loss=0.3232, pruned_loss=0.07952, over 17314.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.309, pruned_loss=0.08189, over 3815630.65 frames. ], batch size: 101, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:00,958 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 20:07:21,299 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73256.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:07:24,664 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 20:07:42,674 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.726e+02 5.448e+02 7.218e+02 1.051e+03 2.092e+03, threshold=1.444e+03, percent-clipped=5.0 +2023-04-01 20:07:51,012 INFO [train.py:903] (3/4) Epoch 11, batch 5000, loss[loss=0.1952, simple_loss=0.2687, pruned_loss=0.06086, over 19765.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3093, pruned_loss=0.08199, over 3816147.04 frames. ], batch size: 46, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:07:55,873 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 20:08:08,617 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 20:08:13,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6237, 1.3803, 1.5090, 2.0336, 1.5820, 1.8380, 1.9945, 1.6572], + device='cuda:3'), covar=tensor([0.0868, 0.1050, 0.1042, 0.0795, 0.0898, 0.0777, 0.0851, 0.0703], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0223, 0.0250, 0.0235, 0.0213, 0.0194, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 20:08:32,077 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:08:50,645 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73326.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:08:54,854 INFO [train.py:903] (3/4) Epoch 11, batch 5050, loss[loss=0.216, simple_loss=0.288, pruned_loss=0.07204, over 19494.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3076, pruned_loss=0.08114, over 3831566.51 frames. ], batch size: 49, lr: 7.50e-03, grad_scale: 8.0 +2023-04-01 20:09:28,954 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 20:09:47,530 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73371.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:09:48,284 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.337e+02 6.487e+02 8.820e+02 1.986e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-01 20:09:56,474 INFO [train.py:903] (3/4) Epoch 11, batch 5100, loss[loss=0.2421, simple_loss=0.3097, pruned_loss=0.0872, over 19053.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3081, pruned_loss=0.08139, over 3828967.42 frames. ], batch size: 69, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:10:04,910 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 20:10:10,270 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 20:10:13,776 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 20:10:22,146 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2342, 5.5420, 3.0786, 4.8779, 1.6187, 5.5765, 5.5244, 5.7518], + device='cuda:3'), covar=tensor([0.0436, 0.0981, 0.1934, 0.0575, 0.3651, 0.0577, 0.0634, 0.0705], + device='cuda:3'), in_proj_covar=tensor([0.0432, 0.0363, 0.0437, 0.0317, 0.0379, 0.0373, 0.0357, 0.0390], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:11:00,244 INFO [train.py:903] (3/4) Epoch 11, batch 5150, loss[loss=0.2435, simple_loss=0.3206, pruned_loss=0.08321, over 19693.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3087, pruned_loss=0.08156, over 3809654.14 frames. ], batch size: 59, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:11:09,675 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 20:11:16,054 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73441.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:11:31,599 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5517, 1.3549, 1.3886, 1.2762, 3.1050, 0.9346, 2.1413, 3.4404], + device='cuda:3'), covar=tensor([0.0467, 0.2450, 0.2719, 0.1907, 0.0714, 0.2562, 0.1213, 0.0280], + device='cuda:3'), in_proj_covar=tensor([0.0354, 0.0334, 0.0348, 0.0320, 0.0344, 0.0331, 0.0328, 0.0350], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:11:43,042 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:11:50,721 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-01 20:11:54,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.282e+02 5.910e+02 7.077e+02 9.438e+02 2.777e+03, threshold=1.415e+03, percent-clipped=4.0 +2023-04-01 20:12:04,004 INFO [train.py:903] (3/4) Epoch 11, batch 5200, loss[loss=0.2373, simple_loss=0.3149, pruned_loss=0.07988, over 19609.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3099, pruned_loss=0.08204, over 3807996.05 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:12:16,631 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 20:12:29,016 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3354, 2.3041, 1.7666, 1.5976, 2.2666, 1.2027, 1.1910, 1.8603], + device='cuda:3'), covar=tensor([0.0952, 0.0604, 0.0951, 0.0677, 0.0365, 0.1118, 0.0804, 0.0402], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0304, 0.0332, 0.0251, 0.0235, 0.0324, 0.0290, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:13:00,027 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 20:13:07,097 INFO [train.py:903] (3/4) Epoch 11, batch 5250, loss[loss=0.2111, simple_loss=0.2925, pruned_loss=0.06491, over 19690.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3093, pruned_loss=0.08134, over 3798850.00 frames. ], batch size: 59, lr: 7.49e-03, grad_scale: 8.0 +2023-04-01 20:14:02,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 5.338e+02 6.658e+02 8.757e+02 1.866e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-04-01 20:14:11,099 INFO [train.py:903] (3/4) Epoch 11, batch 5300, loss[loss=0.2648, simple_loss=0.3337, pruned_loss=0.09789, over 19666.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3086, pruned_loss=0.08131, over 3816277.85 frames. ], batch size: 60, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:14:26,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 20:14:49,242 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73608.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:14:50,946 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-04-01 20:15:13,596 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73627.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:15,330 INFO [train.py:903] (3/4) Epoch 11, batch 5350, loss[loss=0.3298, simple_loss=0.3722, pruned_loss=0.1437, over 13118.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3084, pruned_loss=0.08106, over 3811875.06 frames. ], batch size: 135, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:15:46,116 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73652.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:15:46,874 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 20:15:49,349 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73655.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:16:09,903 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.285e+02 6.573e+02 8.256e+02 1.333e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-04-01 20:16:19,917 INFO [train.py:903] (3/4) Epoch 11, batch 5400, loss[loss=0.2823, simple_loss=0.3367, pruned_loss=0.1139, over 13456.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3079, pruned_loss=0.08018, over 3810585.04 frames. ], batch size: 136, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:16:41,417 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73697.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 20:17:13,034 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73722.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 20:17:20,604 INFO [train.py:903] (3/4) Epoch 11, batch 5450, loss[loss=0.23, simple_loss=0.3093, pruned_loss=0.07541, over 19779.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3079, pruned_loss=0.08025, over 3830188.20 frames. ], batch size: 56, lr: 7.48e-03, grad_scale: 8.0 +2023-04-01 20:17:45,902 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:12,070 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73770.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:18:14,003 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.707e+02 7.323e+02 9.356e+02 2.024e+03, threshold=1.465e+03, percent-clipped=8.0 +2023-04-01 20:18:23,353 INFO [train.py:903] (3/4) Epoch 11, batch 5500, loss[loss=0.2568, simple_loss=0.33, pruned_loss=0.09186, over 19572.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3083, pruned_loss=0.08104, over 3831499.55 frames. ], batch size: 61, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:18:50,412 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 20:19:25,475 INFO [train.py:903] (3/4) Epoch 11, batch 5550, loss[loss=0.2436, simple_loss=0.3166, pruned_loss=0.08531, over 19666.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3085, pruned_loss=0.08132, over 3837108.09 frames. ], batch size: 58, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:19:35,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 20:20:04,784 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1256, 1.2577, 1.6816, 0.9634, 2.3236, 2.9089, 2.6792, 3.1115], + device='cuda:3'), covar=tensor([0.1562, 0.3397, 0.2909, 0.2332, 0.0543, 0.0282, 0.0266, 0.0260], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0292, 0.0321, 0.0248, 0.0212, 0.0156, 0.0205, 0.0200], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 20:20:20,661 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.761e+02 6.752e+02 8.536e+02 1.570e+03, threshold=1.350e+03, percent-clipped=1.0 +2023-04-01 20:20:26,366 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 20:20:29,863 INFO [train.py:903] (3/4) Epoch 11, batch 5600, loss[loss=0.2941, simple_loss=0.3566, pruned_loss=0.1158, over 13122.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3088, pruned_loss=0.08147, over 3809441.77 frames. ], batch size: 136, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:03,558 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-01 20:21:34,086 INFO [train.py:903] (3/4) Epoch 11, batch 5650, loss[loss=0.2507, simple_loss=0.3337, pruned_loss=0.08392, over 19591.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3087, pruned_loss=0.08128, over 3825253.59 frames. ], batch size: 61, lr: 7.47e-03, grad_scale: 8.0 +2023-04-01 20:21:48,845 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5694, 1.2130, 1.2200, 1.4185, 1.0521, 1.3736, 1.2356, 1.3664], + device='cuda:3'), covar=tensor([0.0998, 0.1175, 0.1439, 0.0930, 0.1189, 0.0554, 0.1281, 0.0821], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0351, 0.0289, 0.0239, 0.0295, 0.0244, 0.0279, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:21:50,105 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3397, 1.3939, 1.7988, 1.6426, 2.9711, 2.4753, 3.3404, 1.3704], + device='cuda:3'), covar=tensor([0.2261, 0.3832, 0.2393, 0.1682, 0.1605, 0.1922, 0.1732, 0.3663], + device='cuda:3'), in_proj_covar=tensor([0.0494, 0.0578, 0.0603, 0.0439, 0.0597, 0.0494, 0.0648, 0.0496], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 20:21:59,920 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-01 20:22:03,008 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73952.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:22:23,885 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 20:22:28,535 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.880e+02 7.006e+02 8.632e+02 1.687e+03, threshold=1.401e+03, percent-clipped=2.0 +2023-04-01 20:22:37,797 INFO [train.py:903] (3/4) Epoch 11, batch 5700, loss[loss=0.2493, simple_loss=0.3308, pruned_loss=0.08395, over 19532.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.309, pruned_loss=0.08103, over 3823179.92 frames. 
], batch size: 56, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:38,871 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74026.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:23:42,022 INFO [train.py:903] (3/4) Epoch 11, batch 5750, loss[loss=0.227, simple_loss=0.2944, pruned_loss=0.07981, over 19328.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3079, pruned_loss=0.08047, over 3825280.55 frames. ], batch size: 47, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:23:43,290 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 20:23:51,340 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 20:23:56,806 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-01 20:24:08,305 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-01 20:24:11,336 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74051.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:17,091 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.39 vs. limit=5.0 +2023-04-01 20:24:30,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74067.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:24:36,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.413e+02 6.547e+02 7.617e+02 1.298e+03, threshold=1.309e+03, percent-clipped=0.0 +2023-04-01 20:24:37,154 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-01 20:24:45,231 INFO [train.py:903] (3/4) Epoch 11, batch 5800, loss[loss=0.2412, simple_loss=0.3022, pruned_loss=0.09014, over 19407.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3076, pruned_loss=0.07971, over 3832716.77 frames. ], batch size: 48, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:24:50,607 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74082.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:04,163 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74094.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:25:48,486 INFO [train.py:903] (3/4) Epoch 11, batch 5850, loss[loss=0.231, simple_loss=0.3068, pruned_loss=0.07764, over 19605.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3086, pruned_loss=0.08042, over 3841282.25 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 8.0 +2023-04-01 20:25:55,540 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9064, 1.5223, 1.5751, 2.2089, 1.7240, 2.1309, 2.1265, 2.0439], + device='cuda:3'), covar=tensor([0.0738, 0.0970, 0.0976, 0.0791, 0.0879, 0.0638, 0.0880, 0.0578], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0230, 0.0225, 0.0248, 0.0237, 0.0214, 0.0195, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 20:26:42,000 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.778e+02 5.734e+02 7.251e+02 9.100e+02 2.751e+03, threshold=1.450e+03, percent-clipped=7.0 +2023-04-01 20:26:51,311 INFO [train.py:903] (3/4) Epoch 11, batch 5900, loss[loss=0.201, simple_loss=0.2737, pruned_loss=0.0642, over 19295.00 frames. 
], tot_loss[loss=0.2363, simple_loss=0.3095, pruned_loss=0.0815, over 3830255.06 frames. ], batch size: 44, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:26:53,560 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 20:27:12,262 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.94 vs. limit=5.0 +2023-04-01 20:27:15,048 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 20:27:30,897 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74209.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:27:55,168 INFO [train.py:903] (3/4) Epoch 11, batch 5950, loss[loss=0.2505, simple_loss=0.3207, pruned_loss=0.09012, over 13501.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3088, pruned_loss=0.08112, over 3811840.44 frames. ], batch size: 136, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:33,398 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74258.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:28:49,684 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 4.883e+02 6.160e+02 7.607e+02 1.521e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-01 20:28:59,177 INFO [train.py:903] (3/4) Epoch 11, batch 6000, loss[loss=0.2143, simple_loss=0.2926, pruned_loss=0.06798, over 19680.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3076, pruned_loss=0.07998, over 3825993.16 frames. ], batch size: 53, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:28:59,177 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 20:29:11,904 INFO [train.py:937] (3/4) Epoch 11, validation: loss=0.1778, simple_loss=0.2787, pruned_loss=0.03847, over 944034.00 frames. +2023-04-01 20:29:11,906 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 20:30:09,794 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74323.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:30:18,373 INFO [train.py:903] (3/4) Epoch 11, batch 6050, loss[loss=0.204, simple_loss=0.2837, pruned_loss=0.0622, over 19613.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3072, pruned_loss=0.07964, over 3814500.28 frames. ], batch size: 50, lr: 7.45e-03, grad_scale: 8.0 +2023-04-01 20:30:42,852 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74348.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:31:12,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.290e+02 5.870e+02 7.295e+02 8.913e+02 1.728e+03, threshold=1.459e+03, percent-clipped=8.0 +2023-04-01 20:31:21,969 INFO [train.py:903] (3/4) Epoch 11, batch 6100, loss[loss=0.1858, simple_loss=0.2639, pruned_loss=0.05388, over 19734.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3071, pruned_loss=0.07954, over 3816190.56 frames. ], batch size: 47, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:22,432 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74426.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:32:25,901 INFO [train.py:903] (3/4) Epoch 11, batch 6150, loss[loss=0.1984, simple_loss=0.2774, pruned_loss=0.05975, over 19592.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3071, pruned_loss=0.07996, over 3814888.58 frames. 
], batch size: 52, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:32:37,704 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8828, 1.9879, 2.1352, 2.6839, 1.8299, 2.5230, 2.3611, 2.0511], + device='cuda:3'), covar=tensor([0.3429, 0.2840, 0.1422, 0.1871, 0.3276, 0.1437, 0.3420, 0.2522], + device='cuda:3'), in_proj_covar=tensor([0.0790, 0.0811, 0.0644, 0.0894, 0.0779, 0.0699, 0.0783, 0.0708], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 20:32:54,707 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 20:33:12,618 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74465.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:33:20,927 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.576e+02 6.382e+02 8.608e+02 2.367e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-01 20:33:29,315 INFO [train.py:903] (3/4) Epoch 11, batch 6200, loss[loss=0.2664, simple_loss=0.3327, pruned_loss=0.1, over 19778.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3082, pruned_loss=0.08125, over 3823765.87 frames. ], batch size: 54, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:33:43,962 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74490.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:34:31,909 INFO [train.py:903] (3/4) Epoch 11, batch 6250, loss[loss=0.234, simple_loss=0.3113, pruned_loss=0.07835, over 19824.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3077, pruned_loss=0.08054, over 3829893.00 frames. ], batch size: 49, lr: 7.44e-03, grad_scale: 8.0 +2023-04-01 20:34:48,642 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74541.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:35:02,958 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 20:35:30,746 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 5.334e+02 6.699e+02 8.918e+02 1.691e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-01 20:35:38,652 INFO [train.py:903] (3/4) Epoch 11, batch 6300, loss[loss=0.2689, simple_loss=0.3302, pruned_loss=0.1038, over 19680.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3065, pruned_loss=0.07961, over 3824881.62 frames. ], batch size: 60, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:36:06,824 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74602.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:21,252 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74613.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:36:40,495 INFO [train.py:903] (3/4) Epoch 11, batch 6350, loss[loss=0.3293, simple_loss=0.3801, pruned_loss=0.1392, over 19665.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3069, pruned_loss=0.07996, over 3838217.92 frames. ], batch size: 58, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:36,496 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 5.622e+02 6.880e+02 9.256e+02 3.776e+03, threshold=1.376e+03, percent-clipped=5.0 +2023-04-01 20:37:44,833 INFO [train.py:903] (3/4) Epoch 11, batch 6400, loss[loss=0.2263, simple_loss=0.2958, pruned_loss=0.07844, over 19628.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.307, pruned_loss=0.08009, over 3831037.79 frames. 
], batch size: 50, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:37:56,012 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 20:38:02,527 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6443, 2.2278, 2.2060, 2.8228, 2.7293, 2.1904, 2.2511, 2.7713], + device='cuda:3'), covar=tensor([0.0783, 0.1563, 0.1252, 0.0903, 0.1078, 0.0483, 0.1069, 0.0539], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0351, 0.0291, 0.0238, 0.0298, 0.0243, 0.0278, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:38:33,504 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74717.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:38:46,864 INFO [train.py:903] (3/4) Epoch 11, batch 6450, loss[loss=0.2529, simple_loss=0.3305, pruned_loss=0.08761, over 19467.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3082, pruned_loss=0.08083, over 3826239.64 frames. ], batch size: 64, lr: 7.43e-03, grad_scale: 8.0 +2023-04-01 20:39:25,311 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1685, 1.8392, 1.7471, 2.1388, 2.0927, 1.8905, 1.7201, 2.1578], + device='cuda:3'), covar=tensor([0.0881, 0.1556, 0.1391, 0.0931, 0.1186, 0.0475, 0.1186, 0.0639], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0351, 0.0293, 0.0239, 0.0298, 0.0244, 0.0279, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:39:28,492 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 20:39:44,916 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 5.590e+02 7.165e+02 9.175e+02 2.194e+03, threshold=1.433e+03, percent-clipped=7.0 +2023-04-01 20:39:53,331 INFO [train.py:903] (3/4) Epoch 11, batch 6500, loss[loss=0.1974, simple_loss=0.2822, pruned_loss=0.05631, over 19696.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3073, pruned_loss=0.08019, over 3829175.25 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:39:54,543 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 20:40:16,083 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74797.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:48,464 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:40:56,368 INFO [train.py:903] (3/4) Epoch 11, batch 6550, loss[loss=0.2192, simple_loss=0.29, pruned_loss=0.07422, over 19730.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3092, pruned_loss=0.08133, over 3813672.47 frames. 
], batch size: 51, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:40:56,801 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7807, 1.7573, 1.5382, 1.3405, 1.3279, 1.3730, 0.1488, 0.6406], + device='cuda:3'), covar=tensor([0.0429, 0.0416, 0.0268, 0.0428, 0.0843, 0.0476, 0.0818, 0.0758], + device='cuda:3'), in_proj_covar=tensor([0.0341, 0.0331, 0.0333, 0.0354, 0.0429, 0.0353, 0.0312, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 20:41:52,115 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 5.036e+02 6.201e+02 8.020e+02 1.527e+03, threshold=1.240e+03, percent-clipped=1.0 +2023-04-01 20:41:59,119 INFO [train.py:903] (3/4) Epoch 11, batch 6600, loss[loss=0.2264, simple_loss=0.309, pruned_loss=0.07189, over 19621.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.31, pruned_loss=0.08168, over 3814526.61 frames. ], batch size: 57, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:42:42,902 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-01 20:43:01,253 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74927.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:43:03,149 INFO [train.py:903] (3/4) Epoch 11, batch 6650, loss[loss=0.211, simple_loss=0.2921, pruned_loss=0.06495, over 19762.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3082, pruned_loss=0.08112, over 3818543.02 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 8.0 +2023-04-01 20:43:40,282 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74957.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:00,890 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.506e+02 7.046e+02 9.043e+02 1.602e+03, threshold=1.409e+03, percent-clipped=2.0 +2023-04-01 20:44:01,364 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74973.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:07,813 INFO [train.py:903] (3/4) Epoch 11, batch 6700, loss[loss=0.2443, simple_loss=0.3181, pruned_loss=0.08532, over 18829.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3088, pruned_loss=0.08128, over 3817522.41 frames. 
], batch size: 74, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:44:12,478 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74982.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:29,486 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4161, 1.9441, 2.0311, 2.9171, 2.2164, 2.6797, 2.6387, 2.5528], + device='cuda:3'), covar=tensor([0.0692, 0.0928, 0.0910, 0.0737, 0.0820, 0.0624, 0.0837, 0.0567], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0227, 0.0224, 0.0247, 0.0237, 0.0214, 0.0197, 0.0200], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 20:44:31,738 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74998.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:44:46,277 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3160, 3.9116, 2.6635, 3.4459, 0.9899, 3.6891, 3.6836, 3.8069], + device='cuda:3'), covar=tensor([0.0720, 0.1075, 0.1903, 0.0845, 0.3911, 0.0767, 0.0761, 0.1234], + device='cuda:3'), in_proj_covar=tensor([0.0437, 0.0369, 0.0438, 0.0317, 0.0379, 0.0369, 0.0357, 0.0393], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:45:06,536 INFO [train.py:903] (3/4) Epoch 11, batch 6750, loss[loss=0.2017, simple_loss=0.2728, pruned_loss=0.06537, over 19721.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3083, pruned_loss=0.08115, over 3826439.85 frames. ], batch size: 51, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:45:55,399 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75072.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:45:56,179 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 5.649e+02 7.002e+02 8.709e+02 1.691e+03, threshold=1.400e+03, percent-clipped=2.0 +2023-04-01 20:46:04,203 INFO [train.py:903] (3/4) Epoch 11, batch 6800, loss[loss=0.2313, simple_loss=0.3119, pruned_loss=0.07533, over 19584.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3074, pruned_loss=0.08087, over 3826836.77 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 8.0 +2023-04-01 20:46:51,849 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 20:46:52,893 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 20:46:56,120 INFO [train.py:903] (3/4) Epoch 12, batch 0, loss[loss=0.2547, simple_loss=0.3075, pruned_loss=0.1009, over 19762.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3075, pruned_loss=0.1009, over 19762.00 frames. ], batch size: 46, lr: 7.10e-03, grad_scale: 8.0 +2023-04-01 20:46:56,121 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 20:47:04,905 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7784, 3.3635, 2.7148, 3.1651, 0.8568, 3.1660, 3.1678, 3.4948], + device='cuda:3'), covar=tensor([0.0842, 0.0878, 0.1907, 0.0967, 0.4247, 0.1174, 0.0875, 0.1105], + device='cuda:3'), in_proj_covar=tensor([0.0430, 0.0361, 0.0432, 0.0312, 0.0374, 0.0364, 0.0352, 0.0385], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:3') +2023-04-01 20:47:08,129 INFO [train.py:937] (3/4) Epoch 12, validation: loss=0.1777, simple_loss=0.2788, pruned_loss=0.03825, over 944034.00 frames. 
+2023-04-01 20:47:08,129 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 20:47:20,781 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-01 20:48:11,519 INFO [train.py:903] (3/4) Epoch 12, batch 50, loss[loss=0.2447, simple_loss=0.3192, pruned_loss=0.08512, over 19758.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3081, pruned_loss=0.08092, over 859389.28 frames. ], batch size: 63, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:48:29,486 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.251e+02 6.823e+02 1.011e+03 3.055e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-01 20:48:42,837 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 20:48:54,694 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 20:49:13,862 INFO [train.py:903] (3/4) Epoch 12, batch 100, loss[loss=0.2056, simple_loss=0.2916, pruned_loss=0.05981, over 19686.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3074, pruned_loss=0.08015, over 1514333.49 frames. ], batch size: 58, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:49:22,068 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 20:50:02,890 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75246.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:16,769 INFO [train.py:903] (3/4) Epoch 12, batch 150, loss[loss=0.2598, simple_loss=0.3341, pruned_loss=0.09274, over 19673.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3043, pruned_loss=0.07852, over 2033183.40 frames. ], batch size: 58, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:50:33,998 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75271.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:50:36,119 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.206e+02 6.373e+02 8.343e+02 1.576e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-01 20:50:47,873 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0994, 1.7671, 1.7700, 2.1023, 2.0920, 1.9757, 1.6273, 2.0621], + device='cuda:3'), covar=tensor([0.0921, 0.1597, 0.1340, 0.0932, 0.1124, 0.0457, 0.1215, 0.0657], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0350, 0.0288, 0.0239, 0.0293, 0.0242, 0.0276, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:50:56,167 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75288.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:15,337 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 20:51:18,701 INFO [train.py:903] (3/4) Epoch 12, batch 200, loss[loss=0.2323, simple_loss=0.3117, pruned_loss=0.07646, over 19543.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3043, pruned_loss=0.07897, over 2428017.31 frames. 
], batch size: 56, lr: 7.09e-03, grad_scale: 8.0 +2023-04-01 20:51:42,624 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75326.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:51:45,294 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75328.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:02,495 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3640, 2.1398, 1.9697, 1.8563, 1.5555, 1.7136, 0.4954, 1.1907], + device='cuda:3'), covar=tensor([0.0371, 0.0413, 0.0323, 0.0536, 0.0827, 0.0646, 0.0867, 0.0723], + device='cuda:3'), in_proj_covar=tensor([0.0341, 0.0331, 0.0330, 0.0357, 0.0430, 0.0353, 0.0309, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 20:52:16,217 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75353.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:52:22,162 INFO [train.py:903] (3/4) Epoch 12, batch 250, loss[loss=0.229, simple_loss=0.3034, pruned_loss=0.07728, over 19720.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3067, pruned_loss=0.07988, over 2741508.68 frames. ], batch size: 51, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:52:41,713 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.714e+02 6.835e+02 8.470e+02 1.829e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-04-01 20:52:56,688 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75386.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:53:25,602 INFO [train.py:903] (3/4) Epoch 12, batch 300, loss[loss=0.1905, simple_loss=0.2646, pruned_loss=0.05815, over 19758.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3063, pruned_loss=0.07995, over 2996669.23 frames. ], batch size: 46, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:07,363 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75441.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:54:28,450 INFO [train.py:903] (3/4) Epoch 12, batch 350, loss[loss=0.2529, simple_loss=0.3264, pruned_loss=0.08968, over 19318.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3064, pruned_loss=0.07942, over 3184738.64 frames. ], batch size: 66, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:54:31,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 20:54:45,903 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.463e+02 6.814e+02 8.627e+02 1.955e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-01 20:55:30,817 INFO [train.py:903] (3/4) Epoch 12, batch 400, loss[loss=0.1838, simple_loss=0.264, pruned_loss=0.05178, over 19383.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3047, pruned_loss=0.07836, over 3338608.82 frames. 
], batch size: 47, lr: 7.08e-03, grad_scale: 8.0 +2023-04-01 20:56:16,875 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2664, 1.4222, 1.6773, 1.5100, 2.6367, 2.2864, 2.8434, 1.1674], + device='cuda:3'), covar=tensor([0.2217, 0.3660, 0.2182, 0.1711, 0.1389, 0.1731, 0.1449, 0.3564], + device='cuda:3'), in_proj_covar=tensor([0.0492, 0.0577, 0.0604, 0.0438, 0.0598, 0.0493, 0.0646, 0.0492], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 20:56:32,374 INFO [train.py:903] (3/4) Epoch 12, batch 450, loss[loss=0.2487, simple_loss=0.3241, pruned_loss=0.0867, over 17545.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3065, pruned_loss=0.07912, over 3450916.59 frames. ], batch size: 101, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:56:51,546 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.480e+02 5.172e+02 6.423e+02 7.976e+02 2.291e+03, threshold=1.285e+03, percent-clipped=4.0 +2023-04-01 20:57:09,484 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 20:57:10,707 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 20:57:13,237 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75590.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:57:17,035 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2499, 3.7830, 3.8673, 3.8660, 1.3979, 3.6205, 3.1548, 3.5713], + device='cuda:3'), covar=tensor([0.1372, 0.0910, 0.0653, 0.0653, 0.5243, 0.0766, 0.0712, 0.1155], + device='cuda:3'), in_proj_covar=tensor([0.0677, 0.0603, 0.0804, 0.0688, 0.0731, 0.0559, 0.0492, 0.0741], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 20:57:36,550 INFO [train.py:903] (3/4) Epoch 12, batch 500, loss[loss=0.2209, simple_loss=0.3081, pruned_loss=0.06687, over 19748.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3045, pruned_loss=0.07826, over 3546757.30 frames. ], batch size: 63, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:06,133 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75632.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:18,028 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75642.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:38,553 INFO [train.py:903] (3/4) Epoch 12, batch 550, loss[loss=0.2231, simple_loss=0.2961, pruned_loss=0.07504, over 19590.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3056, pruned_loss=0.079, over 3617982.71 frames. ], batch size: 52, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:58:50,380 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75667.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:58:56,744 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 5.816e+02 6.939e+02 9.327e+02 2.224e+03, threshold=1.388e+03, percent-clipped=13.0 +2023-04-01 20:59:09,128 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-01 20:59:24,533 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8371, 1.3213, 1.5077, 1.7086, 3.3374, 1.1543, 2.2113, 3.7324], + device='cuda:3'), covar=tensor([0.0426, 0.2659, 0.2697, 0.1697, 0.0745, 0.2506, 0.1396, 0.0243], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0340, 0.0351, 0.0319, 0.0347, 0.0330, 0.0333, 0.0354], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 20:59:26,890 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75697.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:38,265 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75705.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 20:59:41,424 INFO [train.py:903] (3/4) Epoch 12, batch 600, loss[loss=0.234, simple_loss=0.3068, pruned_loss=0.08061, over 19465.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3049, pruned_loss=0.07843, over 3668942.94 frames. ], batch size: 49, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 20:59:57,556 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75722.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:22,877 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-01 21:00:30,160 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75747.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:00:42,504 INFO [train.py:903] (3/4) Epoch 12, batch 650, loss[loss=0.2051, simple_loss=0.2878, pruned_loss=0.06119, over 19695.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3054, pruned_loss=0.07888, over 3704918.72 frames. ], batch size: 59, lr: 7.07e-03, grad_scale: 8.0 +2023-04-01 21:01:01,250 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.224e+02 6.354e+02 7.929e+02 1.382e+03, threshold=1.271e+03, percent-clipped=0.0 +2023-04-01 21:01:22,981 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3512, 2.1324, 1.5319, 1.3806, 1.9729, 1.1219, 1.1565, 1.7945], + device='cuda:3'), covar=tensor([0.0866, 0.0643, 0.0959, 0.0658, 0.0422, 0.1143, 0.0739, 0.0403], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0304, 0.0329, 0.0249, 0.0237, 0.0320, 0.0292, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:01:24,570 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. limit=5.0 +2023-04-01 21:01:29,736 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1397, 1.2494, 1.7332, 1.1717, 2.7669, 3.6800, 3.4285, 3.8515], + device='cuda:3'), covar=tensor([0.1548, 0.3502, 0.3038, 0.2119, 0.0471, 0.0141, 0.0193, 0.0192], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0293, 0.0323, 0.0250, 0.0214, 0.0156, 0.0206, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:01:45,574 INFO [train.py:903] (3/4) Epoch 12, batch 700, loss[loss=0.2415, simple_loss=0.3184, pruned_loss=0.08233, over 19786.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.07901, over 3736030.44 frames. ], batch size: 56, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:02:04,533 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.90 vs. 
limit=5.0 +2023-04-01 21:02:46,461 INFO [train.py:903] (3/4) Epoch 12, batch 750, loss[loss=0.226, simple_loss=0.3098, pruned_loss=0.07107, over 19663.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3061, pruned_loss=0.07896, over 3755689.91 frames. ], batch size: 55, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:03:05,206 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 5.547e+02 6.814e+02 8.571e+02 2.504e+03, threshold=1.363e+03, percent-clipped=8.0 +2023-04-01 21:03:49,642 INFO [train.py:903] (3/4) Epoch 12, batch 800, loss[loss=0.2279, simple_loss=0.3076, pruned_loss=0.07409, over 19771.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3039, pruned_loss=0.07761, over 3775786.05 frames. ], batch size: 54, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:07,030 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-01 21:04:50,830 INFO [train.py:903] (3/4) Epoch 12, batch 850, loss[loss=0.2661, simple_loss=0.3288, pruned_loss=0.1017, over 13579.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3048, pruned_loss=0.07775, over 3784050.32 frames. ], batch size: 135, lr: 7.06e-03, grad_scale: 8.0 +2023-04-01 21:04:54,859 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75961.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:10,028 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.208e+02 6.361e+02 7.722e+02 1.579e+03, threshold=1.272e+03, percent-clipped=2.0 +2023-04-01 21:05:25,738 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75986.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:29,799 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1035, 1.1397, 1.5713, 1.1137, 2.5903, 3.4892, 3.1916, 3.6990], + device='cuda:3'), covar=tensor([0.1627, 0.3586, 0.3184, 0.2183, 0.0511, 0.0157, 0.0221, 0.0194], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0294, 0.0324, 0.0251, 0.0214, 0.0157, 0.0206, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:05:46,881 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 21:05:48,555 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:05:53,893 INFO [train.py:903] (3/4) Epoch 12, batch 900, loss[loss=0.2014, simple_loss=0.2664, pruned_loss=0.06815, over 19714.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3051, pruned_loss=0.07814, over 3799236.83 frames. 
], batch size: 46, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:06:13,774 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6541, 1.4123, 1.4420, 2.0028, 1.6194, 1.9546, 1.9468, 1.7088], + device='cuda:3'), covar=tensor([0.0779, 0.0960, 0.1019, 0.0712, 0.0823, 0.0638, 0.0850, 0.0669], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0222, 0.0244, 0.0235, 0.0212, 0.0195, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 21:06:20,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76028.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:06:57,933 INFO [train.py:903] (3/4) Epoch 12, batch 950, loss[loss=0.2591, simple_loss=0.3364, pruned_loss=0.09092, over 18267.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3059, pruned_loss=0.07858, over 3807321.71 frames. ], batch size: 83, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:07:02,614 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 21:07:17,511 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 5.102e+02 6.493e+02 8.387e+02 1.985e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 21:07:59,855 INFO [train.py:903] (3/4) Epoch 12, batch 1000, loss[loss=0.2037, simple_loss=0.2722, pruned_loss=0.06764, over 19770.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3052, pruned_loss=0.07845, over 3814222.80 frames. ], batch size: 47, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:08:42,444 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0154, 3.3307, 1.8590, 2.0258, 3.0341, 1.5045, 1.2598, 1.9177], + device='cuda:3'), covar=tensor([0.1188, 0.0492, 0.0980, 0.0662, 0.0401, 0.1074, 0.0978, 0.0658], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0303, 0.0326, 0.0247, 0.0233, 0.0314, 0.0289, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:08:54,934 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 21:09:02,681 INFO [train.py:903] (3/4) Epoch 12, batch 1050, loss[loss=0.2148, simple_loss=0.2914, pruned_loss=0.06911, over 19655.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3054, pruned_loss=0.07878, over 3811378.95 frames. ], batch size: 53, lr: 7.05e-03, grad_scale: 8.0 +2023-04-01 21:09:20,680 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.538e+02 5.178e+02 6.428e+02 8.624e+02 1.751e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-01 21:09:35,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 21:10:05,079 INFO [train.py:903] (3/4) Epoch 12, batch 1100, loss[loss=0.2269, simple_loss=0.3016, pruned_loss=0.07611, over 19838.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3063, pruned_loss=0.07952, over 3802404.41 frames. 
], batch size: 52, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:10:13,330 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8884, 1.4941, 1.4936, 1.8936, 1.7146, 1.6654, 1.4935, 1.7759], + device='cuda:3'), covar=tensor([0.0885, 0.1475, 0.1323, 0.0908, 0.1079, 0.0491, 0.1259, 0.0656], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0352, 0.0292, 0.0241, 0.0296, 0.0243, 0.0280, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:11:08,884 INFO [train.py:903] (3/4) Epoch 12, batch 1150, loss[loss=0.2265, simple_loss=0.3018, pruned_loss=0.07561, over 19458.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3076, pruned_loss=0.08017, over 3788059.09 frames. ], batch size: 49, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:11:27,482 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 5.432e+02 6.664e+02 8.568e+02 1.731e+03, threshold=1.333e+03, percent-clipped=3.0 +2023-04-01 21:11:44,829 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76287.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:12:10,715 INFO [train.py:903] (3/4) Epoch 12, batch 1200, loss[loss=0.2473, simple_loss=0.3212, pruned_loss=0.08667, over 19502.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3082, pruned_loss=0.08051, over 3810754.99 frames. ], batch size: 64, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:12:15,531 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76312.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:12:45,725 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 21:12:52,915 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76341.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 21:13:14,060 INFO [train.py:903] (3/4) Epoch 12, batch 1250, loss[loss=0.2502, simple_loss=0.3294, pruned_loss=0.08552, over 19520.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3073, pruned_loss=0.07962, over 3832196.59 frames. ], batch size: 54, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:13:31,221 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 5.242e+02 6.279e+02 7.669e+02 1.575e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-01 21:14:15,616 INFO [train.py:903] (3/4) Epoch 12, batch 1300, loss[loss=0.1758, simple_loss=0.2616, pruned_loss=0.04499, over 19615.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3059, pruned_loss=0.07913, over 3839827.58 frames. ], batch size: 50, lr: 7.04e-03, grad_scale: 8.0 +2023-04-01 21:15:18,800 INFO [train.py:903] (3/4) Epoch 12, batch 1350, loss[loss=0.3105, simple_loss=0.3575, pruned_loss=0.1317, over 13253.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3059, pruned_loss=0.07911, over 3831211.11 frames. ], batch size: 135, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:15:37,067 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 5.247e+02 6.486e+02 7.910e+02 1.390e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 21:16:20,445 INFO [train.py:903] (3/4) Epoch 12, batch 1400, loss[loss=0.1944, simple_loss=0.27, pruned_loss=0.05944, over 19762.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3058, pruned_loss=0.07905, over 3832298.93 frames. 
], batch size: 47, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:16:50,821 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2186, 2.0767, 2.0046, 2.2542, 2.2236, 1.9598, 1.9343, 2.1766], + device='cuda:3'), covar=tensor([0.0696, 0.1133, 0.0975, 0.0730, 0.0815, 0.0418, 0.0969, 0.0514], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0354, 0.0292, 0.0242, 0.0298, 0.0245, 0.0279, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:17:01,818 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.8159, 5.2526, 3.0735, 4.5468, 1.3136, 5.0286, 5.1484, 5.1958], + device='cuda:3'), covar=tensor([0.0412, 0.0773, 0.1738, 0.0656, 0.3739, 0.0670, 0.0624, 0.0920], + device='cuda:3'), in_proj_covar=tensor([0.0442, 0.0367, 0.0442, 0.0319, 0.0380, 0.0374, 0.0358, 0.0392], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:17:23,469 INFO [train.py:903] (3/4) Epoch 12, batch 1450, loss[loss=0.2058, simple_loss=0.2922, pruned_loss=0.05963, over 19855.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3054, pruned_loss=0.07837, over 3824215.74 frames. ], batch size: 52, lr: 7.03e-03, grad_scale: 16.0 +2023-04-01 21:17:25,881 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 21:17:40,980 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.252e+02 4.935e+02 6.510e+02 8.250e+02 1.774e+03, threshold=1.302e+03, percent-clipped=3.0 +2023-04-01 21:18:24,878 INFO [train.py:903] (3/4) Epoch 12, batch 1500, loss[loss=0.2037, simple_loss=0.2818, pruned_loss=0.06283, over 19365.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3055, pruned_loss=0.07861, over 3810581.69 frames. ], batch size: 47, lr: 7.03e-03, grad_scale: 8.0 +2023-04-01 21:18:53,783 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76631.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:19:24,250 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76656.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:19:27,185 INFO [train.py:903] (3/4) Epoch 12, batch 1550, loss[loss=0.2861, simple_loss=0.3485, pruned_loss=0.1118, over 18191.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.305, pruned_loss=0.07829, over 3810801.48 frames. ], batch size: 83, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:19:46,334 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 5.709e+02 6.905e+02 9.210e+02 1.884e+03, threshold=1.381e+03, percent-clipped=4.0 +2023-04-01 21:20:01,260 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76685.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:20:19,383 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-01 21:20:23,685 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-01 21:20:29,938 INFO [train.py:903] (3/4) Epoch 12, batch 1600, loss[loss=0.2096, simple_loss=0.2806, pruned_loss=0.06927, over 19348.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3048, pruned_loss=0.07832, over 3812357.93 frames. ], batch size: 47, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:20:54,022 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-01 21:21:16,449 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76746.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:21:23,382 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2700, 2.1410, 1.6303, 1.1765, 2.0380, 1.0013, 1.1378, 1.9948], + device='cuda:3'), covar=tensor([0.0855, 0.0616, 0.0854, 0.0841, 0.0439, 0.1156, 0.0758, 0.0309], + device='cuda:3'), in_proj_covar=tensor([0.0289, 0.0300, 0.0321, 0.0243, 0.0231, 0.0316, 0.0289, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:21:29,664 INFO [train.py:903] (3/4) Epoch 12, batch 1650, loss[loss=0.2027, simple_loss=0.2727, pruned_loss=0.06639, over 19618.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3045, pruned_loss=0.07813, over 3816900.93 frames. ], batch size: 50, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:21:30,367 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-01 21:21:46,972 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76771.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:21:49,949 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.672e+02 5.088e+02 6.360e+02 7.707e+02 1.579e+03, threshold=1.272e+03, percent-clipped=3.0 +2023-04-01 21:22:06,280 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4435, 1.4769, 1.4173, 2.0025, 1.5331, 1.9371, 1.6028, 1.3140], + device='cuda:3'), covar=tensor([0.2942, 0.2591, 0.1679, 0.1611, 0.2495, 0.1184, 0.3163, 0.2858], + device='cuda:3'), in_proj_covar=tensor([0.0791, 0.0816, 0.0646, 0.0889, 0.0781, 0.0706, 0.0779, 0.0709], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:22:22,696 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76800.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:22:33,401 INFO [train.py:903] (3/4) Epoch 12, batch 1700, loss[loss=0.2375, simple_loss=0.3104, pruned_loss=0.08236, over 19565.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3044, pruned_loss=0.07833, over 3821382.83 frames. ], batch size: 61, lr: 7.02e-03, grad_scale: 8.0 +2023-04-01 21:22:59,229 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3348, 1.7776, 1.9608, 2.8060, 2.1663, 2.6400, 2.6065, 2.4403], + device='cuda:3'), covar=tensor([0.0704, 0.0893, 0.0928, 0.0860, 0.0917, 0.0638, 0.0875, 0.0565], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0225, 0.0224, 0.0246, 0.0236, 0.0213, 0.0195, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 21:23:10,318 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. 
Duration: 25.85 +2023-04-01 21:23:16,945 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0990, 2.0217, 1.8847, 1.7756, 1.6988, 1.8552, 1.0590, 1.4919], + device='cuda:3'), covar=tensor([0.0323, 0.0422, 0.0304, 0.0458, 0.0686, 0.0590, 0.0805, 0.0626], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0329, 0.0332, 0.0356, 0.0427, 0.0354, 0.0309, 0.0322], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:23:33,278 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76857.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:23:34,184 INFO [train.py:903] (3/4) Epoch 12, batch 1750, loss[loss=0.1988, simple_loss=0.273, pruned_loss=0.06225, over 19314.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3051, pruned_loss=0.07912, over 3814498.32 frames. ], batch size: 44, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:23:36,383 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-01 21:23:53,639 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 5.587e+02 7.065e+02 8.864e+02 2.096e+03, threshold=1.413e+03, percent-clipped=6.0 +2023-04-01 21:24:37,142 INFO [train.py:903] (3/4) Epoch 12, batch 1800, loss[loss=0.2003, simple_loss=0.285, pruned_loss=0.05776, over 19770.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3048, pruned_loss=0.07847, over 3823198.70 frames. ], batch size: 54, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:24:44,484 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4807, 2.3344, 1.7955, 1.6023, 2.2169, 1.3302, 1.3559, 1.9953], + device='cuda:3'), covar=tensor([0.0941, 0.0711, 0.0876, 0.0730, 0.0390, 0.1048, 0.0717, 0.0366], + device='cuda:3'), in_proj_covar=tensor([0.0287, 0.0300, 0.0320, 0.0243, 0.0231, 0.0317, 0.0289, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:25:32,729 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 21:25:38,683 INFO [train.py:903] (3/4) Epoch 12, batch 1850, loss[loss=0.1931, simple_loss=0.274, pruned_loss=0.05606, over 19465.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3058, pruned_loss=0.07944, over 3812226.75 frames. ], batch size: 49, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:25:45,874 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.06 vs. limit=5.0 +2023-04-01 21:25:59,469 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.186e+02 5.640e+02 7.026e+02 8.457e+02 1.689e+03, threshold=1.405e+03, percent-clipped=1.0 +2023-04-01 21:26:12,301 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 21:26:34,355 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77002.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:26:41,889 INFO [train.py:903] (3/4) Epoch 12, batch 1900, loss[loss=0.2441, simple_loss=0.3212, pruned_loss=0.0835, over 19665.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3059, pruned_loss=0.07936, over 3826817.20 frames. ], batch size: 60, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:27:00,201 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-01 21:27:04,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 21:27:06,424 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:27:06,441 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77027.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:28,494 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-01 21:27:36,569 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77052.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:27:42,275 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77056.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:27:43,956 INFO [train.py:903] (3/4) Epoch 12, batch 1950, loss[loss=0.2465, simple_loss=0.3261, pruned_loss=0.08349, over 19389.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3071, pruned_loss=0.08017, over 3823496.75 frames. ], batch size: 70, lr: 7.01e-03, grad_scale: 8.0 +2023-04-01 21:28:03,224 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.700e+02 5.468e+02 7.092e+02 9.065e+02 1.810e+03, threshold=1.418e+03, percent-clipped=4.0 +2023-04-01 21:28:11,862 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77081.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 21:28:45,468 INFO [train.py:903] (3/4) Epoch 12, batch 2000, loss[loss=0.2428, simple_loss=0.3147, pruned_loss=0.08543, over 18162.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3081, pruned_loss=0.08072, over 3815243.95 frames. ], batch size: 83, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:28:51,490 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77113.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:29:15,566 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5407, 1.2845, 1.1380, 1.4378, 1.0987, 1.3313, 1.1920, 1.3494], + device='cuda:3'), covar=tensor([0.0993, 0.1141, 0.1540, 0.0935, 0.1194, 0.0551, 0.1285, 0.0761], + device='cuda:3'), in_proj_covar=tensor([0.0252, 0.0349, 0.0292, 0.0238, 0.0294, 0.0242, 0.0278, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:29:43,047 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 21:29:46,529 INFO [train.py:903] (3/4) Epoch 12, batch 2050, loss[loss=0.2509, simple_loss=0.3186, pruned_loss=0.09162, over 17640.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3086, pruned_loss=0.08097, over 3801636.06 frames. ], batch size: 101, lr: 7.00e-03, grad_scale: 8.0 +2023-04-01 21:30:02,210 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 21:30:03,436 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 21:30:06,833 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.225e+02 5.837e+02 7.308e+02 9.815e+02 2.165e+03, threshold=1.462e+03, percent-clipped=5.0 +2023-04-01 21:30:26,544 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-01 21:30:40,585 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77201.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:30:49,629 INFO [train.py:903] (3/4) Epoch 12, batch 2100, loss[loss=0.2208, simple_loss=0.3034, pruned_loss=0.06907, over 19766.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3083, pruned_loss=0.08084, over 3797944.86 frames. ], batch size: 63, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:31:10,324 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 21:31:12,334 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8575, 4.4938, 3.2047, 4.0104, 1.9584, 4.2026, 4.2111, 4.3147], + device='cuda:3'), covar=tensor([0.0503, 0.0824, 0.1682, 0.0760, 0.2850, 0.0725, 0.0780, 0.0990], + device='cuda:3'), in_proj_covar=tensor([0.0445, 0.0369, 0.0444, 0.0321, 0.0384, 0.0378, 0.0364, 0.0397], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:31:17,043 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77229.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:31:20,315 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 21:31:28,734 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6847, 1.3913, 1.3867, 2.1754, 1.7466, 2.1315, 2.0965, 1.7976], + device='cuda:3'), covar=tensor([0.0807, 0.0965, 0.1055, 0.0815, 0.0867, 0.0642, 0.0870, 0.0684], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0226, 0.0226, 0.0249, 0.0236, 0.0215, 0.0197, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 21:31:40,667 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 21:31:52,752 INFO [train.py:903] (3/4) Epoch 12, batch 2150, loss[loss=0.3022, simple_loss=0.3541, pruned_loss=0.1252, over 13271.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3075, pruned_loss=0.08051, over 3787024.47 frames. ], batch size: 136, lr: 7.00e-03, grad_scale: 4.0 +2023-04-01 21:32:13,139 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.202e+02 5.327e+02 7.168e+02 9.347e+02 2.125e+03, threshold=1.434e+03, percent-clipped=4.0 +2023-04-01 21:32:33,277 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77291.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:39,476 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77295.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:32:51,912 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-01 21:32:53,871 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0405, 1.2432, 1.6893, 0.8979, 2.2875, 3.0613, 2.7691, 3.1868], + device='cuda:3'), covar=tensor([0.1571, 0.3401, 0.2907, 0.2299, 0.0487, 0.0175, 0.0233, 0.0232], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0295, 0.0324, 0.0250, 0.0215, 0.0158, 0.0205, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:32:55,903 INFO [train.py:903] (3/4) Epoch 12, batch 2200, loss[loss=0.2223, simple_loss=0.3075, pruned_loss=0.06857, over 19792.00 frames. 
], tot_loss[loss=0.2334, simple_loss=0.307, pruned_loss=0.07988, over 3805720.43 frames. ], batch size: 56, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:33:05,541 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77316.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:33:25,220 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-01 21:33:57,353 INFO [train.py:903] (3/4) Epoch 12, batch 2250, loss[loss=0.2211, simple_loss=0.2948, pruned_loss=0.07367, over 19685.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3066, pruned_loss=0.07925, over 3822701.21 frames. ], batch size: 53, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:34:08,714 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0280, 1.2276, 1.6896, 1.0446, 2.5000, 3.3198, 3.0029, 3.4931], + device='cuda:3'), covar=tensor([0.1610, 0.3325, 0.2922, 0.2216, 0.0493, 0.0142, 0.0232, 0.0203], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0294, 0.0325, 0.0250, 0.0215, 0.0158, 0.0206, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:34:18,136 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.298e+02 6.341e+02 7.997e+02 1.542e+03, threshold=1.268e+03, percent-clipped=2.0 +2023-04-01 21:34:27,396 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7043, 4.1746, 4.4033, 4.4075, 1.6362, 4.1100, 3.5963, 4.0656], + device='cuda:3'), covar=tensor([0.1397, 0.0896, 0.0543, 0.0556, 0.5187, 0.0769, 0.0631, 0.1071], + device='cuda:3'), in_proj_covar=tensor([0.0683, 0.0613, 0.0812, 0.0690, 0.0738, 0.0565, 0.0498, 0.0749], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 21:34:35,674 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 21:34:58,607 INFO [train.py:903] (3/4) Epoch 12, batch 2300, loss[loss=0.2652, simple_loss=0.3339, pruned_loss=0.09829, over 19666.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3063, pruned_loss=0.07933, over 3819606.50 frames. ], batch size: 60, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:35:10,957 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-01 21:35:59,780 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77457.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:36:00,814 INFO [train.py:903] (3/4) Epoch 12, batch 2350, loss[loss=0.1877, simple_loss=0.2611, pruned_loss=0.0572, over 19377.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3048, pruned_loss=0.07896, over 3812917.12 frames. ], batch size: 47, lr: 6.99e-03, grad_scale: 4.0 +2023-04-01 21:36:22,285 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.245e+02 6.457e+02 8.417e+02 4.507e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-01 21:36:41,224 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 21:36:59,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 21:37:02,530 INFO [train.py:903] (3/4) Epoch 12, batch 2400, loss[loss=0.3063, simple_loss=0.3673, pruned_loss=0.1227, over 19332.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.306, pruned_loss=0.0797, over 3827453.00 frames. 
], batch size: 66, lr: 6.99e-03, grad_scale: 8.0 +2023-04-01 21:37:17,465 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8389, 1.5349, 1.4371, 2.0671, 1.6274, 2.2432, 2.2430, 1.9573], + device='cuda:3'), covar=tensor([0.0724, 0.0840, 0.0983, 0.0794, 0.0835, 0.0538, 0.0739, 0.0571], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0224, 0.0224, 0.0248, 0.0234, 0.0214, 0.0196, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 21:37:36,455 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5715, 1.6459, 1.8204, 2.0895, 1.4275, 1.8421, 2.0000, 1.7432], + device='cuda:3'), covar=tensor([0.3361, 0.2758, 0.1429, 0.1585, 0.2970, 0.1455, 0.3501, 0.2557], + device='cuda:3'), in_proj_covar=tensor([0.0802, 0.0823, 0.0653, 0.0897, 0.0788, 0.0711, 0.0789, 0.0719], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:38:04,501 INFO [train.py:903] (3/4) Epoch 12, batch 2450, loss[loss=0.2296, simple_loss=0.3101, pruned_loss=0.07456, over 19777.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.307, pruned_loss=0.08012, over 3822131.59 frames. ], batch size: 56, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:38:21,469 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:21,542 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77572.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:22,404 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77573.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:24,483 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.530e+02 6.529e+02 8.263e+02 1.639e+03, threshold=1.306e+03, percent-clipped=2.0 +2023-04-01 21:38:52,868 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77597.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:38:56,110 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.8535, 5.3021, 3.1035, 4.6730, 1.5213, 5.1939, 5.1958, 5.3548], + device='cuda:3'), covar=tensor([0.0372, 0.0777, 0.1754, 0.0593, 0.3497, 0.0517, 0.0614, 0.0910], + device='cuda:3'), in_proj_covar=tensor([0.0440, 0.0365, 0.0440, 0.0319, 0.0380, 0.0373, 0.0360, 0.0392], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:39:05,885 INFO [train.py:903] (3/4) Epoch 12, batch 2500, loss[loss=0.2245, simple_loss=0.3155, pruned_loss=0.06679, over 19661.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3071, pruned_loss=0.08004, over 3813055.72 frames. 
], batch size: 55, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:39:27,255 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1737, 1.1199, 1.1015, 1.3484, 0.9896, 1.3162, 1.3781, 1.2605], + device='cuda:3'), covar=tensor([0.0896, 0.1009, 0.1147, 0.0715, 0.0925, 0.0805, 0.0803, 0.0733], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0221, 0.0222, 0.0244, 0.0232, 0.0211, 0.0193, 0.0199], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 21:39:39,400 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77634.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,550 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77635.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:39:40,772 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8373, 1.7320, 1.4399, 1.8394, 1.8643, 1.4010, 1.4390, 1.6980], + device='cuda:3'), covar=tensor([0.1115, 0.1673, 0.1791, 0.1151, 0.1298, 0.0972, 0.1671, 0.0924], + device='cuda:3'), in_proj_covar=tensor([0.0250, 0.0347, 0.0290, 0.0237, 0.0292, 0.0240, 0.0277, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:39:45,169 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77639.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:08,013 INFO [train.py:903] (3/4) Epoch 12, batch 2550, loss[loss=0.2241, simple_loss=0.295, pruned_loss=0.0766, over 19540.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3074, pruned_loss=0.07999, over 3817938.97 frames. ], batch size: 56, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:40:30,544 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.340e+02 6.709e+02 8.508e+02 1.809e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-01 21:40:46,852 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:40:54,796 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:41:05,578 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-01 21:41:10,777 INFO [train.py:903] (3/4) Epoch 12, batch 2600, loss[loss=0.2966, simple_loss=0.3518, pruned_loss=0.1207, over 13626.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3072, pruned_loss=0.08003, over 3816955.25 frames. ], batch size: 136, lr: 6.98e-03, grad_scale: 8.0 +2023-04-01 21:41:54,848 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 21:42:03,120 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77750.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:07,423 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:42:11,588 INFO [train.py:903] (3/4) Epoch 12, batch 2650, loss[loss=0.2071, simple_loss=0.2713, pruned_loss=0.07144, over 19301.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3069, pruned_loss=0.07959, over 3834096.91 frames. 
], batch size: 44, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:42:32,176 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.775e+02 6.860e+02 8.613e+02 1.817e+03, threshold=1.372e+03, percent-clipped=5.0 +2023-04-01 21:42:33,297 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-01 21:43:06,971 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9848, 3.6301, 2.2516, 3.3010, 0.9107, 3.4347, 3.4275, 3.5211], + device='cuda:3'), covar=tensor([0.0840, 0.1264, 0.2292, 0.0821, 0.3932, 0.0890, 0.0868, 0.1142], + device='cuda:3'), in_proj_covar=tensor([0.0449, 0.0374, 0.0450, 0.0326, 0.0386, 0.0379, 0.0367, 0.0400], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:43:12,310 INFO [train.py:903] (3/4) Epoch 12, batch 2700, loss[loss=0.2838, simple_loss=0.3426, pruned_loss=0.1125, over 18129.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3072, pruned_loss=0.07961, over 3836169.59 frames. ], batch size: 83, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:43:38,341 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77828.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:08,828 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77853.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:44:14,414 INFO [train.py:903] (3/4) Epoch 12, batch 2750, loss[loss=0.2332, simple_loss=0.315, pruned_loss=0.07566, over 19788.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3065, pruned_loss=0.07904, over 3834600.94 frames. ], batch size: 56, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:44:36,446 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 5.785e+02 6.935e+02 8.560e+02 1.739e+03, threshold=1.387e+03, percent-clipped=4.0 +2023-04-01 21:45:15,207 INFO [train.py:903] (3/4) Epoch 12, batch 2800, loss[loss=0.2301, simple_loss=0.3129, pruned_loss=0.0737, over 19342.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3065, pruned_loss=0.07908, over 3828347.46 frames. ], batch size: 70, lr: 6.97e-03, grad_scale: 8.0 +2023-04-01 21:46:01,263 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77944.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:19,707 INFO [train.py:903] (3/4) Epoch 12, batch 2850, loss[loss=0.2479, simple_loss=0.3226, pruned_loss=0.08665, over 19643.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3068, pruned_loss=0.07946, over 3813364.86 frames. 
], batch size: 55, lr: 6.97e-03, grad_scale: 4.0 +2023-04-01 21:46:22,391 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1409, 2.3227, 2.4679, 2.3230, 4.6786, 1.4131, 2.8768, 4.8966], + device='cuda:3'), covar=tensor([0.0342, 0.2216, 0.2155, 0.1559, 0.0669, 0.2571, 0.1102, 0.0187], + device='cuda:3'), in_proj_covar=tensor([0.0354, 0.0333, 0.0345, 0.0315, 0.0340, 0.0330, 0.0331, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 21:46:32,683 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77969.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:46:41,208 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.493e+02 5.663e+02 6.634e+02 8.773e+02 3.814e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-01 21:46:43,750 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77978.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:20,362 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78006.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:22,309 INFO [train.py:903] (3/4) Epoch 12, batch 2900, loss[loss=0.173, simple_loss=0.2518, pruned_loss=0.0471, over 19802.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3069, pruned_loss=0.07935, over 3801925.62 frames. ], batch size: 48, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:47:22,329 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-01 21:47:25,128 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78010.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:39,756 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5939, 1.9188, 2.2555, 1.8768, 3.2215, 2.7780, 3.6197, 1.5160], + device='cuda:3'), covar=tensor([0.2001, 0.3335, 0.2189, 0.1623, 0.1369, 0.1694, 0.1411, 0.3404], + device='cuda:3'), in_proj_covar=tensor([0.0489, 0.0578, 0.0608, 0.0437, 0.0594, 0.0489, 0.0644, 0.0495], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:47:53,543 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78031.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:47:57,901 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78035.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:02,345 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78039.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:48:25,036 INFO [train.py:903] (3/4) Epoch 12, batch 2950, loss[loss=0.2154, simple_loss=0.2872, pruned_loss=0.07179, over 19842.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3063, pruned_loss=0.07926, over 3792249.37 frames. ], batch size: 52, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:48:48,724 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.583e+02 6.866e+02 9.102e+02 1.641e+03, threshold=1.373e+03, percent-clipped=7.0 +2023-04-01 21:48:57,555 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. 
limit=2.0 +2023-04-01 21:49:10,007 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78093.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:49:28,178 INFO [train.py:903] (3/4) Epoch 12, batch 3000, loss[loss=0.2024, simple_loss=0.2776, pruned_loss=0.06357, over 19779.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3053, pruned_loss=0.07891, over 3797722.02 frames. ], batch size: 47, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:49:28,178 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 21:49:40,665 INFO [train.py:937] (3/4) Epoch 12, validation: loss=0.1772, simple_loss=0.2779, pruned_loss=0.0383, over 944034.00 frames. +2023-04-01 21:49:40,666 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 21:49:45,492 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-01 21:50:04,820 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-01 21:50:38,059 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78154.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:50:42,292 INFO [train.py:903] (3/4) Epoch 12, batch 3050, loss[loss=0.1666, simple_loss=0.2529, pruned_loss=0.04016, over 19757.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3052, pruned_loss=0.07871, over 3801848.39 frames. ], batch size: 47, lr: 6.96e-03, grad_scale: 4.0 +2023-04-01 21:50:57,239 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1966, 1.1968, 1.7210, 1.2510, 2.5539, 3.5801, 3.3929, 3.8216], + device='cuda:3'), covar=tensor([0.1560, 0.3586, 0.2914, 0.2139, 0.0529, 0.0149, 0.0185, 0.0186], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0296, 0.0325, 0.0252, 0.0216, 0.0159, 0.0205, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:51:04,785 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 5.238e+02 6.566e+02 8.426e+02 1.854e+03, threshold=1.313e+03, percent-clipped=6.0 +2023-04-01 21:51:43,597 INFO [train.py:903] (3/4) Epoch 12, batch 3100, loss[loss=0.2368, simple_loss=0.3153, pruned_loss=0.07916, over 19731.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3059, pruned_loss=0.07898, over 3812219.63 frames. ], batch size: 63, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:52:07,085 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-01 21:52:36,438 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:52:45,567 INFO [train.py:903] (3/4) Epoch 12, batch 3150, loss[loss=0.3227, simple_loss=0.3735, pruned_loss=0.136, over 13105.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3057, pruned_loss=0.07875, over 3804640.88 frames. ], batch size: 135, lr: 6.95e-03, grad_scale: 4.0 +2023-04-01 21:53:07,629 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.745e+02 5.622e+02 7.364e+02 8.683e+02 1.879e+03, threshold=1.473e+03, percent-clipped=3.0 +2023-04-01 21:53:15,182 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-01 21:53:47,075 INFO [train.py:903] (3/4) Epoch 12, batch 3200, loss[loss=0.2344, simple_loss=0.2976, pruned_loss=0.08558, over 19734.00 frames. 
], tot_loss[loss=0.2315, simple_loss=0.3057, pruned_loss=0.0787, over 3808633.35 frames. ], batch size: 51, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:39,760 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:54:50,789 INFO [train.py:903] (3/4) Epoch 12, batch 3250, loss[loss=0.2149, simple_loss=0.2994, pruned_loss=0.06518, over 19660.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3061, pruned_loss=0.07906, over 3804142.41 frames. ], batch size: 55, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:54:57,746 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-01 21:55:10,601 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0038, 2.0816, 2.3073, 2.8212, 2.0007, 2.6571, 2.5310, 2.1681], + device='cuda:3'), covar=tensor([0.3528, 0.3056, 0.1373, 0.1931, 0.3438, 0.1539, 0.3468, 0.2535], + device='cuda:3'), in_proj_covar=tensor([0.0798, 0.0823, 0.0652, 0.0895, 0.0785, 0.0711, 0.0788, 0.0717], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:55:11,649 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78374.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:13,651 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.227e+02 6.164e+02 7.465e+02 1.234e+03, threshold=1.233e+03, percent-clipped=0.0 +2023-04-01 21:55:54,291 INFO [train.py:903] (3/4) Epoch 12, batch 3300, loss[loss=0.2172, simple_loss=0.2787, pruned_loss=0.07785, over 19371.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3061, pruned_loss=0.07899, over 3797523.65 frames. ], batch size: 47, lr: 6.95e-03, grad_scale: 8.0 +2023-04-01 21:55:57,153 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78410.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:55:57,916 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-01 21:56:26,541 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78435.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:56:55,606 INFO [train.py:903] (3/4) Epoch 12, batch 3350, loss[loss=0.2299, simple_loss=0.3053, pruned_loss=0.07731, over 19583.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3064, pruned_loss=0.07919, over 3797811.90 frames. ], batch size: 52, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:56:58,304 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2985, 1.4131, 1.6364, 1.4844, 2.3937, 2.1369, 2.5170, 0.9044], + device='cuda:3'), covar=tensor([0.2109, 0.3684, 0.2246, 0.1747, 0.1408, 0.1839, 0.1339, 0.3701], + device='cuda:3'), in_proj_covar=tensor([0.0483, 0.0570, 0.0598, 0.0430, 0.0587, 0.0483, 0.0636, 0.0486], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 21:57:18,016 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 5.251e+02 6.145e+02 6.896e+02 1.617e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-01 21:57:21,542 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:57:37,005 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.15 vs. 
limit=5.0 +2023-04-01 21:57:57,207 INFO [train.py:903] (3/4) Epoch 12, batch 3400, loss[loss=0.2269, simple_loss=0.302, pruned_loss=0.07592, over 19790.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3059, pruned_loss=0.07908, over 3806076.50 frames. ], batch size: 56, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:58:04,410 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2721, 2.4142, 2.4603, 3.2733, 2.3999, 3.2095, 2.8073, 2.3851], + device='cuda:3'), covar=tensor([0.3614, 0.2962, 0.1434, 0.1863, 0.3492, 0.1389, 0.3398, 0.2590], + device='cuda:3'), in_proj_covar=tensor([0.0800, 0.0824, 0.0652, 0.0895, 0.0784, 0.0711, 0.0789, 0.0717], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:59:00,759 INFO [train.py:903] (3/4) Epoch 12, batch 3450, loss[loss=0.251, simple_loss=0.3245, pruned_loss=0.08871, over 19318.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3058, pruned_loss=0.07868, over 3814243.25 frames. ], batch size: 66, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 21:59:06,198 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-01 21:59:06,691 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8350, 1.8959, 2.0724, 2.7735, 1.9179, 2.6965, 2.3676, 1.9086], + device='cuda:3'), covar=tensor([0.3611, 0.3076, 0.1468, 0.1683, 0.3290, 0.1332, 0.3580, 0.2844], + device='cuda:3'), in_proj_covar=tensor([0.0800, 0.0827, 0.0652, 0.0894, 0.0787, 0.0710, 0.0788, 0.0718], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 21:59:21,409 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78574.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 21:59:23,277 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.692e+02 7.472e+02 9.495e+02 2.057e+03, threshold=1.494e+03, percent-clipped=9.0 +2023-04-01 21:59:44,692 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78594.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:00:03,311 INFO [train.py:903] (3/4) Epoch 12, batch 3500, loss[loss=0.2517, simple_loss=0.3241, pruned_loss=0.08964, over 18208.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3055, pruned_loss=0.07877, over 3807932.13 frames. ], batch size: 83, lr: 6.94e-03, grad_scale: 8.0 +2023-04-01 22:00:54,070 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5767, 1.4029, 1.4057, 1.7946, 1.4375, 1.7799, 1.8429, 1.6145], + device='cuda:3'), covar=tensor([0.0806, 0.0975, 0.1024, 0.0715, 0.0844, 0.0724, 0.0782, 0.0671], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0226, 0.0225, 0.0248, 0.0236, 0.0213, 0.0196, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:01:05,327 INFO [train.py:903] (3/4) Epoch 12, batch 3550, loss[loss=0.2315, simple_loss=0.3091, pruned_loss=0.07699, over 19767.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3053, pruned_loss=0.0781, over 3808861.04 frames. ], batch size: 63, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:01:09,484 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-04-01 22:01:26,741 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.897e+02 5.737e+02 7.042e+02 1.025e+03 1.962e+03, threshold=1.408e+03, percent-clipped=6.0 +2023-04-01 22:02:07,313 INFO [train.py:903] (3/4) Epoch 12, batch 3600, loss[loss=0.2217, simple_loss=0.3075, pruned_loss=0.06796, over 19698.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3047, pruned_loss=0.07761, over 3821053.73 frames. ], batch size: 59, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:02:08,953 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78709.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:02:13,718 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-01 22:03:09,171 INFO [train.py:903] (3/4) Epoch 12, batch 3650, loss[loss=0.1831, simple_loss=0.2637, pruned_loss=0.05124, over 19499.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3054, pruned_loss=0.07835, over 3815772.28 frames. ], batch size: 49, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:03:29,490 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8173, 3.1817, 3.2663, 3.3095, 1.3363, 3.1683, 2.8199, 3.0110], + device='cuda:3'), covar=tensor([0.1417, 0.0971, 0.0765, 0.0772, 0.4826, 0.0855, 0.0729, 0.1274], + device='cuda:3'), in_proj_covar=tensor([0.0674, 0.0612, 0.0801, 0.0685, 0.0729, 0.0569, 0.0488, 0.0738], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 22:03:33,802 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.478e+02 5.318e+02 6.562e+02 8.219e+02 2.478e+03, threshold=1.312e+03, percent-clipped=4.0 +2023-04-01 22:04:14,200 INFO [train.py:903] (3/4) Epoch 12, batch 3700, loss[loss=0.1976, simple_loss=0.2856, pruned_loss=0.05477, over 19659.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3043, pruned_loss=0.07764, over 3832883.65 frames. ], batch size: 53, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:04:31,405 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78822.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:04:31,618 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5892, 1.3895, 1.3772, 1.9149, 1.7046, 1.8646, 1.9527, 1.7779], + device='cuda:3'), covar=tensor([0.0898, 0.1022, 0.1097, 0.0943, 0.0871, 0.0750, 0.0879, 0.0686], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0226, 0.0224, 0.0249, 0.0237, 0.0214, 0.0196, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:04:34,438 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-01 22:05:15,856 INFO [train.py:903] (3/4) Epoch 12, batch 3750, loss[loss=0.2023, simple_loss=0.2871, pruned_loss=0.05874, over 19521.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3041, pruned_loss=0.07765, over 3827617.20 frames. ], batch size: 56, lr: 6.93e-03, grad_scale: 8.0 +2023-04-01 22:05:37,741 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.608e+02 5.233e+02 6.179e+02 8.242e+02 1.500e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-01 22:05:59,317 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 22:06:05,769 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. 
limit=2.0 +2023-04-01 22:06:16,426 INFO [train.py:903] (3/4) Epoch 12, batch 3800, loss[loss=0.2709, simple_loss=0.3392, pruned_loss=0.1013, over 13401.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3043, pruned_loss=0.07755, over 3822788.57 frames. ], batch size: 136, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:06:29,250 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78918.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:06:53,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-01 22:06:54,049 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78937.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:17,949 INFO [train.py:903] (3/4) Epoch 12, batch 3850, loss[loss=0.2679, simple_loss=0.3401, pruned_loss=0.09784, over 18839.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.306, pruned_loss=0.07864, over 3811947.14 frames. ], batch size: 74, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:07:28,336 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:40,746 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.982e+02 6.977e+02 9.373e+02 2.137e+03, threshold=1.395e+03, percent-clipped=8.0 +2023-04-01 22:07:49,869 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78983.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:07:58,104 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78990.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:08:21,051 INFO [train.py:903] (3/4) Epoch 12, batch 3900, loss[loss=0.2245, simple_loss=0.3052, pruned_loss=0.07189, over 19606.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3064, pruned_loss=0.07878, over 3817965.85 frames. ], batch size: 61, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:08:51,038 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79033.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:09:15,079 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7878, 1.8362, 2.0994, 2.4271, 1.6376, 2.3046, 2.2763, 1.9439], + device='cuda:3'), covar=tensor([0.3296, 0.3035, 0.1464, 0.1660, 0.3229, 0.1484, 0.3504, 0.2652], + device='cuda:3'), in_proj_covar=tensor([0.0802, 0.0826, 0.0655, 0.0900, 0.0787, 0.0716, 0.0794, 0.0718], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 22:09:22,288 INFO [train.py:903] (3/4) Epoch 12, batch 3950, loss[loss=0.1975, simple_loss=0.2658, pruned_loss=0.06463, over 19763.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3055, pruned_loss=0.07845, over 3812015.42 frames. ], batch size: 47, lr: 6.92e-03, grad_scale: 8.0 +2023-04-01 22:09:24,092 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 22:09:29,035 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-01 22:09:29,441 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3300, 2.0827, 2.0130, 1.7296, 1.4299, 1.6782, 0.6622, 1.2180], + device='cuda:3'), covar=tensor([0.0436, 0.0497, 0.0342, 0.0691, 0.1013, 0.0687, 0.0945, 0.0772], + device='cuda:3'), in_proj_covar=tensor([0.0346, 0.0334, 0.0336, 0.0361, 0.0431, 0.0360, 0.0316, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:09:43,634 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 6.132e+02 6.993e+02 8.356e+02 2.478e+03, threshold=1.399e+03, percent-clipped=5.0 +2023-04-01 22:10:22,895 INFO [train.py:903] (3/4) Epoch 12, batch 4000, loss[loss=0.2563, simple_loss=0.3307, pruned_loss=0.09096, over 19707.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3069, pruned_loss=0.07937, over 3812824.07 frames. ], batch size: 63, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:13,118 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-01 22:11:21,652 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.31 vs. limit=5.0 +2023-04-01 22:11:24,605 INFO [train.py:903] (3/4) Epoch 12, batch 4050, loss[loss=0.23, simple_loss=0.312, pruned_loss=0.07398, over 18119.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3061, pruned_loss=0.07916, over 3801839.18 frames. ], batch size: 83, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:11:47,130 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.067e+02 6.190e+02 7.758e+02 2.001e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-01 22:11:53,113 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-01 22:12:05,411 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3169, 2.0843, 1.9379, 1.7048, 1.5514, 1.7893, 0.5245, 1.1643], + device='cuda:3'), covar=tensor([0.0383, 0.0484, 0.0354, 0.0649, 0.0970, 0.0685, 0.1016, 0.0825], + device='cuda:3'), in_proj_covar=tensor([0.0346, 0.0336, 0.0338, 0.0362, 0.0433, 0.0361, 0.0318, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:12:07,830 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79193.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:12:26,659 INFO [train.py:903] (3/4) Epoch 12, batch 4100, loss[loss=0.201, simple_loss=0.2734, pruned_loss=0.06433, over 19752.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3058, pruned_loss=0.07871, over 3796162.13 frames. ], batch size: 46, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:12:39,328 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79218.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:13:03,857 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-01 22:13:27,008 INFO [train.py:903] (3/4) Epoch 12, batch 4150, loss[loss=0.2218, simple_loss=0.3055, pruned_loss=0.06903, over 19478.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3059, pruned_loss=0.07882, over 3769752.69 frames. 
], batch size: 64, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:13:49,448 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 5.552e+02 6.928e+02 9.304e+02 2.111e+03, threshold=1.386e+03, percent-clipped=6.0 +2023-04-01 22:14:06,893 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79289.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:23,430 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79303.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:14:28,771 INFO [train.py:903] (3/4) Epoch 12, batch 4200, loss[loss=0.2009, simple_loss=0.2789, pruned_loss=0.06139, over 19854.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3057, pruned_loss=0.07865, over 3779376.84 frames. ], batch size: 52, lr: 6.91e-03, grad_scale: 8.0 +2023-04-01 22:14:33,323 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-01 22:14:35,721 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79314.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:14:53,789 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:22,593 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79351.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:15:26,816 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2330, 4.3051, 4.7936, 4.7946, 2.7320, 4.4857, 4.0948, 4.5096], + device='cuda:3'), covar=tensor([0.1188, 0.2080, 0.0500, 0.0535, 0.3876, 0.0738, 0.0528, 0.0956], + device='cuda:3'), in_proj_covar=tensor([0.0689, 0.0619, 0.0816, 0.0695, 0.0741, 0.0573, 0.0497, 0.0753], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 22:15:31,208 INFO [train.py:903] (3/4) Epoch 12, batch 4250, loss[loss=0.2126, simple_loss=0.2929, pruned_loss=0.06618, over 19844.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07834, over 3777012.00 frames. ], batch size: 52, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:15:43,240 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-01 22:15:52,358 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 4.925e+02 6.557e+02 8.003e+02 1.515e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-01 22:15:54,777 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-01 22:16:00,656 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79382.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:16:13,159 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79393.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:16:13,617 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.47 vs. limit=5.0 +2023-04-01 22:16:33,041 INFO [train.py:903] (3/4) Epoch 12, batch 4300, loss[loss=0.2398, simple_loss=0.3144, pruned_loss=0.08265, over 19673.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3055, pruned_loss=0.07866, over 3779038.25 frames. 
], batch size: 55, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:16:43,555 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9031, 1.3492, 1.0506, 0.9704, 1.1901, 0.9569, 0.8060, 1.2646], + device='cuda:3'), covar=tensor([0.0518, 0.0699, 0.1002, 0.0553, 0.0476, 0.1096, 0.0578, 0.0410], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0299, 0.0324, 0.0245, 0.0235, 0.0318, 0.0287, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:17:07,850 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3079, 1.3190, 1.4055, 1.4350, 1.7326, 1.8506, 1.7300, 0.5117], + device='cuda:3'), covar=tensor([0.2027, 0.3537, 0.2213, 0.1640, 0.1316, 0.1799, 0.1199, 0.3747], + device='cuda:3'), in_proj_covar=tensor([0.0493, 0.0580, 0.0611, 0.0439, 0.0595, 0.0495, 0.0648, 0.0495], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 22:17:14,353 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79442.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:17:24,321 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-01 22:17:32,590 INFO [train.py:903] (3/4) Epoch 12, batch 4350, loss[loss=0.23, simple_loss=0.3144, pruned_loss=0.07286, over 19672.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3059, pruned_loss=0.07851, over 3796500.12 frames. ], batch size: 59, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:17:54,382 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.719e+02 5.544e+02 6.866e+02 8.754e+02 2.036e+03, threshold=1.373e+03, percent-clipped=4.0 +2023-04-01 22:17:59,265 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-01 22:18:34,726 INFO [train.py:903] (3/4) Epoch 12, batch 4400, loss[loss=0.1874, simple_loss=0.2687, pruned_loss=0.05307, over 19622.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3056, pruned_loss=0.07867, over 3803665.09 frames. ], batch size: 50, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:18:45,484 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3844, 2.1860, 1.6395, 1.3287, 2.0694, 1.1472, 1.2648, 1.8013], + device='cuda:3'), covar=tensor([0.1059, 0.0741, 0.1044, 0.0860, 0.0541, 0.1186, 0.0731, 0.0444], + device='cuda:3'), in_proj_covar=tensor([0.0290, 0.0298, 0.0322, 0.0244, 0.0234, 0.0315, 0.0286, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:18:58,953 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-01 22:19:07,294 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-01 22:19:38,010 INFO [train.py:903] (3/4) Epoch 12, batch 4450, loss[loss=0.2594, simple_loss=0.3315, pruned_loss=0.09366, over 18179.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3053, pruned_loss=0.07865, over 3797042.49 frames. 
], batch size: 83, lr: 6.90e-03, grad_scale: 8.0 +2023-04-01 22:20:00,031 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.129e+02 5.261e+02 6.909e+02 8.531e+02 1.990e+03, threshold=1.382e+03, percent-clipped=5.0 +2023-04-01 22:20:33,620 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79603.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:20:41,077 INFO [train.py:903] (3/4) Epoch 12, batch 4500, loss[loss=0.2145, simple_loss=0.2894, pruned_loss=0.0698, over 19515.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3042, pruned_loss=0.07757, over 3810283.22 frames. ], batch size: 49, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:20:51,332 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-01 22:21:29,751 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79647.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:21:43,339 INFO [train.py:903] (3/4) Epoch 12, batch 4550, loss[loss=0.2333, simple_loss=0.3109, pruned_loss=0.07783, over 19344.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3033, pruned_loss=0.07719, over 3821794.16 frames. ], batch size: 66, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:21:49,802 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. limit=5.0 +2023-04-01 22:21:52,477 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-01 22:22:04,139 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79675.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:04,980 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.458e+02 6.277e+02 7.572e+02 1.495e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-01 22:22:11,625 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0476, 4.4487, 4.8028, 4.7441, 1.7414, 4.3738, 3.8655, 4.4472], + device='cuda:3'), covar=tensor([0.1363, 0.0624, 0.0458, 0.0517, 0.4903, 0.0568, 0.0545, 0.0887], + device='cuda:3'), in_proj_covar=tensor([0.0686, 0.0617, 0.0812, 0.0691, 0.0732, 0.0565, 0.0495, 0.0745], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 22:22:15,958 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-01 22:22:29,773 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79695.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:29,915 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9895, 4.3728, 4.7023, 4.6790, 1.6735, 4.3538, 3.8108, 4.3146], + device='cuda:3'), covar=tensor([0.1355, 0.0829, 0.0487, 0.0500, 0.5325, 0.0688, 0.0597, 0.0984], + device='cuda:3'), in_proj_covar=tensor([0.0686, 0.0616, 0.0812, 0.0691, 0.0732, 0.0564, 0.0494, 0.0745], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 22:22:33,619 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79698.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:22:44,775 INFO [train.py:903] (3/4) Epoch 12, batch 4600, loss[loss=0.2231, simple_loss=0.2877, pruned_loss=0.07926, over 19752.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3048, pruned_loss=0.07804, over 3816960.01 frames. 
], batch size: 45, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:04,296 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79723.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:07,650 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79726.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:23:21,793 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79737.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:23:46,651 INFO [train.py:903] (3/4) Epoch 12, batch 4650, loss[loss=0.2151, simple_loss=0.2855, pruned_loss=0.07228, over 19408.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3055, pruned_loss=0.07915, over 3809192.33 frames. ], batch size: 48, lr: 6.89e-03, grad_scale: 8.0 +2023-04-01 22:23:49,918 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-04-01 22:23:52,622 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79762.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:24:06,322 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-01 22:24:09,608 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 5.614e+02 7.010e+02 8.934e+02 1.991e+03, threshold=1.402e+03, percent-clipped=7.0 +2023-04-01 22:24:14,690 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79780.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:24:15,619 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-01 22:24:48,914 INFO [train.py:903] (3/4) Epoch 12, batch 4700, loss[loss=0.2363, simple_loss=0.3066, pruned_loss=0.08304, over 19727.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3055, pruned_loss=0.07923, over 3816793.24 frames. ], batch size: 51, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:24:52,488 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79810.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:12,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-01 22:25:30,342 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79841.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:25:45,568 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79852.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:25:52,118 INFO [train.py:903] (3/4) Epoch 12, batch 4750, loss[loss=0.2435, simple_loss=0.3052, pruned_loss=0.09094, over 19590.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3071, pruned_loss=0.07979, over 3825679.52 frames. ], batch size: 52, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:14,213 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.848e+02 5.173e+02 6.366e+02 7.623e+02 1.625e+03, threshold=1.273e+03, percent-clipped=2.0 +2023-04-01 22:26:54,248 INFO [train.py:903] (3/4) Epoch 12, batch 4800, loss[loss=0.1983, simple_loss=0.2701, pruned_loss=0.06332, over 19747.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3066, pruned_loss=0.07963, over 3832430.10 frames. 
], batch size: 46, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:26:58,977 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4230, 1.1289, 1.2525, 2.0653, 1.5427, 1.6150, 1.7988, 1.4320], + device='cuda:3'), covar=tensor([0.0926, 0.1259, 0.1200, 0.0682, 0.0900, 0.0913, 0.0889, 0.0852], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0226, 0.0224, 0.0247, 0.0237, 0.0213, 0.0194, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:27:42,679 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79947.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:27:55,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8376, 4.3341, 4.5983, 4.5959, 1.7379, 4.2962, 3.7221, 4.2196], + device='cuda:3'), covar=tensor([0.1496, 0.0657, 0.0498, 0.0555, 0.5301, 0.0558, 0.0611, 0.1044], + device='cuda:3'), in_proj_covar=tensor([0.0679, 0.0615, 0.0805, 0.0688, 0.0726, 0.0558, 0.0487, 0.0739], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 22:27:56,279 INFO [train.py:903] (3/4) Epoch 12, batch 4850, loss[loss=0.1982, simple_loss=0.2815, pruned_loss=0.05751, over 19484.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3058, pruned_loss=0.0789, over 3830125.95 frames. ], batch size: 49, lr: 6.88e-03, grad_scale: 16.0 +2023-04-01 22:28:19,174 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 5.432e+02 6.685e+02 9.186e+02 1.976e+03, threshold=1.337e+03, percent-clipped=11.0 +2023-04-01 22:28:21,572 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0193, 2.4011, 2.4697, 2.9310, 2.9446, 2.4249, 2.3619, 2.8100], + device='cuda:3'), covar=tensor([0.0638, 0.1523, 0.1184, 0.0824, 0.1065, 0.0426, 0.1077, 0.0520], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0351, 0.0293, 0.0240, 0.0298, 0.0242, 0.0281, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:28:23,589 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-01 22:28:43,800 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-01 22:28:48,489 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-01 22:28:51,011 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-01 22:28:59,976 INFO [train.py:903] (3/4) Epoch 12, batch 4900, loss[loss=0.2529, simple_loss=0.3314, pruned_loss=0.0872, over 19291.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3057, pruned_loss=0.07883, over 3820295.22 frames. ], batch size: 66, lr: 6.88e-03, grad_scale: 8.0 +2023-04-01 22:29:02,317 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-01 22:29:13,772 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80018.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:29:14,545 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80019.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:29:21,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-01 22:29:43,741 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80043.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:30:03,743 INFO [train.py:903] (3/4) Epoch 12, batch 4950, loss[loss=0.2328, simple_loss=0.31, pruned_loss=0.07778, over 19769.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3056, pruned_loss=0.07844, over 3820281.25 frames. ], batch size: 54, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:30:08,655 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80062.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:13,660 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80066.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:20,436 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-01 22:30:26,813 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.263e+02 6.788e+02 8.958e+02 2.034e+03, threshold=1.358e+03, percent-clipped=5.0 +2023-04-01 22:30:38,272 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9679, 2.7287, 1.9869, 1.9034, 1.5505, 2.1825, 0.8690, 1.8686], + device='cuda:3'), covar=tensor([0.0716, 0.0649, 0.0659, 0.1169, 0.1268, 0.1140, 0.1198, 0.1082], + device='cuda:3'), in_proj_covar=tensor([0.0342, 0.0328, 0.0331, 0.0357, 0.0430, 0.0356, 0.0312, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:30:44,589 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80091.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:30:45,317 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-01 22:30:52,419 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80097.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:04,646 INFO [train.py:903] (3/4) Epoch 12, batch 5000, loss[loss=0.2053, simple_loss=0.2937, pruned_loss=0.05844, over 19528.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3052, pruned_loss=0.07848, over 3816089.81 frames. ], batch size: 54, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:31:05,128 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80108.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 22:31:13,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-01 22:31:22,111 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:24,071 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80124.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:25,169 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-01 22:31:37,121 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80133.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 22:31:38,229 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:31:46,984 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9341, 1.1647, 1.6016, 0.5274, 2.1233, 2.4805, 2.1184, 2.5897], + device='cuda:3'), covar=tensor([0.1492, 0.3386, 0.2869, 0.2284, 0.0475, 0.0228, 0.0337, 0.0296], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0299, 0.0330, 0.0252, 0.0217, 0.0161, 0.0207, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:32:06,836 INFO [train.py:903] (3/4) Epoch 12, batch 5050, loss[loss=0.2151, simple_loss=0.2757, pruned_loss=0.07728, over 16855.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3053, pruned_loss=0.07848, over 3814913.89 frames. ], batch size: 37, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:32:30,897 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 5.736e+02 7.301e+02 9.465e+02 2.500e+03, threshold=1.460e+03, percent-clipped=5.0 +2023-04-01 22:32:41,347 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-01 22:33:08,748 INFO [train.py:903] (3/4) Epoch 12, batch 5100, loss[loss=0.1822, simple_loss=0.2575, pruned_loss=0.05347, over 19158.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3047, pruned_loss=0.07829, over 3805735.56 frames. ], batch size: 42, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:33:21,051 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-01 22:33:23,173 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-01 22:33:26,511 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-01 22:33:36,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5797, 1.3250, 1.3422, 1.8754, 1.4387, 1.8656, 1.8859, 1.6124], + device='cuda:3'), covar=tensor([0.0849, 0.1033, 0.1097, 0.0839, 0.0903, 0.0677, 0.0836, 0.0715], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0226, 0.0224, 0.0245, 0.0235, 0.0212, 0.0195, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:33:47,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80239.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:34:11,775 INFO [train.py:903] (3/4) Epoch 12, batch 5150, loss[loss=0.2416, simple_loss=0.307, pruned_loss=0.08813, over 19608.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3049, pruned_loss=0.07841, over 3810263.74 frames. ], batch size: 50, lr: 6.87e-03, grad_scale: 8.0 +2023-04-01 22:34:23,651 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 22:34:34,579 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.377e+02 5.205e+02 6.062e+02 7.794e+02 1.645e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-01 22:34:51,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8613, 4.3567, 2.6673, 3.7434, 0.9766, 4.2372, 4.1677, 4.3623], + device='cuda:3'), covar=tensor([0.0576, 0.1114, 0.1912, 0.0771, 0.3969, 0.0655, 0.0781, 0.0928], + device='cuda:3'), in_proj_covar=tensor([0.0438, 0.0370, 0.0439, 0.0315, 0.0377, 0.0371, 0.0362, 0.0393], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:34:59,513 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 22:35:00,953 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5200, 2.2965, 1.6660, 1.5339, 2.1135, 1.2511, 1.3526, 1.8244], + device='cuda:3'), covar=tensor([0.1033, 0.0659, 0.0976, 0.0735, 0.0471, 0.1145, 0.0722, 0.0462], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0302, 0.0328, 0.0247, 0.0239, 0.0319, 0.0288, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:35:05,768 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80301.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:13,723 INFO [train.py:903] (3/4) Epoch 12, batch 5200, loss[loss=0.1978, simple_loss=0.2743, pruned_loss=0.06062, over 19749.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3057, pruned_loss=0.07869, over 3809746.36 frames. ], batch size: 51, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:35:24,160 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3067, 2.9773, 2.0926, 2.7137, 0.7301, 2.8529, 2.7897, 2.8801], + device='cuda:3'), covar=tensor([0.1052, 0.1281, 0.2158, 0.1002, 0.3883, 0.1065, 0.1102, 0.1299], + device='cuda:3'), in_proj_covar=tensor([0.0438, 0.0369, 0.0438, 0.0315, 0.0376, 0.0371, 0.0362, 0.0392], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:35:26,523 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80318.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:35:27,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-01 22:35:41,991 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.56 vs. 
limit=5.0 +2023-04-01 22:35:43,944 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2751, 1.3567, 1.6107, 1.4860, 2.2045, 1.8517, 2.1843, 0.9643], + device='cuda:3'), covar=tensor([0.2412, 0.3972, 0.2345, 0.2002, 0.1447, 0.2235, 0.1433, 0.3876], + device='cuda:3'), in_proj_covar=tensor([0.0492, 0.0583, 0.0609, 0.0440, 0.0594, 0.0497, 0.0647, 0.0497], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 22:35:58,873 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80343.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:36:03,605 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6820, 1.5678, 1.5088, 2.1751, 1.8060, 2.0171, 2.0625, 1.8451], + device='cuda:3'), covar=tensor([0.0769, 0.0887, 0.0945, 0.0760, 0.0778, 0.0681, 0.0831, 0.0632], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0224, 0.0222, 0.0243, 0.0233, 0.0211, 0.0193, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:36:13,579 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-01 22:36:16,616 INFO [train.py:903] (3/4) Epoch 12, batch 5250, loss[loss=0.2755, simple_loss=0.3371, pruned_loss=0.107, over 18166.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.305, pruned_loss=0.07802, over 3819485.48 frames. ], batch size: 83, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:36:41,380 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.664e+02 5.603e+02 6.465e+02 8.351e+02 1.434e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-01 22:36:57,028 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80390.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:18,497 INFO [train.py:903] (3/4) Epoch 12, batch 5300, loss[loss=0.1746, simple_loss=0.2473, pruned_loss=0.05095, over 19722.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3038, pruned_loss=0.07708, over 3832772.78 frames. ], batch size: 45, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:37:22,973 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80411.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:28,765 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80415.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:37:42,163 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-01 22:38:12,125 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9732, 1.9301, 1.9367, 1.7893, 4.4667, 0.9806, 2.5609, 4.8215], + device='cuda:3'), covar=tensor([0.0380, 0.2409, 0.2520, 0.1671, 0.0688, 0.2654, 0.1260, 0.0188], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0342, 0.0357, 0.0321, 0.0349, 0.0332, 0.0339, 0.0361], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:38:23,298 INFO [train.py:903] (3/4) Epoch 12, batch 5350, loss[loss=0.2383, simple_loss=0.3176, pruned_loss=0.07954, over 19546.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3044, pruned_loss=0.0776, over 3806207.08 frames. 
], batch size: 56, lr: 6.86e-03, grad_scale: 8.0 +2023-04-01 22:38:44,527 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.085e+02 6.728e+02 8.660e+02 2.071e+03, threshold=1.346e+03, percent-clipped=4.0 +2023-04-01 22:38:50,329 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80481.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:38:52,618 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3527, 1.3733, 1.8522, 1.7501, 3.1116, 4.6183, 4.4950, 5.0495], + device='cuda:3'), covar=tensor([0.1613, 0.3579, 0.3085, 0.1863, 0.0495, 0.0137, 0.0152, 0.0146], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0297, 0.0329, 0.0252, 0.0218, 0.0161, 0.0207, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:38:59,007 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-01 22:39:08,265 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80495.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:23,540 INFO [train.py:903] (3/4) Epoch 12, batch 5400, loss[loss=0.262, simple_loss=0.3335, pruned_loss=0.09525, over 18763.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3049, pruned_loss=0.07799, over 3820265.81 frames. ], batch size: 74, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:39:28,318 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80512.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:39:38,457 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80520.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:40:24,797 INFO [train.py:903] (3/4) Epoch 12, batch 5450, loss[loss=0.1699, simple_loss=0.2472, pruned_loss=0.04628, over 19007.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3037, pruned_loss=0.07757, over 3808494.92 frames. ], batch size: 42, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:40:49,073 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 5.165e+02 6.319e+02 8.444e+02 1.726e+03, threshold=1.264e+03, percent-clipped=5.0 +2023-04-01 22:41:18,911 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-01 22:41:26,464 INFO [train.py:903] (3/4) Epoch 12, batch 5500, loss[loss=0.1916, simple_loss=0.268, pruned_loss=0.05758, over 19773.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3047, pruned_loss=0.07816, over 3805408.51 frames. ], batch size: 47, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:41:53,700 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-01 22:42:12,382 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80645.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:42:29,081 INFO [train.py:903] (3/4) Epoch 12, batch 5550, loss[loss=0.2041, simple_loss=0.2995, pruned_loss=0.05441, over 19675.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3047, pruned_loss=0.07791, over 3803252.21 frames. ], batch size: 55, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:42:38,039 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-01 22:42:51,884 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.910e+02 5.291e+02 6.725e+02 8.423e+02 1.958e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-01 22:43:28,251 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-01 22:43:31,864 INFO [train.py:903] (3/4) Epoch 12, batch 5600, loss[loss=0.2206, simple_loss=0.2873, pruned_loss=0.07699, over 19735.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3045, pruned_loss=0.07771, over 3812379.96 frames. ], batch size: 45, lr: 6.85e-03, grad_scale: 8.0 +2023-04-01 22:43:32,234 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4871, 1.2734, 1.2557, 1.8979, 1.5861, 1.7673, 1.9653, 1.6637], + device='cuda:3'), covar=tensor([0.0882, 0.1043, 0.1111, 0.0866, 0.0886, 0.0799, 0.0828, 0.0692], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0225, 0.0221, 0.0244, 0.0234, 0.0210, 0.0193, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-01 22:44:23,295 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5436, 2.1384, 2.1898, 2.7368, 2.2859, 2.0815, 2.0171, 2.6235], + device='cuda:3'), covar=tensor([0.0821, 0.1645, 0.1267, 0.0848, 0.1291, 0.0464, 0.1188, 0.0611], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0351, 0.0293, 0.0238, 0.0298, 0.0240, 0.0281, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:44:29,105 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80754.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:30,113 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80755.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:33,519 INFO [train.py:903] (3/4) Epoch 12, batch 5650, loss[loss=0.232, simple_loss=0.3087, pruned_loss=0.07764, over 19472.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3047, pruned_loss=0.0777, over 3815287.31 frames. 
], batch size: 64, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:44:36,186 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80760.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:44:42,834 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2053, 1.2676, 1.7940, 1.2080, 2.5538, 3.6068, 3.4027, 3.8809], + device='cuda:3'), covar=tensor([0.1529, 0.3481, 0.2866, 0.2092, 0.0578, 0.0157, 0.0204, 0.0188], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0301, 0.0332, 0.0254, 0.0221, 0.0163, 0.0209, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:44:57,742 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.237e+02 6.303e+02 7.862e+02 2.175e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-01 22:45:00,185 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7069, 1.8678, 2.2552, 1.9214, 2.8073, 3.2839, 3.2397, 3.4860], + device='cuda:3'), covar=tensor([0.1330, 0.2728, 0.2390, 0.1784, 0.1253, 0.0305, 0.0179, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0300, 0.0331, 0.0253, 0.0220, 0.0162, 0.0208, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:45:15,141 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80791.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:45:20,712 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-01 22:45:35,186 INFO [train.py:903] (3/4) Epoch 12, batch 5700, loss[loss=0.2244, simple_loss=0.3082, pruned_loss=0.07036, over 19349.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3062, pruned_loss=0.07882, over 3797037.89 frames. ], batch size: 66, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:45:57,693 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80825.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:12,918 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2408, 1.3100, 1.2470, 1.0796, 1.0685, 1.0667, 0.0267, 0.3119], + device='cuda:3'), covar=tensor([0.0403, 0.0429, 0.0280, 0.0388, 0.0820, 0.0433, 0.0838, 0.0766], + device='cuda:3'), in_proj_covar=tensor([0.0340, 0.0329, 0.0331, 0.0355, 0.0427, 0.0357, 0.0311, 0.0323], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 22:46:35,835 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80856.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:46:38,758 INFO [train.py:903] (3/4) Epoch 12, batch 5750, loss[loss=0.2573, simple_loss=0.3253, pruned_loss=0.09464, over 13712.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3058, pruned_loss=0.07875, over 3799457.33 frames. ], batch size: 137, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:46:39,968 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-01 22:46:47,872 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-01 22:46:52,475 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-01 22:46:52,811 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80870.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:47:00,404 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.415e+02 6.686e+02 8.336e+02 1.819e+03, threshold=1.337e+03, percent-clipped=1.0 +2023-04-01 22:47:40,332 INFO [train.py:903] (3/4) Epoch 12, batch 5800, loss[loss=0.2532, simple_loss=0.3179, pruned_loss=0.09422, over 13263.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3071, pruned_loss=0.07925, over 3797536.39 frames. ], batch size: 136, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:21,634 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80940.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:48:41,888 INFO [train.py:903] (3/4) Epoch 12, batch 5850, loss[loss=0.2449, simple_loss=0.3219, pruned_loss=0.08392, over 18115.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3074, pruned_loss=0.0796, over 3804801.99 frames. ], batch size: 83, lr: 6.84e-03, grad_scale: 8.0 +2023-04-01 22:48:58,818 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80971.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:06,076 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.387e+02 6.409e+02 7.183e+02 1.679e+03, threshold=1.282e+03, percent-clipped=1.0 +2023-04-01 22:49:17,135 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-01 22:49:43,638 INFO [train.py:903] (3/4) Epoch 12, batch 5900, loss[loss=0.1919, simple_loss=0.2634, pruned_loss=0.0602, over 19223.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3068, pruned_loss=0.0797, over 3805538.40 frames. ], batch size: 42, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:49:47,140 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-01 22:49:55,127 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81016.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:49:57,126 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3359, 1.2776, 1.4213, 1.6068, 2.8963, 1.1238, 2.1084, 3.2698], + device='cuda:3'), covar=tensor([0.0515, 0.2634, 0.2707, 0.1683, 0.0726, 0.2330, 0.1301, 0.0301], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0341, 0.0355, 0.0323, 0.0347, 0.0332, 0.0337, 0.0361], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:50:09,763 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-01 22:50:25,016 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81041.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:50:47,171 INFO [train.py:903] (3/4) Epoch 12, batch 5950, loss[loss=0.2097, simple_loss=0.2754, pruned_loss=0.07199, over 19723.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07892, over 3803773.80 frames. 
], batch size: 46, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:10,059 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.377e+02 6.760e+02 8.757e+02 1.989e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-01 22:51:37,910 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81098.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:51:49,672 INFO [train.py:903] (3/4) Epoch 12, batch 6000, loss[loss=0.2063, simple_loss=0.2921, pruned_loss=0.0602, over 19669.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.304, pruned_loss=0.07771, over 3821241.12 frames. ], batch size: 58, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:51:49,673 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 22:52:03,355 INFO [train.py:937] (3/4) Epoch 12, validation: loss=0.1765, simple_loss=0.2774, pruned_loss=0.03779, over 944034.00 frames. +2023-04-01 22:52:03,357 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 22:52:25,548 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81126.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:34,799 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81134.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:35,862 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81135.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:52:57,839 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81151.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:53:05,463 INFO [train.py:903] (3/4) Epoch 12, batch 6050, loss[loss=0.1876, simple_loss=0.2604, pruned_loss=0.0574, over 19751.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3052, pruned_loss=0.0784, over 3803413.38 frames. ], batch size: 46, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:53:27,715 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.041e+02 6.677e+02 8.260e+02 1.738e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-01 22:53:33,661 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0447, 3.6757, 2.1080, 2.2768, 3.2345, 1.7077, 1.4095, 2.0138], + device='cuda:3'), covar=tensor([0.1209, 0.0467, 0.0990, 0.0703, 0.0433, 0.1140, 0.0911, 0.0716], + device='cuda:3'), in_proj_covar=tensor([0.0290, 0.0303, 0.0325, 0.0246, 0.0240, 0.0320, 0.0282, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:53:52,478 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81196.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:05,933 INFO [train.py:903] (3/4) Epoch 12, batch 6100, loss[loss=0.2469, simple_loss=0.3213, pruned_loss=0.08627, over 19548.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3063, pruned_loss=0.07893, over 3801005.24 frames. 
], batch size: 61, lr: 6.83e-03, grad_scale: 8.0 +2023-04-01 22:54:11,841 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81213.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:21,001 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81221.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:29,621 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81227.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:54:57,841 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81250.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:00,291 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81252.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:55:06,760 INFO [train.py:903] (3/4) Epoch 12, batch 6150, loss[loss=0.2421, simple_loss=0.3156, pruned_loss=0.08433, over 18911.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.307, pruned_loss=0.07955, over 3790405.70 frames. ], batch size: 74, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:55:33,022 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 5.279e+02 6.402e+02 8.020e+02 2.167e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-04-01 22:55:39,189 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-01 22:56:12,038 INFO [train.py:903] (3/4) Epoch 12, batch 6200, loss[loss=0.2289, simple_loss=0.3092, pruned_loss=0.07423, over 19663.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3056, pruned_loss=0.07875, over 3797326.99 frames. ], batch size: 55, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:56:22,891 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81317.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:56:25,179 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81319.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:57:11,779 INFO [train.py:903] (3/4) Epoch 12, batch 6250, loss[loss=0.297, simple_loss=0.351, pruned_loss=0.1215, over 13380.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3045, pruned_loss=0.07798, over 3807243.31 frames. ], batch size: 136, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:57:13,551 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-01 22:57:33,897 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.420e+02 5.051e+02 6.163e+02 7.297e+02 1.401e+03, threshold=1.233e+03, percent-clipped=3.0 +2023-04-01 22:57:43,147 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-01 22:57:53,407 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9624, 3.3370, 1.9250, 2.0716, 3.0084, 1.6345, 1.3215, 1.9404], + device='cuda:3'), covar=tensor([0.1345, 0.0567, 0.1024, 0.0718, 0.0503, 0.1131, 0.0977, 0.0739], + device='cuda:3'), in_proj_covar=tensor([0.0288, 0.0301, 0.0323, 0.0246, 0.0239, 0.0317, 0.0282, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 22:58:13,185 INFO [train.py:903] (3/4) Epoch 12, batch 6300, loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06621, over 18830.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3039, pruned_loss=0.07759, over 3804356.45 frames. 
], batch size: 74, lr: 6.82e-03, grad_scale: 8.0 +2023-04-01 22:59:14,518 INFO [train.py:903] (3/4) Epoch 12, batch 6350, loss[loss=0.2089, simple_loss=0.2906, pruned_loss=0.06362, over 19686.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3037, pruned_loss=0.07744, over 3808718.77 frames. ], batch size: 53, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 22:59:29,283 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81469.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:39,340 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.788e+02 5.412e+02 6.997e+02 8.497e+02 1.750e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-04-01 22:59:40,737 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81478.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 22:59:59,525 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81494.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:14,190 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81506.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:00:16,028 INFO [train.py:903] (3/4) Epoch 12, batch 6400, loss[loss=0.2321, simple_loss=0.3098, pruned_loss=0.07723, over 19737.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3031, pruned_loss=0.07662, over 3825603.15 frames. ], batch size: 63, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:00:44,239 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8596, 1.9808, 2.1825, 2.7141, 1.8655, 2.5152, 2.3796, 1.9224], + device='cuda:3'), covar=tensor([0.3660, 0.3253, 0.1507, 0.1841, 0.3553, 0.1614, 0.3482, 0.2846], + device='cuda:3'), in_proj_covar=tensor([0.0807, 0.0830, 0.0652, 0.0892, 0.0792, 0.0719, 0.0790, 0.0718], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 23:00:45,264 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81531.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:01:05,922 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2283, 2.0999, 1.8419, 1.6164, 1.5210, 1.5580, 0.4117, 1.0640], + device='cuda:3'), covar=tensor([0.0646, 0.0607, 0.0433, 0.0788, 0.1157, 0.0963, 0.1122, 0.0976], + device='cuda:3'), in_proj_covar=tensor([0.0341, 0.0330, 0.0331, 0.0357, 0.0428, 0.0358, 0.0313, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:01:19,148 INFO [train.py:903] (3/4) Epoch 12, batch 6450, loss[loss=0.2282, simple_loss=0.3082, pruned_loss=0.07406, over 19689.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3033, pruned_loss=0.07653, over 3831651.40 frames. ], batch size: 60, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:01:41,092 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-01 23:01:41,363 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 5.839e+02 6.972e+02 8.326e+02 2.886e+03, threshold=1.394e+03, percent-clipped=3.0 +2023-04-01 23:01:48,762 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9094, 2.0321, 2.1646, 2.7588, 1.8639, 2.5599, 2.3246, 1.9660], + device='cuda:3'), covar=tensor([0.3794, 0.3284, 0.1534, 0.1927, 0.3739, 0.1630, 0.3767, 0.2879], + device='cuda:3'), in_proj_covar=tensor([0.0805, 0.0828, 0.0652, 0.0890, 0.0792, 0.0718, 0.0790, 0.0718], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 23:02:03,372 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81593.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:02:06,520 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-01 23:02:20,376 INFO [train.py:903] (3/4) Epoch 12, batch 6500, loss[loss=0.2151, simple_loss=0.294, pruned_loss=0.06809, over 19680.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3045, pruned_loss=0.07708, over 3824390.47 frames. ], batch size: 53, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:02:27,386 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-01 23:03:22,583 INFO [train.py:903] (3/4) Epoch 12, batch 6550, loss[loss=0.2223, simple_loss=0.289, pruned_loss=0.07777, over 19754.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.304, pruned_loss=0.0771, over 3810874.72 frames. ], batch size: 47, lr: 6.81e-03, grad_scale: 8.0 +2023-04-01 23:03:26,202 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81661.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:28,614 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81663.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:03:47,187 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 5.338e+02 6.617e+02 7.892e+02 1.534e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-01 23:04:00,866 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81688.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:04:24,219 INFO [train.py:903] (3/4) Epoch 12, batch 6600, loss[loss=0.2338, simple_loss=0.3125, pruned_loss=0.07749, over 19657.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3046, pruned_loss=0.07714, over 3813170.93 frames. ], batch size: 58, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:26,098 INFO [train.py:903] (3/4) Epoch 12, batch 6650, loss[loss=0.2424, simple_loss=0.3174, pruned_loss=0.0837, over 17314.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.304, pruned_loss=0.07717, over 3807415.06 frames. 
], batch size: 102, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:05:46,867 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81776.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:05:47,580 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.145e+02 6.461e+02 8.134e+02 1.737e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-01 23:05:49,147 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:06:26,737 INFO [train.py:903] (3/4) Epoch 12, batch 6700, loss[loss=0.236, simple_loss=0.3144, pruned_loss=0.07876, over 19531.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3032, pruned_loss=0.07673, over 3817300.59 frames. ], batch size: 54, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:07:03,324 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-01 23:07:16,990 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81849.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:26,530 INFO [train.py:903] (3/4) Epoch 12, batch 6750, loss[loss=0.2121, simple_loss=0.2879, pruned_loss=0.06815, over 19476.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3043, pruned_loss=0.07777, over 3826633.79 frames. ], batch size: 49, lr: 6.80e-03, grad_scale: 4.0 +2023-04-01 23:07:45,313 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81874.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:07:49,447 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+02 6.276e+02 7.333e+02 1.082e+03 2.540e+03, threshold=1.467e+03, percent-clipped=11.0 +2023-04-01 23:08:23,233 INFO [train.py:903] (3/4) Epoch 12, batch 6800, loss[loss=0.1942, simple_loss=0.2734, pruned_loss=0.05744, over 19746.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3033, pruned_loss=0.07722, over 3832904.86 frames. ], batch size: 47, lr: 6.80e-03, grad_scale: 8.0 +2023-04-01 23:08:40,542 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5702, 1.5626, 1.4440, 1.3454, 1.2405, 1.3866, 0.5558, 0.9446], + device='cuda:3'), covar=tensor([0.0392, 0.0413, 0.0255, 0.0409, 0.0637, 0.0487, 0.0804, 0.0614], + device='cuda:3'), in_proj_covar=tensor([0.0343, 0.0332, 0.0330, 0.0358, 0.0429, 0.0357, 0.0313, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:09:08,242 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-01 23:09:09,381 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-01 23:09:12,676 INFO [train.py:903] (3/4) Epoch 13, batch 0, loss[loss=0.2206, simple_loss=0.3002, pruned_loss=0.07048, over 19526.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3002, pruned_loss=0.07048, over 19526.00 frames. ], batch size: 54, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:09:12,677 INFO [train.py:928] (3/4) Computing validation loss +2023-04-01 23:09:23,574 INFO [train.py:937] (3/4) Epoch 13, validation: loss=0.176, simple_loss=0.2772, pruned_loss=0.03738, over 944034.00 frames. +2023-04-01 23:09:23,576 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-01 23:09:35,422 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-01 23:10:14,567 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.222e+02 6.740e+02 8.452e+02 3.268e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-01 23:10:23,826 INFO [train.py:903] (3/4) Epoch 13, batch 50, loss[loss=0.2226, simple_loss=0.2886, pruned_loss=0.07831, over 19762.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3075, pruned_loss=0.07794, over 864392.78 frames. ], batch size: 47, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:10:46,566 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:10:59,198 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-01 23:11:20,790 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:21,027 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82032.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:23,327 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82034.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:25,070 INFO [train.py:903] (3/4) Epoch 13, batch 100, loss[loss=0.2464, simple_loss=0.3197, pruned_loss=0.08658, over 19669.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3017, pruned_loss=0.07509, over 1532905.27 frames. ], batch size: 55, lr: 6.53e-03, grad_scale: 8.0 +2023-04-01 23:11:36,613 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-01 23:11:52,562 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82057.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:11:55,601 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82059.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:11,459 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82073.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:12:16,817 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.805e+02 6.218e+02 7.513e+02 1.266e+03, threshold=1.244e+03, percent-clipped=0.0 +2023-04-01 23:12:25,769 INFO [train.py:903] (3/4) Epoch 13, batch 150, loss[loss=0.1887, simple_loss=0.2592, pruned_loss=0.05907, over 19775.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3011, pruned_loss=0.07562, over 2032357.92 frames. ], batch size: 45, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:23,609 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-01 23:13:24,755 INFO [train.py:903] (3/4) Epoch 13, batch 200, loss[loss=0.2318, simple_loss=0.3132, pruned_loss=0.07521, over 18217.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.303, pruned_loss=0.07632, over 2438582.06 frames. ], batch size: 83, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:13:25,537 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.73 vs. limit=5.0 +2023-04-01 23:13:37,355 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-01 23:13:40,477 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82147.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:14:14,403 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.002e+02 5.972e+02 7.403e+02 2.257e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-01 23:14:26,891 INFO [train.py:903] (3/4) Epoch 13, batch 250, loss[loss=0.1796, simple_loss=0.2649, pruned_loss=0.04714, over 19477.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3039, pruned_loss=0.07683, over 2747718.97 frames. ], batch size: 49, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:26,892 INFO [train.py:903] (3/4) Epoch 13, batch 300, loss[loss=0.289, simple_loss=0.3458, pruned_loss=0.1161, over 13257.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3044, pruned_loss=0.07734, over 2984402.68 frames. ], batch size: 135, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:15:48,493 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-01 23:16:18,769 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.753e+02 6.815e+02 9.164e+02 1.837e+03, threshold=1.363e+03, percent-clipped=5.0 +2023-04-01 23:16:28,135 INFO [train.py:903] (3/4) Epoch 13, batch 350, loss[loss=0.2243, simple_loss=0.3057, pruned_loss=0.07143, over 19471.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3032, pruned_loss=0.07658, over 3152904.97 frames. ], batch size: 64, lr: 6.52e-03, grad_scale: 8.0 +2023-04-01 23:16:30,461 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-01 23:17:28,520 INFO [train.py:903] (3/4) Epoch 13, batch 400, loss[loss=0.2252, simple_loss=0.2962, pruned_loss=0.07714, over 19466.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3036, pruned_loss=0.07748, over 3312721.57 frames. ], batch size: 49, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:17:47,110 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82349.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:04,879 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82364.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:18:21,974 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.325e+02 6.166e+02 7.720e+02 2.046e+03, threshold=1.233e+03, percent-clipped=4.0 +2023-04-01 23:18:26,264 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-01 23:18:31,259 INFO [train.py:903] (3/4) Epoch 13, batch 450, loss[loss=0.2463, simple_loss=0.3335, pruned_loss=0.07959, over 19316.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3038, pruned_loss=0.07795, over 3424431.00 frames. 
], batch size: 70, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:18:53,570 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82403.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:01,701 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8286, 4.3559, 2.8024, 3.8200, 1.0889, 4.2134, 4.1897, 4.3078], + device='cuda:3'), covar=tensor([0.0621, 0.1116, 0.1912, 0.0807, 0.4075, 0.0739, 0.0734, 0.0988], + device='cuda:3'), in_proj_covar=tensor([0.0444, 0.0374, 0.0445, 0.0321, 0.0381, 0.0375, 0.0369, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:19:04,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-01 23:19:04,702 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-01 23:19:09,597 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82417.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:23,702 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82428.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:19:33,812 INFO [train.py:903] (3/4) Epoch 13, batch 500, loss[loss=0.184, simple_loss=0.2561, pruned_loss=0.05598, over 19749.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3034, pruned_loss=0.07726, over 3514690.11 frames. ], batch size: 45, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:20:06,493 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82464.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:20:24,365 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8403, 4.2852, 4.5664, 4.5560, 1.5117, 4.2401, 3.7676, 4.2327], + device='cuda:3'), covar=tensor([0.1538, 0.0817, 0.0555, 0.0635, 0.5840, 0.0716, 0.0600, 0.1036], + device='cuda:3'), in_proj_covar=tensor([0.0701, 0.0621, 0.0821, 0.0706, 0.0750, 0.0573, 0.0501, 0.0755], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-01 23:20:27,377 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.153e+02 6.569e+02 8.401e+02 1.477e+03, threshold=1.314e+03, percent-clipped=3.0 +2023-04-01 23:20:35,263 INFO [train.py:903] (3/4) Epoch 13, batch 550, loss[loss=0.2289, simple_loss=0.3097, pruned_loss=0.07409, over 19535.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3037, pruned_loss=0.07731, over 3591254.93 frames. ], batch size: 56, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:21:05,922 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82511.0, num_to_drop=1, layers_to_drop={0} +2023-04-01 23:21:30,816 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82532.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:21:35,044 INFO [train.py:903] (3/4) Epoch 13, batch 600, loss[loss=0.2216, simple_loss=0.3054, pruned_loss=0.06892, over 19585.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3057, pruned_loss=0.0781, over 3638909.73 frames. ], batch size: 61, lr: 6.51e-03, grad_scale: 8.0 +2023-04-01 23:22:17,379 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-01 23:22:28,774 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.675e+02 5.313e+02 6.751e+02 8.249e+02 1.619e+03, threshold=1.350e+03, percent-clipped=3.0 +2023-04-01 23:22:36,943 INFO [train.py:903] (3/4) Epoch 13, batch 650, loss[loss=0.2432, simple_loss=0.3137, pruned_loss=0.08631, over 19600.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3052, pruned_loss=0.07816, over 3680524.53 frames. ], batch size: 61, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:22:38,468 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82587.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:23:40,983 INFO [train.py:903] (3/4) Epoch 13, batch 700, loss[loss=0.2415, simple_loss=0.3131, pruned_loss=0.08497, over 19592.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3051, pruned_loss=0.07785, over 3713870.49 frames. ], batch size: 52, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:24:01,395 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2505, 1.2775, 1.6730, 1.3326, 2.7801, 3.6436, 3.3828, 3.8311], + device='cuda:3'), covar=tensor([0.1500, 0.3514, 0.3102, 0.2084, 0.0513, 0.0156, 0.0198, 0.0194], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0302, 0.0331, 0.0255, 0.0222, 0.0163, 0.0207, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:24:36,355 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.358e+02 6.603e+02 8.553e+02 2.977e+03, threshold=1.321e+03, percent-clipped=4.0 +2023-04-01 23:24:44,574 INFO [train.py:903] (3/4) Epoch 13, batch 750, loss[loss=0.2422, simple_loss=0.3185, pruned_loss=0.08292, over 19667.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3035, pruned_loss=0.07692, over 3744971.31 frames. ], batch size: 55, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:10,151 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82708.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:28,840 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82720.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:25:47,331 INFO [train.py:903] (3/4) Epoch 13, batch 800, loss[loss=0.2117, simple_loss=0.2849, pruned_loss=0.06927, over 19377.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3035, pruned_loss=0.07695, over 3768739.37 frames. ], batch size: 47, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:25:58,347 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82745.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:26:01,433 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-01 23:26:10,312 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7152, 1.8623, 1.7095, 2.7036, 1.9154, 2.5206, 1.9609, 1.3273], + device='cuda:3'), covar=tensor([0.4527, 0.3527, 0.2305, 0.2414, 0.3834, 0.1851, 0.5153, 0.4508], + device='cuda:3'), in_proj_covar=tensor([0.0812, 0.0837, 0.0661, 0.0899, 0.0800, 0.0727, 0.0800, 0.0727], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 23:26:24,324 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0006, 1.9066, 1.6401, 1.4735, 1.3774, 1.5288, 0.4128, 0.8277], + device='cuda:3'), covar=tensor([0.0512, 0.0487, 0.0333, 0.0538, 0.1016, 0.0605, 0.0915, 0.0822], + device='cuda:3'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0359, 0.0430, 0.0357, 0.0317, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:26:42,347 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.821e+02 5.442e+02 6.436e+02 7.821e+02 1.140e+03, threshold=1.287e+03, percent-clipped=0.0 +2023-04-01 23:26:50,562 INFO [train.py:903] (3/4) Epoch 13, batch 850, loss[loss=0.1963, simple_loss=0.2688, pruned_loss=0.06188, over 19721.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07715, over 3779604.13 frames. ], batch size: 51, lr: 6.50e-03, grad_scale: 8.0 +2023-04-01 23:26:53,227 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82788.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:27,005 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82813.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:38,385 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82823.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:27:44,746 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-01 23:27:52,718 INFO [train.py:903] (3/4) Epoch 13, batch 900, loss[loss=0.2501, simple_loss=0.3214, pruned_loss=0.08936, over 19380.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3035, pruned_loss=0.07707, over 3790070.59 frames. ], batch size: 70, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:28:19,292 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82855.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:28:47,654 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.906e+02 6.958e+02 9.103e+02 2.196e+03, threshold=1.392e+03, percent-clipped=5.0 +2023-04-01 23:28:59,846 INFO [train.py:903] (3/4) Epoch 13, batch 950, loss[loss=0.2129, simple_loss=0.2932, pruned_loss=0.06629, over 19758.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3027, pruned_loss=0.07662, over 3798049.10 frames. ], batch size: 54, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:29:04,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-01 23:29:55,940 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82931.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:30:01,529 INFO [train.py:903] (3/4) Epoch 13, batch 1000, loss[loss=0.2379, simple_loss=0.312, pruned_loss=0.08192, over 19738.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3028, pruned_loss=0.07599, over 3796533.18 frames. 
], batch size: 51, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:30:44,823 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82970.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:30:52,626 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-01 23:30:54,574 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.161e+02 6.395e+02 8.326e+02 2.115e+03, threshold=1.279e+03, percent-clipped=2.0 +2023-04-01 23:30:57,031 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82981.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:31:02,726 INFO [train.py:903] (3/4) Epoch 13, batch 1050, loss[loss=0.2362, simple_loss=0.3133, pruned_loss=0.07951, over 18215.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3037, pruned_loss=0.07672, over 3786537.56 frames. ], batch size: 83, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:31:05,481 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9463, 1.2795, 1.6394, 1.0151, 2.4992, 3.2736, 2.9610, 3.4857], + device='cuda:3'), covar=tensor([0.1712, 0.3467, 0.3108, 0.2362, 0.0604, 0.0195, 0.0235, 0.0217], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0301, 0.0330, 0.0254, 0.0222, 0.0163, 0.0207, 0.0212], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:31:19,502 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0315, 1.3444, 1.6226, 0.5891, 2.0918, 2.4620, 2.1431, 2.6253], + device='cuda:3'), covar=tensor([0.1382, 0.3168, 0.2796, 0.2267, 0.0514, 0.0250, 0.0332, 0.0301], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0300, 0.0330, 0.0254, 0.0222, 0.0163, 0.0206, 0.0212], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:31:34,328 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-01 23:31:58,555 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-01 23:32:04,655 INFO [train.py:903] (3/4) Epoch 13, batch 1100, loss[loss=0.2459, simple_loss=0.32, pruned_loss=0.08589, over 19606.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3041, pruned_loss=0.07734, over 3802580.74 frames. ], batch size: 57, lr: 6.49e-03, grad_scale: 8.0 +2023-04-01 23:32:19,498 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83046.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:32:57,834 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 5.014e+02 6.117e+02 7.878e+02 1.226e+03, threshold=1.223e+03, percent-clipped=0.0 +2023-04-01 23:32:58,250 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83079.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:08,488 INFO [train.py:903] (3/4) Epoch 13, batch 1150, loss[loss=0.2442, simple_loss=0.3232, pruned_loss=0.08255, over 19526.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3039, pruned_loss=0.07719, over 3786531.00 frames. 
], batch size: 54, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:33:30,577 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83104.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:33:41,804 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1287, 1.7026, 1.6820, 2.0390, 1.9004, 1.8915, 1.7106, 1.9583], + device='cuda:3'), covar=tensor([0.0821, 0.1395, 0.1321, 0.0872, 0.1117, 0.0449, 0.1128, 0.0599], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0348, 0.0295, 0.0237, 0.0293, 0.0240, 0.0279, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:34:10,017 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7788, 4.3575, 2.6358, 3.8761, 0.8935, 4.2159, 4.1285, 4.1983], + device='cuda:3'), covar=tensor([0.0547, 0.0935, 0.2033, 0.0756, 0.4196, 0.0583, 0.0796, 0.1069], + device='cuda:3'), in_proj_covar=tensor([0.0447, 0.0377, 0.0448, 0.0323, 0.0387, 0.0379, 0.0374, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:34:10,932 INFO [train.py:903] (3/4) Epoch 13, batch 1200, loss[loss=0.2042, simple_loss=0.2883, pruned_loss=0.06009, over 19598.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3033, pruned_loss=0.07679, over 3790659.34 frames. ], batch size: 52, lr: 6.48e-03, grad_scale: 8.0 +2023-04-01 23:34:18,569 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2596, 1.3791, 1.6701, 1.4801, 2.4409, 2.0615, 2.5104, 0.9682], + device='cuda:3'), covar=tensor([0.2424, 0.4063, 0.2508, 0.2019, 0.1603, 0.2079, 0.1560, 0.4167], + device='cuda:3'), in_proj_covar=tensor([0.0489, 0.0577, 0.0612, 0.0438, 0.0594, 0.0495, 0.0641, 0.0493], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:34:22,355 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-01 23:34:40,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-01 23:34:40,970 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.47 vs. limit=5.0 +2023-04-01 23:35:06,466 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.101e+02 7.395e+02 1.032e+03 1.939e+03, threshold=1.479e+03, percent-clipped=13.0 +2023-04-01 23:35:12,369 INFO [train.py:903] (3/4) Epoch 13, batch 1250, loss[loss=0.2521, simple_loss=0.3418, pruned_loss=0.08125, over 19614.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3038, pruned_loss=0.07719, over 3798294.70 frames. ], batch size: 57, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:35:48,393 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83214.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:36:02,509 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83226.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:36:13,268 INFO [train.py:903] (3/4) Epoch 13, batch 1300, loss[loss=0.2545, simple_loss=0.3276, pruned_loss=0.09068, over 19554.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3052, pruned_loss=0.07837, over 3790792.53 frames. 
], batch size: 61, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:36:33,765 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83251.0, num_to_drop=1, layers_to_drop={1} +2023-04-01 23:37:08,172 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 5.042e+02 6.195e+02 7.677e+02 1.204e+03, threshold=1.239e+03, percent-clipped=0.0 +2023-04-01 23:37:17,150 INFO [train.py:903] (3/4) Epoch 13, batch 1350, loss[loss=0.2777, simple_loss=0.3467, pruned_loss=0.1044, over 19569.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3039, pruned_loss=0.07786, over 3794093.59 frames. ], batch size: 61, lr: 6.48e-03, grad_scale: 4.0 +2023-04-01 23:37:37,933 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:37:38,050 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83302.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:07,359 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83325.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:09,911 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83327.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:38:21,068 INFO [train.py:903] (3/4) Epoch 13, batch 1400, loss[loss=0.1968, simple_loss=0.2739, pruned_loss=0.05988, over 18293.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3033, pruned_loss=0.07758, over 3798217.47 frames. ], batch size: 40, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:38:49,431 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1380, 2.2061, 2.4211, 3.1902, 2.1573, 3.1616, 2.6599, 2.2391], + device='cuda:3'), covar=tensor([0.3732, 0.3189, 0.1420, 0.2016, 0.3741, 0.1512, 0.3406, 0.2640], + device='cuda:3'), in_proj_covar=tensor([0.0809, 0.0832, 0.0659, 0.0892, 0.0796, 0.0723, 0.0797, 0.0720], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 23:39:10,620 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6206, 1.6679, 1.9469, 1.9604, 1.3519, 1.8603, 2.0635, 1.8399], + device='cuda:3'), covar=tensor([0.3696, 0.3095, 0.1570, 0.1912, 0.3496, 0.1708, 0.3980, 0.2779], + device='cuda:3'), in_proj_covar=tensor([0.0811, 0.0834, 0.0659, 0.0894, 0.0797, 0.0724, 0.0798, 0.0722], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-01 23:39:16,728 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.533e+02 6.494e+02 7.603e+02 1.656e+03, threshold=1.299e+03, percent-clipped=2.0 +2023-04-01 23:39:20,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-01 23:39:22,623 INFO [train.py:903] (3/4) Epoch 13, batch 1450, loss[loss=0.2361, simple_loss=0.3093, pruned_loss=0.08143, over 19667.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3048, pruned_loss=0.07839, over 3807699.03 frames. ], batch size: 53, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:24,411 INFO [train.py:903] (3/4) Epoch 13, batch 1500, loss[loss=0.2531, simple_loss=0.3281, pruned_loss=0.08906, over 19779.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3044, pruned_loss=0.07796, over 3807270.81 frames. 
], batch size: 56, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:40:29,243 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83440.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:41:19,835 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.552e+02 5.496e+02 6.437e+02 7.955e+02 2.023e+03, threshold=1.287e+03, percent-clipped=5.0 +2023-04-01 23:41:26,543 INFO [train.py:903] (3/4) Epoch 13, batch 1550, loss[loss=0.236, simple_loss=0.309, pruned_loss=0.0815, over 17630.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3041, pruned_loss=0.07749, over 3822058.25 frames. ], batch size: 101, lr: 6.47e-03, grad_scale: 4.0 +2023-04-01 23:42:30,077 INFO [train.py:903] (3/4) Epoch 13, batch 1600, loss[loss=0.2683, simple_loss=0.3373, pruned_loss=0.09959, over 19710.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3036, pruned_loss=0.07709, over 3830519.27 frames. ], batch size: 59, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:42:53,336 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-01 23:42:55,762 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83558.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:43:25,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 5.445e+02 6.386e+02 7.908e+02 1.256e+03, threshold=1.277e+03, percent-clipped=0.0 +2023-04-01 23:43:31,085 INFO [train.py:903] (3/4) Epoch 13, batch 1650, loss[loss=0.223, simple_loss=0.3044, pruned_loss=0.07083, over 19755.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.302, pruned_loss=0.07625, over 3840790.31 frames. ], batch size: 54, lr: 6.47e-03, grad_scale: 8.0 +2023-04-01 23:43:53,242 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8624, 1.3401, 1.0570, 0.9073, 1.1846, 0.9543, 0.9042, 1.2587], + device='cuda:3'), covar=tensor([0.0590, 0.0755, 0.1081, 0.0681, 0.0522, 0.1205, 0.0541, 0.0448], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0305, 0.0324, 0.0247, 0.0237, 0.0320, 0.0284, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:44:33,594 INFO [train.py:903] (3/4) Epoch 13, batch 1700, loss[loss=0.2236, simple_loss=0.3116, pruned_loss=0.06787, over 19680.00 frames. ], tot_loss[loss=0.228, simple_loss=0.303, pruned_loss=0.07656, over 3839554.50 frames. ], batch size: 60, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:44:45,676 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83646.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:15,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-01 23:45:19,391 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83673.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:27,804 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.386e+02 6.785e+02 9.131e+02 1.645e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-01 23:45:33,655 INFO [train.py:903] (3/4) Epoch 13, batch 1750, loss[loss=0.2024, simple_loss=0.271, pruned_loss=0.06692, over 19058.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3026, pruned_loss=0.07644, over 3827505.17 frames. 
], batch size: 42, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:45:48,950 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83696.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:45:53,567 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5187, 1.6850, 2.0084, 1.7234, 2.5791, 2.9729, 2.8937, 3.1034], + device='cuda:3'), covar=tensor([0.1361, 0.2717, 0.2403, 0.1926, 0.0940, 0.0274, 0.0211, 0.0304], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0298, 0.0326, 0.0251, 0.0218, 0.0161, 0.0205, 0.0210], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:46:19,396 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83721.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:46:37,927 INFO [train.py:903] (3/4) Epoch 13, batch 1800, loss[loss=0.2847, simple_loss=0.3396, pruned_loss=0.1149, over 13479.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3014, pruned_loss=0.07637, over 3804313.75 frames. ], batch size: 136, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:47:08,620 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83761.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:29,808 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83778.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:47:32,955 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.542e+02 6.939e+02 8.095e+02 2.139e+03, threshold=1.388e+03, percent-clipped=3.0 +2023-04-01 23:47:35,283 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-01 23:47:39,813 INFO [train.py:903] (3/4) Epoch 13, batch 1850, loss[loss=0.2759, simple_loss=0.3337, pruned_loss=0.1091, over 13020.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3016, pruned_loss=0.07632, over 3803676.87 frames. ], batch size: 135, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:11,509 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-01 23:48:38,899 INFO [train.py:903] (3/4) Epoch 13, batch 1900, loss[loss=0.2468, simple_loss=0.3155, pruned_loss=0.08899, over 19605.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3034, pruned_loss=0.07735, over 3813446.50 frames. ], batch size: 57, lr: 6.46e-03, grad_scale: 8.0 +2023-04-01 23:48:56,220 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-01 23:49:00,754 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-01 23:49:22,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9501, 2.5680, 1.8140, 1.9338, 2.4309, 1.6905, 1.6567, 2.0131], + device='cuda:3'), covar=tensor([0.0895, 0.0672, 0.0755, 0.0596, 0.0404, 0.0878, 0.0568, 0.0543], + device='cuda:3'), in_proj_covar=tensor([0.0291, 0.0303, 0.0325, 0.0248, 0.0237, 0.0317, 0.0284, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:49:23,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-01 23:49:32,948 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.453e+02 6.640e+02 7.751e+02 1.927e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-01 23:49:38,629 INFO [train.py:903] (3/4) Epoch 13, batch 1950, loss[loss=0.2107, simple_loss=0.2814, pruned_loss=0.06996, over 19571.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3027, pruned_loss=0.07699, over 3814890.45 frames. ], batch size: 52, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:50:31,106 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83929.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:50:40,968 INFO [train.py:903] (3/4) Epoch 13, batch 2000, loss[loss=0.2271, simple_loss=0.3053, pruned_loss=0.0744, over 19564.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3027, pruned_loss=0.07693, over 3806363.85 frames. ], batch size: 61, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:02,420 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83954.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:17,807 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83967.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:51:36,105 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.295e+02 5.067e+02 6.527e+02 8.467e+02 1.955e+03, threshold=1.305e+03, percent-clipped=7.0 +2023-04-01 23:51:38,312 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-01 23:51:42,661 INFO [train.py:903] (3/4) Epoch 13, batch 2050, loss[loss=0.28, simple_loss=0.347, pruned_loss=0.1065, over 17548.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3039, pruned_loss=0.07733, over 3812712.85 frames. ], batch size: 101, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:51:48,289 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-01 23:51:56,832 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-01 23:51:57,786 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-01 23:52:03,662 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84003.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:16,701 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-01 23:52:21,311 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-01 23:52:22,839 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84017.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:52:43,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2224, 1.2654, 1.1784, 0.9979, 1.0311, 0.9849, 0.0374, 0.2991], + device='cuda:3'), covar=tensor([0.0497, 0.0489, 0.0331, 0.0411, 0.1029, 0.0455, 0.0952, 0.0842], + device='cuda:3'), in_proj_covar=tensor([0.0340, 0.0333, 0.0334, 0.0356, 0.0425, 0.0357, 0.0313, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-01 23:52:44,647 INFO [train.py:903] (3/4) Epoch 13, batch 2100, loss[loss=0.276, simple_loss=0.3455, pruned_loss=0.1033, over 19651.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3046, pruned_loss=0.07786, over 3825947.40 frames. 
], batch size: 58, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:52:52,275 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84042.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:53:14,864 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-01 23:53:36,201 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-01 23:53:39,576 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.545e+02 6.946e+02 9.457e+02 3.064e+03, threshold=1.389e+03, percent-clipped=12.0 +2023-04-01 23:53:45,274 INFO [train.py:903] (3/4) Epoch 13, batch 2150, loss[loss=0.2248, simple_loss=0.3074, pruned_loss=0.07105, over 18699.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3047, pruned_loss=0.07824, over 3836359.31 frames. ], batch size: 74, lr: 6.45e-03, grad_scale: 8.0 +2023-04-01 23:54:23,402 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84115.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:31,390 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84122.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:54:49,428 INFO [train.py:903] (3/4) Epoch 13, batch 2200, loss[loss=0.2666, simple_loss=0.3427, pruned_loss=0.09518, over 19587.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3024, pruned_loss=0.07682, over 3851528.32 frames. ], batch size: 61, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:55:44,487 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.936e+02 7.647e+02 9.699e+02 2.302e+03, threshold=1.529e+03, percent-clipped=8.0 +2023-04-01 23:55:50,242 INFO [train.py:903] (3/4) Epoch 13, batch 2250, loss[loss=0.2442, simple_loss=0.3168, pruned_loss=0.08584, over 19378.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3028, pruned_loss=0.07718, over 3835650.35 frames. ], batch size: 66, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:29,751 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84217.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:56:51,979 INFO [train.py:903] (3/4) Epoch 13, batch 2300, loss[loss=0.1918, simple_loss=0.2726, pruned_loss=0.05551, over 19572.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3018, pruned_loss=0.07693, over 3831970.28 frames. ], batch size: 52, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:56:53,522 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84237.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:05,857 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-01 23:57:15,129 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84254.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:57:47,149 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 5.192e+02 6.483e+02 8.696e+02 2.103e+03, threshold=1.297e+03, percent-clipped=4.0 +2023-04-01 23:57:49,826 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5713, 1.1610, 1.3837, 1.2777, 2.2420, 0.9874, 2.0339, 2.4420], + device='cuda:3'), covar=tensor([0.0579, 0.2585, 0.2498, 0.1459, 0.0795, 0.1860, 0.0875, 0.0428], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0348, 0.0357, 0.0326, 0.0351, 0.0335, 0.0346, 0.0368], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-01 23:57:52,885 INFO [train.py:903] (3/4) Epoch 13, batch 2350, loss[loss=0.2306, simple_loss=0.3048, pruned_loss=0.0782, over 18740.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3028, pruned_loss=0.07687, over 3828414.08 frames. ], batch size: 74, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:58:25,969 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84311.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:58:37,171 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-01 23:58:54,358 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-01 23:58:57,898 INFO [train.py:903] (3/4) Epoch 13, batch 2400, loss[loss=0.1828, simple_loss=0.2679, pruned_loss=0.04889, over 19474.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3022, pruned_loss=0.07631, over 3827103.76 frames. ], batch size: 49, lr: 6.44e-03, grad_scale: 8.0 +2023-04-01 23:59:11,491 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84347.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:20,759 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84354.0, num_to_drop=0, layers_to_drop=set() +2023-04-01 23:59:54,316 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 5.138e+02 6.932e+02 8.383e+02 1.660e+03, threshold=1.386e+03, percent-clipped=4.0 +2023-04-01 23:59:59,973 INFO [train.py:903] (3/4) Epoch 13, batch 2450, loss[loss=0.2739, simple_loss=0.3492, pruned_loss=0.09933, over 19690.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3047, pruned_loss=0.07738, over 3827055.28 frames. 
], batch size: 59, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:00:22,351 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2880, 2.2519, 2.4796, 3.2689, 2.2373, 3.2027, 2.9028, 2.3799], + device='cuda:3'), covar=tensor([0.3832, 0.3503, 0.1455, 0.1996, 0.3865, 0.1585, 0.3371, 0.2857], + device='cuda:3'), in_proj_covar=tensor([0.0811, 0.0841, 0.0662, 0.0898, 0.0796, 0.0724, 0.0798, 0.0726], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:00:31,662 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5540, 2.0133, 2.1815, 2.6861, 2.5164, 2.3570, 2.1217, 2.6662], + device='cuda:3'), covar=tensor([0.0820, 0.1744, 0.1267, 0.0941, 0.1207, 0.0453, 0.1080, 0.0552], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0351, 0.0294, 0.0239, 0.0295, 0.0243, 0.0280, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:00:50,511 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5471, 1.6245, 2.0317, 1.8345, 3.6058, 2.8566, 3.7268, 1.7895], + device='cuda:3'), covar=tensor([0.2166, 0.3893, 0.2408, 0.1660, 0.1166, 0.1723, 0.1386, 0.3365], + device='cuda:3'), in_proj_covar=tensor([0.0498, 0.0591, 0.0624, 0.0443, 0.0601, 0.0503, 0.0650, 0.0504], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:00:51,597 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84426.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:03,732 INFO [train.py:903] (3/4) Epoch 13, batch 2500, loss[loss=0.2244, simple_loss=0.3013, pruned_loss=0.07369, over 19758.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3054, pruned_loss=0.07796, over 3830621.63 frames. 
], batch size: 51, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:01:34,297 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:38,081 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:01:46,358 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5190, 3.7493, 4.0356, 4.0250, 2.2412, 3.7281, 3.4749, 3.8049], + device='cuda:3'), covar=tensor([0.1153, 0.1961, 0.0522, 0.0563, 0.3818, 0.1048, 0.0489, 0.0851], + device='cuda:3'), in_proj_covar=tensor([0.0693, 0.0621, 0.0827, 0.0705, 0.0748, 0.0572, 0.0498, 0.0757], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 00:01:47,594 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9779, 3.6112, 2.5007, 3.3065, 0.9878, 3.4638, 3.4168, 3.5142], + device='cuda:3'), covar=tensor([0.0784, 0.1177, 0.1966, 0.0835, 0.3764, 0.0803, 0.0887, 0.1078], + device='cuda:3'), in_proj_covar=tensor([0.0444, 0.0371, 0.0445, 0.0319, 0.0382, 0.0377, 0.0370, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:01:47,714 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1215, 1.2832, 1.4827, 1.3652, 2.6893, 1.0454, 2.0509, 2.9758], + device='cuda:3'), covar=tensor([0.0467, 0.2498, 0.2580, 0.1760, 0.0756, 0.2165, 0.1098, 0.0333], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0345, 0.0356, 0.0325, 0.0350, 0.0334, 0.0342, 0.0367], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:01:55,290 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1488, 2.0126, 2.0327, 2.3700, 2.1924, 1.9303, 2.0137, 2.2202], + device='cuda:3'), covar=tensor([0.0717, 0.1213, 0.1002, 0.0610, 0.0904, 0.0422, 0.0918, 0.0472], + device='cuda:3'), in_proj_covar=tensor([0.0254, 0.0350, 0.0294, 0.0238, 0.0295, 0.0242, 0.0279, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:02:00,517 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 5.326e+02 7.100e+02 9.098e+02 1.657e+03, threshold=1.420e+03, percent-clipped=3.0 +2023-04-02 00:02:06,422 INFO [train.py:903] (3/4) Epoch 13, batch 2550, loss[loss=0.2391, simple_loss=0.3111, pruned_loss=0.08359, over 19587.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3051, pruned_loss=0.07828, over 3820292.51 frames. 
], batch size: 52, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:02:16,070 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84493.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:02:17,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6355, 1.6036, 1.8127, 1.7077, 4.1697, 0.9748, 2.4739, 4.3502], + device='cuda:3'), covar=tensor([0.0397, 0.2590, 0.2592, 0.1915, 0.0701, 0.2771, 0.1419, 0.0234], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0345, 0.0355, 0.0324, 0.0348, 0.0333, 0.0341, 0.0367], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:02:48,834 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84518.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:04,537 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 00:03:10,382 INFO [train.py:903] (3/4) Epoch 13, batch 2600, loss[loss=0.254, simple_loss=0.3369, pruned_loss=0.0855, over 19661.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3047, pruned_loss=0.07804, over 3822421.11 frames. ], batch size: 55, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:03:21,100 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:42,743 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:03:59,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:09,372 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.575e+02 5.097e+02 6.359e+02 8.045e+02 2.004e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 00:04:15,152 INFO [train.py:903] (3/4) Epoch 13, batch 2650, loss[loss=0.2783, simple_loss=0.3416, pruned_loss=0.1075, over 19767.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3048, pruned_loss=0.07817, over 3823888.05 frames. ], batch size: 63, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:04:26,074 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9125, 2.0443, 2.1270, 2.6958, 1.9002, 2.5730, 2.3700, 2.0747], + device='cuda:3'), covar=tensor([0.3503, 0.3004, 0.1546, 0.1757, 0.3411, 0.1541, 0.3535, 0.2625], + device='cuda:3'), in_proj_covar=tensor([0.0817, 0.0845, 0.0667, 0.0902, 0.0801, 0.0731, 0.0803, 0.0727], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:04:30,351 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:04:34,949 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 00:05:17,047 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 00:05:17,486 INFO [train.py:903] (3/4) Epoch 13, batch 2700, loss[loss=0.2251, simple_loss=0.3003, pruned_loss=0.07498, over 19540.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3046, pruned_loss=0.07769, over 3830997.17 frames. 
], batch size: 54, lr: 6.43e-03, grad_scale: 8.0 +2023-04-02 00:05:36,190 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:08,610 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5196, 1.2631, 1.6901, 1.3766, 2.6961, 3.7883, 3.4966, 4.0233], + device='cuda:3'), covar=tensor([0.1341, 0.3514, 0.3090, 0.2158, 0.0551, 0.0159, 0.0190, 0.0174], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0298, 0.0324, 0.0250, 0.0217, 0.0162, 0.0205, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:06:08,623 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:14,002 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.394e+02 6.395e+02 8.456e+02 1.799e+03, threshold=1.279e+03, percent-clipped=4.0 +2023-04-02 00:06:16,795 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:21,011 INFO [train.py:903] (3/4) Epoch 13, batch 2750, loss[loss=0.2266, simple_loss=0.31, pruned_loss=0.07162, over 19783.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3035, pruned_loss=0.07718, over 3841478.27 frames. ], batch size: 56, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:06:37,475 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:44,851 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 00:06:49,105 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:06:56,179 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:02,029 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:07:22,612 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3613, 1.4228, 1.7438, 1.5742, 2.6400, 2.3022, 2.7981, 1.1950], + device='cuda:3'), covar=tensor([0.2306, 0.3965, 0.2523, 0.1770, 0.1369, 0.1861, 0.1397, 0.3736], + device='cuda:3'), in_proj_covar=tensor([0.0498, 0.0588, 0.0624, 0.0442, 0.0600, 0.0500, 0.0649, 0.0502], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:07:24,546 INFO [train.py:903] (3/4) Epoch 13, batch 2800, loss[loss=0.2129, simple_loss=0.2977, pruned_loss=0.06407, over 19338.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3037, pruned_loss=0.07744, over 3824179.46 frames. ], batch size: 66, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:07:35,151 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:08:22,666 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.423e+02 6.884e+02 8.957e+02 1.568e+03, threshold=1.377e+03, percent-clipped=4.0 +2023-04-02 00:08:27,029 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. 
limit=2.0 +2023-04-02 00:08:29,874 INFO [train.py:903] (3/4) Epoch 13, batch 2850, loss[loss=0.2664, simple_loss=0.3363, pruned_loss=0.09821, over 19768.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3048, pruned_loss=0.07854, over 3807882.34 frames. ], batch size: 63, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:04,392 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:11,246 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84818.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:26,676 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:09:33,386 INFO [train.py:903] (3/4) Epoch 13, batch 2900, loss[loss=0.1896, simple_loss=0.2776, pruned_loss=0.05084, over 19760.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3039, pruned_loss=0.07814, over 3806613.19 frames. ], batch size: 54, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:09:33,421 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 00:09:43,376 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1885, 1.2678, 1.1754, 0.9851, 1.0574, 1.0773, 0.0621, 0.3643], + device='cuda:3'), covar=tensor([0.0520, 0.0535, 0.0339, 0.0457, 0.1036, 0.0477, 0.0984, 0.0828], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0333, 0.0335, 0.0356, 0.0429, 0.0356, 0.0314, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:09:58,778 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:10:31,987 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.833e+02 5.191e+02 6.710e+02 8.572e+02 2.238e+03, threshold=1.342e+03, percent-clipped=4.0 +2023-04-02 00:10:38,049 INFO [train.py:903] (3/4) Epoch 13, batch 2950, loss[loss=0.2287, simple_loss=0.3056, pruned_loss=0.07588, over 19577.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3038, pruned_loss=0.07772, over 3811931.11 frames. ], batch size: 61, lr: 6.42e-03, grad_scale: 8.0 +2023-04-02 00:10:40,481 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:39,659 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:11:45,164 INFO [train.py:903] (3/4) Epoch 13, batch 3000, loss[loss=0.2016, simple_loss=0.2853, pruned_loss=0.05893, over 19674.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3025, pruned_loss=0.07668, over 3807401.38 frames. ], batch size: 53, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:11:45,164 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 00:12:00,828 INFO [train.py:937] (3/4) Epoch 13, validation: loss=0.1754, simple_loss=0.276, pruned_loss=0.03742, over 944034.00 frames. +2023-04-02 00:12:00,829 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 00:12:05,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 00:12:29,848 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84957.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:43,850 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:43,984 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:12:59,475 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.901e+02 6.331e+02 8.447e+02 1.208e+03, threshold=1.266e+03, percent-clipped=0.0 +2023-04-02 00:13:05,452 INFO [train.py:903] (3/4) Epoch 13, batch 3050, loss[loss=0.2378, simple_loss=0.3122, pruned_loss=0.08173, over 19722.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3034, pruned_loss=0.07691, over 3816171.98 frames. ], batch size: 63, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:13:17,195 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84994.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:18,157 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84995.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:13:26,442 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:14:09,057 INFO [train.py:903] (3/4) Epoch 13, batch 3100, loss[loss=0.2172, simple_loss=0.2895, pruned_loss=0.07246, over 19738.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3035, pruned_loss=0.07739, over 3813975.46 frames. ], batch size: 51, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:14:50,660 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:05,350 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.118e+02 6.727e+02 8.323e+02 1.616e+03, threshold=1.345e+03, percent-clipped=6.0 +2023-04-02 00:15:11,220 INFO [train.py:903] (3/4) Epoch 13, batch 3150, loss[loss=0.2144, simple_loss=0.2947, pruned_loss=0.06706, over 19849.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3028, pruned_loss=0.07671, over 3820402.25 frames. ], batch size: 52, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:15:20,649 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:15:37,410 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-02 00:15:40,757 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3642, 1.4574, 1.7475, 1.4262, 2.3266, 2.7780, 2.5414, 2.8696], + device='cuda:3'), covar=tensor([0.1331, 0.2773, 0.2433, 0.2093, 0.1023, 0.0245, 0.0276, 0.0333], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0300, 0.0329, 0.0251, 0.0218, 0.0164, 0.0206, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:15:41,868 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85110.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:00,162 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:09,398 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:16:12,575 INFO [train.py:903] (3/4) Epoch 13, batch 3200, loss[loss=0.2165, simple_loss=0.299, pruned_loss=0.06705, over 18024.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3038, pruned_loss=0.07779, over 3804594.85 frames. ], batch size: 83, lr: 6.41e-03, grad_scale: 8.0 +2023-04-02 00:16:25,412 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1701, 2.2490, 2.5120, 3.2114, 2.2179, 3.1491, 2.7129, 2.3575], + device='cuda:3'), covar=tensor([0.3992, 0.3607, 0.1466, 0.2012, 0.3982, 0.1572, 0.3644, 0.2707], + device='cuda:3'), in_proj_covar=tensor([0.0812, 0.0842, 0.0660, 0.0895, 0.0797, 0.0724, 0.0795, 0.0721], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:16:46,262 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:17:10,848 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.817e+02 5.513e+02 6.647e+02 8.266e+02 1.326e+03, threshold=1.329e+03, percent-clipped=0.0 +2023-04-02 00:17:16,637 INFO [train.py:903] (3/4) Epoch 13, batch 3250, loss[loss=0.236, simple_loss=0.3127, pruned_loss=0.07965, over 19653.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3026, pruned_loss=0.07697, over 3809508.54 frames. ], batch size: 60, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:17:58,815 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5162, 1.1394, 1.1594, 1.4089, 1.0869, 1.2931, 1.0718, 1.3395], + device='cuda:3'), covar=tensor([0.1124, 0.1385, 0.1529, 0.0975, 0.1275, 0.0640, 0.1462, 0.0818], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0353, 0.0295, 0.0241, 0.0296, 0.0246, 0.0285, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:18:20,657 INFO [train.py:903] (3/4) Epoch 13, batch 3300, loss[loss=0.1907, simple_loss=0.2744, pruned_loss=0.05355, over 19461.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3017, pruned_loss=0.0766, over 3798968.01 frames. ], batch size: 49, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:18:21,909 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 00:18:34,734 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3047, 1.4360, 1.8076, 1.5709, 3.0883, 2.3978, 3.3276, 1.5222], + device='cuda:3'), covar=tensor([0.2388, 0.4073, 0.2468, 0.1930, 0.1470, 0.2002, 0.1576, 0.3764], + device='cuda:3'), in_proj_covar=tensor([0.0493, 0.0584, 0.0619, 0.0438, 0.0597, 0.0499, 0.0644, 0.0502], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:18:39,428 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5265, 1.2194, 1.4909, 1.4947, 3.0742, 0.9544, 2.1799, 3.5219], + device='cuda:3'), covar=tensor([0.0452, 0.2907, 0.2741, 0.1878, 0.0703, 0.2726, 0.1419, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0350, 0.0357, 0.0325, 0.0348, 0.0334, 0.0343, 0.0365], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:18:47,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:18:53,204 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4803, 2.2937, 1.6451, 1.6359, 2.1032, 1.2934, 1.2446, 1.8053], + device='cuda:3'), covar=tensor([0.0971, 0.0672, 0.0999, 0.0709, 0.0491, 0.1139, 0.0734, 0.0490], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0304, 0.0328, 0.0250, 0.0238, 0.0321, 0.0287, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:19:00,110 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 00:19:12,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:17,835 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 5.269e+02 6.447e+02 8.165e+02 1.494e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 00:19:19,510 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:19:22,467 INFO [train.py:903] (3/4) Epoch 13, batch 3350, loss[loss=0.2512, simple_loss=0.3315, pruned_loss=0.08544, over 19674.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3019, pruned_loss=0.07642, over 3817575.40 frames. ], batch size: 59, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:19:57,334 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:20:21,525 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3363, 1.4871, 1.8468, 1.4873, 2.7666, 3.5341, 3.3736, 3.7211], + device='cuda:3'), covar=tensor([0.1622, 0.3325, 0.2980, 0.2182, 0.0602, 0.0196, 0.0192, 0.0242], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0301, 0.0329, 0.0252, 0.0220, 0.0165, 0.0206, 0.0213], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:20:24,674 INFO [train.py:903] (3/4) Epoch 13, batch 3400, loss[loss=0.1983, simple_loss=0.2748, pruned_loss=0.0609, over 19623.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3019, pruned_loss=0.07656, over 3810478.79 frames. 
], batch size: 50, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:20:57,121 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2250, 1.2759, 1.1704, 0.9777, 0.9561, 1.0362, 0.1194, 0.3209], + device='cuda:3'), covar=tensor([0.0680, 0.0652, 0.0421, 0.0516, 0.1437, 0.0650, 0.1137, 0.1158], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0334, 0.0335, 0.0357, 0.0428, 0.0357, 0.0313, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:21:03,105 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:21:22,538 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.371e+02 6.826e+02 8.500e+02 2.504e+03, threshold=1.365e+03, percent-clipped=9.0 +2023-04-02 00:21:27,160 INFO [train.py:903] (3/4) Epoch 13, batch 3450, loss[loss=0.2334, simple_loss=0.3079, pruned_loss=0.07947, over 17620.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3023, pruned_loss=0.07688, over 3811450.74 frames. ], batch size: 101, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:21:30,654 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 00:21:34,227 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85391.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:22:19,381 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:22:28,802 INFO [train.py:903] (3/4) Epoch 13, batch 3500, loss[loss=0.2056, simple_loss=0.2737, pruned_loss=0.06876, over 19383.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3025, pruned_loss=0.07697, over 3813240.66 frames. ], batch size: 47, lr: 6.40e-03, grad_scale: 8.0 +2023-04-02 00:22:42,065 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:10,410 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:20,670 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:23:27,193 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.339e+02 6.675e+02 8.042e+02 1.644e+03, threshold=1.335e+03, percent-clipped=3.0 +2023-04-02 00:23:31,539 INFO [train.py:903] (3/4) Epoch 13, batch 3550, loss[loss=0.2565, simple_loss=0.3299, pruned_loss=0.09152, over 19542.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3027, pruned_loss=0.07677, over 3823980.20 frames. 
], batch size: 56, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:23:31,994 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0048, 1.6943, 1.5950, 1.8670, 1.6826, 1.7973, 1.4561, 1.8770], + device='cuda:3'), covar=tensor([0.0925, 0.1393, 0.1436, 0.1038, 0.1251, 0.0478, 0.1350, 0.0696], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0348, 0.0294, 0.0238, 0.0294, 0.0241, 0.0281, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:23:50,612 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85501.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:04,313 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3159, 1.3414, 1.5745, 1.5215, 2.1173, 2.0334, 2.1737, 0.7401], + device='cuda:3'), covar=tensor([0.2442, 0.4149, 0.2561, 0.1927, 0.1541, 0.2096, 0.1474, 0.4383], + device='cuda:3'), in_proj_covar=tensor([0.0495, 0.0588, 0.0623, 0.0439, 0.0598, 0.0501, 0.0649, 0.0504], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:24:31,256 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:24:34,224 INFO [train.py:903] (3/4) Epoch 13, batch 3600, loss[loss=0.2578, simple_loss=0.3286, pruned_loss=0.09347, over 17664.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3027, pruned_loss=0.07644, over 3829463.67 frames. ], batch size: 101, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:24:50,374 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:04,582 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85558.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:25,618 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8901, 1.1619, 1.4621, 0.5913, 2.1217, 2.4516, 2.1509, 2.5838], + device='cuda:3'), covar=tensor([0.1616, 0.3509, 0.3229, 0.2483, 0.0519, 0.0227, 0.0331, 0.0292], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0300, 0.0329, 0.0251, 0.0220, 0.0164, 0.0205, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:25:33,193 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.969e+02 7.367e+02 9.197e+02 1.857e+03, threshold=1.473e+03, percent-clipped=2.0 +2023-04-02 00:25:36,809 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:25:38,625 INFO [train.py:903] (3/4) Epoch 13, batch 3650, loss[loss=0.2186, simple_loss=0.2982, pruned_loss=0.06947, over 19594.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3027, pruned_loss=0.07651, over 3837707.71 frames. 
], batch size: 52, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:25:46,147 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:26:39,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2462, 1.3530, 2.0184, 1.6219, 2.9154, 4.6407, 4.5529, 4.9461], + device='cuda:3'), covar=tensor([0.1601, 0.3492, 0.3004, 0.1937, 0.0516, 0.0134, 0.0139, 0.0141], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0300, 0.0331, 0.0251, 0.0220, 0.0164, 0.0206, 0.0211], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:26:41,858 INFO [train.py:903] (3/4) Epoch 13, batch 3700, loss[loss=0.2789, simple_loss=0.3543, pruned_loss=0.1017, over 19501.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3031, pruned_loss=0.07703, over 3828938.29 frames. ], batch size: 64, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:26:51,261 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:41,800 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.698e+02 5.337e+02 6.385e+02 1.025e+03 1.989e+03, threshold=1.277e+03, percent-clipped=4.0 +2023-04-02 00:27:44,677 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:27:46,550 INFO [train.py:903] (3/4) Epoch 13, batch 3750, loss[loss=0.2998, simple_loss=0.3607, pruned_loss=0.1195, over 17086.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.302, pruned_loss=0.07615, over 3837311.71 frames. ], batch size: 101, lr: 6.39e-03, grad_scale: 8.0 +2023-04-02 00:28:01,531 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4554, 1.5405, 2.1680, 1.7981, 3.0394, 2.4070, 3.2725, 1.6572], + device='cuda:3'), covar=tensor([0.2564, 0.4368, 0.2516, 0.1973, 0.1690, 0.2312, 0.1880, 0.3913], + device='cuda:3'), in_proj_covar=tensor([0.0497, 0.0590, 0.0625, 0.0439, 0.0598, 0.0502, 0.0648, 0.0505], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:28:14,067 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 00:28:14,943 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:28:34,773 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5046, 1.5421, 1.8513, 1.6996, 3.3584, 2.7017, 3.5751, 1.4467], + device='cuda:3'), covar=tensor([0.2206, 0.3961, 0.2526, 0.1701, 0.1276, 0.1772, 0.1369, 0.3736], + device='cuda:3'), in_proj_covar=tensor([0.0496, 0.0589, 0.0625, 0.0439, 0.0597, 0.0501, 0.0648, 0.0505], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:28:48,156 INFO [train.py:903] (3/4) Epoch 13, batch 3800, loss[loss=0.2215, simple_loss=0.2988, pruned_loss=0.07216, over 19668.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3009, pruned_loss=0.07539, over 3832853.56 frames. ], batch size: 53, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:20,372 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 00:29:20,848 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. 
Duration: 29.1166875 +2023-04-02 00:29:44,888 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 5.587e+02 7.205e+02 9.194e+02 2.888e+03, threshold=1.441e+03, percent-clipped=8.0 +2023-04-02 00:29:50,626 INFO [train.py:903] (3/4) Epoch 13, batch 3850, loss[loss=0.1896, simple_loss=0.2661, pruned_loss=0.05652, over 19477.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.302, pruned_loss=0.0764, over 3823675.58 frames. ], batch size: 49, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:29:56,236 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:30:53,504 INFO [train.py:903] (3/4) Epoch 13, batch 3900, loss[loss=0.2147, simple_loss=0.2912, pruned_loss=0.06911, over 19573.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3029, pruned_loss=0.07694, over 3831473.62 frames. ], batch size: 52, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:31:00,076 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:06,825 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85845.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:10,509 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:30,227 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:40,504 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:31:51,442 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.450e+02 6.738e+02 8.252e+02 2.044e+03, threshold=1.348e+03, percent-clipped=4.0 +2023-04-02 00:31:57,035 INFO [train.py:903] (3/4) Epoch 13, batch 3950, loss[loss=0.2292, simple_loss=0.2944, pruned_loss=0.08205, over 19801.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3034, pruned_loss=0.07736, over 3840668.46 frames. ], batch size: 49, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:32:00,560 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 00:32:04,161 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85892.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:05,564 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:20,763 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:32:59,797 INFO [train.py:903] (3/4) Epoch 13, batch 4000, loss[loss=0.2105, simple_loss=0.2836, pruned_loss=0.06868, over 19472.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3032, pruned_loss=0.07711, over 3827667.74 frames. 
], batch size: 49, lr: 6.38e-03, grad_scale: 8.0 +2023-04-02 00:33:14,767 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85948.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:33:30,762 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85960.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:45,317 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9167, 1.3532, 1.5834, 1.5541, 3.4359, 1.0831, 2.4112, 3.8359], + device='cuda:3'), covar=tensor([0.0383, 0.2469, 0.2404, 0.1750, 0.0667, 0.2350, 0.1182, 0.0231], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0341, 0.0352, 0.0321, 0.0344, 0.0329, 0.0341, 0.0362], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:33:46,159 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 00:33:48,582 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:33:56,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.280e+02 6.258e+02 8.022e+02 1.377e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 00:34:02,169 INFO [train.py:903] (3/4) Epoch 13, batch 4050, loss[loss=0.2639, simple_loss=0.3392, pruned_loss=0.09424, over 18777.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3038, pruned_loss=0.07743, over 3829095.04 frames. ], batch size: 74, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:34:03,499 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:30,862 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:34:46,101 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:35:02,921 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9669, 2.0668, 2.1557, 2.6961, 1.9416, 2.5865, 2.2733, 1.9314], + device='cuda:3'), covar=tensor([0.3696, 0.3163, 0.1585, 0.2102, 0.3609, 0.1662, 0.4226, 0.2973], + device='cuda:3'), in_proj_covar=tensor([0.0813, 0.0845, 0.0660, 0.0898, 0.0797, 0.0724, 0.0798, 0.0723], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:35:05,744 INFO [train.py:903] (3/4) Epoch 13, batch 4100, loss[loss=0.2244, simple_loss=0.2903, pruned_loss=0.0792, over 19414.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3033, pruned_loss=0.07729, over 3819107.58 frames. ], batch size: 48, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:35:33,084 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 00:35:41,505 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 00:35:48,406 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86070.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:04,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.943e+02 6.094e+02 7.890e+02 1.986e+03, threshold=1.219e+03, percent-clipped=5.0 +2023-04-02 00:36:08,782 INFO [train.py:903] (3/4) Epoch 13, batch 4150, loss[loss=0.2, simple_loss=0.266, pruned_loss=0.06703, over 19302.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3034, pruned_loss=0.07669, over 3818790.37 frames. ], batch size: 44, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:36:11,025 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:28,541 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:36:36,343 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7581, 1.4450, 1.5172, 2.1352, 1.7386, 2.0425, 1.9839, 1.7637], + device='cuda:3'), covar=tensor([0.0741, 0.0949, 0.0953, 0.0711, 0.0777, 0.0689, 0.0826, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0223, 0.0223, 0.0242, 0.0231, 0.0209, 0.0191, 0.0200], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 00:37:11,323 INFO [train.py:903] (3/4) Epoch 13, batch 4200, loss[loss=0.1903, simple_loss=0.2712, pruned_loss=0.05464, over 19700.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3026, pruned_loss=0.07627, over 3813239.54 frames. ], batch size: 53, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:37:14,929 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 00:37:42,913 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:03,679 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5952, 1.3484, 1.4953, 1.6060, 3.1189, 1.1365, 2.2305, 3.5101], + device='cuda:3'), covar=tensor([0.0413, 0.2530, 0.2556, 0.1619, 0.0675, 0.2278, 0.1315, 0.0260], + device='cuda:3'), in_proj_covar=tensor([0.0362, 0.0342, 0.0354, 0.0321, 0.0345, 0.0329, 0.0342, 0.0362], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:38:08,163 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.506e+02 6.651e+02 9.015e+02 1.898e+03, threshold=1.330e+03, percent-clipped=8.0 +2023-04-02 00:38:12,742 INFO [train.py:903] (3/4) Epoch 13, batch 4250, loss[loss=0.2696, simple_loss=0.3311, pruned_loss=0.104, over 19782.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3014, pruned_loss=0.0754, over 3820701.41 frames. ], batch size: 56, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:38:13,197 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:38:31,046 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 00:38:31,968 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 00:38:43,576 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-02 00:38:47,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1393, 5.5073, 2.6198, 4.7780, 1.3085, 5.5005, 5.4151, 5.6065], + device='cuda:3'), covar=tensor([0.0429, 0.0837, 0.2155, 0.0658, 0.3510, 0.0536, 0.0737, 0.1004], + device='cuda:3'), in_proj_covar=tensor([0.0444, 0.0370, 0.0444, 0.0320, 0.0386, 0.0378, 0.0367, 0.0404], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:38:52,293 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86216.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:15,640 INFO [train.py:903] (3/4) Epoch 13, batch 4300, loss[loss=0.2353, simple_loss=0.3136, pruned_loss=0.07852, over 19654.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3029, pruned_loss=0.07604, over 3804947.57 frames. ], batch size: 55, lr: 6.37e-03, grad_scale: 8.0 +2023-04-02 00:39:17,009 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86237.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:22,953 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86241.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:50,489 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:39:51,564 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86264.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:13,251 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.647e+02 5.244e+02 6.314e+02 8.271e+02 2.210e+03, threshold=1.263e+03, percent-clipped=7.0 +2023-04-02 00:40:15,231 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 00:40:19,670 INFO [train.py:903] (3/4) Epoch 13, batch 4350, loss[loss=0.2571, simple_loss=0.3338, pruned_loss=0.09017, over 19835.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3024, pruned_loss=0.07584, over 3808377.07 frames. ], batch size: 52, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:40:23,414 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:40:27,970 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86292.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:41:00,081 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86319.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:22,566 INFO [train.py:903] (3/4) Epoch 13, batch 4400, loss[loss=0.2166, simple_loss=0.2845, pruned_loss=0.07438, over 16024.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.301, pruned_loss=0.07513, over 3802549.82 frames. ], batch size: 35, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:41:42,037 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:49,360 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:41:50,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. 
Duration: 25.285 +2023-04-02 00:41:58,614 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:00,825 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 00:42:20,371 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.626e+02 5.303e+02 6.614e+02 7.903e+02 1.976e+03, threshold=1.323e+03, percent-clipped=7.0 +2023-04-02 00:42:21,992 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:42:25,156 INFO [train.py:903] (3/4) Epoch 13, batch 4450, loss[loss=0.2151, simple_loss=0.2796, pruned_loss=0.07535, over 19721.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3008, pruned_loss=0.0751, over 3817402.17 frames. ], batch size: 46, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:42:49,909 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9913, 1.6267, 1.5706, 1.8850, 1.6437, 1.6735, 1.4416, 1.9139], + device='cuda:3'), covar=tensor([0.0894, 0.1379, 0.1344, 0.0914, 0.1200, 0.0523, 0.1289, 0.0645], + device='cuda:3'), in_proj_covar=tensor([0.0253, 0.0343, 0.0290, 0.0238, 0.0290, 0.0237, 0.0279, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:42:52,180 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86407.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:43:00,999 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:21,646 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:25,404 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:43:27,355 INFO [train.py:903] (3/4) Epoch 13, batch 4500, loss[loss=0.2275, simple_loss=0.3118, pruned_loss=0.07157, over 19592.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3018, pruned_loss=0.07558, over 3822479.98 frames. 
], batch size: 57, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:43:48,840 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2111, 1.1874, 1.1838, 1.3362, 1.0720, 1.3339, 1.4104, 1.2743], + device='cuda:3'), covar=tensor([0.0870, 0.0983, 0.1096, 0.0701, 0.0860, 0.0822, 0.0785, 0.0755], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0225, 0.0223, 0.0243, 0.0231, 0.0210, 0.0192, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 00:43:55,649 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:01,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:21,266 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:44:24,464 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.430e+02 6.459e+02 7.870e+02 1.871e+03, threshold=1.292e+03, percent-clipped=3.0 +2023-04-02 00:44:29,962 INFO [train.py:903] (3/4) Epoch 13, batch 4550, loss[loss=0.2616, simple_loss=0.3381, pruned_loss=0.09259, over 19611.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3028, pruned_loss=0.07656, over 3805408.80 frames. ], batch size: 57, lr: 6.36e-03, grad_scale: 8.0 +2023-04-02 00:44:39,221 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 00:45:02,119 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 00:45:25,853 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86529.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:45:33,583 INFO [train.py:903] (3/4) Epoch 13, batch 4600, loss[loss=0.2565, simple_loss=0.3293, pruned_loss=0.09184, over 19687.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3028, pruned_loss=0.07625, over 3812324.91 frames. ], batch size: 60, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:45:46,439 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.3526, 5.3462, 6.2226, 6.1100, 2.2375, 5.8116, 5.0176, 5.7980], + device='cuda:3'), covar=tensor([0.1436, 0.0632, 0.0451, 0.0501, 0.5292, 0.0524, 0.0513, 0.0894], + device='cuda:3'), in_proj_covar=tensor([0.0707, 0.0625, 0.0835, 0.0706, 0.0751, 0.0576, 0.0503, 0.0766], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 00:45:46,527 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:31,573 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.452e+02 6.634e+02 8.020e+02 1.693e+03, threshold=1.327e+03, percent-clipped=1.0 +2023-04-02 00:46:35,936 INFO [train.py:903] (3/4) Epoch 13, batch 4650, loss[loss=0.2004, simple_loss=0.2781, pruned_loss=0.06135, over 19484.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3024, pruned_loss=0.07584, over 3805166.58 frames. 
], batch size: 49, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:46:51,371 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:46:52,257 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 00:47:03,875 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 00:47:03,998 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:04,256 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,039 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:47:35,324 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 00:47:38,175 INFO [train.py:903] (3/4) Epoch 13, batch 4700, loss[loss=0.2247, simple_loss=0.3061, pruned_loss=0.07168, over 19315.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3012, pruned_loss=0.07511, over 3818710.21 frames. ], batch size: 66, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:47:53,756 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 00:48:04,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 00:48:06,042 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2266, 1.3273, 1.2626, 1.0704, 1.0422, 1.0901, 0.0420, 0.3429], + device='cuda:3'), covar=tensor([0.0511, 0.0487, 0.0336, 0.0431, 0.1067, 0.0447, 0.0968, 0.0859], + device='cuda:3'), in_proj_covar=tensor([0.0341, 0.0335, 0.0332, 0.0357, 0.0432, 0.0357, 0.0314, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 00:48:13,320 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86663.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:48:35,953 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 5.083e+02 5.953e+02 7.054e+02 1.649e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-02 00:48:42,059 INFO [train.py:903] (3/4) Epoch 13, batch 4750, loss[loss=0.1982, simple_loss=0.2814, pruned_loss=0.05746, over 19599.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3007, pruned_loss=0.07476, over 3819447.14 frames. 
], batch size: 52, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:48:45,718 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86688.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 00:48:48,062 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86690.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:48:58,152 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4759, 1.4143, 1.5094, 1.5262, 3.0573, 1.2446, 2.3220, 3.4090], + device='cuda:3'), covar=tensor([0.0465, 0.2578, 0.2653, 0.1858, 0.0679, 0.2364, 0.1310, 0.0300], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0345, 0.0356, 0.0325, 0.0352, 0.0332, 0.0346, 0.0365], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:49:07,677 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 00:49:10,076 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 00:49:15,251 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-02 00:49:18,565 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:27,264 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:31,061 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 00:49:43,063 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:49:43,762 INFO [train.py:903] (3/4) Epoch 13, batch 4800, loss[loss=0.243, simple_loss=0.311, pruned_loss=0.08749, over 19681.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3016, pruned_loss=0.07534, over 3826201.52 frames. ], batch size: 53, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:50:12,867 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5040, 2.1801, 2.1860, 2.5800, 2.6048, 2.1178, 2.1753, 2.4495], + device='cuda:3'), covar=tensor([0.0878, 0.1712, 0.1301, 0.0937, 0.1138, 0.0503, 0.1134, 0.0643], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0348, 0.0293, 0.0241, 0.0294, 0.0240, 0.0282, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:50:12,875 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:41,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.270e+02 6.552e+02 8.488e+02 1.715e+03, threshold=1.310e+03, percent-clipped=7.0 +2023-04-02 00:50:45,052 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:50:45,824 INFO [train.py:903] (3/4) Epoch 13, batch 4850, loss[loss=0.2085, simple_loss=0.2895, pruned_loss=0.06369, over 19617.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3014, pruned_loss=0.07525, over 3821754.57 frames. 
], batch size: 57, lr: 6.35e-03, grad_scale: 8.0 +2023-04-02 00:51:06,435 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:06,677 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:08,605 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 00:51:14,209 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:18,126 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:28,824 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 00:51:31,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 00:51:37,279 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 00:51:37,323 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 00:51:38,821 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:45,318 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86832.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:51:47,410 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 00:51:49,795 INFO [train.py:903] (3/4) Epoch 13, batch 4900, loss[loss=0.1935, simple_loss=0.2704, pruned_loss=0.05831, over 19354.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3018, pruned_loss=0.07542, over 3816010.23 frames. ], batch size: 48, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:51:59,426 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1259, 1.8091, 1.7067, 2.6174, 1.8475, 2.2675, 2.5440, 2.2243], + device='cuda:3'), covar=tensor([0.0773, 0.0961, 0.1117, 0.0949, 0.0989, 0.0776, 0.0913, 0.0642], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0224, 0.0224, 0.0244, 0.0230, 0.0211, 0.0192, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 00:52:07,082 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 00:52:46,044 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.083e+02 5.393e+02 6.845e+02 8.725e+02 1.892e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 00:52:50,738 INFO [train.py:903] (3/4) Epoch 13, batch 4950, loss[loss=0.2741, simple_loss=0.3471, pruned_loss=0.1005, over 19356.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3023, pruned_loss=0.07587, over 3804914.09 frames. ], batch size: 70, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:53:05,783 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86896.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:08,070 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-02 00:53:31,827 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:32,649 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 00:53:37,700 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:53:56,223 INFO [train.py:903] (3/4) Epoch 13, batch 5000, loss[loss=0.2464, simple_loss=0.3178, pruned_loss=0.08746, over 19607.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3016, pruned_loss=0.07529, over 3806478.81 frames. ], batch size: 57, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:54:03,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 00:54:03,558 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:14,008 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 00:54:49,343 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:54:52,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 5.650e+02 6.679e+02 8.615e+02 1.943e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 00:54:56,919 INFO [train.py:903] (3/4) Epoch 13, batch 5050, loss[loss=0.2357, simple_loss=0.3235, pruned_loss=0.07399, over 18638.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3021, pruned_loss=0.07544, over 3809929.26 frames. ], batch size: 74, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:55:19,921 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87004.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:55:33,341 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 00:55:38,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5296, 1.2407, 1.1707, 1.4540, 1.1682, 1.3355, 1.2177, 1.3777], + device='cuda:3'), covar=tensor([0.1051, 0.1205, 0.1471, 0.0925, 0.1137, 0.0575, 0.1313, 0.0775], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0349, 0.0292, 0.0239, 0.0295, 0.0241, 0.0281, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 00:55:41,790 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87020.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 00:56:01,009 INFO [train.py:903] (3/4) Epoch 13, batch 5100, loss[loss=0.2496, simple_loss=0.3175, pruned_loss=0.09087, over 18792.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.302, pruned_loss=0.07543, over 3812157.64 frames. ], batch size: 74, lr: 6.34e-03, grad_scale: 8.0 +2023-04-02 00:56:07,909 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 00:56:11,370 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 00:56:18,087 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 00:56:28,907 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:56:58,519 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 5.089e+02 6.519e+02 8.713e+02 1.677e+03, threshold=1.304e+03, percent-clipped=2.0 +2023-04-02 00:57:03,281 INFO [train.py:903] (3/4) Epoch 13, batch 5150, loss[loss=0.2191, simple_loss=0.306, pruned_loss=0.06608, over 19658.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3024, pruned_loss=0.07586, over 3818110.07 frames. ], batch size: 60, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:57:15,160 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 00:57:46,783 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9572, 2.0065, 2.3088, 2.6211, 1.8584, 2.4657, 2.5101, 2.1472], + device='cuda:3'), covar=tensor([0.3813, 0.3397, 0.1505, 0.1979, 0.3725, 0.1787, 0.3620, 0.2767], + device='cuda:3'), in_proj_covar=tensor([0.0815, 0.0844, 0.0661, 0.0894, 0.0795, 0.0728, 0.0796, 0.0728], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:57:48,534 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 00:58:07,739 INFO [train.py:903] (3/4) Epoch 13, batch 5200, loss[loss=0.238, simple_loss=0.3193, pruned_loss=0.07837, over 17568.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3022, pruned_loss=0.07628, over 3821783.57 frames. ], batch size: 101, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 00:58:18,406 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 00:58:36,891 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5772, 4.0081, 4.2137, 4.2036, 1.5809, 3.9363, 3.4481, 3.8912], + device='cuda:3'), covar=tensor([0.1448, 0.0869, 0.0571, 0.0618, 0.5490, 0.0831, 0.0667, 0.1054], + device='cuda:3'), in_proj_covar=tensor([0.0705, 0.0622, 0.0829, 0.0704, 0.0748, 0.0578, 0.0499, 0.0768], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 00:58:54,672 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87173.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:58:58,886 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87176.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:01,499 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:04,669 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 00:59:05,686 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.198e+02 5.751e+02 6.794e+02 9.141e+02 2.147e+03, threshold=1.359e+03, percent-clipped=10.0 +2023-04-02 00:59:10,443 INFO [train.py:903] (3/4) Epoch 13, batch 5250, loss[loss=0.2321, simple_loss=0.3199, pruned_loss=0.0722, over 19670.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3014, pruned_loss=0.07557, over 3831219.04 frames. 
], batch size: 60, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 00:59:25,776 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87198.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:30,538 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8295, 1.9484, 2.1275, 2.5920, 1.8174, 2.4450, 2.3444, 1.9606], + device='cuda:3'), covar=tensor([0.3912, 0.3050, 0.1606, 0.1979, 0.3382, 0.1678, 0.3744, 0.2902], + device='cuda:3'), in_proj_covar=tensor([0.0814, 0.0843, 0.0661, 0.0896, 0.0797, 0.0728, 0.0796, 0.0726], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 00:59:31,629 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 00:59:39,976 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 01:00:08,204 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0062, 4.4511, 4.7493, 4.7530, 1.7882, 4.4626, 3.8818, 4.4016], + device='cuda:3'), covar=tensor([0.1396, 0.0757, 0.0553, 0.0558, 0.5329, 0.0681, 0.0649, 0.1097], + device='cuda:3'), in_proj_covar=tensor([0.0718, 0.0634, 0.0845, 0.0720, 0.0763, 0.0590, 0.0509, 0.0781], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 01:00:13,791 INFO [train.py:903] (3/4) Epoch 13, batch 5300, loss[loss=0.1865, simple_loss=0.2634, pruned_loss=0.05485, over 19414.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3022, pruned_loss=0.07584, over 3837121.87 frames. ], batch size: 48, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:00:18,367 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87240.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:27,871 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87248.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:00:28,732 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 01:01:10,975 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.140e+02 5.034e+02 6.226e+02 7.840e+02 1.929e+03, threshold=1.245e+03, percent-clipped=3.0 +2023-04-02 01:01:15,806 INFO [train.py:903] (3/4) Epoch 13, batch 5350, loss[loss=0.2187, simple_loss=0.2993, pruned_loss=0.06902, over 19438.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3021, pruned_loss=0.07589, over 3835853.95 frames. ], batch size: 70, lr: 6.33e-03, grad_scale: 16.0 +2023-04-02 01:01:24,120 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:01:49,519 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 01:01:51,106 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:08,494 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8769, 1.2896, 1.6131, 0.5165, 1.9433, 2.4331, 2.0665, 2.5670], + device='cuda:3'), covar=tensor([0.1536, 0.3347, 0.2972, 0.2436, 0.0555, 0.0238, 0.0343, 0.0305], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0303, 0.0333, 0.0253, 0.0224, 0.0164, 0.0209, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:02:21,294 INFO [train.py:903] (3/4) Epoch 13, batch 5400, loss[loss=0.2568, simple_loss=0.3225, pruned_loss=0.09552, over 19653.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3027, pruned_loss=0.07612, over 3840498.87 frames. ], batch size: 60, lr: 6.33e-03, grad_scale: 8.0 +2023-04-02 01:02:24,101 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:44,281 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87355.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:02:52,487 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-02 01:02:55,501 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87364.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:03:01,413 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9252, 5.3238, 2.8319, 4.5653, 1.0033, 5.3232, 5.3439, 5.4422], + device='cuda:3'), covar=tensor([0.0441, 0.0841, 0.1943, 0.0672, 0.4156, 0.0563, 0.0625, 0.0914], + device='cuda:3'), in_proj_covar=tensor([0.0447, 0.0372, 0.0450, 0.0322, 0.0384, 0.0383, 0.0372, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:03:16,676 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87380.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:03:19,989 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.794e+02 4.952e+02 6.479e+02 7.900e+02 1.578e+03, threshold=1.296e+03, percent-clipped=3.0 +2023-04-02 01:03:23,446 INFO [train.py:903] (3/4) Epoch 13, batch 5450, loss[loss=0.2395, simple_loss=0.3166, pruned_loss=0.08117, over 19431.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3026, pruned_loss=0.07598, over 3839502.82 frames. ], batch size: 64, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:25,744 INFO [train.py:903] (3/4) Epoch 13, batch 5500, loss[loss=0.21, simple_loss=0.2974, pruned_loss=0.06132, over 19587.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3035, pruned_loss=0.07656, over 3832884.97 frames. ], batch size: 61, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:04:47,842 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-02 01:05:21,386 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:05:21,453 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87479.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:05:25,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 5.041e+02 6.443e+02 8.590e+02 2.643e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 01:05:29,325 INFO [train.py:903] (3/4) Epoch 13, batch 5550, loss[loss=0.2193, simple_loss=0.2906, pruned_loss=0.07398, over 19739.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3038, pruned_loss=0.07659, over 3821056.31 frames. ], batch size: 51, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:05:31,797 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0076, 4.3929, 4.7162, 4.7094, 1.6430, 4.3857, 3.7785, 4.3402], + device='cuda:3'), covar=tensor([0.1484, 0.0832, 0.0589, 0.0629, 0.5773, 0.0765, 0.0678, 0.1195], + device='cuda:3'), in_proj_covar=tensor([0.0709, 0.0630, 0.0837, 0.0716, 0.0752, 0.0582, 0.0504, 0.0773], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 01:05:33,837 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 01:05:58,148 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4915, 1.2034, 1.5076, 1.7464, 3.7775, 1.1008, 2.4325, 4.0874], + device='cuda:3'), covar=tensor([0.0624, 0.3738, 0.3619, 0.2286, 0.1210, 0.3282, 0.1661, 0.0405], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0345, 0.0356, 0.0324, 0.0352, 0.0333, 0.0343, 0.0366], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:06:12,095 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0159, 1.6847, 1.9473, 1.8343, 4.5563, 0.9990, 2.5483, 4.9065], + device='cuda:3'), covar=tensor([0.0347, 0.2469, 0.2380, 0.1723, 0.0673, 0.2592, 0.1266, 0.0169], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0345, 0.0355, 0.0324, 0.0352, 0.0332, 0.0343, 0.0365], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:06:16,486 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:23,056 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 01:06:33,784 INFO [train.py:903] (3/4) Epoch 13, batch 5600, loss[loss=0.2208, simple_loss=0.3037, pruned_loss=0.06891, over 19505.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3029, pruned_loss=0.07598, over 3815594.09 frames. ], batch size: 64, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:06:47,659 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:06:50,180 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. 
limit=2.0 +2023-04-02 01:07:17,493 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87572.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:32,968 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.023e+02 6.222e+02 8.051e+02 1.328e+03, threshold=1.244e+03, percent-clipped=2.0 +2023-04-02 01:07:36,347 INFO [train.py:903] (3/4) Epoch 13, batch 5650, loss[loss=0.2003, simple_loss=0.2865, pruned_loss=0.05707, over 19596.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3015, pruned_loss=0.07525, over 3826803.89 frames. ], batch size: 61, lr: 6.32e-03, grad_scale: 8.0 +2023-04-02 01:07:38,457 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 01:07:39,087 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2638, 1.9836, 1.5040, 1.3132, 1.8181, 1.1713, 1.2413, 1.7418], + device='cuda:3'), covar=tensor([0.0817, 0.0669, 0.0999, 0.0695, 0.0472, 0.1180, 0.0609, 0.0387], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0305, 0.0330, 0.0251, 0.0241, 0.0324, 0.0292, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:07:43,448 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:07:56,302 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:01,807 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:06,355 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:23,603 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:24,453 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 01:08:38,519 INFO [train.py:903] (3/4) Epoch 13, batch 5700, loss[loss=0.2476, simple_loss=0.321, pruned_loss=0.08709, over 19518.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2996, pruned_loss=0.07415, over 3828995.24 frames. ], batch size: 54, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:08:38,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:08:53,290 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-02 01:09:36,750 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 4.997e+02 6.690e+02 8.841e+02 1.921e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 01:09:40,128 INFO [train.py:903] (3/4) Epoch 13, batch 5750, loss[loss=0.1918, simple_loss=0.2726, pruned_loss=0.05549, over 19676.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.301, pruned_loss=0.07501, over 3829284.07 frames. ], batch size: 53, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:09:41,281 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 01:09:49,518 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-02 01:09:55,222 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 01:10:07,764 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87707.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:27,882 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87724.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:10:37,353 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2481, 2.0502, 1.6017, 1.2668, 1.8981, 1.1120, 1.2244, 1.7987], + device='cuda:3'), covar=tensor([0.0817, 0.0691, 0.1005, 0.0785, 0.0434, 0.1309, 0.0634, 0.0373], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0307, 0.0332, 0.0252, 0.0241, 0.0326, 0.0294, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:10:41,880 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87735.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:10:42,502 INFO [train.py:903] (3/4) Epoch 13, batch 5800, loss[loss=0.2805, simple_loss=0.3378, pruned_loss=0.1115, over 19622.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3005, pruned_loss=0.07521, over 3822444.54 frames. ], batch size: 50, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:10:46,879 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 01:11:07,373 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.67 vs. limit=5.0 +2023-04-02 01:11:13,897 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87760.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:11:27,838 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0396, 4.3885, 4.7439, 4.7489, 1.6650, 4.4559, 3.8487, 4.4292], + device='cuda:3'), covar=tensor([0.1463, 0.0771, 0.0548, 0.0543, 0.5184, 0.0640, 0.0598, 0.1049], + device='cuda:3'), in_proj_covar=tensor([0.0703, 0.0625, 0.0836, 0.0714, 0.0745, 0.0581, 0.0502, 0.0765], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 01:11:43,246 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.041e+02 6.242e+02 7.928e+02 1.581e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 01:11:46,964 INFO [train.py:903] (3/4) Epoch 13, batch 5850, loss[loss=0.2088, simple_loss=0.2793, pruned_loss=0.0691, over 19782.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3012, pruned_loss=0.07576, over 3807523.51 frames. ], batch size: 47, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:18,374 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2356, 2.0219, 1.8351, 2.5191, 1.8289, 2.6411, 2.5584, 2.2362], + device='cuda:3'), covar=tensor([0.0774, 0.0814, 0.1011, 0.0928, 0.0909, 0.0680, 0.0812, 0.0620], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0223, 0.0224, 0.0242, 0.0229, 0.0211, 0.0191, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 01:12:33,216 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87823.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:12:47,982 INFO [train.py:903] (3/4) Epoch 13, batch 5900, loss[loss=0.235, simple_loss=0.3042, pruned_loss=0.0829, over 19679.00 frames. 
], tot_loss[loss=0.2249, simple_loss=0.3, pruned_loss=0.07493, over 3816859.12 frames. ], batch size: 53, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:12:49,039 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 01:12:51,599 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:03,845 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9166, 1.8277, 1.6962, 2.0546, 1.8314, 1.6848, 1.7090, 1.9086], + device='cuda:3'), covar=tensor([0.0843, 0.1270, 0.1257, 0.0814, 0.1061, 0.0555, 0.1108, 0.0654], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0353, 0.0296, 0.0239, 0.0295, 0.0243, 0.0284, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:13:10,521 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 01:13:27,552 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:13:47,009 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 5.447e+02 6.662e+02 8.353e+02 2.279e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 01:13:50,411 INFO [train.py:903] (3/4) Epoch 13, batch 5950, loss[loss=0.2507, simple_loss=0.3205, pruned_loss=0.09051, over 19585.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3001, pruned_loss=0.07499, over 3811465.18 frames. ], batch size: 52, lr: 6.31e-03, grad_scale: 8.0 +2023-04-02 01:14:30,803 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6903, 1.7699, 1.4011, 1.7904, 1.6791, 1.3953, 1.5892, 1.7091], + device='cuda:3'), covar=tensor([0.1147, 0.1585, 0.1809, 0.1186, 0.1471, 0.0965, 0.1565, 0.0983], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0352, 0.0295, 0.0239, 0.0295, 0.0242, 0.0284, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:14:53,069 INFO [train.py:903] (3/4) Epoch 13, batch 6000, loss[loss=0.2804, simple_loss=0.3449, pruned_loss=0.108, over 13357.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3007, pruned_loss=0.07533, over 3803280.67 frames. ], batch size: 135, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:14:53,069 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 01:15:06,406 INFO [train.py:937] (3/4) Epoch 13, validation: loss=0.175, simple_loss=0.2755, pruned_loss=0.03726, over 944034.00 frames. 
+2023-04-02 01:15:06,407 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 01:15:09,205 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87938.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:19,419 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:26,087 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:41,054 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:15:45,514 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,017 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:16:04,765 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.142e+02 6.649e+02 8.424e+02 2.027e+03, threshold=1.330e+03, percent-clipped=9.0 +2023-04-02 01:16:08,329 INFO [train.py:903] (3/4) Epoch 13, batch 6050, loss[loss=0.1928, simple_loss=0.2691, pruned_loss=0.05822, over 19468.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3017, pruned_loss=0.07606, over 3804371.45 frames. ], batch size: 49, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:16:12,361 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:12,303 INFO [train.py:903] (3/4) Epoch 13, batch 6100, loss[loss=0.2481, simple_loss=0.3311, pruned_loss=0.08257, over 19340.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3018, pruned_loss=0.07623, over 3808798.55 frames. ], batch size: 66, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:17:42,751 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:17:49,462 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:10,011 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:10,751 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.311e+02 6.775e+02 8.712e+02 1.953e+03, threshold=1.355e+03, percent-clipped=3.0 +2023-04-02 01:18:14,183 INFO [train.py:903] (3/4) Epoch 13, batch 6150, loss[loss=0.26, simple_loss=0.3318, pruned_loss=0.09408, over 19569.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3019, pruned_loss=0.07642, over 3798487.97 frames. ], batch size: 61, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:18:21,018 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-02 01:18:24,932 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:38,392 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 01:18:54,547 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88118.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:18:56,839 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:19:15,199 INFO [train.py:903] (3/4) Epoch 13, batch 6200, loss[loss=0.2414, simple_loss=0.3156, pruned_loss=0.08366, over 19660.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3025, pruned_loss=0.07687, over 3794215.50 frames. ], batch size: 58, lr: 6.30e-03, grad_scale: 8.0 +2023-04-02 01:19:45,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3415, 1.3937, 1.4573, 1.4709, 1.7400, 1.8665, 1.7074, 0.5517], + device='cuda:3'), covar=tensor([0.2108, 0.3784, 0.2324, 0.1768, 0.1519, 0.2036, 0.1375, 0.3981], + device='cuda:3'), in_proj_covar=tensor([0.0501, 0.0591, 0.0630, 0.0447, 0.0604, 0.0498, 0.0649, 0.0507], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 01:20:13,333 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.409e+02 5.304e+02 6.722e+02 8.627e+02 2.252e+03, threshold=1.344e+03, percent-clipped=3.0 +2023-04-02 01:20:16,891 INFO [train.py:903] (3/4) Epoch 13, batch 6250, loss[loss=0.2225, simple_loss=0.3054, pruned_loss=0.06981, over 19674.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3018, pruned_loss=0.0761, over 3811257.18 frames. ], batch size: 58, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:20:29,072 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88194.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:20:44,711 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 01:20:53,433 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4070, 1.3392, 1.4089, 1.4784, 2.9748, 1.0187, 2.3068, 3.3367], + device='cuda:3'), covar=tensor([0.0476, 0.2602, 0.2873, 0.1809, 0.0730, 0.2644, 0.1234, 0.0280], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0344, 0.0356, 0.0324, 0.0350, 0.0332, 0.0341, 0.0365], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:20:58,211 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:03,624 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4465, 1.4954, 1.7266, 1.6271, 2.6738, 2.2395, 2.7009, 1.2227], + device='cuda:3'), covar=tensor([0.2258, 0.4067, 0.2642, 0.1816, 0.1394, 0.1989, 0.1471, 0.3827], + device='cuda:3'), in_proj_covar=tensor([0.0503, 0.0595, 0.0633, 0.0450, 0.0606, 0.0501, 0.0652, 0.0508], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 01:21:20,739 INFO [train.py:903] (3/4) Epoch 13, batch 6300, loss[loss=0.2157, simple_loss=0.2838, pruned_loss=0.07376, over 19779.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3013, pruned_loss=0.07619, over 3814508.10 frames. 
], batch size: 47, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:21:23,582 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:21:52,953 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:22:18,335 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.052e+02 6.199e+02 7.712e+02 1.989e+03, threshold=1.240e+03, percent-clipped=5.0 +2023-04-02 01:22:21,899 INFO [train.py:903] (3/4) Epoch 13, batch 6350, loss[loss=0.2091, simple_loss=0.292, pruned_loss=0.06313, over 19615.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3006, pruned_loss=0.07582, over 3811222.21 frames. ], batch size: 57, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:22:50,135 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88309.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:23:02,557 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:08,251 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:24,611 INFO [train.py:903] (3/4) Epoch 13, batch 6400, loss[loss=0.207, simple_loss=0.2983, pruned_loss=0.05783, over 19537.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3, pruned_loss=0.07528, over 3810030.45 frames. ], batch size: 56, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:23:27,210 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:32,584 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:38,245 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:58,508 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:23:59,590 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9006, 1.1802, 1.5935, 0.6160, 2.0872, 2.4790, 2.1589, 2.5606], + device='cuda:3'), covar=tensor([0.1509, 0.3529, 0.2920, 0.2373, 0.0527, 0.0239, 0.0334, 0.0314], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0302, 0.0331, 0.0252, 0.0220, 0.0165, 0.0207, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:24:22,192 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 5.639e+02 6.967e+02 8.699e+02 1.659e+03, threshold=1.393e+03, percent-clipped=7.0 +2023-04-02 01:24:25,788 INFO [train.py:903] (3/4) Epoch 13, batch 6450, loss[loss=0.2121, simple_loss=0.2997, pruned_loss=0.06229, over 18713.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3001, pruned_loss=0.07507, over 3814728.75 frames. 
], batch size: 74, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:24:57,403 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9035, 1.2596, 1.5294, 0.5882, 2.0448, 2.4274, 2.1385, 2.5942], + device='cuda:3'), covar=tensor([0.1574, 0.3510, 0.3188, 0.2489, 0.0556, 0.0282, 0.0341, 0.0314], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0302, 0.0331, 0.0251, 0.0220, 0.0164, 0.0207, 0.0216], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:25:05,105 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 01:25:28,931 INFO [train.py:903] (3/4) Epoch 13, batch 6500, loss[loss=0.2149, simple_loss=0.3004, pruned_loss=0.06466, over 19717.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3013, pruned_loss=0.07563, over 3814297.38 frames. ], batch size: 59, lr: 6.29e-03, grad_scale: 8.0 +2023-04-02 01:25:30,963 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 01:25:41,356 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 01:26:00,497 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:26:26,785 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.125e+02 5.190e+02 6.145e+02 7.751e+02 1.614e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 01:26:30,118 INFO [train.py:903] (3/4) Epoch 13, batch 6550, loss[loss=0.2222, simple_loss=0.2994, pruned_loss=0.07251, over 19303.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3002, pruned_loss=0.075, over 3826792.57 frames. ], batch size: 44, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:27:31,538 INFO [train.py:903] (3/4) Epoch 13, batch 6600, loss[loss=0.2272, simple_loss=0.3116, pruned_loss=0.07143, over 19104.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2993, pruned_loss=0.07448, over 3818581.63 frames. ], batch size: 69, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:28:21,368 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:28:28,438 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 5.175e+02 6.632e+02 9.239e+02 1.861e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-04-02 01:28:32,110 INFO [train.py:903] (3/4) Epoch 13, batch 6650, loss[loss=0.1897, simple_loss=0.2797, pruned_loss=0.04987, over 19613.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3014, pruned_loss=0.07592, over 3827603.07 frames. ], batch size: 57, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:34,641 INFO [train.py:903] (3/4) Epoch 13, batch 6700, loss[loss=0.2114, simple_loss=0.2925, pruned_loss=0.06517, over 19667.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3016, pruned_loss=0.07583, over 3826097.85 frames. 
], batch size: 60, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:29:47,546 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1991, 1.1167, 1.7212, 0.8970, 2.3404, 3.0476, 2.7888, 3.2202], + device='cuda:3'), covar=tensor([0.1547, 0.3715, 0.3097, 0.2438, 0.0570, 0.0203, 0.0241, 0.0263], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0300, 0.0330, 0.0251, 0.0220, 0.0164, 0.0207, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:29:55,535 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88653.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:30:29,674 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.623e+02 5.222e+02 6.682e+02 8.627e+02 1.913e+03, threshold=1.336e+03, percent-clipped=6.0 +2023-04-02 01:30:33,148 INFO [train.py:903] (3/4) Epoch 13, batch 6750, loss[loss=0.2545, simple_loss=0.3277, pruned_loss=0.09059, over 19678.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3017, pruned_loss=0.07601, over 3834859.73 frames. ], batch size: 59, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:31:31,214 INFO [train.py:903] (3/4) Epoch 13, batch 6800, loss[loss=0.1736, simple_loss=0.2475, pruned_loss=0.04988, over 19772.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3027, pruned_loss=0.07645, over 3836425.76 frames. ], batch size: 47, lr: 6.28e-03, grad_scale: 8.0 +2023-04-02 01:32:17,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 01:32:18,136 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 01:32:21,842 INFO [train.py:903] (3/4) Epoch 14, batch 0, loss[loss=0.2774, simple_loss=0.3277, pruned_loss=0.1135, over 18658.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3277, pruned_loss=0.1135, over 18658.00 frames. ], batch size: 41, lr: 6.05e-03, grad_scale: 8.0 +2023-04-02 01:32:21,843 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 01:32:33,649 INFO [train.py:937] (3/4) Epoch 14, validation: loss=0.1763, simple_loss=0.2772, pruned_loss=0.03773, over 944034.00 frames. +2023-04-02 01:32:33,651 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 01:32:41,833 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88768.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:32:49,771 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 01:32:59,169 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.998e+02 6.377e+02 7.989e+02 1.719e+03, threshold=1.275e+03, percent-clipped=2.0 +2023-04-02 01:32:59,657 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2711, 2.1049, 1.8370, 1.7135, 1.4816, 1.6932, 0.4047, 1.0699], + device='cuda:3'), covar=tensor([0.0454, 0.0500, 0.0433, 0.0653, 0.1080, 0.0742, 0.1129, 0.0887], + device='cuda:3'), in_proj_covar=tensor([0.0340, 0.0336, 0.0331, 0.0357, 0.0432, 0.0357, 0.0315, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:33:40,052 INFO [train.py:903] (3/4) Epoch 14, batch 50, loss[loss=0.2404, simple_loss=0.3171, pruned_loss=0.08183, over 18188.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2951, pruned_loss=0.0704, over 875453.65 frames. 
], batch size: 83, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:01,734 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:04,147 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3848, 2.0223, 1.6779, 1.1696, 2.0683, 1.0946, 1.3226, 1.9142], + device='cuda:3'), covar=tensor([0.0770, 0.0664, 0.0856, 0.0826, 0.0380, 0.1186, 0.0616, 0.0342], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0308, 0.0329, 0.0251, 0.0241, 0.0326, 0.0298, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:34:15,378 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 01:34:34,167 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:34:40,591 INFO [train.py:903] (3/4) Epoch 14, batch 100, loss[loss=0.215, simple_loss=0.2974, pruned_loss=0.06626, over 19607.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3005, pruned_loss=0.07348, over 1530213.02 frames. ], batch size: 57, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:34:51,080 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 01:35:02,786 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.271e+02 6.621e+02 8.700e+02 2.391e+03, threshold=1.324e+03, percent-clipped=10.0 +2023-04-02 01:35:41,059 INFO [train.py:903] (3/4) Epoch 14, batch 150, loss[loss=0.2444, simple_loss=0.3126, pruned_loss=0.08815, over 19508.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3024, pruned_loss=0.07586, over 2023137.08 frames. ], batch size: 64, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:35:53,914 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4107, 1.2847, 1.3196, 1.7251, 1.2916, 1.6152, 1.6425, 1.5160], + device='cuda:3'), covar=tensor([0.0844, 0.0981, 0.1078, 0.0750, 0.0855, 0.0782, 0.0888, 0.0693], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0223, 0.0225, 0.0244, 0.0228, 0.0210, 0.0193, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 01:36:00,305 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 01:36:29,599 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0504, 1.1490, 1.4539, 0.6481, 1.9154, 2.2190, 1.9238, 2.2639], + device='cuda:3'), covar=tensor([0.1410, 0.3320, 0.2984, 0.2460, 0.0665, 0.0365, 0.0396, 0.0389], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0301, 0.0331, 0.0251, 0.0220, 0.0165, 0.0207, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:36:38,762 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 01:36:39,937 INFO [train.py:903] (3/4) Epoch 14, batch 200, loss[loss=0.176, simple_loss=0.2527, pruned_loss=0.04966, over 18718.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3035, pruned_loss=0.07614, over 2423476.62 frames. 
], batch size: 41, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:03,953 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.747e+02 5.170e+02 6.545e+02 8.695e+02 1.666e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 01:37:37,778 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7925, 4.2099, 4.4289, 4.4274, 1.6315, 4.1193, 3.5190, 4.1017], + device='cuda:3'), covar=tensor([0.1441, 0.0743, 0.0641, 0.0655, 0.5269, 0.0706, 0.0679, 0.1266], + device='cuda:3'), in_proj_covar=tensor([0.0707, 0.0631, 0.0835, 0.0715, 0.0755, 0.0583, 0.0503, 0.0765], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 01:37:41,112 INFO [train.py:903] (3/4) Epoch 14, batch 250, loss[loss=0.2207, simple_loss=0.2963, pruned_loss=0.07261, over 19871.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3025, pruned_loss=0.07577, over 2731796.67 frames. ], batch size: 52, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:37:53,623 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89024.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:38:17,672 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:38:22,455 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89049.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 01:38:43,482 INFO [train.py:903] (3/4) Epoch 14, batch 300, loss[loss=0.1923, simple_loss=0.2671, pruned_loss=0.05878, over 19417.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3022, pruned_loss=0.07521, over 2979031.44 frames. ], batch size: 48, lr: 6.04e-03, grad_scale: 8.0 +2023-04-02 01:38:52,798 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9366, 5.0344, 5.8755, 5.7853, 1.9393, 5.4604, 4.6230, 5.4714], + device='cuda:3'), covar=tensor([0.1388, 0.0766, 0.0461, 0.0539, 0.5252, 0.0629, 0.0550, 0.0949], + device='cuda:3'), in_proj_covar=tensor([0.0707, 0.0630, 0.0836, 0.0715, 0.0753, 0.0583, 0.0504, 0.0764], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 01:39:05,433 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.830e+02 5.923e+02 7.544e+02 2.111e+03, threshold=1.185e+03, percent-clipped=1.0 +2023-04-02 01:39:45,150 INFO [train.py:903] (3/4) Epoch 14, batch 350, loss[loss=0.2134, simple_loss=0.2907, pruned_loss=0.06804, over 19761.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3019, pruned_loss=0.07524, over 3171945.82 frames. ], batch size: 54, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:39:47,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 01:40:29,935 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9312, 2.0029, 2.1735, 2.7897, 1.8727, 2.5494, 2.3721, 1.9720], + device='cuda:3'), covar=tensor([0.3867, 0.3549, 0.1649, 0.1888, 0.3863, 0.1691, 0.3867, 0.2936], + device='cuda:3'), in_proj_covar=tensor([0.0824, 0.0859, 0.0666, 0.0908, 0.0807, 0.0738, 0.0811, 0.0728], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 01:40:46,829 INFO [train.py:903] (3/4) Epoch 14, batch 400, loss[loss=0.2023, simple_loss=0.2908, pruned_loss=0.05685, over 18197.00 frames. 
], tot_loss[loss=0.2257, simple_loss=0.3016, pruned_loss=0.07489, over 3314809.11 frames. ], batch size: 84, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:41:11,934 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.893e+02 5.942e+02 7.582e+02 1.529e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-02 01:41:16,767 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:41:36,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4515, 1.1882, 1.1181, 1.3305, 1.0362, 1.2629, 1.1313, 1.3214], + device='cuda:3'), covar=tensor([0.0971, 0.1225, 0.1439, 0.0870, 0.1175, 0.0576, 0.1302, 0.0731], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0353, 0.0297, 0.0240, 0.0298, 0.0243, 0.0285, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:41:47,619 INFO [train.py:903] (3/4) Epoch 14, batch 450, loss[loss=0.2724, simple_loss=0.3355, pruned_loss=0.1046, over 19583.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3017, pruned_loss=0.07481, over 3434119.54 frames. ], batch size: 61, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:42:19,964 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 01:42:20,930 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 01:42:51,706 INFO [train.py:903] (3/4) Epoch 14, batch 500, loss[loss=0.2133, simple_loss=0.2873, pruned_loss=0.06965, over 19406.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3019, pruned_loss=0.0753, over 3536322.78 frames. ], batch size: 48, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:43:04,782 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7432, 1.7110, 1.4460, 1.7640, 1.6969, 1.4240, 1.3939, 1.6727], + device='cuda:3'), covar=tensor([0.1037, 0.1326, 0.1508, 0.0947, 0.1136, 0.0701, 0.1431, 0.0765], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0350, 0.0295, 0.0239, 0.0295, 0.0241, 0.0285, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:43:13,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.086e+02 6.501e+02 8.394e+02 1.936e+03, threshold=1.300e+03, percent-clipped=6.0 +2023-04-02 01:43:51,223 INFO [train.py:903] (3/4) Epoch 14, batch 550, loss[loss=0.2593, simple_loss=0.3214, pruned_loss=0.09867, over 13083.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3016, pruned_loss=0.07488, over 3601448.24 frames. ], batch size: 137, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:44:05,921 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 01:44:50,890 INFO [train.py:903] (3/4) Epoch 14, batch 600, loss[loss=0.2236, simple_loss=0.3056, pruned_loss=0.07079, over 19662.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3022, pruned_loss=0.07509, over 3650208.88 frames. 
], batch size: 55, lr: 6.03e-03, grad_scale: 8.0 +2023-04-02 01:45:14,768 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.733e+02 5.437e+02 6.463e+02 8.394e+02 1.645e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 01:45:23,674 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:27,406 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:45:32,884 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 01:45:52,144 INFO [train.py:903] (3/4) Epoch 14, batch 650, loss[loss=0.1974, simple_loss=0.27, pruned_loss=0.06236, over 19319.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.302, pruned_loss=0.07529, over 3683130.81 frames. ], batch size: 44, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:46:29,575 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89443.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:46:45,786 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89457.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:46:53,907 INFO [train.py:903] (3/4) Epoch 14, batch 700, loss[loss=0.2229, simple_loss=0.2883, pruned_loss=0.07875, over 19748.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3013, pruned_loss=0.07501, over 3718749.50 frames. ], batch size: 47, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:47:21,077 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.092e+02 5.132e+02 6.716e+02 8.076e+02 1.293e+03, threshold=1.343e+03, percent-clipped=1.0 +2023-04-02 01:47:44,510 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:47:59,822 INFO [train.py:903] (3/4) Epoch 14, batch 750, loss[loss=0.2245, simple_loss=0.3008, pruned_loss=0.07408, over 19532.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3007, pruned_loss=0.07464, over 3744619.21 frames. ], batch size: 54, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:48:14,756 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:19,171 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:48:30,293 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4988, 2.2123, 2.1982, 2.6867, 2.3694, 2.3993, 2.0403, 2.7078], + device='cuda:3'), covar=tensor([0.0906, 0.1635, 0.1321, 0.1005, 0.1346, 0.0435, 0.1230, 0.0588], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0294, 0.0241, 0.0284, 0.0240], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:48:48,362 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 01:49:01,551 INFO [train.py:903] (3/4) Epoch 14, batch 800, loss[loss=0.2158, simple_loss=0.2999, pruned_loss=0.0659, over 19686.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3018, pruned_loss=0.07527, over 3757410.13 frames. ], batch size: 59, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:49:16,442 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 01:49:24,442 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 5.343e+02 6.523e+02 8.164e+02 1.688e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 01:50:01,792 INFO [train.py:903] (3/4) Epoch 14, batch 850, loss[loss=0.2712, simple_loss=0.3399, pruned_loss=0.1012, over 19672.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3024, pruned_loss=0.07547, over 3779765.35 frames. ], batch size: 55, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:50:38,079 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3013, 2.0444, 1.5360, 1.1662, 2.0154, 1.1244, 1.1257, 1.8062], + device='cuda:3'), covar=tensor([0.0968, 0.0738, 0.1016, 0.0956, 0.0447, 0.1274, 0.0769, 0.0446], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0307, 0.0328, 0.0250, 0.0240, 0.0326, 0.0297, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:50:42,937 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:50:55,560 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 01:51:03,554 INFO [train.py:903] (3/4) Epoch 14, batch 900, loss[loss=0.2209, simple_loss=0.2909, pruned_loss=0.07545, over 19115.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3022, pruned_loss=0.07557, over 3792416.44 frames. ], batch size: 42, lr: 6.02e-03, grad_scale: 8.0 +2023-04-02 01:51:29,677 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 4.983e+02 6.131e+02 7.467e+02 1.791e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 01:51:46,412 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:52:07,438 INFO [train.py:903] (3/4) Epoch 14, batch 950, loss[loss=0.2488, simple_loss=0.3269, pruned_loss=0.08535, over 19515.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3031, pruned_loss=0.07614, over 3799097.96 frames. ], batch size: 56, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:52:07,496 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 01:52:33,961 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:06,438 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:10,802 INFO [train.py:903] (3/4) Epoch 14, batch 1000, loss[loss=0.249, simple_loss=0.3186, pruned_loss=0.08966, over 19534.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3009, pruned_loss=0.07514, over 3814504.45 frames. 
], batch size: 54, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:53:34,652 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.300e+02 6.720e+02 8.420e+02 1.635e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-02 01:53:36,448 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:38,576 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:53:59,176 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89801.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:54:02,293 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 01:54:13,791 INFO [train.py:903] (3/4) Epoch 14, batch 1050, loss[loss=0.2525, simple_loss=0.3246, pruned_loss=0.09015, over 19543.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3003, pruned_loss=0.0751, over 3815290.15 frames. ], batch size: 64, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:54:46,493 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 01:55:02,240 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:16,491 INFO [train.py:903] (3/4) Epoch 14, batch 1100, loss[loss=0.2425, simple_loss=0.317, pruned_loss=0.084, over 19675.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.302, pruned_loss=0.0761, over 3812193.99 frames. ], batch size: 55, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:55:24,741 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:42,988 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89883.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:55:43,840 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 5.450e+02 6.738e+02 9.006e+02 2.173e+03, threshold=1.348e+03, percent-clipped=6.0 +2023-04-02 01:56:05,616 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:05,691 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89902.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:19,139 INFO [train.py:903] (3/4) Epoch 14, batch 1150, loss[loss=0.1898, simple_loss=0.2772, pruned_loss=0.05118, over 19635.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3007, pruned_loss=0.07522, over 3809231.01 frames. 
], batch size: 53, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:56:24,155 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89916.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 01:56:39,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:56:54,475 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8820, 4.3691, 2.7222, 3.8591, 0.9159, 4.2515, 4.2252, 4.3292], + device='cuda:3'), covar=tensor([0.0580, 0.1011, 0.1946, 0.0807, 0.4419, 0.0695, 0.0787, 0.0900], + device='cuda:3'), in_proj_covar=tensor([0.0443, 0.0371, 0.0446, 0.0324, 0.0390, 0.0381, 0.0374, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 01:57:25,197 INFO [train.py:903] (3/4) Epoch 14, batch 1200, loss[loss=0.2191, simple_loss=0.2948, pruned_loss=0.07167, over 19777.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3, pruned_loss=0.07447, over 3828373.52 frames. ], batch size: 49, lr: 6.01e-03, grad_scale: 8.0 +2023-04-02 01:57:49,395 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.889e+02 5.798e+02 7.146e+02 1.027e+03, threshold=1.160e+03, percent-clipped=0.0 +2023-04-02 01:57:51,938 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:57:55,114 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 01:58:28,856 INFO [train.py:903] (3/4) Epoch 14, batch 1250, loss[loss=0.2354, simple_loss=0.3103, pruned_loss=0.08029, over 19657.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2993, pruned_loss=0.07408, over 3839939.11 frames. ], batch size: 58, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:04,881 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 01:59:31,258 INFO [train.py:903] (3/4) Epoch 14, batch 1300, loss[loss=0.2748, simple_loss=0.3334, pruned_loss=0.1081, over 13623.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3003, pruned_loss=0.07461, over 3815045.94 frames. 
], batch size: 136, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 01:59:36,538 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1887, 2.7890, 2.0872, 2.0003, 1.9557, 2.3673, 0.9127, 2.0087], + device='cuda:3'), covar=tensor([0.0594, 0.0566, 0.0573, 0.1013, 0.0915, 0.0970, 0.1116, 0.0899], + device='cuda:3'), in_proj_covar=tensor([0.0344, 0.0337, 0.0335, 0.0364, 0.0438, 0.0361, 0.0319, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 01:59:57,898 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 5.410e+02 6.769e+02 7.813e+02 2.183e+03, threshold=1.354e+03, percent-clipped=2.0 +2023-04-02 02:00:02,808 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0941, 1.7656, 1.3627, 1.1143, 1.6034, 1.0502, 1.1744, 1.5453], + device='cuda:3'), covar=tensor([0.0691, 0.0706, 0.0967, 0.0712, 0.0435, 0.1166, 0.0553, 0.0398], + device='cuda:3'), in_proj_covar=tensor([0.0298, 0.0307, 0.0327, 0.0250, 0.0238, 0.0326, 0.0295, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:00:25,738 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:00:33,588 INFO [train.py:903] (3/4) Epoch 14, batch 1350, loss[loss=0.2144, simple_loss=0.2966, pruned_loss=0.06604, over 19574.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07441, over 3817998.09 frames. ], batch size: 61, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:00:58,168 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:16,459 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:24,049 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-02 02:01:28,069 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:29,295 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:01:35,839 INFO [train.py:903] (3/4) Epoch 14, batch 1400, loss[loss=0.2139, simple_loss=0.29, pruned_loss=0.06889, over 19375.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2989, pruned_loss=0.07396, over 3810523.35 frames. ], batch size: 70, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:01:46,557 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90172.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:01:59,350 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:02:00,108 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.450e+02 7.234e+02 1.006e+03 2.886e+03, threshold=1.447e+03, percent-clipped=11.0 +2023-04-02 02:02:17,962 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90197.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:02:34,957 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 02:02:38,345 INFO [train.py:903] (3/4) Epoch 14, batch 1450, loss[loss=0.2141, simple_loss=0.2987, pruned_loss=0.06477, over 19781.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2995, pruned_loss=0.07438, over 3820478.68 frames. ], batch size: 56, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:02:53,329 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90227.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:12,044 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:22,213 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:03:38,201 INFO [train.py:903] (3/4) Epoch 14, batch 1500, loss[loss=0.2048, simple_loss=0.2688, pruned_loss=0.07042, over 19711.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3, pruned_loss=0.07431, over 3830181.98 frames. ], batch size: 45, lr: 6.00e-03, grad_scale: 8.0 +2023-04-02 02:03:42,164 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:04:03,122 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.072e+02 6.110e+02 7.921e+02 2.000e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 02:04:09,181 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2414, 3.7323, 3.8575, 3.8507, 1.6509, 3.6180, 3.1594, 3.5887], + device='cuda:3'), covar=tensor([0.1440, 0.0980, 0.0570, 0.0668, 0.4805, 0.0814, 0.0643, 0.1043], + device='cuda:3'), in_proj_covar=tensor([0.0700, 0.0624, 0.0827, 0.0710, 0.0746, 0.0576, 0.0499, 0.0764], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:04:23,088 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4769, 2.3882, 1.6192, 1.3780, 2.2250, 1.1860, 1.1129, 1.9211], + device='cuda:3'), covar=tensor([0.1050, 0.0622, 0.0974, 0.0799, 0.0389, 0.1251, 0.0870, 0.0490], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0308, 0.0327, 0.0251, 0.0238, 0.0327, 0.0298, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:04:39,093 INFO [train.py:903] (3/4) Epoch 14, batch 1550, loss[loss=0.2885, simple_loss=0.3472, pruned_loss=0.1149, over 13381.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3009, pruned_loss=0.0755, over 3800520.85 frames. ], batch size: 136, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:16,349 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:05:42,816 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9317, 1.3454, 1.0433, 0.9282, 1.1961, 0.9508, 0.9398, 1.2613], + device='cuda:3'), covar=tensor([0.0529, 0.0722, 0.1040, 0.0645, 0.0488, 0.1166, 0.0537, 0.0420], + device='cuda:3'), in_proj_covar=tensor([0.0298, 0.0308, 0.0327, 0.0250, 0.0239, 0.0327, 0.0297, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:05:44,734 INFO [train.py:903] (3/4) Epoch 14, batch 1600, loss[loss=0.2446, simple_loss=0.3178, pruned_loss=0.08565, over 18197.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3011, pruned_loss=0.0758, over 3809621.35 frames. 
], batch size: 83, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:05:51,741 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:02,642 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 02:06:08,371 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 5.288e+02 6.387e+02 7.705e+02 1.564e+03, threshold=1.277e+03, percent-clipped=2.0 +2023-04-02 02:06:46,521 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:06:47,288 INFO [train.py:903] (3/4) Epoch 14, batch 1650, loss[loss=0.2128, simple_loss=0.2992, pruned_loss=0.06324, over 18624.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.301, pruned_loss=0.07542, over 3813640.03 frames. ], batch size: 74, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:15,928 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:07:47,909 INFO [train.py:903] (3/4) Epoch 14, batch 1700, loss[loss=0.2485, simple_loss=0.3224, pruned_loss=0.08729, over 19663.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3013, pruned_loss=0.07571, over 3808521.56 frames. ], batch size: 59, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:07:50,700 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7815, 1.8832, 2.0256, 2.3352, 1.6276, 2.1785, 2.1792, 1.9391], + device='cuda:3'), covar=tensor([0.3493, 0.2895, 0.1572, 0.1841, 0.3090, 0.1612, 0.3841, 0.2762], + device='cuda:3'), in_proj_covar=tensor([0.0817, 0.0852, 0.0665, 0.0893, 0.0797, 0.0737, 0.0800, 0.0729], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 02:08:03,253 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:13,896 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 5.354e+02 6.441e+02 8.114e+02 1.316e+03, threshold=1.288e+03, percent-clipped=1.0 +2023-04-02 02:08:19,178 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-02 02:08:23,199 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:08:25,347 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 02:08:49,500 INFO [train.py:903] (3/4) Epoch 14, batch 1750, loss[loss=0.1908, simple_loss=0.2798, pruned_loss=0.05092, over 19673.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.302, pruned_loss=0.07624, over 3798265.05 frames. ], batch size: 53, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:08:49,818 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:09:54,000 INFO [train.py:903] (3/4) Epoch 14, batch 1800, loss[loss=0.252, simple_loss=0.3373, pruned_loss=0.08333, over 19677.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3022, pruned_loss=0.07618, over 3797754.30 frames. ], batch size: 60, lr: 5.99e-03, grad_scale: 8.0 +2023-04-02 02:10:17,895 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. 
limit=5.0 +2023-04-02 02:10:18,178 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.749e+02 5.076e+02 6.157e+02 8.007e+02 1.318e+03, threshold=1.231e+03, percent-clipped=1.0 +2023-04-02 02:10:29,854 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:34,922 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90598.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:40,544 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 02:10:47,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:10:48,804 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 02:10:55,678 INFO [train.py:903] (3/4) Epoch 14, batch 1850, loss[loss=0.2491, simple_loss=0.3216, pruned_loss=0.08834, over 19621.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.302, pruned_loss=0.07585, over 3793934.28 frames. ], batch size: 57, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:11:06,608 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 02:11:07,265 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:11:24,428 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 02:11:30,936 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9702, 3.5560, 2.4300, 3.2004, 1.1117, 3.4890, 3.3941, 3.4861], + device='cuda:3'), covar=tensor([0.0770, 0.1099, 0.1995, 0.0919, 0.3539, 0.0761, 0.0848, 0.1130], + device='cuda:3'), in_proj_covar=tensor([0.0448, 0.0374, 0.0451, 0.0326, 0.0391, 0.0384, 0.0379, 0.0412], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:11:38,566 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-04-02 02:11:58,529 INFO [train.py:903] (3/4) Epoch 14, batch 1900, loss[loss=0.1817, simple_loss=0.2593, pruned_loss=0.05206, over 19792.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3015, pruned_loss=0.07552, over 3795465.80 frames. ], batch size: 48, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:12:08,016 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5501, 4.1126, 4.2520, 4.2387, 1.6181, 3.9707, 3.4783, 3.9976], + device='cuda:3'), covar=tensor([0.1550, 0.0702, 0.0571, 0.0635, 0.5273, 0.0719, 0.0655, 0.1011], + device='cuda:3'), in_proj_covar=tensor([0.0713, 0.0635, 0.0844, 0.0730, 0.0758, 0.0590, 0.0508, 0.0778], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:12:12,362 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-02 02:12:13,986 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8703, 1.3612, 1.0709, 0.9680, 1.1888, 0.9935, 0.9305, 1.2849], + device='cuda:3'), covar=tensor([0.0585, 0.0756, 0.0996, 0.0595, 0.0473, 0.1167, 0.0559, 0.0431], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0309, 0.0329, 0.0252, 0.0241, 0.0328, 0.0297, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:12:15,962 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 02:12:18,385 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2716, 3.8194, 3.9063, 3.9035, 1.4436, 3.6866, 3.2562, 3.6270], + device='cuda:3'), covar=tensor([0.1513, 0.0773, 0.0572, 0.0654, 0.5195, 0.0811, 0.0639, 0.1065], + device='cuda:3'), in_proj_covar=tensor([0.0713, 0.0634, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:12:24,251 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.065e+02 6.349e+02 7.558e+02 1.663e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-02 02:12:29,232 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0385, 4.4040, 4.6952, 4.7194, 1.6459, 4.3831, 3.8694, 4.3422], + device='cuda:3'), covar=tensor([0.1408, 0.0776, 0.0578, 0.0549, 0.5447, 0.0728, 0.0609, 0.1144], + device='cuda:3'), in_proj_covar=tensor([0.0713, 0.0633, 0.0842, 0.0729, 0.0756, 0.0590, 0.0507, 0.0778], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:12:43,840 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 02:12:48,695 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2694, 1.2141, 1.2425, 1.2849, 1.0018, 1.3836, 1.3560, 1.3348], + device='cuda:3'), covar=tensor([0.0846, 0.0929, 0.1035, 0.0720, 0.0834, 0.0764, 0.0796, 0.0729], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0222, 0.0223, 0.0243, 0.0230, 0.0209, 0.0192, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 02:12:54,580 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:12:59,232 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:13:00,238 INFO [train.py:903] (3/4) Epoch 14, batch 1950, loss[loss=0.1883, simple_loss=0.2815, pruned_loss=0.04755, over 19677.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3014, pruned_loss=0.07576, over 3789128.54 frames. 
], batch size: 58, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:13:40,058 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2100, 2.8558, 2.2161, 2.1986, 2.0375, 2.5419, 1.0609, 2.0324], + device='cuda:3'), covar=tensor([0.0503, 0.0474, 0.0512, 0.0883, 0.0873, 0.0786, 0.1034, 0.0845], + device='cuda:3'), in_proj_covar=tensor([0.0339, 0.0336, 0.0331, 0.0359, 0.0434, 0.0358, 0.0315, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 02:14:03,111 INFO [train.py:903] (3/4) Epoch 14, batch 2000, loss[loss=0.2079, simple_loss=0.2961, pruned_loss=0.05984, over 19308.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3025, pruned_loss=0.07586, over 3796497.23 frames. ], batch size: 66, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:14:22,427 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6392, 1.2651, 1.3957, 1.6434, 3.1662, 1.0617, 2.3069, 3.5847], + device='cuda:3'), covar=tensor([0.0440, 0.2724, 0.2769, 0.1671, 0.0723, 0.2510, 0.1151, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0347, 0.0360, 0.0329, 0.0351, 0.0335, 0.0346, 0.0368], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:14:27,795 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.207e+02 5.999e+02 7.179e+02 1.269e+03, threshold=1.200e+03, percent-clipped=0.0 +2023-04-02 02:14:59,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 02:15:05,779 INFO [train.py:903] (3/4) Epoch 14, batch 2050, loss[loss=0.2087, simple_loss=0.2894, pruned_loss=0.06399, over 19507.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3024, pruned_loss=0.07559, over 3798769.55 frames. ], batch size: 54, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:15:14,228 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90821.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:18,963 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 02:15:20,142 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 02:15:24,013 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90828.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:15:42,057 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 02:16:01,449 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90858.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:07,309 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:08,091 INFO [train.py:903] (3/4) Epoch 14, batch 2100, loss[loss=0.2416, simple_loss=0.3229, pruned_loss=0.0802, over 18252.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3025, pruned_loss=0.07563, over 3803335.62 frames. 
], batch size: 83, lr: 5.98e-03, grad_scale: 8.0 +2023-04-02 02:16:12,852 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:32,690 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.435e+02 5.132e+02 6.276e+02 7.348e+02 1.970e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 02:16:37,022 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 02:16:39,478 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:16:46,421 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4857, 1.2030, 1.1399, 1.4015, 1.1114, 1.2662, 1.1577, 1.3333], + device='cuda:3'), covar=tensor([0.1117, 0.1237, 0.1525, 0.1012, 0.1174, 0.0596, 0.1343, 0.0815], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0354, 0.0296, 0.0240, 0.0295, 0.0240, 0.0288, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:16:58,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 02:17:09,974 INFO [train.py:903] (3/4) Epoch 14, batch 2150, loss[loss=0.2014, simple_loss=0.2872, pruned_loss=0.05777, over 19772.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.301, pruned_loss=0.07468, over 3819415.31 frames. ], batch size: 54, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:17:38,852 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:17:53,046 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-02 02:18:11,004 INFO [train.py:903] (3/4) Epoch 14, batch 2200, loss[loss=0.1977, simple_loss=0.2666, pruned_loss=0.06445, over 19744.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3005, pruned_loss=0.07464, over 3820187.37 frames. ], batch size: 46, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:18:12,477 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:19,878 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2293, 1.5172, 2.0412, 1.4816, 2.9607, 4.6689, 4.5738, 5.0182], + device='cuda:3'), covar=tensor([0.1622, 0.3406, 0.2914, 0.2049, 0.0589, 0.0172, 0.0143, 0.0117], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0302, 0.0332, 0.0254, 0.0225, 0.0166, 0.0206, 0.0218], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 02:18:23,373 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:18:35,015 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. 
limit=5.0 +2023-04-02 02:18:35,325 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 5.132e+02 5.971e+02 7.684e+02 1.888e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-02 02:18:43,460 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90990.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:19:13,346 INFO [train.py:903] (3/4) Epoch 14, batch 2250, loss[loss=0.1995, simple_loss=0.2785, pruned_loss=0.06024, over 19528.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2992, pruned_loss=0.07397, over 3810020.02 frames. ], batch size: 54, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:14,197 INFO [train.py:903] (3/4) Epoch 14, batch 2300, loss[loss=0.2327, simple_loss=0.3173, pruned_loss=0.07406, over 19494.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3001, pruned_loss=0.07459, over 3807787.08 frames. ], batch size: 64, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:20:26,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 02:20:38,185 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.417e+02 6.829e+02 8.730e+02 1.535e+03, threshold=1.366e+03, percent-clipped=12.0 +2023-04-02 02:20:38,721 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:20:47,622 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4193, 2.1204, 2.4029, 2.7157, 2.3991, 2.2726, 2.2001, 2.6803], + device='cuda:3'), covar=tensor([0.0836, 0.1553, 0.1084, 0.0771, 0.1128, 0.0428, 0.0999, 0.0492], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0350, 0.0294, 0.0239, 0.0293, 0.0239, 0.0285, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:21:11,036 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:21:16,437 INFO [train.py:903] (3/4) Epoch 14, batch 2350, loss[loss=0.2094, simple_loss=0.2992, pruned_loss=0.05976, over 19610.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3003, pruned_loss=0.07454, over 3815282.98 frames. ], batch size: 57, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:21:57,692 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 02:22:13,116 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 02:22:14,984 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.91 vs. limit=5.0 +2023-04-02 02:22:17,788 INFO [train.py:903] (3/4) Epoch 14, batch 2400, loss[loss=0.2252, simple_loss=0.3007, pruned_loss=0.07486, over 19683.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2998, pruned_loss=0.07417, over 3815161.90 frames. 
], batch size: 55, lr: 5.97e-03, grad_scale: 8.0 +2023-04-02 02:22:42,190 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.466e+02 6.599e+02 8.174e+02 1.804e+03, threshold=1.320e+03, percent-clipped=5.0 +2023-04-02 02:22:52,592 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6399, 4.1030, 4.2701, 4.2395, 1.6619, 3.9623, 3.5160, 3.9754], + device='cuda:3'), covar=tensor([0.1367, 0.0743, 0.0567, 0.0627, 0.5155, 0.0762, 0.0653, 0.1143], + device='cuda:3'), in_proj_covar=tensor([0.0708, 0.0636, 0.0839, 0.0724, 0.0756, 0.0586, 0.0504, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:22:52,766 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:15,499 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91211.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:19,602 INFO [train.py:903] (3/4) Epoch 14, batch 2450, loss[loss=0.2339, simple_loss=0.3076, pruned_loss=0.08016, over 19776.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3014, pruned_loss=0.07539, over 3805300.08 frames. ], batch size: 63, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:23:23,094 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:23:37,773 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91229.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:08,242 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:24:19,340 INFO [train.py:903] (3/4) Epoch 14, batch 2500, loss[loss=0.1908, simple_loss=0.259, pruned_loss=0.06127, over 17411.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.301, pruned_loss=0.07504, over 3809837.63 frames. ], batch size: 38, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:24:32,860 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7312, 1.6372, 1.5017, 2.1248, 1.6467, 2.0387, 2.1287, 1.8963], + device='cuda:3'), covar=tensor([0.0791, 0.0874, 0.1061, 0.0775, 0.0835, 0.0692, 0.0823, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0223, 0.0224, 0.0241, 0.0228, 0.0209, 0.0191, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 02:24:35,621 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. 
limit=2.0 +2023-04-02 02:24:39,623 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9712, 5.3563, 3.1068, 4.7602, 1.2590, 5.3051, 5.3367, 5.4926], + device='cuda:3'), covar=tensor([0.0463, 0.1005, 0.1882, 0.0756, 0.4201, 0.0597, 0.0671, 0.0990], + device='cuda:3'), in_proj_covar=tensor([0.0452, 0.0380, 0.0454, 0.0330, 0.0392, 0.0386, 0.0378, 0.0413], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:24:42,800 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.074e+02 5.867e+02 6.968e+02 8.618e+02 1.617e+03, threshold=1.394e+03, percent-clipped=2.0 +2023-04-02 02:24:46,256 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3493, 1.3326, 1.7406, 1.4569, 2.7778, 3.8363, 3.5787, 4.0626], + device='cuda:3'), covar=tensor([0.1486, 0.3419, 0.3000, 0.1967, 0.0515, 0.0146, 0.0170, 0.0175], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0253, 0.0223, 0.0166, 0.0206, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 02:24:58,182 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0761, 2.0424, 1.8462, 2.3396, 2.5530, 1.8311, 1.8637, 2.2611], + device='cuda:3'), covar=tensor([0.1124, 0.1746, 0.1686, 0.1167, 0.1286, 0.0882, 0.1554, 0.0891], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0352, 0.0294, 0.0240, 0.0294, 0.0240, 0.0288, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:25:06,908 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91303.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:25:19,880 INFO [train.py:903] (3/4) Epoch 14, batch 2550, loss[loss=0.2358, simple_loss=0.3141, pruned_loss=0.07877, over 19631.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2997, pruned_loss=0.07448, over 3819362.16 frames. ], batch size: 57, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:25:33,180 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:26:12,384 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 02:26:19,133 INFO [train.py:903] (3/4) Epoch 14, batch 2600, loss[loss=0.2234, simple_loss=0.3094, pruned_loss=0.06869, over 19764.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3011, pruned_loss=0.07521, over 3817028.36 frames. ], batch size: 54, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:26:44,917 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.557e+02 7.203e+02 9.059e+02 1.995e+03, threshold=1.441e+03, percent-clipped=7.0 +2023-04-02 02:27:21,520 INFO [train.py:903] (3/4) Epoch 14, batch 2650, loss[loss=0.2451, simple_loss=0.3182, pruned_loss=0.08599, over 19524.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3009, pruned_loss=0.07502, over 3818939.03 frames. ], batch size: 64, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:27:41,101 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 02:28:11,793 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7513, 1.3364, 1.5300, 1.5679, 3.2783, 1.0872, 2.2667, 3.7150], + device='cuda:3'), covar=tensor([0.0427, 0.2579, 0.2786, 0.1728, 0.0684, 0.2459, 0.1299, 0.0218], + device='cuda:3'), in_proj_covar=tensor([0.0371, 0.0346, 0.0363, 0.0327, 0.0355, 0.0338, 0.0347, 0.0369], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:28:18,985 INFO [train.py:903] (3/4) Epoch 14, batch 2700, loss[loss=0.2022, simple_loss=0.2732, pruned_loss=0.06558, over 19355.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3016, pruned_loss=0.07537, over 3815118.78 frames. ], batch size: 47, lr: 5.96e-03, grad_scale: 8.0 +2023-04-02 02:28:43,685 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.145e+02 6.524e+02 8.606e+02 2.089e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-02 02:29:13,058 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-02 02:29:20,024 INFO [train.py:903] (3/4) Epoch 14, batch 2750, loss[loss=0.221, simple_loss=0.3133, pruned_loss=0.06438, over 19129.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.301, pruned_loss=0.07485, over 3815431.70 frames. ], batch size: 69, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:29:44,615 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 02:30:14,284 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91560.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:30:18,378 INFO [train.py:903] (3/4) Epoch 14, batch 2800, loss[loss=0.2305, simple_loss=0.3073, pruned_loss=0.07686, over 19850.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3027, pruned_loss=0.07578, over 3818177.17 frames. ], batch size: 52, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:30:41,199 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:30:44,141 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 4.991e+02 6.176e+02 8.428e+02 2.269e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 02:31:10,040 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:31:19,257 INFO [train.py:903] (3/4) Epoch 14, batch 2850, loss[loss=0.2235, simple_loss=0.3016, pruned_loss=0.07265, over 17397.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3026, pruned_loss=0.07605, over 3820570.64 frames. ], batch size: 101, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:31:58,530 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91647.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:32:20,013 INFO [train.py:903] (3/4) Epoch 14, batch 2900, loss[loss=0.2435, simple_loss=0.3182, pruned_loss=0.08444, over 17382.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3023, pruned_loss=0.07538, over 3829099.96 frames. ], batch size: 101, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:32:20,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 02:32:44,081 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.659e+02 5.126e+02 6.310e+02 7.919e+02 2.445e+03, threshold=1.262e+03, percent-clipped=4.0 +2023-04-02 02:33:15,679 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 02:33:19,569 INFO [train.py:903] (3/4) Epoch 14, batch 2950, loss[loss=0.2284, simple_loss=0.2999, pruned_loss=0.07846, over 19607.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3018, pruned_loss=0.07526, over 3830385.86 frames. ], batch size: 50, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:17,811 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91762.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:34:19,630 INFO [train.py:903] (3/4) Epoch 14, batch 3000, loss[loss=0.2702, simple_loss=0.3371, pruned_loss=0.1016, over 19731.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3019, pruned_loss=0.07546, over 3823311.36 frames. ], batch size: 63, lr: 5.95e-03, grad_scale: 8.0 +2023-04-02 02:34:19,631 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 02:34:36,637 INFO [train.py:937] (3/4) Epoch 14, validation: loss=0.1742, simple_loss=0.2751, pruned_loss=0.03671, over 944034.00 frames. +2023-04-02 02:34:36,639 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 02:34:42,025 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 02:34:45,929 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5124, 2.3898, 1.6491, 1.6062, 2.1323, 1.3100, 1.2437, 1.9318], + device='cuda:3'), covar=tensor([0.1036, 0.0635, 0.1012, 0.0690, 0.0503, 0.1176, 0.0819, 0.0496], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0307, 0.0325, 0.0249, 0.0239, 0.0324, 0.0294, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:35:02,710 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.261e+02 6.370e+02 8.625e+02 1.479e+03, threshold=1.274e+03, percent-clipped=4.0 +2023-04-02 02:35:25,863 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2945, 3.0188, 2.2180, 2.7592, 0.7941, 2.9308, 2.8499, 2.9109], + device='cuda:3'), covar=tensor([0.1171, 0.1403, 0.2018, 0.1032, 0.3899, 0.1002, 0.1115, 0.1344], + device='cuda:3'), in_proj_covar=tensor([0.0450, 0.0380, 0.0454, 0.0329, 0.0390, 0.0388, 0.0380, 0.0414], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:35:37,798 INFO [train.py:903] (3/4) Epoch 14, batch 3050, loss[loss=0.2318, simple_loss=0.3109, pruned_loss=0.07633, over 18718.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3019, pruned_loss=0.07503, over 3830461.19 frames. 
], batch size: 74, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:36:09,848 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3080, 2.2193, 1.8743, 1.7394, 1.7448, 1.7881, 0.6260, 1.1521], + device='cuda:3'), covar=tensor([0.0495, 0.0495, 0.0395, 0.0685, 0.0933, 0.0765, 0.0996, 0.0797], + device='cuda:3'), in_proj_covar=tensor([0.0342, 0.0339, 0.0334, 0.0364, 0.0432, 0.0361, 0.0315, 0.0325], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 02:36:12,272 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 02:36:37,020 INFO [train.py:903] (3/4) Epoch 14, batch 3100, loss[loss=0.2083, simple_loss=0.2762, pruned_loss=0.07025, over 19702.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3022, pruned_loss=0.07557, over 3816062.17 frames. ], batch size: 51, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:37:02,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.361e+02 6.622e+02 8.860e+02 2.580e+03, threshold=1.324e+03, percent-clipped=11.0 +2023-04-02 02:37:09,339 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-04-02 02:37:24,417 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:37:31,814 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:37:37,892 INFO [train.py:903] (3/4) Epoch 14, batch 3150, loss[loss=0.2229, simple_loss=0.2987, pruned_loss=0.07359, over 19515.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3016, pruned_loss=0.07528, over 3827333.56 frames. ], batch size: 54, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:37:44,909 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.32 vs. limit=5.0 +2023-04-02 02:38:04,260 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 02:38:37,357 INFO [train.py:903] (3/4) Epoch 14, batch 3200, loss[loss=0.2488, simple_loss=0.3288, pruned_loss=0.0844, over 19651.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3017, pruned_loss=0.07548, over 3830954.92 frames. ], batch size: 55, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:38:48,681 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:02,850 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.150e+02 6.206e+02 7.874e+02 1.849e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 02:39:04,390 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6124, 1.2779, 1.5346, 1.5768, 3.1670, 0.9227, 2.1825, 3.4641], + device='cuda:3'), covar=tensor([0.0445, 0.2705, 0.2781, 0.1705, 0.0724, 0.2576, 0.1326, 0.0282], + device='cuda:3'), in_proj_covar=tensor([0.0373, 0.0347, 0.0364, 0.0328, 0.0358, 0.0338, 0.0350, 0.0371], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:39:20,986 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91999.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:39:39,708 INFO [train.py:903] (3/4) Epoch 14, batch 3250, loss[loss=0.1873, simple_loss=0.2615, pruned_loss=0.05658, over 19736.00 frames. 
], tot_loss[loss=0.2247, simple_loss=0.3002, pruned_loss=0.07461, over 3831191.14 frames. ], batch size: 46, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:39:44,736 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:39:45,902 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92019.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:40:15,594 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:40:39,291 INFO [train.py:903] (3/4) Epoch 14, batch 3300, loss[loss=0.2679, simple_loss=0.3309, pruned_loss=0.1024, over 19717.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3008, pruned_loss=0.07467, over 3819317.36 frames. ], batch size: 51, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:40:44,827 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 02:41:01,033 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0338, 1.7163, 1.8713, 1.8498, 4.4420, 0.9454, 2.6718, 4.8295], + device='cuda:3'), covar=tensor([0.0402, 0.2776, 0.2728, 0.1897, 0.0703, 0.2968, 0.1381, 0.0201], + device='cuda:3'), in_proj_covar=tensor([0.0373, 0.0347, 0.0364, 0.0328, 0.0357, 0.0338, 0.0349, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:41:04,997 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.974e+02 6.176e+02 7.406e+02 2.018e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-02 02:41:16,629 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9949, 1.5671, 1.5777, 1.9218, 1.5986, 1.7028, 1.5151, 1.7953], + device='cuda:3'), covar=tensor([0.0938, 0.1527, 0.1370, 0.0925, 0.1288, 0.0519, 0.1278, 0.0727], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0351, 0.0293, 0.0241, 0.0294, 0.0244, 0.0287, 0.0241], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:41:41,638 INFO [train.py:903] (3/4) Epoch 14, batch 3350, loss[loss=0.1954, simple_loss=0.2825, pruned_loss=0.05416, over 19647.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2996, pruned_loss=0.07345, over 3829584.07 frames. ], batch size: 55, lr: 5.94e-03, grad_scale: 8.0 +2023-04-02 02:42:40,620 INFO [train.py:903] (3/4) Epoch 14, batch 3400, loss[loss=0.2324, simple_loss=0.3099, pruned_loss=0.07746, over 19327.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2998, pruned_loss=0.07358, over 3833603.21 frames. ], batch size: 66, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:42:41,122 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5015, 1.4896, 1.7018, 1.7093, 2.4914, 2.3044, 2.5717, 1.0141], + device='cuda:3'), covar=tensor([0.2176, 0.3875, 0.2368, 0.1704, 0.1351, 0.1847, 0.1296, 0.3867], + device='cuda:3'), in_proj_covar=tensor([0.0502, 0.0589, 0.0634, 0.0448, 0.0598, 0.0500, 0.0645, 0.0506], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 02:43:05,849 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.934e+02 6.017e+02 7.496e+02 1.650e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-02 02:43:27,413 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. 
limit=2.0 +2023-04-02 02:43:42,230 INFO [train.py:903] (3/4) Epoch 14, batch 3450, loss[loss=0.2109, simple_loss=0.2916, pruned_loss=0.06515, over 19703.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2987, pruned_loss=0.07267, over 3831406.20 frames. ], batch size: 59, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:43:44,673 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 02:44:28,720 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:44:40,672 INFO [train.py:903] (3/4) Epoch 14, batch 3500, loss[loss=0.238, simple_loss=0.317, pruned_loss=0.07952, over 17351.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3002, pruned_loss=0.07365, over 3825654.07 frames. ], batch size: 101, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:44:54,874 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92275.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:45:05,728 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.239e+02 6.377e+02 7.871e+02 1.606e+03, threshold=1.275e+03, percent-clipped=5.0 +2023-04-02 02:45:24,194 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92300.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:45:37,117 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9402, 1.6526, 1.4625, 1.9147, 1.6506, 1.6826, 1.5395, 1.8433], + device='cuda:3'), covar=tensor([0.0973, 0.1412, 0.1523, 0.1030, 0.1228, 0.0517, 0.1243, 0.0732], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0353, 0.0297, 0.0244, 0.0298, 0.0246, 0.0290, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:45:41,111 INFO [train.py:903] (3/4) Epoch 14, batch 3550, loss[loss=0.2269, simple_loss=0.3066, pruned_loss=0.07358, over 19728.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3008, pruned_loss=0.07432, over 3812441.46 frames. ], batch size: 63, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:45:44,523 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92317.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:46:15,212 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92343.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:46:39,903 INFO [train.py:903] (3/4) Epoch 14, batch 3600, loss[loss=0.2341, simple_loss=0.316, pruned_loss=0.07611, over 19693.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3018, pruned_loss=0.07497, over 3810930.92 frames. ], batch size: 59, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:46:44,980 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92368.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:47:04,639 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 5.209e+02 6.321e+02 7.842e+02 1.520e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 02:47:40,755 INFO [train.py:903] (3/4) Epoch 14, batch 3650, loss[loss=0.211, simple_loss=0.2996, pruned_loss=0.06122, over 19256.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3012, pruned_loss=0.07497, over 3801781.33 frames. 
], batch size: 66, lr: 5.93e-03, grad_scale: 8.0 +2023-04-02 02:48:03,008 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92432.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:48:34,087 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92458.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:48:40,398 INFO [train.py:903] (3/4) Epoch 14, batch 3700, loss[loss=0.2536, simple_loss=0.3249, pruned_loss=0.09119, over 19281.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3015, pruned_loss=0.0751, over 3818444.67 frames. ], batch size: 66, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:49:05,816 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.888e+02 6.023e+02 8.004e+02 1.682e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 02:49:41,646 INFO [train.py:903] (3/4) Epoch 14, batch 3750, loss[loss=0.2322, simple_loss=0.3126, pruned_loss=0.07594, over 19548.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3018, pruned_loss=0.07487, over 3816786.87 frames. ], batch size: 61, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:50:14,881 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9519, 4.4457, 2.7634, 3.9558, 1.0543, 4.3215, 4.3163, 4.4245], + device='cuda:3'), covar=tensor([0.0502, 0.0949, 0.1881, 0.0755, 0.3873, 0.0627, 0.0737, 0.0966], + device='cuda:3'), in_proj_covar=tensor([0.0447, 0.0375, 0.0448, 0.0327, 0.0388, 0.0386, 0.0375, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 02:50:42,053 INFO [train.py:903] (3/4) Epoch 14, batch 3800, loss[loss=0.1852, simple_loss=0.2531, pruned_loss=0.05861, over 19751.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3007, pruned_loss=0.07419, over 3828220.38 frames. ], batch size: 45, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:06,367 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.992e+02 6.384e+02 8.353e+02 1.667e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 02:51:11,003 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 02:51:15,768 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2888, 1.3387, 1.5818, 1.4707, 2.2850, 1.9350, 2.2876, 0.8637], + device='cuda:3'), covar=tensor([0.2440, 0.4001, 0.2442, 0.1881, 0.1399, 0.2199, 0.1333, 0.4069], + device='cuda:3'), in_proj_covar=tensor([0.0507, 0.0596, 0.0642, 0.0453, 0.0606, 0.0506, 0.0654, 0.0510], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 02:51:42,078 INFO [train.py:903] (3/4) Epoch 14, batch 3850, loss[loss=0.2389, simple_loss=0.3075, pruned_loss=0.08518, over 19383.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3006, pruned_loss=0.07371, over 3835155.02 frames. ], batch size: 48, lr: 5.92e-03, grad_scale: 8.0 +2023-04-02 02:51:45,129 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-04-02 02:51:54,820 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92624.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:19,048 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92643.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:26,059 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:52:43,607 INFO [train.py:903] (3/4) Epoch 14, batch 3900, loss[loss=0.2254, simple_loss=0.3048, pruned_loss=0.07302, over 19672.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.07338, over 3823810.17 frames. ], batch size: 58, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:10,396 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.910e+02 5.705e+02 7.277e+02 9.203e+02 2.913e+03, threshold=1.455e+03, percent-clipped=9.0 +2023-04-02 02:53:13,149 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92688.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:43,652 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:53:44,353 INFO [train.py:903] (3/4) Epoch 14, batch 3950, loss[loss=0.2523, simple_loss=0.3198, pruned_loss=0.09237, over 19774.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2987, pruned_loss=0.07294, over 3828485.80 frames. ], batch size: 56, lr: 5.92e-03, grad_scale: 4.0 +2023-04-02 02:53:44,806 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92714.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 02:53:48,544 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 02:54:12,952 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:54:14,290 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92739.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 02:54:45,454 INFO [train.py:903] (3/4) Epoch 14, batch 4000, loss[loss=0.2237, simple_loss=0.3046, pruned_loss=0.07141, over 19612.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2987, pruned_loss=0.07316, over 3811199.83 frames. ], batch size: 61, lr: 5.91e-03, grad_scale: 8.0 +2023-04-02 02:55:11,211 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.983e+02 6.436e+02 8.255e+02 1.908e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 02:55:33,048 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 02:55:45,574 INFO [train.py:903] (3/4) Epoch 14, batch 4050, loss[loss=0.2334, simple_loss=0.2909, pruned_loss=0.08797, over 19802.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2981, pruned_loss=0.07321, over 3820894.95 frames. ], batch size: 48, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:56:28,987 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:56:45,576 INFO [train.py:903] (3/4) Epoch 14, batch 4100, loss[loss=0.2259, simple_loss=0.2982, pruned_loss=0.07681, over 19535.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2978, pruned_loss=0.07322, over 3822568.02 frames. 
], batch size: 54, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:13,888 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 5.414e+02 6.604e+02 8.302e+02 1.654e+03, threshold=1.321e+03, percent-clipped=7.0 +2023-04-02 02:57:21,841 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 02:57:45,522 INFO [train.py:903] (3/4) Epoch 14, batch 4150, loss[loss=0.1877, simple_loss=0.2741, pruned_loss=0.05061, over 19661.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2971, pruned_loss=0.0727, over 3832557.69 frames. ], batch size: 53, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:57:51,061 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:28,818 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92949.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:58:48,463 INFO [train.py:903] (3/4) Epoch 14, batch 4200, loss[loss=0.2123, simple_loss=0.2939, pruned_loss=0.06532, over 19511.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2974, pruned_loss=0.07263, over 3822017.37 frames. ], batch size: 64, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 02:58:51,710 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 02:59:15,442 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.424e+02 5.036e+02 6.286e+02 7.807e+02 1.684e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-02 02:59:15,622 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:16,394 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. limit=5.0 +2023-04-02 02:59:22,708 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 02:59:27,853 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6924, 4.1873, 4.4230, 4.3742, 1.6180, 4.1313, 3.6252, 4.1008], + device='cuda:3'), covar=tensor([0.1606, 0.0843, 0.0557, 0.0650, 0.5661, 0.0798, 0.0674, 0.1078], + device='cuda:3'), in_proj_covar=tensor([0.0711, 0.0643, 0.0847, 0.0732, 0.0762, 0.0597, 0.0514, 0.0780], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 02:59:48,126 INFO [train.py:903] (3/4) Epoch 14, batch 4250, loss[loss=0.2157, simple_loss=0.2843, pruned_loss=0.07359, over 19787.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2976, pruned_loss=0.07294, over 3818830.46 frames. ], batch size: 48, lr: 5.91e-03, grad_scale: 4.0 +2023-04-02 03:00:03,123 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 03:00:05,162 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:15,023 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 03:00:37,755 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:00:47,386 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. 
limit=5.0 +2023-04-02 03:00:48,958 INFO [train.py:903] (3/4) Epoch 14, batch 4300, loss[loss=0.2384, simple_loss=0.3127, pruned_loss=0.0821, over 19398.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2979, pruned_loss=0.07317, over 3803689.90 frames. ], batch size: 70, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:01:12,696 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:18,348 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.279e+02 5.152e+02 6.308e+02 8.497e+02 2.668e+03, threshold=1.262e+03, percent-clipped=7.0 +2023-04-02 03:01:18,745 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9726, 1.9012, 1.6039, 2.1187, 2.0488, 1.6881, 1.6214, 1.9457], + device='cuda:3'), covar=tensor([0.0998, 0.1476, 0.1477, 0.0905, 0.1203, 0.0597, 0.1320, 0.0727], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0349, 0.0294, 0.0241, 0.0295, 0.0244, 0.0287, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:01:36,203 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:01:42,517 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 03:01:50,402 INFO [train.py:903] (3/4) Epoch 14, batch 4350, loss[loss=0.241, simple_loss=0.3188, pruned_loss=0.08157, over 19455.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2978, pruned_loss=0.07307, over 3812063.96 frames. ], batch size: 64, lr: 5.90e-03, grad_scale: 4.0 +2023-04-02 03:02:14,541 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:02:52,483 INFO [train.py:903] (3/4) Epoch 14, batch 4400, loss[loss=0.2491, simple_loss=0.3226, pruned_loss=0.08787, over 18235.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2989, pruned_loss=0.07365, over 3813038.23 frames. ], batch size: 83, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:03:15,289 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 03:03:18,721 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.134e+02 6.154e+02 7.916e+02 2.681e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 03:03:25,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 03:03:26,657 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:32,761 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93197.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:03:47,759 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3015, 3.0023, 2.2299, 2.2993, 2.0755, 2.5475, 0.8239, 2.1995], + device='cuda:3'), covar=tensor([0.0453, 0.0471, 0.0562, 0.0894, 0.0909, 0.0824, 0.1140, 0.0814], + device='cuda:3'), in_proj_covar=tensor([0.0341, 0.0337, 0.0333, 0.0362, 0.0437, 0.0362, 0.0315, 0.0323], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:03:52,928 INFO [train.py:903] (3/4) Epoch 14, batch 4450, loss[loss=0.2467, simple_loss=0.3244, pruned_loss=0.08454, over 19439.00 frames. 
], tot_loss[loss=0.2237, simple_loss=0.2994, pruned_loss=0.07393, over 3827256.35 frames. ], batch size: 64, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:04:49,433 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93261.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:04:52,745 INFO [train.py:903] (3/4) Epoch 14, batch 4500, loss[loss=0.2335, simple_loss=0.3091, pruned_loss=0.07896, over 19683.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2998, pruned_loss=0.07375, over 3837337.51 frames. ], batch size: 60, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:05:21,652 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 5.314e+02 6.344e+02 7.896e+02 1.749e+03, threshold=1.269e+03, percent-clipped=5.0 +2023-04-02 03:05:25,892 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 03:05:28,485 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:45,631 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93308.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:05:52,289 INFO [train.py:903] (3/4) Epoch 14, batch 4550, loss[loss=0.2197, simple_loss=0.2988, pruned_loss=0.07031, over 19522.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2997, pruned_loss=0.07379, over 3839638.84 frames. ], batch size: 54, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:06:06,164 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 03:06:21,908 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:27,656 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 03:06:39,765 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.37 vs. limit=5.0 +2023-04-02 03:06:47,781 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:06:55,967 INFO [train.py:903] (3/4) Epoch 14, batch 4600, loss[loss=0.1721, simple_loss=0.2499, pruned_loss=0.04718, over 16444.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2998, pruned_loss=0.07359, over 3822748.00 frames. 
], batch size: 36, lr: 5.90e-03, grad_scale: 8.0 +2023-04-02 03:07:05,042 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:09,558 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:17,804 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93383.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:22,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.600e+02 5.339e+02 6.388e+02 8.227e+02 2.509e+03, threshold=1.278e+03, percent-clipped=2.0 +2023-04-02 03:07:35,575 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:49,478 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:07:56,022 INFO [train.py:903] (3/4) Epoch 14, batch 4650, loss[loss=0.2139, simple_loss=0.2751, pruned_loss=0.07635, over 19044.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3, pruned_loss=0.07394, over 3822081.73 frames. ], batch size: 42, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:07:56,743 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 03:08:12,046 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 03:08:23,200 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 03:08:43,030 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:44,270 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93453.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:08:45,669 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.66 vs. limit=5.0 +2023-04-02 03:08:56,302 INFO [train.py:903] (3/4) Epoch 14, batch 4700, loss[loss=0.2103, simple_loss=0.2911, pruned_loss=0.06477, over 17181.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3003, pruned_loss=0.07435, over 3816342.19 frames. ], batch size: 101, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:09:11,839 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:14,188 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:20,324 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 03:09:25,719 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 5.199e+02 6.337e+02 7.857e+02 1.524e+03, threshold=1.267e+03, percent-clipped=2.0 +2023-04-02 03:09:26,101 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,144 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:09:55,940 INFO [train.py:903] (3/4) Epoch 14, batch 4750, loss[loss=0.2248, simple_loss=0.2971, pruned_loss=0.07626, over 19361.00 frames. 
], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07384, over 3824434.32 frames. ], batch size: 47, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:55,741 INFO [train.py:903] (3/4) Epoch 14, batch 4800, loss[loss=0.2452, simple_loss=0.3222, pruned_loss=0.0841, over 17315.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2994, pruned_loss=0.07332, over 3823495.91 frames. ], batch size: 101, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:10:56,177 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:18,655 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1632, 1.1870, 1.5639, 1.1806, 2.6901, 3.4754, 3.2266, 3.6883], + device='cuda:3'), covar=tensor([0.1666, 0.3672, 0.3240, 0.2279, 0.0537, 0.0162, 0.0211, 0.0220], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0301, 0.0330, 0.0254, 0.0222, 0.0164, 0.0207, 0.0216], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:11:22,951 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.531e+02 5.541e+02 6.642e+02 8.296e+02 2.320e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 03:11:25,732 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:29,130 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:11:39,594 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 03:11:43,651 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7313, 3.2287, 3.2984, 3.2732, 1.3473, 3.1383, 2.7398, 3.0129], + device='cuda:3'), covar=tensor([0.1699, 0.1086, 0.0842, 0.0891, 0.5397, 0.1029, 0.0823, 0.1438], + device='cuda:3'), in_proj_covar=tensor([0.0710, 0.0644, 0.0851, 0.0729, 0.0757, 0.0594, 0.0510, 0.0776], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 03:11:57,048 INFO [train.py:903] (3/4) Epoch 14, batch 4850, loss[loss=0.2251, simple_loss=0.3024, pruned_loss=0.07387, over 19344.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2982, pruned_loss=0.07291, over 3832784.34 frames. ], batch size: 66, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:17,802 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:23,245 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 03:12:42,757 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 03:12:47,278 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 03:12:48,581 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 03:12:48,976 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93657.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:56,528 INFO [train.py:903] (3/4) Epoch 14, batch 4900, loss[loss=0.2504, simple_loss=0.3196, pruned_loss=0.09057, over 19651.00 frames. ], tot_loss[loss=0.222, simple_loss=0.298, pruned_loss=0.07297, over 3827367.53 frames. 
], batch size: 60, lr: 5.89e-03, grad_scale: 8.0 +2023-04-02 03:12:56,907 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:12:57,692 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 03:13:18,102 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 03:13:25,557 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.842e+02 5.818e+02 7.123e+02 1.786e+03, threshold=1.164e+03, percent-clipped=2.0 +2023-04-02 03:13:26,355 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 03:13:26,358 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 03:13:28,158 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93689.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:33,908 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7294, 1.7966, 2.0637, 1.9038, 3.1380, 2.6022, 3.3509, 1.7723], + device='cuda:3'), covar=tensor([0.2025, 0.3484, 0.2227, 0.1616, 0.1379, 0.1760, 0.1391, 0.3277], + device='cuda:3'), in_proj_covar=tensor([0.0501, 0.0592, 0.0638, 0.0452, 0.0602, 0.0504, 0.0649, 0.0508], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:13:49,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:13:55,992 INFO [train.py:903] (3/4) Epoch 14, batch 4950, loss[loss=0.2889, simple_loss=0.349, pruned_loss=0.1144, over 12852.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2985, pruned_loss=0.07296, over 3811564.82 frames. ], batch size: 136, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:14:16,295 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 03:14:21,123 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:32,402 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:14:36,690 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 03:14:58,213 INFO [train.py:903] (3/4) Epoch 14, batch 5000, loss[loss=0.2897, simple_loss=0.3546, pruned_loss=0.1124, over 17244.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2993, pruned_loss=0.07378, over 3795171.75 frames. ], batch size: 101, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:15:04,117 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93768.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:05,336 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:07,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 03:15:17,362 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 03:15:18,125 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.75 vs. 
limit=5.0 +2023-04-02 03:15:25,225 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.057e+02 5.347e+02 6.962e+02 9.103e+02 2.417e+03, threshold=1.392e+03, percent-clipped=9.0 +2023-04-02 03:15:26,635 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93788.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:33,740 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:15:56,434 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3357, 3.8056, 2.2522, 2.3376, 3.5768, 2.1291, 1.6462, 2.2523], + device='cuda:3'), covar=tensor([0.1097, 0.0448, 0.0832, 0.0695, 0.0374, 0.0927, 0.0799, 0.0633], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0303, 0.0324, 0.0246, 0.0236, 0.0323, 0.0288, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:15:59,313 INFO [train.py:903] (3/4) Epoch 14, batch 5050, loss[loss=0.2372, simple_loss=0.3148, pruned_loss=0.07974, over 19535.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3002, pruned_loss=0.07403, over 3806776.32 frames. ], batch size: 54, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:16:35,024 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 03:16:40,897 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:16:59,033 INFO [train.py:903] (3/4) Epoch 14, batch 5100, loss[loss=0.1975, simple_loss=0.2705, pruned_loss=0.06222, over 15218.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2993, pruned_loss=0.0733, over 3813765.73 frames. ], batch size: 33, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:17:09,121 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:17:09,907 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 03:17:13,178 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 03:17:16,704 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 03:17:26,256 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 4.904e+02 5.934e+02 7.645e+02 1.361e+03, threshold=1.187e+03, percent-clipped=0.0 +2023-04-02 03:17:56,877 INFO [train.py:903] (3/4) Epoch 14, batch 5150, loss[loss=0.1716, simple_loss=0.2478, pruned_loss=0.04773, over 19766.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2996, pruned_loss=0.07363, over 3816407.61 frames. ], batch size: 45, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:18:09,224 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 03:18:43,207 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 03:18:58,008 INFO [train.py:903] (3/4) Epoch 14, batch 5200, loss[loss=0.2958, simple_loss=0.356, pruned_loss=0.1178, over 19491.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3, pruned_loss=0.07392, over 3811711.27 frames. 
], batch size: 64, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:19:13,844 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 03:19:25,473 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 5.268e+02 6.485e+02 8.631e+02 2.638e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 03:19:39,265 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:19:57,305 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 03:19:59,389 INFO [train.py:903] (3/4) Epoch 14, batch 5250, loss[loss=0.2456, simple_loss=0.3233, pruned_loss=0.08396, over 19769.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2998, pruned_loss=0.07365, over 3820700.77 frames. ], batch size: 54, lr: 5.88e-03, grad_scale: 8.0 +2023-04-02 03:20:59,248 INFO [train.py:903] (3/4) Epoch 14, batch 5300, loss[loss=0.2454, simple_loss=0.3203, pruned_loss=0.08522, over 19741.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3007, pruned_loss=0.07395, over 3817124.81 frames. ], batch size: 63, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:21:16,449 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 03:21:27,964 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.389e+02 5.368e+02 7.020e+02 9.283e+02 2.840e+03, threshold=1.404e+03, percent-clipped=4.0 +2023-04-02 03:21:59,008 INFO [train.py:903] (3/4) Epoch 14, batch 5350, loss[loss=0.268, simple_loss=0.336, pruned_loss=0.1, over 19290.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3008, pruned_loss=0.07421, over 3824415.02 frames. ], batch size: 66, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:22:23,156 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:22:34,758 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 03:23:01,401 INFO [train.py:903] (3/4) Epoch 14, batch 5400, loss[loss=0.2394, simple_loss=0.3049, pruned_loss=0.08692, over 19613.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3015, pruned_loss=0.0747, over 3814486.59 frames. ], batch size: 50, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:23:29,187 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.537e+02 7.248e+02 8.700e+02 2.021e+03, threshold=1.450e+03, percent-clipped=3.0 +2023-04-02 03:24:03,211 INFO [train.py:903] (3/4) Epoch 14, batch 5450, loss[loss=0.1963, simple_loss=0.2756, pruned_loss=0.05846, over 19416.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3015, pruned_loss=0.07472, over 3804490.88 frames. 
], batch size: 48, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:24:34,817 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94241.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:24:43,519 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:24:54,154 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4877, 1.6764, 2.0711, 1.7621, 3.1804, 2.7924, 3.4789, 1.4936], + device='cuda:3'), covar=tensor([0.2430, 0.4161, 0.2600, 0.1927, 0.1577, 0.1915, 0.1666, 0.4163], + device='cuda:3'), in_proj_covar=tensor([0.0502, 0.0591, 0.0639, 0.0450, 0.0604, 0.0505, 0.0649, 0.0510], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:25:02,869 INFO [train.py:903] (3/4) Epoch 14, batch 5500, loss[loss=0.2164, simple_loss=0.2983, pruned_loss=0.0672, over 19508.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3006, pruned_loss=0.07409, over 3818073.83 frames. ], batch size: 64, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:25:24,860 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 03:25:30,872 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.805e+02 5.794e+02 7.462e+02 1.465e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 03:25:46,411 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2153, 2.8902, 2.1954, 2.2332, 2.0394, 2.4404, 1.0204, 2.1310], + device='cuda:3'), covar=tensor([0.0525, 0.0530, 0.0580, 0.0915, 0.0929, 0.0935, 0.1027, 0.0820], + device='cuda:3'), in_proj_covar=tensor([0.0344, 0.0342, 0.0336, 0.0363, 0.0437, 0.0363, 0.0316, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:26:01,461 INFO [train.py:903] (3/4) Epoch 14, batch 5550, loss[loss=0.1854, simple_loss=0.2593, pruned_loss=0.05572, over 19764.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2996, pruned_loss=0.07411, over 3813972.38 frames. ], batch size: 47, lr: 5.87e-03, grad_scale: 8.0 +2023-04-02 03:26:08,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 03:26:23,617 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.24 vs. limit=5.0 +2023-04-02 03:26:37,718 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:26:53,090 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 03:26:57,901 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 03:27:01,450 INFO [train.py:903] (3/4) Epoch 14, batch 5600, loss[loss=0.2089, simple_loss=0.2825, pruned_loss=0.06763, over 19494.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2979, pruned_loss=0.07301, over 3827770.06 frames. 
], batch size: 49, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:27:05,669 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94366.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:27:30,031 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.083e+02 5.188e+02 6.005e+02 7.911e+02 1.925e+03, threshold=1.201e+03, percent-clipped=8.0 +2023-04-02 03:28:03,376 INFO [train.py:903] (3/4) Epoch 14, batch 5650, loss[loss=0.2337, simple_loss=0.3041, pruned_loss=0.0816, over 19346.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2987, pruned_loss=0.07395, over 3818635.66 frames. ], batch size: 66, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:28:49,693 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 03:28:55,926 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:29:02,020 INFO [train.py:903] (3/4) Epoch 14, batch 5700, loss[loss=0.2826, simple_loss=0.3407, pruned_loss=0.1122, over 17581.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2991, pruned_loss=0.07397, over 3814513.53 frames. ], batch size: 101, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:29:29,830 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.949e+02 6.008e+02 7.817e+02 2.884e+03, threshold=1.202e+03, percent-clipped=11.0 +2023-04-02 03:29:50,190 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94503.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:30:02,384 INFO [train.py:903] (3/4) Epoch 14, batch 5750, loss[loss=0.2671, simple_loss=0.3293, pruned_loss=0.1025, over 19363.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2987, pruned_loss=0.07358, over 3813519.12 frames. ], batch size: 70, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:30:04,715 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 03:30:09,051 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-02 03:30:11,543 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 03:30:17,756 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 03:30:21,236 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:31:04,685 INFO [train.py:903] (3/4) Epoch 14, batch 5800, loss[loss=0.184, simple_loss=0.255, pruned_loss=0.05645, over 19313.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2974, pruned_loss=0.0728, over 3810387.63 frames. ], batch size: 44, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:31:30,529 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94585.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:31:32,532 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.430e+02 7.155e+02 9.192e+02 1.752e+03, threshold=1.431e+03, percent-clipped=10.0 +2023-04-02 03:32:06,963 INFO [train.py:903] (3/4) Epoch 14, batch 5850, loss[loss=0.2207, simple_loss=0.3013, pruned_loss=0.07009, over 19561.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2973, pruned_loss=0.07267, over 3810120.84 frames. 
], batch size: 61, lr: 5.86e-03, grad_scale: 8.0 +2023-04-02 03:32:43,951 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-02 03:33:06,740 INFO [train.py:903] (3/4) Epoch 14, batch 5900, loss[loss=0.2049, simple_loss=0.2848, pruned_loss=0.06253, over 19379.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2965, pruned_loss=0.07227, over 3817322.59 frames. ], batch size: 48, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:33:07,936 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 03:33:27,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 03:33:32,178 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0713, 5.4725, 2.8336, 4.7123, 1.1637, 5.3938, 5.4053, 5.5371], + device='cuda:3'), covar=tensor([0.0450, 0.1023, 0.2232, 0.0742, 0.4159, 0.0575, 0.0694, 0.0997], + device='cuda:3'), in_proj_covar=tensor([0.0449, 0.0378, 0.0451, 0.0324, 0.0389, 0.0382, 0.0376, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:33:33,163 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.122e+02 5.971e+02 8.409e+02 2.018e+03, threshold=1.194e+03, percent-clipped=4.0 +2023-04-02 03:33:50,039 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94700.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:34:01,325 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94710.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:05,812 INFO [train.py:903] (3/4) Epoch 14, batch 5950, loss[loss=0.2289, simple_loss=0.3116, pruned_loss=0.0731, over 19668.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.297, pruned_loss=0.07281, over 3830788.09 frames. ], batch size: 53, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:34:06,287 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94714.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:34:18,445 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3349, 3.0374, 2.2457, 2.7883, 0.8794, 2.9113, 2.8797, 2.9559], + device='cuda:3'), covar=tensor([0.1015, 0.1279, 0.1972, 0.0971, 0.3539, 0.0993, 0.1022, 0.1231], + device='cuda:3'), in_proj_covar=tensor([0.0447, 0.0374, 0.0449, 0.0321, 0.0385, 0.0379, 0.0373, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:34:37,293 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94739.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:35:04,629 INFO [train.py:903] (3/4) Epoch 14, batch 6000, loss[loss=0.2792, simple_loss=0.3271, pruned_loss=0.1157, over 19389.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.298, pruned_loss=0.07412, over 3826112.91 frames. ], batch size: 48, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:35:04,629 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 03:35:17,173 INFO [train.py:937] (3/4) Epoch 14, validation: loss=0.1744, simple_loss=0.2748, pruned_loss=0.03705, over 944034.00 frames. +2023-04-02 03:35:17,174 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 03:35:27,294 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. 
limit=2.0 +2023-04-02 03:35:47,198 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 5.018e+02 6.191e+02 7.483e+02 1.325e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-02 03:36:17,849 INFO [train.py:903] (3/4) Epoch 14, batch 6050, loss[loss=0.2419, simple_loss=0.3148, pruned_loss=0.08455, over 19793.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2989, pruned_loss=0.07429, over 3820470.22 frames. ], batch size: 56, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:36:33,142 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94825.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:36:46,396 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0087, 5.0346, 5.7953, 5.7649, 1.8035, 5.4138, 4.6601, 5.4238], + device='cuda:3'), covar=tensor([0.1294, 0.0735, 0.0485, 0.0483, 0.5957, 0.0547, 0.0536, 0.1016], + device='cuda:3'), in_proj_covar=tensor([0.0712, 0.0641, 0.0850, 0.0726, 0.0756, 0.0595, 0.0510, 0.0783], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 03:37:02,993 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-02 03:37:20,908 INFO [train.py:903] (3/4) Epoch 14, batch 6100, loss[loss=0.1984, simple_loss=0.2764, pruned_loss=0.06023, over 19752.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2991, pruned_loss=0.07378, over 3829127.06 frames. ], batch size: 54, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:37:37,762 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1697, 1.3541, 1.7760, 1.4137, 2.7159, 3.5391, 3.2915, 3.7118], + device='cuda:3'), covar=tensor([0.1610, 0.3442, 0.3015, 0.2188, 0.0542, 0.0216, 0.0209, 0.0211], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0302, 0.0329, 0.0252, 0.0223, 0.0166, 0.0207, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:37:48,988 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 5.260e+02 6.294e+02 8.137e+02 1.551e+03, threshold=1.259e+03, percent-clipped=3.0 +2023-04-02 03:38:21,670 INFO [train.py:903] (3/4) Epoch 14, batch 6150, loss[loss=0.2401, simple_loss=0.3207, pruned_loss=0.07973, over 19699.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2986, pruned_loss=0.07325, over 3840218.90 frames. ], batch size: 59, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:38:24,606 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7482, 1.4919, 1.4579, 1.8050, 1.4213, 1.5930, 1.4749, 1.6830], + device='cuda:3'), covar=tensor([0.0957, 0.1283, 0.1270, 0.0842, 0.1154, 0.0506, 0.1158, 0.0652], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0351, 0.0297, 0.0244, 0.0299, 0.0244, 0.0289, 0.0244], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:38:48,815 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 03:39:13,137 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94956.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:39:21,874 INFO [train.py:903] (3/4) Epoch 14, batch 6200, loss[loss=0.2086, simple_loss=0.2937, pruned_loss=0.06172, over 19647.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2989, pruned_loss=0.07346, over 3824402.98 frames. 
], batch size: 55, lr: 5.85e-03, grad_scale: 8.0 +2023-04-02 03:39:44,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8519, 4.3537, 2.6638, 3.8387, 1.2253, 4.3105, 4.2236, 4.3097], + device='cuda:3'), covar=tensor([0.0574, 0.0931, 0.2027, 0.0881, 0.3787, 0.0624, 0.0788, 0.0995], + device='cuda:3'), in_proj_covar=tensor([0.0447, 0.0371, 0.0451, 0.0318, 0.0384, 0.0379, 0.0372, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:39:44,499 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94981.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:39:51,884 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 5.470e+02 6.385e+02 8.085e+02 2.296e+03, threshold=1.277e+03, percent-clipped=5.0 +2023-04-02 03:40:22,505 INFO [train.py:903] (3/4) Epoch 14, batch 6250, loss[loss=0.2069, simple_loss=0.2744, pruned_loss=0.06965, over 19318.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.299, pruned_loss=0.07355, over 3821810.83 frames. ], batch size: 44, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:40:55,017 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 03:41:13,034 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0469, 1.1627, 1.6654, 1.1276, 2.6352, 3.6228, 3.3660, 3.8664], + device='cuda:3'), covar=tensor([0.1680, 0.3703, 0.3100, 0.2247, 0.0539, 0.0176, 0.0206, 0.0185], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0302, 0.0328, 0.0252, 0.0223, 0.0167, 0.0207, 0.0217], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:41:24,166 INFO [train.py:903] (3/4) Epoch 14, batch 6300, loss[loss=0.2157, simple_loss=0.2798, pruned_loss=0.07579, over 18677.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2998, pruned_loss=0.07385, over 3820486.80 frames. ], batch size: 41, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:41:44,526 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:41:51,892 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.537e+02 5.238e+02 6.215e+02 7.195e+02 1.642e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 03:42:15,025 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95106.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:42:24,134 INFO [train.py:903] (3/4) Epoch 14, batch 6350, loss[loss=0.2834, simple_loss=0.3354, pruned_loss=0.1157, over 13144.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3001, pruned_loss=0.07442, over 3807364.65 frames. ], batch size: 136, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:42:34,691 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95123.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:43:23,508 INFO [train.py:903] (3/4) Epoch 14, batch 6400, loss[loss=0.2247, simple_loss=0.3044, pruned_loss=0.07253, over 19589.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3014, pruned_loss=0.0753, over 3810850.80 frames. 
], batch size: 61, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:43:31,461 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9371, 1.7986, 1.5704, 2.0723, 1.8534, 1.7135, 1.7056, 1.8879], + device='cuda:3'), covar=tensor([0.0937, 0.1477, 0.1435, 0.0947, 0.1253, 0.0515, 0.1135, 0.0727], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0352, 0.0295, 0.0244, 0.0298, 0.0243, 0.0287, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:43:52,823 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 5.689e+02 7.116e+02 8.755e+02 2.889e+03, threshold=1.423e+03, percent-clipped=3.0 +2023-04-02 03:44:23,642 INFO [train.py:903] (3/4) Epoch 14, batch 6450, loss[loss=0.2408, simple_loss=0.3215, pruned_loss=0.08005, over 19673.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3019, pruned_loss=0.07547, over 3811067.18 frames. ], batch size: 58, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:09,474 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 03:45:25,843 INFO [train.py:903] (3/4) Epoch 14, batch 6500, loss[loss=0.2522, simple_loss=0.3323, pruned_loss=0.08607, over 19698.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3008, pruned_loss=0.07442, over 3830970.96 frames. ], batch size: 60, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:45:32,329 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 03:45:43,539 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:52,232 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 03:45:52,736 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:45:54,574 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.245e+02 6.559e+02 8.783e+02 2.152e+03, threshold=1.312e+03, percent-clipped=6.0 +2023-04-02 03:46:27,877 INFO [train.py:903] (3/4) Epoch 14, batch 6550, loss[loss=0.187, simple_loss=0.2763, pruned_loss=0.04886, over 19613.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2992, pruned_loss=0.07326, over 3841310.44 frames. ], batch size: 57, lr: 5.84e-03, grad_scale: 8.0 +2023-04-02 03:47:20,335 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95357.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:47:28,138 INFO [train.py:903] (3/4) Epoch 14, batch 6600, loss[loss=0.1914, simple_loss=0.2637, pruned_loss=0.05958, over 19748.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2979, pruned_loss=0.07283, over 3814453.73 frames. ], batch size: 47, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:47:41,991 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9579, 4.3727, 4.6584, 4.6513, 1.8159, 4.3504, 3.8453, 4.3575], + device='cuda:3'), covar=tensor([0.1517, 0.0700, 0.0576, 0.0567, 0.5030, 0.0544, 0.0597, 0.1065], + device='cuda:3'), in_proj_covar=tensor([0.0726, 0.0648, 0.0863, 0.0740, 0.0765, 0.0603, 0.0517, 0.0793], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 03:47:53,875 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. 
limit=2.0 +2023-04-02 03:47:57,395 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.401e+02 5.166e+02 6.061e+02 7.266e+02 1.890e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 03:48:28,442 INFO [train.py:903] (3/4) Epoch 14, batch 6650, loss[loss=0.2162, simple_loss=0.2948, pruned_loss=0.06877, over 19358.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07293, over 3816563.05 frames. ], batch size: 47, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:48:52,536 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0085, 4.3604, 4.7098, 4.7003, 1.6233, 4.3719, 3.8587, 4.3338], + device='cuda:3'), covar=tensor([0.1351, 0.0867, 0.0532, 0.0526, 0.5701, 0.0670, 0.0599, 0.1141], + device='cuda:3'), in_proj_covar=tensor([0.0719, 0.0644, 0.0859, 0.0737, 0.0760, 0.0601, 0.0515, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 03:49:29,375 INFO [train.py:903] (3/4) Epoch 14, batch 6700, loss[loss=0.2674, simple_loss=0.3303, pruned_loss=0.1022, over 13001.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2998, pruned_loss=0.07398, over 3807844.96 frames. ], batch size: 135, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:49:33,794 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95467.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 03:49:38,883 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-02 03:49:50,129 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0977, 1.8557, 1.7211, 2.0460, 1.9552, 1.7183, 1.5813, 2.0285], + device='cuda:3'), covar=tensor([0.0904, 0.1309, 0.1307, 0.0898, 0.1160, 0.0516, 0.1271, 0.0617], + device='cuda:3'), in_proj_covar=tensor([0.0257, 0.0352, 0.0296, 0.0245, 0.0298, 0.0244, 0.0287, 0.0244], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:49:57,462 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 5.267e+02 5.901e+02 8.158e+02 1.902e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-02 03:50:01,513 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 03:50:12,944 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0527, 3.3701, 1.8714, 2.0272, 3.0336, 1.6198, 1.3426, 2.0769], + device='cuda:3'), covar=tensor([0.1270, 0.0595, 0.1001, 0.0762, 0.0512, 0.1153, 0.0947, 0.0704], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0304, 0.0325, 0.0246, 0.0238, 0.0326, 0.0291, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:50:25,737 INFO [train.py:903] (3/4) Epoch 14, batch 6750, loss[loss=0.262, simple_loss=0.3287, pruned_loss=0.09765, over 18779.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.07445, over 3813932.63 frames. 
], batch size: 74, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:01,613 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2587, 1.5459, 1.9679, 1.3924, 2.9534, 4.7580, 4.6790, 5.1672], + device='cuda:3'), covar=tensor([0.1547, 0.3281, 0.2870, 0.2018, 0.0499, 0.0150, 0.0150, 0.0115], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0303, 0.0329, 0.0253, 0.0223, 0.0167, 0.0207, 0.0219], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:51:21,229 INFO [train.py:903] (3/4) Epoch 14, batch 6800, loss[loss=0.2323, simple_loss=0.3071, pruned_loss=0.07877, over 17399.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3011, pruned_loss=0.07528, over 3809632.69 frames. ], batch size: 101, lr: 5.83e-03, grad_scale: 8.0 +2023-04-02 03:51:41,543 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95582.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:51:46,738 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.198e+02 6.166e+02 8.008e+02 1.508e+03, threshold=1.233e+03, percent-clipped=6.0 +2023-04-02 03:52:06,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 03:52:07,147 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 03:52:10,211 INFO [train.py:903] (3/4) Epoch 15, batch 0, loss[loss=0.2034, simple_loss=0.2754, pruned_loss=0.06575, over 19415.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2754, pruned_loss=0.06575, over 19415.00 frames. ], batch size: 48, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:52:10,211 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 03:52:21,739 INFO [train.py:937] (3/4) Epoch 15, validation: loss=0.1744, simple_loss=0.2751, pruned_loss=0.03681, over 944034.00 frames. +2023-04-02 03:52:21,741 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 03:52:28,903 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2508, 1.2563, 1.5633, 1.4793, 2.4988, 2.0973, 2.6740, 1.0716], + device='cuda:3'), covar=tensor([0.2447, 0.4132, 0.2458, 0.1892, 0.1490, 0.2026, 0.1424, 0.4025], + device='cuda:3'), in_proj_covar=tensor([0.0503, 0.0592, 0.0642, 0.0452, 0.0602, 0.0507, 0.0650, 0.0510], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 03:52:33,162 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 03:52:58,966 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:08,239 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:14,653 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:53:22,140 INFO [train.py:903] (3/4) Epoch 15, batch 50, loss[loss=0.2512, simple_loss=0.3228, pruned_loss=0.08979, over 19335.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3041, pruned_loss=0.07643, over 861140.38 frames. 
], batch size: 66, lr: 5.63e-03, grad_scale: 8.0 +2023-04-02 03:53:25,960 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8832, 1.5276, 1.4940, 1.4565, 3.4594, 0.9960, 2.3090, 3.8147], + device='cuda:3'), covar=tensor([0.0417, 0.2424, 0.2582, 0.1825, 0.0678, 0.2503, 0.1203, 0.0235], + device='cuda:3'), in_proj_covar=tensor([0.0376, 0.0349, 0.0369, 0.0328, 0.0358, 0.0337, 0.0346, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:53:58,798 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 03:54:20,263 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 5.472e+02 6.461e+02 8.212e+02 1.912e+03, threshold=1.292e+03, percent-clipped=7.0 +2023-04-02 03:54:26,855 INFO [train.py:903] (3/4) Epoch 15, batch 100, loss[loss=0.2298, simple_loss=0.311, pruned_loss=0.07429, over 19755.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2992, pruned_loss=0.07237, over 1515497.49 frames. ], batch size: 63, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:54:29,739 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8797, 1.4724, 1.3987, 1.7872, 1.5678, 1.5504, 1.3722, 1.7257], + device='cuda:3'), covar=tensor([0.0939, 0.1386, 0.1546, 0.0911, 0.1196, 0.0552, 0.1399, 0.0731], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0355, 0.0299, 0.0246, 0.0299, 0.0247, 0.0289, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:54:37,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 03:54:37,597 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95701.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:22,878 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:55:28,252 INFO [train.py:903] (3/4) Epoch 15, batch 150, loss[loss=0.2126, simple_loss=0.2972, pruned_loss=0.06399, over 19759.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2975, pruned_loss=0.07189, over 2037059.91 frames. ], batch size: 54, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:55:32,090 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:56:23,970 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 5.351e+02 6.322e+02 7.452e+02 1.833e+03, threshold=1.264e+03, percent-clipped=1.0 +2023-04-02 03:56:27,313 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 03:56:28,499 INFO [train.py:903] (3/4) Epoch 15, batch 200, loss[loss=0.2031, simple_loss=0.2917, pruned_loss=0.05729, over 19617.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3011, pruned_loss=0.0742, over 2437789.87 frames. 
], batch size: 57, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:56:51,115 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0520, 1.2963, 1.8302, 1.3837, 2.8101, 4.4270, 4.4103, 4.8945], + device='cuda:3'), covar=tensor([0.1639, 0.3575, 0.3076, 0.2128, 0.0604, 0.0188, 0.0151, 0.0129], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0302, 0.0328, 0.0252, 0.0223, 0.0166, 0.0205, 0.0218], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:56:59,737 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:57:15,609 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:24,900 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95838.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:29,641 INFO [train.py:903] (3/4) Epoch 15, batch 250, loss[loss=0.2449, simple_loss=0.3185, pruned_loss=0.08564, over 19659.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07388, over 2750229.02 frames. ], batch size: 60, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:57:39,955 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5541, 4.1437, 2.4761, 3.6797, 1.1183, 3.9008, 3.8929, 3.9323], + device='cuda:3'), covar=tensor([0.0650, 0.0964, 0.2313, 0.0824, 0.4075, 0.0783, 0.0927, 0.1241], + device='cuda:3'), in_proj_covar=tensor([0.0454, 0.0377, 0.0457, 0.0322, 0.0391, 0.0387, 0.0381, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 03:57:56,159 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95863.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 03:57:58,221 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 03:58:24,514 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.277e+02 6.948e+02 9.039e+02 3.101e+03, threshold=1.390e+03, percent-clipped=9.0 +2023-04-02 03:58:30,111 INFO [train.py:903] (3/4) Epoch 15, batch 300, loss[loss=0.2498, simple_loss=0.3226, pruned_loss=0.08846, over 19684.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3017, pruned_loss=0.07502, over 2995880.99 frames. ], batch size: 60, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:12,004 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3143, 1.4126, 1.7125, 1.5824, 2.6768, 2.2488, 2.6070, 1.0845], + device='cuda:3'), covar=tensor([0.2281, 0.4050, 0.2417, 0.1789, 0.1322, 0.1932, 0.1427, 0.3913], + device='cuda:3'), in_proj_covar=tensor([0.0498, 0.0587, 0.0636, 0.0448, 0.0598, 0.0503, 0.0643, 0.0506], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 03:59:32,836 INFO [train.py:903] (3/4) Epoch 15, batch 350, loss[loss=0.1821, simple_loss=0.2547, pruned_loss=0.05476, over 19724.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2991, pruned_loss=0.07323, over 3191067.85 frames. ], batch size: 46, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 03:59:33,870 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-02 04:00:17,460 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:24,009 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1560, 1.3062, 1.8154, 1.5598, 2.9078, 4.5108, 4.5861, 4.9930], + device='cuda:3'), covar=tensor([0.1679, 0.3541, 0.3118, 0.2037, 0.0536, 0.0182, 0.0145, 0.0153], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0304, 0.0332, 0.0254, 0.0224, 0.0168, 0.0207, 0.0220], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:00:28,194 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 5.018e+02 5.906e+02 6.897e+02 1.495e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-04-02 04:00:32,778 INFO [train.py:903] (3/4) Epoch 15, batch 400, loss[loss=0.2149, simple_loss=0.2876, pruned_loss=0.0711, over 19853.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07286, over 3335973.42 frames. ], batch size: 52, lr: 5.62e-03, grad_scale: 8.0 +2023-04-02 04:00:34,351 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:39,927 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95998.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:44,425 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:00:53,872 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9794, 1.0688, 1.5410, 0.7597, 2.2492, 2.9995, 2.7221, 3.2084], + device='cuda:3'), covar=tensor([0.1727, 0.4007, 0.3487, 0.2653, 0.0591, 0.0200, 0.0242, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0305, 0.0332, 0.0255, 0.0225, 0.0168, 0.0207, 0.0221], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:01:04,807 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:16,068 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:01:33,843 INFO [train.py:903] (3/4) Epoch 15, batch 450, loss[loss=0.2083, simple_loss=0.2907, pruned_loss=0.0629, over 19746.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2976, pruned_loss=0.07278, over 3449776.93 frames. ], batch size: 54, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:07,788 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 04:02:07,823 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 04:02:12,916 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:19,227 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 04:02:31,244 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.837e+02 5.995e+02 7.453e+02 1.580e+03, threshold=1.199e+03, percent-clipped=6.0 +2023-04-02 04:02:36,670 INFO [train.py:903] (3/4) Epoch 15, batch 500, loss[loss=0.2177, simple_loss=0.295, pruned_loss=0.07022, over 19853.00 frames. 
], tot_loss[loss=0.2214, simple_loss=0.2979, pruned_loss=0.07247, over 3530121.68 frames. ], batch size: 52, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:02:39,295 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:43,773 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:02:44,803 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:03:38,897 INFO [train.py:903] (3/4) Epoch 15, batch 550, loss[loss=0.2426, simple_loss=0.3189, pruned_loss=0.08319, over 18827.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2972, pruned_loss=0.07177, over 3613871.85 frames. ], batch size: 74, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:03:45,757 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7058, 1.4200, 1.3874, 1.7609, 1.4492, 1.4904, 1.4494, 1.6433], + device='cuda:3'), covar=tensor([0.1008, 0.1387, 0.1434, 0.0983, 0.1252, 0.0558, 0.1268, 0.0738], + device='cuda:3'), in_proj_covar=tensor([0.0255, 0.0349, 0.0295, 0.0243, 0.0295, 0.0244, 0.0285, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:04:18,076 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96174.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:04:29,243 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:04:35,630 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.342e+02 6.491e+02 8.104e+02 1.503e+03, threshold=1.298e+03, percent-clipped=3.0 +2023-04-02 04:04:40,050 INFO [train.py:903] (3/4) Epoch 15, batch 600, loss[loss=0.1954, simple_loss=0.2776, pruned_loss=0.05655, over 19788.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2988, pruned_loss=0.0726, over 3667968.60 frames. ], batch size: 47, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:05:00,836 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:05:20,396 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 04:05:33,430 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 04:05:43,306 INFO [train.py:903] (3/4) Epoch 15, batch 650, loss[loss=0.1983, simple_loss=0.2744, pruned_loss=0.06107, over 19475.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2989, pruned_loss=0.07265, over 3707568.48 frames. 
], batch size: 49, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:06:10,064 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1025, 3.5505, 2.0777, 2.1105, 3.2511, 1.8798, 1.2881, 2.2783], + device='cuda:3'), covar=tensor([0.1215, 0.0515, 0.0978, 0.0799, 0.0457, 0.1109, 0.1025, 0.0660], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0303, 0.0322, 0.0245, 0.0236, 0.0324, 0.0287, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:06:10,082 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2578, 3.0675, 2.2870, 2.3599, 2.1547, 2.5934, 0.7746, 2.1394], + device='cuda:3'), covar=tensor([0.0555, 0.0478, 0.0603, 0.0932, 0.0984, 0.0940, 0.1343, 0.0913], + device='cuda:3'), in_proj_covar=tensor([0.0344, 0.0340, 0.0335, 0.0364, 0.0438, 0.0365, 0.0319, 0.0324], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:06:20,840 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3218, 3.7784, 3.9019, 3.9133, 1.5360, 3.6671, 3.2691, 3.5967], + device='cuda:3'), covar=tensor([0.1505, 0.0828, 0.0674, 0.0702, 0.5366, 0.0781, 0.0647, 0.1233], + device='cuda:3'), in_proj_covar=tensor([0.0720, 0.0648, 0.0857, 0.0736, 0.0760, 0.0595, 0.0516, 0.0786], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:06:41,549 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 5.105e+02 6.385e+02 8.770e+02 1.706e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 04:06:42,946 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96289.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:06:46,046 INFO [train.py:903] (3/4) Epoch 15, batch 700, loss[loss=0.226, simple_loss=0.3022, pruned_loss=0.07494, over 18291.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.07287, over 3731879.92 frames. ], batch size: 84, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:24,629 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 04:07:26,395 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,397 INFO [train.py:903] (3/4) Epoch 15, batch 750, loss[loss=0.2245, simple_loss=0.3052, pruned_loss=0.07189, over 19760.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3001, pruned_loss=0.07379, over 3754437.47 frames. 
], batch size: 63, lr: 5.61e-03, grad_scale: 8.0 +2023-04-02 04:07:47,565 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:47,728 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:07:58,659 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:29,483 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:08:44,438 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 5.276e+02 6.207e+02 7.536e+02 1.572e+03, threshold=1.241e+03, percent-clipped=2.0 +2023-04-02 04:08:49,811 INFO [train.py:903] (3/4) Epoch 15, batch 800, loss[loss=0.1567, simple_loss=0.2325, pruned_loss=0.0404, over 19762.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2996, pruned_loss=0.07361, over 3780018.41 frames. ], batch size: 47, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:04,707 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 04:09:09,611 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96408.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:09:12,036 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9915, 2.7764, 1.9687, 2.0710, 1.8533, 2.2556, 0.9695, 1.8980], + device='cuda:3'), covar=tensor([0.0617, 0.0525, 0.0609, 0.0940, 0.0986, 0.0952, 0.1127, 0.0905], + device='cuda:3'), in_proj_covar=tensor([0.0345, 0.0342, 0.0336, 0.0366, 0.0440, 0.0367, 0.0321, 0.0326], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:09:50,619 INFO [train.py:903] (3/4) Epoch 15, batch 850, loss[loss=0.2914, simple_loss=0.3498, pruned_loss=0.1165, over 13173.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2998, pruned_loss=0.07382, over 3790883.73 frames. ], batch size: 136, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:09:51,925 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:09:53,261 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3236, 3.7848, 3.8945, 3.8913, 1.5254, 3.6685, 3.2505, 3.6130], + device='cuda:3'), covar=tensor([0.1445, 0.0894, 0.0635, 0.0682, 0.5251, 0.0811, 0.0637, 0.1074], + device='cuda:3'), in_proj_covar=tensor([0.0725, 0.0649, 0.0863, 0.0739, 0.0767, 0.0602, 0.0519, 0.0792], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:09:57,238 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-02 04:10:10,299 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:10:41,147 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-04-02 04:10:47,710 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 5.238e+02 6.465e+02 7.879e+02 1.664e+03, threshold=1.293e+03, percent-clipped=4.0 +2023-04-02 04:10:52,477 INFO [train.py:903] (3/4) Epoch 15, batch 900, loss[loss=0.1746, simple_loss=0.2535, pruned_loss=0.04787, over 19795.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3, pruned_loss=0.07409, over 3792951.71 frames. ], batch size: 47, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:15,482 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9701, 3.6197, 2.4594, 3.2175, 0.9139, 3.5361, 3.4368, 3.5315], + device='cuda:3'), covar=tensor([0.0844, 0.1263, 0.2080, 0.0935, 0.3924, 0.0768, 0.0914, 0.1088], + device='cuda:3'), in_proj_covar=tensor([0.0453, 0.0381, 0.0455, 0.0323, 0.0390, 0.0388, 0.0382, 0.0412], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:11:36,537 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:11:55,051 INFO [train.py:903] (3/4) Epoch 15, batch 950, loss[loss=0.1951, simple_loss=0.2816, pruned_loss=0.05428, over 19845.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2998, pruned_loss=0.0737, over 3807852.65 frames. ], batch size: 52, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:11:56,235 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 04:11:59,952 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96545.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:12:14,123 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:30,391 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96570.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:12:41,732 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:12:52,449 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.674e+02 5.225e+02 6.143e+02 7.839e+02 1.754e+03, threshold=1.229e+03, percent-clipped=1.0 +2023-04-02 04:12:57,220 INFO [train.py:903] (3/4) Epoch 15, batch 1000, loss[loss=0.2076, simple_loss=0.2883, pruned_loss=0.06341, over 19742.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2996, pruned_loss=0.07379, over 3820802.32 frames. ], batch size: 51, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:12:58,910 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7521, 1.8873, 2.1043, 2.5993, 1.7893, 2.4624, 2.2202, 1.9382], + device='cuda:3'), covar=tensor([0.4003, 0.3469, 0.1594, 0.1847, 0.3717, 0.1695, 0.4123, 0.3014], + device='cuda:3'), in_proj_covar=tensor([0.0826, 0.0865, 0.0666, 0.0896, 0.0812, 0.0747, 0.0802, 0.0731], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 04:13:13,527 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:13:51,310 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 04:13:59,278 INFO [train.py:903] (3/4) Epoch 15, batch 1050, loss[loss=0.2077, simple_loss=0.2771, pruned_loss=0.0692, over 19393.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3006, pruned_loss=0.07448, over 3817930.05 frames. ], batch size: 48, lr: 5.60e-03, grad_scale: 8.0 +2023-04-02 04:13:59,604 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:28,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9869, 3.6356, 2.4023, 3.2485, 0.8974, 3.5064, 3.4573, 3.4863], + device='cuda:3'), covar=tensor([0.0791, 0.1126, 0.2121, 0.0864, 0.3781, 0.0766, 0.0854, 0.1183], + device='cuda:3'), in_proj_covar=tensor([0.0453, 0.0380, 0.0456, 0.0323, 0.0389, 0.0388, 0.0381, 0.0413], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:14:31,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 04:14:53,799 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:14:57,022 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.831e+02 5.170e+02 6.505e+02 8.376e+02 1.590e+03, threshold=1.301e+03, percent-clipped=4.0 +2023-04-02 04:15:01,344 INFO [train.py:903] (3/4) Epoch 15, batch 1100, loss[loss=0.2017, simple_loss=0.268, pruned_loss=0.06768, over 19403.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2997, pruned_loss=0.07382, over 3817250.28 frames. ], batch size: 48, lr: 5.60e-03, grad_scale: 4.0 +2023-04-02 04:15:28,122 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96713.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:15:58,790 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:02,840 INFO [train.py:903] (3/4) Epoch 15, batch 1150, loss[loss=0.2509, simple_loss=0.3173, pruned_loss=0.09223, over 19638.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2988, pruned_loss=0.0731, over 3816600.80 frames. 
], batch size: 55, lr: 5.59e-03, grad_scale: 4.0 +2023-04-02 04:16:08,669 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1428, 5.5528, 3.0391, 4.8207, 1.3545, 5.5976, 5.4748, 5.6558], + device='cuda:3'), covar=tensor([0.0401, 0.0764, 0.1865, 0.0677, 0.3448, 0.0561, 0.0659, 0.0947], + device='cuda:3'), in_proj_covar=tensor([0.0452, 0.0380, 0.0456, 0.0323, 0.0388, 0.0388, 0.0381, 0.0412], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:16:16,218 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:16:55,637 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7971, 1.8555, 2.1493, 2.3592, 1.6534, 2.3142, 2.3107, 1.9784], + device='cuda:3'), covar=tensor([0.3738, 0.3378, 0.1651, 0.1888, 0.3423, 0.1685, 0.3993, 0.2973], + device='cuda:3'), in_proj_covar=tensor([0.0828, 0.0865, 0.0668, 0.0902, 0.0813, 0.0749, 0.0805, 0.0735], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 04:17:01,609 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.122e+02 6.460e+02 8.216e+02 1.619e+03, threshold=1.292e+03, percent-clipped=5.0 +2023-04-02 04:17:06,203 INFO [train.py:903] (3/4) Epoch 15, batch 1200, loss[loss=0.2214, simple_loss=0.3053, pruned_loss=0.06881, over 19555.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2996, pruned_loss=0.07348, over 3819557.95 frames. ], batch size: 56, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:17:08,711 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:18,031 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:32,634 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:17:39,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 04:18:03,086 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:18:08,000 INFO [train.py:903] (3/4) Epoch 15, batch 1250, loss[loss=0.2155, simple_loss=0.2953, pruned_loss=0.0679, over 19333.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2998, pruned_loss=0.07379, over 3814521.16 frames. 
], batch size: 70, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:18:23,404 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7245, 3.9327, 4.2730, 4.2540, 2.4390, 4.0204, 3.6591, 4.0266], + device='cuda:3'), covar=tensor([0.1187, 0.2102, 0.0526, 0.0586, 0.4018, 0.0912, 0.0522, 0.0830], + device='cuda:3'), in_proj_covar=tensor([0.0724, 0.0653, 0.0863, 0.0739, 0.0770, 0.0604, 0.0519, 0.0793], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:18:38,305 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:05,690 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 5.442e+02 6.897e+02 8.528e+02 1.967e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 04:19:09,075 INFO [train.py:903] (3/4) Epoch 15, batch 1300, loss[loss=0.2269, simple_loss=0.3048, pruned_loss=0.07454, over 19616.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2993, pruned_loss=0.07368, over 3818921.36 frames. ], batch size: 57, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:19:17,641 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:48,652 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:19:57,838 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9354, 2.0082, 2.2679, 2.6617, 1.9247, 2.6628, 2.3112, 2.0629], + device='cuda:3'), covar=tensor([0.3930, 0.3479, 0.1695, 0.2198, 0.3762, 0.1783, 0.4150, 0.2992], + device='cuda:3'), in_proj_covar=tensor([0.0831, 0.0865, 0.0669, 0.0902, 0.0814, 0.0750, 0.0807, 0.0735], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 04:20:12,477 INFO [train.py:903] (3/4) Epoch 15, batch 1350, loss[loss=0.216, simple_loss=0.297, pruned_loss=0.06749, over 19655.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2994, pruned_loss=0.07375, over 3829359.33 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:11,447 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.520e+02 5.515e+02 7.272e+02 1.782e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-04-02 04:21:15,867 INFO [train.py:903] (3/4) Epoch 15, batch 1400, loss[loss=0.2233, simple_loss=0.3036, pruned_loss=0.07144, over 19675.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2974, pruned_loss=0.07231, over 3833401.41 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:21:49,713 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:22:19,081 INFO [train.py:903] (3/4) Epoch 15, batch 1450, loss[loss=0.2217, simple_loss=0.3079, pruned_loss=0.06777, over 19535.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2973, pruned_loss=0.07246, over 3824349.65 frames. ], batch size: 54, lr: 5.59e-03, grad_scale: 8.0 +2023-04-02 04:22:20,293 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-02 04:22:38,951 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:09,851 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:23:18,577 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.198e+02 6.417e+02 9.294e+02 1.968e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 04:23:21,917 INFO [train.py:903] (3/4) Epoch 15, batch 1500, loss[loss=0.1886, simple_loss=0.2633, pruned_loss=0.05701, over 19777.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2974, pruned_loss=0.07235, over 3827275.90 frames. ], batch size: 48, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:23:22,383 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1156, 1.7856, 1.6100, 2.0198, 1.7369, 1.8093, 1.5880, 2.0517], + device='cuda:3'), covar=tensor([0.0872, 0.1403, 0.1421, 0.0969, 0.1346, 0.0493, 0.1307, 0.0663], + device='cuda:3'), in_proj_covar=tensor([0.0256, 0.0349, 0.0297, 0.0242, 0.0293, 0.0245, 0.0287, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:23:46,474 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.40 vs. limit=5.0 +2023-04-02 04:24:01,311 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:20,475 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:24:24,799 INFO [train.py:903] (3/4) Epoch 15, batch 1550, loss[loss=0.186, simple_loss=0.2593, pruned_loss=0.05634, over 19385.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2971, pruned_loss=0.07179, over 3834089.66 frames. ], batch size: 48, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:24:31,868 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97148.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:25:22,551 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.223e+02 6.490e+02 8.468e+02 1.572e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-02 04:25:25,973 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1128, 1.2217, 1.4594, 1.3615, 2.6805, 1.0883, 2.1112, 2.9693], + device='cuda:3'), covar=tensor([0.0525, 0.2777, 0.2625, 0.1722, 0.0764, 0.2248, 0.1106, 0.0358], + device='cuda:3'), in_proj_covar=tensor([0.0373, 0.0346, 0.0363, 0.0326, 0.0351, 0.0335, 0.0346, 0.0371], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:25:26,785 INFO [train.py:903] (3/4) Epoch 15, batch 1600, loss[loss=0.214, simple_loss=0.2882, pruned_loss=0.06995, over 19733.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2975, pruned_loss=0.07233, over 3826015.16 frames. 
], batch size: 51, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:25:40,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3931, 1.4257, 1.7242, 1.6368, 2.6653, 2.2746, 2.8105, 1.0449], + device='cuda:3'), covar=tensor([0.2268, 0.3984, 0.2580, 0.1784, 0.1492, 0.1963, 0.1422, 0.4109], + device='cuda:3'), in_proj_covar=tensor([0.0507, 0.0592, 0.0644, 0.0451, 0.0605, 0.0506, 0.0649, 0.0509], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:25:51,567 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 04:26:28,830 INFO [train.py:903] (3/4) Epoch 15, batch 1650, loss[loss=0.2608, simple_loss=0.3415, pruned_loss=0.09005, over 19327.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07302, over 3817792.29 frames. ], batch size: 70, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:26:40,205 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0463, 1.5822, 1.8770, 1.6360, 4.5602, 0.8191, 2.6138, 4.8874], + device='cuda:3'), covar=tensor([0.0345, 0.2760, 0.2587, 0.1932, 0.0715, 0.2886, 0.1297, 0.0179], + device='cuda:3'), in_proj_covar=tensor([0.0374, 0.0347, 0.0365, 0.0327, 0.0353, 0.0336, 0.0348, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:26:42,671 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97253.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:14,827 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:27:27,487 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.574e+02 5.168e+02 6.563e+02 8.938e+02 1.305e+03, threshold=1.313e+03, percent-clipped=1.0 +2023-04-02 04:27:30,919 INFO [train.py:903] (3/4) Epoch 15, batch 1700, loss[loss=0.2435, simple_loss=0.317, pruned_loss=0.08502, over 19404.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2991, pruned_loss=0.07341, over 3816439.56 frames. ], batch size: 70, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:27:36,039 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4991, 4.0628, 4.2149, 4.2029, 1.6544, 3.9605, 3.4911, 3.9403], + device='cuda:3'), covar=tensor([0.1400, 0.0770, 0.0535, 0.0590, 0.5148, 0.0811, 0.0622, 0.0933], + device='cuda:3'), in_proj_covar=tensor([0.0717, 0.0647, 0.0855, 0.0735, 0.0763, 0.0599, 0.0516, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:28:11,291 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 04:28:32,694 INFO [train.py:903] (3/4) Epoch 15, batch 1750, loss[loss=0.217, simple_loss=0.2947, pruned_loss=0.0697, over 19585.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2994, pruned_loss=0.07386, over 3813691.59 frames. 
], batch size: 61, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:28:55,664 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:28:57,858 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:01,665 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:29:30,674 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.096e+02 6.393e+02 8.037e+02 1.428e+03, threshold=1.279e+03, percent-clipped=5.0 +2023-04-02 04:29:33,928 INFO [train.py:903] (3/4) Epoch 15, batch 1800, loss[loss=0.2384, simple_loss=0.3203, pruned_loss=0.07823, over 19531.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2991, pruned_loss=0.07368, over 3821244.25 frames. ], batch size: 54, lr: 5.58e-03, grad_scale: 8.0 +2023-04-02 04:29:34,202 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:30:32,231 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 04:30:36,840 INFO [train.py:903] (3/4) Epoch 15, batch 1850, loss[loss=0.2529, simple_loss=0.3211, pruned_loss=0.09238, over 19312.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2988, pruned_loss=0.07325, over 3811261.28 frames. ], batch size: 66, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:10,809 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 04:31:21,359 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:31:35,888 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.535e+02 5.674e+02 7.772e+02 1.771e+03, threshold=1.135e+03, percent-clipped=3.0 +2023-04-02 04:31:39,238 INFO [train.py:903] (3/4) Epoch 15, batch 1900, loss[loss=0.2315, simple_loss=0.3117, pruned_loss=0.07569, over 19651.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2982, pruned_loss=0.07295, over 3806543.71 frames. ], batch size: 60, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:31:58,009 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 04:32:00,970 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97509.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:02,865 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 04:32:28,308 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 04:32:32,288 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:32:40,694 INFO [train.py:903] (3/4) Epoch 15, batch 1950, loss[loss=0.2401, simple_loss=0.3179, pruned_loss=0.08114, over 18320.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2988, pruned_loss=0.07298, over 3822382.64 frames. 
], batch size: 84, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:33:19,864 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:33:28,685 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 04:33:39,670 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.621e+02 4.980e+02 6.456e+02 8.636e+02 2.349e+03, threshold=1.291e+03, percent-clipped=8.0 +2023-04-02 04:33:43,291 INFO [train.py:903] (3/4) Epoch 15, batch 2000, loss[loss=0.2529, simple_loss=0.3138, pruned_loss=0.09598, over 14019.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2999, pruned_loss=0.07381, over 3803767.34 frames. ], batch size: 136, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:34:03,603 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-02 04:34:20,860 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97622.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:34:42,256 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 04:34:47,042 INFO [train.py:903] (3/4) Epoch 15, batch 2050, loss[loss=0.2222, simple_loss=0.3073, pruned_loss=0.06851, over 19291.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3, pruned_loss=0.07379, over 3803738.57 frames. ], batch size: 66, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:01,890 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 04:35:03,049 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 04:35:23,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 04:35:47,011 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.998e+02 6.604e+02 7.928e+02 1.987e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 04:35:50,611 INFO [train.py:903] (3/4) Epoch 15, batch 2100, loss[loss=0.2065, simple_loss=0.2856, pruned_loss=0.06366, over 19591.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3008, pruned_loss=0.07427, over 3786007.23 frames. ], batch size: 52, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:35:50,932 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97692.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:36:04,943 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:10,965 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 04:36:11,598 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97709.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:20,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 04:36:41,918 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1020, 1.7877, 1.7024, 2.1388, 1.9086, 1.8059, 1.5598, 2.0327], + device='cuda:3'), covar=tensor([0.0875, 0.1560, 0.1371, 0.0970, 0.1209, 0.0503, 0.1331, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0353, 0.0298, 0.0245, 0.0296, 0.0247, 0.0290, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:36:41,957 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:43,967 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 04:36:45,262 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:46,629 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97737.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:36:51,867 INFO [train.py:903] (3/4) Epoch 15, batch 2150, loss[loss=0.1996, simple_loss=0.2799, pruned_loss=0.05958, over 19577.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3003, pruned_loss=0.07393, over 3784033.27 frames. ], batch size: 52, lr: 5.57e-03, grad_scale: 8.0 +2023-04-02 04:36:53,795 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-02 04:37:12,302 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:37:49,744 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.862e+02 6.186e+02 7.185e+02 1.323e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 04:37:53,998 INFO [train.py:903] (3/4) Epoch 15, batch 2200, loss[loss=0.2107, simple_loss=0.2945, pruned_loss=0.06347, over 19604.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2995, pruned_loss=0.07338, over 3784915.14 frames. ], batch size: 57, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:38:28,072 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:33,718 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:38:36,554 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 04:38:57,676 INFO [train.py:903] (3/4) Epoch 15, batch 2250, loss[loss=0.2944, simple_loss=0.3491, pruned_loss=0.1198, over 19351.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3, pruned_loss=0.07354, over 3791539.85 frames. ], batch size: 70, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:39:09,183 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:39:09,869 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.19 vs. limit=5.0 +2023-04-02 04:39:56,865 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.977e+02 6.292e+02 8.077e+02 1.831e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 04:40:00,327 INFO [train.py:903] (3/4) Epoch 15, batch 2300, loss[loss=0.2286, simple_loss=0.2846, pruned_loss=0.0863, over 16121.00 frames. 
], tot_loss[loss=0.2227, simple_loss=0.2992, pruned_loss=0.07309, over 3790333.43 frames. ], batch size: 35, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:40:12,679 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 04:40:16,282 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:40:21,313 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.41 vs. limit=5.0 +2023-04-02 04:40:29,883 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:41:01,537 INFO [train.py:903] (3/4) Epoch 15, batch 2350, loss[loss=0.2431, simple_loss=0.3137, pruned_loss=0.08624, over 19297.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2999, pruned_loss=0.07339, over 3800921.00 frames. ], batch size: 66, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:41:11,490 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 04:41:44,921 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 04:41:58,278 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.416e+02 6.244e+02 7.823e+02 1.587e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-04-02 04:41:58,341 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 04:42:02,751 INFO [train.py:903] (3/4) Epoch 15, batch 2400, loss[loss=0.2004, simple_loss=0.2739, pruned_loss=0.06346, over 19851.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2985, pruned_loss=0.07286, over 3799819.59 frames. ], batch size: 52, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:42:04,441 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:36,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:53,733 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:42:58,098 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98036.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:43:05,611 INFO [train.py:903] (3/4) Epoch 15, batch 2450, loss[loss=0.2086, simple_loss=0.286, pruned_loss=0.06559, over 19771.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2987, pruned_loss=0.07299, over 3798889.85 frames. 
], batch size: 54, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:43:47,589 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7509, 1.6125, 1.5883, 2.1867, 1.7419, 2.1746, 2.1262, 1.9910], + device='cuda:3'), covar=tensor([0.0819, 0.0919, 0.0993, 0.0839, 0.0824, 0.0622, 0.0815, 0.0602], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0220, 0.0222, 0.0241, 0.0226, 0.0206, 0.0190, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 04:43:47,647 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98075.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:43:55,097 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:05,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 5.305e+02 6.204e+02 8.091e+02 1.870e+03, threshold=1.241e+03, percent-clipped=5.0 +2023-04-02 04:44:09,724 INFO [train.py:903] (3/4) Epoch 15, batch 2500, loss[loss=0.2267, simple_loss=0.3046, pruned_loss=0.07435, over 19562.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2968, pruned_loss=0.07187, over 3813044.60 frames. ], batch size: 61, lr: 5.56e-03, grad_scale: 8.0 +2023-04-02 04:44:19,650 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98100.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:25,576 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98105.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:27,987 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:44:51,252 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98124.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:01,484 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98132.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:45:01,673 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 04:45:02,547 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1991, 3.7198, 3.8497, 3.8427, 1.4744, 3.6762, 3.1642, 3.5613], + device='cuda:3'), covar=tensor([0.1720, 0.0935, 0.0715, 0.0774, 0.5558, 0.0890, 0.0769, 0.1204], + device='cuda:3'), in_proj_covar=tensor([0.0724, 0.0654, 0.0862, 0.0742, 0.0770, 0.0608, 0.0520, 0.0787], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:45:12,195 INFO [train.py:903] (3/4) Epoch 15, batch 2550, loss[loss=0.2943, simple_loss=0.3605, pruned_loss=0.114, over 13683.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2982, pruned_loss=0.07266, over 3809547.45 frames. ], batch size: 136, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:45:23,247 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98151.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 04:46:07,096 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-02 04:46:10,535 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 5.247e+02 6.339e+02 8.638e+02 2.352e+03, threshold=1.268e+03, percent-clipped=5.0 +2023-04-02 04:46:14,040 INFO [train.py:903] (3/4) Epoch 15, batch 2600, loss[loss=0.2256, simple_loss=0.3012, pruned_loss=0.07503, over 19589.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2977, pruned_loss=0.0726, over 3796551.98 frames. ], batch size: 52, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:18,023 INFO [train.py:903] (3/4) Epoch 15, batch 2650, loss[loss=0.1967, simple_loss=0.2715, pruned_loss=0.06091, over 19744.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2965, pruned_loss=0.0718, over 3810787.26 frames. ], batch size: 45, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:47:28,823 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:47:39,872 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 04:48:17,333 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:48:18,024 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.990e+02 6.118e+02 7.575e+02 1.335e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 04:48:21,600 INFO [train.py:903] (3/4) Epoch 15, batch 2700, loss[loss=0.2249, simple_loss=0.2996, pruned_loss=0.07513, over 19607.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2978, pruned_loss=0.07255, over 3799809.12 frames. ], batch size: 61, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:48:40,275 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6943, 1.6075, 1.5526, 2.2326, 1.8505, 1.9902, 2.0474, 1.8177], + device='cuda:3'), covar=tensor([0.0759, 0.0846, 0.0954, 0.0658, 0.0777, 0.0695, 0.0792, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0221, 0.0224, 0.0242, 0.0229, 0.0208, 0.0190, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 04:48:47,480 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98313.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:49:24,161 INFO [train.py:903] (3/4) Epoch 15, batch 2750, loss[loss=0.1937, simple_loss=0.2649, pruned_loss=0.06129, over 19755.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2974, pruned_loss=0.07245, over 3805771.12 frames. ], batch size: 46, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:49:39,888 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 04:49:54,726 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:50:23,801 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.173e+02 6.107e+02 7.991e+02 1.431e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-04-02 04:50:27,281 INFO [train.py:903] (3/4) Epoch 15, batch 2800, loss[loss=0.2289, simple_loss=0.3092, pruned_loss=0.0743, over 19097.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2969, pruned_loss=0.07192, over 3816115.41 frames. ], batch size: 69, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:50:34,375 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. 
limit=2.0 +2023-04-02 04:50:48,806 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98407.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:13,908 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:51:18,547 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98432.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 04:51:30,819 INFO [train.py:903] (3/4) Epoch 15, batch 2850, loss[loss=0.1926, simple_loss=0.2659, pruned_loss=0.0597, over 19741.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2982, pruned_loss=0.07242, over 3804245.13 frames. ], batch size: 46, lr: 5.55e-03, grad_scale: 8.0 +2023-04-02 04:51:53,692 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7433, 3.2133, 3.2550, 3.3034, 1.3642, 3.1490, 2.7198, 2.9989], + device='cuda:3'), covar=tensor([0.1713, 0.1115, 0.0866, 0.0916, 0.5196, 0.0974, 0.0874, 0.1449], + device='cuda:3'), in_proj_covar=tensor([0.0726, 0.0658, 0.0862, 0.0741, 0.0773, 0.0611, 0.0519, 0.0799], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 04:52:03,936 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:52:30,664 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 5.164e+02 6.295e+02 8.927e+02 2.262e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 04:52:34,166 INFO [train.py:903] (3/4) Epoch 15, batch 2900, loss[loss=0.1928, simple_loss=0.2644, pruned_loss=0.06063, over 19740.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2982, pruned_loss=0.0724, over 3821138.13 frames. ], batch size: 46, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:52:35,432 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 04:52:49,640 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4776, 1.3430, 1.3973, 1.8200, 1.4904, 1.6775, 1.7722, 1.5665], + device='cuda:3'), covar=tensor([0.0884, 0.1022, 0.1086, 0.0720, 0.0842, 0.0801, 0.0811, 0.0734], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0243, 0.0230, 0.0209, 0.0191, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 04:53:14,485 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8024, 4.4435, 3.2013, 3.7470, 1.9900, 4.2236, 4.2009, 4.3049], + device='cuda:3'), covar=tensor([0.0524, 0.1037, 0.1788, 0.0988, 0.2844, 0.0750, 0.0828, 0.1150], + device='cuda:3'), in_proj_covar=tensor([0.0463, 0.0384, 0.0460, 0.0326, 0.0389, 0.0394, 0.0385, 0.0418], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:53:36,705 INFO [train.py:903] (3/4) Epoch 15, batch 2950, loss[loss=0.2214, simple_loss=0.3049, pruned_loss=0.06891, over 19537.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2994, pruned_loss=0.07313, over 3817777.78 frames. 
], batch size: 56, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:00,570 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98561.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:29,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:54:35,377 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.043e+02 6.244e+02 8.249e+02 2.456e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 04:54:38,823 INFO [train.py:903] (3/4) Epoch 15, batch 3000, loss[loss=0.2161, simple_loss=0.2916, pruned_loss=0.0703, over 19575.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2994, pruned_loss=0.07331, over 3821187.50 frames. ], batch size: 61, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:54:38,824 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 04:54:51,336 INFO [train.py:937] (3/4) Epoch 15, validation: loss=0.1735, simple_loss=0.2738, pruned_loss=0.0366, over 944034.00 frames. +2023-04-02 04:54:51,337 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 04:54:53,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 04:55:08,335 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4590, 2.2367, 1.6981, 1.3932, 2.0237, 1.3264, 1.3907, 1.9138], + device='cuda:3'), covar=tensor([0.0851, 0.0736, 0.0957, 0.0797, 0.0500, 0.1203, 0.0616, 0.0418], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0305, 0.0325, 0.0249, 0.0238, 0.0329, 0.0292, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 04:55:28,665 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:55:52,974 INFO [train.py:903] (3/4) Epoch 15, batch 3050, loss[loss=0.1932, simple_loss=0.2803, pruned_loss=0.05305, over 19655.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2993, pruned_loss=0.07335, over 3826134.81 frames. ], batch size: 55, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:55:57,857 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98646.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:56:51,964 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.865e+02 5.607e+02 7.054e+02 9.171e+02 2.046e+03, threshold=1.411e+03, percent-clipped=6.0 +2023-04-02 04:56:54,317 INFO [train.py:903] (3/4) Epoch 15, batch 3100, loss[loss=0.2436, simple_loss=0.3211, pruned_loss=0.08299, over 19605.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3001, pruned_loss=0.07396, over 3820430.46 frames. ], batch size: 61, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:57:58,294 INFO [train.py:903] (3/4) Epoch 15, batch 3150, loss[loss=0.2122, simple_loss=0.2908, pruned_loss=0.06675, over 19664.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3007, pruned_loss=0.07444, over 3809218.45 frames. ], batch size: 53, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:58:26,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 04:58:31,706 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-04-02 04:58:34,647 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 04:58:39,659 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1935, 2.1109, 1.9056, 1.7216, 1.5480, 1.7697, 0.7117, 1.2150], + device='cuda:3'), covar=tensor([0.0523, 0.0471, 0.0359, 0.0658, 0.1022, 0.0737, 0.0988, 0.0788], + device='cuda:3'), in_proj_covar=tensor([0.0342, 0.0340, 0.0337, 0.0369, 0.0441, 0.0365, 0.0319, 0.0327], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 04:58:58,671 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 5.220e+02 6.366e+02 8.908e+02 1.802e+03, threshold=1.273e+03, percent-clipped=4.0 +2023-04-02 04:59:01,103 INFO [train.py:903] (3/4) Epoch 15, batch 3200, loss[loss=0.2174, simple_loss=0.2988, pruned_loss=0.06798, over 19665.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2997, pruned_loss=0.07356, over 3804886.84 frames. ], batch size: 53, lr: 5.54e-03, grad_scale: 8.0 +2023-04-02 04:59:59,440 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98839.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:02,405 INFO [train.py:903] (3/4) Epoch 15, batch 3250, loss[loss=0.2394, simple_loss=0.3162, pruned_loss=0.08133, over 19669.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2993, pruned_loss=0.07318, over 3808414.71 frames. ], batch size: 55, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:00:30,847 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:56,827 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98887.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:00:59,975 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 4.885e+02 6.090e+02 7.322e+02 1.846e+03, threshold=1.218e+03, percent-clipped=3.0 +2023-04-02 05:01:02,403 INFO [train.py:903] (3/4) Epoch 15, batch 3300, loss[loss=0.2256, simple_loss=0.3003, pruned_loss=0.07552, over 19789.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2992, pruned_loss=0.07355, over 3813386.57 frames. ], batch size: 56, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:01:08,225 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 05:01:20,964 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:25,264 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98908.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:30,573 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:01:41,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9153, 2.0176, 2.2081, 2.6677, 1.8660, 2.5172, 2.4010, 2.0395], + device='cuda:3'), covar=tensor([0.3971, 0.3529, 0.1702, 0.2114, 0.3949, 0.1839, 0.3995, 0.3058], + device='cuda:3'), in_proj_covar=tensor([0.0831, 0.0873, 0.0669, 0.0903, 0.0815, 0.0748, 0.0807, 0.0735], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 05:02:07,326 INFO [train.py:903] (3/4) Epoch 15, batch 3350, loss[loss=0.2151, simple_loss=0.2841, pruned_loss=0.07301, over 19802.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2994, pruned_loss=0.07346, over 3822176.05 frames. ], batch size: 49, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:02:09,397 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 05:03:06,865 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98989.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:03:07,570 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.628e+02 7.317e+02 9.748e+02 2.071e+03, threshold=1.463e+03, percent-clipped=8.0 +2023-04-02 05:03:09,826 INFO [train.py:903] (3/4) Epoch 15, batch 3400, loss[loss=0.2385, simple_loss=0.3143, pruned_loss=0.08141, over 19642.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2986, pruned_loss=0.07299, over 3822986.62 frames. ], batch size: 60, lr: 5.53e-03, grad_scale: 8.0 +2023-04-02 05:03:44,093 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:04:10,174 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5255, 2.2864, 1.6623, 1.5770, 2.1021, 1.3388, 1.4499, 1.8747], + device='cuda:3'), covar=tensor([0.0953, 0.0714, 0.0948, 0.0691, 0.0478, 0.1089, 0.0654, 0.0494], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0307, 0.0324, 0.0251, 0.0239, 0.0327, 0.0293, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:04:10,837 INFO [train.py:903] (3/4) Epoch 15, batch 3450, loss[loss=0.2572, simple_loss=0.3289, pruned_loss=0.09278, over 19621.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2989, pruned_loss=0.07306, over 3822644.82 frames. ], batch size: 61, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:04:14,066 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 05:04:34,710 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1752, 1.1033, 1.1488, 1.3091, 1.0661, 1.3228, 1.2555, 1.2397], + device='cuda:3'), covar=tensor([0.0863, 0.0982, 0.1011, 0.0669, 0.0881, 0.0793, 0.0843, 0.0732], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0225, 0.0207, 0.0189, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 05:04:45,800 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:05:11,152 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.946e+02 5.886e+02 7.201e+02 1.354e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-02 05:05:12,315 INFO [train.py:903] (3/4) Epoch 15, batch 3500, loss[loss=0.2203, simple_loss=0.3031, pruned_loss=0.06875, over 19464.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2992, pruned_loss=0.07336, over 3818658.47 frames. ], batch size: 64, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:15,627 INFO [train.py:903] (3/4) Epoch 15, batch 3550, loss[loss=0.2566, simple_loss=0.3317, pruned_loss=0.0907, over 19308.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2991, pruned_loss=0.07333, over 3809125.47 frames. ], batch size: 66, lr: 5.53e-03, grad_scale: 4.0 +2023-04-02 05:06:18,404 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:48,316 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:06:53,056 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2867, 1.2089, 1.2589, 1.3662, 1.0122, 1.3894, 1.3334, 1.3210], + device='cuda:3'), covar=tensor([0.0876, 0.0981, 0.1044, 0.0681, 0.0885, 0.0806, 0.0847, 0.0749], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0221, 0.0221, 0.0240, 0.0226, 0.0208, 0.0189, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 05:07:18,042 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 4.734e+02 6.110e+02 7.809e+02 1.736e+03, threshold=1.222e+03, percent-clipped=8.0 +2023-04-02 05:07:19,077 INFO [train.py:903] (3/4) Epoch 15, batch 3600, loss[loss=0.2332, simple_loss=0.3048, pruned_loss=0.08078, over 19762.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2988, pruned_loss=0.07294, over 3811795.93 frames. ], batch size: 56, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:20,329 INFO [train.py:903] (3/4) Epoch 15, batch 3650, loss[loss=0.2081, simple_loss=0.2929, pruned_loss=0.06159, over 19726.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2996, pruned_loss=0.07349, over 3808176.97 frames. 
], batch size: 63, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:08:26,434 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:32,076 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:08:37,709 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:03,832 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:15,265 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:09:20,757 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.641e+02 5.562e+02 6.527e+02 8.007e+02 1.277e+03, threshold=1.305e+03, percent-clipped=3.0 +2023-04-02 05:09:21,909 INFO [train.py:903] (3/4) Epoch 15, batch 3700, loss[loss=0.1842, simple_loss=0.2657, pruned_loss=0.0513, over 19742.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2985, pruned_loss=0.07273, over 3826277.00 frames. ], batch size: 51, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:09:34,139 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:13,725 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99333.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:24,820 INFO [train.py:903] (3/4) Epoch 15, batch 3750, loss[loss=0.2532, simple_loss=0.321, pruned_loss=0.09272, over 19531.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2985, pruned_loss=0.07305, over 3823987.33 frames. ], batch size: 56, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:10:25,126 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99342.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:10:40,368 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4512, 2.1528, 2.1851, 2.5738, 2.4243, 2.1466, 2.0139, 2.5326], + device='cuda:3'), covar=tensor([0.0873, 0.1595, 0.1192, 0.0947, 0.1241, 0.0483, 0.1161, 0.0600], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0354, 0.0297, 0.0244, 0.0298, 0.0249, 0.0292, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:10:56,148 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99367.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:01,981 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:26,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.684e+02 5.640e+02 6.861e+02 8.808e+02 1.909e+03, threshold=1.372e+03, percent-clipped=3.0 +2023-04-02 05:11:28,567 INFO [train.py:903] (3/4) Epoch 15, batch 3800, loss[loss=0.2156, simple_loss=0.2825, pruned_loss=0.07433, over 19805.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2987, pruned_loss=0.07281, over 3829769.82 frames. 
], batch size: 47, lr: 5.52e-03, grad_scale: 8.0 +2023-04-02 05:11:41,830 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4061, 2.1575, 1.9018, 1.8878, 1.6400, 1.8435, 0.4939, 1.2017], + device='cuda:3'), covar=tensor([0.0484, 0.0482, 0.0411, 0.0618, 0.0891, 0.0613, 0.1030, 0.0798], + device='cuda:3'), in_proj_covar=tensor([0.0346, 0.0346, 0.0341, 0.0373, 0.0444, 0.0369, 0.0322, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:11:53,197 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:11:58,859 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 05:12:26,300 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:30,508 INFO [train.py:903] (3/4) Epoch 15, batch 3850, loss[loss=0.2328, simple_loss=0.3094, pruned_loss=0.0781, over 18140.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2997, pruned_loss=0.07371, over 3817459.42 frames. ], batch size: 84, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:12:37,705 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:12:44,226 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-02 05:13:25,909 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99486.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:13:32,485 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.342e+02 4.923e+02 6.054e+02 7.410e+02 1.518e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 05:13:32,508 INFO [train.py:903] (3/4) Epoch 15, batch 3900, loss[loss=0.2139, simple_loss=0.2946, pruned_loss=0.06661, over 19543.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2987, pruned_loss=0.07277, over 3818933.47 frames. ], batch size: 54, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:18,191 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:14:33,477 INFO [train.py:903] (3/4) Epoch 15, batch 3950, loss[loss=0.2058, simple_loss=0.2945, pruned_loss=0.05851, over 19679.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07289, over 3816594.77 frames. ], batch size: 58, lr: 5.52e-03, grad_scale: 4.0 +2023-04-02 05:14:41,169 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 05:15:28,365 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99586.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:35,965 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:15:36,879 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.329e+02 6.331e+02 8.679e+02 1.427e+03, threshold=1.266e+03, percent-clipped=6.0 +2023-04-02 05:15:36,896 INFO [train.py:903] (3/4) Epoch 15, batch 4000, loss[loss=0.2375, simple_loss=0.2958, pruned_loss=0.0896, over 19393.00 frames. ], tot_loss[loss=0.223, simple_loss=0.299, pruned_loss=0.07349, over 3808219.42 frames. 
], batch size: 47, lr: 5.51e-03, grad_scale: 8.0 +2023-04-02 05:15:54,666 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:14,214 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99623.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:21,043 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:23,022 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 05:16:23,148 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:30,441 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9570, 2.5805, 1.7610, 1.8322, 2.3654, 1.6017, 1.5764, 1.9182], + device='cuda:3'), covar=tensor([0.0934, 0.0668, 0.0718, 0.0711, 0.0491, 0.0965, 0.0674, 0.0569], + device='cuda:3'), in_proj_covar=tensor([0.0292, 0.0305, 0.0321, 0.0250, 0.0239, 0.0325, 0.0290, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:16:38,165 INFO [train.py:903] (3/4) Epoch 15, batch 4050, loss[loss=0.1971, simple_loss=0.2826, pruned_loss=0.05583, over 19678.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2982, pruned_loss=0.07286, over 3803291.05 frames. ], batch size: 58, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:16:45,330 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:16:50,848 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:32,698 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:39,406 INFO [train.py:903] (3/4) Epoch 15, batch 4100, loss[loss=0.2569, simple_loss=0.319, pruned_loss=0.0974, over 17506.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2986, pruned_loss=0.07324, over 3784498.49 frames. ], batch size: 101, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:17:40,549 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.802e+02 5.566e+02 7.429e+02 9.161e+02 2.166e+03, threshold=1.486e+03, percent-clipped=8.0 +2023-04-02 05:17:47,000 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:55,016 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:17:58,101 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:14,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-02 05:18:25,797 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0732, 4.4027, 4.7758, 4.7752, 1.6761, 4.4560, 3.8777, 4.4195], + device='cuda:3'), covar=tensor([0.1402, 0.0843, 0.0539, 0.0542, 0.5576, 0.0694, 0.0620, 0.1083], + device='cuda:3'), in_proj_covar=tensor([0.0725, 0.0657, 0.0860, 0.0736, 0.0771, 0.0609, 0.0519, 0.0797], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 05:18:27,041 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:18:43,280 INFO [train.py:903] (3/4) Epoch 15, batch 4150, loss[loss=0.1824, simple_loss=0.2667, pruned_loss=0.04901, over 19735.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07297, over 3789131.92 frames. ], batch size: 51, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:18:47,084 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:11,133 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8746, 1.1075, 1.5075, 0.6208, 2.0277, 2.4217, 2.0942, 2.5651], + device='cuda:3'), covar=tensor([0.1608, 0.3809, 0.3198, 0.2603, 0.0587, 0.0250, 0.0355, 0.0313], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0304, 0.0332, 0.0252, 0.0226, 0.0169, 0.0207, 0.0224], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:19:13,809 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 05:19:31,732 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:35,421 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99784.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:19:45,183 INFO [train.py:903] (3/4) Epoch 15, batch 4200, loss[loss=0.2085, simple_loss=0.2961, pruned_loss=0.06048, over 19691.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07297, over 3799866.22 frames. ], batch size: 60, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:19:47,439 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.549e+02 6.772e+02 8.720e+02 1.402e+03, threshold=1.354e+03, percent-clipped=0.0 +2023-04-02 05:19:50,942 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 05:19:57,173 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99801.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:06,509 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:20:32,495 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99830.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:20:47,388 INFO [train.py:903] (3/4) Epoch 15, batch 4250, loss[loss=0.2843, simple_loss=0.3429, pruned_loss=0.1129, over 19724.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2983, pruned_loss=0.07289, over 3806621.83 frames. ], batch size: 63, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:03,966 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. 
Duration: 29.816625 +2023-04-02 05:21:15,081 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 05:21:47,833 INFO [train.py:903] (3/4) Epoch 15, batch 4300, loss[loss=0.2167, simple_loss=0.2939, pruned_loss=0.06981, over 19688.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3002, pruned_loss=0.07398, over 3808362.74 frames. ], batch size: 59, lr: 5.51e-03, grad_scale: 4.0 +2023-04-02 05:21:48,973 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.517e+02 6.494e+02 8.260e+02 1.741e+03, threshold=1.299e+03, percent-clipped=4.0 +2023-04-02 05:21:53,808 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:35,375 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99930.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:22:41,688 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 05:22:48,549 INFO [train.py:903] (3/4) Epoch 15, batch 4350, loss[loss=0.2319, simple_loss=0.311, pruned_loss=0.07646, over 19676.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3017, pruned_loss=0.07451, over 3800563.46 frames. ], batch size: 59, lr: 5.50e-03, grad_scale: 4.0 +2023-04-02 05:22:53,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99945.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:23:01,035 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:15,966 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99962.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:46,367 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:23:51,929 INFO [train.py:903] (3/4) Epoch 15, batch 4400, loss[loss=0.1917, simple_loss=0.2753, pruned_loss=0.054, over 19862.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.302, pruned_loss=0.0744, over 3810843.38 frames. ], batch size: 52, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:23:53,158 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.844e+02 5.787e+02 7.470e+02 1.170e+03, threshold=1.157e+03, percent-clipped=0.0 +2023-04-02 05:23:57,370 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6081, 0.9598, 1.2921, 1.5439, 2.9476, 1.2493, 2.5014, 3.5523], + device='cuda:3'), covar=tensor([0.0610, 0.3691, 0.3292, 0.2069, 0.1123, 0.2783, 0.1215, 0.0356], + device='cuda:3'), in_proj_covar=tensor([0.0379, 0.0349, 0.0370, 0.0329, 0.0356, 0.0339, 0.0350, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:24:06,744 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:23,726 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 05:24:31,844 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 05:24:34,570 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100025.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:24:35,754 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:24:47,758 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-02 05:24:57,052 INFO [train.py:903] (3/4) Epoch 15, batch 4450, loss[loss=0.1974, simple_loss=0.2773, pruned_loss=0.05875, over 19607.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3014, pruned_loss=0.07391, over 3809521.53 frames. ], batch size: 50, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:24:57,242 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:00,932 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:07,812 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8578, 1.3566, 1.5838, 1.4229, 3.3619, 0.9358, 2.2364, 3.7727], + device='cuda:3'), covar=tensor([0.0434, 0.2741, 0.2636, 0.1895, 0.0729, 0.2812, 0.1386, 0.0244], + device='cuda:3'), in_proj_covar=tensor([0.0379, 0.0350, 0.0369, 0.0330, 0.0357, 0.0339, 0.0350, 0.0372], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:25:14,940 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100057.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:25,381 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:47,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100082.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:25:59,178 INFO [train.py:903] (3/4) Epoch 15, batch 4500, loss[loss=0.2092, simple_loss=0.2791, pruned_loss=0.06967, over 15128.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3003, pruned_loss=0.07326, over 3807781.20 frames. ], batch size: 33, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:25:59,590 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3040, 1.4121, 1.8150, 1.5660, 2.6650, 2.1026, 2.7651, 1.2130], + device='cuda:3'), covar=tensor([0.2383, 0.4045, 0.2373, 0.1881, 0.1522, 0.2160, 0.1553, 0.4020], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0600, 0.0651, 0.0456, 0.0606, 0.0508, 0.0646, 0.0515], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:26:00,181 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.254e+02 5.166e+02 6.659e+02 8.670e+02 1.796e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 05:27:01,358 INFO [train.py:903] (3/4) Epoch 15, batch 4550, loss[loss=0.2057, simple_loss=0.2784, pruned_loss=0.06652, over 19713.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3003, pruned_loss=0.07324, over 3811567.15 frames. ], batch size: 46, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:27:10,376 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-02 05:27:16,343 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:21,784 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100157.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:27:38,127 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 05:27:47,854 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:28:04,925 INFO [train.py:903] (3/4) Epoch 15, batch 4600, loss[loss=0.2051, simple_loss=0.2922, pruned_loss=0.05897, over 19520.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2994, pruned_loss=0.07252, over 3817096.83 frames. ], batch size: 54, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:28:06,059 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.707e+02 5.654e+02 7.541e+02 1.184e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-02 05:28:19,066 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100201.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:28:30,960 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7204, 4.8684, 5.4924, 5.4967, 2.0411, 5.1811, 4.4440, 5.1751], + device='cuda:3'), covar=tensor([0.1415, 0.0928, 0.0445, 0.0494, 0.5288, 0.0673, 0.0563, 0.0883], + device='cuda:3'), in_proj_covar=tensor([0.0728, 0.0664, 0.0865, 0.0744, 0.0771, 0.0612, 0.0522, 0.0799], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 05:28:47,079 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100226.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:29:08,990 INFO [train.py:903] (3/4) Epoch 15, batch 4650, loss[loss=0.2468, simple_loss=0.3303, pruned_loss=0.08162, over 19772.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3005, pruned_loss=0.07348, over 3801894.74 frames. ], batch size: 56, lr: 5.50e-03, grad_scale: 8.0 +2023-04-02 05:29:25,489 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 05:29:25,762 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:29:35,836 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 05:29:44,956 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1840, 1.2642, 1.6679, 1.2070, 2.5112, 3.4001, 3.1382, 3.6096], + device='cuda:3'), covar=tensor([0.1536, 0.3573, 0.3119, 0.2255, 0.0524, 0.0166, 0.0220, 0.0244], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0305, 0.0334, 0.0252, 0.0225, 0.0170, 0.0208, 0.0225], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:30:11,077 INFO [train.py:903] (3/4) Epoch 15, batch 4700, loss[loss=0.2594, simple_loss=0.331, pruned_loss=0.09392, over 13615.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2997, pruned_loss=0.07307, over 3791460.89 frames. 
], batch size: 136, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:30:12,228 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.601e+02 6.328e+02 8.331e+02 3.311e+03, threshold=1.266e+03, percent-clipped=13.0 +2023-04-02 05:30:22,240 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:33,229 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 05:30:50,899 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:30:55,476 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100326.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:13,906 INFO [train.py:903] (3/4) Epoch 15, batch 4750, loss[loss=0.2459, simple_loss=0.3208, pruned_loss=0.0855, over 19544.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.0719, over 3806217.14 frames. ], batch size: 56, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:31:21,212 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:31:49,667 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100369.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:32:16,847 INFO [train.py:903] (3/4) Epoch 15, batch 4800, loss[loss=0.2247, simple_loss=0.3054, pruned_loss=0.07201, over 19745.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2983, pruned_loss=0.07241, over 3803744.52 frames. ], batch size: 63, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:32:18,028 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.713e+02 6.387e+02 8.455e+02 2.006e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 05:32:44,444 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100413.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:32:52,650 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:17,263 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100438.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:33:21,583 INFO [train.py:903] (3/4) Epoch 15, batch 4850, loss[loss=0.2026, simple_loss=0.2906, pruned_loss=0.05734, over 19175.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2967, pruned_loss=0.07154, over 3811805.45 frames. ], batch size: 69, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:33:48,715 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 05:34:10,979 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 05:34:14,955 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100484.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:34:15,723 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 05:34:17,764 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 05:34:24,801 INFO [train.py:903] (3/4) Epoch 15, batch 4900, loss[loss=0.2398, simple_loss=0.3219, pruned_loss=0.07887, over 18864.00 frames. 
], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.0715, over 3822244.05 frames. ], batch size: 74, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:34:25,926 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.042e+02 5.846e+02 7.925e+02 1.600e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-02 05:34:25,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 05:34:46,437 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 05:35:19,255 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7930, 2.5170, 2.4786, 2.8532, 2.6272, 2.5560, 2.2025, 2.9686], + device='cuda:3'), covar=tensor([0.0783, 0.1466, 0.1251, 0.0981, 0.1310, 0.0431, 0.1193, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0355, 0.0297, 0.0242, 0.0298, 0.0245, 0.0292, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:35:27,072 INFO [train.py:903] (3/4) Epoch 15, batch 4950, loss[loss=0.2634, simple_loss=0.3364, pruned_loss=0.09514, over 19327.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2959, pruned_loss=0.07105, over 3821338.12 frames. ], batch size: 66, lr: 5.49e-03, grad_scale: 8.0 +2023-04-02 05:35:45,798 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 05:36:05,342 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7707, 4.2660, 4.5281, 4.4879, 1.6441, 4.2238, 3.7219, 4.2535], + device='cuda:3'), covar=tensor([0.1540, 0.0721, 0.0557, 0.0631, 0.5593, 0.0675, 0.0610, 0.1042], + device='cuda:3'), in_proj_covar=tensor([0.0735, 0.0669, 0.0873, 0.0750, 0.0778, 0.0619, 0.0528, 0.0808], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 05:36:09,661 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 05:36:29,684 INFO [train.py:903] (3/4) Epoch 15, batch 5000, loss[loss=0.2001, simple_loss=0.2858, pruned_loss=0.05724, over 19708.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2965, pruned_loss=0.0717, over 3816453.84 frames. ], batch size: 59, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:36:31,854 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 5.199e+02 6.623e+02 8.418e+02 1.165e+03, threshold=1.325e+03, percent-clipped=0.0 +2023-04-02 05:36:40,514 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 05:36:40,832 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:36:52,299 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 05:37:33,618 INFO [train.py:903] (3/4) Epoch 15, batch 5050, loss[loss=0.2797, simple_loss=0.3516, pruned_loss=0.1039, over 18177.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2976, pruned_loss=0.07255, over 3813284.13 frames. ], batch size: 83, lr: 5.49e-03, grad_scale: 4.0 +2023-04-02 05:38:09,699 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-02 05:38:37,502 INFO [train.py:903] (3/4) Epoch 15, batch 5100, loss[loss=0.2217, simple_loss=0.3049, pruned_loss=0.06926, over 19661.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2975, pruned_loss=0.07216, over 3819410.00 frames. ], batch size: 55, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:38:39,901 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 5.134e+02 6.124e+02 7.830e+02 1.941e+03, threshold=1.225e+03, percent-clipped=4.0 +2023-04-02 05:38:49,391 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 05:38:51,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 05:38:57,375 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 05:38:57,742 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0296, 1.1962, 1.7346, 1.2357, 2.7274, 3.7681, 3.4939, 3.9971], + device='cuda:3'), covar=tensor([0.1617, 0.3713, 0.3144, 0.2279, 0.0526, 0.0176, 0.0206, 0.0191], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0302, 0.0332, 0.0251, 0.0225, 0.0169, 0.0206, 0.0223], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:39:05,825 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:39:37,098 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100740.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:39:38,886 INFO [train.py:903] (3/4) Epoch 15, batch 5150, loss[loss=0.2128, simple_loss=0.2958, pruned_loss=0.06493, over 19679.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2986, pruned_loss=0.07278, over 3797376.67 frames. ], batch size: 53, lr: 5.48e-03, grad_scale: 4.0 +2023-04-02 05:39:39,312 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9580, 1.8577, 1.7085, 1.4947, 1.3256, 1.4366, 0.3501, 0.8148], + device='cuda:3'), covar=tensor([0.0496, 0.0548, 0.0372, 0.0609, 0.1144, 0.0705, 0.1088, 0.0937], + device='cuda:3'), in_proj_covar=tensor([0.0347, 0.0342, 0.0340, 0.0369, 0.0442, 0.0370, 0.0322, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:39:47,029 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100748.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:39:51,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 05:40:07,667 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:40:09,141 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100765.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:40:28,241 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 05:40:41,945 INFO [train.py:903] (3/4) Epoch 15, batch 5200, loss[loss=0.2076, simple_loss=0.2821, pruned_loss=0.0665, over 19846.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2981, pruned_loss=0.07291, over 3807214.22 frames. 
], batch size: 52, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:40:44,524 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 5.325e+02 6.515e+02 8.501e+02 1.618e+03, threshold=1.303e+03, percent-clipped=5.0 +2023-04-02 05:40:58,646 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 05:41:43,070 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 05:41:47,614 INFO [train.py:903] (3/4) Epoch 15, batch 5250, loss[loss=0.2311, simple_loss=0.3068, pruned_loss=0.07773, over 19658.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2974, pruned_loss=0.07228, over 3811057.61 frames. ], batch size: 55, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:21,034 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1064, 2.1751, 2.4168, 2.8814, 2.1464, 2.7447, 2.5610, 2.2208], + device='cuda:3'), covar=tensor([0.3803, 0.3729, 0.1663, 0.2142, 0.3682, 0.1779, 0.3925, 0.3011], + device='cuda:3'), in_proj_covar=tensor([0.0835, 0.0877, 0.0675, 0.0906, 0.0817, 0.0752, 0.0812, 0.0739], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 05:42:25,537 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1524, 1.2767, 1.7349, 1.1720, 2.4894, 3.2268, 3.0120, 3.5045], + device='cuda:3'), covar=tensor([0.1680, 0.3743, 0.3144, 0.2469, 0.0624, 0.0278, 0.0255, 0.0250], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0304, 0.0333, 0.0252, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:42:33,480 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100879.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:42:50,397 INFO [train.py:903] (3/4) Epoch 15, batch 5300, loss[loss=0.2136, simple_loss=0.2957, pruned_loss=0.06571, over 19584.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2979, pruned_loss=0.07244, over 3822918.56 frames. ], batch size: 61, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:42:52,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.131e+02 6.120e+02 8.353e+02 1.768e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 05:43:08,004 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 05:43:52,923 INFO [train.py:903] (3/4) Epoch 15, batch 5350, loss[loss=0.1784, simple_loss=0.264, pruned_loss=0.04644, over 19483.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2981, pruned_loss=0.0725, over 3813533.14 frames. ], batch size: 49, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:08,513 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2179, 2.3257, 2.5544, 3.3279, 2.3108, 3.0565, 2.7252, 2.3744], + device='cuda:3'), covar=tensor([0.4149, 0.3841, 0.1612, 0.2187, 0.4279, 0.1825, 0.4077, 0.2962], + device='cuda:3'), in_proj_covar=tensor([0.0834, 0.0877, 0.0675, 0.0906, 0.0819, 0.0752, 0.0813, 0.0739], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 05:44:29,366 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 05:44:30,896 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:44:56,023 INFO [train.py:903] (3/4) Epoch 15, batch 5400, loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06162, over 19471.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07198, over 3817598.07 frames. ], batch size: 49, lr: 5.48e-03, grad_scale: 8.0 +2023-04-02 05:44:58,255 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.355e+02 6.832e+02 8.412e+02 2.240e+03, threshold=1.366e+03, percent-clipped=7.0 +2023-04-02 05:45:01,686 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:46:00,083 INFO [train.py:903] (3/4) Epoch 15, batch 5450, loss[loss=0.2102, simple_loss=0.2919, pruned_loss=0.06428, over 19765.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2978, pruned_loss=0.07217, over 3826209.28 frames. ], batch size: 54, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:46:30,076 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6111, 2.3610, 1.7773, 1.6432, 2.2290, 1.3462, 1.3908, 1.9440], + device='cuda:3'), covar=tensor([0.1023, 0.0729, 0.0938, 0.0793, 0.0466, 0.1187, 0.0730, 0.0455], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0307, 0.0326, 0.0252, 0.0240, 0.0330, 0.0294, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:47:04,596 INFO [train.py:903] (3/4) Epoch 15, batch 5500, loss[loss=0.2414, simple_loss=0.3131, pruned_loss=0.08488, over 19602.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2978, pruned_loss=0.07184, over 3837041.35 frames. ], batch size: 61, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:47:04,781 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101092.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:47:06,826 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 5.715e+02 6.860e+02 8.623e+02 1.531e+03, threshold=1.372e+03, percent-clipped=1.0 +2023-04-02 05:47:27,320 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 05:47:45,288 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101125.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:47:45,819 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 05:47:58,047 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:48:05,666 INFO [train.py:903] (3/4) Epoch 15, batch 5550, loss[loss=0.268, simple_loss=0.3381, pruned_loss=0.09898, over 19695.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2988, pruned_loss=0.07249, over 3830644.76 frames. ], batch size: 59, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:48:12,793 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 05:48:28,880 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:49:04,239 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-02 05:49:08,930 INFO [train.py:903] (3/4) Epoch 15, batch 5600, loss[loss=0.2206, simple_loss=0.2947, pruned_loss=0.07326, over 19756.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2984, pruned_loss=0.07214, over 3824367.41 frames. ], batch size: 54, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:49:11,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 5.272e+02 6.555e+02 8.017e+02 1.573e+03, threshold=1.311e+03, percent-clipped=2.0 +2023-04-02 05:49:28,272 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101207.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 05:50:11,396 INFO [train.py:903] (3/4) Epoch 15, batch 5650, loss[loss=0.2125, simple_loss=0.2877, pruned_loss=0.06865, over 19600.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2985, pruned_loss=0.07219, over 3831377.74 frames. ], batch size: 50, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:50:59,678 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 05:51:14,087 INFO [train.py:903] (3/4) Epoch 15, batch 5700, loss[loss=0.2085, simple_loss=0.2925, pruned_loss=0.06221, over 19659.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.299, pruned_loss=0.07236, over 3828122.21 frames. ], batch size: 55, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:51:17,702 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.651e+02 6.610e+02 8.419e+02 1.957e+03, threshold=1.322e+03, percent-clipped=7.0 +2023-04-02 05:51:48,363 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0395, 1.9126, 1.7328, 2.0745, 1.8078, 1.8285, 1.6856, 2.0675], + device='cuda:3'), covar=tensor([0.0980, 0.1416, 0.1469, 0.1025, 0.1372, 0.0516, 0.1340, 0.0677], + device='cuda:3'), in_proj_covar=tensor([0.0258, 0.0351, 0.0296, 0.0244, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:52:17,928 INFO [train.py:903] (3/4) Epoch 15, batch 5750, loss[loss=0.2463, simple_loss=0.324, pruned_loss=0.0843, over 19594.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2981, pruned_loss=0.07175, over 3831824.54 frames. ], batch size: 57, lr: 5.47e-03, grad_scale: 8.0 +2023-04-02 05:52:20,191 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 05:52:28,390 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 05:52:33,007 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 05:52:51,725 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2121, 1.3396, 1.7120, 1.2501, 2.5747, 3.4023, 3.1294, 3.5956], + device='cuda:3'), covar=tensor([0.1514, 0.3486, 0.3050, 0.2148, 0.0496, 0.0161, 0.0210, 0.0241], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0304, 0.0334, 0.0253, 0.0225, 0.0170, 0.0208, 0.0224], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:53:10,196 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5645, 1.4510, 1.5205, 1.5420, 3.0744, 1.0420, 2.2038, 3.5511], + device='cuda:3'), covar=tensor([0.0446, 0.2605, 0.2688, 0.1852, 0.0712, 0.2603, 0.1312, 0.0233], + device='cuda:3'), in_proj_covar=tensor([0.0382, 0.0353, 0.0370, 0.0336, 0.0362, 0.0341, 0.0354, 0.0374], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:53:21,257 INFO [train.py:903] (3/4) Epoch 15, batch 5800, loss[loss=0.2149, simple_loss=0.3024, pruned_loss=0.0637, over 19617.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2989, pruned_loss=0.0719, over 3825079.24 frames. ], batch size: 57, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:53:23,494 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.554e+02 5.132e+02 6.075e+02 7.663e+02 2.298e+03, threshold=1.215e+03, percent-clipped=3.0 +2023-04-02 05:54:24,304 INFO [train.py:903] (3/4) Epoch 15, batch 5850, loss[loss=0.2252, simple_loss=0.2988, pruned_loss=0.07581, over 19777.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2972, pruned_loss=0.07125, over 3827791.84 frames. ], batch size: 56, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:54:37,790 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4241, 2.4347, 2.6121, 3.1012, 2.5073, 3.0512, 2.7011, 2.3838], + device='cuda:3'), covar=tensor([0.3338, 0.3050, 0.1457, 0.1940, 0.3182, 0.1492, 0.3211, 0.2398], + device='cuda:3'), in_proj_covar=tensor([0.0836, 0.0882, 0.0679, 0.0910, 0.0823, 0.0756, 0.0815, 0.0744], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 05:54:52,524 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101463.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:54:58,893 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:55:24,168 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101488.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 05:55:28,445 INFO [train.py:903] (3/4) Epoch 15, batch 5900, loss[loss=0.2511, simple_loss=0.3292, pruned_loss=0.08647, over 19684.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2989, pruned_loss=0.07214, over 3820483.28 frames. ], batch size: 59, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:55:30,755 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.163e+02 6.557e+02 7.983e+02 1.612e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-02 05:55:30,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 05:55:51,839 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 05:56:03,796 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9747, 1.8403, 1.7841, 2.1331, 1.8047, 1.8156, 1.9139, 2.0818], + device='cuda:3'), covar=tensor([0.0945, 0.1476, 0.1336, 0.0912, 0.1313, 0.0510, 0.1114, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0259, 0.0352, 0.0296, 0.0245, 0.0297, 0.0244, 0.0291, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 05:56:20,834 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4557, 1.5484, 1.8360, 1.6960, 2.7156, 2.3927, 2.8017, 1.0778], + device='cuda:3'), covar=tensor([0.2317, 0.4201, 0.2452, 0.1835, 0.1394, 0.1923, 0.1400, 0.4065], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0608, 0.0654, 0.0457, 0.0609, 0.0513, 0.0650, 0.0516], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 05:56:30,791 INFO [train.py:903] (3/4) Epoch 15, batch 5950, loss[loss=0.2091, simple_loss=0.2968, pruned_loss=0.06069, over 19686.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2987, pruned_loss=0.07185, over 3826782.32 frames. ], batch size: 58, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:24,063 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 05:57:34,460 INFO [train.py:903] (3/4) Epoch 15, batch 6000, loss[loss=0.2276, simple_loss=0.3, pruned_loss=0.07759, over 19398.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2983, pruned_loss=0.07173, over 3819726.14 frames. ], batch size: 48, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:57:34,460 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 05:57:47,184 INFO [train.py:937] (3/4) Epoch 15, validation: loss=0.1729, simple_loss=0.2735, pruned_loss=0.0362, over 944034.00 frames. +2023-04-02 05:57:47,186 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 05:57:49,630 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.208e+02 6.128e+02 8.316e+02 1.573e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-02 05:58:03,650 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-02 05:58:49,960 INFO [train.py:903] (3/4) Epoch 15, batch 6050, loss[loss=0.1982, simple_loss=0.2718, pruned_loss=0.06235, over 19737.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2978, pruned_loss=0.07145, over 3807417.08 frames. ], batch size: 51, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:52,019 INFO [train.py:903] (3/4) Epoch 15, batch 6100, loss[loss=0.188, simple_loss=0.2596, pruned_loss=0.05818, over 19764.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2963, pruned_loss=0.07068, over 3827875.41 frames. ], batch size: 47, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 05:59:55,083 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.053e+02 5.047e+02 6.326e+02 7.867e+02 1.574e+03, threshold=1.265e+03, percent-clipped=9.0 +2023-04-02 06:00:05,513 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:00:56,528 INFO [train.py:903] (3/4) Epoch 15, batch 6150, loss[loss=0.1949, simple_loss=0.2847, pruned_loss=0.05248, over 19542.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.296, pruned_loss=0.07084, over 3823399.73 frames. 
], batch size: 56, lr: 5.46e-03, grad_scale: 8.0 +2023-04-02 06:01:23,846 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 06:01:59,349 INFO [train.py:903] (3/4) Epoch 15, batch 6200, loss[loss=0.2434, simple_loss=0.3276, pruned_loss=0.0796, over 19668.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2953, pruned_loss=0.07045, over 3826458.52 frames. ], batch size: 58, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:02:01,556 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.959e+02 6.303e+02 7.991e+02 1.526e+03, threshold=1.261e+03, percent-clipped=1.0 +2023-04-02 06:02:57,096 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5536, 1.4655, 1.4167, 1.9369, 1.5538, 1.8821, 1.8863, 1.6953], + device='cuda:3'), covar=tensor([0.0833, 0.0905, 0.1017, 0.0722, 0.0797, 0.0715, 0.0803, 0.0633], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0222, 0.0223, 0.0243, 0.0228, 0.0209, 0.0189, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 06:03:00,196 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101840.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:02,073 INFO [train.py:903] (3/4) Epoch 15, batch 6250, loss[loss=0.1862, simple_loss=0.2525, pruned_loss=0.05999, over 19744.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2959, pruned_loss=0.07128, over 3824366.70 frames. ], batch size: 45, lr: 5.45e-03, grad_scale: 8.0 +2023-04-02 06:03:16,054 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6934, 1.7276, 1.4890, 1.2577, 1.2465, 1.2634, 0.2946, 0.5976], + device='cuda:3'), covar=tensor([0.0749, 0.0651, 0.0455, 0.0718, 0.1380, 0.0929, 0.1177, 0.1195], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0340, 0.0339, 0.0369, 0.0443, 0.0368, 0.0322, 0.0331], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:03:30,465 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 06:03:30,808 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:03:34,313 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3689, 1.4133, 1.7515, 1.5883, 2.4791, 2.1885, 2.6321, 0.9843], + device='cuda:3'), covar=tensor([0.2414, 0.4166, 0.2585, 0.1917, 0.1548, 0.2088, 0.1434, 0.4303], + device='cuda:3'), in_proj_covar=tensor([0.0505, 0.0601, 0.0651, 0.0455, 0.0605, 0.0511, 0.0647, 0.0512], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:04:04,536 INFO [train.py:903] (3/4) Epoch 15, batch 6300, loss[loss=0.2306, simple_loss=0.3068, pruned_loss=0.07719, over 19751.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.296, pruned_loss=0.07138, over 3817135.39 frames. 
], batch size: 63, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:04:07,999 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.483e+02 5.666e+02 6.616e+02 7.934e+02 1.912e+03, threshold=1.323e+03, percent-clipped=2.0 +2023-04-02 06:05:03,701 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101938.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:05:08,252 INFO [train.py:903] (3/4) Epoch 15, batch 6350, loss[loss=0.1887, simple_loss=0.2646, pruned_loss=0.05643, over 14695.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2948, pruned_loss=0.07059, over 3832071.21 frames. ], batch size: 32, lr: 5.45e-03, grad_scale: 2.0 +2023-04-02 06:05:50,374 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-02 06:06:11,828 INFO [train.py:903] (3/4) Epoch 15, batch 6400, loss[loss=0.186, simple_loss=0.2631, pruned_loss=0.05441, over 19738.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2953, pruned_loss=0.07057, over 3839083.84 frames. ], batch size: 51, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:06:13,934 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 06:06:16,594 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.303e+02 4.874e+02 5.936e+02 7.490e+02 2.019e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 06:06:30,758 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:06:52,969 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 06:07:17,087 INFO [train.py:903] (3/4) Epoch 15, batch 6450, loss[loss=0.2508, simple_loss=0.3152, pruned_loss=0.09318, over 19280.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2959, pruned_loss=0.071, over 3846656.79 frames. ], batch size: 44, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:07:23,181 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102046.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:08:03,623 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 06:08:20,717 INFO [train.py:903] (3/4) Epoch 15, batch 6500, loss[loss=0.2511, simple_loss=0.322, pruned_loss=0.09007, over 18193.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2955, pruned_loss=0.07095, over 3852790.18 frames. ], batch size: 83, lr: 5.45e-03, grad_scale: 4.0 +2023-04-02 06:08:25,528 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.860e+02 6.100e+02 7.866e+02 2.286e+03, threshold=1.220e+03, percent-clipped=9.0 +2023-04-02 06:08:26,632 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 06:09:23,524 INFO [train.py:903] (3/4) Epoch 15, batch 6550, loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06104, over 19474.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.296, pruned_loss=0.0714, over 3837532.76 frames. ], batch size: 49, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:09:47,144 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:10:26,479 INFO [train.py:903] (3/4) Epoch 15, batch 6600, loss[loss=0.2244, simple_loss=0.3103, pruned_loss=0.06928, over 19663.00 frames. 
], tot_loss[loss=0.2203, simple_loss=0.2968, pruned_loss=0.07192, over 3829693.74 frames. ], batch size: 60, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:10:31,178 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 4.937e+02 6.611e+02 8.175e+02 1.787e+03, threshold=1.322e+03, percent-clipped=6.0 +2023-04-02 06:10:43,389 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7418, 4.1717, 4.4111, 4.4002, 1.6962, 4.1132, 3.6493, 4.1267], + device='cuda:3'), covar=tensor([0.1427, 0.1012, 0.0591, 0.0606, 0.5547, 0.0872, 0.0624, 0.1043], + device='cuda:3'), in_proj_covar=tensor([0.0737, 0.0671, 0.0880, 0.0756, 0.0785, 0.0628, 0.0528, 0.0816], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 06:11:29,847 INFO [train.py:903] (3/4) Epoch 15, batch 6650, loss[loss=0.1938, simple_loss=0.2631, pruned_loss=0.06221, over 19782.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2963, pruned_loss=0.07141, over 3827035.96 frames. ], batch size: 47, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:11:44,398 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 06:12:20,501 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102282.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:12:33,724 INFO [train.py:903] (3/4) Epoch 15, batch 6700, loss[loss=0.2282, simple_loss=0.3024, pruned_loss=0.077, over 19535.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2964, pruned_loss=0.07132, over 3826977.66 frames. ], batch size: 54, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:12:38,441 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 5.015e+02 6.423e+02 7.841e+02 1.581e+03, threshold=1.285e+03, percent-clipped=1.0 +2023-04-02 06:13:32,252 INFO [train.py:903] (3/4) Epoch 15, batch 6750, loss[loss=0.2438, simple_loss=0.3193, pruned_loss=0.08411, over 19338.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2971, pruned_loss=0.07139, over 3826914.78 frames. ], batch size: 66, lr: 5.44e-03, grad_scale: 4.0 +2023-04-02 06:13:40,336 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:14:30,398 INFO [train.py:903] (3/4) Epoch 15, batch 6800, loss[loss=0.226, simple_loss=0.3064, pruned_loss=0.07284, over 19369.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2967, pruned_loss=0.07127, over 3834641.59 frames. ], batch size: 70, lr: 5.44e-03, grad_scale: 8.0 +2023-04-02 06:14:35,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.944e+02 6.022e+02 7.665e+02 3.022e+03, threshold=1.204e+03, percent-clipped=5.0 +2023-04-02 06:14:37,028 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102397.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:14:57,426 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:15:15,153 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 06:15:16,183 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 06:15:19,105 INFO [train.py:903] (3/4) Epoch 16, batch 0, loss[loss=0.2505, simple_loss=0.3085, pruned_loss=0.09629, over 19459.00 frames. 
], tot_loss[loss=0.2505, simple_loss=0.3085, pruned_loss=0.09629, over 19459.00 frames. ], batch size: 49, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:15:19,105 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 06:15:29,713 INFO [train.py:937] (3/4) Epoch 16, validation: loss=0.1737, simple_loss=0.2745, pruned_loss=0.03646, over 944034.00 frames. +2023-04-02 06:15:29,714 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 06:15:45,587 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 06:15:58,370 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102442.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:06,636 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4111, 1.3356, 1.3321, 1.7019, 1.4118, 1.7005, 1.7951, 1.5690], + device='cuda:3'), covar=tensor([0.0821, 0.0918, 0.0995, 0.0710, 0.0775, 0.0729, 0.0762, 0.0651], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0222, 0.0222, 0.0243, 0.0226, 0.0209, 0.0189, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 06:16:24,719 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:16:33,054 INFO [train.py:903] (3/4) Epoch 16, batch 50, loss[loss=0.2251, simple_loss=0.3023, pruned_loss=0.07393, over 19358.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.297, pruned_loss=0.07161, over 856792.28 frames. ], batch size: 70, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:04,308 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.865e+02 6.426e+02 8.395e+02 1.744e+03, threshold=1.285e+03, percent-clipped=5.0 +2023-04-02 06:17:08,781 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 06:17:29,601 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8074, 1.4581, 1.5711, 1.4906, 3.3379, 0.9950, 2.4129, 3.7888], + device='cuda:3'), covar=tensor([0.0497, 0.2912, 0.2907, 0.1967, 0.0756, 0.2712, 0.1295, 0.0246], + device='cuda:3'), in_proj_covar=tensor([0.0382, 0.0350, 0.0369, 0.0334, 0.0359, 0.0338, 0.0350, 0.0374], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:17:33,675 INFO [train.py:903] (3/4) Epoch 16, batch 100, loss[loss=0.1938, simple_loss=0.2813, pruned_loss=0.05317, over 19521.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2964, pruned_loss=0.07109, over 1528270.28 frames. ], batch size: 56, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:17:47,702 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 06:18:13,827 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8631, 1.9639, 2.2137, 2.5023, 1.8210, 2.3626, 2.3044, 1.9596], + device='cuda:3'), covar=tensor([0.3926, 0.3470, 0.1636, 0.2044, 0.3693, 0.1853, 0.4167, 0.3079], + device='cuda:3'), in_proj_covar=tensor([0.0833, 0.0881, 0.0678, 0.0903, 0.0821, 0.0756, 0.0811, 0.0743], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 06:18:34,759 INFO [train.py:903] (3/4) Epoch 16, batch 150, loss[loss=0.1959, simple_loss=0.2793, pruned_loss=0.05623, over 19533.00 frames. 
], tot_loss[loss=0.2176, simple_loss=0.2945, pruned_loss=0.07032, over 2045462.87 frames. ], batch size: 56, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:06,737 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8292, 1.6333, 1.4522, 1.9167, 1.8134, 1.5942, 1.4495, 1.7974], + device='cuda:3'), covar=tensor([0.1057, 0.1430, 0.1529, 0.0923, 0.1147, 0.0572, 0.1376, 0.0753], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0352, 0.0298, 0.0246, 0.0299, 0.0246, 0.0292, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:19:07,435 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.578e+02 6.638e+02 8.298e+02 1.665e+03, threshold=1.328e+03, percent-clipped=4.0 +2023-04-02 06:19:36,780 INFO [train.py:903] (3/4) Epoch 16, batch 200, loss[loss=0.1977, simple_loss=0.2851, pruned_loss=0.05517, over 19750.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07067, over 2447079.30 frames. ], batch size: 63, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:19:38,849 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 06:20:18,522 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102653.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:20:29,352 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2716, 2.1495, 1.8892, 1.6930, 1.5887, 1.7459, 0.4870, 1.1770], + device='cuda:3'), covar=tensor([0.0500, 0.0488, 0.0384, 0.0698, 0.1066, 0.0735, 0.1129, 0.0828], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0342, 0.0339, 0.0371, 0.0444, 0.0371, 0.0324, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:20:38,278 INFO [train.py:903] (3/4) Epoch 16, batch 250, loss[loss=0.2234, simple_loss=0.307, pruned_loss=0.06991, over 17997.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2958, pruned_loss=0.07078, over 2754158.03 frames. ], batch size: 83, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:20:43,970 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 06:20:51,617 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102678.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:21:12,162 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 5.293e+02 5.976e+02 7.347e+02 1.638e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 06:21:43,459 INFO [train.py:903] (3/4) Epoch 16, batch 300, loss[loss=0.2146, simple_loss=0.2889, pruned_loss=0.07009, over 19712.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2961, pruned_loss=0.07157, over 2968314.05 frames. 
], batch size: 51, lr: 5.26e-03, grad_scale: 8.0 +2023-04-02 06:21:43,893 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102720.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:13,600 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:22:21,954 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0443, 2.1004, 2.3670, 2.8249, 2.0294, 2.6563, 2.5556, 2.1885], + device='cuda:3'), covar=tensor([0.4082, 0.3734, 0.1632, 0.2044, 0.3918, 0.1860, 0.4011, 0.2968], + device='cuda:3'), in_proj_covar=tensor([0.0840, 0.0887, 0.0680, 0.0909, 0.0826, 0.0759, 0.0816, 0.0745], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 06:22:44,953 INFO [train.py:903] (3/4) Epoch 16, batch 350, loss[loss=0.1754, simple_loss=0.2522, pruned_loss=0.04927, over 19393.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2952, pruned_loss=0.07092, over 3161019.31 frames. ], batch size: 48, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:22:47,853 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4624, 1.4909, 1.7034, 1.6095, 2.5500, 2.1576, 2.5224, 1.3238], + device='cuda:3'), covar=tensor([0.2130, 0.3855, 0.2383, 0.1726, 0.1321, 0.1972, 0.1366, 0.3791], + device='cuda:3'), in_proj_covar=tensor([0.0500, 0.0597, 0.0650, 0.0453, 0.0602, 0.0509, 0.0648, 0.0511], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:22:50,857 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 06:23:16,113 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.254e+02 6.186e+02 7.503e+02 2.205e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 06:23:39,980 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 06:23:47,562 INFO [train.py:903] (3/4) Epoch 16, batch 400, loss[loss=0.1724, simple_loss=0.252, pruned_loss=0.04644, over 19762.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2955, pruned_loss=0.0712, over 3312416.05 frames. ], batch size: 47, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:24:49,384 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102869.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:24:50,278 INFO [train.py:903] (3/4) Epoch 16, batch 450, loss[loss=0.2789, simple_loss=0.3347, pruned_loss=0.1115, over 13514.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2963, pruned_loss=0.07198, over 3423187.97 frames. ], batch size: 136, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:25:11,603 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 06:25:22,281 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.837e+02 5.955e+02 8.003e+02 1.401e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 06:25:24,676 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 06:25:25,876 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-02 06:25:30,873 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6353, 1.6392, 1.5606, 1.4262, 1.2865, 1.4071, 0.6748, 0.9743], + device='cuda:3'), covar=tensor([0.0427, 0.0449, 0.0278, 0.0390, 0.0739, 0.0501, 0.0827, 0.0664], + device='cuda:3'), in_proj_covar=tensor([0.0347, 0.0338, 0.0335, 0.0367, 0.0438, 0.0368, 0.0321, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:25:32,994 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:25:52,023 INFO [train.py:903] (3/4) Epoch 16, batch 500, loss[loss=0.2122, simple_loss=0.2943, pruned_loss=0.06508, over 19734.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2973, pruned_loss=0.07196, over 3508798.97 frames. ], batch size: 51, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:26:54,172 INFO [train.py:903] (3/4) Epoch 16, batch 550, loss[loss=0.1967, simple_loss=0.2714, pruned_loss=0.06096, over 19018.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2961, pruned_loss=0.07133, over 3588971.88 frames. ], batch size: 42, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:27:24,958 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.789e+02 5.227e+02 6.443e+02 7.702e+02 1.436e+03, threshold=1.289e+03, percent-clipped=3.0 +2023-04-02 06:27:54,419 INFO [train.py:903] (3/4) Epoch 16, batch 600, loss[loss=0.2258, simple_loss=0.3026, pruned_loss=0.07452, over 17495.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07065, over 3646863.21 frames. ], batch size: 101, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:28:37,068 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 06:28:55,606 INFO [train.py:903] (3/4) Epoch 16, batch 650, loss[loss=0.2061, simple_loss=0.2724, pruned_loss=0.06993, over 18189.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2937, pruned_loss=0.06962, over 3699700.72 frames. ], batch size: 40, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:29:28,782 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.715e+02 6.008e+02 7.942e+02 1.451e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-04-02 06:29:53,904 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103116.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:29:57,413 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.3021, 5.2642, 6.1143, 6.0914, 2.1794, 5.7264, 4.8731, 5.7173], + device='cuda:3'), covar=tensor([0.1324, 0.0622, 0.0487, 0.0452, 0.5190, 0.0503, 0.0557, 0.1068], + device='cuda:3'), in_proj_covar=tensor([0.0733, 0.0666, 0.0876, 0.0749, 0.0775, 0.0619, 0.0523, 0.0804], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 06:29:58,304 INFO [train.py:903] (3/4) Epoch 16, batch 700, loss[loss=0.1724, simple_loss=0.2427, pruned_loss=0.05101, over 19736.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2948, pruned_loss=0.06988, over 3726889.30 frames. 
], batch size: 46, lr: 5.25e-03, grad_scale: 8.0 +2023-04-02 06:30:35,173 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103150.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:31:00,295 INFO [train.py:903] (3/4) Epoch 16, batch 750, loss[loss=0.2506, simple_loss=0.3183, pruned_loss=0.09145, over 19538.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2962, pruned_loss=0.07086, over 3733356.29 frames. ], batch size: 54, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:31:33,687 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.236e+02 6.262e+02 8.063e+02 1.865e+03, threshold=1.252e+03, percent-clipped=5.0 +2023-04-02 06:31:55,101 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:32:03,180 INFO [train.py:903] (3/4) Epoch 16, batch 800, loss[loss=0.1884, simple_loss=0.2677, pruned_loss=0.05453, over 19737.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2962, pruned_loss=0.07104, over 3761940.43 frames. ], batch size: 51, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:32:18,129 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 06:32:36,484 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9140, 1.8579, 1.5554, 2.1045, 1.7511, 1.6889, 1.7452, 1.9076], + device='cuda:3'), covar=tensor([0.0999, 0.1354, 0.1464, 0.0877, 0.1273, 0.0548, 0.1217, 0.0712], + device='cuda:3'), in_proj_covar=tensor([0.0260, 0.0352, 0.0296, 0.0244, 0.0298, 0.0245, 0.0292, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:32:38,496 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103249.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:33:04,743 INFO [train.py:903] (3/4) Epoch 16, batch 850, loss[loss=0.1693, simple_loss=0.2478, pruned_loss=0.04543, over 19741.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2959, pruned_loss=0.07079, over 3785277.65 frames. ], batch size: 46, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:33:38,416 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.865e+02 6.263e+02 7.829e+02 1.710e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 06:33:57,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 06:34:06,720 INFO [train.py:903] (3/4) Epoch 16, batch 900, loss[loss=0.2214, simple_loss=0.2954, pruned_loss=0.07369, over 19682.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2955, pruned_loss=0.07048, over 3791173.58 frames. 
], batch size: 53, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:34:17,413 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:34:52,836 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5802, 1.2479, 1.4553, 1.2292, 2.1819, 0.9459, 2.0629, 2.4372], + device='cuda:3'), covar=tensor([0.0659, 0.2679, 0.2618, 0.1635, 0.0893, 0.2052, 0.0953, 0.0441], + device='cuda:3'), in_proj_covar=tensor([0.0385, 0.0351, 0.0370, 0.0334, 0.0361, 0.0340, 0.0349, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:35:01,527 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:35:08,188 INFO [train.py:903] (3/4) Epoch 16, batch 950, loss[loss=0.2003, simple_loss=0.2823, pruned_loss=0.05915, over 19652.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2952, pruned_loss=0.07041, over 3793344.03 frames. ], batch size: 55, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:35:13,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 06:35:27,470 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103384.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:35:34,305 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5500, 1.1514, 1.3932, 1.2669, 2.1426, 1.0018, 1.9192, 2.4569], + device='cuda:3'), covar=tensor([0.0677, 0.2743, 0.2747, 0.1600, 0.0945, 0.1987, 0.1044, 0.0455], + device='cuda:3'), in_proj_covar=tensor([0.0385, 0.0352, 0.0370, 0.0334, 0.0361, 0.0339, 0.0350, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:35:40,823 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.367e+02 6.234e+02 7.823e+02 2.113e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-02 06:36:12,153 INFO [train.py:903] (3/4) Epoch 16, batch 1000, loss[loss=0.2258, simple_loss=0.3048, pruned_loss=0.07345, over 19685.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2959, pruned_loss=0.07107, over 3803134.28 frames. ], batch size: 53, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:03,122 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103460.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:07,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 06:37:14,586 INFO [train.py:903] (3/4) Epoch 16, batch 1050, loss[loss=0.2383, simple_loss=0.3186, pruned_loss=0.07899, over 19518.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2957, pruned_loss=0.0709, over 3801110.04 frames. ], batch size: 56, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:37:43,711 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:37:45,897 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 5.497e+02 6.450e+02 8.583e+02 2.663e+03, threshold=1.290e+03, percent-clipped=6.0 +2023-04-02 06:37:49,195 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-02 06:38:15,967 INFO [train.py:903] (3/4) Epoch 16, batch 1100, loss[loss=0.2125, simple_loss=0.3018, pruned_loss=0.06156, over 19759.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.07139, over 3803929.76 frames. ], batch size: 54, lr: 5.24e-03, grad_scale: 8.0 +2023-04-02 06:38:54,859 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0362, 1.7735, 1.6820, 2.0807, 1.8229, 1.8829, 1.7941, 1.9987], + device='cuda:3'), covar=tensor([0.0979, 0.1451, 0.1398, 0.0935, 0.1297, 0.0493, 0.1202, 0.0680], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0355, 0.0298, 0.0247, 0.0301, 0.0247, 0.0294, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:39:18,066 INFO [train.py:903] (3/4) Epoch 16, batch 1150, loss[loss=0.1861, simple_loss=0.2672, pruned_loss=0.05248, over 19423.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2944, pruned_loss=0.07001, over 3809862.08 frames. ], batch size: 48, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:39:25,933 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103575.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:37,385 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103584.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:39:50,623 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.809e+02 5.106e+02 6.190e+02 8.719e+02 1.567e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 06:40:06,762 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:06,804 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:21,155 INFO [train.py:903] (3/4) Epoch 16, batch 1200, loss[loss=0.2145, simple_loss=0.289, pruned_loss=0.06998, over 19625.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07064, over 3818357.60 frames. ], batch size: 50, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:40:21,635 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:52,966 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:40:54,977 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 06:41:24,588 INFO [train.py:903] (3/4) Epoch 16, batch 1250, loss[loss=0.2093, simple_loss=0.2831, pruned_loss=0.06772, over 19759.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2944, pruned_loss=0.07021, over 3822935.92 frames. 
], batch size: 51, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:41:56,589 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.406e+02 4.955e+02 6.144e+02 7.729e+02 1.641e+03, threshold=1.229e+03, percent-clipped=4.0 +2023-04-02 06:42:00,283 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5340, 1.3351, 1.4335, 2.0101, 3.0903, 1.5256, 2.3260, 3.5214], + device='cuda:3'), covar=tensor([0.0514, 0.2780, 0.2885, 0.1437, 0.0740, 0.1932, 0.1207, 0.0257], + device='cuda:3'), in_proj_covar=tensor([0.0383, 0.0351, 0.0368, 0.0333, 0.0359, 0.0339, 0.0348, 0.0375], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:42:24,682 INFO [train.py:903] (3/4) Epoch 16, batch 1300, loss[loss=0.2484, simple_loss=0.3224, pruned_loss=0.08724, over 19318.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2945, pruned_loss=0.07017, over 3826583.16 frames. ], batch size: 66, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:42:34,227 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5041, 2.2898, 1.6233, 1.5153, 2.0721, 1.2650, 1.4949, 1.8960], + device='cuda:3'), covar=tensor([0.1016, 0.0671, 0.1076, 0.0763, 0.0528, 0.1212, 0.0691, 0.0433], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0309, 0.0329, 0.0255, 0.0243, 0.0329, 0.0296, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:42:35,157 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103728.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:43:12,241 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:26,072 INFO [train.py:903] (3/4) Epoch 16, batch 1350, loss[loss=0.2173, simple_loss=0.3003, pruned_loss=0.06711, over 19674.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.07031, over 3833526.73 frames. ], batch size: 60, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:43:43,882 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:43:59,437 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.194e+02 5.214e+02 6.633e+02 9.392e+02 2.118e+03, threshold=1.327e+03, percent-clipped=8.0 +2023-04-02 06:44:30,116 INFO [train.py:903] (3/4) Epoch 16, batch 1400, loss[loss=0.2086, simple_loss=0.2968, pruned_loss=0.06025, over 19660.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2951, pruned_loss=0.07025, over 3827867.84 frames. ], batch size: 58, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:44:42,081 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.13 vs. 
limit=5.0 +2023-04-02 06:44:43,973 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103831.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:44:57,959 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103843.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 06:45:14,487 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103856.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:26,693 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:45:32,163 INFO [train.py:903] (3/4) Epoch 16, batch 1450, loss[loss=0.2163, simple_loss=0.2966, pruned_loss=0.06797, over 19741.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2959, pruned_loss=0.07092, over 3827849.31 frames. ], batch size: 51, lr: 5.23e-03, grad_scale: 8.0 +2023-04-02 06:45:32,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 06:45:56,458 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:01,056 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103894.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:46:03,878 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.072e+02 4.634e+02 5.962e+02 7.073e+02 1.523e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-02 06:46:22,702 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5318, 2.3196, 1.6125, 1.4367, 2.1485, 1.2384, 1.2796, 2.0011], + device='cuda:3'), covar=tensor([0.1135, 0.0767, 0.1141, 0.0899, 0.0560, 0.1347, 0.0871, 0.0468], + device='cuda:3'), in_proj_covar=tensor([0.0299, 0.0311, 0.0331, 0.0257, 0.0244, 0.0333, 0.0297, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:46:33,191 INFO [train.py:903] (3/4) Epoch 16, batch 1500, loss[loss=0.268, simple_loss=0.3394, pruned_loss=0.09831, over 19603.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2966, pruned_loss=0.07112, over 3831813.69 frames. ], batch size: 57, lr: 5.23e-03, grad_scale: 16.0 +2023-04-02 06:47:35,230 INFO [train.py:903] (3/4) Epoch 16, batch 1550, loss[loss=0.2449, simple_loss=0.3202, pruned_loss=0.08483, over 18771.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2972, pruned_loss=0.07132, over 3835650.45 frames. 
], batch size: 74, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:48:02,907 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5137, 2.4326, 2.0877, 2.9138, 2.5029, 2.1776, 2.2427, 2.7578], + device='cuda:3'), covar=tensor([0.0925, 0.1668, 0.1437, 0.0889, 0.1316, 0.0525, 0.1239, 0.0590], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0355, 0.0299, 0.0247, 0.0302, 0.0247, 0.0295, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:48:09,263 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.500e+02 6.727e+02 8.636e+02 1.580e+03, threshold=1.345e+03, percent-clipped=8.0 +2023-04-02 06:48:14,274 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4496, 1.3834, 1.3610, 1.8153, 1.4179, 1.7177, 1.7601, 1.5749], + device='cuda:3'), covar=tensor([0.0777, 0.0895, 0.0977, 0.0648, 0.0790, 0.0702, 0.0780, 0.0652], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0222, 0.0224, 0.0243, 0.0225, 0.0210, 0.0191, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 06:48:24,334 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:32,661 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:48:39,348 INFO [train.py:903] (3/4) Epoch 16, batch 1600, loss[loss=0.2009, simple_loss=0.2758, pruned_loss=0.06304, over 19401.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2964, pruned_loss=0.07105, over 3837873.28 frames. ], batch size: 48, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:49:05,527 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 06:49:30,573 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6517, 1.4049, 1.4173, 2.3094, 1.6514, 1.9831, 2.0687, 1.6451], + device='cuda:3'), covar=tensor([0.0843, 0.1039, 0.1129, 0.0802, 0.0887, 0.0757, 0.0896, 0.0754], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0245, 0.0227, 0.0211, 0.0192, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 06:49:38,525 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:49:41,609 INFO [train.py:903] (3/4) Epoch 16, batch 1650, loss[loss=0.2045, simple_loss=0.2902, pruned_loss=0.05942, over 19668.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2969, pruned_loss=0.07158, over 3823494.29 frames. 
], batch size: 55, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:14,819 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.025e+02 5.150e+02 6.179e+02 7.587e+02 1.568e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 06:50:18,602 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104099.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:50:19,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9137, 2.0615, 2.1570, 2.7858, 2.0559, 2.5905, 2.3358, 1.9906], + device='cuda:3'), covar=tensor([0.3909, 0.3321, 0.1693, 0.1898, 0.3516, 0.1716, 0.4194, 0.3050], + device='cuda:3'), in_proj_covar=tensor([0.0842, 0.0889, 0.0683, 0.0914, 0.0827, 0.0764, 0.0816, 0.0748], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 06:50:22,743 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104102.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:50:43,581 INFO [train.py:903] (3/4) Epoch 16, batch 1700, loss[loss=0.2404, simple_loss=0.32, pruned_loss=0.08041, over 19579.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2991, pruned_loss=0.07262, over 3805782.62 frames. ], batch size: 52, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:50:43,968 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9160, 1.1700, 1.5434, 0.5712, 1.9939, 2.4246, 2.1039, 2.6008], + device='cuda:3'), covar=tensor([0.1555, 0.3631, 0.3076, 0.2503, 0.0564, 0.0291, 0.0329, 0.0296], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0308, 0.0336, 0.0257, 0.0229, 0.0173, 0.0209, 0.0229], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 06:50:48,647 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104124.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 06:50:52,856 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104127.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:51:25,773 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 06:51:44,968 INFO [train.py:903] (3/4) Epoch 16, batch 1750, loss[loss=0.2563, simple_loss=0.3231, pruned_loss=0.09475, over 19663.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2991, pruned_loss=0.07321, over 3818745.17 frames. ], batch size: 60, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:52:19,019 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.884e+02 5.867e+02 6.930e+02 2.034e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 06:52:45,665 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104217.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:52:47,903 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4345, 1.3312, 1.5323, 1.4893, 2.9797, 1.0939, 2.3523, 3.3017], + device='cuda:3'), covar=tensor([0.0478, 0.2727, 0.2706, 0.1767, 0.0704, 0.2471, 0.1129, 0.0286], + device='cuda:3'), in_proj_covar=tensor([0.0381, 0.0349, 0.0367, 0.0333, 0.0356, 0.0340, 0.0352, 0.0375], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:52:48,724 INFO [train.py:903] (3/4) Epoch 16, batch 1800, loss[loss=0.2142, simple_loss=0.2921, pruned_loss=0.06813, over 19793.00 frames. 
], tot_loss[loss=0.2214, simple_loss=0.2981, pruned_loss=0.07239, over 3806789.94 frames. ], batch size: 56, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:53:09,937 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:14,912 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:53:24,021 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0905, 1.7567, 1.9035, 2.7732, 2.1307, 2.4095, 2.5965, 2.3862], + device='cuda:3'), covar=tensor([0.0827, 0.1003, 0.1012, 0.0884, 0.0865, 0.0770, 0.0811, 0.0666], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0224, 0.0226, 0.0245, 0.0228, 0.0212, 0.0192, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 06:53:46,177 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 06:53:49,825 INFO [train.py:903] (3/4) Epoch 16, batch 1850, loss[loss=0.2157, simple_loss=0.2824, pruned_loss=0.07446, over 19479.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.297, pruned_loss=0.07163, over 3814946.67 frames. ], batch size: 49, lr: 5.22e-03, grad_scale: 8.0 +2023-04-02 06:54:21,794 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 06:54:22,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.248e+02 6.749e+02 7.716e+02 1.558e+03, threshold=1.350e+03, percent-clipped=5.0 +2023-04-02 06:54:51,925 INFO [train.py:903] (3/4) Epoch 16, batch 1900, loss[loss=0.2145, simple_loss=0.2883, pruned_loss=0.07037, over 19733.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2973, pruned_loss=0.07182, over 3820664.73 frames. ], batch size: 45, lr: 5.22e-03, grad_scale: 4.0 +2023-04-02 06:55:09,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 06:55:15,665 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 06:55:18,494 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 06:55:31,877 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:34,086 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:39,887 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 06:55:41,223 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:55:53,978 INFO [train.py:903] (3/4) Epoch 16, batch 1950, loss[loss=0.2301, simple_loss=0.3123, pruned_loss=0.07393, over 19746.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2956, pruned_loss=0.07041, over 3833280.67 frames. 
], batch size: 63, lr: 5.21e-03, grad_scale: 4.0 +2023-04-02 06:56:00,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4607, 2.4077, 2.2396, 2.8689, 2.5174, 2.2998, 2.1398, 2.5775], + device='cuda:3'), covar=tensor([0.0900, 0.1391, 0.1282, 0.0882, 0.1156, 0.0479, 0.1203, 0.0643], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0354, 0.0300, 0.0248, 0.0301, 0.0249, 0.0294, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 06:56:30,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.688e+02 6.377e+02 8.120e+02 1.703e+03, threshold=1.275e+03, percent-clipped=4.0 +2023-04-02 06:56:46,834 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104411.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:56:58,264 INFO [train.py:903] (3/4) Epoch 16, batch 2000, loss[loss=0.2376, simple_loss=0.3139, pruned_loss=0.08069, over 19359.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2965, pruned_loss=0.0709, over 3825080.21 frames. ], batch size: 70, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:57:57,345 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 06:57:57,646 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:00,776 INFO [train.py:903] (3/4) Epoch 16, batch 2050, loss[loss=0.2055, simple_loss=0.2871, pruned_loss=0.06192, over 19659.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2958, pruned_loss=0.07033, over 3828101.83 frames. ], batch size: 53, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:58:04,673 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:05,676 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:14,089 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 06:58:15,347 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 06:58:35,728 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.885e+02 6.080e+02 8.555e+02 1.693e+03, threshold=1.216e+03, percent-clipped=6.0 +2023-04-02 06:58:36,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:36,188 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104498.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:58:40,139 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 06:59:02,741 INFO [train.py:903] (3/4) Epoch 16, batch 2100, loss[loss=0.2076, simple_loss=0.2748, pruned_loss=0.07015, over 19791.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2967, pruned_loss=0.07075, over 3823393.83 frames. 
], batch size: 47, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 06:59:06,446 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:09,999 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 06:59:33,815 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 06:59:55,527 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 07:00:04,689 INFO [train.py:903] (3/4) Epoch 16, batch 2150, loss[loss=0.2349, simple_loss=0.2971, pruned_loss=0.08635, over 19816.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2967, pruned_loss=0.07114, over 3816522.95 frames. ], batch size: 49, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:00:39,765 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.547e+02 6.907e+02 8.298e+02 2.194e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-04-02 07:00:53,810 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:01:08,291 INFO [train.py:903] (3/4) Epoch 16, batch 2200, loss[loss=0.2143, simple_loss=0.2872, pruned_loss=0.07069, over 19610.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2969, pruned_loss=0.07166, over 3819443.19 frames. ], batch size: 50, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:01:26,427 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:02:09,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3835, 2.1630, 1.5832, 1.4339, 1.9378, 1.1855, 1.3375, 1.8383], + device='cuda:3'), covar=tensor([0.0996, 0.0737, 0.1058, 0.0720, 0.0552, 0.1261, 0.0682, 0.0460], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0309, 0.0328, 0.0255, 0.0244, 0.0332, 0.0292, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:02:12,256 INFO [train.py:903] (3/4) Epoch 16, batch 2250, loss[loss=0.2465, simple_loss=0.318, pruned_loss=0.08746, over 19379.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2962, pruned_loss=0.07184, over 3822518.67 frames. ], batch size: 66, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:02:46,758 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.921e+02 5.898e+02 6.952e+02 1.646e+03, threshold=1.180e+03, percent-clipped=1.0 +2023-04-02 07:03:15,189 INFO [train.py:903] (3/4) Epoch 16, batch 2300, loss[loss=0.206, simple_loss=0.2701, pruned_loss=0.07098, over 19409.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2953, pruned_loss=0.07132, over 3811482.68 frames. ], batch size: 48, lr: 5.21e-03, grad_scale: 8.0 +2023-04-02 07:03:19,197 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104723.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:27,425 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:30,537 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. 
Duration: 26.205 +2023-04-02 07:03:51,576 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104748.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:03:59,127 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 07:03:59,896 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:11,302 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6062, 2.3509, 2.1390, 2.7061, 2.3027, 2.4085, 2.0749, 2.5634], + device='cuda:3'), covar=tensor([0.0868, 0.1593, 0.1341, 0.1020, 0.1395, 0.0450, 0.1258, 0.0603], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0350, 0.0297, 0.0246, 0.0298, 0.0247, 0.0292, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:04:17,962 INFO [train.py:903] (3/4) Epoch 16, batch 2350, loss[loss=0.2047, simple_loss=0.2781, pruned_loss=0.06562, over 19336.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2961, pruned_loss=0.07185, over 3792610.15 frames. ], batch size: 48, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:04:34,937 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104782.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:04:53,849 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.557e+02 5.048e+02 6.738e+02 8.844e+02 1.580e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 07:05:00,896 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 07:05:05,755 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:05:18,361 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 07:05:22,504 INFO [train.py:903] (3/4) Epoch 16, batch 2400, loss[loss=0.21, simple_loss=0.2966, pruned_loss=0.06172, over 19529.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2955, pruned_loss=0.0711, over 3792310.73 frames. ], batch size: 54, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:24,571 INFO [train.py:903] (3/4) Epoch 16, batch 2450, loss[loss=0.1896, simple_loss=0.2639, pruned_loss=0.05764, over 19751.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2949, pruned_loss=0.07085, over 3801873.42 frames. ], batch size: 46, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:06:54,712 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-02 07:07:00,039 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.877e+02 7.599e+02 9.302e+02 2.010e+03, threshold=1.520e+03, percent-clipped=8.0 +2023-04-02 07:07:27,204 INFO [train.py:903] (3/4) Epoch 16, batch 2500, loss[loss=0.1895, simple_loss=0.2814, pruned_loss=0.04877, over 19321.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.295, pruned_loss=0.07096, over 3814294.51 frames. ], batch size: 70, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:08:29,672 INFO [train.py:903] (3/4) Epoch 16, batch 2550, loss[loss=0.1779, simple_loss=0.2696, pruned_loss=0.04313, over 19688.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2954, pruned_loss=0.07081, over 3818537.07 frames. 
], batch size: 53, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:09:05,175 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 5.239e+02 6.384e+02 8.143e+02 1.987e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 07:09:24,638 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 07:09:32,170 INFO [train.py:903] (3/4) Epoch 16, batch 2600, loss[loss=0.2306, simple_loss=0.3108, pruned_loss=0.07519, over 19773.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2961, pruned_loss=0.07084, over 3815334.82 frames. ], batch size: 54, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:35,305 INFO [train.py:903] (3/4) Epoch 16, batch 2650, loss[loss=0.2165, simple_loss=0.2964, pruned_loss=0.06832, over 19373.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2962, pruned_loss=0.07042, over 3820590.27 frames. ], batch size: 66, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:10:54,820 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 07:11:09,899 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 5.317e+02 6.316e+02 8.001e+02 1.365e+03, threshold=1.263e+03, percent-clipped=2.0 +2023-04-02 07:11:36,917 INFO [train.py:903] (3/4) Epoch 16, batch 2700, loss[loss=0.2571, simple_loss=0.3265, pruned_loss=0.09389, over 17387.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2966, pruned_loss=0.07093, over 3811396.34 frames. ], batch size: 101, lr: 5.20e-03, grad_scale: 8.0 +2023-04-02 07:11:55,883 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.72 vs. limit=5.0 +2023-04-02 07:12:39,772 INFO [train.py:903] (3/4) Epoch 16, batch 2750, loss[loss=0.1972, simple_loss=0.2835, pruned_loss=0.0555, over 19656.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2974, pruned_loss=0.07152, over 3815365.19 frames. ], batch size: 58, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:12:52,416 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 07:13:15,058 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 5.170e+02 6.445e+02 8.543e+02 2.677e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-02 07:13:41,785 INFO [train.py:903] (3/4) Epoch 16, batch 2800, loss[loss=0.2251, simple_loss=0.3097, pruned_loss=0.07026, over 19518.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2979, pruned_loss=0.07179, over 3819037.63 frames. ], batch size: 64, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:14:09,832 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-02 07:14:11,888 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3447, 1.4668, 1.8494, 1.6177, 3.0550, 2.5911, 3.3573, 1.4553], + device='cuda:3'), covar=tensor([0.2590, 0.4453, 0.2824, 0.1979, 0.1676, 0.2076, 0.1694, 0.4299], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0603, 0.0658, 0.0458, 0.0604, 0.0511, 0.0650, 0.0518], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 07:14:44,333 INFO [train.py:903] (3/4) Epoch 16, batch 2850, loss[loss=0.2079, simple_loss=0.2952, pruned_loss=0.06029, over 19662.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2967, pruned_loss=0.07113, over 3815202.78 frames. 
], batch size: 55, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:18,938 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.908e+02 5.066e+02 6.361e+02 8.222e+02 2.548e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 07:15:46,248 INFO [train.py:903] (3/4) Epoch 16, batch 2900, loss[loss=0.2072, simple_loss=0.2955, pruned_loss=0.05941, over 19665.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2971, pruned_loss=0.07169, over 3810422.58 frames. ], batch size: 60, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:15:46,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 07:16:35,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7291, 1.3246, 1.4739, 1.4422, 3.2564, 0.9412, 2.2980, 3.6758], + device='cuda:3'), covar=tensor([0.0446, 0.2851, 0.2822, 0.1861, 0.0692, 0.2611, 0.1269, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0383, 0.0350, 0.0367, 0.0333, 0.0357, 0.0339, 0.0353, 0.0374], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:16:48,802 INFO [train.py:903] (3/4) Epoch 16, batch 2950, loss[loss=0.236, simple_loss=0.3089, pruned_loss=0.08156, over 16460.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2959, pruned_loss=0.07065, over 3824968.77 frames. ], batch size: 36, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:23,862 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.947e+02 6.195e+02 7.724e+02 2.015e+03, threshold=1.239e+03, percent-clipped=3.0 +2023-04-02 07:17:33,621 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:17:42,529 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3392, 1.3607, 1.7331, 1.5072, 2.4864, 2.2188, 2.6449, 0.9692], + device='cuda:3'), covar=tensor([0.2369, 0.4073, 0.2451, 0.1878, 0.1477, 0.1964, 0.1431, 0.4184], + device='cuda:3'), in_proj_covar=tensor([0.0513, 0.0612, 0.0666, 0.0464, 0.0611, 0.0515, 0.0655, 0.0524], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 07:17:50,878 INFO [train.py:903] (3/4) Epoch 16, batch 3000, loss[loss=0.2482, simple_loss=0.3233, pruned_loss=0.0865, over 18721.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2964, pruned_loss=0.07117, over 3812062.70 frames. ], batch size: 74, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:17:50,878 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 07:18:04,139 INFO [train.py:937] (3/4) Epoch 16, validation: loss=0.1725, simple_loss=0.273, pruned_loss=0.03604, over 944034.00 frames. +2023-04-02 07:18:04,140 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 07:18:07,763 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 07:19:07,115 INFO [train.py:903] (3/4) Epoch 16, batch 3050, loss[loss=0.2013, simple_loss=0.2858, pruned_loss=0.05846, over 19574.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2957, pruned_loss=0.07072, over 3814502.01 frames. 
], batch size: 52, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:19:24,531 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3747, 2.1361, 1.9474, 1.8242, 1.6818, 1.8620, 0.3885, 1.2018], + device='cuda:3'), covar=tensor([0.0457, 0.0529, 0.0413, 0.0701, 0.0972, 0.0835, 0.1217, 0.0915], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0344, 0.0341, 0.0370, 0.0443, 0.0371, 0.0321, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 07:19:41,624 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.577e+02 4.913e+02 6.190e+02 8.953e+02 2.496e+03, threshold=1.238e+03, percent-clipped=7.0 +2023-04-02 07:19:49,939 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:11,582 INFO [train.py:903] (3/4) Epoch 16, batch 3100, loss[loss=0.195, simple_loss=0.2707, pruned_loss=0.05961, over 19778.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2952, pruned_loss=0.07037, over 3811767.48 frames. ], batch size: 46, lr: 5.19e-03, grad_scale: 8.0 +2023-04-02 07:20:11,898 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105520.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:20:47,395 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9870, 2.6698, 2.7473, 3.1358, 2.8827, 2.6319, 2.6869, 3.0220], + device='cuda:3'), covar=tensor([0.0760, 0.1510, 0.1170, 0.0885, 0.1212, 0.0438, 0.1016, 0.0507], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0357, 0.0302, 0.0249, 0.0301, 0.0252, 0.0298, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:21:13,220 INFO [train.py:903] (3/4) Epoch 16, batch 3150, loss[loss=0.1807, simple_loss=0.2541, pruned_loss=0.05364, over 18162.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07057, over 3809884.77 frames. ], batch size: 40, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:21:41,364 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 07:21:46,679 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.014e+02 6.069e+02 7.545e+02 2.509e+03, threshold=1.214e+03, percent-clipped=2.0 +2023-04-02 07:22:12,921 INFO [train.py:903] (3/4) Epoch 16, batch 3200, loss[loss=0.2408, simple_loss=0.3234, pruned_loss=0.07913, over 19489.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2957, pruned_loss=0.07074, over 3810975.27 frames. ], batch size: 64, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:15,482 INFO [train.py:903] (3/4) Epoch 16, batch 3250, loss[loss=0.1848, simple_loss=0.2588, pruned_loss=0.05543, over 18647.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2963, pruned_loss=0.07071, over 3807197.41 frames. ], batch size: 41, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:23:50,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 5.141e+02 5.939e+02 7.859e+02 1.653e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-02 07:24:19,059 INFO [train.py:903] (3/4) Epoch 16, batch 3300, loss[loss=0.2588, simple_loss=0.3317, pruned_loss=0.09293, over 19491.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2984, pruned_loss=0.07231, over 3792098.84 frames. 
], batch size: 64, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:24:22,587 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 07:24:55,822 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:25:18,078 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3888, 4.0128, 2.4572, 3.5690, 0.8898, 3.7855, 3.7885, 3.8674], + device='cuda:3'), covar=tensor([0.0724, 0.1025, 0.2071, 0.0865, 0.4008, 0.0754, 0.0928, 0.1025], + device='cuda:3'), in_proj_covar=tensor([0.0471, 0.0387, 0.0463, 0.0327, 0.0393, 0.0398, 0.0395, 0.0427], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:25:21,448 INFO [train.py:903] (3/4) Epoch 16, batch 3350, loss[loss=0.1859, simple_loss=0.265, pruned_loss=0.05343, over 19484.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2954, pruned_loss=0.07065, over 3807900.36 frames. ], batch size: 49, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:25:57,715 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.066e+02 6.242e+02 8.095e+02 2.652e+03, threshold=1.248e+03, percent-clipped=5.0 +2023-04-02 07:26:12,282 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:20,550 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105817.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:26:23,648 INFO [train.py:903] (3/4) Epoch 16, batch 3400, loss[loss=0.2508, simple_loss=0.3206, pruned_loss=0.0905, over 17446.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2952, pruned_loss=0.07078, over 3819499.25 frames. ], batch size: 101, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:00,024 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:18,424 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=105864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:19,648 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105865.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:27:26,110 INFO [train.py:903] (3/4) Epoch 16, batch 3450, loss[loss=0.2018, simple_loss=0.2764, pruned_loss=0.06358, over 19384.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2953, pruned_loss=0.07068, over 3819010.53 frames. ], batch size: 48, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:27:29,354 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 07:28:00,500 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.355e+02 6.157e+02 7.690e+02 1.854e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-02 07:28:00,875 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8795, 1.2622, 1.5501, 1.4777, 3.4226, 1.0382, 2.4189, 3.8440], + device='cuda:3'), covar=tensor([0.0469, 0.2750, 0.2637, 0.1927, 0.0731, 0.2574, 0.1183, 0.0235], + device='cuda:3'), in_proj_covar=tensor([0.0384, 0.0351, 0.0369, 0.0335, 0.0357, 0.0340, 0.0353, 0.0375], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:28:29,106 INFO [train.py:903] (3/4) Epoch 16, batch 3500, loss[loss=0.2241, simple_loss=0.3018, pruned_loss=0.07321, over 19530.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2951, pruned_loss=0.0704, over 3822218.27 frames. ], batch size: 54, lr: 5.18e-03, grad_scale: 8.0 +2023-04-02 07:29:23,595 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105963.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:29:31,025 INFO [train.py:903] (3/4) Epoch 16, batch 3550, loss[loss=0.191, simple_loss=0.2682, pruned_loss=0.05689, over 19760.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2962, pruned_loss=0.07072, over 3821528.19 frames. ], batch size: 47, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:29:32,020 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.74 vs. limit=5.0 +2023-04-02 07:29:42,152 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=105979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:30:06,597 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 5.018e+02 6.219e+02 7.272e+02 1.453e+03, threshold=1.244e+03, percent-clipped=3.0 +2023-04-02 07:30:21,859 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5872, 1.1490, 1.4461, 1.2350, 2.2703, 1.0065, 2.1161, 2.4941], + device='cuda:3'), covar=tensor([0.0665, 0.2604, 0.2518, 0.1537, 0.0827, 0.1911, 0.0889, 0.0415], + device='cuda:3'), in_proj_covar=tensor([0.0387, 0.0353, 0.0371, 0.0337, 0.0360, 0.0342, 0.0356, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:30:34,289 INFO [train.py:903] (3/4) Epoch 16, batch 3600, loss[loss=0.208, simple_loss=0.2897, pruned_loss=0.06317, over 18137.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2961, pruned_loss=0.07105, over 3802408.28 frames. ], batch size: 83, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:31:16,237 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.29 vs. limit=5.0 +2023-04-02 07:31:37,393 INFO [train.py:903] (3/4) Epoch 16, batch 3650, loss[loss=0.1851, simple_loss=0.2766, pruned_loss=0.04683, over 19732.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2961, pruned_loss=0.07105, over 3782048.74 frames. 
], batch size: 63, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:09,805 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:32:13,045 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 5.371e+02 6.801e+02 8.277e+02 1.518e+03, threshold=1.360e+03, percent-clipped=5.0 +2023-04-02 07:32:42,529 INFO [train.py:903] (3/4) Epoch 16, batch 3700, loss[loss=0.2056, simple_loss=0.279, pruned_loss=0.0661, over 19409.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2957, pruned_loss=0.07052, over 3802649.44 frames. ], batch size: 48, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:32:44,125 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:13,573 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:23,614 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:34,065 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:33:44,495 INFO [train.py:903] (3/4) Epoch 16, batch 3750, loss[loss=0.2243, simple_loss=0.3021, pruned_loss=0.07331, over 19688.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2957, pruned_loss=0.07032, over 3798408.18 frames. ], batch size: 53, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:34:19,089 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.705e+02 5.517e+02 6.973e+02 1.532e+03, threshold=1.103e+03, percent-clipped=3.0 +2023-04-02 07:34:45,466 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:34:46,184 INFO [train.py:903] (3/4) Epoch 16, batch 3800, loss[loss=0.2004, simple_loss=0.2688, pruned_loss=0.06601, over 19268.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2963, pruned_loss=0.07068, over 3803012.02 frames. ], batch size: 44, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:06,477 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106235.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:18,166 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 07:35:18,535 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106244.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:36,771 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106260.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,045 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106269.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:35:48,819 INFO [train.py:903] (3/4) Epoch 16, batch 3850, loss[loss=0.2026, simple_loss=0.2783, pruned_loss=0.06341, over 19840.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2956, pruned_loss=0.07034, over 3812091.29 frames. 
], batch size: 52, lr: 5.17e-03, grad_scale: 8.0 +2023-04-02 07:35:56,876 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106276.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:09,604 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:36:23,154 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 5.267e+02 6.364e+02 7.734e+02 1.610e+03, threshold=1.273e+03, percent-clipped=5.0 +2023-04-02 07:36:50,364 INFO [train.py:903] (3/4) Epoch 16, batch 3900, loss[loss=0.2465, simple_loss=0.3018, pruned_loss=0.09562, over 19100.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2951, pruned_loss=0.06978, over 3818889.18 frames. ], batch size: 42, lr: 5.17e-03, grad_scale: 16.0 +2023-04-02 07:37:32,215 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106354.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:37:53,351 INFO [train.py:903] (3/4) Epoch 16, batch 3950, loss[loss=0.2102, simple_loss=0.2758, pruned_loss=0.07227, over 18999.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2959, pruned_loss=0.07041, over 3815811.57 frames. ], batch size: 42, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:37:58,031 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 07:38:28,357 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.931e+02 6.019e+02 7.839e+02 1.392e+03, threshold=1.204e+03, percent-clipped=3.0 +2023-04-02 07:38:54,928 INFO [train.py:903] (3/4) Epoch 16, batch 4000, loss[loss=0.1671, simple_loss=0.2439, pruned_loss=0.04516, over 19370.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2956, pruned_loss=0.07033, over 3822016.70 frames. ], batch size: 47, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:19,380 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:39:19,552 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5668, 1.2679, 1.5290, 1.1406, 2.1663, 0.9396, 1.9794, 2.5029], + device='cuda:3'), covar=tensor([0.0694, 0.2564, 0.2423, 0.1744, 0.0886, 0.2146, 0.1134, 0.0431], + device='cuda:3'), in_proj_covar=tensor([0.0386, 0.0350, 0.0370, 0.0337, 0.0360, 0.0340, 0.0354, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:39:45,494 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 07:39:56,769 INFO [train.py:903] (3/4) Epoch 16, batch 4050, loss[loss=0.2057, simple_loss=0.2956, pruned_loss=0.05792, over 19584.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2952, pruned_loss=0.06996, over 3825927.03 frames. ], batch size: 61, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:39:59,422 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:40:34,181 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.509e+02 4.912e+02 6.033e+02 8.007e+02 1.551e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-02 07:40:59,898 INFO [train.py:903] (3/4) Epoch 16, batch 4100, loss[loss=0.1918, simple_loss=0.2785, pruned_loss=0.05258, over 19412.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.0699, over 3826940.57 frames. 
], batch size: 48, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:41:07,043 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106525.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:15,923 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:34,839 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 07:41:36,488 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106550.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:42,005 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106554.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:41:45,563 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:42:02,455 INFO [train.py:903] (3/4) Epoch 16, batch 4150, loss[loss=0.2314, simple_loss=0.3, pruned_loss=0.08143, over 19575.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2965, pruned_loss=0.07076, over 3809271.47 frames. ], batch size: 52, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:42:36,771 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.926e+02 6.073e+02 7.216e+02 1.422e+03, threshold=1.215e+03, percent-clipped=1.0 +2023-04-02 07:43:03,953 INFO [train.py:903] (3/4) Epoch 16, batch 4200, loss[loss=0.2579, simple_loss=0.3229, pruned_loss=0.09649, over 13237.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2965, pruned_loss=0.07102, over 3804809.48 frames. ], batch size: 136, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:43:11,041 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 07:43:15,924 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:05,149 INFO [train.py:903] (3/4) Epoch 16, batch 4250, loss[loss=0.2535, simple_loss=0.3272, pruned_loss=0.08993, over 19690.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2953, pruned_loss=0.07027, over 3824109.60 frames. ], batch size: 58, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:44:20,464 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 07:44:32,722 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 07:44:33,381 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 07:44:41,261 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:44:42,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.502e+02 4.922e+02 6.114e+02 7.451e+02 1.808e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 07:45:08,533 INFO [train.py:903] (3/4) Epoch 16, batch 4300, loss[loss=0.1743, simple_loss=0.2533, pruned_loss=0.04768, over 19393.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2956, pruned_loss=0.07071, over 3799805.69 frames. 
], batch size: 48, lr: 5.16e-03, grad_scale: 8.0 +2023-04-02 07:45:39,051 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106745.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:45:58,136 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 07:46:11,648 INFO [train.py:903] (3/4) Epoch 16, batch 4350, loss[loss=0.217, simple_loss=0.3001, pruned_loss=0.06691, over 19517.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2964, pruned_loss=0.07154, over 3785218.59 frames. ], batch size: 54, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:46:28,690 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6350, 4.2208, 2.7274, 3.7112, 1.1105, 4.0570, 4.0056, 4.0949], + device='cuda:3'), covar=tensor([0.0636, 0.1052, 0.1971, 0.0841, 0.3875, 0.0727, 0.0835, 0.0862], + device='cuda:3'), in_proj_covar=tensor([0.0468, 0.0385, 0.0463, 0.0328, 0.0390, 0.0397, 0.0395, 0.0427], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:46:30,457 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-02 07:46:46,925 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.102e+02 6.153e+02 8.101e+02 2.041e+03, threshold=1.231e+03, percent-clipped=8.0 +2023-04-02 07:47:03,099 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:05,391 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 07:47:06,265 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106813.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:09,657 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106816.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:14,208 INFO [train.py:903] (3/4) Epoch 16, batch 4400, loss[loss=0.1944, simple_loss=0.2716, pruned_loss=0.05865, over 19405.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2961, pruned_loss=0.07165, over 3798789.28 frames. ], batch size: 48, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:47:33,164 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106835.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:47:36,427 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 07:47:46,749 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. 
Duration: 27.02225 +2023-04-02 07:47:48,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0206, 2.1067, 2.3764, 2.8188, 2.1202, 2.7313, 2.4551, 2.1304], + device='cuda:3'), covar=tensor([0.4032, 0.3767, 0.1725, 0.2159, 0.3803, 0.1884, 0.4273, 0.3226], + device='cuda:3'), in_proj_covar=tensor([0.0839, 0.0887, 0.0678, 0.0899, 0.0820, 0.0757, 0.0808, 0.0742], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 07:47:57,687 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6671, 1.7611, 1.9525, 2.0933, 1.6254, 2.0451, 2.0246, 1.8626], + device='cuda:3'), covar=tensor([0.3520, 0.3059, 0.1619, 0.1803, 0.2920, 0.1670, 0.4200, 0.2881], + device='cuda:3'), in_proj_covar=tensor([0.0839, 0.0887, 0.0678, 0.0899, 0.0820, 0.0757, 0.0807, 0.0741], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 07:48:17,268 INFO [train.py:903] (3/4) Epoch 16, batch 4450, loss[loss=0.2212, simple_loss=0.3018, pruned_loss=0.07034, over 19769.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2962, pruned_loss=0.07116, over 3802873.73 frames. ], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:48:53,921 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.966e+02 6.259e+02 8.420e+02 1.632e+03, threshold=1.252e+03, percent-clipped=6.0 +2023-04-02 07:49:17,344 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-02 07:49:18,989 INFO [train.py:903] (3/4) Epoch 16, batch 4500, loss[loss=0.221, simple_loss=0.303, pruned_loss=0.06953, over 19530.00 frames. ], tot_loss[loss=0.219, simple_loss=0.296, pruned_loss=0.07099, over 3815572.63 frames. ], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:49:34,435 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106931.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:50:23,578 INFO [train.py:903] (3/4) Epoch 16, batch 4550, loss[loss=0.18, simple_loss=0.2609, pruned_loss=0.04955, over 19374.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2951, pruned_loss=0.07042, over 3817700.07 frames. ], batch size: 47, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:50:31,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 07:50:54,367 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 07:50:59,933 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.264e+02 4.883e+02 5.860e+02 7.136e+02 1.225e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-02 07:51:02,643 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107001.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:51:27,832 INFO [train.py:903] (3/4) Epoch 16, batch 4600, loss[loss=0.2128, simple_loss=0.2974, pruned_loss=0.06416, over 19489.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2945, pruned_loss=0.07001, over 3821975.58 frames. ], batch size: 64, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:51:34,883 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107026.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:06,241 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 07:52:29,092 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107069.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:52:29,802 INFO [train.py:903] (3/4) Epoch 16, batch 4650, loss[loss=0.1993, simple_loss=0.2747, pruned_loss=0.062, over 19370.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2959, pruned_loss=0.07089, over 3799759.14 frames. ], batch size: 47, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:52:47,217 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 07:52:59,792 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 07:53:01,362 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:53:02,739 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 07:53:07,462 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.220e+02 5.459e+02 6.581e+02 8.910e+02 1.601e+03, threshold=1.316e+03, percent-clipped=6.0 +2023-04-02 07:53:31,770 INFO [train.py:903] (3/4) Epoch 16, batch 4700, loss[loss=0.2173, simple_loss=0.3073, pruned_loss=0.06367, over 19661.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2963, pruned_loss=0.07116, over 3791532.59 frames. ], batch size: 60, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:53:55,964 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 07:54:20,584 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107158.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 07:54:36,812 INFO [train.py:903] (3/4) Epoch 16, batch 4750, loss[loss=0.2043, simple_loss=0.2851, pruned_loss=0.06176, over 19574.00 frames. ], tot_loss[loss=0.219, simple_loss=0.296, pruned_loss=0.07101, over 3800885.59 frames. 
], batch size: 61, lr: 5.15e-03, grad_scale: 8.0 +2023-04-02 07:54:37,123 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107170.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:54:58,937 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:03,366 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8949, 1.9466, 1.8826, 1.7611, 1.6166, 1.8001, 1.0484, 1.3539], + device='cuda:3'), covar=tensor([0.0452, 0.0481, 0.0329, 0.0496, 0.0726, 0.0573, 0.0876, 0.0682], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0346, 0.0344, 0.0370, 0.0446, 0.0377, 0.0325, 0.0332], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 07:55:11,308 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2119, 2.2383, 2.4286, 2.9439, 2.2305, 2.7617, 2.5511, 2.1384], + device='cuda:3'), covar=tensor([0.4225, 0.4013, 0.1795, 0.2558, 0.4279, 0.2172, 0.4321, 0.3384], + device='cuda:3'), in_proj_covar=tensor([0.0843, 0.0891, 0.0680, 0.0906, 0.0826, 0.0764, 0.0810, 0.0745], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 07:55:11,905 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.171e+02 5.545e+02 6.621e+02 8.650e+02 1.971e+03, threshold=1.324e+03, percent-clipped=6.0 +2023-04-02 07:55:30,505 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 07:55:40,203 INFO [train.py:903] (3/4) Epoch 16, batch 4800, loss[loss=0.2074, simple_loss=0.2819, pruned_loss=0.0665, over 19608.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2967, pruned_loss=0.07146, over 3804533.95 frames. ], batch size: 50, lr: 5.14e-03, grad_scale: 8.0 +2023-04-02 07:56:41,878 INFO [train.py:903] (3/4) Epoch 16, batch 4850, loss[loss=0.2458, simple_loss=0.3156, pruned_loss=0.08797, over 18131.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2972, pruned_loss=0.07172, over 3804683.52 frames. ], batch size: 83, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:07,076 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 07:57:19,837 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.930e+02 6.295e+02 8.478e+02 1.665e+03, threshold=1.259e+03, percent-clipped=1.0 +2023-04-02 07:57:27,492 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 07:57:32,767 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 07:57:32,791 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 07:57:43,095 INFO [train.py:903] (3/4) Epoch 16, batch 4900, loss[loss=0.2209, simple_loss=0.3043, pruned_loss=0.06876, over 19392.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2972, pruned_loss=0.07186, over 3794437.04 frames. ], batch size: 70, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:57:43,111 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 07:57:49,619 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.01 vs. 
limit=5.0 +2023-04-02 07:58:04,169 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 07:58:46,431 INFO [train.py:903] (3/4) Epoch 16, batch 4950, loss[loss=0.2124, simple_loss=0.3, pruned_loss=0.06242, over 19794.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2962, pruned_loss=0.07148, over 3816340.44 frames. ], batch size: 56, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:04,243 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 07:59:17,589 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4311, 1.0194, 1.3133, 1.3923, 2.7717, 0.9902, 2.2399, 3.2493], + device='cuda:3'), covar=tensor([0.0682, 0.3614, 0.3557, 0.2225, 0.1218, 0.3045, 0.1419, 0.0456], + device='cuda:3'), in_proj_covar=tensor([0.0385, 0.0351, 0.0374, 0.0334, 0.0359, 0.0340, 0.0356, 0.0376], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 07:59:22,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 5.680e+02 6.678e+02 8.404e+02 2.020e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-04-02 07:59:27,620 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 07:59:48,957 INFO [train.py:903] (3/4) Epoch 16, batch 5000, loss[loss=0.2413, simple_loss=0.3142, pruned_loss=0.08419, over 19070.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2955, pruned_loss=0.07088, over 3823413.79 frames. ], batch size: 69, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 07:59:58,954 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 08:00:09,006 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 08:00:50,465 INFO [train.py:903] (3/4) Epoch 16, batch 5050, loss[loss=0.2252, simple_loss=0.3064, pruned_loss=0.07204, over 19528.00 frames. ], tot_loss[loss=0.219, simple_loss=0.296, pruned_loss=0.07101, over 3830092.85 frames. ], batch size: 54, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:01:24,839 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9360, 1.1961, 1.5867, 0.5928, 2.1930, 2.4558, 2.1800, 2.6552], + device='cuda:3'), covar=tensor([0.1565, 0.3606, 0.3048, 0.2524, 0.0539, 0.0279, 0.0321, 0.0321], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0306, 0.0335, 0.0257, 0.0229, 0.0174, 0.0210, 0.0230], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:01:27,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.470e+02 6.454e+02 8.047e+02 2.188e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 08:01:27,918 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 08:01:30,407 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107502.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:01:44,972 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:01:51,915 INFO [train.py:903] (3/4) Epoch 16, batch 5100, loss[loss=0.2236, simple_loss=0.3046, pruned_loss=0.07129, over 18716.00 frames. 
], tot_loss[loss=0.2193, simple_loss=0.2959, pruned_loss=0.07136, over 3829925.79 frames. ], batch size: 74, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:02:02,196 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107528.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:02:04,286 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 08:02:08,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 08:02:13,328 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 08:02:54,727 INFO [train.py:903] (3/4) Epoch 16, batch 5150, loss[loss=0.2011, simple_loss=0.2818, pruned_loss=0.06026, over 19784.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2957, pruned_loss=0.07132, over 3827703.08 frames. ], batch size: 56, lr: 5.14e-03, grad_scale: 4.0 +2023-04-02 08:03:08,976 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:03:31,801 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.461e+02 4.950e+02 6.087e+02 7.766e+02 1.818e+03, threshold=1.217e+03, percent-clipped=6.0 +2023-04-02 08:03:43,250 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:03:54,459 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107617.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:03:57,185 INFO [train.py:903] (3/4) Epoch 16, batch 5200, loss[loss=0.2503, simple_loss=0.3223, pruned_loss=0.08916, over 19567.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2956, pruned_loss=0.07097, over 3829602.16 frames. ], batch size: 61, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:04:08,674 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:04:09,569 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 08:04:09,732 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.2803, 3.9507, 3.0946, 3.4435, 1.7042, 3.7387, 3.6936, 3.8835], + device='cuda:3'), covar=tensor([0.0731, 0.1039, 0.1832, 0.0952, 0.3224, 0.0886, 0.0934, 0.1333], + device='cuda:3'), in_proj_covar=tensor([0.0470, 0.0388, 0.0466, 0.0331, 0.0393, 0.0402, 0.0400, 0.0429], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:04:25,017 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-02 08:04:55,318 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 08:04:59,577 INFO [train.py:903] (3/4) Epoch 16, batch 5250, loss[loss=0.204, simple_loss=0.2732, pruned_loss=0.06742, over 19741.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2962, pruned_loss=0.0714, over 3828538.55 frames. ], batch size: 51, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:05:07,732 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:05:24,224 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. 
limit=2.0 +2023-04-02 08:05:36,449 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 5.464e+02 6.488e+02 8.647e+02 1.622e+03, threshold=1.298e+03, percent-clipped=8.0 +2023-04-02 08:05:37,904 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1736, 2.1154, 1.9801, 1.7454, 1.6563, 1.8180, 0.7707, 1.2900], + device='cuda:3'), covar=tensor([0.0548, 0.0532, 0.0362, 0.0693, 0.0917, 0.0760, 0.1031, 0.0846], + device='cuda:3'), in_proj_covar=tensor([0.0346, 0.0340, 0.0340, 0.0366, 0.0440, 0.0371, 0.0320, 0.0328], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:05:50,343 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4367, 2.2460, 2.2292, 2.5900, 2.3756, 2.0950, 1.9856, 2.4081], + device='cuda:3'), covar=tensor([0.0869, 0.1428, 0.1156, 0.0850, 0.1174, 0.0480, 0.1199, 0.0586], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0352, 0.0301, 0.0243, 0.0298, 0.0248, 0.0290, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:06:00,484 INFO [train.py:903] (3/4) Epoch 16, batch 5300, loss[loss=0.2062, simple_loss=0.2822, pruned_loss=0.06514, over 19609.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2966, pruned_loss=0.07188, over 3818799.27 frames. ], batch size: 50, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:06:19,332 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 08:06:45,194 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:07:03,291 INFO [train.py:903] (3/4) Epoch 16, batch 5350, loss[loss=0.2075, simple_loss=0.2751, pruned_loss=0.06999, over 19711.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2962, pruned_loss=0.07164, over 3818556.98 frames. ], batch size: 45, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:07:37,249 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 08:07:40,607 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.506e+02 5.884e+02 6.805e+02 1.610e+03, threshold=1.177e+03, percent-clipped=2.0 +2023-04-02 08:08:06,496 INFO [train.py:903] (3/4) Epoch 16, batch 5400, loss[loss=0.2116, simple_loss=0.2838, pruned_loss=0.06976, over 19385.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2962, pruned_loss=0.07161, over 3812833.43 frames. ], batch size: 47, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:08:41,526 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3981, 1.4777, 1.7537, 1.6369, 3.2366, 2.7424, 3.5154, 1.6862], + device='cuda:3'), covar=tensor([0.2488, 0.4252, 0.2723, 0.1923, 0.1426, 0.1844, 0.1524, 0.3739], + device='cuda:3'), in_proj_covar=tensor([0.0510, 0.0611, 0.0663, 0.0463, 0.0609, 0.0512, 0.0651, 0.0520], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:09:08,397 INFO [train.py:903] (3/4) Epoch 16, batch 5450, loss[loss=0.2513, simple_loss=0.328, pruned_loss=0.08733, over 18926.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2949, pruned_loss=0.07091, over 3818428.90 frames. 
], batch size: 75, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:09:10,589 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107872.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:11,984 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107873.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:09:26,840 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:09:44,605 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107898.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:09:46,448 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.026e+02 5.228e+02 6.592e+02 8.752e+02 1.860e+03, threshold=1.318e+03, percent-clipped=11.0 +2023-04-02 08:09:58,385 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:10:10,032 INFO [train.py:903] (3/4) Epoch 16, batch 5500, loss[loss=0.1867, simple_loss=0.2538, pruned_loss=0.05975, over 19102.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2949, pruned_loss=0.07141, over 3800420.80 frames. ], batch size: 42, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:10:18,750 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4444, 1.3496, 1.3083, 1.6989, 1.4490, 1.7034, 1.7557, 1.5461], + device='cuda:3'), covar=tensor([0.0857, 0.0984, 0.1129, 0.0799, 0.0826, 0.0755, 0.0781, 0.0719], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0221, 0.0223, 0.0245, 0.0226, 0.0207, 0.0189, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 08:10:34,930 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 08:11:13,118 INFO [train.py:903] (3/4) Epoch 16, batch 5550, loss[loss=0.2192, simple_loss=0.2881, pruned_loss=0.07509, over 19405.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2953, pruned_loss=0.07146, over 3799343.45 frames. ], batch size: 48, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:11:18,629 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 08:11:33,206 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6143, 4.1524, 2.7000, 3.7215, 0.9695, 4.0385, 3.9730, 4.1025], + device='cuda:3'), covar=tensor([0.0667, 0.1119, 0.1915, 0.0785, 0.4084, 0.0737, 0.0859, 0.1084], + device='cuda:3'), in_proj_covar=tensor([0.0470, 0.0385, 0.0463, 0.0329, 0.0392, 0.0402, 0.0399, 0.0429], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:11:34,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107987.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:11:50,037 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.933e+02 6.287e+02 7.608e+02 2.106e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-02 08:12:10,700 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 08:12:17,773 INFO [train.py:903] (3/4) Epoch 16, batch 5600, loss[loss=0.1949, simple_loss=0.2775, pruned_loss=0.05619, over 19685.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2954, pruned_loss=0.07091, over 3826374.33 frames. 
], batch size: 60, lr: 5.13e-03, grad_scale: 8.0 +2023-04-02 08:12:19,214 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:13:00,815 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0400, 1.7676, 1.6163, 1.9069, 1.8050, 1.5370, 1.5226, 1.8578], + device='cuda:3'), covar=tensor([0.0936, 0.1558, 0.1524, 0.0999, 0.1294, 0.0710, 0.1469, 0.0753], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0352, 0.0300, 0.0243, 0.0297, 0.0247, 0.0290, 0.0245], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:13:17,748 INFO [train.py:903] (3/4) Epoch 16, batch 5650, loss[loss=0.2319, simple_loss=0.3072, pruned_loss=0.07834, over 19675.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2962, pruned_loss=0.07167, over 3829059.42 frames. ], batch size: 55, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:13:31,909 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9528, 4.4562, 2.7739, 3.9360, 0.9501, 4.3371, 4.2738, 4.3974], + device='cuda:3'), covar=tensor([0.0565, 0.1032, 0.1979, 0.0776, 0.4305, 0.0699, 0.0879, 0.0950], + device='cuda:3'), in_proj_covar=tensor([0.0470, 0.0387, 0.0464, 0.0330, 0.0394, 0.0403, 0.0402, 0.0430], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:13:55,637 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108099.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:13:56,647 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.688e+02 6.420e+02 8.034e+02 1.662e+03, threshold=1.284e+03, percent-clipped=4.0 +2023-04-02 08:14:03,634 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 08:14:20,998 INFO [train.py:903] (3/4) Epoch 16, batch 5700, loss[loss=0.2252, simple_loss=0.299, pruned_loss=0.0757, over 19412.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2948, pruned_loss=0.07053, over 3831643.34 frames. ], batch size: 70, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:14:42,905 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:15:22,685 INFO [train.py:903] (3/4) Epoch 16, batch 5750, loss[loss=0.2307, simple_loss=0.3169, pruned_loss=0.07224, over 18185.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2959, pruned_loss=0.0708, over 3836484.86 frames. ], batch size: 83, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:15:22,716 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 08:15:33,015 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 08:15:36,726 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 08:16:00,581 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.860e+02 6.382e+02 7.922e+02 1.732e+03, threshold=1.276e+03, percent-clipped=4.0 +2023-04-02 08:16:19,217 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108214.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:16:26,751 INFO [train.py:903] (3/4) Epoch 16, batch 5800, loss[loss=0.2377, simple_loss=0.3141, pruned_loss=0.0807, over 12728.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2958, pruned_loss=0.07071, over 3810770.37 frames. ], batch size: 136, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:16:54,106 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:25,879 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:17:27,725 INFO [train.py:903] (3/4) Epoch 16, batch 5850, loss[loss=0.212, simple_loss=0.2898, pruned_loss=0.06715, over 19465.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2953, pruned_loss=0.07089, over 3802224.09 frames. ], batch size: 49, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:05,295 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.738e+02 5.463e+02 6.888e+02 8.802e+02 1.964e+03, threshold=1.378e+03, percent-clipped=5.0 +2023-04-02 08:18:28,343 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 08:18:29,527 INFO [train.py:903] (3/4) Epoch 16, batch 5900, loss[loss=0.2111, simple_loss=0.3033, pruned_loss=0.05944, over 19665.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2944, pruned_loss=0.07044, over 3810463.50 frames. ], batch size: 58, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:18:52,012 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 08:18:59,411 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108343.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:19:30,982 INFO [train.py:903] (3/4) Epoch 16, batch 5950, loss[loss=0.215, simple_loss=0.2974, pruned_loss=0.06632, over 19348.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2954, pruned_loss=0.07084, over 3815864.88 frames. ], batch size: 70, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:19:59,913 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:09,622 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 5.248e+02 6.439e+02 7.965e+02 2.252e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-02 08:20:12,673 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.03 vs. limit=5.0 +2023-04-02 08:20:30,475 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:20:35,475 INFO [train.py:903] (3/4) Epoch 16, batch 6000, loss[loss=0.2429, simple_loss=0.316, pruned_loss=0.08488, over 19477.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2939, pruned_loss=0.06993, over 3833895.78 frames. 
], batch size: 49, lr: 5.12e-03, grad_scale: 8.0 +2023-04-02 08:20:35,476 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 08:20:47,897 INFO [train.py:937] (3/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2723, pruned_loss=0.03545, over 944034.00 frames. +2023-04-02 08:20:47,899 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 08:20:49,621 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3880, 1.4431, 1.7724, 1.6034, 2.8687, 2.3417, 3.0745, 1.2509], + device='cuda:3'), covar=tensor([0.2268, 0.3962, 0.2549, 0.1768, 0.1286, 0.1842, 0.1224, 0.3846], + device='cuda:3'), in_proj_covar=tensor([0.0507, 0.0607, 0.0660, 0.0460, 0.0603, 0.0509, 0.0645, 0.0517], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:21:25,619 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:21:51,712 INFO [train.py:903] (3/4) Epoch 16, batch 6050, loss[loss=0.2173, simple_loss=0.3002, pruned_loss=0.06724, over 19688.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2953, pruned_loss=0.07076, over 3826944.53 frames. ], batch size: 59, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:21:52,195 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108470.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:21:52,470 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 08:22:22,217 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108495.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:22:28,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.272e+02 6.402e+02 8.353e+02 1.883e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 08:22:41,573 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9590, 1.9279, 1.7391, 1.5816, 1.4742, 1.6034, 0.3827, 0.8764], + device='cuda:3'), covar=tensor([0.0524, 0.0553, 0.0391, 0.0650, 0.1141, 0.0671, 0.1137, 0.0978], + device='cuda:3'), in_proj_covar=tensor([0.0348, 0.0345, 0.0343, 0.0369, 0.0445, 0.0374, 0.0323, 0.0332], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:22:53,808 INFO [train.py:903] (3/4) Epoch 16, batch 6100, loss[loss=0.1889, simple_loss=0.2724, pruned_loss=0.05272, over 19419.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2959, pruned_loss=0.07137, over 3817672.57 frames. ], batch size: 48, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:23:56,095 INFO [train.py:903] (3/4) Epoch 16, batch 6150, loss[loss=0.2203, simple_loss=0.2946, pruned_loss=0.07299, over 18089.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2969, pruned_loss=0.07145, over 3829716.67 frames. ], batch size: 83, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:24:00,399 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 08:24:26,236 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 08:24:28,846 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:24:35,759 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.845e+02 6.012e+02 7.583e+02 1.796e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 08:24:58,817 INFO [train.py:903] (3/4) Epoch 16, batch 6200, loss[loss=0.2518, simple_loss=0.3248, pruned_loss=0.08937, over 17261.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2964, pruned_loss=0.07064, over 3829513.80 frames. ], batch size: 101, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:25:10,033 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4361, 2.2137, 2.3264, 2.5574, 2.3352, 2.1958, 2.2376, 2.4490], + device='cuda:3'), covar=tensor([0.0747, 0.1226, 0.0910, 0.0644, 0.0996, 0.0423, 0.0979, 0.0485], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0353, 0.0301, 0.0243, 0.0299, 0.0249, 0.0293, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:26:02,207 INFO [train.py:903] (3/4) Epoch 16, batch 6250, loss[loss=0.202, simple_loss=0.2812, pruned_loss=0.06137, over 19578.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2946, pruned_loss=0.06938, over 3817691.85 frames. ], batch size: 52, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:26:22,919 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108687.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:26:34,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 08:26:40,072 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 4.966e+02 6.026e+02 7.805e+02 1.726e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-04-02 08:27:04,711 INFO [train.py:903] (3/4) Epoch 16, batch 6300, loss[loss=0.2112, simple_loss=0.2965, pruned_loss=0.06296, over 19731.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2945, pruned_loss=0.06961, over 3831505.00 frames. 
], batch size: 63, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:27:07,315 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4748, 1.4965, 1.6609, 1.5967, 2.3086, 1.9870, 2.3869, 1.3252], + device='cuda:3'), covar=tensor([0.1850, 0.3303, 0.2134, 0.1581, 0.1235, 0.1760, 0.1074, 0.3712], + device='cuda:3'), in_proj_covar=tensor([0.0506, 0.0608, 0.0661, 0.0460, 0.0604, 0.0511, 0.0648, 0.0519], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:27:09,336 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8199, 1.6239, 1.5762, 2.3589, 1.8377, 2.1359, 2.1687, 1.8864], + device='cuda:3'), covar=tensor([0.0763, 0.0972, 0.1043, 0.0694, 0.0789, 0.0698, 0.0787, 0.0656], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0222, 0.0225, 0.0246, 0.0227, 0.0207, 0.0189, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 08:27:56,260 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7922, 1.6519, 1.5819, 2.2764, 1.6347, 2.0451, 2.1143, 1.8904], + device='cuda:3'), covar=tensor([0.0795, 0.0946, 0.1018, 0.0817, 0.0885, 0.0762, 0.0839, 0.0666], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0221, 0.0224, 0.0245, 0.0226, 0.0206, 0.0189, 0.0201], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 08:27:59,208 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. limit=5.0 +2023-04-02 08:28:06,338 INFO [train.py:903] (3/4) Epoch 16, batch 6350, loss[loss=0.2801, simple_loss=0.3514, pruned_loss=0.1044, over 17361.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2936, pruned_loss=0.06947, over 3833746.13 frames. ], batch size: 101, lr: 5.11e-03, grad_scale: 4.0 +2023-04-02 08:28:38,959 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:47,227 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.125e+02 6.267e+02 8.166e+02 1.468e+03, threshold=1.253e+03, percent-clipped=8.0 +2023-04-02 08:28:48,979 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:28:56,975 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:29:09,790 INFO [train.py:903] (3/4) Epoch 16, batch 6400, loss[loss=0.218, simple_loss=0.3011, pruned_loss=0.06747, over 19181.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2944, pruned_loss=0.06974, over 3839984.43 frames. 
], batch size: 69, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:29:17,119 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3094, 1.4054, 1.5145, 1.4747, 1.6662, 1.8530, 1.6616, 0.4867], + device='cuda:3'), covar=tensor([0.2331, 0.3951, 0.2497, 0.1853, 0.1642, 0.2110, 0.1443, 0.4365], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0612, 0.0664, 0.0464, 0.0606, 0.0513, 0.0651, 0.0521], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:29:56,413 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3167, 1.3596, 1.5085, 1.4826, 1.6759, 1.8306, 1.6232, 0.5106], + device='cuda:3'), covar=tensor([0.2301, 0.3985, 0.2504, 0.1877, 0.1625, 0.2198, 0.1517, 0.4410], + device='cuda:3'), in_proj_covar=tensor([0.0507, 0.0611, 0.0663, 0.0463, 0.0605, 0.0512, 0.0650, 0.0521], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:30:14,205 INFO [train.py:903] (3/4) Epoch 16, batch 6450, loss[loss=0.2503, simple_loss=0.3213, pruned_loss=0.08966, over 19737.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2949, pruned_loss=0.06976, over 3844957.50 frames. ], batch size: 63, lr: 5.11e-03, grad_scale: 8.0 +2023-04-02 08:30:40,421 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-02 08:30:52,172 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.109e+02 6.250e+02 7.655e+02 1.750e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-02 08:30:59,839 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 08:31:00,314 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 08:31:04,101 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:31:18,889 INFO [train.py:903] (3/4) Epoch 16, batch 6500, loss[loss=0.1989, simple_loss=0.2721, pruned_loss=0.06287, over 14798.00 frames. ], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.06899, over 3842030.27 frames. ], batch size: 32, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:31:24,478 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 08:31:37,676 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8987, 0.8664, 0.8761, 0.9562, 0.7905, 0.9694, 0.9765, 0.9065], + device='cuda:3'), covar=tensor([0.0647, 0.0712, 0.0775, 0.0550, 0.0747, 0.0586, 0.0667, 0.0570], + device='cuda:3'), in_proj_covar=tensor([0.0206, 0.0219, 0.0222, 0.0243, 0.0224, 0.0205, 0.0188, 0.0199], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 08:31:40,959 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:32:20,051 INFO [train.py:903] (3/4) Epoch 16, batch 6550, loss[loss=0.2774, simple_loss=0.3402, pruned_loss=0.1073, over 19332.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2937, pruned_loss=0.06875, over 3845788.43 frames. ], batch size: 66, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:32:27,823 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-02 08:32:58,929 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.624e+02 5.967e+02 6.821e+02 1.232e+03, threshold=1.193e+03, percent-clipped=0.0 +2023-04-02 08:33:21,265 INFO [train.py:903] (3/4) Epoch 16, batch 6600, loss[loss=0.204, simple_loss=0.2872, pruned_loss=0.06047, over 17321.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2932, pruned_loss=0.06888, over 3835873.43 frames. ], batch size: 101, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:33:53,960 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9901, 3.4194, 1.9666, 1.9912, 3.0114, 1.6045, 1.2161, 2.1248], + device='cuda:3'), covar=tensor([0.1410, 0.0548, 0.1121, 0.0846, 0.0516, 0.1262, 0.1057, 0.0730], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0305, 0.0329, 0.0254, 0.0240, 0.0329, 0.0287, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:34:03,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:05,065 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109055.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:34:08,550 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:34:22,639 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-02 08:34:22,860 INFO [train.py:903] (3/4) Epoch 16, batch 6650, loss[loss=0.2375, simple_loss=0.313, pruned_loss=0.08102, over 19678.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2939, pruned_loss=0.0695, over 3834295.59 frames. ], batch size: 59, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:34:41,492 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109083.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:35:01,848 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.420e+02 6.464e+02 8.289e+02 2.034e+03, threshold=1.293e+03, percent-clipped=5.0 +2023-04-02 08:35:18,230 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 08:35:27,581 INFO [train.py:903] (3/4) Epoch 16, batch 6700, loss[loss=0.2211, simple_loss=0.3023, pruned_loss=0.06988, over 18186.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2933, pruned_loss=0.06929, over 3831398.12 frames. ], batch size: 83, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:06,063 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:19,862 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:36:25,877 INFO [train.py:903] (3/4) Epoch 16, batch 6750, loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.06772, over 19671.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2941, pruned_loss=0.06992, over 3818039.10 frames. 
], batch size: 58, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:36:29,588 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6066, 1.4633, 1.4317, 2.2091, 1.7240, 1.8672, 2.1304, 1.7142], + device='cuda:3'), covar=tensor([0.0839, 0.1001, 0.1097, 0.0794, 0.0882, 0.0782, 0.0783, 0.0744], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0221, 0.0224, 0.0244, 0.0225, 0.0206, 0.0189, 0.0200], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 08:36:50,461 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:03,547 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 4.897e+02 5.778e+02 7.062e+02 1.289e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-02 08:37:15,727 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:37:22,665 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-02 08:37:24,165 INFO [train.py:903] (3/4) Epoch 16, batch 6800, loss[loss=0.2043, simple_loss=0.2804, pruned_loss=0.06407, over 19666.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2953, pruned_loss=0.07039, over 3821052.82 frames. ], batch size: 53, lr: 5.10e-03, grad_scale: 8.0 +2023-04-02 08:37:42,841 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-02 08:38:09,493 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 08:38:09,930 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 08:38:13,448 INFO [train.py:903] (3/4) Epoch 17, batch 0, loss[loss=0.2405, simple_loss=0.3136, pruned_loss=0.08367, over 19665.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3136, pruned_loss=0.08367, over 19665.00 frames. ], batch size: 60, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:38:13,449 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 08:38:23,516 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2889, 1.2866, 1.5915, 1.4598, 2.2446, 1.8675, 2.2664, 1.1734], + device='cuda:3'), covar=tensor([0.2412, 0.4356, 0.2684, 0.1904, 0.1434, 0.2242, 0.1238, 0.4418], + device='cuda:3'), in_proj_covar=tensor([0.0509, 0.0612, 0.0664, 0.0460, 0.0606, 0.0512, 0.0650, 0.0520], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:38:26,004 INFO [train.py:937] (3/4) Epoch 17, validation: loss=0.1721, simple_loss=0.2728, pruned_loss=0.03571, over 944034.00 frames. +2023-04-02 08:38:26,005 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 08:38:39,464 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 08:38:51,296 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:29,170 INFO [train.py:903] (3/4) Epoch 17, batch 50, loss[loss=0.2389, simple_loss=0.3166, pruned_loss=0.08061, over 19789.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2959, pruned_loss=0.07235, over 840860.20 frames. 
], batch size: 56, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:39:32,717 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 5.319e+02 6.338e+02 7.961e+02 1.981e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-02 08:39:43,545 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:45,680 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:39:59,842 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 08:40:13,445 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:14,435 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:40:28,818 INFO [train.py:903] (3/4) Epoch 17, batch 100, loss[loss=0.1965, simple_loss=0.2755, pruned_loss=0.05877, over 19390.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2953, pruned_loss=0.0714, over 1491684.65 frames. ], batch size: 48, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:40:36,771 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 08:40:53,546 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7501, 1.7779, 1.6199, 1.4172, 1.4238, 1.4088, 0.1840, 0.6698], + device='cuda:3'), covar=tensor([0.0565, 0.0555, 0.0337, 0.0493, 0.1023, 0.0637, 0.1028, 0.0892], + device='cuda:3'), in_proj_covar=tensor([0.0345, 0.0346, 0.0343, 0.0368, 0.0444, 0.0374, 0.0323, 0.0330], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:41:29,300 INFO [train.py:903] (3/4) Epoch 17, batch 150, loss[loss=0.207, simple_loss=0.295, pruned_loss=0.05949, over 18362.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2962, pruned_loss=0.07129, over 2011726.65 frames. ], batch size: 83, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:41:30,559 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109399.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:41:32,676 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.062e+02 6.078e+02 8.331e+02 1.364e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 08:41:34,552 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.06 vs. limit=5.0 +2023-04-02 08:41:44,717 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.10 vs. limit=5.0 +2023-04-02 08:42:07,844 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4195, 1.5601, 2.1348, 1.7280, 3.2074, 2.5080, 3.4540, 1.5121], + device='cuda:3'), covar=tensor([0.2385, 0.4210, 0.2528, 0.1843, 0.1439, 0.2001, 0.1546, 0.4074], + device='cuda:3'), in_proj_covar=tensor([0.0505, 0.0607, 0.0659, 0.0458, 0.0601, 0.0508, 0.0646, 0.0517], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:42:22,302 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 08:42:29,333 INFO [train.py:903] (3/4) Epoch 17, batch 200, loss[loss=0.2464, simple_loss=0.321, pruned_loss=0.08587, over 19556.00 frames. 
], tot_loss[loss=0.2199, simple_loss=0.297, pruned_loss=0.07144, over 2421661.01 frames. ], batch size: 61, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:25,310 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3844, 2.1596, 1.5776, 1.3970, 1.9882, 1.1953, 1.2039, 1.8082], + device='cuda:3'), covar=tensor([0.0897, 0.0667, 0.0969, 0.0735, 0.0482, 0.1221, 0.0715, 0.0421], + device='cuda:3'), in_proj_covar=tensor([0.0293, 0.0305, 0.0326, 0.0254, 0.0240, 0.0326, 0.0285, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:43:32,827 INFO [train.py:903] (3/4) Epoch 17, batch 250, loss[loss=0.2123, simple_loss=0.2955, pruned_loss=0.06451, over 19528.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2976, pruned_loss=0.07209, over 2744636.88 frames. ], batch size: 54, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:43:36,242 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.403e+02 5.064e+02 6.047e+02 7.271e+02 1.663e+03, threshold=1.209e+03, percent-clipped=2.0 +2023-04-02 08:43:52,402 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109514.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:44:04,251 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:35,520 INFO [train.py:903] (3/4) Epoch 17, batch 300, loss[loss=0.1816, simple_loss=0.2566, pruned_loss=0.05328, over 16006.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2966, pruned_loss=0.07135, over 2974020.31 frames. ], batch size: 35, lr: 4.94e-03, grad_scale: 8.0 +2023-04-02 08:44:37,171 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,081 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:44:45,555 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 08:45:36,080 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109597.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:45:36,900 INFO [train.py:903] (3/4) Epoch 17, batch 350, loss[loss=0.2452, simple_loss=0.3161, pruned_loss=0.08719, over 19533.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2954, pruned_loss=0.07047, over 3172159.17 frames. ], batch size: 56, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:45:38,097 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 08:45:40,567 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.783e+02 5.858e+02 7.548e+02 1.929e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 08:46:33,322 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0044, 4.3327, 4.7402, 4.7171, 1.6532, 4.3775, 3.7090, 4.4158], + device='cuda:3'), covar=tensor([0.1557, 0.0898, 0.0536, 0.0635, 0.6069, 0.0819, 0.0715, 0.1047], + device='cuda:3'), in_proj_covar=tensor([0.0731, 0.0677, 0.0876, 0.0761, 0.0785, 0.0625, 0.0525, 0.0805], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 08:46:38,843 INFO [train.py:903] (3/4) Epoch 17, batch 400, loss[loss=0.2189, simple_loss=0.3023, pruned_loss=0.06769, over 19521.00 frames. 
], tot_loss[loss=0.2179, simple_loss=0.2955, pruned_loss=0.07015, over 3323804.31 frames. ], batch size: 54, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:46:49,103 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:09,937 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:16,706 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:20,165 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:47:40,317 INFO [train.py:903] (3/4) Epoch 17, batch 450, loss[loss=0.1806, simple_loss=0.2634, pruned_loss=0.04893, over 19619.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2952, pruned_loss=0.0696, over 3432268.11 frames. ], batch size: 50, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:47:44,680 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 5.202e+02 6.539e+02 8.058e+02 1.631e+03, threshold=1.308e+03, percent-clipped=6.0 +2023-04-02 08:48:12,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 08:48:13,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 08:48:44,992 INFO [train.py:903] (3/4) Epoch 17, batch 500, loss[loss=0.1966, simple_loss=0.281, pruned_loss=0.0561, over 19527.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2963, pruned_loss=0.07039, over 3518353.70 frames. ], batch size: 54, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:48:53,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7221, 4.0418, 4.3287, 4.3377, 1.8468, 4.0472, 3.5313, 4.0424], + device='cuda:3'), covar=tensor([0.1346, 0.1451, 0.0586, 0.0662, 0.5337, 0.0965, 0.0657, 0.1072], + device='cuda:3'), in_proj_covar=tensor([0.0734, 0.0676, 0.0876, 0.0761, 0.0784, 0.0625, 0.0525, 0.0809], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 08:48:59,863 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 08:49:11,994 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109770.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 08:49:12,930 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:16,865 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-02 08:49:44,307 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:49:44,354 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109795.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 08:49:47,270 INFO [train.py:903] (3/4) Epoch 17, batch 550, loss[loss=0.2422, simple_loss=0.3229, pruned_loss=0.08072, over 19466.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2959, pruned_loss=0.07077, over 3583418.65 frames. 
], batch size: 64, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:49:50,690 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.960e+02 5.345e+02 7.483e+02 9.617e+02 2.288e+03, threshold=1.497e+03, percent-clipped=10.0 +2023-04-02 08:50:48,373 INFO [train.py:903] (3/4) Epoch 17, batch 600, loss[loss=0.2181, simple_loss=0.2953, pruned_loss=0.0705, over 19285.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2972, pruned_loss=0.07164, over 3621789.10 frames. ], batch size: 66, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:27,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 08:51:35,306 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.14 vs. limit=5.0 +2023-04-02 08:51:49,477 INFO [train.py:903] (3/4) Epoch 17, batch 650, loss[loss=0.2078, simple_loss=0.2759, pruned_loss=0.06986, over 18706.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2975, pruned_loss=0.07214, over 3676674.96 frames. ], batch size: 41, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:51:53,023 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.642e+02 6.698e+02 8.791e+02 1.815e+03, threshold=1.340e+03, percent-clipped=2.0 +2023-04-02 08:52:15,015 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3549, 1.4785, 1.7554, 1.5609, 2.6138, 2.1084, 2.7448, 1.2574], + device='cuda:3'), covar=tensor([0.2305, 0.3917, 0.2402, 0.1867, 0.1436, 0.2087, 0.1407, 0.3863], + device='cuda:3'), in_proj_covar=tensor([0.0505, 0.0609, 0.0661, 0.0457, 0.0601, 0.0509, 0.0645, 0.0515], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 08:52:27,871 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:31,273 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9773, 1.8180, 1.6982, 2.0975, 1.8124, 1.7927, 1.6574, 1.9090], + device='cuda:3'), covar=tensor([0.1050, 0.1505, 0.1383, 0.0943, 0.1231, 0.0511, 0.1336, 0.0691], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0353, 0.0300, 0.0243, 0.0295, 0.0248, 0.0295, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:52:43,999 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=109941.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:52:51,843 INFO [train.py:903] (3/4) Epoch 17, batch 700, loss[loss=0.2077, simple_loss=0.2926, pruned_loss=0.06137, over 19789.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2966, pruned_loss=0.07122, over 3708895.95 frames. ], batch size: 56, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:52:58,969 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109952.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:53:57,844 INFO [train.py:903] (3/4) Epoch 17, batch 750, loss[loss=0.261, simple_loss=0.3315, pruned_loss=0.09525, over 19729.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2949, pruned_loss=0.06988, over 3746525.40 frames. 
], batch size: 63, lr: 4.93e-03, grad_scale: 8.0 +2023-04-02 08:54:02,554 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.697e+02 5.712e+02 7.217e+02 1.165e+03, threshold=1.142e+03, percent-clipped=0.0 +2023-04-02 08:54:25,959 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:33,253 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:54:44,229 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110035.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:00,573 INFO [train.py:903] (3/4) Epoch 17, batch 800, loss[loss=0.219, simple_loss=0.3031, pruned_loss=0.06744, over 19660.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2949, pruned_loss=0.06936, over 3770559.50 frames. ], batch size: 55, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:55:04,469 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:05,703 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:10,530 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:14,886 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 08:55:17,675 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:55:27,182 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-02 08:55:35,612 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:03,217 INFO [train.py:903] (3/4) Epoch 17, batch 850, loss[loss=0.1826, simple_loss=0.2647, pruned_loss=0.05029, over 19837.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2953, pruned_loss=0.06965, over 3778359.56 frames. 
], batch size: 52, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:56:06,200 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.347e+02 5.213e+02 6.563e+02 8.363e+02 2.159e+03, threshold=1.313e+03, percent-clipped=10.0 +2023-04-02 08:56:42,126 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1935, 3.8374, 2.6674, 3.4369, 1.2445, 3.7077, 3.6641, 3.7385], + device='cuda:3'), covar=tensor([0.0845, 0.1079, 0.1968, 0.0791, 0.3527, 0.0837, 0.0912, 0.1295], + device='cuda:3'), in_proj_covar=tensor([0.0470, 0.0382, 0.0463, 0.0330, 0.0387, 0.0399, 0.0398, 0.0429], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 08:56:42,143 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:47,720 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110133.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:51,232 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 08:56:56,482 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 08:57:04,358 INFO [train.py:903] (3/4) Epoch 17, batch 900, loss[loss=0.245, simple_loss=0.3224, pruned_loss=0.08381, over 19384.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2954, pruned_loss=0.06984, over 3796304.37 frames. ], batch size: 70, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:05,928 INFO [train.py:903] (3/4) Epoch 17, batch 950, loss[loss=0.2481, simple_loss=0.3208, pruned_loss=0.08768, over 19725.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2969, pruned_loss=0.07079, over 3795622.26 frames. ], batch size: 63, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 08:58:08,342 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 08:58:09,604 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.667e+02 4.867e+02 6.255e+02 8.229e+02 2.250e+03, threshold=1.251e+03, percent-clipped=5.0 +2023-04-02 08:59:09,004 INFO [train.py:903] (3/4) Epoch 17, batch 1000, loss[loss=0.2158, simple_loss=0.2998, pruned_loss=0.06585, over 19583.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2962, pruned_loss=0.07059, over 3808415.89 frames. ], batch size: 61, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:05,684 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 09:00:11,629 INFO [train.py:903] (3/4) Epoch 17, batch 1050, loss[loss=0.193, simple_loss=0.2815, pruned_loss=0.05224, over 19673.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2956, pruned_loss=0.07038, over 3809160.52 frames. 
], batch size: 58, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:00:15,433 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110301.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:16,231 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 4.918e+02 6.228e+02 8.050e+02 1.872e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 09:00:28,181 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:40,523 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:00:45,000 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 09:01:00,212 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:01,463 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:01:13,841 INFO [train.py:903] (3/4) Epoch 17, batch 1100, loss[loss=0.1982, simple_loss=0.2715, pruned_loss=0.06243, over 19502.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2954, pruned_loss=0.07032, over 3805217.59 frames. ], batch size: 49, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:01:36,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1191, 1.1224, 1.4645, 1.3268, 2.7361, 1.0007, 2.1498, 2.9903], + device='cuda:3'), covar=tensor([0.0598, 0.2867, 0.2829, 0.1792, 0.0803, 0.2371, 0.1053, 0.0360], + device='cuda:3'), in_proj_covar=tensor([0.0389, 0.0355, 0.0375, 0.0335, 0.0360, 0.0342, 0.0357, 0.0381], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:01:54,278 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:09,871 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:16,582 INFO [train.py:903] (3/4) Epoch 17, batch 1150, loss[loss=0.2163, simple_loss=0.2952, pruned_loss=0.06871, over 19553.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2951, pruned_loss=0.06991, over 3810432.81 frames. ], batch size: 64, lr: 4.92e-03, grad_scale: 4.0 +2023-04-02 09:02:21,355 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.729e+02 5.705e+02 7.310e+02 1.426e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-02 09:02:28,349 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110406.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:02:43,205 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:20,434 INFO [train.py:903] (3/4) Epoch 17, batch 1200, loss[loss=0.213, simple_loss=0.2902, pruned_loss=0.06795, over 19761.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2953, pruned_loss=0.06985, over 3818569.93 frames. ], batch size: 54, lr: 4.92e-03, grad_scale: 8.0 +2023-04-02 09:03:22,075 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.12 vs. 
limit=5.0 +2023-04-02 09:03:49,916 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:03:53,238 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 09:03:55,659 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:19,557 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:04:23,826 INFO [train.py:903] (3/4) Epoch 17, batch 1250, loss[loss=0.2092, simple_loss=0.2848, pruned_loss=0.06678, over 19736.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2949, pruned_loss=0.06969, over 3814535.85 frames. ], batch size: 51, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:04:28,259 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.334e+02 6.614e+02 7.905e+02 1.343e+03, threshold=1.323e+03, percent-clipped=1.0 +2023-04-02 09:04:48,482 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3471, 3.0319, 2.2852, 2.7663, 0.8621, 2.9578, 2.8846, 2.9726], + device='cuda:3'), covar=tensor([0.1091, 0.1460, 0.2030, 0.1164, 0.3842, 0.1077, 0.1137, 0.1340], + device='cuda:3'), in_proj_covar=tensor([0.0471, 0.0385, 0.0465, 0.0331, 0.0389, 0.0402, 0.0400, 0.0429], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:04:52,178 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110521.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:05:25,877 INFO [train.py:903] (3/4) Epoch 17, batch 1300, loss[loss=0.2429, simple_loss=0.3242, pruned_loss=0.08076, over 19479.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.295, pruned_loss=0.06986, over 3820622.06 frames. ], batch size: 64, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:02,874 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.37 vs. limit=5.0 +2023-04-02 09:06:14,221 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:20,240 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110592.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:06:26,791 INFO [train.py:903] (3/4) Epoch 17, batch 1350, loss[loss=0.2185, simple_loss=0.3018, pruned_loss=0.06757, over 18881.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2962, pruned_loss=0.07056, over 3822492.44 frames. 
], batch size: 75, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:06:31,267 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 4.458e+02 6.078e+02 8.226e+02 1.667e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-02 09:06:35,113 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2734, 1.4052, 1.7953, 1.5503, 2.4408, 2.0519, 2.4733, 1.0380], + device='cuda:3'), covar=tensor([0.2732, 0.4608, 0.2758, 0.2122, 0.1619, 0.2391, 0.1692, 0.4645], + device='cuda:3'), in_proj_covar=tensor([0.0515, 0.0621, 0.0675, 0.0467, 0.0612, 0.0518, 0.0658, 0.0525], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 09:07:07,157 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:26,715 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:07:28,037 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0490, 1.4906, 1.8745, 1.3382, 2.8959, 4.4923, 4.4336, 4.8986], + device='cuda:3'), covar=tensor([0.1738, 0.3521, 0.3197, 0.2282, 0.0632, 0.0228, 0.0156, 0.0170], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0308, 0.0337, 0.0258, 0.0232, 0.0175, 0.0209, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:07:29,985 INFO [train.py:903] (3/4) Epoch 17, batch 1400, loss[loss=0.2099, simple_loss=0.285, pruned_loss=0.06742, over 19622.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2954, pruned_loss=0.07019, over 3836229.36 frames. ], batch size: 50, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:07:51,413 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110665.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:08,773 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:08:32,871 INFO [train.py:903] (3/4) Epoch 17, batch 1450, loss[loss=0.2446, simple_loss=0.3117, pruned_loss=0.08873, over 19768.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2961, pruned_loss=0.07071, over 3841146.91 frames. ], batch size: 54, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:08:32,997 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 09:08:38,609 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.047e+02 5.721e+02 7.117e+02 1.861e+03, threshold=1.144e+03, percent-clipped=3.0 +2023-04-02 09:09:35,399 INFO [train.py:903] (3/4) Epoch 17, batch 1500, loss[loss=0.1899, simple_loss=0.2819, pruned_loss=0.04899, over 19668.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2955, pruned_loss=0.06965, over 3849819.02 frames. 
], batch size: 58, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:09:38,214 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:42,819 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:09:50,088 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:11,865 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:14,254 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110777.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:17,726 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110780.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:27,709 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 09:10:35,383 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:38,184 INFO [train.py:903] (3/4) Epoch 17, batch 1550, loss[loss=0.2479, simple_loss=0.3215, pruned_loss=0.08717, over 17916.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2951, pruned_loss=0.06949, over 3830193.17 frames. ], batch size: 83, lr: 4.91e-03, grad_scale: 4.0 +2023-04-02 09:10:44,270 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110802.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:10:44,893 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.421e+02 5.256e+02 6.667e+02 1.625e+03, threshold=1.051e+03, percent-clipped=2.0 +2023-04-02 09:11:35,791 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:11:42,192 INFO [train.py:903] (3/4) Epoch 17, batch 1600, loss[loss=0.2419, simple_loss=0.3174, pruned_loss=0.08322, over 19535.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2943, pruned_loss=0.06927, over 3828145.09 frames. ], batch size: 54, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:11:42,643 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:05,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 09:12:07,122 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:12,612 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110873.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:29,585 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:12:45,475 INFO [train.py:903] (3/4) Epoch 17, batch 1650, loss[loss=0.1911, simple_loss=0.2672, pruned_loss=0.05746, over 19725.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2942, pruned_loss=0.06985, over 3823325.44 frames. 
], batch size: 46, lr: 4.91e-03, grad_scale: 8.0 +2023-04-02 09:12:51,298 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 5.600e+02 6.791e+02 9.379e+02 3.114e+03, threshold=1.358e+03, percent-clipped=15.0 +2023-04-02 09:13:47,390 INFO [train.py:903] (3/4) Epoch 17, batch 1700, loss[loss=0.2239, simple_loss=0.3037, pruned_loss=0.07202, over 19689.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2935, pruned_loss=0.06897, over 3830127.40 frames. ], batch size: 60, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:04,774 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 09:14:19,118 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=110973.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:24,098 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110976.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:14:29,664 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 09:14:49,216 INFO [train.py:903] (3/4) Epoch 17, batch 1750, loss[loss=0.2209, simple_loss=0.3031, pruned_loss=0.0693, over 18363.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2933, pruned_loss=0.069, over 3834701.89 frames. ], batch size: 84, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:14:55,345 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.870e+02 5.792e+02 7.151e+02 1.845e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 09:15:13,784 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3302, 1.3296, 1.9447, 1.5482, 3.0318, 4.6573, 4.5390, 5.0223], + device='cuda:3'), covar=tensor([0.1560, 0.3801, 0.3152, 0.2214, 0.0569, 0.0176, 0.0160, 0.0163], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0309, 0.0338, 0.0259, 0.0233, 0.0176, 0.0210, 0.0234], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 09:15:15,078 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:37,646 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111036.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:43,447 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111041.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:15:51,886 INFO [train.py:903] (3/4) Epoch 17, batch 1800, loss[loss=0.275, simple_loss=0.3442, pruned_loss=0.1029, over 19272.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2935, pruned_loss=0.06928, over 3831993.26 frames. ], batch size: 66, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:15:57,974 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111051.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:10,614 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111061.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:28,047 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111076.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:44,399 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:16:51,950 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-02 09:16:56,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0516, 2.1179, 2.3101, 2.8200, 2.1046, 2.7570, 2.4765, 2.1569], + device='cuda:3'), covar=tensor([0.3872, 0.3680, 0.1728, 0.2109, 0.3853, 0.1745, 0.4049, 0.3087], + device='cuda:3'), in_proj_covar=tensor([0.0853, 0.0902, 0.0682, 0.0909, 0.0833, 0.0770, 0.0812, 0.0752], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 09:16:57,620 INFO [train.py:903] (3/4) Epoch 17, batch 1850, loss[loss=0.1829, simple_loss=0.2554, pruned_loss=0.05519, over 19719.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2936, pruned_loss=0.06899, over 3829429.49 frames. ], batch size: 45, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:16:57,841 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111098.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:17:03,773 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.241e+02 6.549e+02 7.790e+02 1.838e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-02 09:17:29,438 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 09:18:00,991 INFO [train.py:903] (3/4) Epoch 17, batch 1900, loss[loss=0.2216, simple_loss=0.2889, pruned_loss=0.07715, over 19404.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.06834, over 3833935.83 frames. ], batch size: 48, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:18:17,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 09:18:24,354 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 09:18:34,781 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3820, 1.4011, 1.7199, 1.5966, 2.4967, 2.1860, 2.5325, 1.1107], + device='cuda:3'), covar=tensor([0.2384, 0.4282, 0.2664, 0.1906, 0.1476, 0.2079, 0.1421, 0.4127], + device='cuda:3'), in_proj_covar=tensor([0.0506, 0.0613, 0.0670, 0.0462, 0.0608, 0.0513, 0.0651, 0.0521], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:18:49,758 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 09:19:02,961 INFO [train.py:903] (3/4) Epoch 17, batch 1950, loss[loss=0.2171, simple_loss=0.2829, pruned_loss=0.07561, over 19410.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2928, pruned_loss=0.06852, over 3841852.11 frames. ], batch size: 48, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:19:08,707 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 5.118e+02 6.155e+02 7.161e+02 1.490e+03, threshold=1.231e+03, percent-clipped=2.0 +2023-04-02 09:19:10,547 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. 
limit=2.0 +2023-04-02 09:19:23,921 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:24,967 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5809, 1.1910, 1.4869, 1.6092, 3.1318, 1.0405, 2.3337, 3.4894], + device='cuda:3'), covar=tensor([0.0495, 0.2929, 0.2818, 0.1701, 0.0737, 0.2564, 0.1223, 0.0293], + device='cuda:3'), in_proj_covar=tensor([0.0389, 0.0352, 0.0374, 0.0335, 0.0360, 0.0342, 0.0358, 0.0381], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:19:44,081 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111230.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:19:59,225 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:20:05,434 INFO [train.py:903] (3/4) Epoch 17, batch 2000, loss[loss=0.1767, simple_loss=0.2574, pruned_loss=0.04804, over 19071.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2932, pruned_loss=0.06838, over 3839342.63 frames. ], batch size: 42, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:04,232 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 09:21:07,754 INFO [train.py:903] (3/4) Epoch 17, batch 2050, loss[loss=0.2064, simple_loss=0.2726, pruned_loss=0.07013, over 19755.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2924, pruned_loss=0.068, over 3847673.66 frames. ], batch size: 47, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:21:14,880 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 5.413e+02 6.604e+02 7.706e+02 1.674e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-02 09:21:22,818 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 09:21:24,037 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 09:21:34,592 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:21:43,899 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 09:22:05,012 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:06,132 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:09,327 INFO [train.py:903] (3/4) Epoch 17, batch 2100, loss[loss=0.226, simple_loss=0.3034, pruned_loss=0.07425, over 19334.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2928, pruned_loss=0.06848, over 3854590.39 frames. ], batch size: 70, lr: 4.90e-03, grad_scale: 8.0 +2023-04-02 09:22:34,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:22:38,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-02 09:22:55,888 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3460, 2.1563, 2.0140, 1.8362, 1.5600, 1.7937, 0.7120, 1.2391], + device='cuda:3'), covar=tensor([0.0525, 0.0564, 0.0423, 0.0712, 0.1137, 0.0837, 0.1171, 0.0984], + device='cuda:3'), in_proj_covar=tensor([0.0351, 0.0350, 0.0348, 0.0374, 0.0450, 0.0382, 0.0326, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:23:01,386 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 09:23:10,553 INFO [train.py:903] (3/4) Epoch 17, batch 2150, loss[loss=0.2231, simple_loss=0.2931, pruned_loss=0.07655, over 19845.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2925, pruned_loss=0.06907, over 3843083.16 frames. ], batch size: 52, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:23:15,802 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4888, 1.6289, 2.1217, 1.7920, 3.3484, 2.7162, 3.7313, 1.7579], + device='cuda:3'), covar=tensor([0.2359, 0.4196, 0.2604, 0.1788, 0.1451, 0.1911, 0.1435, 0.3764], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0615, 0.0671, 0.0463, 0.0611, 0.0514, 0.0653, 0.0522], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:23:16,478 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.943e+02 5.975e+02 7.359e+02 1.553e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-02 09:23:56,918 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111435.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:11,372 INFO [train.py:903] (3/4) Epoch 17, batch 2200, loss[loss=0.2784, simple_loss=0.3492, pruned_loss=0.1038, over 19449.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2928, pruned_loss=0.06917, over 3832611.60 frames. ], batch size: 64, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:24:39,059 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111469.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:24:58,479 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:11,183 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111494.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:25:15,257 INFO [train.py:903] (3/4) Epoch 17, batch 2250, loss[loss=0.2273, simple_loss=0.3103, pruned_loss=0.07212, over 19664.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2934, pruned_loss=0.06952, over 3820813.46 frames. ], batch size: 60, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:25:22,029 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 5.257e+02 6.827e+02 8.687e+02 2.303e+03, threshold=1.365e+03, percent-clipped=8.0 +2023-04-02 09:26:16,850 INFO [train.py:903] (3/4) Epoch 17, batch 2300, loss[loss=0.198, simple_loss=0.2933, pruned_loss=0.05132, over 19785.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2947, pruned_loss=0.06998, over 3818532.24 frames. ], batch size: 56, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:26:27,079 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 09:26:38,588 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-04-02 09:27:05,581 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:18,435 INFO [train.py:903] (3/4) Epoch 17, batch 2350, loss[loss=0.1721, simple_loss=0.2565, pruned_loss=0.04384, over 19396.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.293, pruned_loss=0.06925, over 3829235.65 frames. ], batch size: 48, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:27:22,382 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111601.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:24,244 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.149e+02 5.946e+02 7.803e+02 1.982e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-02 09:27:54,516 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:27:58,756 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 09:28:06,315 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-02 09:28:14,873 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 09:28:19,177 INFO [train.py:903] (3/4) Epoch 17, batch 2400, loss[loss=0.2664, simple_loss=0.3316, pruned_loss=0.1006, over 19611.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2947, pruned_loss=0.0705, over 3827702.35 frames. ], batch size: 57, lr: 4.89e-03, grad_scale: 8.0 +2023-04-02 09:28:23,952 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5363, 1.6388, 1.7268, 1.9772, 1.5146, 1.8779, 1.8019, 1.6679], + device='cuda:3'), covar=tensor([0.3262, 0.2667, 0.1460, 0.1651, 0.2814, 0.1486, 0.3373, 0.2416], + device='cuda:3'), in_proj_covar=tensor([0.0854, 0.0902, 0.0683, 0.0909, 0.0833, 0.0768, 0.0816, 0.0752], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 09:29:15,449 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:24,913 INFO [train.py:903] (3/4) Epoch 17, batch 2450, loss[loss=0.2025, simple_loss=0.2865, pruned_loss=0.05925, over 19761.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2941, pruned_loss=0.07014, over 3818891.55 frames. ], batch size: 54, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:29:29,898 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:29:30,392 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-04-02 09:29:32,591 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.976e+02 6.292e+02 8.323e+02 1.636e+03, threshold=1.258e+03, percent-clipped=0.0 +2023-04-02 09:29:33,040 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9750, 1.5761, 1.5164, 1.8398, 1.5660, 1.7000, 1.4174, 1.8219], + device='cuda:3'), covar=tensor([0.0981, 0.1260, 0.1510, 0.1000, 0.1197, 0.0535, 0.1423, 0.0718], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0354, 0.0302, 0.0245, 0.0299, 0.0248, 0.0297, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:29:47,095 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111716.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:30:18,828 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9363, 1.9763, 2.2042, 2.6753, 1.9722, 2.5591, 2.3132, 2.0004], + device='cuda:3'), covar=tensor([0.4022, 0.3713, 0.1734, 0.2168, 0.3675, 0.1871, 0.4371, 0.3121], + device='cuda:3'), in_proj_covar=tensor([0.0857, 0.0907, 0.0687, 0.0914, 0.0836, 0.0773, 0.0818, 0.0753], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 09:30:27,987 INFO [train.py:903] (3/4) Epoch 17, batch 2500, loss[loss=0.1805, simple_loss=0.2563, pruned_loss=0.05234, over 19801.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.07002, over 3819750.96 frames. ], batch size: 47, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:28,662 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 09:31:31,103 INFO [train.py:903] (3/4) Epoch 17, batch 2550, loss[loss=0.2634, simple_loss=0.3351, pruned_loss=0.09585, over 19620.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2945, pruned_loss=0.06944, over 3821835.23 frames. ], batch size: 57, lr: 4.89e-03, grad_scale: 4.0 +2023-04-02 09:31:38,617 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.268e+02 6.331e+02 7.722e+02 1.710e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-02 09:31:44,786 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111809.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:32:13,247 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=111830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:32:28,427 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 09:32:34,317 INFO [train.py:903] (3/4) Epoch 17, batch 2600, loss[loss=0.2444, simple_loss=0.3192, pruned_loss=0.08483, over 19598.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2947, pruned_loss=0.06959, over 3827764.58 frames. ], batch size: 61, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:38,519 INFO [train.py:903] (3/4) Epoch 17, batch 2650, loss[loss=0.2617, simple_loss=0.3267, pruned_loss=0.09834, over 17364.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2942, pruned_loss=0.06931, over 3825601.54 frames. 
], batch size: 101, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:33:46,345 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 5.132e+02 6.430e+02 7.842e+02 1.964e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 09:33:47,844 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9216, 4.3581, 4.6348, 4.6307, 1.6203, 4.3762, 3.7620, 4.3172], + device='cuda:3'), covar=tensor([0.1450, 0.0788, 0.0537, 0.0583, 0.5666, 0.0744, 0.0615, 0.1075], + device='cuda:3'), in_proj_covar=tensor([0.0745, 0.0681, 0.0887, 0.0773, 0.0792, 0.0640, 0.0532, 0.0818], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 09:33:58,627 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 09:34:38,392 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:34:41,289 INFO [train.py:903] (3/4) Epoch 17, batch 2700, loss[loss=0.1948, simple_loss=0.2806, pruned_loss=0.05448, over 19675.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2936, pruned_loss=0.06927, over 3825148.58 frames. ], batch size: 53, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:34:54,245 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:26,072 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:35:43,876 INFO [train.py:903] (3/4) Epoch 17, batch 2750, loss[loss=0.2045, simple_loss=0.2947, pruned_loss=0.05711, over 19500.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2939, pruned_loss=0.06915, over 3836289.77 frames. ], batch size: 64, lr: 4.88e-03, grad_scale: 4.0 +2023-04-02 09:35:52,116 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.760e+02 5.468e+02 6.814e+02 8.726e+02 1.544e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-04-02 09:36:45,032 INFO [train.py:903] (3/4) Epoch 17, batch 2800, loss[loss=0.2493, simple_loss=0.3206, pruned_loss=0.08905, over 19733.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2957, pruned_loss=0.0703, over 3824097.52 frames. ], batch size: 63, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:26,163 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0020, 1.2741, 1.6302, 1.1119, 2.4619, 3.3621, 3.0704, 3.5913], + device='cuda:3'), covar=tensor([0.1722, 0.3602, 0.3198, 0.2464, 0.0572, 0.0188, 0.0222, 0.0237], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0307, 0.0337, 0.0256, 0.0229, 0.0174, 0.0208, 0.0233], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:37:29,661 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:37:48,060 INFO [train.py:903] (3/4) Epoch 17, batch 2850, loss[loss=0.2132, simple_loss=0.3003, pruned_loss=0.06303, over 19694.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2955, pruned_loss=0.06997, over 3837279.31 frames. 
], batch size: 59, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:37:54,817 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 5.207e+02 6.661e+02 8.674e+02 1.797e+03, threshold=1.332e+03, percent-clipped=6.0 +2023-04-02 09:37:57,498 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5877, 1.0872, 1.4157, 1.1812, 2.2395, 0.9950, 2.0169, 2.4371], + device='cuda:3'), covar=tensor([0.0655, 0.2711, 0.2710, 0.1625, 0.0848, 0.2062, 0.1031, 0.0458], + device='cuda:3'), in_proj_covar=tensor([0.0386, 0.0349, 0.0371, 0.0332, 0.0358, 0.0340, 0.0357, 0.0378], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:38:46,061 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 09:38:49,622 INFO [train.py:903] (3/4) Epoch 17, batch 2900, loss[loss=0.2325, simple_loss=0.3083, pruned_loss=0.07832, over 19755.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2961, pruned_loss=0.07035, over 3839018.31 frames. ], batch size: 63, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:38:56,333 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112153.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:39:51,810 INFO [train.py:903] (3/4) Epoch 17, batch 2950, loss[loss=0.2017, simple_loss=0.275, pruned_loss=0.06426, over 19803.00 frames. ], tot_loss[loss=0.218, simple_loss=0.296, pruned_loss=0.07, over 3839226.28 frames. ], batch size: 49, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:39:55,966 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:39:58,769 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 4.879e+02 6.137e+02 7.850e+02 1.399e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-04-02 09:40:27,856 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:40:36,006 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4137, 1.5309, 1.9608, 1.7233, 3.1138, 2.5214, 3.4693, 1.6771], + device='cuda:3'), covar=tensor([0.2449, 0.4105, 0.2658, 0.1901, 0.1510, 0.2158, 0.1637, 0.3929], + device='cuda:3'), in_proj_covar=tensor([0.0507, 0.0610, 0.0664, 0.0461, 0.0603, 0.0513, 0.0649, 0.0522], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 09:40:54,421 INFO [train.py:903] (3/4) Epoch 17, batch 3000, loss[loss=0.2298, simple_loss=0.3065, pruned_loss=0.07648, over 19681.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2948, pruned_loss=0.06905, over 3850338.00 frames. ], batch size: 58, lr: 4.88e-03, grad_scale: 8.0 +2023-04-02 09:40:54,422 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 09:41:09,003 INFO [train.py:937] (3/4) Epoch 17, validation: loss=0.1717, simple_loss=0.272, pruned_loss=0.03576, over 944034.00 frames. +2023-04-02 09:41:09,005 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 09:41:13,737 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 09:41:33,207 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112268.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:41:33,241 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2873, 1.9234, 1.5308, 1.0811, 1.8942, 0.9983, 1.1651, 1.7353], + device='cuda:3'), covar=tensor([0.0886, 0.0736, 0.1036, 0.0961, 0.0450, 0.1368, 0.0680, 0.0398], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0307, 0.0330, 0.0255, 0.0242, 0.0329, 0.0290, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:42:09,768 INFO [train.py:903] (3/4) Epoch 17, batch 3050, loss[loss=0.2281, simple_loss=0.3079, pruned_loss=0.07413, over 19674.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2956, pruned_loss=0.06935, over 3836769.69 frames. ], batch size: 60, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:42:16,486 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.197e+02 6.217e+02 9.038e+02 1.667e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-02 09:42:16,904 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1020, 1.7408, 1.7264, 2.0219, 1.6584, 1.8396, 1.6575, 1.9576], + device='cuda:3'), covar=tensor([0.0946, 0.1586, 0.1367, 0.1071, 0.1424, 0.0504, 0.1325, 0.0685], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0358, 0.0305, 0.0246, 0.0299, 0.0250, 0.0298, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:43:10,145 INFO [train.py:903] (3/4) Epoch 17, batch 3100, loss[loss=0.1934, simple_loss=0.2789, pruned_loss=0.05393, over 18762.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2967, pruned_loss=0.07075, over 3823969.40 frames. ], batch size: 74, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:14,537 INFO [train.py:903] (3/4) Epoch 17, batch 3150, loss[loss=0.2518, simple_loss=0.3241, pruned_loss=0.0898, over 19692.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2976, pruned_loss=0.07152, over 3816084.50 frames. ], batch size: 59, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:44:21,815 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.016e+02 6.190e+02 7.660e+02 1.883e+03, threshold=1.238e+03, percent-clipped=9.0 +2023-04-02 09:44:25,575 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112407.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:44:42,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 09:44:52,122 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:45:17,097 INFO [train.py:903] (3/4) Epoch 17, batch 3200, loss[loss=0.229, simple_loss=0.294, pruned_loss=0.08195, over 19729.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2965, pruned_loss=0.07094, over 3811414.23 frames. 
], batch size: 51, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:45:28,589 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4306, 2.0984, 1.6911, 1.3460, 2.0077, 1.2686, 1.1622, 1.8240], + device='cuda:3'), covar=tensor([0.0962, 0.0728, 0.1032, 0.0894, 0.0538, 0.1302, 0.0781, 0.0440], + device='cuda:3'), in_proj_covar=tensor([0.0295, 0.0308, 0.0330, 0.0256, 0.0243, 0.0329, 0.0290, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:46:19,038 INFO [train.py:903] (3/4) Epoch 17, batch 3250, loss[loss=0.2068, simple_loss=0.276, pruned_loss=0.06882, over 19746.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2954, pruned_loss=0.07028, over 3813283.06 frames. ], batch size: 46, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:46:26,172 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.958e+02 6.274e+02 7.840e+02 2.025e+03, threshold=1.255e+03, percent-clipped=2.0 +2023-04-02 09:46:50,003 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1341, 1.3504, 1.5014, 1.4248, 2.7496, 1.0361, 2.2323, 3.0734], + device='cuda:3'), covar=tensor([0.0513, 0.2587, 0.2665, 0.1705, 0.0700, 0.2324, 0.1034, 0.0305], + device='cuda:3'), in_proj_covar=tensor([0.0385, 0.0350, 0.0369, 0.0330, 0.0356, 0.0338, 0.0355, 0.0377], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:46:51,236 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112524.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 09:47:13,539 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:47:18,737 INFO [train.py:903] (3/4) Epoch 17, batch 3300, loss[loss=0.2419, simple_loss=0.3204, pruned_loss=0.08173, over 19646.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2966, pruned_loss=0.07109, over 3816112.70 frames. ], batch size: 60, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:47:21,208 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112549.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 09:47:25,362 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 09:48:00,378 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:20,898 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:48:24,061 INFO [train.py:903] (3/4) Epoch 17, batch 3350, loss[loss=0.2405, simple_loss=0.3159, pruned_loss=0.08253, over 19269.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2956, pruned_loss=0.07063, over 3811287.27 frames. 
], batch size: 66, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:48:31,311 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.436e+02 6.846e+02 8.612e+02 1.565e+03, threshold=1.369e+03, percent-clipped=5.0 +2023-04-02 09:48:45,600 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8585, 1.9412, 2.1711, 2.5201, 1.7981, 2.4029, 2.2793, 1.9836], + device='cuda:3'), covar=tensor([0.4184, 0.3785, 0.1866, 0.2193, 0.4057, 0.1969, 0.4573, 0.3300], + device='cuda:3'), in_proj_covar=tensor([0.0857, 0.0907, 0.0687, 0.0913, 0.0838, 0.0775, 0.0821, 0.0755], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 09:49:09,948 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:49:24,124 INFO [train.py:903] (3/4) Epoch 17, batch 3400, loss[loss=0.204, simple_loss=0.2761, pruned_loss=0.06594, over 19788.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2959, pruned_loss=0.07081, over 3803739.34 frames. ], batch size: 47, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:49:27,243 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 09:49:31,437 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5982, 4.1445, 2.5187, 3.6686, 1.1122, 4.0399, 3.9435, 4.0728], + device='cuda:3'), covar=tensor([0.0600, 0.1060, 0.2145, 0.0861, 0.3718, 0.0759, 0.0891, 0.1142], + device='cuda:3'), in_proj_covar=tensor([0.0474, 0.0389, 0.0472, 0.0332, 0.0391, 0.0407, 0.0403, 0.0432], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:49:45,252 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 09:50:25,918 INFO [train.py:903] (3/4) Epoch 17, batch 3450, loss[loss=0.2553, simple_loss=0.3348, pruned_loss=0.08789, over 19744.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2959, pruned_loss=0.07045, over 3806586.45 frames. ], batch size: 63, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:50:28,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 09:50:33,006 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.932e+02 6.092e+02 9.481e+02 2.200e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-02 09:50:55,138 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-02 09:51:04,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8718, 1.7662, 1.4930, 1.8213, 1.7318, 1.4414, 1.4078, 1.7696], + device='cuda:3'), covar=tensor([0.1057, 0.1351, 0.1629, 0.1039, 0.1251, 0.0789, 0.1657, 0.0807], + device='cuda:3'), in_proj_covar=tensor([0.0261, 0.0353, 0.0299, 0.0242, 0.0295, 0.0246, 0.0293, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:51:27,355 INFO [train.py:903] (3/4) Epoch 17, batch 3500, loss[loss=0.1797, simple_loss=0.2588, pruned_loss=0.05036, over 19739.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2969, pruned_loss=0.07115, over 3796408.79 frames. 
], batch size: 45, lr: 4.87e-03, grad_scale: 8.0 +2023-04-02 09:51:32,076 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:31,109 INFO [train.py:903] (3/4) Epoch 17, batch 3550, loss[loss=0.2229, simple_loss=0.2936, pruned_loss=0.07606, over 19737.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2967, pruned_loss=0.07058, over 3817591.56 frames. ], batch size: 51, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:52:32,837 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:52:38,380 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.759e+02 5.980e+02 7.566e+02 1.638e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 09:52:57,235 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8960, 4.3187, 4.6175, 4.6189, 1.7213, 4.3302, 3.7027, 4.3152], + device='cuda:3'), covar=tensor([0.1511, 0.0836, 0.0584, 0.0583, 0.5716, 0.0797, 0.0665, 0.1076], + device='cuda:3'), in_proj_covar=tensor([0.0752, 0.0691, 0.0900, 0.0777, 0.0797, 0.0647, 0.0538, 0.0823], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 09:53:03,354 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:53:33,303 INFO [train.py:903] (3/4) Epoch 17, batch 3600, loss[loss=0.2116, simple_loss=0.2955, pruned_loss=0.06383, over 19528.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2951, pruned_loss=0.06931, over 3821786.16 frames. ], batch size: 56, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:53:55,565 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112866.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:54:35,710 INFO [train.py:903] (3/4) Epoch 17, batch 3650, loss[loss=0.2171, simple_loss=0.2975, pruned_loss=0.06837, over 19654.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2948, pruned_loss=0.06902, over 3819826.44 frames. ], batch size: 53, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:54:43,571 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.951e+02 4.960e+02 5.826e+02 7.647e+02 1.614e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 09:55:09,909 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112924.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,595 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:28,727 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112939.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:55:38,877 INFO [train.py:903] (3/4) Epoch 17, batch 3700, loss[loss=0.2587, simple_loss=0.3256, pruned_loss=0.09587, over 13713.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2943, pruned_loss=0.06872, over 3821589.39 frames. 
], batch size: 139, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:05,001 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7154, 1.4705, 1.4438, 2.0566, 1.5400, 2.0801, 2.0388, 1.8848], + device='cuda:3'), covar=tensor([0.0882, 0.0993, 0.1103, 0.0869, 0.0967, 0.0723, 0.0878, 0.0683], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0222, 0.0244, 0.0228, 0.0208, 0.0189, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 09:56:17,481 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=112979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:56:42,238 INFO [train.py:903] (3/4) Epoch 17, batch 3750, loss[loss=0.1911, simple_loss=0.271, pruned_loss=0.0556, over 19739.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2947, pruned_loss=0.06846, over 3822820.71 frames. ], batch size: 51, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:56:49,255 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.723e+02 6.001e+02 7.947e+02 1.345e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-02 09:56:58,841 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5518, 2.3818, 1.7594, 1.5821, 2.2390, 1.4915, 1.4845, 1.9008], + device='cuda:3'), covar=tensor([0.1119, 0.0713, 0.0966, 0.0815, 0.0501, 0.1163, 0.0723, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0298, 0.0313, 0.0334, 0.0258, 0.0245, 0.0333, 0.0294, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 09:57:32,730 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113039.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:57:42,417 INFO [train.py:903] (3/4) Epoch 17, batch 3800, loss[loss=0.1966, simple_loss=0.2784, pruned_loss=0.05737, over 19590.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.295, pruned_loss=0.06902, over 3822334.27 frames. ], batch size: 52, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:57:49,591 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:14,097 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 09:58:39,388 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:58:43,323 INFO [train.py:903] (3/4) Epoch 17, batch 3850, loss[loss=0.249, simple_loss=0.3288, pruned_loss=0.08462, over 19677.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2966, pruned_loss=0.07018, over 3818011.53 frames. ], batch size: 60, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 09:58:51,557 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.627e+02 5.316e+02 6.326e+02 9.097e+02 1.552e+03, threshold=1.265e+03, percent-clipped=8.0 +2023-04-02 09:59:14,523 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113122.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:44,611 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113147.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 09:59:45,350 INFO [train.py:903] (3/4) Epoch 17, batch 3900, loss[loss=0.1982, simple_loss=0.2724, pruned_loss=0.06196, over 19746.00 frames. 
], tot_loss[loss=0.2179, simple_loss=0.296, pruned_loss=0.06992, over 3824050.35 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:48,758 INFO [train.py:903] (3/4) Epoch 17, batch 3950, loss[loss=0.2066, simple_loss=0.2828, pruned_loss=0.06523, over 19748.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2934, pruned_loss=0.06851, over 3833109.38 frames. ], batch size: 51, lr: 4.86e-03, grad_scale: 8.0 +2023-04-02 10:00:56,116 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 10:00:57,244 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.545e+02 5.288e+02 6.585e+02 1.560e+03, threshold=1.058e+03, percent-clipped=1.0 +2023-04-02 10:01:51,451 INFO [train.py:903] (3/4) Epoch 17, batch 4000, loss[loss=0.2363, simple_loss=0.3121, pruned_loss=0.08025, over 19731.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2934, pruned_loss=0.06847, over 3829801.48 frames. ], batch size: 63, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:01:56,615 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113252.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:35,233 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:39,797 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 10:02:49,725 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:02:52,758 INFO [train.py:903] (3/4) Epoch 17, batch 4050, loss[loss=0.2036, simple_loss=0.2801, pruned_loss=0.06359, over 19679.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2924, pruned_loss=0.06801, over 3830215.96 frames. ], batch size: 53, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:00,898 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 4.703e+02 5.716e+02 7.594e+02 1.568e+03, threshold=1.143e+03, percent-clipped=5.0 +2023-04-02 10:03:08,184 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113310.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:21,623 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113320.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:38,694 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113335.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:03:54,663 INFO [train.py:903] (3/4) Epoch 17, batch 4100, loss[loss=0.2236, simple_loss=0.3037, pruned_loss=0.07179, over 19600.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2924, pruned_loss=0.06791, over 3836028.31 frames. ], batch size: 61, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:03:57,603 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:28,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:04:31,522 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 10:04:56,310 INFO [train.py:903] (3/4) Epoch 17, batch 4150, loss[loss=0.1555, simple_loss=0.2362, pruned_loss=0.03737, over 19776.00 frames. 
], tot_loss[loss=0.2134, simple_loss=0.2917, pruned_loss=0.06751, over 3842218.86 frames. ], batch size: 47, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:04:56,637 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:05:03,828 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 5.375e+02 6.520e+02 8.152e+02 2.133e+03, threshold=1.304e+03, percent-clipped=6.0 +2023-04-02 10:05:57,676 INFO [train.py:903] (3/4) Epoch 17, batch 4200, loss[loss=0.1916, simple_loss=0.2707, pruned_loss=0.05628, over 19593.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2926, pruned_loss=0.0684, over 3831759.98 frames. ], batch size: 52, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:06:02,338 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 10:06:41,260 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7362, 2.0669, 2.3571, 2.1962, 3.1465, 3.7057, 3.6477, 3.9956], + device='cuda:3'), covar=tensor([0.1445, 0.2830, 0.2507, 0.1880, 0.0801, 0.0305, 0.0184, 0.0252], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0310, 0.0339, 0.0258, 0.0232, 0.0177, 0.0211, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:06:59,531 INFO [train.py:903] (3/4) Epoch 17, batch 4250, loss[loss=0.2181, simple_loss=0.2854, pruned_loss=0.07541, over 19388.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2939, pruned_loss=0.06945, over 3813046.29 frames. ], batch size: 47, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:07:06,464 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.808e+02 5.898e+02 7.585e+02 1.571e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 10:07:13,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 10:07:15,390 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 10:07:21,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5779, 2.3655, 1.7451, 1.4957, 2.2170, 1.4383, 1.3113, 1.8913], + device='cuda:3'), covar=tensor([0.1055, 0.0690, 0.1008, 0.0837, 0.0533, 0.1185, 0.0820, 0.0522], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0310, 0.0329, 0.0255, 0.0244, 0.0329, 0.0291, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:07:24,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 10:08:02,054 INFO [train.py:903] (3/4) Epoch 17, batch 4300, loss[loss=0.2094, simple_loss=0.2891, pruned_loss=0.06484, over 17982.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2939, pruned_loss=0.06909, over 3813875.13 frames. ], batch size: 83, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:08:03,898 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 10:08:55,443 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-02 10:09:02,225 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113596.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:09:04,377 INFO [train.py:903] (3/4) Epoch 17, batch 4350, loss[loss=0.2142, simple_loss=0.2897, pruned_loss=0.06934, over 19595.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2931, pruned_loss=0.06828, over 3828059.75 frames. ], batch size: 50, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:09:12,442 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 4.847e+02 6.118e+02 7.738e+02 1.753e+03, threshold=1.224e+03, percent-clipped=4.0 +2023-04-02 10:09:49,677 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:07,377 INFO [train.py:903] (3/4) Epoch 17, batch 4400, loss[loss=0.2143, simple_loss=0.2952, pruned_loss=0.06667, over 19672.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.292, pruned_loss=0.06819, over 3829503.36 frames. ], batch size: 59, lr: 4.85e-03, grad_scale: 8.0 +2023-04-02 10:10:14,945 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113654.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:26,336 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113664.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:10:33,150 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 10:10:43,117 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 10:10:45,285 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113679.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:11:07,792 INFO [train.py:903] (3/4) Epoch 17, batch 4450, loss[loss=0.2352, simple_loss=0.3227, pruned_loss=0.07387, over 19704.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2928, pruned_loss=0.06899, over 3838281.46 frames. ], batch size: 63, lr: 4.84e-03, grad_scale: 16.0 +2023-04-02 10:11:14,459 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.974e+02 5.100e+02 6.811e+02 8.906e+02 1.680e+03, threshold=1.362e+03, percent-clipped=7.0 +2023-04-02 10:11:22,995 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113711.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:12:07,712 INFO [train.py:903] (3/4) Epoch 17, batch 4500, loss[loss=0.2135, simple_loss=0.309, pruned_loss=0.059, over 19704.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.294, pruned_loss=0.06988, over 3840657.49 frames. ], batch size: 59, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:13:09,295 INFO [train.py:903] (3/4) Epoch 17, batch 4550, loss[loss=0.2182, simple_loss=0.2997, pruned_loss=0.06839, over 19492.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2945, pruned_loss=0.07018, over 3831670.93 frames. ], batch size: 64, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:13:19,359 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 10:13:20,452 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 5.090e+02 6.214e+02 7.749e+02 1.433e+03, threshold=1.243e+03, percent-clipped=2.0 +2023-04-02 10:13:42,464 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 10:14:12,747 INFO [train.py:903] (3/4) Epoch 17, batch 4600, loss[loss=0.2069, simple_loss=0.2842, pruned_loss=0.06475, over 19739.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2944, pruned_loss=0.07025, over 3823485.35 frames. ], batch size: 51, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:14,385 INFO [train.py:903] (3/4) Epoch 17, batch 4650, loss[loss=0.2096, simple_loss=0.2874, pruned_loss=0.06593, over 19849.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.295, pruned_loss=0.07017, over 3828456.38 frames. ], batch size: 52, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:15:23,609 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 5.276e+02 6.482e+02 7.907e+02 1.823e+03, threshold=1.296e+03, percent-clipped=2.0 +2023-04-02 10:15:31,987 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 10:15:44,642 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 10:15:57,554 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9884, 3.4274, 1.9581, 2.0001, 3.0921, 1.8755, 1.4075, 2.1430], + device='cuda:3'), covar=tensor([0.1441, 0.0526, 0.1127, 0.0870, 0.0529, 0.1236, 0.1015, 0.0789], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0311, 0.0330, 0.0255, 0.0243, 0.0329, 0.0291, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:16:16,826 INFO [train.py:903] (3/4) Epoch 17, batch 4700, loss[loss=0.2258, simple_loss=0.3036, pruned_loss=0.07401, over 19371.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2949, pruned_loss=0.06994, over 3834346.01 frames. ], batch size: 70, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:16:43,000 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113967.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:16:43,744 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 10:16:56,543 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=113979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:12,237 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:17:19,856 INFO [train.py:903] (3/4) Epoch 17, batch 4750, loss[loss=0.1852, simple_loss=0.2631, pruned_loss=0.05364, over 19770.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2934, pruned_loss=0.06881, over 3842479.72 frames. ], batch size: 47, lr: 4.84e-03, grad_scale: 4.0 +2023-04-02 10:17:32,731 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.836e+02 6.122e+02 7.624e+02 1.576e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-02 10:17:35,088 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114008.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:18:24,454 INFO [train.py:903] (3/4) Epoch 17, batch 4800, loss[loss=0.2122, simple_loss=0.2851, pruned_loss=0.06968, over 19577.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2942, pruned_loss=0.06966, over 3825042.51 frames. 
], batch size: 52, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:22,151 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:19:26,560 INFO [train.py:903] (3/4) Epoch 17, batch 4850, loss[loss=0.202, simple_loss=0.2756, pruned_loss=0.06421, over 18603.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2946, pruned_loss=0.06996, over 3815854.16 frames. ], batch size: 41, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:19:35,430 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.130e+02 6.675e+02 8.728e+02 1.864e+03, threshold=1.335e+03, percent-clipped=11.0 +2023-04-02 10:19:38,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1895, 2.2019, 2.4366, 3.1766, 2.1721, 2.8411, 2.6136, 2.1173], + device='cuda:3'), covar=tensor([0.4096, 0.4003, 0.1753, 0.2228, 0.4385, 0.2020, 0.4154, 0.3289], + device='cuda:3'), in_proj_covar=tensor([0.0857, 0.0906, 0.0688, 0.0913, 0.0837, 0.0774, 0.0819, 0.0756], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 10:19:52,811 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 10:19:57,430 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:09,285 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 10:20:14,655 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 10:20:19,134 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 10:20:20,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 10:20:25,234 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:20:28,430 INFO [train.py:903] (3/4) Epoch 17, batch 4900, loss[loss=0.2041, simple_loss=0.2923, pruned_loss=0.05795, over 19712.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2936, pruned_loss=0.06941, over 3808005.06 frames. ], batch size: 51, lr: 4.84e-03, grad_scale: 8.0 +2023-04-02 10:20:28,479 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 10:20:48,168 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 10:21:23,045 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0154, 1.3007, 1.7351, 1.3120, 2.7992, 3.7797, 3.4847, 4.0105], + device='cuda:3'), covar=tensor([0.1790, 0.3780, 0.3228, 0.2366, 0.0565, 0.0161, 0.0212, 0.0201], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0312, 0.0341, 0.0259, 0.0233, 0.0178, 0.0211, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:21:29,557 INFO [train.py:903] (3/4) Epoch 17, batch 4950, loss[loss=0.234, simple_loss=0.3133, pruned_loss=0.07738, over 19670.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2938, pruned_loss=0.06968, over 3795530.24 frames. 
], batch size: 60, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:21:41,941 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.073e+02 6.090e+02 7.599e+02 1.461e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-02 10:21:48,510 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 10:22:09,564 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 10:22:31,811 INFO [train.py:903] (3/4) Epoch 17, batch 5000, loss[loss=0.2067, simple_loss=0.2946, pruned_loss=0.05938, over 17976.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2944, pruned_loss=0.07015, over 3801224.40 frames. ], batch size: 83, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:22:39,615 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 10:22:50,097 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 10:23:32,992 INFO [train.py:903] (3/4) Epoch 17, batch 5050, loss[loss=0.1703, simple_loss=0.2511, pruned_loss=0.04471, over 18677.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2933, pruned_loss=0.0693, over 3796725.23 frames. ], batch size: 41, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:23:42,365 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.068e+02 6.244e+02 7.899e+02 1.430e+03, threshold=1.249e+03, percent-clipped=5.0 +2023-04-02 10:24:10,966 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 10:24:34,966 INFO [train.py:903] (3/4) Epoch 17, batch 5100, loss[loss=0.1927, simple_loss=0.2685, pruned_loss=0.05844, over 19770.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2935, pruned_loss=0.06911, over 3797772.90 frames. ], batch size: 47, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:24:37,674 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114350.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:24:44,365 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 10:24:46,825 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 10:24:52,458 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 10:25:10,325 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114375.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:14,877 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114379.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:20,129 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-04-02 10:25:36,283 INFO [train.py:903] (3/4) Epoch 17, batch 5150, loss[loss=0.2281, simple_loss=0.3215, pruned_loss=0.0673, over 19665.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2949, pruned_loss=0.06966, over 3800570.19 frames. 
], batch size: 58, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:25:44,134 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6995, 4.1532, 4.3926, 4.4463, 1.7305, 4.1096, 3.6111, 4.0727], + device='cuda:3'), covar=tensor([0.1631, 0.1042, 0.0617, 0.0627, 0.5681, 0.0882, 0.0690, 0.1178], + device='cuda:3'), in_proj_covar=tensor([0.0744, 0.0694, 0.0892, 0.0780, 0.0798, 0.0644, 0.0537, 0.0821], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 10:25:47,613 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114404.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:25:49,639 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.288e+02 6.652e+02 7.839e+02 1.735e+03, threshold=1.330e+03, percent-clipped=3.0 +2023-04-02 10:25:50,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 10:26:24,429 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 10:26:41,804 INFO [train.py:903] (3/4) Epoch 17, batch 5200, loss[loss=0.2616, simple_loss=0.327, pruned_loss=0.09808, over 12789.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2951, pruned_loss=0.07027, over 3796946.66 frames. ], batch size: 136, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:26:54,840 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 10:27:10,257 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8660, 2.4311, 2.2252, 2.6730, 2.5923, 2.3497, 2.0763, 2.9703], + device='cuda:3'), covar=tensor([0.0844, 0.1604, 0.1376, 0.1057, 0.1322, 0.0498, 0.1302, 0.0517], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0356, 0.0301, 0.0245, 0.0300, 0.0248, 0.0295, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:27:19,897 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 10:27:33,131 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=114489.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:27:37,829 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 10:27:43,639 INFO [train.py:903] (3/4) Epoch 17, batch 5250, loss[loss=0.1932, simple_loss=0.2816, pruned_loss=0.05239, over 19543.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2939, pruned_loss=0.06938, over 3797151.01 frames. ], batch size: 56, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:27:53,069 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.560e+02 4.802e+02 5.852e+02 7.465e+02 1.395e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 10:28:44,585 INFO [train.py:903] (3/4) Epoch 17, batch 5300, loss[loss=0.2041, simple_loss=0.2754, pruned_loss=0.06637, over 19733.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2921, pruned_loss=0.06827, over 3809062.82 frames. ], batch size: 46, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:28:59,071 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 10:29:31,221 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3357, 1.3736, 1.6974, 1.5517, 2.4887, 2.1449, 2.6439, 1.2034], + device='cuda:3'), covar=tensor([0.2393, 0.4321, 0.2648, 0.1874, 0.1569, 0.2083, 0.1437, 0.4188], + device='cuda:3'), in_proj_covar=tensor([0.0513, 0.0614, 0.0668, 0.0461, 0.0607, 0.0515, 0.0649, 0.0524], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 10:29:44,122 INFO [train.py:903] (3/4) Epoch 17, batch 5350, loss[loss=0.1976, simple_loss=0.2715, pruned_loss=0.06179, over 19764.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06797, over 3823678.47 frames. ], batch size: 47, lr: 4.83e-03, grad_scale: 8.0 +2023-04-02 10:29:44,501 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1789, 1.4493, 1.9477, 1.4405, 3.0441, 4.3506, 4.2486, 4.8571], + device='cuda:3'), covar=tensor([0.1689, 0.3583, 0.3096, 0.2222, 0.0625, 0.0228, 0.0179, 0.0162], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0311, 0.0340, 0.0259, 0.0232, 0.0178, 0.0212, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:29:45,505 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4573, 2.2491, 2.0470, 2.4622, 2.3385, 2.0474, 2.1020, 2.4743], + device='cuda:3'), covar=tensor([0.0887, 0.1448, 0.1307, 0.0881, 0.1227, 0.0523, 0.1146, 0.0568], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0354, 0.0301, 0.0245, 0.0300, 0.0248, 0.0295, 0.0248], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:29:51,254 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:29:54,896 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 5.171e+02 6.688e+02 9.089e+02 2.274e+03, threshold=1.338e+03, percent-clipped=9.0 +2023-04-02 10:30:19,023 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 10:30:46,226 INFO [train.py:903] (3/4) Epoch 17, batch 5400, loss[loss=0.2339, simple_loss=0.3123, pruned_loss=0.07772, over 19594.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2928, pruned_loss=0.06812, over 3833112.66 frames. ], batch size: 61, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:29,351 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4768, 1.5652, 1.7571, 1.8703, 1.3738, 1.7285, 1.8213, 1.6820], + device='cuda:3'), covar=tensor([0.3647, 0.3048, 0.1687, 0.1865, 0.3199, 0.1769, 0.4083, 0.2859], + device='cuda:3'), in_proj_covar=tensor([0.0865, 0.0915, 0.0692, 0.0922, 0.0844, 0.0779, 0.0821, 0.0761], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 10:31:29,566 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 10:31:47,169 INFO [train.py:903] (3/4) Epoch 17, batch 5450, loss[loss=0.2332, simple_loss=0.3118, pruned_loss=0.07732, over 19602.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06881, over 3823483.22 frames. 
], batch size: 61, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:31:56,199 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.503e+02 5.761e+02 7.243e+02 1.420e+03, threshold=1.152e+03, percent-clipped=1.0 +2023-04-02 10:32:31,004 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:32:47,134 INFO [train.py:903] (3/4) Epoch 17, batch 5500, loss[loss=0.2036, simple_loss=0.2758, pruned_loss=0.06569, over 19403.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2937, pruned_loss=0.06834, over 3821897.68 frames. ], batch size: 48, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:32:47,643 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3625, 1.4971, 1.6557, 1.6218, 2.2768, 2.1209, 2.3841, 0.9171], + device='cuda:3'), covar=tensor([0.2353, 0.4012, 0.2519, 0.1814, 0.1450, 0.2092, 0.1267, 0.4225], + device='cuda:3'), in_proj_covar=tensor([0.0511, 0.0612, 0.0668, 0.0461, 0.0607, 0.0515, 0.0648, 0.0523], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 10:33:10,793 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 10:33:46,750 INFO [train.py:903] (3/4) Epoch 17, batch 5550, loss[loss=0.2251, simple_loss=0.302, pruned_loss=0.0741, over 19611.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2929, pruned_loss=0.06794, over 3841507.18 frames. ], batch size: 61, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:33:54,795 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 10:33:55,924 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.950e+02 6.230e+02 7.289e+02 1.704e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 10:34:05,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1612, 2.2060, 2.3994, 3.0320, 2.1928, 2.8781, 2.5553, 2.1371], + device='cuda:3'), covar=tensor([0.4212, 0.4076, 0.1906, 0.2486, 0.4406, 0.2046, 0.4355, 0.3314], + device='cuda:3'), in_proj_covar=tensor([0.0861, 0.0911, 0.0688, 0.0917, 0.0837, 0.0775, 0.0817, 0.0758], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 10:34:39,090 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-02 10:34:41,648 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 10:34:48,921 INFO [train.py:903] (3/4) Epoch 17, batch 5600, loss[loss=0.2162, simple_loss=0.2962, pruned_loss=0.0681, over 18801.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2929, pruned_loss=0.06792, over 3826065.41 frames. 
], batch size: 74, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:03,678 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:33,269 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114885.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:39,359 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114889.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:35:50,181 INFO [train.py:903] (3/4) Epoch 17, batch 5650, loss[loss=0.2138, simple_loss=0.2906, pruned_loss=0.06851, over 19673.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2937, pruned_loss=0.06878, over 3817657.56 frames. ], batch size: 60, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:35:59,360 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.160e+02 6.498e+02 8.575e+02 1.504e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-02 10:36:35,344 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2099, 1.1586, 1.6192, 1.3059, 2.4332, 3.3145, 3.1003, 3.6010], + device='cuda:3'), covar=tensor([0.1734, 0.4803, 0.4041, 0.2200, 0.0652, 0.0219, 0.0286, 0.0294], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0312, 0.0340, 0.0259, 0.0233, 0.0178, 0.0212, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:36:36,156 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 10:36:51,140 INFO [train.py:903] (3/4) Epoch 17, batch 5700, loss[loss=0.1853, simple_loss=0.265, pruned_loss=0.0528, over 19502.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2932, pruned_loss=0.06887, over 3822713.16 frames. ], batch size: 49, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:08,346 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4242, 1.5153, 1.7557, 1.6703, 2.7122, 2.2426, 2.7364, 1.1671], + device='cuda:3'), covar=tensor([0.2391, 0.4180, 0.2672, 0.1850, 0.1373, 0.2064, 0.1370, 0.4146], + device='cuda:3'), in_proj_covar=tensor([0.0513, 0.0615, 0.0673, 0.0463, 0.0611, 0.0516, 0.0650, 0.0526], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 10:37:43,462 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4292, 1.2925, 1.2533, 1.8182, 1.4572, 1.7340, 1.8022, 1.5292], + device='cuda:3'), covar=tensor([0.0877, 0.0994, 0.1145, 0.0845, 0.0897, 0.0779, 0.0869, 0.0769], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0222, 0.0223, 0.0243, 0.0227, 0.0209, 0.0188, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 10:37:50,250 INFO [train.py:903] (3/4) Epoch 17, batch 5750, loss[loss=0.2071, simple_loss=0.2693, pruned_loss=0.07244, over 19766.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.06922, over 3811068.94 frames. ], batch size: 46, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:37:50,264 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 10:37:57,211 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. 
Duration: 27.25 +2023-04-02 10:37:59,472 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.823e+02 5.221e+02 6.429e+02 7.572e+02 1.818e+03, threshold=1.286e+03, percent-clipped=4.0 +2023-04-02 10:38:04,569 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 10:38:50,578 INFO [train.py:903] (3/4) Epoch 17, batch 5800, loss[loss=0.2117, simple_loss=0.278, pruned_loss=0.07268, over 19772.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.06836, over 3825043.08 frames. ], batch size: 47, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:38:52,860 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6874, 4.2220, 4.4566, 4.4738, 1.6371, 4.1853, 3.5986, 4.1664], + device='cuda:3'), covar=tensor([0.1604, 0.0780, 0.0602, 0.0633, 0.5887, 0.0884, 0.0698, 0.1076], + device='cuda:3'), in_proj_covar=tensor([0.0741, 0.0691, 0.0891, 0.0775, 0.0794, 0.0645, 0.0536, 0.0819], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 10:39:27,225 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:39:28,644 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1963, 1.7890, 1.6870, 1.9662, 1.7803, 1.8124, 1.5127, 2.0334], + device='cuda:3'), covar=tensor([0.0862, 0.1255, 0.1488, 0.1035, 0.1291, 0.0529, 0.1447, 0.0682], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0352, 0.0302, 0.0245, 0.0298, 0.0247, 0.0294, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:39:35,785 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-02 10:39:52,203 INFO [train.py:903] (3/4) Epoch 17, batch 5850, loss[loss=0.2222, simple_loss=0.3016, pruned_loss=0.07144, over 18253.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2932, pruned_loss=0.06862, over 3799694.74 frames. ], batch size: 83, lr: 4.82e-03, grad_scale: 8.0 +2023-04-02 10:39:59,358 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:40:01,406 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 4.663e+02 6.050e+02 7.882e+02 1.454e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 10:40:51,591 INFO [train.py:903] (3/4) Epoch 17, batch 5900, loss[loss=0.1707, simple_loss=0.2519, pruned_loss=0.0448, over 19760.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2931, pruned_loss=0.06881, over 3808883.32 frames. ], batch size: 47, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:40:55,187 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 10:40:55,630 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0753, 1.8032, 1.4878, 1.1681, 1.6314, 1.1863, 1.0604, 1.5851], + device='cuda:3'), covar=tensor([0.0832, 0.0742, 0.1010, 0.0824, 0.0504, 0.1208, 0.0606, 0.0364], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0309, 0.0328, 0.0257, 0.0245, 0.0327, 0.0290, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:41:13,992 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 10:41:45,877 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:41:51,141 INFO [train.py:903] (3/4) Epoch 17, batch 5950, loss[loss=0.2466, simple_loss=0.3171, pruned_loss=0.08803, over 17234.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2922, pruned_loss=0.06867, over 3809082.54 frames. ], batch size: 101, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:00,464 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.938e+02 6.318e+02 8.201e+02 2.090e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:42:20,345 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3468, 1.3954, 1.6785, 1.5725, 2.5771, 2.2767, 2.7553, 1.0513], + device='cuda:3'), covar=tensor([0.2413, 0.4268, 0.2663, 0.1928, 0.1455, 0.2060, 0.1381, 0.4216], + device='cuda:3'), in_proj_covar=tensor([0.0514, 0.0615, 0.0674, 0.0464, 0.0611, 0.0518, 0.0652, 0.0527], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 10:42:35,073 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115233.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:42:43,153 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6068, 4.1842, 2.7720, 3.7066, 1.0449, 4.0481, 3.9714, 4.0895], + device='cuda:3'), covar=tensor([0.0652, 0.0981, 0.1760, 0.0787, 0.3748, 0.0685, 0.0902, 0.1112], + device='cuda:3'), in_proj_covar=tensor([0.0471, 0.0388, 0.0469, 0.0336, 0.0391, 0.0406, 0.0404, 0.0433], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:42:51,742 INFO [train.py:903] (3/4) Epoch 17, batch 6000, loss[loss=0.2263, simple_loss=0.2975, pruned_loss=0.07755, over 19538.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2921, pruned_loss=0.06843, over 3798084.52 frames. ], batch size: 54, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:42:51,742 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 10:43:04,253 INFO [train.py:937] (3/4) Epoch 17, validation: loss=0.1707, simple_loss=0.2712, pruned_loss=0.03505, over 944034.00 frames. +2023-04-02 10:43:04,254 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 10:44:04,140 INFO [train.py:903] (3/4) Epoch 17, batch 6050, loss[loss=0.1957, simple_loss=0.2775, pruned_loss=0.05695, over 19769.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2933, pruned_loss=0.06909, over 3792101.85 frames. ], batch size: 54, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:44:15,951 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 5.156e+02 6.136e+02 7.598e+02 1.906e+03, threshold=1.227e+03, percent-clipped=4.0 +2023-04-02 10:44:20,981 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-04-02 10:44:22,793 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3425, 1.7734, 2.2867, 1.8456, 3.3683, 4.9105, 4.7862, 5.2611], + device='cuda:3'), covar=tensor([0.1521, 0.3250, 0.2738, 0.1926, 0.0489, 0.0146, 0.0156, 0.0158], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0314, 0.0341, 0.0261, 0.0235, 0.0179, 0.0213, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:45:03,331 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3625, 3.8174, 3.9381, 3.9404, 1.5924, 3.7270, 3.2560, 3.6907], + device='cuda:3'), covar=tensor([0.1665, 0.0790, 0.0668, 0.0781, 0.5274, 0.0853, 0.0718, 0.1105], + device='cuda:3'), in_proj_covar=tensor([0.0747, 0.0697, 0.0898, 0.0778, 0.0799, 0.0648, 0.0537, 0.0824], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 10:45:06,512 INFO [train.py:903] (3/4) Epoch 17, batch 6100, loss[loss=0.2122, simple_loss=0.2868, pruned_loss=0.06879, over 19753.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2928, pruned_loss=0.06873, over 3808746.51 frames. ], batch size: 51, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:45:06,913 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115348.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:34,913 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6441, 4.1751, 2.8409, 3.7691, 1.0329, 4.0881, 4.0680, 4.1462], + device='cuda:3'), covar=tensor([0.0579, 0.1187, 0.1812, 0.0803, 0.3970, 0.0732, 0.0793, 0.1116], + device='cuda:3'), in_proj_covar=tensor([0.0473, 0.0389, 0.0472, 0.0335, 0.0393, 0.0409, 0.0405, 0.0435], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:45:54,927 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:45:56,156 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:46:06,534 INFO [train.py:903] (3/4) Epoch 17, batch 6150, loss[loss=0.1935, simple_loss=0.2712, pruned_loss=0.05786, over 19411.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2932, pruned_loss=0.06907, over 3813276.16 frames. ], batch size: 48, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:46:15,597 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.636e+02 5.209e+02 6.440e+02 8.380e+02 1.538e+03, threshold=1.288e+03, percent-clipped=5.0 +2023-04-02 10:46:33,786 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 10:47:07,332 INFO [train.py:903] (3/4) Epoch 17, batch 6200, loss[loss=0.2015, simple_loss=0.2918, pruned_loss=0.05558, over 19648.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2935, pruned_loss=0.06928, over 3802161.53 frames. 
], batch size: 55, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:47:07,487 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115448.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:08,928 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:47:12,269 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8593, 0.9193, 0.8621, 0.7252, 0.7725, 0.7747, 0.0753, 0.2875], + device='cuda:3'), covar=tensor([0.0501, 0.0488, 0.0319, 0.0450, 0.0780, 0.0525, 0.1044, 0.0747], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0345, 0.0346, 0.0375, 0.0445, 0.0380, 0.0326, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 10:47:39,870 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:48:07,507 INFO [train.py:903] (3/4) Epoch 17, batch 6250, loss[loss=0.2054, simple_loss=0.2873, pruned_loss=0.06171, over 19688.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2938, pruned_loss=0.06913, over 3806971.75 frames. ], batch size: 53, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:48:16,577 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.690e+02 5.769e+02 7.890e+02 2.007e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-02 10:48:37,586 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 10:49:09,165 INFO [train.py:903] (3/4) Epoch 17, batch 6300, loss[loss=0.2077, simple_loss=0.2891, pruned_loss=0.06316, over 19593.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2942, pruned_loss=0.06885, over 3818495.01 frames. ], batch size: 61, lr: 4.81e-03, grad_scale: 8.0 +2023-04-02 10:49:27,741 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115563.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:12,479 INFO [train.py:903] (3/4) Epoch 17, batch 6350, loss[loss=0.1976, simple_loss=0.2821, pruned_loss=0.05653, over 19661.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2923, pruned_loss=0.0673, over 3834652.82 frames. ], batch size: 53, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:50:19,289 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 10:50:20,012 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115604.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:50:21,935 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.077e+02 8.091e+02 1.466e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 10:50:50,668 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115629.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:51:13,882 INFO [train.py:903] (3/4) Epoch 17, batch 6400, loss[loss=0.2545, simple_loss=0.332, pruned_loss=0.08848, over 19510.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2917, pruned_loss=0.06723, over 3836576.94 frames. ], batch size: 64, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:15,080 INFO [train.py:903] (3/4) Epoch 17, batch 6450, loss[loss=0.1878, simple_loss=0.2683, pruned_loss=0.05362, over 19488.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06687, over 3833207.66 frames. 
], batch size: 49, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:52:25,113 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.829e+02 5.862e+02 7.962e+02 1.327e+03, threshold=1.172e+03, percent-clipped=3.0 +2023-04-02 10:52:41,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0630, 1.2869, 1.7347, 0.9265, 2.3331, 3.0892, 2.7281, 3.2525], + device='cuda:3'), covar=tensor([0.1568, 0.3658, 0.3122, 0.2569, 0.0588, 0.0198, 0.0258, 0.0269], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0313, 0.0341, 0.0260, 0.0235, 0.0179, 0.0211, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:52:58,106 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115732.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:52:59,235 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=115733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:53:01,411 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 10:53:16,380 INFO [train.py:903] (3/4) Epoch 17, batch 6500, loss[loss=0.2096, simple_loss=0.277, pruned_loss=0.07104, over 19733.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2916, pruned_loss=0.06724, over 3835672.30 frames. ], batch size: 45, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:53:24,003 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 10:54:18,490 INFO [train.py:903] (3/4) Epoch 17, batch 6550, loss[loss=0.2777, simple_loss=0.3447, pruned_loss=0.1054, over 19655.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2929, pruned_loss=0.06781, over 3837230.08 frames. ], batch size: 58, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:54:28,764 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.723e+02 5.150e+02 6.522e+02 8.804e+02 2.234e+03, threshold=1.304e+03, percent-clipped=7.0 +2023-04-02 10:54:43,123 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:08,075 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9248, 4.5305, 3.3494, 4.0063, 2.0222, 4.3394, 4.3285, 4.4676], + device='cuda:3'), covar=tensor([0.0491, 0.0822, 0.1655, 0.0759, 0.2900, 0.0707, 0.0838, 0.1064], + device='cuda:3'), in_proj_covar=tensor([0.0477, 0.0393, 0.0475, 0.0335, 0.0397, 0.0412, 0.0406, 0.0435], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 10:55:15,231 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115844.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:18,691 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115847.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:55:19,546 INFO [train.py:903] (3/4) Epoch 17, batch 6600, loss[loss=0.2532, simple_loss=0.3247, pruned_loss=0.09089, over 19569.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2936, pruned_loss=0.06838, over 3815223.75 frames. 
], batch size: 61, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:55:19,907 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:56:19,810 INFO [train.py:903] (3/4) Epoch 17, batch 6650, loss[loss=0.2161, simple_loss=0.2989, pruned_loss=0.06669, over 19626.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2922, pruned_loss=0.06759, over 3819177.44 frames. ], batch size: 57, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:56:30,892 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.833e+02 5.946e+02 8.225e+02 1.682e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-02 10:56:35,576 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115910.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 10:57:08,368 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1261, 1.2057, 1.7169, 1.1638, 2.4130, 3.4249, 3.0930, 3.5839], + device='cuda:3'), covar=tensor([0.1617, 0.3779, 0.3134, 0.2430, 0.0593, 0.0179, 0.0213, 0.0246], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0314, 0.0341, 0.0259, 0.0235, 0.0178, 0.0210, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 10:57:21,972 INFO [train.py:903] (3/4) Epoch 17, batch 6700, loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.06613, over 19727.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2913, pruned_loss=0.06716, over 3830578.95 frames. ], batch size: 51, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:05,025 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 10:58:20,393 INFO [train.py:903] (3/4) Epoch 17, batch 6750, loss[loss=0.2309, simple_loss=0.3128, pruned_loss=0.07451, over 19657.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2916, pruned_loss=0.06771, over 3836480.28 frames. ], batch size: 55, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 10:58:31,538 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.304e+02 6.320e+02 7.514e+02 1.971e+03, threshold=1.264e+03, percent-clipped=7.0 +2023-04-02 10:58:59,781 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 10:59:17,210 INFO [train.py:903] (3/4) Epoch 17, batch 6800, loss[loss=0.2193, simple_loss=0.2906, pruned_loss=0.07395, over 19675.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2918, pruned_loss=0.06768, over 3845642.50 frames. ], batch size: 53, lr: 4.80e-03, grad_scale: 8.0 +2023-04-02 11:00:02,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 11:00:03,440 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 11:00:07,138 INFO [train.py:903] (3/4) Epoch 18, batch 0, loss[loss=0.2097, simple_loss=0.2905, pruned_loss=0.06441, over 19695.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2905, pruned_loss=0.06441, over 19695.00 frames. ], batch size: 60, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:00:07,139 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 11:00:18,780 INFO [train.py:937] (3/4) Epoch 18, validation: loss=0.1712, simple_loss=0.2722, pruned_loss=0.03505, over 944034.00 frames. 
+2023-04-02 11:00:18,781 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 11:00:32,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 11:00:51,654 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:52,780 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:00:55,572 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 4.972e+02 6.494e+02 8.085e+02 1.604e+03, threshold=1.299e+03, percent-clipped=1.0 +2023-04-02 11:01:14,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5104, 2.2743, 2.1413, 2.6772, 2.4853, 2.1238, 2.1492, 2.3346], + device='cuda:3'), covar=tensor([0.0970, 0.1560, 0.1475, 0.0994, 0.1282, 0.0518, 0.1214, 0.0741], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0353, 0.0303, 0.0245, 0.0297, 0.0246, 0.0294, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:01:15,459 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116123.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:18,718 INFO [train.py:903] (3/4) Epoch 18, batch 50, loss[loss=0.2218, simple_loss=0.3041, pruned_loss=0.06979, over 19604.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2989, pruned_loss=0.07185, over 872454.18 frames. ], batch size: 57, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:01:21,415 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:23,545 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:30,196 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:01:47,382 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-04-02 11:01:52,481 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 11:01:55,424 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-02 11:02:21,175 INFO [train.py:903] (3/4) Epoch 18, batch 100, loss[loss=0.2182, simple_loss=0.2965, pruned_loss=0.06993, over 19163.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2955, pruned_loss=0.07013, over 1524878.98 frames. ], batch size: 69, lr: 4.66e-03, grad_scale: 8.0 +2023-04-02 11:02:32,231 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 11:02:58,314 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 4.838e+02 6.090e+02 7.458e+02 2.009e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 11:03:21,606 INFO [train.py:903] (3/4) Epoch 18, batch 150, loss[loss=0.2825, simple_loss=0.3551, pruned_loss=0.105, over 18814.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2951, pruned_loss=0.06923, over 2041580.82 frames. 
], batch size: 74, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:03:56,113 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116254.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:04:20,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 11:04:21,605 INFO [train.py:903] (3/4) Epoch 18, batch 200, loss[loss=0.2295, simple_loss=0.2982, pruned_loss=0.08039, over 12655.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2957, pruned_loss=0.07039, over 2413731.51 frames. ], batch size: 136, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:01,398 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 4.750e+02 5.613e+02 7.153e+02 1.890e+03, threshold=1.123e+03, percent-clipped=2.0 +2023-04-02 11:05:24,092 INFO [train.py:903] (3/4) Epoch 18, batch 250, loss[loss=0.1654, simple_loss=0.2461, pruned_loss=0.04231, over 19738.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2941, pruned_loss=0.06908, over 2726762.80 frames. ], batch size: 46, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:05:43,851 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116341.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:06:18,099 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116369.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:06:25,436 INFO [train.py:903] (3/4) Epoch 18, batch 300, loss[loss=0.2535, simple_loss=0.3342, pruned_loss=0.08635, over 19631.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2955, pruned_loss=0.07007, over 2969494.12 frames. ], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:06:25,600 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:07:03,942 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 5.241e+02 6.705e+02 8.261e+02 1.478e+03, threshold=1.341e+03, percent-clipped=3.0 +2023-04-02 11:07:28,467 INFO [train.py:903] (3/4) Epoch 18, batch 350, loss[loss=0.2118, simple_loss=0.2889, pruned_loss=0.06732, over 19579.00 frames. ], tot_loss[loss=0.215, simple_loss=0.293, pruned_loss=0.06847, over 3150694.13 frames. ], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:07:31,286 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2330, 1.3299, 1.4170, 1.5321, 1.1602, 1.4347, 1.4836, 1.3363], + device='cuda:3'), covar=tensor([0.2764, 0.2153, 0.1308, 0.1478, 0.2375, 0.1289, 0.3058, 0.2181], + device='cuda:3'), in_proj_covar=tensor([0.0856, 0.0909, 0.0686, 0.0911, 0.0837, 0.0773, 0.0813, 0.0754], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 11:07:33,269 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 11:07:44,264 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-02 11:08:16,153 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116464.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:19,446 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116467.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:29,497 INFO [train.py:903] (3/4) Epoch 18, batch 400, loss[loss=0.2105, simple_loss=0.2875, pruned_loss=0.06671, over 19765.00 frames. 
], tot_loss[loss=0.2151, simple_loss=0.293, pruned_loss=0.06855, over 3310424.77 frames. ], batch size: 54, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:08:31,857 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116478.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:08:48,759 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116491.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:08,770 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.874e+02 5.859e+02 7.069e+02 1.370e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:09:27,691 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116523.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:09:31,055 INFO [train.py:903] (3/4) Epoch 18, batch 450, loss[loss=0.2322, simple_loss=0.3137, pruned_loss=0.07537, over 17510.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2926, pruned_loss=0.06801, over 3422891.98 frames. ], batch size: 101, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:06,943 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 11:10:08,080 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 11:10:36,072 INFO [train.py:903] (3/4) Epoch 18, batch 500, loss[loss=0.214, simple_loss=0.2904, pruned_loss=0.06882, over 19784.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2921, pruned_loss=0.06756, over 3519583.78 frames. ], batch size: 54, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:10:43,278 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116582.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:10:57,199 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116593.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:11:13,493 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 5.090e+02 6.291e+02 8.243e+02 1.843e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-02 11:11:38,155 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116625.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 11:11:38,804 INFO [train.py:903] (3/4) Epoch 18, batch 550, loss[loss=0.2126, simple_loss=0.2895, pruned_loss=0.0679, over 19601.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2929, pruned_loss=0.06812, over 3590849.76 frames. ], batch size: 50, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:11:51,138 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:12:07,798 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116650.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 11:12:07,846 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2558, 1.3698, 1.6941, 1.5067, 2.1529, 1.8518, 2.0619, 0.8856], + device='cuda:3'), covar=tensor([0.2601, 0.4304, 0.2519, 0.1997, 0.1680, 0.2468, 0.1722, 0.4565], + device='cuda:3'), in_proj_covar=tensor([0.0512, 0.0612, 0.0671, 0.0463, 0.0611, 0.0516, 0.0648, 0.0522], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 11:12:41,002 INFO [train.py:903] (3/4) Epoch 18, batch 600, loss[loss=0.2282, simple_loss=0.3088, pruned_loss=0.07381, over 18827.00 frames. 
], tot_loss[loss=0.2148, simple_loss=0.2928, pruned_loss=0.06837, over 3642412.37 frames. ], batch size: 74, lr: 4.65e-03, grad_scale: 8.0 +2023-04-02 11:12:51,638 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116685.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:18,881 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.880e+02 6.214e+02 8.095e+02 1.532e+03, threshold=1.243e+03, percent-clipped=4.0 +2023-04-02 11:13:21,159 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 11:13:28,763 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9763, 1.8145, 1.7139, 2.0750, 1.8136, 1.8348, 1.6809, 1.9456], + device='cuda:3'), covar=tensor([0.0969, 0.1425, 0.1357, 0.0938, 0.1264, 0.0503, 0.1215, 0.0704], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0356, 0.0302, 0.0247, 0.0299, 0.0247, 0.0296, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:13:37,825 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116722.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:13:42,029 INFO [train.py:903] (3/4) Epoch 18, batch 650, loss[loss=0.2859, simple_loss=0.3594, pruned_loss=0.1062, over 18844.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2934, pruned_loss=0.06882, over 3676036.56 frames. ], batch size: 74, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:13:55,007 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0222, 1.3185, 1.5055, 1.5107, 2.6524, 1.1536, 2.2275, 2.9821], + device='cuda:3'), covar=tensor([0.0615, 0.2670, 0.2682, 0.1765, 0.0724, 0.2298, 0.1275, 0.0368], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0354, 0.0374, 0.0340, 0.0362, 0.0345, 0.0359, 0.0384], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:14:08,750 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:34,416 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.35 vs. limit=5.0 +2023-04-02 11:14:38,823 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116772.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:14:43,006 INFO [train.py:903] (3/4) Epoch 18, batch 700, loss[loss=0.2216, simple_loss=0.2999, pruned_loss=0.07166, over 17387.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2931, pruned_loss=0.06892, over 3701861.59 frames. ], batch size: 101, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:15:15,683 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:23,659 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.148e+02 4.855e+02 5.777e+02 7.000e+02 1.472e+03, threshold=1.155e+03, percent-clipped=1.0 +2023-04-02 11:15:24,934 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:27,850 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 11:15:39,746 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:15:47,502 INFO [train.py:903] (3/4) Epoch 18, batch 750, loss[loss=0.1632, simple_loss=0.2479, pruned_loss=0.03925, over 19586.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2923, pruned_loss=0.0683, over 3733071.32 frames. ], batch size: 52, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:16:03,714 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:16,556 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116849.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:35,164 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116863.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:39,890 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116867.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:50,651 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116874.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:16:52,571 INFO [train.py:903] (3/4) Epoch 18, batch 800, loss[loss=0.1866, simple_loss=0.2716, pruned_loss=0.05081, over 19755.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2922, pruned_loss=0.0679, over 3766659.75 frames. ], batch size: 51, lr: 4.64e-03, grad_scale: 8.0 +2023-04-02 11:17:06,580 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 11:17:32,326 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.513e+02 6.326e+02 7.669e+02 1.889e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-02 11:17:51,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116923.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:17:55,033 INFO [train.py:903] (3/4) Epoch 18, batch 850, loss[loss=0.2167, simple_loss=0.3008, pruned_loss=0.06633, over 19670.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.0677, over 3783302.40 frames. ], batch size: 59, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:18:12,871 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0972, 1.2228, 1.5527, 0.6156, 2.0274, 2.4991, 2.1386, 2.6143], + device='cuda:3'), covar=tensor([0.1390, 0.3701, 0.3143, 0.2499, 0.0540, 0.0247, 0.0335, 0.0324], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0312, 0.0339, 0.0256, 0.0231, 0.0177, 0.0209, 0.0236], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 11:18:48,135 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 11:18:56,389 INFO [train.py:903] (3/4) Epoch 18, batch 900, loss[loss=0.2087, simple_loss=0.2932, pruned_loss=0.06214, over 19586.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2936, pruned_loss=0.06868, over 3795415.29 frames. 
], batch size: 61, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:19:01,375 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=116980.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:04,004 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:19:38,590 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 4.671e+02 5.637e+02 7.276e+02 1.422e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 11:20:00,615 INFO [train.py:903] (3/4) Epoch 18, batch 950, loss[loss=0.2102, simple_loss=0.2943, pruned_loss=0.06308, over 19647.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2926, pruned_loss=0.06753, over 3802708.03 frames. ], batch size: 58, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:20:02,906 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 11:20:38,650 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:20:51,139 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:03,205 INFO [train.py:903] (3/4) Epoch 18, batch 1000, loss[loss=0.216, simple_loss=0.2941, pruned_loss=0.06895, over 19771.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.293, pruned_loss=0.06804, over 3798917.32 frames. ], batch size: 54, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:21:11,176 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117081.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:27,370 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117095.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:21:43,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.648e+02 4.999e+02 6.181e+02 7.829e+02 2.221e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-02 11:21:56,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 11:22:07,201 INFO [train.py:903] (3/4) Epoch 18, batch 1050, loss[loss=0.2715, simple_loss=0.3351, pruned_loss=0.104, over 19274.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2949, pruned_loss=0.06888, over 3811332.49 frames. ], batch size: 66, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:22:40,249 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 11:22:55,006 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117164.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:09,065 INFO [train.py:903] (3/4) Epoch 18, batch 1100, loss[loss=0.2758, simple_loss=0.3337, pruned_loss=0.109, over 13122.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.294, pruned_loss=0.06844, over 3821253.97 frames. 
], batch size: 136, lr: 4.64e-03, grad_scale: 4.0 +2023-04-02 11:23:13,182 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:15,446 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117181.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:25,752 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2650, 2.1053, 1.9246, 1.8025, 1.6063, 1.7827, 0.7147, 1.3129], + device='cuda:3'), covar=tensor([0.0505, 0.0593, 0.0514, 0.0754, 0.1104, 0.0947, 0.1185, 0.0971], + device='cuda:3'), in_proj_covar=tensor([0.0351, 0.0347, 0.0347, 0.0376, 0.0449, 0.0382, 0.0328, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 11:23:44,528 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117204.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:23:49,510 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.961e+02 5.150e+02 6.225e+02 7.900e+02 1.283e+03, threshold=1.245e+03, percent-clipped=2.0 +2023-04-02 11:24:11,116 INFO [train.py:903] (3/4) Epoch 18, batch 1150, loss[loss=0.1998, simple_loss=0.2829, pruned_loss=0.05836, over 19674.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2933, pruned_loss=0.06809, over 3833508.98 frames. ], batch size: 53, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:24:27,286 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117238.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:28,297 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117239.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:24:39,746 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4665, 2.2335, 1.7190, 2.0533, 0.8758, 2.1950, 2.1322, 2.1867], + device='cuda:3'), covar=tensor([0.1201, 0.1234, 0.1771, 0.0846, 0.2673, 0.0966, 0.1044, 0.1278], + device='cuda:3'), in_proj_covar=tensor([0.0479, 0.0393, 0.0475, 0.0337, 0.0395, 0.0413, 0.0406, 0.0436], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:24:58,359 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117263.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:14,039 INFO [train.py:903] (3/4) Epoch 18, batch 1200, loss[loss=0.1939, simple_loss=0.2759, pruned_loss=0.05598, over 19596.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.293, pruned_loss=0.06814, over 3829878.41 frames. ], batch size: 52, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:25:19,010 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:25:31,276 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7674, 1.4948, 1.4207, 1.7881, 1.4746, 1.5709, 1.4569, 1.6860], + device='cuda:3'), covar=tensor([0.1038, 0.1310, 0.1453, 0.0937, 0.1213, 0.0575, 0.1313, 0.0780], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0248, 0.0299, 0.0247, 0.0295, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:25:50,718 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-02 11:25:54,166 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 4.775e+02 5.862e+02 7.562e+02 1.280e+03, threshold=1.172e+03, percent-clipped=1.0 +2023-04-02 11:26:12,980 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-02 11:26:18,172 INFO [train.py:903] (3/4) Epoch 18, batch 1250, loss[loss=0.1915, simple_loss=0.2698, pruned_loss=0.05655, over 19578.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.293, pruned_loss=0.06829, over 3823232.22 frames. ], batch size: 52, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:26:43,761 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:49,497 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117351.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:26:58,440 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2446, 2.1017, 1.9567, 1.8392, 1.5604, 1.7899, 0.5834, 1.1401], + device='cuda:3'), covar=tensor([0.0522, 0.0576, 0.0402, 0.0671, 0.1104, 0.0798, 0.1155, 0.0933], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0347, 0.0348, 0.0376, 0.0449, 0.0381, 0.0328, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 11:27:20,894 INFO [train.py:903] (3/4) Epoch 18, batch 1300, loss[loss=0.2261, simple_loss=0.3049, pruned_loss=0.0736, over 17302.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.294, pruned_loss=0.06876, over 3812782.05 frames. ], batch size: 101, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:27:21,304 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:27:48,194 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6771, 1.6571, 1.6634, 2.3333, 1.5894, 2.1253, 2.1347, 1.9060], + device='cuda:3'), covar=tensor([0.0886, 0.0931, 0.1002, 0.0727, 0.0931, 0.0717, 0.0828, 0.0696], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0221, 0.0223, 0.0240, 0.0226, 0.0209, 0.0186, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 11:28:01,376 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 5.418e+02 6.594e+02 8.357e+02 1.516e+03, threshold=1.319e+03, percent-clipped=5.0 +2023-04-02 11:28:22,249 INFO [train.py:903] (3/4) Epoch 18, batch 1350, loss[loss=0.2436, simple_loss=0.3105, pruned_loss=0.0884, over 19533.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2939, pruned_loss=0.06868, over 3823266.12 frames. ], batch size: 54, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:28:37,217 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:07,761 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117462.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:29:21,700 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-02 11:29:24,518 INFO [train.py:903] (3/4) Epoch 18, batch 1400, loss[loss=0.2152, simple_loss=0.292, pruned_loss=0.06925, over 19752.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2948, pruned_loss=0.0692, over 3830289.79 frames. 
], batch size: 51, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:30:04,548 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.042e+02 5.443e+02 6.741e+02 8.791e+02 2.167e+03, threshold=1.348e+03, percent-clipped=5.0 +2023-04-02 11:30:28,246 INFO [train.py:903] (3/4) Epoch 18, batch 1450, loss[loss=0.1967, simple_loss=0.2852, pruned_loss=0.05414, over 19622.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.295, pruned_loss=0.06941, over 3809314.27 frames. ], batch size: 61, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:30:29,437 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 11:30:40,064 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117535.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:11,240 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117560.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:30,927 INFO [train.py:903] (3/4) Epoch 18, batch 1500, loss[loss=0.2486, simple_loss=0.3182, pruned_loss=0.0895, over 19636.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2951, pruned_loss=0.06968, over 3812567.88 frames. ], batch size: 58, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:31:39,057 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:31:53,561 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0 +2023-04-02 11:32:11,893 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.657e+02 6.062e+02 7.787e+02 1.498e+03, threshold=1.212e+03, percent-clipped=2.0 +2023-04-02 11:32:32,137 INFO [train.py:903] (3/4) Epoch 18, batch 1550, loss[loss=0.2543, simple_loss=0.3305, pruned_loss=0.08906, over 19518.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2948, pruned_loss=0.06974, over 3820107.02 frames. ], batch size: 54, lr: 4.63e-03, grad_scale: 4.0 +2023-04-02 11:33:34,528 INFO [train.py:903] (3/4) Epoch 18, batch 1600, loss[loss=0.1749, simple_loss=0.2543, pruned_loss=0.04773, over 19773.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.294, pruned_loss=0.0691, over 3822028.29 frames. ], batch size: 49, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:33:54,974 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=117691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:01,911 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 11:34:03,402 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117698.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:34:15,613 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.549e+02 4.964e+02 5.954e+02 7.051e+02 1.393e+03, threshold=1.191e+03, percent-clipped=4.0 +2023-04-02 11:34:37,826 INFO [train.py:903] (3/4) Epoch 18, batch 1650, loss[loss=0.2007, simple_loss=0.2644, pruned_loss=0.06852, over 19078.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2932, pruned_loss=0.06827, over 3834841.76 frames. 
], batch size: 42, lr: 4.63e-03, grad_scale: 8.0 +2023-04-02 11:34:44,904 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9487, 1.1790, 1.5426, 0.6575, 1.9944, 2.3542, 2.0600, 2.4672], + device='cuda:3'), covar=tensor([0.1568, 0.3663, 0.3162, 0.2704, 0.0685, 0.0312, 0.0344, 0.0389], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0309, 0.0338, 0.0257, 0.0230, 0.0177, 0.0209, 0.0235], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 11:35:04,237 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3303, 2.3680, 2.5658, 3.2502, 2.3172, 3.0851, 2.6232, 2.4226], + device='cuda:3'), covar=tensor([0.4115, 0.4034, 0.1672, 0.2293, 0.4366, 0.1956, 0.4486, 0.3090], + device='cuda:3'), in_proj_covar=tensor([0.0858, 0.0911, 0.0689, 0.0910, 0.0837, 0.0775, 0.0818, 0.0757], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 11:35:39,482 INFO [train.py:903] (3/4) Epoch 18, batch 1700, loss[loss=0.2149, simple_loss=0.2947, pruned_loss=0.06757, over 19544.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.293, pruned_loss=0.06814, over 3831284.50 frames. ], batch size: 56, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:36:16,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:36:19,945 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.954e+02 5.098e+02 6.258e+02 7.253e+02 1.524e+03, threshold=1.252e+03, percent-clipped=2.0 +2023-04-02 11:36:21,118 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 11:36:40,234 INFO [train.py:903] (3/4) Epoch 18, batch 1750, loss[loss=0.1952, simple_loss=0.279, pruned_loss=0.05575, over 19484.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.06768, over 3837288.24 frames. ], batch size: 49, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:37:14,809 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 11:37:43,036 INFO [train.py:903] (3/4) Epoch 18, batch 1800, loss[loss=0.1755, simple_loss=0.2664, pruned_loss=0.04227, over 19737.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.06729, over 3847498.86 frames. ], batch size: 51, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:38:23,379 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 4.803e+02 6.041e+02 7.952e+02 1.877e+03, threshold=1.208e+03, percent-clipped=3.0 +2023-04-02 11:38:42,054 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 11:38:45,247 INFO [train.py:903] (3/4) Epoch 18, batch 1850, loss[loss=0.1716, simple_loss=0.2451, pruned_loss=0.04904, over 19369.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2911, pruned_loss=0.06752, over 3838044.90 frames. ], batch size: 47, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:18,895 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 11:39:19,318 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117954.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:39:47,632 INFO [train.py:903] (3/4) Epoch 18, batch 1900, loss[loss=0.2245, simple_loss=0.3069, pruned_loss=0.07103, over 19782.00 frames. 
], tot_loss[loss=0.2133, simple_loss=0.2913, pruned_loss=0.06764, over 3820498.95 frames. ], batch size: 56, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:39:51,614 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117979.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:40:03,238 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 11:40:07,848 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 11:40:27,943 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.102e+02 5.120e+02 6.309e+02 7.973e+02 1.539e+03, threshold=1.262e+03, percent-clipped=6.0 +2023-04-02 11:40:34,564 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 11:40:48,202 INFO [train.py:903] (3/4) Epoch 18, batch 1950, loss[loss=0.2064, simple_loss=0.2792, pruned_loss=0.06687, over 19486.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2931, pruned_loss=0.06903, over 3809087.91 frames. ], batch size: 49, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:41:28,946 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118058.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:33,474 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:41:49,952 INFO [train.py:903] (3/4) Epoch 18, batch 2000, loss[loss=0.2455, simple_loss=0.316, pruned_loss=0.08751, over 17310.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2935, pruned_loss=0.06957, over 3795869.43 frames. ], batch size: 101, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:42:05,400 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:42:11,297 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.61 vs. limit=5.0 +2023-04-02 11:42:31,795 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 5.367e+02 6.632e+02 8.072e+02 1.503e+03, threshold=1.326e+03, percent-clipped=5.0 +2023-04-02 11:42:46,431 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 11:42:54,015 INFO [train.py:903] (3/4) Epoch 18, batch 2050, loss[loss=0.2072, simple_loss=0.2888, pruned_loss=0.06287, over 19777.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2933, pruned_loss=0.06906, over 3799177.45 frames. ], batch size: 54, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:43:06,254 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 11:43:07,433 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 11:43:26,101 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 11:43:55,356 INFO [train.py:903] (3/4) Epoch 18, batch 2100, loss[loss=0.2445, simple_loss=0.3219, pruned_loss=0.08358, over 19660.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2939, pruned_loss=0.06913, over 3807551.81 frames. 
], batch size: 60, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:44:07,021 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:44:21,260 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 11:44:36,112 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 5.186e+02 6.689e+02 8.493e+02 1.656e+03, threshold=1.338e+03, percent-clipped=6.0 +2023-04-02 11:44:44,889 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 11:44:56,295 INFO [train.py:903] (3/4) Epoch 18, batch 2150, loss[loss=0.2311, simple_loss=0.3007, pruned_loss=0.08077, over 19578.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2931, pruned_loss=0.0684, over 3821198.12 frames. ], batch size: 52, lr: 4.62e-03, grad_scale: 8.0 +2023-04-02 11:45:39,512 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6021, 1.2203, 1.2642, 1.5041, 1.1479, 1.4090, 1.2380, 1.4629], + device='cuda:3'), covar=tensor([0.1151, 0.1254, 0.1619, 0.1016, 0.1270, 0.0601, 0.1492, 0.0830], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0350, 0.0299, 0.0245, 0.0296, 0.0244, 0.0292, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:45:57,805 INFO [train.py:903] (3/4) Epoch 18, batch 2200, loss[loss=0.2163, simple_loss=0.2981, pruned_loss=0.0673, over 18678.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2938, pruned_loss=0.06884, over 3827987.17 frames. ], batch size: 74, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:46:13,121 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:37,341 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:46:39,292 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 5.366e+02 7.280e+02 9.450e+02 2.114e+03, threshold=1.456e+03, percent-clipped=6.0 +2023-04-02 11:47:01,193 INFO [train.py:903] (3/4) Epoch 18, batch 2250, loss[loss=0.2244, simple_loss=0.3002, pruned_loss=0.0743, over 19685.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2953, pruned_loss=0.06949, over 3842357.27 frames. ], batch size: 59, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:47:27,313 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:47:47,809 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6502, 4.2304, 2.6781, 3.8198, 0.8779, 4.0788, 4.0628, 4.1140], + device='cuda:3'), covar=tensor([0.0609, 0.1115, 0.2037, 0.0785, 0.4225, 0.0701, 0.0844, 0.1012], + device='cuda:3'), in_proj_covar=tensor([0.0478, 0.0395, 0.0479, 0.0340, 0.0399, 0.0416, 0.0411, 0.0441], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:48:02,750 INFO [train.py:903] (3/4) Epoch 18, batch 2300, loss[loss=0.201, simple_loss=0.2759, pruned_loss=0.06307, over 19731.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2944, pruned_loss=0.06919, over 3840369.35 frames. 
], batch size: 51, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:48:15,082 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 11:48:22,177 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8859, 1.4182, 1.6122, 1.5282, 2.6550, 1.2730, 2.2432, 2.8112], + device='cuda:3'), covar=tensor([0.0538, 0.2356, 0.2260, 0.1684, 0.0624, 0.2068, 0.1864, 0.0451], + device='cuda:3'), in_proj_covar=tensor([0.0398, 0.0359, 0.0377, 0.0344, 0.0367, 0.0347, 0.0367, 0.0388], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:48:35,792 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:48:44,940 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.253e+02 6.243e+02 7.561e+02 1.558e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-04-02 11:49:05,625 INFO [train.py:903] (3/4) Epoch 18, batch 2350, loss[loss=0.2172, simple_loss=0.2993, pruned_loss=0.06756, over 19673.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2938, pruned_loss=0.06887, over 3833930.12 frames. ], batch size: 60, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:49:12,834 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4350, 2.2019, 1.7290, 1.4914, 2.0802, 1.3561, 1.3951, 1.8461], + device='cuda:3'), covar=tensor([0.0913, 0.0777, 0.0985, 0.0838, 0.0497, 0.1211, 0.0729, 0.0479], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0314, 0.0336, 0.0262, 0.0247, 0.0334, 0.0293, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:49:46,248 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 11:50:02,118 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 11:50:06,554 INFO [train.py:903] (3/4) Epoch 18, batch 2400, loss[loss=0.1813, simple_loss=0.2678, pruned_loss=0.04742, over 19757.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2942, pruned_loss=0.06944, over 3812009.42 frames. ], batch size: 54, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:50:48,686 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.821e+02 5.593e+02 7.730e+02 1.750e+03, threshold=1.119e+03, percent-clipped=3.0 +2023-04-02 11:50:58,533 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118517.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:10,676 INFO [train.py:903] (3/4) Epoch 18, batch 2450, loss[loss=0.2559, simple_loss=0.3261, pruned_loss=0.09287, over 13530.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2931, pruned_loss=0.06845, over 3812807.65 frames. 
], batch size: 137, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:51:16,262 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118530.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:51:57,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4290, 1.5005, 1.9036, 1.5271, 2.8441, 3.6560, 3.4227, 3.8340], + device='cuda:3'), covar=tensor([0.1564, 0.3522, 0.3031, 0.2302, 0.0589, 0.0191, 0.0190, 0.0241], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0310, 0.0340, 0.0257, 0.0232, 0.0178, 0.0210, 0.0237], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 11:52:12,593 INFO [train.py:903] (3/4) Epoch 18, batch 2500, loss[loss=0.2231, simple_loss=0.3079, pruned_loss=0.06919, over 19700.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2928, pruned_loss=0.06789, over 3817554.71 frames. ], batch size: 59, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:52:53,545 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 5.081e+02 6.040e+02 7.266e+02 1.380e+03, threshold=1.208e+03, percent-clipped=1.0 +2023-04-02 11:53:13,672 INFO [train.py:903] (3/4) Epoch 18, batch 2550, loss[loss=0.2385, simple_loss=0.316, pruned_loss=0.08048, over 19599.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2937, pruned_loss=0.06847, over 3821731.65 frames. ], batch size: 57, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:53:19,829 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118631.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:37,342 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-02 11:53:37,914 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118645.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:43,519 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:53:45,538 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:06,484 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 11:54:15,889 INFO [train.py:903] (3/4) Epoch 18, batch 2600, loss[loss=0.1986, simple_loss=0.2862, pruned_loss=0.05552, over 19774.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2932, pruned_loss=0.06862, over 3823353.63 frames. ], batch size: 54, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:54:35,487 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:54:56,094 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.742e+02 5.749e+02 7.114e+02 1.367e+03, threshold=1.150e+03, percent-clipped=3.0 +2023-04-02 11:55:16,572 INFO [train.py:903] (3/4) Epoch 18, batch 2650, loss[loss=0.2191, simple_loss=0.3022, pruned_loss=0.06798, over 18173.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2941, pruned_loss=0.06898, over 3818760.30 frames. ], batch size: 83, lr: 4.61e-03, grad_scale: 8.0 +2023-04-02 11:55:33,742 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-02 11:55:42,205 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:06,207 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:15,267 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:18,245 INFO [train.py:903] (3/4) Epoch 18, batch 2700, loss[loss=0.1944, simple_loss=0.2682, pruned_loss=0.06033, over 19367.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2928, pruned_loss=0.06822, over 3823109.45 frames. ], batch size: 47, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:56:44,803 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118798.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:49,107 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.88 vs. limit=5.0 +2023-04-02 11:56:55,808 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:56:59,035 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.246e+02 5.095e+02 6.154e+02 8.195e+02 1.746e+03, threshold=1.231e+03, percent-clipped=5.0 +2023-04-02 11:57:05,253 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1960, 5.5787, 2.9670, 4.9145, 0.9661, 5.7115, 5.6211, 5.6426], + device='cuda:3'), covar=tensor([0.0378, 0.0749, 0.1831, 0.0652, 0.4105, 0.0490, 0.0724, 0.0913], + device='cuda:3'), in_proj_covar=tensor([0.0483, 0.0394, 0.0480, 0.0341, 0.0402, 0.0419, 0.0413, 0.0445], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:57:06,522 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3475, 1.2800, 0.9647, 1.2125, 1.1327, 1.0633, 0.9478, 1.1648], + device='cuda:3'), covar=tensor([0.1217, 0.1229, 0.1861, 0.1095, 0.1283, 0.1130, 0.1844, 0.1104], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0353, 0.0299, 0.0248, 0.0298, 0.0246, 0.0295, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:57:20,473 INFO [train.py:903] (3/4) Epoch 18, batch 2750, loss[loss=0.2399, simple_loss=0.3222, pruned_loss=0.07885, over 18806.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.293, pruned_loss=0.06826, over 3817731.58 frames. ], batch size: 74, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:15,368 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4380, 1.4843, 1.8422, 1.6132, 3.1187, 2.3503, 3.3212, 1.5688], + device='cuda:3'), covar=tensor([0.2572, 0.4445, 0.2784, 0.2066, 0.1442, 0.2234, 0.1450, 0.4166], + device='cuda:3'), in_proj_covar=tensor([0.0519, 0.0622, 0.0679, 0.0468, 0.0615, 0.0521, 0.0655, 0.0530], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 11:58:23,105 INFO [train.py:903] (3/4) Epoch 18, batch 2800, loss[loss=0.2086, simple_loss=0.293, pruned_loss=0.06212, over 19520.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2934, pruned_loss=0.06854, over 3830056.24 frames. 
], batch size: 54, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:58:41,681 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2218, 1.2845, 1.2738, 1.0889, 1.0748, 1.1091, 0.0442, 0.3411], + device='cuda:3'), covar=tensor([0.0634, 0.0613, 0.0384, 0.0521, 0.1258, 0.0594, 0.1226, 0.1039], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0348, 0.0347, 0.0374, 0.0448, 0.0380, 0.0329, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 11:58:44,038 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6751, 1.3760, 1.5281, 1.6172, 3.2724, 1.2923, 2.5897, 3.6616], + device='cuda:3'), covar=tensor([0.0426, 0.2676, 0.2684, 0.1790, 0.0649, 0.2278, 0.1037, 0.0232], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0357, 0.0378, 0.0342, 0.0365, 0.0345, 0.0366, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 11:58:54,250 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 11:59:03,985 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.804e+02 6.148e+02 8.494e+02 1.418e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 11:59:24,481 INFO [train.py:903] (3/4) Epoch 18, batch 2850, loss[loss=0.2478, simple_loss=0.3235, pruned_loss=0.08604, over 19394.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2927, pruned_loss=0.06832, over 3825739.93 frames. ], batch size: 70, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 11:59:24,957 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118926.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:22,866 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 12:00:26,007 INFO [train.py:903] (3/4) Epoch 18, batch 2900, loss[loss=0.22, simple_loss=0.3075, pruned_loss=0.06625, over 19720.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2912, pruned_loss=0.06717, over 3820275.54 frames. ], batch size: 63, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:00:27,569 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1428, 2.8793, 2.1845, 2.2696, 2.0524, 2.4894, 1.0409, 2.0932], + device='cuda:3'), covar=tensor([0.0637, 0.0543, 0.0626, 0.0964, 0.0979, 0.0976, 0.1222, 0.0948], + device='cuda:3'), in_proj_covar=tensor([0.0351, 0.0349, 0.0348, 0.0375, 0.0449, 0.0381, 0.0330, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:00:46,052 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=118993.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:54,396 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-04-02 12:00:54,945 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3681, 1.4833, 1.9975, 1.7032, 2.7433, 2.4378, 3.1038, 1.5384], + device='cuda:3'), covar=tensor([0.2774, 0.4706, 0.2867, 0.2128, 0.2070, 0.2461, 0.1996, 0.4514], + device='cuda:3'), in_proj_covar=tensor([0.0520, 0.0625, 0.0682, 0.0469, 0.0618, 0.0524, 0.0659, 0.0533], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 12:00:56,064 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119002.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:00:58,541 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 12:01:05,223 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 5.127e+02 5.908e+02 8.371e+02 2.467e+03, threshold=1.182e+03, percent-clipped=10.0 +2023-04-02 12:01:21,099 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:25,122 INFO [train.py:903] (3/4) Epoch 18, batch 2950, loss[loss=0.1668, simple_loss=0.2539, pruned_loss=0.03984, over 19849.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06682, over 3835217.46 frames. ], batch size: 52, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:01:26,625 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:01:50,472 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:08,484 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:24,526 INFO [train.py:903] (3/4) Epoch 18, batch 3000, loss[loss=0.243, simple_loss=0.3159, pruned_loss=0.08499, over 19092.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2908, pruned_loss=0.06718, over 3830281.27 frames. ], batch size: 75, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:02:24,526 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 12:02:37,004 INFO [train.py:937] (3/4) Epoch 18, validation: loss=0.1707, simple_loss=0.2711, pruned_loss=0.03521, over 944034.00 frames. +2023-04-02 12:02:37,005 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 12:02:40,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 12:02:50,811 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:02:57,672 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,132 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:17,948 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 5.504e+02 6.538e+02 8.295e+02 4.074e+03, threshold=1.308e+03, percent-clipped=8.0 +2023-04-02 12:03:20,394 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119111.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:03:37,844 INFO [train.py:903] (3/4) Epoch 18, batch 3050, loss[loss=0.2291, simple_loss=0.3107, pruned_loss=0.07372, over 19648.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2911, pruned_loss=0.06743, over 3824836.61 frames. ], batch size: 58, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:04:37,433 INFO [train.py:903] (3/4) Epoch 18, batch 3100, loss[loss=0.1959, simple_loss=0.2869, pruned_loss=0.05247, over 17972.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2918, pruned_loss=0.06773, over 3823922.63 frames. ], batch size: 83, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:05:18,223 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.828e+02 5.979e+02 7.400e+02 1.693e+03, threshold=1.196e+03, percent-clipped=2.0 +2023-04-02 12:05:39,402 INFO [train.py:903] (3/4) Epoch 18, batch 3150, loss[loss=0.2273, simple_loss=0.3057, pruned_loss=0.07441, over 19705.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2913, pruned_loss=0.06753, over 3827837.31 frames. ], batch size: 59, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:06:04,649 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 12:06:06,960 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 12:06:39,660 INFO [train.py:903] (3/4) Epoch 18, batch 3200, loss[loss=0.179, simple_loss=0.2687, pruned_loss=0.04466, over 19770.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.0677, over 3827387.21 frames. ], batch size: 54, lr: 4.60e-03, grad_scale: 8.0 +2023-04-02 12:07:09,083 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1587, 2.0809, 1.8588, 1.6777, 1.6163, 1.7191, 0.5210, 0.9977], + device='cuda:3'), covar=tensor([0.0513, 0.0515, 0.0422, 0.0675, 0.1068, 0.0796, 0.1131, 0.0967], + device='cuda:3'), in_proj_covar=tensor([0.0349, 0.0345, 0.0345, 0.0371, 0.0446, 0.0378, 0.0326, 0.0333], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:07:18,202 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.710e+02 5.964e+02 8.137e+02 2.705e+03, threshold=1.193e+03, percent-clipped=4.0 +2023-04-02 12:07:39,012 INFO [train.py:903] (3/4) Epoch 18, batch 3250, loss[loss=0.2637, simple_loss=0.324, pruned_loss=0.1017, over 13295.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2926, pruned_loss=0.06846, over 3817592.10 frames. 
], batch size: 136, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:24,488 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:08:37,659 INFO [train.py:903] (3/4) Epoch 18, batch 3300, loss[loss=0.202, simple_loss=0.2793, pruned_loss=0.06234, over 19625.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06814, over 3831347.57 frames. ], batch size: 50, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:08:42,299 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 12:08:50,158 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3264, 3.7214, 4.0538, 4.1549, 1.6725, 3.8556, 3.2577, 3.5065], + device='cuda:3'), covar=tensor([0.2380, 0.1541, 0.1017, 0.1179, 0.7049, 0.1726, 0.1221, 0.2053], + device='cuda:3'), in_proj_covar=tensor([0.0758, 0.0701, 0.0907, 0.0790, 0.0808, 0.0662, 0.0549, 0.0841], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 12:08:53,576 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:09:17,496 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 5.369e+02 6.623e+02 8.148e+02 1.799e+03, threshold=1.325e+03, percent-clipped=5.0 +2023-04-02 12:09:37,819 INFO [train.py:903] (3/4) Epoch 18, batch 3350, loss[loss=0.1852, simple_loss=0.2598, pruned_loss=0.05526, over 19734.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2926, pruned_loss=0.0681, over 3848244.57 frames. ], batch size: 45, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:09:39,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7642, 4.3048, 4.4905, 4.5139, 1.6026, 4.2076, 3.6760, 4.2003], + device='cuda:3'), covar=tensor([0.1542, 0.0686, 0.0582, 0.0619, 0.5882, 0.0735, 0.0610, 0.1160], + device='cuda:3'), in_proj_covar=tensor([0.0756, 0.0698, 0.0903, 0.0787, 0.0805, 0.0661, 0.0547, 0.0838], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 12:09:46,648 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3697, 1.5172, 1.7293, 1.5785, 2.7962, 1.3627, 2.4927, 3.2818], + device='cuda:3'), covar=tensor([0.0713, 0.3071, 0.2836, 0.2226, 0.1163, 0.2692, 0.1458, 0.0424], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0358, 0.0377, 0.0343, 0.0366, 0.0347, 0.0368, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:09:50,673 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119437.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:11,914 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:10:37,461 INFO [train.py:903] (3/4) Epoch 18, batch 3400, loss[loss=0.2048, simple_loss=0.2929, pruned_loss=0.0583, over 19588.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2922, pruned_loss=0.06775, over 3844805.40 frames. 
], batch size: 61, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:18,514 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 4.894e+02 5.911e+02 7.983e+02 1.559e+03, threshold=1.182e+03, percent-clipped=2.0 +2023-04-02 12:11:37,562 INFO [train.py:903] (3/4) Epoch 18, batch 3450, loss[loss=0.184, simple_loss=0.2705, pruned_loss=0.04875, over 19679.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.293, pruned_loss=0.06823, over 3834260.13 frames. ], batch size: 53, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:11:43,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 12:11:45,635 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:09,611 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119552.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:31,744 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119570.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:12:38,211 INFO [train.py:903] (3/4) Epoch 18, batch 3500, loss[loss=0.2686, simple_loss=0.344, pruned_loss=0.09656, over 19672.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2932, pruned_loss=0.06835, over 3828647.81 frames. ], batch size: 60, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:12:47,662 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.82 vs. limit=5.0 +2023-04-02 12:12:51,541 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4437, 1.4564, 1.7482, 1.6594, 2.6075, 2.2427, 2.6868, 1.1062], + device='cuda:3'), covar=tensor([0.2242, 0.3988, 0.2434, 0.1747, 0.1408, 0.2032, 0.1391, 0.4107], + device='cuda:3'), in_proj_covar=tensor([0.0520, 0.0620, 0.0681, 0.0469, 0.0616, 0.0522, 0.0657, 0.0531], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 12:12:55,123 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 12:13:20,027 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.884e+02 6.516e+02 7.989e+02 1.670e+03, threshold=1.303e+03, percent-clipped=4.0 +2023-04-02 12:13:39,059 INFO [train.py:903] (3/4) Epoch 18, batch 3550, loss[loss=0.1992, simple_loss=0.2705, pruned_loss=0.06396, over 19746.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2932, pruned_loss=0.06834, over 3834746.19 frames. ], batch size: 47, lr: 4.59e-03, grad_scale: 4.0 +2023-04-02 12:14:39,530 INFO [train.py:903] (3/4) Epoch 18, batch 3600, loss[loss=0.2125, simple_loss=0.2794, pruned_loss=0.07275, over 19762.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2931, pruned_loss=0.06805, over 3827978.68 frames. ], batch size: 47, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:14:47,966 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:15:20,525 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.936e+02 6.005e+02 7.350e+02 2.220e+03, threshold=1.201e+03, percent-clipped=3.0 +2023-04-02 12:15:39,380 INFO [train.py:903] (3/4) Epoch 18, batch 3650, loss[loss=0.247, simple_loss=0.3157, pruned_loss=0.08922, over 19769.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.293, pruned_loss=0.06801, over 3822307.01 frames. 
], batch size: 54, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:40,014 INFO [train.py:903] (3/4) Epoch 18, batch 3700, loss[loss=0.2277, simple_loss=0.321, pruned_loss=0.0672, over 19677.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2935, pruned_loss=0.06814, over 3809751.76 frames. ], batch size: 55, lr: 4.59e-03, grad_scale: 8.0 +2023-04-02 12:16:58,975 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 12:17:19,681 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119808.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:21,521 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 5.017e+02 5.995e+02 7.636e+02 1.424e+03, threshold=1.199e+03, percent-clipped=3.0 +2023-04-02 12:17:32,214 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3870, 1.3978, 1.8957, 1.5413, 2.7596, 3.6424, 3.4407, 3.8348], + device='cuda:3'), covar=tensor([0.1538, 0.3679, 0.3177, 0.2321, 0.0634, 0.0269, 0.0211, 0.0277], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0309, 0.0340, 0.0258, 0.0233, 0.0178, 0.0211, 0.0239], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 12:17:40,475 INFO [train.py:903] (3/4) Epoch 18, batch 3750, loss[loss=0.1825, simple_loss=0.2564, pruned_loss=0.05426, over 19756.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.294, pruned_loss=0.06886, over 3790101.49 frames. ], batch size: 45, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:17:40,915 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119826.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:48,730 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:17:58,523 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6916, 1.8527, 1.7289, 2.6477, 1.8163, 2.4746, 1.9018, 1.5068], + device='cuda:3'), covar=tensor([0.4876, 0.4117, 0.2670, 0.2688, 0.4340, 0.2223, 0.5892, 0.4871], + device='cuda:3'), in_proj_covar=tensor([0.0870, 0.0926, 0.0696, 0.0921, 0.0850, 0.0789, 0.0827, 0.0766], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 12:18:10,666 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:18:20,895 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1364, 5.1244, 5.9235, 5.9888, 1.8708, 5.6280, 4.7327, 5.5243], + device='cuda:3'), covar=tensor([0.1486, 0.0841, 0.0496, 0.0566, 0.6293, 0.0731, 0.0557, 0.1148], + device='cuda:3'), in_proj_covar=tensor([0.0757, 0.0696, 0.0905, 0.0790, 0.0806, 0.0656, 0.0546, 0.0839], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 12:18:33,223 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5009, 1.7142, 2.0998, 1.9134, 3.0847, 2.7018, 3.3714, 1.4992], + device='cuda:3'), covar=tensor([0.2404, 0.4103, 0.2497, 0.1776, 0.1665, 0.2011, 0.1659, 0.4093], + device='cuda:3'), in_proj_covar=tensor([0.0518, 0.0618, 0.0678, 0.0467, 0.0611, 0.0519, 0.0654, 0.0530], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:18:39,735 INFO [train.py:903] (3/4) Epoch 18, 
batch 3800, loss[loss=0.2319, simple_loss=0.3119, pruned_loss=0.076, over 19619.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2934, pruned_loss=0.06864, over 3806763.39 frames. ], batch size: 57, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:18:40,735 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=119876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:19:12,066 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 12:19:22,041 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.499e+02 6.857e+02 8.596e+02 2.059e+03, threshold=1.371e+03, percent-clipped=8.0 +2023-04-02 12:19:41,469 INFO [train.py:903] (3/4) Epoch 18, batch 3850, loss[loss=0.2162, simple_loss=0.2977, pruned_loss=0.06739, over 18823.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.292, pruned_loss=0.06758, over 3814664.72 frames. ], batch size: 74, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:20:40,743 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-02 12:20:43,326 INFO [train.py:903] (3/4) Epoch 18, batch 3900, loss[loss=0.2074, simple_loss=0.2914, pruned_loss=0.06173, over 19551.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2907, pruned_loss=0.06656, over 3829770.45 frames. ], batch size: 61, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:02,839 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:26,391 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.199e+02 4.757e+02 5.722e+02 7.251e+02 1.425e+03, threshold=1.144e+03, percent-clipped=2.0 +2023-04-02 12:21:45,628 INFO [train.py:903] (3/4) Epoch 18, batch 3950, loss[loss=0.2234, simple_loss=0.3061, pruned_loss=0.07036, over 19085.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2903, pruned_loss=0.06617, over 3836336.99 frames. ], batch size: 69, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:21:47,751 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=120027.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:21:50,875 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 12:22:05,642 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0044, 3.6489, 2.4887, 3.2537, 1.0207, 3.5978, 3.4385, 3.5839], + device='cuda:3'), covar=tensor([0.0741, 0.1017, 0.1935, 0.0965, 0.3687, 0.0753, 0.0932, 0.1029], + device='cuda:3'), in_proj_covar=tensor([0.0480, 0.0392, 0.0479, 0.0341, 0.0396, 0.0415, 0.0408, 0.0442], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:22:47,956 INFO [train.py:903] (3/4) Epoch 18, batch 4000, loss[loss=0.2547, simple_loss=0.3299, pruned_loss=0.08973, over 17586.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2898, pruned_loss=0.06596, over 3838436.37 frames. ], batch size: 101, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:23:29,303 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 4.630e+02 5.764e+02 7.444e+02 1.534e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-02 12:23:36,025 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. 
Duration: 0.7545625 +2023-04-02 12:23:49,506 INFO [train.py:903] (3/4) Epoch 18, batch 4050, loss[loss=0.2209, simple_loss=0.2968, pruned_loss=0.0725, over 19312.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.29, pruned_loss=0.06624, over 3829268.31 frames. ], batch size: 66, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:24:08,119 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:24:47,334 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2575, 1.1606, 1.1558, 1.2918, 0.9974, 1.2456, 1.3322, 1.2103], + device='cuda:3'), covar=tensor([0.0940, 0.1069, 0.1141, 0.0722, 0.0886, 0.0872, 0.0814, 0.0838], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0244, 0.0227, 0.0208, 0.0188, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 12:24:49,140 INFO [train.py:903] (3/4) Epoch 18, batch 4100, loss[loss=0.2077, simple_loss=0.2912, pruned_loss=0.06211, over 17280.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2906, pruned_loss=0.06664, over 3834202.80 frames. ], batch size: 101, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:25:24,879 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 12:25:29,250 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.087e+02 6.190e+02 7.903e+02 1.294e+03, threshold=1.238e+03, percent-clipped=4.0 +2023-04-02 12:25:48,080 INFO [train.py:903] (3/4) Epoch 18, batch 4150, loss[loss=0.2265, simple_loss=0.3087, pruned_loss=0.07215, over 19677.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06685, over 3830185.61 frames. ], batch size: 58, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:14,724 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:46,375 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:26:50,728 INFO [train.py:903] (3/4) Epoch 18, batch 4200, loss[loss=0.2077, simple_loss=0.2795, pruned_loss=0.06798, over 19381.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.292, pruned_loss=0.06754, over 3836067.17 frames. ], batch size: 48, lr: 4.58e-03, grad_scale: 8.0 +2023-04-02 12:26:57,176 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 12:27:06,726 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1472, 2.0733, 1.7612, 1.5719, 1.5241, 1.6192, 0.5580, 1.0398], + device='cuda:3'), covar=tensor([0.0508, 0.0595, 0.0455, 0.0772, 0.1210, 0.0966, 0.1222, 0.1008], + device='cuda:3'), in_proj_covar=tensor([0.0348, 0.0345, 0.0346, 0.0372, 0.0448, 0.0379, 0.0325, 0.0334], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:27:30,828 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.475e+02 4.968e+02 6.385e+02 7.954e+02 1.571e+03, threshold=1.277e+03, percent-clipped=3.0 +2023-04-02 12:27:51,846 INFO [train.py:903] (3/4) Epoch 18, batch 4250, loss[loss=0.2064, simple_loss=0.2769, pruned_loss=0.06798, over 19395.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2914, pruned_loss=0.06704, over 3839737.24 frames. 
], batch size: 48, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:28:09,643 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 12:28:18,893 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 12:28:19,571 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.65 vs. limit=5.0 +2023-04-02 12:28:51,543 INFO [train.py:903] (3/4) Epoch 18, batch 4300, loss[loss=0.2594, simple_loss=0.3378, pruned_loss=0.09055, over 19342.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2917, pruned_loss=0.06703, over 3837918.95 frames. ], batch size: 66, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:19,875 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:34,133 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 5.084e+02 5.969e+02 7.631e+02 1.294e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 12:29:46,157 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 12:29:49,849 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120423.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:29:52,877 INFO [train.py:903] (3/4) Epoch 18, batch 4350, loss[loss=0.2199, simple_loss=0.3019, pruned_loss=0.06894, over 19671.00 frames. ], tot_loss[loss=0.214, simple_loss=0.293, pruned_loss=0.06755, over 3832847.54 frames. ], batch size: 60, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:29:57,442 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0105, 1.1454, 1.4691, 0.7386, 2.0183, 2.2174, 1.9921, 2.3191], + device='cuda:3'), covar=tensor([0.1491, 0.3564, 0.3034, 0.2508, 0.0733, 0.0390, 0.0367, 0.0420], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0311, 0.0339, 0.0258, 0.0234, 0.0179, 0.0211, 0.0238], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 12:30:20,710 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3018, 1.1303, 1.4642, 1.5015, 2.6516, 1.1020, 2.3408, 3.1499], + device='cuda:3'), covar=tensor([0.0688, 0.3422, 0.3070, 0.1993, 0.1240, 0.2791, 0.1240, 0.0464], + device='cuda:3'), in_proj_covar=tensor([0.0394, 0.0354, 0.0374, 0.0338, 0.0363, 0.0343, 0.0364, 0.0385], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:30:55,181 INFO [train.py:903] (3/4) Epoch 18, batch 4400, loss[loss=0.1966, simple_loss=0.2833, pruned_loss=0.05495, over 19317.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2924, pruned_loss=0.06746, over 3832018.03 frames. ], batch size: 66, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:31:18,744 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 12:31:28,328 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 12:31:35,104 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.092e+02 6.306e+02 8.223e+02 1.474e+03, threshold=1.261e+03, percent-clipped=7.0 +2023-04-02 12:31:55,621 INFO [train.py:903] (3/4) Epoch 18, batch 4450, loss[loss=0.2308, simple_loss=0.3143, pruned_loss=0.07363, over 18140.00 frames. 
], tot_loss[loss=0.2145, simple_loss=0.2929, pruned_loss=0.068, over 3829125.35 frames. ], batch size: 83, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:32:55,856 INFO [train.py:903] (3/4) Epoch 18, batch 4500, loss[loss=0.2477, simple_loss=0.3201, pruned_loss=0.08768, over 19506.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.292, pruned_loss=0.0674, over 3837202.19 frames. ], batch size: 64, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:33:37,594 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.517e+02 5.202e+02 6.117e+02 7.756e+02 2.072e+03, threshold=1.223e+03, percent-clipped=4.0 +2023-04-02 12:33:56,189 INFO [train.py:903] (3/4) Epoch 18, batch 4550, loss[loss=0.2193, simple_loss=0.2987, pruned_loss=0.06989, over 19776.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.292, pruned_loss=0.0677, over 3845545.77 frames. ], batch size: 56, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:34:05,863 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 12:34:29,497 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 12:34:56,266 INFO [train.py:903] (3/4) Epoch 18, batch 4600, loss[loss=0.2654, simple_loss=0.335, pruned_loss=0.09792, over 18778.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.294, pruned_loss=0.06927, over 3812008.88 frames. ], batch size: 74, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:35:17,310 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=120694.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:35:27,854 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0078, 1.9512, 1.9806, 2.7683, 1.9392, 2.5307, 2.4856, 2.4180], + device='cuda:3'), covar=tensor([0.0758, 0.0878, 0.0927, 0.0778, 0.0878, 0.0715, 0.0854, 0.0594], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0245, 0.0229, 0.0211, 0.0189, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 12:35:35,196 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 4.865e+02 6.169e+02 7.995e+02 1.948e+03, threshold=1.234e+03, percent-clipped=9.0 +2023-04-02 12:35:55,082 INFO [train.py:903] (3/4) Epoch 18, batch 4650, loss[loss=0.2841, simple_loss=0.3408, pruned_loss=0.1137, over 12543.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.292, pruned_loss=0.06841, over 3825792.82 frames. ], batch size: 136, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:36:13,548 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 12:36:23,040 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 12:36:24,595 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 12:36:55,849 INFO [train.py:903] (3/4) Epoch 18, batch 4700, loss[loss=0.2083, simple_loss=0.2979, pruned_loss=0.05931, over 19351.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.292, pruned_loss=0.0676, over 3827302.96 frames. ], batch size: 70, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:37:18,766 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-02 12:37:37,347 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.224e+02 6.544e+02 8.077e+02 2.112e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 12:37:51,142 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4918, 1.3741, 1.4183, 1.7856, 1.5310, 1.6811, 1.5900, 1.5252], + device='cuda:3'), covar=tensor([0.0695, 0.0770, 0.0811, 0.0630, 0.0804, 0.0642, 0.0859, 0.0610], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0223, 0.0225, 0.0244, 0.0227, 0.0209, 0.0189, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 12:37:54,550 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-02 12:37:56,118 INFO [train.py:903] (3/4) Epoch 18, batch 4750, loss[loss=0.2958, simple_loss=0.3478, pruned_loss=0.1219, over 13495.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2931, pruned_loss=0.06867, over 3811310.29 frames. ], batch size: 137, lr: 4.57e-03, grad_scale: 8.0 +2023-04-02 12:38:57,283 INFO [train.py:903] (3/4) Epoch 18, batch 4800, loss[loss=0.2009, simple_loss=0.2763, pruned_loss=0.06272, over 19415.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2923, pruned_loss=0.06821, over 3822632.83 frames. ], batch size: 48, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:39:38,286 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.835e+02 5.063e+02 6.517e+02 8.294e+02 1.429e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-04-02 12:39:57,799 INFO [train.py:903] (3/4) Epoch 18, batch 4850, loss[loss=0.228, simple_loss=0.3071, pruned_loss=0.07446, over 19307.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2915, pruned_loss=0.06792, over 3797516.35 frames. ], batch size: 66, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:40:19,482 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 12:40:38,945 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 12:40:44,430 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 12:40:45,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 12:40:55,301 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 12:40:57,465 INFO [train.py:903] (3/4) Epoch 18, batch 4900, loss[loss=0.2028, simple_loss=0.2906, pruned_loss=0.05745, over 19695.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2925, pruned_loss=0.06873, over 3785908.63 frames. ], batch size: 59, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:41:15,405 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 12:41:38,440 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.463e+02 5.379e+02 6.693e+02 8.152e+02 1.326e+03, threshold=1.339e+03, percent-clipped=1.0 +2023-04-02 12:41:50,636 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:41:56,826 INFO [train.py:903] (3/4) Epoch 18, batch 4950, loss[loss=0.2109, simple_loss=0.2988, pruned_loss=0.0615, over 19423.00 frames. 
], tot_loss[loss=0.216, simple_loss=0.2933, pruned_loss=0.06934, over 3805197.41 frames. ], batch size: 70, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:42:11,752 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121038.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:42:13,943 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 12:42:36,735 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 12:42:55,224 INFO [train.py:903] (3/4) Epoch 18, batch 5000, loss[loss=0.304, simple_loss=0.3529, pruned_loss=0.1275, over 13491.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2954, pruned_loss=0.0706, over 3801455.81 frames. ], batch size: 136, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:43:05,401 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 12:43:11,828 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6121, 1.4986, 1.5340, 2.1474, 1.5874, 1.9490, 1.9686, 1.7106], + device='cuda:3'), covar=tensor([0.0846, 0.0952, 0.1012, 0.0743, 0.0857, 0.0733, 0.0814, 0.0729], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0226, 0.0244, 0.0229, 0.0210, 0.0189, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 12:43:16,152 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 12:43:36,123 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.049e+02 6.044e+02 8.198e+02 1.739e+03, threshold=1.209e+03, percent-clipped=8.0 +2023-04-02 12:43:55,696 INFO [train.py:903] (3/4) Epoch 18, batch 5050, loss[loss=0.2563, simple_loss=0.3194, pruned_loss=0.09659, over 13739.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2951, pruned_loss=0.07018, over 3818798.84 frames. ], batch size: 136, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:44:27,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:44:29,509 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 12:44:55,825 INFO [train.py:903] (3/4) Epoch 18, batch 5100, loss[loss=0.2065, simple_loss=0.2996, pruned_loss=0.05673, over 19533.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.296, pruned_loss=0.07059, over 3801083.59 frames. ], batch size: 64, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:45:03,048 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:45:05,057 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 12:45:07,244 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 12:45:12,795 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 12:45:37,241 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 4.928e+02 6.063e+02 8.354e+02 2.244e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 12:45:54,370 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4930, 2.2964, 1.7039, 1.4920, 2.1364, 1.3204, 1.3995, 1.9716], + device='cuda:3'), covar=tensor([0.0969, 0.0776, 0.0960, 0.0827, 0.0499, 0.1256, 0.0703, 0.0449], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0313, 0.0326, 0.0259, 0.0247, 0.0332, 0.0292, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:45:56,099 INFO [train.py:903] (3/4) Epoch 18, batch 5150, loss[loss=0.3051, simple_loss=0.3594, pruned_loss=0.1254, over 12989.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2951, pruned_loss=0.06989, over 3797448.40 frames. ], batch size: 136, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:46:05,287 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 12:46:40,455 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 12:46:55,813 INFO [train.py:903] (3/4) Epoch 18, batch 5200, loss[loss=0.2151, simple_loss=0.2966, pruned_loss=0.06679, over 19787.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2944, pruned_loss=0.06936, over 3808445.50 frames. ], batch size: 56, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:03,719 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5637, 1.9441, 2.0792, 2.0228, 3.3309, 1.7207, 2.8038, 3.4529], + device='cuda:3'), covar=tensor([0.0448, 0.2203, 0.2208, 0.1609, 0.0504, 0.2094, 0.1475, 0.0335], + device='cuda:3'), in_proj_covar=tensor([0.0394, 0.0355, 0.0376, 0.0341, 0.0365, 0.0347, 0.0366, 0.0387], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:47:08,967 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 12:47:36,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.110e+02 6.332e+02 8.392e+02 3.036e+03, threshold=1.266e+03, percent-clipped=7.0 +2023-04-02 12:47:39,674 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1001, 1.3606, 1.6110, 1.2402, 2.5827, 3.5100, 3.2510, 3.7508], + device='cuda:3'), covar=tensor([0.1661, 0.3605, 0.3277, 0.2421, 0.0610, 0.0178, 0.0219, 0.0252], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0312, 0.0343, 0.0259, 0.0235, 0.0179, 0.0212, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 12:47:49,046 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. 
Duration: 0.9409375 +2023-04-02 12:47:50,690 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4649, 1.5666, 1.8161, 1.7332, 2.6504, 2.4192, 2.7526, 1.2871], + device='cuda:3'), covar=tensor([0.2171, 0.3968, 0.2438, 0.1754, 0.1429, 0.1889, 0.1400, 0.3769], + device='cuda:3'), in_proj_covar=tensor([0.0517, 0.0621, 0.0680, 0.0466, 0.0612, 0.0519, 0.0653, 0.0531], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:47:54,555 INFO [train.py:903] (3/4) Epoch 18, batch 5250, loss[loss=0.2509, simple_loss=0.3336, pruned_loss=0.08405, over 19660.00 frames. ], tot_loss[loss=0.217, simple_loss=0.295, pruned_loss=0.06948, over 3809663.22 frames. ], batch size: 59, lr: 4.56e-03, grad_scale: 8.0 +2023-04-02 12:47:59,199 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121329.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 12:48:22,545 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5078, 1.6013, 1.8866, 1.8054, 2.8736, 2.4322, 2.9689, 1.3247], + device='cuda:3'), covar=tensor([0.2314, 0.4221, 0.2596, 0.1775, 0.1358, 0.1989, 0.1381, 0.4082], + device='cuda:3'), in_proj_covar=tensor([0.0517, 0.0620, 0.0680, 0.0466, 0.0611, 0.0518, 0.0653, 0.0531], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:48:40,812 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121365.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:48:54,017 INFO [train.py:903] (3/4) Epoch 18, batch 5300, loss[loss=0.2367, simple_loss=0.319, pruned_loss=0.07714, over 19350.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2943, pruned_loss=0.06926, over 3820922.80 frames. ], batch size: 70, lr: 4.56e-03, grad_scale: 4.0 +2023-04-02 12:49:10,552 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 12:49:14,170 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3345, 2.3766, 2.5697, 3.2762, 2.3207, 3.0441, 2.7242, 2.5121], + device='cuda:3'), covar=tensor([0.4012, 0.3814, 0.1676, 0.2239, 0.4217, 0.1908, 0.4178, 0.2937], + device='cuda:3'), in_proj_covar=tensor([0.0870, 0.0928, 0.0695, 0.0923, 0.0851, 0.0788, 0.0826, 0.0764], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 12:49:33,524 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:35,381 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.404e+02 6.878e+02 8.269e+02 2.518e+03, threshold=1.376e+03, percent-clipped=11.0 +2023-04-02 12:49:52,539 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:49:53,398 INFO [train.py:903] (3/4) Epoch 18, batch 5350, loss[loss=0.1937, simple_loss=0.2649, pruned_loss=0.06125, over 19773.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2943, pruned_loss=0.06917, over 3813763.12 frames. ], batch size: 47, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:03,610 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:50:26,223 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-02 12:50:53,958 INFO [train.py:903] (3/4) Epoch 18, batch 5400, loss[loss=0.1688, simple_loss=0.2468, pruned_loss=0.04537, over 18682.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2947, pruned_loss=0.06932, over 3824914.25 frames. ], batch size: 41, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:50:59,568 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121480.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:35,380 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 5.292e+02 6.238e+02 7.901e+02 1.766e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-02 12:51:54,666 INFO [train.py:903] (3/4) Epoch 18, batch 5450, loss[loss=0.2242, simple_loss=0.3106, pruned_loss=0.06888, over 18779.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2956, pruned_loss=0.06947, over 3826639.63 frames. ], batch size: 74, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:51:54,819 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121526.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:51:56,524 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-04-02 12:52:54,549 INFO [train.py:903] (3/4) Epoch 18, batch 5500, loss[loss=0.2593, simple_loss=0.3318, pruned_loss=0.09335, over 19327.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2943, pruned_loss=0.06894, over 3826739.74 frames. ], batch size: 66, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:53:09,246 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9523, 1.8180, 1.6097, 2.0446, 1.7853, 1.7093, 1.5336, 1.8682], + device='cuda:3'), covar=tensor([0.1039, 0.1518, 0.1444, 0.1032, 0.1341, 0.0548, 0.1382, 0.0720], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0355, 0.0302, 0.0249, 0.0299, 0.0247, 0.0297, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:53:19,885 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 12:53:37,491 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.754e+02 5.891e+02 7.661e+02 1.861e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-02 12:53:55,066 INFO [train.py:903] (3/4) Epoch 18, batch 5550, loss[loss=0.1776, simple_loss=0.2619, pruned_loss=0.04665, over 19422.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2944, pruned_loss=0.06924, over 3824307.65 frames. ], batch size: 48, lr: 4.55e-03, grad_scale: 4.0 +2023-04-02 12:54:02,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 12:54:13,258 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121641.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:54:51,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 12:54:52,547 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121673.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 12:54:55,719 INFO [train.py:903] (3/4) Epoch 18, batch 5600, loss[loss=0.2141, simple_loss=0.2942, pruned_loss=0.06701, over 19764.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2935, pruned_loss=0.06907, over 3822918.92 frames. 
], batch size: 54, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:55:15,337 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8425, 4.3687, 2.6663, 3.8505, 1.3711, 4.3261, 4.1985, 4.3299], + device='cuda:3'), covar=tensor([0.0537, 0.0995, 0.2122, 0.0856, 0.3478, 0.0651, 0.0869, 0.1042], + device='cuda:3'), in_proj_covar=tensor([0.0484, 0.0391, 0.0479, 0.0340, 0.0392, 0.0418, 0.0408, 0.0439], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 12:55:37,561 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.280e+02 4.851e+02 5.729e+02 6.791e+02 1.695e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-04-02 12:55:56,733 INFO [train.py:903] (3/4) Epoch 18, batch 5650, loss[loss=0.2078, simple_loss=0.3002, pruned_loss=0.05768, over 19614.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2934, pruned_loss=0.06881, over 3826800.64 frames. ], batch size: 57, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:56:08,932 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:37,728 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121761.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:44,587 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 12:56:48,206 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=121769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:56:50,038 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.41 vs. limit=5.0 +2023-04-02 12:56:57,050 INFO [train.py:903] (3/4) Epoch 18, batch 5700, loss[loss=0.2456, simple_loss=0.3175, pruned_loss=0.08689, over 19647.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2951, pruned_loss=0.06998, over 3824799.50 frames. ], batch size: 60, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:10,998 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121788.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 12:57:39,339 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.821e+02 5.859e+02 7.608e+02 1.521e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-02 12:57:57,040 INFO [train.py:903] (3/4) Epoch 18, batch 5750, loss[loss=0.242, simple_loss=0.3168, pruned_loss=0.08357, over 19756.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2948, pruned_loss=0.06995, over 3817077.71 frames. ], batch size: 63, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:57:58,073 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 12:58:05,923 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 12:58:11,395 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-02 12:58:42,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9401, 1.8989, 1.7436, 1.5706, 1.4247, 1.5835, 0.4197, 0.8700], + device='cuda:3'), covar=tensor([0.0562, 0.0544, 0.0403, 0.0636, 0.1076, 0.0683, 0.1112, 0.0940], + device='cuda:3'), in_proj_covar=tensor([0.0351, 0.0347, 0.0349, 0.0375, 0.0451, 0.0379, 0.0329, 0.0336], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 12:58:43,873 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 12:58:57,512 INFO [train.py:903] (3/4) Epoch 18, batch 5800, loss[loss=0.2382, simple_loss=0.3143, pruned_loss=0.08105, over 19575.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2945, pruned_loss=0.06954, over 3827902.00 frames. ], batch size: 61, lr: 4.55e-03, grad_scale: 8.0 +2023-04-02 12:59:08,950 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:24,594 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121897.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:39,870 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 5.174e+02 6.386e+02 8.157e+02 2.937e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-02 12:59:54,634 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 12:59:59,627 INFO [train.py:903] (3/4) Epoch 18, batch 5850, loss[loss=0.2469, simple_loss=0.32, pruned_loss=0.08685, over 13677.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2945, pruned_loss=0.06986, over 3797736.81 frames. ], batch size: 136, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:00:06,630 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6397, 2.4234, 1.8354, 1.6636, 2.2527, 1.3862, 1.5095, 2.0398], + device='cuda:3'), covar=tensor([0.1065, 0.0788, 0.0970, 0.0817, 0.0509, 0.1277, 0.0732, 0.0480], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0313, 0.0327, 0.0257, 0.0245, 0.0331, 0.0291, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:00:51,267 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121968.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:01:00,889 INFO [train.py:903] (3/4) Epoch 18, batch 5900, loss[loss=0.2371, simple_loss=0.3249, pruned_loss=0.07468, over 19755.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2933, pruned_loss=0.06837, over 3815167.97 frames. ], batch size: 63, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:01:02,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 13:01:23,289 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. 
Duration: 27.511125 +2023-04-02 13:01:39,600 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5955, 1.1976, 1.5008, 1.1771, 2.2516, 1.0485, 2.1732, 2.4297], + device='cuda:3'), covar=tensor([0.0675, 0.2697, 0.2603, 0.1704, 0.0847, 0.2045, 0.0964, 0.0496], + device='cuda:3'), in_proj_covar=tensor([0.0390, 0.0352, 0.0373, 0.0338, 0.0364, 0.0346, 0.0363, 0.0385], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:01:43,749 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.950e+02 6.415e+02 8.144e+02 2.513e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-02 13:02:01,670 INFO [train.py:903] (3/4) Epoch 18, batch 5950, loss[loss=0.2364, simple_loss=0.3101, pruned_loss=0.08135, over 19343.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2947, pruned_loss=0.06913, over 3829109.23 frames. ], batch size: 66, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:02:24,950 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122044.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:02:42,896 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-02 13:02:53,782 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122069.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:03:01,811 INFO [train.py:903] (3/4) Epoch 18, batch 6000, loss[loss=0.2304, simple_loss=0.3038, pruned_loss=0.07846, over 19650.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2951, pruned_loss=0.06938, over 3831991.24 frames. ], batch size: 55, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:03:01,812 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 13:03:14,300 INFO [train.py:937] (3/4) Epoch 18, validation: loss=0.1702, simple_loss=0.2706, pruned_loss=0.03489, over 944034.00 frames. +2023-04-02 13:03:14,302 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 13:03:57,709 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.792e+02 5.055e+02 6.106e+02 7.549e+02 1.634e+03, threshold=1.221e+03, percent-clipped=4.0 +2023-04-02 13:04:15,902 INFO [train.py:903] (3/4) Epoch 18, batch 6050, loss[loss=0.1981, simple_loss=0.2723, pruned_loss=0.0619, over 19723.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2939, pruned_loss=0.0686, over 3830205.87 frames. ], batch size: 51, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:04:28,039 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:04:33,704 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:04,404 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:05:18,133 INFO [train.py:903] (3/4) Epoch 18, batch 6100, loss[loss=0.2798, simple_loss=0.3364, pruned_loss=0.1116, over 17359.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2941, pruned_loss=0.06881, over 3831055.77 frames. 
], batch size: 101, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:05:59,976 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.309e+02 6.592e+02 8.010e+02 1.726e+03, threshold=1.318e+03, percent-clipped=1.0 +2023-04-02 13:06:03,477 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122213.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:06:17,058 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0095, 2.1125, 2.3654, 2.7418, 2.0498, 2.6734, 2.4641, 2.1669], + device='cuda:3'), covar=tensor([0.3939, 0.3654, 0.1706, 0.2148, 0.3858, 0.1826, 0.4065, 0.2981], + device='cuda:3'), in_proj_covar=tensor([0.0870, 0.0926, 0.0696, 0.0920, 0.0854, 0.0785, 0.0825, 0.0762], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 13:06:18,826 INFO [train.py:903] (3/4) Epoch 18, batch 6150, loss[loss=0.2015, simple_loss=0.2686, pruned_loss=0.06725, over 17813.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2933, pruned_loss=0.06881, over 3820330.76 frames. ], batch size: 39, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:06:38,374 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1642, 2.3334, 2.4296, 3.0738, 2.2881, 2.9593, 2.4454, 2.1098], + device='cuda:3'), covar=tensor([0.4375, 0.3974, 0.1892, 0.2523, 0.4376, 0.2045, 0.4823, 0.3515], + device='cuda:3'), in_proj_covar=tensor([0.0868, 0.0924, 0.0695, 0.0918, 0.0852, 0.0783, 0.0823, 0.0760], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 13:06:46,739 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 13:07:20,189 INFO [train.py:903] (3/4) Epoch 18, batch 6200, loss[loss=0.213, simple_loss=0.2963, pruned_loss=0.06486, over 19743.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.0672, over 3831308.06 frames. ], batch size: 63, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:04,095 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.549e+02 5.662e+02 6.707e+02 1.660e+03, threshold=1.132e+03, percent-clipped=3.0 +2023-04-02 13:08:05,435 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122312.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:17,460 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 13:08:21,487 INFO [train.py:903] (3/4) Epoch 18, batch 6250, loss[loss=0.2154, simple_loss=0.2756, pruned_loss=0.0776, over 18674.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2908, pruned_loss=0.06713, over 3828698.66 frames. ], batch size: 41, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:08:35,738 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122337.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:08:53,453 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 13:09:23,683 INFO [train.py:903] (3/4) Epoch 18, batch 6300, loss[loss=0.2203, simple_loss=0.2926, pruned_loss=0.07407, over 19740.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2899, pruned_loss=0.06681, over 3825849.62 frames. 
], batch size: 51, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:06,341 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.826e+02 5.839e+02 7.420e+02 1.796e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-02 13:10:25,342 INFO [train.py:903] (3/4) Epoch 18, batch 6350, loss[loss=0.2023, simple_loss=0.2899, pruned_loss=0.05736, over 19582.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2906, pruned_loss=0.06727, over 3810534.63 frames. ], batch size: 52, lr: 4.54e-03, grad_scale: 8.0 +2023-04-02 13:10:26,819 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:25,786 INFO [train.py:903] (3/4) Epoch 18, batch 6400, loss[loss=0.2586, simple_loss=0.3307, pruned_loss=0.09329, over 19652.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2909, pruned_loss=0.06733, over 3815087.57 frames. ], batch size: 58, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:11:29,350 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:11:30,830 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2147, 1.3026, 1.2516, 1.0521, 1.0699, 1.1206, 0.0894, 0.3468], + device='cuda:3'), covar=tensor([0.0680, 0.0639, 0.0436, 0.0560, 0.1394, 0.0631, 0.1285, 0.1068], + device='cuda:3'), in_proj_covar=tensor([0.0350, 0.0347, 0.0347, 0.0373, 0.0449, 0.0379, 0.0329, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 13:11:38,365 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122486.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:12:08,691 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.263e+02 4.572e+02 5.352e+02 7.001e+02 2.095e+03, threshold=1.070e+03, percent-clipped=5.0 +2023-04-02 13:12:27,168 INFO [train.py:903] (3/4) Epoch 18, batch 6450, loss[loss=0.2192, simple_loss=0.3095, pruned_loss=0.06445, over 19552.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06648, over 3821213.28 frames. ], batch size: 56, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:12:43,823 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 13:13:05,519 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122557.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:13:09,884 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 13:13:27,310 INFO [train.py:903] (3/4) Epoch 18, batch 6500, loss[loss=0.2139, simple_loss=0.2969, pruned_loss=0.06546, over 19786.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2889, pruned_loss=0.06555, over 3820995.57 frames. ], batch size: 56, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:13:32,625 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 13:13:41,127 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. 
limit=5.0 +2023-04-02 13:13:50,450 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:14:06,029 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2438, 2.1200, 2.0039, 1.7830, 1.5914, 1.8440, 0.5119, 1.2086], + device='cuda:3'), covar=tensor([0.0624, 0.0601, 0.0448, 0.0834, 0.1166, 0.0878, 0.1294, 0.0967], + device='cuda:3'), in_proj_covar=tensor([0.0352, 0.0347, 0.0348, 0.0375, 0.0451, 0.0380, 0.0330, 0.0335], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 13:14:10,392 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.824e+02 5.508e+02 6.898e+02 1.222e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 13:14:28,807 INFO [train.py:903] (3/4) Epoch 18, batch 6550, loss[loss=0.2213, simple_loss=0.31, pruned_loss=0.06626, over 19708.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06546, over 3825782.10 frames. ], batch size: 59, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:25,367 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:29,440 INFO [train.py:903] (3/4) Epoch 18, batch 6600, loss[loss=0.2119, simple_loss=0.2983, pruned_loss=0.0628, over 18801.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2895, pruned_loss=0.06568, over 3826622.45 frames. ], batch size: 74, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:15:35,566 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:15:37,955 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122683.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:08,836 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122708.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:16:11,858 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.976e+02 6.114e+02 8.137e+02 1.508e+03, threshold=1.223e+03, percent-clipped=10.0 +2023-04-02 13:16:29,835 INFO [train.py:903] (3/4) Epoch 18, batch 6650, loss[loss=0.1982, simple_loss=0.2886, pruned_loss=0.05388, over 19439.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2908, pruned_loss=0.06606, over 3829861.45 frames. ], batch size: 48, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:30,797 INFO [train.py:903] (3/4) Epoch 18, batch 6700, loss[loss=0.1852, simple_loss=0.2673, pruned_loss=0.05155, over 19847.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2905, pruned_loss=0.06596, over 3817115.35 frames. ], batch size: 52, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:17:55,704 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:12,038 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.791e+02 5.844e+02 7.603e+02 1.856e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 13:18:28,395 INFO [train.py:903] (3/4) Epoch 18, batch 6750, loss[loss=0.2445, simple_loss=0.3096, pruned_loss=0.08972, over 19496.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2914, pruned_loss=0.06679, over 3810198.41 frames. 
], batch size: 49, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:18:33,091 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=122830.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:18:55,613 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:24,287 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:19:25,061 INFO [train.py:903] (3/4) Epoch 18, batch 6800, loss[loss=0.1989, simple_loss=0.2668, pruned_loss=0.06546, over 19309.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06666, over 3812809.79 frames. ], batch size: 44, lr: 4.53e-03, grad_scale: 8.0 +2023-04-02 13:19:40,837 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:20:09,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 13:20:09,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 13:20:12,597 INFO [train.py:903] (3/4) Epoch 19, batch 0, loss[loss=0.2186, simple_loss=0.2986, pruned_loss=0.06933, over 19782.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2986, pruned_loss=0.06933, over 19782.00 frames. ], batch size: 56, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:20:12,597 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 13:20:24,047 INFO [train.py:937] (3/4) Epoch 19, validation: loss=0.171, simple_loss=0.2713, pruned_loss=0.03533, over 944034.00 frames. +2023-04-02 13:20:24,048 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 13:20:32,696 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 4.987e+02 6.075e+02 7.792e+02 1.350e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 13:20:37,457 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 13:20:53,733 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122928.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:20:55,940 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0688, 5.1182, 5.9452, 5.9254, 1.9705, 5.5684, 4.7252, 5.5470], + device='cuda:3'), covar=tensor([0.1573, 0.0743, 0.0514, 0.0545, 0.6049, 0.0691, 0.0592, 0.1089], + device='cuda:3'), in_proj_covar=tensor([0.0751, 0.0696, 0.0898, 0.0796, 0.0803, 0.0652, 0.0542, 0.0826], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 13:21:14,005 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122945.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:17,738 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-02 13:21:24,691 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:21:25,465 INFO [train.py:903] (3/4) Epoch 19, batch 50, loss[loss=0.1689, simple_loss=0.2488, pruned_loss=0.04447, over 19743.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2895, pruned_loss=0.06709, over 864950.14 frames. 
], batch size: 45, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:04,202 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 13:22:27,933 INFO [train.py:903] (3/4) Epoch 19, batch 100, loss[loss=0.2415, simple_loss=0.3024, pruned_loss=0.09034, over 19720.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2912, pruned_loss=0.06764, over 1526678.71 frames. ], batch size: 45, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:22:35,841 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.975e+02 5.926e+02 8.151e+02 1.966e+03, threshold=1.185e+03, percent-clipped=7.0 +2023-04-02 13:22:40,154 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 13:22:51,561 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0130, 1.2631, 1.6288, 0.8993, 2.3684, 3.0621, 2.7946, 3.2708], + device='cuda:3'), covar=tensor([0.1648, 0.3592, 0.3240, 0.2583, 0.0573, 0.0205, 0.0231, 0.0274], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0311, 0.0341, 0.0259, 0.0234, 0.0179, 0.0211, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 13:23:25,708 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123052.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:23:27,454 INFO [train.py:903] (3/4) Epoch 19, batch 150, loss[loss=0.2305, simple_loss=0.3106, pruned_loss=0.0752, over 19319.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.294, pruned_loss=0.06935, over 2035179.40 frames. ], batch size: 70, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:23:55,479 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123077.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:24:27,285 INFO [train.py:903] (3/4) Epoch 19, batch 200, loss[loss=0.3056, simple_loss=0.3571, pruned_loss=0.1271, over 19736.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2936, pruned_loss=0.06877, over 2434424.89 frames. ], batch size: 63, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:24:27,696 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3448, 2.0713, 1.6079, 1.3759, 1.9304, 1.3198, 1.3145, 1.8001], + device='cuda:3'), covar=tensor([0.0885, 0.0719, 0.0974, 0.0779, 0.0443, 0.1180, 0.0576, 0.0396], + device='cuda:3'), in_proj_covar=tensor([0.0296, 0.0310, 0.0327, 0.0256, 0.0243, 0.0331, 0.0288, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:24:29,573 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 13:24:35,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.274e+02 5.223e+02 5.868e+02 7.012e+02 1.944e+03, threshold=1.174e+03, percent-clipped=7.0 +2023-04-02 13:25:27,092 INFO [train.py:903] (3/4) Epoch 19, batch 250, loss[loss=0.2036, simple_loss=0.2806, pruned_loss=0.06331, over 19733.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2928, pruned_loss=0.06799, over 2755605.89 frames. 
], batch size: 51, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:25,153 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:26:28,233 INFO [train.py:903] (3/4) Epoch 19, batch 300, loss[loss=0.2298, simple_loss=0.3071, pruned_loss=0.07621, over 19272.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2921, pruned_loss=0.06797, over 2994266.97 frames. ], batch size: 66, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:26:37,167 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.833e+02 6.226e+02 7.852e+02 1.722e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-02 13:26:55,621 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123226.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:04,614 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:27:29,487 INFO [train.py:903] (3/4) Epoch 19, batch 350, loss[loss=0.2251, simple_loss=0.3033, pruned_loss=0.07352, over 19607.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2912, pruned_loss=0.06727, over 3182840.35 frames. ], batch size: 57, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:27:35,244 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 13:28:30,167 INFO [train.py:903] (3/4) Epoch 19, batch 400, loss[loss=0.2013, simple_loss=0.2845, pruned_loss=0.05902, over 19576.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.06671, over 3320145.48 frames. ], batch size: 52, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:28:37,938 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.570e+02 5.151e+02 6.306e+02 7.944e+02 1.366e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-02 13:28:55,716 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0685, 1.3105, 1.5889, 1.0022, 2.3060, 2.9903, 2.7438, 3.2404], + device='cuda:3'), covar=tensor([0.1605, 0.3589, 0.3334, 0.2486, 0.0613, 0.0232, 0.0271, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0310, 0.0341, 0.0258, 0.0235, 0.0179, 0.0211, 0.0242], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 13:29:04,100 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123332.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:24,968 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:29:30,465 INFO [train.py:903] (3/4) Epoch 19, batch 450, loss[loss=0.2062, simple_loss=0.2725, pruned_loss=0.07, over 19784.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2921, pruned_loss=0.06714, over 3436814.39 frames. ], batch size: 46, lr: 4.40e-03, grad_scale: 8.0 +2023-04-02 13:30:04,831 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 13:30:05,743 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 13:30:30,933 INFO [train.py:903] (3/4) Epoch 19, batch 500, loss[loss=0.1954, simple_loss=0.2702, pruned_loss=0.0603, over 19615.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2915, pruned_loss=0.06721, over 3534176.94 frames. 
], batch size: 50, lr: 4.40e-03, grad_scale: 16.0 +2023-04-02 13:30:39,754 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.352e+02 7.180e+02 8.361e+02 2.088e+03, threshold=1.436e+03, percent-clipped=3.0 +2023-04-02 13:31:21,112 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:31:30,114 INFO [train.py:903] (3/4) Epoch 19, batch 550, loss[loss=0.2332, simple_loss=0.3127, pruned_loss=0.07687, over 19502.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2925, pruned_loss=0.06746, over 3595468.89 frames. ], batch size: 64, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:30,274 INFO [train.py:903] (3/4) Epoch 19, batch 600, loss[loss=0.2167, simple_loss=0.2993, pruned_loss=0.06707, over 19605.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2912, pruned_loss=0.06725, over 3655905.50 frames. ], batch size: 57, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:32:39,906 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.960e+02 5.982e+02 8.370e+02 1.865e+03, threshold=1.196e+03, percent-clipped=4.0 +2023-04-02 13:33:12,186 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1646, 1.2394, 1.6118, 1.0038, 2.3152, 3.0353, 2.6990, 3.2053], + device='cuda:3'), covar=tensor([0.1555, 0.3724, 0.3274, 0.2492, 0.0612, 0.0225, 0.0260, 0.0298], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0312, 0.0343, 0.0259, 0.0237, 0.0179, 0.0212, 0.0243], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 13:33:14,022 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 13:33:30,842 INFO [train.py:903] (3/4) Epoch 19, batch 650, loss[loss=0.2284, simple_loss=0.3132, pruned_loss=0.07185, over 19624.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2919, pruned_loss=0.06763, over 3691478.80 frames. ], batch size: 57, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:30,415 INFO [train.py:903] (3/4) Epoch 19, batch 700, loss[loss=0.2134, simple_loss=0.2997, pruned_loss=0.06351, over 19531.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2922, pruned_loss=0.06783, over 3713596.28 frames. ], batch size: 56, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:34:31,932 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:34:41,233 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.460e+02 6.000e+02 7.674e+02 1.249e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 13:35:02,792 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:35:31,845 INFO [train.py:903] (3/4) Epoch 19, batch 750, loss[loss=0.2188, simple_loss=0.299, pruned_loss=0.06931, over 19523.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06695, over 3745570.16 frames. ], batch size: 54, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:35:59,275 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123676.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:36:34,034 INFO [train.py:903] (3/4) Epoch 19, batch 800, loss[loss=0.2077, simple_loss=0.2893, pruned_loss=0.06304, over 19651.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2916, pruned_loss=0.06694, over 3761797.91 frames. 
], batch size: 55, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:36:43,873 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 4.695e+02 6.255e+02 7.449e+02 1.390e+03, threshold=1.251e+03, percent-clipped=2.0 +2023-04-02 13:36:47,183 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 13:37:34,011 INFO [train.py:903] (3/4) Epoch 19, batch 850, loss[loss=0.1954, simple_loss=0.2821, pruned_loss=0.05434, over 17676.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2925, pruned_loss=0.06727, over 3770592.29 frames. ], batch size: 101, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:17,263 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=123790.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:18,596 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:38:23,841 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 13:38:34,410 INFO [train.py:903] (3/4) Epoch 19, batch 900, loss[loss=0.2237, simple_loss=0.2945, pruned_loss=0.07643, over 17680.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2924, pruned_loss=0.06724, over 3781476.73 frames. ], batch size: 39, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:38:44,943 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.405e+02 4.993e+02 6.523e+02 8.112e+02 2.572e+03, threshold=1.305e+03, percent-clipped=9.0 +2023-04-02 13:39:35,390 INFO [train.py:903] (3/4) Epoch 19, batch 950, loss[loss=0.195, simple_loss=0.2713, pruned_loss=0.0593, over 19616.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2928, pruned_loss=0.06737, over 3791242.45 frames. ], batch size: 50, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:39:35,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 13:40:18,682 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:35,890 INFO [train.py:903] (3/4) Epoch 19, batch 1000, loss[loss=0.2228, simple_loss=0.3043, pruned_loss=0.07065, over 19673.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.293, pruned_loss=0.06792, over 3810730.71 frames. ], batch size: 60, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:40:37,306 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123905.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:40:44,459 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.613e+02 5.533e+02 6.903e+02 9.244e+02 2.435e+03, threshold=1.381e+03, percent-clipped=8.0 +2023-04-02 13:41:23,922 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 13:41:33,875 INFO [train.py:903] (3/4) Epoch 19, batch 1050, loss[loss=0.2045, simple_loss=0.2707, pruned_loss=0.06915, over 19776.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2923, pruned_loss=0.06758, over 3819177.89 frames. 
], batch size: 47, lr: 4.39e-03, grad_scale: 8.0 +2023-04-02 13:41:37,444 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3173, 2.0193, 1.5976, 1.3478, 1.8654, 1.2596, 1.1938, 1.7766], + device='cuda:3'), covar=tensor([0.0908, 0.0818, 0.1126, 0.0830, 0.0508, 0.1304, 0.0735, 0.0447], + device='cuda:3'), in_proj_covar=tensor([0.0299, 0.0314, 0.0334, 0.0260, 0.0245, 0.0336, 0.0291, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:42:03,300 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 13:42:28,047 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0804, 1.7559, 2.0449, 1.6066, 4.6352, 1.1557, 2.5741, 4.9214], + device='cuda:3'), covar=tensor([0.0411, 0.2623, 0.2423, 0.2060, 0.0660, 0.2613, 0.1406, 0.0179], + device='cuda:3'), in_proj_covar=tensor([0.0400, 0.0357, 0.0377, 0.0342, 0.0368, 0.0350, 0.0369, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:42:36,021 INFO [train.py:903] (3/4) Epoch 19, batch 1100, loss[loss=0.2885, simple_loss=0.3523, pruned_loss=0.1123, over 19145.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2914, pruned_loss=0.06762, over 3814843.22 frames. ], batch size: 69, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:42:45,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.737e+02 5.703e+02 7.685e+02 1.238e+03, threshold=1.141e+03, percent-clipped=0.0 +2023-04-02 13:43:28,388 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:36,792 INFO [train.py:903] (3/4) Epoch 19, batch 1150, loss[loss=0.3038, simple_loss=0.3533, pruned_loss=0.1271, over 13924.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2911, pruned_loss=0.06737, over 3814785.33 frames. ], batch size: 138, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:43:37,115 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124054.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:43:59,177 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:08,944 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:44:37,473 INFO [train.py:903] (3/4) Epoch 19, batch 1200, loss[loss=0.2325, simple_loss=0.3158, pruned_loss=0.07464, over 19075.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2912, pruned_loss=0.06722, over 3828506.84 frames. ], batch size: 69, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:44:48,187 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.573e+02 5.076e+02 6.121e+02 8.217e+02 1.455e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-02 13:45:00,037 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-02 13:45:09,222 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 13:45:38,205 INFO [train.py:903] (3/4) Epoch 19, batch 1250, loss[loss=0.1727, simple_loss=0.2558, pruned_loss=0.04483, over 19606.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2914, pruned_loss=0.06742, over 3835640.38 frames. 
], batch size: 50, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:45:47,124 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:16,831 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124186.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:46:38,208 INFO [train.py:903] (3/4) Epoch 19, batch 1300, loss[loss=0.2616, simple_loss=0.3422, pruned_loss=0.09049, over 18781.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2922, pruned_loss=0.06803, over 3830341.74 frames. ], batch size: 74, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:46:39,476 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3531, 3.9277, 2.5081, 3.4964, 0.7989, 3.8255, 3.7683, 3.8628], + device='cuda:3'), covar=tensor([0.0669, 0.1024, 0.2081, 0.0960, 0.4158, 0.0778, 0.0913, 0.1161], + device='cuda:3'), in_proj_covar=tensor([0.0484, 0.0394, 0.0479, 0.0338, 0.0394, 0.0419, 0.0407, 0.0442], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:46:48,366 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.504e+02 4.873e+02 5.866e+02 7.986e+02 1.872e+03, threshold=1.173e+03, percent-clipped=5.0 +2023-04-02 13:46:53,198 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-02 13:47:15,087 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:47:37,774 INFO [train.py:903] (3/4) Epoch 19, batch 1350, loss[loss=0.2306, simple_loss=0.3083, pruned_loss=0.07646, over 19139.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2928, pruned_loss=0.06829, over 3832052.87 frames. ], batch size: 69, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:22,197 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:48:30,542 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8464, 4.4202, 2.5627, 3.8850, 0.9310, 4.2066, 4.2483, 4.2888], + device='cuda:3'), covar=tensor([0.0527, 0.0906, 0.2074, 0.0804, 0.4072, 0.0747, 0.0831, 0.1091], + device='cuda:3'), in_proj_covar=tensor([0.0487, 0.0393, 0.0481, 0.0338, 0.0395, 0.0420, 0.0408, 0.0443], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 13:48:39,028 INFO [train.py:903] (3/4) Epoch 19, batch 1400, loss[loss=0.2188, simple_loss=0.3025, pruned_loss=0.06757, over 19616.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2927, pruned_loss=0.06812, over 3823272.75 frames. ], batch size: 50, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:48:48,825 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 5.508e+02 6.829e+02 9.566e+02 2.163e+03, threshold=1.366e+03, percent-clipped=9.0 +2023-04-02 13:49:21,062 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-02 13:49:32,988 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:49:38,323 INFO [train.py:903] (3/4) Epoch 19, batch 1450, loss[loss=0.1863, simple_loss=0.2705, pruned_loss=0.05104, over 19770.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2912, pruned_loss=0.06739, over 3823948.37 frames. 
], batch size: 54, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:49:40,272 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 13:49:53,733 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124366.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 13:50:04,342 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.75 vs. limit=5.0 +2023-04-02 13:50:32,827 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:50:40,519 INFO [train.py:903] (3/4) Epoch 19, batch 1500, loss[loss=0.2007, simple_loss=0.2922, pruned_loss=0.05461, over 19529.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06625, over 3838042.61 frames. ], batch size: 56, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:50:50,217 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.822e+02 6.235e+02 8.378e+02 1.519e+03, threshold=1.247e+03, percent-clipped=2.0 +2023-04-02 13:51:04,439 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:28,513 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 13:51:35,999 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:51:39,966 INFO [train.py:903] (3/4) Epoch 19, batch 1550, loss[loss=0.2109, simple_loss=0.2839, pruned_loss=0.06893, over 19734.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.291, pruned_loss=0.06674, over 3844009.04 frames. ], batch size: 51, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:40,757 INFO [train.py:903] (3/4) Epoch 19, batch 1600, loss[loss=0.2212, simple_loss=0.2967, pruned_loss=0.07285, over 19653.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2907, pruned_loss=0.06662, over 3848049.62 frames. ], batch size: 53, lr: 4.38e-03, grad_scale: 8.0 +2023-04-02 13:52:51,837 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.803e+02 4.800e+02 6.281e+02 8.115e+02 1.566e+03, threshold=1.256e+03, percent-clipped=2.0 +2023-04-02 13:52:52,218 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124513.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:06,496 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 13:53:23,390 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:53:23,656 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 13:53:40,466 INFO [train.py:903] (3/4) Epoch 19, batch 1650, loss[loss=0.1982, simple_loss=0.2744, pruned_loss=0.06095, over 19848.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2914, pruned_loss=0.06713, over 3840208.22 frames. ], batch size: 52, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:43,095 INFO [train.py:903] (3/4) Epoch 19, batch 1700, loss[loss=0.1941, simple_loss=0.2866, pruned_loss=0.05078, over 19739.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.289, pruned_loss=0.06558, over 3849518.30 frames. 
], batch size: 63, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:54:44,667 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:54:53,183 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.721e+02 4.942e+02 5.736e+02 7.226e+02 1.444e+03, threshold=1.147e+03, percent-clipped=3.0 +2023-04-02 13:55:14,615 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:18,820 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124634.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:55:21,980 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 13:55:42,975 INFO [train.py:903] (3/4) Epoch 19, batch 1750, loss[loss=0.2099, simple_loss=0.2794, pruned_loss=0.07024, over 19778.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2904, pruned_loss=0.06652, over 3845822.06 frames. ], batch size: 48, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:55:46,482 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124656.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:16,378 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124681.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:56:37,883 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-02 13:56:44,213 INFO [train.py:903] (3/4) Epoch 19, batch 1800, loss[loss=0.1965, simple_loss=0.2747, pruned_loss=0.05918, over 19621.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2903, pruned_loss=0.06645, over 3829433.34 frames. ], batch size: 50, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:56:51,811 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124710.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:56:54,991 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 5.111e+02 6.286e+02 7.731e+02 1.656e+03, threshold=1.257e+03, percent-clipped=2.0 +2023-04-02 13:57:38,765 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 13:57:39,049 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:57:44,335 INFO [train.py:903] (3/4) Epoch 19, batch 1850, loss[loss=0.2, simple_loss=0.2775, pruned_loss=0.06124, over 19479.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06624, over 3821665.53 frames. ], batch size: 49, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:03,215 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:17,486 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-02 13:58:33,218 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:33,467 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:34,552 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124795.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:58:44,560 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0346, 2.1327, 2.3721, 2.6931, 2.0560, 2.6345, 2.4116, 2.2127], + device='cuda:3'), covar=tensor([0.4133, 0.3698, 0.1745, 0.2199, 0.3835, 0.1933, 0.4495, 0.3069], + device='cuda:3'), in_proj_covar=tensor([0.0870, 0.0928, 0.0696, 0.0914, 0.0852, 0.0787, 0.0824, 0.0762], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 13:58:46,482 INFO [train.py:903] (3/4) Epoch 19, batch 1900, loss[loss=0.2385, simple_loss=0.3077, pruned_loss=0.08467, over 19623.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2898, pruned_loss=0.06584, over 3816328.01 frames. ], batch size: 50, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 13:58:56,685 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 4.862e+02 5.969e+02 7.822e+02 1.490e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 13:59:01,922 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 13:59:05,770 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124820.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:07,770 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 13:59:11,242 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124825.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 13:59:26,795 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124838.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 13:59:31,995 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 13:59:46,593 INFO [train.py:903] (3/4) Epoch 19, batch 1950, loss[loss=0.1802, simple_loss=0.2589, pruned_loss=0.05081, over 19792.00 frames. ], tot_loss[loss=0.211, simple_loss=0.29, pruned_loss=0.06603, over 3818697.92 frames. ], batch size: 46, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:47,547 INFO [train.py:903] (3/4) Epoch 19, batch 2000, loss[loss=0.1638, simple_loss=0.2458, pruned_loss=0.04094, over 19391.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2904, pruned_loss=0.06617, over 3807758.32 frames. 
], batch size: 48, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:00:50,109 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1603, 2.0392, 1.8932, 1.7524, 1.5274, 1.6792, 0.6028, 1.0612], + device='cuda:3'), covar=tensor([0.0599, 0.0592, 0.0404, 0.0668, 0.1158, 0.0846, 0.1204, 0.0974], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0349, 0.0351, 0.0373, 0.0451, 0.0383, 0.0333, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:00:54,328 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:00:58,642 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.950e+02 6.179e+02 7.428e+02 1.573e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 14:01:45,076 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 14:01:48,486 INFO [train.py:903] (3/4) Epoch 19, batch 2050, loss[loss=0.234, simple_loss=0.3143, pruned_loss=0.07691, over 19623.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2895, pruned_loss=0.06566, over 3818582.61 frames. ], batch size: 57, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:06,429 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 14:02:07,323 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 14:02:25,175 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 14:02:44,759 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:02:50,764 INFO [train.py:903] (3/4) Epoch 19, batch 2100, loss[loss=0.2003, simple_loss=0.281, pruned_loss=0.05985, over 19540.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2893, pruned_loss=0.0659, over 3820974.87 frames. ], batch size: 56, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:02:52,331 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:00,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.657e+02 4.808e+02 5.766e+02 7.881e+02 2.968e+03, threshold=1.153e+03, percent-clipped=4.0 +2023-04-02 14:03:15,316 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:18,741 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 14:03:22,166 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125030.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:03:31,209 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4156, 1.5549, 1.7912, 1.7261, 2.7852, 2.2167, 2.9746, 1.3350], + device='cuda:3'), covar=tensor([0.2320, 0.4037, 0.2610, 0.1744, 0.1416, 0.2061, 0.1283, 0.4014], + device='cuda:3'), in_proj_covar=tensor([0.0515, 0.0623, 0.0684, 0.0467, 0.0614, 0.0518, 0.0653, 0.0532], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:03:39,536 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. 
Duration: 26.6166875 +2023-04-02 14:03:49,807 INFO [train.py:903] (3/4) Epoch 19, batch 2150, loss[loss=0.167, simple_loss=0.2452, pruned_loss=0.04445, over 18653.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2898, pruned_loss=0.06621, over 3818606.92 frames. ], batch size: 41, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:22,807 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125081.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:04:29,459 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.03 vs. limit=5.0 +2023-04-02 14:04:50,319 INFO [train.py:903] (3/4) Epoch 19, batch 2200, loss[loss=0.2406, simple_loss=0.315, pruned_loss=0.08312, over 19367.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2906, pruned_loss=0.06655, over 3831407.48 frames. ], batch size: 66, lr: 4.37e-03, grad_scale: 8.0 +2023-04-02 14:04:53,049 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125106.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:05:01,343 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.808e+02 4.810e+02 5.592e+02 6.977e+02 1.826e+03, threshold=1.118e+03, percent-clipped=4.0 +2023-04-02 14:05:04,018 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125115.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:34,461 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:41,540 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:05:51,111 INFO [train.py:903] (3/4) Epoch 19, batch 2250, loss[loss=0.2478, simple_loss=0.3357, pruned_loss=0.07996, over 19332.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2913, pruned_loss=0.06671, over 3830555.97 frames. ], batch size: 66, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:06:05,775 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:25,691 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125182.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:35,060 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:06:52,197 INFO [train.py:903] (3/4) Epoch 19, batch 2300, loss[loss=0.2076, simple_loss=0.2936, pruned_loss=0.06078, over 19666.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2913, pruned_loss=0.06678, over 3824891.72 frames. ], batch size: 58, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:07:04,356 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.756e+02 5.218e+02 6.176e+02 8.185e+02 2.110e+03, threshold=1.235e+03, percent-clipped=6.0 +2023-04-02 14:07:06,715 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 14:07:52,839 INFO [train.py:903] (3/4) Epoch 19, batch 2350, loss[loss=0.1876, simple_loss=0.2721, pruned_loss=0.05148, over 19772.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2909, pruned_loss=0.06629, over 3833473.20 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 4.0 +2023-04-02 14:08:32,698 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-02 14:08:45,078 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125297.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:08:49,235 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 14:08:53,423 INFO [train.py:903] (3/4) Epoch 19, batch 2400, loss[loss=0.1894, simple_loss=0.2709, pruned_loss=0.0539, over 19582.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2909, pruned_loss=0.06655, over 3823348.58 frames. ], batch size: 52, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:09:05,367 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.876e+02 4.925e+02 6.150e+02 7.515e+02 1.529e+03, threshold=1.230e+03, percent-clipped=3.0 +2023-04-02 14:09:53,258 INFO [train.py:903] (3/4) Epoch 19, batch 2450, loss[loss=0.2205, simple_loss=0.2974, pruned_loss=0.07177, over 18711.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2905, pruned_loss=0.06652, over 3815726.25 frames. ], batch size: 74, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:10:14,740 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125371.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:17,719 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125374.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:41,885 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,292 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:44,326 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125396.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:10:53,808 INFO [train.py:903] (3/4) Epoch 19, batch 2500, loss[loss=0.2607, simple_loss=0.3317, pruned_loss=0.09488, over 18696.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2912, pruned_loss=0.06689, over 3830289.61 frames. ], batch size: 74, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:11:05,671 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.898e+02 6.460e+02 8.264e+02 2.020e+03, threshold=1.292e+03, percent-clipped=4.0 +2023-04-02 14:11:13,988 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:35,784 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:11:39,886 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.1155, 3.8151, 2.9556, 3.2889, 1.6456, 3.6279, 3.5565, 3.6326], + device='cuda:3'), covar=tensor([0.0803, 0.1029, 0.1736, 0.0918, 0.2925, 0.0858, 0.1035, 0.1435], + device='cuda:3'), in_proj_covar=tensor([0.0484, 0.0393, 0.0478, 0.0335, 0.0391, 0.0417, 0.0408, 0.0443], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:11:54,214 INFO [train.py:903] (3/4) Epoch 19, batch 2550, loss[loss=0.1857, simple_loss=0.2706, pruned_loss=0.05044, over 19517.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2923, pruned_loss=0.06769, over 3801376.94 frames. 
], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:12:12,054 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0923, 5.0753, 5.8867, 5.9114, 1.8616, 5.5706, 4.7406, 5.5178], + device='cuda:3'), covar=tensor([0.1586, 0.0720, 0.0529, 0.0550, 0.6206, 0.0605, 0.0585, 0.1241], + device='cuda:3'), in_proj_covar=tensor([0.0760, 0.0702, 0.0910, 0.0796, 0.0806, 0.0659, 0.0547, 0.0839], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 14:12:30,646 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2973, 1.2087, 1.6773, 1.3134, 2.5545, 3.6747, 3.3923, 3.8038], + device='cuda:3'), covar=tensor([0.1611, 0.3902, 0.3416, 0.2380, 0.0660, 0.0175, 0.0204, 0.0260], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0313, 0.0344, 0.0261, 0.0238, 0.0180, 0.0212, 0.0244], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 14:12:38,053 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:12:47,117 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 14:12:53,798 INFO [train.py:903] (3/4) Epoch 19, batch 2600, loss[loss=0.2035, simple_loss=0.2905, pruned_loss=0.05821, over 19587.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2922, pruned_loss=0.0676, over 3801549.18 frames. ], batch size: 61, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:13:01,764 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7625, 1.8997, 2.1732, 2.3516, 1.7391, 2.2736, 2.2112, 2.0539], + device='cuda:3'), covar=tensor([0.4088, 0.3523, 0.1768, 0.2166, 0.3774, 0.1974, 0.4659, 0.3033], + device='cuda:3'), in_proj_covar=tensor([0.0871, 0.0929, 0.0699, 0.0916, 0.0853, 0.0789, 0.0827, 0.0764], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 14:13:05,878 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 4.815e+02 5.841e+02 7.665e+02 1.339e+03, threshold=1.168e+03, percent-clipped=2.0 +2023-04-02 14:13:39,741 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0569, 1.4082, 1.7898, 1.3148, 2.7450, 3.7062, 3.3895, 3.8526], + device='cuda:3'), covar=tensor([0.1729, 0.3626, 0.3269, 0.2399, 0.0585, 0.0152, 0.0215, 0.0237], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0311, 0.0343, 0.0260, 0.0237, 0.0179, 0.0212, 0.0244], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 14:13:43,176 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7941, 1.6673, 1.6295, 2.2877, 1.7123, 2.1453, 2.0698, 1.8681], + device='cuda:3'), covar=tensor([0.0778, 0.0870, 0.0998, 0.0679, 0.0839, 0.0702, 0.0921, 0.0684], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0221, 0.0227, 0.0245, 0.0228, 0.0212, 0.0190, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 14:13:52,943 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125553.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:13:53,671 INFO [train.py:903] (3/4) Epoch 19, batch 2650, loss[loss=0.219, simple_loss=0.2897, pruned_loss=0.07415, over 19492.00 frames. 
], tot_loss[loss=0.213, simple_loss=0.2917, pruned_loss=0.06719, over 3815798.35 frames. ], batch size: 49, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:15,477 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 14:14:23,594 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125578.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:34,897 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 14:14:54,498 INFO [train.py:903] (3/4) Epoch 19, batch 2700, loss[loss=0.2031, simple_loss=0.28, pruned_loss=0.06314, over 19776.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2924, pruned_loss=0.06777, over 3821459.62 frames. ], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:14:56,033 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:14:59,365 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-02 14:15:07,182 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.817e+02 5.964e+02 7.468e+02 1.608e+03, threshold=1.193e+03, percent-clipped=5.0 +2023-04-02 14:15:15,710 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:15:31,473 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8635, 1.4845, 1.4602, 1.8017, 1.5166, 1.5861, 1.4405, 1.7320], + device='cuda:3'), covar=tensor([0.1046, 0.1360, 0.1515, 0.0982, 0.1228, 0.0586, 0.1361, 0.0766], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0356, 0.0309, 0.0249, 0.0300, 0.0249, 0.0301, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:15:56,194 INFO [train.py:903] (3/4) Epoch 19, batch 2750, loss[loss=0.2172, simple_loss=0.2962, pruned_loss=0.06914, over 19786.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2919, pruned_loss=0.0675, over 3821672.06 frames. ], batch size: 56, lr: 4.36e-03, grad_scale: 8.0 +2023-04-02 14:16:49,132 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1023, 1.7459, 1.3596, 1.1356, 1.5836, 1.0401, 1.1013, 1.5542], + device='cuda:3'), covar=tensor([0.0832, 0.0793, 0.1042, 0.0850, 0.0533, 0.1268, 0.0642, 0.0438], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0312, 0.0333, 0.0260, 0.0245, 0.0334, 0.0290, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:16:55,540 INFO [train.py:903] (3/4) Epoch 19, batch 2800, loss[loss=0.1742, simple_loss=0.2453, pruned_loss=0.05151, over 19118.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2932, pruned_loss=0.06797, over 3829229.98 frames. 
], batch size: 42, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:17:01,166 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6024, 2.1816, 2.2986, 2.6814, 2.3149, 2.3668, 2.0460, 2.4719], + device='cuda:3'), covar=tensor([0.0882, 0.1699, 0.1235, 0.0988, 0.1373, 0.0451, 0.1246, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0356, 0.0309, 0.0249, 0.0300, 0.0248, 0.0300, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:17:08,446 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.000e+02 5.188e+02 6.338e+02 7.813e+02 1.733e+03, threshold=1.268e+03, percent-clipped=8.0 +2023-04-02 14:17:12,865 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:37,023 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:42,797 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125743.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:17:55,248 INFO [train.py:903] (3/4) Epoch 19, batch 2850, loss[loss=0.212, simple_loss=0.2979, pruned_loss=0.06307, over 19658.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2933, pruned_loss=0.0678, over 3829412.40 frames. ], batch size: 58, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:04,516 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0634, 1.7744, 2.1494, 1.7561, 4.5476, 1.2659, 2.6275, 4.9614], + device='cuda:3'), covar=tensor([0.0397, 0.2617, 0.2491, 0.1978, 0.0722, 0.2581, 0.1348, 0.0161], + device='cuda:3'), in_proj_covar=tensor([0.0397, 0.0358, 0.0377, 0.0342, 0.0367, 0.0350, 0.0368, 0.0388], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:18:15,696 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1815, 1.8221, 1.4233, 1.2295, 1.6292, 1.1312, 1.1989, 1.6848], + device='cuda:3'), covar=tensor([0.0778, 0.0807, 0.1116, 0.0771, 0.0541, 0.1286, 0.0608, 0.0389], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0312, 0.0334, 0.0260, 0.0245, 0.0335, 0.0291, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:18:30,688 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:18:56,094 INFO [train.py:903] (3/4) Epoch 19, batch 2900, loss[loss=0.187, simple_loss=0.2634, pruned_loss=0.05534, over 19753.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2924, pruned_loss=0.06742, over 3833443.56 frames. ], batch size: 45, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:18:56,110 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-02 14:19:09,042 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.625e+02 5.511e+02 7.350e+02 1.619e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-02 14:19:31,569 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:47,779 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3348, 1.9856, 1.5412, 1.2575, 1.7456, 1.1039, 1.2716, 1.8838], + device='cuda:3'), covar=tensor([0.0796, 0.0738, 0.1086, 0.0775, 0.0554, 0.1248, 0.0638, 0.0399], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0314, 0.0336, 0.0261, 0.0247, 0.0337, 0.0293, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:19:55,782 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:19:56,472 INFO [train.py:903] (3/4) Epoch 19, batch 2950, loss[loss=0.2411, simple_loss=0.3104, pruned_loss=0.08596, over 19699.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2931, pruned_loss=0.06793, over 3811362.42 frames. ], batch size: 59, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:05,757 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:36,551 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:50,745 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:20:57,381 INFO [train.py:903] (3/4) Epoch 19, batch 3000, loss[loss=0.234, simple_loss=0.3059, pruned_loss=0.08104, over 19671.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2933, pruned_loss=0.06803, over 3817944.59 frames. ], batch size: 60, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:20:57,381 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 14:21:08,244 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5383, 1.6024, 1.5493, 1.3464, 1.2493, 1.3153, 0.3410, 0.6325], + device='cuda:3'), covar=tensor([0.0638, 0.0695, 0.0455, 0.0729, 0.1145, 0.0988, 0.1357, 0.1257], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0352, 0.0355, 0.0378, 0.0457, 0.0388, 0.0334, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:21:10,734 INFO [train.py:937] (3/4) Epoch 19, validation: loss=0.1696, simple_loss=0.2702, pruned_loss=0.03451, over 944034.00 frames. +2023-04-02 14:21:10,736 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 14:21:10,824 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 14:21:24,051 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.456e+02 4.999e+02 6.816e+02 8.693e+02 1.814e+03, threshold=1.363e+03, percent-clipped=12.0 +2023-04-02 14:22:11,565 INFO [train.py:903] (3/4) Epoch 19, batch 3050, loss[loss=0.1847, simple_loss=0.2655, pruned_loss=0.05193, over 19613.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2935, pruned_loss=0.06774, over 3823454.84 frames. 
], batch size: 50, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:22:24,982 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=125965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:23:10,423 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5635, 1.1748, 1.5646, 1.4788, 3.1247, 1.1146, 2.3068, 3.5210], + device='cuda:3'), covar=tensor([0.0524, 0.3072, 0.2770, 0.1944, 0.0741, 0.2639, 0.1348, 0.0288], + device='cuda:3'), in_proj_covar=tensor([0.0396, 0.0359, 0.0375, 0.0342, 0.0366, 0.0350, 0.0369, 0.0388], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:23:13,569 INFO [train.py:903] (3/4) Epoch 19, batch 3100, loss[loss=0.1946, simple_loss=0.2676, pruned_loss=0.06084, over 18219.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2931, pruned_loss=0.06767, over 3812215.04 frames. ], batch size: 40, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:23:21,474 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1047, 3.5115, 2.0254, 1.9096, 3.0793, 1.6987, 1.3628, 2.2755], + device='cuda:3'), covar=tensor([0.1329, 0.0545, 0.1023, 0.0941, 0.0527, 0.1159, 0.1000, 0.0683], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0313, 0.0334, 0.0261, 0.0246, 0.0335, 0.0292, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:23:26,818 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.651e+02 5.591e+02 6.916e+02 1.279e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-02 14:23:28,124 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126016.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:13,738 INFO [train.py:903] (3/4) Epoch 19, batch 3150, loss[loss=0.1875, simple_loss=0.2741, pruned_loss=0.05042, over 19768.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2934, pruned_loss=0.06754, over 3821027.82 frames. ], batch size: 54, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:24:40,340 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 14:24:45,989 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:53,516 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:24:55,931 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:04,721 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4189, 2.1667, 1.6278, 1.4886, 2.0038, 1.2336, 1.3826, 1.8753], + device='cuda:3'), covar=tensor([0.0981, 0.0735, 0.0963, 0.0744, 0.0505, 0.1163, 0.0621, 0.0437], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0311, 0.0332, 0.0259, 0.0243, 0.0333, 0.0289, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:25:14,245 INFO [train.py:903] (3/4) Epoch 19, batch 3200, loss[loss=0.218, simple_loss=0.3023, pruned_loss=0.0668, over 19662.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2931, pruned_loss=0.06727, over 3825939.61 frames. 
], batch size: 58, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:25:21,281 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:26,837 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126114.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:25:27,595 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 4.817e+02 6.226e+02 7.515e+02 1.545e+03, threshold=1.245e+03, percent-clipped=7.0 +2023-04-02 14:25:51,074 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:15,162 INFO [train.py:903] (3/4) Epoch 19, batch 3250, loss[loss=0.2176, simple_loss=0.2958, pruned_loss=0.06968, over 18207.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2937, pruned_loss=0.06824, over 3819069.78 frames. ], batch size: 83, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:26:15,582 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126154.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:46,075 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126179.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:26:58,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3509, 3.1117, 2.2389, 2.2979, 2.1544, 2.5258, 1.1506, 2.2419], + device='cuda:3'), covar=tensor([0.0625, 0.0559, 0.0766, 0.1158, 0.1075, 0.1252, 0.1375, 0.1028], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0349, 0.0352, 0.0377, 0.0454, 0.0386, 0.0333, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:27:14,784 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:27:16,666 INFO [train.py:903] (3/4) Epoch 19, batch 3300, loss[loss=0.2146, simple_loss=0.2856, pruned_loss=0.07177, over 19584.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2921, pruned_loss=0.06715, over 3826957.54 frames. ], batch size: 52, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:27:20,136 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 14:27:30,251 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.439e+02 5.162e+02 6.410e+02 7.971e+02 2.422e+03, threshold=1.282e+03, percent-clipped=4.0 +2023-04-02 14:28:17,454 INFO [train.py:903] (3/4) Epoch 19, batch 3350, loss[loss=0.2591, simple_loss=0.3217, pruned_loss=0.09821, over 18113.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2915, pruned_loss=0.06723, over 3812679.10 frames. ], batch size: 83, lr: 4.35e-03, grad_scale: 8.0 +2023-04-02 14:28:38,574 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2336, 1.2915, 1.2369, 1.0373, 1.0788, 1.0578, 0.0860, 0.3542], + device='cuda:3'), covar=tensor([0.0651, 0.0628, 0.0426, 0.0539, 0.1239, 0.0598, 0.1207, 0.1050], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0349, 0.0352, 0.0376, 0.0452, 0.0384, 0.0332, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:29:17,954 INFO [train.py:903] (3/4) Epoch 19, batch 3400, loss[loss=0.2223, simple_loss=0.3029, pruned_loss=0.0708, over 18372.00 frames. 
], tot_loss[loss=0.2126, simple_loss=0.2913, pruned_loss=0.06693, over 3816937.21 frames. ], batch size: 84, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:29:31,302 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 5.180e+02 6.014e+02 8.021e+02 1.733e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 14:29:57,171 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126336.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:18,211 INFO [train.py:903] (3/4) Epoch 19, batch 3450, loss[loss=0.2186, simple_loss=0.2984, pruned_loss=0.06938, over 18866.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2904, pruned_loss=0.06632, over 3824454.23 frames. ], batch size: 74, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:30:22,545 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 14:30:27,175 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126360.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:28,563 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126361.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:30:41,477 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 14:31:13,313 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126398.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:31:20,717 INFO [train.py:903] (3/4) Epoch 19, batch 3500, loss[loss=0.2813, simple_loss=0.3393, pruned_loss=0.1116, over 13847.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2907, pruned_loss=0.06684, over 3806525.83 frames. ], batch size: 136, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:31:23,782 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-02 14:31:34,960 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.908e+02 6.053e+02 7.325e+02 1.346e+03, threshold=1.211e+03, percent-clipped=1.0 +2023-04-02 14:31:53,999 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9868, 3.6258, 2.4006, 3.2616, 0.7460, 3.4984, 3.4727, 3.5044], + device='cuda:3'), covar=tensor([0.0854, 0.1112, 0.2139, 0.0919, 0.4331, 0.0884, 0.1006, 0.1398], + device='cuda:3'), in_proj_covar=tensor([0.0488, 0.0394, 0.0480, 0.0337, 0.0396, 0.0419, 0.0409, 0.0446], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:32:21,707 INFO [train.py:903] (3/4) Epoch 19, batch 3550, loss[loss=0.2175, simple_loss=0.2946, pruned_loss=0.07017, over 19530.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2914, pruned_loss=0.06727, over 3808299.69 frames. 
], batch size: 56, lr: 4.34e-03, grad_scale: 4.0 +2023-04-02 14:32:26,708 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126458.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:38,980 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126468.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:48,142 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126475.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:32:57,930 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126483.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:33:00,140 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0009, 1.9333, 1.7723, 1.5971, 1.4340, 1.5688, 0.4184, 0.9012], + device='cuda:3'), covar=tensor([0.0518, 0.0552, 0.0400, 0.0637, 0.1084, 0.0702, 0.1113, 0.0938], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0349, 0.0353, 0.0376, 0.0454, 0.0386, 0.0332, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:33:21,950 INFO [train.py:903] (3/4) Epoch 19, batch 3600, loss[loss=0.1974, simple_loss=0.2775, pruned_loss=0.05861, over 19853.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2908, pruned_loss=0.06683, over 3818416.05 frames. ], batch size: 52, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:33:37,204 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.926e+02 5.826e+02 7.456e+02 2.258e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 14:34:22,223 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8372, 4.4127, 2.7580, 3.8596, 1.0843, 4.2544, 4.2237, 4.2820], + device='cuda:3'), covar=tensor([0.0565, 0.0989, 0.2018, 0.0874, 0.3911, 0.0660, 0.0819, 0.1034], + device='cuda:3'), in_proj_covar=tensor([0.0488, 0.0395, 0.0483, 0.0338, 0.0395, 0.0419, 0.0409, 0.0447], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:34:23,084 INFO [train.py:903] (3/4) Epoch 19, batch 3650, loss[loss=0.2015, simple_loss=0.2747, pruned_loss=0.06414, over 19393.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.291, pruned_loss=0.06698, over 3822009.25 frames. ], batch size: 48, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:24,552 INFO [train.py:903] (3/4) Epoch 19, batch 3700, loss[loss=0.26, simple_loss=0.3296, pruned_loss=0.09524, over 19672.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2914, pruned_loss=0.06757, over 3824619.56 frames. ], batch size: 60, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:35:38,478 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.326e+02 6.409e+02 8.349e+02 1.648e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-02 14:36:06,933 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 14:36:23,985 INFO [train.py:903] (3/4) Epoch 19, batch 3750, loss[loss=0.2207, simple_loss=0.2981, pruned_loss=0.07169, over 19751.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2911, pruned_loss=0.06711, over 3821802.72 frames. 
], batch size: 51, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:10,302 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9167, 1.3646, 1.0964, 1.0241, 1.2104, 1.0205, 1.0145, 1.2907], + device='cuda:3'), covar=tensor([0.0574, 0.0859, 0.1073, 0.0739, 0.0568, 0.1290, 0.0554, 0.0497], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0312, 0.0331, 0.0260, 0.0243, 0.0333, 0.0289, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:37:25,133 INFO [train.py:903] (3/4) Epoch 19, batch 3800, loss[loss=0.209, simple_loss=0.2905, pruned_loss=0.06374, over 19533.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2919, pruned_loss=0.06764, over 3801908.94 frames. ], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:37:40,985 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.757e+02 5.693e+02 7.302e+02 1.543e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-02 14:37:57,333 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 14:37:58,875 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126731.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:11,174 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1778, 3.6376, 2.0604, 2.0767, 3.1741, 1.8164, 1.5076, 2.1579], + device='cuda:3'), covar=tensor([0.1357, 0.0471, 0.1143, 0.0917, 0.0524, 0.1291, 0.1033, 0.0767], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0312, 0.0332, 0.0261, 0.0244, 0.0334, 0.0290, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:38:12,100 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:38:26,704 INFO [train.py:903] (3/4) Epoch 19, batch 3850, loss[loss=0.1632, simple_loss=0.238, pruned_loss=0.04417, over 18684.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2914, pruned_loss=0.06741, over 3808675.93 frames. ], batch size: 41, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:38:30,286 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126756.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:28,455 INFO [train.py:903] (3/4) Epoch 19, batch 3900, loss[loss=0.2235, simple_loss=0.3008, pruned_loss=0.07314, over 17546.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2925, pruned_loss=0.06811, over 3809949.67 frames. ], batch size: 101, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:39:30,208 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.27 vs. limit=5.0 +2023-04-02 14:39:37,802 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=126812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:39:42,918 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.082e+02 6.454e+02 7.734e+02 3.345e+03, threshold=1.291e+03, percent-clipped=6.0 +2023-04-02 14:40:04,533 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126834.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:29,140 INFO [train.py:903] (3/4) Epoch 19, batch 3950, loss[loss=0.1899, simple_loss=0.2802, pruned_loss=0.04976, over 19785.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2913, pruned_loss=0.06729, over 3815367.14 frames. 
], batch size: 56, lr: 4.34e-03, grad_scale: 8.0 +2023-04-02 14:40:33,528 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126857.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:40:35,261 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 14:41:29,513 INFO [train.py:903] (3/4) Epoch 19, batch 4000, loss[loss=0.253, simple_loss=0.3232, pruned_loss=0.09138, over 17993.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2914, pruned_loss=0.06775, over 3802537.96 frames. ], batch size: 83, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:41:43,557 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.579e+02 4.980e+02 6.258e+02 9.023e+02 1.716e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-02 14:41:57,631 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:42:16,841 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 14:42:30,357 INFO [train.py:903] (3/4) Epoch 19, batch 4050, loss[loss=0.1641, simple_loss=0.2411, pruned_loss=0.04356, over 19767.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.06724, over 3812273.30 frames. ], batch size: 47, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:17,812 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8416, 3.2682, 3.3259, 3.3572, 1.3425, 3.2221, 2.8407, 3.0956], + device='cuda:3'), covar=tensor([0.1673, 0.0970, 0.0827, 0.0902, 0.5380, 0.1000, 0.0815, 0.1326], + device='cuda:3'), in_proj_covar=tensor([0.0766, 0.0713, 0.0916, 0.0804, 0.0817, 0.0671, 0.0554, 0.0854], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 14:43:30,607 INFO [train.py:903] (3/4) Epoch 19, batch 4100, loss[loss=0.192, simple_loss=0.2767, pruned_loss=0.05363, over 19524.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2911, pruned_loss=0.06733, over 3814401.57 frames. ], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:43:45,830 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.699e+02 5.681e+02 7.096e+02 1.300e+03, threshold=1.136e+03, percent-clipped=1.0 +2023-04-02 14:44:06,969 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 14:44:31,640 INFO [train.py:903] (3/4) Epoch 19, batch 4150, loss[loss=0.2095, simple_loss=0.2952, pruned_loss=0.06194, over 19353.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2907, pruned_loss=0.06707, over 3805469.70 frames. ], batch size: 66, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:32,545 INFO [train.py:903] (3/4) Epoch 19, batch 4200, loss[loss=0.2201, simple_loss=0.3098, pruned_loss=0.06521, over 19637.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06689, over 3810116.76 frames. ], batch size: 58, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:45:35,885 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 14:45:43,861 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127113.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:45:46,907 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 4.915e+02 5.762e+02 6.825e+02 1.362e+03, threshold=1.152e+03, percent-clipped=8.0 +2023-04-02 14:46:14,876 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127138.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:46:28,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2231, 1.3028, 1.2267, 1.0440, 1.0720, 1.0916, 0.1167, 0.3901], + device='cuda:3'), covar=tensor([0.0634, 0.0609, 0.0396, 0.0527, 0.1244, 0.0607, 0.1157, 0.0996], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0348, 0.0353, 0.0375, 0.0454, 0.0384, 0.0331, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 14:46:32,718 INFO [train.py:903] (3/4) Epoch 19, batch 4250, loss[loss=0.2231, simple_loss=0.2988, pruned_loss=0.07368, over 19767.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.29, pruned_loss=0.06667, over 3811707.17 frames. ], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:46:46,857 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5671, 1.0915, 1.3225, 1.3155, 2.2260, 1.0679, 1.9940, 2.4792], + device='cuda:3'), covar=tensor([0.0653, 0.2836, 0.2898, 0.1585, 0.0808, 0.2043, 0.1082, 0.0470], + device='cuda:3'), in_proj_covar=tensor([0.0394, 0.0357, 0.0375, 0.0341, 0.0366, 0.0347, 0.0367, 0.0386], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:46:50,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 14:47:02,375 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 14:47:03,656 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:09,530 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127183.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:34,690 INFO [train.py:903] (3/4) Epoch 19, batch 4300, loss[loss=0.2064, simple_loss=0.2848, pruned_loss=0.06405, over 19764.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2904, pruned_loss=0.06708, over 3810443.34 frames. ], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:47:40,432 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127208.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:47:50,093 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.800e+02 4.893e+02 5.914e+02 7.996e+02 1.682e+03, threshold=1.183e+03, percent-clipped=7.0 +2023-04-02 14:48:28,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 14:48:35,535 INFO [train.py:903] (3/4) Epoch 19, batch 4350, loss[loss=0.1927, simple_loss=0.2853, pruned_loss=0.05007, over 19678.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2907, pruned_loss=0.06695, over 3818908.44 frames. 
], batch size: 53, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:48:51,136 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:02,191 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 14:49:23,150 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127293.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:49:35,819 INFO [train.py:903] (3/4) Epoch 19, batch 4400, loss[loss=0.1834, simple_loss=0.254, pruned_loss=0.05639, over 19296.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2901, pruned_loss=0.06658, over 3822886.20 frames. ], batch size: 44, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:49:49,568 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 5.239e+02 6.175e+02 6.853e+02 1.670e+03, threshold=1.235e+03, percent-clipped=2.0 +2023-04-02 14:50:02,112 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 14:50:12,010 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 14:50:12,999 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-02 14:50:36,323 INFO [train.py:903] (3/4) Epoch 19, batch 4450, loss[loss=0.2688, simple_loss=0.328, pruned_loss=0.1048, over 13420.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2902, pruned_loss=0.06642, over 3811280.83 frames. ], batch size: 136, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:38,004 INFO [train.py:903] (3/4) Epoch 19, batch 4500, loss[loss=0.2382, simple_loss=0.3227, pruned_loss=0.07682, over 19655.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2903, pruned_loss=0.06633, over 3815156.68 frames. ], batch size: 60, lr: 4.33e-03, grad_scale: 8.0 +2023-04-02 14:51:52,898 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 5.116e+02 6.133e+02 7.767e+02 1.446e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-02 14:51:53,551 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 14:52:28,102 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127445.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 14:52:39,295 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-04-02 14:52:39,717 INFO [train.py:903] (3/4) Epoch 19, batch 4550, loss[loss=0.2353, simple_loss=0.314, pruned_loss=0.07829, over 18192.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.0664, over 3825374.39 frames. ], batch size: 83, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:52:48,339 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 14:53:11,963 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-02 14:53:29,921 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4926, 1.2916, 1.2660, 1.5473, 1.2875, 1.3211, 1.2801, 1.4068], + device='cuda:3'), covar=tensor([0.0834, 0.1165, 0.1113, 0.0668, 0.0978, 0.0467, 0.1071, 0.0604], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0350, 0.0302, 0.0246, 0.0294, 0.0243, 0.0295, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:53:40,565 INFO [train.py:903] (3/4) Epoch 19, batch 4600, loss[loss=0.2078, simple_loss=0.2966, pruned_loss=0.05949, over 18894.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2912, pruned_loss=0.06659, over 3818032.00 frames. ], batch size: 74, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:53:52,444 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5779, 2.3387, 2.1707, 2.6444, 2.2611, 2.1234, 1.9232, 2.3369], + device='cuda:3'), covar=tensor([0.0891, 0.1571, 0.1364, 0.1087, 0.1402, 0.0516, 0.1362, 0.0677], + device='cuda:3'), in_proj_covar=tensor([0.0262, 0.0351, 0.0303, 0.0247, 0.0295, 0.0244, 0.0296, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:53:54,253 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 5.018e+02 6.286e+02 8.427e+02 2.189e+03, threshold=1.257e+03, percent-clipped=8.0 +2023-04-02 14:54:11,483 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3901, 1.4119, 1.9890, 1.6274, 3.0199, 4.5802, 4.5289, 5.0401], + device='cuda:3'), covar=tensor([0.1561, 0.3747, 0.3147, 0.2285, 0.0606, 0.0202, 0.0160, 0.0186], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0316, 0.0347, 0.0261, 0.0239, 0.0182, 0.0214, 0.0246], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 14:54:16,999 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7880, 4.2752, 4.4775, 4.4916, 1.6295, 4.2230, 3.6620, 4.1931], + device='cuda:3'), covar=tensor([0.1393, 0.0828, 0.0539, 0.0577, 0.5655, 0.0806, 0.0625, 0.1039], + device='cuda:3'), in_proj_covar=tensor([0.0758, 0.0711, 0.0910, 0.0795, 0.0812, 0.0666, 0.0551, 0.0848], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 14:54:32,459 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:34,766 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:54:37,339 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-02 14:54:39,919 INFO [train.py:903] (3/4) Epoch 19, batch 4650, loss[loss=0.1972, simple_loss=0.2849, pruned_loss=0.05476, over 19670.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.06585, over 3818310.69 frames. 
], batch size: 60, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:54:50,256 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3043, 3.8214, 3.9460, 3.9543, 1.6134, 3.7415, 3.2824, 3.7172], + device='cuda:3'), covar=tensor([0.1737, 0.0808, 0.0708, 0.0764, 0.5584, 0.0843, 0.0741, 0.1192], + device='cuda:3'), in_proj_covar=tensor([0.0761, 0.0715, 0.0914, 0.0798, 0.0814, 0.0668, 0.0553, 0.0850], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 14:54:55,882 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 14:55:05,459 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127574.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:07,391 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 14:55:16,480 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4871, 1.3213, 1.3322, 1.6972, 1.3723, 1.6240, 1.6936, 1.5351], + device='cuda:3'), covar=tensor([0.0926, 0.1057, 0.1130, 0.0823, 0.0912, 0.0839, 0.0888, 0.0745], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0246, 0.0229, 0.0212, 0.0190, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 14:55:40,716 INFO [train.py:903] (3/4) Epoch 19, batch 4700, loss[loss=0.2218, simple_loss=0.3015, pruned_loss=0.07101, over 19273.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2892, pruned_loss=0.0657, over 3823619.38 frames. ], batch size: 66, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:55:50,515 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127611.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:55:55,864 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 5.036e+02 6.203e+02 8.078e+02 1.735e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-02 14:56:02,608 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 14:56:41,689 INFO [train.py:903] (3/4) Epoch 19, batch 4750, loss[loss=0.2261, simple_loss=0.3107, pruned_loss=0.07074, over 19371.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2885, pruned_loss=0.06554, over 3817230.45 frames. ], batch size: 70, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:13,621 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:57:41,813 INFO [train.py:903] (3/4) Epoch 19, batch 4800, loss[loss=0.2464, simple_loss=0.3162, pruned_loss=0.08834, over 19663.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2892, pruned_loss=0.06582, over 3813611.09 frames. 
], batch size: 55, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:57:44,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6444, 1.3852, 1.5192, 1.5919, 3.2310, 1.1731, 2.2781, 3.6033], + device='cuda:3'), covar=tensor([0.0447, 0.2751, 0.2820, 0.1862, 0.0658, 0.2515, 0.1353, 0.0252], + device='cuda:3'), in_proj_covar=tensor([0.0394, 0.0358, 0.0377, 0.0341, 0.0367, 0.0347, 0.0369, 0.0386], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 14:57:55,392 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.990e+02 6.329e+02 8.030e+02 1.437e+03, threshold=1.266e+03, percent-clipped=1.0 +2023-04-02 14:58:07,639 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127726.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:58:40,153 INFO [train.py:903] (3/4) Epoch 19, batch 4850, loss[loss=0.2089, simple_loss=0.3012, pruned_loss=0.05826, over 19603.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2913, pruned_loss=0.06712, over 3815081.50 frames. ], batch size: 57, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:04,712 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 14:59:22,982 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127789.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 14:59:25,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 14:59:30,825 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 14:59:30,849 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 14:59:32,243 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 14:59:40,801 INFO [train.py:903] (3/4) Epoch 19, batch 4900, loss[loss=0.2126, simple_loss=0.2951, pruned_loss=0.06502, over 19554.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2911, pruned_loss=0.06686, over 3815653.92 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 14:59:40,836 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 14:59:55,908 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.848e+02 5.865e+02 7.992e+02 2.664e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-02 15:00:01,789 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 15:00:41,591 INFO [train.py:903] (3/4) Epoch 19, batch 4950, loss[loss=0.3139, simple_loss=0.368, pruned_loss=0.1298, over 12811.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2913, pruned_loss=0.06688, over 3827723.53 frames. ], batch size: 136, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:00:58,838 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:00:59,643 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 15:01:22,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-02 15:01:24,505 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:26,339 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=127891.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:01:41,515 INFO [train.py:903] (3/4) Epoch 19, batch 5000, loss[loss=0.1783, simple_loss=0.2643, pruned_loss=0.04614, over 19605.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2911, pruned_loss=0.0667, over 3830466.90 frames. ], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:01:42,720 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127904.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:01:51,187 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 15:01:55,670 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.675e+02 5.614e+02 6.818e+02 2.294e+03, threshold=1.123e+03, percent-clipped=3.0 +2023-04-02 15:02:03,226 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 15:02:23,457 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8804, 1.3068, 1.6356, 0.5814, 1.9835, 2.4293, 2.1228, 2.5915], + device='cuda:3'), covar=tensor([0.1681, 0.3575, 0.3138, 0.2680, 0.0597, 0.0257, 0.0339, 0.0351], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0314, 0.0345, 0.0260, 0.0237, 0.0180, 0.0213, 0.0245], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 15:02:41,854 INFO [train.py:903] (3/4) Epoch 19, batch 5050, loss[loss=0.2087, simple_loss=0.281, pruned_loss=0.06826, over 19417.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2912, pruned_loss=0.06668, over 3833453.58 frames. ], batch size: 48, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:02:55,372 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8424, 1.5190, 1.6579, 1.7713, 4.4001, 1.1636, 2.4605, 4.7742], + device='cuda:3'), covar=tensor([0.0445, 0.2857, 0.2971, 0.1876, 0.0760, 0.2727, 0.1539, 0.0163], + device='cuda:3'), in_proj_covar=tensor([0.0398, 0.0358, 0.0378, 0.0344, 0.0368, 0.0349, 0.0372, 0.0390], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:03:16,179 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127982.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:18,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 15:03:43,134 INFO [train.py:903] (3/4) Epoch 19, batch 5100, loss[loss=0.1828, simple_loss=0.2755, pruned_loss=0.04502, over 19297.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2913, pruned_loss=0.06672, over 3829405.99 frames. ], batch size: 66, lr: 4.32e-03, grad_scale: 8.0 +2023-04-02 15:03:45,734 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:47,029 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:03:56,497 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-02 15:03:58,282 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.139e+02 4.818e+02 5.706e+02 8.227e+02 1.561e+03, threshold=1.141e+03, percent-clipped=7.0 +2023-04-02 15:04:00,667 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 15:04:04,156 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 15:04:07,798 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128024.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:04:25,500 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8195, 1.3061, 1.4649, 1.4921, 3.2748, 1.0811, 2.3710, 3.7877], + device='cuda:3'), covar=tensor([0.0641, 0.3006, 0.2945, 0.2115, 0.0909, 0.2748, 0.1467, 0.0305], + device='cuda:3'), in_proj_covar=tensor([0.0398, 0.0360, 0.0379, 0.0345, 0.0370, 0.0350, 0.0373, 0.0391], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:04:43,095 INFO [train.py:903] (3/4) Epoch 19, batch 5150, loss[loss=0.2367, simple_loss=0.3057, pruned_loss=0.08384, over 19587.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2907, pruned_loss=0.06645, over 3829661.78 frames. ], batch size: 52, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:04:58,122 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:05:31,570 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:05:44,863 INFO [train.py:903] (3/4) Epoch 19, batch 5200, loss[loss=0.1575, simple_loss=0.2339, pruned_loss=0.04058, over 19332.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.06627, over 3830207.20 frames. ], batch size: 44, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:05:59,005 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.969e+02 4.920e+02 6.208e+02 7.921e+02 1.726e+03, threshold=1.242e+03, percent-clipped=7.0 +2023-04-02 15:05:59,055 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 15:06:18,700 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1550, 5.0683, 5.9382, 5.9453, 1.7927, 5.6209, 4.7047, 5.5485], + device='cuda:3'), covar=tensor([0.1603, 0.0842, 0.0554, 0.0618, 0.6311, 0.0630, 0.0611, 0.1280], + device='cuda:3'), in_proj_covar=tensor([0.0753, 0.0710, 0.0910, 0.0798, 0.0806, 0.0666, 0.0548, 0.0842], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 15:06:28,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128139.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:30,660 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128141.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:06:41,119 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 15:06:45,759 INFO [train.py:903] (3/4) Epoch 19, batch 5250, loss[loss=0.2004, simple_loss=0.28, pruned_loss=0.06037, over 19275.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2903, pruned_loss=0.06634, over 3826587.29 frames. 
], batch size: 66, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:06:50,379 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3752, 2.4153, 2.0413, 2.6779, 2.3764, 2.2684, 2.0038, 2.3980], + device='cuda:3'), covar=tensor([0.0999, 0.1489, 0.1431, 0.1007, 0.1295, 0.0479, 0.1306, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0355, 0.0307, 0.0250, 0.0297, 0.0247, 0.0298, 0.0250], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:06:53,717 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128160.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:07:23,111 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128185.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:07:45,449 INFO [train.py:903] (3/4) Epoch 19, batch 5300, loss[loss=0.2691, simple_loss=0.3324, pruned_loss=0.1028, over 18262.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.06698, over 3817048.84 frames. ], batch size: 83, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:07:54,712 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:07:59,667 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.001e+02 6.088e+02 7.600e+02 1.403e+03, threshold=1.218e+03, percent-clipped=2.0 +2023-04-02 15:08:00,872 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 15:08:21,813 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128234.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:46,733 INFO [train.py:903] (3/4) Epoch 19, batch 5350, loss[loss=0.1771, simple_loss=0.255, pruned_loss=0.04961, over 19775.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2913, pruned_loss=0.06682, over 3803866.32 frames. ], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:08:50,322 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:08:57,122 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:04,899 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 15:09:20,021 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 15:09:26,926 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128287.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:09:47,391 INFO [train.py:903] (3/4) Epoch 19, batch 5400, loss[loss=0.2173, simple_loss=0.298, pruned_loss=0.06829, over 19657.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2911, pruned_loss=0.06646, over 3805111.97 frames. 
], batch size: 55, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:10:01,763 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 4.631e+02 5.571e+02 7.152e+02 1.493e+03, threshold=1.114e+03, percent-clipped=2.0 +2023-04-02 15:10:15,382 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:29,855 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128338.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:43,533 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:10:48,549 INFO [train.py:903] (3/4) Epoch 19, batch 5450, loss[loss=0.2457, simple_loss=0.3254, pruned_loss=0.08302, over 19675.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2908, pruned_loss=0.06627, over 3819976.15 frames. ], batch size: 59, lr: 4.31e-03, grad_scale: 16.0 +2023-04-02 15:11:39,585 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128395.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:11:50,176 INFO [train.py:903] (3/4) Epoch 19, batch 5500, loss[loss=0.2133, simple_loss=0.2908, pruned_loss=0.06792, over 19599.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06606, over 3808245.18 frames. ], batch size: 57, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:06,851 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.166e+02 6.121e+02 7.872e+02 1.632e+03, threshold=1.224e+03, percent-clipped=5.0 +2023-04-02 15:12:10,665 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128420.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:12,701 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 15:12:23,815 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128431.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:12:50,272 INFO [train.py:903] (3/4) Epoch 19, batch 5550, loss[loss=0.2438, simple_loss=0.328, pruned_loss=0.07982, over 19668.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2901, pruned_loss=0.06619, over 3812451.83 frames. ], batch size: 60, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:12:56,488 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 15:13:25,601 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3220, 2.3629, 2.5898, 3.1152, 2.3251, 3.0654, 2.7576, 2.4045], + device='cuda:3'), covar=tensor([0.4200, 0.3985, 0.1739, 0.2360, 0.4184, 0.1942, 0.4170, 0.3043], + device='cuda:3'), in_proj_covar=tensor([0.0879, 0.0936, 0.0701, 0.0925, 0.0861, 0.0797, 0.0834, 0.0767], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 15:13:33,389 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.05 vs. limit=5.0 +2023-04-02 15:13:44,847 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 15:13:51,306 INFO [train.py:903] (3/4) Epoch 19, batch 5600, loss[loss=0.2248, simple_loss=0.2994, pruned_loss=0.07514, over 17545.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.29, pruned_loss=0.06609, over 3824055.59 frames. 
], batch size: 101, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:14:01,896 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128512.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:07,029 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.064e+02 4.843e+02 5.877e+02 7.578e+02 1.194e+03, threshold=1.175e+03, percent-clipped=0.0 +2023-04-02 15:14:32,677 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:14:52,044 INFO [train.py:903] (3/4) Epoch 19, batch 5650, loss[loss=0.2935, simple_loss=0.3456, pruned_loss=0.1207, over 19787.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.29, pruned_loss=0.06627, over 3818980.84 frames. ], batch size: 54, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:27,799 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:37,259 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 15:15:53,141 INFO [train.py:903] (3/4) Epoch 19, batch 5700, loss[loss=0.2154, simple_loss=0.3029, pruned_loss=0.06397, over 19786.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2923, pruned_loss=0.06744, over 3807664.62 frames. ], batch size: 56, lr: 4.31e-03, grad_scale: 8.0 +2023-04-02 15:15:54,793 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128605.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:15:58,274 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:08,026 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 5.156e+02 6.108e+02 7.232e+02 1.309e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 15:16:25,772 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:16:53,490 INFO [train.py:903] (3/4) Epoch 19, batch 5750, loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.05984, over 19670.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06662, over 3815958.38 frames. ], batch size: 53, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:16:55,739 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 15:17:05,272 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 15:17:09,593 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 15:17:27,299 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128682.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:17:55,080 INFO [train.py:903] (3/4) Epoch 19, batch 5800, loss[loss=0.1723, simple_loss=0.2502, pruned_loss=0.04718, over 19411.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06646, over 3814943.39 frames. ], batch size: 48, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:18:10,461 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 4.671e+02 6.414e+02 7.787e+02 1.302e+03, threshold=1.283e+03, percent-clipped=2.0 +2023-04-02 15:18:55,625 INFO [train.py:903] (3/4) Epoch 19, batch 5850, loss[loss=0.1946, simple_loss=0.2812, pruned_loss=0.05404, over 19660.00 frames. 
], tot_loss[loss=0.2115, simple_loss=0.2903, pruned_loss=0.06636, over 3819696.49 frames. ], batch size: 55, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:19:20,645 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=128775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:26,383 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6519, 4.2404, 2.6521, 3.7436, 1.2197, 4.1076, 4.0229, 4.0985], + device='cuda:3'), covar=tensor([0.0640, 0.0920, 0.2007, 0.0855, 0.3629, 0.0668, 0.0868, 0.1139], + device='cuda:3'), in_proj_covar=tensor([0.0494, 0.0398, 0.0487, 0.0342, 0.0399, 0.0422, 0.0413, 0.0450], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:19:48,478 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128797.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:50,618 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:19:55,918 INFO [train.py:903] (3/4) Epoch 19, batch 5900, loss[loss=0.201, simple_loss=0.2729, pruned_loss=0.06457, over 19720.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2895, pruned_loss=0.06588, over 3834301.94 frames. ], batch size: 47, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:20:02,596 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 15:20:11,628 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.258e+02 4.778e+02 5.849e+02 7.721e+02 1.320e+03, threshold=1.170e+03, percent-clipped=1.0 +2023-04-02 15:20:22,225 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 15:20:56,181 INFO [train.py:903] (3/4) Epoch 19, batch 5950, loss[loss=0.1875, simple_loss=0.2722, pruned_loss=0.05142, over 19698.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06596, over 3833384.73 frames. ], batch size: 59, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:13,042 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 15:21:41,370 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128890.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:21:57,518 INFO [train.py:903] (3/4) Epoch 19, batch 6000, loss[loss=0.1701, simple_loss=0.2489, pruned_loss=0.04567, over 19760.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06564, over 3824577.24 frames. ], batch size: 47, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:21:57,518 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 15:22:12,604 INFO [train.py:937] (3/4) Epoch 19, validation: loss=0.1702, simple_loss=0.2702, pruned_loss=0.03514, over 944034.00 frames. +2023-04-02 15:22:12,606 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 15:22:17,226 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128908.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:22:28,003 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.136e+02 6.485e+02 9.043e+02 2.174e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 15:23:11,990 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-04-02 15:23:13,563 INFO [train.py:903] (3/4) Epoch 19, batch 6050, loss[loss=0.221, simple_loss=0.3007, pruned_loss=0.07063, over 19601.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2891, pruned_loss=0.0655, over 3822493.56 frames. ], batch size: 61, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:15,361 INFO [train.py:903] (3/4) Epoch 19, batch 6100, loss[loss=0.1942, simple_loss=0.2735, pruned_loss=0.05745, over 19782.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.288, pruned_loss=0.06456, over 3840789.91 frames. ], batch size: 46, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:24:30,774 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.994e+02 6.076e+02 7.380e+02 1.472e+03, threshold=1.215e+03, percent-clipped=4.0 +2023-04-02 15:25:15,141 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129053.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:25:15,828 INFO [train.py:903] (3/4) Epoch 19, batch 6150, loss[loss=0.1776, simple_loss=0.2553, pruned_loss=0.04999, over 19746.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.0657, over 3835853.46 frames. ], batch size: 46, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:25:17,149 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9707, 2.0302, 2.1391, 1.9451, 3.6542, 1.6574, 2.9093, 3.6522], + device='cuda:3'), covar=tensor([0.0446, 0.2335, 0.2311, 0.1799, 0.0636, 0.2331, 0.1686, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0395, 0.0357, 0.0377, 0.0341, 0.0367, 0.0348, 0.0370, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:25:18,747 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.43 vs. limit=5.0 +2023-04-02 15:25:44,286 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 15:25:44,624 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:26:16,327 INFO [train.py:903] (3/4) Epoch 19, batch 6200, loss[loss=0.2242, simple_loss=0.3013, pruned_loss=0.07353, over 19617.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2917, pruned_loss=0.06663, over 3817269.79 frames. ], batch size: 57, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:26:32,071 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.839e+02 4.815e+02 6.250e+02 7.621e+02 1.523e+03, threshold=1.250e+03, percent-clipped=7.0 +2023-04-02 15:27:04,018 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129143.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:07,735 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129146.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:17,161 INFO [train.py:903] (3/4) Epoch 19, batch 6250, loss[loss=0.2218, simple_loss=0.2985, pruned_loss=0.07257, over 12681.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2926, pruned_loss=0.06707, over 3810637.36 frames. ], batch size: 135, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:27:38,408 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:27:47,708 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 15:28:17,842 INFO [train.py:903] (3/4) Epoch 19, batch 6300, loss[loss=0.2019, simple_loss=0.2851, pruned_loss=0.05938, over 18768.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2922, pruned_loss=0.06689, over 3822937.30 frames. ], batch size: 74, lr: 4.30e-03, grad_scale: 8.0 +2023-04-02 15:28:33,726 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 5.181e+02 6.373e+02 8.503e+02 1.874e+03, threshold=1.275e+03, percent-clipped=7.0 +2023-04-02 15:29:17,504 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129252.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:29:19,488 INFO [train.py:903] (3/4) Epoch 19, batch 6350, loss[loss=0.2126, simple_loss=0.3002, pruned_loss=0.06249, over 18955.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2913, pruned_loss=0.0663, over 3823209.02 frames. ], batch size: 74, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:29:25,283 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129258.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:30:21,284 INFO [train.py:903] (3/4) Epoch 19, batch 6400, loss[loss=0.1996, simple_loss=0.2783, pruned_loss=0.06048, over 19725.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2902, pruned_loss=0.06601, over 3824345.32 frames. ], batch size: 51, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:30:36,125 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9544, 1.8760, 1.8116, 1.5066, 1.5018, 1.5365, 0.4683, 0.8266], + device='cuda:3'), covar=tensor([0.0562, 0.0565, 0.0380, 0.0632, 0.1037, 0.0787, 0.1145, 0.1004], + device='cuda:3'), in_proj_covar=tensor([0.0354, 0.0347, 0.0350, 0.0374, 0.0449, 0.0380, 0.0329, 0.0336], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 15:30:36,856 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.331e+02 4.991e+02 6.008e+02 7.727e+02 1.608e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-02 15:31:22,261 INFO [train.py:903] (3/4) Epoch 19, batch 6450, loss[loss=0.1693, simple_loss=0.2431, pruned_loss=0.04774, over 19732.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2903, pruned_loss=0.06558, over 3835706.20 frames. ], batch size: 45, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:31:39,037 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129367.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:32:06,817 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 15:32:22,354 INFO [train.py:903] (3/4) Epoch 19, batch 6500, loss[loss=0.164, simple_loss=0.2458, pruned_loss=0.04111, over 19712.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2901, pruned_loss=0.06562, over 3827378.15 frames. ], batch size: 45, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:32:29,749 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 15:32:38,766 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 5.340e+02 6.897e+02 8.888e+02 1.987e+03, threshold=1.379e+03, percent-clipped=7.0 +2023-04-02 15:32:56,871 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129431.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:33:21,208 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1364, 1.9863, 1.9213, 1.6540, 1.5328, 1.6825, 0.5287, 1.0581], + device='cuda:3'), covar=tensor([0.0583, 0.0621, 0.0410, 0.0694, 0.1100, 0.0856, 0.1192, 0.0937], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0351, 0.0353, 0.0377, 0.0453, 0.0384, 0.0332, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 15:33:24,201 INFO [train.py:903] (3/4) Epoch 19, batch 6550, loss[loss=0.1936, simple_loss=0.2775, pruned_loss=0.05486, over 19053.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.292, pruned_loss=0.06696, over 3811522.55 frames. ], batch size: 69, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:01,180 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 15:34:03,300 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.04 vs. limit=5.0 +2023-04-02 15:34:24,464 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-02 15:34:24,939 INFO [train.py:903] (3/4) Epoch 19, batch 6600, loss[loss=0.2067, simple_loss=0.2907, pruned_loss=0.06136, over 19688.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2916, pruned_loss=0.06682, over 3819086.91 frames. ], batch size: 59, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:34:37,646 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129514.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:34:40,455 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.539e+02 5.847e+02 6.807e+02 8.552e+02 1.538e+03, threshold=1.361e+03, percent-clipped=4.0 +2023-04-02 15:35:07,250 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:35:25,978 INFO [train.py:903] (3/4) Epoch 19, batch 6650, loss[loss=0.1803, simple_loss=0.2605, pruned_loss=0.05003, over 19748.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.06644, over 3806177.29 frames. ], batch size: 51, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:35:59,848 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3416, 1.1411, 1.2531, 1.3105, 2.1225, 1.1159, 1.8700, 2.3236], + device='cuda:3'), covar=tensor([0.0487, 0.2120, 0.2182, 0.1403, 0.0598, 0.1800, 0.1645, 0.0402], + device='cuda:3'), in_proj_covar=tensor([0.0395, 0.0357, 0.0376, 0.0339, 0.0366, 0.0347, 0.0368, 0.0389], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:36:26,349 INFO [train.py:903] (3/4) Epoch 19, batch 6700, loss[loss=0.1829, simple_loss=0.2635, pruned_loss=0.05111, over 19604.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2906, pruned_loss=0.0662, over 3812656.85 frames. 
], batch size: 50, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:36:42,863 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.775e+02 4.923e+02 5.649e+02 7.598e+02 1.428e+03, threshold=1.130e+03, percent-clipped=1.0 +2023-04-02 15:36:51,107 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129623.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:36:58,789 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129630.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:37:17,658 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6138, 1.4726, 1.5060, 1.9672, 1.5479, 1.8822, 1.8745, 1.6560], + device='cuda:3'), covar=tensor([0.0799, 0.0895, 0.0994, 0.0670, 0.0819, 0.0679, 0.0826, 0.0714], + device='cuda:3'), in_proj_covar=tensor([0.0208, 0.0219, 0.0224, 0.0244, 0.0226, 0.0209, 0.0188, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 15:37:18,790 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129648.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:37:25,090 INFO [train.py:903] (3/4) Epoch 19, batch 6750, loss[loss=0.2047, simple_loss=0.2807, pruned_loss=0.06434, over 19752.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2905, pruned_loss=0.06622, over 3808823.61 frames. ], batch size: 54, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:20,255 INFO [train.py:903] (3/4) Epoch 19, batch 6800, loss[loss=0.2105, simple_loss=0.2894, pruned_loss=0.06582, over 19609.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2911, pruned_loss=0.06694, over 3812442.97 frames. ], batch size: 50, lr: 4.29e-03, grad_scale: 8.0 +2023-04-02 15:38:34,413 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.884e+02 6.226e+02 8.201e+02 1.689e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-02 15:39:05,129 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 15:39:06,149 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 15:39:08,367 INFO [train.py:903] (3/4) Epoch 20, batch 0, loss[loss=0.2291, simple_loss=0.2922, pruned_loss=0.08298, over 19306.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2922, pruned_loss=0.08298, over 19306.00 frames. ], batch size: 44, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:39:08,367 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 15:39:19,738 INFO [train.py:937] (3/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2703, pruned_loss=0.03432, over 944034.00 frames. +2023-04-02 15:39:19,739 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 15:39:31,872 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-02 15:39:40,392 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8975, 1.6509, 1.6161, 1.9664, 1.7030, 1.6401, 1.5118, 1.8167], + device='cuda:3'), covar=tensor([0.1124, 0.1588, 0.1561, 0.1069, 0.1355, 0.0602, 0.1480, 0.0782], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0357, 0.0308, 0.0251, 0.0301, 0.0249, 0.0301, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:39:49,274 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7662, 1.2514, 1.5348, 1.4498, 3.3116, 1.1768, 2.3728, 3.7496], + device='cuda:3'), covar=tensor([0.0443, 0.2930, 0.2951, 0.1997, 0.0726, 0.2621, 0.1327, 0.0243], + device='cuda:3'), in_proj_covar=tensor([0.0396, 0.0358, 0.0377, 0.0340, 0.0366, 0.0348, 0.0371, 0.0390], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:39:50,422 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3204, 1.3589, 1.8078, 1.2774, 2.6132, 3.5432, 3.2863, 3.7787], + device='cuda:3'), covar=tensor([0.1507, 0.3668, 0.3149, 0.2383, 0.0567, 0.0183, 0.0211, 0.0246], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0317, 0.0348, 0.0263, 0.0239, 0.0182, 0.0214, 0.0249], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 15:40:12,661 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129775.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:40:20,201 INFO [train.py:903] (3/4) Epoch 20, batch 50, loss[loss=0.1888, simple_loss=0.267, pruned_loss=0.05528, over 19773.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2928, pruned_loss=0.06661, over 874181.36 frames. ], batch size: 49, lr: 4.18e-03, grad_scale: 8.0 +2023-04-02 15:40:51,343 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129809.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:40:54,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 15:41:03,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.543e+02 6.891e+02 8.835e+02 1.770e+03, threshold=1.378e+03, percent-clipped=8.0 +2023-04-02 15:41:20,220 INFO [train.py:903] (3/4) Epoch 20, batch 100, loss[loss=0.2476, simple_loss=0.3177, pruned_loss=0.08879, over 13148.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2921, pruned_loss=0.06631, over 1522410.08 frames. ], batch size: 135, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:41:31,342 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 15:41:56,935 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3001, 1.8910, 1.8969, 2.6592, 1.7960, 2.4243, 2.3570, 2.2886], + device='cuda:3'), covar=tensor([0.0749, 0.0900, 0.0960, 0.0830, 0.0956, 0.0689, 0.0906, 0.0630], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0243, 0.0226, 0.0210, 0.0188, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 15:42:21,223 INFO [train.py:903] (3/4) Epoch 20, batch 150, loss[loss=0.2187, simple_loss=0.2988, pruned_loss=0.06925, over 19602.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2924, pruned_loss=0.066, over 2038359.15 frames. 
], batch size: 50, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:42:30,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129890.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 15:43:03,460 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.799e+02 5.935e+02 7.467e+02 3.197e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 15:43:21,685 INFO [train.py:903] (3/4) Epoch 20, batch 200, loss[loss=0.1971, simple_loss=0.2649, pruned_loss=0.06466, over 19072.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06578, over 2437084.97 frames. ], batch size: 42, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:43:22,845 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 15:44:13,780 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=129974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:44:23,550 INFO [train.py:903] (3/4) Epoch 20, batch 250, loss[loss=0.2243, simple_loss=0.3065, pruned_loss=0.07102, over 19659.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2922, pruned_loss=0.06582, over 2741986.01 frames. ], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:45:06,933 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.298e+02 6.354e+02 8.098e+02 1.543e+03, threshold=1.271e+03, percent-clipped=7.0 +2023-04-02 15:45:08,342 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130018.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:10,561 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:45:25,485 INFO [train.py:903] (3/4) Epoch 20, batch 300, loss[loss=0.1992, simple_loss=0.2915, pruned_loss=0.05349, over 19682.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2917, pruned_loss=0.06618, over 2997798.22 frames. ], batch size: 59, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:25,933 INFO [train.py:903] (3/4) Epoch 20, batch 350, loss[loss=0.2621, simple_loss=0.335, pruned_loss=0.09458, over 19537.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2902, pruned_loss=0.06577, over 3179680.85 frames. ], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:46:35,018 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 15:46:35,365 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130089.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:46:50,775 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.65 vs. limit=5.0 +2023-04-02 15:47:08,654 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 5.230e+02 6.397e+02 7.792e+02 1.393e+03, threshold=1.279e+03, percent-clipped=3.0 +2023-04-02 15:47:26,645 INFO [train.py:903] (3/4) Epoch 20, batch 400, loss[loss=0.1881, simple_loss=0.2753, pruned_loss=0.05048, over 19577.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06629, over 3316202.86 frames. 
], batch size: 52, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:47:32,417 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7532, 4.2420, 4.4513, 4.4404, 1.7833, 4.1918, 3.6469, 4.1515], + device='cuda:3'), covar=tensor([0.1600, 0.0945, 0.0545, 0.0633, 0.5769, 0.0926, 0.0644, 0.0971], + device='cuda:3'), in_proj_covar=tensor([0.0759, 0.0712, 0.0914, 0.0800, 0.0811, 0.0672, 0.0550, 0.0849], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 15:47:42,950 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130146.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:47:53,134 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130153.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:48:14,998 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130171.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 15:48:27,089 INFO [train.py:903] (3/4) Epoch 20, batch 450, loss[loss=0.2104, simple_loss=0.2947, pruned_loss=0.063, over 17317.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2893, pruned_loss=0.06565, over 3440573.23 frames. ], batch size: 101, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:03,625 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 15:49:04,563 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 15:49:09,155 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.003e+02 6.443e+02 8.043e+02 1.786e+03, threshold=1.289e+03, percent-clipped=5.0 +2023-04-02 15:49:27,174 INFO [train.py:903] (3/4) Epoch 20, batch 500, loss[loss=0.2029, simple_loss=0.2937, pruned_loss=0.05606, over 19544.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.29, pruned_loss=0.06585, over 3520180.76 frames. ], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:49:37,967 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-02 15:50:10,067 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130268.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:50:15,785 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-02 15:50:27,988 INFO [train.py:903] (3/4) Epoch 20, batch 550, loss[loss=0.2365, simple_loss=0.3165, pruned_loss=0.07823, over 19700.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06544, over 3598288.58 frames. ], batch size: 59, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:11,220 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.062e+02 6.327e+02 8.479e+02 2.088e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 15:51:28,460 INFO [train.py:903] (3/4) Epoch 20, batch 600, loss[loss=0.2125, simple_loss=0.2933, pruned_loss=0.06583, over 19766.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2894, pruned_loss=0.06562, over 3657674.87 frames. 
], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:51:44,890 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130345.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:06,331 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130362.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:08,732 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130364.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:15,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 15:52:16,083 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130370.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:52:30,045 INFO [train.py:903] (3/4) Epoch 20, batch 650, loss[loss=0.2305, simple_loss=0.3068, pruned_loss=0.07705, over 18198.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.06621, over 3697268.43 frames. ], batch size: 83, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:52:56,085 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130402.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:53:13,835 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.680e+02 5.222e+02 6.307e+02 8.250e+02 2.391e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 15:53:31,367 INFO [train.py:903] (3/4) Epoch 20, batch 700, loss[loss=0.2399, simple_loss=0.3316, pruned_loss=0.07412, over 19783.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06605, over 3715720.77 frames. ], batch size: 56, lr: 4.17e-03, grad_scale: 8.0 +2023-04-02 15:53:55,131 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130450.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:02,155 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:28,625 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130477.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:32,015 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:54:35,653 INFO [train.py:903] (3/4) Epoch 20, batch 750, loss[loss=0.2146, simple_loss=0.3006, pruned_loss=0.06433, over 19744.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2918, pruned_loss=0.06693, over 3728497.70 frames. 
], batch size: 63, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:54:40,887 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0803, 2.1219, 2.3276, 2.1473, 3.1708, 2.7545, 3.2660, 2.2084], + device='cuda:3'), covar=tensor([0.1837, 0.3218, 0.2106, 0.1541, 0.1248, 0.1678, 0.1313, 0.3352], + device='cuda:3'), in_proj_covar=tensor([0.0522, 0.0628, 0.0692, 0.0472, 0.0612, 0.0522, 0.0654, 0.0537], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 15:55:19,213 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.790e+02 6.042e+02 7.311e+02 1.890e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 15:55:28,398 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130524.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:55:37,193 INFO [train.py:903] (3/4) Epoch 20, batch 800, loss[loss=0.2561, simple_loss=0.3237, pruned_loss=0.0943, over 13794.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2918, pruned_loss=0.06675, over 3729991.12 frames. ], batch size: 135, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:55:53,540 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 15:55:58,670 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130549.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:21,738 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130566.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:40,733 INFO [train.py:903] (3/4) Epoch 20, batch 850, loss[loss=0.2257, simple_loss=0.2958, pruned_loss=0.07778, over 19831.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2931, pruned_loss=0.06762, over 3727205.92 frames. ], batch size: 52, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:56:41,920 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:56:44,152 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1112, 1.1595, 1.4975, 1.3897, 2.7169, 1.1061, 2.2426, 2.9957], + device='cuda:3'), covar=tensor([0.0523, 0.2855, 0.2668, 0.1763, 0.0738, 0.2305, 0.1081, 0.0356], + device='cuda:3'), in_proj_covar=tensor([0.0397, 0.0355, 0.0375, 0.0337, 0.0366, 0.0344, 0.0370, 0.0390], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 15:56:57,585 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4044, 1.4950, 1.8231, 1.4614, 2.3676, 2.6948, 2.6067, 2.8542], + device='cuda:3'), covar=tensor([0.1344, 0.3107, 0.2735, 0.2382, 0.1096, 0.0386, 0.0252, 0.0349], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0315, 0.0346, 0.0261, 0.0237, 0.0182, 0.0213, 0.0247], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 15:57:25,285 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.920e+02 5.760e+02 7.852e+02 1.760e+03, threshold=1.152e+03, percent-clipped=6.0 +2023-04-02 15:57:33,245 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 15:57:40,812 INFO [train.py:903] (3/4) Epoch 20, batch 900, loss[loss=0.2375, simple_loss=0.3113, pruned_loss=0.08186, over 19700.00 frames. 
], tot_loss[loss=0.2156, simple_loss=0.2942, pruned_loss=0.06852, over 3748024.59 frames. ], batch size: 59, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:44,441 INFO [train.py:903] (3/4) Epoch 20, batch 950, loss[loss=0.2069, simple_loss=0.2955, pruned_loss=0.05911, over 18215.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.294, pruned_loss=0.0683, over 3761921.20 frames. ], batch size: 83, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:58:47,660 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 15:59:28,722 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.021e+02 4.928e+02 5.917e+02 7.294e+02 1.421e+03, threshold=1.183e+03, percent-clipped=3.0 +2023-04-02 15:59:46,665 INFO [train.py:903] (3/4) Epoch 20, batch 1000, loss[loss=0.2229, simple_loss=0.3047, pruned_loss=0.07059, over 19432.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2931, pruned_loss=0.06816, over 3762279.83 frames. ], batch size: 70, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 15:59:48,220 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 15:59:50,480 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130735.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:03,671 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130746.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:18,654 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130758.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:20,910 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:00:38,585 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 16:00:48,485 INFO [train.py:903] (3/4) Epoch 20, batch 1050, loss[loss=0.2227, simple_loss=0.2989, pruned_loss=0.07324, over 18282.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2927, pruned_loss=0.06815, over 3754114.41 frames. ], batch size: 84, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:01:00,660 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8043, 4.3123, 4.5119, 4.5419, 1.7087, 4.2727, 3.7394, 4.2664], + device='cuda:3'), covar=tensor([0.1480, 0.0709, 0.0557, 0.0610, 0.5592, 0.0754, 0.0591, 0.1025], + device='cuda:3'), in_proj_covar=tensor([0.0759, 0.0717, 0.0918, 0.0801, 0.0817, 0.0677, 0.0555, 0.0857], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 16:01:02,849 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130794.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:10,718 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130800.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:01:20,589 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 16:01:33,079 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.731e+02 5.562e+02 6.742e+02 8.268e+02 2.102e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-04-02 16:01:49,835 INFO [train.py:903] (3/4) Epoch 20, batch 1100, loss[loss=0.1989, simple_loss=0.2738, pruned_loss=0.06196, over 19727.00 frames. 
], tot_loss[loss=0.2141, simple_loss=0.2926, pruned_loss=0.0678, over 3772318.53 frames. ], batch size: 51, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:28,187 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:02:42,048 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3888, 2.4609, 2.0084, 2.5781, 2.4075, 1.8681, 2.0302, 2.2314], + device='cuda:3'), covar=tensor([0.1152, 0.1750, 0.1669, 0.1108, 0.1490, 0.0766, 0.1552, 0.0860], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0360, 0.0312, 0.0252, 0.0302, 0.0251, 0.0305, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:02:52,361 INFO [train.py:903] (3/4) Epoch 20, batch 1150, loss[loss=0.1765, simple_loss=0.256, pruned_loss=0.04845, over 17727.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06793, over 3780138.58 frames. ], batch size: 39, lr: 4.16e-03, grad_scale: 4.0 +2023-04-02 16:02:53,748 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8584, 1.6027, 1.4629, 1.8296, 1.5131, 1.5646, 1.4696, 1.6764], + device='cuda:3'), covar=tensor([0.1013, 0.1284, 0.1489, 0.0960, 0.1190, 0.0573, 0.1318, 0.0748], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0359, 0.0311, 0.0252, 0.0301, 0.0251, 0.0304, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:03:26,741 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130909.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:27,655 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:33,842 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130915.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:39,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 5.063e+02 6.056e+02 7.993e+02 1.743e+03, threshold=1.211e+03, percent-clipped=5.0 +2023-04-02 16:03:50,408 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=130927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:03:55,686 INFO [train.py:903] (3/4) Epoch 20, batch 1200, loss[loss=0.2464, simple_loss=0.3208, pruned_loss=0.08596, over 19744.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.06722, over 3790491.43 frames. ], batch size: 63, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:04:23,990 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 16:04:56,114 INFO [train.py:903] (3/4) Epoch 20, batch 1250, loss[loss=0.2188, simple_loss=0.3079, pruned_loss=0.06481, over 19788.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2905, pruned_loss=0.06686, over 3786619.57 frames. ], batch size: 56, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:05:07,180 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-02 16:05:42,762 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.121e+02 6.297e+02 7.673e+02 2.016e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-04-02 16:05:51,140 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131025.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:05:58,785 INFO [train.py:903] (3/4) Epoch 20, batch 1300, loss[loss=0.1965, simple_loss=0.2818, pruned_loss=0.05563, over 19658.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2895, pruned_loss=0.06613, over 3805056.74 frames. ], batch size: 55, lr: 4.16e-03, grad_scale: 8.0 +2023-04-02 16:06:12,061 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:06:31,638 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1737, 5.6524, 3.1971, 4.9814, 1.3844, 5.7152, 5.5678, 5.6952], + device='cuda:3'), covar=tensor([0.0325, 0.0750, 0.1647, 0.0631, 0.3632, 0.0489, 0.0636, 0.0952], + device='cuda:3'), in_proj_covar=tensor([0.0491, 0.0395, 0.0484, 0.0343, 0.0400, 0.0421, 0.0415, 0.0447], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:06:51,900 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9995, 4.4355, 4.7618, 4.7506, 1.8294, 4.4389, 3.8715, 4.4294], + device='cuda:3'), covar=tensor([0.1630, 0.0902, 0.0528, 0.0617, 0.5626, 0.0811, 0.0616, 0.1055], + device='cuda:3'), in_proj_covar=tensor([0.0758, 0.0716, 0.0916, 0.0798, 0.0814, 0.0675, 0.0551, 0.0855], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 16:06:59,468 INFO [train.py:903] (3/4) Epoch 20, batch 1350, loss[loss=0.2003, simple_loss=0.2799, pruned_loss=0.06035, over 19832.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2896, pruned_loss=0.0662, over 3812077.11 frames. ], batch size: 52, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:07:43,044 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131117.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:07:44,787 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.090e+02 6.517e+02 8.267e+02 2.193e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-02 16:08:02,292 INFO [train.py:903] (3/4) Epoch 20, batch 1400, loss[loss=0.228, simple_loss=0.3152, pruned_loss=0.07037, over 19677.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2892, pruned_loss=0.06567, over 3819893.26 frames. 
], batch size: 58, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:08:15,155 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:42,766 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:50,703 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:08:51,746 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6597, 1.4966, 1.5625, 2.2804, 1.7414, 2.0944, 2.0902, 1.7241], + device='cuda:3'), covar=tensor([0.0843, 0.0979, 0.1068, 0.0745, 0.0837, 0.0738, 0.0909, 0.0733], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0219, 0.0225, 0.0242, 0.0226, 0.0209, 0.0188, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 16:09:03,070 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3864, 1.4382, 1.6748, 1.6266, 2.5623, 2.1574, 2.5743, 1.1264], + device='cuda:3'), covar=tensor([0.2490, 0.4263, 0.2729, 0.1936, 0.1454, 0.2158, 0.1408, 0.4365], + device='cuda:3'), in_proj_covar=tensor([0.0526, 0.0630, 0.0695, 0.0475, 0.0614, 0.0526, 0.0658, 0.0541], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 16:09:03,834 INFO [train.py:903] (3/4) Epoch 20, batch 1450, loss[loss=0.2167, simple_loss=0.3004, pruned_loss=0.06649, over 19675.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2891, pruned_loss=0.06581, over 3828789.29 frames. ], batch size: 60, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:09:06,070 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 16:09:14,530 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131190.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:16,774 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:21,594 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131196.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:09:50,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.028e+02 6.181e+02 7.641e+02 1.699e+03, threshold=1.236e+03, percent-clipped=6.0 +2023-04-02 16:10:06,714 INFO [train.py:903] (3/4) Epoch 20, batch 1500, loss[loss=0.1566, simple_loss=0.233, pruned_loss=0.0401, over 19004.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2897, pruned_loss=0.06597, over 3820423.14 frames. 
], batch size: 42, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:10:20,896 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0013, 1.9087, 1.7530, 1.5228, 1.4257, 1.5559, 0.3754, 0.8863], + device='cuda:3'), covar=tensor([0.0535, 0.0573, 0.0402, 0.0685, 0.1143, 0.0794, 0.1182, 0.0971], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0349, 0.0352, 0.0377, 0.0452, 0.0383, 0.0332, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 16:11:06,980 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:07,712 INFO [train.py:903] (3/4) Epoch 20, batch 1550, loss[loss=0.236, simple_loss=0.3149, pruned_loss=0.07856, over 19663.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2908, pruned_loss=0.0665, over 3817252.92 frames. ], batch size: 55, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:11:29,267 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:38,467 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:11:53,760 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 5.087e+02 6.243e+02 7.473e+02 1.350e+03, threshold=1.249e+03, percent-clipped=1.0 +2023-04-02 16:11:58,753 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:12:10,212 INFO [train.py:903] (3/4) Epoch 20, batch 1600, loss[loss=0.2141, simple_loss=0.3114, pruned_loss=0.05846, over 19700.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2917, pruned_loss=0.0668, over 3806864.68 frames. ], batch size: 59, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:12:36,128 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 16:13:12,820 INFO [train.py:903] (3/4) Epoch 20, batch 1650, loss[loss=0.2259, simple_loss=0.3089, pruned_loss=0.0714, over 19276.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2917, pruned_loss=0.06674, over 3820729.21 frames. ], batch size: 66, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:13:59,216 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.697e+02 5.218e+02 6.304e+02 8.075e+02 1.501e+03, threshold=1.261e+03, percent-clipped=5.0 +2023-04-02 16:14:08,616 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131427.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:15,064 INFO [train.py:903] (3/4) Epoch 20, batch 1700, loss[loss=0.2418, simple_loss=0.3172, pruned_loss=0.08314, over 19545.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2911, pruned_loss=0.06652, over 3823643.76 frames. 
], batch size: 54, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:14:17,629 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131434.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:14:51,571 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5771, 1.4618, 1.7574, 1.6923, 3.8756, 1.1194, 2.7600, 4.2240], + device='cuda:3'), covar=tensor([0.0525, 0.3406, 0.3148, 0.2206, 0.1029, 0.3211, 0.1498, 0.0330], + device='cuda:3'), in_proj_covar=tensor([0.0396, 0.0356, 0.0376, 0.0338, 0.0365, 0.0346, 0.0370, 0.0391], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:14:55,854 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 16:15:13,169 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0514, 3.5749, 2.0182, 2.1744, 3.0867, 1.9492, 1.5797, 2.2819], + device='cuda:3'), covar=tensor([0.1508, 0.0607, 0.1096, 0.0888, 0.0555, 0.1172, 0.0973, 0.0698], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0313, 0.0331, 0.0261, 0.0245, 0.0335, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:15:16,179 INFO [train.py:903] (3/4) Epoch 20, batch 1750, loss[loss=0.1777, simple_loss=0.2578, pruned_loss=0.04883, over 19390.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.291, pruned_loss=0.06677, over 3804196.26 frames. ], batch size: 48, lr: 4.15e-03, grad_scale: 8.0 +2023-04-02 16:15:52,678 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131510.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:16:01,414 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 16:16:02,700 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.468e+02 5.359e+02 6.412e+02 7.691e+02 1.507e+03, threshold=1.282e+03, percent-clipped=3.0 +2023-04-02 16:16:18,695 INFO [train.py:903] (3/4) Epoch 20, batch 1800, loss[loss=0.2218, simple_loss=0.2881, pruned_loss=0.07775, over 19389.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2906, pruned_loss=0.06654, over 3809071.04 frames. ], batch size: 48, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:16:24,467 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131536.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:17:16,153 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 16:17:22,072 INFO [train.py:903] (3/4) Epoch 20, batch 1850, loss[loss=0.2018, simple_loss=0.2831, pruned_loss=0.06024, over 19844.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2917, pruned_loss=0.06724, over 3798155.92 frames. ], batch size: 52, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:17:54,263 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 16:18:09,983 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.826e+02 6.634e+02 9.045e+02 2.049e+03, threshold=1.327e+03, percent-clipped=6.0 +2023-04-02 16:18:25,168 INFO [train.py:903] (3/4) Epoch 20, batch 1900, loss[loss=0.2261, simple_loss=0.2988, pruned_loss=0.07668, over 19784.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2907, pruned_loss=0.06622, over 3814782.39 frames. 
], batch size: 56, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:18:40,091 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 16:18:45,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 16:18:47,991 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:19:10,735 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 16:19:25,447 INFO [train.py:903] (3/4) Epoch 20, batch 1950, loss[loss=0.2236, simple_loss=0.3101, pruned_loss=0.06858, over 19614.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2915, pruned_loss=0.06668, over 3807671.74 frames. ], batch size: 57, lr: 4.15e-03, grad_scale: 4.0 +2023-04-02 16:20:11,291 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7681, 3.2724, 3.3089, 3.3237, 1.4106, 3.1944, 2.7738, 3.0687], + device='cuda:3'), covar=tensor([0.1874, 0.1053, 0.0852, 0.0939, 0.5452, 0.1030, 0.0875, 0.1335], + device='cuda:3'), in_proj_covar=tensor([0.0766, 0.0718, 0.0922, 0.0810, 0.0818, 0.0683, 0.0555, 0.0860], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 16:20:13,300 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.458e+02 5.129e+02 6.435e+02 8.344e+02 2.370e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-04-02 16:20:28,635 INFO [train.py:903] (3/4) Epoch 20, batch 2000, loss[loss=0.2266, simple_loss=0.3044, pruned_loss=0.07436, over 19530.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2924, pruned_loss=0.06718, over 3812584.69 frames. ], batch size: 56, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:18,598 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131771.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:25,592 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 16:21:28,789 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131778.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:21:33,331 INFO [train.py:903] (3/4) Epoch 20, batch 2050, loss[loss=0.1914, simple_loss=0.2799, pruned_loss=0.0515, over 18841.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2924, pruned_loss=0.06732, over 3794237.52 frames. ], batch size: 74, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:21:45,621 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 16:21:46,760 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 16:22:07,045 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0318, 1.9507, 1.7682, 1.5360, 1.5057, 1.5732, 0.3508, 0.8968], + device='cuda:3'), covar=tensor([0.0599, 0.0645, 0.0428, 0.0710, 0.1122, 0.0816, 0.1224, 0.1018], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0351, 0.0354, 0.0380, 0.0454, 0.0384, 0.0334, 0.0337], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 16:22:07,705 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 16:22:22,559 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 4.827e+02 6.005e+02 7.777e+02 1.829e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 16:22:35,844 INFO [train.py:903] (3/4) Epoch 20, batch 2100, loss[loss=0.2035, simple_loss=0.2766, pruned_loss=0.06519, over 19747.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2928, pruned_loss=0.06784, over 3802538.92 frames. ], batch size: 51, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:02,420 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 16:23:03,499 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=131854.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:26,836 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 16:23:29,495 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:37,624 INFO [train.py:903] (3/4) Epoch 20, batch 2150, loss[loss=0.2327, simple_loss=0.3073, pruned_loss=0.07909, over 19333.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2914, pruned_loss=0.06711, over 3813343.78 frames. ], batch size: 66, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:23:42,694 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131886.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:23:51,980 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131893.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:09,611 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:26,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.118e+02 6.039e+02 8.265e+02 1.505e+03, threshold=1.208e+03, percent-clipped=4.0 +2023-04-02 16:24:39,702 INFO [train.py:903] (3/4) Epoch 20, batch 2200, loss[loss=0.2171, simple_loss=0.2962, pruned_loss=0.06898, over 19540.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2915, pruned_loss=0.06657, over 3823284.43 frames. ], batch size: 54, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:24:40,123 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131932.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:24:42,801 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 16:25:26,165 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131969.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:25:42,873 INFO [train.py:903] (3/4) Epoch 20, batch 2250, loss[loss=0.199, simple_loss=0.2652, pruned_loss=0.06638, over 19684.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2917, pruned_loss=0.06727, over 3805227.41 frames. 
], batch size: 45, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:25:52,172 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1701, 1.7989, 1.4197, 1.2345, 1.6253, 1.1597, 1.1365, 1.6430], + device='cuda:3'), covar=tensor([0.0823, 0.0837, 0.1112, 0.0807, 0.0510, 0.1328, 0.0658, 0.0465], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0315, 0.0333, 0.0261, 0.0245, 0.0336, 0.0291, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:26:31,963 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.368e+02 4.978e+02 6.272e+02 7.965e+02 1.499e+03, threshold=1.254e+03, percent-clipped=2.0 +2023-04-02 16:26:44,540 INFO [train.py:903] (3/4) Epoch 20, batch 2300, loss[loss=0.191, simple_loss=0.2715, pruned_loss=0.05522, over 19416.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2915, pruned_loss=0.06713, over 3822515.96 frames. ], batch size: 48, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:26:58,062 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 16:27:06,368 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132049.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:35,729 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132072.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:27:47,312 INFO [train.py:903] (3/4) Epoch 20, batch 2350, loss[loss=0.1965, simple_loss=0.2707, pruned_loss=0.06111, over 19675.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2909, pruned_loss=0.06669, over 3838805.94 frames. ], batch size: 53, lr: 4.14e-03, grad_scale: 4.0 +2023-04-02 16:28:26,825 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 16:28:35,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 4.795e+02 5.776e+02 7.778e+02 1.972e+03, threshold=1.155e+03, percent-clipped=8.0 +2023-04-02 16:28:42,740 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 16:28:49,774 INFO [train.py:903] (3/4) Epoch 20, batch 2400, loss[loss=0.2222, simple_loss=0.309, pruned_loss=0.06769, over 19732.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2912, pruned_loss=0.06705, over 3816799.59 frames. ], batch size: 63, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:29:03,490 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132142.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:11,530 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132149.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:32,902 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:41,722 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:29:51,982 INFO [train.py:903] (3/4) Epoch 20, batch 2450, loss[loss=0.1982, simple_loss=0.2884, pruned_loss=0.054, over 19519.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2915, pruned_loss=0.067, over 3825477.37 frames. 
], batch size: 54, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:30:38,913 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:41,019 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.995e+02 6.263e+02 8.063e+02 1.363e+03, threshold=1.253e+03, percent-clipped=5.0 +2023-04-02 16:30:47,015 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132225.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:30:54,271 INFO [train.py:903] (3/4) Epoch 20, batch 2500, loss[loss=0.2201, simple_loss=0.2988, pruned_loss=0.07066, over 19752.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2914, pruned_loss=0.06669, over 3842083.33 frames. ], batch size: 63, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:31:06,001 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1238, 1.8045, 1.7805, 2.0402, 1.7557, 1.8856, 1.7382, 1.9604], + device='cuda:3'), covar=tensor([0.0961, 0.1538, 0.1370, 0.1033, 0.1359, 0.0491, 0.1322, 0.0728], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0354, 0.0308, 0.0249, 0.0299, 0.0248, 0.0304, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:31:15,979 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132250.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:31:43,685 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-02 16:31:56,371 INFO [train.py:903] (3/4) Epoch 20, batch 2550, loss[loss=0.2135, simple_loss=0.2942, pruned_loss=0.06635, over 19598.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2907, pruned_loss=0.06618, over 3847072.10 frames. ], batch size: 61, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:32:30,461 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9773, 1.3619, 1.0507, 1.0549, 1.1898, 1.0378, 0.9806, 1.2921], + device='cuda:3'), covar=tensor([0.0580, 0.0804, 0.1140, 0.0700, 0.0568, 0.1329, 0.0551, 0.0480], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0316, 0.0333, 0.0260, 0.0247, 0.0336, 0.0292, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:32:45,605 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.513e+02 4.985e+02 5.787e+02 8.066e+02 1.995e+03, threshold=1.157e+03, percent-clipped=4.0 +2023-04-02 16:32:52,689 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 16:32:58,517 INFO [train.py:903] (3/4) Epoch 20, batch 2600, loss[loss=0.2406, simple_loss=0.3221, pruned_loss=0.07954, over 18710.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2905, pruned_loss=0.06578, over 3843178.73 frames. ], batch size: 74, lr: 4.14e-03, grad_scale: 8.0 +2023-04-02 16:33:02,441 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:01,398 INFO [train.py:903] (3/4) Epoch 20, batch 2650, loss[loss=0.2196, simple_loss=0.2929, pruned_loss=0.07319, over 19362.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2916, pruned_loss=0.06641, over 3844124.69 frames. 
], batch size: 47, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:34:14,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8181, 1.4521, 1.5384, 1.5332, 3.4024, 1.0826, 2.4465, 3.8283], + device='cuda:3'), covar=tensor([0.0437, 0.2662, 0.2762, 0.1875, 0.0696, 0.2631, 0.1247, 0.0228], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0357, 0.0377, 0.0340, 0.0368, 0.0348, 0.0373, 0.0393], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:34:15,341 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:23,149 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 16:34:44,237 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=132416.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:34:50,450 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.865e+02 6.149e+02 7.368e+02 1.585e+03, threshold=1.230e+03, percent-clipped=4.0 +2023-04-02 16:35:04,063 INFO [train.py:903] (3/4) Epoch 20, batch 2700, loss[loss=0.2546, simple_loss=0.3226, pruned_loss=0.0933, over 19657.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2907, pruned_loss=0.06617, over 3838354.59 frames. ], batch size: 60, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:36:00,514 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5589, 1.6769, 1.9528, 1.8381, 2.6172, 2.4299, 2.7423, 1.1204], + device='cuda:3'), covar=tensor([0.2302, 0.4008, 0.2510, 0.1739, 0.1490, 0.1882, 0.1476, 0.4350], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0636, 0.0700, 0.0479, 0.0618, 0.0528, 0.0662, 0.0543], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 16:36:06,700 INFO [train.py:903] (3/4) Epoch 20, batch 2750, loss[loss=0.1884, simple_loss=0.2612, pruned_loss=0.05775, over 19751.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2894, pruned_loss=0.06575, over 3831584.70 frames. ], batch size: 46, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:36:39,228 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132508.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:36:55,660 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 5.186e+02 6.180e+02 7.968e+02 1.505e+03, threshold=1.236e+03, percent-clipped=2.0 +2023-04-02 16:36:59,734 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0772, 3.4142, 2.0249, 1.6824, 3.2200, 1.6517, 1.5132, 2.4891], + device='cuda:3'), covar=tensor([0.1254, 0.0600, 0.1065, 0.1140, 0.0474, 0.1326, 0.0990, 0.0573], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0316, 0.0335, 0.0261, 0.0247, 0.0336, 0.0292, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:37:07,829 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132531.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:37:08,668 INFO [train.py:903] (3/4) Epoch 20, batch 2800, loss[loss=0.2442, simple_loss=0.3119, pruned_loss=0.08819, over 13658.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2893, pruned_loss=0.06559, over 3826396.27 frames. 
], batch size: 136, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:37:34,501 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 16:38:13,010 INFO [train.py:903] (3/4) Epoch 20, batch 2850, loss[loss=0.2093, simple_loss=0.288, pruned_loss=0.06527, over 19479.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2891, pruned_loss=0.06539, over 3826890.39 frames. ], batch size: 49, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:38:22,432 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:38:45,315 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 16:38:52,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:39:01,717 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 4.861e+02 5.815e+02 7.642e+02 3.357e+03, threshold=1.163e+03, percent-clipped=7.0 +2023-04-02 16:39:14,636 INFO [train.py:903] (3/4) Epoch 20, batch 2900, loss[loss=0.2195, simple_loss=0.2792, pruned_loss=0.07992, over 19759.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06539, over 3840401.64 frames. ], batch size: 46, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:39:14,680 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 16:40:13,481 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132678.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:40:18,639 INFO [train.py:903] (3/4) Epoch 20, batch 2950, loss[loss=0.222, simple_loss=0.3011, pruned_loss=0.07146, over 19517.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2898, pruned_loss=0.06546, over 3843720.29 frames. ], batch size: 54, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:09,139 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.627e+02 5.679e+02 7.371e+02 2.153e+03, threshold=1.136e+03, percent-clipped=3.0 +2023-04-02 16:41:20,811 INFO [train.py:903] (3/4) Epoch 20, batch 3000, loss[loss=0.2078, simple_loss=0.2947, pruned_loss=0.06044, over 19640.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2891, pruned_loss=0.06542, over 3842014.63 frames. ], batch size: 58, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:41:20,811 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 16:41:34,260 INFO [train.py:937] (3/4) Epoch 20, validation: loss=0.1695, simple_loss=0.2697, pruned_loss=0.03462, over 944034.00 frames. +2023-04-02 16:41:34,261 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 16:41:40,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-02 16:42:12,989 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:13,128 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:23,090 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3518, 3.5964, 2.2337, 2.2927, 3.3543, 1.9624, 1.5595, 2.5332], + device='cuda:3'), covar=tensor([0.1264, 0.0545, 0.0905, 0.0854, 0.0490, 0.1147, 0.0976, 0.0588], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0311, 0.0329, 0.0257, 0.0243, 0.0333, 0.0288, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:42:35,226 INFO [train.py:903] (3/4) Epoch 20, batch 3050, loss[loss=0.1913, simple_loss=0.2709, pruned_loss=0.0558, over 19659.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2888, pruned_loss=0.0656, over 3848034.61 frames. ], batch size: 53, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:42:41,474 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:42:43,583 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132789.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:13,139 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:43:24,237 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.615e+02 4.785e+02 6.187e+02 7.720e+02 1.879e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-02 16:43:37,018 INFO [train.py:903] (3/4) Epoch 20, batch 3100, loss[loss=0.2262, simple_loss=0.3165, pruned_loss=0.06796, over 17946.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2893, pruned_loss=0.06556, over 3835161.75 frames. ], batch size: 83, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:44:40,224 INFO [train.py:903] (3/4) Epoch 20, batch 3150, loss[loss=0.2139, simple_loss=0.2874, pruned_loss=0.07022, over 19777.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2882, pruned_loss=0.06502, over 3837076.30 frames. ], batch size: 47, lr: 4.13e-03, grad_scale: 4.0 +2023-04-02 16:45:07,812 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 16:45:29,824 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.025e+02 5.951e+02 7.011e+02 1.371e+03, threshold=1.190e+03, percent-clipped=2.0 +2023-04-02 16:45:42,517 INFO [train.py:903] (3/4) Epoch 20, batch 3200, loss[loss=0.2036, simple_loss=0.2702, pruned_loss=0.0685, over 19789.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2885, pruned_loss=0.06544, over 3836207.93 frames. ], batch size: 48, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:46:46,032 INFO [train.py:903] (3/4) Epoch 20, batch 3250, loss[loss=0.2908, simple_loss=0.3613, pruned_loss=0.1101, over 19335.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2892, pruned_loss=0.06583, over 3838988.81 frames. 
], batch size: 66, lr: 4.13e-03, grad_scale: 8.0 +2023-04-02 16:47:37,647 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.383e+02 4.867e+02 6.333e+02 8.818e+02 1.782e+03, threshold=1.267e+03, percent-clipped=7.0 +2023-04-02 16:47:37,839 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:47:49,130 INFO [train.py:903] (3/4) Epoch 20, batch 3300, loss[loss=0.2711, simple_loss=0.3265, pruned_loss=0.1078, over 13334.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2892, pruned_loss=0.06613, over 3814420.14 frames. ], batch size: 137, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:47:50,544 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5049, 2.0606, 1.5825, 1.4319, 1.9656, 1.2992, 1.3809, 1.8504], + device='cuda:3'), covar=tensor([0.0978, 0.0870, 0.1012, 0.0833, 0.0478, 0.1256, 0.0709, 0.0505], + device='cuda:3'), in_proj_covar=tensor([0.0299, 0.0312, 0.0332, 0.0259, 0.0244, 0.0334, 0.0288, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:47:57,157 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 16:48:26,007 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5283, 1.5685, 1.9132, 1.8485, 2.8317, 2.5389, 2.9460, 1.3484], + device='cuda:3'), covar=tensor([0.2352, 0.4197, 0.2702, 0.1828, 0.1426, 0.1889, 0.1411, 0.4156], + device='cuda:3'), in_proj_covar=tensor([0.0526, 0.0633, 0.0697, 0.0478, 0.0614, 0.0525, 0.0658, 0.0541], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 16:48:38,078 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 16:48:54,786 INFO [train.py:903] (3/4) Epoch 20, batch 3350, loss[loss=0.2048, simple_loss=0.2705, pruned_loss=0.06958, over 19762.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.288, pruned_loss=0.06514, over 3823501.71 frames. ], batch size: 47, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:49:27,433 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133108.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:49:45,520 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.956e+02 6.093e+02 7.175e+02 1.819e+03, threshold=1.219e+03, percent-clipped=1.0 +2023-04-02 16:49:57,529 INFO [train.py:903] (3/4) Epoch 20, batch 3400, loss[loss=0.2336, simple_loss=0.3075, pruned_loss=0.0799, over 19758.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2897, pruned_loss=0.06621, over 3818241.17 frames. ], batch size: 56, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:50:06,221 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133137.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:51:02,180 INFO [train.py:903] (3/4) Epoch 20, batch 3450, loss[loss=0.1883, simple_loss=0.2698, pruned_loss=0.05341, over 19742.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2903, pruned_loss=0.06673, over 3820046.83 frames. ], batch size: 51, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:51:08,031 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. 
Duration: 25.2444375 +2023-04-02 16:51:52,698 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.981e+02 4.746e+02 5.634e+02 7.504e+02 1.582e+03, threshold=1.127e+03, percent-clipped=2.0 +2023-04-02 16:51:54,150 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133223.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:52:04,226 INFO [train.py:903] (3/4) Epoch 20, batch 3500, loss[loss=0.2181, simple_loss=0.2916, pruned_loss=0.07231, over 19665.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2902, pruned_loss=0.06701, over 3830241.22 frames. ], batch size: 53, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:52:56,236 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8972, 5.0233, 5.7395, 5.7264, 2.0455, 5.4216, 4.6358, 5.4278], + device='cuda:3'), covar=tensor([0.1787, 0.0777, 0.0572, 0.0653, 0.6091, 0.0802, 0.0561, 0.1198], + device='cuda:3'), in_proj_covar=tensor([0.0758, 0.0717, 0.0915, 0.0803, 0.0815, 0.0676, 0.0549, 0.0849], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 16:53:08,050 INFO [train.py:903] (3/4) Epoch 20, batch 3550, loss[loss=0.1885, simple_loss=0.2689, pruned_loss=0.05404, over 19741.00 frames. ], tot_loss[loss=0.213, simple_loss=0.291, pruned_loss=0.06751, over 3830621.21 frames. ], batch size: 51, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:53:34,557 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 16:53:58,718 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 5.017e+02 5.933e+02 7.980e+02 2.795e+03, threshold=1.187e+03, percent-clipped=11.0 +2023-04-02 16:54:10,372 INFO [train.py:903] (3/4) Epoch 20, batch 3600, loss[loss=0.2265, simple_loss=0.3082, pruned_loss=0.0724, over 19768.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2911, pruned_loss=0.0673, over 3834334.67 frames. ], batch size: 54, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:15,915 INFO [train.py:903] (3/4) Epoch 20, batch 3650, loss[loss=0.215, simple_loss=0.2861, pruned_loss=0.07195, over 19404.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2903, pruned_loss=0.06676, over 3819519.80 frames. ], batch size: 48, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:55:28,146 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133392.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 16:55:29,490 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:55:30,503 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:02,492 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133418.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:56:06,669 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 5.113e+02 6.456e+02 7.889e+02 1.610e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-04-02 16:56:18,436 INFO [train.py:903] (3/4) Epoch 20, batch 3700, loss[loss=0.188, simple_loss=0.2652, pruned_loss=0.05539, over 19034.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2905, pruned_loss=0.06702, over 3821723.12 frames. 
], batch size: 42, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:56:21,232 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.3370, 5.7464, 3.4747, 5.1137, 1.1104, 5.8090, 5.7374, 5.8883], + device='cuda:3'), covar=tensor([0.0369, 0.0955, 0.1735, 0.0671, 0.4382, 0.0548, 0.0714, 0.0757], + device='cuda:3'), in_proj_covar=tensor([0.0489, 0.0396, 0.0480, 0.0341, 0.0399, 0.0420, 0.0412, 0.0446], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 16:57:19,839 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:23,017 INFO [train.py:903] (3/4) Epoch 20, batch 3750, loss[loss=0.2362, simple_loss=0.3181, pruned_loss=0.07712, over 19370.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2898, pruned_loss=0.06661, over 3816243.63 frames. ], batch size: 70, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:57:34,894 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133492.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:57:50,980 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 16:58:13,502 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.259e+02 5.077e+02 5.935e+02 8.206e+02 1.595e+03, threshold=1.187e+03, percent-clipped=3.0 +2023-04-02 16:58:24,815 INFO [train.py:903] (3/4) Epoch 20, batch 3800, loss[loss=0.2869, simple_loss=0.3442, pruned_loss=0.1149, over 19735.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2894, pruned_loss=0.06663, over 3827396.60 frames. ], batch size: 63, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 16:58:58,163 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 16:59:30,207 INFO [train.py:903] (3/4) Epoch 20, batch 3850, loss[loss=0.1759, simple_loss=0.2589, pruned_loss=0.04645, over 19480.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2892, pruned_loss=0.06642, over 3809587.65 frames. ], batch size: 49, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:00:00,653 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5536, 2.1799, 1.7738, 1.4573, 2.0545, 1.4270, 1.5266, 1.9570], + device='cuda:3'), covar=tensor([0.0923, 0.0758, 0.0982, 0.0835, 0.0509, 0.1237, 0.0646, 0.0427], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0311, 0.0332, 0.0258, 0.0245, 0.0334, 0.0289, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:00:20,971 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.481e+02 5.079e+02 6.219e+02 7.261e+02 1.808e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-02 17:00:25,931 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0192, 2.0788, 1.9286, 1.7618, 1.6298, 1.8085, 1.1179, 1.4124], + device='cuda:3'), covar=tensor([0.0553, 0.0576, 0.0393, 0.0615, 0.0919, 0.0826, 0.1131, 0.0865], + device='cuda:3'), in_proj_covar=tensor([0.0357, 0.0350, 0.0356, 0.0380, 0.0454, 0.0383, 0.0331, 0.0336], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:00:32,689 INFO [train.py:903] (3/4) Epoch 20, batch 3900, loss[loss=0.1902, simple_loss=0.268, pruned_loss=0.05617, over 19691.00 frames. 
], tot_loss[loss=0.2106, simple_loss=0.2891, pruned_loss=0.06605, over 3814827.79 frames. ], batch size: 53, lr: 4.12e-03, grad_scale: 8.0 +2023-04-02 17:01:21,152 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133670.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:01:37,398 INFO [train.py:903] (3/4) Epoch 20, batch 3950, loss[loss=0.1965, simple_loss=0.2691, pruned_loss=0.06193, over 17238.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2891, pruned_loss=0.06586, over 3799374.22 frames. ], batch size: 38, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:01:42,243 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 17:02:02,383 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7850, 4.1789, 4.5068, 4.5085, 1.9312, 4.2445, 3.7258, 4.2501], + device='cuda:3'), covar=tensor([0.1530, 0.1471, 0.0566, 0.0611, 0.5709, 0.0978, 0.0622, 0.0981], + device='cuda:3'), in_proj_covar=tensor([0.0769, 0.0725, 0.0923, 0.0812, 0.0822, 0.0687, 0.0557, 0.0862], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 17:02:26,845 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.167e+02 6.244e+02 7.613e+02 1.189e+03, threshold=1.249e+03, percent-clipped=0.0 +2023-04-02 17:02:38,778 INFO [train.py:903] (3/4) Epoch 20, batch 4000, loss[loss=0.2029, simple_loss=0.2906, pruned_loss=0.0576, over 19615.00 frames. ], tot_loss[loss=0.21, simple_loss=0.289, pruned_loss=0.06554, over 3802081.16 frames. ], batch size: 57, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:02:43,769 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133736.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:02:47,222 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133738.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:03:26,433 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 17:03:41,996 INFO [train.py:903] (3/4) Epoch 20, batch 4050, loss[loss=0.2279, simple_loss=0.3013, pruned_loss=0.07723, over 17441.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2887, pruned_loss=0.06556, over 3799342.36 frames. ], batch size: 101, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:30,941 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.971e+02 6.413e+02 8.150e+02 1.897e+03, threshold=1.283e+03, percent-clipped=7.0 +2023-04-02 17:04:42,295 INFO [train.py:903] (3/4) Epoch 20, batch 4100, loss[loss=0.2156, simple_loss=0.3007, pruned_loss=0.06528, over 19693.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2888, pruned_loss=0.06575, over 3804254.39 frames. 
], batch size: 59, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:04:47,153 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=133836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:04:59,743 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6006, 1.4004, 1.4287, 1.9313, 1.5603, 1.7830, 1.9938, 1.5926], + device='cuda:3'), covar=tensor([0.0831, 0.0936, 0.0985, 0.0684, 0.0755, 0.0734, 0.0695, 0.0728], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0219, 0.0223, 0.0240, 0.0225, 0.0208, 0.0186, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 17:05:06,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133851.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:05:09,070 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133853.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:05:18,885 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 17:05:28,136 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3492, 3.9933, 2.5325, 3.5545, 1.2454, 3.8526, 3.8076, 3.8216], + device='cuda:3'), covar=tensor([0.0680, 0.1029, 0.2047, 0.0849, 0.3615, 0.0765, 0.0917, 0.1306], + device='cuda:3'), in_proj_covar=tensor([0.0491, 0.0400, 0.0483, 0.0341, 0.0400, 0.0424, 0.0416, 0.0449], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:05:45,979 INFO [train.py:903] (3/4) Epoch 20, batch 4150, loss[loss=0.2062, simple_loss=0.2907, pruned_loss=0.06088, over 19549.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2887, pruned_loss=0.06531, over 3820882.58 frames. ], batch size: 56, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:05:56,151 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.86 vs. limit=5.0 +2023-04-02 17:06:02,512 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0281, 1.1176, 1.3704, 1.3176, 2.4217, 1.0463, 2.2165, 2.9190], + device='cuda:3'), covar=tensor([0.0809, 0.3632, 0.3478, 0.2122, 0.1287, 0.2859, 0.1309, 0.0470], + device='cuda:3'), in_proj_covar=tensor([0.0399, 0.0357, 0.0377, 0.0339, 0.0368, 0.0347, 0.0370, 0.0394], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:06:35,676 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.109e+02 4.757e+02 5.877e+02 6.653e+02 1.329e+03, threshold=1.175e+03, percent-clipped=1.0 +2023-04-02 17:06:47,886 INFO [train.py:903] (3/4) Epoch 20, batch 4200, loss[loss=0.1875, simple_loss=0.2778, pruned_loss=0.04864, over 19611.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2894, pruned_loss=0.06576, over 3821853.95 frames. ], batch size: 57, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:06:51,418 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 17:07:11,273 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8710, 1.9409, 2.1818, 2.2728, 1.7349, 2.1844, 2.2418, 2.0399], + device='cuda:3'), covar=tensor([0.3789, 0.3280, 0.1662, 0.2147, 0.3470, 0.1921, 0.4304, 0.2972], + device='cuda:3'), in_proj_covar=tensor([0.0889, 0.0950, 0.0709, 0.0933, 0.0869, 0.0802, 0.0837, 0.0775], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 17:07:12,219 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:07:50,937 INFO [train.py:903] (3/4) Epoch 20, batch 4250, loss[loss=0.2242, simple_loss=0.3082, pruned_loss=0.07008, over 19605.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2893, pruned_loss=0.06525, over 3827523.75 frames. ], batch size: 57, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:07:58,102 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5806, 1.6943, 1.9575, 1.9703, 1.4629, 1.8578, 1.9997, 1.8040], + device='cuda:3'), covar=tensor([0.4080, 0.3533, 0.1793, 0.2228, 0.3662, 0.2056, 0.4649, 0.3243], + device='cuda:3'), in_proj_covar=tensor([0.0887, 0.0948, 0.0707, 0.0930, 0.0867, 0.0801, 0.0836, 0.0772], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 17:08:08,179 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 17:08:21,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 17:08:32,207 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:33,434 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:08:41,177 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.039e+02 5.008e+02 5.790e+02 6.980e+02 1.679e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-02 17:08:54,656 INFO [train.py:903] (3/4) Epoch 20, batch 4300, loss[loss=0.2645, simple_loss=0.3379, pruned_loss=0.09552, over 19558.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06538, over 3831441.25 frames. ], batch size: 61, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:09:48,888 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8956, 1.9749, 2.2329, 2.5203, 1.8889, 2.4515, 2.2818, 1.9895], + device='cuda:3'), covar=tensor([0.3889, 0.3491, 0.1738, 0.2141, 0.3575, 0.1852, 0.4438, 0.3183], + device='cuda:3'), in_proj_covar=tensor([0.0888, 0.0950, 0.0710, 0.0932, 0.0868, 0.0803, 0.0835, 0.0774], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 17:09:50,637 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 17:09:56,295 INFO [train.py:903] (3/4) Epoch 20, batch 4350, loss[loss=0.2259, simple_loss=0.3009, pruned_loss=0.07549, over 19773.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2895, pruned_loss=0.06546, over 3819044.11 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:10:00,763 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-04-02 17:10:30,290 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134107.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 17:10:32,501 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134109.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:10:48,203 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 5.216e+02 6.315e+02 8.361e+02 2.012e+03, threshold=1.263e+03, percent-clipped=10.0 +2023-04-02 17:10:57,742 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:00,859 INFO [train.py:903] (3/4) Epoch 20, batch 4400, loss[loss=0.2266, simple_loss=0.3052, pruned_loss=0.074, over 19685.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06544, over 3818179.45 frames. ], batch size: 55, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:11:01,288 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134132.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 17:11:03,623 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134134.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:11:29,062 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 17:11:37,427 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 17:12:05,047 INFO [train.py:903] (3/4) Epoch 20, batch 4450, loss[loss=0.1937, simple_loss=0.2678, pruned_loss=0.05983, over 16053.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2877, pruned_loss=0.06461, over 3827980.11 frames. ], batch size: 35, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:12:36,963 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134207.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:12:56,173 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.329e+02 4.942e+02 5.989e+02 7.537e+02 1.405e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-02 17:13:08,117 INFO [train.py:903] (3/4) Epoch 20, batch 4500, loss[loss=0.2126, simple_loss=0.2918, pruned_loss=0.06673, over 19595.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2871, pruned_loss=0.06441, over 3821304.48 frames. ], batch size: 57, lr: 4.11e-03, grad_scale: 8.0 +2023-04-02 17:13:08,554 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134232.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:14:10,218 INFO [train.py:903] (3/4) Epoch 20, batch 4550, loss[loss=0.1913, simple_loss=0.2765, pruned_loss=0.05303, over 19683.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2891, pruned_loss=0.06558, over 3824764.36 frames. ], batch size: 53, lr: 4.11e-03, grad_scale: 4.0 +2023-04-02 17:14:15,066 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2226, 2.0707, 2.0201, 1.8206, 1.6776, 1.7962, 0.5845, 1.1432], + device='cuda:3'), covar=tensor([0.0601, 0.0599, 0.0401, 0.0692, 0.1117, 0.0823, 0.1186, 0.0946], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0351, 0.0355, 0.0379, 0.0455, 0.0383, 0.0332, 0.0338], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:14:19,194 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-02 17:14:42,931 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 17:15:02,249 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:04,308 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.805e+02 4.924e+02 5.886e+02 8.898e+02 2.816e+03, threshold=1.177e+03, percent-clipped=9.0 +2023-04-02 17:15:09,063 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:14,419 INFO [train.py:903] (3/4) Epoch 20, batch 4600, loss[loss=0.2644, simple_loss=0.3368, pruned_loss=0.09602, over 13765.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2888, pruned_loss=0.06546, over 3816918.57 frames. ], batch size: 136, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:15:30,248 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 17:15:47,469 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:15:58,905 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4908, 1.1056, 1.3561, 1.1424, 2.1067, 0.9206, 2.1018, 2.3567], + device='cuda:3'), covar=tensor([0.0936, 0.3097, 0.3004, 0.1884, 0.1188, 0.2307, 0.1143, 0.0590], + device='cuda:3'), in_proj_covar=tensor([0.0403, 0.0360, 0.0381, 0.0342, 0.0372, 0.0347, 0.0373, 0.0396], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:16:13,235 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-02 17:16:15,900 INFO [train.py:903] (3/4) Epoch 20, batch 4650, loss[loss=0.1753, simple_loss=0.2489, pruned_loss=0.05091, over 19032.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2894, pruned_loss=0.06593, over 3817022.00 frames. ], batch size: 42, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:16:20,699 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134385.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:16:32,930 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 17:16:44,641 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 17:16:52,769 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134410.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:17:06,585 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-02 17:17:09,007 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.247e+02 4.575e+02 5.939e+02 8.134e+02 1.295e+03, threshold=1.188e+03, percent-clipped=3.0 +2023-04-02 17:17:19,255 INFO [train.py:903] (3/4) Epoch 20, batch 4700, loss[loss=0.2461, simple_loss=0.3173, pruned_loss=0.08743, over 19614.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2887, pruned_loss=0.06519, over 3825114.38 frames. 
], batch size: 61, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:17:40,728 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1937, 1.8171, 1.4585, 1.1792, 1.5886, 1.1820, 1.2000, 1.6460], + device='cuda:3'), covar=tensor([0.0707, 0.0770, 0.1007, 0.0810, 0.0504, 0.1243, 0.0605, 0.0387], + device='cuda:3'), in_proj_covar=tensor([0.0294, 0.0310, 0.0332, 0.0256, 0.0244, 0.0334, 0.0286, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:17:41,482 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 17:18:11,968 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:18:20,721 INFO [train.py:903] (3/4) Epoch 20, batch 4750, loss[loss=0.2909, simple_loss=0.3429, pruned_loss=0.1194, over 19589.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2888, pruned_loss=0.06544, over 3827990.16 frames. ], batch size: 61, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:19:08,617 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0472, 2.0163, 1.7272, 2.1723, 2.0275, 1.8301, 1.7776, 2.0060], + device='cuda:3'), covar=tensor([0.1016, 0.1323, 0.1343, 0.0943, 0.1151, 0.0520, 0.1261, 0.0667], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0354, 0.0306, 0.0249, 0.0297, 0.0248, 0.0303, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:19:08,976 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.83 vs. limit=5.0 +2023-04-02 17:19:14,951 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.087e+02 5.001e+02 5.955e+02 7.090e+02 1.974e+03, threshold=1.191e+03, percent-clipped=7.0 +2023-04-02 17:19:25,460 INFO [train.py:903] (3/4) Epoch 20, batch 4800, loss[loss=0.1772, simple_loss=0.2531, pruned_loss=0.05063, over 17369.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.287, pruned_loss=0.06441, over 3830237.94 frames. ], batch size: 38, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:20:12,437 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3047, 2.3425, 2.5400, 3.0155, 2.3098, 2.8644, 2.5846, 2.2984], + device='cuda:3'), covar=tensor([0.4010, 0.3670, 0.1798, 0.2350, 0.4124, 0.2110, 0.4443, 0.3164], + device='cuda:3'), in_proj_covar=tensor([0.0881, 0.0946, 0.0707, 0.0925, 0.0866, 0.0800, 0.0832, 0.0772], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 17:20:26,814 INFO [train.py:903] (3/4) Epoch 20, batch 4850, loss[loss=0.2732, simple_loss=0.3383, pruned_loss=0.1041, over 19294.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2885, pruned_loss=0.06501, over 3826755.59 frames. ], batch size: 66, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:20:49,974 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 17:21:13,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 17:21:18,735 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 17:21:18,763 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-02 17:21:21,956 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.787e+02 5.710e+02 7.760e+02 1.554e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-02 17:21:31,390 INFO [train.py:903] (3/4) Epoch 20, batch 4900, loss[loss=0.2071, simple_loss=0.2909, pruned_loss=0.0616, over 19432.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2868, pruned_loss=0.06414, over 3829212.74 frames. ], batch size: 70, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:21:31,391 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 17:21:51,446 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 17:22:03,290 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1534, 3.5314, 2.1866, 2.1531, 3.3190, 2.0041, 1.3335, 2.2107], + device='cuda:3'), covar=tensor([0.1416, 0.0688, 0.1033, 0.0887, 0.0455, 0.1132, 0.1083, 0.0718], + device='cuda:3'), in_proj_covar=tensor([0.0297, 0.0312, 0.0333, 0.0257, 0.0245, 0.0335, 0.0287, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:22:14,400 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134666.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:21,072 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=134671.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:22:33,369 INFO [train.py:903] (3/4) Epoch 20, batch 4950, loss[loss=0.2316, simple_loss=0.3069, pruned_loss=0.07811, over 19659.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2871, pruned_loss=0.06432, over 3836896.95 frames. ], batch size: 58, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:22:49,211 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 17:23:15,498 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 17:23:28,332 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.953e+02 4.876e+02 5.783e+02 7.285e+02 1.244e+03, threshold=1.157e+03, percent-clipped=2.0 +2023-04-02 17:23:34,575 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134730.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:23:37,531 INFO [train.py:903] (3/4) Epoch 20, batch 5000, loss[loss=0.2618, simple_loss=0.3323, pruned_loss=0.09562, over 17514.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2873, pruned_loss=0.06436, over 3831715.81 frames. ], batch size: 101, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:23:45,565 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 17:23:53,692 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3669, 1.4766, 1.7887, 1.6408, 2.8086, 2.2308, 2.8918, 1.4495], + device='cuda:3'), covar=tensor([0.2451, 0.4278, 0.2628, 0.1940, 0.1424, 0.2189, 0.1524, 0.4020], + device='cuda:3'), in_proj_covar=tensor([0.0523, 0.0632, 0.0694, 0.0476, 0.0612, 0.0521, 0.0655, 0.0540], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:23:56,668 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. 
Duration: 30.1555625 +2023-04-02 17:24:06,115 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:37,413 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134781.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:24:38,201 INFO [train.py:903] (3/4) Epoch 20, batch 5050, loss[loss=0.2228, simple_loss=0.3029, pruned_loss=0.07135, over 19325.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2886, pruned_loss=0.0652, over 3829246.86 frames. ], batch size: 70, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:24:44,242 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134786.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:25:13,074 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 17:25:31,563 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.915e+02 6.341e+02 8.226e+02 2.739e+03, threshold=1.268e+03, percent-clipped=9.0 +2023-04-02 17:25:41,324 INFO [train.py:903] (3/4) Epoch 20, batch 5100, loss[loss=0.236, simple_loss=0.3165, pruned_loss=0.07775, over 17531.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2886, pruned_loss=0.06516, over 3835837.43 frames. ], batch size: 101, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:25:50,468 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 17:25:53,843 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 17:25:58,156 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 17:26:02,200 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0374, 5.0685, 5.9264, 5.9572, 2.2212, 5.5731, 4.7521, 5.5429], + device='cuda:3'), covar=tensor([0.1707, 0.0906, 0.0535, 0.0545, 0.5506, 0.0697, 0.0587, 0.1160], + device='cuda:3'), in_proj_covar=tensor([0.0766, 0.0721, 0.0922, 0.0808, 0.0816, 0.0686, 0.0553, 0.0857], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 17:26:41,287 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134880.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:26:43,298 INFO [train.py:903] (3/4) Epoch 20, batch 5150, loss[loss=0.2092, simple_loss=0.2903, pruned_loss=0.06406, over 19793.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06541, over 3829305.12 frames. ], batch size: 56, lr: 4.10e-03, grad_scale: 4.0 +2023-04-02 17:26:47,036 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8758, 1.4336, 1.8689, 1.6883, 4.2719, 1.1901, 2.6857, 4.7288], + device='cuda:3'), covar=tensor([0.0504, 0.3188, 0.2909, 0.2096, 0.0778, 0.2859, 0.1427, 0.0204], + device='cuda:3'), in_proj_covar=tensor([0.0401, 0.0361, 0.0380, 0.0342, 0.0371, 0.0346, 0.0372, 0.0397], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:26:55,950 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 17:27:03,555 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-02 17:27:10,858 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 17:27:32,563 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 17:27:37,090 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.618e+02 4.964e+02 6.321e+02 8.056e+02 1.479e+03, threshold=1.264e+03, percent-clipped=3.0 +2023-04-02 17:27:46,171 INFO [train.py:903] (3/4) Epoch 20, batch 5200, loss[loss=0.2053, simple_loss=0.29, pruned_loss=0.06031, over 19582.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2889, pruned_loss=0.06503, over 3817613.19 frames. ], batch size: 52, lr: 4.10e-03, grad_scale: 8.0 +2023-04-02 17:28:00,824 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 17:28:46,951 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 17:28:49,189 INFO [train.py:903] (3/4) Epoch 20, batch 5250, loss[loss=0.2267, simple_loss=0.3075, pruned_loss=0.07298, over 19533.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2889, pruned_loss=0.06495, over 3829164.17 frames. ], batch size: 56, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:42,806 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.326e+02 4.845e+02 5.899e+02 8.450e+02 1.811e+03, threshold=1.180e+03, percent-clipped=4.0 +2023-04-02 17:29:51,989 INFO [train.py:903] (3/4) Epoch 20, batch 5300, loss[loss=0.2048, simple_loss=0.2901, pruned_loss=0.05978, over 19525.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2885, pruned_loss=0.06543, over 3818519.77 frames. ], batch size: 54, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:29:59,112 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135037.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:04,905 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:11,575 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 17:30:30,714 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:36,544 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135067.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:30:54,699 INFO [train.py:903] (3/4) Epoch 20, batch 5350, loss[loss=0.2337, simple_loss=0.3114, pruned_loss=0.07802, over 19647.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2891, pruned_loss=0.06541, over 3808143.99 frames. ], batch size: 55, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:29,776 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 17:31:48,516 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.248e+02 6.375e+02 8.534e+02 1.946e+03, threshold=1.275e+03, percent-clipped=9.0 +2023-04-02 17:31:57,756 INFO [train.py:903] (3/4) Epoch 20, batch 5400, loss[loss=0.2125, simple_loss=0.2966, pruned_loss=0.06419, over 19681.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2895, pruned_loss=0.06544, over 3809798.74 frames. 
], batch size: 59, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:31:59,403 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3692, 1.3890, 1.6561, 1.5813, 2.5559, 2.2543, 2.7687, 1.2165], + device='cuda:3'), covar=tensor([0.2555, 0.4380, 0.2789, 0.2037, 0.1691, 0.2096, 0.1478, 0.4397], + device='cuda:3'), in_proj_covar=tensor([0.0522, 0.0628, 0.0694, 0.0474, 0.0610, 0.0519, 0.0650, 0.0538], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:33:00,881 INFO [train.py:903] (3/4) Epoch 20, batch 5450, loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04387, over 19626.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2901, pruned_loss=0.06533, over 3803339.48 frames. ], batch size: 57, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:33:09,042 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135189.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:15,483 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6619, 2.5599, 2.2170, 2.6346, 2.3984, 2.2141, 2.2171, 2.5680], + device='cuda:3'), covar=tensor([0.0950, 0.1492, 0.1349, 0.1066, 0.1403, 0.0516, 0.1213, 0.0637], + device='cuda:3'), in_proj_covar=tensor([0.0263, 0.0351, 0.0305, 0.0248, 0.0297, 0.0247, 0.0303, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:33:52,715 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135224.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:33:54,884 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.649e+02 5.255e+02 6.010e+02 7.558e+02 1.824e+03, threshold=1.202e+03, percent-clipped=3.0 +2023-04-02 17:34:03,096 INFO [train.py:903] (3/4) Epoch 20, batch 5500, loss[loss=0.1846, simple_loss=0.2637, pruned_loss=0.05277, over 19625.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2891, pruned_loss=0.06529, over 3799839.90 frames. ], batch size: 50, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:34:29,170 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 17:34:30,819 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2098, 1.2857, 1.2640, 1.0107, 1.1339, 1.0908, 0.0845, 0.3155], + device='cuda:3'), covar=tensor([0.0702, 0.0673, 0.0423, 0.0578, 0.1269, 0.0632, 0.1298, 0.1110], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0351, 0.0354, 0.0379, 0.0455, 0.0384, 0.0333, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:35:05,527 INFO [train.py:903] (3/4) Epoch 20, batch 5550, loss[loss=0.2196, simple_loss=0.3015, pruned_loss=0.06884, over 19683.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2895, pruned_loss=0.06558, over 3804833.92 frames. ], batch size: 55, lr: 4.09e-03, grad_scale: 4.0 +2023-04-02 17:35:13,932 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-02 17:35:20,846 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5584, 3.9770, 4.3606, 4.4055, 1.6077, 4.0771, 3.5784, 3.7904], + device='cuda:3'), covar=tensor([0.2169, 0.1319, 0.0906, 0.1083, 0.7561, 0.1624, 0.1035, 0.2033], + device='cuda:3'), in_proj_covar=tensor([0.0769, 0.0724, 0.0928, 0.0813, 0.0821, 0.0691, 0.0554, 0.0862], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 17:35:38,280 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2073, 1.2987, 1.2809, 1.0654, 1.1374, 1.1179, 0.0951, 0.3578], + device='cuda:3'), covar=tensor([0.0725, 0.0685, 0.0438, 0.0597, 0.1345, 0.0667, 0.1316, 0.1130], + device='cuda:3'), in_proj_covar=tensor([0.0360, 0.0353, 0.0355, 0.0380, 0.0457, 0.0386, 0.0335, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:36:00,656 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 4.884e+02 6.418e+02 8.039e+02 2.322e+03, threshold=1.284e+03, percent-clipped=8.0 +2023-04-02 17:36:04,009 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 17:36:07,361 INFO [train.py:903] (3/4) Epoch 20, batch 5600, loss[loss=0.1842, simple_loss=0.2651, pruned_loss=0.05163, over 19748.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2885, pruned_loss=0.06484, over 3809867.01 frames. ], batch size: 51, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:36:18,152 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135339.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:36:31,084 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1633, 2.8002, 2.2806, 2.2167, 1.9865, 2.5471, 0.8282, 2.0096], + device='cuda:3'), covar=tensor([0.0657, 0.0617, 0.0660, 0.1146, 0.1163, 0.1113, 0.1486, 0.1088], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0354, 0.0355, 0.0381, 0.0458, 0.0388, 0.0335, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:37:11,260 INFO [train.py:903] (3/4) Epoch 20, batch 5650, loss[loss=0.1941, simple_loss=0.2822, pruned_loss=0.05298, over 19467.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2889, pruned_loss=0.06497, over 3806359.12 frames. ], batch size: 49, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:38:01,350 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 17:38:05,718 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.892e+02 5.590e+02 6.975e+02 1.698e+03, threshold=1.118e+03, percent-clipped=3.0 +2023-04-02 17:38:12,541 INFO [train.py:903] (3/4) Epoch 20, batch 5700, loss[loss=0.2229, simple_loss=0.3034, pruned_loss=0.07117, over 18291.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2895, pruned_loss=0.06575, over 3810653.32 frames. ], batch size: 83, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:14,105 INFO [train.py:903] (3/4) Epoch 20, batch 5750, loss[loss=0.1794, simple_loss=0.2582, pruned_loss=0.05032, over 19736.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2899, pruned_loss=0.06597, over 3810744.92 frames. 
], batch size: 46, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:39:17,223 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 17:39:25,448 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 17:39:31,256 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 17:40:10,278 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.984e+02 5.062e+02 5.931e+02 7.729e+02 1.708e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-02 17:40:18,218 INFO [train.py:903] (3/4) Epoch 20, batch 5800, loss[loss=0.2104, simple_loss=0.2982, pruned_loss=0.06128, over 19673.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.289, pruned_loss=0.06554, over 3813300.55 frames. ], batch size: 58, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:40:19,532 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135533.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:41:20,583 INFO [train.py:903] (3/4) Epoch 20, batch 5850, loss[loss=0.2412, simple_loss=0.3169, pruned_loss=0.0828, over 19712.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2899, pruned_loss=0.06586, over 3819179.11 frames. ], batch size: 63, lr: 4.09e-03, grad_scale: 8.0 +2023-04-02 17:41:36,370 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135595.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:08,943 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:11,758 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-02 17:42:15,863 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.765e+02 5.164e+02 6.346e+02 8.557e+02 2.066e+03, threshold=1.269e+03, percent-clipped=9.0 +2023-04-02 17:42:23,022 INFO [train.py:903] (3/4) Epoch 20, batch 5900, loss[loss=0.2127, simple_loss=0.2973, pruned_loss=0.06401, over 19335.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2903, pruned_loss=0.0662, over 3812699.23 frames. ], batch size: 66, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:42:25,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 17:42:43,086 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:43,155 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135648.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:42:46,320 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 17:43:24,712 INFO [train.py:903] (3/4) Epoch 20, batch 5950, loss[loss=0.1921, simple_loss=0.278, pruned_loss=0.05311, over 19723.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2898, pruned_loss=0.06598, over 3831199.73 frames. ], batch size: 63, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:19,051 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.126e+02 6.708e+02 1.004e+03 2.382e+03, threshold=1.342e+03, percent-clipped=11.0 +2023-04-02 17:44:27,252 INFO [train.py:903] (3/4) Epoch 20, batch 6000, loss[loss=0.2102, simple_loss=0.2899, pruned_loss=0.06519, over 19752.00 frames. 
], tot_loss[loss=0.2111, simple_loss=0.29, pruned_loss=0.0661, over 3828052.81 frames. ], batch size: 54, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:44:27,252 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 17:44:39,926 INFO [train.py:937] (3/4) Epoch 20, validation: loss=0.1697, simple_loss=0.2697, pruned_loss=0.0349, over 944034.00 frames. +2023-04-02 17:44:39,927 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 17:45:41,594 INFO [train.py:903] (3/4) Epoch 20, batch 6050, loss[loss=0.2629, simple_loss=0.3329, pruned_loss=0.09647, over 13689.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2901, pruned_loss=0.06634, over 3826277.13 frames. ], batch size: 136, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:46:36,913 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 4.873e+02 5.806e+02 7.519e+02 1.887e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-04-02 17:46:43,930 INFO [train.py:903] (3/4) Epoch 20, batch 6100, loss[loss=0.2004, simple_loss=0.2851, pruned_loss=0.05791, over 18710.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2901, pruned_loss=0.06658, over 3818052.81 frames. ], batch size: 74, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:47:05,979 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4988, 1.5659, 1.8599, 1.7452, 2.6348, 2.3447, 2.7321, 1.1542], + device='cuda:3'), covar=tensor([0.2420, 0.4273, 0.2739, 0.1941, 0.1519, 0.2018, 0.1478, 0.4479], + device='cuda:3'), in_proj_covar=tensor([0.0525, 0.0634, 0.0697, 0.0476, 0.0615, 0.0522, 0.0655, 0.0541], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:47:46,908 INFO [train.py:903] (3/4) Epoch 20, batch 6150, loss[loss=0.2036, simple_loss=0.2776, pruned_loss=0.06482, over 19613.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2891, pruned_loss=0.06593, over 3824366.08 frames. ], batch size: 50, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:48:15,330 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135904.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:16,084 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 17:48:21,068 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0578, 1.2790, 1.7265, 1.2637, 2.8724, 3.7736, 3.5078, 3.9999], + device='cuda:3'), covar=tensor([0.1725, 0.3762, 0.3381, 0.2450, 0.0546, 0.0188, 0.0202, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0318, 0.0347, 0.0264, 0.0239, 0.0183, 0.0215, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 17:48:38,647 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7885, 4.2715, 4.4970, 4.4937, 1.6883, 4.2623, 3.6623, 4.2223], + device='cuda:3'), covar=tensor([0.1616, 0.0773, 0.0617, 0.0649, 0.5953, 0.0775, 0.0647, 0.1193], + device='cuda:3'), in_proj_covar=tensor([0.0768, 0.0723, 0.0927, 0.0809, 0.0820, 0.0685, 0.0557, 0.0859], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 17:48:41,891 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 4.701e+02 6.211e+02 7.118e+02 1.417e+03, threshold=1.242e+03, percent-clipped=3.0 +2023-04-02 17:48:43,239 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1236, 1.2551, 1.7876, 1.1290, 2.4978, 3.3149, 3.0341, 3.5697], + device='cuda:3'), covar=tensor([0.1635, 0.3839, 0.3222, 0.2592, 0.0582, 0.0220, 0.0232, 0.0267], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0319, 0.0348, 0.0264, 0.0239, 0.0183, 0.0215, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 17:48:45,696 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:48:49,676 INFO [train.py:903] (3/4) Epoch 20, batch 6200, loss[loss=0.2407, simple_loss=0.3249, pruned_loss=0.07827, over 18014.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2889, pruned_loss=0.06571, over 3825426.16 frames. ], batch size: 83, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:49:52,113 INFO [train.py:903] (3/4) Epoch 20, batch 6250, loss[loss=0.1807, simple_loss=0.2654, pruned_loss=0.04799, over 19853.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2889, pruned_loss=0.06516, over 3827883.63 frames. ], batch size: 52, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:04,695 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=135992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:15,890 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136000.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:19,573 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4565, 1.5601, 1.7617, 1.6915, 2.6289, 2.3054, 2.7625, 1.1534], + device='cuda:3'), covar=tensor([0.2422, 0.4264, 0.2727, 0.1861, 0.1453, 0.2119, 0.1474, 0.4333], + device='cuda:3'), in_proj_covar=tensor([0.0527, 0.0636, 0.0699, 0.0477, 0.0617, 0.0525, 0.0658, 0.0543], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 17:50:24,785 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. 
Duration: 25.988875 +2023-04-02 17:50:30,750 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136012.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:50:48,357 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 5.185e+02 6.758e+02 8.208e+02 2.074e+03, threshold=1.352e+03, percent-clipped=8.0 +2023-04-02 17:50:55,344 INFO [train.py:903] (3/4) Epoch 20, batch 6300, loss[loss=0.2377, simple_loss=0.3141, pruned_loss=0.08067, over 19538.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.289, pruned_loss=0.06572, over 3832467.84 frames. ], batch size: 56, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:50:56,869 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3184, 1.7916, 1.9723, 1.9059, 2.8778, 1.6082, 2.8308, 3.2415], + device='cuda:3'), covar=tensor([0.0765, 0.2763, 0.2714, 0.2038, 0.0998, 0.2502, 0.1789, 0.0476], + device='cuda:3'), in_proj_covar=tensor([0.0404, 0.0360, 0.0380, 0.0342, 0.0372, 0.0347, 0.0372, 0.0397], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:51:58,440 INFO [train.py:903] (3/4) Epoch 20, batch 6350, loss[loss=0.2199, simple_loss=0.2914, pruned_loss=0.07424, over 19858.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2888, pruned_loss=0.06517, over 3835313.45 frames. ], batch size: 52, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:52:30,875 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136107.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:52:53,613 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.408e+02 5.045e+02 5.932e+02 7.156e+02 1.987e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 17:53:01,266 INFO [train.py:903] (3/4) Epoch 20, batch 6400, loss[loss=0.2488, simple_loss=0.3107, pruned_loss=0.09345, over 13687.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2892, pruned_loss=0.06524, over 3831825.78 frames. ], batch size: 135, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:53:36,363 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136160.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:54:02,563 INFO [train.py:903] (3/4) Epoch 20, batch 6450, loss[loss=0.233, simple_loss=0.3139, pruned_loss=0.07604, over 19671.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2905, pruned_loss=0.06592, over 3830918.28 frames. ], batch size: 55, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:54:51,320 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 17:54:58,994 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.358e+02 5.012e+02 6.099e+02 8.089e+02 3.011e+03, threshold=1.220e+03, percent-clipped=7.0 +2023-04-02 17:55:01,042 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-02 17:55:07,146 INFO [train.py:903] (3/4) Epoch 20, batch 6500, loss[loss=0.1912, simple_loss=0.267, pruned_loss=0.05768, over 19610.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2915, pruned_loss=0.06638, over 3812335.11 frames. ], batch size: 50, lr: 4.08e-03, grad_scale: 8.0 +2023-04-02 17:55:14,051 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. 
Duration: 0.836375 +2023-04-02 17:55:20,946 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0019, 2.0638, 2.2997, 2.5962, 1.9046, 2.4662, 2.3567, 2.1549], + device='cuda:3'), covar=tensor([0.4194, 0.3893, 0.1880, 0.2401, 0.4146, 0.2180, 0.4592, 0.3292], + device='cuda:3'), in_proj_covar=tensor([0.0878, 0.0942, 0.0700, 0.0919, 0.0862, 0.0793, 0.0826, 0.0768], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 17:55:41,824 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7334, 1.7252, 1.3016, 1.7495, 1.6667, 1.4074, 1.3459, 1.5766], + device='cuda:3'), covar=tensor([0.1219, 0.1461, 0.1902, 0.1117, 0.1364, 0.0982, 0.1866, 0.1002], + device='cuda:3'), in_proj_covar=tensor([0.0264, 0.0353, 0.0305, 0.0248, 0.0297, 0.0246, 0.0304, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 17:56:09,976 INFO [train.py:903] (3/4) Epoch 20, batch 6550, loss[loss=0.2433, simple_loss=0.3198, pruned_loss=0.08342, over 19364.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2913, pruned_loss=0.06646, over 3825776.32 frames. ], batch size: 70, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:56:48,277 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 17:57:06,361 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 5.319e+02 6.594e+02 8.030e+02 1.579e+03, threshold=1.319e+03, percent-clipped=2.0 +2023-04-02 17:57:14,430 INFO [train.py:903] (3/4) Epoch 20, batch 6600, loss[loss=0.253, simple_loss=0.3269, pruned_loss=0.08952, over 19525.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2903, pruned_loss=0.0657, over 3816905.58 frames. ], batch size: 56, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:57:28,429 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136344.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:41,111 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136353.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:45,650 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136356.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:57:53,895 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136363.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:16,828 INFO [train.py:903] (3/4) Epoch 20, batch 6650, loss[loss=0.2108, simple_loss=0.2948, pruned_loss=0.06338, over 19260.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2906, pruned_loss=0.06601, over 3818426.47 frames. ], batch size: 66, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:58:26,169 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 17:58:43,792 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 17:59:14,402 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 4.763e+02 5.879e+02 7.861e+02 2.647e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 17:59:22,071 INFO [train.py:903] (3/4) Epoch 20, batch 6700, loss[loss=0.239, simple_loss=0.3209, pruned_loss=0.0786, over 19666.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2914, pruned_loss=0.06626, over 3829714.96 frames. 
], batch size: 55, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 17:59:55,295 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:08,534 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136471.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:00:20,885 INFO [train.py:903] (3/4) Epoch 20, batch 6750, loss[loss=0.1877, simple_loss=0.2715, pruned_loss=0.05198, over 19762.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2913, pruned_loss=0.06633, over 3827579.88 frames. ], batch size: 54, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:00:21,335 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6033, 1.6840, 1.9222, 1.8932, 1.4551, 1.8376, 1.9327, 1.7699], + device='cuda:3'), covar=tensor([0.4103, 0.3708, 0.1974, 0.2415, 0.3793, 0.2272, 0.5098, 0.3507], + device='cuda:3'), in_proj_covar=tensor([0.0882, 0.0947, 0.0704, 0.0924, 0.0865, 0.0797, 0.0830, 0.0773], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 18:00:45,723 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:01:11,793 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.251e+02 5.547e+02 6.932e+02 9.039e+02 1.788e+03, threshold=1.386e+03, percent-clipped=9.0 +2023-04-02 18:01:19,066 INFO [train.py:903] (3/4) Epoch 20, batch 6800, loss[loss=0.2365, simple_loss=0.3179, pruned_loss=0.07756, over 19619.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2903, pruned_loss=0.06582, over 3820594.48 frames. ], batch size: 57, lr: 4.07e-03, grad_scale: 8.0 +2023-04-02 18:02:04,754 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 18:02:05,227 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 18:02:07,962 INFO [train.py:903] (3/4) Epoch 21, batch 0, loss[loss=0.2207, simple_loss=0.2916, pruned_loss=0.07493, over 19373.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2916, pruned_loss=0.07493, over 19373.00 frames. ], batch size: 47, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:02:07,962 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 18:02:18,720 INFO [train.py:937] (3/4) Epoch 21, validation: loss=0.1691, simple_loss=0.2696, pruned_loss=0.03427, over 944034.00 frames. +2023-04-02 18:02:18,721 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 18:02:30,979 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 18:03:20,704 INFO [train.py:903] (3/4) Epoch 21, batch 50, loss[loss=0.2446, simple_loss=0.3186, pruned_loss=0.08528, over 19514.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2851, pruned_loss=0.06417, over 860719.80 frames. ], batch size: 64, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:03:24,696 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-02 18:03:33,346 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136619.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:03:43,287 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.947e+02 6.173e+02 6.953e+02 1.295e+03, threshold=1.235e+03, percent-clipped=0.0 +2023-04-02 18:03:54,607 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 18:04:22,847 INFO [train.py:903] (3/4) Epoch 21, batch 100, loss[loss=0.2164, simple_loss=0.2768, pruned_loss=0.07799, over 17310.00 frames. ], tot_loss[loss=0.206, simple_loss=0.284, pruned_loss=0.06402, over 1513240.45 frames. ], batch size: 38, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:04:25,191 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:04:34,145 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 18:04:42,067 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2790, 1.9995, 1.9298, 2.7683, 2.0990, 2.5494, 2.5389, 2.2296], + device='cuda:3'), covar=tensor([0.0749, 0.0863, 0.0962, 0.0799, 0.0828, 0.0690, 0.0861, 0.0646], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0242, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 18:05:00,465 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-02 18:05:09,051 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=136697.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:25,010 INFO [train.py:903] (3/4) Epoch 21, batch 150, loss[loss=0.2307, simple_loss=0.3099, pruned_loss=0.07573, over 18284.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2856, pruned_loss=0.06425, over 2028293.10 frames. ], batch size: 83, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:05:31,976 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:05:45,589 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 5.031e+02 5.954e+02 7.665e+02 1.668e+03, threshold=1.191e+03, percent-clipped=3.0 +2023-04-02 18:05:45,999 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136727.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:02,669 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136740.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:18,654 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136752.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:06:25,310 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 18:06:27,565 INFO [train.py:903] (3/4) Epoch 21, batch 200, loss[loss=0.207, simple_loss=0.2789, pruned_loss=0.0675, over 19586.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2871, pruned_loss=0.0654, over 2418401.97 frames. ], batch size: 52, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:29,898 INFO [train.py:903] (3/4) Epoch 21, batch 250, loss[loss=0.1994, simple_loss=0.2907, pruned_loss=0.05399, over 19769.00 frames. 
], tot_loss[loss=0.2097, simple_loss=0.288, pruned_loss=0.06575, over 2725638.67 frames. ], batch size: 54, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:07:32,551 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8808, 1.4433, 1.5536, 1.4856, 3.4190, 1.2314, 2.4481, 3.9381], + device='cuda:3'), covar=tensor([0.0446, 0.2760, 0.2719, 0.1968, 0.0703, 0.2487, 0.1308, 0.0206], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0361, 0.0380, 0.0344, 0.0371, 0.0348, 0.0372, 0.0397], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:07:32,600 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:47,509 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:07:52,057 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.815e+02 6.209e+02 8.068e+02 1.278e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-04-02 18:08:32,585 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 18:08:33,040 INFO [train.py:903] (3/4) Epoch 21, batch 300, loss[loss=0.2018, simple_loss=0.2703, pruned_loss=0.06665, over 19747.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2857, pruned_loss=0.06441, over 2975413.33 frames. ], batch size: 47, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:08:53,757 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:07,904 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.02 vs. limit=5.0 +2023-04-02 18:09:23,358 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136900.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:09:36,407 INFO [train.py:903] (3/4) Epoch 21, batch 350, loss[loss=0.1658, simple_loss=0.2457, pruned_loss=0.04295, over 15558.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2887, pruned_loss=0.06601, over 3162124.96 frames. ], batch size: 34, lr: 3.97e-03, grad_scale: 4.0 +2023-04-02 18:09:38,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 18:09:56,983 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.835e+02 5.943e+02 7.281e+02 1.741e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-02 18:10:38,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4533, 1.4139, 1.9850, 1.6286, 2.8870, 4.5934, 4.4549, 5.1051], + device='cuda:3'), covar=tensor([0.1553, 0.3993, 0.3451, 0.2338, 0.0682, 0.0219, 0.0176, 0.0135], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0319, 0.0347, 0.0264, 0.0240, 0.0183, 0.0215, 0.0251], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 18:10:39,349 INFO [train.py:903] (3/4) Epoch 21, batch 400, loss[loss=0.1892, simple_loss=0.2834, pruned_loss=0.04744, over 19793.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2888, pruned_loss=0.06568, over 3316068.77 frames. 
], batch size: 56, lr: 3.97e-03, grad_scale: 8.0 +2023-04-02 18:11:35,383 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:11:41,216 INFO [train.py:903] (3/4) Epoch 21, batch 450, loss[loss=0.1846, simple_loss=0.258, pruned_loss=0.05555, over 19773.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2889, pruned_loss=0.0658, over 3434266.72 frames. ], batch size: 47, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:03,757 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.613e+02 5.973e+02 7.527e+02 1.521e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-02 18:12:12,939 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 18:12:14,060 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 18:12:43,472 INFO [train.py:903] (3/4) Epoch 21, batch 500, loss[loss=0.2649, simple_loss=0.3291, pruned_loss=0.1003, over 19493.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2903, pruned_loss=0.06643, over 3522186.79 frames. ], batch size: 64, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:12:53,183 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:25,170 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137093.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:13:45,872 INFO [train.py:903] (3/4) Epoch 21, batch 550, loss[loss=0.176, simple_loss=0.2556, pruned_loss=0.04813, over 19488.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2893, pruned_loss=0.06528, over 3593465.97 frames. ], batch size: 49, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:13:59,997 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:14:09,838 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.365e+02 6.823e+02 8.347e+02 2.113e+03, threshold=1.365e+03, percent-clipped=7.0 +2023-04-02 18:14:48,995 INFO [train.py:903] (3/4) Epoch 21, batch 600, loss[loss=0.2462, simple_loss=0.3264, pruned_loss=0.08295, over 19721.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2894, pruned_loss=0.06513, over 3654151.05 frames. ], batch size: 63, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:14:57,068 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3288, 3.9179, 2.5702, 3.5017, 1.0115, 3.9310, 3.7562, 3.8742], + device='cuda:3'), covar=tensor([0.0713, 0.1179, 0.2048, 0.0881, 0.4132, 0.0694, 0.0906, 0.1249], + device='cuda:3'), in_proj_covar=tensor([0.0492, 0.0403, 0.0486, 0.0338, 0.0401, 0.0423, 0.0418, 0.0453], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:15:00,564 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=137168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:15:29,957 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 18:15:53,985 INFO [train.py:903] (3/4) Epoch 21, batch 650, loss[loss=0.2231, simple_loss=0.3089, pruned_loss=0.06861, over 19513.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2904, pruned_loss=0.06552, over 3691316.01 frames. 
], batch size: 64, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:16:12,098 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0146, 1.6370, 1.9748, 1.7343, 4.5398, 1.1941, 2.5525, 4.9019], + device='cuda:3'), covar=tensor([0.0391, 0.2745, 0.2621, 0.1907, 0.0703, 0.2680, 0.1444, 0.0159], + device='cuda:3'), in_proj_covar=tensor([0.0405, 0.0362, 0.0382, 0.0343, 0.0371, 0.0347, 0.0373, 0.0398], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:16:16,604 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 4.775e+02 5.968e+02 8.002e+02 1.696e+03, threshold=1.194e+03, percent-clipped=7.0 +2023-04-02 18:16:56,150 INFO [train.py:903] (3/4) Epoch 21, batch 700, loss[loss=0.2333, simple_loss=0.3051, pruned_loss=0.08075, over 12994.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.29, pruned_loss=0.06592, over 3703347.77 frames. ], batch size: 136, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:17:26,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:17:42,550 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6780, 1.7192, 1.6305, 1.4165, 1.3622, 1.4242, 0.3039, 0.6301], + device='cuda:3'), covar=tensor([0.0680, 0.0604, 0.0386, 0.0609, 0.1188, 0.0718, 0.1332, 0.1102], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0352, 0.0355, 0.0380, 0.0458, 0.0388, 0.0335, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:17:54,697 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 18:18:00,855 INFO [train.py:903] (3/4) Epoch 21, batch 750, loss[loss=0.1868, simple_loss=0.2791, pruned_loss=0.04728, over 19715.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.0657, over 3742912.96 frames. ], batch size: 59, lr: 3.96e-03, grad_scale: 4.0 +2023-04-02 18:18:22,897 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 4.851e+02 6.236e+02 7.648e+02 2.101e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-04-02 18:19:01,644 INFO [train.py:903] (3/4) Epoch 21, batch 800, loss[loss=0.2064, simple_loss=0.2841, pruned_loss=0.06437, over 19785.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2906, pruned_loss=0.06627, over 3746753.48 frames. ], batch size: 47, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:19:07,856 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 18:19:22,454 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:19:34,038 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1911, 1.8041, 1.4392, 1.2077, 1.6230, 1.1802, 1.1758, 1.6744], + device='cuda:3'), covar=tensor([0.0743, 0.0778, 0.1070, 0.0816, 0.0534, 0.1264, 0.0593, 0.0426], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0313, 0.0336, 0.0260, 0.0245, 0.0337, 0.0292, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:19:53,527 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:20:05,536 INFO [train.py:903] (3/4) Epoch 21, batch 850, loss[loss=0.1605, simple_loss=0.245, pruned_loss=0.03801, over 19328.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2908, pruned_loss=0.06632, over 3777872.04 frames. ], batch size: 44, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:20:27,117 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.583e+02 6.647e+02 8.818e+02 2.027e+03, threshold=1.329e+03, percent-clipped=5.0 +2023-04-02 18:20:48,411 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 18:21:06,665 INFO [train.py:903] (3/4) Epoch 21, batch 900, loss[loss=0.2035, simple_loss=0.2874, pruned_loss=0.0598, over 19321.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.291, pruned_loss=0.06642, over 3797590.49 frames. ], batch size: 66, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:21:59,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-02 18:22:08,624 INFO [train.py:903] (3/4) Epoch 21, batch 950, loss[loss=0.1663, simple_loss=0.245, pruned_loss=0.04375, over 19754.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2913, pruned_loss=0.06628, over 3809387.13 frames. ], batch size: 46, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:22:31,547 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 5.256e+02 6.264e+02 7.895e+02 1.664e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-02 18:22:34,062 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4597, 1.5499, 1.8217, 1.7713, 2.3476, 2.0926, 2.3529, 1.0008], + device='cuda:3'), covar=tensor([0.2936, 0.4673, 0.3073, 0.2344, 0.1807, 0.2509, 0.1760, 0.5179], + device='cuda:3'), in_proj_covar=tensor([0.0530, 0.0639, 0.0703, 0.0482, 0.0619, 0.0527, 0.0661, 0.0544], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:22:45,797 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:10,899 INFO [train.py:903] (3/4) Epoch 21, batch 1000, loss[loss=0.1965, simple_loss=0.2881, pruned_loss=0.05242, over 19542.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2889, pruned_loss=0.06503, over 3817460.98 frames. ], batch size: 56, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:23:15,683 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:23:55,873 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-02 18:24:10,009 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.26 vs. limit=5.0 +2023-04-02 18:24:14,003 INFO [train.py:903] (3/4) Epoch 21, batch 1050, loss[loss=0.2083, simple_loss=0.2899, pruned_loss=0.06335, over 19777.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2888, pruned_loss=0.06511, over 3817996.99 frames. ], batch size: 54, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:24:35,335 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.318e+02 6.486e+02 8.521e+02 3.216e+03, threshold=1.297e+03, percent-clipped=7.0 +2023-04-02 18:24:36,550 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 18:25:17,797 INFO [train.py:903] (3/4) Epoch 21, batch 1100, loss[loss=0.1954, simple_loss=0.2809, pruned_loss=0.05494, over 19589.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2895, pruned_loss=0.06583, over 3800512.35 frames. ], batch size: 61, lr: 3.96e-03, grad_scale: 8.0 +2023-04-02 18:25:36,463 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9037, 1.9576, 2.2355, 2.5234, 1.8889, 2.4762, 2.2950, 2.0412], + device='cuda:3'), covar=tensor([0.4038, 0.3823, 0.1833, 0.2283, 0.3970, 0.1959, 0.4504, 0.3271], + device='cuda:3'), in_proj_covar=tensor([0.0888, 0.0953, 0.0711, 0.0930, 0.0871, 0.0803, 0.0835, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 18:25:47,829 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6298, 1.7021, 1.8962, 2.0286, 1.5271, 1.9850, 1.9326, 1.8082], + device='cuda:3'), covar=tensor([0.4060, 0.3587, 0.1932, 0.2183, 0.3723, 0.1999, 0.4834, 0.3341], + device='cuda:3'), in_proj_covar=tensor([0.0887, 0.0952, 0.0710, 0.0929, 0.0870, 0.0802, 0.0834, 0.0776], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 18:26:19,697 INFO [train.py:903] (3/4) Epoch 21, batch 1150, loss[loss=0.1785, simple_loss=0.2546, pruned_loss=0.05117, over 19409.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2904, pruned_loss=0.06593, over 3802541.98 frames. ], batch size: 48, lr: 3.95e-03, grad_scale: 4.0 +2023-04-02 18:26:43,799 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 5.127e+02 6.263e+02 7.580e+02 1.245e+03, threshold=1.253e+03, percent-clipped=0.0 +2023-04-02 18:27:22,178 INFO [train.py:903] (3/4) Epoch 21, batch 1200, loss[loss=0.1736, simple_loss=0.2437, pruned_loss=0.05176, over 19020.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2911, pruned_loss=0.0663, over 3821809.18 frames. ], batch size: 42, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:27:28,769 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-02 18:27:46,793 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 18:28:25,534 INFO [train.py:903] (3/4) Epoch 21, batch 1250, loss[loss=0.1739, simple_loss=0.2532, pruned_loss=0.04733, over 19761.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2906, pruned_loss=0.06596, over 3822287.03 frames. 
], batch size: 47, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:28:36,403 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7786, 1.3478, 1.5222, 1.4950, 3.1984, 1.1086, 2.3304, 3.7958], + device='cuda:3'), covar=tensor([0.0558, 0.3041, 0.2939, 0.2045, 0.0853, 0.2849, 0.1537, 0.0254], + device='cuda:3'), in_proj_covar=tensor([0.0403, 0.0360, 0.0379, 0.0343, 0.0370, 0.0345, 0.0373, 0.0397], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:28:48,828 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.413e+02 5.084e+02 5.991e+02 7.123e+02 1.423e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-02 18:29:27,971 INFO [train.py:903] (3/4) Epoch 21, batch 1300, loss[loss=0.219, simple_loss=0.301, pruned_loss=0.06852, over 19288.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2901, pruned_loss=0.06566, over 3833142.62 frames. ], batch size: 66, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:29:36,706 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.29 vs. limit=5.0 +2023-04-02 18:30:30,624 INFO [train.py:903] (3/4) Epoch 21, batch 1350, loss[loss=0.2017, simple_loss=0.2877, pruned_loss=0.05791, over 19546.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2906, pruned_loss=0.06589, over 3835760.07 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:30:30,941 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137910.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:34,446 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137913.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:30:54,801 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.098e+02 6.340e+02 8.452e+02 2.491e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-04-02 18:31:33,358 INFO [train.py:903] (3/4) Epoch 21, batch 1400, loss[loss=0.1957, simple_loss=0.2615, pruned_loss=0.06496, over 19753.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2894, pruned_loss=0.06545, over 3840214.95 frames. ], batch size: 46, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:32:28,137 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 18:32:37,232 INFO [train.py:903] (3/4) Epoch 21, batch 1450, loss[loss=0.204, simple_loss=0.2946, pruned_loss=0.05668, over 19653.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.06565, over 3841172.71 frames. ], batch size: 58, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:33:01,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 4.676e+02 5.543e+02 6.978e+02 2.034e+03, threshold=1.109e+03, percent-clipped=2.0 +2023-04-02 18:33:31,756 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-02 18:33:39,041 INFO [train.py:903] (3/4) Epoch 21, batch 1500, loss[loss=0.2293, simple_loss=0.3146, pruned_loss=0.07207, over 19347.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06551, over 3844623.75 frames. 
], batch size: 70, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:33:50,725 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0540, 1.9672, 1.8390, 2.1679, 1.9969, 1.8207, 1.8215, 2.0773], + device='cuda:3'), covar=tensor([0.0955, 0.1410, 0.1331, 0.0963, 0.1173, 0.0541, 0.1254, 0.0649], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0354, 0.0307, 0.0248, 0.0297, 0.0249, 0.0307, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:34:26,155 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1019, 2.0397, 1.6839, 2.0984, 1.9247, 1.7871, 1.7001, 1.9912], + device='cuda:3'), covar=tensor([0.1049, 0.1506, 0.1602, 0.1133, 0.1450, 0.0637, 0.1502, 0.0796], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0355, 0.0309, 0.0249, 0.0298, 0.0251, 0.0309, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:34:42,051 INFO [train.py:903] (3/4) Epoch 21, batch 1550, loss[loss=0.2338, simple_loss=0.3111, pruned_loss=0.07823, over 19671.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.29, pruned_loss=0.06569, over 3843234.24 frames. ], batch size: 58, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:35:05,479 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 5.280e+02 6.228e+02 7.726e+02 2.313e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 18:35:44,859 INFO [train.py:903] (3/4) Epoch 21, batch 1600, loss[loss=0.2258, simple_loss=0.3072, pruned_loss=0.07225, over 19574.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.289, pruned_loss=0.0651, over 3855959.44 frames. ], batch size: 61, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:07,166 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 18:36:33,727 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 18:36:48,270 INFO [train.py:903] (3/4) Epoch 21, batch 1650, loss[loss=0.1956, simple_loss=0.2764, pruned_loss=0.05742, over 19738.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2889, pruned_loss=0.06492, over 3835141.19 frames. ], batch size: 51, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:36:54,047 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7846, 1.7128, 1.3749, 1.7154, 1.8062, 1.4494, 1.4017, 1.6292], + device='cuda:3'), covar=tensor([0.1227, 0.1610, 0.1880, 0.1228, 0.1413, 0.0994, 0.1955, 0.1038], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0357, 0.0309, 0.0250, 0.0300, 0.0251, 0.0309, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:37:12,981 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.591e+02 5.832e+02 7.172e+02 1.632e+03, threshold=1.166e+03, percent-clipped=1.0 +2023-04-02 18:37:43,213 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138254.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:46,576 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138257.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:37:49,813 INFO [train.py:903] (3/4) Epoch 21, batch 1700, loss[loss=0.218, simple_loss=0.3035, pruned_loss=0.06625, over 19388.00 frames. 
], tot_loss[loss=0.2092, simple_loss=0.2887, pruned_loss=0.06489, over 3837242.31 frames. ], batch size: 70, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:38:25,357 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138288.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:38:28,571 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 18:38:53,713 INFO [train.py:903] (3/4) Epoch 21, batch 1750, loss[loss=0.1939, simple_loss=0.2746, pruned_loss=0.05664, over 19766.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2869, pruned_loss=0.0642, over 3837512.39 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:39:16,069 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 4.819e+02 5.989e+02 8.047e+02 2.111e+03, threshold=1.198e+03, percent-clipped=8.0 +2023-04-02 18:39:30,289 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-02 18:39:30,904 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:39:55,296 INFO [train.py:903] (3/4) Epoch 21, batch 1800, loss[loss=0.1806, simple_loss=0.2606, pruned_loss=0.05033, over 19733.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2873, pruned_loss=0.06447, over 3838133.25 frames. ], batch size: 51, lr: 3.95e-03, grad_scale: 8.0 +2023-04-02 18:40:05,998 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138369.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:10,864 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138372.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:40:54,423 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 18:40:57,827 INFO [train.py:903] (3/4) Epoch 21, batch 1850, loss[loss=0.2277, simple_loss=0.2937, pruned_loss=0.08089, over 19468.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2873, pruned_loss=0.06438, over 3844838.93 frames. ], batch size: 49, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:41:22,807 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 4.784e+02 5.677e+02 7.800e+02 1.333e+03, threshold=1.135e+03, percent-clipped=2.0 +2023-04-02 18:41:34,221 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 18:41:34,577 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2282, 2.0834, 2.1085, 1.8682, 1.6661, 1.8265, 0.5269, 1.2107], + device='cuda:3'), covar=tensor([0.0660, 0.0662, 0.0389, 0.0789, 0.1101, 0.0949, 0.1342, 0.1059], + device='cuda:3'), in_proj_covar=tensor([0.0360, 0.0354, 0.0359, 0.0383, 0.0460, 0.0388, 0.0335, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:42:01,831 INFO [train.py:903] (3/4) Epoch 21, batch 1900, loss[loss=0.1894, simple_loss=0.2726, pruned_loss=0.0531, over 19851.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2871, pruned_loss=0.06428, over 3846004.56 frames. ], batch size: 52, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:42:18,810 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. 
Duration: 29.816625 +2023-04-02 18:42:23,575 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 18:42:48,430 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 18:43:04,472 INFO [train.py:903] (3/4) Epoch 21, batch 1950, loss[loss=0.2392, simple_loss=0.3236, pruned_loss=0.07741, over 19679.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06416, over 3849369.85 frames. ], batch size: 55, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:43:27,750 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.334e+02 4.783e+02 6.007e+02 7.405e+02 3.008e+03, threshold=1.201e+03, percent-clipped=4.0 +2023-04-02 18:43:49,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1343, 1.9965, 1.8978, 1.6728, 1.5693, 1.6855, 0.4752, 1.0010], + device='cuda:3'), covar=tensor([0.0656, 0.0610, 0.0425, 0.0722, 0.1116, 0.0809, 0.1276, 0.1057], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0354, 0.0360, 0.0383, 0.0461, 0.0389, 0.0336, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:44:06,909 INFO [train.py:903] (3/4) Epoch 21, batch 2000, loss[loss=0.2259, simple_loss=0.3193, pruned_loss=0.06618, over 19591.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2877, pruned_loss=0.06383, over 3856701.02 frames. ], batch size: 61, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:44:09,466 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.9185, 5.3767, 2.9272, 4.7648, 1.4221, 5.4445, 5.2675, 5.5185], + device='cuda:3'), covar=tensor([0.0407, 0.0814, 0.1989, 0.0736, 0.3529, 0.0569, 0.0753, 0.1103], + device='cuda:3'), in_proj_covar=tensor([0.0497, 0.0407, 0.0493, 0.0344, 0.0402, 0.0429, 0.0421, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:45:03,888 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 18:45:08,462 INFO [train.py:903] (3/4) Epoch 21, batch 2050, loss[loss=0.2105, simple_loss=0.2951, pruned_loss=0.06294, over 17236.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06404, over 3831641.90 frames. ], batch size: 101, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:45:22,115 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 18:45:23,304 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 18:45:28,193 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138625.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:32,705 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138628.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:33,534 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.887e+02 4.818e+02 6.151e+02 8.119e+02 1.355e+03, threshold=1.230e+03, percent-clipped=5.0 +2023-04-02 18:45:38,226 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138632.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:45:45,206 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-02 18:46:00,112 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:03,382 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9689, 4.5158, 2.7588, 3.9848, 0.9623, 4.4442, 4.3246, 4.5134], + device='cuda:3'), covar=tensor([0.0506, 0.0855, 0.1898, 0.0764, 0.3888, 0.0599, 0.0778, 0.1017], + device='cuda:3'), in_proj_covar=tensor([0.0493, 0.0404, 0.0487, 0.0342, 0.0398, 0.0425, 0.0417, 0.0454], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:46:03,580 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138653.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:46:12,219 INFO [train.py:903] (3/4) Epoch 21, batch 2100, loss[loss=0.2069, simple_loss=0.286, pruned_loss=0.06384, over 19763.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.06448, over 3819653.92 frames. ], batch size: 54, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:46:29,064 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1016, 2.8444, 2.2374, 2.1998, 2.0717, 2.4588, 1.0306, 2.0484], + device='cuda:3'), covar=tensor([0.0747, 0.0592, 0.0707, 0.1086, 0.1078, 0.1186, 0.1407, 0.1071], + device='cuda:3'), in_proj_covar=tensor([0.0360, 0.0353, 0.0359, 0.0382, 0.0460, 0.0388, 0.0335, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:46:33,620 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5246, 1.0711, 1.2895, 1.2085, 2.1931, 0.9757, 1.9008, 2.4008], + device='cuda:3'), covar=tensor([0.0728, 0.2894, 0.2958, 0.1709, 0.0881, 0.2253, 0.1234, 0.0517], + device='cuda:3'), in_proj_covar=tensor([0.0408, 0.0365, 0.0384, 0.0346, 0.0373, 0.0349, 0.0376, 0.0401], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:46:39,345 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 18:46:41,766 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=138684.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:01,045 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138699.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:02,063 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 18:47:06,986 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138704.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:14,547 INFO [train.py:903] (3/4) Epoch 21, batch 2150, loss[loss=0.2222, simple_loss=0.3034, pruned_loss=0.0705, over 19741.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2893, pruned_loss=0.06492, over 3824947.66 frames. 
], batch size: 51, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:47:21,597 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:47:26,591 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0159, 2.0945, 2.3795, 2.6875, 1.9621, 2.5658, 2.4581, 2.1557], + device='cuda:3'), covar=tensor([0.4251, 0.3831, 0.1817, 0.2386, 0.4337, 0.2153, 0.4485, 0.3165], + device='cuda:3'), in_proj_covar=tensor([0.0883, 0.0946, 0.0707, 0.0924, 0.0865, 0.0797, 0.0827, 0.0772], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 18:47:37,627 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 5.033e+02 5.895e+02 7.227e+02 1.459e+03, threshold=1.179e+03, percent-clipped=3.0 +2023-04-02 18:48:00,981 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138747.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:48:18,144 INFO [train.py:903] (3/4) Epoch 21, batch 2200, loss[loss=0.2451, simple_loss=0.3279, pruned_loss=0.08114, over 19414.00 frames. ], tot_loss[loss=0.208, simple_loss=0.288, pruned_loss=0.06398, over 3841591.23 frames. ], batch size: 70, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:08,123 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:49:20,488 INFO [train.py:903] (3/4) Epoch 21, batch 2250, loss[loss=0.2323, simple_loss=0.319, pruned_loss=0.07277, over 19608.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2884, pruned_loss=0.06443, over 3841383.54 frames. ], batch size: 61, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:49:44,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.352e+02 5.230e+02 6.732e+02 8.271e+02 2.316e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-04-02 18:50:23,834 INFO [train.py:903] (3/4) Epoch 21, batch 2300, loss[loss=0.187, simple_loss=0.2683, pruned_loss=0.05283, over 19613.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2884, pruned_loss=0.06484, over 3834279.31 frames. ], batch size: 50, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:50:38,383 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 18:51:27,176 INFO [train.py:903] (3/4) Epoch 21, batch 2350, loss[loss=0.228, simple_loss=0.2997, pruned_loss=0.07816, over 13187.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06479, over 3833126.06 frames. ], batch size: 136, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:51:48,954 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:51:50,726 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.807e+02 6.646e+02 8.268e+02 1.737e+03, threshold=1.329e+03, percent-clipped=3.0 +2023-04-02 18:52:08,317 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 18:52:24,449 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 18:52:31,433 INFO [train.py:903] (3/4) Epoch 21, batch 2400, loss[loss=0.2242, simple_loss=0.2992, pruned_loss=0.07456, over 19773.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06484, over 3826748.05 frames. 
], batch size: 46, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:25,710 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7160, 1.8418, 1.3799, 1.6755, 1.7260, 1.2856, 1.3503, 1.5062], + device='cuda:3'), covar=tensor([0.1325, 0.1622, 0.2022, 0.1270, 0.1516, 0.1215, 0.2086, 0.1205], + device='cuda:3'), in_proj_covar=tensor([0.0266, 0.0354, 0.0308, 0.0248, 0.0298, 0.0249, 0.0307, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:53:26,798 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139003.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:34,427 INFO [train.py:903] (3/4) Epoch 21, batch 2450, loss[loss=0.2003, simple_loss=0.2723, pruned_loss=0.06421, over 19095.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2886, pruned_loss=0.06481, over 3826076.43 frames. ], batch size: 42, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:53:59,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139028.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:53:59,946 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.027e+02 6.383e+02 8.090e+02 1.476e+03, threshold=1.277e+03, percent-clipped=1.0 +2023-04-02 18:54:17,224 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:22,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:32,631 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139055.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:38,306 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:54:39,184 INFO [train.py:903] (3/4) Epoch 21, batch 2500, loss[loss=0.2478, simple_loss=0.3192, pruned_loss=0.08814, over 19660.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2896, pruned_loss=0.06532, over 3831436.80 frames. ], batch size: 58, lr: 3.94e-03, grad_scale: 8.0 +2023-04-02 18:55:05,390 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139080.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:55:35,400 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9781, 1.7339, 1.6520, 1.9101, 1.6036, 1.5955, 1.5389, 1.8068], + device='cuda:3'), covar=tensor([0.1022, 0.1443, 0.1401, 0.1116, 0.1373, 0.0573, 0.1514, 0.0739], + device='cuda:3'), in_proj_covar=tensor([0.0265, 0.0353, 0.0307, 0.0247, 0.0296, 0.0248, 0.0306, 0.0252], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 18:55:42,986 INFO [train.py:903] (3/4) Epoch 21, batch 2550, loss[loss=0.1874, simple_loss=0.2737, pruned_loss=0.05053, over 19676.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2884, pruned_loss=0.06499, over 3831341.98 frames. ], batch size: 53, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:06,423 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.656e+02 4.926e+02 6.170e+02 7.459e+02 2.294e+03, threshold=1.234e+03, percent-clipped=3.0 +2023-04-02 18:56:35,495 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-02 18:56:43,491 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139158.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:56:45,332 INFO [train.py:903] (3/4) Epoch 21, batch 2600, loss[loss=0.2135, simple_loss=0.2943, pruned_loss=0.06642, over 19711.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2893, pruned_loss=0.0652, over 3827934.65 frames. ], batch size: 51, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:56:49,215 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:02,184 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:57:47,166 INFO [train.py:903] (3/4) Epoch 21, batch 2650, loss[loss=0.2298, simple_loss=0.3018, pruned_loss=0.07889, over 19767.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06495, over 3828740.72 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 18:58:08,403 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 18:58:13,099 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 4.962e+02 5.879e+02 7.846e+02 2.263e+03, threshold=1.176e+03, percent-clipped=6.0 +2023-04-02 18:58:29,464 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5853, 1.4567, 1.4942, 2.2050, 1.7237, 1.9073, 2.0754, 1.6673], + device='cuda:3'), covar=tensor([0.0823, 0.0968, 0.1021, 0.0766, 0.0822, 0.0771, 0.0817, 0.0731], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0225, 0.0241, 0.0225, 0.0211, 0.0187, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 18:58:39,736 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4363, 1.4996, 1.8424, 1.6685, 3.0220, 2.5613, 3.3708, 1.6202], + device='cuda:3'), covar=tensor([0.2542, 0.4408, 0.2809, 0.2034, 0.1662, 0.2039, 0.1492, 0.4164], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0638, 0.0705, 0.0482, 0.0619, 0.0525, 0.0659, 0.0546], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 18:58:45,357 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:58:49,502 INFO [train.py:903] (3/4) Epoch 21, batch 2700, loss[loss=0.2192, simple_loss=0.2992, pruned_loss=0.06962, over 19474.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06448, over 3835839.46 frames. ], batch size: 64, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 18:59:04,235 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139271.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:33,133 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139294.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:49,906 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139307.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 18:59:52,994 INFO [train.py:903] (3/4) Epoch 21, batch 2750, loss[loss=0.2087, simple_loss=0.2832, pruned_loss=0.06713, over 19404.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.06533, over 3818453.69 frames. 
], batch size: 48, lr: 3.93e-03, grad_scale: 4.0 +2023-04-02 19:00:18,058 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.927e+02 4.705e+02 5.796e+02 7.407e+02 1.811e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-02 19:00:55,541 INFO [train.py:903] (3/4) Epoch 21, batch 2800, loss[loss=0.2193, simple_loss=0.2925, pruned_loss=0.07309, over 19660.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.06465, over 3819208.61 frames. ], batch size: 53, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:01:28,012 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139386.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:01:58,454 INFO [train.py:903] (3/4) Epoch 21, batch 2850, loss[loss=0.212, simple_loss=0.292, pruned_loss=0.06601, over 19532.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2891, pruned_loss=0.06491, over 3818211.26 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:02:03,753 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139414.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:09,495 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139419.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:10,544 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9340, 5.0683, 5.7773, 5.7707, 2.1644, 5.4379, 4.7007, 5.3750], + device='cuda:3'), covar=tensor([0.1600, 0.0956, 0.0518, 0.0559, 0.5625, 0.0724, 0.0570, 0.1127], + device='cuda:3'), in_proj_covar=tensor([0.0778, 0.0730, 0.0942, 0.0820, 0.0826, 0.0693, 0.0568, 0.0870], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 19:02:23,967 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.945e+02 5.282e+02 6.184e+02 7.725e+02 1.552e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-02 19:02:24,365 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139430.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:35,340 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:42,196 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139444.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:54,852 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139455.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:02:57,812 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 19:03:00,100 INFO [train.py:903] (3/4) Epoch 21, batch 2900, loss[loss=0.2003, simple_loss=0.2782, pruned_loss=0.0612, over 19607.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2907, pruned_loss=0.06588, over 3812300.90 frames. ], batch size: 50, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:04,508 INFO [train.py:903] (3/4) Epoch 21, batch 2950, loss[loss=0.2551, simple_loss=0.3234, pruned_loss=0.09338, over 13120.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2906, pruned_loss=0.06558, over 3817953.33 frames. 
], batch size: 135, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:04:28,834 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.684e+02 5.947e+02 7.442e+02 1.403e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-02 19:05:06,918 INFO [train.py:903] (3/4) Epoch 21, batch 3000, loss[loss=0.1956, simple_loss=0.2793, pruned_loss=0.056, over 19735.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2899, pruned_loss=0.0654, over 3827991.06 frames. ], batch size: 51, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:05:06,918 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 19:05:20,610 INFO [train.py:937] (3/4) Epoch 21, validation: loss=0.1693, simple_loss=0.2693, pruned_loss=0.03465, over 944034.00 frames. +2023-04-02 19:05:20,613 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 19:05:24,360 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 19:05:28,248 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8757, 1.9830, 2.2427, 2.4956, 1.9075, 2.3929, 2.2714, 2.0465], + device='cuda:3'), covar=tensor([0.4121, 0.3510, 0.1720, 0.2215, 0.3722, 0.1990, 0.4411, 0.3172], + device='cuda:3'), in_proj_covar=tensor([0.0891, 0.0954, 0.0710, 0.0929, 0.0869, 0.0804, 0.0835, 0.0776], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 19:05:41,465 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 19:05:57,513 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139589.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,383 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:12,520 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:06:23,831 INFO [train.py:903] (3/4) Epoch 21, batch 3050, loss[loss=0.1935, simple_loss=0.2788, pruned_loss=0.05408, over 19660.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2907, pruned_loss=0.06589, over 3830609.37 frames. ], batch size: 55, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:06:48,011 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.171e+02 6.119e+02 7.965e+02 1.649e+03, threshold=1.224e+03, percent-clipped=3.0 +2023-04-02 19:06:57,081 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139638.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:01,742 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 19:07:03,673 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139642.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:14,485 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139651.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:07:24,509 INFO [train.py:903] (3/4) Epoch 21, batch 3100, loss[loss=0.2177, simple_loss=0.3022, pruned_loss=0.06655, over 19700.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2916, pruned_loss=0.06652, over 3839652.42 frames. 
], batch size: 59, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:07:34,139 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139667.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:05,845 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8232, 1.9090, 2.2151, 2.3958, 1.7841, 2.2790, 2.2039, 1.9515], + device='cuda:3'), covar=tensor([0.4188, 0.4100, 0.1933, 0.2425, 0.4276, 0.2283, 0.4836, 0.3515], + device='cuda:3'), in_proj_covar=tensor([0.0893, 0.0956, 0.0713, 0.0929, 0.0871, 0.0805, 0.0836, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 19:08:25,946 INFO [train.py:903] (3/4) Epoch 21, batch 3150, loss[loss=0.2012, simple_loss=0.2873, pruned_loss=0.05758, over 19533.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.292, pruned_loss=0.06691, over 3824909.57 frames. ], batch size: 56, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:08:29,912 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8599, 0.9246, 0.8830, 0.7713, 0.7497, 0.8377, 0.1178, 0.3023], + device='cuda:3'), covar=tensor([0.0500, 0.0490, 0.0334, 0.0430, 0.0842, 0.0481, 0.1045, 0.0787], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0354, 0.0359, 0.0383, 0.0459, 0.0388, 0.0334, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 19:08:32,271 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139715.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:08:51,321 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.442e+02 5.084e+02 6.643e+02 9.166e+02 2.493e+03, threshold=1.329e+03, percent-clipped=12.0 +2023-04-02 19:08:52,531 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 19:08:57,931 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-02 19:09:13,969 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-02 19:09:19,228 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139753.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:09:28,042 INFO [train.py:903] (3/4) Epoch 21, batch 3200, loss[loss=0.1675, simple_loss=0.2448, pruned_loss=0.0451, over 19351.00 frames. ], tot_loss[loss=0.213, simple_loss=0.292, pruned_loss=0.06706, over 3810360.77 frames. ], batch size: 47, lr: 3.93e-03, grad_scale: 8.0 +2023-04-02 19:09:36,915 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139766.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:01,973 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:27,849 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:30,928 INFO [train.py:903] (3/4) Epoch 21, batch 3250, loss[loss=0.2556, simple_loss=0.3288, pruned_loss=0.09115, over 12919.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2917, pruned_loss=0.06698, over 3808063.28 frames. 
], batch size: 138, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:10:46,075 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139822.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:48,411 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:10:55,283 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.059e+02 5.030e+02 6.558e+02 8.674e+02 2.471e+03, threshold=1.312e+03, percent-clipped=9.0 +2023-04-02 19:11:32,335 INFO [train.py:903] (3/4) Epoch 21, batch 3300, loss[loss=0.2451, simple_loss=0.3326, pruned_loss=0.07878, over 17592.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2917, pruned_loss=0.06656, over 3812833.62 frames. ], batch size: 101, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:11:35,823 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 19:11:43,202 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:12:34,957 INFO [train.py:903] (3/4) Epoch 21, batch 3350, loss[loss=0.2194, simple_loss=0.2997, pruned_loss=0.06954, over 19525.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2923, pruned_loss=0.06668, over 3815803.61 frames. ], batch size: 56, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:12:44,268 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2776, 1.2042, 1.6169, 1.0999, 2.3783, 3.3378, 3.0428, 3.4938], + device='cuda:3'), covar=tensor([0.1498, 0.3938, 0.3507, 0.2558, 0.0621, 0.0212, 0.0223, 0.0271], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0321, 0.0350, 0.0263, 0.0242, 0.0185, 0.0216, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 19:13:00,886 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.734e+02 5.660e+02 7.256e+02 1.171e+03, threshold=1.132e+03, percent-clipped=0.0 +2023-04-02 19:13:04,675 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139933.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:17,290 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=139944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:13:37,533 INFO [train.py:903] (3/4) Epoch 21, batch 3400, loss[loss=0.1684, simple_loss=0.2468, pruned_loss=0.04498, over 19362.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2921, pruned_loss=0.06687, over 3815032.41 frames. 
], batch size: 47, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:13:42,168 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9150, 1.3121, 1.5207, 1.4532, 3.4537, 1.0728, 2.3293, 3.9049], + device='cuda:3'), covar=tensor([0.0472, 0.2945, 0.3000, 0.2044, 0.0727, 0.2656, 0.1366, 0.0233], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0363, 0.0382, 0.0343, 0.0369, 0.0346, 0.0373, 0.0398], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:13:52,367 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139971.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:21,017 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8083, 1.6156, 1.4187, 1.7723, 1.3947, 1.5152, 1.4110, 1.6641], + device='cuda:3'), covar=tensor([0.1118, 0.1206, 0.1541, 0.1026, 0.1326, 0.0655, 0.1546, 0.0803], + device='cuda:3'), in_proj_covar=tensor([0.0268, 0.0356, 0.0309, 0.0249, 0.0299, 0.0250, 0.0309, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:14:22,097 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139996.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:41,270 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140009.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:14:42,107 INFO [train.py:903] (3/4) Epoch 21, batch 3450, loss[loss=0.2102, simple_loss=0.2918, pruned_loss=0.06428, over 17453.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2914, pruned_loss=0.06646, over 3805786.24 frames. ], batch size: 101, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:14:43,261 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 19:14:46,244 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.39 vs. limit=5.0 +2023-04-02 19:14:56,584 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140022.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:06,287 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+02 5.068e+02 6.577e+02 8.644e+02 2.362e+03, threshold=1.315e+03, percent-clipped=9.0 +2023-04-02 19:15:11,108 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140034.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:27,238 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140047.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:28,360 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140048.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:41,624 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:15:42,383 INFO [train.py:903] (3/4) Epoch 21, batch 3500, loss[loss=0.2572, simple_loss=0.3214, pruned_loss=0.09651, over 12802.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2919, pruned_loss=0.06645, over 3796480.91 frames. ], batch size: 137, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:45,563 INFO [train.py:903] (3/4) Epoch 21, batch 3550, loss[loss=0.2244, simple_loss=0.3081, pruned_loss=0.07034, over 19671.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2917, pruned_loss=0.06653, over 3803274.89 frames. 
], batch size: 59, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:16:59,827 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140121.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:08,454 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:10,317 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.095e+02 6.549e+02 8.089e+02 2.006e+03, threshold=1.310e+03, percent-clipped=2.0 +2023-04-02 19:17:12,586 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140131.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:34,926 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9357, 4.9839, 5.6754, 5.6674, 1.9211, 5.3609, 4.5184, 5.3439], + device='cuda:3'), covar=tensor([0.1586, 0.0910, 0.0545, 0.0590, 0.6026, 0.0670, 0.0581, 0.1125], + device='cuda:3'), in_proj_covar=tensor([0.0774, 0.0732, 0.0935, 0.0822, 0.0824, 0.0694, 0.0567, 0.0870], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 19:17:36,901 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140151.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:36,974 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3170, 3.0057, 2.1921, 2.6550, 0.7178, 2.9787, 2.8774, 2.9383], + device='cuda:3'), covar=tensor([0.1097, 0.1412, 0.2090, 0.1136, 0.3906, 0.0985, 0.1135, 0.1389], + device='cuda:3'), in_proj_covar=tensor([0.0501, 0.0412, 0.0493, 0.0348, 0.0404, 0.0431, 0.0425, 0.0462], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:17:47,035 INFO [train.py:903] (3/4) Epoch 21, batch 3600, loss[loss=0.2388, simple_loss=0.3189, pruned_loss=0.07933, over 19671.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2918, pruned_loss=0.06649, over 3818021.28 frames. ], batch size: 58, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:17:56,437 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140166.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:17:56,723 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3069, 2.0332, 1.5629, 1.3944, 1.8673, 1.2905, 1.3380, 1.7763], + device='cuda:3'), covar=tensor([0.0859, 0.0749, 0.1040, 0.0779, 0.0485, 0.1182, 0.0589, 0.0427], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0314, 0.0337, 0.0262, 0.0246, 0.0336, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:17:58,699 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140168.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:18:34,548 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0438, 1.9431, 1.7849, 2.1344, 1.8733, 1.7570, 1.7050, 1.9863], + device='cuda:3'), covar=tensor([0.1054, 0.1486, 0.1335, 0.0975, 0.1288, 0.0551, 0.1396, 0.0681], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0360, 0.0311, 0.0251, 0.0301, 0.0252, 0.0310, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:18:51,571 INFO [train.py:903] (3/4) Epoch 21, batch 3650, loss[loss=0.2173, simple_loss=0.2865, pruned_loss=0.07404, over 19474.00 frames. 
], tot_loss[loss=0.2128, simple_loss=0.2921, pruned_loss=0.06677, over 3820793.10 frames. ], batch size: 49, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:18:54,128 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140212.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:15,567 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.123e+02 6.079e+02 7.474e+02 1.635e+03, threshold=1.216e+03, percent-clipped=1.0 +2023-04-02 19:19:37,431 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140246.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:19:54,178 INFO [train.py:903] (3/4) Epoch 21, batch 3700, loss[loss=0.1956, simple_loss=0.2709, pruned_loss=0.06017, over 19487.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2915, pruned_loss=0.06685, over 3822047.33 frames. ], batch size: 49, lr: 3.92e-03, grad_scale: 8.0 +2023-04-02 19:20:01,738 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140266.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:15,897 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140277.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:20,535 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140281.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:23,884 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140283.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:24,931 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2616, 3.8291, 3.9391, 3.9374, 1.5294, 3.7065, 3.2291, 3.6996], + device='cuda:3'), covar=tensor([0.1706, 0.0810, 0.0630, 0.0778, 0.5776, 0.0948, 0.0741, 0.1128], + device='cuda:3'), in_proj_covar=tensor([0.0765, 0.0723, 0.0923, 0.0811, 0.0812, 0.0687, 0.0560, 0.0859], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 19:20:49,130 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140304.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:20:55,397 INFO [train.py:903] (3/4) Epoch 21, batch 3750, loss[loss=0.1933, simple_loss=0.272, pruned_loss=0.05733, over 19500.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.292, pruned_loss=0.06709, over 3816803.42 frames. 
], batch size: 49, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:21:02,665 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:17,923 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:20,277 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140329.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:22,189 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.554e+02 6.128e+02 7.118e+02 1.255e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-02 19:21:33,873 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:21:40,806 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4144, 1.7543, 2.2056, 1.7576, 3.2626, 4.8444, 4.7293, 5.2214], + device='cuda:3'), covar=tensor([0.1614, 0.3475, 0.3070, 0.2241, 0.0545, 0.0172, 0.0156, 0.0162], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0320, 0.0348, 0.0262, 0.0242, 0.0184, 0.0215, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 19:21:57,852 INFO [train.py:903] (3/4) Epoch 21, batch 3800, loss[loss=0.2697, simple_loss=0.3305, pruned_loss=0.1044, over 13447.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2899, pruned_loss=0.06605, over 3806897.47 frames. ], batch size: 138, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:22:30,457 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 19:22:54,726 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:23:01,180 INFO [train.py:903] (3/4) Epoch 21, batch 3850, loss[loss=0.2429, simple_loss=0.3329, pruned_loss=0.07643, over 19305.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2907, pruned_loss=0.06652, over 3806562.83 frames. ], batch size: 66, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:23:02,817 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4265, 1.5184, 1.7476, 1.6874, 2.5640, 2.3116, 2.6781, 1.1018], + device='cuda:3'), covar=tensor([0.2342, 0.4022, 0.2583, 0.1789, 0.1454, 0.1928, 0.1432, 0.4162], + device='cuda:3'), in_proj_covar=tensor([0.0531, 0.0639, 0.0705, 0.0483, 0.0616, 0.0528, 0.0660, 0.0545], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 19:23:25,867 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 5.035e+02 6.153e+02 7.922e+02 1.662e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-04-02 19:23:33,491 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. limit=5.0 +2023-04-02 19:24:03,190 INFO [train.py:903] (3/4) Epoch 21, batch 3900, loss[loss=0.2031, simple_loss=0.295, pruned_loss=0.05563, over 19681.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2893, pruned_loss=0.06545, over 3819282.43 frames. 
], batch size: 59, lr: 3.92e-03, grad_scale: 4.0 +2023-04-02 19:24:04,950 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0651, 1.8387, 1.7168, 1.9828, 1.7019, 1.7026, 1.6797, 1.8836], + device='cuda:3'), covar=tensor([0.1037, 0.1378, 0.1490, 0.1033, 0.1397, 0.0609, 0.1472, 0.0777], + device='cuda:3'), in_proj_covar=tensor([0.0271, 0.0360, 0.0311, 0.0250, 0.0301, 0.0252, 0.0310, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:24:09,119 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:10,724 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7798, 1.8877, 2.1661, 2.2990, 1.6983, 2.2067, 2.1825, 2.0192], + device='cuda:3'), covar=tensor([0.4099, 0.3682, 0.1820, 0.2308, 0.3947, 0.2029, 0.4712, 0.3201], + device='cuda:3'), in_proj_covar=tensor([0.0891, 0.0953, 0.0714, 0.0928, 0.0871, 0.0808, 0.0836, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 19:24:17,262 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140472.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:24:55,683 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140502.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:01,348 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140507.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:04,340 INFO [train.py:903] (3/4) Epoch 21, batch 3950, loss[loss=0.2448, simple_loss=0.3091, pruned_loss=0.09031, over 13707.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.06575, over 3827214.41 frames. ], batch size: 137, lr: 3.91e-03, grad_scale: 4.0 +2023-04-02 19:25:10,277 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 19:25:20,279 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140522.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,824 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:26,932 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140527.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:31,861 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.763e+02 5.824e+02 7.544e+02 1.413e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-02 19:25:40,198 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140537.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:42,492 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140539.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:25:51,855 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140547.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:07,106 INFO [train.py:903] (3/4) Epoch 21, batch 4000, loss[loss=0.2264, simple_loss=0.3072, pruned_loss=0.0728, over 19588.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2904, pruned_loss=0.06596, over 3819056.80 frames. 
], batch size: 61, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:26:09,736 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140562.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:13,074 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140564.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:33,130 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140580.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:36,738 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:39,037 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4128, 1.3864, 1.5662, 1.6750, 3.0295, 1.2782, 2.3548, 3.4203], + device='cuda:3'), covar=tensor([0.0521, 0.2594, 0.2720, 0.1653, 0.0694, 0.2241, 0.1175, 0.0245], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0364, 0.0384, 0.0344, 0.0371, 0.0347, 0.0373, 0.0399], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:26:41,328 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:26:55,193 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 19:27:09,320 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140608.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:11,209 INFO [train.py:903] (3/4) Epoch 21, batch 4050, loss[loss=0.1845, simple_loss=0.2641, pruned_loss=0.05248, over 19385.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2889, pruned_loss=0.06528, over 3819120.67 frames. ], batch size: 48, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:27:25,055 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140621.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:27:36,528 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 5.165e+02 6.710e+02 8.332e+02 1.443e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-04-02 19:27:37,926 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0302, 1.9022, 1.7048, 2.0840, 1.8060, 1.7330, 1.6097, 1.8823], + device='cuda:3'), covar=tensor([0.1068, 0.1430, 0.1499, 0.0997, 0.1289, 0.0570, 0.1489, 0.0773], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0359, 0.0311, 0.0249, 0.0300, 0.0252, 0.0310, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:28:13,404 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-02 19:28:13,821 INFO [train.py:903] (3/4) Epoch 21, batch 4100, loss[loss=0.2204, simple_loss=0.2997, pruned_loss=0.07058, over 18081.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2876, pruned_loss=0.06464, over 3825620.81 frames. ], batch size: 83, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:28:52,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 19:29:08,488 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.04 vs. limit=5.0 +2023-04-02 19:29:15,738 INFO [train.py:903] (3/4) Epoch 21, batch 4150, loss[loss=0.2318, simple_loss=0.3081, pruned_loss=0.07781, over 19587.00 frames. 
], tot_loss[loss=0.208, simple_loss=0.2869, pruned_loss=0.06452, over 3822786.08 frames. ], batch size: 61, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:29:42,685 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.318e+02 6.390e+02 7.957e+02 1.686e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 19:29:49,995 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140736.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:29:55,919 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140741.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:05,109 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140749.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:30:17,540 INFO [train.py:903] (3/4) Epoch 21, batch 4200, loss[loss=0.2007, simple_loss=0.2897, pruned_loss=0.05587, over 19683.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2876, pruned_loss=0.06464, over 3825629.55 frames. ], batch size: 55, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:30:24,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-02 19:31:21,340 INFO [train.py:903] (3/4) Epoch 21, batch 4250, loss[loss=0.187, simple_loss=0.2628, pruned_loss=0.05558, over 19357.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2883, pruned_loss=0.06532, over 3820716.96 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:31:40,147 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 19:31:42,805 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140827.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:31:47,368 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 5.291e+02 6.401e+02 8.566e+02 1.506e+03, threshold=1.280e+03, percent-clipped=6.0 +2023-04-02 19:31:50,845 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 19:31:53,739 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140836.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:01,580 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140843.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:13,503 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140851.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:24,303 INFO [train.py:903] (3/4) Epoch 21, batch 4300, loss[loss=0.216, simple_loss=0.2866, pruned_loss=0.07273, over 19603.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2875, pruned_loss=0.06526, over 3816029.18 frames. 
], batch size: 50, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:32:25,828 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140861.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:29,371 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140864.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:33,870 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140868.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:32:37,200 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=140871.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:33:18,593 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 19:33:24,440 INFO [train.py:903] (3/4) Epoch 21, batch 4350, loss[loss=0.2024, simple_loss=0.2877, pruned_loss=0.05859, over 19675.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2896, pruned_loss=0.06634, over 3816414.44 frames. ], batch size: 59, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:33:51,510 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.814e+02 5.846e+02 6.905e+02 1.613e+03, threshold=1.169e+03, percent-clipped=2.0 +2023-04-02 19:33:57,271 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:25,324 INFO [train.py:903] (3/4) Epoch 21, batch 4400, loss[loss=0.2522, simple_loss=0.3226, pruned_loss=0.09092, over 13144.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2889, pruned_loss=0.06557, over 3813991.60 frames. ], batch size: 135, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:34:32,100 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140965.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:33,402 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140966.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:34:53,850 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 19:34:58,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140986.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:02,881 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 19:35:05,698 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140992.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:23,744 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-02 19:35:27,466 INFO [train.py:903] (3/4) Epoch 21, batch 4450, loss[loss=0.2499, simple_loss=0.3314, pruned_loss=0.08426, over 19603.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2889, pruned_loss=0.06557, over 3817637.02 frames. ], batch size: 61, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:35:37,895 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141017.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:35:54,629 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.226e+02 6.450e+02 8.222e+02 2.218e+03, threshold=1.290e+03, percent-clipped=9.0 +2023-04-02 19:36:32,492 INFO [train.py:903] (3/4) Epoch 21, batch 4500, loss[loss=0.1882, simple_loss=0.2678, pruned_loss=0.05434, over 19688.00 frames. 
], tot_loss[loss=0.2104, simple_loss=0.2892, pruned_loss=0.06578, over 3824636.71 frames. ], batch size: 53, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:36:38,415 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7961, 4.3810, 2.8168, 3.8957, 1.2073, 4.2817, 4.2149, 4.3263], + device='cuda:3'), covar=tensor([0.0562, 0.0817, 0.1833, 0.0766, 0.3661, 0.0648, 0.0895, 0.1187], + device='cuda:3'), in_proj_covar=tensor([0.0494, 0.0404, 0.0484, 0.0342, 0.0394, 0.0425, 0.0417, 0.0453], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:37:01,396 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141085.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:34,670 INFO [train.py:903] (3/4) Epoch 21, batch 4550, loss[loss=0.1633, simple_loss=0.2411, pruned_loss=0.04279, over 19729.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2878, pruned_loss=0.06507, over 3838981.27 frames. ], batch size: 45, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:37:37,502 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2340, 2.2686, 2.5346, 2.9149, 2.1104, 2.7848, 2.5206, 2.2096], + device='cuda:3'), covar=tensor([0.4118, 0.3922, 0.1766, 0.2840, 0.4522, 0.2208, 0.4780, 0.3392], + device='cuda:3'), in_proj_covar=tensor([0.0891, 0.0957, 0.0713, 0.0930, 0.0872, 0.0809, 0.0838, 0.0777], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 19:37:46,072 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 19:37:46,505 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:37:59,729 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.292e+02 5.246e+02 6.195e+02 7.478e+02 1.454e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-04-02 19:38:10,835 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 19:38:18,684 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141145.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:35,232 INFO [train.py:903] (3/4) Epoch 21, batch 4600, loss[loss=0.225, simple_loss=0.3089, pruned_loss=0.07058, over 19332.00 frames. ], tot_loss[loss=0.209, simple_loss=0.288, pruned_loss=0.06499, over 3842152.37 frames. ], batch size: 66, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:38:36,685 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141161.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:38:49,788 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141171.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:25,219 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141200.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:36,149 INFO [train.py:903] (3/4) Epoch 21, batch 4650, loss[loss=0.1938, simple_loss=0.2777, pruned_loss=0.05494, over 19587.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2878, pruned_loss=0.06486, over 3837888.39 frames. 
], batch size: 52, lr: 3.91e-03, grad_scale: 8.0 +2023-04-02 19:39:53,023 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141222.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:39:56,097 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 19:40:02,331 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.173e+02 4.513e+02 5.853e+02 7.712e+02 1.984e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-02 19:40:05,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 19:40:15,514 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141242.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:21,273 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:40:37,900 INFO [train.py:903] (3/4) Epoch 21, batch 4700, loss[loss=0.1742, simple_loss=0.2459, pruned_loss=0.05123, over 19735.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2892, pruned_loss=0.06559, over 3834652.20 frames. ], batch size: 45, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:40:47,274 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141267.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:00,553 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 19:41:00,660 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141279.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:08,818 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141286.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:38,505 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141309.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:41:39,456 INFO [train.py:903] (3/4) Epoch 21, batch 4750, loss[loss=0.1727, simple_loss=0.2609, pruned_loss=0.04222, over 19759.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2887, pruned_loss=0.06535, over 3838436.30 frames. ], batch size: 47, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:42:03,068 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.546e+02 5.235e+02 6.308e+02 8.195e+02 2.468e+03, threshold=1.262e+03, percent-clipped=8.0 +2023-04-02 19:42:25,003 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:42:39,819 INFO [train.py:903] (3/4) Epoch 21, batch 4800, loss[loss=0.2436, simple_loss=0.3187, pruned_loss=0.08425, over 18796.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2892, pruned_loss=0.06548, over 3833020.31 frames. ], batch size: 74, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:43:23,106 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141394.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:43:24,864 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.05 vs. limit=5.0 +2023-04-02 19:43:41,808 INFO [train.py:903] (3/4) Epoch 21, batch 4850, loss[loss=0.2113, simple_loss=0.2957, pruned_loss=0.06347, over 19408.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2886, pruned_loss=0.06525, over 3827927.47 frames. 
], batch size: 70, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:01,436 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141424.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:07,736 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 19:44:08,841 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.291e+02 5.192e+02 6.534e+02 8.985e+02 2.500e+03, threshold=1.307e+03, percent-clipped=12.0 +2023-04-02 19:44:13,611 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141435.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:44:28,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 19:44:33,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 19:44:34,458 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-02 19:44:37,275 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8774, 1.9802, 2.1505, 2.5019, 1.8548, 2.3881, 2.2101, 2.0306], + device='cuda:3'), covar=tensor([0.4096, 0.3713, 0.1881, 0.2271, 0.3879, 0.2051, 0.4535, 0.3169], + device='cuda:3'), in_proj_covar=tensor([0.0888, 0.0952, 0.0710, 0.0928, 0.0871, 0.0809, 0.0834, 0.0774], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 19:44:40,772 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141456.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:44,244 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:44:45,062 INFO [train.py:903] (3/4) Epoch 21, batch 4900, loss[loss=0.1923, simple_loss=0.2835, pruned_loss=0.05057, over 19627.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2885, pruned_loss=0.06513, over 3830460.01 frames. ], batch size: 57, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:44:47,115 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 19:44:59,109 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0668, 0.8948, 0.8726, 1.0652, 0.7889, 0.9419, 0.8613, 0.9854], + device='cuda:3'), covar=tensor([0.0872, 0.1083, 0.1119, 0.0702, 0.1016, 0.0501, 0.1104, 0.0661], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0362, 0.0313, 0.0252, 0.0302, 0.0254, 0.0311, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 19:45:05,765 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. 
Duration: 26.205 +2023-04-02 19:45:10,383 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141481.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:19,553 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9732, 5.0022, 5.7038, 5.7228, 2.2381, 5.4134, 4.5303, 5.3211], + device='cuda:3'), covar=tensor([0.1628, 0.0899, 0.0614, 0.0614, 0.5602, 0.0728, 0.0689, 0.1264], + device='cuda:3'), in_proj_covar=tensor([0.0787, 0.0736, 0.0951, 0.0832, 0.0831, 0.0704, 0.0573, 0.0878], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 19:45:40,794 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141505.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:45:46,105 INFO [train.py:903] (3/4) Epoch 21, batch 4950, loss[loss=0.2167, simple_loss=0.3059, pruned_loss=0.06371, over 17953.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06504, over 3828575.33 frames. ], batch size: 83, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:03,684 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 19:46:10,475 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.116e+02 6.031e+02 7.721e+02 1.403e+03, threshold=1.206e+03, percent-clipped=1.0 +2023-04-02 19:46:26,570 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141542.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:46:28,480 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 19:46:46,930 INFO [train.py:903] (3/4) Epoch 21, batch 5000, loss[loss=0.175, simple_loss=0.2592, pruned_loss=0.04541, over 19742.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2883, pruned_loss=0.0646, over 3824986.70 frames. ], batch size: 51, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:46:53,586 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 19:46:55,140 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141567.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:47:08,297 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 19:47:48,201 INFO [train.py:903] (3/4) Epoch 21, batch 5050, loss[loss=0.2381, simple_loss=0.3221, pruned_loss=0.07707, over 19345.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06408, over 3828590.68 frames. 
], batch size: 66, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:48:03,144 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141620.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:04,285 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7418, 4.1566, 4.4311, 4.4384, 1.7132, 4.1619, 3.6226, 4.1392], + device='cuda:3'), covar=tensor([0.1605, 0.1091, 0.0626, 0.0658, 0.6154, 0.0961, 0.0701, 0.1191], + device='cuda:3'), in_proj_covar=tensor([0.0791, 0.0742, 0.0959, 0.0836, 0.0839, 0.0708, 0.0578, 0.0883], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 19:48:16,455 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.325e+02 4.754e+02 5.832e+02 6.826e+02 1.423e+03, threshold=1.166e+03, percent-clipped=2.0 +2023-04-02 19:48:25,906 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 19:48:39,270 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141650.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:48:53,396 INFO [train.py:903] (3/4) Epoch 21, batch 5100, loss[loss=0.2277, simple_loss=0.3078, pruned_loss=0.07376, over 18629.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2866, pruned_loss=0.06389, over 3827698.20 frames. ], batch size: 74, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:49:04,595 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 19:49:07,002 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 19:49:11,625 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-02 19:49:11,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141675.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:17,717 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141680.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:30,078 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141691.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:50,713 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141705.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:49:56,350 INFO [train.py:903] (3/4) Epoch 21, batch 5150, loss[loss=0.1937, simple_loss=0.2673, pruned_loss=0.06003, over 19411.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2879, pruned_loss=0.06478, over 3834048.43 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:50:09,418 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-02 19:50:20,928 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 4.996e+02 6.424e+02 8.131e+02 1.633e+03, threshold=1.285e+03, percent-clipped=6.0 +2023-04-02 19:50:33,517 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3936, 1.4610, 1.7047, 1.6053, 2.5655, 2.2410, 2.7091, 1.1872], + device='cuda:3'), covar=tensor([0.2543, 0.4325, 0.2783, 0.2010, 0.1534, 0.2200, 0.1499, 0.4402], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0641, 0.0711, 0.0485, 0.0619, 0.0533, 0.0662, 0.0550], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 19:50:46,539 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 19:50:58,093 INFO [train.py:903] (3/4) Epoch 21, batch 5200, loss[loss=0.1834, simple_loss=0.2726, pruned_loss=0.04711, over 19833.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2894, pruned_loss=0.06558, over 3838770.41 frames. ], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:51:14,053 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 19:51:23,420 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141779.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:51:51,427 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=141803.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:54,966 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141806.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:51:58,274 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 19:51:59,438 INFO [train.py:903] (3/4) Epoch 21, batch 5250, loss[loss=0.2046, simple_loss=0.2775, pruned_loss=0.06584, over 19712.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2903, pruned_loss=0.06605, over 3838727.47 frames. ], batch size: 51, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:52:28,865 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.884e+02 6.185e+02 7.574e+02 1.457e+03, threshold=1.237e+03, percent-clipped=1.0 +2023-04-02 19:53:02,552 INFO [train.py:903] (3/4) Epoch 21, batch 5300, loss[loss=0.1874, simple_loss=0.2758, pruned_loss=0.04947, over 19586.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06579, over 3829805.69 frames. ], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:53:23,415 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:53:24,178 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. 
Duration: 27.92 +2023-04-02 19:53:36,407 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7169, 1.7279, 1.5952, 1.4280, 1.3509, 1.4720, 0.2550, 0.6587], + device='cuda:3'), covar=tensor([0.0639, 0.0576, 0.0408, 0.0618, 0.1203, 0.0728, 0.1249, 0.1078], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0352, 0.0355, 0.0380, 0.0458, 0.0384, 0.0333, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 19:53:45,666 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141894.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:53:55,961 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141901.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:06,888 INFO [train.py:903] (3/4) Epoch 21, batch 5350, loss[loss=0.2071, simple_loss=0.2853, pruned_loss=0.06447, over 19760.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2897, pruned_loss=0.06546, over 3828002.68 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-04-02 19:54:16,602 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:32,496 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.997e+02 5.942e+02 7.628e+02 1.099e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-02 19:54:35,114 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141934.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:54:43,734 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 19:54:51,527 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141946.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:55:07,469 INFO [train.py:903] (3/4) Epoch 21, batch 5400, loss[loss=0.1842, simple_loss=0.2658, pruned_loss=0.05129, over 19853.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2902, pruned_loss=0.06578, over 3824281.75 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:10,567 INFO [train.py:903] (3/4) Epoch 21, batch 5450, loss[loss=0.1851, simple_loss=0.2678, pruned_loss=0.05124, over 19646.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06561, over 3815374.41 frames. ], batch size: 53, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:56:39,854 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.354e+02 6.392e+02 8.231e+02 1.329e+03, threshold=1.278e+03, percent-clipped=1.0 +2023-04-02 19:57:14,046 INFO [train.py:903] (3/4) Epoch 21, batch 5500, loss[loss=0.1799, simple_loss=0.2639, pruned_loss=0.04794, over 19587.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.065, over 3831952.46 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:57:17,998 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142062.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:57:42,014 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 19:57:48,307 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142087.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:58:17,746 INFO [train.py:903] (3/4) Epoch 21, batch 5550, loss[loss=0.2435, simple_loss=0.3042, pruned_loss=0.09141, over 13668.00 frames. 
], tot_loss[loss=0.2091, simple_loss=0.2884, pruned_loss=0.06495, over 3822632.56 frames. ], batch size: 137, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:58:26,202 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 19:58:43,632 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.904e+02 5.778e+02 7.569e+02 2.193e+03, threshold=1.156e+03, percent-clipped=4.0 +2023-04-02 19:59:07,863 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142150.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 19:59:15,748 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 19:59:19,357 INFO [train.py:903] (3/4) Epoch 21, batch 5600, loss[loss=0.2458, simple_loss=0.3215, pruned_loss=0.08501, over 19314.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2892, pruned_loss=0.06557, over 3820783.22 frames. ], batch size: 66, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 19:59:36,307 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142174.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 19:59:37,386 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142175.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:00:08,592 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142199.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:00:20,415 INFO [train.py:903] (3/4) Epoch 21, batch 5650, loss[loss=0.2859, simple_loss=0.3572, pruned_loss=0.1073, over 19735.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.29, pruned_loss=0.06643, over 3827351.05 frames. ], batch size: 63, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:00:36,097 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5221, 2.1749, 1.5601, 1.2646, 2.1306, 1.1011, 1.3010, 1.9532], + device='cuda:3'), covar=tensor([0.1253, 0.0865, 0.1205, 0.1118, 0.0544, 0.1516, 0.0891, 0.0531], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0315, 0.0338, 0.0263, 0.0246, 0.0338, 0.0291, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:00:49,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 4.896e+02 5.835e+02 7.915e+02 1.772e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-02 20:01:10,304 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 20:01:22,637 INFO [train.py:903] (3/4) Epoch 21, batch 5700, loss[loss=0.2007, simple_loss=0.2847, pruned_loss=0.05835, over 19666.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06572, over 3828239.83 frames. ], batch size: 60, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:01:45,802 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:01:59,759 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142290.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:02:26,067 INFO [train.py:903] (3/4) Epoch 21, batch 5750, loss[loss=0.1755, simple_loss=0.2493, pruned_loss=0.05086, over 19790.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2897, pruned_loss=0.06567, over 3817075.14 frames. 
], batch size: 49, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:02:28,359 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 20:02:36,341 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 20:02:41,026 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 20:02:51,479 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 5.170e+02 6.700e+02 8.460e+02 1.665e+03, threshold=1.340e+03, percent-clipped=6.0 +2023-04-02 20:03:26,602 INFO [train.py:903] (3/4) Epoch 21, batch 5800, loss[loss=0.2044, simple_loss=0.2786, pruned_loss=0.06514, over 19427.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2892, pruned_loss=0.06541, over 3812362.01 frames. ], batch size: 48, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:03:59,394 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 20:04:08,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142393.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:22,734 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142405.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:04:28,041 INFO [train.py:903] (3/4) Epoch 21, batch 5850, loss[loss=0.2188, simple_loss=0.304, pruned_loss=0.0668, over 17254.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2895, pruned_loss=0.06531, over 3813801.56 frames. ], batch size: 101, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:04:37,184 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.71 vs. limit=5.0 +2023-04-02 20:04:57,529 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.411e+02 4.867e+02 6.014e+02 7.504e+02 1.855e+03, threshold=1.203e+03, percent-clipped=4.0 +2023-04-02 20:05:31,489 INFO [train.py:903] (3/4) Epoch 21, batch 5900, loss[loss=0.2369, simple_loss=0.313, pruned_loss=0.08035, over 13928.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06485, over 3816924.26 frames. ], batch size: 135, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:05:35,065 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 20:05:36,581 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1535, 1.2879, 1.5640, 1.4185, 2.7466, 1.1626, 2.1742, 3.1074], + device='cuda:3'), covar=tensor([0.0587, 0.2712, 0.2612, 0.1718, 0.0769, 0.2214, 0.1183, 0.0305], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0363, 0.0383, 0.0346, 0.0373, 0.0347, 0.0374, 0.0398], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:05:58,763 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 20:06:36,831 INFO [train.py:903] (3/4) Epoch 21, batch 5950, loss[loss=0.1913, simple_loss=0.271, pruned_loss=0.05584, over 17697.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06511, over 3806877.97 frames. 
], batch size: 101, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:02,051 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.752e+02 5.691e+02 7.550e+02 2.003e+03, threshold=1.138e+03, percent-clipped=5.0 +2023-04-02 20:07:37,097 INFO [train.py:903] (3/4) Epoch 21, batch 6000, loss[loss=0.211, simple_loss=0.2948, pruned_loss=0.06355, over 19027.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2897, pruned_loss=0.06576, over 3802065.74 frames. ], batch size: 75, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:07:37,098 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 20:07:50,389 INFO [train.py:937] (3/4) Epoch 21, validation: loss=0.1692, simple_loss=0.2693, pruned_loss=0.03459, over 944034.00 frames. +2023-04-02 20:07:50,390 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 20:08:27,748 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142591.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:08:52,466 INFO [train.py:903] (3/4) Epoch 21, batch 6050, loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.05383, over 19607.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2901, pruned_loss=0.06604, over 3806005.46 frames. ], batch size: 57, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:03,420 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8030, 1.8927, 2.2020, 2.3525, 1.7218, 2.2903, 2.2729, 2.0513], + device='cuda:3'), covar=tensor([0.4226, 0.3925, 0.1850, 0.2263, 0.3960, 0.2076, 0.4684, 0.3331], + device='cuda:3'), in_proj_covar=tensor([0.0893, 0.0955, 0.0713, 0.0927, 0.0872, 0.0809, 0.0835, 0.0776], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 20:09:18,890 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.812e+02 5.741e+02 7.692e+02 1.541e+03, threshold=1.148e+03, percent-clipped=3.0 +2023-04-02 20:09:41,455 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:09:53,720 INFO [train.py:903] (3/4) Epoch 21, batch 6100, loss[loss=0.197, simple_loss=0.2832, pruned_loss=0.05543, over 19527.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2896, pruned_loss=0.06539, over 3812456.89 frames. ], batch size: 54, lr: 3.89e-03, grad_scale: 8.0 +2023-04-02 20:09:55,353 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:12,469 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142674.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:28,104 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142686.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:10:55,895 INFO [train.py:903] (3/4) Epoch 21, batch 6150, loss[loss=0.2011, simple_loss=0.291, pruned_loss=0.05559, over 19661.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.29, pruned_loss=0.06581, over 3792935.34 frames. ], batch size: 58, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:11:05,637 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-02 20:11:25,007 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.643e+02 5.511e+02 6.918e+02 9.659e+02 2.206e+03, threshold=1.384e+03, percent-clipped=13.0 +2023-04-02 20:11:26,180 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-02 20:11:36,770 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142742.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:11:59,801 INFO [train.py:903] (3/4) Epoch 21, batch 6200, loss[loss=0.189, simple_loss=0.2826, pruned_loss=0.0477, over 19668.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2888, pruned_loss=0.06521, over 3810501.78 frames. ], batch size: 53, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:12:03,977 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. limit=2.0 +2023-04-02 20:12:15,045 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-02 20:13:02,737 INFO [train.py:903] (3/4) Epoch 21, batch 6250, loss[loss=0.2298, simple_loss=0.3001, pruned_loss=0.07971, over 19617.00 frames. ], tot_loss[loss=0.21, simple_loss=0.289, pruned_loss=0.06555, over 3800003.70 frames. ], batch size: 61, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:13:28,466 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 5.305e+02 6.117e+02 7.859e+02 2.157e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 20:13:30,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 20:14:05,184 INFO [train.py:903] (3/4) Epoch 21, batch 6300, loss[loss=0.1669, simple_loss=0.2461, pruned_loss=0.04382, over 19734.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2872, pruned_loss=0.06445, over 3814000.44 frames. ], batch size: 45, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:07,141 INFO [train.py:903] (3/4) Epoch 21, batch 6350, loss[loss=0.1943, simple_loss=0.2749, pruned_loss=0.05684, over 19488.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2866, pruned_loss=0.06362, over 3826028.94 frames. ], batch size: 49, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:15:36,285 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.232e+02 4.679e+02 5.550e+02 7.220e+02 1.923e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-02 20:15:40,034 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=142935.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:15:52,189 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-02 20:16:11,259 INFO [train.py:903] (3/4) Epoch 21, batch 6400, loss[loss=0.1977, simple_loss=0.2951, pruned_loss=0.05019, over 19710.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2872, pruned_loss=0.06353, over 3834753.11 frames. ], batch size: 59, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:14,723 INFO [train.py:903] (3/4) Epoch 21, batch 6450, loss[loss=0.1958, simple_loss=0.2702, pruned_loss=0.06068, over 19363.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.287, pruned_loss=0.06341, over 3836400.17 frames. 
], batch size: 47, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:17:40,516 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.995e+02 6.270e+02 8.275e+02 2.312e+03, threshold=1.254e+03, percent-clipped=6.0 +2023-04-02 20:18:01,479 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 20:18:04,907 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:18:16,205 INFO [train.py:903] (3/4) Epoch 21, batch 6500, loss[loss=0.213, simple_loss=0.3017, pruned_loss=0.06216, over 17564.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2872, pruned_loss=0.06333, over 3850568.56 frames. ], batch size: 101, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:18:23,412 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 20:18:48,310 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.29 vs. limit=5.0 +2023-04-02 20:18:48,998 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143086.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:19:16,578 INFO [train.py:903] (3/4) Epoch 21, batch 6550, loss[loss=0.2056, simple_loss=0.2921, pruned_loss=0.05954, over 17335.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2872, pruned_loss=0.06353, over 3831577.75 frames. ], batch size: 101, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:19:44,581 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 5.073e+02 6.169e+02 7.633e+02 2.146e+03, threshold=1.234e+03, percent-clipped=4.0 +2023-04-02 20:19:46,053 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2634, 5.6391, 3.0738, 4.9224, 1.1336, 5.8763, 5.6847, 5.8877], + device='cuda:3'), covar=tensor([0.0369, 0.0815, 0.1810, 0.0692, 0.4135, 0.0479, 0.0767, 0.0816], + device='cuda:3'), in_proj_covar=tensor([0.0501, 0.0406, 0.0491, 0.0345, 0.0400, 0.0429, 0.0421, 0.0456], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:20:19,908 INFO [train.py:903] (3/4) Epoch 21, batch 6600, loss[loss=0.2055, simple_loss=0.2895, pruned_loss=0.06078, over 19662.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2874, pruned_loss=0.06369, over 3818693.63 frames. ], batch size: 60, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:11,156 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143201.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:21:22,423 INFO [train.py:903] (3/4) Epoch 21, batch 6650, loss[loss=0.2407, simple_loss=0.3111, pruned_loss=0.08522, over 12905.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2863, pruned_loss=0.06316, over 3815794.45 frames. ], batch size: 135, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:21:46,321 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.72 vs. limit=5.0 +2023-04-02 20:21:47,859 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.940e+02 5.672e+02 7.065e+02 1.538e+03, threshold=1.134e+03, percent-clipped=2.0 +2023-04-02 20:22:23,656 INFO [train.py:903] (3/4) Epoch 21, batch 6700, loss[loss=0.1801, simple_loss=0.2629, pruned_loss=0.04858, over 19846.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2863, pruned_loss=0.06316, over 3823547.14 frames. 
], batch size: 52, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:19,534 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143306.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:23,647 INFO [train.py:903] (3/4) Epoch 21, batch 6750, loss[loss=0.1696, simple_loss=0.2547, pruned_loss=0.0423, over 19370.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2865, pruned_loss=0.06352, over 3829422.01 frames. ], batch size: 48, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:23:48,083 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143331.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:23:48,851 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.995e+02 6.197e+02 7.772e+02 2.067e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-02 20:24:20,249 INFO [train.py:903] (3/4) Epoch 21, batch 6800, loss[loss=0.1938, simple_loss=0.2768, pruned_loss=0.05538, over 19599.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2869, pruned_loss=0.06404, over 3835978.03 frames. ], batch size: 50, lr: 3.88e-03, grad_scale: 8.0 +2023-04-02 20:24:21,784 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9785, 4.4947, 2.8300, 3.9187, 1.0047, 4.4474, 4.3888, 4.4742], + device='cuda:3'), covar=tensor([0.0534, 0.0892, 0.1880, 0.0810, 0.3852, 0.0572, 0.0791, 0.0980], + device='cuda:3'), in_proj_covar=tensor([0.0494, 0.0401, 0.0486, 0.0341, 0.0395, 0.0423, 0.0418, 0.0451], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:25:05,895 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 20:25:07,119 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 20:25:09,792 INFO [train.py:903] (3/4) Epoch 22, batch 0, loss[loss=0.2183, simple_loss=0.2984, pruned_loss=0.06906, over 19545.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2984, pruned_loss=0.06906, over 19545.00 frames. ], batch size: 56, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:25:09,792 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 20:25:18,055 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4130, 1.3568, 1.3656, 1.7504, 1.4190, 1.6211, 1.5628, 1.5060], + device='cuda:3'), covar=tensor([0.0759, 0.0925, 0.0909, 0.0631, 0.0907, 0.0776, 0.0883, 0.0699], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0213, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 20:25:20,456 INFO [train.py:937] (3/4) Epoch 22, validation: loss=0.1683, simple_loss=0.2691, pruned_loss=0.03373, over 944034.00 frames. 
+2023-04-02 20:25:20,457 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 20:25:29,870 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9749, 4.3434, 4.6219, 4.6789, 1.7021, 4.3626, 3.8277, 4.3127], + device='cuda:3'), covar=tensor([0.1532, 0.0890, 0.0594, 0.0606, 0.6347, 0.0921, 0.0668, 0.1163], + device='cuda:3'), in_proj_covar=tensor([0.0776, 0.0734, 0.0943, 0.0827, 0.0825, 0.0701, 0.0568, 0.0874], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 20:25:31,891 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 20:25:55,302 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143418.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:26:14,249 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.908e+02 5.891e+02 8.006e+02 1.582e+03, threshold=1.178e+03, percent-clipped=4.0 +2023-04-02 20:26:21,014 INFO [train.py:903] (3/4) Epoch 22, batch 50, loss[loss=0.1696, simple_loss=0.2498, pruned_loss=0.04468, over 19378.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2882, pruned_loss=0.06305, over 866096.02 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:26:42,108 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143457.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:26:47,952 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0667, 1.9049, 1.8051, 2.0585, 1.8128, 1.8031, 1.7182, 2.0007], + device='cuda:3'), covar=tensor([0.0936, 0.1381, 0.1331, 0.0979, 0.1222, 0.0498, 0.1350, 0.0630], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0354, 0.0308, 0.0247, 0.0296, 0.0248, 0.0307, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:26:53,919 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-02 20:27:13,769 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143482.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:27:19,994 INFO [train.py:903] (3/4) Epoch 22, batch 100, loss[loss=0.2447, simple_loss=0.3167, pruned_loss=0.08638, over 18206.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06107, over 1539061.47 frames. ], batch size: 83, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:27:23,819 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143491.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:27:31,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 20:28:12,239 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.507e+02 5.227e+02 6.391e+02 8.671e+02 1.540e+03, threshold=1.278e+03, percent-clipped=3.0 +2023-04-02 20:28:19,037 INFO [train.py:903] (3/4) Epoch 22, batch 150, loss[loss=0.2274, simple_loss=0.3051, pruned_loss=0.07489, over 19578.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2871, pruned_loss=0.06296, over 2051844.78 frames. 
], batch size: 61, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:28:45,136 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5020, 2.2893, 1.7193, 1.4017, 2.0671, 1.3288, 1.3668, 1.9546], + device='cuda:3'), covar=tensor([0.1081, 0.0794, 0.0991, 0.0919, 0.0532, 0.1308, 0.0762, 0.0518], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0313, 0.0334, 0.0260, 0.0245, 0.0335, 0.0289, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:29:18,916 INFO [train.py:903] (3/4) Epoch 22, batch 200, loss[loss=0.2228, simple_loss=0.3015, pruned_loss=0.07203, over 18811.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2879, pruned_loss=0.06363, over 2448494.53 frames. ], batch size: 74, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:29:18,969 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 20:29:32,292 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-02 20:30:12,489 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.534e+02 5.144e+02 6.083e+02 7.742e+02 1.350e+03, threshold=1.217e+03, percent-clipped=1.0 +2023-04-02 20:30:20,919 INFO [train.py:903] (3/4) Epoch 22, batch 250, loss[loss=0.1881, simple_loss=0.2593, pruned_loss=0.05845, over 18259.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06333, over 2744845.76 frames. ], batch size: 40, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:30:33,624 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.66 vs. limit=5.0 +2023-04-02 20:31:20,923 INFO [train.py:903] (3/4) Epoch 22, batch 300, loss[loss=0.1964, simple_loss=0.2647, pruned_loss=0.06405, over 19750.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2867, pruned_loss=0.06357, over 2982588.43 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:15,079 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.028e+02 5.065e+02 6.247e+02 8.237e+02 1.383e+03, threshold=1.249e+03, percent-clipped=3.0 +2023-04-02 20:32:22,218 INFO [train.py:903] (3/4) Epoch 22, batch 350, loss[loss=0.1529, simple_loss=0.2366, pruned_loss=0.03457, over 19366.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2847, pruned_loss=0.06261, over 3174051.09 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:32:29,130 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 20:32:51,106 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143762.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:33:01,966 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7503, 4.2310, 4.4355, 4.4503, 1.8628, 4.2084, 3.6452, 4.1854], + device='cuda:3'), covar=tensor([0.1533, 0.0958, 0.0588, 0.0644, 0.5496, 0.0883, 0.0627, 0.1016], + device='cuda:3'), in_proj_covar=tensor([0.0773, 0.0729, 0.0936, 0.0819, 0.0821, 0.0692, 0.0564, 0.0863], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 20:33:20,943 INFO [train.py:903] (3/4) Epoch 22, batch 400, loss[loss=0.2424, simple_loss=0.323, pruned_loss=0.08088, over 19671.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2865, pruned_loss=0.06324, over 3328559.65 frames. 
], batch size: 59, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:10,388 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 20:34:15,304 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.174e+02 5.215e+02 6.557e+02 8.093e+02 2.351e+03, threshold=1.311e+03, percent-clipped=8.0 +2023-04-02 20:34:17,790 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=143835.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:34:20,913 INFO [train.py:903] (3/4) Epoch 22, batch 450, loss[loss=0.1814, simple_loss=0.2541, pruned_loss=0.05429, over 19766.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06411, over 3437628.93 frames. ], batch size: 45, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:34:37,220 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.52 vs. limit=5.0 +2023-04-02 20:34:57,893 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 20:34:58,985 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 20:35:06,270 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3963, 1.4934, 1.7933, 1.6701, 2.7015, 2.1729, 2.7715, 1.2083], + device='cuda:3'), covar=tensor([0.2414, 0.4054, 0.2515, 0.1849, 0.1359, 0.2150, 0.1389, 0.4249], + device='cuda:3'), in_proj_covar=tensor([0.0530, 0.0639, 0.0710, 0.0482, 0.0616, 0.0530, 0.0661, 0.0545], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 20:35:08,556 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143877.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:35:22,926 INFO [train.py:903] (3/4) Epoch 22, batch 500, loss[loss=0.1697, simple_loss=0.2462, pruned_loss=0.04656, over 19136.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2879, pruned_loss=0.06413, over 3533046.14 frames. ], batch size: 42, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:17,499 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 5.123e+02 6.359e+02 8.434e+02 1.804e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-02 20:36:23,290 INFO [train.py:903] (3/4) Epoch 22, batch 550, loss[loss=0.224, simple_loss=0.2958, pruned_loss=0.07606, over 19769.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.288, pruned_loss=0.06443, over 3585547.73 frames. ], batch size: 54, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:36:37,276 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143950.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:37:23,301 INFO [train.py:903] (3/4) Epoch 22, batch 600, loss[loss=0.2235, simple_loss=0.3023, pruned_loss=0.07238, over 18852.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2871, pruned_loss=0.06396, over 3644156.14 frames. 
], batch size: 74, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:37:41,216 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5996, 1.4975, 1.5474, 1.9431, 1.5343, 1.8703, 1.9486, 1.7103], + device='cuda:3'), covar=tensor([0.0801, 0.0919, 0.0957, 0.0732, 0.0833, 0.0694, 0.0764, 0.0686], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0226, 0.0212, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 20:38:06,438 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-02 20:38:06,636 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 20:38:13,658 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9433, 2.7183, 2.3934, 2.8279, 2.5056, 2.3588, 2.0940, 2.6677], + device='cuda:3'), covar=tensor([0.0854, 0.1490, 0.1398, 0.1057, 0.1380, 0.0487, 0.1477, 0.0676], + device='cuda:3'), in_proj_covar=tensor([0.0267, 0.0354, 0.0310, 0.0249, 0.0298, 0.0249, 0.0307, 0.0256], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:38:17,797 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.931e+02 4.914e+02 6.190e+02 8.004e+02 1.732e+03, threshold=1.238e+03, percent-clipped=3.0 +2023-04-02 20:38:23,571 INFO [train.py:903] (3/4) Epoch 22, batch 650, loss[loss=0.1741, simple_loss=0.2525, pruned_loss=0.04786, over 19749.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2874, pruned_loss=0.06391, over 3684678.47 frames. ], batch size: 45, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:38:24,336 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-02 20:39:25,482 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0110, 4.3004, 4.6693, 4.7137, 2.0064, 4.4028, 3.8314, 4.3694], + device='cuda:3'), covar=tensor([0.1648, 0.1491, 0.0618, 0.0679, 0.5837, 0.0943, 0.0675, 0.1182], + device='cuda:3'), in_proj_covar=tensor([0.0774, 0.0733, 0.0937, 0.0821, 0.0826, 0.0695, 0.0565, 0.0867], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 20:39:26,362 INFO [train.py:903] (3/4) Epoch 22, batch 700, loss[loss=0.2235, simple_loss=0.3072, pruned_loss=0.06992, over 18246.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2869, pruned_loss=0.06314, over 3715472.42 frames. ], batch size: 83, lr: 3.78e-03, grad_scale: 8.0 +2023-04-02 20:40:19,657 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.796e+02 6.107e+02 7.975e+02 1.533e+03, threshold=1.221e+03, percent-clipped=5.0 +2023-04-02 20:40:20,134 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144133.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:40:26,351 INFO [train.py:903] (3/4) Epoch 22, batch 750, loss[loss=0.1931, simple_loss=0.2658, pruned_loss=0.06019, over 19424.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2871, pruned_loss=0.06361, over 3734242.95 frames. 
], batch size: 48, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:40:49,266 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144158.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:41:26,350 INFO [train.py:903] (3/4) Epoch 22, batch 800, loss[loss=0.22, simple_loss=0.2982, pruned_loss=0.07084, over 18764.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.06314, over 3759808.84 frames. ], batch size: 74, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:41:44,756 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 20:41:48,073 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144206.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 20:42:19,066 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144231.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 20:42:20,959 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.021e+02 6.351e+02 8.019e+02 1.751e+03, threshold=1.270e+03, percent-clipped=5.0 +2023-04-02 20:42:26,009 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4194, 1.4824, 1.7335, 1.6596, 2.3817, 2.0955, 2.4456, 1.0414], + device='cuda:3'), covar=tensor([0.2687, 0.4657, 0.2976, 0.2230, 0.1773, 0.2464, 0.1683, 0.5000], + device='cuda:3'), in_proj_covar=tensor([0.0530, 0.0642, 0.0711, 0.0482, 0.0619, 0.0529, 0.0661, 0.0547], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 20:42:26,670 INFO [train.py:903] (3/4) Epoch 22, batch 850, loss[loss=0.1972, simple_loss=0.28, pruned_loss=0.05722, over 19855.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2863, pruned_loss=0.06334, over 3784689.83 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:43:19,878 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 20:43:26,414 INFO [train.py:903] (3/4) Epoch 22, batch 900, loss[loss=0.2149, simple_loss=0.2907, pruned_loss=0.06956, over 19565.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2872, pruned_loss=0.06378, over 3782968.20 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:44:03,937 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8683, 1.8634, 1.3731, 1.8020, 1.8499, 1.4207, 1.5019, 1.6468], + device='cuda:3'), covar=tensor([0.1192, 0.1478, 0.1955, 0.1233, 0.1379, 0.0977, 0.1816, 0.1077], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0356, 0.0312, 0.0249, 0.0299, 0.0249, 0.0308, 0.0256], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:44:21,523 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.404e+02 5.111e+02 6.361e+02 7.451e+02 1.172e+03, threshold=1.272e+03, percent-clipped=0.0 +2023-04-02 20:44:26,103 INFO [train.py:903] (3/4) Epoch 22, batch 950, loss[loss=0.2002, simple_loss=0.2713, pruned_loss=0.06459, over 19711.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2869, pruned_loss=0.06372, over 3782103.69 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:44:30,638 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 20:45:22,972 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6613, 1.4965, 1.4969, 2.0694, 1.5617, 1.8791, 1.9918, 1.6632], + device='cuda:3'), covar=tensor([0.0842, 0.0955, 0.1025, 0.0759, 0.0870, 0.0818, 0.0824, 0.0719], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0221, 0.0225, 0.0238, 0.0225, 0.0211, 0.0186, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 20:45:27,344 INFO [train.py:903] (3/4) Epoch 22, batch 1000, loss[loss=0.1948, simple_loss=0.2804, pruned_loss=0.05464, over 19747.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2882, pruned_loss=0.06451, over 3767066.00 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:45:29,233 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 20:45:53,083 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-02 20:46:17,111 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=144429.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:46:17,952 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 20:46:22,205 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 5.215e+02 6.539e+02 8.059e+02 1.779e+03, threshold=1.308e+03, percent-clipped=4.0 +2023-04-02 20:46:26,888 INFO [train.py:903] (3/4) Epoch 22, batch 1050, loss[loss=0.1859, simple_loss=0.2667, pruned_loss=0.05255, over 19728.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06434, over 3789937.50 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:47:00,704 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 20:47:22,075 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-02 20:47:26,634 INFO [train.py:903] (3/4) Epoch 22, batch 1100, loss[loss=0.2615, simple_loss=0.3261, pruned_loss=0.09848, over 13630.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2876, pruned_loss=0.06442, over 3794014.81 frames. ], batch size: 135, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:48:21,826 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.380e+02 5.103e+02 6.169e+02 7.547e+02 2.403e+03, threshold=1.234e+03, percent-clipped=2.0 +2023-04-02 20:48:27,962 INFO [train.py:903] (3/4) Epoch 22, batch 1150, loss[loss=0.1813, simple_loss=0.2621, pruned_loss=0.05021, over 19576.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06339, over 3814204.19 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:49:28,353 INFO [train.py:903] (3/4) Epoch 22, batch 1200, loss[loss=0.2009, simple_loss=0.2813, pruned_loss=0.06026, over 19847.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2869, pruned_loss=0.06394, over 3807501.74 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 8.0 +2023-04-02 20:50:00,834 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-02 20:50:12,312 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5270, 1.5584, 1.8425, 1.7724, 2.7273, 2.4349, 2.9003, 1.3666], + device='cuda:3'), covar=tensor([0.2333, 0.4186, 0.2534, 0.1800, 0.1505, 0.1936, 0.1418, 0.4149], + device='cuda:3'), in_proj_covar=tensor([0.0531, 0.0643, 0.0712, 0.0484, 0.0621, 0.0529, 0.0662, 0.0549], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 20:50:23,743 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 4.877e+02 6.112e+02 7.869e+02 2.071e+03, threshold=1.222e+03, percent-clipped=4.0 +2023-04-02 20:50:27,109 INFO [train.py:903] (3/4) Epoch 22, batch 1250, loss[loss=0.24, simple_loss=0.3165, pruned_loss=0.08174, over 19361.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06438, over 3823701.27 frames. ], batch size: 66, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:51:16,185 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-02 20:51:28,180 INFO [train.py:903] (3/4) Epoch 22, batch 1300, loss[loss=0.1864, simple_loss=0.2713, pruned_loss=0.05078, over 19830.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2869, pruned_loss=0.06396, over 3811329.78 frames. ], batch size: 52, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:26,776 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.679e+02 5.951e+02 8.140e+02 2.957e+03, threshold=1.190e+03, percent-clipped=7.0 +2023-04-02 20:52:30,257 INFO [train.py:903] (3/4) Epoch 22, batch 1350, loss[loss=0.2035, simple_loss=0.2765, pruned_loss=0.06529, over 19617.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2864, pruned_loss=0.06396, over 3816600.68 frames. ], batch size: 50, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:52:48,517 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0191, 1.2207, 1.7355, 1.1766, 2.6303, 3.5318, 3.1909, 3.7464], + device='cuda:3'), covar=tensor([0.1791, 0.3988, 0.3313, 0.2469, 0.0605, 0.0183, 0.0223, 0.0266], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0320, 0.0349, 0.0263, 0.0241, 0.0184, 0.0215, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 20:53:13,406 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=144773.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:53:31,334 INFO [train.py:903] (3/4) Epoch 22, batch 1400, loss[loss=0.2126, simple_loss=0.2766, pruned_loss=0.0743, over 19003.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2865, pruned_loss=0.0637, over 3797760.10 frames. ], batch size: 42, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:54:11,100 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 20:54:28,429 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.383e+02 4.808e+02 5.945e+02 7.380e+02 1.517e+03, threshold=1.189e+03, percent-clipped=2.0 +2023-04-02 20:54:29,518 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 20:54:31,678 INFO [train.py:903] (3/4) Epoch 22, batch 1450, loss[loss=0.21, simple_loss=0.2829, pruned_loss=0.06858, over 19604.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2874, pruned_loss=0.06391, over 3798924.62 frames. 
], batch size: 50, lr: 3.77e-03, grad_scale: 4.0 +2023-04-02 20:54:47,899 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1107, 1.1147, 1.3480, 1.3223, 2.6942, 1.0448, 2.1475, 3.0109], + device='cuda:3'), covar=tensor([0.0616, 0.3032, 0.3183, 0.1923, 0.0806, 0.2549, 0.1235, 0.0376], + device='cuda:3'), in_proj_covar=tensor([0.0405, 0.0363, 0.0383, 0.0345, 0.0371, 0.0348, 0.0375, 0.0401], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:55:30,803 INFO [train.py:903] (3/4) Epoch 22, batch 1500, loss[loss=0.2432, simple_loss=0.3257, pruned_loss=0.08028, over 18804.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.288, pruned_loss=0.06387, over 3806789.12 frames. ], batch size: 74, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:55:31,173 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144888.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:55:49,810 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7082, 1.7513, 1.6274, 1.3837, 1.3895, 1.4275, 0.2212, 0.6454], + device='cuda:3'), covar=tensor([0.0679, 0.0633, 0.0438, 0.0667, 0.1356, 0.0769, 0.1350, 0.1155], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0355, 0.0356, 0.0381, 0.0457, 0.0385, 0.0334, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 20:56:27,870 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.957e+02 5.987e+02 8.036e+02 1.770e+03, threshold=1.197e+03, percent-clipped=5.0 +2023-04-02 20:56:31,408 INFO [train.py:903] (3/4) Epoch 22, batch 1550, loss[loss=0.2029, simple_loss=0.2868, pruned_loss=0.05952, over 19590.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06435, over 3782139.82 frames. ], batch size: 61, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 20:57:08,473 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0805, 5.1254, 5.8907, 5.9105, 1.9315, 5.5538, 4.7654, 5.5378], + device='cuda:3'), covar=tensor([0.1688, 0.0877, 0.0586, 0.0569, 0.6324, 0.0816, 0.0595, 0.1214], + device='cuda:3'), in_proj_covar=tensor([0.0781, 0.0740, 0.0944, 0.0830, 0.0830, 0.0702, 0.0568, 0.0876], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 20:57:21,881 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-02 20:57:30,413 INFO [train.py:903] (3/4) Epoch 22, batch 1600, loss[loss=0.2214, simple_loss=0.3027, pruned_loss=0.07001, over 19605.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06418, over 3802898.41 frames. ], batch size: 57, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:57:50,805 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 20:58:02,389 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:58:27,678 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.884e+02 5.870e+02 7.908e+02 1.403e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-02 20:58:31,145 INFO [train.py:903] (3/4) Epoch 22, batch 1650, loss[loss=0.1974, simple_loss=0.2909, pruned_loss=0.052, over 19633.00 frames. 
], tot_loss[loss=0.2091, simple_loss=0.2891, pruned_loss=0.06451, over 3794448.88 frames. ], batch size: 57, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 20:58:39,518 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:58:48,578 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8336, 1.3120, 1.4527, 1.7071, 3.4055, 1.2709, 2.5098, 3.8575], + device='cuda:3'), covar=tensor([0.0504, 0.2846, 0.2983, 0.1869, 0.0750, 0.2512, 0.1310, 0.0258], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0364, 0.0386, 0.0348, 0.0373, 0.0349, 0.0377, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:58:49,782 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9654, 1.6186, 1.8197, 1.7475, 4.4951, 1.0814, 2.6723, 4.9429], + device='cuda:3'), covar=tensor([0.0427, 0.2781, 0.2919, 0.2007, 0.0760, 0.2798, 0.1379, 0.0161], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0364, 0.0386, 0.0348, 0.0373, 0.0349, 0.0377, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 20:59:27,094 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145084.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 20:59:31,377 INFO [train.py:903] (3/4) Epoch 22, batch 1700, loss[loss=0.2145, simple_loss=0.3014, pruned_loss=0.06374, over 19341.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2884, pruned_loss=0.06403, over 3803763.38 frames. ], batch size: 70, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:00:08,578 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 21:00:28,006 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 4.845e+02 6.228e+02 7.810e+02 2.223e+03, threshold=1.246e+03, percent-clipped=2.0 +2023-04-02 21:00:33,034 INFO [train.py:903] (3/4) Epoch 22, batch 1750, loss[loss=0.3032, simple_loss=0.3595, pruned_loss=0.1234, over 13442.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06407, over 3800927.93 frames. ], batch size: 137, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:00:40,320 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145144.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:00:50,966 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.7983, 5.2616, 3.0409, 4.5887, 1.2527, 5.3190, 5.1964, 5.3642], + device='cuda:3'), covar=tensor([0.0425, 0.0829, 0.1910, 0.0740, 0.3964, 0.0492, 0.0765, 0.1041], + device='cuda:3'), in_proj_covar=tensor([0.0503, 0.0414, 0.0495, 0.0346, 0.0406, 0.0434, 0.0428, 0.0462], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:01:09,191 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145169.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:01:31,687 INFO [train.py:903] (3/4) Epoch 22, batch 1800, loss[loss=0.1906, simple_loss=0.2623, pruned_loss=0.05942, over 18695.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06416, over 3816203.40 frames. ], batch size: 41, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:01:56,775 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. 
limit=5.0 +2023-04-02 21:01:57,538 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6469, 1.2379, 1.2607, 1.5060, 1.0808, 1.4289, 1.2807, 1.4778], + device='cuda:3'), covar=tensor([0.1129, 0.1252, 0.1591, 0.1044, 0.1365, 0.0603, 0.1506, 0.0857], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0359, 0.0314, 0.0251, 0.0302, 0.0252, 0.0311, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:02:27,947 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.845e+02 5.086e+02 5.993e+02 7.804e+02 1.410e+03, threshold=1.199e+03, percent-clipped=2.0 +2023-04-02 21:02:27,971 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-02 21:02:31,524 INFO [train.py:903] (3/4) Epoch 22, batch 1850, loss[loss=0.1949, simple_loss=0.2816, pruned_loss=0.0541, over 19660.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2873, pruned_loss=0.06422, over 3821717.05 frames. ], batch size: 55, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:04,102 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 21:03:30,818 INFO [train.py:903] (3/4) Epoch 22, batch 1900, loss[loss=0.1987, simple_loss=0.2847, pruned_loss=0.0564, over 19451.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2877, pruned_loss=0.06392, over 3828819.08 frames. ], batch size: 64, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:03:48,271 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 21:03:52,770 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 21:04:15,275 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 21:04:26,520 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.830e+02 5.286e+02 6.049e+02 6.874e+02 1.450e+03, threshold=1.210e+03, percent-clipped=2.0 +2023-04-02 21:04:30,789 INFO [train.py:903] (3/4) Epoch 22, batch 1950, loss[loss=0.1811, simple_loss=0.2562, pruned_loss=0.05304, over 19333.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2883, pruned_loss=0.06411, over 3815106.00 frames. ], batch size: 44, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:04:38,867 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-02 21:04:55,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145358.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:31,551 INFO [train.py:903] (3/4) Epoch 22, batch 2000, loss[loss=0.1718, simple_loss=0.2589, pruned_loss=0.04233, over 19848.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2879, pruned_loss=0.06364, over 3806926.22 frames. 
], batch size: 52, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:05:32,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:05:33,000 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145389.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:19,686 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145428.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:06:27,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.585e+02 4.801e+02 6.060e+02 7.916e+02 1.266e+03, threshold=1.212e+03, percent-clipped=1.0 +2023-04-02 21:06:27,635 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 21:06:30,885 INFO [train.py:903] (3/4) Epoch 22, batch 2050, loss[loss=0.1966, simple_loss=0.2682, pruned_loss=0.06245, over 19744.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2873, pruned_loss=0.06333, over 3820909.59 frames. ], batch size: 46, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:06:46,540 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 21:06:46,569 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-02 21:07:06,389 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 21:07:13,320 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145473.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:07:30,917 INFO [train.py:903] (3/4) Epoch 22, batch 2100, loss[loss=0.2107, simple_loss=0.2733, pruned_loss=0.07407, over 19725.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2875, pruned_loss=0.06355, over 3819295.27 frames. ], batch size: 46, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:07:51,410 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145504.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:08:01,441 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 21:08:22,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 21:08:27,088 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.088e+02 4.926e+02 6.113e+02 7.931e+02 1.598e+03, threshold=1.223e+03, percent-clipped=5.0 +2023-04-02 21:08:30,650 INFO [train.py:903] (3/4) Epoch 22, batch 2150, loss[loss=0.1875, simple_loss=0.2636, pruned_loss=0.05567, over 19754.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2874, pruned_loss=0.06363, over 3835448.63 frames. ], batch size: 46, lr: 3.76e-03, grad_scale: 8.0 +2023-04-02 21:08:38,981 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145543.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:09:32,291 INFO [train.py:903] (3/4) Epoch 22, batch 2200, loss[loss=0.2179, simple_loss=0.2827, pruned_loss=0.07653, over 19336.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2888, pruned_loss=0.06456, over 3819558.91 frames. 
], batch size: 44, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:10:00,321 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145612.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:10:29,862 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.056e+02 6.179e+02 8.064e+02 2.249e+03, threshold=1.236e+03, percent-clipped=3.0 +2023-04-02 21:10:32,066 INFO [train.py:903] (3/4) Epoch 22, batch 2250, loss[loss=0.2301, simple_loss=0.3085, pruned_loss=0.07584, over 19337.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06423, over 3827077.63 frames. ], batch size: 66, lr: 3.76e-03, grad_scale: 4.0 +2023-04-02 21:10:50,655 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3682, 1.1935, 1.2321, 1.8048, 1.3325, 1.5232, 1.4937, 1.4115], + device='cuda:3'), covar=tensor([0.1033, 0.1227, 0.1184, 0.0708, 0.0909, 0.0917, 0.0955, 0.0902], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0221, 0.0225, 0.0240, 0.0226, 0.0211, 0.0186, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 21:11:01,371 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:11:33,804 INFO [train.py:903] (3/4) Epoch 22, batch 2300, loss[loss=0.2556, simple_loss=0.329, pruned_loss=0.0911, over 19358.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2878, pruned_loss=0.06411, over 3831828.14 frames. ], batch size: 66, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:11:45,979 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 21:12:22,731 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145729.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:27,056 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145733.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:12:27,584 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-02 21:12:30,302 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.899e+02 6.109e+02 7.402e+02 2.135e+03, threshold=1.222e+03, percent-clipped=2.0 +2023-04-02 21:12:32,754 INFO [train.py:903] (3/4) Epoch 22, batch 2350, loss[loss=0.1803, simple_loss=0.253, pruned_loss=0.0538, over 19741.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2879, pruned_loss=0.06402, over 3834222.55 frames. ], batch size: 45, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:12:54,218 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145754.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:01,136 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145760.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:14,112 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 21:13:31,591 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 21:13:31,939 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145785.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:13:35,673 INFO [train.py:903] (3/4) Epoch 22, batch 2400, loss[loss=0.2605, simple_loss=0.3344, pruned_loss=0.09332, over 19544.00 frames. 
], tot_loss[loss=0.208, simple_loss=0.2881, pruned_loss=0.06393, over 3821880.76 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:13:48,799 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145799.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:20,082 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145824.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:14:34,014 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.826e+02 5.747e+02 7.009e+02 1.532e+03, threshold=1.149e+03, percent-clipped=5.0 +2023-04-02 21:14:36,522 INFO [train.py:903] (3/4) Epoch 22, batch 2450, loss[loss=0.2106, simple_loss=0.2999, pruned_loss=0.06062, over 19599.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2871, pruned_loss=0.06326, over 3824539.83 frames. ], batch size: 57, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:14:38,006 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145839.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:14:49,281 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145848.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:15:37,466 INFO [train.py:903] (3/4) Epoch 22, batch 2500, loss[loss=0.2928, simple_loss=0.349, pruned_loss=0.1183, over 13237.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2875, pruned_loss=0.06327, over 3829375.11 frames. ], batch size: 136, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:01,264 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6326, 1.3758, 1.4624, 1.4658, 3.2064, 1.1718, 2.3715, 3.6475], + device='cuda:3'), covar=tensor([0.0511, 0.2823, 0.3057, 0.1976, 0.0689, 0.2483, 0.1291, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0410, 0.0367, 0.0388, 0.0351, 0.0376, 0.0350, 0.0381, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:16:34,428 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.066e+02 4.836e+02 5.791e+02 7.519e+02 1.267e+03, threshold=1.158e+03, percent-clipped=1.0 +2023-04-02 21:16:36,601 INFO [train.py:903] (3/4) Epoch 22, batch 2550, loss[loss=0.2355, simple_loss=0.3173, pruned_loss=0.07689, over 18116.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2882, pruned_loss=0.064, over 3828756.78 frames. ], batch size: 83, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:16:59,330 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=145956.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:17:33,989 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 21:17:38,147 INFO [train.py:903] (3/4) Epoch 22, batch 2600, loss[loss=0.1999, simple_loss=0.2914, pruned_loss=0.0542, over 19522.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.288, pruned_loss=0.0641, over 3825071.26 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:17:59,423 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146005.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:18:38,070 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.022e+02 6.231e+02 7.783e+02 1.698e+03, threshold=1.246e+03, percent-clipped=5.0 +2023-04-02 21:18:40,363 INFO [train.py:903] (3/4) Epoch 22, batch 2650, loss[loss=0.1888, simple_loss=0.2694, pruned_loss=0.05404, over 19732.00 frames. 
], tot_loss[loss=0.2088, simple_loss=0.2885, pruned_loss=0.06457, over 3813536.76 frames. ], batch size: 51, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:19:00,431 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 21:19:21,403 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146071.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:19:25,863 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3466, 1.3702, 1.5645, 1.4952, 1.7455, 1.8535, 1.8126, 0.5708], + device='cuda:3'), covar=tensor([0.2433, 0.4285, 0.2562, 0.1961, 0.1626, 0.2292, 0.1395, 0.4791], + device='cuda:3'), in_proj_covar=tensor([0.0536, 0.0644, 0.0714, 0.0483, 0.0620, 0.0531, 0.0663, 0.0551], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 21:19:41,310 INFO [train.py:903] (3/4) Epoch 22, batch 2700, loss[loss=0.181, simple_loss=0.2601, pruned_loss=0.05095, over 19760.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.06384, over 3817605.63 frames. ], batch size: 45, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:20:01,757 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146104.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:20,683 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146120.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:32,016 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146129.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:20:39,324 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.787e+02 6.288e+02 8.148e+02 2.582e+03, threshold=1.258e+03, percent-clipped=4.0 +2023-04-02 21:20:41,736 INFO [train.py:903] (3/4) Epoch 22, batch 2750, loss[loss=0.215, simple_loss=0.2996, pruned_loss=0.06522, over 19485.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2864, pruned_loss=0.06348, over 3830635.82 frames. 
], batch size: 64, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:20:44,451 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2220, 2.2659, 2.4352, 2.8932, 2.1490, 2.7661, 2.4247, 2.2840], + device='cuda:3'), covar=tensor([0.4659, 0.4351, 0.2069, 0.2782, 0.4671, 0.2378, 0.5189, 0.3524], + device='cuda:3'), in_proj_covar=tensor([0.0899, 0.0963, 0.0718, 0.0934, 0.0880, 0.0816, 0.0845, 0.0782], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 21:21:18,244 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:21:29,665 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5070, 1.4271, 2.1580, 1.6155, 2.9807, 4.7122, 4.6083, 5.0766], + device='cuda:3'), covar=tensor([0.1564, 0.3774, 0.3047, 0.2338, 0.0628, 0.0204, 0.0165, 0.0185], + device='cuda:3'), in_proj_covar=tensor([0.0271, 0.0319, 0.0351, 0.0264, 0.0241, 0.0186, 0.0214, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 21:21:37,445 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146183.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:21:43,824 INFO [train.py:903] (3/4) Epoch 22, batch 2800, loss[loss=0.1781, simple_loss=0.2573, pruned_loss=0.04941, over 19393.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.286, pruned_loss=0.06304, over 3839976.28 frames. ], batch size: 47, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:22:04,640 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6539, 1.4258, 1.5189, 2.1322, 1.5436, 1.8785, 1.9296, 1.6329], + device='cuda:3'), covar=tensor([0.0897, 0.1123, 0.1090, 0.0799, 0.0985, 0.0853, 0.0970, 0.0833], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0220, 0.0223, 0.0238, 0.0224, 0.0209, 0.0186, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 21:22:19,574 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-02 21:22:42,904 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.980e+02 4.555e+02 5.863e+02 7.335e+02 1.249e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-02 21:22:45,126 INFO [train.py:903] (3/4) Epoch 22, batch 2850, loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05786, over 19353.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2871, pruned_loss=0.06314, over 3824199.38 frames. ], batch size: 70, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:42,925 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 21:23:45,159 INFO [train.py:903] (3/4) Epoch 22, batch 2900, loss[loss=0.1868, simple_loss=0.2732, pruned_loss=0.05022, over 19592.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2878, pruned_loss=0.06346, over 3821538.20 frames. 
], batch size: 52, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:23:57,327 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146298.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:24:33,232 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146327.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:24:43,683 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 4.789e+02 5.854e+02 7.393e+02 1.532e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-04-02 21:24:45,869 INFO [train.py:903] (3/4) Epoch 22, batch 2950, loss[loss=0.224, simple_loss=0.3026, pruned_loss=0.07268, over 19663.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06404, over 3826735.79 frames. ], batch size: 59, lr: 3.75e-03, grad_scale: 8.0 +2023-04-02 21:25:04,151 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146352.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:31,426 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1404, 1.9833, 1.6102, 1.9166, 1.8765, 1.5788, 1.4318, 1.8825], + device='cuda:3'), covar=tensor([0.1055, 0.1562, 0.1746, 0.1165, 0.1471, 0.0775, 0.1807, 0.0826], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0354, 0.0309, 0.0248, 0.0298, 0.0249, 0.0306, 0.0253], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:25:32,592 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146376.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:25:46,769 INFO [train.py:903] (3/4) Epoch 22, batch 3000, loss[loss=0.2448, simple_loss=0.3168, pruned_loss=0.08644, over 12784.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06378, over 3826800.29 frames. ], batch size: 136, lr: 3.75e-03, grad_scale: 4.0 +2023-04-02 21:25:46,770 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 21:25:59,181 INFO [train.py:937] (3/4) Epoch 22, validation: loss=0.1687, simple_loss=0.2687, pruned_loss=0.0344, over 944034.00 frames. +2023-04-02 21:25:59,182 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 21:26:02,607 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 21:26:16,170 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146401.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:26:37,349 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1132, 3.4147, 2.0819, 2.2513, 3.0844, 1.8757, 1.5973, 2.1972], + device='cuda:3'), covar=tensor([0.1301, 0.0573, 0.1045, 0.0757, 0.0469, 0.1150, 0.0899, 0.0685], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0318, 0.0340, 0.0267, 0.0249, 0.0338, 0.0293, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:26:58,610 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 5.066e+02 6.686e+02 8.533e+02 1.871e+03, threshold=1.337e+03, percent-clipped=6.0 +2023-04-02 21:26:59,734 INFO [train.py:903] (3/4) Epoch 22, batch 3050, loss[loss=0.2123, simple_loss=0.2786, pruned_loss=0.07299, over 19783.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2873, pruned_loss=0.06386, over 3836213.66 frames. 
], batch size: 46, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:27:20,073 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8319, 4.2913, 4.5039, 4.5424, 1.7964, 4.2320, 3.7406, 4.2499], + device='cuda:3'), covar=tensor([0.1697, 0.0799, 0.0603, 0.0712, 0.5655, 0.0812, 0.0664, 0.1148], + device='cuda:3'), in_proj_covar=tensor([0.0785, 0.0743, 0.0948, 0.0832, 0.0833, 0.0708, 0.0570, 0.0882], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 21:28:00,898 INFO [train.py:903] (3/4) Epoch 22, batch 3100, loss[loss=0.2241, simple_loss=0.3101, pruned_loss=0.06902, over 19587.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2874, pruned_loss=0.06411, over 3818927.00 frames. ], batch size: 61, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:28:27,624 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146511.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:28:59,180 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.077e+02 6.322e+02 8.082e+02 1.628e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-02 21:29:00,359 INFO [train.py:903] (3/4) Epoch 22, batch 3150, loss[loss=0.2044, simple_loss=0.2862, pruned_loss=0.06124, over 19762.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2875, pruned_loss=0.06408, over 3820335.38 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 4.0 +2023-04-02 21:29:20,747 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146554.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 21:29:29,191 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 21:29:51,077 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146579.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 21:30:00,731 INFO [train.py:903] (3/4) Epoch 22, batch 3200, loss[loss=0.2226, simple_loss=0.304, pruned_loss=0.07062, over 19561.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2866, pruned_loss=0.06356, over 3834203.59 frames. 
], batch size: 61, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:30:04,651 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3623, 1.4372, 1.7496, 1.6045, 2.4748, 2.1483, 2.6041, 1.0335], + device='cuda:3'), covar=tensor([0.2553, 0.4292, 0.2570, 0.1925, 0.1505, 0.2155, 0.1388, 0.4450], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0639, 0.0707, 0.0479, 0.0615, 0.0526, 0.0660, 0.0547], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 21:30:20,494 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5236, 2.2766, 1.6263, 1.5529, 1.9972, 1.2359, 1.4711, 1.8943], + device='cuda:3'), covar=tensor([0.1037, 0.0775, 0.1162, 0.0879, 0.0667, 0.1430, 0.0749, 0.0562], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0267, 0.0249, 0.0338, 0.0293, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:30:28,042 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146609.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:30:40,791 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6801, 1.5088, 1.6333, 1.4886, 3.2944, 1.1991, 2.4886, 3.6871], + device='cuda:3'), covar=tensor([0.0452, 0.2576, 0.2617, 0.1927, 0.0691, 0.2396, 0.1132, 0.0235], + device='cuda:3'), in_proj_covar=tensor([0.0408, 0.0364, 0.0386, 0.0349, 0.0372, 0.0348, 0.0380, 0.0404], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:30:47,443 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146626.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:01,672 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 4.719e+02 5.878e+02 7.469e+02 1.229e+03, threshold=1.176e+03, percent-clipped=0.0 +2023-04-02 21:31:02,824 INFO [train.py:903] (3/4) Epoch 22, batch 3250, loss[loss=0.1961, simple_loss=0.282, pruned_loss=0.05506, over 17477.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2871, pruned_loss=0.06405, over 3822772.42 frames. ], batch size: 101, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:31:10,883 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:31:25,803 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1774, 2.8677, 2.2732, 2.2597, 2.0480, 2.5376, 1.0520, 2.0628], + device='cuda:3'), covar=tensor([0.0616, 0.0584, 0.0615, 0.0960, 0.0974, 0.0957, 0.1270, 0.0984], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0356, 0.0358, 0.0382, 0.0457, 0.0386, 0.0336, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 21:32:03,196 INFO [train.py:903] (3/4) Epoch 22, batch 3300, loss[loss=0.1914, simple_loss=0.2772, pruned_loss=0.05281, over 19775.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2868, pruned_loss=0.06424, over 3822323.48 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:32:09,858 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-02 21:32:11,322 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0489, 1.7631, 1.9167, 2.6095, 2.0514, 2.2498, 2.3324, 2.1138], + device='cuda:3'), covar=tensor([0.0719, 0.0859, 0.0888, 0.0721, 0.0831, 0.0713, 0.0801, 0.0625], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0225, 0.0240, 0.0226, 0.0210, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 21:32:59,758 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.268e+02 6.638e+02 8.509e+02 1.642e+03, threshold=1.328e+03, percent-clipped=7.0 +2023-04-02 21:33:00,760 INFO [train.py:903] (3/4) Epoch 22, batch 3350, loss[loss=0.2778, simple_loss=0.3385, pruned_loss=0.1086, over 19700.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2883, pruned_loss=0.06491, over 3820137.38 frames. ], batch size: 63, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:33:41,153 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8313, 4.4285, 2.8587, 3.8830, 1.0449, 4.3082, 4.2459, 4.3733], + device='cuda:3'), covar=tensor([0.0555, 0.0999, 0.1751, 0.0742, 0.3897, 0.0619, 0.0831, 0.0963], + device='cuda:3'), in_proj_covar=tensor([0.0504, 0.0411, 0.0492, 0.0344, 0.0402, 0.0431, 0.0424, 0.0459], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:34:00,057 INFO [train.py:903] (3/4) Epoch 22, batch 3400, loss[loss=0.2644, simple_loss=0.348, pruned_loss=0.09043, over 19719.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06411, over 3810186.29 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:34:59,710 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 5.013e+02 6.159e+02 8.066e+02 2.491e+03, threshold=1.232e+03, percent-clipped=4.0 +2023-04-02 21:35:00,913 INFO [train.py:903] (3/4) Epoch 22, batch 3450, loss[loss=0.2454, simple_loss=0.3211, pruned_loss=0.08487, over 19539.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2886, pruned_loss=0.06472, over 3789411.92 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:35:04,225 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 21:35:28,808 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146862.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:35:54,065 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146882.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:35:55,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8772, 4.2745, 4.6066, 4.6173, 1.7292, 4.3487, 3.7832, 4.2787], + device='cuda:3'), covar=tensor([0.1661, 0.0903, 0.0619, 0.0663, 0.6096, 0.0912, 0.0672, 0.1224], + device='cuda:3'), in_proj_covar=tensor([0.0778, 0.0739, 0.0944, 0.0827, 0.0826, 0.0707, 0.0567, 0.0878], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 21:35:59,851 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 21:36:01,458 INFO [train.py:903] (3/4) Epoch 22, batch 3500, loss[loss=0.2569, simple_loss=0.3273, pruned_loss=0.09323, over 17301.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06507, over 3780752.04 frames. 
], batch size: 101, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:36:23,421 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146907.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:37:00,108 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.850e+02 4.615e+02 6.325e+02 8.235e+02 2.059e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-02 21:37:01,340 INFO [train.py:903] (3/4) Epoch 22, batch 3550, loss[loss=0.1927, simple_loss=0.2609, pruned_loss=0.06226, over 19322.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2883, pruned_loss=0.065, over 3780646.32 frames. ], batch size: 44, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:37:18,272 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146953.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:38:02,336 INFO [train.py:903] (3/4) Epoch 22, batch 3600, loss[loss=0.2091, simple_loss=0.2958, pruned_loss=0.06122, over 19673.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2882, pruned_loss=0.06476, over 3800233.71 frames. ], batch size: 60, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:38:02,530 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=146988.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:39:01,661 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.391e+02 4.948e+02 6.297e+02 8.667e+02 2.605e+03, threshold=1.259e+03, percent-clipped=8.0 +2023-04-02 21:39:02,712 INFO [train.py:903] (3/4) Epoch 22, batch 3650, loss[loss=0.2066, simple_loss=0.2891, pruned_loss=0.06203, over 19735.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2886, pruned_loss=0.06514, over 3800472.66 frames. ], batch size: 63, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:39:07,563 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6007, 1.5280, 1.4582, 2.0560, 1.4700, 2.0120, 1.9422, 1.7547], + device='cuda:3'), covar=tensor([0.0855, 0.0897, 0.1027, 0.0733, 0.0907, 0.0681, 0.0792, 0.0663], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0222, 0.0226, 0.0241, 0.0227, 0.0211, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-02 21:39:25,249 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2751, 2.1725, 1.9823, 1.8358, 1.6255, 1.8638, 0.6159, 1.2825], + device='cuda:3'), covar=tensor([0.0631, 0.0645, 0.0529, 0.0875, 0.1282, 0.0945, 0.1409, 0.1055], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0356, 0.0360, 0.0383, 0.0459, 0.0387, 0.0337, 0.0342], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 21:39:39,058 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:03,857 INFO [train.py:903] (3/4) Epoch 22, batch 3700, loss[loss=0.2268, simple_loss=0.31, pruned_loss=0.07177, over 19504.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2896, pruned_loss=0.06561, over 3789821.95 frames. ], batch size: 64, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:40:20,504 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-02 21:40:21,169 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147103.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:40:25,103 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.93 vs. 
limit=5.0 +2023-04-02 21:41:02,895 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.816e+02 5.960e+02 7.144e+02 1.653e+03, threshold=1.192e+03, percent-clipped=4.0 +2023-04-02 21:41:04,071 INFO [train.py:903] (3/4) Epoch 22, batch 3750, loss[loss=0.2003, simple_loss=0.2887, pruned_loss=0.05594, over 19056.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2895, pruned_loss=0.06524, over 3800247.49 frames. ], batch size: 69, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:42:04,509 INFO [train.py:903] (3/4) Epoch 22, batch 3800, loss[loss=0.2106, simple_loss=0.2867, pruned_loss=0.06727, over 19674.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2874, pruned_loss=0.06442, over 3810319.03 frames. ], batch size: 53, lr: 3.74e-03, grad_scale: 8.0 +2023-04-02 21:42:26,702 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147206.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:42:38,890 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-02 21:42:40,377 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4381, 1.2313, 1.5771, 1.7240, 2.9911, 1.3357, 2.2132, 3.3258], + device='cuda:3'), covar=tensor([0.0553, 0.2978, 0.2827, 0.1618, 0.0723, 0.2212, 0.1339, 0.0309], + device='cuda:3'), in_proj_covar=tensor([0.0405, 0.0363, 0.0384, 0.0347, 0.0372, 0.0346, 0.0378, 0.0401], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:43:02,578 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.867e+02 5.077e+02 5.970e+02 7.548e+02 1.289e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-04-02 21:43:03,536 INFO [train.py:903] (3/4) Epoch 22, batch 3850, loss[loss=0.2336, simple_loss=0.31, pruned_loss=0.07863, over 19596.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2874, pruned_loss=0.0642, over 3827729.27 frames. ], batch size: 57, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:43:31,150 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147259.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:05,363 INFO [train.py:903] (3/4) Epoch 22, batch 3900, loss[loss=0.2257, simple_loss=0.3044, pruned_loss=0.07352, over 13385.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2879, pruned_loss=0.06437, over 3819136.97 frames. 
], batch size: 135, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:44:42,217 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1711, 2.0560, 1.9432, 2.2036, 2.0456, 1.8372, 1.9086, 2.1094], + device='cuda:3'), covar=tensor([0.0797, 0.1175, 0.1176, 0.0900, 0.1063, 0.0512, 0.1097, 0.0590], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0354, 0.0310, 0.0250, 0.0301, 0.0251, 0.0308, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:44:46,178 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147321.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:49,712 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147324.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:44:55,066 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147328.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:04,849 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 4.890e+02 6.799e+02 8.609e+02 1.784e+03, threshold=1.360e+03, percent-clipped=9.0 +2023-04-02 21:45:05,883 INFO [train.py:903] (3/4) Epoch 22, batch 3950, loss[loss=0.256, simple_loss=0.3289, pruned_loss=0.09159, over 19773.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2889, pruned_loss=0.06505, over 3794052.01 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:45:08,151 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-02 21:45:18,232 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147349.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:45:30,101 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147359.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:00,887 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:04,830 INFO [train.py:903] (3/4) Epoch 22, batch 4000, loss[loss=0.2191, simple_loss=0.2948, pruned_loss=0.07166, over 19591.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2881, pruned_loss=0.0647, over 3791870.42 frames. ], batch size: 57, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:46:50,160 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147425.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:46:52,234 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-02 21:47:03,830 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.354e+02 4.920e+02 5.925e+02 7.748e+02 1.160e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-04-02 21:47:05,863 INFO [train.py:903] (3/4) Epoch 22, batch 4050, loss[loss=0.2177, simple_loss=0.2926, pruned_loss=0.07138, over 17472.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2884, pruned_loss=0.06455, over 3799384.97 frames. 
], batch size: 101, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:47:24,617 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147452.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:47:40,272 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1908, 1.4497, 1.8883, 1.6350, 3.0706, 4.5900, 4.4141, 5.0299], + device='cuda:3'), covar=tensor([0.1753, 0.3794, 0.3516, 0.2317, 0.0581, 0.0203, 0.0162, 0.0156], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0324, 0.0356, 0.0267, 0.0245, 0.0188, 0.0217, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 21:47:51,679 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2294, 2.2996, 2.4810, 2.9497, 2.2365, 2.8572, 2.4603, 2.2883], + device='cuda:3'), covar=tensor([0.4085, 0.4017, 0.1887, 0.2673, 0.4516, 0.2187, 0.4752, 0.3257], + device='cuda:3'), in_proj_covar=tensor([0.0899, 0.0964, 0.0714, 0.0932, 0.0877, 0.0815, 0.0842, 0.0779], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 21:48:07,526 INFO [train.py:903] (3/4) Epoch 22, batch 4100, loss[loss=0.2339, simple_loss=0.3106, pruned_loss=0.07859, over 18733.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.289, pruned_loss=0.06485, over 3802502.40 frames. ], batch size: 74, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:48:44,884 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-02 21:49:04,890 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1369, 1.8202, 1.6672, 2.0352, 1.7607, 1.7890, 1.6370, 2.0168], + device='cuda:3'), covar=tensor([0.0990, 0.1447, 0.1492, 0.0993, 0.1318, 0.0559, 0.1482, 0.0704], + device='cuda:3'), in_proj_covar=tensor([0.0269, 0.0355, 0.0311, 0.0250, 0.0301, 0.0251, 0.0309, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:49:07,917 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 5.254e+02 6.349e+02 7.493e+02 1.711e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-04-02 21:49:09,113 INFO [train.py:903] (3/4) Epoch 22, batch 4150, loss[loss=0.18, simple_loss=0.2657, pruned_loss=0.0471, over 19413.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06438, over 3792173.13 frames. ], batch size: 48, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:49:56,983 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147577.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:09,104 INFO [train.py:903] (3/4) Epoch 22, batch 4200, loss[loss=0.2437, simple_loss=0.3105, pruned_loss=0.08845, over 13196.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2889, pruned_loss=0.06478, over 3787490.67 frames. ], batch size: 136, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:50:13,779 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-02 21:50:15,176 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6069, 1.3087, 1.5058, 1.4153, 3.2064, 1.0115, 2.2797, 3.5786], + device='cuda:3'), covar=tensor([0.0498, 0.2843, 0.2948, 0.1994, 0.0772, 0.2607, 0.1315, 0.0269], + device='cuda:3'), in_proj_covar=tensor([0.0407, 0.0365, 0.0387, 0.0349, 0.0373, 0.0346, 0.0380, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:50:26,629 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147602.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:27,412 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147603.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:50:59,156 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7431, 1.6502, 1.8459, 1.8865, 3.3766, 1.3886, 2.5575, 3.7272], + device='cuda:3'), covar=tensor([0.0484, 0.2614, 0.2547, 0.1682, 0.0702, 0.2331, 0.1400, 0.0250], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0364, 0.0385, 0.0348, 0.0373, 0.0346, 0.0380, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:51:09,771 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.286e+02 4.933e+02 5.671e+02 7.351e+02 2.024e+03, threshold=1.134e+03, percent-clipped=5.0 +2023-04-02 21:51:10,939 INFO [train.py:903] (3/4) Epoch 22, batch 4250, loss[loss=0.196, simple_loss=0.2745, pruned_loss=0.05875, over 19483.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2888, pruned_loss=0.06475, over 3784992.06 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:51:25,902 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-02 21:51:38,186 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-02 21:51:45,138 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2066, 1.2694, 1.7375, 1.3288, 2.6165, 3.5236, 3.2010, 3.6960], + device='cuda:3'), covar=tensor([0.1711, 0.3950, 0.3450, 0.2448, 0.0616, 0.0182, 0.0214, 0.0264], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0324, 0.0355, 0.0267, 0.0245, 0.0188, 0.0216, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 21:51:51,748 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147672.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:51:57,375 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147677.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:52:11,231 INFO [train.py:903] (3/4) Epoch 22, batch 4300, loss[loss=0.1882, simple_loss=0.2702, pruned_loss=0.05311, over 19624.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2875, pruned_loss=0.06386, over 3799346.24 frames. 
], batch size: 50, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:52:47,218 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147718.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:52:52,679 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5802, 1.5099, 1.4482, 1.8985, 1.4744, 1.7461, 1.8175, 1.6884], + device='cuda:3'), covar=tensor([0.0856, 0.0952, 0.1060, 0.0763, 0.0863, 0.0817, 0.0852, 0.0709], + device='cuda:3'), in_proj_covar=tensor([0.0209, 0.0219, 0.0222, 0.0237, 0.0224, 0.0211, 0.0184, 0.0202], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 21:53:02,985 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-02 21:53:11,622 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 5.055e+02 6.219e+02 8.115e+02 2.735e+03, threshold=1.244e+03, percent-clipped=11.0 +2023-04-02 21:53:11,644 INFO [train.py:903] (3/4) Epoch 22, batch 4350, loss[loss=0.196, simple_loss=0.2775, pruned_loss=0.05724, over 19609.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2889, pruned_loss=0.06485, over 3789527.13 frames. ], batch size: 50, lr: 3.73e-03, grad_scale: 4.0 +2023-04-02 21:53:48,737 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147769.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:11,201 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147787.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:12,001 INFO [train.py:903] (3/4) Epoch 22, batch 4400, loss[loss=0.2259, simple_loss=0.3182, pruned_loss=0.06676, over 19263.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2887, pruned_loss=0.06502, over 3784693.42 frames. ], batch size: 66, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:54:20,677 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=147796.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:54:37,131 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-02 21:54:46,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-02 21:55:06,308 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147833.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:55:12,512 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.137e+02 6.374e+02 7.743e+02 1.534e+03, threshold=1.275e+03, percent-clipped=3.0 +2023-04-02 21:55:12,529 INFO [train.py:903] (3/4) Epoch 22, batch 4450, loss[loss=0.1824, simple_loss=0.2664, pruned_loss=0.04923, over 19619.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2887, pruned_loss=0.06514, over 3771869.93 frames. 
], batch size: 50, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:55:57,402 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:56:08,754 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147884.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:56:08,809 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3649, 2.1363, 1.5836, 1.4026, 1.9727, 1.2644, 1.2086, 1.7923], + device='cuda:3'), covar=tensor([0.1117, 0.0788, 0.1085, 0.0925, 0.0545, 0.1327, 0.0762, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0336, 0.0263, 0.0248, 0.0336, 0.0289, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:56:12,918 INFO [train.py:903] (3/4) Epoch 22, batch 4500, loss[loss=0.2017, simple_loss=0.2895, pruned_loss=0.05696, over 19787.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2881, pruned_loss=0.06477, over 3784589.89 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:56:41,882 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147911.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:03,801 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147929.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:57:15,412 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 4.535e+02 5.624e+02 7.235e+02 1.683e+03, threshold=1.125e+03, percent-clipped=3.0 +2023-04-02 21:57:15,434 INFO [train.py:903] (3/4) Epoch 22, batch 4550, loss[loss=0.2346, simple_loss=0.3149, pruned_loss=0.07719, over 19520.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2895, pruned_loss=0.06507, over 3800845.15 frames. ], batch size: 54, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:57:23,466 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-02 21:57:46,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-02 21:57:58,529 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147974.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:15,825 INFO [train.py:903] (3/4) Epoch 22, batch 4600, loss[loss=0.2152, simple_loss=0.2965, pruned_loss=0.06698, over 19662.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.289, pruned_loss=0.06496, over 3810633.56 frames. 
], batch size: 55, lr: 3.73e-03, grad_scale: 8.0 +2023-04-02 21:58:28,678 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147999.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:58:56,686 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148021.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:14,041 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6137, 1.2847, 1.2170, 1.4898, 1.1622, 1.3329, 1.2239, 1.4026], + device='cuda:3'), covar=tensor([0.1087, 0.1164, 0.1556, 0.1030, 0.1288, 0.0632, 0.1500, 0.0821], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0354, 0.0311, 0.0250, 0.0301, 0.0250, 0.0308, 0.0254], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 21:59:16,085 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.857e+02 6.032e+02 8.227e+02 1.754e+03, threshold=1.206e+03, percent-clipped=4.0 +2023-04-02 21:59:16,102 INFO [train.py:903] (3/4) Epoch 22, batch 4650, loss[loss=0.1883, simple_loss=0.26, pruned_loss=0.05835, over 19747.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2888, pruned_loss=0.06494, over 3814268.20 frames. ], batch size: 46, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 21:59:22,687 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148043.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 21:59:32,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-02 21:59:43,982 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-02 21:59:53,353 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148068.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:00:16,032 INFO [train.py:903] (3/4) Epoch 22, batch 4700, loss[loss=0.2219, simple_loss=0.3086, pruned_loss=0.06756, over 19650.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06511, over 3818210.70 frames. ], batch size: 58, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:00:39,956 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-02 22:01:15,659 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148136.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:17,614 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.448e+02 6.322e+02 7.572e+02 1.580e+03, threshold=1.264e+03, percent-clipped=4.0 +2023-04-02 22:01:17,632 INFO [train.py:903] (3/4) Epoch 22, batch 4750, loss[loss=0.236, simple_loss=0.3098, pruned_loss=0.08105, over 19688.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2886, pruned_loss=0.06486, over 3826504.83 frames. 
], batch size: 59, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:01:21,299 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148140.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:50,465 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:01:52,792 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148167.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:05,586 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148177.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:18,759 INFO [train.py:903] (3/4) Epoch 22, batch 4800, loss[loss=0.1915, simple_loss=0.27, pruned_loss=0.05646, over 19834.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2889, pruned_loss=0.06512, over 3830127.52 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:02:23,669 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148192.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:02:57,716 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148219.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:03:14,872 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5263, 4.1280, 2.6725, 3.6297, 1.2123, 4.0160, 3.9576, 4.0383], + device='cuda:3'), covar=tensor([0.0633, 0.0908, 0.2007, 0.0856, 0.3684, 0.0763, 0.0939, 0.1219], + device='cuda:3'), in_proj_covar=tensor([0.0505, 0.0409, 0.0493, 0.0345, 0.0398, 0.0433, 0.0424, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:03:18,978 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.799e+02 5.785e+02 7.279e+02 1.291e+03, threshold=1.157e+03, percent-clipped=1.0 +2023-04-02 22:03:18,995 INFO [train.py:903] (3/4) Epoch 22, batch 4850, loss[loss=0.1965, simple_loss=0.2781, pruned_loss=0.05747, over 19590.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.0646, over 3837959.17 frames. ], batch size: 52, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:03:44,130 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-02 22:03:47,705 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9294, 1.3061, 1.6452, 1.5299, 3.4889, 1.1699, 2.5265, 3.8943], + device='cuda:3'), covar=tensor([0.0436, 0.2906, 0.2753, 0.1967, 0.0715, 0.2510, 0.1247, 0.0231], + device='cuda:3'), in_proj_covar=tensor([0.0406, 0.0364, 0.0385, 0.0346, 0.0373, 0.0347, 0.0380, 0.0402], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:04:01,911 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148273.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:02,927 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-02 22:04:08,079 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-02 22:04:09,252 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-02 22:04:18,710 INFO [train.py:903] (3/4) Epoch 22, batch 4900, loss[loss=0.2124, simple_loss=0.2682, pruned_loss=0.07828, over 19736.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.0648, over 3842255.61 frames. ], batch size: 45, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:04:18,724 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-02 22:04:24,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:04:37,221 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4311, 1.2846, 1.4899, 1.3842, 3.0319, 1.1112, 2.3395, 3.3587], + device='cuda:3'), covar=tensor([0.0488, 0.2790, 0.2913, 0.1950, 0.0667, 0.2498, 0.1231, 0.0297], + device='cuda:3'), in_proj_covar=tensor([0.0408, 0.0365, 0.0385, 0.0347, 0.0374, 0.0348, 0.0381, 0.0403], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:04:39,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-02 22:04:40,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148305.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:14,441 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148334.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:05:19,520 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.396e+02 6.572e+02 8.433e+02 1.736e+03, threshold=1.314e+03, percent-clipped=6.0 +2023-04-02 22:05:19,538 INFO [train.py:903] (3/4) Epoch 22, batch 4950, loss[loss=0.1809, simple_loss=0.2644, pruned_loss=0.0487, over 19531.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.0651, over 3833523.62 frames. ], batch size: 54, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:05:33,636 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-02 22:05:35,934 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-02 22:06:01,184 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-02 22:06:10,281 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8663, 1.9264, 2.0891, 2.3849, 1.9166, 2.2853, 2.1621, 1.9913], + device='cuda:3'), covar=tensor([0.3512, 0.3133, 0.1533, 0.1982, 0.3207, 0.1712, 0.3729, 0.2651], + device='cuda:3'), in_proj_covar=tensor([0.0897, 0.0963, 0.0716, 0.0929, 0.0879, 0.0815, 0.0841, 0.0781], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 22:06:15,530 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1707, 3.6175, 3.7650, 3.7534, 1.7083, 3.5315, 3.1571, 3.5410], + device='cuda:3'), covar=tensor([0.1656, 0.1434, 0.0728, 0.0842, 0.5569, 0.1228, 0.0703, 0.1161], + device='cuda:3'), in_proj_covar=tensor([0.0772, 0.0739, 0.0939, 0.0820, 0.0823, 0.0703, 0.0562, 0.0874], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 22:06:20,895 INFO [train.py:903] (3/4) Epoch 22, batch 5000, loss[loss=0.1729, simple_loss=0.2457, pruned_loss=0.05005, over 19742.00 frames. 
], tot_loss[loss=0.2086, simple_loss=0.2883, pruned_loss=0.06447, over 3834158.45 frames. ], batch size: 45, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:06:21,237 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148388.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:25,709 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148392.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:06:29,621 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-02 22:06:40,558 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-02 22:06:55,461 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148417.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:19,244 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.597e+02 4.828e+02 6.337e+02 7.923e+02 1.456e+03, threshold=1.267e+03, percent-clipped=1.0 +2023-04-02 22:07:19,262 INFO [train.py:903] (3/4) Epoch 22, batch 5050, loss[loss=0.2127, simple_loss=0.297, pruned_loss=0.0642, over 19526.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.06467, over 3842384.32 frames. ], batch size: 56, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:07:30,120 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148447.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:07:54,481 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-02 22:08:14,134 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9302, 1.3337, 1.0662, 1.0049, 1.1850, 1.0019, 0.9748, 1.1976], + device='cuda:3'), covar=tensor([0.0661, 0.0885, 0.1222, 0.0749, 0.0686, 0.1373, 0.0600, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0317, 0.0337, 0.0265, 0.0249, 0.0338, 0.0291, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:08:19,393 INFO [train.py:903] (3/4) Epoch 22, batch 5100, loss[loss=0.213, simple_loss=0.2868, pruned_loss=0.06962, over 19621.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2893, pruned_loss=0.06493, over 3834976.13 frames. ], batch size: 50, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:08:30,462 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-02 22:08:33,784 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-02 22:08:39,206 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-02 22:08:47,624 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8614, 1.3318, 1.0722, 1.0164, 1.1845, 1.0235, 0.9160, 1.2091], + device='cuda:3'), covar=tensor([0.0673, 0.0919, 0.1155, 0.0740, 0.0580, 0.1315, 0.0594, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0334, 0.0263, 0.0248, 0.0336, 0.0289, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:08:53,294 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148516.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:09:04,405 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0630, 4.4645, 4.8387, 4.8202, 1.8571, 4.5210, 3.9539, 4.5434], + device='cuda:3'), covar=tensor([0.1629, 0.0802, 0.0569, 0.0630, 0.5722, 0.0866, 0.0646, 0.1050], + device='cuda:3'), in_proj_covar=tensor([0.0777, 0.0740, 0.0943, 0.0823, 0.0828, 0.0706, 0.0567, 0.0877], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 22:09:19,526 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 5.468e+02 6.941e+02 9.893e+02 2.948e+03, threshold=1.388e+03, percent-clipped=12.0 +2023-04-02 22:09:19,544 INFO [train.py:903] (3/4) Epoch 22, batch 5150, loss[loss=0.2095, simple_loss=0.2898, pruned_loss=0.0646, over 19548.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2886, pruned_loss=0.06486, over 3819781.03 frames. ], batch size: 54, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:09:31,357 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-02 22:09:32,953 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148548.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:02,810 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148573.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:05,888 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:10:20,793 INFO [train.py:903] (3/4) Epoch 22, batch 5200, loss[loss=0.2374, simple_loss=0.3147, pruned_loss=0.08008, over 13502.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2887, pruned_loss=0.06484, over 3813829.20 frames. ], batch size: 136, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:10:23,564 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148590.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:10:33,156 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-02 22:10:53,704 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148615.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:17,524 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-02 22:11:21,001 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.230e+02 4.558e+02 5.776e+02 7.251e+02 2.001e+03, threshold=1.155e+03, percent-clipped=2.0 +2023-04-02 22:11:21,019 INFO [train.py:903] (3/4) Epoch 22, batch 5250, loss[loss=0.1754, simple_loss=0.248, pruned_loss=0.05139, over 19763.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.0638, over 3817823.41 frames. 
], batch size: 46, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:11:28,745 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:33,740 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148649.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:58,326 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:11:58,441 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148669.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:12:20,276 INFO [train.py:903] (3/4) Epoch 22, batch 5300, loss[loss=0.2331, simple_loss=0.3101, pruned_loss=0.07807, over 19480.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2883, pruned_loss=0.0646, over 3824860.80 frames. ], batch size: 64, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:12:39,144 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-02 22:13:17,770 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148734.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:22,172 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.679e+02 5.309e+02 6.457e+02 8.011e+02 2.116e+03, threshold=1.291e+03, percent-clipped=5.0 +2023-04-02 22:13:22,190 INFO [train.py:903] (3/4) Epoch 22, batch 5350, loss[loss=0.2209, simple_loss=0.2992, pruned_loss=0.07124, over 17249.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06431, over 3812870.66 frames. ], batch size: 101, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:13:54,749 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148764.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:13:55,483 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-02 22:14:16,567 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-02 22:14:24,407 INFO [train.py:903] (3/4) Epoch 22, batch 5400, loss[loss=0.2481, simple_loss=0.3224, pruned_loss=0.08687, over 19678.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2882, pruned_loss=0.06446, over 3803591.62 frames. ], batch size: 60, lr: 3.72e-03, grad_scale: 8.0 +2023-04-02 22:14:28,067 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148791.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:14:34,166 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-02 22:15:24,093 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 4.882e+02 5.931e+02 8.184e+02 2.288e+03, threshold=1.186e+03, percent-clipped=7.0 +2023-04-02 22:15:24,111 INFO [train.py:903] (3/4) Epoch 22, batch 5450, loss[loss=0.2049, simple_loss=0.2838, pruned_loss=0.06296, over 19758.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06437, over 3825866.23 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:15:50,186 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=148860.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:24,046 INFO [train.py:903] (3/4) Epoch 22, batch 5500, loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06426, over 19505.00 frames. 
], tot_loss[loss=0.2087, simple_loss=0.2885, pruned_loss=0.06447, over 3832364.55 frames. ], batch size: 64, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:16:47,587 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148906.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:16:49,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-02 22:17:25,270 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.265e+02 4.956e+02 6.031e+02 8.410e+02 1.981e+03, threshold=1.206e+03, percent-clipped=11.0 +2023-04-02 22:17:25,288 INFO [train.py:903] (3/4) Epoch 22, batch 5550, loss[loss=0.2152, simple_loss=0.3009, pruned_loss=0.06477, over 19742.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2886, pruned_loss=0.06421, over 3839613.57 frames. ], batch size: 63, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:17:33,832 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-02 22:18:10,834 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148975.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:18:11,970 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4627, 2.1187, 1.5633, 1.5037, 1.9722, 1.3166, 1.4003, 1.7494], + device='cuda:3'), covar=tensor([0.1015, 0.0800, 0.1109, 0.0798, 0.0553, 0.1299, 0.0711, 0.0564], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0318, 0.0339, 0.0265, 0.0249, 0.0339, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:18:21,590 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-02 22:18:27,005 INFO [train.py:903] (3/4) Epoch 22, batch 5600, loss[loss=0.2059, simple_loss=0.297, pruned_loss=0.05738, over 19757.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2887, pruned_loss=0.06434, over 3823010.29 frames. ], batch size: 63, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:18:41,576 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1694, 2.8809, 2.2853, 2.2349, 2.0950, 2.5294, 1.1080, 2.0731], + device='cuda:3'), covar=tensor([0.0696, 0.0641, 0.0683, 0.1256, 0.1167, 0.1062, 0.1394, 0.1042], + device='cuda:3'), in_proj_covar=tensor([0.0354, 0.0355, 0.0357, 0.0381, 0.0460, 0.0388, 0.0335, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 22:18:56,784 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149013.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:18:59,351 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-02 22:19:05,995 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149020.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:19:27,592 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.511e+02 5.096e+02 6.059e+02 8.103e+02 1.757e+03, threshold=1.212e+03, percent-clipped=10.0 +2023-04-02 22:19:27,609 INFO [train.py:903] (3/4) Epoch 22, batch 5650, loss[loss=0.2188, simple_loss=0.2992, pruned_loss=0.06924, over 19525.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.289, pruned_loss=0.06461, over 3810086.71 frames. 
], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:19:29,171 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4562, 1.4151, 1.3739, 1.6673, 1.2252, 1.6390, 1.5980, 1.5050], + device='cuda:3'), covar=tensor([0.0907, 0.1022, 0.1095, 0.0823, 0.0989, 0.0828, 0.0931, 0.0791], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0224, 0.0240, 0.0227, 0.0213, 0.0186, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 22:19:35,829 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149045.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:15,157 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-02 22:20:16,299 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149078.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:20:27,395 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4913, 3.6680, 4.0122, 4.0126, 2.3345, 3.7463, 3.3982, 3.8047], + device='cuda:3'), covar=tensor([0.1514, 0.2900, 0.0686, 0.0787, 0.4705, 0.1369, 0.0674, 0.1093], + device='cuda:3'), in_proj_covar=tensor([0.0789, 0.0747, 0.0955, 0.0838, 0.0845, 0.0720, 0.0571, 0.0888], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 22:20:28,279 INFO [train.py:903] (3/4) Epoch 22, batch 5700, loss[loss=0.2294, simple_loss=0.3008, pruned_loss=0.079, over 13335.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2888, pruned_loss=0.06455, over 3797995.00 frames. ], batch size: 138, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:17,818 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149128.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:21:29,598 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 4.618e+02 6.077e+02 7.635e+02 1.470e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-04-02 22:21:29,617 INFO [train.py:903] (3/4) Epoch 22, batch 5750, loss[loss=0.1856, simple_loss=0.2763, pruned_loss=0.04743, over 19771.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06431, over 3800674.77 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:21:30,804 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-02 22:21:39,626 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-02 22:21:46,345 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-02 22:21:59,276 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149162.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:29,865 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149187.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:22:30,652 INFO [train.py:903] (3/4) Epoch 22, batch 5800, loss[loss=0.1825, simple_loss=0.2642, pruned_loss=0.05039, over 19619.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2887, pruned_loss=0.06471, over 3789287.95 frames. 
], batch size: 50, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:22:37,212 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149193.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:23,102 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149231.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:23:30,416 INFO [train.py:903] (3/4) Epoch 22, batch 5850, loss[loss=0.2675, simple_loss=0.3283, pruned_loss=0.1033, over 13621.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06515, over 3789511.56 frames. ], batch size: 135, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:23:31,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.174e+02 6.346e+02 7.936e+02 1.645e+03, threshold=1.269e+03, percent-clipped=7.0 +2023-04-02 22:23:52,875 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149256.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:24:20,905 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-02 22:24:30,287 INFO [train.py:903] (3/4) Epoch 22, batch 5900, loss[loss=0.1873, simple_loss=0.2614, pruned_loss=0.05659, over 19748.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2884, pruned_loss=0.06457, over 3805197.23 frames. ], batch size: 46, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:24:35,574 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-02 22:24:56,377 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-02 22:25:06,676 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1561, 1.3289, 1.5630, 1.4324, 2.7789, 1.0663, 2.2065, 3.0595], + device='cuda:3'), covar=tensor([0.0529, 0.2762, 0.2773, 0.1694, 0.0686, 0.2393, 0.1146, 0.0332], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0368, 0.0388, 0.0348, 0.0376, 0.0352, 0.0384, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:25:12,709 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 22:25:31,732 INFO [train.py:903] (3/4) Epoch 22, batch 5950, loss[loss=0.2536, simple_loss=0.3238, pruned_loss=0.09164, over 19664.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2891, pruned_loss=0.06502, over 3796167.37 frames. ], batch size: 58, lr: 3.71e-03, grad_scale: 4.0 +2023-04-02 22:25:32,879 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.792e+02 4.957e+02 5.985e+02 7.132e+02 1.534e+03, threshold=1.197e+03, percent-clipped=1.0 +2023-04-02 22:25:48,140 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-04-02 22:26:28,512 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149384.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:26:32,557 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0896, 2.8728, 1.8481, 1.6214, 2.8073, 1.6372, 1.4706, 2.3624], + device='cuda:3'), covar=tensor([0.1042, 0.0745, 0.0950, 0.1025, 0.0440, 0.1238, 0.0947, 0.0553], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0317, 0.0338, 0.0265, 0.0247, 0.0337, 0.0290, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:26:33,225 INFO [train.py:903] (3/4) Epoch 22, batch 6000, loss[loss=0.1971, simple_loss=0.2868, pruned_loss=0.05371, over 17384.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2889, pruned_loss=0.06446, over 3791408.18 frames. ], batch size: 101, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:26:33,225 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 22:26:46,897 INFO [train.py:937] (3/4) Epoch 22, validation: loss=0.1681, simple_loss=0.2682, pruned_loss=0.03398, over 944034.00 frames. +2023-04-02 22:26:46,898 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 22:27:13,636 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149409.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:27:48,670 INFO [train.py:903] (3/4) Epoch 22, batch 6050, loss[loss=0.205, simple_loss=0.2909, pruned_loss=0.05953, over 19301.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2885, pruned_loss=0.06439, over 3806831.98 frames. ], batch size: 66, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:27:49,811 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 4.880e+02 5.766e+02 7.280e+02 1.810e+03, threshold=1.153e+03, percent-clipped=3.0 +2023-04-02 22:28:02,676 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149449.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:15,554 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-02 22:28:32,630 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149474.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:28:50,321 INFO [train.py:903] (3/4) Epoch 22, batch 6100, loss[loss=0.2168, simple_loss=0.3045, pruned_loss=0.06455, over 19482.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2885, pruned_loss=0.06485, over 3811353.70 frames. ], batch size: 64, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:42,734 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149532.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:29:48,979 INFO [train.py:903] (3/4) Epoch 22, batch 6150, loss[loss=0.2515, simple_loss=0.3308, pruned_loss=0.08608, over 19326.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2881, pruned_loss=0.06458, over 3817316.28 frames. ], batch size: 66, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:29:50,021 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.761e+02 6.063e+02 7.648e+02 1.908e+03, threshold=1.213e+03, percent-clipped=8.0 +2023-04-02 22:30:19,809 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-02 22:30:44,104 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149583.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:30:49,421 INFO [train.py:903] (3/4) Epoch 22, batch 6200, loss[loss=0.1826, simple_loss=0.2699, pruned_loss=0.04759, over 19753.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06422, over 3815773.69 frames. ], batch size: 51, lr: 3.71e-03, grad_scale: 8.0 +2023-04-02 22:31:24,694 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-02 22:31:36,837 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-02 22:31:51,302 INFO [train.py:903] (3/4) Epoch 22, batch 6250, loss[loss=0.181, simple_loss=0.2724, pruned_loss=0.04483, over 19481.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2876, pruned_loss=0.06378, over 3825179.41 frames. ], batch size: 64, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:31:52,392 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.068e+02 6.178e+02 8.268e+02 1.694e+03, threshold=1.236e+03, percent-clipped=5.0 +2023-04-02 22:32:21,863 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-02 22:32:39,423 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1141, 1.9085, 1.8197, 2.9487, 2.1831, 2.3449, 2.5610, 2.1813], + device='cuda:3'), covar=tensor([0.0833, 0.0905, 0.0967, 0.0676, 0.0765, 0.0732, 0.0832, 0.0651], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0221, 0.0223, 0.0239, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 22:32:52,474 INFO [train.py:903] (3/4) Epoch 22, batch 6300, loss[loss=0.1904, simple_loss=0.2824, pruned_loss=0.04917, over 19661.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06324, over 3814780.35 frames. ], batch size: 55, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:33:51,828 INFO [train.py:903] (3/4) Epoch 22, batch 6350, loss[loss=0.1855, simple_loss=0.2653, pruned_loss=0.05287, over 19606.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2878, pruned_loss=0.06367, over 3826188.65 frames. ], batch size: 50, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:33:52,935 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.551e+02 6.531e+02 8.044e+02 1.579e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-02 22:34:23,954 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149764.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:34:37,667 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8065, 1.7255, 2.0176, 1.7762, 4.3571, 1.0041, 2.5075, 4.6803], + device='cuda:3'), covar=tensor([0.0478, 0.2646, 0.2606, 0.1864, 0.0742, 0.2910, 0.1603, 0.0198], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0368, 0.0388, 0.0347, 0.0375, 0.0353, 0.0383, 0.0405], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:34:52,501 INFO [train.py:903] (3/4) Epoch 22, batch 6400, loss[loss=0.2474, simple_loss=0.3128, pruned_loss=0.09102, over 13538.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2876, pruned_loss=0.06359, over 3819958.67 frames. 
], batch size: 137, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:37,226 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9911, 3.6483, 2.5816, 3.3157, 1.0163, 3.5940, 3.4577, 3.5763], + device='cuda:3'), covar=tensor([0.0779, 0.1166, 0.1827, 0.0935, 0.3683, 0.0777, 0.1014, 0.1277], + device='cuda:3'), in_proj_covar=tensor([0.0499, 0.0408, 0.0493, 0.0343, 0.0398, 0.0429, 0.0422, 0.0457], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:35:54,292 INFO [train.py:903] (3/4) Epoch 22, batch 6450, loss[loss=0.1786, simple_loss=0.2537, pruned_loss=0.05176, over 19811.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2874, pruned_loss=0.06356, over 3807003.22 frames. ], batch size: 48, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:35:55,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.788e+02 5.699e+02 7.070e+02 1.580e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-02 22:36:39,138 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-02 22:36:40,519 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149876.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:36:50,720 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9496, 2.0259, 2.2862, 2.6793, 1.9908, 2.6183, 2.2838, 2.0715], + device='cuda:3'), covar=tensor([0.4329, 0.4009, 0.1854, 0.2322, 0.4219, 0.2041, 0.4867, 0.3385], + device='cuda:3'), in_proj_covar=tensor([0.0899, 0.0967, 0.0719, 0.0932, 0.0880, 0.0818, 0.0841, 0.0782], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 22:36:54,745 INFO [train.py:903] (3/4) Epoch 22, batch 6500, loss[loss=0.2464, simple_loss=0.337, pruned_loss=0.07792, over 18551.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2879, pruned_loss=0.06336, over 3821444.14 frames. ], batch size: 84, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:00,219 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-02 22:37:02,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0328, 5.0658, 5.8306, 5.8409, 1.9227, 5.5189, 4.6491, 5.4729], + device='cuda:3'), covar=tensor([0.1628, 0.0965, 0.0548, 0.0622, 0.6280, 0.0816, 0.0625, 0.1163], + device='cuda:3'), in_proj_covar=tensor([0.0783, 0.0743, 0.0948, 0.0826, 0.0834, 0.0708, 0.0564, 0.0878], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 22:37:31,259 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-02 22:37:41,923 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=149927.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:37:55,093 INFO [train.py:903] (3/4) Epoch 22, batch 6550, loss[loss=0.1937, simple_loss=0.2642, pruned_loss=0.06162, over 19610.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2882, pruned_loss=0.06344, over 3825202.46 frames. 
], batch size: 50, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:37:56,255 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 4.654e+02 5.933e+02 7.304e+02 1.667e+03, threshold=1.187e+03, percent-clipped=4.0 +2023-04-02 22:38:08,356 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-02 22:38:55,831 INFO [train.py:903] (3/4) Epoch 22, batch 6600, loss[loss=0.1564, simple_loss=0.2368, pruned_loss=0.03806, over 19731.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2881, pruned_loss=0.06354, over 3804953.02 frames. ], batch size: 46, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:38:58,358 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7630, 4.3499, 2.7754, 3.7649, 1.1977, 4.2881, 4.1669, 4.3115], + device='cuda:3'), covar=tensor([0.0575, 0.0936, 0.1839, 0.0798, 0.3722, 0.0645, 0.0920, 0.1212], + device='cuda:3'), in_proj_covar=tensor([0.0501, 0.0409, 0.0493, 0.0344, 0.0399, 0.0431, 0.0423, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:38:59,630 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149991.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:39:39,196 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2346, 1.2804, 1.6590, 1.2679, 2.6856, 3.5748, 3.2861, 3.7920], + device='cuda:3'), covar=tensor([0.1649, 0.3932, 0.3620, 0.2581, 0.0653, 0.0218, 0.0227, 0.0282], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0323, 0.0354, 0.0264, 0.0244, 0.0188, 0.0215, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 22:39:59,904 INFO [train.py:903] (3/4) Epoch 22, batch 6650, loss[loss=0.2303, simple_loss=0.3138, pruned_loss=0.0734, over 19785.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2876, pruned_loss=0.06348, over 3820753.37 frames. ], batch size: 56, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:40:01,056 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 4.673e+02 5.867e+02 7.414e+02 1.313e+03, threshold=1.173e+03, percent-clipped=2.0 +2023-04-02 22:40:04,782 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150042.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:40:59,358 INFO [train.py:903] (3/4) Epoch 22, batch 6700, loss[loss=0.1842, simple_loss=0.2567, pruned_loss=0.05585, over 19066.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2871, pruned_loss=0.06371, over 3807258.23 frames. ], batch size: 42, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:10,398 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150097.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:41:23,665 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150108.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:41:57,254 INFO [train.py:903] (3/4) Epoch 22, batch 6750, loss[loss=0.1932, simple_loss=0.2802, pruned_loss=0.05312, over 19670.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2871, pruned_loss=0.06395, over 3795064.39 frames. 
], batch size: 55, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:41:58,371 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.145e+02 4.879e+02 6.504e+02 7.654e+02 1.720e+03, threshold=1.301e+03, percent-clipped=5.0 +2023-04-02 22:42:53,123 INFO [train.py:903] (3/4) Epoch 22, batch 6800, loss[loss=0.2143, simple_loss=0.2873, pruned_loss=0.07065, over 19713.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2872, pruned_loss=0.06401, over 3799168.33 frames. ], batch size: 51, lr: 3.70e-03, grad_scale: 8.0 +2023-04-02 22:43:38,162 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-02 22:43:39,167 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-02 22:43:42,617 INFO [train.py:903] (3/4) Epoch 23, batch 0, loss[loss=0.234, simple_loss=0.3035, pruned_loss=0.08231, over 13122.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3035, pruned_loss=0.08231, over 13122.00 frames. ], batch size: 137, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:43:42,617 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 22:43:54,249 INFO [train.py:937] (3/4) Epoch 23, validation: loss=0.1688, simple_loss=0.2693, pruned_loss=0.03418, over 944034.00 frames. +2023-04-02 22:43:54,250 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 22:44:03,534 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150223.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:44:06,612 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-02 22:44:21,434 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.848e+02 5.561e+02 7.527e+02 1.735e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-04-02 22:44:31,829 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150247.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:44:35,195 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6047, 2.2512, 1.6885, 1.4925, 2.0910, 1.3078, 1.4050, 1.9683], + device='cuda:3'), covar=tensor([0.1127, 0.0849, 0.1219, 0.0929, 0.0627, 0.1390, 0.0854, 0.0611], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0320, 0.0342, 0.0269, 0.0250, 0.0339, 0.0294, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:44:55,826 INFO [train.py:903] (3/4) Epoch 23, batch 50, loss[loss=0.2187, simple_loss=0.2973, pruned_loss=0.07005, over 18842.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2873, pruned_loss=0.06226, over 860254.24 frames. ], batch size: 74, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:45:03,036 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150272.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:27,192 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150292.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:30,274 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. 
Duration: 27.0318125 +2023-04-02 22:45:36,373 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150298.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:45:57,952 INFO [train.py:903] (3/4) Epoch 23, batch 100, loss[loss=0.191, simple_loss=0.2768, pruned_loss=0.05266, over 19375.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2874, pruned_loss=0.06235, over 1507317.57 frames. ], batch size: 70, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:46:06,469 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150323.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:46:07,254 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-02 22:46:26,514 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.179e+02 4.910e+02 5.630e+02 7.676e+02 1.557e+03, threshold=1.126e+03, percent-clipped=7.0 +2023-04-02 22:46:48,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5206, 2.2085, 1.6768, 1.3289, 2.0633, 1.2834, 1.3179, 1.9937], + device='cuda:3'), covar=tensor([0.1219, 0.0816, 0.1141, 0.1035, 0.0617, 0.1325, 0.0881, 0.0513], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0318, 0.0340, 0.0268, 0.0248, 0.0337, 0.0292, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:46:59,544 INFO [train.py:903] (3/4) Epoch 23, batch 150, loss[loss=0.2046, simple_loss=0.2699, pruned_loss=0.06958, over 19758.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2853, pruned_loss=0.06182, over 2016873.09 frames. ], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:47:59,879 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-02 22:48:01,005 INFO [train.py:903] (3/4) Epoch 23, batch 200, loss[loss=0.2589, simple_loss=0.3371, pruned_loss=0.0904, over 19671.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2878, pruned_loss=0.06379, over 2400796.42 frames. ], batch size: 55, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:48:30,850 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.764e+02 5.350e+02 6.653e+02 9.712e+02 2.771e+03, threshold=1.331e+03, percent-clipped=16.0 +2023-04-02 22:48:33,188 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150441.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:48:38,323 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 22:49:02,669 INFO [train.py:903] (3/4) Epoch 23, batch 250, loss[loss=0.2526, simple_loss=0.3251, pruned_loss=0.09003, over 18871.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2878, pruned_loss=0.06425, over 2715303.97 frames. 
], batch size: 74, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:49:20,036 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150479.0, num_to_drop=1, layers_to_drop={1} +2023-04-02 22:49:43,865 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2310, 1.2711, 1.6010, 1.3064, 2.6411, 3.7415, 3.4543, 4.0282], + device='cuda:3'), covar=tensor([0.1741, 0.4016, 0.3759, 0.2516, 0.0661, 0.0181, 0.0217, 0.0232], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0325, 0.0355, 0.0267, 0.0246, 0.0189, 0.0217, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 22:49:48,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0615, 4.3925, 4.7492, 4.7426, 1.8071, 4.4793, 3.8963, 4.4410], + device='cuda:3'), covar=tensor([0.1577, 0.1057, 0.0615, 0.0673, 0.6124, 0.0894, 0.0671, 0.1185], + device='cuda:3'), in_proj_covar=tensor([0.0784, 0.0744, 0.0952, 0.0832, 0.0836, 0.0711, 0.0566, 0.0878], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 22:49:49,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150504.0, num_to_drop=1, layers_to_drop={0} +2023-04-02 22:49:54,030 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5718, 1.6007, 1.8261, 1.8069, 2.6546, 2.3184, 2.7413, 1.2392], + device='cuda:3'), covar=tensor([0.2333, 0.4130, 0.2581, 0.1860, 0.1357, 0.2068, 0.1282, 0.4180], + device='cuda:3'), in_proj_covar=tensor([0.0535, 0.0642, 0.0715, 0.0484, 0.0618, 0.0530, 0.0661, 0.0548], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 22:50:05,853 INFO [train.py:903] (3/4) Epoch 23, batch 300, loss[loss=0.1771, simple_loss=0.2532, pruned_loss=0.05051, over 19778.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2864, pruned_loss=0.06338, over 2975877.00 frames. ], batch size: 48, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:50:34,498 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.024e+02 5.928e+02 7.198e+02 2.066e+03, threshold=1.186e+03, percent-clipped=3.0 +2023-04-02 22:50:54,909 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150555.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:50:56,043 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150556.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:51:07,074 INFO [train.py:903] (3/4) Epoch 23, batch 350, loss[loss=0.2135, simple_loss=0.2914, pruned_loss=0.0678, over 14034.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2869, pruned_loss=0.06347, over 3160387.33 frames. ], batch size: 135, lr: 3.61e-03, grad_scale: 4.0 +2023-04-02 22:51:11,929 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-02 22:52:09,603 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-02 22:52:09,973 INFO [train.py:903] (3/4) Epoch 23, batch 400, loss[loss=0.1999, simple_loss=0.2736, pruned_loss=0.0631, over 19383.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2861, pruned_loss=0.0632, over 3304678.88 frames. 
], batch size: 47, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:52:36,602 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150636.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:52:39,072 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2615, 2.3352, 2.6006, 3.2460, 2.3516, 3.0068, 2.6072, 2.3661], + device='cuda:3'), covar=tensor([0.4484, 0.4460, 0.1938, 0.2600, 0.4729, 0.2270, 0.5002, 0.3392], + device='cuda:3'), in_proj_covar=tensor([0.0903, 0.0968, 0.0719, 0.0932, 0.0883, 0.0819, 0.0845, 0.0784], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 22:52:40,896 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.962e+02 5.093e+02 6.486e+02 8.008e+02 1.724e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 22:53:11,936 INFO [train.py:903] (3/4) Epoch 23, batch 450, loss[loss=0.2375, simple_loss=0.3111, pruned_loss=0.08197, over 19313.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.285, pruned_loss=0.0624, over 3437222.52 frames. ], batch size: 66, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:53:46,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-02 22:53:46,089 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-02 22:54:15,722 INFO [train.py:903] (3/4) Epoch 23, batch 500, loss[loss=0.2214, simple_loss=0.2845, pruned_loss=0.07912, over 18652.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2859, pruned_loss=0.06272, over 3524266.15 frames. ], batch size: 41, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:54:29,756 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4105, 4.0348, 2.6463, 3.6052, 1.2622, 3.9412, 3.8981, 3.9610], + device='cuda:3'), covar=tensor([0.0671, 0.1078, 0.2018, 0.0881, 0.3637, 0.0653, 0.0888, 0.1098], + device='cuda:3'), in_proj_covar=tensor([0.0505, 0.0413, 0.0498, 0.0346, 0.0402, 0.0436, 0.0425, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 22:54:45,185 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 5.191e+02 6.635e+02 8.528e+02 2.142e+03, threshold=1.327e+03, percent-clipped=5.0 +2023-04-02 22:54:57,354 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:54:58,668 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150751.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:55:17,409 INFO [train.py:903] (3/4) Epoch 23, batch 550, loss[loss=0.2046, simple_loss=0.2735, pruned_loss=0.06783, over 19761.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06356, over 3587797.73 frames. 
], batch size: 47, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:55:40,337 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4251, 1.5261, 1.8241, 1.7061, 2.6029, 2.1986, 2.7981, 1.2864], + device='cuda:3'), covar=tensor([0.2665, 0.4414, 0.2796, 0.2069, 0.1606, 0.2344, 0.1572, 0.4467], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0642, 0.0711, 0.0483, 0.0616, 0.0531, 0.0660, 0.0547], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 22:56:14,493 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150812.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:18,580 INFO [train.py:903] (3/4) Epoch 23, batch 600, loss[loss=0.1609, simple_loss=0.2369, pruned_loss=0.04244, over 19715.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2888, pruned_loss=0.06445, over 3637020.19 frames. ], batch size: 46, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:56:45,484 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150837.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:56:48,650 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+02 5.000e+02 5.859e+02 6.998e+02 1.831e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-02 22:56:59,194 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-02 22:57:21,218 INFO [train.py:903] (3/4) Epoch 23, batch 650, loss[loss=0.2239, simple_loss=0.3087, pruned_loss=0.06955, over 19527.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2877, pruned_loss=0.06389, over 3686864.00 frames. ], batch size: 64, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:02,426 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=150899.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:23,647 INFO [train.py:903] (3/4) Epoch 23, batch 700, loss[loss=0.1705, simple_loss=0.2572, pruned_loss=0.0419, over 19484.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.0633, over 3731744.40 frames. ], batch size: 49, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 22:58:27,253 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150918.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 22:58:52,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.226e+02 5.019e+02 5.794e+02 7.164e+02 1.349e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-02 22:58:58,122 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.74 vs. limit=5.0 +2023-04-02 22:59:26,031 INFO [train.py:903] (3/4) Epoch 23, batch 750, loss[loss=0.1854, simple_loss=0.2723, pruned_loss=0.04929, over 19564.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2866, pruned_loss=0.0633, over 3764414.05 frames. ], batch size: 61, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:17,629 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151007.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:26,934 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151014.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:28,669 INFO [train.py:903] (3/4) Epoch 23, batch 800, loss[loss=0.2005, simple_loss=0.2893, pruned_loss=0.05589, over 19542.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2859, pruned_loss=0.06298, over 3787608.17 frames. 
], batch size: 54, lr: 3.61e-03, grad_scale: 8.0 +2023-04-02 23:00:36,353 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 23:00:44,076 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-02 23:00:46,696 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-02 23:00:48,201 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151032.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:00:57,135 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 5.089e+02 6.161e+02 7.478e+02 1.780e+03, threshold=1.232e+03, percent-clipped=6.0 +2023-04-02 23:01:29,773 INFO [train.py:903] (3/4) Epoch 23, batch 850, loss[loss=0.1683, simple_loss=0.2437, pruned_loss=0.04647, over 19787.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2851, pruned_loss=0.06279, over 3803136.83 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:01:38,466 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-02 23:02:00,721 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5362, 1.7393, 2.0063, 1.8015, 3.3376, 2.6968, 3.6206, 1.7080], + device='cuda:3'), covar=tensor([0.2419, 0.4099, 0.2529, 0.1873, 0.1347, 0.1985, 0.1406, 0.3977], + device='cuda:3'), in_proj_covar=tensor([0.0531, 0.0638, 0.0707, 0.0481, 0.0611, 0.0526, 0.0653, 0.0545], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:02:04,874 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151094.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:02:25,394 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-02 23:02:31,810 INFO [train.py:903] (3/4) Epoch 23, batch 900, loss[loss=0.1842, simple_loss=0.2525, pruned_loss=0.05793, over 19731.00 frames. ], tot_loss[loss=0.205, simple_loss=0.285, pruned_loss=0.06253, over 3816937.18 frames. ], batch size: 45, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:02,018 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 5.070e+02 6.543e+02 8.443e+02 1.332e+03, threshold=1.309e+03, percent-clipped=3.0 +2023-04-02 23:03:21,217 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-02 23:03:32,679 INFO [train.py:903] (3/4) Epoch 23, batch 950, loss[loss=0.2153, simple_loss=0.2888, pruned_loss=0.07095, over 19864.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2866, pruned_loss=0.06396, over 3823626.42 frames. ], batch size: 52, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:03:39,535 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-02 23:04:11,660 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6739, 4.2922, 2.7516, 3.7969, 1.0751, 4.1681, 4.0675, 4.1594], + device='cuda:3'), covar=tensor([0.0579, 0.0894, 0.1788, 0.0877, 0.3731, 0.0661, 0.0839, 0.0981], + device='cuda:3'), in_proj_covar=tensor([0.0504, 0.0413, 0.0497, 0.0346, 0.0400, 0.0435, 0.0426, 0.0459], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:04:16,137 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-02 23:04:17,808 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151202.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:27,639 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151209.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:04:35,177 INFO [train.py:903] (3/4) Epoch 23, batch 1000, loss[loss=0.1857, simple_loss=0.2731, pruned_loss=0.04911, over 19731.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2865, pruned_loss=0.06405, over 3803965.56 frames. ], batch size: 51, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:04:47,465 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4960, 1.5076, 1.6720, 1.6683, 2.3659, 2.1656, 2.4656, 0.9360], + device='cuda:3'), covar=tensor([0.2423, 0.4259, 0.2722, 0.1977, 0.1436, 0.2141, 0.1284, 0.4552], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0642, 0.0711, 0.0484, 0.0616, 0.0528, 0.0658, 0.0548], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:05:04,890 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.146e+02 6.401e+02 7.951e+02 1.702e+03, threshold=1.280e+03, percent-clipped=4.0 +2023-04-02 23:05:32,304 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-02 23:05:33,597 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151262.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:05:37,845 INFO [train.py:903] (3/4) Epoch 23, batch 1050, loss[loss=0.1889, simple_loss=0.265, pruned_loss=0.05637, over 19610.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2871, pruned_loss=0.06413, over 3799787.83 frames. ], batch size: 50, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:05:42,845 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151270.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:12,776 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-02 23:06:14,326 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151295.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:21,313 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151300.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:06:40,096 INFO [train.py:903] (3/4) Epoch 23, batch 1100, loss[loss=0.1883, simple_loss=0.2694, pruned_loss=0.0536, over 19748.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.0643, over 3789647.10 frames. 
], batch size: 51, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:06:40,407 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151316.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:07:09,139 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 5.070e+02 6.152e+02 7.617e+02 1.362e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-04-02 23:07:40,855 INFO [train.py:903] (3/4) Epoch 23, batch 1150, loss[loss=0.2106, simple_loss=0.2797, pruned_loss=0.07071, over 19298.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.288, pruned_loss=0.06466, over 3792071.32 frames. ], batch size: 44, lr: 3.60e-03, grad_scale: 4.0 +2023-04-02 23:07:55,726 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151377.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:08:43,929 INFO [train.py:903] (3/4) Epoch 23, batch 1200, loss[loss=0.2029, simple_loss=0.2952, pruned_loss=0.05527, over 19768.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2877, pruned_loss=0.06437, over 3787032.02 frames. ], batch size: 54, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:09:14,770 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.896e+02 6.001e+02 7.643e+02 1.247e+03, threshold=1.200e+03, percent-clipped=2.0 +2023-04-02 23:09:18,077 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-02 23:09:45,312 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151465.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:09:46,023 INFO [train.py:903] (3/4) Epoch 23, batch 1250, loss[loss=0.1914, simple_loss=0.2708, pruned_loss=0.05596, over 19406.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2893, pruned_loss=0.06502, over 3795638.24 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:10:12,468 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3758, 2.1436, 1.6440, 1.4240, 1.9864, 1.2471, 1.2676, 1.8084], + device='cuda:3'), covar=tensor([0.1110, 0.0798, 0.1080, 0.0926, 0.0534, 0.1353, 0.0818, 0.0535], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0316, 0.0339, 0.0266, 0.0246, 0.0339, 0.0291, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:10:16,529 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151490.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:10:17,590 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1965, 2.3076, 2.5267, 3.1308, 2.3377, 2.9777, 2.6713, 2.4063], + device='cuda:3'), covar=tensor([0.4275, 0.4088, 0.1959, 0.2517, 0.4475, 0.2134, 0.4474, 0.3296], + device='cuda:3'), in_proj_covar=tensor([0.0906, 0.0973, 0.0719, 0.0936, 0.0884, 0.0817, 0.0844, 0.0787], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 23:10:46,707 INFO [train.py:903] (3/4) Epoch 23, batch 1300, loss[loss=0.219, simple_loss=0.3047, pruned_loss=0.06665, over 19589.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2892, pruned_loss=0.06521, over 3810051.74 frames. 
], batch size: 57, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:11:16,030 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 5.118e+02 6.046e+02 8.032e+02 1.744e+03, threshold=1.209e+03, percent-clipped=5.0 +2023-04-02 23:11:22,856 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151546.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:11:46,631 INFO [train.py:903] (3/4) Epoch 23, batch 1350, loss[loss=0.2135, simple_loss=0.2907, pruned_loss=0.06813, over 13500.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06479, over 3821619.05 frames. ], batch size: 136, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:12:48,225 INFO [train.py:903] (3/4) Epoch 23, batch 1400, loss[loss=0.1641, simple_loss=0.2453, pruned_loss=0.04145, over 19762.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2891, pruned_loss=0.06478, over 3811164.98 frames. ], batch size: 48, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:08,456 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151633.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:13,630 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.4626, 4.9954, 3.0490, 4.3707, 1.3234, 4.9435, 4.8508, 5.0139], + device='cuda:3'), covar=tensor([0.0420, 0.0851, 0.1913, 0.0755, 0.3885, 0.0587, 0.0824, 0.1233], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0413, 0.0498, 0.0347, 0.0402, 0.0437, 0.0427, 0.0459], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:13:17,802 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 5.043e+02 6.240e+02 8.237e+02 1.280e+03, threshold=1.248e+03, percent-clipped=3.0 +2023-04-02 23:13:21,407 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151644.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:38,327 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151658.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:40,485 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=151660.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:41,886 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151661.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:13:48,349 INFO [train.py:903] (3/4) Epoch 23, batch 1450, loss[loss=0.1983, simple_loss=0.269, pruned_loss=0.06382, over 19384.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2886, pruned_loss=0.06426, over 3809636.33 frames. ], batch size: 47, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:13:48,384 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-02 23:14:36,967 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151706.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:14:49,055 INFO [train.py:903] (3/4) Epoch 23, batch 1500, loss[loss=0.198, simple_loss=0.2868, pruned_loss=0.05459, over 19691.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2886, pruned_loss=0.06443, over 3805390.35 frames. 
], batch size: 59, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:18,488 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.905e+02 6.054e+02 7.299e+02 2.065e+03, threshold=1.211e+03, percent-clipped=4.0 +2023-04-02 23:15:33,010 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5932, 1.8005, 2.0241, 1.8586, 3.2330, 2.6424, 3.6256, 1.6330], + device='cuda:3'), covar=tensor([0.2534, 0.4309, 0.2857, 0.1949, 0.1513, 0.2132, 0.1514, 0.4320], + device='cuda:3'), in_proj_covar=tensor([0.0540, 0.0647, 0.0717, 0.0488, 0.0620, 0.0533, 0.0665, 0.0554], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 23:15:40,400 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151759.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:15:47,607 INFO [train.py:903] (3/4) Epoch 23, batch 1550, loss[loss=0.2065, simple_loss=0.2914, pruned_loss=0.06083, over 19478.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.289, pruned_loss=0.06479, over 3805939.75 frames. ], batch size: 64, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:15:59,944 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151775.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:16:45,623 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3489, 4.0079, 3.0726, 3.5596, 1.8616, 3.8765, 3.8137, 3.8702], + device='cuda:3'), covar=tensor([0.0652, 0.0868, 0.1832, 0.0892, 0.2745, 0.0750, 0.0896, 0.1254], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0411, 0.0497, 0.0347, 0.0401, 0.0436, 0.0428, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:16:50,043 INFO [train.py:903] (3/4) Epoch 23, batch 1600, loss[loss=0.1726, simple_loss=0.2496, pruned_loss=0.04784, over 19752.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.0642, over 3800836.74 frames. ], batch size: 46, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:16:53,651 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151819.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:17:10,289 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-02 23:17:20,214 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.863e+02 5.887e+02 6.951e+02 2.426e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-04-02 23:17:50,192 INFO [train.py:903] (3/4) Epoch 23, batch 1650, loss[loss=0.2104, simple_loss=0.2975, pruned_loss=0.06163, over 19541.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2872, pruned_loss=0.06379, over 3801957.91 frames. ], batch size: 64, lr: 3.60e-03, grad_scale: 8.0 +2023-04-02 23:18:51,789 INFO [train.py:903] (3/4) Epoch 23, batch 1700, loss[loss=0.1905, simple_loss=0.2613, pruned_loss=0.0599, over 18616.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06393, over 3805529.77 frames. 
], batch size: 41, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:18:53,411 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151917.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:21,472 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.018e+02 6.073e+02 7.613e+02 1.748e+03, threshold=1.215e+03, percent-clipped=5.0 +2023-04-02 23:19:23,213 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151942.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:19:26,187 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-02 23:19:42,954 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8123, 1.8241, 1.4190, 1.8512, 1.8464, 1.4541, 1.5951, 1.6677], + device='cuda:3'), covar=tensor([0.1348, 0.1679, 0.2131, 0.1369, 0.1531, 0.1042, 0.1923, 0.1123], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0359, 0.0318, 0.0255, 0.0307, 0.0255, 0.0313, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:19:52,380 INFO [train.py:903] (3/4) Epoch 23, batch 1750, loss[loss=0.1757, simple_loss=0.2502, pruned_loss=0.05062, over 19735.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06304, over 3813851.97 frames. ], batch size: 45, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:20:41,352 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6242, 4.2041, 2.5609, 3.7070, 0.8854, 4.1514, 4.0251, 4.1447], + device='cuda:3'), covar=tensor([0.0646, 0.1079, 0.2225, 0.0919, 0.4226, 0.0714, 0.0918, 0.1218], + device='cuda:3'), in_proj_covar=tensor([0.0508, 0.0412, 0.0497, 0.0347, 0.0400, 0.0435, 0.0429, 0.0458], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:20:53,710 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152015.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:20:54,415 INFO [train.py:903] (3/4) Epoch 23, batch 1800, loss[loss=0.2095, simple_loss=0.293, pruned_loss=0.06302, over 18269.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2865, pruned_loss=0.06296, over 3825319.13 frames. ], batch size: 83, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:21:13,149 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:24,577 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152040.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:26,139 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.69 vs. limit=5.0 +2023-04-02 23:21:26,672 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 5.200e+02 6.601e+02 8.823e+02 1.720e+03, threshold=1.320e+03, percent-clipped=12.0 +2023-04-02 23:21:36,343 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152050.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:44,277 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152056.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:21:48,442 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-02 23:21:55,077 INFO [train.py:903] (3/4) Epoch 23, batch 1850, loss[loss=0.1735, simple_loss=0.2467, pruned_loss=0.05015, over 19271.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2869, pruned_loss=0.06293, over 3823831.20 frames. ], batch size: 44, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:22:26,617 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-02 23:22:54,488 INFO [train.py:903] (3/4) Epoch 23, batch 1900, loss[loss=0.2072, simple_loss=0.2904, pruned_loss=0.06198, over 19608.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06345, over 3830022.48 frames. ], batch size: 52, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:23:09,868 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-02 23:23:16,386 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-02 23:23:26,803 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 4.994e+02 5.968e+02 7.712e+02 2.482e+03, threshold=1.194e+03, percent-clipped=3.0 +2023-04-02 23:23:41,406 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-02 23:23:52,773 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152163.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:55,391 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152165.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:23:56,159 INFO [train.py:903] (3/4) Epoch 23, batch 1950, loss[loss=0.2192, simple_loss=0.2964, pruned_loss=0.07099, over 19655.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06294, over 3832962.24 frames. ], batch size: 60, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:24:58,528 INFO [train.py:903] (3/4) Epoch 23, batch 2000, loss[loss=0.2393, simple_loss=0.3262, pruned_loss=0.07621, over 19404.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.286, pruned_loss=0.06225, over 3839695.33 frames. ], batch size: 70, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:25:28,732 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.661e+02 5.613e+02 7.041e+02 1.127e+03, threshold=1.123e+03, percent-clipped=0.0 +2023-04-02 23:25:31,156 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152243.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:25:53,679 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-02 23:25:58,369 INFO [train.py:903] (3/4) Epoch 23, batch 2050, loss[loss=0.2581, simple_loss=0.3216, pruned_loss=0.09732, over 12924.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2856, pruned_loss=0.06239, over 3833431.43 frames. ], batch size: 137, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:26:13,093 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-02 23:26:13,415 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152278.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:14,188 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. 
Duration: 27.0318125 +2023-04-02 23:26:29,852 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152291.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:26:36,226 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-02 23:26:58,783 INFO [train.py:903] (3/4) Epoch 23, batch 2100, loss[loss=0.1909, simple_loss=0.2722, pruned_loss=0.05476, over 19769.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06215, over 3826766.64 frames. ], batch size: 56, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:27:09,415 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5349, 1.4949, 1.4791, 2.0093, 1.6679, 1.8446, 1.9358, 1.6810], + device='cuda:3'), covar=tensor([0.0864, 0.0959, 0.1047, 0.0720, 0.0808, 0.0791, 0.0784, 0.0743], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0223, 0.0239, 0.0226, 0.0212, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-02 23:27:27,818 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-02 23:27:30,769 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-02 23:27:31,218 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 4.808e+02 5.705e+02 7.050e+02 1.568e+03, threshold=1.141e+03, percent-clipped=6.0 +2023-04-02 23:27:48,004 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-02 23:27:59,303 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. limit=5.0 +2023-04-02 23:27:59,782 INFO [train.py:903] (3/4) Epoch 23, batch 2150, loss[loss=0.2405, simple_loss=0.3192, pruned_loss=0.08083, over 19661.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.06267, over 3825995.66 frames. ], batch size: 53, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:28:37,109 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-02 23:28:45,738 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8632, 1.8221, 1.6939, 1.5476, 1.5227, 1.5935, 0.6109, 0.9875], + device='cuda:3'), covar=tensor([0.0582, 0.0621, 0.0436, 0.0639, 0.0959, 0.0706, 0.1196, 0.0943], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0352, 0.0359, 0.0381, 0.0458, 0.0387, 0.0335, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:29:00,654 INFO [train.py:903] (3/4) Epoch 23, batch 2200, loss[loss=0.2767, simple_loss=0.3385, pruned_loss=0.1075, over 18734.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06334, over 3807063.85 frames. 
], batch size: 74, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:29:07,526 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152421.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:17,919 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4971, 1.6399, 1.9773, 1.7753, 3.1305, 2.6139, 3.3335, 1.7317], + device='cuda:3'), covar=tensor([0.2620, 0.4348, 0.2727, 0.1980, 0.1611, 0.2122, 0.1676, 0.4165], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0643, 0.0714, 0.0486, 0.0617, 0.0532, 0.0662, 0.0549], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:29:28,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2558, 2.9733, 2.2384, 2.3217, 2.2173, 2.6361, 1.1018, 2.1044], + device='cuda:3'), covar=tensor([0.0568, 0.0597, 0.0720, 0.1076, 0.1097, 0.0974, 0.1410, 0.1095], + device='cuda:3'), in_proj_covar=tensor([0.0355, 0.0353, 0.0360, 0.0382, 0.0460, 0.0388, 0.0336, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:29:31,740 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.084e+02 6.029e+02 8.181e+02 1.825e+03, threshold=1.206e+03, percent-clipped=10.0 +2023-04-02 23:29:37,618 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152446.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:29:58,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152463.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:30:01,457 INFO [train.py:903] (3/4) Epoch 23, batch 2250, loss[loss=0.2724, simple_loss=0.3418, pruned_loss=0.1015, over 19735.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2865, pruned_loss=0.06305, over 3815068.03 frames. ], batch size: 63, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:31:01,770 INFO [train.py:903] (3/4) Epoch 23, batch 2300, loss[loss=0.2177, simple_loss=0.2958, pruned_loss=0.06981, over 19698.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2867, pruned_loss=0.06343, over 3829985.30 frames. ], batch size: 59, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:31:17,225 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-02 23:31:25,345 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152534.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:31:36,144 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.927e+02 5.902e+02 7.617e+02 2.113e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-04-02 23:31:41,228 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0680, 3.1397, 1.9280, 1.8897, 2.8224, 1.7173, 1.4342, 2.2020], + device='cuda:3'), covar=tensor([0.1329, 0.0689, 0.1106, 0.0916, 0.0581, 0.1287, 0.1033, 0.0718], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0336, 0.0266, 0.0248, 0.0338, 0.0290, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:31:55,675 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152559.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:04,337 INFO [train.py:903] (3/4) Epoch 23, batch 2350, loss[loss=0.2531, simple_loss=0.3141, pruned_loss=0.09603, over 13535.00 frames. 
], tot_loss[loss=0.206, simple_loss=0.2861, pruned_loss=0.06294, over 3823123.42 frames. ], batch size: 135, lr: 3.59e-03, grad_scale: 4.0 +2023-04-02 23:32:30,252 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152587.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:43,251 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-02 23:32:45,464 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152600.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:32:54,455 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152607.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:03,113 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-02 23:33:05,171 INFO [train.py:903] (3/4) Epoch 23, batch 2400, loss[loss=0.2354, simple_loss=0.3137, pruned_loss=0.07859, over 19832.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2868, pruned_loss=0.06324, over 3826866.14 frames. ], batch size: 52, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:33:28,191 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152635.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:33:38,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 5.347e+02 6.485e+02 7.646e+02 1.871e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-04-02 23:34:06,659 INFO [train.py:903] (3/4) Epoch 23, batch 2450, loss[loss=0.2081, simple_loss=0.2994, pruned_loss=0.0584, over 19525.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.286, pruned_loss=0.06267, over 3824312.41 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:34:51,455 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152702.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:34:55,912 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1028, 5.0994, 5.8624, 5.8609, 2.1937, 5.5665, 4.7022, 5.4839], + device='cuda:3'), covar=tensor([0.1523, 0.0807, 0.0537, 0.0566, 0.5706, 0.0802, 0.0607, 0.1096], + device='cuda:3'), in_proj_covar=tensor([0.0781, 0.0744, 0.0952, 0.0836, 0.0838, 0.0714, 0.0564, 0.0886], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-02 23:35:06,561 INFO [train.py:903] (3/4) Epoch 23, batch 2500, loss[loss=0.2312, simple_loss=0.3118, pruned_loss=0.07532, over 19592.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2857, pruned_loss=0.0628, over 3830631.82 frames. 
], batch size: 61, lr: 3.59e-03, grad_scale: 8.0 +2023-04-02 23:35:40,606 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.829e+02 4.968e+02 5.949e+02 7.714e+02 2.745e+03, threshold=1.190e+03, percent-clipped=5.0 +2023-04-02 23:35:42,101 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152744.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:35:48,910 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2166, 1.8125, 1.9008, 1.8563, 2.9758, 1.5838, 2.5814, 3.2093], + device='cuda:3'), covar=tensor([0.0543, 0.2232, 0.2320, 0.1628, 0.0563, 0.2101, 0.1593, 0.0356], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0368, 0.0389, 0.0349, 0.0376, 0.0353, 0.0383, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:35:48,954 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152750.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:36:08,241 INFO [train.py:903] (3/4) Epoch 23, batch 2550, loss[loss=0.1996, simple_loss=0.2782, pruned_loss=0.06056, over 19780.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2854, pruned_loss=0.06255, over 3819520.30 frames. ], batch size: 56, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:36:11,527 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-02 23:36:57,889 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152807.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:37:01,065 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-02 23:37:08,439 INFO [train.py:903] (3/4) Epoch 23, batch 2600, loss[loss=0.1685, simple_loss=0.2644, pruned_loss=0.03629, over 19531.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2865, pruned_loss=0.0626, over 3829639.07 frames. ], batch size: 54, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:37:40,421 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 4.724e+02 5.510e+02 7.301e+02 1.657e+03, threshold=1.102e+03, percent-clipped=4.0 +2023-04-02 23:38:08,562 INFO [train.py:903] (3/4) Epoch 23, batch 2650, loss[loss=0.1992, simple_loss=0.2742, pruned_loss=0.06207, over 19413.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2873, pruned_loss=0.06319, over 3836788.77 frames. ], batch size: 48, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:38:27,770 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-02 23:39:08,680 INFO [train.py:903] (3/4) Epoch 23, batch 2700, loss[loss=0.2516, simple_loss=0.3279, pruned_loss=0.08766, over 19751.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.06266, over 3844199.48 frames. 
], batch size: 63, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:39:16,557 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152922.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:42,349 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.895e+02 5.730e+02 7.672e+02 1.465e+03, threshold=1.146e+03, percent-clipped=5.0 +2023-04-02 23:39:43,665 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152944.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:39:51,724 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=152951.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:00,060 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152958.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:09,443 INFO [train.py:903] (3/4) Epoch 23, batch 2750, loss[loss=0.2433, simple_loss=0.3257, pruned_loss=0.08042, over 19683.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.287, pruned_loss=0.06289, over 3825861.24 frames. ], batch size: 60, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:40:31,190 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152983.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:40:57,885 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153006.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:10,184 INFO [train.py:903] (3/4) Epoch 23, batch 2800, loss[loss=0.2057, simple_loss=0.2761, pruned_loss=0.06763, over 19612.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2856, pruned_loss=0.06235, over 3840500.89 frames. ], batch size: 50, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:41:27,948 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153031.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:41:42,353 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 4.940e+02 6.140e+02 7.865e+02 1.529e+03, threshold=1.228e+03, percent-clipped=3.0 +2023-04-02 23:42:02,553 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153059.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:10,892 INFO [train.py:903] (3/4) Epoch 23, batch 2850, loss[loss=0.2237, simple_loss=0.2962, pruned_loss=0.07557, over 19742.00 frames. ], tot_loss[loss=0.206, simple_loss=0.286, pruned_loss=0.06301, over 3837403.91 frames. ], batch size: 51, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:42:11,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153066.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:42:36,416 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153088.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:09,824 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-02 23:43:11,008 INFO [train.py:903] (3/4) Epoch 23, batch 2900, loss[loss=0.21, simple_loss=0.292, pruned_loss=0.06397, over 19658.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.286, pruned_loss=0.06283, over 3839991.60 frames. 
], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:43:12,485 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0120, 2.9227, 1.8667, 1.9199, 2.6524, 1.6816, 1.4751, 2.1482], + device='cuda:3'), covar=tensor([0.1285, 0.0756, 0.1028, 0.0798, 0.0607, 0.1189, 0.0945, 0.0705], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0315, 0.0337, 0.0265, 0.0248, 0.0338, 0.0290, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:43:34,524 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153135.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:43:44,842 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.40 vs. limit=5.0 +2023-04-02 23:43:45,170 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.545e+02 5.074e+02 6.117e+02 7.609e+02 1.538e+03, threshold=1.223e+03, percent-clipped=2.0 +2023-04-02 23:44:10,298 INFO [train.py:903] (3/4) Epoch 23, batch 2950, loss[loss=0.1788, simple_loss=0.2579, pruned_loss=0.04983, over 19485.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2875, pruned_loss=0.06364, over 3818064.66 frames. ], batch size: 49, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:44:13,932 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2899, 1.3018, 1.2655, 1.1492, 1.0155, 1.1559, 0.4168, 0.6556], + device='cuda:3'), covar=tensor([0.0495, 0.0522, 0.0326, 0.0513, 0.0899, 0.0612, 0.1157, 0.0831], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0354, 0.0360, 0.0382, 0.0462, 0.0388, 0.0336, 0.0338], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:44:23,317 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2616, 2.0557, 1.9897, 2.1989, 2.0776, 1.9677, 1.8607, 2.2165], + device='cuda:3'), covar=tensor([0.0953, 0.1372, 0.1357, 0.1007, 0.1232, 0.0509, 0.1367, 0.0663], + device='cuda:3'), in_proj_covar=tensor([0.0270, 0.0353, 0.0310, 0.0251, 0.0301, 0.0249, 0.0307, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:44:23,395 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5754, 1.6588, 1.9541, 1.8466, 2.8731, 2.5207, 2.9974, 1.4314], + device='cuda:3'), covar=tensor([0.2436, 0.4262, 0.2707, 0.1821, 0.1440, 0.2032, 0.1457, 0.4307], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0645, 0.0716, 0.0487, 0.0619, 0.0532, 0.0663, 0.0552], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:44:25,505 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153178.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:44:26,639 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2068, 1.2661, 1.2536, 1.0577, 1.0710, 1.0636, 0.1343, 0.3650], + device='cuda:3'), covar=tensor([0.0742, 0.0719, 0.0462, 0.0609, 0.1412, 0.0698, 0.1421, 0.1197], + device='cuda:3'), in_proj_covar=tensor([0.0356, 0.0354, 0.0360, 0.0382, 0.0462, 0.0388, 0.0337, 0.0339], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-02 23:44:54,245 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 
23:44:54,287 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153203.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:45:09,887 INFO [train.py:903] (3/4) Epoch 23, batch 3000, loss[loss=0.2071, simple_loss=0.2845, pruned_loss=0.06486, over 19687.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.06368, over 3824834.89 frames. ], batch size: 53, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:45:09,887 INFO [train.py:928] (3/4) Computing validation loss +2023-04-02 23:45:23,387 INFO [train.py:937] (3/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2685, pruned_loss=0.03441, over 944034.00 frames. +2023-04-02 23:45:23,388 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-02 23:45:26,708 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-02 23:45:57,202 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.963e+02 5.132e+02 6.544e+02 7.997e+02 1.730e+03, threshold=1.309e+03, percent-clipped=4.0 +2023-04-02 23:46:24,012 INFO [train.py:903] (3/4) Epoch 23, batch 3050, loss[loss=0.1835, simple_loss=0.2789, pruned_loss=0.04406, over 19536.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.06413, over 3818640.39 frames. ], batch size: 56, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:00,331 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153296.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:25,789 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153315.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:26,484 INFO [train.py:903] (3/4) Epoch 23, batch 3100, loss[loss=0.1948, simple_loss=0.282, pruned_loss=0.05385, over 19789.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2889, pruned_loss=0.06404, over 3796531.93 frames. ], batch size: 56, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:47:33,614 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153322.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:54,375 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153340.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:47:56,825 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-02 23:47:59,270 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 4.897e+02 6.414e+02 9.491e+02 6.432e+03, threshold=1.283e+03, percent-clipped=11.0 +2023-04-02 23:48:03,120 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153347.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:48:06,260 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9624, 1.6575, 1.5872, 1.8670, 1.5610, 1.6717, 1.5518, 1.8185], + device='cuda:3'), covar=tensor([0.1057, 0.1478, 0.1511, 0.1178, 0.1430, 0.0559, 0.1422, 0.0771], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0358, 0.0315, 0.0254, 0.0305, 0.0252, 0.0310, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:48:25,986 INFO [train.py:903] (3/4) Epoch 23, batch 3150, loss[loss=0.1973, simple_loss=0.2743, pruned_loss=0.06019, over 19573.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2885, pruned_loss=0.06371, over 3803202.97 frames. 
], batch size: 52, lr: 3.58e-03, grad_scale: 4.0 +2023-04-02 23:48:54,105 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-02 23:48:59,701 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9834, 3.6374, 2.5662, 3.2142, 0.8176, 3.6053, 3.4298, 3.5200], + device='cuda:3'), covar=tensor([0.0826, 0.1047, 0.2057, 0.0983, 0.4187, 0.0799, 0.1006, 0.1391], + device='cuda:3'), in_proj_covar=tensor([0.0506, 0.0410, 0.0494, 0.0346, 0.0397, 0.0434, 0.0426, 0.0460], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-02 23:49:26,046 INFO [train.py:903] (3/4) Epoch 23, batch 3200, loss[loss=0.2176, simple_loss=0.3022, pruned_loss=0.06646, over 19783.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2893, pruned_loss=0.06425, over 3801396.30 frames. ], batch size: 56, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:49:54,852 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153439.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:00,126 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.043e+02 6.093e+02 8.078e+02 1.420e+03, threshold=1.219e+03, percent-clipped=2.0 +2023-04-02 23:50:17,461 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153459.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:26,642 INFO [train.py:903] (3/4) Epoch 23, batch 3250, loss[loss=0.2115, simple_loss=0.2827, pruned_loss=0.07016, over 19627.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2891, pruned_loss=0.06394, over 3811239.00 frames. ], batch size: 50, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:50:43,103 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153479.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:50:48,914 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153484.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:51:18,580 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9417, 2.0375, 2.3254, 2.5884, 1.9197, 2.5320, 2.4322, 2.2091], + device='cuda:3'), covar=tensor([0.3931, 0.3516, 0.1802, 0.2131, 0.3738, 0.1921, 0.4417, 0.3135], + device='cuda:3'), in_proj_covar=tensor([0.0906, 0.0973, 0.0719, 0.0933, 0.0884, 0.0819, 0.0846, 0.0786], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-02 23:51:27,759 INFO [train.py:903] (3/4) Epoch 23, batch 3300, loss[loss=0.1963, simple_loss=0.278, pruned_loss=0.0573, over 19664.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2887, pruned_loss=0.06396, over 3807302.38 frames. ], batch size: 53, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:51:34,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-02 23:52:00,748 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.814e+02 5.081e+02 6.216e+02 8.009e+02 2.047e+03, threshold=1.243e+03, percent-clipped=5.0 +2023-04-02 23:52:26,358 INFO [train.py:903] (3/4) Epoch 23, batch 3350, loss[loss=0.1886, simple_loss=0.2687, pruned_loss=0.05422, over 19374.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2894, pruned_loss=0.0645, over 3803499.76 frames. 
], batch size: 47, lr: 3.58e-03, grad_scale: 8.0 +2023-04-02 23:53:00,338 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153594.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:53:26,225 INFO [train.py:903] (3/4) Epoch 23, batch 3400, loss[loss=0.2261, simple_loss=0.302, pruned_loss=0.07509, over 19694.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2887, pruned_loss=0.06442, over 3814262.41 frames. ], batch size: 60, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:53:56,995 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153640.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:54:01,364 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 5.295e+02 6.743e+02 8.549e+02 2.424e+03, threshold=1.349e+03, percent-clipped=5.0 +2023-04-02 23:54:28,041 INFO [train.py:903] (3/4) Epoch 23, batch 3450, loss[loss=0.2118, simple_loss=0.29, pruned_loss=0.06673, over 19614.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2878, pruned_loss=0.0641, over 3816518.14 frames. ], batch size: 57, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:54:31,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-02 23:55:30,146 INFO [train.py:903] (3/4) Epoch 23, batch 3500, loss[loss=0.1892, simple_loss=0.2678, pruned_loss=0.05529, over 19717.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2863, pruned_loss=0.06327, over 3819658.86 frames. ], batch size: 51, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:55:58,566 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1768, 1.2949, 1.7411, 0.9764, 2.3600, 3.0894, 2.7434, 3.2393], + device='cuda:3'), covar=tensor([0.1603, 0.3818, 0.3212, 0.2593, 0.0602, 0.0216, 0.0257, 0.0309], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0324, 0.0353, 0.0265, 0.0245, 0.0189, 0.0218, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-02 23:56:02,444 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.496e+02 4.938e+02 5.821e+02 7.521e+02 2.332e+03, threshold=1.164e+03, percent-clipped=1.0 +2023-04-02 23:56:17,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153755.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:56:30,175 INFO [train.py:903] (3/4) Epoch 23, batch 3550, loss[loss=0.2178, simple_loss=0.3024, pruned_loss=0.06665, over 18082.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.286, pruned_loss=0.06313, over 3814812.96 frames. ], batch size: 83, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:56:50,128 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=153783.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:57:30,145 INFO [train.py:903] (3/4) Epoch 23, batch 3600, loss[loss=0.2, simple_loss=0.2844, pruned_loss=0.05782, over 19746.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2869, pruned_loss=0.06365, over 3821191.73 frames. 
], batch size: 63, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:05,069 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 5.155e+02 6.351e+02 8.015e+02 2.586e+03, threshold=1.270e+03, percent-clipped=6.0 +2023-04-02 23:58:12,378 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153850.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:58:30,878 INFO [train.py:903] (3/4) Epoch 23, batch 3650, loss[loss=0.2568, simple_loss=0.3238, pruned_loss=0.09493, over 18248.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.288, pruned_loss=0.0645, over 3805752.42 frames. ], batch size: 83, lr: 3.57e-03, grad_scale: 8.0 +2023-04-02 23:58:42,987 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153875.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:08,894 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153898.0, num_to_drop=0, layers_to_drop=set() +2023-04-02 23:59:31,672 INFO [train.py:903] (3/4) Epoch 23, batch 3700, loss[loss=0.2019, simple_loss=0.3016, pruned_loss=0.05108, over 19592.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2884, pruned_loss=0.06457, over 3805172.13 frames. ], batch size: 61, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:00:00,767 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153941.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:00:04,691 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.617e+02 5.510e+02 6.874e+02 2.344e+03, threshold=1.102e+03, percent-clipped=3.0 +2023-04-03 00:00:31,960 INFO [train.py:903] (3/4) Epoch 23, batch 3750, loss[loss=0.2198, simple_loss=0.3025, pruned_loss=0.06853, over 18743.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2879, pruned_loss=0.06413, over 3800037.99 frames. ], batch size: 74, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:27,852 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:01:33,255 INFO [train.py:903] (3/4) Epoch 23, batch 3800, loss[loss=0.2097, simple_loss=0.2939, pruned_loss=0.06274, over 19663.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06352, over 3804798.27 frames. ], batch size: 59, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:01:59,779 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154036.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:02:05,111 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 00:02:08,268 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.228e+02 4.923e+02 6.103e+02 7.526e+02 2.694e+03, threshold=1.221e+03, percent-clipped=9.0 +2023-04-03 00:02:33,012 INFO [train.py:903] (3/4) Epoch 23, batch 3850, loss[loss=0.2243, simple_loss=0.3002, pruned_loss=0.07419, over 19781.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2871, pruned_loss=0.06296, over 3815830.11 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:02:35,304 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154067.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:02:38,816 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 00:02:38,915 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-04-03 00:02:45,160 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8804, 1.5060, 1.9376, 1.5921, 4.4299, 1.1196, 2.6438, 4.8481], + device='cuda:3'), covar=tensor([0.0494, 0.2906, 0.2729, 0.2122, 0.0749, 0.2752, 0.1373, 0.0159], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0369, 0.0391, 0.0352, 0.0375, 0.0352, 0.0386, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:02:47,671 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1740, 2.8743, 2.2777, 2.2286, 2.1327, 2.4870, 1.0992, 2.0819], + device='cuda:3'), covar=tensor([0.0644, 0.0608, 0.0729, 0.1166, 0.1129, 0.1168, 0.1466, 0.1111], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0355, 0.0363, 0.0386, 0.0463, 0.0391, 0.0339, 0.0342], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:03:09,354 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:13,920 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6300, 1.5928, 1.2209, 1.6021, 1.4603, 1.2976, 1.2503, 1.4758], + device='cuda:3'), covar=tensor([0.1337, 0.1420, 0.2062, 0.1227, 0.1476, 0.1049, 0.1997, 0.1121], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0354, 0.0310, 0.0250, 0.0302, 0.0249, 0.0309, 0.0255], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:03:18,589 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:03:35,394 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-03 00:03:35,945 INFO [train.py:903] (3/4) Epoch 23, batch 3900, loss[loss=0.1794, simple_loss=0.2604, pruned_loss=0.04923, over 19582.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.0618, over 3821618.49 frames. ], batch size: 52, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:03:57,517 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7762, 1.8443, 2.2198, 2.0453, 3.3999, 2.8791, 3.7152, 1.8529], + device='cuda:3'), covar=tensor([0.2431, 0.4345, 0.2837, 0.1830, 0.1410, 0.2024, 0.1391, 0.4042], + device='cuda:3'), in_proj_covar=tensor([0.0539, 0.0649, 0.0718, 0.0488, 0.0621, 0.0536, 0.0662, 0.0554], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:04:09,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 4.608e+02 5.656e+02 7.392e+02 1.919e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-04-03 00:04:22,808 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:04:37,504 INFO [train.py:903] (3/4) Epoch 23, batch 3950, loss[loss=0.1794, simple_loss=0.2516, pruned_loss=0.05358, over 19066.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06244, over 3825467.99 frames. ], batch size: 42, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:04:44,236 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 00:04:52,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:05:37,018 INFO [train.py:903] (3/4) Epoch 23, batch 4000, loss[loss=0.2128, simple_loss=0.2943, pruned_loss=0.06567, over 19794.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2866, pruned_loss=0.06294, over 3808768.53 frames. ], batch size: 56, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:05:50,660 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 00:06:02,275 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 00:06:06,035 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:06:12,358 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.243e+02 5.130e+02 6.145e+02 8.525e+02 2.203e+03, threshold=1.229e+03, percent-clipped=9.0 +2023-04-03 00:06:27,023 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 00:06:37,053 INFO [train.py:903] (3/4) Epoch 23, batch 4050, loss[loss=0.1847, simple_loss=0.2558, pruned_loss=0.05683, over 19350.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2873, pruned_loss=0.06324, over 3812750.55 frames. ], batch size: 44, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:07:01,676 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154285.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:23,111 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:29,657 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:07:37,599 INFO [train.py:903] (3/4) Epoch 23, batch 4100, loss[loss=0.2112, simple_loss=0.2944, pruned_loss=0.064, over 19767.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06349, over 3810224.97 frames. ], batch size: 54, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:08:11,156 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.105e+02 5.940e+02 7.682e+02 1.555e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-04-03 00:08:13,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 00:08:25,634 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154355.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:08:39,411 INFO [train.py:903] (3/4) Epoch 23, batch 4150, loss[loss=0.1857, simple_loss=0.2592, pruned_loss=0.05607, over 17333.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06292, over 3810690.68 frames. ], batch size: 38, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:22,104 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:09:33,989 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154411.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:09:39,113 INFO [train.py:903] (3/4) Epoch 23, batch 4200, loss[loss=0.2065, simple_loss=0.2819, pruned_loss=0.06553, over 19671.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.06269, over 3818794.74 frames. 
], batch size: 53, lr: 3.57e-03, grad_scale: 8.0 +2023-04-03 00:09:41,423 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 00:10:07,070 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:14,754 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.103e+02 4.744e+02 5.863e+02 7.378e+02 1.705e+03, threshold=1.173e+03, percent-clipped=3.0 +2023-04-03 00:10:17,215 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154446.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:10:40,158 INFO [train.py:903] (3/4) Epoch 23, batch 4250, loss[loss=0.2228, simple_loss=0.3077, pruned_loss=0.06898, over 19574.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06251, over 3825954.90 frames. ], batch size: 61, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:10:54,216 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 00:11:05,307 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 00:11:12,092 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:11:40,251 INFO [train.py:903] (3/4) Epoch 23, batch 4300, loss[loss=0.2013, simple_loss=0.2778, pruned_loss=0.06239, over 19767.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.06169, over 3840633.56 frames. ], batch size: 54, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:11:53,659 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154526.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:11:55,919 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5405, 2.2107, 1.6344, 1.4444, 2.0738, 1.3033, 1.3225, 1.9324], + device='cuda:3'), covar=tensor([0.1042, 0.0762, 0.1152, 0.0885, 0.0567, 0.1297, 0.0777, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0300, 0.0313, 0.0337, 0.0263, 0.0246, 0.0336, 0.0289, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:12:13,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.652e+02 5.888e+02 7.584e+02 1.931e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 00:12:24,518 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154553.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:33,755 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 00:12:36,076 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:12:41,454 INFO [train.py:903] (3/4) Epoch 23, batch 4350, loss[loss=0.2321, simple_loss=0.3236, pruned_loss=0.07026, over 19496.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2861, pruned_loss=0.06211, over 3846038.23 frames. 
], batch size: 64, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:13:01,191 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:13:04,734 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7613, 1.5917, 1.3809, 1.6640, 1.4086, 1.3762, 1.2435, 1.6057], + device='cuda:3'), covar=tensor([0.1119, 0.1341, 0.1695, 0.1098, 0.1419, 0.0765, 0.1827, 0.0861], + device='cuda:3'), in_proj_covar=tensor([0.0272, 0.0355, 0.0313, 0.0252, 0.0302, 0.0251, 0.0311, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:13:40,266 INFO [train.py:903] (3/4) Epoch 23, batch 4400, loss[loss=0.1814, simple_loss=0.2674, pruned_loss=0.04768, over 19586.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2859, pruned_loss=0.06227, over 3844923.31 frames. ], batch size: 52, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:04,353 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 00:14:14,042 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.197e+02 6.555e+02 7.915e+02 1.480e+03, threshold=1.311e+03, percent-clipped=6.0 +2023-04-03 00:14:15,158 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 00:14:16,486 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:18,497 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154648.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:23,887 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:27,578 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154656.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:14:38,569 INFO [train.py:903] (3/4) Epoch 23, batch 4450, loss[loss=0.2056, simple_loss=0.2941, pruned_loss=0.05858, over 19768.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2858, pruned_loss=0.06255, over 3841459.86 frames. ], batch size: 56, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:14:58,481 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154681.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:18,437 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:15:19,415 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154699.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:15:38,749 INFO [train.py:903] (3/4) Epoch 23, batch 4500, loss[loss=0.2477, simple_loss=0.3225, pruned_loss=0.0865, over 19335.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2866, pruned_loss=0.06313, over 3819834.89 frames. 
], batch size: 66, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:06,367 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154738.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:13,918 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.922e+02 6.448e+02 7.735e+02 1.395e+03, threshold=1.290e+03, percent-clipped=1.0 +2023-04-03 00:16:38,071 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:41,146 INFO [train.py:903] (3/4) Epoch 23, batch 4550, loss[loss=0.2341, simple_loss=0.3097, pruned_loss=0.07932, over 17316.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2867, pruned_loss=0.06303, over 3824960.00 frames. ], batch size: 101, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:16:43,820 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:16:48,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 00:17:00,102 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154782.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 00:17:08,985 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8965, 2.5986, 2.5742, 2.7723, 2.6922, 2.5184, 2.2841, 2.8642], + device='cuda:3'), covar=tensor([0.0869, 0.1508, 0.1286, 0.1090, 0.1327, 0.0485, 0.1330, 0.0630], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0356, 0.0314, 0.0253, 0.0304, 0.0253, 0.0312, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:17:11,895 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 00:17:16,374 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6370, 4.2402, 2.6988, 3.7712, 1.0807, 4.2115, 4.0807, 4.1725], + device='cuda:3'), covar=tensor([0.0592, 0.0937, 0.1872, 0.0858, 0.3646, 0.0592, 0.0842, 0.0996], + device='cuda:3'), in_proj_covar=tensor([0.0513, 0.0414, 0.0496, 0.0348, 0.0399, 0.0438, 0.0428, 0.0463], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:17:31,879 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154807.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:17:34,167 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154809.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:17:39,921 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154814.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:17:41,844 INFO [train.py:903] (3/4) Epoch 23, batch 4600, loss[loss=0.2008, simple_loss=0.2874, pruned_loss=0.05713, over 19357.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06247, over 3836002.79 frames. 
], batch size: 66, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:17:43,431 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154817.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:02,697 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154834.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:05,369 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:13,499 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:18:17,275 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.757e+02 5.456e+02 7.137e+02 2.039e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-04-03 00:18:39,894 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3595, 1.4329, 1.6367, 1.4210, 2.9590, 1.0279, 2.3252, 3.4364], + device='cuda:3'), covar=tensor([0.0555, 0.2726, 0.2665, 0.1984, 0.0723, 0.2586, 0.1267, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0411, 0.0367, 0.0389, 0.0350, 0.0373, 0.0350, 0.0383, 0.0404], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:18:41,882 INFO [train.py:903] (3/4) Epoch 23, batch 4650, loss[loss=0.1736, simple_loss=0.2522, pruned_loss=0.04751, over 19475.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2854, pruned_loss=0.06257, over 3828627.93 frames. ], batch size: 49, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:18:45,721 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4921, 1.4185, 1.3708, 1.7655, 1.3728, 1.7157, 1.7370, 1.5352], + device='cuda:3'), covar=tensor([0.0894, 0.0967, 0.1084, 0.0717, 0.0862, 0.0780, 0.0771, 0.0735], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0221, 0.0226, 0.0240, 0.0226, 0.0212, 0.0187, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-03 00:18:57,547 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 00:19:09,934 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 00:19:42,549 INFO [train.py:903] (3/4) Epoch 23, batch 4700, loss[loss=0.2273, simple_loss=0.2913, pruned_loss=0.08168, over 19391.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2857, pruned_loss=0.06265, over 3833586.27 frames. ], batch size: 47, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:20:04,434 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 00:20:17,977 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.550e+02 5.511e+02 7.065e+02 1.410e+03, threshold=1.102e+03, percent-clipped=2.0 +2023-04-03 00:20:25,040 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154951.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:28,291 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:20:44,148 INFO [train.py:903] (3/4) Epoch 23, batch 4750, loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.0575, over 19677.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2858, pruned_loss=0.0628, over 3839094.95 frames. 
], batch size: 59, lr: 3.56e-03, grad_scale: 4.0 +2023-04-03 00:21:00,287 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:12,344 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=154990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:17,064 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:22,627 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0187, 2.1095, 2.2642, 2.6329, 2.0380, 2.6343, 2.2106, 2.0663], + device='cuda:3'), covar=tensor([0.4353, 0.3849, 0.2022, 0.2381, 0.3966, 0.2054, 0.5361, 0.3473], + device='cuda:3'), in_proj_covar=tensor([0.0907, 0.0975, 0.0723, 0.0937, 0.0886, 0.0825, 0.0846, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 00:21:45,312 INFO [train.py:903] (3/4) Epoch 23, batch 4800, loss[loss=0.2232, simple_loss=0.2917, pruned_loss=0.07737, over 19482.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06257, over 3848854.57 frames. ], batch size: 49, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:21:49,080 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:21:54,341 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:18,867 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:19,572 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.470e+02 5.324e+02 6.216e+02 7.674e+02 2.163e+03, threshold=1.243e+03, percent-clipped=8.0 +2023-04-03 00:22:26,106 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:22:44,546 INFO [train.py:903] (3/4) Epoch 23, batch 4850, loss[loss=0.2071, simple_loss=0.29, pruned_loss=0.06207, over 19526.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2869, pruned_loss=0.06296, over 3836697.44 frames. 
], batch size: 56, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:22:49,284 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0510, 1.9676, 1.7642, 1.6181, 1.2603, 1.5107, 0.5699, 0.9422], + device='cuda:3'), covar=tensor([0.0803, 0.0755, 0.0588, 0.0940, 0.1634, 0.1292, 0.1527, 0.1325], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0353, 0.0360, 0.0383, 0.0461, 0.0389, 0.0336, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:22:49,293 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155070.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:22:49,326 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6537, 1.8073, 1.7716, 2.5649, 1.8699, 2.4352, 1.8555, 1.5346], + device='cuda:3'), covar=tensor([0.5278, 0.4651, 0.3078, 0.3194, 0.4845, 0.2655, 0.6791, 0.5537], + device='cuda:3'), in_proj_covar=tensor([0.0906, 0.0975, 0.0722, 0.0937, 0.0887, 0.0824, 0.0845, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 00:23:03,365 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:09,618 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 00:23:11,892 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:21,450 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155095.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 00:23:29,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 00:23:32,600 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:23:34,406 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 00:23:34,434 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 00:23:44,624 INFO [train.py:903] (3/4) Epoch 23, batch 4900, loss[loss=0.1711, simple_loss=0.2543, pruned_loss=0.04398, over 19725.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2863, pruned_loss=0.06273, over 3823414.70 frames. ], batch size: 51, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:23:44,636 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 00:24:04,411 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 00:24:20,243 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 5.163e+02 5.938e+02 7.647e+02 1.407e+03, threshold=1.188e+03, percent-clipped=5.0 +2023-04-03 00:24:25,548 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.46 vs. limit=2.0 +2023-04-03 00:24:46,212 INFO [train.py:903] (3/4) Epoch 23, batch 4950, loss[loss=0.1954, simple_loss=0.2784, pruned_loss=0.05623, over 19769.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.286, pruned_loss=0.06221, over 3825958.64 frames. 
], batch size: 54, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:01,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 00:25:21,567 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155197.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:23,143 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 00:25:29,804 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8826, 1.9704, 2.1871, 2.4719, 1.8781, 2.3236, 2.2306, 2.0670], + device='cuda:3'), covar=tensor([0.4078, 0.3602, 0.1805, 0.2245, 0.3797, 0.2045, 0.4615, 0.3108], + device='cuda:3'), in_proj_covar=tensor([0.0910, 0.0978, 0.0725, 0.0940, 0.0890, 0.0827, 0.0847, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 00:25:34,815 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:25:44,527 INFO [train.py:903] (3/4) Epoch 23, batch 5000, loss[loss=0.1812, simple_loss=0.2733, pruned_loss=0.04451, over 19539.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2862, pruned_loss=0.06237, over 3829353.60 frames. ], batch size: 56, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:25:52,522 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 00:25:54,041 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3735, 2.3802, 2.5305, 3.1994, 2.4316, 3.0406, 2.6566, 2.5211], + device='cuda:3'), covar=tensor([0.4150, 0.4089, 0.1933, 0.2464, 0.4349, 0.2116, 0.4660, 0.3189], + device='cuda:3'), in_proj_covar=tensor([0.0911, 0.0979, 0.0726, 0.0941, 0.0891, 0.0828, 0.0849, 0.0790], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 00:26:02,955 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:26:03,632 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 00:26:19,061 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.751e+02 5.889e+02 7.363e+02 1.722e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 00:26:43,567 INFO [train.py:903] (3/4) Epoch 23, batch 5050, loss[loss=0.1769, simple_loss=0.2557, pruned_loss=0.04902, over 19835.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06136, over 3847420.67 frames. ], batch size: 52, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:17,601 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 00:27:41,260 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. limit=5.0 +2023-04-03 00:27:42,610 INFO [train.py:903] (3/4) Epoch 23, batch 5100, loss[loss=0.2018, simple_loss=0.2693, pruned_loss=0.06713, over 19798.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2849, pruned_loss=0.06196, over 3833869.19 frames. 
], batch size: 49, lr: 3.56e-03, grad_scale: 8.0 +2023-04-03 00:27:45,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7638, 1.8442, 2.1919, 2.2681, 1.6909, 2.1775, 2.1646, 1.9960], + device='cuda:3'), covar=tensor([0.4479, 0.4202, 0.2058, 0.2434, 0.4138, 0.2349, 0.5334, 0.3665], + device='cuda:3'), in_proj_covar=tensor([0.0907, 0.0976, 0.0722, 0.0937, 0.0888, 0.0824, 0.0847, 0.0787], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 00:27:53,104 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 00:27:56,480 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 00:28:01,507 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 00:28:10,587 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:18,270 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.450e+02 5.084e+02 6.467e+02 7.878e+02 1.414e+03, threshold=1.293e+03, percent-clipped=6.0 +2023-04-03 00:28:37,848 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:28:43,671 INFO [train.py:903] (3/4) Epoch 23, batch 5150, loss[loss=0.2309, simple_loss=0.3059, pruned_loss=0.07795, over 19663.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2853, pruned_loss=0.06249, over 3829315.31 frames. ], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:28:44,019 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7323, 1.4521, 1.6672, 1.5902, 3.3238, 1.1905, 2.5101, 3.7603], + device='cuda:3'), covar=tensor([0.0498, 0.2792, 0.2701, 0.1900, 0.0678, 0.2496, 0.1235, 0.0224], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0370, 0.0392, 0.0352, 0.0376, 0.0354, 0.0387, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:28:56,726 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 00:29:08,599 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:29:13,153 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4655, 1.5362, 1.7480, 1.6809, 2.2847, 2.1507, 2.3688, 0.9100], + device='cuda:3'), covar=tensor([0.2400, 0.4143, 0.2628, 0.1884, 0.1513, 0.2160, 0.1423, 0.4693], + device='cuda:3'), in_proj_covar=tensor([0.0538, 0.0647, 0.0719, 0.0490, 0.0622, 0.0535, 0.0659, 0.0553], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:29:30,439 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-03 00:29:38,190 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7349, 4.0436, 4.5369, 4.5706, 1.9838, 4.2624, 3.6072, 3.9392], + device='cuda:3'), covar=tensor([0.2109, 0.1471, 0.0980, 0.1237, 0.6880, 0.1799, 0.1283, 0.1980], + device='cuda:3'), in_proj_covar=tensor([0.0787, 0.0751, 0.0958, 0.0838, 0.0841, 0.0719, 0.0574, 0.0890], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 00:29:45,066 INFO [train.py:903] (3/4) Epoch 23, batch 5200, loss[loss=0.23, simple_loss=0.2971, pruned_loss=0.08142, over 19477.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2846, pruned_loss=0.06203, over 3829890.54 frames. ], batch size: 49, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:29:58,653 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 00:30:02,273 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:19,770 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 5.305e+02 6.432e+02 7.969e+02 2.733e+03, threshold=1.286e+03, percent-clipped=6.0 +2023-04-03 00:30:30,716 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:30,784 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155453.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:41,482 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 00:30:44,914 INFO [train.py:903] (3/4) Epoch 23, batch 5250, loss[loss=0.2054, simple_loss=0.2883, pruned_loss=0.06126, over 19686.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2855, pruned_loss=0.06247, over 3824179.07 frames. ], batch size: 60, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:30:55,622 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155475.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:30:59,196 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:31:45,243 INFO [train.py:903] (3/4) Epoch 23, batch 5300, loss[loss=0.1963, simple_loss=0.2738, pruned_loss=0.05942, over 19581.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2865, pruned_loss=0.06275, over 3819163.01 frames. ], batch size: 52, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:32:03,697 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 00:32:21,416 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.105e+02 4.703e+02 5.856e+02 7.687e+02 1.612e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 00:32:22,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:32:46,442 INFO [train.py:903] (3/4) Epoch 23, batch 5350, loss[loss=0.1941, simple_loss=0.2818, pruned_loss=0.05317, over 19651.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2868, pruned_loss=0.06275, over 3809954.88 frames. ], batch size: 55, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:18,092 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-03 00:33:46,943 INFO [train.py:903] (3/4) Epoch 23, batch 5400, loss[loss=0.1876, simple_loss=0.2586, pruned_loss=0.05823, over 19778.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.287, pruned_loss=0.06304, over 3809017.37 frames. ], batch size: 47, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:33:56,225 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155623.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:34:21,905 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.892e+02 4.747e+02 5.806e+02 7.220e+02 1.360e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 00:34:36,328 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5428, 1.6005, 2.0976, 1.7847, 3.1091, 4.7658, 4.5796, 5.1940], + device='cuda:3'), covar=tensor([0.1477, 0.3569, 0.3086, 0.2119, 0.0574, 0.0170, 0.0167, 0.0179], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0325, 0.0355, 0.0265, 0.0246, 0.0190, 0.0218, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 00:34:48,073 INFO [train.py:903] (3/4) Epoch 23, batch 5450, loss[loss=0.1866, simple_loss=0.2763, pruned_loss=0.04847, over 19521.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06251, over 3805279.43 frames. ], batch size: 54, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:34:58,345 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155675.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:10,198 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1103, 1.3026, 1.7290, 1.2137, 2.7458, 3.7233, 3.4042, 3.8796], + device='cuda:3'), covar=tensor([0.1746, 0.3962, 0.3541, 0.2625, 0.0629, 0.0219, 0.0220, 0.0278], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0325, 0.0354, 0.0265, 0.0246, 0.0190, 0.0218, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 00:35:39,183 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 00:35:39,988 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:35:47,581 INFO [train.py:903] (3/4) Epoch 23, batch 5500, loss[loss=0.2545, simple_loss=0.3271, pruned_loss=0.09091, over 19278.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2862, pruned_loss=0.0629, over 3801706.27 frames. ], batch size: 66, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:10,848 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:36:13,485 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 00:36:14,851 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7870, 4.3275, 2.8353, 3.8922, 0.8885, 4.3483, 4.2033, 4.3066], + device='cuda:3'), covar=tensor([0.0631, 0.0979, 0.1935, 0.0790, 0.4204, 0.0636, 0.0875, 0.1168], + device='cuda:3'), in_proj_covar=tensor([0.0518, 0.0418, 0.0502, 0.0352, 0.0404, 0.0442, 0.0433, 0.0467], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:36:21,510 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3100, 1.4228, 1.6893, 1.6147, 2.2311, 1.9463, 2.1703, 0.9122], + device='cuda:3'), covar=tensor([0.2768, 0.4663, 0.2967, 0.2207, 0.1693, 0.2650, 0.1770, 0.5099], + device='cuda:3'), in_proj_covar=tensor([0.0539, 0.0650, 0.0722, 0.0492, 0.0625, 0.0539, 0.0662, 0.0556], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:36:24,174 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.057e+02 6.298e+02 8.158e+02 1.659e+03, threshold=1.260e+03, percent-clipped=6.0 +2023-04-03 00:36:46,709 INFO [train.py:903] (3/4) Epoch 23, batch 5550, loss[loss=0.1792, simple_loss=0.2628, pruned_loss=0.04781, over 19400.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2862, pruned_loss=0.06293, over 3816776.42 frames. ], batch size: 48, lr: 3.55e-03, grad_scale: 4.0 +2023-04-03 00:36:56,205 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 00:37:30,539 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:30,684 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:35,029 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9058, 1.3143, 1.0778, 1.0422, 1.1644, 1.0301, 1.0358, 1.2622], + device='cuda:3'), covar=tensor([0.0640, 0.0941, 0.1163, 0.0751, 0.0633, 0.1408, 0.0568, 0.0524], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0314, 0.0336, 0.0265, 0.0247, 0.0339, 0.0289, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:37:42,230 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 00:37:48,755 INFO [train.py:903] (3/4) Epoch 23, batch 5600, loss[loss=0.1678, simple_loss=0.2434, pruned_loss=0.0461, over 19742.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2878, pruned_loss=0.06384, over 3813774.74 frames. 
], batch size: 46, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:37:52,350 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155819.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:37:59,040 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3245, 3.0824, 2.4104, 2.4125, 2.1798, 2.6514, 0.9664, 2.1789], + device='cuda:3'), covar=tensor([0.0676, 0.0592, 0.0731, 0.1134, 0.1142, 0.1109, 0.1576, 0.1154], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0353, 0.0360, 0.0384, 0.0462, 0.0389, 0.0337, 0.0342], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 00:38:01,977 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-03 00:38:02,554 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:38:23,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 5.365e+02 7.033e+02 8.601e+02 1.530e+03, threshold=1.407e+03, percent-clipped=6.0 +2023-04-03 00:38:48,667 INFO [train.py:903] (3/4) Epoch 23, batch 5650, loss[loss=0.2401, simple_loss=0.3153, pruned_loss=0.08248, over 19469.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06401, over 3813115.39 frames. ], batch size: 64, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:38:50,814 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-03 00:39:33,331 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 00:39:47,802 INFO [train.py:903] (3/4) Epoch 23, batch 5700, loss[loss=0.2429, simple_loss=0.3186, pruned_loss=0.08362, over 18040.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2875, pruned_loss=0.0644, over 3810235.35 frames. ], batch size: 83, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:10,805 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:17,505 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3858, 1.3079, 1.7779, 1.4288, 2.7119, 3.7455, 3.4616, 3.9625], + device='cuda:3'), covar=tensor([0.1505, 0.3901, 0.3384, 0.2397, 0.0595, 0.0176, 0.0206, 0.0243], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0324, 0.0353, 0.0264, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 00:40:24,773 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 4.906e+02 6.184e+02 7.924e+02 2.131e+03, threshold=1.237e+03, percent-clipped=2.0 +2023-04-03 00:40:47,848 INFO [train.py:903] (3/4) Epoch 23, batch 5750, loss[loss=0.2027, simple_loss=0.2848, pruned_loss=0.06024, over 19569.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2871, pruned_loss=0.06387, over 3820076.14 frames. ], batch size: 61, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:40:50,070 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=155967.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:40:51,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 00:40:58,276 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.80 vs. 
limit=5.0 +2023-04-03 00:40:58,764 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 00:41:04,145 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 00:41:50,977 INFO [train.py:903] (3/4) Epoch 23, batch 5800, loss[loss=0.1975, simple_loss=0.2818, pruned_loss=0.05657, over 18697.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2868, pruned_loss=0.06393, over 3815883.52 frames. ], batch size: 74, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:41:54,506 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156019.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:42:25,081 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 4.982e+02 6.301e+02 7.857e+02 1.493e+03, threshold=1.260e+03, percent-clipped=3.0 +2023-04-03 00:42:39,979 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 00:42:50,189 INFO [train.py:903] (3/4) Epoch 23, batch 5850, loss[loss=0.257, simple_loss=0.3209, pruned_loss=0.09655, over 13921.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2869, pruned_loss=0.06398, over 3809437.69 frames. ], batch size: 136, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:42:56,254 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 00:43:08,258 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:15,708 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156088.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:43:48,548 INFO [train.py:903] (3/4) Epoch 23, batch 5900, loss[loss=0.1678, simple_loss=0.2457, pruned_loss=0.04491, over 19763.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2867, pruned_loss=0.06371, over 3820490.42 frames. ], batch size: 46, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:43:52,974 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 00:44:08,213 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 00:44:10,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:14,067 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 00:44:24,575 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.271e+02 5.043e+02 6.365e+02 8.179e+02 2.050e+03, threshold=1.273e+03, percent-clipped=8.0 +2023-04-03 00:44:25,631 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:44:48,124 INFO [train.py:903] (3/4) Epoch 23, batch 5950, loss[loss=0.1743, simple_loss=0.2508, pruned_loss=0.04891, over 19797.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2869, pruned_loss=0.06406, over 3824032.73 frames. 
], batch size: 47, lr: 3.55e-03, grad_scale: 8.0 +2023-04-03 00:45:19,209 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:47,986 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:45:48,679 INFO [train.py:903] (3/4) Epoch 23, batch 6000, loss[loss=0.1889, simple_loss=0.2689, pruned_loss=0.05441, over 19752.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2874, pruned_loss=0.06437, over 3804428.36 frames. ], batch size: 48, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:45:48,679 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 00:46:01,141 INFO [train.py:937] (3/4) Epoch 23, validation: loss=0.1686, simple_loss=0.2684, pruned_loss=0.03439, over 944034.00 frames. +2023-04-03 00:46:01,142 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 00:46:23,292 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:46:37,238 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 4.873e+02 6.527e+02 8.069e+02 1.468e+03, threshold=1.305e+03, percent-clipped=4.0 +2023-04-03 00:46:55,749 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156261.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:47:01,920 INFO [train.py:903] (3/4) Epoch 23, batch 6050, loss[loss=0.2087, simple_loss=0.2914, pruned_loss=0.06304, over 19759.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2879, pruned_loss=0.06445, over 3808227.21 frames. ], batch size: 63, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:47:13,011 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-03 00:47:53,117 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:02,112 INFO [train.py:903] (3/4) Epoch 23, batch 6100, loss[loss=0.2267, simple_loss=0.3034, pruned_loss=0.07496, over 19549.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2882, pruned_loss=0.0643, over 3822949.64 frames. ], batch size: 54, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:48:27,997 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156338.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:48:37,367 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.768e+02 6.249e+02 8.138e+02 1.749e+03, threshold=1.250e+03, percent-clipped=2.0 +2023-04-03 00:48:58,814 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:01,830 INFO [train.py:903] (3/4) Epoch 23, batch 6150, loss[loss=0.2892, simple_loss=0.3523, pruned_loss=0.113, over 19764.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.287, pruned_loss=0.06397, over 3828891.82 frames. ], batch size: 63, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:49:31,069 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:49:31,828 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. 
Duration: 31.02225 +2023-04-03 00:50:00,534 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156415.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:01,340 INFO [train.py:903] (3/4) Epoch 23, batch 6200, loss[loss=0.2017, simple_loss=0.285, pruned_loss=0.05923, over 17702.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2878, pruned_loss=0.06396, over 3817719.21 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:50:22,393 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156432.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:50:38,865 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.999e+02 4.815e+02 5.704e+02 6.895e+02 2.552e+03, threshold=1.141e+03, percent-clipped=3.0 +2023-04-03 00:51:02,806 INFO [train.py:903] (3/4) Epoch 23, batch 6250, loss[loss=0.2171, simple_loss=0.2939, pruned_loss=0.07015, over 19686.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2883, pruned_loss=0.06417, over 3807191.58 frames. ], batch size: 53, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:51:23,600 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-03 00:51:29,562 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0976, 4.4765, 4.8151, 4.8180, 1.8921, 4.5254, 3.9213, 4.5127], + device='cuda:3'), covar=tensor([0.1648, 0.0815, 0.0628, 0.0671, 0.5924, 0.0897, 0.0657, 0.1192], + device='cuda:3'), in_proj_covar=tensor([0.0780, 0.0746, 0.0948, 0.0826, 0.0833, 0.0715, 0.0569, 0.0880], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 00:51:32,680 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 00:52:02,820 INFO [train.py:903] (3/4) Epoch 23, batch 6300, loss[loss=0.2988, simple_loss=0.3608, pruned_loss=0.1184, over 17506.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2883, pruned_loss=0.06395, over 3799940.27 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:52:04,466 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:34,305 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156542.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:52:39,265 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 5.353e+02 6.743e+02 8.019e+02 1.408e+03, threshold=1.349e+03, percent-clipped=4.0 +2023-04-03 00:52:40,698 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:03,411 INFO [train.py:903] (3/4) Epoch 23, batch 6350, loss[loss=0.2219, simple_loss=0.3066, pruned_loss=0.06859, over 17454.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2877, pruned_loss=0.06353, over 3813838.64 frames. ], batch size: 101, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:53:17,256 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:53:22,118 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-03 00:54:02,664 INFO [train.py:903] (3/4) Epoch 23, batch 6400, loss[loss=0.1741, simple_loss=0.2513, pruned_loss=0.04846, over 19733.00 frames. 
], tot_loss[loss=0.2082, simple_loss=0.2885, pruned_loss=0.06397, over 3815814.41 frames. ], batch size: 46, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:54:39,387 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 4.834e+02 5.927e+02 7.987e+02 2.615e+03, threshold=1.185e+03, percent-clipped=4.0 +2023-04-03 00:54:46,364 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=156652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:04,062 INFO [train.py:903] (3/4) Epoch 23, batch 6450, loss[loss=0.2511, simple_loss=0.3222, pruned_loss=0.09004, over 19666.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2893, pruned_loss=0.06469, over 3806862.08 frames. ], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:55:35,962 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:55:47,917 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 00:56:04,449 INFO [train.py:903] (3/4) Epoch 23, batch 6500, loss[loss=0.2235, simple_loss=0.2823, pruned_loss=0.08234, over 19753.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2888, pruned_loss=0.06441, over 3822189.96 frames. ], batch size: 45, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:56:10,078 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 00:56:39,934 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.419e+02 5.077e+02 6.090e+02 8.057e+02 1.603e+03, threshold=1.218e+03, percent-clipped=6.0 +2023-04-03 00:57:04,749 INFO [train.py:903] (3/4) Epoch 23, batch 6550, loss[loss=0.305, simple_loss=0.3524, pruned_loss=0.1288, over 13282.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.289, pruned_loss=0.06461, over 3799765.68 frames. ], batch size: 137, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:57:06,356 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:06,424 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:57:50,677 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:04,880 INFO [train.py:903] (3/4) Epoch 23, batch 6600, loss[loss=0.1926, simple_loss=0.2797, pruned_loss=0.05276, over 19327.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2882, pruned_loss=0.06393, over 3801995.13 frames. 
], batch size: 66, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:58:16,146 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2113, 5.6749, 3.2051, 4.9183, 0.9674, 5.8414, 5.5908, 5.8755], + device='cuda:3'), covar=tensor([0.0381, 0.0915, 0.1656, 0.0741, 0.4084, 0.0440, 0.0779, 0.0832], + device='cuda:3'), in_proj_covar=tensor([0.0518, 0.0414, 0.0499, 0.0351, 0.0403, 0.0437, 0.0435, 0.0469], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 00:58:20,348 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 00:58:41,922 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.453e+02 5.163e+02 6.336e+02 8.010e+02 1.885e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 00:59:05,158 INFO [train.py:903] (3/4) Epoch 23, batch 6650, loss[loss=0.2017, simple_loss=0.2868, pruned_loss=0.0583, over 19654.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06286, over 3813761.44 frames. ], batch size: 58, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 00:59:46,891 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2874, 1.5237, 1.8722, 1.4442, 2.7925, 3.4682, 3.2919, 3.5871], + device='cuda:3'), covar=tensor([0.1713, 0.3579, 0.3284, 0.2523, 0.0697, 0.0229, 0.0219, 0.0341], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0265, 0.0245, 0.0190, 0.0216, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 00:59:51,184 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156903.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:00:07,306 INFO [train.py:903] (3/4) Epoch 23, batch 6700, loss[loss=0.2461, simple_loss=0.3205, pruned_loss=0.08587, over 19429.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.0633, over 3804461.10 frames. ], batch size: 70, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:00:17,072 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 01:00:41,740 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.603e+02 5.252e+02 6.535e+02 7.903e+02 1.565e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-04-03 01:00:44,238 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9741, 1.2445, 1.6315, 0.6121, 2.0669, 2.4440, 2.1534, 2.6041], + device='cuda:3'), covar=tensor([0.1537, 0.3816, 0.3305, 0.2831, 0.0614, 0.0292, 0.0348, 0.0370], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0324, 0.0353, 0.0264, 0.0245, 0.0190, 0.0216, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 01:00:45,432 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:04,464 INFO [train.py:903] (3/4) Epoch 23, batch 6750, loss[loss=0.2671, simple_loss=0.3477, pruned_loss=0.09324, over 19665.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2862, pruned_loss=0.06355, over 3792241.18 frames. 
], batch size: 60, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:01:13,650 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:01:29,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0561, 2.1347, 2.4489, 2.6713, 2.0091, 2.6077, 2.3805, 2.1929], + device='cuda:3'), covar=tensor([0.4430, 0.4130, 0.1950, 0.2672, 0.4446, 0.2254, 0.5122, 0.3390], + device='cuda:3'), in_proj_covar=tensor([0.0909, 0.0978, 0.0723, 0.0935, 0.0890, 0.0824, 0.0847, 0.0787], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 01:02:00,909 INFO [train.py:903] (3/4) Epoch 23, batch 6800, loss[loss=0.1737, simple_loss=0.2526, pruned_loss=0.04744, over 19755.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.286, pruned_loss=0.06364, over 3775907.75 frames. ], batch size: 47, lr: 3.54e-03, grad_scale: 8.0 +2023-04-03 01:02:09,471 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:13,976 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3588, 2.0321, 1.6171, 1.3832, 1.8555, 1.3883, 1.2672, 1.8501], + device='cuda:3'), covar=tensor([0.0986, 0.0904, 0.1118, 0.0896, 0.0622, 0.1241, 0.0724, 0.0478], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0314, 0.0337, 0.0267, 0.0247, 0.0338, 0.0289, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:02:14,983 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:02:22,951 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7741, 1.9607, 2.2664, 2.3000, 3.0251, 3.6239, 3.4764, 3.9958], + device='cuda:3'), covar=tensor([0.1550, 0.3871, 0.3513, 0.2122, 0.1283, 0.0454, 0.0265, 0.0291], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0325, 0.0354, 0.0265, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 01:02:44,734 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 01:02:45,839 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 01:02:48,304 INFO [train.py:903] (3/4) Epoch 24, batch 0, loss[loss=0.1896, simple_loss=0.2604, pruned_loss=0.05942, over 19307.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2604, pruned_loss=0.05942, over 19307.00 frames. ], batch size: 44, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:02:48,304 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 01:02:59,924 INFO [train.py:937] (3/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2685, pruned_loss=0.03408, over 944034.00 frames. 
+2023-04-03 01:02:59,925 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 01:03:03,177 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 5.212e+02 6.445e+02 8.399e+02 3.393e+03, threshold=1.289e+03, percent-clipped=7.0 +2023-04-03 01:03:05,651 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:03:12,283 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 01:04:00,820 INFO [train.py:903] (3/4) Epoch 24, batch 50, loss[loss=0.1723, simple_loss=0.2614, pruned_loss=0.04163, over 19775.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2896, pruned_loss=0.06414, over 858159.56 frames. ], batch size: 54, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:04:01,277 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1782, 2.1119, 1.9300, 1.7561, 1.7168, 1.7522, 0.5046, 1.0692], + device='cuda:3'), covar=tensor([0.0572, 0.0617, 0.0467, 0.0747, 0.1164, 0.0865, 0.1379, 0.1143], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0357, 0.0362, 0.0386, 0.0466, 0.0391, 0.0338, 0.0342], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:04:20,669 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:04:33,371 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 01:04:47,077 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4750, 1.4833, 2.1610, 1.7870, 3.2417, 4.8053, 4.6678, 5.1728], + device='cuda:3'), covar=tensor([0.1558, 0.3824, 0.3202, 0.2227, 0.0587, 0.0170, 0.0159, 0.0173], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0326, 0.0355, 0.0266, 0.0246, 0.0190, 0.0217, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 01:05:01,215 INFO [train.py:903] (3/4) Epoch 24, batch 100, loss[loss=0.1583, simple_loss=0.2471, pruned_loss=0.03477, over 19679.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.0621, over 1532504.96 frames. ], batch size: 53, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:05:03,493 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 5.500e+02 6.534e+02 8.918e+02 1.825e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 01:05:11,342 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 01:05:19,853 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157160.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:05:26,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7541, 1.5426, 1.6989, 1.4793, 4.3215, 1.2748, 2.4603, 4.5444], + device='cuda:3'), covar=tensor([0.0483, 0.2808, 0.2904, 0.2098, 0.0736, 0.2558, 0.1623, 0.0218], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0368, 0.0390, 0.0351, 0.0374, 0.0350, 0.0383, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:06:02,079 INFO [train.py:903] (3/4) Epoch 24, batch 150, loss[loss=0.1934, simple_loss=0.2744, pruned_loss=0.0562, over 19676.00 frames. 
], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06051, over 2052476.99 frames. ], batch size: 53, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:06:42,476 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:01,360 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 01:07:02,476 INFO [train.py:903] (3/4) Epoch 24, batch 200, loss[loss=0.2095, simple_loss=0.3005, pruned_loss=0.05922, over 18838.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06163, over 2458751.09 frames. ], batch size: 74, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:07:04,623 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.193e+02 4.992e+02 5.973e+02 7.088e+02 2.080e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 01:07:05,926 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:07:12,107 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6289, 1.3456, 1.2492, 1.5354, 1.1622, 1.3788, 1.2186, 1.4574], + device='cuda:3'), covar=tensor([0.1038, 0.1113, 0.1576, 0.0989, 0.1267, 0.0606, 0.1515, 0.0825], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0356, 0.0314, 0.0252, 0.0304, 0.0253, 0.0312, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:08:03,064 INFO [train.py:903] (3/4) Epoch 24, batch 250, loss[loss=0.1968, simple_loss=0.2874, pruned_loss=0.05307, over 19722.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06281, over 2761979.74 frames. ], batch size: 59, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:08:45,261 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6623, 1.5441, 1.5781, 2.1332, 1.7925, 1.8648, 1.8510, 1.7077], + device='cuda:3'), covar=tensor([0.0808, 0.0900, 0.0989, 0.0695, 0.0777, 0.0744, 0.0842, 0.0681], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0223, 0.0227, 0.0241, 0.0228, 0.0215, 0.0190, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 01:09:03,293 INFO [train.py:903] (3/4) Epoch 24, batch 300, loss[loss=0.2069, simple_loss=0.292, pruned_loss=0.06087, over 19615.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06347, over 3001104.85 frames. 
], batch size: 57, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:09:06,236 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.405e+02 6.557e+02 9.024e+02 1.464e+03, threshold=1.311e+03, percent-clipped=9.0 +2023-04-03 01:09:25,613 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157362.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:34,511 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4595, 1.0718, 1.3481, 1.0866, 2.1082, 0.9527, 2.0841, 2.3613], + device='cuda:3'), covar=tensor([0.1109, 0.3341, 0.3143, 0.2119, 0.1303, 0.2439, 0.1232, 0.0622], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0369, 0.0390, 0.0352, 0.0374, 0.0350, 0.0383, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:09:37,286 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:39,546 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:09:41,169 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-03 01:09:56,099 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3958, 2.1358, 1.6700, 1.3948, 1.9985, 1.3524, 1.3943, 1.8858], + device='cuda:3'), covar=tensor([0.0945, 0.0747, 0.0973, 0.0872, 0.0531, 0.1202, 0.0643, 0.0465], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0314, 0.0336, 0.0266, 0.0246, 0.0338, 0.0288, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:10:05,068 INFO [train.py:903] (3/4) Epoch 24, batch 350, loss[loss=0.1815, simple_loss=0.2754, pruned_loss=0.04378, over 19546.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2862, pruned_loss=0.06292, over 3188307.65 frames. ], batch size: 54, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:10:10,686 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 01:10:12,265 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:10:39,435 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157422.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:11:05,175 INFO [train.py:903] (3/4) Epoch 24, batch 400, loss[loss=0.2167, simple_loss=0.297, pruned_loss=0.06817, over 19323.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2865, pruned_loss=0.06288, over 3341503.92 frames. 
], batch size: 66, lr: 3.46e-03, grad_scale: 8.0 +2023-04-03 01:11:07,648 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.933e+02 4.825e+02 6.674e+02 8.153e+02 1.427e+03, threshold=1.335e+03, percent-clipped=2.0 +2023-04-03 01:11:48,277 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1789, 2.0806, 2.0035, 1.8463, 1.6772, 1.7551, 0.6203, 1.0766], + device='cuda:3'), covar=tensor([0.0595, 0.0604, 0.0404, 0.0699, 0.1126, 0.0859, 0.1315, 0.1082], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0354, 0.0361, 0.0384, 0.0463, 0.0392, 0.0337, 0.0340], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:11:52,611 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157482.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:11:56,178 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5695, 1.7419, 1.7691, 2.2613, 1.7384, 2.1647, 1.8690, 1.5102], + device='cuda:3'), covar=tensor([0.5467, 0.4433, 0.3061, 0.2907, 0.4328, 0.2534, 0.6909, 0.5602], + device='cuda:3'), in_proj_covar=tensor([0.0908, 0.0977, 0.0723, 0.0934, 0.0888, 0.0825, 0.0847, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 01:11:58,254 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:01,775 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1328, 1.8387, 1.5362, 1.2874, 1.6757, 1.3161, 1.1625, 1.5878], + device='cuda:3'), covar=tensor([0.0854, 0.0812, 0.1037, 0.0844, 0.0536, 0.1282, 0.0629, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0338, 0.0267, 0.0247, 0.0339, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:12:05,707 INFO [train.py:903] (3/4) Epoch 24, batch 450, loss[loss=0.2129, simple_loss=0.286, pruned_loss=0.06985, over 19736.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06256, over 3455265.60 frames. ], batch size: 51, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:12:20,127 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:24,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:12:31,978 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7193, 1.7500, 1.6297, 1.4001, 1.4195, 1.4531, 0.2526, 0.7075], + device='cuda:3'), covar=tensor([0.0740, 0.0681, 0.0448, 0.0693, 0.1342, 0.0798, 0.1318, 0.1172], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0354, 0.0362, 0.0384, 0.0464, 0.0392, 0.0338, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:12:40,680 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 01:12:40,717 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 01:12:43,480 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1424, 1.8191, 1.4887, 1.2804, 1.6093, 1.2801, 1.2075, 1.6362], + device='cuda:3'), covar=tensor([0.0870, 0.0844, 0.1186, 0.0904, 0.0619, 0.1391, 0.0652, 0.0529], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0339, 0.0268, 0.0248, 0.0340, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:12:44,468 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157524.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:13:08,946 INFO [train.py:903] (3/4) Epoch 24, batch 500, loss[loss=0.1996, simple_loss=0.2828, pruned_loss=0.05819, over 19667.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2857, pruned_loss=0.06268, over 3539829.90 frames. ], batch size: 60, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:13:12,128 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.203e+02 6.096e+02 8.720e+02 1.456e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 01:14:02,846 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:12,214 INFO [train.py:903] (3/4) Epoch 24, batch 550, loss[loss=0.2466, simple_loss=0.3145, pruned_loss=0.08934, over 18003.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2864, pruned_loss=0.06288, over 3603659.67 frames. ], batch size: 83, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:14:32,907 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4574, 1.5482, 1.7470, 1.6973, 2.6390, 2.2656, 2.7268, 1.0277], + device='cuda:3'), covar=tensor([0.2562, 0.4290, 0.2818, 0.1983, 0.1487, 0.2150, 0.1451, 0.4764], + device='cuda:3'), in_proj_covar=tensor([0.0538, 0.0647, 0.0721, 0.0491, 0.0620, 0.0534, 0.0661, 0.0552], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:14:41,087 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157618.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:14:42,244 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,033 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:15:13,772 INFO [train.py:903] (3/4) Epoch 24, batch 600, loss[loss=0.2525, simple_loss=0.3309, pruned_loss=0.08708, over 18313.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2863, pruned_loss=0.06273, over 3665289.88 frames. 
], batch size: 83, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:15:15,912 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.272e+02 4.646e+02 5.574e+02 6.767e+02 1.170e+03, threshold=1.115e+03, percent-clipped=0.0 +2023-04-03 01:15:28,544 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4105, 3.9972, 2.6672, 3.5401, 0.9228, 3.9714, 3.8642, 3.9271], + device='cuda:3'), covar=tensor([0.0661, 0.1094, 0.1965, 0.0935, 0.3953, 0.0780, 0.0885, 0.1234], + device='cuda:3'), in_proj_covar=tensor([0.0515, 0.0415, 0.0498, 0.0349, 0.0402, 0.0439, 0.0432, 0.0468], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:15:29,132 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.83 vs. limit=5.0 +2023-04-03 01:15:39,900 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0069, 4.4105, 4.6949, 4.7232, 1.7663, 4.4423, 3.8793, 4.4168], + device='cuda:3'), covar=tensor([0.1639, 0.0788, 0.0586, 0.0666, 0.6092, 0.0787, 0.0655, 0.1130], + device='cuda:3'), in_proj_covar=tensor([0.0792, 0.0757, 0.0965, 0.0844, 0.0850, 0.0731, 0.0580, 0.0889], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 01:15:53,094 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 01:16:14,773 INFO [train.py:903] (3/4) Epoch 24, batch 650, loss[loss=0.2065, simple_loss=0.2939, pruned_loss=0.05952, over 19655.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06274, over 3707075.12 frames. ], batch size: 60, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:16:45,986 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157718.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:14,721 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157743.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:15,392 INFO [train.py:903] (3/4) Epoch 24, batch 700, loss[loss=0.1912, simple_loss=0.2671, pruned_loss=0.05763, over 19493.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2859, pruned_loss=0.06277, over 3721498.15 frames. ], batch size: 49, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:17:15,554 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:17:20,750 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.032e+02 5.232e+02 6.869e+02 8.195e+02 1.483e+03, threshold=1.374e+03, percent-clipped=7.0 +2023-04-03 01:17:44,071 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157766.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:17:46,339 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:18:19,086 INFO [train.py:903] (3/4) Epoch 24, batch 750, loss[loss=0.2476, simple_loss=0.3276, pruned_loss=0.0838, over 19320.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2877, pruned_loss=0.06359, over 3746649.65 frames. ], batch size: 70, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:18:31,207 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. 
limit=5.0 +2023-04-03 01:19:06,846 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:19,800 INFO [train.py:903] (3/4) Epoch 24, batch 800, loss[loss=0.2005, simple_loss=0.2714, pruned_loss=0.06484, over 19396.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2872, pruned_loss=0.06312, over 3774412.23 frames. ], batch size: 48, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:19:23,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 5.074e+02 6.358e+02 8.526e+02 1.766e+03, threshold=1.272e+03, percent-clipped=4.0 +2023-04-03 01:19:30,234 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 01:19:37,379 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157859.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:37,397 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6787, 1.5164, 1.5556, 2.2687, 1.7100, 1.8840, 1.8914, 1.7418], + device='cuda:3'), covar=tensor([0.0867, 0.0948, 0.1051, 0.0725, 0.0880, 0.0851, 0.0923, 0.0716], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0224, 0.0229, 0.0242, 0.0229, 0.0215, 0.0191, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 01:19:48,547 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157868.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:19:52,870 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:19:59,192 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157875.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:05,682 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157881.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:20:20,146 INFO [train.py:903] (3/4) Epoch 24, batch 850, loss[loss=0.2223, simple_loss=0.2967, pruned_loss=0.07389, over 19678.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.06254, over 3788593.71 frames. ], batch size: 60, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:20:27,479 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157900.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:20:27,510 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3418, 3.0320, 2.4581, 2.4127, 2.1761, 2.6301, 1.0075, 2.2321], + device='cuda:3'), covar=tensor([0.0702, 0.0649, 0.0692, 0.1025, 0.1164, 0.1072, 0.1460, 0.1131], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0357, 0.0363, 0.0388, 0.0467, 0.0394, 0.0339, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:20:37,623 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-03 01:21:05,431 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=157930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:21:08,630 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 01:21:21,347 INFO [train.py:903] (3/4) Epoch 24, batch 900, loss[loss=0.2241, simple_loss=0.3016, pruned_loss=0.07329, over 19615.00 frames. 
], tot_loss[loss=0.2055, simple_loss=0.2858, pruned_loss=0.06261, over 3785672.02 frames. ], batch size: 57, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:21:25,793 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.800e+02 5.808e+02 7.910e+02 1.683e+03, threshold=1.162e+03, percent-clipped=5.0 +2023-04-03 01:22:10,200 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157983.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:22:22,968 INFO [train.py:903] (3/4) Epoch 24, batch 950, loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06793, over 19780.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2858, pruned_loss=0.06264, over 3785665.50 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:22:23,033 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 01:23:26,864 INFO [train.py:903] (3/4) Epoch 24, batch 1000, loss[loss=0.1748, simple_loss=0.2535, pruned_loss=0.04806, over 19360.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2851, pruned_loss=0.06221, over 3799757.23 frames. ], batch size: 47, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:23:28,219 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:23:31,294 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.770e+02 5.075e+02 5.979e+02 8.043e+02 1.884e+03, threshold=1.196e+03, percent-clipped=5.0 +2023-04-03 01:24:17,498 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 01:24:22,286 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:22,533 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 01:24:27,542 INFO [train.py:903] (3/4) Epoch 24, batch 1050, loss[loss=0.1843, simple_loss=0.2708, pruned_loss=0.04896, over 19513.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2852, pruned_loss=0.06245, over 3806325.12 frames. ], batch size: 64, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:24:51,171 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158114.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:52,308 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158115.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:24:56,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 01:25:07,407 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9229, 1.6818, 1.8557, 1.6790, 4.4357, 1.2577, 2.7224, 4.8454], + device='cuda:3'), covar=tensor([0.0453, 0.2771, 0.2803, 0.2044, 0.0747, 0.2648, 0.1300, 0.0176], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0373, 0.0394, 0.0354, 0.0377, 0.0354, 0.0389, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:25:20,113 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158137.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:25:23,288 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,375 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158140.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:25:23,660 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-03 01:25:27,551 INFO [train.py:903] (3/4) Epoch 24, batch 1100, loss[loss=0.2196, simple_loss=0.3027, pruned_loss=0.06823, over 19648.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2861, pruned_loss=0.06329, over 3805180.89 frames. ], batch size: 55, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:25:31,914 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.699e+02 5.122e+02 6.777e+02 7.992e+02 2.032e+03, threshold=1.355e+03, percent-clipped=5.0 +2023-04-03 01:25:51,780 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158162.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:26:28,900 INFO [train.py:903] (3/4) Epoch 24, batch 1150, loss[loss=0.2164, simple_loss=0.3104, pruned_loss=0.06115, over 19382.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.0631, over 3813461.40 frames. ], batch size: 70, lr: 3.45e-03, grad_scale: 4.0 +2023-04-03 01:26:56,913 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:26:58,667 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-03 01:27:25,377 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158239.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:27:31,331 INFO [train.py:903] (3/4) Epoch 24, batch 1200, loss[loss=0.1755, simple_loss=0.2565, pruned_loss=0.04718, over 19393.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2854, pruned_loss=0.06295, over 3819917.14 frames. ], batch size: 48, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:27:37,766 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.926e+02 5.852e+02 7.782e+02 1.430e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 01:27:56,353 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158264.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:28:02,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 01:28:34,709 INFO [train.py:903] (3/4) Epoch 24, batch 1250, loss[loss=0.196, simple_loss=0.2746, pruned_loss=0.05868, over 19487.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2865, pruned_loss=0.06332, over 3812177.66 frames. 
], batch size: 49, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:28:43,367 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:15,088 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:20,361 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:29:35,685 INFO [train.py:903] (3/4) Epoch 24, batch 1300, loss[loss=0.1988, simple_loss=0.2843, pruned_loss=0.05669, over 19529.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2872, pruned_loss=0.06357, over 3810694.08 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 8.0 +2023-04-03 01:29:40,390 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.859e+02 5.283e+02 7.241e+02 8.945e+02 2.355e+03, threshold=1.448e+03, percent-clipped=9.0 +2023-04-03 01:30:24,672 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5157, 1.5960, 1.7660, 1.9852, 1.5531, 1.9524, 1.8152, 1.7443], + device='cuda:3'), covar=tensor([0.3299, 0.2945, 0.1567, 0.1746, 0.2977, 0.1620, 0.3538, 0.2498], + device='cuda:3'), in_proj_covar=tensor([0.0909, 0.0980, 0.0724, 0.0938, 0.0889, 0.0825, 0.0843, 0.0788], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 01:30:36,952 INFO [train.py:903] (3/4) Epoch 24, batch 1350, loss[loss=0.1842, simple_loss=0.2739, pruned_loss=0.04727, over 19526.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2872, pruned_loss=0.06366, over 3799608.62 frames. ], batch size: 64, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:30:48,163 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:31:38,843 INFO [train.py:903] (3/4) Epoch 24, batch 1400, loss[loss=0.1818, simple_loss=0.2739, pruned_loss=0.04484, over 19609.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2876, pruned_loss=0.06379, over 3808269.97 frames. ], batch size: 57, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:31:43,422 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.106e+02 6.656e+02 8.188e+02 2.197e+03, threshold=1.331e+03, percent-clipped=4.0 +2023-04-03 01:32:27,057 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:32:40,276 INFO [train.py:903] (3/4) Epoch 24, batch 1450, loss[loss=0.2068, simple_loss=0.2983, pruned_loss=0.05766, over 19701.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2882, pruned_loss=0.06389, over 3818122.81 frames. ], batch size: 59, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:32:40,706 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6893, 1.2535, 1.3228, 1.5440, 1.0964, 1.4517, 1.2777, 1.5098], + device='cuda:3'), covar=tensor([0.1113, 0.1192, 0.1637, 0.1016, 0.1393, 0.0635, 0.1592, 0.0852], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0357, 0.0315, 0.0254, 0.0306, 0.0254, 0.0314, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:32:42,618 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 01:32:52,791 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4481, 1.4917, 1.7291, 1.6817, 2.4287, 2.1430, 2.6513, 1.0526], + device='cuda:3'), covar=tensor([0.2499, 0.4292, 0.2622, 0.1896, 0.1532, 0.2146, 0.1326, 0.4593], + device='cuda:3'), in_proj_covar=tensor([0.0543, 0.0652, 0.0725, 0.0495, 0.0623, 0.0538, 0.0665, 0.0558], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:33:41,525 INFO [train.py:903] (3/4) Epoch 24, batch 1500, loss[loss=0.1881, simple_loss=0.277, pruned_loss=0.04961, over 19674.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2872, pruned_loss=0.0633, over 3818423.25 frames. ], batch size: 60, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:33:46,125 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.633e+02 4.870e+02 5.968e+02 7.477e+02 1.869e+03, threshold=1.194e+03, percent-clipped=2.0 +2023-04-03 01:34:33,946 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:34:42,446 INFO [train.py:903] (3/4) Epoch 24, batch 1550, loss[loss=0.2106, simple_loss=0.2806, pruned_loss=0.0703, over 19407.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2873, pruned_loss=0.06342, over 3819203.56 frames. ], batch size: 48, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:34:48,575 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:04,356 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:35:36,448 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-03 01:35:46,044 INFO [train.py:903] (3/4) Epoch 24, batch 1600, loss[loss=0.2094, simple_loss=0.2902, pruned_loss=0.06435, over 19660.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2883, pruned_loss=0.06358, over 3815152.71 frames. ], batch size: 55, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:35:51,809 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.433e+02 4.721e+02 6.000e+02 7.264e+02 1.836e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 01:36:12,729 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 01:36:50,304 INFO [train.py:903] (3/4) Epoch 24, batch 1650, loss[loss=0.2247, simple_loss=0.3042, pruned_loss=0.0726, over 19342.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.287, pruned_loss=0.06334, over 3809269.13 frames. ], batch size: 66, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:02,692 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1182, 1.3233, 1.5817, 1.3647, 2.7372, 1.1138, 2.2464, 3.0894], + device='cuda:3'), covar=tensor([0.0576, 0.2691, 0.2714, 0.1864, 0.0724, 0.2382, 0.1220, 0.0322], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0374, 0.0394, 0.0354, 0.0378, 0.0356, 0.0389, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:37:52,883 INFO [train.py:903] (3/4) Epoch 24, batch 1700, loss[loss=0.2846, simple_loss=0.3524, pruned_loss=0.1085, over 18117.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2867, pruned_loss=0.06296, over 3807900.51 frames. 
], batch size: 83, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:37:55,358 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=158746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:37:57,371 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.745e+02 5.793e+02 7.168e+02 1.456e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 01:38:35,593 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 01:38:54,543 INFO [train.py:903] (3/4) Epoch 24, batch 1750, loss[loss=0.2229, simple_loss=0.303, pruned_loss=0.07138, over 18242.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2873, pruned_loss=0.06325, over 3808142.33 frames. ], batch size: 83, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:38:56,127 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:03,155 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:39:57,550 INFO [train.py:903] (3/4) Epoch 24, batch 1800, loss[loss=0.1918, simple_loss=0.2637, pruned_loss=0.05991, over 19463.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2873, pruned_loss=0.06305, over 3813954.07 frames. ], batch size: 49, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:40:02,407 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.690e+02 5.979e+02 7.282e+02 2.087e+03, threshold=1.196e+03, percent-clipped=3.0 +2023-04-03 01:40:08,504 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158853.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:40:11,857 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158855.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:19,307 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:40,526 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:40:56,889 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 01:40:58,003 INFO [train.py:903] (3/4) Epoch 24, batch 1850, loss[loss=0.2102, simple_loss=0.2974, pruned_loss=0.06145, over 19705.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2867, pruned_loss=0.06326, over 3825149.79 frames. ], batch size: 59, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:41:33,337 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 01:42:00,400 INFO [train.py:903] (3/4) Epoch 24, batch 1900, loss[loss=0.2016, simple_loss=0.2869, pruned_loss=0.05814, over 19733.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2866, pruned_loss=0.06343, over 3809274.11 frames. ], batch size: 51, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:42:04,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.445e+02 4.936e+02 5.873e+02 7.509e+02 2.125e+03, threshold=1.175e+03, percent-clipped=8.0 +2023-04-03 01:42:18,814 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 01:42:24,109 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. 
Duration: 27.02225 +2023-04-03 01:42:41,454 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6617, 4.2512, 2.5699, 3.7820, 1.1490, 4.2233, 4.0917, 4.1871], + device='cuda:3'), covar=tensor([0.0584, 0.0999, 0.2011, 0.0826, 0.3689, 0.0625, 0.0848, 0.1170], + device='cuda:3'), in_proj_covar=tensor([0.0511, 0.0414, 0.0496, 0.0345, 0.0401, 0.0437, 0.0430, 0.0462], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:42:46,677 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 01:43:00,389 INFO [train.py:903] (3/4) Epoch 24, batch 1950, loss[loss=0.1659, simple_loss=0.2447, pruned_loss=0.04354, over 19742.00 frames. ], tot_loss[loss=0.207, simple_loss=0.287, pruned_loss=0.06351, over 3818238.38 frames. ], batch size: 45, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:03,844 INFO [train.py:903] (3/4) Epoch 24, batch 2000, loss[loss=0.2316, simple_loss=0.3043, pruned_loss=0.07944, over 13459.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2873, pruned_loss=0.06371, over 3806739.24 frames. ], batch size: 135, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:44:08,592 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 5.106e+02 6.605e+02 9.481e+02 1.726e+03, threshold=1.321e+03, percent-clipped=5.0 +2023-04-03 01:44:57,706 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159087.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:44:59,847 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 01:45:03,909 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. limit=5.0 +2023-04-03 01:45:06,688 INFO [train.py:903] (3/4) Epoch 24, batch 2050, loss[loss=0.1727, simple_loss=0.2497, pruned_loss=0.04783, over 19754.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2875, pruned_loss=0.0641, over 3794442.69 frames. ], batch size: 48, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:45:19,179 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 01:45:20,292 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 01:45:35,014 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:45:41,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 01:46:02,782 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159139.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:03,045 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1930, 2.0643, 1.9267, 1.8356, 1.6159, 1.7877, 0.7084, 1.2497], + device='cuda:3'), covar=tensor([0.0574, 0.0627, 0.0501, 0.0765, 0.1102, 0.0856, 0.1282, 0.0976], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0358, 0.0362, 0.0387, 0.0463, 0.0392, 0.0339, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 01:46:06,458 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:08,193 INFO [train.py:903] (3/4) Epoch 24, batch 2100, loss[loss=0.2245, simple_loss=0.2918, pruned_loss=0.0786, over 19760.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2872, pruned_loss=0.06403, over 3794807.63 frames. ], batch size: 51, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:46:10,463 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159145.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:13,634 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.527e+02 4.786e+02 5.922e+02 8.131e+02 1.881e+03, threshold=1.184e+03, percent-clipped=4.0 +2023-04-03 01:46:37,332 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 01:46:49,821 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:46:58,694 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 01:47:10,232 INFO [train.py:903] (3/4) Epoch 24, batch 2150, loss[loss=0.1943, simple_loss=0.277, pruned_loss=0.05577, over 19840.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2877, pruned_loss=0.06401, over 3816874.03 frames. ], batch size: 52, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:47:13,885 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159197.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:48:12,824 INFO [train.py:903] (3/4) Epoch 24, batch 2200, loss[loss=0.2471, simple_loss=0.3124, pruned_loss=0.09087, over 19611.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2881, pruned_loss=0.0641, over 3823510.62 frames. 
], batch size: 50, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:48:18,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 4.881e+02 6.157e+02 7.813e+02 2.191e+03, threshold=1.231e+03, percent-clipped=6.0 +2023-04-03 01:48:25,324 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:33,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:48:41,424 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8086, 1.4312, 1.6273, 1.4111, 3.2793, 0.9849, 2.3827, 3.8389], + device='cuda:3'), covar=tensor([0.0594, 0.3063, 0.2983, 0.2327, 0.0892, 0.3067, 0.1546, 0.0256], + device='cuda:3'), in_proj_covar=tensor([0.0418, 0.0374, 0.0395, 0.0354, 0.0379, 0.0357, 0.0391, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:49:14,285 INFO [train.py:903] (3/4) Epoch 24, batch 2250, loss[loss=0.1918, simple_loss=0.2809, pruned_loss=0.05138, over 19658.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2881, pruned_loss=0.06435, over 3828703.47 frames. ], batch size: 55, lr: 3.44e-03, grad_scale: 8.0 +2023-04-03 01:49:31,873 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159308.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:49:37,404 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159312.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 01:49:46,545 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8790, 1.3058, 1.5588, 1.5112, 3.4258, 0.9968, 2.5885, 3.8902], + device='cuda:3'), covar=tensor([0.0489, 0.3057, 0.3007, 0.1997, 0.0732, 0.2814, 0.1237, 0.0213], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0354, 0.0378, 0.0356, 0.0390, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:50:16,678 INFO [train.py:903] (3/4) Epoch 24, batch 2300, loss[loss=0.1874, simple_loss=0.2699, pruned_loss=0.0524, over 19620.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2873, pruned_loss=0.06402, over 3834939.29 frames. ], batch size: 57, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:50:21,055 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.893e+02 4.864e+02 6.208e+02 8.672e+02 1.812e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 01:50:31,271 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 01:50:52,353 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7946, 1.3673, 1.5575, 1.6218, 3.3525, 1.0200, 2.5639, 3.8901], + device='cuda:3'), covar=tensor([0.0471, 0.2714, 0.2828, 0.1769, 0.0688, 0.2641, 0.1168, 0.0202], + device='cuda:3'), in_proj_covar=tensor([0.0415, 0.0370, 0.0391, 0.0351, 0.0375, 0.0354, 0.0387, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:51:19,170 INFO [train.py:903] (3/4) Epoch 24, batch 2350, loss[loss=0.2302, simple_loss=0.3182, pruned_loss=0.0711, over 19680.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2866, pruned_loss=0.06358, over 3815609.88 frames. 
], batch size: 60, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:00,171 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 01:52:04,794 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:52:17,048 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 01:52:19,433 INFO [train.py:903] (3/4) Epoch 24, batch 2400, loss[loss=0.1726, simple_loss=0.2646, pruned_loss=0.04029, over 19674.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2868, pruned_loss=0.06356, over 3814945.16 frames. ], batch size: 53, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:52:25,062 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.946e+02 5.943e+02 8.368e+02 2.189e+03, threshold=1.189e+03, percent-clipped=6.0 +2023-04-03 01:53:22,618 INFO [train.py:903] (3/4) Epoch 24, batch 2450, loss[loss=0.2053, simple_loss=0.2988, pruned_loss=0.05588, over 17423.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2866, pruned_loss=0.06328, over 3804258.95 frames. ], batch size: 101, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:53:42,121 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159510.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:49,979 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:53:55,525 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:14,783 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:21,721 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:24,656 INFO [train.py:903] (3/4) Epoch 24, batch 2500, loss[loss=0.2241, simple_loss=0.3034, pruned_loss=0.07244, over 19244.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2868, pruned_loss=0.06341, over 3806866.26 frames. ], batch size: 66, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:54:27,421 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159546.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:54:29,312 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.941e+02 6.110e+02 7.649e+02 1.406e+03, threshold=1.222e+03, percent-clipped=1.0 +2023-04-03 01:54:55,485 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159568.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:55:25,732 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159593.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 01:55:26,480 INFO [train.py:903] (3/4) Epoch 24, batch 2550, loss[loss=0.2231, simple_loss=0.3034, pruned_loss=0.07139, over 18794.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2871, pruned_loss=0.06356, over 3805798.23 frames. ], batch size: 74, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:19,878 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:20,606 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. 
Duration: 25.988875 +2023-04-03 01:56:29,399 INFO [train.py:903] (3/4) Epoch 24, batch 2600, loss[loss=0.1924, simple_loss=0.2675, pruned_loss=0.05866, over 19758.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2869, pruned_loss=0.06382, over 3793213.92 frames. ], batch size: 45, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:56:34,434 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 01:56:34,953 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.779e+02 5.928e+02 8.262e+02 1.528e+03, threshold=1.186e+03, percent-clipped=6.0 +2023-04-03 01:56:36,554 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159649.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:56:39,987 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:57:31,593 INFO [train.py:903] (3/4) Epoch 24, batch 2650, loss[loss=0.1874, simple_loss=0.2738, pruned_loss=0.05043, over 19292.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2873, pruned_loss=0.06428, over 3765018.55 frames. ], batch size: 66, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:57:43,896 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 01:58:18,682 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2253, 1.2838, 1.8651, 1.2722, 2.7214, 3.7057, 3.3846, 3.8827], + device='cuda:3'), covar=tensor([0.1629, 0.3916, 0.3225, 0.2482, 0.0583, 0.0189, 0.0228, 0.0262], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0325, 0.0356, 0.0264, 0.0245, 0.0190, 0.0216, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 01:58:20,963 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8659, 1.3468, 1.0640, 0.9267, 1.1646, 0.9684, 0.9160, 1.2577], + device='cuda:3'), covar=tensor([0.0645, 0.0842, 0.1188, 0.0907, 0.0671, 0.1418, 0.0630, 0.0518], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0315, 0.0339, 0.0268, 0.0248, 0.0339, 0.0292, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:58:34,703 INFO [train.py:903] (3/4) Epoch 24, batch 2700, loss[loss=0.2283, simple_loss=0.3096, pruned_loss=0.07348, over 19476.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2879, pruned_loss=0.0643, over 3791239.16 frames. 
], batch size: 64, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:58:39,053 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 5.237e+02 6.508e+02 8.466e+02 2.382e+03, threshold=1.302e+03, percent-clipped=8.0 +2023-04-03 01:59:02,798 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4437, 2.1827, 1.7547, 1.4712, 1.9976, 1.4363, 1.2872, 1.8757], + device='cuda:3'), covar=tensor([0.0967, 0.0746, 0.0982, 0.0848, 0.0608, 0.1218, 0.0751, 0.0488], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0315, 0.0340, 0.0269, 0.0248, 0.0340, 0.0292, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 01:59:03,870 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159767.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 01:59:36,001 INFO [train.py:903] (3/4) Epoch 24, batch 2750, loss[loss=0.1775, simple_loss=0.2537, pruned_loss=0.05069, over 19759.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2876, pruned_loss=0.06424, over 3806587.19 frames. ], batch size: 47, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 01:59:46,851 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159802.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:19,561 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159827.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:00:40,383 INFO [train.py:903] (3/4) Epoch 24, batch 2800, loss[loss=0.1875, simple_loss=0.2622, pruned_loss=0.05637, over 16504.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2877, pruned_loss=0.06412, over 3800286.97 frames. ], batch size: 36, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:00:45,933 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.444e+02 4.661e+02 5.641e+02 7.181e+02 2.352e+03, threshold=1.128e+03, percent-clipped=2.0 +2023-04-03 02:01:16,166 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:40,888 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:01:42,763 INFO [train.py:903] (3/4) Epoch 24, batch 2850, loss[loss=0.1781, simple_loss=0.2514, pruned_loss=0.05243, over 19785.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2874, pruned_loss=0.06427, over 3812065.52 frames. ], batch size: 47, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:10,720 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:25,870 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159928.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:02:39,627 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 02:02:45,326 INFO [train.py:903] (3/4) Epoch 24, batch 2900, loss[loss=0.1963, simple_loss=0.283, pruned_loss=0.05481, over 19680.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2874, pruned_loss=0.06426, over 3807015.08 frames. 
], batch size: 59, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:02:51,070 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.210e+02 4.874e+02 6.501e+02 8.672e+02 1.518e+03, threshold=1.300e+03, percent-clipped=5.0 +2023-04-03 02:03:45,537 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=159993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:03:46,480 INFO [train.py:903] (3/4) Epoch 24, batch 2950, loss[loss=0.1988, simple_loss=0.277, pruned_loss=0.06035, over 19745.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2876, pruned_loss=0.06441, over 3799008.74 frames. ], batch size: 51, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:13,447 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1832, 2.0067, 1.8948, 2.0828, 1.9120, 1.8578, 1.7530, 2.0625], + device='cuda:3'), covar=tensor([0.1042, 0.1462, 0.1430, 0.1239, 0.1456, 0.0555, 0.1409, 0.0745], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0355, 0.0312, 0.0254, 0.0303, 0.0252, 0.0312, 0.0257], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:04:24,694 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:04:45,842 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 02:04:49,466 INFO [train.py:903] (3/4) Epoch 24, batch 3000, loss[loss=0.1816, simple_loss=0.2665, pruned_loss=0.04837, over 19499.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2867, pruned_loss=0.06384, over 3799786.39 frames. ], batch size: 64, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:04:49,466 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 02:05:02,002 INFO [train.py:937] (3/4) Epoch 24, validation: loss=0.1679, simple_loss=0.268, pruned_loss=0.03397, over 944034.00 frames. +2023-04-03 02:05:02,004 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 02:05:08,149 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:05:08,848 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.319e+02 4.966e+02 6.275e+02 7.790e+02 1.988e+03, threshold=1.255e+03, percent-clipped=5.0 +2023-04-03 02:05:33,081 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-03 02:05:47,965 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. limit=5.0 +2023-04-03 02:06:04,586 INFO [train.py:903] (3/4) Epoch 24, batch 3050, loss[loss=0.2386, simple_loss=0.3156, pruned_loss=0.08077, over 19662.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2863, pruned_loss=0.06294, over 3816956.29 frames. ], batch size: 60, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:06:19,319 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 02:06:23,602 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:07:08,733 INFO [train.py:903] (3/4) Epoch 24, batch 3100, loss[loss=0.2096, simple_loss=0.2888, pruned_loss=0.06523, over 19674.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2866, pruned_loss=0.06289, over 3817691.81 frames. 
], batch size: 53, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:07:14,564 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 4.859e+02 5.894e+02 7.109e+02 1.682e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 02:07:37,222 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1549, 1.4547, 1.7598, 1.5280, 2.9798, 4.6674, 4.4637, 5.0236], + device='cuda:3'), covar=tensor([0.1762, 0.3806, 0.3692, 0.2412, 0.0709, 0.0210, 0.0180, 0.0227], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0327, 0.0357, 0.0265, 0.0246, 0.0191, 0.0217, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 02:07:52,747 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4567, 1.5591, 1.8599, 1.7420, 2.7096, 2.3313, 2.9184, 1.2563], + device='cuda:3'), covar=tensor([0.2513, 0.4194, 0.2640, 0.1920, 0.1530, 0.2132, 0.1442, 0.4448], + device='cuda:3'), in_proj_covar=tensor([0.0544, 0.0656, 0.0726, 0.0495, 0.0623, 0.0537, 0.0666, 0.0559], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 02:08:10,531 INFO [train.py:903] (3/4) Epoch 24, batch 3150, loss[loss=0.271, simple_loss=0.3417, pruned_loss=0.1001, over 19743.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2866, pruned_loss=0.0627, over 3821907.83 frames. ], batch size: 63, lr: 3.43e-03, grad_scale: 8.0 +2023-04-03 02:08:36,004 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 02:08:40,602 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160217.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:08:56,834 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2827, 2.2859, 2.5030, 2.9633, 2.2466, 2.8329, 2.5816, 2.2909], + device='cuda:3'), covar=tensor([0.4117, 0.4261, 0.1949, 0.2714, 0.4500, 0.2331, 0.4747, 0.3434], + device='cuda:3'), in_proj_covar=tensor([0.0919, 0.0990, 0.0731, 0.0945, 0.0896, 0.0831, 0.0853, 0.0794], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 02:09:14,343 INFO [train.py:903] (3/4) Epoch 24, batch 3200, loss[loss=0.1951, simple_loss=0.287, pruned_loss=0.05163, over 18204.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06278, over 3829871.96 frames. 
], batch size: 83, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:09:18,035 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6958, 4.2931, 2.8306, 3.7889, 0.9610, 4.1804, 4.1563, 4.1643], + device='cuda:3'), covar=tensor([0.0589, 0.0897, 0.1790, 0.0841, 0.3964, 0.0698, 0.0843, 0.0986], + device='cuda:3'), in_proj_covar=tensor([0.0512, 0.0413, 0.0498, 0.0346, 0.0403, 0.0437, 0.0431, 0.0464], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:09:19,294 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.6407, 0.8768, 0.6785, 0.6753, 0.7955, 0.6609, 0.6629, 0.8281], + device='cuda:3'), covar=tensor([0.0486, 0.0608, 0.0774, 0.0496, 0.0423, 0.0959, 0.0431, 0.0400], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0316, 0.0339, 0.0267, 0.0247, 0.0342, 0.0292, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:09:20,052 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.134e+02 6.599e+02 8.700e+02 2.161e+03, threshold=1.320e+03, percent-clipped=4.0 +2023-04-03 02:09:49,740 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:10:17,064 INFO [train.py:903] (3/4) Epoch 24, batch 3250, loss[loss=0.1686, simple_loss=0.2397, pruned_loss=0.04876, over 19032.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.0628, over 3835472.17 frames. ], batch size: 42, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:10:56,755 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160325.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:11:04,804 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160332.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:11:21,817 INFO [train.py:903] (3/4) Epoch 24, batch 3300, loss[loss=0.2398, simple_loss=0.3129, pruned_loss=0.08339, over 19765.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2874, pruned_loss=0.06291, over 3841922.85 frames. ], batch size: 54, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:11:24,391 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 02:11:27,809 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.973e+02 5.834e+02 7.675e+02 1.997e+03, threshold=1.167e+03, percent-clipped=3.0 +2023-04-03 02:11:46,525 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160364.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:15,770 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:18,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160389.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:12:23,805 INFO [train.py:903] (3/4) Epoch 24, batch 3350, loss[loss=0.1987, simple_loss=0.2791, pruned_loss=0.05909, over 19601.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.287, pruned_loss=0.06298, over 3844416.95 frames. 
], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:12:47,487 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5824, 4.0644, 4.2538, 4.2620, 1.6917, 4.0564, 3.5008, 4.0203], + device='cuda:3'), covar=tensor([0.1640, 0.0925, 0.0644, 0.0749, 0.5937, 0.0909, 0.0714, 0.1139], + device='cuda:3'), in_proj_covar=tensor([0.0787, 0.0753, 0.0959, 0.0839, 0.0842, 0.0731, 0.0572, 0.0891], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 02:13:26,804 INFO [train.py:903] (3/4) Epoch 24, batch 3400, loss[loss=0.2186, simple_loss=0.3029, pruned_loss=0.0671, over 19673.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.0621, over 3846653.98 frames. ], batch size: 60, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:13:32,530 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 5.133e+02 6.647e+02 9.203e+02 1.938e+03, threshold=1.329e+03, percent-clipped=8.0 +2023-04-03 02:14:28,012 INFO [train.py:903] (3/4) Epoch 24, batch 3450, loss[loss=0.2362, simple_loss=0.3172, pruned_loss=0.0776, over 19494.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2869, pruned_loss=0.06278, over 3841150.38 frames. ], batch size: 64, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:14:31,634 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 02:15:11,039 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-03 02:15:20,972 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9243, 4.4523, 2.7751, 3.8984, 1.0185, 4.4364, 4.2918, 4.4155], + device='cuda:3'), covar=tensor([0.0582, 0.0981, 0.1891, 0.0817, 0.4037, 0.0579, 0.0971, 0.1113], + device='cuda:3'), in_proj_covar=tensor([0.0509, 0.0413, 0.0498, 0.0345, 0.0402, 0.0437, 0.0431, 0.0462], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:15:29,545 INFO [train.py:903] (3/4) Epoch 24, batch 3500, loss[loss=0.2127, simple_loss=0.2952, pruned_loss=0.06512, over 19688.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06257, over 3834452.36 frames. ], batch size: 60, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:15:38,053 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.685e+02 5.887e+02 7.904e+02 2.662e+03, threshold=1.177e+03, percent-clipped=4.0 +2023-04-03 02:16:26,902 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:16:33,575 INFO [train.py:903] (3/4) Epoch 24, batch 3550, loss[loss=0.2029, simple_loss=0.2916, pruned_loss=0.05714, over 18808.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2864, pruned_loss=0.06239, over 3828230.85 frames. 
], batch size: 74, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:16:39,948 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5700, 1.5185, 1.5209, 2.1932, 1.6626, 1.9815, 1.9254, 1.7214], + device='cuda:3'), covar=tensor([0.0862, 0.0906, 0.1011, 0.0611, 0.0802, 0.0701, 0.0820, 0.0684], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0222, 0.0228, 0.0239, 0.0225, 0.0213, 0.0189, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 02:16:57,364 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160613.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:35,772 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.70 vs. limit=5.0 +2023-04-03 02:17:36,578 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:17:37,258 INFO [train.py:903] (3/4) Epoch 24, batch 3600, loss[loss=0.2388, simple_loss=0.3099, pruned_loss=0.08387, over 19543.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2874, pruned_loss=0.06295, over 3830090.65 frames. ], batch size: 56, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:17:44,410 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.188e+02 4.811e+02 5.669e+02 7.209e+02 1.690e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 02:18:08,006 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160668.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:18:09,957 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=160669.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:18:40,044 INFO [train.py:903] (3/4) Epoch 24, batch 3650, loss[loss=0.1738, simple_loss=0.2602, pruned_loss=0.04376, over 19713.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2877, pruned_loss=0.06256, over 3842742.61 frames. ], batch size: 45, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:40,899 INFO [train.py:903] (3/4) Epoch 24, batch 3700, loss[loss=0.2548, simple_loss=0.3249, pruned_loss=0.09238, over 19331.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2878, pruned_loss=0.06321, over 3850721.45 frames. ], batch size: 66, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:19:46,834 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-04-03 02:19:49,425 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.616e+02 6.347e+02 7.690e+02 1.972e+03, threshold=1.269e+03, percent-clipped=6.0 +2023-04-03 02:20:26,876 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0891, 1.9996, 1.7880, 2.0956, 1.8832, 1.7748, 1.7156, 1.9932], + device='cuda:3'), covar=tensor([0.1097, 0.1413, 0.1440, 0.1199, 0.1404, 0.0571, 0.1495, 0.0744], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0358, 0.0313, 0.0256, 0.0305, 0.0254, 0.0314, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:20:30,349 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160784.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:20:44,492 INFO [train.py:903] (3/4) Epoch 24, batch 3750, loss[loss=0.201, simple_loss=0.2806, pruned_loss=0.06075, over 19584.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2879, pruned_loss=0.06354, over 3856206.90 frames. 
], batch size: 52, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:45,655 INFO [train.py:903] (3/4) Epoch 24, batch 3800, loss[loss=0.2239, simple_loss=0.3082, pruned_loss=0.06975, over 19325.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2874, pruned_loss=0.06312, over 3853660.40 frames. ], batch size: 66, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:21:53,460 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.186e+02 4.635e+02 5.250e+02 7.046e+02 1.734e+03, threshold=1.050e+03, percent-clipped=4.0 +2023-04-03 02:22:18,606 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 02:22:45,256 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4782, 2.5589, 2.1222, 2.5783, 2.4586, 2.1034, 1.9254, 2.4871], + device='cuda:3'), covar=tensor([0.1026, 0.1399, 0.1475, 0.1035, 0.1252, 0.0550, 0.1491, 0.0703], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0356, 0.0313, 0.0255, 0.0305, 0.0254, 0.0314, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:22:47,092 INFO [train.py:903] (3/4) Epoch 24, batch 3850, loss[loss=0.1465, simple_loss=0.2315, pruned_loss=0.03076, over 19763.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2874, pruned_loss=0.06329, over 3827691.34 frames. ], batch size: 48, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:22:58,402 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 02:23:18,509 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160919.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:23:48,701 INFO [train.py:903] (3/4) Epoch 24, batch 3900, loss[loss=0.2382, simple_loss=0.3142, pruned_loss=0.08105, over 19513.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2868, pruned_loss=0.06302, over 3815418.52 frames. ], batch size: 64, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:23:58,397 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 5.134e+02 6.381e+02 7.692e+02 1.884e+03, threshold=1.276e+03, percent-clipped=12.0 +2023-04-03 02:24:38,106 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:24:53,492 INFO [train.py:903] (3/4) Epoch 24, batch 3950, loss[loss=0.2439, simple_loss=0.3181, pruned_loss=0.08483, over 13175.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.286, pruned_loss=0.06239, over 3814193.74 frames. ], batch size: 136, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:24:57,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. 
Duration: 28.0944375 +2023-04-03 02:25:41,872 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5601, 1.5663, 1.7906, 1.9059, 4.1301, 1.3644, 2.6972, 4.3965], + device='cuda:3'), covar=tensor([0.0468, 0.2868, 0.2810, 0.1827, 0.0725, 0.2538, 0.1525, 0.0208], + device='cuda:3'), in_proj_covar=tensor([0.0412, 0.0367, 0.0389, 0.0350, 0.0373, 0.0353, 0.0385, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:25:51,245 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161040.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 02:25:55,367 INFO [train.py:903] (3/4) Epoch 24, batch 4000, loss[loss=0.1785, simple_loss=0.2519, pruned_loss=0.05258, over 19098.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06194, over 3817677.33 frames. ], batch size: 42, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:26:03,425 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.651e+02 5.074e+02 6.327e+02 7.723e+02 1.762e+03, threshold=1.265e+03, percent-clipped=6.0 +2023-04-03 02:26:21,958 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161065.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 02:26:41,782 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 02:26:57,369 INFO [train.py:903] (3/4) Epoch 24, batch 4050, loss[loss=0.2666, simple_loss=0.3258, pruned_loss=0.1037, over 13685.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06256, over 3818382.89 frames. ], batch size: 135, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:27:57,730 INFO [train.py:903] (3/4) Epoch 24, batch 4100, loss[loss=0.1771, simple_loss=0.2523, pruned_loss=0.05098, over 19379.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06284, over 3820199.40 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 8.0 +2023-04-03 02:28:06,049 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.915e+02 6.129e+02 7.795e+02 1.333e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 02:28:31,037 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 02:29:00,663 INFO [train.py:903] (3/4) Epoch 24, batch 4150, loss[loss=0.2061, simple_loss=0.2829, pruned_loss=0.06461, over 19619.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06331, over 3824255.15 frames. ], batch size: 50, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:29:49,592 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8463, 1.3628, 1.0743, 0.9407, 1.1702, 0.9896, 0.8648, 1.2083], + device='cuda:3'), covar=tensor([0.0645, 0.0830, 0.1174, 0.0811, 0.0611, 0.1347, 0.0655, 0.0529], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0315, 0.0337, 0.0267, 0.0247, 0.0340, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:29:50,580 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:01,368 INFO [train.py:903] (3/4) Epoch 24, batch 4200, loss[loss=0.1847, simple_loss=0.2734, pruned_loss=0.04794, over 19769.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2878, pruned_loss=0.06398, over 3817967.12 frames. 
], batch size: 54, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:30:02,568 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 02:30:04,062 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3282, 1.3094, 1.2803, 1.1349, 1.0635, 1.1818, 0.3734, 0.6444], + device='cuda:3'), covar=tensor([0.0519, 0.0527, 0.0328, 0.0503, 0.0845, 0.0576, 0.1196, 0.0830], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0361, 0.0365, 0.0388, 0.0466, 0.0395, 0.0342, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 02:30:08,530 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:30:09,255 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.684e+02 6.110e+02 7.845e+02 2.290e+03, threshold=1.222e+03, percent-clipped=7.0 +2023-04-03 02:30:24,271 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:31:03,249 INFO [train.py:903] (3/4) Epoch 24, batch 4250, loss[loss=0.2068, simple_loss=0.2924, pruned_loss=0.06065, over 19606.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2867, pruned_loss=0.06311, over 3820120.58 frames. ], batch size: 57, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:31:17,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 02:31:28,269 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 02:31:44,254 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:04,738 INFO [train.py:903] (3/4) Epoch 24, batch 4300, loss[loss=0.232, simple_loss=0.3099, pruned_loss=0.07704, over 18658.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06254, over 3832889.04 frames. ], batch size: 74, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:32:12,543 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.040e+02 4.585e+02 5.768e+02 7.257e+02 2.214e+03, threshold=1.154e+03, percent-clipped=5.0 +2023-04-03 02:32:38,073 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3117, 1.3156, 1.7718, 1.3867, 2.7168, 3.6771, 3.3747, 3.8776], + device='cuda:3'), covar=tensor([0.1545, 0.3771, 0.3294, 0.2423, 0.0614, 0.0212, 0.0223, 0.0253], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0325, 0.0356, 0.0265, 0.0245, 0.0190, 0.0217, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 02:32:47,563 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:32:57,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 02:33:06,258 INFO [train.py:903] (3/4) Epoch 24, batch 4350, loss[loss=0.2238, simple_loss=0.2974, pruned_loss=0.07512, over 19178.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2858, pruned_loss=0.0626, over 3821737.11 frames. 
], batch size: 69, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:33:37,841 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:07,070 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:34:08,991 INFO [train.py:903] (3/4) Epoch 24, batch 4400, loss[loss=0.1642, simple_loss=0.2488, pruned_loss=0.03981, over 19465.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06242, over 3807703.59 frames. ], batch size: 49, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:34:15,589 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.985e+02 5.009e+02 6.093e+02 7.233e+02 1.222e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 02:34:31,952 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 02:34:41,589 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 02:34:43,223 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5434, 1.5605, 1.8369, 1.7470, 2.5831, 2.2868, 2.7505, 1.3530], + device='cuda:3'), covar=tensor([0.2391, 0.4214, 0.2656, 0.1894, 0.1520, 0.2032, 0.1401, 0.4242], + device='cuda:3'), in_proj_covar=tensor([0.0540, 0.0651, 0.0725, 0.0493, 0.0623, 0.0535, 0.0661, 0.0556], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 02:34:50,984 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:35:04,478 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7445, 4.2990, 2.7982, 3.8124, 1.0121, 4.3472, 4.1896, 4.2365], + device='cuda:3'), covar=tensor([0.0588, 0.0997, 0.1966, 0.0845, 0.4137, 0.0594, 0.0863, 0.1168], + device='cuda:3'), in_proj_covar=tensor([0.0511, 0.0418, 0.0503, 0.0349, 0.0402, 0.0438, 0.0435, 0.0465], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:35:10,531 INFO [train.py:903] (3/4) Epoch 24, batch 4450, loss[loss=0.2533, simple_loss=0.3357, pruned_loss=0.08548, over 18822.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2855, pruned_loss=0.0624, over 3813661.17 frames. ], batch size: 74, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:10,900 INFO [train.py:903] (3/4) Epoch 24, batch 4500, loss[loss=0.2325, simple_loss=0.3078, pruned_loss=0.07859, over 18134.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2867, pruned_loss=0.06313, over 3813850.26 frames. ], batch size: 83, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:36:17,765 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.067e+02 5.206e+02 6.185e+02 8.214e+02 2.130e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 02:36:53,398 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:11,179 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:37:12,188 INFO [train.py:903] (3/4) Epoch 24, batch 4550, loss[loss=0.1688, simple_loss=0.2503, pruned_loss=0.04362, over 19766.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2859, pruned_loss=0.06259, over 3825903.15 frames. 
], batch size: 46, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:37:20,028 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 02:37:44,741 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 02:38:02,414 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:38:15,054 INFO [train.py:903] (3/4) Epoch 24, batch 4600, loss[loss=0.1805, simple_loss=0.2582, pruned_loss=0.05139, over 19374.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.06234, over 3822640.79 frames. ], batch size: 47, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:38:21,972 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.894e+02 5.872e+02 7.852e+02 1.807e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 02:38:33,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:03,287 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1291, 3.3513, 1.9300, 2.0316, 2.9931, 1.7481, 1.4241, 2.2682], + device='cuda:3'), covar=tensor([0.1328, 0.0626, 0.1146, 0.0896, 0.0633, 0.1258, 0.1035, 0.0636], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0317, 0.0340, 0.0268, 0.0248, 0.0342, 0.0293, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:39:15,663 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:16,432 INFO [train.py:903] (3/4) Epoch 24, batch 4650, loss[loss=0.1983, simple_loss=0.2827, pruned_loss=0.0569, over 19608.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2856, pruned_loss=0.06204, over 3823110.99 frames. ], batch size: 52, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:39:22,580 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:23,628 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6307, 4.1233, 4.2919, 4.2971, 1.6068, 4.0714, 3.5314, 4.0520], + device='cuda:3'), covar=tensor([0.1645, 0.0926, 0.0606, 0.0711, 0.6052, 0.1005, 0.0690, 0.1054], + device='cuda:3'), in_proj_covar=tensor([0.0797, 0.0762, 0.0966, 0.0846, 0.0847, 0.0732, 0.0579, 0.0897], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 02:39:33,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 02:39:33,924 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:39:44,914 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 02:39:53,524 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:40:19,111 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-03 02:40:19,299 INFO [train.py:903] (3/4) Epoch 24, batch 4700, loss[loss=0.1789, simple_loss=0.2543, pruned_loss=0.05174, over 19358.00 frames. 
], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06248, over 3820055.43 frames. ], batch size: 47, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:40:26,417 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 5.147e+02 6.134e+02 7.606e+02 1.537e+03, threshold=1.227e+03, percent-clipped=3.0 +2023-04-03 02:40:39,910 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 02:40:43,392 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:41:20,593 INFO [train.py:903] (3/4) Epoch 24, batch 4750, loss[loss=0.2144, simple_loss=0.2933, pruned_loss=0.0677, over 19268.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2869, pruned_loss=0.06244, over 3826518.40 frames. ], batch size: 66, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:41:57,360 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=161822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:09,943 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:42:23,975 INFO [train.py:903] (3/4) Epoch 24, batch 4800, loss[loss=0.2002, simple_loss=0.2852, pruned_loss=0.05757, over 19771.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2863, pruned_loss=0.06207, over 3846288.14 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:42:31,545 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 5.225e+02 6.101e+02 7.305e+02 1.695e+03, threshold=1.220e+03, percent-clipped=2.0 +2023-04-03 02:42:45,753 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6224, 1.7591, 1.9513, 2.0378, 1.5877, 1.9255, 1.9884, 1.8480], + device='cuda:3'), covar=tensor([0.3827, 0.3179, 0.1792, 0.2229, 0.3508, 0.2003, 0.4712, 0.3079], + device='cuda:3'), in_proj_covar=tensor([0.0914, 0.0986, 0.0727, 0.0940, 0.0892, 0.0829, 0.0849, 0.0791], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 02:43:04,826 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:43:25,945 INFO [train.py:903] (3/4) Epoch 24, batch 4850, loss[loss=0.1712, simple_loss=0.2591, pruned_loss=0.0416, over 19496.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2868, pruned_loss=0.06203, over 3851892.50 frames. ], batch size: 49, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:43:50,732 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 02:44:01,375 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-03 02:44:04,442 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3875, 2.4688, 2.6382, 3.1895, 2.4748, 3.0254, 2.7636, 2.5190], + device='cuda:3'), covar=tensor([0.4347, 0.4315, 0.1945, 0.2526, 0.4551, 0.2267, 0.4736, 0.3353], + device='cuda:3'), in_proj_covar=tensor([0.0916, 0.0988, 0.0729, 0.0943, 0.0895, 0.0832, 0.0852, 0.0794], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 02:44:11,974 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-03 02:44:16,555 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 02:44:17,713 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 02:44:19,192 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161937.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:27,638 INFO [train.py:903] (3/4) Epoch 24, batch 4900, loss[loss=0.2382, simple_loss=0.3195, pruned_loss=0.07848, over 19773.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2868, pruned_loss=0.06227, over 3845378.25 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:44:27,698 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 02:44:33,871 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:34,696 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 5.435e+02 6.496e+02 8.047e+02 2.666e+03, threshold=1.299e+03, percent-clipped=6.0 +2023-04-03 02:44:47,038 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 02:44:53,284 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2156, 2.0742, 1.9627, 1.7991, 1.6111, 1.7791, 0.5739, 1.2769], + device='cuda:3'), covar=tensor([0.0635, 0.0674, 0.0548, 0.0965, 0.1219, 0.1033, 0.1451, 0.1128], + device='cuda:3'), in_proj_covar=tensor([0.0358, 0.0356, 0.0359, 0.0384, 0.0460, 0.0390, 0.0338, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 02:44:53,293 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:44:54,782 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.76 vs. limit=5.0 +2023-04-03 02:45:05,679 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161974.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:23,800 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:45:24,320 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-03 02:45:28,868 INFO [train.py:903] (3/4) Epoch 24, batch 4950, loss[loss=0.211, simple_loss=0.2953, pruned_loss=0.06334, over 19788.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2872, pruned_loss=0.06259, over 3839509.45 frames. ], batch size: 56, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:45:38,511 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-03 02:45:47,823 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 02:46:13,010 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-03 02:46:20,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0942, 1.1943, 1.5313, 0.7014, 1.9635, 2.4448, 2.1488, 2.6305], + device='cuda:3'), covar=tensor([0.1490, 0.3934, 0.3480, 0.2775, 0.0670, 0.0294, 0.0344, 0.0397], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0326, 0.0357, 0.0266, 0.0246, 0.0191, 0.0218, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 02:46:33,465 INFO [train.py:903] (3/4) Epoch 24, batch 5000, loss[loss=0.1841, simple_loss=0.2591, pruned_loss=0.05461, over 19753.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2875, pruned_loss=0.06274, over 3836829.99 frames. ], batch size: 46, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:46:41,315 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.589e+02 5.769e+02 7.322e+02 1.477e+03, threshold=1.154e+03, percent-clipped=3.0 +2023-04-03 02:46:44,803 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 02:46:56,733 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 02:47:35,233 INFO [train.py:903] (3/4) Epoch 24, batch 5050, loss[loss=0.2314, simple_loss=0.308, pruned_loss=0.07743, over 19662.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2874, pruned_loss=0.06288, over 3835396.35 frames. ], batch size: 55, lr: 3.41e-03, grad_scale: 8.0 +2023-04-03 02:47:36,676 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:47:49,441 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4223, 4.0337, 2.5401, 3.5583, 0.9754, 3.9866, 3.9097, 3.9421], + device='cuda:3'), covar=tensor([0.0644, 0.0938, 0.2017, 0.0862, 0.3646, 0.0695, 0.0917, 0.1036], + device='cuda:3'), in_proj_covar=tensor([0.0516, 0.0421, 0.0506, 0.0353, 0.0405, 0.0444, 0.0439, 0.0470], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:48:14,945 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 02:48:25,559 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:37,733 INFO [train.py:903] (3/4) Epoch 24, batch 5100, loss[loss=0.1713, simple_loss=0.2541, pruned_loss=0.04427, over 19729.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2859, pruned_loss=0.06202, over 3841428.51 frames. ], batch size: 46, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:48:44,638 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.540e+02 5.777e+02 7.463e+02 1.637e+03, threshold=1.155e+03, percent-clipped=6.0 +2023-04-03 02:48:51,574 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 02:48:55,175 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 02:48:55,632 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:48:59,602 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-03 02:49:19,006 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:39,595 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162193.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:49:40,374 INFO [train.py:903] (3/4) Epoch 24, batch 5150, loss[loss=0.2294, simple_loss=0.3042, pruned_loss=0.07727, over 18751.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06178, over 3848572.82 frames. ], batch size: 74, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:49:57,186 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 02:49:57,528 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9996, 1.1829, 1.6162, 1.0882, 2.2395, 2.9610, 2.7006, 3.3292], + device='cuda:3'), covar=tensor([0.1844, 0.4998, 0.4462, 0.2727, 0.0713, 0.0293, 0.0329, 0.0364], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0324, 0.0354, 0.0265, 0.0244, 0.0190, 0.0216, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 02:50:03,593 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6676, 1.7418, 2.0015, 1.9803, 1.4475, 1.8859, 2.0024, 1.9077], + device='cuda:3'), covar=tensor([0.4054, 0.3741, 0.1951, 0.2497, 0.3990, 0.2318, 0.4918, 0.3319], + device='cuda:3'), in_proj_covar=tensor([0.0917, 0.0989, 0.0730, 0.0945, 0.0896, 0.0831, 0.0852, 0.0797], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 02:50:11,313 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:31,571 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 02:50:43,108 INFO [train.py:903] (3/4) Epoch 24, batch 5200, loss[loss=0.2348, simple_loss=0.3063, pruned_loss=0.08167, over 13226.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2851, pruned_loss=0.06164, over 3852109.08 frames. ], batch size: 136, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:50:50,174 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:50:51,037 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 4.774e+02 5.644e+02 7.681e+02 1.514e+03, threshold=1.129e+03, percent-clipped=4.0 +2023-04-03 02:50:59,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 02:51:44,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 02:51:44,378 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:51:44,830 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 02:51:46,274 INFO [train.py:903] (3/4) Epoch 24, batch 5250, loss[loss=0.2103, simple_loss=0.2942, pruned_loss=0.06318, over 19660.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2851, pruned_loss=0.06159, over 3855964.17 frames. 
], batch size: 58, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:50,091 INFO [train.py:903] (3/4) Epoch 24, batch 5300, loss[loss=0.1851, simple_loss=0.2613, pruned_loss=0.05447, over 19389.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06103, over 3859366.40 frames. ], batch size: 48, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:52:57,096 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.862e+02 5.825e+02 7.901e+02 2.284e+03, threshold=1.165e+03, percent-clipped=8.0 +2023-04-03 02:53:08,421 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 02:53:51,572 INFO [train.py:903] (3/4) Epoch 24, batch 5350, loss[loss=0.2521, simple_loss=0.3185, pruned_loss=0.09287, over 13030.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2852, pruned_loss=0.06164, over 3827261.34 frames. ], batch size: 136, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:54:28,294 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 02:54:48,009 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:54:54,720 INFO [train.py:903] (3/4) Epoch 24, batch 5400, loss[loss=0.175, simple_loss=0.248, pruned_loss=0.05096, over 19779.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2855, pruned_loss=0.06202, over 3825829.13 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:55:02,636 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.752e+02 4.703e+02 6.237e+02 7.666e+02 1.372e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 02:55:56,871 INFO [train.py:903] (3/4) Epoch 24, batch 5450, loss[loss=0.1902, simple_loss=0.2773, pruned_loss=0.05156, over 19778.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2858, pruned_loss=0.06152, over 3838282.18 frames. ], batch size: 54, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:56:41,299 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:00,253 INFO [train.py:903] (3/4) Epoch 24, batch 5500, loss[loss=0.2219, simple_loss=0.3013, pruned_loss=0.07123, over 19668.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.06113, over 3837091.25 frames. ], batch size: 60, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:57:02,962 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4782, 1.3508, 1.5740, 1.5134, 3.0322, 1.1017, 2.4930, 3.4156], + device='cuda:3'), covar=tensor([0.0499, 0.2832, 0.2813, 0.1872, 0.0707, 0.2501, 0.1133, 0.0280], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0368, 0.0389, 0.0350, 0.0372, 0.0352, 0.0385, 0.0405], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 02:57:05,486 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:07,341 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.996e+02 5.077e+02 6.464e+02 7.861e+02 1.317e+03, threshold=1.293e+03, percent-clipped=1.0 +2023-04-03 02:57:12,105 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:25,898 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. 
Duration: 27.47775 +2023-04-03 02:57:36,186 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:57:44,060 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3353, 1.2887, 1.7515, 1.3423, 2.7052, 3.6356, 3.3235, 3.8917], + device='cuda:3'), covar=tensor([0.1618, 0.3905, 0.3468, 0.2504, 0.0629, 0.0207, 0.0226, 0.0254], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0325, 0.0356, 0.0265, 0.0244, 0.0190, 0.0216, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 02:58:01,107 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 02:58:02,007 INFO [train.py:903] (3/4) Epoch 24, batch 5550, loss[loss=0.2423, simple_loss=0.3219, pruned_loss=0.08133, over 19722.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06106, over 3838049.88 frames. ], batch size: 63, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 02:58:08,797 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 02:58:59,595 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 02:59:02,953 INFO [train.py:903] (3/4) Epoch 24, batch 5600, loss[loss=0.1859, simple_loss=0.2549, pruned_loss=0.05843, over 19784.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2847, pruned_loss=0.06149, over 3836345.73 frames. ], batch size: 48, lr: 3.40e-03, grad_scale: 16.0 +2023-04-03 02:59:03,451 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8255, 1.9388, 2.2507, 2.2985, 1.7521, 2.2093, 2.2367, 2.0382], + device='cuda:3'), covar=tensor([0.4162, 0.3845, 0.1909, 0.2403, 0.4014, 0.2209, 0.4691, 0.3411], + device='cuda:3'), in_proj_covar=tensor([0.0915, 0.0988, 0.0728, 0.0939, 0.0893, 0.0829, 0.0850, 0.0793], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 02:59:12,266 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.260e+02 5.034e+02 5.949e+02 8.933e+02 2.100e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 02:59:53,181 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7951, 1.4626, 1.6623, 1.4661, 4.3169, 1.0178, 2.6446, 4.7633], + device='cuda:3'), covar=tensor([0.0543, 0.2886, 0.3154, 0.2193, 0.0816, 0.2929, 0.1564, 0.0178], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0368, 0.0389, 0.0349, 0.0373, 0.0352, 0.0387, 0.0405], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:00:07,499 INFO [train.py:903] (3/4) Epoch 24, batch 5650, loss[loss=0.2256, simple_loss=0.3005, pruned_loss=0.0754, over 19670.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.0612, over 3833678.34 frames. ], batch size: 60, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:00:25,014 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162708.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:00:41,598 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-03 03:00:55,019 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-03 03:01:09,541 INFO [train.py:903] (3/4) Epoch 24, batch 5700, loss[loss=0.1997, simple_loss=0.281, pruned_loss=0.05915, over 19670.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2844, pruned_loss=0.06138, over 3830107.66 frames. ], batch size: 58, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:01:17,488 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.412e+02 4.704e+02 5.740e+02 7.100e+02 1.656e+03, threshold=1.148e+03, percent-clipped=4.0 +2023-04-03 03:02:11,505 INFO [train.py:903] (3/4) Epoch 24, batch 5750, loss[loss=0.2664, simple_loss=0.3287, pruned_loss=0.102, over 13542.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06176, over 3810446.42 frames. ], batch size: 136, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:02:13,886 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 03:02:22,230 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 03:02:28,834 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 03:02:31,428 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:01,627 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 03:03:03,522 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:04,583 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4296, 1.2943, 1.4114, 1.3620, 2.1802, 1.1707, 1.8854, 2.4046], + device='cuda:3'), covar=tensor([0.0547, 0.2112, 0.2161, 0.1425, 0.0588, 0.1821, 0.1843, 0.0446], + device='cuda:3'), in_proj_covar=tensor([0.0415, 0.0369, 0.0390, 0.0350, 0.0373, 0.0354, 0.0387, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:03:13,258 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3083, 1.3565, 1.5028, 1.4712, 1.8090, 1.7580, 1.8036, 0.6370], + device='cuda:3'), covar=tensor([0.2665, 0.4516, 0.2895, 0.2152, 0.1720, 0.2585, 0.1440, 0.5156], + device='cuda:3'), in_proj_covar=tensor([0.0545, 0.0658, 0.0734, 0.0498, 0.0630, 0.0545, 0.0670, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 03:03:13,920 INFO [train.py:903] (3/4) Epoch 24, batch 5800, loss[loss=0.2464, simple_loss=0.3195, pruned_loss=0.08665, over 19600.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.0617, over 3803473.48 frames. ], batch size: 61, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:03:15,413 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162845.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:03:22,908 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.123e+02 4.857e+02 6.549e+02 8.249e+02 1.553e+03, threshold=1.310e+03, percent-clipped=3.0 +2023-04-03 03:03:51,978 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=162873.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:04:16,920 INFO [train.py:903] (3/4) Epoch 24, batch 5850, loss[loss=0.1883, simple_loss=0.2638, pruned_loss=0.05641, over 17408.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06067, over 3818686.21 frames. ], batch size: 38, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:04:49,716 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9597, 4.5688, 2.6902, 3.9594, 1.0256, 4.4917, 4.3603, 4.4074], + device='cuda:3'), covar=tensor([0.0527, 0.0823, 0.1961, 0.0797, 0.3869, 0.0587, 0.0869, 0.1083], + device='cuda:3'), in_proj_covar=tensor([0.0510, 0.0417, 0.0501, 0.0351, 0.0401, 0.0440, 0.0434, 0.0466], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:05:20,443 INFO [train.py:903] (3/4) Epoch 24, batch 5900, loss[loss=0.1569, simple_loss=0.2385, pruned_loss=0.03768, over 19316.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2836, pruned_loss=0.06083, over 3819127.12 frames. ], batch size: 44, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:05:26,347 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 03:05:28,694 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.343e+02 4.550e+02 5.410e+02 6.827e+02 1.573e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 03:05:43,787 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162964.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:05:44,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 03:05:44,660 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0613, 3.7087, 2.5659, 3.2964, 0.9484, 3.6045, 3.5517, 3.5619], + device='cuda:3'), covar=tensor([0.0824, 0.1103, 0.2056, 0.0975, 0.3807, 0.0959, 0.1060, 0.1316], + device='cuda:3'), in_proj_covar=tensor([0.0512, 0.0419, 0.0503, 0.0352, 0.0402, 0.0442, 0.0436, 0.0468], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:06:15,266 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162988.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:16,492 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:06:18,783 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8807, 2.7374, 2.3006, 2.2399, 2.0206, 2.4109, 1.2740, 2.0549], + device='cuda:3'), covar=tensor([0.0858, 0.0634, 0.0701, 0.1153, 0.1169, 0.1161, 0.1390, 0.1146], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0360, 0.0363, 0.0389, 0.0467, 0.0396, 0.0342, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 03:06:21,815 INFO [train.py:903] (3/4) Epoch 24, batch 5950, loss[loss=0.1981, simple_loss=0.281, pruned_loss=0.05764, over 19666.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2831, pruned_loss=0.06068, over 3818984.08 frames. ], batch size: 53, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,797 INFO [train.py:903] (3/4) Epoch 24, batch 6000, loss[loss=0.1989, simple_loss=0.2733, pruned_loss=0.06219, over 19760.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06169, over 3823820.53 frames. 
], batch size: 46, lr: 3.40e-03, grad_scale: 8.0 +2023-04-03 03:07:22,797 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 03:07:35,177 INFO [train.py:937] (3/4) Epoch 24, validation: loss=0.1683, simple_loss=0.2679, pruned_loss=0.03436, over 944034.00 frames. +2023-04-03 03:07:35,178 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 03:07:43,467 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.276e+02 6.488e+02 8.005e+02 1.643e+03, threshold=1.298e+03, percent-clipped=7.0 +2023-04-03 03:08:35,917 INFO [train.py:903] (3/4) Epoch 24, batch 6050, loss[loss=0.1796, simple_loss=0.2547, pruned_loss=0.05229, over 19152.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06196, over 3817816.46 frames. ], batch size: 42, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:08:53,228 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:09:37,877 INFO [train.py:903] (3/4) Epoch 24, batch 6100, loss[loss=0.2226, simple_loss=0.303, pruned_loss=0.0711, over 19475.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2858, pruned_loss=0.06226, over 3804463.32 frames. ], batch size: 64, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:09:42,783 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4423, 1.5447, 1.8795, 1.6435, 2.6781, 2.2765, 2.8644, 1.3199], + device='cuda:3'), covar=tensor([0.2535, 0.4272, 0.2696, 0.1978, 0.1627, 0.2176, 0.1463, 0.4445], + device='cuda:3'), in_proj_covar=tensor([0.0543, 0.0656, 0.0731, 0.0495, 0.0626, 0.0541, 0.0666, 0.0559], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 03:09:45,768 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 5.048e+02 5.892e+02 7.607e+02 1.565e+03, threshold=1.178e+03, percent-clipped=5.0 +2023-04-03 03:09:52,216 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-03 03:10:34,794 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163189.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:10:40,171 INFO [train.py:903] (3/4) Epoch 24, batch 6150, loss[loss=0.2438, simple_loss=0.3135, pruned_loss=0.08707, over 19690.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2852, pruned_loss=0.06194, over 3790971.31 frames. ], batch size: 60, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:10:52,489 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5643, 2.5561, 2.2476, 2.5156, 2.2698, 2.1662, 2.1419, 2.6310], + device='cuda:3'), covar=tensor([0.0967, 0.1434, 0.1402, 0.1120, 0.1433, 0.0520, 0.1334, 0.0638], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0356, 0.0312, 0.0254, 0.0302, 0.0254, 0.0314, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:11:10,602 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 03:11:28,554 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.68 vs. limit=5.0 +2023-04-03 03:11:29,654 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.32 vs. limit=5.0 +2023-04-03 03:11:43,674 INFO [train.py:903] (3/4) Epoch 24, batch 6200, loss[loss=0.2374, simple_loss=0.3074, pruned_loss=0.08365, over 13490.00 frames. 
], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.061, over 3794132.37 frames. ], batch size: 135, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:11:44,112 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:11:51,418 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.538e+02 5.825e+02 7.342e+02 1.276e+03, threshold=1.165e+03, percent-clipped=3.0 +2023-04-03 03:12:00,556 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163258.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:03,975 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4811, 2.4476, 2.2470, 2.6035, 2.4897, 2.1719, 2.0590, 2.5481], + device='cuda:3'), covar=tensor([0.1001, 0.1583, 0.1394, 0.1043, 0.1312, 0.0534, 0.1430, 0.0692], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0357, 0.0312, 0.0254, 0.0303, 0.0254, 0.0315, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:12:14,197 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163269.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:12:38,770 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1738, 2.2818, 2.5065, 2.8078, 2.2763, 2.7449, 2.5381, 2.3409], + device='cuda:3'), covar=tensor([0.4009, 0.3759, 0.1777, 0.2252, 0.3791, 0.2075, 0.4474, 0.3137], + device='cuda:3'), in_proj_covar=tensor([0.0912, 0.0985, 0.0725, 0.0937, 0.0893, 0.0826, 0.0849, 0.0791], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 03:12:43,993 INFO [train.py:903] (3/4) Epoch 24, batch 6250, loss[loss=0.2266, simple_loss=0.3053, pruned_loss=0.07392, over 19754.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06156, over 3780816.42 frames. ], batch size: 63, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:12:56,454 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163304.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:13:11,284 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 03:13:45,521 INFO [train.py:903] (3/4) Epoch 24, batch 6300, loss[loss=0.2289, simple_loss=0.3005, pruned_loss=0.07868, over 19523.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2866, pruned_loss=0.0628, over 3782311.97 frames. ], batch size: 54, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:13:54,807 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.008e+02 6.887e+02 8.761e+02 2.377e+03, threshold=1.377e+03, percent-clipped=3.0 +2023-04-03 03:14:40,283 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. limit=5.0 +2023-04-03 03:14:48,471 INFO [train.py:903] (3/4) Epoch 24, batch 6350, loss[loss=0.2168, simple_loss=0.3059, pruned_loss=0.0638, over 19515.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2876, pruned_loss=0.06332, over 3791184.00 frames. 
], batch size: 56, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:30,614 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4332, 1.4756, 1.4363, 1.7836, 1.4263, 1.6627, 1.6435, 1.5559], + device='cuda:3'), covar=tensor([0.0908, 0.0903, 0.1032, 0.0674, 0.0832, 0.0788, 0.0870, 0.0716], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0226, 0.0214, 0.0190, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 03:15:50,639 INFO [train.py:903] (3/4) Epoch 24, batch 6400, loss[loss=0.2141, simple_loss=0.2948, pruned_loss=0.06674, over 18130.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2865, pruned_loss=0.0627, over 3793152.58 frames. ], batch size: 83, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:15:59,002 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 4.869e+02 5.854e+02 7.378e+02 1.563e+03, threshold=1.171e+03, percent-clipped=2.0 +2023-04-03 03:16:00,351 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:16:01,985 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-03 03:16:52,285 INFO [train.py:903] (3/4) Epoch 24, batch 6450, loss[loss=0.2117, simple_loss=0.2986, pruned_loss=0.06239, over 18183.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2871, pruned_loss=0.06287, over 3805289.59 frames. ], batch size: 83, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:01,213 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 03:17:26,473 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3212, 3.9736, 2.7864, 3.5518, 1.1560, 3.9031, 3.8266, 3.8617], + device='cuda:3'), covar=tensor([0.0744, 0.0968, 0.1740, 0.0811, 0.3644, 0.0729, 0.0929, 0.1236], + device='cuda:3'), in_proj_covar=tensor([0.0512, 0.0418, 0.0502, 0.0352, 0.0402, 0.0443, 0.0436, 0.0467], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:17:34,415 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 03:17:53,151 INFO [train.py:903] (3/4) Epoch 24, batch 6500, loss[loss=0.1963, simple_loss=0.2793, pruned_loss=0.0566, over 19675.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2878, pruned_loss=0.06334, over 3816776.84 frames. ], batch size: 53, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:17:56,720 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 03:18:01,366 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.063e+02 4.704e+02 6.024e+02 8.376e+02 1.457e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 03:18:14,509 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163560.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:22,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:25,185 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.79 vs. 
limit=5.0 +2023-04-03 03:18:44,339 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:18:55,200 INFO [train.py:903] (3/4) Epoch 24, batch 6550, loss[loss=0.2144, simple_loss=0.3007, pruned_loss=0.06408, over 17300.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2869, pruned_loss=0.06261, over 3819647.55 frames. ], batch size: 101, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:19:05,581 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=163602.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:19:35,121 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-03 03:19:57,495 INFO [train.py:903] (3/4) Epoch 24, batch 6600, loss[loss=0.2175, simple_loss=0.2785, pruned_loss=0.07824, over 14805.00 frames. ], tot_loss[loss=0.206, simple_loss=0.287, pruned_loss=0.06244, over 3824479.92 frames. ], batch size: 32, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:20:05,512 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.011e+02 6.018e+02 7.633e+02 1.393e+03, threshold=1.204e+03, percent-clipped=8.0 +2023-04-03 03:20:26,961 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4452, 2.4456, 2.2648, 2.6219, 2.2548, 2.1739, 2.0687, 2.4625], + device='cuda:3'), covar=tensor([0.0972, 0.1528, 0.1292, 0.1035, 0.1404, 0.0536, 0.1385, 0.0694], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0360, 0.0315, 0.0256, 0.0306, 0.0256, 0.0316, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:20:57,666 INFO [train.py:903] (3/4) Epoch 24, batch 6650, loss[loss=0.1735, simple_loss=0.256, pruned_loss=0.04548, over 19790.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2886, pruned_loss=0.06341, over 3815767.54 frames. ], batch size: 48, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:21:24,756 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8682, 1.4339, 1.4750, 1.7151, 3.4126, 1.1537, 2.2498, 3.9509], + device='cuda:3'), covar=tensor([0.0500, 0.2704, 0.3008, 0.1765, 0.0705, 0.2577, 0.1512, 0.0211], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0369, 0.0391, 0.0348, 0.0374, 0.0352, 0.0386, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:21:25,791 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163717.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:21:41,459 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4778, 2.5208, 2.3034, 2.6186, 2.3097, 2.2029, 2.2750, 2.6638], + device='cuda:3'), covar=tensor([0.1007, 0.1484, 0.1342, 0.1095, 0.1400, 0.0519, 0.1222, 0.0641], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0359, 0.0314, 0.0255, 0.0306, 0.0255, 0.0316, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:21:52,067 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.58 vs. limit=5.0 +2023-04-03 03:21:58,181 INFO [train.py:903] (3/4) Epoch 24, batch 6700, loss[loss=0.2836, simple_loss=0.3487, pruned_loss=0.1093, over 13113.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2893, pruned_loss=0.06367, over 3806607.74 frames. 
], batch size: 136, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:22:07,915 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5182, 1.4049, 1.4790, 2.1472, 1.4953, 1.8053, 1.7568, 1.6066], + device='cuda:3'), covar=tensor([0.0958, 0.1124, 0.1116, 0.0736, 0.0951, 0.0908, 0.0974, 0.0835], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0222, 0.0227, 0.0239, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 03:22:08,770 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.744e+02 6.010e+02 8.153e+02 1.593e+03, threshold=1.202e+03, percent-clipped=6.0 +2023-04-03 03:22:14,730 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2479, 3.7956, 3.8948, 3.9039, 1.5805, 3.7309, 3.2354, 3.6504], + device='cuda:3'), covar=tensor([0.1761, 0.1115, 0.0727, 0.0819, 0.6019, 0.1004, 0.0755, 0.1266], + device='cuda:3'), in_proj_covar=tensor([0.0794, 0.0756, 0.0963, 0.0841, 0.0841, 0.0731, 0.0574, 0.0892], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 03:22:32,671 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:22:57,590 INFO [train.py:903] (3/4) Epoch 24, batch 6750, loss[loss=0.1964, simple_loss=0.2788, pruned_loss=0.05698, over 19746.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2881, pruned_loss=0.06313, over 3821556.49 frames. ], batch size: 51, lr: 3.39e-03, grad_scale: 4.0 +2023-04-03 03:23:30,622 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:23:53,913 INFO [train.py:903] (3/4) Epoch 24, batch 6800, loss[loss=0.2162, simple_loss=0.2871, pruned_loss=0.07264, over 19630.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2876, pruned_loss=0.06287, over 3828710.57 frames. ], batch size: 50, lr: 3.39e-03, grad_scale: 8.0 +2023-04-03 03:23:58,807 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:24:03,015 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 4.890e+02 5.869e+02 7.347e+02 2.478e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 03:24:40,116 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 03:24:40,602 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 03:24:42,803 INFO [train.py:903] (3/4) Epoch 25, batch 0, loss[loss=0.1805, simple_loss=0.2645, pruned_loss=0.04819, over 19673.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2645, pruned_loss=0.04819, over 19673.00 frames. ], batch size: 53, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:24:42,803 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 03:24:54,385 INFO [train.py:937] (3/4) Epoch 25, validation: loss=0.1672, simple_loss=0.2675, pruned_loss=0.03346, over 944034.00 frames. +2023-04-03 03:24:54,386 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 03:25:06,943 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-03 03:25:12,156 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4804, 1.5638, 1.7511, 1.7630, 2.5583, 2.1832, 2.6234, 1.2095], + device='cuda:3'), covar=tensor([0.2742, 0.4596, 0.3041, 0.2168, 0.1705, 0.2548, 0.1768, 0.4924], + device='cuda:3'), in_proj_covar=tensor([0.0546, 0.0662, 0.0739, 0.0498, 0.0632, 0.0545, 0.0673, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 03:25:57,057 INFO [train.py:903] (3/4) Epoch 25, batch 50, loss[loss=0.1984, simple_loss=0.2653, pruned_loss=0.06577, over 19739.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2832, pruned_loss=0.05928, over 869777.23 frames. ], batch size: 46, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:26:35,505 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.575e+02 5.589e+02 7.102e+02 2.434e+03, threshold=1.118e+03, percent-clipped=5.0 +2023-04-03 03:26:36,734 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 03:27:00,585 INFO [train.py:903] (3/4) Epoch 25, batch 100, loss[loss=0.2114, simple_loss=0.2968, pruned_loss=0.06301, over 18197.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2825, pruned_loss=0.05911, over 1528696.02 frames. ], batch size: 84, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:27:03,115 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163973.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:15,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 03:27:34,447 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:27:42,634 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.62 vs. limit=5.0 +2023-04-03 03:28:05,191 INFO [train.py:903] (3/4) Epoch 25, batch 150, loss[loss=0.2042, simple_loss=0.2904, pruned_loss=0.05898, over 19306.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2828, pruned_loss=0.06019, over 2028440.54 frames. ], batch size: 66, lr: 3.32e-03, grad_scale: 8.0 +2023-04-03 03:28:42,973 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.518e+02 5.520e+02 6.265e+02 7.455e+02 1.438e+03, threshold=1.253e+03, percent-clipped=2.0 +2023-04-03 03:29:06,832 INFO [train.py:903] (3/4) Epoch 25, batch 200, loss[loss=0.2469, simple_loss=0.3275, pruned_loss=0.08311, over 18162.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2866, pruned_loss=0.0624, over 2423083.00 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:29:09,372 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 03:29:53,469 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:30:10,582 INFO [train.py:903] (3/4) Epoch 25, batch 250, loss[loss=0.2063, simple_loss=0.2965, pruned_loss=0.05803, over 19758.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.287, pruned_loss=0.06259, over 2728045.83 frames. 
], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:30:10,841 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7792, 1.3299, 1.6299, 1.7190, 4.0505, 1.1399, 2.8026, 4.5228], + device='cuda:3'), covar=tensor([0.0570, 0.3432, 0.3431, 0.2158, 0.1024, 0.3088, 0.1495, 0.0273], + device='cuda:3'), in_proj_covar=tensor([0.0414, 0.0369, 0.0391, 0.0348, 0.0374, 0.0352, 0.0387, 0.0406], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:30:32,846 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3728, 2.2421, 2.1258, 1.9901, 1.8626, 1.9777, 0.6973, 1.3497], + device='cuda:3'), covar=tensor([0.0664, 0.0633, 0.0503, 0.0786, 0.1111, 0.0907, 0.1364, 0.1101], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0362, 0.0366, 0.0388, 0.0466, 0.0397, 0.0343, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 03:30:48,736 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.799e+02 4.875e+02 6.095e+02 7.386e+02 2.001e+03, threshold=1.219e+03, percent-clipped=3.0 +2023-04-03 03:31:13,868 INFO [train.py:903] (3/4) Epoch 25, batch 300, loss[loss=0.2134, simple_loss=0.2775, pruned_loss=0.07472, over 19486.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2852, pruned_loss=0.06218, over 2961407.85 frames. ], batch size: 49, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:31:16,578 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:32:18,090 INFO [train.py:903] (3/4) Epoch 25, batch 350, loss[loss=0.2081, simple_loss=0.283, pruned_loss=0.06663, over 19752.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2843, pruned_loss=0.06187, over 3144891.05 frames. ], batch size: 51, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:32:25,200 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 03:32:42,713 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1111, 1.3223, 1.8040, 0.9298, 2.2606, 3.0614, 2.7284, 3.2328], + device='cuda:3'), covar=tensor([0.1606, 0.3768, 0.3128, 0.2743, 0.0665, 0.0235, 0.0274, 0.0320], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0328, 0.0358, 0.0266, 0.0247, 0.0190, 0.0217, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 03:32:54,601 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 03:32:54,999 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.912e+02 5.440e+02 6.552e+02 9.332e+02 1.789e+03, threshold=1.310e+03, percent-clipped=12.0 +2023-04-03 03:33:20,730 INFO [train.py:903] (3/4) Epoch 25, batch 400, loss[loss=0.2542, simple_loss=0.317, pruned_loss=0.0957, over 13599.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2844, pruned_loss=0.06231, over 3289307.21 frames. ], batch size: 135, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:24,824 INFO [train.py:903] (3/4) Epoch 25, batch 450, loss[loss=0.1992, simple_loss=0.2922, pruned_loss=0.05311, over 19660.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2842, pruned_loss=0.06196, over 3415297.23 frames. 
], batch size: 55, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:34:59,251 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 03:35:00,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 03:35:02,824 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 4.778e+02 5.577e+02 7.081e+02 1.680e+03, threshold=1.115e+03, percent-clipped=3.0 +2023-04-03 03:35:27,605 INFO [train.py:903] (3/4) Epoch 25, batch 500, loss[loss=0.1939, simple_loss=0.2854, pruned_loss=0.0512, over 19752.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2849, pruned_loss=0.06172, over 3506646.03 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:35:35,599 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:36:31,240 INFO [train.py:903] (3/4) Epoch 25, batch 550, loss[loss=0.2219, simple_loss=0.3036, pruned_loss=0.07013, over 18177.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2851, pruned_loss=0.06197, over 3578455.41 frames. ], batch size: 83, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:37:09,260 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.163e+02 6.218e+02 7.641e+02 1.675e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 03:37:09,454 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:37:14,144 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7934, 4.3977, 2.6265, 3.8433, 0.6955, 4.3568, 4.1502, 4.3056], + device='cuda:3'), covar=tensor([0.0572, 0.0887, 0.2008, 0.0819, 0.4338, 0.0617, 0.0901, 0.1008], + device='cuda:3'), in_proj_covar=tensor([0.0521, 0.0424, 0.0509, 0.0356, 0.0408, 0.0447, 0.0442, 0.0473], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:37:34,089 INFO [train.py:903] (3/4) Epoch 25, batch 600, loss[loss=0.175, simple_loss=0.249, pruned_loss=0.05049, over 19048.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06168, over 3641600.97 frames. ], batch size: 42, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:38:16,678 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 03:38:30,636 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:38:36,199 INFO [train.py:903] (3/4) Epoch 25, batch 650, loss[loss=0.2377, simple_loss=0.307, pruned_loss=0.08424, over 19473.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06221, over 3691969.67 frames. 
], batch size: 64, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:15,261 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.327e+02 4.538e+02 5.913e+02 7.820e+02 1.600e+03, threshold=1.183e+03, percent-clipped=2.0 +2023-04-03 03:39:33,656 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:39:39,493 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5987, 1.5369, 1.5208, 1.8522, 1.3691, 1.7297, 1.7086, 1.6052], + device='cuda:3'), covar=tensor([0.0807, 0.0887, 0.0972, 0.0620, 0.0806, 0.0768, 0.0838, 0.0701], + device='cuda:3'), in_proj_covar=tensor([0.0210, 0.0221, 0.0226, 0.0237, 0.0225, 0.0211, 0.0188, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-03 03:39:40,281 INFO [train.py:903] (3/4) Epoch 25, batch 700, loss[loss=0.1908, simple_loss=0.2626, pruned_loss=0.05948, over 19296.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2857, pruned_loss=0.06269, over 3717383.75 frames. ], batch size: 44, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:39:58,633 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:11,614 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:40:44,572 INFO [train.py:903] (3/4) Epoch 25, batch 750, loss[loss=0.265, simple_loss=0.3315, pruned_loss=0.09927, over 12999.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2872, pruned_loss=0.06292, over 3741331.03 frames. ], batch size: 136, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:40:57,968 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:41:21,490 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.157e+02 6.753e+02 8.219e+02 1.587e+03, threshold=1.351e+03, percent-clipped=10.0 +2023-04-03 03:41:32,788 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-03 03:41:48,138 INFO [train.py:903] (3/4) Epoch 25, batch 800, loss[loss=0.1907, simple_loss=0.2715, pruned_loss=0.05498, over 19823.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2866, pruned_loss=0.06276, over 3747559.63 frames. ], batch size: 52, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:41:52,845 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164676.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:42:03,209 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 03:42:50,200 INFO [train.py:903] (3/4) Epoch 25, batch 850, loss[loss=0.2077, simple_loss=0.2759, pruned_loss=0.06971, over 19779.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2861, pruned_loss=0.06269, over 3748688.68 frames. 
], batch size: 48, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:42:50,365 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:29,203 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 4.928e+02 6.003e+02 7.929e+02 1.446e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 03:43:43,456 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164764.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:43:44,364 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 03:43:52,636 INFO [train.py:903] (3/4) Epoch 25, batch 900, loss[loss=0.2086, simple_loss=0.2968, pruned_loss=0.06023, over 19775.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06222, over 3766548.71 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:29,969 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-03 03:44:38,689 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8994, 2.0076, 2.2439, 2.5309, 1.8543, 2.3921, 2.2733, 2.0666], + device='cuda:3'), covar=tensor([0.4719, 0.4316, 0.2116, 0.2487, 0.4482, 0.2391, 0.5372, 0.3733], + device='cuda:3'), in_proj_covar=tensor([0.0915, 0.0986, 0.0726, 0.0936, 0.0894, 0.0826, 0.0852, 0.0791], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 03:44:56,071 INFO [train.py:903] (3/4) Epoch 25, batch 950, loss[loss=0.2464, simple_loss=0.3162, pruned_loss=0.0883, over 19676.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06278, over 3767679.03 frames. ], batch size: 60, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:44:57,278 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 03:44:57,674 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:15,711 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:28,720 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:45:32,821 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.080e+02 4.681e+02 5.489e+02 6.922e+02 1.500e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-04-03 03:45:43,192 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:46:00,733 INFO [train.py:903] (3/4) Epoch 25, batch 1000, loss[loss=0.186, simple_loss=0.2783, pruned_loss=0.04688, over 19683.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2857, pruned_loss=0.06218, over 3783003.50 frames. ], batch size: 60, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:46:19,837 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:53,199 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:46:55,232 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. 
Duration: 25.061125 +2023-04-03 03:47:03,440 INFO [train.py:903] (3/4) Epoch 25, batch 1050, loss[loss=0.2001, simple_loss=0.2836, pruned_loss=0.05833, over 19589.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2864, pruned_loss=0.06241, over 3797759.30 frames. ], batch size: 61, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:47:13,161 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164930.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:27,201 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=164940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:47:36,149 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 03:47:41,880 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.266e+02 4.678e+02 5.688e+02 6.996e+02 1.189e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 03:47:45,613 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6034, 1.2266, 1.4869, 1.5822, 3.1936, 1.2370, 2.3834, 3.6511], + device='cuda:3'), covar=tensor([0.0534, 0.2883, 0.2989, 0.1843, 0.0730, 0.2451, 0.1333, 0.0238], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0369, 0.0394, 0.0349, 0.0377, 0.0353, 0.0388, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:48:06,293 INFO [train.py:903] (3/4) Epoch 25, batch 1100, loss[loss=0.1915, simple_loss=0.2846, pruned_loss=0.04923, over 19684.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06171, over 3813644.77 frames. ], batch size: 58, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:07,178 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165020.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:09,318 INFO [train.py:903] (3/4) Epoch 25, batch 1150, loss[loss=0.2435, simple_loss=0.3159, pruned_loss=0.08558, over 19514.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06201, over 3822998.60 frames. ], batch size: 56, lr: 3.31e-03, grad_scale: 8.0 +2023-04-03 03:49:40,417 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165045.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:49:48,271 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.101e+02 6.369e+02 8.453e+02 1.568e+03, threshold=1.274e+03, percent-clipped=10.0 +2023-04-03 03:49:52,205 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:14,733 INFO [train.py:903] (3/4) Epoch 25, batch 1200, loss[loss=0.1757, simple_loss=0.2597, pruned_loss=0.04586, over 19748.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2842, pruned_loss=0.06154, over 3818063.97 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:50:41,038 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:50:46,493 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. 
Duration: 0.83 +2023-04-03 03:51:01,325 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165108.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:14,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:18,446 INFO [train.py:903] (3/4) Epoch 25, batch 1250, loss[loss=0.2339, simple_loss=0.3225, pruned_loss=0.07263, over 17593.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2844, pruned_loss=0.06149, over 3825949.05 frames. ], batch size: 101, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:51:34,119 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165135.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:41,190 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:51:43,133 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3334, 3.8626, 3.9639, 3.9660, 1.5561, 3.7870, 3.3132, 3.7140], + device='cuda:3'), covar=tensor([0.1638, 0.0873, 0.0657, 0.0764, 0.5788, 0.0966, 0.0686, 0.1197], + device='cuda:3'), in_proj_covar=tensor([0.0802, 0.0766, 0.0974, 0.0850, 0.0856, 0.0736, 0.0579, 0.0903], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 03:51:43,152 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:51:49,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3540, 1.3719, 1.8170, 1.6098, 3.0827, 4.7071, 4.5115, 5.1245], + device='cuda:3'), covar=tensor([0.1636, 0.3980, 0.3565, 0.2341, 0.0607, 0.0193, 0.0182, 0.0230], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0329, 0.0359, 0.0267, 0.0248, 0.0191, 0.0218, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 03:51:57,595 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.318e+02 4.844e+02 6.322e+02 8.270e+02 1.695e+03, threshold=1.264e+03, percent-clipped=2.0 +2023-04-03 03:52:21,099 INFO [train.py:903] (3/4) Epoch 25, batch 1300, loss[loss=0.1777, simple_loss=0.2644, pruned_loss=0.04552, over 19763.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06147, over 3835137.47 frames. ], batch size: 54, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:23,838 INFO [train.py:903] (3/4) Epoch 25, batch 1350, loss[loss=0.2121, simple_loss=0.2898, pruned_loss=0.06724, over 19391.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2848, pruned_loss=0.06153, over 3846102.35 frames. ], batch size: 48, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:53:27,597 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:54:04,599 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.919e+02 6.162e+02 7.502e+02 1.875e+03, threshold=1.232e+03, percent-clipped=3.0 +2023-04-03 03:54:29,444 INFO [train.py:903] (3/4) Epoch 25, batch 1400, loss[loss=0.223, simple_loss=0.3003, pruned_loss=0.07287, over 19755.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06133, over 3828453.63 frames. 
], batch size: 54, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:54:51,016 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1338, 1.6841, 1.9654, 1.6521, 4.6551, 1.1630, 2.7446, 5.1386], + device='cuda:3'), covar=tensor([0.0422, 0.2707, 0.2712, 0.2071, 0.0700, 0.2752, 0.1352, 0.0147], + device='cuda:3'), in_proj_covar=tensor([0.0413, 0.0367, 0.0391, 0.0348, 0.0373, 0.0350, 0.0386, 0.0407], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:54:53,455 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4177, 2.4049, 2.1856, 2.0695, 1.9148, 2.2631, 1.5986, 1.8972], + device='cuda:3'), covar=tensor([0.0552, 0.0584, 0.0467, 0.0764, 0.0868, 0.0899, 0.1026, 0.0865], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0359, 0.0363, 0.0388, 0.0465, 0.0394, 0.0341, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 03:55:05,171 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:18,718 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:19,117 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 03:55:32,128 INFO [train.py:903] (3/4) Epoch 25, batch 1450, loss[loss=0.2544, simple_loss=0.3251, pruned_loss=0.09186, over 19606.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2837, pruned_loss=0.06136, over 3834302.72 frames. ], batch size: 57, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:55:33,175 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 03:55:37,017 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165326.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:55:48,893 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:56:10,662 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.320e+02 4.584e+02 6.100e+02 8.063e+02 1.924e+03, threshold=1.220e+03, percent-clipped=4.0 +2023-04-03 03:56:33,843 INFO [train.py:903] (3/4) Epoch 25, batch 1500, loss[loss=0.1807, simple_loss=0.2614, pruned_loss=0.05001, over 19791.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06182, over 3842125.08 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:56:59,436 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165391.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:30,062 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:57:36,929 INFO [train.py:903] (3/4) Epoch 25, batch 1550, loss[loss=0.2114, simple_loss=0.2917, pruned_loss=0.06558, over 18301.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2859, pruned_loss=0.06185, over 3838119.59 frames. 
], batch size: 83, lr: 3.30e-03, grad_scale: 4.0 +2023-04-03 03:58:06,810 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5832, 2.3477, 1.8014, 1.5921, 2.1935, 1.4914, 1.3410, 1.9957], + device='cuda:3'), covar=tensor([0.1194, 0.0837, 0.1071, 0.0920, 0.0576, 0.1284, 0.0822, 0.0552], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0268, 0.0249, 0.0343, 0.0293, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 03:58:17,713 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.229e+02 4.957e+02 6.075e+02 6.924e+02 1.069e+03, threshold=1.215e+03, percent-clipped=0.0 +2023-04-03 03:58:37,746 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 03:58:42,876 INFO [train.py:903] (3/4) Epoch 25, batch 1600, loss[loss=0.1763, simple_loss=0.2595, pruned_loss=0.04653, over 19716.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2863, pruned_loss=0.06228, over 3837689.12 frames. ], batch size: 51, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 03:58:51,631 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:58:59,592 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:06,509 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 03:59:21,464 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 03:59:44,266 INFO [train.py:903] (3/4) Epoch 25, batch 1650, loss[loss=0.2009, simple_loss=0.2846, pruned_loss=0.05862, over 18384.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2861, pruned_loss=0.06183, over 3841622.68 frames. ], batch size: 83, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:00:23,740 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 4.584e+02 6.289e+02 7.621e+02 1.672e+03, threshold=1.258e+03, percent-clipped=5.0 +2023-04-03 04:00:44,111 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:00:47,354 INFO [train.py:903] (3/4) Epoch 25, batch 1700, loss[loss=0.2414, simple_loss=0.3152, pruned_loss=0.08374, over 13111.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2856, pruned_loss=0.06176, over 3841783.26 frames. ], batch size: 135, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:22,694 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:01:30,700 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 04:01:49,446 INFO [train.py:903] (3/4) Epoch 25, batch 1750, loss[loss=0.1946, simple_loss=0.273, pruned_loss=0.05807, over 19600.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06155, over 3833310.23 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:01:53,781 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.45 vs. 
limit=5.0 +2023-04-03 04:02:29,218 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.689e+02 5.610e+02 7.383e+02 2.270e+03, threshold=1.122e+03, percent-clipped=4.0 +2023-04-03 04:02:53,379 INFO [train.py:903] (3/4) Epoch 25, batch 1800, loss[loss=0.1966, simple_loss=0.2828, pruned_loss=0.05523, over 19399.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2855, pruned_loss=0.06229, over 3830920.68 frames. ], batch size: 48, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:03:51,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 04:03:56,452 INFO [train.py:903] (3/4) Epoch 25, batch 1850, loss[loss=0.2244, simple_loss=0.3015, pruned_loss=0.07363, over 13727.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2855, pruned_loss=0.06224, over 3825569.50 frames. ], batch size: 135, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:04:28,957 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 04:04:37,061 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.780e+02 4.826e+02 5.776e+02 6.973e+02 2.376e+03, threshold=1.155e+03, percent-clipped=4.0 +2023-04-03 04:04:56,259 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:05:00,660 INFO [train.py:903] (3/4) Epoch 25, batch 1900, loss[loss=0.1876, simple_loss=0.2743, pruned_loss=0.05045, over 19596.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06216, over 3815495.22 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:05:16,869 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 04:05:21,710 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 04:05:24,267 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2762, 3.8213, 3.9462, 3.9550, 1.5923, 3.8002, 3.2420, 3.6858], + device='cuda:3'), covar=tensor([0.1764, 0.0914, 0.0683, 0.0777, 0.5829, 0.0861, 0.0724, 0.1241], + device='cuda:3'), in_proj_covar=tensor([0.0809, 0.0770, 0.0979, 0.0852, 0.0855, 0.0740, 0.0581, 0.0907], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 04:05:48,088 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 04:06:02,741 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165821.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:06:03,531 INFO [train.py:903] (3/4) Epoch 25, batch 1950, loss[loss=0.2145, simple_loss=0.3047, pruned_loss=0.0622, over 19783.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2856, pruned_loss=0.06285, over 3810058.34 frames. 
], batch size: 56, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:06:20,378 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4317, 1.5020, 1.7359, 1.6853, 2.5708, 2.2499, 2.7327, 1.2325], + device='cuda:3'), covar=tensor([0.2554, 0.4340, 0.2869, 0.1985, 0.1564, 0.2269, 0.1443, 0.4698], + device='cuda:3'), in_proj_covar=tensor([0.0542, 0.0655, 0.0730, 0.0493, 0.0625, 0.0538, 0.0661, 0.0560], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:06:44,209 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.671e+02 6.009e+02 7.545e+02 1.239e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 04:06:46,991 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5601, 1.1909, 1.4088, 1.1745, 2.2027, 0.9464, 2.1660, 2.4875], + device='cuda:3'), covar=tensor([0.0758, 0.2941, 0.2885, 0.1844, 0.0884, 0.2399, 0.1060, 0.0470], + device='cuda:3'), in_proj_covar=tensor([0.0415, 0.0370, 0.0393, 0.0350, 0.0375, 0.0354, 0.0388, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:06:48,301 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165856.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:07:08,057 INFO [train.py:903] (3/4) Epoch 25, batch 2000, loss[loss=0.2164, simple_loss=0.3012, pruned_loss=0.06576, over 19574.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2858, pruned_loss=0.06251, over 3821200.58 frames. ], batch size: 61, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:07:20,319 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:00,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=165913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:08:07,746 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 04:08:12,473 INFO [train.py:903] (3/4) Epoch 25, batch 2050, loss[loss=0.2176, simple_loss=0.3054, pruned_loss=0.06487, over 19502.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.286, pruned_loss=0.06235, over 3817641.50 frames. ], batch size: 64, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:08:27,907 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 04:08:27,941 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 04:08:38,772 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6193, 4.2035, 2.6713, 3.7081, 1.0850, 4.1924, 4.0166, 4.1166], + device='cuda:3'), covar=tensor([0.0635, 0.0959, 0.1987, 0.0886, 0.3807, 0.0640, 0.0882, 0.1147], + device='cuda:3'), in_proj_covar=tensor([0.0514, 0.0421, 0.0503, 0.0351, 0.0402, 0.0444, 0.0437, 0.0468], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:08:48,982 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 04:08:51,225 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.115e+02 5.221e+02 6.289e+02 7.653e+02 1.251e+03, threshold=1.258e+03, percent-clipped=2.0 +2023-04-03 04:09:15,824 INFO [train.py:903] (3/4) Epoch 25, batch 2100, loss[loss=0.1894, simple_loss=0.2679, pruned_loss=0.05552, over 19863.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06202, over 3830456.60 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:09:44,599 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 04:10:08,501 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 04:10:19,021 INFO [train.py:903] (3/4) Epoch 25, batch 2150, loss[loss=0.1834, simple_loss=0.2638, pruned_loss=0.05152, over 19858.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2848, pruned_loss=0.06198, over 3826215.28 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 8.0 +2023-04-03 04:10:26,455 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1139, 2.1988, 2.4618, 2.7709, 2.1814, 2.6755, 2.4983, 2.2458], + device='cuda:3'), covar=tensor([0.4284, 0.4116, 0.2018, 0.2449, 0.4337, 0.2244, 0.4740, 0.3414], + device='cuda:3'), in_proj_covar=tensor([0.0917, 0.0992, 0.0729, 0.0937, 0.0895, 0.0829, 0.0855, 0.0793], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 04:10:27,620 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166028.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:10:48,714 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2232, 2.1505, 1.8764, 1.7490, 1.4779, 1.7633, 0.5415, 1.3019], + device='cuda:3'), covar=tensor([0.0661, 0.0673, 0.0605, 0.1046, 0.1410, 0.1119, 0.1576, 0.1175], + device='cuda:3'), in_proj_covar=tensor([0.0362, 0.0359, 0.0363, 0.0387, 0.0465, 0.0392, 0.0341, 0.0346], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:10:58,799 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.055e+02 6.614e+02 9.413e+02 1.694e+03, threshold=1.323e+03, percent-clipped=9.0 +2023-04-03 04:11:21,716 INFO [train.py:903] (3/4) Epoch 25, batch 2200, loss[loss=0.1742, simple_loss=0.2516, pruned_loss=0.04838, over 19479.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2857, pruned_loss=0.06299, over 3809950.65 frames. 
], batch size: 49, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:11:22,010 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166072.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:11:38,557 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0874, 2.0081, 1.7895, 2.1607, 1.9970, 1.7779, 1.7329, 2.0115], + device='cuda:3'), covar=tensor([0.1044, 0.1462, 0.1444, 0.1008, 0.1307, 0.0561, 0.1443, 0.0717], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0355, 0.0311, 0.0254, 0.0301, 0.0252, 0.0313, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:12:02,237 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4838, 2.2557, 1.6713, 1.5868, 2.1166, 1.4497, 1.3182, 1.9966], + device='cuda:3'), covar=tensor([0.1152, 0.0794, 0.1155, 0.0891, 0.0564, 0.1278, 0.0841, 0.0512], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0318, 0.0340, 0.0267, 0.0249, 0.0341, 0.0293, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:12:14,463 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166112.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:12:26,148 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4441, 3.7545, 2.4112, 2.1118, 3.6255, 2.0287, 1.8483, 2.9548], + device='cuda:3'), covar=tensor([0.1066, 0.0619, 0.0883, 0.0970, 0.0411, 0.1192, 0.0871, 0.0426], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0317, 0.0339, 0.0266, 0.0248, 0.0340, 0.0292, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:12:26,901 INFO [train.py:903] (3/4) Epoch 25, batch 2250, loss[loss=0.1798, simple_loss=0.2528, pruned_loss=0.05346, over 17011.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2856, pruned_loss=0.06252, over 3814662.85 frames. 
], batch size: 37, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:12:34,230 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9595, 1.9188, 1.8120, 1.5971, 1.4746, 1.6320, 0.4047, 0.9127], + device='cuda:3'), covar=tensor([0.0662, 0.0666, 0.0415, 0.0719, 0.1188, 0.0797, 0.1391, 0.1120], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0360, 0.0364, 0.0389, 0.0466, 0.0393, 0.0343, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:13:04,902 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.777e+02 5.716e+02 7.657e+02 1.924e+03, threshold=1.143e+03, percent-clipped=2.0 +2023-04-03 04:13:17,411 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0624, 3.0873, 1.8166, 2.0118, 2.8471, 1.6578, 1.4729, 2.3714], + device='cuda:3'), covar=tensor([0.1285, 0.0793, 0.1200, 0.0808, 0.0562, 0.1314, 0.1032, 0.0637], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0317, 0.0340, 0.0267, 0.0248, 0.0341, 0.0292, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:13:21,685 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:21,827 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:13:31,168 INFO [train.py:903] (3/4) Epoch 25, batch 2300, loss[loss=0.1889, simple_loss=0.2748, pruned_loss=0.05154, over 19754.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2866, pruned_loss=0.0628, over 3824907.77 frames. ], batch size: 54, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:13:42,665 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 04:14:00,647 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5717, 1.4198, 1.4600, 2.1173, 1.5995, 1.7649, 1.7104, 1.6000], + device='cuda:3'), covar=tensor([0.0926, 0.1098, 0.1107, 0.0751, 0.0965, 0.0937, 0.1041, 0.0842], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0222, 0.0226, 0.0239, 0.0226, 0.0213, 0.0189, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 04:14:34,779 INFO [train.py:903] (3/4) Epoch 25, batch 2350, loss[loss=0.1774, simple_loss=0.2721, pruned_loss=0.04139, over 19676.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2864, pruned_loss=0.06251, over 3830496.37 frames. ], batch size: 58, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:14:42,129 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166227.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:14,911 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.104e+02 6.317e+02 8.219e+02 1.547e+03, threshold=1.263e+03, percent-clipped=3.0 +2023-04-03 04:15:17,158 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 04:15:24,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9736, 1.9258, 1.7412, 2.1055, 1.8994, 1.7978, 1.6747, 1.9713], + device='cuda:3'), covar=tensor([0.1113, 0.1524, 0.1464, 0.1067, 0.1340, 0.0583, 0.1476, 0.0747], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0354, 0.0311, 0.0253, 0.0301, 0.0252, 0.0312, 0.0258], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:15:34,627 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 04:15:38,210 INFO [train.py:903] (3/4) Epoch 25, batch 2400, loss[loss=0.1725, simple_loss=0.2583, pruned_loss=0.04335, over 19750.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06256, over 3840988.83 frames. ], batch size: 54, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:15:48,864 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166280.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:15:55,274 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166284.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:25,818 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:16:29,069 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.0396, 5.4444, 3.2746, 4.7104, 1.2619, 5.6416, 5.4610, 5.6725], + device='cuda:3'), covar=tensor([0.0391, 0.0815, 0.1691, 0.0786, 0.3978, 0.0548, 0.0784, 0.0788], + device='cuda:3'), in_proj_covar=tensor([0.0517, 0.0421, 0.0504, 0.0353, 0.0404, 0.0446, 0.0438, 0.0469], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:16:41,764 INFO [train.py:903] (3/4) Epoch 25, batch 2450, loss[loss=0.2198, simple_loss=0.2996, pruned_loss=0.06995, over 19533.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2857, pruned_loss=0.06195, over 3841799.73 frames. ], batch size: 54, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:16:46,748 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:17:20,232 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.023e+02 4.903e+02 5.916e+02 7.490e+02 1.353e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-04-03 04:17:44,933 INFO [train.py:903] (3/4) Epoch 25, batch 2500, loss[loss=0.1851, simple_loss=0.2749, pruned_loss=0.0476, over 19651.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06184, over 3844955.45 frames. 
], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:17:52,048 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5242, 1.6100, 1.7500, 1.7078, 2.4501, 2.1727, 2.5885, 1.1946], + device='cuda:3'), covar=tensor([0.2480, 0.4208, 0.2667, 0.1939, 0.1547, 0.2266, 0.1407, 0.4499], + device='cuda:3'), in_proj_covar=tensor([0.0543, 0.0656, 0.0731, 0.0494, 0.0625, 0.0539, 0.0664, 0.0561], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:18:41,295 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166416.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:18:48,043 INFO [train.py:903] (3/4) Epoch 25, batch 2550, loss[loss=0.2281, simple_loss=0.2993, pruned_loss=0.07851, over 19668.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06224, over 3829651.32 frames. ], batch size: 53, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:19:21,986 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-03 04:19:28,551 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 5.112e+02 6.516e+02 8.109e+02 2.174e+03, threshold=1.303e+03, percent-clipped=8.0 +2023-04-03 04:19:40,847 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2296, 2.9237, 2.3203, 2.3245, 2.3107, 2.5513, 0.7661, 2.0809], + device='cuda:3'), covar=tensor([0.0694, 0.0598, 0.0703, 0.1198, 0.1035, 0.1125, 0.1618, 0.1094], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0360, 0.0363, 0.0387, 0.0467, 0.0393, 0.0340, 0.0345], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:19:46,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 04:19:53,287 INFO [train.py:903] (3/4) Epoch 25, batch 2600, loss[loss=0.1704, simple_loss=0.2481, pruned_loss=0.04637, over 19375.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2854, pruned_loss=0.06214, over 3829889.27 frames. ], batch size: 48, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:20:08,599 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166483.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:29,546 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:34,775 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 04:20:40,256 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166508.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:41,204 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166509.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:20:58,302 INFO [train.py:903] (3/4) Epoch 25, batch 2650, loss[loss=0.191, simple_loss=0.2745, pruned_loss=0.05373, over 17213.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2853, pruned_loss=0.06206, over 3831911.30 frames. 
], batch size: 101, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:21:05,517 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:10,215 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166531.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:21:12,677 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5113, 1.4562, 1.7758, 1.4850, 2.4254, 3.3143, 3.0543, 3.5303], + device='cuda:3'), covar=tensor([0.1174, 0.3095, 0.2695, 0.2023, 0.0576, 0.0211, 0.0200, 0.0239], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0328, 0.0359, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 04:21:17,599 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166536.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:19,774 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 04:21:32,642 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:21:38,171 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.868e+02 4.612e+02 5.770e+02 6.756e+02 1.610e+03, threshold=1.154e+03, percent-clipped=1.0 +2023-04-03 04:21:49,313 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166561.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:22:03,059 INFO [train.py:903] (3/4) Epoch 25, batch 2700, loss[loss=0.1905, simple_loss=0.2639, pruned_loss=0.05859, over 19779.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.284, pruned_loss=0.06153, over 3800229.69 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:22:21,047 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-03 04:23:06,181 INFO [train.py:903] (3/4) Epoch 25, batch 2750, loss[loss=0.2375, simple_loss=0.3233, pruned_loss=0.07582, over 19596.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2831, pruned_loss=0.06107, over 3808676.45 frames. ], batch size: 61, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:23:08,979 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:23:45,197 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.014e+02 4.918e+02 6.109e+02 7.546e+02 1.552e+03, threshold=1.222e+03, percent-clipped=5.0 +2023-04-03 04:24:04,581 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:24:07,645 INFO [train.py:903] (3/4) Epoch 25, batch 2800, loss[loss=0.223, simple_loss=0.3067, pruned_loss=0.06959, over 19577.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2844, pruned_loss=0.06192, over 3817822.41 frames. 
], batch size: 61, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:24:43,344 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3244, 3.8389, 3.9380, 3.9219, 1.5887, 3.7235, 3.2595, 3.6968], + device='cuda:3'), covar=tensor([0.1707, 0.0835, 0.0666, 0.0787, 0.5903, 0.1128, 0.0709, 0.1146], + device='cuda:3'), in_proj_covar=tensor([0.0803, 0.0766, 0.0975, 0.0853, 0.0851, 0.0740, 0.0580, 0.0905], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 04:25:11,679 INFO [train.py:903] (3/4) Epoch 25, batch 2850, loss[loss=0.2345, simple_loss=0.3075, pruned_loss=0.08078, over 13509.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2857, pruned_loss=0.06217, over 3816188.24 frames. ], batch size: 136, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:25:50,394 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.563e+02 5.950e+02 8.060e+02 1.987e+03, threshold=1.190e+03, percent-clipped=10.0 +2023-04-03 04:25:50,711 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:11,850 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 04:26:14,215 INFO [train.py:903] (3/4) Epoch 25, batch 2900, loss[loss=0.1917, simple_loss=0.2627, pruned_loss=0.06033, over 19765.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2866, pruned_loss=0.06296, over 3798086.35 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:26:27,366 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166782.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:29,642 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:26:33,237 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166787.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 04:27:05,025 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166812.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 04:27:07,372 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3352, 2.3367, 2.6326, 3.1106, 2.3786, 2.9781, 2.6562, 2.4946], + device='cuda:3'), covar=tensor([0.4311, 0.4415, 0.2046, 0.2623, 0.4591, 0.2288, 0.4907, 0.3271], + device='cuda:3'), in_proj_covar=tensor([0.0918, 0.0990, 0.0729, 0.0938, 0.0894, 0.0829, 0.0853, 0.0793], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 04:27:17,225 INFO [train.py:903] (3/4) Epoch 25, batch 2950, loss[loss=0.2142, simple_loss=0.2992, pruned_loss=0.06463, over 19716.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2851, pruned_loss=0.0623, over 3794879.95 frames. 
], batch size: 63, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:27:37,592 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8342, 1.6130, 2.1002, 2.0321, 3.3246, 4.8926, 4.7640, 5.3114], + device='cuda:3'), covar=tensor([0.1362, 0.3556, 0.3159, 0.1937, 0.0524, 0.0190, 0.0154, 0.0179], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0329, 0.0359, 0.0268, 0.0249, 0.0192, 0.0218, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 04:27:44,175 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:27:56,482 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.674e+02 5.957e+02 7.713e+02 2.101e+03, threshold=1.191e+03, percent-clipped=6.0 +2023-04-03 04:28:20,024 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:28:21,107 INFO [train.py:903] (3/4) Epoch 25, batch 3000, loss[loss=0.1596, simple_loss=0.2478, pruned_loss=0.03573, over 19857.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2834, pruned_loss=0.06119, over 3812665.73 frames. ], batch size: 52, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:28:21,107 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 04:28:33,780 INFO [train.py:937] (3/4) Epoch 25, validation: loss=0.1677, simple_loss=0.2674, pruned_loss=0.034, over 944034.00 frames. +2023-04-03 04:28:33,781 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 04:28:35,111 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 04:28:44,963 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:01,376 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=166893.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:17,226 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166905.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:29:18,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9024, 1.9805, 2.2855, 2.5007, 1.8522, 2.4107, 2.2900, 2.0477], + device='cuda:3'), covar=tensor([0.4296, 0.4095, 0.1973, 0.2491, 0.4263, 0.2297, 0.4952, 0.3543], + device='cuda:3'), in_proj_covar=tensor([0.0919, 0.0992, 0.0730, 0.0939, 0.0894, 0.0829, 0.0853, 0.0795], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 04:29:33,259 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9015, 1.4877, 1.8229, 1.8114, 4.4120, 1.0677, 2.7560, 4.8643], + device='cuda:3'), covar=tensor([0.0433, 0.2953, 0.2880, 0.1920, 0.0735, 0.2781, 0.1345, 0.0150], + device='cuda:3'), in_proj_covar=tensor([0.0415, 0.0371, 0.0393, 0.0348, 0.0375, 0.0353, 0.0390, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:29:38,478 INFO [train.py:903] (3/4) Epoch 25, batch 3050, loss[loss=0.1922, simple_loss=0.2751, pruned_loss=0.05463, over 19660.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2833, pruned_loss=0.0608, over 3810481.60 frames. 
], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:29:53,819 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0251, 2.0675, 2.3441, 2.6684, 2.0433, 2.6136, 2.3904, 2.0948], + device='cuda:3'), covar=tensor([0.4279, 0.4075, 0.1995, 0.2493, 0.4263, 0.2127, 0.4912, 0.3452], + device='cuda:3'), in_proj_covar=tensor([0.0918, 0.0992, 0.0730, 0.0939, 0.0894, 0.0829, 0.0854, 0.0795], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 04:30:09,956 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:17,431 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.386e+02 4.942e+02 6.233e+02 8.295e+02 1.859e+03, threshold=1.247e+03, percent-clipped=9.0 +2023-04-03 04:30:23,351 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:30:41,891 INFO [train.py:903] (3/4) Epoch 25, batch 3100, loss[loss=0.2142, simple_loss=0.2962, pruned_loss=0.06608, over 19649.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.06133, over 3801839.56 frames. ], batch size: 55, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:30:58,955 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:27,868 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:31:44,391 INFO [train.py:903] (3/4) Epoch 25, batch 3150, loss[loss=0.2525, simple_loss=0.3188, pruned_loss=0.09309, over 12833.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06189, over 3780337.82 frames. ], batch size: 136, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:32:08,044 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167040.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:08,790 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 04:32:24,685 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.039e+02 6.011e+02 8.459e+02 2.094e+03, threshold=1.202e+03, percent-clipped=4.0 +2023-04-03 04:32:39,303 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167065.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:32:48,324 INFO [train.py:903] (3/4) Epoch 25, batch 3200, loss[loss=0.2037, simple_loss=0.2978, pruned_loss=0.05477, over 19601.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06132, over 3794345.73 frames. ], batch size: 57, lr: 3.29e-03, grad_scale: 8.0 +2023-04-03 04:33:20,651 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:47,236 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:52,593 INFO [train.py:903] (3/4) Epoch 25, batch 3250, loss[loss=0.205, simple_loss=0.2857, pruned_loss=0.06222, over 17327.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06136, over 3799392.65 frames. 
], batch size: 101, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:33:52,913 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:33:57,478 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167126.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:34:33,315 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.514e+02 4.897e+02 6.248e+02 7.764e+02 1.427e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 04:34:56,647 INFO [train.py:903] (3/4) Epoch 25, batch 3300, loss[loss=0.1683, simple_loss=0.253, pruned_loss=0.04185, over 19394.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06106, over 3799522.75 frames. ], batch size: 48, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:34:56,699 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 04:35:15,541 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8179, 1.5535, 1.8682, 2.0917, 4.3759, 1.2566, 2.6182, 4.8439], + device='cuda:3'), covar=tensor([0.0508, 0.2922, 0.2798, 0.1765, 0.0756, 0.2705, 0.1438, 0.0168], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0372, 0.0393, 0.0349, 0.0377, 0.0353, 0.0391, 0.0411], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:35:47,873 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:50,116 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:35:59,020 INFO [train.py:903] (3/4) Epoch 25, batch 3350, loss[loss=0.2698, simple_loss=0.3469, pruned_loss=0.09628, over 19781.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2853, pruned_loss=0.06217, over 3791609.17 frames. ], batch size: 56, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:36:21,826 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:24,302 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167241.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:25,490 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167242.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:41,239 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.821e+02 5.770e+02 7.565e+02 1.496e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-04-03 04:36:53,552 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167264.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:36:56,851 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167267.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:02,320 INFO [train.py:903] (3/4) Epoch 25, batch 3400, loss[loss=0.192, simple_loss=0.2655, pruned_loss=0.05926, over 19304.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2839, pruned_loss=0.06096, over 3809691.23 frames. 
], batch size: 44, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:37:26,142 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:37:28,368 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:38:08,136 INFO [train.py:903] (3/4) Epoch 25, batch 3450, loss[loss=0.1967, simple_loss=0.2756, pruned_loss=0.05884, over 19479.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06054, over 3820654.35 frames. ], batch size: 64, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:38:10,566 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 04:38:36,023 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2370, 1.2958, 1.3083, 1.1087, 1.1253, 1.1929, 0.0695, 0.3995], + device='cuda:3'), covar=tensor([0.0784, 0.0714, 0.0487, 0.0639, 0.1398, 0.0718, 0.1422, 0.1208], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0360, 0.0362, 0.0386, 0.0465, 0.0393, 0.0341, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:38:49,778 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.112e+02 4.608e+02 5.631e+02 6.989e+02 1.333e+03, threshold=1.126e+03, percent-clipped=1.0 +2023-04-03 04:39:07,013 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2648, 2.9889, 2.4138, 2.3490, 2.1940, 2.6653, 1.0929, 2.1503], + device='cuda:3'), covar=tensor([0.0714, 0.0566, 0.0680, 0.1091, 0.1140, 0.0988, 0.1470, 0.1081], + device='cuda:3'), in_proj_covar=tensor([0.0361, 0.0358, 0.0361, 0.0385, 0.0464, 0.0392, 0.0340, 0.0343], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 04:39:12,381 INFO [train.py:903] (3/4) Epoch 25, batch 3500, loss[loss=0.1638, simple_loss=0.2445, pruned_loss=0.04151, over 19713.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06102, over 3830277.85 frames. ], batch size: 46, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:39:42,171 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3552, 2.0794, 1.6264, 1.4373, 1.9502, 1.2721, 1.3384, 1.7922], + device='cuda:3'), covar=tensor([0.1094, 0.0860, 0.1109, 0.0880, 0.0590, 0.1380, 0.0761, 0.0534], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0318, 0.0336, 0.0266, 0.0248, 0.0342, 0.0291, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:39:56,149 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:15,422 INFO [train.py:903] (3/4) Epoch 25, batch 3550, loss[loss=0.1649, simple_loss=0.2498, pruned_loss=0.04001, over 19471.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06077, over 3829108.67 frames. 
], batch size: 49, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:40:51,453 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167450.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:40:55,637 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 5.129e+02 6.252e+02 7.981e+02 1.792e+03, threshold=1.250e+03, percent-clipped=6.0 +2023-04-03 04:41:06,612 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:11,119 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167466.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:13,868 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:17,992 INFO [train.py:903] (3/4) Epoch 25, batch 3600, loss[loss=0.2065, simple_loss=0.2877, pruned_loss=0.06259, over 19589.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2822, pruned_loss=0.06046, over 3819631.86 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:41:44,885 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:41:50,869 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167497.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:01,230 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4231, 1.2290, 1.5140, 1.6872, 2.9918, 1.3052, 2.2618, 3.4709], + device='cuda:3'), covar=tensor([0.0525, 0.3157, 0.3043, 0.1740, 0.0743, 0.2399, 0.1285, 0.0260], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0373, 0.0394, 0.0349, 0.0377, 0.0353, 0.0391, 0.0411], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:42:20,840 INFO [train.py:903] (3/4) Epoch 25, batch 3650, loss[loss=0.161, simple_loss=0.2363, pruned_loss=0.04291, over 19734.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06057, over 3831034.03 frames. ], batch size: 45, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:42:21,260 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:42:36,707 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 04:43:00,369 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 5.264e+02 6.466e+02 8.043e+02 1.528e+03, threshold=1.293e+03, percent-clipped=3.0 +2023-04-03 04:43:24,179 INFO [train.py:903] (3/4) Epoch 25, batch 3700, loss[loss=0.2084, simple_loss=0.2701, pruned_loss=0.07334, over 19046.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2842, pruned_loss=0.06166, over 3819049.42 frames. ], batch size: 42, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:43:31,637 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:43:36,335 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:44:28,009 INFO [train.py:903] (3/4) Epoch 25, batch 3750, loss[loss=0.2509, simple_loss=0.3214, pruned_loss=0.09015, over 17201.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2847, pruned_loss=0.06188, over 3813522.99 frames. 
], batch size: 101, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:44:57,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6161, 1.7330, 1.9525, 1.9992, 1.5624, 1.9205, 1.9402, 1.7886], + device='cuda:3'), covar=tensor([0.4400, 0.3663, 0.2029, 0.2417, 0.3836, 0.2215, 0.5441, 0.3568], + device='cuda:3'), in_proj_covar=tensor([0.0923, 0.0995, 0.0731, 0.0943, 0.0898, 0.0832, 0.0857, 0.0797], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 04:45:08,827 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.966e+02 5.285e+02 6.593e+02 8.150e+02 1.742e+03, threshold=1.319e+03, percent-clipped=4.0 +2023-04-03 04:45:19,734 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3829, 3.1366, 2.2217, 2.8481, 0.7676, 3.0584, 2.9610, 3.0083], + device='cuda:3'), covar=tensor([0.1131, 0.1385, 0.2160, 0.0995, 0.3869, 0.1045, 0.1159, 0.1477], + device='cuda:3'), in_proj_covar=tensor([0.0521, 0.0422, 0.0509, 0.0356, 0.0407, 0.0450, 0.0441, 0.0473], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:45:19,962 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:45:31,297 INFO [train.py:903] (3/4) Epoch 25, batch 3800, loss[loss=0.2032, simple_loss=0.2874, pruned_loss=0.05947, over 19662.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.284, pruned_loss=0.06157, over 3816245.27 frames. ], batch size: 53, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:45:50,870 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:46:01,963 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 04:46:32,955 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 04:46:35,334 INFO [train.py:903] (3/4) Epoch 25, batch 3850, loss[loss=0.2616, simple_loss=0.3391, pruned_loss=0.09205, over 19335.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06277, over 3776450.86 frames. ], batch size: 66, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:46:54,897 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167737.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:47:16,489 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.257e+02 4.991e+02 5.959e+02 7.597e+02 1.650e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 04:47:39,474 INFO [train.py:903] (3/4) Epoch 25, batch 3900, loss[loss=0.2064, simple_loss=0.2935, pruned_loss=0.05962, over 17611.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2861, pruned_loss=0.06244, over 3780746.46 frames. ], batch size: 101, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:47:55,782 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167785.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:06,238 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=167794.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:48:43,214 INFO [train.py:903] (3/4) Epoch 25, batch 3950, loss[loss=0.1644, simple_loss=0.2429, pruned_loss=0.04289, over 19392.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2856, pruned_loss=0.06217, over 3799399.64 frames. 
], batch size: 47, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:48:45,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 04:48:56,960 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:02,584 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:13,779 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 04:49:25,130 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 4.848e+02 5.890e+02 7.638e+02 1.655e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 04:49:29,085 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:34,616 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:49:46,052 INFO [train.py:903] (3/4) Epoch 25, batch 4000, loss[loss=0.2213, simple_loss=0.3081, pruned_loss=0.06729, over 19111.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2861, pruned_loss=0.06235, over 3806909.50 frames. ], batch size: 69, lr: 3.28e-03, grad_scale: 8.0 +2023-04-03 04:50:32,137 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 04:50:33,687 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:50:50,811 INFO [train.py:903] (3/4) Epoch 25, batch 4050, loss[loss=0.1683, simple_loss=0.2431, pruned_loss=0.04672, over 19750.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2864, pruned_loss=0.06243, over 3817419.68 frames. ], batch size: 48, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:51:32,273 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 5.057e+02 6.529e+02 8.113e+02 1.821e+03, threshold=1.306e+03, percent-clipped=7.0 +2023-04-03 04:51:52,926 INFO [train.py:903] (3/4) Epoch 25, batch 4100, loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.06323, over 19840.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2861, pruned_loss=0.06253, over 3834776.59 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:52:24,786 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 04:52:28,533 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 04:52:56,836 INFO [train.py:903] (3/4) Epoch 25, batch 4150, loss[loss=0.1855, simple_loss=0.2674, pruned_loss=0.05177, over 19849.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2852, pruned_loss=0.06226, over 3822364.22 frames. ], batch size: 52, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:53:36,881 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168053.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:53:39,909 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.124e+02 5.393e+02 6.582e+02 8.080e+02 1.683e+03, threshold=1.316e+03, percent-clipped=2.0 +2023-04-03 04:53:59,615 INFO [train.py:903] (3/4) Epoch 25, batch 4200, loss[loss=0.2303, simple_loss=0.3151, pruned_loss=0.07281, over 19666.00 frames. 
], tot_loss[loss=0.2041, simple_loss=0.2845, pruned_loss=0.06183, over 3810048.86 frames. ], batch size: 58, lr: 3.28e-03, grad_scale: 4.0 +2023-04-03 04:54:01,978 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 04:54:10,933 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168081.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:54:18,319 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 04:55:03,255 INFO [train.py:903] (3/4) Epoch 25, batch 4250, loss[loss=0.1816, simple_loss=0.2628, pruned_loss=0.05019, over 19729.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2852, pruned_loss=0.06225, over 3803554.33 frames. ], batch size: 51, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:55:13,289 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:55:18,000 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 04:55:20,783 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-04-03 04:55:29,602 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 04:55:46,756 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.961e+02 6.439e+02 7.740e+02 2.119e+03, threshold=1.288e+03, percent-clipped=3.0 +2023-04-03 04:55:59,670 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168165.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:07,290 INFO [train.py:903] (3/4) Epoch 25, batch 4300, loss[loss=0.2167, simple_loss=0.2955, pruned_loss=0.06893, over 19546.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.0616, over 3817799.38 frames. ], batch size: 56, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:56:15,589 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.33 vs. limit=5.0 +2023-04-03 04:56:29,213 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5060, 1.1010, 1.3735, 1.0262, 2.1516, 0.8834, 2.0621, 2.4105], + device='cuda:3'), covar=tensor([0.0940, 0.3260, 0.3003, 0.2042, 0.1142, 0.2410, 0.1194, 0.0550], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0375, 0.0395, 0.0351, 0.0379, 0.0354, 0.0393, 0.0413], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 04:56:30,458 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168190.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:37,478 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168196.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:56:42,775 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.97 vs. limit=5.0 +2023-04-03 04:57:00,322 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 04:57:10,760 INFO [train.py:903] (3/4) Epoch 25, batch 4350, loss[loss=0.1918, simple_loss=0.2649, pruned_loss=0.0593, over 19479.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.283, pruned_loss=0.06092, over 3831291.19 frames. 
], batch size: 49, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 04:57:38,657 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:47,840 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168251.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 04:57:49,665 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-03 04:57:53,364 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 4.774e+02 5.735e+02 7.211e+02 1.236e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 04:58:13,426 INFO [train.py:903] (3/4) Epoch 25, batch 4400, loss[loss=0.1908, simple_loss=0.2827, pruned_loss=0.0495, over 19676.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2828, pruned_loss=0.06085, over 3837384.71 frames. ], batch size: 59, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:58:40,147 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 04:58:50,365 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 04:59:16,256 INFO [train.py:903] (3/4) Epoch 25, batch 4450, loss[loss=0.1843, simple_loss=0.2577, pruned_loss=0.05548, over 19759.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2819, pruned_loss=0.06035, over 3831631.30 frames. ], batch size: 46, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 04:59:59,625 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.609e+02 5.804e+02 7.720e+02 1.927e+03, threshold=1.161e+03, percent-clipped=7.0 +2023-04-03 05:00:17,551 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-03 05:00:20,129 INFO [train.py:903] (3/4) Epoch 25, batch 4500, loss[loss=0.1758, simple_loss=0.2592, pruned_loss=0.0462, over 16118.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2829, pruned_loss=0.0608, over 3818312.33 frames. ], batch size: 35, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:00:44,698 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 05:00:52,068 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:01:23,386 INFO [train.py:903] (3/4) Epoch 25, batch 4550, loss[loss=0.2012, simple_loss=0.2869, pruned_loss=0.0577, over 19747.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2833, pruned_loss=0.06083, over 3823557.33 frames. ], batch size: 63, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:01:34,659 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 05:01:59,985 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. 
Duration: 25.45 +2023-04-03 05:02:02,785 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:08,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.350e+02 4.803e+02 5.692e+02 6.651e+02 1.392e+03, threshold=1.138e+03, percent-clipped=3.0 +2023-04-03 05:02:15,309 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2846, 1.3570, 1.8243, 1.4283, 2.8283, 3.8893, 3.5826, 4.0656], + device='cuda:3'), covar=tensor([0.1532, 0.3781, 0.3175, 0.2372, 0.0595, 0.0181, 0.0200, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0327, 0.0357, 0.0267, 0.0248, 0.0191, 0.0217, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:02:27,736 INFO [train.py:903] (3/4) Epoch 25, batch 4600, loss[loss=0.1905, simple_loss=0.2846, pruned_loss=0.04817, over 19713.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2829, pruned_loss=0.06052, over 3821183.65 frames. ], batch size: 63, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:02:35,453 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168477.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:02:59,875 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7121, 4.2897, 2.8111, 3.7357, 0.9563, 4.2654, 4.1533, 4.2558], + device='cuda:3'), covar=tensor([0.0639, 0.0959, 0.1994, 0.0925, 0.4194, 0.0658, 0.0918, 0.1142], + device='cuda:3'), in_proj_covar=tensor([0.0517, 0.0420, 0.0504, 0.0353, 0.0405, 0.0447, 0.0441, 0.0471], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:03:04,783 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168500.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:20,628 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:27,493 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 05:03:31,417 INFO [train.py:903] (3/4) Epoch 25, batch 4650, loss[loss=0.2083, simple_loss=0.2893, pruned_loss=0.06367, over 19592.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06037, over 3822034.19 frames. ], batch size: 52, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:03:36,201 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:03:50,731 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 05:04:02,086 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 05:04:02,433 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2083, 1.1643, 1.5389, 1.3306, 2.3807, 3.4151, 3.1566, 3.7042], + device='cuda:3'), covar=tensor([0.1862, 0.5213, 0.4664, 0.2764, 0.0891, 0.0260, 0.0293, 0.0333], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0326, 0.0356, 0.0267, 0.0248, 0.0191, 0.0217, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:04:16,036 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.438e+02 5.060e+02 6.015e+02 7.632e+02 1.453e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 05:04:34,758 INFO [train.py:903] (3/4) Epoch 25, batch 4700, loss[loss=0.2433, simple_loss=0.3197, pruned_loss=0.08348, over 19507.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06046, over 3835380.81 frames. ], batch size: 64, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:04:57,660 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 05:05:04,395 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=168595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:05:38,975 INFO [train.py:903] (3/4) Epoch 25, batch 4750, loss[loss=0.1948, simple_loss=0.2839, pruned_loss=0.0528, over 19372.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2848, pruned_loss=0.06131, over 3824310.44 frames. ], batch size: 70, lr: 3.27e-03, grad_scale: 4.0 +2023-04-03 05:06:06,418 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 05:06:09,934 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 05:06:22,546 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.029e+02 6.051e+02 7.124e+02 1.309e+03, threshold=1.210e+03, percent-clipped=1.0 +2023-04-03 05:06:41,004 INFO [train.py:903] (3/4) Epoch 25, batch 4800, loss[loss=0.2006, simple_loss=0.2701, pruned_loss=0.06552, over 19766.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06108, over 3835783.37 frames. ], batch size: 48, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:06:59,405 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8889, 2.6262, 2.4114, 2.7720, 2.5525, 2.3874, 2.1093, 2.6683], + device='cuda:3'), covar=tensor([0.0793, 0.1321, 0.1233, 0.1000, 0.1282, 0.0478, 0.1327, 0.0628], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0356, 0.0315, 0.0253, 0.0303, 0.0254, 0.0314, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:07:29,217 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168710.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:07:43,821 INFO [train.py:903] (3/4) Epoch 25, batch 4850, loss[loss=0.2072, simple_loss=0.2854, pruned_loss=0.06446, over 19312.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06122, over 3822686.57 frames. ], batch size: 66, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:08:09,409 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. 
Duration: 0.95 +2023-04-03 05:08:29,303 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.525e+02 5.536e+02 6.758e+02 9.275e+02 1.787e+03, threshold=1.352e+03, percent-clipped=12.0 +2023-04-03 05:08:30,530 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 05:08:36,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 05:08:36,451 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 05:08:43,837 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:08:47,080 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 05:08:48,234 INFO [train.py:903] (3/4) Epoch 25, batch 4900, loss[loss=0.1895, simple_loss=0.276, pruned_loss=0.05146, over 19506.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06102, over 3805206.05 frames. ], batch size: 64, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:09:06,552 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 05:09:10,277 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3128, 0.9897, 1.1586, 1.9365, 1.4611, 1.2289, 1.4733, 1.2003], + device='cuda:3'), covar=tensor([0.1156, 0.1774, 0.1424, 0.0854, 0.1106, 0.1452, 0.1282, 0.1147], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0226, 0.0238, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 05:09:16,001 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:09:52,880 INFO [train.py:903] (3/4) Epoch 25, batch 4950, loss[loss=0.1856, simple_loss=0.2708, pruned_loss=0.0502, over 19766.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2842, pruned_loss=0.06101, over 3815637.32 frames. ], batch size: 54, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:10:04,467 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 05:10:30,151 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 05:10:36,919 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.693e+02 5.645e+02 7.417e+02 1.662e+03, threshold=1.129e+03, percent-clipped=1.0 +2023-04-03 05:10:48,988 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9779, 1.9737, 2.1839, 2.0705, 2.8380, 2.5709, 2.8467, 1.9861], + device='cuda:3'), covar=tensor([0.1936, 0.3450, 0.2262, 0.1720, 0.1315, 0.1809, 0.1288, 0.3812], + device='cuda:3'), in_proj_covar=tensor([0.0548, 0.0666, 0.0739, 0.0501, 0.0630, 0.0544, 0.0670, 0.0567], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 05:10:50,514 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 05:10:55,827 INFO [train.py:903] (3/4) Epoch 25, batch 5000, loss[loss=0.2148, simple_loss=0.2998, pruned_loss=0.06486, over 19109.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2841, pruned_loss=0.06088, over 3795283.67 frames. 
], batch size: 69, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:11:02,595 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 05:11:13,720 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 05:11:58,388 INFO [train.py:903] (3/4) Epoch 25, batch 5050, loss[loss=0.2033, simple_loss=0.2789, pruned_loss=0.06383, over 19464.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2864, pruned_loss=0.06206, over 3813215.79 frames. ], batch size: 49, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:12:06,816 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9622, 2.0564, 2.3378, 2.6329, 2.0165, 2.5322, 2.3307, 2.0534], + device='cuda:3'), covar=tensor([0.4399, 0.4216, 0.1933, 0.2605, 0.4305, 0.2313, 0.4911, 0.3518], + device='cuda:3'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0941, 0.0896, 0.0833, 0.0851, 0.0796], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 05:12:18,646 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5901, 1.6856, 1.9756, 1.9216, 1.4299, 1.8678, 1.9502, 1.7683], + device='cuda:3'), covar=tensor([0.4387, 0.4008, 0.2007, 0.2580, 0.4075, 0.2337, 0.5403, 0.3662], + device='cuda:3'), in_proj_covar=tensor([0.0920, 0.0994, 0.0731, 0.0942, 0.0897, 0.0833, 0.0851, 0.0796], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 05:12:33,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 05:12:41,880 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.713e+02 5.534e+02 7.099e+02 1.364e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-04-03 05:12:55,415 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168966.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:02,137 INFO [train.py:903] (3/4) Epoch 25, batch 5100, loss[loss=0.1702, simple_loss=0.2465, pruned_loss=0.04695, over 19838.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2863, pruned_loss=0.06199, over 3809644.08 frames. ], batch size: 49, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:13:11,321 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 05:13:14,822 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 05:13:19,303 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168985.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:20,126 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-03 05:13:21,522 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168987.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:13:26,030 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:13:32,755 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7660, 1.6110, 1.6168, 2.2117, 1.6786, 2.1335, 2.1092, 1.8364], + device='cuda:3'), covar=tensor([0.0839, 0.0946, 0.1006, 0.0770, 0.0868, 0.0736, 0.0873, 0.0685], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0224, 0.0226, 0.0239, 0.0226, 0.0214, 0.0189, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 05:14:05,065 INFO [train.py:903] (3/4) Epoch 25, batch 5150, loss[loss=0.2162, simple_loss=0.3019, pruned_loss=0.06532, over 19500.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06171, over 3810333.62 frames. ], batch size: 64, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:14:16,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 05:14:31,986 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3159, 2.0086, 1.5074, 1.3732, 1.8354, 1.2312, 1.2741, 1.7770], + device='cuda:3'), covar=tensor([0.0984, 0.0799, 0.1047, 0.0851, 0.0486, 0.1366, 0.0715, 0.0476], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0317, 0.0338, 0.0268, 0.0249, 0.0342, 0.0294, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:14:48,111 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 5.295e+02 6.704e+02 8.101e+02 2.101e+03, threshold=1.341e+03, percent-clipped=6.0 +2023-04-03 05:14:52,537 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:15:08,241 INFO [train.py:903] (3/4) Epoch 25, batch 5200, loss[loss=0.1707, simple_loss=0.2552, pruned_loss=0.04304, over 19495.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2856, pruned_loss=0.06161, over 3807251.33 frames. ], batch size: 49, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:15:23,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 05:15:44,208 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169100.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:08,649 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 05:16:13,028 INFO [train.py:903] (3/4) Epoch 25, batch 5250, loss[loss=0.1891, simple_loss=0.2795, pruned_loss=0.04936, over 19540.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2848, pruned_loss=0.06105, over 3815494.87 frames. ], batch size: 56, lr: 3.27e-03, grad_scale: 8.0 +2023-04-03 05:16:27,871 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169134.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:16:56,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 5.211e+02 5.791e+02 7.204e+02 1.532e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 05:17:16,195 INFO [train.py:903] (3/4) Epoch 25, batch 5300, loss[loss=0.1777, simple_loss=0.2581, pruned_loss=0.0487, over 19605.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06107, over 3820992.07 frames. ], batch size: 50, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:17:22,413 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169177.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:17:34,462 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 05:18:18,936 INFO [train.py:903] (3/4) Epoch 25, batch 5350, loss[loss=0.2541, simple_loss=0.3278, pruned_loss=0.09018, over 19791.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06149, over 3829336.99 frames. ], batch size: 56, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:18:57,326 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 05:19:04,351 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 4.992e+02 6.506e+02 8.048e+02 1.510e+03, threshold=1.301e+03, percent-clipped=6.0 +2023-04-03 05:19:24,334 INFO [train.py:903] (3/4) Epoch 25, batch 5400, loss[loss=0.2194, simple_loss=0.3, pruned_loss=0.06944, over 19678.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2848, pruned_loss=0.06173, over 3833439.36 frames. ], batch size: 60, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:19:38,735 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1163, 2.1014, 1.7137, 2.1315, 2.0056, 1.8120, 1.7073, 1.9570], + device='cuda:3'), covar=tensor([0.1114, 0.1446, 0.1592, 0.1133, 0.1391, 0.0583, 0.1616, 0.0813], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0355, 0.0313, 0.0251, 0.0302, 0.0254, 0.0314, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:20:27,248 INFO [train.py:903] (3/4) Epoch 25, batch 5450, loss[loss=0.2186, simple_loss=0.2872, pruned_loss=0.07505, over 19579.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.285, pruned_loss=0.06201, over 3812255.38 frames. ], batch size: 52, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:20:33,628 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.34 vs. limit=5.0 +2023-04-03 05:20:36,090 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-03 05:20:36,598 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:20:39,942 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169331.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:20:46,814 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7384, 1.7297, 1.6123, 1.4102, 1.3396, 1.4733, 0.2545, 0.6729], + device='cuda:3'), covar=tensor([0.0638, 0.0599, 0.0395, 0.0632, 0.1146, 0.0724, 0.1332, 0.1061], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0363, 0.0364, 0.0391, 0.0467, 0.0399, 0.0344, 0.0346], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 05:20:50,641 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-03 05:21:11,465 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.467e+02 5.289e+02 7.172e+02 1.661e+03, threshold=1.058e+03, percent-clipped=2.0 +2023-04-03 05:21:29,438 INFO [train.py:903] (3/4) Epoch 25, batch 5500, loss[loss=0.2189, simple_loss=0.2961, pruned_loss=0.07085, over 18850.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.285, pruned_loss=0.06162, over 3825831.54 frames. ], batch size: 74, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:21:57,710 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 05:22:24,188 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 05:22:33,004 INFO [train.py:903] (3/4) Epoch 25, batch 5550, loss[loss=0.2068, simple_loss=0.2932, pruned_loss=0.06018, over 19461.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2849, pruned_loss=0.06167, over 3837841.32 frames. ], batch size: 64, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:22:43,769 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 05:23:01,515 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:01,725 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169444.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:23:04,029 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169446.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 05:23:17,176 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 5.192e+02 6.082e+02 7.576e+02 1.216e+03, threshold=1.216e+03, percent-clipped=3.0 +2023-04-03 05:23:33,281 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 05:23:36,949 INFO [train.py:903] (3/4) Epoch 25, batch 5600, loss[loss=0.1793, simple_loss=0.2604, pruned_loss=0.04911, over 19804.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2863, pruned_loss=0.06249, over 3832261.11 frames. ], batch size: 49, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:23:44,277 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169478.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:38,815 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169521.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:24:39,771 INFO [train.py:903] (3/4) Epoch 25, batch 5650, loss[loss=0.2206, simple_loss=0.3005, pruned_loss=0.07037, over 19481.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2861, pruned_loss=0.06235, over 3837468.40 frames. ], batch size: 64, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:25:24,921 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 4.961e+02 6.158e+02 8.288e+02 1.627e+03, threshold=1.232e+03, percent-clipped=5.0 +2023-04-03 05:25:27,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169559.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:25:30,419 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. 
Duration: 28.0944375 +2023-04-03 05:25:37,670 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4877, 1.3660, 1.4169, 1.8688, 1.4035, 1.7245, 1.7524, 1.5171], + device='cuda:3'), covar=tensor([0.0896, 0.0973, 0.1043, 0.0650, 0.0814, 0.0754, 0.0783, 0.0737], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0222, 0.0225, 0.0238, 0.0225, 0.0212, 0.0188, 0.0203], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0004], + device='cuda:3') +2023-04-03 05:25:43,212 INFO [train.py:903] (3/4) Epoch 25, batch 5700, loss[loss=0.195, simple_loss=0.2634, pruned_loss=0.06329, over 19034.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2867, pruned_loss=0.06235, over 3832517.26 frames. ], batch size: 42, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:11,336 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169593.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:26:11,692 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-03 05:26:12,795 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.00 vs. limit=5.0 +2023-04-03 05:26:22,651 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-03 05:26:29,603 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3820, 2.4276, 2.5833, 2.9213, 2.4652, 2.8278, 2.6137, 2.4747], + device='cuda:3'), covar=tensor([0.3323, 0.2883, 0.1447, 0.1915, 0.3128, 0.1631, 0.3407, 0.2446], + device='cuda:3'), in_proj_covar=tensor([0.0920, 0.0995, 0.0730, 0.0942, 0.0898, 0.0833, 0.0853, 0.0795], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 05:26:47,655 INFO [train.py:903] (3/4) Epoch 25, batch 5750, loss[loss=0.1763, simple_loss=0.2544, pruned_loss=0.04908, over 19735.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2873, pruned_loss=0.06281, over 3816687.43 frames. ], batch size: 46, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:26:48,849 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 05:26:59,218 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 05:27:04,021 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. 
Duration: 0.97725 +2023-04-03 05:27:06,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:09,111 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2884, 1.1954, 1.7297, 1.2863, 2.6237, 3.5133, 3.1463, 3.6325], + device='cuda:3'), covar=tensor([0.1667, 0.4162, 0.3582, 0.2711, 0.0648, 0.0213, 0.0248, 0.0304], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0326, 0.0356, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:27:10,238 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:27:32,987 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.159e+02 4.765e+02 5.968e+02 8.362e+02 1.575e+03, threshold=1.194e+03, percent-clipped=5.0 +2023-04-03 05:27:52,468 INFO [train.py:903] (3/4) Epoch 25, batch 5800, loss[loss=0.2027, simple_loss=0.2987, pruned_loss=0.05334, over 19587.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2867, pruned_loss=0.06214, over 3824608.80 frames. ], batch size: 57, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:28:20,543 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2643, 1.3492, 1.8628, 1.1876, 2.5749, 3.4059, 3.0492, 3.5823], + device='cuda:3'), covar=tensor([0.1570, 0.3748, 0.3167, 0.2484, 0.0575, 0.0219, 0.0251, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0326, 0.0357, 0.0267, 0.0248, 0.0192, 0.0217, 0.0267], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:28:27,916 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169700.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:28:30,491 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169702.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:28:55,622 INFO [train.py:903] (3/4) Epoch 25, batch 5850, loss[loss=0.2248, simple_loss=0.3136, pruned_loss=0.06803, over 19668.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2858, pruned_loss=0.06179, over 3829542.61 frames. 
], batch size: 58, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:29:00,330 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:02,806 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169727.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 05:29:09,611 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3900, 1.4280, 1.6512, 1.6212, 2.3082, 2.1159, 2.2902, 0.9918], + device='cuda:3'), covar=tensor([0.2615, 0.4527, 0.2851, 0.2026, 0.1509, 0.2192, 0.1467, 0.4822], + device='cuda:3'), in_proj_covar=tensor([0.0548, 0.0663, 0.0736, 0.0501, 0.0630, 0.0542, 0.0668, 0.0565], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 05:29:28,448 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169747.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:41,207 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.003e+02 6.037e+02 8.413e+02 1.989e+03, threshold=1.207e+03, percent-clipped=6.0 +2023-04-03 05:29:51,800 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:29:59,817 INFO [train.py:903] (3/4) Epoch 25, batch 5900, loss[loss=0.1791, simple_loss=0.256, pruned_loss=0.0511, over 19394.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06123, over 3841940.32 frames. ], batch size: 48, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:30:04,507 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 05:30:27,861 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 05:30:42,243 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:30:56,654 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:04,213 INFO [train.py:903] (3/4) Epoch 25, batch 5950, loss[loss=0.2184, simple_loss=0.3011, pruned_loss=0.06784, over 18150.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.06143, over 3844933.00 frames. ], batch size: 83, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:31:28,362 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169840.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:39,968 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:31:49,601 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 4.837e+02 6.128e+02 7.395e+02 1.765e+03, threshold=1.226e+03, percent-clipped=3.0 +2023-04-03 05:32:09,286 INFO [train.py:903] (3/4) Epoch 25, batch 6000, loss[loss=0.2005, simple_loss=0.2715, pruned_loss=0.0647, over 19377.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06137, over 3839345.84 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 8.0 +2023-04-03 05:32:09,286 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 05:32:21,932 INFO [train.py:937] (3/4) Epoch 25, validation: loss=0.1675, simple_loss=0.2674, pruned_loss=0.03383, over 944034.00 frames. 
+2023-04-03 05:32:21,933 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 05:32:25,726 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:27,349 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.69 vs. limit=5.0 +2023-04-03 05:32:48,264 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:32:57,515 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0559, 1.8579, 1.6843, 2.0451, 1.8761, 1.7422, 1.5903, 1.9416], + device='cuda:3'), covar=tensor([0.1124, 0.1646, 0.1644, 0.1120, 0.1492, 0.0643, 0.1683, 0.0870], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0354, 0.0312, 0.0252, 0.0302, 0.0254, 0.0314, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:33:03,279 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9930, 1.6571, 1.5692, 1.8508, 1.6148, 1.6496, 1.4546, 1.8472], + device='cuda:3'), covar=tensor([0.0994, 0.1238, 0.1483, 0.0978, 0.1287, 0.0575, 0.1594, 0.0749], + device='cuda:3'), in_proj_covar=tensor([0.0274, 0.0355, 0.0313, 0.0252, 0.0302, 0.0254, 0.0314, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:33:20,573 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169917.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:33:26,959 INFO [train.py:903] (3/4) Epoch 25, batch 6050, loss[loss=0.2015, simple_loss=0.2928, pruned_loss=0.05509, over 19495.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2842, pruned_loss=0.06125, over 3829383.52 frames. ], batch size: 64, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:12,787 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.971e+02 6.200e+02 7.955e+02 1.563e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 05:34:30,170 INFO [train.py:903] (3/4) Epoch 25, batch 6100, loss[loss=0.1632, simple_loss=0.2503, pruned_loss=0.03807, over 19498.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2835, pruned_loss=0.06084, over 3830950.26 frames. ], batch size: 49, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:34:44,098 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=169983.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:35:19,629 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 05:35:35,274 INFO [train.py:903] (3/4) Epoch 25, batch 6150, loss[loss=0.1777, simple_loss=0.2522, pruned_loss=0.0516, over 18625.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2832, pruned_loss=0.06075, over 3818179.92 frames. 
], batch size: 41, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:35:40,504 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7576, 1.8602, 2.1064, 1.9563, 2.9822, 2.5694, 3.2409, 1.7526], + device='cuda:3'), covar=tensor([0.2366, 0.4053, 0.2649, 0.1883, 0.1678, 0.2167, 0.1527, 0.4275], + device='cuda:3'), in_proj_covar=tensor([0.0546, 0.0663, 0.0737, 0.0500, 0.0630, 0.0541, 0.0666, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 05:36:07,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 05:36:22,372 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.332e+02 4.967e+02 5.743e+02 7.363e+02 2.013e+03, threshold=1.149e+03, percent-clipped=2.0 +2023-04-03 05:36:28,775 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:36:40,353 INFO [train.py:903] (3/4) Epoch 25, batch 6200, loss[loss=0.202, simple_loss=0.2689, pruned_loss=0.06753, over 16821.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.0608, over 3812101.68 frames. ], batch size: 37, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:37:04,433 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:13,850 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:26,517 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:37:39,279 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8522, 1.2826, 1.6365, 2.8417, 1.7549, 1.6035, 2.0411, 1.5132], + device='cuda:3'), covar=tensor([0.1094, 0.1820, 0.1382, 0.0944, 0.1251, 0.1520, 0.1435, 0.1173], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0225, 0.0238, 0.0225, 0.0214, 0.0189, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 05:37:43,648 INFO [train.py:903] (3/4) Epoch 25, batch 6250, loss[loss=0.1842, simple_loss=0.2598, pruned_loss=0.05427, over 19383.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2829, pruned_loss=0.06062, over 3821124.90 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:38:16,018 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 05:38:16,221 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170148.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:38:29,525 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.163e+02 5.199e+02 6.170e+02 7.836e+02 1.706e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 05:38:47,598 INFO [train.py:903] (3/4) Epoch 25, batch 6300, loss[loss=0.1807, simple_loss=0.2637, pruned_loss=0.04884, over 19623.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06073, over 3826290.73 frames. 
], batch size: 50, lr: 3.26e-03, grad_scale: 4.0 +2023-04-03 05:39:08,055 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4502, 2.4250, 2.1685, 2.0662, 1.9637, 2.2220, 1.4455, 1.9206], + device='cuda:3'), covar=tensor([0.0628, 0.0601, 0.0469, 0.0801, 0.0796, 0.0998, 0.1145, 0.0806], + device='cuda:3'), in_proj_covar=tensor([0.0362, 0.0359, 0.0362, 0.0387, 0.0463, 0.0396, 0.0341, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 05:39:32,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170206.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:39:51,012 INFO [train.py:903] (3/4) Epoch 25, batch 6350, loss[loss=0.2433, simple_loss=0.3157, pruned_loss=0.08551, over 19611.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06122, over 3819943.15 frames. ], batch size: 57, lr: 3.25e-03, grad_scale: 4.0 +2023-04-03 05:39:53,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:36,695 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.417e+02 5.475e+02 7.140e+02 1.571e+03, threshold=1.095e+03, percent-clipped=3.0 +2023-04-03 05:40:42,944 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:40:54,617 INFO [train.py:903] (3/4) Epoch 25, batch 6400, loss[loss=0.1964, simple_loss=0.2765, pruned_loss=0.05815, over 19739.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.06092, over 3819486.58 frames. ], batch size: 51, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:41:04,248 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8473, 1.6500, 1.4562, 1.7924, 1.5333, 1.6152, 1.4616, 1.6929], + device='cuda:3'), covar=tensor([0.1141, 0.1356, 0.1617, 0.1024, 0.1338, 0.0600, 0.1607, 0.0841], + device='cuda:3'), in_proj_covar=tensor([0.0273, 0.0354, 0.0312, 0.0252, 0.0301, 0.0253, 0.0313, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:41:59,011 INFO [train.py:903] (3/4) Epoch 25, batch 6450, loss[loss=0.1842, simple_loss=0.2744, pruned_loss=0.047, over 19761.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2853, pruned_loss=0.06139, over 3808890.97 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:42:32,625 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2438, 1.2868, 1.6231, 1.0502, 2.4415, 3.3515, 3.0516, 3.5896], + device='cuda:3'), covar=tensor([0.1549, 0.4138, 0.3785, 0.2801, 0.0657, 0.0210, 0.0239, 0.0296], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0269, 0.0251, 0.0193, 0.0219, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:42:40,350 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:42:44,566 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.688e+02 6.468e+02 8.318e+02 2.178e+03, threshold=1.294e+03, percent-clipped=13.0 +2023-04-03 05:42:45,706 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 05:43:03,216 INFO [train.py:903] (3/4) Epoch 25, batch 6500, loss[loss=0.1635, simple_loss=0.2432, pruned_loss=0.04187, over 19738.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2864, pruned_loss=0.06194, over 3789877.22 frames. ], batch size: 45, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:43:07,921 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 05:43:12,752 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:43:48,145 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170407.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:44:06,260 INFO [train.py:903] (3/4) Epoch 25, batch 6550, loss[loss=0.1712, simple_loss=0.2527, pruned_loss=0.04488, over 19809.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2874, pruned_loss=0.0625, over 3799448.53 frames. ], batch size: 49, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:44:52,748 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.881e+02 6.158e+02 7.799e+02 1.457e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-04-03 05:44:57,811 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:09,731 INFO [train.py:903] (3/4) Epoch 25, batch 6600, loss[loss=0.2023, simple_loss=0.2941, pruned_loss=0.05532, over 19579.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2867, pruned_loss=0.06209, over 3806591.72 frames. ], batch size: 61, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:45:19,115 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170479.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:20,455 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:27,515 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4786, 1.4893, 1.8066, 1.7228, 2.6072, 2.2856, 2.8083, 1.1988], + device='cuda:3'), covar=tensor([0.2464, 0.4488, 0.2800, 0.1917, 0.1617, 0.2149, 0.1497, 0.4618], + device='cuda:3'), in_proj_covar=tensor([0.0547, 0.0664, 0.0738, 0.0498, 0.0628, 0.0542, 0.0667, 0.0564], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 05:45:28,736 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170487.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:45:51,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:09,043 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:13,017 INFO [train.py:903] (3/4) Epoch 25, batch 6650, loss[loss=0.191, simple_loss=0.2733, pruned_loss=0.05436, over 19692.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2865, pruned_loss=0.06196, over 3822248.27 frames. 
], batch size: 53, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:46:13,409 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170522.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:41,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170544.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:46:58,412 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.718e+02 4.827e+02 5.978e+02 7.974e+02 2.215e+03, threshold=1.196e+03, percent-clipped=6.0 +2023-04-03 05:47:16,979 INFO [train.py:903] (3/4) Epoch 25, batch 6700, loss[loss=0.1953, simple_loss=0.2751, pruned_loss=0.05773, over 19383.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2866, pruned_loss=0.06202, over 3819036.85 frames. ], batch size: 70, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:47:36,701 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:48:17,851 INFO [train.py:903] (3/4) Epoch 25, batch 6750, loss[loss=0.1992, simple_loss=0.2907, pruned_loss=0.05384, over 19710.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2855, pruned_loss=0.06152, over 3810413.01 frames. ], batch size: 59, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:48:59,810 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.526e+02 5.801e+02 6.899e+02 1.670e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 05:49:15,798 INFO [train.py:903] (3/4) Epoch 25, batch 6800, loss[loss=0.181, simple_loss=0.2595, pruned_loss=0.05126, over 19361.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.0612, over 3817934.76 frames. ], batch size: 47, lr: 3.25e-03, grad_scale: 8.0 +2023-04-03 05:50:02,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 05:50:04,140 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 05:50:07,357 INFO [train.py:903] (3/4) Epoch 26, batch 0, loss[loss=0.1832, simple_loss=0.2607, pruned_loss=0.05279, over 19752.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2607, pruned_loss=0.05279, over 19752.00 frames. ], batch size: 47, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:50:07,357 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 05:50:19,303 INFO [train.py:937] (3/4) Epoch 26, validation: loss=0.1673, simple_loss=0.2675, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 05:50:19,305 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 05:50:32,180 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 05:50:44,728 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6746, 4.2346, 2.7595, 3.7330, 0.8655, 4.2225, 4.1134, 4.1277], + device='cuda:3'), covar=tensor([0.0607, 0.1124, 0.2028, 0.0899, 0.4237, 0.0712, 0.0967, 0.1182], + device='cuda:3'), in_proj_covar=tensor([0.0520, 0.0424, 0.0510, 0.0357, 0.0409, 0.0450, 0.0445, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:51:06,034 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. limit=5.0 +2023-04-03 05:51:20,628 INFO [train.py:903] (3/4) Epoch 26, batch 50, loss[loss=0.2041, simple_loss=0.2814, pruned_loss=0.06337, over 19586.00 frames. 
], tot_loss[loss=0.2053, simple_loss=0.2863, pruned_loss=0.06214, over 859335.05 frames. ], batch size: 52, lr: 3.19e-03, grad_scale: 8.0 +2023-04-03 05:51:28,226 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9075, 1.1944, 1.4581, 0.6126, 2.0237, 2.4606, 2.2110, 2.6306], + device='cuda:3'), covar=tensor([0.1780, 0.4265, 0.3765, 0.3049, 0.0708, 0.0301, 0.0358, 0.0429], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0328, 0.0358, 0.0268, 0.0250, 0.0191, 0.0218, 0.0269], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 05:51:30,184 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.268e+02 6.216e+02 7.845e+02 1.668e+03, threshold=1.243e+03, percent-clipped=9.0 +2023-04-03 05:51:54,979 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170778.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:51:55,730 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 05:52:04,556 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 05:52:21,931 INFO [train.py:903] (3/4) Epoch 26, batch 100, loss[loss=0.1908, simple_loss=0.2706, pruned_loss=0.05545, over 19576.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2871, pruned_loss=0.06288, over 1515004.02 frames. ], batch size: 52, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:52:26,029 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170803.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:52:32,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 05:52:50,745 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170823.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:53:24,741 INFO [train.py:903] (3/4) Epoch 26, batch 150, loss[loss=0.207, simple_loss=0.2942, pruned_loss=0.05989, over 19667.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2854, pruned_loss=0.06221, over 2019548.27 frames. ], batch size: 55, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:53:36,324 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.111e+02 4.908e+02 6.470e+02 7.917e+02 1.560e+03, threshold=1.294e+03, percent-clipped=6.0 +2023-04-03 05:54:25,519 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 05:54:26,689 INFO [train.py:903] (3/4) Epoch 26, batch 200, loss[loss=0.1987, simple_loss=0.2744, pruned_loss=0.06147, over 19587.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2845, pruned_loss=0.06086, over 2431743.09 frames. ], batch size: 52, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:05,066 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=170931.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:14,504 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:24,808 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170946.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:55:30,413 INFO [train.py:903] (3/4) Epoch 26, batch 250, loss[loss=0.1762, simple_loss=0.2581, pruned_loss=0.04712, over 19724.00 frames. 
], tot_loss[loss=0.2038, simple_loss=0.2849, pruned_loss=0.06132, over 2733755.80 frames. ], batch size: 51, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:55:31,162 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 05:55:39,639 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4256, 2.1661, 1.6878, 1.5125, 2.0207, 1.4202, 1.4077, 1.8883], + device='cuda:3'), covar=tensor([0.1109, 0.0876, 0.1080, 0.0907, 0.0601, 0.1329, 0.0744, 0.0497], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0318, 0.0336, 0.0270, 0.0249, 0.0342, 0.0294, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 05:55:42,602 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 4.848e+02 5.950e+02 8.014e+02 1.769e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 05:56:34,953 INFO [train.py:903] (3/4) Epoch 26, batch 300, loss[loss=0.1971, simple_loss=0.2878, pruned_loss=0.05317, over 19618.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06049, over 2990761.81 frames. ], batch size: 57, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:34,254 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171046.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:57:34,585 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. limit=5.0 +2023-04-03 05:57:38,583 INFO [train.py:903] (3/4) Epoch 26, batch 350, loss[loss=0.1951, simple_loss=0.2736, pruned_loss=0.05828, over 19571.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2831, pruned_loss=0.06028, over 3178883.33 frames. ], batch size: 52, lr: 3.18e-03, grad_scale: 4.0 +2023-04-03 05:57:45,651 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 05:57:49,063 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.001e+02 4.764e+02 6.020e+02 7.720e+02 1.602e+03, threshold=1.204e+03, percent-clipped=4.0 +2023-04-03 05:58:42,176 INFO [train.py:903] (3/4) Epoch 26, batch 400, loss[loss=0.2304, simple_loss=0.3142, pruned_loss=0.07329, over 19357.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06028, over 3327392.67 frames. ], batch size: 70, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:58:55,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171110.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 05:59:43,996 INFO [train.py:903] (3/4) Epoch 26, batch 450, loss[loss=0.1734, simple_loss=0.2574, pruned_loss=0.04471, over 19675.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06056, over 3439884.62 frames. ], batch size: 55, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 05:59:56,343 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.217e+02 6.577e+02 9.059e+02 2.566e+03, threshold=1.315e+03, percent-clipped=7.0 +2023-04-03 06:00:19,585 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 06:00:20,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 06:00:30,528 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5359, 1.1082, 1.1773, 1.4357, 0.9983, 1.2953, 1.1608, 1.3532], + device='cuda:3'), covar=tensor([0.1189, 0.1404, 0.1621, 0.1017, 0.1429, 0.0672, 0.1682, 0.0897], + device='cuda:3'), in_proj_covar=tensor([0.0275, 0.0354, 0.0314, 0.0254, 0.0304, 0.0255, 0.0315, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:00:38,900 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:00:47,725 INFO [train.py:903] (3/4) Epoch 26, batch 500, loss[loss=0.1802, simple_loss=0.2535, pruned_loss=0.05348, over 19774.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2837, pruned_loss=0.06072, over 3535727.24 frames. ], batch size: 49, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:01:05,590 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:11,786 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:01:52,223 INFO [train.py:903] (3/4) Epoch 26, batch 550, loss[loss=0.2919, simple_loss=0.3466, pruned_loss=0.1186, over 19675.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.06108, over 3608918.03 frames. ], batch size: 60, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:03,094 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.798e+02 6.471e+02 7.842e+02 1.459e+03, threshold=1.294e+03, percent-clipped=3.0 +2023-04-03 06:02:12,064 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 06:02:44,064 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171290.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:02:55,675 INFO [train.py:903] (3/4) Epoch 26, batch 600, loss[loss=0.2259, simple_loss=0.3032, pruned_loss=0.07428, over 19324.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.0608, over 3657623.63 frames. ], batch size: 66, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:02:58,422 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:31,106 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:03:36,407 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 06:03:57,549 INFO [train.py:903] (3/4) Epoch 26, batch 650, loss[loss=0.2053, simple_loss=0.2982, pruned_loss=0.05626, over 19714.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2838, pruned_loss=0.06097, over 3689902.91 frames. ], batch size: 63, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:04:09,335 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 4.759e+02 5.860e+02 7.918e+02 1.260e+03, threshold=1.172e+03, percent-clipped=0.0 +2023-04-03 06:04:16,450 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171363.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:05:01,712 INFO [train.py:903] (3/4) Epoch 26, batch 700, loss[loss=0.2074, simple_loss=0.2815, pruned_loss=0.06665, over 19482.00 frames. 
], tot_loss[loss=0.2017, simple_loss=0.2825, pruned_loss=0.06043, over 3720610.03 frames. ], batch size: 49, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:05:10,683 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171405.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:08,107 INFO [train.py:903] (3/4) Epoch 26, batch 750, loss[loss=0.2683, simple_loss=0.3275, pruned_loss=0.1045, over 13549.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2842, pruned_loss=0.06172, over 3733174.22 frames. ], batch size: 136, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:06:11,870 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7910, 1.4737, 1.5713, 1.5973, 3.3704, 1.1544, 2.4787, 3.8012], + device='cuda:3'), covar=tensor([0.0494, 0.2776, 0.2972, 0.1876, 0.0677, 0.2580, 0.1311, 0.0230], + device='cuda:3'), in_proj_covar=tensor([0.0420, 0.0374, 0.0394, 0.0354, 0.0379, 0.0354, 0.0392, 0.0413], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:06:12,968 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171454.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:06:18,671 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.178e+02 5.123e+02 6.550e+02 8.686e+02 2.549e+03, threshold=1.310e+03, percent-clipped=11.0 +2023-04-03 06:07:12,528 INFO [train.py:903] (3/4) Epoch 26, batch 800, loss[loss=0.1981, simple_loss=0.2902, pruned_loss=0.05302, over 19669.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06149, over 3748254.08 frames. ], batch size: 58, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:07:25,367 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 06:07:32,365 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171516.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:14,608 INFO [train.py:903] (3/4) Epoch 26, batch 850, loss[loss=0.2007, simple_loss=0.286, pruned_loss=0.05769, over 19777.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2857, pruned_loss=0.06222, over 3769618.82 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:08:24,876 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:08:25,743 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.196e+02 4.689e+02 5.699e+02 7.261e+02 1.636e+03, threshold=1.140e+03, percent-clipped=2.0 +2023-04-03 06:08:40,934 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:09:05,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 06:09:18,245 INFO [train.py:903] (3/4) Epoch 26, batch 900, loss[loss=0.2213, simple_loss=0.2997, pruned_loss=0.07148, over 19788.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2858, pruned_loss=0.06194, over 3790805.29 frames. ], batch size: 56, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:17,110 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.82 vs. limit=5.0 +2023-04-03 06:10:22,182 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. 
Duration: 26.32775 +2023-04-03 06:10:23,384 INFO [train.py:903] (3/4) Epoch 26, batch 950, loss[loss=0.1975, simple_loss=0.2888, pruned_loss=0.05311, over 19611.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.286, pruned_loss=0.06179, over 3810322.38 frames. ], batch size: 57, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:10:34,908 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.376e+02 5.213e+02 6.211e+02 8.451e+02 1.981e+03, threshold=1.242e+03, percent-clipped=10.0 +2023-04-03 06:10:37,599 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171661.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:10:45,898 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9401, 1.3491, 1.0799, 0.8472, 1.1874, 0.9149, 0.9535, 1.2876], + device='cuda:3'), covar=tensor([0.0630, 0.0777, 0.1069, 0.0874, 0.0543, 0.1313, 0.0574, 0.0426], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0318, 0.0337, 0.0270, 0.0250, 0.0343, 0.0295, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:10:51,825 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171673.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:00,208 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2600, 2.3038, 2.0313, 2.3303, 2.2379, 1.8753, 1.9644, 2.2307], + device='cuda:3'), covar=tensor([0.0958, 0.1309, 0.1431, 0.1014, 0.1213, 0.0835, 0.1450, 0.0792], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0354, 0.0314, 0.0254, 0.0305, 0.0254, 0.0315, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:11:09,460 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171686.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:11:27,631 INFO [train.py:903] (3/4) Epoch 26, batch 1000, loss[loss=0.1976, simple_loss=0.2818, pruned_loss=0.05668, over 19736.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2859, pruned_loss=0.06195, over 3808465.21 frames. ], batch size: 63, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:11:36,120 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171707.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:12:19,826 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 06:12:31,841 INFO [train.py:903] (3/4) Epoch 26, batch 1050, loss[loss=0.2671, simple_loss=0.3327, pruned_loss=0.1007, over 13761.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06105, over 3816641.26 frames. ], batch size: 135, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:12:42,470 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.548e+02 5.465e+02 6.383e+02 7.807e+02 1.569e+03, threshold=1.277e+03, percent-clipped=7.0 +2023-04-03 06:12:50,010 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-03 06:13:01,890 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 06:13:34,460 INFO [train.py:903] (3/4) Epoch 26, batch 1100, loss[loss=0.2159, simple_loss=0.2986, pruned_loss=0.06665, over 19590.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.0614, over 3824836.13 frames. 
], batch size: 57, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:05,456 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:09,068 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171825.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:40,968 INFO [train.py:903] (3/4) Epoch 26, batch 1150, loss[loss=0.2209, simple_loss=0.2985, pruned_loss=0.07159, over 19617.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06021, over 3835249.03 frames. ], batch size: 57, lr: 3.18e-03, grad_scale: 8.0 +2023-04-03 06:14:41,487 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171850.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:14:54,273 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.298e+02 6.784e+02 8.276e+02 1.649e+03, threshold=1.357e+03, percent-clipped=5.0 +2023-04-03 06:14:55,527 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=171860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:15:46,139 INFO [train.py:903] (3/4) Epoch 26, batch 1200, loss[loss=0.1783, simple_loss=0.2626, pruned_loss=0.04701, over 19853.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.06035, over 3833402.43 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:12,790 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 06:16:22,268 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:29,742 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 06:16:48,465 INFO [train.py:903] (3/4) Epoch 26, batch 1250, loss[loss=0.2095, simple_loss=0.2881, pruned_loss=0.0654, over 19753.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.284, pruned_loss=0.06082, over 3824238.16 frames. ], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:16:53,750 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:54,742 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171955.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:16:58,916 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 5.182e+02 6.196e+02 7.754e+02 1.405e+03, threshold=1.239e+03, percent-clipped=1.0 +2023-04-03 06:17:21,629 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:17:51,744 INFO [train.py:903] (3/4) Epoch 26, batch 1300, loss[loss=0.1952, simple_loss=0.2864, pruned_loss=0.05202, over 19681.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06035, over 3826027.43 frames. ], batch size: 59, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:56,743 INFO [train.py:903] (3/4) Epoch 26, batch 1350, loss[loss=0.2338, simple_loss=0.3117, pruned_loss=0.07794, over 19579.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2831, pruned_loss=0.06017, over 3829821.09 frames. 
], batch size: 61, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:18:59,494 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1122, 5.6009, 2.7868, 4.7759, 1.1796, 5.7163, 5.4907, 5.6684], + device='cuda:3'), covar=tensor([0.0354, 0.0719, 0.1983, 0.0708, 0.3662, 0.0461, 0.0790, 0.0851], + device='cuda:3'), in_proj_covar=tensor([0.0521, 0.0423, 0.0510, 0.0357, 0.0409, 0.0449, 0.0444, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:19:09,279 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-03 06:19:09,471 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.423e+02 5.487e+02 6.725e+02 8.152e+02 1.378e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-04-03 06:19:16,130 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-04-03 06:19:25,635 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0402, 1.9793, 1.7141, 2.1040, 1.9285, 1.7939, 1.6601, 1.9466], + device='cuda:3'), covar=tensor([0.1139, 0.1509, 0.1582, 0.1136, 0.1409, 0.0581, 0.1549, 0.0791], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0357, 0.0318, 0.0256, 0.0308, 0.0256, 0.0318, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:19:34,023 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:01,391 INFO [train.py:903] (3/4) Epoch 26, batch 1400, loss[loss=0.2327, simple_loss=0.3096, pruned_loss=0.07786, over 19600.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2826, pruned_loss=0.05968, over 3844393.79 frames. ], batch size: 61, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:20:07,127 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:20:47,887 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172136.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:21:05,089 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 06:21:06,112 INFO [train.py:903] (3/4) Epoch 26, batch 1450, loss[loss=0.228, simple_loss=0.3022, pruned_loss=0.07696, over 19529.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2845, pruned_loss=0.06072, over 3836156.01 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:21:16,564 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.245e+02 4.778e+02 6.066e+02 7.310e+02 2.231e+03, threshold=1.213e+03, percent-clipped=2.0 +2023-04-03 06:21:22,865 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4354, 1.4627, 1.6060, 1.5943, 1.7674, 1.9332, 1.7913, 0.5370], + device='cuda:3'), covar=tensor([0.2464, 0.4378, 0.2787, 0.2042, 0.1758, 0.2363, 0.1570, 0.5162], + device='cuda:3'), in_proj_covar=tensor([0.0548, 0.0662, 0.0740, 0.0500, 0.0630, 0.0541, 0.0667, 0.0565], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 06:22:09,972 INFO [train.py:903] (3/4) Epoch 26, batch 1500, loss[loss=0.2091, simple_loss=0.294, pruned_loss=0.06209, over 19502.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06112, over 3834277.92 frames. 
], batch size: 64, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:22:33,633 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0188, 3.6357, 2.4427, 3.2966, 1.0682, 3.6618, 3.5526, 3.5955], + device='cuda:3'), covar=tensor([0.0814, 0.1220, 0.2142, 0.1002, 0.3734, 0.0812, 0.1115, 0.1352], + device='cuda:3'), in_proj_covar=tensor([0.0522, 0.0425, 0.0513, 0.0358, 0.0410, 0.0452, 0.0446, 0.0476], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:22:51,466 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172231.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:14,778 INFO [train.py:903] (3/4) Epoch 26, batch 1550, loss[loss=0.2047, simple_loss=0.2937, pruned_loss=0.05787, over 19697.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2848, pruned_loss=0.06104, over 3845906.40 frames. ], batch size: 59, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:23:23,380 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:23:26,539 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.787e+02 4.585e+02 5.725e+02 7.044e+02 1.122e+03, threshold=1.145e+03, percent-clipped=0.0 +2023-04-03 06:23:59,126 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6676, 1.5544, 1.7292, 1.7406, 3.2330, 1.4997, 2.7769, 3.6631], + device='cuda:3'), covar=tensor([0.0638, 0.3002, 0.2972, 0.2127, 0.0811, 0.2671, 0.1471, 0.0319], + device='cuda:3'), in_proj_covar=tensor([0.0420, 0.0374, 0.0395, 0.0354, 0.0380, 0.0354, 0.0393, 0.0414], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:24:17,373 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:24:18,407 INFO [train.py:903] (3/4) Epoch 26, batch 1600, loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06158, over 19766.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.0612, over 3846994.08 frames. ], batch size: 54, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:24:30,664 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:24:45,012 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 06:24:49,285 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1381, 3.4052, 1.9673, 2.0680, 3.1057, 1.7323, 1.4964, 2.3597], + device='cuda:3'), covar=tensor([0.1490, 0.0719, 0.1174, 0.0963, 0.0550, 0.1349, 0.1088, 0.0715], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0320, 0.0340, 0.0272, 0.0251, 0.0345, 0.0294, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:25:22,886 INFO [train.py:903] (3/4) Epoch 26, batch 1650, loss[loss=0.2643, simple_loss=0.3396, pruned_loss=0.09456, over 19674.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2848, pruned_loss=0.06128, over 3848082.53 frames. 
], batch size: 58, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:25:32,975 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.237e+02 4.806e+02 6.116e+02 7.815e+02 1.931e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 06:26:24,985 INFO [train.py:903] (3/4) Epoch 26, batch 1700, loss[loss=0.2592, simple_loss=0.3257, pruned_loss=0.09637, over 12871.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2844, pruned_loss=0.06161, over 3837993.97 frames. ], batch size: 136, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:26:27,915 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-03 06:26:43,124 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172414.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:27:07,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 06:27:27,220 INFO [train.py:903] (3/4) Epoch 26, batch 1750, loss[loss=0.1816, simple_loss=0.2645, pruned_loss=0.04942, over 19728.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06127, over 3832523.74 frames. ], batch size: 51, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:27:39,757 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.268e+02 4.883e+02 5.717e+02 7.034e+02 1.807e+03, threshold=1.143e+03, percent-clipped=3.0 +2023-04-03 06:27:52,745 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9015, 1.8713, 1.4835, 1.8671, 1.7793, 1.5266, 1.4775, 1.7721], + device='cuda:3'), covar=tensor([0.1215, 0.1511, 0.1838, 0.1221, 0.1473, 0.0941, 0.1859, 0.1001], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0254, 0.0307, 0.0254, 0.0316, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:28:06,465 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:28:09,158 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8736, 1.2048, 1.6187, 0.5714, 2.0267, 2.4714, 2.1927, 2.6537], + device='cuda:3'), covar=tensor([0.1682, 0.3996, 0.3434, 0.2918, 0.0671, 0.0305, 0.0358, 0.0412], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0330, 0.0361, 0.0270, 0.0252, 0.0193, 0.0219, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 06:28:26,586 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8517, 4.4259, 2.7661, 3.8923, 0.9562, 4.3643, 4.2554, 4.3161], + device='cuda:3'), covar=tensor([0.0537, 0.0865, 0.1913, 0.0816, 0.3821, 0.0625, 0.0903, 0.1034], + device='cuda:3'), in_proj_covar=tensor([0.0518, 0.0420, 0.0507, 0.0355, 0.0406, 0.0447, 0.0441, 0.0472], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:28:32,020 INFO [train.py:903] (3/4) Epoch 26, batch 1800, loss[loss=0.217, simple_loss=0.2954, pruned_loss=0.06932, over 19774.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06078, over 3845614.50 frames. ], batch size: 54, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:31,754 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 06:29:36,206 INFO [train.py:903] (3/4) Epoch 26, batch 1850, loss[loss=0.174, simple_loss=0.2515, pruned_loss=0.04824, over 19154.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2835, pruned_loss=0.06083, over 3838233.97 frames. ], batch size: 42, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:29:46,975 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.860e+02 4.880e+02 5.794e+02 7.257e+02 1.575e+03, threshold=1.159e+03, percent-clipped=2.0 +2023-04-03 06:30:11,359 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 06:30:20,758 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0264, 4.6123, 2.7863, 4.0693, 1.1376, 4.5351, 4.4630, 4.5223], + device='cuda:3'), covar=tensor([0.0497, 0.0847, 0.1888, 0.0799, 0.3626, 0.0566, 0.0848, 0.0885], + device='cuda:3'), in_proj_covar=tensor([0.0521, 0.0423, 0.0511, 0.0357, 0.0407, 0.0450, 0.0443, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:30:34,192 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=172595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:30:39,839 INFO [train.py:903] (3/4) Epoch 26, batch 1900, loss[loss=0.2186, simple_loss=0.3074, pruned_loss=0.06487, over 19369.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2824, pruned_loss=0.06027, over 3836712.72 frames. ], batch size: 66, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:30:59,239 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 06:31:01,809 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:05,048 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 06:31:23,069 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9053, 4.3726, 4.6684, 4.6607, 1.6713, 4.3371, 3.7741, 4.3688], + device='cuda:3'), covar=tensor([0.1906, 0.0873, 0.0639, 0.0723, 0.6559, 0.0991, 0.0732, 0.1320], + device='cuda:3'), in_proj_covar=tensor([0.0806, 0.0774, 0.0978, 0.0859, 0.0852, 0.0745, 0.0584, 0.0906], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 06:31:29,995 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 06:31:36,115 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:31:44,211 INFO [train.py:903] (3/4) Epoch 26, batch 1950, loss[loss=0.2231, simple_loss=0.3033, pruned_loss=0.07144, over 19660.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2832, pruned_loss=0.06059, over 3834127.95 frames. 
], batch size: 60, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:31:57,688 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.105e+02 6.247e+02 7.487e+02 1.872e+03, threshold=1.249e+03, percent-clipped=7.0 +2023-04-03 06:32:12,276 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172670.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:43,425 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172695.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:32:48,408 INFO [train.py:903] (3/4) Epoch 26, batch 2000, loss[loss=0.2246, simple_loss=0.3032, pruned_loss=0.07301, over 19789.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.0606, over 3843421.92 frames. ], batch size: 56, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:14,449 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 06:33:39,914 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9031, 4.2856, 4.6176, 4.6116, 2.1387, 4.3186, 3.7966, 4.3609], + device='cuda:3'), covar=tensor([0.1701, 0.1424, 0.0587, 0.0691, 0.5547, 0.1127, 0.0672, 0.1003], + device='cuda:3'), in_proj_covar=tensor([0.0803, 0.0774, 0.0978, 0.0859, 0.0852, 0.0744, 0.0584, 0.0905], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 06:33:46,801 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 06:33:51,657 INFO [train.py:903] (3/4) Epoch 26, batch 2050, loss[loss=0.2107, simple_loss=0.274, pruned_loss=0.07369, over 19711.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06126, over 3830580.17 frames. ], batch size: 46, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:33:55,301 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172752.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:34:03,947 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.396e+02 5.217e+02 6.186e+02 8.512e+02 2.102e+03, threshold=1.237e+03, percent-clipped=6.0 +2023-04-03 06:34:06,456 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 06:34:07,797 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 06:34:27,709 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 06:34:54,268 INFO [train.py:903] (3/4) Epoch 26, batch 2100, loss[loss=0.1779, simple_loss=0.2712, pruned_loss=0.04229, over 19570.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2836, pruned_loss=0.06111, over 3833256.79 frames. ], batch size: 52, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:25,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 06:35:47,309 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 06:35:56,679 INFO [train.py:903] (3/4) Epoch 26, batch 2150, loss[loss=0.153, simple_loss=0.2375, pruned_loss=0.03422, over 19742.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.283, pruned_loss=0.06091, over 3834916.56 frames. 
], batch size: 48, lr: 3.17e-03, grad_scale: 8.0 +2023-04-03 06:35:58,327 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=172851.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:02,861 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6141, 1.3334, 1.5281, 1.5603, 3.1730, 1.0522, 2.4560, 3.6453], + device='cuda:3'), covar=tensor([0.0496, 0.2916, 0.2944, 0.1864, 0.0717, 0.2543, 0.1288, 0.0233], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0371, 0.0392, 0.0351, 0.0378, 0.0352, 0.0389, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:36:10,068 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.604e+02 4.749e+02 6.121e+02 8.086e+02 1.727e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 06:36:14,750 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=172863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:21,385 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-03 06:36:31,089 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=172876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:36:59,989 INFO [train.py:903] (3/4) Epoch 26, batch 2200, loss[loss=0.1851, simple_loss=0.2749, pruned_loss=0.04761, over 19467.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06127, over 3828316.21 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 4.0 +2023-04-03 06:38:04,037 INFO [train.py:903] (3/4) Epoch 26, batch 2250, loss[loss=0.1894, simple_loss=0.2796, pruned_loss=0.04963, over 19764.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06131, over 3829052.51 frames. ], batch size: 54, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:38:16,977 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172960.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:38:17,990 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.806e+02 5.924e+02 7.729e+02 1.368e+03, threshold=1.185e+03, percent-clipped=2.0 +2023-04-03 06:38:51,894 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=172987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:39:07,853 INFO [train.py:903] (3/4) Epoch 26, batch 2300, loss[loss=0.1722, simple_loss=0.2583, pruned_loss=0.04306, over 19767.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2846, pruned_loss=0.06167, over 3822945.67 frames. ], batch size: 54, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:39:13,393 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 06:39:19,742 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 06:40:11,067 INFO [train.py:903] (3/4) Epoch 26, batch 2350, loss[loss=0.1838, simple_loss=0.2793, pruned_loss=0.04411, over 19781.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2832, pruned_loss=0.06076, over 3835238.77 frames. 
], batch size: 56, lr: 3.16e-03, grad_scale: 4.0 +2023-04-03 06:40:25,945 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.095e+02 5.175e+02 6.518e+02 7.971e+02 2.695e+03, threshold=1.304e+03, percent-clipped=8.0 +2023-04-03 06:40:43,713 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173075.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:40:54,414 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 06:41:08,290 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:10,530 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 06:41:14,056 INFO [train.py:903] (3/4) Epoch 26, batch 2400, loss[loss=0.2054, simple_loss=0.2798, pruned_loss=0.06551, over 19611.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2836, pruned_loss=0.06106, over 3832090.82 frames. ], batch size: 50, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:41:18,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:41:48,405 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3391, 1.5890, 2.1076, 1.6111, 3.0881, 4.7584, 4.5893, 5.1685], + device='cuda:3'), covar=tensor([0.1620, 0.3681, 0.3165, 0.2407, 0.0635, 0.0176, 0.0169, 0.0191], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0330, 0.0362, 0.0271, 0.0252, 0.0193, 0.0220, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 06:42:19,069 INFO [train.py:903] (3/4) Epoch 26, batch 2450, loss[loss=0.1783, simple_loss=0.2701, pruned_loss=0.04326, over 18278.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2837, pruned_loss=0.06097, over 3836267.02 frames. ], batch size: 40, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:42:32,913 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.972e+02 5.973e+02 7.732e+02 1.743e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 06:42:41,761 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173168.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:22,682 INFO [train.py:903] (3/4) Epoch 26, batch 2500, loss[loss=0.1825, simple_loss=0.2708, pruned_loss=0.04708, over 19777.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.0607, over 3822413.55 frames. ], batch size: 54, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:43:31,272 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173207.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:43:36,111 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173211.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:44:25,912 INFO [train.py:903] (3/4) Epoch 26, batch 2550, loss[loss=0.2117, simple_loss=0.2897, pruned_loss=0.06687, over 17415.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2831, pruned_loss=0.06055, over 3834947.28 frames. 
], batch size: 101, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:44:40,272 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3687, 2.0250, 1.5932, 1.3762, 1.8238, 1.2823, 1.3935, 1.7946], + device='cuda:3'), covar=tensor([0.0962, 0.0793, 0.1073, 0.0884, 0.0606, 0.1293, 0.0658, 0.0498], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0317, 0.0337, 0.0269, 0.0248, 0.0341, 0.0291, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:44:40,983 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.733e+02 4.982e+02 5.984e+02 8.594e+02 2.255e+03, threshold=1.197e+03, percent-clipped=6.0 +2023-04-03 06:45:23,060 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 06:45:30,064 INFO [train.py:903] (3/4) Epoch 26, batch 2600, loss[loss=0.1863, simple_loss=0.2688, pruned_loss=0.0519, over 19682.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06046, over 3838911.09 frames. ], batch size: 53, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:45:59,771 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173322.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173327.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:05,522 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4305, 2.0335, 2.1291, 2.9939, 1.9463, 2.4519, 2.5130, 2.2623], + device='cuda:3'), covar=tensor([0.0737, 0.0922, 0.0914, 0.0760, 0.0887, 0.0780, 0.0900, 0.0684], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0226, 0.0228, 0.0242, 0.0228, 0.0214, 0.0190, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 06:46:10,053 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:35,582 INFO [train.py:903] (3/4) Epoch 26, batch 2650, loss[loss=0.2221, simple_loss=0.2986, pruned_loss=0.07277, over 18785.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2828, pruned_loss=0.06035, over 3828860.15 frames. ], batch size: 74, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:46:42,202 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 06:46:43,071 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173356.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:45,486 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:46:49,593 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 5.033e+02 5.962e+02 7.363e+02 1.395e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 06:46:55,536 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 06:47:17,711 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:47:39,142 INFO [train.py:903] (3/4) Epoch 26, batch 2700, loss[loss=0.1982, simple_loss=0.2868, pruned_loss=0.05486, over 19569.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.283, pruned_loss=0.06019, over 3834176.38 frames. 
], batch size: 61, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:00,262 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0249, 4.4604, 4.8036, 4.8034, 1.7251, 4.4965, 3.8766, 4.5145], + device='cuda:3'), covar=tensor([0.1764, 0.0883, 0.0602, 0.0702, 0.6366, 0.0868, 0.0684, 0.1155], + device='cuda:3'), in_proj_covar=tensor([0.0806, 0.0773, 0.0978, 0.0861, 0.0856, 0.0742, 0.0584, 0.0909], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 06:48:32,625 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7951, 1.8621, 2.1335, 2.2162, 1.6853, 2.1164, 2.1017, 1.9591], + device='cuda:3'), covar=tensor([0.4175, 0.3930, 0.2030, 0.2503, 0.4085, 0.2304, 0.5070, 0.3447], + device='cuda:3'), in_proj_covar=tensor([0.0928, 0.1004, 0.0736, 0.0949, 0.0908, 0.0841, 0.0859, 0.0805], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 06:48:39,711 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-03 06:48:41,208 INFO [train.py:903] (3/4) Epoch 26, batch 2750, loss[loss=0.2198, simple_loss=0.2991, pruned_loss=0.0703, over 19586.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.05998, over 3829654.14 frames. ], batch size: 61, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:48:54,809 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.847e+02 4.981e+02 6.163e+02 7.639e+02 1.916e+03, threshold=1.233e+03, percent-clipped=5.0 +2023-04-03 06:49:03,502 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:34,182 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:49:44,193 INFO [train.py:903] (3/4) Epoch 26, batch 2800, loss[loss=0.215, simple_loss=0.2926, pruned_loss=0.06869, over 19604.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2834, pruned_loss=0.0604, over 3811102.11 frames. ], batch size: 57, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:49:52,990 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4604, 1.5164, 1.7436, 1.7361, 2.6287, 2.2567, 2.7945, 1.2266], + device='cuda:3'), covar=tensor([0.2540, 0.4347, 0.2745, 0.1948, 0.1497, 0.2161, 0.1388, 0.4641], + device='cuda:3'), in_proj_covar=tensor([0.0545, 0.0661, 0.0739, 0.0500, 0.0629, 0.0539, 0.0666, 0.0566], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 06:49:56,459 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173509.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:49:56,869 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.56 vs. limit=5.0 +2023-04-03 06:50:00,282 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:50:48,002 INFO [train.py:903] (3/4) Epoch 26, batch 2850, loss[loss=0.2095, simple_loss=0.2945, pruned_loss=0.06223, over 19629.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2844, pruned_loss=0.06066, over 3817754.56 frames. 
], batch size: 50, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:01,776 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.227e+02 4.954e+02 6.202e+02 8.183e+02 1.932e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 06:51:23,220 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173578.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:51:51,137 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 06:51:52,155 INFO [train.py:903] (3/4) Epoch 26, batch 2900, loss[loss=0.2523, simple_loss=0.3183, pruned_loss=0.0932, over 19607.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2851, pruned_loss=0.06115, over 3820166.81 frames. ], batch size: 57, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:51:57,199 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173603.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:08,019 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9877, 2.0492, 2.3584, 2.5612, 1.9680, 2.5125, 2.3283, 2.1710], + device='cuda:3'), covar=tensor([0.4283, 0.4063, 0.1911, 0.2539, 0.4252, 0.2196, 0.4975, 0.3331], + device='cuda:3'), in_proj_covar=tensor([0.0928, 0.1003, 0.0736, 0.0946, 0.0907, 0.0840, 0.0859, 0.0805], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 06:52:27,628 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:34,772 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=173632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:52:56,769 INFO [train.py:903] (3/4) Epoch 26, batch 2950, loss[loss=0.2032, simple_loss=0.2883, pruned_loss=0.05901, over 19780.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2849, pruned_loss=0.06121, over 3820699.74 frames. ], batch size: 56, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:10,750 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.057e+02 5.287e+02 6.719e+02 8.706e+02 2.181e+03, threshold=1.344e+03, percent-clipped=5.0 +2023-04-03 06:53:23,945 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173671.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:53:59,627 INFO [train.py:903] (3/4) Epoch 26, batch 3000, loss[loss=0.1973, simple_loss=0.2839, pruned_loss=0.0553, over 19688.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2841, pruned_loss=0.06086, over 3830366.00 frames. ], batch size: 59, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:53:59,627 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 06:54:12,257 INFO [train.py:937] (3/4) Epoch 26, validation: loss=0.1681, simple_loss=0.2675, pruned_loss=0.03435, over 944034.00 frames. +2023-04-03 06:54:12,259 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 06:54:17,259 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 06:55:16,507 INFO [train.py:903] (3/4) Epoch 26, batch 3050, loss[loss=0.172, simple_loss=0.2526, pruned_loss=0.04572, over 19401.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2844, pruned_loss=0.0609, over 3825496.13 frames. 
], batch size: 48, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:55:30,851 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.198e+02 4.825e+02 5.950e+02 7.456e+02 1.374e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 06:56:02,634 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173786.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:56:20,031 INFO [train.py:903] (3/4) Epoch 26, batch 3100, loss[loss=0.1731, simple_loss=0.2519, pruned_loss=0.04712, over 19763.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06032, over 3832235.37 frames. ], batch size: 48, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:56:32,224 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-03 06:56:49,601 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9059, 1.2884, 1.6275, 0.5685, 1.9463, 2.4421, 2.1464, 2.5955], + device='cuda:3'), covar=tensor([0.1638, 0.3850, 0.3375, 0.2830, 0.0678, 0.0273, 0.0341, 0.0386], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0328, 0.0361, 0.0269, 0.0252, 0.0193, 0.0218, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 06:57:23,192 INFO [train.py:903] (3/4) Epoch 26, batch 3150, loss[loss=0.1896, simple_loss=0.275, pruned_loss=0.05207, over 19586.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06118, over 3815381.97 frames. ], batch size: 52, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:57:23,615 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1255, 1.2395, 1.7313, 1.0336, 2.3770, 3.0846, 2.8264, 3.2798], + device='cuda:3'), covar=tensor([0.1599, 0.3897, 0.3237, 0.2617, 0.0591, 0.0217, 0.0243, 0.0331], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0328, 0.0361, 0.0269, 0.0252, 0.0193, 0.0218, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 06:57:26,865 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173853.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 06:57:37,138 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 5.081e+02 6.233e+02 7.406e+02 2.417e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-04-03 06:57:49,125 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 06:58:00,852 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 06:58:02,702 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.1294, 5.5992, 2.9915, 4.7742, 0.9397, 5.7123, 5.4859, 5.7156], + device='cuda:3'), covar=tensor([0.0364, 0.0789, 0.2009, 0.0724, 0.4274, 0.0553, 0.0864, 0.0943], + device='cuda:3'), in_proj_covar=tensor([0.0525, 0.0426, 0.0513, 0.0359, 0.0411, 0.0451, 0.0448, 0.0476], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:58:05,087 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=173883.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:58:25,322 INFO [train.py:903] (3/4) Epoch 26, batch 3200, loss[loss=0.2349, simple_loss=0.3085, pruned_loss=0.08066, over 18123.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06152, over 3813406.10 frames. 
], batch size: 83, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:58:34,884 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9873, 1.6979, 1.5673, 1.8761, 1.6616, 1.6471, 1.4844, 1.8267], + device='cuda:3'), covar=tensor([0.1077, 0.1474, 0.1622, 0.1067, 0.1441, 0.0641, 0.1683, 0.0851], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0359, 0.0316, 0.0256, 0.0307, 0.0256, 0.0319, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:58:35,893 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=173908.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 06:59:25,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6068, 1.9830, 2.1061, 2.0730, 3.3416, 1.7450, 2.8601, 3.5656], + device='cuda:3'), covar=tensor([0.0442, 0.2255, 0.2319, 0.1617, 0.0501, 0.2065, 0.1705, 0.0280], + device='cuda:3'), in_proj_covar=tensor([0.0418, 0.0373, 0.0392, 0.0351, 0.0377, 0.0354, 0.0391, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 06:59:27,977 INFO [train.py:903] (3/4) Epoch 26, batch 3250, loss[loss=0.2219, simple_loss=0.3036, pruned_loss=0.07005, over 19329.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2855, pruned_loss=0.06193, over 3806420.85 frames. ], batch size: 66, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 06:59:42,863 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.716e+02 4.798e+02 6.185e+02 8.155e+02 2.789e+03, threshold=1.237e+03, percent-clipped=7.0 +2023-04-03 06:59:52,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=173968.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:00:01,804 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=173976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:00:31,303 INFO [train.py:903] (3/4) Epoch 26, batch 3300, loss[loss=0.2165, simple_loss=0.2872, pruned_loss=0.0729, over 19610.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.285, pruned_loss=0.06181, over 3809330.62 frames. ], batch size: 50, lr: 3.16e-03, grad_scale: 8.0 +2023-04-03 07:00:35,708 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 07:00:38,017 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9676, 1.8028, 1.7835, 2.0107, 1.8813, 1.7057, 1.7261, 1.9104], + device='cuda:3'), covar=tensor([0.0873, 0.1270, 0.1191, 0.0821, 0.1027, 0.0526, 0.1242, 0.0655], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0358, 0.0316, 0.0256, 0.0306, 0.0256, 0.0319, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:01:14,204 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174033.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:17,518 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:26,023 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:01:35,839 INFO [train.py:903] (3/4) Epoch 26, batch 3350, loss[loss=0.2249, simple_loss=0.2914, pruned_loss=0.07919, over 19733.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.06119, over 3822511.41 frames. ], batch size: 51, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:01:49,566 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.212e+02 4.999e+02 5.843e+02 7.881e+02 1.777e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 07:01:56,819 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:28,303 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:02:39,919 INFO [train.py:903] (3/4) Epoch 26, batch 3400, loss[loss=0.1825, simple_loss=0.2588, pruned_loss=0.05309, over 19751.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2843, pruned_loss=0.06132, over 3815721.97 frames. ], batch size: 45, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:15,052 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 07:03:41,102 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 07:03:42,262 INFO [train.py:903] (3/4) Epoch 26, batch 3450, loss[loss=0.2057, simple_loss=0.2885, pruned_loss=0.06143, over 19656.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2846, pruned_loss=0.06152, over 3815350.97 frames. ], batch size: 55, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:03:53,287 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:03:57,816 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.666e+02 5.900e+02 7.449e+02 1.550e+03, threshold=1.180e+03, percent-clipped=6.0 +2023-04-03 07:04:02,620 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1360, 1.1844, 1.4483, 1.3641, 2.7225, 1.0706, 2.1684, 3.0901], + device='cuda:3'), covar=tensor([0.0598, 0.3106, 0.3029, 0.1980, 0.0796, 0.2520, 0.1292, 0.0330], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0376, 0.0395, 0.0353, 0.0382, 0.0357, 0.0394, 0.0414], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:04:47,404 INFO [train.py:903] (3/4) Epoch 26, batch 3500, loss[loss=0.1889, simple_loss=0.2646, pruned_loss=0.05658, over 19613.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2851, pruned_loss=0.06203, over 3814574.68 frames. ], batch size: 50, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:05:18,052 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174224.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:05:49,655 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174249.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 07:05:50,383 INFO [train.py:903] (3/4) Epoch 26, batch 3550, loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06335, over 19542.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2864, pruned_loss=0.06299, over 3799929.40 frames. 
], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:03,179 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.676e+02 6.063e+02 7.972e+02 1.969e+03, threshold=1.213e+03, percent-clipped=7.0 +2023-04-03 07:06:12,983 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5105, 1.5690, 1.8690, 1.7509, 2.7218, 2.3363, 2.9459, 1.3171], + device='cuda:3'), covar=tensor([0.2436, 0.4198, 0.2707, 0.1908, 0.1449, 0.2077, 0.1333, 0.4462], + device='cuda:3'), in_proj_covar=tensor([0.0545, 0.0664, 0.0740, 0.0499, 0.0628, 0.0539, 0.0664, 0.0567], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 07:06:17,607 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3045, 1.4973, 1.9630, 1.5126, 2.9342, 4.6607, 4.4993, 5.1159], + device='cuda:3'), covar=tensor([0.1654, 0.3818, 0.3344, 0.2565, 0.0712, 0.0216, 0.0176, 0.0222], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0327, 0.0361, 0.0269, 0.0250, 0.0193, 0.0218, 0.0270], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 07:06:53,369 INFO [train.py:903] (3/4) Epoch 26, batch 3600, loss[loss=0.1759, simple_loss=0.2674, pruned_loss=0.04223, over 19774.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2843, pruned_loss=0.06164, over 3819401.79 frames. ], batch size: 54, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:06:54,925 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:05,326 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174309.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:54,127 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:07:57,119 INFO [train.py:903] (3/4) Epoch 26, batch 3650, loss[loss=0.2291, simple_loss=0.3014, pruned_loss=0.07843, over 19690.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.06135, over 3828486.93 frames. ], batch size: 53, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:08:12,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 5.064e+02 6.896e+02 8.623e+02 2.807e+03, threshold=1.379e+03, percent-clipped=9.0 +2023-04-03 07:08:26,880 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:28,386 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 07:08:32,716 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:08:35,179 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:02,303 INFO [train.py:903] (3/4) Epoch 26, batch 3700, loss[loss=0.2145, simple_loss=0.2854, pruned_loss=0.07178, over 19630.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06162, over 3829726.42 frames. 
], batch size: 50, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:09:10,646 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:09:15,242 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1384, 2.8831, 2.2769, 2.2532, 2.0220, 2.5266, 1.0663, 2.1066], + device='cuda:3'), covar=tensor([0.0801, 0.0625, 0.0797, 0.1266, 0.1241, 0.1133, 0.1530, 0.1132], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0362, 0.0367, 0.0391, 0.0468, 0.0396, 0.0346, 0.0346], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 07:09:15,871 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 07:10:07,621 INFO [train.py:903] (3/4) Epoch 26, batch 3750, loss[loss=0.2488, simple_loss=0.3176, pruned_loss=0.09002, over 19796.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06122, over 3829131.28 frames. ], batch size: 56, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:10:20,548 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.722e+02 4.995e+02 5.757e+02 7.069e+02 1.518e+03, threshold=1.151e+03, percent-clipped=1.0 +2023-04-03 07:10:24,805 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 07:11:01,670 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:04,059 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:10,963 INFO [train.py:903] (3/4) Epoch 26, batch 3800, loss[loss=0.1729, simple_loss=0.2513, pruned_loss=0.04725, over 19383.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06103, over 3820068.71 frames. ], batch size: 47, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:11:12,312 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:11:42,948 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 07:11:55,976 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 07:12:14,059 INFO [train.py:903] (3/4) Epoch 26, batch 3850, loss[loss=0.2102, simple_loss=0.2862, pruned_loss=0.06716, over 19739.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2862, pruned_loss=0.06197, over 3819211.81 frames. ], batch size: 45, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:12:27,530 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 5.118e+02 6.382e+02 8.415e+02 1.605e+03, threshold=1.276e+03, percent-clipped=5.0 +2023-04-03 07:13:15,048 INFO [train.py:903] (3/4) Epoch 26, batch 3900, loss[loss=0.1772, simple_loss=0.2555, pruned_loss=0.04949, over 19611.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2861, pruned_loss=0.06213, over 3827688.48 frames. 
], batch size: 50, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:13:36,295 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:13,844 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:20,757 INFO [train.py:903] (3/4) Epoch 26, batch 3950, loss[loss=0.2076, simple_loss=0.2859, pruned_loss=0.06469, over 19587.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2869, pruned_loss=0.06238, over 3823440.78 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:14:24,354 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 07:14:24,512 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:14:34,032 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.927e+02 6.085e+02 7.650e+02 1.385e+03, threshold=1.217e+03, percent-clipped=3.0 +2023-04-03 07:14:54,713 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 07:15:24,376 INFO [train.py:903] (3/4) Epoch 26, batch 4000, loss[loss=0.1781, simple_loss=0.2521, pruned_loss=0.05208, over 19757.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2862, pruned_loss=0.06195, over 3816546.88 frames. ], batch size: 46, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:15:54,776 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=174723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:13,021 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 07:16:15,486 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1329, 1.8083, 1.8751, 1.6171, 4.7022, 1.2715, 2.6753, 4.9787], + device='cuda:3'), covar=tensor([0.0421, 0.2690, 0.2718, 0.2068, 0.0666, 0.2626, 0.1407, 0.0160], + device='cuda:3'), in_proj_covar=tensor([0.0418, 0.0372, 0.0390, 0.0349, 0.0377, 0.0353, 0.0389, 0.0409], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:16:23,701 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0375, 1.7685, 1.9432, 2.7817, 1.9160, 2.3799, 2.5687, 1.9859], + device='cuda:3'), covar=tensor([0.0851, 0.0959, 0.0978, 0.0768, 0.0902, 0.0714, 0.0815, 0.0706], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0223, 0.0227, 0.0240, 0.0224, 0.0212, 0.0187, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 07:16:24,951 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:26,990 INFO [train.py:903] (3/4) Epoch 26, batch 4050, loss[loss=0.1947, simple_loss=0.2772, pruned_loss=0.05614, over 19840.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2859, pruned_loss=0.06186, over 3829508.13 frames. 
], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:16:27,193 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:27,483 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:40,317 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:41,107 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 4.947e+02 6.251e+02 7.600e+02 1.203e+03, threshold=1.250e+03, percent-clipped=0.0 +2023-04-03 07:16:52,077 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174768.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:16:59,323 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174773.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:01,697 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174775.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:17:32,067 INFO [train.py:903] (3/4) Epoch 26, batch 4100, loss[loss=0.2342, simple_loss=0.3251, pruned_loss=0.07169, over 17615.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06134, over 3833853.60 frames. ], batch size: 101, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:17:50,339 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.29 vs. limit=5.0 +2023-04-03 07:18:07,596 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 07:18:38,176 INFO [train.py:903] (3/4) Epoch 26, batch 4150, loss[loss=0.1824, simple_loss=0.2663, pruned_loss=0.04928, over 19590.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2834, pruned_loss=0.06056, over 3834946.00 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:18:53,313 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.046e+02 6.484e+02 8.542e+02 2.236e+03, threshold=1.297e+03, percent-clipped=8.0 +2023-04-03 07:18:57,193 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=174865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:05,609 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=174872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:26,294 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 07:19:38,551 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=174897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:19:41,592 INFO [train.py:903] (3/4) Epoch 26, batch 4200, loss[loss=0.2063, simple_loss=0.2916, pruned_loss=0.0605, over 19594.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.0614, over 3823866.30 frames. ], batch size: 61, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:19:42,860 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-03 07:19:46,872 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1348, 1.3736, 1.7928, 1.1933, 2.5617, 3.4103, 3.0772, 3.6082], + device='cuda:3'), covar=tensor([0.1556, 0.3745, 0.3207, 0.2481, 0.0550, 0.0191, 0.0236, 0.0301], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0329, 0.0362, 0.0271, 0.0252, 0.0194, 0.0219, 0.0271], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 07:20:44,732 INFO [train.py:903] (3/4) Epoch 26, batch 4250, loss[loss=0.1884, simple_loss=0.2675, pruned_loss=0.05463, over 19729.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2849, pruned_loss=0.06132, over 3822227.81 frames. ], batch size: 47, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:20:55,198 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 07:21:01,853 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 4.804e+02 5.687e+02 7.207e+02 1.586e+03, threshold=1.137e+03, percent-clipped=5.0 +2023-04-03 07:21:07,833 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 07:21:10,193 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0408, 1.8101, 1.9587, 2.6533, 1.7000, 2.3108, 2.2608, 2.0757], + device='cuda:3'), covar=tensor([0.0827, 0.0930, 0.0933, 0.0797, 0.0970, 0.0774, 0.0917, 0.0683], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0225, 0.0212, 0.0188, 0.0204], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 07:21:49,466 INFO [train.py:903] (3/4) Epoch 26, batch 4300, loss[loss=0.1759, simple_loss=0.2643, pruned_loss=0.04376, over 19816.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06115, over 3816936.69 frames. ], batch size: 49, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:22:11,506 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:19,610 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.7065, 5.2693, 3.2294, 4.5488, 1.2232, 5.2890, 5.1262, 5.2324], + device='cuda:3'), covar=tensor([0.0383, 0.0754, 0.1666, 0.0653, 0.3850, 0.0544, 0.0779, 0.1039], + device='cuda:3'), in_proj_covar=tensor([0.0523, 0.0423, 0.0508, 0.0355, 0.0409, 0.0448, 0.0445, 0.0472], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:22:20,977 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175024.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:38,871 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 07:22:41,557 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:52,853 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:22:53,613 INFO [train.py:903] (3/4) Epoch 26, batch 4350, loss[loss=0.1916, simple_loss=0.2789, pruned_loss=0.05217, over 19848.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06075, over 3830453.35 frames. 
], batch size: 52, lr: 3.15e-03, grad_scale: 4.0 +2023-04-03 07:23:08,531 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 4.980e+02 6.586e+02 7.974e+02 2.340e+03, threshold=1.317e+03, percent-clipped=8.0 +2023-04-03 07:23:14,403 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175067.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:23:56,993 INFO [train.py:903] (3/4) Epoch 26, batch 4400, loss[loss=0.1891, simple_loss=0.2725, pruned_loss=0.05285, over 19837.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06063, over 3825200.70 frames. ], batch size: 52, lr: 3.15e-03, grad_scale: 8.0 +2023-04-03 07:24:17,272 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 07:24:23,150 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:26,278 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 07:24:55,280 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175146.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:24:59,528 INFO [train.py:903] (3/4) Epoch 26, batch 4450, loss[loss=0.25, simple_loss=0.3216, pruned_loss=0.08918, over 17493.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.06134, over 3823349.14 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:25:08,212 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:25:14,960 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.830e+02 6.413e+02 8.619e+02 2.132e+03, threshold=1.283e+03, percent-clipped=8.0 +2023-04-03 07:25:41,757 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175182.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:26:02,757 INFO [train.py:903] (3/4) Epoch 26, batch 4500, loss[loss=0.1906, simple_loss=0.276, pruned_loss=0.0526, over 19608.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2842, pruned_loss=0.06146, over 3815517.33 frames. ], batch size: 61, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:26:35,234 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 07:26:36,029 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:27:08,200 INFO [train.py:903] (3/4) Epoch 26, batch 4550, loss[loss=0.2566, simple_loss=0.3412, pruned_loss=0.08603, over 19527.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2838, pruned_loss=0.06127, over 3805603.11 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:27:15,053 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 07:27:23,523 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.834e+02 6.204e+02 7.660e+02 2.009e+03, threshold=1.241e+03, percent-clipped=3.0 +2023-04-03 07:27:40,264 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 07:28:13,289 INFO [train.py:903] (3/4) Epoch 26, batch 4600, loss[loss=0.1926, simple_loss=0.2761, pruned_loss=0.05459, over 19669.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06123, over 3812266.95 frames. ], batch size: 55, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:16,052 INFO [train.py:903] (3/4) Epoch 26, batch 4650, loss[loss=0.2233, simple_loss=0.305, pruned_loss=0.07077, over 17478.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2842, pruned_loss=0.0611, over 3815303.96 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:29:29,905 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.676e+02 4.879e+02 5.869e+02 7.462e+02 1.692e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 07:29:31,125 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 07:29:44,497 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 07:30:03,985 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:17,582 INFO [train.py:903] (3/4) Epoch 26, batch 4700, loss[loss=0.2525, simple_loss=0.3263, pruned_loss=0.08939, over 17370.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06196, over 3813629.94 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:30:19,595 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2051, 2.2413, 2.4425, 2.9868, 2.2910, 2.8706, 2.4412, 2.2152], + device='cuda:3'), covar=tensor([0.4319, 0.4398, 0.2032, 0.2718, 0.4701, 0.2354, 0.5149, 0.3597], + device='cuda:3'), in_proj_covar=tensor([0.0928, 0.1004, 0.0737, 0.0951, 0.0908, 0.0843, 0.0858, 0.0803], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 07:30:34,462 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8909, 4.3133, 4.6202, 4.6147, 1.7493, 4.3398, 3.7267, 4.3352], + device='cuda:3'), covar=tensor([0.1790, 0.0949, 0.0681, 0.0759, 0.6114, 0.1032, 0.0732, 0.1224], + device='cuda:3'), in_proj_covar=tensor([0.0812, 0.0775, 0.0977, 0.0862, 0.0852, 0.0744, 0.0582, 0.0907], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 07:30:35,691 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:30:40,993 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 07:31:07,019 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175438.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:31:21,517 INFO [train.py:903] (3/4) Epoch 26, batch 4750, loss[loss=0.1977, simple_loss=0.2827, pruned_loss=0.05637, over 19662.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2856, pruned_loss=0.0621, over 3806888.88 frames. 
], batch size: 58, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:31:37,189 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.945e+02 6.030e+02 7.655e+02 2.128e+03, threshold=1.206e+03, percent-clipped=6.0 +2023-04-03 07:31:38,786 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175463.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:22,862 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:32:23,788 INFO [train.py:903] (3/4) Epoch 26, batch 4800, loss[loss=0.2156, simple_loss=0.291, pruned_loss=0.07011, over 13085.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2847, pruned_loss=0.06165, over 3813262.01 frames. ], batch size: 137, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:32:25,152 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175501.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:33:28,514 INFO [train.py:903] (3/4) Epoch 26, batch 4850, loss[loss=0.1801, simple_loss=0.2729, pruned_loss=0.0436, over 19463.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06147, over 3809340.78 frames. ], batch size: 64, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:33:42,550 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.071e+02 4.553e+02 5.462e+02 6.461e+02 1.537e+03, threshold=1.092e+03, percent-clipped=2.0 +2023-04-03 07:33:49,690 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 07:33:51,024 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175569.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:34:12,021 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 07:34:18,041 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 07:34:18,069 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 07:34:27,160 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 07:34:30,589 INFO [train.py:903] (3/4) Epoch 26, batch 4900, loss[loss=0.2575, simple_loss=0.331, pruned_loss=0.092, over 19610.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2855, pruned_loss=0.06172, over 3809819.27 frames. ], batch size: 57, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:34:46,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 07:34:50,785 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:35:31,952 INFO [train.py:903] (3/4) Epoch 26, batch 4950, loss[loss=0.203, simple_loss=0.2883, pruned_loss=0.05883, over 19538.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2846, pruned_loss=0.06114, over 3823355.91 frames. ], batch size: 56, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:35:49,939 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.878e+02 6.108e+02 7.489e+02 1.803e+03, threshold=1.222e+03, percent-clipped=10.0 +2023-04-03 07:35:49,976 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. 
Duration: 28.72225 +2023-04-03 07:36:13,140 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 07:36:15,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175684.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:36:36,321 INFO [train.py:903] (3/4) Epoch 26, batch 5000, loss[loss=0.2048, simple_loss=0.2783, pruned_loss=0.0657, over 19603.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2836, pruned_loss=0.0607, over 3822077.43 frames. ], batch size: 50, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:36:46,069 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 07:36:56,350 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 07:37:10,909 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175728.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:16,384 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175732.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:39,578 INFO [train.py:903] (3/4) Epoch 26, batch 5050, loss[loss=0.2132, simple_loss=0.2949, pruned_loss=0.06572, over 19770.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2834, pruned_loss=0.06005, over 3826244.15 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:37:41,475 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.67 vs. limit=5.0 +2023-04-03 07:37:46,846 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:37:49,654 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.64 vs. limit=5.0 +2023-04-03 07:37:53,680 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.916e+02 5.717e+02 6.994e+02 1.273e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-04-03 07:38:14,302 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 07:38:39,106 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0312, 3.7228, 2.4954, 3.3576, 0.8804, 3.6748, 3.5316, 3.6029], + device='cuda:3'), covar=tensor([0.0748, 0.1027, 0.1967, 0.0924, 0.3891, 0.0767, 0.1006, 0.1171], + device='cuda:3'), in_proj_covar=tensor([0.0523, 0.0423, 0.0507, 0.0358, 0.0409, 0.0449, 0.0445, 0.0473], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:38:42,427 INFO [train.py:903] (3/4) Epoch 26, batch 5100, loss[loss=0.2563, simple_loss=0.3294, pruned_loss=0.09154, over 19593.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2852, pruned_loss=0.06144, over 3824926.52 frames. ], batch size: 52, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:38:49,479 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. 
Duration: 31.02225 +2023-04-03 07:38:49,800 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4011, 2.1735, 1.6484, 1.4977, 2.0340, 1.3560, 1.2695, 1.8709], + device='cuda:3'), covar=tensor([0.1030, 0.0838, 0.1016, 0.0819, 0.0497, 0.1246, 0.0765, 0.0473], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0319, 0.0337, 0.0271, 0.0250, 0.0344, 0.0291, 0.0273], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:38:52,870 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 07:38:58,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 07:39:30,272 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2257, 1.2640, 1.2771, 1.1080, 1.0610, 1.1429, 0.1226, 0.3828], + device='cuda:3'), covar=tensor([0.0793, 0.0743, 0.0481, 0.0638, 0.1420, 0.0726, 0.1507, 0.1245], + device='cuda:3'), in_proj_covar=tensor([0.0363, 0.0361, 0.0366, 0.0389, 0.0469, 0.0396, 0.0344, 0.0346], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 07:39:37,122 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=175843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:42,013 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175847.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:39:45,161 INFO [train.py:903] (3/4) Epoch 26, batch 5150, loss[loss=0.1893, simple_loss=0.2607, pruned_loss=0.05901, over 19369.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06144, over 3820486.10 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:39:55,538 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 07:40:01,368 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 5.003e+02 6.332e+02 8.398e+02 1.509e+03, threshold=1.266e+03, percent-clipped=8.0 +2023-04-03 07:40:14,025 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:15,284 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:30,092 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 07:40:47,123 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175897.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:49,344 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:40:50,194 INFO [train.py:903] (3/4) Epoch 26, batch 5200, loss[loss=0.178, simple_loss=0.2605, pruned_loss=0.04772, over 19426.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06118, over 3815005.51 frames. 
], batch size: 48, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:40:54,086 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5681, 1.1631, 1.4280, 1.3488, 2.2019, 1.1318, 2.0787, 2.5440], + device='cuda:3'), covar=tensor([0.0678, 0.2842, 0.2796, 0.1614, 0.0876, 0.1960, 0.1049, 0.0430], + device='cuda:3'), in_proj_covar=tensor([0.0417, 0.0371, 0.0389, 0.0349, 0.0376, 0.0353, 0.0388, 0.0410], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:41:02,247 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 07:41:41,511 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=175940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:41:45,632 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 07:41:53,689 INFO [train.py:903] (3/4) Epoch 26, batch 5250, loss[loss=0.1986, simple_loss=0.2829, pruned_loss=0.05718, over 19444.00 frames. ], tot_loss[loss=0.202, simple_loss=0.283, pruned_loss=0.06049, over 3828943.64 frames. ], batch size: 70, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:42:03,288 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=175958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:07,499 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.675e+02 5.849e+02 7.641e+02 1.436e+03, threshold=1.170e+03, percent-clipped=2.0 +2023-04-03 07:42:09,026 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=175963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:11,349 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=175965.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:42:25,590 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.00 vs. limit=5.0 +2023-04-03 07:42:54,939 INFO [train.py:903] (3/4) Epoch 26, batch 5300, loss[loss=0.2033, simple_loss=0.284, pruned_loss=0.06133, over 19574.00 frames. ], tot_loss[loss=0.203, simple_loss=0.284, pruned_loss=0.06097, over 3831317.87 frames. ], batch size: 52, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:43:08,838 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 07:43:09,634 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-03 07:43:57,534 INFO [train.py:903] (3/4) Epoch 26, batch 5350, loss[loss=0.2346, simple_loss=0.3158, pruned_loss=0.07667, over 19469.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2841, pruned_loss=0.06112, over 3830762.80 frames. ], batch size: 64, lr: 3.14e-03, grad_scale: 8.0 +2023-04-03 07:44:14,899 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.150e+02 4.809e+02 5.990e+02 7.353e+02 1.688e+03, threshold=1.198e+03, percent-clipped=9.0 +2023-04-03 07:44:27,825 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176072.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:44:30,115 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 07:45:03,474 INFO [train.py:903] (3/4) Epoch 26, batch 5400, loss[loss=0.2189, simple_loss=0.3137, pruned_loss=0.06207, over 19524.00 frames. 
], tot_loss[loss=0.2044, simple_loss=0.2856, pruned_loss=0.0616, over 3824293.82 frames. ], batch size: 54, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:45:07,661 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:36,780 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:45:37,872 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:08,012 INFO [train.py:903] (3/4) Epoch 26, batch 5450, loss[loss=0.1778, simple_loss=0.2528, pruned_loss=0.05144, over 19735.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2856, pruned_loss=0.06193, over 3812626.32 frames. ], batch size: 46, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:46:10,696 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:46:23,248 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.815e+02 4.882e+02 5.855e+02 6.912e+02 1.680e+03, threshold=1.171e+03, percent-clipped=1.0 +2023-04-03 07:46:55,139 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:11,317 INFO [train.py:903] (3/4) Epoch 26, batch 5500, loss[loss=0.207, simple_loss=0.2967, pruned_loss=0.0587, over 19183.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06075, over 3819292.36 frames. ], batch size: 69, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:47:28,797 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:47:30,654 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 07:47:37,975 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9961, 1.6228, 1.6124, 1.8271, 1.5124, 1.6598, 1.5054, 1.8189], + device='cuda:3'), covar=tensor([0.1120, 0.1311, 0.1617, 0.1132, 0.1434, 0.0597, 0.1603, 0.0800], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0356, 0.0315, 0.0255, 0.0304, 0.0256, 0.0318, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 07:48:02,322 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:06,833 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:14,664 INFO [train.py:903] (3/4) Epoch 26, batch 5550, loss[loss=0.2739, simple_loss=0.3414, pruned_loss=0.1032, over 17293.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2839, pruned_loss=0.0607, over 3828556.57 frames. ], batch size: 101, lr: 3.14e-03, grad_scale: 4.0 +2023-04-03 07:48:17,144 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-03 07:48:29,013 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176260.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:48:32,235 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.802e+02 5.930e+02 7.241e+02 1.738e+03, threshold=1.186e+03, percent-clipped=4.0 +2023-04-03 07:48:47,667 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:08,001 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 07:49:18,900 INFO [train.py:903] (3/4) Epoch 26, batch 5600, loss[loss=0.1885, simple_loss=0.2639, pruned_loss=0.0566, over 19091.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06028, over 3830537.23 frames. ], batch size: 42, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:49:28,230 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176307.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:49:45,712 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9295, 4.4083, 4.6909, 4.6751, 1.7200, 4.4124, 3.7973, 4.4036], + device='cuda:3'), covar=tensor([0.1691, 0.0864, 0.0620, 0.0714, 0.6167, 0.0932, 0.0755, 0.1075], + device='cuda:3'), in_proj_covar=tensor([0.0815, 0.0774, 0.0980, 0.0863, 0.0859, 0.0748, 0.0582, 0.0910], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 07:50:23,052 INFO [train.py:903] (3/4) Epoch 26, batch 5650, loss[loss=0.1512, simple_loss=0.2323, pruned_loss=0.03504, over 19716.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.284, pruned_loss=0.06109, over 3820193.12 frames. ], batch size: 46, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:50:26,196 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 07:50:32,662 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:50:39,207 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.381e+02 5.619e+02 7.137e+02 2.187e+03, threshold=1.124e+03, percent-clipped=4.0 +2023-04-03 07:51:02,583 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 07:51:24,804 INFO [train.py:903] (3/4) Epoch 26, batch 5700, loss[loss=0.2021, simple_loss=0.2908, pruned_loss=0.0567, over 19781.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2848, pruned_loss=0.06123, over 3823124.38 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:51:52,472 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:19,184 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:52:22,356 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 07:52:26,812 INFO [train.py:903] (3/4) Epoch 26, batch 5750, loss[loss=0.1646, simple_loss=0.2469, pruned_loss=0.04112, over 19800.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2851, pruned_loss=0.06116, over 3828518.76 frames. 
], batch size: 47, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:52:30,380 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 07:52:33,966 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 07:52:44,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 5.112e+02 6.171e+02 7.862e+02 1.795e+03, threshold=1.234e+03, percent-clipped=7.0 +2023-04-03 07:52:50,854 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:09,514 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176484.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:53:28,523 INFO [train.py:903] (3/4) Epoch 26, batch 5800, loss[loss=0.1943, simple_loss=0.2867, pruned_loss=0.05097, over 19526.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2852, pruned_loss=0.06134, over 3812861.66 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:32,081 INFO [train.py:903] (3/4) Epoch 26, batch 5850, loss[loss=0.1653, simple_loss=0.2518, pruned_loss=0.03939, over 19477.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06153, over 3815284.37 frames. ], batch size: 49, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:54:48,246 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.827e+02 6.114e+02 8.553e+02 2.097e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 07:55:29,962 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 07:55:33,096 INFO [train.py:903] (3/4) Epoch 26, batch 5900, loss[loss=0.227, simple_loss=0.2998, pruned_loss=0.07715, over 19357.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2845, pruned_loss=0.06178, over 3821119.72 frames. ], batch size: 70, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:55:37,923 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:49,994 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:55:51,770 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 07:55:55,483 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176619.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:22,772 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:27,332 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:56:35,021 INFO [train.py:903] (3/4) Epoch 26, batch 5950, loss[loss=0.214, simple_loss=0.2909, pruned_loss=0.06859, over 19667.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.06119, over 3834865.89 frames. 
], batch size: 59, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 07:56:51,414 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.421e+02 5.079e+02 6.315e+02 7.489e+02 1.732e+03, threshold=1.263e+03, percent-clipped=4.0 +2023-04-03 07:57:12,971 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:18,414 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:57:38,458 INFO [train.py:903] (3/4) Epoch 26, batch 6000, loss[loss=0.1904, simple_loss=0.2658, pruned_loss=0.05755, over 19399.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.285, pruned_loss=0.0613, over 3825038.73 frames. ], batch size: 48, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:57:38,458 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 07:57:51,359 INFO [train.py:937] (3/4) Epoch 26, validation: loss=0.1675, simple_loss=0.2672, pruned_loss=0.03393, over 944034.00 frames. +2023-04-03 07:57:51,360 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 07:57:55,565 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=176703.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:16,481 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176719.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:36,024 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 07:58:44,226 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=5.42 vs. limit=5.0 +2023-04-03 07:58:54,251 INFO [train.py:903] (3/4) Epoch 26, batch 6050, loss[loss=0.2556, simple_loss=0.3412, pruned_loss=0.08496, over 19338.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2858, pruned_loss=0.06184, over 3811903.97 frames. ], batch size: 70, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 07:59:04,018 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4470, 3.1840, 2.4363, 2.5266, 2.2394, 2.7567, 1.2489, 2.2335], + device='cuda:3'), covar=tensor([0.0675, 0.0598, 0.0788, 0.1198, 0.1151, 0.1069, 0.1560, 0.1108], + device='cuda:3'), in_proj_covar=tensor([0.0359, 0.0358, 0.0363, 0.0386, 0.0463, 0.0391, 0.0341, 0.0341], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 07:59:11,768 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.381e+02 5.094e+02 6.239e+02 7.368e+02 1.384e+03, threshold=1.248e+03, percent-clipped=1.0 +2023-04-03 07:59:57,921 INFO [train.py:903] (3/4) Epoch 26, batch 6100, loss[loss=0.2013, simple_loss=0.2676, pruned_loss=0.06747, over 16440.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06161, over 3814471.51 frames. ], batch size: 36, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:00:33,086 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176828.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:01:00,302 INFO [train.py:903] (3/4) Epoch 26, batch 6150, loss[loss=0.1632, simple_loss=0.238, pruned_loss=0.04419, over 19732.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.0608, over 3833035.68 frames. 
], batch size: 46, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:01:18,088 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.177e+02 4.899e+02 5.851e+02 7.446e+02 2.190e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 08:01:21,469 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 08:01:25,360 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=176871.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:02:01,466 INFO [train.py:903] (3/4) Epoch 26, batch 6200, loss[loss=0.2479, simple_loss=0.3287, pruned_loss=0.08355, over 19543.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2847, pruned_loss=0.06143, over 3823449.28 frames. ], batch size: 56, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:02:54,413 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=176943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:02,385 INFO [train.py:903] (3/4) Epoch 26, batch 6250, loss[loss=0.216, simple_loss=0.3048, pruned_loss=0.06357, over 19681.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2844, pruned_loss=0.06094, over 3832626.26 frames. ], batch size: 59, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:03:20,781 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.905e+02 4.899e+02 6.025e+02 7.517e+02 2.005e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-04-03 08:03:24,766 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1622, 3.1583, 1.9154, 1.9123, 2.8685, 1.6918, 1.5974, 2.2980], + device='cuda:3'), covar=tensor([0.1336, 0.0723, 0.1074, 0.0920, 0.0552, 0.1327, 0.1010, 0.0667], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0319, 0.0337, 0.0272, 0.0249, 0.0346, 0.0291, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:03:29,813 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 08:03:33,703 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176975.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:37,034 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9556, 1.5834, 1.7074, 1.5321, 3.5427, 1.1339, 2.4270, 4.0778], + device='cuda:3'), covar=tensor([0.0487, 0.2744, 0.2757, 0.2003, 0.0643, 0.2659, 0.1311, 0.0181], + device='cuda:3'), in_proj_covar=tensor([0.0419, 0.0374, 0.0391, 0.0351, 0.0378, 0.0356, 0.0391, 0.0411], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:03:47,548 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=176987.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:03:51,232 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=176990.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:03,343 INFO [train.py:903] (3/4) Epoch 26, batch 6300, loss[loss=0.2135, simple_loss=0.2796, pruned_loss=0.07369, over 19411.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2834, pruned_loss=0.06081, over 3823845.19 frames. 
], batch size: 48, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:04:04,701 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:23,698 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177015.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:37,548 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177027.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:04:56,729 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 08:05:06,334 INFO [train.py:903] (3/4) Epoch 26, batch 6350, loss[loss=0.2168, simple_loss=0.2807, pruned_loss=0.07644, over 19750.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2835, pruned_loss=0.06106, over 3827308.21 frames. ], batch size: 47, lr: 3.13e-03, grad_scale: 4.0 +2023-04-03 08:05:26,140 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.448e+02 5.061e+02 6.104e+02 7.524e+02 1.291e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 08:05:27,891 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1898, 1.8133, 1.4438, 1.2646, 1.6326, 1.2317, 1.1358, 1.6529], + device='cuda:3'), covar=tensor([0.0866, 0.0799, 0.1086, 0.0841, 0.0578, 0.1309, 0.0710, 0.0433], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0317, 0.0334, 0.0270, 0.0248, 0.0343, 0.0290, 0.0272], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:06:11,963 INFO [train.py:903] (3/4) Epoch 26, batch 6400, loss[loss=0.1851, simple_loss=0.2688, pruned_loss=0.05072, over 19598.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2835, pruned_loss=0.06089, over 3822599.89 frames. ], batch size: 57, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:06:14,743 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177102.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:38,229 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 08:06:45,698 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177127.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:06:48,162 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177129.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:05,480 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:07:14,960 INFO [train.py:903] (3/4) Epoch 26, batch 6450, loss[loss=0.1975, simple_loss=0.2844, pruned_loss=0.05523, over 18895.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.06116, over 3826215.70 frames. ], batch size: 74, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:07:33,737 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.467e+02 4.651e+02 5.846e+02 7.696e+02 2.286e+03, threshold=1.169e+03, percent-clipped=3.0 +2023-04-03 08:07:56,651 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 08:08:15,588 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177199.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:16,334 INFO [train.py:903] (3/4) Epoch 26, batch 6500, loss[loss=0.2314, simple_loss=0.3078, pruned_loss=0.07752, over 17549.00 frames. 
], tot_loss[loss=0.2041, simple_loss=0.2847, pruned_loss=0.06174, over 3823094.71 frames. ], batch size: 101, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:08:17,677 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 08:08:27,444 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6129, 1.7226, 1.9560, 1.9601, 1.4844, 1.9187, 1.9535, 1.8142], + device='cuda:3'), covar=tensor([0.4180, 0.3645, 0.1990, 0.2434, 0.3927, 0.2252, 0.5136, 0.3393], + device='cuda:3'), in_proj_covar=tensor([0.0926, 0.1002, 0.0735, 0.0948, 0.0904, 0.0841, 0.0855, 0.0801], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 08:08:38,043 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177215.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:08:49,220 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:09:20,907 INFO [train.py:903] (3/4) Epoch 26, batch 6550, loss[loss=0.2133, simple_loss=0.2947, pruned_loss=0.06599, over 19493.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2851, pruned_loss=0.06208, over 3826821.04 frames. ], batch size: 64, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:09:39,910 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.007e+02 4.796e+02 6.270e+02 7.966e+02 1.683e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 08:10:25,190 INFO [train.py:903] (3/4) Epoch 26, batch 6600, loss[loss=0.2372, simple_loss=0.3154, pruned_loss=0.07956, over 18126.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2846, pruned_loss=0.0618, over 3819668.02 frames. ], batch size: 83, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:10:34,194 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.17 vs. limit=5.0 +2023-04-03 08:11:02,469 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177330.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:27,589 INFO [train.py:903] (3/4) Epoch 26, batch 6650, loss[loss=0.1782, simple_loss=0.2674, pruned_loss=0.0445, over 19611.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2842, pruned_loss=0.0613, over 3835246.41 frames. ], batch size: 57, lr: 3.13e-03, grad_scale: 8.0 +2023-04-03 08:11:37,315 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:11:47,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.339e+02 5.694e+02 7.782e+02 1.307e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 08:12:11,033 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:28,608 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:12:30,480 INFO [train.py:903] (3/4) Epoch 26, batch 6700, loss[loss=0.1905, simple_loss=0.2789, pruned_loss=0.05106, over 19646.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2836, pruned_loss=0.06079, over 3825799.27 frames. 
], batch size: 58, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:01,394 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177423.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:31,551 INFO [train.py:903] (3/4) Epoch 26, batch 6750, loss[loss=0.1924, simple_loss=0.2666, pruned_loss=0.05915, over 19734.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06066, over 3832718.14 frames. ], batch size: 47, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:13:48,517 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.721e+02 5.880e+02 7.244e+02 1.873e+03, threshold=1.176e+03, percent-clipped=5.0 +2023-04-03 08:13:55,626 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177471.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:13:57,777 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177473.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:14:00,226 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1314, 1.0742, 1.1194, 1.2155, 0.9098, 1.2155, 1.1865, 1.1658], + device='cuda:3'), covar=tensor([0.0863, 0.0916, 0.0980, 0.0642, 0.0941, 0.0823, 0.0817, 0.0758], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0224, 0.0228, 0.0240, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 08:14:28,156 INFO [train.py:903] (3/4) Epoch 26, batch 6800, loss[loss=0.2302, simple_loss=0.3146, pruned_loss=0.07291, over 19297.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06121, over 3826247.89 frames. ], batch size: 66, lr: 3.12e-03, grad_scale: 8.0 +2023-04-03 08:14:46,325 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4705, 2.5483, 2.6730, 3.1475, 2.5988, 3.0386, 2.7020, 2.5270], + device='cuda:3'), covar=tensor([0.3608, 0.3270, 0.1610, 0.2079, 0.3344, 0.1737, 0.3837, 0.2753], + device='cuda:3'), in_proj_covar=tensor([0.0923, 0.1002, 0.0734, 0.0946, 0.0902, 0.0841, 0.0854, 0.0801], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 08:15:14,893 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 08:15:15,373 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 08:15:18,382 INFO [train.py:903] (3/4) Epoch 27, batch 0, loss[loss=0.2306, simple_loss=0.3079, pruned_loss=0.07663, over 19734.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3079, pruned_loss=0.07663, over 19734.00 frames. ], batch size: 51, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:15:18,382 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 08:15:30,263 INFO [train.py:937] (3/4) Epoch 27, validation: loss=0.1666, simple_loss=0.2668, pruned_loss=0.03317, over 944034.00 frames. +2023-04-03 08:15:30,264 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 08:15:42,939 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. 
Duration: 25.775 +2023-04-03 08:16:15,168 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.975e+02 6.244e+02 7.696e+02 2.158e+03, threshold=1.249e+03, percent-clipped=8.0 +2023-04-03 08:16:24,559 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177571.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:33,682 INFO [train.py:903] (3/4) Epoch 27, batch 50, loss[loss=0.1724, simple_loss=0.2539, pruned_loss=0.04548, over 19680.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2876, pruned_loss=0.06254, over 864131.17 frames. ], batch size: 53, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:16:44,159 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:44,214 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177586.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:16:46,452 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=177588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:06,226 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 08:17:15,812 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177611.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:17:35,978 INFO [train.py:903] (3/4) Epoch 27, batch 100, loss[loss=0.2421, simple_loss=0.3179, pruned_loss=0.08319, over 19471.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2879, pruned_loss=0.06251, over 1523907.71 frames. ], batch size: 64, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:17:47,459 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 08:17:53,818 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2601, 1.3004, 1.2667, 1.1112, 1.1396, 1.1284, 0.1000, 0.3748], + device='cuda:3'), covar=tensor([0.0750, 0.0710, 0.0488, 0.0694, 0.1379, 0.0740, 0.1458, 0.1244], + device='cuda:3'), in_proj_covar=tensor([0.0360, 0.0360, 0.0363, 0.0387, 0.0467, 0.0392, 0.0342, 0.0344], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:18:23,249 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.041e+02 5.210e+02 6.703e+02 8.227e+02 2.617e+03, threshold=1.341e+03, percent-clipped=11.0 +2023-04-03 08:18:39,597 INFO [train.py:903] (3/4) Epoch 27, batch 150, loss[loss=0.2045, simple_loss=0.2932, pruned_loss=0.0579, over 19567.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2872, pruned_loss=0.06252, over 2044439.04 frames. ], batch size: 61, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:18:44,583 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:18:57,584 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 08:19:40,063 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 08:19:42,203 INFO [train.py:903] (3/4) Epoch 27, batch 200, loss[loss=0.1689, simple_loss=0.2584, pruned_loss=0.03973, over 19586.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2867, pruned_loss=0.06193, over 2452606.79 frames. 
], batch size: 52, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:20:29,448 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 4.383e+02 5.368e+02 7.234e+02 1.640e+03, threshold=1.074e+03, percent-clipped=1.0 +2023-04-03 08:20:41,538 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.80 vs. limit=5.0 +2023-04-03 08:20:46,587 INFO [train.py:903] (3/4) Epoch 27, batch 250, loss[loss=0.1823, simple_loss=0.2741, pruned_loss=0.04522, over 19634.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2851, pruned_loss=0.06114, over 2770387.37 frames. ], batch size: 50, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:21:09,866 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6175, 4.1237, 4.2911, 4.3156, 1.7236, 4.0273, 3.5611, 4.0409], + device='cuda:3'), covar=tensor([0.1752, 0.0862, 0.0697, 0.0739, 0.6162, 0.0994, 0.0675, 0.1191], + device='cuda:3'), in_proj_covar=tensor([0.0812, 0.0777, 0.0981, 0.0862, 0.0860, 0.0746, 0.0580, 0.0908], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 08:21:12,216 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:32,145 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=177815.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:21:50,878 INFO [train.py:903] (3/4) Epoch 27, batch 300, loss[loss=0.2035, simple_loss=0.2879, pruned_loss=0.05955, over 18363.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2838, pruned_loss=0.061, over 3005102.83 frames. ], batch size: 83, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:22:08,570 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:10,860 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=177844.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:36,373 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.100e+02 5.015e+02 6.251e+02 7.839e+02 1.329e+03, threshold=1.250e+03, percent-clipped=8.0 +2023-04-03 08:22:40,026 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177867.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:42,386 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=177869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:22:52,702 INFO [train.py:903] (3/4) Epoch 27, batch 350, loss[loss=0.2051, simple_loss=0.2921, pruned_loss=0.05901, over 19498.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2854, pruned_loss=0.06172, over 3196004.07 frames. ], batch size: 64, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:23:00,617 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. 
Duration: 0.9681875 +2023-04-03 08:23:00,929 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9060, 4.3858, 4.6855, 4.6710, 1.9125, 4.4121, 3.7844, 4.3833], + device='cuda:3'), covar=tensor([0.1780, 0.0787, 0.0583, 0.0735, 0.5904, 0.0873, 0.0709, 0.1189], + device='cuda:3'), in_proj_covar=tensor([0.0810, 0.0777, 0.0979, 0.0861, 0.0858, 0.0745, 0.0579, 0.0909], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 08:23:40,516 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=177915.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:23:56,872 INFO [train.py:903] (3/4) Epoch 27, batch 400, loss[loss=0.189, simple_loss=0.2781, pruned_loss=0.05002, over 19758.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2844, pruned_loss=0.06081, over 3340435.76 frames. ], batch size: 63, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:24:43,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.795e+02 5.555e+02 6.674e+02 1.146e+03, threshold=1.111e+03, percent-clipped=0.0 +2023-04-03 08:24:57,928 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-03 08:24:58,340 INFO [train.py:903] (3/4) Epoch 27, batch 450, loss[loss=0.2942, simple_loss=0.3441, pruned_loss=0.1222, over 13765.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2851, pruned_loss=0.06184, over 3424239.76 frames. ], batch size: 135, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:25:39,717 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 08:25:40,954 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 08:25:59,729 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178026.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:02,522 INFO [train.py:903] (3/4) Epoch 27, batch 500, loss[loss=0.2638, simple_loss=0.3301, pruned_loss=0.09873, over 19771.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06186, over 3527081.34 frames. 
], batch size: 56, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:26:06,491 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:26:06,599 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6933, 1.8422, 2.1585, 1.9827, 3.4965, 2.7709, 3.7656, 1.8063], + device='cuda:3'), covar=tensor([0.2460, 0.4287, 0.2723, 0.1849, 0.1351, 0.2055, 0.1382, 0.4214], + device='cuda:3'), in_proj_covar=tensor([0.0549, 0.0665, 0.0744, 0.0503, 0.0630, 0.0544, 0.0665, 0.0568], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:26:48,570 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 5.151e+02 6.325e+02 8.134e+02 1.856e+03, threshold=1.265e+03, percent-clipped=5.0 +2023-04-03 08:26:51,474 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0734, 2.8461, 2.1971, 2.2115, 2.0155, 2.5048, 1.1317, 2.0724], + device='cuda:3'), covar=tensor([0.0794, 0.0686, 0.0725, 0.1194, 0.1203, 0.1187, 0.1488, 0.1073], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0363, 0.0367, 0.0390, 0.0472, 0.0395, 0.0346, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:27:07,128 INFO [train.py:903] (3/4) Epoch 27, batch 550, loss[loss=0.2008, simple_loss=0.2853, pruned_loss=0.05816, over 19681.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.06066, over 3601795.42 frames. ], batch size: 58, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:27:15,659 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0849, 5.1082, 5.8728, 5.9385, 2.0734, 5.5519, 4.7271, 5.5379], + device='cuda:3'), covar=tensor([0.1830, 0.0845, 0.0601, 0.0661, 0.6328, 0.0819, 0.0650, 0.1224], + device='cuda:3'), in_proj_covar=tensor([0.0815, 0.0781, 0.0984, 0.0865, 0.0861, 0.0749, 0.0580, 0.0913], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 08:28:09,845 INFO [train.py:903] (3/4) Epoch 27, batch 600, loss[loss=0.1908, simple_loss=0.2723, pruned_loss=0.05462, over 19363.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2828, pruned_loss=0.0604, over 3651582.51 frames. ], batch size: 47, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:28:15,816 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9359, 4.3702, 4.6776, 4.6765, 1.9120, 4.3599, 3.8373, 4.4029], + device='cuda:3'), covar=tensor([0.1855, 0.1112, 0.0594, 0.0717, 0.6404, 0.1086, 0.0712, 0.1077], + device='cuda:3'), in_proj_covar=tensor([0.0818, 0.0781, 0.0987, 0.0868, 0.0864, 0.0751, 0.0583, 0.0916], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 08:28:25,333 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:26,330 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178142.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:48,732 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178159.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:28:54,525 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. 
Duration: 27.8166875 +2023-04-03 08:28:55,502 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.215e+02 5.259e+02 6.338e+02 7.640e+02 1.730e+03, threshold=1.268e+03, percent-clipped=4.0 +2023-04-03 08:29:11,522 INFO [train.py:903] (3/4) Epoch 27, batch 650, loss[loss=0.203, simple_loss=0.2813, pruned_loss=0.06231, over 19610.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2838, pruned_loss=0.06087, over 3700325.86 frames. ], batch size: 50, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:29:19,040 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1939, 2.0371, 1.8158, 2.0555, 1.7984, 1.8254, 1.7228, 2.0579], + device='cuda:3'), covar=tensor([0.1019, 0.1296, 0.1595, 0.1105, 0.1483, 0.0576, 0.1560, 0.0734], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0360, 0.0320, 0.0258, 0.0309, 0.0259, 0.0323, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 08:30:11,959 INFO [train.py:903] (3/4) Epoch 27, batch 700, loss[loss=0.1842, simple_loss=0.2776, pruned_loss=0.04541, over 19549.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.06051, over 3737261.67 frames. ], batch size: 61, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:30:49,315 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:30:58,538 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.944e+02 4.694e+02 6.359e+02 8.615e+02 1.569e+03, threshold=1.272e+03, percent-clipped=7.0 +2023-04-03 08:31:11,157 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:16,249 INFO [train.py:903] (3/4) Epoch 27, batch 750, loss[loss=0.2219, simple_loss=0.3051, pruned_loss=0.06936, over 19789.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06067, over 3738568.27 frames. ], batch size: 56, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:31:26,140 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:31:56,981 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:32:17,797 INFO [train.py:903] (3/4) Epoch 27, batch 800, loss[loss=0.1833, simple_loss=0.2583, pruned_loss=0.05417, over 19717.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.283, pruned_loss=0.06068, over 3765990.24 frames. ], batch size: 46, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:32:31,916 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 08:33:04,022 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.236e+02 4.845e+02 6.131e+02 7.065e+02 2.334e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-04-03 08:33:20,261 INFO [train.py:903] (3/4) Epoch 27, batch 850, loss[loss=0.1886, simple_loss=0.2791, pruned_loss=0.04909, over 19659.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2826, pruned_loss=0.06069, over 3774623.70 frames. ], batch size: 58, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:33:46,345 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178397.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:12,683 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. 
Duration: 25.061125 +2023-04-03 08:34:16,564 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:34:22,834 INFO [train.py:903] (3/4) Epoch 27, batch 900, loss[loss=0.2805, simple_loss=0.3408, pruned_loss=0.1101, over 18778.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2831, pruned_loss=0.06063, over 3794614.15 frames. ], batch size: 74, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:34:30,685 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.60 vs. limit=5.0 +2023-04-03 08:35:10,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.108e+02 4.416e+02 5.422e+02 6.593e+02 1.258e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-04-03 08:35:17,045 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178470.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:35:28,161 INFO [train.py:903] (3/4) Epoch 27, batch 950, loss[loss=0.1839, simple_loss=0.2658, pruned_loss=0.05097, over 19750.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2821, pruned_loss=0.06, over 3805823.34 frames. ], batch size: 51, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:35:30,650 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 08:36:11,791 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:32,191 INFO [train.py:903] (3/4) Epoch 27, batch 1000, loss[loss=0.213, simple_loss=0.2807, pruned_loss=0.07267, over 16048.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2827, pruned_loss=0.06037, over 3798216.68 frames. ], batch size: 35, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:36:34,991 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=178530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:36:45,479 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:06,264 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=178555.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:37:19,295 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.158e+02 4.971e+02 6.338e+02 8.822e+02 2.004e+03, threshold=1.268e+03, percent-clipped=12.0 +2023-04-03 08:37:25,264 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 08:37:35,828 INFO [train.py:903] (3/4) Epoch 27, batch 1050, loss[loss=0.1701, simple_loss=0.2557, pruned_loss=0.04222, over 19584.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2827, pruned_loss=0.06037, over 3818358.40 frames. ], batch size: 52, lr: 3.06e-03, grad_scale: 8.0 +2023-04-03 08:37:48,984 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2941, 1.5159, 1.9429, 1.5804, 2.8953, 4.5518, 4.4635, 5.0190], + device='cuda:3'), covar=tensor([0.1651, 0.3902, 0.3438, 0.2432, 0.0670, 0.0205, 0.0175, 0.0189], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0329, 0.0362, 0.0271, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 08:38:09,365 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 08:38:38,428 INFO [train.py:903] (3/4) Epoch 27, batch 1100, loss[loss=0.2191, simple_loss=0.299, pruned_loss=0.06958, over 18794.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06006, over 3832678.72 frames. ], batch size: 74, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:38:50,329 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178637.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:39:24,188 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6145, 1.6331, 1.8579, 1.7607, 2.5768, 2.3923, 2.8347, 1.2690], + device='cuda:3'), covar=tensor([0.2448, 0.4324, 0.2734, 0.1975, 0.1707, 0.2133, 0.1472, 0.4819], + device='cuda:3'), in_proj_covar=tensor([0.0552, 0.0667, 0.0745, 0.0504, 0.0633, 0.0544, 0.0665, 0.0568], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:39:27,056 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.758e+02 5.833e+02 7.524e+02 1.653e+03, threshold=1.167e+03, percent-clipped=2.0 +2023-04-03 08:39:42,281 INFO [train.py:903] (3/4) Epoch 27, batch 1150, loss[loss=0.2014, simple_loss=0.2886, pruned_loss=0.05712, over 18754.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05955, over 3833740.96 frames. ], batch size: 74, lr: 3.05e-03, grad_scale: 4.0 +2023-04-03 08:40:25,352 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178712.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:40:30,194 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178715.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:40:47,635 INFO [train.py:903] (3/4) Epoch 27, batch 1200, loss[loss=0.1664, simple_loss=0.2417, pruned_loss=0.04553, over 19757.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2832, pruned_loss=0.0599, over 3825597.69 frames. ], batch size: 45, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:41:04,868 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3023, 2.1712, 2.1078, 1.9269, 1.7211, 1.8624, 0.6870, 1.3295], + device='cuda:3'), covar=tensor([0.0652, 0.0685, 0.0517, 0.0947, 0.1296, 0.1075, 0.1480, 0.1186], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0367, 0.0370, 0.0394, 0.0474, 0.0398, 0.0348, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:41:18,726 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 08:41:38,085 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.626e+02 4.854e+02 6.177e+02 8.249e+02 1.593e+03, threshold=1.235e+03, percent-clipped=5.0 +2023-04-03 08:41:53,228 INFO [train.py:903] (3/4) Epoch 27, batch 1250, loss[loss=0.1723, simple_loss=0.2481, pruned_loss=0.04827, over 19358.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2818, pruned_loss=0.0592, over 3831710.34 frames. 
], batch size: 47, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:42:03,118 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4583, 2.4099, 2.2818, 2.5584, 2.3278, 2.0945, 2.1413, 2.4141], + device='cuda:3'), covar=tensor([0.0835, 0.1251, 0.1169, 0.0777, 0.1176, 0.0496, 0.1246, 0.0575], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0361, 0.0319, 0.0258, 0.0309, 0.0258, 0.0323, 0.0265], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 08:42:05,330 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:40,245 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:42:47,795 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3464, 1.9959, 1.5015, 1.3962, 1.8263, 1.2482, 1.3666, 1.7703], + device='cuda:3'), covar=tensor([0.0990, 0.0901, 0.1147, 0.0885, 0.0594, 0.1359, 0.0698, 0.0471], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0321, 0.0337, 0.0273, 0.0250, 0.0346, 0.0294, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:42:56,590 INFO [train.py:903] (3/4) Epoch 27, batch 1300, loss[loss=0.2138, simple_loss=0.2833, pruned_loss=0.07212, over 19732.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.0594, over 3825235.82 frames. ], batch size: 51, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:43:01,013 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 08:43:34,985 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=1.98 vs. limit=5.0 +2023-04-03 08:43:44,695 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 5.004e+02 6.015e+02 7.494e+02 1.649e+03, threshold=1.203e+03, percent-clipped=2.0 +2023-04-03 08:43:58,891 INFO [train.py:903] (3/4) Epoch 27, batch 1350, loss[loss=0.1898, simple_loss=0.2801, pruned_loss=0.04978, over 19789.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2822, pruned_loss=0.05932, over 3837503.23 frames. ], batch size: 56, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:44:11,444 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2597, 1.8075, 1.4947, 1.1609, 1.6214, 1.1586, 1.2477, 1.7364], + device='cuda:3'), covar=tensor([0.0800, 0.0830, 0.1069, 0.0997, 0.0667, 0.1352, 0.0670, 0.0419], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0322, 0.0338, 0.0274, 0.0251, 0.0347, 0.0295, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:44:41,959 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=178912.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:45:03,473 INFO [train.py:903] (3/4) Epoch 27, batch 1400, loss[loss=0.2123, simple_loss=0.2946, pruned_loss=0.06498, over 19531.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2822, pruned_loss=0.0592, over 3838155.32 frames. 
], batch size: 54, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:45:04,999 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=178929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:45:50,939 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.777e+02 6.195e+02 8.137e+02 1.607e+03, threshold=1.239e+03, percent-clipped=6.0 +2023-04-03 08:46:03,713 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 08:46:04,755 INFO [train.py:903] (3/4) Epoch 27, batch 1450, loss[loss=0.2224, simple_loss=0.2868, pruned_loss=0.07894, over 19740.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2832, pruned_loss=0.06, over 3835894.49 frames. ], batch size: 51, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:46:09,313 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=178981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:46:20,296 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 08:47:07,115 INFO [train.py:903] (3/4) Epoch 27, batch 1500, loss[loss=0.2303, simple_loss=0.3022, pruned_loss=0.07925, over 13516.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2828, pruned_loss=0.06005, over 3843080.45 frames. ], batch size: 135, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:47:42,378 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:47:45,651 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179059.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:47:54,377 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.273e+02 4.749e+02 5.669e+02 7.445e+02 1.551e+03, threshold=1.134e+03, percent-clipped=3.0 +2023-04-03 08:48:08,105 INFO [train.py:903] (3/4) Epoch 27, batch 1550, loss[loss=0.1962, simple_loss=0.2779, pruned_loss=0.05723, over 19595.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2845, pruned_loss=0.06122, over 3827189.41 frames. ], batch size: 52, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:48:32,743 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179096.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:12,296 INFO [train.py:903] (3/4) Epoch 27, batch 1600, loss[loss=0.1647, simple_loss=0.2446, pruned_loss=0.04235, over 19415.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.06008, over 3838834.23 frames. ], batch size: 48, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:49:18,229 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:49:37,308 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 08:49:59,054 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.135e+02 4.994e+02 6.186e+02 7.700e+02 1.707e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 08:50:06,443 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179171.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:09,903 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179174.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:50:14,086 INFO [train.py:903] (3/4) Epoch 27, batch 1650, loss[loss=0.2058, simple_loss=0.2903, pruned_loss=0.06066, over 19524.00 frames. 
], tot_loss[loss=0.2003, simple_loss=0.2815, pruned_loss=0.05949, over 3840785.64 frames. ], batch size: 54, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:50:23,752 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179185.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:50:55,268 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179210.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:17,033 INFO [train.py:903] (3/4) Epoch 27, batch 1700, loss[loss=0.1931, simple_loss=0.28, pruned_loss=0.05313, over 17458.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2812, pruned_loss=0.05939, over 3847026.52 frames. ], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:51:41,270 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:51:52,273 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179256.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:51:58,254 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 08:52:04,052 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 5.162e+02 6.407e+02 7.875e+02 2.392e+03, threshold=1.281e+03, percent-clipped=6.0 +2023-04-03 08:52:18,192 INFO [train.py:903] (3/4) Epoch 27, batch 1750, loss[loss=0.2022, simple_loss=0.2783, pruned_loss=0.06304, over 19755.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2828, pruned_loss=0.06042, over 3832476.77 frames. ], batch size: 51, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:13,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9820, 1.9435, 1.8898, 1.7796, 1.6267, 1.8078, 1.0124, 1.4309], + device='cuda:3'), covar=tensor([0.0534, 0.0642, 0.0447, 0.0709, 0.0990, 0.0838, 0.1302, 0.0884], + device='cuda:3'), in_proj_covar=tensor([0.0364, 0.0364, 0.0367, 0.0391, 0.0472, 0.0397, 0.0345, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 08:53:21,038 INFO [train.py:903] (3/4) Epoch 27, batch 1800, loss[loss=0.1676, simple_loss=0.2487, pruned_loss=0.04323, over 15944.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2831, pruned_loss=0.06081, over 3816333.86 frames. ], batch size: 35, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:53:21,459 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0042, 1.7193, 1.6212, 1.9053, 1.6343, 1.6599, 1.5149, 1.8561], + device='cuda:3'), covar=tensor([0.1081, 0.1436, 0.1531, 0.1054, 0.1393, 0.0610, 0.1616, 0.0762], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0359, 0.0318, 0.0256, 0.0307, 0.0257, 0.0320, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 08:53:51,557 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:08,233 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 4.924e+02 6.082e+02 7.444e+02 1.664e+03, threshold=1.216e+03, percent-clipped=4.0 +2023-04-03 08:54:16,177 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179371.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:54:21,557 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. 
Duration: 25.0944375 +2023-04-03 08:54:23,167 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:54:23,933 INFO [train.py:903] (3/4) Epoch 27, batch 1850, loss[loss=0.2184, simple_loss=0.3024, pruned_loss=0.06721, over 19612.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2833, pruned_loss=0.06052, over 3830710.91 frames. ], batch size: 61, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:54:25,271 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179379.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:00,548 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 08:55:21,046 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9535, 2.0015, 1.8682, 2.9129, 2.1398, 2.7152, 1.9983, 1.6897], + device='cuda:3'), covar=tensor([0.4735, 0.4460, 0.2884, 0.3418, 0.4692, 0.2566, 0.6419, 0.4982], + device='cuda:3'), in_proj_covar=tensor([0.0927, 0.1004, 0.0737, 0.0948, 0.0905, 0.0844, 0.0855, 0.0802], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 08:55:25,821 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179427.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:55:26,557 INFO [train.py:903] (3/4) Epoch 27, batch 1900, loss[loss=0.1977, simple_loss=0.2841, pruned_loss=0.0556, over 19713.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.0603, over 3836731.21 frames. ], batch size: 63, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:55:26,998 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1632, 1.4434, 1.8155, 1.0841, 2.4185, 3.0802, 2.7914, 3.2745], + device='cuda:3'), covar=tensor([0.1540, 0.3529, 0.3074, 0.2580, 0.0580, 0.0226, 0.0255, 0.0292], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0330, 0.0363, 0.0270, 0.0253, 0.0195, 0.0218, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 08:55:29,287 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179430.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:55:45,919 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 08:55:49,549 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 08:55:56,566 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179452.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:56:00,940 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179455.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 08:56:10,083 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179462.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:56:14,505 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.488e+02 5.214e+02 6.120e+02 7.486e+02 1.853e+03, threshold=1.224e+03, percent-clipped=1.0 +2023-04-03 08:56:15,775 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 08:56:28,553 INFO [train.py:903] (3/4) Epoch 27, batch 1950, loss[loss=0.1587, simple_loss=0.245, pruned_loss=0.03617, over 19473.00 frames. 
], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.06, over 3840168.00 frames. ], batch size: 49, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:02,060 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:57:13,666 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-04-03 08:57:31,829 INFO [train.py:903] (3/4) Epoch 27, batch 2000, loss[loss=0.2096, simple_loss=0.2947, pruned_loss=0.06221, over 19681.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05984, over 3847533.86 frames. ], batch size: 60, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:57:32,247 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179528.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 08:58:19,141 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.349e+02 5.376e+02 6.611e+02 8.547e+02 2.231e+03, threshold=1.322e+03, percent-clipped=11.0 +2023-04-03 08:58:33,829 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 08:58:34,814 INFO [train.py:903] (3/4) Epoch 27, batch 2050, loss[loss=0.212, simple_loss=0.2934, pruned_loss=0.06535, over 19521.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2842, pruned_loss=0.06062, over 3840942.26 frames. ], batch size: 56, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 08:58:53,466 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 08:58:54,673 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 08:59:14,222 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 08:59:37,263 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=179627.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 08:59:37,893 INFO [train.py:903] (3/4) Epoch 27, batch 2100, loss[loss=0.2421, simple_loss=0.3228, pruned_loss=0.0807, over 17147.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2838, pruned_loss=0.06012, over 3838532.15 frames. ], batch size: 101, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:03,485 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9181, 1.7066, 1.7470, 2.4482, 1.8783, 2.0929, 2.2082, 1.8933], + device='cuda:3'), covar=tensor([0.0741, 0.0906, 0.0958, 0.0651, 0.0829, 0.0832, 0.0865, 0.0704], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0224, 0.0227, 0.0239, 0.0226, 0.0213, 0.0188, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 09:00:07,552 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. 
Duration: 25.3333125 +2023-04-03 09:00:07,917 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=179652.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:00:10,155 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179654.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:00:25,319 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.609e+02 5.792e+02 6.874e+02 1.495e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-04-03 09:00:28,911 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 09:00:39,305 INFO [train.py:903] (3/4) Epoch 27, batch 2150, loss[loss=0.206, simple_loss=0.2975, pruned_loss=0.05728, over 19617.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.0606, over 3840684.77 frames. ], batch size: 67, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:00:42,472 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 09:01:37,657 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:01:43,430 INFO [train.py:903] (3/4) Epoch 27, batch 2200, loss[loss=0.217, simple_loss=0.299, pruned_loss=0.06748, over 19135.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06095, over 3841264.65 frames. ], batch size: 69, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:02:30,599 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.069e+02 5.235e+02 6.025e+02 7.394e+02 1.358e+03, threshold=1.205e+03, percent-clipped=1.0 +2023-04-03 09:02:46,342 INFO [train.py:903] (3/4) Epoch 27, batch 2250, loss[loss=0.2601, simple_loss=0.332, pruned_loss=0.0941, over 19665.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2838, pruned_loss=0.06066, over 3837932.57 frames. ], batch size: 58, lr: 3.05e-03, grad_scale: 8.0 +2023-04-03 09:03:21,828 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179806.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:03:33,123 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:03:50,157 INFO [train.py:903] (3/4) Epoch 27, batch 2300, loss[loss=0.192, simple_loss=0.2787, pruned_loss=0.05263, over 19699.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2849, pruned_loss=0.0611, over 3833340.73 frames. ], batch size: 59, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:04:02,183 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179838.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:04:03,018 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 09:04:38,194 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.173e+02 6.408e+02 8.011e+02 2.024e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 09:04:51,777 INFO [train.py:903] (3/4) Epoch 27, batch 2350, loss[loss=0.1798, simple_loss=0.2605, pruned_loss=0.04953, over 19626.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06142, over 3839121.28 frames. ], batch size: 50, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:05:34,683 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. 
Duration: 25.775 +2023-04-03 09:05:45,187 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=179921.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:05:48,595 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179924.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:05:50,590 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 09:05:53,778 INFO [train.py:903] (3/4) Epoch 27, batch 2400, loss[loss=0.2229, simple_loss=0.3106, pruned_loss=0.06766, over 19669.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2853, pruned_loss=0.06156, over 3827338.62 frames. ], batch size: 58, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:06:11,791 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=179942.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:06:41,355 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.340e+02 4.979e+02 6.417e+02 8.310e+02 2.182e+03, threshold=1.283e+03, percent-clipped=4.0 +2023-04-03 09:06:57,082 INFO [train.py:903] (3/4) Epoch 27, batch 2450, loss[loss=0.1798, simple_loss=0.2629, pruned_loss=0.04841, over 19864.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2858, pruned_loss=0.06211, over 3806890.51 frames. ], batch size: 52, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:07:21,329 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=179998.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:07:46,433 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-03 09:08:00,988 INFO [train.py:903] (3/4) Epoch 27, batch 2500, loss[loss=0.2365, simple_loss=0.3102, pruned_loss=0.08137, over 19596.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2852, pruned_loss=0.06174, over 3791629.43 frames. ], batch size: 61, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:08:19,354 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:08:47,565 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0538, 1.3409, 1.6450, 0.9682, 2.3017, 3.0805, 2.7739, 3.3003], + device='cuda:3'), covar=tensor([0.1680, 0.3868, 0.3464, 0.2811, 0.0647, 0.0222, 0.0271, 0.0298], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0331, 0.0364, 0.0270, 0.0254, 0.0196, 0.0220, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 09:08:48,295 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 4.714e+02 5.802e+02 6.784e+02 1.958e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 09:09:02,320 INFO [train.py:903] (3/4) Epoch 27, batch 2550, loss[loss=0.1687, simple_loss=0.2442, pruned_loss=0.04657, over 19789.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06096, over 3814244.18 frames. 
], batch size: 48, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:09:03,695 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7961, 4.2569, 4.4732, 4.4894, 1.7818, 4.2389, 3.6647, 4.1859], + device='cuda:3'), covar=tensor([0.1681, 0.1040, 0.0658, 0.0693, 0.5959, 0.1061, 0.0724, 0.1120], + device='cuda:3'), in_proj_covar=tensor([0.0817, 0.0781, 0.0989, 0.0867, 0.0865, 0.0750, 0.0582, 0.0915], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 09:09:22,052 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:47,093 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180113.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:53,124 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0934, 3.3460, 1.8637, 2.0246, 2.9951, 1.7322, 1.5450, 2.1742], + device='cuda:3'), covar=tensor([0.1428, 0.0708, 0.1211, 0.0937, 0.0626, 0.1342, 0.0998, 0.0753], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0321, 0.0338, 0.0273, 0.0250, 0.0344, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:09:54,432 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180119.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:09:58,713 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 09:10:04,541 INFO [train.py:903] (3/4) Epoch 27, batch 2600, loss[loss=0.2126, simple_loss=0.2977, pruned_loss=0.06378, over 19527.00 frames. ], tot_loss[loss=0.204, simple_loss=0.285, pruned_loss=0.06146, over 3797979.37 frames. ], batch size: 56, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:10:15,938 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0665, 1.8991, 1.7203, 2.0202, 1.7688, 1.7827, 1.6656, 1.9051], + device='cuda:3'), covar=tensor([0.0993, 0.1378, 0.1488, 0.1102, 0.1378, 0.0555, 0.1388, 0.0793], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0359, 0.0317, 0.0258, 0.0307, 0.0256, 0.0321, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:10:42,772 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:10:52,190 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 5.258e+02 6.240e+02 7.647e+02 1.495e+03, threshold=1.248e+03, percent-clipped=4.0 +2023-04-03 09:11:07,747 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180177.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:11:08,382 INFO [train.py:903] (3/4) Epoch 27, batch 2650, loss[loss=0.1735, simple_loss=0.2502, pruned_loss=0.04842, over 19755.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2847, pruned_loss=0.06146, over 3794598.91 frames. 
], batch size: 46, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:11:18,552 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6320, 4.2382, 2.6303, 3.6774, 0.8788, 4.2572, 4.0467, 4.1808], + device='cuda:3'), covar=tensor([0.0623, 0.0989, 0.2053, 0.0921, 0.4021, 0.0659, 0.0916, 0.1235], + device='cuda:3'), in_proj_covar=tensor([0.0528, 0.0427, 0.0511, 0.0358, 0.0407, 0.0455, 0.0447, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:11:29,719 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 09:11:38,057 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180202.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:11:59,670 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-03 09:12:04,724 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180223.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:12:11,766 INFO [train.py:903] (3/4) Epoch 27, batch 2700, loss[loss=0.226, simple_loss=0.3087, pruned_loss=0.07166, over 19757.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2852, pruned_loss=0.06141, over 3799561.13 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:12:33,808 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 09:12:39,112 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2878, 2.3755, 2.5492, 2.8913, 2.4046, 2.8029, 2.5547, 2.4333], + device='cuda:3'), covar=tensor([0.3569, 0.3293, 0.1658, 0.2076, 0.3433, 0.1868, 0.3811, 0.2608], + device='cuda:3'), in_proj_covar=tensor([0.0934, 0.1012, 0.0742, 0.0954, 0.0912, 0.0852, 0.0859, 0.0809], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 09:13:00,979 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.233e+02 6.139e+02 8.955e+02 2.009e+03, threshold=1.228e+03, percent-clipped=9.0 +2023-04-03 09:13:02,196 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180268.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:13:08,003 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:13:11,344 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4786, 2.1709, 1.6092, 1.4855, 1.9547, 1.3551, 1.3714, 1.8784], + device='cuda:3'), covar=tensor([0.1077, 0.0888, 0.1162, 0.0894, 0.0628, 0.1343, 0.0745, 0.0472], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0323, 0.0341, 0.0275, 0.0253, 0.0347, 0.0295, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:13:13,279 INFO [train.py:903] (3/4) Epoch 27, batch 2750, loss[loss=0.2115, simple_loss=0.2961, pruned_loss=0.06347, over 17314.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06068, over 3805290.07 frames. ], batch size: 101, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:13:20,962 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. 
limit=2.0 +2023-04-03 09:13:22,799 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180286.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:14:15,231 INFO [train.py:903] (3/4) Epoch 27, batch 2800, loss[loss=0.2106, simple_loss=0.2928, pruned_loss=0.06419, over 19759.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2825, pruned_loss=0.06023, over 3810329.67 frames. ], batch size: 63, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:14:49,797 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180354.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:04,816 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 4.838e+02 6.554e+02 8.151e+02 1.805e+03, threshold=1.311e+03, percent-clipped=3.0 +2023-04-03 09:15:08,577 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:18,466 INFO [train.py:903] (3/4) Epoch 27, batch 2850, loss[loss=0.2016, simple_loss=0.2887, pruned_loss=0.05721, over 19476.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2827, pruned_loss=0.06052, over 3803495.47 frames. ], batch size: 49, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:15:24,610 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-03 09:15:26,280 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180383.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:15:33,053 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:40,147 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:15:48,634 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180401.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:16:20,689 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 09:16:22,936 INFO [train.py:903] (3/4) Epoch 27, batch 2900, loss[loss=0.1739, simple_loss=0.264, pruned_loss=0.04188, over 19664.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2827, pruned_loss=0.06003, over 3826322.30 frames. ], batch size: 55, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:13,071 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.922e+02 6.355e+02 8.136e+02 1.738e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-04-03 09:17:15,679 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3120, 3.8298, 3.9512, 3.9556, 1.6533, 3.7591, 3.2621, 3.7296], + device='cuda:3'), covar=tensor([0.1801, 0.1036, 0.0682, 0.0792, 0.5768, 0.1054, 0.0786, 0.1126], + device='cuda:3'), in_proj_covar=tensor([0.0813, 0.0777, 0.0987, 0.0865, 0.0860, 0.0749, 0.0580, 0.0915], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 09:17:26,070 INFO [train.py:903] (3/4) Epoch 27, batch 2950, loss[loss=0.2113, simple_loss=0.2911, pruned_loss=0.06569, over 19683.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05977, over 3835327.56 frames. 
], batch size: 53, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:17:45,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8646, 1.9199, 2.2709, 1.9817, 2.8262, 3.2203, 3.1233, 3.3808], + device='cuda:3'), covar=tensor([0.1335, 0.3070, 0.2732, 0.2306, 0.1079, 0.0320, 0.0211, 0.0388], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0332, 0.0365, 0.0271, 0.0255, 0.0197, 0.0220, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 09:17:56,638 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180503.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:27,174 INFO [train.py:903] (3/4) Epoch 27, batch 3000, loss[loss=0.1861, simple_loss=0.2667, pruned_loss=0.0528, over 19751.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2836, pruned_loss=0.06001, over 3836321.14 frames. ], batch size: 51, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:18:27,174 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 09:18:39,750 INFO [train.py:937] (3/4) Epoch 27, validation: loss=0.1667, simple_loss=0.2664, pruned_loss=0.03355, over 944034.00 frames. +2023-04-03 09:18:39,751 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 09:18:41,291 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180529.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:18:43,443 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 09:19:12,395 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:29,346 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180567.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:19:30,287 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.587e+02 5.827e+02 7.595e+02 1.750e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 09:19:41,899 INFO [train.py:903] (3/4) Epoch 27, batch 3050, loss[loss=0.1906, simple_loss=0.2659, pruned_loss=0.0577, over 19764.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06044, over 3846895.35 frames. ], batch size: 48, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:19:54,664 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180588.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:20:44,771 INFO [train.py:903] (3/4) Epoch 27, batch 3100, loss[loss=0.219, simple_loss=0.3006, pruned_loss=0.06874, over 19661.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05975, over 3842595.69 frames. 
], batch size: 58, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:20:56,826 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4097, 1.5275, 1.7138, 1.6670, 2.4669, 2.1480, 2.6040, 1.0472], + device='cuda:3'), covar=tensor([0.2639, 0.4545, 0.2964, 0.2013, 0.1603, 0.2306, 0.1509, 0.5007], + device='cuda:3'), in_proj_covar=tensor([0.0551, 0.0668, 0.0749, 0.0505, 0.0633, 0.0545, 0.0668, 0.0569], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 09:20:59,825 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180639.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 09:21:20,360 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180657.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:29,225 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180664.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 09:21:33,518 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.719e+02 5.890e+02 7.652e+02 1.677e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-04-03 09:21:46,247 INFO [train.py:903] (3/4) Epoch 27, batch 3150, loss[loss=0.1942, simple_loss=0.2849, pruned_loss=0.05172, over 18126.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2825, pruned_loss=0.05954, over 3840324.30 frames. ], batch size: 84, lr: 3.04e-03, grad_scale: 4.0 +2023-04-03 09:21:52,951 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:21:52,990 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180682.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:11,325 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:22:15,854 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 09:22:49,869 INFO [train.py:903] (3/4) Epoch 27, batch 3200, loss[loss=0.2069, simple_loss=0.2857, pruned_loss=0.06403, over 19749.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.284, pruned_loss=0.06026, over 3833985.83 frames. ], batch size: 54, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:16,637 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180750.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:28,924 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180759.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:23:39,759 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.293e+02 5.255e+02 6.624e+02 9.042e+02 3.460e+03, threshold=1.325e+03, percent-clipped=12.0 +2023-04-03 09:23:51,461 INFO [train.py:903] (3/4) Epoch 27, batch 3250, loss[loss=0.1991, simple_loss=0.2808, pruned_loss=0.05865, over 19285.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06009, over 3836351.12 frames. 
], batch size: 66, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:23:59,371 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:36,461 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=180813.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:24:45,537 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.8151, 5.3060, 3.0383, 4.5627, 1.3376, 5.3896, 5.2348, 5.3606], + device='cuda:3'), covar=tensor([0.0396, 0.0837, 0.1938, 0.0854, 0.3800, 0.0519, 0.0822, 0.1138], + device='cuda:3'), in_proj_covar=tensor([0.0527, 0.0425, 0.0511, 0.0357, 0.0405, 0.0452, 0.0447, 0.0475], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:24:54,213 INFO [train.py:903] (3/4) Epoch 27, batch 3300, loss[loss=0.2338, simple_loss=0.3131, pruned_loss=0.07725, over 19398.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2843, pruned_loss=0.06083, over 3821070.97 frames. ], batch size: 66, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:25:00,020 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 09:25:07,333 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 09:25:13,339 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=180843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:25:43,899 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.181e+02 6.566e+02 8.708e+02 1.878e+03, threshold=1.313e+03, percent-clipped=7.0 +2023-04-03 09:25:56,111 INFO [train.py:903] (3/4) Epoch 27, batch 3350, loss[loss=0.2389, simple_loss=0.3129, pruned_loss=0.08242, over 13521.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.0607, over 3815239.40 frames. ], batch size: 136, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:26:51,579 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0106, 4.4060, 4.7234, 4.7890, 1.9215, 4.4409, 3.8727, 4.3940], + device='cuda:3'), covar=tensor([0.1750, 0.1048, 0.0682, 0.0666, 0.6023, 0.0979, 0.0726, 0.1389], + device='cuda:3'), in_proj_covar=tensor([0.0822, 0.0783, 0.0997, 0.0875, 0.0865, 0.0756, 0.0585, 0.0923], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 09:27:00,212 INFO [train.py:903] (3/4) Epoch 27, batch 3400, loss[loss=0.1771, simple_loss=0.27, pruned_loss=0.04205, over 19785.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2842, pruned_loss=0.06092, over 3828281.22 frames. ], batch size: 54, lr: 3.04e-03, grad_scale: 8.0 +2023-04-03 09:27:05,046 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=180932.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:11,994 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=180938.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:43,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=180963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:27:50,457 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.175e+02 4.823e+02 5.841e+02 8.287e+02 1.627e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 09:27:59,651 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-04-03 09:28:02,290 INFO [train.py:903] (3/4) Epoch 27, batch 3450, loss[loss=0.1926, simple_loss=0.2726, pruned_loss=0.05628, over 19617.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2846, pruned_loss=0.06122, over 3800007.88 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:28:06,935 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 09:28:18,944 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.60 vs. limit=5.0 +2023-04-03 09:29:04,452 INFO [train.py:903] (3/4) Epoch 27, batch 3500, loss[loss=0.2058, simple_loss=0.2935, pruned_loss=0.05909, over 17290.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.284, pruned_loss=0.06116, over 3806389.67 frames. ], batch size: 101, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:29:28,501 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181047.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:29:53,846 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.012e+02 5.177e+02 6.114e+02 8.226e+02 1.424e+03, threshold=1.223e+03, percent-clipped=6.0 +2023-04-03 09:29:55,530 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181069.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:06,527 INFO [train.py:903] (3/4) Epoch 27, batch 3550, loss[loss=0.2443, simple_loss=0.3262, pruned_loss=0.08118, over 19599.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06074, over 3826473.42 frames. ], batch size: 57, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:30:27,217 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:30:27,439 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181094.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:31:09,188 INFO [train.py:903] (3/4) Epoch 27, batch 3600, loss[loss=0.247, simple_loss=0.3376, pruned_loss=0.07819, over 19563.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2846, pruned_loss=0.06142, over 3826035.47 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:32:00,758 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.317e+02 4.881e+02 5.766e+02 7.270e+02 1.755e+03, threshold=1.153e+03, percent-clipped=5.0 +2023-04-03 09:32:12,503 INFO [train.py:903] (3/4) Epoch 27, batch 3650, loss[loss=0.2099, simple_loss=0.2959, pruned_loss=0.06195, over 19485.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2857, pruned_loss=0.06207, over 3821594.53 frames. 
], batch size: 64, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:32:24,261 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181187.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:32:33,466 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.4116, 4.0254, 2.8108, 3.6040, 1.0463, 4.0121, 3.8848, 3.9540], + device='cuda:3'), covar=tensor([0.0679, 0.1148, 0.1874, 0.0898, 0.3937, 0.0710, 0.0926, 0.1116], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0428, 0.0514, 0.0358, 0.0408, 0.0453, 0.0448, 0.0476], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:32:52,782 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:33:15,871 INFO [train.py:903] (3/4) Epoch 27, batch 3700, loss[loss=0.1534, simple_loss=0.2443, pruned_loss=0.03122, over 19621.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2846, pruned_loss=0.06134, over 3818803.62 frames. ], batch size: 50, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:06,266 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.101e+02 5.357e+02 6.375e+02 8.389e+02 2.807e+03, threshold=1.275e+03, percent-clipped=10.0 +2023-04-03 09:34:17,475 INFO [train.py:903] (3/4) Epoch 27, batch 3750, loss[loss=0.1807, simple_loss=0.2648, pruned_loss=0.04837, over 19773.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2857, pruned_loss=0.06233, over 3817353.29 frames. ], batch size: 54, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:34:48,314 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:34:49,639 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:04,207 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181315.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:20,116 INFO [train.py:903] (3/4) Epoch 27, batch 3800, loss[loss=0.1879, simple_loss=0.2589, pruned_loss=0.0585, over 19100.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2858, pruned_loss=0.06242, over 3799124.39 frames. ], batch size: 42, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:35:20,561 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181328.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:35:48,534 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 09:36:10,636 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.827e+02 5.403e+02 6.773e+02 8.761e+02 1.536e+03, threshold=1.355e+03, percent-clipped=8.0 +2023-04-03 09:36:22,053 INFO [train.py:903] (3/4) Epoch 27, batch 3850, loss[loss=0.1954, simple_loss=0.2772, pruned_loss=0.05682, over 19854.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2861, pruned_loss=0.06269, over 3804429.20 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:37:25,663 INFO [train.py:903] (3/4) Epoch 27, batch 3900, loss[loss=0.2129, simple_loss=0.2954, pruned_loss=0.06522, over 18759.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2855, pruned_loss=0.06189, over 3819858.17 frames. 
], batch size: 74, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:13,057 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181465.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:38:17,126 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.394e+02 5.123e+02 6.226e+02 8.139e+02 1.802e+03, threshold=1.245e+03, percent-clipped=4.0 +2023-04-03 09:38:28,446 INFO [train.py:903] (3/4) Epoch 27, batch 3950, loss[loss=0.2264, simple_loss=0.3106, pruned_loss=0.07111, over 19820.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2847, pruned_loss=0.06132, over 3824240.62 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:38:30,592 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 09:38:43,821 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181490.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:39:30,606 INFO [train.py:903] (3/4) Epoch 27, batch 4000, loss[loss=0.1641, simple_loss=0.2413, pruned_loss=0.04348, over 19759.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2841, pruned_loss=0.06096, over 3814727.73 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:39:55,687 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:08,375 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=181558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:40:14,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 09:40:21,508 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.200e+02 5.331e+02 6.990e+02 9.796e+02 2.756e+03, threshold=1.398e+03, percent-clipped=11.0 +2023-04-03 09:40:32,606 INFO [train.py:903] (3/4) Epoch 27, batch 4050, loss[loss=0.1689, simple_loss=0.2577, pruned_loss=0.04002, over 19850.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2839, pruned_loss=0.06099, over 3802618.73 frames. ], batch size: 52, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:40:38,826 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=181583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:41:35,049 INFO [train.py:903] (3/4) Epoch 27, batch 4100, loss[loss=0.2305, simple_loss=0.31, pruned_loss=0.07551, over 19654.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2845, pruned_loss=0.0614, over 3805778.24 frames. ], batch size: 60, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:42:07,712 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 09:42:13,760 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181659.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:42:26,970 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 5.142e+02 6.045e+02 7.647e+02 1.406e+03, threshold=1.209e+03, percent-clipped=1.0 +2023-04-03 09:42:35,660 INFO [train.py:903] (3/4) Epoch 27, batch 4150, loss[loss=0.1967, simple_loss=0.2809, pruned_loss=0.05629, over 19292.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2845, pruned_loss=0.06171, over 3819200.90 frames. 
], batch size: 66, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:07,931 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4765, 1.3502, 1.5056, 1.6043, 3.1129, 1.0931, 2.4707, 3.4615], + device='cuda:3'), covar=tensor([0.0505, 0.2758, 0.2857, 0.1676, 0.0680, 0.2436, 0.1117, 0.0259], + device='cuda:3'), in_proj_covar=tensor([0.0420, 0.0375, 0.0392, 0.0350, 0.0381, 0.0354, 0.0390, 0.0413], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:43:21,534 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3437, 1.3916, 1.5520, 1.4902, 1.7382, 1.8353, 1.7387, 0.5175], + device='cuda:3'), covar=tensor([0.2535, 0.4399, 0.2797, 0.2102, 0.1796, 0.2433, 0.1565, 0.5300], + device='cuda:3'), in_proj_covar=tensor([0.0553, 0.0669, 0.0751, 0.0506, 0.0635, 0.0547, 0.0670, 0.0571], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 09:43:38,702 INFO [train.py:903] (3/4) Epoch 27, batch 4200, loss[loss=0.1786, simple_loss=0.2627, pruned_loss=0.04725, over 19363.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2838, pruned_loss=0.06137, over 3822076.70 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:43:42,087 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 09:43:55,242 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4682, 1.3940, 1.6069, 1.5292, 3.0938, 1.1970, 2.4275, 3.4950], + device='cuda:3'), covar=tensor([0.0526, 0.2878, 0.2753, 0.1841, 0.0662, 0.2403, 0.1231, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0377, 0.0394, 0.0352, 0.0382, 0.0356, 0.0392, 0.0415], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:44:30,939 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.083e+02 6.482e+02 7.948e+02 1.533e+03, threshold=1.296e+03, percent-clipped=4.0 +2023-04-03 09:44:36,209 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=181774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:44:40,467 INFO [train.py:903] (3/4) Epoch 27, batch 4250, loss[loss=0.242, simple_loss=0.3222, pruned_loss=0.08096, over 19660.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2853, pruned_loss=0.06181, over 3822012.35 frames. ], batch size: 60, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:44:55,291 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 09:45:08,385 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 09:45:43,719 INFO [train.py:903] (3/4) Epoch 27, batch 4300, loss[loss=0.2165, simple_loss=0.2988, pruned_loss=0.06711, over 17194.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2847, pruned_loss=0.06125, over 3809706.71 frames. 
], batch size: 101, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:45:46,616 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9635, 3.6136, 2.4999, 3.2348, 0.9993, 3.5747, 3.4152, 3.5395], + device='cuda:3'), covar=tensor([0.0909, 0.1083, 0.1985, 0.0936, 0.3818, 0.0824, 0.1140, 0.1412], + device='cuda:3'), in_proj_covar=tensor([0.0527, 0.0427, 0.0512, 0.0357, 0.0406, 0.0452, 0.0448, 0.0477], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:46:16,690 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9468, 4.4462, 4.6636, 4.6452, 1.7606, 4.3573, 3.8150, 4.4165], + device='cuda:3'), covar=tensor([0.1758, 0.0776, 0.0617, 0.0709, 0.6165, 0.0949, 0.0710, 0.1054], + device='cuda:3'), in_proj_covar=tensor([0.0823, 0.0784, 0.0994, 0.0870, 0.0864, 0.0755, 0.0588, 0.0922], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 09:46:36,878 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 4.917e+02 6.450e+02 8.224e+02 1.543e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-04-03 09:46:40,243 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 09:46:47,394 INFO [train.py:903] (3/4) Epoch 27, batch 4350, loss[loss=0.2187, simple_loss=0.3082, pruned_loss=0.06457, over 19566.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2844, pruned_loss=0.06092, over 3813515.39 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 4.0 +2023-04-03 09:46:59,920 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3430, 3.0879, 2.2665, 2.7826, 0.8476, 3.0286, 2.9136, 2.9732], + device='cuda:3'), covar=tensor([0.1165, 0.1360, 0.2086, 0.1095, 0.3905, 0.1021, 0.1285, 0.1564], + device='cuda:3'), in_proj_covar=tensor([0.0528, 0.0428, 0.0513, 0.0358, 0.0408, 0.0454, 0.0450, 0.0479], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:47:05,494 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=181892.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:47:25,502 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-03 09:47:49,877 INFO [train.py:903] (3/4) Epoch 27, batch 4400, loss[loss=0.2016, simple_loss=0.2896, pruned_loss=0.05684, over 19588.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2853, pruned_loss=0.06148, over 3804970.14 frames. ], batch size: 61, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:14,994 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 09:48:24,191 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 09:48:42,706 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.593e+02 4.919e+02 6.507e+02 9.105e+02 1.976e+03, threshold=1.301e+03, percent-clipped=10.0 +2023-04-03 09:48:52,975 INFO [train.py:903] (3/4) Epoch 27, batch 4450, loss[loss=0.2254, simple_loss=0.3031, pruned_loss=0.0738, over 18192.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2853, pruned_loss=0.06137, over 3798980.92 frames. ], batch size: 83, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:48:53,729 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-03 09:48:59,282 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4363, 2.4823, 2.6827, 3.2634, 2.4622, 3.0590, 2.6512, 2.5370], + device='cuda:3'), covar=tensor([0.3932, 0.4033, 0.1787, 0.2421, 0.4216, 0.2131, 0.4579, 0.3198], + device='cuda:3'), in_proj_covar=tensor([0.0930, 0.1006, 0.0739, 0.0948, 0.0907, 0.0848, 0.0855, 0.0805], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 09:49:16,016 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=181996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:30,768 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:49:37,369 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0495, 3.6806, 2.6553, 3.2834, 1.0369, 3.6618, 3.5447, 3.6174], + device='cuda:3'), covar=tensor([0.0795, 0.1178, 0.1892, 0.0967, 0.3779, 0.0766, 0.1047, 0.1244], + device='cuda:3'), in_proj_covar=tensor([0.0528, 0.0427, 0.0513, 0.0357, 0.0407, 0.0453, 0.0449, 0.0477], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:49:56,877 INFO [train.py:903] (3/4) Epoch 27, batch 4500, loss[loss=0.2049, simple_loss=0.2721, pruned_loss=0.06886, over 19365.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2845, pruned_loss=0.06119, over 3812076.20 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:49:59,836 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182030.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:31,966 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:50:49,955 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.312e+02 5.059e+02 6.218e+02 7.785e+02 2.105e+03, threshold=1.244e+03, percent-clipped=5.0 +2023-04-03 09:51:00,199 INFO [train.py:903] (3/4) Epoch 27, batch 4550, loss[loss=0.1721, simple_loss=0.2499, pruned_loss=0.04715, over 19770.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.284, pruned_loss=0.06074, over 3815519.32 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:51:09,766 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 09:51:32,197 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 09:51:35,759 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9073, 1.3359, 1.0676, 0.9826, 1.1718, 1.0136, 1.0051, 1.2635], + device='cuda:3'), covar=tensor([0.0601, 0.0949, 0.1185, 0.0803, 0.0644, 0.1339, 0.0603, 0.0518], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0321, 0.0337, 0.0273, 0.0251, 0.0345, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:52:02,910 INFO [train.py:903] (3/4) Epoch 27, batch 4600, loss[loss=0.1587, simple_loss=0.2422, pruned_loss=0.03759, over 19622.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.284, pruned_loss=0.06084, over 3820591.33 frames. 
], batch size: 50, lr: 3.03e-03, grad_scale: 8.0 +2023-04-03 09:52:54,752 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.336e+02 4.830e+02 5.724e+02 7.323e+02 1.391e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 09:53:05,187 INFO [train.py:903] (3/4) Epoch 27, batch 4650, loss[loss=0.1852, simple_loss=0.258, pruned_loss=0.05616, over 19752.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2841, pruned_loss=0.06104, over 3819462.38 frames. ], batch size: 45, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:53:22,611 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 09:53:34,165 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 09:53:57,765 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5173, 1.7024, 2.0104, 1.8454, 3.1132, 2.5873, 3.4413, 1.8236], + device='cuda:3'), covar=tensor([0.2655, 0.4496, 0.2962, 0.2023, 0.1585, 0.2222, 0.1556, 0.4238], + device='cuda:3'), in_proj_covar=tensor([0.0552, 0.0667, 0.0751, 0.0505, 0.0636, 0.0544, 0.0668, 0.0570], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 09:54:07,704 INFO [train.py:903] (3/4) Epoch 27, batch 4700, loss[loss=0.2134, simple_loss=0.2988, pruned_loss=0.06394, over 19527.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2835, pruned_loss=0.06077, over 3813162.37 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:54:30,872 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 09:54:51,539 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182263.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:54:59,120 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.994e+02 4.730e+02 5.895e+02 7.660e+02 1.174e+03, threshold=1.179e+03, percent-clipped=2.0 +2023-04-03 09:55:10,410 INFO [train.py:903] (3/4) Epoch 27, batch 4750, loss[loss=0.1842, simple_loss=0.2604, pruned_loss=0.05399, over 19781.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2831, pruned_loss=0.06062, over 3820256.78 frames. ], batch size: 47, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:55:22,536 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182288.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:55:36,026 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182299.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:12,222 INFO [train.py:903] (3/4) Epoch 27, batch 4800, loss[loss=0.2278, simple_loss=0.3109, pruned_loss=0.07233, over 19686.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2832, pruned_loss=0.06046, over 3824322.74 frames. ], batch size: 60, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:56:26,909 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182340.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:56:27,636 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-03 09:56:42,893 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0896, 2.0320, 1.7751, 2.0949, 1.8666, 1.8188, 1.8201, 2.0701], + device='cuda:3'), covar=tensor([0.1106, 0.1398, 0.1530, 0.1086, 0.1401, 0.0575, 0.1493, 0.0705], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0257, 0.0307, 0.0256, 0.0321, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 09:57:03,560 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.862e+02 5.780e+02 7.243e+02 1.108e+03, threshold=1.156e+03, percent-clipped=0.0 +2023-04-03 09:57:13,593 INFO [train.py:903] (3/4) Epoch 27, batch 4850, loss[loss=0.1537, simple_loss=0.2352, pruned_loss=0.03614, over 19777.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2831, pruned_loss=0.06058, over 3835318.07 frames. ], batch size: 47, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:57:36,901 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 09:57:58,319 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 09:57:58,638 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9602, 4.4833, 4.6981, 4.6875, 1.9240, 4.3896, 3.8185, 4.4409], + device='cuda:3'), covar=tensor([0.1802, 0.0797, 0.0586, 0.0730, 0.6024, 0.0882, 0.0660, 0.1051], + device='cuda:3'), in_proj_covar=tensor([0.0819, 0.0782, 0.0990, 0.0869, 0.0860, 0.0753, 0.0587, 0.0919], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 09:58:03,901 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 09:58:03,922 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 09:58:13,220 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 09:58:14,425 INFO [train.py:903] (3/4) Epoch 27, batch 4900, loss[loss=0.2085, simple_loss=0.2988, pruned_loss=0.05916, over 19532.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06053, over 3834237.38 frames. ], batch size: 64, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:58:34,876 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 09:58:50,308 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182455.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 09:59:07,215 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.920e+02 4.786e+02 5.871e+02 7.388e+02 1.622e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 09:59:18,868 INFO [train.py:903] (3/4) Epoch 27, batch 4950, loss[loss=0.2011, simple_loss=0.2842, pruned_loss=0.05897, over 19565.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.06026, over 3824293.81 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 09:59:36,547 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 10:00:00,867 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. 
Duration: 0.92 +2023-04-03 10:00:21,847 INFO [train.py:903] (3/4) Epoch 27, batch 5000, loss[loss=0.1969, simple_loss=0.2844, pruned_loss=0.05468, over 19387.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2842, pruned_loss=0.06057, over 3826368.01 frames. ], batch size: 70, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:00:27,404 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182532.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:00:32,542 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 10:00:44,435 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 10:01:15,154 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.165e+02 4.878e+02 5.976e+02 7.448e+02 1.686e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 10:01:25,320 INFO [train.py:903] (3/4) Epoch 27, batch 5050, loss[loss=0.1736, simple_loss=0.2493, pruned_loss=0.04898, over 19744.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2841, pruned_loss=0.06033, over 3830675.49 frames. ], batch size: 46, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:03,643 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 10:02:27,488 INFO [train.py:903] (3/4) Epoch 27, batch 5100, loss[loss=0.1955, simple_loss=0.2798, pruned_loss=0.05558, over 19519.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2842, pruned_loss=0.06022, over 3826549.63 frames. ], batch size: 64, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:02:31,202 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8222, 4.3018, 4.6065, 4.5596, 1.8018, 4.2872, 3.7377, 4.3381], + device='cuda:3'), covar=tensor([0.1765, 0.1206, 0.0590, 0.0691, 0.6173, 0.1161, 0.0706, 0.1068], + device='cuda:3'), in_proj_covar=tensor([0.0811, 0.0777, 0.0982, 0.0864, 0.0854, 0.0746, 0.0582, 0.0912], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 10:02:37,760 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 10:02:41,996 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 10:02:46,622 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 10:02:46,783 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182643.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:03:19,756 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.110e+02 6.408e+02 8.268e+02 2.195e+03, threshold=1.282e+03, percent-clipped=9.0 +2023-04-03 10:03:30,279 INFO [train.py:903] (3/4) Epoch 27, batch 5150, loss[loss=0.1962, simple_loss=0.2857, pruned_loss=0.05337, over 19658.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2856, pruned_loss=0.06106, over 3817776.00 frames. ], batch size: 58, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:03:44,261 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 10:04:12,440 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=182711.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:21,099 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:04:24,623 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1104, 5.2057, 5.9561, 5.9815, 2.1782, 5.5897, 4.7621, 5.6066], + device='cuda:3'), covar=tensor([0.1762, 0.0816, 0.0571, 0.0568, 0.6200, 0.0822, 0.0650, 0.1200], + device='cuda:3'), in_proj_covar=tensor([0.0812, 0.0779, 0.0984, 0.0867, 0.0856, 0.0748, 0.0583, 0.0913], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 10:04:26,014 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5560, 2.2961, 1.7450, 1.6117, 2.1184, 1.4230, 1.4777, 1.9168], + device='cuda:3'), covar=tensor([0.1184, 0.0776, 0.1092, 0.0874, 0.0616, 0.1276, 0.0778, 0.0581], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0319, 0.0335, 0.0271, 0.0250, 0.0343, 0.0291, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:04:34,479 INFO [train.py:903] (3/4) Epoch 27, batch 5200, loss[loss=0.2213, simple_loss=0.3136, pruned_loss=0.06452, over 19615.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2846, pruned_loss=0.06055, over 3813713.17 frames. ], batch size: 57, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:04:45,076 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=182736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:04:50,584 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 10:05:11,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:05:19,100 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-03 10:05:28,498 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.726e+02 5.796e+02 7.282e+02 1.371e+03, threshold=1.159e+03, percent-clipped=1.0 +2023-04-03 10:05:35,278 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 10:05:37,678 INFO [train.py:903] (3/4) Epoch 27, batch 5250, loss[loss=0.2052, simple_loss=0.2989, pruned_loss=0.05571, over 19657.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2846, pruned_loss=0.06037, over 3802463.02 frames. ], batch size: 55, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:05:47,603 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. limit=2.0 +2023-04-03 10:05:51,929 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1553, 2.0413, 1.8839, 1.7208, 1.6926, 1.6877, 0.6250, 1.0747], + device='cuda:3'), covar=tensor([0.0641, 0.0718, 0.0544, 0.0886, 0.1202, 0.1002, 0.1464, 0.1184], + device='cuda:3'), in_proj_covar=tensor([0.0365, 0.0363, 0.0370, 0.0393, 0.0472, 0.0397, 0.0347, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 10:06:39,518 INFO [train.py:903] (3/4) Epoch 27, batch 5300, loss[loss=0.1878, simple_loss=0.2536, pruned_loss=0.06105, over 19749.00 frames. 
], tot_loss[loss=0.203, simple_loss=0.285, pruned_loss=0.06055, over 3806253.39 frames. ], batch size: 46, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:06:57,248 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.64 vs. limit=5.0 +2023-04-03 10:06:57,448 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 10:06:59,529 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.08 vs. limit=5.0 +2023-04-03 10:07:34,149 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.440e+02 4.985e+02 5.848e+02 7.587e+02 2.195e+03, threshold=1.170e+03, percent-clipped=4.0 +2023-04-03 10:07:37,805 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:39,975 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=182876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:07:42,160 INFO [train.py:903] (3/4) Epoch 27, batch 5350, loss[loss=0.1943, simple_loss=0.2773, pruned_loss=0.05563, over 19532.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2848, pruned_loss=0.06052, over 3799124.15 frames. ], batch size: 54, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:07:44,851 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=182880.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:08:16,938 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 10:08:46,436 INFO [train.py:903] (3/4) Epoch 27, batch 5400, loss[loss=0.214, simple_loss=0.2971, pruned_loss=0.06547, over 19529.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.284, pruned_loss=0.06023, over 3816814.31 frames. ], batch size: 56, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:09:41,328 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.624e+02 4.932e+02 6.009e+02 7.341e+02 1.388e+03, threshold=1.202e+03, percent-clipped=2.0 +2023-04-03 10:09:49,115 INFO [train.py:903] (3/4) Epoch 27, batch 5450, loss[loss=0.1798, simple_loss=0.2644, pruned_loss=0.04759, over 19665.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05969, over 3808732.60 frames. ], batch size: 53, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:10:04,451 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=182991.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:35,569 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183014.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:10:40,368 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-04-03 10:10:51,004 INFO [train.py:903] (3/4) Epoch 27, batch 5500, loss[loss=0.2157, simple_loss=0.3068, pruned_loss=0.06235, over 19663.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2831, pruned_loss=0.05993, over 3814225.77 frames. ], batch size: 55, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:05,227 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183039.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:11:14,836 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 10:11:33,638 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. 
limit=5.0 +2023-04-03 10:11:45,355 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.090e+02 5.032e+02 6.184e+02 7.491e+02 1.557e+03, threshold=1.237e+03, percent-clipped=5.0 +2023-04-03 10:11:52,113 INFO [train.py:903] (3/4) Epoch 27, batch 5550, loss[loss=0.2129, simple_loss=0.2895, pruned_loss=0.06819, over 19574.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06015, over 3813067.71 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 4.0 +2023-04-03 10:11:52,643 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5231, 1.5142, 1.7107, 1.7222, 2.2904, 2.2201, 2.3590, 0.9621], + device='cuda:3'), covar=tensor([0.2538, 0.4576, 0.2883, 0.2056, 0.1578, 0.2289, 0.1427, 0.4904], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0672, 0.0758, 0.0509, 0.0638, 0.0548, 0.0673, 0.0575], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:11:57,905 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 10:12:46,680 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 10:12:56,191 INFO [train.py:903] (3/4) Epoch 27, batch 5600, loss[loss=0.2522, simple_loss=0.3248, pruned_loss=0.08981, over 19696.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06003, over 3812053.32 frames. ], batch size: 63, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:13:48,464 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0008, 2.0644, 2.3336, 2.5941, 2.0483, 2.4846, 2.2909, 2.1645], + device='cuda:3'), covar=tensor([0.3951, 0.3768, 0.1745, 0.2300, 0.3785, 0.1989, 0.4519, 0.3131], + device='cuda:3'), in_proj_covar=tensor([0.0933, 0.1012, 0.0742, 0.0950, 0.0910, 0.0849, 0.0859, 0.0807], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:13:51,541 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.195e+02 4.790e+02 6.069e+02 7.863e+02 1.689e+03, threshold=1.214e+03, percent-clipped=3.0 +2023-04-03 10:13:56,159 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.9735, 2.9146, 2.6790, 2.9420, 2.7890, 2.6710, 2.5674, 2.9372], + device='cuda:3'), covar=tensor([0.0851, 0.1415, 0.1260, 0.1019, 0.1240, 0.0468, 0.1237, 0.0598], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0357, 0.0317, 0.0257, 0.0306, 0.0256, 0.0318, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:13:59,319 INFO [train.py:903] (3/4) Epoch 27, batch 5650, loss[loss=0.1834, simple_loss=0.2691, pruned_loss=0.04884, over 19534.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06028, over 3807049.94 frames. 
], batch size: 54, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:14:35,068 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7229, 1.7523, 1.6809, 1.4240, 1.4436, 1.4532, 0.2648, 0.7074], + device='cuda:3'), covar=tensor([0.0719, 0.0661, 0.0467, 0.0738, 0.1353, 0.0822, 0.1457, 0.1229], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0364, 0.0372, 0.0394, 0.0472, 0.0398, 0.0347, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 10:14:36,254 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4517, 1.5659, 1.8079, 1.6824, 2.7636, 2.2032, 2.9361, 1.4851], + device='cuda:3'), covar=tensor([0.2702, 0.4722, 0.3069, 0.2202, 0.1592, 0.2432, 0.1465, 0.4635], + device='cuda:3'), in_proj_covar=tensor([0.0555, 0.0672, 0.0758, 0.0510, 0.0639, 0.0548, 0.0673, 0.0577], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:14:44,836 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 10:14:49,514 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183218.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:14:56,365 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:01,000 INFO [train.py:903] (3/4) Epoch 27, batch 5700, loss[loss=0.1652, simple_loss=0.2471, pruned_loss=0.04163, over 19609.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06038, over 3806645.13 frames. ], batch size: 50, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:15:25,157 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183247.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:54,499 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.103e+02 5.954e+02 7.593e+02 1.308e+03, threshold=1.191e+03, percent-clipped=1.0 +2023-04-03 10:15:54,976 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:15:59,205 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 10:16:01,598 INFO [train.py:903] (3/4) Epoch 27, batch 5750, loss[loss=0.1997, simple_loss=0.28, pruned_loss=0.0597, over 19578.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06042, over 3823645.41 frames. ], batch size: 52, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:16:08,285 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 10:16:13,692 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 10:17:05,409 INFO [train.py:903] (3/4) Epoch 27, batch 5800, loss[loss=0.2069, simple_loss=0.29, pruned_loss=0.06189, over 19315.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2835, pruned_loss=0.06064, over 3840255.16 frames. 
], batch size: 66, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:17:12,709 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183333.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:19,497 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:17:59,580 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 5.364e+02 7.027e+02 9.052e+02 1.891e+03, threshold=1.405e+03, percent-clipped=4.0 +2023-04-03 10:18:07,742 INFO [train.py:903] (3/4) Epoch 27, batch 5850, loss[loss=0.2089, simple_loss=0.2933, pruned_loss=0.06223, over 19703.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2843, pruned_loss=0.06099, over 3833316.09 frames. ], batch size: 59, lr: 3.02e-03, grad_scale: 8.0 +2023-04-03 10:18:12,169 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 10:18:12,952 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4839, 2.6827, 2.2199, 2.6472, 2.4790, 2.2089, 2.2124, 2.4931], + device='cuda:3'), covar=tensor([0.1032, 0.1377, 0.1372, 0.1009, 0.1239, 0.0527, 0.1299, 0.0686], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0355, 0.0316, 0.0256, 0.0305, 0.0255, 0.0317, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:18:31,484 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 10:18:46,969 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0480, 2.1688, 2.4036, 2.6983, 2.1103, 2.5998, 2.3717, 2.2164], + device='cuda:3'), covar=tensor([0.4252, 0.3870, 0.1924, 0.2492, 0.4175, 0.2178, 0.4950, 0.3314], + device='cuda:3'), in_proj_covar=tensor([0.0930, 0.1008, 0.0740, 0.0946, 0.0907, 0.0847, 0.0858, 0.0803], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:19:07,136 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:19:08,989 INFO [train.py:903] (3/4) Epoch 27, batch 5900, loss[loss=0.1921, simple_loss=0.2863, pruned_loss=0.04899, over 19707.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.0601, over 3838838.00 frames. ], batch size: 59, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:19:09,033 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 10:19:32,072 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 10:20:03,306 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 4.868e+02 5.663e+02 7.388e+02 1.454e+03, threshold=1.133e+03, percent-clipped=1.0 +2023-04-03 10:20:10,243 INFO [train.py:903] (3/4) Epoch 27, batch 5950, loss[loss=0.2151, simple_loss=0.2986, pruned_loss=0.0658, over 19466.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06048, over 3844486.09 frames. ], batch size: 64, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,623 INFO [train.py:903] (3/4) Epoch 27, batch 6000, loss[loss=0.1761, simple_loss=0.2587, pruned_loss=0.04674, over 19720.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2823, pruned_loss=0.05993, over 3851891.39 frames. 
], batch size: 51, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:21:12,624 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 10:21:25,588 INFO [train.py:937] (3/4) Epoch 27, validation: loss=0.1675, simple_loss=0.2669, pruned_loss=0.03401, over 944034.00 frames. +2023-04-03 10:21:25,589 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 10:21:39,339 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 10:22:22,442 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.170e+02 5.376e+02 6.105e+02 7.773e+02 1.848e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 10:22:28,409 INFO [train.py:903] (3/4) Epoch 27, batch 6050, loss[loss=0.2794, simple_loss=0.3366, pruned_loss=0.1111, over 13105.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2823, pruned_loss=0.05989, over 3841937.93 frames. ], batch size: 136, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:22:43,124 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183589.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:50,964 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=183595.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:22:55,539 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0399, 3.2567, 1.8606, 2.0381, 3.0035, 1.6331, 1.3884, 2.1960], + device='cuda:3'), covar=tensor([0.1458, 0.0646, 0.1305, 0.0942, 0.0543, 0.1428, 0.1144, 0.0783], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0319, 0.0336, 0.0273, 0.0250, 0.0344, 0.0292, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:23:08,789 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-03 10:23:14,128 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:20,836 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=183620.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:23:29,936 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.67 vs. limit=5.0 +2023-04-03 10:23:31,577 INFO [train.py:903] (3/4) Epoch 27, batch 6100, loss[loss=0.1829, simple_loss=0.2601, pruned_loss=0.05287, over 18703.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2827, pruned_loss=0.06013, over 3838379.78 frames. 
], batch size: 41, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:23:37,192 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:24:00,157 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7747, 1.9235, 2.1841, 2.2582, 1.7619, 2.1815, 2.1329, 1.9595], + device='cuda:3'), covar=tensor([0.4200, 0.3595, 0.2014, 0.2344, 0.3810, 0.2168, 0.5008, 0.3415], + device='cuda:3'), in_proj_covar=tensor([0.0935, 0.1011, 0.0743, 0.0950, 0.0912, 0.0850, 0.0862, 0.0805], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:24:27,763 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.311e+02 5.176e+02 6.035e+02 7.763e+02 1.396e+03, threshold=1.207e+03, percent-clipped=4.0 +2023-04-03 10:24:28,029 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9160, 4.4592, 2.8985, 3.8687, 0.8701, 4.4662, 4.2765, 4.4296], + device='cuda:3'), covar=tensor([0.0615, 0.1079, 0.1899, 0.0893, 0.4319, 0.0666, 0.0989, 0.1163], + device='cuda:3'), in_proj_covar=tensor([0.0527, 0.0428, 0.0516, 0.0357, 0.0410, 0.0454, 0.0451, 0.0479], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:24:33,565 INFO [train.py:903] (3/4) Epoch 27, batch 6150, loss[loss=0.1812, simple_loss=0.2733, pruned_loss=0.04456, over 19781.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.282, pruned_loss=0.05952, over 3846959.56 frames. ], batch size: 56, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:24:39,685 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0472, 3.6954, 2.6870, 3.2978, 0.9149, 3.6802, 3.5425, 3.6058], + device='cuda:3'), covar=tensor([0.0829, 0.1287, 0.1928, 0.0999, 0.4162, 0.0769, 0.1029, 0.1231], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0429, 0.0517, 0.0357, 0.0411, 0.0455, 0.0452, 0.0480], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:24:53,484 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9452, 2.0979, 2.2132, 2.1163, 3.6601, 1.7612, 3.0093, 3.7461], + device='cuda:3'), covar=tensor([0.0462, 0.2276, 0.2345, 0.1690, 0.0577, 0.2247, 0.1524, 0.0255], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0379, 0.0397, 0.0353, 0.0385, 0.0358, 0.0396, 0.0416], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:25:01,288 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 10:25:10,556 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2096, 1.2977, 1.7120, 1.1564, 2.5548, 3.4125, 3.1345, 3.5609], + device='cuda:3'), covar=tensor([0.1560, 0.3906, 0.3389, 0.2633, 0.0634, 0.0202, 0.0216, 0.0288], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0334, 0.0365, 0.0273, 0.0255, 0.0197, 0.0220, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 10:25:34,995 INFO [train.py:903] (3/4) Epoch 27, batch 6200, loss[loss=0.2408, simple_loss=0.3265, pruned_loss=0.07749, over 19731.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2825, pruned_loss=0.05985, over 3839723.24 frames. 
], batch size: 63, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:26:22,170 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 10:26:28,522 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183770.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:26:28,796 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2187, 1.2860, 1.2464, 1.0595, 1.1040, 1.1265, 0.0867, 0.3848], + device='cuda:3'), covar=tensor([0.0722, 0.0701, 0.0495, 0.0639, 0.1358, 0.0674, 0.1470, 0.1213], + device='cuda:3'), in_proj_covar=tensor([0.0362, 0.0358, 0.0366, 0.0388, 0.0466, 0.0391, 0.0342, 0.0345], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 10:26:31,934 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.388e+02 4.839e+02 5.826e+02 7.637e+02 1.855e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 10:26:37,746 INFO [train.py:903] (3/4) Epoch 27, batch 6250, loss[loss=0.201, simple_loss=0.2847, pruned_loss=0.0587, over 19659.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.283, pruned_loss=0.06042, over 3834207.92 frames. ], batch size: 55, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:27:08,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 10:27:40,817 INFO [train.py:903] (3/4) Epoch 27, batch 6300, loss[loss=0.2054, simple_loss=0.2827, pruned_loss=0.06402, over 19402.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.283, pruned_loss=0.06089, over 3823758.32 frames. ], batch size: 48, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:20,290 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=183860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:28:36,672 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.559e+02 4.928e+02 5.882e+02 7.199e+02 1.705e+03, threshold=1.176e+03, percent-clipped=2.0 +2023-04-03 10:28:43,614 INFO [train.py:903] (3/4) Epoch 27, batch 6350, loss[loss=0.1691, simple_loss=0.2648, pruned_loss=0.03664, over 19601.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2832, pruned_loss=0.06086, over 3807866.42 frames. ], batch size: 57, lr: 3.01e-03, grad_scale: 4.0 +2023-04-03 10:28:51,874 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=183885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:29:43,978 INFO [train.py:903] (3/4) Epoch 27, batch 6400, loss[loss=0.1847, simple_loss=0.2673, pruned_loss=0.05109, over 19719.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2827, pruned_loss=0.06046, over 3806464.04 frames. 
], batch size: 51, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:30:21,983 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1801, 5.2439, 5.9825, 5.9923, 2.0161, 5.6222, 4.7840, 5.7107], + device='cuda:3'), covar=tensor([0.1777, 0.0773, 0.0603, 0.0618, 0.6277, 0.0858, 0.0680, 0.1137], + device='cuda:3'), in_proj_covar=tensor([0.0823, 0.0786, 0.0995, 0.0875, 0.0867, 0.0757, 0.0589, 0.0926], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 10:30:39,966 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.757e+02 6.035e+02 7.575e+02 1.901e+03, threshold=1.207e+03, percent-clipped=7.0 +2023-04-03 10:30:43,705 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=183976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:30:45,870 INFO [train.py:903] (3/4) Epoch 27, batch 6450, loss[loss=0.1617, simple_loss=0.2438, pruned_loss=0.03977, over 19311.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2835, pruned_loss=0.06088, over 3811255.41 frames. ], batch size: 44, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:28,476 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 10:31:50,531 INFO [train.py:903] (3/4) Epoch 27, batch 6500, loss[loss=0.1558, simple_loss=0.2347, pruned_loss=0.03845, over 19728.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06082, over 3800610.73 frames. ], batch size: 46, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:31:52,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 10:32:40,180 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:32:46,400 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.542e+02 5.565e+02 7.286e+02 1.442e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-04-03 10:32:53,095 INFO [train.py:903] (3/4) Epoch 27, batch 6550, loss[loss=0.1772, simple_loss=0.2585, pruned_loss=0.04798, over 19803.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06049, over 3815977.26 frames. ], batch size: 49, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:33:08,717 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184091.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:33:47,754 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 10:33:55,001 INFO [train.py:903] (3/4) Epoch 27, batch 6600, loss[loss=0.2224, simple_loss=0.3084, pruned_loss=0.06827, over 18296.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2825, pruned_loss=0.06013, over 3820296.17 frames. 
], batch size: 83, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:34:11,923 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:43,118 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:34:50,799 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.027e+02 4.833e+02 5.798e+02 7.178e+02 1.551e+03, threshold=1.160e+03, percent-clipped=2.0 +2023-04-03 10:34:58,097 INFO [train.py:903] (3/4) Epoch 27, batch 6650, loss[loss=0.2044, simple_loss=0.2912, pruned_loss=0.05878, over 18896.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05956, over 3831896.31 frames. ], batch size: 74, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:35:30,804 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184204.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:35:32,185 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1497, 1.2513, 1.7270, 1.2324, 2.5547, 3.3537, 3.0559, 3.5691], + device='cuda:3'), covar=tensor([0.1734, 0.4092, 0.3517, 0.2759, 0.0641, 0.0225, 0.0240, 0.0270], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0331, 0.0362, 0.0271, 0.0253, 0.0195, 0.0219, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 10:35:56,593 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-03 10:35:59,888 INFO [train.py:903] (3/4) Epoch 27, batch 6700, loss[loss=0.2144, simple_loss=0.2996, pruned_loss=0.06466, over 18775.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2837, pruned_loss=0.06022, over 3833560.75 frames. ], batch size: 74, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:36:02,643 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-03 10:36:52,206 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.305e+02 5.122e+02 6.084e+02 8.596e+02 2.606e+03, threshold=1.217e+03, percent-clipped=9.0 +2023-04-03 10:36:57,982 INFO [train.py:903] (3/4) Epoch 27, batch 6750, loss[loss=0.2098, simple_loss=0.2951, pruned_loss=0.06229, over 17542.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2836, pruned_loss=0.06053, over 3831349.17 frames. ], batch size: 101, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:37:44,310 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:37:54,072 INFO [train.py:903] (3/4) Epoch 27, batch 6800, loss[loss=0.1987, simple_loss=0.2849, pruned_loss=0.05624, over 19652.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06199, over 3811059.87 frames. 
], batch size: 55, lr: 3.01e-03, grad_scale: 8.0 +2023-04-03 10:38:15,913 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184347.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:38:16,784 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8668, 4.0084, 4.4220, 4.4273, 2.7307, 4.1038, 3.7850, 4.1942], + device='cuda:3'), covar=tensor([0.1436, 0.3173, 0.0614, 0.0743, 0.4399, 0.1384, 0.0644, 0.1035], + device='cuda:3'), in_proj_covar=tensor([0.0827, 0.0787, 0.1000, 0.0878, 0.0869, 0.0762, 0.0590, 0.0930], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 10:38:39,138 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 10:38:40,151 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 10:38:43,606 INFO [train.py:903] (3/4) Epoch 28, batch 0, loss[loss=0.2165, simple_loss=0.302, pruned_loss=0.06546, over 19583.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.302, pruned_loss=0.06546, over 19583.00 frames. ], batch size: 61, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:38:43,606 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 10:38:54,480 INFO [train.py:937] (3/4) Epoch 28, validation: loss=0.1665, simple_loss=0.2666, pruned_loss=0.03316, over 944034.00 frames. +2023-04-03 10:38:54,481 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 10:39:08,338 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 10:39:14,457 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184372.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:15,175 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.715e+02 5.190e+02 6.304e+02 8.212e+02 1.288e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-04-03 10:39:21,680 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:39:42,083 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4240, 1.4514, 1.8504, 1.7066, 2.5797, 2.1429, 2.7167, 1.1755], + device='cuda:3'), covar=tensor([0.2893, 0.4937, 0.3087, 0.2202, 0.1758, 0.2582, 0.1708, 0.5340], + device='cuda:3'), in_proj_covar=tensor([0.0555, 0.0670, 0.0753, 0.0507, 0.0639, 0.0546, 0.0670, 0.0573], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 10:39:57,727 INFO [train.py:903] (3/4) Epoch 28, batch 50, loss[loss=0.2629, simple_loss=0.3261, pruned_loss=0.09984, over 13209.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2824, pruned_loss=0.06029, over 866102.15 frames. ], batch size: 135, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:40:04,866 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184412.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:40:32,251 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 10:40:58,084 INFO [train.py:903] (3/4) Epoch 28, batch 100, loss[loss=0.2309, simple_loss=0.3089, pruned_loss=0.0764, over 19759.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2813, pruned_loss=0.05956, over 1523617.42 frames. 
], batch size: 63, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:41:08,342 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 10:41:18,602 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.133e+02 4.572e+02 5.776e+02 7.316e+02 1.195e+03, threshold=1.155e+03, percent-clipped=0.0 +2023-04-03 10:41:56,033 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184504.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:41:58,109 INFO [train.py:903] (3/4) Epoch 28, batch 150, loss[loss=0.1977, simple_loss=0.2848, pruned_loss=0.05531, over 19757.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2808, pruned_loss=0.05965, over 2038332.96 frames. ], batch size: 63, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:42:21,067 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5379, 2.1630, 1.6795, 1.5031, 1.9872, 1.3367, 1.3937, 1.9645], + device='cuda:3'), covar=tensor([0.0995, 0.0831, 0.1125, 0.0945, 0.0620, 0.1396, 0.0770, 0.0488], + device='cuda:3'), in_proj_covar=tensor([0.0299, 0.0317, 0.0334, 0.0270, 0.0248, 0.0341, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:42:24,573 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184527.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:42:57,421 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 10:42:58,595 INFO [train.py:903] (3/4) Epoch 28, batch 200, loss[loss=0.159, simple_loss=0.2336, pruned_loss=0.04222, over 19717.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2809, pruned_loss=0.05908, over 2441141.06 frames. ], batch size: 46, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:43:19,481 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.222e+02 4.969e+02 6.258e+02 7.516e+02 2.266e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 10:43:22,291 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184575.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:51,813 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6049, 1.3625, 1.4854, 1.5233, 3.1698, 1.2836, 2.4251, 3.6589], + device='cuda:3'), covar=tensor([0.0542, 0.2895, 0.3100, 0.1937, 0.0731, 0.2508, 0.1376, 0.0247], + device='cuda:3'), in_proj_covar=tensor([0.0427, 0.0380, 0.0398, 0.0354, 0.0385, 0.0359, 0.0397, 0.0417], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 10:43:51,898 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184600.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:43:59,858 INFO [train.py:903] (3/4) Epoch 28, batch 250, loss[loss=0.2046, simple_loss=0.2699, pruned_loss=0.06963, over 19029.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06003, over 2748795.65 frames. ], batch size: 42, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:01,682 INFO [train.py:903] (3/4) Epoch 28, batch 300, loss[loss=0.2347, simple_loss=0.3061, pruned_loss=0.08161, over 19769.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2811, pruned_loss=0.05895, over 2999710.31 frames. 
], batch size: 54, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:45:22,239 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.936e+02 6.396e+02 8.049e+02 1.564e+03, threshold=1.279e+03, percent-clipped=7.0 +2023-04-03 10:46:02,785 INFO [train.py:903] (3/4) Epoch 28, batch 350, loss[loss=0.2071, simple_loss=0.2913, pruned_loss=0.06149, over 17323.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2814, pruned_loss=0.05921, over 3176831.71 frames. ], batch size: 101, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:46:06,329 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 10:46:20,196 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184721.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:46:39,110 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2946, 2.1390, 2.1135, 1.9276, 1.7763, 1.8669, 0.7347, 1.2503], + device='cuda:3'), covar=tensor([0.0654, 0.0663, 0.0477, 0.0785, 0.1213, 0.0894, 0.1363, 0.1100], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0364, 0.0370, 0.0393, 0.0471, 0.0395, 0.0346, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 10:47:04,862 INFO [train.py:903] (3/4) Epoch 28, batch 400, loss[loss=0.198, simple_loss=0.2788, pruned_loss=0.05859, over 19760.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.05912, over 3337441.87 frames. ], batch size: 54, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:47:24,991 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.910e+02 5.909e+02 7.518e+02 1.907e+03, threshold=1.182e+03, percent-clipped=3.0 +2023-04-03 10:47:38,241 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=184783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:05,126 INFO [train.py:903] (3/4) Epoch 28, batch 450, loss[loss=0.2183, simple_loss=0.2979, pruned_loss=0.06939, over 19519.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2811, pruned_loss=0.05897, over 3445775.29 frames. ], batch size: 54, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:48:07,846 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=184808.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:38,459 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 10:48:39,481 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 10:48:43,531 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184836.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:48:56,809 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=184848.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:49:03,975 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2876, 2.1303, 1.8995, 1.8306, 1.6413, 1.7840, 0.5113, 1.2612], + device='cuda:3'), covar=tensor([0.0683, 0.0696, 0.0637, 0.0970, 0.1391, 0.1073, 0.1576, 0.1246], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0365, 0.0371, 0.0395, 0.0474, 0.0397, 0.0348, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 10:49:06,862 INFO [train.py:903] (3/4) Epoch 28, batch 500, loss[loss=0.2278, simple_loss=0.2952, pruned_loss=0.08017, over 19389.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2812, pruned_loss=0.05908, over 3543619.83 frames. ], batch size: 47, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:49:28,423 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.110e+02 5.109e+02 6.404e+02 8.256e+02 1.456e+03, threshold=1.281e+03, percent-clipped=5.0 +2023-04-03 10:49:41,485 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:50:09,155 INFO [train.py:903] (3/4) Epoch 28, batch 550, loss[loss=0.1739, simple_loss=0.2513, pruned_loss=0.04826, over 19750.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2816, pruned_loss=0.05906, over 3616775.36 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:11,432 INFO [train.py:903] (3/4) Epoch 28, batch 600, loss[loss=0.2696, simple_loss=0.3404, pruned_loss=0.09944, over 19667.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.05908, over 3673233.30 frames. ], batch size: 58, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:51:12,920 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=184957.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:19,893 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=184963.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:51:31,267 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.601e+02 5.007e+02 6.276e+02 8.222e+02 1.849e+03, threshold=1.255e+03, percent-clipped=3.0 +2023-04-03 10:51:50,595 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 10:52:14,346 INFO [train.py:903] (3/4) Epoch 28, batch 650, loss[loss=0.1901, simple_loss=0.2785, pruned_loss=0.05086, over 18764.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2823, pruned_loss=0.05952, over 3707798.61 frames. ], batch size: 74, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:16,119 INFO [train.py:903] (3/4) Epoch 28, batch 700, loss[loss=0.1804, simple_loss=0.2577, pruned_loss=0.05157, over 19383.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05938, over 3740183.81 frames. 
], batch size: 48, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:53:31,593 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6721, 1.5311, 1.6176, 2.0340, 1.7101, 1.8033, 1.8637, 1.6664], + device='cuda:3'), covar=tensor([0.0755, 0.0833, 0.0862, 0.0634, 0.0950, 0.0716, 0.0852, 0.0665], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0226, 0.0228, 0.0240, 0.0227, 0.0215, 0.0189, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 10:53:38,017 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.143e+02 4.589e+02 5.550e+02 7.126e+02 1.317e+03, threshold=1.110e+03, percent-clipped=1.0 +2023-04-03 10:53:45,177 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185078.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:01,399 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:54:19,781 INFO [train.py:903] (3/4) Epoch 28, batch 750, loss[loss=0.2195, simple_loss=0.2861, pruned_loss=0.07648, over 19463.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05986, over 3768532.80 frames. ], batch size: 49, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:54:34,061 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:10,749 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185147.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:55:20,953 INFO [train.py:903] (3/4) Epoch 28, batch 800, loss[loss=0.1948, simple_loss=0.2844, pruned_loss=0.05261, over 18048.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2825, pruned_loss=0.0597, over 3787428.94 frames. ], batch size: 83, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:55:34,889 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 10:55:41,839 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.493e+02 6.558e+02 7.858e+02 2.224e+03, threshold=1.312e+03, percent-clipped=8.0 +2023-04-03 10:55:53,722 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2349, 1.3631, 2.0779, 1.6632, 3.1842, 4.7131, 4.6301, 5.1488], + device='cuda:3'), covar=tensor([0.1708, 0.3995, 0.3251, 0.2373, 0.0604, 0.0233, 0.0178, 0.0178], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0331, 0.0363, 0.0271, 0.0253, 0.0195, 0.0219, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 10:56:24,245 INFO [train.py:903] (3/4) Epoch 28, batch 850, loss[loss=0.1935, simple_loss=0.2706, pruned_loss=0.05821, over 19396.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2819, pruned_loss=0.0592, over 3802775.68 frames. 
], batch size: 48, lr: 2.95e-03, grad_scale: 8.0 +2023-04-03 10:56:39,434 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185219.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:56:51,612 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185228.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:11,581 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185244.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:57:15,603 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 10:57:24,912 INFO [train.py:903] (3/4) Epoch 28, batch 900, loss[loss=0.1918, simple_loss=0.2731, pruned_loss=0.05522, over 19536.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05959, over 3812762.18 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:57:47,704 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.300e+02 4.798e+02 5.781e+02 7.336e+02 1.381e+03, threshold=1.156e+03, percent-clipped=1.0 +2023-04-03 10:58:21,556 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:58:28,126 INFO [train.py:903] (3/4) Epoch 28, batch 950, loss[loss=0.1788, simple_loss=0.2499, pruned_loss=0.05387, over 19778.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2819, pruned_loss=0.05932, over 3813282.08 frames. ], batch size: 46, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:58:29,318 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 10:58:40,880 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 10:59:15,319 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185343.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 10:59:32,235 INFO [train.py:903] (3/4) Epoch 28, batch 1000, loss[loss=0.1825, simple_loss=0.2713, pruned_loss=0.04689, over 18383.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2823, pruned_loss=0.05948, over 3815431.87 frames. ], batch size: 84, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 10:59:53,728 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 4.891e+02 5.853e+02 7.878e+02 2.572e+03, threshold=1.171e+03, percent-clipped=6.0 +2023-04-03 11:00:23,365 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 11:00:34,735 INFO [train.py:903] (3/4) Epoch 28, batch 1050, loss[loss=0.205, simple_loss=0.2945, pruned_loss=0.05772, over 19307.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.05963, over 3818228.83 frames. ], batch size: 66, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:00:47,037 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185416.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:00:53,766 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185422.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:01:02,624 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 11:01:35,159 INFO [train.py:903] (3/4) Epoch 28, batch 1100, loss[loss=0.1605, simple_loss=0.239, pruned_loss=0.04103, over 19787.00 frames. 
], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06062, over 3808771.09 frames. ], batch size: 48, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:01:57,231 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.219e+02 4.947e+02 6.329e+02 8.288e+02 1.903e+03, threshold=1.266e+03, percent-clipped=3.0 +2023-04-03 11:02:13,034 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:18,712 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185491.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:02:35,899 INFO [train.py:903] (3/4) Epoch 28, batch 1150, loss[loss=0.1735, simple_loss=0.2531, pruned_loss=0.04694, over 19625.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2841, pruned_loss=0.06079, over 3798271.28 frames. ], batch size: 50, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:02:55,995 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:03:15,694 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:03:40,780 INFO [train.py:903] (3/4) Epoch 28, batch 1200, loss[loss=0.2033, simple_loss=0.2843, pruned_loss=0.06113, over 19716.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2843, pruned_loss=0.06065, over 3808340.98 frames. ], batch size: 51, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:04:01,640 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 5.383e+02 6.799e+02 8.653e+02 1.626e+03, threshold=1.360e+03, percent-clipped=3.0 +2023-04-03 11:04:11,755 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 11:04:34,073 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185599.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:04:41,622 INFO [train.py:903] (3/4) Epoch 28, batch 1250, loss[loss=0.2206, simple_loss=0.2899, pruned_loss=0.07562, over 19785.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2842, pruned_loss=0.06087, over 3820520.08 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 16.0 +2023-04-03 11:04:41,975 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185606.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:03,725 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185624.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:05:44,893 INFO [train.py:903] (3/4) Epoch 28, batch 1300, loss[loss=0.1662, simple_loss=0.2481, pruned_loss=0.0421, over 19746.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06037, over 3824111.07 frames. ], batch size: 51, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:06:04,634 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185672.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:06,494 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 4.542e+02 5.530e+02 7.592e+02 1.164e+03, threshold=1.106e+03, percent-clipped=0.0 +2023-04-03 11:06:35,949 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185697.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:06:46,284 INFO [train.py:903] (3/4) Epoch 28, batch 1350, loss[loss=0.1675, simple_loss=0.2465, pruned_loss=0.04422, over 19762.00 frames. 
], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.06001, over 3834137.93 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:07:24,108 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5552, 1.2552, 1.2232, 1.4985, 1.1133, 1.3482, 1.2447, 1.4283], + device='cuda:3'), covar=tensor([0.1236, 0.1254, 0.1685, 0.1082, 0.1374, 0.0673, 0.1721, 0.0891], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0361, 0.0320, 0.0259, 0.0308, 0.0259, 0.0323, 0.0266], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 11:07:31,232 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2171, 1.3504, 1.7686, 1.1757, 2.5644, 3.3571, 3.0630, 3.5744], + device='cuda:3'), covar=tensor([0.1607, 0.3823, 0.3355, 0.2630, 0.0647, 0.0230, 0.0266, 0.0350], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0332, 0.0365, 0.0272, 0.0255, 0.0196, 0.0220, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 11:07:35,958 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3591, 1.2818, 1.6800, 1.2034, 2.5146, 3.3266, 3.0336, 3.5636], + device='cuda:3'), covar=tensor([0.1505, 0.4013, 0.3634, 0.2700, 0.0669, 0.0225, 0.0261, 0.0315], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0332, 0.0365, 0.0272, 0.0255, 0.0196, 0.0220, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 11:07:48,953 INFO [train.py:903] (3/4) Epoch 28, batch 1400, loss[loss=0.2152, simple_loss=0.293, pruned_loss=0.06874, over 19529.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2826, pruned_loss=0.06013, over 3829923.43 frames. ], batch size: 64, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:08:11,867 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.444e+02 4.899e+02 6.106e+02 7.699e+02 1.518e+03, threshold=1.221e+03, percent-clipped=6.0 +2023-04-03 11:08:15,711 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185777.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:35,156 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:08:49,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. Duration: 25.85 +2023-04-03 11:08:51,547 INFO [train.py:903] (3/4) Epoch 28, batch 1450, loss[loss=0.2211, simple_loss=0.3067, pruned_loss=0.06777, over 17945.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2824, pruned_loss=0.0601, over 3809679.58 frames. ], batch size: 83, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:08:53,248 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0797, 2.1624, 2.3240, 2.7793, 2.1173, 2.5961, 2.3084, 2.1666], + device='cuda:3'), covar=tensor([0.4233, 0.4049, 0.1948, 0.2441, 0.4225, 0.2258, 0.5003, 0.3447], + device='cuda:3'), in_proj_covar=tensor([0.0935, 0.1012, 0.0742, 0.0950, 0.0910, 0.0850, 0.0860, 0.0808], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 11:09:00,618 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. 
limit=2.0 +2023-04-03 11:09:05,685 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=185817.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:09:06,967 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:20,222 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=185830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:09:38,505 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2650, 1.2319, 1.2485, 1.3916, 1.0490, 1.3948, 1.3844, 1.3190], + device='cuda:3'), covar=tensor([0.0944, 0.0994, 0.1097, 0.0638, 0.0908, 0.0866, 0.0808, 0.0808], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0225, 0.0228, 0.0240, 0.0227, 0.0215, 0.0188, 0.0206], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 11:09:54,217 INFO [train.py:903] (3/4) Epoch 28, batch 1500, loss[loss=0.2451, simple_loss=0.3294, pruned_loss=0.08043, over 18756.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2838, pruned_loss=0.06117, over 3813615.31 frames. ], batch size: 74, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:10:02,016 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=185862.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:15,979 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.811e+02 5.017e+02 6.227e+02 8.666e+02 1.816e+03, threshold=1.245e+03, percent-clipped=11.0 +2023-04-03 11:10:33,549 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=185887.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:10:56,447 INFO [train.py:903] (3/4) Epoch 28, batch 1550, loss[loss=0.174, simple_loss=0.2557, pruned_loss=0.04616, over 19349.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06066, over 3814968.03 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:11:45,524 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=185945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:11:58,295 INFO [train.py:903] (3/4) Epoch 28, batch 1600, loss[loss=0.2486, simple_loss=0.3213, pruned_loss=0.08795, over 19461.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2833, pruned_loss=0.06109, over 3816485.13 frames. ], batch size: 70, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:12:20,803 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 11:12:23,176 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.773e+02 5.899e+02 6.974e+02 1.687e+03, threshold=1.180e+03, percent-clipped=2.0 +2023-04-03 11:13:03,460 INFO [train.py:903] (3/4) Epoch 28, batch 1650, loss[loss=0.2061, simple_loss=0.287, pruned_loss=0.06266, over 19577.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2829, pruned_loss=0.06094, over 3820116.89 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:13:54,022 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 11:14:07,006 INFO [train.py:903] (3/4) Epoch 28, batch 1700, loss[loss=0.1777, simple_loss=0.2453, pruned_loss=0.05502, over 19070.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2834, pruned_loss=0.06094, over 3825651.79 frames. 
], batch size: 42, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:14:29,606 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.751e+02 5.670e+02 7.170e+02 1.723e+03, threshold=1.134e+03, percent-clipped=7.0 +2023-04-03 11:14:44,410 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 11:15:08,855 INFO [train.py:903] (3/4) Epoch 28, batch 1750, loss[loss=0.2552, simple_loss=0.3263, pruned_loss=0.0921, over 19491.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2828, pruned_loss=0.06027, over 3818446.20 frames. ], batch size: 64, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:15:15,098 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2606, 2.1128, 1.9567, 1.7836, 1.7547, 1.7694, 0.6716, 1.1813], + device='cuda:3'), covar=tensor([0.0735, 0.0700, 0.0613, 0.0997, 0.1300, 0.1035, 0.1477, 0.1214], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0366, 0.0371, 0.0393, 0.0471, 0.0396, 0.0346, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 11:15:24,173 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:15:28,463 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:15:46,553 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-03 11:16:11,525 INFO [train.py:903] (3/4) Epoch 28, batch 1800, loss[loss=0.1855, simple_loss=0.2615, pruned_loss=0.05475, over 19413.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06033, over 3807196.65 frames. ], batch size: 48, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:16:13,506 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-03 11:16:18,384 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186161.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:16:36,642 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.184e+02 6.048e+02 8.514e+02 1.613e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 11:16:38,082 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186176.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:08,560 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 11:17:09,016 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186201.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:15,218 INFO [train.py:903] (3/4) Epoch 28, batch 1850, loss[loss=0.2028, simple_loss=0.2889, pruned_loss=0.05839, over 19761.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2833, pruned_loss=0.06067, over 3791630.46 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:17:39,918 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186226.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:17:46,428 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. 
Duration: 27.8166875 +2023-04-03 11:17:52,432 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186236.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:18:17,536 INFO [train.py:903] (3/4) Epoch 28, batch 1900, loss[loss=0.2109, simple_loss=0.2916, pruned_loss=0.06513, over 19664.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.0597, over 3803411.47 frames. ], batch size: 55, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:18:33,594 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 11:18:38,417 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 11:18:39,533 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.824e+02 4.995e+02 5.845e+02 7.128e+02 1.193e+03, threshold=1.169e+03, percent-clipped=0.0 +2023-04-03 11:18:41,912 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186276.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:18:42,905 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:19:02,971 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 11:19:19,217 INFO [train.py:903] (3/4) Epoch 28, batch 1950, loss[loss=0.1595, simple_loss=0.2385, pruned_loss=0.04023, over 19376.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2829, pruned_loss=0.05979, over 3815212.39 frames. ], batch size: 47, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:20,369 INFO [train.py:903] (3/4) Epoch 28, batch 2000, loss[loss=0.189, simple_loss=0.2771, pruned_loss=0.05045, over 19597.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06009, over 3815466.76 frames. ], batch size: 57, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:20:45,108 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.060e+02 6.550e+02 8.587e+02 3.446e+03, threshold=1.310e+03, percent-clipped=8.0 +2023-04-03 11:21:20,868 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 11:21:24,394 INFO [train.py:903] (3/4) Epoch 28, batch 2050, loss[loss=0.1942, simple_loss=0.2833, pruned_loss=0.0526, over 19522.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2818, pruned_loss=0.05933, over 3823683.62 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:21:40,368 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 11:21:42,531 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 11:22:02,198 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. 
Duration: 0.836375 +2023-04-03 11:22:04,665 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:07,925 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1992, 0.9728, 0.9726, 1.1460, 0.8480, 1.0281, 0.9506, 1.0994], + device='cuda:3'), covar=tensor([0.0975, 0.1175, 0.1331, 0.0871, 0.1217, 0.0572, 0.1373, 0.0760], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0364, 0.0323, 0.0260, 0.0310, 0.0260, 0.0325, 0.0268], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 11:22:26,599 INFO [train.py:903] (3/4) Epoch 28, batch 2100, loss[loss=0.1831, simple_loss=0.2677, pruned_loss=0.04925, over 19588.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2823, pruned_loss=0.05974, over 3816103.89 frames. ], batch size: 52, lr: 2.94e-03, grad_scale: 8.0 +2023-04-03 11:22:35,102 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:22:49,640 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.089e+02 6.089e+02 7.390e+02 1.324e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 11:22:50,535 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-03 11:22:58,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 11:23:11,818 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186492.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:13,818 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:20,625 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 11:23:29,672 INFO [train.py:903] (3/4) Epoch 28, batch 2150, loss[loss=0.1996, simple_loss=0.283, pruned_loss=0.05811, over 19656.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05972, over 3822323.44 frames. ], batch size: 60, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:23:30,063 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8956, 1.4729, 1.5522, 1.8063, 4.4084, 1.2264, 2.6148, 4.7996], + device='cuda:3'), covar=tensor([0.0538, 0.3104, 0.3300, 0.2057, 0.0769, 0.2805, 0.1496, 0.0187], + device='cuda:3'), in_proj_covar=tensor([0.0422, 0.0378, 0.0396, 0.0353, 0.0382, 0.0357, 0.0395, 0.0417], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:23:42,677 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:23:46,781 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186520.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:02,531 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186532.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 11:24:06,091 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186535.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:30,565 INFO [train.py:903] (3/4) Epoch 28, batch 2200, loss[loss=0.1933, simple_loss=0.279, pruned_loss=0.05375, over 19474.00 frames. 
], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06012, over 3829199.24 frames. ], batch size: 64, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:24:32,121 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186557.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:24:32,226 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186557.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 11:24:55,772 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.530e+02 4.864e+02 5.746e+02 7.619e+02 1.717e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-04-03 11:24:58,717 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186577.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:05,219 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.30 vs. limit=5.0 +2023-04-03 11:25:09,426 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:15,865 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-03 11:25:32,821 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186604.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:25:34,931 INFO [train.py:903] (3/4) Epoch 28, batch 2250, loss[loss=0.1995, simple_loss=0.2746, pruned_loss=0.06214, over 19747.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2812, pruned_loss=0.05925, over 3839461.36 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:25:35,614 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 11:25:54,494 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186621.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:12,671 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186635.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:26:39,389 INFO [train.py:903] (3/4) Epoch 28, batch 2300, loss[loss=0.1896, simple_loss=0.2736, pruned_loss=0.0528, over 19659.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2811, pruned_loss=0.05943, over 3849272.73 frames. ], batch size: 53, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:26:55,447 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 11:27:02,265 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.323e+02 4.896e+02 5.750e+02 7.152e+02 2.246e+03, threshold=1.150e+03, percent-clipped=6.0 +2023-04-03 11:27:42,237 INFO [train.py:903] (3/4) Epoch 28, batch 2350, loss[loss=0.2093, simple_loss=0.2938, pruned_loss=0.06243, over 19590.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2813, pruned_loss=0.0594, over 3841501.33 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:28:07,896 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:20,558 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186736.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:28:26,047 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 11:28:42,190 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. 
Duration: 25.45 +2023-04-03 11:28:44,640 INFO [train.py:903] (3/4) Epoch 28, batch 2400, loss[loss=0.1723, simple_loss=0.2635, pruned_loss=0.04055, over 19728.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.05914, over 3841329.64 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:29:08,814 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.283e+02 4.439e+02 5.729e+02 7.466e+02 1.887e+03, threshold=1.146e+03, percent-clipped=9.0 +2023-04-03 11:29:20,269 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:29:47,728 INFO [train.py:903] (3/4) Epoch 28, batch 2450, loss[loss=0.1962, simple_loss=0.2856, pruned_loss=0.05337, over 19523.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2819, pruned_loss=0.05918, over 3830574.30 frames. ], batch size: 56, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:22,247 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186833.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:26,698 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:43,209 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7389, 1.5911, 1.5970, 2.1595, 1.5850, 1.9978, 2.0098, 1.8189], + device='cuda:3'), covar=tensor([0.0819, 0.0887, 0.1013, 0.0751, 0.0933, 0.0764, 0.0816, 0.0683], + device='cuda:3'), in_proj_covar=tensor([0.0211, 0.0222, 0.0226, 0.0239, 0.0225, 0.0212, 0.0186, 0.0205], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 11:30:48,876 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.1156, 2.8877, 2.3167, 2.2649, 2.0541, 2.4714, 1.1333, 2.1352], + device='cuda:3'), covar=tensor([0.0713, 0.0648, 0.0753, 0.1196, 0.1237, 0.1249, 0.1475, 0.1147], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0366, 0.0371, 0.0393, 0.0473, 0.0398, 0.0347, 0.0350], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 11:30:50,779 INFO [train.py:903] (3/4) Epoch 28, batch 2500, loss[loss=0.16, simple_loss=0.2427, pruned_loss=0.03864, over 19761.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05864, over 3830666.02 frames. 
], batch size: 47, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:30:54,437 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:30:54,553 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186858.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:15,068 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.341e+02 4.860e+02 5.866e+02 7.110e+02 2.029e+03, threshold=1.173e+03, percent-clipped=7.0 +2023-04-03 11:31:19,830 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186879.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:35,109 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186891.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:45,080 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:49,213 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:31:54,930 INFO [train.py:903] (3/4) Epoch 28, batch 2550, loss[loss=0.2143, simple_loss=0.2944, pruned_loss=0.06711, over 19670.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2809, pruned_loss=0.05882, over 3819499.78 frames. ], batch size: 60, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:32:06,745 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=186916.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,505 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:22,651 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=186929.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:47,706 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=186948.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:50,818 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 11:32:52,343 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:32:56,644 INFO [train.py:903] (3/4) Epoch 28, batch 2600, loss[loss=0.1958, simple_loss=0.2792, pruned_loss=0.0562, over 19671.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2815, pruned_loss=0.05963, over 3809152.47 frames. 
], batch size: 55, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:33:20,687 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.127e+02 5.898e+02 7.788e+02 1.720e+03, threshold=1.180e+03, percent-clipped=7.0 +2023-04-03 11:33:38,273 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4684, 1.2548, 1.3866, 1.4566, 2.2325, 1.2171, 1.9624, 2.4546], + device='cuda:3'), covar=tensor([0.0557, 0.2232, 0.2191, 0.1387, 0.0631, 0.1822, 0.1615, 0.0435], + device='cuda:3'), in_proj_covar=tensor([0.0421, 0.0377, 0.0396, 0.0352, 0.0382, 0.0357, 0.0394, 0.0415], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:33:43,121 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=186992.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:45,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=186994.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:33:59,847 INFO [train.py:903] (3/4) Epoch 28, batch 2650, loss[loss=0.1755, simple_loss=0.2535, pruned_loss=0.04878, over 14737.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2823, pruned_loss=0.06016, over 3795144.57 frames. ], batch size: 32, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:34:12,812 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:14,011 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:22,402 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 11:34:46,307 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187043.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:34:47,571 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187044.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:02,638 INFO [train.py:903] (3/4) Epoch 28, batch 2700, loss[loss=0.201, simple_loss=0.284, pruned_loss=0.05902, over 19589.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06039, over 3806515.51 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:35:02,900 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:13,332 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187063.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:22,297 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187071.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:35:26,784 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.432e+02 4.685e+02 5.845e+02 7.450e+02 1.306e+03, threshold=1.169e+03, percent-clipped=4.0 +2023-04-03 11:36:06,485 INFO [train.py:903] (3/4) Epoch 28, batch 2750, loss[loss=0.1757, simple_loss=0.2518, pruned_loss=0.04981, over 19762.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.05994, over 3823989.10 frames. 
], batch size: 47, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:06,454 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:08,435 INFO [train.py:903] (3/4) Epoch 28, batch 2800, loss[loss=0.1764, simple_loss=0.2497, pruned_loss=0.05154, over 19725.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2818, pruned_loss=0.05967, over 3824791.45 frames. ], batch size: 46, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:37:31,617 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.065e+02 4.906e+02 5.640e+02 7.444e+02 1.445e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-04-03 11:37:36,920 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:37:46,960 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187186.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:05,782 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187202.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:10,182 INFO [train.py:903] (3/4) Epoch 28, batch 2850, loss[loss=0.2543, simple_loss=0.3151, pruned_loss=0.09671, over 19773.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2821, pruned_loss=0.05995, over 3827114.42 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:38:13,801 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187208.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:44,982 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187233.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:38:46,482 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-03 11:39:05,636 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187250.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:11,665 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 11:39:12,697 INFO [train.py:903] (3/4) Epoch 28, batch 2900, loss[loss=0.2166, simple_loss=0.3105, pruned_loss=0.06134, over 19672.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.281, pruned_loss=0.05933, over 3826844.58 frames. ], batch size: 58, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:39:32,591 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187272.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:33,456 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187273.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:35,532 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 5.012e+02 6.172e+02 7.442e+02 2.226e+03, threshold=1.234e+03, percent-clipped=8.0 +2023-04-03 11:39:35,980 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187275.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:39:54,724 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.88 vs. 
limit=5.0 +2023-04-03 11:40:02,602 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:06,121 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187300.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:13,518 INFO [train.py:903] (3/4) Epoch 28, batch 2950, loss[loss=0.2089, simple_loss=0.2982, pruned_loss=0.05983, over 19656.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2822, pruned_loss=0.06013, over 3808857.69 frames. ], batch size: 58, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:40:26,542 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187317.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:28,985 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187319.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:35,752 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187325.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:40:59,985 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187344.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:41:13,833 INFO [train.py:903] (3/4) Epoch 28, batch 3000, loss[loss=0.2503, simple_loss=0.3177, pruned_loss=0.09141, over 12839.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2836, pruned_loss=0.06104, over 3802285.03 frames. ], batch size: 136, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:41:13,833 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 11:41:26,709 INFO [train.py:937] (3/4) Epoch 28, validation: loss=0.1673, simple_loss=0.2667, pruned_loss=0.03394, over 944034.00 frames. +2023-04-03 11:41:26,709 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 11:41:29,122 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. 
Duration: 29.735 +2023-04-03 11:41:35,521 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1050, 2.2069, 2.4216, 2.7815, 2.1245, 2.6903, 2.4310, 2.2168], + device='cuda:3'), covar=tensor([0.4325, 0.4185, 0.2060, 0.2366, 0.4246, 0.2233, 0.5139, 0.3580], + device='cuda:3'), in_proj_covar=tensor([0.0935, 0.1013, 0.0742, 0.0949, 0.0911, 0.0853, 0.0861, 0.0809], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 11:41:41,220 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0922, 1.6892, 1.9686, 2.0942, 4.6181, 1.3320, 2.8425, 5.1074], + device='cuda:3'), covar=tensor([0.0478, 0.2877, 0.2809, 0.1840, 0.0742, 0.2709, 0.1384, 0.0167], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0381, 0.0401, 0.0355, 0.0385, 0.0361, 0.0399, 0.0420], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:41:49,225 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.693e+02 4.898e+02 6.411e+02 7.995e+02 1.373e+03, threshold=1.282e+03, percent-clipped=5.0 +2023-04-03 11:42:05,148 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187387.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:06,466 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187388.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:08,740 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187390.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:21,160 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:42:27,782 INFO [train.py:903] (3/4) Epoch 28, batch 3050, loss[loss=0.2172, simple_loss=0.3002, pruned_loss=0.06704, over 19296.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2825, pruned_loss=0.06039, over 3798283.49 frames. ], batch size: 66, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:42:56,177 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-03 11:43:13,143 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187442.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:29,503 INFO [train.py:903] (3/4) Epoch 28, batch 3100, loss[loss=0.1837, simple_loss=0.2694, pruned_loss=0.04895, over 19577.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2816, pruned_loss=0.05987, over 3810722.76 frames. ], batch size: 52, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:43:43,950 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:43:54,450 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.395e+02 4.792e+02 5.797e+02 7.675e+02 1.223e+03, threshold=1.159e+03, percent-clipped=0.0 +2023-04-03 11:44:27,074 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187502.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:32,164 INFO [train.py:903] (3/4) Epoch 28, batch 3150, loss[loss=0.2215, simple_loss=0.296, pruned_loss=0.07351, over 13072.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05944, over 3815595.45 frames. 
], batch size: 136, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:44:44,944 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:44:54,641 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. Duration: 25.3818125 +2023-04-03 11:45:01,662 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187530.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:06,302 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7663, 4.3193, 4.4973, 4.5082, 1.8200, 4.2717, 3.6743, 4.2458], + device='cuda:3'), covar=tensor([0.1834, 0.0900, 0.0648, 0.0728, 0.6004, 0.1040, 0.0755, 0.1146], + device='cuda:3'), in_proj_covar=tensor([0.0829, 0.0792, 0.1004, 0.0881, 0.0867, 0.0769, 0.0594, 0.0932], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 11:45:10,628 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:15,263 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=187541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:35,285 INFO [train.py:903] (3/4) Epoch 28, batch 3200, loss[loss=0.2324, simple_loss=0.3081, pruned_loss=0.07833, over 17203.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05957, over 3819099.53 frames. ], batch size: 101, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:45:35,960 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:45:55,556 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:45:57,382 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.914e+02 6.228e+02 8.001e+02 2.182e+03, threshold=1.246e+03, percent-clipped=10.0 +2023-04-03 11:46:27,908 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187598.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:46:36,793 INFO [train.py:903] (3/4) Epoch 28, batch 3250, loss[loss=0.1747, simple_loss=0.257, pruned_loss=0.04617, over 19613.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05973, over 3828023.92 frames. ], batch size: 50, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:46:55,616 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:47:22,872 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.09 vs. limit=5.0 +2023-04-03 11:47:23,903 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187644.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:47:30,157 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. limit=2.0 +2023-04-03 11:47:37,435 INFO [train.py:903] (3/4) Epoch 28, batch 3300, loss[loss=0.2179, simple_loss=0.2963, pruned_loss=0.06977, over 19578.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2834, pruned_loss=0.06047, over 3828162.07 frames. ], batch size: 61, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:47:37,453 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 11:47:54,269 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187669.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:47:56,603 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0369, 1.8510, 1.6793, 2.0465, 1.6716, 1.7025, 1.6401, 1.9242], + device='cuda:3'), covar=tensor([0.1063, 0.1503, 0.1527, 0.1054, 0.1480, 0.0622, 0.1592, 0.0786], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0360, 0.0319, 0.0257, 0.0306, 0.0256, 0.0322, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:47:57,868 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4035, 2.3519, 2.2654, 2.5576, 2.3792, 2.0485, 2.1707, 2.3915], + device='cuda:3'), covar=tensor([0.0852, 0.1250, 0.1194, 0.0829, 0.1084, 0.0508, 0.1225, 0.0596], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0360, 0.0319, 0.0257, 0.0306, 0.0256, 0.0322, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:48:01,982 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.234e+02 4.770e+02 6.093e+02 7.830e+02 1.620e+03, threshold=1.219e+03, percent-clipped=4.0 +2023-04-03 11:48:08,592 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.11 vs. limit=5.0 +2023-04-03 11:48:10,620 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 11:48:40,829 INFO [train.py:903] (3/4) Epoch 28, batch 3350, loss[loss=0.205, simple_loss=0.2942, pruned_loss=0.05787, over 19605.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06043, over 3826403.34 frames. ], batch size: 57, lr: 2.93e-03, grad_scale: 8.0 +2023-04-03 11:49:14,752 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187734.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:49:42,975 INFO [train.py:903] (3/4) Epoch 28, batch 3400, loss[loss=0.2131, simple_loss=0.3, pruned_loss=0.06312, over 19676.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2832, pruned_loss=0.06023, over 3837954.78 frames. 
], batch size: 59, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:49:45,557 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187758.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:49:54,718 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1861, 0.9010, 0.9685, 1.1309, 0.8183, 1.0048, 0.9192, 1.0776], + device='cuda:3'), covar=tensor([0.0832, 0.1111, 0.1114, 0.0733, 0.1069, 0.0527, 0.1236, 0.0661], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0359, 0.0318, 0.0255, 0.0304, 0.0255, 0.0320, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:50:02,064 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=187771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:07,730 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.151e+02 4.801e+02 5.865e+02 7.575e+02 1.695e+03, threshold=1.173e+03, percent-clipped=1.0 +2023-04-03 11:50:16,882 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:34,172 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=187796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:50:46,600 INFO [train.py:903] (3/4) Epoch 28, batch 3450, loss[loss=0.2028, simple_loss=0.2979, pruned_loss=0.05387, over 19569.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05988, over 3830388.30 frames. ], batch size: 61, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:50:47,821 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 11:51:41,401 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187849.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:51:49,297 INFO [train.py:903] (3/4) Epoch 28, batch 3500, loss[loss=0.1972, simple_loss=0.2789, pruned_loss=0.05777, over 19549.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06008, over 3815867.30 frames. ], batch size: 52, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:52:12,001 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187874.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:14,010 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.602e+02 4.936e+02 6.004e+02 7.154e+02 1.224e+03, threshold=1.201e+03, percent-clipped=1.0 +2023-04-03 11:52:20,877 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187881.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:26,772 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=187885.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:52:50,771 INFO [train.py:903] (3/4) Epoch 28, batch 3550, loss[loss=0.2055, simple_loss=0.2868, pruned_loss=0.06205, over 19694.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.284, pruned_loss=0.06059, over 3820189.66 frames. ], batch size: 59, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:53:52,892 INFO [train.py:903] (3/4) Epoch 28, batch 3600, loss[loss=0.1986, simple_loss=0.2847, pruned_loss=0.05625, over 18701.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2847, pruned_loss=0.06074, over 3811970.49 frames. 
], batch size: 74, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:54:17,598 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.816e+02 4.904e+02 5.870e+02 7.567e+02 1.667e+03, threshold=1.174e+03, percent-clipped=3.0 +2023-04-03 11:54:34,351 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187989.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:42,622 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=187996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:48,323 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188000.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:54:56,060 INFO [train.py:903] (3/4) Epoch 28, batch 3650, loss[loss=0.1986, simple_loss=0.2846, pruned_loss=0.05629, over 19310.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2855, pruned_loss=0.06078, over 3801007.75 frames. ], batch size: 70, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:55:01,222 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-03 11:55:37,616 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1070, 2.1778, 2.4688, 2.6932, 2.1179, 2.6277, 2.4578, 2.2665], + device='cuda:3'), covar=tensor([0.4220, 0.4120, 0.1970, 0.2499, 0.4431, 0.2386, 0.4883, 0.3495], + device='cuda:3'), in_proj_covar=tensor([0.0940, 0.1017, 0.0745, 0.0954, 0.0914, 0.0858, 0.0865, 0.0812], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 11:55:57,938 INFO [train.py:903] (3/4) Epoch 28, batch 3700, loss[loss=0.2124, simple_loss=0.2979, pruned_loss=0.0635, over 19616.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2849, pruned_loss=0.06112, over 3804874.36 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 11:56:10,322 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-03 11:56:23,996 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.791e+02 5.013e+02 6.013e+02 7.793e+02 2.143e+03, threshold=1.203e+03, percent-clipped=3.0 +2023-04-03 11:56:59,875 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:57:00,592 INFO [train.py:903] (3/4) Epoch 28, batch 3750, loss[loss=0.175, simple_loss=0.2613, pruned_loss=0.04436, over 19664.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2854, pruned_loss=0.06115, over 3800626.55 frames. ], batch size: 53, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:57:32,241 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188130.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 11:58:03,790 INFO [train.py:903] (3/4) Epoch 28, batch 3800, loss[loss=0.1989, simple_loss=0.287, pruned_loss=0.05546, over 19492.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2853, pruned_loss=0.0612, over 3798920.51 frames. 
], batch size: 64, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:58:28,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3783, 3.1026, 2.2166, 2.7420, 0.7511, 3.0714, 2.9106, 3.0296], + device='cuda:3'), covar=tensor([0.1024, 0.1233, 0.2065, 0.1084, 0.3930, 0.0935, 0.1203, 0.1456], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0430, 0.0515, 0.0361, 0.0409, 0.0456, 0.0452, 0.0484], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 11:58:30,486 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.894e+02 5.177e+02 6.017e+02 8.386e+02 1.721e+03, threshold=1.203e+03, percent-clipped=7.0 +2023-04-03 11:58:33,986 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 11:59:06,272 INFO [train.py:903] (3/4) Epoch 28, batch 3850, loss[loss=0.2048, simple_loss=0.2906, pruned_loss=0.05952, over 19724.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2843, pruned_loss=0.06082, over 3792584.02 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 11:59:35,001 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2757, 2.1119, 2.0074, 1.8381, 1.6350, 1.7911, 0.6079, 1.3052], + device='cuda:3'), covar=tensor([0.0687, 0.0724, 0.0579, 0.0979, 0.1304, 0.1118, 0.1491, 0.1197], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0367, 0.0374, 0.0395, 0.0475, 0.0400, 0.0347, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 11:59:56,134 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:05,189 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188252.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:09,176 INFO [train.py:903] (3/4) Epoch 28, batch 3900, loss[loss=0.182, simple_loss=0.2554, pruned_loss=0.05426, over 19772.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2825, pruned_loss=0.05988, over 3805716.34 frames. ], batch size: 47, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:00:09,615 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=188256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:26,398 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188270.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:34,023 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.127e+02 5.053e+02 6.264e+02 8.116e+02 1.975e+03, threshold=1.253e+03, percent-clipped=4.0 +2023-04-03 12:00:34,442 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188277.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:00:39,114 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=188281.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:04,086 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188301.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:01:09,681 INFO [train.py:903] (3/4) Epoch 28, batch 3950, loss[loss=0.2208, simple_loss=0.2953, pruned_loss=0.0732, over 19686.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.06002, over 3818093.38 frames. 
], batch size: 53, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:01:15,489 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 12:01:53,022 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 12:02:01,855 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6017, 2.1547, 1.6660, 1.6031, 2.0584, 1.4853, 1.5606, 1.9570], + device='cuda:3'), covar=tensor([0.1012, 0.0791, 0.0995, 0.0829, 0.0510, 0.1167, 0.0683, 0.0480], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0319, 0.0342, 0.0272, 0.0252, 0.0345, 0.0293, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:02:12,178 INFO [train.py:903] (3/4) Epoch 28, batch 4000, loss[loss=0.2087, simple_loss=0.2907, pruned_loss=0.06336, over 19731.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2824, pruned_loss=0.05977, over 3814187.90 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:02:38,279 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.378e+02 4.980e+02 6.272e+02 8.090e+02 1.579e+03, threshold=1.254e+03, percent-clipped=5.0 +2023-04-03 12:02:59,851 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 12:03:14,916 INFO [train.py:903] (3/4) Epoch 28, batch 4050, loss[loss=0.2802, simple_loss=0.3514, pruned_loss=0.1045, over 19496.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2817, pruned_loss=0.05962, over 3808564.14 frames. ], batch size: 64, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:03:40,257 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3731, 3.9667, 2.5407, 3.5144, 0.8113, 3.9908, 3.8047, 3.9298], + device='cuda:3'), covar=tensor([0.0711, 0.1094, 0.2063, 0.0961, 0.4243, 0.0702, 0.0947, 0.1296], + device='cuda:3'), in_proj_covar=tensor([0.0531, 0.0433, 0.0519, 0.0363, 0.0413, 0.0459, 0.0456, 0.0486], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:04:05,447 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5479, 1.5839, 1.6968, 1.7554, 2.3099, 2.1976, 2.3048, 0.8921], + device='cuda:3'), covar=tensor([0.2458, 0.4408, 0.2844, 0.1989, 0.1550, 0.2216, 0.1472, 0.4993], + device='cuda:3'), in_proj_covar=tensor([0.0554, 0.0670, 0.0755, 0.0508, 0.0635, 0.0547, 0.0670, 0.0571], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:04:17,049 INFO [train.py:903] (3/4) Epoch 28, batch 4100, loss[loss=0.2115, simple_loss=0.3023, pruned_loss=0.0604, over 19745.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2814, pruned_loss=0.05922, over 3821300.78 frames. ], batch size: 63, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:04:43,426 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 5.116e+02 6.170e+02 8.694e+02 1.686e+03, threshold=1.234e+03, percent-clipped=5.0 +2023-04-03 12:04:53,754 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-03 12:05:11,248 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:05:19,164 INFO [train.py:903] (3/4) Epoch 28, batch 4150, loss[loss=0.1975, simple_loss=0.285, pruned_loss=0.055, over 19658.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05973, over 3818116.49 frames. ], batch size: 55, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:05:38,322 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6475, 1.8358, 2.1628, 1.9926, 3.2142, 2.8272, 3.5021, 1.6929], + device='cuda:3'), covar=tensor([0.2612, 0.4532, 0.2987, 0.1957, 0.1538, 0.2108, 0.1627, 0.4675], + device='cuda:3'), in_proj_covar=tensor([0.0553, 0.0668, 0.0754, 0.0508, 0.0633, 0.0546, 0.0669, 0.0570], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 12:06:02,309 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1246, 3.3818, 1.9624, 2.0767, 3.0979, 1.8082, 1.5818, 2.3725], + device='cuda:3'), covar=tensor([0.1367, 0.0691, 0.1109, 0.0912, 0.0527, 0.1198, 0.0956, 0.0622], + device='cuda:3'), in_proj_covar=tensor([0.0302, 0.0317, 0.0341, 0.0271, 0.0251, 0.0344, 0.0292, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:06:03,686 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.28 vs. limit=5.0 +2023-04-03 12:06:21,496 INFO [train.py:903] (3/4) Epoch 28, batch 4200, loss[loss=0.21, simple_loss=0.2833, pruned_loss=0.06833, over 19849.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2826, pruned_loss=0.05966, over 3835194.13 frames. ], batch size: 52, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:06:24,953 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 12:06:35,212 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9913, 1.9359, 1.6434, 2.0831, 1.7975, 1.6899, 1.6331, 1.8901], + device='cuda:3'), covar=tensor([0.1098, 0.1429, 0.1594, 0.1056, 0.1437, 0.0602, 0.1584, 0.0794], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0359, 0.0318, 0.0256, 0.0304, 0.0256, 0.0320, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:06:46,549 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.847e+02 4.775e+02 5.885e+02 7.540e+02 1.202e+03, threshold=1.177e+03, percent-clipped=0.0 +2023-04-03 12:07:22,472 INFO [train.py:903] (3/4) Epoch 28, batch 4250, loss[loss=0.21, simple_loss=0.2897, pruned_loss=0.0651, over 19177.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2824, pruned_loss=0.06003, over 3844235.76 frames. ], batch size: 69, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:07:33,738 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 12:07:44,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. 
Duration: 25.0944375 +2023-04-03 12:08:10,317 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188645.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:08:12,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4813, 1.5329, 1.8943, 1.7635, 2.6445, 2.3563, 2.8524, 1.2217], + device='cuda:3'), covar=tensor([0.2656, 0.4582, 0.2918, 0.2073, 0.1729, 0.2301, 0.1608, 0.4928], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0672, 0.0757, 0.0509, 0.0637, 0.0549, 0.0671, 0.0573], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:08:24,249 INFO [train.py:903] (3/4) Epoch 28, batch 4300, loss[loss=0.2011, simple_loss=0.2718, pruned_loss=0.06524, over 19482.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2816, pruned_loss=0.05971, over 3832345.72 frames. ], batch size: 49, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:08:43,058 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7961, 1.5767, 1.4558, 1.7908, 1.4614, 1.5411, 1.4517, 1.6725], + device='cuda:3'), covar=tensor([0.1165, 0.1343, 0.1660, 0.1115, 0.1406, 0.0637, 0.1648, 0.0890], + device='cuda:3'), in_proj_covar=tensor([0.0276, 0.0358, 0.0317, 0.0255, 0.0304, 0.0255, 0.0319, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:08:45,380 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 12:08:51,756 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.988e+02 4.883e+02 5.889e+02 8.258e+02 1.553e+03, threshold=1.178e+03, percent-clipped=6.0 +2023-04-03 12:09:15,642 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 12:09:26,307 INFO [train.py:903] (3/4) Epoch 28, batch 4350, loss[loss=0.2044, simple_loss=0.2887, pruned_loss=0.06008, over 19598.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2824, pruned_loss=0.06024, over 3819093.71 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 4.0 +2023-04-03 12:10:30,035 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 12:10:30,423 INFO [train.py:903] (3/4) Epoch 28, batch 4400, loss[loss=0.2317, simple_loss=0.3074, pruned_loss=0.07797, over 13845.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2821, pruned_loss=0.05996, over 3816593.66 frames. 
], batch size: 136, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:10:35,412 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188760.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:10:45,485 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7489, 4.3574, 2.9392, 3.8709, 1.6600, 4.3385, 4.1910, 4.3553], + device='cuda:3'), covar=tensor([0.0563, 0.0814, 0.1772, 0.0791, 0.3131, 0.0659, 0.0839, 0.1191], + device='cuda:3'), in_proj_covar=tensor([0.0525, 0.0428, 0.0513, 0.0361, 0.0409, 0.0455, 0.0451, 0.0481], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:10:45,704 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2000, 2.1471, 2.0368, 1.8833, 1.6966, 1.8931, 0.8245, 1.4171], + device='cuda:3'), covar=tensor([0.0631, 0.0664, 0.0489, 0.0865, 0.1068, 0.1004, 0.1392, 0.1081], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0368, 0.0375, 0.0397, 0.0477, 0.0401, 0.0349, 0.0353], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 12:10:52,235 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 12:10:57,775 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.415e+02 4.797e+02 5.749e+02 7.341e+02 1.454e+03, threshold=1.150e+03, percent-clipped=2.0 +2023-04-03 12:11:02,386 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 12:11:08,439 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0944, 1.2936, 1.8097, 1.3174, 2.7309, 3.6689, 3.3649, 3.8179], + device='cuda:3'), covar=tensor([0.1784, 0.4021, 0.3494, 0.2644, 0.0630, 0.0197, 0.0207, 0.0280], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0332, 0.0365, 0.0272, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 12:11:31,836 INFO [train.py:903] (3/4) Epoch 28, batch 4450, loss[loss=0.1984, simple_loss=0.2791, pruned_loss=0.05883, over 19687.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.05915, over 3828430.39 frames. ], batch size: 53, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:12:19,636 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=188843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:12:36,107 INFO [train.py:903] (3/4) Epoch 28, batch 4500, loss[loss=0.1974, simple_loss=0.2833, pruned_loss=0.05577, over 19627.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2808, pruned_loss=0.05876, over 3834170.75 frames. ], batch size: 57, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:04,411 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 4.536e+02 5.549e+02 7.562e+02 1.666e+03, threshold=1.110e+03, percent-clipped=5.0 +2023-04-03 12:13:38,222 INFO [train.py:903] (3/4) Epoch 28, batch 4550, loss[loss=0.2363, simple_loss=0.3134, pruned_loss=0.07963, over 13798.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.281, pruned_loss=0.05895, over 3835151.75 frames. ], batch size: 136, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:13:46,946 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. 
Duration: 28.72225 +2023-04-03 12:14:12,230 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 12:14:29,480 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188947.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:31,867 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=188949.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:14:39,519 INFO [train.py:903] (3/4) Epoch 28, batch 4600, loss[loss=0.2064, simple_loss=0.2904, pruned_loss=0.06119, over 19314.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05941, over 3820339.91 frames. ], batch size: 66, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:14:43,965 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=188958.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:15:07,936 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.099e+02 4.624e+02 5.779e+02 7.629e+02 1.899e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 12:15:43,021 INFO [train.py:903] (3/4) Epoch 28, batch 4650, loss[loss=0.2473, simple_loss=0.3133, pruned_loss=0.09062, over 12796.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06023, over 3821932.86 frames. ], batch size: 136, lr: 2.92e-03, grad_scale: 8.0 +2023-04-03 12:15:55,988 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189016.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:01,293 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 12:16:12,603 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 12:16:26,493 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189041.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:34,757 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189048.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:16:44,449 INFO [train.py:903] (3/4) Epoch 28, batch 4700, loss[loss=0.269, simple_loss=0.3418, pruned_loss=0.0981, over 19681.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06019, over 3839143.82 frames. ], batch size: 59, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:16:47,419 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.77 vs. limit=5.0 +2023-04-03 12:17:07,720 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. 
Duration: 0.92225 +2023-04-03 12:17:10,868 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.010e+02 4.848e+02 5.842e+02 7.352e+02 2.015e+03, threshold=1.168e+03, percent-clipped=3.0 +2023-04-03 12:17:17,571 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189082.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:36,200 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:17:43,645 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1222, 1.3609, 1.7033, 1.0423, 2.3294, 3.0903, 2.7830, 3.2289], + device='cuda:3'), covar=tensor([0.1665, 0.3847, 0.3463, 0.2752, 0.0688, 0.0224, 0.0273, 0.0362], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0333, 0.0365, 0.0272, 0.0256, 0.0198, 0.0221, 0.0280], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 12:17:46,802 INFO [train.py:903] (3/4) Epoch 28, batch 4750, loss[loss=0.1918, simple_loss=0.2816, pruned_loss=0.051, over 19575.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.05933, over 3828001.67 frames. ], batch size: 61, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:17:56,578 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-03 12:18:17,602 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.2255, 2.3356, 2.3106, 2.0605, 4.7611, 1.6141, 3.0093, 5.2817], + device='cuda:3'), covar=tensor([0.0397, 0.2323, 0.2625, 0.1977, 0.0678, 0.2551, 0.1314, 0.0135], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0381, 0.0400, 0.0354, 0.0383, 0.0360, 0.0399, 0.0421], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:18:22,692 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. limit=2.0 +2023-04-03 12:18:47,826 INFO [train.py:903] (3/4) Epoch 28, batch 4800, loss[loss=0.194, simple_loss=0.2752, pruned_loss=0.05647, over 19791.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2829, pruned_loss=0.05962, over 3835913.75 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:19:16,003 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 5.029e+02 6.156e+02 7.557e+02 1.439e+03, threshold=1.231e+03, percent-clipped=4.0 +2023-04-03 12:19:50,824 INFO [train.py:903] (3/4) Epoch 28, batch 4850, loss[loss=0.2107, simple_loss=0.2976, pruned_loss=0.06188, over 19159.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2843, pruned_loss=0.06039, over 3826864.11 frames. ], batch size: 75, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:20:01,566 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:12,580 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 12:20:32,918 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:20:33,799 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 12:20:39,520 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. 
Duration: 25.035 +2023-04-03 12:20:40,696 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 12:20:51,042 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 12:20:53,316 INFO [train.py:903] (3/4) Epoch 28, batch 4900, loss[loss=0.1689, simple_loss=0.2517, pruned_loss=0.04304, over 19416.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2834, pruned_loss=0.05969, over 3841132.90 frames. ], batch size: 48, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:21:05,668 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189266.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:09,132 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3095, 1.2930, 1.4762, 1.4646, 1.8424, 1.8503, 1.8679, 0.6615], + device='cuda:3'), covar=tensor([0.2634, 0.4558, 0.2801, 0.2109, 0.1663, 0.2448, 0.1428, 0.5177], + device='cuda:3'), in_proj_covar=tensor([0.0558, 0.0675, 0.0760, 0.0511, 0.0639, 0.0551, 0.0675, 0.0577], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:21:10,805 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 12:21:19,662 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.182e+02 4.742e+02 6.083e+02 7.635e+02 1.565e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-04-03 12:21:36,051 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189291.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:38,173 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189293.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:21:53,516 INFO [train.py:903] (3/4) Epoch 28, batch 4950, loss[loss=0.2054, simple_loss=0.2923, pruned_loss=0.05928, over 19768.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06031, over 3842180.25 frames. ], batch size: 56, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:22:10,588 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 12:22:34,588 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 12:22:49,563 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5126, 1.5245, 1.7995, 1.7267, 2.5783, 2.3116, 2.7839, 1.2101], + device='cuda:3'), covar=tensor([0.2645, 0.4574, 0.2980, 0.2038, 0.1749, 0.2280, 0.1672, 0.4927], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0674, 0.0758, 0.0509, 0.0637, 0.0548, 0.0673, 0.0575], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:22:56,069 INFO [train.py:903] (3/4) Epoch 28, batch 5000, loss[loss=0.2415, simple_loss=0.3138, pruned_loss=0.08461, over 13389.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06038, over 3829747.03 frames. ], batch size: 136, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:23:04,849 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-03 12:23:08,787 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189365.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:11,198 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4487, 1.1727, 1.3512, 1.3020, 2.2094, 1.1051, 1.9033, 2.3983], + device='cuda:3'), covar=tensor([0.0534, 0.2203, 0.2261, 0.1506, 0.0594, 0.1938, 0.1719, 0.0435], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0380, 0.0399, 0.0353, 0.0382, 0.0358, 0.0398, 0.0420], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:23:15,677 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 12:23:24,840 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.003e+02 6.019e+02 8.012e+02 1.544e+03, threshold=1.204e+03, percent-clipped=7.0 +2023-04-03 12:23:42,173 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:23:59,713 INFO [train.py:903] (3/4) Epoch 28, batch 5050, loss[loss=0.1958, simple_loss=0.2778, pruned_loss=0.05687, over 19336.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2844, pruned_loss=0.06123, over 3813322.80 frames. ], batch size: 66, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:24:00,182 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:02,612 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:24,216 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189426.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:24:26,723 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7389, 2.7850, 2.4435, 2.8729, 2.6920, 2.3477, 2.2340, 2.7332], + device='cuda:3'), covar=tensor([0.0923, 0.1321, 0.1366, 0.0959, 0.1289, 0.0513, 0.1464, 0.0631], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0359, 0.0318, 0.0256, 0.0304, 0.0255, 0.0321, 0.0262], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:24:33,170 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 12:24:44,047 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189441.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:25:01,203 INFO [train.py:903] (3/4) Epoch 28, batch 5100, loss[loss=0.1686, simple_loss=0.2576, pruned_loss=0.03983, over 19819.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2839, pruned_loss=0.06088, over 3814522.40 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:25:10,105 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 12:25:12,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 12:25:19,115 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. 
Duration: 27.92 +2023-04-03 12:25:27,910 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.508e+02 5.191e+02 6.540e+02 8.236e+02 1.634e+03, threshold=1.308e+03, percent-clipped=9.0 +2023-04-03 12:26:01,392 INFO [train.py:903] (3/4) Epoch 28, batch 5150, loss[loss=0.2153, simple_loss=0.2999, pruned_loss=0.06539, over 19622.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2821, pruned_loss=0.05983, over 3826162.90 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:26:02,925 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189507.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:11,046 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-03 12:26:11,419 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 12:26:18,172 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189519.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:44,896 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189541.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:26:46,846 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 12:27:02,654 INFO [train.py:903] (3/4) Epoch 28, batch 5200, loss[loss=0.2061, simple_loss=0.2788, pruned_loss=0.06672, over 19493.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2812, pruned_loss=0.05941, over 3822819.13 frames. ], batch size: 49, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:27:03,050 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189556.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:27:12,489 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-03 12:27:17,234 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 12:27:27,472 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0190, 1.8524, 1.6609, 2.0173, 1.7830, 1.7175, 1.7370, 1.8898], + device='cuda:3'), covar=tensor([0.1088, 0.1510, 0.1569, 0.1048, 0.1356, 0.0601, 0.1425, 0.0779], + device='cuda:3'), in_proj_covar=tensor([0.0277, 0.0358, 0.0317, 0.0255, 0.0303, 0.0254, 0.0320, 0.0261], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:27:30,434 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.217e+02 4.850e+02 5.941e+02 7.550e+02 1.552e+03, threshold=1.188e+03, percent-clipped=2.0 +2023-04-03 12:27:42,769 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-03 12:28:01,601 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 12:28:04,859 INFO [train.py:903] (3/4) Epoch 28, batch 5250, loss[loss=0.2092, simple_loss=0.2849, pruned_loss=0.06674, over 16010.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.282, pruned_loss=0.05964, over 3819324.96 frames. ], batch size: 35, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:28:09,622 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:28:20,325 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.07 vs. 
limit=5.0 +2023-04-03 12:28:53,302 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2770, 5.7260, 3.3249, 5.0478, 1.1952, 5.8659, 5.7183, 5.9146], + device='cuda:3'), covar=tensor([0.0357, 0.0951, 0.1752, 0.0777, 0.4146, 0.0619, 0.0781, 0.1030], + device='cuda:3'), in_proj_covar=tensor([0.0529, 0.0430, 0.0516, 0.0361, 0.0409, 0.0456, 0.0451, 0.0482], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:29:01,918 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5798, 4.1118, 4.2873, 4.2921, 1.7272, 4.0815, 3.5332, 4.0627], + device='cuda:3'), covar=tensor([0.1831, 0.0897, 0.0648, 0.0766, 0.6206, 0.0968, 0.0769, 0.1124], + device='cuda:3'), in_proj_covar=tensor([0.0816, 0.0783, 0.0992, 0.0871, 0.0862, 0.0757, 0.0584, 0.0923], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:29:04,829 INFO [train.py:903] (3/4) Epoch 28, batch 5300, loss[loss=0.2144, simple_loss=0.295, pruned_loss=0.06692, over 17234.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05981, over 3822255.50 frames. ], batch size: 101, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:29:08,403 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189658.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:12,992 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:15,036 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189664.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:21,498 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 12:29:31,786 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 4.811e+02 6.188e+02 7.440e+02 1.460e+03, threshold=1.238e+03, percent-clipped=2.0 +2023-04-03 12:29:44,175 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189687.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:29:46,199 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189689.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:05,527 INFO [train.py:903] (3/4) Epoch 28, batch 5350, loss[loss=0.1799, simple_loss=0.2677, pruned_loss=0.04601, over 19658.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2828, pruned_loss=0.06009, over 3820558.09 frames. ], batch size: 55, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:30:09,144 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189709.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:26,406 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:29,830 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189725.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:30:38,356 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. 
Duration: 26.438875 +2023-04-03 12:30:42,327 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9549, 2.0040, 2.2565, 2.5662, 1.9361, 2.4872, 2.1989, 2.0293], + device='cuda:3'), covar=tensor([0.4489, 0.4366, 0.2127, 0.2621, 0.4460, 0.2428, 0.5620, 0.3877], + device='cuda:3'), in_proj_covar=tensor([0.0941, 0.1020, 0.0746, 0.0955, 0.0918, 0.0860, 0.0865, 0.0813], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:30:54,232 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=189746.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:04,989 INFO [train.py:903] (3/4) Epoch 28, batch 5400, loss[loss=0.2065, simple_loss=0.2932, pruned_loss=0.05992, over 19659.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.283, pruned_loss=0.06039, over 3817925.98 frames. ], batch size: 55, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:31:16,009 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189763.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:33,364 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 4.839e+02 5.999e+02 7.745e+02 2.007e+03, threshold=1.200e+03, percent-clipped=4.0 +2023-04-03 12:31:44,821 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8473, 3.2945, 3.3424, 3.3713, 1.5400, 3.2258, 2.8158, 3.1407], + device='cuda:3'), covar=tensor([0.1859, 0.1185, 0.0878, 0.1059, 0.5416, 0.1332, 0.0936, 0.1285], + device='cuda:3'), in_proj_covar=tensor([0.0815, 0.0783, 0.0992, 0.0871, 0.0862, 0.0757, 0.0585, 0.0923], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:31:44,944 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189788.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:31:56,152 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:07,655 INFO [train.py:903] (3/4) Epoch 28, batch 5450, loss[loss=0.2058, simple_loss=0.2947, pruned_loss=0.05852, over 18684.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2838, pruned_loss=0.06103, over 3815586.75 frames. 
], batch size: 74, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:32:15,168 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189812.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:18,549 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2783, 3.8119, 3.9237, 3.9382, 1.6964, 3.7187, 3.2115, 3.6737], + device='cuda:3'), covar=tensor([0.1856, 0.1058, 0.0732, 0.0850, 0.5995, 0.1209, 0.0840, 0.1277], + device='cuda:3'), in_proj_covar=tensor([0.0816, 0.0784, 0.0994, 0.0874, 0.0863, 0.0759, 0.0586, 0.0925], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:32:27,681 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189822.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:30,050 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189824.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:32:30,069 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6103, 1.4600, 1.5874, 2.1707, 1.6099, 1.8268, 1.8708, 1.6266], + device='cuda:3'), covar=tensor([0.0884, 0.0976, 0.1001, 0.0745, 0.0869, 0.0850, 0.0885, 0.0758], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0241, 0.0226, 0.0215, 0.0189, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 12:32:45,841 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=189837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:03,746 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.7316, 2.5877, 2.1478, 2.0922, 1.9120, 2.2550, 1.3434, 1.9319], + device='cuda:3'), covar=tensor([0.0775, 0.0751, 0.0771, 0.1200, 0.1215, 0.1332, 0.1424, 0.1131], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0367, 0.0370, 0.0395, 0.0474, 0.0399, 0.0347, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 12:33:08,052 INFO [train.py:903] (3/4) Epoch 28, batch 5500, loss[loss=0.2576, simple_loss=0.326, pruned_loss=0.09455, over 17550.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2838, pruned_loss=0.06094, over 3803300.36 frames. ], batch size: 101, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:33:17,139 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=189863.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:33:30,489 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 12:33:35,034 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.121e+02 4.755e+02 5.876e+02 8.325e+02 1.733e+03, threshold=1.175e+03, percent-clipped=4.0 +2023-04-03 12:34:10,042 INFO [train.py:903] (3/4) Epoch 28, batch 5550, loss[loss=0.1873, simple_loss=0.2777, pruned_loss=0.04843, over 19788.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2822, pruned_loss=0.05996, over 3812533.55 frames. ], batch size: 54, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:34:17,012 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 12:35:06,753 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. 
Duration: 29.735 +2023-04-03 12:35:11,471 INFO [train.py:903] (3/4) Epoch 28, batch 5600, loss[loss=0.1788, simple_loss=0.2565, pruned_loss=0.05054, over 19838.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2823, pruned_loss=0.06003, over 3821111.29 frames. ], batch size: 52, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:35:40,030 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.214e+02 4.597e+02 5.831e+02 7.652e+02 2.230e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 12:35:40,319 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=189978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:35:44,621 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=189981.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:11,950 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190002.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:36:17,387 INFO [train.py:903] (3/4) Epoch 28, batch 5650, loss[loss=0.1856, simple_loss=0.2728, pruned_loss=0.04922, over 19537.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.05979, over 3817504.44 frames. ], batch size: 54, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:36:17,848 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:04,773 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 12:37:18,828 INFO [train.py:903] (3/4) Epoch 28, batch 5700, loss[loss=0.2052, simple_loss=0.2903, pruned_loss=0.06004, over 19708.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2823, pruned_loss=0.06003, over 3822248.22 frames. ], batch size: 59, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:37:28,354 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4010, 2.0997, 1.6665, 1.4583, 1.8892, 1.3882, 1.2731, 1.9151], + device='cuda:3'), covar=tensor([0.1008, 0.0884, 0.1177, 0.0912, 0.0655, 0.1389, 0.0743, 0.0447], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0322, 0.0345, 0.0275, 0.0254, 0.0348, 0.0297, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:37:31,552 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190066.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:37:45,568 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.283e+02 6.430e+02 7.892e+02 1.365e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-04-03 12:37:49,361 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190080.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:02,455 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,036 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190105.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:21,824 INFO [train.py:903] (3/4) Epoch 28, batch 5750, loss[loss=0.2138, simple_loss=0.2979, pruned_loss=0.06484, over 19513.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05941, over 3829530.51 frames. ], batch size: 64, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:38:24,243 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. 
Duration: 33.038875 +2023-04-03 12:38:28,167 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5742, 1.4146, 1.4974, 1.8220, 3.1635, 1.3526, 2.5344, 3.6030], + device='cuda:3'), covar=tensor([0.0524, 0.2911, 0.3104, 0.1683, 0.0676, 0.2399, 0.1349, 0.0268], + device='cuda:3'), in_proj_covar=tensor([0.0424, 0.0378, 0.0398, 0.0353, 0.0382, 0.0357, 0.0397, 0.0419], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:38:32,471 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 12:38:35,228 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190117.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:38:37,064 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 12:39:23,778 INFO [train.py:903] (3/4) Epoch 28, batch 5800, loss[loss=0.2265, simple_loss=0.3018, pruned_loss=0.07558, over 19668.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.06, over 3821255.35 frames. ], batch size: 53, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:39:25,215 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190157.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:39:41,886 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190169.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:39:52,008 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.681e+02 5.738e+02 7.022e+02 1.341e+03, threshold=1.148e+03, percent-clipped=1.0 +2023-04-03 12:39:56,749 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:26,978 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190205.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:40:27,627 INFO [train.py:903] (3/4) Epoch 28, batch 5850, loss[loss=0.3347, simple_loss=0.3661, pruned_loss=0.1516, over 13577.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.06008, over 3820295.36 frames. 
], batch size: 135, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:40:44,021 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5719, 4.1410, 4.2764, 4.2869, 1.8130, 4.0554, 3.4903, 4.0692], + device='cuda:3'), covar=tensor([0.1855, 0.0820, 0.0670, 0.0804, 0.5749, 0.1010, 0.0773, 0.1107], + device='cuda:3'), in_proj_covar=tensor([0.0822, 0.0788, 0.1000, 0.0878, 0.0865, 0.0763, 0.0588, 0.0927], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:41:02,516 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190234.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:03,605 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5440, 4.0755, 4.2323, 4.2263, 1.6816, 4.0052, 3.4484, 4.0134], + device='cuda:3'), covar=tensor([0.1800, 0.1012, 0.0697, 0.0838, 0.6434, 0.1194, 0.0782, 0.1152], + device='cuda:3'), in_proj_covar=tensor([0.0823, 0.0789, 0.1002, 0.0880, 0.0867, 0.0765, 0.0589, 0.0928], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:41:29,989 INFO [train.py:903] (3/4) Epoch 28, batch 5900, loss[loss=0.2063, simple_loss=0.2951, pruned_loss=0.05874, over 19619.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06015, over 3819470.56 frames. ], batch size: 57, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:41:32,298 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 12:41:33,980 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190259.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:41:54,166 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 12:41:56,486 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.583e+02 5.674e+02 7.520e+02 1.844e+03, threshold=1.135e+03, percent-clipped=9.0 +2023-04-03 12:42:32,647 INFO [train.py:903] (3/4) Epoch 28, batch 5950, loss[loss=0.1904, simple_loss=0.2755, pruned_loss=0.05261, over 19692.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2831, pruned_loss=0.05995, over 3818335.67 frames. ], batch size: 60, lr: 2.91e-03, grad_scale: 8.0 +2023-04-03 12:43:08,000 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190334.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:43:34,396 INFO [train.py:903] (3/4) Epoch 28, batch 6000, loss[loss=0.1949, simple_loss=0.2673, pruned_loss=0.06127, over 19462.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2821, pruned_loss=0.05942, over 3821401.04 frames. ], batch size: 49, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:43:34,396 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 12:43:48,493 INFO [train.py:937] (3/4) Epoch 28, validation: loss=0.1668, simple_loss=0.2663, pruned_loss=0.03368, over 944034.00 frames. 
+2023-04-03 12:43:48,494 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 12:44:08,756 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:11,929 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9890, 1.4324, 1.7641, 1.1248, 2.5800, 3.5268, 3.2430, 3.7810], + device='cuda:3'), covar=tensor([0.1760, 0.3785, 0.3449, 0.2714, 0.0660, 0.0216, 0.0252, 0.0303], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0333, 0.0366, 0.0272, 0.0256, 0.0198, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 12:44:15,159 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.216e+02 4.800e+02 5.909e+02 8.119e+02 1.607e+03, threshold=1.182e+03, percent-clipped=4.0 +2023-04-03 12:44:41,370 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190398.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:44:50,437 INFO [train.py:903] (3/4) Epoch 28, batch 6050, loss[loss=0.21, simple_loss=0.2971, pruned_loss=0.06139, over 19740.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2813, pruned_loss=0.05907, over 3828998.50 frames. ], batch size: 51, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:04,537 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:13,171 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1737, 1.3273, 1.7118, 1.0422, 2.2553, 3.0091, 2.6990, 3.1319], + device='cuda:3'), covar=tensor([0.1657, 0.3975, 0.3452, 0.2843, 0.0736, 0.0225, 0.0275, 0.0357], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0333, 0.0366, 0.0272, 0.0255, 0.0197, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 12:45:28,537 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190437.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:50,732 INFO [train.py:903] (3/4) Epoch 28, batch 6100, loss[loss=0.2216, simple_loss=0.3033, pruned_loss=0.06996, over 19374.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2813, pruned_loss=0.05941, over 3816888.42 frames. ], batch size: 66, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:45:55,935 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.55 vs. limit=5.0 +2023-04-03 12:45:57,940 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190461.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:45:59,837 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190462.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:17,944 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.255e+02 4.836e+02 5.950e+02 7.277e+02 1.439e+03, threshold=1.190e+03, percent-clipped=1.0 +2023-04-03 12:46:28,840 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190486.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:46:46,971 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190501.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:46:53,520 INFO [train.py:903] (3/4) Epoch 28, batch 6150, loss[loss=0.1921, simple_loss=0.2792, pruned_loss=0.05248, over 19679.00 frames. 
], tot_loss[loss=0.1993, simple_loss=0.2809, pruned_loss=0.05887, over 3829191.48 frames. ], batch size: 60, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:47:02,045 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190513.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:47:22,391 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2421, 2.2681, 2.4970, 2.9576, 2.2561, 2.7945, 2.4304, 2.2837], + device='cuda:3'), covar=tensor([0.4434, 0.4303, 0.2038, 0.2733, 0.4643, 0.2465, 0.5190, 0.3601], + device='cuda:3'), in_proj_covar=tensor([0.0940, 0.1020, 0.0748, 0.0957, 0.0917, 0.0858, 0.0862, 0.0812], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:47:23,097 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 12:47:23,495 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3788, 2.0667, 1.5924, 1.3891, 1.9519, 1.2598, 1.3298, 1.8960], + device='cuda:3'), covar=tensor([0.1140, 0.0885, 0.1129, 0.0971, 0.0618, 0.1417, 0.0809, 0.0506], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0320, 0.0344, 0.0274, 0.0253, 0.0345, 0.0295, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:47:56,364 INFO [train.py:903] (3/4) Epoch 28, batch 6200, loss[loss=0.171, simple_loss=0.253, pruned_loss=0.04447, over 19751.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2796, pruned_loss=0.05797, over 3843887.42 frames. ], batch size: 46, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:48:23,170 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.897e+02 5.800e+02 7.277e+02 1.783e+03, threshold=1.160e+03, percent-clipped=5.0 +2023-04-03 12:48:45,240 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1596, 2.2112, 2.4248, 2.2763, 3.1589, 2.7283, 3.2563, 2.2363], + device='cuda:3'), covar=tensor([0.2043, 0.3347, 0.2282, 0.1661, 0.1356, 0.1905, 0.1399, 0.3712], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0674, 0.0758, 0.0511, 0.0637, 0.0549, 0.0672, 0.0577], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 12:48:59,515 INFO [train.py:903] (3/4) Epoch 28, batch 6250, loss[loss=0.2124, simple_loss=0.2974, pruned_loss=0.06369, over 18209.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2798, pruned_loss=0.05793, over 3826143.21 frames. ], batch size: 83, lr: 2.90e-03, grad_scale: 16.0 +2023-04-03 12:49:03,317 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190609.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:49:11,021 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190616.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:49:13,816 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.41 vs. 
limit=2.0 +2023-04-03 12:49:22,711 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.3950, 4.0047, 2.5642, 3.4570, 0.7854, 4.0006, 3.7754, 3.9083], + device='cuda:3'), covar=tensor([0.0681, 0.0990, 0.2110, 0.0975, 0.4221, 0.0690, 0.1045, 0.1293], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0434, 0.0521, 0.0362, 0.0412, 0.0459, 0.0453, 0.0485], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:49:26,270 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190627.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:27,621 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190628.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:49:31,970 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 12:50:00,482 INFO [train.py:903] (3/4) Epoch 28, batch 6300, loss[loss=0.1761, simple_loss=0.2501, pruned_loss=0.051, over 19758.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2805, pruned_loss=0.05864, over 3819453.22 frames. ], batch size: 46, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:50:28,872 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190678.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:50:29,804 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.019e+02 4.803e+02 6.276e+02 7.910e+02 2.564e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 12:50:47,639 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=190693.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:51:04,102 INFO [train.py:903] (3/4) Epoch 28, batch 6350, loss[loss=0.1999, simple_loss=0.2811, pruned_loss=0.05941, over 19853.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2812, pruned_loss=0.0592, over 3808443.53 frames. ], batch size: 52, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:06,527 INFO [train.py:903] (3/4) Epoch 28, batch 6400, loss[loss=0.1835, simple_loss=0.2553, pruned_loss=0.05588, over 19031.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05948, over 3801564.00 frames. ], batch size: 42, lr: 2.90e-03, grad_scale: 8.0 +2023-04-03 12:52:13,580 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:52:36,097 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.096e+02 4.755e+02 6.032e+02 8.092e+02 1.400e+03, threshold=1.206e+03, percent-clipped=2.0 +2023-04-03 12:52:52,755 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190793.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:53:04,359 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7558, 4.1118, 4.5815, 4.6120, 1.8572, 4.2896, 3.6553, 4.0362], + device='cuda:3'), covar=tensor([0.2487, 0.1435, 0.0849, 0.1174, 0.7386, 0.2109, 0.1173, 0.1836], + device='cuda:3'), in_proj_covar=tensor([0.0828, 0.0793, 0.1004, 0.0882, 0.0867, 0.0768, 0.0590, 0.0932], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 12:53:08,684 INFO [train.py:903] (3/4) Epoch 28, batch 6450, loss[loss=0.1846, simple_loss=0.2677, pruned_loss=0.05077, over 19857.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2828, pruned_loss=0.0598, over 3803165.54 frames. 
], batch size: 52, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:53:55,122 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 12:54:10,283 INFO [train.py:903] (3/4) Epoch 28, batch 6500, loss[loss=0.259, simple_loss=0.3209, pruned_loss=0.09852, over 13322.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05978, over 3796570.92 frames. ], batch size: 135, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:54:17,913 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 12:54:32,004 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190872.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:54:36,607 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=190876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:54:40,957 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.047e+02 4.758e+02 5.991e+02 7.680e+02 1.999e+03, threshold=1.198e+03, percent-clipped=5.0 +2023-04-03 12:54:47,169 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=190884.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:55:01,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190897.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 12:55:13,651 INFO [train.py:903] (3/4) Epoch 28, batch 6550, loss[loss=0.1991, simple_loss=0.2871, pruned_loss=0.0555, over 19793.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2836, pruned_loss=0.06007, over 3797001.98 frames. ], batch size: 56, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:55:18,346 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=190909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:00,418 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4604, 1.3974, 1.5834, 1.4226, 3.0529, 1.0546, 2.3838, 3.4892], + device='cuda:3'), covar=tensor([0.0542, 0.2926, 0.3014, 0.2044, 0.0754, 0.2715, 0.1267, 0.0268], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0380, 0.0399, 0.0355, 0.0385, 0.0360, 0.0400, 0.0419], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:56:14,190 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190953.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:56:17,106 INFO [train.py:903] (3/4) Epoch 28, batch 6600, loss[loss=0.2086, simple_loss=0.2966, pruned_loss=0.06032, over 19657.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2834, pruned_loss=0.05973, over 3807305.08 frames. ], batch size: 60, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 12:56:35,885 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=190971.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:56:48,224 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.870e+02 6.050e+02 8.150e+02 2.542e+03, threshold=1.210e+03, percent-clipped=13.0 +2023-04-03 12:57:19,904 INFO [train.py:903] (3/4) Epoch 28, batch 6650, loss[loss=0.2689, simple_loss=0.3297, pruned_loss=0.104, over 13627.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06019, over 3802478.45 frames. ], batch size: 136, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:57:31,516 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.51 vs. 
limit=5.0 +2023-04-03 12:57:59,362 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:14,561 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191049.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:17,968 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0434, 1.3127, 1.7502, 1.2804, 2.6954, 3.7404, 3.4529, 4.0164], + device='cuda:3'), covar=tensor([0.1830, 0.3978, 0.3603, 0.2694, 0.0681, 0.0220, 0.0229, 0.0290], + device='cuda:3'), in_proj_covar=tensor([0.0283, 0.0336, 0.0367, 0.0274, 0.0258, 0.0199, 0.0222, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 12:58:20,366 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1388, 3.3709, 1.9383, 2.1163, 3.0101, 1.7181, 1.6972, 2.3876], + device='cuda:3'), covar=tensor([0.1281, 0.0608, 0.1169, 0.0915, 0.0596, 0.1337, 0.0929, 0.0623], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0317, 0.0342, 0.0272, 0.0252, 0.0342, 0.0293, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 12:58:22,342 INFO [train.py:903] (3/4) Epoch 28, batch 6700, loss[loss=0.2537, simple_loss=0.3146, pruned_loss=0.09637, over 13123.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2829, pruned_loss=0.05992, over 3803095.96 frames. ], batch size: 138, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:58:38,439 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191068.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 12:58:45,550 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191074.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:52,986 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.114e+02 5.245e+02 6.198e+02 8.145e+02 2.088e+03, threshold=1.240e+03, percent-clipped=7.0 +2023-04-03 12:58:58,990 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:58:59,053 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 12:59:11,381 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2182, 1.1664, 1.2172, 1.2935, 1.0033, 1.2996, 1.2864, 1.2695], + device='cuda:3'), covar=tensor([0.0923, 0.1000, 0.1061, 0.0698, 0.0965, 0.0919, 0.0878, 0.0810], + device='cuda:3'), in_proj_covar=tensor([0.0215, 0.0224, 0.0230, 0.0241, 0.0228, 0.0216, 0.0190, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 12:59:21,826 INFO [train.py:903] (3/4) Epoch 28, batch 6750, loss[loss=0.23, simple_loss=0.3107, pruned_loss=0.07467, over 19314.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2828, pruned_loss=0.05992, over 3816769.93 frames. 
], batch size: 66, lr: 2.90e-03, grad_scale: 2.0 +2023-04-03 12:59:51,523 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:04,804 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7926, 1.6221, 1.6950, 2.4129, 1.6835, 2.0816, 2.0439, 1.8990], + device='cuda:3'), covar=tensor([0.0846, 0.0886, 0.0967, 0.0660, 0.0895, 0.0782, 0.0897, 0.0667], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0223, 0.0229, 0.0241, 0.0227, 0.0216, 0.0189, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 13:00:11,326 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:13,736 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191152.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:17,984 INFO [train.py:903] (3/4) Epoch 28, batch 6800, loss[loss=0.2338, simple_loss=0.3074, pruned_loss=0.0801, over 19730.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2839, pruned_loss=0.06062, over 3806689.21 frames. ], batch size: 63, lr: 2.90e-03, grad_scale: 4.0 +2023-04-03 13:00:19,437 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:00:37,661 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0778, 1.0801, 1.1759, 1.1796, 1.3544, 1.4227, 1.3462, 0.6135], + device='cuda:3'), covar=tensor([0.1873, 0.3246, 0.1986, 0.1557, 0.1352, 0.1822, 0.1192, 0.4476], + device='cuda:3'), in_proj_covar=tensor([0.0557, 0.0673, 0.0760, 0.0511, 0.0639, 0.0548, 0.0673, 0.0578], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 13:00:45,260 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 5.099e+02 6.292e+02 8.518e+02 1.501e+03, threshold=1.258e+03, percent-clipped=3.0 +2023-04-03 13:01:04,263 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 13:01:05,298 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 13:01:08,826 INFO [train.py:903] (3/4) Epoch 29, batch 0, loss[loss=0.1998, simple_loss=0.281, pruned_loss=0.05936, over 19765.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.281, pruned_loss=0.05936, over 19765.00 frames. ], batch size: 54, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:01:08,827 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 13:01:20,492 INFO [train.py:937] (3/4) Epoch 29, validation: loss=0.1669, simple_loss=0.2669, pruned_loss=0.03339, over 944034.00 frames. +2023-04-03 13:01:20,493 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 13:01:31,763 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 13:01:43,669 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191203.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:02:01,247 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-03 13:02:19,661 INFO [train.py:903] (3/4) Epoch 29, batch 50, loss[loss=0.1816, simple_loss=0.275, pruned_loss=0.04406, over 19669.00 frames. 
], tot_loss[loss=0.2002, simple_loss=0.2818, pruned_loss=0.05934, over 866215.70 frames. ], batch size: 58, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:02:55,009 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 13:03:01,004 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0881, 2.0513, 2.0075, 1.8553, 1.5732, 1.7807, 0.5552, 1.1321], + device='cuda:3'), covar=tensor([0.0731, 0.0654, 0.0469, 0.0801, 0.1330, 0.0877, 0.1417, 0.1197], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0364, 0.0365, 0.0391, 0.0469, 0.0397, 0.0345, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 13:03:15,130 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.210e+02 6.215e+02 8.429e+02 1.754e+03, threshold=1.243e+03, percent-clipped=6.0 +2023-04-03 13:03:18,589 INFO [train.py:903] (3/4) Epoch 29, batch 100, loss[loss=0.2145, simple_loss=0.2987, pruned_loss=0.06513, over 19521.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2826, pruned_loss=0.05843, over 1530661.05 frames. ], batch size: 64, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:03:33,066 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 13:04:07,326 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191324.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:04:19,665 INFO [train.py:903] (3/4) Epoch 29, batch 150, loss[loss=0.1759, simple_loss=0.2571, pruned_loss=0.04733, over 19754.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2814, pruned_loss=0.05881, over 2044766.13 frames. ], batch size: 47, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:04:29,083 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191342.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:04:36,947 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191349.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:04:59,835 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:05,619 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.0943, 2.8261, 2.3788, 2.2568, 2.0517, 2.4633, 1.2287, 2.1051], + device='cuda:3'), covar=tensor([0.0797, 0.0690, 0.0714, 0.1305, 0.1226, 0.1231, 0.1488, 0.1146], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0365, 0.0366, 0.0392, 0.0470, 0.0398, 0.0345, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 13:05:07,807 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8113, 4.3818, 2.9896, 3.7504, 0.9564, 4.3906, 4.2002, 4.3142], + device='cuda:3'), covar=tensor([0.0603, 0.0950, 0.1719, 0.0968, 0.4060, 0.0617, 0.0984, 0.1016], + device='cuda:3'), in_proj_covar=tensor([0.0528, 0.0429, 0.0514, 0.0358, 0.0407, 0.0454, 0.0448, 0.0479], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:05:15,358 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.397e+02 5.272e+02 6.303e+02 7.742e+02 1.475e+03, threshold=1.261e+03, percent-clipped=3.0 +2023-04-03 13:05:15,420 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. 
Duration: 27.14 +2023-04-03 13:05:18,079 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=191383.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:05:18,891 INFO [train.py:903] (3/4) Epoch 29, batch 200, loss[loss=0.1705, simple_loss=0.2501, pruned_loss=0.04541, over 19616.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2826, pruned_loss=0.05926, over 2431209.11 frames. ], batch size: 50, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:05:48,172 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191408.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:13,737 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191430.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:18,511 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:06:20,064 INFO [train.py:903] (3/4) Epoch 29, batch 250, loss[loss=0.1972, simple_loss=0.2824, pruned_loss=0.05604, over 19606.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2819, pruned_loss=0.05906, over 2747939.67 frames. ], batch size: 61, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:11,569 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6967, 1.4950, 1.6037, 2.1566, 1.6075, 1.8996, 1.8931, 1.7210], + device='cuda:3'), covar=tensor([0.0876, 0.0991, 0.1029, 0.0662, 0.0857, 0.0805, 0.0923, 0.0728], + device='cuda:3'), in_proj_covar=tensor([0.0214, 0.0225, 0.0230, 0.0241, 0.0228, 0.0217, 0.0190, 0.0210], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 13:07:16,619 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.311e+02 6.503e+02 8.385e+02 2.446e+03, threshold=1.301e+03, percent-clipped=7.0 +2023-04-03 13:07:20,115 INFO [train.py:903] (3/4) Epoch 29, batch 300, loss[loss=0.1771, simple_loss=0.2619, pruned_loss=0.04618, over 19719.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2816, pruned_loss=0.05896, over 2988612.23 frames. ], batch size: 51, lr: 2.85e-03, grad_scale: 8.0 +2023-04-03 13:07:33,157 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191494.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:07:57,193 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=4.57 vs. limit=5.0 +2023-04-03 13:08:19,686 INFO [train.py:903] (3/4) Epoch 29, batch 350, loss[loss=0.2192, simple_loss=0.2971, pruned_loss=0.07065, over 19428.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2802, pruned_loss=0.05853, over 3183226.84 frames. ], batch size: 64, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:08:27,202 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 13:08:33,102 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191545.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:08:35,146 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191547.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:09:02,265 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.59 vs. 
limit=5.0 +2023-04-03 13:09:16,624 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.772e+02 5.697e+02 6.938e+02 1.378e+03, threshold=1.139e+03, percent-clipped=1.0 +2023-04-03 13:09:19,980 INFO [train.py:903] (3/4) Epoch 29, batch 400, loss[loss=0.2, simple_loss=0.2808, pruned_loss=0.05965, over 19576.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2797, pruned_loss=0.05826, over 3337003.81 frames. ], batch size: 52, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:09:26,934 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9505, 2.0206, 2.2812, 2.6185, 1.9244, 2.4462, 2.2350, 2.0722], + device='cuda:3'), covar=tensor([0.4300, 0.4084, 0.2048, 0.2479, 0.4306, 0.2296, 0.5101, 0.3529], + device='cuda:3'), in_proj_covar=tensor([0.0941, 0.1020, 0.0746, 0.0955, 0.0918, 0.0859, 0.0863, 0.0810], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 13:09:50,423 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191609.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:19,938 INFO [train.py:903] (3/4) Epoch 29, batch 450, loss[loss=0.22, simple_loss=0.2985, pruned_loss=0.07078, over 19769.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.28, pruned_loss=0.05843, over 3458972.61 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:10:55,239 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:10:57,252 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 13:10:58,392 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. Duration: 25.3333125 +2023-04-03 13:11:17,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 4.702e+02 5.973e+02 7.083e+02 1.398e+03, threshold=1.195e+03, percent-clipped=2.0 +2023-04-03 13:11:20,347 INFO [train.py:903] (3/4) Epoch 29, batch 500, loss[loss=0.1811, simple_loss=0.2669, pruned_loss=0.04763, over 19779.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.28, pruned_loss=0.05836, over 3551681.65 frames. ], batch size: 56, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:11:27,827 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-04-03 13:12:13,383 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=191727.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:12:21,065 INFO [train.py:903] (3/4) Epoch 29, batch 550, loss[loss=0.2331, simple_loss=0.3052, pruned_loss=0.08048, over 14042.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2796, pruned_loss=0.05831, over 3603269.93 frames. ], batch size: 136, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:19,227 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.419e+02 6.471e+02 8.968e+02 1.855e+03, threshold=1.294e+03, percent-clipped=10.0 +2023-04-03 13:13:22,383 INFO [train.py:903] (3/4) Epoch 29, batch 600, loss[loss=0.2308, simple_loss=0.3086, pruned_loss=0.07647, over 19758.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2804, pruned_loss=0.05863, over 3659399.26 frames. 
], batch size: 63, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:13:42,719 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191801.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:02,346 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 13:14:13,358 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191826.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:14:22,145 INFO [train.py:903] (3/4) Epoch 29, batch 650, loss[loss=0.1821, simple_loss=0.2652, pruned_loss=0.04957, over 19617.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2815, pruned_loss=0.05911, over 3701290.07 frames. ], batch size: 50, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:14:31,535 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=191842.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:01,025 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:18,379 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.883e+02 6.286e+02 7.850e+02 2.365e+03, threshold=1.257e+03, percent-clipped=4.0 +2023-04-03 13:15:21,594 INFO [train.py:903] (3/4) Epoch 29, batch 700, loss[loss=0.2049, simple_loss=0.297, pruned_loss=0.05637, over 19477.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2823, pruned_loss=0.05944, over 3732470.09 frames. ], batch size: 64, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:15:29,601 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191890.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:15:36,392 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8165, 1.3323, 1.0681, 0.9417, 1.1636, 1.0062, 0.8758, 1.2084], + device='cuda:3'), covar=tensor([0.0731, 0.1003, 0.1248, 0.0879, 0.0679, 0.1422, 0.0724, 0.0585], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0318, 0.0342, 0.0272, 0.0252, 0.0345, 0.0293, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:16:03,823 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=191918.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:16:23,389 INFO [train.py:903] (3/4) Epoch 29, batch 750, loss[loss=0.1562, simple_loss=0.2451, pruned_loss=0.03368, over 19619.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2811, pruned_loss=0.05885, over 3763498.37 frames. 
], batch size: 50, lr: 2.84e-03, grad_scale: 4.0 +2023-04-03 13:16:29,942 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3502, 2.3583, 2.6836, 3.1759, 2.3916, 2.9333, 2.6724, 2.5024], + device='cuda:3'), covar=tensor([0.4294, 0.4389, 0.1939, 0.2631, 0.4696, 0.2391, 0.5049, 0.3390], + device='cuda:3'), in_proj_covar=tensor([0.0944, 0.1021, 0.0748, 0.0956, 0.0920, 0.0860, 0.0867, 0.0811], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 13:16:34,302 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=191943.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:17:21,356 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.754e+02 5.591e+02 7.301e+02 1.189e+03, threshold=1.118e+03, percent-clipped=0.0 +2023-04-03 13:17:23,708 INFO [train.py:903] (3/4) Epoch 29, batch 800, loss[loss=0.1957, simple_loss=0.2824, pruned_loss=0.05452, over 19758.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05863, over 3790209.47 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:17:36,669 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. Duration: 0.9681875 +2023-04-03 13:17:38,139 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8543, 4.4571, 2.6595, 3.8723, 0.7876, 4.3946, 4.2847, 4.3317], + device='cuda:3'), covar=tensor([0.0520, 0.0826, 0.1940, 0.0842, 0.4060, 0.0653, 0.0897, 0.1055], + device='cuda:3'), in_proj_covar=tensor([0.0530, 0.0431, 0.0518, 0.0360, 0.0410, 0.0457, 0.0454, 0.0483], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:17:51,917 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.1727, 1.8122, 2.1090, 1.9500, 4.6254, 1.2863, 2.6972, 5.0904], + device='cuda:3'), covar=tensor([0.0446, 0.2767, 0.2615, 0.1947, 0.0730, 0.2704, 0.1366, 0.0161], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0380, 0.0400, 0.0357, 0.0385, 0.0360, 0.0400, 0.0420], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:18:26,431 INFO [train.py:903] (3/4) Epoch 29, batch 850, loss[loss=0.1818, simple_loss=0.2686, pruned_loss=0.04754, over 19688.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2811, pruned_loss=0.0584, over 3800055.20 frames. ], batch size: 53, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:17,579 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 13:19:23,133 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.899e+02 4.771e+02 5.573e+02 7.809e+02 2.111e+03, threshold=1.115e+03, percent-clipped=7.0 +2023-04-03 13:19:25,389 INFO [train.py:903] (3/4) Epoch 29, batch 900, loss[loss=0.1929, simple_loss=0.2772, pruned_loss=0.05432, over 18708.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2812, pruned_loss=0.05856, over 3808381.02 frames. ], batch size: 74, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:19:43,240 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192098.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:19:53,200 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-03 13:20:09,373 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192120.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:10,714 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6482, 1.7359, 2.0154, 2.0190, 1.5473, 1.9298, 1.9783, 1.8671], + device='cuda:3'), covar=tensor([0.4351, 0.4067, 0.2061, 0.2546, 0.4020, 0.2376, 0.5440, 0.3548], + device='cuda:3'), in_proj_covar=tensor([0.0949, 0.1027, 0.0753, 0.0962, 0.0923, 0.0864, 0.0870, 0.0815], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 13:20:12,931 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192123.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:20:26,584 INFO [train.py:903] (3/4) Epoch 29, batch 950, loss[loss=0.1904, simple_loss=0.2806, pruned_loss=0.0501, over 19602.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.281, pruned_loss=0.05828, over 3824112.90 frames. ], batch size: 57, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:20:30,191 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 13:21:13,719 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192173.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:21:24,649 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.322e+02 5.272e+02 6.130e+02 7.632e+02 1.213e+03, threshold=1.226e+03, percent-clipped=2.0 +2023-04-03 13:21:27,723 INFO [train.py:903] (3/4) Epoch 29, batch 1000, loss[loss=0.1975, simple_loss=0.2808, pruned_loss=0.05708, over 19588.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2807, pruned_loss=0.05791, over 3828846.08 frames. ], batch size: 52, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:21,150 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2468, 1.9914, 1.5280, 1.3500, 1.8467, 1.1732, 1.3284, 1.7471], + device='cuda:3'), covar=tensor([0.1004, 0.0810, 0.1162, 0.0853, 0.0597, 0.1382, 0.0707, 0.0515], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0318, 0.0342, 0.0273, 0.0252, 0.0345, 0.0293, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:22:23,179 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 13:22:27,656 INFO [train.py:903] (3/4) Epoch 29, batch 1050, loss[loss=0.1704, simple_loss=0.2525, pruned_loss=0.04412, over 19424.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2815, pruned_loss=0.05841, over 3835611.63 frames. ], batch size: 48, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:22:31,101 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192237.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:34,342 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192239.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:22:55,092 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-03 13:23:02,363 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. 
Duration: 0.7545625 +2023-04-03 13:23:25,657 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.003e+02 4.755e+02 5.709e+02 7.263e+02 1.610e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 13:23:28,075 INFO [train.py:903] (3/4) Epoch 29, batch 1100, loss[loss=0.2182, simple_loss=0.299, pruned_loss=0.06863, over 17435.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2811, pruned_loss=0.05823, over 3824789.86 frames. ], batch size: 101, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:29,302 INFO [train.py:903] (3/4) Epoch 29, batch 1150, loss[loss=0.1727, simple_loss=0.2497, pruned_loss=0.0479, over 19384.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.281, pruned_loss=0.0583, over 3817021.56 frames. ], batch size: 47, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:24:36,022 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6179, 1.4727, 1.5277, 2.0049, 1.5043, 1.8370, 1.8442, 1.6848], + device='cuda:3'), covar=tensor([0.0852, 0.0943, 0.1007, 0.0681, 0.0950, 0.0793, 0.0887, 0.0713], + device='cuda:3'), in_proj_covar=tensor([0.0215, 0.0224, 0.0230, 0.0241, 0.0228, 0.0216, 0.0189, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 13:25:27,744 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 4.839e+02 5.803e+02 7.271e+02 1.538e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 13:25:30,849 INFO [train.py:903] (3/4) Epoch 29, batch 1200, loss[loss=0.2032, simple_loss=0.2952, pruned_loss=0.05555, over 19640.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.05818, over 3825693.50 frames. ], batch size: 57, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:01,113 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:01,919 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 13:26:10,314 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192417.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:26:30,220 INFO [train.py:903] (3/4) Epoch 29, batch 1250, loss[loss=0.2356, simple_loss=0.3066, pruned_loss=0.08232, over 19584.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2815, pruned_loss=0.05882, over 3815994.97 frames. ], batch size: 61, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:26:50,091 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192451.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:07,166 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:27:28,115 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.043e+02 5.075e+02 6.202e+02 7.494e+02 1.680e+03, threshold=1.240e+03, percent-clipped=4.0 +2023-04-03 13:27:30,143 INFO [train.py:903] (3/4) Epoch 29, batch 1300, loss[loss=0.274, simple_loss=0.3288, pruned_loss=0.1097, over 12666.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2829, pruned_loss=0.06004, over 3802406.26 frames. ], batch size: 136, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:28:07,574 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-04-03 13:28:10,139 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:28:30,907 INFO [train.py:903] (3/4) Epoch 29, batch 1350, loss[loss=0.2175, simple_loss=0.3045, pruned_loss=0.0653, over 19581.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.06055, over 3804369.18 frames. ], batch size: 61, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:25,906 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192579.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:27,889 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192581.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:28,923 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.238e+02 5.533e+02 7.774e+02 1.028e+03 2.542e+03, threshold=1.555e+03, percent-clipped=13.0 +2023-04-03 13:29:31,164 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192583.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:29:32,199 INFO [train.py:903] (3/4) Epoch 29, batch 1400, loss[loss=0.1983, simple_loss=0.2826, pruned_loss=0.05704, over 19744.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2833, pruned_loss=0.06044, over 3814235.18 frames. ], batch size: 63, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:29:40,690 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6511, 1.2346, 1.2600, 1.5230, 1.0719, 1.4065, 1.2561, 1.4706], + device='cuda:3'), covar=tensor([0.1192, 0.1302, 0.1729, 0.1086, 0.1424, 0.0663, 0.1653, 0.0890], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0361, 0.0320, 0.0259, 0.0307, 0.0257, 0.0323, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 13:30:30,407 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192632.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:32,442 INFO [train.py:903] (3/4) Epoch 29, batch 1450, loss[loss=0.2133, simple_loss=0.2968, pruned_loss=0.06485, over 19601.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06062, over 3819094.29 frames. ], batch size: 57, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:30:32,473 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 13:30:32,745 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192634.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:30:48,187 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7344, 1.7454, 1.6846, 1.5236, 1.3869, 1.3987, 0.3384, 0.7019], + device='cuda:3'), covar=tensor([0.0693, 0.0668, 0.0452, 0.0668, 0.1369, 0.0817, 0.1371, 0.1176], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0365, 0.0368, 0.0392, 0.0472, 0.0399, 0.0346, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 13:30:49,186 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5831, 1.1449, 1.3123, 1.1878, 2.2244, 1.0988, 2.1017, 2.5721], + device='cuda:3'), covar=tensor([0.0730, 0.2916, 0.3182, 0.1925, 0.0869, 0.2177, 0.1228, 0.0435], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0379, 0.0400, 0.0355, 0.0385, 0.0360, 0.0399, 0.0421], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:31:29,875 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.834e+02 5.734e+02 7.377e+02 1.406e+03, threshold=1.147e+03, percent-clipped=0.0 +2023-04-03 13:31:32,942 INFO [train.py:903] (3/4) Epoch 29, batch 1500, loss[loss=0.1729, simple_loss=0.2606, pruned_loss=0.04254, over 19624.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06047, over 3808572.82 frames. ], batch size: 50, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:31:41,392 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192691.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:47,436 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192696.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:31:50,561 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192698.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:32,341 INFO [train.py:903] (3/4) Epoch 29, batch 1550, loss[loss=0.223, simple_loss=0.29, pruned_loss=0.07801, over 19615.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.283, pruned_loss=0.06057, over 3818980.77 frames. ], batch size: 50, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:32:56,577 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192753.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:32:57,543 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:05,616 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:31,831 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 5.130e+02 6.287e+02 7.695e+02 1.691e+03, threshold=1.257e+03, percent-clipped=3.0 +2023-04-03 13:33:34,857 INFO [train.py:903] (3/4) Epoch 29, batch 1600, loss[loss=0.2272, simple_loss=0.308, pruned_loss=0.07319, over 18678.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06028, over 3810662.07 frames. 
], batch size: 74, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:33:47,559 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:33:57,596 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. Duration: 30.1555625 +2023-04-03 13:34:34,143 INFO [train.py:903] (3/4) Epoch 29, batch 1650, loss[loss=0.1755, simple_loss=0.2617, pruned_loss=0.04466, over 19696.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2835, pruned_loss=0.06023, over 3825637.21 frames. ], batch size: 53, lr: 2.84e-03, grad_scale: 8.0 +2023-04-03 13:34:35,741 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:34:51,801 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=192849.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:35:04,865 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192860.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:15,872 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:23,843 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192876.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:25,041 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0311, 1.8127, 1.6781, 2.1149, 1.7818, 1.7200, 1.7156, 1.9192], + device='cuda:3'), covar=tensor([0.1107, 0.1622, 0.1644, 0.1117, 0.1498, 0.0635, 0.1576, 0.0846], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0362, 0.0321, 0.0259, 0.0308, 0.0258, 0.0324, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 13:35:30,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.635e+02 4.944e+02 5.872e+02 7.824e+02 1.796e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 13:35:32,715 INFO [train.py:903] (3/4) Epoch 29, batch 1700, loss[loss=0.228, simple_loss=0.305, pruned_loss=0.0755, over 19315.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2836, pruned_loss=0.06041, over 3818710.20 frames. ], batch size: 66, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:35:38,395 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192888.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:35:52,009 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-04-03 13:36:04,838 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=192910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:08,206 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192913.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:13,393 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 13:36:32,300 INFO [train.py:903] (3/4) Epoch 29, batch 1750, loss[loss=0.2137, simple_loss=0.2923, pruned_loss=0.06752, over 19594.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.283, pruned_loss=0.06035, over 3822591.92 frames. 
], batch size: 52, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:36:55,847 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192952.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:36:58,027 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=192954.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:24,866 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:25,783 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=192978.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:27,198 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=192979.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:37:31,717 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 4.741e+02 6.117e+02 7.797e+02 1.758e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 13:37:34,999 INFO [train.py:903] (3/4) Epoch 29, batch 1800, loss[loss=0.2206, simple_loss=0.308, pruned_loss=0.06661, over 19676.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.283, pruned_loss=0.06011, over 3830694.82 frames. ], batch size: 60, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:29,372 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 13:38:33,981 INFO [train.py:903] (3/4) Epoch 29, batch 1850, loss[loss=0.2068, simple_loss=0.2887, pruned_loss=0.06246, over 19729.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.283, pruned_loss=0.06064, over 3816161.70 frames. ], batch size: 63, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:38:35,285 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193035.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:04,664 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193059.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:06,412 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 13:39:32,451 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.498e+02 4.794e+02 5.817e+02 7.567e+02 2.266e+03, threshold=1.163e+03, percent-clipped=4.0 +2023-04-03 13:39:34,764 INFO [train.py:903] (3/4) Epoch 29, batch 1900, loss[loss=0.2381, simple_loss=0.312, pruned_loss=0.08215, over 17480.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05998, over 3819316.17 frames. ], batch size: 101, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:39:45,997 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:49,045 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 13:39:51,204 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193097.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:39:55,327 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 13:40:18,690 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. 
Duration: 0.92225 +2023-04-03 13:40:25,438 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:33,605 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193132.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:35,437 INFO [train.py:903] (3/4) Epoch 29, batch 1950, loss[loss=0.1967, simple_loss=0.2787, pruned_loss=0.05735, over 19677.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2821, pruned_loss=0.05961, over 3826402.78 frames. ], batch size: 53, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:40:53,402 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-03 13:40:56,452 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:40:56,492 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:04,228 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193157.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:08,740 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8990, 0.8199, 0.8656, 1.0477, 0.8341, 0.9263, 0.9374, 0.8959], + device='cuda:3'), covar=tensor([0.0810, 0.0923, 0.0948, 0.0571, 0.0939, 0.0799, 0.0861, 0.0755], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0230, 0.0240, 0.0227, 0.0215, 0.0189, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 13:41:15,432 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193166.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:34,714 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.104e+02 4.788e+02 5.811e+02 7.006e+02 1.429e+03, threshold=1.162e+03, percent-clipped=3.0 +2023-04-03 13:41:37,852 INFO [train.py:903] (3/4) Epoch 29, batch 2000, loss[loss=0.1823, simple_loss=0.2725, pruned_loss=0.04608, over 19665.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2823, pruned_loss=0.05953, over 3827725.74 frames. ], batch size: 58, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:41:46,097 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:41:48,034 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193193.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:41:59,702 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.34 vs. limit=5.0 +2023-04-03 13:42:10,516 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193212.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:31,349 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 13:42:31,599 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193229.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:42:37,084 INFO [train.py:903] (3/4) Epoch 29, batch 2050, loss[loss=0.1782, simple_loss=0.2513, pruned_loss=0.05252, over 19312.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05982, over 3842619.07 frames. 
], batch size: 44, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:42:50,732 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 13:42:50,762 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 13:43:13,504 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 13:43:34,587 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.912e+02 6.403e+02 8.614e+02 2.001e+03, threshold=1.281e+03, percent-clipped=8.0 +2023-04-03 13:43:36,910 INFO [train.py:903] (3/4) Epoch 29, batch 2100, loss[loss=0.1972, simple_loss=0.2835, pruned_loss=0.05539, over 19774.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2831, pruned_loss=0.06034, over 3838333.02 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:44:00,083 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193302.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:44:07,583 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 13:44:07,963 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193308.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:44:25,618 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 13:44:37,257 INFO [train.py:903] (3/4) Epoch 29, batch 2150, loss[loss=0.1794, simple_loss=0.2601, pruned_loss=0.04938, over 19468.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.283, pruned_loss=0.06019, over 3828803.78 frames. ], batch size: 49, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:44:56,808 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193349.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:25,847 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193374.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:45:35,169 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 5.242e+02 6.391e+02 8.913e+02 2.261e+03, threshold=1.278e+03, percent-clipped=9.0 +2023-04-03 13:45:37,407 INFO [train.py:903] (3/4) Epoch 29, batch 2200, loss[loss=0.1795, simple_loss=0.2665, pruned_loss=0.04627, over 19503.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2842, pruned_loss=0.06077, over 3822527.44 frames. 
], batch size: 64, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:00,721 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:04,436 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193406.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:30,803 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2053, 1.5417, 2.0253, 1.5108, 2.9929, 4.5792, 4.4259, 4.9941], + device='cuda:3'), covar=tensor([0.1743, 0.3887, 0.3416, 0.2543, 0.0713, 0.0210, 0.0176, 0.0211], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0273, 0.0257, 0.0198, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 13:46:36,066 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193431.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:46:39,105 INFO [train.py:903] (3/4) Epoch 29, batch 2250, loss[loss=0.2387, simple_loss=0.31, pruned_loss=0.08369, over 19577.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2835, pruned_loss=0.06046, over 3828535.51 frames. ], batch size: 61, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:46:45,262 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:20,663 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193468.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:25,803 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193472.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:47:26,835 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2428, 1.3193, 1.7864, 1.2856, 2.5079, 3.2981, 3.0302, 3.4715], + device='cuda:3'), covar=tensor([0.1704, 0.4116, 0.3549, 0.2725, 0.0712, 0.0232, 0.0281, 0.0384], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0257, 0.0198, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 13:47:36,425 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 4.884e+02 5.894e+02 7.303e+02 1.661e+03, threshold=1.179e+03, percent-clipped=4.0 +2023-04-03 13:47:38,742 INFO [train.py:903] (3/4) Epoch 29, batch 2300, loss[loss=0.189, simple_loss=0.2759, pruned_loss=0.05102, over 18045.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2833, pruned_loss=0.06023, over 3821448.49 frames. ], batch size: 83, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:47:50,478 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193493.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:47:53,671 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 13:48:21,005 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193518.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:48:39,106 INFO [train.py:903] (3/4) Epoch 29, batch 2350, loss[loss=0.1705, simple_loss=0.2571, pruned_loss=0.04194, over 19849.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.283, pruned_loss=0.05982, over 3829116.09 frames. 
], batch size: 52, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:16,327 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193564.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 13:49:20,453 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 13:49:27,329 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193573.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:49:36,503 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 13:49:37,596 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.068e+02 4.546e+02 5.901e+02 7.828e+02 1.716e+03, threshold=1.180e+03, percent-clipped=11.0 +2023-04-03 13:49:40,761 INFO [train.py:903] (3/4) Epoch 29, batch 2400, loss[loss=0.2074, simple_loss=0.305, pruned_loss=0.05487, over 19684.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2829, pruned_loss=0.05963, over 3824000.58 frames. ], batch size: 55, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:49:47,424 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193589.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:50:40,462 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:50:41,253 INFO [train.py:903] (3/4) Epoch 29, batch 2450, loss[loss=0.1993, simple_loss=0.2802, pruned_loss=0.05917, over 19624.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.283, pruned_loss=0.0599, over 3812408.27 frames. ], batch size: 50, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:50:54,660 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193646.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:51:39,135 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.882e+02 5.957e+02 8.043e+02 1.393e+03, threshold=1.191e+03, percent-clipped=5.0 +2023-04-03 13:51:39,619 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2347, 2.1688, 2.1032, 1.9887, 1.7565, 1.8713, 0.6729, 1.2070], + device='cuda:3'), covar=tensor([0.0707, 0.0655, 0.0481, 0.0775, 0.1208, 0.0927, 0.1435, 0.1190], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0368, 0.0371, 0.0395, 0.0475, 0.0402, 0.0348, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 13:51:41,321 INFO [train.py:903] (3/4) Epoch 29, batch 2500, loss[loss=0.2114, simple_loss=0.2795, pruned_loss=0.07164, over 19801.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2824, pruned_loss=0.05946, over 3805665.17 frames. ], batch size: 49, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:51:45,911 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193688.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:52:40,704 INFO [train.py:903] (3/4) Epoch 29, batch 2550, loss[loss=0.1714, simple_loss=0.2514, pruned_loss=0.04568, over 17750.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2831, pruned_loss=0.06014, over 3807881.24 frames. 
], batch size: 39, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:53:15,362 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193761.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:30,078 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193774.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:35,078 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 13:53:41,002 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 5.186e+02 6.141e+02 8.140e+02 1.584e+03, threshold=1.228e+03, percent-clipped=10.0 +2023-04-03 13:53:41,172 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193783.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:53:42,128 INFO [train.py:903] (3/4) Epoch 29, batch 2600, loss[loss=0.2027, simple_loss=0.2741, pruned_loss=0.06568, over 19401.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2835, pruned_loss=0.06013, over 3815412.72 frames. ], batch size: 47, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:02,298 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193799.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:54:22,070 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193816.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:54:39,253 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8020, 4.3936, 2.7365, 3.8500, 0.9167, 4.3898, 4.2238, 4.3323], + device='cuda:3'), covar=tensor([0.0604, 0.0975, 0.2006, 0.0881, 0.4188, 0.0625, 0.0967, 0.1178], + device='cuda:3'), in_proj_covar=tensor([0.0532, 0.0430, 0.0520, 0.0358, 0.0412, 0.0457, 0.0453, 0.0485], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:54:44,187 INFO [train.py:903] (3/4) Epoch 29, batch 2650, loss[loss=0.2467, simple_loss=0.3177, pruned_loss=0.08785, over 17676.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2844, pruned_loss=0.06043, over 3798157.18 frames. ], batch size: 101, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:54:57,015 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7478, 1.7684, 1.7681, 1.5428, 1.4953, 1.5286, 0.2981, 0.7114], + device='cuda:3'), covar=tensor([0.0768, 0.0727, 0.0452, 0.0689, 0.1357, 0.0823, 0.1434, 0.1250], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0366, 0.0370, 0.0395, 0.0474, 0.0400, 0.0348, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 13:54:59,846 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. Duration: 27.25 +2023-04-03 13:55:43,537 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 4.600e+02 5.538e+02 7.187e+02 1.315e+03, threshold=1.108e+03, percent-clipped=1.0 +2023-04-03 13:55:44,725 INFO [train.py:903] (3/4) Epoch 29, batch 2700, loss[loss=0.2178, simple_loss=0.2957, pruned_loss=0.06996, over 12911.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2835, pruned_loss=0.05972, over 3793540.50 frames. 
], batch size: 135, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:56:00,585 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193898.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:05,931 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=193902.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:56:42,165 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=193931.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 13:56:44,218 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.4810, 5.0741, 3.1529, 4.4256, 1.5858, 5.0883, 4.9108, 5.0439], + device='cuda:3'), covar=tensor([0.0401, 0.0760, 0.1718, 0.0737, 0.3451, 0.0568, 0.0848, 0.1137], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0431, 0.0520, 0.0358, 0.0412, 0.0457, 0.0453, 0.0485], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:56:45,191 INFO [train.py:903] (3/4) Epoch 29, batch 2750, loss[loss=0.1845, simple_loss=0.2716, pruned_loss=0.04869, over 17362.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2834, pruned_loss=0.05966, over 3799693.55 frames. ], batch size: 101, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:56:58,025 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=193944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:27,813 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=193969.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:36,672 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=193977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:57:44,123 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 4.680e+02 5.858e+02 7.532e+02 1.982e+03, threshold=1.172e+03, percent-clipped=4.0 +2023-04-03 13:57:45,310 INFO [train.py:903] (3/4) Epoch 29, batch 2800, loss[loss=0.2025, simple_loss=0.2838, pruned_loss=0.06066, over 19340.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06035, over 3790889.86 frames. ], batch size: 66, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:57:55,488 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8440, 1.3472, 1.0814, 1.0196, 1.1698, 1.0286, 0.9063, 1.2411], + device='cuda:3'), covar=tensor([0.0759, 0.0913, 0.1224, 0.0886, 0.0624, 0.1409, 0.0685, 0.0563], + device='cuda:3'), in_proj_covar=tensor([0.0301, 0.0318, 0.0341, 0.0272, 0.0252, 0.0344, 0.0291, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 13:58:26,695 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194017.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:58:48,020 INFO [train.py:903] (3/4) Epoch 29, batch 2850, loss[loss=0.2074, simple_loss=0.2762, pruned_loss=0.06931, over 17749.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2836, pruned_loss=0.06022, over 3792520.81 frames. ], batch size: 39, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:58:58,385 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194042.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:45,136 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. 
Duration: 26.32775 +2023-04-03 13:59:47,410 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.652e+02 5.661e+02 7.187e+02 2.087e+03, threshold=1.132e+03, percent-clipped=4.0 +2023-04-03 13:59:48,552 INFO [train.py:903] (3/4) Epoch 29, batch 2900, loss[loss=0.196, simple_loss=0.2907, pruned_loss=0.0506, over 19684.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.283, pruned_loss=0.06029, over 3793692.67 frames. ], batch size: 59, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 13:59:55,306 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194090.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 13:59:57,653 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:00:47,881 INFO [train.py:903] (3/4) Epoch 29, batch 2950, loss[loss=0.1966, simple_loss=0.288, pruned_loss=0.05263, over 19543.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2832, pruned_loss=0.06071, over 3799059.89 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:13,365 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194154.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:42,969 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194179.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:01:46,911 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.793e+02 4.745e+02 5.839e+02 7.587e+02 1.918e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-04-03 14:01:48,097 INFO [train.py:903] (3/4) Epoch 29, batch 3000, loss[loss=0.196, simple_loss=0.2832, pruned_loss=0.05438, over 19794.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2815, pruned_loss=0.05971, over 3802800.96 frames. ], batch size: 56, lr: 2.83e-03, grad_scale: 8.0 +2023-04-03 14:01:48,097 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 14:02:01,076 INFO [train.py:937] (3/4) Epoch 29, validation: loss=0.1668, simple_loss=0.2661, pruned_loss=0.03375, over 944034.00 frames. +2023-04-03 14:02:01,077 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 14:02:02,348 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 14:02:05,062 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194187.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:02:36,777 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194212.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:03:01,745 INFO [train.py:903] (3/4) Epoch 29, batch 3050, loss[loss=0.2147, simple_loss=0.2998, pruned_loss=0.0648, over 18781.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2816, pruned_loss=0.05901, over 3814451.79 frames. ], batch size: 74, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:03:14,228 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.78 vs. 
limit=5.0 +2023-04-03 14:03:17,761 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194246.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:03:45,897 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.2833, 2.9813, 2.5205, 2.5738, 2.2203, 2.5694, 1.0884, 2.1859], + device='cuda:3'), covar=tensor([0.0711, 0.0598, 0.0622, 0.1020, 0.1126, 0.1172, 0.1466, 0.1116], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0366, 0.0370, 0.0394, 0.0475, 0.0401, 0.0349, 0.0350], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 14:03:54,724 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2812, 2.3215, 2.6135, 2.9812, 2.2919, 2.8407, 2.6022, 2.4516], + device='cuda:3'), covar=tensor([0.4475, 0.4558, 0.2015, 0.2849, 0.4945, 0.2512, 0.4969, 0.3391], + device='cuda:3'), in_proj_covar=tensor([0.0943, 0.1021, 0.0747, 0.0955, 0.0919, 0.0858, 0.0863, 0.0811], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:04:02,892 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 5.546e+02 6.449e+02 7.988e+02 2.570e+03, threshold=1.290e+03, percent-clipped=8.0 +2023-04-03 14:04:03,997 INFO [train.py:903] (3/4) Epoch 29, batch 3100, loss[loss=0.2319, simple_loss=0.3014, pruned_loss=0.08124, over 13186.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05931, over 3797384.10 frames. ], batch size: 136, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:04,157 INFO [train.py:903] (3/4) Epoch 29, batch 3150, loss[loss=0.2868, simple_loss=0.355, pruned_loss=0.1093, over 19364.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2838, pruned_loss=0.06034, over 3796579.38 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:05:06,674 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:19,984 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:27,478 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 14:05:36,271 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194361.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:41,708 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1939, 1.3331, 1.7448, 0.9715, 2.4166, 3.1185, 2.8032, 3.2625], + device='cuda:3'), covar=tensor([0.1546, 0.3833, 0.3354, 0.2793, 0.0607, 0.0227, 0.0254, 0.0335], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0333, 0.0366, 0.0273, 0.0256, 0.0198, 0.0220, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 14:05:49,289 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194371.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:51,756 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:05:59,649 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194380.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:06:02,722 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.204e+02 4.902e+02 5.790e+02 7.151e+02 3.080e+03, threshold=1.158e+03, percent-clipped=4.0 +2023-04-03 14:06:03,888 INFO [train.py:903] (3/4) Epoch 29, batch 3200, loss[loss=0.2284, simple_loss=0.3045, pruned_loss=0.07613, over 18047.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2835, pruned_loss=0.06052, over 3801283.43 frames. ], batch size: 83, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:06:49,303 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4178, 2.1638, 1.6596, 1.4759, 2.0222, 1.3979, 1.3392, 1.9215], + device='cuda:3'), covar=tensor([0.1047, 0.0788, 0.1041, 0.0929, 0.0575, 0.1322, 0.0726, 0.0472], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0320, 0.0343, 0.0273, 0.0253, 0.0346, 0.0291, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:07:04,918 INFO [train.py:903] (3/4) Epoch 29, batch 3250, loss[loss=0.1783, simple_loss=0.2659, pruned_loss=0.04532, over 19600.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2831, pruned_loss=0.06003, over 3806350.62 frames. ], batch size: 50, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:07:05,062 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194434.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:07:23,495 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3720, 1.4932, 1.7962, 1.6062, 2.5054, 2.0348, 2.7041, 1.1822], + device='cuda:3'), covar=tensor([0.2688, 0.4252, 0.2655, 0.2162, 0.1497, 0.2424, 0.1275, 0.4901], + device='cuda:3'), in_proj_covar=tensor([0.0557, 0.0675, 0.0763, 0.0511, 0.0638, 0.0549, 0.0672, 0.0576], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:08:04,290 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.916e+02 6.594e+02 9.076e+02 2.390e+03, threshold=1.319e+03, percent-clipped=9.0 +2023-04-03 14:08:05,462 INFO [train.py:903] (3/4) Epoch 29, batch 3300, loss[loss=0.1475, simple_loss=0.2207, pruned_loss=0.03717, over 19040.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.283, pruned_loss=0.06003, over 3820164.43 frames. 
], batch size: 42, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:08:09,431 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. Duration: 26.8349375 +2023-04-03 14:09:07,373 INFO [train.py:903] (3/4) Epoch 29, batch 3350, loss[loss=0.2125, simple_loss=0.294, pruned_loss=0.06546, over 19661.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2832, pruned_loss=0.06039, over 3805816.33 frames. ], batch size: 55, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:09:24,564 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194549.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:10:06,127 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.313e+02 5.166e+02 6.518e+02 8.363e+02 1.379e+03, threshold=1.304e+03, percent-clipped=1.0 +2023-04-03 14:10:07,296 INFO [train.py:903] (3/4) Epoch 29, batch 3400, loss[loss=0.2064, simple_loss=0.2957, pruned_loss=0.0586, over 19304.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.06027, over 3816145.54 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:10:48,330 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194617.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:07,050 INFO [train.py:903] (3/4) Epoch 29, batch 3450, loss[loss=0.1972, simple_loss=0.2868, pruned_loss=0.05386, over 19535.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.06029, over 3809642.23 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:11:09,272 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 14:11:17,047 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:11:29,993 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194652.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:02,512 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194680.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:05,594 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.890e+02 4.627e+02 5.712e+02 7.363e+02 1.612e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-04-03 14:12:06,669 INFO [train.py:903] (3/4) Epoch 29, batch 3500, loss[loss=0.1958, simple_loss=0.2766, pruned_loss=0.05754, over 19724.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2824, pruned_loss=0.06001, over 3815046.21 frames. ], batch size: 51, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:12:43,655 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194715.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:12:54,394 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194724.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:13:06,886 INFO [train.py:903] (3/4) Epoch 29, batch 3550, loss[loss=0.2501, simple_loss=0.3236, pruned_loss=0.08825, over 18141.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2819, pruned_loss=0.05936, over 3819831.40 frames. 
], batch size: 83, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:06,358 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.806e+02 5.826e+02 6.989e+02 1.690e+03, threshold=1.165e+03, percent-clipped=2.0 +2023-04-03 14:14:07,571 INFO [train.py:903] (3/4) Epoch 29, batch 3600, loss[loss=0.2148, simple_loss=0.2964, pruned_loss=0.06662, over 18388.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2813, pruned_loss=0.05887, over 3813850.18 frames. ], batch size: 84, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:14:20,128 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194795.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:14:20,699 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 14:14:30,671 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2225, 1.1797, 1.2224, 1.4300, 1.0703, 1.2250, 1.2894, 1.2369], + device='cuda:3'), covar=tensor([0.0967, 0.1001, 0.1123, 0.0655, 0.0875, 0.0918, 0.0838, 0.0852], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0229, 0.0240, 0.0225, 0.0214, 0.0188, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 14:14:33,723 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=194805.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,762 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:02,804 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=194830.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:15:06,687 INFO [train.py:903] (3/4) Epoch 29, batch 3650, loss[loss=0.2401, simple_loss=0.3136, pruned_loss=0.08326, over 18796.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2822, pruned_loss=0.05941, over 3818071.05 frames. ], batch size: 74, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:15:12,779 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=194839.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:15:45,396 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=194865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:16:05,568 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.663e+02 5.135e+02 6.261e+02 7.520e+02 2.080e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-04-03 14:16:06,486 INFO [train.py:903] (3/4) Epoch 29, batch 3700, loss[loss=0.2306, simple_loss=0.3095, pruned_loss=0.07582, over 19661.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05986, over 3823831.09 frames. ], batch size: 58, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:16:20,683 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5835, 1.2615, 1.4957, 1.2589, 2.2740, 1.0908, 2.1192, 2.5545], + device='cuda:3'), covar=tensor([0.0722, 0.2933, 0.2880, 0.1818, 0.0835, 0.2201, 0.1155, 0.0435], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0382, 0.0402, 0.0356, 0.0387, 0.0361, 0.0400, 0.0421], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:17:08,165 INFO [train.py:903] (3/4) Epoch 29, batch 3750, loss[loss=0.1857, simple_loss=0.274, pruned_loss=0.04871, over 18800.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05947, over 3822300.09 frames. 
], batch size: 74, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:06,318 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.993e+02 4.741e+02 6.225e+02 7.602e+02 2.236e+03, threshold=1.245e+03, percent-clipped=10.0 +2023-04-03 14:18:07,441 INFO [train.py:903] (3/4) Epoch 29, batch 3800, loss[loss=0.2268, simple_loss=0.3075, pruned_loss=0.07311, over 17447.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2832, pruned_loss=0.06022, over 3823892.80 frames. ], batch size: 101, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:18:21,207 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=194996.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:18:34,674 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 14:19:01,234 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195029.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:06,523 INFO [train.py:903] (3/4) Epoch 29, batch 3850, loss[loss=0.1996, simple_loss=0.2635, pruned_loss=0.06781, over 19778.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.284, pruned_loss=0.0604, over 3819533.96 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:19:21,441 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-03 14:19:27,256 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195051.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:19:41,455 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195062.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:19:45,042 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2575, 1.3117, 1.2972, 1.1236, 1.0841, 1.1333, 0.1052, 0.4173], + device='cuda:3'), covar=tensor([0.0826, 0.0754, 0.0514, 0.0637, 0.1403, 0.0726, 0.1441, 0.1215], + device='cuda:3'), in_proj_covar=tensor([0.0371, 0.0369, 0.0374, 0.0398, 0.0478, 0.0403, 0.0350, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 14:19:56,964 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195076.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:06,401 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.743e+02 5.822e+02 7.676e+02 1.767e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-04-03 14:20:06,418 INFO [train.py:903] (3/4) Epoch 29, batch 3900, loss[loss=0.2027, simple_loss=0.2959, pruned_loss=0.0547, over 19711.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06011, over 3824762.76 frames. 
], batch size: 59, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:20:08,924 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195086.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:13,202 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:21,710 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195095.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:20:39,828 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:39,865 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195111.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:20:50,176 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195120.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:21:07,592 INFO [train.py:903] (3/4) Epoch 29, batch 3950, loss[loss=0.2177, simple_loss=0.2983, pruned_loss=0.06849, over 19525.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05938, over 3824424.37 frames. ], batch size: 64, lr: 2.82e-03, grad_scale: 4.0 +2023-04-03 14:21:12,683 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 14:21:20,032 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-03 14:22:08,284 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 4.632e+02 5.828e+02 7.591e+02 1.503e+03, threshold=1.166e+03, percent-clipped=4.0 +2023-04-03 14:22:08,302 INFO [train.py:903] (3/4) Epoch 29, batch 4000, loss[loss=0.1667, simple_loss=0.2533, pruned_loss=0.04009, over 19664.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2828, pruned_loss=0.05953, over 3820431.35 frames. ], batch size: 53, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:22:10,980 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9071, 1.1014, 1.4935, 0.8148, 2.0671, 2.4153, 2.2322, 2.7516], + device='cuda:3'), covar=tensor([0.1826, 0.5245, 0.4538, 0.2863, 0.0742, 0.0360, 0.0406, 0.0424], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0334, 0.0367, 0.0273, 0.0258, 0.0198, 0.0221, 0.0278], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 14:22:38,726 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:22:38,979 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9648, 1.7115, 1.6414, 1.9464, 1.7014, 1.6390, 1.5668, 1.8390], + device='cuda:3'), covar=tensor([0.1133, 0.1533, 0.1592, 0.1050, 0.1352, 0.0625, 0.1583, 0.0817], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0359, 0.0318, 0.0257, 0.0307, 0.0258, 0.0322, 0.0260], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 14:22:52,716 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 14:23:07,865 INFO [train.py:903] (3/4) Epoch 29, batch 4050, loss[loss=0.2009, simple_loss=0.2808, pruned_loss=0.06043, over 19622.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2841, pruned_loss=0.0606, over 3815011.63 frames. 
], batch size: 50, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:08,397 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.197e+02 5.118e+02 6.147e+02 8.377e+02 1.783e+03, threshold=1.229e+03, percent-clipped=5.0 +2023-04-03 14:24:08,416 INFO [train.py:903] (3/4) Epoch 29, batch 4100, loss[loss=0.2265, simple_loss=0.3043, pruned_loss=0.07437, over 19805.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.284, pruned_loss=0.06021, over 3811679.76 frames. ], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:24:41,892 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. Duration: 0.97725 +2023-04-03 14:24:45,299 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3702, 1.3984, 1.7269, 1.6824, 2.3441, 2.0367, 2.3374, 0.9915], + device='cuda:3'), covar=tensor([0.2986, 0.5076, 0.3186, 0.2328, 0.1847, 0.2860, 0.2041, 0.5526], + device='cuda:3'), in_proj_covar=tensor([0.0560, 0.0678, 0.0769, 0.0515, 0.0641, 0.0552, 0.0677, 0.0579], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:24:57,114 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:08,843 INFO [train.py:903] (3/4) Epoch 29, batch 4150, loss[loss=0.1916, simple_loss=0.2841, pruned_loss=0.04958, over 19349.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2837, pruned_loss=0.06002, over 3821802.78 frames. ], batch size: 70, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:25:31,184 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0708, 1.9843, 1.7816, 2.1493, 1.7874, 1.7626, 1.7518, 2.0093], + device='cuda:3'), covar=tensor([0.1150, 0.1476, 0.1560, 0.1079, 0.1457, 0.0615, 0.1476, 0.0771], + device='cuda:3'), in_proj_covar=tensor([0.0278, 0.0358, 0.0317, 0.0256, 0.0307, 0.0257, 0.0322, 0.0259], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:25:49,118 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195367.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:25:56,332 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:10,052 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.652e+02 4.925e+02 6.547e+02 8.865e+02 3.200e+03, threshold=1.309e+03, percent-clipped=9.0 +2023-04-03 14:26:10,070 INFO [train.py:903] (3/4) Epoch 29, batch 4200, loss[loss=0.1998, simple_loss=0.2841, pruned_loss=0.05774, over 17218.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2837, pruned_loss=0.0605, over 3826263.12 frames. ], batch size: 101, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:26:13,437 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. 
Duration: 26.438875 +2023-04-03 14:26:19,382 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195392.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:26:32,833 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0763, 2.0553, 2.2527, 2.1663, 2.9435, 2.5988, 2.9902, 2.0726], + device='cuda:3'), covar=tensor([0.2043, 0.3575, 0.2296, 0.1645, 0.1330, 0.1939, 0.1353, 0.4024], + device='cuda:3'), in_proj_covar=tensor([0.0559, 0.0677, 0.0766, 0.0514, 0.0639, 0.0551, 0.0675, 0.0578], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:26:35,645 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195406.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 14:26:40,932 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=195410.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:07,662 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:27:08,636 INFO [train.py:903] (3/4) Epoch 29, batch 4250, loss[loss=0.1676, simple_loss=0.2432, pruned_loss=0.04607, over 19376.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2851, pruned_loss=0.06141, over 3810085.33 frames. ], batch size: 47, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:27:22,566 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 14:27:32,633 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 14:28:07,588 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5271, 2.2963, 1.6567, 1.5438, 2.0956, 1.3613, 1.3712, 1.9878], + device='cuda:3'), covar=tensor([0.1293, 0.0913, 0.1197, 0.0969, 0.0660, 0.1438, 0.0926, 0.0605], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0323, 0.0345, 0.0274, 0.0255, 0.0349, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:28:08,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 4.660e+02 5.852e+02 7.750e+02 1.648e+03, threshold=1.170e+03, percent-clipped=3.0 +2023-04-03 14:28:08,347 INFO [train.py:903] (3/4) Epoch 29, batch 4300, loss[loss=0.1845, simple_loss=0.2787, pruned_loss=0.04518, over 19524.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2837, pruned_loss=0.06034, over 3817288.56 frames. 
], batch size: 56, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:28:14,173 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195488.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:28:32,657 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3702, 3.8728, 3.9752, 3.9868, 1.6256, 3.7893, 3.3203, 3.7486], + device='cuda:3'), covar=tensor([0.1686, 0.0907, 0.0709, 0.0780, 0.5882, 0.1141, 0.0735, 0.1158], + device='cuda:3'), in_proj_covar=tensor([0.0829, 0.0796, 0.1007, 0.0881, 0.0874, 0.0772, 0.0596, 0.0936], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 14:28:54,514 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195521.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:29:01,491 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. Duration: 33.038875 +2023-04-03 14:29:09,910 INFO [train.py:903] (3/4) Epoch 29, batch 4350, loss[loss=0.1709, simple_loss=0.2578, pruned_loss=0.04204, over 19783.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2833, pruned_loss=0.06015, over 3817697.76 frames. ], batch size: 49, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:29:28,193 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:05,596 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:10,384 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.562e+02 4.994e+02 6.103e+02 7.630e+02 1.747e+03, threshold=1.221e+03, percent-clipped=1.0 +2023-04-03 14:30:10,402 INFO [train.py:903] (3/4) Epoch 29, batch 4400, loss[loss=0.26, simple_loss=0.3317, pruned_loss=0.09418, over 19321.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2826, pruned_loss=0.06025, over 3819801.44 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 8.0 +2023-04-03 14:30:24,505 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5709, 2.3536, 1.7117, 1.5541, 2.1760, 1.4437, 1.4014, 2.0219], + device='cuda:3'), covar=tensor([0.1155, 0.0790, 0.1152, 0.0940, 0.0608, 0.1375, 0.0905, 0.0547], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0321, 0.0343, 0.0273, 0.0254, 0.0347, 0.0291, 0.0275], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:30:29,900 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 14:30:35,588 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:30:38,553 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 14:31:09,646 INFO [train.py:903] (3/4) Epoch 29, batch 4450, loss[loss=0.1852, simple_loss=0.2593, pruned_loss=0.05558, over 19376.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2836, pruned_loss=0.06057, over 3833300.46 frames. 
], batch size: 47, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:32:08,773 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.083e+02 5.988e+02 7.824e+02 1.664e+03, threshold=1.198e+03, percent-clipped=3.0 +2023-04-03 14:32:08,796 INFO [train.py:903] (3/4) Epoch 29, batch 4500, loss[loss=0.241, simple_loss=0.3126, pruned_loss=0.08469, over 19748.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2833, pruned_loss=0.06072, over 3826540.58 frames. ], batch size: 63, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:10,071 INFO [train.py:903] (3/4) Epoch 29, batch 4550, loss[loss=0.219, simple_loss=0.3096, pruned_loss=0.06415, over 19528.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.06127, over 3821372.05 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:33:15,624 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 14:33:21,610 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195744.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:33,977 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=195754.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:33:39,344 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 14:33:51,778 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195769.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:34:01,505 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195777.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:34:08,707 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.333e+02 5.047e+02 5.905e+02 8.200e+02 1.793e+03, threshold=1.181e+03, percent-clipped=7.0 +2023-04-03 14:34:08,728 INFO [train.py:903] (3/4) Epoch 29, batch 4600, loss[loss=0.2065, simple_loss=0.2948, pruned_loss=0.0591, over 19368.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2844, pruned_loss=0.06173, over 3826262.45 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:34:31,284 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195802.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 14:34:33,627 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=195804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:05,200 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=195829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:35:10,606 INFO [train.py:903] (3/4) Epoch 29, batch 4650, loss[loss=0.1853, simple_loss=0.2729, pruned_loss=0.04885, over 19534.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2852, pruned_loss=0.06202, over 3811433.03 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:35:22,177 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 14:35:33,379 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. 
Duration: 0.83 +2023-04-03 14:35:38,583 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6526, 1.4570, 1.5641, 2.2105, 1.6576, 1.8224, 1.7961, 1.6240], + device='cuda:3'), covar=tensor([0.0973, 0.1114, 0.1099, 0.0735, 0.1026, 0.0915, 0.1003, 0.0878], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0225, 0.0229, 0.0241, 0.0227, 0.0214, 0.0189, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 14:35:54,064 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=195869.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:36:10,282 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.073e+02 5.115e+02 6.253e+02 8.443e+02 2.371e+03, threshold=1.251e+03, percent-clipped=7.0 +2023-04-03 14:36:10,300 INFO [train.py:903] (3/4) Epoch 29, batch 4700, loss[loss=0.2269, simple_loss=0.3013, pruned_loss=0.0763, over 13102.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06161, over 3811455.01 frames. ], batch size: 136, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:36:29,177 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 14:36:33,172 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.08 vs. limit=5.0 +2023-04-03 14:36:48,437 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-03 14:37:03,934 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.30 vs. limit=5.0 +2023-04-03 14:37:11,423 INFO [train.py:903] (3/4) Epoch 29, batch 4750, loss[loss=0.1624, simple_loss=0.2361, pruned_loss=0.04437, over 19746.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06064, over 3814499.31 frames. ], batch size: 46, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:37:53,318 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3217, 1.8634, 1.5579, 1.2232, 1.7572, 1.2108, 1.2941, 1.7453], + device='cuda:3'), covar=tensor([0.1006, 0.0940, 0.0981, 0.0999, 0.0614, 0.1303, 0.0766, 0.0484], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0321, 0.0343, 0.0273, 0.0254, 0.0347, 0.0291, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:38:11,673 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.980e+02 6.350e+02 8.555e+02 2.103e+03, threshold=1.270e+03, percent-clipped=3.0 +2023-04-03 14:38:11,690 INFO [train.py:903] (3/4) Epoch 29, batch 4800, loss[loss=0.2005, simple_loss=0.2913, pruned_loss=0.05483, over 19355.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2826, pruned_loss=0.05985, over 3826185.57 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:14,512 INFO [train.py:903] (3/4) Epoch 29, batch 4850, loss[loss=0.2174, simple_loss=0.2983, pruned_loss=0.06818, over 19337.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.283, pruned_loss=0.0599, over 3836633.54 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:39:37,049 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 14:39:55,387 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. 
Duration: 0.9409375 +2023-04-03 14:40:01,753 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 14:40:02,681 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. Duration: 27.511125 +2023-04-03 14:40:11,426 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 14:40:13,829 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.943e+02 4.641e+02 5.767e+02 7.663e+02 1.512e+03, threshold=1.153e+03, percent-clipped=2.0 +2023-04-03 14:40:13,852 INFO [train.py:903] (3/4) Epoch 29, batch 4900, loss[loss=0.1683, simple_loss=0.2537, pruned_loss=0.04143, over 19569.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2835, pruned_loss=0.06032, over 3820142.91 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:40:32,816 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 14:41:03,530 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=196125.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:14,060 INFO [train.py:903] (3/4) Epoch 29, batch 4950, loss[loss=0.2234, simple_loss=0.307, pruned_loss=0.06985, over 19664.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2841, pruned_loss=0.06023, over 3824788.72 frames. ], batch size: 60, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:41:18,075 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.4360, 2.5273, 2.7507, 3.2259, 2.5259, 3.1612, 2.6887, 2.4324], + device='cuda:3'), covar=tensor([0.4287, 0.4249, 0.1976, 0.2609, 0.4237, 0.2192, 0.5254, 0.3596], + device='cuda:3'), in_proj_covar=tensor([0.0947, 0.1029, 0.0751, 0.0962, 0.0927, 0.0865, 0.0866, 0.0815], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:41:31,258 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 14:41:33,821 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=196150.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:41:54,186 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 14:42:14,323 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.427e+02 4.864e+02 6.197e+02 7.725e+02 1.739e+03, threshold=1.239e+03, percent-clipped=10.0 +2023-04-03 14:42:14,341 INFO [train.py:903] (3/4) Epoch 29, batch 5000, loss[loss=0.2247, simple_loss=0.2941, pruned_loss=0.07761, over 19676.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2833, pruned_loss=0.06007, over 3825081.52 frames. ], batch size: 53, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:42:23,520 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. 
Duration: 27.14 +2023-04-03 14:42:33,258 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9334, 2.0243, 2.2471, 2.4216, 1.8537, 2.3743, 2.1960, 2.1348], + device='cuda:3'), covar=tensor([0.4264, 0.4127, 0.2102, 0.2659, 0.4334, 0.2364, 0.5265, 0.3478], + device='cuda:3'), in_proj_covar=tensor([0.0945, 0.1026, 0.0749, 0.0959, 0.0924, 0.0863, 0.0864, 0.0813], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:42:35,050 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 14:43:14,851 INFO [train.py:903] (3/4) Epoch 29, batch 5050, loss[loss=0.2251, simple_loss=0.3003, pruned_loss=0.07499, over 13645.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2829, pruned_loss=0.05987, over 3824598.20 frames. ], batch size: 138, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:43:49,695 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. Duration: 26.62775 +2023-04-03 14:44:15,610 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 4.774e+02 5.782e+02 7.176e+02 1.455e+03, threshold=1.156e+03, percent-clipped=5.0 +2023-04-03 14:44:15,632 INFO [train.py:903] (3/4) Epoch 29, batch 5100, loss[loss=0.1819, simple_loss=0.2682, pruned_loss=0.04775, over 19675.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2826, pruned_loss=0.05935, over 3829392.26 frames. ], batch size: 53, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:44:24,668 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 14:44:27,980 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 14:44:32,507 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 14:45:16,435 INFO [train.py:903] (3/4) Epoch 29, batch 5150, loss[loss=0.1846, simple_loss=0.2687, pruned_loss=0.0503, over 19421.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2833, pruned_loss=0.05959, over 3829034.51 frames. ], batch size: 48, lr: 2.81e-03, grad_scale: 4.0 +2023-04-03 14:45:23,033 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196339.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:45:26,218 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 14:45:59,978 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 14:46:06,892 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4641, 1.5147, 1.7519, 1.6936, 2.4620, 2.1616, 2.6583, 1.1336], + device='cuda:3'), covar=tensor([0.2612, 0.4622, 0.2881, 0.2100, 0.1605, 0.2349, 0.1491, 0.4866], + device='cuda:3'), in_proj_covar=tensor([0.0557, 0.0677, 0.0765, 0.0514, 0.0640, 0.0550, 0.0672, 0.0578], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:46:10,269 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:46:17,532 INFO [train.py:903] (3/4) Epoch 29, batch 5200, loss[loss=0.1897, simple_loss=0.2539, pruned_loss=0.06272, over 19726.00 frames. 
], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05937, over 3830081.35 frames. ], batch size: 46, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:46:18,463 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 5.187e+02 6.337e+02 8.295e+02 1.863e+03, threshold=1.267e+03, percent-clipped=5.0 +2023-04-03 14:46:28,788 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 14:47:12,670 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 14:47:19,220 INFO [train.py:903] (3/4) Epoch 29, batch 5250, loss[loss=0.1837, simple_loss=0.2698, pruned_loss=0.04881, over 19766.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2816, pruned_loss=0.05881, over 3837517.26 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:48:20,445 INFO [train.py:903] (3/4) Epoch 29, batch 5300, loss[loss=0.2041, simple_loss=0.3022, pruned_loss=0.05303, over 19533.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2829, pruned_loss=0.05922, over 3832252.78 frames. ], batch size: 56, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:48:21,523 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.666e+02 4.792e+02 5.709e+02 7.130e+02 1.478e+03, threshold=1.142e+03, percent-clipped=2.0 +2023-04-03 14:48:35,852 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 14:49:20,281 INFO [train.py:903] (3/4) Epoch 29, batch 5350, loss[loss=0.2021, simple_loss=0.285, pruned_loss=0.0596, over 19666.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2823, pruned_loss=0.05908, over 3837916.98 frames. ], batch size: 53, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:49:51,996 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 14:50:21,629 INFO [train.py:903] (3/4) Epoch 29, batch 5400, loss[loss=0.1832, simple_loss=0.2753, pruned_loss=0.04556, over 19527.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2828, pruned_loss=0.05949, over 3823483.11 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:50:22,761 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 4.834e+02 5.946e+02 7.840e+02 1.782e+03, threshold=1.189e+03, percent-clipped=5.0 +2023-04-03 14:50:36,285 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196596.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:51:22,486 INFO [train.py:903] (3/4) Epoch 29, batch 5450, loss[loss=0.1639, simple_loss=0.2379, pruned_loss=0.04496, over 19723.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2819, pruned_loss=0.05874, over 3826557.74 frames. 
], batch size: 45, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:51:38,162 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8702, 1.2034, 1.5834, 0.5997, 1.9667, 2.4605, 2.1722, 2.6327], + device='cuda:3'), covar=tensor([0.1730, 0.4098, 0.3581, 0.3098, 0.0687, 0.0294, 0.0348, 0.0383], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0257, 0.0199, 0.0221, 0.0279], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 14:51:45,536 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7825, 4.2366, 4.4672, 4.4881, 1.5792, 4.2119, 3.6146, 4.2016], + device='cuda:3'), covar=tensor([0.1723, 0.0964, 0.0662, 0.0766, 0.6645, 0.1059, 0.0759, 0.1182], + device='cuda:3'), in_proj_covar=tensor([0.0830, 0.0803, 0.1014, 0.0887, 0.0880, 0.0775, 0.0598, 0.0939], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 14:52:21,558 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196683.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:52:22,556 INFO [train.py:903] (3/4) Epoch 29, batch 5500, loss[loss=0.1909, simple_loss=0.2681, pruned_loss=0.05685, over 19617.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2825, pruned_loss=0.0594, over 3813041.00 frames. ], batch size: 50, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:52:23,692 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.565e+02 4.767e+02 5.823e+02 7.066e+02 1.237e+03, threshold=1.165e+03, percent-clipped=1.0 +2023-04-03 14:52:46,218 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 14:53:08,294 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196722.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:53:22,918 INFO [train.py:903] (3/4) Epoch 29, batch 5550, loss[loss=0.1729, simple_loss=0.2528, pruned_loss=0.04645, over 19487.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2822, pruned_loss=0.05956, over 3825904.66 frames. ], batch size: 49, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:53:30,040 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. Duration: 26.62775 +2023-04-03 14:54:18,742 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 14:54:22,915 INFO [train.py:903] (3/4) Epoch 29, batch 5600, loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04094, over 19528.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2819, pruned_loss=0.05934, over 3839982.98 frames. 
], batch size: 54, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:54:24,078 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.937e+02 5.977e+02 7.468e+02 1.263e+03, threshold=1.195e+03, percent-clipped=3.0 +2023-04-03 14:54:36,515 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3133, 2.1707, 2.0572, 1.9362, 1.7283, 1.9050, 0.7323, 1.3392], + device='cuda:3'), covar=tensor([0.0637, 0.0637, 0.0549, 0.0899, 0.1251, 0.0982, 0.1458, 0.1124], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0366, 0.0371, 0.0395, 0.0476, 0.0403, 0.0350, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 14:54:39,801 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196797.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:54:40,968 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196798.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:55:24,436 INFO [train.py:903] (3/4) Epoch 29, batch 5650, loss[loss=0.207, simple_loss=0.2672, pruned_loss=0.07336, over 19309.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2811, pruned_loss=0.05898, over 3840770.25 frames. ], batch size: 44, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:55:28,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=196837.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:06,799 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9537, 2.0414, 2.2776, 2.5846, 1.9705, 2.4251, 2.2502, 2.1267], + device='cuda:3'), covar=tensor([0.4224, 0.4140, 0.1940, 0.2513, 0.4083, 0.2348, 0.5115, 0.3418], + device='cuda:3'), in_proj_covar=tensor([0.0946, 0.1026, 0.0751, 0.0958, 0.0926, 0.0865, 0.0866, 0.0814], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 14:56:10,126 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196872.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:56:10,956 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 14:56:24,237 INFO [train.py:903] (3/4) Epoch 29, batch 5700, loss[loss=0.1895, simple_loss=0.2793, pruned_loss=0.04981, over 19378.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05846, over 3837154.38 frames. 
], batch size: 66, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:56:25,388 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.192e+02 5.159e+02 6.169e+02 7.777e+02 1.148e+03, threshold=1.234e+03, percent-clipped=0.0 +2023-04-03 14:56:28,884 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2527, 2.0261, 1.5731, 1.3614, 1.8225, 1.3005, 1.2400, 1.8577], + device='cuda:3'), covar=tensor([0.0943, 0.0786, 0.1019, 0.0869, 0.0572, 0.1257, 0.0736, 0.0440], + device='cuda:3'), in_proj_covar=tensor([0.0306, 0.0321, 0.0344, 0.0275, 0.0254, 0.0347, 0.0292, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 14:56:41,818 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=196899.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:57:04,568 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3011, 2.1876, 2.1004, 1.9276, 1.7786, 1.9030, 0.7553, 1.3055], + device='cuda:3'), covar=tensor([0.0750, 0.0723, 0.0526, 0.0968, 0.1260, 0.1108, 0.1522, 0.1226], + device='cuda:3'), in_proj_covar=tensor([0.0370, 0.0367, 0.0372, 0.0395, 0.0476, 0.0404, 0.0350, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 14:57:24,055 INFO [train.py:903] (3/4) Epoch 29, batch 5750, loss[loss=0.2273, simple_loss=0.3002, pruned_loss=0.07722, over 19857.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2812, pruned_loss=0.05897, over 3840262.97 frames. ], batch size: 52, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:57:25,244 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 14:57:30,928 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=196940.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:57:31,938 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 14:57:38,015 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 14:57:58,319 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.39 vs. limit=2.0 +2023-04-03 14:58:24,227 INFO [train.py:903] (3/4) Epoch 29, batch 5800, loss[loss=0.186, simple_loss=0.2584, pruned_loss=0.05682, over 19783.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2827, pruned_loss=0.05983, over 3811255.99 frames. ], batch size: 46, lr: 2.81e-03, grad_scale: 8.0 +2023-04-03 14:58:25,407 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.987e+02 6.141e+02 8.368e+02 1.662e+03, threshold=1.228e+03, percent-clipped=5.0 +2023-04-03 14:59:24,878 INFO [train.py:903] (3/4) Epoch 29, batch 5850, loss[loss=0.1632, simple_loss=0.2424, pruned_loss=0.04195, over 19766.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05989, over 3817481.73 frames. 
], batch size: 46, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 14:59:49,587 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197054.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 14:59:50,553 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197055.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:19,107 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197079.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:24,408 INFO [train.py:903] (3/4) Epoch 29, batch 5900, loss[loss=0.1798, simple_loss=0.2735, pruned_loss=0.04307, over 19665.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05991, over 3815888.11 frames. ], batch size: 60, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:00:25,528 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.889e+02 5.905e+02 7.501e+02 2.168e+03, threshold=1.181e+03, percent-clipped=6.0 +2023-04-03 15:00:26,682 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 15:00:35,852 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197093.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:00:46,555 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 15:01:06,443 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197118.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:24,667 INFO [train.py:903] (3/4) Epoch 29, batch 5950, loss[loss=0.1712, simple_loss=0.2511, pruned_loss=0.0457, over 19625.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2827, pruned_loss=0.05964, over 3832559.79 frames. ], batch size: 50, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:01:32,742 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197141.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:01:37,748 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-04-03 15:02:06,322 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.2295, 2.3021, 2.5064, 3.0088, 2.3967, 2.8678, 2.4726, 2.3304], + device='cuda:3'), covar=tensor([0.4358, 0.4160, 0.1977, 0.2672, 0.4528, 0.2348, 0.5058, 0.3414], + device='cuda:3'), in_proj_covar=tensor([0.0947, 0.1026, 0.0750, 0.0959, 0.0926, 0.0863, 0.0865, 0.0814], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 15:02:12,822 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1463, 3.5108, 2.1482, 1.8368, 3.2582, 1.7650, 1.5163, 2.6256], + device='cuda:3'), covar=tensor([0.1311, 0.0590, 0.0957, 0.1154, 0.0530, 0.1287, 0.1047, 0.0517], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0321, 0.0345, 0.0275, 0.0254, 0.0348, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:02:24,684 INFO [train.py:903] (3/4) Epoch 29, batch 6000, loss[loss=0.1927, simple_loss=0.2784, pruned_loss=0.05343, over 19774.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05933, over 3830594.50 frames. 
], batch size: 56, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:02:24,685 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 15:02:43,144 INFO [train.py:937] (3/4) Epoch 29, validation: loss=0.167, simple_loss=0.2662, pruned_loss=0.03392, over 944034.00 frames. +2023-04-03 15:02:43,145 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 15:02:44,354 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.479e+02 4.704e+02 5.833e+02 7.372e+02 1.660e+03, threshold=1.167e+03, percent-clipped=5.0 +2023-04-03 15:03:12,927 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0356, 1.8610, 1.6884, 2.0296, 1.7539, 1.6930, 1.6933, 1.9448], + device='cuda:3'), covar=tensor([0.1150, 0.1527, 0.1582, 0.1112, 0.1501, 0.0653, 0.1613, 0.0793], + device='cuda:3'), in_proj_covar=tensor([0.0279, 0.0359, 0.0319, 0.0257, 0.0308, 0.0259, 0.0323, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 15:03:23,061 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:03:44,115 INFO [train.py:903] (3/4) Epoch 29, batch 6050, loss[loss=0.1738, simple_loss=0.2555, pruned_loss=0.04601, over 19739.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.0591, over 3822549.43 frames. ], batch size: 51, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:03:44,608 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5202, 1.5981, 1.9284, 1.8148, 2.6218, 2.3549, 2.8378, 1.2184], + device='cuda:3'), covar=tensor([0.2580, 0.4536, 0.2831, 0.1978, 0.1734, 0.2214, 0.1633, 0.5014], + device='cuda:3'), in_proj_covar=tensor([0.0560, 0.0680, 0.0769, 0.0516, 0.0643, 0.0551, 0.0676, 0.0580], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 15:03:56,028 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:03:56,327 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7696, 1.8883, 2.1398, 2.2855, 1.7684, 2.2244, 2.1149, 1.9522], + device='cuda:3'), covar=tensor([0.4504, 0.4075, 0.2160, 0.2610, 0.4178, 0.2423, 0.5364, 0.3734], + device='cuda:3'), in_proj_covar=tensor([0.0947, 0.1026, 0.0750, 0.0959, 0.0926, 0.0864, 0.0866, 0.0815], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 15:04:11,916 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197256.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:04:45,072 INFO [train.py:903] (3/4) Epoch 29, batch 6100, loss[loss=0.1801, simple_loss=0.2469, pruned_loss=0.05663, over 19734.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.05907, over 3822021.64 frames. 
], batch size: 46, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:04:46,195 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.008e+02 5.045e+02 6.568e+02 8.070e+02 1.422e+03, threshold=1.314e+03, percent-clipped=5.0 +2023-04-03 15:05:17,481 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197311.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:42,397 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197331.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:05:45,467 INFO [train.py:903] (3/4) Epoch 29, batch 6150, loss[loss=0.2175, simple_loss=0.2965, pruned_loss=0.06929, over 17232.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2825, pruned_loss=0.05945, over 3818893.84 frames. ], batch size: 100, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:05:48,251 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197336.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:13,324 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 15:06:14,674 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197358.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:06:46,126 INFO [train.py:903] (3/4) Epoch 29, batch 6200, loss[loss=0.2071, simple_loss=0.2865, pruned_loss=0.06383, over 19780.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2834, pruned_loss=0.06021, over 3812503.57 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:06:47,112 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.150e+02 5.975e+02 7.505e+02 2.002e+03, threshold=1.195e+03, percent-clipped=6.0 +2023-04-03 15:07:32,512 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8860, 1.8677, 1.8689, 1.6854, 1.5188, 1.6375, 0.5171, 0.9706], + device='cuda:3'), covar=tensor([0.0763, 0.0711, 0.0497, 0.0860, 0.1361, 0.1020, 0.1495, 0.1247], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0364, 0.0369, 0.0394, 0.0472, 0.0401, 0.0347, 0.0349], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 15:07:45,746 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197433.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:07:46,598 INFO [train.py:903] (3/4) Epoch 29, batch 6250, loss[loss=0.1701, simple_loss=0.2489, pruned_loss=0.04565, over 19473.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2818, pruned_loss=0.05969, over 3806796.06 frames. ], batch size: 49, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:08:18,023 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 15:08:48,228 INFO [train.py:903] (3/4) Epoch 29, batch 6300, loss[loss=0.2219, simple_loss=0.2997, pruned_loss=0.07206, over 18118.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2825, pruned_loss=0.05964, over 3810415.25 frames. ], batch size: 83, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:08:50,595 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 4.620e+02 5.575e+02 6.491e+02 1.508e+03, threshold=1.115e+03, percent-clipped=2.0 +2023-04-03 15:09:13,181 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. 
limit=5.0 +2023-04-03 15:09:21,983 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197512.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:49,200 INFO [train.py:903] (3/4) Epoch 29, batch 6350, loss[loss=0.1943, simple_loss=0.2824, pruned_loss=0.05308, over 19544.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2814, pruned_loss=0.05846, over 3825537.97 frames. ], batch size: 61, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:09:49,488 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197534.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:09:52,893 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197537.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:23,567 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197563.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:10:49,102 INFO [train.py:903] (3/4) Epoch 29, batch 6400, loss[loss=0.2045, simple_loss=0.2906, pruned_loss=0.05919, over 19638.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2801, pruned_loss=0.05794, over 3827710.19 frames. ], batch size: 55, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:10:52,231 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.287e+02 5.708e+02 7.339e+02 1.464e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-04-03 15:10:53,764 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197587.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:23,622 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197612.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:25,880 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=197614.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:11:36,491 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2268, 5.6782, 3.3242, 4.8906, 1.3354, 5.7778, 5.6172, 5.8116], + device='cuda:3'), covar=tensor([0.0316, 0.0732, 0.1666, 0.0730, 0.3806, 0.0531, 0.0807, 0.1010], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0433, 0.0521, 0.0357, 0.0410, 0.0458, 0.0453, 0.0487], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:11:49,891 INFO [train.py:903] (3/4) Epoch 29, batch 6450, loss[loss=0.2183, simple_loss=0.299, pruned_loss=0.06878, over 18115.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2808, pruned_loss=0.05814, over 3812216.60 frames. ], batch size: 83, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:11:56,924 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=197639.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:12:13,560 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2647, 1.3161, 1.2824, 1.0752, 1.1220, 1.1495, 0.0762, 0.3784], + device='cuda:3'), covar=tensor([0.0758, 0.0696, 0.0505, 0.0660, 0.1344, 0.0706, 0.1474, 0.1231], + device='cuda:3'), in_proj_covar=tensor([0.0366, 0.0362, 0.0367, 0.0393, 0.0470, 0.0397, 0.0345, 0.0347], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 15:12:37,434 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. 
Duration: 28.638875 +2023-04-03 15:12:49,913 INFO [train.py:903] (3/4) Epoch 29, batch 6500, loss[loss=0.2125, simple_loss=0.2945, pruned_loss=0.06525, over 19524.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2819, pruned_loss=0.05881, over 3810530.21 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:12:52,112 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 4.970e+02 6.097e+02 8.194e+02 1.467e+03, threshold=1.219e+03, percent-clipped=8.0 +2023-04-03 15:12:58,634 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 15:13:50,301 INFO [train.py:903] (3/4) Epoch 29, batch 6550, loss[loss=0.1811, simple_loss=0.2565, pruned_loss=0.05286, over 19773.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2821, pruned_loss=0.05932, over 3814235.61 frames. ], batch size: 46, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:13:54,318 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-03 15:14:03,100 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.53 vs. limit=5.0 +2023-04-03 15:14:42,389 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197777.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:14:50,635 INFO [train.py:903] (3/4) Epoch 29, batch 6600, loss[loss=0.2202, simple_loss=0.2918, pruned_loss=0.07428, over 13317.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2831, pruned_loss=0.05947, over 3814375.10 frames. ], batch size: 135, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:14:54,078 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.141e+02 4.758e+02 5.997e+02 7.159e+02 2.116e+03, threshold=1.199e+03, percent-clipped=4.0 +2023-04-03 15:15:28,413 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.4630, 1.2618, 1.5143, 1.5824, 3.0820, 1.2159, 2.3214, 3.4462], + device='cuda:3'), covar=tensor([0.0566, 0.2972, 0.2952, 0.1896, 0.0681, 0.2478, 0.1354, 0.0286], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0380, 0.0400, 0.0355, 0.0384, 0.0358, 0.0399, 0.0420], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:15:51,169 INFO [train.py:903] (3/4) Epoch 29, batch 6650, loss[loss=0.213, simple_loss=0.2947, pruned_loss=0.06563, over 19597.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2832, pruned_loss=0.05963, over 3820079.02 frames. ], batch size: 57, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:45,534 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197878.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:16:52,896 INFO [train.py:903] (3/4) Epoch 29, batch 6700, loss[loss=0.1811, simple_loss=0.2785, pruned_loss=0.04185, over 19718.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2827, pruned_loss=0.0595, over 3807457.68 frames. 
], batch size: 59, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:16:56,373 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.106e+02 5.093e+02 5.944e+02 7.634e+02 1.783e+03, threshold=1.189e+03, percent-clipped=4.0 +2023-04-03 15:17:02,452 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197892.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:17:15,694 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7524, 1.5287, 1.5866, 2.2632, 1.7680, 1.8912, 1.9769, 1.7249], + device='cuda:3'), covar=tensor([0.0838, 0.0937, 0.1020, 0.0698, 0.0825, 0.0772, 0.0874, 0.0753], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0225, 0.0230, 0.0243, 0.0228, 0.0215, 0.0189, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 15:17:19,985 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=197907.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:23,552 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=197910.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:17:24,791 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8162, 1.2136, 0.9850, 0.9225, 1.0616, 0.9218, 0.8983, 1.1182], + device='cuda:3'), covar=tensor([0.0714, 0.0857, 0.1100, 0.0772, 0.0617, 0.1325, 0.0641, 0.0559], + device='cuda:3'), in_proj_covar=tensor([0.0306, 0.0320, 0.0344, 0.0276, 0.0254, 0.0346, 0.0291, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:17:50,661 INFO [train.py:903] (3/4) Epoch 29, batch 6750, loss[loss=0.1847, simple_loss=0.2709, pruned_loss=0.04925, over 19718.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2834, pruned_loss=0.05986, over 3818586.43 frames. ], batch size: 51, lr: 2.80e-03, grad_scale: 4.0 +2023-04-03 15:17:51,944 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.2733, 5.2961, 6.1165, 6.1404, 2.3214, 5.7320, 4.8569, 5.8159], + device='cuda:3'), covar=tensor([0.1722, 0.0777, 0.0649, 0.0599, 0.6124, 0.0839, 0.0646, 0.1209], + device='cuda:3'), in_proj_covar=tensor([0.0822, 0.0795, 0.1004, 0.0879, 0.0871, 0.0771, 0.0592, 0.0935], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 15:18:36,377 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3259, 2.0368, 1.6292, 1.4601, 1.8464, 1.4086, 1.3152, 1.8640], + device='cuda:3'), covar=tensor([0.1037, 0.0914, 0.1130, 0.0957, 0.0634, 0.1344, 0.0732, 0.0479], + device='cuda:3'), in_proj_covar=tensor([0.0306, 0.0321, 0.0345, 0.0276, 0.0254, 0.0347, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:18:45,938 INFO [train.py:903] (3/4) Epoch 29, batch 6800, loss[loss=0.1778, simple_loss=0.2587, pruned_loss=0.04849, over 19835.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.0597, over 3826205.09 frames. 
], batch size: 52, lr: 2.80e-03, grad_scale: 8.0 +2023-04-03 15:18:48,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8693, 4.4954, 2.8640, 3.9530, 1.2748, 4.3852, 4.2772, 4.3345], + device='cuda:3'), covar=tensor([0.0544, 0.0959, 0.1795, 0.0825, 0.3607, 0.0601, 0.0909, 0.0945], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0432, 0.0520, 0.0357, 0.0410, 0.0460, 0.0453, 0.0487], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:18:49,160 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.543e+02 4.853e+02 6.089e+02 7.661e+02 1.249e+03, threshold=1.218e+03, percent-clipped=1.0 +2023-04-03 15:18:55,186 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1135, 1.8850, 1.7879, 1.9816, 1.7981, 1.7518, 1.6277, 1.9435], + device='cuda:3'), covar=tensor([0.1068, 0.1506, 0.1516, 0.1183, 0.1502, 0.0613, 0.1574, 0.0800], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0360, 0.0320, 0.0259, 0.0310, 0.0259, 0.0322, 0.0264], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 15:18:56,346 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=197993.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:19:33,678 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9 from training. Duration: 27.47775 +2023-04-03 15:19:34,755 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9 from training. Duration: 26.6166875 +2023-04-03 15:19:37,012 INFO [train.py:903] (3/4) Epoch 30, batch 0, loss[loss=0.178, simple_loss=0.2585, pruned_loss=0.04876, over 19577.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2585, pruned_loss=0.04876, over 19577.00 frames. ], batch size: 52, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:19:37,012 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 15:19:49,305 INFO [train.py:937] (3/4) Epoch 30, validation: loss=0.167, simple_loss=0.2667, pruned_loss=0.03362, over 944034.00 frames. +2023-04-03 15:19:49,306 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 15:20:02,499 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198022.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:20:03,294 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425 from training. Duration: 25.775 +2023-04-03 15:20:35,582 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-03 15:20:51,414 INFO [train.py:903] (3/4) Epoch 30, batch 50, loss[loss=0.1998, simple_loss=0.2876, pruned_loss=0.05605, over 19538.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2862, pruned_loss=0.06047, over 866455.61 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:21:13,154 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-03 15:21:20,382 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.592e+02 4.959e+02 5.838e+02 7.711e+02 1.808e+03, threshold=1.168e+03, percent-clipped=5.0 +2023-04-03 15:21:26,258 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1_rvb from training. Duration: 27.0318125 +2023-04-03 15:21:32,472 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-03 15:21:36,598 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198099.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:21:52,087 INFO [train.py:903] (3/4) Epoch 30, batch 100, loss[loss=0.2192, simple_loss=0.3057, pruned_loss=0.06635, over 19355.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2843, pruned_loss=0.05986, over 1522514.60 frames. ], batch size: 70, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:22:04,478 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9 from training. Duration: 29.1166875 +2023-04-03 15:22:04,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0660, 5.1398, 5.9317, 5.9549, 2.1655, 5.6045, 4.6953, 5.5923], + device='cuda:3'), covar=tensor([0.1672, 0.0784, 0.0526, 0.0585, 0.6017, 0.0806, 0.0645, 0.1074], + device='cuda:3'), in_proj_covar=tensor([0.0823, 0.0797, 0.1003, 0.0881, 0.0872, 0.0769, 0.0592, 0.0934], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 15:22:31,191 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5180, 1.5430, 1.8246, 1.7841, 2.5922, 2.2101, 2.7081, 1.2726], + device='cuda:3'), covar=tensor([0.2740, 0.4733, 0.2982, 0.2128, 0.1621, 0.2448, 0.1551, 0.4965], + device='cuda:3'), in_proj_covar=tensor([0.0561, 0.0682, 0.0770, 0.0517, 0.0642, 0.0553, 0.0675, 0.0579], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 15:22:37,681 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198148.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:22:54,432 INFO [train.py:903] (3/4) Epoch 30, batch 150, loss[loss=0.2082, simple_loss=0.2908, pruned_loss=0.06275, over 19764.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2821, pruned_loss=0.05847, over 2038248.81 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:23:07,411 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198173.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:23:10,046 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-03 15:23:25,378 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 4.863e+02 5.858e+02 7.546e+02 1.579e+03, threshold=1.172e+03, percent-clipped=2.0 +2023-04-03 15:23:52,150 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994 from training. Duration: 27.14 +2023-04-03 15:23:55,535 INFO [train.py:903] (3/4) Epoch 30, batch 200, loss[loss=0.1789, simple_loss=0.2523, pruned_loss=0.05276, over 19746.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2804, pruned_loss=0.05816, over 2427740.41 frames. ], batch size: 46, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:24:42,342 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198249.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:47,682 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198254.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:24:57,533 INFO [train.py:903] (3/4) Epoch 30, batch 250, loss[loss=0.1969, simple_loss=0.2777, pruned_loss=0.05808, over 19599.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2812, pruned_loss=0.05839, over 2736618.97 frames. 
], batch size: 52, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:25:13,976 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198274.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:19,537 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198278.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:25:29,203 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.793e+02 5.720e+02 7.294e+02 1.524e+03, threshold=1.144e+03, percent-clipped=6.0 +2023-04-03 15:25:49,114 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198303.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:26:01,274 INFO [train.py:903] (3/4) Epoch 30, batch 300, loss[loss=0.2108, simple_loss=0.2945, pruned_loss=0.0636, over 17485.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2807, pruned_loss=0.05823, over 2980965.20 frames. ], batch size: 101, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:03,945 INFO [train.py:903] (3/4) Epoch 30, batch 350, loss[loss=0.1905, simple_loss=0.2801, pruned_loss=0.0504, over 19525.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2806, pruned_loss=0.05843, over 3177127.65 frames. ], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:27:06,267 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 15:27:12,181 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198369.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:27:33,419 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.869e+02 4.876e+02 6.987e+02 8.997e+02 2.429e+03, threshold=1.397e+03, percent-clipped=9.0 +2023-04-03 15:28:04,751 INFO [train.py:903] (3/4) Epoch 30, batch 400, loss[loss=0.2077, simple_loss=0.2912, pruned_loss=0.06215, over 19497.00 frames. ], tot_loss[loss=0.198, simple_loss=0.28, pruned_loss=0.05807, over 3330793.63 frames. ], batch size: 64, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:28:44,692 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198443.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:29:06,534 INFO [train.py:903] (3/4) Epoch 30, batch 450, loss[loss=0.177, simple_loss=0.2593, pruned_loss=0.0474, over 19615.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.279, pruned_loss=0.05712, over 3454687.90 frames. ], batch size: 50, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:29:08,589 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-03 15:29:38,786 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.061e+02 5.102e+02 6.085e+02 7.779e+02 1.785e+03, threshold=1.217e+03, percent-clipped=4.0 +2023-04-03 15:29:40,844 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149 from training. Duration: 25.285 +2023-04-03 15:29:42,029 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9 from training. 
Duration: 25.3333125 +2023-04-03 15:29:54,086 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0476, 2.1449, 2.3810, 2.5862, 2.0188, 2.5020, 2.3297, 2.2199], + device='cuda:3'), covar=tensor([0.4295, 0.4105, 0.1923, 0.2678, 0.4370, 0.2367, 0.5131, 0.3445], + device='cuda:3'), in_proj_covar=tensor([0.0946, 0.1028, 0.0748, 0.0959, 0.0925, 0.0863, 0.0864, 0.0815], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 15:30:08,494 INFO [train.py:903] (3/4) Epoch 30, batch 500, loss[loss=0.1895, simple_loss=0.2661, pruned_loss=0.05647, over 19470.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2792, pruned_loss=0.05742, over 3540584.69 frames. ], batch size: 49, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:30:11,060 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7880, 1.4446, 1.8289, 1.6225, 4.3434, 1.1857, 2.4858, 4.7141], + device='cuda:3'), covar=tensor([0.0482, 0.3061, 0.2809, 0.2069, 0.0737, 0.2838, 0.1692, 0.0183], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0383, 0.0402, 0.0356, 0.0387, 0.0360, 0.0401, 0.0424], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:30:16,803 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198517.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:06,315 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198558.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:31:11,516 INFO [train.py:903] (3/4) Epoch 30, batch 550, loss[loss=0.1862, simple_loss=0.2667, pruned_loss=0.05283, over 19565.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2802, pruned_loss=0.05747, over 3606206.49 frames. ], batch size: 52, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:31:40,933 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 4.928e+02 6.417e+02 8.667e+02 2.852e+03, threshold=1.283e+03, percent-clipped=10.0 +2023-04-03 15:31:47,763 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5040, 1.4362, 1.4535, 1.8704, 1.5008, 1.6238, 1.6813, 1.5295], + device='cuda:3'), covar=tensor([0.0832, 0.0893, 0.0993, 0.0616, 0.0903, 0.0824, 0.0860, 0.0726], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0227, 0.0240, 0.0226, 0.0215, 0.0188, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 15:31:56,099 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-03 15:32:13,287 INFO [train.py:903] (3/4) Epoch 30, batch 600, loss[loss=0.2021, simple_loss=0.2938, pruned_loss=0.05517, over 19767.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2795, pruned_loss=0.05689, over 3663227.25 frames. 
], batch size: 54, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:32:27,174 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7039, 1.5137, 1.7067, 2.2571, 1.8394, 1.7166, 1.7595, 1.7204], + device='cuda:3'), covar=tensor([0.0985, 0.1299, 0.1032, 0.0668, 0.1109, 0.1098, 0.1208, 0.0889], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0226, 0.0215, 0.0188, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 15:32:28,389 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198625.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:38,410 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198633.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:32:52,855 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9 from training. Duration: 27.8166875 +2023-04-03 15:33:01,346 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198650.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:33:14,979 INFO [train.py:903] (3/4) Epoch 30, batch 650, loss[loss=0.1899, simple_loss=0.2699, pruned_loss=0.055, over 19832.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2792, pruned_loss=0.05694, over 3692777.48 frames. ], batch size: 52, lr: 2.75e-03, grad_scale: 8.0 +2023-04-03 15:33:46,600 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.000e+02 5.780e+02 7.067e+02 2.104e+03, threshold=1.156e+03, percent-clipped=2.0 +2023-04-03 15:34:00,601 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0507, 3.3644, 1.8724, 2.0914, 3.0847, 1.7442, 1.3941, 2.2349], + device='cuda:3'), covar=tensor([0.1495, 0.0715, 0.1188, 0.0944, 0.0596, 0.1366, 0.1127, 0.0766], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0321, 0.0346, 0.0276, 0.0253, 0.0349, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:34:02,337 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-03 15:34:16,541 INFO [train.py:903] (3/4) Epoch 30, batch 700, loss[loss=0.201, simple_loss=0.279, pruned_loss=0.06149, over 19843.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2803, pruned_loss=0.0576, over 3724004.61 frames. ], batch size: 52, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:18,678 INFO [train.py:903] (3/4) Epoch 30, batch 750, loss[loss=0.2047, simple_loss=0.2763, pruned_loss=0.06654, over 19408.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2812, pruned_loss=0.05786, over 3756637.47 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:35:51,149 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.908e+02 5.891e+02 7.920e+02 1.978e+03, threshold=1.178e+03, percent-clipped=7.0 +2023-04-03 15:36:23,528 INFO [train.py:903] (3/4) Epoch 30, batch 800, loss[loss=0.1803, simple_loss=0.2462, pruned_loss=0.05719, over 19733.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2798, pruned_loss=0.05741, over 3765342.20 frames. ], batch size: 45, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:36:27,263 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=198814.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:36:39,855 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1 from training. 
Duration: 0.9681875 +2023-04-03 15:36:56,808 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=198839.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:25,167 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198861.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:26,181 INFO [train.py:903] (3/4) Epoch 30, batch 850, loss[loss=0.2084, simple_loss=0.2946, pruned_loss=0.06108, over 19541.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2799, pruned_loss=0.05724, over 3778929.70 frames. ], batch size: 64, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:37:35,509 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198870.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:37:55,570 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.084e+02 4.755e+02 5.781e+02 7.125e+02 1.514e+03, threshold=1.156e+03, percent-clipped=6.0 +2023-04-03 15:38:13,675 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198901.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:38:20,315 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9_rvb from training. Duration: 25.061125 +2023-04-03 15:38:26,177 INFO [train.py:903] (3/4) Epoch 30, batch 900, loss[loss=0.2097, simple_loss=0.2908, pruned_loss=0.0643, over 19483.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2797, pruned_loss=0.05774, over 3789158.80 frames. ], batch size: 64, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:07,725 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=198945.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:27,821 INFO [train.py:903] (3/4) Epoch 30, batch 950, loss[loss=0.1841, simple_loss=0.2665, pruned_loss=0.05088, over 19686.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2799, pruned_loss=0.05792, over 3787747.05 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:39:32,350 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9_rvb from training. Duration: 26.32775 +2023-04-03 15:39:47,116 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=198976.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:48,127 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=198977.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:39:59,673 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.142e+02 5.052e+02 6.305e+02 7.870e+02 1.588e+03, threshold=1.261e+03, percent-clipped=4.0 +2023-04-03 15:40:30,262 INFO [train.py:903] (3/4) Epoch 30, batch 1000, loss[loss=0.2292, simple_loss=0.303, pruned_loss=0.07774, over 19414.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.05824, over 3798867.19 frames. ], batch size: 70, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:40:34,412 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-03 15:41:23,981 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0026-44402_sp0.9 from training. Duration: 25.061125 +2023-04-03 15:41:33,182 INFO [train.py:903] (3/4) Epoch 30, batch 1050, loss[loss=0.2083, simple_loss=0.2876, pruned_loss=0.06446, over 19662.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2811, pruned_loss=0.05861, over 3783150.32 frames. 
], batch size: 53, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:42:03,436 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.270e+02 4.631e+02 5.961e+02 7.864e+02 1.953e+03, threshold=1.192e+03, percent-clipped=2.0 +2023-04-03 15:42:04,604 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1 from training. Duration: 0.7545625 +2023-04-03 15:42:11,038 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199092.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:42:35,181 INFO [train.py:903] (3/4) Epoch 30, batch 1100, loss[loss=0.1646, simple_loss=0.2469, pruned_loss=0.0412, over 19394.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2804, pruned_loss=0.05813, over 3790774.23 frames. ], batch size: 47, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:43:38,423 INFO [train.py:903] (3/4) Epoch 30, batch 1150, loss[loss=0.1882, simple_loss=0.2764, pruned_loss=0.05002, over 18850.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2806, pruned_loss=0.05859, over 3786094.76 frames. ], batch size: 74, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:10,291 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.110e+02 5.915e+02 7.639e+02 1.372e+03, threshold=1.183e+03, percent-clipped=6.0 +2023-04-03 15:44:18,777 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199194.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:44:28,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.8954, 1.3428, 1.1007, 1.0059, 1.1737, 1.0579, 0.9537, 1.2658], + device='cuda:3'), covar=tensor([0.0698, 0.0921, 0.1223, 0.0827, 0.0652, 0.1361, 0.0666, 0.0537], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0319, 0.0343, 0.0274, 0.0252, 0.0346, 0.0290, 0.0274], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:44:41,424 INFO [train.py:903] (3/4) Epoch 30, batch 1200, loss[loss=0.1894, simple_loss=0.2601, pruned_loss=0.05933, over 19466.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2805, pruned_loss=0.05868, over 3799011.20 frames. ], batch size: 49, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:44:43,962 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199214.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:06,899 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199232.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:14,390 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983 from training. Duration: 0.83 +2023-04-03 15:45:22,172 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199245.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:38,756 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199257.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:45:44,966 INFO [train.py:903] (3/4) Epoch 30, batch 1250, loss[loss=0.1988, simple_loss=0.2817, pruned_loss=0.05796, over 18761.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2811, pruned_loss=0.05901, over 3814161.88 frames. 
], batch size: 74, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:45:54,324 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.5570, 4.1312, 2.8198, 3.6080, 1.0132, 4.1498, 3.9782, 4.0777], + device='cuda:3'), covar=tensor([0.0696, 0.0995, 0.1915, 0.0925, 0.4057, 0.0722, 0.0985, 0.1385], + device='cuda:3'), in_proj_covar=tensor([0.0534, 0.0432, 0.0520, 0.0359, 0.0408, 0.0459, 0.0453, 0.0488], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:46:14,641 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.608e+02 5.004e+02 5.991e+02 7.632e+02 1.398e+03, threshold=1.198e+03, percent-clipped=2.0 +2023-04-03 15:46:17,149 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199289.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:46:46,511 INFO [train.py:903] (3/4) Epoch 30, batch 1300, loss[loss=0.2229, simple_loss=0.2828, pruned_loss=0.08154, over 19053.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2821, pruned_loss=0.05924, over 3823485.37 frames. ], batch size: 42, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:47:07,594 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199329.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:32,056 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199348.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:46,101 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199360.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:47:47,888 INFO [train.py:903] (3/4) Epoch 30, batch 1350, loss[loss=0.2193, simple_loss=0.2986, pruned_loss=0.07, over 19767.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2815, pruned_loss=0.05878, over 3817540.98 frames. ], batch size: 54, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:03,218 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199373.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:20,889 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.443e+02 4.672e+02 5.739e+02 7.239e+02 1.592e+03, threshold=1.148e+03, percent-clipped=7.0 +2023-04-03 15:48:40,938 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199404.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:48:50,927 INFO [train.py:903] (3/4) Epoch 30, batch 1400, loss[loss=0.1848, simple_loss=0.2776, pruned_loss=0.04603, over 18249.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05894, over 3806955.76 frames. ], batch size: 83, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:48:58,990 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199418.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:49:03,142 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-03 15:49:53,480 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.28 vs. limit=5.0 +2023-04-03 15:49:55,030 INFO [train.py:903] (3/4) Epoch 30, batch 1450, loss[loss=0.223, simple_loss=0.3068, pruned_loss=0.06956, over 19536.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2816, pruned_loss=0.059, over 3813936.96 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:49:56,189 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_rvb from training. 
Duration: 25.85 +2023-04-03 15:50:11,596 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199476.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:50:25,539 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.487e+02 5.040e+02 6.121e+02 7.927e+02 1.972e+03, threshold=1.224e+03, percent-clipped=6.0 +2023-04-03 15:50:56,161 INFO [train.py:903] (3/4) Epoch 30, batch 1500, loss[loss=0.1944, simple_loss=0.2843, pruned_loss=0.0523, over 19656.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2817, pruned_loss=0.05875, over 3827097.79 frames. ], batch size: 55, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:50:56,472 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199512.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:51:28,282 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199538.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:51:46,212 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.5957, 1.3152, 1.4014, 1.4169, 2.3713, 1.1717, 1.9634, 2.5946], + device='cuda:3'), covar=tensor([0.0563, 0.2370, 0.2436, 0.1552, 0.0608, 0.2026, 0.1695, 0.0432], + device='cuda:3'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0388, 0.0362, 0.0403, 0.0424], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 15:51:47,420 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199553.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 15:51:58,247 INFO [train.py:903] (3/4) Epoch 30, batch 1550, loss[loss=0.1749, simple_loss=0.2647, pruned_loss=0.04253, over 19664.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2822, pruned_loss=0.05896, over 3819041.20 frames. ], batch size: 58, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:52:28,194 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199585.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:52:31,297 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.717e+02 5.804e+02 6.903e+02 1.639e+03, threshold=1.161e+03, percent-clipped=1.0 +2023-04-03 15:52:46,522 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1374, 2.0258, 1.9345, 1.7831, 1.6070, 1.7089, 0.5823, 1.1081], + device='cuda:3'), covar=tensor([0.0710, 0.0730, 0.0548, 0.0879, 0.1356, 0.1016, 0.1561, 0.1182], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0364, 0.0370, 0.0395, 0.0472, 0.0398, 0.0347, 0.0348], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 15:52:59,340 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199610.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:01,355 INFO [train.py:903] (3/4) Epoch 30, batch 1600, loss[loss=0.2048, simple_loss=0.2941, pruned_loss=0.05773, over 17468.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2824, pruned_loss=0.05872, over 3815560.59 frames. ], batch size: 101, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:53:06,429 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199616.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:26,387 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9_rvb from training. 
Duration: 30.1555625 +2023-04-03 15:53:37,708 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:53:53,168 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199653.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:01,237 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199660.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:03,852 INFO [train.py:903] (3/4) Epoch 30, batch 1650, loss[loss=0.2017, simple_loss=0.2873, pruned_loss=0.058, over 18279.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2822, pruned_loss=0.05838, over 3817867.90 frames. ], batch size: 83, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:54:32,734 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199685.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:54:35,820 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.122e+02 4.724e+02 5.858e+02 7.730e+02 3.208e+03, threshold=1.172e+03, percent-clipped=5.0 +2023-04-03 15:55:06,071 INFO [train.py:903] (3/4) Epoch 30, batch 1700, loss[loss=0.2239, simple_loss=0.3056, pruned_loss=0.07114, over 19082.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2814, pruned_loss=0.05816, over 3818556.06 frames. ], batch size: 69, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 15:55:24,049 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0862, 1.9650, 1.9004, 1.7788, 1.5526, 1.6705, 0.6204, 1.0927], + device='cuda:3'), covar=tensor([0.0794, 0.0775, 0.0563, 0.0904, 0.1391, 0.1082, 0.1503, 0.1276], + device='cuda:3'), in_proj_covar=tensor([0.0371, 0.0366, 0.0373, 0.0398, 0.0476, 0.0402, 0.0350, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 15:55:40,547 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-03 15:55:46,467 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590 from training. Duration: 25.85 +2023-04-03 15:56:09,059 INFO [train.py:903] (3/4) Epoch 30, batch 1750, loss[loss=0.2107, simple_loss=0.2943, pruned_loss=0.06355, over 19782.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2808, pruned_loss=0.05814, over 3818654.36 frames. ], batch size: 56, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:56:09,245 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199762.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:56:42,931 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.076e+02 4.893e+02 6.270e+02 7.375e+02 1.627e+03, threshold=1.254e+03, percent-clipped=1.0 +2023-04-03 15:57:00,469 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8336, 4.2877, 4.5034, 4.4979, 1.7741, 4.2402, 3.6653, 4.2601], + device='cuda:3'), covar=tensor([0.1715, 0.0827, 0.0643, 0.0757, 0.6491, 0.1142, 0.0741, 0.1135], + device='cuda:3'), in_proj_covar=tensor([0.0829, 0.0803, 0.1015, 0.0891, 0.0877, 0.0776, 0.0600, 0.0943], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 15:57:11,415 INFO [train.py:903] (3/4) Epoch 30, batch 1800, loss[loss=0.2282, simple_loss=0.3029, pruned_loss=0.07674, over 12718.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2802, pruned_loss=0.05786, over 3817799.66 frames. 
], batch size: 135, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:57:21,125 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199820.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:06,473 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199856.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:58:08,425 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9_rvb from training. Duration: 25.0944375 +2023-04-03 15:58:13,035 INFO [train.py:903] (3/4) Epoch 30, batch 1850, loss[loss=0.1733, simple_loss=0.2565, pruned_loss=0.04506, over 19610.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05781, over 3818288.18 frames. ], batch size: 50, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:58:15,402 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.7906, 4.2958, 4.5071, 4.4953, 1.6438, 4.2664, 3.6641, 4.2424], + device='cuda:3'), covar=tensor([0.1719, 0.0875, 0.0640, 0.0732, 0.6522, 0.1060, 0.0739, 0.1097], + device='cuda:3'), in_proj_covar=tensor([0.0831, 0.0806, 0.1020, 0.0894, 0.0881, 0.0779, 0.0603, 0.0948], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 15:58:32,560 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199877.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:58:46,821 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.213e+02 5.043e+02 6.338e+02 8.325e+02 2.069e+03, threshold=1.268e+03, percent-clipped=7.0 +2023-04-03 15:58:46,860 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_sp0.9_rvb from training. Duration: 27.8166875 +2023-04-03 15:58:57,092 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=199897.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 15:59:12,888 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=199909.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:15,996 INFO [train.py:903] (3/4) Epoch 30, batch 1900, loss[loss=0.1684, simple_loss=0.2521, pruned_loss=0.04235, over 19668.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2795, pruned_loss=0.05724, over 3834936.24 frames. ], batch size: 53, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 15:59:33,273 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9 from training. Duration: 29.816625 +2023-04-03 15:59:33,566 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=199927.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:38,036 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9_rvb from training. Duration: 27.02225 +2023-04-03 15:59:42,795 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=199934.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 15:59:43,930 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199935.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:00:03,955 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9 from training. Duration: 0.92225 +2023-04-03 16:00:17,076 INFO [train.py:903] (3/4) Epoch 30, batch 1950, loss[loss=0.2008, simple_loss=0.2822, pruned_loss=0.05969, over 19575.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.28, pruned_loss=0.05775, over 3830821.48 frames. 
], batch size: 61, lr: 2.74e-03, grad_scale: 4.0 +2023-04-03 16:00:29,591 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=199971.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:00:51,090 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 5.211e+02 6.525e+02 7.685e+02 1.771e+03, threshold=1.305e+03, percent-clipped=2.0 +2023-04-03 16:01:21,187 INFO [train.py:903] (3/4) Epoch 30, batch 2000, loss[loss=0.1972, simple_loss=0.2789, pruned_loss=0.05777, over 19657.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2807, pruned_loss=0.05781, over 3825825.00 frames. ], batch size: 55, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:01:22,698 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200012.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:02:21,500 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_rvb from training. Duration: 26.8349375 +2023-04-03 16:02:23,758 INFO [train.py:903] (3/4) Epoch 30, batch 2050, loss[loss=0.1385, simple_loss=0.2197, pruned_loss=0.02862, over 19744.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2809, pruned_loss=0.05806, over 3834844.03 frames. ], batch size: 46, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:02:43,237 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1 from training. Duration: 0.95 +2023-04-03 16:02:43,268 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp1.1 from training. Duration: 27.0318125 +2023-04-03 16:02:50,489 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9864, 2.0592, 2.3619, 2.6435, 2.0572, 2.5191, 2.3187, 2.1553], + device='cuda:3'), covar=tensor([0.4253, 0.4021, 0.1992, 0.2471, 0.4109, 0.2250, 0.5044, 0.3356], + device='cuda:3'), in_proj_covar=tensor([0.0947, 0.1032, 0.0750, 0.0959, 0.0926, 0.0866, 0.0867, 0.0815], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:02:57,797 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.348e+02 4.986e+02 6.252e+02 8.206e+02 1.738e+03, threshold=1.250e+03, percent-clipped=4.0 +2023-04-03 16:03:03,561 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1 from training. Duration: 0.836375 +2023-04-03 16:03:11,191 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.36 vs. limit=5.0 +2023-04-03 16:03:26,457 INFO [train.py:903] (3/4) Epoch 30, batch 2100, loss[loss=0.1855, simple_loss=0.2799, pruned_loss=0.04554, over 19621.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2803, pruned_loss=0.05771, over 3836628.89 frames. ], batch size: 57, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:03:52,969 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200133.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:03:57,151 WARNING [train.py:1073] (3/4) Exclude cut with ID 453-131332-0000-131866_sp0.9_rvb from training. Duration: 25.3333125 +2023-04-03 16:04:18,930 WARNING [train.py:1073] (3/4) Exclude cut with ID 7699-105389-0094-102071_sp0.9_rvb from training. Duration: 26.6166875 +2023-04-03 16:04:24,940 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200158.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:04:29,059 INFO [train.py:903] (3/4) Epoch 30, batch 2150, loss[loss=0.2118, simple_loss=0.3, pruned_loss=0.06181, over 19745.00 frames. 
], tot_loss[loss=0.1987, simple_loss=0.2809, pruned_loss=0.05824, over 3839442.53 frames. ], batch size: 63, lr: 2.74e-03, grad_scale: 8.0 +2023-04-03 16:05:02,778 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.819e+02 5.957e+02 8.119e+02 2.108e+03, threshold=1.191e+03, percent-clipped=2.0 +2023-04-03 16:05:05,543 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200191.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:10,687 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200195.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:05:30,903 INFO [train.py:903] (3/4) Epoch 30, batch 2200, loss[loss=0.1953, simple_loss=0.2714, pruned_loss=0.05959, over 19605.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2811, pruned_loss=0.05869, over 3844355.36 frames. ], batch size: 50, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:05:37,204 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200216.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:05:39,453 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0051, 4.4243, 4.7137, 4.7042, 1.6716, 4.4079, 3.8167, 4.4455], + device='cuda:3'), covar=tensor([0.1659, 0.0832, 0.0628, 0.0678, 0.6518, 0.0911, 0.0700, 0.1154], + device='cuda:3'), in_proj_covar=tensor([0.0824, 0.0798, 0.1011, 0.0885, 0.0872, 0.0772, 0.0597, 0.0936], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 16:05:50,128 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200227.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:20,844 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200252.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:06:33,032 INFO [train.py:903] (3/4) Epoch 30, batch 2250, loss[loss=0.1868, simple_loss=0.2742, pruned_loss=0.04969, over 19678.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2817, pruned_loss=0.0594, over 3849574.08 frames. ], batch size: 60, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:06:41,188 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200268.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 16:06:44,368 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:08,303 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.532e+02 5.298e+02 6.530e+02 8.599e+02 1.543e+03, threshold=1.306e+03, percent-clipped=6.0 +2023-04-03 16:07:12,104 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200293.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:07:17,571 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200297.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:07:35,295 INFO [train.py:903] (3/4) Epoch 30, batch 2300, loss[loss=0.2221, simple_loss=0.3039, pruned_loss=0.07012, over 17409.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2821, pruned_loss=0.0598, over 3828199.61 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:07:51,106 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_rvb from training. Duration: 26.205 +2023-04-03 16:08:18,925 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-04-03 16:08:37,754 INFO [train.py:903] (3/4) Epoch 30, batch 2350, loss[loss=0.1645, simple_loss=0.2474, pruned_loss=0.04083, over 19776.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2811, pruned_loss=0.05903, over 3833090.47 frames. ], batch size: 47, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:08:47,325 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.65 vs. limit=5.0 +2023-04-03 16:09:07,678 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200386.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:09:12,874 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.379e+02 4.836e+02 5.833e+02 7.163e+02 1.475e+03, threshold=1.167e+03, percent-clipped=1.0 +2023-04-03 16:09:21,096 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_rvb from training. Duration: 25.775 +2023-04-03 16:09:34,047 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.8488, 1.7008, 1.8679, 1.4587, 4.3738, 1.1614, 2.6100, 4.7241], + device='cuda:3'), covar=tensor([0.0486, 0.2839, 0.2972, 0.2280, 0.0771, 0.2882, 0.1641, 0.0188], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0383, 0.0404, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:09:38,137 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9_rvb from training. Duration: 25.45 +2023-04-03 16:09:40,555 INFO [train.py:903] (3/4) Epoch 30, batch 2400, loss[loss=0.194, simple_loss=0.2745, pruned_loss=0.05678, over 19583.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2812, pruned_loss=0.05922, over 3835038.08 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:10:24,211 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.6004, 1.1337, 1.3545, 1.1723, 2.2262, 1.0529, 2.0342, 2.5229], + device='cuda:3'), covar=tensor([0.0754, 0.3044, 0.3216, 0.1919, 0.0908, 0.2228, 0.1269, 0.0488], + device='cuda:3'), in_proj_covar=tensor([0.0427, 0.0383, 0.0403, 0.0356, 0.0386, 0.0361, 0.0402, 0.0423], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:10:43,361 INFO [train.py:903] (3/4) Epoch 30, batch 2450, loss[loss=0.2125, simple_loss=0.2959, pruned_loss=0.0646, over 18237.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2816, pruned_loss=0.05905, over 3832570.88 frames. ], batch size: 84, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:11:19,107 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.119e+02 5.121e+02 5.946e+02 7.814e+02 2.121e+03, threshold=1.189e+03, percent-clipped=7.0 +2023-04-03 16:11:27,561 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8400, 3.3181, 3.3593, 3.3644, 1.4519, 3.2409, 2.8373, 3.1588], + device='cuda:3'), covar=tensor([0.1804, 0.0956, 0.0860, 0.1023, 0.5599, 0.1130, 0.0896, 0.1307], + device='cuda:3'), in_proj_covar=tensor([0.0819, 0.0795, 0.1006, 0.0883, 0.0868, 0.0770, 0.0593, 0.0935], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 16:11:46,465 INFO [train.py:903] (3/4) Epoch 30, batch 2500, loss[loss=0.2232, simple_loss=0.3063, pruned_loss=0.07006, over 19332.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2828, pruned_loss=0.05971, over 3822741.93 frames. 
], batch size: 66, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:12:20,493 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200539.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:12:48,793 INFO [train.py:903] (3/4) Epoch 30, batch 2550, loss[loss=0.1902, simple_loss=0.2713, pruned_loss=0.05453, over 19744.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2832, pruned_loss=0.06008, over 3818102.77 frames. ], batch size: 54, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:13:23,044 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.619e+02 4.923e+02 6.163e+02 7.973e+02 2.573e+03, threshold=1.233e+03, percent-clipped=12.0 +2023-04-03 16:13:41,769 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.7253, 4.3571, 2.9328, 3.8259, 1.2922, 4.2744, 4.1573, 4.2375], + device='cuda:3'), covar=tensor([0.0606, 0.0870, 0.1729, 0.0793, 0.3407, 0.0663, 0.0958, 0.1190], + device='cuda:3'), in_proj_covar=tensor([0.0533, 0.0431, 0.0517, 0.0358, 0.0409, 0.0458, 0.0449, 0.0487], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:13:47,178 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9 from training. Duration: 25.988875 +2023-04-03 16:13:51,625 INFO [train.py:903] (3/4) Epoch 30, batch 2600, loss[loss=0.2051, simple_loss=0.2746, pruned_loss=0.0678, over 19735.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2828, pruned_loss=0.05965, over 3827021.47 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:28,371 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=200641.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:29,709 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200642.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:14:44,515 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200654.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:14:54,163 INFO [train.py:903] (3/4) Epoch 30, batch 2650, loss[loss=0.1884, simple_loss=0.279, pruned_loss=0.04888, over 19595.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.284, pruned_loss=0.06028, over 3816102.40 frames. ], batch size: 52, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:14:55,566 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=200663.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:00,429 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200667.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:15:12,708 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.8076, 1.4675, 1.7024, 1.7282, 3.3660, 1.3800, 2.4610, 3.8551], + device='cuda:3'), covar=tensor([0.0498, 0.2893, 0.2831, 0.1782, 0.0680, 0.2374, 0.1379, 0.0222], + device='cuda:3'), in_proj_covar=tensor([0.0425, 0.0382, 0.0402, 0.0355, 0.0386, 0.0361, 0.0402, 0.0422], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:15:15,800 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9_rvb from training. 
Duration: 27.25 +2023-04-03 16:15:28,264 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.276e+02 4.881e+02 6.412e+02 8.116e+02 1.737e+03, threshold=1.282e+03, percent-clipped=7.0 +2023-04-03 16:15:55,154 INFO [train.py:903] (3/4) Epoch 30, batch 2700, loss[loss=0.1864, simple_loss=0.2669, pruned_loss=0.05293, over 19658.00 frames. ], tot_loss[loss=0.202, simple_loss=0.284, pruned_loss=0.06004, over 3816094.63 frames. ], batch size: 53, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:16:34,929 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 16:16:51,756 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=200756.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:16:59,612 INFO [train.py:903] (3/4) Epoch 30, batch 2750, loss[loss=0.2177, simple_loss=0.3008, pruned_loss=0.06726, over 18961.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2829, pruned_loss=0.0593, over 3821878.70 frames. ], batch size: 74, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:17:34,086 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.266e+02 4.594e+02 5.767e+02 6.844e+02 1.269e+03, threshold=1.153e+03, percent-clipped=0.0 +2023-04-03 16:18:02,567 INFO [train.py:903] (3/4) Epoch 30, batch 2800, loss[loss=0.201, simple_loss=0.2882, pruned_loss=0.05692, over 17970.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.282, pruned_loss=0.05893, over 3815892.31 frames. ], batch size: 83, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:05,168 INFO [train.py:903] (3/4) Epoch 30, batch 2850, loss[loss=0.1987, simple_loss=0.2818, pruned_loss=0.05777, over 19349.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2819, pruned_loss=0.05869, over 3819406.87 frames. ], batch size: 66, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:19:39,283 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.865e+02 5.683e+02 7.765e+02 1.857e+03, threshold=1.137e+03, percent-clipped=6.0 +2023-04-03 16:20:04,686 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=200910.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:20:06,539 INFO [train.py:903] (3/4) Epoch 30, batch 2900, loss[loss=0.2136, simple_loss=0.2964, pruned_loss=0.06539, over 19654.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2813, pruned_loss=0.05825, over 3831378.11 frames. ], batch size: 55, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:20:07,768 WARNING [train.py:1073] (3/4) Exclude cut with ID 8631-249866-0030-64025_sp0.9 from training. Duration: 26.32775 +2023-04-03 16:20:36,303 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=200935.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 16:21:00,057 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3384, 1.9731, 1.5155, 1.3647, 1.8147, 1.2556, 1.2946, 1.7916], + device='cuda:3'), covar=tensor([0.1016, 0.0887, 0.1215, 0.0921, 0.0664, 0.1427, 0.0772, 0.0490], + device='cuda:3'), in_proj_covar=tensor([0.0305, 0.0321, 0.0345, 0.0277, 0.0255, 0.0350, 0.0291, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:21:08,763 INFO [train.py:903] (3/4) Epoch 30, batch 2950, loss[loss=0.1907, simple_loss=0.2697, pruned_loss=0.05582, over 19740.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2813, pruned_loss=0.05851, over 3822043.01 frames. 
], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:21:44,102 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 4.844e+02 6.179e+02 7.399e+02 1.416e+03, threshold=1.236e+03, percent-clipped=4.0 +2023-04-03 16:22:06,393 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:11,902 INFO [train.py:903] (3/4) Epoch 30, batch 3000, loss[loss=0.191, simple_loss=0.2808, pruned_loss=0.05064, over 19673.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2798, pruned_loss=0.05758, over 3837848.37 frames. ], batch size: 55, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:22:11,902 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 16:22:26,180 INFO [train.py:937] (3/4) Epoch 30, validation: loss=0.1666, simple_loss=0.266, pruned_loss=0.03357, over 944034.00 frames. +2023-04-03 16:22:26,181 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 16:22:26,663 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201012.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:22:32,366 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_rvb from training. Duration: 29.735 +2023-04-03 16:22:57,372 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201037.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:23:27,828 INFO [train.py:903] (3/4) Epoch 30, batch 3050, loss[loss=0.2175, simple_loss=0.3045, pruned_loss=0.06528, over 19686.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2795, pruned_loss=0.05742, over 3843065.15 frames. ], batch size: 59, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:23:57,907 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.0561, 1.2793, 1.6736, 0.9869, 2.3106, 3.0236, 2.7193, 3.2226], + device='cuda:3'), covar=tensor([0.1755, 0.4084, 0.3649, 0.2969, 0.0701, 0.0253, 0.0279, 0.0353], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0335, 0.0367, 0.0274, 0.0257, 0.0200, 0.0222, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 16:24:02,339 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201089.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:24:03,230 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.915e+02 4.896e+02 5.871e+02 7.483e+02 2.064e+03, threshold=1.174e+03, percent-clipped=5.0 +2023-04-03 16:24:32,128 INFO [train.py:903] (3/4) Epoch 30, batch 3100, loss[loss=0.1956, simple_loss=0.2833, pruned_loss=0.05396, over 19727.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2794, pruned_loss=0.05762, over 3841449.82 frames. ], batch size: 63, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:24:43,641 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201122.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:25:03,520 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-03 16:25:33,508 INFO [train.py:903] (3/4) Epoch 30, batch 3150, loss[loss=0.1852, simple_loss=0.2533, pruned_loss=0.05856, over 18560.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2788, pruned_loss=0.05747, over 3837917.95 frames. ], batch size: 41, lr: 2.73e-03, grad_scale: 4.0 +2023-04-03 16:26:02,513 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1 from training. 
Duration: 25.3818125 +2023-04-03 16:26:10,329 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.152e+02 4.625e+02 5.752e+02 7.402e+02 1.953e+03, threshold=1.150e+03, percent-clipped=5.0 +2023-04-03 16:26:16,496 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5447, 1.3610, 1.3023, 1.5267, 1.3144, 1.2998, 1.3005, 1.4645], + device='cuda:3'), covar=tensor([0.0875, 0.1156, 0.1217, 0.0879, 0.1136, 0.0539, 0.1256, 0.0710], + device='cuda:3'), in_proj_covar=tensor([0.0280, 0.0358, 0.0319, 0.0258, 0.0309, 0.0260, 0.0324, 0.0263], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:3') +2023-04-03 16:26:35,926 INFO [train.py:903] (3/4) Epoch 30, batch 3200, loss[loss=0.1869, simple_loss=0.2796, pruned_loss=0.04707, over 17562.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2786, pruned_loss=0.05739, over 3826086.50 frames. ], batch size: 101, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:26:38,599 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1382, 1.6790, 1.8426, 2.9985, 2.2675, 2.2547, 2.4223, 2.0023], + device='cuda:3'), covar=tensor([0.0868, 0.1088, 0.1100, 0.0745, 0.0839, 0.0934, 0.0964, 0.0794], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0240, 0.0227, 0.0216, 0.0187, 0.0209], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 16:27:03,294 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-03 16:27:26,678 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.5915, 1.6047, 1.7618, 1.7304, 2.3671, 2.0788, 2.4271, 1.5324], + device='cuda:3'), covar=tensor([0.2028, 0.3562, 0.2345, 0.1663, 0.1322, 0.1897, 0.1204, 0.4128], + device='cuda:3'), in_proj_covar=tensor([0.0557, 0.0679, 0.0766, 0.0514, 0.0637, 0.0549, 0.0672, 0.0580], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:27:40,098 INFO [train.py:903] (3/4) Epoch 30, batch 3250, loss[loss=0.21, simple_loss=0.295, pruned_loss=0.06252, over 18032.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2791, pruned_loss=0.05763, over 3815075.58 frames. ], batch size: 83, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:27:51,503 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201271.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:14,881 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.466e+02 4.729e+02 5.856e+02 7.119e+02 1.424e+03, threshold=1.171e+03, percent-clipped=4.0 +2023-04-03 16:28:16,398 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201292.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:28:42,757 INFO [train.py:903] (3/4) Epoch 30, batch 3300, loss[loss=0.1694, simple_loss=0.2552, pruned_loss=0.04173, over 19778.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2787, pruned_loss=0.05742, over 3825928.65 frames. ], batch size: 48, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:28:49,690 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465 from training. 
Duration: 26.8349375 +2023-04-03 16:28:51,286 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6904, 2.3206, 1.6965, 1.5381, 2.1736, 1.3694, 1.4477, 2.1325], + device='cuda:3'), covar=tensor([0.1083, 0.0838, 0.1165, 0.0952, 0.0581, 0.1367, 0.0816, 0.0503], + device='cuda:3'), in_proj_covar=tensor([0.0303, 0.0319, 0.0343, 0.0276, 0.0253, 0.0348, 0.0290, 0.0276], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:29:22,254 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-03 16:29:44,707 INFO [train.py:903] (3/4) Epoch 30, batch 3350, loss[loss=0.2, simple_loss=0.2736, pruned_loss=0.0632, over 19736.00 frames. ], tot_loss[loss=0.198, simple_loss=0.28, pruned_loss=0.05802, over 3828259.21 frames. ], batch size: 51, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:30:05,863 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201378.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:21,794 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.184e+02 4.772e+02 5.759e+02 7.012e+02 1.305e+03, threshold=1.152e+03, percent-clipped=2.0 +2023-04-03 16:30:37,323 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201403.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:30:48,360 INFO [train.py:903] (3/4) Epoch 30, batch 3400, loss[loss=0.2107, simple_loss=0.2855, pruned_loss=0.06795, over 19665.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2807, pruned_loss=0.0582, over 3820069.83 frames. ], batch size: 55, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:16,058 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201433.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:31:51,805 INFO [train.py:903] (3/4) Epoch 30, batch 3450, loss[loss=0.2084, simple_loss=0.2971, pruned_loss=0.05985, over 19582.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2811, pruned_loss=0.05878, over 3815987.75 frames. ], batch size: 61, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:31:57,362 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9_rvb from training. Duration: 25.2444375 +2023-04-03 16:32:27,320 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.371e+02 5.269e+02 6.567e+02 8.372e+02 2.121e+03, threshold=1.313e+03, percent-clipped=9.0 +2023-04-03 16:32:29,858 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7601, 1.7598, 1.6676, 1.4719, 1.4402, 1.4910, 0.2574, 0.7039], + device='cuda:3'), covar=tensor([0.0692, 0.0667, 0.0448, 0.0721, 0.1271, 0.0796, 0.1388, 0.1219], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0366, 0.0371, 0.0396, 0.0474, 0.0401, 0.0349, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 16:32:55,651 INFO [train.py:903] (3/4) Epoch 30, batch 3500, loss[loss=0.1769, simple_loss=0.2553, pruned_loss=0.04926, over 19735.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2812, pruned_loss=0.05903, over 3793554.65 frames. ], batch size: 46, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:33:41,237 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201548.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:33:58,213 INFO [train.py:903] (3/4) Epoch 30, batch 3550, loss[loss=0.1805, simple_loss=0.2632, pruned_loss=0.04885, over 19774.00 frames. 
], tot_loss[loss=0.1989, simple_loss=0.2811, pruned_loss=0.05839, over 3808226.98 frames. ], batch size: 54, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:34:35,156 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.339e+02 4.645e+02 6.006e+02 7.208e+02 1.141e+03, threshold=1.201e+03, percent-clipped=0.0 +2023-04-03 16:34:37,086 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-04-03 16:35:01,955 INFO [train.py:903] (3/4) Epoch 30, batch 3600, loss[loss=0.1807, simple_loss=0.2566, pruned_loss=0.05243, over 19308.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2813, pruned_loss=0.0585, over 3808406.95 frames. ], batch size: 44, lr: 2.73e-03, grad_scale: 8.0 +2023-04-03 16:35:06,946 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201615.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:35:14,966 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4613, 1.4757, 1.5805, 1.6099, 1.8020, 1.9619, 1.8507, 0.6483], + device='cuda:3'), covar=tensor([0.2382, 0.4342, 0.2752, 0.1926, 0.1604, 0.2340, 0.1398, 0.4917], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0677, 0.0764, 0.0514, 0.0637, 0.0551, 0.0670, 0.0579], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 16:35:32,771 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=201636.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:04,985 INFO [train.py:903] (3/4) Epoch 30, batch 3650, loss[loss=0.2029, simple_loss=0.2732, pruned_loss=0.06633, over 19141.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2808, pruned_loss=0.05898, over 3789162.58 frames. ], batch size: 42, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:36:05,310 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201662.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:20,487 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0817, 4.4505, 4.8185, 4.8375, 1.7797, 4.5784, 3.8952, 4.5518], + device='cuda:3'), covar=tensor([0.1720, 0.0885, 0.0663, 0.0659, 0.6408, 0.0864, 0.0707, 0.1129], + device='cuda:3'), in_proj_covar=tensor([0.0824, 0.0798, 0.1008, 0.0886, 0.0874, 0.0775, 0.0596, 0.0942], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 16:36:27,550 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201679.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:36:42,039 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 4.844e+02 5.974e+02 7.556e+02 1.962e+03, threshold=1.195e+03, percent-clipped=4.0 +2023-04-03 16:37:09,545 INFO [train.py:903] (3/4) Epoch 30, batch 3700, loss[loss=0.1982, simple_loss=0.2687, pruned_loss=0.06384, over 19735.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2809, pruned_loss=0.05843, over 3797993.66 frames. 
], batch size: 45, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:37:16,772 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.6180, 1.5274, 1.7394, 1.5549, 3.2225, 1.1739, 2.5741, 3.6260], + device='cuda:3'), covar=tensor([0.0512, 0.2817, 0.2668, 0.1870, 0.0683, 0.2565, 0.1239, 0.0248], + device='cuda:3'), in_proj_covar=tensor([0.0431, 0.0386, 0.0404, 0.0358, 0.0390, 0.0364, 0.0405, 0.0427], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:37:20,704 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-03 16:37:23,588 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=201724.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:31,413 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201730.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:37:57,747 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=201751.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:38:11,311 INFO [train.py:903] (3/4) Epoch 30, batch 3750, loss[loss=0.1749, simple_loss=0.2498, pruned_loss=0.04999, over 19760.00 frames. ], tot_loss[loss=0.199, simple_loss=0.281, pruned_loss=0.05848, over 3797380.23 frames. ], batch size: 46, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:38:47,452 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.241e+02 4.921e+02 6.534e+02 8.381e+02 2.079e+03, threshold=1.307e+03, percent-clipped=7.0 +2023-04-03 16:39:04,352 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201804.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:14,021 INFO [train.py:903] (3/4) Epoch 30, batch 3800, loss[loss=0.2376, simple_loss=0.3245, pruned_loss=0.0754, over 19536.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2818, pruned_loss=0.05883, over 3819576.36 frames. ], batch size: 54, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:39:35,678 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=201829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:39:41,263 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914_sp0.9_rvb from training. Duration: 29.1166875 +2023-04-03 16:39:41,670 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.3234, 3.0011, 2.3674, 2.3437, 2.3162, 2.6624, 1.0445, 2.1850], + device='cuda:3'), covar=tensor([0.0712, 0.0643, 0.0780, 0.1324, 0.1103, 0.1167, 0.1582, 0.1230], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0365, 0.0371, 0.0395, 0.0473, 0.0401, 0.0348, 0.0351], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 16:39:46,578 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-04-03 16:40:15,946 INFO [train.py:903] (3/4) Epoch 30, batch 3850, loss[loss=0.2107, simple_loss=0.2955, pruned_loss=0.06296, over 18711.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2829, pruned_loss=0.05961, over 3812800.67 frames. 
], batch size: 74, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:40:51,662 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.977e+02 6.719e+02 8.916e+02 2.147e+03, threshold=1.344e+03, percent-clipped=8.0 +2023-04-03 16:41:00,222 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9749, 2.0339, 2.3095, 2.5680, 1.9236, 2.4640, 2.2759, 2.0025], + device='cuda:3'), covar=tensor([0.4492, 0.4288, 0.2124, 0.2707, 0.4498, 0.2448, 0.5501, 0.3843], + device='cuda:3'), in_proj_covar=tensor([0.0956, 0.1039, 0.0757, 0.0966, 0.0934, 0.0873, 0.0872, 0.0821], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:41:18,287 INFO [train.py:903] (3/4) Epoch 30, batch 3900, loss[loss=0.2094, simple_loss=0.281, pruned_loss=0.06891, over 19692.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.282, pruned_loss=0.05951, over 3801776.75 frames. ], batch size: 53, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:10,307 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0900, 2.1837, 2.4925, 2.7384, 2.1166, 2.6377, 2.3695, 2.2380], + device='cuda:3'), covar=tensor([0.4655, 0.4276, 0.2103, 0.2461, 0.4301, 0.2276, 0.5498, 0.3491], + device='cuda:3'), in_proj_covar=tensor([0.0956, 0.1039, 0.0756, 0.0965, 0.0932, 0.0874, 0.0872, 0.0821], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:42:20,922 INFO [train.py:903] (3/4) Epoch 30, batch 3950, loss[loss=0.1875, simple_loss=0.2704, pruned_loss=0.05233, over 19682.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2808, pruned_loss=0.05867, over 3805395.83 frames. ], batch size: 53, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:42:20,946 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9 from training. Duration: 28.0944375 +2023-04-03 16:42:51,066 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=201986.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:42:56,914 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 4.814e+02 5.725e+02 7.208e+02 1.816e+03, threshold=1.145e+03, percent-clipped=2.0 +2023-04-03 16:43:11,427 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-03 16:43:16,600 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202006.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:18,126 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202007.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:19,120 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202008.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:23,682 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202011.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:24,452 INFO [train.py:903] (3/4) Epoch 30, batch 4000, loss[loss=0.225, simple_loss=0.3064, pruned_loss=0.07185, over 19389.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2814, pruned_loss=0.05887, over 3808408.10 frames. 
], batch size: 70, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:43:38,593 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202023.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:43:42,204 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1560, 2.8096, 1.7914, 1.7901, 2.6322, 1.6868, 1.4800, 2.3553], + device='cuda:3'), covar=tensor([0.1216, 0.0949, 0.1109, 0.1023, 0.0597, 0.1262, 0.1037, 0.0661], + device='cuda:3'), in_proj_covar=tensor([0.0304, 0.0322, 0.0345, 0.0277, 0.0255, 0.0350, 0.0292, 0.0277], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:43:47,429 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.3451, 1.3392, 1.4772, 1.4929, 1.8936, 1.7589, 1.9090, 1.1524], + device='cuda:3'), covar=tensor([0.2132, 0.3731, 0.2363, 0.1756, 0.1386, 0.2052, 0.1317, 0.4421], + device='cuda:3'), in_proj_covar=tensor([0.0556, 0.0677, 0.0764, 0.0514, 0.0637, 0.0551, 0.0670, 0.0579], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 16:43:49,619 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202032.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:44:08,192 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp1.1_rvb from training. Duration: 0.7545625 +2023-04-03 16:44:27,670 INFO [train.py:903] (3/4) Epoch 30, batch 4050, loss[loss=0.1593, simple_loss=0.2401, pruned_loss=0.0392, over 19752.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2813, pruned_loss=0.05878, over 3814821.97 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:44:35,678 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202068.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:02,976 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.018e+02 4.674e+02 6.115e+02 7.260e+02 2.667e+03, threshold=1.223e+03, percent-clipped=7.0 +2023-04-03 16:45:30,404 INFO [train.py:903] (3/4) Epoch 30, batch 4100, loss[loss=0.2033, simple_loss=0.2886, pruned_loss=0.059, over 19601.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2813, pruned_loss=0.05858, over 3819029.27 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:45:42,124 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202121.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:45:47,978 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.9139, 4.3434, 4.6693, 4.6496, 1.8294, 4.4117, 3.8128, 4.4121], + device='cuda:3'), covar=tensor([0.1740, 0.0898, 0.0612, 0.0760, 0.6260, 0.0978, 0.0685, 0.1050], + device='cuda:3'), in_proj_covar=tensor([0.0826, 0.0803, 0.1014, 0.0893, 0.0877, 0.0777, 0.0600, 0.0944], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 16:45:48,423 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 16:46:03,041 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202138.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:46:04,982 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1_rvb from training. 
Duration: 0.97725 +2023-04-03 16:46:32,902 INFO [train.py:903] (3/4) Epoch 30, batch 4150, loss[loss=0.1889, simple_loss=0.271, pruned_loss=0.05337, over 19543.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05934, over 3805592.84 frames. ], batch size: 54, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:46:45,118 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-03 16:46:59,282 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202183.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:47:08,854 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.767e+02 4.823e+02 5.653e+02 7.013e+02 1.196e+03, threshold=1.131e+03, percent-clipped=0.0 +2023-04-03 16:47:27,798 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.0633, 5.1184, 5.9280, 5.9188, 1.9889, 5.6142, 4.6869, 5.6033], + device='cuda:3'), covar=tensor([0.1842, 0.0853, 0.0577, 0.0642, 0.6567, 0.0840, 0.0641, 0.1142], + device='cuda:3'), in_proj_covar=tensor([0.0829, 0.0805, 0.1015, 0.0894, 0.0878, 0.0778, 0.0601, 0.0947], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 16:47:34,531 INFO [train.py:903] (3/4) Epoch 30, batch 4200, loss[loss=0.1774, simple_loss=0.2484, pruned_loss=0.05322, over 18724.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.281, pruned_loss=0.05838, over 3819051.83 frames. ], batch size: 41, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:47:37,784 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9 from training. Duration: 26.438875 +2023-04-03 16:48:35,837 INFO [train.py:903] (3/4) Epoch 30, batch 4250, loss[loss=0.1893, simple_loss=0.2729, pruned_loss=0.0528, over 19688.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2824, pruned_loss=0.05927, over 3834586.03 frames. ], batch size: 53, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:48:52,007 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0003-9465_sp0.9_rvb from training. Duration: 29.816625 +2023-04-03 16:48:57,127 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7592, 1.6064, 1.6039, 2.4749, 1.7893, 1.9774, 1.9908, 1.8031], + device='cuda:3'), covar=tensor([0.0843, 0.0924, 0.1033, 0.0596, 0.0822, 0.0781, 0.0883, 0.0713], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0223, 0.0228, 0.0240, 0.0227, 0.0216, 0.0187, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 16:49:03,283 WARNING [train.py:1073] (3/4) Exclude cut with ID 4964-30587-0040-138716_sp0.9 from training. Duration: 25.0944375 +2023-04-03 16:49:12,501 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.375e+02 5.334e+02 6.771e+02 9.277e+02 2.113e+03, threshold=1.354e+03, percent-clipped=7.0 +2023-04-03 16:49:38,454 INFO [train.py:903] (3/4) Epoch 30, batch 4300, loss[loss=0.1818, simple_loss=0.2649, pruned_loss=0.04933, over 19800.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05896, over 3836981.57 frames. ], batch size: 49, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:50:28,702 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=202352.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:50:34,353 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9 from training. 
Duration: 33.038875 +2023-04-03 16:50:41,242 INFO [train.py:903] (3/4) Epoch 30, batch 4350, loss[loss=0.2155, simple_loss=0.3099, pruned_loss=0.06052, over 19744.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2815, pruned_loss=0.05865, over 3840154.64 frames. ], batch size: 63, lr: 2.72e-03, grad_scale: 4.0 +2023-04-03 16:50:59,687 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202377.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:05,029 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.6579, 4.2399, 2.6774, 3.6989, 0.8636, 4.2320, 4.0607, 4.1710], + device='cuda:3'), covar=tensor([0.0595, 0.0942, 0.2069, 0.0936, 0.4135, 0.0701, 0.1006, 0.1290], + device='cuda:3'), in_proj_covar=tensor([0.0541, 0.0440, 0.0525, 0.0365, 0.0416, 0.0464, 0.0459, 0.0495], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 16:51:18,783 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.299e+02 4.712e+02 5.570e+02 7.585e+02 1.545e+03, threshold=1.114e+03, percent-clipped=3.0 +2023-04-03 16:51:22,541 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202394.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:31,130 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202402.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:51:43,039 INFO [train.py:903] (3/4) Epoch 30, batch 4400, loss[loss=0.1991, simple_loss=0.2709, pruned_loss=0.06369, over 19378.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2817, pruned_loss=0.05926, over 3824035.80 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:51:52,573 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202419.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:08,207 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_rvb from training. Duration: 25.285 +2023-04-03 16:52:17,004 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0009-44385_sp0.9 from training. Duration: 27.02225 +2023-04-03 16:52:17,410 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:46,168 INFO [train.py:903] (3/4) Epoch 30, batch 4450, loss[loss=0.1765, simple_loss=0.2534, pruned_loss=0.04978, over 15995.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2801, pruned_loss=0.05821, over 3823572.53 frames. ], batch size: 35, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:52:48,945 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202464.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:52,378 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=202467.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:52:58,717 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.50 vs. 
limit=5.0 +2023-04-03 16:53:23,819 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 4.882e+02 5.921e+02 7.342e+02 1.295e+03, threshold=1.184e+03, percent-clipped=2.0 +2023-04-03 16:53:39,216 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.6912, 1.8751, 2.1502, 1.9794, 3.2686, 2.7193, 3.5398, 1.8267], + device='cuda:3'), covar=tensor([0.2621, 0.4456, 0.2953, 0.1956, 0.1523, 0.2232, 0.1643, 0.4426], + device='cuda:3'), in_proj_covar=tensor([0.0561, 0.0682, 0.0769, 0.0519, 0.0640, 0.0555, 0.0673, 0.0583], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:53:48,741 INFO [train.py:903] (3/4) Epoch 30, batch 4500, loss[loss=0.232, simple_loss=0.3188, pruned_loss=0.07255, over 19601.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2809, pruned_loss=0.05844, over 3817755.61 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:54:50,692 INFO [train.py:903] (3/4) Epoch 30, batch 4550, loss[loss=0.2485, simple_loss=0.3314, pruned_loss=0.08285, over 19608.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2806, pruned_loss=0.05822, over 3810260.13 frames. ], batch size: 57, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:55:01,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9_rvb from training. Duration: 28.72225 +2023-04-03 16:55:25,955 WARNING [train.py:1073] (3/4) Exclude cut with ID 4278-13270-0009-62705_sp0.9 from training. Duration: 25.45 +2023-04-03 16:55:27,094 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 5.006e+02 6.254e+02 8.173e+02 1.576e+03, threshold=1.251e+03, percent-clipped=3.0 +2023-04-03 16:55:52,912 INFO [train.py:903] (3/4) Epoch 30, batch 4600, loss[loss=0.2123, simple_loss=0.295, pruned_loss=0.06479, over 19527.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2811, pruned_loss=0.05883, over 3815597.26 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:56:54,622 INFO [train.py:903] (3/4) Epoch 30, batch 4650, loss[loss=0.1953, simple_loss=0.2729, pruned_loss=0.05888, over 19609.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2808, pruned_loss=0.05846, over 3809919.84 frames. ], batch size: 50, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:57:12,731 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_rvb from training. Duration: 0.92 +2023-04-03 16:57:22,314 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8040, 1.9031, 2.1761, 2.2999, 1.7601, 2.2489, 2.1313, 1.9766], + device='cuda:3'), covar=tensor([0.4407, 0.4134, 0.2020, 0.2541, 0.4273, 0.2338, 0.5437, 0.3714], + device='cuda:3'), in_proj_covar=tensor([0.0953, 0.1036, 0.0753, 0.0964, 0.0930, 0.0871, 0.0868, 0.0820], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 16:57:23,006 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_rvb from training. Duration: 0.83 +2023-04-03 16:57:31,776 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.029e+02 5.947e+02 7.618e+02 1.686e+03, threshold=1.189e+03, percent-clipped=3.0 +2023-04-03 16:57:55,568 INFO [train.py:903] (3/4) Epoch 30, batch 4700, loss[loss=0.1992, simple_loss=0.2876, pruned_loss=0.05534, over 19546.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2811, pruned_loss=0.05895, over 3803522.50 frames. 
], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:58:10,563 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=202723.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:18,351 WARNING [train.py:1073] (3/4) Exclude cut with ID 3033-130750-0096-107983_sp0.9_rvb from training. Duration: 0.92225 +2023-04-03 16:58:41,355 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=202748.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 16:58:59,217 INFO [train.py:903] (3/4) Epoch 30, batch 4750, loss[loss=0.1848, simple_loss=0.2785, pruned_loss=0.04556, over 19536.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2809, pruned_loss=0.05857, over 3798769.62 frames. ], batch size: 56, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 16:59:35,972 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.278e+02 5.055e+02 5.959e+02 7.636e+02 1.705e+03, threshold=1.192e+03, percent-clipped=6.0 +2023-04-03 17:00:01,990 INFO [train.py:903] (3/4) Epoch 30, batch 4800, loss[loss=0.1886, simple_loss=0.2789, pruned_loss=0.04911, over 19661.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2815, pruned_loss=0.05892, over 3808375.94 frames. ], batch size: 55, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:00:08,682 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-03 17:00:19,798 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-03 17:00:47,843 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=202848.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:01:03,241 INFO [train.py:903] (3/4) Epoch 30, batch 4850, loss[loss=0.1622, simple_loss=0.2399, pruned_loss=0.04221, over 19756.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2815, pruned_loss=0.05889, over 3802219.82 frames. ], batch size: 47, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:01:27,953 WARNING [train.py:1073] (3/4) Exclude cut with ID 774-127930-0014-48411_sp1.1_rvb from training. Duration: 0.95 +2023-04-03 17:01:34,074 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3579, 1.9648, 2.1415, 2.8575, 1.9849, 2.4558, 2.4683, 2.3422], + device='cuda:3'), covar=tensor([0.0769, 0.0881, 0.0902, 0.0757, 0.0916, 0.0746, 0.0914, 0.0631], + device='cuda:3'), in_proj_covar=tensor([0.0213, 0.0224, 0.0228, 0.0240, 0.0227, 0.0216, 0.0188, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 17:01:41,373 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.446e+02 5.041e+02 6.050e+02 7.335e+02 2.031e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:01:47,279 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1_rvb from training. Duration: 0.9409375 +2023-04-03 17:01:53,230 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467 from training. Duration: 25.035 +2023-04-03 17:01:55,296 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9 from training. 
Duration: 27.511125 +2023-04-03 17:02:02,748 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.8566, 1.9647, 2.2939, 2.2944, 1.7748, 2.1946, 2.2466, 2.1486], + device='cuda:3'), covar=tensor([0.4248, 0.3990, 0.2013, 0.2546, 0.4229, 0.2392, 0.5112, 0.3462], + device='cuda:3'), in_proj_covar=tensor([0.0950, 0.1035, 0.0753, 0.0961, 0.0929, 0.0870, 0.0867, 0.0817], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:02:03,502 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp1.1_rvb from training. Duration: 25.3818125 +2023-04-03 17:02:05,914 INFO [train.py:903] (3/4) Epoch 30, batch 4900, loss[loss=0.1668, simple_loss=0.2425, pruned_loss=0.04551, over 19732.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2806, pruned_loss=0.05851, over 3794341.59 frames. ], batch size: 45, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:02:25,014 WARNING [train.py:1073] (3/4) Exclude cut with ID 3972-170212-0014-103914 from training. Duration: 26.205 +2023-04-03 17:03:07,819 INFO [train.py:903] (3/4) Epoch 30, batch 4950, loss[loss=0.1988, simple_loss=0.276, pruned_loss=0.0608, over 19465.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2816, pruned_loss=0.0595, over 3800538.20 frames. ], batch size: 49, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:03:22,407 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0007-9590_sp0.9 from training. Duration: 28.72225 +2023-04-03 17:03:27,587 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-03 17:03:43,835 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.771e+02 6.104e+02 8.250e+02 2.222e+03, threshold=1.221e+03, percent-clipped=2.0 +2023-04-03 17:03:46,198 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585 from training. Duration: 0.92 +2023-04-03 17:04:09,798 INFO [train.py:903] (3/4) Epoch 30, batch 5000, loss[loss=0.1782, simple_loss=0.2588, pruned_loss=0.04882, over 19477.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2812, pruned_loss=0.05936, over 3795737.56 frames. ], batch size: 49, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:04:16,534 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_rvb from training. Duration: 27.14 +2023-04-03 17:04:28,733 WARNING [train.py:1073] (3/4) Exclude cut with ID 5239-32139-0047-92994_sp0.9 from training. Duration: 30.1555625 +2023-04-03 17:05:02,533 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203056.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:09,000 INFO [train.py:903] (3/4) Epoch 30, batch 5050, loss[loss=0.2251, simple_loss=0.3086, pruned_loss=0.07079, over 19586.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2807, pruned_loss=0.05871, over 3803562.77 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:05:18,170 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([6.2366, 5.6773, 3.0654, 4.9335, 1.3989, 5.8219, 5.6251, 5.8136], + device='cuda:3'), covar=tensor([0.0346, 0.0748, 0.1887, 0.0728, 0.3534, 0.0401, 0.0816, 0.0834], + device='cuda:3'), in_proj_covar=tensor([0.0538, 0.0437, 0.0520, 0.0361, 0.0413, 0.0461, 0.0454, 0.0491], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 17:05:45,255 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9_rvb from training. 
Duration: 26.62775 +2023-04-03 17:05:46,418 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 4.511e+02 5.411e+02 6.959e+02 2.884e+03, threshold=1.082e+03, percent-clipped=1.0 +2023-04-03 17:05:59,241 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203103.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:05:59,653 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=2.63 vs. limit=5.0 +2023-04-03 17:06:10,217 INFO [train.py:903] (3/4) Epoch 30, batch 5100, loss[loss=0.1784, simple_loss=0.2602, pruned_loss=0.04829, over 19842.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2808, pruned_loss=0.05867, over 3813550.99 frames. ], batch size: 52, lr: 2.72e-03, grad_scale: 8.0 +2023-04-03 17:06:23,794 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9 from training. Duration: 31.02225 +2023-04-03 17:06:27,358 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0005-9467_rvb from training. Duration: 25.035 +2023-04-03 17:06:28,814 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0915, 1.7316, 1.8700, 2.8942, 2.1186, 2.3648, 2.3747, 2.1284], + device='cuda:3'), covar=tensor([0.0821, 0.0921, 0.1001, 0.0728, 0.0826, 0.0769, 0.0860, 0.0677], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0239, 0.0226, 0.0216, 0.0187, 0.0208], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 17:06:30,778 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_rvb from training. Duration: 27.92 +2023-04-03 17:07:12,614 INFO [train.py:903] (3/4) Epoch 30, batch 5150, loss[loss=0.2033, simple_loss=0.2856, pruned_loss=0.06048, over 19280.00 frames. ], tot_loss[loss=0.198, simple_loss=0.28, pruned_loss=0.058, over 3820608.92 frames. ], batch size: 66, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:07:27,945 WARNING [train.py:1073] (3/4) Exclude cut with ID 2411-132532-0017-25057_sp1.1 from training. Duration: 0.9681875 +2023-04-03 17:07:49,458 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.244e+02 5.115e+02 6.435e+02 8.085e+02 2.061e+03, threshold=1.287e+03, percent-clipped=7.0 +2023-04-03 17:07:49,683 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203192.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:08:02,564 WARNING [train.py:1073] (3/4) Exclude cut with ID 4133-6541-0027-26893_sp1.1_rvb from training. Duration: 0.9681875 +2023-04-03 17:08:14,981 INFO [train.py:903] (3/4) Epoch 30, batch 5200, loss[loss=0.1999, simple_loss=0.2794, pruned_loss=0.06017, over 19467.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2802, pruned_loss=0.05821, over 3816154.97 frames. ], batch size: 49, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:08:30,103 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9 from training. Duration: 28.638875 +2023-04-03 17:09:14,153 WARNING [train.py:1073] (3/4) Exclude cut with ID 6709-74022-0004-57021_sp1.1 from training. Duration: 0.9409375 +2023-04-03 17:09:16,379 INFO [train.py:903] (3/4) Epoch 30, batch 5250, loss[loss=0.1951, simple_loss=0.2676, pruned_loss=0.06132, over 18198.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2802, pruned_loss=0.05821, over 3814116.03 frames. 
], batch size: 40, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:09:53,738 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.978e+02 4.929e+02 6.276e+02 8.119e+02 2.486e+03, threshold=1.255e+03, percent-clipped=4.0 +2023-04-03 17:10:12,248 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203307.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:10:17,558 INFO [train.py:903] (3/4) Epoch 30, batch 5300, loss[loss=0.1593, simple_loss=0.249, pruned_loss=0.03477, over 19805.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2804, pruned_loss=0.05846, over 3822229.48 frames. ], batch size: 49, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:10:36,829 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390 from training. Duration: 27.92 +2023-04-03 17:11:02,659 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.9431, 4.4871, 2.8345, 3.9672, 1.1136, 4.5100, 4.3681, 4.4535], + device='cuda:3'), covar=tensor([0.0532, 0.0938, 0.1871, 0.0807, 0.3937, 0.0592, 0.0918, 0.1044], + device='cuda:3'), in_proj_covar=tensor([0.0537, 0.0436, 0.0519, 0.0361, 0.0412, 0.0461, 0.0453, 0.0490], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 17:11:18,772 INFO [train.py:903] (3/4) Epoch 30, batch 5350, loss[loss=0.2123, simple_loss=0.2998, pruned_loss=0.06239, over 19326.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2806, pruned_loss=0.05875, over 3821889.47 frames. ], batch size: 70, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:11:53,869 WARNING [train.py:1073] (3/4) Exclude cut with ID 8291-282929-0024-9607_sp0.9_rvb from training. Duration: 26.438875 +2023-04-03 17:11:56,057 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.690e+02 5.524e+02 6.450e+02 8.325e+02 1.910e+03, threshold=1.290e+03, percent-clipped=5.0 +2023-04-03 17:12:05,339 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203400.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:12:20,979 INFO [train.py:903] (3/4) Epoch 30, batch 5400, loss[loss=0.2249, simple_loss=0.2874, pruned_loss=0.08123, over 19432.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2814, pruned_loss=0.05928, over 3816229.77 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:02,976 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203447.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:20,914 INFO [train.py:903] (3/4) Epoch 30, batch 5450, loss[loss=0.2132, simple_loss=0.3047, pruned_loss=0.06084, over 19616.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2817, pruned_loss=0.05945, over 3822567.83 frames. 
], batch size: 57, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:13:21,398 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3065, 2.3230, 2.5538, 2.9793, 2.3040, 2.8160, 2.5081, 2.3310], + device='cuda:3'), covar=tensor([0.4341, 0.4567, 0.2036, 0.2872, 0.4739, 0.2514, 0.5095, 0.3601], + device='cuda:3'), in_proj_covar=tensor([0.0954, 0.1036, 0.0753, 0.0963, 0.0930, 0.0869, 0.0867, 0.0819], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:13:50,543 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203485.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:13:59,460 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.086e+02 4.910e+02 6.531e+02 8.117e+02 1.984e+03, threshold=1.306e+03, percent-clipped=3.0 +2023-04-03 17:14:10,106 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4526, 1.3473, 1.8996, 1.6827, 3.0562, 4.5898, 4.4481, 5.0908], + device='cuda:3'), covar=tensor([0.1640, 0.4149, 0.3628, 0.2466, 0.0676, 0.0204, 0.0187, 0.0197], + device='cuda:3'), in_proj_covar=tensor([0.0282, 0.0336, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0283], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 17:14:23,410 INFO [train.py:903] (3/4) Epoch 30, batch 5500, loss[loss=0.2134, simple_loss=0.2973, pruned_loss=0.06476, over 19429.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2823, pruned_loss=0.05965, over 3816903.59 frames. ], batch size: 70, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:14:26,977 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203515.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:14:48,302 WARNING [train.py:1073] (3/4) Exclude cut with ID 3488-85273-0017-111273_sp0.9_rvb from training. Duration: 27.47775 +2023-04-03 17:15:24,339 INFO [train.py:903] (3/4) Epoch 30, batch 5550, loss[loss=0.1877, simple_loss=0.2673, pruned_loss=0.05406, over 19842.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2819, pruned_loss=0.05918, over 3818337.94 frames. ], batch size: 52, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:15:24,659 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203562.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:15:26,892 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203563.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:15:31,867 WARNING [train.py:1073] (3/4) Exclude cut with ID 7255-291500-0009-9471_sp0.9 from training. 
Duration: 26.62775 +2023-04-03 17:15:56,784 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203588.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:16:01,699 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.118e+02 4.952e+02 6.311e+02 7.819e+02 2.120e+03, threshold=1.262e+03, percent-clipped=2.0 +2023-04-03 17:16:03,243 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7697, 1.5695, 1.6581, 2.2125, 1.6397, 1.9827, 1.9702, 1.8317], + device='cuda:3'), covar=tensor([0.0808, 0.0895, 0.0921, 0.0713, 0.0887, 0.0768, 0.0858, 0.0695], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0223, 0.0227, 0.0238, 0.0226, 0.0215, 0.0187, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 17:16:20,043 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([5.2841, 5.2293, 6.1039, 6.1064, 1.9994, 5.7507, 4.8251, 5.8009], + device='cuda:3'), covar=tensor([0.1787, 0.0718, 0.0545, 0.0641, 0.6829, 0.0733, 0.0627, 0.1099], + device='cuda:3'), in_proj_covar=tensor([0.0830, 0.0807, 0.1016, 0.0892, 0.0879, 0.0779, 0.0600, 0.0947], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 17:16:22,145 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403 from training. Duration: 29.735 +2023-04-03 17:16:27,763 INFO [train.py:903] (3/4) Epoch 30, batch 5600, loss[loss=0.2151, simple_loss=0.2948, pruned_loss=0.06769, over 19642.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2827, pruned_loss=0.05971, over 3823195.68 frames. ], batch size: 50, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:16:48,417 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-03 17:17:02,907 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.0960, 2.1437, 2.4509, 2.6255, 2.0676, 2.5329, 2.4177, 2.2525], + device='cuda:3'), covar=tensor([0.4437, 0.4299, 0.1998, 0.2716, 0.4369, 0.2468, 0.5141, 0.3533], + device='cuda:3'), in_proj_covar=tensor([0.0955, 0.1039, 0.0755, 0.0966, 0.0933, 0.0871, 0.0869, 0.0821], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:17:29,530 INFO [train.py:903] (3/4) Epoch 30, batch 5650, loss[loss=0.1989, simple_loss=0.2831, pruned_loss=0.05731, over 17451.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2814, pruned_loss=0.0591, over 3824633.56 frames. ], batch size: 101, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:18:06,343 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.203e+02 4.913e+02 5.869e+02 7.597e+02 1.607e+03, threshold=1.174e+03, percent-clipped=2.0 +2023-04-03 17:18:18,649 WARNING [train.py:1073] (3/4) Exclude cut with ID 6951-79737-0043-83149_sp0.9_rvb from training. Duration: 28.0944375 +2023-04-03 17:18:29,969 INFO [train.py:903] (3/4) Epoch 30, batch 5700, loss[loss=0.1897, simple_loss=0.2643, pruned_loss=0.05755, over 19716.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.05977, over 3816981.85 frames. 
], batch size: 46, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:03,862 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4471, 1.4943, 1.8379, 1.7372, 2.8293, 2.2912, 2.9877, 1.2561], + device='cuda:3'), covar=tensor([0.2774, 0.4799, 0.3064, 0.2130, 0.1483, 0.2361, 0.1474, 0.5171], + device='cuda:3'), in_proj_covar=tensor([0.0562, 0.0684, 0.0769, 0.0518, 0.0641, 0.0555, 0.0674, 0.0584], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:19:31,945 INFO [train.py:903] (3/4) Epoch 30, batch 5750, loss[loss=0.1796, simple_loss=0.2764, pruned_loss=0.04139, over 19501.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05942, over 3825492.62 frames. ], batch size: 64, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:19:35,373 WARNING [train.py:1073] (3/4) Exclude cut with ID 2929-85685-0079-61403_sp0.9_rvb from training. Duration: 33.038875 +2023-04-03 17:19:35,667 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203765.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:44,462 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203771.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:19:45,208 WARNING [train.py:1073] (3/4) Exclude cut with ID 7859-102521-0017-21930_sp0.9 from training. Duration: 27.25 +2023-04-03 17:19:51,013 WARNING [train.py:1073] (3/4) Exclude cut with ID 497-129325-0061-9566_sp1.1 from training. Duration: 0.97725 +2023-04-03 17:20:00,635 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203784.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:09,168 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.021e+02 5.985e+02 7.897e+02 1.857e+03, threshold=1.197e+03, percent-clipped=7.0 +2023-04-03 17:20:15,341 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203796.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:35,340 INFO [train.py:903] (3/4) Epoch 30, batch 5800, loss[loss=0.1843, simple_loss=0.2732, pruned_loss=0.04766, over 19666.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2814, pruned_loss=0.05934, over 3820998.91 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:20:38,237 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-03 17:20:43,827 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=203818.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:20:55,518 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.4825, 2.1333, 1.7428, 1.3108, 1.9711, 1.2654, 1.3547, 1.9379], + device='cuda:3'), covar=tensor([0.1060, 0.0845, 0.1062, 0.1179, 0.0691, 0.1442, 0.0819, 0.0534], + device='cuda:3'), in_proj_covar=tensor([0.0307, 0.0324, 0.0347, 0.0279, 0.0257, 0.0351, 0.0293, 0.0281], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 17:20:56,429 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=203829.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:13,724 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=203843.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:21:37,905 INFO [train.py:903] (3/4) Epoch 30, batch 5850, loss[loss=0.2047, simple_loss=0.2906, pruned_loss=0.05937, over 19752.00 frames. 
], tot_loss[loss=0.1994, simple_loss=0.2809, pruned_loss=0.05901, over 3824120.59 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:21:41,711 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203865.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:22:15,806 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.919e+02 5.824e+02 7.977e+02 1.569e+03, threshold=1.165e+03, percent-clipped=4.0 +2023-04-03 17:22:39,479 INFO [train.py:903] (3/4) Epoch 30, batch 5900, loss[loss=0.1612, simple_loss=0.2446, pruned_loss=0.03885, over 19481.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.282, pruned_loss=0.0598, over 3808513.79 frames. ], batch size: 49, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:22:42,668 WARNING [train.py:1073] (3/4) Exclude cut with ID 1914-133440-0024-53073_sp0.9 from training. Duration: 25.2444375 +2023-04-03 17:22:47,262 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=203918.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:23:06,099 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0021-44397_sp0.9_rvb from training. Duration: 27.511125 +2023-04-03 17:23:19,559 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=203944.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:23:25,701 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.9964, 2.1298, 2.3706, 2.6792, 2.0824, 2.5856, 2.3267, 2.1586], + device='cuda:3'), covar=tensor([0.4460, 0.4326, 0.2118, 0.2808, 0.4708, 0.2475, 0.5266, 0.3634], + device='cuda:3'), in_proj_covar=tensor([0.0952, 0.1036, 0.0753, 0.0965, 0.0931, 0.0870, 0.0865, 0.0818], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:23:40,674 INFO [train.py:903] (3/4) Epoch 30, batch 5950, loss[loss=0.2146, simple_loss=0.2947, pruned_loss=0.06729, over 19681.00 frames. ], tot_loss[loss=0.202, simple_loss=0.283, pruned_loss=0.06055, over 3812094.83 frames. ], batch size: 59, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:18,145 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.982e+02 5.092e+02 6.183e+02 7.281e+02 1.501e+03, threshold=1.237e+03, percent-clipped=4.0 +2023-04-03 17:24:45,151 INFO [train.py:903] (3/4) Epoch 30, batch 6000, loss[loss=0.2165, simple_loss=0.3002, pruned_loss=0.06643, over 19776.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2827, pruned_loss=0.05992, over 3818004.35 frames. ], batch size: 56, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:24:45,151 INFO [train.py:928] (3/4) Computing validation loss +2023-04-03 17:24:58,727 INFO [train.py:937] (3/4) Epoch 30, validation: loss=0.167, simple_loss=0.2658, pruned_loss=0.03407, over 944034.00 frames. +2023-04-03 17:24:58,728 INFO [train.py:938] (3/4) Maximum memory allocated so far is 18741MB +2023-04-03 17:26:02,080 INFO [train.py:903] (3/4) Epoch 30, batch 6050, loss[loss=0.2038, simple_loss=0.2899, pruned_loss=0.05887, over 19349.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2816, pruned_loss=0.05934, over 3812667.12 frames. ], batch size: 70, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:26:38,219 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.289e+02 5.054e+02 6.352e+02 7.854e+02 1.582e+03, threshold=1.270e+03, percent-clipped=1.0 +2023-04-03 17:26:40,404 INFO [scaling.py:679] (3/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-04-03 17:26:42,110 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204095.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:00,737 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204109.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:27:04,111 INFO [train.py:903] (3/4) Epoch 30, batch 6100, loss[loss=0.1891, simple_loss=0.2591, pruned_loss=0.05957, over 19787.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2814, pruned_loss=0.05963, over 3812553.66 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:27:23,325 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204128.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:04,387 INFO [train.py:903] (3/4) Epoch 30, batch 6150, loss[loss=0.1897, simple_loss=0.2787, pruned_loss=0.05035, over 19781.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2824, pruned_loss=0.05969, over 3814580.81 frames. ], batch size: 54, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:28:04,790 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([0.9855, 1.2707, 1.5921, 0.6764, 2.0216, 2.5058, 2.2039, 2.6873], + device='cuda:3'), covar=tensor([0.1546, 0.3922, 0.3501, 0.2940, 0.0667, 0.0278, 0.0346, 0.0376], + device='cuda:3'), in_proj_covar=tensor([0.0281, 0.0336, 0.0368, 0.0275, 0.0258, 0.0200, 0.0221, 0.0282], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:3') +2023-04-03 17:28:29,840 INFO [zipformer.py:1188] (3/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=204181.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:28:34,976 WARNING [train.py:1073] (3/4) Exclude cut with ID 7357-94126-0014-44390_sp0.9_rvb from training. Duration: 31.02225 +2023-04-03 17:28:42,856 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.206e+02 5.061e+02 6.485e+02 7.945e+02 1.594e+03, threshold=1.297e+03, percent-clipped=2.0 +2023-04-03 17:28:52,863 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204200.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:03,794 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204209.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:07,249 INFO [train.py:903] (3/4) Epoch 30, batch 6200, loss[loss=0.1621, simple_loss=0.2472, pruned_loss=0.03847, over 19577.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05999, over 3794078.86 frames. 
], batch size: 52, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:29:24,454 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204224.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:25,562 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204225.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:29:29,110 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7231, 1.7574, 1.6204, 1.4333, 1.4636, 1.4418, 0.3032, 0.6721], + device='cuda:3'), covar=tensor([0.0722, 0.0721, 0.0498, 0.0808, 0.1299, 0.0850, 0.1374, 0.1272], + device='cuda:3'), in_proj_covar=tensor([0.0369, 0.0369, 0.0372, 0.0397, 0.0476, 0.0402, 0.0349, 0.0352], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 17:29:45,851 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204243.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:30:09,328 INFO [train.py:903] (3/4) Epoch 30, batch 6250, loss[loss=0.2457, simple_loss=0.3135, pruned_loss=0.08895, over 12665.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.06, over 3787707.71 frames. ], batch size: 136, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:30:10,640 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204262.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:30:18,766 INFO [scaling.py:679] (3/4) Whitening: num_groups=1, num_channels=256, metric=3.87 vs. limit=5.0 +2023-04-03 17:30:38,604 WARNING [train.py:1073] (3/4) Exclude cut with ID 6758-72288-0033-148662_sp0.9_rvb from training. Duration: 25.988875 +2023-04-03 17:30:46,053 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.091e+02 5.004e+02 5.938e+02 7.424e+02 1.100e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-04-03 17:31:10,306 INFO [train.py:903] (3/4) Epoch 30, batch 6300, loss[loss=0.198, simple_loss=0.2746, pruned_loss=0.06071, over 19628.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.06032, over 3795528.39 frames. ], batch size: 50, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:31:24,177 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204324.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:32:11,321 INFO [train.py:903] (3/4) Epoch 30, batch 6350, loss[loss=0.1818, simple_loss=0.2554, pruned_loss=0.05407, over 19409.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2822, pruned_loss=0.0598, over 3799189.71 frames. 
], batch size: 48, lr: 2.71e-03, grad_scale: 4.0 +2023-04-03 17:32:30,587 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204377.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:32:50,298 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.055e+02 4.728e+02 6.049e+02 7.442e+02 1.533e+03, threshold=1.210e+03, percent-clipped=4.0 +2023-04-03 17:33:07,776 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.1106, 2.0517, 1.9392, 1.7681, 1.6680, 1.7716, 0.5899, 1.0532], + device='cuda:3'), covar=tensor([0.0734, 0.0703, 0.0507, 0.0813, 0.1241, 0.0925, 0.1448, 0.1227], + device='cuda:3'), in_proj_covar=tensor([0.0367, 0.0366, 0.0370, 0.0394, 0.0472, 0.0398, 0.0347, 0.0350], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 17:33:12,664 INFO [train.py:903] (3/4) Epoch 30, batch 6400, loss[loss=0.1831, simple_loss=0.2566, pruned_loss=0.05486, over 19387.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2821, pruned_loss=0.05974, over 3810009.92 frames. ], batch size: 48, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:33:45,718 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204439.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:33:45,889 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.8155, 3.3157, 3.3436, 3.3431, 1.4351, 3.2119, 2.7960, 3.1476], + device='cuda:3'), covar=tensor([0.1855, 0.1133, 0.0846, 0.0994, 0.5936, 0.1227, 0.0896, 0.1332], + device='cuda:3'), in_proj_covar=tensor([0.0825, 0.0803, 0.1009, 0.0883, 0.0872, 0.0774, 0.0597, 0.0940], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:3') +2023-04-03 17:34:00,722 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.2392, 1.2937, 1.2834, 1.1097, 1.0980, 1.1492, 0.1023, 0.3863], + device='cuda:3'), covar=tensor([0.0830, 0.0744, 0.0535, 0.0698, 0.1434, 0.0778, 0.1517, 0.1300], + device='cuda:3'), in_proj_covar=tensor([0.0368, 0.0367, 0.0370, 0.0395, 0.0474, 0.0399, 0.0349, 0.0350], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 17:34:13,529 INFO [train.py:903] (3/4) Epoch 30, batch 6450, loss[loss=0.1527, simple_loss=0.231, pruned_loss=0.03725, over 19773.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2824, pruned_loss=0.05956, over 3811708.86 frames. ], batch size: 47, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:34:35,941 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204480.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:34:51,302 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 4.981e+02 6.276e+02 8.010e+02 2.155e+03, threshold=1.255e+03, percent-clipped=8.0 +2023-04-03 17:34:54,949 WARNING [train.py:1073] (3/4) Exclude cut with ID 5622-44585-0006-50425_sp0.9_rvb from training. Duration: 28.638875 +2023-04-03 17:34:59,509 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204499.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:08,056 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204505.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:16,837 INFO [train.py:903] (3/4) Epoch 30, batch 6500, loss[loss=0.2262, simple_loss=0.3081, pruned_loss=0.07219, over 18137.00 frames. 
], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05925, over 3817193.02 frames. ], batch size: 83, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:35:19,109 WARNING [train.py:1073] (3/4) Exclude cut with ID 3557-8342-0013-71585_sp1.1_rvb from training. Duration: 0.836375 +2023-04-03 17:35:30,878 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204524.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:35:31,809 INFO [zipformer.py:1188] (3/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=204525.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:09,153 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204554.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:11,508 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.7493, 1.6150, 1.6823, 2.1671, 1.7343, 2.0238, 1.9138, 1.8442], + device='cuda:3'), covar=tensor([0.0776, 0.0844, 0.0888, 0.0614, 0.0811, 0.0688, 0.0853, 0.0635], + device='cuda:3'), in_proj_covar=tensor([0.0212, 0.0221, 0.0227, 0.0238, 0.0226, 0.0215, 0.0186, 0.0207], + device='cuda:3'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0004, 0.0005], + device='cuda:3') +2023-04-03 17:36:18,054 INFO [train.py:903] (3/4) Epoch 30, batch 6550, loss[loss=0.2193, simple_loss=0.3006, pruned_loss=0.06899, over 19589.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2811, pruned_loss=0.05876, over 3808205.09 frames. ], batch size: 61, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:36:28,054 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([4.0398, 3.6995, 2.6111, 3.3084, 0.8821, 3.7079, 3.5274, 3.6624], + device='cuda:3'), covar=tensor([0.0769, 0.1042, 0.1839, 0.0946, 0.3801, 0.0696, 0.1062, 0.1113], + device='cuda:3'), in_proj_covar=tensor([0.0535, 0.0434, 0.0517, 0.0358, 0.0407, 0.0457, 0.0451, 0.0486], + device='cuda:3'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 17:36:40,518 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204580.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:36:56,886 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.390e+02 5.288e+02 6.916e+02 9.470e+02 2.608e+03, threshold=1.383e+03, percent-clipped=11.0 +2023-04-03 17:37:11,407 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204605.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:37:20,120 INFO [train.py:903] (3/4) Epoch 30, batch 6600, loss[loss=0.2133, simple_loss=0.2955, pruned_loss=0.06552, over 19661.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2812, pruned_loss=0.05847, over 3812034.07 frames. ], batch size: 58, lr: 2.71e-03, grad_scale: 8.0 +2023-04-03 17:37:46,930 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204633.0, num_to_drop=1, layers_to_drop={0} +2023-04-03 17:37:54,607 INFO [zipformer.py:1188] (3/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=204640.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:38:17,215 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204658.0, num_to_drop=1, layers_to_drop={1} +2023-04-03 17:38:21,420 INFO [train.py:903] (3/4) Epoch 30, batch 6650, loss[loss=0.211, simple_loss=0.283, pruned_loss=0.06956, over 19691.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2816, pruned_loss=0.05876, over 3806524.67 frames. 
], batch size: 53, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:38:59,516 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.417e+02 4.923e+02 6.497e+02 8.594e+02 3.232e+03, threshold=1.299e+03, percent-clipped=5.0 +2023-04-03 17:39:24,423 INFO [train.py:903] (3/4) Epoch 30, batch 6700, loss[loss=0.2168, simple_loss=0.2929, pruned_loss=0.07033, over 19539.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2808, pruned_loss=0.05838, over 3820267.64 frames. ], batch size: 56, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:39:42,300 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([3.5049, 1.6760, 1.8634, 1.7408, 3.2019, 1.4079, 2.5955, 3.5614], + device='cuda:3'), covar=tensor([0.0554, 0.2539, 0.2529, 0.1820, 0.0599, 0.2338, 0.1399, 0.0271], + device='cuda:3'), in_proj_covar=tensor([0.0426, 0.0381, 0.0402, 0.0357, 0.0386, 0.0361, 0.0401, 0.0423], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:3') +2023-04-03 17:40:21,757 INFO [train.py:903] (3/4) Epoch 30, batch 6750, loss[loss=0.2542, simple_loss=0.3245, pruned_loss=0.09194, over 19402.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2799, pruned_loss=0.05818, over 3807544.18 frames. ], batch size: 70, lr: 2.70e-03, grad_scale: 4.0 +2023-04-03 17:40:41,843 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([1.1832, 1.2505, 1.2278, 1.0594, 0.9381, 1.0865, 0.1674, 0.3582], + device='cuda:3'), covar=tensor([0.0923, 0.0881, 0.0552, 0.0744, 0.1725, 0.0921, 0.1614, 0.1491], + device='cuda:3'), in_proj_covar=tensor([0.0371, 0.0370, 0.0374, 0.0399, 0.0477, 0.0402, 0.0351, 0.0353], + device='cuda:3'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:3') +2023-04-03 17:40:46,244 INFO [zipformer.py:2441] (3/4) attn_weights_entropy = tensor([2.3354, 2.3725, 2.5778, 2.9495, 2.3536, 2.8009, 2.5046, 2.3580], + device='cuda:3'), covar=tensor([0.4180, 0.4070, 0.1916, 0.2693, 0.4353, 0.2355, 0.4796, 0.3344], + device='cuda:3'), in_proj_covar=tensor([0.0955, 0.1039, 0.0754, 0.0967, 0.0935, 0.0871, 0.0869, 0.0821], + device='cuda:3'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:3') +2023-04-03 17:40:58,539 INFO [optim.py:369] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.849e+02 6.262e+02 9.905e+02 2.863e+03, threshold=1.252e+03, percent-clipped=11.0 +2023-04-03 17:41:17,946 INFO [zipformer.py:1188] (3/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=204810.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:19,756 INFO [train.py:903] (3/4) Epoch 30, batch 6800, loss[loss=0.2164, simple_loss=0.2907, pruned_loss=0.07102, over 19695.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2796, pruned_loss=0.05788, over 3811641.84 frames. ], batch size: 53, lr: 2.70e-03, grad_scale: 8.0 +2023-04-03 17:41:45,193 INFO [zipformer.py:1188] (3/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=204835.0, num_to_drop=0, layers_to_drop=set() +2023-04-03 17:41:49,905 INFO [train.py:1171] (3/4) Done! 
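
The file added below (`log/modified_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt`) is the per-utterance error report for modified beam search on test-clean. Its headline `%WER` can be reproduced directly from the insertion/deletion/substitution counts in its header; the snippet below is a minimal sanity check, assuming the usual WER definition of (substitutions + deletions + insertions) divided by the number of reference words.

```python
# Sanity-check the %WER reported in the error summary that follows,
# using the counts from its header line.
substitutions = 1450
deletions = 129
insertions = 213
reference_words = 52576

wer = 100.0 * (substitutions + deletions + insertions) / reference_words
print(f"WER = {wer:.2f}%")  # prints 3.41, matching the %WER line in the file
```

The same convention applies to the other decoding-method logs in this repository; only the error counts differ per method and test set.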
diff --git a/log/modified_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..56f413bb7ffd5c8b4c046a7b4eb4ea8c771c2b7d --- /dev/null +++ b/log/modified_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,12735 @@ +%WER = 3.41 +Errors: 213 insertions, 129 deletions, 1450 substitutions, over 52576 reference words (50997 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1089-134686-0000-1733: HE HOPED THERE WOULD BE STEW FOR DINNER TURNIPS AND CARROTS AND BRUISED POTATOES AND FAT MUTTON PIECES TO BE LADLED OUT IN THICK PEPPERED (FLOUR->FLOWER) FATTENED SAUCE +1089-134686-0001-1734: STUFF IT INTO YOU HIS BELLY COUNSELLED HIM +1089-134686-0002-1735: AFTER EARLY (NIGHTFALL->NIGHT FALL) THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS +1089-134686-0003-1736: HELLO BERTIE ANY GOOD IN YOUR MIND +1089-134686-0004-1737: NUMBER TEN FRESH (NELLY->NELLIE) IS WAITING ON YOU GOOD NIGHT HUSBAND +1089-134686-0005-1738: THE MUSIC CAME NEARER AND HE RECALLED THE WORDS THE WORDS OF SHELLEY'S FRAGMENT UPON THE MOON WANDERING COMPANIONLESS PALE FOR WEARINESS +1089-134686-0006-1739: THE DULL LIGHT FELL MORE FAINTLY UPON THE PAGE WHEREON ANOTHER EQUATION BEGAN TO UNFOLD ITSELF SLOWLY AND TO SPREAD ABROAD ITS WIDENING TAIL +1089-134686-0007-1740: A COLD LUCID INDIFFERENCE REIGNED IN HIS SOUL +1089-134686-0008-1741: THE CHAOS IN WHICH HIS (ARDOUR->ARDOR) EXTINGUISHED ITSELF WAS A COLD INDIFFERENT KNOWLEDGE OF HIMSELF +1089-134686-0009-1742: AT MOST BY AN ALMS GIVEN TO A BEGGAR WHOSE BLESSING HE FLED FROM HE MIGHT HOPE WEARILY TO WIN FOR HIMSELF SOME MEASURE OF ACTUAL GRACE +1089-134686-0010-1743: WELL NOW ENNIS I DECLARE YOU HAVE A HEAD AND SO HAS MY STICK +1089-134686-0011-1744: ON SATURDAY MORNINGS WHEN THE (SODALITY->SODELITY) MET IN THE CHAPEL TO RECITE THE LITTLE OFFICE HIS PLACE WAS A CUSHIONED KNEELING DESK AT THE RIGHT OF THE ALTAR FROM WHICH HE LED HIS WING OF BOYS THROUGH THE RESPONSES +1089-134686-0012-1745: HER EYES SEEMED TO REGARD HIM WITH MILD PITY HER HOLINESS A STRANGE LIGHT GLOWING FAINTLY UPON HER FRAIL FLESH DID NOT HUMILIATE THE SINNER WHO APPROACHED HER +1089-134686-0013-1746: IF EVER HE WAS IMPELLED TO CAST SIN FROM HIM AND TO REPENT THE IMPULSE THAT MOVED HIM WAS THE WISH TO BE HER KNIGHT +1089-134686-0014-1747: HE TRIED TO THINK HOW IT COULD BE +1089-134686-0015-1748: BUT THE DUSK DEEPENING IN THE SCHOOLROOM COVERED OVER HIS THOUGHTS THE BELL RANG +1089-134686-0016-1749: THEN YOU CAN ASK HIM QUESTIONS ON THE CATECHISM (DEDALUS->DAEDALUS) +1089-134686-0017-1750: STEPHEN LEANING BACK AND DRAWING IDLY ON HIS SCRIBBLER LISTENED TO THE TALK ABOUT HIM WHICH HERON CHECKED FROM TIME TO TIME BY SAYING +1089-134686-0018-1751: IT WAS STRANGE TOO THAT HE FOUND AN ARID PLEASURE IN FOLLOWING UP TO THE END THE RIGID LINES OF THE DOCTRINES OF THE CHURCH AND PENETRATING INTO OBSCURE SILENCES ONLY TO HEAR AND FEEL THE MORE DEEPLY HIS OWN CONDEMNATION +1089-134686-0019-1752: THE SENTENCE OF SAINT JAMES WHICH SAYS THAT HE WHO OFFENDS AGAINST ONE COMMANDMENT BECOMES GUILTY OF ALL HAD SEEMED TO HIM FIRST A SWOLLEN 
PHRASE UNTIL HE HAD BEGUN TO GROPE IN THE DARKNESS OF HIS OWN STATE +1089-134686-0020-1753: IF A MAN HAD STOLEN A POUND IN HIS YOUTH AND HAD USED THAT POUND TO (AMASS->A MASS) A HUGE FORTUNE HOW MUCH WAS HE OBLIGED TO GIVE BACK THE POUND HE HAD STOLEN ONLY OR THE POUND TOGETHER WITH THE COMPOUND INTEREST ACCRUING UPON IT OR ALL HIS HUGE FORTUNE +1089-134686-0021-1754: IF A LAYMAN IN GIVING BAPTISM POUR THE WATER BEFORE SAYING THE WORDS IS THE CHILD BAPTIZED +1089-134686-0022-1755: HOW COMES IT THAT WHILE THE FIRST BEATITUDE PROMISES THE KINGDOM OF HEAVEN TO THE POOR OF HEART THE SECOND BEATITUDE PROMISES ALSO TO THE MEEK THAT THEY SHALL POSSESS THE LAND +1089-134686-0023-1756: WHY WAS THE SACRAMENT OF THE EUCHARIST INSTITUTED UNDER THE TWO SPECIES OF BREAD AND WINE IF JESUS CHRIST BE PRESENT BODY AND BLOOD SOUL AND DIVINITY IN THE BREAD ALONE AND IN THE WINE ALONE +1089-134686-0024-1757: IF THE WINE CHANGE INTO VINEGAR AND THE HOST CRUMBLE INTO CORRUPTION AFTER THEY HAVE BEEN CONSECRATED IS JESUS CHRIST STILL PRESENT UNDER THEIR SPECIES AS GOD AND AS MAN +1089-134686-0025-1758: A GENTLE KICK FROM THE TALL BOY IN THE BENCH BEHIND URGED STEPHEN TO ASK A DIFFICULT QUESTION +1089-134686-0026-1759: THE RECTOR DID NOT ASK FOR A CATECHISM TO HEAR THE LESSON FROM +1089-134686-0027-1760: HE CLASPED HIS HANDS ON THE DESK AND SAID +1089-134686-0028-1761: THE RETREAT WILL BEGIN ON WEDNESDAY AFTERNOON IN (HONOUR->HONOR) OF SAINT FRANCIS (XAVIER->ZAVIOUR) WHOSE FEAST DAY IS SATURDAY +1089-134686-0029-1762: ON FRIDAY CONFESSION WILL BE HEARD ALL THE AFTERNOON AFTER BEADS +1089-134686-0030-1763: BEWARE OF MAKING THAT MISTAKE +1089-134686-0031-1764: STEPHEN'S HEART BEGAN SLOWLY TO FOLD AND FADE WITH FEAR LIKE A WITHERING FLOWER +1089-134686-0032-1765: HE (IS->HAS) CALLED AS YOU KNOW THE APOSTLE OF THE INDIES +1089-134686-0033-1766: A GREAT SAINT SAINT FRANCIS (XAVIER->ZAVIER) +1089-134686-0034-1767: THE RECTOR PAUSED AND THEN SHAKING HIS CLASPED HANDS BEFORE HIM WENT ON +1089-134686-0035-1768: HE HAD THE FAITH IN HIM THAT MOVES MOUNTAINS +1089-134686-0036-1769: A GREAT SAINT SAINT FRANCIS (XAVIER->ZEVIER) +1089-134686-0037-1770: IN THE SILENCE THEIR DARK FIRE KINDLED THE DUSK INTO A TAWNY GLOW +1089-134691-0000-1707: HE COULD WAIT NO LONGER +1089-134691-0001-1708: FOR A FULL HOUR HE HAD PACED UP AND DOWN WAITING BUT HE COULD WAIT NO LONGER +1089-134691-0002-1709: HE SET OFF ABRUPTLY FOR THE BULL WALKING RAPIDLY LEST HIS FATHER'S SHRILL WHISTLE MIGHT CALL HIM BACK AND IN A FEW MOMENTS HE HAD ROUNDED THE CURVE AT THE POLICE BARRACK AND WAS SAFE +1089-134691-0003-1710: THE UNIVERSITY +1089-134691-0004-1711: PRIDE AFTER SATISFACTION UPLIFTED HIM LIKE LONG SLOW WAVES +1089-134691-0005-1712: WHOSE FEET ARE AS THE FEET OF (HARTS->HEARTS) AND UNDERNEATH THE EVERLASTING ARMS +1089-134691-0006-1713: THE PRIDE OF THAT DIM IMAGE BROUGHT BACK TO HIS MIND THE DIGNITY OF THE OFFICE HE HAD REFUSED +1089-134691-0007-1714: SOON THE WHOLE BRIDGE WAS TREMBLING AND RESOUNDING +1089-134691-0008-1715: THE UNCOUTH FACES PASSED HIM TWO BY TWO STAINED YELLOW OR RED OR LIVID BY THE SEA AND AS HE STROVE TO LOOK AT THEM WITH EASE AND INDIFFERENCE A FAINT STAIN OF PERSONAL SHAME AND COMMISERATION ROSE TO HIS OWN FACE +1089-134691-0009-1716: ANGRY WITH HIMSELF HE TRIED TO HIDE HIS FACE FROM THEIR EYES BY GAZING DOWN SIDEWAYS INTO THE SHALLOW SWIRLING WATER UNDER THE BRIDGE BUT HE STILL SAW A REFLECTION THEREIN OF THEIR TOP HEAVY SILK HATS AND HUMBLE TAPE LIKE COLLARS AND LOOSELY HANGING CLERICAL CLOTHES BROTHER HICKEY 
+1089-134691-0010-1717: BROTHER (MAC ARDLE->MICARTLE) BROTHER (KEOGH->KIEV) +1089-134691-0011-1718: THEIR PIETY WOULD BE LIKE THEIR NAMES LIKE THEIR FACES LIKE THEIR CLOTHES AND (IT->*) WAS IDLE FOR HIM TO TELL HIMSELF THAT THEIR HUMBLE AND CONTRITE HEARTS IT MIGHT BE PAID A FAR RICHER TRIBUTE OF DEVOTION THAN HIS HAD EVER BEEN A GIFT TENFOLD MORE ACCEPTABLE THAN HIS ELABORATE ADORATION +1089-134691-0012-1719: IT WAS IDLE FOR HIM TO MOVE HIMSELF TO BE GENEROUS TOWARDS THEM TO TELL HIMSELF THAT IF HE EVER CAME TO THEIR GATES STRIPPED OF HIS PRIDE BEATEN AND IN BEGGAR'S WEEDS THAT THEY WOULD BE GENEROUS TOWARDS HIM LOVING HIM AS THEMSELVES +1089-134691-0013-1720: IDLE AND EMBITTERING FINALLY TO ARGUE AGAINST HIS OWN DISPASSIONATE CERTITUDE THAT THE COMMANDMENT OF LOVE BADE US NOT TO LOVE OUR NEIGHBOUR AS OURSELVES WITH THE SAME AMOUNT AND INTENSITY OF LOVE BUT TO LOVE HIM AS OURSELVES WITH THE SAME KIND OF LOVE +1089-134691-0014-1721: THE PHRASE AND THE DAY AND THE SCENE HARMONIZED IN (A CHORD->ACCORD) +1089-134691-0015-1722: WORDS WAS IT THEIR (COLOURS->COLORS) +1089-134691-0016-1723: THEY WERE VOYAGING ACROSS THE DESERTS OF THE SKY A HOST OF NOMADS ON THE MARCH VOYAGING HIGH OVER IRELAND WESTWARD BOUND +1089-134691-0017-1724: THE EUROPE THEY HAD COME FROM LAY OUT THERE BEYOND THE IRISH SEA EUROPE OF STRANGE TONGUES AND VALLEYED AND (WOODBEGIRT->WOOD BEGIRT) AND (CITADELLED->CITADELED) AND OF ENTRENCHED AND MARSHALLED RACES +1089-134691-0018-1725: AGAIN AGAIN +1089-134691-0019-1726: A VOICE FROM BEYOND THE WORLD WAS CALLING +1089-134691-0020-1727: HELLO STEPHANOS HERE COMES THE (DEDALUS->DAEDALUS) +1089-134691-0021-1728: THEIR DIVING STONE POISED ON ITS RUDE SUPPORTS AND ROCKING UNDER THEIR PLUNGES AND THE ROUGH HEWN STONES OF THE SLOPING BREAKWATER OVER WHICH THEY SCRAMBLED IN THEIR (HORSEPLAY->HORSE PLAY) GLEAMED WITH COLD WET LUSTRE +1089-134691-0022-1729: HE STOOD STILL IN DEFERENCE TO THEIR CALLS AND PARRIED THEIR BANTER WITH EASY WORDS +1089-134691-0023-1730: IT WAS A PAIN TO SEE THEM AND A SWORD LIKE PAIN TO SEE THE SIGNS OF ADOLESCENCE THAT MADE REPELLENT THEIR PITIABLE NAKEDNESS +1089-134691-0024-1731: STEPHANOS (DEDALOS->DE LOSS) +1089-134691-0025-1732: A MOMENT BEFORE THE GHOST OF THE ANCIENT KINGDOM OF THE DANES HAD LOOKED FORTH THROUGH THE VESTURE OF THE (HAZEWRAPPED->HAYES WRAPPED) CITY +1188-133604-0000-1771: YOU WILL FIND ME CONTINUALLY SPEAKING OF FOUR MEN TITIAN (HOLBEIN->HOLBINE) TURNER AND (TINTORET->TINTARETTE) IN ALMOST THE SAME TERMS +1188-133604-0001-1772: (THEY->THE) UNITE EVERY QUALITY AND SOMETIMES YOU WILL FIND ME REFERRING TO THEM AS COLORISTS SOMETIMES AS (CHIAROSCURISTS->KIERRASCURISTS) +1188-133604-0002-1773: BY BEING STUDIOUS OF (COLOR->COLOUR) THEY ARE STUDIOUS OF DIVISION AND WHILE THE (CHIAROSCURIST->CUIRASCURISTS) DEVOTES HIMSELF TO THE REPRESENTATION OF DEGREES OF FORCE IN ONE THING UNSEPARATED LIGHT THE COLORISTS HAVE FOR THEIR FUNCTION THE ATTAINMENT OF BEAUTY BY ARRANGEMENT OF THE DIVISIONS OF LIGHT +1188-133604-0003-1774: MY FIRST AND PRINCIPAL REASON WAS THAT THEY ENFORCED BEYOND ALL RESISTANCE ON ANY STUDENT WHO MIGHT ATTEMPT TO COPY THEM THIS METHOD OF LAYING PORTIONS OF DISTINCT HUE SIDE BY SIDE +1188-133604-0004-1775: SOME OF THE TOUCHES INDEED WHEN THE TINT HAS BEEN MIXED WITH MUCH WATER HAVE BEEN LAID IN LITTLE DROPS OR PONDS SO THAT THE PIGMENT MIGHT CRYSTALLIZE HARD AT THE EDGE +1188-133604-0005-1776: IT IS THE HEAD OF A PARROT WITH A LITTLE FLOWER IN HIS BEAK FROM A PICTURE OF (CARPACCIO'S->CARPATIUS) ONE OF HIS SERIES OF THE LIFE OF SAINT 
GEORGE +1188-133604-0006-1777: THEN HE COMES TO THE BEAK OF IT +1188-133604-0007-1778: THE BROWN GROUND BENEATH IS LEFT FOR THE MOST PART ONE TOUCH OF BLACK IS PUT FOR THE HOLLOW (TWO->TOO) DELICATE LINES OF DARK (GRAY->GREY) DEFINE THE OUTER CURVE AND ONE LITTLE QUIVERING TOUCH OF WHITE DRAWS THE INNER EDGE OF THE MANDIBLE +1188-133604-0008-1779: FOR BELIEVE ME THE FINAL PHILOSOPHY OF ART CAN ONLY RATIFY THEIR OPINION THAT THE BEAUTY OF A COCK ROBIN IS TO BE (RED->READ) AND OF A GRASS PLOT TO BE GREEN AND THE BEST SKILL OF ART IS (IN->AN) INSTANTLY SEIZING ON THE MANIFOLD DELICIOUSNESS OF LIGHT WHICH YOU CAN ONLY SEIZE BY PRECISION OF INSTANTANEOUS TOUCH +1188-133604-0009-1780: NOW YOU WILL SEE IN THESE STUDIES THAT THE MOMENT THE WHITE IS (INCLOSED->ENCLOSED) PROPERLY AND (HARMONIZED->HARMONIZE) WITH THE OTHER HUES IT BECOMES SOMEHOW MORE PRECIOUS AND PEARLY THAN THE WHITE PAPER AND THAT I AM NOT AFRAID TO LEAVE A WHOLE FIELD OF UNTREATED WHITE PAPER ALL ROUND IT BEING SURE THAT EVEN THE LITTLE DIAMONDS IN THE ROUND WINDOW WILL TELL AS JEWELS IF THEY ARE GRADATED JUSTLY +1188-133604-0010-1781: BUT IN THIS (VIGNETTE->VINEYARD) COPIED FROM TURNER YOU HAVE THE TWO PRINCIPLES BROUGHT OUT PERFECTLY +1188-133604-0011-1782: THEY ARE BEYOND ALL OTHER WORKS (THAT->THAN) I KNOW EXISTING DEPENDENT FOR THEIR EFFECT ON LOW SUBDUED TONES THEIR FAVORITE CHOICE IN TIME OF DAY BEING EITHER DAWN OR TWILIGHT AND EVEN THEIR BRIGHTEST SUNSETS PRODUCED CHIEFLY OUT OF GRAY PAPER +1188-133604-0012-1783: IT MAY BE THAT A GREAT (COLORIST->COLORLESS) WILL USE HIS UTMOST FORCE OF COLOR AS A SINGER HIS FULL POWER OF VOICE BUT LOUD OR LOW THE VIRTUE IS IN BOTH CASES ALWAYS IN REFINEMENT NEVER IN LOUDNESS +1188-133604-0013-1784: IT MUST REMEMBER BE ONE OR THE OTHER +1188-133604-0014-1785: DO NOT THEREFORE THINK THAT THE GOTHIC (SCHOOL IS->SCHOOLS) AN EASY ONE +1188-133604-0015-1786: THE LAW OF THAT SCHOOL IS THAT EVERYTHING SHALL BE SEEN CLEARLY OR AT LEAST ONLY IN SUCH MIST OR FAINTNESS AS SHALL BE DELIGHTFUL AND I HAVE NO DOUBT THAT THE BEST INTRODUCTION TO IT WOULD BE THE ELEMENTARY PRACTICE OF PAINTING EVERY STUDY ON A GOLDEN GROUND +1188-133604-0016-1787: THIS AT ONCE COMPELS YOU TO UNDERSTAND THAT THE WORK IS TO BE IMAGINATIVE AND DECORATIVE THAT IT REPRESENTS BEAUTIFUL THINGS IN THE CLEAREST WAY BUT NOT UNDER EXISTING CONDITIONS AND THAT IN FACT YOU ARE PRODUCING (JEWELER'S->JEWELLERS) WORK RATHER THAN PICTURES +1188-133604-0017-1788: THAT A STYLE (IS->WAS) RESTRAINED OR SEVERE DOES NOT MEAN THAT IT IS ALSO ERRONEOUS +1188-133604-0018-1789: IN ALL EARLY GOTHIC ART INDEED YOU WILL FIND FAILURE OF THIS KIND ESPECIALLY DISTORTION AND RIGIDITY WHICH ARE IN MANY RESPECTS PAINFULLY TO BE COMPARED WITH THE SPLENDID REPOSE OF CLASSIC ART +1188-133604-0019-1790: THE LARGE LETTER CONTAINS INDEED ENTIRELY FEEBLE AND ILL DRAWN FIGURES THAT IS MERELY CHILDISH (AND->IN) FAILING WORK OF AN INFERIOR HAND IT IS NOT CHARACTERISTIC OF GOTHIC OR ANY OTHER SCHOOL +1188-133604-0020-1791: BUT OBSERVE YOU CAN ONLY DO THIS ON ONE CONDITION THAT OF STRIVING ALSO TO CREATE IN REALITY THE BEAUTY WHICH YOU SEEK IN IMAGINATION +1188-133604-0021-1792: IT WILL BE WHOLLY IMPOSSIBLE FOR YOU TO RETAIN THE TRANQUILLITY OF TEMPER AND FELICITY OF FAITH NECESSARY FOR NOBLE (PURIST->PUREST) PAINTING UNLESS YOU ARE ACTIVELY ENGAGED IN PROMOTING THE FELICITY AND PEACE OF PRACTICAL LIFE +1188-133604-0022-1793: YOU MUST LOOK AT HIM IN THE FACE FIGHT HIM CONQUER HIM WITH WHAT (SCATHE->SCATH) YOU MAY YOU NEED NOT THINK TO KEEP OUT OF THE WAY OF HIM 
+1188-133604-0023-1794: THE (COLORIST->CHOLERIST) SAYS FIRST OF ALL AS MY DELICIOUS (PAROQUET->PARAQUET) WAS RUBY SO THIS NASTY VIPER SHALL BE BLACK AND THEN (IS->AS) THE QUESTION CAN I ROUND HIM OFF EVEN THOUGH HE IS BLACK AND MAKE HIM SLIMY AND YET SPRINGY AND CLOSE DOWN CLOTTED LIKE A POOL OF BLACK BLOOD ON THE EARTH ALL THE SAME +1188-133604-0024-1795: NOTHING WILL BE MORE PRECIOUS TO YOU I THINK IN THE PRACTICAL STUDY OF ART THAN THE CONVICTION WHICH WILL FORCE ITSELF ON YOU MORE AND MORE EVERY HOUR OF THE WAY ALL THINGS ARE BOUND TOGETHER LITTLE AND GREAT IN SPIRIT AND IN MATTER +1188-133604-0025-1796: YOU KNOW I HAVE JUST BEEN TELLING YOU HOW THIS SCHOOL OF MATERIALISM (AND->IN) CLAY INVOLVED ITSELF AT LAST IN CLOUD AND FIRE +1188-133604-0026-1797: HERE IS AN EQUALLY TYPICAL GREEK SCHOOL LANDSCAPE BY WILSON LOST WHOLLY IN GOLDEN MIST THE TREES SO SLIGHTLY DRAWN THAT YOU DON'T KNOW IF THEY ARE TREES OR TOWERS AND NO CARE FOR COLOR (WHATEVER->WHATSOEVER) PERFECTLY DECEPTIVE AND (MARVELOUS->MARVELLOUS) EFFECT OF SUNSHINE THROUGH THE MIST APOLLO (AND->IN) THE PYTHON +1188-133604-0027-1798: NOW HERE IS RAPHAEL EXACTLY BETWEEN THE TWO TREES STILL DRAWN LEAF BY LEAF WHOLLY FORMAL BUT BEAUTIFUL MIST COMING GRADUALLY INTO THE DISTANCE +1188-133604-0028-1799: WELL THEN LAST HERE IS TURNER'S GREEK SCHOOL OF THE HIGHEST CLASS AND YOU DEFINE HIS ART ABSOLUTELY AS FIRST THE DISPLAYING INTENSELY AND WITH THE STERNEST INTELLECT OF NATURAL FORM AS IT IS AND THEN THE ENVELOPMENT OF IT WITH CLOUD AND FIRE +1188-133604-0029-1800: ONLY THERE ARE TWO SORTS OF CLOUD (AND->IN) FIRE +1188-133604-0030-1801: HE KNOWS THEM BOTH +1188-133604-0031-1802: THERE'S ONE AND THERE'S ANOTHER THE DUDLEY AND THE FLINT +1188-133604-0032-1803: IT IS ONLY A PENCIL OUTLINE BY EDWARD BURNE JONES IN ILLUSTRATION OF THE STORY OF PSYCHE IT IS THE INTRODUCTION OF PSYCHE AFTER ALL HER TROUBLES INTO HEAVEN +1188-133604-0033-1804: EVERY PLANT IN THE GRASS IS SET FORMALLY GROWS PERFECTLY AND MAY BE REALIZED COMPLETELY +1188-133604-0034-1805: EXQUISITE ORDER AND UNIVERSAL WITH ETERNAL LIFE AND LIGHT THIS IS THE FAITH AND EFFORT OF THE SCHOOLS OF (CRYSTAL->CRISTEL) AND YOU MAY DESCRIBE AND COMPLETE THEIR WORK QUITE LITERALLY BY TAKING ANY VERSES OF CHAUCER IN HIS TENDER MOOD AND OBSERVING HOW HE INSISTS ON THE CLEARNESS AND BRIGHTNESS FIRST AND THEN ON THE ORDER +1188-133604-0035-1806: THUS IN CHAUCER'S DREAM +1188-133604-0036-1807: IN BOTH THESE HIGH MYTHICAL SUBJECTS THE SURROUNDING NATURE THOUGH SUFFERING IS STILL DIGNIFIED AND BEAUTIFUL +1188-133604-0037-1808: EVERY LINE IN WHICH THE MASTER TRACES IT EVEN WHERE SEEMINGLY NEGLIGENT IS LOVELY AND SET DOWN WITH A MEDITATIVE CALMNESS WHICH MAKES THESE TWO ETCHINGS CAPABLE OF BEING PLACED BESIDE THE MOST TRANQUIL WORK OF (HOLBEIN->HOLBINE) OR (DUERER->DURE) +1188-133604-0038-1809: BUT NOW HERE IS A SUBJECT OF WHICH YOU WILL WONDER AT FIRST WHY TURNER DREW IT AT ALL +1188-133604-0039-1810: IT HAS NO BEAUTY WHATSOEVER NO SPECIALTY OF PICTURESQUENESS (AND->IN) ALL ITS LINES ARE CRAMPED AND POOR +1188-133604-0040-1811: THE CRAMPNESS (AND->IN) THE POVERTY ARE ALL INTENDED +1188-133604-0041-1812: IT IS A GLEANER BRINGING DOWN HER ONE SHEAF OF CORN TO AN OLD (WATERMILL->WATER MILL) ITSELF MOSSY AND RENT SCARCELY ABLE TO GET ITS STONES TO TURN +1188-133604-0042-1813: THE SCENE IS ABSOLUTELY ARCADIAN +1188-133604-0043-1814: SEE THAT YOUR LIVES BE IN NOTHING WORSE THAN A BOY'S CLIMBING FOR HIS ENTANGLED KITE +1188-133604-0044-1815: IT WILL BE WELL FOR YOU IF YOU JOIN NOT WITH THOSE WHO INSTEAD OF 
KITES FLY FALCONS WHO INSTEAD OF OBEYING THE LAST WORDS OF THE GREAT CLOUD SHEPHERD TO FEED HIS SHEEP LIVE THE LIVES HOW MUCH LESS THAN VANITY OF THE WAR WOLF AND THE (GIER->GEAR) EAGLE +121-121726-0000-2558: ALSO A POPULAR CONTRIVANCE WHEREBY LOVE MAKING MAY BE SUSPENDED BUT NOT STOPPED DURING THE PICNIC SEASON +121-121726-0001-2559: (HARANGUE->HURRY) THE TIRESOME PRODUCT OF A TIRELESS TONGUE +121-121726-0002-2560: ANGOR PAIN PAINFUL TO HEAR +121-121726-0003-2561: (HAY->HEY) FEVER A (HEART->HARD) TROUBLE CAUSED BY FALLING IN LOVE WITH A GRASS WIDOW +121-121726-0004-2562: HEAVEN A GOOD PLACE TO BE RAISED TO +121-121726-0005-2563: HEDGE A FENCE +121-121726-0006-2564: HEREDITY THE CAUSE OF ALL OUR FAULTS +121-121726-0007-2565: HORSE SENSE A DEGREE OF WISDOM THAT KEEPS ONE FROM BETTING ON THE RACES +121-121726-0008-2566: HOSE MAN'S EXCUSE FOR WETTING THE WALK +121-121726-0009-2567: HOTEL A PLACE WHERE A GUEST OFTEN GIVES UP GOOD DOLLARS FOR POOR QUARTERS +121-121726-0010-2568: (HOUSECLEANING->HOUSE CLEANING) A DOMESTIC UPHEAVAL THAT MAKES IT EASY FOR THE GOVERNMENT TO ENLIST ALL THE SOLDIERS IT NEEDS +121-121726-0011-2569: HUSBAND THE NEXT THING TO A WIFE +121-121726-0012-2570: HUSSY WOMAN AND BOND TIE +121-121726-0013-2571: TIED TO A WOMAN +121-121726-0014-2572: HYPOCRITE A HORSE DEALER +121-123852-0000-2615: THOSE PRETTY WRONGS THAT LIBERTY COMMITS WHEN I AM SOMETIME ABSENT FROM THY HEART THY BEAUTY AND THY YEARS FULL WELL BEFITS FOR STILL TEMPTATION FOLLOWS WHERE THOU ART +121-123852-0001-2616: (AY->I) ME +121-123852-0002-2617: NO MATTER THEN ALTHOUGH MY FOOT DID STAND UPON THE FARTHEST EARTH (REMOV'D->REMOVED) FROM THEE FOR NIMBLE THOUGHT CAN JUMP BOTH SEA AND LAND AS SOON AS THINK THE PLACE WHERE HE WOULD BE BUT AH +121-123852-0003-2618: THOUGHT KILLS ME THAT I AM NOT (THOUGHT->BOUGHT) TO LEAP LARGE LENGTHS OF MILES WHEN THOU ART GONE BUT THAT SO MUCH OF EARTH AND WATER WROUGHT I MUST ATTEND TIME'S LEISURE WITH MY MOAN RECEIVING (NOUGHT->NAUGHT) BY ELEMENTS SO SLOW BUT HEAVY TEARS (BADGES->BADGERS) OF EITHER'S WOE +121-123852-0004-2619: MY HEART DOTH PLEAD THAT THOU IN HIM DOST LIE A CLOSET NEVER (PIERC'D->PIERCED) WITH CRYSTAL EYES BUT THE DEFENDANT DOTH THAT (PLEA->PLEAD) DENY AND SAYS IN HIM THY FAIR APPEARANCE LIES +121-123859-0000-2573: YOU ARE MY ALL THE WORLD AND I MUST STRIVE TO KNOW MY SHAMES AND PRAISES FROM YOUR TONGUE NONE ELSE TO ME NOR I TO NONE ALIVE THAT MY (STEEL'D SENSE->STEELED SCENTS) OR CHANGES RIGHT OR WRONG +121-123859-0001-2574: (O->OH) TIS THE FIRST TIS FLATTERY IN MY SEEING AND MY GREAT MIND MOST KINGLY DRINKS IT UP MINE EYE WELL KNOWS WHAT WITH HIS GUST IS (GREEING->GREEN) AND TO HIS (PALATE->PALLET) DOTH PREPARE THE CUP IF IT BE (POISON'D->POISONED) TIS THE LESSER SIN THAT MINE EYE LOVES IT AND DOTH FIRST BEGIN +121-123859-0002-2575: BUT RECKONING TIME WHOSE (MILLION'D->MILLIONED) ACCIDENTS CREEP IN TWIXT VOWS AND CHANGE DECREES OF KINGS TAN SACRED BEAUTY BLUNT THE (SHARP'ST INTENTS->SHARPEST INTENSE) DIVERT STRONG MINDS TO THE COURSE OF ALTERING THINGS ALAS WHY FEARING OF TIME'S TYRANNY MIGHT I NOT THEN SAY NOW I LOVE YOU BEST WHEN I WAS CERTAIN (O'ER INCERTAINTY->OR IN CERTAINTY) CROWNING THE PRESENT DOUBTING OF THE REST +121-123859-0003-2576: LOVE IS A BABE THEN MIGHT I NOT SAY SO TO GIVE FULL GROWTH TO THAT WHICH STILL DOTH GROW +121-123859-0004-2577: SO I (RETURN REBUK'D->RETURNED REBUKED) TO MY CONTENT AND GAIN BY ILL THRICE MORE THAN I HAVE SPENT +121-127105-0000-2578: IT WAS THIS OBSERVATION THAT DREW FROM DOUGLAS NOT IMMEDIATELY BUT LATER IN THE 
EVENING A REPLY THAT HAD THE INTERESTING CONSEQUENCE TO WHICH I CALL ATTENTION +121-127105-0001-2579: (SOMEONE->SOME ONE) ELSE TOLD A STORY NOT PARTICULARLY EFFECTIVE WHICH I SAW HE WAS NOT FOLLOWING +121-127105-0002-2580: CRIED ONE OF THE WOMEN HE TOOK NO NOTICE OF HER HE LOOKED AT ME BUT AS IF INSTEAD OF ME HE SAW WHAT HE SPOKE OF +121-127105-0003-2581: THERE WAS A UNANIMOUS GROAN AT THIS AND MUCH REPROACH AFTER WHICH IN HIS PREOCCUPIED WAY HE EXPLAINED +121-127105-0004-2582: THE (STORY'S->STORIES) WRITTEN +121-127105-0005-2583: I COULD WRITE TO MY MAN AND ENCLOSE THE KEY HE COULD SEND DOWN THE PACKET AS HE FINDS IT +121-127105-0006-2584: THE OTHERS RESENTED POSTPONEMENT BUT IT WAS JUST HIS SCRUPLES THAT CHARMED ME +121-127105-0007-2585: TO THIS HIS ANSWER WAS PROMPT OH THANK GOD NO AND IS THE RECORD YOURS +121-127105-0008-2586: HE HUNG FIRE AGAIN A WOMAN'S +121-127105-0009-2587: SHE HAS BEEN DEAD THESE TWENTY YEARS +121-127105-0010-2588: SHE SENT ME THE PAGES IN QUESTION BEFORE SHE DIED +121-127105-0011-2589: SHE WAS THE MOST AGREEABLE WOMAN I'VE EVER KNOWN IN HER POSITION SHE WOULD HAVE BEEN WORTHY OF ANY WHATEVER +121-127105-0012-2590: (IT WASN'T->TWASN'T) SIMPLY THAT SHE SAID SO BUT THAT I KNEW SHE HADN'T I WAS SURE I COULD SEE +121-127105-0013-2591: YOU'LL EASILY JUDGE WHY WHEN YOU HEAR BECAUSE THE THING HAD BEEN SUCH A SCARE HE CONTINUED TO FIX ME +121-127105-0014-2592: YOU ARE ACUTE +121-127105-0015-2593: HE QUITTED THE FIRE AND DROPPED BACK INTO HIS CHAIR +121-127105-0016-2594: PROBABLY NOT TILL THE SECOND POST +121-127105-0017-2595: IT WAS ALMOST THE TONE OF HOPE EVERYBODY WILL STAY +121-127105-0018-2596: CRIED THE LADIES WHOSE DEPARTURE HAD BEEN FIXED +121-127105-0019-2597: MISSUS GRIFFIN HOWEVER EXPRESSED THE NEED FOR A LITTLE MORE LIGHT +121-127105-0020-2598: WHO WAS IT SHE WAS IN LOVE WITH THE STORY WILL TELL I TOOK UPON MYSELF TO REPLY OH I CAN'T WAIT FOR THE STORY THE STORY WON'T TELL SAID DOUGLAS NOT IN ANY LITERAL VULGAR WAY MORE'S THE PITY THEN +121-127105-0021-2599: WON'T YOU TELL DOUGLAS +121-127105-0022-2600: (WELL->FOR) IF I DON'T KNOW WHO SHE WAS IN LOVE WITH I KNOW WHO HE WAS +121-127105-0023-2601: LET ME SAY HERE DISTINCTLY TO HAVE DONE WITH IT THAT THIS NARRATIVE FROM AN EXACT TRANSCRIPT OF MY OWN MADE MUCH LATER IS WHAT I SHALL PRESENTLY GIVE +121-127105-0024-2602: POOR DOUGLAS BEFORE HIS DEATH WHEN IT WAS IN SIGHT COMMITTED TO ME THE MANUSCRIPT THAT REACHED HIM ON THE THIRD OF THESE DAYS AND THAT ON THE SAME SPOT WITH IMMENSE EFFECT HE BEGAN TO READ TO OUR HUSHED LITTLE CIRCLE ON THE NIGHT OF THE FOURTH +121-127105-0025-2603: THE DEPARTING LADIES WHO HAD SAID THEY WOULD STAY DIDN'T OF COURSE THANK HEAVEN STAY THEY DEPARTED IN CONSEQUENCE OF ARRANGEMENTS MADE IN A RAGE OF CURIOSITY AS THEY PROFESSED PRODUCED BY THE TOUCHES WITH WHICH HE HAD ALREADY WORKED US UP +121-127105-0026-2604: THE FIRST OF THESE TOUCHES CONVEYED THAT THE WRITTEN STATEMENT TOOK UP THE TALE AT A POINT AFTER IT HAD IN A MANNER BEGUN +121-127105-0027-2605: HE HAD FOR HIS OWN TOWN RESIDENCE A BIG HOUSE FILLED WITH THE SPOILS OF TRAVEL AND THE TROPHIES OF THE CHASE BUT IT WAS TO HIS COUNTRY HOME AN OLD FAMILY PLACE IN ESSEX THAT HE WISHED HER IMMEDIATELY TO PROCEED +121-127105-0028-2606: THE AWKWARD THING WAS THAT THEY HAD PRACTICALLY NO OTHER RELATIONS AND THAT HIS OWN AFFAIRS TOOK UP ALL HIS TIME +121-127105-0029-2607: THERE WERE PLENTY OF PEOPLE TO HELP BUT OF COURSE THE YOUNG LADY WHO SHOULD GO DOWN AS GOVERNESS WOULD BE IN SUPREME AUTHORITY +121-127105-0030-2608: I DON'T ANTICIPATE 
+121-127105-0031-2609: SHE WAS YOUNG UNTRIED NERVOUS IT WAS A VISION OF SERIOUS DUTIES AND LITTLE COMPANY OF REALLY GREAT LONELINESS +121-127105-0032-2610: YES BUT THAT'S JUST THE BEAUTY OF HER PASSION +121-127105-0033-2611: IT WAS THE BEAUTY OF IT +121-127105-0034-2612: IT SOUNDED DULL IT SOUNDED STRANGE AND ALL THE MORE SO BECAUSE OF HIS MAIN CONDITION WHICH WAS +121-127105-0035-2613: SHE PROMISED TO DO THIS AND SHE MENTIONED TO ME THAT WHEN FOR A MOMENT DISBURDENED DELIGHTED HE HELD HER HAND THANKING HER FOR THE SACRIFICE SHE ALREADY FELT REWARDED +121-127105-0036-2614: BUT WAS THAT ALL HER REWARD ONE OF THE LADIES ASKED +1221-135766-0000-1305: HOW STRANGE IT SEEMED TO THE SAD WOMAN AS SHE WATCHED THE GROWTH AND THE BEAUTY THAT BECAME EVERY DAY MORE BRILLIANT AND THE INTELLIGENCE THAT THREW ITS QUIVERING SUNSHINE OVER THE TINY FEATURES OF THIS CHILD +1221-135766-0001-1306: GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONOURED BOSOM TO CONNECT HER PARENT (FOR EVER->FOREVER) WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN +1221-135766-0002-1307: YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION +1221-135766-0003-1308: THE CHILD HAD A NATIVE GRACE WHICH DOES NOT INVARIABLY (CO EXIST->COEXIST) WITH FAULTLESS BEAUTY ITS ATTIRE HOWEVER SIMPLE ALWAYS IMPRESSED THE BEHOLDER AS IF IT WERE THE VERY GARB THAT PRECISELY BECAME IT BEST +1221-135766-0004-1309: THIS OUTWARD MUTABILITY INDICATED AND DID NOT MORE THAN FAIRLY EXPRESS THE VARIOUS PROPERTIES OF HER INNER LIFE +1221-135766-0005-1310: HESTER COULD ONLY ACCOUNT FOR THE CHILD'S CHARACTER AND EVEN THEN MOST VAGUELY AND IMPERFECTLY BY RECALLING WHAT SHE HERSELF HAD BEEN DURING THAT MOMENTOUS PERIOD WHILE PEARL WAS IMBIBING HER SOUL FROM THE SPIRITUAL WORLD AND HER BODILY FRAME FROM ITS MATERIAL OF EARTH +1221-135766-0006-1311: THEY WERE NOW ILLUMINATED BY THE MORNING RADIANCE OF A YOUNG CHILD'S DISPOSITION BUT LATER IN THE DAY OF EARTHLY EXISTENCE MIGHT BE PROLIFIC OF THE STORM AND WHIRLWIND +1221-135766-0007-1312: HESTER PRYNNE NEVERTHELESS THE LOVING MOTHER OF THIS ONE CHILD RAN LITTLE RISK OF ERRING ON THE SIDE OF UNDUE SEVERITY +1221-135766-0008-1313: MINDFUL HOWEVER OF HER OWN ERRORS AND MISFORTUNES SHE EARLY SOUGHT TO IMPOSE A TENDER BUT STRICT CONTROL OVER THE INFANT IMMORTALITY THAT WAS COMMITTED TO HER CHARGE +1221-135766-0009-1314: AS TO ANY OTHER KIND OF DISCIPLINE WHETHER ADDRESSED TO HER MIND OR HEART LITTLE PEARL MIGHT OR MIGHT NOT BE WITHIN ITS REACH IN ACCORDANCE WITH THE CAPRICE THAT (RULED->ROLLED) THE MOMENT +1221-135766-0010-1315: IT WAS A LOOK SO INTELLIGENT YET INEXPLICABLE PERVERSE SOMETIMES SO MALICIOUS BUT GENERALLY ACCOMPANIED BY A WILD FLOW OF SPIRITS THAT HESTER COULD NOT HELP QUESTIONING AT SUCH MOMENTS WHETHER PEARL WAS A HUMAN CHILD +1221-135766-0011-1316: BEHOLDING IT HESTER WAS CONSTRAINED TO RUSH TOWARDS THE CHILD TO PURSUE THE LITTLE ELF IN THE FLIGHT WHICH SHE INVARIABLY BEGAN TO SNATCH HER TO HER BOSOM WITH A CLOSE PRESSURE AND EARNEST KISSES NOT SO MUCH FROM OVERFLOWING LOVE AS TO ASSURE HERSELF THAT PEARL WAS FLESH AND BLOOD AND NOT UTTERLY DELUSIVE +1221-135766-0012-1317: BROODING OVER ALL THESE MATTERS THE MOTHER FELT LIKE ONE WHO HAS EVOKED A SPIRIT BUT BY SOME IRREGULARITY IN THE PROCESS OF CONJURATION HAS FAILED TO WIN THE MASTER WORD THAT SHOULD CONTROL THIS NEW AND INCOMPREHENSIBLE INTELLIGENCE +1221-135766-0013-1318: PEARL WAS A BORN OUTCAST OF THE 
(INFANTILE->INVENTILE) WORLD +1221-135766-0014-1319: PEARL SAW AND GAZED INTENTLY BUT NEVER SOUGHT TO MAKE ACQUAINTANCE +1221-135766-0015-1320: IF SPOKEN TO SHE WOULD NOT SPEAK AGAIN +1221-135767-0000-1280: HESTER PRYNNE WENT ONE DAY TO THE MANSION OF GOVERNOR BELLINGHAM WITH A PAIR OF GLOVES WHICH SHE HAD FRINGED AND EMBROIDERED TO HIS ORDER AND WHICH WERE TO BE WORN ON SOME GREAT OCCASION OF STATE FOR THOUGH THE CHANCES OF A POPULAR ELECTION HAD CAUSED THIS FORMER RULER TO DESCEND A STEP OR TWO FROM THE HIGHEST RANK HE STILL HELD AN (HONOURABLE->HONORABLE) AND INFLUENTIAL PLACE AMONG THE COLONIAL MAGISTRACY +1221-135767-0001-1281: ANOTHER AND FAR MORE IMPORTANT REASON THAN THE DELIVERY OF A PAIR OF EMBROIDERED GLOVES IMPELLED HESTER AT THIS TIME TO SEEK AN INTERVIEW WITH A PERSONAGE OF SO MUCH POWER AND ACTIVITY IN THE AFFAIRS OF THE SETTLEMENT +1221-135767-0002-1282: AT THAT EPOCH OF PRISTINE SIMPLICITY HOWEVER MATTERS OF EVEN SLIGHTER PUBLIC INTEREST AND OF FAR LESS INTRINSIC WEIGHT THAN THE WELFARE OF HESTER AND HER CHILD WERE STRANGELY MIXED UP WITH THE DELIBERATIONS OF LEGISLATORS AND ACTS OF STATE +1221-135767-0003-1283: THE PERIOD WAS HARDLY IF AT ALL EARLIER THAN THAT OF OUR STORY WHEN A DISPUTE CONCERNING THE RIGHT OF PROPERTY IN A PIG NOT ONLY CAUSED A FIERCE AND BITTER CONTEST IN THE LEGISLATIVE BODY OF THE COLONY BUT RESULTED IN AN IMPORTANT MODIFICATION OF THE FRAMEWORK ITSELF OF THE LEGISLATURE +1221-135767-0004-1284: WE HAVE SPOKEN OF PEARL'S RICH AND LUXURIANT BEAUTY A BEAUTY THAT SHONE WITH DEEP AND VIVID TINTS A BRIGHT COMPLEXION EYES POSSESSING INTENSITY BOTH OF DEPTH AND GLOW AND HAIR ALREADY OF A DEEP GLOSSY BROWN AND WHICH IN AFTER YEARS WOULD BE NEARLY AKIN TO BLACK +1221-135767-0005-1285: IT WAS THE SCARLET LETTER IN ANOTHER FORM THE SCARLET LETTER ENDOWED WITH LIFE +1221-135767-0006-1286: THE MOTHER HERSELF AS IF THE RED IGNOMINY WERE SO DEEPLY SCORCHED INTO HER BRAIN THAT ALL HER CONCEPTIONS ASSUMED ITS FORM HAD CAREFULLY WROUGHT OUT THE SIMILITUDE LAVISHING MANY HOURS OF MORBID INGENUITY TO CREATE AN ANALOGY BETWEEN THE OBJECT OF HER AFFECTION AND THE EMBLEM OF HER GUILT AND TORTURE +1221-135767-0007-1287: BUT IN TRUTH PEARL WAS THE ONE AS WELL AS THE OTHER AND ONLY IN CONSEQUENCE OF THAT IDENTITY HAD HESTER CONTRIVED SO PERFECTLY TO REPRESENT THE SCARLET LETTER IN HER APPEARANCE +1221-135767-0008-1288: COME THEREFORE AND LET US FLING MUD AT THEM +1221-135767-0009-1289: BUT PEARL WHO WAS A DAUNTLESS CHILD AFTER FROWNING STAMPING HER FOOT AND SHAKING HER LITTLE HAND WITH A VARIETY OF THREATENING GESTURES SUDDENLY MADE A RUSH AT THE KNOT OF HER ENEMIES AND PUT THEM ALL TO FLIGHT +1221-135767-0010-1290: SHE SCREAMED AND SHOUTED TOO WITH A TERRIFIC VOLUME OF SOUND WHICH DOUBTLESS CAUSED THE HEARTS OF THE FUGITIVES TO QUAKE WITHIN THEM +1221-135767-0011-1291: IT WAS FURTHER DECORATED WITH STRANGE AND SEEMINGLY CABALISTIC FIGURES AND DIAGRAMS SUITABLE TO THE QUAINT TASTE OF THE AGE WHICH HAD BEEN DRAWN IN THE STUCCO WHEN NEWLY LAID ON AND HAD NOW GROWN HARD AND DURABLE FOR THE ADMIRATION OF AFTER TIMES +1221-135767-0012-1292: THEY APPROACHED THE DOOR WHICH WAS OF AN ARCHED FORM AND FLANKED ON EACH SIDE BY A NARROW TOWER OR PROJECTION OF THE EDIFICE IN BOTH OF WHICH WERE LATTICE WINDOWS THE WOODEN SHUTTERS TO CLOSE OVER THEM AT NEED +1221-135767-0013-1293: LIFTING THE IRON HAMMER THAT HUNG AT THE PORTAL HESTER PRYNNE GAVE A SUMMONS WHICH WAS ANSWERED BY ONE OF THE GOVERNOR'S BOND (SERVANT->SERVANTS) A FREE BORN ENGLISHMAN BUT NOW A SEVEN YEARS SLAVE 
+1221-135767-0014-1294: YEA HIS HONOURABLE WORSHIP IS WITHIN BUT HE HATH A GODLY MINISTER OR TWO WITH HIM AND LIKEWISE A LEECH +1221-135767-0015-1295: YE MAY NOT SEE HIS WORSHIP NOW +1221-135767-0016-1296: WITH MANY VARIATIONS SUGGESTED BY THE NATURE OF HIS BUILDING MATERIALS DIVERSITY OF CLIMATE AND A DIFFERENT MODE OF SOCIAL LIFE GOVERNOR BELLINGHAM HAD PLANNED HIS NEW HABITATION AFTER THE RESIDENCES OF GENTLEMEN OF (FAIR ESTATE->FAIREST STATE) IN HIS NATIVE LAND +1221-135767-0017-1297: ON THE TABLE IN TOKEN THAT THE SENTIMENT OF OLD ENGLISH HOSPITALITY HAD NOT BEEN LEFT BEHIND STOOD A LARGE PEWTER TANKARD AT THE BOTTOM OF WHICH HAD HESTER OR PEARL PEEPED INTO IT THEY MIGHT HAVE SEEN THE FROTHY REMNANT OF A RECENT DRAUGHT OF ALE +1221-135767-0018-1298: LITTLE PEARL WHO WAS AS GREATLY PLEASED WITH THE GLEAMING ARMOUR AS SHE HAD BEEN WITH THE GLITTERING FRONTISPIECE OF THE HOUSE SPENT SOME TIME LOOKING INTO THE POLISHED MIRROR OF THE BREASTPLATE +1221-135767-0019-1299: MOTHER CRIED SHE I SEE YOU HERE LOOK LOOK +1221-135767-0020-1300: IN TRUTH SHE SEEMED ABSOLUTELY HIDDEN BEHIND IT +1221-135767-0021-1301: PEARL ACCORDINGLY RAN TO THE BOW WINDOW AT THE FURTHER END OF THE HALL AND LOOKED ALONG THE VISTA OF A GARDEN WALK CARPETED WITH CLOSELY SHAVEN GRASS AND BORDERED WITH SOME RUDE AND (IMMATURE->IMMATEUR) ATTEMPT AT SHRUBBERY +1221-135767-0022-1302: BUT THE PROPRIETOR APPEARED (ALREADY->ALL READY) TO HAVE RELINQUISHED AS HOPELESS THE EFFORT TO PERPETUATE ON THIS SIDE OF THE ATLANTIC IN A HARD SOIL AND AMID THE CLOSE STRUGGLE FOR SUBSISTENCE THE NATIVE ENGLISH TASTE FOR ORNAMENTAL GARDENING +1221-135767-0023-1303: THERE WERE A FEW ROSE BUSHES HOWEVER AND A NUMBER OF APPLE TREES PROBABLY THE DESCENDANTS OF THOSE PLANTED BY THE REVEREND MISTER BLACKSTONE THE FIRST SETTLER OF THE PENINSULA THAT HALF MYTHOLOGICAL PERSONAGE WHO RIDES THROUGH OUR EARLY ANNALS SEATED ON THE BACK OF A BULL +1221-135767-0024-1304: PEARL SEEING THE ROSE BUSHES BEGAN TO CRY FOR A RED ROSE AND WOULD NOT BE PACIFIED +1284-1180-0000-829: HE WORE BLUE SILK STOCKINGS BLUE KNEE (PANTS->PANS) WITH GOLD BUCKLES A BLUE RUFFLED WAIST AND A JACKET OF BRIGHT BLUE BRAIDED WITH GOLD +1284-1180-0001-830: HIS HAT HAD A PEAKED CROWN AND A FLAT BRIM AND AROUND THE BRIM WAS A ROW OF TINY GOLDEN BELLS THAT TINKLED WHEN HE MOVED +1284-1180-0002-831: INSTEAD OF SHOES THE OLD MAN WORE BOOTS WITH (TURNOVER->TURN OVER) TOPS AND HIS BLUE COAT HAD WIDE CUFFS OF GOLD BRAID +1284-1180-0003-832: FOR A LONG TIME HE HAD WISHED TO EXPLORE THE BEAUTIFUL LAND OF OZ IN WHICH THEY LIVED +1284-1180-0004-833: WHEN THEY WERE OUTSIDE UNC SIMPLY LATCHED THE DOOR AND STARTED UP THE PATH +1284-1180-0005-834: NO ONE WOULD DISTURB THEIR LITTLE HOUSE EVEN IF (ANYONE->ANY ONE) CAME SO FAR INTO THE THICK FOREST WHILE THEY WERE GONE +1284-1180-0006-835: AT THE FOOT OF THE MOUNTAIN THAT SEPARATED THE COUNTRY OF THE MUNCHKINS FROM THE COUNTRY OF THE GILLIKINS THE PATH DIVIDED +1284-1180-0007-836: HE KNEW IT WOULD TAKE THEM TO THE HOUSE OF THE CROOKED MAGICIAN WHOM HE HAD NEVER SEEN BUT WHO WAS (THEIR->THERE) NEAREST (NEIGHBOR->NEIGHBOUR) +1284-1180-0008-837: ALL THE MORNING THEY TRUDGED UP THE MOUNTAIN PATH AND AT NOON UNC AND OJO SAT ON A FALLEN TREE TRUNK AND ATE THE LAST OF THE BREAD WHICH THE OLD MUNCHKIN HAD PLACED IN HIS POCKET +1284-1180-0009-838: THEN THEY STARTED ON AGAIN AND TWO HOURS LATER CAME IN SIGHT OF THE HOUSE OF DOCTOR PIPT +1284-1180-0010-839: UNC KNOCKED AT THE DOOR OF THE HOUSE AND A CHUBBY PLEASANT FACED WOMAN DRESSED ALL IN BLUE OPENED IT AND 
GREETED THE VISITORS WITH A SMILE +1284-1180-0011-840: I AM MY DEAR AND ALL STRANGERS ARE WELCOME TO MY HOME +1284-1180-0012-841: WE HAVE COME FROM A FAR LONELIER PLACE THAN THIS A LONELIER PLACE +1284-1180-0013-842: AND YOU MUST BE OJO THE UNLUCKY SHE ADDED +1284-1180-0014-843: OJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HIS LIFE +1284-1180-0015-844: WE ARE TRAVELING REPLIED OJO AND WE STOPPED AT YOUR HOUSE JUST TO REST AND REFRESH OURSELVES +1284-1180-0016-845: THE WOMAN SEEMED THOUGHTFUL +1284-1180-0017-846: (AT->THAT) ONE END STOOD A GREAT FIREPLACE IN WHICH A BLUE LOG WAS BLAZING WITH A BLUE FLAME AND OVER THE FIRE HUNG FOUR KETTLES IN A ROW ALL BUBBLING AND STEAMING AT A GREAT RATE +1284-1180-0018-847: IT TAKES ME SEVERAL YEARS TO MAKE THIS MAGIC POWDER BUT AT THIS MOMENT I AM PLEASED TO SAY IT IS NEARLY DONE YOU SEE I AM MAKING IT FOR MY GOOD WIFE MARGOLOTTE WHO WANTS TO USE SOME OF IT FOR A PURPOSE OF HER OWN +1284-1180-0019-848: YOU MUST KNOW SAID MARGOLOTTE WHEN THEY WERE ALL SEATED TOGETHER ON THE BROAD WINDOW SEAT THAT MY HUSBAND FOOLISHLY GAVE AWAY ALL THE POWDER OF LIFE HE FIRST MADE TO OLD (MOMBI->MUMBIE) THE WITCH WHO USED TO LIVE IN THE COUNTRY OF THE GILLIKINS TO THE NORTH OF HERE +1284-1180-0020-849: THE FIRST LOT WE TESTED ON OUR GLASS (CAT->HAT) WHICH NOT ONLY BEGAN TO LIVE BUT HAS LIVED EVER SINCE +1284-1180-0021-850: I THINK THE NEXT GLASS CAT THE MAGICIAN MAKES WILL HAVE NEITHER BRAINS NOR HEART FOR THEN IT WILL NOT OBJECT TO CATCHING MICE AND MAY PROVE OF SOME USE TO US +1284-1180-0022-851: I'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ +1284-1180-0023-852: YOU SEE (I'VE->I HAVE) LIVED ALL MY LIFE WITH UNC NUNKIE THE SILENT ONE AND THERE WAS NO ONE TO TELL ME ANYTHING +1284-1180-0024-853: THAT IS ONE REASON YOU ARE OJO THE UNLUCKY SAID THE WOMAN IN (A->*) SYMPATHETIC TONE +1284-1180-0025-854: I THINK I MUST SHOW YOU MY PATCHWORK GIRL SAID MARGOLOTTE LAUGHING AT THE BOY'S ASTONISHMENT FOR SHE IS RATHER DIFFICULT TO EXPLAIN +1284-1180-0026-855: BUT FIRST I WILL TELL YOU THAT FOR MANY YEARS I HAVE LONGED FOR A SERVANT TO HELP ME WITH THE HOUSEWORK AND TO (COOK->COPE) THE MEALS AND WASH THE DISHES +1284-1180-0027-856: YET THAT TASK WAS NOT SO EASY AS YOU MAY SUPPOSE +1284-1180-0028-857: A BED QUILT MADE OF PATCHES OF DIFFERENT KINDS AND (COLORS->COLLARS) OF CLOTH ALL NEATLY SEWED TOGETHER +1284-1180-0029-858: SOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE THE PATCHES AND COLORS ARE SO MIXED UP +1284-1180-0030-859: WHEN I FOUND IT I SAID TO MYSELF THAT IT WOULD DO NICELY FOR MY SERVANT GIRL FOR WHEN SHE WAS BROUGHT TO LIFE SHE WOULD NOT BE PROUD NOR HAUGHTY AS THE GLASS CAT IS FOR SUCH A DREADFUL MIXTURE OF (COLORS->COLOURS) WOULD DISCOURAGE HER FROM TRYING TO BE AS DIGNIFIED AS THE BLUE MUNCHKINS ARE +1284-1180-0031-860: AT THE EMERALD CITY WHERE OUR PRINCESS OZMA LIVES GREEN IS THE POPULAR COLOR +1284-1180-0032-861: I WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE WENT TO A TALL CUPBOARD AND THREW OPEN THE DOORS +1284-1181-0000-807: OJO EXAMINED THIS CURIOUS CONTRIVANCE WITH WONDER +1284-1181-0001-808: MARGOLOTTE HAD FIRST MADE THE GIRL'S FORM FROM THE PATCHWORK QUILT AND THEN SHE HAD DRESSED IT WITH A PATCHWORK SKIRT AND AN APRON WITH POCKETS IN IT USING THE SAME GAY MATERIAL THROUGHOUT +1284-1181-0002-809: THE HEAD OF THE PATCHWORK GIRL WAS THE MOST CURIOUS PART OF HER +1284-1181-0003-810: THE HAIR WAS OF BROWN YARN AND HUNG DOWN ON HER NECK IN SEVERAL NEAT BRAIDS +1284-1181-0004-811: GOLD IS THE MOST COMMON METAL IN THE LAND OF OZ AND IS USED FOR MANY PURPOSES 
BECAUSE IT IS SOFT AND PLIABLE +1284-1181-0005-812: NO I FORGOT ALL ABOUT THE BRAINS EXCLAIMED THE WOMAN +1284-1181-0006-813: WELL THAT MAY BE TRUE AGREED MARGOLOTTE BUT ON THE CONTRARY A SERVANT WITH TOO MUCH BRAINS IS SURE TO BECOME INDEPENDENT AND HIGH AND MIGHTY AND FEEL ABOVE HER WORK +1284-1181-0007-814: SHE POURED INTO THE DISH A QUANTITY FROM EACH OF THESE BOTTLES +1284-1181-0008-815: I THINK THAT WILL DO SHE CONTINUED FOR THE OTHER QUALITIES ARE NOT NEEDED IN A SERVANT +1284-1181-0009-816: SHE RAN TO HER HUSBAND'S SIDE AT ONCE AND HELPED HIM LIFT THE FOUR KETTLES FROM THE FIRE +1284-1181-0010-817: THEIR CONTENTS HAD ALL BOILED AWAY LEAVING IN THE BOTTOM OF EACH KETTLE A FEW GRAINS OF FINE WHITE POWDER +1284-1181-0011-818: VERY CAREFULLY THE MAGICIAN REMOVED THIS POWDER PLACING IT (ALL TOGETHER->ALTOGETHER) IN A GOLDEN DISH WHERE HE MIXED IT WITH A GOLDEN SPOON +1284-1181-0012-819: NO ONE SAW HIM DO THIS FOR ALL WERE LOOKING AT THE POWDER OF LIFE BUT SOON THE WOMAN REMEMBERED WHAT SHE HAD BEEN DOING AND CAME BACK TO THE CUPBOARD +1284-1181-0013-820: OJO BECAME A BIT UNEASY AT THIS FOR HE HAD ALREADY PUT QUITE A LOT OF THE CLEVERNESS POWDER IN THE DISH BUT HE DARED NOT INTERFERE AND SO HE COMFORTED HIMSELF WITH THE THOUGHT THAT ONE CANNOT HAVE TOO MUCH CLEVERNESS +1284-1181-0014-821: HE SELECTED A SMALL GOLD BOTTLE WITH A PEPPER BOX TOP SO THAT THE POWDER MIGHT BE SPRINKLED ON ANY OBJECT THROUGH THE SMALL HOLES +1284-1181-0015-822: MOST PEOPLE TALK TOO MUCH SO IT IS A RELIEF TO FIND ONE WHO TALKS TOO LITTLE +1284-1181-0016-823: I AM NOT ALLOWED TO PERFORM MAGIC EXCEPT FOR MY OWN AMUSEMENT HE TOLD HIS VISITORS AS HE LIGHTED A PIPE WITH A CROOKED STEM AND BEGAN TO SMOKE +1284-1181-0017-824: THE WIZARD OF OZ WHO USED TO BE A HUMBUG AND KNEW NO MAGIC AT ALL HAS BEEN TAKING LESSONS OF GLINDA AND I'M TOLD HE IS GETTING TO BE A PRETTY GOOD WIZARD BUT HE IS MERELY THE ASSISTANT OF THE GREAT SORCERESS +1284-1181-0018-825: IT TRULY IS ASSERTED THE MAGICIAN +1284-1181-0019-826: I NOW USE THEM AS ORNAMENTAL STATUARY IN MY GARDEN +1284-1181-0020-827: DEAR ME WHAT A (CHATTERBOX->CHATTER BOX) YOU'RE GETTING TO BE (UNC->YOUNG) REMARKED THE MAGICIAN WHO WAS PLEASED WITH THE COMPLIMENT +1284-1181-0021-828: ASKED THE VOICE IN SCORNFUL ACCENTS +1284-134647-0000-862: THE GRATEFUL APPLAUSE OF THE CLERGY HAS CONSECRATED THE MEMORY OF A PRINCE WHO INDULGED THEIR PASSIONS AND PROMOTED THEIR INTEREST +1284-134647-0001-863: THE EDICT OF MILAN THE GREAT CHARTER OF TOLERATION HAD CONFIRMED TO EACH INDIVIDUAL OF THE ROMAN WORLD THE PRIVILEGE OF CHOOSING AND PROFESSING HIS OWN RELIGION +1284-134647-0002-864: BUT THIS INESTIMABLE PRIVILEGE WAS SOON VIOLATED WITH THE KNOWLEDGE OF TRUTH THE EMPEROR IMBIBED THE MAXIMS OF PERSECUTION AND THE (SECTS->SEX) WHICH DISSENTED FROM THE CATHOLIC CHURCH WERE AFFLICTED AND OPPRESSED BY THE TRIUMPH OF CHRISTIANITY +1284-134647-0003-865: (CONSTANTINE->KONSTANTINE) EASILY BELIEVED THAT THE HERETICS WHO PRESUMED TO DISPUTE HIS OPINIONS OR TO OPPOSE HIS COMMANDS WERE GUILTY OF THE MOST ABSURD AND CRIMINAL OBSTINACY AND THAT A SEASONABLE APPLICATION OF MODERATE SEVERITIES MIGHT SAVE THOSE UNHAPPY MEN FROM THE DANGER OF AN EVERLASTING CONDEMNATION +1284-134647-0004-866: SOME OF THE PENAL REGULATIONS WERE COPIED FROM THE EDICTS OF DIOCLETIAN AND THIS METHOD OF CONVERSION WAS APPLAUDED BY THE SAME BISHOPS WHO HAD (FELT->FELLED) THE HAND OF OPPRESSION AND PLEADED FOR THE RIGHTS OF HUMANITY +1284-134647-0005-867: THEY ASSERTED WITH CONFIDENCE AND ALMOST WITH EXULTATION THAT THE 
APOSTOLICAL SUCCESSION WAS INTERRUPTED THAT ALL THE BISHOPS OF EUROPE AND ASIA WERE (INFECTED->IN EFFECTED) BY THE CONTAGION OF GUILT AND SCHISM AND THAT THE PREROGATIVES OF THE CATHOLIC CHURCH WERE CONFINED TO THE CHOSEN PORTION OF THE AFRICAN BELIEVERS WHO ALONE HAD PRESERVED INVIOLATE THE INTEGRITY OF THEIR FAITH AND DISCIPLINE +1284-134647-0006-868: BISHOPS VIRGINS AND EVEN SPOTLESS INFANTS WERE SUBJECTED TO THE DISGRACE OF A PUBLIC PENANCE BEFORE THEY COULD BE ADMITTED TO THE COMMUNION OF THE DONATISTS +1284-134647-0007-869: PROSCRIBED BY THE CIVIL AND ECCLESIASTICAL POWERS OF THE EMPIRE THE (DONATISTS->DONATIST) STILL MAINTAINED IN SOME PROVINCES PARTICULARLY IN NUMIDIA THEIR SUPERIOR NUMBERS AND FOUR HUNDRED BISHOPS ACKNOWLEDGED THE JURISDICTION OF THEIR PRIMATE +1320-122612-0000-120: SINCE THE PERIOD OF OUR TALE THE ACTIVE SPIRIT OF THE COUNTRY HAS SURROUNDED IT WITH A BELT OF RICH (AND THRIVING->ENTHRIBING) SETTLEMENTS THOUGH NONE BUT THE HUNTER OR THE SAVAGE IS EVER KNOWN EVEN NOW TO PENETRATE ITS WILD RECESSES +1320-122612-0001-121: THE DEWS WERE SUFFERED TO EXHALE AND THE SUN HAD DISPERSED THE MISTS AND WAS SHEDDING A STRONG AND CLEAR LIGHT IN THE FOREST WHEN THE (TRAVELERS->TRAVELLERS) RESUMED THEIR JOURNEY +1320-122612-0002-122: AFTER PROCEEDING A FEW MILES THE PROGRESS OF HAWKEYE WHO LED THE ADVANCE BECAME MORE DELIBERATE AND WATCHFUL +1320-122612-0003-123: HE OFTEN STOPPED TO EXAMINE THE TREES NOR DID HE CROSS A RIVULET WITHOUT ATTENTIVELY CONSIDERING THE QUANTITY THE VELOCITY AND THE COLOR OF ITS WATERS +1320-122612-0004-124: DISTRUSTING HIS OWN JUDGMENT HIS APPEALS TO THE OPINION OF (CHINGACHGOOK->CHINGACHOOK) WERE FREQUENT AND EARNEST +1320-122612-0005-125: YET HERE ARE WE WITHIN A SHORT RANGE OF THE (SCAROONS->SCARONS) AND NOT A SIGN OF A TRAIL HAVE WE CROSSED +1320-122612-0006-126: LET US RETRACE OUR STEPS AND EXAMINE AS WE GO WITH KEENER EYES +1320-122612-0007-127: (CHINGACHGOOK->CHINGACHOOK) HAD CAUGHT THE LOOK AND MOTIONING WITH HIS HAND HE BADE HIM SPEAK +1320-122612-0008-128: THE EYES OF THE WHOLE PARTY FOLLOWED THE UNEXPECTED MOVEMENT AND READ THEIR SUCCESS IN THE AIR OF TRIUMPH THAT THE YOUTH ASSUMED +1320-122612-0009-129: IT WOULD HAVE BEEN MORE WONDERFUL HAD HE SPOKEN WITHOUT A BIDDING +1320-122612-0010-130: SEE SAID UNCAS POINTING NORTH AND SOUTH AT THE EVIDENT MARKS OF THE BROAD TRAIL ON EITHER SIDE OF HIM THE DARK HAIR HAS GONE TOWARD THE FOREST +1320-122612-0011-131: IF A ROCK OR A RIVULET OR A BIT OF EARTH HARDER THAN COMMON SEVERED THE LINKS OF THE (CLEW->CLUE) THEY FOLLOWED THE TRUE EYE OF THE SCOUT RECOVERED THEM AT A DISTANCE AND SELDOM RENDERED THE DELAY OF A SINGLE MOMENT NECESSARY +1320-122612-0012-132: EXTINGUISHED BRANDS WERE LYING AROUND A SPRING THE OFFALS OF A DEER WERE SCATTERED ABOUT THE PLACE AND THE TREES BORE EVIDENT MARKS OF HAVING BEEN BROWSED BY THE HORSES +1320-122612-0013-133: A CIRCLE OF A FEW HUNDRED FEET IN CIRCUMFERENCE WAS DRAWN AND EACH OF THE PARTY TOOK A SEGMENT FOR HIS PORTION +1320-122612-0014-134: THE EXAMINATION HOWEVER RESULTED IN NO DISCOVERY +1320-122612-0015-135: THE WHOLE PARTY CROWDED TO THE SPOT WHERE UNCAS POINTED OUT THE IMPRESSION OF A MOCCASIN IN THE MOIST (ALLUVION->ALLUVIAN) +1320-122612-0016-136: RUN BACK UNCAS AND BRING ME THE SIZE OF THE SINGER'S FOOT +1320-122617-0000-78: NOTWITHSTANDING THE HIGH RESOLUTION OF HAWKEYE HE FULLY COMPREHENDED ALL THE DIFFICULTIES AND DANGER HE WAS ABOUT TO INCUR +1320-122617-0001-79: IN HIS RETURN TO THE CAMP HIS ACUTE AND PRACTISED INTELLECTS WERE INTENTLY ENGAGED IN 
DEVISING MEANS TO COUNTERACT A WATCHFULNESS AND SUSPICION ON THE PART OF HIS ENEMIES THAT HE KNEW WERE IN NO DEGREE INFERIOR TO HIS OWN +1320-122617-0002-80: IN OTHER WORDS WHILE HE HAD IMPLICIT FAITH IN THE ABILITY OF (BALAAM'S->BAYLIM'S) ASS TO SPEAK HE WAS SOMEWHAT (SKEPTICAL->SCEPTICAL) ON THE SUBJECT OF A BEAR'S SINGING AND YET HE HAD BEEN ASSURED OF THE LATTER ON THE TESTIMONY OF HIS OWN EXQUISITE ORGANS +1320-122617-0003-81: THERE WAS SOMETHING IN HIS AIR AND MANNER THAT BETRAYED TO THE SCOUT THE UTTER CONFUSION OF THE STATE OF HIS MIND +1320-122617-0004-82: THE INGENIOUS HAWKEYE WHO RECALLED THE HASTY MANNER IN WHICH THE OTHER HAD ABANDONED HIS POST AT THE BEDSIDE OF THE SICK WOMAN WAS NOT WITHOUT HIS SUSPICIONS CONCERNING THE SUBJECT OF SO MUCH SOLEMN DELIBERATION +1320-122617-0005-83: THE BEAR SHOOK HIS SHAGGY SIDES AND THEN A WELL KNOWN VOICE REPLIED +1320-122617-0006-84: CAN THESE THINGS BE RETURNED DAVID BREATHING MORE FREELY AS THE TRUTH BEGAN TO DAWN UPON HIM +1320-122617-0007-85: COME COME RETURNED HAWKEYE UNCASING HIS HONEST COUNTENANCE THE BETTER TO ASSURE THE WAVERING CONFIDENCE OF HIS COMPANION YOU MAY SEE A SKIN WHICH IF IT BE NOT AS WHITE AS ONE OF THE GENTLE ONES HAS NO TINGE OF RED TO IT THAT THE WINDS OF THE HEAVEN AND THE SUN HAVE NOT BESTOWED NOW LET US TO BUSINESS +1320-122617-0008-86: THE YOUNG MAN IS IN BONDAGE AND MUCH I FEAR HIS DEATH IS DECREED +1320-122617-0009-87: I GREATLY MOURN THAT ONE SO WELL DISPOSED SHOULD DIE IN HIS IGNORANCE AND I HAVE SOUGHT A GOODLY HYMN CAN YOU LEAD ME TO HIM +1320-122617-0010-88: THE TASK WILL NOT BE DIFFICULT RETURNED DAVID HESITATING THOUGH I GREATLY FEAR YOUR PRESENCE WOULD RATHER INCREASE THAN MITIGATE HIS UNHAPPY FORTUNES +1320-122617-0011-89: THE LODGE IN WHICH UNCAS WAS CONFINED WAS IN THE VERY CENTER OF THE VILLAGE AND IN A SITUATION PERHAPS MORE DIFFICULT THAN ANY OTHER TO APPROACH OR LEAVE WITHOUT OBSERVATION +1320-122617-0012-90: FOUR OR FIVE OF THE LATTER ONLY LINGERED ABOUT THE DOOR OF THE PRISON OF UNCAS WARY BUT CLOSE OBSERVERS OF THE MANNER OF THEIR CAPTIVE +1320-122617-0013-91: DELIVERED IN A STRONG TONE OF ASSENT ANNOUNCED THE GRATIFICATION THE SAVAGE WOULD RECEIVE (IN->AND) WITNESSING SUCH AN EXHIBITION OF WEAKNESS IN AN ENEMY SO LONG HATED AND SO MUCH FEARED +1320-122617-0014-92: THEY DREW BACK A LITTLE FROM THE ENTRANCE AND MOTIONED TO THE SUPPOSED (CONJURER->CONJUROR) TO ENTER +1320-122617-0015-93: BUT THE BEAR INSTEAD OF OBEYING MAINTAINED THE (SEAT->SEED) IT HAD TAKEN AND GROWLED +1320-122617-0016-94: THE CUNNING MAN IS AFRAID THAT HIS BREATH WILL BLOW UPON HIS BROTHERS AND TAKE AWAY THEIR COURAGE TOO CONTINUED DAVID IMPROVING THE HINT HE RECEIVED THEY MUST STAND FURTHER OFF +1320-122617-0017-95: THEN AS IF SATISFIED OF THEIR SAFETY THE SCOUT LEFT HIS POSITION AND SLOWLY ENTERED THE PLACE +1320-122617-0018-96: IT WAS SILENT AND GLOOMY BEING TENANTED SOLELY BY THE CAPTIVE AND LIGHTED BY THE DYING EMBERS OF A FIRE WHICH HAD BEEN USED FOR THE (PURPOSED->PURPOSE) OF COOKERY +1320-122617-0019-97: UNCAS OCCUPIED A DISTANT CORNER IN A RECLINING ATTITUDE BEING RIGIDLY BOUND BOTH HANDS AND FEET BY STRONG AND PAINFUL (WITHES->WIDTHS) +1320-122617-0020-98: THE SCOUT WHO HAD LEFT DAVID AT THE DOOR TO ASCERTAIN THEY WERE NOT OBSERVED THOUGHT IT PRUDENT TO PRESERVE HIS DISGUISE UNTIL ASSURED OF THEIR PRIVACY +1320-122617-0021-99: WHAT SHALL WE DO WITH THE MINGOES AT THE DOOR THEY COUNT SIX AND (THIS->THE) SINGER IS AS GOOD AS NOTHING +1320-122617-0022-100: THE DELAWARES ARE CHILDREN OF THE TORTOISE AND (THEY->THE) 
OUTSTRIP THE DEER +1320-122617-0023-101: UNCAS WHO HAD ALREADY APPROACHED THE DOOR IN READINESS TO LEAD THE WAY NOW RECOILED AND PLACED HIMSELF ONCE MORE IN THE BOTTOM OF THE LODGE +1320-122617-0024-102: BUT HAWKEYE WHO WAS TOO MUCH OCCUPIED WITH HIS OWN THOUGHTS TO NOTE THE MOVEMENT CONTINUED SPEAKING MORE TO HIMSELF THAN TO HIS COMPANION +1320-122617-0025-103: SO UNCAS YOU HAD BETTER TAKE THE LEAD WHILE I WILL PUT ON THE SKIN AGAIN AND TRUST TO CUNNING FOR WANT OF SPEED +1320-122617-0026-104: WELL WHAT CAN'T BE DONE BY MAIN COURAGE (IN->AND) WAR MUST BE DONE BY CIRCUMVENTION +1320-122617-0027-105: AS SOON AS THESE DISPOSITIONS WERE MADE THE SCOUT TURNED TO DAVID AND GAVE HIM HIS PARTING INSTRUCTIONS +1320-122617-0028-106: MY PURSUITS ARE PEACEFUL AND MY TEMPER I HUMBLY TRUST IS GREATLY GIVEN TO MERCY AND LOVE RETURNED DAVID A LITTLE NETTLED AT SO DIRECT AN ATTACK ON HIS MANHOOD BUT THERE ARE NONE WHO CAN SAY THAT I HAVE EVER FORGOTTEN MY FAITH IN THE LORD EVEN IN THE GREATEST STRAITS +1320-122617-0029-107: IF YOU ARE NOT THEN KNOCKED ON THE HEAD YOUR BEING A NON (COMPOSSER WILL->COMPOSSIBLE) PROTECT YOU AND YOU'LL THEN HAVE A GOOD REASON TO EXPECT TO DIE IN YOUR BED +1320-122617-0030-108: SO CHOOSE FOR YOURSELF TO MAKE A RUSH OR TARRY HERE +1320-122617-0031-109: BRAVELY AND GENEROUSLY HAS HE BATTLED IN MY BEHALF AND THIS AND MORE WILL I DARE IN HIS SERVICE +1320-122617-0032-110: KEEP SILENT AS LONG AS MAY BE AND IT WOULD BE WISE WHEN YOU DO SPEAK TO BREAK OUT SUDDENLY IN ONE OF YOUR SHOUTINGS WHICH WILL SERVE TO REMIND THE INDIANS THAT YOU ARE NOT ALTOGETHER AS RESPONSIBLE AS MEN SHOULD BE +1320-122617-0033-111: IF HOWEVER THEY TAKE YOUR SCALP AS I TRUST AND BELIEVE THEY WILL NOT DEPEND ON IT UNCAS AND I WILL NOT FORGET THE DEED BUT REVENGE IT (AS->IS) BECOMES TRUE WARRIORS AND TRUSTY FRIENDS +1320-122617-0034-112: HOLD SAID DAVID PERCEIVING THAT WITH THIS ASSURANCE THEY WERE ABOUT TO LEAVE HIM I AM AN UNWORTHY AND HUMBLE FOLLOWER OF ONE WHO TAUGHT NOT THE DAMNABLE PRINCIPLE OF REVENGE +1320-122617-0035-113: THEN HEAVING A HEAVY SIGH PROBABLY AMONG THE LAST HE EVER DREW IN PINING FOR A CONDITION HE HAD SO LONG ABANDONED HE ADDED IT IS WHAT I WOULD WISH TO PRACTISE MYSELF AS ONE WITHOUT A CROSS OF BLOOD THOUGH IT IS NOT ALWAYS EASY TO DEAL WITH AN INDIAN AS YOU WOULD WITH A FELLOW CHRISTIAN +1320-122617-0036-114: GOD BLESS YOU FRIEND I DO BELIEVE YOUR SCENT (IS->HAS) NOT GREATLY WRONG WHEN THE MATTER IS DULY CONSIDERED AND KEEPING ETERNITY BEFORE THE EYES THOUGH MUCH DEPENDS ON THE NATURAL GIFTS AND THE FORCE OF TEMPTATION +1320-122617-0037-115: THE DELAWARE DOG HE SAID LEANING FORWARD AND PEERING THROUGH THE DIM LIGHT TO CATCH THE EXPRESSION OF THE OTHER'S FEATURES IS HE AFRAID +1320-122617-0038-116: WILL THE HURONS HEAR HIS GROANS +1320-122617-0039-117: THE (MOHICAN->MOHICANS) STARTED ON HIS FEET AND SHOOK HIS SHAGGY COVERING AS THOUGH THE ANIMAL HE COUNTERFEITED WAS ABOUT TO MAKE SOME DESPERATE EFFORT +1320-122617-0040-118: HE HAD NO OCCASION TO DELAY FOR AT THE NEXT INSTANT A BURST OF CRIES FILLED THE OUTER AIR AND RAN ALONG THE WHOLE EXTENT OF THE VILLAGE +1320-122617-0041-119: UNCAS CAST HIS SKIN AND STEPPED FORTH IN HIS OWN BEAUTIFUL PROPORTIONS +1580-141083-0000-1949: I WILL ENDEAVOUR IN MY STATEMENT TO AVOID SUCH TERMS AS WOULD SERVE TO LIMIT THE EVENTS TO ANY PARTICULAR PLACE OR GIVE A CLUE AS TO THE PEOPLE CONCERNED +1580-141083-0001-1950: I HAD ALWAYS KNOWN HIM TO BE RESTLESS IN HIS MANNER BUT ON THIS PARTICULAR OCCASION HE WAS IN SUCH A STATE OF UNCONTROLLABLE AGITATION THAT 
IT WAS CLEAR SOMETHING VERY UNUSUAL HAD OCCURRED +1580-141083-0002-1951: MY FRIEND'S TEMPER HAD NOT IMPROVED SINCE HE HAD BEEN DEPRIVED OF THE CONGENIAL SURROUNDINGS OF BAKER STREET +1580-141083-0003-1952: WITHOUT HIS (SCRAPBOOKS->SCRAP BOOKS) HIS CHEMICALS AND HIS HOMELY UNTIDINESS HE WAS AN UNCOMFORTABLE MAN +1580-141083-0004-1953: I HAD TO READ IT OVER CAREFULLY AS THE TEXT MUST BE ABSOLUTELY CORRECT +1580-141083-0005-1954: I WAS ABSENT RATHER MORE THAN AN HOUR +1580-141083-0006-1955: THE ONLY DUPLICATE WHICH EXISTED SO FAR AS I KNEW WAS THAT WHICH BELONGED TO MY SERVANT (BANNISTER->BANISTER) A MAN WHO HAS LOOKED AFTER MY ROOM FOR TEN YEARS AND WHOSE HONESTY IS ABSOLUTELY ABOVE SUSPICION +1580-141083-0007-1956: THE MOMENT I LOOKED AT MY TABLE I WAS AWARE THAT SOMEONE HAD RUMMAGED AMONG MY PAPERS +1580-141083-0008-1957: THE PROOF WAS IN THREE LONG SLIPS I HAD LEFT THEM (ALL TOGETHER->ALTOGETHER) +1580-141083-0009-1958: (THE ALTERNATIVE->THEY ALL TURNED OF) WAS THAT (SOMEONE->SOME ONE) PASSING HAD OBSERVED THE KEY IN THE DOOR HAD KNOWN THAT I WAS OUT AND HAD ENTERED TO LOOK AT THE PAPERS +1580-141083-0010-1959: I GAVE HIM A LITTLE BRANDY AND LEFT HIM COLLAPSED IN A CHAIR WHILE I MADE A MOST CAREFUL EXAMINATION OF THE ROOM +1580-141083-0011-1960: A BROKEN TIP OF LEAD WAS LYING THERE ALSO +1580-141083-0012-1961: NOT ONLY THIS BUT ON THE TABLE I FOUND A SMALL BALL OF BLACK DOUGH OR CLAY WITH SPECKS OF SOMETHING WHICH LOOKS LIKE SAWDUST IN IT +1580-141083-0013-1962: ABOVE ALL THINGS I DESIRE TO SETTLE THE MATTER QUIETLY AND DISCREETLY +1580-141083-0014-1963: TO THE BEST OF MY BELIEF THEY WERE ROLLED UP +1580-141083-0015-1964: DID (ANYONE->ANY ONE) KNOW THAT THESE PROOFS WOULD BE THERE NO ONE SAVE THE PRINTER +1580-141083-0016-1965: I WAS IN SUCH A HURRY TO COME TO YOU YOU LEFT YOUR DOOR OPEN +1580-141083-0017-1966: SO IT SEEMS TO ME +1580-141083-0018-1967: NOW MISTER (SOAMES->SOLMES) AT YOUR DISPOSAL +1580-141083-0019-1968: ABOVE WERE THREE STUDENTS ONE ON EACH STORY +1580-141083-0020-1969: THEN HE APPROACHED IT AND STANDING ON TIPTOE WITH HIS (NECK->NET) CRANED HE LOOKED INTO THE ROOM +1580-141083-0021-1970: THERE IS NO OPENING EXCEPT THE ONE (PANE->PAIN) SAID OUR LEARNED GUIDE +1580-141083-0022-1971: I AM AFRAID THERE ARE NO SIGNS HERE SAID HE +1580-141083-0023-1972: ONE COULD HARDLY HOPE FOR ANY UPON SO DRY A DAY +1580-141083-0024-1973: YOU LEFT HIM IN A CHAIR YOU SAY WHICH CHAIR BY THE WINDOW THERE +1580-141083-0025-1974: THE (MAN->MEN) ENTERED AND TOOK THE PAPERS SHEET BY SHEET FROM THE CENTRAL TABLE +1580-141083-0026-1975: AS A MATTER OF FACT HE COULD NOT SAID (SOAMES->SOLMES) FOR I ENTERED BY THE SIDE DOOR +1580-141083-0027-1976: HOW LONG WOULD IT TAKE HIM TO DO THAT USING EVERY POSSIBLE CONTRACTION A QUARTER OF AN HOUR NOT LESS +1580-141083-0028-1977: THEN HE TOSSED IT DOWN AND SEIZED THE NEXT +1580-141083-0029-1978: HE WAS IN THE MIDST OF THAT WHEN YOUR RETURN CAUSED HIM TO MAKE A VERY HURRIED RETREAT VERY HURRIED SINCE HE HAD NOT TIME TO REPLACE THE PAPERS WHICH WOULD TELL YOU THAT HE HAD BEEN THERE +1580-141083-0030-1979: MISTER (SOAMES->SOLMES) WAS SOMEWHAT OVERWHELMED BY THIS FLOOD OF INFORMATION +1580-141083-0031-1980: HOLMES HELD OUT A SMALL CHIP WITH THE LETTERS N N AND A SPACE OF CLEAR WOOD AFTER THEM YOU SEE +1580-141083-0032-1981: WATSON I HAVE ALWAYS DONE YOU AN INJUSTICE THERE ARE OTHERS +1580-141083-0033-1982: I WAS HOPING THAT IF THE PAPER ON WHICH HE WROTE WAS THIN SOME TRACE OF IT MIGHT COME THROUGH UPON THIS POLISHED SURFACE NO I SEE NOTHING +1580-141083-0034-1983: 
AS HOLMES DREW THE CURTAIN I WAS AWARE FROM SOME LITTLE RIGIDITY AND (*->AN) ALERTNESS OF HIS ATTITUDE THAT HE WAS PREPARED FOR AN EMERGENCY +1580-141083-0035-1984: HOLMES TURNED AWAY AND STOOPED SUDDENLY TO THE FLOOR (HALLOA WHAT'S->HULLO WHAT IS) THIS +1580-141083-0036-1985: HOLMES (HELD IT->HUTTED) OUT ON HIS OPEN PALM IN THE GLARE OF THE ELECTRIC LIGHT +1580-141083-0037-1986: WHAT COULD HE DO HE CAUGHT UP EVERYTHING WHICH WOULD BETRAY HIM AND HE RUSHED INTO YOUR BEDROOM TO CONCEAL HIMSELF +1580-141083-0038-1987: I UNDERSTAND YOU TO SAY THAT THERE ARE THREE STUDENTS WHO USE THIS (STAIR->STARE) AND ARE IN THE HABIT OF PASSING YOUR DOOR YES THERE ARE +1580-141083-0039-1988: AND THEY ARE ALL IN FOR THIS EXAMINATION YES +1580-141083-0040-1989: ONE HARDLY LIKES TO THROW SUSPICION WHERE THERE ARE NO PROOFS +1580-141083-0041-1990: LET US (HEAR->SEE) THE SUSPICIONS I WILL LOOK AFTER THE PROOFS +1580-141083-0042-1991: MY SCHOLAR HAS BEEN LEFT (*->A) VERY POOR BUT HE IS HARD WORKING AND INDUSTRIOUS HE WILL DO WELL +1580-141083-0043-1992: THE TOP FLOOR BELONGS TO (MILES->MYLES) MC LAREN +1580-141083-0044-1993: I DARE NOT GO SO FAR AS THAT BUT OF THE THREE HE IS PERHAPS THE LEAST UNLIKELY +1580-141083-0045-1994: HE WAS STILL SUFFERING FROM THIS SUDDEN DISTURBANCE OF THE QUIET ROUTINE OF HIS LIFE +1580-141083-0046-1995: BUT I HAVE OCCASIONALLY DONE THE SAME THING AT OTHER TIMES +1580-141083-0047-1996: DID YOU LOOK AT THESE PAPERS ON THE TABLE +1580-141083-0048-1997: HOW CAME YOU TO LEAVE THE KEY IN THE DOOR +1580-141083-0049-1998: (ANYONE->ANY ONE) IN THE ROOM COULD GET OUT YES SIR +1580-141083-0050-1999: I (*->HAVE) REALLY DON'T THINK HE KNEW MUCH ABOUT IT MISTER HOLMES +1580-141083-0051-2000: ONLY FOR A MINUTE OR SO +1580-141083-0052-2001: OH I WOULD NOT VENTURE TO SAY SIR +1580-141083-0053-2002: YOU HAVEN'T SEEN ANY OF THEM NO SIR +1580-141084-0000-2003: IT WAS THE INDIAN WHOSE DARK SILHOUETTE APPEARED SUDDENLY UPON HIS BLIND +1580-141084-0001-2004: HE WAS PACING SWIFTLY UP AND DOWN HIS ROOM +1580-141084-0002-2005: (THIS->THE) SET OF ROOMS IS QUITE THE OLDEST IN THE COLLEGE AND IT IS NOT UNUSUAL FOR VISITORS TO GO OVER THEM +1580-141084-0003-2006: NO NAMES PLEASE SAID HOLMES AS WE KNOCKED AT (GILCHRIST'S->GILCHER'S) DOOR +1580-141084-0004-2007: OF COURSE HE DID NOT REALIZE THAT IT WAS I WHO WAS KNOCKING BUT NONE THE LESS HIS CONDUCT WAS VERY UNCOURTEOUS AND INDEED UNDER THE CIRCUMSTANCES RATHER SUSPICIOUS +1580-141084-0005-2008: THAT IS VERY IMPORTANT SAID HOLMES +1580-141084-0006-2009: YOU DON'T SEEM TO REALIZE THE POSITION +1580-141084-0007-2010: TO MORROW (IS->WAS) THE EXAMINATION +1580-141084-0008-2011: I CANNOT ALLOW THE EXAMINATION TO BE HELD IF ONE OF THE PAPERS HAS BEEN TAMPERED WITH THE SITUATION MUST BE FACED +1580-141084-0009-2012: IT IS POSSIBLE THAT I MAY BE IN A POSITION THEN TO INDICATE SOME COURSE OF ACTION +1580-141084-0010-2013: I WILL TAKE THE BLACK CLAY WITH ME ALSO THE PENCIL CUTTINGS GOOD BYE +1580-141084-0011-2014: WHEN WE WERE OUT IN THE DARKNESS OF THE QUADRANGLE WE AGAIN LOOKED UP AT THE WINDOWS +1580-141084-0012-2015: THE FOUL MOUTHED FELLOW AT THE TOP +1580-141084-0013-2016: HE IS THE ONE WITH THE WORST RECORD +1580-141084-0014-2017: WHY (BANNISTER->BANISTER) THE SERVANT WHAT'S HIS GAME IN THE MATTER +1580-141084-0015-2018: HE IMPRESSED ME AS BEING A PERFECTLY HONEST MAN +1580-141084-0016-2019: MY FRIEND DID NOT APPEAR TO BE DEPRESSED BY HIS FAILURE BUT SHRUGGED HIS SHOULDERS (IN->AND) HALF HUMOROUS RESIGNATION +1580-141084-0017-2020: NO GOOD MY DEAR WATSON 
+1580-141084-0018-2021: I THINK SO YOU HAVE FORMED A CONCLUSION +1580-141084-0019-2022: YES MY DEAR WATSON I HAVE SOLVED THE MYSTERY +1580-141084-0020-2023: LOOK AT THAT HE HELD OUT HIS HAND +1580-141084-0021-2024: ON THE PALM WERE THREE LITTLE PYRAMIDS OF BLACK DOUGHY CLAY +1580-141084-0022-2025: AND ONE MORE THIS MORNING +1580-141084-0023-2026: IN A FEW HOURS THE EXAMINATION WOULD COMMENCE AND HE WAS STILL IN THE DILEMMA BETWEEN MAKING THE FACTS PUBLIC AND ALLOWING THE CULPRIT TO COMPETE FOR THE VALUABLE SCHOLARSHIP +1580-141084-0024-2027: HE COULD HARDLY STAND STILL SO GREAT WAS HIS MENTAL AGITATION AND HE RAN TOWARDS HOLMES WITH (TWO->TOO) EAGER HANDS OUTSTRETCHED THANK HEAVEN THAT YOU HAVE COME +1580-141084-0025-2028: YOU KNOW HIM I THINK SO +1580-141084-0026-2029: IF THIS MATTER IS NOT TO BECOME PUBLIC WE MUST GIVE OURSELVES CERTAIN POWERS AND RESOLVE OURSELVES INTO A SMALL PRIVATE COURT MARTIAL +1580-141084-0027-2030: NO SIR CERTAINLY NOT +1580-141084-0028-2031: THERE WAS NO MAN SIR +1580-141084-0029-2032: HIS TROUBLED BLUE EYES GLANCED AT EACH OF US AND FINALLY RESTED WITH AN EXPRESSION OF BLANK DISMAY UPON (BANNISTER->BANISTER) IN THE FARTHER CORNER +1580-141084-0030-2033: JUST CLOSE THE DOOR SAID HOLMES +1580-141084-0031-2034: WE WANT TO KNOW MISTER (GILCHRIST->GILGRIST) HOW YOU AN HONOURABLE MAN EVER CAME TO COMMIT SUCH AN ACTION AS THAT OF YESTERDAY +1580-141084-0032-2035: FOR A MOMENT (GILCHRIST->GO CHRIST) WITH UPRAISED HAND TRIED TO CONTROL HIS WRITHING FEATURES +1580-141084-0033-2036: COME COME SAID HOLMES KINDLY IT IS HUMAN TO ERR AND AT LEAST NO ONE CAN ACCUSE YOU OF BEING A CALLOUS CRIMINAL +1580-141084-0034-2037: WELL WELL DON'T TROUBLE TO ANSWER LISTEN AND SEE THAT I DO YOU (NO->KNOW) INJUSTICE +1580-141084-0035-2038: HE COULD EXAMINE THE PAPERS IN HIS OWN OFFICE +1580-141084-0036-2039: THE INDIAN I ALSO THOUGHT NOTHING OF +1580-141084-0037-2040: WHEN I APPROACHED YOUR ROOM I EXAMINED THE WINDOW +1580-141084-0038-2041: NO ONE LESS THAN THAT WOULD HAVE A CHANCE +1580-141084-0039-2042: I ENTERED AND I TOOK YOU INTO MY CONFIDENCE AS TO THE SUGGESTIONS OF THE SIDE TABLE +1580-141084-0040-2043: HE RETURNED CARRYING HIS JUMPING SHOES WHICH ARE PROVIDED AS YOU ARE AWARE WITH SEVERAL SHARP SPIKES +1580-141084-0041-2044: NO HARM WOULD HAVE BEEN DONE HAD IT NOT BEEN THAT AS HE PASSED YOUR DOOR HE PERCEIVED THE KEY WHICH HAD BEEN LEFT BY THE CARELESSNESS OF YOUR SERVANT +1580-141084-0042-2045: A SUDDEN IMPULSE CAME OVER HIM TO ENTER AND SEE IF THEY WERE INDEED THE PROOFS +1580-141084-0043-2046: HE PUT HIS SHOES ON THE TABLE +1580-141084-0044-2047: GLOVES SAID THE YOUNG MAN +1580-141084-0045-2048: SUDDENLY HE HEARD HIM AT THE VERY DOOR THERE WAS NO POSSIBLE ESCAPE +1580-141084-0046-2049: HAVE I TOLD THE TRUTH MISTER (GILCHRIST->GILGRIST) +1580-141084-0047-2050: I HAVE A LETTER HERE MISTER (SOAMES->SOLMES) WHICH I WROTE TO YOU EARLY THIS MORNING IN THE MIDDLE OF A RESTLESS NIGHT +1580-141084-0048-2051: IT (WILL->WOULD) BE CLEAR TO YOU FROM WHAT I HAVE SAID THAT ONLY YOU COULD HAVE LET THIS YOUNG MAN OUT SINCE YOU WERE LEFT IN THE ROOM AND MUST HAVE LOCKED THE DOOR WHEN YOU WENT OUT +1580-141084-0049-2052: IT WAS SIMPLE ENOUGH SIR IF YOU ONLY HAD KNOWN BUT WITH ALL YOUR CLEVERNESS IT WAS IMPOSSIBLE THAT YOU COULD KNOW +1580-141084-0050-2053: IF MISTER (SOAMES->SOLMES) SAW THEM THE GAME WAS UP +1995-1826-0000-750: IN THE DEBATE BETWEEN THE SENIOR SOCIETIES HER DEFENCE OF THE FIFTEENTH AMENDMENT HAD BEEN NOT ONLY A NOTABLE BIT OF REASONING BUT DELIVERED WITH REAL ENTHUSIASM 
+1995-1826-0001-751: THE SOUTH SHE HAD NOT THOUGHT OF SERIOUSLY AND YET KNOWING OF ITS DELIGHTFUL HOSPITALITY AND MILD CLIMATE SHE WAS NOT AVERSE TO CHARLESTON OR NEW ORLEANS +1995-1826-0002-752: JOHN TAYLOR WHO HAD SUPPORTED HER THROUGH COLLEGE WAS INTERESTED IN COTTON +1995-1826-0003-753: BETTER GO HE HAD (COUNSELLED->COUNSEL) SENTENTIOUSLY +1995-1826-0004-754: MIGHT LEARN SOMETHING USEFUL DOWN THERE +1995-1826-0005-755: BUT JOHN THERE'S NO SOCIETY JUST ELEMENTARY WORK +1995-1826-0006-756: BEEN LOOKING UP (TOOMS->TOMBS) COUNTY +1995-1826-0007-757: (FIND->FIVE) SOME (CRESSWELLS->CRUSTWELLS) THERE BIG PLANTATIONS RATED AT TWO HUNDRED AND FIFTY THOUSAND DOLLARS +1995-1826-0008-758: SOME OTHERS TOO BIG COTTON COUNTY +1995-1826-0009-759: YOU OUGHT TO KNOW JOHN IF I TEACH NEGROES I'LL SCARCELY SEE MUCH OF PEOPLE IN MY OWN CLASS +1995-1826-0010-760: AT ANY RATE I SAY GO +1995-1826-0011-761: HERE SHE WAS TEACHING DIRTY CHILDREN AND THE SMELL OF CONFUSED ODORS AND BODILY PERSPIRATION WAS TO HER AT TIMES UNBEARABLE +1995-1826-0012-762: SHE WANTED A GLANCE OF THE NEW BOOKS AND PERIODICALS AND TALK OF (GREAT PHILANTHROPIES->GRATEFUL ANTHROPIES) AND REFORMS +1995-1826-0013-763: SO FOR THE HUNDREDTH TIME SHE WAS THINKING (TODAY->TO DAY) AS SHE WALKED ALONE UP THE LANE BACK OF THE BARN AND THEN SLOWLY DOWN THROUGH THE BOTTOMS +1995-1826-0014-764: COTTON SHE PAUSED +1995-1826-0015-765: SHE HAD ALMOST FORGOTTEN THAT IT WAS HERE WITHIN TOUCH (AND->IN) SIGHT +1995-1826-0016-766: THE GLIMMERING SEA OF DELICATE LEAVES WHISPERED AND MURMURED BEFORE HER STRETCHING AWAY TO THE NORTHWARD +1995-1826-0017-767: THERE MIGHT BE A BIT OF POETRY HERE AND THERE BUT MOST OF THIS PLACE WAS SUCH DESPERATE PROSE +1995-1826-0018-768: HER REGARD SHIFTED TO THE GREEN STALKS AND LEAVES AGAIN AND SHE STARTED TO MOVE AWAY +1995-1826-0019-769: COTTON IS A WONDERFUL THING IS IT NOT BOYS SHE SAID RATHER PRIMLY +1995-1826-0020-770: MISS TAYLOR DID NOT KNOW MUCH ABOUT COTTON BUT AT LEAST ONE MORE (REMARK->REMARKED) SEEMED CALLED FOR +1995-1826-0021-771: DON'T KNOW WELL OF ALL THINGS INWARDLY COMMENTED MISS TAYLOR LITERALLY BORN IN COTTON AND OH WELL AS MUCH AS TO ASK WHAT'S THE USE SHE TURNED AGAIN TO GO +1995-1826-0022-772: I SUPPOSE THOUGH IT'S TOO EARLY FOR THEM THEN CAME THE EXPLOSION +1995-1826-0023-773: (GOOBERS->GOULD WAS) DON'T GROW ON THE (TOPS OF VINES->TOPSY BANDS) BUT (UNDERGROUND->ON THE GROUND) ON THE ROOTS LIKE YAMS IS THAT SO +1995-1826-0024-774: THE GOLDEN FLEECE IT'S THE SILVER FLEECE HE (HARKENED->HEARKENED) +1995-1826-0025-775: (SOME TIME YOU'LL->SOMETIME YOU) TELL ME PLEASE WON'T YOU +1995-1826-0026-776: (NOW->THOU) FOR ONE LITTLE HALF HOUR SHE HAD BEEN A WOMAN TALKING TO A BOY NO NOT EVEN THAT SHE HAD BEEN TALKING JUST TALKING THERE WERE NO PERSONS IN THE CONVERSATION JUST THINGS ONE THING COTTON +1995-1836-0000-735: THE HON (CHARLES->*) SMITH MISS SARAH'S BROTHER WAS WALKING SWIFTLY UPTOWN FROM MISTER EASTERLY'S WALL STREET OFFICE AND HIS FACE WAS PALE +1995-1836-0001-736: AT LAST THE COTTON COMBINE WAS TO ALL APPEARANCES AN ASSURED FACT AND HE WAS SLATED FOR THE SENATE +1995-1836-0002-737: WHY SHOULD HE NOT BE AS OTHER MEN +1995-1836-0003-738: SHE WAS NOT HERSELF (A NOTABLY->UNNOTABLY) INTELLIGENT WOMAN SHE GREATLY ADMIRED INTELLIGENCE OR WHATEVER LOOKED TO HER LIKE INTELLIGENCE IN OTHERS +1995-1836-0004-739: AS SHE AWAITED HER (GUESTS->GUESS) SHE SURVEYED THE TABLE WITH BOTH SATISFACTION AND DISQUIETUDE FOR HER SOCIAL FUNCTIONS WERE FEW (TONIGHT->TO NIGHT) THERE WERE SHE CHECKED THEM OFF ON HER FINGERS SIR 
JAMES (CREIGHTON->CRIGHTON) THE RICH ENGLISH MANUFACTURER AND LADY (CREIGHTON->CRIGHTON) MISTER AND MISSUS (VANDERPOOL->VAN DERPOOL) MISTER HARRY CRESSWELL AND HIS SISTER JOHN TAYLOR AND HIS SISTER AND MISTER CHARLES SMITH WHOM THE EVENING PAPERS MENTIONED AS LIKELY TO BE UNITED STATES SENATOR FROM NEW JERSEY A SELECTION OF GUESTS THAT HAD BEEN DETERMINED UNKNOWN TO THE HOSTESS BY THE MEETING OF COTTON INTERESTS EARLIER IN THE DAY +1995-1836-0005-740: MISSUS (GREY->GRAY) HAD MET SOUTHERNERS BEFORE BUT NOT INTIMATELY AND SHE ALWAYS HAD IN MIND VIVIDLY THEIR CRUELTY TO POOR NEGROES A SUBJECT SHE MADE A POINT OF INTRODUCING FORTHWITH +1995-1836-0006-741: SHE WAS THEREFORE MOST AGREEABLY SURPRISED TO HEAR MISTER (CRESSWELL->CRESWELL) EXPRESS HIMSELF SO CORDIALLY AS APPROVING OF NEGRO EDUCATION +1995-1836-0007-742: (BUT YOU->DO) BELIEVE IN SOME EDUCATION ASKED MARY TAYLOR +1995-1836-0008-743: I BELIEVE IN THE TRAINING OF PEOPLE TO THEIR (HIGHEST->HAS) CAPACITY THE ENGLISHMAN HERE HEARTILY SECONDED HIM +1995-1836-0009-744: BUT (CRESSWELL->CRASWELL) ADDED SIGNIFICANTLY CAPACITY DIFFERS ENORMOUSLY BETWEEN RACES +1995-1836-0010-745: THE VANDERPOOLS WERE SURE (OF->*) THIS AND THE ENGLISHMAN INSTANCING INDIA BECAME QUITE ELOQUENT MISSUS (GREY->GRAY) WAS MYSTIFIED BUT HARDLY DARED ADMIT IT THE GENERAL TREND OF THE CONVERSATION SEEMED TO BE THAT MOST INDIVIDUALS NEEDED TO BE SUBMITTED TO THE SHARPEST SCRUTINY BEFORE BEING ALLOWED MUCH EDUCATION AND AS FOR THE LOWER RACES IT WAS SIMPLY CRIMINAL TO OPEN SUCH USELESS OPPORTUNITIES TO THEM +1995-1836-0011-746: POSITIVELY HEROIC ADDED (CRESSWELL->CRASWELL) AVOIDING HIS SISTER'S EYES +1995-1836-0012-747: BUT (WE'RE->WE ARE) NOT (ER->A) EXACTLY (WELCOMED->WELCOME) +1995-1836-0013-748: MARY TAYLOR HOWEVER RELATED THE TALE OF ZORA TO MISSUS (GREY'S->GRAY'S) PRIVATE EAR LATER +1995-1836-0014-749: FORTUNATELY SAID MISTER (VANDERPOOL NORTHERNERS->VAN DERPOOL NOR THE NOSE) AND SOUTHERNERS (ARE ARRIVING->ALL RIVING) AT A BETTER MUTUAL UNDERSTANDING ON MOST OF THESE MATTERS +1995-1837-0000-777: HE KNEW THE SILVER FLEECE HIS AND (ZORA'S->ZORAS) MUST BE RUINED +1995-1837-0001-778: IT WAS THE FIRST GREAT SORROW OF HIS LIFE IT WAS NOT SO MUCH THE LOSS OF THE COTTON ITSELF BUT THE FANTASY THE HOPES THE DREAMS BUILT AROUND IT +1995-1837-0002-779: AH THE SWAMP THE CRUEL SWAMP +1995-1837-0003-780: (THE->WHO) REVELATION OF HIS LOVE LIGHTED AND BRIGHTENED SLOWLY TILL IT FLAMED LIKE A SUNRISE OVER HIM AND LEFT HIM IN BURNING WONDER +1995-1837-0004-781: HE PANTED TO KNOW IF SHE TOO KNEW OR KNEW AND CARED NOT OR CARED AND KNEW NOT +1995-1837-0005-782: SHE WAS SO STRANGE AND HUMAN A CREATURE +1995-1837-0006-783: THE WORLD WAS WATER VEILED IN MISTS +1995-1837-0007-784: THEN OF A SUDDEN AT MIDDAY THE SUN SHOT OUT HOT AND STILL NO BREATH OF AIR STIRRED THE SKY WAS LIKE BLUE STEEL THE EARTH STEAMED +1995-1837-0008-785: WHERE WAS THE USE OF IMAGINING +1995-1837-0009-786: THE LAGOON HAD BEEN LEVEL WITH THE (DYKES->DIKES) A WEEK AGO AND NOW +1995-1837-0010-787: PERHAPS SHE TOO MIGHT BE THERE WAITING WEEPING +1995-1837-0011-788: HE STARTED AT THE THOUGHT HE HURRIED FORTH SADLY +1995-1837-0012-789: HE SPLASHED AND STAMPED ALONG FARTHER AND FARTHER ONWARD UNTIL HE NEARED THE RAMPART OF THE CLEARING AND PUT FOOT UPON THE TREE BRIDGE +1995-1837-0013-790: THEN HE LOOKED DOWN THE LAGOON WAS DRY +1995-1837-0014-791: HE STOOD A MOMENT BEWILDERED THEN TURNED AND RUSHED UPON THE ISLAND A GREAT SHEET OF DAZZLING SUNLIGHT SWEPT THE PLACE AND BENEATH LAY A MIGHTY MASS OF OLIVE GREEN THICK TALL WET AND 
WILLOWY +1995-1837-0015-792: THE SQUARES OF COTTON SHARP EDGED HEAVY WERE JUST ABOUT TO BURST TO (BOLLS->BOWLS) +1995-1837-0016-793: FOR ONE LONG MOMENT HE PAUSED STUPID AGAPE WITH UTTER AMAZEMENT THEN LEANED DIZZILY AGAINST (A->THE) TREE +1995-1837-0017-794: HE GAZED ABOUT PERPLEXED ASTONISHED +1995-1837-0018-795: HERE LAY THE READING OF THE RIDDLE WITH INFINITE WORK AND PAIN SOME ONE HAD DUG A CANAL FROM THE LAGOON TO THE CREEK INTO WHICH THE FORMER HAD DRAINED BY A LONG AND CROOKED WAY THUS ALLOWING IT TO EMPTY DIRECTLY +1995-1837-0019-796: HE SAT DOWN WEAK BEWILDERED AND ONE THOUGHT WAS UPPERMOST (ZORA->SORA) +1995-1837-0020-797: THE YEARS OF THE DAYS OF HER DYING WERE TEN +1995-1837-0021-798: THE HOPE AND DREAM OF HARVEST WAS UPON THE LAND +1995-1837-0022-799: UP IN THE SICK ROOM ZORA LAY ON THE LITTLE WHITE BED +1995-1837-0023-800: THE NET AND WEB OF ENDLESS THINGS HAD BEEN CRAWLING AND CREEPING AROUND HER SHE HAD STRUGGLED IN DUMB SPEECHLESS TERROR AGAINST SOME MIGHTY GRASPING THAT STROVE FOR HER LIFE WITH GNARLED AND CREEPING FINGERS BUT NOW AT LAST (WEAKLY->WEEKLY) SHE OPENED HER EYES AND QUESTIONED +1995-1837-0024-801: FOR A WHILE SHE LAY IN HER CHAIR IN HAPPY DREAMY PLEASURE AT SUN AND BIRD AND TREE +1995-1837-0025-802: SHE ROSE WITH A FLEETING GLANCE GATHERED THE SHAWL (ROUND->AROUND) HER THEN GLIDING FORWARD WAVERING TREMULOUS SLIPPED ACROSS THE ROAD AND INTO THE SWAMP +1995-1837-0026-803: SHE HAD BEEN BORN WITHIN ITS BORDERS WITHIN ITS BORDERS SHE HAD LIVED AND GROWN AND WITHIN ITS (BORDERS->BORDER) SHE HAD MET HER LOVE +1995-1837-0027-804: ON SHE HURRIED UNTIL SWEEPING DOWN TO THE LAGOON AND THE ISLAND LO THE COTTON LAY BEFORE HER +1995-1837-0028-805: THE CHAIR WAS EMPTY BUT HE KNEW +1995-1837-0029-806: HE DARTED THROUGH THE TREES AND PAUSED A TALL MAN STRONGLY BUT SLIMLY MADE +2094-142345-0000-308: IT IS A VERY FINE OLD PLACE OF RED BRICK SOFTENED BY A PALE POWDERY LICHEN WHICH HAS DISPERSED ITSELF WITH HAPPY IRREGULARITY SO AS TO BRING THE RED BRICK INTO TERMS OF FRIENDLY COMPANIONSHIP WITH (THE->A) LIMESTONE ORNAMENTS SURROUNDING THE THREE GABLES THE WINDOWS AND THE DOOR PLACE +2094-142345-0001-309: BUT THE WINDOWS ARE PATCHED WITH WOODEN PANES AND THE DOOR I THINK IS LIKE THE GATE IT IS NEVER OPENED +2094-142345-0002-310: FOR IT IS A SOLID HEAVY HANDSOME DOOR AND MUST ONCE HAVE BEEN IN THE HABIT OF SHUTTING WITH A SONOROUS BANG BEHIND (A->THE) LIVERIED LACKEY WHO HAD JUST SEEN HIS MASTER AND MISTRESS OFF THE GROUNDS IN A CARRIAGE AND PAIR +2094-142345-0003-311: A LARGE OPEN FIREPLACE WITH RUSTY DOGS IN IT AND A BARE BOARDED FLOOR AT THE FAR END FLEECES OF WOOL STACKED UP IN THE MIDDLE OF THE FLOOR SOME EMPTY CORN BAGS +2094-142345-0004-312: AND WHAT THROUGH THE LEFT HAND WINDOW +2094-142345-0005-313: SEVERAL CLOTHES HORSES A PILLION A SPINNING WHEEL AND AN OLD BOX WIDE OPEN AND STUFFED FULL OF COLOURED RAGS +2094-142345-0006-314: AT THE EDGE OF THIS BOX THERE LIES A GREAT WOODEN DOLL WHICH SO FAR AS MUTILATION IS CONCERNED BEARS A STRONG RESEMBLANCE TO THE FINEST GREEK SCULPTURE AND ESPECIALLY IN THE TOTAL LOSS OF ITS NOSE +2094-142345-0007-315: THE HISTORY OF THE HOUSE IS PLAIN NOW +2094-142345-0008-316: BUT THERE IS ALWAYS A STRONGER SENSE OF LIFE WHEN THE SUN IS BRILLIANT AFTER RAIN AND NOW HE IS POURING DOWN HIS BEAMS AND MAKING SPARKLES AMONG THE WET STRAW AND LIGHTING UP EVERY PATCH OF VIVID GREEN MOSS ON THE RED TILES OF THE COW SHED AND TURNING EVEN THE MUDDY WATER THAT IS HURRYING ALONG THE CHANNEL TO THE DRAIN INTO A MIRROR FOR THE YELLOW (BILLED->BUILD) DUCKS 
WHO ARE SEIZING THE OPPORTUNITY OF GETTING A DRINK WITH AS MUCH BODY IN IT AS POSSIBLE +2094-142345-0009-317: FOR THE GREAT BARN DOORS ARE THROWN WIDE OPEN AND MEN ARE BUSY THERE MENDING THE HARNESS UNDER THE SUPERINTENDENCE OF MISTER GOBY THE (WHITTAW->WIDOW) OTHERWISE SADDLER WHO ENTERTAINS THEM WITH THE LATEST (TREDDLESTON->TREDDLESTONE) GOSSIP +2094-142345-0010-318: (HETTY->HETTY'S) SORREL OFTEN TOOK THE OPPORTUNITY WHEN HER AUNT'S BACK WAS TURNED OF LOOKING AT THE PLEASING REFLECTION OF HERSELF IN THOSE POLISHED (SURFACES->SERVICES) FOR THE OAK TABLE WAS USUALLY TURNED UP LIKE A SCREEN AND WAS MORE FOR ORNAMENT THAN FOR USE AND SHE COULD SEE HERSELF SOMETIMES IN THE GREAT ROUND PEWTER DISHES THAT WERE RANGED ON THE SHELVES ABOVE THE LONG DEAL DINNER TABLE OR IN THE HOBS OF THE GRATE WHICH ALWAYS SHONE LIKE JASPER +2094-142345-0011-319: DO NOT SUPPOSE HOWEVER THAT MISSUS POYSER WAS ELDERLY OR SHREWISH IN HER APPEARANCE SHE WAS A GOOD LOOKING WOMAN NOT MORE THAN EIGHT AND THIRTY OF FAIR COMPLEXION AND SANDY HAIR WELL SHAPEN LIGHT FOOTED +2094-142345-0012-320: THE FAMILY LIKENESS BETWEEN HER AND HER NIECE (DINAH->DINA) MORRIS WITH THE CONTRAST BETWEEN HER KEENNESS AND (DINAH'S->DYNAS) SERAPHIC GENTLENESS OF EXPRESSION MIGHT HAVE SERVED A PAINTER AS AN EXCELLENT SUGGESTION FOR A MARTHA AND MARY +2094-142345-0013-321: HER TONGUE WAS NOT LESS KEEN THAN HER EYE AND WHENEVER A DAMSEL CAME WITHIN (EARSHOT->EAR SHOT) SEEMED TO TAKE UP AN UNFINISHED LECTURE AS A BARREL ORGAN TAKES UP A TUNE PRECISELY AT THE POINT WHERE IT HAD LEFT OFF +2094-142345-0014-322: THE FACT THAT IT WAS CHURNING DAY WAS ANOTHER REASON WHY IT WAS INCONVENIENT TO HAVE THE (WHITTAWS->WIDOWS) AND WHY CONSEQUENTLY MISSUS POYSER SHOULD SCOLD MOLLY THE HOUSEMAID WITH UNUSUAL SEVERITY +2094-142345-0015-323: TO ALL APPEARANCE MOLLY HAD GOT THROUGH HER AFTER DINNER WORK IN AN EXEMPLARY MANNER HAD CLEANED HERSELF WITH GREAT DISPATCH AND NOW CAME TO ASK SUBMISSIVELY IF SHE SHOULD SIT DOWN TO HER SPINNING TILL MILKING TIME +2094-142345-0016-324: SPINNING INDEED +2094-142345-0017-325: I NEVER KNEW YOUR EQUALS FOR GALLOWSNESS +2094-142345-0018-326: WHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE TO KNOW +2094-142345-0019-327: COMB THE WOOL FOR THE (WHITTAWS->WIDOWS) INDEED +2094-142345-0020-328: THAT'S WHAT YOU'D LIKE TO BE DOING IS IT +2094-142345-0021-329: THAT'S THE WAY WITH YOU THAT'S THE ROAD YOU'D ALL LIKE TO GO HEADLONGS TO RUIN +2094-142345-0022-330: MISTER (OTTLEY'S->OAKLEIGHS) INDEED +2094-142345-0023-331: (YOU'RE->YOU ARE) A RARE (UN->AND) FOR SITTING DOWN TO YOUR WORK A LITTLE WHILE AFTER (IT'S->ITS) TIME TO PUT BY +2094-142345-0024-332: (MUNNY->MONEY) MY (IRON'S->IRONS) TWITE (TOLD PEASE->COLD PIECE) PUT IT DOWN TO WARM +2094-142345-0025-333: COLD IS IT MY DARLING BLESS YOUR SWEET FACE +2094-142345-0026-334: SHE'S GOING TO PUT THE IRONING THINGS AWAY +2094-142345-0027-335: (MUNNY->MONEY) I (TOULD IKE->DID LIKE) TO DO INTO (DE->THE) BARN TO TOMMY TO SEE (DE WHITTAWD->THE WIDOWED) +2094-142345-0028-336: NO NO NO (TOTTY UD->NO TODDY HAD) GET HER FEET WET SAID MISSUS POYSER CARRYING AWAY HER IRON +2094-142345-0029-337: DID EVER ANYBODY SEE THE LIKE SCREAMED MISSUS POYSER RUNNING TOWARDS THE TABLE WHEN HER EYE HAD FALLEN ON THE BLUE STREAM +2094-142345-0030-338: TOTTY HOWEVER HAD DESCENDED FROM HER CHAIR WITH GREAT SWIFTNESS AND WAS ALREADY IN RETREAT TOWARDS THE DAIRY WITH A SORT OF WADDLING RUN AND AN AMOUNT OF FAT ON THE NAPE OF HER NECK WHICH MADE HER LOOK LIKE THE METAMORPHOSIS OF A WHITE SUCKLING PIG +2094-142345-0031-339: 
AND SHE WAS VERY FOND OF YOU TOO AUNT RACHEL +2094-142345-0032-340: I OFTEN HEARD HER TALK OF YOU IN THE SAME SORT OF WAY +2094-142345-0033-341: WHEN SHE HAD THAT BAD ILLNESS AND I WAS ONLY ELEVEN YEARS OLD SHE USED TO SAY YOU'LL HAVE A FRIEND ON EARTH IN YOUR AUNT RACHEL IF I'M TAKEN FROM YOU FOR SHE HAS A KIND HEART AND I'M SURE I'VE FOUND IT SO +2094-142345-0034-342: AND THERE'S LINEN IN THE HOUSE AS I COULD WELL SPARE YOU FOR (I'VE->I) GOT LOTS (O->OF) SHEETING AND TABLE CLOTHING AND (TOWELLING AS->TOWELINGS) ISN'T MADE UP +2094-142345-0035-343: BUT NOT MORE THAN WHAT'S IN THE BIBLE (AUNT->AND) SAID DINAH +2094-142345-0036-344: NAY DEAR AUNT YOU NEVER HEARD ME SAY THAT ALL PEOPLE ARE CALLED TO FORSAKE THEIR WORK AND THEIR FAMILIES +2094-142345-0037-345: WE CAN ALL BE SERVANTS OF GOD WHEREVER OUR LOT IS CAST BUT HE GIVES US DIFFERENT SORTS OF WORK ACCORDING AS HE FITS US FOR IT AND CALLS US TO IT +2094-142345-0038-346: I CAN NO MORE HELP SPENDING MY LIFE IN TRYING TO DO WHAT I CAN FOR THE SOULS OF OTHERS (THAN->THEN) YOU COULD HELP RUNNING IF YOU HEARD LITTLE TOTTY CRYING AT THE OTHER END OF THE HOUSE THE VOICE WOULD GO TO YOUR HEART YOU WOULD THINK THE DEAR CHILD WAS IN TROUBLE OR IN DANGER AND YOU COULDN'T REST WITHOUT RUNNING TO HELP HER AND COMFORT HER +2094-142345-0039-347: I'VE STRONG ASSURANCE THAT NO EVIL WILL HAPPEN TO YOU AND MY UNCLE AND THE CHILDREN FROM ANYTHING (I'VE->I HAVE) DONE +2094-142345-0040-348: I DIDN'T PREACH WITHOUT DIRECTION +2094-142345-0041-349: DIRECTION +2094-142345-0042-350: I (HANNA->HAD A) COMMON PATIENCE WITH YOU +2094-142345-0043-351: BY THIS TIME THE TWO GENTLEMEN HAD REACHED THE PALINGS AND HAD GOT DOWN FROM THEIR HORSES IT WAS PLAIN THEY MEANT TO COME IN +2094-142345-0044-352: SAID MISTER IRWINE WITH HIS STATELY CORDIALITY +2094-142345-0045-353: OH SIR DON'T MENTION IT SAID MISSUS POYSER +2094-142345-0046-354: I DELIGHT IN YOUR KITCHEN +2094-142345-0047-355: POYSER IS NOT AT HOME IS HE +2094-142345-0048-356: SAID CAPTAIN DONNITHORNE (SEATING->SITTING) HIMSELF WHERE HE COULD SEE ALONG THE SHORT PASSAGE TO THE OPEN DAIRY DOOR +2094-142345-0049-357: NO SIR HE ISN'T HE'S GONE TO (ROSSETER->ROSSITUR) TO SEE MISTER WEST THE FACTOR ABOUT THE WOOL +2094-142345-0050-358: BUT THERE'S FATHER (THE->IN) BARN SIR IF HE'D BE OF ANY USE +2094-142345-0051-359: NO THANK YOU I'LL JUST LOOK AT THE WHELPS AND LEAVE A MESSAGE ABOUT THEM WITH YOUR SHEPHERD +2094-142345-0052-360: I MUST COME ANOTHER DAY AND SEE YOUR HUSBAND I WANT TO HAVE A CONSULTATION WITH HIM ABOUT HORSES +2094-142345-0053-361: FOR IF (HE'S->IS) ANYWHERE ON THE FARM WE CAN SEND FOR HIM IN A MINUTE +2094-142345-0054-362: OH SIR SAID MISSUS POYSER RATHER ALARMED YOU WOULDN'T LIKE IT AT ALL +2094-142345-0055-363: BUT YOU KNOW MORE ABOUT THAT THAN I DO SIR +2094-142345-0056-364: I THINK I SHOULD BE DOING YOU A SERVICE TO TURN YOU OUT OF SUCH A PLACE +2094-142345-0057-365: I (KNOW HIS->KNOWS) FARM IS IN BETTER ORDER THAN ANY OTHER WITHIN TEN MILES OF US AND AS FOR THE KITCHEN HE ADDED SMILING I DON'T BELIEVE THERE'S ONE IN THE KINGDOM TO BEAT IT +2094-142345-0058-366: BY THE BY (I'VE->I HAVE) NEVER SEEN YOUR DAIRY I MUST SEE YOUR (DAIRY->DEARIE) MISSUS POYSER +2094-142345-0059-367: THIS MISSUS POYSER SAID BLUSHING AND BELIEVING THAT THE CAPTAIN WAS REALLY INTERESTED IN HER MILK PANS AND WOULD ADJUST HIS OPINION OF HER TO THE APPEARANCE OF HER DAIRY +2094-142345-0060-368: OH I'VE NO DOUBT IT'S IN CAPITAL ORDER +2300-131720-0000-1816: THE PARIS PLANT LIKE THAT AT THE CRYSTAL PALACE WAS A TEMPORARY EXHIBIT 
+2300-131720-0001-1817: THE LONDON PLANT WAS LESS TEMPORARY BUT NOT PERMANENT SUPPLYING BEFORE IT WAS TORN OUT NO FEWER THAN THREE THOUSAND LAMPS IN HOTELS CHURCHES STORES AND DWELLINGS IN THE VICINITY OF HOLBORN (VIADUCT->VIEDUC) +2300-131720-0002-1818: THERE (MESSRS->MESSIERS) JOHNSON AND HAMMER PUT INTO PRACTICE MANY OF THE IDEAS NOW STANDARD IN THE ART AND SECURED MUCH USEFUL DATA FOR THE WORK IN NEW YORK OF WHICH THE STORY HAS JUST BEEN TOLD +2300-131720-0003-1819: THE DYNAMO ELECTRIC MACHINE THOUGH SMALL WAS ROBUST FOR UNDER ALL THE VARYING SPEEDS OF WATER POWER AND THE VICISSITUDES OF THE PLANT TO WHICH IT BELONGED IT CONTINUED IN ACTIVE USE UNTIL EIGHTEEN NINETY NINE SEVENTEEN YEARS +2300-131720-0004-1820: OWING TO HIS INSISTENCE ON LOW PRESSURE DIRECT CURRENT FOR USE IN DENSELY POPULATED DISTRICTS AS THE ONLY SAFE AND TRULY UNIVERSAL PROFITABLE WAY OF DELIVERING ELECTRICAL ENERGY TO THE CONSUMERS EDISON HAS BEEN FREQUENTLY SPOKEN OF AS AN OPPONENT OF THE ALTERNATING CURRENT +2300-131720-0005-1821: WHY IF WE ERECT A STATION AT THE FALLS IT IS A GREAT ECONOMY TO GET IT UP TO THE CITY +2300-131720-0006-1822: THERE SEEMS NO GOOD REASON FOR BELIEVING THAT IT WILL CHANGE +2300-131720-0007-1823: BROAD AS THE PRAIRIES AND FREE IN THOUGHT AS THE WINDS THAT (SWEEP->SWEPT) THEM HE IS (IDIOSYNCRATICALLY->IDIOS AND CRADICALLY) OPPOSED TO LOOSE AND WASTEFUL METHODS TO PLANS OF EMPIRE THAT NEGLECT THE POOR AT THE GATE +2300-131720-0008-1824: EVERYTHING HE HAS DONE HAS BEEN AIMED AT THE CONSERVATION OF ENERGY THE CONTRACTION OF SPACE THE INTENSIFICATION OF CULTURE +2300-131720-0009-1825: FOR SOME YEARS IT WAS NOT FOUND FEASIBLE TO OPERATE MOTORS ON ALTERNATING CURRENT CIRCUITS AND THAT REASON WAS OFTEN URGED AGAINST (IT->ITS) SERIOUSLY +2300-131720-0010-1826: IT COULD NOT BE USED FOR ELECTROPLATING OR DEPOSITION NOR COULD IT CHARGE STORAGE BATTERIES ALL OF WHICH ARE EASILY WITHIN THE ABILITY OF THE DIRECT CURRENT +2300-131720-0011-1827: BUT WHEN IT CAME TO BE A QUESTION OF LIGHTING A SCATTERED SUBURB A GROUP OF DWELLINGS ON THE OUTSKIRTS A REMOTE COUNTRY RESIDENCE OR A FARM HOUSE THE ALTERNATING CURRENT IN ALL ELEMENTS SAVE ITS DANGER WAS AND IS IDEAL +2300-131720-0012-1828: EDISON WAS INTOLERANT OF SHAM AND (SHODDY->SHODY) AND NOTHING WOULD SATISFY HIM THAT COULD NOT STAND CROSS EXAMINATION BY MICROSCOPE TEST TUBE AND GALVANOMETER +2300-131720-0013-1829: UNLESS HE COULD SECURE AN ENGINE OF SMOOTHER RUNNING AND MORE EXACTLY (GOVERNED->GOVERN) AND REGULATED THAN THOSE AVAILABLE FOR HIS DYNAMO AND LAMP EDISON REALIZED THAT HE WOULD FIND IT ALMOST IMPOSSIBLE TO GIVE A STEADY LIGHT +2300-131720-0014-1830: MISTER EDISON WAS A LEADER FAR AHEAD OF THE TIME +2300-131720-0015-1831: HE OBTAINED THE DESIRED SPEED AND (LOAD->LOWED) WITH A FRICTION (BRAKE->BREAK) ALSO REGULATOR OF SPEED BUT WAITED FOR AN INDICATOR TO VERIFY IT +2300-131720-0016-1832: THEN AGAIN THERE WAS NO KNOWN WAY TO (LUBRICATE->LUBRICADE) AN ENGINE FOR CONTINUOUS RUNNING AND MISTER EDISON INFORMED ME THAT AS A MARINE ENGINE STARTED BEFORE THE SHIP LEFT NEW YORK AND CONTINUED RUNNING UNTIL IT REACHED ITS HOME PORT SO AN ENGINE FOR HIS PURPOSES MUST PRODUCE LIGHT AT ALL TIMES +2300-131720-0017-1833: EDISON HAD INSTALLED HIS HISTORIC FIRST GREAT CENTRAL STATION SYSTEM IN NEW YORK ON THE MULTIPLE ARC SYSTEM COVERED BY HIS FEEDER AND MAIN INVENTION WHICH RESULTED IN A NOTABLE SAVING IN THE COST OF CONDUCTORS AS AGAINST A STRAIGHT TWO WIRE SYSTEM THROUGHOUT OF THE TREE KIND +2300-131720-0018-1834: HE SOON FORESAW THAT STILL GREATER ECONOMY 
WOULD BE NECESSARY FOR COMMERCIAL SUCCESS NOT ALONE FOR THE LARGER TERRITORY OPENING BUT FOR THE COMPACT (DISTRICTS->DISTRICT) OF LARGE CITIES +2300-131720-0019-1835: THE STRONG POSITION HELD BY THE EDISON SYSTEM UNDER THE STRENUOUS COMPETITION THAT WAS ALREADY SPRINGING UP WAS ENORMOUSLY IMPROVED BY THE INTRODUCTION OF THE THREE WIRE SYSTEM AND IT GAVE AN IMMEDIATE IMPETUS TO INCANDESCENT LIGHTING +2300-131720-0020-1836: IT WAS SPECIALLY SUITED FOR A TRIAL PLANT ALSO IN THE EARLY DAYS WHEN A YIELD OF SIX OR EIGHT LAMPS TO THE HORSE (POWER->BOWER) WAS CONSIDERED SUBJECT FOR CONGRATULATION +2300-131720-0021-1837: THE STREET CONDUCTORS WERE OF THE OVERHEAD POLE LINE CONSTRUCTION AND WERE INSTALLED BY THE CONSTRUCTION COMPANY THAT HAD BEEN ORGANIZED BY EDISON TO BUILD (AND->AN) EQUIP CENTRAL STATIONS +2300-131720-0022-1838: MEANWHILE HE HAD CALLED UPON ME TO MAKE A REPORT OF THE THREE WIRE SYSTEM KNOWN IN ENGLAND AS THE HOPKINSON BOTH DOCTOR JOHN HOPKINSON AND MISTER EDISON BEING INDEPENDENT (INVENTORS->IN VENORS) AT PRACTICALLY THE SAME TIME +2300-131720-0023-1839: I THINK HE WAS PERHAPS MORE APPRECIATIVE THAN I WAS OF THE DISCIPLINE OF THE EDISON CONSTRUCTION DEPARTMENT AND THOUGHT IT WOULD BE WELL FOR US TO WAIT UNTIL THE MORNING OF THE FOURTH BEFORE WE STARTED UP +2300-131720-0024-1840: BUT THE PLANT RAN AND IT WAS THE FIRST THREE WIRE STATION IN THIS COUNTRY +2300-131720-0025-1841: THEY WERE LATER USED AS RESERVE MACHINES AND FINALLY WITH THE ENGINE RETIRED FROM SERVICE AS PART OF THE COLLECTION OF EDISONIA BUT THEY REMAIN (IN PRACTICALLY->IMPRACTICALLY) AS GOOD CONDITION AS (WHEN->ONE) INSTALLED IN EIGHTEEN EIGHTY THREE +2300-131720-0026-1842: THE (ARC->ARK) LAMP INSTALLED OUTSIDE A CUSTOMER'S PREMISES OR IN A CIRCUIT FOR PUBLIC STREET LIGHTING BURNED SO MANY HOURS NIGHTLY SO MANY NIGHTS IN THE MONTH AND WAS PAID FOR AT THAT RATE SUBJECT TO REBATE FOR HOURS WHEN THE LAMP MIGHT BE OUT THROUGH ACCIDENT +2300-131720-0027-1843: EDISON HELD THAT THE ELECTRICITY SOLD MUST BE MEASURED JUST LIKE GAS OR WATER AND HE PROCEEDED TO DEVELOP A METER +2300-131720-0028-1844: THERE WAS INFINITE SCEPTICISM AROUND HIM ON THE SUBJECT AND WHILE OTHER INVENTORS WERE ALSO GIVING THE SUBJECT THEIR THOUGHT THE PUBLIC TOOK IT FOR GRANTED THAT ANYTHING SO UTTERLY INTANGIBLE AS ELECTRICITY THAT COULD NOT BE SEEN OR WEIGHED AND ONLY GAVE SECONDARY EVIDENCE OF ITSELF AT THE EXACT POINT OF USE COULD NOT BE BROUGHT TO ACCURATE REGISTRATION +2300-131720-0029-1845: HENCE THE EDISON ELECTROLYTIC METER IS NO LONGER USED DESPITE ITS EXCELLENT QUALITIES +2300-131720-0030-1846: THE (PRINCIPLE->PRINCIPAL) EMPLOYED IN THE EDISON ELECTROLYTIC METER IS THAT WHICH EXEMPLIFIES THE POWER OF ELECTRICITY TO DECOMPOSE A CHEMICAL SUBSTANCE +2300-131720-0031-1847: ASSOCIATED WITH THIS SIMPLE FORM OF APPARATUS WERE VARIOUS INGENIOUS DETAILS AND REFINEMENTS TO SECURE REGULARITY OF OPERATION FREEDOM FROM INACCURACY AND IMMUNITY FROM SUCH TAMPERING AS WOULD PERMIT THEFT OF CURRENT OR DAMAGE +2300-131720-0032-1848: THE STANDARD EDISON METER PRACTICE WAS TO REMOVE THE CELLS ONCE A MONTH TO THE METER ROOM OF THE CENTRAL STATION COMPANY FOR EXAMINATION ANOTHER SET BEING SUBSTITUTED +2300-131720-0033-1849: IN DECEMBER EIGHTEEN EIGHTY EIGHT MISTER W J JENKS READ AN INTERESTING PAPER BEFORE THE AMERICAN INSTITUTE OF ELECTRICAL ENGINEERS ON THE SIX YEARS OF PRACTICAL EXPERIENCE HAD UP TO THAT TIME WITH THE (METER->METRE) THEN MORE GENERALLY IN USE THAN ANY OTHER +2300-131720-0034-1850: THE OTHERS HAVING BEEN IN OPERATION TOO SHORT A TIME TO SHOW 
DEFINITE RESULTS ALTHOUGH THEY ALSO WENT QUICKLY TO A DIVIDEND BASIS +2300-131720-0035-1851: IN THIS CONNECTION IT SHOULD BE MENTIONED THAT THE ASSOCIATION OF EDISON ILLUMINATING COMPANIES IN THE SAME YEAR ADOPTED RESOLUTIONS UNANIMOUSLY TO THE EFFECT THAT THE EDISON METER WAS ACCURATE AND THAT ITS USE WAS NOT EXPENSIVE FOR STATIONS ABOVE ONE THOUSAND LIGHTS AND THAT THE BEST FINANCIAL RESULTS WERE INVARIABLY SECURED IN A STATION SELLING CURRENT BY METER +2300-131720-0036-1852: THE (METER->METRE) CONTINUED IN GENERAL SERVICE DURING EIGHTEEN NINETY NINE AND PROBABLY UP TO THE CLOSE OF THE CENTURY +2300-131720-0037-1853: HE WEIGHED AND (REWEIGHED->REWAIED) THE (METER->METRE) PLATES AND PURSUED EVERY LINE OF INVESTIGATION IMAGINABLE BUT ALL IN VAIN +2300-131720-0038-1854: HE FELT HE WAS UP AGAINST IT AND THAT PERHAPS ANOTHER KIND OF A JOB WOULD SUIT HIM BETTER +2300-131720-0039-1855: THE PROBLEM WAS SOLVED +2300-131720-0040-1856: WE WERE MORE INTERESTED IN THE TECHNICAL CONDITION OF THE STATION THAN IN THE COMMERCIAL PART +2300-131720-0041-1857: WE HAD (METERS->METRES) IN WHICH THERE WERE TWO BOTTLES OF LIQUID +237-126133-0000-2407: HERE SHE WOULD STAY COMFORTED AND (SOOTHED->SOOTHE) AMONG THE LOVELY PLANTS AND RICH EXOTICS REJOICING THE HEART OF OLD TURNER THE GARDENER WHO SINCE POLLY'S FIRST RAPTUROUS ENTRANCE HAD TAKEN HER INTO HIS GOOD GRACES FOR ALL TIME +237-126133-0001-2408: EVERY CHANCE SHE COULD STEAL AFTER PRACTICE HOURS WERE OVER AND AFTER THE CLAMOROUS DEMANDS OF THE BOYS UPON HER TIME WERE FULLY SATISFIED WAS SEIZED TO FLY ON THE WINGS OF THE WIND TO THE FLOWERS +237-126133-0002-2409: THEN DEAR SAID MISSUS WHITNEY YOU MUST BE KINDER TO HER THAN EVER THINK WHAT IT WOULD BE FOR ONE OF YOU TO BE AWAY FROM HOME EVEN AMONG FRIENDS +237-126133-0003-2410: SOMEHOW OF ALL THE DAYS WHEN THE HOME FEELING WAS THE STRONGEST THIS DAY IT SEEMED AS IF SHE COULD BEAR IT NO LONGER +237-126133-0004-2411: IF SHE COULD ONLY SEE PHRONSIE FOR JUST ONE MOMENT +237-126133-0005-2412: OH SHE'S ALWAYS AT THE PIANO SAID VAN SHE MUST BE THERE NOW SOMEWHERE AND THEN SOMEBODY LAUGHED +237-126133-0006-2413: AT THIS THE BUNDLE OPENED SUDDENLY AND OUT POPPED PHRONSIE +237-126133-0007-2414: BUT POLLY COULDN'T SPEAK AND IF JASPER HADN'T CAUGHT HER JUST IN TIME SHE WOULD HAVE TUMBLED OVER BACKWARD FROM THE STOOL PHRONSIE AND ALL +237-126133-0008-2415: ASKED PHRONSIE WITH HER LITTLE FACE CLOSE TO POLLY'S OWN +237-126133-0009-2416: NOW YOU'LL STAY CRIED VAN SAY POLLY WON'T YOU +237-126133-0010-2417: OH YOU ARE THE DEAREST AND BEST MISTER KING I EVER SAW BUT HOW DID YOU MAKE MAMMY LET HER COME +237-126133-0011-2418: ISN'T HE SPLENDID CRIED JASPER (IN->AN) INTENSE PRIDE SWELLING UP FATHER KNEW HOW TO DO IT +237-126133-0012-2419: THERE THERE HE (SAID->SAT) SOOTHINGLY PATTING HER BROWN FUZZY HEAD +237-126133-0013-2420: I KNOW GASPED POLLY CONTROLLING HER SOBS I WON'T ONLY I CAN'T THANK YOU +237-126133-0014-2421: ASKED PHRONSIE IN INTENSE INTEREST SLIPPING DOWN OUT OF POLLY'S ARMS AND CROWDING UP CLOSE TO JASPER'S SIDE +237-126133-0015-2422: YES ALL ALONE BY HIMSELF ASSERTED JASPER VEHEMENTLY AND WINKING FURIOUSLY TO THE OTHERS TO STOP THEIR LAUGHING HE DID NOW TRULY PHRONSIE +237-126133-0016-2423: OH NO (JASPER->JAPSER) I MUST GO BY MY VERY OWN SELF +237-126133-0017-2424: THERE JAP YOU'VE CAUGHT IT LAUGHED PERCY WHILE THE OTHERS SCREAMED AT THE SIGHT OF JASPER'S FACE +237-126133-0018-2425: DON'T MIND IT POLLY WHISPERED JASPER TWASN'T HER FAULT +237-126133-0019-2426: DEAR ME EJACULATED THE OLD GENTLEMAN IN THE UTMOST 
AMAZEMENT AND SUCH A TIME AS I'VE HAD TO GET HER HERE TOO +237-126133-0020-2427: HOW DID HER MOTHER EVER LET HER GO +237-126133-0021-2428: SHE ASKED IMPULSIVELY I DIDN'T BELIEVE YOU COULD PERSUADE HER FATHER +237-126133-0022-2429: I DIDN'T HAVE ANY FEARS IF I WORKED IT RIGHTLY SAID THE OLD GENTLEMAN COMPLACENTLY +237-126133-0023-2430: HE CRIED IN HIGH DUDGEON JUST AS IF HE OWNED THE WHOLE OF THE PEPPERS AND COULD DISPOSE OF THEM ALL TO SUIT HIS FANCY +237-126133-0024-2431: AND THE OLD GENTLEMAN WAS SO DELIGHTED WITH HIS SUCCESS THAT HE HAD TO BURST OUT INTO A SERIES OF SHORT HAPPY BITS OF LAUGHTER THAT OCCUPIED QUITE A SPACE OF TIME +237-126133-0025-2432: AT LAST HE CAME OUT OF THEM AND WIPED HIS FACE VIGOROUSLY +237-134493-0000-2388: IT IS SIXTEEN YEARS SINCE JOHN (BERGSON->BERKES AND) DIED +237-134493-0001-2389: HIS WIFE NOW LIES BESIDE HIM AND THE WHITE SHAFT THAT MARKS THEIR GRAVES GLEAMS ACROSS THE WHEAT FIELDS +237-134493-0002-2390: FROM THE NORWEGIAN GRAVEYARD ONE LOOKS OUT OVER A VAST (CHECKER->CHEQUER) BOARD MARKED OFF IN SQUARES OF WHEAT AND CORN LIGHT AND DARK (DARK->*) AND LIGHT +237-134493-0003-2391: FROM THE GRAVEYARD GATE ONE CAN COUNT A DOZEN (GAYLY->GAILY) PAINTED FARMHOUSES THE GILDED WEATHER (VANES->VEINS) ON THE BIG RED BARNS WINK AT EACH OTHER ACROSS THE GREEN AND BROWN AND YELLOW FIELDS +237-134493-0004-2392: THE AIR AND THE EARTH ARE CURIOUSLY MATED AND INTERMINGLED AS IF THE ONE WERE THE BREATH OF THE OTHER +237-134493-0005-2393: HE WAS A SPLENDID FIGURE OF A BOY TALL AND STRAIGHT AS A YOUNG PINE TREE WITH A HANDSOME HEAD AND STORMY GRAY EYES DEEPLY SET UNDER A SERIOUS BROW +237-134493-0006-2394: THAT'S NOT MUCH OF A JOB FOR AN ATHLETE HERE I'VE BEEN TO TOWN AND BACK +237-134493-0007-2395: ALEXANDRA (LETS->THAT'S) YOU SLEEP LATE +237-134493-0008-2396: SHE GATHERED UP HER REINS +237-134493-0009-2397: PLEASE WAIT FOR ME MARIE (EMIL->AMYL) COAXED +237-134493-0010-2398: I NEVER SEE (LOU'S->LOOSE) SCYTHE OVER HERE +237-134493-0011-2399: HOW BROWN YOU'VE GOT SINCE YOU CAME HOME I WISH I HAD AN (ATHLETE->ADETE) TO MOW MY ORCHARD +237-134493-0012-2400: I GET WET TO MY KNEES WHEN I GO DOWN TO (PICK->PIC) CHERRIES +237-134493-0013-2401: INDEED HE HAD LOOKED AWAY WITH THE PURPOSE OF NOT SEEING IT +237-134493-0014-2402: THEY THINK (YOU'RE->YOU ARE) PROUD BECAUSE YOU'VE BEEN AWAY TO SCHOOL OR SOMETHING +237-134493-0015-2403: THERE WAS SOMETHING INDIVIDUAL ABOUT THE GREAT FARM A MOST UNUSUAL TRIMNESS AND CARE FOR DETAIL +237-134493-0016-2404: ON EITHER SIDE OF THE ROAD FOR A MILE BEFORE YOU REACHED THE FOOT OF THE HILL STOOD TALL (OSAGE->O SAGE) ORANGE HEDGES THEIR GLOSSY GREEN MARKING OFF THE YELLOW FIELDS +237-134493-0017-2405: ANY ONE THEREABOUTS WOULD HAVE TOLD YOU THAT THIS WAS ONE OF THE RICHEST FARMS ON THE DIVIDE AND THAT THE FARMER WAS A WOMAN ALEXANDRA (BERGSON->BERGIN) +237-134493-0018-2406: THERE IS EVEN A WHITE ROW OF BEEHIVES IN THE ORCHARD UNDER THE WALNUT TREES +237-134500-0000-2345: FRANK READ ENGLISH SLOWLY AND THE MORE HE READ ABOUT THIS DIVORCE CASE THE ANGRIER HE GREW +237-134500-0001-2346: MARIE SIGHED +237-134500-0002-2347: A (BRISK->BRACE) WIND HAD COME UP AND WAS DRIVING PUFFY WHITE CLOUDS ACROSS THE SKY +237-134500-0003-2348: THE (ORCHARD->ARCHER) WAS SPARKLING AND RIPPLING IN THE SUN +237-134500-0004-2349: THAT INVITATION DECIDED HER +237-134500-0005-2350: OH BUT (I'M->I AM) GLAD TO GET THIS PLACE MOWED +237-134500-0006-2351: JUST SMELL THE WILD ROSES THEY ARE ALWAYS SO SPICY AFTER A RAIN +237-134500-0007-2352: WE NEVER HAD SO MANY OF THEM IN HERE 
BEFORE +237-134500-0008-2353: I SUPPOSE IT'S THE WET SEASON WILL YOU HAVE TO CUT THEM TOO +237-134500-0009-2354: I SUPPOSE THAT'S THE WET SEASON TOO THEN +237-134500-0010-2355: IT'S EXCITING TO SEE EVERYTHING GROWING SO FAST AND TO GET THE GRASS CUT +237-134500-0011-2356: AREN'T YOU SPLASHED LOOK AT THE SPIDER WEBS ALL OVER THE GRASS +237-134500-0012-2357: IN A FEW MOMENTS HE HEARD THE CHERRIES DROPPING SMARTLY INTO THE PAIL AND HE BEGAN TO SWING HIS SCYTHE WITH THAT LONG EVEN STROKE THAT FEW AMERICAN BOYS EVER LEARN +237-134500-0013-2358: MARIE PICKED CHERRIES AND SANG SOFTLY TO HERSELF STRIPPING ONE GLITTERING (BRANCH->RANCH) AFTER ANOTHER SHIVERING WHEN SHE (CAUGHT->THOUGHT) A SHOWER OF RAINDROPS ON HER NECK AND HAIR +237-134500-0014-2359: AND (EMIL->AMYL) MOWED HIS WAY SLOWLY DOWN TOWARD THE CHERRY TREES +237-134500-0015-2360: THAT SUMMER THE RAINS HAD BEEN SO MANY AND OPPORTUNE THAT IT WAS ALMOST MORE THAN (SHABATA->SHEBATA) AND HIS MAN COULD DO TO KEEP UP WITH THE CORN THE ORCHARD WAS A NEGLECTED WILDERNESS +237-134500-0016-2361: I DON'T KNOW ALL OF THEM BUT I KNOW LINDENS ARE +237-134500-0017-2362: IF I FEEL THAT WAY I FEEL THAT WAY +237-134500-0018-2363: HE REACHED UP AMONG THE BRANCHES AND BEGAN TO PICK THE SWEET INSIPID FRUIT LONG IVORY COLORED BERRIES TIPPED WITH FAINT PINK LIKE WHITE CORAL THAT FALL TO THE GROUND UNHEEDED ALL SUMMER THROUGH +237-134500-0019-2364: HE DROPPED A HANDFUL INTO HER LAP +237-134500-0020-2365: YES DON'T YOU +237-134500-0021-2366: OH EVER SO MUCH ONLY HE SEEMS KIND OF (STAID AND->STAY AT IN) SCHOOL TEACHERY +237-134500-0022-2367: WHEN SHE USED TO TELL ME ABOUT HIM I ALWAYS WONDERED WHETHER SHE WASN'T A LITTLE IN LOVE WITH HIM +237-134500-0023-2368: IT WOULD SERVE YOU ALL RIGHT IF SHE WALKED OFF WITH (CARL->KARL) +237-134500-0024-2369: I LIKE TO TALK TO (CARL->KARL) ABOUT NEW YORK AND WHAT A FELLOW CAN DO THERE +237-134500-0025-2370: OH (EMIL->AMY ILL) +237-134500-0026-2371: SURELY YOU ARE NOT THINKING OF GOING OFF THERE +237-134500-0027-2372: MARIE'S FACE FELL UNDER HIS BROODING GAZE +237-134500-0028-2373: (I'M->I AM) SURE (ALEXANDRA HOPES->ALEXANDER HELPS) YOU WILL STAY ON HERE SHE MURMURED +237-134500-0029-2374: I DON'T WANT TO STAND AROUND AND LOOK ON +237-134500-0030-2375: I WANT TO BE DOING SOMETHING ON MY OWN ACCOUNT +237-134500-0031-2376: SOMETIMES I DON'T WANT TO DO ANYTHING AT ALL AND SOMETIMES I WANT TO PULL THE FOUR CORNERS OF THE DIVIDE TOGETHER HE THREW OUT HIS ARM AND BROUGHT IT BACK WITH A JERK SO LIKE A (TABLE CLOTH->TABLECLOTH) +237-134500-0032-2377: I GET TIRED OF SEEING (MEN->MAN) AND HORSES GOING UP AND DOWN UP AND DOWN +237-134500-0033-2378: I WISH YOU WEREN'T SO RESTLESS AND DIDN'T GET SO WORKED UP OVER THINGS SHE SAID SADLY +237-134500-0034-2379: THANK YOU HE RETURNED SHORTLY +237-134500-0035-2380: AND YOU NEVER USED TO BE CROSS TO ME +237-134500-0036-2381: I CAN'T PLAY WITH YOU LIKE A LITTLE BOY ANY MORE HE SAID SLOWLY THAT'S WHAT YOU MISS MARIE +237-134500-0037-2382: BUT (EMIL->AM ILL) IF I UNDERSTAND (THEN->IN) ALL OUR GOOD TIMES ARE OVER WE CAN NEVER DO NICE THINGS TOGETHER ANY MORE +237-134500-0038-2383: AND ANYHOW THERE'S NOTHING TO UNDERSTAND +237-134500-0039-2384: THAT WON'T LAST IT WILL GO AWAY AND THINGS WILL BE JUST AS THEY USED TO +237-134500-0040-2385: I PRAY FOR YOU BUT THAT'S NOT THE SAME AS IF YOU PRAYED YOURSELF +237-134500-0041-2386: I CAN'T PRAY TO HAVE THE THINGS I WANT HE SAID SLOWLY AND I WON'T PRAY NOT TO HAVE THEM NOT IF I'M DAMNED FOR IT +237-134500-0042-2387: THEN ALL OUR GOOD TIMES ARE OVER 
+260-123286-0000-200: SATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL ROUND NO LAND IN SIGHT +260-123286-0001-201: THE HORIZON SEEMS EXTREMELY DISTANT +260-123286-0002-202: ALL MY DANGER AND SUFFERINGS WERE NEEDED TO STRIKE A SPARK OF HUMAN FEELING OUT OF HIM BUT NOW THAT I AM WELL HIS NATURE HAS RESUMED ITS SWAY +260-123286-0003-203: YOU SEEM ANXIOUS MY UNCLE I SAID SEEING HIM CONTINUALLY WITH HIS GLASS TO HIS EYE ANXIOUS +260-123286-0004-204: ONE MIGHT BE WITH LESS REASON THAN NOW +260-123286-0005-205: I AM NOT COMPLAINING THAT THE RATE IS SLOW BUT THAT THE SEA IS SO WIDE +260-123286-0006-206: WE ARE LOSING TIME AND THE FACT IS I HAVE NOT COME ALL THIS WAY TO TAKE A LITTLE SAIL UPON A POND ON A RAFT +260-123286-0007-207: HE CALLED THIS SEA (A POND->UPON) AND OUR LONG VOYAGE TAKING A LITTLE SAIL +260-123286-0008-208: THEREFORE DON'T TALK TO ME ABOUT VIEWS AND PROSPECTS +260-123286-0009-209: I TAKE THIS (AS->IS) MY ANSWER AND I LEAVE THE PROFESSOR TO BITE HIS LIPS WITH IMPATIENCE +260-123286-0010-210: SUNDAY AUGUST SIXTEENTH +260-123286-0011-211: NOTHING NEW (WEATHER->WHETHER) UNCHANGED THE WIND FRESHENS +260-123286-0012-212: BUT THERE SEEMED NO REASON (TO->OF) FEAR +260-123286-0013-213: THE SHADOW OF THE RAFT WAS CLEARLY OUTLINED UPON THE SURFACE OF THE WAVES +260-123286-0014-214: TRULY (THIS->THE) SEA IS OF INFINITE WIDTH +260-123286-0015-215: IT MUST BE AS WIDE AS THE MEDITERRANEAN OR THE ATLANTIC AND WHY NOT +260-123286-0016-216: THESE THOUGHTS AGITATED ME ALL DAY AND MY IMAGINATION SCARCELY CALMED DOWN AFTER SEVERAL HOURS SLEEP +260-123286-0017-217: I SHUDDER AS I RECALL THESE MONSTERS TO MY REMEMBRANCE +260-123286-0018-218: I SAW AT THE HAMBURG MUSEUM THE SKELETON OF ONE OF THESE CREATURES THIRTY FEET IN LENGTH +260-123286-0019-219: I SUPPOSE PROFESSOR LIEDENBROCK WAS OF MY OPINION TOO AND EVEN SHARED MY FEARS FOR AFTER HAVING EXAMINED THE (PICK->PIG) HIS EYES TRAVERSED THE OCEAN FROM SIDE TO SIDE +260-123286-0020-220: TUESDAY AUGUST EIGHTEENTH +260-123286-0021-221: DURING HIS WATCH I SLEPT +260-123286-0022-222: TWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE ME +260-123286-0023-223: THE RAFT WAS HEAVED UP ON A WATERY MOUNTAIN AND PITCHED DOWN AGAIN AT A DISTANCE OF TWENTY FATHOMS +260-123286-0024-224: THERE'S A (WHALE->WAIL) A (WHALE->WELL) CRIED THE PROFESSOR +260-123286-0025-225: (FLIGHT->FIGHT) WAS OUT OF THE QUESTION NOW THE REPTILES ROSE THEY WHEELED AROUND OUR LITTLE RAFT WITH A RAPIDITY GREATER THAN THAT OF EXPRESS TRAINS +260-123286-0026-226: TWO MONSTERS (ONLY->OMER) WERE CREATING ALL THIS COMMOTION AND BEFORE MY EYES (ARE->OUR) TWO REPTILES OF THE PRIMITIVE WORLD +260-123286-0027-227: I CAN DISTINGUISH THE EYE OF THE (ICHTHYOSAURUS->ITHUSORIS) GLOWING LIKE A RED HOT (COAL->CO) AND AS LARGE AS A MAN'S HEAD +260-123286-0028-228: ITS JAW IS ENORMOUS AND ACCORDING TO NATURALISTS IT IS ARMED WITH NO LESS THAN ONE HUNDRED AND EIGHTY TWO TEETH +260-123286-0029-229: THOSE HUGE CREATURES ATTACKED EACH OTHER WITH THE GREATEST ANIMOSITY +260-123286-0030-230: SUDDENLY THE (ICHTHYOSAURUS->IDEAS) AND THE PLESIOSAURUS DISAPPEAR BELOW LEAVING A (WHIRLPOOL->WAR POOL) EDDYING IN THE WATER +260-123286-0031-231: AS FOR THE (ICHTHYOSAURUS->ITHUSORIS) HAS HE RETURNED (TO->*) HIS SUBMARINE CAVERN +260-123288-0000-232: THE ROARINGS BECOME LOST IN THE DISTANCE +260-123288-0001-233: THE WEATHER IF WE MAY USE (THAT->THE) TERM WILL CHANGE BEFORE LONG +260-123288-0002-234: THE ATMOSPHERE IS CHARGED WITH (VAPOURS->VAPORS) PERVADED WITH THE ELECTRICITY GENERATED BY THE EVAPORATION OF (SALINE->SAILING) WATERS 
+260-123288-0003-235: THE ELECTRIC LIGHT CAN SCARCELY PENETRATE (THROUGH->TO) THE DENSE CURTAIN WHICH (HAS->IS) DROPPED OVER THE THEATRE ON WHICH THE BATTLE OF THE ELEMENTS IS ABOUT TO BE WAGED +260-123288-0004-236: THE AIR IS HEAVY THE SEA IS CALM +260-123288-0005-237: FROM TIME TO TIME A FLEECY TUFT OF (MIST->MISTS) WITH YET SOME GLEAMING LIGHT LEFT UPON IT DROPS DOWN UPON THE DENSE FLOOR OF (GREY->GRAY) AND LOSES ITSELF IN THE OPAQUE AND IMPENETRABLE MASS +260-123288-0006-238: THE ATMOSPHERE (IS->AS) EVIDENTLY CHARGED (AND->IN) SURCHARGED WITH ELECTRICITY +260-123288-0007-239: THE WIND NEVER (LULLS->LOLLS) BUT TO ACQUIRE INCREASED STRENGTH THE VAST BANK OF HEAVY CLOUDS IS A HUGE RESERVOIR OF FEARFUL WINDY GUSTS AND RUSHING STORMS +260-123288-0008-240: THERE'S A HEAVY STORM COMING ON I CRIED POINTING TOWARDS THE HORIZON +260-123288-0009-241: THOSE CLOUDS SEEM AS IF THEY WERE GOING TO CRUSH THE SEA +260-123288-0010-242: ON THE MAST ALREADY I SEE THE LIGHT PLAY OF A (LAMBENT->LAMENT) SAINT (ELMO'S->ABLE'S) FIRE THE OUTSTRETCHED SAIL CATCHES NOT A BREATH OF WIND AND HANGS LIKE A SHEET OF LEAD +260-123288-0011-243: BUT IF WE HAVE NOW CEASED TO ADVANCE WHY DO WE YET LEAVE THAT SAIL LOOSE WHICH AT THE FIRST SHOCK OF (THE->A) TEMPEST MAY CAPSIZE US IN A MOMENT +260-123288-0012-244: THAT WILL BE (*->THE) SAFEST NO NO NEVER +260-123288-0013-245: (THE->THEY) PILED UP (VAPOURS CONDENSE->VAPORS CONTENSED) INTO WATER AND THE AIR PUT INTO VIOLENT ACTION TO SUPPLY THE VACUUM LEFT BY THE CONDENSATION OF THE (MISTS->MIST) ROUSES ITSELF INTO A WHIRLWIND +260-123288-0014-246: HANS STIRS NOT +260-123288-0015-247: FROM THE UNDER SURFACE OF THE CLOUDS THERE ARE CONTINUAL (EMISSIONS->OMISSIONS) OF LURID LIGHT ELECTRIC MATTER IS IN CONTINUAL EVOLUTION FROM THEIR COMPONENT MOLECULES THE GASEOUS ELEMENTS OF THE AIR NEED TO BE SLAKED WITH MOISTURE FOR INNUMERABLE COLUMNS OF WATER RUSH UPWARDS INTO THE AIR AND FALL BACK AGAIN IN WHITE FOAM +260-123288-0016-248: I REFER TO THE THERMOMETER IT INDICATES THE FIGURE IS OBLITERATED +260-123288-0017-249: IS THE (ATMOSPHERIC->ATMOSPHERE) CONDITION HAVING ONCE REACHED (THIS->ITS) DENSITY TO BECOME FINAL +260-123288-0018-250: THE RAFT BEARS ON STILL TO THE SOUTH EAST +260-123288-0019-251: AT NOON THE VIOLENCE OF THE STORM REDOUBLES +260-123288-0020-252: EACH OF US IS LASHED TO SOME PART OF THE RAFT +260-123288-0021-253: THE WAVES RISE ABOVE OUR HEADS +260-123288-0022-254: THEY (SEEM->SEEMED) TO BE WE ARE LOST BUT I AM NOT SURE +260-123288-0023-255: HE NODS HIS CONSENT +260-123288-0024-256: THE (FIREBALL->FIRE BALL) HALF OF IT WHITE HALF AZURE BLUE AND THE SIZE OF A TEN INCH SHELL MOVED SLOWLY ABOUT THE RAFT BUT REVOLVING ON ITS OWN AXIS WITH ASTONISHING VELOCITY AS IF (WHIPPED->WHIP) ROUND BY THE FORCE OF THE WHIRLWIND +260-123288-0025-257: HERE IT COMES THERE IT GLIDES NOW IT IS UP THE RAGGED STUMP OF THE MAST THENCE IT LIGHTLY LEAPS ON THE PROVISION BAG DESCENDS WITH A LIGHT BOUND AND JUST SKIMS THE POWDER MAGAZINE HORRIBLE +260-123288-0026-258: WE SHALL BE BLOWN UP BUT NO THE DAZZLING DISK OF MYSTERIOUS LIGHT NIMBLY LEAPS ASIDE IT APPROACHES HANS WHO FIXES HIS BLUE EYE UPON IT STEADILY IT THREATENS THE HEAD OF MY UNCLE WHO FALLS UPON HIS KNEES WITH HIS HEAD DOWN TO AVOID IT +260-123288-0027-259: A SUFFOCATING SMELL OF NITROGEN FILLS THE AIR IT ENTERS THE THROAT IT FILLS THE LUNGS +260-123288-0028-260: WE SUFFER STIFLING PAINS +260-123440-0000-179: AND HOW ODD THE DIRECTIONS WILL LOOK +260-123440-0001-180: POOR ALICE +260-123440-0002-181: IT WAS THE WHITE RABBIT RETURNING 
SPLENDIDLY DRESSED WITH A PAIR OF WHITE KID GLOVES IN ONE HAND AND A LARGE FAN IN THE OTHER HE CAME TROTTING ALONG IN A GREAT HURRY MUTTERING TO HIMSELF AS HE CAME OH THE DUCHESS THE DUCHESS +260-123440-0003-182: OH WON'T SHE BE SAVAGE IF I'VE KEPT HER WAITING +260-123440-0004-183: ALICE TOOK UP THE FAN AND GLOVES AND AS THE HALL WAS VERY HOT SHE KEPT FANNING HERSELF ALL THE TIME SHE WENT ON TALKING DEAR DEAR HOW QUEER EVERYTHING IS TO DAY +260-123440-0005-184: AND YESTERDAY THINGS WENT ON JUST AS USUAL +260-123440-0006-185: I WONDER IF I'VE BEEN CHANGED IN THE NIGHT +260-123440-0007-186: I ALMOST THINK I CAN REMEMBER FEELING A LITTLE DIFFERENT +260-123440-0008-187: I'LL TRY IF I KNOW ALL THE THINGS I USED TO KNOW +260-123440-0009-188: I SHALL NEVER GET TO TWENTY AT THAT RATE +260-123440-0010-189: HOW CHEERFULLY HE SEEMS TO GRIN HOW NEATLY SPREAD HIS CLAWS AND WELCOME LITTLE FISHES IN WITH GENTLY SMILING JAWS +260-123440-0011-190: NO I'VE MADE UP MY MIND ABOUT IT IF I'M MABEL I'LL STAY DOWN HERE +260-123440-0012-191: IT'LL BE NO USE THEIR PUTTING THEIR HEADS DOWN AND SAYING COME UP AGAIN DEAR +260-123440-0013-192: I AM SO VERY TIRED OF BEING ALL ALONE HERE +260-123440-0014-193: AND I DECLARE IT'S TOO BAD THAT IT IS +260-123440-0015-194: I WISH I HADN'T CRIED SO MUCH SAID ALICE AS SHE SWAM ABOUT TRYING TO FIND HER WAY OUT +260-123440-0016-195: I SHALL BE PUNISHED FOR IT NOW I SUPPOSE BY BEING DROWNED IN MY OWN TEARS +260-123440-0017-196: THAT WILL BE A QUEER THING TO BE SURE +260-123440-0018-197: I AM VERY TIRED OF SWIMMING ABOUT HERE (O->OH) MOUSE +260-123440-0019-198: CRIED ALICE AGAIN FOR THIS TIME THE MOUSE WAS BRISTLING ALL OVER AND SHE FELT CERTAIN IT MUST BE REALLY OFFENDED +260-123440-0020-199: WE WON'T TALK ABOUT HER ANY MORE IF YOU'D RATHER NOT WE INDEED +2830-3979-0000-1120: WE WANT YOU TO HELP US PUBLISH SOME LEADING WORK OF LUTHER'S FOR THE GENERAL AMERICAN MARKET WILL YOU DO IT +2830-3979-0001-1121: THE CONDITION IS THAT I WILL BE PERMITTED TO MAKE LUTHER TALK AMERICAN (STREAMLINE HIM->STREAM LINE HYMN) SO TO SPEAK BECAUSE YOU WILL NEVER GET PEOPLE WHETHER IN OR OUTSIDE THE LUTHERAN CHURCH ACTUALLY TO READ LUTHER UNLESS WE MAKE HIM TALK AS HE WOULD TALK (TODAY->TO DAY) TO AMERICANS +2830-3979-0002-1122: LET US BEGIN WITH THAT HIS COMMENTARY ON (GALATIANS->GALLATIONS) +2830-3979-0003-1123: THE UNDERTAKING WHICH (SEEMED->SEEMS) SO ATTRACTIVE WHEN VIEWED AS A LITERARY TASK PROVED A MOST DIFFICULT ONE AND AT TIMES BECAME OPPRESSIVE +2830-3979-0004-1124: IT WAS WRITTEN IN LATIN +2830-3979-0005-1125: THE WORK HAD TO BE CONDENSED +2830-3979-0006-1126: A WORD SHOULD NOW BE SAID ABOUT THE ORIGIN OF LUTHER'S COMMENTARY (ON GALATIANS->ANGULATIONS) +2830-3979-0007-1127: MUCH LATER WHEN A FRIEND OF HIS WAS PREPARING AN (EDITION->ADDITION) OF ALL HIS LATIN WORKS HE REMARKED TO HIS HOME CIRCLE IF I HAD MY WAY ABOUT IT THEY WOULD REPUBLISH ONLY THOSE OF MY BOOKS WHICH HAVE DOCTRINE MY (GALATIANS->GALLATIONS) FOR INSTANCE +2830-3979-0008-1128: IN OTHER WORDS THESE THREE MEN TOOK DOWN THE LECTURES WHICH LUTHER ADDRESSED TO HIS STUDENTS IN THE COURSE OF (GALATIANS->GALLATIONS) AND (ROERER->ROAR) PREPARED THE MANUSCRIPT FOR THE PRINTER +2830-3979-0009-1129: IT PRESENTS LIKE NO OTHER OF LUTHER'S WRITINGS THE CENTRAL THOUGHT OF CHRISTIANITY THE JUSTIFICATION OF THE SINNER FOR THE SAKE OF CHRIST'S MERITS ALONE +2830-3979-0010-1130: BUT THE ESSENCE OF LUTHER'S LECTURES IS THERE +2830-3979-0011-1131: THE LORD WHO HAS GIVEN US POWER TO TEACH AND TO HEAR LET HIM ALSO GIVE US THE POWER TO SERVE AND TO DO 
LUKE TWO +2830-3979-0012-1132: THE WORD OF OUR GOD SHALL STAND (FOREVER->FOR EVER) +2830-3980-0000-1043: IN EVERY WAY THEY SOUGHT TO UNDERMINE THE AUTHORITY OF SAINT PAUL +2830-3980-0001-1044: THEY SAID TO THE GALATIANS YOU HAVE NO RIGHT TO THINK HIGHLY OF PAUL +2830-3980-0002-1045: HE WAS THE LAST TO TURN TO CHRIST +2830-3980-0003-1046: PAUL CAME LATER (AND IS->IN HIS) BENEATH US +2830-3980-0004-1047: INDEED HE PERSECUTED THE CHURCH OF CHRIST FOR A LONG TIME +2830-3980-0005-1048: DO YOU SUPPOSE THAT GOD FOR THE SAKE OF A FEW LUTHERAN HERETICS WOULD DISOWN HIS ENTIRE CHURCH +2830-3980-0006-1049: AGAINST THESE BOASTING FALSE APOSTLES PAUL BOLDLY DEFENDS HIS APOSTOLIC AUTHORITY AND MINISTRY +2830-3980-0007-1050: AS THE AMBASSADOR OF A GOVERNMENT IS HONORED FOR HIS OFFICE AND NOT FOR HIS PRIVATE PERSON SO THE MINISTER OF CHRIST SHOULD EXALT HIS OFFICE IN ORDER TO GAIN AUTHORITY AMONG MEN +2830-3980-0008-1051: (PAUL TAKES->POLITICS) PRIDE IN HIS MINISTRY NOT TO HIS OWN PRAISE BUT TO THE PRAISE OF GOD +2830-3980-0009-1052: PAUL (AN->AND) APOSTLE NOT OF MEN ET CETERA +2830-3980-0010-1053: EITHER HE CALLS MINISTERS THROUGH THE AGENCY OF MEN OR HE CALLS THEM DIRECTLY AS HE CALLED THE PROPHETS AND APOSTLES +2830-3980-0011-1054: PAUL DECLARES THAT THE FALSE APOSTLES WERE CALLED OR SENT NEITHER BY MEN NOR BY MAN +2830-3980-0012-1055: THE MOST THEY COULD CLAIM IS THAT THEY WERE SENT BY OTHERS +2830-3980-0013-1056: HE MENTIONS THE APOSTLES FIRST BECAUSE THEY WERE APPOINTED DIRECTLY BY GOD +2830-3980-0014-1057: THE CALL IS NOT TO BE TAKEN LIGHTLY +2830-3980-0015-1058: FOR A PERSON TO (POSSESS->POSSESSED) KNOWLEDGE IS NOT ENOUGH +2830-3980-0016-1059: IT SPOILS ONE'S BEST WORK +2830-3980-0017-1060: WHEN I WAS A YOUNG MAN I THOUGHT PAUL WAS MAKING TOO MUCH OF HIS CALL +2830-3980-0018-1061: I DID NOT THEN REALIZE THE IMPORTANCE OF THE MINISTRY +2830-3980-0019-1062: I KNEW NOTHING OF THE DOCTRINE OF FAITH BECAUSE WE WERE TAUGHT SOPHISTRY INSTEAD OF CERTAINTY AND NOBODY UNDERSTOOD SPIRITUAL BOASTING +2830-3980-0020-1063: THIS IS NO SINFUL PRIDE IT IS (HOLY->WHOLLY) PRIDE +2830-3980-0021-1064: AND GOD THE FATHER WHO RAISED HIM FROM THE DEAD +2830-3980-0022-1065: THE (CLAUSE->CLAS) SEEMS SUPERFLUOUS ON FIRST SIGHT +2830-3980-0023-1066: THESE (PERVERTERS->PERVERTED) OF THE RIGHTEOUSNESS OF CHRIST RESIST THE FATHER AND THE SON AND THE WORKS OF THEM BOTH +2830-3980-0024-1067: IN THIS WHOLE EPISTLE PAUL TREATS OF THE RESURRECTION OF CHRIST +2830-3980-0025-1068: BY HIS RESURRECTION CHRIST WON THE VICTORY OVER LAW SIN FLESH WORLD DEVIL DEATH HELL AND EVERY EVIL +2830-3980-0026-1069: (VERSE TWO->FIRST TOO) +2830-3980-0027-1070: AND ALL THE BRETHREN WHICH ARE WITH ME +2830-3980-0028-1071: THIS SHOULD GO FAR IN SHUTTING THE MOUTHS OF THE FALSE APOSTLES +2830-3980-0029-1072: ALTHOUGH THE BRETHREN WITH ME ARE NOT APOSTLES LIKE MYSELF YET THEY ARE ALL OF ONE MIND WITH ME THINK WRITE AND TEACH AS I DO +2830-3980-0030-1073: THEY DO NOT GO WHERE THE ENEMIES OF THE GOSPEL PREDOMINATE THEY GO WHERE THE CHRISTIANS ARE +2830-3980-0031-1074: WHY DO THEY NOT INVADE THE CATHOLIC PROVINCES AND PREACH THEIR DOCTRINE TO GODLESS PRINCES BISHOPS AND DOCTORS AS WE HAVE DONE BY THE HELP OF GOD +2830-3980-0032-1075: WE LOOK FOR THAT REWARD WHICH (EYE->I) HATH NOT SEEN NOR EAR HEARD NEITHER HATH ENTERED INTO THE HEART OF MAN +2830-3980-0033-1076: NOT ALL THE (GALATIANS->GALLATIONS) HAD BECOME PERVERTED +2830-3980-0034-1077: THESE MEANS CANNOT BE CONTAMINATED +2830-3980-0035-1078: THEY (REMAIN->REMAINED) DIVINE REGARDLESS OF MEN'S OPINION 
+2830-3980-0036-1079: WHEREVER THE MEANS OF GRACE ARE FOUND THERE IS THE HOLY CHURCH EVEN THOUGH ANTICHRIST REIGNS THERE +2830-3980-0037-1080: SO MUCH FOR THE TITLE OF THE EPISTLE NOW FOLLOWS THE GREETING OF THE APOSTLE VERSE THREE +2830-3980-0038-1081: GRACE BE TO YOU (AND->IN) PEACE FROM GOD THE FATHER AND FROM OUR LORD JESUS CHRIST +2830-3980-0039-1082: THE TERMS OF GRACE AND PEACE ARE COMMON TERMS WITH PAUL AND ARE NOW PRETTY WELL UNDERSTOOD +2830-3980-0040-1083: THE GREETING OF THE APOSTLE IS REFRESHING +2830-3980-0041-1084: GRACE INVOLVES THE REMISSION OF SINS PEACE AND A HAPPY CONSCIENCE +2830-3980-0042-1085: THE WORLD (BRANDS->BRINGS) THIS A PERNICIOUS DOCTRINE +2830-3980-0043-1086: EXPERIENCE PROVES THIS +2830-3980-0044-1087: HOWEVER THE GRACE AND PEACE OF GOD WILL +2830-3980-0045-1088: MEN SHOULD NOT SPECULATE ABOUT THE NATURE OF GOD +2830-3980-0046-1089: WAS IT NOT ENOUGH TO SAY FROM GOD THE FATHER +2830-3980-0047-1090: TO DO SO IS TO LOSE GOD ALTOGETHER BECAUSE GOD BECOMES INTOLERABLE WHEN WE SEEK TO MEASURE (AND TO->INTO) COMPREHEND HIS INFINITE MAJESTY +2830-3980-0048-1091: HE CAME DOWN TO EARTH LIVED AMONG MEN SUFFERED WAS CRUCIFIED AND THEN HE DIED STANDING CLEARLY BEFORE US SO THAT OUR HEARTS AND EYES MAY FASTEN UPON HIM +2830-3980-0049-1092: EMBRACE HIM AND FORGET ABOUT THE NATURE OF GOD +2830-3980-0050-1093: DID NOT CHRIST HIMSELF SAY I AM THE WAY AND THE TRUTH AND THE LIFE NO MAN COMETH UNTO THE FATHER BUT BY ME +2830-3980-0051-1094: WHEN YOU ARGUE ABOUT THE NATURE OF GOD APART FROM THE QUESTION OF JUSTIFICATION YOU MAY BE AS PROFOUND AS YOU LIKE +2830-3980-0052-1095: WE ARE TO HEAR CHRIST WHO HAS BEEN APPOINTED BY THE FATHER AS OUR DIVINE TEACHER +2830-3980-0053-1096: AT THE SAME TIME PAUL CONFIRMS OUR CREED THAT CHRIST IS VERY GOD +2830-3980-0054-1097: THAT CHRIST IS VERY GOD IS APPARENT IN THAT PAUL ASCRIBES TO HIM DIVINE POWERS EQUALLY WITH THE FATHER AS FOR INSTANCE THE POWER (TO->DOES) DISPENSE GRACE AND PEACE +2830-3980-0055-1098: TO BESTOW PEACE AND GRACE LIES IN THE PROVINCE OF GOD WHO ALONE CAN CREATE THESE BLESSINGS THE ANGELS CANNOT +2830-3980-0056-1099: OTHERWISE PAUL SHOULD HAVE WRITTEN GRACE FROM GOD THE FATHER AND PEACE FROM OUR LORD JESUS CHRIST +2830-3980-0057-1100: THE ARIANS TOOK CHRIST FOR A NOBLE AND PERFECT CREATURE SUPERIOR EVEN TO THE ANGELS BECAUSE BY HIM GOD CREATED HEAVEN AND EARTH +2830-3980-0058-1101: MOHAMMED ALSO SPEAKS HIGHLY OF CHRIST +2830-3980-0059-1102: PAUL STICKS TO HIS THEME +2830-3980-0060-1103: HE NEVER LOSES SIGHT OF THE PURPOSE OF HIS EPISTLE +2830-3980-0061-1104: NOT GOLD OR SILVER OR (PASCHAL->PASSION) LAMBS OR AN ANGEL BUT HIMSELF WHAT FOR +2830-3980-0062-1105: NOT FOR A CROWN OR A KINGDOM OR (OUR->A) GOODNESS BUT FOR OUR SINS +2830-3980-0063-1106: UNDERSCORE THESE WORDS FOR THEY ARE FULL OF COMFORT FOR SORE CONSCIENCES +2830-3980-0064-1107: HOW MAY WE OBTAIN REMISSION OF OUR SINS +2830-3980-0065-1108: PAUL ANSWERS THE MAN WHO IS NAMED JESUS CHRIST AND THE SON OF GOD GAVE HIMSELF FOR OUR SINS +2830-3980-0066-1109: SINCE CHRIST WAS GIVEN FOR OUR SINS IT STANDS TO REASON THAT THEY CANNOT BE PUT AWAY BY OUR OWN EFFORTS +2830-3980-0067-1110: THIS SENTENCE ALSO DEFINES OUR SINS AS GREAT SO GREAT IN FACT THAT THE WHOLE WORLD COULD NOT MAKE AMENDS FOR A SINGLE SIN +2830-3980-0068-1111: THE GREATNESS OF THE RANSOM CHRIST THE SON OF GOD INDICATES THIS +2830-3980-0069-1112: THE VICIOUS CHARACTER OF SIN IS BROUGHT OUT BY THE WORDS WHO GAVE HIMSELF FOR OUR SINS +2830-3980-0070-1113: BUT WE ARE CARELESS WE MAKE LIGHT OF SIN 
+2830-3980-0071-1114: WE THINK THAT BY SOME LITTLE WORK OR MERIT WE CAN DISMISS (SIN->IN) +2830-3980-0072-1115: THIS PASSAGE THEN BEARS OUT THE FACT THAT ALL MEN ARE SOLD UNDER SIN +2830-3980-0073-1116: THIS ATTITUDE SPRINGS FROM A FALSE CONCEPTION OF SIN THE CONCEPTION THAT SIN IS A SMALL MATTER EASILY (TAKEN->TAKING) CARE OF BY GOOD WORKS THAT WE MUST PRESENT OURSELVES (UNTO->INTO) GOD WITH (A->*) GOOD CONSCIENCE THAT WE MUST FEEL NO SIN BEFORE WE MAY FEEL THAT CHRIST WAS GIVEN FOR OUR SINS +2830-3980-0074-1117: (THIS ATTITUDE->THE SATITUDE) IS UNIVERSAL (AND->IN) PARTICULARLY DEVELOPED IN THOSE WHO CONSIDER THEMSELVES BETTER THAN OTHERS +2830-3980-0075-1118: BUT THE REAL SIGNIFICANCE AND COMFORT OF THE WORDS FOR OUR SINS IS LOST UPON THEM +2830-3980-0076-1119: ON THE OTHER HAND WE ARE NOT TO REGARD THEM AS SO TERRIBLE THAT WE MUST DESPAIR +2961-960-0000-497: HE PASSES ABRUPTLY FROM PERSONS TO IDEAS AND NUMBERS AND FROM IDEAS AND NUMBERS TO PERSONS FROM THE HEAVENS TO MAN FROM ASTRONOMY TO PHYSIOLOGY HE CONFUSES OR RATHER DOES NOT DISTINGUISH SUBJECT AND OBJECT FIRST AND FINAL CAUSES AND IS DREAMING OF GEOMETRICAL FIGURES LOST IN A FLUX OF SENSE +2961-960-0001-498: THE INFLUENCE (WITH->WHICH) THE TIMAEUS HAS EXERCISED UPON POSTERITY IS DUE PARTLY TO A MISUNDERSTANDING +2961-960-0002-499: IN THE SUPPOSED DEPTHS OF THIS DIALOGUE THE NEO (PLATONISTS->PLATINISTS) FOUND HIDDEN MEANINGS (AND->IN) CONNECTIONS WITH THE JEWISH AND CHRISTIAN SCRIPTURES AND OUT OF THEM THEY ELICITED DOCTRINES QUITE AT VARIANCE WITH THE SPIRIT OF PLATO +2961-960-0003-500: THEY WERE ABSORBED IN HIS THEOLOGY AND WERE UNDER THE DOMINION OF HIS NAME WHILE THAT WHICH WAS TRULY GREAT AND TRULY (CHARACTERISTIC->CORRECTORISTIC) IN HIM HIS EFFORT TO REALIZE AND CONNECT ABSTRACTIONS WAS NOT UNDERSTOOD BY THEM AT ALL +2961-960-0004-501: THERE IS NO DANGER OF THE MODERN (COMMENTATORS->COMMON TEACHERS) ON THE TIMAEUS FALLING INTO THE ABSURDITIES OF THE (NEO PLATONISTS->NEW PLATANISTS) +2961-960-0005-502: IN THE PRESENT DAY WE ARE WELL AWARE THAT AN ANCIENT PHILOSOPHER IS TO BE INTERPRETED FROM HIMSELF AND BY THE CONTEMPORARY HISTORY OF THOUGHT +2961-960-0006-503: THE FANCIES OF THE (NEO PLATONISTS->NEW PLATANISTS) ARE ONLY INTERESTING TO US BECAUSE THEY EXHIBIT A PHASE OF THE HUMAN MIND WHICH PREVAILED WIDELY IN THE FIRST CENTURIES OF THE CHRISTIAN ERA AND IS NOT WHOLLY EXTINCT IN OUR OWN DAY +2961-960-0007-504: BUT THEY HAVE NOTHING TO DO WITH THE INTERPRETATION OF PLATO AND IN SPIRIT THEY ARE OPPOSED TO HIM +2961-960-0008-505: WE DO NOT KNOW HOW PLATO WOULD HAVE ARRANGED HIS OWN (DIALOGUES->DIALECTS) OR WHETHER THE THOUGHT OF ARRANGING ANY OF THEM BESIDES THE (TWO TRILOGIES->TUTRILOGIES) WHICH HE HAS EXPRESSLY CONNECTED WAS EVER PRESENT TO HIS MIND +2961-960-0009-506: THE DIALOGUE IS PRIMARILY CONCERNED WITH THE ANIMAL CREATION INCLUDING UNDER THIS TERM THE HEAVENLY BODIES AND WITH MAN ONLY AS ONE AMONG THE ANIMALS +2961-960-0010-507: BUT HE HAS NOT AS YET (DEFINED->THE FIND) THIS INTERMEDIATE TERRITORY WHICH LIES SOMEWHERE BETWEEN MEDICINE AND MATHEMATICS AND HE WOULD HAVE FELT THAT THERE WAS AS GREAT AN IMPIETY IN RANKING THEORIES OF PHYSICS FIRST IN THE ORDER OF KNOWLEDGE AS IN PLACING THE BODY BEFORE THE SOUL +2961-960-0011-508: WITH (HERACLEITUS->HERACLITUS) HE ACKNOWLEDGES THE PERPETUAL FLUX LIKE (ANAXAGORAS->AN EXAGGERUS) HE ASSERTS THE PREDOMINANCE OF MIND ALTHOUGH ADMITTING AN ELEMENT OF NECESSITY WHICH REASON IS INCAPABLE OF SUBDUING LIKE THE (PYTHAGOREANS->PYTHAGORIANS) HE SUPPOSES THE MYSTERY OF THE WORLD TO BE 
CONTAINED IN NUMBER +2961-960-0012-509: MANY IF NOT ALL THE ELEMENTS OF THE (PRE SOCRATIC->PRIESTHOO CRADIC) PHILOSOPHY ARE INCLUDED IN THE (TIMAEUS->TIMEUS) +2961-960-0013-510: IT IS PROBABLE THAT THE RELATION OF THE IDEAS TO GOD OR OF GOD TO THE WORLD WAS DIFFERENTLY CONCEIVED BY HIM AT DIFFERENT TIMES OF HIS LIFE +2961-960-0014-511: THE IDEAS ALSO REMAIN BUT THEY HAVE BECOME TYPES IN NATURE FORMS OF MEN ANIMALS BIRDS FISHES +2961-960-0015-512: THE STYLE AND PLAN OF THE (TIMAEUS->TENEAS) DIFFER GREATLY FROM THAT OF ANY OTHER OF THE PLATONIC DIALOGUES +2961-960-0016-513: BUT PLATO HAS NOT THE SAME MASTERY OVER HIS INSTRUMENT WHICH HE EXHIBITS IN THE (PHAEDRUS->FEATURES) OR (SYMPOSIUM->SIMPOS HIM) +2961-960-0017-514: NOTHING CAN EXCEED THE BEAUTY OR ART OF (THE->*) INTRODUCTION IN WHICH (HE IS->HIS) USING WORDS AFTER HIS ACCUSTOMED MANNER +2961-960-0018-515: BUT IN THE REST OF THE WORK THE POWER OF LANGUAGE SEEMS TO FAIL HIM AND THE DRAMATIC FORM IS WHOLLY GIVEN UP +2961-960-0019-516: HE COULD WRITE IN (ONE->ONE'S) STYLE BUT NOT IN ANOTHER (AND->*) THE GREEK LANGUAGE HAD NOT AS YET BEEN FASHIONED BY ANY POET OR PHILOSOPHER TO DESCRIBE PHYSICAL PHENOMENA +2961-960-0020-517: AND HENCE WE FIND THE SAME SORT OF CLUMSINESS IN THE (TIMAEUS->TIMAIRS) OF PLATO WHICH CHARACTERIZES THE PHILOSOPHICAL POEM OF LUCRETIUS +2961-960-0021-518: THERE IS A WANT OF FLOW AND OFTEN A DEFECT OF RHYTHM THE MEANING IS SOMETIMES OBSCURE AND THERE IS A GREATER USE OF APPOSITION (AND->IN) MORE OF REPETITION THAN OCCURS IN PLATO'S EARLIER WRITINGS +2961-960-0022-519: PLATO HAD NOT THE COMMAND OF HIS MATERIALS WHICH WOULD HAVE ENABLED HIM TO PRODUCE A PERFECT WORK OF ART +2961-961-0000-520: SOCRATES BEGINS (THE TIMAEUS->TO TEARS) WITH A SUMMARY OF THE REPUBLIC +2961-961-0001-521: AND NOW HE DESIRES TO SEE THE IDEAL STATE SET IN MOTION HE WOULD LIKE TO KNOW HOW SHE BEHAVED IN SOME GREAT STRUGGLE +2961-961-0002-522: AND THEREFORE TO YOU I TURN (TIMAEUS->TO ME AS) CITIZEN OF (LOCRIS->LOCHRIS) WHO ARE AT ONCE A PHILOSOPHER (AND->IN) A STATESMAN AND TO YOU (CRITIAS->CRITIUS) WHOM ALL ATHENIANS KNOW TO BE SIMILARLY ACCOMPLISHED AND TO HERMOCRATES (WHO IS->WHOSE) ALSO FITTED BY NATURE AND EDUCATION TO SHARE IN OUR DISCOURSE +2961-961-0003-523: I WILL IF (TIMAEUS APPROVES->TO ME AS IT PROVES) I APPROVE +2961-961-0004-524: LISTEN THEN SOCRATES TO A TALE OF (SOLON'S->SILENCE) WHO BEING THE FRIEND OF (DROPIDAS MY->TROPIDAS BY) GREAT GRANDFATHER TOLD IT TO MY GRANDFATHER (CRITIAS->CRITIUS) AND HE TOLD ME +2961-961-0005-525: SOME POEMS OF (SOLON->SOLEMN) WERE RECITED BY THE BOYS +2961-961-0006-526: AND WHAT WAS THE SUBJECT OF THE POEM SAID THE PERSON WHO MADE THE REMARK +2961-961-0007-527: THE SUBJECT WAS A VERY NOBLE ONE HE DESCRIBED THE MOST FAMOUS ACTION IN WHICH THE ATHENIAN PEOPLE WERE EVER ENGAGED +2961-961-0008-528: BUT THE MEMORY OF THEIR EXPLOITS (HAS->HAD) PASSED AWAY OWING TO THE LAPSE OF TIME AND THE EXTINCTION OF THE ACTORS +2961-961-0009-529: TELL US SAID THE OTHER THE WHOLE STORY AND WHERE SOLON HEARD THE STORY +2961-961-0010-530: BUT IN EGYPT THE TRADITIONS OF OUR OWN AND OTHER LANDS ARE BY US REGISTERED FOR EVER IN OUR TEMPLES +2961-961-0011-531: THE GENEALOGIES WHICH YOU HAVE RECITED TO US OUT OF YOUR OWN (ANNALS SOLON->ANNAL SOLEMN) ARE A MERE CHILDREN'S STORY +2961-961-0012-532: FOR IN THE TIMES BEFORE THE GREAT FLOOD ATHENS WAS THE GREATEST AND BEST OF CITIES AND DID THE NOBLEST DEEDS AND HAD THE BEST CONSTITUTION OF ANY UNDER THE FACE OF HEAVEN +2961-961-0013-533: (SOLON->SOLEMN) MARVELLED AND DESIRED TO BE 
INFORMED OF THE PARTICULARS +2961-961-0014-534: NINE THOUSAND YEARS HAVE ELAPSED SINCE SHE (FOUNDED->FOUND IT) YOURS AND EIGHT THOUSAND SINCE (SHE FOUNDED->YOU FOUND IT) OURS AS OUR ANNALS RECORD +2961-961-0015-535: MANY LAWS EXIST AMONG US WHICH ARE THE COUNTERPART OF YOURS AS THEY WERE IN THE OLDEN TIME +2961-961-0016-536: I WILL BRIEFLY DESCRIBE (THEM->HIM) TO YOU AND YOU SHALL READ THE ACCOUNT OF THEM AT YOUR LEISURE IN THE SACRED REGISTERS +2961-961-0017-537: OBSERVE AGAIN WHAT CARE THE LAW TOOK IN THE PURSUIT OF WISDOM SEARCHING OUT THE DEEP THINGS OF THE WORLD AND APPLYING THEM TO THE USE OF (MAN->MEN) +2961-961-0018-538: THE MOST FAMOUS OF THEM ALL WAS THE OVERTHROW OF THE ISLAND OF ATLANTIS +2961-961-0019-539: FOR AT THE PERIL OF HER OWN EXISTENCE AND WHEN THE (OTHER->OTTER) HELLENES HAD DESERTED HER SHE REPELLED THE INVADER AND OF HER OWN ACCORD GAVE LIBERTY TO ALL THE NATIONS WITHIN THE PILLARS +2961-961-0020-540: THIS IS THE EXPLANATION OF THE SHALLOWS WHICH ARE FOUND IN THAT PART OF THE ATLANTIC OCEAN +2961-961-0021-541: BUT I WOULD NOT SPEAK AT THE TIME BECAUSE I WANTED TO REFRESH MY MEMORY +2961-961-0022-542: THEN (NOW->THOU) LET ME EXPLAIN TO YOU THE ORDER OF OUR ENTERTAINMENT FIRST TIMAEUS WHO IS A NATURAL PHILOSOPHER WILL SPEAK OF THE ORIGIN OF THE WORLD GOING DOWN TO THE CREATION OF (MAN->MEN) AND THEN I SHALL RECEIVE THE MEN WHOM HE HAS CREATED AND SOME OF WHOM WILL HAVE BEEN EDUCATED BY YOU AND INTRODUCE THEM TO YOU AS THE LOST ATHENIAN CITIZENS OF WHOM THE EGYPTIAN (RECORD->RECORDS) SPOKE +3570-5694-0000-2433: BUT ALREADY AT A POINT IN ECONOMIC EVOLUTION FAR (ANTEDATING->ANTETING) THE EMERGENCE OF THE LADY (SPECIALISED->SPECIALIZED) CONSUMPTION OF GOODS AS AN EVIDENCE OF PECUNIARY STRENGTH HAD BEGUN TO WORK OUT IN A MORE OR LESS (ELABORATE->CELEBRATE) SYSTEM +3570-5694-0001-2434: THE UTILITY OF CONSUMPTION AS AN EVIDENCE OF WEALTH IS TO BE CLASSED AS A DERIVATIVE GROWTH +3570-5694-0002-2435: SUCH CONSUMPTION AS FALLS (TO->THROUGH) THE WOMEN IS MERELY INCIDENTAL TO THEIR WORK IT IS A MEANS TO THEIR CONTINUED (LABOUR->LABOR) AND NOT (A->TO) CONSUMPTION DIRECTED TO THEIR OWN COMFORT AND (FULNESS->FULLNESS) OF LIFE +3570-5694-0003-2436: WITH A FURTHER ADVANCE (IN->AND) CULTURE THIS (TABU->TABOU) MAY (CHANGE->CHANGED) INTO SIMPLE CUSTOM OF A MORE OR LESS RIGOROUS CHARACTER BUT WHATEVER BE THE THEORETICAL BASIS OF THE DISTINCTION WHICH IS MAINTAINED WHETHER IT BE (*->AT) A (TABU->BOOT) OR A LARGER CONVENTIONALITY THE FEATURES OF THE CONVENTIONAL SCHEME OF CONSUMPTION DO NOT CHANGE EASILY +3570-5694-0004-2437: IN THE NATURE OF THINGS LUXURIES AND THE COMFORTS OF LIFE BELONG TO THE LEISURE CLASS +3570-5694-0005-2438: UNDER THE (TABU->TABOO) CERTAIN VICTUALS AND MORE PARTICULARLY CERTAIN BEVERAGES ARE STRICTLY RESERVED FOR THE USE OF THE SUPERIOR CLASS +3570-5694-0006-2439: DRUNKENNESS AND THE OTHER PATHOLOGICAL CONSEQUENCES OF THE FREE USE OF STIMULANTS THEREFORE TEND IN THEIR TURN TO BECOME HONORIFIC AS BEING A MARK AT THE SECOND REMOVE OF THE SUPERIOR STATUS OF THOSE WHO ARE ABLE TO AFFORD THE INDULGENCE +3570-5694-0007-2440: IT HAS EVEN HAPPENED THAT THE NAME FOR CERTAIN DISEASED CONDITIONS OF THE BODY ARISING FROM SUCH AN ORIGIN HAS PASSED INTO EVERYDAY SPEECH AS A SYNONYM FOR NOBLE OR GENTLE +3570-5694-0008-2441: THE CONSUMPTION OF LUXURIES IN THE TRUE SENSE IS A CONSUMPTION DIRECTED TO THE COMFORT OF THE CONSUMER HIMSELF AND IS THEREFORE A MARK OF THE MASTER +3570-5694-0009-2442: WITH MANY QUALIFICATIONS WITH MORE QUALIFICATIONS AS THE PATRIARCHAL TRADITION HAS 
GRADUALLY WEAKENED THE GENERAL RULE IS FELT TO BE RIGHT AND BINDING THAT WOMEN SHOULD CONSUME ONLY FOR THE BENEFIT OF THEIR MASTERS +3570-5694-0010-2443: THE OBJECTION OF COURSE PRESENTS ITSELF THAT EXPENDITURE ON WOMEN'S DRESS AND HOUSEHOLD PARAPHERNALIA IS AN OBVIOUS EXCEPTION TO THIS RULE BUT IT WILL APPEAR IN THE SEQUEL THAT THIS EXCEPTION IS MUCH MORE OBVIOUS THAN SUBSTANTIAL +3570-5694-0011-2444: THE CUSTOM OF FESTIVE GATHERINGS PROBABLY ORIGINATED IN MOTIVES OF CONVIVIALITY AND RELIGION THESE MOTIVES ARE ALSO PRESENT IN THE LATER DEVELOPMENT (BUT->THAT) THEY DO NOT CONTINUE TO BE THE SOLE MOTIVES +3570-5694-0012-2445: THERE IS A MORE OR LESS ELABORATE SYSTEM OF RANK AND (GRADES->GRATES) +3570-5694-0013-2446: THIS DIFFERENTIATION IS FURTHERED BY THE INHERITANCE OF WEALTH AND THE CONSEQUENT INHERITANCE OF GENTILITY +3570-5694-0014-2447: MANY OF THESE (AFFILIATED->ARE FILIATED) GENTLEMEN OF LEISURE ARE AT THE SAME TIME (LESSER MEN->LESS AMEN) OF SUBSTANCE IN THEIR OWN RIGHT SO THAT SOME OF THEM ARE SCARCELY AT ALL OTHERS ONLY PARTIALLY TO BE RATED AS VICARIOUS CONSUMERS +3570-5694-0015-2448: SO MANY OF THEM HOWEVER AS MAKE UP THE RETAINER AND HANGERS ON OF THE PATRON MAY BE CLASSED AS VICARIOUS CONSUMER WITHOUT QUALIFICATION +3570-5694-0016-2449: MANY OF THESE AGAIN AND ALSO MANY OF THE OTHER ARISTOCRACY OF LESS DEGREE HAVE IN TURN ATTACHED TO THEIR PERSONS A MORE OR LESS COMPREHENSIVE GROUP OF VICARIOUS CONSUMER IN THE PERSONS OF THEIR WIVES AND CHILDREN THEIR SERVANTS RETAINERS ET CETERA +3570-5694-0017-2450: THE WEARING OF UNIFORMS (OR->ARE) LIVERIES IMPLIES A CONSIDERABLE DEGREE OF DEPENDENCE AND MAY EVEN BE SAID TO BE A MARK OF SERVITUDE REAL OR OSTENSIBLE +3570-5694-0018-2451: THE WEARERS OF UNIFORMS AND LIVERIES MAY BE ROUGHLY DIVIDED INTO TWO CLASSES THE FREE AND THE SERVILE OR THE NOBLE AND THE IGNOBLE +3570-5694-0019-2452: BUT THE GENERAL DISTINCTION IS NOT ON THAT ACCOUNT TO BE OVERLOOKED +3570-5694-0020-2453: SO THOSE (OFFICES->OFFICERS) WHICH ARE BY RIGHT THE PROPER EMPLOYMENT OF THE LEISURE CLASS ARE NOBLE SUCH AS GOVERNMENT FIGHTING HUNTING THE CARE OF ARMS AND ACCOUTREMENTS AND THE LIKE IN SHORT THOSE WHICH MAY BE CLASSED AS OSTENSIBLY PREDATORY EMPLOYMENTS +3570-5694-0021-2454: WHENEVER AS IN THESE CASES THE MENIAL SERVICE IN QUESTION HAS TO DO DIRECTLY WITH (THE->A) PRIMARY LEISURE EMPLOYMENTS OF FIGHTING AND HUNTING IT EASILY ACQUIRES A REFLECTED HONORIFIC CHARACTER +3570-5694-0022-2455: THE LIVERY BECOMES OBNOXIOUS TO NEARLY ALL WHO ARE REQUIRED TO WEAR IT +3570-5695-0000-2456: IN A GENERAL WAY THOUGH NOT WHOLLY NOR CONSISTENTLY THESE TWO GROUPS COINCIDE +3570-5695-0001-2457: THE DEPENDENT WHO WAS FIRST DELEGATED FOR THESE DUTIES WAS THE WIFE OR THE CHIEF WIFE AND AS WOULD BE EXPECTED IN (THE->A) LATER DEVELOPMENT OF THE INSTITUTION WHEN THE NUMBER OF PERSONS BY WHOM THESE DUTIES ARE CUSTOMARILY PERFORMED GRADUALLY NARROWS THE WIFE REMAINS THE LAST +3570-5695-0002-2458: BUT AS WE DESCEND THE SOCIAL SCALE THE POINT IS PRESENTLY REACHED WHERE THE DUTIES OF (VICARIOUS->VIPEROUS) LEISURE AND CONSUMPTION DEVOLVE UPON THE WIFE ALONE +3570-5695-0003-2459: IN THE COMMUNITIES OF THE WESTERN CULTURE THIS POINT IS AT PRESENT FOUND AMONG THE LOWER MIDDLE CLASS +3570-5695-0004-2460: IF BEAUTY OR COMFORT IS ACHIEVED AND IT IS A MORE OR LESS FORTUITOUS CIRCUMSTANCE IF THEY ARE THEY MUST BE ACHIEVED BY MEANS AND METHODS THAT COMMEND THEMSELVES TO THE GREAT ECONOMIC LAW OF WASTED EFFORT +3570-5695-0005-2461: THE MAN OF THE HOUSEHOLD ALSO CAN DO SOMETHING IN THIS DIRECTION AND INDEED 
HE COMMONLY DOES BUT WITH A STILL LOWER DESCENT INTO THE LEVELS OF INDIGENCE ALONG THE MARGIN OF THE SLUMS THE MAN AND PRESENTLY ALSO THE CHILDREN VIRTUALLY CEASE TO CONSUME VALUABLE GOODS FOR APPEARANCES AND THE WOMAN REMAINS VIRTUALLY THE SOLE EXPONENT OF THE HOUSEHOLD'S PECUNIARY DECENCY +3570-5695-0006-2462: VERY MUCH OF SQUALOR AND DISCOMFORT WILL BE ENDURED BEFORE THE LAST TRINKET OR THE LAST (PRETENSE->PRETENCE) OF PECUNIARY (DECENCY IS->DECENCIES) PUT AWAY +3570-5695-0007-2463: THERE IS NO CLASS AND NO COUNTRY THAT HAS YIELDED SO ABJECTLY BEFORE THE PRESSURE OF PHYSICAL WANT AS TO DENY THEMSELVES ALL GRATIFICATION OF THIS HIGHER OR SPIRITUAL NEED +3570-5695-0008-2464: THE QUESTION IS WHICH OF THE TWO METHODS WILL MOST EFFECTIVELY REACH THE PERSONS WHOSE CONVICTIONS IT IS DESIRED TO (AFFECT->EFFECT) +3570-5695-0009-2465: EACH WILL THEREFORE SERVE ABOUT EQUALLY WELL DURING THE EARLIER STAGES OF SOCIAL GROWTH +3570-5695-0010-2466: THE MODERN ORGANIZATION OF INDUSTRY WORKS IN THE SAME DIRECTION ALSO BY ANOTHER LINE +3570-5695-0011-2467: IT IS EVIDENT THEREFORE THAT THE PRESENT TREND OF THE DEVELOPMENT IS IN THE DIRECTION OF HEIGHTENING THE UTILITY OF CONSPICUOUS CONSUMPTION AS COMPARED WITH LEISURE +3570-5695-0012-2468: IT IS ALSO NOTICEABLE THAT THE SERVICEABILITY OF CONSUMPTION AS A MEANS OF REPUTE AS WELL AS THE INSISTENCE ON IT AS AN ELEMENT OF DECENCY IS AT ITS BEST IN THOSE PORTIONS OF THE COMMUNITY WHERE THE HUMAN (CONTACT->CONDUCT) OF THE INDIVIDUAL IS WIDEST AND THE MOBILITY OF THE POPULATION IS GREATEST +3570-5695-0013-2469: CONSUMPTION BECOMES A LARGER ELEMENT IN THE STANDARD OF LIVING IN THE CITY THAN IN THE COUNTRY +3570-5695-0014-2470: AMONG THE COUNTRY POPULATION ITS (PLACE IS->PLACES) TO SOME EXTENT TAKEN BY SAVINGS AND HOME COMFORTS KNOWN THROUGH THE MEDIUM OF (NEIGHBORHOOD->NEIGHBOURHOOD) GOSSIP SUFFICIENTLY TO SERVE THE LIKE GENERAL PURPOSE OF PECUNIARY REPUTE +3570-5695-0015-2471: THE RESULT IS A GREAT MOBILITY OF THE LABOR EMPLOYED IN PRINTING PERHAPS GREATER THAN IN ANY OTHER EQUALLY WELL DEFINED AND CONSIDERABLE BODY OF WORKMEN +3570-5696-0000-2472: UNDER THE SIMPLE TEST OF EFFECTIVENESS FOR ADVERTISING WE SHOULD EXPECT TO FIND LEISURE AND THE CONSPICUOUS CONSUMPTION OF GOODS DIVIDING THE FIELD OF PECUNIARY EMULATION PRETTY EVENLY BETWEEN THEM AT THE OUTSET +3570-5696-0001-2473: BUT THE ACTUAL COURSE OF DEVELOPMENT HAS BEEN SOMEWHAT DIFFERENT FROM THIS IDEAL SCHEME LEISURE HELD THE FIRST PLACE AT THE START AND CAME TO (HOLD A->ALL THE) RANK (VERY MUCH->VERIMENT) ABOVE WASTEFUL CONSUMPTION OF GOODS BOTH AS A DIRECT EXPONENT OF WEALTH AND AS AN ELEMENT IN THE STANDARD OF DECENCY DURING THE (QUASI->COURSE I) PEACEABLE CULTURE +3570-5696-0002-2474: OTHER CIRCUMSTANCES PERMITTING THAT INSTINCT DISPOSES MEN TO LOOK WITH FAVOR UPON PRODUCTIVE EFFICIENCY AND ON WHATEVER IS OF HUMAN USE +3570-5696-0003-2475: A RECONCILIATION BETWEEN THE TWO CONFLICTING REQUIREMENTS IS (EFFECTED->AFFECTED) BY (A->*) RESORT TO MAKE BELIEVE (MANY AND->MEN IN) INTRICATE POLITE OBSERVANCES AND SOCIAL DUTIES OF A CEREMONIAL NATURE ARE DEVELOPED MANY ORGANIZATIONS ARE FOUNDED WITH SOME SPECIOUS OBJECT OF AMELIORATION EMBODIED IN THEIR OFFICIAL STYLE AND TITLE THERE IS MUCH COMING AND GOING AND A DEAL OF TALK TO THE END THAT THE (TALKERS MAY->TALK IS) NOT HAVE OCCASION TO REFLECT ON WHAT IS THE EFFECTUAL ECONOMIC VALUE OF THEIR TRAFFIC +3570-5696-0004-2476: THE (SALIENT->SAILORED) FEATURES OF THIS DEVELOPMENT OF DOMESTIC SERVICE HAVE ALREADY BEEN INDICATED +3570-5696-0005-2477: THROUGHOUT THE 
ENTIRE EVOLUTION OF CONSPICUOUS EXPENDITURE WHETHER OF GOODS OR OF SERVICES OR HUMAN LIFE RUNS THE OBVIOUS IMPLICATION THAT IN ORDER TO EFFECTUALLY MEND THE CONSUMER'S GOOD FAME IT MUST BE AN EXPENDITURE OF SUPERFLUITIES +3570-5696-0006-2478: AS USED IN THE SPEECH OF (EVERYDAY->EVERY DAY) LIFE THE WORD CARRIES AN UNDERTONE OF DEPRECATION +3570-5696-0007-2479: THE USE OF THE WORD WASTE AS A TECHNICAL TERM THEREFORE IMPLIES NO DEPRECATION OF THE MOTIVES OR OF THE ENDS SOUGHT BY THE CONSUMER UNDER THIS CANON OF CONSPICUOUS WASTE +3570-5696-0008-2480: BUT IT IS (ON OTHER->ANOTHER) GROUNDS WORTH NOTING THAT THE TERM (WASTE->WASTES) IN THE LANGUAGE OF EVERYDAY LIFE IMPLIES DEPRECATION OF WHAT IS CHARACTERIZED AS WASTEFUL +3570-5696-0009-2481: IN STRICT ACCURACY NOTHING SHOULD BE INCLUDED UNDER THE HEAD OF CONSPICUOUS WASTE BUT SUCH EXPENDITURE AS IS INCURRED ON THE GROUND OF AN INVIDIOUS PECUNIARY COMPARISON +3570-5696-0010-2482: AN ARTICLE MAY BE USEFUL AND WASTEFUL BOTH AND ITS UTILITY TO THE CONSUMER MAY BE MADE UP OF USE AND WASTE IN THE MOST VARYING PROPORTIONS +3575-170457-0000-369: AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP I LAID MY HEAD SHE FEARED FOR TIME I WAS NOT MADE BUT FOR ETERNITY +3575-170457-0001-370: WHY ARE WE TO BE DENIED EACH OTHER'S SOCIETY +3575-170457-0002-371: WHY ARE WE TO BE DIVIDED +3575-170457-0003-372: SURELY IT MUST BE BECAUSE WE ARE IN DANGER OF LOVING EACH OTHER TOO WELL OF LOSING SIGHT OF THE CREATOR (IN->AND) IDOLATRY OF THE CREATURE +3575-170457-0004-373: WE USED TO DISPUTE ABOUT POLITICS AND RELIGION +3575-170457-0005-374: SHE (A TORY AND->ATTORIAN) CLERGYMAN'S DAUGHTER WAS ALWAYS IN A MINORITY OF ONE IN OUR HOUSE OF VIOLENT (DISSENT->DESCENT) AND RADICALISM +3575-170457-0006-375: HER FEEBLE HEALTH GAVE HER HER YIELDING MANNER FOR SHE COULD NEVER OPPOSE ANY ONE WITHOUT GATHERING UP ALL HER STRENGTH FOR THE STRUGGLE +3575-170457-0007-376: HE SPOKE FRENCH PERFECTLY I HAVE BEEN TOLD WHEN NEED WAS BUT DELIGHTED USUALLY IN TALKING THE BROADEST YORKSHIRE +3575-170457-0008-377: AND SO LIFE AND DEATH HAVE DISPERSED THE CIRCLE OF VIOLENT RADICALS AND DISSENTERS INTO WHICH TWENTY YEARS AGO THE LITTLE QUIET RESOLUTE CLERGYMAN'S DAUGHTER WAS RECEIVED AND BY WHOM SHE WAS TRULY LOVED AND HONOURED +3575-170457-0009-378: JANUARY AND FEBRUARY OF EIGHTEEN THIRTY SEVEN HAD PASSED AWAY AND STILL THERE WAS NO REPLY FROM (SOUTHEY->SALVI) +3575-170457-0010-379: I AM NOT DEPRECIATING IT WHEN I SAY THAT IN THESE TIMES IT IS NOT RARE +3575-170457-0011-380: BUT IT IS NOT WITH A VIEW TO DISTINCTION THAT YOU SHOULD CULTIVATE THIS TALENT IF YOU CONSULT YOUR OWN HAPPINESS +3575-170457-0012-381: YOU WILL SAY THAT A WOMAN HAS NO NEED OF SUCH A CAUTION THERE CAN BE NO PERIL IN IT FOR HER +3575-170457-0013-382: THE MORE SHE IS ENGAGED IN HER PROPER DUTIES THE LESS LEISURE WILL SHE HAVE FOR IT EVEN AS AN ACCOMPLISHMENT AND A RECREATION +3575-170457-0014-383: TO THOSE DUTIES YOU HAVE NOT YET BEEN CALLED AND WHEN YOU ARE YOU WILL BE LESS EAGER FOR CELEBRITY +3575-170457-0015-384: BUT DO NOT SUPPOSE THAT I DISPARAGE THE GIFT WHICH YOU POSSESS NOR THAT I WOULD DISCOURAGE YOU FROM EXERCISING IT I ONLY EXHORT YOU SO TO THINK OF IT AND SO TO USE IT AS TO RENDER IT CONDUCIVE TO YOUR OWN PERMANENT GOOD +3575-170457-0016-385: FAREWELL MADAM +3575-170457-0017-386: THOUGH I MAY BE BUT AN UNGRACIOUS ADVISER YOU WILL ALLOW ME THEREFORE TO SUBSCRIBE MYSELF WITH THE BEST WISHES FOR YOUR HAPPINESS HERE AND HEREAFTER YOUR TRUE FRIEND ROBERT (SOUTHEY->SELVEY) +3575-170457-0018-387: SIR MARCH SIXTEENTH 
+3575-170457-0019-388: I (HAD->HAVE) NOT VENTURED TO HOPE FOR SUCH A REPLY SO (CONSIDERATE->CONSIDER IT) IN ITS TONE SO NOBLE IN ITS SPIRIT +3575-170457-0020-389: I KNOW THE FIRST LETTER I WROTE TO YOU WAS ALL SENSELESS TRASH FROM BEGINNING TO END BUT I AM NOT ALTOGETHER THE IDLE DREAMING BEING IT WOULD SEEM TO DENOTE +3575-170457-0021-390: I THOUGHT IT THEREFORE MY DUTY WHEN I LEFT SCHOOL TO BECOME A GOVERNESS +3575-170457-0022-391: IN THE EVENINGS I CONFESS I DO THINK BUT I NEVER TROUBLE ANY ONE ELSE WITH MY THOUGHTS +3575-170457-0023-392: I CAREFULLY AVOID ANY APPEARANCE OF PREOCCUPATION AND ECCENTRICITY WHICH MIGHT LEAD THOSE I LIVE AMONGST TO SUSPECT THE NATURE OF MY PURSUITS +3575-170457-0024-393: I DON'T ALWAYS SUCCEED FOR SOMETIMES WHEN I'M TEACHING OR SEWING I WOULD RATHER BE READING (OR->A) WRITING BUT I TRY TO DENY MYSELF AND MY FATHER'S APPROBATION AMPLY REWARDED ME FOR THE PRIVATION +3575-170457-0025-394: AGAIN I THANK YOU THIS INCIDENT I SUPPOSE WILL BE RENEWED NO MORE IF I LIVE TO BE AN OLD WOMAN I SHALL REMEMBER IT THIRTY YEARS HENCE AS A BRIGHT DREAM +3575-170457-0026-395: P S PRAY SIR EXCUSE ME FOR WRITING TO YOU A SECOND TIME I COULD NOT HELP WRITING PARTLY TO TELL YOU HOW THANKFUL I AM FOR YOUR KINDNESS AND PARTLY TO LET YOU KNOW THAT YOUR ADVICE SHALL NOT BE WASTED HOWEVER SORROWFULLY AND RELUCTANTLY IT MAY BE AT FIRST FOLLOWED (C B->*) +3575-170457-0027-396: I CANNOT DENY MYSELF THE GRATIFICATION OF INSERTING (SOUTHEY'S->SO THESE) REPLY +3575-170457-0028-397: (KESWICK->KEZWICK) MARCH TWENTY SECOND EIGHTEEN THIRTY SEVEN DEAR (MADAM->MADAME) +3575-170457-0029-398: YOUR LETTER HAS GIVEN ME GREAT PLEASURE AND I SHOULD NOT FORGIVE MYSELF IF I DID NOT TELL YOU SO +3575-170457-0030-399: OF THIS SECOND LETTER ALSO SHE SPOKE AND TOLD ME THAT IT CONTAINED AN INVITATION FOR HER TO GO AND SEE THE POET IF EVER SHE VISITED THE LAKES +3575-170457-0031-400: ON AUGUST TWENTY SEVENTH EIGHTEEN THIRTY SEVEN SHE WRITES +3575-170457-0032-401: COME COME (I AM->I'M) GETTING REALLY TIRED OF YOUR ABSENCE +3575-170457-0033-402: SATURDAY AFTER SATURDAY COMES (ROUND->AROUND) AND I CAN HAVE NO HOPE OF HEARING YOUR KNOCK AT THE DOOR AND THEN BEING TOLD THAT MISS (E->EA) IS COME OH DEAR +3575-170457-0034-403: IN THIS MONOTONOUS LIFE OF (MINE->MIND) THAT WAS A PLEASANT EVENT +3575-170457-0035-404: I WISH (IT WOULD->YOU WERE) RECUR AGAIN BUT IT WILL TAKE TWO OR THREE INTERVIEWS BEFORE THE STIFFNESS THE ESTRANGEMENT OF THIS LONG SEPARATION WILL WEAR AWAY +3575-170457-0036-405: MY EYES (FILL->FELL) WITH TEARS WHEN I CONTRAST THE BLISS OF SUCH A STATE BRIGHTENED BY HOPES OF THE FUTURE WITH THE MELANCHOLY STATE I NOW LIVE IN UNCERTAIN THAT I EVER FELT TRUE CONTRITION WANDERING IN THOUGHT (AND DEED->INDEED) LONGING FOR HOLINESS WHICH I SHALL NEVER NEVER OBTAIN SMITTEN (AT->THAT) TIMES TO THE HEART WITH THE CONVICTION THAT GHASTLY CALVINISTIC DOCTRINES ARE TRUE DARKENED (IN->AND) SHORT BY THE VERY SHADOWS OF SPIRITUAL DEATH +3575-170457-0037-406: IF CHRISTIAN PERFECTION BE NECESSARY TO SALVATION I SHALL NEVER BE SAVED MY HEART IS A VERY (HOTBED->HOT BED) FOR SINFUL THOUGHTS AND WHEN I DECIDE ON AN ACTION I SCARCELY REMEMBER TO LOOK TO MY REDEEMER FOR (*->A) DIRECTION +3575-170457-0038-407: AND MEANTIME I KNOW THE GREATNESS OF JEHOVAH I ACKNOWLEDGE THE PERFECTION OF HIS WORD I ADORE THE PURITY OF THE CHRISTIAN FAITH MY THEORY IS RIGHT MY PRACTICE HORRIBLY WRONG +3575-170457-0039-408: THE CHRISTMAS HOLIDAYS CAME AND SHE AND ANNE RETURNED TO THE PARSONAGE AND TO THAT HAPPY HOME CIRCLE IN WHICH ALONE THEIR 
NATURES EXPANDED AMONGST ALL OTHER PEOPLE THEY SHRIVELLED UP MORE OR LESS +3575-170457-0040-409: INDEED THERE WERE ONLY ONE OR TWO STRANGERS WHO COULD BE ADMITTED AMONG THE SISTERS WITHOUT PRODUCING THE SAME RESULT +3575-170457-0041-410: SHE WAS GONE OUT INTO THE VILLAGE ON SOME ERRAND WHEN AS SHE WAS DESCENDING THE STEEP STREET HER FOOT SLIPPED ON THE ICE AND SHE FELL (IT->HE) WAS DARK AND NO ONE SAW HER MISCHANCE TILL AFTER A TIME HER GROANS ATTRACTED THE ATTENTION OF A PASSER BY +3575-170457-0042-411: UNFORTUNATELY THE FRACTURE COULD NOT BE SET TILL SIX O'CLOCK THE NEXT MORNING AS NO SURGEON WAS TO BE HAD BEFORE THAT TIME AND SHE NOW LIES AT (OUR->HER) HOUSE IN A VERY DOUBTFUL AND DANGEROUS STATE +3575-170457-0043-412: HOWEVER REMEMBERING WHAT YOU TOLD ME NAMELY THAT YOU HAD COMMENDED THE MATTER TO A HIGHER DECISION THAN OURS AND THAT YOU WERE RESOLVED TO SUBMIT WITH RESIGNATION TO THAT DECISION WHATEVER IT MIGHT BE I HOLD IT MY DUTY TO YIELD ALSO AND TO BE SILENT (IT->AND) MAY BE ALL FOR THE BEST +3575-170457-0044-413: AFTER THIS DISAPPOINTMENT I NEVER DARE RECKON WITH CERTAINTY ON THE ENJOYMENT OF A PLEASURE AGAIN IT SEEMS AS IF SOME FATALITY STOOD BETWEEN YOU AND ME +3575-170457-0045-414: I AM NOT GOOD ENOUGH FOR YOU AND YOU MUST BE KEPT FROM THE CONTAMINATION OF (TOO->TWO) INTIMATE SOCIETY +3575-170457-0046-415: A GOOD (NEIGHBOUR->NEIGHBOR) OF THE BRONTES A CLEVER INTELLIGENT YORKSHIRE WOMAN WHO KEEPS A (DRUGGIST'S->DRUGGIST) SHOP IN HAWORTH (AND->*) FROM HER OCCUPATION HER EXPERIENCE AND EXCELLENT SENSE HOLDS THE POSITION OF VILLAGE (DOCTRESS->DOCTRIS) AND NURSE AND AS SUCH HAS BEEN A FRIEND IN MANY A TIME OF TRIAL AND SICKNESS AND DEATH IN THE (HOUSEHOLDS->HOUSEHOLD) ROUND TOLD ME A CHARACTERISTIC LITTLE INCIDENT CONNECTED WITH TABBY'S FRACTURED LEG +3575-170457-0047-416: TABBY HAD LIVED WITH THEM FOR TEN OR TWELVE YEARS AND WAS AS CHARLOTTE EXPRESSED IT ONE OF THE FAMILY +3575-170457-0048-417: HE REFUSED AT FIRST TO LISTEN TO THE CAREFUL ADVICE IT WAS REPUGNANT TO HIS LIBERAL NATURE +3575-170457-0049-418: THIS DECISION WAS COMMUNICATED TO THE GIRLS +3575-170457-0050-419: TABBY HAD TENDED THEM IN THEIR CHILDHOOD THEY AND NONE OTHER SHOULD TEND HER IN HER INFIRMITY (AND->IN) AGE +3575-170457-0051-420: AT TEA TIME THEY WERE SAD AND SILENT AND THE MEAL WENT AWAY UNTOUCHED BY ANY OF THE THREE +3575-170457-0052-421: SHE HAD ANOTHER WEIGHT ON HER MIND THIS CHRISTMAS +3575-170457-0053-422: BUT ANNE HAD BEGUN TO SUFFER JUST BEFORE THE HOLIDAYS AND CHARLOTTE WATCHED OVER HER YOUNGER SISTERS WITH (THE->A) JEALOUS VIGILANCE OF SOME WILD CREATURE THAT CHANGES HER VERY NATURE IF DANGER THREATENS HER YOUNG +3575-170457-0054-423: STUNG BY ANXIETY FOR THIS LITTLE SISTER SHE UPBRAIDED MISS W FOR HER FANCIED INDIFFERENCE TO ANNE'S STATE OF HEALTH +3575-170457-0055-424: STILL HER HEART HAD RECEIVED A SHOCK IN THE PERCEPTION OF ANNE'S DELICACY AND ALL THESE HOLIDAYS SHE WATCHED OVER HER WITH THE LONGING FOND ANXIETY WHICH IS SO FULL OF SUDDEN PANGS OF FEAR +3575-170457-0056-425: I DOUBT WHETHER BRANWELL WAS MAINTAINING HIMSELF AT THIS TIME +3729-6852-0000-1660: TO CELEBRATE THE ARRIVAL OF HER SON (SILVIA->SYLVIA) GAVE A SPLENDID SUPPER TO WHICH SHE HAD INVITED ALL HER RELATIVES AND IT WAS A GOOD OPPORTUNITY FOR ME TO MAKE THEIR ACQUAINTANCE +3729-6852-0001-1661: WITHOUT SAYING IT POSITIVELY SHE MADE ME UNDERSTAND THAT BEING HERSELF AN ILLUSTRIOUS MEMBER OF THE REPUBLIC OF LETTERS SHE WAS WELL AWARE THAT SHE WAS SPEAKING TO AN INSECT +3729-6852-0002-1662: IN ORDER TO PLEASE HER I SPOKE TO HER OF THE 
(ABBE CONTI->ABBEY KANTI) AND I HAD OCCASION TO QUOTE TWO LINES OF THAT PROFOUND WRITER +3729-6852-0003-1663: (MADAM->MADAME) CORRECTED ME WITH A PATRONIZING AIR FOR MY PRONUNCIATION OF THE WORD (SCEVRA->SCAFFRA) WHICH MEANS DIVIDED SAYING THAT IT OUGHT TO BE PRONOUNCED (SCEURA->SKURA) AND SHE ADDED THAT I OUGHT TO BE VERY GLAD TO HAVE LEARNED SO MUCH ON THE FIRST DAY OF MY ARRIVAL IN PARIS TELLING ME THAT IT WOULD BE AN IMPORTANT DAY IN MY LIFE +3729-6852-0004-1664: HER FACE WAS AN ENIGMA FOR IT INSPIRED (EVERYONE->EVERY ONE) WITH THE WARMEST SYMPATHY AND YET IF YOU EXAMINED IT ATTENTIVELY THERE WAS NOT ONE BEAUTIFUL FEATURE SHE COULD NOT BE CALLED HANDSOME BUT NO ONE COULD HAVE THOUGHT HER UGLY +3729-6852-0005-1665: (SILVIA->SYLVIA) WAS THE ADORATION OF FRANCE AND HER TALENT WAS THE REAL SUPPORT OF ALL THE COMEDIES WHICH THE GREATEST AUTHORS WROTE FOR HER ESPECIALLY OF THE PLAYS OF MARIVAUX FOR WITHOUT HER HIS COMEDIES WOULD NEVER HAVE GONE TO POSTERITY +3729-6852-0006-1666: (SILVIA->SYLVIA) DID NOT THINK THAT HER GOOD CONDUCT WAS A MERIT FOR SHE KNEW THAT SHE WAS VIRTUOUS ONLY BECAUSE HER SELF LOVE COMPELLED HER TO BE SO AND SHE NEVER EXHIBITED ANY PRIDE OR ASSUMED ANY SUPERIORITY TOWARDS HER THEATRICAL SISTERS ALTHOUGH SATISFIED TO SHINE BY THEIR TALENT OR THEIR BEAUTY THEY CARED LITTLE ABOUT RENDERING THEMSELVES CONSPICUOUS BY THEIR VIRTUE +3729-6852-0007-1667: TWO YEARS BEFORE HER DEATH I SAW HER PERFORM THE CHARACTER OF MARIANNE IN THE COMEDY OF (MARIVAUX->MARAVAUX) AND IN SPITE OF HER AGE AND DECLINING HEALTH THE ILLUSION WAS COMPLETE +3729-6852-0008-1668: SHE WAS HONOURABLY BURIED IN THE CHURCH OF SAINT (SAUVEUR->SEVERE) WITHOUT THE SLIGHTEST OPPOSITION FROM THE VENERABLE PRIEST WHO FAR FROM SHARING THE ANTI (CHRISTAIN->CHRISTIAN) INTOLERANCY OF THE CLERGY IN GENERAL SAID THAT HER PROFESSION AS AN ACTRESS HAD NOT HINDERED HER FROM BEING A GOOD CHRISTIAN AND THAT THE EARTH WAS (THE->A) COMMON MOTHER OF ALL HUMAN BEINGS AS JESUS CHRIST HAD BEEN THE SAVIOUR OF ALL MANKIND +3729-6852-0009-1669: YOU WILL FORGIVE ME DEAR READER IF I HAVE MADE YOU ATTEND THE FUNERAL OF (SILVIA->SYLVIA) TEN YEARS BEFORE HER DEATH BELIEVE ME I HAVE NO INTENTION OF PERFORMING A MIRACLE YOU MAY CONSOLE YOURSELF WITH THE IDEA THAT I SHALL SPARE YOU THAT UNPLEASANT TASK WHEN POOR (SILVIA->SYLVIA) DIES +3729-6852-0010-1670: I NEVER HAD ANY FAMILY +3729-6852-0011-1671: I HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I HAVE FORGOTTEN IT SINCE I HAVE BEEN IN SERVICE +3729-6852-0012-1672: I SHALL CALL YOU (ESPRIT->A SPREE) +3729-6852-0013-1673: YOU DO ME A GREAT HONOUR +3729-6852-0014-1674: HERE GO AND GET ME CHANGE FOR A LOUIS I HAVE IT SIR +3729-6852-0015-1675: AT YOUR SERVICE SIR +3729-6852-0016-1676: MADAME QUINSON BESIDES CAN ANSWER YOUR (ENQUIRIES->INQUIRIES) +3729-6852-0017-1677: I SEE A QUANTITY OF CHAIRS FOR HIRE AT THE RATE OF ONE (SOU->SOUS) MEN READING THE NEWSPAPER UNDER THE SHADE OF THE TREES GIRLS AND MEN BREAKFASTING EITHER ALONE OR IN COMPANY WAITERS WHO WERE RAPIDLY GOING UP AND DOWN A NARROW STAIRCASE HIDDEN UNDER THE FOLIAGE +3729-6852-0018-1678: I SIT DOWN AT A SMALL TABLE A WAITER COMES IMMEDIATELY TO (ENQUIRE->INQUIRE) MY WISHES +3729-6852-0019-1679: I TELL HIM TO GIVE ME SOME COFFEE IF IT IS GOOD +3729-6852-0020-1680: THEN TURNING TOWARDS ME HE SAYS THAT I LOOK LIKE A FOREIGNER AND WHEN I SAY THAT I AM AN ITALIAN HE BEGINS TO SPEAK TO ME OF THE COURT (OF->*) THE CITY OF THE THEATRES AND AT LAST HE OFFERS TO ACCOMPANY ME EVERYWHERE +3729-6852-0021-1681: I THANK HIM AND TAKE MY LEAVE 
+3729-6852-0022-1682: I ADDRESS HIM IN ITALIAN AND HE ANSWERS VERY WITTILY BUT HIS WAY OF SPEAKING MAKES ME SMILE AND I TELL HIM WHY +3729-6852-0023-1683: MY REMARK PLEASES HIM BUT I SOON PROVE TO HIM THAT IT IS NOT THE RIGHT WAY TO SPEAK HOWEVER PERFECT MAY HAVE BEEN THE LANGUAGE OF THAT ANCIENT WRITER +3729-6852-0024-1684: I SEE A CROWD IN ONE CORNER OF THE GARDEN EVERYBODY STANDING STILL AND LOOKING UP +3729-6852-0025-1685: IS THERE NOT A MERIDIAN EVERYWHERE +3729-6852-0026-1686: YES BUT THE MERIDIAN OF THE PALAIS ROYAL IS THE MOST EXACT +3729-6852-0027-1687: THAT IS TRUE (BADAUDERIE->BADR'D GREE) +3729-6852-0028-1688: ALL THESE HONEST PERSONS ARE WAITING THEIR TURN TO GET THEIR SNUFF BOXES FILLED +3729-6852-0029-1689: IT IS SOLD EVERYWHERE BUT FOR THE LAST THREE WEEKS NOBODY WILL USE ANY SNUFF BUT THAT SOLD AT THE (CIVET->SAVEETTE) CAT +3729-6852-0030-1690: IS IT BETTER THAN ANYWHERE ELSE +3729-6852-0031-1691: BUT HOW DID SHE MANAGE TO RENDER IT SO FASHIONABLE +3729-6852-0032-1692: SIMPLY BY STOPPING HER CARRIAGE TWO OR THREE TIMES BEFORE THE SHOP TO HAVE HER SNUFF BOX FILLED AND BY SAYING ALOUD TO THE YOUNG GIRL WHO HANDED BACK THE BOX THAT HER SNUFF WAS THE VERY BEST IN PARIS +3729-6852-0033-1693: YOU ARE NOW IN THE ONLY COUNTRY IN THE WORLD WHERE WIT CAN MAKE A FORTUNE BY SELLING EITHER A GENUINE OR A FALSE ARTICLE IN THE FIRST CASE IT RECEIVES THE WELCOME OF INTELLIGENT AND TALENTED PEOPLE AND IN THE SECOND FOOLS ARE ALWAYS READY TO REWARD IT FOR SILLINESS IS TRULY A CHARACTERISTIC OF THE PEOPLE HERE AND HOWEVER WONDERFUL IT MAY APPEAR SILLINESS IS THE DAUGHTER OF WIT +3729-6852-0034-1694: LET A MAN RUN AND EVERYBODY WILL RUN AFTER HIM THE CROWD WILL NOT STOP UNLESS THE MAN IS PROVED TO BE MAD BUT TO PROVE IT IS INDEED A DIFFICULT TASK BECAUSE WE HAVE A CROWD OF MEN WHO MAD FROM THEIR BIRTH ARE STILL CONSIDERED WISE +3729-6852-0035-1695: IT SEEMS TO ME I REPLIED THAT SUCH APPROVAL SUCH RATIFICATION OF THE OPINION EXPRESSED BY THE KING THE PRINCES OF THE BLOOD ET CETERA IS RATHER A PROOF OF THE AFFECTION FELT FOR THEM BY THE NATION FOR THE FRENCH CARRY THAT AFFECTION TO SUCH AN EXTENT THAT THEY BELIEVE THEM INFALLIBLE +3729-6852-0036-1696: WHEN THE KING COMES TO PARIS EVERYBODY CALLS OUT VIVE (LE ROI->LAUROI) +3729-6852-0037-1697: SHE INTRODUCED ME TO ALL HER GUESTS AND GAVE ME SOME PARTICULARS RESPECTING EVERY ONE OF THEM +3729-6852-0038-1698: WHAT SIR I SAID TO HIM AM I FORTUNATE ENOUGH TO SEE YOU +3729-6852-0039-1699: HE HIMSELF RECITED THE SAME PASSAGE IN FRENCH AND POLITELY POINTED OUT THE PARTS IN WHICH HE THOUGHT THAT I HAD IMPROVED ON THE ORIGINAL +3729-6852-0040-1700: FOR THE FIRST DAY SIR I THINK THAT WHAT YOU HAVE DONE GIVES GREAT HOPES OF YOU AND WITHOUT ANY DOUBT YOU WILL MAKE RAPID PROGRESS +3729-6852-0041-1701: I BELIEVE IT SIR AND THAT IS WHAT I FEAR THEREFORE THE PRINCIPAL OBJECT OF MY VISIT HERE IS TO DEVOTE MYSELF ENTIRELY TO THE STUDY OF THE FRENCH LANGUAGE +3729-6852-0042-1702: I AM A VERY UNPLEASANT PUPIL ALWAYS ASKING QUESTIONS CURIOUS TROUBLESOME INSATIABLE AND EVEN SUPPOSING THAT I COULD MEET WITH THE TEACHER I REQUIRE I AM AFRAID I AM NOT RICH ENOUGH TO PAY HIM +3729-6852-0043-1703: I RESIDE IN THE (MARAIS RUE->MARAY GRUE) DE (DOUZE PORTES->DUSPORT) +3729-6852-0044-1704: I WILL MAKE YOU TRANSLATE THEM INTO FRENCH AND YOU NEED NOT BE AFRAID OF MY FINDING YOU INSATIABLE +3729-6852-0045-1705: HE HAD A GOOD APPETITE COULD TELL A GOOD STORY WITHOUT LAUGHING (WAS->WITH) CELEBRATED FOR HIS WITTY REPARTEES AND HIS SOCIABLE MANNERS BUT HE SPENT HIS LIFE AT HOME 
SELDOM GOING OUT AND SEEING HARDLY (ANYONE->ANY ONE) BECAUSE HE ALWAYS HAD A PIPE IN HIS MOUTH AND WAS SURROUNDED BY AT LEAST TWENTY CATS WITH WHICH HE WOULD AMUSE HIMSELF ALL DAY +3729-6852-0046-1706: HIS HOUSEKEEPER HAD THE MANAGEMENT OF EVERYTHING SHE NEVER ALLOWED HIM TO BE IN NEED OF ANYTHING AND SHE GAVE NO ACCOUNT OF HIS MONEY WHICH SHE KEPT ALTOGETHER BECAUSE HE NEVER ASKED HER TO RENDER ANY ACCOUNTS +4077-13751-0000-1258: ON THE SIXTH OF APRIL EIGHTEEN THIRTY THE CHURCH OF JESUS CHRIST OF LATTER DAY SAINTS WAS (FORMALLY->FORMERLY) ORGANIZED AND THUS TOOK ON A LEGAL EXISTENCE +4077-13751-0001-1259: ITS ORIGIN WAS SMALL A GERM AN INSIGNIFICANT SEED HARDLY TO BE THOUGHT OF AS LIKELY TO AROUSE OPPOSITION +4077-13751-0002-1260: INSTEAD OF BUT SIX REGULARLY AFFILIATED MEMBERS AND AT MOST TWO SCORE OF ADHERENTS THE ORGANIZATION NUMBERS (TODAY->TO DAY) MANY HUNDRED THOUSAND SOULS +4077-13751-0003-1261: IN PLACE (OF->HAVE) A SINGLE HAMLET IN THE SMALLEST CORNER OF WHICH THE MEMBERS COULD HAVE CONGREGATED THERE NOW ARE ABOUT SEVENTY STAKES OF ZION AND ABOUT SEVEN HUNDRED ORGANIZED WARDS EACH WARD AND STAKE WITH ITS FULL COMPLEMENT OF OFFICERS AND PRIESTHOOD ORGANIZATIONS +4077-13751-0004-1262: THE (PRACTISE->PRACTICE) OF GATHERING ITS PROSELYTES INTO ONE PLACE PREVENTS THE BUILDING UP AND STRENGTHENING OF FOREIGN BRANCHES AND INASMUCH AS EXTENSIVE AND STRONG ORGANIZATIONS ARE SELDOM MET WITH ABROAD VERY ERRONEOUS IDEAS EXIST CONCERNING THE STRENGTH OF THE CHURCH +4077-13751-0005-1263: NEVERTHELESS THE MUSTARD SEED AMONG THE SMALLEST OF ALL (SEEDS->SEATS) HAS ATTAINED (THE->THAT) PROPORTIONS OF A TREE AND THE BIRDS OF THE AIR ARE NESTING IN ITS BRANCHES THE ACORN IS NOW (AN->IN) OAK OFFERING PROTECTION AND THE SWEETS OF SATISFACTION TO EVERY EARNEST PILGRIM JOURNEYING ITS WAY (FOR->FIR) TRUTH +4077-13751-0006-1264: THEIR EYES WERE FROM THE FIRST TURNED IN ANTICIPATION TOWARD THE EVENING SUN NOT MERELY THAT THE WORK OF (PROSELYTING->PROSELY) SHOULD BE CARRIED ON IN THE WEST BUT THAT THE HEADQUARTERS OF THE CHURCH SHOULD BE (THERE->THEIR) ESTABLISHED +4077-13751-0007-1265: THE BOOK (OF->O) MORMON HAD TAUGHT THE PEOPLE THE TRUE ORIGIN AND DESTINY OF THE AMERICAN INDIANS AND TOWARD THIS DARK SKINNED REMNANT OF A ONCE MIGHTY PEOPLE THE MISSIONARIES OF MORMONISM EARLY TURNED THEIR EYES AND WITH THEIR EYES WENT THEIR HEARTS AND THEIR HOPES +4077-13751-0008-1266: IT IS NOTABLE THAT THE INDIAN TRIBES HAVE (GENERALLY->GERALLY) REGARDED (THE->THEIR) RELIGION OF THE LATTER DAY SAINTS WITH FAVOR SEEING IN THE BOOK OF MORMON STRIKING AGREEMENT WITH THEIR OWN TRADITIONS +4077-13751-0009-1267: THE FIRST WELL ESTABLISHED SEAT OF THE CHURCH WAS IN THE PRETTY LITTLE TOWN OF (KIRTLAND->CURTLIN) OHIO ALMOST WITHIN SIGHT OF LAKE ERIE AND HERE SOON ROSE THE FIRST TEMPLE OF MODERN TIMES +4077-13751-0010-1268: TO THE FERVENT LATTER DAY SAINT A TEMPLE IS NOT SIMPLY A CHURCH BUILDING A HOUSE FOR RELIGIOUS ASSEMBLY +4077-13751-0011-1269: SOON THOUSANDS OF CONVERTS HAD RENTED OR PURCHASED HOMES IN MISSOURI INDEPENDENCE JACKSON COUNTY BEING THEIR (CENTER->CENTRE) BUT FROM THE FIRST THEY WERE UNPOPULAR AMONG THE (MISSOURIANS->MISSOURIENS) +4077-13751-0012-1270: THE LIEUTENANT GOVERNOR (LILBURN->LITTLE BURN) W BOGGS AFTERWARD GOVERNOR WAS A PRONOUNCED MORMON HATER AND THROUGHOUT THE PERIOD OF THE TROUBLES HE (MANIFESTED->MANIFEST HIS) SYMPATHY WITH THE PERSECUTORS +4077-13751-0013-1271: THEIR SUFFERINGS HAVE NEVER YET BEEN FITLY CHRONICLED BY HUMAN SCRIBE +4077-13751-0014-1272: MAKING THEIR WAY ACROSS THE RIVER MOST OF THE 
REFUGEES FOUND SHELTER AMONG THE MORE HOSPITABLE PEOPLE OF CLAY COUNTY AND AFTERWARD ESTABLISHED THEMSELVES IN (CALDWELL->CAULDWELL) COUNTY THEREIN FOUNDING THE CITY OF FAR WEST +4077-13751-0015-1273: A SMALL SETTLEMENT HAD BEEN FOUNDED BY MORMON FAMILIES ON SHOAL CREEK AND HERE ON THE THIRTIETH OF OCTOBER EIGHTEEN THIRTY EIGHT A COMPANY OF TWO HUNDRED AND FORTY FELL UPON THE HAPLESS SETTLERS AND BUTCHERED A SCORE +4077-13751-0016-1274: BE IT SAID TO THE HONOR OF SOME OF THE OFFICERS ENTRUSTED WITH THE TERRIBLE COMMISSION THAT WHEN THEY LEARNED ITS TRUE SIGNIFICANCE THEY (RESIGNED->RESIGN) THEIR AUTHORITY RATHER THAN HAVE ANYTHING TO DO WITH WHAT THEY DESIGNATED A COLD BLOODED BUTCHERY +4077-13751-0017-1275: OH WHAT A RECORD TO READ WHAT A PICTURE TO GAZE UPON HOW AWFUL THE FACT +4077-13751-0018-1276: AMERICAN (SCHOOL BOYS->SCHOOLBOYS) READ WITH EMOTIONS OF HORROR OF THE ALBIGENSES DRIVEN BEATEN AND KILLED WITH A (PAPAL->PEPPEL) LEGATE DIRECTING THE BUTCHERY AND OF THE (VAUDOIS->FAUDOIS) HUNTED AND HOUNDED LIKE BEASTS AS THE EFFECT OF A ROYAL DECREE AND THEY YET SHALL READ IN THE HISTORY OF THEIR OWN COUNTRY OF SCENES AS TERRIBLE AS THESE IN THE EXHIBITION OF INJUSTICE AND INHUMAN HATE +4077-13751-0019-1277: WHO BEGAN THE QUARREL WAS IT THE MORMONS +4077-13751-0020-1278: AS (A SAMPLE->THE SABLE) OF THE PRESS COMMENTS AGAINST THE BRUTALITY OF THE MISSOURIANS I QUOTE A PARAGRAPH FROM THE (QUINCY->QUINCEY) ARGUS MARCH SIXTEENTH EIGHTEEN THIRTY NINE +4077-13751-0021-1279: IT WILL BE OBSERVED THAT AN ORGANIZED MOB AIDED BY MANY OF THE CIVIL AND MILITARY OFFICERS OF MISSOURI WITH GOVERNOR BOGGS AT THEIR HEAD HAVE BEEN THE PROMINENT ACTORS IN THIS BUSINESS INCITED TOO IT APPEARS AGAINST THE MORMONS BY POLITICAL HATRED AND BY THE ADDITIONAL MOTIVES OF PLUNDER AND REVENGE +4077-13754-0000-1241: THE ARMY FOUND THE PEOPLE IN POVERTY AND LEFT THEM IN COMPARATIVE WEALTH +4077-13754-0001-1242: BUT A WORD FURTHER CONCERNING THE EXPEDITION IN GENERAL +4077-13754-0002-1243: IT WAS THROUGH FLOYD'S ADVICE THAT (BUCHANAN ORDERED->YOU CANNOT ORDER) THE MILITARY EXPEDITION TO UTAH OSTENSIBLY TO INSTALL CERTAIN FEDERAL OFFICIALS AND TO REPRESS AN ALLEGED INFANTILE REBELLION WHICH IN FACT HAD NEVER COME INTO EXISTENCE BUT IN REALITY TO FURTHER THE (INTERESTS->ENTRANCE) OF THE SECESSIONISTS +4077-13754-0003-1244: MOREOVER HAD THE PEOPLE BEEN INCLINED TO REBELLION WHAT (GREATER->GREAT) OPPORTUNITY COULD THEY HAVE WISHED +4077-13754-0004-1245: ALREADY A NORTH AND (A->THE) SOUTH WERE TALKED OF WHY NOT SET UP ALSO (A->*) WEST +4077-13754-0005-1246: THEY KNEW NO NORTH NO SOUTH NO EAST NO WEST THEY STOOD POSITIVELY BY THE CONSTITUTION AND WOULD HAVE NOTHING TO DO IN THE BLOODY STRIFE BETWEEN BROTHERS UNLESS INDEED THEY WERE SUMMONED BY THE AUTHORITY TO WHICH THEY HAD ALREADY ONCE LOYALLY RESPONDED TO FURNISH MEN (AND->IN) ARMS FOR (THEIR->THE) COUNTRY'S NEED +4077-13754-0006-1247: WHAT THE LATTER DAY (SAINTS->SAYS) CALL CELESTIAL MARRIAGE IS CHARACTERISTIC OF THE CHURCH AND IS IN VERY GENERAL (PRACTISE->PRACTICE) BUT OF CELESTIAL MARRIAGE PLURALITY OF WIVES WAS AN INCIDENT NEVER AN ESSENTIAL +4077-13754-0007-1248: WE BELIEVE IN A LITERAL RESURRECTION AND AN ACTUAL HEREAFTER IN WHICH FUTURE (STATE->STATES) SHALL BE RECOGNIZED EVERY SANCTIFIED AND AUTHORIZED RELATIONSHIP EXISTING HERE ON EARTH OF PARENT AND CHILD (BROTHER AND->BRETHREN) SISTER HUSBAND AND WIFE +4077-13754-0008-1249: IT HAS BEEN MY PRIVILEGE TO TREAD THE SOIL OF MANY LANDS TO OBSERVE THE CUSTOMS AND STUDY THE HABITS OF MORE NATIONS THAN ONE AND I HAVE YET 
(TO FIND->DEFINED) THE PLACE AND MEET THE PEOPLE WHERE AND WITH WHOM THE PURITY OF MAN AND WOMAN IS HELD MORE PRECIOUS THAN AMONG THE MALIGNED MORMONS IN THE MOUNTAIN VALLEYS OF THE WEST +4077-13754-0009-1250: AT THE INCEPTION OF (PLURAL->PEARL) MARRIAGE AMONG THE LATTER DAY SAINTS THERE WAS NO LAW NATIONAL OR STATE AGAINST ITS (PRACTISE->PRACTICE) +4077-13754-0010-1251: IN EIGHTEEN SIXTY TWO A LAW WAS ENACTED WITH (THE->A) PURPOSE OF SUPPRESSING (PLURAL->POOR) MARRIAGE AND AS HAD BEEN PREDICTED IN THE NATIONAL SENATE PRIOR TO ITS PASSAGE IT LAY FOR MANY YEARS A DEAD LETTER +4077-13754-0011-1252: FEDERAL JUDGES AND UNITED STATES ATTORNEYS IN (UTAH->NEW TOP) WHO WERE NOT (MORMONS->MORE MEN'S) NOR LOVERS OF MORMONISM REFUSED TO ENTERTAIN COMPLAINTS OR PROSECUTE CASES UNDER THE LAW BECAUSE OF ITS MANIFEST INJUSTICE AND INADEQUACY +4077-13754-0012-1253: THIS MEANT THAT FOR AN ALLEGED (MISDEMEANOR->MISDEMEANOUR) FOR WHICH CONGRESS PRESCRIBED A MAXIMUM PENALTY OF SIX MONTHS IMPRISONMENT AND A FINE OF THREE HUNDRED DOLLARS A MAN MIGHT BE IMPRISONED FOR LIFE (AYE->I) FOR MANY TERMS OF A MAN'S NATURAL LIFE DID THE COURT'S POWER TO ENFORCE ITS SENTENCES EXTEND SO FAR AND MIGHT BE FINED MILLIONS OF DOLLARS +4077-13754-0013-1254: BEFORE THIS (TRAVESTY->TRAVASTY) ON THE ADMINISTRATION OF LAW COULD BE BROUGHT BEFORE THE COURT OF LAST RESORT AND THERE (MEET->MET) WITH THE REVERSAL AND REBUKE IT DESERVED MEN WERE IMPRISONED UNDER (SENTENCES->SENTENCE) OF MANY YEARS DURATION +4077-13754-0014-1255: THE PEOPLE CONTESTED THESE MEASURES ONE BY ONE IN THE COURTS PRESENTING IN CASE AFTER CASE THE DIFFERENT PHASES OF THE SUBJECT AND URGING THE UNCONSTITUTIONALITY OF THE MEASURE +4077-13754-0015-1256: THEN THE CHURCH WAS DISINCORPORATED AND ITS PROPERTY BOTH REAL AND PERSONAL CONFISCATED AND (ESCHEATED->ISTIATED) TO THE GOVERNMENT OF THE UNITED STATES AND ALTHOUGH THE PERSONAL PROPERTY WAS SOON RESTORED REAL ESTATE OF GREAT VALUE LONG LAY IN THE HANDS OF THE (COURT'S->COURTS) RECEIVER AND THE MORMON CHURCH HAD TO PAY THE NATIONAL GOVERNMENT HIGH RENTAL ON ITS OWN PROPERTY +4077-13754-0016-1257: AND SO THE STORY OF MORMONISM RUNS ON ITS FINALE HAS NOT YET BEEN WRITTEN THE CURRENT PRESS PRESENTS CONTINUOUSLY NEW STAGES OF ITS PROGRESS NEW DEVELOPMENTS OF ITS PLAN +4446-2271-0000-1133: (MAINHALL->MAIN HALL) LIKED ALEXANDER BECAUSE HE WAS AN ENGINEER +4446-2271-0001-1134: (HE HAD->WE NOT) PRECONCEIVED IDEAS ABOUT EVERYTHING AND HIS IDEA ABOUT AMERICANS WAS THAT THEY SHOULD BE ENGINEERS OR MECHANICS +4446-2271-0002-1135: (IT'S->ITS) TREMENDOUSLY WELL PUT ON TOO +4446-2271-0003-1136: IT'S BEEN ON ONLY TWO WEEKS AND I'VE BEEN HALF A DOZEN TIMES ALREADY +4446-2271-0004-1137: DO YOU KNOW ALEXANDER (MAINHALL->MAIN HALL) LOOKED WITH PERPLEXITY UP INTO THE TOP OF THE HANSOM AND RUBBED HIS PINK CHEEK WITH HIS GLOVED FINGER DO YOU KNOW I SOMETIMES THINK OF TAKING TO CRITICISM SERIOUSLY MYSELF +4446-2271-0005-1138: SHE SAVES HER HAND TOO (SHE'S AT->SHE SAID) HER BEST IN THE SECOND ACT +4446-2271-0006-1139: HE'S BEEN WANTING TO MARRY HILDA THESE THREE YEARS AND MORE +4446-2271-0007-1140: SHE DOESN'T TAKE UP WITH ANYBODY YOU KNOW +4446-2271-0008-1141: IRENE (BURGOYNE->WERE GOING) ONE OF HER FAMILY TOLD ME IN CONFIDENCE THAT THERE WAS A ROMANCE SOMEWHERE BACK IN THE BEGINNING +4446-2271-0009-1142: (MAINHALL VOUCHED->MEANHAVED) FOR HER CONSTANCY WITH A LOFTINESS THAT MADE ALEXANDER SMILE EVEN WHILE A KIND OF RAPID EXCITEMENT WAS TINGLING THROUGH HIM +4446-2271-0010-1143: HE'S ANOTHER WHO'S AWFULLY KEEN ABOUT HER LET ME INTRODUCE YOU 
+4446-2271-0011-1144: SIR HARRY (TOWNE->TOWN) MISTER BARTLEY ALEXANDER THE AMERICAN ENGINEER +4446-2271-0012-1145: I SAY SIR HARRY THE LITTLE (GIRL'S->GIRLS) GOING FAMOUSLY TO NIGHT ISN'T SHE +4446-2271-0013-1146: (DO->*) YOU KNOW I THOUGHT THE DANCE (A BIT->OF GOOD) CONSCIOUS TO NIGHT FOR THE FIRST TIME +4446-2271-0014-1147: (WESTMERE->WESTMARE) AND I WERE BACK AFTER THE FIRST ACT AND WE THOUGHT SHE SEEMED QUITE UNCERTAIN OF HERSELF +4446-2271-0015-1148: A LITTLE ATTACK OF NERVES POSSIBLY +4446-2271-0016-1149: HE WAS BEGINNING TO FEEL (A->THE) KEEN INTEREST IN THE SLENDER BAREFOOT DONKEY GIRL WHO SLIPPED IN AND OUT OF THE PLAY SINGING LIKE SOME ONE WINDING THROUGH A HILLY FIELD +4446-2271-0017-1150: ONE NIGHT WHEN HE AND WINIFRED WERE SITTING TOGETHER ON THE BRIDGE HE TOLD HER (THAT->THE) THINGS HAD HAPPENED WHILE HE WAS STUDYING ABROAD THAT HE WAS SORRY FOR ONE THING IN PARTICULAR AND HE ASKED HER WHETHER SHE THOUGHT SHE OUGHT TO KNOW ABOUT THEM +4446-2271-0018-1151: SHE CONSIDERED (*->FOR) A MOMENT AND THEN SAID NO I THINK NOT (THOUGH->THE WAY) I AM GLAD YOU ASK ME +4446-2271-0019-1152: AFTER THAT IT WAS EASY TO FORGET ACTUALLY TO FORGET +4446-2271-0020-1153: OF COURSE HE REFLECTED SHE ALWAYS HAD THAT COMBINATION OF SOMETHING HOMELY AND SENSIBLE AND SOMETHING UTTERLY WILD AND DAFT +4446-2271-0021-1154: SHE MUST CARE ABOUT THE THEATRE A GREAT DEAL MORE THAN SHE USED TO +4446-2271-0022-1155: I'M GLAD SHE'S HELD HER OWN (SINCE->SEN) +4446-2271-0023-1156: AFTER ALL WE WERE AWFULLY YOUNG +4446-2271-0024-1157: I SHOULDN'T WONDER IF SHE COULD LAUGH ABOUT IT WITH ME NOW +4446-2273-0000-1158: HILDA WAS VERY NICE TO HIM AND HE SAT ON THE EDGE OF HIS CHAIR FLUSHED WITH HIS CONVERSATIONAL EFFORTS AND MOVING HIS CHIN ABOUT NERVOUSLY OVER HIS HIGH COLLAR +4446-2273-0001-1159: THEY ASKED HIM TO COME TO SEE THEM IN CHELSEA AND THEY SPOKE VERY TENDERLY OF HILDA +4446-2273-0002-1160: LAMB WOULDN'T CARE A GREAT DEAL ABOUT MANY OF THEM I FANCY +4446-2273-0003-1161: WHEN BARTLEY ARRIVED AT BEDFORD SQUARE ON SUNDAY EVENING MARIE THE PRETTY LITTLE FRENCH GIRL MET HIM AT THE DOOR AND CONDUCTED HIM UPSTAIRS +4446-2273-0004-1162: I SHOULD NEVER HAVE ASKED YOU IF MOLLY HAD BEEN HERE FOR I REMEMBER YOU DON'T LIKE ENGLISH COOKERY +4446-2273-0005-1163: I HAVEN'T HAD A CHANCE YET TO TELL YOU WHAT A JOLLY LITTLE PLACE I THINK THIS IS +4446-2273-0006-1164: THEY ARE ALL SKETCHES MADE ABOUT THE (VILLA D'ESTE->VILIDESSEA) YOU SEE +4446-2273-0007-1165: THOSE FELLOWS ARE ALL VERY LOYAL EVEN (MAINHALL->MAIN HALL) +4446-2273-0008-1166: I'VE MANAGED TO SAVE SOMETHING EVERY YEAR AND THAT WITH HELPING MY THREE SISTERS NOW AND THEN AND TIDING POOR COUSIN MIKE OVER BAD SEASONS +4446-2273-0009-1167: IT'S NOT PARTICULARLY RARE SHE SAID BUT SOME OF IT WAS MY MOTHER'S +4446-2273-0010-1168: THERE WAS WATERCRESS SOUP AND SOLE AND A DELIGHTFUL OMELETTE STUFFED WITH MUSHROOMS AND TRUFFLES AND TWO SMALL RARE DUCKLINGS AND ARTICHOKES AND A DRY YELLOW RHONE WINE OF WHICH BARTLEY HAD ALWAYS BEEN VERY FOND +4446-2273-0011-1169: THERE IS NOTHING ELSE THAT LOOKS SO JOLLY +4446-2273-0012-1170: THANK YOU BUT I DON'T LIKE IT SO WELL AS THIS +4446-2273-0013-1171: HAVE YOU BEEN IN PARIS MUCH THESE LATE YEARS +4446-2273-0014-1172: THERE ARE (*->A) FEW CHANGES IN THE OLD QUARTER +4446-2273-0015-1173: DON'T I THOUGH I'M SO SORRY TO HEAR IT HOW DID HER SON TURN OUT +4446-2273-0016-1174: HER HAIR IS STILL LIKE FLAX AND HER BLUE EYES ARE JUST LIKE A BABY'S AND SHE HAS THE SAME THREE FRECKLES ON HER LITTLE NOSE AND TALKS ABOUT GOING BACK TO HER (BAINS DE 
MER->BANDOMERE) +4446-2273-0017-1175: HOW JOLLY IT WAS BEING YOUNG HILDA +4446-2273-0018-1176: DO YOU REMEMBER THAT FIRST WALK WE TOOK TOGETHER IN PARIS +4446-2273-0019-1177: COME WE'LL HAVE OUR COFFEE IN THE OTHER ROOM AND YOU CAN SMOKE +4446-2273-0020-1178: I THINK WE DID SHE ANSWERED DEMURELY +4446-2273-0021-1179: WHAT SHE WANTED FROM US WAS NEITHER OUR FLOWERS NOR OUR (FRANCS->FRANKS) BUT JUST OUR YOUTH +4446-2273-0022-1180: THEY WERE BOTH REMEMBERING WHAT THE WOMAN HAD SAID WHEN SHE TOOK THE MONEY GOD GIVE YOU A HAPPY LOVE +4446-2273-0023-1181: THE STRANGE WOMAN AND HER PASSIONATE SENTENCE THAT RANG OUT SO SHARPLY HAD FRIGHTENED THEM BOTH +4446-2273-0024-1182: BARTLEY STARTED WHEN HILDA RANG THE LITTLE BELL BESIDE HER DEAR ME WHY DID YOU DO THAT +4446-2273-0025-1183: IT WAS VERY JOLLY HE MURMURED LAZILY AS MARIE CAME IN TO TAKE AWAY THE COFFEE +4446-2273-0026-1184: HAVE I TOLD YOU ABOUT MY NEW PLAY +4446-2273-0027-1185: WHEN SHE FINISHED ALEXANDER SHOOK HIMSELF OUT OF A REVERIE +4446-2273-0028-1186: NONSENSE OF COURSE I CAN'T REALLY SING EXCEPT THE WAY MY MOTHER AND GRANDMOTHER DID BEFORE ME +4446-2273-0029-1187: IT'S REALLY TOO WARM IN THIS ROOM TO SING DON'T YOU FEEL IT +4446-2273-0030-1188: ALEXANDER WENT OVER AND OPENED THE WINDOW FOR HER +4446-2273-0031-1189: THERE JUST IN FRONT +4446-2273-0032-1190: HE STOOD A LITTLE BEHIND HER AND TRIED TO STEADY HIMSELF AS HE SAID IT'S SOFT AND MISTY SEE HOW WHITE THE STARS ARE +4446-2273-0033-1191: FOR A LONG TIME NEITHER HILDA NOR BARTLEY SPOKE +4446-2273-0034-1192: HE FELT A TREMOR RUN THROUGH THE SLENDER YELLOW FIGURE IN FRONT OF HIM +4446-2273-0035-1193: BARTLEY LEANED OVER HER SHOULDER WITHOUT TOUCHING HER AND WHISPERED IN HER EAR YOU ARE GIVING ME A CHANCE YES +4446-2273-0036-1194: ALEXANDER (UNCLENCHED->CLENCHED) THE TWO HANDS AT HIS SIDES +4446-2275-0000-1195: THE STOP AT QUEENSTOWN THE TEDIOUS PASSAGE (UP->OF) THE (MERSEY->MERCY) WERE THINGS THAT HE NOTED DIMLY THROUGH HIS GROWING IMPATIENCE +4446-2275-0001-1196: SHE BLUSHED AND SMILED AND FUMBLED HIS CARD IN HER CONFUSION BEFORE SHE RAN UPSTAIRS +4446-2275-0002-1197: ALEXANDER PACED UP AND DOWN THE HALLWAY BUTTONING AND UNBUTTONING HIS OVERCOAT UNTIL SHE RETURNED AND TOOK HIM UP TO HILDA'S LIVING ROOM +4446-2275-0003-1198: THE ROOM WAS EMPTY WHEN HE ENTERED +4446-2275-0004-1199: ALEXANDER DID NOT SIT DOWN +4446-2275-0005-1200: I FELT IT IN MY BONES WHEN I WOKE THIS MORNING THAT SOMETHING SPLENDID WAS GOING TO TURN UP +4446-2275-0006-1201: I THOUGHT IT MIGHT BE SISTER KATE OR COUSIN MIKE WOULD BE HAPPENING ALONG +4446-2275-0007-1202: SHE PUSHED HIM TOWARD THE BIG CHAIR BY THE FIRE AND SAT DOWN ON A STOOL AT THE OPPOSITE SIDE OF THE HEARTH HER KNEES DRAWN UP TO HER CHIN LAUGHING LIKE A HAPPY LITTLE GIRL +4446-2275-0008-1203: WHEN DID YOU COME BARTLEY AND HOW DID IT HAPPEN YOU HAVEN'T SPOKEN A WORD +4446-2275-0009-1204: I GOT IN ABOUT TEN MINUTES AGO +4446-2275-0010-1205: ALEXANDER LEANED FORWARD AND WARMED HIS HANDS BEFORE THE BLAZE +4446-2275-0011-1206: BARTLEY BENT (LOWER->LOWERED) OVER THE FIRE +4446-2275-0012-1207: SHE LOOKED AT HIS HEAVY SHOULDERS AND BIG DETERMINED HEAD THRUST FORWARD LIKE A CATAPULT IN LEASH +4446-2275-0013-1208: I'LL DO ANYTHING YOU WISH ME TO BARTLEY SHE SAID TREMULOUSLY +4446-2275-0014-1209: I CAN'T STAND SEEING YOU MISERABLE +4446-2275-0015-1210: HE PULLED UP A WINDOW AS IF THE AIR WERE HEAVY +4446-2275-0016-1211: HILDA WATCHED HIM FROM (HER->THE) CORNER TREMBLING AND SCARCELY BREATHING DARK SHADOWS GROWING ABOUT HER EYES IT +4446-2275-0017-1212: BUT IT'S 
WORSE NOW IT'S UNBEARABLE +4446-2275-0018-1213: I GET NOTHING BUT MISERY OUT OF EITHER +4446-2275-0019-1214: THE WORLD IS ALL THERE JUST AS IT USED TO BE BUT I CAN'T GET AT IT ANY MORE +4446-2275-0020-1215: IT WAS MYSELF I WAS DEFYING HILDA +4446-2275-0021-1216: (HILDA'S->HELDA'S) FACE QUIVERED BUT SHE WHISPERED YES I THINK IT MUST HAVE BEEN +4446-2275-0022-1217: BUT WHY DIDN'T YOU TELL ME WHEN YOU WERE HERE IN THE SUMMER +4446-2275-0023-1218: ALEXANDER GROANED I MEANT TO BUT SOMEHOW I COULDN'T +4446-2275-0024-1219: SHE PRESSED HIS HAND GENTLY IN GRATITUDE +4446-2275-0025-1220: WEREN'T YOU HAPPY THEN AT ALL +4446-2275-0026-1221: SHE CLOSED HER EYES AND TOOK A DEEP BREATH AS IF TO DRAW IN AGAIN THE FRAGRANCE OF THOSE DAYS +4446-2275-0027-1222: HE MOVED UNEASILY AND HIS CHAIR CREAKED +4446-2275-0028-1223: YES YES SHE HURRIED PULLING HER HAND GENTLY AWAY FROM HIM +4446-2275-0029-1224: PLEASE TELL ME ONE THING BARTLEY AT LEAST TELL ME THAT YOU BELIEVE I THOUGHT I WAS MAKING YOU HAPPY +4446-2275-0030-1225: YES (HILDA->HELDA) I KNOW THAT HE SAID SIMPLY +4446-2275-0031-1226: I UNDERSTAND BARTLEY I WAS WRONG +4446-2275-0032-1227: BUT I DIDN'T KNOW YOU'VE ONLY TO TELL ME NOW +4446-2275-0033-1228: WHAT I MEAN IS THAT I WANT YOU TO PROMISE NEVER TO SEE ME AGAIN NO MATTER HOW OFTEN I COME NO MATTER HOW HARD I BEG +4446-2275-0034-1229: KEEP AWAY IF YOU WISH WHEN HAVE I EVER FOLLOWED YOU +4446-2275-0035-1230: ALEXANDER ROSE AND SHOOK HIMSELF ANGRILY YES I KNOW I'M COWARDLY +4446-2275-0036-1231: HE TOOK (HER->A) ROUGHLY IN HIS ARMS DO YOU KNOW WHAT I MEAN +4446-2275-0037-1232: OH BARTLEY WHAT AM I TO DO +4446-2275-0038-1233: I WILL ASK THE LEAST IMAGINABLE BUT I MUST HAVE SOMETHING +4446-2275-0039-1234: I MUST KNOW ABOUT YOU +4446-2275-0040-1235: THE SIGHT OF YOU BARTLEY TO SEE YOU LIVING AND HAPPY AND SUCCESSFUL CAN I NEVER MAKE YOU UNDERSTAND WHAT THAT MEANS TO ME +4446-2275-0041-1236: YOU SEE LOVING SOME ONE AS I LOVE YOU MAKES THE WHOLE WORLD DIFFERENT +4446-2275-0042-1237: AND THEN YOU CAME BACK NOT CARING VERY MUCH BUT IT MADE NO DIFFERENCE +4446-2275-0043-1238: BARTLEY BENT OVER AND TOOK HER IN HIS ARMS KISSING HER MOUTH AND HER WET TIRED EYES +4446-2275-0044-1239: (*->I) DON'T CRY DON'T CRY HE WHISPERED +4446-2275-0045-1240: WE'VE TORTURED EACH OTHER ENOUGH FOR (TONIGHT->TO NIGHT) +4507-16021-0000-1469: CHAPTER ONE ORIGIN +4507-16021-0001-1470: IT ENGENDERS A WHOLE WORLD LA (PEGRE->PEG) FOR WHICH (READ->RED) THEFT AND A HELL LA (PEGRENNE->PEGRIN) FOR WHICH (READ->RED) HUNGER +4507-16021-0002-1471: THUS IDLENESS IS THE MOTHER +4507-16021-0003-1472: SHE HAS A SON THEFT AND A DAUGHTER HUNGER +4507-16021-0004-1473: WHAT IS SLANG +4507-16021-0005-1474: WE HAVE NEVER UNDERSTOOD THIS SORT OF OBJECTIONS +4507-16021-0006-1475: SLANG IS ODIOUS +4507-16021-0007-1476: SLANG MAKES ONE SHUDDER +4507-16021-0008-1477: WHO DENIES THAT OF COURSE IT DOES +4507-16021-0009-1478: WHEN IT IS A QUESTION OF PROBING A WOUND A GULF A SOCIETY SINCE (WHEN->ONE) HAS IT BEEN CONSIDERED WRONG TO GO TOO FAR TO GO TO THE BOTTOM +4507-16021-0010-1479: WE HAVE ALWAYS THOUGHT THAT IT WAS SOMETIMES A COURAGEOUS ACT AND AT LEAST A SIMPLE AND USEFUL DEED WORTHY OF THE SYMPATHETIC ATTENTION WHICH DUTY ACCEPTED (AND->IN) FULFILLED MERITS +4507-16021-0011-1480: WHY SHOULD ONE NOT EXPLORE EVERYTHING AND STUDY EVERYTHING +4507-16021-0012-1481: WHY SHOULD ONE HALT ON THE WAY +4507-16021-0013-1482: NOTHING IS MORE LUGUBRIOUS THAN THE CONTEMPLATION THUS IN ITS NUDITY IN THE BROAD LIGHT OF THOUGHT OF THE HORRIBLE SWARMING OF SLANG 
+4507-16021-0014-1483: (NOW->NO) WHEN HAS HORROR EVER EXCLUDED STUDY +4507-16021-0015-1484: SINCE WHEN HAS MALADY BANISHED MEDICINE +4507-16021-0016-1485: CAN ONE IMAGINE A NATURALIST REFUSING TO STUDY THE VIPER THE BAT THE SCORPION THE CENTIPEDE THE (TARANTULA->TERENTIAL) AND ONE WHO WOULD CAST THEM BACK INTO THEIR DARKNESS SAYING OH HOW UGLY THAT IS +4507-16021-0017-1486: HE WOULD BE LIKE A PHILOLOGIST REFUSING TO EXAMINE A FACT IN LANGUAGE A PHILOSOPHER HESITATING TO SCRUTINIZE A FACT IN HUMANITY +4507-16021-0018-1487: WHAT IS SLANG PROPERLY SPEAKING +4507-16021-0019-1488: IT IS THE LANGUAGE OF WRETCHEDNESS +4507-16021-0020-1489: WE MAY BE STOPPED THE FACT MAY BE PUT TO US IN GENERAL TERMS WHICH IS ONE WAY OF ATTENUATING IT WE MAY BE TOLD THAT ALL TRADES PROFESSIONS IT MAY BE ADDED ALL THE ACCIDENTS OF THE SOCIAL HIERARCHY AND ALL FORMS OF INTELLIGENCE HAVE THEIR OWN SLANG +4507-16021-0021-1490: THE PAINTER WHO SAYS MY GRINDER THE NOTARY WHO SAYS MY SKIP THE GUTTER THE (HAIRDRESSER->HAIR DRESSER) WHO SAYS MY (MEALYBACK->MEALLY BACK) THE COBBLER WHO SAYS MY CUB TALKS SLANG +4507-16021-0022-1491: THERE IS THE SLANG OF THE AFFECTED LADY AS WELL AS OF THE (PRECIEUSES->PURSUS) +4507-16021-0023-1492: THE SUGAR MANUFACTURER WHO SAYS LOAF CLARIFIED LUMPS BASTARD COMMON BURNT THIS HONEST MANUFACTURER TALKS SLANG +4507-16021-0024-1493: ALGEBRA MEDICINE (BOTANY->BARTANY) HAVE EACH THEIR SLANG +4507-16021-0025-1494: TO MEET THE NEEDS OF THIS CONFLICT WRETCHEDNESS HAS INVENTED A LANGUAGE OF COMBAT WHICH IS SLANG +4507-16021-0026-1495: TO KEEP AFLOAT AND TO RESCUE FROM OBLIVION TO HOLD ABOVE THE GULF WERE IT BUT A FRAGMENT OF SOME LANGUAGE WHICH MAN HAS SPOKEN AND WHICH WOULD OTHERWISE BE LOST THAT IS TO SAY ONE OF THE ELEMENTS GOOD OR BAD OF WHICH CIVILIZATION IS COMPOSED OR BY WHICH IT IS COMPLICATED TO EXTEND THE RECORDS OF SOCIAL OBSERVATION IS TO SERVE CIVILIZATION ITSELF +4507-16021-0027-1496: PHOENICIAN VERY GOOD +4507-16021-0028-1497: EVEN DIALECT LET THAT PASS +4507-16021-0029-1498: TO THIS WE REPLY IN ONE WORD ONLY +4507-16021-0030-1499: ASSUREDLY IF THE TONGUE WHICH A NATION OR A PROVINCE HAS SPOKEN IS WORTHY OF INTEREST THE LANGUAGE WHICH HAS BEEN SPOKEN BY A MISERY IS STILL MORE WORTHY OF ATTENTION AND STUDY +4507-16021-0031-1500: AND THEN WE INSIST UPON IT THE STUDY OF SOCIAL DEFORMITIES AND INFIRMITIES AND THE TASK OF POINTING THEM OUT WITH A VIEW TO REMEDY IS NOT A BUSINESS IN WHICH CHOICE IS PERMITTED +4507-16021-0032-1501: HE MUST DESCEND WITH HIS HEART FULL OF CHARITY AND SEVERITY AT THE SAME TIME AS A BROTHER AND AS (A->HE) JUDGE TO THOSE IMPENETRABLE CASEMATES (WHERE->WERE) CRAWL PELL MELL THOSE WHO BLEED AND THOSE WHO DEAL THE BLOW THOSE WHO WEEP AND THOSE WHO CURSE THOSE WHO FAST (AND->IN) THOSE WHO DEVOUR THOSE WHO ENDURE EVIL AND THOSE WHO INFLICT IT +4507-16021-0033-1502: DO WE REALLY KNOW THE MOUNTAIN WELL WHEN WE ARE NOT ACQUAINTED WITH THE CAVERN +4507-16021-0034-1503: THEY CONSTITUTE TWO DIFFERENT ORDERS OF FACTS WHICH CORRESPOND TO EACH OTHER WHICH ARE ALWAYS INTERLACED AND WHICH OFTEN BRING FORTH RESULTS +4507-16021-0035-1504: TRUE HISTORY BEING A MIXTURE OF ALL THINGS THE TRUE HISTORIAN MINGLES IN EVERYTHING +4507-16021-0036-1505: FACTS FORM ONE OF THESE AND IDEAS THE OTHER +4507-16021-0037-1506: THERE IT CLOTHES ITSELF IN WORD MASKS IN METAPHOR RAGS +4507-16021-0038-1507: IN THIS (GUISE->SKIES) IT BECOMES HORRIBLE +4507-16021-0039-1508: ONE PERCEIVES WITHOUT UNDERSTANDING IT A HIDEOUS MURMUR SOUNDING ALMOST LIKE HUMAN ACCENTS BUT MORE NEARLY RESEMBLING A HOWL THAN AN 
ARTICULATE WORD +4507-16021-0040-1509: ONE THINKS ONE HEARS (HYDRAS->HYDRAST) TALKING +4507-16021-0041-1510: IT IS UNINTELLIGIBLE IN THE DARK +4507-16021-0042-1511: IT IS BLACK IN MISFORTUNE IT IS BLACKER STILL (IN->AND) CRIME THESE TWO BLACKNESSES AMALGAMATED (COMPOSE->COMPOSED) SLANG +4507-16021-0043-1512: THE EARTH IS NOT DEVOID OF RESEMBLANCE TO A JAIL +4507-16021-0044-1513: LOOK CLOSELY AT LIFE +4507-16021-0045-1514: IT IS SO MADE THAT EVERYWHERE WE FEEL THE SENSE OF PUNISHMENT +4507-16021-0046-1515: EACH DAY HAS ITS OWN GREAT GRIEF (OR->FOR) ITS LITTLE CARE +4507-16021-0047-1516: YESTERDAY YOU WERE TREMBLING FOR A HEALTH THAT IS DEAR TO YOU TO DAY YOU FEAR FOR YOUR OWN TO MORROW IT WILL BE ANXIETY ABOUT MONEY THE DAY AFTER TO MORROW THE (DIATRIBE->DIETRIBE) OF A SLANDERER THE DAY AFTER THAT THE MISFORTUNE OF SOME FRIEND THEN THE PREVAILING WEATHER THEN SOMETHING THAT HAS BEEN BROKEN OR LOST THEN A PLEASURE WITH WHICH YOUR CONSCIENCE AND YOUR VERTEBRAL COLUMN REPROACH YOU AGAIN THE COURSE OF PUBLIC AFFAIRS +4507-16021-0048-1517: THIS WITHOUT RECKONING IN THE PAINS OF THE HEART AND SO (IT->TO) GOES ON +4507-16021-0049-1518: THERE IS HARDLY ONE DAY OUT OF A HUNDRED WHICH IS WHOLLY JOYOUS AND SUNNY +4507-16021-0050-1519: AND YOU BELONG TO THAT SMALL CLASS WHO ARE (*->A) HAPPY +4507-16021-0051-1520: IN THIS WORLD EVIDENTLY THE VESTIBULE OF ANOTHER THERE ARE NO FORTUNATE +4507-16021-0052-1521: THE REAL HUMAN DIVISION IS THIS THE LUMINOUS AND THE SHADY +4507-16021-0053-1522: TO DIMINISH THE NUMBER OF THE SHADY TO AUGMENT THE NUMBER OF THE LUMINOUS THAT IS THE OBJECT +4507-16021-0054-1523: THAT IS WHY WE CRY EDUCATION SCIENCE +4507-16021-0055-1524: TO TEACH READING MEANS TO (LIGHT->WRITE) THE FIRE EVERY SYLLABLE SPELLED OUT SPARKLES +4507-16021-0056-1525: HOWEVER HE WHO SAYS LIGHT DOES NOT NECESSARILY SAY JOY +4507-16021-0057-1526: PEOPLE SUFFER IN THE LIGHT EXCESS BURNS +4507-16021-0058-1527: THE FLAME IS THE ENEMY OF THE WING +4507-16021-0059-1528: TO BURN WITHOUT CEASING TO FLY THEREIN LIES THE MARVEL OF GENIUS +4970-29093-0000-2093: YOU'LL NEVER DIG IT OUT OF THE (ASTOR->ASTRO) LIBRARY +4970-29093-0001-2094: TO THE YOUNG AMERICAN HERE OR ELSEWHERE THE PATHS TO FORTUNE ARE INNUMERABLE AND ALL OPEN THERE IS INVITATION IN THE AIR AND SUCCESS IN ALL HIS WIDE HORIZON +4970-29093-0002-2095: HE HAS NO TRADITIONS TO BIND HIM OR GUIDE HIM AND HIS IMPULSE IS TO BREAK AWAY FROM THE OCCUPATION HIS FATHER HAS FOLLOWED AND MAKE A NEW WAY FOR HIMSELF +4970-29093-0003-2096: THE MODEST FELLOW WOULD HAVE LIKED FAME THRUST UPON HIM FOR SOME WORTHY ACHIEVEMENT IT MIGHT BE FOR A BOOK OR FOR THE (SKILLFUL->SKILFUL) MANAGEMENT OF SOME GREAT NEWSPAPER OR FOR SOME DARING EXPEDITION LIKE THAT OF LIEUTENANT STRAIN OR DOCTOR KANE +4970-29093-0004-2097: HE WAS UNABLE TO DECIDE EXACTLY WHAT IT SHOULD BE +4970-29093-0005-2098: SOMETIMES HE THOUGHT HE WOULD LIKE TO STAND IN A CONSPICUOUS PULPIT AND HUMBLY PREACH THE GOSPEL OF REPENTANCE AND IT EVEN CROSSED HIS MIND THAT IT WOULD BE NOBLE TO GIVE HIMSELF TO A MISSIONARY LIFE TO SOME BENIGHTED REGION WHERE THE DATE PALM (GROWS->GROVES) AND THE NIGHTINGALE'S VOICE IS IN TUNE AND THE (BUL BUL->BULBUL) SINGS ON THE OFF NIGHTS +4970-29093-0006-2099: LAW SEEMED TO HIM WELL ENOUGH AS A SCIENCE BUT HE NEVER COULD DISCOVER A PRACTICAL CASE WHERE IT APPEARED TO HIM WORTH WHILE TO GO TO LAW AND ALL THE CLIENTS WHO STOPPED WITH THIS NEW CLERK (IN->AND) THE ANTE ROOM OF THE LAW OFFICE WHERE HE WAS WRITING PHILIP INVARIABLY ADVISED TO SETTLE NO MATTER HOW BUT (SETTLE->SETTLED) 
GREATLY TO THE DISGUST OF HIS EMPLOYER WHO KNEW THAT JUSTICE BETWEEN MAN AND MAN COULD ONLY BE ATTAINED BY THE RECOGNIZED PROCESSES WITH THE ATTENDANT (FEES->BEES) +4970-29093-0007-2100: IT IS SUCH A NOBLE AMBITION THAT IT IS A PITY IT HAS USUALLY SUCH A SHALLOW FOUNDATION +4970-29093-0008-2101: HE WANTED TO BEGIN AT THE TOP OF THE LADDER +4970-29093-0009-2102: PHILIP THEREFORE READ DILIGENTLY IN THE ASTOR LIBRARY PLANNED LITERARY WORKS THAT SHOULD COMPEL ATTENTION AND NURSED HIS GENIUS +4970-29093-0010-2103: HE HAD NO FRIEND WISE ENOUGH TO TELL HIM TO STEP INTO THE DORKING CONVENTION (THEN->THAN) IN SESSION MAKE A SKETCH OF THE MEN AND WOMEN ON THE PLATFORM AND TAKE IT TO THE EDITOR OF THE DAILY (GRAPEVINE->GRAPE VINE) AND SEE WHAT HE COULD GET A LINE FOR IT +4970-29093-0011-2104: (O->OH) VERY WELL SAID (GRINGO->GREENOW) TURNING AWAY WITH A SHADE OF CONTEMPT YOU'LL FIND IF YOU ARE GOING INTO LITERATURE AND NEWSPAPER WORK THAT YOU CAN'T AFFORD A CONSCIENCE LIKE THAT +4970-29093-0012-2105: BUT PHILIP DID AFFORD IT AND HE WROTE (THANKING->THINKING) HIS FRIENDS AND DECLINING BECAUSE HE SAID THE POLITICAL SCHEME WOULD FAIL AND OUGHT TO FAIL +4970-29093-0013-2106: AND HE WENT BACK TO HIS BOOKS AND TO HIS WAITING FOR AN OPENING LARGE ENOUGH FOR HIS DIGNIFIED ENTRANCE INTO THE LITERARY WORLD +4970-29093-0014-2107: WELL I'M GOING AS AN ENGINEER YOU (CAN->COULD) GO AS ONE +4970-29093-0015-2108: YOU CAN BEGIN BY CARRYING A ROD AND PUTTING DOWN THE FIGURES +4970-29093-0016-2109: NO (ITS->IT'S) NOT TOO SOON +4970-29093-0017-2110: I'VE BEEN READY TO GO ANYWHERE FOR SIX MONTHS +4970-29093-0018-2111: THE TWO YOUNG MEN WHO WERE BY THIS TIME FULL OF THE (ADVENTURE->ADVENTURER) WENT DOWN TO THE WALL STREET OFFICE OF HENRY'S UNCLE AND HAD A TALK WITH THAT WILY OPERATOR +4970-29093-0019-2112: THE NIGHT WAS SPENT IN PACKING UP AND WRITING LETTERS FOR PHILIP WOULD NOT TAKE SUCH AN IMPORTANT STEP WITHOUT INFORMING HIS FRIENDS +4970-29093-0020-2113: WHY IT'S (IN->A) MISSOURI SOMEWHERE ON THE FRONTIER I THINK WE'LL GET A MAP +4970-29093-0021-2114: I WAS AFRAID IT WAS NEARER HOME +4970-29093-0022-2115: HE KNEW HIS UNCLE WOULD BE GLAD TO HEAR THAT HE HAD AT LAST TURNED HIS THOUGHTS TO A PRACTICAL MATTER +4970-29093-0023-2116: HE WELL KNEW THE PERILS OF THE FRONTIER THE SAVAGE STATE OF SOCIETY THE LURKING INDIANS AND THE DANGERS OF FEVER +4970-29095-0000-2054: SHE WAS TIRED OF OTHER THINGS +4970-29095-0001-2055: SHE TRIED THIS MORNING AN AIR OR TWO UPON THE PIANO (SANG->SAYING) A SIMPLE SONG IN A SWEET BUT SLIGHTLY METALLIC VOICE AND THEN SEATING HERSELF BY THE OPEN WINDOW READ PHILIP'S LETTER +4970-29095-0002-2056: WELL MOTHER SAID THE YOUNG STUDENT LOOKING UP WITH A SHADE OF IMPATIENCE +4970-29095-0003-2057: I HOPE THEE TOLD THE ELDERS THAT FATHER AND I ARE RESPONSIBLE FOR THE PIANO AND THAT MUCH AS THEE LOVES MUSIC THEE IS NEVER IN THE ROOM WHEN IT IS PLAYED +4970-29095-0004-2058: I HEARD FATHER TELL COUSIN ABNER THAT HE WAS WHIPPED SO OFTEN FOR WHISTLING WHEN HE WAS A BOY THAT HE WAS DETERMINED TO HAVE WHAT COMPENSATION HE COULD GET NOW +4970-29095-0005-2059: THY WAYS GREATLY TRY ME RUTH AND ALL THY RELATIONS +4970-29095-0006-2060: IS THY FATHER WILLING THEE SHOULD GO AWAY TO A SCHOOL OF THE WORLD'S PEOPLE +4970-29095-0007-2061: I HAVE NOT ASKED HIM RUTH REPLIED WITH A LOOK THAT MIGHT IMPLY THAT SHE WAS ONE OF THOSE DETERMINED LITTLE BODIES WHO FIRST MADE UP HER OWN MIND AND THEN COMPELLED OTHERS TO MAKE UP THEIRS IN ACCORDANCE WITH HERS +4970-29095-0008-2062: MOTHER (I'M->I AM) GOING TO STUDY MEDICINE 
+4970-29095-0009-2063: MARGARET BOLTON ALMOST LOST FOR A MOMENT HER HABITUAL PLACIDITY +4970-29095-0010-2064: (THEE->THE) STUDY MEDICINE +4970-29095-0011-2065: DOES THEE THINK THEE COULD STAND IT SIX MONTHS +4970-29095-0012-2066: AND BESIDES SUPPOSE THEE DOES LEARN MEDICINE +4970-29095-0013-2067: I WILL (PRACTICE->PRACTISE) IT +4970-29095-0014-2068: (WHERE->WHERE'S) THEE AND THY FAMILY ARE KNOWN +4970-29095-0015-2069: IF I CAN GET (PATIENTS->PATIENCE) +4970-29095-0016-2070: RUTH SAT QUITE STILL FOR A TIME WITH FACE (INTENT->AND TENT) AND FLUSHED IT WAS OUT NOW +4970-29095-0017-2071: THE (SIGHT SEERS->SIGHTSEERS) RETURNED IN HIGH SPIRITS FROM THE CITY +4970-29095-0018-2072: RUTH ASKED THE ENTHUSIASTS IF THEY WOULD LIKE TO LIVE IN SUCH A SOUNDING (MAUSOLEUM->MUZZLEEM) WITH ITS GREAT HALLS AND ECHOING ROOMS AND NO COMFORTABLE PLACE IN IT FOR THE ACCOMMODATION OF ANY BODY +4970-29095-0019-2073: AND THEN THERE WAS BROAD STREET +4970-29095-0020-2074: THERE (*->ARE) CERTAINLY WAS NO END TO IT AND EVEN RUTH WAS (PHILADELPHIAN->PHILADELPHIA) ENOUGH TO BELIEVE THAT A STREET OUGHT NOT TO HAVE ANY END OR ARCHITECTURAL (POINT->BLINT) UPON WHICH THE WEARY EYE COULD REST +4970-29095-0021-2075: BUT NEITHER SAINT (GIRARD->GERARD) NOR BROAD STREET NEITHER WONDERS OF THE MINT NOR THE GLORIES OF THE HALL WHERE THE GHOSTS OF OUR FATHERS SIT ALWAYS SIGNING THE DECLARATION (IMPRESSED->IMPRESS) THE (VISITORS->VISITOR) SO MUCH AS THE SPLENDORS OF THE CHESTNUT STREET WINDOWS AND THE BARGAINS ON EIGHTH STREET +4970-29095-0022-2076: IS THEE GOING TO THE YEARLY MEETING RUTH ASKED ONE OF THE GIRLS +4970-29095-0023-2077: I HAVE NOTHING TO WEAR REPLIED (THAT->THE) DEMURE PERSON +4970-29095-0024-2078: IT HAS OCCUPIED MOTHER A LONG TIME TO FIND (AT->*) THE SHOPS THE EXACT SHADE FOR HER NEW BONNET +4970-29095-0025-2079: AND THEE WON'T GO WHY SHOULD I +4970-29095-0026-2080: IF I GO TO MEETING AT ALL I LIKE BEST TO SIT IN THE QUIET OLD HOUSE IN GERMANTOWN WHERE THE WINDOWS ARE ALL OPEN AND I CAN SEE THE TREES AND (HEAR->HERE) THE STIR OF THE LEAVES +4970-29095-0027-2081: IT'S SUCH A CRUSH AT THE YEARLY MEETING AT ARCH STREET AND THEN THERE'S THE ROW OF SLEEK LOOKING YOUNG MEN WHO (LINE->LIE IN) THE CURBSTONE AND STARE AT US AS WE COME OUT +4970-29095-0028-2082: HE DOESN'T SAY BUT IT'S ON THE FRONTIER AND ON THE MAP EVERYTHING BEYOND IT IS MARKED INDIANS AND DESERT AND LOOKS AS DESOLATE AS A (WEDNESDAY->WIND ZAY) MEETING (HUMPH->*) IT WAS TIME FOR HIM TO DO SOMETHING +4970-29095-0029-2083: IS HE GOING TO START A DAILY NEWSPAPER AMONG THE (KICK A POOS->KICKAPOOS) +4970-29095-0030-2084: FATHER (THEE'S->THESE) UNJUST TO PHILIP HE'S GOING INTO BUSINESS +4970-29095-0031-2085: HE DOESN'T SAY EXACTLY WHAT IT IS SAID RUTH A LITTLE DUBIOUSLY BUT IT'S SOMETHING ABOUT LAND AND RAILROADS AND (THEE->HE) KNOWS FATHER THAT FORTUNES ARE MADE NOBODY KNOWS EXACTLY HOW IN A NEW COUNTRY +4970-29095-0032-2086: (BUT->THAT) PHILIP IS HONEST AND HE HAS TALENT ENOUGH IF HE WILL STOP SCRIBBLING TO MAKE HIS WAY +4970-29095-0033-2087: WHAT A (BOX WOMEN->BOXWOMEN) ARE PUT INTO MEASURED FOR IT AND (PUT IN->PUTTING) YOUNG IF WE GO ANYWHERE IT'S IN A BOX VEILED AND PINIONED AND SHUT IN BY DISABILITIES +4970-29095-0034-2088: WHY SHOULD I (RUST->REST) AND BE STUPID AND SIT IN (INACTION->AN ACTION) BECAUSE I AM A GIRL +4970-29095-0035-2089: AND IF I HAD A FORTUNE WOULD THEE WANT ME TO LEAD A USELESS LIFE +4970-29095-0036-2090: HAS (THEE->THE) CONSULTED THY MOTHER ABOUT A CAREER I SUPPOSE IT IS A CAREER (*->OF) THEE WANTS +4970-29095-0037-2091: BUT THAT WISE AND 
PLACID WOMAN UNDERSTOOD THE SWEET REBEL A GREAT DEAL BETTER THAN RUTH UNDERSTOOD HERSELF +4970-29095-0038-2092: RUTH WAS GLAD TO HEAR THAT PHILIP HAD MADE A PUSH INTO THE WORLD AND SHE WAS SURE THAT HIS TALENT AND COURAGE WOULD MAKE (A WAY->AWAY) FOR HIM +4992-23283-0000-2140: BUT THE MORE FORGETFULNESS HAD THEN PREVAILED THE MORE POWERFUL WAS THE FORCE OF REMEMBRANCE WHEN SHE AWOKE +4992-23283-0001-2141: MISS MILNER'S HEALTH IS NOT GOOD +4992-23283-0002-2142: SAID MISSUS (HORTON->WHARTON) A FEW MINUTES AFTER +4992-23283-0003-2143: SO THERE IS TO ME ADDED SANDFORD WITH A SARCASTIC SNEER +4992-23283-0004-2144: AND YET YOU MUST OWN HER (BEHAVIOUR->BEHAVIOR) HAS WARRANTED THEM HAS IT NOT BEEN IN THIS PARTICULAR INCOHERENT AND UNACCOUNTABLE +4992-23283-0005-2145: NOT THAT I KNOW OF NOT ONE MORE THAT I KNOW OF HE REPLIED WITH ASTONISHMENT AT WHAT SHE HAD INSINUATED AND YET WITH A PERFECT ASSURANCE THAT SHE WAS IN THE WRONG +4992-23283-0006-2146: PERHAPS I AM MISTAKEN ANSWERED SHE +4992-23283-0007-2147: TO ASK ANY MORE QUESTIONS OF YOU I BELIEVE WOULD BE UNFAIR +4992-23283-0008-2148: HE SEEMED TO WAIT FOR HER REPLY BUT AS SHE MADE NONE HE PROCEEDED +4992-23283-0009-2149: OH MY LORD CRIED MISS WOODLEY WITH A MOST FORCIBLE ACCENT YOU ARE THE LAST (PERSON->PERSONAL) ON EARTH SHE WOULD PARDON ME FOR (ENTRUSTING->INTRUSTING) +4992-23283-0010-2150: BUT IN SUCH A CASE MISS MILNER'S ELECTION OF A HUSBAND SHALL NOT DIRECT MINE +4992-23283-0011-2151: IF SHE DOES NOT KNOW HOW TO ESTIMATE HER OWN VALUE I DO +4992-23283-0012-2152: INDEPENDENT OF HER FORTUNE SHE HAS BEAUTY TO CAPTIVATE THE HEART OF ANY MAN AND WITH ALL HER FOLLIES SHE HAS A FRANKNESS IN HER MANNER AN UNAFFECTED WISDOM IN HER THOUGHTS (A->OF) VIVACITY IN HER CONVERSATION AND WITHAL A SOFTNESS IN HER DEMEANOUR THAT MIGHT ALONE ENGAGE THE AFFECTIONS OF A MAN OF THE NICEST SENTIMENTS AND THE STRONGEST UNDERSTANDING +4992-23283-0013-2153: MY LORD MISS MILNER'S TASTE IS NOT A DEPRAVED ONE IT IS BUT TOO REFINED +4992-23283-0014-2154: WHAT CAN YOU MEAN BY THAT MISS WOODLEY YOU TALK MYSTERIOUSLY +4992-23283-0015-2155: IS SHE NOT AFRAID THAT I WILL THWART HER INCLINATIONS +4992-23283-0016-2156: AGAIN HE SEARCHED HIS OWN THOUGHTS NOR INEFFECTUALLY AS BEFORE +4992-23283-0017-2157: MISS WOODLEY WAS TOO LITTLE VERSED IN THE SUBJECT TO KNOW THIS WOULD HAVE BEEN NOT TO LOVE AT ALL AT LEAST NOT TO THE EXTENT OF BREAKING THROUGH ENGAGEMENTS AND ALL THE VARIOUS OBSTACLES THAT STILL (MILITATED->MITIGATED) AGAINST THEIR UNION +4992-23283-0018-2158: TO RELIEVE HER FROM BOTH HE LAID HIS HAND WITH FORCE UPON HIS HEART AND SAID DO YOU BELIEVE ME +4992-23283-0019-2159: I WILL MAKE NO UNJUST USE OF WHAT I KNOW HE REPLIED WITH FIRMNESS I BELIEVE YOU MY LORD +4992-23283-0020-2160: I HAVE NEVER YET HOWEVER BEEN VANQUISHED BY THEM AND EVEN UPON THIS OCCASION MY REASON SHALL COMBAT THEM TO THE LAST AND MY REASON SHALL FAIL ME BEFORE I DO WRONG +4992-41797-0000-2117: YES DEAD THESE FOUR YEARS (AN->AND) A GOOD JOB FOR HER TOO +4992-41797-0001-2118: WELL AS I SAY IT'S AN AWFUL QUEER WORLD THEY CLAP ALL THE BURGLARS (INTO->AND) JAIL (AND->*) THE MURDERERS (AND->IN) THE (WIFE->WHITE) BEATERS (I'VE->I) ALLERS THOUGHT A GENTLE REPROOF WOULD BE ENOUGH PUNISHMENT FOR A WIFE (BEATER->PETER) CAUSE HE PROBABLY HAS A LOT (O->OF) PROVOCATION THAT NOBODY KNOWS AND THE (FIREBUGS->FIRE BUGS) CAN'T THINK (O->OF) THE RIGHT NAME SOMETHING LIKE (CENDENARIES AN->SENDIARIES AND) THE BREAKERS (O->OF) THE PEACE (AN->AND) WHAT NOT (AN->AND) YET THE LAW HAS (NOTHIN->NOTHING) TO SAY TO A MAN LIKE (HEN 
LORD->HANDLED) +4992-41797-0002-2119: GRANDFATHER WAS ALEXANDER CAREY L (L->*) D DOCTOR OF LAWS THAT IS +4992-41797-0003-2120: MISTER POPHAM LAID DOWN HIS BRUSH +4992-41797-0004-2121: I (SWAN TO MAN->SWAY INTO MEN) HE EJACULATED IF YOU DON'T WORK HARD YOU CAN'T KEEP UP WITH THE (TIMES->TUBS) DOCTOR OF LAWS +4992-41797-0005-2122: DONE HE AIN'T DONE A THING (HE'D OUGHTER SENCE->HE ORDERS SINCE) HE WAS BORN +4992-41797-0006-2123: HE KEEPS THE THOU SHALT NOT (COMMANDMENTS->COMMAND ITS) FIRST RATE HEN LORD DOES +4992-41797-0007-2124: HE (GIVE->GAVE) UP HIS POSITION AND SHUT THE FAMILY UP IN THAT TOMB OF A HOUSE (SO T->SEWED) HE (COULD->COULDN'T) STUDY HIS BOOKS +4992-41797-0008-2125: MISTER POPHAM EXAGGERATED NOTHING BUT ON THE CONTRARY LEFT MUCH UNSAID IN HIS NARRATIVE OF THE FAMILY AT THE HOUSE OF LORDS +4992-41797-0009-2126: HENRY LORD WITH THE DEGREE OF (PH->P) D TO HIS CREDIT HAD BEEN PROFESSOR OF ZOOLOGY AT A NEW ENGLAND COLLEGE BUT HAD RESIGNED HIS POST IN ORDER TO WRITE A SERIES OF SCIENTIFIC TEXT BOOKS +4992-41797-0010-2127: ALWAYS IRRITABLE COLD INDIFFERENT HE HAD GROWN RAPIDLY MORE SO AS YEARS WENT ON +4992-41797-0011-2128: WHATEVER (APPEALED->APPEAL) TO HER SENSE OF BEAUTY WAS STRAIGHTWAY TRANSFERRED TO PAPER OR CANVAS +4992-41797-0012-2129: SHE IS WILD TO KNOW HOW TO DO THINGS +4992-41797-0013-2130: SHE MAKES EFFORT AFTER EFFORT TREMBLING WITH EAGERNESS AND WHEN SHE FAILS TO REPRODUCE WHAT SHE SEES SHE WORKS HERSELF INTO A FRENZY OF GRIEF AND DISAPPOINTMENT +4992-41797-0014-2131: WHEN SHE COULD NOT MAKE A RABBIT OR A BIRD LOOK REAL ON PAPER SHE SEARCHED IN HER FATHER'S BOOKS FOR PICTURES OF ITS BONES +4992-41797-0015-2132: CYRIL THERE MUST BE SOME BETTER WAY OF DOING I JUST DRAW THE OUTLINE OF AN ANIMAL AND THEN I PUT HAIRS OR FEATHERS ON IT THEY HAVE NO BODIES +4992-41797-0016-2133: THEY COULDN'T RUN (NOR->OR) MOVE THEY'RE JUST PASTEBOARD +4992-41797-0017-2134: HE WOULDN'T SEARCH SO DON'T WORRY REPLIED CYRIL QUIETLY AND THE TWO LOOKED AT EACH OTHER AND KNEW THAT IT WAS SO +4992-41797-0018-2135: THERE IN THE CEDAR HOLLOW THEN LIVED OLIVE LORD AN ANGRY RESENTFUL LITTLE CREATURE WEIGHED DOWN BY A FIERCE SENSE OF INJURY +4992-41797-0019-2136: (OLIVE'S->ALL OF HIS) MOURNFUL BLACK EYES MET NANCY'S SPARKLING BROWN ONES +4992-41797-0020-2137: NANCY'S CURLY CHESTNUT CROP SHONE IN THE SUN AND OLIVE'S THICK BLACK (PLAITS->PLATES) LOOKED BLACKER BY CONTRAST +4992-41797-0021-2138: (SHE'S->SHE IS) WONDERFUL MORE WONDERFUL (THAN->IN) ANYBODY WE'VE EVER SEEN ANYWHERE AND SHE DRAWS BETTER THAN THE TEACHER IN CHARLESTOWN +4992-41797-0022-2139: SHE'S OLDER THAN I AM BUT SO TINY AND SAD AND SHY THAT SHE SEEMS LIKE A CHILD +4992-41806-0000-2161: NATTY HARMON TRIED THE KITCHEN PUMP SECRETLY SEVERAL TIMES DURING THE EVENING FOR THE WATER HAD TO RUN UP HILL ALL THE WAY FROM THE WELL TO THE KITCHEN SINK AND HE BELIEVED THIS TO BE A CONTINUAL MIRACLE THAT MIGHT GIVE OUT AT ANY MOMENT +4992-41806-0001-2162: TO NIGHT THERE WAS NO NEED OF EXTRA HEAT AND THERE WERE GREAT CEREMONIES TO BE OBSERVED IN LIGHTING THE FIRES ON THE HEARTHSTONES +4992-41806-0002-2163: THEY BEGAN WITH THE ONE IN THE FAMILY SITTING ROOM COLONEL WHEELER RALPH THURSTON MISTER AND MISSUS BILL HARMON WITH (NATTY->NANNIE) AND (RUFUS->RUFFUS) MISTER AND MISSUS POPHAM WITH DIGBY AND (LALLIE->LILY) JOY ALL STANDING IN ADMIRING GROUPS AND THRILLING WITH DELIGHT AT THE ORDER OF EVENTS +4992-41806-0003-2164: KATHLEEN WAVED THE TORCH TO AND FRO AS SHE RECITED SOME BEAUTIFUL LINES WRITTEN FOR SOME SUCH PURPOSE AS THAT WHICH CALLED THEM TOGETHER TO 
NIGHT +4992-41806-0004-2165: (BURN->BURNE) FIRE BURN FLICKER FLICKER FLAME +4992-41806-0005-2166: NEXT CAME OLIVE'S TURN TO HELP IN THE CEREMONIES +4992-41806-0006-2167: RALPH THURSTON HAD FOUND A LINE OF LATIN FOR THEM IN HIS BELOVED (HORACE TIBI SPLENDET->HORNS TIBBY SPLENDID) FOCUS FOR YOU THE HEARTH FIRE SHINES +4992-41806-0007-2168: OLIVE HAD PAINTED THE MOTTO ON A LONG NARROW PANEL OF CANVAS AND GIVING IT TO MISTER POPHAM STOOD BY THE FIRESIDE WHILE HE DEFTLY FITTED IT INTO THE PLACE PREPARED FOR IT +4992-41806-0008-2169: (OLIVE->ALAP) HAS ANOTHER LOVELY GIFT FOR THE YELLOW HOUSE SAID MOTHER CAREY RISING AND TO CARRY OUT THE NEXT PART OF THE PROGRAMME WE SHALL HAVE TO GO IN PROCESSION UPSTAIRS TO MY BEDROOM +4992-41806-0009-2170: EXCLAIMED BILL HARMON TO HIS WIFE AS THEY WENT THROUGH THE LIGHTED HALL +4992-41806-0010-2171: AIN'T THEY THE GREATEST +4992-41806-0011-2172: MOTHER CAREY POURED COFFEE NANCY CHOCOLATE AND THE (OTHERS HELPED SERVE->OTHER SELF SERVED) THE SANDWICHES AND CAKE DOUGHNUTS AND TARTS +4992-41806-0012-2173: AT THAT MOMENT THE GENTLEMAN ENTERED BEARING A HUGE OBJECT CONCEALED BY A PIECE OF GREEN (FELT->FILT) +4992-41806-0013-2174: APPROACHING THE DINING TABLE HE CAREFULLY PLACED THE ARTICLE IN THE CENTRE AND REMOVED THE CLOTH +4992-41806-0014-2175: THINKS (I TO->OUT OF) MYSELF I NEVER SEEN ANYTHING (OSH POPHAM COULDN'T->I PAPA GOOD'N) MEND IF HE TOOK TIME ENOUGH AND GLUE ENOUGH SO I CARRIED THIS LITTLE FELLER HOME IN A BUSHEL BASKET ONE NIGHT LAST MONTH (AN->AND) I'VE SPENT ELEVEN (EVENIN'S PUTTIN->EVENINGS PUTTING) HIM TOGETHER +4992-41806-0015-2176: MISSUS HARMON THOUGHT HE SANG TOO MUCH AND TOLD HER (HUSBAND->HUSBA) PRIVATELY THAT IF HE WAS A CANARY BIRD SHE SHOULD WANT TO KEEP A TABLE COVER (OVER->OF) HIS (HEAD MOST OF->EDMOST TO) THE TIME BUT HE WAS IMMENSELY POPULAR WITH THE REST OF HIS AUDIENCE +4992-41806-0016-2177: THE FACE OF THE MAHOGANY SHONE WITH DELIGHT AND WHY NOT WHEN IT WAS DOING EVERYTHING ALMOST EVERYTHING WITHIN THE SCOPE OF A PIANO AND YET THE FAMILY HAD ENJOYED WEEKS OF GOOD NOURISHING MEALS ON WHAT HAD BEEN SAVED BY ITS EXERTIONS +4992-41806-0017-2178: WE SHUT OUR EYES THE FLOWERS BLOOM ON WE MURMUR BUT THE (CORN EARS->CORNIERS) FILL WE CHOOSE THE SHADOW BUT THE SUN THAT CASTS IT SHINES BEHIND US STILL +5105-28233-0000-1649: LENGTH OF SERVICE FOURTEEN YEARS THREE MONTHS AND FIVE DAYS +5105-28233-0001-1650: HE SEEMED BORN TO PLEASE WITHOUT BEING CONSCIOUS OF THE POWER HE POSSESSED +5105-28233-0002-1651: IT MUST BE OWNED AND NO ONE WAS MORE READY TO CONFESS IT THAN HIMSELF THAT HIS LITERARY ATTAINMENTS WERE BY NO MEANS OF A HIGH ORDER +5105-28233-0003-1652: WE DON'T (SPIN->SPEND) TOPS (IS->AS) A FAVORITE SAYING AMONGST ARTILLERY OFFICERS INDICATING THAT THEY DO NOT SHIRK THEIR DUTY BY FRIVOLOUS PURSUITS BUT IT MUST BE CONFESSED THAT SERVADAC BEING NATURALLY IDLE WAS VERY MUCH GIVEN TO SPINNING TOPS +5105-28233-0004-1653: ONCE (IN->AN) ACTION HE WAS LEADING A DETACHMENT OF INFANTRY THROUGH AN (INTRENCHMENT->ENTRENCHMENT) +5105-28233-0005-1654: SOMETIMES HE WOULD WANDER ON FOOT UPON THE SANDY SHORE AND SOMETIMES HE WOULD ENJOY A RIDE ALONG THE SUMMIT OF THE CLIFF ALTOGETHER BEING IN NO HURRY AT ALL TO BRING HIS TASK TO AN END +5105-28233-0006-1655: NO CATHEDRAL NOT EVEN BURGOS ITSELF COULD VIE WITH THE CHURCH AT (MONTMARTRE->MOUNT MARSHRA) +5105-28233-0007-1656: BEN ZOOF'S MOST AMBITIOUS DESIRE WAS TO INDUCE THE CAPTAIN TO GO WITH HIM AND END HIS DAYS IN HIS MUCH LOVED HOME AND SO INCESSANTLY WERE SERVADAC'S EARS BESIEGED WITH DESCRIPTIONS OF THE 
UNPARALLELED BEAUTIES AND ADVANTAGES OF THIS EIGHTEENTH (ARRONDISSEMENT->ARE UNDISSIMA) OF PARIS THAT HE COULD SCARCELY HEAR THE NAME OF (MONTMARTRE->MONTMARTRA) WITHOUT A CONSCIOUS THRILL OF AVERSION +5105-28233-0008-1657: WHEN A PRIVATE IN THE EIGHTH CAVALRY HE HAD BEEN ON THE POINT OF QUITTING THE ARMY AT TWENTY EIGHT YEARS OF AGE BUT UNEXPECTEDLY HE HAD BEEN APPOINTED ORDERLY TO CAPTAIN SERVADAC +5105-28233-0009-1658: THE BOND OF UNION THUS EFFECTED COULD NEVER BE SEVERED AND ALTHOUGH BEN (ZOOF'S->ZEF'S) ACHIEVEMENTS HAD FAIRLY EARNED HIM THE RIGHT OF RETIREMENT HE FIRMLY DECLINED ALL (HONORS->HONOURS) OR ANY PENSION THAT MIGHT PART HIM FROM HIS SUPERIOR OFFICER +5105-28233-0010-1659: (UNLIKE->I MAKE) HIS MASTER HE MADE NO PRETENSION TO ANY GIFT OF POETIC POWER BUT HIS INEXHAUSTIBLE MEMORY MADE HIM A LIVING ENCYCLOPAEDIA AND FOR HIS STOCK OF ANECDOTES AND TROOPER'S TALES HE WAS MATCHLESS +5105-28240-0000-1624: FAST AS HIS LEGS COULD CARRY HIM SERVADAC HAD MADE HIS WAY TO THE TOP OF THE CLIFF +5105-28240-0001-1625: IT WAS QUITE TRUE THAT A VESSEL WAS IN SIGHT HARDLY MORE THAN SIX MILES FROM THE SHORE BUT OWING TO THE INCREASE IN THE EARTH'S CONVEXITY AND THE CONSEQUENT LIMITATION OF THE RANGE OF VISION THE RIGGING OF THE TOPMASTS ALONE WAS VISIBLE ABOVE THE WATER +5105-28240-0002-1626: EXCLAIMED SERVADAC KEEPING HIS EYE UNMOVED AT HIS TELESCOPE +5105-28240-0003-1627: SHE IS UNDER (SAIL->SALE) BUT SHE IS COUNT TIMASCHEFF'S YACHT HE WAS RIGHT +5105-28240-0004-1628: IF THE COUNT WERE ON BOARD A STRANGE FATALITY WAS BRINGING HIM TO THE PRESENCE OF HIS RIVAL +5105-28240-0005-1629: HE RECKONED THEREFORE NOT ONLY UPON ASCERTAINING THE EXTENT OF THE LATE CATASTROPHE BUT UPON LEARNING ITS CAUSE +5105-28240-0006-1630: THE WIND BEING ADVERSE THE DOBRYNA DID NOT MAKE VERY RAPID PROGRESS BUT AS THE WEATHER IN SPITE OF A FEW CLOUDS REMAINED CALM AND THE SEA WAS QUITE SMOOTH SHE WAS ENABLED TO HOLD A STEADY COURSE +5105-28240-0007-1631: SERVADAC TOOK IT FOR GRANTED THAT THE DOBRYNA WAS ENDEAVORING TO PUT IN +5105-28240-0008-1632: A NARROW CHANNEL FORMED A PASSAGE THROUGH THE RIDGE OF ROCKS THAT PROTECTED IT FROM THE OPEN SEA AND WHICH EVEN IN THE ROUGHEST WEATHER WOULD (ENSURE->INSURE) THE CALMNESS OF ITS WATERS +5105-28240-0009-1633: SLIGHTLY CHANGING HER COURSE SHE FIRST STRUCK HER MAINSAIL AND IN ORDER TO FACILITATE THE MOVEMENTS OF HER HELMSMAN SOON CARRIED NOTHING BUT HER TWO TOPSAILS BRIGANTINE AND JIB +5105-28240-0010-1634: CAPTAIN SERVADAC HASTENED (TOWARDS->TOWARD) HIM +5105-28240-0011-1635: I LEFT YOU ON A CONTINENT AND HERE I HAVE THE HONOR OF FINDING YOU ON AN ISLAND +5105-28240-0012-1636: NEVER MIND NOW INTERPOSED THE CAPTAIN WE WILL TALK OF THAT BY AND BY +5105-28240-0013-1637: NOTHING MORE THAN YOU KNOW YOURSELF +5105-28240-0014-1638: ARE YOU CERTAIN THAT THIS IS THE MEDITERRANEAN +5105-28240-0015-1639: FOR SOME MOMENTS HE SEEMED PERFECTLY STUPEFIED (*->AND) THEN RECOVERING HIMSELF HE BEGAN TO OVERWHELM THE COUNT WITH A TORRENT OF QUESTIONS +5105-28240-0016-1640: TO ALL THESE INQUIRIES THE COUNT RESPONDED IN THE AFFIRMATIVE +5105-28240-0017-1641: SOME MYSTERIOUS FORCE SEEMED TO HAVE BROUGHT ABOUT A CONVULSION OF THE ELEMENTS +5105-28240-0018-1642: YOU WILL TAKE ME ON BOARD COUNT WILL YOU NOT +5105-28240-0019-1643: MY YACHT IS AT YOUR SERVICE SIR EVEN SHOULD YOU REQUIRE TO MAKE A TOUR (ROUND->AROUND) THE WORLD +5105-28240-0020-1644: THE COUNT SHOOK HIS HEAD +5105-28240-0021-1645: BEFORE STARTING IT WAS INDISPENSABLE THAT THE ENGINE OF THE DOBRYNA SHOULD BE REPAIRED TO SAIL UNDER CANVAS 
ONLY WOULD IN CONTRARY WINDS AND ROUGH SEAS BE BOTH TEDIOUS AND DIFFICULT +5105-28240-0022-1646: IT WAS ON THE LAST DAY OF JANUARY THAT THE REPAIRS OF THE SCHOONER WERE COMPLETED +5105-28240-0023-1647: A SLIGHT DIMINUTION IN THE EXCESSIVELY HIGH TEMPERATURE WHICH HAD PREVAILED FOR THE LAST FEW WEEKS WAS THE ONLY APPARENT CHANGE IN THE GENERAL ORDER OF THINGS BUT WHETHER THIS WAS TO BE ATTRIBUTED TO ANY ALTERATION IN THE EARTH'S ORBIT WAS A QUESTION WHICH WOULD STILL REQUIRE SEVERAL DAYS TO DECIDE +5105-28240-0024-1648: DOUBTS NOW AROSE AND SOME DISCUSSION FOLLOWED WHETHER OR NOT IT WAS DESIRABLE FOR BEN ZOOF TO ACCOMPANY HIS MASTER +5105-28241-0000-1604: HER SEA GOING QUALITIES WERE EXCELLENT AND WOULD HAVE AMPLY SUFFICED FOR A CIRCUMNAVIGATION OF THE GLOBE +5105-28241-0001-1605: AFTER AN APPRENTICESHIP ON A MERCHANT SHIP HE HAD ENTERED THE IMPERIAL NAVY AND HAD ALREADY REACHED THE RANK OF LIEUTENANT WHEN THE COUNT APPOINTED HIM TO THE CHARGE OF HIS OWN PRIVATE YACHT IN WHICH HE WAS ACCUSTOMED TO SPEND BY (FAR THE->FARTHER) GREATER PART OF HIS TIME THROUGHOUT THE WINTER GENERALLY CRUISING IN THE MEDITERRANEAN WHILST IN THE SUMMER HE VISITED MORE NORTHERN WATERS +5105-28241-0002-1606: THE LATE ASTOUNDING EVENTS HOWEVER HAD RENDERED PROCOPE MANIFESTLY UNEASY AND NOT THE LESS SO FROM HIS CONSCIOUSNESS THAT THE COUNT SECRETLY PARTOOK OF HIS OWN ANXIETY +5105-28241-0003-1607: STEAM UP AND CANVAS SPREAD THE SCHOONER STARTED EASTWARDS +5105-28241-0004-1608: ALTHOUGH ONLY A MODERATE BREEZE WAS BLOWING THE SEA WAS ROUGH A CIRCUMSTANCE TO BE ACCOUNTED FOR ONLY BY THE DIMINUTION IN THE FORCE OF THE EARTH'S ATTRACTION RENDERING THE LIQUID (PARTICLES->PARTICLE) SO BUOYANT THAT BY THE MERE EFFECT OF OSCILLATION THEY WERE CARRIED TO A HEIGHT THAT WAS QUITE UNPRECEDENTED +5105-28241-0005-1609: FOR A FEW MILES SHE FOLLOWED THE LINE HITHERTO PRESUMABLY OCCUPIED BY THE COAST OF ALGERIA BUT NO LAND APPEARED TO THE SOUTH +5105-28241-0006-1610: THE LOG AND THE COMPASS THEREFORE WERE ABLE TO BE CALLED UPON TO DO THE WORK OF THE SEXTANT WHICH HAD BECOME UTTERLY USELESS +5105-28241-0007-1611: (THERE IS->THERE'S) NO FEAR OF THAT SIR +5105-28241-0008-1612: (*->THAT) THE EARTH HAS UNDOUBTEDLY ENTERED UPON A NEW ORBIT BUT SHE IS NOT INCURRING ANY PROBABLE RISK OF BEING PRECIPITATED (ONTO->ON TO) THE SUN +5105-28241-0009-1613: AND WHAT DEMONSTRATION DO YOU OFFER ASKED SERVADAC EAGERLY THAT IT WILL NOT HAPPEN +5105-28241-0010-1614: OCEAN (REIGNED->RAINED) SUPREME +5105-28241-0011-1615: ALL THE IMAGES OF HIS PAST LIFE FLOATED UPON HIS MEMORY HIS THOUGHTS SPED AWAY TO HIS NATIVE FRANCE ONLY TO RETURN AGAIN TO WONDER WHETHER THE DEPTHS OF OCEAN WOULD REVEAL ANY TRACES OF THE ALGERIAN METROPOLIS +5105-28241-0012-1616: IS IT NOT IMPOSSIBLE HE MURMURED ALOUD THAT ANY CITY SHOULD DISAPPEAR SO COMPLETELY +5105-28241-0013-1617: WOULD NOT THE LOFTIEST EMINENCES OF THE CITY AT LEAST BE VISIBLE +5105-28241-0014-1618: ANOTHER CIRCUMSTANCE WAS MOST REMARKABLE +5105-28241-0015-1619: TO THE SURPRISE OF ALL AND ESPECIALLY OF LIEUTENANT PROCOPE THE LINE INDICATED A BOTTOM AT A NEARLY UNIFORM DEPTH OF FROM FOUR TO FIVE FATHOMS AND ALTHOUGH THE SOUNDING WAS PERSEVERED WITH CONTINUOUSLY FOR MORE THAN TWO HOURS OVER A CONSIDERABLE AREA THE DIFFERENCES OF LEVEL WERE INSIGNIFICANT NOT CORRESPONDING IN ANY DEGREE TO WHAT WOULD BE EXPECTED OVER THE SITE OF A CITY THAT HAD BEEN TERRACED LIKE THE SEATS OF AN (AMPHITHEATER->AMPHITHEATRE) +5105-28241-0016-1620: YOU MUST SEE LIEUTENANT I SHOULD THINK THAT WE ARE NOT SO NEAR THE COAST OF ALGERIA AS 
YOU IMAGINED +5105-28241-0017-1621: AFTER PONDERING (AWHILE->A WHILE) HE SAID IF WE WERE FARTHER AWAY I SHOULD EXPECT TO FIND A DEPTH OF TWO OR THREE HUNDRED FATHOMS INSTEAD OF FIVE FATHOMS FIVE FATHOMS +5105-28241-0018-1622: ITS DEPTH REMAINED INVARIABLE STILL FOUR OR AT MOST FIVE FATHOMS AND ALTHOUGH ITS BOTTOM WAS ASSIDUOUSLY DREDGED IT WAS ONLY TO PROVE IT BARREN OF MARINE PRODUCTION OF ANY TYPE +5105-28241-0019-1623: NOTHING WAS TO BE DONE BUT TO PUT ABOUT AND RETURN (IN->AND) DISAPPOINTMENT (TOWARDS->TOWARD) THE NORTH +5142-33396-0000-898: AT ANOTHER TIME (HARALD->HAROLD) ASKED +5142-33396-0001-899: WHAT IS YOUR COUNTRY OLAF HAVE YOU ALWAYS BEEN A THRALL THE THRALL'S EYES FLASHED +5142-33396-0002-900: TWO HUNDRED WARRIORS FEASTED IN HIS HALL AND FOLLOWED HIM TO BATTLE +5142-33396-0003-901: THE REST OF YOU OFF A VIKING HE HAD THREE SHIPS +5142-33396-0004-902: THESE HE GAVE TO THREE OF MY BROTHERS +5142-33396-0005-903: BUT I STAYED THAT SPRING AND BUILT ME A BOAT +5142-33396-0006-904: I MADE HER (FOR ONLY->FALLING) TWENTY OARS BECAUSE I THOUGHT FEW MEN WOULD FOLLOW ME FOR I WAS YOUNG FIFTEEN YEARS OLD +5142-33396-0007-905: AT THE PROW I CARVED THE HEAD WITH OPEN MOUTH AND FORKED TONGUE THRUST OUT +5142-33396-0008-906: I PAINTED THE EYES RED FOR ANGER +5142-33396-0009-907: THERE STAND SO I SAID AND GLARE AND HISS AT MY FOES +5142-33396-0010-908: IN THE STERN I (CURVED->CARVED) THE TAIL UP ALMOST AS HIGH AS THE HEAD +5142-33396-0011-909: THERE SHE SAT ON THE ROLLERS AS FAIR A SHIP AS I EVER SAW +5142-33396-0012-910: THEN I WILL GET ME A FARM AND (WILL WINTER->WE'LL WINNER) IN THAT LAND NOW WHO WILL FOLLOW ME +5142-33396-0013-911: HE IS BUT A BOY THE (MEN->MAN) SAID +5142-33396-0014-912: THIRTY MEN ONE AFTER ANOTHER RAISED THEIR HORNS AND SAID +5142-33396-0015-913: AS OUR BOAT FLASHED DOWN THE ROLLERS INTO THE WATER I MADE THIS SONG AND SANG IT +5142-33396-0016-914: SO WE HARRIED THE COAST OF NORWAY +5142-33396-0017-915: WE ATE (AT->IT) MANY MEN'S TABLES UNINVITED +5142-33396-0018-916: (MY->I) DRAGON'S BELLY IS NEVER FULL AND ON BOARD WENT THE GOLD +5142-33396-0019-917: OH IT IS BETTER TO LIVE ON THE SEA AND LET OTHER MEN RAISE YOUR CROPS AND COOK YOUR MEALS +5142-33396-0020-918: A HOUSE SMELLS OF SMOKE A (SHIP SMELLS->SHIP'S MILLS) OF FROLIC +5142-33396-0021-919: UP AND DOWN THE WATER WE WENT TO GET MUCH WEALTH AND MUCH FROLIC +5142-33396-0022-920: WHAT (OF->IS) THE FARM OLAF NOT YET I ANSWERED VIKING IS BETTER FOR SUMMER +5142-33396-0023-921: IT WAS SO DARK THAT I COULD SEE NOTHING BUT A FEW SPARKS ON THE HEARTH +5142-33396-0024-922: I STOOD WITH MY BACK TO THE WALL FOR I WANTED NO SWORD REACHING OUT OF THE DARK FOR ME +5142-33396-0025-923: COME COME I CALLED WHEN NO ONE OBEYED A FIRE +5142-33396-0026-924: MY MEN LAUGHED YES A STINGY (HOST->HOSE) +5142-33396-0027-925: HE ACTS AS THOUGH HE (HAD->IS) NOT EXPECTED US +5142-33396-0028-926: ON A BENCH IN A FAR CORNER WERE A DOZEN PEOPLE HUDDLED TOGETHER +5142-33396-0029-927: BRING IN THE TABLE WE ARE HUNGRY +5142-33396-0030-928: THE THRALLS WERE (BRINGING->RINGING) IN A GREAT POT OF MEAT +5142-33396-0031-929: THEY SET UP A CRANE OVER THE FIRE AND HUNG THE POT UPON IT AND WE SAT AND WATCHED IT BOIL WHILE WE JOKED AT LAST THE SUPPER BEGAN +5142-33396-0032-930: THE FARMER SAT GLOOMILY ON THE BENCH AND WOULD NOT EAT AND YOU CANNOT WONDER FOR HE SAW US PUTTING POTFULS OF HIS GOOD BEEF AND (BASKET LOADS->BASCULADES) OF BREAD (INTO->AND) OUR BIG MOUTHS +5142-33396-0033-931: YOU WOULD NOT EAT WITH US YOU CANNOT SAY NO TO HALF OF MY ALE I DRINK THIS 
TO YOUR HEALTH +5142-33396-0034-932: THEN I DRANK HALF OF THE HORNFUL AND (SENT->SET) THE REST ACROSS THE FIRE TO THE FARMER HE TOOK IT AND SMILED SAYING +5142-33396-0035-933: DID YOU EVER HAVE SUCH A LORDLY GUEST BEFORE I WENT ON +5142-33396-0036-934: SO I WILL GIVE OUT THIS LAW THAT MY MEN SHALL NEVER LEAVE YOU ALONE +5142-33396-0037-935: (HAKON->HAWKIN) THERE SHALL BE YOUR CONSTANT COMPANION FRIEND FARMER +5142-33396-0038-936: HE SHALL NOT LEAVE YOU DAY OR NIGHT WHETHER YOU ARE WORKING OR PLAYING OR SLEEPING +5142-33396-0039-937: I (NAMED->NAME) NINE OTHERS AND SAID +5142-33396-0040-938: AND THESE SHALL FOLLOW YOUR THRALLS IN THE SAME WAY +5142-33396-0041-939: SO I SET GUARDS OVER EVERY ONE IN THAT HOUSE +5142-33396-0042-940: SO NO TALES GOT OUT TO THE NEIGHBORS BESIDES IT WAS A LONELY PLACE AND BY GOOD LUCK NO ONE CAME THAT WAY +5142-33396-0043-941: THEIR EYES DANCED BIG (THORLEIF->TORE LEAF) STOOD UP AND STRETCHED HIMSELF +5142-33396-0044-942: (I AM->I'M) STIFF WITH LONG SITTING HE SAID I ITCH FOR A FIGHT I TURNED TO THE FARMER +5142-33396-0045-943: THIS IS OUR LAST FEAST WITH YOU I SAID +5142-33396-0046-944: BY THE BEARD OF ODIN I CRIED YOU HAVE TAKEN OUR JOKE LIKE A MAN +5142-33396-0047-945: MY MEN POUNDED THE TABLE WITH THEIR FISTS +5142-33396-0048-946: BY THE HAMMER (OF THOR->AUTHOR) SHOUTED GRIM (HERE->THERE) IS NO STINGY COWARD +5142-33396-0049-947: HERE FRIEND TAKE IT AND HE THRUST IT INTO THE FARMER'S HAND +5142-33396-0050-948: MAY YOU DRINK (HEART'S EASE->HEARTSEASE) FROM IT FOR MANY YEARS +5142-33396-0051-949: AND WITH IT I LEAVE YOU A NAME (SIF->SIFT) THE FRIENDLY I SHALL HOPE TO DRINK WITH YOU (SOMETIME->SOME TIME) IN VALHALLA +5142-33396-0052-950: HERE IS A RING FOR (SIF->SIFT) THE FRIENDLY AND HERE IS A BRACELET (*->AND) A SWORD WOULD NOT BE ASHAMED TO HANG AT YOUR SIDE +5142-33396-0053-951: I TOOK FIVE GREAT BRACELETS OF GOLD FROM OUR TREASURE CHEST AND GAVE THEM TO HIM +5142-33396-0054-952: THAT IS THE BEST WAY TO DECIDE FOR THE SPEAR WILL ALWAYS POINT SOMEWHERE AND ONE THING IS AS GOOD AS ANOTHER +5142-33396-0055-953: THAT TIME IT POINTED US INTO YOUR FATHER'S SHIPS +5142-33396-0056-954: HERE THEY SAID IS A RASCAL WHO HAS BEEN HARRYING OUR COASTS +5142-33396-0057-955: WE SUNK HIS SHIP AND MEN BUT HIM WE BROUGHT TO YOU +5142-33396-0058-956: A ROBBER VIKING SAID THE KING AND (*->HE) SCOWLED AT ME +5142-33396-0059-957: YES AND WITH ALL YOUR FINGERS IT TOOK YOU A YEAR TO CATCH ME THE KING FROWNED MORE ANGRILY +5142-33396-0060-958: TAKE HIM OUT (THORKEL->TORQUAL) AND LET HIM TASTE YOUR SWORD +5142-33396-0061-959: YOUR MOTHER THE QUEEN WAS STANDING BY +5142-33396-0062-960: NOW SHE PUT HER HAND ON HIS ARM AND SMILED AND SAID +5142-33396-0063-961: AND WOULD HE NOT BE A GOOD GIFT FOR OUR BABY +5142-33396-0064-962: YOUR FATHER THOUGHT A MOMENT (THEN->AND) LOOKED AT YOUR MOTHER AND SMILED +5142-33396-0065-963: SOFT HEART HE SAID GENTLY TO HER THEN TO (THORKEL->TORCOAL) WELL LET HIM GO (THORKEL->TORKLE) +5142-33396-0066-964: THEN HE TURNED TO ME AGAIN FROWNING +5142-33396-0067-965: BUT YOUNG SHARP TONGUE NOW THAT (WE HAVE->WE'VE) CAUGHT YOU (WE->*) WILL PUT YOU INTO A TRAP THAT YOU CANNOT GET OUT OF +5142-33396-0068-966: SO I LIVED AND NOW AM YOUR TOOTH THRALL WELL IT IS THE LUCK OF WAR +5142-36377-0000-870: IT WAS ONE OF THE MASTERLY AND CHARMING STORIES OF (DUMAS->DE MAU) THE ELDER +5142-36377-0001-871: IN FIVE MINUTES I WAS IN A NEW WORLD AND MY MELANCHOLY ROOM WAS FULL OF THE LIVELIEST FRENCH COMPANY +5142-36377-0002-872: THE SOUND OF AN IMPERATIVE AND UNCOMPROMISING BELL 
RECALLED ME IN DUE TIME TO THE REGIONS OF REALITY +5142-36377-0003-873: AMBROSE MET ME AT THE BOTTOM OF THE STAIRS AND SHOWED ME THE WAY TO THE SUPPER ROOM +5142-36377-0004-874: SHE SIGNED TO ME WITH A GHOSTLY SOLEMNITY TO TAKE THE VACANT PLACE ON THE LEFT OF HER FATHER +5142-36377-0005-875: THE DOOR OPENED AGAIN WHILE I WAS STILL STUDYING THE TWO BROTHERS WITHOUT I HONESTLY CONFESS BEING VERY FAVORABLY IMPRESSED BY EITHER OF THEM +5142-36377-0006-876: A NEW MEMBER OF THE FAMILY CIRCLE WHO INSTANTLY ATTRACTED MY ATTENTION ENTERED THE ROOM +5142-36377-0007-877: A LITTLE CRACKED THAT IN THE POPULAR PHRASE WAS MY IMPRESSION OF THE STRANGER WHO NOW MADE HIS APPEARANCE IN THE SUPPER ROOM +5142-36377-0008-878: MISTER (MEADOWCROFT->MEDICROFT) THE ELDER HAVING NOT SPOKEN ONE WORD THUS FAR HIMSELF INTRODUCED THE NEWCOMER TO ME WITH A SIDE GLANCE AT HIS SONS WHICH HAD SOMETHING LIKE DEFIANCE IN IT A GLANCE WHICH AS I WAS SORRY TO NOTICE WAS RETURNED WITH THE DEFIANCE ON THEIR SIDE BY THE TWO YOUNG MEN +5142-36377-0009-879: PHILIP (LEFRANK->LE FRANK) THIS IS MY OVERLOOKER MISTER (JAGO->YAGO) SAID THE OLD MAN FORMALLY PRESENTING US +5142-36377-0010-880: HE IS NOT WELL HE HAS COME OVER THE OCEAN FOR REST AND (CHANGE OF->CHANGES) SCENE +5142-36377-0011-881: (MISTER JAGO->THIS GIAGO) IS AN AMERICAN PHILIP +5142-36377-0012-882: MAKE ACQUAINTANCE WITH (MISTER JAGO->MISS GIAGO) SIT TOGETHER +5142-36377-0013-883: THEY POINTEDLY DREW BACK FROM JOHN (JAGO->YAGO) AS HE APPROACHED THE EMPTY CHAIR NEXT (TO->*) ME AND MOVED ROUND TO THE OPPOSITE SIDE OF THE TABLE +5142-36377-0014-884: A PRETTY GIRL AND SO FAR AS I COULD JUDGE BY APPEARANCES A GOOD GIRL TOO DESCRIBING HER GENERALLY I MAY SAY THAT SHE HAD A SMALL HEAD WELL CARRIED AND WELL SET ON HER SHOULDERS BRIGHT GRAY EYES THAT LOOKED AT YOU HONESTLY AND MEANT WHAT THEY LOOKED A TRIM SLIGHT LITTLE FIGURE TOO SLIGHT FOR OUR ENGLISH NOTIONS OF BEAUTY A STRONG AMERICAN ACCENT AND A RARE THING IN AMERICA A PLEASANTLY TONED VOICE WHICH MADE THE ACCENT AGREEABLE TO ENGLISH EARS +5142-36377-0015-885: OUR FIRST IMPRESSIONS OF PEOPLE ARE IN NINE CASES OUT OF TEN THE RIGHT IMPRESSIONS +5142-36377-0016-886: FOR ONCE IN A WAY I PROVED A TRUE PROPHET +5142-36377-0017-887: THE ONLY CHEERFUL CONVERSATION WAS THE CONVERSATION ACROSS THE TABLE BETWEEN NAOMI AND ME +5142-36377-0018-888: HE LOOKED UP (AT NAOMI->AND NOW ON ME) DOUBTINGLY FROM HIS PLATE AND LOOKED DOWN AGAIN SLOWLY WITH A FROWN +5142-36377-0019-889: WHEN I ADDRESSED HIM HE ANSWERED CONSTRAINEDLY +5142-36377-0020-890: A MORE DREARY AND MORE DISUNITED FAMILY PARTY I NEVER SAT AT THE TABLE WITH +5142-36377-0021-891: ENVY HATRED MALICE AND UNCHARITABLENESS ARE NEVER SO ESSENTIALLY DETESTABLE TO MY MIND AS WHEN THEY ARE ANIMATED BY (A->THE) SENSE OF PROPRIETY AND WORK UNDER THE SURFACE BUT FOR MY INTEREST (IN NAOMI->TO NAY OWE ME) AND MY OTHER (INTEREST->INTERESTS) IN THE LITTLE LOVE LOOKS WHICH I NOW AND THEN SURPRISED PASSING BETWEEN HER AND AMBROSE I SHOULD NEVER HAVE SAT THROUGH THAT SUPPER +5142-36377-0022-892: I WISH YOU GOOD NIGHT SHE LAID HER BONY HANDS ON THE BACK OF MISTER MEADOWCROFT'S INVALID CHAIR CUT HIM SHORT IN HIS FAREWELL SALUTATION TO ME AND WHEELED HIM OUT TO HIS BED AS IF SHE WERE WHEELING HIM OUT TO HIS GRAVE +5142-36377-0023-893: YOU WERE QUITE RIGHT TO SAY NO AMBROSE BEGAN NEVER SMOKE WITH (JOHN JAGO->JOHNNIAGO) HIS CIGARS WILL POISON YOU +5142-36377-0024-894: (NAOMI->THEY ONLY) SHOOK HER FOREFINGER REPROACHFULLY AT THEM AS IF THE TWO STURDY YOUNG FARMERS HAD BEEN TWO CHILDREN 
+5142-36377-0025-895: SILAS SLUNK AWAY WITHOUT A WORD OF PROTEST AMBROSE STOOD HIS GROUND EVIDENTLY BENT ON MAKING HIS PEACE (WITH->WHEN) NAOMI BEFORE HE LEFT HER SEEING THAT I WAS IN THE WAY I WALKED ASIDE TOWARD A GLASS DOOR AT THE LOWER END OF THE ROOM +5142-36586-0000-967: IT IS MANIFEST THAT MAN IS NOW SUBJECT TO MUCH VARIABILITY +5142-36586-0001-968: SO IT IS WITH THE LOWER ANIMALS +5142-36586-0002-969: THE VARIABILITY OF MULTIPLE PARTS +5142-36586-0003-970: BUT THIS SUBJECT WILL BE MORE PROPERLY DISCUSSED WHEN WE TREAT OF THE DIFFERENT RACES OF MANKIND +5142-36586-0004-971: EFFECTS OF THE INCREASED USE AND DISUSE OF PARTS +5142-36600-0000-896: CHAPTER SEVEN ON THE RACES OF MAN +5142-36600-0001-897: IN DETERMINING WHETHER TWO OR MORE ALLIED FORMS OUGHT TO BE RANKED (AS->A) SPECIES OR VARIETIES NATURALISTS ARE PRACTICALLY GUIDED BY THE FOLLOWING CONSIDERATIONS NAMELY THE AMOUNT OF DIFFERENCE BETWEEN THEM AND WHETHER SUCH (DIFFERENCES->DIFFERENCE IS) RELATE TO FEW OR MANY POINTS OF STRUCTURE AND WHETHER THEY ARE OF PHYSIOLOGICAL IMPORTANCE BUT MORE ESPECIALLY WHETHER THEY ARE CONSTANT +5639-40744-0000-137: ELEVEN O'CLOCK HAD STRUCK IT WAS A FINE CLEAR NIGHT (THEY->THERE) WERE THE ONLY PERSONS ON THE ROAD AND THEY SAUNTERED LEISURELY ALONG TO AVOID PAYING THE PRICE OF FATIGUE FOR THE RECREATION PROVIDED FOR THE TOLEDANS IN (THEIR->THE) VALLEY OR ON THE BANKS OF THEIR RIVER +5639-40744-0001-138: SECURE AS HE THOUGHT IN THE CAREFUL ADMINISTRATION OF JUSTICE IN THAT CITY AND THE CHARACTER OF ITS WELL DISPOSED INHABITANTS THE GOOD HIDALGO WAS FAR FROM THINKING THAT ANY DISASTER COULD (BEFAL->BEFALL) HIS FAMILY +5639-40744-0002-139: (RODOLFO->RUDOLPHO) AND HIS COMPANIONS WITH THEIR FACES MUFFLED IN THEIR CLOAKS STARED RUDELY AND INSOLENTLY AT THE MOTHER THE DAUGHTER AND THE SERVANT MAID +5639-40744-0003-140: IN A MOMENT HE COMMUNICATED HIS THOUGHTS TO HIS COMPANIONS AND IN THE NEXT MOMENT THEY RESOLVED TO TURN BACK AND CARRY HER OFF TO PLEASE (RODOLFO->RUDOLPHO) FOR THE RICH WHO ARE OPEN HANDED ALWAYS FIND (PARASITES->PARRICIDES) READY TO ENCOURAGE THEIR BAD PROPENSITIES AND THUS TO CONCEIVE THIS WICKED DESIGN TO COMMUNICATE IT APPROVE IT RESOLVE ON RAVISHING LEOCADIA AND TO CARRY THAT DESIGN INTO EFFECT WAS THE WORK OF A MOMENT +5639-40744-0004-141: THEY DREW THEIR SWORDS HID THEIR FACES IN THE FLAPS OF THEIR CLOAKS TURNED BACK AND SOON CAME IN FRONT OF THE LITTLE PARTY WHO HAD NOT YET DONE GIVING THANKS TO GOD FOR THEIR ESCAPE FROM THOSE AUDACIOUS MEN +5639-40744-0005-142: FINALLY THE ONE PARTY WENT OFF EXULTING AND THE OTHER WAS LEFT IN DESOLATION AND WOE +5639-40744-0006-143: (RODOLFO->RODOLPHO) ARRIVED AT HIS OWN HOUSE WITHOUT ANY IMPEDIMENT (AND LEOCADIA'S->A LEOCADIUS) PARENTS REACHED THEIRS HEART BROKEN AND DESPAIRING +5639-40744-0007-144: MEANWHILE (RODOLFO->RUDOLPHO) HAD (LEOCADIA->LOCALIA) SAFE IN HIS CUSTODY AND IN HIS OWN APARTMENT +5639-40744-0008-145: WHO TOUCHES ME AM I IN BED +5639-40744-0009-146: MOTHER DEAR FATHER DO YOU HEAR ME +5639-40744-0010-147: IT IS THE ONLY AMENDS I ASK OF YOU FOR THE WRONG YOU HAVE DONE ME +5639-40744-0011-148: SHE FOUND THE DOOR BUT IT WAS LOCKED OUTSIDE +5639-40744-0012-149: SHE SUCCEEDED IN OPENING THE WINDOW AND THE MOONLIGHT SHONE IN SO BRIGHTLY THAT SHE COULD DISTINGUISH THE (COLOUR->COLOR) OF SOME DAMASK (HANGINGS->HANGING) IN THE ROOM +5639-40744-0013-150: SHE SAW THAT THE BED WAS GILDED AND SO RICH THAT IT SEEMED THAT OF A PRINCE (*->THE) RATHER (THAN->THAT) OF A PRIVATE GENTLEMAN +5639-40744-0014-151: AMONG OTHER THINGS ON WHICH 
(SHE->HE) CAST HER EYES WAS A SMALL CRUCIFIX OF SOLID SILVER STANDING ON A CABINET NEAR THE WINDOW +5639-40744-0015-152: THIS PERSON WAS (RODOLFO->RIDOLPHO) WHO THOUGH HE HAD GONE TO LOOK FOR HIS FRIENDS HAD CHANGED HIS MIND IN THAT RESPECT (NOT THINKING->NOTHING) IT ADVISABLE TO ACQUAINT THEM WITH WHAT HAD PASSED BETWEEN HIM AND THE GIRL +5639-40744-0016-153: ON THE CONTRARY HE RESOLVED TO TELL THEM THAT REPENTING OF HIS VIOLENCE AND MOVED BY (HER->A) TEARS HE HAD ONLY CARRIED HER HALF WAY TOWARDS HIS HOUSE AND THEN LET HER GO +5639-40744-0017-154: CHOKING WITH EMOTION (LEOCADI->LUCADIA) MADE A SIGN TO HER PARENTS THAT SHE WISHED TO BE ALONE WITH THEM +5639-40744-0018-155: THAT WOULD BE VERY WELL MY CHILD REPLIED HER FATHER IF YOUR PLAN WERE NOT LIABLE TO BE FRUSTRATED BY ORDINARY CUNNING BUT NO DOUBT THIS IMAGE (HAS->HAD) BEEN ALREADY MISSED BY ITS OWNER AND HE WILL HAVE SET IT DOWN FOR CERTAIN THAT IT WAS TAKEN OUT OF THE ROOM BY THE PERSON HE LOCKED UP THERE +5639-40744-0019-156: WHAT YOU HAD BEST DO MY CHILD IS TO KEEP IT AND PRAY TO IT THAT SINCE IT WAS A WITNESS TO YOUR UNDOING IT WILL DEIGN TO VINDICATE YOUR CAUSE BY ITS RIGHTEOUS JUDGMENT +5639-40744-0020-157: THUS DID (THIS->THE) HUMANE AND RIGHT MINDED FATHER COMFORT HIS UNHAPPY DAUGHTER AND HER MOTHER EMBRACING HER AGAIN DID ALL SHE COULD TO SOOTHE (HER->A) FEELINGS +5639-40744-0021-158: SHE MEANWHILE PASSED HER LIFE WITH HER PARENTS IN THE STRICTEST RETIREMENT NEVER LETTING HERSELF BE SEEN BUT SHUNNING EVERY EYE LEST IT SHOULD READ HER MISFORTUNE IN HER FACE +5639-40744-0022-159: TIME ROLLED ON THE HOUR OF HER DELIVERY ARRIVED IT TOOK PLACE IN THE UTMOST SECRECY HER MOTHER TAKING UPON HER THE OFFICE OF MIDWIFE (AND->AS) SHE GAVE BIRTH TO A SON ONE OF THE MOST BEAUTIFUL EVER SEEN +5639-40744-0023-160: (WHEN->AND) THE BOY WALKED THROUGH THE STREETS BLESSINGS WERE SHOWERED UPON HIM BY ALL WHO SAW HIM (BLESSINGS->BLESSING) UPON HIS BEAUTY UPON THE MOTHER THAT BORE HIM UPON THE FATHER THAT BEGOT HIM UPON THOSE WHO BROUGHT HIM UP SO WELL +5639-40744-0024-161: ONE DAY WHEN THE BOY WAS SENT BY HIS GRANDFATHER WITH A MESSAGE TO A RELATION HE PASSED ALONG A STREET IN WHICH THERE WAS A GREAT CONCOURSE OF HORSEMEN +5639-40744-0025-162: THE BED SHE TOO WELL REMEMBERED WAS THERE AND ABOVE ALL THE CABINET ON WHICH HAD STOOD THE IMAGE SHE HAD TAKEN AWAY WAS STILL ON THE SAME SPOT +5639-40744-0026-163: (LUIS->LOUIS) WAS OUT OF DANGER IN A FORTNIGHT IN A MONTH HE ROSE FROM HIS BED AND (DURING->DREWING) ALL THAT TIME HE WAS VISITED DAILY BY HIS MOTHER AND GRANDMOTHER AND TREATED BY THE MASTER AND MISTRESS OF THE HOUSE AS IF HE WAS THEIR OWN CHILD +5639-40744-0027-164: THUS SAYING AND PRESSING THE CRUCIFIX TO HER BREAST SHE FELL FAINTING INTO THE ARMS OF DONA (ESTAFANIA->ESTAFFANIA) WHO AS A GENTLEWOMAN TO WHOSE SEX PITY IS (AS->THE) NATURAL AS CRUELTY (IS->AS) TO MAN INSTANTLY PRESSED HER LIPS TO THOSE OF THE FAINTING GIRL SHEDDING OVER HER SO MANY TEARS THAT THERE NEEDED NO OTHER SPRINKLING OF WATER TO RECOVER (LEOCADIA->LOCATIA) FROM HER SWOON +5639-40744-0028-165: I HAVE GREAT THINGS TO TELL YOU SENOR SAID (DONA ESTAFANIA->DORNEST DA FANIA) TO HER HUSBAND THE CREAM AND SUBSTANCE OF WHICH IS THIS THE FAINTING GIRL BEFORE YOU IS YOUR DAUGHTER AND (THAT->THE) BOY IS YOUR GRANDSON +5639-40744-0029-166: THIS TRUTH WHICH I HAVE LEARNED FROM HER LIPS IS CONFIRMED BY HIS FACE IN WHICH WE HAVE BOTH BEHELD THAT OF OUR SON +5639-40744-0030-167: JUST THEN (LEOCADIA->LEOKADIA) CAME TO HERSELF AND EMBRACING THE CROSS SEEMED CHANGED INTO A SEA OF TEARS AND 
THE GENTLEMAN (REMAINED->REMAINING) IN UTTER BEWILDERMENT UNTIL HIS WIFE HAD REPEATED TO HIM FROM BEGINNING TO END (LEOCADIA'S->LEUCEDES) WHOLE STORY AND HE BELIEVED IT THROUGH THE BLESSED DISPENSATION OF HEAVEN WHICH HAD CONFIRMED IT BY SO MANY CONVINCING TESTIMONIES +5639-40744-0031-168: SO PERSUASIVE WERE HER ENTREATIES AND SO STRONG HER ASSURANCES THAT NO HARM WHATEVER COULD RESULT TO THEM FROM THE INFORMATION SHE SOUGHT THEY WERE INDUCED TO CONFESS THAT ONE SUMMER'S NIGHT THE SAME SHE HAD MENTIONED THEMSELVES (AND->IN) ANOTHER FRIEND BEING OUT ON A STROLL WITH (RODOLFO->RODOLPHO) THEY HAD BEEN CONCERNED IN THE (ABDUCTION->ADOCTION) OF A GIRL WHOM (RODOLFO->UDOLPHO) CARRIED OFF WHILST THE REST OF THEM DETAINED HER FAMILY WHO MADE A GREAT OUTCRY AND WOULD HAVE DEFENDED HER IF THEY COULD +5639-40744-0032-169: FOR GOD'S SAKE MY LADY MOTHER GIVE ME A WIFE WHO WOULD BE AN AGREEABLE COMPANION NOT ONE WHO WILL DISGUST ME SO THAT WE MAY BOTH BEAR EVENLY AND WITH MUTUAL GOOD WILL THE YOKE IMPOSED ON US BY HEAVEN INSTEAD OF PULLING THIS WAY AND THAT WAY AND FRETTING EACH OTHER TO DEATH +5639-40744-0033-170: HER BEARING WAS GRACEFUL AND ANIMATED SHE LED HER SON BY THE HAND AND BEFORE HER WALKED TWO MAIDS WITH WAX LIGHTS AND SILVER CANDLESTICKS +5639-40744-0034-171: ALL ROSE TO DO HER REVERENCE AS IF SOMETHING FROM HEAVEN HAD MIRACULOUSLY APPEARED BEFORE THEM BUT GAZING ON HER ENTRANCED WITH ADMIRATION NOT ONE OF THEM WAS ABLE TO ADDRESS A SINGLE WORD TO HER +5639-40744-0035-172: SHE REFLECTED HOW NEAR SHE STOOD TO THE CRISIS WHICH WAS TO DETERMINE WHETHER SHE WAS TO BE BLESSED OR UNHAPPY FOR EVER AND RACKED BY THE INTENSITY OF HER EMOTIONS SHE SUDDENLY CHANGED (COLOUR->COLOR) HER HEAD DROPPED AND SHE FELL FORWARD IN A SWOON INTO THE ARMS OF THE (DISMAYED ESTAFANIA->DISMAYEDESTAFHANIA) +5639-40744-0036-173: HIS MOTHER HAD LEFT HER TO HIM AS BEING HER DESTINED PROTECTOR BUT WHEN SHE SAW THAT HE TOO WAS INSENSIBLE SHE WAS NEAR MAKING A THIRD AND WOULD HAVE DONE SO HAD HE NOT COME TO HIMSELF +5639-40744-0037-174: KNOW THEN SON OF MY HEART THAT THIS FAINTING LADY IS YOUR REAL BRIDE I SAY REAL BECAUSE SHE IS THE ONE WHOM YOUR FATHER AND I HAVE CHOSEN FOR YOU AND (THE->A) PORTRAIT WAS A PRETENCE +5639-40744-0038-175: JUST AT (THE->A) MOMENT WHEN THE TEARS OF THE PITYING BEHOLDERS FLOWED FASTEST AND (THEIR->THERE) EJACULATIONS WERE MOST EXPRESSIVE OF DESPAIR (LEOCADIA->THE OCCASIONA) GAVE SIGNS OF RECOVERY AND BROUGHT BACK GLADNESS TO THE HEARTS OF ALL +5639-40744-0039-176: WHEN SHE CAME TO HER SENSES AND BLUSHING TO FIND HERSELF IN (RODOLFO'S->GODOLPH'S) ARMS WOULD HAVE DISENGAGED HERSELF NO SENORA HE SAID THAT MUST NOT BE STRIVE NOT TO WITHDRAW FROM THE ARMS OF HIM WHO HOLDS YOU IN HIS SOUL +5639-40744-0040-177: THIS WAS DONE FOR THE EVENT TOOK PLACE AT A TIME (WHEN->WITH) THE CONSENT OF THE PARTIES WAS SUFFICIENT FOR THE CELEBRATION OF A MARRIAGE WITHOUT ANY OF THE PRELIMINARY FORMALITIES WHICH ARE NOW SO PROPERLY REQUIRED +5639-40744-0041-178: NOR WAS (RODOLFO->RIDOLPHAL) LESS SURPRISED THAN THEY AND THE BETTER TO ASSURE HIMSELF OF SO WONDERFUL A FACT HE BEGGED (LEOCADIA->LOU KATYA) TO GIVE HIM SOME TOKEN WHICH SHOULD MAKE PERFECTLY CLEAR TO HIM THAT WHICH INDEED HE DID NOT DOUBT SINCE IT WAS AUTHENTICATED BY HIS PARENTS +5683-32865-0000-2483: YOU KNOW CAPTAIN LAKE +5683-32865-0001-2484: SAID LORD CHELFORD ADDRESSING ME +5683-32865-0002-2485: HE HAD HIS HAND UPON LAKE'S SHOULDER +5683-32865-0003-2486: THEY ARE COUSINS YOU KNOW WE ARE ALL COUSINS +5683-32865-0004-2487: WHATEVER LORD CHELFORD SAID MISS 
BRANDON RECEIVED IT VERY GRACIOUSLY AND EVEN WITH A MOMENTARY SMILE +5683-32865-0005-2488: BUT HER GREETING TO CAPTAIN (LAKE->LEEK) WAS MORE THAN USUALLY HAUGHTY AND FROZEN AND HER FEATURES I FANCIED PARTICULARLY PROUD AND PALE +5683-32865-0006-2489: AT DINNER LAKE WAS EASY AND AMUSING +5683-32865-0007-2490: (I'M->I AM) GLAD YOU LIKE IT SAYS (WYLDER->WILDER) CHUCKLING BENIGNANTLY ON IT OVER HIS SHOULDER +5683-32865-0008-2491: I BELIEVE I HAVE A LITTLE TASTE THAT WAY THOSE ARE ALL REAL YOU KNOW THOSE JEWELS +5683-32865-0009-2492: AND HE PLACED IT IN THAT GENTLEMAN'S FINGERS WHO NOW TOOK HIS TURN AT THE LAMP AND CONTEMPLATED THE LITTLE (PARALLELOGRAM->PARALLELLOGRAM) WITH A GLEAM OF SLY AMUSEMENT +5683-32865-0010-2493: I WAS THINKING IT'S VERY LIKE THE ACE OF HEARTS ANSWERED THE CAPTAIN SOFTLY SMILING ON +5683-32865-0011-2494: WHEREUPON LAKE LAUGHED QUIETLY STILL LOOKING ON THE ACE OF HEARTS WITH HIS SLY EYES +5683-32865-0012-2495: AND WYLDER LAUGHED TOO MORE SUDDENLY AND NOISILY THAN THE HUMOUR OF THE JOKE SEEMED QUITE TO CALL FOR AND GLANCED A GRIM LOOK FROM THE CORNERS OF HIS EYES ON LAKE BUT THE GALLANT CAPTAIN DID NOT SEEM TO PERCEIVE IT AND AFTER A FEW SECONDS MORE HE HANDED IT VERY INNOCENTLY BACK TO MISSUS DOROTHY ONLY REMARKING +5683-32865-0013-2496: DO YOU KNOW LAKE OH I REALLY CAN'T TELL BUT HE'LL SOON TIRE OF COUNTRY LIFE +5683-32865-0014-2497: HE'S NOT A MAN FOR COUNTRY QUARTERS +5683-32865-0015-2498: I HAD A HORRID DREAM ABOUT HIM LAST NIGHT THAT +5683-32865-0016-2499: OH I KNOW THAT'S (LORNE->LORN) BRANDON +5683-32865-0017-2500: ALL THE TIME HE WAS TALKING TO ME HIS ANGRY LITTLE EYES WERE FOLLOWING LAKE +5683-32866-0000-2527: MISS LAKE DECLINED THE CARRIAGE TO NIGHT +5683-32866-0001-2528: AND HE ADDED SOMETHING STILL LESS COMPLIMENTARY +5683-32866-0002-2529: BUT DON'T THESE VERY WISE THINGS SOMETIMES TURN OUT VERY FOOLISHLY +5683-32866-0003-2530: IN THE MEANTIME I HAD FORMED A NEW IDEA OF HER +5683-32866-0004-2531: BY THIS TIME LORD CHELFORD AND WYLDER RETURNED AND DISGUSTED RATHER WITH MYSELF I RUMINATED ON MY WANT OF (GENERAL SHIP->GENERALSHIP) +5683-32866-0005-2532: AND HE MADE A LITTLE DIP OF HIS CANE TOWARDS BRANDON HALL OVER HIS SHOULDER +5683-32866-0006-2533: YES SO THEY SAID BUT THAT WOULD I THINK HAVE BEEN WORSE +5683-32866-0007-2534: IF A FELLOW'S BEEN A LITTLE BIT WILD (HE'S->HE IS) BEELZEBUB AT ONCE +5683-32866-0008-2535: (BRACTON'S->BROCKTON'S) A VERY GOOD FELLOW I CAN ASSURE YOU +5683-32866-0009-2536: I DON'T KNOW (AND->ONE) CAN'T SAY HOW YOU (FINE->FIND) GENTLEMEN (DEFINE->TO FIND) WICKEDNESS ONLY AS AN OBSCURE FEMALE I SPEAK ACCORDING TO MY LIGHTS AND HE IS GENERALLY THOUGHT THE WICKEDEST MAN IN THIS COUNTY +5683-32866-0010-2537: WELL YOU KNOW RADIE WOMEN LIKE WICKED FELLOWS IT IS CONTRAST I SUPPOSE BUT THEY DO AND I'M SURE FROM WHAT BRACTON HAS SAID TO ME I KNOW HIM INTIMATELY THAT DORCAS LIKES HIM AND I CAN'T CONCEIVE WHY THEY ARE NOT MARRIED +5683-32866-0011-2538: THEIR WALK CONTINUED SILENT FOR THE GREATER PART NEITHER WAS QUITE SATISFIED WITH THE OTHER BUT RACHEL AT LAST SAID +5683-32866-0012-2539: NOW THAT'S IMPOSSIBLE RADIE FOR I REALLY DON'T THINK I ONCE THOUGHT OF HIM ALL THIS EVENING EXCEPT JUST WHILE WE WERE TALKING +5683-32866-0013-2540: THERE WAS A BRIGHT MOONLIGHT BROKEN BY THE SHADOWS OF OVERHANGING BOUGHS AND WITHERED LEAVES AND THE MOTTLED LIGHTS AND SHADOWS GLIDED ODDLY ACROSS HIS PALE FEATURES +5683-32866-0014-2541: DON'T INSULT ME STANLEY BY TALKING AGAIN AS YOU DID THIS MORNING +5683-32866-0015-2542: WHAT I SAY IS ALTOGETHER ON YOUR OWN 
ACCOUNT +5683-32866-0016-2543: MARK MY WORDS YOU'LL FIND HIM TOO STRONG FOR YOU (AYE->I) AND TOO DEEP +5683-32866-0017-2544: I AM VERY UNEASY ABOUT IT WHATEVER IT IS I CAN'T HELP IT +5683-32866-0018-2545: TO MY MIND THERE HAS ALWAYS BEEN SOMETHING INEXPRESSIBLY AWFUL IN FAMILY FEUDS +5683-32866-0019-2546: THE MYSTERY OF THEIR ORIGIN THEIR CAPACITY FOR EVOLVING LATENT FACULTIES OF CRIME AND THE (STEADY->STUDY) VITALITY WITH WHICH THEY (SURVIVE->SURVIVED) THE HEARSE AND SPEAK THEIR DEEP MOUTHED MALIGNITIES IN EVERY NEW BORN GENERATION HAVE ASSOCIATED THEM SOMEHOW IN MY MIND WITH (A->THE) SPELL OF LIFE EXCEEDING AND DISTINCT FROM HUMAN AND (A SPECIAL->ESPECIAL) SATANIC ACTION +5683-32866-0020-2547: THE FLOOR MORE THAN ANYTHING ELSE SHOWED THE GREAT AGE OF THE ROOM +5683-32866-0021-2548: MY BED WAS UNEXCEPTIONABLY COMFORTABLE BUT IN MY THEN MOOD I COULD HAVE WISHED IT A GREAT DEAL MORE MODERN +5683-32866-0022-2549: ITS CURTAINS WERE OF THICK AND FADED TAPESTRY +5683-32866-0023-2550: ALL THE FURNITURE BELONGED TO OTHER TIMES +5683-32866-0024-2551: I (SHAN'T->SHA'N'T) TROUBLE YOU ABOUT MY TRAIN OF THOUGHTS OR FANCIES BUT I BEGAN TO FEEL VERY LIKE A GENTLEMAN IN A GHOST STORY WATCHING EXPERIMENTALLY IN A HAUNTED CHAMBER +5683-32866-0025-2552: I DID NOT EVEN TAKE THE PRECAUTION OF SMOKING UP THE CHIMNEY +5683-32866-0026-2553: I BOLDLY LIGHTED MY (CHEROOT->TROUT) +5683-32866-0027-2554: A COLD BRIGHT MOON WAS SHINING WITH CLEAR SHARP LIGHTS AND SHADOWS +5683-32866-0028-2555: THE SOMBRE OLD TREES LIKE GIGANTIC HEARSE PLUMES BLACK AND AWFUL +5683-32866-0029-2556: SOMEHOW I HAD GROWN NERVOUS +5683-32866-0030-2557: A LITTLE BIT OF PLASTER TUMBLED DOWN THE CHIMNEY AND STARTLED ME CONFOUNDEDLY +5683-32879-0000-2501: IT WAS NOT VERY MUCH PAST ELEVEN THAT MORNING WHEN THE PONY CARRIAGE FROM BRANDON DREW UP BEFORE THE LITTLE GARDEN WICKET OF REDMAN'S FARM +5683-32879-0001-2502: (WELL->WHILE) SHE WAS BETTER THOUGH SHE HAD HAD A BAD NIGHT +5683-32879-0002-2503: SO THERE CAME A STEP AND A LITTLE RUSTLING OF FEMININE DRAPERIES THE SMALL DOOR OPENED AND RACHEL ENTERED WITH HER HAND EXTENDED AND A PALE SMILE OF WELCOME +5683-32879-0003-2504: WOMEN CAN HIDE THEIR PAIN BETTER THAN WE MEN AND BEAR IT BETTER TOO EXCEPT WHEN SHAME DROPS FIRE INTO THE DREADFUL CHALICE +5683-32879-0004-2505: BUT POOR RACHEL LAKE HAD MORE THAN THAT STOICAL HYPOCRISY WHICH ENABLES THE TORTURED SPIRITS OF HER SEX TO LIFT A PALE FACE THROUGH THE FLAMES AND SMILE +5683-32879-0005-2506: THIS TRANSIENT SPRING AND LIGHTING UP ARE BEAUTIFUL A GLAMOUR BEGUILING OUR SENSES +5683-32879-0006-2507: THERE WAS SOMETHING OF SWEETNESS AND FONDNESS IN HER TONES AND MANNER WHICH WAS NEW TO RACHEL AND COMFORTING AND SHE RETURNED THE GREETING AS KINDLY AND FELT MORE LIKE HER FORMER SELF +5683-32879-0007-2508: RACHEL'S PALE AND SHARPENED FEATURES AND DILATED EYE STRUCK HER WITH A PAINFUL SURPRISE +5683-32879-0008-2509: YOU HAVE BEEN SO ILL MY POOR RACHEL +5683-32879-0009-2510: ILL AND TROUBLED DEAR TROUBLED IN MIND AND MISERABLY NERVOUS +5683-32879-0010-2511: POOR RACHEL HER NATURE RECOILED FROM DECEIT AND SHE TOLD AT ALL EVENTS AS MUCH OF THE TRUTH AS SHE DARED +5683-32879-0011-2512: SHE SPOKE WITH A SUDDEN ENERGY WHICH PARTOOK (OF->A) FEAR AND PASSION AND FLUSHED HER THIN CHEEK AND MADE HER LANGUID EYES FLASH +5683-32879-0012-2513: THANK YOU (RACHEL->RACHAEL) MY COUSIN RACHEL MY ONLY FRIEND +5683-32879-0013-2514: CHELFORD HAD A NOTE FROM MISTER (WYLDER->WILDER) THIS MORNING ANOTHER NOTE HIS COMING DELAYED AND SOMETHING OF HIS HAVING TO SEE SOME PERSON WHO 
(IS->WAS) ABROAD CONTINUED DORCAS AFTER A LITTLE PAUSE +5683-32879-0014-2515: YES SOMETHING EVERYTHING SAID RACHEL HURRIEDLY LOOKING FROWNINGLY AT A FLOWER WHICH SHE WAS TWIRLING IN HER FINGERS +5683-32879-0015-2516: YES SAID RACHEL +5683-32879-0016-2517: AND THE WAN ORACLE HAVING SPOKEN SHE (SATE->SAT) DOWN IN THE SAME SORT OF ABSTRACTION AGAIN BESIDE DORCAS AND SHE LOOKED FULL IN HER COUSIN'S EYES +5683-32879-0017-2518: OF MARK WYLDER I SAY THIS HIS NAME HAS BEEN FOR YEARS HATEFUL TO ME AND RECENTLY IT HAS BECOME FRIGHTFUL AND YOU WILL PROMISE ME SIMPLY THIS THAT YOU WILL NEVER ASK ME TO SPEAK AGAIN ABOUT HIM +5683-32879-0018-2519: IT IS AN ANTIPATHY AN ANTIPATHY I CANNOT GET OVER DEAR DORCAS YOU MAY THINK IT A MADNESS BUT DON'T BLAME ME +5683-32879-0019-2520: I HAVE VERY FEW TO LOVE ME NOW AND I THOUGHT YOU MIGHT LOVE ME AS I HAVE BEGUN TO LOVE YOU +5683-32879-0020-2521: AND SHE THREW HER ARMS ROUND HER COUSIN'S NECK AND BRAVE RACHEL AT LAST BURST INTO TEARS +5683-32879-0021-2522: DORCAS IN HER STRANGE WAY WAS MOVED +5683-32879-0022-2523: I LIKE YOU STILL RACHEL I'M SURE I'LL ALWAYS LIKE YOU +5683-32879-0023-2524: YOU RESEMBLE ME RACHEL YOU ARE FEARLESS AND INFLEXIBLE AND GENEROUS +5683-32879-0024-2525: YES RACHEL I DO LOVE YOU +5683-32879-0025-2526: THANK YOU DORCAS DEAR +61-70968-0000-2179: HE BEGAN A CONFUSED COMPLAINT AGAINST THE WIZARD WHO HAD VANISHED BEHIND THE CURTAIN ON THE LEFT +61-70968-0001-2180: (GIVE->KIVED) NOT SO EARNEST A MIND TO THESE MUMMERIES CHILD +61-70968-0002-2181: A GOLDEN FORTUNE AND A HAPPY LIFE +61-70968-0003-2182: HE WAS LIKE UNTO MY FATHER IN A WAY AND YET WAS NOT MY FATHER +61-70968-0004-2183: ALSO THERE WAS A STRIPLING PAGE WHO TURNED INTO A MAID +61-70968-0005-2184: THIS WAS SO SWEET A LADY SIR AND IN SOME MANNER I DO THINK SHE DIED +61-70968-0006-2185: BUT THEN THE PICTURE WAS GONE AS QUICKLY AS IT CAME +61-70968-0007-2186: SISTER NELL DO YOU HEAR THESE MARVELS +61-70968-0008-2187: TAKE YOUR PLACE AND LET US SEE WHAT THE CRYSTAL CAN SHOW TO YOU +61-70968-0009-2188: LIKE AS NOT YOUNG MASTER THOUGH I AM AN OLD MAN +61-70968-0010-2189: FORTHWITH ALL RAN TO THE OPENING OF THE TENT TO SEE WHAT MIGHT BE AMISS BUT MASTER WILL WHO PEEPED OUT FIRST NEEDED NO MORE THAN ONE GLANCE +61-70968-0011-2190: HE GAVE WAY TO THE OTHERS VERY READILY AND RETREATED UNPERCEIVED BY THE SQUIRE AND MISTRESS FITZOOTH TO THE REAR OF THE TENT +61-70968-0012-2191: CRIES OF (A NOTTINGHAM->UNNOTTINGHAM) A NOTTINGHAM +61-70968-0013-2192: BEFORE THEM FLED THE STROLLER AND HIS THREE SONS (CAPLESS->CAPLICE) AND TERRIFIED +61-70968-0014-2193: WHAT IS THE TUMULT AND RIOTING CRIED OUT THE SQUIRE AUTHORITATIVELY AND HE BLEW TWICE ON (A->THE) SILVER WHISTLE WHICH HUNG AT HIS BELT +61-70968-0015-2194: NAY WE (REFUSED->WERE FREEZED) THEIR REQUEST MOST POLITELY MOST NOBLE SAID THE LITTLE STROLLER +61-70968-0016-2195: AND THEN THEY BECAME VEXED AND WOULD HAVE SNATCHED YOUR PURSE FROM US +61-70968-0017-2196: I COULD NOT SEE MY BOY INJURED EXCELLENCE FOR BUT DOING HIS DUTY AS ONE OF CUMBERLAND'S SONS +61-70968-0018-2197: SO I DID PUSH THIS FELLOW +61-70968-0019-2198: IT IS ENOUGH SAID GEORGE GAMEWELL SHARPLY (AND->AS) HE TURNED UPON THE CROWD +61-70968-0020-2199: SHAME ON YOU CITIZENS CRIED HE I BLUSH FOR MY FELLOWS OF NOTTINGHAM +61-70968-0021-2200: SURELY WE CAN SUBMIT WITH GOOD GRACE +61-70968-0022-2201: TIS FINE FOR YOU TO TALK OLD MAN ANSWERED THE LEAN SULLEN APPRENTICE +61-70968-0023-2202: BUT I WRESTLED WITH THIS FELLOW AND DO KNOW THAT HE PLAYED UNFAIRLY IN THE SECOND BOUT 
+61-70968-0024-2203: SPOKE THE SQUIRE LOSING ALL (PATIENCE->PATIENT) AND IT WAS TO YOU THAT I GAVE ANOTHER (PURSE IN->PERSON) CONSOLATION +61-70968-0025-2204: COME TO ME MEN HERE HERE HE RAISED HIS VOICE STILL LOUDER +61-70968-0026-2205: THE STROLLERS TOOK THEIR PART IN IT WITH HEARTY ZEST NOW THAT THEY HAD SOME CHANCE OF BEATING OFF THEIR FOES +61-70968-0027-2206: ROBIN AND THE LITTLE TUMBLER BETWEEN THEM TRIED TO FORCE THE SQUIRE TO STAND BACK AND VERY VALIANTLY DID THESE TWO COMPORT THEMSELVES +61-70968-0028-2207: THE HEAD AND CHIEF OF THE RIOT THE NOTTINGHAM (APPRENTICE->APPRENTICED) WITH CLENCHED FISTS THREATENED MONTFICHET +61-70968-0029-2208: THE SQUIRE HELPED TO THRUST THEM ALL IN AND ENTERED SWIFTLY HIMSELF +61-70968-0030-2209: NOW BE SILENT ON YOUR LIVES HE BEGAN BUT THE CAPTURED APPRENTICE SET UP AN INSTANT SHOUT +61-70968-0031-2210: SILENCE YOU (KNAVE->NAVE) CRIED MONTFICHET +61-70968-0032-2211: HE FELT FOR AND FOUND THE WIZARD'S BLACK CLOTH THE SQUIRE WAS QUITE OUT OF BREATH +61-70968-0033-2212: THRUSTING OPEN THE PROPER ENTRANCE OF THE TENT ROBIN SUDDENLY RUSHED FORTH WITH HIS BURDEN WITH A GREAT SHOUT +61-70968-0034-2213: A MONTFICHET A MONTFICHET GAMEWELL TO THE RESCUE +61-70968-0035-2214: TAKING ADVANTAGE OF THIS THE SQUIRE'S FEW MEN REDOUBLED THEIR EFFORTS AND ENCOURAGED BY (ROBIN'S->ROBINS) AND THE LITTLE STROLLER'S CRIES FOUGHT THEIR WAY TO HIM +61-70968-0036-2215: GEORGE MONTFICHET WILL NEVER FORGET THIS DAY +61-70968-0037-2216: WHAT IS YOUR NAME LORDING ASKED THE LITTLE STROLLER PRESENTLY +61-70968-0038-2217: ROBIN FITZOOTH +61-70968-0039-2218: AND MINE IS WILL STUTELEY SHALL WE BE COMRADES +61-70968-0040-2219: RIGHT WILLINGLY FOR BETWEEN US WE HAVE WON THE BATTLE ANSWERED ROBIN +61-70968-0041-2220: I LIKE YOU WILL YOU ARE THE SECOND WILL THAT I HAVE MET AND LIKED WITHIN TWO DAYS IS THERE A SIGN IN THAT +61-70968-0042-2221: (MONTFICHET->MARTFICHERE) CALLED OUT FOR ROBIN TO GIVE HIM AN ARM +61-70968-0043-2222: FRIENDS SAID (MONTFICHET->MONTFICHE) FAINTLY TO THE WRESTLERS BEAR US ESCORT SO FAR AS THE SHERIFF'S HOUSE +61-70968-0044-2223: IT WILL NOT BE SAFE FOR YOU TO STAY HERE NOW +61-70968-0045-2224: PRAY FOLLOW US WITH MINE (AND->IN) MY LORD SHERIFF'S MEN +61-70968-0046-2225: NOTTINGHAM CASTLE WAS REACHED AND ADMITTANCE WAS DEMANDED +61-70968-0047-2226: MASTER MONCEUX THE SHERIFF OF NOTTINGHAM WAS MIGHTILY PUT ABOUT WHEN TOLD OF THE RIOTING +61-70968-0048-2227: AND HENRY MIGHT RETURN TO ENGLAND AT ANY MOMENT +61-70968-0049-2228: HAVE YOUR WILL CHILD IF THE BOY ALSO WILLS IT MONTFICHET ANSWERED FEELING TOO ILL TO OPPOSE ANYTHING VERY STRONGLY JUST THEN +61-70968-0050-2229: HE MADE AN EFFORT TO HIDE HIS CONDITION FROM THEM ALL AND ROBIN FELT HIS FINGERS TIGHTEN UPON HIS ARM +61-70968-0051-2230: (BEG->BEGGED) ME A ROOM OF THE SHERIFF CHILD QUICKLY +61-70968-0052-2231: BUT WHO IS THIS FELLOW PLUCKING AT YOUR (SLEEVE->STEVE) +61-70968-0053-2232: HE IS MY ESQUIRE EXCELLENCY RETURNED ROBIN WITH DIGNITY +61-70968-0054-2233: MISTRESS FITZOOTH HAD BEEN CARRIED OFF BY THE SHERIFF'S DAUGHTER AND HER MAIDS AS SOON AS THEY HAD ENTERED THE HOUSE SO THAT ROBIN ALONE HAD THE CARE OF MONTFICHET +61-70968-0055-2234: ROBIN WAS GLAD WHEN AT LENGTH THEY WERE LEFT TO THEIR OWN DEVICES +61-70968-0056-2235: THE WINE DID CERTAINLY BRING BACK THE COLOR TO THE SQUIRE'S CHEEKS +61-70968-0057-2236: THESE ESCAPADES ARE NOT FOR OLD (GAMEWELL LAD->GAME WELL LED) HIS DAY HAS COME TO TWILIGHT +61-70968-0058-2237: WILL YOU FORGIVE ME NOW +61-70968-0059-2238: (IT WILL->IT'LL) BE NO DISAPPOINTMENT TO ME 
+61-70968-0060-2239: NO THANKS I AM GLAD TO GIVE YOU SUCH EASY HAPPINESS +61-70968-0061-2240: YOU ARE A WORTHY LEECH WILL PRESENTLY WHISPERED ROBIN THE WINE HAS WORKED A MARVEL +61-70968-0062-2241: (AY->I) AND SHOW YOU SOME PRETTY TRICKS +61-70970-0000-2242: YOUNG FITZOOTH HAD BEEN COMMANDED TO HIS MOTHER'S CHAMBER SO SOON AS HE HAD COME OUT FROM HIS CONVERSE WITH THE SQUIRE +61-70970-0001-2243: THERE BEFELL AN ANXIOUS INTERVIEW MISTRESS FITZOOTH ARGUING FOR AND AGAINST THE SQUIRE'S PROJECT IN A BREATH +61-70970-0002-2244: MOST OF ALL ROBIN THOUGHT OF HIS FATHER WHAT WOULD HE COUNSEL +61-70970-0003-2245: IF FOR A WHIM YOU BEGGAR YOURSELF I CANNOT STAY YOU +61-70970-0004-2246: BUT TAKE IT WHILST I LIVE AND (WEAR->WHERE) MONTFICHET'S SHIELD IN THE DAYS WHEN MY EYES CAN BE REJOICED BY SO BRAVE A SIGHT FOR YOU WILL (NE'ER->NEVER) DISGRACE OUR (SCUTCHEON->STATUNE) I WARRANT ME +61-70970-0005-2247: THE LAD HAD CHECKED HIM THEN +61-70970-0006-2248: NEVER THAT SIR HE HAD SAID +61-70970-0007-2249: HE WAS IN DEEP CONVERSE WITH THE CLERK AND ENTERED THE HALL HOLDING HIM BY THE ARM +61-70970-0008-2250: NOW TO BED BOY +61-70970-0009-2251: TIS LATE AND I GO MYSELF WITHIN A SHORT SPACE +61-70970-0010-2252: DISMISS YOUR SQUIRE ROBIN AND BID ME GOOD (E E N->EEN) +61-70970-0011-2253: AS ANY IN ENGLAND I WOULD SAY SAID GAMEWELL PROUDLY THAT IS IN HIS DAY +61-70970-0012-2254: YET HE WILL TEACH YOU A FEW TRICKS WHEN MORNING IS COME +61-70970-0013-2255: THERE WAS NO CHANCE TO ALTER HIS SLEEPING ROOM TO ONE NEARER TO GAMEWELL'S CHAMBER +61-70970-0014-2256: PRESENTLY HE CROSSED THE FLOOR OF HIS ROOM WITH DECIDED STEP +61-70970-0015-2257: WILL CRIED HE SOFTLY AND STUTELEY WHO HAD CHOSEN HIS COUCH ACROSS THE DOOR OF HIS YOUNG MASTER'S CHAMBER SPRANG UP AT ONCE IN ANSWER +61-70970-0016-2258: WE WILL GO OUT TOGETHER TO THE BOWER THERE IS A WAY DOWN TO THE COURT FROM MY WINDOW +61-70970-0017-2259: REST AND BE STILL UNTIL I WARN YOU +61-70970-0018-2260: THE HOURS PASSED WEARILY BY AND MOVEMENT COULD YET BE HEARD ABOUT THE HALL +61-70970-0019-2261: AT LAST ALL WAS QUIET AND BLACK IN THE COURTYARD OF GAMEWELL +61-70970-0020-2262: WILL WHISPERED ROBIN OPENING HIS DOOR AS HE SPOKE ARE YOU READY +61-70970-0021-2263: THEY THEN RENEWED THEIR JOURNEY AND UNDER THE BETTER LIGHT MADE A SAFE CROSSING OF THE STABLE ROOFS +61-70970-0022-2264: ROBIN ENTERED THE HUT DRAGGING THE UNWILLING ESQUIRE AFTER HIM +61-70970-0023-2265: BE NOT SO FOOLISH FRIEND SAID FITZOOTH CROSSLY +61-70970-0024-2266: THEY MOVED THEREAFTER CAUTIOUSLY ABOUT THE HUT GROPING BEFORE AND ABOUT THEM TO FIND SOMETHING TO SHOW THAT WARRENTON HAD FULFILLED HIS MISSION +61-70970-0025-2267: THEY WERE UPON THE VERGE OF AN OPEN TRAP IN THE FAR CORNER OF THE HUT AND STUTELEY HAD TRIPPED OVER THE EDGE OF THE REVERSED FLAP MOUTH OF THIS PIT +61-70970-0026-2268: FITZOOTH'S HAND RESTED AT LAST UPON THE TOP RUNG OF A LADDER AND SLOWLY THE TRUTH CAME TO HIM +61-70970-0027-2269: ROBIN CAREFULLY DESCENDED THE LADDER AND FOUND HIMSELF SOON UPON FIRM ROCKY GROUND +61-70970-0028-2270: STUTELEY WAS BY HIS SIDE IN A FLASH AND THEN THEY BOTH BEGAN FEELING ABOUT THEM TO ASCERTAIN THE SHAPE AND CHARACTER OF THIS VAULT +61-70970-0029-2271: FROM THE BLACKNESS BEHIND THE LIGHT THEY HEARD A VOICE WARRENTON'S +61-70970-0030-2272: SAVE ME MASTERS BUT YOU STARTLED ME RARELY +61-70970-0031-2273: CRIED HE WAVING THE (LANTHORN->LANTERN) BEFORE HIM TO MAKE SURE THAT THESE WERE NO GHOSTS IN FRONT OF HIM +61-70970-0032-2274: (ENQUIRED->INQUIRED) ROBIN WITH HIS (SUSPICIONS->SUSPICION) STILL UPON HIM 
+61-70970-0033-2275: TRULY SUCH A HORSE (SHOULD->WOULD) BE WORTH MUCH IN NOTTINGHAM FAIR +61-70970-0034-2276: NAY NAY LORDING ANSWERED WARRENTON WITH A HALF LAUGH +61-70970-0035-2277: WARRENTON SPOKE THUS WITH SIGNIFICANCE TO SHOW ROBIN THAT HE WAS NOT TO THINK (GEOFFREY'S->JEFFREY'S) CLAIMS TO THE ESTATE WOULD BE PASSED BY +61-70970-0036-2278: ROBIN FITZOOTH SAW THAT HIS DOUBTS OF WARRENTON HAD BEEN UNFAIR AND HE BECAME ASHAMED OF HIMSELF FOR (HARBORING->HARBOURING) THEM +61-70970-0037-2279: HIS TONES RANG PLEASANTLY ON WARRENTON'S EARS AND FORTHWITH (A->THE) GOOD FELLOWSHIP WAS HERALDED BETWEEN THEM +61-70970-0038-2280: THE OLD SERVANT TOLD HIM QUIETLY AS THEY CREPT BACK TO GAMEWELL THAT THIS (PASSAGE WAY->PASSAGEWAY) LED FROM THE HUT IN THE (PLEASANCE->PLEASANTS) TO SHERWOOD AND THAT (GEOFFREY->JEFFREY) FOR THE TIME WAS HIDING WITH THE OUTLAWS IN THE FOREST +61-70970-0039-2281: HE IMPLORES US TO BE DISCREET AS THE GRAVE IN THIS MATTER FOR IN SOOTH HIS LIFE IS IN THE HOLLOW OF OUR HANDS +61-70970-0040-2282: THEY (REGAINED->REGAIN) THEIR APARTMENT APPARENTLY WITHOUT DISTURBING THE HOUSEHOLD OF GAMEWELL +672-122797-0000-1529: OUT IN THE WOODS STOOD A NICE LITTLE FIR TREE +672-122797-0001-1530: THE PLACE HE HAD WAS A VERY GOOD ONE THE SUN SHONE ON HIM AS TO FRESH AIR THERE WAS ENOUGH OF THAT AND ROUND HIM GREW MANY LARGE SIZED COMRADES PINES AS WELL AS (FIRS->FURS) +672-122797-0002-1531: HE DID NOT THINK OF THE WARM SUN AND OF THE FRESH AIR HE DID NOT CARE FOR THE LITTLE COTTAGE CHILDREN THAT RAN ABOUT (AND->IN) PRATTLED WHEN THEY WERE IN THE WOODS LOOKING FOR WILD STRAWBERRIES +672-122797-0003-1532: BUT THIS WAS WHAT THE TREE COULD NOT BEAR TO HEAR +672-122797-0004-1533: IN WINTER WHEN THE SNOW LAY GLITTERING ON THE GROUND A HARE WOULD OFTEN COME LEAPING ALONG AND JUMP RIGHT OVER THE LITTLE TREE +672-122797-0005-1534: OH THAT MADE HIM SO ANGRY +672-122797-0006-1535: TO GROW AND GROW TO GET OLDER AND BE TALL THOUGHT THE TREE THAT AFTER ALL IS THE MOST DELIGHTFUL THING IN THE WORLD +672-122797-0007-1536: IN AUTUMN THE (WOOD CUTTERS->WOODCUTTERS) ALWAYS CAME AND FELLED SOME OF THE LARGEST TREES +672-122797-0008-1537: THIS HAPPENED EVERY YEAR AND THE YOUNG FIR TREE THAT HAD NOW GROWN TO A VERY COMELY (SIZE->SIZED) TREMBLED AT THE SIGHT FOR THE MAGNIFICENT GREAT TREES FELL TO THE EARTH WITH NOISE AND CRACKING THE BRANCHES WERE LOPPED OFF AND THE TREES LOOKED LONG AND BARE THEY WERE HARDLY TO BE (RECOGNISED->RECOGNIZED) AND THEN THEY WERE LAID IN CARTS AND THE HORSES DRAGGED THEM OUT OF THE WOOD +672-122797-0009-1538: HAVE YOU NOT MET (THEM ANYWHERE->THE MANY WHERE) +672-122797-0010-1539: REJOICE IN THY GROWTH SAID THE SUNBEAMS +672-122797-0011-1540: AND THEN WHAT HAPPENS THEN +672-122797-0012-1541: I WOULD FAIN KNOW IF I AM DESTINED FOR SO GLORIOUS A CAREER CRIED THE TREE REJOICING +672-122797-0013-1542: I AM NOW TALL AND MY BRANCHES SPREAD LIKE THE OTHERS THAT WERE CARRIED OFF LAST YEAR OH +672-122797-0014-1543: WERE I BUT ALREADY ON THE CART +672-122797-0015-1544: (WERE->WHERE) I IN THE WARM ROOM WITH ALL THE (SPLENDOR->SPLENDOUR) AND MAGNIFICENCE +672-122797-0016-1545: YES THEN SOMETHING BETTER SOMETHING STILL GRANDER WILL SURELY FOLLOW OR WHEREFORE SHOULD THEY THUS ORNAMENT ME +672-122797-0017-1546: SOMETHING BETTER SOMETHING STILL GRANDER MUST FOLLOW BUT WHAT +672-122797-0018-1547: REJOICE IN OUR PRESENCE SAID THE (AIR->HEIR) AND THE SUNLIGHT +672-122797-0019-1548: REJOICE IN THY OWN FRESH YOUTH +672-122797-0020-1549: BUT THE TREE DID NOT REJOICE AT ALL HE GREW AND GREW AND WAS GREEN BOTH 
WINTER AND SUMMER +672-122797-0021-1550: AND TOWARDS CHRISTMAS HE WAS ONE OF THE FIRST THAT WAS CUT DOWN +672-122797-0022-1551: THE AXE STRUCK DEEP INTO THE VERY PITH THE TREE FELL TO THE EARTH WITH A SIGH HE FELT A PANG IT WAS LIKE A SWOON HE COULD NOT THINK OF HAPPINESS FOR HE WAS SORROWFUL AT BEING SEPARATED FROM HIS HOME FROM THE PLACE WHERE HE HAD SPRUNG UP +672-122797-0023-1552: HE WELL KNEW THAT HE SHOULD NEVER SEE HIS DEAR OLD COMRADES THE LITTLE BUSHES AND FLOWERS AROUND HIM (ANYMORE->ANY MORE) PERHAPS NOT EVEN THE BIRDS +672-122797-0024-1553: THE DEPARTURE WAS NOT AT ALL AGREEABLE +672-122797-0025-1554: THE TREE ONLY CAME TO HIMSELF WHEN HE WAS UNLOADED IN A (COURT YARD->COURTYARD) WITH THE OTHER TREES AND HEARD A MAN SAY THAT ONE IS SPLENDID WE DON'T WANT THE OTHERS +672-122797-0026-1555: THERE TOO WERE LARGE EASY CHAIRS SILKEN SOFAS LARGE TABLES FULL OF PICTURE BOOKS AND FULL OF TOYS WORTH HUNDREDS AND HUNDREDS OF CROWNS AT LEAST THE CHILDREN SAID SO +672-122797-0027-1556: THE SERVANTS AS WELL AS THE YOUNG LADIES DECORATED IT +672-122797-0028-1557: THIS EVENING THEY ALL SAID +672-122797-0029-1558: HOW IT WILL SHINE THIS EVENING +672-122797-0030-1559: PERHAPS THE OTHER TREES FROM THE FOREST WILL COME TO LOOK AT ME +672-122797-0031-1560: IT BLAZED UP FAMOUSLY HELP HELP +672-122797-0032-1561: CRIED THE YOUNG LADIES AND THEY QUICKLY PUT OUT THE FIRE +672-122797-0033-1562: A STORY +672-122797-0034-1563: A STORY CRIED THE CHILDREN DRAWING A LITTLE FAT MAN TOWARDS THE TREE +672-122797-0035-1564: BUT I SHALL TELL ONLY ONE STORY +672-122797-0036-1565: HUMPY (DUMPY->DON'T BE) FELL DOWNSTAIRS AND YET HE MARRIED THE PRINCESS +672-122797-0037-1566: THAT'S THE WAY OF THE WORLD +672-122797-0038-1567: THOUGHT THE FIR TREE AND BELIEVED IT ALL BECAUSE THE MAN WHO TOLD THE STORY WAS SO GOOD LOOKING WELL WELL +672-122797-0039-1568: I WON'T TREMBLE TO MORROW THOUGHT THE FIR TREE +672-122797-0040-1569: AND THE WHOLE NIGHT THE TREE STOOD STILL AND IN DEEP THOUGHT +672-122797-0041-1570: IN THE MORNING THE SERVANT AND THE HOUSEMAID CAME IN +672-122797-0042-1571: BUT THEY DRAGGED HIM OUT OF THE ROOM AND UP THE STAIRS INTO THE LOFT AND HERE IN A DARK CORNER WHERE NO DAYLIGHT COULD ENTER THEY LEFT HIM +672-122797-0043-1572: WHAT'S THE MEANING OF THIS THOUGHT THE TREE +672-122797-0044-1573: AND HE LEANED AGAINST THE WALL LOST IN REVERIE +672-122797-0045-1574: TIME ENOUGH HAD HE TOO FOR HIS REFLECTIONS FOR DAYS AND NIGHTS PASSED ON AND NOBODY CAME UP AND WHEN AT LAST SOMEBODY DID COME IT WAS ONLY TO PUT SOME GREAT TRUNKS IN A CORNER OUT OF THE WAY +672-122797-0046-1575: TIS NOW WINTER OUT OF DOORS THOUGHT THE TREE +672-122797-0047-1576: HOW KIND MAN IS AFTER ALL +672-122797-0048-1577: IF IT ONLY WERE NOT SO DARK HERE AND SO TERRIBLY LONELY +672-122797-0049-1578: SQUEAK (SQUEAK->SQUI) +672-122797-0050-1579: THEY SNUFFED ABOUT THE FIR TREE AND RUSTLED AMONG THE BRANCHES +672-122797-0051-1580: I AM BY NO MEANS OLD SAID THE FIR TREE +672-122797-0052-1581: THERE'S MANY A ONE CONSIDERABLY OLDER THAN I AM +672-122797-0053-1582: THEY WERE SO EXTREMELY CURIOUS +672-122797-0054-1583: I KNOW NO SUCH PLACE SAID THE TREE +672-122797-0055-1584: AND THEN HE TOLD ALL ABOUT HIS YOUTH AND THE LITTLE MICE HAD NEVER HEARD THE LIKE BEFORE AND THEY LISTENED AND SAID +672-122797-0056-1585: SAID THE FIR TREE THINKING OVER WHAT HE HAD HIMSELF RELATED +672-122797-0057-1586: YES IN REALITY THOSE WERE HAPPY TIMES +672-122797-0058-1587: WHO (IS->IT'S) HUMPY DUMPY ASKED THE MICE +672-122797-0059-1588: ONLY THAT ONE ANSWERED THE TREE 
+672-122797-0060-1589: IT IS A VERY STUPID STORY +672-122797-0061-1590: DON'T YOU KNOW ONE ABOUT BACON AND TALLOW CANDLES CAN'T YOU TELL ANY LARDER STORIES +672-122797-0062-1591: NO SAID THE TREE +672-122797-0063-1592: THEN GOOD BYE SAID THE RATS AND THEY WENT HOME +672-122797-0064-1593: AT LAST THE LITTLE MICE STAYED AWAY ALSO AND THE TREE SIGHED AFTER ALL IT WAS VERY PLEASANT WHEN THE SLEEK LITTLE MICE SAT ROUND ME AND LISTENED TO WHAT I TOLD THEM +672-122797-0065-1594: NOW THAT TOO IS OVER +672-122797-0066-1595: WHY ONE MORNING THERE CAME A QUANTITY OF PEOPLE AND SET TO WORK IN THE LOFT +672-122797-0067-1596: THE TRUNKS WERE MOVED THE TREE WAS PULLED OUT AND THROWN RATHER HARD IT IS TRUE DOWN ON THE FLOOR BUT A MAN DREW HIM TOWARDS THE STAIRS WHERE THE DAYLIGHT SHONE +672-122797-0068-1597: BUT IT WAS NOT THE FIR TREE THAT THEY MEANT +672-122797-0069-1598: IT WAS IN A CORNER THAT HE LAY AMONG WEEDS AND NETTLES +672-122797-0070-1599: THE GOLDEN STAR OF TINSEL WAS STILL ON THE TOP OF THE TREE AND GLITTERED IN THE SUNSHINE +672-122797-0071-1600: IN THE COURT YARD SOME OF THE (MERRY->MARRIED) CHILDREN WERE PLAYING WHO HAD DANCED AT CHRISTMAS ROUND THE FIR TREE AND WERE SO GLAD AT THE SIGHT OF HIM +672-122797-0072-1601: AND THE GARDENER'S BOY CHOPPED THE TREE INTO SMALL PIECES THERE WAS A WHOLE HEAP LYING THERE +672-122797-0073-1602: THE WOOD FLAMED UP SPLENDIDLY UNDER THE LARGE BREWING COPPER AND (IT SIGHED->ITS SIDE) SO DEEPLY +672-122797-0074-1603: HOWEVER THAT WAS OVER NOW THE TREE GONE THE STORY AT AN END +6829-68769-0000-1858: KENNETH AND BETH REFRAINED FROM TELLING THE OTHER GIRLS OR UNCLE JOHN OF OLD WILL ROGERS'S VISIT BUT THEY GOT MISTER WATSON IN THE LIBRARY AND QUESTIONED HIM CLOSELY ABOUT THE PENALTY FOR FORGING A (CHECK->CHEQUE) +6829-68769-0001-1859: IT WAS A SERIOUS CRIME INDEED MISTER WATSON TOLD THEM AND TOM GATES BADE FAIR TO SERVE A LENGTHY TERM IN (*->THE) STATE'S PRISON AS A CONSEQUENCE OF HIS RASH ACT +6829-68769-0002-1860: I CAN'T SEE IT IN THAT LIGHT SAID THE OLD LAWYER +6829-68769-0003-1861: IT WAS A DELIBERATE THEFT FROM HIS EMPLOYERS TO PROTECT A GIRL HE LOVED +6829-68769-0004-1862: BUT THEY COULD NOT HAVE PROVEN A (CASE->GASE) AGAINST LUCY IF SHE WAS INNOCENT AND ALL THEIR THREATS OF ARRESTING HER WERE PROBABLY (*->A) MERE BLUFF +6829-68769-0005-1863: HE WAS (*->A) SOFT HEARTED AND IMPETUOUS SAID BETH AND BEING IN LOVE HE DIDN'T STOP TO COUNT THE COST +6829-68769-0006-1864: IF THE PROSECUTION WERE WITHDRAWN AND THE CASE SETTLED WITH THE VICTIM OF THE FORGED CHECK THEN THE YOUNG MAN WOULD BE ALLOWED HIS FREEDOM +6829-68769-0007-1865: BUT UNDER THE CIRCUMSTANCES I DOUBT (IF->OF) SUCH AN ARRANGEMENT COULD BE MADE +6829-68769-0008-1866: (FAIRVIEW WAS->FAIR VIEW'S) TWELVE MILES AWAY BUT BY TEN O'CLOCK THEY DREW UP AT THE COUNTY (JAIL->DRALE) +6829-68769-0009-1867: THEY WERE RECEIVED IN THE LITTLE OFFICE BY A MAN NAMED MARKHAM WHO WAS THE JAILER +6829-68769-0010-1868: WE WISH TO TALK WITH HIM ANSWERED KENNETH TALK +6829-68769-0011-1869: I'M RUNNING FOR REPRESENTATIVE ON THE REPUBLICAN TICKET SAID KENNETH QUIETLY +6829-68769-0012-1870: (OH->I'LL) SAY THAT'S DIFFERENT OBSERVED MARKHAM ALTERING HIS (DEMEANOR->DEMEANOUR) +6829-68769-0013-1871: (MAY WE SEE->MAYBE SEA) GATES AT ONCE ASKED KENNETH +6829-68769-0014-1872: THEY FOLLOWED THE JAILER ALONG (A->THE) SUCCESSION OF PASSAGES +6829-68769-0015-1873: SOMETIMES (I'M->ON) THAT (YEARNING->YEARNIN) FOR A SMOKE I'M NEARLY CRAZY (AN->AND) I (DUNNO->DON'T KNOW) WHICH IS (WORST->WORSE) DYIN ONE WAY OR (ANOTHER->THE OTHER) 
+6829-68769-0016-1874: HE UNLOCKED THE DOOR AND CALLED HERE'S VISITORS TOM +6829-68769-0017-1875: (WORSE->HORSE) TOM WORSE (N EVER->THAN ARROW) REPLIED THE JAILER GLOOMILY +6829-68769-0018-1876: (MISS DE GRAF->MISTER GRAFT) SAID KENNETH NOTICING THE BOY'S FACE CRITICALLY AS HE STOOD WHERE THE LIGHT FROM THE PASSAGE FELL UPON IT +6829-68769-0019-1877: SORRY WE HAVEN'T ANY RECEPTION ROOM IN THE JAIL +6829-68769-0020-1878: SIT DOWN PLEASE SAID GATES IN A CHEERFUL AND PLEASANT VOICE THERE'S A (BENCH->PINCH) HERE +6829-68769-0021-1879: A FRESH WHOLESOME LOOKING BOY WAS TOM GATES WITH STEADY GRAY EYES AN INTELLIGENT FOREHEAD BUT A SENSITIVE RATHER WEAK MOUTH +6829-68769-0022-1880: WE HAVE HEARD SOMETHING OF YOUR STORY SAID KENNETH AND (ARE->OUR) INTERESTED IN IT +6829-68769-0023-1881: I DIDN'T STOP TO THINK WHETHER IT WAS FOOLISH OR NOT I DID IT AND I'M GLAD I DID (*->IT) +6829-68769-0024-1882: OLD WILL IS A FINE FELLOW BUT POOR AND HELPLESS SINCE MISSUS ROGERS HAD HER ACCIDENT +6829-68769-0025-1883: THEN ROGERS WOULDN'T DO ANYTHING BUT LEAD HER AROUND AND WAIT UPON HER AND THE PLACE WENT TO RACK AND RUIN +6829-68769-0026-1884: HE SPOKE SIMPLY BUT PACED UP AND DOWN THE NARROW CELL IN FRONT OF THEM +6829-68769-0027-1885: WHOSE NAME DID YOU SIGN TO THE CHECK ASKED KENNETH +6829-68769-0028-1886: HE IS SUPPOSED TO SIGN ALL THE CHECKS OF THE CONCERN +6829-68769-0029-1887: IT'S A STOCK COMPANY (AND->IN) RICH +6829-68769-0030-1888: I WAS (BOOKKEEPER->BITKEEPER) SO IT WAS EASY TO GET A BLANK CHECK AND FORGE THE SIGNATURE +6829-68769-0031-1889: AS REGARDS MY ROBBING THE COMPANY I'LL SAY THAT I SAVED (THEM->HIM) A HEAVY LOSS ONE DAY +6829-68769-0032-1890: I DISCOVERED AND PUT OUT A FIRE THAT WOULD HAVE DESTROYED THE WHOLE PLANT BUT MARSHALL NEVER EVEN THANKED ME +6829-68769-0033-1891: IT WAS BETTER FOR HIM TO THINK THE GIRL UNFEELING THAN TO KNOW THE TRUTH +6829-68769-0034-1892: I'M GOING TO SEE MISTER (MARSHALL->MARSHAL) SAID KENNETH AND DISCOVER WHAT I CAN DO TO ASSIST YOU THANK YOU SIR +6829-68769-0035-1893: IT WON'T BE MUCH BUT I'M GRATEFUL TO FIND A FRIEND +6829-68769-0036-1894: THEY LEFT HIM THEN FOR THE JAILER ARRIVED TO UNLOCK THE DOOR AND ESCORT THEM TO THE OFFICE +6829-68769-0037-1895: I'VE SEEN LOTS OF THAT KIND IN MY DAY +6829-68769-0038-1896: AND IT RUINS A MAN'S DISPOSITION +6829-68769-0039-1897: HE LOOKED UP RATHER UNGRACIOUSLY BUT MOTIONED THEM TO BE SEATED +6829-68769-0040-1898: SOME GIRL HAS BEEN (*->IN) HERE TWICE TO INTERVIEW MY MEN AND I HAVE REFUSED TO ADMIT HER +6829-68769-0041-1899: I'M NOT ELECTIONEERING JUST NOW +6829-68769-0042-1900: OH WELL SIR WHAT ABOUT HIM +6829-68769-0043-1901: AND HE DESERVES A TERM (IN->AND) STATE'S PRISON +6829-68769-0044-1902: IT HAS COST ME TWICE SIXTY DOLLARS (IN->AN) ANNOYANCE +6829-68769-0045-1903: I'LL PAY ALL THE (COSTS->COST) BESIDES +6829-68769-0046-1904: YOU'RE FOOLISH WHY SHOULD YOU DO ALL THIS +6829-68769-0047-1905: I HAVE MY OWN REASONS MISTER (MARSHALL->MARSHAL) +6829-68769-0048-1906: GIVE ME A CHECK FOR A HUNDRED AND FIFTY AND I'LL TURN OVER TO YOU THE FORGED CHECK AND QUASH FURTHER PROCEEDINGS +6829-68769-0049-1907: HE DETESTED THE GRASPING DISPOSITION THAT WOULD (ENDEAVOR->ENDEAVOUR) TO TAKE ADVANTAGE OF HIS EVIDENT DESIRE TO HELP YOUNG GATES +6829-68769-0050-1908: BETH UNEASY AT HIS SILENCE NUDGED HIM +6829-68769-0051-1909: THERE WAS A GRIM SMILE OF AMUSEMENT ON HIS SHREWD FACE +6829-68769-0052-1910: HE MIGHT HAVE HAD THAT FORGED CHECK FOR THE FACE OF IT IF HE'D BEEN SHARP +6829-68769-0053-1911: AND TO THINK WE CAN SAVE ALL THAT MISERY 
AND DESPAIR BY THE PAYMENT OF A HUNDRED AND FIFTY DOLLARS +6829-68771-0000-1912: SO TO THE SURPRISE OF THE DEMOCRATIC COMMITTEE AND ALL HIS FRIENDS MISTER HOPKINS ANNOUNCED THAT HE WOULD OPPOSE (FORBES'S->FORTS) AGGRESSIVE CAMPAIGN WITH AN EQUAL AGGRESSIVENESS AND SPEND AS MANY DOLLARS IN DOING SO AS MIGHT BE NECESSARY +6829-68771-0001-1913: ONE OF MISTER HOPKINS'S FIRST TASKS AFTER CALLING HIS FAITHFUL (HENCHMEN->HENCHMAN) AROUND HIM WAS TO MAKE A CAREFUL (CANVASS->CANVAS) OF THE VOTERS OF HIS DISTRICT TO SEE WHAT WAS STILL TO BE ACCOMPLISHED +6829-68771-0002-1914: THE WEAK (KNEED->NEED) CONTINGENCY MUST BE STRENGTHENED AND FORTIFIED AND A COUPLE OF HUNDRED VOTES IN ONE WAY OR (ANOTHER->THE OTHER) SECURED FROM THE OPPOSITION +6829-68771-0003-1915: THE DEMOCRATIC COMMITTEE FIGURED OUT A WAY TO DO THIS +6829-68771-0004-1916: UNDER ORDINARY CONDITIONS REYNOLDS WAS SURE TO BE ELECTED BUT THE COMMITTEE PROPOSED TO SACRIFICE HIM IN ORDER TO (ELECT->ELEC) HOPKINS +6829-68771-0005-1917: THE ONLY THING NECESSARY WAS TO FIX SETH REYNOLDS AND THIS HOPKINS ARRANGED PERSONALLY +6829-68771-0006-1918: AND THIS WAS WHY KENNETH AND BETH DISCOVERED HIM CONVERSING WITH THE YOUNG WOMAN IN THE BUGGY +6829-68771-0007-1919: THE DESCRIPTION SHE GAVE OF THE COMING RECEPTION TO THE (WOMAN'S->WOMEN'S) POLITICAL LEAGUE WAS SO HUMOROUS AND DIVERTING THAT THEY WERE BOTH LAUGHING HEARTILY OVER THE THING WHEN THE YOUNG PEOPLE PASSED THEM AND THUS MISTER HOPKINS FAILED TO NOTICE WHO THE OCCUPANTS OF THE OTHER VEHICLE WERE +6829-68771-0008-1920: THESE WOMEN WERE FLATTERED BY THE ATTENTION OF THE YOUNG LADY AND HAD PROMISED TO ASSIST IN ELECTING MISTER FORBES +6829-68771-0009-1921: LOUISE HOPED FOR EXCELLENT RESULTS FROM THIS ORGANIZATION AND WISHED THE ENTERTAINMENT TO BE SO EFFECTIVE IN WINNING THEIR GOOD WILL THAT THEY WOULD WORK EARNESTLY FOR THE CAUSE IN WHICH THEY WERE ENLISTED +6829-68771-0010-1922: THE (FAIRVIEW->FAIR VIEW) BAND WAS ENGAGED TO DISCOURSE AS MUCH HARMONY AS IT COULD PRODUCE AND THE RESOURCES OF THE GREAT HOUSE WERE TAXED TO ENTERTAIN THE GUESTS +6829-68771-0011-1923: TABLES WERE SPREAD ON THE LAWN AND A DAINTY BUT SUBSTANTIAL REPAST WAS TO BE SERVED +6829-68771-0012-1924: THIS WAS THE FIRST OCCASION WITHIN A GENERATION WHEN SUCH AN ENTERTAINMENT HAD BEEN GIVEN AT ELMHURST AND THE ONLY ONE WITHIN THE MEMORY OF MAN (WHERE->WERE) THE NEIGHBORS AND COUNTRY PEOPLE HAD BEEN (*->THE) INVITED GUESTS +6829-68771-0013-1925: THE (ATTENDANCE->ATTENDANTS) WAS UNEXPECTEDLY LARGE AND THE GIRLS WERE DELIGHTED FORESEEING GREAT SUCCESS FOR THEIR (FETE->FIGHT) +6829-68771-0014-1926: WE OUGHT TO HAVE MORE (ATTENDANTS->ATTENDANCE) BETH SAID LOUISE APPROACHING HER COUSIN +6829-68771-0015-1927: WON'T YOU RUN INTO THE HOUSE AND SEE IF MARTHA CAN'T SPARE ONE OR TWO MORE MAIDS +6829-68771-0016-1928: SHE WAS VERY FOND OF THE YOUNG LADIES WHOM SHE HAD KNOWN WHEN AUNT JANE WAS (THE->THEIR) MISTRESS HERE AND BETH WAS HER (ESPECIAL FAVORITE->SPECIAL FAVOURITE) +6829-68771-0017-1929: THE HOUSEKEEPER LED THE WAY (AND->IN) BETH FOLLOWED +6829-68771-0018-1930: FOR A MOMENT BETH STOOD STARING WHILE THE NEW MAID REGARDED HER WITH COMPOSURE AND (A->OF) SLIGHT SMILE UPON HER BEAUTIFUL FACE +6829-68771-0019-1931: SHE WAS DRESSED IN THE REGULATION COSTUME OF THE MAIDS AT ELMHURST A PLAIN BLACK GOWN WITH (*->A) WHITE APRON AND CAP +6829-68771-0020-1932: THEN SHE GAVE A LITTLE LAUGH AND REPLIED NO MISS BETH I'M ELIZABETH PARSONS +6829-68771-0021-1933: BUT IT CAN'T BE PROTESTED THE GIRL +6829-68771-0022-1934: I ATTEND TO THE HOUSEHOLD MENDING 
YOU KNOW AND CARE FOR THE LINEN +6829-68771-0023-1935: YOU SPEAK LIKE AN EDUCATED PERSON SAID BETH WONDERINGLY WHERE IS YOUR HOME +6829-68771-0024-1936: FOR THE FIRST TIME THE (MAID->MAIDS) SEEMED A LITTLE CONFUSED AND HER GAZE WANDERED FROM THE FACE OF HER VISITOR +6829-68771-0025-1937: SHE SAT DOWN IN A ROCKING CHAIR AND CLASPING HER HANDS IN HER LAP ROCKED SLOWLY BACK AND FORTH I'M SORRY SAID BETH +6829-68771-0026-1938: ELIZA PARSONS SHOOK HER HEAD +6829-68771-0027-1939: THEY THEY EXCITE ME IN SOME WAY AND I I CAN'T BEAR THEM YOU MUST EXCUSE ME +6829-68771-0028-1940: SHE EVEN SEEMED MILDLY AMUSED AT THE ATTENTION SHE ATTRACTED +6829-68771-0029-1941: BETH WAS A BEAUTIFUL GIRL THE HANDSOMEST OF THE THREE COUSINS BY FAR YET ELIZA SURPASSED HER (IN->A) NATURAL CHARM AND SEEMED WELL AWARE OF THE FACT +6829-68771-0030-1942: HER MANNER WAS NEITHER INDEPENDENT NOR ASSERTIVE BUT RATHER ONE OF WELL BRED COMPOSURE AND CALM RELIANCE +6829-68771-0031-1943: HER EYES WANDERED TO THE MAID'S HANDS +6829-68771-0032-1944: HOWEVER HER FEATURES (AND->IN) FORM MIGHT REPRESS ANY EVIDENCE OF NERVOUSNESS THESE HANDS TOLD A DIFFERENT STORY +6829-68771-0033-1945: SHE ROSE QUICKLY TO HER FEET WITH AN IMPETUOUS GESTURE THAT MADE HER VISITOR CATCH HER BREATH +6829-68771-0034-1946: I WISH I KNEW MYSELF SHE CRIED FIERCELY +6829-68771-0035-1947: WILL YOU LEAVE ME ALONE IN MY OWN ROOM OR MUST I GO AWAY TO ESCAPE YOU +6829-68771-0036-1948: ELIZA CLOSED THE DOOR BEHIND HER WITH A DECIDED SLAM AND A KEY CLICKED IN THE LOCK +6930-75918-0000-0: CONCORD RETURNED TO ITS PLACE AMIDST THE TENTS +6930-75918-0001-1: THE ENGLISH (FORWARDED->FOOTED) TO THE FRENCH BASKETS OF FLOWERS OF WHICH THEY HAD MADE A PLENTIFUL PROVISION TO GREET THE ARRIVAL OF THE YOUNG PRINCESS THE FRENCH IN RETURN INVITED THE ENGLISH TO A SUPPER WHICH WAS TO BE GIVEN THE NEXT DAY +6930-75918-0002-2: CONGRATULATIONS WERE POURED IN UPON THE PRINCESS EVERYWHERE DURING HER JOURNEY +6930-75918-0003-3: FROM THE RESPECT PAID HER ON ALL SIDES SHE SEEMED LIKE A QUEEN AND FROM THE ADORATION WITH WHICH SHE WAS TREATED BY TWO OR THREE SHE APPEARED AN OBJECT OF WORSHIP THE QUEEN MOTHER GAVE THE FRENCH THE MOST AFFECTIONATE RECEPTION FRANCE WAS HER NATIVE COUNTRY AND SHE HAD SUFFERED TOO MUCH UNHAPPINESS IN ENGLAND FOR ENGLAND TO HAVE MADE HER FORGET FRANCE +6930-75918-0004-4: SHE TAUGHT HER DAUGHTER THEN BY HER OWN AFFECTION FOR IT THAT LOVE FOR A COUNTRY WHERE THEY HAD BOTH BEEN HOSPITABLY RECEIVED AND (WHERE->WERE) A BRILLIANT FUTURE OPENED (BEFORE->FOR) THEM +6930-75918-0005-5: THE COUNT HAD THROWN HIMSELF BACK ON HIS SEAT LEANING HIS SHOULDERS AGAINST THE PARTITION OF THE TENT AND REMAINED THUS HIS FACE BURIED IN HIS HANDS WITH HEAVING CHEST AND RESTLESS LIMBS +6930-75918-0006-6: THIS HAS INDEED BEEN A HARASSING DAY CONTINUED THE YOUNG MAN HIS EYES FIXED UPON HIS FRIEND +6930-75918-0007-7: YOU WILL BE FRANK WITH ME I ALWAYS AM +6930-75918-0008-8: CAN YOU IMAGINE WHY BUCKINGHAM HAS BEEN SO VIOLENT I SUSPECT +6930-75918-0009-9: IT IS YOU WHO ARE MISTAKEN RAOUL I HAVE READ HIS DISTRESS IN HIS EYES IN HIS EVERY GESTURE AND ACTION THE WHOLE DAY +6930-75918-0010-10: I CAN PERCEIVE LOVE CLEARLY ENOUGH +6930-75918-0011-11: I AM CONVINCED OF WHAT I SAY SAID THE COUNT +6930-75918-0012-12: IT IS ANNOYANCE THEN +6930-75918-0013-13: IN THOSE VERY TERMS I EVEN ADDED MORE +6930-75918-0014-14: BUT CONTINUED RAOUL NOT INTERRUPTED BY THIS MOVEMENT OF HIS FRIEND HEAVEN BE PRAISED THE FRENCH WHO ARE PRONOUNCED TO BE THOUGHTLESS AND INDISCREET RECKLESS EVEN ARE CAPABLE OF BRINGING A CALM 
AND SOUND JUDGMENT TO BEAR ON MATTERS OF SUCH HIGH IMPORTANCE +6930-75918-0015-15: THUS IT IS THAT THE HONOR OF THREE IS SAVED OUR (COUNTRY'S->COUNTRY) OUR (MASTER'S->MASTERS) AND OUR OWN +6930-75918-0016-16: YES I NEED REPOSE MANY THINGS HAVE AGITATED ME TO DAY BOTH IN MIND AND BODY WHEN YOU RETURN TO MORROW I SHALL NO LONGER BE THE SAME MAN +6930-75918-0017-17: BUT IN THIS FRIENDLY PRESSURE RAOUL COULD DETECT THE NERVOUS AGITATION OF A GREAT INTERNAL CONFLICT +6930-75918-0018-18: THE NIGHT WAS CLEAR STARLIT AND SPLENDID THE TEMPEST HAD PASSED AWAY AND THE SWEET INFLUENCES OF THE EVENING HAD RESTORED LIFE PEACE AND SECURITY EVERYWHERE +6930-75918-0019-19: UPON THE LARGE SQUARE IN FRONT OF THE HOTEL THE SHADOWS OF THE TENTS INTERSECTED BY THE GOLDEN MOONBEAMS FORMED AS IT WERE A HUGE MOSAIC OF JET AND YELLOW FLAGSTONES +6930-75918-0020-20: (BRAGELONNE->BRAGOLON) WATCHED FOR SOME TIME THE CONDUCT OF THE TWO LOVERS LISTENED TO THE LOUD AND UNCIVIL SLUMBERS OF MANICAMP WHO SNORED AS IMPERIOUSLY AS THOUGH HE WAS WEARING HIS BLUE AND GOLD INSTEAD OF HIS VIOLET SUIT +6930-76324-0000-21: GOLIATH MAKES ANOTHER DISCOVERY +6930-76324-0001-22: (THEY->THERE) WERE CERTAINLY NO (NEARER->NEAR) THE SOLUTION OF THEIR PROBLEM +6930-76324-0002-23: THE POOR LITTLE THINGS CRIED CYNTHIA THINK OF THEM HAVING BEEN TURNED TO THE WALL ALL THESE YEARS +6930-76324-0003-24: NOW WHAT (WAS->IS) THE SENSE OF IT (TWO->TOO) INNOCENT BABIES LIKE THAT +6930-76324-0004-25: BUT JOYCE HAD NOT BEEN LISTENING ALL AT ONCE SHE PUT DOWN HER CANDLE ON THE TABLE AND FACED HER COMPANION +6930-76324-0005-26: THE TWIN BROTHER DID SOMETHING SHE DIDN'T LIKE AND SHE TURNED HIS PICTURE TO THE WALL +6930-76324-0006-27: HERS HAPPENED TO BE (IN->ON) THE SAME FRAME TOO BUT SHE EVIDENTLY DIDN'T CARE ABOUT (THAT->IT) +6930-76324-0007-28: NOW WHAT HAVE YOU TO SAY CYNTHIA SPRAGUE +6930-76324-0008-29: I THOUGHT WE WERE STUMPED AGAIN WHEN I FIRST SAW THAT PICTURE BUT IT'S BEEN OF SOME USE AFTER ALL +6930-76324-0009-30: DO YOU SUPPOSE THE MINIATURE WAS A COPY OF THE SAME THING +6930-76324-0010-31: (WHAT->WHEN) IN THE WORLD IS (THAT->IT) QUERIED JOYCE +6930-76324-0011-32: (THEY->MAY) WORRY ME TERRIBLY AND BESIDES I'D LIKE TO SEE WHAT THIS LOVELY FURNITURE LOOKS LIKE WITHOUT SUCH QUANTITIES OF DUST ALL OVER IT GOOD SCHEME (CYN->SIN) +6930-76324-0012-33: (WE'LL->WILL) COME IN HERE THIS AFTERNOON WITH OLD CLOTHES ON AND (HAVE->HALF) A REGULAR HOUSE CLEANING +6930-76324-0013-34: IT CAN'T HURT ANYTHING I'M SURE FOR WE WON'T DISTURB THINGS AT ALL +6930-76324-0014-35: THIS THOUGHT HOWEVER DID NOT ENTER THE HEADS OF THE ENTHUSIASTIC PAIR +6930-76324-0015-36: SMUGGLING THE HOUSE CLEANING PARAPHERNALIA INTO THE CELLAR WINDOW UNOBSERVED THAT AFTERNOON PROVED NO EASY TASK FOR CYNTHIA HAD ADDED A (WHISK->WHISKED) BROOM AND DUST PAN TO THE OUTFIT +6930-76324-0016-37: THE (LURE->LOWER) PROVED TOO MUCH FOR HIM AND HE CAME SPORTING AFTER IT AS (FRISKILY->FRISKLY) AS A YOUNG KITTEN MUCH TO CYNTHIA'S DELIGHT WHEN SHE CAUGHT SIGHT OF HIM +6930-76324-0017-38: OH LET HIM COME ALONG SHE URGED I DO LOVE TO SEE HIM ABOUT THAT OLD HOUSE +6930-76324-0018-39: HE MAKES IT SORT OF COZIER +6930-76324-0019-40: NOW LET'S DUST THE FURNITURE AND PICTURES +6930-76324-0020-41: YET LITTLE AS IT WAS IT HAD ALREADY MADE A VAST DIFFERENCE IN THE ASPECT OF THE ROOM +6930-76324-0021-42: SURFACE DUST AT LEAST HAD BEEN REMOVED AND THE FINE OLD FURNITURE GAVE A HINT OF ITS REAL ELEGANCE AND POLISH +6930-76324-0022-43: THEN SHE SUDDENLY REMARKED +6930-76324-0023-44: AND MY POCKET MONEY IS GETTING 
LOW AGAIN AND YOU HAVEN'T ANY LEFT AS USUAL +6930-76324-0024-45: THEY SAY ILLUMINATION BY (CANDLE LIGHT->CANDLELIGHT) IS THE PRETTIEST IN THE WORLD +6930-76324-0025-46: WHY IT'S GOLIATH AS USUAL THEY BOTH CRIED PEERING IN +6930-76324-0026-47: ISN'T HE THE GREATEST FOR GETTING INTO ODD CORNERS +6930-76324-0027-48: FORGETTING ALL THEIR WEARINESS THEY SEIZED THEIR CANDLES AND SCURRIED THROUGH THE HOUSE FINDING (AN->ON) OCCASIONAL PAPER TUCKED AWAY IN SOME ODD CORNER +6930-76324-0028-49: WELL I'M CONVINCED THAT THE BOARDED UP HOUSE MYSTERY HAPPENED NOT EARLIER THAN APRIL SIXTEENTH EIGHTEEN SIXTY ONE AND PROBABLY NOT MUCH LATER +6930-81414-0000-50: NO WORDS WERE SPOKEN NO LANGUAGE WAS UTTERED SAVE THAT OF WAILING AND HISSING AND THAT SOMEHOW WAS INDISTINCT AS IF IT EXISTED IN FANCY AND NOT IN REALITY +6930-81414-0001-51: I HEARD A NOISE BEHIND I TURNED AND SAW (KAFFAR->KAFFIR) HIS BLACK EYES SHINING WHILE IN HIS HAND HE HELD A GLEAMING KNIFE HE LIFTED IT ABOVE HIS HEAD AS IF TO STRIKE BUT I HAD THE STRENGTH OF TEN MEN AND I HURLED HIM FROM ME +6930-81414-0002-52: ONWARD SAID A DISTANT VOICE +6930-81414-0003-53: NO SOUND BROKE THE STILLNESS OF THE NIGHT +6930-81414-0004-54: THE STORY OF ITS EVIL INFLUENCE CAME BACK TO ME AND IN MY BEWILDERED CONDITION I WONDERED WHETHER THERE WAS NOT SOME TRUTH IN WHAT HAD BEEN SAID +6930-81414-0005-55: WHAT WAS THAT +6930-81414-0006-56: WHAT THEN A HUMAN HAND LARGE AND SHAPELY APPEARED DISTINCTLY ON THE SURFACE OF THE POND +6930-81414-0007-57: NOTHING MORE NOT EVEN THE WRIST TO WHICH IT MIGHT BE ATTACHED +6930-81414-0008-58: IT DID NOT BECKON OR INDEED MOVE AT ALL IT WAS AS STILL AS THE HAND OF DEATH +6930-81414-0009-59: I AWOKE TO CONSCIOUSNESS FIGHTING AT FIRST IT SEEMED AS IF I WAS FIGHTING WITH (A->THE) PHANTOM BUT GRADUALLY MY OPPONENT BECAME MORE REAL TO ME IT WAS (KAFFAR->KAFFIR) +6930-81414-0010-60: A SOUND OF VOICES A FLASH OF LIGHT +6930-81414-0011-61: A FEELING OF FREEDOM AND I WAS AWAKE WHERE +6930-81414-0012-62: SAID ANOTHER VOICE WHICH I RECOGNIZED AS VOLTAIRE'S (KAFFAR->KAFFIR) +6930-81414-0013-63: I HAD SCARCELY KNOWN WHAT I HAD BEEN SAYING OR DOING UP TO THIS TIME BUT AS HE SPOKE I LOOKED AT MY HAND +6930-81414-0014-64: IN THE LIGHT OF THE MOON I SAW A KNIFE RED WITH BLOOD AND MY HAND TOO WAS ALSO (DISCOLOURED->DISCOLORED) +6930-81414-0015-65: I DO NOT KNOW I AM DAZED BEWILDERED +6930-81414-0016-66: BUT THAT IS (KAFFAR'S->KAFFIR'S) KNIFE +6930-81414-0017-67: I KNOW HE HAD IT THIS VERY EVENING +6930-81414-0018-68: I (REMEMBER->REMEMBERED) SAYING HAVE WE BEEN TOGETHER +6930-81414-0019-69: VOLTAIRE PICKED UP SOMETHING FROM THE GROUND AND LOOKED AT IT +6930-81414-0020-70: I SAY YOU DO KNOW WHAT THIS MEANS AND YOU MUST TELL US +6930-81414-0021-71: A TERRIBLE THOUGHT FLASHED INTO MY MIND +6930-81414-0022-72: I HAD AGAIN BEEN ACTING UNDER THE INFLUENCE OF THIS MAN'S POWER +6930-81414-0023-73: PERCHANCE TOO (KAFFAR'S->KAFFIR'S) DEATH MIGHT SERVE HIM IN GOOD STEAD +6930-81414-0024-74: MY TONGUE REFUSED TO ARTICULATE MY POWER OF SPEECH LEFT ME +6930-81414-0025-75: MY POSITION WAS TOO TERRIBLE +6930-81414-0026-76: MY OVERWROUGHT NERVES YIELDED AT LAST +6930-81414-0027-77: FOR SOME TIME AFTER THAT I REMEMBERED NOTHING DISTINCTLY +7021-79730-0000-1399: THE THREE MODES OF MANAGEMENT +7021-79730-0001-1400: TO SUPPOSE THAT THE OBJECT OF THIS WORK IS TO AID IN EFFECTING SUCH A SUBSTITUTION AS THAT IS ENTIRELY TO MISTAKE ITS NATURE AND DESIGN +7021-79730-0002-1401: BY REASON AND AFFECTION +7021-79730-0003-1402: AS THE (CHAISE->CHASE) DRIVES AWAY MARY STANDS 
BEWILDERED AND PERPLEXED ON THE (DOOR STEP->DOORSTEP) HER MIND IN A TUMULT OF EXCITEMENT IN WHICH HATRED OF THE DOCTOR DISTRUST AND SUSPICION OF HER MOTHER DISAPPOINTMENT VEXATION AND ILL HUMOR SURGE AND SWELL AMONG THOSE (DELICATE->DELEGATE) ORGANIZATIONS ON WHICH THE STRUCTURE AND DEVELOPMENT OF THE SOUL SO CLOSELY DEPEND DOING PERHAPS AN IRREPARABLE INJURY +7021-79730-0004-1403: THE MOTHER AS SOON AS THE (CHAISE->CHASE) IS SO FAR TURNED THAT MARY CAN NO LONGER WATCH THE EXPRESSION OF HER COUNTENANCE GOES AWAY FROM THE DOOR WITH A SMILE OF COMPLACENCY AND SATISFACTION (UPON->ON) HER FACE AT THE INGENUITY AND SUCCESS OF HER LITTLE ARTIFICE +7021-79730-0005-1404: SO YOU WILL BE A GOOD GIRL I KNOW AND NOT MAKE ANY TROUBLE BUT WILL STAY AT HOME CONTENTEDLY WON'T YOU +7021-79730-0006-1405: THE MOTHER IN MANAGING THE CASE IN THIS WAY (RELIES->REALIZE) PARTLY ON CONVINCING THE REASON OF THE CHILD AND PARTLY ON AN APPEAL TO HER AFFECTION +7021-79730-0007-1406: IF YOU SHOULD NOT BE A GOOD GIRL BUT SHOULD SHOW SIGNS OF MAKING US ANY TROUBLE I SHALL HAVE TO SEND YOU OUT SOMEWHERE TO THE BACK PART OF THE HOUSE UNTIL WE ARE GONE +7021-79730-0008-1407: BUT THIS LAST SUPPOSITION IS ALMOST ALWAYS UNNECESSARY FOR IF MARY HAS BEEN HABITUALLY MANAGED ON THIS PRINCIPLE SHE WILL NOT MAKE ANY TROUBLE +7021-79730-0009-1408: IT IS INDEED TRUE THAT THE IMPORTANCE OF TACT AND SKILL IN THE TRAINING OF THE YOUNG AND OF CULTIVATING THEIR REASON AND SECURING THEIR AFFECTION (CAN NOT->CANNOT) BE OVERRATED +7021-79740-0000-1384: TO SUCH PERSONS THESE INDIRECT MODES OF TRAINING CHILDREN IN HABITS OF SUBORDINATION TO THEIR WILL OR RATHER OF YIELDING TO THEIR INFLUENCE ARE SPECIALLY USEFUL +7021-79740-0001-1385: DELLA HAD A YOUNG SISTER NAMED MARIA AND A COUSIN WHOSE NAME WAS JANE +7021-79740-0002-1386: NOW (DELIA->GALLIA) CONTRIVED TO OBTAIN A GREAT INFLUENCE AND (ASCENDENCY->A SCENE) OVER THE MINDS OF THE CHILDREN BY MEANS OF THESE DOLLS +7021-79740-0003-1387: TO GIVE AN IDEA OF THESE CONVERSATIONS I WILL REPORT ONE OF THEM IN FULL +7021-79740-0004-1388: YOU HAVE COME (ANDELLA ANDELLA->AMDELLA AND DELLA) WAS THE NAME OF JANE'S (DOLL->DAL) TO MAKE ROSALIE A VISIT +7021-79740-0005-1389: I AM VERY GLAD +7021-79740-0006-1390: I EXPECT YOU HAVE BEEN A VERY GOOD GIRL (ANDELLA->ANNE DELA) SINCE YOU WERE HERE LAST +7021-79740-0007-1391: THEN TURNING TO JANE SHE ASKED IN A SOMEWHAT ALTERED TONE HAS SHE BEEN A GOOD GIRL JANE +7021-79740-0008-1392: FOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLAYING UPON THE PIAZZA WITH BLOCKS AND OTHER PLAYTHINGS AND FINALLY HAD GONE INTO THE HOUSE LEAVING ALL THE THINGS ON THE FLOOR OF THE PIAZZA INSTEAD OF PUTTING THEM AWAY IN THEIR PLACES AS THEY OUGHT TO HAVE DONE +7021-79740-0009-1393: THEY WERE NOW PLAYING WITH THEIR DOLLS IN THE PARLOR +7021-79740-0010-1394: (DELIA->DAHLIA) CAME TO THE (PARLOR->PARLOUR) AND WITH AN AIR OF GREAT MYSTERY BECKONED THE CHILDREN ASIDE AND SAID TO THEM IN A WHISPER LEAVE (ANDELLA->ANDDELA) AND ROSALIE HERE AND DON'T SAY A WORD TO THEM +7021-79740-0011-1395: SO SAYING SHE LED THE WAY ON TIPTOE FOLLOWED BY THE CHILDREN OUT OF THE ROOM AND ROUND BY A CIRCUITOUS ROUTE TO THE PIAZZA THERE +7021-79740-0012-1396: SAID SHE POINTING TO THE PLAYTHINGS SEE +7021-79740-0013-1397: PUT THESE PLAYTHINGS ALL AWAY QUICK AND CAREFULLY AND WE WILL NOT LET THEM KNOW (ANY THING->ANYTHING) ABOUT YOUR LEAVING THEM OUT +7021-79740-0014-1398: AND THIS METHOD OF TREATING THE CASE WAS MUCH MORE EFFECTUAL IN MAKING THEM DISPOSED TO AVOID COMMITTING A SIMILAR FAULT ANOTHER TIME THAN ANY 
DIRECT REBUKES OR EXPRESSIONS OF DISPLEASURE ADDRESSED PERSONALLY TO THEM WOULD HAVE BEEN +7021-79759-0000-1378: NATURE OF THE EFFECT PRODUCED BY EARLY IMPRESSIONS +7021-79759-0001-1379: THAT IS COMPARATIVELY NOTHING +7021-79759-0002-1380: THEY ARE CHIEFLY FORMED FROM COMBINATIONS OF THE IMPRESSIONS MADE IN CHILDHOOD +7021-79759-0003-1381: VAST IMPORTANCE AND INFLUENCE OF THIS MENTAL FURNISHING +7021-79759-0004-1382: WITHOUT GOING TO ANY SUCH EXTREME AS THIS WE CAN EASILY SEE ON REFLECTION HOW VAST AN INFLUENCE ON THE IDEAS AND CONCEPTIONS AS WELL AS ON THE PRINCIPLES OF ACTION (IN->AND) MATURE YEARS MUST BE EXERTED BY THE NATURE AND CHARACTER OF THE IMAGES WHICH THE PERIOD OF INFANCY AND CHILDHOOD (IMPRESSES->IMPRESS) UPON THE MIND +7021-79759-0005-1383: THE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY VIOLENCE TO WHICH A FATHER SUBJECTS HIS SON MAY SOON PASS AWAY BUT THE MEMORY OF IT DOES NOT PASS AWAY WITH THE PAIN +7021-85628-0000-1409: BUT (ANDERS->ANDREWS) CARED NOTHING ABOUT THAT +7021-85628-0001-1410: HE MADE A BOW SO DEEP THAT HIS BACK CAME NEAR BREAKING AND HE WAS DUMBFOUNDED I CAN TELL YOU WHEN HE SAW IT WAS NOBODY BUT ANDERS +7021-85628-0002-1411: HE WAS SUCH A BIG BOY THAT HE WORE HIGH BOOTS AND CARRIED A JACK KNIFE +7021-85628-0003-1412: NOW THIS KNIFE WAS A SPLENDID ONE THOUGH HALF THE BLADE WAS GONE AND THE HANDLE WAS A LITTLE CRACKED AND ANDERS KNEW THAT ONE IS ALMOST A MAN AS SOON AS ONE HAS A (JACK KNIFE->JACKKNIFE) +7021-85628-0004-1413: YES WHY NOT THOUGHT (ANDERS->ANDREWS) +7021-85628-0005-1414: SEEING THAT I AM SO FINE I MAY AS WELL GO AND VISIT THE KING +7021-85628-0006-1415: I AM GOING TO THE COURT BALL ANSWERED (ANDERS->ANDRES) +7021-85628-0007-1416: AND SHE TOOK (ANDERS->ANDREW'S) HAND AND WALKED WITH HIM UP THE BROAD MARBLE STAIRS WHERE SOLDIERS WERE POSTED AT EVERY THIRD STEP AND THROUGH THE MAGNIFICENT HALLS WHERE COURTIERS IN SILK AND VELVET STOOD BOWING WHEREVER HE WENT +7021-85628-0008-1417: FOR LIKE AS NOT THEY MUST HAVE THOUGHT HIM A PRINCE WHEN THEY SAW HIS FINE CAP +7021-85628-0009-1418: AT THE FARTHER END OF THE LARGEST HALL A TABLE WAS SET WITH GOLDEN CUPS AND GOLDEN PLATES IN LONG ROWS +7021-85628-0010-1419: ON HUGE SILVER PLATTERS WERE PYRAMIDS OF TARTS AND CAKES AND RED WINE SPARKLED IN GLITTERING DECANTERS +7021-85628-0011-1420: THE PRINCESS SAT DOWN UNDER A BLUE CANOPY WITH BOUQUETS OF ROSES AND SHE LET (ANDERS->ANDRE) SIT IN A GOLDEN CHAIR BY HER SIDE +7021-85628-0012-1421: BUT YOU MUST NOT EAT WITH YOUR CAP ON YOUR HEAD SHE SAID AND WAS GOING TO TAKE IT OFF +7021-85628-0013-1422: THE PRINCESS CERTAINLY WAS BEAUTIFUL AND HE WOULD HAVE DEARLY LIKED TO BE KISSED BY HER BUT THE CAP WHICH HIS MOTHER HAD MADE HE WOULD NOT GIVE UP ON ANY CONDITION +7021-85628-0014-1423: HE ONLY SHOOK HIS HEAD +7021-85628-0015-1424: WELL BUT NOW SAID THE PRINCESS AND SHE FILLED HIS POCKETS WITH CAKES AND PUT HER OWN HEAVY GOLD CHAIN AROUND HIS NECK AND BENT DOWN AND KISSED HIM +7021-85628-0016-1425: THAT IS A VERY FINE CAP YOU HAVE HE SAID +7021-85628-0017-1426: SO IT IS SAID (ANDERS->ANDREWS) +7021-85628-0018-1427: AND IT IS MADE OF MOTHER'S BEST YARN AND SHE KNITTED IT HERSELF AND EVERYBODY WANTS TO GET IT AWAY FROM ME +7021-85628-0019-1428: WITH ONE JUMP ANDERS GOT OUT OF HIS CHAIR +7021-85628-0020-1429: HE DARTED LIKE AN ARROW THROUGH ALL THE HALLS DOWN ALL THE STAIRS AND ACROSS THE YARD +7021-85628-0021-1430: HE STILL HELD ON TO IT WITH BOTH HANDS AS HE RUSHED INTO HIS MOTHER'S COTTAGE +7021-85628-0022-1431: AND ALL HIS BROTHERS AND SISTERS STOOD ROUND AND LISTENED 
WITH THEIR MOUTHS OPEN +7021-85628-0023-1432: BUT WHEN HIS BIG BROTHER HEARD THAT HE HAD REFUSED TO GIVE HIS CAP FOR A KING'S GOLDEN CROWN HE SAID THAT ANDERS WAS A STUPID +7021-85628-0024-1433: (ANDERS->ANDREW'S) FACE GREW RED +7021-85628-0025-1434: BUT HIS MOTHER HUGGED HIM CLOSE +7021-85628-0026-1435: NO MY LITTLE (SON->FUN) SHE SAID +7021-85628-0027-1436: IF YOU DRESSED IN SILK AND GOLD FROM TOP TO TOE YOU COULD NOT LOOK ANY NICER THAN IN YOUR LITTLE RED CAP +7127-75946-0000-467: AT THE CONCLUSION OF THE BANQUET WHICH WAS SERVED AT FIVE O'CLOCK THE KING ENTERED HIS CABINET WHERE HIS TAILORS WERE AWAITING HIM FOR THE PURPOSE OF TRYING ON THE CELEBRATED COSTUME REPRESENTING SPRING WHICH WAS THE RESULT OF SO MUCH IMAGINATION AND HAD COST SO MANY EFFORTS OF THOUGHT TO THE DESIGNERS AND ORNAMENT WORKERS OF THE COURT +7127-75946-0001-468: AH VERY WELL +7127-75946-0002-469: LET HIM COME IN THEN SAID THE KING AND AS IF COLBERT HAD BEEN LISTENING AT THE DOOR FOR THE PURPOSE OF KEEPING HIMSELF (AU COURANT->OKARRANT) WITH THE CONVERSATION HE ENTERED AS SOON AS THE KING HAD PRONOUNCED HIS NAME TO THE TWO COURTIERS +7127-75946-0003-470: GENTLEMEN TO YOUR POSTS WHEREUPON SAINT (AIGNAN->DAN) AND (VILLEROY->VILLEROI) TOOK THEIR LEAVE +7127-75946-0004-471: CERTAINLY SIRE BUT I MUST HAVE MONEY TO DO THAT WHAT +7127-75946-0005-472: WHAT DO YOU MEAN INQUIRED (LOUIS->LOUISE) +7127-75946-0006-473: HE HAS GIVEN THEM WITH TOO MUCH GRACE NOT TO HAVE OTHERS STILL TO GIVE IF THEY ARE REQUIRED WHICH IS THE CASE AT THE PRESENT MOMENT +7127-75946-0007-474: IT IS NECESSARY THEREFORE THAT HE SHOULD COMPLY THE KING FROWNED +7127-75946-0008-475: DOES YOUR MAJESTY THEN NO LONGER BELIEVE THE DISLOYAL ATTEMPT +7127-75946-0009-476: NOT AT ALL YOU ARE ON THE CONTRARY MOST AGREEABLE TO ME +7127-75946-0010-477: YOUR MAJESTY'S PLAN THEN IN THIS AFFAIR IS +7127-75946-0011-478: YOU WILL TAKE THEM FROM MY PRIVATE TREASURE +7127-75946-0012-479: THE NEWS CIRCULATED WITH THE RAPIDITY OF LIGHTNING DURING ITS PROGRESS IT KINDLED EVERY VARIETY OF COQUETRY DESIRE AND WILD AMBITION +7127-75946-0013-480: THE KING HAD COMPLETED HIS (TOILETTE->TOILET) BY NINE O'CLOCK HE APPEARED IN AN OPEN CARRIAGE DECORATED WITH BRANCHES OF TREES AND FLOWERS +7127-75946-0014-481: THE QUEENS HAD TAKEN THEIR SEATS UPON A MAGNIFICENT (DIAS->DAIS) OR PLATFORM ERECTED UPON THE BORDERS OF THE LAKE IN A (THEATER->THEATRE) OF WONDERFUL ELEGANCE OF CONSTRUCTION +7127-75946-0015-482: SUDDENLY FOR THE PURPOSE OF RESTORING PEACE AND ORDER (SPRING->SPRANG) ACCOMPANIED BY HIS WHOLE COURT MADE HIS APPEARANCE +7127-75946-0016-483: THE SEASONS ALLIES OF SPRING FOLLOWED HIM CLOSELY TO FORM A QUADRILLE WHICH AFTER MANY WORDS OF MORE OR LESS FLATTERING IMPORT WAS THE COMMENCEMENT OF THE DANCE +7127-75946-0017-484: HIS LEGS THE BEST SHAPED AT COURT WERE DISPLAYED TO GREAT ADVANTAGE IN FLESH (COLORED->COLOURED) SILKEN HOSE (OF->A) SILK SO FINE AND SO TRANSPARENT THAT IT SEEMED ALMOST LIKE FLESH ITSELF +7127-75946-0018-485: THERE WAS SOMETHING IN HIS CARRIAGE WHICH RESEMBLED THE BUOYANT MOVEMENTS OF AN IMMORTAL AND HE DID NOT DANCE SO MUCH AS (SEEM->SEEMED) TO SOAR ALONG +7127-75946-0019-486: YES IT IS SUPPRESSED +7127-75946-0020-487: FAR FROM IT SIRE YOUR MAJESTY (HAVING->HEAVEN) GIVEN NO DIRECTIONS ABOUT IT THE MUSICIANS HAVE RETAINED IT +7127-75946-0021-488: YES SIRE AND READY DRESSED FOR THE BALLET +7127-75946-0022-489: SIRE HE SAID YOUR MAJESTY'S MOST DEVOTED SERVANT APPROACHES TO PERFORM A SERVICE ON THIS OCCASION WITH SIMILAR ZEAL THAT HE HAS ALREADY SHOWN ON THE FIELD OF 
BATTLE +7127-75946-0023-490: THE KING SEEMED ONLY PLEASED WITH EVERY ONE PRESENT +7127-75946-0024-491: MONSIEUR WAS THE ONLY ONE WHO DID NOT UNDERSTAND ANYTHING ABOUT THE MATTER +7127-75946-0025-492: THE BALLET BEGAN THE EFFECT WAS MORE THAN BEAUTIFUL +7127-75946-0026-493: WHEN THE MUSIC BY ITS BURSTS OF MELODY CARRIED AWAY THESE ILLUSTRIOUS DANCERS WHEN (THE->THIS) SIMPLE UNTUTORED PANTOMIME OF THAT PERIOD ONLY THE MORE NATURAL ON ACCOUNT OF THE VERY INDIFFERENT ACTING OF THE AUGUST ACTORS HAD REACHED ITS CULMINATING POINT OF TRIUMPH THE (THEATER->THEATRE) SHOOK WITH TUMULTUOUS APPLAUSE +7127-75946-0027-494: DISDAINFUL OF A SUCCESS OF WHICH MADAME SHOWED NO (ACKNOWLEDGEMENT->ACKNOWLEDGMENT) HE THOUGHT OF NOTHING BUT BOLDLY REGAINING THE MARKED PREFERENCE OF THE PRINCESS +7127-75946-0028-495: BY DEGREES ALL HIS HAPPINESS ALL HIS BRILLIANCY SUBSIDED INTO REGRET AND UNEASINESS SO THAT HIS LIMBS LOST THEIR POWER HIS ARMS HUNG HEAVILY BY HIS SIDES AND HIS HEAD DROOPED AS THOUGH HE WAS STUPEFIED +7127-75946-0029-496: THE KING WHO HAD FROM THIS MOMENT BECOME IN REALITY THE PRINCIPAL DANCER IN THE QUADRILLE CAST A LOOK UPON HIS VANQUISHED RIVAL +7127-75947-0000-426: EVERY ONE COULD OBSERVE HIS AGITATION AND PROSTRATION A PROSTRATION WHICH WAS INDEED THE MORE REMARKABLE SINCE PEOPLE WERE NOT ACCUSTOMED TO SEE HIM WITH HIS ARMS HANGING LISTLESSLY BY HIS SIDE HIS HEAD BEWILDERED AND HIS EYES WITH ALL THEIR BRIGHT INTELLIGENCE (BEDIMMED->BEDEMNED) +7127-75947-0001-427: UPON THIS MADAME DEIGNED TO TURN HER EYES LANGUISHINGLY TOWARDS THE COMTE OBSERVING +7127-75947-0002-428: DO YOU THINK SO SHE REPLIED WITH INDIFFERENCE +7127-75947-0003-429: YES THE CHARACTER WHICH YOUR ROYAL HIGHNESS ASSUMED IS IN PERFECT HARMONY WITH YOUR OWN +7127-75947-0004-430: EXPLAIN YOURSELF +7127-75947-0005-431: I ALLUDE TO THE GODDESS +7127-75947-0006-432: THE PRINCESS INQUIRED NO +7127-75947-0007-433: SHE THEN ROSE HUMMING THE AIR TO WHICH SHE WAS PRESENTLY GOING TO DANCE +7127-75947-0008-434: THE ARROW PIERCED HIS HEART AND WOUNDED HIM MORTALLY +7127-75947-0009-435: A QUARTER OF AN HOUR AFTERWARDS HE RETURNED TO THE (THEATER->THEATRE) BUT IT WILL BE READILY BELIEVED THAT IT WAS ONLY A POWERFUL EFFORT OF REASON OVER HIS GREAT EXCITEMENT THAT ENABLED HIM TO GO BACK OR PERHAPS FOR LOVE IS THUS STRANGELY CONSTITUTED HE FOUND IT IMPOSSIBLE EVEN TO REMAIN MUCH LONGER SEPARATED FROM (THE->THEIR) PRESENCE OF ONE WHO HAD BROKEN HIS HEART +7127-75947-0010-436: WHEN SHE PERCEIVED THE YOUNG MAN SHE ROSE LIKE A WOMAN SURPRISED IN THE MIDST OF IDEAS SHE WAS DESIROUS OF CONCEALING FROM HERSELF +7127-75947-0011-437: REMAIN I IMPLORE YOU THE EVENING IS MOST LOVELY +7127-75947-0012-438: INDEED (AH->A) +7127-75947-0013-439: I REMEMBER NOW AND I CONGRATULATE MYSELF DO YOU LOVE ANY ONE +7127-75947-0014-440: FORGIVE ME I HARDLY KNOW WHAT I AM SAYING A THOUSAND TIMES FORGIVE ME MADAME WAS RIGHT QUITE RIGHT THIS BRUTAL EXILE HAS COMPLETELY TURNED MY BRAIN +7127-75947-0015-441: THERE CANNOT BE A DOUBT HE RECEIVED YOU KINDLY FOR IN FACT YOU RETURNED WITHOUT HIS PERMISSION +7127-75947-0016-442: OH MADEMOISELLE WHY HAVE I NOT A DEVOTED SISTER OR A TRUE FRIEND SUCH AS YOURSELF +7127-75947-0017-443: WHAT ALREADY HERE THEY SAID TO HER +7127-75947-0018-444: I HAVE BEEN HERE THIS QUARTER OF AN HOUR REPLIED LA (VALLIERE->VALLIERS) +7127-75947-0019-445: DID NOT THE DANCING AMUSE YOU NO +7127-75947-0020-446: NO MORE THAN THE DANCING +7127-75947-0021-447: LA (VALLIERE->VALLIERS) IS QUITE A POETESS SAID (TONNAY CHARENTE->TONIET) +7127-75947-0022-448: I AM A 
WOMAN AND THERE ARE FEW LIKE ME WHOEVER LOVES ME FLATTERS ME WHOEVER FLATTERS ME PLEASES ME AND WHOEVER PLEASES WELL SAID MONTALAIS YOU DO NOT FINISH +7127-75947-0023-449: IT IS TOO DIFFICULT REPLIED MADEMOISELLE (DE TONNAY CHARENTE->DETONICHAUCHANT) LAUGHING LOUDLY +7127-75947-0024-450: (LOOK->LUCK) YONDER DO YOU NOT SEE THE MOON SLOWLY RISING SILVERING THE TOPMOST BRANCHES OF THE CHESTNUTS AND THE (OAKS->YOKES) +7127-75947-0025-451: EXQUISITE SOFT TURF OF THE WOODS THE HAPPINESS WHICH YOUR FRIENDSHIP CONFERS UPON ME +7127-75947-0026-452: WELL SAID MADEMOISELLE DE (TONNAY CHARENTE->TONE) I ALSO THINK A GOOD DEAL BUT I TAKE CARE +7127-75947-0027-453: TO SAY NOTHING SAID MONTALAIS SO THAT WHEN MADEMOISELLE DE (TONNAY CHARENTE->TO NECHERANT) THINKS (ATHENAIS->ETHNEE) IS THE ONLY ONE WHO KNOWS IT +7127-75947-0028-454: QUICK QUICK THEN AMONG THE HIGH REED GRASS SAID MONTALAIS STOOP (ATHENAIS->ETHINAY) YOU ARE SO TALL +7127-75947-0029-455: THE YOUNG GIRLS HAD INDEED MADE THEMSELVES SMALL INDEED INVISIBLE +7127-75947-0030-456: SHE WAS HERE JUST NOW SAID THE COUNT +7127-75947-0031-457: YOU ARE POSITIVE THEN +7127-75947-0032-458: YES BUT PERHAPS I FRIGHTENED HER (IN->AND) WHAT WAY +7127-75947-0033-459: HOW IS IT LA (VALLIERE->VALLIERS) SAID MADEMOISELLE DE (TONNAY CHARENTE->TINACHANT) THAT THE VICOMTE DE (BRAGELONNE->BRAGELONE) SPOKE OF YOU AS LOUISE +7127-75947-0034-460: IT SEEMS THE KING WILL NOT CONSENT TO IT +7127-75947-0035-461: GOOD GRACIOUS (HAS->AS) THE KING ANY RIGHT TO INTERFERE IN MATTERS OF THAT KIND +7127-75947-0036-462: I GIVE MY CONSENT +7127-75947-0037-463: OH I AM SPEAKING SERIOUSLY REPLIED MONTALAIS AND MY OPINION IN THIS CASE IS QUITE AS GOOD AS THE KING'S I SUPPOSE IS IT NOT LOUISE +7127-75947-0038-464: LET US RUN THEN SAID ALL THREE AND GRACEFULLY LIFTING UP THE LONG SKIRTS OF THEIR SILK DRESSES THEY LIGHTLY RAN ACROSS THE OPEN SPACE BETWEEN THE LAKE AND THE THICKEST COVERT OF THE PARK +7127-75947-0039-465: IN FACT THE SOUND OF MADAME'S AND THE QUEEN'S CARRIAGES COULD BE HEARD IN THE DISTANCE UPON THE HARD DRY GROUND OF THE ROADS FOLLOWED BY THE (MOUNTED->MOUNTAIN) CAVALIERS +7127-75947-0040-466: IN THIS WAY THE FETE OF THE WHOLE COURT WAS A FETE ALSO FOR THE MYSTERIOUS INHABITANTS OF THE FOREST FOR CERTAINLY THE DEER IN THE BRAKE THE PHEASANT ON THE BRANCH THE FOX IN ITS HOLE WERE ALL LISTENING +7176-88083-0000-707: ALL ABOUT HIM WAS A TUMULT OF BRIGHT AND BROKEN COLOR SCATTERED IN BROAD SPLASHES +7176-88083-0001-708: THE (MERGANSER->MERGANCER) HAD A CRESTED HEAD OF IRIDESCENT GREEN BLACK A BROAD COLLAR OF LUSTROUS WHITE BLACK BACK BLACK AND WHITE WINGS WHITE BELLY SIDES FINELY PENCILLED (IN->AND) BLACK AND WHITE AND (A->HER) BREAST OF RICH CHESTNUT RED STREAKED WITH BLACK +7176-88083-0002-709: HIS FEET WERE RED HIS LONG NARROW BEAK WITH ITS SAW TOOTHED EDGES AND SHARP HOOKED TIP WAS BRIGHT RED +7176-88083-0003-710: BUT HERE HE WAS AT A TERRIBLE DISADVANTAGE AS COMPARED WITH THE OWLS HAWKS AND EAGLES HE HAD NO RENDING CLAWS +7176-88083-0004-711: BUT SUDDENLY STRAIGHT AND SWIFT AS A DIVING CORMORANT HE SHOT DOWN INTO THE TORRENT AND DISAPPEARED BENEATH THE SURFACE +7176-88083-0005-712: ONCE FAIRLY A WING HOWEVER HE WHEELED AND MADE BACK HURRIEDLY FOR HIS PERCH +7176-88083-0006-713: IT MIGHT HAVE SEEMED THAT A TROUT OF THIS SIZE WAS A FAIRLY SUBSTANTIAL MEAL +7176-88083-0007-714: BUT SUCH WAS HIS KEENNESS THAT EVEN WHILE THE WIDE FLUKES OF HIS ENGORGED VICTIM WERE STILL STICKING OUT AT THE CORNERS OF HIS BEAK HIS FIERCE RED EYES WERE ONCE MORE PEERING DOWNWARD INTO THE TORRENT 
IN SEARCH OF FRESH PREY +7176-88083-0008-715: IN DESPAIR HE HURLED HIMSELF DOWNWARD TOO SOON +7176-88083-0009-716: THE GREAT HAWK (FOLLOWED->FOWLED) HURRIEDLY TO RETRIEVE HIS PREY FROM THE GROUND +7176-88083-0010-717: THE CAT GROWLED SOFTLY PICKED UP THE PRIZE IN HER JAWS AND TROTTED INTO THE BUSHES TO DEVOUR IT +7176-88083-0011-718: IN FACT HE HAD JUST FINISHED IT THE LAST OF THE TROUT'S TAIL HAD JUST VANISHED WITH A SPASM DOWN HIS STRAINED GULLET WHEN THE BAFFLED HAWK CAUGHT SIGHT OF HIM AND SWOOPED +7176-88083-0012-719: THE HAWK ALIGHTED ON THE DEAD BRANCH AND SAT UPRIGHT MOTIONLESS AS IF SURPRISED +7176-88083-0013-720: LIKE HIS UNFORTUNATE LITTLE COUSIN THE TEAL HE TOO HAD FELT THE FEAR OF DEATH SMITTEN INTO HIS HEART AND WAS HEADING DESPERATELY FOR THE REFUGE OF SOME DARK OVERHANGING BANK DEEP FRINGED WITH WEEDS WHERE THE DREADFUL EYE OF THE HAWK SHOULD NOT DISCERN HIM +7176-88083-0014-721: THE HAWK SAT UPON THE BRANCH AND WATCHED HIS QUARRY SWIMMING BENEATH THE SURFACE +7176-88083-0015-722: ALMOST INSTANTLY HE WAS FORCED TO THE TOP +7176-88083-0016-723: STRAIGHTWAY THE (HAWK->HOT) GLIDED FROM HIS PERCH AND DARTED AFTER HIM +7176-88083-0017-724: BUT AT THIS POINT IN THE RAPIDS IT WAS IMPOSSIBLE FOR HIM TO STAY DOWN +7176-88083-0018-725: BUT THIS FREQUENTER OF THE HEIGHTS OF AIR FOR ALL HIS SAVAGE VALOR WAS TROUBLED AT THE LEAPING WAVES AND THE TOSSING FOAM OF THESE MAD RAPIDS HE DID NOT UNDERSTAND THEM +7176-88083-0019-726: AS HE FLEW HIS DOWN REACHING CLUTCHING TALONS WERE NOT HALF A YARD ABOVE THE FUGITIVE'S HEAD +7176-88083-0020-727: WHERE THE (WAVES->WAY IS) FOR AN INSTANT SANK THEY CAME CLOSER BUT NOT QUITE WITHIN GRASPING REACH +7176-88083-0021-728: BUT AS BEFORE THE LEAPING WAVES OF THE RAPIDS WERE TOO MUCH FOR HIS PURSUER AND HE WAS ABLE TO FLAP HIS WAY ONWARD IN A CLOUD OF FOAM WHILE DOOM HUNG LOW ABOVE HIS HEAD YET HESITATED TO STRIKE +7176-88083-0022-729: THE HAWK EMBITTERED BY THE LOSS OF HIS FIRST QUARRY HAD BECOME AS DOGGED IN PURSUIT AS A WEASEL NOT TO BE SHAKEN OFF OR EVADED OR DECEIVED +7176-88083-0023-730: HE HAD A LOT OF LINE OUT AND THE PLACE WAS NONE TOO FREE FOR A LONG CAST BUT HE WAS IMPATIENT TO DROP HIS FLIES AGAIN ON THE SPOT WHERE THE BIG FISH WAS FEEDING +7176-88083-0024-731: THE LAST DROP FLY AS LUCK WOULD HAVE IT CAUGHT JUST IN THE CORNER OF THE HAWK'S ANGRILY OPEN BEAK HOOKING ITSELF FIRMLY +7176-88083-0025-732: AT THE SUDDEN SHARP STING OF IT THE GREAT BIRD TURNED HIS HEAD AND NOTICED FOR THE FIRST TIME THE FISHERMAN STANDING ON THE BANK +7176-88083-0026-733: THE DRAG UPON HIS BEAK AND THE LIGHT CHECK UPON HIS WINGS WERE INEXPLICABLE TO HIM AND APPALLING +7176-88083-0027-734: (THEN->THAN) THE LEADER PARTED FROM THE LINE +7176-92135-0000-661: HE IS A WELCOME FIGURE AT THE GARDEN PARTIES OF THE ELECT WHO ARE ALWAYS READY TO ENCOURAGE HIM BY ACCEPTING FREE SEATS FOR HIS PLAY ACTOR MANAGERS NOD TO HIM EDITORS ALLOW HIM TO CONTRIBUTE WITHOUT CHARGE TO A SYMPOSIUM ON THE PRICE OF GOLF BALLS +7176-92135-0001-662: IN SHORT HE BECOMES A PROMINENT FIGURE IN LONDON SOCIETY AND IF HE IS NOT CAREFUL SOMEBODY WILL SAY SO +7176-92135-0002-663: BUT EVEN THE UNSUCCESSFUL DRAMATIST HAS HIS MOMENTS +7176-92135-0003-664: YOUR PLAY MUST BE NOT MERELY A GOOD PLAY BUT A SUCCESSFUL ONE +7176-92135-0004-665: FRANKLY I CANNOT ALWAYS SAY +7176-92135-0005-666: BUT SUPPOSE YOU SAID I'M FOND OF WRITING MY PEOPLE ALWAYS SAY MY LETTERS HOME ARE GOOD ENOUGH FOR PUNCH +7176-92135-0006-667: I'VE GOT A LITTLE IDEA FOR A PLAY ABOUT A MAN AND A WOMAN AND ANOTHER WOMAN AND BUT PERHAPS 
(I'D->I) BETTER KEEP THE PLOT A SECRET FOR THE MOMENT +7176-92135-0007-668: ANYHOW IT'S JOLLY EXCITING AND I CAN DO THE DIALOGUE ALL RIGHT +7176-92135-0008-669: LEND ME YOUR EAR FOR TEN MINUTES AND YOU SHALL LEARN JUST WHAT STAGECRAFT IS +7176-92135-0009-670: AND I SHOULD BEGIN WITH A SHORT HOMILY ON SOLILOQUY +7176-92135-0010-671: (HAM->HIM) TO BE OR NOT TO BE +7176-92135-0011-672: NOW THE OBJECT OF THIS (SOLILOQUY->SOLOQUY) IS PLAIN +7176-92135-0012-673: INDEED IRRESOLUTION (BEING->MEAN) THE KEYNOTE OF HAMLET'S SOLILOQUY A CLEVER PLAYER COULD TO SOME EXTENT INDICATE THE WHOLE THIRTY LINES BY A SILENT WORKING OF THE (JAW->JOB) BUT AT THE SAME TIME IT WOULD BE IDLE TO DENY THAT HE WOULD MISS THE FINER SHADES OF THE DRAMATIST'S MEANING +7176-92135-0013-674: WE MODERNS HOWEVER SEE THE ABSURDITY OF IT +7176-92135-0014-675: IF IT BE GRANTED FIRST THAT THE THOUGHTS OF A CERTAIN CHARACTER SHOULD BE KNOWN TO THE AUDIENCE AND SECONDLY THAT SOLILOQUY OR THE HABIT OF THINKING ALOUD IS IN OPPOSITION TO MODERN STAGE (TECHNIQUE->TYPENIQUE) HOW SHALL A SOLILOQUY BE AVOIDED WITHOUT DAMAGE TO THE PLAY +7176-92135-0015-676: AND SO ON TILL YOU GET TO THE END (WHEN OPHELIA->ONE OF WILLIAM) MIGHT SAY AH YES OR SOMETHING NON COMMITTAL OF THAT SORT +7176-92135-0016-677: THIS WOULD BE AN EASY WAY OF DOING IT BUT IT WOULD NOT BE THE BEST WAY FOR THE REASON THAT IT IS TOO EASY TO CALL ATTENTION TO ITSELF +7176-92135-0017-678: IN THE OLD BADLY MADE PLAY IT WAS FREQUENTLY NECESSARY FOR ONE OF THE CHARACTERS TO TAKE THE AUDIENCE INTO HIS CONFIDENCE +7176-92135-0018-679: IN THE MODERN WELL CONSTRUCTED PLAY HE SIMPLY RINGS UP AN IMAGINARY CONFEDERATE AND TELLS HIM WHAT HE IS GOING TO DO COULD ANYTHING BE MORE NATURAL +7176-92135-0019-680: I WANT DOUBLE NINE (HAL LO->HELLO) +7176-92135-0020-681: DOUBLE NINE (TWO->TO) THREE (ELSINORE->ELSINOR) DOUBLE (NINE->NOT) YES (HALLO->HELLO) IS THAT YOU HORATIO (HAMLET->HAMLE) SPEAKING +7176-92135-0021-682: I SAY I'VE BEEN (WONDERING->WANDERING) ABOUT THIS BUSINESS +7176-92135-0022-683: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER IN THE MIND TO SUFFER THE SLINGS AND ARROWS WHAT NO HAMLET SPEAKING +7176-92135-0023-684: YOU GAVE ME DOUBLE FIVE I WANT DOUBLE NINE (HALLO->HELLO) IS THAT YOU HORATIO HAMLET SPEAKING +7176-92135-0024-685: TO BE OR NOT TO BE THAT IS THE QUESTION WHETHER TIS NOBLER +7176-92135-0025-686: IT IS TO LET HAMLET IF THAT (HAPPEN->HAPPENED) TO BE THE NAME OF YOUR CHARACTER ENTER WITH A SMALL DOG PET FALCON (MONGOOSE->MONGOO'S) TAME BEAR OR WHATEVER ANIMAL IS MOST IN KEEPING WITH THE PART AND CONFIDE IN THIS ANIMAL SUCH SORROWS HOPES OR SECRET HISTORY AS THE AUDIENCE HAS GOT TO KNOW +7176-92135-0026-687: (ENTER->INTER) HAMLET WITH HIS FAVOURITE (BOAR HOUND->BOREHOUND) +7176-92135-0027-688: LADY (LARKSPUR STARTS->LARKSBURG START) SUDDENLY AND TURNS TOWARDS HIM +7176-92135-0028-689: (LARKSPUR BIT->LARKSPER BID) ME AGAIN THIS MORNING FOR THE THIRD TIME +7176-92135-0029-690: I WANT TO GET AWAY FROM IT ALL (SWOONS->SWOON) +7176-92135-0030-691: (ENTER->ENTERED) LORD ARTHUR (FLUFFINOSE->FLAPHANO'S) +7176-92135-0031-692: AND THERE YOU ARE YOU WILL OF COURSE APPRECIATE THAT THE UNFINISHED SENTENCES NOT ONLY SAVE TIME BUT ALSO MAKE THE MANOEUVRING VERY MUCH MORE NATURAL +7176-92135-0032-693: HOW YOU MAY BE WONDERING ARE YOU TO BEGIN YOUR MASTERPIECE +7176-92135-0033-694: RELAPSES INTO SILENCE FOR THE REST OF THE EVENING +7176-92135-0034-695: THE DUCHESS OF SOUTHBRIDGE TO LORD REGGIE OH (REGGIE->READY) WHAT DID YOU SAY +7176-92135-0035-696: THEN LORD (TUPPENY 
WELL->TUPPENNY) WHAT ABOUT AUCTION +7176-92135-0036-697: THE CROWD DRIFTS OFF (LEAVING->LEAPING) THE HERO AND HEROINE ALONE IN THE MIDDLE OF THE STAGE AND THEN YOU CAN BEGIN +7176-92135-0037-698: THEN IS THE TIME TO INTRODUCE A MEAL ON THE STAGE +7176-92135-0038-699: A STAGE MEAL IS POPULAR BECAUSE IT (PROVES->PROVED) TO THE AUDIENCE THAT THE ACTORS EVEN WHEN CALLED CHARLES (HAWTREY->HALTREE) OR OWEN (NARES->NEAR'S) ARE REAL PEOPLE JUST LIKE YOU AND ME +7176-92135-0039-700: (TEA->T) PLEASE MATTHEWS BUTLER IMPASSIVELY +7176-92135-0040-701: (HOSTESS->HOSTES) REPLACES LUMP AND INCLINES EMPTY TEAPOT OVER TRAY FOR (A->*) MOMENT THEN (HANDS HIM->HANDSOME) A CUP PAINTED BROWN INSIDE (THUS DECEIVING->LUSTY SEEING) THE GENTLEMAN WITH THE TELESCOPE IN THE UPPER CIRCLE +7176-92135-0041-702: (RE ENTER->REINTER) BUTLER AND THREE FOOTMEN WHO (REMOVE->MOVED) THE TEA THINGS HOSTESS (TO->TWO) GUEST +7176-92135-0042-703: (IN->AND) NOVELS THE HERO HAS OFTEN PUSHED HIS MEALS AWAY UNTASTED BUT NO (STAGE->STEED) HERO WOULD DO ANYTHING SO UNNATURAL AS THIS +7176-92135-0043-704: TWO BITES ARE MADE AND THE (BREAD->ABRET) IS CRUMBLED WITH AN AIR OF GREAT EAGERNESS INDEED ONE FEELS THAT IN REAL LIFE THE (GUEST->GUESTS) WOULD CLUTCH HOLD OF THE FOOTMAN AND SAY HALF A (MO OLD->MOLE) CHAP I HAVEN'T NEARLY FINISHED BUT THE (ACTOR IS->ACTOR'S) BETTER SCHOOLED THAN THIS +7176-92135-0044-705: BUT IT IS THE CIGARETTE WHICH CHIEFLY HAS BROUGHT THE MODERN DRAMA TO ITS PRESENT STATE OF PERFECTION +7176-92135-0045-706: LORD JOHN TAKING OUT GOLD (CIGARETTE->SICK RED) CASE FROM HIS LEFT HAND UPPER WAISTCOAT POCKET +7729-102255-0000-261: THE BOGUS LEGISLATURE NUMBERED THIRTY SIX MEMBERS +7729-102255-0001-262: THIS WAS AT THE MARCH ELECTION EIGHTEEN FIFTY FIVE +7729-102255-0002-263: THAT SUMMER'S (EMIGRATION->IMMIGRATION) HOWEVER BEING MAINLY FROM THE FREE STATES GREATLY CHANGED THE RELATIVE STRENGTH OF THE TWO PARTIES +7729-102255-0003-264: FOR GENERAL SERVICE THEREFORE REQUIRING NO SPECIAL EFFORT THE NUMERICAL STRENGTH OF THE FACTIONS WAS ABOUT EQUAL WHILE ON EXTRAORDINARY OCCASIONS THE TWO THOUSAND BORDER RUFFIAN RESERVE LYING A LITTLE FARTHER BACK FROM THE STATE LINE COULD AT ANY TIME EASILY TURN THE SCALE +7729-102255-0004-265: THE FREE STATE MEN HAD ONLY THEIR CONVICTIONS THEIR INTELLIGENCE THEIR COURAGE AND THE MORAL SUPPORT OF THE NORTH THE CONSPIRACY HAD ITS SECRET COMBINATION THE TERRITORIAL OFFICIALS THE LEGISLATURE THE BOGUS LAWS THE COURTS THE MILITIA OFFICERS THE PRESIDENT AND THE ARMY +7729-102255-0005-266: THIS WAS A FORMIDABLE ARRAY OF ADVANTAGES SLAVERY WAS PLAYING WITH LOADED DICE +7729-102255-0006-267: (COMING->COMMON) BY WAY OF THE MISSOURI RIVER TOWNS HE FELL FIRST AMONG BORDER RUFFIAN COMPANIONSHIP AND INFLUENCES AND PERHAPS HAVING HIS INCLINATIONS ALREADY (MOLDED->MOULDED) BY HIS WASHINGTON INSTRUCTIONS HIS EARLY IMPRESSIONS WERE DECIDEDLY ADVERSE TO THE FREE STATE CAUSE +7729-102255-0007-268: HIS RECEPTION SPEECH AT (WESTPORT->WESTWARD) IN WHICH HE MAINTAINED THE LEGALITY OF THE LEGISLATURE AND HIS DETERMINATION TO ENFORCE THEIR LAWS DELIGHTED HIS PRO SLAVERY AUDITORS +7729-102255-0008-269: ALL THE TERRITORIAL DIGNITARIES WERE PRESENT GOVERNOR (SHANNON->SHAN AND) PRESIDED JOHN CALHOUN THE SURVEYOR GENERAL MADE THE PRINCIPAL SPEECH A DENUNCIATION OF THE (ABOLITIONISTS->ABOLITIONIST) SUPPORTING THE TOPEKA MOVEMENT CHIEF JUSTICE (LECOMPTE->LE COMTE) DIGNIFIED THE OCCASION WITH APPROVING REMARKS +7729-102255-0009-270: ALL (DISSENT->DESCENT) ALL NON COMPLIANCE ALL HESITATION ALL MERE SILENCE EVEN WERE IN THEIR 
STRONGHOLD TOWNS LIKE (LEAVENWORTH->LEVINWORTH) BRANDED AS ABOLITIONISM DECLARED TO BE HOSTILITY TO THE PUBLIC WELFARE AND PUNISHED WITH PROSCRIPTION PERSONAL VIOLENCE EXPULSION AND FREQUENTLY DEATH +7729-102255-0010-271: OF THE LYNCHINGS THE MOBS AND THE MURDERS IT WOULD BE IMPOSSIBLE EXCEPT IN A VERY EXTENDED WORK TO NOTE THE FREQUENT AND ATROCIOUS DETAILS +7729-102255-0011-272: THE PRESENT CHAPTERS CAN ONLY TOUCH UPON THE MORE SALIENT MOVEMENTS OF THE CIVIL WAR IN KANSAS WHICH HAPPILY (WERE->ARE) NOT SANGUINARY IF HOWEVER THE INDIVIDUAL AND MORE ISOLATED CASES OF BLOODSHED COULD BE DESCRIBED THEY WOULD SHOW A STARTLING AGGREGATE OF BARBARITY AND (*->A) LOSS OF LIFE FOR OPINION'S SAKE +7729-102255-0012-273: SEVERAL HUNDRED FREE STATE MEN PROMPTLY RESPONDED TO THE SUMMONS +7729-102255-0013-274: IT WAS IN FACT THE BEST WEAPON OF ITS DAY +7729-102255-0014-275: THE LEADERS OF THE CONSPIRACY BECAME DISTRUSTFUL OF THEIR POWER TO CRUSH THE TOWN +7729-102255-0015-276: ONE OF HIS MILITIA GENERALS SUGGESTED THAT THE GOVERNOR SHOULD REQUIRE THE OUTLAWS AT LAWRENCE AND ELSEWHERE TO SURRENDER THE (SHARPS->SHARP'S) RIFLES ANOTHER WROTE ASKING HIM TO CALL OUT THE GOVERNMENT TROOPS AT FORT (LEAVENWORTH->LEVINWORTH) +7729-102255-0016-277: THE GOVERNOR ON HIS PART BECOMING DOUBTFUL OF THE LEGALITY OF EMPLOYING MISSOURI MILITIA TO ENFORCE KANSAS LAWS WAS ALSO EAGER TO SECURE THE HELP OF FEDERAL TROOPS +7729-102255-0017-278: SHERIFF JONES HAD HIS POCKETS ALWAYS FULL OF WRITS ISSUED IN THE SPIRIT OF PERSECUTION BUT WAS OFTEN BAFFLED BY THE SHARP WITS AND READY RESOURCES OF THE FREE STATE PEOPLE AND SOMETIMES DEFIED OUTRIGHT +7729-102255-0018-279: LITTLE BY LITTLE HOWEVER THE LATTER BECAME HEMMED AND BOUND IN THE MESHES OF THE VARIOUS DEVICES AND PROCEEDINGS WHICH THE TERRITORIAL OFFICIALS EVOLVED FROM THE BOGUS LAWS +7729-102255-0019-280: TO EMBARRASS THIS DAMAGING EXPOSURE JUDGE (LECOMPTE->LECOMTE) ISSUED A WRIT AGAINST THE EX GOVERNOR ON A FRIVOLOUS CHARGE OF CONTEMPT +7729-102255-0020-281: THE INCIDENT WAS NOT VIOLENT NOR EVEN DRAMATIC NO POSSE WAS SUMMONED NO FURTHER EFFORT MADE AND (REEDER->READER) FEARING PERSONAL VIOLENCE SOON FLED IN DISGUISE +7729-102255-0021-282: BUT THE AFFAIR WAS MAGNIFIED AS A CROWNING PROOF THAT THE FREE STATE MEN WERE (INSURRECTIONISTS->INSURRECTIONOUS) AND OUTLAWS +7729-102255-0022-283: FROM THESE AGAIN SPRANG BARRICADED AND FORTIFIED DWELLINGS CAMPS AND (SCOUTING->SCOUT) PARTIES FINALLY CULMINATING IN ROVING GUERRILLA BANDS HALF PARTISAN HALF PREDATORY +7729-102255-0023-284: (THEIR->THERE ARE) DISTINCTIVE CHARACTERS HOWEVER DISPLAY ONE BROAD AND UNFAILING DIFFERENCE +7729-102255-0024-285: THE FREE STATE MEN CLUNG TO THEIR PRAIRIE TOWNS AND PRAIRIE RAVINES WITH ALL THE OBSTINACY AND COURAGE OF TRUE DEFENDERS OF THEIR HOMES AND FIRESIDES +7729-102255-0025-286: (THEIR->THERE) ASSUMED CHARACTER CHANGED WITH THEIR CHANGING OPPORTUNITIES OR NECESSITIES +7729-102255-0026-287: IN THE SHOOTING OF (SHERIFF->SHERIFF'S) JONES (IN->AND) LAWRENCE AND IN THE REFUSAL OF EX GOVERNOR (BEEDER->READER) TO ALLOW THE DEPUTY MARSHAL TO ARREST HIM THEY DISCOVERED GRAVE (OFFENSES->OFFENCES) AGAINST THE TERRITORIAL AND (*->THE) UNITED STATES LAWS +7729-102255-0027-288: FOOTNOTE (SUMNER->SUMMER) TO SHANNON MAY TWELFTH EIGHTEEN FIFTY SIX +7729-102255-0028-289: PRIVATE PERSONS WHO (HAD LEASED->AT LEAST) THE FREE STATE HOTEL VAINLY BESOUGHT THE VARIOUS AUTHORITIES TO (PREVENT->PRESENT) THE DESTRUCTION OF THEIR PROPERTY +7729-102255-0029-290: TEN DAYS WERE CONSUMED IN THESE NEGOTIATIONS BUT THE SPIRIT OF 
VENGEANCE REFUSED TO YIELD +7729-102255-0030-291: HE SUMMONED HALF A DOZEN CITIZENS TO JOIN HIS POSSE WHO FOLLOWED OBEYED AND ASSISTED HIM +7729-102255-0031-292: HE CONTINUED HIS PRETENDED SEARCH AND TO GIVE COLOR TO HIS ERRAND MADE (TWO ARRESTS->TO ARREST) +7729-102255-0032-293: THE FREE STATE HOTEL A STONE BUILDING IN DIMENSIONS FIFTY BY SEVENTY FEET THREE STORIES HIGH AND HANDSOMELY FURNISHED PREVIOUSLY OCCUPIED ONLY FOR LODGING ROOMS ON THAT DAY FOR THE FIRST TIME OPENED ITS TABLE ACCOMMODATIONS TO THE PUBLIC AND PROVIDED A FREE DINNER IN HONOR OF THE OCCASION +7729-102255-0033-294: AS HE HAD PROMISED TO PROTECT THE HOTEL THE REASSURED CITIZENS BEGAN TO LAUGH AT THEIR OWN FEARS +7729-102255-0034-295: TO THEIR SORROW THEY WERE SOON UNDECEIVED +7729-102255-0035-296: THE MILITARY FORCE PARTLY RABBLE PARTLY ORGANIZED HAD MEANWHILE MOVED INTO THE TOWN +7729-102255-0036-297: HE PLANTED A COMPANY BEFORE THE HOTEL AND DEMANDED A SURRENDER OF THE ARMS BELONGING TO THE FREE STATE MILITARY COMPANIES +7729-102255-0037-298: HALF AN HOUR LATER TURNING A DEAF EAR TO ALL REMONSTRANCE HE GAVE THE PROPRIETORS UNTIL FIVE O'CLOCK TO REMOVE THEIR FAMILIES AND PERSONAL PROPERTY FROM THE FREE STATE HOTEL +7729-102255-0038-299: (ATCHISON->ATTITSON) WHO HAD BEEN HARANGUING THE MOB PLANTED HIS TWO GUNS BEFORE THE BUILDING AND TRAINED THEM UPON IT +7729-102255-0039-300: THE INMATES BEING REMOVED AT THE APPOINTED HOUR A FEW CANNON BALLS WERE FIRED THROUGH THE STONE WALLS +7729-102255-0040-301: IN THIS INCIDENT CONTRASTING THE CREATIVE AND THE DESTRUCTIVE SPIRIT OF THE FACTIONS THE (EMIGRANT AID->IMMIGRANT AIDS) SOCIETY OF MASSACHUSETTS FINDS ITS MOST HONORABLE AND TRIUMPHANT VINDICATION +7729-102255-0041-302: THE WHOLE PROCEEDING WAS SO CHILDISH THE MISERABLE PLOT SO TRANSPARENT THE (OUTRAGE->OUTRAGED) SO GROSS AS TO BRING DISGUST TO THE BETTER CLASS OF BORDER RUFFIANS WHO WERE WITNESSES AND ACCESSORIES +7729-102255-0042-303: (RELOCATED->RE LOCATED) FOOTNOTE GOVERNOR ROBINSON BEING ON HIS WAY EAST THE STEAMBOAT ON WHICH HE WAS (TRAVELING->TRAVELLING) STOPPED AT LEXINGTON MISSOURI +7729-102255-0043-304: IN A FEW DAYS AN OFFICER CAME WITH A REQUISITION FROM GOVERNOR SHANNON AND TOOK THE PRISONER BY (LAND TO WESTPORT->LANDA WEST PORT) AND AFTERWARDS FROM THERE TO KANSAS CITY (AND LEAVENWORTH->IN LEVINWORTH) +7729-102255-0044-305: HERE HE WAS PLACED IN THE CUSTODY OF CAPTAIN MARTIN OF THE KICKAPOO RANGERS WHO PROVED A KIND JAILER AND MATERIALLY ASSISTED IN PROTECTING HIM FROM THE DANGEROUS INTENTIONS OF THE MOB WHICH AT THAT TIME HELD (LEAVENWORTH->LEVIN WORTH) UNDER (A->THE) REIGN OF TERROR +7729-102255-0045-306: CAPTAIN MARTIN SAID I SHALL GIVE YOU A PISTOL TO HELP PROTECT YOURSELF IF WORSE COMES TO WORST +7729-102255-0046-307: IN THE EARLY MORNING OF THE NEXT DAY MAY TWENTY NINTH A COMPANY OF DRAGOONS WITH ONE EMPTY SADDLE CAME DOWN FROM THE FORT AND WHILE THE PRO SLAVERY MEN STILL SLEPT THE PRISONER AND HIS ESCORT WERE ON THEIR WAY ACROSS THE PRAIRIES TO LECOMPTON IN THE CHARGE OF OFFICERS OF THE UNITED STATES ARMY +8224-274381-0000-1451: THOUGH THROWN INTO PRISON FOR THIS ENTERPRISE AND DETAINED SOME TIME HE WAS NOT DISCOURAGED BUT STILL CONTINUED BY HIS COUNTENANCE AND PROTECTION TO INFUSE SPIRIT INTO THE DISTRESSED ROYALISTS +8224-274381-0001-1452: AMONG OTHER PERSONS OF DISTINCTION WHO UNITED THEMSELVES TO HIM WAS LORD NAPIER OF (MERCHISTON->MURCHISON) SON OF THE FAMOUS INVENTOR OF THE (LOGARITHMS->LOGARTHEMS) THE PERSON TO WHOM THE TITLE OF A GREAT MAN IS MORE JUSTLY DUE THAN TO ANY OTHER WHOM HIS COUNTRY 
EVER PRODUCED +8224-274381-0002-1453: WHILE THE FORMER FORETOLD THAT THE SCOTTISH COVENANTERS WERE SECRETLY FORMING A UNION WITH THE ENGLISH PARLIAMENT AND (INCULCATED->INCALCATED) THE NECESSITY OF PREVENTING THEM BY SOME VIGOROUS UNDERTAKING THE LATTER STILL INSISTED THAT EVERY SUCH ATTEMPT WOULD PRECIPITATE THEM INTO MEASURES TO WHICH OTHERWISE THEY WERE NOT PERHAPS INCLINED +8224-274381-0003-1454: THE KING'S EARS WERE NOW OPEN TO MONTROSE'S (COUNSELS->COUNCILS) WHO PROPOSED NONE BUT THE BOLDEST AND MOST DARING AGREEABLY TO THE DESPERATE STATE OF THE ROYAL CAUSE IN SCOTLAND +8224-274381-0004-1455: FIVE HUNDRED MEN MORE WHO HAD BEEN LEVIED BY THE COVENANTERS WERE PERSUADED TO EMBRACE THE ROYAL CAUSE AND WITH THIS COMBINED FORCE HE HASTENED TO ATTACK LORD (ELCHO->ELKO) WHO LAY AT PERTH WITH AN ARMY OF SIX THOUSAND MEN ASSEMBLED UPON THE FIRST NEWS OF THE IRISH INVASION +8224-274381-0005-1456: DREADING THE SUPERIOR POWER OF ARGYLE WHO HAVING JOINED HIS VASSALS TO A FORCE LEVIED BY THE PUBLIC WAS APPROACHING WITH A CONSIDERABLE ARMY MONTROSE HASTENED (NORTHWARDS->NORTHWARD) IN ORDER TO ROUSE AGAIN THE MARQUIS OF (HUNTLEY->HUNTLY) AND THE GORDONS WHO HAVING BEFORE HASTILY TAKEN ARMS HAD BEEN INSTANTLY SUPPRESSED BY THE COVENANTERS +8224-274381-0006-1457: THIS NOBLEMAN'S CHARACTER THOUGH CELEBRATED FOR POLITICAL COURAGE AND CONDUCT WAS VERY LOW FOR MILITARY PROWESS AND AFTER SOME SKIRMISHES IN WHICH HE WAS WORSTED HE HERE ALLOWED MONTROSE TO ESCAPE HIM +8224-274381-0007-1458: BY QUICK MARCHES THROUGH THESE INACCESSIBLE MOUNTAINS THAT GENERAL FREED HIMSELF FROM THE SUPERIOR FORCES OF THE COVENANTERS +8224-274381-0008-1459: WITH THESE AND SOME (REENFORCEMENTS->REINFORCEMENTS) OF THE (ATHOLEMEN->ETHEL MEN) AND (MACDONALDS->MC DONALDS) WHOM HE HAD RECALLED MONTROSE FELL SUDDENLY UPON ARGYLE'S COUNTRY AND LET LOOSE UPON IT ALL THE RAGE OF WAR CARRYING OFF THE CATTLE BURNING THE HOUSES AND PUTTING THE INHABITANTS TO THE SWORD +8224-274381-0009-1460: THIS SEVERITY BY WHICH MONTROSE SULLIED HIS VICTORIES WAS THE RESULT OF PRIVATE ANIMOSITY AGAINST THE CHIEFTAIN AS MUCH AS OF ZEAL FOR THE PUBLIC CAUSE (ARGYLE->OUR GUILE) COLLECTING THREE THOUSAND MEN MARCHED IN QUEST OF THE ENEMY WHO HAD RETIRED WITH THEIR PLUNDER AND HE LAY AT (INNERLOCHY->INERLOCHY) SUPPOSING HIMSELF STILL AT A CONSIDERABLE DISTANCE FROM THEM +8224-274381-0010-1461: BY A QUICK AND UNEXPECTED MARCH MONTROSE HASTENED TO (INNERLOCHY->IN A LOCKY) AND PRESENTED HIMSELF IN ORDER OF BATTLE BEFORE THE SURPRISED BUT NOT (AFFRIGHTENED->A FRIGHTENED) COVENANTERS +8224-274381-0011-1462: HIS CONDUCT AND PRESENCE OF MIND IN THIS EMERGENCE APPEARED CONSPICUOUS +8224-274381-0012-1463: MONTROSE WEAK IN CAVALRY HERE LINED HIS TROOPS OF HORSE WITH INFANTRY AND AFTER PUTTING THE ENEMY'S HORSE TO ROUT FELL WITH UNITED FORCE UPON THEIR FOOT WHO WERE ENTIRELY CUT IN PIECES THOUGH WITH THE LOSS OF THE GALLANT LORD GORDON ON THE PART OF THE ROYALISTS +8224-274381-0013-1464: FROM THE SAME MEN NEW REGIMENTS AND NEW COMPANIES WERE FORMED DIFFERENT OFFICERS APPOINTED AND THE WHOLE MILITARY FORCE PUT INTO SUCH HANDS AS THE INDEPENDENTS COULD RELY ON +8224-274381-0014-1465: BESIDES MEMBERS OF PARLIAMENT WHO WERE EXCLUDED MANY OFFICERS UNWILLING TO SERVE UNDER THE NEW GENERALS THREW UP THEIR COMMISSIONS AND (UNWARILY->THEN WARILY) FACILITATED THE PROJECT OF PUTTING THE ARMY ENTIRELY INTO THE HANDS OF THAT FACTION +8224-274381-0015-1466: THOUGH THE DISCIPLINE OF THE FORMER PARLIAMENTARY ARMY WAS NOT CONTEMPTIBLE A MORE EXACT PLAN WAS INTRODUCED AND RIGOROUSLY 
EXECUTED BY THESE NEW COMMANDERS +8224-274381-0016-1467: (VALOR->VALOUR) INDEED WAS VERY GENERALLY DIFFUSED OVER THE ONE PARTY AS WELL AS THE OTHER DURING THIS PERIOD DISCIPLINE ALSO WAS ATTAINED BY THE FORCES OF THE PARLIAMENT BUT THE PERFECTION OF THE MILITARY ART IN CONCERTING THE GENERAL PLANS OF ACTION AND THE OPERATIONS OF THE FIELD SEEMS STILL ON BOTH SIDES TO HAVE BEEN IN A GREAT MEASURE WANTING +8224-274381-0017-1468: HISTORIANS AT LEAST PERHAPS FROM THEIR OWN IGNORANCE AND INEXPERIENCE HAVE NOT REMARKED ANY THING BUT A HEADLONG IMPETUOUS CONDUCT EACH PARTY HURRYING TO A BATTLE (WHERE->WERE) VALOR AND FORTUNE CHIEFLY DETERMINED THE SUCCESS +8224-274384-0000-1437: HE PASSED THROUGH HENLEY SAINT (ALBANS->ALBAN'S) AND CAME SO NEAR TO LONDON AS HARROW ON THE HILL +8224-274384-0001-1438: THE SCOTTISH GENERALS AND COMMISSIONERS AFFECTED GREAT SURPRISE ON THE APPEARANCE OF THE KING AND THOUGH THEY PAID HIM ALL THE EXTERIOR RESPECT DUE TO HIS DIGNITY THEY INSTANTLY SET A GUARD UPON HIM UNDER COLOR OF PROTECTION AND MADE HIM IN REALITY A PRISONER +8224-274384-0002-1439: THEY INFORMED THE ENGLISH PARLIAMENT OF THIS UNEXPECTED INCIDENT AND ASSURED THEM THAT THEY HAD ENTERED INTO NO PRIVATE TREATY WITH THE KING +8224-274384-0003-1440: OR HATH HE GIVEN US ANY GIFT +8224-274384-0004-1441: AND THE MEN OF ISRAEL ANSWERED THE MEN OF JUDAH AND SAID WE HAVE TEN PARTS IN THE KING AND WE HAVE ALSO MORE RIGHT IN DAVID THAN YE WHY THEN DID YE DESPISE US THAT OUR ADVICE SHOULD NOT BE FIRST HAD IN BRINGING BACK OUR KING +8224-274384-0005-1442: ANOTHER PREACHER AFTER REPROACHING HIM TO HIS FACE WITH HIS MISGOVERNMENT ORDERED THIS (PSALM->SUM) TO BE SUNG +8224-274384-0006-1443: THE KING STOOD UP AND CALLED FOR THAT PSALM WHICH BEGINS WITH THESE WORDS +8224-274384-0007-1444: HAVE MERCY LORD ON ME I PRAY FOR MEN WOULD ME DEVOUR +8224-274384-0008-1445: THE GOOD NATURED AUDIENCE IN PITY TO (FALLEN->FALL IN) MAJESTY SHOWED FOR ONCE GREATER DEFERENCE TO THE KING THAN TO THE MINISTER AND SUNG THE PSALM WHICH THE FORMER HAD CALLED FOR +8224-274384-0009-1446: THE PARLIAMENT AND THE SCOTS LAID THEIR PROPOSALS BEFORE THE KING +8224-274384-0010-1447: BEFORE THE SETTLEMENT OF TERMS THE ADMINISTRATION MUST BE POSSESSED ENTIRELY BY THE PARLIAMENTS OF BOTH KINGDOMS AND HOW INCOMPATIBLE THAT SCHEME WITH THE LIBERTY OF THE KING IS EASILY IMAGINED +8224-274384-0011-1448: THE ENGLISH IT IS EVIDENT HAD THEY NOT BEEN PREVIOUSLY ASSURED OF RECEIVING THE KING WOULD NEVER HAVE PARTED WITH SO CONSIDERABLE A SUM AND WHILE THEY WEAKENED THEMSELVES BY THE SAME MEASURE HAVE STRENGTHENED A PEOPLE WITH WHOM THEY MUST AFTERWARDS HAVE SO MATERIAL AN INTEREST TO DISCUSS +8224-274384-0012-1449: IF ANY STILL RETAINED RANCOR AGAINST HIM IN HIS PRESENT CONDITION THEY PASSED IN SILENCE WHILE HIS WELL WISHERS MORE GENEROUS THAN PRUDENT ACCOMPANIED HIS MARCH WITH TEARS WITH ACCLAMATIONS AND WITH PRAYERS FOR HIS SAFETY +8224-274384-0013-1450: HIS DEATH IN THIS CONJUNCTURE WAS A PUBLIC MISFORTUNE +8230-279154-0000-617: THE ANALYSIS OF KNOWLEDGE WILL OCCUPY US UNTIL THE END OF THE THIRTEENTH LECTURE AND IS THE MOST DIFFICULT PART OF OUR WHOLE ENTERPRISE +8230-279154-0001-618: WHAT IS CALLED PERCEPTION DIFFERS FROM SENSATION BY THE FACT THAT THE SENSATIONAL INGREDIENTS BRING UP HABITUAL ASSOCIATES IMAGES AND EXPECTATIONS OF THEIR USUAL (CORRELATES->COROLLETS) ALL OF WHICH ARE SUBJECTIVELY INDISTINGUISHABLE FROM THE SENSATION +8230-279154-0002-619: WHETHER OR NOT THIS PRINCIPLE IS LIABLE TO EXCEPTIONS (EVERYONE->EVERY ONE) WOULD AGREE THAT (IS->IT) HAS 
A BROAD MEASURE OF TRUTH THOUGH THE WORD EXACTLY MIGHT SEEM AN OVERSTATEMENT AND IT MIGHT SEEM MORE CORRECT TO SAY THAT IDEAS APPROXIMATELY REPRESENT IMPRESSIONS +8230-279154-0003-620: AND WHAT SORT OF EVIDENCE IS LOGICALLY POSSIBLE +8230-279154-0004-621: THERE IS NO LOGICAL IMPOSSIBILITY IN THE HYPOTHESIS THAT THE WORLD SPRANG INTO BEING FIVE MINUTES AGO EXACTLY AS IT THEN WAS WITH (A->THE) POPULATION THAT REMEMBERED A WHOLLY UNREAL PAST +8230-279154-0005-622: ALL THAT I AM DOING IS TO USE ITS LOGICAL TENABILITY AS A HELP IN THE ANALYSIS OF WHAT OCCURS WHEN WE REMEMBER +8230-279154-0006-623: THE BEHAVIOURIST WHO ATTEMPTS TO MAKE PSYCHOLOGY A RECORD OF (BEHAVIOUR->BEHAVIOR) HAS TO TRUST HIS MEMORY IN MAKING THE RECORD +8230-279154-0007-624: HABIT IS A CONCEPT INVOLVING THE OCCURRENCE OF SIMILAR EVENTS AT DIFFERENT TIMES IF THE BEHAVIOURIST (FEELS->FILLS) CONFIDENT THAT THERE IS SUCH A PHENOMENON AS HABIT THAT CAN ONLY BE BECAUSE HE TRUSTS HIS MEMORY WHEN IT ASSURES HIM THAT THERE HAVE BEEN OTHER TIMES +8230-279154-0008-625: BUT I DO NOT THINK SUCH AN INFERENCE IS WARRANTED +8230-279154-0009-626: OUR CONFIDENCE OR LACK OF CONFIDENCE IN THE ACCURACY OF A MEMORY IMAGE MUST IN FUNDAMENTAL CASES BE BASED UPON A CHARACTERISTIC OF THE IMAGE ITSELF SINCE WE CANNOT EVOKE THE PAST BODILY AND COMPARE IT WITH THE PRESENT IMAGE +8230-279154-0010-627: WE SOMETIMES HAVE IMAGES THAT ARE BY NO MEANS PECULIARLY VAGUE WHICH YET WE DO NOT TRUST FOR EXAMPLE UNDER THE INFLUENCE OF FATIGUE WE MAY SEE A FRIEND'S FACE VIVIDLY AND CLEARLY BUT HORRIBLY DISTORTED +8230-279154-0011-628: SOME IMAGES LIKE SOME SENSATIONS FEEL VERY FAMILIAR WHILE OTHERS FEEL STRANGE +8230-279154-0012-629: FAMILIARITY IS A (FEELING->FILLING) CAPABLE OF DEGREES +8230-279154-0013-630: IN AN IMAGE OF A WELL KNOWN FACE FOR EXAMPLE SOME PARTS MAY FEEL MORE FAMILIAR THAN OTHERS WHEN THIS HAPPENS WE HAVE MORE BELIEF IN THE ACCURACY OF THE FAMILIAR PARTS THAN IN THAT OF THE UNFAMILIAR PARTS +8230-279154-0014-631: I COME NOW TO THE OTHER CHARACTERISTIC WHICH MEMORY IMAGES MUST HAVE IN ORDER TO ACCOUNT FOR OUR KNOWLEDGE OF THE PAST +8230-279154-0015-632: THEY MUST HAVE SOME CHARACTERISTIC WHICH MAKES US REGARD THEM AS REFERRING TO MORE OR LESS REMOTE PORTIONS OF THE PAST +8230-279154-0016-633: IN ACTUAL FACT THERE ARE DOUBTLESS VARIOUS FACTORS THAT CONCUR IN GIVING US THE FEELING OF GREATER OR LESS REMOTENESS IN SOME REMEMBERED EVENT +8230-279154-0017-634: THERE MAY BE A SPECIFIC FEELING WHICH COULD BE CALLED THE (FEELING->FILLING) OF PASTNESS ESPECIALLY WHERE IMMEDIATE MEMORY IS CONCERNED +8230-279154-0018-635: THERE IS OF COURSE A DIFFERENCE BETWEEN KNOWING THE TEMPORAL RELATION OF A REMEMBERED EVENT TO THE PRESENT AND KNOWING THE TIME ORDER OF TWO REMEMBERED EVENTS +8230-279154-0019-636: IT WOULD SEEM THAT ONLY RATHER RECENT EVENTS CAN BE PLACED AT ALL ACCURATELY BY MEANS OF FEELINGS GIVING THEIR TEMPORAL RELATION TO THE PRESENT BUT IT IS CLEAR THAT SUCH FEELINGS MUST PLAY AN ESSENTIAL PART IN THE PROCESS OF DATING REMEMBERED EVENTS +8230-279154-0020-637: IF WE HAD RETAINED THE SUBJECT OR ACT IN KNOWLEDGE THE WHOLE PROBLEM OF MEMORY WOULD HAVE BEEN COMPARATIVELY SIMPLE +8230-279154-0021-638: REMEMBERING HAS TO BE A PRESENT OCCURRENCE IN SOME WAY RESEMBLING OR RELATED TO WHAT IS REMEMBERED +8230-279154-0022-639: SOME POINTS MAY BE TAKEN AS FIXED AND SUCH AS ANY THEORY OF MEMORY MUST ARRIVE AT +8230-279154-0023-640: IN THIS CASE AS IN MOST OTHERS WHAT MAY BE TAKEN AS CERTAIN IN ADVANCE IS RATHER VAGUE +8230-279154-0024-641: THE FIRST OF OUR VAGUE 
BUT INDUBITABLE DATA IS THAT THERE IS KNOWLEDGE OF THE PAST +8230-279154-0025-642: WE MIGHT PROVISIONALLY THOUGH PERHAPS NOT QUITE CORRECTLY DEFINE MEMORY AS THAT WAY OF KNOWING ABOUT THE PAST WHICH HAS NO ANALOGUE IN OUR KNOWLEDGE OF THE FUTURE SUCH A DEFINITION WOULD AT LEAST SERVE TO MARK THE PROBLEM WITH WHICH WE ARE CONCERNED THOUGH SOME EXPECTATIONS MAY DESERVE TO RANK WITH MEMORY AS REGARDS IMMEDIACY +8230-279154-0026-643: THIS DISTINCTION IS VITAL TO THE UNDERSTANDING OF MEMORY BUT IT IS NOT SO EASY TO CARRY OUT IN PRACTICE AS IT IS TO DRAW IN THEORY +8230-279154-0027-644: A (GRAMOPHONE->GRAMMAPHONE) BY THE HELP OF SUITABLE RECORDS MIGHT RELATE TO US THE INCIDENTS OF ITS PAST AND PEOPLE ARE NOT SO DIFFERENT FROM GRAMOPHONES AS THEY LIKE TO BELIEVE +8230-279154-0028-645: I CAN SET TO WORK NOW TO REMEMBER THINGS I NEVER REMEMBERED BEFORE SUCH AS WHAT I HAD TO EAT FOR BREAKFAST THIS MORNING AND IT CAN HARDLY BE WHOLLY HABIT THAT ENABLES ME TO DO THIS +8230-279154-0029-646: THE FACT THAT A MAN CAN RECITE A POEM DOES NOT SHOW THAT HE REMEMBERS ANY PREVIOUS OCCASION ON WHICH HE HAS RECITED OR READ IT +8230-279154-0030-647: (SEMON'S->SIMMONS) TWO BOOKS MENTIONED IN AN EARLIER LECTURE DO NOT TOUCH KNOWLEDGE MEMORY AT ALL CLOSELY +8230-279154-0031-648: THEY GIVE LAWS ACCORDING TO WHICH IMAGES OF PAST OCCURRENCES COME INTO OUR MINDS BUT DO NOT DISCUSS OUR BELIEF THAT THESE IMAGES REFER TO PAST OCCURRENCES WHICH IS WHAT CONSTITUTES KNOWLEDGE MEMORY +8230-279154-0032-649: IT IS THIS THAT IS OF INTEREST TO THEORY OF KNOWLEDGE +8230-279154-0033-650: IT IS BY NO MEANS ALWAYS RELIABLE ALMOST EVERYBODY HAS AT SOME TIME EXPERIENCED THE WELL KNOWN ILLUSION THAT ALL THAT IS HAPPENING NOW HAPPENED BEFORE AT SOME TIME +8230-279154-0034-651: WHENEVER THE SENSE OF FAMILIARITY OCCURS WITHOUT A DEFINITE OBJECT IT (LEADS->LEAVES) US TO SEARCH THE ENVIRONMENT UNTIL WE ARE SATISFIED THAT WE HAVE FOUND THE APPROPRIATE OBJECT WHICH LEADS US TO THE JUDGMENT THIS IS FAMILIAR +8230-279154-0035-652: THUS NO KNOWLEDGE AS TO THE PAST IS TO BE DERIVED FROM THE FEELING OF FAMILIARITY ALONE +8230-279154-0036-653: A FURTHER STAGE IS RECOGNITION +8230-279154-0037-654: RECOGNITION IN THIS SENSE DOES NOT NECESSARILY INVOLVE MORE THAN A HABIT OF ASSOCIATION THE KIND OF OBJECT WE ARE SEEING AT THE MOMENT IS ASSOCIATED WITH THE WORD CAT OR WITH AN AUDITORY IMAGE OF PURRING OR WHATEVER OTHER CHARACTERISTIC WE MAY HAPPEN TO RECOGNIZE IN THE CAT OF THE MOMENT +8230-279154-0038-655: WE ARE OF COURSE IN FACT ABLE TO JUDGE WHEN WE RECOGNIZE AN OBJECT THAT WE HAVE SEEN IT BEFORE BUT THIS JUDGMENT IS SOMETHING OVER AND ABOVE RECOGNITION IN THIS FIRST SENSE AND MAY VERY PROBABLY BE IMPOSSIBLE TO ANIMALS THAT NEVERTHELESS HAVE THE EXPERIENCE OF RECOGNITION IN THIS FIRST SENSE OF THE WORD +8230-279154-0039-656: THIS KNOWLEDGE IS MEMORY IN ONE SENSE THOUGH IN ANOTHER IT IS NOT +8230-279154-0040-657: THERE ARE HOWEVER SEVERAL POINTS IN WHICH SUCH AN ACCOUNT OF RECOGNITION IS INADEQUATE TO BEGIN WITH IT MIGHT SEEM AT FIRST SIGHT MORE CORRECT TO DEFINE RECOGNITION AS I HAVE SEEN THIS BEFORE THAN AS THIS HAS EXISTED BEFORE +8230-279154-0041-658: THE DEFINITION OF MY EXPERIENCE IS DIFFICULT BROADLY SPEAKING IT IS EVERYTHING THAT IS CONNECTED WITH WHAT I AM EXPERIENCING NOW BY CERTAIN LINKS OF WHICH THE VARIOUS FORMS OF MEMORY ARE AMONG THE MOST IMPORTANT +8230-279154-0042-659: THUS IF I RECOGNIZE A THING THE OCCASION OF ITS PREVIOUS EXISTENCE IN VIRTUE OF WHICH I RECOGNIZE IT FORMS PART OF MY EXPERIENCE BY DEFINITION RECOGNITION WILL BE ONE OF 
THE MARKS BY WHICH MY EXPERIENCE IS SINGLED OUT FROM THE REST OF THE WORLD +8230-279154-0043-660: OF COURSE THE WORDS THIS HAS EXISTED BEFORE ARE A VERY INADEQUATE TRANSLATION OF WHAT ACTUALLY HAPPENS WHEN WE FORM A JUDGMENT OF RECOGNITION BUT THAT IS UNAVOIDABLE WORDS ARE FRAMED TO EXPRESS A LEVEL OF THOUGHT WHICH IS BY NO MEANS PRIMITIVE AND ARE QUITE INCAPABLE OF EXPRESSING SUCH AN ELEMENTARY OCCURRENCE AS RECOGNITION +8455-210777-0000-972: I REMAINED THERE ALONE FOR MANY HOURS BUT I MUST ACKNOWLEDGE THAT BEFORE I LEFT THE CHAMBERS I HAD GRADUALLY BROUGHT MYSELF TO LOOK AT THE MATTER IN ANOTHER LIGHT +8455-210777-0001-973: HAD (EVA CRASWELLER->EITHER CRUSWELLER) NOT BEEN GOOD LOOKING HAD JACK BEEN STILL AT COLLEGE HAD SIR KENNINGTON OVAL REMAINED IN ENGLAND HAD MISTER (BUNNIT AND->BUNNITT IN) THE BAR KEEPER NOT SUCCEEDED IN STOPPING MY CARRIAGE ON THE HILL SHOULD I HAVE SUCCEEDED IN ARRANGING FOR THE FINAL DEPARTURE OF MY OLD FRIEND +8455-210777-0002-974: ON ARRIVING AT HOME AT MY OWN RESIDENCE I FOUND THAT OUR SALON WAS FILLED WITH A BRILLIANT COMPANY +8455-210777-0003-975: AS I SPOKE I MADE HIM A GRACIOUS BOW AND I THINK I SHOWED HIM BY MY MODE OF ADDRESS THAT I DID NOT BEAR ANY GRUDGE AS TO MY INDIVIDUAL SELF +8455-210777-0004-976: I HAVE COME TO YOUR SHORES MISTER PRESIDENT WITH THE PURPOSE OF SEEING HOW THINGS ARE PROGRESSING IN THIS DISTANT QUARTER OF THE WORLD +8455-210777-0005-977: WE HAVE OUR LITTLE STRUGGLES HERE AS ELSEWHERE AND ALL THINGS CANNOT BE DONE BY ROSE WATER +8455-210777-0006-978: WE ARE QUITE SATISFIED NOW CAPTAIN (BATTLEAX->BATTLE AXE) SAID MY WIFE +8455-210777-0007-979: QUITE SATISFIED SAID EVA +8455-210777-0008-980: THE LADIES IN COMPLIANCE WITH THAT SOFTNESS OF HEART WHICH IS THEIR CHARACTERISTIC ARE ON ONE SIDE AND THE MEN BY WHOM THE WORLD HAS TO BE MANAGED ARE ON THE OTHER +8455-210777-0009-981: NO DOUBT IN PROCESS OF TIME THE LADIES WILL FOLLOW +8455-210777-0010-982: THEIR MASTERS SAID MISSUS NEVERBEND +8455-210777-0011-983: I DID NOT MEAN SAID CAPTAIN (BATTLEAX->BATTLE AXE) TO TOUCH UPON PUBLIC SUBJECTS AT SUCH A MOMENT AS THIS +8455-210777-0012-984: MISSUS NEVERBEND YOU MUST INDEED BE PROUD OF YOUR SON +8455-210777-0013-985: JACK HAD BEEN STANDING IN THE FAR CORNER OF THE ROOM TALKING TO EVA AND WAS NOW REDUCED TO SILENCE BY HIS PRAISES +8455-210777-0014-986: SIR KENNINGTON OVAL IS A VERY FINE PLAYER SAID MY WIFE +8455-210777-0015-987: I (AND->AM) MY WIFE AND SON AND THE TWO (CRASWELLERS->CRESTWELLERS) AND THREE OR FOUR OTHERS AGREED TO DINE ON BOARD THE SHIP ON THE NEXT +8455-210777-0016-988: THIS I FELT WAS PAID TO ME AS BEING PRESIDENT OF THE REPUBLIC AND I ENDEAVOURED TO BEHAVE MYSELF WITH SUCH MINGLED HUMILITY AND DIGNITY AS MIGHT (BEFIT->BE FIT) THE OCCASION BUT I COULD NOT BUT FEEL THAT SOMETHING WAS WANTING TO THE SIMPLICITY OF MY ORDINARY LIFE +8455-210777-0017-989: MY WIFE ON THE SPUR OF THE MOMENT MANAGED TO GIVE THE (GENTLEMEN->GENTLEMAN) A VERY GOOD DINNER +8455-210777-0018-990: THIS SHE SAID WAS TRUE HOSPITALITY AND I AM NOT SURE THAT I DID NOT AGREE WITH (HER->THAT) +8455-210777-0019-991: THEN THERE WERE THREE OR FOUR LEADING MEN OF THE COMMUNITY WITH THEIR WIVES WHO WERE FOR THE MOST PART THE FATHERS AND MOTHERS OF THE YOUNG LADIES +8455-210777-0020-992: OH YES SAID JACK AND I'M NOWHERE +8455-210777-0021-993: BUT I MEAN TO HAVE MY INNINGS BEFORE LONG +8455-210777-0022-994: OF WHAT MISSUS NEVERBEND HAD GONE THROUGH IN PROVIDING BIRDS BEASTS AND FISHES NOT TO TALK OF TARTS AND JELLIES FOR THE DINNER OF THAT DAY NO ONE BUT MYSELF CAN HAVE ANY 
IDEA BUT IT MUST BE ADMITTED THAT SHE ACCOMPLISHED HER TASK WITH THOROUGH SUCCESS +8455-210777-0023-995: WE SAT WITH THE (OFFICERS->OFFICER) SOME LITTLE TIME AFTER DINNER AND THEN WENT ASHORE +8455-210777-0024-996: HOW MUCH OF EVIL OF REAL ACCOMPLISHED EVIL HAD THERE NOT OCCURRED TO ME DURING THE LAST FEW DAYS +8455-210777-0025-997: WHAT COULD I DO NOW BUT JUST LAY MYSELF DOWN AND DIE +8455-210777-0026-998: AND THE DEATH OF WHICH I DREAMT COULD NOT ALAS +8455-210777-0027-999: WHEN THIS CAPTAIN SHOULD HAVE TAKEN HIMSELF AND HIS VESSEL BACK TO ENGLAND I WOULD RETIRE TO A SMALL FARM WHICH I POSSESSED AT THE (FARTHEST->FURTHEST) SIDE OF THE ISLAND AND THERE IN SECLUSION WOULD I END MY DAYS +8455-210777-0028-1000: JACK WOULD BECOME EVA'S HAPPY HUSBAND AND WOULD REMAIN AMIDST THE HURRIED DUTIES OF THE EAGER WORLD +8455-210777-0029-1001: THINKING OF ALL THIS I WENT TO SLEEP +8455-210777-0030-1002: MISTER NEVERBEND BEGAN THE CAPTAIN AND I (OBSERVED->OBSERVE) THAT UP TO THAT MOMENT HE HAD GENERALLY ADDRESSED ME AS PRESIDENT IT CANNOT BE DENIED THAT WE HAVE COME HERE ON AN UNPLEASANT MISSION +8455-210777-0031-1003: YOU HAVE RECEIVED US WITH ALL THAT COURTESY AND HOSPITALITY FOR WHICH YOUR CHARACTER (*->AND) IN ENGLAND (STANDS->STAND) SO HIGH +8455-210777-0032-1004: IT IS A DUTY SAID I +8455-210777-0033-1005: BUT YOUR POWER IS SO SUPERIOR TO ANY THAT I CAN ADVANCE AS TO MAKE US HERE FEEL THAT THERE IS NO DISGRACE IN YIELDING TO IT +8455-210777-0034-1006: NOT A DOUBT BUT HAD YOUR FORCE BEEN ONLY DOUBLE OR (TREBLE->TROUBLE) OUR OWN I SHOULD HAVE FOUND IT MY DUTY TO STRUGGLE WITH YOU +8455-210777-0035-1007: THAT IS ALL QUITE TRUE MISTER NEVERBEND SAID SIR (FERDINANDO BROWN->FERDINAND OBROWN) +8455-210777-0036-1008: I CAN AFFORD TO SMILE BECAUSE I AM ABSOLUTELY POWERLESS BEFORE YOU BUT I DO NOT THE LESS FEEL THAT IN A MATTER (IN->OF) WHICH THE PROGRESS OF THE WORLD IS CONCERNED I OR RATHER WE HAVE BEEN PUT DOWN BY BRUTE FORCE +8455-210777-0037-1009: YOU HAVE COME TO US THREATENING US WITH ABSOLUTE DESTRUCTION +8455-210777-0038-1010: THEREFORE I FEEL MYSELF QUITE ABLE AS PRESIDENT OF THIS REPUBLIC TO RECEIVE YOU WITH A COURTESY DUE TO THE SERVANTS OF A FRIENDLY ALLY +8455-210777-0039-1011: I CAN ASSURE YOU HE HAS NOT EVEN ALLOWED ME TO SEE THE TRIGGER SINCE I HAVE BEEN ON BOARD +8455-210777-0040-1012: THEN SAID SIR FERDINANDO THERE IS NOTHING FOR IT BUT THAT (HE->WE) MUST TAKE YOU WITH HIM +8455-210777-0041-1013: THERE CAME UPON ME A SUDDEN SHOCK WHEN I HEARD THESE WORDS WHICH EXCEEDED ANYTHING WHICH I HAD YET FELT +8455-210777-0042-1014: YOU HEAR WHAT SIR FERDINANDO BROWN HAS SAID REPLIED CAPTAIN (BATTLEAX->BATTLE AXE) +8455-210777-0043-1015: BUT WHAT IS THE DELICATE MISSION I ASKED +8455-210777-0044-1016: I WAS TO BE TAKEN AWAY AND CARRIED TO ENGLAND OR ELSEWHERE OR DROWNED UPON THE VOYAGE IT MATTERED NOT WHICH +8455-210777-0045-1017: THEN THE REPUBLIC OF (BRITANNULA->BRITAIN NULA) WAS TO BE DECLARED AS NON EXISTENT AND THE BRITISH FLAG WAS TO BE EXALTED AND A BRITISH GOVERNOR INSTALLED IN THE EXECUTIVE CHAMBERS +8455-210777-0046-1018: YOU MAY BE QUITE SURE (IT'S->TO) THERE SAID CAPTAIN (BATTLEAX->BATTLE AXE) AND THAT I CAN SO USE IT AS TO HALF OBLITERATE YOUR TOWN WITHIN TWO MINUTES OF MY RETURN ON BOARD +8455-210777-0047-1019: YOU PROPOSE TO KIDNAP ME I SAID +8455-210777-0048-1020: WHAT (WOULD->WILL) BECOME OF YOUR GUN WERE I TO KIDNAP YOU +8455-210777-0049-1021: LIEUTENANT (CROSSTREES->CROSS TREES) IS A VERY GALLANT OFFICER +8455-210777-0050-1022: ONE OF US ALWAYS REMAINS ON BOARD WHILE THE OTHER IS 
ON SHORE +8455-210777-0051-1023: WHAT WORLD WIDE INIQUITY SUCH A SPEECH AS THAT DISCLOSES SAID I STILL TURNING MYSELF TO THE CAPTAIN FOR THOUGH I WOULD HAVE CRUSHED THEM BOTH BY MY WORDS HAD IT BEEN POSSIBLE MY DISLIKE (CENTRED->SENATE) ITSELF ON SIR FERDINANDO +8455-210777-0052-1024: YOU WILL ALLOW ME TO SUGGEST SAID HE THAT THAT IS A MATTER OF OPINION +8455-210777-0053-1025: WERE I TO COMPLY WITH YOUR ORDERS WITHOUT EXPRESSING MY OWN OPINION I SHOULD SEEM TO HAVE DONE SO WILLINGLY HEREAFTER +8455-210777-0054-1026: THE LETTER RAN AS FOLLOWS +8455-210777-0055-1027: SIR I HAVE IT IN COMMAND TO INFORM YOUR EXCELLENCY THAT YOU HAVE BEEN APPOINTED GOVERNOR OF THE CROWN COLONY WHICH IS CALLED (BRITANNULA->BRITAIN NULA) +8455-210777-0056-1028: THE PECULIAR CIRCUMSTANCES OF THE COLONY ARE WITHIN YOUR EXCELLENCY'S KNOWLEDGE +8455-210777-0057-1029: BUT IN THEIR SELECTION OF A CONSTITUTION THE (BRITANNULISTS->BRITON ULYSTS) HAVE UNFORTUNATELY ALLOWED THEMSELVES BUT ONE (DELIBERATIVE->DELIBERATE) ASSEMBLY AND HENCE (HAVE->HAS) SPRUNG THEIR PRESENT DIFFICULTIES +8455-210777-0058-1030: IT IS FOUNDED ON THE ACKNOWLEDGED WEAKNESS OF THOSE WHO SURVIVE THAT PERIOD OF LIFE AT WHICH MEN CEASE TO WORK +8455-210777-0059-1031: BUT IT IS SURMISED THAT YOU WILL FIND DIFFICULTIES IN THE WAY OF YOUR ENTERING AT ONCE UPON YOUR (GOVERNMENT->GOVERNOR) +8455-210777-0060-1032: THE JOHN BRIGHT IS (ARMED->ARM) WITH A WEAPON OF GREAT POWER AGAINST WHICH IT IS IMPOSSIBLE THAT THE PEOPLE OF (BRITANNULA->BRITAIN EULO) SHOULD PREVAIL +8455-210777-0061-1033: YOU WILL CARRY OUT WITH YOU ONE HUNDRED MEN OF THE NORTH (NORTH WEST->NORTHWEST) BIRMINGHAM REGIMENT WHICH WILL PROBABLY SUFFICE FOR YOUR OWN SECURITY AS IT IS THOUGHT THAT IF MISTER (NEVERBEND->NEVERBIN) BE WITHDRAWN THE PEOPLE WILL REVERT EASILY TO THEIR OLD HABITS OF OBEDIENCE +8455-210777-0062-1034: WHEN DO YOU INTEND THAT THE JOHN BRIGHT SHALL START +8455-210777-0063-1035: TO DAY I SHOUTED +8455-210777-0064-1036: AND I HAVE NO ONE READY TO WHOM I CAN GIVE UP THE ARCHIVES OF THE GOVERNMENT +8455-210777-0065-1037: I SHALL BE HAPPY TO TAKE CHARGE OF THEM SAID SIR FERDINANDO +8455-210777-0066-1038: THEY OF COURSE MUST ALL BE ALTERED +8455-210777-0067-1039: OR OF THE HABITS OF OUR PEOPLE IT IS QUITE IMPOSSIBLE +8455-210777-0068-1040: YOUR POWER IS SUFFICIENT I SAID +8455-210777-0069-1041: IF YOU WILL GIVE US YOUR PROMISE TO MEET CAPTAIN (BATTLEAX->ADELAX) HERE AT THIS TIME TO MORROW WE WILL STRETCH A POINT AND DELAY THE DEPARTURE OF THE JOHN BRIGHT FOR TWENTY FOUR HOURS +8455-210777-0070-1042: AND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTRACT FROM ME A PROMISE THAT I WOULD DEPART IN PEACE +8463-287645-0000-543: THIS WAS WHAT DID THE MISCHIEF SO FAR AS THE RUNNING AWAY WAS CONCERNED +8463-287645-0001-544: IT IS HARDLY NECESSARY TO SAY MORE OF THEM HERE +8463-287645-0002-545: FROM THE MANNER IN WHICH (HE->SHE) EXPRESSED HIMSELF WITH REGARD TO ROBERT (HOLLAN->HOLLAND) NO MAN IN THE WHOLE RANGE OF HIS RECOLLECTIONS WILL BE LONGER REMEMBERED THAN HE HIS (ENTHRALMENT->ENTHRALIMENT) WHILE UNDER (HOLLAN->HOLLAND) WILL HARDLY EVER BE FORGOTTEN +8463-287645-0003-546: OF THIS PARTY EDWARD A BOY OF SEVENTEEN CALLED FORTH MUCH SYMPATHY HE TOO WAS CLAIMED BY (HOLLAN->HOLLAND) +8463-287645-0004-547: JOHN WESLEY COMBASH JACOB TAYLOR AND THOMAS EDWARD SKINNER +8463-287645-0005-548: A FEW YEARS BACK ONE OF THEIR SLAVES A COACHMAN WAS KEPT ON THE COACH BOX ONE (COLD->CALLED) NIGHT WHEN THEY WERE OUT AT A BALL UNTIL HE BECAME ALMOST FROZEN TO DEATH IN FACT HE DID DIE IN THE INFIRMARY FROM THE 
EFFECTS OF THE FROST ABOUT ONE WEEK AFTERWARDS +8463-287645-0006-549: THE DOCTOR WHO ATTENDED THE INJURED CREATURE IN THIS CASE WAS SIMPLY TOLD THAT SHE SLIPPED AND FELL DOWN (*->THE) STAIRS AS SHE WAS COMING DOWN +8463-287645-0007-550: ANOTHER CASE SAID JOHN WESLEY WAS A LITTLE GIRL HALF GROWN WHO WAS WASHING WINDOWS (UP STAIRS->UPSTAIRS) ONE DAY AND UNLUCKILY FELL ASLEEP IN THE WINDOW AND IN THIS POSITION WAS FOUND BY HER MISTRESS IN A RAGE THE MISTRESS (HIT->HID) HER A HEAVY SLAP KNOCKED HER OUT OF THE WINDOW AND SHE FELL TO THE PAVEMENT AND DIED IN A FEW HOURS FROM THE EFFECTS THEREOF +8463-287645-0008-551: AS USUAL NOTHING WAS DONE IN THE WAY OF PUNISHMENT +8463-287645-0009-552: I NEVER KNEW OF BUT ONE MAN WHO COULD EVER PLEASE HIM +8463-287645-0010-553: HE WORKED ME VERY HARD HE WANTED TO BE BEATING ME ALL THE TIME +8463-287645-0011-554: SHE WAS A LARGE HOMELY WOMAN THEY WERE COMMON WHITE PEOPLE WITH NO REPUTATION IN THE COMMUNITY +8463-287645-0012-555: SUBSTANTIALLY THIS WAS JACOB'S UNVARNISHED DESCRIPTION OF HIS MASTER AND MISTRESS +8463-287645-0013-556: AS TO HIS AGE AND ALSO THE NAME OF HIS MASTER JACOB'S STATEMENT VARIED SOMEWHAT FROM THE ADVERTISEMENT +8463-287645-0014-557: OF STARTING I DIDN'T KNOW THE WAY TO COME +8463-294825-0000-558: IT'S ALMOST BEYOND CONJECTURE +8463-294825-0001-559: THIS REALITY BEGINS TO EXPLAIN THE DARK POWER AND (OTHERWORLDLY->OTHER WORLDLY) FASCINATION OF TWENTY THOUSAND LEAGUES UNDER THE SEAS +8463-294825-0002-560: FIRST AS A PARIS STOCKBROKER LATER AS A CELEBRATED AUTHOR AND YACHTSMAN HE WENT ON FREQUENT VOYAGES TO BRITAIN AMERICA THE MEDITERRANEAN +8463-294825-0003-561: NEMO BUILDS A FABULOUS (FUTURISTIC->FUTUREISTIC) SUBMARINE THE NAUTILUS THEN CONDUCTS AN UNDERWATER CAMPAIGN OF VENGEANCE AGAINST HIS IMPERIALIST OPPRESSOR +8463-294825-0004-562: IN ALL THE NOVEL (HAD->HEAD) A DIFFICULT GESTATION +8463-294825-0005-563: OTHER SUBTLETIES OCCUR INSIDE EACH EPISODE THE TEXTURES SPARKLING WITH WIT INFORMATION AND INSIGHT +8463-294825-0006-564: HIS SPECIFICATIONS FOR AN OPEN SEA SUBMARINE AND A SELF (CONTAINED->CONTAINING) DIVING SUIT WERE DECADES BEFORE THEIR TIME YET MODERN TECHNOLOGY BEARS THEM OUT TRIUMPHANTLY +8463-294825-0007-565: EVEN THE SUPPORTING CAST IS SHREWDLY DRAWN PROFESSOR ARONNAX THE CAREER SCIENTIST CAUGHT IN AN ETHICAL CONFLICT CONSEIL THE COMPULSIVE CLASSIFIER WHO SUPPLIES HUMOROUS TAG LINES FOR (VERNE'S->VERN'S) FAST FACTS THE HARPOONER NED LAND A CREATURE OF CONSTANT APPETITES MAN AS HEROIC ANIMAL +8463-294825-0008-566: BUT MUCH OF THE (NOVEL'S->NOVELS) BROODING POWER COMES FROM CAPTAIN NEMO +8463-294825-0009-567: THIS COMPULSION LEADS NEMO INTO UGLY CONTRADICTIONS (HE'S->HE IS) A (FIGHTER->FRIGHTER) FOR FREEDOM YET ALL WHO BOARD HIS SHIP (ARE->OR) IMPRISONED THERE FOR GOOD HE WORKS TO SAVE LIVES BOTH HUMAN AND ANIMAL YET HE HIMSELF CREATES A (HOLOCAUST->HOLOCOST) HE DETESTS IMPERIALISM YET HE LAYS PERSONAL CLAIM TO THE SOUTH POLE +8463-294825-0010-568: AND IN THIS LAST ACTION HE FALLS INTO THE CLASSIC SIN OF PRIDE +8463-294825-0011-569: (HE'S->HE IS) SWIFTLY PUNISHED +8463-294825-0012-570: THE NAUTILUS NEARLY PERISHES IN THE ANTARCTIC AND NEMO SINKS INTO A GROWING DEPRESSION +8463-294825-0013-571: FOR MANY THEN THIS BOOK HAS BEEN A SOURCE OF FASCINATION SURELY ONE OF THE MOST INFLUENTIAL NOVELS EVER WRITTEN AN INSPIRATION FOR SUCH SCIENTISTS AND DISCOVERERS AS ENGINEER SIMON LAKE OCEANOGRAPHER WILLIAM (BEEBE->B) POLAR (TRAVELER SIR->TRAVELLERS ARE) ERNEST SHACKLETON +8463-294825-0014-572: FATHOM SIX FEET +8463-294825-0015-573: 
(GRAM->GRAHAM) ROUGHLY (ONE->WON) TWENTY EIGHTH OF AN OUNCE +8463-294825-0016-574: (MILLIGRAM->MILAGRAM) ROUGHLY ONE TWENTY EIGHT (THOUSAND->THOUSANDTH) OF AN OUNCE +8463-294825-0017-575: (LITER->LATER) ROUGHLY (ONE QUART->WON COURT) +8463-294825-0018-576: METER ROUGHLY ONE YARD THREE INCHES +8463-294825-0019-577: (MILLIMETER->MILLIMETRE) ROUGHLY ONE TWENTY FIFTH OF AN INCH +8463-294828-0000-578: CHAPTER THREE AS MASTER WISHES +8463-294828-0001-579: THREE SECONDS BEFORE THE ARRIVAL OF J B HOBSON'S LETTER I (NO->KNOW) MORE DREAMED OF CHASING THE UNICORN THAN OF TRYING FOR THE NORTHWEST PASSAGE +8463-294828-0002-580: EVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JOURNEY EXHAUSTED AND BADLY NEEDING (A REST->ARREST) +8463-294828-0003-581: I WANTED NOTHING MORE THAN TO SEE MY COUNTRY AGAIN MY FRIENDS MY MODEST QUARTERS BY THE (BOTANICAL->BATTANICAL) GARDENS MY DEARLY BELOVED COLLECTIONS +8463-294828-0004-582: BUT NOW NOTHING COULD HOLD ME BACK +8463-294828-0005-583: CONSEIL WAS MY MANSERVANT +8463-294828-0006-584: FROM RUBBING SHOULDERS WITH SCIENTISTS IN OUR LITTLE UNIVERSE BY THE BOTANICAL GARDENS THE BOY HAD COME TO KNOW A THING OR TWO +8463-294828-0007-585: CLASSIFYING WAS EVERYTHING TO HIM SO HE KNEW NOTHING ELSE WELL VERSED IN (THE->A) THEORY OF CLASSIFICATION HE WAS POORLY VERSED IN ITS PRACTICAL APPLICATION AND I DOUBT THAT HE COULD TELL A SPERM WHALE FROM A (BALEEN->BALINE) WHALE +8463-294828-0008-586: AND YET WHAT A FINE GALLANT LAD +8463-294828-0009-587: NOT ONCE DID HE COMMENT ON THE LENGTH OR THE HARDSHIPS OF (A->THE) JOURNEY +8463-294828-0010-588: NEVER DID HE OBJECT TO BUCKLING UP HIS (SUITCASE->SUIT CASE) FOR ANY COUNTRY WHATEVER CHINA OR THE CONGO NO MATTER HOW FAR OFF IT WAS +8463-294828-0011-589: HE WENT HERE THERE AND EVERYWHERE IN PERFECT CONTENTMENT +8463-294828-0012-590: PLEASE FORGIVE ME FOR THIS UNDERHANDED WAY OF ADMITTING (*->THAT) I HAD TURNED FORTY +8463-294828-0013-591: HE WAS A FANATIC ON FORMALITY AND HE ONLY ADDRESSED ME IN THE THIRD PERSON TO THE POINT WHERE IT GOT (TIRESOME->TO HIRESUME) +8463-294828-0014-592: THERE WAS GOOD REASON TO STOP AND THINK EVEN FOR THE WORLD'S MOST EMOTIONLESS MAN +8463-294828-0015-593: CONSEIL I CALLED A THIRD (TIME->TON) CONSEIL APPEARED +8463-294828-0016-594: (DID->DEAD) MASTER (SUMMON->SUMMONED) ME HE SAID ENTERING +8463-294828-0017-595: PACK AS MUCH INTO MY TRUNK AS YOU CAN MY TRAVELING KIT MY SUITS SHIRTS AND SOCKS DON'T BOTHER COUNTING JUST SQUEEZE IT ALL IN AND HURRY +8463-294828-0018-596: WE'LL DEAL WITH THEM LATER WHAT +8463-294828-0019-597: ANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WHOLE MENAGERIE TO FRANCE +8463-294828-0020-598: YES WE ARE CERTAINLY I REPLIED EVASIVELY BUT AFTER WE MAKE A DETOUR +8463-294828-0021-599: A ROUTE SLIGHTLY LESS DIRECT THAT'S ALL +8463-294828-0022-600: (WE'RE->WERE) LEAVING ON THE ABRAHAM LINCOLN +8463-294828-0023-601: YOU SEE MY FRIEND IT'S AN ISSUE OF THE MONSTER THE NOTORIOUS NARWHALE +8463-294828-0024-602: WE DON'T KNOW WHERE IT WILL TAKE US +8463-294828-0025-603: BUT (WE'RE->WERE) GOING JUST THE SAME +8463-294828-0026-604: WE HAVE A COMMANDER (WHO'S->WHOSE) GAME FOR ANYTHING +8463-294828-0027-605: I LEFT INSTRUCTIONS FOR SHIPPING MY CONTAINERS OF STUFFED ANIMALS AND DRIED PLANTS TO PARIS FRANCE +8463-294828-0028-606: I OPENED A LINE OF CREDIT SUFFICIENT TO COVER THE (BABIRUSA->BABRUSA) AND CONSEIL AT MY HEELS I JUMPED INTO A CARRIAGE +8463-294828-0029-607: OUR BAGGAGE WAS IMMEDIATELY CARRIED TO THE DECK OF THE FRIGATE I RUSHED ABOARD +8463-294828-0030-608: I ASKED FOR COMMANDER 
(FARRAGUT->FERRAGUT) +8463-294828-0031-609: ONE OF THE SAILORS LED ME TO THE (AFTERDECK->AFTER DECK) WHERE I STOOD IN THE PRESENCE OF A SMART LOOKING OFFICER WHO EXTENDED HIS HAND TO ME +8463-294828-0032-610: IN PERSON WELCOME ABOARD PROFESSOR YOUR CABIN IS WAITING FOR YOU +8463-294828-0033-611: I WAS WELL SATISFIED WITH MY CABIN WHICH WAS LOCATED IN THE STERN AND OPENED INTO THE (OFFICERS MESS->OFFICER'S MASTS) +8463-294828-0034-612: (WE'LL->WILL) BE QUITE COMFORTABLE HERE I TOLD CONSEIL +8463-294828-0035-613: AND SO IF (I'D->I HAD) BEEN DELAYED BY A QUARTER OF AN HOUR OR EVEN LESS THE FRIGATE WOULD HAVE GONE WITHOUT ME AND I WOULD HAVE MISSED OUT ON THIS UNEARTHLY EXTRAORDINARY AND INCONCEIVABLE EXPEDITION WHOSE TRUE STORY MIGHT WELL MEET WITH SOME SKEPTICISM +8463-294828-0036-614: THE WHARVES OF BROOKLYN AND EVERY PART OF NEW YORK BORDERING THE EAST RIVER WERE CROWDED WITH CURIOSITY SEEKERS +8463-294828-0037-615: DEPARTING FROM FIVE HUNDRED THOUSAND THROATS THREE CHEERS BURST FORTH IN SUCCESSION +8463-294828-0038-616: THOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE THESE TIGHTLY PACKED MASSES HAILING THE ABRAHAM LINCOLN UNTIL IT REACHED THE WATERS OF THE HUDSON RIVER AT THE TIP OF THE LONG PENINSULA THAT FORMS NEW YORK CITY +8555-284447-0000-2299: THEN HE RUSHED (DOWN STAIRS->DOWNSTAIRS) INTO THE COURTYARD SHOUTING LOUDLY FOR HIS SOLDIERS AND THREATENING TO PATCH EVERYBODY IN HIS DOMINIONS (IF->AT) THE SAILORMAN WAS NOT RECAPTURED +8555-284447-0001-2300: HOLD HIM FAST MY MEN AND AS SOON AS I'VE HAD MY COFFEE (AND->AN) OATMEAL (I'LL->I WILL) TAKE HIM TO THE ROOM OF THE GREAT KNIFE AND (PATCH->PAT) HIM +8555-284447-0002-2301: I WOULDN'T MIND A CUP (O->OF) COFFEE MYSELF SAID CAP'N BILL (I'VE->I HAVE) HAD (CONSID'BLE->CONSIDERABLE) EXERCISE THIS MORNIN (AND->AN) I'M (ALL READY->ALREADY) FOR (BREAKFAS->BREAKFAST) +8555-284447-0003-2302: BUT CAP'N BILL MADE NO SUCH ATTEMPT KNOWING IT WOULD BE USELESS +8555-284447-0004-2303: AS SOON AS THEY ENTERED THE ROOM OF THE GREAT KNIFE THE BOOLOOROO GAVE A YELL OF DISAPPOINTMENT +8555-284447-0005-2304: THE ROOM OF THE GREAT KNIFE WAS HIGH AND BIG AND AROUND IT RAN ROWS OF BENCHES FOR THE SPECTATORS TO SIT UPON +8555-284447-0006-2305: IN ONE PLACE AT THE HEAD OF THE ROOM WAS A RAISED PLATFORM FOR THE ROYAL FAMILY WITH ELEGANT (THRONE->THROWN) CHAIRS FOR THE KING AND QUEEN AND SIX SMALLER BUT RICHLY UPHOLSTERED CHAIRS (FOR THE SNUBNOSED->WITH A SNUB NOSED) PRINCESSES +8555-284447-0007-2306: (THEREFORE->THEY ARE FOR) HER MAJESTY PAID NO ATTENTION TO (ANYONE->ANY ONE) AND NO ONE PAID ANY ATTENTION TO HER +8555-284447-0008-2307: RICH JEWELS OF (BLUE STONES->BLUESTS) GLITTERED UPON THEIR PERSONS AND THE ROYAL LADIES WERE FULLY AS GORGEOUS AS THEY WERE HAUGHTY AND OVERBEARING +8555-284447-0009-2308: (MORNIN->MORNING) GIRLS (HOPE YE FEEL->OH BE BILL) AS WELL AS (YE->YOU) LOOK +8555-284447-0010-2309: (CONTROL->CONTROLL) YOURSELVES MY DEARS REPLIED THE BOOLOOROO THE WORST PUNISHMENT I KNOW HOW TO INFLICT ON (ANYONE->ANY ONE) THIS PRISONER IS ABOUT TO SUFFER (YOU'LL->YOU WILL) SEE A VERY PRETTY PATCHING MY ROYAL DAUGHTERS +8555-284447-0011-2310: SUPPOSE IT'S (A FRIEND->OF BRAND) +8555-284447-0012-2311: THE CAPTAIN SHOOK HIS HEAD +8555-284447-0013-2312: WHY YOU (SAID->SIT) TO FETCH THE FIRST LIVING CREATURE WE MET AND THAT WAS (THIS BILLYGOAT->THE SPILLIGOAT) REPLIED THE CAPTAIN PANTING HARD AS HE HELD FAST TO ONE OF THE GOAT'S HORNS +8555-284447-0014-2313: THE IDEA OF PATCHING CAP'N BILL TO A GOAT WAS VASTLY AMUSING TO HIM AND THE MORE HE THOUGHT OF IT THE MORE HE 
ROARED WITH LAUGHTER +8555-284447-0015-2314: THEY LOOK SOMETHING ALIKE YOU KNOW SUGGESTED THE CAPTAIN OF THE GUARDS LOOKING FROM ONE TO THE OTHER DOUBTFULLY AND (THEY'RE->THEY) NEARLY THE SAME SIZE IF YOU STAND THE (GOAT->BOAT) ON HIS HIND LEGS THEY'VE BOTH GOT THE SAME STYLE OF WHISKERS AND THEY'RE BOTH OF (EM->THEM) OBSTINATE AND DANGEROUS SO THEY OUGHT TO MAKE A GOOD PATCH SPLENDID +8555-284447-0016-2315: FINE GLORIOUS +8555-284447-0017-2316: WHEN THIS HAD BEEN ACCOMPLISHED THE BOOLOOROO LEANED OVER TO TRY TO DISCOVER WHY THE FRAME ROLLED AWAY SEEMINGLY OF ITS OWN ACCORD AND HE WAS THE MORE PUZZLED BECAUSE IT HAD NEVER DONE SUCH A THING BEFORE +8555-284447-0018-2317: AT ONCE THE GOAT GAVE A LEAP ESCAPED FROM THE SOLDIERS AND WITH BOWED HEAD RUSHED UPON THE BOOLOOROO +8555-284447-0019-2318: BEFORE ANY COULD STOP HIM HE (BUTTED->BUDDED) HIS MAJESTY SO FURIOUSLY THAT THE (KING->KING'S) SOARED FAR INTO THE AIR AND TUMBLED IN A HEAP AMONG THE BENCHES WHERE HE LAY MOANING AND GROANING +8555-284447-0020-2319: THE (GOAT'S WARLIKE->GOATS WORE LIKE) SPIRIT WAS ROUSED BY THIS SUCCESSFUL ATTACK +8555-284447-0021-2320: THEN THEY SPED IN GREAT HASTE FOR THE DOOR AND THE GOAT GAVE A FINAL BUTT THAT SENT THE ROW OF ROYAL LADIES ALL DIVING INTO THE CORRIDOR IN ANOTHER TANGLE WHEREUPON THEY SHRIEKED IN A MANNER THAT TERRIFIED (EVERYONE->EVERY ONE) WITHIN SOUND OF THEIR VOICES +8555-284447-0022-2321: I HAD A NOTION IT WAS YOU (MATE AS SAVED->MADE TO SEE) ME FROM THE KNIFE +8555-284447-0023-2322: I COULDN'T SHIVER MUCH (BEIN->BEING) BOUND SO TIGHT BUT WHEN I'M LOOSE I MEAN TO HAVE (JUS ONE->JUST SWUNG) GOOD SHIVER TO RELIEVE MY (FEELIN'S->FEELINS) +8555-284447-0024-2323: COME AND GET THE BOOLOOROO SHE SAID GOING TOWARD THE BENCHES +8555-284449-0000-2324: SO THEY WERE QUITE WILLING TO OBEY THE ORDERS OF THEIR GIRL QUEEN AND IN A SHORT TIME THE (BLASTS->BLAST) OF TRUMPETS AND ROLL OF DRUMS AND CLASHING OF CYMBALS TOLD TROT AND CAP'N BILL THAT THE BLUE BANDS HAD (ASSEMBLED->A SIMPLED) BEFORE THE PALACE +8555-284449-0001-2325: THEN THEY ALL MARCHED OUT A LITTLE WAY INTO THE FIELDS AND FOUND THAT THE ARMY OF PINKIES HAD ALREADY FORMED AND WAS ADVANCING STEADILY TOWARD THEM +8555-284449-0002-2326: AT THE HEAD OF THE PINKIES WERE GHIP GHISIZZLE AND BUTTON BRIGHT WHO HAD THE PARROT ON HIS SHOULDER AND THEY WERE SUPPORTED BY CAPTAIN (CORALIE->CORLIE) AND CAPTAIN (TINTINT->TINTANT) AND ROSALIE THE WITCH +8555-284449-0003-2327: WHEN THE (BLUESKINS->BLUESKIN) SAW GHIP GHISIZZLE THEY RAISED ANOTHER GREAT SHOUT FOR HE WAS THE FAVORITE OF THE SOLDIERS AND VERY POPULAR WITH ALL THE PEOPLE +8555-284449-0004-2328: SINCE LAST THURSDAY I (GHIP->GIP) GHISIZZLE HAVE BEEN THE LAWFUL BOOLOOROO OF THE BLUE COUNTRY BUT NOW THAT YOU ARE CONQUERED BY QUEEN TROT I SUPPOSE I AM CONQUERED TOO AND YOU HAVE NO BOOLOOROO AT ALL +8555-284449-0005-2329: WHEN HE FINISHED SHE SAID CHEERFULLY +8555-284449-0006-2330: DON'T WORRY SIZZLE DEAR IT'LL ALL COME RIGHT PRETTY SOON +8555-284449-0007-2331: NOW THEN LET'S ENTER THE CITY (AN->AND) ENJOY THE (GRAND->GREAT) FEAST (THAT'S->ITS) BEING COOKED I'M NEARLY (STARVED->STORM) MYSELF FOR THIS (CONQUERIN KINGDOMS->CONQUERING KINGDOM'S) IS HARD WORK +8555-284449-0008-2332: THEN SHE GAVE ROSALIE BACK HER MAGIC RING THANKING THE KIND WITCH FOR ALL SHE HAD DONE FOR THEM +8555-284449-0009-2333: YOU ARE (*->A) MATE REPLIED THE SAILOR +8555-284449-0010-2334: IT WILL BE SUCH A SATISFACTION +8555-284449-0011-2335: THE GUARDS HAD A TERRIBLE STRUGGLE WITH THE GOAT WHICH WAS LOOSE IN THE ROOM AND STILL WANTED TO 
FIGHT BUT FINALLY THEY SUBDUED THE ANIMAL AND THEN THEY TOOK THE BOOLOOROO OUT OF THE FRAME HE WAS TIED IN AND BROUGHT BOTH HIM AND THE GOAT BEFORE QUEEN TROT WHO AWAITED THEM IN THE THRONE ROOM OF THE PALACE +8555-284449-0012-2336: I'LL GLADLY DO THAT PROMISED THE NEW BOOLOOROO AND I'LL FEED THE (HONORABLE GOAT->HONED) ALL THE SHAVINGS AND LEATHER AND TIN CANS HE CAN EAT BESIDES THE GRASS +8555-284449-0013-2337: (SCUSE->EXCUSE) ME SAID (TROT->SHOT) I NEGLECTED TO TELL YOU THAT YOU'RE NOT THE BOOLOOROO ANY MORE +8555-284449-0014-2338: THE FORMER BOOLOOROO GROANED +8555-284449-0015-2339: (I'LL NOT->HOW NOW) BE WICKED ANY MORE SIGHED THE OLD BOOLOOROO I'LL REFORM +8555-284449-0016-2340: AS A PRIVATE CITIZEN I SHALL BE A MODEL OF DEPORTMENT BECAUSE IT WOULD BE DANGEROUS TO BE OTHERWISE +8555-284449-0017-2341: WHEN FIRST THEY ENTERED THE THRONE ROOM THEY TRIED TO BE AS HAUGHTY AND SCORNFUL AS EVER BUT THE BLUES WHO WERE ASSEMBLED THERE ALL LAUGHED AT THEM AND JEERED THEM FOR THERE WAS NOT A SINGLE PERSON IN ALL THE BLUE COUNTRY WHO LOVED THE PRINCESSES THE LEAST LITTLE BIT +8555-284449-0018-2342: SO GHIP GHISIZZLE ORDERED THE CAPTAIN TO TAKE A FILE OF SOLDIERS AND ESCORT THE RAVING BEAUTIES TO THEIR NEW HOME +8555-284449-0019-2343: THAT EVENING TROT GAVE A GRAND BALL IN THE PALACE TO WHICH THE MOST IMPORTANT OF THE PINKIES (AND->IN) THE BLUESKINS WERE INVITED +8555-284449-0020-2344: THE COMBINED BANDS OF BOTH THE COUNTRIES PLAYED THE MUSIC AND A FINE SUPPER WAS SERVED +8555-292519-0000-2283: BRIGHTER THAN EARLY (DAWN'S->DAWNS) MOST BRILLIANT DYE ARE BLOWN CLEAR BANDS OF COLOR THROUGH THE SKY THAT SWIRL AND SWEEP AND MEET TO BREAK AND FOAM LIKE RAINBOW VEILS UPON A BUBBLE'S DOME +8555-292519-0001-2284: GUIDED BY YOU HOW WE MIGHT STROLL TOWARDS DEATH OUR ONLY MUSIC ONE ANOTHER'S BREATH THROUGH (GARDENS->GARDEN'S) INTIMATE WITH HOLLYHOCKS WHERE (*->IS) SILENT POPPIES BURN BETWEEN THE ROCKS BY POOLS WHERE BIRCHES BEND TO CONFIDANTS ABOVE GREEN WATERS SCUMMED WITH (*->THE) LILY PLANTS +8555-292519-0002-2285: VENICE +8555-292519-0003-2286: IN A SUNSET GLOWING OF CRIMSON AND GOLD SHE LIES THE GLORY OF THE WORLD A (BEACHED->BEECHED) KING'S GALLEY (WHOSE->WHO) SAILS ARE FURLED WHO IS HUNG WITH TAPESTRIES RICH AND OLD +8555-292519-0004-2287: THE PITY THAT WE MUST COME AND GO +8555-292519-0005-2288: WHILE THE OLD GOLD AND THE MARBLE STAYS FOREVER GLEAMING ITS SOFT STRONG BLAZE CALM IN THE EARLY EVENING GLOW +8555-292519-0006-2289: THE PLEASANT GRAVEYARD OF MY SOUL WITH SENTIMENTAL CYPRESS TREES AND FLOWERS IS FILLED THAT I MAY STROLL IN MEDITATION AT MY EASE +8555-292519-0007-2290: IT IS MY HEART HUNG IN THE SKY AND NO CLOUDS EVER FLOAT BETWEEN THE GRAVE FLOWERS AND MY HEART ON HIGH +8555-292519-0008-2291: OVER THE TRACK LINED CITY STREET THE YOUNG (MEN->MAN) THE GRINNING (MEN->MAN) PASS +8555-292519-0009-2292: HO YE SAILS THAT SEEM TO WANDER (IN->AND) DREAM FILLED MEADOWS SAY IS THE SHORE WHERE I STAND THE ONLY FIELD OF STRUGGLE OR ARE YE HIT AND BATTERED OUT THERE BY WAVES AND WIND GUSTS AS YE TACK OVER A CLASHING SEA OF WATERY ECHOES +8555-292519-0010-2293: OLD DANCES ARE SIMPLIFIED OF THEIR YEARNING BLEACHED BY TIME +8555-292519-0011-2294: HE HAD GOT INTO HER COURTYARD +8555-292519-0012-2295: THROUGH THE BLACK NIGHT RAIN HE SANG TO HER WINDOW BARS +8555-292519-0013-2296: THAT WAS BUT RUSTLING OF (DRIPPING->TRIPPING) PLANTS IN THE DARK +8555-292519-0014-2297: SHE WAS ALONE THAT NIGHT +8555-292519-0015-2298: HE HAD BROKEN INTO HER COURTYARD +908-157963-0000-1321: TO FADE AWAY LIKE MORNING BEAUTY FROM 
HER MORTAL DAY DOWN BY THE RIVER OF (ADONA->ADONNA) HER SOFT VOICE IS HEARD AND THUS HER GENTLE LAMENTATION FALLS LIKE MORNING DEW +908-157963-0001-1322: (O->OH) LIFE OF THIS OUR SPRING +908-157963-0002-1323: WHY FADES THE LOTUS OF THE WATER +908-157963-0003-1324: WHY FADE THESE CHILDREN OF THE SPRING +908-157963-0004-1325: (THEL->FELL) IS LIKE A WATRY BOW AND LIKE A PARTING CLOUD LIKE A REFLECTION IN A GLASS LIKE SHADOWS IN THE WATER LIKE DREAMS OF INFANTS LIKE A SMILE UPON AN (INFANTS->INFANT'S) FACE +908-157963-0005-1326: LIKE THE (DOVES VOICE->DOVE'S BOYS) LIKE TRANSIENT DAY LIKE MUSIC IN THE AIR AH +908-157963-0006-1327: AND GENTLE SLEEP THE SLEEP OF DEATH AND GENTLY HEAR THE VOICE OF HIM THAT WALKETH IN THE GARDEN IN THE EVENING TIME +908-157963-0007-1328: THE (LILLY->LILY) OF THE VALLEY BREATHING IN THE HUMBLE GRASS (ANSWERD->ANSWERED) THE LOVELY (MAID AND->MAIDEN) SAID I AM A (WATRY->WATCHERY) WEED AND I AM VERY SMALL AND LOVE TO DWELL IN LOWLY VALES SO WEAK THE GILDED BUTTERFLY SCARCE (PERCHES->PURCHASE) ON MY HEAD YET I AM VISITED FROM HEAVEN AND HE THAT SMILES ON ALL WALKS IN THE VALLEY AND EACH MORN OVER ME SPREADS HIS HAND SAYING REJOICE THOU HUMBLE GRASS THOU (NEW BORN->NEWBORN) LILY FLOWER +908-157963-0008-1329: THOU GENTLE MAID OF SILENT VALLEYS AND OF MODEST BROOKS FOR THOU (SHALL->SHALT) BE CLOTHED IN LIGHT AND FED WITH MORNING MANNA TILL (SUMMERS->SUMMER'S) HEAT MELTS THEE BESIDE THE FOUNTAINS AND THE SPRINGS TO FLOURISH IN ETERNAL VALES THEY WHY (SHOULD THEL->SHOULDST THOU) COMPLAIN +908-157963-0009-1330: WHY SHOULD THE MISTRESS OF THE (VALES->VEILS) OF HAR UTTER A SIGH +908-157963-0010-1331: SHE (CEASD->CEASED) AND (SMILD->SMILED) IN TEARS THEN SAT DOWN IN HER SILVER SHRINE +908-157963-0011-1332: WHICH THOU DOST SCATTER ON EVERY LITTLE BLADE OF GRASS THAT SPRINGS REVIVES THE MILKED COW AND TAMES THE FIRE BREATHING STEED +908-157963-0012-1333: BUT (THEL->THOUGH) IS LIKE A FAINT CLOUD KINDLED AT THE RISING SUN I VANISH FROM MY PEARLY THRONE AND WHO SHALL FIND MY PLACE +908-157963-0013-1334: AND WHY IT SCATTERS ITS BRIGHT BEAUTY (THRO->THROUGH) THE (HUMID->HUMAN) AIR +908-157963-0014-1335: DESCEND (O->A) LITTLE CLOUD AND HOVER BEFORE THE EYES OF (THEL->FELL) +908-157963-0015-1336: O LITTLE CLOUD THE VIRGIN SAID I CHARGE THEE TO TELL ME WHY THOU COMPLAINEST NOW WHEN IN ONE HOUR THOU FADE AWAY THEN WE SHALL SEEK THEE BUT NOT FIND AH (THEL->FELL) IS LIKE TO THEE +908-157963-0016-1337: I PASS AWAY YET I COMPLAIN AND NO ONE HEARS MY VOICE +908-157963-0017-1338: THE CLOUD THEN (SHEWD->SHOWED) HIS GOLDEN HEAD AND HIS BRIGHT FORM (EMERG'D->EMERGED) +908-157963-0018-1339: AND FEAREST THOU BECAUSE I VANISH AND AM SEEN NO MORE +908-157963-0019-1340: IT IS TO TENFOLD LIFE TO LOVE TO PEACE AND RAPTURES (HOLY->WHOLLY) UNSEEN DESCENDING WEIGH MY LIGHT WINGS UPON BALMY FLOWERS AND COURT THE FAIR EYED (DEW->DO) TO TAKE ME TO HER SHINING TENT THE WEEPING VIRGIN TREMBLING KNEELS BEFORE THE RISEN SUN +908-157963-0020-1341: TILL WE ARISE (LINK'D->LINKED) IN A GOLDEN BAND AND NEVER PART BUT WALK UNITED BEARING FOOD TO ALL OUR TENDER FLOWERS +908-157963-0021-1342: LIVES NOT ALONE NOR (OR->OF) ITSELF FEAR NOT AND I WILL CALL THE WEAK WORM FROM ITS LOWLY BED AND THOU SHALT HEAR ITS VOICE +908-157963-0022-1343: COME FORTH WORM AND THE SILENT VALLEY TO THY PENSIVE QUEEN +908-157963-0023-1344: THE HELPLESS WORM AROSE AND SAT UPON THE (LILLYS->LILY'S) LEAF AND THE BRIGHT (CLOUD SAILD->CLOUDS SAILED) ON TO FIND HIS PARTNER IN THE VALE +908-157963-0024-1345: IMAGE OF WEAKNESS ART THOU BUT A WORM 
+908-157963-0025-1346: I SEE THEY LAY HELPLESS AND NAKED WEEPING AND NONE TO ANSWER NONE TO CHERISH THEE WITH MOTHERS SMILES +908-157963-0026-1347: AND SAYS THOU MOTHER OF MY CHILDREN I HAVE LOVED THEE AND I HAVE GIVEN THEE A CROWN THAT NONE CAN TAKE AWAY +908-157963-0027-1348: AND LAY ME DOWN IN THY COLD BED AND LEAVE MY SHINING LOT +908-157963-0028-1349: OR AN EYE OF GIFTS AND GRACES (SHOWRING->SHOWERING) FRUITS AND COINED GOLD +908-157963-0029-1350: WHY A TONGUE (IMPRESS'D->IMPRESSED) WITH HONEY FROM EVERY WIND +908-157963-0030-1351: WHY AN EAR A WHIRLPOOL FIERCE TO DRAW CREATIONS IN +908-31957-0000-1352: ALL IS SAID WITHOUT A WORD +908-31957-0001-1353: I SIT BENEATH THY LOOKS AS CHILDREN DO IN THE NOON SUN WITH SOULS THAT TREMBLE THROUGH THEIR HAPPY EYELIDS FROM AN UNAVERRED YET (PRODIGAL->CHRONICAL) INWARD JOY +908-31957-0002-1354: I DID NOT WRONG MYSELF SO BUT I PLACED A WRONG ON THEE +908-31957-0003-1355: WHEN CALLED BEFORE I TOLD HOW HASTILY I DROPPED MY FLOWERS OR (BRAKE->BREAK) OFF FROM A GAME +908-31957-0004-1356: SHALL I NEVER MISS HOME TALK AND BLESSING AND THE COMMON KISS THAT COMES TO EACH IN TURN NOR COUNT IT STRANGE WHEN I LOOK UP TO DROP ON A NEW RANGE OF WALLS AND FLOORS ANOTHER HOME THAN THIS +908-31957-0005-1357: ALAS I HAVE GRIEVED SO I AM HARD TO LOVE +908-31957-0006-1358: OPEN THY HEART WIDE AND FOLD WITHIN THE WET WINGS OF THY DOVE +908-31957-0007-1359: COULD IT MEAN TO LAST A LOVE SET PENDULOUS BETWEEN SORROW AND SORROW +908-31957-0008-1360: NAY I RATHER THRILLED DISTRUSTING EVERY LIGHT THAT SEEMED TO GILD THE ONWARD PATH AND (FEARED->FEAR) TO OVERLEAN A FINGER EVEN +908-31957-0009-1361: AND THOUGH I HAVE GROWN SERENE AND STRONG SINCE THEN I THINK THAT GOD HAS WILLED A STILL RENEWABLE FEAR +908-31957-0010-1362: O LOVE O TROTH +908-31957-0011-1363: AND LOVE BE FALSE +908-31957-0012-1364: IF HE TO KEEP ONE OATH MUST LOSE ONE JOY BY HIS LIFE'S STAR FORETOLD +908-31957-0013-1365: SLOW TO WORLD GREETINGS QUICK WITH ITS O LIST WHEN THE (ANGELS->ANGEL) SPEAK +908-31957-0014-1366: A RING OF AMETHYST I COULD NOT WEAR HERE PLAINER TO MY SIGHT THAN THAT FIRST KISS +908-31957-0015-1367: THAT WAS THE CHRISM OF LOVE WHICH (LOVE'S->LOVES) OWN CROWN WITH SANCTIFYING SWEETNESS DID (PRECEDE->PROCEED) THE THIRD UPON MY LIPS WAS FOLDED DOWN (IN PERFECT->IMPERFECT) PURPLE (STATE->STAINED) SINCE WHEN INDEED I HAVE BEEN PROUD AND SAID MY LOVE MY OWN +908-31957-0016-1368: DEAREST TEACH ME SO TO POUR OUT GRATITUDE AS THOU DOST GOOD +908-31957-0017-1369: (MUSSULMANS->MUSSELMENS) AND (GIAOURS->GUY ORS) THROW KERCHIEFS AT A SMILE AND HAVE NO RUTH FOR ANY WEEPING +908-31957-0018-1370: BUT THOU ART NOT SUCH A LOVER MY BELOVED +908-31957-0019-1371: THOU CANST WAIT THROUGH SORROW AND SICKNESS TO BRING SOULS TO TOUCH AND THINK IT SOON WHEN OTHERS CRY TOO LATE +908-31957-0020-1372: I (THANK->THINK) ALL WHO HAVE LOVED ME IN THEIR HEARTS WITH THANKS AND LOVE FROM MINE +908-31957-0021-1373: OH TO SHOOT MY SOUL'S FULL MEANING INTO FUTURE YEARS THAT THEY SHOULD LEND IT UTTERANCE AND SALUTE LOVE THAT ENDURES FROM LIFE THAT DISAPPEARS +908-31957-0022-1374: THEN I LONG TRIED BY NATURAL ILLS RECEIVED THE COMFORT FAST WHILE BUDDING AT THY SIGHT MY PILGRIM'S STAFF GAVE OUT GREEN LEAVES WITH MORNING DEWS (IMPEARLED->IMPELLED) +908-31957-0023-1375: I LOVE THEE FREELY AS MEN STRIVE FOR RIGHT I LOVE THEE PURELY AS THEY TURN FROM PRAISE +908-31957-0024-1376: I LOVE THEE WITH THE PASSION PUT TO USE IN MY OLD (GRIEFS->GREEDS) AND WITH MY CHILDHOOD'S FAITH +908-31957-0025-1377: I LOVE THEE WITH A LOVE I SEEMED TO 
LOSE WITH MY LOST SAINTS I LOVE THEE WITH THE BREATH SMILES TEARS OF ALL MY LIFE AND IF GOD CHOOSE I SHALL BUT LOVE THEE BETTER AFTER DEATH + +SUBSTITUTIONS: count ref -> hyp +29 AND -> IN +16 IN -> AND +15 A -> THE +11 THE -> A +8 AN -> AND +6 THIS -> THE +6 I'VE -> I +6 ANYONE -> ANY +5 SOAMES -> SOLMES +5 SILVIA -> SYLVIA +5 O -> OF +4 WHERE -> WERE +4 TWO -> TOO +4 THEIR -> THERE +4 THAT -> THE +4 O -> OH +4 MEN -> MAN +4 MAN -> MEN +4 IS -> AS +4 IN -> AN +4 I'M -> I +4 GALATIANS -> GALLATIONS +4 BATTLEAX -> BATTLE +4 A -> OF +3 WHEN -> ONE +3 VALLIERE -> VALLIERS +3 TODAY -> TO +3 THEL -> FELL +3 THEATER -> THEATRE +3 THE -> THEIR +3 SOLON -> SOLEMN +3 ROUND -> AROUND +3 RODOLFO -> RUDOLPHO +3 PRACTISE -> PRACTICE +3 METER -> METRE +3 MAINHALL -> MAIN +3 LEAVENWORTH -> LEVINWORTH +3 KAFFAR -> KAFFIR +3 IS -> WAS +3 HOLLAN -> HOLLAND +3 HER -> A +3 HE'S -> HE +3 GREY -> GRAY +3 EVERYONE -> EVERY +3 BRITANNULA -> BRITAIN +3 BANNISTER -> BANISTER +3 ANDERS -> ANDREWS +3 AND -> AN +2 YOU'RE -> YOU +2 WYLDER -> WILDER +2 WHITTAWS -> WIDOWS +2 WE'RE -> WERE +2 WE'LL -> WILL +2 VAPOURS -> VAPORS +2 VANDERPOOL -> VAN +2 TWO -> TO +2 TOWARDS -> TOWARD +2 TONIGHT -> TO +2 TO -> OF +2 TIMAEUS -> TO +2 THEY -> THERE +2 THEY -> THE +2 THEN -> THAN +2 THEM -> HIM +2 THEIR -> THE +2 THEE -> THE +2 THE -> THEY +2 THAT -> IT +2 SOMEONE -> SOME +2 SIF -> SIFT +2 SHE'S -> SHE +2 SEEM -> SEEMED +2 RODOLFO -> RODOLPHO +2 READ -> RED +2 PLATONISTS -> PLATANISTS +2 ONE -> WON +2 OF -> A +2 NOW -> THOU +2 NO -> KNOW +2 NEO -> NEW +2 MUNNY -> MONEY +2 MARSHALL -> MARSHAL +2 MADAM -> MADAME +2 KAFFAR'S -> KAFFIR'S +2 JAGO -> YAGO +2 JAGO -> GIAGO +2 IT'S -> ITS +2 IT -> ITS +2 IS -> HAS +2 INTO -> AND +2 IN -> A +2 ICHTHYOSAURUS -> ITHUSORIS +2 I'D -> I +2 I -> I'M +2 HOLY -> WHOLLY +2 HOLBEIN -> HOLBINE +2 HE -> WE +2 HAS -> HAD +2 HALLO -> HELLO +2 GILCHRIST -> GILGRIST +2 FOUNDED -> FOUND +2 FEELING -> FILLING +2 FAIRVIEW -> FAIR +2 EMIL -> AMYL +2 DISSENT -> DESCENT +2 DEDALUS -> DAEDALUS +2 DE -> THE +2 CRITIAS -> CRITIUS +2 CRESSWELL -> CRASWELL +2 CREIGHTON -> CRIGHTON +2 COLOUR -> COLOR +2 CHINGACHGOOK -> CHINGACHOOK +2 CHAISE -> CHASE +2 CARL -> KARL +2 BUT -> THAT +2 BRAKE -> BREAK +2 BEHAVIOUR -> BEHAVIOR +2 AYE -> I +2 AY -> I +2 AT -> THAT +2 AS -> IS +2 ARE -> OUR +2 ANOTHER -> THE +2 ANDERS -> ANDREW'S +2 AND -> AS +2 ALL -> ALTOGETHER +1 ZORA'S -> ZORAS +1 ZORA -> SORA +1 ZOOF'S -> ZEF'S +1 YOU'LL -> YOU +1 YEARNING -> YEARNIN +1 YE -> YOU +1 YE -> BE +1 XAVIER -> ZEVIER +1 XAVIER -> ZAVIOUR +1 XAVIER -> ZAVIER +1 WOULD -> WILL +1 WOULD -> WERE +1 WORST -> WORSE +1 WORSE -> HORSE +1 WOODBEGIRT -> WOOD +1 WOOD -> WOODCUTTERS +1 WONDERING -> WANDERING +1 WOMAN'S -> WOMEN'S +1 WITHES -> WIDTHS +1 WITH -> WHICH +1 WITH -> WHEN +1 WINTER -> WINNER +1 WILL -> WOULD +1 WILL -> WE'LL +1 WIFE -> WHITE +1 WHOSE -> WHO +1 WHO'S -> WHOSE +1 WHO -> WHOSE +1 WHITTAWD -> WIDOWED +1 WHITTAW -> WIDOW +1 WHISK -> WHISKED +1 WHIRLPOOL -> WAR +1 WHIPPED -> WHIP +1 WHERE -> WHERE'S +1 WHEN -> WITH +1 WHEN -> AND +1 WHATEVER -> WHATSOEVER +1 WHAT'S -> WHAT +1 WHAT -> WHEN +1 WHALE -> WELL +1 WHALE -> WAIL +1 WESTPORT -> WESTWARD +1 WESTPORT -> PORT +1 WESTMERE -> WESTMARE +1 WERE -> WHERE +1 WERE -> ARE +1 WELL -> WHILE +1 WELL -> FOR +1 WELCOMED -> WELCOME +1 WEDNESDAY -> WIND +1 WEATHER -> WHETHER +1 WEAR -> WHERE +1 WEAKLY -> WEEKLY +1 WE'RE -> WE +1 WE -> WE'VE +1 WE -> SEA +1 WAVES -> WAY +1 WATRY -> WATCHERY +1 WATERMILL -> WATER +1 WASTE -> WASTES +1 WAS -> WITH +1 WAS -> VIEW'S +1 WAS -> IS +1 WARLIKE -> 
WORE +1 VOICE -> BOYS +1 VISITORS -> VISITOR +1 VILLEROY -> VILLEROI +1 VILLA -> VILIDESSEA +1 VIGNETTE -> VINEYARD +1 VICARIOUS -> VIPEROUS +1 VIADUCT -> VIEDUC +1 VERY -> VERIMENT +1 VERSE -> FIRST +1 VERNE'S -> VERN'S +1 VAUDOIS -> FAUDOIS +1 VANES -> VEINS +1 VALOR -> VALOUR +1 VALES -> VEILS +1 UTAH -> NEW +1 UPON -> ON +1 UP -> UPSTAIRS +1 UP -> OF +1 UNWARILY -> THEN +1 UNTO -> INTO +1 UNLIKE -> I +1 UNDERGROUND -> ON +1 UNCLENCHED -> CLENCHED +1 UNC -> YOUNG +1 UN -> AND +1 UD -> TODDY +1 TWO -> TUTRILOGIES +1 TURNOVER -> TURN +1 TUPPENY -> TUPPENNY +1 TROT -> SHOT +1 TREDDLESTON -> TREDDLESTONE +1 TREBLE -> TROUBLE +1 TRAVESTY -> TRAVASTY +1 TRAVELING -> TRAVELLING +1 TRAVELERS -> TRAVELLERS +1 TRAVELER -> TRAVELLERS +1 TOWNE -> TOWN +1 TOWELLING -> TOWELINGS +1 TOULD -> DID +1 TOTTY -> NO +1 TOPS -> TOPSY +1 TOOMS -> TOMBS +1 TOO -> TWO +1 TONNAY -> TONIET +1 TONNAY -> TONE +1 TONNAY -> TO +1 TONNAY -> TINACHANT +1 TOLD -> COLD +1 TOILETTE -> TOILET +1 TO -> WEST +1 TO -> TWO +1 TO -> THROUGH +1 TO -> INTO +1 TO -> DOES +1 TO -> DEFINED +1 TIRESOME -> TO +1 TINTORET -> TINTARETTE +1 TINTINT -> TINTANT +1 TIMES -> TUBS +1 TIME -> YOU +1 TIME -> TON +1 TIMAEUS -> TIMEUS +1 TIMAEUS -> TIMAIRS +1 TIMAEUS -> TENEAS +1 TIMAEUS -> TEARS +1 TIBI -> TIBBY +1 THUS -> LUSTY +1 THROUGH -> TO +1 THRONE -> THROWN +1 THRO -> THROUGH +1 THOUSAND -> THOUSANDTH +1 THOUGHT -> BOUGHT +1 THOUGH -> THE +1 THORLEIF -> TORE +1 THORKEL -> TORQUAL +1 THORKEL -> TORKLE +1 THORKEL -> TORCOAL +1 THIS -> ITS +1 THEY'RE -> THEY +1 THEY -> MAY +1 THEREFORE -> THEY +1 THERE -> THERE'S +1 THERE -> THEIR +1 THEN -> IN +1 THEN -> AND +1 THEM -> THE +1 THEL -> THOUGH +1 THEL -> THOU +1 THEE'S -> THESE +1 THEE -> HE +1 THE -> WHO +1 THE -> TO +1 THE -> THIS +1 THE -> THAT +1 THE -> IN +1 THAT'S -> ITS +1 THAT -> THAN +1 THANKING -> THINKING +1 THANK -> THINK +1 THAN -> THEN +1 THAN -> THAT +1 THAN -> IN +1 TECHNIQUE -> TYPENIQUE +1 TEA -> T +1 TARANTULA -> TERENTIAL +1 TALKERS -> TALK +1 TAKEN -> TAKING +1 TABU -> TABOU +1 TABU -> TABOO +1 TABU -> BOOT +1 TABLE -> TABLECLOTH +1 SYMPOSIUM -> SIMPOS +1 SWOONS -> SWOON +1 SWEEP -> SWEPT +1 SWAN -> SWAY +1 SUSPICIONS -> SUSPICION +1 SURVIVE -> SURVIVED +1 SURFACES -> SERVICES +1 SUMNER -> SUMMER +1 SUMMON -> SUMMONED +1 SUMMERS -> SUMMER'S +1 SUITCASE -> SUIT +1 STREAMLINE -> STREAM +1 STORY'S -> STORIES +1 STEEL'D -> STEELED +1 STEADY -> STUDY +1 STATE -> STATES +1 STATE -> STAINED +1 STARVED -> STORM +1 STARTS -> START +1 STANDS -> STAND +1 STAIR -> STARE +1 STAID -> STAY +1 STAGE -> STEED +1 SQUEAK -> SQUI +1 SPRING -> SPRANG +1 SPLENDOR -> SPLENDOUR +1 SPLENDET -> SPLENDID +1 SPIN -> SPEND +1 SPECIALISED -> SPECIALIZED +1 SOUTHEY'S -> SO +1 SOUTHEY -> SELVEY +1 SOUTHEY -> SALVI +1 SOU -> SOUS +1 SOOTHED -> SOOTHE +1 SON -> FUN +1 SOMETIME -> SOME +1 SOME -> SOMETIME +1 SOLON'S -> SILENCE +1 SOLILOQUY -> SOLOQUY +1 SODALITY -> SODELITY +1 SOCRATIC -> CRADIC +1 SO -> SEWED +1 SNUBNOSED -> SNUB +1 SMILD -> SMILED +1 SMELLS -> MILLS +1 SLEEVE -> STEVE +1 SKILLFUL -> SKILFUL +1 SKEPTICAL -> SCEPTICAL +1 SIZE -> SIZED +1 SIR -> ARE +1 SINCE -> SEN +1 SIN -> IN +1 SIGHT -> SIGHTSEERS +1 SIGHED -> SIDE +1 SHOWRING -> SHOWERING +1 SHOULD -> WOULD +1 SHOULD -> SHOULDST +1 SHODDY -> SHODY +1 SHIP -> SHIP'S +1 SHEWD -> SHOWED +1 SHERIFF -> SHERIFF'S +1 SHE -> YOU +1 SHE -> HE +1 SHARPS -> SHARP'S +1 SHARP'ST -> SHARPEST +1 SHANNON -> SHAN +1 SHAN'T -> SHA'N'T +1 SHALL -> SHALT +1 SHABATA -> SHEBATA +1 SETTLE -> SETTLED +1 SERVE -> SERVED +1 SERVANT -> SERVANTS +1 SENTENCES -> 
SENTENCE +1 SENT -> SET +1 SENSE -> SCENTS +1 SENCE -> SINCE +1 SEMON'S -> SIMMONS +1 SEEMED -> SEEMS +1 SEEDS -> SEATS +1 SECTS -> SEX +1 SEATING -> SITTING +1 SEAT -> SEED +1 SCUTCHEON -> STATUNE +1 SCUSE -> EXCUSE +1 SCRAPBOOKS -> SCRAP +1 SCOUTING -> SCOUT +1 SCHOOL -> SCHOOLS +1 SCHOOL -> SCHOOLBOYS +1 SCEVRA -> SCAFFRA +1 SCEURA -> SKURA +1 SCATHE -> SCATH +1 SCAROONS -> SCARONS +1 SAVED -> SEE +1 SAUVEUR -> SEVERE +1 SATE -> SAT +1 SANG -> SAYING +1 SAMPLE -> SABLE +1 SALINE -> SAILING +1 SALIENT -> SAILORED +1 SAINTS -> SAYS +1 SAILD -> SAILED +1 SAIL -> SALE +1 SAID -> SIT +1 SAID -> SAT +1 RUST -> REST +1 RULED -> ROLLED +1 RUFUS -> RUFFUS +1 RUE -> GRUE +1 ROSSETER -> ROSSITUR +1 ROERER -> ROAR +1 RODOLFO'S -> GODOLPH'S +1 RODOLFO -> UDOLPHO +1 RODOLFO -> RIDOLPHO +1 RODOLFO -> RIDOLPHAL +1 ROBIN'S -> ROBINS +1 REWEIGHED -> REWAIED +1 RETURN -> RETURNED +1 RESIGNED -> RESIGN +1 REMOVE -> MOVED +1 REMOV'D -> REMOVED +1 REMEMBER -> REMEMBERED +1 REMARK -> REMARKED +1 REMAINED -> REMAINING +1 REMAIN -> REMAINED +1 RELOCATED -> RE +1 RELIES -> REALIZE +1 REIGNED -> RAINED +1 REGGIE -> READY +1 REGAINED -> REGAIN +1 REFUSED -> WERE +1 REENFORCEMENTS -> REINFORCEMENTS +1 REEDER -> READER +1 RED -> READ +1 RECORD -> RECORDS +1 RECOGNISED -> RECOGNIZED +1 REBUK'D -> REBUKED +1 RE -> REINTER +1 RACHEL -> RACHAEL +1 QUINCY -> QUINCEY +1 QUASI -> COURSE +1 QUART -> COURT +1 PYTHAGOREANS -> PYTHAGORIANS +1 PUTTIN -> PUTTING +1 PUT -> PUTTING +1 PURSE -> PERSON +1 PURPOSED -> PURPOSE +1 PURIST -> PUREST +1 PSALM -> SUM +1 PROVES -> PROVED +1 PROSELYTING -> PROSELY +1 PRODIGAL -> CHRONICAL +1 PRINCIPLE -> PRINCIPAL +1 PREVENT -> PRESENT +1 PRETENSE -> PRETENCE +1 PRECIEUSES -> PURSUS +1 PRECEDE -> PROCEED +1 PRE -> PRIESTHOO +1 PRACTICE -> PRACTISE +1 POWER -> BOWER +1 POSSESS -> POSSESSED +1 POPHAM -> PAPA +1 POISON'D -> POISONED +1 POINT -> BLINT +1 PLURAL -> POOR +1 PLURAL -> PEARL +1 PLEASANCE -> PLEASANTS +1 PLEA -> PLEAD +1 PLATONISTS -> PLATINISTS +1 PLAITS -> PLATES +1 PLACE -> PLACES +1 PIERC'D -> PIERCED +1 PICK -> PIG +1 PICK -> PIC +1 PHILANTHROPIES -> ANTHROPIES +1 PHILADELPHIAN -> PHILADELPHIA +1 PHAEDRUS -> FEATURES +1 PH -> P +1 PERVERTERS -> PERVERTED +1 PERSON -> PERSONAL +1 PERCHES -> PURCHASE +1 PEGRENNE -> PEGRIN +1 PEGRE -> PEG +1 PEASE -> PIECE +1 PAUL -> POLITICS +1 PATIENTS -> PATIENCE +1 PATIENCE -> PATIENT +1 PATCH -> PAT +1 PASSAGE -> PASSAGEWAY +1 PASCHAL -> PASSION +1 PARTICLES -> PARTICLE +1 PAROQUET -> PARAQUET +1 PARLOR -> PARLOUR +1 PARASITES -> PARRICIDES +1 PARALLELOGRAM -> PARALLELLOGRAM +1 PAPAL -> PEPPEL +1 PANTS -> PANS +1 PANE -> PAIN +1 PALATE -> PALLET +1 OVER -> OF +1 OUTRAGE -> OUTRAGED +1 OUR -> HER +1 OUR -> A +1 OUGHTER -> ORDERS +1 OTTLEY'S -> OAKLEIGHS +1 OTHERWORLDLY -> OTHER +1 OTHERS -> OTHER +1 OTHER -> OTTER +1 OSH -> I +1 OSAGE -> O +1 ORDERED -> CANNOT +1 ORCHARD -> ARCHER +1 OR -> OF +1 OR -> FOR +1 OR -> ARE +1 OR -> A +1 OPHELIA -> OF +1 ONTO -> ON +1 ONLY -> OMER +1 ONE -> SWUNG +1 ONE -> ONE'S +1 ON -> ANOTHER +1 ON -> ANGULATIONS +1 OLIVE'S -> ALL +1 OLIVE -> ALAP +1 OH -> I'LL +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICER'S +1 OFFICERS -> OFFICER +1 OFFENSES -> OFFENCES +1 OF -> O +1 OF -> IS +1 OF -> HAVE +1 OF -> BANDS +1 OF -> AUTHOR +1 OBSERVED -> OBSERVE +1 OAKS -> YOKES +1 O'ER -> OR +1 O -> A +1 NOW -> NO +1 NOVEL'S -> NOVELS +1 NOUGHT -> NAUGHT +1 NOTHIN -> NOTHING +1 NOT -> NOW +1 NOT -> NOTHING +1 NORTHWARDS -> NORTHWARD +1 NORTHERNERS -> DERPOOL +1 NORTH -> NORTHWEST +1 NOR -> OR +1 NINE -> NOT +1 NIGHTFALL -> NIGHT +1 
NEW -> NEWBORN +1 NEVERBEND -> NEVERBIN +1 NELLY -> NELLIE +1 NEIGHBOUR -> NEIGHBOR +1 NEIGHBORHOOD -> NEIGHBOURHOOD +1 NEIGHBOR -> NEIGHBOUR +1 NECK -> NET +1 NEARER -> NEAR +1 NE'ER -> NEVER +1 NATTY -> NANNIE +1 NARES -> NEAR'S +1 NAOMI -> THEY +1 NAOMI -> NOW +1 NAOMI -> NAY +1 NAMED -> NAME +1 N -> THAN +1 MY -> I +1 MY -> BY +1 MUSSULMANS -> MUSSELMENS +1 MOUNTED -> MOUNTAIN +1 MOST -> TO +1 MORNIN -> MORNING +1 MORMONS -> MORE +1 MONTMARTRE -> MOUNT +1 MONTMARTRE -> MONTMARTRA +1 MONTFICHET -> MONTFICHE +1 MONTFICHET -> MARTFICHERE +1 MONGOOSE -> MONGOO'S +1 MOMBI -> MUMBIE +1 MOLDED -> MOULDED +1 MOHICAN -> MOHICANS +1 MO -> MOLE +1 MISTS -> MIST +1 MISTER -> THIS +1 MISTER -> MISS +1 MIST -> MISTS +1 MISSOURIANS -> MISSOURIENS +1 MISS -> MISTER +1 MISDEMEANOR -> MISDEMEANOUR +1 MINE -> MIND +1 MILLION'D -> MILLIONED +1 MILLIMETER -> MILLIMETRE +1 MILLIGRAM -> MILAGRAM +1 MILITATED -> MITIGATED +1 MILES -> MYLES +1 METERS -> METRES +1 MESSRS -> MESSIERS +1 MESS -> MASTS +1 MERSEY -> MERCY +1 MERRY -> MARRIED +1 MERGANSER -> MERGANCER +1 MERCHISTON -> MURCHISON +1 MEN -> AMEN +1 MEET -> MET +1 MEALYBACK -> MEALLY +1 MEADOWCROFT -> MEDICROFT +1 MAY -> MAYBE +1 MAY -> IS +1 MAUSOLEUM -> MUZZLEEM +1 MATE -> MADE +1 MASTER'S -> MASTERS +1 MARVELOUS -> MARVELLOUS +1 MARIVAUX -> MARAVAUX +1 MARAIS -> MARAY +1 MANY -> MEN +1 MANIFESTED -> MANIFEST +1 MAINHALL -> MEANHAVED +1 MAID -> MAIDS +1 MAID -> MAIDEN +1 MACDONALDS -> MC +1 MAC -> MICARTLE +1 LURE -> LOWER +1 LULLS -> LOLLS +1 LUIS -> LOUIS +1 LUBRICATE -> LUBRICADE +1 LOWER -> LOWERED +1 LOVE'S -> LOVES +1 LOUIS -> LOUISE +1 LOU'S -> LOOSE +1 LORNE -> LORN +1 LOOK -> LUCK +1 LOGARITHMS -> LOGARTHEMS +1 LOCRIS -> LOCHRIS +1 LOAD -> LOWED +1 LITER -> LATER +1 LINK'D -> LINKED +1 LINE -> LIE +1 LILLYS -> LILY'S +1 LILLY -> LILY +1 LILBURN -> LITTLE +1 LIGHT -> WRITE +1 LETS -> THAT'S +1 LESSER -> LESS +1 LEOCADIA'S -> LEUCEDES +1 LEOCADIA'S -> LEOCADIUS +1 LEOCADIA -> THE +1 LEOCADIA -> LOU +1 LEOCADIA -> LOCATIA +1 LEOCADIA -> LOCALIA +1 LEOCADIA -> LEOKADIA +1 LEOCADI -> LUCADIA +1 LEFRANK -> LE +1 LECOMPTE -> LECOMTE +1 LECOMPTE -> LE +1 LEAVING -> LEAPING +1 LEAVENWORTH -> LEVIN +1 LEASED -> LEAST +1 LEADS -> LEAVES +1 LE -> LAUROI +1 LARKSPUR -> LARKSPER +1 LARKSPUR -> LARKSBURG +1 LANTHORN -> LANTERN +1 LAND -> LANDA +1 LAMBENT -> LAMENT +1 LALLIE -> LILY +1 LAKE -> LEEK +1 LAD -> WELL +1 LABOUR -> LABOR +1 KNOW -> KNOWS +1 KNEED -> NEED +1 KNAVE -> NAVE +1 KIRTLAND -> CURTLIN +1 KINGDOMS -> KINGDOM'S +1 KING -> KING'S +1 KICK -> KICKAPOOS +1 KESWICK -> KEZWICK +1 KEOGH -> KIEV +1 JUS -> JUST +1 JOHN -> JOHNNIAGO +1 JEWELER'S -> JEWELLERS +1 JAW -> JOB +1 JASPER -> JAPSER +1 JAIL -> DRALE +1 JACK -> JACKKNIFE +1 ITS -> IT'S +1 IT'S -> TO +1 IT -> YOU +1 IT -> TWASN'T +1 IT -> TO +1 IT -> IT'LL +1 IT -> HE +1 IT -> AND +1 IS -> IT'S +1 IS -> IT +1 IS -> HIS +1 IRON'S -> IRONS +1 INVENTORS -> IN +1 INTRENCHMENT -> ENTRENCHMENT +1 INTERESTS -> ENTRANCE +1 INTEREST -> INTERESTS +1 INTENTS -> INTENSE +1 INTENT -> AND +1 INSURRECTIONISTS -> INSURRECTIONOUS +1 INNERLOCHY -> INERLOCHY +1 INNERLOCHY -> IN +1 INFECTED -> IN +1 INFANTS -> INFANT'S +1 INFANTILE -> INVENTILE +1 INCULCATED -> INCALCATED +1 INCLOSED -> ENCLOSED +1 INCERTAINTY -> IN +1 INACTION -> AN +1 IN -> TO +1 IN -> ON +1 IN -> OF +1 IN -> IMPRACTICALLY +1 IN -> IMPERFECT +1 IMPRESSES -> IMPRESS +1 IMPRESSED -> IMPRESS +1 IMPRESS'D -> IMPRESSED +1 IMPEARLED -> IMPELLED +1 IMMATURE -> IMMATEUR +1 IKE -> LIKE +1 IF -> OF +1 IF -> AT +1 IDIOSYNCRATICALLY -> IDIOS +1 
ICHTHYOSAURUS -> IDEAS +1 I'M -> ON +1 I'LL -> I +1 I'LL -> HOW +1 I -> OUT +1 HYDRAS -> HYDRAST +1 HUSBAND -> HUSBA +1 HUNTLEY -> HUNTLY +1 HUMID -> HUMAN +1 HOUSEHOLDS -> HOUSEHOLD +1 HOUSECLEANING -> HOUSE +1 HOTBED -> HOT +1 HOSTESS -> HOSTES +1 HOST -> HOSE +1 HORTON -> WHARTON +1 HORSEPLAY -> HORSE +1 HORACE -> HORNS +1 HOPES -> HELPS +1 HOPE -> OH +1 HONOURABLE -> HONORABLE +1 HONOUR -> HONOR +1 HONORS -> HONOURS +1 HONORABLE -> HONED +1 HOLOCAUST -> HOLOCOST +1 HOLD -> ALL +1 HIT -> HID +1 HIM -> LINE +1 HILDA'S -> HELDA'S +1 HILDA -> HELDA +1 HIGHEST -> HAS +1 HETTY -> HETTY'S +1 HERE -> THERE +1 HERACLEITUS -> HERACLITUS +1 HER -> THE +1 HER -> THAT +1 HENCHMEN -> HENCHMAN +1 HEN -> HANDLED +1 HELPED -> SELF +1 HELD -> HUTTED +1 HEART'S -> HEARTSEASE +1 HEART -> HARD +1 HEAR -> SEE +1 HEAR -> HERE +1 HEAD -> EDMOST +1 HE'S -> IS +1 HE'D -> HE +1 HE -> SHE +1 HE -> HIS +1 HAZEWRAPPED -> HAYES +1 HAY -> HEY +1 HAWTREY -> HALTREE +1 HAWK -> HOT +1 HAVING -> HEAVEN +1 HAVE -> HAS +1 HAVE -> HALF +1 HAS -> IS +1 HAS -> AS +1 HARTS -> HEARTS +1 HARMONIZED -> HARMONIZE +1 HARKENED -> HEARKENED +1 HARBORING -> HARBOURING +1 HARANGUE -> HURRY +1 HARALD -> HAROLD +1 HAPPEN -> HAPPENED +1 HANNA -> HAD +1 HANGINGS -> HANGING +1 HANDS -> HANDSOME +1 HAMLET -> HAMLE +1 HAM -> HIM +1 HALLOA -> HULLO +1 HAL -> HELLO +1 HAKON -> HAWKIN +1 HAIRDRESSER -> HAIR +1 HAD -> NOT +1 HAD -> IS +1 HAD -> HEAD +1 HAD -> HAVE +1 HAD -> AT +1 GUISE -> SKIES +1 GUESTS -> GUESS +1 GUEST -> GUESTS +1 GROWS -> GROVES +1 GRINGO -> GREENOW +1 GRIEFS -> GREEDS +1 GREY'S -> GRAY'S +1 GREEING -> GREEN +1 GREATER -> GREAT +1 GREAT -> GRATEFUL +1 GRAY -> GREY +1 GRAPEVINE -> GRAPE +1 GRAND -> GREAT +1 GRAMOPHONE -> GRAMMAPHONE +1 GRAM -> GRAHAM +1 GRADES -> GRATES +1 GOVERNMENT -> GOVERNOR +1 GOVERNED -> GOVERN +1 GOOBERS -> GOULD +1 GOAT'S -> GOATS +1 GOAT -> BOAT +1 GIVE -> KIVED +1 GIVE -> GAVE +1 GIRL'S -> GIRLS +1 GIRARD -> GERARD +1 GILCHRIST'S -> GILCHER'S +1 GILCHRIST -> GO +1 GIER -> GEAR +1 GIAOURS -> GUY +1 GHIP -> GIP +1 GEOFFREY'S -> JEFFREY'S +1 GEOFFREY -> JEFFREY +1 GENTLEMEN -> GENTLEMAN +1 GENERALLY -> GERALLY +1 GENERAL -> GENERALSHIP +1 GAYLY -> GAILY +1 GARDENS -> GARDEN'S +1 GAMEWELL -> GAME +1 FUTURISTIC -> FUTUREISTIC +1 FULNESS -> FULLNESS +1 FRISKILY -> FRISKLY +1 FRIEND -> BRAND +1 FRANCS -> FRANKS +1 FORWARDED -> FOOTED +1 FORMALLY -> FORMERLY +1 FOREVER -> FOR +1 FORBES'S -> FORTS +1 FOR -> WITH +1 FOR -> FOREVER +1 FOR -> FIR +1 FOR -> FALLING +1 FOLLOWED -> FOWLED +1 FLUFFINOSE -> FLAPHANO'S +1 FLOUR -> FLOWER +1 FLIGHT -> FIGHT +1 FIRS -> FURS +1 FIREBUGS -> FIRE +1 FIREBALL -> FIRE +1 FINE -> FIND +1 FIND -> FIVE +1 FILL -> FELL +1 FIGHTER -> FRIGHTER +1 FETE -> FIGHT +1 FERDINANDO -> FERDINAND +1 FELT -> FILT +1 FELT -> FELLED +1 FEES -> BEES +1 FEELS -> FILLS +1 FEELIN'S -> FEELINS +1 FEEL -> BILL +1 FEARED -> FEAR +1 FAVORITE -> FAVOURITE +1 FARTHEST -> FURTHEST +1 FARRAGUT -> FERRAGUT +1 FAR -> FARTHER +1 FALLEN -> FALL +1 FAIR -> FAIREST +1 EYE -> I +1 EVERYDAY -> EVERY +1 EVER -> ARROW +1 EVENIN'S -> EVENINGS +1 EVA -> EITHER +1 ESTATE -> STATE +1 ESTAFANIA -> ESTAFFANIA +1 ESTAFANIA -> DA +1 ESPRIT -> A +1 ESPECIAL -> SPECIAL +1 ESCHEATED -> ISTIATED +1 ER -> A +1 ENTRUSTING -> INTRUSTING +1 ENTHRALMENT -> ENTHRALIMENT +1 ENTER -> INTER +1 ENTER -> ENTERED +1 ENSURE -> INSURE +1 ENQUIRIES -> INQUIRIES +1 ENQUIRED -> INQUIRED +1 ENQUIRE -> INQUIRE +1 ENDEAVOR -> ENDEAVOUR +1 EMISSIONS -> OMISSIONS +1 EMIL -> AMY +1 EMIL -> AM +1 EMIGRATION -> IMMIGRATION +1 EMIGRANT -> IMMIGRANT +1 
EMERG'D -> EMERGED +1 EM -> THEM +1 ELSINORE -> ELSINOR +1 ELMO'S -> ABLE'S +1 ELECT -> ELEC +1 ELCHO -> ELKO +1 ELABORATE -> CELEBRATE +1 EFFECTED -> AFFECTED +1 EDITION -> ADDITION +1 EARSHOT -> EAR +1 E -> EEN +1 E -> EA +1 DYKES -> DIKES +1 DURING -> DREWING +1 DUNNO -> DON'T +1 DUMPY -> DON'T +1 DUMAS -> DE +1 DUERER -> DURE +1 DRUGGIST'S -> DRUGGIST +1 DROPIDAS -> TROPIDAS +1 DRIPPING -> TRIPPING +1 DOWN -> DOWNSTAIRS +1 DOVES -> DOVE'S +1 DOUZE -> DUSPORT +1 DOOR -> DOORSTEP +1 DONATISTS -> DONATIST +1 DONA -> DORNEST +1 DOLL -> DAL +1 DOCTRESS -> DOCTRIS +1 DISTRICTS -> DISTRICT +1 DISMAYED -> DISMAYEDESTAFHANIA +1 DISCOLOURED -> DISCOLORED +1 DINAH'S -> DYNAS +1 DINAH -> DINA +1 DIFFERENCES -> DIFFERENCE +1 DID -> DEAD +1 DIATRIBE -> DIETRIBE +1 DIAS -> DAIS +1 DIALOGUES -> DIALECTS +1 DEW -> DO +1 DEMEANOR -> DEMEANOUR +1 DELICATE -> DELEGATE +1 DELIBERATIVE -> DELIBERATE +1 DELIA -> GALLIA +1 DELIA -> DAHLIA +1 DEFINED -> THE +1 DEFINE -> TO +1 DEDALOS -> DE +1 DECENCY -> DECENCIES +1 DECEIVING -> SEEING +1 DE -> GRAFT +1 DE -> DETONICHAUCHANT +1 DAWN'S -> DAWNS +1 DAIRY -> DEARIE +1 CYN -> SIN +1 CURVED -> CARVED +1 CRYSTAL -> CRISTEL +1 CROSSTREES -> CROSS +1 CRESSWELLS -> CRUSTWELLS +1 CRESSWELL -> CRESWELL +1 CRASWELLERS -> CRESTWELLERS +1 CRASWELLER -> CRUSWELLER +1 COURT'S -> COURTS +1 COURT -> COURTYARD +1 COUNTRY'S -> COUNTRY +1 COUNSELS -> COUNCILS +1 COUNSELLED -> COUNSEL +1 COULDN'T -> GOOD'N +1 COULD -> COULDN'T +1 COSTS -> COST +1 CORRELATES -> COROLLETS +1 CORN -> CORNIERS +1 CORALIE -> CORLIE +1 COOK -> COPE +1 CONTROL -> CONTROLL +1 CONTI -> KANTI +1 CONTAINED -> CONTAINING +1 CONTACT -> CONDUCT +1 CONSTANTINE -> KONSTANTINE +1 CONSIDERATE -> CONSIDER +1 CONSID'BLE -> CONSIDERABLE +1 CONQUERIN -> CONQUERING +1 CONJURER -> CONJUROR +1 CONDENSE -> CONTENSED +1 COMPOSSER -> COMPOSSIBLE +1 COMPOSE -> COMPOSED +1 COMMENTATORS -> COMMON +1 COMMANDMENTS -> COMMAND +1 COMING -> COMMON +1 COLOURS -> COLORS +1 COLORS -> COLOURS +1 COLORS -> COLLARS +1 COLORIST -> COLORLESS +1 COLORIST -> CHOLERIST +1 COLORED -> COLOURED +1 COLOR -> COLOUR +1 COLD -> CALLED +1 COAL -> CO +1 CO -> COEXIST +1 CLOUD -> CLOUDS +1 CLEW -> CLUE +1 CLAUSE -> CLAS +1 CIVET -> SAVEETTE +1 CITADELLED -> CITADELED +1 CIGARETTE -> SICK +1 CHRISTAIN -> CHRISTIAN +1 CHIAROSCURISTS -> KIERRASCURISTS +1 CHIAROSCURIST -> CUIRASCURISTS +1 CHEROOT -> TROUT +1 CHECKER -> CHEQUER +1 CHECK -> CHEQUE +1 CHATTERBOX -> CHATTER +1 CHARENTE -> NECHERANT +1 CHARACTERISTIC -> CORRECTORISTIC +1 CHANGE -> CHANGES +1 CHANGE -> CHANGED +1 CENTRED -> SENATE +1 CENTER -> CENTRE +1 CENDENARIES -> SENDIARIES +1 CEASD -> CEASED +1 CAUGHT -> THOUGHT +1 CAT -> HAT +1 CASE -> GASE +1 CARPACCIO'S -> CARPATIUS +1 CAPLESS -> CAPLICE +1 CANVASS -> CANVAS +1 CANDLE -> CANDLELIGHT +1 CAN -> COULD +1 CAN -> CANNOT +1 CALDWELL -> CAULDWELL +1 BUTTED -> BUDDED +1 BUT -> DO +1 BURN -> BURNE +1 BURGOYNE -> WERE +1 BUNNIT -> BUNNITT +1 BUL -> BULBUL +1 BUCHANAN -> YOU +1 BROWN -> OBROWN +1 BROTHER -> BRETHREN +1 BRITANNULISTS -> BRITON +1 BRISK -> BRACE +1 BRINGING -> RINGING +1 BREAKFAS -> BREAKFAST +1 BREAD -> ABRET +1 BRANDS -> BRINGS +1 BRANCH -> RANCH +1 BRAGELONNE -> BRAGOLON +1 BRAGELONNE -> BRAGELONE +1 BRACTON'S -> BROCKTON'S +1 BOX -> BOXWOMEN +1 BOTANY -> BARTANY +1 BOTANICAL -> BATTANICAL +1 BORDERS -> BORDER +1 BOOKKEEPER -> BITKEEPER +1 BOLLS -> BOWLS +1 BOAR -> BOREHOUND +1 BLUESKINS -> BLUESKIN +1 BLUE -> BLUESTS +1 BLESSINGS -> BLESSING +1 BLASTS -> BLAST +1 BIT -> GOOD +1 BIT -> BID +1 BILLYGOAT -> SPILLIGOAT +1 BILLED -> 
BUILD +1 BERGSON -> BERKES +1 BERGSON -> BERGIN +1 BENCH -> PINCH +1 BEING -> MEAN +1 BEIN -> BEING +1 BEG -> BEGGED +1 BEFORE -> FOR +1 BEFIT -> BE +1 BEFAL -> BEFALL +1 BEEDER -> READER +1 BEEBE -> B +1 BEDIMMED -> BEDEMNED +1 BEATER -> PETER +1 BEACHED -> BEECHED +1 BATTLEAX -> ADELAX +1 BASKET -> BASCULADES +1 BALEEN -> BALINE +1 BALAAM'S -> BAYLIM'S +1 BAINS -> BANDOMERE +1 BADGES -> BADGERS +1 BADAUDERIE -> BADR'D +1 BABIRUSA -> BABRUSA +1 AWHILE -> A +1 AUNT -> AND +1 AU -> OKARRANT +1 ATTITUDE -> SATITUDE +1 ATTENDANTS -> ATTENDANCE +1 ATTENDANCE -> ATTENDANTS +1 ATMOSPHERIC -> ATMOSPHERE +1 ATHOLEMEN -> ETHEL +1 ATHLETE -> ADETE +1 ATHENAIS -> ETHNEE +1 ATHENAIS -> ETHINAY +1 ATCHISON -> ATTITSON +1 AT -> SAID +1 AT -> IT +1 AT -> AND +1 ASTOR -> ASTRO +1 ASSEMBLED -> A +1 ASCENDENCY -> A +1 AS -> TO +1 AS -> THE +1 AS -> A +1 ARRONDISSEMENT -> ARE +1 ARRIVING -> RIVING +1 ARRESTS -> ARREST +1 ARMED -> ARM +1 ARGYLE -> OUR +1 ARE -> OR +1 ARE -> ALL +1 ARDOUR -> ARDOR +1 ARC -> ARK +1 APPROVES -> ME +1 APPRENTICE -> APPRENTICED +1 APPEALED -> APPEAL +1 ANYWHERE -> MANY +1 ANYMORE -> ANY +1 ANY -> ANYTHING +1 ANTEDATING -> ANTETING +1 ANSWERD -> ANSWERED +1 ANNALS -> ANNAL +1 ANGELS -> ANGEL +1 ANDERS -> ANDRES +1 ANDERS -> ANDRE +1 ANDELLA -> ANNE +1 ANDELLA -> ANDDELA +1 ANDELLA -> AND +1 ANDELLA -> AMDELLA +1 AND -> ONE +1 AND -> INTO +1 AND -> INDEED +1 AND -> ENTHRIBING +1 AND -> AT +1 AND -> AM +1 AND -> A +1 ANAXAGORAS -> AN +1 AN -> ON +1 AN -> IN +1 AMPHITHEATER -> AMPHITHEATRE +1 AMASS -> A +1 ALTERNATIVE -> ALL +1 ALREADY -> ALL +1 ALLUVION -> ALLUVIAN +1 ALL -> ALREADY +1 ALEXANDRA -> ALEXANDER +1 ALBANS -> ALBAN'S +1 AIR -> HEIR +1 AIGNAN -> DAN +1 AID -> AIDS +1 AH -> A +1 AFTERDECK -> AFTER +1 AFFRIGHTENED -> A +1 AFFILIATED -> ARE +1 AFFECT -> EFFECT +1 ADVENTURE -> ADVENTURER +1 ADONA -> ADONNA +1 ACTOR -> ACTOR'S +1 ACKNOWLEDGEMENT -> ACKNOWLEDGMENT +1 ABOLITIONISTS -> ABOLITIONIST +1 ABDUCTION -> ADOCTION +1 ABBE -> ABBEY +1 A -> UPON +1 A -> UNNOTTINGHAM +1 A -> UNNOTABLY +1 A -> TO +1 A -> HER +1 A -> HE +1 A -> ESPECIAL +1 A -> AWAY +1 A -> ATTORIAN +1 A -> ARREST +1 A -> ACCORD + +DELETIONS: count ref +7 IS +6 AND +6 A +4 OF +4 CHARENTE +3 TO +2 WILL +2 WAY +2 TOGETHER +2 THE +2 STAIRS +2 IT +2 IN +2 AM +1 YOU'LL +1 YOU +1 YARD +1 WOMEN +1 WEST +1 WELL +1 WE +1 WASN'T +1 VOUCHED +1 VINES +1 TRILOGIES +1 TORY +1 TONNAY +1 THRIVING +1 THOR +1 THINKING +1 THING +1 TAKES +1 T +1 STONES +1 STEP +1 SPECIAL +1 SHIP +1 SEERS +1 SEE +1 ROI +1 REST +1 READY +1 PRACTICALLY +1 PORTES +1 POOS +1 POND +1 PERFECT +1 OTHER +1 ONLY +1 OLD +1 NOTTINGHAM +1 NOTABLY +1 NOT +1 N +1 MUCH +1 MER +1 LORD +1 LOADS +1 LO +1 LIGHT +1 L +1 KNIFE +1 JAGO +1 HUMPH +1 HOUND +1 HIS +1 HIM +1 HAVE +1 GRAF +1 GOAT +1 GALATIANS +1 FIND +1 EXIST +1 EVER +1 ESTAFANIA +1 ENTER +1 EASE +1 EARS +1 E +1 DO +1 DEED +1 DE +1 DARK +1 D'ESTE +1 CUTTERS +1 COURANT +1 CLOTH +1 CHORD +1 CHARLES +1 C +1 BUL +1 BOYS +1 BORN +1 B +1 AT +1 AS +1 ARDLE + +INSERTIONS: count hyp +11 ONE +11 A +9 THE +8 IS +6 ARE +6 AND +5 IT +5 HAVE +4 IN +4 DAY +4 AXE +4 AM +3 OF +3 ME +3 HALL +2 WILL +2 THAT +2 OTHER +2 NULA +2 NIGHT +2 ILL +2 I +2 HIS +2 HAD +2 FOR +2 FIND +2 AS +1 ZAY +1 WRAPPED +1 WORTH +1 WORLDLY +1 WILLIAM +1 WHILE +1 WHERE +1 WAY +1 WAS +1 WARILY +1 VINE +1 VIEW +1 VENORS +1 UNDISSIMA +1 ULYSTS +1 TURNED +1 TREES +1 TOP +1 TO +1 TIME +1 THESE +1 TENT +1 TEACHERS +1 SPREE +1 SIMPLED +1 SHOT +1 SCENE +1 SAGE +1 RED +1 READY +1 PROVES +1 POOL +1 PLAY +1 OWE +1 OVER +1 ORS +1 ORDER +1 ONLY +1 ON +1 OCCASIONA 
+1 NOSED +1 NOSE +1 NOR +1 MORE +1 MILL +1 MEN'S +1 MEN +1 MAU +1 MASS +1 MARSHRA +1 MAKE +1 LOSS +1 LOCKY +1 LOCATED +1 LIKE +1 LED +1 LEAF +1 KNOW +1 KATYA +1 ITS +1 HYMN +1 HIRESUME +1 HIM +1 HE +1 GUILE +1 GROUND +1 GREE +1 GOING +1 FRIGHTENED +1 FREEZED +1 FRANK +1 FIT +1 FILIATED +1 FANIA +1 FALL +1 EXAGGERUS +1 EVER +1 EULO +1 EFFECTED +1 DRESSER +1 DONALDS +1 DERPOOL +1 DELLA +1 DELA +1 DECK +1 CRADICALLY +1 COMTE +1 CLEANING +1 CHRIST +1 CERTAINTY +1 CASE +1 BURN +1 BUGS +1 BOX +1 BOOKS +1 BEGIRT +1 BED +1 BE +1 BALL +1 BACK +1 AT +1 AN +1 ACTION + +PER-WORD STATS: word corr tot_errs count_in_ref count_in_hyp +AND 1740 87 1787 1780 +A 1130 78 1166 1172 +THE 3438 73 3461 3488 +IN 876 71 905 918 +IS 449 35 468 465 +TO 1329 32 1340 1350 +OF 1788 31 1799 1808 +I 708 26 711 731 +AN 155 20 165 165 +ONE 187 19 191 202 +IT 548 19 558 557 +THAT 603 16 610 612 +AS 377 15 383 386 +ARE 178 15 182 189 +HE 522 13 526 531 +O 4 12 14 6 +WERE 184 11 186 193 +MEN 57 11 62 63 +THEY 204 10 209 209 +THEIR 167 10 173 171 +HAVE 212 10 215 219 +HAD 316 10 321 321 +FOR 416 10 420 422 +AT 278 10 284 282 +YOU 417 9 418 425 +WILL 139 9 143 144 +TWO 64 9 71 66 +THIS 256 9 263 258 +THERE 135 9 137 142 +ON 277 9 279 284 +WHERE 44 8 49 47 +RODOLFO 0 8 8 0 +MAN 63 8 67 67 +ITS 81 8 82 88 +HAS 104 8 108 108 +ANY 84 8 85 91 +AM 57 8 59 63 +ALL 222 8 225 227 +WHEN 128 7 133 130 +WAS 576 7 579 580 +THAN 85 7 88 89 +OR 172 7 176 175 +I'M 28 7 33 30 +HER 319 7 324 321 +DE 5 7 10 7 +ANDERS 4 7 11 4 +WE 149 6 152 152 +TIMAEUS 3 6 9 3 +THEN 121 6 125 123 +OTHER 63 6 65 67 +OH 32 6 33 37 +I'VE 17 6 23 17 +HIM 213 6 215 217 +ANYONE 0 6 6 0 +WITH 422 5 424 425 +WELL 72 5 75 74 +TOO 60 5 61 64 +TONNAY 0 5 5 0 +THEL 0 5 5 0 +SYLVIA 0 5 0 5 +SOLMES 0 5 0 5 +SOAMES 0 5 5 0 +SILVIA 0 5 5 0 +SHE 279 5 281 282 +OUR 79 5 81 82 +NOW 91 5 94 93 +NOT 335 5 338 337 +LEOCADIA 1 5 6 1 +JAGO 0 5 5 0 +IT'S 26 5 29 28 +INTO 102 5 104 105 +HIS 472 5 473 476 +GALATIANS 1 5 6 1 +FIND 20 5 22 23 +CHARENTE 0 5 5 0 +BATTLEAX 0 5 5 0 +WOULD 139 4 141 141 +WAY 71 4 73 73 +THEM 119 4 122 120 +SOME 86 4 87 89 +RED 18 4 19 21 +PRACTISE 1 4 4 2 +PRACTICE 6 4 7 9 +NO 167 4 169 169 +NEW 34 4 35 37 +ME 184 4 184 188 +MAINHALL 0 4 4 0 +LEAVENWORTH 0 4 4 0 +KNOW 75 4 76 78 +HE'S 5 4 9 5 +GREY 0 4 3 1 +GRAY 4 4 5 7 +GALLATIONS 0 4 0 4 +FELL 16 4 16 20 +EVERY 31 4 31 35 +EMIL 0 4 4 0 +DAY 50 4 50 54 +BATTLE 6 4 6 10 +AXE 1 4 1 5 +ANDELLA 0 4 4 0 +XAVIER 0 3 3 0 +WHOSE 13 3 14 15 +WHO 153 3 154 155 +WE'RE 0 3 3 0 +WE'LL 4 3 6 5 +VALLIERS 0 3 0 3 +VALLIERE 0 3 3 0 +TODAY 0 3 3 0 +TIME 85 3 87 86 +THROUGH 41 3 42 43 +THOU 18 3 18 21 +THORKEL 0 3 3 0 +THEE 27 3 30 27 +THEATRE 2 3 2 5 +THEATER 0 3 3 0 +TABU 0 3 3 0 +STATE 25 3 27 26 +SOLON 1 3 4 1 +SOLEMN 1 3 1 4 +SEEMED 29 3 30 31 +SEE 64 3 65 66 +SAID 159 3 161 160 +RUDOLPHO 0 3 0 3 +ROUND 14 3 17 14 +READY 9 3 10 11 +READ 16 3 18 17 +PLATONISTS 0 3 3 0 +ONLY 75 3 77 76 +OFFICERS 8 3 10 9 +NIGHT 24 3 24 27 +NAOMI 2 3 5 2 +MISTER 46 3 48 47 +METRE 0 3 0 3 +METER 8 3 11 8 +MAY 54 3 56 55 +MAIN 3 3 3 6 +LEVINWORTH 0 3 0 3 +LE 0 3 1 2 +KAFFIR 0 3 0 3 +KAFFAR 0 3 3 0 +ICHTHYOSAURUS 0 3 3 0 +I'LL 12 3 14 13 +HOLLAND 0 3 0 3 +HOLLAN 0 3 3 0 +HELLO 2 3 2 5 +HALL 9 3 9 12 +GREAT 73 3 74 75 +GILCHRIST 0 3 3 0 +FAIR 6 3 7 8 +EVERYONE 0 3 3 0 +EVER 33 3 35 34 +ESTAFANIA 0 3 3 0 +ENTER 6 3 9 6 +E 0 3 3 0 +DO 93 3 94 95 +CRESSWELL 1 3 4 1 +COLOUR 0 3 2 1 +COLORS 1 3 3 2 +COLOR 9 3 10 11 +BUT 341 3 344 341 +BRITANNULA 0 3 3 0 +BRITAIN 1 3 1 4 +BE 314 3 314 317 +BANNISTER 0 3 3 0 +BANISTER 0 3 0 3 +AROUND 12 3 12 15 +ANOTHER 34 
3 36 35 +ANDREWS 0 3 0 3 +YOU'RE 3 2 5 3 +YOU'LL 7 2 9 7 +YE 6 2 8 6 +YAGO 0 2 0 2 +WYLDER 3 2 5 3 +WORSE 5 2 6 6 +WOOD 3 2 4 4 +WON 2 2 2 4 +WILDER 0 2 0 2 +WIDOWS 0 2 0 2 +WHOLLY 9 2 9 11 +WHITTAWS 0 2 2 0 +WHILE 34 2 34 36 +WHAT 112 2 113 113 +WHALE 2 2 4 2 +WESTPORT 0 2 2 0 +WEST 6 2 7 7 +VAPOURS 0 2 2 0 +VAPORS 0 2 0 2 +VANDERPOOL 0 2 2 0 +VAN 2 2 2 4 +UPON 93 2 94 94 +UP 108 2 110 108 +TRAVELLERS 0 2 0 2 +TOWARDS 17 2 19 17 +TOWARD 8 2 8 10 +TONIGHT 0 2 2 0 +TOGETHER 14 2 16 14 +THOUGHT 53 2 54 54 +THOUGH 32 2 33 33 +THINKING 7 2 8 8 +THESE 68 2 68 70 +THAT'S 13 2 14 14 +T 0 2 1 1 +STAIRS 6 2 8 6 +SPECIAL 1 2 2 2 +SOUTHEY 0 2 2 0 +SOMETIME 1 2 2 2 +SOMEONE 1 2 3 1 +SO 196 2 197 197 +SINCE 24 2 25 25 +SIN 12 2 13 13 +SIFT 0 2 0 2 +SIF 0 2 2 0 +SHOULD 59 2 61 59 +SHOT 2 2 2 4 +SHIP 7 2 9 7 +SHE'S 4 2 6 4 +SEEM 11 2 13 11 +SCHOOL 9 2 11 9 +SAT 18 2 18 20 +RODOLPHO 0 2 0 2 +REST 13 2 14 14 +REMAINED 5 2 6 6 +READER 1 2 1 3 +RE 0 2 1 1 +PUTTING 7 2 7 9 +PROVES 1 2 2 2 +PLURAL 0 2 2 0 +PLATANISTS 0 2 0 2 +PICK 1 2 3 1 +PERSON 12 2 13 13 +PATIENCE 1 2 2 2 +OVER 58 2 59 59 +NULA 0 2 0 2 +NOTHING 33 2 33 35 +NOR 20 2 21 21 +NEO 1 2 3 1 +NEIGHBOUR 1 2 2 2 +NEIGHBOR 0 2 1 1 +N 2 2 4 2 +MY 223 2 225 223 +MUNNY 0 2 2 0 +MORE 119 2 119 121 +MONTMARTRE 0 2 2 0 +MONTFICHET 7 2 9 7 +MONEY 5 2 5 7 +MISTS 2 2 3 3 +MIST 4 2 5 5 +MISS 17 2 18 18 +MARSHALL 1 2 3 1 +MARSHAL 1 2 1 3 +MANY 40 2 41 41 +MAID 4 2 6 4 +MADAME 4 2 4 6 +MADAM 1 2 3 1 +LOWER 5 2 6 6 +LOUIS 1 2 2 2 +LINE 12 2 13 13 +LILY 2 2 2 4 +LIKE 105 2 105 107 +LIGHT 37 2 39 37 +LEOCADIA'S 0 2 2 0 +LECOMPTE 0 2 2 0 +LARKSPUR 0 2 2 0 +KARL 0 2 0 2 +KAFFIR'S 0 2 0 2 +KAFFAR'S 0 2 2 0 +ITHUSORIS 0 2 0 2 +INTERESTS 1 2 2 2 +INNERLOCHY 0 2 2 0 +IMPRESSED 3 2 4 4 +IMPRESS 0 2 0 2 +ILL 6 2 6 8 +IF 129 2 131 129 +I'D 1 2 3 1 +HOT 3 2 3 5 +HORSE 6 2 6 8 +HONORABLE 1 2 2 2 +HOLY 1 2 3 1 +HOLBINE 0 2 0 2 +HOLBEIN 0 2 2 0 +HERE 69 2 70 70 +HEAR 18 2 20 18 +HEAD 35 2 36 36 +HALLO 0 2 2 0 +GUESTS 4 2 5 5 +GOAT 5 2 7 5 +GIVE 28 2 30 28 +GILGRIST 0 2 0 2 +GIAGO 0 2 0 2 +FOUNDED 3 2 5 3 +FOUND 21 2 21 23 +FOREVER 1 2 2 2 +FIRE 22 2 22 24 +FILLING 0 2 0 2 +FIGHT 3 2 3 5 +FELT 17 2 19 17 +FEELING 9 2 11 9 +FALL 2 2 2 4 +FAIRVIEW 0 2 2 0 +ESPECIAL 0 2 1 1 +EFFECTED 1 2 2 2 +DON'T 38 2 38 40 +DISSENT 0 2 2 0 +DID 66 2 67 67 +DESCENT 2 2 2 4 +DERPOOL 0 2 0 2 +DELIA 0 2 2 0 +DEFINED 1 2 2 2 +DEDALUS 0 2 2 0 +DAEDALUS 0 2 0 2 +CRITIUS 0 2 0 2 +CRITIAS 0 2 2 0 +CRIGHTON 0 2 0 2 +CREIGHTON 0 2 2 0 +CRASWELL 0 2 0 2 +COURT 11 2 12 12 +COULDN'T 5 2 6 6 +COULD 94 2 95 95 +COMMON 8 2 8 10 +COLOURS 0 2 1 1 +COLORIST 0 2 2 0 +COLD 8 2 9 9 +CO 0 2 1 1 +CHINGACHOOK 0 2 0 2 +CHINGACHGOOK 0 2 2 0 +CHASE 1 2 1 3 +CHANGE 7 2 9 7 +CHAISE 0 2 2 0 +CASE 15 2 16 16 +CARL 0 2 2 0 +CANNOT 16 2 16 18 +CAN 64 2 66 64 +BURN 3 2 4 4 +BUL 0 2 2 0 +BREAK 3 2 3 5 +BRAKE 1 2 3 1 +BRAGELONNE 0 2 2 0 +BOYS 5 2 6 6 +BOX 7 2 8 8 +BIT 7 2 9 7 +BERGSON 0 2 2 0 +BEING 39 2 40 40 +BEHAVIOUR 0 2 2 0 +BEHAVIOR 0 2 0 2 +B 1 2 2 2 +AYE 0 2 2 0 +AY 0 2 2 0 +ATTENDANTS 0 2 1 1 +ATTENDANCE 0 2 1 1 +ATHENAIS 0 2 2 0 +ARREST 1 2 1 3 +ANDREW'S 0 2 0 2 +AMYL 0 2 0 2 +ALTOGETHER 6 2 6 8 +ALREADY 21 2 22 22 +ZORAS 0 1 0 1 +ZORA'S 0 1 1 0 +ZORA 2 1 3 2 +ZOOF'S 1 1 2 1 +ZEVIER 0 1 0 1 +ZEF'S 0 1 0 1 +ZAY 0 1 0 1 +ZAVIOUR 0 1 0 1 +ZAVIER 0 1 0 1 +YOUNG 43 1 43 44 +YOKES 0 1 0 1 +YEARNING 1 1 2 1 +YEARNIN 0 1 0 1 +YARD 4 1 5 4 +WRITE 4 1 4 5 +WRAPPED 0 1 0 1 +WORTH 4 1 4 5 +WORST 3 1 4 3 +WORLDLY 0 1 0 1 +WORE 3 1 3 4 +WOODCUTTERS 0 1 0 1 +WOODBEGIRT 0 1 1 0 +WONDERING 1 1 2 1 +WOMEN'S 1 1 1 2 +WOMEN 7 1 8 7 +WOMAN'S 1 
1 2 1 +WITHES 0 1 1 0 +WINTER 4 1 5 4 +WINNER 0 1 0 1 +WIND 8 1 8 9 +WILLIAM 1 1 1 2 +WIFE 16 1 17 16 +WIDTHS 0 1 0 1 +WIDOWED 0 1 0 1 +WIDOW 1 1 1 2 +WHO'S 1 1 2 1 +WHITTAWD 0 1 1 0 +WHITTAW 0 1 1 0 +WHITE 23 1 23 24 +WHISKED 0 1 0 1 +WHISK 0 1 1 0 +WHIRLPOOL 1 1 2 1 +WHIPPED 1 1 2 1 +WHIP 0 1 0 1 +WHICH 216 1 216 217 +WHETHER 23 1 23 24 +WHERE'S 0 1 0 1 +WHATSOEVER 1 1 1 2 +WHATEVER 12 1 13 12 +WHAT'S 4 1 5 4 +WHARTON 0 1 0 1 +WESTWARD 1 1 1 2 +WESTMERE 0 1 1 0 +WESTMARE 0 1 0 1 +WELCOMED 0 1 1 0 +WELCOME 6 1 6 7 +WEEKLY 0 1 0 1 +WEDNESDAY 1 1 2 1 +WEATHER 5 1 6 5 +WEAR 4 1 5 4 +WEAKLY 0 1 1 0 +WE'VE 2 1 2 3 +WAVES 6 1 7 6 +WATRY 1 1 2 1 +WATERMILL 0 1 1 0 +WATER 19 1 19 20 +WATCHERY 0 1 0 1 +WASTES 0 1 0 1 +WASTE 4 1 5 4 +WASN'T 1 1 2 1 +WARLIKE 0 1 1 0 +WARILY 0 1 0 1 +WAR 5 1 5 6 +WANDERING 2 1 2 3 +WAIL 0 1 0 1 +VOUCHED 0 1 1 0 +VOICE 17 1 18 17 +VISITORS 4 1 5 4 +VISITOR 2 1 2 3 +VIPEROUS 0 1 0 1 +VINEYARD 0 1 0 1 +VINES 0 1 1 0 +VINE 0 1 0 1 +VILLEROY 0 1 1 0 +VILLEROI 0 1 0 1 +VILLA 0 1 1 0 +VILIDESSEA 0 1 0 1 +VIGNETTE 0 1 1 0 +VIEW'S 0 1 0 1 +VIEW 2 1 2 3 +VIEDUC 0 1 0 1 +VICARIOUS 3 1 4 3 +VIADUCT 0 1 1 0 +VERY 82 1 83 82 +VERSE 1 1 2 1 +VERNE'S 0 1 1 0 +VERN'S 0 1 0 1 +VERIMENT 0 1 0 1 +VENORS 0 1 0 1 +VEINS 0 1 0 1 +VEILS 1 1 1 2 +VAUDOIS 0 1 1 0 +VANES 0 1 1 0 +VALOUR 0 1 0 1 +VALOR 2 1 3 2 +VALES 2 1 3 2 +UTAH 1 1 2 1 +UPSTAIRS 3 1 3 4 +UNWARILY 0 1 1 0 +UNTO 2 1 3 2 +UNNOTTINGHAM 0 1 0 1 +UNNOTABLY 0 1 0 1 +UNLIKE 0 1 1 0 +UNDISSIMA 0 1 0 1 +UNDERGROUND 0 1 1 0 +UNCLENCHED 0 1 1 0 +UNC 4 1 5 4 +UN 0 1 1 0 +ULYSTS 0 1 0 1 +UDOLPHO 0 1 0 1 +UD 0 1 1 0 +TYPENIQUE 0 1 0 1 +TWASN'T 1 1 1 2 +TUTRILOGIES 0 1 0 1 +TURNOVER 0 1 1 0 +TURNED 21 1 21 22 +TURN 18 1 18 19 +TUPPENY 0 1 1 0 +TUPPENNY 0 1 0 1 +TUBS 0 1 0 1 +TROUT 1 1 1 2 +TROUBLE 8 1 8 9 +TROT 4 1 5 4 +TROPIDAS 0 1 0 1 +TRIPPING 0 1 0 1 +TRILOGIES 0 1 1 0 +TREES 19 1 19 20 +TREDDLESTONE 0 1 0 1 +TREDDLESTON 0 1 1 0 +TREBLE 0 1 1 0 +TRAVESTY 0 1 1 0 +TRAVELLING 0 1 0 1 +TRAVELING 2 1 3 2 +TRAVELERS 0 1 1 0 +TRAVELER 0 1 1 0 +TRAVASTY 0 1 0 1 +TOWNE 0 1 1 0 +TOWN 6 1 6 7 +TOWELLING 0 1 1 0 +TOWELINGS 0 1 0 1 +TOULD 0 1 1 0 +TOTTY 2 1 3 2 +TORY 0 1 1 0 +TORQUAL 0 1 0 1 +TORKLE 0 1 0 1 +TORE 0 1 0 1 +TORCOAL 0 1 0 1 +TOPSY 0 1 0 1 +TOPS 3 1 4 3 +TOP 11 1 11 12 +TOOMS 0 1 1 0 +TONIET 0 1 0 1 +TONE 5 1 5 6 +TON 0 1 0 1 +TOMBS 0 1 0 1 +TOLD 31 1 32 31 +TOILETTE 0 1 1 0 +TOILET 0 1 0 1 +TODDY 0 1 0 1 +TIRESOME 1 1 2 1 +TINTORET 0 1 1 0 +TINTINT 0 1 1 0 +TINTARETTE 0 1 0 1 +TINTANT 0 1 0 1 +TINACHANT 0 1 0 1 +TIMEUS 0 1 0 1 +TIMES 20 1 21 20 +TIMAIRS 0 1 0 1 +TIBI 0 1 1 0 +TIBBY 0 1 0 1 +THUS 20 1 21 20 +THROWN 4 1 4 5 +THRONE 3 1 4 3 +THRO 0 1 1 0 +THRIVING 0 1 1 0 +THOUSANDTH 0 1 0 1 +THOUSAND 12 1 13 12 +THORLEIF 0 1 1 0 +THOR 0 1 1 0 +THINK 52 1 52 53 +THING 21 1 22 21 +THEY'RE 2 1 3 2 +THEREFORE 19 1 20 19 +THERE'S 12 1 12 13 +THEE'S 0 1 1 0 +THANKING 2 1 3 2 +THANK 12 1 13 12 +TERENTIAL 0 1 0 1 +TENT 5 1 5 6 +TENEAS 0 1 0 1 +TECHNIQUE 0 1 1 0 +TEARS 11 1 11 12 +TEACHERS 0 1 0 1 +TEA 2 1 3 2 +TARANTULA 0 1 1 0 +TALKERS 0 1 1 0 +TALK 19 1 19 20 +TAKING 7 1 7 8 +TAKES 2 1 3 2 +TAKEN 14 1 15 14 +TABOU 0 1 0 1 +TABOO 0 1 0 1 +TABLECLOTH 0 1 0 1 +TABLE 23 1 24 23 +SYMPOSIUM 1 1 2 1 +SWUNG 0 1 0 1 +SWOONS 0 1 1 0 +SWOON 3 1 3 4 +SWEPT 1 1 1 2 +SWEEP 1 1 2 1 +SWAY 1 1 1 2 +SWAN 0 1 1 0 +SUSPICIONS 2 1 3 2 +SUSPICION 4 1 4 5 +SURVIVED 0 1 0 1 +SURVIVE 1 1 2 1 +SURFACES 0 1 1 0 +SUMNER 0 1 1 0 +SUMMONED 3 1 3 4 +SUMMON 0 1 1 0 +SUMMERS 0 1 1 0 +SUMMER'S 2 1 2 3 +SUMMER 6 1 6 7 +SUM 1 1 1 2 +SUITCASE 0 1 1 0 +SUIT 4 1 4 5 +STUDY 12 1 12 
13 +STREAMLINE 0 1 1 0 +STREAM 1 1 1 2 +STORY'S 0 1 1 0 +STORM 3 1 3 4 +STORIES 3 1 3 4 +STONES 2 1 3 2 +STEVE 0 1 0 1 +STEP 6 1 7 6 +STEELED 0 1 0 1 +STEEL'D 0 1 1 0 +STEED 1 1 1 2 +STEADY 4 1 5 4 +STAY 11 1 11 12 +STATUNE 0 1 0 1 +STATES 6 1 6 7 +STARVED 0 1 1 0 +STARTS 0 1 1 0 +START 3 1 3 4 +STARE 1 1 1 2 +STANDS 2 1 3 2 +STAND 13 1 13 14 +STAIR 0 1 1 0 +STAINED 1 1 1 2 +STAID 0 1 1 0 +STAGE 5 1 6 5 +SQUI 0 1 0 1 +SQUEAK 1 1 2 1 +SPRING 7 1 8 7 +SPREE 0 1 0 1 +SPRANG 3 1 3 4 +SPLENDOUR 0 1 0 1 +SPLENDOR 0 1 1 0 +SPLENDID 9 1 9 10 +SPLENDET 0 1 1 0 +SPIN 0 1 1 0 +SPILLIGOAT 0 1 0 1 +SPEND 2 1 2 3 +SPECIALIZED 0 1 0 1 +SPECIALISED 0 1 1 0 +SOUTHEY'S 0 1 1 0 +SOUS 0 1 0 1 +SOU 0 1 1 0 +SORA 0 1 0 1 +SOOTHED 0 1 1 0 +SOOTHE 1 1 1 2 +SON 14 1 15 14 +SOLOQUY 0 1 0 1 +SOLON'S 0 1 1 0 +SOLILOQUY 4 1 5 4 +SODELITY 0 1 0 1 +SODALITY 0 1 1 0 +SOCRATIC 0 1 1 0 +SNUBNOSED 0 1 1 0 +SNUB 0 1 0 1 +SMILED 4 1 4 5 +SMILD 0 1 1 0 +SMELLS 1 1 2 1 +SLEEVE 0 1 1 0 +SKURA 0 1 0 1 +SKILLFUL 0 1 1 0 +SKILFUL 0 1 0 1 +SKIES 0 1 0 1 +SKEPTICAL 0 1 1 0 +SIZED 1 1 1 2 +SIZE 4 1 5 4 +SITTING 4 1 4 5 +SIT 11 1 11 12 +SIR 35 1 36 35 +SIMPOS 0 1 0 1 +SIMPLED 0 1 0 1 +SIMMONS 0 1 0 1 +SILENCE 7 1 7 8 +SIGHTSEERS 0 1 0 1 +SIGHT 19 1 20 19 +SIGHED 3 1 4 3 +SIDE 23 1 23 24 +SICK 2 1 2 3 +SHOWRING 0 1 1 0 +SHOWERING 0 1 0 1 +SHOWED 5 1 5 6 +SHOULDST 0 1 0 1 +SHODY 0 1 0 1 +SHODDY 0 1 1 0 +SHIP'S 0 1 0 1 +SHEWD 0 1 1 0 +SHERIFF'S 3 1 3 4 +SHERIFF 3 1 4 3 +SHEBATA 0 1 0 1 +SHARPS 0 1 1 0 +SHARPEST 1 1 1 2 +SHARP'ST 0 1 1 0 +SHARP'S 0 1 0 1 +SHANNON 2 1 3 2 +SHAN'T 0 1 1 0 +SHAN 0 1 0 1 +SHALT 2 1 2 3 +SHALL 43 1 44 43 +SHABATA 0 1 1 0 +SHA'N'T 0 1 0 1 +SEX 2 1 2 3 +SEWED 1 1 1 2 +SEVERE 1 1 1 2 +SETTLED 1 1 1 2 +SETTLE 2 1 3 2 +SET 19 1 19 20 +SERVICES 1 1 1 2 +SERVED 4 1 4 5 +SERVE 11 1 12 11 +SERVANTS 4 1 4 5 +SERVANT 11 1 12 11 +SENTENCES 2 1 3 2 +SENTENCE 3 1 3 4 +SENT 5 1 6 5 +SENSE 15 1 16 15 +SENDIARIES 0 1 0 1 +SENCE 0 1 1 0 +SENATE 2 1 2 3 +SEN 0 1 0 1 +SEMON'S 0 1 1 0 +SELVEY 0 1 0 1 +SELF 5 1 5 6 +SEERS 0 1 1 0 +SEEMS 12 1 12 13 +SEEING 12 1 12 13 +SEEDS 0 1 1 0 +SEED 2 1 2 3 +SECTS 0 1 1 0 +SEATS 3 1 3 4 +SEATING 1 1 2 1 +SEAT 3 1 4 3 +SEA 18 1 18 19 +SCUTCHEON 0 1 1 0 +SCUSE 0 1 1 0 +SCRAPBOOKS 0 1 1 0 +SCRAP 0 1 0 1 +SCOUTING 0 1 1 0 +SCOUT 5 1 5 6 +SCHOOLS 1 1 1 2 +SCHOOLBOYS 0 1 0 1 +SCEVRA 0 1 1 0 +SCEURA 0 1 1 0 +SCEPTICAL 0 1 0 1 +SCENTS 0 1 0 1 +SCENE 3 1 3 4 +SCATHE 0 1 1 0 +SCATH 0 1 0 1 +SCAROONS 0 1 1 0 +SCARONS 0 1 0 1 +SCAFFRA 0 1 0 1 +SAYS 12 1 12 13 +SAYING 15 1 15 16 +SAVEETTE 0 1 0 1 +SAVED 4 1 5 4 +SAUVEUR 0 1 1 0 +SATITUDE 0 1 0 1 +SATE 0 1 1 0 +SANG 4 1 5 4 +SAMPLE 0 1 1 0 +SALVI 0 1 0 1 +SALINE 0 1 1 0 +SALIENT 1 1 2 1 +SALE 0 1 0 1 +SAINTS 4 1 5 4 +SAILORED 0 1 0 1 +SAILING 0 1 0 1 +SAILED 0 1 0 1 +SAILD 0 1 1 0 +SAIL 5 1 6 5 +SAGE 0 1 0 1 +SABLE 0 1 0 1 +RUST 0 1 1 0 +RULED 0 1 1 0 +RUFUS 0 1 1 0 +RUFFUS 0 1 0 1 +RUE 0 1 1 0 +ROSSITUR 0 1 0 1 +ROSSETER 0 1 1 0 +ROLLED 3 1 3 4 +ROI 0 1 1 0 +ROERER 0 1 1 0 +RODOLFO'S 0 1 1 0 +ROBINS 0 1 0 1 +ROBIN'S 0 1 1 0 +ROAR 0 1 0 1 +RIVING 0 1 0 1 +RINGING 0 1 0 1 +RIDOLPHO 0 1 0 1 +RIDOLPHAL 0 1 0 1 +REWEIGHED 0 1 1 0 +REWAIED 0 1 0 1 +RETURNED 18 1 18 19 +RETURN 8 1 9 8 +RESIGNED 1 1 2 1 +RESIGN 0 1 0 1 +REMOVED 4 1 4 5 +REMOVE 3 1 4 3 +REMOV'D 0 1 1 0 +REMEMBERED 11 1 11 12 +REMEMBER 9 1 10 9 +REMARKED 4 1 4 5 +REMARK 2 1 3 2 +REMAINING 0 1 0 1 +REMAIN 5 1 6 5 +RELOCATED 0 1 1 0 +RELIES 0 1 1 0 +REINTER 0 1 0 1 +REINFORCEMENTS 0 1 0 1 +REIGNED 1 1 2 1 +REGGIE 1 1 2 1 +REGAINED 0 1 1 0 +REGAIN 0 1 0 1 +REFUSED 7 1 8 7 +REENFORCEMENTS 0 1 1 0 +REEDER 
0 1 1 0 +RECORDS 2 1 2 3 +RECORD 6 1 7 6 +RECOGNIZED 3 1 3 4 +RECOGNISED 0 1 1 0 +REBUKED 0 1 0 1 +REBUK'D 0 1 1 0 +REALIZE 4 1 4 5 +RANCH 0 1 0 1 +RAINED 0 1 0 1 +RACHEL 15 1 16 15 +RACHAEL 0 1 0 1 +QUINCY 0 1 1 0 +QUINCEY 0 1 0 1 +QUASI 0 1 1 0 +QUART 0 1 1 0 +PYTHAGORIANS 0 1 0 1 +PYTHAGOREANS 0 1 1 0 +PUTTIN 0 1 1 0 +PUT 31 1 32 31 +PURSUS 0 1 0 1 +PURSE 1 1 2 1 +PURPOSED 0 1 1 0 +PURPOSE 10 1 10 11 +PURIST 0 1 1 0 +PUREST 0 1 0 1 +PURCHASE 0 1 0 1 +PSALM 2 1 3 2 +PROVED 6 1 6 7 +PROSELYTING 0 1 1 0 +PROSELY 0 1 0 1 +PRODIGAL 0 1 1 0 +PROCEED 1 1 1 2 +PRINCIPLE 3 1 4 3 +PRINCIPAL 4 1 4 5 +PRIESTHOO 0 1 0 1 +PREVENT 0 1 1 0 +PRETENSE 0 1 1 0 +PRETENCE 1 1 1 2 +PRESENT 20 1 20 21 +PRECIEUSES 0 1 1 0 +PRECEDE 0 1 1 0 +PRE 0 1 1 0 +PRACTICALLY 3 1 4 3 +POWER 21 1 22 21 +POSSESSED 3 1 3 4 +POSSESS 2 1 3 2 +PORTES 0 1 1 0 +PORT 1 1 1 2 +POPHAM 4 1 5 4 +POOS 0 1 1 0 +POOR 15 1 15 16 +POOL 1 1 1 2 +POND 2 1 3 2 +POLITICS 1 1 1 2 +POISONED 0 1 0 1 +POISON'D 0 1 1 0 +POINT 13 1 14 13 +PLEASANTS 0 1 0 1 +PLEASANCE 0 1 1 0 +PLEAD 1 1 1 2 +PLEA 0 1 1 0 +PLAY 12 1 12 13 +PLATINISTS 0 1 0 1 +PLATES 2 1 2 3 +PLAITS 0 1 1 0 +PLACES 1 1 1 2 +PLACE 38 1 39 38 +PINCH 0 1 0 1 +PIG 2 1 2 3 +PIERCED 1 1 1 2 +PIERC'D 0 1 1 0 +PIECE 1 1 1 2 +PIC 0 1 0 1 +PHILANTHROPIES 0 1 1 0 +PHILADELPHIAN 0 1 1 0 +PHILADELPHIA 0 1 0 1 +PHAEDRUS 0 1 1 0 +PH 0 1 1 0 +PETER 0 1 0 1 +PERVERTERS 0 1 1 0 +PERVERTED 1 1 1 2 +PERSONAL 7 1 7 8 +PERFECT 6 1 7 6 +PERCHES 0 1 1 0 +PEPPEL 0 1 0 1 +PEGRIN 0 1 0 1 +PEGRENNE 0 1 1 0 +PEGRE 0 1 1 0 +PEG 0 1 0 1 +PEASE 0 1 1 0 +PEARL 12 1 12 13 +PAUL 14 1 15 14 +PATIENTS 0 1 1 0 +PATIENT 0 1 0 1 +PATCH 3 1 4 3 +PAT 0 1 0 1 +PASSION 3 1 3 4 +PASSAGEWAY 0 1 0 1 +PASSAGE 8 1 9 8 +PASCHAL 0 1 1 0 +PARTICLES 0 1 1 0 +PARTICLE 0 1 0 1 +PARRICIDES 0 1 0 1 +PAROQUET 0 1 1 0 +PARLOUR 0 1 0 1 +PARLOR 1 1 2 1 +PARASITES 0 1 1 0 +PARAQUET 0 1 0 1 +PARALLELOGRAM 0 1 1 0 +PARALLELLOGRAM 0 1 0 1 +PAPAL 0 1 1 0 +PAPA 0 1 0 1 +PANTS 0 1 1 0 +PANS 1 1 1 2 +PANE 0 1 1 0 +PALLET 0 1 0 1 +PALATE 0 1 1 0 +PAIN 7 1 7 8 +P 1 1 1 2 +OWE 0 1 0 1 +OUTRAGED 0 1 0 1 +OUTRAGE 0 1 1 0 +OUT 100 1 100 101 +OUGHTER 0 1 1 0 +OTTLEY'S 0 1 1 0 +OTTER 0 1 0 1 +OTHERWORLDLY 0 1 1 0 +OTHERS 22 1 23 22 +OSH 0 1 1 0 +OSAGE 0 1 1 0 +ORS 0 1 0 1 +ORDERS 3 1 3 4 +ORDERED 2 1 3 2 +ORDER 22 1 22 23 +ORCHARD 3 1 4 3 +OPHELIA 0 1 1 0 +ONTO 0 1 1 0 +ONE'S 1 1 1 2 +OMISSIONS 0 1 0 1 +OMER 0 1 0 1 +OLIVE'S 2 1 3 2 +OLIVE 3 1 4 3 +OLD 39 1 40 39 +OKARRANT 0 1 0 1 +OFFICES 0 1 1 0 +OFFICER'S 0 1 0 1 +OFFICER 4 1 4 5 +OFFENSES 0 1 1 0 +OFFENCES 0 1 0 1 +OCCASIONA 0 1 0 1 +OBSERVED 5 1 6 5 +OBSERVE 4 1 4 5 +OBROWN 0 1 0 1 +OAKS 0 1 1 0 +OAKLEIGHS 0 1 0 1 +O'ER 0 1 1 0 +NOVELS 2 1 2 3 +NOVEL'S 0 1 1 0 +NOUGHT 0 1 1 0 +NOTTINGHAM 6 1 7 6 +NOTHIN 0 1 1 0 +NOTABLY 0 1 1 0 +NOSED 0 1 0 1 +NOSE 2 1 2 3 +NORTHWEST 1 1 1 2 +NORTHWARDS 0 1 1 0 +NORTHWARD 1 1 1 2 +NORTHERNERS 0 1 1 0 +NORTH 7 1 8 7 +NINE 10 1 11 10 +NIGHTFALL 0 1 1 0 +NEWBORN 0 1 0 1 +NEVERBIN 0 1 0 1 +NEVERBEND 5 1 6 5 +NEVER 63 1 63 64 +NET 1 1 1 2 +NELLY 0 1 1 0 +NELLIE 0 1 0 1 +NEIGHBOURHOOD 0 1 0 1 +NEIGHBORHOOD 0 1 1 0 +NEED 12 1 12 13 +NECK 5 1 6 5 +NECHERANT 0 1 0 1 +NEARER 3 1 4 3 +NEAR'S 0 1 0 1 +NEAR 6 1 6 7 +NE'ER 0 1 1 0 +NAY 5 1 5 6 +NAVE 0 1 0 1 +NAUGHT 0 1 0 1 +NATTY 1 1 2 1 +NARES 0 1 1 0 +NANNIE 0 1 0 1 +NAMED 3 1 4 3 +NAME 14 1 14 15 +MYLES 0 1 0 1 +MUZZLEEM 0 1 0 1 +MUSSULMANS 0 1 1 0 +MUSSELMENS 0 1 0 1 +MURCHISON 0 1 0 1 +MUMBIE 0 1 0 1 +MUCH 68 1 69 68 +MOVED 10 1 10 11 +MOUNTED 0 1 1 0 +MOUNTAIN 5 1 5 6 +MOUNT 0 1 0 1 +MOULDED 0 1 0 1 +MOST 50 1 51 50 +MORNING 21 1 
21 22 +MORNIN 1 1 2 1 +MORMONS 3 1 4 3 +MONTMARTRA 0 1 0 1 +MONTFICHE 0 1 0 1 +MONGOOSE 0 1 1 0 +MONGOO'S 0 1 0 1 +MOMBI 0 1 1 0 +MOLE 0 1 0 1 +MOLDED 0 1 1 0 +MOHICANS 0 1 0 1 +MOHICAN 0 1 1 0 +MO 0 1 1 0 +MITIGATED 0 1 0 1 +MISSOURIENS 0 1 0 1 +MISSOURIANS 1 1 2 1 +MISDEMEANOUR 0 1 0 1 +MISDEMEANOR 0 1 1 0 +MINE 6 1 7 6 +MIND 29 1 29 30 +MILLS 0 1 0 1 +MILLIONED 0 1 0 1 +MILLION'D 0 1 1 0 +MILLIMETRE 0 1 0 1 +MILLIMETER 0 1 1 0 +MILLIGRAM 0 1 1 0 +MILL 0 1 0 1 +MILITATED 0 1 1 0 +MILES 6 1 7 6 +MILAGRAM 0 1 0 1 +MICARTLE 0 1 0 1 +METRES 0 1 0 1 +METERS 0 1 1 0 +MET 10 1 10 11 +MESSRS 0 1 1 0 +MESSIERS 0 1 0 1 +MESS 0 1 1 0 +MERSEY 0 1 1 0 +MERRY 0 1 1 0 +MERGANSER 0 1 1 0 +MERGANCER 0 1 0 1 +MERCY 2 1 2 3 +MERCHISTON 0 1 1 0 +MER 0 1 1 0 +MEN'S 2 1 2 3 +MEET 6 1 7 6 +MEDICROFT 0 1 0 1 +MEANHAVED 0 1 0 1 +MEAN 9 1 9 10 +MEALYBACK 0 1 1 0 +MEALLY 0 1 0 1 +MEADOWCROFT 0 1 1 0 +MC 1 1 1 2 +MAYBE 0 1 0 1 +MAUSOLEUM 0 1 1 0 +MAU 0 1 0 1 +MATE 1 1 2 1 +MASTS 0 1 0 1 +MASTERS 3 1 3 4 +MASTER'S 1 1 2 1 +MASS 2 1 2 3 +MARVELOUS 0 1 1 0 +MARVELLOUS 0 1 0 1 +MARTFICHERE 0 1 0 1 +MARSHRA 0 1 0 1 +MARRIED 2 1 2 3 +MARIVAUX 1 1 2 1 +MARAY 0 1 0 1 +MARAVAUX 0 1 0 1 +MARAIS 0 1 1 0 +MANIFESTED 0 1 1 0 +MANIFEST 2 1 2 3 +MAKE 40 1 40 41 +MAIDS 4 1 4 5 +MAIDEN 0 1 0 1 +MADE 61 1 61 62 +MACDONALDS 0 1 1 0 +MAC 0 1 1 0 +LUSTY 0 1 0 1 +LURE 0 1 1 0 +LULLS 0 1 1 0 +LUIS 0 1 1 0 +LUCK 3 1 3 4 +LUCADIA 0 1 0 1 +LUBRICATE 0 1 1 0 +LUBRICADE 0 1 0 1 +LOWERED 0 1 0 1 +LOWED 0 1 0 1 +LOVES 3 1 3 4 +LOVE'S 0 1 1 0 +LOUISE 4 1 4 5 +LOU'S 0 1 1 0 +LOU 0 1 0 1 +LOSS 6 1 6 7 +LORNE 0 1 1 0 +LORN 0 1 0 1 +LORD 22 1 23 22 +LOOSE 5 1 5 6 +LOOK 31 1 32 31 +LOLLS 0 1 0 1 +LOGARTHEMS 0 1 0 1 +LOGARITHMS 0 1 1 0 +LOCRIS 0 1 1 0 +LOCKY 0 1 0 1 +LOCHRIS 0 1 0 1 +LOCATIA 0 1 0 1 +LOCATED 1 1 1 2 +LOCALIA 0 1 0 1 +LOADS 0 1 1 0 +LOAD 0 1 1 0 +LO 1 1 2 1 +LITTLE 101 1 101 102 +LITER 0 1 1 0 +LINKED 0 1 0 1 +LINK'D 0 1 1 0 +LILY'S 0 1 0 1 +LILLYS 0 1 1 0 +LILLY 0 1 1 0 +LILBURN 0 1 1 0 +LIE 1 1 1 2 +LEVIN 0 1 0 1 +LEUCEDES 0 1 0 1 +LETS 0 1 1 0 +LESSER 1 1 2 1 +LESS 28 1 28 29 +LEOKADIA 0 1 0 1 +LEOCADIUS 0 1 0 1 +LEOCADI 0 1 1 0 +LEFRANK 0 1 1 0 +LEEK 0 1 0 1 +LED 7 1 7 8 +LECOMTE 0 1 0 1 +LEAVING 5 1 6 5 +LEAVES 5 1 5 6 +LEAST 15 1 15 16 +LEASED 0 1 1 0 +LEAPING 3 1 3 4 +LEAF 3 1 3 4 +LEADS 2 1 3 2 +LAUROI 0 1 0 1 +LATER 14 1 14 15 +LARKSPER 0 1 0 1 +LARKSBURG 0 1 0 1 +LANTHORN 0 1 1 0 +LANTERN 0 1 0 1 +LANDA 0 1 0 1 +LAND 12 1 13 12 +LAMENT 0 1 0 1 +LAMBENT 0 1 1 0 +LALLIE 0 1 1 0 +LAKE 12 1 13 12 +LAD 2 1 3 2 +LABOUR 0 1 1 0 +LABOR 1 1 1 2 +L 1 1 2 1 +KONSTANTINE 0 1 0 1 +KNOWS 6 1 6 7 +KNIFE 9 1 10 9 +KNEED 0 1 1 0 +KNAVE 0 1 1 0 +KIVED 0 1 0 1 +KIRTLAND 0 1 1 0 +KINGDOMS 1 1 2 1 +KINGDOM'S 0 1 0 1 +KING'S 4 1 4 5 +KING 25 1 26 25 +KIEV 0 1 0 1 +KIERRASCURISTS 0 1 0 1 +KICKAPOOS 0 1 0 1 +KICK 1 1 2 1 +KEZWICK 0 1 0 1 +KESWICK 0 1 1 0 +KEOGH 0 1 1 0 +KATYA 0 1 0 1 +KANTI 0 1 0 1 +JUST 42 1 42 43 +JUS 0 1 1 0 +JOHNNIAGO 0 1 0 1 +JOHN 15 1 16 15 +JOB 4 1 4 5 +JEWELLERS 0 1 0 1 +JEWELER'S 0 1 1 0 +JEFFREY'S 0 1 0 1 +JEFFREY 0 1 0 1 +JAW 1 1 2 1 +JASPER 5 1 6 5 +JAPSER 0 1 0 1 +JAIL 3 1 4 3 +JACKKNIFE 0 1 0 1 +JACK 5 1 6 5 +IT'LL 2 1 2 3 +ISTIATED 0 1 0 1 +IRONS 0 1 0 1 +IRON'S 0 1 1 0 +INVENTORS 1 1 2 1 +INVENTILE 0 1 0 1 +INTRUSTING 0 1 0 1 +INTRENCHMENT 0 1 1 0 +INTEREST 9 1 10 9 +INTER 0 1 0 1 +INTENTS 0 1 1 0 +INTENT 0 1 1 0 +INTENSE 2 1 2 3 +INSURRECTIONOUS 0 1 0 1 +INSURRECTIONISTS 0 1 1 0 +INSURE 0 1 0 1 +INQUIRIES 1 1 1 2 +INQUIRED 2 1 2 3 +INQUIRE 0 1 0 1 +INFECTED 0 1 1 0 +INFANTS 2 1 3 2 +INFANTILE 1 1 2 1 +INFANT'S 0 1 0 
1 +INERLOCHY 0 1 0 1 +INDEED 29 1 29 30 +INCULCATED 0 1 1 0 +INCLOSED 0 1 1 0 +INCERTAINTY 0 1 1 0 +INCALCATED 0 1 0 1 +INACTION 0 1 1 0 +IMPRESSES 0 1 1 0 +IMPRESS'D 0 1 1 0 +IMPRACTICALLY 0 1 0 1 +IMPERFECT 0 1 0 1 +IMPELLED 2 1 2 3 +IMPEARLED 0 1 1 0 +IMMIGRATION 0 1 0 1 +IMMIGRANT 0 1 0 1 +IMMATURE 0 1 1 0 +IMMATEUR 0 1 0 1 +IKE 0 1 1 0 +IDIOSYNCRATICALLY 0 1 1 0 +IDIOS 0 1 0 1 +IDEAS 11 1 11 12 +HYMN 1 1 1 2 +HYDRAST 0 1 0 1 +HYDRAS 0 1 1 0 +HUTTED 0 1 0 1 +HUSBAND 8 1 9 8 +HUSBA 0 1 0 1 +HURRY 4 1 4 5 +HUNTLY 0 1 0 1 +HUNTLEY 0 1 1 0 +HUMPH 0 1 1 0 +HUMID 0 1 1 0 +HUMAN 15 1 15 16 +HULLO 0 1 0 1 +HOW 50 1 50 51 +HOUSEHOLDS 0 1 1 0 +HOUSEHOLD 4 1 4 5 +HOUSECLEANING 0 1 1 0 +HOUSE 34 1 34 35 +HOUND 0 1 1 0 +HOTBED 0 1 1 0 +HOSTESS 2 1 3 2 +HOSTES 0 1 0 1 +HOST 2 1 3 2 +HOSE 2 1 2 3 +HORTON 0 1 1 0 +HORSEPLAY 0 1 1 0 +HORNS 2 1 2 3 +HORACE 0 1 1 0 +HOPES 5 1 6 5 +HOPE 9 1 10 9 +HONOURS 0 1 0 1 +HONOURABLE 2 1 3 2 +HONOUR 1 1 2 1 +HONORS 0 1 1 0 +HONOR 4 1 4 5 +HONED 0 1 0 1 +HOLOCOST 0 1 0 1 +HOLOCAUST 0 1 1 0 +HOLD 7 1 8 7 +HIT 1 1 2 1 +HIRESUME 0 1 0 1 +HILDA'S 1 1 2 1 +HILDA 8 1 9 8 +HIGHEST 2 1 3 2 +HID 1 1 1 2 +HEY 0 1 0 1 +HETTY'S 0 1 0 1 +HETTY 0 1 1 0 +HERACLITUS 0 1 0 1 +HERACLEITUS 0 1 1 0 +HENCHMEN 0 1 1 0 +HENCHMAN 0 1 0 1 +HEN 1 1 2 1 +HELPS 0 1 0 1 +HELPED 2 1 3 2 +HELDA'S 0 1 0 1 +HELDA 0 1 0 1 +HELD 14 1 15 14 +HEIR 0 1 0 1 +HEAVEN 14 1 14 15 +HEARTSEASE 0 1 0 1 +HEARTS 8 1 8 9 +HEART'S 0 1 1 0 +HEART 27 1 28 27 +HEARKENED 0 1 0 1 +HE'D 2 1 3 2 +HAZEWRAPPED 0 1 1 0 +HAYES 0 1 0 1 +HAY 0 1 1 0 +HAWTREY 0 1 1 0 +HAWKIN 0 1 0 1 +HAWK 6 1 7 6 +HAVING 11 1 12 11 +HAT 1 1 1 2 +HARTS 0 1 1 0 +HAROLD 0 1 0 1 +HARMONIZED 1 1 2 1 +HARMONIZE 0 1 0 1 +HARKENED 0 1 1 0 +HARD 12 1 12 13 +HARBOURING 0 1 0 1 +HARBORING 0 1 1 0 +HARANGUE 0 1 1 0 +HARALD 0 1 1 0 +HAPPENED 6 1 6 7 +HAPPEN 4 1 5 4 +HANNA 0 1 1 0 +HANGINGS 0 1 1 0 +HANGING 2 1 2 3 +HANDSOME 3 1 3 4 +HANDS 16 1 17 16 +HANDLED 0 1 0 1 +HAMLET 5 1 6 5 +HAMLE 0 1 0 1 +HAM 0 1 1 0 +HALTREE 0 1 0 1 +HALLOA 0 1 1 0 +HALF 19 1 19 20 +HAL 0 1 1 0 +HAKON 0 1 1 0 +HAIRDRESSER 0 1 1 0 +HAIR 6 1 6 7 +GUY 0 1 0 1 +GUISE 0 1 1 0 +GUILE 0 1 0 1 +GUEST 3 1 4 3 +GUESS 0 1 0 1 +GRUE 0 1 0 1 +GROWS 1 1 2 1 +GROVES 0 1 0 1 +GROUND 10 1 10 11 +GRINGO 0 1 1 0 +GRIEFS 0 1 1 0 +GREY'S 0 1 1 0 +GREENOW 0 1 0 1 +GREEN 12 1 12 13 +GREEING 0 1 1 0 +GREEDS 0 1 0 1 +GREE 0 1 0 1 +GREATER 8 1 9 8 +GRAY'S 0 1 0 1 +GRATES 0 1 0 1 +GRATEFUL 2 1 2 3 +GRAPEVINE 0 1 1 0 +GRAPE 0 1 0 1 +GRAND 1 1 2 1 +GRAMOPHONE 0 1 1 0 +GRAMMAPHONE 0 1 0 1 +GRAM 0 1 1 0 +GRAHAM 0 1 0 1 +GRAFT 0 1 0 1 +GRAF 0 1 1 0 +GRADES 0 1 1 0 +GOVERNOR 14 1 14 15 +GOVERNMENT 7 1 8 7 +GOVERNED 0 1 1 0 +GOVERN 0 1 0 1 +GOULD 0 1 0 1 +GOOD'N 0 1 0 1 +GOOD 69 1 69 70 +GOOBERS 0 1 1 0 +GOING 26 1 26 27 +GODOLPH'S 0 1 0 1 +GOATS 0 1 0 1 +GOAT'S 1 1 2 1 +GO 37 1 37 38 +GIRLS 7 1 7 8 +GIRL'S 1 1 2 1 +GIRARD 0 1 1 0 +GIP 0 1 0 1 +GILCHRIST'S 0 1 1 0 +GILCHER'S 0 1 0 1 +GIER 0 1 1 0 +GIAOURS 0 1 1 0 +GHIP 3 1 4 3 +GERARD 0 1 0 1 +GERALLY 0 1 0 1 +GEOFFREY'S 0 1 1 0 +GEOFFREY 0 1 1 0 +GENTLEMEN 5 1 6 5 +GENTLEMAN 8 1 8 9 +GENERALSHIP 0 1 0 1 +GENERALLY 7 1 8 7 +GENERAL 16 1 17 16 +GEAR 0 1 0 1 +GAYLY 0 1 1 0 +GAVE 31 1 31 32 +GASE 0 1 0 1 +GARDENS 2 1 3 2 +GARDEN'S 0 1 0 1 +GAMEWELL 6 1 7 6 +GAME 4 1 4 5 +GALLIA 0 1 0 1 +GAILY 0 1 0 1 +FUTURISTIC 0 1 1 0 +FUTUREISTIC 0 1 0 1 +FURTHEST 0 1 0 1 +FURS 0 1 0 1 +FUN 0 1 0 1 +FULNESS 0 1 1 0 +FULLNESS 0 1 0 1 +FRISKLY 0 1 0 1 +FRISKILY 0 1 1 0 +FRIGHTER 0 1 0 1 +FRIGHTENED 2 1 2 3 +FRIEND 20 1 21 20 +FREEZED 0 1 0 1 +FRANKS 0 1 0 1 +FRANK 2 1 2 3 +FRANCS 0 1 
1 0 +FOWLED 0 1 0 1 +FORWARDED 0 1 1 0 +FORTS 0 1 0 1 +FORMERLY 0 1 0 1 +FORMALLY 2 1 3 2 +FORBES'S 0 1 1 0 +FOOTED 1 1 1 2 +FOLLOWED 14 1 15 14 +FLUFFINOSE 0 1 1 0 +FLOWER 4 1 4 5 +FLOUR 0 1 1 0 +FLIGHT 2 1 3 2 +FLAPHANO'S 0 1 0 1 +FIVE 15 1 15 16 +FIT 0 1 0 1 +FIRST 67 1 67 68 +FIRS 0 1 1 0 +FIREBUGS 0 1 1 0 +FIREBALL 0 1 1 0 +FIR 9 1 9 10 +FINE 16 1 17 16 +FILT 0 1 0 1 +FILLS 2 1 2 3 +FILL 1 1 2 1 +FILIATED 0 1 0 1 +FIGHTER 0 1 1 0 +FETE 2 1 3 2 +FERRAGUT 0 1 0 1 +FERDINANDO 4 1 5 4 +FERDINAND 0 1 0 1 +FELLED 1 1 1 2 +FEES 0 1 1 0 +FEELS 1 1 2 1 +FEELINS 0 1 0 1 +FEELIN'S 0 1 1 0 +FEEL 17 1 18 17 +FEATURES 9 1 9 10 +FEARED 2 1 3 2 +FEAR 12 1 12 13 +FAVOURITE 1 1 1 2 +FAVORITE 3 1 4 3 +FAUDOIS 0 1 0 1 +FARTHEST 1 1 2 1 +FARTHER 6 1 6 7 +FARRAGUT 0 1 1 0 +FAR 29 1 30 29 +FANIA 0 1 0 1 +FALLING 2 1 2 3 +FALLEN 2 1 3 2 +FAIREST 0 1 0 1 +EYE 14 1 15 14 +EXIST 2 1 3 2 +EXCUSE 3 1 3 4 +EXAGGERUS 0 1 0 1 +EVERYDAY 2 1 3 2 +EVENINGS 1 1 1 2 +EVENIN'S 0 1 1 0 +EVA 2 1 3 2 +EULO 0 1 0 1 +ETHNEE 0 1 0 1 +ETHINAY 0 1 0 1 +ETHEL 0 1 0 1 +ESTATE 2 1 3 2 +ESTAFFANIA 0 1 0 1 +ESPRIT 0 1 1 0 +ESCHEATED 0 1 1 0 +ER 0 1 1 0 +ENTRUSTING 0 1 1 0 +ENTRENCHMENT 0 1 0 1 +ENTRANCE 4 1 4 5 +ENTHRIBING 0 1 0 1 +ENTHRALMENT 0 1 1 0 +ENTHRALIMENT 0 1 0 1 +ENTERED 21 1 21 22 +ENSURE 0 1 1 0 +ENQUIRIES 0 1 1 0 +ENQUIRED 0 1 1 0 +ENQUIRE 0 1 1 0 +ENDEAVOUR 1 1 1 2 +ENDEAVOR 0 1 1 0 +ENCLOSED 0 1 0 1 +EMISSIONS 0 1 1 0 +EMIGRATION 0 1 1 0 +EMIGRANT 0 1 1 0 +EMERGED 0 1 0 1 +EMERG'D 0 1 1 0 +EM 0 1 1 0 +ELSINORE 0 1 1 0 +ELSINOR 0 1 0 1 +ELMO'S 0 1 1 0 +ELKO 0 1 0 1 +ELECT 1 1 2 1 +ELEC 0 1 0 1 +ELCHO 0 1 1 0 +ELABORATE 2 1 3 2 +EITHER 8 1 8 9 +EFFECT 9 1 9 10 +EEN 0 1 0 1 +EDMOST 0 1 0 1 +EDITION 0 1 1 0 +EASE 2 1 3 2 +EARSHOT 0 1 1 0 +EARS 4 1 5 4 +EAR 6 1 6 7 +EA 0 1 0 1 +DYNAS 0 1 0 1 +DYKES 0 1 1 0 +DUSPORT 0 1 0 1 +DURING 11 1 12 11 +DURE 0 1 0 1 +DUNNO 0 1 1 0 +DUMPY 1 1 2 1 +DUMAS 0 1 1 0 +DUERER 0 1 1 0 +DRUGGIST'S 0 1 1 0 +DRUGGIST 0 1 0 1 +DROPIDAS 0 1 1 0 +DRIPPING 0 1 1 0 +DREWING 0 1 0 1 +DRESSER 0 1 0 1 +DRALE 0 1 0 1 +DOWNSTAIRS 1 1 1 2 +DOWN 72 1 73 72 +DOVES 0 1 1 0 +DOVE'S 0 1 0 1 +DOUZE 0 1 1 0 +DORNEST 0 1 0 1 +DOORSTEP 0 1 0 1 +DOOR 35 1 36 35 +DONATISTS 1 1 2 1 +DONATIST 0 1 0 1 +DONALDS 0 1 0 1 +DONA 1 1 2 1 +DOLL 1 1 2 1 +DOES 14 1 14 15 +DOCTRIS 0 1 0 1 +DOCTRESS 0 1 1 0 +DISTRICTS 1 1 2 1 +DISTRICT 1 1 1 2 +DISMAYEDESTAFHANIA 0 1 0 1 +DISMAYED 0 1 1 0 +DISCOLOURED 0 1 1 0 +DISCOLORED 0 1 0 1 +DINAH'S 0 1 1 0 +DINAH 1 1 2 1 +DINA 0 1 0 1 +DIKES 0 1 0 1 +DIFFERENCES 1 1 2 1 +DIFFERENCE 5 1 5 6 +DIETRIBE 0 1 0 1 +DIATRIBE 0 1 1 0 +DIAS 0 1 1 0 +DIALOGUES 1 1 2 1 +DIALECTS 0 1 0 1 +DEW 1 1 2 1 +DETONICHAUCHANT 0 1 0 1 +DEMEANOUR 1 1 1 2 +DEMEANOR 0 1 1 0 +DELLA 1 1 1 2 +DELICATE 3 1 4 3 +DELIBERATIVE 0 1 1 0 +DELIBERATE 2 1 2 3 +DELEGATE 0 1 0 1 +DELA 0 1 0 1 +DEFINE 4 1 5 4 +DEED 2 1 3 2 +DEDALOS 0 1 1 0 +DECK 1 1 1 2 +DECENCY 3 1 4 3 +DECENCIES 0 1 0 1 +DECEIVING 0 1 1 0 +DEARIE 0 1 0 1 +DEAD 5 1 5 6 +DAWNS 0 1 0 1 +DAWN'S 0 1 1 0 +DARK 16 1 17 16 +DAN 0 1 0 1 +DAL 0 1 0 1 +DAIS 0 1 0 1 +DAIRY 4 1 5 4 +DAHLIA 0 1 0 1 +DA 0 1 0 1 +D'ESTE 0 1 1 0 +CYN 0 1 1 0 +CUTTERS 0 1 1 0 +CURVED 0 1 1 0 +CURTLIN 0 1 0 1 +CUIRASCURISTS 0 1 0 1 +CRYSTAL 3 1 4 3 +CRUSWELLER 0 1 0 1 +CRUSTWELLS 0 1 0 1 +CROSSTREES 0 1 1 0 +CROSS 5 1 5 6 +CRISTEL 0 1 0 1 +CRESWELL 0 1 0 1 +CRESTWELLERS 0 1 0 1 +CRESSWELLS 0 1 1 0 +CRASWELLERS 0 1 1 0 +CRASWELLER 0 1 1 0 +CRADICALLY 0 1 0 1 +CRADIC 0 1 0 1 +COURTYARD 4 1 4 5 +COURTS 2 1 2 3 +COURT'S 1 1 2 1 +COURSE 19 1 19 20 +COURANT 0 1 1 0 +COUNTRY'S 1 1 2 1 +COUNTRY 25 1 25 26 
+COUNSELS 0 1 1 0 +COUNSELLED 1 1 2 1 +COUNSEL 1 1 1 2 +COUNCILS 0 1 0 1 +COSTS 0 1 1 0 +COST 4 1 4 5 +CORRELATES 0 1 1 0 +CORRECTORISTIC 0 1 0 1 +COROLLETS 0 1 0 1 +CORNIERS 0 1 0 1 +CORN 4 1 5 4 +CORLIE 0 1 0 1 +CORALIE 0 1 1 0 +COPE 0 1 0 1 +COOK 1 1 2 1 +CONTROLL 0 1 0 1 +CONTROL 3 1 4 3 +CONTI 0 1 1 0 +CONTENSED 0 1 0 1 +CONTAINING 0 1 0 1 +CONTAINED 2 1 3 2 +CONTACT 0 1 1 0 +CONSTANTINE 0 1 1 0 +CONSIDERATE 0 1 1 0 +CONSIDERABLE 6 1 6 7 +CONSIDER 1 1 1 2 +CONSID'BLE 0 1 1 0 +CONQUERING 0 1 0 1 +CONQUERIN 0 1 1 0 +CONJUROR 0 1 0 1 +CONJURER 0 1 1 0 +CONDUCT 6 1 6 7 +CONDENSE 0 1 1 0 +COMTE 1 1 1 2 +COMPOSSIBLE 0 1 0 1 +COMPOSSER 0 1 1 0 +COMPOSED 1 1 1 2 +COMPOSE 0 1 1 0 +COMMENTATORS 0 1 1 0 +COMMANDMENTS 0 1 1 0 +COMMAND 2 1 2 3 +COMING 6 1 7 6 +COLOURED 1 1 1 2 +COLORLESS 0 1 0 1 +COLORED 1 1 2 1 +COLLARS 1 1 1 2 +COEXIST 0 1 0 1 +COAL 0 1 1 0 +CLUE 1 1 1 2 +CLOUDS 6 1 6 7 +CLOUD 10 1 11 10 +CLOTH 3 1 4 3 +CLEW 0 1 1 0 +CLENCHED 1 1 1 2 +CLEANING 2 1 2 3 +CLAUSE 0 1 1 0 +CLAS 0 1 0 1 +CIVET 0 1 1 0 +CITADELLED 0 1 1 0 +CITADELED 0 1 0 1 +CIGARETTE 1 1 2 1 +CHRONICAL 0 1 0 1 +CHRISTIAN 6 1 6 7 +CHRISTAIN 0 1 1 0 +CHRIST 22 1 22 23 +CHORD 0 1 1 0 +CHOLERIST 0 1 0 1 +CHIAROSCURISTS 0 1 1 0 +CHIAROSCURIST 0 1 1 0 +CHEROOT 0 1 1 0 +CHEQUER 0 1 0 1 +CHEQUE 0 1 0 1 +CHECKER 0 1 1 0 +CHECK 7 1 8 7 +CHATTERBOX 0 1 1 0 +CHATTER 0 1 0 1 +CHARLES 2 1 3 2 +CHARACTERISTIC 9 1 10 9 +CHANGES 3 1 3 4 +CHANGED 6 1 6 7 +CERTAINTY 2 1 2 3 +CENTRED 0 1 1 0 +CENTRE 1 1 1 2 +CENTER 1 1 2 1 +CENDENARIES 0 1 1 0 +CELEBRATE 1 1 1 2 +CEASED 1 1 1 2 +CEASD 0 1 1 0 +CAULDWELL 0 1 0 1 +CAUGHT 9 1 10 9 +CAT 6 1 7 6 +CARVED 1 1 1 2 +CARPATIUS 0 1 0 1 +CARPACCIO'S 0 1 1 0 +CAPLICE 0 1 0 1 +CAPLESS 0 1 1 0 +CANVASS 0 1 1 0 +CANVAS 4 1 4 5 +CANDLELIGHT 0 1 0 1 +CANDLE 1 1 2 1 +CALLED 24 1 24 25 +CALDWELL 0 1 1 0 +C 0 1 1 0 +BY 248 1 248 249 +BUTTED 0 1 1 0 +BURNE 1 1 1 2 +BURGOYNE 0 1 1 0 +BUNNITT 0 1 0 1 +BUNNIT 0 1 1 0 +BULBUL 0 1 0 1 +BUILD 1 1 1 2 +BUGS 0 1 0 1 +BUDDED 0 1 0 1 +BUCHANAN 0 1 1 0 +BROWN 9 1 10 9 +BROTHER 7 1 8 7 +BROCKTON'S 0 1 0 1 +BRITON 0 1 0 1 +BRITANNULISTS 0 1 1 0 +BRISK 0 1 1 0 +BRINGS 0 1 0 1 +BRINGING 4 1 5 4 +BRETHREN 2 1 2 3 +BREAKFAST 1 1 1 2 +BREAKFAS 0 1 1 0 +BREAD 4 1 5 4 +BRANDS 1 1 2 1 +BRAND 0 1 0 1 +BRANCH 3 1 4 3 +BRAGOLON 0 1 0 1 +BRAGELONE 0 1 0 1 +BRACTON'S 0 1 1 0 +BRACE 0 1 0 1 +BOXWOMEN 0 1 0 1 +BOWLS 0 1 0 1 +BOWER 1 1 1 2 +BOUGHT 0 1 0 1 +BOTANY 0 1 1 0 +BOTANICAL 1 1 2 1 +BORN 7 1 8 7 +BOREHOUND 0 1 0 1 +BORDERS 3 1 4 3 +BORDER 3 1 3 4 +BOOT 0 1 0 1 +BOOKS 8 1 8 9 +BOOKKEEPER 0 1 1 0 +BOLLS 0 1 1 0 +BOAT 2 1 2 3 +BOAR 0 1 1 0 +BLUESTS 0 1 0 1 +BLUESKINS 1 1 2 1 +BLUESKIN 0 1 0 1 +BLUE 20 1 21 20 +BLINT 0 1 0 1 +BLESSINGS 2 1 3 2 +BLESSING 2 1 2 3 +BLASTS 0 1 1 0 +BLAST 0 1 0 1 +BITKEEPER 0 1 0 1 +BILLYGOAT 0 1 1 0 +BILLED 0 1 1 0 +BILL 6 1 6 7 +BID 1 1 1 2 +BERKES 0 1 0 1 +BERGIN 0 1 0 1 +BENCH 3 1 4 3 +BEIN 0 1 1 0 +BEGIRT 0 1 0 1 +BEGGED 1 1 1 2 +BEG 1 1 2 1 +BEFORE 74 1 75 74 +BEFIT 0 1 1 0 +BEFALL 0 1 0 1 +BEFAL 0 1 1 0 +BEES 0 1 0 1 +BEEDER 0 1 1 0 +BEECHED 0 1 0 1 +BEEBE 0 1 1 0 +BEDIMMED 0 1 1 0 +BEDEMNED 0 1 0 1 +BED 12 1 12 13 +BEATER 0 1 1 0 +BEACHED 0 1 1 0 +BAYLIM'S 0 1 0 1 +BATTANICAL 0 1 0 1 +BASKET 1 1 2 1 +BASCULADES 0 1 0 1 +BARTANY 0 1 0 1 +BANDS 4 1 4 5 +BANDOMERE 0 1 0 1 +BALL 4 1 4 5 +BALINE 0 1 0 1 +BALEEN 0 1 1 0 +BALAAM'S 0 1 1 0 +BAINS 0 1 1 0 +BADR'D 0 1 0 1 +BADGES 0 1 1 0 +BADGERS 0 1 0 1 +BADAUDERIE 0 1 1 0 +BACK 45 1 45 46 +BABRUSA 0 1 0 1 +BABIRUSA 0 1 1 0 +AWHILE 0 1 1 0 +AWAY 50 1 50 51 +AUTHOR 1 1 1 2 +AUNT 4 1 5 4 +AU 0 1 1 0 
+ATTORIAN 0 1 0 1 +ATTITUDE 3 1 4 3 +ATTITSON 0 1 0 1 +ATMOSPHERIC 0 1 1 0 +ATMOSPHERE 2 1 2 3 +ATHOLEMEN 0 1 1 0 +ATHLETE 1 1 2 1 +ATCHISON 0 1 1 0 +ASTRO 0 1 0 1 +ASTOR 1 1 2 1 +ASSEMBLED 2 1 3 2 +ASCENDENCY 0 1 1 0 +ARROW 2 1 2 3 +ARRONDISSEMENT 0 1 1 0 +ARRIVING 1 1 2 1 +ARRESTS 0 1 1 0 +ARMED 1 1 2 1 +ARM 5 1 5 6 +ARK 0 1 0 1 +ARGYLE 1 1 2 1 +ARDOUR 0 1 1 0 +ARDOR 0 1 0 1 +ARDLE 0 1 1 0 +ARCHER 0 1 0 1 +ARC 1 1 2 1 +APPROVES 0 1 1 0 +APPRENTICED 0 1 0 1 +APPRENTICE 2 1 3 2 +APPEALED 0 1 1 0 +APPEAL 1 1 1 2 +ANYWHERE 5 1 6 5 +ANYTHING 17 1 17 18 +ANYMORE 0 1 1 0 +ANTHROPIES 0 1 0 1 +ANTETING 0 1 0 1 +ANTEDATING 0 1 1 0 +ANSWERED 14 1 14 15 +ANSWERD 0 1 1 0 +ANNE 2 1 2 3 +ANNALS 2 1 3 2 +ANNAL 0 1 0 1 +ANGULATIONS 0 1 0 1 +ANGELS 2 1 3 2 +ANGEL 1 1 1 2 +ANDRES 0 1 0 1 +ANDRE 0 1 0 1 +ANDDELA 0 1 0 1 +ANAXAGORAS 0 1 1 0 +AMY 0 1 0 1 +AMPHITHEATRE 0 1 0 1 +AMPHITHEATER 0 1 1 0 +AMEN 0 1 0 1 +AMDELLA 0 1 0 1 +AMASS 0 1 1 0 +ALTERNATIVE 0 1 1 0 +ALLUVION 0 1 1 0 +ALLUVIAN 0 1 0 1 +ALEXANDRA 2 1 3 2 +ALEXANDER 13 1 13 14 +ALBANS 0 1 1 0 +ALBAN'S 0 1 0 1 +ALAP 0 1 0 1 +AIR 24 1 25 24 +AIGNAN 0 1 1 0 +AIDS 0 1 0 1 +AID 1 1 2 1 +AH 6 1 7 6 +AFTERDECK 0 1 1 0 +AFTER 58 1 58 59 +AFFRIGHTENED 0 1 1 0 +AFFILIATED 1 1 2 1 +AFFECTED 3 1 3 4 +AFFECT 0 1 1 0 +ADVENTURER 0 1 0 1 +ADVENTURE 0 1 1 0 +ADONNA 0 1 0 1 +ADONA 0 1 1 0 +ADOCTION 0 1 0 1 +ADETE 0 1 0 1 +ADELAX 0 1 0 1 +ADDITION 0 1 0 1 +ACTOR'S 0 1 0 1 +ACTOR 1 1 2 1 +ACTION 11 1 11 12 +ACKNOWLEDGMENT 0 1 0 1 +ACKNOWLEDGEMENT 0 1 1 0 +ACCORD 2 1 2 3 +ABRET 0 1 0 1 +ABOLITIONISTS 0 1 1 0 +ABOLITIONIST 0 1 0 1 +ABLE'S 0 1 0 1 +ABDUCTION 0 1 1 0 +ABBEY 0 1 0 1 +ABBE 0 1 1 0 +ZOOLOGY 1 0 1 1 +ZOOF 1 0 1 1 +ZION 1 0 1 1 +ZEST 1 0 1 1 +ZEAL 2 0 2 2 +YOUTH 5 0 5 5 +YOURSELVES 1 0 1 1 +YOURSELF 8 0 8 8 +YOURS 3 0 3 3 +YOUR 109 0 109 109 +YOUNGER 1 0 1 1 +YOU'VE 4 0 4 4 +YOU'D 3 0 3 3 +YORKSHIRE 2 0 2 2 +YORK 6 0 6 6 +YONDER 1 0 1 1 +YOKE 1 0 1 1 +YIELDING 3 0 3 3 +YIELDED 2 0 2 2 +YIELD 3 0 3 3 +YET 43 0 43 43 +YESTERDAY 3 0 3 3 +YES 33 0 33 33 +YELLOW 9 0 9 9 +YELL 1 0 1 1 +YEARS 34 0 34 34 +YEARLY 2 0 2 2 +YEAR 5 0 5 5 +YEA 1 0 1 1 +YARN 2 0 2 2 +YAMS 1 0 1 1 +YACHTSMAN 1 0 1 1 +YACHT 3 0 3 3 +WROUGHT 2 0 2 2 +WROTE 6 0 6 6 +WRONGS 1 0 1 1 +WRONG 10 0 10 10 +WRITTEN 7 0 7 7 +WRITS 1 0 1 1 +WRITINGS 2 0 2 2 +WRITING 6 0 6 6 +WRITHING 1 0 1 1 +WRITES 1 0 1 1 +WRITER 2 0 2 2 +WRIT 1 0 1 1 +WRIST 1 0 1 1 +WRETCHEDNESS 2 0 2 2 +WRESTLERS 1 0 1 1 +WRESTLED 1 0 1 1 +WOUNDED 1 0 1 1 +WOUND 1 0 1 1 +WOULDN'T 5 0 5 5 +WORTHY 6 0 6 6 +WORSTED 1 0 1 1 +WORSHIP 3 0 3 3 +WORRY 3 0 3 3 +WORN 1 0 1 1 +WORM 4 0 4 4 +WORLD'S 2 0 2 2 +WORLD 37 0 37 37 +WORKS 8 0 8 8 +WORKMEN 1 0 1 1 +WORKING 3 0 3 3 +WORKERS 1 0 1 1 +WORKED 5 0 5 5 +WORK 34 0 34 34 +WORDS 20 0 20 20 +WORD 20 0 20 20 +WOOL 3 0 3 3 +WOODS 3 0 3 3 +WOODLEY 3 0 3 3 +WOODEN 3 0 3 3 +WONDERS 1 0 1 1 +WONDERINGLY 1 0 1 1 +WONDERFUL 7 0 7 7 +WONDERED 2 0 2 2 +WONDER 7 0 7 7 +WON'T 15 0 15 15 +WOMAN 28 0 28 28 +WOLF 1 0 1 1 +WOKE 1 0 1 1 +WOE 2 0 2 2 +WIZARD'S 1 0 1 1 +WIZARD 3 0 3 3 +WIVES 3 0 3 3 +WITTY 1 0 1 1 +WITTILY 1 0 1 1 +WITS 1 0 1 1 +WITNESSING 1 0 1 1 +WITNESSES 1 0 1 1 +WITNESS 1 0 1 1 +WITHOUT 37 0 37 37 +WITHIN 23 0 23 23 +WITHERING 1 0 1 1 +WITHERED 1 0 1 1 +WITHDRAWN 2 0 2 2 +WITHDRAW 1 0 1 1 +WITHAL 1 0 1 1 +WITCH 3 0 3 3 +WIT 3 0 3 3 +WISHES 3 0 3 3 +WISHERS 1 0 1 1 +WISHED 6 0 6 6 +WISH 11 0 11 11 +WISE 5 0 5 5 +WISDOM 3 0 3 3 +WIRE 4 0 4 4 +WIPED 1 0 1 1 +WINNING 1 0 1 1 +WINKING 1 0 1 1 +WINK 1 0 1 1 +WINIFRED 1 0 1 1 +WINGS 5 0 5 5 +WING 3 0 3 3 +WINE 7 0 7 7 +WINDY 1 0 1 1 +WINDS 3 0 3 3 
+WINDOWS 7 0 7 7 +WINDOW 16 0 16 16 +WINDING 1 0 1 1 +WIN 2 0 2 2 +WILY 1 0 1 1 +WILSON 1 0 1 1 +WILLS 1 0 1 1 +WILLOWY 1 0 1 1 +WILLINGLY 2 0 2 2 +WILLING 2 0 2 2 +WILLED 1 0 1 1 +WILDERNESS 1 0 1 1 +WILD 9 0 9 9 +WIDTH 1 0 1 1 +WIDEST 1 0 1 1 +WIDENING 1 0 1 1 +WIDELY 1 0 1 1 +WIDE 9 0 9 9 +WICKET 1 0 1 1 +WICKEDNESS 1 0 1 1 +WICKEDEST 1 0 1 1 +WICKED 3 0 3 3 +WHY 44 0 44 44 +WHOM 18 0 18 18 +WHOLESOME 1 0 1 1 +WHOLE 25 0 25 25 +WHOEVER 3 0 3 3 +WHITNEY 1 0 1 1 +WHISTLING 1 0 1 1 +WHISTLE 2 0 2 2 +WHISPERED 7 0 7 7 +WHISPER 1 0 1 1 +WHISKERS 1 0 1 1 +WHIRLWIND 3 0 3 3 +WHIM 1 0 1 1 +WHILST 3 0 3 3 +WHEREVER 3 0 3 3 +WHEREUPON 3 0 3 3 +WHEREON 1 0 1 1 +WHEREFORE 1 0 1 1 +WHEREBY 1 0 1 1 +WHENEVER 3 0 3 3 +WHELPS 1 0 1 1 +WHEELING 1 0 1 1 +WHEELER 1 0 1 1 +WHEELED 3 0 3 3 +WHEEL 1 0 1 1 +WHEAT 2 0 2 2 +WHARVES 1 0 1 1 +WETTING 1 0 1 1 +WET 9 0 9 9 +WESTERN 1 0 1 1 +WESLEY 2 0 2 2 +WEREN'T 2 0 2 2 +WENT 25 0 25 25 +WELFARE 2 0 2 2 +WEIGHT 2 0 2 2 +WEIGHED 3 0 3 3 +WEIGH 1 0 1 1 +WEEPING 4 0 4 4 +WEEP 1 0 1 1 +WEEKS 4 0 4 4 +WEEK 2 0 2 2 +WEEDS 3 0 3 3 +WEED 1 0 1 1 +WEBS 1 0 1 1 +WEB 1 0 1 1 +WEASEL 1 0 1 1 +WEARY 1 0 1 1 +WEARING 2 0 2 2 +WEARINESS 2 0 2 2 +WEARILY 2 0 2 2 +WEARERS 1 0 1 1 +WEAPON 2 0 2 2 +WEALTH 5 0 5 5 +WEAKNESS 3 0 3 3 +WEAKENED 2 0 2 2 +WEAK 6 0 6 6 +WAYS 1 0 1 1 +WAX 1 0 1 1 +WAVING 2 0 2 2 +WAVERING 2 0 2 2 +WAVED 1 0 1 1 +WATSON 5 0 5 5 +WATERY 2 0 2 2 +WATERS 6 0 6 6 +WATERCRESS 1 0 1 1 +WATCHING 1 0 1 1 +WATCHFULNESS 1 0 1 1 +WATCHFUL 1 0 1 1 +WATCHED 7 0 7 7 +WATCH 2 0 2 2 +WASTEFUL 4 0 4 4 +WASTED 2 0 2 2 +WASHINGTON 1 0 1 1 +WASHING 1 0 1 1 +WASH 1 0 1 1 +WARY 1 0 1 1 +WARRIORS 2 0 2 2 +WARRENTON'S 2 0 2 2 +WARRENTON 4 0 4 4 +WARRANTED 2 0 2 2 +WARRANT 1 0 1 1 +WARN 1 0 1 1 +WARMEST 1 0 1 1 +WARMED 1 0 1 1 +WARM 4 0 4 4 +WARDS 1 0 1 1 +WARD 1 0 1 1 +WANTS 3 0 3 3 +WANTING 3 0 3 3 +WANTED 8 0 8 8 +WANT 19 0 19 19 +WANDERED 2 0 2 2 +WANDER 2 0 2 2 +WAN 1 0 1 1 +WALNUT 1 0 1 1 +WALLS 2 0 2 2 +WALL 6 0 6 6 +WALKS 1 0 1 1 +WALKING 2 0 2 2 +WALKETH 1 0 1 1 +WALKED 6 0 6 6 +WALK 5 0 5 5 +WAITING 7 0 7 7 +WAITERS 1 0 1 1 +WAITER 1 0 1 1 +WAITED 1 0 1 1 +WAIT 8 0 8 8 +WAISTCOAT 1 0 1 1 +WAIST 1 0 1 1 +WAILING 1 0 1 1 +WAGED 1 0 1 1 +WADDLING 1 0 1 1 +W 3 0 3 3 +VULGAR 1 0 1 1 +VOYAGING 2 0 2 2 +VOYAGES 1 0 1 1 +VOYAGE 2 0 2 2 +VOWS 1 0 1 1 +VOTES 1 0 1 1 +VOTERS 1 0 1 1 +VOLUME 1 0 1 1 +VOLTAIRE'S 1 0 1 1 +VOLTAIRE 1 0 1 1 +VOICES 2 0 2 2 +VIVIDLY 2 0 2 2 +VIVID 2 0 2 2 +VIVE 1 0 1 1 +VIVACITY 1 0 1 1 +VITALITY 1 0 1 1 +VITAL 1 0 1 1 +VISTA 1 0 1 1 +VISITED 4 0 4 4 +VISIT 4 0 4 4 +VISION 2 0 2 2 +VISIBLE 2 0 2 2 +VIRTUOUS 1 0 1 1 +VIRTUE 3 0 3 3 +VIRTUALLY 2 0 2 2 +VIRGINS 1 0 1 1 +VIRGIN 2 0 2 2 +VIPER 2 0 2 2 +VIOLET 1 0 1 1 +VIOLENT 5 0 5 5 +VIOLENCE 5 0 5 5 +VIOLATED 1 0 1 1 +VINEGAR 1 0 1 1 +VINDICATION 1 0 1 1 +VINDICATE 1 0 1 1 +VILLAGE 4 0 4 4 +VIKING 3 0 3 3 +VIGOROUSLY 1 0 1 1 +VIGOROUS 1 0 1 1 +VIGILANCE 1 0 1 1 +VIEWS 1 0 1 1 +VIEWED 1 0 1 1 +VIE 1 0 1 1 +VICTUALS 1 0 1 1 +VICTORY 1 0 1 1 +VICTORIES 1 0 1 1 +VICTIM 2 0 2 2 +VICOMTE 1 0 1 1 +VICISSITUDES 1 0 1 1 +VICIOUS 1 0 1 1 +VICINITY 1 0 1 1 +VEXED 1 0 1 1 +VEXATION 1 0 1 1 +VESTURE 1 0 1 1 +VESTIBULE 1 0 1 1 +VESSEL 2 0 2 2 +VERTEBRAL 1 0 1 1 +VERSES 1 0 1 1 +VERSED 3 0 3 3 +VERIFY 1 0 1 1 +VERGE 1 0 1 1 +VENTURED 1 0 1 1 +VENTURE 1 0 1 1 +VENICE 1 0 1 1 +VENGEANCE 2 0 2 2 +VENERABLE 1 0 1 1 +VELVET 1 0 1 1 +VELOCITY 2 0 2 2 +VEILED 2 0 2 2 +VEHICLE 1 0 1 1 +VEHEMENTLY 1 0 1 1 +VAULT 1 0 1 1 +VASTLY 1 0 1 1 +VAST 5 0 5 5 +VASSALS 1 0 1 1 +VARYING 2 0 2 2 +VARIOUS 7 0 7 7 +VARIETY 2 0 2 2 +VARIETIES 1 0 
1 1 +VARIED 1 0 1 1 +VARIATIONS 1 0 1 1 +VARIANCE 1 0 1 1 +VARIABILITY 2 0 2 2 +VANQUISHED 2 0 2 2 +VANITY 1 0 1 1 +VANISHED 2 0 2 2 +VANISH 2 0 2 2 +VANDERPOOLS 1 0 1 1 +VALUE 3 0 3 3 +VALUABLE 2 0 2 2 +VALLEYS 2 0 2 2 +VALLEYED 1 0 1 1 +VALLEY 4 0 4 4 +VALIANTLY 1 0 1 1 +VALHALLA 1 0 1 1 +VALE 1 0 1 1 +VAINLY 1 0 1 1 +VAIN 1 0 1 1 +VAGUELY 1 0 1 1 +VAGUE 3 0 3 3 +VACUUM 1 0 1 1 +VACANT 1 0 1 1 +UTTERLY 4 0 4 4 +UTTERED 1 0 1 1 +UTTERANCE 1 0 1 1 +UTTER 4 0 4 4 +UTMOST 3 0 3 3 +UTILITY 3 0 3 3 +USUALLY 4 0 4 4 +USUAL 5 0 5 5 +USING 3 0 3 3 +USELESS 4 0 4 4 +USEFUL 5 0 5 5 +USED 17 0 17 17 +USE 31 0 31 31 +US 60 0 60 60 +URGING 1 0 1 1 +URGED 3 0 3 3 +UPWARDS 1 0 1 1 +UPTOWN 1 0 1 1 +UPRIGHT 1 0 1 1 +UPRAISED 1 0 1 1 +UPPERMOST 1 0 1 1 +UPPER 2 0 2 2 +UPLIFTED 1 0 1 1 +UPHOLSTERED 1 0 1 1 +UPHEAVAL 1 0 1 1 +UPBRAIDED 1 0 1 1 +UNWORTHY 1 0 1 1 +UNWILLING 2 0 2 2 +UNVARNISHED 1 0 1 1 +UNUSUAL 4 0 4 4 +UNTUTORED 1 0 1 1 +UNTRIED 1 0 1 1 +UNTREATED 1 0 1 1 +UNTOUCHED 1 0 1 1 +UNTIL 16 0 16 16 +UNTIDINESS 1 0 1 1 +UNTASTED 1 0 1 1 +UNSUCCESSFUL 1 0 1 1 +UNSEPARATED 1 0 1 1 +UNSEEN 1 0 1 1 +UNSAID 1 0 1 1 +UNREAL 1 0 1 1 +UNPRECEDENTED 1 0 1 1 +UNPOPULAR 1 0 1 1 +UNPLEASANT 3 0 3 3 +UNPERCEIVED 1 0 1 1 +UNPARALLELED 1 0 1 1 +UNOBSERVED 1 0 1 1 +UNNECESSARY 1 0 1 1 +UNNATURAL 1 0 1 1 +UNMOVED 1 0 1 1 +UNLUCKY 2 0 2 2 +UNLUCKILY 1 0 1 1 +UNLOCKED 1 0 1 1 +UNLOCK 1 0 1 1 +UNLOADED 1 0 1 1 +UNLIKELY 1 0 1 1 +UNLESS 5 0 5 5 +UNKNOWN 1 0 1 1 +UNJUST 2 0 2 2 +UNIVERSITY 1 0 1 1 +UNIVERSE 1 0 1 1 +UNIVERSAL 3 0 3 3 +UNITED 8 0 8 8 +UNITE 1 0 1 1 +UNION 3 0 3 3 +UNINVITED 1 0 1 1 +UNINTELLIGIBLE 1 0 1 1 +UNIFORMS 2 0 2 2 +UNIFORM 1 0 1 1 +UNICORN 1 0 1 1 +UNHEEDED 1 0 1 1 +UNHAPPY 4 0 4 4 +UNHAPPINESS 1 0 1 1 +UNGRACIOUSLY 1 0 1 1 +UNGRACIOUS 1 0 1 1 +UNFORTUNATELY 2 0 2 2 +UNFORTUNATE 1 0 1 1 +UNFOLD 1 0 1 1 +UNFINISHED 2 0 2 2 +UNFEELING 1 0 1 1 +UNFAMILIAR 1 0 1 1 +UNFAIRLY 1 0 1 1 +UNFAIR 2 0 2 2 +UNFAILING 1 0 1 1 +UNEXPECTEDLY 2 0 2 2 +UNEXPECTED 3 0 3 3 +UNEXCEPTIONABLY 1 0 1 1 +UNEASY 4 0 4 4 +UNEASINESS 1 0 1 1 +UNEASILY 1 0 1 1 +UNEARTHLY 1 0 1 1 +UNDUE 1 0 1 1 +UNDOUBTEDLY 1 0 1 1 +UNDOING 1 0 1 1 +UNDERWATER 1 0 1 1 +UNDERTONE 1 0 1 1 +UNDERTAKING 2 0 2 2 +UNDERSTOOD 6 0 6 6 +UNDERSTANDING 4 0 4 4 +UNDERSTAND 9 0 9 9 +UNDERSCORE 1 0 1 1 +UNDERNEATH 1 0 1 1 +UNDERMINE 1 0 1 1 +UNDERHANDED 1 0 1 1 +UNDER 40 0 40 40 +UNDECEIVED 1 0 1 1 +UNCOUTH 1 0 1 1 +UNCOURTEOUS 1 0 1 1 +UNCONTROLLABLE 1 0 1 1 +UNCONSTITUTIONALITY 1 0 1 1 +UNCOMPROMISING 1 0 1 1 +UNCOMFORTABLE 1 0 1 1 +UNCLE 6 0 6 6 +UNCIVIL 1 0 1 1 +UNCHARITABLENESS 1 0 1 1 +UNCHANGED 1 0 1 1 +UNCERTAIN 2 0 2 2 +UNCASING 1 0 1 1 +UNCAS 10 0 10 10 +UNBUTTONING 1 0 1 1 +UNBROKEN 1 0 1 1 +UNBEARABLE 2 0 2 2 +UNAVOIDABLE 1 0 1 1 +UNAVERRED 1 0 1 1 +UNANIMOUSLY 1 0 1 1 +UNANIMOUS 1 0 1 1 +UNAFFECTED 1 0 1 1 +UNACCOUNTABLE 1 0 1 1 +UNABLE 1 0 1 1 +UGLY 3 0 3 3 +TYRANNY 1 0 1 1 +TYPICAL 1 0 1 1 +TYPES 1 0 1 1 +TYPE 1 0 1 1 +TWIXT 1 0 1 1 +TWITE 1 0 1 1 +TWIRLING 1 0 1 1 +TWIN 1 0 1 1 +TWILIGHT 2 0 2 2 +TWICE 3 0 3 3 +TWENTY 15 0 15 15 +TWELVE 2 0 2 2 +TWELFTH 1 0 1 1 +TURNS 1 0 1 1 +TURNIPS 1 0 1 1 +TURNING 6 0 6 6 +TURNER'S 1 0 1 1 +TURNER 4 0 4 4 +TURF 1 0 1 1 +TUNE 2 0 2 2 +TUMULTUOUS 1 0 1 1 +TUMULT 3 0 3 3 +TUMBLER 1 0 1 1 +TUMBLED 3 0 3 3 +TUFT 1 0 1 1 +TUESDAY 1 0 1 1 +TUCKED 1 0 1 1 +TUBE 1 0 1 1 +TRYING 5 0 5 5 +TRY 4 0 4 4 +TRUTH 13 0 13 13 +TRUSTY 1 0 1 1 +TRUSTS 1 0 1 1 +TRUST 5 0 5 5 +TRUNKS 2 0 2 2 +TRUNK 2 0 2 2 +TRUMPETS 1 0 1 1 +TRULY 9 0 9 9 +TRUFFLES 1 0 1 1 +TRUE 21 0 21 21 +TRUDGED 1 0 1 1 +TROUT'S 1 0 1 1 +TROUBLESOME 1 0 1 1 
+TROUBLES 2 0 2 2 +TROUBLED 4 0 4 4 +TROTTING 1 0 1 1 +TROTTED 1 0 1 1 +TROTH 1 0 1 1 +TROPHIES 1 0 1 1 +TROOPS 3 0 3 3 +TROOPER'S 1 0 1 1 +TRIUMPHANTLY 1 0 1 1 +TRIUMPHANT 1 0 1 1 +TRIUMPH 3 0 3 3 +TRIPPED 1 0 1 1 +TRINKET 1 0 1 1 +TRIMNESS 1 0 1 1 +TRIM 1 0 1 1 +TRIGGER 1 0 1 1 +TRIED 9 0 9 9 +TRICKS 2 0 2 2 +TRIBUTE 1 0 1 1 +TRIBES 1 0 1 1 +TRIAL 2 0 2 2 +TREND 2 0 2 2 +TREMULOUSLY 1 0 1 1 +TREMULOUS 1 0 1 1 +TREMOR 1 0 1 1 +TREMENDOUSLY 1 0 1 1 +TREMBLING 5 0 5 5 +TREMBLED 1 0 1 1 +TREMBLE 2 0 2 2 +TREE 35 0 35 35 +TREATY 1 0 1 1 +TREATS 1 0 1 1 +TREATING 1 0 1 1 +TREATED 2 0 2 2 +TREAT 1 0 1 1 +TREASURE 2 0 2 2 +TREAD 1 0 1 1 +TRAY 1 0 1 1 +TRAVERSED 1 0 1 1 +TRAVEL 1 0 1 1 +TRASH 1 0 1 1 +TRAP 2 0 2 2 +TRANSPARENT 2 0 2 2 +TRANSLATION 1 0 1 1 +TRANSLATE 1 0 1 1 +TRANSIENT 2 0 2 2 +TRANSFERRED 1 0 1 1 +TRANSCRIPT 1 0 1 1 +TRANQUILLITY 1 0 1 1 +TRANQUIL 1 0 1 1 +TRAINS 1 0 1 1 +TRAINING 3 0 3 3 +TRAINED 1 0 1 1 +TRAIN 1 0 1 1 +TRAIL 2 0 2 2 +TRAFFIC 1 0 1 1 +TRADITIONS 3 0 3 3 +TRADITION 1 0 1 1 +TRADES 1 0 1 1 +TRACK 1 0 1 1 +TRACES 2 0 2 2 +TRACE 1 0 1 1 +TOYS 1 0 1 1 +TOWNS 3 0 3 3 +TOWERS 1 0 1 1 +TOWER 1 0 1 1 +TOUR 1 0 1 1 +TOUCHING 1 0 1 1 +TOUCHES 4 0 4 4 +TOUCH 8 0 8 8 +TOTAL 1 0 1 1 +TOSSING 1 0 1 1 +TOSSED 1 0 1 1 +TORTURED 2 0 2 2 +TORTURE 1 0 1 1 +TORTOISE 1 0 1 1 +TORRENT 3 0 3 3 +TORN 1 0 1 1 +TORCH 1 0 1 1 +TOPSAILS 1 0 1 1 +TOPMOST 1 0 1 1 +TOPMASTS 1 0 1 1 +TOPEKA 1 0 1 1 +TOOTHED 1 0 1 1 +TOOTH 1 0 1 1 +TOOK 33 0 33 33 +TONGUES 1 0 1 1 +TONGUE 8 0 8 8 +TONES 3 0 3 3 +TONED 1 0 1 1 +TOMMY 1 0 1 1 +TOMB 1 0 1 1 +TOM 4 0 4 4 +TOLERATION 1 0 1 1 +TOLEDANS 1 0 1 1 +TOKEN 2 0 2 2 +TOE 1 0 1 1 +TITLE 3 0 3 3 +TITIAN 1 0 1 1 +TIS 8 0 8 8 +TIRELESS 1 0 1 1 +TIRED 6 0 6 6 +TIRE 1 0 1 1 +TIPTOE 2 0 2 2 +TIPPED 1 0 1 1 +TIP 3 0 3 3 +TINY 3 0 3 3 +TINTS 1 0 1 1 +TINT 1 0 1 1 +TINSEL 1 0 1 1 +TINKLED 1 0 1 1 +TINGLING 1 0 1 1 +TINGE 1 0 1 1 +TIN 1 0 1 1 +TIME'S 2 0 2 2 +TIMASCHEFF'S 1 0 1 1 +TILL 8 0 8 8 +TILES 1 0 1 1 +TIGHTLY 1 0 1 1 +TIGHTEN 1 0 1 1 +TIGHT 1 0 1 1 +TIED 2 0 2 2 +TIE 1 0 1 1 +TIDING 1 0 1 1 +TICKET 1 0 1 1 +THY 17 0 17 17 +THWART 1 0 1 1 +THURSTON 2 0 2 2 +THURSDAY 1 0 1 1 +THRUSTING 1 0 1 1 +THRUST 5 0 5 5 +THROW 2 0 2 2 +THROUGHOUT 5 0 5 5 +THROATS 1 0 1 1 +THROAT 1 0 1 1 +THRILLING 1 0 1 1 +THRILLED 1 0 1 1 +THRILL 1 0 1 1 +THRICE 1 0 1 1 +THREW 5 0 5 5 +THREE 41 0 41 41 +THREATS 1 0 1 1 +THREATENS 2 0 2 2 +THREATENING 3 0 3 3 +THREATENED 1 0 1 1 +THRALLS 2 0 2 2 +THRALL'S 1 0 1 1 +THRALL 2 0 2 2 +THOUSANDS 2 0 2 2 +THOUGHTS 13 0 13 13 +THOUGHTLESS 1 0 1 1 +THOUGHTFUL 1 0 1 1 +THOSE 37 0 37 37 +THOROUGH 1 0 1 1 +THOMAS 1 0 1 1 +THIRTY 12 0 12 12 +THIRTIETH 1 0 1 1 +THIRTEENTH 1 0 1 1 +THIRD 7 0 7 7 +THINKS 3 0 3 3 +THINGS 34 0 34 34 +THIN 2 0 2 2 +THICKEST 1 0 1 1 +THICK 5 0 5 5 +THEY'VE 1 0 1 1 +THERMOMETER 1 0 1 1 +THEREOF 1 0 1 1 +THEREIN 3 0 3 3 +THEREAFTER 1 0 1 1 +THEREABOUTS 1 0 1 1 +THEORY 5 0 5 5 +THEORIES 1 0 1 1 +THEORETICAL 1 0 1 1 +THEOLOGY 1 0 1 1 +THENCE 1 0 1 1 +THEMSELVES 12 0 12 12 +THEME 1 0 1 1 +THEIRS 2 0 2 2 +THEFT 4 0 4 4 +THEATRICAL 1 0 1 1 +THEATRES 1 0 1 1 +THANKS 3 0 3 3 +THANKFUL 1 0 1 1 +THANKED 1 0 1 1 +TEXTURES 1 0 1 1 +TEXT 2 0 2 2 +TESTIMONY 1 0 1 1 +TESTIMONIES 1 0 1 1 +TESTED 1 0 1 1 +TEST 2 0 2 2 +TERROR 2 0 2 2 +TERRITORY 2 0 2 2 +TERRITORIAL 4 0 4 4 +TERRIFIED 2 0 2 2 +TERRIFIC 1 0 1 1 +TERRIBLY 2 0 2 2 +TERRIBLE 8 0 8 8 +TERRACED 1 0 1 1 +TERMS 9 0 9 9 +TERM 6 0 6 6 +TENTS 2 0 2 2 +TENFOLD 2 0 2 2 +TENDERLY 1 0 1 1 +TENDER 3 0 3 3 +TENDED 1 0 1 1 +TEND 2 0 2 2 +TENANTED 1 0 1 1 +TENABILITY 1 0 1 1 +TEN 14 0 14 14 
+TEMPTATION 2 0 2 2 +TEMPORARY 2 0 2 2 +TEMPORAL 2 0 2 2 +TEMPLES 1 0 1 1 +TEMPLE 2 0 2 2 +TEMPEST 2 0 2 2 +TEMPERATURE 1 0 1 1 +TEMPER 3 0 3 3 +TELLS 1 0 1 1 +TELLING 3 0 3 3 +TELL 34 0 34 34 +TELESCOPE 2 0 2 2 +TEETH 1 0 1 1 +TEDIOUS 2 0 2 2 +TECHNOLOGY 1 0 1 1 +TECHNICAL 2 0 2 2 +TEAPOT 1 0 1 1 +TEAL 1 0 1 1 +TEACHING 2 0 2 2 +TEACHERY 1 0 1 1 +TEACHER 3 0 3 3 +TEACH 6 0 6 6 +TAYLOR 7 0 7 7 +TAXED 1 0 1 1 +TAWNY 1 0 1 1 +TAUGHT 5 0 5 5 +TASTE 5 0 5 5 +TASKS 1 0 1 1 +TASK 9 0 9 9 +TARTS 3 0 3 3 +TARRY 1 0 1 1 +TAPESTRY 1 0 1 1 +TAPESTRIES 1 0 1 1 +TAPE 1 0 1 1 +TANKARD 1 0 1 1 +TANGLE 1 0 1 1 +TAN 1 0 1 1 +TAMPERING 1 0 1 1 +TAMPERED 1 0 1 1 +TAMES 1 0 1 1 +TAME 1 0 1 1 +TALONS 1 0 1 1 +TALLOW 1 0 1 1 +TALL 9 0 9 9 +TALKS 4 0 4 4 +TALKING 10 0 10 10 +TALKED 1 0 1 1 +TALES 2 0 2 2 +TALENTED 1 0 1 1 +TALENT 5 0 5 5 +TALE 4 0 4 4 +TAKE 34 0 34 34 +TAILORS 1 0 1 1 +TAIL 3 0 3 3 +TAG 1 0 1 1 +TACT 1 0 1 1 +TACK 1 0 1 1 +TABLES 3 0 3 3 +TABBY'S 1 0 1 1 +TABBY 2 0 2 2 +SYSTEM 8 0 8 8 +SYNONYM 1 0 1 1 +SYMPATHY 3 0 3 3 +SYMPATHETIC 2 0 2 2 +SYLLABLE 1 0 1 1 +SWORDS 1 0 1 1 +SWORD 5 0 5 5 +SWOOPED 1 0 1 1 +SWOLLEN 1 0 1 1 +SWIRLING 1 0 1 1 +SWIRL 1 0 1 1 +SWING 1 0 1 1 +SWIMMING 2 0 2 2 +SWIFTNESS 1 0 1 1 +SWIFTLY 4 0 4 4 +SWIFT 1 0 1 1 +SWELLING 1 0 1 1 +SWELL 1 0 1 1 +SWEETS 1 0 1 1 +SWEETNESS 2 0 2 2 +SWEET 6 0 6 6 +SWEEPING 1 0 1 1 +SWARMING 1 0 1 1 +SWAMP 3 0 3 3 +SWAM 1 0 1 1 +SUSPICIOUS 1 0 1 1 +SUSPENDED 1 0 1 1 +SUSPECT 2 0 2 2 +SURVEYOR 1 0 1 1 +SURVEYED 1 0 1 1 +SURROUNDINGS 1 0 1 1 +SURROUNDING 2 0 2 2 +SURROUNDED 2 0 2 2 +SURRENDER 2 0 2 2 +SURPRISED 6 0 6 6 +SURPRISE 4 0 4 4 +SURPASSED 1 0 1 1 +SURMISED 1 0 1 1 +SURGEON 1 0 1 1 +SURGE 1 0 1 1 +SURFACE 8 0 8 8 +SURELY 5 0 5 5 +SURE 16 0 16 16 +SURCHARGED 1 0 1 1 +SUPREME 2 0 2 2 +SUPPRESSING 1 0 1 1 +SUPPRESSED 2 0 2 2 +SUPPOSITION 1 0 1 1 +SUPPOSING 2 0 2 2 +SUPPOSES 1 0 1 1 +SUPPOSED 3 0 3 3 +SUPPOSE 19 0 19 19 +SUPPORTS 1 0 1 1 +SUPPORTING 2 0 2 2 +SUPPORTED 2 0 2 2 +SUPPORT 2 0 2 2 +SUPPLYING 1 0 1 1 +SUPPLY 1 0 1 1 +SUPPLIES 1 0 1 1 +SUPPER 7 0 7 7 +SUPERIORITY 1 0 1 1 +SUPERIOR 8 0 8 8 +SUPERINTENDENCE 1 0 1 1 +SUPERFLUOUS 1 0 1 1 +SUPERFLUITIES 1 0 1 1 +SUNSHINE 3 0 3 3 +SUNSETS 1 0 1 1 +SUNSET 1 0 1 1 +SUNRISE 1 0 1 1 +SUNNY 1 0 1 1 +SUNLIGHT 2 0 2 2 +SUNK 1 0 1 1 +SUNG 2 0 2 2 +SUNDAY 2 0 2 2 +SUNBEAMS 1 0 1 1 +SUN 15 0 15 15 +SUMMONS 2 0 2 2 +SUMMIT 1 0 1 1 +SUMMARY 1 0 1 1 +SULLIED 1 0 1 1 +SULLEN 1 0 1 1 +SUITS 1 0 1 1 +SUITED 1 0 1 1 +SUITABLE 2 0 2 2 +SUGGESTIONS 1 0 1 1 +SUGGESTION 1 0 1 1 +SUGGESTED 3 0 3 3 +SUGGEST 1 0 1 1 +SUGAR 1 0 1 1 +SUFFOCATING 1 0 1 1 +SUFFICIENTLY 1 0 1 1 +SUFFICIENT 3 0 3 3 +SUFFICED 1 0 1 1 +SUFFICE 1 0 1 1 +SUFFERINGS 2 0 2 2 +SUFFERING 2 0 2 2 +SUFFERED 3 0 3 3 +SUFFER 5 0 5 5 +SUDDENLY 15 0 15 15 +SUDDEN 7 0 7 7 +SUCKLING 1 0 1 1 +SUCH 67 0 67 67 +SUCCESSION 3 0 3 3 +SUCCESSFUL 3 0 3 3 +SUCCESS 9 0 9 9 +SUCCEEDED 3 0 3 3 +SUCCEED 1 0 1 1 +SUBURB 1 0 1 1 +SUBTLETIES 1 0 1 1 +SUBSTITUTION 1 0 1 1 +SUBSTITUTED 1 0 1 1 +SUBSTANTIALLY 1 0 1 1 +SUBSTANTIAL 3 0 3 3 +SUBSTANCE 3 0 3 3 +SUBSISTENCE 1 0 1 1 +SUBSIDED 1 0 1 1 +SUBSCRIBE 1 0 1 1 +SUBORDINATION 1 0 1 1 +SUBMITTED 1 0 1 1 +SUBMIT 2 0 2 2 +SUBMISSIVELY 1 0 1 1 +SUBMARINE 3 0 3 3 +SUBJECTS 3 0 3 3 +SUBJECTIVELY 1 0 1 1 +SUBJECTED 1 0 1 1 +SUBJECT 16 0 16 16 +SUBDUING 1 0 1 1 +SUBDUED 2 0 2 2 +STYLE 5 0 5 5 +STUTELEY 4 0 4 4 +STURDY 1 0 1 1 +STUPID 4 0 4 4 +STUPEFIED 2 0 2 2 +STUNG 1 0 1 1 +STUMPED 1 0 1 1 +STUMP 1 0 1 1 +STUFFED 3 0 3 3 +STUFF 1 0 1 1 +STUDYING 2 0 2 2 +STUDIOUS 2 0 2 2 +STUDIES 1 0 1 1 +STUDENTS 3 0 3 3 +STUDENT 2 0 2 
2 +STUCCO 1 0 1 1 +STRUGGLES 1 0 1 1 +STRUGGLED 1 0 1 1 +STRUGGLE 6 0 6 6 +STRUCTURE 2 0 2 2 +STRUCK 4 0 4 4 +STROVE 2 0 2 2 +STRONGLY 2 0 2 2 +STRONGHOLD 1 0 1 1 +STRONGEST 2 0 2 2 +STRONGER 1 0 1 1 +STRONG 13 0 13 13 +STROLLERS 1 0 1 1 +STROLLER'S 1 0 1 1 +STROLLER 3 0 3 3 +STROLL 3 0 3 3 +STROKE 1 0 1 1 +STRIVING 1 0 1 1 +STRIVE 3 0 3 3 +STRIPPING 1 0 1 1 +STRIPPED 1 0 1 1 +STRIPLING 1 0 1 1 +STRIKING 1 0 1 1 +STRIKE 3 0 3 3 +STRIFE 1 0 1 1 +STRICTLY 1 0 1 1 +STRICTEST 1 0 1 1 +STRICT 2 0 2 2 +STRETCHING 1 0 1 1 +STRETCHED 1 0 1 1 +STRETCH 1 0 1 1 +STRENUOUS 1 0 1 1 +STRENGTHENING 1 0 1 1 +STRENGTHENED 2 0 2 2 +STRENGTH 7 0 7 7 +STREETS 1 0 1 1 +STREET 14 0 14 14 +STREAKED 1 0 1 1 +STRAWBERRIES 1 0 1 1 +STRAW 1 0 1 1 +STRANGERS 2 0 2 2 +STRANGER 1 0 1 1 +STRANGELY 2 0 2 2 +STRANGE 12 0 12 12 +STRAITS 1 0 1 1 +STRAINED 1 0 1 1 +STRAIN 1 0 1 1 +STRAIGHTWAY 2 0 2 2 +STRAIGHT 3 0 3 3 +STORY 25 0 25 25 +STORMY 1 0 1 1 +STORMS 1 0 1 1 +STORES 1 0 1 1 +STORAGE 1 0 1 1 +STOPPING 2 0 2 2 +STOPPED 6 0 6 6 +STOP 8 0 8 8 +STOOPED 1 0 1 1 +STOOP 1 0 1 1 +STOOL 2 0 2 2 +STOOD 22 0 22 22 +STONE 3 0 3 3 +STOLEN 2 0 2 2 +STOICAL 1 0 1 1 +STOCKINGS 1 0 1 1 +STOCKBROKER 1 0 1 1 +STOCK 2 0 2 2 +STIRS 1 0 1 1 +STIRRED 1 0 1 1 +STIR 1 0 1 1 +STINGY 2 0 2 2 +STING 1 0 1 1 +STIMULANTS 1 0 1 1 +STILLNESS 1 0 1 1 +STILL 55 0 55 55 +STIFLING 1 0 1 1 +STIFFNESS 1 0 1 1 +STIFF 1 0 1 1 +STICKS 1 0 1 1 +STICKING 1 0 1 1 +STICK 1 0 1 1 +STEW 1 0 1 1 +STERNEST 1 0 1 1 +STERN 2 0 2 2 +STEPS 1 0 1 1 +STEPPED 1 0 1 1 +STEPHEN'S 1 0 1 1 +STEPHEN 2 0 2 2 +STEPHANOS 2 0 2 2 +STEM 1 0 1 1 +STEEP 1 0 1 1 +STEEL 1 0 1 1 +STEAMING 1 0 1 1 +STEAMED 1 0 1 1 +STEAMBOAT 1 0 1 1 +STEAM 1 0 1 1 +STEAL 1 0 1 1 +STEADILY 2 0 2 2 +STEAD 1 0 1 1 +STAYS 1 0 1 1 +STAYED 2 0 2 2 +STATUS 1 0 1 1 +STATUARY 1 0 1 1 +STATIONS 2 0 2 2 +STATION 6 0 6 6 +STATESMAN 1 0 1 1 +STATEMENT 3 0 3 3 +STATELY 1 0 1 1 +STATE'S 2 0 2 2 +STARTLING 1 0 1 1 +STARTLED 2 0 2 2 +STARTING 2 0 2 2 +STARTED 9 0 9 9 +STARS 1 0 1 1 +STARLIT 1 0 1 1 +STARING 1 0 1 1 +STARED 1 0 1 1 +STAR 2 0 2 2 +STANLEY 1 0 1 1 +STANDING 8 0 8 8 +STANDARD 4 0 4 4 +STAMPING 1 0 1 1 +STAMPED 1 0 1 1 +STALKS 1 0 1 1 +STAKES 1 0 1 1 +STAKE 1 0 1 1 +STAIRCASE 1 0 1 1 +STAIN 1 0 1 1 +STAGES 2 0 2 2 +STAGECRAFT 1 0 1 1 +STAFF 1 0 1 1 +STACKED 1 0 1 1 +STABLE 1 0 1 1 +SQUIRE'S 3 0 3 3 +SQUIRE 8 0 8 8 +SQUEEZE 1 0 1 1 +SQUARES 2 0 2 2 +SQUARE 2 0 2 2 +SQUALOR 1 0 1 1 +SQUALID 1 0 1 1 +SPUR 1 0 1 1 +SPRUNG 2 0 2 2 +SPRINKLING 1 0 1 1 +SPRINKLED 1 0 1 1 +SPRINGY 1 0 1 1 +SPRINGS 3 0 3 3 +SPRINGING 1 0 1 1 +SPREADS 1 0 1 1 +SPREAD 5 0 5 5 +SPRAGUE 1 0 1 1 +SPOTLESS 1 0 1 1 +SPOT 4 0 4 4 +SPORTING 1 0 1 1 +SPOON 1 0 1 1 +SPOKEN 11 0 11 11 +SPOKE 15 0 15 15 +SPOILS 2 0 2 2 +SPLENDORS 1 0 1 1 +SPLENDIDLY 2 0 2 2 +SPLASHES 1 0 1 1 +SPLASHED 2 0 2 2 +SPITE 2 0 2 2 +SPIRITUAL 4 0 4 4 +SPIRITS 3 0 3 3 +SPIRIT 11 0 11 11 +SPINNING 4 0 4 4 +SPIKES 1 0 1 1 +SPIDER 1 0 1 1 +SPICY 1 0 1 1 +SPERM 1 0 1 1 +SPENT 5 0 5 5 +SPENDING 1 0 1 1 +SPELLED 1 0 1 1 +SPELL 1 0 1 1 +SPEEDS 1 0 1 1 +SPEED 3 0 3 3 +SPEECHLESS 1 0 1 1 +SPEECH 6 0 6 6 +SPED 2 0 2 2 +SPECULATE 1 0 1 1 +SPECTATORS 1 0 1 1 +SPECKS 1 0 1 1 +SPECIOUS 1 0 1 1 +SPECIFICATIONS 1 0 1 1 +SPECIFIC 1 0 1 1 +SPECIES 3 0 3 3 +SPECIALTY 1 0 1 1 +SPECIALLY 2 0 2 2 +SPEAR 1 0 1 1 +SPEAKS 1 0 1 1 +SPEAKING 10 0 10 10 +SPEAK 15 0 15 15 +SPASM 1 0 1 1 +SPARKS 1 0 1 1 +SPARKLING 3 0 3 3 +SPARKLES 2 0 2 2 +SPARKLED 1 0 1 1 +SPARK 1 0 1 1 +SPARE 3 0 3 3 +SPACE 5 0 5 5 +SOUTHERNERS 2 0 2 2 +SOUTHBRIDGE 1 0 1 1 +SOUTH 7 0 7 7 +SOURCE 1 0 1 1 +SOUP 1 0 1 1 +SOUNDING 3 0 3 
3 +SOUNDED 2 0 2 2 +SOUND 7 0 7 7 +SOULS 4 0 4 4 +SOUL'S 1 0 1 1 +SOUL 8 0 8 8 +SOUGHT 6 0 6 6 +SORTS 2 0 2 2 +SORT 8 0 8 8 +SORRY 5 0 5 5 +SORROWS 1 0 1 1 +SORROWFULLY 1 0 1 1 +SORROWFUL 1 0 1 1 +SORROW 5 0 5 5 +SORREL 1 0 1 1 +SORE 1 0 1 1 +SORCERESS 1 0 1 1 +SOPHISTRY 1 0 1 1 +SOOTHINGLY 1 0 1 1 +SOOTH 1 0 1 1 +SOON 28 0 28 28 +SONS 3 0 3 3 +SONOROUS 1 0 1 1 +SONG 2 0 2 2 +SOMEWHERE 6 0 6 6 +SOMEWHAT 5 0 5 5 +SOMETIMES 18 0 18 18 +SOMETHING 37 0 37 37 +SOMEHOW 6 0 6 6 +SOMEBODY 3 0 3 3 +SOMBRE 1 0 1 1 +SOLVED 2 0 2 2 +SOLUTION 1 0 1 1 +SOLID 2 0 2 2 +SOLEMNITY 1 0 1 1 +SOLELY 1 0 1 1 +SOLE 3 0 3 3 +SOLDIERS 6 0 6 6 +SOLD 4 0 4 4 +SOIL 2 0 2 2 +SOFTNESS 2 0 2 2 +SOFTLY 4 0 4 4 +SOFTENED 1 0 1 1 +SOFT 7 0 7 7 +SOFAS 1 0 1 1 +SOCRATES 2 0 2 2 +SOCKS 1 0 1 1 +SOCIETY 7 0 7 7 +SOCIETIES 1 0 1 1 +SOCIAL 8 0 8 8 +SOCIABLE 1 0 1 1 +SOBS 1 0 1 1 +SOARED 1 0 1 1 +SOAR 1 0 1 1 +SNUFFED 1 0 1 1 +SNUFF 4 0 4 4 +SNOW 1 0 1 1 +SNORED 1 0 1 1 +SNEER 1 0 1 1 +SNATCHED 1 0 1 1 +SNATCH 1 0 1 1 +SMUGGLING 1 0 1 1 +SMOOTHER 1 0 1 1 +SMOOTH 1 0 1 1 +SMOKING 1 0 1 1 +SMOKE 5 0 5 5 +SMITTEN 2 0 2 2 +SMITH 2 0 2 2 +SMILING 3 0 3 3 +SMILES 3 0 3 3 +SMILE 12 0 12 12 +SMELL 3 0 3 3 +SMARTLY 1 0 1 1 +SMART 1 0 1 1 +SMALLEST 2 0 2 2 +SMALLER 1 0 1 1 +SMALL 20 0 20 20 +SLY 2 0 2 2 +SLUNK 1 0 1 1 +SLUMS 1 0 1 1 +SLUMBERS 1 0 1 1 +SLOWLY 14 0 14 14 +SLOW 4 0 4 4 +SLOPING 1 0 1 1 +SLIPS 1 0 1 1 +SLIPPING 1 0 1 1 +SLIPPED 4 0 4 4 +SLINGS 1 0 1 1 +SLIMY 1 0 1 1 +SLIMLY 1 0 1 1 +SLIGHTLY 4 0 4 4 +SLIGHTEST 1 0 1 1 +SLIGHTER 1 0 1 1 +SLIGHT 4 0 4 4 +SLEPT 2 0 2 2 +SLENDER 2 0 2 2 +SLEEPING 2 0 2 2 +SLEEP 5 0 5 5 +SLEEK 2 0 2 2 +SLAVES 1 0 1 1 +SLAVERY 3 0 3 3 +SLAVE 1 0 1 1 +SLATED 1 0 1 1 +SLAP 1 0 1 1 +SLANG 12 0 12 12 +SLANDERER 1 0 1 1 +SLAM 1 0 1 1 +SLAKED 1 0 1 1 +SKY 5 0 5 5 +SKIRTS 1 0 1 1 +SKIRT 1 0 1 1 +SKIRMISHES 1 0 1 1 +SKIP 1 0 1 1 +SKINNER 1 0 1 1 +SKINNED 1 0 1 1 +SKIN 3 0 3 3 +SKIMS 1 0 1 1 +SKILL 2 0 2 2 +SKETCHES 1 0 1 1 +SKETCH 1 0 1 1 +SKEPTICISM 1 0 1 1 +SKELETON 1 0 1 1 +SIZZLE 1 0 1 1 +SIXTY 3 0 3 3 +SIXTH 1 0 1 1 +SIXTEENTH 4 0 4 4 +SIXTEEN 1 0 1 1 +SIX 14 0 14 14 +SITUATION 2 0 2 2 +SITE 1 0 1 1 +SISTERS 5 0 5 5 +SISTER'S 1 0 1 1 +SISTER 8 0 8 8 +SIRE 4 0 4 4 +SINS 9 0 9 9 +SINNER 2 0 2 2 +SINKS 1 0 1 1 +SINK 1 0 1 1 +SINGS 1 0 1 1 +SINGLED 1 0 1 1 +SINGLE 5 0 5 5 +SINGING 2 0 2 2 +SINGER'S 1 0 1 1 +SINGER 2 0 2 2 +SING 2 0 2 2 +SINFUL 2 0 2 2 +SIMPLY 10 0 10 10 +SIMPLIFIED 1 0 1 1 +SIMPLICITY 2 0 2 2 +SIMPLE 9 0 9 9 +SIMON 1 0 1 1 +SIMILITUDE 1 0 1 1 +SIMILARLY 1 0 1 1 +SIMILAR 3 0 3 3 +SILVERING 1 0 1 1 +SILVER 8 0 8 8 +SILLINESS 2 0 2 2 +SILKEN 2 0 2 2 +SILK 6 0 6 6 +SILHOUETTE 1 0 1 1 +SILENT 11 0 11 11 +SILENCES 1 0 1 1 +SILAS 1 0 1 1 +SIGNS 4 0 4 4 +SIGNING 1 0 1 1 +SIGNIFICANTLY 1 0 1 1 +SIGNIFICANCE 3 0 3 3 +SIGNED 1 0 1 1 +SIGNATURE 1 0 1 1 +SIGN 5 0 5 5 +SIGH 3 0 3 3 +SIDEWAYS 1 0 1 1 +SIDES 6 0 6 6 +SICKNESS 2 0 2 2 +SHY 1 0 1 1 +SHUTTING 2 0 2 2 +SHUTTERS 1 0 1 1 +SHUT 3 0 3 3 +SHUNNING 1 0 1 1 +SHUDDER 2 0 2 2 +SHRUGGED 1 0 1 1 +SHRUBBERY 1 0 1 1 +SHRIVELLED 1 0 1 1 +SHRINE 1 0 1 1 +SHRILL 1 0 1 1 +SHRIEKED 1 0 1 1 +SHREWISH 1 0 1 1 +SHREWDLY 1 0 1 1 +SHREWD 1 0 1 1 +SHOWN 1 0 1 1 +SHOWERED 1 0 1 1 +SHOWER 1 0 1 1 +SHOW 10 0 10 10 +SHOUTINGS 1 0 1 1 +SHOUTING 1 0 1 1 +SHOUTED 3 0 3 3 +SHOUT 3 0 3 3 +SHOULDN'T 1 0 1 1 +SHOULDERS 5 0 5 5 +SHOULDER 5 0 5 5 +SHORTLY 1 0 1 1 +SHORT 11 0 11 11 +SHORES 1 0 1 1 +SHORE 4 0 4 4 +SHOPS 1 0 1 1 +SHOP 2 0 2 2 +SHOOTING 1 0 1 1 +SHOOT 1 0 1 1 +SHOOK 10 0 10 10 +SHONE 7 0 7 7 +SHOES 3 0 3 3 +SHOCK 4 0 4 4 +SHOAL 1 0 1 1 +SHIVERING 1 0 1 1 +SHIVER 2 0 
2 2 +SHIRTS 1 0 1 1 +SHIRK 1 0 1 1 +SHIPS 2 0 2 2 +SHIPPING 1 0 1 1 +SHINING 4 0 4 4 +SHINES 2 0 2 2 +SHINE 2 0 2 2 +SHIFTED 1 0 1 1 +SHIELD 1 0 1 1 +SHERWOOD 1 0 1 1 +SHEPHERD 2 0 2 2 +SHELVES 1 0 1 1 +SHELTER 1 0 1 1 +SHELLEY'S 1 0 1 1 +SHELL 1 0 1 1 +SHEETING 1 0 1 1 +SHEET 4 0 4 4 +SHEEP 1 0 1 1 +SHEDDING 2 0 2 2 +SHED 1 0 1 1 +SHEAF 1 0 1 1 +SHAWL 1 0 1 1 +SHAVINGS 1 0 1 1 +SHAVEN 1 0 1 1 +SHARPLY 2 0 2 2 +SHARPENED 1 0 1 1 +SHARP 8 0 8 8 +SHARING 1 0 1 1 +SHARED 1 0 1 1 +SHARE 1 0 1 1 +SHAPEN 1 0 1 1 +SHAPELY 1 0 1 1 +SHAPED 1 0 1 1 +SHAPE 1 0 1 1 +SHAMES 1 0 1 1 +SHAME 3 0 3 3 +SHAM 1 0 1 1 +SHALLOWS 1 0 1 1 +SHALLOW 2 0 2 2 +SHAKING 2 0 2 2 +SHAKEN 1 0 1 1 +SHAGGY 2 0 2 2 +SHAFT 1 0 1 1 +SHADY 2 0 2 2 +SHADOWS 7 0 7 7 +SHADOW 2 0 2 2 +SHADES 1 0 1 1 +SHADE 4 0 4 4 +SHACKLETON 1 0 1 1 +SEXTANT 1 0 1 1 +SEWING 1 0 1 1 +SEVERITY 4 0 4 4 +SEVERITIES 1 0 1 1 +SEVERED 2 0 2 2 +SEVERAL 9 0 9 9 +SEVENTY 2 0 2 2 +SEVENTH 1 0 1 1 +SEVENTEEN 2 0 2 2 +SEVEN 6 0 6 6 +SETTLERS 1 0 1 1 +SETTLER 1 0 1 1 +SETTLEMENTS 1 0 1 1 +SETTLEMENT 3 0 3 3 +SETH 1 0 1 1 +SESSION 1 0 1 1 +SERVITUDE 1 0 1 1 +SERVILE 1 0 1 1 +SERVICEABILITY 1 0 1 1 +SERVICE 12 0 12 12 +SERVADAC'S 1 0 1 1 +SERVADAC 7 0 7 7 +SERIOUSLY 4 0 4 4 +SERIOUS 3 0 3 3 +SERIES 3 0 3 3 +SERENE 1 0 1 1 +SERAPHIC 1 0 1 1 +SEQUEL 1 0 1 1 +SEPARATION 1 0 1 1 +SEPARATED 3 0 3 3 +SENTIMENTS 1 0 1 1 +SENTIMENTAL 1 0 1 1 +SENTIMENT 1 0 1 1 +SENTENTIOUSLY 1 0 1 1 +SENSITIVE 1 0 1 1 +SENSIBLE 1 0 1 1 +SENSES 2 0 2 2 +SENSELESS 1 0 1 1 +SENSATIONS 1 0 1 1 +SENSATIONAL 1 0 1 1 +SENSATION 2 0 2 2 +SENORA 1 0 1 1 +SENOR 1 0 1 1 +SENIOR 1 0 1 1 +SEND 3 0 3 3 +SENATOR 1 0 1 1 +SELLING 2 0 2 2 +SELECTION 2 0 2 2 +SELECTED 1 0 1 1 +SELDOM 3 0 3 3 +SEIZING 2 0 2 2 +SEIZED 3 0 3 3 +SEIZE 1 0 1 1 +SEGMENT 1 0 1 1 +SEES 1 0 1 1 +SEEN 16 0 16 16 +SEEMINGLY 3 0 3 3 +SEEKERS 1 0 1 1 +SEEK 4 0 4 4 +SECURITY 2 0 2 2 +SECURING 1 0 1 1 +SECURED 3 0 3 3 +SECURE 4 0 4 4 +SECRETLY 3 0 3 3 +SECRET 3 0 3 3 +SECRECY 1 0 1 1 +SECONDS 2 0 2 2 +SECONDLY 1 0 1 1 +SECONDED 1 0 1 1 +SECONDARY 1 0 1 1 +SECOND 10 0 10 10 +SECLUSION 1 0 1 1 +SECESSIONISTS 1 0 1 1 +SEATED 3 0 3 3 +SEASONS 2 0 2 2 +SEASONABLE 1 0 1 1 +SEASON 3 0 3 3 +SEAS 2 0 2 2 +SEARCHING 1 0 1 1 +SEARCHED 2 0 2 2 +SEARCH 4 0 4 4 +SCYTHE 2 0 2 2 +SCURRIED 1 0 1 1 +SCUMMED 1 0 1 1 +SCULPTURE 1 0 1 1 +SCRUTINY 1 0 1 1 +SCRUTINIZE 1 0 1 1 +SCRUPLES 1 0 1 1 +SCRUB 1 0 1 1 +SCRIPTURES 1 0 1 1 +SCRIBE 1 0 1 1 +SCRIBBLING 1 0 1 1 +SCRIBBLER 1 0 1 1 +SCREEN 1 0 1 1 +SCREAMED 3 0 3 3 +SCRAMBLED 1 0 1 1 +SCOWLED 1 0 1 1 +SCOTTISH 2 0 2 2 +SCOTS 1 0 1 1 +SCOTLAND 1 0 1 1 +SCORPION 1 0 1 1 +SCORNFUL 2 0 2 2 +SCORE 2 0 2 2 +SCORCHED 1 0 1 1 +SCOPE 1 0 1 1 +SCOLD 1 0 1 1 +SCIENTISTS 2 0 2 2 +SCIENTIST 1 0 1 1 +SCIENTIFIC 1 0 1 1 +SCIENCE 2 0 2 2 +SCHOONER 2 0 2 2 +SCHOOLROOM 1 0 1 1 +SCHOOLED 1 0 1 1 +SCHOLARSHIP 1 0 1 1 +SCHOLAR 1 0 1 1 +SCHISM 1 0 1 1 +SCHEME 5 0 5 5 +SCEPTICISM 1 0 1 1 +SCENT 1 0 1 1 +SCENES 1 0 1 1 +SCATTERS 1 0 1 1 +SCATTERED 3 0 3 3 +SCATTER 1 0 1 1 +SCARLET 3 0 3 3 +SCARE 1 0 1 1 +SCARCELY 9 0 9 9 +SCARCE 1 0 1 1 +SCALP 1 0 1 1 +SCALE 2 0 2 2 +SAY 51 0 51 51 +SAWDUST 1 0 1 1 +SAW 23 0 23 23 +SAVIOUR 1 0 1 1 +SAVINGS 1 0 1 1 +SAVING 1 0 1 1 +SAVES 1 0 1 1 +SAVE 9 0 9 9 +SAVAGE 5 0 5 5 +SAUNTERED 1 0 1 1 +SAUCE 1 0 1 1 +SATURDAY 5 0 5 5 +SATISFY 1 0 1 1 +SATISFIED 8 0 8 8 +SATISFACTION 5 0 5 5 +SATANIC 1 0 1 1 +SARCASTIC 1 0 1 1 +SARAH'S 1 0 1 1 +SANK 1 0 1 1 +SANGUINARY 1 0 1 1 +SANDY 2 0 2 2 +SANDWICHES 1 0 1 1 +SANDFORD 1 0 1 1 +SANCTIFYING 1 0 1 1 +SANCTIFIED 1 0 1 1 +SAME 35 0 35 35 +SALVATION 1 0 1 1 
+SALUTE 1 0 1 1 +SALUTATION 1 0 1 1 +SALON 1 0 1 1 +SAKE 4 0 4 4 +SAINT 14 0 14 14 +SAILS 2 0 2 2 +SAILORS 1 0 1 1 +SAILORMAN 1 0 1 1 +SAILOR 1 0 1 1 +SAFETY 2 0 2 2 +SAFEST 1 0 1 1 +SAFE 5 0 5 5 +SADLY 2 0 2 2 +SADDLER 1 0 1 1 +SADDLE 1 0 1 1 +SAD 3 0 3 3 +SACRIFICE 2 0 2 2 +SACRED 2 0 2 2 +SACRAMENT 1 0 1 1 +S 1 0 1 1 +RUTH 10 0 10 10 +RUSTY 1 0 1 1 +RUSTLING 2 0 2 2 +RUSTLED 1 0 1 1 +RUSHING 1 0 1 1 +RUSHED 7 0 7 7 +RUSH 4 0 4 4 +RUNS 2 0 2 2 +RUNNING 8 0 8 8 +RUNG 1 0 1 1 +RUN 9 0 9 9 +RUMMAGED 1 0 1 1 +RUMINATED 1 0 1 1 +RULER 1 0 1 1 +RULE 2 0 2 2 +RUINS 1 0 1 1 +RUINED 1 0 1 1 +RUIN 2 0 2 2 +RUFFLED 1 0 1 1 +RUFFIANS 1 0 1 1 +RUFFIAN 2 0 2 2 +RUDELY 1 0 1 1 +RUDE 2 0 2 2 +RUBY 1 0 1 1 +RUBBING 1 0 1 1 +RUBBED 1 0 1 1 +ROYALISTS 2 0 2 2 +ROYAL 9 0 9 9 +ROWS 2 0 2 2 +ROW 5 0 5 5 +ROVING 1 0 1 1 +ROUTINE 1 0 1 1 +ROUTE 2 0 2 2 +ROUT 1 0 1 1 +ROUSES 1 0 1 1 +ROUSED 1 0 1 1 +ROUSE 1 0 1 1 +ROUNDED 1 0 1 1 +ROUGHLY 7 0 7 7 +ROUGHEST 1 0 1 1 +ROUGH 3 0 3 3 +ROSES 2 0 2 2 +ROSE 14 0 14 14 +ROSALIE 4 0 4 4 +ROOTS 1 0 1 1 +ROOMS 3 0 3 3 +ROOM 41 0 41 41 +ROOFS 1 0 1 1 +ROMANCE 1 0 1 1 +ROMAN 1 0 1 1 +ROLLERS 2 0 2 2 +ROLL 1 0 1 1 +ROGERS'S 1 0 1 1 +ROGERS 2 0 2 2 +ROD 1 0 1 1 +ROCKY 1 0 1 1 +ROCKS 2 0 2 2 +ROCKING 2 0 2 2 +ROCKED 1 0 1 1 +ROCK 1 0 1 1 +ROBUST 1 0 1 1 +ROBINSON 1 0 1 1 +ROBIN 19 0 19 19 +ROBERT 2 0 2 2 +ROBBING 1 0 1 1 +ROBBER 1 0 1 1 +ROARINGS 1 0 1 1 +ROARED 1 0 1 1 +ROADS 1 0 1 1 +ROAD 4 0 4 4 +RIVULET 2 0 2 2 +RIVER 6 0 6 6 +RIVAL 2 0 2 2 +RISK 2 0 2 2 +RISING 3 0 3 3 +RISEN 1 0 1 1 +RISE 1 0 1 1 +RIPPLING 1 0 1 1 +RIOTING 2 0 2 2 +RIOT 1 0 1 1 +RINGS 1 0 1 1 +RING 3 0 3 3 +RIGOROUSLY 1 0 1 1 +RIGOROUS 1 0 1 1 +RIGIDLY 1 0 1 1 +RIGIDITY 2 0 2 2 +RIGID 1 0 1 1 +RIGHTS 1 0 1 1 +RIGHTLY 1 0 1 1 +RIGHTEOUSNESS 1 0 1 1 +RIGHTEOUS 1 0 1 1 +RIGHT 25 0 25 25 +RIGGING 1 0 1 1 +RIFLES 1 0 1 1 +RIDGE 1 0 1 1 +RIDES 1 0 1 1 +RIDE 1 0 1 1 +RIDDLE 1 0 1 1 +RICHLY 1 0 1 1 +RICHEST 1 0 1 1 +RICHER 1 0 1 1 +RICH 11 0 11 11 +RHYTHM 1 0 1 1 +RHONE 1 0 1 1 +REYNOLDS 2 0 2 2 +REWARDED 2 0 2 2 +REWARD 3 0 3 3 +REVOLVING 1 0 1 1 +REVIVES 1 0 1 1 +REVERT 1 0 1 1 +REVERSED 1 0 1 1 +REVERSAL 1 0 1 1 +REVERIE 2 0 2 2 +REVEREND 1 0 1 1 +REVERENCE 1 0 1 1 +REVENGE 3 0 3 3 +REVELATION 1 0 1 1 +REVEAL 1 0 1 1 +RETURNING 1 0 1 1 +RETRIEVE 1 0 1 1 +RETREATED 1 0 1 1 +RETREAT 3 0 3 3 +RETRACE 1 0 1 1 +RETIREMENT 2 0 2 2 +RETIRED 2 0 2 2 +RETIRE 1 0 1 1 +RETAINERS 1 0 1 1 +RETAINER 1 0 1 1 +RETAINED 3 0 3 3 +RETAIN 1 0 1 1 +RESURRECTION 3 0 3 3 +RESUMED 2 0 2 2 +RESULTS 4 0 4 4 +RESULTED 3 0 3 3 +RESULT 5 0 5 5 +RESTRAINED 1 0 1 1 +RESTORING 1 0 1 1 +RESTORED 2 0 2 2 +RESTLESS 4 0 4 4 +RESTED 2 0 2 2 +RESPONSIBLE 2 0 2 2 +RESPONSES 1 0 1 1 +RESPONDED 3 0 3 3 +RESPECTS 1 0 1 1 +RESPECTING 1 0 1 1 +RESPECT 3 0 3 3 +RESOURCES 2 0 2 2 +RESOUNDING 1 0 1 1 +RESORT 2 0 2 2 +RESOLVED 3 0 3 3 +RESOLVE 2 0 2 2 +RESOLUTIONS 1 0 1 1 +RESOLUTION 1 0 1 1 +RESOLUTE 1 0 1 1 +RESISTANCE 1 0 1 1 +RESIST 1 0 1 1 +RESIGNATION 2 0 2 2 +RESIDENCES 1 0 1 1 +RESIDENCE 3 0 3 3 +RESIDE 1 0 1 1 +RESERVOIR 1 0 1 1 +RESERVED 1 0 1 1 +RESERVE 2 0 2 2 +RESENTFUL 1 0 1 1 +RESENTED 1 0 1 1 +RESEMBLING 2 0 2 2 +RESEMBLED 1 0 1 1 +RESEMBLE 1 0 1 1 +RESEMBLANCE 2 0 2 2 +RESCUE 2 0 2 2 +REQUISITION 1 0 1 1 +REQUIRING 1 0 1 1 +REQUIREMENTS 1 0 1 1 +REQUIRED 3 0 3 3 +REQUIRE 4 0 4 4 +REQUEST 1 0 1 1 +REPUTE 2 0 2 2 +REPUTATION 1 0 1 1 +REPUGNANT 1 0 1 1 +REPUBLISH 1 0 1 1 +REPUBLICAN 1 0 1 1 +REPUBLIC 5 0 5 5 +REPTILES 2 0 2 2 +REPROOF 1 0 1 1 +REPRODUCE 1 0 1 1 +REPROACHING 1 0 1 1 +REPROACHFULLY 1 0 1 1 +REPROACH 2 0 2 2 +REPRESS 2 0 2 2 
+REPRESENTS 1 0 1 1 +REPRESENTING 1 0 1 1 +REPRESENTATIVE 1 0 1 1 +REPRESENTATION 1 0 1 1 +REPRESENT 2 0 2 2 +REPOSE 2 0 2 2 +REPORT 2 0 2 2 +REPLY 7 0 7 7 +REPLIED 20 0 20 20 +REPLACES 1 0 1 1 +REPLACE 1 0 1 1 +REPETITION 1 0 1 1 +REPENTING 1 0 1 1 +REPENTANCE 1 0 1 1 +REPENT 1 0 1 1 +REPELLENT 1 0 1 1 +REPELLED 1 0 1 1 +REPEATED 1 0 1 1 +REPAST 1 0 1 1 +REPARTEES 1 0 1 1 +REPAIRS 1 0 1 1 +REPAIRED 1 0 1 1 +RENTED 1 0 1 1 +RENTAL 1 0 1 1 +RENT 1 0 1 1 +RENEWED 2 0 2 2 +RENEWABLE 1 0 1 1 +RENDING 1 0 1 1 +RENDERING 2 0 2 2 +RENDERED 2 0 2 2 +RENDER 3 0 3 3 +REMOTENESS 1 0 1 1 +REMOTE 2 0 2 2 +REMONSTRANCE 1 0 1 1 +REMNANT 2 0 2 2 +REMISSION 2 0 2 2 +REMIND 1 0 1 1 +REMEMBRANCE 2 0 2 2 +REMEMBERS 1 0 1 1 +REMEMBERING 3 0 3 3 +REMEDY 1 0 1 1 +REMARKS 1 0 1 1 +REMARKING 1 0 1 1 +REMARKABLE 2 0 2 2 +REMAINS 3 0 3 3 +RELY 1 0 1 1 +RELUCTANTLY 1 0 1 1 +RELINQUISHED 1 0 1 1 +RELIGIOUS 1 0 1 1 +RELIGION 4 0 4 4 +RELIEVE 2 0 2 2 +RELIEF 1 0 1 1 +RELIANCE 1 0 1 1 +RELIABLE 1 0 1 1 +RELATIVES 1 0 1 1 +RELATIVE 1 0 1 1 +RELATIONSHIP 1 0 1 1 +RELATIONS 2 0 2 2 +RELATION 4 0 4 4 +RELATED 3 0 3 3 +RELATE 2 0 2 2 +RELAPSES 1 0 1 1 +REJOICING 2 0 2 2 +REJOICED 1 0 1 1 +REJOICE 5 0 5 5 +REINS 1 0 1 1 +REIGNS 1 0 1 1 +REIGN 1 0 1 1 +REGULATOR 1 0 1 1 +REGULATIONS 1 0 1 1 +REGULATION 1 0 1 1 +REGULATED 1 0 1 1 +REGULARLY 1 0 1 1 +REGULARITY 1 0 1 1 +REGULAR 1 0 1 1 +REGRET 1 0 1 1 +REGISTRATION 1 0 1 1 +REGISTERS 1 0 1 1 +REGISTERED 1 0 1 1 +REGIONS 1 0 1 1 +REGION 1 0 1 1 +REGIMENTS 1 0 1 1 +REGIMENT 1 0 1 1 +REGARDS 2 0 2 2 +REGARDLESS 1 0 1 1 +REGARDED 2 0 2 2 +REGARD 5 0 5 5 +REGAINING 1 0 1 1 +REFUSING 2 0 2 2 +REFUSAL 1 0 1 1 +REFUGEES 1 0 1 1 +REFUGE 1 0 1 1 +REFRESHING 1 0 1 1 +REFRESH 2 0 2 2 +REFRAINED 1 0 1 1 +REFORMS 1 0 1 1 +REFORM 1 0 1 1 +REFLECTIONS 1 0 1 1 +REFLECTION 4 0 4 4 +REFLECTED 3 0 3 3 +REFLECT 1 0 1 1 +REFINEMENTS 1 0 1 1 +REFINEMENT 1 0 1 1 +REFINED 1 0 1 1 +REFERRING 2 0 2 2 +REFER 2 0 2 2 +REED 1 0 1 1 +REDUCED 1 0 1 1 +REDOUBLES 1 0 1 1 +REDOUBLED 1 0 1 1 +REDMAN'S 1 0 1 1 +REDEEMER 1 0 1 1 +RECUR 1 0 1 1 +RECTOR 2 0 2 2 +RECREATION 2 0 2 2 +RECOVERY 1 0 1 1 +RECOVERING 1 0 1 1 +RECOVERED 1 0 1 1 +RECOVER 1 0 1 1 +RECONCILIATION 1 0 1 1 +RECOLLECTIONS 1 0 1 1 +RECOILED 2 0 2 2 +RECOGNIZE 4 0 4 4 +RECOGNITION 9 0 9 9 +RECLINING 1 0 1 1 +RECKONING 2 0 2 2 +RECKONED 1 0 1 1 +RECKON 1 0 1 1 +RECKLESS 1 0 1 1 +RECITED 5 0 5 5 +RECITE 2 0 2 2 +RECESSES 1 0 1 1 +RECEPTION 4 0 4 4 +RECENTLY 1 0 1 1 +RECENT 2 0 2 2 +RECEIVING 2 0 2 2 +RECEIVES 1 0 1 1 +RECEIVER 1 0 1 1 +RECEIVED 9 0 9 9 +RECEIVE 3 0 3 3 +RECAPTURED 1 0 1 1 +RECALLING 1 0 1 1 +RECALLED 4 0 4 4 +RECALL 1 0 1 1 +REBUKES 1 0 1 1 +REBUKE 1 0 1 1 +REBELLION 2 0 2 2 +REBEL 1 0 1 1 +REBATE 1 0 1 1 +REASSURED 1 0 1 1 +REASONS 1 0 1 1 +REASONING 1 0 1 1 +REASON 19 0 19 19 +REAR 1 0 1 1 +REALLY 10 0 10 10 +REALIZED 2 0 2 2 +REALITY 8 0 8 8 +REAL 16 0 16 16 +READING 4 0 4 4 +READINESS 1 0 1 1 +READILY 2 0 2 2 +REACHING 2 0 2 2 +REACHED 12 0 12 12 +REACH 3 0 3 3 +RAVISHING 1 0 1 1 +RAVING 1 0 1 1 +RAVINES 1 0 1 1 +RATS 1 0 1 1 +RATIFY 1 0 1 1 +RATIFICATION 1 0 1 1 +RATHER 23 0 23 23 +RATED 2 0 2 2 +RATE 7 0 7 7 +RASH 1 0 1 1 +RASCAL 1 0 1 1 +RARELY 1 0 1 1 +RARE 5 0 5 5 +RAPTUROUS 1 0 1 1 +RAPTURES 1 0 1 1 +RAPIDS 3 0 3 3 +RAPIDLY 3 0 3 3 +RAPIDITY 2 0 2 2 +RAPID 3 0 3 3 +RAPHAEL 1 0 1 1 +RAOUL 3 0 3 3 +RANSOM 1 0 1 1 +RANKING 1 0 1 1 +RANKED 1 0 1 1 +RANK 5 0 5 5 +RANGERS 1 0 1 1 +RANGED 1 0 1 1 +RANGE 4 0 4 4 +RANG 4 0 4 4 +RANCOR 1 0 1 1 +RAN 12 0 12 12 +RAMPART 1 0 1 1 +RALPH 2 0 2 2 +RAISED 6 0 6 6 +RAISE 1 0 1 1 +RAINS 1 0 1 1 
+RAINDROPS 1 0 1 1 +RAINBOW 1 0 1 1 +RAIN 3 0 3 3 +RAILROADS 1 0 1 1 +RAGS 2 0 2 2 +RAGGED 1 0 1 1 +RAGE 3 0 3 3 +RAFT 7 0 7 7 +RADIE 2 0 2 2 +RADICALS 1 0 1 1 +RADICALISM 1 0 1 1 +RADIANCE 1 0 1 1 +RACKED 1 0 1 1 +RACK 1 0 1 1 +RACHEL'S 1 0 1 1 +RACES 6 0 6 6 +RACE 1 0 1 1 +RABBLE 1 0 1 1 +RABBIT 2 0 2 2 +QUOTE 2 0 2 2 +QUIVERING 2 0 2 2 +QUIVERED 1 0 1 1 +QUITTING 1 0 1 1 +QUITTED 1 0 1 1 +QUITE 29 0 29 29 +QUINSON 1 0 1 1 +QUILT 3 0 3 3 +QUIETLY 5 0 5 5 +QUIET 4 0 4 4 +QUICKLY 5 0 5 5 +QUICK 6 0 6 6 +QUESTIONS 4 0 4 4 +QUESTIONING 1 0 1 1 +QUESTIONED 2 0 2 2 +QUESTION 12 0 12 12 +QUEST 1 0 1 1 +QUERIED 1 0 1 1 +QUEER 3 0 3 3 +QUEENSTOWN 1 0 1 1 +QUEENS 1 0 1 1 +QUEEN'S 1 0 1 1 +QUEEN 8 0 8 8 +QUASH 1 0 1 1 +QUARTERS 3 0 3 3 +QUARTER 7 0 7 7 +QUARRY 2 0 2 2 +QUARREL 1 0 1 1 +QUANTITY 4 0 4 4 +QUANTITIES 1 0 1 1 +QUALITY 1 0 1 1 +QUALITIES 3 0 3 3 +QUALIFICATIONS 2 0 2 2 +QUALIFICATION 1 0 1 1 +QUAKE 1 0 1 1 +QUAINT 1 0 1 1 +QUADRILLE 2 0 2 2 +QUADRANGLE 1 0 1 1 +PYTHON 1 0 1 1 +PYRAMIDS 2 0 2 2 +PUZZLED 1 0 1 1 +PUSHED 2 0 2 2 +PUSH 2 0 2 2 +PURSUITS 3 0 3 3 +PURSUIT 2 0 2 2 +PURSUER 1 0 1 1 +PURSUED 1 0 1 1 +PURSUE 1 0 1 1 +PURRING 1 0 1 1 +PURPOSES 2 0 2 2 +PURPLE 1 0 1 1 +PURITY 2 0 2 2 +PURELY 1 0 1 1 +PURCHASED 1 0 1 1 +PUPIL 1 0 1 1 +PUNISHMENT 4 0 4 4 +PUNISHED 4 0 4 4 +PUNCH 1 0 1 1 +PUMP 1 0 1 1 +PULPIT 1 0 1 1 +PULLING 2 0 2 2 +PULLED 2 0 2 2 +PULL 1 0 1 1 +PUFFY 1 0 1 1 +PUBLISH 1 0 1 1 +PUBLIC 13 0 13 13 +PSYCHOLOGY 1 0 1 1 +PSYCHE 2 0 2 2 +PRYNNE 4 0 4 4 +PRUDENT 2 0 2 2 +PROWESS 1 0 1 1 +PROW 1 0 1 1 +PROVOCATION 1 0 1 1 +PROVISIONALLY 1 0 1 1 +PROVISION 2 0 2 2 +PROVINCES 2 0 2 2 +PROVINCE 2 0 2 2 +PROVIDING 1 0 1 1 +PROVIDED 3 0 3 3 +PROVEN 1 0 1 1 +PROVE 4 0 4 4 +PROUDLY 1 0 1 1 +PROUD 5 0 5 5 +PROTESTED 1 0 1 1 +PROTEST 1 0 1 1 +PROTECTOR 1 0 1 1 +PROTECTION 3 0 3 3 +PROTECTING 1 0 1 1 +PROTECTED 1 0 1 1 +PROTECT 4 0 4 4 +PROSTRATION 2 0 2 2 +PROSPECTS 1 0 1 1 +PROSELYTES 1 0 1 1 +PROSECUTION 1 0 1 1 +PROSECUTE 1 0 1 1 +PROSE 1 0 1 1 +PROSCRIPTION 1 0 1 1 +PROSCRIBED 1 0 1 1 +PROPRIETY 1 0 1 1 +PROPRIETORS 1 0 1 1 +PROPRIETOR 1 0 1 1 +PROPOSED 2 0 2 2 +PROPOSE 1 0 1 1 +PROPOSALS 1 0 1 1 +PROPORTIONS 3 0 3 3 +PROPHETS 1 0 1 1 +PROPHET 1 0 1 1 +PROPERTY 6 0 6 6 +PROPERTIES 1 0 1 1 +PROPERLY 4 0 4 4 +PROPER 3 0 3 3 +PROPENSITIES 1 0 1 1 +PROOFS 4 0 4 4 +PROOF 3 0 3 3 +PRONUNCIATION 1 0 1 1 +PRONOUNCED 4 0 4 4 +PROMPTLY 1 0 1 1 +PROMPT 1 0 1 1 +PROMOTING 1 0 1 1 +PROMOTED 1 0 1 1 +PROMISES 2 0 2 2 +PROMISED 4 0 4 4 +PROMISE 4 0 4 4 +PROMINENT 2 0 2 2 +PROLIFIC 1 0 1 1 +PROJECTION 1 0 1 1 +PROJECT 2 0 2 2 +PROGRESSING 1 0 1 1 +PROGRESS 6 0 6 6 +PROGRAMME 1 0 1 1 +PROFOUND 2 0 2 2 +PROFITABLE 1 0 1 1 +PROFESSOR 6 0 6 6 +PROFESSIONS 1 0 1 1 +PROFESSION 1 0 1 1 +PROFESSING 1 0 1 1 +PROFESSED 1 0 1 1 +PRODUCTIVE 1 0 1 1 +PRODUCTION 1 0 1 1 +PRODUCT 1 0 1 1 +PRODUCING 2 0 2 2 +PRODUCED 5 0 5 5 +PRODUCE 3 0 3 3 +PROCOPE 2 0 2 2 +PROCESSION 1 0 1 1 +PROCESSES 1 0 1 1 +PROCESS 3 0 3 3 +PROCEEDINGS 2 0 2 2 +PROCEEDING 2 0 2 2 +PROCEEDED 2 0 2 2 +PROBLEM 4 0 4 4 +PROBING 1 0 1 1 +PROBABLY 10 0 10 10 +PROBABLE 2 0 2 2 +PRO 2 0 2 2 +PRIZE 1 0 1 1 +PRIVILEGE 3 0 3 3 +PRIVATION 1 0 1 1 +PRIVATELY 1 0 1 1 +PRIVATE 11 0 11 11 +PRIVACY 1 0 1 1 +PRISTINE 1 0 1 1 +PRISONER 4 0 4 4 +PRISON 4 0 4 4 +PRIOR 1 0 1 1 +PRINTING 1 0 1 1 +PRINTER 2 0 2 2 +PRINCIPLES 2 0 2 2 +PRINCESSES 2 0 2 2 +PRINCESS 9 0 9 9 +PRINCES 2 0 2 2 +PRINCE 3 0 3 3 +PRIMLY 1 0 1 1 +PRIMITIVE 2 0 2 2 +PRIMATE 1 0 1 1 +PRIMARY 1 0 1 1 +PRIMARILY 1 0 1 1 +PRIESTHOOD 1 0 1 1 +PRIEST 1 0 1 1 +PRIDE 9 0 9 9 +PRICE 2 0 2 2 +PREY 2 
0 2 2 +PREVIOUSLY 2 0 2 2 +PREVIOUS 2 0 2 2 +PREVENTS 1 0 1 1 +PREVENTING 1 0 1 1 +PREVAILING 1 0 1 1 +PREVAILED 3 0 3 3 +PREVAIL 1 0 1 1 +PRETTY 10 0 10 10 +PRETTIEST 1 0 1 1 +PRETENSION 1 0 1 1 +PRETENDED 1 0 1 1 +PRESUMED 1 0 1 1 +PRESUMABLY 1 0 1 1 +PRESSURE 4 0 4 4 +PRESSING 1 0 1 1 +PRESSED 2 0 2 2 +PRESS 2 0 2 2 +PRESIDENT 5 0 5 5 +PRESIDED 1 0 1 1 +PRESERVED 1 0 1 1 +PRESERVE 1 0 1 1 +PRESENTS 3 0 3 3 +PRESENTLY 7 0 7 7 +PRESENTING 2 0 2 2 +PRESENTED 1 0 1 1 +PRESENCE 6 0 6 6 +PRESCRIBED 1 0 1 1 +PREROGATIVES 1 0 1 1 +PREPARING 1 0 1 1 +PREPARED 3 0 3 3 +PREPARE 1 0 1 1 +PREOCCUPIED 1 0 1 1 +PREOCCUPATION 1 0 1 1 +PREMISES 1 0 1 1 +PRELIMINARY 1 0 1 1 +PREFERENCE 1 0 1 1 +PREDOMINATE 1 0 1 1 +PREDOMINANCE 1 0 1 1 +PREDICTED 1 0 1 1 +PREDATORY 2 0 2 2 +PRECONCEIVED 1 0 1 1 +PRECISION 1 0 1 1 +PRECISELY 2 0 2 2 +PRECIPITATED 1 0 1 1 +PRECIPITATE 1 0 1 1 +PRECIOUS 3 0 3 3 +PRECAUTION 1 0 1 1 +PREACHER 1 0 1 1 +PREACH 3 0 3 3 +PRAYERS 1 0 1 1 +PRAYED 1 0 1 1 +PRAY 7 0 7 7 +PRATTLED 1 0 1 1 +PRAISES 2 0 2 2 +PRAISED 1 0 1 1 +PRAISE 3 0 3 3 +PRAIRIES 2 0 2 2 +PRAIRIE 2 0 2 2 +PRACTISED 1 0 1 1 +PRACTICAL 6 0 6 6 +POYSER 9 0 9 9 +POWERS 3 0 3 3 +POWERLESS 1 0 1 1 +POWERFUL 2 0 2 2 +POWDERY 1 0 1 1 +POWDER 8 0 8 8 +POVERTY 2 0 2 2 +POURING 1 0 1 1 +POURED 3 0 3 3 +POUR 2 0 2 2 +POUNDED 1 0 1 1 +POUND 4 0 4 4 +POTFULS 1 0 1 1 +POTATOES 1 0 1 1 +POT 2 0 2 2 +POSTS 1 0 1 1 +POSTPONEMENT 1 0 1 1 +POSTERITY 2 0 2 2 +POSTED 1 0 1 1 +POST 3 0 3 3 +POSSIBLY 1 0 1 1 +POSSIBLE 6 0 6 6 +POSSESSING 1 0 1 1 +POSSE 2 0 2 2 +POSITIVELY 3 0 3 3 +POSITIVE 1 0 1 1 +POSITION 9 0 9 9 +PORTRAIT 1 0 1 1 +PORTIONS 3 0 3 3 +PORTION 2 0 2 2 +PORTAL 1 0 1 1 +POPULATION 3 0 3 3 +POPULATED 1 0 1 1 +POPULAR 7 0 7 7 +POPPIES 1 0 1 1 +POPPED 1 0 1 1 +POORLY 1 0 1 1 +POOLS 1 0 1 1 +PONY 1 0 1 1 +PONDS 1 0 1 1 +PONDERING 1 0 1 1 +POLLY'S 3 0 3 3 +POLLY 4 0 4 4 +POLITICAL 4 0 4 4 +POLITELY 2 0 2 2 +POLITE 1 0 1 1 +POLISHED 3 0 3 3 +POLISH 1 0 1 1 +POLICE 1 0 1 1 +POLE 2 0 2 2 +POLAR 1 0 1 1 +POISON 1 0 1 1 +POISED 1 0 1 1 +POINTS 3 0 3 3 +POINTING 4 0 4 4 +POINTEDLY 1 0 1 1 +POINTED 3 0 3 3 +POETRY 1 0 1 1 +POETIC 1 0 1 1 +POETESS 1 0 1 1 +POET 2 0 2 2 +POEMS 1 0 1 1 +POEM 3 0 3 3 +POCKETS 3 0 3 3 +POCKET 3 0 3 3 +PLURALITY 1 0 1 1 +PLUNGES 1 0 1 1 +PLUNDER 2 0 2 2 +PLUMES 1 0 1 1 +PLUCKING 1 0 1 1 +PLOT 3 0 3 3 +PLIABLE 1 0 1 1 +PLESIOSAURUS 1 0 1 1 +PLENTY 1 0 1 1 +PLENTIFUL 1 0 1 1 +PLEASURE 5 0 5 5 +PLEASING 1 0 1 1 +PLEASES 3 0 3 3 +PLEASED 4 0 4 4 +PLEASE 11 0 11 11 +PLEASANTLY 2 0 2 2 +PLEASANT 5 0 5 5 +PLEADED 1 0 1 1 +PLAYTHINGS 3 0 3 3 +PLAYS 1 0 1 1 +PLAYING 5 0 5 5 +PLAYER 2 0 2 2 +PLAYED 3 0 3 3 +PLATTERS 1 0 1 1 +PLATONIC 1 0 1 1 +PLATO'S 1 0 1 1 +PLATO 6 0 6 6 +PLATFORM 3 0 3 3 +PLATE 1 0 1 1 +PLASTER 1 0 1 1 +PLANTS 4 0 4 4 +PLANTED 3 0 3 3 +PLANTATIONS 1 0 1 1 +PLANT 7 0 7 7 +PLANS 2 0 2 2 +PLANNED 2 0 2 2 +PLAN 6 0 6 6 +PLAINER 1 0 1 1 +PLAIN 4 0 4 4 +PLACING 2 0 2 2 +PLACIDITY 1 0 1 1 +PLACID 1 0 1 1 +PLACED 8 0 8 8 +PITYING 1 0 1 1 +PITY 6 0 6 6 +PITIABLE 1 0 1 1 +PITH 1 0 1 1 +PITCHED 1 0 1 1 +PIT 1 0 1 1 +PISTOL 1 0 1 1 +PIPT 1 0 1 1 +PIPE 2 0 2 2 +PINKIES 3 0 3 3 +PINK 2 0 2 2 +PINIONED 1 0 1 1 +PINING 1 0 1 1 +PINES 1 0 1 1 +PINE 1 0 1 1 +PILLION 1 0 1 1 +PILLARS 1 0 1 1 +PILGRIM'S 1 0 1 1 +PILGRIM 1 0 1 1 +PILED 1 0 1 1 +PIGMENT 1 0 1 1 +PIETY 1 0 1 1 +PIECES 3 0 3 3 +PICTURESQUENESS 1 0 1 1 +PICTURES 3 0 3 3 +PICTURE 6 0 6 6 +PICNIC 1 0 1 1 +PICKED 3 0 3 3 +PIAZZA 3 0 3 3 +PIANO 4 0 4 4 +PHYSIOLOGY 1 0 1 1 +PHYSIOLOGICAL 1 0 1 1 +PHYSICS 1 0 1 1 +PHYSICAL 2 0 2 2 +PHRONSIE 6 0 6 6 +PHRASE 3 0 3 3 
+PHOENICIAN 1 0 1 1 +PHILOSOPHY 2 0 2 2 +PHILOSOPHICAL 1 0 1 1 +PHILOSOPHER 5 0 5 5 +PHILOLOGIST 1 0 1 1 +PHILIP'S 1 0 1 1 +PHILIP 9 0 9 9 +PHENOMENON 1 0 1 1 +PHENOMENA 1 0 1 1 +PHEASANT 1 0 1 1 +PHASES 1 0 1 1 +PHASE 1 0 1 1 +PHANTOM 1 0 1 1 +PEWTER 2 0 2 2 +PET 1 0 1 1 +PERVERSE 1 0 1 1 +PERVADED 1 0 1 1 +PERTH 1 0 1 1 +PERSUASIVE 1 0 1 1 +PERSUADED 1 0 1 1 +PERSUADE 1 0 1 1 +PERSPIRATION 1 0 1 1 +PERSONS 13 0 13 13 +PERSONALLY 2 0 2 2 +PERSONAGE 2 0 2 2 +PERSEVERED 1 0 1 1 +PERSECUTORS 1 0 1 1 +PERSECUTION 2 0 2 2 +PERSECUTED 1 0 1 1 +PERPLEXITY 1 0 1 1 +PERPLEXED 2 0 2 2 +PERPETUATE 1 0 1 1 +PERPETUAL 1 0 1 1 +PERNICIOUS 1 0 1 1 +PERMITTING 1 0 1 1 +PERMITTED 2 0 2 2 +PERMIT 1 0 1 1 +PERMISSION 1 0 1 1 +PERMANENT 2 0 2 2 +PERISHES 1 0 1 1 +PERIODICALS 1 0 1 1 +PERIOD 8 0 8 8 +PERILS 1 0 1 1 +PERIL 2 0 2 2 +PERHAPS 17 0 17 17 +PERFORMING 1 0 1 1 +PERFORMED 1 0 1 1 +PERFORM 3 0 3 3 +PERFECTLY 8 0 8 8 +PERFECTION 4 0 4 4 +PERCY 1 0 1 1 +PERCHANCE 1 0 1 1 +PERCH 2 0 2 2 +PERCEPTION 2 0 2 2 +PERCEIVING 1 0 1 1 +PERCEIVES 1 0 1 1 +PERCEIVED 2 0 2 2 +PERCEIVE 2 0 2 2 +PEPPERS 1 0 1 1 +PEPPERED 1 0 1 1 +PEPPER 1 0 1 1 +PEOPLE 36 0 36 36 +PENSIVE 1 0 1 1 +PENSION 1 0 1 1 +PENINSULA 2 0 2 2 +PENETRATING 1 0 1 1 +PENETRATE 2 0 2 2 +PENDULOUS 1 0 1 1 +PENCILLED 1 0 1 1 +PENCIL 2 0 2 2 +PENANCE 1 0 1 1 +PENALTY 2 0 2 2 +PENAL 1 0 1 1 +PELL 1 0 1 1 +PEERING 3 0 3 3 +PEEPED 2 0 2 2 +PECUNIARY 6 0 6 6 +PECULIARLY 1 0 1 1 +PECULIAR 1 0 1 1 +PEARLY 2 0 2 2 +PEARL'S 1 0 1 1 +PEAKED 1 0 1 1 +PEACEFUL 1 0 1 1 +PEACEABLE 1 0 1 1 +PEACE 14 0 14 14 +PAYMENT 1 0 1 1 +PAYING 1 0 1 1 +PAY 3 0 3 3 +PAVEMENT 1 0 1 1 +PAUSED 4 0 4 4 +PAUSE 1 0 1 1 +PATTING 1 0 1 1 +PATRONIZING 1 0 1 1 +PATRON 1 0 1 1 +PATRIARCHAL 1 0 1 1 +PATHS 1 0 1 1 +PATHOLOGICAL 1 0 1 1 +PATH 4 0 4 4 +PATCHWORK 4 0 4 4 +PATCHING 2 0 2 2 +PATCHES 2 0 2 2 +PATCHED 1 0 1 1 +PASTNESS 1 0 1 1 +PASTEBOARD 1 0 1 1 +PAST 12 0 12 12 +PASSIONS 1 0 1 1 +PASSIONATE 1 0 1 1 +PASSING 3 0 3 3 +PASSES 1 0 1 1 +PASSER 1 0 1 1 +PASSED 15 0 15 15 +PASSAGES 1 0 1 1 +PASS 5 0 5 5 +PARTY 9 0 9 9 +PARTS 7 0 7 7 +PARTOOK 2 0 2 2 +PARTNER 1 0 1 1 +PARTLY 7 0 7 7 +PARTITION 1 0 1 1 +PARTISAN 1 0 1 1 +PARTING 2 0 2 2 +PARTIES 4 0 4 4 +PARTICULARS 2 0 2 2 +PARTICULARLY 6 0 6 6 +PARTICULAR 4 0 4 4 +PARTIALLY 1 0 1 1 +PARTED 2 0 2 2 +PART 22 0 22 22 +PARSONS 2 0 2 2 +PARSONAGE 1 0 1 1 +PARROT 2 0 2 2 +PARRIED 1 0 1 1 +PARLIAMENTS 1 0 1 1 +PARLIAMENTARY 1 0 1 1 +PARLIAMENT 5 0 5 5 +PARK 1 0 1 1 +PARIS 9 0 9 9 +PARENTS 4 0 4 4 +PARENT 2 0 2 2 +PARDON 1 0 1 1 +PARAPHERNALIA 2 0 2 2 +PARAGRAPH 1 0 1 1 +PAPERS 8 0 8 8 +PAPER 8 0 8 8 +PANTOMIME 1 0 1 1 +PANTING 1 0 1 1 +PANTED 1 0 1 1 +PANGS 1 0 1 1 +PANG 1 0 1 1 +PANES 1 0 1 1 +PANEL 1 0 1 1 +PAN 1 0 1 1 +PALM 3 0 3 3 +PALINGS 1 0 1 1 +PALE 8 0 8 8 +PALAIS 1 0 1 1 +PALACE 4 0 4 4 +PAIR 5 0 5 5 +PAINTING 2 0 2 2 +PAINTER 2 0 2 2 +PAINTED 4 0 4 4 +PAINS 2 0 2 2 +PAINFULLY 1 0 1 1 +PAINFUL 3 0 3 3 +PAIL 1 0 1 1 +PAID 7 0 7 7 +PAGES 1 0 1 1 +PAGE 2 0 2 2 +PACKING 1 0 1 1 +PACKET 1 0 1 1 +PACKED 1 0 1 1 +PACK 1 0 1 1 +PACING 1 0 1 1 +PACIFIED 1 0 1 1 +PACED 3 0 3 3 +OZMA 1 0 1 1 +OZ 4 0 4 4 +OWNER 1 0 1 1 +OWNED 2 0 2 2 +OWN 69 0 69 69 +OWLS 1 0 1 1 +OWING 3 0 3 3 +OWEN 1 0 1 1 +OVERWROUGHT 1 0 1 1 +OVERWHELMED 1 0 1 1 +OVERWHELM 1 0 1 1 +OVERTHROW 1 0 1 1 +OVERSTATEMENT 1 0 1 1 +OVERRATED 1 0 1 1 +OVERLOOKER 1 0 1 1 +OVERLOOKED 1 0 1 1 +OVERLEAN 1 0 1 1 +OVERHEAD 1 0 1 1 +OVERHANGING 2 0 2 2 +OVERFLOWING 1 0 1 1 +OVERCOAT 1 0 1 1 +OVERBEARING 1 0 1 1 +OVAL 2 0 2 2 +OUTWARD 1 0 1 1 +OUTSTRIP 1 0 1 1 +OUTSTRETCHED 2 0 2 2 +OUTSKIRTS 
1 0 1 1 +OUTSIDE 4 0 4 4 +OUTSET 1 0 1 1 +OUTRIGHT 1 0 1 1 +OUTLINED 1 0 1 1 +OUTLINE 2 0 2 2 +OUTLAWS 3 0 3 3 +OUTFIT 1 0 1 1 +OUTER 2 0 2 2 +OUTCRY 1 0 1 1 +OUTCAST 1 0 1 1 +OURSELVES 6 0 6 6 +OURS 2 0 2 2 +OUNCE 2 0 2 2 +OUGHT 10 0 10 10 +OTHERWISE 5 0 5 5 +OTHER'S 2 0 2 2 +OSTENSIBLY 2 0 2 2 +OSTENSIBLE 1 0 1 1 +OSCILLATION 1 0 1 1 +ORNAMENTS 1 0 1 1 +ORNAMENTAL 2 0 2 2 +ORNAMENT 3 0 3 3 +ORLEANS 1 0 1 1 +ORIGINATED 1 0 1 1 +ORIGINAL 1 0 1 1 +ORIGIN 7 0 7 7 +ORGANS 1 0 1 1 +ORGANIZED 5 0 5 5 +ORGANIZATIONS 4 0 4 4 +ORGANIZATION 3 0 3 3 +ORGAN 1 0 1 1 +ORDINARY 3 0 3 3 +ORDERLY 1 0 1 1 +ORBIT 2 0 2 2 +ORANGE 1 0 1 1 +ORACLE 1 0 1 1 +OPPRESSOR 1 0 1 1 +OPPRESSIVE 1 0 1 1 +OPPRESSION 1 0 1 1 +OPPRESSED 1 0 1 1 +OPPOSITION 4 0 4 4 +OPPOSITE 2 0 2 2 +OPPOSED 2 0 2 2 +OPPOSE 4 0 4 4 +OPPORTUNITY 4 0 4 4 +OPPORTUNITIES 2 0 2 2 +OPPORTUNE 1 0 1 1 +OPPONENT 2 0 2 2 +OPINIONS 1 0 1 1 +OPINION'S 1 0 1 1 +OPINION 9 0 9 9 +OPERATOR 1 0 1 1 +OPERATIONS 1 0 1 1 +OPERATION 2 0 2 2 +OPERATE 1 0 1 1 +OPENING 6 0 6 6 +OPENED 11 0 11 11 +OPEN 23 0 23 23 +OPAQUE 1 0 1 1 +ONWARD 4 0 4 4 +ONES 2 0 2 2 +ONCE 22 0 22 22 +OMELETTE 1 0 1 1 +OLDEST 1 0 1 1 +OLDER 3 0 3 3 +OLDEN 1 0 1 1 +OLAF 2 0 2 2 +OJO 7 0 7 7 +OHIO 1 0 1 1 +OFTEN 13 0 13 13 +OFFICIALS 3 0 3 3 +OFFICIAL 1 0 1 1 +OFFICE 11 0 11 11 +OFFERS 1 0 1 1 +OFFERING 1 0 1 1 +OFFER 1 0 1 1 +OFFENDS 1 0 1 1 +OFFENDED 1 0 1 1 +OFFALS 1 0 1 1 +OFF 25 0 25 25 +ODORS 1 0 1 1 +ODIOUS 1 0 1 1 +ODIN 1 0 1 1 +ODDLY 1 0 1 1 +ODD 3 0 3 3 +OCTOBER 1 0 1 1 +OCEANOGRAPHER 1 0 1 1 +OCEAN 5 0 5 5 +OCCURS 3 0 3 3 +OCCURRENCES 2 0 2 2 +OCCURRENCE 3 0 3 3 +OCCURRED 2 0 2 2 +OCCUR 1 0 1 1 +OCCUPY 1 0 1 1 +OCCUPIED 6 0 6 6 +OCCUPATION 2 0 2 2 +OCCUPANTS 1 0 1 1 +OCCASIONS 1 0 1 1 +OCCASIONALLY 1 0 1 1 +OCCASIONAL 1 0 1 1 +OCCASION 13 0 13 13 +OBVIOUS 3 0 3 3 +OBTAINED 1 0 1 1 +OBTAIN 3 0 3 3 +OBSTINATE 1 0 1 1 +OBSTINACY 2 0 2 2 +OBSTACLES 1 0 1 1 +OBSERVING 2 0 2 2 +OBSERVERS 1 0 1 1 +OBSERVATION 3 0 3 3 +OBSERVANCES 1 0 1 1 +OBSCURE 3 0 3 3 +OBNOXIOUS 1 0 1 1 +OBLIVION 1 0 1 1 +OBLITERATED 1 0 1 1 +OBLITERATE 1 0 1 1 +OBLIGED 1 0 1 1 +OBJECTIONS 1 0 1 1 +OBJECTION 1 0 1 1 +OBJECT 16 0 16 16 +OBEYING 2 0 2 2 +OBEYED 2 0 2 2 +OBEY 1 0 1 1 +OBEDIENCE 1 0 1 1 +OATMEAL 1 0 1 1 +OATH 1 0 1 1 +OARS 1 0 1 1 +OAK 2 0 2 2 +O'CLOCK 6 0 6 6 +NURSED 1 0 1 1 +NURSE 1 0 1 1 +NUNKIE 1 0 1 1 +NUMIDIA 1 0 1 1 +NUMERICAL 1 0 1 1 +NUMBERS 4 0 4 4 +NUMBERED 1 0 1 1 +NUMBER 6 0 6 6 +NUDITY 1 0 1 1 +NUDGED 1 0 1 1 +NOWHERE 1 0 1 1 +NOVEL 1 0 1 1 +NOURISHING 1 0 1 1 +NOTWITHSTANDING 1 0 1 1 +NOTORIOUS 1 0 1 1 +NOTIONS 1 0 1 1 +NOTION 1 0 1 1 +NOTING 1 0 1 1 +NOTICING 1 0 1 1 +NOTICED 1 0 1 1 +NOTICEABLE 1 0 1 1 +NOTICE 3 0 3 3 +NOTED 1 0 1 1 +NOTE 4 0 4 4 +NOTARY 1 0 1 1 +NOTABLE 3 0 3 3 +NORWEGIAN 1 0 1 1 +NORWAY 1 0 1 1 +NORTHERN 1 0 1 1 +NOON 3 0 3 3 +NONSENSE 1 0 1 1 +NONE 12 0 12 12 +NON 4 0 4 4 +NOMADS 1 0 1 1 +NOISILY 1 0 1 1 +NOISE 2 0 2 2 +NODS 1 0 1 1 +NOD 1 0 1 1 +NOBODY 6 0 6 6 +NOBLEST 1 0 1 1 +NOBLER 2 0 2 2 +NOBLEMAN'S 1 0 1 1 +NOBLE 10 0 10 10 +NITROGEN 1 0 1 1 +NINTH 1 0 1 1 +NINETY 2 0 2 2 +NIMBLY 1 0 1 1 +NIMBLE 1 0 1 1 +NIGHTS 3 0 3 3 +NIGHTLY 1 0 1 1 +NIGHTINGALE'S 1 0 1 1 +NIECE 1 0 1 1 +NICEST 1 0 1 1 +NICER 1 0 1 1 +NICELY 1 0 1 1 +NICE 3 0 3 3 +NEXT 12 0 12 12 +NEWSPAPER 4 0 4 4 +NEWS 2 0 2 2 +NEWLY 1 0 1 1 +NEWCOMER 1 0 1 1 +NEVERTHELESS 3 0 3 3 +NETTLES 1 0 1 1 +NETTLED 1 0 1 1 +NESTING 1 0 1 1 +NERVOUSNESS 1 0 1 1 +NERVOUSLY 1 0 1 1 +NERVOUS 4 0 4 4 +NERVES 2 0 2 2 +NEMO 4 0 4 4 +NELL 1 0 1 1 +NEITHER 9 0 9 9 +NEIGHBORS 2 0 2 2 +NEGROES 2 0 2 2 +NEGRO 1 0 1 1 
+NEGOTIATIONS 1 0 1 1 +NEGLIGENT 1 0 1 1 +NEGLECTED 2 0 2 2 +NEGLECT 1 0 1 1 +NEEDS 2 0 2 2 +NEEDING 1 0 1 1 +NEEDED 5 0 5 5 +NED 1 0 1 1 +NECESSITY 2 0 2 2 +NECESSITIES 1 0 1 1 +NECESSARY 9 0 9 9 +NECESSARILY 2 0 2 2 +NEATLY 2 0 2 2 +NEAT 1 0 1 1 +NEARLY 10 0 10 10 +NEAREST 1 0 1 1 +NEARED 1 0 1 1 +NAVY 1 0 1 1 +NAUTILUS 2 0 2 2 +NATURES 1 0 1 1 +NATURED 1 0 1 1 +NATURE 17 0 17 17 +NATURALLY 1 0 1 1 +NATURALISTS 2 0 2 2 +NATURALIST 1 0 1 1 +NATURAL 10 0 10 10 +NATIVE 5 0 5 5 +NATIONS 2 0 2 2 +NATIONAL 3 0 3 3 +NATION 2 0 2 2 +NASTY 1 0 1 1 +NARWHALE 1 0 1 1 +NARROWS 1 0 1 1 +NARROW 6 0 6 6 +NARRATIVE 2 0 2 2 +NAPIER 1 0 1 1 +NAPE 1 0 1 1 +NANCY'S 2 0 2 2 +NANCY 1 0 1 1 +NAMES 2 0 2 2 +NAMELY 2 0 2 2 +NAKEDNESS 1 0 1 1 +NAKED 1 0 1 1 +MYTHOLOGICAL 1 0 1 1 +MYTHICAL 1 0 1 1 +MYSTIFIED 1 0 1 1 +MYSTERY 5 0 5 5 +MYSTERIOUSLY 1 0 1 1 +MYSTERIOUS 3 0 3 3 +MYSELF 25 0 25 25 +MUTUAL 2 0 2 2 +MUTTON 1 0 1 1 +MUTTERING 1 0 1 1 +MUTILATION 1 0 1 1 +MUTABILITY 1 0 1 1 +MUSTARD 1 0 1 1 +MUST 66 0 66 66 +MUSICIANS 1 0 1 1 +MUSIC 6 0 6 6 +MUSHROOMS 1 0 1 1 +MUSEUM 1 0 1 1 +MURMURED 4 0 4 4 +MURMUR 2 0 2 2 +MURDERS 1 0 1 1 +MURDERERS 1 0 1 1 +MUNCHKINS 2 0 2 2 +MUNCHKIN 1 0 1 1 +MUMMERIES 1 0 1 1 +MULTIPLE 2 0 2 2 +MUFFLED 1 0 1 1 +MUDDY 1 0 1 1 +MUD 1 0 1 1 +MOWED 2 0 2 2 +MOW 1 0 1 1 +MOVING 1 0 1 1 +MOVES 1 0 1 1 +MOVEMENTS 3 0 3 3 +MOVEMENT 5 0 5 5 +MOVE 4 0 4 4 +MOUTHS 3 0 3 3 +MOUTHED 2 0 2 2 +MOUTH 5 0 5 5 +MOUSE 2 0 2 2 +MOURNFUL 1 0 1 1 +MOURN 1 0 1 1 +MOUNTAINS 2 0 2 2 +MOTTO 1 0 1 1 +MOTTLED 1 0 1 1 +MOTORS 1 0 1 1 +MOTIVES 5 0 5 5 +MOTIONLESS 1 0 1 1 +MOTIONING 1 0 1 1 +MOTIONED 2 0 2 2 +MOTION 1 0 1 1 +MOTHERS 2 0 2 2 +MOTHER'S 4 0 4 4 +MOTHER 32 0 32 32 +MOSSY 1 0 1 1 +MOSS 1 0 1 1 +MOSAIC 1 0 1 1 +MORTALS 1 0 1 1 +MORTALLY 1 0 1 1 +MORTAL 1 0 1 1 +MORROW 6 0 6 6 +MORRIS 1 0 1 1 +MORNINGS 1 0 1 1 +MORN 1 0 1 1 +MORMONISM 3 0 3 3 +MORMON 5 0 5 5 +MOREOVER 1 0 1 1 +MORE'S 1 0 1 1 +MORBID 1 0 1 1 +MORAL 1 0 1 1 +MOONLIGHT 2 0 2 2 +MOONBEAMS 1 0 1 1 +MOON 4 0 4 4 +MOOD 2 0 2 2 +MONTROSE'S 1 0 1 1 +MONTROSE 6 0 6 6 +MONTHS 4 0 4 4 +MONTH 4 0 4 4 +MONTFICHET'S 1 0 1 1 +MONTALAIS 4 0 4 4 +MONSTERS 2 0 2 2 +MONSTER 1 0 1 1 +MONSIEUR 1 0 1 1 +MONOTONOUS 1 0 1 1 +MONCEUX 1 0 1 1 +MOMENTS 5 0 5 5 +MOMENTOUS 1 0 1 1 +MOMENTARY 1 0 1 1 +MOMENT 32 0 32 32 +MOLLY 3 0 3 3 +MOLECULES 1 0 1 1 +MOISTURE 1 0 1 1 +MOIST 1 0 1 1 +MOHAMMED 1 0 1 1 +MODIFICATION 1 0 1 1 +MODEST 3 0 3 3 +MODES 2 0 2 2 +MODERNS 1 0 1 1 +MODERN 8 0 8 8 +MODERATE 2 0 2 2 +MODEL 1 0 1 1 +MODE 2 0 2 2 +MOCCASIN 1 0 1 1 +MOBS 1 0 1 1 +MOBILITY 2 0 2 2 +MOB 3 0 3 3 +MOANING 1 0 1 1 +MOAN 1 0 1 1 +MIXTURE 2 0 2 2 +MIXED 4 0 4 4 +MITIGATE 1 0 1 1 +MISUNDERSTANDING 1 0 1 1 +MISTY 1 0 1 1 +MISTRESS 10 0 10 10 +MISTAKEN 2 0 2 2 +MISTAKE 2 0 2 2 +MISSUS 23 0 23 23 +MISSOURI 6 0 6 6 +MISSIONARY 1 0 1 1 +MISSIONARIES 1 0 1 1 +MISSION 3 0 3 3 +MISSED 2 0 2 2 +MISGOVERNMENT 1 0 1 1 +MISFORTUNES 1 0 1 1 +MISFORTUNE 4 0 4 4 +MISERY 3 0 3 3 +MISERABLY 1 0 1 1 +MISERABLE 2 0 2 2 +MISCHIEF 1 0 1 1 +MISCHANCE 1 0 1 1 +MIRROR 2 0 2 2 +MIRACULOUSLY 1 0 1 1 +MIRACLE 2 0 2 2 +MINUTES 6 0 6 6 +MINUTE 2 0 2 2 +MINT 1 0 1 1 +MINORITY 1 0 1 1 +MINISTRY 3 0 3 3 +MINISTERS 1 0 1 1 +MINISTER 3 0 3 3 +MINIATURE 1 0 1 1 +MINGOES 1 0 1 1 +MINGLES 1 0 1 1 +MINGLED 1 0 1 1 +MINDS 3 0 3 3 +MINDFUL 1 0 1 1 +MINDED 1 0 1 1 +MILNER'S 3 0 3 3 +MILLIONS 1 0 1 1 +MILKING 1 0 1 1 +MILKED 1 0 1 1 +MILK 1 0 1 1 +MILITIA 3 0 3 3 +MILITARY 7 0 7 7 +MILE 1 0 1 1 +MILDLY 1 0 1 1 +MILD 2 0 2 2 +MILAN 1 0 1 1 +MIKE 2 0 2 2 +MIGHTY 4 0 4 4 +MIGHTILY 1 0 1 1 +MIGHT 48 0 48 48 +MIDWIFE 1 
0 1 1 +MIDST 2 0 2 2 +MIDDLE 4 0 4 4 +MIDDAY 1 0 1 1 +MICROSCOPE 1 0 1 1 +MICE 5 0 5 5 +METROPOLIS 1 0 1 1 +METHODS 3 0 3 3 +METHOD 3 0 3 3 +METAPHOR 1 0 1 1 +METAMORPHOSIS 1 0 1 1 +METALLIC 1 0 1 1 +METAL 1 0 1 1 +MESSAGE 2 0 2 2 +MESHES 1 0 1 1 +MERITS 2 0 2 2 +MERIT 2 0 2 2 +MERIDIAN 2 0 2 2 +MERELY 5 0 5 5 +MERE 4 0 4 4 +MERCHANT 1 0 1 1 +MENTIONS 1 0 1 1 +MENTIONED 5 0 5 5 +MENTION 1 0 1 1 +MENTAL 2 0 2 2 +MENIAL 1 0 1 1 +MENDING 2 0 2 2 +MEND 2 0 2 2 +MENAGERIE 1 0 1 1 +MEMORY 21 0 21 21 +MEMBERS 4 0 4 4 +MEMBER 2 0 2 2 +MELTS 1 0 1 1 +MELODY 1 0 1 1 +MELL 1 0 1 1 +MELANCHOLY 2 0 2 2 +MEETING 5 0 5 5 +MEEK 1 0 1 1 +MEDIUM 1 0 1 1 +MEDITERRANEAN 4 0 4 4 +MEDITATIVE 1 0 1 1 +MEDITATION 1 0 1 1 +MEDICINE 6 0 6 6 +MECHANICS 1 0 1 1 +MEAT 1 0 1 1 +MEASURES 2 0 2 2 +MEASURED 2 0 2 2 +MEASURE 6 0 6 6 +MEANWHILE 4 0 4 4 +MEANTIME 2 0 2 2 +MEANT 5 0 5 5 +MEANS 17 0 17 17 +MEANINGS 1 0 1 1 +MEANING 4 0 4 4 +MEALS 4 0 4 4 +MEAL 5 0 5 5 +MEADOWS 1 0 1 1 +MEADOWCROFT'S 1 0 1 1 +MAXIMUM 1 0 1 1 +MAXIMS 1 0 1 1 +MATURE 1 0 1 1 +MATTHEWS 1 0 1 1 +MATTERS 5 0 5 5 +MATTERED 1 0 1 1 +MATTER 20 0 20 20 +MATHEMATICS 1 0 1 1 +MATERIALS 2 0 2 2 +MATERIALLY 1 0 1 1 +MATERIALISM 1 0 1 1 +MATERIAL 3 0 3 3 +MATED 1 0 1 1 +MATCHLESS 1 0 1 1 +MASTERY 1 0 1 1 +MASTERPIECE 1 0 1 1 +MASTERLY 1 0 1 1 +MASTER 14 0 14 14 +MAST 2 0 2 2 +MASSES 1 0 1 1 +MASSACHUSETTS 1 0 1 1 +MASKS 1 0 1 1 +MARY 6 0 6 6 +MARVELS 1 0 1 1 +MARVELLED 1 0 1 1 +MARVEL 2 0 2 2 +MARTIN 2 0 2 2 +MARTIAL 1 0 1 1 +MARTHA 2 0 2 2 +MARSHALLED 1 0 1 1 +MARRY 1 0 1 1 +MARRIAGE 5 0 5 5 +MARQUIS 1 0 1 1 +MARKS 4 0 4 4 +MARKING 1 0 1 1 +MARKHAM 2 0 2 2 +MARKET 1 0 1 1 +MARKED 3 0 3 3 +MARK 6 0 6 6 +MARINE 2 0 2 2 +MARIE'S 1 0 1 1 +MARIE 6 0 6 6 +MARIANNE 1 0 1 1 +MARIA 1 0 1 1 +MARGOLOTTE 5 0 5 5 +MARGIN 1 0 1 1 +MARGARET 1 0 1 1 +MARCHES 1 0 1 1 +MARCHED 2 0 2 2 +MARCH 7 0 7 7 +MARBLE 2 0 2 2 +MAP 2 0 2 2 +MANUSCRIPT 2 0 2 2 +MANUFACTURER 3 0 3 3 +MANSION 1 0 1 1 +MANSERVANT 1 0 1 1 +MANOEUVRING 1 0 1 1 +MANNERS 1 0 1 1 +MANNER 14 0 14 14 +MANNA 1 0 1 1 +MANKIND 2 0 2 2 +MANIFOLD 1 0 1 1 +MANIFESTLY 1 0 1 1 +MANICAMP 1 0 1 1 +MANHOOD 1 0 1 1 +MANDIBLE 1 0 1 1 +MANAGING 1 0 1 1 +MANAGERS 1 0 1 1 +MANAGEMENT 3 0 3 3 +MANAGED 4 0 4 4 +MANAGE 1 0 1 1 +MAN'S 5 0 5 5 +MAMMY 1 0 1 1 +MALIGNITIES 1 0 1 1 +MALIGNED 1 0 1 1 +MALICIOUS 1 0 1 1 +MALICE 1 0 1 1 +MALADY 1 0 1 1 +MAKING 13 0 13 13 +MAKES 10 0 10 10 +MAJESTY'S 2 0 2 2 +MAJESTY 6 0 6 6 +MAINTAINING 1 0 1 1 +MAINTAINED 4 0 4 4 +MAINSAIL 1 0 1 1 +MAINLY 1 0 1 1 +MAID'S 1 0 1 1 +MAHOGANY 1 0 1 1 +MAGNIFIED 1 0 1 1 +MAGNIFICENT 3 0 3 3 +MAGNIFICENCE 1 0 1 1 +MAGISTRACY 1 0 1 1 +MAGICIAN 5 0 5 5 +MAGIC 4 0 4 4 +MAGAZINE 1 0 1 1 +MADNESS 1 0 1 1 +MADEMOISELLE 5 0 5 5 +MADAME'S 1 0 1 1 +MAD 3 0 3 3 +MACHINES 1 0 1 1 +MACHINE 1 0 1 1 +MABEL 1 0 1 1 +LYNCHINGS 1 0 1 1 +LYING 4 0 4 4 +LUXURIES 2 0 2 2 +LUXURIANT 1 0 1 1 +LUTHERAN 2 0 2 2 +LUTHER'S 4 0 4 4 +LUTHER 3 0 3 3 +LUSTROUS 1 0 1 1 +LUSTRE 1 0 1 1 +LURKING 1 0 1 1 +LURID 1 0 1 1 +LUNGS 1 0 1 1 +LUMPS 1 0 1 1 +LUMP 1 0 1 1 +LUMINOUS 2 0 2 2 +LUKE 1 0 1 1 +LUGUBRIOUS 1 0 1 1 +LUCY 1 0 1 1 +LUCRETIUS 1 0 1 1 +LUCID 1 0 1 1 +LOYALLY 1 0 1 1 +LOYAL 1 0 1 1 +LOWLY 2 0 2 2 +LOW 6 0 6 6 +LOVING 4 0 4 4 +LOVERS 2 0 2 2 +LOVER 1 0 1 1 +LOVELY 7 0 7 7 +LOVED 6 0 6 6 +LOVE 48 0 48 48 +LOUDNESS 1 0 1 1 +LOUDLY 2 0 2 2 +LOUDER 1 0 1 1 +LOUD 2 0 2 2 +LOTUS 1 0 1 1 +LOTS 2 0 2 2 +LOT 6 0 6 6 +LOST 12 0 12 12 +LOSING 3 0 3 3 +LOSES 2 0 2 2 +LOSE 3 0 3 3 +LORDS 1 0 1 1 +LORDLY 1 0 1 1 +LORDING 2 0 2 2 +LOPPED 1 0 1 1 +LOOSELY 1 0 1 1 +LOOKS 7 0 7 7 +LOOKING 16 0 16 16 
+LOOKED 24 0 24 24 +LONGING 2 0 2 2 +LONGER 9 0 9 9 +LONGED 1 0 1 1 +LONG 29 0 29 29 +LONELY 2 0 2 2 +LONELINESS 1 0 1 1 +LONELIER 2 0 2 2 +LONDON 3 0 3 3 +LOGICALLY 1 0 1 1 +LOGICAL 2 0 2 2 +LOG 2 0 2 2 +LOFTINESS 1 0 1 1 +LOFTIEST 1 0 1 1 +LOFT 2 0 2 2 +LODGING 1 0 1 1 +LODGE 2 0 2 2 +LOCKED 3 0 3 3 +LOCK 1 0 1 1 +LOAF 1 0 1 1 +LOADED 1 0 1 1 +LIVING 5 0 5 5 +LIVID 1 0 1 1 +LIVES 6 0 6 6 +LIVERY 1 0 1 1 +LIVERIES 2 0 2 2 +LIVERIED 1 0 1 1 +LIVELIEST 1 0 1 1 +LIVED 8 0 8 8 +LIVE 9 0 9 9 +LITERATURE 1 0 1 1 +LITERARY 4 0 4 4 +LITERALLY 2 0 2 2 +LITERAL 2 0 2 2 +LISTLESSLY 1 0 1 1 +LISTENING 3 0 3 3 +LISTENED 5 0 5 5 +LISTEN 3 0 3 3 +LIST 1 0 1 1 +LIQUID 2 0 2 2 +LIPS 4 0 4 4 +LINKS 2 0 2 2 +LINGERED 1 0 1 1 +LINES 7 0 7 7 +LINEN 2 0 2 2 +LINED 2 0 2 2 +LINDENS 1 0 1 1 +LINCOLN 2 0 2 2 +LIMITATION 1 0 1 1 +LIMIT 1 0 1 1 +LIMESTONE 1 0 1 1 +LIMBS 2 0 2 2 +LIKEWISE 1 0 1 1 +LIKES 2 0 2 2 +LIKENESS 1 0 1 1 +LIKELY 2 0 2 2 +LIKED 4 0 4 4 +LIGHTS 5 0 5 5 +LIGHTNING 1 0 1 1 +LIGHTLY 3 0 3 3 +LIGHTING 6 0 6 6 +LIGHTED 5 0 5 5 +LIFTING 2 0 2 2 +LIFTED 1 0 1 1 +LIFT 2 0 2 2 +LIFE'S 1 0 1 1 +LIFE 47 0 47 47 +LIEUTENANT 6 0 6 6 +LIES 8 0 8 8 +LIEDENBROCK 1 0 1 1 +LICHEN 1 0 1 1 +LIBRARY 3 0 3 3 +LIBERTY 3 0 3 3 +LIBERAL 1 0 1 1 +LIABLE 2 0 2 2 +LEXINGTON 1 0 1 1 +LEVIED 2 0 2 2 +LEVELS 1 0 1 1 +LEVEL 3 0 3 3 +LETTING 1 0 1 1 +LETTERS 4 0 4 4 +LETTER 12 0 12 12 +LET'S 2 0 2 2 +LET 27 0 27 27 +LEST 2 0 2 2 +LESSONS 1 0 1 1 +LESSON 1 0 1 1 +LENGTHY 1 0 1 1 +LENGTHS 1 0 1 1 +LENGTH 4 0 4 4 +LEND 2 0 2 2 +LEISURELY 1 0 1 1 +LEISURE 11 0 11 11 +LEGS 3 0 3 3 +LEGISLATURE 4 0 4 4 +LEGISLATORS 1 0 1 1 +LEGISLATIVE 1 0 1 1 +LEGATE 1 0 1 1 +LEGALITY 2 0 2 2 +LEGAL 1 0 1 1 +LEG 1 0 1 1 +LEFT 34 0 34 34 +LEECH 2 0 2 2 +LECTURES 2 0 2 2 +LECTURE 3 0 3 3 +LECOMPTON 1 0 1 1 +LEAVE 16 0 16 16 +LEATHER 1 0 1 1 +LEASH 1 0 1 1 +LEARNING 1 0 1 1 +LEARNED 4 0 4 4 +LEARN 4 0 4 4 +LEAPS 2 0 2 2 +LEAP 2 0 2 2 +LEANING 3 0 3 3 +LEANED 5 0 5 5 +LEAN 1 0 1 1 +LEAGUES 1 0 1 1 +LEAGUE 1 0 1 1 +LEADING 3 0 3 3 +LEADERS 1 0 1 1 +LEADER 2 0 2 2 +LEAD 8 0 8 8 +LAZILY 1 0 1 1 +LAYS 1 0 1 1 +LAYMAN 1 0 1 1 +LAYING 1 0 1 1 +LAY 16 0 16 16 +LAWYER 1 0 1 1 +LAWS 9 0 9 9 +LAWRENCE 2 0 2 2 +LAWN 1 0 1 1 +LAWFUL 1 0 1 1 +LAW 13 0 13 13 +LAVISHING 1 0 1 1 +LAUGHTER 2 0 2 2 +LAUGHING 6 0 6 6 +LAUGHED 6 0 6 6 +LAUGH 4 0 4 4 +LATTICE 1 0 1 1 +LATTER 9 0 9 9 +LATIN 3 0 3 3 +LATEST 1 0 1 1 +LATENT 1 0 1 1 +LATE 6 0 6 6 +LATCHED 1 0 1 1 +LAST 41 0 41 41 +LASHED 1 0 1 1 +LARGEST 2 0 2 2 +LARGER 3 0 3 3 +LARGE 16 0 16 16 +LAREN 1 0 1 1 +LARDER 1 0 1 1 +LAPSE 1 0 1 1 +LAP 3 0 3 3 +LANGUISHINGLY 1 0 1 1 +LANGUID 1 0 1 1 +LANGUAGE 11 0 11 11 +LANE 1 0 1 1 +LANDSCAPE 1 0 1 1 +LANDS 2 0 2 2 +LAMPS 3 0 3 3 +LAMP 4 0 4 4 +LAMENTATION 1 0 1 1 +LAMBS 1 0 1 1 +LAMB 1 0 1 1 +LAKES 1 0 1 1 +LAKE'S 1 0 1 1 +LAID 8 0 8 8 +LAGOON 4 0 4 4 +LADY 9 0 9 9 +LADLED 1 0 1 1 +LADIES 11 0 11 11 +LADDER 3 0 3 3 +LACKEY 1 0 1 1 +LACK 1 0 1 1 +LA 5 0 5 5 +KNOWN 15 0 15 15 +KNOWLEDGE 15 0 15 15 +KNOWING 5 0 5 5 +KNOT 1 0 1 1 +KNOCKING 1 0 1 1 +KNOCKED 4 0 4 4 +KNOCK 1 0 1 1 +KNITTED 1 0 1 1 +KNIGHT 1 0 1 1 +KNEW 25 0 25 25 +KNEES 3 0 3 3 +KNEELS 1 0 1 1 +KNEELING 1 0 1 1 +KNEE 1 0 1 1 +KITTEN 1 0 1 1 +KITES 1 0 1 1 +KITE 1 0 1 1 +KITCHEN 4 0 4 4 +KIT 1 0 1 1 +KISSING 1 0 1 1 +KISSES 1 0 1 1 +KISSED 2 0 2 2 +KISS 2 0 2 2 +KINGS 1 0 1 1 +KINGLY 1 0 1 1 +KINGDOM 4 0 4 4 +KINDS 1 0 1 1 +KINDNESS 1 0 1 1 +KINDLY 3 0 3 3 +KINDLED 3 0 3 3 +KINDER 1 0 1 1 +KIND 14 0 14 14 +KILLS 1 0 1 1 +KILLED 1 0 1 1 +KIDNAP 2 0 2 2 +KID 1 0 1 1 +KICKAPOO 1 0 1 1 +KEYNOTE 1 0 1 1 +KEY 5 0 5 5 +KETTLES 
2 0 2 2 +KETTLE 1 0 1 1 +KERCHIEFS 1 0 1 1 +KEPT 5 0 5 5 +KENNINGTON 2 0 2 2 +KENNETH 9 0 9 9 +KEEPS 3 0 3 3 +KEEPING 4 0 4 4 +KEEPER 1 0 1 1 +KEEP 10 0 10 10 +KEENNESS 2 0 2 2 +KEENER 1 0 1 1 +KEEN 3 0 3 3 +KATHLEEN 1 0 1 1 +KATE 1 0 1 1 +KANSAS 3 0 3 3 +KANE 1 0 1 1 +JUSTLY 2 0 2 2 +JUSTIFICATION 2 0 2 2 +JUSTICE 3 0 3 3 +JURISDICTION 1 0 1 1 +JUMPING 1 0 1 1 +JUMPED 1 0 1 1 +JUMP 3 0 3 3 +JUDGMENT 6 0 6 6 +JUDGES 1 0 1 1 +JUDGE 5 0 5 5 +JUDAH 1 0 1 1 +JOYOUS 1 0 1 1 +JOYCE 2 0 2 2 +JOY 4 0 4 4 +JOURNEYING 1 0 1 1 +JOURNEY 5 0 5 5 +JONES 3 0 3 3 +JOLLY 5 0 5 5 +JOKED 1 0 1 1 +JOKE 2 0 2 2 +JOINED 1 0 1 1 +JOIN 2 0 2 2 +JOHNSON 1 0 1 1 +JIB 1 0 1 1 +JEWISH 1 0 1 1 +JEWELS 3 0 3 3 +JET 1 0 1 1 +JESUS 7 0 7 7 +JERSEY 1 0 1 1 +JERK 1 0 1 1 +JENKS 1 0 1 1 +JELLIES 1 0 1 1 +JEHOVAH 1 0 1 1 +JEERED 1 0 1 1 +JEALOUS 1 0 1 1 +JAWS 2 0 2 2 +JASPER'S 2 0 2 2 +JAP 1 0 1 1 +JANUARY 2 0 2 2 +JANE'S 1 0 1 1 +JANE 4 0 4 4 +JAMES 2 0 2 2 +JAILER 5 0 5 5 +JACOB'S 2 0 2 2 +JACOB 1 0 1 1 +JACKSON 1 0 1 1 +JACKET 1 0 1 1 +J 2 0 2 2 +IVORY 1 0 1 1 +ITSELF 21 0 21 21 +ITCH 1 0 1 1 +ITALIAN 2 0 2 2 +ISSUED 2 0 2 2 +ISSUE 1 0 1 1 +ISRAEL 1 0 1 1 +ISOLATED 1 0 1 1 +ISN'T 5 0 5 5 +ISLAND 5 0 5 5 +IRWINE 1 0 1 1 +IRRITABLE 1 0 1 1 +IRRESOLUTION 1 0 1 1 +IRREPARABLE 1 0 1 1 +IRREGULARITY 2 0 2 2 +IRONING 1 0 1 1 +IRON 2 0 2 2 +IRISH 2 0 2 2 +IRIDESCENT 1 0 1 1 +IRENE 1 0 1 1 +IRELAND 1 0 1 1 +INWARDLY 1 0 1 1 +INWARD 1 0 1 1 +INVOLVING 1 0 1 1 +INVOLVES 1 0 1 1 +INVOLVED 1 0 1 1 +INVOLVE 1 0 1 1 +INVITED 4 0 4 4 +INVITATION 3 0 3 3 +INVISIBLE 1 0 1 1 +INVIOLATE 1 0 1 1 +INVIDIOUS 1 0 1 1 +INVESTIGATION 1 0 1 1 +INVENTOR 1 0 1 1 +INVENTION 1 0 1 1 +INVENTED 1 0 1 1 +INVASION 1 0 1 1 +INVARIABLY 4 0 4 4 +INVARIABLE 1 0 1 1 +INVALID 1 0 1 1 +INVADER 1 0 1 1 +INVADE 1 0 1 1 +INTRODUCTION 4 0 4 4 +INTRODUCING 1 0 1 1 +INTRODUCED 3 0 3 3 +INTRODUCE 3 0 3 3 +INTRINSIC 1 0 1 1 +INTRICATE 1 0 1 1 +INTOLERANT 1 0 1 1 +INTOLERANCY 1 0 1 1 +INTOLERABLE 1 0 1 1 +INTIMATELY 2 0 2 2 +INTIMATE 2 0 2 2 +INTERVIEWS 1 0 1 1 +INTERVIEW 3 0 3 3 +INTERSECTED 1 0 1 1 +INTERRUPTED 2 0 2 2 +INTERPRETED 1 0 1 1 +INTERPRETATION 1 0 1 1 +INTERPOSED 1 0 1 1 +INTERNAL 1 0 1 1 +INTERMINGLED 1 0 1 1 +INTERMEDIATE 1 0 1 1 +INTERLACED 1 0 1 1 +INTERFERE 2 0 2 2 +INTERESTING 3 0 3 3 +INTERESTED 4 0 4 4 +INTENTLY 2 0 2 2 +INTENTIONS 1 0 1 1 +INTENTION 1 0 1 1 +INTENSITY 3 0 3 3 +INTENSIFICATION 1 0 1 1 +INTENSELY 1 0 1 1 +INTENDED 1 0 1 1 +INTEND 1 0 1 1 +INTELLIGENT 5 0 5 5 +INTELLIGENCE 7 0 7 7 +INTELLECTS 1 0 1 1 +INTELLECT 1 0 1 1 +INTEGRITY 1 0 1 1 +INTANGIBLE 1 0 1 1 +INSULT 1 0 1 1 +INSTRUMENT 1 0 1 1 +INSTRUCTIONS 4 0 4 4 +INSTITUTION 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCT 1 0 1 1 +INSTEAD 11 0 11 11 +INSTANTLY 6 0 6 6 +INSTANTANEOUS 1 0 1 1 +INSTANT 3 0 3 3 +INSTANCING 1 0 1 1 +INSTANCE 3 0 3 3 +INSTALLED 5 0 5 5 +INSTALL 1 0 1 1 +INSPIRED 1 0 1 1 +INSPIRATION 1 0 1 1 +INSOLENTLY 1 0 1 1 +INSISTS 1 0 1 1 +INSISTENCE 2 0 2 2 +INSISTED 1 0 1 1 +INSIST 1 0 1 1 +INSIPID 1 0 1 1 +INSINUATED 1 0 1 1 +INSIGNIFICANT 2 0 2 2 +INSIGHT 1 0 1 1 +INSIDE 2 0 2 2 +INSERTING 1 0 1 1 +INSENSIBLE 1 0 1 1 +INSECT 1 0 1 1 +INSATIABLE 2 0 2 2 +INNUMERABLE 2 0 2 2 +INNOCENTLY 1 0 1 1 +INNOCENT 2 0 2 2 +INNINGS 1 0 1 1 +INNER 2 0 2 2 +INMATES 1 0 1 1 +INJUSTICE 4 0 4 4 +INJURY 2 0 2 2 +INJURED 2 0 2 2 +INIQUITY 1 0 1 1 +INHUMAN 1 0 1 1 +INHERITANCE 2 0 2 2 +INHABITANTS 3 0 3 3 +INGREDIENTS 1 0 1 1 +INGENUITY 2 0 2 2 +INGENIOUS 2 0 2 2 +INFUSE 1 0 1 1 +INFORMING 1 0 1 1 +INFORMED 3 0 3 3 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENTIAL 2 0 2 2 
+INFLUENCES 2 0 2 2 +INFLUENCE 8 0 8 8 +INFLICT 2 0 2 2 +INFLEXIBLE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRMITIES 1 0 1 1 +INFIRMARY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 2 0 2 2 +INFERENCE 1 0 1 1 +INFANTRY 2 0 2 2 +INFANT 1 0 1 1 +INFANCY 1 0 1 1 +INFALLIBLE 1 0 1 1 +INEXPRESSIBLY 1 0 1 1 +INEXPLICABLE 2 0 2 2 +INEXPERIENCE 1 0 1 1 +INEXHAUSTIBLE 1 0 1 1 +INESTIMABLE 1 0 1 1 +INEFFECTUALLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDULGENCE 1 0 1 1 +INDULGED 1 0 1 1 +INDUCED 1 0 1 1 +INDUCE 1 0 1 1 +INDUBITABLE 1 0 1 1 +INDIVIDUALS 1 0 1 1 +INDIVIDUAL 5 0 5 5 +INDISTINGUISHABLE 1 0 1 1 +INDISTINCT 1 0 1 1 +INDISPENSABLE 1 0 1 1 +INDISCREET 1 0 1 1 +INDIRECT 1 0 1 1 +INDIGENCE 1 0 1 1 +INDIFFERENT 3 0 3 3 +INDIFFERENCE 4 0 4 4 +INDIES 1 0 1 1 +INDICATOR 1 0 1 1 +INDICATING 1 0 1 1 +INDICATES 2 0 2 2 +INDICATED 3 0 3 3 +INDICATE 2 0 2 2 +INDIANS 4 0 4 4 +INDIAN 4 0 4 4 +INDIA 1 0 1 1 +INDEPENDENTS 1 0 1 1 +INDEPENDENT 4 0 4 4 +INDEPENDENCE 1 0 1 1 +INCURRING 1 0 1 1 +INCURRED 1 0 1 1 +INCUR 1 0 1 1 +INCREASED 2 0 2 2 +INCREASE 2 0 2 2 +INCONVENIENT 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOMPREHENSIBLE 1 0 1 1 +INCOMPATIBLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 1 0 1 1 +INCLUDED 2 0 2 2 +INCLINES 1 0 1 1 +INCLINED 2 0 2 2 +INCLINATIONS 2 0 2 2 +INCITED 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENTAL 1 0 1 1 +INCIDENT 6 0 6 6 +INCHES 1 0 1 1 +INCH 2 0 2 2 +INCESSANTLY 1 0 1 1 +INCEPTION 1 0 1 1 +INCAPABLE 2 0 2 2 +INCANDESCENT 1 0 1 1 +INASMUCH 1 0 1 1 +INADEQUATE 2 0 2 2 +INADEQUACY 1 0 1 1 +INACCURACY 1 0 1 1 +INACCESSIBLE 1 0 1 1 +IMPULSIVELY 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVING 1 0 1 1 +IMPROVED 3 0 3 3 +IMPRISONMENT 1 0 1 1 +IMPRISONED 3 0 3 3 +IMPRESSIONS 6 0 6 6 +IMPRESSION 2 0 2 2 +IMPOSSIBLE 11 0 11 11 +IMPOSSIBILITY 1 0 1 1 +IMPOSED 1 0 1 1 +IMPOSE 1 0 1 1 +IMPORTANT 7 0 7 7 +IMPORTANCE 5 0 5 5 +IMPORT 1 0 1 1 +IMPLY 1 0 1 1 +IMPLORES 1 0 1 1 +IMPLORE 1 0 1 1 +IMPLIES 3 0 3 3 +IMPLICIT 1 0 1 1 +IMPLICATION 1 0 1 1 +IMPIETY 1 0 1 1 +IMPETUS 1 0 1 1 +IMPETUOUS 3 0 3 3 +IMPERIOUSLY 1 0 1 1 +IMPERIALIST 1 0 1 1 +IMPERIALISM 1 0 1 1 +IMPERIAL 1 0 1 1 +IMPERFECTLY 1 0 1 1 +IMPERATIVE 1 0 1 1 +IMPENETRABLE 2 0 2 2 +IMPEDIMENT 1 0 1 1 +IMPATIENT 1 0 1 1 +IMPATIENCE 3 0 3 3 +IMPASSIVELY 1 0 1 1 +IMMUNITY 1 0 1 1 +IMMORTALITY 1 0 1 1 +IMMORTAL 1 0 1 1 +IMMENSELY 1 0 1 1 +IMMENSE 1 0 1 1 +IMMEDIATELY 4 0 4 4 +IMMEDIATE 2 0 2 2 +IMMEDIACY 1 0 1 1 +IMBIBING 1 0 1 1 +IMBIBED 1 0 1 1 +IMAGINING 1 0 1 1 +IMAGINED 2 0 2 2 +IMAGINE 2 0 2 2 +IMAGINATIVE 1 0 1 1 +IMAGINATION 3 0 3 3 +IMAGINARY 1 0 1 1 +IMAGINABLE 2 0 2 2 +IMAGES 8 0 8 8 +IMAGE 9 0 9 9 +ILLUSTRIOUS 2 0 2 2 +ILLUSTRATION 1 0 1 1 +ILLUSION 2 0 2 2 +ILLUMINATION 1 0 1 1 +ILLUMINATING 1 0 1 1 +ILLUMINATED 1 0 1 1 +ILLS 1 0 1 1 +ILLNESS 1 0 1 1 +IGNORANCE 2 0 2 2 +IGNOMINY 1 0 1 1 +IGNOBLE 1 0 1 1 +IDOLATRY 1 0 1 1 +IDLY 1 0 1 1 +IDLENESS 1 0 1 1 +IDLE 6 0 6 6 +IDENTITY 1 0 1 1 +IDEAL 3 0 3 3 +IDEA 7 0 7 7 +ICE 1 0 1 1 +HYPOTHESIS 1 0 1 1 +HYPOCRITE 1 0 1 1 +HYPOCRISY 1 0 1 1 +HUT 4 0 4 4 +HUSSY 1 0 1 1 +HUSHED 1 0 1 1 +HUSBAND'S 1 0 1 1 +HURT 1 0 1 1 +HURRYING 2 0 2 2 +HURRIEDLY 3 0 3 3 +HURRIED 6 0 6 6 +HURONS 1 0 1 1 +HURLED 2 0 2 2 +HUNTING 2 0 2 2 +HUNTER 1 0 1 1 +HUNTED 1 0 1 1 +HUNGRY 1 0 1 1 +HUNGER 2 0 2 2 +HUNG 10 0 10 10 +HUNDREDTH 1 0 1 1 +HUNDREDS 2 0 2 2 +HUNDRED 18 0 18 18 +HUMPY 2 0 2 2 +HUMOUR 1 0 1 1 +HUMOROUS 3 0 3 3 +HUMOR 1 0 1 1 +HUMMING 1 0 1 1 +HUMILITY 1 0 1 1 +HUMILIATE 1 0 1 1 +HUMBUG 1 0 1 1 +HUMBLY 2 0 2 2 +HUMBLE 5 0 5 5 +HUMANITY 2 0 2 2 +HUMANE 1 0 1 1 +HUGGED 1 0 1 1 +HUGE 7 0 7 7 +HUES 1 0 1 1 +HUE 1 0 1 1 +HUDSON 1 0 1 1 
+HUDDLED 1 0 1 1 +HOWL 1 0 1 1 +HOWEVER 29 0 29 29 +HOVER 1 0 1 1 +HOUSEWORK 1 0 1 1 +HOUSES 1 0 1 1 +HOUSEMAID 2 0 2 2 +HOUSEKEEPER 2 0 2 2 +HOUSEHOLD'S 1 0 1 1 +HOURS 13 0 13 13 +HOUR 12 0 12 12 +HOUNDED 1 0 1 1 +HOTELS 1 0 1 1 +HOTEL 7 0 7 7 +HOSTILITY 1 0 1 1 +HOSPITALITY 4 0 4 4 +HOSPITABLY 1 0 1 1 +HOSPITABLE 1 0 1 1 +HORSES 6 0 6 6 +HORSEMEN 1 0 1 1 +HORROR 2 0 2 2 +HORRID 1 0 1 1 +HORRIBLY 2 0 2 2 +HORRIBLE 3 0 3 3 +HORNFUL 1 0 1 1 +HORIZON 3 0 3 3 +HORATIO 2 0 2 2 +HOPKINSON 2 0 2 2 +HOPKINS'S 1 0 1 1 +HOPKINS 4 0 4 4 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPED 2 0 2 2 +HOOKING 1 0 1 1 +HOOKED 1 0 1 1 +HONOURED 1 0 1 1 +HONOURABLY 1 0 1 1 +HONORIFIC 2 0 2 2 +HONORED 1 0 1 1 +HONEY 1 0 1 1 +HONESTY 1 0 1 1 +HONESTLY 2 0 2 2 +HONEST 5 0 5 5 +HON 1 0 1 1 +HOMILY 1 0 1 1 +HOMES 2 0 2 2 +HOMELY 3 0 3 3 +HOME 23 0 23 23 +HOLMES 10 0 10 10 +HOLLYHOCKS 1 0 1 1 +HOLLOW 3 0 3 3 +HOLINESS 2 0 2 2 +HOLIDAYS 3 0 3 3 +HOLES 1 0 1 1 +HOLE 1 0 1 1 +HOLDS 2 0 2 2 +HOLDING 1 0 1 1 +HOLBORN 1 0 1 1 +HOBSON'S 1 0 1 1 +HOBS 1 0 1 1 +HO 1 0 1 1 +HITHERTO 1 0 1 1 +HISTORY 5 0 5 5 +HISTORIC 1 0 1 1 +HISTORIANS 1 0 1 1 +HISTORIAN 1 0 1 1 +HISSING 1 0 1 1 +HISS 1 0 1 1 +HIRE 1 0 1 1 +HINT 2 0 2 2 +HINDERED 1 0 1 1 +HIND 1 0 1 1 +HIMSELF 49 0 49 49 +HILLY 1 0 1 1 +HILL 4 0 4 4 +HIGHNESS 1 0 1 1 +HIGHLY 2 0 2 2 +HIGHER 2 0 2 2 +HIGH 18 0 18 18 +HIERARCHY 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HIDE 3 0 3 3 +HIDDEN 3 0 3 3 +HIDALGO 1 0 1 1 +HICKEY 1 0 1 1 +HEWN 1 0 1 1 +HESTER 11 0 11 11 +HESITATION 1 0 1 1 +HESITATING 2 0 2 2 +HESITATED 1 0 1 1 +HERSELF 20 0 20 20 +HERS 2 0 2 2 +HERON 1 0 1 1 +HEROINE 1 0 1 1 +HEROIC 2 0 2 2 +HERO 3 0 3 3 +HERMOCRATES 1 0 1 1 +HERETICS 2 0 2 2 +HEREDITY 1 0 1 1 +HEREAFTER 3 0 3 3 +HERE'S 1 0 1 1 +HERALDED 1 0 1 1 +HENRY'S 1 0 1 1 +HENRY 2 0 2 2 +HENLEY 1 0 1 1 +HENCE 4 0 4 4 +HEMMED 1 0 1 1 +HELPLESS 3 0 3 3 +HELPING 1 0 1 1 +HELP 18 0 18 18 +HELMSMAN 1 0 1 1 +HELLENES 1 0 1 1 +HELL 2 0 2 2 +HEIGHTS 1 0 1 1 +HEIGHTENING 1 0 1 1 +HEIGHT 1 0 1 1 +HEELS 1 0 1 1 +HEDGES 1 0 1 1 +HEDGE 1 0 1 1 +HEAVY 13 0 13 13 +HEAVING 2 0 2 2 +HEAVILY 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVENLY 1 0 1 1 +HEAVED 1 0 1 1 +HEAT 2 0 2 2 +HEARTY 1 0 1 1 +HEARTILY 2 0 2 2 +HEARTHSTONES 1 0 1 1 +HEARTH 3 0 3 3 +HEARTED 1 0 1 1 +HEARSE 2 0 2 2 +HEARS 2 0 2 2 +HEARING 1 0 1 1 +HEARD 19 0 19 19 +HEAP 2 0 2 2 +HEALTH 6 0 6 6 +HEADS 3 0 3 3 +HEADQUARTERS 1 0 1 1 +HEADLONGS 1 0 1 1 +HEADLONG 1 0 1 1 +HEADING 1 0 1 1 +HE'LL 1 0 1 1 +HAWORTH 1 0 1 1 +HAWKS 1 0 1 1 +HAWKEYE 5 0 5 5 +HAWK'S 1 0 1 1 +HAVEN'T 6 0 6 6 +HAUNTED 1 0 1 1 +HAUGHTY 4 0 4 4 +HATS 1 0 1 1 +HATRED 3 0 3 3 +HATH 4 0 4 4 +HATER 1 0 1 1 +HATEFUL 1 0 1 1 +HATED 1 0 1 1 +HATE 1 0 1 1 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 4 0 4 4 +HASTE 1 0 1 1 +HARVEST 1 0 1 1 +HARRYING 1 0 1 1 +HARRY 3 0 3 3 +HARROW 1 0 1 1 +HARRIED 1 0 1 1 +HARPOONER 1 0 1 1 +HARNESS 1 0 1 1 +HARMONY 2 0 2 2 +HARMON 4 0 4 4 +HARM 2 0 2 2 +HARE 1 0 1 1 +HARDSHIPS 1 0 1 1 +HARDLY 14 0 14 14 +HARDER 1 0 1 1 +HARASSING 1 0 1 1 +HARANGUING 1 0 1 1 +HAR 1 0 1 1 +HAPPY 16 0 16 16 +HAPPINESS 6 0 6 6 +HAPPILY 1 0 1 1 +HAPPENS 3 0 3 3 +HAPPENING 2 0 2 2 +HAPLESS 1 0 1 1 +HANSOM 1 0 1 1 +HANS 2 0 2 2 +HANGS 1 0 1 1 +HANGERS 1 0 1 1 +HANG 1 0 1 1 +HANDSOMEST 1 0 1 1 +HANDSOMELY 1 0 1 1 +HANDLE 1 0 1 1 +HANDKERCHIEFS 1 0 1 1 +HANDFUL 1 0 1 1 +HANDED 3 0 3 3 +HAND 29 0 29 29 +HAMMER 3 0 3 3 +HAMLET'S 1 0 1 1 +HAMBURG 1 0 1 1 +HALT 1 0 1 1 +HALLWAY 1 0 1 1 +HALLS 3 0 3 3 +HAIRS 1 0 1 1 +HAILING 1 0 1 1 +HADN'T 3 0 3 3 +HABITUALLY 1 0 1 1 +HABITUAL 2 0 2 2 +HABITS 4 0 4 4 +HABITATION 1 0 1 1 
+HABIT 7 0 7 7 +GUTTER 1 0 1 1 +GUSTS 2 0 2 2 +GUST 1 0 1 1 +GUNS 1 0 1 1 +GUN 1 0 1 1 +GULLET 1 0 1 1 +GULF 2 0 2 2 +GUILTY 2 0 2 2 +GUILT 2 0 2 2 +GUIDED 2 0 2 2 +GUIDE 2 0 2 2 +GUERRILLA 1 0 1 1 +GUARDS 3 0 3 3 +GUARD 1 0 1 1 +GRUDGE 1 0 1 1 +GROWTH 5 0 5 5 +GROWN 7 0 7 7 +GROWLED 2 0 2 2 +GROWING 4 0 4 4 +GROW 4 0 4 4 +GROUPS 2 0 2 2 +GROUP 2 0 2 2 +GROUNDS 2 0 2 2 +GROSS 1 0 1 1 +GROPING 1 0 1 1 +GROPE 1 0 1 1 +GROANS 2 0 2 2 +GROANING 1 0 1 1 +GROANED 2 0 2 2 +GROAN 1 0 1 1 +GRINNING 1 0 1 1 +GRINDER 1 0 1 1 +GRIN 1 0 1 1 +GRIM 3 0 3 3 +GRIFFIN 1 0 1 1 +GRIEVED 1 0 1 1 +GRIEF 2 0 2 2 +GREW 5 0 5 5 +GREETINGS 1 0 1 1 +GREETING 4 0 4 4 +GREETED 1 0 1 1 +GREET 1 0 1 1 +GREEK 4 0 4 4 +GREATNESS 2 0 2 2 +GREATLY 10 0 10 10 +GREATEST 7 0 7 7 +GRAVEYARD 3 0 3 3 +GRAVES 1 0 1 1 +GRAVE 4 0 4 4 +GRATITUDE 2 0 2 2 +GRATIFICATION 3 0 3 3 +GRATE 1 0 1 1 +GRASS 11 0 11 11 +GRASPING 3 0 3 3 +GRANTED 3 0 3 3 +GRANDSON 1 0 1 1 +GRANDMOTHER 2 0 2 2 +GRANDFATHER 4 0 4 4 +GRANDER 2 0 2 2 +GRAMOPHONES 1 0 1 1 +GRAINS 1 0 1 1 +GRADUALLY 5 0 5 5 +GRADATED 1 0 1 1 +GRACIOUSLY 1 0 1 1 +GRACIOUS 2 0 2 2 +GRACES 2 0 2 2 +GRACEFULLY 1 0 1 1 +GRACEFUL 1 0 1 1 +GRACE 12 0 12 12 +GOWN 1 0 1 1 +GOVERNOR'S 1 0 1 1 +GOVERNESS 2 0 2 2 +GOTHIC 3 0 3 3 +GOT 13 0 13 13 +GOSSIP 2 0 2 2 +GOSPEL 2 0 2 2 +GORGEOUS 1 0 1 1 +GORDONS 1 0 1 1 +GORDON 1 0 1 1 +GOODS 5 0 5 5 +GOODNESS 1 0 1 1 +GOODLY 1 0 1 1 +GONE 14 0 14 14 +GOLIATH 2 0 2 2 +GOLF 1 0 1 1 +GOLDEN 15 0 15 15 +GOLD 15 0 15 15 +GOES 2 0 2 2 +GODLY 1 0 1 1 +GODLESS 1 0 1 1 +GODDESS 1 0 1 1 +GOD'S 1 0 1 1 +GOD 33 0 33 33 +GOBY 1 0 1 1 +GNARLED 1 0 1 1 +GLUE 1 0 1 1 +GLOWING 3 0 3 3 +GLOW 3 0 3 3 +GLOVES 5 0 5 5 +GLOVED 1 0 1 1 +GLOSSY 2 0 2 2 +GLORY 1 0 1 1 +GLORIOUS 2 0 2 2 +GLORIES 1 0 1 1 +GLOOMY 1 0 1 1 +GLOOMILY 2 0 2 2 +GLOBE 1 0 1 1 +GLITTERING 4 0 4 4 +GLITTERED 2 0 2 2 +GLINDA 1 0 1 1 +GLIMMERING 1 0 1 1 +GLIDING 1 0 1 1 +GLIDES 1 0 1 1 +GLIDED 2 0 2 2 +GLEANER 1 0 1 1 +GLEAMS 1 0 1 1 +GLEAMING 4 0 4 4 +GLEAMED 1 0 1 1 +GLEAM 1 0 1 1 +GLASS 6 0 6 6 +GLARE 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 5 0 5 5 +GLAMOUR 1 0 1 1 +GLADNESS 1 0 1 1 +GLADLY 1 0 1 1 +GLAD 12 0 12 12 +GIVING 7 0 7 7 +GIVES 3 0 3 3 +GIVEN 15 0 15 15 +GIRL 25 0 25 25 +GILLIKINS 2 0 2 2 +GILDED 3 0 3 3 +GILD 1 0 1 1 +GIGANTIC 1 0 1 1 +GIFTS 2 0 2 2 +GIFT 6 0 6 6 +GHOSTS 2 0 2 2 +GHOSTLY 1 0 1 1 +GHOST 2 0 2 2 +GHISIZZLE 4 0 4 4 +GHASTLY 1 0 1 1 +GETTING 6 0 6 6 +GET 30 0 30 30 +GESTURES 1 0 1 1 +GESTURE 2 0 2 2 +GESTATION 1 0 1 1 +GERMANTOWN 1 0 1 1 +GERM 1 0 1 1 +GEORGE 3 0 3 3 +GEOMETRICAL 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 5 0 5 5 +GENTLEWOMAN 1 0 1 1 +GENTLENESS 1 0 1 1 +GENTLEMAN'S 1 0 1 1 +GENTLE 7 0 7 7 +GENTILITY 1 0 1 1 +GENIUS 2 0 2 2 +GENEROUSLY 1 0 1 1 +GENEROUS 4 0 4 4 +GENERATION 2 0 2 2 +GENERATED 1 0 1 1 +GENERALS 3 0 3 3 +GENEALOGIES 1 0 1 1 +GAZING 2 0 2 2 +GAZED 2 0 2 2 +GAZE 3 0 3 3 +GAY 1 0 1 1 +GATHERINGS 1 0 1 1 +GATHERING 2 0 2 2 +GATHERED 2 0 2 2 +GATES 6 0 6 6 +GATE 3 0 3 3 +GASPED 1 0 1 1 +GASEOUS 1 0 1 1 +GAS 1 0 1 1 +GARDENING 1 0 1 1 +GARDENER'S 1 0 1 1 +GARDENER 1 0 1 1 +GARDEN 6 0 6 6 +GARB 1 0 1 1 +GAMEWELL'S 1 0 1 1 +GALVANOMETER 1 0 1 1 +GALLOWSNESS 1 0 1 1 +GALLEY 1 0 1 1 +GALLANT 4 0 4 4 +GAIN 2 0 2 2 +GABLES 1 0 1 1 +FUZZY 1 0 1 1 +FUTURE 5 0 5 5 +FURTHERED 1 0 1 1 +FURTHER 9 0 9 9 +FURNITURE 4 0 4 4 +FURNISHING 1 0 1 1 +FURNISHED 1 0 1 1 +FURNISH 1 0 1 1 +FURLED 1 0 1 1 +FURIOUSLY 2 0 2 2 +FUNERAL 1 0 1 1 +FUNDAMENTAL 1 0 1 1 +FUNCTIONS 1 0 1 1 +FUNCTION 1 0 1 1 +FUMBLED 1 0 1 1 +FULLY 3 0 3 3 +FULL 18 0 18 18 +FULFILLED 2 0 2 2 +FUGITIVES 1 0 1 1 +FUGITIVE'S 1 0 1 
1 +FRUSTRATED 1 0 1 1 +FRUITS 1 0 1 1 +FRUIT 1 0 1 1 +FROZEN 2 0 2 2 +FROWNINGLY 1 0 1 1 +FROWNING 2 0 2 2 +FROWNED 2 0 2 2 +FROWN 1 0 1 1 +FROTHY 1 0 1 1 +FROST 1 0 1 1 +FRONTISPIECE 1 0 1 1 +FRONTIER 3 0 3 3 +FRONT 6 0 6 6 +FROM 187 0 187 187 +FROLIC 2 0 2 2 +FRO 1 0 1 1 +FRIVOLOUS 2 0 2 2 +FRINGED 2 0 2 2 +FRIGHTFUL 1 0 1 1 +FRIGATE 2 0 2 2 +FRIENDSHIP 1 0 1 1 +FRIENDS 8 0 8 8 +FRIENDLY 5 0 5 5 +FRIEND'S 2 0 2 2 +FRIDAY 1 0 1 1 +FRICTION 1 0 1 1 +FRETTING 1 0 1 1 +FRESHENS 1 0 1 1 +FRESH 6 0 6 6 +FREQUENTLY 3 0 3 3 +FREQUENTER 1 0 1 1 +FREQUENT 3 0 3 3 +FRENZY 1 0 1 1 +FRENCH 11 0 11 11 +FREELY 2 0 2 2 +FREEDOM 4 0 4 4 +FREED 1 0 1 1 +FREE 18 0 18 18 +FRECKLES 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCIS 3 0 3 3 +FRANCE 6 0 6 6 +FRAMEWORK 1 0 1 1 +FRAMED 1 0 1 1 +FRAME 4 0 4 4 +FRAIL 1 0 1 1 +FRAGRANCE 1 0 1 1 +FRAGMENT 2 0 2 2 +FRACTURED 1 0 1 1 +FRACTURE 1 0 1 1 +FOX 1 0 1 1 +FOURTH 2 0 2 2 +FOURTEEN 1 0 1 1 +FOUR 12 0 12 12 +FOUNTAINS 1 0 1 1 +FOUNDING 1 0 1 1 +FOUNDATION 1 0 1 1 +FOUL 1 0 1 1 +FOUGHT 1 0 1 1 +FORWARD 5 0 5 5 +FORTY 2 0 2 2 +FORTUNES 2 0 2 2 +FORTUNE 8 0 8 8 +FORTUNATELY 1 0 1 1 +FORTUNATE 2 0 2 2 +FORTUITOUS 1 0 1 1 +FORTNIGHT 1 0 1 1 +FORTIFIED 2 0 2 2 +FORTHWITH 3 0 3 3 +FORTH 9 0 9 9 +FORT 2 0 2 2 +FORSAKE 1 0 1 1 +FORMS 6 0 6 6 +FORMING 1 0 1 1 +FORMIDABLE 1 0 1 1 +FORMER 7 0 7 7 +FORMED 7 0 7 7 +FORMALITY 1 0 1 1 +FORMALITIES 1 0 1 1 +FORMAL 1 0 1 1 +FORM 12 0 12 12 +FORKED 1 0 1 1 +FORGOTTEN 4 0 4 4 +FORGOT 1 0 1 1 +FORGIVE 6 0 6 6 +FORGING 1 0 1 1 +FORGETTING 1 0 1 1 +FORGETFULNESS 1 0 1 1 +FORGET 6 0 6 6 +FORGED 3 0 3 3 +FORGE 1 0 1 1 +FORETOLD 2 0 2 2 +FOREST 6 0 6 6 +FORESEEING 1 0 1 1 +FORESAW 1 0 1 1 +FOREIGNER 1 0 1 1 +FOREIGN 1 0 1 1 +FOREHEAD 1 0 1 1 +FOREFINGER 1 0 1 1 +FORCIBLE 1 0 1 1 +FORCES 2 0 2 2 +FORCED 1 0 1 1 +FORCE 17 0 17 17 +FORBES 1 0 1 1 +FOOTNOTE 2 0 2 2 +FOOTMEN 1 0 1 1 +FOOTMAN 1 0 1 1 +FOOT 9 0 9 9 +FOOLS 1 0 1 1 +FOOLISHLY 2 0 2 2 +FOOLISH 3 0 3 3 +FOOD 1 0 1 1 +FONDNESS 1 0 1 1 +FOND 5 0 5 5 +FOLLOWS 3 0 3 3 +FOLLOWING 4 0 4 4 +FOLLOWER 1 0 1 1 +FOLLOW 7 0 7 7 +FOLLIES 1 0 1 1 +FOLIAGE 1 0 1 1 +FOLDED 1 0 1 1 +FOLD 2 0 2 2 +FOES 2 0 2 2 +FOCUS 1 0 1 1 +FOAM 4 0 4 4 +FLY 4 0 4 4 +FLUX 2 0 2 2 +FLUSHED 3 0 3 3 +FLUKES 1 0 1 1 +FLOYD'S 1 0 1 1 +FLOWERS 11 0 11 11 +FLOWED 1 0 1 1 +FLOW 2 0 2 2 +FLOURISH 1 0 1 1 +FLOORS 1 0 1 1 +FLOOR 10 0 10 10 +FLOOD 2 0 2 2 +FLOATED 1 0 1 1 +FLOAT 1 0 1 1 +FLINT 1 0 1 1 +FLING 1 0 1 1 +FLIES 1 0 1 1 +FLICKER 2 0 2 2 +FLEW 1 0 1 1 +FLESH 5 0 5 5 +FLEETING 1 0 1 1 +FLEECY 1 0 1 1 +FLEECES 1 0 1 1 +FLEECE 3 0 3 3 +FLED 3 0 3 3 +FLAX 1 0 1 1 +FLATTERY 1 0 1 1 +FLATTERS 2 0 2 2 +FLATTERING 1 0 1 1 +FLATTERED 1 0 1 1 +FLAT 1 0 1 1 +FLASHED 3 0 3 3 +FLASH 3 0 3 3 +FLAPS 1 0 1 1 +FLAP 2 0 2 2 +FLANKED 1 0 1 1 +FLAMES 1 0 1 1 +FLAMED 2 0 2 2 +FLAME 3 0 3 3 +FLAGSTONES 1 0 1 1 +FLAG 1 0 1 1 +FIXES 1 0 1 1 +FIXED 3 0 3 3 +FIX 2 0 2 2 +FITZOOTH'S 1 0 1 1 +FITZOOTH 7 0 7 7 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FITLY 1 0 1 1 +FISTS 2 0 2 2 +FISHES 3 0 3 3 +FISHERMAN 1 0 1 1 +FISH 1 0 1 1 +FIRMNESS 1 0 1 1 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRESIDES 1 0 1 1 +FIRESIDE 1 0 1 1 +FIRES 1 0 1 1 +FIREPLACE 2 0 2 2 +FIRED 1 0 1 1 +FINISHED 4 0 4 4 +FINISH 1 0 1 1 +FINGERS 6 0 6 6 +FINGER 2 0 2 2 +FINEST 1 0 1 1 +FINER 1 0 1 1 +FINELY 1 0 1 1 +FINED 1 0 1 1 +FINDS 2 0 2 2 +FINDING 3 0 3 3 +FINANCIAL 1 0 1 1 +FINALLY 8 0 8 8 +FINALE 1 0 1 1 +FINAL 5 0 5 5 +FILLED 8 0 8 8 +FILE 1 0 1 1 +FIGURES 4 0 4 4 +FIGURED 1 0 1 1 +FIGURE 6 0 6 6 +FIGHTING 4 0 4 4 +FIFTY 6 0 6 6 +FIFTH 1 0 1 1 +FIFTEENTH 2 0 2 2 +FIFTEEN 1 0 1 1 
+FIERCELY 1 0 1 1 +FIERCE 4 0 4 4 +FIELDS 4 0 4 4 +FIELD 6 0 6 6 +FEWER 1 0 1 1 +FEW 28 0 28 28 +FEVER 2 0 2 2 +FEUDS 1 0 1 1 +FETCH 1 0 1 1 +FESTIVE 1 0 1 1 +FERVENT 1 0 1 1 +FENCE 1 0 1 1 +FEMININE 1 0 1 1 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 3 0 3 3 +FELLOW'S 1 0 1 1 +FELLOW 9 0 9 9 +FELLER 1 0 1 1 +FELICITY 2 0 2 2 +FEET 11 0 11 11 +FEELINGS 3 0 3 3 +FEEDING 1 0 1 1 +FEEDER 1 0 1 1 +FEED 2 0 2 2 +FEEBLE 2 0 2 2 +FEDERAL 3 0 3 3 +FED 1 0 1 1 +FEBRUARY 1 0 1 1 +FEATURE 1 0 1 1 +FEATHERS 1 0 1 1 +FEASTED 1 0 1 1 +FEAST 3 0 3 3 +FEASIBLE 1 0 1 1 +FEARS 3 0 3 3 +FEARLESS 1 0 1 1 +FEARING 2 0 2 2 +FEARFUL 1 0 1 1 +FEAREST 1 0 1 1 +FAVORABLY 1 0 1 1 +FAVOR 2 0 2 2 +FAULTS 1 0 1 1 +FAULTLESS 1 0 1 1 +FAULT 2 0 2 2 +FATTENED 1 0 1 1 +FATIGUE 2 0 2 2 +FATHOMS 6 0 6 6 +FATHOM 1 0 1 1 +FATHERS 2 0 2 2 +FATHER'S 4 0 4 4 +FATHER 28 0 28 28 +FATALITY 2 0 2 2 +FAT 3 0 3 3 +FASTEST 1 0 1 1 +FASTEN 1 0 1 1 +FAST 7 0 7 7 +FASHIONED 1 0 1 1 +FASHIONABLE 1 0 1 1 +FASCINATION 2 0 2 2 +FARMS 1 0 1 1 +FARMHOUSES 1 0 1 1 +FARMERS 1 0 1 1 +FARMER'S 1 0 1 1 +FARMER 5 0 5 5 +FARM 8 0 8 8 +FAREWELL 2 0 2 2 +FANTASY 1 0 1 1 +FANNING 1 0 1 1 +FANCY 3 0 3 3 +FANCIES 2 0 2 2 +FANCIED 2 0 2 2 +FANATIC 1 0 1 1 +FAN 2 0 2 2 +FAMOUSLY 2 0 2 2 +FAMOUS 3 0 3 3 +FAMILY 16 0 16 16 +FAMILIES 3 0 3 3 +FAMILIARITY 3 0 3 3 +FAMILIAR 4 0 4 4 +FAME 2 0 2 2 +FALSE 6 0 6 6 +FALLS 5 0 5 5 +FALCONS 1 0 1 1 +FALCON 1 0 1 1 +FAITHFUL 1 0 1 1 +FAITH 9 0 9 9 +FAIRLY 4 0 4 4 +FAINTNESS 1 0 1 1 +FAINTLY 3 0 3 3 +FAINTING 4 0 4 4 +FAINT 3 0 3 3 +FAIN 1 0 1 1 +FAILURE 2 0 2 2 +FAILS 1 0 1 1 +FAILING 1 0 1 1 +FAILED 2 0 2 2 +FAIL 4 0 4 4 +FADES 1 0 1 1 +FADED 1 0 1 1 +FADE 4 0 4 4 +FACULTIES 1 0 1 1 +FACTS 4 0 4 4 +FACTORS 1 0 1 1 +FACTOR 1 0 1 1 +FACTIONS 2 0 2 2 +FACTION 1 0 1 1 +FACT 23 0 23 23 +FACILITATED 1 0 1 1 +FACILITATE 1 0 1 1 +FACES 4 0 4 4 +FACED 3 0 3 3 +FACE 29 0 29 29 +FABULOUS 1 0 1 1 +EYES 44 0 44 44 +EYELIDS 1 0 1 1 +EYED 1 0 1 1 +EXULTING 1 0 1 1 +EXULTATION 1 0 1 1 +EXTREMELY 2 0 2 2 +EXTREME 1 0 1 1 +EXTRAORDINARY 2 0 2 2 +EXTRACT 1 0 1 1 +EXTRA 1 0 1 1 +EXTINGUISHED 2 0 2 2 +EXTINCTION 1 0 1 1 +EXTINCT 1 0 1 1 +EXTERIOR 1 0 1 1 +EXTENT 6 0 6 6 +EXTENSIVE 1 0 1 1 +EXTENDED 3 0 3 3 +EXTEND 2 0 2 2 +EXQUISITE 3 0 3 3 +EXPULSION 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSIVE 1 0 1 1 +EXPRESSIONS 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSING 2 0 2 2 +EXPRESSED 4 0 4 4 +EXPRESS 4 0 4 4 +EXPOSURE 1 0 1 1 +EXPONENT 2 0 2 2 +EXPLOSION 1 0 1 1 +EXPLORE 2 0 2 2 +EXPLOITS 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINED 1 0 1 1 +EXPLAIN 4 0 4 4 +EXPERIMENTALLY 1 0 1 1 +EXPERIENCING 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPERIENCE 7 0 7 7 +EXPENSIVE 1 0 1 1 +EXPENDITURE 4 0 4 4 +EXPEDITION 4 0 4 4 +EXPECTED 3 0 3 3 +EXPECTATIONS 2 0 2 2 +EXPECT 4 0 4 4 +EXPANDED 1 0 1 1 +EXOTICS 1 0 1 1 +EXISTING 3 0 3 3 +EXISTENT 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 4 0 4 4 +EXILE 1 0 1 1 +EXHORT 1 0 1 1 +EXHIBITS 1 0 1 1 +EXHIBITION 2 0 2 2 +EXHIBITED 1 0 1 1 +EXHIBIT 2 0 2 2 +EXHAUSTED 1 0 1 1 +EXHALE 1 0 1 1 +EXERTIONS 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISING 1 0 1 1 +EXERCISED 1 0 1 1 +EXERCISE 1 0 1 1 +EXEMPLIFIES 1 0 1 1 +EXEMPLARY 1 0 1 1 +EXECUTIVE 1 0 1 1 +EXECUTED 1 0 1 1 +EXCLUDED 2 0 2 2 +EXCLAIMED 3 0 3 3 +EXCITING 2 0 2 2 +EXCITEMENT 3 0 3 3 +EXCITE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONS 1 0 1 1 +EXCEPTION 2 0 2 2 +EXCEPT 6 0 6 6 +EXCELLENT 5 0 5 5 +EXCELLENCY'S 1 0 1 1 +EXCELLENCY 2 0 2 2 +EXCELLENCE 1 0 1 1 +EXCEEDING 1 0 1 1 +EXCEEDED 1 0 1 1 +EXCEED 1 0 1 1 +EXAMPLE 2 0 2 2 +EXAMINED 4 0 4 4 +EXAMINE 4 0 4 4 +EXAMINATION 8 0 8 8 +EXALTED 1 0 
1 1 +EXALT 1 0 1 1 +EXAGGERATED 1 0 1 1 +EXACTLY 8 0 8 8 +EXACT 5 0 5 5 +EX 2 0 2 2 +EVOLVING 1 0 1 1 +EVOLVED 1 0 1 1 +EVOLUTION 3 0 3 3 +EVOKED 1 0 1 1 +EVOKE 1 0 1 1 +EVIL 6 0 6 6 +EVIDENTLY 4 0 4 4 +EVIDENT 5 0 5 5 +EVIDENCE 5 0 5 5 +EVERYWHERE 7 0 7 7 +EVERYTHING 16 0 16 16 +EVERYBODY 7 0 7 7 +EVERLASTING 2 0 2 2 +EVENTS 8 0 8 8 +EVENT 4 0 4 4 +EVENLY 2 0 2 2 +EVENING 15 0 15 15 +EVEN 51 0 51 51 +EVASIVELY 1 0 1 1 +EVAPORATION 1 0 1 1 +EVADED 1 0 1 1 +EVA'S 1 0 1 1 +EUROPE 3 0 3 3 +EUCHARIST 1 0 1 1 +ETHICAL 1 0 1 1 +ETERNITY 2 0 2 2 +ETERNAL 2 0 2 2 +ETCHINGS 1 0 1 1 +ET 3 0 3 3 +ESTRANGEMENT 1 0 1 1 +ESTIMATE 1 0 1 1 +ESTABLISHED 3 0 3 3 +ESSEX 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 2 0 2 2 +ESSENCE 1 0 1 1 +ESQUIRE 2 0 2 2 +ESPECIALLY 6 0 6 6 +ESCORT 4 0 4 4 +ESCAPED 1 0 1 1 +ESCAPE 4 0 4 4 +ESCAPADES 1 0 1 1 +ERRORS 1 0 1 1 +ERRONEOUS 2 0 2 2 +ERRING 1 0 1 1 +ERRAND 2 0 2 2 +ERR 1 0 1 1 +ERNEST 1 0 1 1 +ERIE 1 0 1 1 +ERECTED 1 0 1 1 +ERECT 1 0 1 1 +ERA 1 0 1 1 +EQUIP 1 0 1 1 +EQUATION 1 0 1 1 +EQUALS 1 0 1 1 +EQUALLY 4 0 4 4 +EQUAL 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLE 3 0 3 3 +EPISODE 1 0 1 1 +ENVY 1 0 1 1 +ENVIRONMENT 1 0 1 1 +ENVELOPMENT 1 0 1 1 +ENTRUSTED 1 0 1 1 +ENTRENCHED 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTRANCED 1 0 1 1 +ENTIRELY 6 0 6 6 +ENTIRE 2 0 2 2 +ENTHUSIASTS 1 0 1 1 +ENTHUSIASTIC 1 0 1 1 +ENTHUSIASM 1 0 1 1 +ENTERTAINS 1 0 1 1 +ENTERTAINMENT 3 0 3 3 +ENTERTAIN 2 0 2 2 +ENTERS 1 0 1 1 +ENTERPRISE 2 0 2 2 +ENTERING 2 0 2 2 +ENTANGLED 1 0 1 1 +ENOUGH 20 0 20 20 +ENORMOUSLY 2 0 2 2 +ENORMOUS 1 0 1 1 +ENNIS 1 0 1 1 +ENLISTED 1 0 1 1 +ENLIST 1 0 1 1 +ENJOYMENT 1 0 1 1 +ENJOYED 1 0 1 1 +ENJOY 2 0 2 2 +ENIGMA 1 0 1 1 +ENGORGED 1 0 1 1 +ENGLISHMAN 3 0 3 3 +ENGLISH 12 0 12 12 +ENGLAND 10 0 10 10 +ENGINEERS 2 0 2 2 +ENGINEER 4 0 4 4 +ENGINE 6 0 6 6 +ENGENDERS 1 0 1 1 +ENGAGEMENTS 1 0 1 1 +ENGAGED 5 0 5 5 +ENGAGE 1 0 1 1 +ENFORCED 1 0 1 1 +ENFORCE 3 0 3 3 +ENERGY 3 0 3 3 +ENEMY'S 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 3 0 3 3 +ENDURES 1 0 1 1 +ENDURED 1 0 1 1 +ENDURE 1 0 1 1 +ENDS 1 0 1 1 +ENDOWED 1 0 1 1 +ENDLESS 1 0 1 1 +ENDEAVOURED 1 0 1 1 +ENDEAVORING 1 0 1 1 +END 18 0 18 18 +ENCYCLOPAEDIA 1 0 1 1 +ENCOURAGED 1 0 1 1 +ENCOURAGE 2 0 2 2 +ENCLOSE 1 0 1 1 +ENACTED 1 0 1 1 +ENABLES 2 0 2 2 +ENABLED 3 0 3 3 +EMULATION 1 0 1 1 +EMPTY 7 0 7 7 +EMPLOYMENTS 2 0 2 2 +EMPLOYMENT 1 0 1 1 +EMPLOYING 1 0 1 1 +EMPLOYERS 1 0 1 1 +EMPLOYER 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPIRE 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMOTIONLESS 1 0 1 1 +EMOTION 1 0 1 1 +EMINENCES 1 0 1 1 +EMERGENCY 1 0 1 1 +EMERGENCE 2 0 2 2 +EMERALD 1 0 1 1 +EMBROIDERED 2 0 2 2 +EMBRACING 2 0 2 2 +EMBRACE 2 0 2 2 +EMBODIED 1 0 1 1 +EMBLEM 1 0 1 1 +EMBITTERING 1 0 1 1 +EMBITTERED 1 0 1 1 +EMBERS 1 0 1 1 +EMBARRASS 1 0 1 1 +ELSEWHERE 4 0 4 4 +ELSE 7 0 7 7 +ELOQUENT 1 0 1 1 +ELMHURST 2 0 2 2 +ELIZABETH 1 0 1 1 +ELIZA 3 0 3 3 +ELICITED 1 0 1 1 +ELF 1 0 1 1 +ELEVEN 4 0 4 4 +ELEMENTS 7 0 7 7 +ELEMENTARY 3 0 3 3 +ELEMENT 4 0 4 4 +ELEGANT 1 0 1 1 +ELEGANCE 2 0 2 2 +ELECTROPLATING 1 0 1 1 +ELECTROLYTIC 2 0 2 2 +ELECTRICITY 5 0 5 5 +ELECTRICAL 2 0 2 2 +ELECTRIC 4 0 4 4 +ELECTIONEERING 1 0 1 1 +ELECTION 3 0 3 3 +ELECTING 1 0 1 1 +ELECTED 1 0 1 1 +ELDERS 1 0 1 1 +ELDERLY 1 0 1 1 +ELDER 2 0 2 2 +ELAPSED 1 0 1 1 +EJACULATIONS 1 0 1 1 +EJACULATED 2 0 2 2 +EITHER'S 1 0 1 1 +EIGHTY 3 0 3 3 +EIGHTH 3 0 3 3 +EIGHTEENTH 2 0 2 2 +EIGHTEEN 14 0 14 14 +EIGHT 7 0 7 7 +EGYPTIAN 1 0 1 1 +EGYPT 1 0 1 1 +EFFORTS 4 0 4 4 +EFFORT 11 0 11 11 +EFFICIENCY 1 0 1 1 +EFFECTUALLY 1 0 1 1 +EFFECTUAL 2 0 2 2 +EFFECTS 3 0 3 3 +EFFECTIVENESS 1 0 1 1 +EFFECTIVELY 1 0 1 1 
+EFFECTIVE 2 0 2 2 +EFFECTING 1 0 1 1 +EDWARD 3 0 3 3 +EDUCATION 5 0 5 5 +EDUCATED 2 0 2 2 +EDITORS 1 0 1 1 +EDITOR 1 0 1 1 +EDISONIA 1 0 1 1 +EDISON 16 0 16 16 +EDIFICE 1 0 1 1 +EDICTS 1 0 1 1 +EDICT 1 0 1 1 +EDGES 1 0 1 1 +EDGED 1 0 1 1 +EDGE 5 0 5 5 +EDDYING 1 0 1 1 +ECONOMY 2 0 2 2 +ECONOMIC 3 0 3 3 +ECHOING 1 0 1 1 +ECHOES 1 0 1 1 +ECCLESIASTICAL 1 0 1 1 +ECCENTRICITY 1 0 1 1 +EATEN 1 0 1 1 +EAT 5 0 5 5 +EASY 14 0 14 14 +EASTWARDS 1 0 1 1 +EASTERLY'S 1 0 1 1 +EAST 4 0 4 4 +EASILY 10 0 10 10 +EARTHLY 1 0 1 1 +EARTH'S 3 0 3 3 +EARTH 17 0 17 17 +EARNESTLY 1 0 1 1 +EARNEST 4 0 4 4 +EARNED 1 0 1 1 +EARLY 13 0 13 13 +EARLIER 6 0 6 6 +EAGLES 1 0 1 1 +EAGLE 1 0 1 1 +EAGERNESS 2 0 2 2 +EAGERLY 1 0 1 1 +EAGER 4 0 4 4 +EACH 24 0 24 24 +DYNAMO 2 0 2 2 +DYING 2 0 2 2 +DYIN 1 0 1 1 +DYE 1 0 1 1 +DWELLINGS 3 0 3 3 +DWELL 1 0 1 1 +DUTY 7 0 7 7 +DUTIES 8 0 8 8 +DUST 4 0 4 4 +DUSK 2 0 2 2 +DURATION 1 0 1 1 +DURABLE 1 0 1 1 +DUPLICATE 1 0 1 1 +DUMBFOUNDED 1 0 1 1 +DUMB 1 0 1 1 +DULY 1 0 1 1 +DULL 2 0 2 2 +DUG 1 0 1 1 +DUE 5 0 5 5 +DUDLEY 1 0 1 1 +DUDGEON 1 0 1 1 +DUCKS 1 0 1 1 +DUCKLINGS 1 0 1 1 +DUCHESS 3 0 3 3 +DUBIOUSLY 1 0 1 1 +DRY 4 0 4 4 +DRUNKENNESS 1 0 1 1 +DRUMS 1 0 1 1 +DROWNED 2 0 2 2 +DROPS 3 0 3 3 +DROPPING 1 0 1 1 +DROPPED 5 0 5 5 +DROP 3 0 3 3 +DROOPED 1 0 1 1 +DRIVING 1 0 1 1 +DRIVES 1 0 1 1 +DRIVEN 1 0 1 1 +DRINKS 1 0 1 1 +DRINK 4 0 4 4 +DRIFTS 1 0 1 1 +DRIED 1 0 1 1 +DREW 10 0 10 10 +DRESSES 1 0 1 1 +DRESSED 6 0 6 6 +DRESS 1 0 1 1 +DREDGED 1 0 1 1 +DREARY 1 0 1 1 +DREAMY 1 0 1 1 +DREAMT 1 0 1 1 +DREAMS 2 0 2 2 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 5 0 5 5 +DREADING 1 0 1 1 +DREADFUL 3 0 3 3 +DRAWS 2 0 2 2 +DRAWN 7 0 7 7 +DRAWING 2 0 2 2 +DRAW 4 0 4 4 +DRAUGHT 1 0 1 1 +DRAPERIES 1 0 1 1 +DRANK 1 0 1 1 +DRAMATIST'S 1 0 1 1 +DRAMATIST 1 0 1 1 +DRAMATIC 2 0 2 2 +DRAMA 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGOONS 1 0 1 1 +DRAGON'S 1 0 1 1 +DRAGGING 1 0 1 1 +DRAGGED 2 0 2 2 +DRAG 1 0 1 1 +DOZEN 4 0 4 4 +DOWNWARD 2 0 2 2 +DOVE 1 0 1 1 +DOUGLAS 4 0 4 4 +DOUGHY 1 0 1 1 +DOUGHNUTS 1 0 1 1 +DOUGH 1 0 1 1 +DOUBTS 2 0 2 2 +DOUBTLESS 2 0 2 2 +DOUBTINGLY 1 0 1 1 +DOUBTING 1 0 1 1 +DOUBTFULLY 1 0 1 1 +DOUBTFUL 2 0 2 2 +DOUBT 11 0 11 11 +DOUBLE 6 0 6 6 +DOTH 5 0 5 5 +DOST 3 0 3 3 +DOROTHY 1 0 1 1 +DORKING 1 0 1 1 +DORCAS 6 0 6 6 +DOORS 3 0 3 3 +DOOM 1 0 1 1 +DONNITHORNE 1 0 1 1 +DONKEY 1 0 1 1 +DONE 24 0 24 24 +DOMINIONS 1 0 1 1 +DOMINION 1 0 1 1 +DOMESTIC 2 0 2 2 +DOME 1 0 1 1 +DOLLS 2 0 2 2 +DOLLARS 7 0 7 7 +DOING 12 0 12 12 +DOGS 1 0 1 1 +DOGGED 1 0 1 1 +DOG 2 0 2 2 +DOESN'T 3 0 3 3 +DOCTRINES 3 0 3 3 +DOCTRINE 4 0 4 4 +DOCTORS 1 0 1 1 +DOCTOR 7 0 7 7 +DOBRYNA 3 0 3 3 +DIZZILY 1 0 1 1 +DIVORCE 1 0 1 1 +DIVISIONS 1 0 1 1 +DIVISION 2 0 2 2 +DIVINITY 1 0 1 1 +DIVING 4 0 4 4 +DIVINE 3 0 3 3 +DIVIDING 1 0 1 1 +DIVIDEND 1 0 1 1 +DIVIDED 4 0 4 4 +DIVIDE 2 0 2 2 +DIVERTING 1 0 1 1 +DIVERT 1 0 1 1 +DIVERSITY 1 0 1 1 +DISUSE 1 0 1 1 +DISUNITED 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRUSTING 2 0 2 2 +DISTRUSTFUL 1 0 1 1 +DISTRUST 1 0 1 1 +DISTRESSED 1 0 1 1 +DISTRESS 1 0 1 1 +DISTORTION 1 0 1 1 +DISTORTED 1 0 1 1 +DISTINGUISH 3 0 3 3 +DISTINCTLY 3 0 3 3 +DISTINCTIVE 1 0 1 1 +DISTINCTION 5 0 5 5 +DISTINCT 2 0 2 2 +DISTANT 4 0 4 4 +DISTANCE 6 0 6 6 +DISSENTERS 1 0 1 1 +DISSENTED 1 0 1 1 +DISQUIETUDE 1 0 1 1 +DISPUTE 3 0 3 3 +DISPOSITIONS 1 0 1 1 +DISPOSITION 3 0 3 3 +DISPOSES 1 0 1 1 +DISPOSED 3 0 3 3 +DISPOSE 1 0 1 1 +DISPOSAL 1 0 1 1 +DISPLEASURE 1 0 1 1 +DISPLAYING 1 0 1 1 +DISPLAYED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 3 0 3 3 +DISPENSE 1 0 1 1 +DISPENSATION 1 0 1 1 
+DISPATCH 1 0 1 1 +DISPASSIONATE 1 0 1 1 +DISPARAGE 1 0 1 1 +DISOWN 1 0 1 1 +DISMISS 2 0 2 2 +DISMAY 1 0 1 1 +DISLOYAL 1 0 1 1 +DISLIKE 1 0 1 1 +DISK 1 0 1 1 +DISINCORPORATED 1 0 1 1 +DISHONOURED 1 0 1 1 +DISHES 2 0 2 2 +DISH 3 0 3 3 +DISGUSTED 1 0 1 1 +DISGUST 3 0 3 3 +DISGUISE 2 0 2 2 +DISGRACE 3 0 3 3 +DISENGAGED 1 0 1 1 +DISEASED 1 0 1 1 +DISDAINFUL 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 1 0 1 1 +DISCUSS 2 0 2 2 +DISCREETLY 1 0 1 1 +DISCREET 1 0 1 1 +DISCOVERY 2 0 2 2 +DISCOVERERS 1 0 1 1 +DISCOVERED 3 0 3 3 +DISCOVER 3 0 3 3 +DISCOURSE 2 0 2 2 +DISCOURAGED 1 0 1 1 +DISCOURAGE 2 0 2 2 +DISCOMFORT 1 0 1 1 +DISCLOSES 1 0 1 1 +DISCIPLINE 5 0 5 5 +DISCERN 1 0 1 1 +DISBURDENED 1 0 1 1 +DISASTER 1 0 1 1 +DISAPPOINTMENT 6 0 6 6 +DISAPPEARS 1 0 1 1 +DISAPPEARED 1 0 1 1 +DISAPPEAR 2 0 2 2 +DISADVANTAGE 1 0 1 1 +DISABILITIES 1 0 1 1 +DIRTY 1 0 1 1 +DIRECTLY 4 0 4 4 +DIRECTIONS 2 0 2 2 +DIRECTION 6 0 6 6 +DIRECTING 1 0 1 1 +DIRECTED 2 0 2 2 +DIRECT 8 0 8 8 +DIP 1 0 1 1 +DIOCLETIAN 1 0 1 1 +DINNER 8 0 8 8 +DINING 1 0 1 1 +DINE 1 0 1 1 +DIMLY 1 0 1 1 +DIMINUTION 2 0 2 2 +DIMINISH 1 0 1 1 +DIMENSIONS 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILEMMA 1 0 1 1 +DILATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGNITARIES 1 0 1 1 +DIGNIFIED 4 0 4 4 +DIGBY 1 0 1 1 +DIG 1 0 1 1 +DIFFUSED 1 0 1 1 +DIFFICULTIES 3 0 3 3 +DIFFICULT 11 0 11 11 +DIFFERS 2 0 2 2 +DIFFERENTLY 1 0 1 1 +DIFFERENTIATION 1 0 1 1 +DIFFERENT 15 0 15 15 +DIFFER 1 0 1 1 +DIES 1 0 1 1 +DIED 5 0 5 5 +DIE 4 0 4 4 +DIDN'T 12 0 12 12 +DICE 1 0 1 1 +DIAMONDS 1 0 1 1 +DIALOGUE 3 0 3 3 +DIALECT 1 0 1 1 +DIAGRAMS 1 0 1 1 +DEWS 2 0 2 2 +DEVOUR 3 0 3 3 +DEVOTION 1 0 1 1 +DEVOTES 1 0 1 1 +DEVOTED 2 0 2 2 +DEVOTE 1 0 1 1 +DEVOLVE 1 0 1 1 +DEVOID 1 0 1 1 +DEVISING 1 0 1 1 +DEVIL 1 0 1 1 +DEVICES 2 0 2 2 +DEVELOPMENTS 1 0 1 1 +DEVELOPMENT 6 0 6 6 +DEVELOPED 2 0 2 2 +DEVELOP 1 0 1 1 +DETOUR 1 0 1 1 +DETESTS 1 0 1 1 +DETESTED 1 0 1 1 +DETESTABLE 1 0 1 1 +DETERMINING 1 0 1 1 +DETERMINED 5 0 5 5 +DETERMINE 1 0 1 1 +DETERMINATION 1 0 1 1 +DETECT 1 0 1 1 +DETAINED 2 0 2 2 +DETAILS 2 0 2 2 +DETAIL 1 0 1 1 +DETACHMENT 1 0 1 1 +DESTRUCTIVE 1 0 1 1 +DESTRUCTION 2 0 2 2 +DESTROYED 1 0 1 1 +DESTINY 1 0 1 1 +DESTINED 2 0 2 2 +DESPITE 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 3 0 3 3 +DESPAIRING 1 0 1 1 +DESPAIR 4 0 4 4 +DESOLATION 1 0 1 1 +DESOLATE 1 0 1 1 +DESK 2 0 2 2 +DESIROUS 1 0 1 1 +DESIRES 1 0 1 1 +DESIRED 3 0 3 3 +DESIRE 4 0 4 4 +DESIRABLE 1 0 1 1 +DESIGNERS 1 0 1 1 +DESIGNATED 1 0 1 1 +DESIGN 3 0 3 3 +DESERVES 1 0 1 1 +DESERVED 1 0 1 1 +DESERVE 1 0 1 1 +DESERTS 1 0 1 1 +DESERTED 1 0 1 1 +DESERT 1 0 1 1 +DESCRIPTIONS 1 0 1 1 +DESCRIPTION 2 0 2 2 +DESCRIBING 1 0 1 1 +DESCRIBED 2 0 2 2 +DESCRIBE 3 0 3 3 +DESCENDS 1 0 1 1 +DESCENDING 2 0 2 2 +DESCENDED 2 0 2 2 +DESCENDANTS 1 0 1 1 +DESCEND 4 0 4 4 +DERIVED 1 0 1 1 +DERIVATIVE 1 0 1 1 +DEPUTY 1 0 1 1 +DEPTHS 2 0 2 2 +DEPTH 4 0 4 4 +DEPRIVED 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRESSED 1 0 1 1 +DEPRECIATING 1 0 1 1 +DEPRECATION 3 0 3 3 +DEPRAVED 1 0 1 1 +DEPOSITION 1 0 1 1 +DEPORTMENT 1 0 1 1 +DEPENDS 1 0 1 1 +DEPENDENT 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 2 0 2 2 +DEPARTURE 4 0 4 4 +DEPARTMENT 1 0 1 1 +DEPARTING 2 0 2 2 +DEPARTED 1 0 1 1 +DEPART 1 0 1 1 +DENY 5 0 5 5 +DENUNCIATION 1 0 1 1 +DENSITY 1 0 1 1 +DENSELY 1 0 1 1 +DENSE 2 0 2 2 +DENOTE 1 0 1 1 +DENIES 1 0 1 1 +DENIED 2 0 2 2 +DEMURELY 1 0 1 1 +DEMURE 1 0 1 1 +DEMONSTRATION 1 0 1 1 +DEMOCRATIC 2 0 2 2 +DEMANDS 1 0 1 1 +DEMANDED 2 0 2 2 +DELUSIVE 1 0 1 1 +DELIVERY 2 0 2 2 +DELIVERING 1 0 1 1 +DELIVERED 2 0 2 2 +DELIGHTFUL 4 0 4 4 +DELIGHTED 5 0 5 5 +DELIGHT 4 0 
4 4 +DELICIOUSNESS 1 0 1 1 +DELICIOUS 1 0 1 1 +DELICACY 1 0 1 1 +DELIBERATIONS 1 0 1 1 +DELIBERATION 1 0 1 1 +DELEGATED 1 0 1 1 +DELAYED 2 0 2 2 +DELAY 3 0 3 3 +DELAWARES 1 0 1 1 +DELAWARE 1 0 1 1 +DEIGNED 1 0 1 1 +DEIGN 1 0 1 1 +DEGREES 3 0 3 3 +DEGREE 6 0 6 6 +DEFYING 1 0 1 1 +DEFTLY 1 0 1 1 +DEFORMITIES 1 0 1 1 +DEFINITION 3 0 3 3 +DEFINITE 2 0 2 2 +DEFINES 1 0 1 1 +DEFIED 1 0 1 1 +DEFIANCE 2 0 2 2 +DEFERENCE 2 0 2 2 +DEFENDS 1 0 1 1 +DEFENDERS 1 0 1 1 +DEFENDED 1 0 1 1 +DEFENDANT 1 0 1 1 +DEFENCE 1 0 1 1 +DEFECT 1 0 1 1 +DEER 3 0 3 3 +DEEPLY 4 0 4 4 +DEEPENING 1 0 1 1 +DEEP 11 0 11 11 +DEEDS 1 0 1 1 +DECREES 1 0 1 1 +DECREED 1 0 1 1 +DECREE 1 0 1 1 +DECORATIVE 1 0 1 1 +DECORATED 3 0 3 3 +DECOMPOSE 1 0 1 1 +DECLINING 2 0 2 2 +DECLINED 2 0 2 2 +DECLARES 1 0 1 1 +DECLARED 2 0 2 2 +DECLARE 2 0 2 2 +DECLARATION 1 0 1 1 +DECISION 3 0 3 3 +DECIDEDLY 1 0 1 1 +DECIDED 3 0 3 3 +DECIDE 4 0 4 4 +DECEPTIVE 1 0 1 1 +DECEMBER 1 0 1 1 +DECEIVED 1 0 1 1 +DECEIT 1 0 1 1 +DECANTERS 1 0 1 1 +DECADES 1 0 1 1 +DEBATE 1 0 1 1 +DEATH 19 0 19 19 +DEARS 1 0 1 1 +DEARLY 2 0 2 2 +DEAREST 2 0 2 2 +DEAR 22 0 22 22 +DEALER 1 0 1 1 +DEAL 10 0 10 10 +DEAF 1 0 1 1 +DAZZLING 2 0 2 2 +DAZED 1 0 1 1 +DAYS 16 0 16 16 +DAYLIGHT 2 0 2 2 +DAWN 2 0 2 2 +DAVID 8 0 8 8 +DAUNTLESS 1 0 1 1 +DAUGHTERS 1 0 1 1 +DAUGHTER 9 0 9 9 +DATING 1 0 1 1 +DATE 1 0 1 1 +DATA 2 0 2 2 +DARTED 3 0 3 3 +DARLING 1 0 1 1 +DARKNESS 3 0 3 3 +DARKENED 1 0 1 1 +DARING 2 0 2 2 +DARED 3 0 3 3 +DARE 3 0 3 3 +DANGERS 1 0 1 1 +DANGEROUS 4 0 4 4 +DANGER 9 0 9 9 +DANES 1 0 1 1 +DANCING 2 0 2 2 +DANCES 1 0 1 1 +DANCERS 1 0 1 1 +DANCER 1 0 1 1 +DANCED 2 0 2 2 +DANCE 4 0 4 4 +DAMSEL 1 0 1 1 +DAMNED 1 0 1 1 +DAMNABLE 1 0 1 1 +DAMASK 1 0 1 1 +DAMAGING 1 0 1 1 +DAMAGE 2 0 2 2 +DAINTY 1 0 1 1 +DAILY 3 0 3 3 +DAFT 1 0 1 1 +D 2 0 2 2 +CYRIL 2 0 2 2 +CYPRESS 1 0 1 1 +CYNTHIA'S 1 0 1 1 +CYNTHIA 3 0 3 3 +CYMBALS 1 0 1 1 +CUTTINGS 1 0 1 1 +CUT 5 0 5 5 +CUSTOMS 1 0 1 1 +CUSTOMER'S 1 0 1 1 +CUSTOMARILY 1 0 1 1 +CUSTOM 2 0 2 2 +CUSTODY 2 0 2 2 +CUSHIONED 1 0 1 1 +CURVE 2 0 2 2 +CURTAINS 1 0 1 1 +CURTAIN 3 0 3 3 +CURSE 1 0 1 1 +CURRENT 8 0 8 8 +CURLY 1 0 1 1 +CURIOUSLY 1 0 1 1 +CURIOUS 4 0 4 4 +CURIOSITY 2 0 2 2 +CURBSTONE 1 0 1 1 +CUPS 1 0 1 1 +CUPBOARD 2 0 2 2 +CUP 3 0 3 3 +CUNNING 3 0 3 3 +CUMBERLAND'S 1 0 1 1 +CULTURE 4 0 4 4 +CULTIVATING 1 0 1 1 +CULTIVATE 1 0 1 1 +CULPRIT 1 0 1 1 +CULMINATING 2 0 2 2 +CUFFS 1 0 1 1 +CUB 1 0 1 1 +CRYSTALLIZE 1 0 1 1 +CRYING 1 0 1 1 +CRY 5 0 5 5 +CRUSHED 1 0 1 1 +CRUSH 3 0 3 3 +CRUMBLED 1 0 1 1 +CRUMBLE 1 0 1 1 +CRUISING 1 0 1 1 +CRUELTY 2 0 2 2 +CRUEL 1 0 1 1 +CRUCIFIX 2 0 2 2 +CRUCIFIED 1 0 1 1 +CROWNS 1 0 1 1 +CROWNING 2 0 2 2 +CROWN 6 0 6 6 +CROWDING 1 0 1 1 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSLY 1 0 1 1 +CROSSING 1 0 1 1 +CROSSED 3 0 3 3 +CROPS 1 0 1 1 +CROP 1 0 1 1 +CROOKED 3 0 3 3 +CRITICISM 1 0 1 1 +CRITICALLY 1 0 1 1 +CRISIS 1 0 1 1 +CRIMSON 1 0 1 1 +CRIMINAL 3 0 3 3 +CRIME 3 0 3 3 +CRIES 3 0 3 3 +CRIED 23 0 23 23 +CRESTED 1 0 1 1 +CREPT 1 0 1 1 +CREEPING 2 0 2 2 +CREEP 1 0 1 1 +CREEK 2 0 2 2 +CREED 1 0 1 1 +CREDIT 2 0 2 2 +CREATURES 2 0 2 2 +CREATURE 8 0 8 8 +CREATOR 1 0 1 1 +CREATIVE 1 0 1 1 +CREATIONS 1 0 1 1 +CREATION 2 0 2 2 +CREATING 1 0 1 1 +CREATES 1 0 1 1 +CREATED 2 0 2 2 +CREATE 3 0 3 3 +CREAM 1 0 1 1 +CREAKED 1 0 1 1 +CRAZY 2 0 2 2 +CRAWLING 1 0 1 1 +CRAWL 1 0 1 1 +CRANED 1 0 1 1 +CRANE 1 0 1 1 +CRAMPNESS 1 0 1 1 +CRAMPED 1 0 1 1 +CRACKING 1 0 1 1 +CRACKED 2 0 2 2 +COZIER 1 0 1 1 +COWARDLY 1 0 1 1 +COWARD 1 0 1 1 +COW 2 0 2 2 +COVERT 1 0 1 1 +COVERING 1 0 1 1 +COVERED 2 0 2 2 +COVER 2 0 2 2 +COVENANTERS 5 0 5 5 
+COUSINS 3 0 3 3 +COUSIN'S 2 0 2 2 +COUSIN 7 0 7 7 +COURTIERS 2 0 2 2 +COURTESY 2 0 2 2 +COURAGEOUS 1 0 1 1 +COURAGE 6 0 6 6 +COUPLE 1 0 1 1 +COUNTY 7 0 7 7 +COUNTRIES 1 0 1 1 +COUNTING 1 0 1 1 +COUNTERPART 1 0 1 1 +COUNTERFEITED 1 0 1 1 +COUNTERACT 1 0 1 1 +COUNTENANCE 3 0 3 3 +COUNT 15 0 15 15 +COUCH 1 0 1 1 +COTTON 12 0 12 12 +COTTAGE 2 0 2 2 +COSTUME 2 0 2 2 +CORRUPTION 1 0 1 1 +CORRIDOR 1 0 1 1 +CORRESPONDING 1 0 1 1 +CORRESPOND 1 0 1 1 +CORRECTLY 1 0 1 1 +CORRECTED 1 0 1 1 +CORRECT 3 0 3 3 +CORNERS 4 0 4 4 +CORNER 13 0 13 13 +CORMORANT 1 0 1 1 +CORDIALLY 1 0 1 1 +CORDIALITY 1 0 1 1 +CORAL 1 0 1 1 +COQUETRY 1 0 1 1 +COPY 2 0 2 2 +COPPER 1 0 1 1 +COPIED 2 0 2 2 +COOKERY 2 0 2 2 +COOKED 1 0 1 1 +CONVULSION 1 0 1 1 +CONVIVIALITY 1 0 1 1 +CONVINCING 2 0 2 2 +CONVINCED 2 0 2 2 +CONVICTIONS 2 0 2 2 +CONVICTION 2 0 2 2 +CONVEYED 1 0 1 1 +CONVEXITY 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSION 1 0 1 1 +CONVERSING 1 0 1 1 +CONVERSE 2 0 2 2 +CONVERSATIONS 1 0 1 1 +CONVERSATIONAL 1 0 1 1 +CONVERSATION 6 0 6 6 +CONVENTIONALITY 1 0 1 1 +CONVENTIONAL 1 0 1 1 +CONVENTION 1 0 1 1 +CONTROLLING 1 0 1 1 +CONTRIVED 2 0 2 2 +CONTRIVANCE 2 0 2 2 +CONTRITION 1 0 1 1 +CONTRITE 1 0 1 1 +CONTRIBUTE 1 0 1 1 +CONTRASTING 1 0 1 1 +CONTRAST 4 0 4 4 +CONTRARY 5 0 5 5 +CONTRADICTIONS 1 0 1 1 +CONTRACTION 2 0 2 2 +CONTINUOUSLY 2 0 2 2 +CONTINUOUS 1 0 1 1 +CONTINUED 14 0 14 14 +CONTINUE 1 0 1 1 +CONTINUALLY 2 0 2 2 +CONTINUAL 3 0 3 3 +CONTINGENCY 1 0 1 1 +CONTINENT 1 0 1 1 +CONTESTED 1 0 1 1 +CONTEST 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTMENT 1 0 1 1 +CONTENTEDLY 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPTIBLE 1 0 1 1 +CONTEMPT 2 0 2 2 +CONTEMPORARY 1 0 1 1 +CONTEMPLATION 1 0 1 1 +CONTEMPLATED 1 0 1 1 +CONTAMINATION 1 0 1 1 +CONTAMINATED 1 0 1 1 +CONTAINS 1 0 1 1 +CONTAINERS 1 0 1 1 +CONTAGION 1 0 1 1 +CONSUMPTION 13 0 13 13 +CONSUMERS 2 0 2 2 +CONSUMER'S 1 0 1 1 +CONSUMER 5 0 5 5 +CONSUMED 1 0 1 1 +CONSUME 2 0 2 2 +CONSULTED 1 0 1 1 +CONSULTATION 1 0 1 1 +CONSULT 1 0 1 1 +CONSTRUCTION 4 0 4 4 +CONSTRUCTED 1 0 1 1 +CONSTRAINEDLY 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTITUTION 3 0 3 3 +CONSTITUTES 1 0 1 1 +CONSTITUTED 1 0 1 1 +CONSTITUTE 1 0 1 1 +CONSTANT 3 0 3 3 +CONSTANCY 1 0 1 1 +CONSPIRACY 2 0 2 2 +CONSPICUOUS 8 0 8 8 +CONSOLE 1 0 1 1 +CONSOLATION 1 0 1 1 +CONSISTENTLY 1 0 1 1 +CONSIDERING 1 0 1 1 +CONSIDERED 5 0 5 5 +CONSIDERATIONS 1 0 1 1 +CONSIDERABLY 1 0 1 1 +CONSERVATION 1 0 1 1 +CONSEQUENTLY 1 0 1 1 +CONSEQUENT 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 5 0 5 5 +CONSENT 4 0 4 4 +CONSEIL 6 0 6 6 +CONSECRATED 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIOUS 3 0 3 3 +CONSCIENCES 1 0 1 1 +CONSCIENCE 4 0 4 4 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 1 0 1 1 +CONNECTED 3 0 3 3 +CONNECT 2 0 2 2 +CONJURATION 1 0 1 1 +CONJUNCTURE 1 0 1 1 +CONJECTURE 1 0 1 1 +CONGRESS 1 0 1 1 +CONGREGATED 1 0 1 1 +CONGRATULATIONS 1 0 1 1 +CONGRATULATION 1 0 1 1 +CONGRATULATE 1 0 1 1 +CONGO 1 0 1 1 +CONGENIAL 1 0 1 1 +CONFUSION 2 0 2 2 +CONFUSES 1 0 1 1 +CONFUSED 3 0 3 3 +CONFOUNDEDLY 1 0 1 1 +CONFLICTING 1 0 1 1 +CONFLICT 3 0 3 3 +CONFISCATED 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 3 0 3 3 +CONFINED 2 0 2 2 +CONFIDENT 1 0 1 1 +CONFIDENCE 7 0 7 7 +CONFIDE 1 0 1 1 +CONFIDANTS 1 0 1 1 +CONFESSION 1 0 1 1 +CONFESSED 1 0 1 1 +CONFESS 4 0 4 4 +CONFERS 1 0 1 1 +CONFEDERATE 1 0 1 1 +CONDUCTS 1 0 1 1 +CONDUCTORS 2 0 2 2 +CONDUCTED 1 0 1 1 +CONDUCIVE 1 0 1 1 +CONDITIONS 3 0 3 3 +CONDITION 11 0 11 11 +CONDENSED 1 0 1 1 +CONDENSATION 1 0 1 1 +CONDEMNATION 2 0 2 2 +CONCUR 1 0 1 1 +CONCOURSE 1 0 1 1 +CONCORD 1 0 1 1 +CONCLUSION 2 0 2 2 +CONCERTING 1 0 1 1 
+CONCERNING 4 0 4 4 +CONCERNED 8 0 8 8 +CONCERN 1 0 1 1 +CONCEPTIONS 2 0 2 2 +CONCEPTION 2 0 2 2 +CONCEPT 1 0 1 1 +CONCEIVED 1 0 1 1 +CONCEIVE 2 0 2 2 +CONCEALING 1 0 1 1 +CONCEALED 1 0 1 1 +CONCEAL 1 0 1 1 +COMRADES 3 0 3 3 +COMPULSIVE 1 0 1 1 +COMPULSION 1 0 1 1 +COMPREHENSIVE 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPREHEND 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 2 0 2 2 +COMPORT 1 0 1 1 +COMPONENT 1 0 1 1 +COMPLY 2 0 2 2 +COMPLIMENTARY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICATED 1 0 1 1 +COMPLIANCE 2 0 2 2 +COMPLEXION 2 0 2 2 +COMPLETELY 3 0 3 3 +COMPLETED 2 0 2 2 +COMPLETE 2 0 2 2 +COMPLEMENT 1 0 1 1 +COMPLAINTS 1 0 1 1 +COMPLAINT 1 0 1 1 +COMPLAINING 1 0 1 1 +COMPLAINEST 1 0 1 1 +COMPLAIN 2 0 2 2 +COMPLACENTLY 1 0 1 1 +COMPLACENCY 1 0 1 1 +COMPETITION 1 0 1 1 +COMPETE 1 0 1 1 +COMPENSATION 1 0 1 1 +COMPELS 1 0 1 1 +COMPELLED 2 0 2 2 +COMPEL 1 0 1 1 +COMPASS 1 0 1 1 +COMPARISON 1 0 1 1 +COMPARED 3 0 3 3 +COMPARE 1 0 1 1 +COMPARATIVELY 2 0 2 2 +COMPARATIVE 1 0 1 1 +COMPANY 11 0 11 11 +COMPANIONSHIP 2 0 2 2 +COMPANIONS 2 0 2 2 +COMPANIONLESS 1 0 1 1 +COMPANION 5 0 5 5 +COMPANIES 3 0 3 3 +COMPACT 1 0 1 1 +COMMUNITY 3 0 3 3 +COMMUNITIES 1 0 1 1 +COMMUNION 1 0 1 1 +COMMUNICATED 2 0 2 2 +COMMUNICATE 1 0 1 1 +COMMOTION 1 0 1 1 +COMMONLY 1 0 1 1 +COMMITTING 1 0 1 1 +COMMITTEE 3 0 3 3 +COMMITTED 2 0 2 2 +COMMITTAL 1 0 1 1 +COMMITS 1 0 1 1 +COMMIT 1 0 1 1 +COMMISSIONS 1 0 1 1 +COMMISSIONERS 1 0 1 1 +COMMISSION 1 0 1 1 +COMMISERATION 1 0 1 1 +COMMERCIAL 2 0 2 2 +COMMENTS 1 0 1 1 +COMMENTED 1 0 1 1 +COMMENTARY 2 0 2 2 +COMMENT 1 0 1 1 +COMMENDED 1 0 1 1 +COMMEND 1 0 1 1 +COMMENCEMENT 1 0 1 1 +COMMENCE 1 0 1 1 +COMMANDS 1 0 1 1 +COMMANDMENT 2 0 2 2 +COMMANDERS 1 0 1 1 +COMMANDER 2 0 2 2 +COMMANDED 1 0 1 1 +COMFORTS 2 0 2 2 +COMFORTING 1 0 1 1 +COMFORTED 2 0 2 2 +COMFORTABLE 3 0 3 3 +COMFORT 8 0 8 8 +COMETH 1 0 1 1 +COMES 10 0 10 10 +COMELY 1 0 1 1 +COMEDY 1 0 1 1 +COMEDIES 2 0 2 2 +COME 51 0 51 51 +COMBINED 2 0 2 2 +COMBINE 1 0 1 1 +COMBINATIONS 1 0 1 1 +COMBINATION 2 0 2 2 +COMBAT 2 0 2 2 +COMBASH 1 0 1 1 +COMB 1 0 1 1 +COLUMNS 1 0 1 1 +COLUMN 1 0 1 1 +COLORISTS 2 0 2 2 +COLONY 3 0 3 3 +COLONIAL 1 0 1 1 +COLONEL 1 0 1 1 +COLLEGE 4 0 4 4 +COLLECTIONS 1 0 1 1 +COLLECTION 1 0 1 1 +COLLECTING 1 0 1 1 +COLLAR 2 0 2 2 +COLLAPSED 1 0 1 1 +COLBERT 1 0 1 1 +COINED 1 0 1 1 +COINCIDE 1 0 1 1 +COFFEE 6 0 6 6 +COCK 1 0 1 1 +COBBLER 1 0 1 1 +COAXED 1 0 1 1 +COAT 1 0 1 1 +COASTS 1 0 1 1 +COAST 3 0 3 3 +COACHMAN 1 0 1 1 +COACH 1 0 1 1 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUMSINESS 1 0 1 1 +CLOTTED 1 0 1 1 +CLOTHING 1 0 1 1 +CLOTHES 5 0 5 5 +CLOTHED 1 0 1 1 +CLOSET 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 6 0 6 6 +CLOSED 2 0 2 2 +CLOSE 10 0 10 10 +CLOAKS 2 0 2 2 +CLIMBING 1 0 1 1 +CLIMATE 2 0 2 2 +CLIFF 2 0 2 2 +CLIENTS 1 0 1 1 +CLICKED 1 0 1 1 +CLEVERNESS 3 0 3 3 +CLEVER 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 1 0 1 1 +CLERGYMAN'S 2 0 2 2 +CLERGY 2 0 2 2 +CLEARNESS 1 0 1 1 +CLEARLY 5 0 5 5 +CLEARING 1 0 1 1 +CLEAREST 1 0 1 1 +CLEAR 10 0 10 10 +CLEANED 1 0 1 1 +CLAY 5 0 5 5 +CLAWS 2 0 2 2 +CLASSIFYING 1 0 1 1 +CLASSIFIER 1 0 1 1 +CLASSIFICATION 1 0 1 1 +CLASSIC 2 0 2 2 +CLASSES 1 0 1 1 +CLASSED 3 0 3 3 +CLASS 9 0 9 9 +CLASPING 1 0 1 1 +CLASPED 2 0 2 2 +CLASHING 2 0 2 2 +CLARIFIED 1 0 1 1 +CLAP 1 0 1 1 +CLAMOROUS 1 0 1 1 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CLAIM 2 0 2 2 +CIVILIZATION 2 0 2 2 +CIVIL 3 0 3 3 +CITY 15 0 15 15 +CITIZENS 4 0 4 4 +CITIZEN 2 0 2 2 +CITIES 2 0 2 2 +CIRCUMVENTION 1 0 1 1 +CIRCUMSTANCES 4 0 4 4 +CIRCUMSTANCE 3 0 3 3 +CIRCUMNAVIGATION 1 0 1 1 +CIRCUMFERENCE 1 0 1 1 +CIRCULATED 1 0 1 1 +CIRCUITS 1 0 1 1 
+CIRCUITOUS 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLE 7 0 7 7 +CIGARS 1 0 1 1 +CHURNING 1 0 1 1 +CHURCHES 1 0 1 1 +CHURCH 17 0 17 17 +CHUCKLING 1 0 1 1 +CHUBBY 1 0 1 1 +CHRONICLED 1 0 1 1 +CHRISTMAS 4 0 4 4 +CHRISTIANS 1 0 1 1 +CHRISTIANITY 2 0 2 2 +CHRIST'S 1 0 1 1 +CHRISM 1 0 1 1 +CHOSEN 3 0 3 3 +CHOPPED 1 0 1 1 +CHOOSING 1 0 1 1 +CHOOSE 3 0 3 3 +CHOKING 1 0 1 1 +CHOICE 2 0 2 2 +CHOCOLATE 1 0 1 1 +CHIP 1 0 1 1 +CHINA 1 0 1 1 +CHIN 2 0 2 2 +CHIMNEY 2 0 2 2 +CHILDREN'S 1 0 1 1 +CHILDREN 18 0 18 18 +CHILDISH 2 0 2 2 +CHILDHOOD'S 1 0 1 1 +CHILDHOOD 3 0 3 3 +CHILD'S 2 0 2 2 +CHILD 19 0 19 19 +CHIEFTAIN 1 0 1 1 +CHIEFLY 4 0 4 4 +CHIEF 3 0 3 3 +CHESTNUTS 1 0 1 1 +CHESTNUT 3 0 3 3 +CHEST 2 0 2 2 +CHERRY 1 0 1 1 +CHERRIES 3 0 3 3 +CHERISH 1 0 1 1 +CHEMICALS 1 0 1 1 +CHEMICAL 1 0 1 1 +CHELSEA 1 0 1 1 +CHELFORD 4 0 4 4 +CHEERS 1 0 1 1 +CHEERFULLY 2 0 2 2 +CHEERFUL 2 0 2 2 +CHEEKS 1 0 1 1 +CHEEK 2 0 2 2 +CHECKS 1 0 1 1 +CHECKED 3 0 3 3 +CHAUCER'S 1 0 1 1 +CHAUCER 1 0 1 1 +CHASING 1 0 1 1 +CHARTER 1 0 1 1 +CHARMING 1 0 1 1 +CHARMED 1 0 1 1 +CHARM 1 0 1 1 +CHARLOTTE 2 0 2 2 +CHARLESTOWN 1 0 1 1 +CHARLESTON 1 0 1 1 +CHARITY 1 0 1 1 +CHARGED 2 0 2 2 +CHARGE 8 0 8 8 +CHARACTERS 2 0 2 2 +CHARACTERIZES 1 0 1 1 +CHARACTERIZED 1 0 1 1 +CHARACTER 14 0 14 14 +CHAPTERS 1 0 1 1 +CHAPTER 3 0 3 3 +CHAPEL 1 0 1 1 +CHAP 1 0 1 1 +CHAOS 1 0 1 1 +CHANNEL 2 0 2 2 +CHANGING 2 0 2 2 +CHANCES 1 0 1 1 +CHANCE 6 0 6 6 +CHAMBERS 2 0 2 2 +CHAMBER 4 0 4 4 +CHALICE 1 0 1 1 +CHAIRS 4 0 4 4 +CHAIR 15 0 15 15 +CHAIN 1 0 1 1 +CETERA 3 0 3 3 +CERTITUDE 1 0 1 1 +CERTAINLY 8 0 8 8 +CERTAIN 12 0 12 12 +CEREMONIES 2 0 2 2 +CEREMONIAL 1 0 1 1 +CENTURY 1 0 1 1 +CENTURIES 1 0 1 1 +CENTRAL 5 0 5 5 +CENTIPEDE 1 0 1 1 +CELLS 1 0 1 1 +CELLAR 1 0 1 1 +CELL 1 0 1 1 +CELESTIAL 2 0 2 2 +CELEBRITY 1 0 1 1 +CELEBRATION 1 0 1 1 +CELEBRATED 4 0 4 4 +CEDAR 1 0 1 1 +CEASING 1 0 1 1 +CEASE 2 0 2 2 +CAVERN 2 0 2 2 +CAVALRY 2 0 2 2 +CAVALIERS 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 +CAUSES 1 0 1 1 +CAUSED 5 0 5 5 +CAUSE 9 0 9 9 +CATTLE 1 0 1 1 +CATS 1 0 1 1 +CATHOLIC 3 0 3 3 +CATHEDRAL 1 0 1 1 +CATECHISM 2 0 2 2 +CATCHING 1 0 1 1 +CATCHES 1 0 1 1 +CATCH 3 0 3 3 +CATASTROPHE 1 0 1 1 +CATAPULT 1 0 1 1 +CASTS 1 0 1 1 +CASTLE 1 0 1 1 +CAST 8 0 8 8 +CASES 6 0 6 6 +CASEMATES 1 0 1 1 +CARTS 1 0 1 1 +CART 1 0 1 1 +CARRYING 4 0 4 4 +CARRY 7 0 7 7 +CARROTS 1 0 1 1 +CARRIES 1 0 1 1 +CARRIED 13 0 13 13 +CARRIAGES 1 0 1 1 +CARRIAGE 8 0 8 8 +CARPETED 1 0 1 1 +CARING 1 0 1 1 +CAREY 3 0 3 3 +CARELESSNESS 1 0 1 1 +CARELESS 1 0 1 1 +CAREFULLY 7 0 7 7 +CAREFUL 5 0 5 5 +CAREER 4 0 4 4 +CARED 4 0 4 4 +CARE 13 0 13 13 +CARD 1 0 1 1 +CAPTURED 1 0 1 1 +CAPTIVE 2 0 2 2 +CAPTIVATE 1 0 1 1 +CAPTAIN 27 0 27 27 +CAPSIZE 1 0 1 1 +CAPRICE 1 0 1 1 +CAPITAL 1 0 1 1 +CAPACITY 3 0 3 3 +CAPABLE 3 0 3 3 +CAP'N 4 0 4 4 +CAP 7 0 7 7 +CANST 1 0 1 1 +CANS 1 0 1 1 +CANOPY 1 0 1 1 +CANON 1 0 1 1 +CANNON 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLES 2 0 2 2 +CANARY 1 0 1 1 +CANAL 1 0 1 1 +CAN'T 21 0 21 21 +CAMPS 1 0 1 1 +CAMPAIGN 2 0 2 2 +CAMP 1 0 1 1 +CAME 44 0 44 44 +CALVINISTIC 1 0 1 1 +CALMNESS 2 0 2 2 +CALMED 1 0 1 1 +CALM 5 0 5 5 +CALLS 5 0 5 5 +CALLOUS 1 0 1 1 +CALLING 2 0 2 2 +CALL 10 0 10 10 +CALHOUN 1 0 1 1 +CAKES 2 0 2 2 +CAKE 1 0 1 1 +CABINET 3 0 3 3 +CABIN 2 0 2 2 +CABALISTIC 1 0 1 1 +BYE 2 0 2 2 +BUTTONING 1 0 1 1 +BUTTON 1 0 1 1 +BUTTERFLY 1 0 1 1 +BUTT 1 0 1 1 +BUTLER 2 0 2 2 +BUTCHERY 2 0 2 2 +BUTCHERED 1 0 1 1 +BUSY 1 0 1 1 +BUSINESS 5 0 5 5 +BUSHES 4 0 4 4 +BUSHEL 1 0 1 1 +BURSTS 1 0 1 1 +BURST 5 0 5 5 +BURNT 1 0 1 1 +BURNS 1 0 1 1 +BURNING 2 0 2 2 +BURNED 1 0 1 1 +BURIED 2 0 2 
2 +BURGOS 1 0 1 1 +BURGLARS 1 0 1 1 +BURDEN 1 0 1 1 +BUOYANT 2 0 2 2 +BUNDLE 1 0 1 1 +BULL 2 0 2 2 +BUILT 2 0 2 2 +BUILDS 1 0 1 1 +BUILDING 5 0 5 5 +BUGGY 1 0 1 1 +BUDDING 1 0 1 1 +BUCKLING 1 0 1 1 +BUCKLES 1 0 1 1 +BUCKINGHAM 1 0 1 1 +BUBBLING 1 0 1 1 +BUBBLE'S 1 0 1 1 +BRUTE 1 0 1 1 +BRUTALITY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSH 1 0 1 1 +BRUISED 1 0 1 1 +BROWSED 1 0 1 1 +BROW 1 0 1 1 +BROUGHT 14 0 14 14 +BROTHERS 5 0 5 5 +BROTHELS 1 0 1 1 +BROOM 1 0 1 1 +BROOKS 1 0 1 1 +BROOKLYN 1 0 1 1 +BROODING 3 0 3 3 +BRONTES 1 0 1 1 +BROKEN 7 0 7 7 +BROKE 1 0 1 1 +BROADLY 1 0 1 1 +BROADEST 1 0 1 1 +BROAD 11 0 11 11 +BRITISH 2 0 2 2 +BRISTLING 1 0 1 1 +BRING 9 0 9 9 +BRIM 2 0 2 2 +BRILLIANT 5 0 5 5 +BRILLIANCY 1 0 1 1 +BRIGHTNESS 1 0 1 1 +BRIGHTLY 1 0 1 1 +BRIGHTEST 1 0 1 1 +BRIGHTER 1 0 1 1 +BRIGHTENED 2 0 2 2 +BRIGHT 16 0 16 16 +BRIGANTINE 1 0 1 1 +BRIEFLY 1 0 1 1 +BRIDGE 4 0 4 4 +BRIDE 1 0 1 1 +BRICK 2 0 2 2 +BREWING 1 0 1 1 +BREEZE 1 0 1 1 +BRED 1 0 1 1 +BREATHING 4 0 4 4 +BREATH 10 0 10 10 +BREASTPLATE 1 0 1 1 +BREAST 2 0 2 2 +BREAKWATER 1 0 1 1 +BREAKING 2 0 2 2 +BREAKFASTING 1 0 1 1 +BREAKERS 1 0 1 1 +BRAVELY 1 0 1 1 +BRAVE 2 0 2 2 +BRANWELL 1 0 1 1 +BRANDY 1 0 1 1 +BRANDON 4 0 4 4 +BRANDED 1 0 1 1 +BRANCHES 8 0 8 8 +BRAINS 3 0 3 3 +BRAIN 2 0 2 2 +BRAIDS 1 0 1 1 +BRAIDED 1 0 1 1 +BRAID 1 0 1 1 +BRACTON 1 0 1 1 +BRACELETS 1 0 1 1 +BRACELET 1 0 1 1 +BOY'S 3 0 3 3 +BOY 17 0 17 17 +BOXES 1 0 1 1 +BOWING 1 0 1 1 +BOWED 1 0 1 1 +BOW 4 0 4 4 +BOUT 1 0 1 1 +BOUQUETS 1 0 1 1 +BOUND 6 0 6 6 +BOUGHS 1 0 1 1 +BOTTOMS 1 0 1 1 +BOTTOM 7 0 7 7 +BOTTLES 2 0 2 2 +BOTTLE 1 0 1 1 +BOTHER 1 0 1 1 +BOTH 34 0 34 34 +BOSOM 2 0 2 2 +BORE 2 0 2 2 +BORDERING 1 0 1 1 +BORDERED 1 0 1 1 +BOOTS 2 0 2 2 +BOOLOOROO 12 0 12 12 +BOOK 4 0 4 4 +BONY 1 0 1 1 +BONNET 1 0 1 1 +BONES 2 0 2 2 +BONDAGE 1 0 1 1 +BOND 3 0 3 3 +BOLTON 1 0 1 1 +BOLDLY 3 0 3 3 +BOLDEST 1 0 1 1 +BOILED 1 0 1 1 +BOIL 1 0 1 1 +BOGUS 3 0 3 3 +BOGGS 2 0 2 2 +BODY 8 0 8 8 +BODILY 3 0 3 3 +BODIES 3 0 3 3 +BOASTING 2 0 2 2 +BOARDED 2 0 2 2 +BOARD 9 0 9 9 +BLUSHING 2 0 2 2 +BLUSHED 1 0 1 1 +BLUSH 1 0 1 1 +BLUNT 1 0 1 1 +BLUFF 1 0 1 1 +BLUES 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 1 0 1 1 +BLOW 2 0 2 2 +BLOOM 1 0 1 1 +BLOODY 1 0 1 1 +BLOODSHED 1 0 1 1 +BLOODED 1 0 1 1 +BLOOD 6 0 6 6 +BLOCKS 1 0 1 1 +BLISS 1 0 1 1 +BLIND 1 0 1 1 +BLEW 1 0 1 1 +BLESSED 3 0 3 3 +BLESS 2 0 2 2 +BLEED 1 0 1 1 +BLEACHED 1 0 1 1 +BLAZING 1 0 1 1 +BLAZED 1 0 1 1 +BLAZE 2 0 2 2 +BLANK 2 0 2 2 +BLAME 1 0 1 1 +BLADE 2 0 2 2 +BLACKSTONE 1 0 1 1 +BLACKNESSES 1 0 1 1 +BLACKNESS 1 0 1 1 +BLACKER 2 0 2 2 +BLACK 22 0 22 22 +BITTER 1 0 1 1 +BITS 1 0 1 1 +BITES 1 0 1 1 +BITE 1 0 1 1 +BISHOPS 5 0 5 5 +BIRTH 2 0 2 2 +BIRMINGHAM 1 0 1 1 +BIRDS 4 0 4 4 +BIRD 4 0 4 4 +BIRCHES 1 0 1 1 +BINDING 1 0 1 1 +BIND 1 0 1 1 +BIG 12 0 12 12 +BIDDING 1 0 1 1 +BIBLE 1 0 1 1 +BEYOND 6 0 6 6 +BEWILDERMENT 1 0 1 1 +BEWILDERED 6 0 6 6 +BEWARE 1 0 1 1 +BEVERAGES 1 0 1 1 +BETWEEN 25 0 25 25 +BETTING 1 0 1 1 +BETTER 25 0 25 25 +BETRAYED 1 0 1 1 +BETRAY 1 0 1 1 +BETH 12 0 12 12 +BESTOWED 1 0 1 1 +BESTOW 1 0 1 1 +BEST 22 0 22 22 +BESOUGHT 1 0 1 1 +BESIEGED 1 0 1 1 +BESIDES 8 0 8 8 +BESIDE 5 0 5 5 +BERTIE 1 0 1 1 +BERRIES 1 0 1 1 +BENT 4 0 4 4 +BENIGNANTLY 1 0 1 1 +BENIGHTED 1 0 1 1 +BENEFIT 1 0 1 1 +BENEATH 6 0 6 6 +BEND 1 0 1 1 +BENCHES 3 0 3 3 +BEN 3 0 3 3 +BELT 2 0 2 2 +BELOW 1 0 1 1 +BELOVED 3 0 3 3 +BELONGS 1 0 1 1 +BELONGING 1 0 1 1 +BELONGED 3 0 3 3 +BELONG 2 0 2 2 +BELLY 3 0 3 3 +BELLS 1 0 1 1 +BELLINGHAM 2 0 2 2 +BELL 3 0 3 3 +BELIEVING 2 0 2 2 +BELIEVERS 1 0 1 1 +BELIEVED 5 0 5 5 +BELIEVE 21 0 21 21 +BELIEF 3 0 3 3 +BEINGS 1 0 1 1 
+BEHOLDING 1 0 1 1 +BEHOLDERS 1 0 1 1 +BEHOLDER 1 0 1 1 +BEHIND 10 0 10 10 +BEHELD 1 0 1 1 +BEHAVIOURIST 2 0 2 2 +BEHAVED 1 0 1 1 +BEHAVE 1 0 1 1 +BEHALF 1 0 1 1 +BEGUN 5 0 5 5 +BEGUILING 1 0 1 1 +BEGOT 1 0 1 1 +BEGINS 4 0 4 4 +BEGINNING 4 0 4 4 +BEGIN 9 0 9 9 +BEGGAR'S 1 0 1 1 +BEGGAR 2 0 2 2 +BEGAN 22 0 22 22 +BEFITS 1 0 1 1 +BEFELL 1 0 1 1 +BEEN 137 0 137 137 +BEELZEBUB 1 0 1 1 +BEEHIVES 1 0 1 1 +BEEF 1 0 1 1 +BEDSIDE 1 0 1 1 +BEDROOM 2 0 2 2 +BEDFORD 1 0 1 1 +BECOMING 1 0 1 1 +BECOMES 8 0 8 8 +BECOME 14 0 14 14 +BECKONED 1 0 1 1 +BECKON 1 0 1 1 +BECAUSE 30 0 30 30 +BECAME 12 0 12 12 +BEAUTY 21 0 21 21 +BEAUTIFUL 13 0 13 13 +BEAUTIES 2 0 2 2 +BEATITUDE 2 0 2 2 +BEATING 2 0 2 2 +BEATERS 1 0 1 1 +BEATEN 2 0 2 2 +BEAT 1 0 1 1 +BEASTS 2 0 2 2 +BEARS 4 0 4 4 +BEARING 3 0 3 3 +BEARD 1 0 1 1 +BEAR'S 1 0 1 1 +BEAR 11 0 11 11 +BEAMS 1 0 1 1 +BEAK 6 0 6 6 +BEADS 1 0 1 1 +BATTLED 1 0 1 1 +BATTERIES 1 0 1 1 +BATTERED 1 0 1 1 +BAT 1 0 1 1 +BASTARD 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASED 1 0 1 1 +BARTLEY 14 0 14 14 +BARS 1 0 1 1 +BARRICADED 1 0 1 1 +BARREN 1 0 1 1 +BARREL 1 0 1 1 +BARRACK 1 0 1 1 +BARNS 1 0 1 1 +BARN 4 0 4 4 +BARGAINS 1 0 1 1 +BAREFOOT 1 0 1 1 +BARE 2 0 2 2 +BARBARITY 1 0 1 1 +BAR 1 0 1 1 +BAPTIZED 1 0 1 1 +BAPTISM 1 0 1 1 +BANTER 1 0 1 1 +BANQUET 1 0 1 1 +BANKS 1 0 1 1 +BANK 3 0 3 3 +BANISHED 1 0 1 1 +BANG 1 0 1 1 +BAND 2 0 2 2 +BALMY 1 0 1 1 +BALLS 2 0 2 2 +BALLET 2 0 2 2 +BAKER 1 0 1 1 +BAGS 1 0 1 1 +BAGGAGE 1 0 1 1 +BAG 1 0 1 1 +BAFFLED 2 0 2 2 +BADLY 2 0 2 2 +BADE 3 0 3 3 +BAD 6 0 6 6 +BACON 1 0 1 1 +BACKWARD 1 0 1 1 +BABY'S 1 0 1 1 +BABY 1 0 1 1 +BABIES 1 0 1 1 +BABE 1 0 1 1 +AZURE 1 0 1 1 +AXIS 1 0 1 1 +AWOKE 3 0 3 3 +AWKWARD 1 0 1 1 +AWFULLY 2 0 2 2 +AWFUL 4 0 4 4 +AWARE 6 0 6 6 +AWAKE 1 0 1 1 +AWAITING 1 0 1 1 +AWAITED 2 0 2 2 +AVOIDING 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 5 0 5 5 +AVERSION 1 0 1 1 +AVERSE 1 0 1 1 +AVAILABLE 1 0 1 1 +AUTUMN 1 0 1 1 +AUTHORS 1 0 1 1 +AUTHORIZED 1 0 1 1 +AUTHORITY 6 0 6 6 +AUTHORITIES 1 0 1 1 +AUTHORITATIVELY 1 0 1 1 +AUTHENTICATED 1 0 1 1 +AUNT'S 1 0 1 1 +AUGUST 5 0 5 5 +AUGMENT 1 0 1 1 +AUDITORY 1 0 1 1 +AUDITORS 1 0 1 1 +AUDIENCE 6 0 6 6 +AUDACIOUS 1 0 1 1 +AUCTION 1 0 1 1 +ATTRIBUTED 1 0 1 1 +ATTRACTIVE 1 0 1 1 +ATTRACTION 1 0 1 1 +ATTRACTED 3 0 3 3 +ATTORNEYS 1 0 1 1 +ATTIRE 1 0 1 1 +ATTENUATING 1 0 1 1 +ATTENTIVELY 2 0 2 2 +ATTENTION 11 0 11 11 +ATTENDED 1 0 1 1 +ATTENDANT 1 0 1 1 +ATTEND 3 0 3 3 +ATTEMPTS 1 0 1 1 +ATTEMPT 5 0 5 5 +ATTAINMENTS 1 0 1 1 +ATTAINMENT 1 0 1 1 +ATTAINED 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 4 0 4 4 +ATTACHED 2 0 2 2 +ATROCIOUS 1 0 1 1 +ATLANTIS 1 0 1 1 +ATLANTIC 3 0 3 3 +ATHENS 1 0 1 1 +ATHENIANS 1 0 1 1 +ATHENIAN 2 0 2 2 +ATE 2 0 2 2 +ASTRONOMY 1 0 1 1 +ASTOUNDING 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHING 1 0 1 1 +ASTONISHED 1 0 1 1 +ASSURES 1 0 1 1 +ASSUREDLY 1 0 1 1 +ASSURED 5 0 5 5 +ASSURE 5 0 5 5 +ASSURANCES 1 0 1 1 +ASSURANCE 3 0 3 3 +ASSUMED 5 0 5 5 +ASSOCIATION 2 0 2 2 +ASSOCIATES 1 0 1 1 +ASSOCIATED 3 0 3 3 +ASSISTED 2 0 2 2 +ASSISTANT 1 0 1 1 +ASSIST 2 0 2 2 +ASSIDUOUSLY 1 0 1 1 +ASSERTS 1 0 1 1 +ASSERTIVE 1 0 1 1 +ASSERTED 3 0 3 3 +ASSENT 1 0 1 1 +ASSEMBLY 2 0 2 2 +ASS 1 0 1 1 +ASPECT 1 0 1 1 +ASLEEP 1 0 1 1 +ASKING 2 0 2 2 +ASKED 22 0 22 22 +ASK 10 0 10 10 +ASIDE 3 0 3 3 +ASIA 1 0 1 1 +ASHORE 1 0 1 1 +ASHAMED 2 0 2 2 +ASCRIBES 1 0 1 1 +ASCERTAINING 1 0 1 1 +ASCERTAIN 2 0 2 2 +ARTILLERY 1 0 1 1 +ARTIFICE 1 0 1 1 +ARTICULATE 2 0 2 2 +ARTICLE 3 0 3 3 +ARTICHOKES 1 0 1 1 +ARTHUR 1 0 1 1 +ART 14 0 14 14 +ARROWS 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVE 1 0 1 1 +ARRIVAL 4 0 4 4 +ARRESTING 1 0 1 1 +ARRAY 1 0 1 1 
+ARRANGING 2 0 2 2 +ARRANGEMENTS 1 0 1 1 +ARRANGEMENT 2 0 2 2 +ARRANGED 2 0 2 2 +AROUSE 1 0 1 1 +AROSE 2 0 2 2 +ARONNAX 1 0 1 1 +ARMY 9 0 9 9 +ARMS 15 0 15 15 +ARMOUR 1 0 1 1 +ARISTOCRACY 1 0 1 1 +ARISING 1 0 1 1 +ARISE 1 0 1 1 +ARID 1 0 1 1 +ARIANS 1 0 1 1 +ARGYLE'S 1 0 1 1 +ARGUS 1 0 1 1 +ARGUING 1 0 1 1 +ARGUE 2 0 2 2 +AREN'T 1 0 1 1 +AREA 1 0 1 1 +ARDUOUS 1 0 1 1 +ARCHIVES 1 0 1 1 +ARCHITECTURAL 1 0 1 1 +ARCHED 1 0 1 1 +ARCH 1 0 1 1 +ARCADIAN 1 0 1 1 +APRON 2 0 2 2 +APRIL 2 0 2 2 +APPROXIMATELY 1 0 1 1 +APPROVING 2 0 2 2 +APPROVE 2 0 2 2 +APPROVAL 1 0 1 1 +APPROPRIATE 1 0 1 1 +APPROBATION 1 0 1 1 +APPROACHING 3 0 3 3 +APPROACHES 2 0 2 2 +APPROACHED 6 0 6 6 +APPROACH 1 0 1 1 +APPRENTICESHIP 1 0 1 1 +APPREHENSION 1 0 1 1 +APPRECIATIVE 1 0 1 1 +APPRECIATE 1 0 1 1 +APPOSITION 1 0 1 1 +APPOINTED 7 0 7 7 +APPLYING 1 0 1 1 +APPLICATION 2 0 2 2 +APPLE 1 0 1 1 +APPLAUSE 2 0 2 2 +APPLAUDED 1 0 1 1 +APPETITES 1 0 1 1 +APPETITE 1 0 1 1 +APPEARS 1 0 1 1 +APPEARED 10 0 10 10 +APPEARANCES 3 0 3 3 +APPEARANCE 9 0 9 9 +APPEAR 3 0 3 3 +APPEALS 1 0 1 1 +APPARENTLY 1 0 1 1 +APPARENT 2 0 2 2 +APPARATUS 1 0 1 1 +APPALLING 1 0 1 1 +APOSTOLICAL 1 0 1 1 +APOSTOLIC 1 0 1 1 +APOSTLES 6 0 6 6 +APOSTLE 4 0 4 4 +APOLLO 1 0 1 1 +APARTMENT 2 0 2 2 +APART 1 0 1 1 +ANYHOW 3 0 3 3 +ANYBODY 3 0 3 3 +ANXIOUS 3 0 3 3 +ANXIETY 4 0 4 4 +ANTIPATHY 2 0 2 2 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANTICHRIST 1 0 1 1 +ANTI 1 0 1 1 +ANTE 1 0 1 1 +ANTARCTIC 1 0 1 1 +ANSWERS 2 0 2 2 +ANSWER 6 0 6 6 +ANOTHER'S 1 0 1 1 +ANNOYANCE 2 0 2 2 +ANNOUNCED 2 0 2 2 +ANNE'S 2 0 2 2 +ANIMOSITY 2 0 2 2 +ANIMATED 2 0 2 2 +ANIMALS 5 0 5 5 +ANIMAL 8 0 8 8 +ANGRY 5 0 5 5 +ANGRILY 3 0 3 3 +ANGRIER 1 0 1 1 +ANGOR 1 0 1 1 +ANGER 1 0 1 1 +ANECDOTES 1 0 1 1 +ANCIENT 3 0 3 3 +ANALYSIS 2 0 2 2 +ANALOGY 1 0 1 1 +ANALOGUE 1 0 1 1 +AMUSING 2 0 2 2 +AMUSEMENT 3 0 3 3 +AMUSED 1 0 1 1 +AMUSE 2 0 2 2 +AMPLY 2 0 2 2 +AMOUNT 3 0 3 3 +AMONGST 3 0 3 3 +AMONG 29 0 29 29 +AMISS 1 0 1 1 +AMIDST 2 0 2 2 +AMID 1 0 1 1 +AMETHYST 1 0 1 1 +AMERICANS 2 0 2 2 +AMERICAN 10 0 10 10 +AMERICA 2 0 2 2 +AMENDS 2 0 2 2 +AMENDMENT 1 0 1 1 +AMELIORATION 1 0 1 1 +AMBROSE 4 0 4 4 +AMBITIOUS 1 0 1 1 +AMBITION 2 0 2 2 +AMBASSADOR 1 0 1 1 +AMAZEMENT 2 0 2 2 +AMALGAMATED 1 0 1 1 +ALWAYS 36 0 36 36 +ALTHOUGH 10 0 10 10 +ALTERNATING 3 0 3 3 +ALTERING 2 0 2 2 +ALTERED 2 0 2 2 +ALTERATION 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 1 0 1 1 +ALSO 36 0 36 36 +ALOUD 3 0 3 3 +ALONG 15 0 15 15 +ALONE 23 0 23 23 +ALMS 1 0 1 1 +ALMOST 19 0 19 19 +ALLY 1 0 1 1 +ALLUDE 1 0 1 1 +ALLOWING 2 0 2 2 +ALLOWED 7 0 7 7 +ALLOW 5 0 5 5 +ALLIES 1 0 1 1 +ALLIED 1 0 1 1 +ALLERS 1 0 1 1 +ALLEGED 2 0 2 2 +ALIVE 1 0 1 1 +ALIKE 1 0 1 1 +ALIGHTED 1 0 1 1 +ALICE 4 0 4 4 +ALGERIAN 1 0 1 1 +ALGERIA 2 0 2 2 +ALGEBRA 1 0 1 1 +ALERTNESS 1 0 1 1 +ALE 2 0 2 2 +ALBIGENSES 1 0 1 1 +ALAS 3 0 3 3 +ALARMED 1 0 1 1 +AKIN 1 0 1 1 +AIN'T 2 0 2 2 +AIMED 1 0 1 1 +AIDED 1 0 1 1 +AHEAD 1 0 1 1 +AGREEMENT 1 0 1 1 +AGREED 2 0 2 2 +AGREEABLY 2 0 2 2 +AGREEABLE 5 0 5 5 +AGREE 2 0 2 2 +AGO 4 0 4 4 +AGITATION 4 0 4 4 +AGITATED 2 0 2 2 +AGGRESSIVENESS 1 0 1 1 +AGGRESSIVE 1 0 1 1 +AGGREGATE 1 0 1 1 +AGENCY 1 0 1 1 +AGE 6 0 6 6 +AGAPE 1 0 1 1 +AGAINST 23 0 23 23 +AGAIN 39 0 39 39 +AFTERWARDS 5 0 5 5 +AFTERWARD 2 0 2 2 +AFTERNOON 4 0 4 4 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFLOAT 1 0 1 1 +AFFORD 4 0 4 4 +AFFLICTED 1 0 1 1 +AFFIRMATIVE 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATE 1 0 1 1 +AFFECTION 7 0 7 7 +AFFAIRS 3 0 3 3 +AFFAIR 2 0 2 2 +ADVISER 1 0 1 1 +ADVISED 1 0 1 1 +ADVISABLE 1 0 1 1 +ADVICE 4 0 4 4 +ADVERTISING 1 0 1 1 +ADVERTISEMENT 1 0 1 1 +ADVERSE 2 0 2 2 
+ADVANTAGES 2 0 2 2 +ADVANTAGE 3 0 3 3 +ADVANCING 1 0 1 1 +ADVANCE 5 0 5 5 +ADORE 1 0 1 1 +ADORATION 3 0 3 3 +ADOPTED 2 0 2 2 +ADOLESCENCE 1 0 1 1 +ADMITTING 2 0 2 2 +ADMITTED 3 0 3 3 +ADMITTANCE 1 0 1 1 +ADMIT 2 0 2 2 +ADMIRING 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 2 0 2 2 +ADMINISTRATION 3 0 3 3 +ADJUST 1 0 1 1 +ADHERENTS 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 6 0 6 6 +ADDRESS 3 0 3 3 +ADDITIONAL 1 0 1 1 +ADDED 11 0 11 11 +ACUTE 2 0 2 2 +ACTUALLY 3 0 3 3 +ACTUAL 4 0 4 4 +ACTS 2 0 2 2 +ACTRESS 1 0 1 1 +ACTORS 4 0 4 4 +ACTIVITY 1 0 1 1 +ACTIVELY 1 0 1 1 +ACTIVE 2 0 2 2 +ACTING 2 0 2 2 +ACT 6 0 6 6 +ACROSS 13 0 13 13 +ACQUIRES 1 0 1 1 +ACQUIRE 1 0 1 1 +ACQUAINTED 1 0 1 1 +ACQUAINTANCE 3 0 3 3 +ACQUAINT 1 0 1 1 +ACORN 1 0 1 1 +ACKNOWLEDGES 1 0 1 1 +ACKNOWLEDGED 2 0 2 2 +ACKNOWLEDGE 2 0 2 2 +ACHIEVEMENTS 1 0 1 1 +ACHIEVEMENT 1 0 1 1 +ACHIEVED 2 0 2 2 +ACE 2 0 2 2 +ACCUSTOMED 3 0 3 3 +ACCUSE 1 0 1 1 +ACCURATELY 1 0 1 1 +ACCURATE 2 0 2 2 +ACCURACY 3 0 3 3 +ACCRUING 1 0 1 1 +ACCOUTREMENTS 1 0 1 1 +ACCOUNTS 1 0 1 1 +ACCOUNTED 1 0 1 1 +ACCOUNT 9 0 9 9 +ACCORDINGLY 1 0 1 1 +ACCORDING 4 0 4 4 +ACCORDANCE 2 0 2 2 +ACCOMPLISHMENT 1 0 1 1 +ACCOMPLISHED 5 0 5 5 +ACCOMPANY 2 0 2 2 +ACCOMPANIED 3 0 3 3 +ACCOMMODATIONS 1 0 1 1 +ACCOMMODATION 1 0 1 1 +ACCLAMATIONS 1 0 1 1 +ACCIDENTS 2 0 2 2 +ACCIDENT 2 0 2 2 +ACCESSORIES 1 0 1 1 +ACCEPTING 1 0 1 1 +ACCEPTED 1 0 1 1 +ACCEPTABLE 1 0 1 1 +ACCENTS 2 0 2 2 +ACCENT 3 0 3 3 +ABSURDITY 1 0 1 1 +ABSURDITIES 1 0 1 1 +ABSURD 1 0 1 1 +ABSTRACTIONS 1 0 1 1 +ABSTRACTION 1 0 1 1 +ABSORBED 1 0 1 1 +ABSOLUTELY 6 0 6 6 +ABSOLUTE 1 0 1 1 +ABSENT 2 0 2 2 +ABSENCE 1 0 1 1 +ABRUPTLY 2 0 2 2 +ABROAD 4 0 4 4 +ABRAHAM 2 0 2 2 +ABOVE 17 0 17 17 +ABOUT 85 0 85 85 +ABOLITIONISM 1 0 1 1 +ABOARD 2 0 2 2 +ABNER 1 0 1 1 +ABLE 7 0 7 7 +ABJECTLY 1 0 1 1 +ABILITY 2 0 2 2 +ABANDONED 2 0 2 2 diff --git a/log/modified_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0fe35e68bf73476e332975064005906df9a94d1 --- /dev/null +++ b/log/modified_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,15343 @@ +%WER = 8.94 +Errors: 512 insertions, 424 deletions, 3745 substitutions, over 52343 reference words (48174 correct) +Search below for sections starting with PER-UTT DETAILS:, SUBSTITUTIONS:, DELETIONS:, INSERTIONS:, PER-WORD STATS: + +PER-UTT DETAILS: corr or (ref->hyp) +1688-142285-0000-1948: THERE'S IRON THEY SAY IN ALL OUR BLOOD AND A GRAIN OR TWO PERHAPS IS GOOD BUT HIS HE MAKES ME HARSHLY FEEL HAS GOT A LITTLE TOO MUCH OF STEEL ANON +1688-142285-0001-1949: MARGARET SAID MISTER HALE AS HE RETURNED FROM SHOWING HIS (GUEST->GUESTS) DOWNSTAIRS I COULD NOT HELP WATCHING YOUR FACE WITH SOME ANXIETY WHEN MISTER THORNTON MADE HIS CONFESSION OF HAVING BEEN A SHOP BOY +1688-142285-0002-1950: YOU DON'T MEAN THAT YOU THOUGHT ME SO SILLY +1688-142285-0003-1951: I REALLY (LIKED->LIKE) THAT ACCOUNT OF HIMSELF BETTER THAN ANYTHING ELSE HE SAID +1688-142285-0004-1952: HIS STATEMENT OF HAVING BEEN A SHOP BOY WAS THE THING I (LIKED->LIKE) BEST OF ALL +1688-142285-0005-1953: YOU WHO WERE ALWAYS ACCUSING PEOPLE (OF BEING->HAVE BEEN) SHOPPY AT HELSTONE +1688-142285-0006-1954: I DON'T THINK MISTER HALE YOU HAVE DONE QUITE RIGHT (IN->*) INTRODUCING SUCH A PERSON 
TO US WITHOUT TELLING US WHAT HE HAD BEEN +1688-142285-0007-1955: I REALLY WAS VERY MUCH AFRAID OF SHOWING HIM HOW MUCH SHOCKED I WAS AT SOME (PARTS->PART) OF WHAT HE SAID +1688-142285-0008-1956: HIS FATHER DYING IN MISERABLE CIRCUMSTANCES +1688-142285-0009-1957: WHY IT MIGHT HAVE BEEN IN THE WORKHOUSE +1688-142285-0010-1958: HIS FATHER SPECULATED WILDLY FAILED AND THEN KILLED HIMSELF BECAUSE HE COULD NOT BEAR THE DISGRACE +1688-142285-0011-1959: ALL HIS FORMER FRIENDS SHRUNK FROM THE DISCLOSURES THAT HAD TO BE MADE OF HIS DISHONEST GAMBLING WILD HOPELESS STRUGGLES MADE WITH OTHER PEOPLE'S MONEY TO REGAIN HIS OWN MODERATE PORTION OF WEALTH +1688-142285-0012-1960: NO ONE CAME FORWARDS TO HELP THE MOTHER AND THIS BOY +1688-142285-0013-1961: AT LEAST NO FRIEND CAME FORWARDS IMMEDIATELY AND (MISSUS->MISTER) THORNTON IS NOT ONE I FANCY TO WAIT TILL (TARDY->TIDY) KINDNESS COMES TO FIND HER OUT +1688-142285-0014-1962: SO THEY LEFT MILTON +1688-142285-0015-1963: HOW TAINTED ASKED HER FATHER +1688-142285-0016-1964: (OH->O) PAPA BY THAT TESTING EVERYTHING BY THE STANDARD OF WEALTH +1688-142285-0017-1965: WHEN HE SPOKE OF THE MECHANICAL POWERS HE EVIDENTLY LOOKED UPON THEM ONLY AS NEW WAYS OF EXTENDING TRADE AND MAKING MONEY +1688-142285-0018-1966: AND THE POOR MEN AROUND HIM (THEY->THERE) WERE POOR BECAUSE THEY WERE VICIOUS OUT OF THE PALE OF HIS SYMPATHIES BECAUSE THEY HAD NOT HIS IRON NATURE AND THE CAPABILITIES THAT IT GIVES HIM FOR BEING RICH +1688-142285-0019-1967: NOT VICIOUS HE NEVER SAID THAT +1688-142285-0020-1968: IMPROVIDENT AND SELF INDULGENT WERE HIS WORDS +1688-142285-0021-1969: MARGARET WAS COLLECTING HER MOTHER'S WORKING MATERIALS AND PREPARING TO GO TO BED +1688-142285-0022-1970: JUST AS SHE WAS LEAVING THE ROOM SHE HESITATED SHE WAS INCLINED TO MAKE AN ACKNOWLEDGMENT WHICH SHE THOUGHT WOULD PLEASE HER FATHER BUT WHICH TO BE FULL AND TRUE MUST INCLUDE A LITTLE ANNOYANCE +1688-142285-0023-1971: HOWEVER OUT IT CAME +1688-142285-0024-1972: PAPA I DO THINK MISTER THORNTON A VERY REMARKABLE MAN BUT PERSONALLY I DON'T LIKE HIM AT ALL +1688-142285-0025-1973: AND I DO SAID HER FATHER LAUGHING +1688-142285-0026-1974: PERSONALLY AS YOU CALL IT AND ALL +1688-142285-0027-1975: I DON'T SET HIM UP FOR A HERO OR ANYTHING OF THAT KIND +1688-142285-0028-1976: BUT GOOD NIGHT CHILD +1688-142285-0029-1977: THERE WERE SEVERAL OTHER SIGNS OF SOMETHING WRONG ABOUT MISSUS HALE +1688-142285-0030-1978: SHE AND DIXON HELD MYSTERIOUS CONSULTATIONS IN HER BEDROOM FROM WHICH DIXON WOULD COME OUT CRYING AND CROSS AS WAS (HER->A) CUSTOM WHEN ANY DISTRESS OF HER MISTRESS CALLED UPON HER SYMPATHY +1688-142285-0031-1979: ONCE MARGARET HAD GONE INTO THE CHAMBER SOON AFTER DIXON (LEFT IT->LIFTED) AND FOUND HER MOTHER ON HER KNEES AND AS MARGARET STOLE OUT SHE CAUGHT A FEW WORDS WHICH WERE EVIDENTLY A PRAYER FOR STRENGTH AND PATIENCE TO (ENDURE->INDURE) SEVERE BODILY SUFFERING +1688-142285-0032-1980: BUT THOUGH SHE RECEIVED CARESSES AND FOND WORDS BACK AGAIN IN SUCH PROFUSION AS WOULD HAVE GLADDENED HER FORMERLY YET SHE FELT THAT THERE WAS A SECRET WITHHELD FROM HER AND SHE BELIEVED IT BORE SERIOUS REFERENCE TO HER MOTHER'S HEALTH +1688-142285-0033-1981: SHE LAY AWAKE VERY LONG THIS NIGHT PLANNING HOW TO LESSEN THE EVIL INFLUENCE OF THEIR MILTON LIFE ON HER MOTHER +1688-142285-0034-1982: A SERVANT (TO->*) GIVE DIXON PERMANENT ASSISTANCE SHOULD BE GOT IF SHE GAVE UP (HER->THE) WHOLE TIME TO THE SEARCH AND THEN AT ANY RATE HER MOTHER MIGHT HAVE ALL THE PERSONAL (ATTENTION->ATTENTIONS) SHE REQUIRED AND HAD BEEN ACCUSTOMED 
TO HER WHOLE LIFE +1688-142285-0035-1983: VISITING REGISTER (OFFICES->OFFICERS) SEEING ALL MANNER OF UNLIKELY PEOPLE AND VERY FEW IN THE LEAST LIKELY ABSORBED MARGARET'S TIME AND THOUGHTS FOR SEVERAL DAYS +1688-142285-0036-1984: ONE AFTERNOON SHE MET BESSY HIGGINS IN THE STREET AND STOPPED TO SPEAK TO HER +1688-142285-0037-1985: WELL (BESSY->BUSY) HOW ARE YOU +1688-142285-0038-1986: BETTER AND NOT BETTER IF (YO->YOU) KNOW WHAT THAT MEANS +1688-142285-0039-1987: NOT EXACTLY REPLIED MARGARET SMILING +1688-142285-0040-1988: I'M BETTER IN NOT BEING TORN TO PIECES (BY->BUT) COUGHING (O'NIGHTS->A NIGHT'S) BUT I'M WEARY AND TIRED (O->OF) MILTON AND LONGING TO GET AWAY TO THE LAND (O BEULAH->OF BOOLA) AND WHEN I THINK I'M FARTHER AND FARTHER OFF MY HEART SINKS AND I'M NO BETTER I'M WORSE +1688-142285-0041-1989: MARGARET TURNED (ROUND->AROUND) TO WALK ALONGSIDE OF THE GIRL IN HER FEEBLE PROGRESS HOMEWARD +1688-142285-0042-1990: BUT FOR A MINUTE OR TWO SHE DID NOT SPEAK +1688-142285-0043-1991: AT LAST SHE SAID IN A LOW VOICE +1688-142285-0044-1992: BESSY DO YOU WISH TO DIE +1688-142285-0045-1993: BESSY WAS SILENT IN HER TURN FOR A MINUTE OR TWO THEN SHE REPLIED +1688-142285-0046-1994: (NOUGHT->NOT) WORSE THAN MANY OTHERS I RECKON +1688-142285-0047-1995: BUT WHAT WAS IT +1688-142285-0048-1996: YOU KNOW I'M A STRANGER HERE SO PERHAPS I'M NOT SO QUICK AT UNDERSTANDING WHAT YOU MEAN AS IF I'D LIVED ALL MY LIFE (AT->IN) MILTON +1688-142285-0049-1997: I HAD FORGOTTEN WHAT I SAID FOR THE TIME CONTINUED MARGARET QUIETLY +1688-142285-0050-1998: I SHOULD HAVE THOUGHT OF IT AGAIN WHEN I WAS LESS BUSY (MAY I GO->MARGAR) WITH YOU NOW +1688-142285-0051-1999: THE SHARPNESS IN HER EYE TURNED TO A WISTFUL LONGING AS SHE MET MARGARET'S SOFT AND FRIENDLY GAZE +1688-142285-0052-2000: AS THEY TURNED UP INTO A SMALL COURT OPENING OUT (OF->INTO) A SQUALID STREET BESSY SAID +1688-142285-0053-2001: (YO'LL->YOU'LL) NOT BE DAUNTED IF (FATHER'S AT HOME->FATHER SAID HE) AND SPEAKS A BIT GRUFFISH AT FIRST +1688-142285-0054-2002: BUT NICHOLAS WAS NOT AT HOME WHEN THEY ENTERED +1688-142285-0055-2003: GASPED (BESSY->BESSIE) AT LAST +1688-142285-0056-2004: BESSY TOOK A LONG AND FEVERISH DRAUGHT AND THEN FELL BACK AND SHUT HER EYES +1688-142285-0057-2005: MARGARET BENT OVER AND SAID BESSY DON'T BE IMPATIENT WITH YOUR LIFE WHATEVER IT IS OR MAY HAVE BEEN +1688-142285-0058-2006: REMEMBER WHO GAVE IT (*->TO) YOU AND MADE IT WHAT IT IS +1688-142285-0059-2007: NOW I'LL NOT HAVE MY WENCH (PREACHED TO->PREACH TOO) +1688-142285-0060-2008: BUT SURELY SAID MARGARET FACING ROUND YOU BELIEVE IN WHAT I SAID THAT GOD GAVE HER LIFE AND ORDERED WHAT KIND OF LIFE IT WAS TO BE +1688-142285-0061-2009: I BELIEVE WHAT I SEE AND NO MORE +1688-142285-0062-2010: THAT'S WHAT I BELIEVE YOUNG WOMAN +1688-142285-0063-2011: I DON'T BELIEVE ALL I HEAR NO NOT BY A BIG DEAL +1688-142285-0064-2012: BUT (HOO'S->WHO'S) COME AT LAST AND (HOO'S->WHO'S) WELCOME AS LONG AS (HOO'LL->HE'LL) KEEP FROM PREACHING ON WHAT (HOO->HE) KNOWS (NOUGHT->NOT) ABOUT +1688-142285-0065-2013: IT'S SIMPLE AND NOT FAR TO FETCH NOR HARD TO WORK +1688-142285-0066-2014: BUT THE GIRL ONLY PLEADED THE MORE WITH MARGARET +1688-142285-0067-2015: DON'T THINK HARDLY ON HIM HE'S A GOOD MAN HE IS +1688-142285-0068-2016: I SOMETIMES THINK I SHALL BE (MOPED WI->MIRKED WITH) SORROW EVEN IN THE CITY OF GOD IF (FATHER->EITHER) IS NOT THERE +1688-142285-0069-2017: THE FEVERISH COLOUR CAME INTO HER (CHEEK->CHEEKS) AND THE FEVERISH FLAME INTO HER EYE +1688-142285-0070-2018: BUT YOU WILL BE THERE FATHER YOU 
SHALL OH MY HEART +1688-142285-0071-2019: SHE PUT HER HAND TO IT AND BECAME GHASTLY PALE +1688-142285-0072-2020: MARGARET HELD HER IN HER ARMS AND PUT THE WEARY HEAD TO REST UPON HER BOSOM +1688-142285-0073-2021: PRESENTLY THE SPASM THAT FORESHADOWED DEATH HAD PASSED AWAY AND BESSY ROUSED HERSELF AND SAID +1688-142285-0074-2022: I'LL GO TO BED IT'S BEST PLACE BUT CATCHING (AT->THAT) MARGARET'S (GOWN YO'LL->GUN YOU'LL) COME AGAIN I KNOW (YO->YOU) WILL BUT JUST SAY IT +1688-142285-0075-2023: I WILL COME TO MORROW SAID MARGARET +1688-142285-0076-2024: MARGARET WENT AWAY VERY SAD AND THOUGHTFUL +1688-142285-0077-2025: SHE WAS LATE FOR TEA AT HOME +1688-142285-0078-2026: HAVE YOU MET WITH A SERVANT DEAR +1688-142285-0079-2027: NO MAMMA THAT ANNE BUCKLEY WOULD NEVER HAVE DONE +1688-142285-0080-2028: (SUPPOSE->S'POSE) I TRY SAID MISTER HALE +1688-142285-0081-2029: EVERYBODY ELSE HAS HAD (THEIR->THEY) TURN AT THIS GREAT DIFFICULTY NOW LET ME TRY +1688-142285-0082-2030: I MAY BE THE (CINDERELLA->CINORLA) TO PUT ON THE SLIPPER AFTER ALL +1688-142285-0083-2031: (WHAT->BUT) WOULD YOU DO PAPA HOW WOULD YOU SET ABOUT IT +1688-142285-0084-2032: WHY I WOULD APPLY (*->IT) TO SOME GOOD HOUSE MOTHER TO RECOMMEND ME ONE KNOWN TO HERSELF OR HER SERVANTS +1688-142285-0085-2033: VERY GOOD BUT WE MUST FIRST CATCH OUR HOUSE MOTHER +1688-142285-0086-2034: THE MOTHER OF WHOM HE SPOKE TO US SAID MARGARET +1688-142285-0087-2035: (MISSUS->MISTER) THORNTON THE ONLY MOTHER HE HAS I BELIEVE SAID MISTER HALE QUIETLY +1688-142285-0088-2036: I SHALL LIKE TO SEE HER SHE MUST BE AN UNCOMMON PERSON HER MOTHER ADDED +1688-142285-0089-2037: PERHAPS SHE MAY HAVE A RELATION WHO MIGHT SUIT US AND BE GLAD OF OUR PLACE +1688-142285-0090-2038: SHE SOUNDED TO BE SUCH A CAREFUL ECONOMICAL PERSON THAT I SHOULD LIKE ANY ONE OUT OF THE SAME FAMILY +1688-142285-0091-2039: MY DEAR SAID MISTER HALE ALARMED PRAY DON'T GO OFF ON THAT IDEA +1688-142285-0092-2040: I AM SURE AT ANY RATE SHE WOULD NOT LIKE STRANGERS TO KNOW ANYTHING ABOUT IT +1688-142285-0093-2041: TAKE NOTICE THAT (*->THIS) IS NOT MY KIND OF HAUGHTINESS PAPA IF I HAVE ANY AT ALL WHICH I DON'T AGREE TO THOUGH (YOU'RE->YOU) ALWAYS ACCUSING ME OF IT +1688-142285-0094-2042: I DON'T KNOW POSITIVELY THAT IT IS HERS EITHER BUT FROM LITTLE THINGS I HAVE GATHERED FROM HIM I FANCY SO +1688-142285-0095-2043: THEY CARED TOO LITTLE TO ASK IN WHAT MANNER HER SON HAD SPOKEN ABOUT HER +1998-15444-0000-2204: IF CALLED TO A CASE SUPPOSED (OR->AS) SUSPECTED TO BE ONE OF POISONING THE MEDICAL MAN HAS TWO DUTIES TO PERFORM TO SAVE THE PATIENT'S LIFE AND TO PLACE HIMSELF IN A POSITION TO GIVE EVIDENCE (IF->OF) CALLED (ON TO->UNTO) DO SO +1998-15444-0001-2205: HE SHOULD MAKE INQUIRIES AS TO SYMPTOMS AND TIME AT WHICH FOOD OR MEDICINE (WAS->MUST) LAST TAKEN +1998-15444-0002-2206: HE SHOULD NOTICE THE POSITION AND TEMPERATURE OF THE BODY THE CONDITION OF RIGOR MORTIS MARKS OF VIOLENCE APPEARANCE OF LIPS AND MOUTH +1998-15444-0003-2207: IN MAKING A POST (MORTEM->MODE OF) EXAMINATION THE ALIMENTARY CANAL SHOULD BE REMOVED AND PRESERVED FOR FURTHER INVESTIGATION +1998-15444-0004-2208: THE GUT AND THE (GULLET->GALLANT) BEING CUT ACROSS BETWEEN THESE LIGATURES THE STOMACH MAY BE REMOVED ENTIRE WITHOUT (SPILLING->SPINNING) ITS CONTENTS +1998-15444-0005-2209: IF THE (MEDICAL PRACTITIONER IS IN DOUBT->MEDICA PETITIONERS ENDOWED) ON ANY POINT HE SHOULD OBTAIN TECHNICAL ASSISTANCE FROM (SOMEONE->SOME ONE) WHO HAS PAID ATTENTION TO THE SUBJECT +1998-15444-0006-2210: IN A CASE OF ATTEMPTED SUICIDE BY POISONING IS IT 
THE DUTY OF THE DOCTOR TO INFORM THE POLICE +1998-15444-0007-2211: THE BEST (EMETIC->AMATIC) IS THAT WHICH IS AT HAND +1998-15444-0008-2212: THE (DOSE FOR AN ADULT->DAYS WERE ADULGE) IS TEN MINIMS +1998-15444-0009-2213: (APOMORPHINE->EPIMORPHIN) IS NOT (ALLIED IN->ALID AND) PHYSIOLOGICAL ACTION TO MORPHINE AND MAY BE GIVEN IN CASES OF NARCOTIC POISONING +1998-15444-0010-2214: TICKLING THE (FAUCES->FOSSES) WITH (A->THE) FEATHER MAY EXCITE (VOMITING->WARMITTING) +1998-15444-0011-2215: IN USING THE (ELASTIC->ELECTIC) STOMACH TUBE SOME FLUID SHOULD BE INTRODUCED INTO THE STOMACH BEFORE ATTEMPTING TO EMPTY IT OR A PORTION OF THE MUCOUS MEMBRANE MAY BE (SUCKED->SACKED) INTO THE APERTURE +1998-15444-0012-2216: THE TUBE SHOULD BE EXAMINED TO SEE THAT IT IS NOT BROKEN OR CRACKED AS ACCIDENTS HAVE HAPPENED FROM NEGLECTING THIS PRECAUTION +1998-15444-0013-2217: (ANTIDOTES ARE->AND HE VOTES A) USUALLY GIVEN HYPODERMICALLY OR IF (BY MOUTH->THE MOUSE) IN THE FORM OF TABLETS +1998-15444-0014-2218: IN THE ABSENCE OF (A->THE) HYPODERMIC SYRINGE THE REMEDY MAY BE GIVEN BY THE (RECTUM->RECTIM) +1998-15444-0015-2219: NOTICE THE (SMELL->SMAR) COLOUR AND GENERAL APPEARANCE OF THE MATTER SUBMITTED FOR EXAMINATION +1998-15444-0016-2220: FOR THE SEPARATION OF AN (ALKALOID->AKALOID) THE FOLLOWING IS THE PROCESS OF (STAS->STARS) OTTO +1998-15444-0017-2221: THIS PROCESS IS BASED UPON THE PRINCIPLE THAT THE SALTS OF THE (ALKALOIDS->ACHELIDES) ARE SOLUBLE IN (ALCOHOL AND->AKELET) WATER AND INSOLUBLE IN ETHER +1998-15444-0018-2222: THE PURE (ALKALOIDS WITH->IKOLOITS WAS) THE EXCEPTION OF MORPHINE IN ITS CRYSTALLINE FORM (ARE->A) SOLUBLE (IN ETHER->BENEATH THEM) +1998-15444-0019-2223: TWO (COOL->U) THE (MIXTURE->MIXED) AND FILTER WASH THE RESIDUE WITH STRONG ALCOHOL AND MIX THE (FILTRATES->FUR TRADES) +1998-15444-0020-2224: THE RESIDUE MAY BE SET ASIDE FOR THE DETECTION OF THE METALLIC POISONS (IF->OF) SUSPECTED (EXPEL->EXPELLED) THE (ALCOHOL BY->ALCOHOLBA) CAREFUL EVAPORATION +1998-15444-0021-2225: ON THE EVAPORATION OF THE ALCOHOL THE (RESINOUS->ZENOUS) AND FATTY (MATTERS->MATTER) SEPARATE +1998-15444-0022-2226: EVAPORATE THE (FILTRATE->FEDERATE) TO A (SYRUP->CYRUP) AND EXTRACT WITH SUCCESSIVE PORTIONS OF ABSOLUTE ALCOHOL +1998-15444-0023-2227: SEPARATE THE ETHEREAL SOLUTION AND (EVAPORATE->THE REPARATE) +1998-15444-0024-2228: FIVE A PART OF THIS (ETHEREAL->ASSYRIAL) SOLUTION IS (POURED->PUT) INTO A WATCH GLASS AND (ALLOWED->ALLOW) TO EVAPORATE +1998-15444-0025-2229: TO PURIFY IT (ADD A SMALL->ADDISMA) QUANTITY OF (DILUTE SULPHURIC->DELUDE SUFFERG) ACID AND AFTER EVAPORATING TO THREE QUARTERS OF ITS BULK ADD (A->*) SATURATED SOLUTION OF CARBONATE OF POTASH OR SODA +1998-15444-0026-2230: (BOIL->BY) THE (FINELY DIVIDED->FINALLY DIVIDE) SUBSTANCE WITH ABOUT ONE (EIGHTH->EIGHTHS) ITS (BULK->BAG) OF PURE HYDROCHLORIC ACID ADD FROM TIME TO TIME POTASSIC (CHLORATE->LOW RAGE) UNTIL THE SOLIDS ARE REDUCED TO A STRAW YELLOW FLUID +1998-15444-0027-2231: THE RESIDUE OF THE MATERIAL AFTER DIGESTION (WITH->WAS) HYDROCHLORIC ACID AND (POTASSIUM CHLORATE->POTASSIAN CHLORIDE) MAY HAVE TO BE EXAMINED FOR SILVER LEAD AND (BARIUM->BURIUM) +1998-29454-0000-2157: A THOUSAND BLESSINGS FROM A GRATEFUL HEART +1998-29454-0001-2158: PERUSAL (SAID THE PAWNBROKER->SET UPON BROKER) THAT'S THE WAY TO (PERNOUNCE->PRONOUNCE) IT +1998-29454-0002-2159: HIS BOOKS TOLD HIM (THAT TREASURE IS->THE TREASURES) BEST HIDDEN UNDER LOOSE BOARDS (UNLESS->AND AS) OF COURSE YOUR HOUSE (HAS->HAD) A SECRET PANEL WHICH HIS HAD NOT +1998-29454-0003-2160: HE GOT IT UP 
AND PUSHED HIS TREASURES AS FAR IN AS HE COULD ALONG THE ROUGH (CRUMBLY->CRAMBLY) SURFACE OF THE (LATH->LAST) AND PLASTER +1998-29454-0004-2161: WHEN DICKIE CAME DOWN HIS AUNT (SLIGHTLY->SAT HE) SLAPPED HIM AND HE TOOK THE HALFPENNY AND (LIMPED OFF->LIMP OF) OBEDIENTLY +1998-29454-0005-2162: HE HAD NEVER SEEN ONE BEFORE AND IT INTERESTED HIM EXTREMELY +1998-29454-0006-2163: HE LOOKED ABOUT HIM AND KNEW THAT HE DID NOT AT ALL KNOW WHERE HE WAS +1998-29454-0007-2164: WHAT'S UP (MATEY LOST->MATE ASKED) YOUR WAY DICKIE EXPLAINED +1998-29454-0008-2165: WHEN HE SAID (AVE->HAVE) I (BIN->BEEN) ASLEEP +1998-29454-0009-2166: HERE WE ARE SAID THE MAN +1998-29454-0010-2167: NOT (EXACKLY->EXACTLY) SAID THE MAN BUT IT'S ALL RIGHT +1998-29454-0011-2168: WHEN IT WAS OVER THE (MAN->MEN) ASKED DICKIE IF HE COULD WALK A LITTLE WAY AND WHEN (DICKIE->DICKY) SAID HE COULD THEY SET OUT IN THE MOST FRIENDLY WAY SIDE BY SIDE +1998-29454-0012-2169: AND THE (TEA->TUNO) AND (ALL AN->*) THE EGG +1998-29454-0013-2170: AND THIS IS THE PRETTIEST PLACE EVER I SEE +1998-29454-0014-2171: I SHALL CATCH IT (A FAIR->IF HER) TREAT AS IT IS +1998-29454-0015-2172: SHE WAS (WAITIN->WAITING) FOR THE WOOD TO (BOIL->BOY) THE (KETTLE->CATTLE) WHEN (I->TO) COME OUT MOTHER +1998-29454-0016-2173: (AIN'T->AND) BAD WHEN SHE'S IN A GOOD TEMPER +1998-29454-0017-2174: THAT (AIN'T WHAT SHE'LL->ANNE BUT YE'LL) BE IN WHEN YOU GETS BACK +1998-29454-0018-2175: I GOT (TO STICK IT->A STICKET) SAID (DICKIE->DICKY) SADLY I'D BEST BE GETTING HOME +1998-29454-0019-2176: I WOULDN'T GO (OME->HOME) NOT IF (I->EVER) WAS YOU SAID THE MAN +1998-29454-0020-2177: NO SAID DICKIE OH NO NO I NEVER +1998-29454-0021-2178: (I AIN'T IT YER->AND A DEAR) HAVE I LIKE WHAT (YER AUNT DO->YOU AREN'T TO) +1998-29454-0022-2179: WELL (THAT'LL->THOU) SHOW YOU THE SORT OF (MAN->MEN) I AM +1998-29454-0023-2180: THE MAN'S MANNER WAS SO KIND AND HEARTY THE WHOLE ADVENTURE WAS SO WONDERFUL AND NEW IS IT COUNTRY WHERE YOU GOING +1998-29454-0024-2181: THE SUN (SHOT->HAD) LONG GOLDEN BEAMS THROUGH THE GAPS (IN->AND) THE HEDGE +1998-29454-0025-2182: A BIRD (PAUSED->PASSED) IN ITS FLIGHT ON A BRANCH QUITE CLOSE AND CLUNG THERE SWAYING +1998-29454-0026-2183: HE TOOK OUT OF HIS POCKET A NEW ENVELOPE (A->AND) NEW SHEET OF PAPER AND A NEW PENCIL READY SHARPENED BY MACHINERY +1998-29454-0027-2184: (AN->AND) I (ASKS->ASK) YOU LET ME COME (ALONGER->ALONG ARE) YOU GOT THAT +1998-29454-0028-2185: (GET IT WROTE->GERT RODE) DOWN THEN DONE +1998-29454-0029-2186: THEN HE FOLDED IT AND PUT IT IN HIS POCKET +1998-29454-0030-2187: NOW (WE'RE->WE ARE) SQUARE HE SAID +1998-29454-0031-2188: THEY COULD PUT A (MAN->MEN) AWAY FOR (LESS->US) THAN THAT +1998-29454-0032-2189: I SEE THAT (THERE IN->THEN) A BOOK SAID (DICKIE CHARMED->DICKET CHUMMED) +1998-29454-0033-2190: HE REWARD THE WAKE THE LAST OF THE ENGLISH AND (I WUNNERED->A ONE AT) WHAT IT STOOD FOR +1998-29454-0034-2191: (WILD->WHITE) ONES (AIN'T ALF THE->AND A HALF) SIZE I LAY +1998-29454-0035-2192: ADVENTURES I SHOULD THINK SO +1998-29454-0036-2193: AH SAID (DICKIE->DICKY) AND A (FULL->FOOT) SILENCE FELL BETWEEN THEM +1998-29454-0037-2194: THAT WAS CHARMING BUT IT WAS PLEASANT TOO TO WASH THE (MUD OFF->MATTER) ON THE WET GRASS +1998-29454-0038-2195: (DICKIE->DICKY) ALWAYS REMEMBERED THAT MOMENT +1998-29454-0039-2196: SO YOU SHALL SAID MISTER BEALE A (REG'LER->REGULAR) WASH ALL OVER THIS VERY NIGHT I ALWAYS LIKE A WASH MESELF +1998-29454-0040-2197: SOME (BLOKES->LOSS) THINK IT PAYS TO BE DIRTY BUT IT DON'T +1998-29454-0041-2198: IF (YOU'RE->YO) CLEAN 
THEY SAY (HONEST POVERTY->I DISPOVERTY) AN IF (YOU'RE->YO) DIRTY THEY SAY SERVE YOU RIGHT +1998-29454-0042-2199: YOU ARE GOOD SAID DICKIE I DO LIKE YOU +1998-29454-0043-2200: I KNOW YOU WILL SAID DICKIE WITH ENTHUSIASM I KNOW (OW->HOW) GOOD YOU ARE +1998-29454-0044-2201: BLESS ME SAID MISTER BEALE UNCOMFORTABLY WELL THERE +1998-29454-0045-2202: (STEP OUT SONNY OR WE'LL->SPATANI ALBEA) NEVER GET THERE THIS (SIDE->SORT OF) CHRISTMAS +1998-29454-0046-2203: WELL (YOU'LL->YOU) KNOW ALL ABOUT IT PRESENTLY +1998-29455-0000-2232: THE SINGING AND LAUGHING WENT ON LONG AFTER HE HAD FALLEN ASLEEP AND IF LATER IN THE EVENING (THERE->THEY) WERE (LOUD VOICED->ALL OUTWARDS) ARGUMENTS OR (*->A) QUARRELS EVEN DICKIE DID NOT HEAR THEM +1998-29455-0001-2233: WHAT'S (ALL->ON) THAT THERE (DICKIE->DICKY) ASKED POINTING TO THE ODD (KNOBBLY->NOBLY) BUNDLES OF ALL SORTS AND SHAPES TIED ON TO THE (PERAMBULATOR'S->PRIME RELATOR'S) FRONT +1998-29455-0002-2234: TELL (YER->YOU) WHAT (MATE->MADE) LOOKS TO ME AS IF (I'D->I) TOOK A FANCY TO YOU +1998-29455-0003-2235: (SWELP->SWAB) ME HE SAID HELPLESSLY +1998-29455-0004-2236: OH LOOK SAID (DICKIE->DICKY) THE FLOWERS +1998-29455-0005-2237: (THEY'RE->THEY) ONLY (WEEDS->READS) SAID BEALE +1998-29455-0006-2238: BUT I (SHALL->SHOULD) HAVE THEM (WHILE THEY'RE->WHETHER) ALIVE SAID (DICKIE->DICKY) AS HE HAD SAID TO THE PAWNBROKER (ABOUT->BUT) THE MOONFLOWERS +1998-29455-0007-2239: (HI->AY) THERE (GOES->WAS) A RABBIT +1998-29455-0008-2240: (SEE IM CROST THE->SEEM QUEST) ROAD THERE (SEE HIM->SEEM) +1998-29455-0009-2241: HOW BEAUTIFUL SAID (DICKIE->DICKY) WRIGGLING WITH DELIGHT +1998-29455-0010-2242: THIS LIFE OF THE RABBIT AS DESCRIBED BY MISTER BEALE WAS THE CHILD'S FIRST GLIMPSE OF FREEDOM I'D LIKE TO BE A RABBIT +1998-29455-0011-2243: (OW'M->AM) I TO (WHEEL->BE AT) THE (BLOOMIN PRAM->ROOM IN PEM) IF (YOU->YOUR) GOES ON LIKE AS IF YOU WAS A (BAG->PEG) OF EELS +1998-29455-0012-2244: I LIKE YOU (NEXTER->NEXT TO) MY OWN DADDY AND MISTER (BAXTER->BAXT THE) NEXT DOOR +1998-29455-0013-2245: THAT'S ALL RIGHT SAID MISTER BEALE AWKWARDLY +1998-29455-0014-2246: (DICKIE QUICK TO->DICKY QUICKLY) IMITATE TOUCHED HIS +1998-29455-0015-2247: POOR LITTLE MAN SAID THE LADY YOU MISS YOUR MOTHER DON'T YOU +1998-29455-0016-2248: OH WELL DONE LITTLE (UN->ONE) SAID MISTER (BEALE->BEECH) TO HIMSELF +1998-29455-0017-2249: THE TWO TRAVELLERS WERE LEFT FACING EACH OTHER THE RICHER BY A PENNY AND (OH->O) WONDERFUL GOOD FORTUNE A WHOLE HALF CROWN +1998-29455-0018-2250: NO I NEVER SAID DICKIE (ERE'S->YES) THE (STEEVER->STEVER) +1998-29455-0019-2251: YOU STICK TO THAT SAID (BEALE->BEARD) RADIANT (WITH->WAS) DELIGHT YOU'RE A FAIR MASTERPIECE YOU ARE YOU EARNED IT HONEST IF EVER (A KID->KIT) DONE +1998-29455-0020-2252: THEY WENT ON UP THE HILL AS HAPPY AS ANY ONE NEED WISH TO BE +1998-29455-0021-2253: PLEASE (DO NOT BE->DON'T REPEAT) TOO SHOCKED +1998-29455-0022-2254: REMEMBER THAT NEITHER OF THEM KNEW ANY BETTER +1998-29455-0023-2255: TO THE (ELDER->OTHER) TRAMP LIES (AND BEGGING WERE->IN PEGGING WHERE) NATURAL MEANS OF LIVELIHOOD +1998-29455-0024-2256: BUT YOU SAID THE BED (WITH->WAS) THE GREEN CURTAINS (URGED DICKIE->ADDED THE KEI) +1998-29455-0025-2257: WHICH THIS (AIN'T->END) NOT BY NO MEANS +1998-29455-0026-2258: THE NIGHT IS FULL OF INTERESTING LITTLE SOUNDS THAT WILL NOT AT FIRST LET YOU SLEEP THE RUSTLE OF LITTLE (WILD->WHITE) THINGS (IN->ON) THE (HEDGES->HATCHES) THE BARKING OF DOGS (IN->AND) DISTANT FARMS THE CHIRP OF CRICKETS AND THE CROAKING OF FROGS +1998-29455-0027-2259: (THE NEW->THEN YOU) GAME OF 
BEGGING AND INVENTING STORIES TO INTEREST THE PEOPLE FROM WHOM IT WAS (WORTH->WORSE) WHILE TO BEG WENT ON GAILY DAY BY DAY AND WEEK BY WEEK AND DICKIE BY CONSTANT PRACTICE GREW SO CLEVER AT TAKING HIS PART IN THE ACTING THAT MISTER (BEALE->BEER) WAS QUITE DAZED WITH ADMIRATION +1998-29455-0028-2260: (BLESSED->BLEST) IF I EVER SEE SUCH A NIPPER HE SAID OVER AND OVER AGAIN +1998-29455-0029-2261: CLEVER AS A (TRAINDAWG E->TRAIN DOG) IS (AN ALL OUTER IS->IN OUR OUTER'S) OWN (EAD->HEAD) +1998-29455-0030-2262: I (AIN'T->AM) SURE AS I (ADN'T->HADN'T) BETTER STICK TO THE ROAD AND KEEP AWAY FROM OLD (ANDS->ENDS) LIKE (YOU JIM->EUGEN) +1998-29455-0031-2263: (I OPE E'S CLEVER->IOPIUS LOVE) ENOUGH TO DO (WOT E'S TOLD KEEP IS MUG SHUT->WHAT HE STOWED HE WAS MUCH AT) THAT'S ALL +1998-29455-0032-2264: IF (E'S STRAIGHT E'LL->HE STRAYED YOU) DO FOR ME AND IF HE AIN'T I'LL DO FOR (IM->HIM) SEE +1998-29455-0033-2265: SEE THAT (BLOKE JUST->LOCTICE) NOW SAID MISTER BEALE (YUSS->YES) SAID DICKIE +1998-29455-0034-2266: WELL YOU NEVER SEE (IM->HIM) +1998-29455-0035-2267: IF ANY ONE (ARSTS->ASKED) YOU IF YOU EVER SEE (IM->HIM) YOU NEVER SET EYES ON (IM->HIM) IN ALL YOUR BORN NOT TO REMEMBER (IM->HIM) +1998-29455-0036-2268: DICKIE WAS FULL OF QUESTIONS BUT MISTER (BEALE->BEE) HAD NO ANSWERS FOR THEM +1998-29455-0037-2269: NOR WAS IT SUNDAY ON WHICH THEY TOOK A REST AND WASHED THEIR SHIRTS ACCORDING TO MISTER (BEALE'S->BEAT'S) RULE OF LIFE +1998-29455-0038-2270: THEY DID NOT STAY THERE BUT WALKED OUT ACROSS THE DOWNS (WHERE->WITH) THE (SKYLARKS->SKYLECKS) WERE SINGING AND ON A DIP OF THE DOWNS CAME UPON GREAT STONE WALLS AND TOWERS (VERY->WHERE) STRONG AND GRAY +1998-29455-0039-2271: WHAT'S THAT THERE SAID (DICKIE->DICKY) +2033-164914-0000-661: REPLIED HE OF A TRUTH I HEARD HIM NOT AND I WOT HIM NOT AND FOLKS ARE ALL SLEEPING +2033-164914-0001-662: BUT SHE SAID WHOMSOEVER THOU SEEST AWAKE HE IS THE RECITER +2033-164914-0002-663: THEN SAID THE EUNUCH ART THOU HE WHO REPEATED POETRY BUT NOW AND MY LADY HEARD HIM +2033-164914-0003-664: REJOINED THE EUNUCH WHO THEN WAS THE RECITER POINT HIM OUT TO ME +2033-164914-0004-665: BY ALLAH REPLIED THE FIREMAN I TELL THEE THE TRUTH +2033-164914-0005-666: TELL ME WHAT HAPPENED (QUOTH ZAU AL MAKAN->QUOMAN) +2033-164914-0006-667: WHAT AILS THEE THEN THAT THOU MUST NEEDS RECITE VERSES SEEING THAT WE ARE TIRED OUT WITH WALKING AND WATCHING AND ALL THE FOLK ARE ASLEEP FOR THEY REQUIRE SLEEP TO REST THEM OF THEIR FATIGUE +2033-164914-0007-668: AND HE ALSO (IMPROVISED->PROVISED) THE TWO FOLLOWING (DISTICHS->DISTINCTS) +2033-164914-0008-669: WHEN (NUZHAT->NUZHA'S) AL ZAMAN HEARD THE FIRST IMPROVISATION SHE CALLED TO (MIND->MINE) HER FATHER AND HER MOTHER AND HER BROTHER AND THEIR (WHILOME->WILHELM) HOME THEN SHE WEPT AND CRIED (AT->TO) THE EUNUCH AND SAID TO HIM WOE TO THEE +2033-164914-0009-670: HE WHO RECITED THE FIRST TIME HATH RECITED A SECOND TIME AND (I->*) HEARD HIM (HARD->HEART) BY +2033-164914-0010-671: BY ALLAH AN THOU FETCH HIM NOT TO ME I WILL ASSUREDLY ROUSE THE CHAMBERLAIN ON THEE AND HE SHALL BEAT THEE AND CAST THEE OUT +2033-164914-0011-672: BUT TAKE THESE HUNDRED DINERS AND GIVE THEM TO THE SINGER AND BRING HIM TO ME GENTLY AND DO HIM NO HURT +2033-164914-0012-673: (RETURN->RETURNED) QUICKLY AND LINGER NOT +2033-164914-0013-674: WHEN IT WAS THE SEVENTY THIRD NIGHT +2033-164914-0014-675: BUT THE (EUNUCH->EUNUCHS) SAID I WILL NOT LEAVE THEE TILL THOU SHOW ME WHO IT WAS THAT RECITED THE VERSES FOR I DREAD RETURNING TO MY LADY WITHOUT HIM +2033-164914-0015-676: NOW WHEN THE 
FIREMAN HEARD THESE WORDS HE FEARED FOR (ZAU->ZA) AL MAKAN AND WEPT WITH EXCEEDING WEEPING AND SAID TO THE EUNUCH BY ALLAH IT WAS NOT I AND (I->THEY) KNOW HIM NOT +2033-164914-0016-677: SO GO THOU TO THY STATION AND IF THOU AGAIN (MEET->*) ANY ONE AFTER THIS HOUR RECITING AUGHT OF POETRY WHETHER HE BE NEAR OR FAR IT WILL BE I OR SOME ONE I KNOW AND THOU SHALT NOT LEARN OF HIM BUT BY ME +2033-164914-0017-678: THEN HE KISSED THE EUNUCH'S HEAD AND SPAKE HIM FAIR TILL HE WENT AWAY BUT THE CASTRATO FETCHED (A ROUND->AROUND) AND RETURNING SECRETLY CAME AND STOOD BEHIND THE FIREMAN FEARING TO GO BACK TO HIS MISTRESS WITHOUT (TIDINGS->HIDINGS) +2033-164914-0018-679: I SAY WHAT MADE MY IGNOMY (WHATE'ER->WHATEVER) THE BITTER (CUP I->CUPIED) DRAIN FAR BE (FRO->FROM) ME (THAT->THE) LAND TO FLEE NOR WILL I BOW TO THOSE WHO BLAME AND FOR SUCH LOVE WOULD DEAL ME SHAME +2033-164914-0019-680: THEN SAID THE EUNUCH TO (ZAU->ZA) AL MAKAN PEACE BE WITH THEE O MY LORD +2033-164914-0020-681: O MY LORD CONTINUED THE EUNUCH AND SHAHRAZAD PERCEIVED (*->THAT) THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164914-0021-682: WE WILL DO THEE NO UPRIGHT O MY SON NOR WRONG THEE IN AUGHT BUT OUR OBJECT IS THAT THOU BEND THY (GRACIOUS->GRECIOUS) STEPS WITH ME TO MY MISTRESS TO RECEIVE HER ANSWER AND (RETURN IN WEAL->RETURNING WHEEL) AND SAFETY AND THOU SHALT HAVE A HANDSOME PRESENT AS ONE WHO BRINGETH GOOD NEWS +2033-164914-0022-683: THEN THE EUNUCH WENT OUT TO (ZAU AL->ZAO) MAKAN AND SAID TO HIM RECITE WHAT (VERSES->VERSEST) THOU KNOWEST FOR MY (LADY IS->LADIES) HERE HARD BY LISTENING TO THEE AND AFTER I WILL ASK THEE OF THY NAME AND (THY->THINE) NATIVE COUNTRY AND THY CONDITION +2033-164915-0000-643: AND ALSO THESE +2033-164915-0001-644: THEN SHE THREW HERSELF UPON HIM AND HE GATHERED HER TO HIS BOSOM AND THE TWAIN FELL DOWN IN A FAINTING FIT +2033-164915-0002-645: WHEN THE (EUNUCH->EUNUCHS) SAW (THIS CASE->THESE CAVES) HE WONDERED AT THEM AND THROWING OVER THEM SOMEWHAT TO COVER THEM WAITED TILL THEY SHOULD RECOVER +2033-164915-0003-646: AFTER A WHILE THEY CAME TO THEMSELVES AND (NUZHAT->UZHAT) AL ZAMAN REJOICED WITH EXCEEDING JOY OPPRESSION AND DEPRESSION LEFT HER AND GLADNESS TOOK THE MASTERY OF HER AND SHE REPEATED THESE VERSES +2033-164915-0004-647: ACCORDINGLY SHE TOLD HIM ALL THAT HAD COME TO HER SINCE THEIR SEPARATION AT THE KHAN AND WHAT HAD HAPPENED TO HER WITH THE (BADAWI->BADARI) HOW THE MERCHANT HAD BOUGHT HER OF HIM AND HAD TAKEN HER TO HER BROTHER (SHARRKAN->SHARKAN) AND HAD SOLD HER TO HIM HOW HE HAD FREED HER AT THE TIME OF BUYING HOW HE HAD MADE (A->HER) MARRIAGE CONTRACT WITH HER AND HAD GONE IN TO HER AND HOW THE KING THEIR SIRE HAD SENT AND ASKED FOR HER FROM (SHARRKAN->SHARKAN) +2033-164915-0005-648: BUT NOW GO TO THY MASTER AND BRING HIM QUICKLY TO ME +2033-164915-0006-649: THE CHAMBERLAIN CALLED THE CASTRATO AND CHARGED HIM TO DO ACCORDINGLY SO HE REPLIED I HEAR AND I OBEY AND HE TOOK HIS PAGES WITH HIM AND WENT OUT IN SEARCH OF THE (STOKER->STOCKER) TILL HE FOUND HIM IN THE REAR OF THE CARAVAN (GIRTHING->GIRDING) HIS ASS AND PREPARING FOR FLIGHT +2033-164915-0007-650: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN THE (STOKER GIRTHED->STOCKER GIRDED) HIS ASS FOR FLIGHT AND BESPAKE HIMSELF SAYING (OH->O) WOULD I KNEW WHAT IS BECOME OF HIM +2033-164915-0008-651: I BELIEVE HE HATH DENOUNCED ME TO THE EUNUCH HENCE THESE PAGES (ET->AT) ABOUT ME AND HE HATH MADE ME AN ACCOMPLICE IN HIS CRIME +2033-164915-0009-652: WHY DIDST THOU SAY I NEVER REPEATED (THESE->THIS) COUPLETS NOR DO I 
KNOW WHO REPEATED THEM WHEN IT WAS THY COMPANION +2033-164915-0010-653: BUT NOW I WILL NOT LEAVE THEE BETWEEN THIS PLACE AND BAGHDAD AND WHAT BETIDETH THY COMRADE SHALL (BETIDE->BE TIDE) THEE +2033-164915-0011-654: TWAS AS I FEARED THE (COMING ILLS->CARMINALS) DISCERNING BUT UNTO ALLAH WE ARE ALL RETURNING +2033-164915-0012-655: THEN THE EUNUCH CRIED UPON (THE->HIS) PAGES SAYING TAKE HIM OFF THE ASS +2033-164915-0013-656: AND HE ANSWERED I AM THE CHAMBERLAIN OF THE EMIR OF DAMASCUS KING (SHARRKAN SON->SHARKAN SONG) OF OMAR BIN AL (NU'UMAN->NUMAN) LORD OF (BAGHDAD->ADAD) AND OF THE LAND OF KHORASAN AND I BRING TRIBUTE AND PRESENTS FROM HIM TO HIS FATHER IN BAGHDAD +2033-164915-0014-657: (SO FARE YE->SOPHIA HE) FORWARDS NO HARM SHALL (BEFAL->BEFALL) YOU TILL YOU JOIN HIS GRAND WAZIR (DANDAN->TAN) +2033-164915-0015-658: THEN HE BADE HIM BE SEATED AND QUESTIONED HIM AND HE REPLIED THAT HE WAS CHAMBERLAIN TO THE EMIR OF DAMASCUS AND WAS BOUND TO KING OMAR WITH PRESENTS AND THE TRIBUTE OF SYRIA +2033-164915-0016-659: SO IT WAS AGREED THAT WE GO TO DAMASCUS AND FETCH THENCE THE KING'S SON (SHARRKAN->SHARKAN) AND (MAKE->MADE) HIM SULTAN OVER HIS FATHER'S REALM +2033-164915-0017-660: AND AMONGST THEM WERE SOME WHO WOULD HAVE CHOSEN THE CADET (ZAU AL MAKAN->THOU A MACAN) FOR QUOTH THEY HIS NAME BE LIGHT OF THE PLACE AND HE HATH A SISTER NUZHAT AL ZAMAN (HIGHS->HIES) THE DELIGHT OF THE TIME BUT THEY SET OUT FIVE YEARS AGO FOR AL (HIJAZ->HI JARS) AND NONE (WOTTETH->WHATETH) WHAT IS BECOME OF THEM +2033-164916-0000-684: SO HE TURNED TO THE WAZIR DANDAN AND SAID TO HIM VERILY YOUR TALE IS A (WONDER->WANDER) OF WONDERS +2033-164916-0001-685: (KNOW->NO) O CHIEF WAZIR THAT HERE WHERE YOU HAVE ENCOUNTERED ME ALLAH HATH GIVEN YOU REST FROM FATIGUE AND BRINGETH YOU YOUR DESIRE AFTER THE EASIEST OF FASHIONS FOR (THAT->LET) HIS ALMIGHTY WILL (RESTORETH->RESTORE IT) TO YOU (ZAU AL MAKAN->THOU ARMANQUIN) AND (HIS->HE) SISTER (NUZHAT->KNOWSAT) AL ZAMAN WHEREBY WE WILL SETTLE THE MATTER AS WE EASILY CAN +2033-164916-0002-686: WHEN THE (MINISTER->MEANESTER) HEARD THESE WORDS HE REJOICED WITH GREAT JOY AND SAID O CHAMBERLAIN TELL ME THE TALE OF THE TWAIN AND WHAT (BEFEL->BEFELL) THEM AND THE CAUSE OF THEIR LONG ABSENCE +2033-164916-0003-687: (ZAU AL MAKAN->ZAWAIN) BOWED HIS HEAD AWHILE AND THEN SAID I ACCEPT (THIS->THE) POSITION FOR INDEED THERE WAS NO REFUSING AND HE WAS CERTIFIED THAT THE CHAMBERLAIN HAD COUNSELLED HIM WELL AND WISELY AND (SET->SAID TO) HIM ON THE RIGHT WAY +2033-164916-0004-688: THEN HE ADDED O MY UNCLE HOW SHALL I DO WITH MY BROTHER (SHARRKAN->SHARKAN) +2033-164916-0005-689: AFTER (AWHILE->A WHILE) THE DUST DISPERSED AND THERE APPEARED UNDER IT THE ARMY OF BAGHDAD AND KHORASAN A CONQUERING HOST LIKE THE (FULL->POOL) TIDE SEA AND SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED TO SAY HER PERMITTED SAY +2033-164916-0006-690: WHEN IT WAS THE SEVENTY EIGHTH NIGHT +2033-164916-0007-691: (AND IN IT ALL->ANY NEAT OR) REJOICED AT THE ACCESSION OF THE LIGHT OF THE PLACE +2033-164916-0008-692: LASTLY THE MINISTER WENT IN AND KISSED THE GROUND BEFORE (ZAU AL->ZAO) MAKAN WHO ROSE TO MEET HIM SAYING WELCOME O WAZIR AND (SIRE SANS PEER->SIRES SONSPIER) +2033-164916-0009-693: MOREOVER THE SULTAN COMMANDED HIS WAZIR DANDAN CALL (A->AT) TEN DAYS HALT OF THE ARMY THAT HE MIGHT BE PRIVATE WITH HIM AND LEARN FROM HIM HOW AND WHEREFORE HIS FATHER HAD BEEN SLAIN +2033-164916-0010-694: HE THEN REPAIRED TO THE HEART OF THE ENCAMPMENT AND ORDERED (*->THAT) THE HOST TO HALT TEN DAYS +2414-128291-0000-2689: WHAT HATH 
HAPPENED (UNTO->TO) ME +2414-128291-0001-2690: HE ASKED HIMSELF SOMETHING (WARM->WRONG) AND LIVING QUICKENETH ME IT MUST BE IN (THE->THAT) NEIGHBOURHOOD +2414-128291-0002-2691: (WHEN->WHO READ) HOWEVER (ZARATHUSTRA->THEIR TWO STRAW) WAS QUITE NIGH (UNTO->AND TO) THEM THEN DID HE HEAR PLAINLY (THAT A->WITH) HUMAN VOICE (SPAKE->TAKE) IN THE MIDST OF THE (KINE->KIND) AND (APPARENTLY->A FRIENDLY) ALL OF THEM HAD TURNED THEIR HEADS TOWARDS THE SPEAKER +2414-128291-0003-2692: (WHAT DO->FOR DIEU) I HERE SEEK +2414-128291-0004-2693: ANSWERED HE THE SAME THAT THOU (SEEKEST->SEEK'ST) THOU MISCHIEF MAKER THAT IS TO SAY HAPPINESS UPON EARTH +2414-128291-0005-2694: FOR I TELL THEE THAT I HAVE (ALREADY->ALREAD) TALKED HALF A MORNING UNTO THEM AND JUST NOW (WERE->WHERE) THEY (ABOUT->WERE) TO GIVE ME (THEIR->THE) ANSWER +2414-128291-0006-2695: HE WOULD NOT BE RID OF HIS (AFFLICTION->AFFLICATION) +2414-128291-0007-2696: WHO (HATH->HAD) NOT AT PRESENT HIS HEART HIS MOUTH AND HIS EYES FULL OF DISGUST +2414-128291-0008-2697: THOU ALSO THOU ALSO +2414-128291-0009-2698: (BUT->MIGHT) BEHOLD (THESE KINE->HIS KIND) +2414-128291-0010-2699: THE (KINE->KIND) HOWEVER GAZED AT IT ALL AND WONDERED +2414-128291-0011-2700: WANTON (AVIDITY->ALDITY) BILIOUS ENVY CAREWORN REVENGE (POPULACE->POPULOUS) PRIDE ALL (THESE STRUCK MINE->DISTRACT MIGHT) EYE +2414-128291-0012-2701: IT IS NO LONGER TRUE (THAT THE->LITTLE) POOR (ARE->A) BLESSED +2414-128291-0013-2702: THE KINGDOM OF HEAVEN HOWEVER IS WITH THE (KINE->KIND) AND WHY IS IT NOT WITH (THE->A) RICH +2414-128291-0014-2703: WHY (DOST->THOSE) THOU TEMPT ME +2414-128291-0015-2704: ANSWERED (THE OTHER->HER) +2414-128291-0016-2705: THOU KNOWEST IT THYSELF BETTER EVEN THAN I +2414-128291-0017-2706: (THUS SPAKE->DOES BEG) THE PEACEFUL ONE AND PUFFED HIMSELF AND (PERSPIRED->POISPIED) WITH HIS WORDS (SO THAT THE KINE->TO INTER KIND) WONDERED ANEW +2414-128291-0018-2707: THOU (DOEST->DOST) VIOLENCE TO THYSELF THOU PREACHER ON THE (MOUNT WHEN->MOUND AND) THOU USEST SUCH (SEVERE->SAVOUR) WORDS +2414-128291-0019-2708: THEY ALSO (ABSTAIN->ABSTAINED) FROM ALL HEAVY THOUGHTS WHICH INFLATE THE HEART +2414-128291-0020-2709: WELL +2414-128291-0021-2710: SAID (ZARATHUSTRA->GUESTRA) THOU SHOULDST ALSO SEE MINE ANIMALS (MINE->MY) EAGLE AND MY SERPENT (THEIR->THEY ARE) LIKE DO NOT AT PRESENT EXIST ON EARTH +2414-128291-0022-2711: AND (TALK->TALKED) TO (MINE->MY) ANIMALS OF THE HAPPINESS OF ANIMALS +2414-128291-0023-2712: NOW HOWEVER (TAKE->THEY) LEAVE (AT ONCE->IT WAS) OF (THY KINE->THEIR KIND) THOU STRANGE (ONE->WORLD) +2414-128291-0024-2713: THOU AMIABLE ONE +2414-128291-0025-2714: FOR THEY ARE THY WARMEST FRIENDS AND (PRECEPTORS->PERCEPTORS) +2414-128291-0026-2715: THOU (EVIL FLATTERER->EVEN SLACKER) +2414-128292-0000-2618: WHITHER (HATH->HAD) MY (LONESOMENESS GONE->LONESOME DISCOUR) SPAKE HE +2414-128292-0001-2619: MY SHADOW (CALLETH->CAUGHT) ME +2414-128292-0002-2620: WHAT MATTER ABOUT MY SHADOW +2414-128292-0003-2621: (LET IT RUN AFTER->NEKHLUD TRUE ENOUGH TO) ME I (RUN->RAN) AWAY FROM IT +2414-128292-0004-2622: THUS (SPAKE ZARATHUSTRA->BEING THEIR TOO STRIKE) TO HIS HEART AND RAN AWAY +2414-128292-0005-2623: VERILY MY FOLLY HATH GROWN BIG IN THE MOUNTAINS +2414-128292-0006-2624: NOW DO I HEAR SIX OLD (FOOLS->FOOD'S) LEGS RATTLING BEHIND ONE ANOTHER +2414-128292-0007-2625: (BUT DOTH ZARATHUSTRA->BY DIRTS ARE TOUSTRA) NEED TO BE FRIGHTENED BY (HIS->A) SHADOW +2414-128292-0008-2626: ALSO (METHINKETH->METHINK IT) THAT AFTER ALL IT (HATH LONGER LEGS->HAD LONG OR LESS) THAN MINE 
+2414-128292-0009-2627: FOR WHEN (ZARATHUSTRA SCRUTINISED->THEIR DISTRESS COGNIZED) HIM (WITH HIS->IT IS) GLANCE HE WAS FRIGHTENED (AS BY->ALBERT) A (SUDDEN->CERTAIN) APPARITION SO SLENDER (SWARTHY->SWALLTY) HOLLOW AND WORN OUT DID (THIS->HIS) FOLLOWER APPEAR +2414-128292-0010-2628: (ASKED ZARATHUSTRA VEHEMENTLY->I TAKE TO EXTRAVE IMAGINE) WHAT (DOEST->DOST) THOU (HERE->HEAR) +2414-128292-0011-2629: AND WHY (CALLEST->COLLARST) THOU THYSELF MY SHADOW +2414-128292-0012-2630: THOU ART NOT PLEASING (UNTO->INTO) ME +2414-128292-0013-2631: MUST I EVER BE ON THE WAY +2414-128292-0014-2632: O (EARTH->ART) THOU HAST BECOME (TOO->TO) ROUND FOR ME +2414-128292-0015-2633: (WHEN->WITH) THE DEVIL (CASTETH->CAST AT) HIS SKIN DOTH NOT HIS NAME ALSO FALL AWAY IT IS ALSO SKIN +2414-128292-0016-2634: THE DEVIL HIMSELF IS PERHAPS SKIN +2414-128292-0017-2635: SOMETIMES I MEANT TO LIE AND BEHOLD +2414-128292-0018-2636: THEN (ONLY->OLD LADY) DID I HIT THE TRUTH +2414-128292-0019-2637: HOW (HAVE->HAIR) I STILL INCLINATION +2414-128292-0020-2638: (HAVE->EH) I STILL A (GOAL->GOLD) +2414-128292-0021-2639: A (HAVEN TOWARDS WHICH->HAIRY DOOR SPEECH) MY (SAIL IS SET->SAILOR'S SAKE) +2414-128292-0022-2640: FOR IT (DO->TOO) I ASK AND SEEK AND HAVE (SOUGHT BUT HAVE->THOUGHT IT HATH) NOT FOUND IT +2414-128292-0023-2641: (O ETERNAL->OITERNAL) EVERYWHERE (O ETERNAL->WHO HAD TURNED OUT) NOWHERE (O ETERNAL->WHO HAD TURNED OUT) IN VAIN +2414-128292-0024-2642: THOU ART MY SHADOW +2414-128292-0025-2643: SAID HE AT LAST SADLY +2414-128292-0026-2644: THY DANGER (IS NOT SMALL->HIS PERCHED ALL) THOU FREE SPIRIT AND (WANDERER->WONDER) +2414-128292-0027-2645: THEY SLEEP QUIETLY THEY (ENJOY->ENJOYED) THEIR NEW SECURITY +2414-128292-0028-2646: BEWARE LEST IN THE END A NARROW (FAITH->FIT) CAPTURE THEE A HARD (RIGOROUS->RECKLESS) DELUSION +2414-128292-0029-2647: FOR NOW EVERYTHING THAT IS NARROW AND FIXED (SEDUCETH->SEDUCE IT) AND (TEMPTETH->TEMPTED) THEE +2414-128292-0030-2648: THOU HAST LOST (THY GOAL->DAGGULE) +2414-128292-0031-2649: (THOU->THOUGH) POOR ROVER AND RAMBLER (THOU->NOW) TIRED (BUTTERFLY->BUT TO FLY) +2414-128292-0032-2650: WILT THOU HAVE A REST (AND A HOME->IN THE WHOLE) THIS EVENING +2414-159411-0000-2653: ONCE UPON (A->HER) TIME A BRAHMAN WHO WAS WALKING ALONG THE ROAD CAME UPON AN IRON CAGE IN WHICH A GREAT TIGER (HAD BEEN SHUT->AT MONSHAT) UP BY THE (VILLAGERS->VILLAGES) WHO CAUGHT HIM +2414-159411-0001-2654: THE (BRAHMAN->BRAMIAN) ANSWERED NO I WILL NOT FOR IF I LET YOU OUT OF THE CAGE YOU WILL EAT ME +2414-159411-0002-2655: OH FATHER OF MERCY ANSWERED THE TIGER IN TRUTH THAT I WILL NOT +2414-159411-0003-2656: I WILL NEVER BE SO UNGRATEFUL ONLY LET ME OUT THAT I MAY (DRINK->BRING) SOME WATER AND RETURN +2414-159411-0004-2657: (THEN->AND IN) THE (BRAHMAN TOOK->BRAM INTO) PITY ON HIM AND OPENED THE CAGE DOOR BUT NO SOONER HAD HE (DONE->TURNED) SO THAN THE TIGER JUMPING OUT SAID NOW I WILL EAT YOU FIRST AND DRINK THE WATER AFTERWARDS +2414-159411-0005-2658: SO THE (BRAHMAN->BRAMID) AND THE TIGER WALKED ON TILL THEY CAME TO A (BANYAN->BANDON) TREE AND THE (BRAHMAN->BRAMEN) SAID TO IT (BANYAN->BANION) TREE (BANYAN->BAN AND) TREE (HEAR->HERE) AND GIVE (JUDGMENT->JOINTMENT) +2414-159411-0006-2659: ON WHAT MUST I GIVE JUDGMENT ASKED THE (BANYAN->BEN) TREE +2414-159411-0007-2660: (THIS TIGER->DISTAGGER) SAID (THE->DE) BRAHMAN BEGGED ME TO LET HIM OUT OF HIS CAGE TO DRINK A LITTLE WATER AND HE PROMISED NOT TO (HURT->HIDE) ME IF I DID SO BUT NOW THAT I HAVE (LET->LEFT) HIM OUT HE WISHES TO EAT ME +2414-159411-0008-2661: (IS->*) 
IT (JUST->IS JEALOUS) THAT HE SHOULD DO SO (OR NO->I KNOW) +2414-159411-0009-2662: (LET->LATE) THE TIGER EAT THE MAN FOR MEN ARE (AN->IN) UNGRATEFUL RACE +2414-159411-0010-2663: (SIR->SO) CAMEL SIR CAMEL CRIED THE (BRAHMAN HEAR->BRAMIN HERE) AND GIVE JUDGMENT +2414-159411-0011-2664: AT A LITTLE DISTANCE THEY FOUND A BULLOCK LYING BY THE ROADSIDE +2414-159411-0012-2665: IS IT FAIR THAT HE SHOULD DO SO OR NOT +2414-159411-0013-2666: (LET THE->LATER) TIGER EAT THE MAN FOR MEN HAVE NO PITY +2414-159411-0014-2667: THREE OUT OF THE SIX (HAD GIVEN->IN GIVING) JUDGMENT AGAINST THE BRAHMAN (BUT->WHICH) STILL HE DID NOT LOSE ALL HOPE AND (DETERMINED->TO TURN MIND) TO ASK THE OTHER THREE +2414-159411-0015-2668: ON WHAT MUST I GIVE (JUDGMENT->YOU TELL ME) ASKED THE EAGLE +2414-159411-0016-2669: THE (BRAHMAN STATED->BRAM IS SUITED) THE CASE AND THE EAGLE ANSWERED WHENEVER MEN SEE ME THEY TRY TO SHOOT ME (THEY CLIMB->DECLINE) THE ROCKS AND STEAL AWAY MY LITTLE ONES +2414-159411-0017-2670: THEN THE TIGER BEGAN TO ROAR AND SAID (THE->*) JUDGMENT OF ALL IS AGAINST YOU O BRAHMAN +2414-159411-0018-2671: AFTER THIS THEY SAW AN ALLIGATOR AND THE (BRAHMAN->BRAMMER) RELATED THE MATTER TO HIM HOPING FOR A MORE (FAVORABLE->FAVOURABLE) VERDICT +2414-159411-0019-2672: (BUT->WITH) THE (ALLIGATOR SAID WHENEVER I PUT->ADDIER TO THE SUIT WHENEVER APPOINT) MY NOSE OUT OF THE WATER (MEN TORMENT->MAYN'T TOM AND) ME AND (TRY->TRIED) TO KILL ME +2414-159411-0020-2673: (THE BRAHMAN->NO GRAMMEN) GAVE HIMSELF UP AS LOST BUT AGAIN HE PRAYED THE TIGER TO HAVE PATIENCE AND LET HIM ASK THE OPINION OF THE SIXTH JUDGE +2414-159411-0021-2674: (NOW->ON) THE SIXTH WAS A JACKAL +2414-159411-0022-2675: THE (BRAHMAN->GRAMMAR) TOLD HIS STORY AND SAID TO HIM UNCLE (JACKAL UNCLE JACKAL->JACKO AND WILL JACK HO) SAY WHAT IS YOUR JUDGMENT +2414-159411-0023-2676: SHOW ME THE (PLACE->PACE) +2414-159411-0024-2677: (WHEN THEY GOT->AND THE COURT) THERE THE JACKAL SAID (NOW BRAHMAN->NABRAMAN) SHOW ME EXACTLY WHERE YOU STOOD +2414-159411-0025-2678: EXACTLY THERE WAS IT ASKED (THE JACKAL->JACO) +2414-159411-0026-2679: EXACTLY HERE REPLIED THE (BRAHMAN->PROMIN) +2414-159411-0027-2680: (WHERE->THERE) WAS THE TIGER THEN +2414-159411-0028-2681: WHY I STOOD SO SAID THE (TIGER->DRAGGER) JUMPING INTO THE CAGE AND MY HEAD WAS ON THIS SIDE +2414-159411-0029-2682: VERY GOOD SAID THE (JACKAL->JACK HOPE) BUT I CANNOT JUDGE WITHOUT UNDERSTANDING THE WHOLE MATTER EXACTLY +2414-159411-0030-2683: SHUT AND BOLTED SAID (THE BRAHMAN->DEBRAMIN) +2414-159411-0031-2684: THEN SHUT AND (BOLT IT->BOLTED) SAID (THE->TO) JACKAL +2414-159411-0032-2685: WHEN THE BRAHMAN HAD (DONE->TURNED) THIS THE JACKAL SAID OH YOU WICKED AND UNGRATEFUL (TIGER->TYER) +2414-159411-0033-2686: (WHEN THE->WITH A) GOOD BRAHMAN OPENED (YOUR CAGE->YOU CARED) DOOR IS TO EAT HIM THE ONLY RETURN (YOU->HE) WOULD MAKE +2414-159411-0034-2687: PROCEED ON YOUR JOURNEY (FRIEND BRAHMAN->FRANJAMIN) +2414-159411-0035-2688: (YOUR ROAD->HE RULED) LIES THAT WAY (AND MINE->IN MIND) THIS +2414-165385-0000-2651: (THUS->AND THERE'S) ACCOMPLISHED HE EXCITED (THE->*) ADMIRATION OF EVERY SILLY (COQUETTE->POCKET) AND THE ENVY OF EVERY (FLUTTERING COXCOMB->REFLECTING ACCOUNT) BUT BY ALL YOUNG GENTLEMEN AND LADIES OF UNDERSTANDING HE WAS HEARTILY DESPISED AS A MERE CIVILIZED MONKEY +2414-165385-0001-2652: THAT HIS SOUL MIGHT AFTERWARDS OCCUPY SUCH A STATION AS WOULD BE MOST SUITABLE TO HIS CHARACTER IT WAS (SENTENCED->INTENSE) TO INHABIT (THE->A) BODY OF THAT (FINICAL->PHYNICAL) GRINNING AND (MISCHIEVOUS->MACHIEVOUS) LITTLE 
(MIMICK->MIMIC) WITH (FOUR->FULL) LEGS WHICH (YOU->SHE) NOW BEHOLD BEFORE YOU +2609-156975-0000-2367: THEN MOSES WAS AFRAID AND SAID SURELY THE THING IS KNOWN +2609-156975-0001-2368: (HOLD->OR) ON (HOLD->HER) FAST (HOLD OUT PATIENCE IS->OH DOUBT PATENTS AS) GENIUS +2609-156975-0002-2369: LET US HAVE FAITH THAT RIGHT (MAKES->MATRON) MIGHT AND IN THAT FAITH LET (US DARE->STARED) TO DO OUR DUTY (AS->IF) WE UNDERSTAND IT LINCOLN +2609-156975-0003-2370: THE EGYPTIAN BACKGROUND OF THE BONDAGE +2609-156975-0004-2371: EVERY ONE (WHO IS TURBULENT->WHOSE TREBRANT) HAS BEEN FOUND BY (KING MERNEPTAH->GIMERNETTE PATH) THE TESTIMONY OF THE OLDEST (BIBLICAL NARRATIVES->PABRICAL NARRATIVE) REGARDING THE SOJOURN OF THE HEBREWS IN EGYPT IS ALSO IN PERFECT ACCORD WITH THE (PICTURE->PITCHER) WHICH (THE->IT) CONTEMPORARY EGYPTIAN INSCRIPTIONS GIVE (OF THE->THIS) PERIOD +2609-156975-0005-2372: THE ABSENCE OF DETAILED REFERENCE TO THE HEBREWS IS THEREFORE PERFECTLY NATURAL +2609-156975-0006-2373: IT SEEMS PROBABLE THAT NOT ALL BUT ONLY PART (OF->IN) THE TRIBES WHICH (ULTIMATELY COALESCED->ULTIMATE COLLETS) INTO THE HEBREW NATION FOUND THEIR WAY TO EGYPT +2609-156975-0007-2374: THE STORIES REGARDING JOSEPH (THE->THEIR) TRADITIONAL (FATHER OF EPHRAIM AND MANASSEH IMPLY->FOUNDER THAT FROM IN MANETTE SE INCLINE) THAT THESE STRONG CENTRAL TRIBES POSSIBLY TOGETHER WITH THE SOUTHERN (TRIBES->TRINES) OF BENJAMIN AND JUDAH WERE THE CHIEF ACTORS (IN THIS->*) OPENING SCENE IN ISRAEL'S HISTORY +2609-156975-0008-2375: THE (BIBLICAL->BIBOCO) NARRATIVES APPARENTLY (DISAGREE REGARDING->DISAGREED GUARDING) THE DURATION OF THE SOJOURN IN EGYPT +2609-156975-0009-2376: THE LATER (TRADITIONS TEND TO EXTEND->JUDICINES INTEREST IN) THE PERIOD +2609-156975-0010-2377: (HERE->YOU) WERE FOUND (SEVERAL INSCRIPTIONS BEARING->SEVEREND SCRIPS AND SPARED) THE EGYPTIAN NAME OF THE CITY (P ATUM HOUSE->PATUM OUTS) OF THE GOD (ATUM->ATOM) +2609-156975-0011-2378: A CONTEMPORARY INSCRIPTION (ALSO STATES->ONCE ESTATES) THAT HE (FOUNDED->FOUND A) NEAR (PITHUM->PITTHAM) THE HOUSE OF (RAMSES->RANSES) A CITY WITH (A->THE) ROYAL RESIDENCE (AND->IN) TEMPLES +2609-156975-0012-2379: THAT THE HEBREWS WERE (RESTIVE->WRETS OF) UNDER THIS (TYRANNY->SURNING) WAS (NATURAL->NATURALLY) INEVITABLE +2609-156975-0013-2380: (WAS->WHEREAS) ANY OTHER PROCEDURE TO BE (EXPECTED->INSPECTOR) FROM (A DESPOTIC RULER->IT THAT SPOTIC ROAR) OF THAT LAND AND DAY +2609-156975-0014-2381: THE MAKING OF (A->THE) LOYAL PATRIOT +2609-156975-0015-2382: THE STORY OF MOSES BIRTH (AND->AN) EARLY CHILDHOOD IS ONE OF THE MOST INTERESTING CHAPTERS IN BIBLICAL HISTORY +2609-156975-0016-2383: (WAS MOSES JUSTIFIED IN RESISTING->WIS MOVES IT'S JEST FIND AN RESIST IN) THE EGYPTIAN (TASKMASTER->TAX MASTER) +2609-156975-0017-2384: (IS PEONAGE->HIS PINIONS) ALWAYS (DISASTROUS->DISASTERATE) NOT (ONLY->OWING) TO ITS VICTIMS BUT ALSO TO THE GOVERNMENT IMPOSING IT +2609-156975-0018-2385: NATURALLY HE WENT TO THE LAND (OF MIDIAN->A MILLION) +2609-156975-0019-2386: THE WILDERNESS TO THE EAST OF EGYPT (HAD->AND) FOR CENTURIES BEEN THE (PLACE->PLATES) OF REFUGE (FOR->OR) EGYPTIAN FUGITIVES +2609-156975-0020-2387: FROM ABOUT TWO THOUSAND B C +2609-156975-0021-2388: ON THE BORDERS OF THE WILDERNESS HE FOUND CERTAIN BEDOUIN (HERDSMEN->HERDSMAN) WHO RECEIVED HIM (HOSPITABLY->HALF SPITABLY) +2609-156975-0022-2389: THESE (SAND WANDERERS->SANDWARES) SENT HIM ON FROM (TRIBE->TIME) TO (TRIBE->TIME) UNTIL HE REACHED THE LAND OF (KEDEM EAST->KIDDAM EACH) OF THE DEAD SEA WHERE HE REMAINED FOR A YEAR AND A HALF 
+2609-156975-0023-2390: LATER HE FOUND HIS WAY TO THE COURT OF ONE OF THE LOCAL KINGS (IN->AND) CENTRAL PALESTINE WHERE HE MARRIED AND (BECAME->MICHANG) IN (*->THE) TIME A PROSPEROUS LOCAL PRINCE +2609-156975-0024-2391: THE SCHOOL OF THE (WILDERNESS->WEARINESS) +2609-156975-0025-2392: THE STORY OF MOSES IS IN MANY WAYS CLOSELY PARALLEL (TO THAT OF SINUHIT->DID NOT ASSUME IT) +2609-156975-0026-2393: THE PRIEST (OF->*) THE (SUB TRIBE->SUBTRINE) OF THE KENITES RECEIVED HIM INTO HIS HOME AND GAVE HIM HIS DAUGHTER IN MARRIAGE +2609-156975-0027-2394: NOTE THE (CHARACTERISTIC ORIENTAL IDEA->CARE OF RIVER STICK ORIENTOUINE) OF (MARRIAGE->MARES) +2609-156975-0028-2395: HERE MOSES LEARNED (THE->THAT) LESSONS THAT WERE ESSENTIAL FOR HIS (TRAINING->TRAINED IN) AS (THE->A) LEADER AND DELIVERER OF HIS PEOPLE +2609-156975-0029-2396: AFTER THE CAPTURE OF JERICHO (CERTAIN->CERTAINLY) OF THEM WENT UP WITH (THE SOUTHERN TRIBES TO->A SUDDEN TRIUMPHS SHE) CONQUER SOUTHERN PALESTINE +2609-156975-0030-2397: MANY MODERN SCHOLARS (DRAW->DRAWN) THE CONCLUSION FROM THE BIBLICAL NARRATIVE THAT IT WAS FROM THE (KENITES->KENNITES) THAT MOSES FIRST LEARNED OF (YAHWEH->YONWAY) OR AS THE DISTINCTIVE NAME OF ISRAEL'S (GOD->GONE) WAS TRANSLATED BY LATER JEWISH (SCRIBES->GRIBES) JEHOVAH +2609-156975-0031-2398: DO THE (EARLIEST HEBREW->ARIAD SEA BOU) TRADITIONS IMPLY THAT (THE ANCESTORS->INSECTORS) OF THE (ISRAELITES->ISRAIT) WERE (WORSHIPPERS->WORSHIPPED) OF JEHOVAH +2609-156975-0032-2399: THE (TITLE->TOWN) OF HIS (FATHER IN LAW->FUND THEM AND ALL) IMPLIES (THAT->AT) THIS (PRIEST->PREACH) MINISTERED AT SOME (WILDERNESS->LINEN AT) SANCTUARY +2609-156975-0033-2400: (MOSES->ROSES) IN THE HOME OF THE (MIDIAN PRIEST->MENDIAN PRIESTS) WAS BROUGHT INTO DIRECT AND CONSTANT CONTACT WITH THE JEHOVAH WORSHIP +2609-156975-0034-2401: THE CRUEL FATE OF (HIS->THIS) PEOPLE AND THE PAINFUL EXPERIENCE IN EGYPT THAT HAD DRIVEN HIM INTO THE WILDERNESS PREPARED HIS MIND TO RECEIVE THIS TRAINING +2609-156975-0035-2402: HIS (QUEST->PRICE) WAS FOR A (JUST->JETS) AND STRONG GOD ABLE TO DELIVER THE OPPRESSED +2609-156975-0036-2403: THE (WILDERNESS->WEDDINGS) WITH ITS LURKING FOES AND THE EVER PRESENT DREAD OF HUNGER AND THIRST (DEEPENED HIS->DEEP INTO) SENSE OF NEED AND OF DEPENDENCE UPON (A->THE) POWER ABLE TO GUIDE THE (DESTINIES->DEBTS NEEDS) OF MEN +2609-156975-0037-2404: THE PEASANTS OF THE (VAST ANTOLIAN->VATS INTOLLIUM) PLAIN (IN->OF) CENTRAL (ASIA->AS A) MINOR (STILL->SO) CALL EVERY LIFE (GIVING->GIVEN) SPRING GOD HATH GIVEN +2609-156975-0038-2405: (THE CONSTANT->THEY CAN'T SENT THE) NECESSITY (OF->A) MEETING THE DANGERS OF THE (WILDERNESS->WORDERNESS) AND (OF->THE) DEFENDING THE FLOCKS (ENTRUSTED TO MOSES->AND TRITES OF JEMOSIS) CARE DEVELOPED HIS COURAGE AND POWER OF (LEADERSHIP->LEISURESHIP) AND ACTION +2609-157645-0000-2352: EVIDENTLY THE INTENTION (WAS TO MAKE->WHICH MADE) THINGS (PLEASANT->PRESENT) FOR THE ROYAL (FOE OF->FOLK A) TOBACCO DURING HIS VISIT +2609-157645-0001-2353: THE (PROHIBITION IN->PROBES AND) THE (REGULATION->REGULATING) QUOTED (OF->HER) SMOKING (IN->AND) SAINT MARY'S CHURCH REFERRED (IT->TO) MAY BE NOTED TO THE ACT WHICH WAS HELD THEREIN +2609-157645-0002-2354: SOMETIMES TOBACCO (WAS->IS) USED IN CHURCH FOR (DISINFECTING OR DEODORIZING->DISINFECT AND NO DEAL ARISING) PURPOSES +2609-157645-0003-2355: (BLACKBURN ARCHBISHOP->BLACKBIRD ARCHBISH) OF YORK WAS A GREAT SMOKER +2609-157645-0004-2356: ON ONE OCCASION HE WAS AT SAINT MARY'S CHURCH (NOTTINGHAM->NINE IN HAM) FOR A (CONFIRMATION->CONFIRMATESON) 
+2609-157645-0005-2357: ANOTHER EIGHTEENTH CENTURY CLERICAL WORTHY THE FAMOUS (DOCTOR PARR->DOCTRIPAR) AN INVETERATE SMOKER WAS ACCUSTOMED TO DO (WHAT MISTER DISNEY->AT MIDSER DIDNY) PREVENTED (ARCHBISHOP->ARCHBISH OF) BLACKBURN FROM DOING HE SMOKED IN HIS (VESTRY->VETERY) AT HATTON +2609-157645-0006-2358: (PARR WAS->PAR WITH) SUCH A (CONTINUAL SMOKER->CONTINUOUS MOTOR) THAT (ANYONE->ANY ONE) WHO CAME INTO HIS COMPANY IF HE HAD NEVER SMOKED BEFORE (HAD->AND) TO (LEARN->LEARNED) THE USE OF A PIPE AS A MEANS OF SELF DEFENCE +2609-157645-0007-2359: ONE SUNDAY SAYS MISTER (DITCHFIELD->DITZFIELD) HE (HAD AN EXTRA->ENDING THAT SIR) PIPE AND (JOSHUA->JOHNSHAW) THE CLERK TOLD HIM THAT THE PEOPLE WERE GETTING IMPATIENT +2609-157645-0008-2360: (LET->THEM TO) THEM (SING ANOTHER PSALM SAID->SINGING NOW THE PSALMS SAKE) THE CURATE +2609-157645-0009-2361: THEY HAVE SIR REPLIED THE CLERK +2609-157645-0010-2362: THEN LET THEM SING THE HUNDRED AND NINETEENTH REPLIED THE CURATE +2609-157645-0011-2363: SIX ARMS THE (NEAREST->NURSE) WITHIN REACH PRESENTED WITH AN OBEDIENT START (*->AND) AS MANY TOBACCO (POUCHES->PIUCHES) TO THE (MAN->MEN) OF OFFICE +2609-157645-0012-2364: DAVID (DEANS HOWEVER->DEAN SAMURED) DID NOT AT ALL (APPROVE->IMPROVE) THIS IRREVERENCE +2609-157645-0013-2365: (GOING TO->GO INTO) CHURCH (AT HAYES IN THOSE->THAT HASAN'S) DAYS (MUST->MISTS) HAVE BEEN (QUITE AN EXCITING EXPERIENCE->ACQUAINTED AND THE SIGNING SPIRITS) +2609-157645-0014-2366: WHEN THESE MEN IN THE COURSE OF MY REMONSTRANCE FOUND (*->OUT) THAT (I->*) WAS NOT GOING TO CONTINUE THE (CUSTOM->COTTOM) THEY NO LONGER CARED TO BE COMMUNICANTS +2609-169640-0000-2406: (PROAS->PERHAPS) IN THAT QUARTER WERE (USUALLY DISTRUSTED->USUAL DISTRUDGED) BY (SHIPS IT->THE STEPS THAT) IS TRUE BUT THE (SEA IS FULL OF->SEAS FOR) THEM AND FAR MORE ARE INNOCENT THAN ARE GUILTY OF ANY ACTS OF VIOLENCE +2609-169640-0001-2407: (AN HOUR AFTER->NOW I OUTDREW) THE SUN HAD SET THE WIND FELL (TO A->TURNED) LIGHT AIR (THAT JUST->DAT JESTS) KEPT STEERAGE WAY ON THE SHIP +2609-169640-0002-2408: FORTUNATELY THE JOHN WAS NOT ONLY (FAST->FAT) BUT (SHE->SEA) MINDED HER (HELM->HAIL) AS (A LIGHT FOOTED->THE LIGHTFOOTED) GIRL (TURNS IN A->TURNED TO THE) LIVELY DANCE +2609-169640-0003-2409: I NEVER WAS IN A BETTER (STEERING->STIRRING) SHIP (MOST ESPECIALLY IN->PERCY SPENT FREE AND) MODERATE WEATHER +2609-169640-0004-2410: MISTER MARBLE HE (I DO->OUGHT TO) BELIEVE WAS FAIRLY SNOOZING ON THE (HEN COOPS->INCOUPS) BEING LIKE THE (SAILS->SAILORS) AS ONE MIGHT SAY (BARELY ASLEEP->VARIOUS LEAP) +2609-169640-0005-2411: AT THAT MOMENT I (HEARD->IN) A NOISE (ONE->WHEN) FAMILIAR TO (SEAMEN->SEAMAN) THAT OF AN OAR (FALLING->FOLLOWING) IN A BOAT +2609-169640-0006-2412: (I SANG OUT SAIL->AS IN YET SO) HO AND CLOSE (ABOARD->ABROAD) +2609-169640-0007-2413: HE WAS (TOO MUCH->SHIMMERTS) OF (A->THE) SEAMAN TO REQUIRE (A->*) SECOND LOOK IN ORDER TO ASCERTAIN (WHAT->BUT) WAS TO BE DONE +2609-169640-0008-2414: ALTHOUGH THEY WENT THREE FEET TO OUR TWO THIS GAVE (US A->UP SOME) MOMENT OF BREATHING TIME +2609-169640-0009-2415: AS OUR (SHEETS->SEATS) WERE ALL FLYING FORWARD AND REMAINED SO FOR A FEW MINUTES IT GAVE ME (*->A) LEISURE TO (LOOK->WORK) ABOUT +2609-169640-0010-2416: I SOON SAW BOTH (PROAS->PROTS) AND (GLAD ENOUGH->GRINDING UP) WAS I TO PERCEIVE THAT THEY HAD NOT APPROACHED MATERIALLY (NEARER->IN NEW YORK) +2609-169640-0011-2417: (MISTER KITE OBSERVED->BISHOIS DESERVED) THIS ALSO AND REMARKED THAT OUR MOVEMENTS HAD BEEN SO PROMPT AS TO TAKE THE (RASCALS->RASCAL WAS) ABACK 
+2609-169640-0012-2418: A (BREATHLESS STILLNESS->BREATH WHICH STILL IN ITS) SUCCEEDED +2609-169640-0013-2419: THE (PROAS->PROVIDES) DID NOT ALTER (THEIR->THE) COURSE BUT (NEARED US->NEAR TO ITS) FAST +2609-169640-0014-2420: I HEARD THE (RATTLING->RIDERING) OF THE BOARDING (PIKES->PIPES) TOO AS THEY WERE CUT ADRIFT FROM THE SPANKER BOOM AND FELL UPON THE DECKS +2609-169640-0015-2421: (KITE->KIND) WENT (AFT->APT) AND RETURNED WITH THREE OR FOUR (MUSKETS->MASKETS) AND AS MANY PIKES +2609-169640-0016-2422: THE STILLNESS THAT (REIGNED->RAINED) ON BOTH SIDES WAS LIKE THAT OF DEATH +2609-169640-0017-2423: THE JOHN BEHAVED BEAUTIFULLY (AND->HE) CAME (ROUND->AROUND) LIKE A TOP +2609-169640-0018-2424: THE QUESTION WAS NOW WHETHER WE COULD PASS (THEM->NO) OR NOT BEFORE THEY GOT NEAR ENOUGH TO (GRAPPLE->GRANTEL) +2609-169640-0019-2425: THE CAPTAIN BEHAVED (PERFECTLY->PERFECTUALLY) WELL IN (THIS->ITS) CRITICAL INSTANT COMMANDING A DEAD (SILENCE AND->SCIENCE IN) THE (CLOSEST ATTENTION->CITIZENS) TO HIS ORDERS +2609-169640-0020-2426: (NOT A SOUL->NOW SO) ON BOARD THE JOHN WAS HURT +2609-169640-0021-2427: (ON OUR SIDE->WHEN OURSAN) WE GAVE THE (GENTLEMEN->GENTLEMAN) THE FOUR (SIXES TWO AT->SIX TO OUT) THE (NEAREST->NEWS) AND TWO AT THE (STERN MOST PROA->STERNMOST PROVERB) WHICH WAS STILL NEAR A CABLE'S LENGTH (*->OF) DISTANT +2609-169640-0022-2428: THEY WERE (LIKE THE YELLS->NIGHTLY YEARS) OF FIENDS IN (ANGUISH->ENGLISH) +2609-169640-0023-2429: (I DOUBT->AND OUT) IF WE (TOUCHED A MAN->TOUCH THE MAIN) IN THE (NEAREST PROA->NURSE PRAYER) +2609-169640-0024-2430: (IN THIS->AND THAT) STATE THE SHIP PASSED AHEAD (ALL HER->ON FOR A) CANVAS (BEING FULL->BEEN FOR) LEAVING THE (PROA MOTIONLESS->PROW MUCH ENRICHED) IN HER WAKE +3005-163389-0000-1108: THEY SWARMED UP IN FRONT OF (SHERBURN'S->SHERBURNE'S) PALINGS AS THICK AS THEY COULD JAM TOGETHER AND YOU COULDN'T HEAR YOURSELF THINK FOR THE NOISE +3005-163389-0001-1109: SOME SUNG OUT TEAR DOWN THE FENCE TEAR DOWN THE FENCE +3005-163389-0002-1110: THE STILLNESS WAS AWFUL CREEPY AND UNCOMFORTABLE +3005-163389-0003-1111: SHERBURN RUN HIS EYE SLOW ALONG THE CROWD AND WHEREVER IT STRUCK THE PEOPLE TRIED A LITTLE TO (OUT GAZE->OUTGAZE) HIM BUT THEY COULDN'T THEY DROPPED THEIR EYES AND LOOKED SNEAKY +3005-163389-0004-1112: THE AVERAGE MAN'S A COWARD +3005-163389-0005-1113: BECAUSE THEY'RE AFRAID THE MAN'S FRIENDS WILL SHOOT THEM IN THE BACK IN THE (DARKAND->DARK AND) IT'S JUST WHAT THEY WOULD DO +3005-163389-0006-1114: SO THEY ALWAYS ACQUIT AND THEN A MAN GOES IN THE NIGHT WITH A HUNDRED (MASKED->MASSED) COWARDS AT HIS BACK AND LYNCHES THE RASCAL +3005-163389-0007-1115: YOU DIDN'T WANT TO COME +3005-163389-0008-1116: BUT A MOB WITHOUT ANY MAN AT THE HEAD OF IT IS BENEATH PITIFULNESS +3005-163389-0009-1117: NOW (LEAVE->LE) AND TAKE YOUR HALF A MAN WITH YOU TOSSING HIS GUN UP ACROSS HIS LEFT ARM AND COCKING IT WHEN HE SAYS THIS +3005-163389-0010-1118: THE CROWD WASHED BACK SUDDEN AND THEN BROKE ALL APART AND WENT TEARING OFF EVERY WHICH WAY AND BUCK (HARKNESS->HARKINS) HE (HEELED->HEALED) IT AFTER THEM LOOKING TOLERABLE CHEAP +3005-163389-0011-1119: (YOU->HE) CAN'T BE TOO CAREFUL +3005-163389-0012-1120: THEY ARGUED AND TRIED TO KEEP HIM OUT BUT HE WOULDN'T LISTEN AND (THE->A) WHOLE SHOW COME TO A (STANDSTILL->FAN STILL) +3005-163389-0013-1121: AND ONE OR TWO WOMEN (BEGUN->BEGAN) TO SCREAM +3005-163389-0014-1122: SO THEN (THE RINGMASTER->A RING MASTER) HE MADE A LITTLE SPEECH AND SAID HE HOPED THERE WOULDN'T BE NO DISTURBANCE AND IF THE MAN WOULD PROMISE HE WOULDN'T MAKE NO 
MORE TROUBLE HE WOULD LET HIM RIDE IF HE THOUGHT HE COULD STAY ON THE HORSE +3005-163389-0015-1123: IT WARN'T FUNNY TO ME THOUGH I WAS ALL OF A TREMBLE TO SEE HIS DANGER +3005-163389-0016-1124: AND (THE->A) HORSE A GOING LIKE A HOUSE AFIRE TOO +3005-163389-0017-1125: HE (SHED->SHARED) THEM SO THICK THEY KIND OF CLOGGED UP THE AIR AND ALTOGETHER HE SHED SEVENTEEN SUITS +3005-163389-0018-1126: WHY IT WAS ONE OF HIS OWN MEN +3005-163390-0000-1185: (ANDBUT->AND BUT) NEVER MIND THE REST OF HIS OUTFIT IT WAS JUST WILD BUT IT WAS AWFUL FUNNY +3005-163390-0001-1186: THE PEOPLE MOST KILLED THEMSELVES LAUGHING AND WHEN THE KING GOT DONE CAPERING AND CAPERED OFF BEHIND THE SCENES THEY ROARED AND CLAPPED AND STORMED AND (HAW HAWED->HAWHAT) TILL HE COME BACK AND DONE IT OVER AGAIN AND AFTER THAT THEY MADE HIM DO IT ANOTHER TIME +3005-163390-0002-1187: TWENTY PEOPLE (SINGS->SANGS) OUT +3005-163390-0003-1188: THE DUKE SAYS YES +3005-163390-0004-1189: EVERYBODY SINGS OUT SOLD +3005-163390-0005-1190: BUT A BIG FINE LOOKING MAN JUMPS UP ON A BENCH AND SHOUTS HOLD ON +3005-163390-0006-1191: JUST A WORD GENTLEMEN THEY STOPPED TO LISTEN +3005-163390-0007-1192: WHAT WE WANT IS TO GO OUT OF HERE QUIET AND TALK THIS SHOW UP AND SELL THE REST (OF->O) THE TOWN +3005-163390-0008-1193: (YOU BET->YE BADE) IT IS THE (JEDGE->JUDGE) IS RIGHT EVERYBODY SINGS OUT +3005-163390-0009-1194: WE STRUCK THE RAFT AT THE SAME TIME AND IN LESS THAN TWO SECONDS WE WAS GLIDING DOWN STREAM ALL DARK AND STILL AND EDGING TOWARDS THE MIDDLE OF THE RIVER NOBODY SAYING A WORD +3005-163390-0010-1195: WE NEVER SHOWED A LIGHT TILL WE WAS ABOUT TEN MILE BELOW THE VILLAGE +3005-163390-0011-1196: GREENHORNS (FLATHEADS->FLAT HEADS) +3005-163390-0012-1197: NO I (SAYS->SAY IS) IT DON'T +3005-163390-0013-1198: WELL IT DON'T BECAUSE IT'S IN (THE BREED->DE BREATHE) I RECKON THEY'RE ALL ALIKE +3005-163390-0014-1199: WELL THAT'S WHAT (I'M A->I MUST) SAYING ALL KINGS IS MOSTLY (RAPSCALLIONS->RATCALIONS) AS FUR AS I CAN MAKE OUT (IS DAT->HERE'S DAT'S) SO +3005-163390-0015-1200: AND LOOK AT CHARLES SECOND AND LOUIS FOURTEEN AND LOUIS FIFTEEN AND JAMES SECOND AND EDWARD SECOND AND RICHARD THIRD AND FORTY MORE BESIDES ALL THEM SAXON (HEPTARCHIES->HEPTARK IS) THAT USED TO RIP AROUND SO (IN->WHEN) OLD TIMES AND (RAISE CAIN->RAISED GAME) +3005-163390-0016-1201: MY YOU OUGHT TO (SEEN->SEE AN) OLD HENRY THE EIGHT WHEN HE WAS IN BLOOM HE WAS A BLOSSOM +3005-163390-0017-1202: RING UP FAIR (ROSAMUN->ROSAMOND) +3005-163390-0018-1203: WELL HENRY HE TAKES A NOTION HE WANTS TO (GET->GIT) UP SOME TROUBLE WITH THIS COUNTRY +3005-163390-0019-1204: S'POSE HE OPENED HIS (MOUTHWHAT->MOUTH WHAT) THEN +3005-163390-0020-1205: ALL I SAY IS KINGS IS KINGS (AND->AN) YOU GOT TO MAKE ALLOWANCES +3005-163390-0021-1206: TAKE THEM ALL AROUND THEY'RE A MIGHTY ORNERY LOT IT'S THE WAY THEY'RE RAISED +3005-163390-0022-1207: WELL THEY ALL DO JIM +3005-163390-0023-1208: NOW (DE DUKE->TO DO) HE'S A (TOLERBLE LIKELY->TOLERABLE LIKE THE) MAN IN SOME WAYS +3005-163390-0024-1209: THIS ONE'S A (MIDDLING->MIDDLIN) HARD LOT FOR A (DUKE->DUPE) +3005-163390-0025-1210: WHEN I WAKED UP (JUST->JEST) AT DAYBREAK HE WAS SITTING THERE WITH HIS HEAD DOWN BETWIXT HIS KNEES MOANING AND MOURNING TO HIMSELF +3005-163390-0026-1211: IT DON'T SEEM NATURAL BUT I RECKON IT'S SO +3005-163390-0027-1212: HE WAS OFTEN MOANING (AND->IN) MOURNING THAT WAY NIGHTS WHEN HE JUDGED I WAS ASLEEP AND SAYING PO LITTLE (LIZABETH->ELIZABETH) +3005-163390-0028-1213: (DOAN->DON'T) YOU HEAR ME (SHET->SHUT) DE DO +3005-163390-0029-1214: I 
LAY I MAKE YOU MINE +3005-163390-0030-1215: (JIS->GIT) AS LOUD AS I COULD YELL +3005-163391-0000-1127: WHICH WAS SOUND ENOUGH JUDGMENT BUT YOU TAKE THE AVERAGE MAN AND HE WOULDN'T WAIT FOR HIM TO HOWL +3005-163391-0001-1128: THE KING'S (DUDS->DERDS) WAS ALL BLACK AND HE DID LOOK REAL SWELL (AND->AN) STARCHY +3005-163391-0002-1129: WHY BEFORE HE LOOKED LIKE THE ORNERIEST OLD RIP THAT EVER WAS BUT NOW WHEN HE'D TAKE OFF HIS NEW WHITE BEAVER AND MAKE A BOW AND DO A SMILE HE LOOKED THAT GRAND AND GOOD AND PIOUS THAT YOU'D SAY (HE HAD->HE'D) WALKED RIGHT OUT OF THE ARK AND MAYBE WAS OLD (LEVITICUS->LUVIDICUS) HIMSELF +3005-163391-0003-1130: JIM CLEANED UP THE CANOE AND I GOT MY PADDLE READY +3005-163391-0004-1131: (WHER->WERE) YOU BOUND FOR YOUNG MAN +3005-163391-0005-1132: (GIT->GET) ABOARD SAYS THE KING +3005-163391-0006-1133: I DONE SO (AND->AN) THEN WE ALL THREE STARTED ON AGAIN +3005-163391-0007-1134: THE YOUNG CHAP WAS MIGHTY THANKFUL SAID IT WAS TOUGH WORK TOTING HIS BAGGAGE SUCH WEATHER +3005-163391-0008-1135: (HE ASKED->THE AIR) THE KING WHERE HE WAS GOING AND THE KING TOLD HIM HE'D COME DOWN (THE->A) RIVER AND LANDED AT THE OTHER VILLAGE THIS MORNING AND NOW HE WAS GOING UP A FEW (MILE->MILES) TO SEE AN OLD FRIEND ON A FARM UP THERE THE YOUNG FELLOW SAYS +3005-163391-0009-1136: BUT THEN I SAYS AGAIN NO I RECKON IT AIN'T HIM OR ELSE HE WOULDN'T BE (PADDLING->PADDLIN) UP THE RIVER YOU AIN'T HIM ARE YOU +3005-163391-0010-1137: NO MY NAME'S (BLODGETT ELEXANDER BLODGETT->OBLIGE IT ALEXANDER BLODGET) REVEREND (ELEXANDER BLODGETT->ALEXANDER BLODGET) I S'POSE I MUST SAY AS I'M ONE (O->OF) THE (LORD'S->LORDS) POOR SERVANTS +3005-163391-0011-1138: YOU SEE HE WAS PRETTY OLD AND (GEORGE'S G'YIRLS->GEORGE IS GUY EARLS) WAS TOO YOUNG TO BE MUCH COMPANY FOR HIM EXCEPT MARY JANE THE RED HEADED ONE AND SO HE WAS KINDER LONESOME AFTER GEORGE AND HIS WIFE DIED AND DIDN'T SEEM TO CARE MUCH TO LIVE +3005-163391-0012-1139: TOO BAD TOO BAD HE COULDN'T (A->HAVE) LIVED TO SEE HIS (BROTHERS->BROTHER'S) POOR SOUL +3005-163391-0013-1140: I'M (GOING->GOIN) IN A SHIP NEXT WEDNESDAY FOR (RYO JANEERO->RIO GENERO) WHERE MY UNCLE (LIVES->IS) +3005-163391-0014-1141: BUT IT'LL BE LOVELY (WISHT->WISHED) I WAS A (GOING->GOIN) +3005-163391-0015-1142: MARY JANE'S NINETEEN SUSAN'S FIFTEEN AND JOANNA'S ABOUT (FOURTEENTHAT'S->FOURTEEN THAT'S) THE ONE THAT GIVES HERSELF TO GOOD WORKS AND HAS A (HARE->HAIR) LIP POOR THINGS +3005-163391-0016-1143: WELL THEY COULD BE WORSE OFF +3005-163391-0017-1144: (OLD->O) PETER HAD FRIENDS AND THEY AIN'T GOING TO LET THEM COME TO NO HARM +3005-163391-0018-1145: BLAMED IF HE DIDN'T (INQUIRE->ACQUIRE) ABOUT EVERYBODY AND EVERYTHING (IN->AND) THAT BLESSED TOWN AND ALL ABOUT THE (WILKSES->WILKES) AND ABOUT PETER'S (BUSINESSWHICH->BUSINESS WHICH) WAS A TANNER AND ABOUT (GEORGE'SWHICH->GEORGE'S WHICH) WAS A CARPENTER AND ABOUT (HARVEY'SWHICH->HARVEST WHICH) WAS A DISSENTERING MINISTER AND SO ON AND SO ON THEN HE SAYS +3005-163391-0019-1146: WHEN (THEY'RE->HER) DEEP THEY WON'T STOP FOR A HAIL +3005-163391-0020-1147: WAS PETER (WILKS->WILKES) WELL OFF +3005-163391-0021-1148: WHEN (WE STRUCK->WASTED UP) THE BOAT SHE WAS ABOUT DONE LOADING AND PRETTY SOON SHE GOT OFF +3005-163391-0022-1149: NOW HUSTLE BACK RIGHT OFF AND FETCH THE DUKE UP HERE AND THE NEW CARPET BAGS +3005-163391-0023-1150: SO THEN THEY WAITED FOR A STEAMBOAT +3005-163391-0024-1151: (BUT->THAT) THE KING WAS (CA'M->CALM) HE SAYS +3005-163391-0025-1152: THEY (GIVE->GAVE) A GLANCE AT ONE ANOTHER AND NODDED THEIR HEADS AS MUCH AS TO SAY 
(WHAT D I->WOULD THEY) TELL YOU +3005-163391-0026-1153: THEN ONE OF THEM SAYS KIND OF SOFT AND GENTLE +3005-163399-0000-1154: PHELPS (WAS->IS) ONE OF THESE LITTLE ONE HORSE COTTON PLANTATIONS AND THEY ALL LOOK ALIKE +3005-163399-0001-1155: I WENT AROUND AND (CLUMB->CLIMB) OVER THE BACK STILE BY THE ASH HOPPER AND STARTED FOR THE KITCHEN +3005-163399-0002-1156: (I->AH) OUT WITH A (YES'M BEFORE->YES AND FORE) I THOUGHT +3005-163399-0003-1157: SO THEN SHE STARTED FOR THE HOUSE LEADING ME BY THE HAND AND THE CHILDREN TAGGING AFTER +3005-163399-0004-1158: WHEN WE GOT THERE SHE SET ME DOWN IN A SPLIT (BOTTOMED->BOTTOM) CHAIR AND SET HERSELF DOWN ON A LITTLE LOW STOOL IN FRONT OF ME HOLDING BOTH OF MY HANDS AND SAYS +3005-163399-0005-1159: WELL IT'S LUCKY BECAUSE SOMETIMES PEOPLE DO GET HURT +3005-163399-0006-1160: AND I THINK HE DIED AFTERWARDS HE WAS A BAPTIST +3005-163399-0007-1161: YES IT WAS (MORTIFICATIONTHAT->MORTIFICATION THAT) WAS IT +3005-163399-0008-1162: YOUR UNCLE'S BEEN UP TO THE TOWN EVERY DAY TO FETCH YOU +3005-163399-0009-1163: YOU MUST (A MET->AMERD) HIM ON THE ROAD DIDN'T YOU OLDISH MAN WITH A +3005-163399-0010-1164: WHY CHILD (IT LL->IT'LL) BE STOLE +3005-163399-0011-1165: IT WAS (KINDER->KIND OR) THIN (ICE->EYES) BUT I SAYS +3005-163399-0012-1166: I HAD MY MIND ON THE CHILDREN ALL THE TIME I WANTED TO GET THEM OUT TO ONE SIDE AND (PUMP->PUMPED) THEM A LITTLE AND FIND OUT WHO I WAS +3005-163399-0013-1167: (PRETTY->BERTIE) SOON SHE MADE THE COLD (CHILLS->CHILL) STREAK ALL DOWN MY BACK BECAUSE SHE SAYS +3005-163399-0014-1168: I SEE IT WARN'T A BIT OF USE TO TRY TO GO AHEAD I'D GOT TO THROW UP MY HAND +3005-163399-0015-1169: SO I SAYS TO MYSELF (HERE'S->HERE IS) ANOTHER PLACE WHERE I GOT TO (RESK->REST) THE TRUTH +3005-163399-0016-1170: I OPENED MY MOUTH TO BEGIN BUT SHE GRABBED ME AND HUSTLED ME IN BEHIND THE BED AND SAYS HERE HE COMES +3005-163399-0017-1171: CHILDREN DON'T YOU SAY A WORD +3005-163399-0018-1172: I SEE I WAS IN A FIX NOW +3005-163399-0019-1173: MISSUS PHELPS SHE (JUMPS->JUMPED) FOR HIM AND SAYS +3005-163399-0020-1174: HAS HE COME NO SAYS HER HUSBAND +3005-163399-0021-1175: I CAN'T IMAGINE SAYS THE OLD GENTLEMAN AND I MUST SAY IT MAKES ME DREADFUL UNEASY +3005-163399-0022-1176: UNEASY SHE SAYS I'M READY TO GO DISTRACTED +3005-163399-0023-1177: HE MUST (A->HAVE) COME AND YOU'VE MISSED HIM ALONG THE ROAD +3005-163399-0024-1178: OH DON'T DISTRESS ME ANY MORE'N I'M ALREADY DISTRESSED +3005-163399-0025-1179: WHY SILAS LOOK YONDER UP THE ROAD (AIN'T->HAIN'T) THAT SOMEBODY (COMING->COMIN) +3005-163399-0026-1180: THE OLD GENTLEMAN STARED AND SAYS +3005-163399-0027-1181: I HAIN'T NO IDEA WHO IS IT +3005-163399-0028-1182: (IT'S->IS) TOM SAWYER +3005-163399-0029-1183: BEING TOM SAWYER WAS EASY AND COMFORTABLE AND (IT STAYED->ITS STATE) EASY AND COMFORTABLE TILL BY AND BY I HEAR A STEAMBOAT COUGHING ALONG DOWN THE RIVER +3005-163399-0030-1184: THEN I SAYS TO MYSELF S'POSE TOM SAWYER COMES DOWN ON (THAT->MY) BOAT +3080-5032-0000-312: BUT I AM HUGELY PLEASED THAT YOU HAVE SEEN MY LADY +3080-5032-0001-313: I KNEW YOU COULD NOT CHOOSE BUT LIKE HER BUT YET LET ME TELL YOU YOU HAVE SEEN BUT THE WORST OF HER +3080-5032-0002-314: HER CONVERSATION HAS MORE CHARMS THAN CAN BE IN MERE BEAUTY AND (HER->A) HUMOUR AND DISPOSITION WOULD MAKE A DEFORMED PERSON APPEAR LOVELY +3080-5032-0003-315: WHY DID YOU NOT SEND ME THAT NEWS AND A GARLAND +3080-5032-0004-316: (WELL->WHY) THE BEST (ON'T->ON IT) IS (*->THAT) I HAVE A SQUIRE NOW THAT IS AS GOOD AS A KNIGHT +3080-5032-0005-317: IN EARNEST WE 
HAVE HAD SUCH A SKIRMISH (AND UPON->IN A POINT) SO FOOLISH AN OCCASION AS I CANNOT TELL WHICH IS (STRANGEST->STRANGERS) +3080-5032-0006-318: ALL THE PEOPLE THAT I HAD EVER IN MY LIFE REFUSED WERE BROUGHT AGAIN UPON THE STAGE LIKE RICHARD THE (THREE S->THIRD) GHOSTS TO REPROACH ME WITHAL (AND->IN) ALL THE KINDNESS HIS DISCOVERIES COULD MAKE I HAD FOR YOU WAS (LAID->LATE) TO MY CHARGE +3080-5032-0007-319: MY BEST QUALITIES IF I HAVE ANY THAT ARE GOOD SERVED BUT FOR AGGRAVATIONS OF MY FAULT AND I WAS ALLOWED TO HAVE WIT AND UNDERSTANDING AND DISCRETION IN OTHER THINGS THAT IT MIGHT APPEAR I HAD NONE IN THIS +3080-5032-0008-320: TIS A STRANGE CHANGE AND I AM VERY SORRY FOR IT BUT I'LL SWEAR I KNOW NOT HOW TO HELP IT +3080-5032-0009-321: MISTER FISH IS (THE->A) SQUIRE OF DAMES AND HAS SO MANY MISTRESSES (THAT->THAN) ANYBODY MAY PRETEND (A->TO) SHARE IN HIM AND BE BELIEVED BUT THOUGH I HAVE THE (HONOUR->HONOR) TO BE HIS NEAR NEIGHBOUR TO SPEAK FREELY I CANNOT BRAG MUCH THAT HE MAKES ANY COURT TO ME AND I KNOW NO YOUNG WOMAN IN THE COUNTRY THAT HE DOES NOT VISIT OFTEN +3080-5032-0010-322: I THINK MY YOUNGEST BROTHER COMES DOWN WITH HIM +3080-5032-0011-323: I CAN NO SOONER GIVE YOU SOME LITTLE HINTS (WHEREABOUTS->WHEREABOUT) THEY LIVE BUT YOU KNOW THEM PRESENTLY AND I MEANT YOU SHOULD BE BEHOLDING TO ME FOR YOUR ACQUAINTANCE +3080-5032-0012-324: BUT IT SEEMS THIS GENTLEMAN IS NOT SO EASY ACCESS BUT YOU MAY ACKNOWLEDGE SOMETHING DUE TO ME IF I INCLINE HIM TO LOOK GRACIOUSLY UPON YOU AND THEREFORE THERE IS NOT MUCH HARM DONE +3080-5032-0013-325: I HAVE MISSED FOUR FITS AND (*->HAVE) HAD BUT FIVE AND HAVE RECOVERED SO MUCH STRENGTH AS MADE ME VENTURE TO MEET YOUR LETTER ON WEDNESDAY A MILE FROM HOME +3080-5032-0014-326: BUT BESIDES I CAN GIVE YOU OTHERS +3080-5032-0015-327: I AM HERE MUCH MORE OUT OF PEOPLE'S WAY THAN IN TOWN WHERE MY (AUNT AND->AUNTS IN) SUCH (AS->HAS) PRETEND AN INTEREST IN ME AND A POWER OVER ME DO SO PERSECUTE ME (WITH THEIR->MY DEAR) GOOD NATURE (AND->YOU'LL) TAKE IT SO ILL THAT THEY ARE NOT ACCEPTED AS I WOULD LIVE IN A HOLLOW TREE TO AVOID THEM +3080-5032-0016-328: YOU WILL THINK HIM ALTERED AND IF IT BE POSSIBLE MORE MELANCHOLY THAN HE WAS +3080-5032-0017-329: IF MARRIAGE AGREES NO BETTER WITH OTHER PEOPLE THAN IT DOES WITH HIM I SHALL PRAY THAT ALL MY FRIENDS MAY (SCAPE->ESCAPE) IT +3080-5032-0018-330: WELL IN EARNEST IF I WERE A PRINCE THAT LADY SHOULD BE MY MISTRESS BUT I CAN GIVE NO RULE TO ANY ONE ELSE AND PERHAPS THOSE THAT ARE IN NO DANGER OF LOSING THEIR HEARTS TO HER MAY BE INFINITELY TAKEN WITH ONE I SHOULD NOT VALUE AT ALL FOR SO SAYS THE JUSTINIAN WISE PROVIDENCE HAS ORDAINED IT THAT BY THEIR DIFFERENT (HUMOURS->HUMANS) EVERYBODY MIGHT FIND SOMETHING TO PLEASE THEMSELVES WITHAL WITHOUT ENVYING THEIR NEIGHBOURS +3080-5032-0019-331: THE MATTER IS NOT (GREAT->GREEN) FOR I CONFESS I DO NATURALLY HATE THE NOISE AND TALK OF THE WORLD AND SHOULD BE BEST PLEASED NEVER TO BE KNOWN (IN'T->IN) UPON ANY OCCASION WHATSOEVER YET SINCE IT CAN NEVER BE WHOLLY AVOIDED ONE MUST SATISFY ONESELF BY DOING NOTHING THAT ONE NEED CARE WHO KNOWS +3080-5032-0020-332: IF I HAD A PICTURE THAT WERE FIT FOR YOU YOU SHOULD HAVE IT +3080-5032-0021-333: HOW CAN YOU TALK OF DEFYING FORTUNE NOBODY LIVES WITHOUT IT AND THEREFORE WHY SHOULD YOU IMAGINE YOU COULD +3080-5032-0022-334: I KNOW NOT HOW MY BROTHER COMES TO BE SO WELL INFORMED AS YOU SAY BUT I AM CERTAIN HE KNOWS THE UTMOST OF THE INJURIES YOU HAVE RECEIVED FROM HER +3080-5032-0023-335: WE HAVE HAD ANOTHER DEBATE BUT MUCH MORE CALMLY 
+3080-5032-0024-336: AND BESIDES THERE WAS A TIME WHEN WE OURSELVES WERE INDIFFERENT TO ONE ANOTHER DID I DO SO THEN OR HAVE I LEARNED IT SINCE +3080-5032-0025-337: I HAVE BEEN STUDYING HOW TOM (CHEEKE->CHEEK) MIGHT COME BY HIS INTELLIGENCE AND I (VERILY->VERY) BELIEVE HE HAS IT FROM MY COUSIN PETERS +3080-5032-0026-338: HOW KINDLY DO I TAKE (THESE->THE) CIVILITIES OF YOUR (FATHER'S->FATHERS) IN EARNEST YOU CANNOT IMAGINE HOW HIS LETTER PLEASED ME +3080-5040-0000-278: WOULD IT WOULD LEAVE ME AND THEN I COULD BELIEVE I SHALL NOT ALWAYS HAVE OCCASION FOR IT +3080-5040-0001-279: MY POOR LADY (VAVASOUR->VAVASOR) IS (CARRIED TO THE->CHARACTERED A) TOWER (AND->IN) HER GREAT BELLY COULD NOT EXCUSE HER BECAUSE SHE WAS ACQUAINTED BY SOMEBODY THAT THERE WAS A PLOT AGAINST THE PROTECTOR (AND->ANNE) DID NOT DISCOVER IT +3080-5040-0002-280: SHE HAS TOLD NOW ALL THAT WAS TOLD HER BUT VOWS SHE WILL NEVER SAY FROM WHENCE SHE HAD IT WE SHALL SEE WHETHER HER RESOLUTIONS ARE AS UNALTERABLE AS THOSE OF MY LADY (TALMASH->THOMMISH) +3080-5040-0003-281: I WONDER HOW SHE BEHAVED HERSELF WHEN SHE WAS MARRIED +3080-5040-0004-282: I NEVER SAW ANY ONE YET THAT DID NOT LOOK SIMPLY AND OUT OF COUNTENANCE NOR EVER KNEW A WEDDING WELL DESIGNED BUT ONE AND THAT WAS OF TWO PERSONS WHO (HAD->AT) TIME ENOUGH I CONFESS TO CONTRIVE IT AND NOBODY TO PLEASE (IN'T->IN) BUT THEMSELVES +3080-5040-0005-283: THE TRUTH IS I COULD NOT ENDURE TO BE MISSUS BRIDE IN A PUBLIC WEDDING TO BE MADE THE HAPPIEST PERSON ON EARTH +3080-5040-0006-284: DO NOT TAKE IT ILL FOR I WOULD ENDURE IT IF I COULD RATHER THAN FAIL BUT IN EARNEST I DO NOT THINK IT WERE POSSIBLE FOR ME +3080-5040-0007-285: YET IN EARNEST YOUR FATHER WILL NOT FIND MY BROTHER PEYTON WANTING IN CIVILITY THOUGH HE IS NOT A MAN OF MUCH COMPLIMENT UNLESS IT BE IN HIS (LETTERS->LETTER) TO ME (NOR->NO) AN UNREASONABLE PERSON IN ANYTHING SO HE WILL ALLOW HIM OUT OF HIS KINDNESS TO HIS WIFE TO SET A HIGHER VALUE UPON HER SISTER THAN SHE DESERVES +3080-5040-0008-286: MY AUNT TOLD ME NO LONGER (AGONE->GONE) THAN YESTERDAY THAT I WAS THE MOST WILFUL WOMAN THAT EVER SHE KNEW AND HAD AN OBSTINACY OF SPIRIT NOTHING COULD OVERCOME TAKE HEED +3080-5040-0009-287: YOU SEE I GIVE YOU FAIR WARNING +3080-5040-0010-288: BY THE NEXT I SHALL BE GONE INTO KENT AND MY OTHER JOURNEY IS LAID ASIDE WHICH I AM NOT DISPLEASED AT BECAUSE IT WOULD HAVE BROKEN OUR INTERCOURSE VERY MUCH +3080-5040-0011-289: HERE ARE SOME VERSES OF (COWLEY'S->CARLIS) TELL ME HOW YOU LIKE THEM +3080-5040-0012-290: I TOLD YOU IN MY LAST THAT MY SUFFOLK JOURNEY WAS LAID ASIDE AND THAT INTO KENT HASTENED +3080-5040-0013-291: IF I DROWN BY THE WAY THIS WILL BE MY LAST LETTER AND LIKE A WILL I BEQUEATH ALL MY KINDNESS TO YOU IN IT WITH A CHARGE NEVER TO BESTOW (IT->AT) ALL UPON ANOTHER MISTRESS LEST MY GHOST RISE AGAIN AND HAUNT YOU +3080-5040-0014-292: INDEED I LIKE HIM EXTREMELY AND HE IS COMMENDED TO ME BY PEOPLE THAT KNOW HIM VERY WELL AND ARE ABLE TO JUDGE FOR A MOST EXCELLENT SERVANT AND FAITHFUL AS POSSIBLE +3080-5040-0015-293: BECAUSE YOU FIND FAULT WITH MY OTHER LETTERS THIS IS LIKE TO BE SHORTER THAN THEY I DID NOT INTEND IT SO THOUGH I CAN ASSURE YOU +3080-5040-0016-294: I DO NOT FIND IT THOUGH I AM TOLD I WAS SO EXTREMELY WHEN I BELIEVED YOU LOVED ME +3080-5040-0017-295: BUT I AM CALLED UPON +3080-5040-0018-296: DIRECTED FOR YOUR MASTER +3080-5040-0019-297: I SEE YOU CAN (CHIDE->CHID) WHEN YOU PLEASE AND WITH AUTHORITY BUT I DESERVE IT I CONFESS AND ALL I CAN SAY FOR MYSELF IS THAT MY FAULT PROCEEDED FROM A VERY GOOD 
PRINCIPLE IN ME +3080-5040-0020-298: WE DARE NOT LET OUR TONGUES LIE MORE (ON->AND) ONE SIDE OF OUR (MOUTHS->MOTHS) THAN (T'OTHER->THE OTHER) FOR FEAR OF OVERTURNING IT +3080-5040-0021-299: YOU ARE SATISFIED I HOPE (ERE->IF) THIS THAT I (SCAPED->ESCAPED) DROWNING +3080-5040-0022-300: BUT I AM TROUBLED MUCH YOU SHOULD MAKE SO ILL A JOURNEY TO SO LITTLE PURPOSE INDEED I (WRIT->WRITE) BY THE FIRST POST AFTER MY ARRIVAL HERE AND CANNOT IMAGINE HOW YOU CAME TO MISS OF MY LETTERS +3080-5040-0023-301: (HOW->OH) WELCOME YOU WILL BE BUT ALAS +3080-5040-0024-302: FOR MY LIFE I CANNOT BEAT INTO THEIR HEADS A PASSION THAT MUST BE SUBJECT TO NO DECAY (AN->AND) EVEN PERFECT KINDNESS THAT MUST LAST PERPETUALLY WITHOUT THE LEAST INTERMISSION +3080-5040-0025-303: THEY LAUGH TO HEAR ME SAY THAT ONE UNKIND WORD WOULD DESTROY ALL THE SATISFACTION OF MY LIFE AND THAT I SHOULD EXPECT OUR KINDNESS SHOULD INCREASE EVERY DAY IF IT WERE POSSIBLE BUT NEVER LESSEN +3080-5040-0026-304: WE GO ABROAD ALL DAY AND PLAY ALL NIGHT AND SAY (OUR PRAYERS->I'LL PRAY AS) WHEN WE HAVE TIME +3080-5040-0027-305: (WELL->WHILE) IN SOBER EARNEST NOW I WOULD NOT LIVE THUS A (TWELVEMONTH->TWELVE MONTHS) TO GAIN ALL THAT (THE->*) KING HAS LOST UNLESS IT WERE TO GIVE IT HIM AGAIN +3080-5040-0028-306: WILL YOU BE SO GOOD NATURED +3080-5040-0029-307: HE HAS ONE SON AND TIS THE FINEST BOY THAT (E'ER->ERE) YOU SAW AND HAS A NOBLE SPIRIT BUT YET STANDS IN THAT AWE OF HIS FATHER THAT ONE WORD FROM HIM IS AS MUCH AS TWENTY WHIPPINGS +3080-5040-0030-308: YOU MUST GIVE ME LEAVE TO ENTERTAIN (YOU THUS->YOURSELVES) WITH DISCOURSES OF THE FAMILY FOR I CAN TELL YOU NOTHING ELSE FROM HENCE +3080-5040-0031-309: NOT TO KNOW WHEN YOU (WOULD->HAD) COME HOME I CAN ASSURE YOU (NOR->NO) FOR ANY OTHER OCCASION (OF->ON) MY OWN BUT WITH A COUSIN OF MINE THAT HAD LONG DESIGNED TO MAKE HERSELF SPORT WITH HIM AND DID NOT MISS OF HER AIM +3080-5040-0032-310: IN MY LIFE I NEVER HEARD SO RIDICULOUS A DISCOURSE AS HE MADE US AND NO OLD WOMAN WHO (PASSES->PAUSES) FOR A WITCH COULD HAVE BEEN MORE PUZZLED TO SEEK WHAT TO SAY TO REASONABLE PEOPLE THAN HE WAS +3080-5040-0033-311: EVER SINCE THIS ADVENTURE I HAVE HAD SO GREAT A BELIEF IN ALL THINGS OF THIS NATURE THAT I COULD NOT FORBEAR LAYING A (PEAS COD->PEASE COT) WITH NINE PEAS (IN'T->INTO) UNDER MY DOOR YESTERDAY (AND->IT) WAS INFORMED BY IT THAT MY HUSBAND'S NAME SHOULD BE THOMAS HOW DO YOU LIKE THAT +3331-159605-0000-695: SHE PULLED HER HAIR DOWN TURNED (HER SKIRT->HIS GOOD) BACK PUT HER FEET ON THE FENDER AND TOOK (PUTTEL->PATTERN) INTO HER LAP ALL OF WHICH ARRANGEMENTS SIGNIFIED THAT SOMETHING VERY IMPORTANT HAD GOT TO BE THOUGHT OVER AND SETTLED +3331-159605-0001-696: THE MORE PROPOSALS THE MORE CREDIT +3331-159605-0002-697: (I VE->I'VE) TRIED IT AND LIKED IT AND MAYBE THIS IS THE CONSEQUENCE OF THAT NIGHT'S FUN +3331-159605-0003-698: JUST SUPPOSE IT IS TRUE THAT HE DOES ASK ME AND I SAY YES +3331-159605-0004-699: WHAT A SPITEFUL THING I AM +3331-159605-0005-700: I COULD DO SO MUCH FOR ALL AT HOME HOW I SHOULD ENJOY THAT +3331-159605-0006-701: LET ME SEE HOW CAN I BEGIN +3331-159605-0007-702: HE HAS KNOWN HER ALL HER LIFE AND HAS A GOOD INFLUENCE OVER HER +3331-159605-0008-703: NOW AS POLLY WAS BY NO MEANS A PERFECT CREATURE I AM FREE TO CONFESS THAT THE OLD TEMPTATION ASSAILED HER MORE THAN ONCE (THAT->THE) WEEK FOR WHEN THE FIRST EXCITEMENT OF THE DODGING REFORM HAD SUBSIDED SHE MISSED THE PLEASANT LITTLE INTERVIEWS THAT USED TO PUT A CERTAIN (FLAVOR->FLAVOUR) OF ROMANCE INTO HER DULL HARD WORKING DAYS 
+3331-159605-0009-704: I DON'T THINK IT WAS HIS WEALTH (*->THE) ACCOMPLISHMENTS (OR POSITION->OPPOSITION) THAT MOST ATTRACTED POLLY THOUGH THESE DOUBTLESS POSSESSED A GREATER INFLUENCE THAN SHE SUSPECTED +3331-159605-0010-705: IT WAS THAT INDESCRIBABLE SOMETHING WHICH WOMEN ARE QUICK TO SEE AND FEEL IN MEN WHO HAVE BEEN BLESSED (WITH->THE) WISE AND GOOD MOTHERS +3331-159605-0011-706: THIS HAD AN ESPECIAL CHARM TO POLLY FOR SHE SOON FOUND THAT THIS (SIDE->SIGHT) OF HIS CHARACTER WAS NOT SHOWN TO EVERY ONE +3331-159605-0012-707: LATELY THIS HAD CHANGED ESPECIALLY TOWARDS POLLY AND IT (FLATTERED->FURTHER) HER MORE THAN SHE WOULD CONFESS EVEN TO HERSELF +3331-159605-0013-708: AT FIRST SHE TRIED TO THINK SHE COULD BUT UNFORTUNATELY HEARTS ARE SO CONTRARY THAT THEY WON'T BE OBEDIENT TO REASON WILL OR EVEN (GRATITUDE->CREDITUDE) +3331-159605-0014-709: POLLY FELT A VERY CORDIAL FRIENDSHIP FOR MISTER SYDNEY BUT NOT ONE PARTICLE OF THE (LOVE WHICH IS->LAW PITCHED) THE ONLY COIN IN WHICH LOVE CAN BE TRULY PAID +3331-159605-0015-710: THIS FINISHED POLLY'S INDECISION AND AFTER THAT NIGHT SHE NEVER ALLOWED HERSELF TO DWELL UPON THE PLEASANT TEMPTATION WHICH CAME IN A (GUISE->GUY'S) PARTICULARLY ATTRACTIVE TO A YOUNG GIRL (WITH A SPICE->BUT THE SPIES) OF THE OLD EVE (IN->AND) HER COMPOSITION +3331-159605-0016-711: WHEN (SATURDAY->SAID) CAME POLLY STARTED AS USUAL FOR A VISIT TO BECKY AND BESS BUT (COULD N'T->COULDN'T) RESIST STOPPING AT THE (SHAWS->SHORES) TO LEAVE A LITTLE PARCEL FOR FAN (THOUGH IT->THAT) WAS CALLING TIME +3331-159605-0017-712: A FOOLISH LITTLE SPEECH TO MAKE TO A (DOG->DARK) BUT YOU SEE POLLY WAS ONLY A TENDER HEARTED GIRL TRYING TO DO HER DUTY +3331-159605-0018-713: TAKE HOLD OF (MASTER CHARLEY'S->MASSR CHARLIE'S) HAND MISS (MAMIE->MAY) AND (WALK->BUCK) PRETTY LIKE (WILLY->BILLY) AND (FLOSSY->FLOSSIE) SAID THE (MAID->MATE) +3331-159605-0019-714: (AT->*) A (STREET->DISTRICT) CORNER A BLACK EYED (SCHOOL BOY->SCHOOLBOY) WAS PARTING FROM A ROSY FACED SCHOOL GIRL WHOSE MUSIC ROLL HE WAS RELUCTANTLY SURRENDERING +3331-159605-0020-715: HOW HE GOT THERE WAS NEVER VERY CLEAR TO POLLY BUT THERE HE WAS FLUSHED AND A LITTLE OUT OF BREATH BUT LOOKING SO GLAD TO SEE HER (THAT->TILL) SHE HAD (N'T->NOT) THE HEART TO BE STIFF AND COOL AS SHE HAD FULLY INTENDED TO BE WHEN THEY MET +3331-159605-0021-716: SHE REALLY COULD (N'T->NOT) HELP IT IT WAS SO PLEASANT TO SEE HIM AGAIN JUST WHEN SHE WAS FEELING SO LONELY +3331-159605-0022-717: THAT IS THE WAY I GET TO THE (ROTHS->WORSE) ANSWERED POLLY +3331-159605-0023-718: SHE DID NOT MEAN TO TELL BUT HIS FRANKNESS WAS (SO->TO) AGREEABLE SHE FORGOT HERSELF +3331-159605-0024-719: BUT I KNOW HER BETTER AND I ASSURE YOU THAT SHE (DOES IMPROVE->DOESN'T PROVE) SHE TRIES TO (MEND HER->MEAN TO) FAULTS THOUGH SHE WON'T OWN IT AND WILL SURPRISE YOU SOME DAY BY THE AMOUNT OF HEART AND SENSE AND GOODNESS SHE HAS GOT +3331-159605-0025-720: THANK YOU NO +3331-159605-0026-721: (HOW->HER) LOVELY THE PARK LOOKS SHE SAID IN GREAT CONFUSION +3331-159605-0027-722: ASKED THE ARTFUL YOUNG MAN LAYING A TRAP INTO WHICH POLLY IMMEDIATELY FELL +3331-159605-0028-723: HE WAS QUICKER TO TAKE A HINT THAN SHE HAD EXPECTED AND BEING BOTH PROUD AND GENEROUS (RESOLVED->WE SOFT) TO SETTLE THE MATTER AT ONCE FOR POLLY'S SAKE AS WELL AS HIS OWN +3331-159605-0029-724: SO WHEN SHE MADE HER LAST (BRILLIANT->BUOYANT) REMARK HE SAID QUIETLY WATCHING HER FACE KEENLY ALL THE WHILE I THOUGHT SO WELL (I M->I'M) GOING OUT OF TOWN ON BUSINESS FOR SEVERAL WEEKS SO YOU CAN ENJOY YOUR LITTLE BIT OF COUNTRY 
WITHOUT BEING ANNOYED BY ME (ANNOYED->ANNOY IT) +3331-159605-0030-725: SHE THOUGHT SHE HAD A GOOD DEAL OF THE COQUETTE IN HER AND (I VE->I'VE) NO DOUBT THAT WITH TIME AND TRAINING SHE WOULD HAVE BECOME A VERY DANGEROUS LITTLE PERSON BUT NOW SHE WAS FAR (TOO->TO) TRANSPARENT AND STRAIGHTFORWARD BY NATURE EVEN TO TELL A (WHITE LIE CLEVERLY->WIDE LIKE LEVILY) +3331-159605-0031-726: HE WAS GONE BEFORE SHE COULD DO ANYTHING BUT LOOK UP AT HIM WITH A REMORSEFUL FACE AND SHE WALKED ON FEELING THAT THE FIRST AND PERHAPS THE ONLY LOVER SHE WOULD EVER HAVE HAD READ HIS ANSWER AND ACCEPTED (IT->*) IN SILENCE +3331-159605-0032-727: POLLY DID NOT RETURN TO HER (FAVORITE->FAVOURITE) WALK TILL SHE LEARNED (FROM->FOR) MINNIE THAT UNCLE HAD REALLY LEFT TOWN AND THEN SHE FOUND THAT HIS FRIENDLY COMPANY AND CONVERSATION WAS WHAT HAD MADE THE WAY SO PLEASANT AFTER ALL +3331-159605-0033-728: (WAGGING->WORKING) TO AND FRO AS USUAL WHAT'S THE NEWS WITH YOU +3331-159605-0034-729: PERHAPS (SHE LL JILT->SHE'LL CHILLED) HIM +3331-159605-0035-730: UTTERLY DONE WITH AND LAID UPON THE SHELF +3331-159605-0036-731: (MINNIE->MANY) SAID THE OTHER DAY SHE WISHED SHE WAS A PIGEON SO SHE COULD PADDLE IN THE (PUDDLES->POTTLES) AND NOT FUSS ABOUT (RUBBERS->WRAPPERS) +3331-159605-0037-732: NOW DON'T BE AFFECTED POLLY BUT JUST TELL ME LIKE A DEAR HAS (N'T->NOT) HE PROPOSED +3331-159605-0038-733: DON'T YOU THINK HE MEANS TO +3331-159605-0039-734: TRULY (TRULY->JULIE) FAN +3331-159605-0040-735: I DON'T MEAN TO BE PRYING BUT I REALLY THOUGHT HE DID +3331-159605-0041-736: WELL I ALWAYS MEANT TO TRY IT IF I GOT A CHANCE AND I HAVE +3331-159605-0042-737: I JUST GAVE HIM A HINT AND HE TOOK IT +3331-159605-0043-738: HE MEANT TO GO AWAY BEFORE THAT SO DON'T THINK HIS HEART IS BROKEN (OR->OH) MIND WHAT (SILLY TATTLERS->DITTY TEDLER) SAY +3331-159605-0044-739: HE UNDERSTOOD AND BEING A GENTLEMAN MADE NO FUSS +3331-159605-0045-740: BUT POLLY IT WOULD HAVE BEEN A GRAND THING FOR YOU +3331-159605-0046-741: (I M ODD->I'M NOT) YOU KNOW (AND->I'M) PREFER TO BE AN INDEPENDENT SPINSTER AND TEACH MUSIC ALL MY DAYS +3331-159609-0000-742: NEVER MIND WHAT THE BUSINESS WAS IT (SUFFICES->SURFACES) TO SAY THAT IT WAS A GOOD BEGINNING FOR A YOUNG MAN LIKE TOM WHO HAVING BEEN BORN AND BRED IN THE MOST CONSERVATIVE CLASS OF THE MOST CONCEITED CITY IN NEW ENGLAND NEEDED JUST THE HEALTHY HEARTY SOCIAL INFLUENCES OF THE WEST TO WIDEN HIS VIEWS AND MAKE A MAN OF HIM +3331-159609-0001-743: FORTUNATELY EVERY ONE WAS SO BUSY WITH THE NECESSARY PREPARATIONS THAT THERE WAS NO TIME FOR (ROMANCE->ROMANS) OF ANY SORT AND THE FOUR YOUNG PEOPLE WORKED TOGETHER AS SOBERLY AND SENSIBLY AS IF ALL SORTS OF EMOTIONS WERE NOT (BOTTLED->BOTHERED) UP IN THEIR RESPECTIVE HEARTS +3331-159609-0002-744: PITY THAT THE END SHOULD COME SO SOON BUT THE HOUR DID ITS WORK AND (WENT->WHEN) ITS WAY LEAVING A CLEARER ATMOSPHERE BEHIND (THOUGH->THAN) THE YOUNG FOLKS DID NOT SEE IT THEN FOR THEIR EYES WERE DIM BECAUSE OF THE (PARTINGS THAT->PARTING STEP) MUST BE +3331-159609-0003-745: IF IT HAD NOT BEEN FOR TWO THINGS I FEAR SHE NEVER WOULD HAVE STOOD A SUMMER IN TOWN BUT SYDNEY OFTEN CALLED (TILL->TO) HIS VACATION CAME AND (A->THE) VOLUMINOUS CORRESPONDENCE WITH POLLY BEGUILED THE LONG DAYS +3331-159609-0004-746: (TOM WROTE ONCE->TUMBLED ONES) A WEEK TO HIS MOTHER BUT (THE LETTERS->THEY LET US) WERE SHORT AND NOT VERY SATISFACTORY FOR MEN NEVER DO TELL THE INTERESTING LITTLE THINGS THAT WOMEN BEST LIKE TO HEAR +3331-159609-0005-747: NO I (M->AM) ONLY TIRED HAD A GOOD DEAL TO DO LATELY AND THE 
(DULL->DOLL) WEATHER MAKES ME JUST A (TRIFLE->TRAVEL) BLUE +3331-159609-0006-748: FORGIVE ME POLLY BUT I CAN'T HELP SAYING IT FOR (IT IS->THIS) THERE AND I WANT TO BE AS TRUE TO YOU AS YOU WERE TO ME IF I CAN +3331-159609-0007-749: I (TRY->TRIED) NOT TO DECEIVE MYSELF BUT IT DOES SEEM AS IF THERE WAS A CHANCE OF HAPPINESS FOR ME +3331-159609-0008-750: THANK HEAVEN FOR THAT +3331-159609-0009-751: CRIED POLLY WITH THE HEARTIEST SATISFACTION IN HER VOICE +3331-159609-0010-752: POOR POLLY WAS SO TAKEN BY SURPRISE THAT SHE HAD NOT A WORD TO SAY +3331-159609-0011-753: NONE WERE NEEDED HER (TELLTALE->TELLS HER) FACE ANSWERED FOR HER AS WELL AS THE IMPULSE WHICH MADE HER HIDE HER HEAD IN THE (SOFA->SILVER) CUSHION LIKE A FOOLISH OSTRICH (WHEN->AND) THE (HUNTERS->HANDLES) ARE AFTER IT +3331-159609-0012-754: ONCE OR TWICE (BUT->THAT) SORT OF (JOKINGLY->CHOKINGLY) AND I THOUGHT IT WAS ONLY SOME LITTLE FLIRTATION +3331-159609-0013-755: IT WAS SO STUPID OF ME NOT TO GUESS BEFORE +3331-159609-0014-756: IT WAS SO TENDER EARNEST AND DEFIANT THAT FANNY FORGOT THE DEFENCE OF HER OWN LOVER (IN->AND) ADMIRATION OF POLLY'S LOYALTY TO HERS FOR THIS FAITHFUL ALL ABSORBING LOVE WAS A (NEW REVELATION->NEWER RELATION) TO FANNY WHO WAS USED TO HEARING HER FRIENDS BOAST OF TWO OR THREE LOVERS A YEAR AND CALCULATE THEIR RESPECTIVE VALUES WITH ALMOST AS MUCH COOLNESS AS THE YOUNG MEN DISCUSSED THE FORTUNES OF THE GIRLS THEY WISHED FOR BUT COULD NOT AFFORD TO MARRY +3331-159609-0015-757: I HOPE MARIA BAILEY IS (ALL HE->ONLY) THINKS HER SHE ADDED SOFTLY FOR I COULD (N'T->NOT) BEAR TO HAVE HIM DISAPPOINTED AGAIN +3331-159609-0016-758: SAID FANNY TURNING HOPEFUL ALL AT ONCE +3331-159609-0017-759: SUPPOSE (I->HER) SAY A WORD TO TOM JUST INQUIRE AFTER HIS HEART IN A GENERAL WAY YOU KNOW AND GIVE HIM A CHANCE TO TELL ME IF (THERE IS->THERE'S) ANYTHING TO TELL +3331-159609-0018-760: BEAR IT PEOPLE ALWAYS DO BEAR THINGS SOMEHOW ANSWERED POLLY LOOKING AS IF SENTENCE HAD BEEN PASSED UPON HER +3331-159609-0019-761: IT WAS A VERY DIFFERENT (WINTER->WINDOW) FROM THE LAST (FOR BOTH->ABOVE) THE GIRLS +3331-159609-0020-762: IF (FANNY->ANY) WANTED TO SHOW HIM WHAT SHE COULD DO TOWARD MAKING A PLEASANT HOME SHE CERTAINLY SUCCEEDED (BETTER THAN->BY THEN) SHE SUSPECTED FOR IN SPITE OF MANY FAILURES AND DISCOURAGEMENTS BEHIND THE SCENES THE LITTLE HOUSE BECAME A MOST ATTRACTIVE PLACE TO MISTER (SYDNEY->SIDNEY) AT LEAST FOR HE WAS MORE THE HOUSE FRIEND THAN EVER AND SEEMED DETERMINED TO PROVE THAT CHANGE OF FORTUNE MADE NO DIFFERENCE TO HIM +3331-159609-0021-763: SHE KEPT MUCH AT HOME (WHEN->IN) THE DAY'S WORK WAS DONE FINDING IT PLEASANTER TO SIT DREAMING (OVER->OF A) BOOK OR (SEWING->SOON) ALONE THAN TO EXERT HERSELF EVEN TO GO TO THE (SHAWS->SHORES) +3331-159609-0022-764: POLLY WAS NOT AT ALL LIKE HERSELF THAT (WINTER->WINDOW) AND THOSE NEAREST TO HER SAW AND (WONDERED->WANTED) AT IT MOST +3331-159609-0023-765: FOR NED WAS SO ABSORBED IN BUSINESS THAT HE IGNORED THE WHOLE (BAILEY->BAILIQUE) QUESTION AND LEFT THEM IN (UTTER->OTHER) DARKNESS +3331-159609-0024-766: (FANNY->THEN HE) CAME WALKING IN UPON HER ONE DAY LOOKING AS IF SHE (BROUGHT TIDINGS->POURED HIDINGS) OF SUCH GREAT JOY THAT SHE HARDLY KNEW HOW TO TELL THEM +3331-159609-0025-767: BUT IF WORK BASKETS WERE GIFTED WITH POWERS OF SPEECH THEY COULD TELL STORIES MORE TRUE AND TENDER THAN ANY WE READ +3528-168656-0000-864: SHE HAD EVEN BEEN IN SOCIETY BEFORE THE REVOLUTION +3528-168656-0001-865: IT WAS HER PLEASURE AND HER VANITY TO DRAG IN THESE NAMES ON EVERY PRETEXT 
+3528-168656-0002-866: EVERY YEAR SHE SOLEMNLY RENEWED HER VOWS AND AT THE MOMENT OF TAKING THE OATH SHE SAID TO THE PRIEST MONSEIGNEUR SAINT (FRANCOIS->FROSOIS) GAVE IT TO MONSEIGNEUR SAINT (JULIEN->JULIAN) MONSEIGNEUR SAINT (JULIEN->JULIAN) GAVE IT TO MONSEIGNEUR SAINT (EUSEBIUS MONSEIGNEUR->EUSIDIUS MONSIEUR) SAINT (EUSEBIUS->EUSIBIUS) GAVE IT TO MONSEIGNEUR SAINT PROCOPIUS ET CETERA ET CETERA +3528-168656-0003-867: AND THE (SCHOOL GIRLS->SCHOOLGIRLS) WOULD BEGIN TO LAUGH NOT IN THEIR SLEEVES BUT UNDER (THEIR->THE) VEILS CHARMING LITTLE STIFLED LAUGHS WHICH MADE THE VOCAL MOTHERS FROWN +3528-168656-0004-868: IT WAS A CENTURY WHICH SPOKE THROUGH HER BUT IT WAS THE EIGHTEENTH CENTURY +3528-168656-0005-869: THE RULE OF (FONTEVRAULT->FONTREVAL) DID NOT FORBID THIS +3528-168656-0006-870: SHE WOULD NOT SHOW (THIS OBJECT->THE SUBJECT) TO (ANYONE->ANY ONE) +3528-168656-0007-871: THUS IT FURNISHED A SUBJECT OF COMMENT FOR ALL THOSE WHO WERE (UNOCCUPIED->ON OCCUPIED) OR BORED IN THE CONVENT +3528-168656-0008-872: SOME (UNIQUE->EUIK) CHAPLET SOME AUTHENTIC RELIC +3528-168656-0009-873: THEY LOST THEMSELVES IN CONJECTURES +3528-168656-0010-874: WHEN THE POOR OLD WOMAN DIED THEY RUSHED TO HER CUPBOARD MORE HASTILY THAN WAS FITTING PERHAPS AND OPENED IT +3528-168656-0011-875: HE IS RESISTING FLUTTERING HIS TINY WINGS AND STILL MAKING AN EFFORT TO FLY BUT THE (DANCER IS->DANCERS) LAUGHING WITH A SATANICAL AIR +3528-168656-0012-876: MORAL LOVE CONQUERED BY THE COLIC +3528-168669-0000-877: THE PRIORESS RETURNED AND SEATED HERSELF ONCE MORE ON HER CHAIR +3528-168669-0001-878: WE WILL PRESENT A STENOGRAPHIC REPORT OF THE DIALOGUE WHICH THEN ENSUED TO THE BEST OF OUR ABILITY +3528-168669-0002-879: FATHER (FAUVENT->VUENT) +3528-168669-0003-880: REVEREND MOTHER DO YOU KNOW THE CHAPEL +3528-168669-0004-881: AND YOU HAVE BEEN IN THE CHOIR IN PURSUANCE OF YOUR DUTIES TWO OR THREE TIMES +3528-168669-0005-882: THERE IS A STONE TO BE RAISED HEAVY +3528-168669-0006-883: THE SLAB OF THE PAVEMENT WHICH IS AT THE (SIDE->THOUGHT) OF THE ALTAR +3528-168669-0007-884: THE (SLAB->FLAP) WHICH CLOSES THE VAULT YES +3528-168669-0008-885: IT WOULD BE A GOOD THING TO HAVE TWO MEN FOR IT +3528-168669-0009-886: A WOMAN IS NEVER A MAN +3528-168669-0010-887: BECAUSE (DOM MABILLON->DON MARVALAN) GIVES FOUR HUNDRED AND SEVENTEEN EPISTLES OF SAINT BERNARD WHILE (MERLONUS HORSTIUS->MERLINUS HORSES) ONLY GIVES THREE HUNDRED AND SIXTY SEVEN I DO NOT DESPISE (MERLONUS HORSTIUS->MERLINA'S HORSES) NEITHER DO I +3528-168669-0011-888: (MERIT->MARRIAGE) CONSISTS IN WORKING ACCORDING TO ONE'S STRENGTH A CLOISTER IS NOT A (DOCK YARD->DOCKYARD) +3528-168669-0012-889: AND A WOMAN IS NOT A MAN BUT MY BROTHER IS THE STRONG ONE THOUGH +3528-168669-0013-890: AND CAN YOU GET A (LEVER->LOVER) +3528-168669-0014-891: THERE IS A RING IN THE STONE +3528-168669-0015-892: I WILL PUT THE LEVER THROUGH IT +3528-168669-0016-893: THAT IS GOOD REVEREND MOTHER I WILL OPEN THE VAULT +3528-168669-0017-894: WILL THAT BE ALL NO +3528-168669-0018-895: GIVE ME YOUR ORDERS VERY REVEREND MOTHER +3528-168669-0019-896: (FAUVENT->FOR THAT) WE HAVE CONFIDENCE IN YOU +3528-168669-0020-897: I AM HERE TO DO ANYTHING YOU WISH +3528-168669-0021-898: AND TO HOLD YOUR PEACE ABOUT EVERYTHING YES (REVEREND->ROBIN) MOTHER +3528-168669-0022-899: WHEN THE (VAULT->WALL) IS OPEN I WILL CLOSE IT AGAIN +3528-168669-0023-900: BUT BEFORE THAT WHAT REVEREND MOTHER +3528-168669-0024-901: FATHER (FAUVENT->FERVENT) REVEREND MOTHER +3528-168669-0025-902: YOU KNOW THAT A MOTHER DIED THIS MORNING 
+3528-168669-0026-903: NO DID YOU NOT HEAR THE BELL +3528-168669-0027-904: NOTHING CAN BE HEARD AT THE BOTTOM OF THE GARDEN REALLY +3528-168669-0028-905: AND THEN THE WIND (IS->DOES) NOT BLOWING IN MY DIRECTION THIS MORNING +3528-168669-0029-906: IT WAS MOTHER CRUCIFIXION +3528-168669-0030-907: THREE YEARS AGO MADAME DE (BETHUNE->BESOON) A (JANSENIST->GENTLEST) TURNED ORTHODOX MERELY FROM HAVING SEEN MOTHER CRUCIFIXION AT PRAYER AH +3528-168669-0031-908: THE MOTHERS HAVE TAKEN HER TO THE DEAD ROOM WHICH OPENS ON THE CHURCH I KNOW +3528-168669-0032-909: A FINE SIGHT IT WOULD BE TO SEE A MAN ENTER THE DEAD ROOM MORE OFTEN +3528-168669-0033-910: HEY MORE OFTEN +3528-168669-0034-911: WHAT DO YOU SAY +3528-168669-0035-912: I SAY MORE OFTEN MORE OFTEN THAN WHAT +3528-168669-0036-913: REVEREND MOTHER I DID NOT SAY MORE OFTEN THAN WHAT I SAID MORE OFTEN +3528-168669-0037-914: BUT I DID NOT SAY MORE OFTEN +3528-168669-0038-915: AT THAT MOMENT NINE O'CLOCK STRUCK +3528-168669-0039-916: AT NINE O'CLOCK IN THE MORNING AND AT ALL HOURS PRAISED AND ADORED (*->TO) BE THE MOST HOLY SACRAMENT OF THE ALTAR SAID THE (PRIORESS->PIRATES) +3528-168669-0040-917: IT CUT MORE OFTEN SHORT +3528-168669-0041-918: FAUCHELEVENT MOPPED HIS FOREHEAD +3528-168669-0042-919: IN HER LIFETIME MOTHER CRUCIFIXION MADE CONVERTS AFTER HER DEATH SHE WILL PERFORM MIRACLES SHE WILL +3528-168669-0043-920: FATHER (FAUVENT->FUVENT) THE COMMUNITY HAS BEEN BLESSED IN MOTHER CRUCIFIXION +3528-168669-0044-921: SHE RETAINED HER CONSCIOUSNESS TO THE VERY LAST MOMENT +3528-168669-0045-922: SHE GAVE US HER LAST COMMANDS +3528-168669-0046-923: IF YOU HAD A LITTLE MORE FAITH AND IF YOU COULD HAVE BEEN IN (HER CELL->HERSELF) SHE WOULD HAVE CURED YOUR LEG MERELY BY TOUCHING IT SHE SMILED +3528-168669-0047-924: THERE WAS SOMETHING OF PARADISE IN THAT DEATH +3528-168669-0048-925: FAUCHELEVENT THOUGHT THAT IT WAS AN ORISON WHICH SHE WAS FINISHING +3528-168669-0049-926: FAUCHELEVENT HELD HIS PEACE SHE WENT ON +3528-168669-0050-927: I HAVE CONSULTED UPON THIS POINT MANY ECCLESIASTICS LABORING IN OUR LORD WHO OCCUPY THEMSELVES IN THE EXERCISES OF THE CLERICAL LIFE AND WHO BEAR WONDERFUL FRUIT +3528-168669-0051-928: FORTUNATELY THE (PRIORESS->PIRASS) COMPLETELY ABSORBED IN HER OWN THOUGHTS DID NOT HEAR IT +3528-168669-0052-929: SHE CONTINUED FATHER (FAUVENT->PROVENCE) +3528-168669-0053-930: YES REVEREND MOTHER +3528-168669-0054-931: SAINT TERENTIUS BISHOP OF PORT WHERE THE MOUTH OF THE TIBER EMPTIES INTO THE SEA REQUESTED THAT ON HIS (TOMB->TWO) MIGHT BE ENGRAVED THE SIGN WHICH WAS PLACED ON THE GRAVES OF (PARRICIDES->PARASITES) IN THE HOPE THAT PASSERS BY WOULD SPIT ON HIS TOMB THIS WAS DONE +3528-168669-0055-932: THE DEAD MUST BE OBEYED SO BE IT +3528-168669-0056-933: FOR THAT MATTER NO REVEREND MOTHER +3528-168669-0057-934: FATHER (FAUVENT->VENT) MOTHER CRUCIFIXION WILL BE INTERRED IN THE COFFIN IN WHICH SHE HAS SLEPT FOR THE LAST TWENTY YEARS THAT IS JUST +3528-168669-0058-935: IT IS A CONTINUATION OF HER SLUMBER +3528-168669-0059-936: SO I SHALL HAVE TO NAIL UP THAT COFFIN YES +3528-168669-0060-937: I AM AT THE ORDERS OF THE VERY REVEREND (COMMUNITY->CUNITY) +3528-168669-0061-938: THE (FOUR MOTHER PRECENTORS->FOREMOTHER PRESENTERS) WILL ASSIST YOU +3528-168669-0062-939: NO IN LOWERING THE COFFIN +3528-168669-0063-940: WHERE INTO THE VAULT +3528-168669-0064-941: FAUCHELEVENT STARTED THE VAULT UNDER THE ALTAR +3528-168669-0065-942: UNDER THE ALTAR BUT +3528-168669-0066-943: YOU WILL HAVE AN IRON BAR YES BUT +3528-168669-0067-944: YOU WILL RAISE THE 
STONE WITH THE BAR BY MEANS OF THE RING BUT +3528-168669-0068-945: THE DEAD MUST BE OBEYED TO BE BURIED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL NOT TO GO TO PROFANE EARTH TO REMAIN THERE IN DEATH WHERE SHE PRAYED WHILE LIVING SUCH WAS THE LAST WISH OF MOTHER CRUCIFIXION +3528-168669-0069-946: SHE ASKED IT OF US THAT IS TO SAY COMMANDED US +3528-168669-0070-947: BUT IT IS FORBIDDEN +3528-168669-0071-948: OH I AM A STONE IN YOUR WALLS +3528-168669-0072-949: THINK FATHER (FAUVENT->*) IF SHE WERE TO WORK MIRACLES HERE +3528-168669-0073-950: WHAT A GLORY OF GOD FOR THE COMMUNITY AND MIRACLES ISSUE FROM TOMBS +3528-168669-0074-951: BUT REVEREND MOTHER IF THE AGENT OF THE SANITARY COMMISSION +3528-168669-0075-952: BUT THE COMMISSARY OF POLICE +3528-168669-0076-953: (CHONODEMAIRE->CHATEAU DE MER) ONE OF THE SEVEN GERMAN KINGS WHO ENTERED AMONG THE (GAULS->GULFS) UNDER THE EMPIRE OF CONSTANTIUS EXPRESSLY RECOGNIZED THE RIGHT OF NUNS TO BE BURIED IN RELIGION THAT IS TO SAY BENEATH THE ALTAR +3528-168669-0077-954: THE WORLD IS NOTHING IN THE PRESENCE OF THE CROSS +3528-168669-0078-955: MARTIN THE ELEVENTH GENERAL OF THE CARTHUSIANS GAVE TO HIS ORDER THIS DEVICE STAT (CRUX DUM VOLVITUR ORBIS->CREW DOOM VOLVETER ORBUS) +3528-168669-0079-956: THE (PRIORESS->PYRIUS) WHO WAS USUALLY SUBJECTED TO THE BARRIER OF SILENCE AND WHOSE RESERVOIR WAS (OVERFULL->OVER FULL) ROSE AND EXCLAIMED WITH THE (LOQUACITY->LEQUESTITY) OF A DAM WHICH HAS BROKEN AWAY +3528-168669-0080-957: I HAVE ON MY RIGHT (BENOIT->BENOIS) AND ON MY LEFT BERNARD WHO WAS BERNARD +3528-168669-0081-958: THE FIRST ABBOT OF (CLAIRVAUX->CLERVAL) +3528-168669-0082-959: HIS ORDER HAS PRODUCED FORTY POPES TWO HUNDRED CARDINALS FIFTY PATRIARCHS SIXTEEN HUNDRED ARCHBISHOPS FOUR THOUSAND SIX HUNDRED BISHOPS FOUR EMPERORS TWELVE EMPRESSES FORTY SIX KINGS FORTY ONE QUEENS THREE THOUSAND SIX HUNDRED CANONIZED SAINTS AND HAS BEEN IN EXISTENCE FOR FOURTEEN HUNDRED YEARS +3528-168669-0083-960: ON ONE SIDE SAINT BERNARD ON THE OTHER THE AGENT OF THE (SANITARY->SENATORY) DEPARTMENT +3528-168669-0084-961: GOD SUBORDINATED TO THE (COMMISSARY->COMMISSORY) OF POLICE SUCH (IS->WAS) THE AGE SILENCE (FAUVENT->FAVAN) +3528-168669-0085-962: NO ONE DOUBTS THE RIGHT OF THE MONASTERY (TO->CHOOSE) SEPULTURE +3528-168669-0086-963: ONLY FANATICS AND THOSE IN ERROR DENY IT +3528-168669-0087-964: WE LIVE IN TIMES OF TERRIBLE CONFUSION +3528-168669-0088-965: WE ARE IGNORANT AND IMPIOUS +3528-168669-0089-966: AND THEN RELIGION IS ATTACKED WHY +3528-168669-0090-967: BECAUSE THERE HAVE BEEN BAD PRIESTS BECAUSE (SAGITTAIRE->SAGATURE) BISHOP OF GAP WAS THE BROTHER OF (SALONE->SALON) BISHOP OF (EMBRUN->EMBRON) AND BECAUSE BOTH OF THEM FOLLOWED (MOMMOL->MAMMA) +3528-168669-0091-968: THEY PERSECUTE THE SAINTS +3528-168669-0092-969: THEY SHUT THEIR EYES TO THE TRUTH DARKNESS IS THE RULE +3528-168669-0093-970: THE MOST FEROCIOUS BEASTS ARE BEASTS WHICH ARE BLIND +3528-168669-0094-971: OH HOW WICKED PEOPLE ARE +3528-168669-0095-972: BY ORDER OF THE KING SIGNIFIES TO DAY BY ORDER OF THE REVOLUTION +3528-168669-0096-973: ONE NO LONGER KNOWS WHAT IS DUE TO THE LIVING OR TO THE DEAD A HOLY DEATH IS PROHIBITED +3528-168669-0097-974: (GAUTHIER->GATHIER) BISHOP OF (CHALONS->CALON) HELD HIS OWN IN THIS MATTER AGAINST OTHO DUKE OF BURGUNDY +3528-168669-0098-975: THE (PRIORESS->PRIORS) TOOK BREATH THEN TURNED TO FAUCHELEVENT +3528-168669-0099-976: YOU WILL CLOSE THE COFFIN THE SISTERS WILL CARRY IT TO THE CHAPEL +3528-168669-0100-977: THE OFFICE FOR THE DEAD WILL THEN BE SAID 
+3528-168669-0101-978: BUT SHE WILL HEAR SHE WILL NOT LISTEN +3528-168669-0102-979: BESIDES WHAT THE CLOISTER KNOWS THE WORLD LEARNS NOT +3528-168669-0103-980: A PAUSE (ENSUED->ENSUIT) +3528-168669-0104-981: YOU WILL REMOVE YOUR (BELL->BELT) +3528-168669-0105-982: HAS THE DOCTOR FOR THE DEAD PAID HIS VISIT +3528-168669-0106-983: HE WILL PAY IT AT FOUR O'CLOCK TO DAY +3528-168669-0107-984: THE PEAL WHICH ORDERS THE DOCTOR FOR THE DEAD TO BE SUMMONED HAS ALREADY BEEN RUNG +3528-168669-0108-985: BUT YOU DO NOT UNDERSTAND ANY OF THE PEALS +3528-168669-0109-986: THAT IS WELL FATHER (FAUVENT->VENT) +3528-168669-0110-987: WHERE WILL YOU OBTAIN IT +3528-168669-0111-988: I HAVE MY HEAP OF OLD IRON AT THE BOTTOM OF THE GARDEN +3528-168669-0112-989: REVEREND MOTHER WHAT +3528-168669-0113-990: IF YOU WERE EVER TO HAVE ANY OTHER JOBS OF THIS SORT MY BROTHER IS THE STRONG MAN FOR YOU A PERFECT TURK +3528-168669-0114-991: YOU WILL DO IT AS SPEEDILY AS POSSIBLE +3528-168669-0115-992: I CANNOT WORK VERY FAST I AM INFIRM THAT IS WHY I REQUIRE AN ASSISTANT I LIMP +3528-168669-0116-993: EVERYTHING MUST HAVE BEEN COMPLETED A GOOD QUARTER OF AN HOUR BEFORE THAT +3528-168669-0117-994: I WILL DO ANYTHING TO PROVE MY ZEAL TOWARDS THE COMMUNITY THESE ARE MY ORDERS I AM TO NAIL UP THE COFFIN +3528-168669-0118-995: AT ELEVEN O'CLOCK EXACTLY I AM TO BE IN THE CHAPEL +3528-168669-0119-996: MOTHER ASCENSION WILL BE THERE TWO MEN WOULD BE BETTER +3528-168669-0120-997: HOWEVER NEVER MIND I SHALL HAVE MY (LEVER->LOVER) +3528-168669-0121-998: AFTER WHICH THERE WILL BE NO TRACE OF ANYTHING +3528-168669-0122-999: THE GOVERNMENT WILL HAVE NO SUSPICION +3528-168669-0123-1000: THE EMPTY COFFIN REMAINS THIS PRODUCED A PAUSE +3528-168669-0124-1001: WHAT IS TO BE DONE WITH THAT COFFIN FATHER (FAUVENT->VENT) +3528-168669-0125-1002: IT WILL BE GIVEN TO THE EARTH EMPTY +3528-168669-0126-1003: AH (THE DE->LEDA) EXCLAIMED FAUCHELEVENT +3528-168669-0127-1004: THE (VIL->VILLE) STUCK FAST IN HIS THROAT +3528-168669-0128-1005: HE MADE HASTE TO IMPROVISE AN EXPEDIENT TO MAKE HER FORGET THE OATH +3528-168669-0129-1006: I WILL PUT EARTH IN THE COFFIN REVEREND MOTHER THAT WILL PRODUCE THE EFFECT OF A CORPSE +3528-168669-0130-1007: I WILL MAKE THAT MY SPECIAL BUSINESS +3538-142836-0000-1567: GENERAL OBSERVATIONS ON PRESERVES (CONFECTIONARY->CONFECTIONERY) ICES AND DESSERT DISHES +3538-142836-0001-1568: THE EXPENSE OF PRESERVING THEM WITH SUGAR IS A SERIOUS OBJECTION FOR EXCEPT THE SUGAR IS USED IN CONSIDERABLE (QUANTITIES->QUALITIES) THE SUCCESS IS VERY UNCERTAIN +3538-142836-0002-1569: FRUIT GATHERED IN WET OR FOGGY WEATHER WILL SOON BE (MILDEWED->MELTED) AND BE OF NO SERVICE FOR PRESERVES +3538-142836-0003-1570: BUT TO DISTINGUISH THESE PROPERLY REQUIRES VERY GREAT ATTENTION AND CONSIDERABLE EXPERIENCE +3538-142836-0004-1571: IF YOU DIP THE FINGER INTO THE (SYRUP->SERF) AND APPLY IT TO THE THUMB THE TENACITY OF THE (SYRUP->SERF) WILL ON SEPARATING THE FINGER AND THUMB AFFORD A THREAD WHICH SHORTLY BREAKS THIS IS THE LITTLE THREAD +3538-142836-0005-1572: LET IT BOIL UP AGAIN THEN TAKE IT OFF AND REMOVE CAREFULLY THE SCUM THAT HAS RISEN +3538-142836-0006-1573: IT IS CONSIDERED TO BE SUFFICIENTLY BOILED WHEN SOME TAKEN UP IN A SPOON POURS OUT LIKE OIL +3538-142836-0007-1574: BEFORE SUGAR WAS IN USE HONEY WAS EMPLOYED TO (PRESERVE->PRESENT) MANY VEGETABLE PRODUCTIONS THOUGH THIS SUBSTANCE (HAS->IS) NOW GIVEN WAY TO THE JUICE OF THE SUGAR CANE +3538-142836-0008-1575: FOURTEEN NINETY NINE +3538-142836-0009-1576: BOIL THEM UP THREE DAYS SUCCESSIVELY 
SKIMMING EACH TIME AND THEY WILL THEN BE FINISHED AND IN A STATE FIT TO BE PUT INTO POTS FOR USE +3538-142836-0010-1577: THE REASON WHY THE FRUIT IS EMPTIED OUT OF THE PRESERVING PAN INTO (AN->OUR) EARTHEN PAN IS THAT THE ACID OF THE FRUIT ACTS UPON THE COPPER OF WHICH THE PRESERVING PANS ARE USUALLY MADE +3538-142836-0011-1578: FROM THIS EXAMPLE THE PROCESS OF PRESERVING FRUITS BY SYRUP (WILL->WOULD) BE EASILY COMPREHENDED +3538-142836-0012-1579: THEY SHOULD BE DRIED IN THE STOVE OR OVEN ON A (SIEVE->SEA) AND TURNED EVERY SIX OR EIGHT HOURS FRESH POWDERED SUGAR BEING SIFTED OVER THEM EVERY TIME THEY (ARE TURNED->RETURNED) +3538-142836-0013-1580: IN THIS WAY IT IS ALSO THAT ORANGE AND (LEMON CHIPS->LINENSHIPS) ARE PRESERVED +3538-142836-0014-1581: MARMALADES JAMS AND FRUIT (PASTES->PACE) ARE OF THE SAME NATURE AND ARE NOW IN VERY GENERAL (REQUEST->QUEST) +3538-142836-0015-1582: (MARMALADES->MARMAL ETS) AND JAMS DIFFER LITTLE FROM EACH OTHER (THEY ARE->THEIR) PRESERVES OF (A->*) HALF LIQUID CONSISTENCY MADE BY BOILING THE PULP OF FRUITS AND SOMETIMES PART OF THE RINDS WITH SUGAR +3538-142836-0016-1583: THAT THEY MAY KEEP IT IS NECESSARY NOT TO BE SPARING OF SUGAR FIFTEEN O THREE +3538-142836-0017-1584: IN ALL THE OPERATIONS FOR PRESERVE MAKING WHEN THE PRESERVING PAN IS USED IT SHOULD NOT BE PLACED ON THE FIRE BUT ON A (TRIVET->TRIBUT) UNLESS THE JAM IS MADE ON A HOT PLATE WHEN THIS IS NOT NECESSARY +3538-142836-0018-1585: (CONFECTIONARY->CONFECTIONERY) FIFTEEN O EIGHT +3538-142836-0019-1586: IN SPEAKING OF (CONFECTIONARY IT->CONFECTIONERIES) SHOULD BE REMARKED THAT ALL THE VARIOUS PREPARATIONS ABOVE NAMED COME STRICTLY SPEAKING UNDER THAT HEAD FOR THE VARIOUS FRUITS FLOWERS HERBS (ROOTS->ROOFS) AND JUICES WHICH (WHEN->ONE) BOILED WITH SUGAR WERE FORMERLY EMPLOYED IN PHARMACY AS WELL AS FOR SWEETMEATS WERE CALLED CONFECTIONS FROM THE LATIN WORD (CONFICERE->CONFERS) TO MAKE UP BUT THE TERM CONFECTIONARY EMBRACES A VERY LARGE CLASS INDEED OF SWEET FOOD MANY KINDS OF WHICH SHOULD NOT BE ATTEMPTED IN THE ORDINARY (CUISINE->COUISINE) +3538-142836-0020-1587: THE THOUSAND AND ONE ORNAMENTAL DISHES THAT ADORN THE TABLES OF THE WEALTHY SHOULD BE PURCHASED FROM THE CONFECTIONER THEY CANNOT PROFITABLY BE MADE AT HOME +3538-142836-0021-1588: HOWEVER AS LATE AS THE (REIGNS->REIGN) OF OUR TWO LAST GEORGES FABULOUS SUMS WERE OFTEN EXPENDED UPON FANCIFUL (DESSERTS->DESERTS) +3538-142836-0022-1589: THE SHAPE OF THE DISHES VARIES AT DIFFERENT PERIODS THE PREVAILING FASHION AT PRESENT BEING OVAL AND CIRCULAR DISHES ON STEMS +3538-142836-0023-1590: (ICES->ISIS) +3538-142836-0024-1591: (AT->A) DESSERTS OR AT SOME EVENING PARTIES (ICES->IISES) ARE SCARCELY TO BE DISPENSED WITH +3538-142836-0025-1592: THE (SPADDLE->SPADEL) IS GENERALLY MADE OF COPPER KEPT BRIGHT AND CLEAN +3538-142836-0026-1593: THEY SHOULD BE TAKEN IMMEDIATELY AFTER THE REPAST OR SOME HOURS AFTER BECAUSE THE TAKING (*->OF) THESE SUBSTANCES DURING THE PROCESS OF DIGESTION IS APT TO PROVOKE INDISPOSITION +3538-163619-0000-1500: THERE WAS ONCE (ON A->TILL THE) TIME A WIDOWER WHO HAD A SON AND A DAUGHTER BY HIS FIRST (WIFE->WI) +3538-163619-0001-1501: FROM THE VERY DAY THAT THE NEW WIFE CAME INTO THE HOUSE THERE WAS NO PEACE FOR THE MAN'S CHILDREN AND NOT A CORNER TO BE FOUND WHERE THEY COULD GET ANY REST SO THE BOY THOUGHT THAT THE BEST THING HE COULD DO WAS TO GO OUT INTO THE WORLD AND TRY TO EARN HIS OWN BREAD +3538-163619-0002-1502: BUT HIS SISTER WHO WAS STILL AT HOME FARED WORSE AND WORSE +3538-163619-0003-1503: KISS ME (GIRL->GO) SAID THE 
HEAD +3538-163619-0004-1504: WHEN THE KING ENTERED AND SAW IT HE STOOD STILL AS IF HE WERE IN FETTERS AND COULD NOT STIR FROM THE SPOT FOR THE PICTURE SEEMED TO HIM SO BEAUTIFUL +3538-163619-0005-1505: (THE YOUTH->THESE) PROMISED TO MAKE ALL THE HASTE HE COULD AND SET FORTH FROM THE KING'S PALACE +3538-163619-0006-1506: AT LAST THEY CAME IN SIGHT OF LAND +3538-163619-0007-1507: WELL IF MY BROTHER SAYS SO I MUST DO IT SAID THE MAN'S DAUGHTER AND SHE FLUNG HER CASKET INTO THE SEA +3538-163619-0008-1508: WHAT IS MY BROTHER SAYING ASKED HIS SISTER AGAIN +3538-163619-0009-1509: ON THE FIRST THURSDAY NIGHT AFTER THIS A BEAUTIFUL MAIDEN CAME INTO THE KITCHEN OF THE PALACE AND BEGGED THE KITCHEN MAID WHO SLEPT THERE TO LEND HER A BRUSH +3538-163619-0010-1510: SHE BEGGED VERY PRETTILY AND GOT IT AND THEN SHE BRUSHED HER HAIR AND THE GOLD DROPPED FROM IT +3538-163619-0011-1511: OUT ON THEE UGLY BUSHY BRIDE SLEEPING SO SOFT BY THE YOUNG KING'S SIDE ON SAND AND STONES MY BED I MAKE AND MY (BROTHER->BROTHERS) SLEEPS WITH THE COLD SNAKE UNPITIED AND UNWEPT +3538-163619-0012-1512: I SHALL COME TWICE MORE AND THEN NEVER AGAIN SAID SHE +3538-163619-0013-1513: THIS TIME ALSO AS BEFORE SHE BORROWED A BRUSH AND BRUSHED HER HAIR WITH IT AND THE GOLD DROPPED DOWN AS SHE DID IT AND AGAIN SHE SENT THE DOG OUT THREE TIMES AND WHEN (DAY->THEY) DAWNED SHE DEPARTED BUT AS SHE WAS GOING SHE SAID AS SHE HAD SAID BEFORE I SHALL COME ONCE MORE AND THEN NEVER AGAIN +3538-163619-0014-1514: NO ONE CAN TELL HOW DELIGHTED THE KING WAS TO GET RID OF THAT HIDEOUS BUSHY BRIDE AND GET A QUEEN WHO WAS BRIGHT AND BEAUTIFUL AS DAY ITSELF +3538-163622-0000-1515: WILT THOU SERVE ME AND WATCH MY SEVEN (FOALS->FOLDS) ASKED THE KING +3538-163622-0001-1516: THE YOUTH THOUGHT THAT IT WAS VERY EASY WORK TO WATCH THE FOALS AND (THAT->*) HE COULD DO IT WELL ENOUGH +3538-163622-0002-1517: HAST THOU (WATCHED->WATCH) FAITHFULLY AND WELL THE WHOLE DAY LONG SAID THE KING WHEN THE LAD CAME INTO HIS PRESENCE IN THE EVENING +3538-163622-0003-1518: YES THAT I HAVE SAID THE YOUTH +3538-163622-0004-1519: HE HAD GONE OUT ONCE TO SEEK A PLACE HE SAID BUT NEVER WOULD HE DO SUCH A THING AGAIN +3538-163622-0005-1520: (THEN->*) THE (KING->MACKING) PROMISED HIM THE SAME PUNISHMENT AND THE SAME REWARD THAT HE HAD PROMISED HIS BROTHER +3538-163622-0006-1521: WHEN HE HAD RUN AFTER THE (FOALS->FOOLS) FOR A LONG LONG TIME AND WAS HOT AND TIRED HE PASSED BY (A CLEFT->CLIFF) IN THE ROCK WHERE AN OLD WOMAN WAS SITTING SPINNING WITH A DISTAFF AND SHE CALLED TO HIM +3538-163622-0007-1522: (COME HITHER->COMMANDER) COME HITHER MY HANDSOME SON AND LET ME COMB YOUR HAIR +3538-163622-0008-1523: THE YOUTH LIKED THE THOUGHT OF THIS LET THE (FOALS RUN->FOLDS WARM) WHERE THEY CHOSE AND SEATED HIMSELF IN THE CLEFT OF THE ROCK BY THE SIDE OF THE OLD HAG +3538-163622-0009-1524: SO THERE HE SAT WITH HIS HEAD ON HER LAP TAKING HIS EASE THE LIVELONG DAY +3538-163622-0010-1525: ON THE THIRD DAY (CINDERLAD->SAID THE LAD) WANTED TO SET OUT +3538-163622-0011-1526: THE TWO BROTHERS LAUGHED AT HIM AND HIS FATHER AND MOTHER BEGGED HIM NOT TO GO BUT ALL TO NO PURPOSE AND CINDERLAD SET OUT ON HIS WAY +3538-163622-0012-1527: I AM WALKING ABOUT IN SEARCH OF A PLACE SAID (CINDERLAD->SAINTO LAD) +3538-163622-0013-1528: I WOULD MUCH RATHER HAVE THE PRINCESS SAID (CINDERLAD->CINDER LAD) +3538-163622-0014-1529: AND THUS THEY JOURNEYED ONWARDS A LONG LONG WAY +3538-163622-0015-1530: WHEN THEY HAD GONE THUS FOR A LONG LONG WAY THE (FOAL->FULL) AGAIN ASKED DOST THOU SEE ANYTHING NOW 
+3538-163622-0016-1531: YES NOW I SEE SOMETHING THAT IS WHITE SAID (CINDERLAD->CINDER LAD) +3538-163622-0017-1532: IT LOOKS LIKE THE TRUNK OF A GREAT THICK BIRCH TREE +3538-163622-0018-1533: (CINDERLAD->SOONER LAD) TRIED BUT COULD NOT DO IT SO HE HAD TO TAKE A (DRAUGHT->DROP) FROM THE PITCHER AND THEN ONE MORE AND AFTER THAT STILL ANOTHER AND THEN HE WAS ABLE TO (WIELD->WHEEL) THE SWORD WITH PERFECT EASE +3538-163622-0019-1534: FOR WE ARE BROTHERS OF THE PRINCESS WHOM THOU ART TO HAVE WHEN THOU CANST TELL THE KING WHAT WE EAT AND DRINK BUT THERE IS A MIGHTY TROLL WHO (HAS->IS) CAST A SPELL OVER US +3538-163622-0020-1535: WHEN THEY HAD TRAVELLED (*->ALONG) A LONG (LONG->*) WAY THE FOAL SAID DOST THOU SEE ANYTHING +3538-163622-0021-1536: AND NOW INQUIRED THE (FOAL SEEST THOU->FULL CEASE DONE) NOTHING NOW +3538-163622-0022-1537: NOW THEN SAID THE (FOAL->FOOL) DOST THOU NOT SEE ANYTHING NOW +3538-163622-0023-1538: THAT IS A RIVER SAID THE FOAL AND WE HAVE TO CROSS IT +3538-163622-0024-1539: I HAVE DONE MY BEST REPLIED (CINDERLAD->SIR LAD) +3538-163624-0000-1540: ONCE UPON A TIME THERE WAS A KING IN THE NORTH WHO HAD WON MANY WARS BUT NOW HE WAS OLD +3538-163624-0001-1541: THE OLD KING WENT OUT AND (FOUGHT->THOUGHT) BRAVELY BUT AT LAST HIS SWORD BROKE AND HE WAS WOUNDED AND HIS MEN FLED +3538-163624-0002-1542: BUT IN THE NIGHT WHEN THE BATTLE WAS OVER HIS YOUNG WIFE CAME OUT AND SEARCHED FOR HIM AMONG THE SLAIN AND AT LAST SHE FOUND HIM AND ASKED WHETHER HE MIGHT BE HEALED +3538-163624-0003-1543: SO (HE ASKED->YES) THE QUEEN HOW DO YOU KNOW IN THE DARK OF NIGHT WHETHER THE HOURS ARE WEARING TO THE MORNING AND SHE SAID +3538-163624-0004-1544: THEN THE OLD MAN SAID DRIVE ALL THE HORSES INTO THE RIVER AND CHOOSE THE ONE THAT SWIMS ACROSS +3538-163624-0005-1545: HE (IS->HAS) NO BIGGER THAN OTHER DRAGONS SAID THE TUTOR AND IF YOU WERE AS BRAVE AS YOUR FATHER YOU WOULD NOT FEAR HIM +3538-163624-0006-1546: THEN THE PERSON WHO HAD KILLED OTTER WENT DOWN AND CAUGHT THE DWARF WHO OWNED ALL THE TREASURE AND TOOK IT FROM HIM +3538-163624-0007-1547: ONLY ONE RING WAS LEFT WHICH THE DWARF WORE AND EVEN THAT WAS TAKEN FROM HIM +3538-163624-0008-1548: SO (REGIN->RIGAN) MADE A SWORD AND (SIGURD->CIGAR) TRIED IT WITH A BLOW (ON->AND) A LUMP OF IRON AND THE SWORD BROKE +3538-163624-0009-1549: THEN (SIGURD->CIGAR) WENT TO HIS MOTHER AND ASKED FOR THE BROKEN PIECES OF HIS FATHER'S BLADE AND GAVE THEM TO (REGIN->REGAN) +3538-163624-0010-1550: SO (SIGURD->CIGARS) SAID THAT SWORD WOULD DO +3538-163624-0011-1551: THEN HE SAW THE TRACK WHICH THE DRAGON (*->HAD) MADE WHEN HE WENT TO A CLIFF TO DRINK AND THE TRACK WAS AS IF A GREAT RIVER HAD ROLLED ALONG AND LEFT A DEEP VALLEY +3538-163624-0012-1552: BUT (SIGURD->CIGARET) WAITED TILL HALF OF HIM HAD CRAWLED OVER THE PIT AND THEN HE THRUST THE SWORD (GRAM->GRAHAM) RIGHT INTO HIS VERY HEART +3538-163624-0013-1553: (SIGURD->CIGAR) SAID I WOULD TOUCH NONE OF IT IF BY LOSING IT I SHOULD NEVER DIE +3538-163624-0014-1554: BUT ALL MEN DIE AND (NO->KNOW) BRAVE MAN LETS DEATH FRIGHTEN HIM FROM HIS DESIRE +3538-163624-0015-1555: (DIE->GUY) THOU (FAFNIR->FAFNER) AND THEN (FAFNIR->STAFF) DIED +3538-163624-0016-1556: THEN (SIGURD->CIGAR) RODE BACK AND MET (REGIN->RIGAN) AND (REGIN->RIGAN) ASKED HIM TO ROAST (FAFNIR'S->FAFNER'S) HEART AND LET HIM TASTE OF IT +3538-163624-0017-1557: SO (SIGURD->SIR GOD) PUT THE HEART OF (FAFNIR->FAFNER) ON A STAKE AND ROASTED IT +3538-163624-0018-1558: THERE IS (SIGURD->CIGAR) ROASTING (FAFNIR'S->FASTENER'S) HEART FOR ANOTHER WHEN HE SHOULD TASTE OF IT 
HIMSELF AND LEARN ALL WISDOM +3538-163624-0019-1559: THAT LET HIM DO (AND->*) THEN RIDE OVER (HINDFELL->HINFIELD) TO THE PLACE WHERE (BRYNHILD->BURNHILD) SLEEPS +3538-163624-0020-1560: THERE MUST SHE SLEEP TILL THOU (COMEST->COMES) FOR HER WAKING (*->WHO) RISE UP AND RIDE FOR NOW SURE SHE WILL SWEAR THE VOW FEARLESS OF BREAKING +3538-163624-0021-1561: THEN HE TOOK THE HELMET OFF THE HEAD OF THE SLEEPER AND BEHOLD SHE WAS A MOST BEAUTIFUL LADY +3538-163624-0022-1562: THEN (SIGURD->CIGAR) RODE AWAY AND HE CAME TO THE HOUSE OF A KING WHO HAD A FAIR DAUGHTER +3538-163624-0023-1563: (THEN BRYNHILD'S->WHEN BRUNHOLD'S) FATHER TOLD (GUNNAR->GUNNER) THAT SHE WOULD MARRY NONE BUT HIM WHO COULD RIDE THE FLAME IN FRONT OF HER ENCHANTED TOWER AND THITHER THEY RODE AND (GUNNAR->GUNNER) SET HIS HORSE (AT->TO) THE FLAME BUT HE WOULD NOT FACE IT +3538-163624-0024-1564: FOR ONE DAY WHEN (BRYNHILD->BURNEHELD) AND (GUDRUN->GUNDRAIN) WERE BATHING (BRYNHILD WADED->BURNEHELD WAITED) FARTHEST (OUT->SOUTH) INTO THE RIVER AND SAID SHE DID THAT TO SHOW SHE WAS (GUIRUN'S->GUNDERING) SUPERIOR +3538-163624-0025-1565: FOR HER HUSBAND SHE SAID HAD RIDDEN THROUGH THE FLAME WHEN NO OTHER MAN DARED FACE IT +3538-163624-0026-1566: NOT LONG TO WAIT HE SAID TILL THE BITTER SWORD STANDS FAST IN MY HEART AND THOU (WILL->WILT) NOT LIVE LONG WHEN I AM DEAD +367-130732-0000-1466: LOBSTERS AND LOBSTERS +367-130732-0001-1467: WHEN (IS->AS) A LOBSTER NOT A LOBSTER WHEN IT IS A CRAYFISH +367-130732-0002-1468: THIS QUESTION AND ANSWER MIGHT WELL GO INTO THE (PRIMER->PRIMARY) OF INFORMATION FOR THOSE WHO COME (TO->THE) SAN FRANCISCO FROM THE EAST FOR WHAT IS CALLED A LOBSTER IN SAN FRANCISCO IS NOT A LOBSTER AT ALL BUT A CRAYFISH +367-130732-0003-1469: THE PACIFIC CRAYFISH (HOWEVER SERVES->HOURSERVES) EVERY PURPOSE AND WHILE MANY (CONTEND THAT->CONTENDED) ITS MEAT IS NOT SO DELICATE (IN FLAVOR->AND FLAVORITE) AS THAT OF ITS EASTERN COUSIN THE (CALIFORNIAN->CALIFORNIA) WILL AS STRENUOUSLY INSIST THAT IT IS BETTER BUT OF COURSE SOMETHING MUST ALWAYS BE ALLOWED FOR THE PATRIOTISM OF THE (CALIFORNIAN->CALIFORNIA) +367-130732-0004-1470: A BOOK COULD BE WRITTEN ABOUT THIS RESTAURANT AND THEN ALL WOULD NOT BE TOLD FOR ALL ITS SECRETS CAN NEVER BE KNOWN +367-130732-0005-1471: IT WAS HERE THAT MOST MAGNIFICENT DINNERS WERE ARRANGED IT WAS HERE THAT EXTRAORDINARY DISHES WERE CONCOCTED BY CHEFS OF (WORLD WIDE->WOOLWRIGHT) FAME IT WAS HERE THAT LOBSTER (A LA NEWBERG->ALAD NEWBURG) REACHED ITS HIGHEST PERFECTION AND THIS IS THE RECIPE THAT WAS FOLLOWED WHEN (IT->HE) WAS PREPARED IN THE (DELMONICO->DOMONICO) +367-130732-0006-1472: LOBSTER (A LA NEWBERG->OLY NEWBURG) +367-130732-0007-1473: ONE POUND OF (LOBSTER MEAT->LOBS TO ME) ONE TEASPOONFUL OF BUTTER ONE HALF PINT OF CREAM YOLKS OF FOUR EGGS ONE WINE GLASS OF SHERRY LOBSTER FAT +367-130732-0008-1474: PUT THIS IN A DOUBLE BOILER AND LET COOK UNTIL THICK STIRRING CONSTANTLY +367-130732-0009-1475: SERVE IN A (CHAFING->CHIEFING) DISH WITH (THIN->FLIND) SLICES OF DRY TOAST +367-130732-0010-1476: KING OF (SHELL FISH->SHELLFISH) +367-130732-0011-1477: ONE HAS TO COME TO SAN FRANCISCO TO PARTAKE OF THE KING OF (SHELL FISH->SHELLFISH) THE MAMMOTH PACIFIC CRAB +367-130732-0012-1478: I SAY COME TO SAN FRANCISCO ADVISEDLY FOR WHILE THE CRAB IS FOUND ALL ALONG THE COAST IT IS PREPARED NOWHERE SO DELICIOUSLY AS IN (SAN FRANCISCO->SAMPANCISCO) +367-130732-0013-1479: (GOBEY'S PASSED->GOBYS PASS) WITH THE FIRE AND THE LITTLE RESTAURANT BEARING HIS NAME (AND->*) IN CHARGE OF HIS WIDOW (IN->AND) UNION SQUARE AVENUE 
HAS NOT ATTAINED THE FAME OF THE OLD PLACE +367-130732-0014-1480: IT IS POSSIBLE THAT SHE KNOWS THE SECRET OF PREPARING CRAB AS IT WAS PREPARED IN THE (GOBEY'S->GOBIES) OF BEFORE THE FIRE BUT HIS (PRESTIGE->PRESAGE) DID NOT DESCEND TO HER +367-130732-0015-1481: (GOBEY'S CRAB STEW->GOBY'S CRABS DO) +367-130732-0016-1482: TAKE THE MEAT OF ONE LARGE CRAB SCRAPING OUT ALL (OF->*) THE (FAT->BAT) FROM THE SHELL +367-130732-0017-1483: SOAK THE CRAB MEAT IN THE SHERRY TWO HOURS BEFORE COOKING +367-130732-0018-1484: CHOP FINE THE ONION SWEET PEPPER AND TOMATO WITH THE ROSEMARY +367-130732-0019-1485: HEAT THIS IN A (STEWPAN->STEWPANT) AND (WHEN->WENT) SIMMERING (ADD->AT) THE SHERRY AND CRAB (MEAT->ME) AND LET ALL COOK TOGETHER WITH A SLOW FIRE FOR EIGHT MINUTES +367-130732-0020-1486: SERVE IN A CHAFING DISH WITH TOASTED CRACKERS OR THIN SLICES OF TOASTED BREAD +367-130732-0021-1487: LOBSTER IN MINIATURE +367-130732-0022-1488: SO FAR IT HAS BEEN USED MOSTLY FOR GARNISHMENT OF OTHER DISHES AND IT IS ONLY RECENTLY THAT THE (HOF BRAU->WHOLE BROW) HAS BEEN MAKING (A SPECIALTY->ESPECIALTY) OF THEM +367-130732-0023-1489: ALL (OF->*) THE BETTER CLASS RESTAURANTS HOWEVER WILL SERVE THEM IF YOU ORDER THEM +367-130732-0024-1490: THIS IS THE RECIPE FOR EIGHT PEOPLE AND IT IS WELL (*->IT) WORTH TRYING IF YOU ARE GIVING A DINNER OF IMPORTANCE +367-130732-0025-1491: (BISQUE->THIS) OF (CRAWFISH->CROFISH) +367-130732-0026-1492: TAKE THIRTY (CRAWFISH->CROPFISH) FROM WHICH REMOVE THE GUT CONTAINING THE GALL IN THE FOLLOWING MANNER TAKE FIRM HOLD OF THE CRAWFISH WITH THE LEFT HAND SO AS TO AVOID BEING PINCHED BY ITS (CLAWS->CLOTHS) WITH THE THUMB AND FOREFINGER OF THE RIGHT HAND PINCH THE EXTREME END OF THE CENTRAL FIN OF THE TAIL AND WITH A SUDDEN JERK THE GUT WILL BE WITHDRAWN +367-130732-0027-1493: MINCE (OR->ARE) CUT INTO SMALL DICE A CARROT (AN->AND) ONION ONE HEAD OF CELERY AND A FEW PARSLEY ROOTS AND TO THESE (ADD->AT) A BAY LEAF A SPRIG OF THYME A LITTLE (MINIONETTE->MEAN ON IT) PEPPER AND TWO (OUNCES->OZ) OF BUTTER +367-130732-0028-1494: PUT THESE INGREDIENTS INTO A STEWPAN AND FRY THEM TEN MINUTES THEN THROW IN THE (CRAWFISH->CROPPISH) AND POUR ON THEM HALF A BOTTLE OF FRENCH WHITE WINE +367-130732-0029-1495: ALLOW (THIS->US) TO BOIL AND THEN ADD A QUART OF STRONG (CONSOMME->CONSUM) AND LET ALL CONTINUE BOILING FOR HALF AN HOUR +367-130732-0030-1496: PICK OUT THE (CRAWFISH->CRAW FISH) AND STRAIN THE BROTH THROUGH A NAPKIN BY PRESSURE INTO A BASIN IN ORDER TO EXTRACT ALL THE ESSENCE FROM THE VEGETABLES +367-130732-0031-1497: PICK THE SHELLS (OFF->OF) TWENTY FIVE OF THE (CRAWFISH->CROFISH) TAILS TRIM THEM NEATLY AND SET THEM ASIDE UNTIL WANTED +367-130732-0032-1498: RESERVE SOME OF THE SPAWN ALSO (HALF OF->HAPPENED) THE BODY SHELLS WITH WHICH TO MAKE THE CRAWFISH BUTTER TO FINISH THE SOUP +367-130732-0033-1499: THIS BUTTER IS MADE AS FOLLOWS PLACE THE SHELLS (ON->IN) A BAKING SHEET IN THE OVEN TO DRY LET THE SHELLS COOL AND THEN POUND THEM IN A MORTAR WITH A LITTLE LOBSTER (CORAL->COAL) AND FOUR OUNCES OF FRESH BUTTER THOROUGHLY BRUISING THE WHOLE TOGETHER SO AS TO MAKE A FINE PASTE +367-293981-0000-1445: I SWEAR (IT->*) ANSWERED SANCHO +367-293981-0001-1446: I SAY SO CONTINUED DON QUIXOTE BECAUSE I HATE TAKING AWAY (ANYONE'S->ANY ONE'S) GOOD NAME +367-293981-0002-1447: I SAY REPLIED SANCHO THAT I SWEAR TO HOLD MY TONGUE ABOUT IT TILL THE END OF YOUR (WORSHIP'S DAYS->WORSHIP STAYS) AND (GOD->GONE) GRANT I MAY BE ABLE TO LET IT OUT (TOMORROW->TO MORROW) +367-293981-0003-1448: THOUGH YOUR WORSHIP WAS NOT SO BADLY 
OFF HAVING IN YOUR ARMS (THAT INCOMPARABLE->THE INN COMPARABLE) BEAUTY YOU SPOKE OF BUT I WHAT DID I HAVE EXCEPT THE HEAVIEST (WHACKS->WAX THAT) I THINK I HAD IN ALL MY LIFE +367-293981-0004-1449: UNLUCKY ME (AND->INTO) THE MOTHER THAT BORE ME +367-293981-0005-1450: DIDN'T I SAY SO WORSE LUCK TO MY LINE SAID SANCHO +367-293981-0006-1451: IT CANNOT BE THE (MOOR->MORE) ANSWERED DON QUIXOTE FOR THOSE UNDER ENCHANTMENT DO NOT LET THEMSELVES BE SEEN BY ANYONE +367-293981-0007-1452: IF THEY (DON'T->DO NOT) LET THEMSELVES BE SEEN THEY LET THEMSELVES BE FELT SAID SANCHO IF NOT LET MY SHOULDERS SPEAK TO THE POINT +367-293981-0008-1453: (MINE->MIKE) COULD SPEAK TOO SAID DON QUIXOTE BUT THAT IS NOT A (SUFFICIENT->SUSPICION OF) REASON FOR BELIEVING THAT WHAT WE SEE IS THE ENCHANTED MOOR +367-293981-0009-1454: THE (OFFICER->OFFICERS) TURNED TO HIM AND SAID WELL HOW GOES (IT->A) GOOD MAN +367-293981-0010-1455: (SANCHO GOT->SANCHA CUT) UP WITH PAIN ENOUGH IN HIS BONES AND WENT AFTER THE INNKEEPER IN THE DARK AND MEETING THE OFFICER WHO WAS LOOKING TO SEE WHAT HAD BECOME OF HIS ENEMY HE SAID TO HIM SENOR WHOEVER YOU ARE DO US (THE FAVOUR->TO FAVOR) AND KINDNESS TO GIVE US A LITTLE ROSEMARY OIL SALT AND (WINE->WHITE) FOR IT IS (WANTED->WATER) TO CURE ONE OF (THE->OUR) BEST KNIGHTS ERRANT ON EARTH WHO LIES ON YONDER BED WOUNDED BY THE HANDS OF THE ENCHANTED MOOR THAT IS IN THIS INN +367-293981-0011-1456: TO BE BRIEF HE TOOK THE (MATERIALS->MATURES) OF WHICH HE MADE A COMPOUND MIXING THEM ALL (AND->OF) BOILING THEM A GOOD WHILE (*->IT) UNTIL IT SEEMED TO HIM THEY HAD COME TO PERFECTION +367-293981-0012-1457: SANCHO PANZA WHO ALSO REGARDED THE AMENDMENT OF HIS MASTER AS MIRACULOUS BEGGED HIM TO GIVE HIM WHAT WAS (LEFT IN THE PIGSKIN->LET AN OPINION) WHICH WAS NO SMALL QUANTITY +367-293981-0013-1458: DON QUIXOTE CONSENTED AND HE TAKING IT WITH BOTH HANDS IN GOOD FAITH AND WITH A BETTER WILL GULPED (*->IT) DOWN AND DRAINED (OFF->UP) VERY LITTLE LESS THAN HIS MASTER +367-293981-0014-1459: IF YOUR WORSHIP KNEW THAT RETURNED SANCHO (WOE->WON'T) BETIDE ME (AND->IN) ALL MY KINDRED WHY DID YOU LET ME TASTE IT +367-293981-0015-1460: SEARCH YOUR MEMORY AND IF YOU FIND ANYTHING OF THIS KIND YOU NEED ONLY TELL ME OF IT AND I PROMISE YOU BY THE ORDER OF KNIGHTHOOD WHICH I HAVE RECEIVED TO PROCURE YOU SATISFACTION (AND->IN) REPARATION TO THE UTMOST OF YOUR DESIRE +367-293981-0016-1461: THEN THIS IS AN INN SAID DON QUIXOTE +367-293981-0017-1462: (AND->IN) A VERY RESPECTABLE ONE SAID THE INNKEEPER +367-293981-0018-1463: THE CRIES OF THE POOR (BLANKETED->BLANKET) WRETCH WERE SO LOUD THAT THEY REACHED THE EARS OF HIS MASTER WHO HALTING TO LISTEN ATTENTIVELY WAS PERSUADED THAT SOME NEW ADVENTURE WAS COMING UNTIL HE CLEARLY PERCEIVED THAT IT WAS (HIS->THE) SQUIRE WHO UTTERED THEM +367-293981-0019-1464: HE SAW HIM RISING AND FALLING IN THE AIR WITH SUCH GRACE AND NIMBLENESS THAT HAD HIS RAGE ALLOWED HIM IT IS MY BELIEF HE WOULD HAVE LAUGHED +367-293981-0020-1465: SANCHO TOOK IT AND AS HE WAS RAISING IT TO HIS MOUTH HE WAS STOPPED BY THE CRIES OF HIS MASTER EXCLAIMING SANCHO MY SON DRINK NOT WATER (DRINK IT NOT->DRINKIN UP) MY SON FOR IT WILL KILL THEE SEE HERE I HAVE THE BLESSED BALSAM AND HE HELD UP THE FLASK OF LIQUOR AND WITH DRINKING TWO DROPS (OF IT->WHAT) THOU WILT CERTAINLY BE RESTORED +3764-168670-0000-1666: THE STRIDES OF A LAME MAN ARE LIKE THE OGLING GLANCES OF A ONE EYED MAN THEY DO NOT REACH THEIR GOAL VERY PROMPTLY +3764-168670-0001-1667: COSETTE HAD WAKED UP +3764-168670-0002-1668: JEAN VALJEAN HAD PLACED HER NEAR 
THE FIRE +3764-168670-0003-1669: YOU WILL WAIT FOR ME AT A LADY'S HOUSE I SHALL COME TO FETCH YOU +3764-168670-0004-1670: EVERYTHING IS (ARRANGED->RANGED) AND NOTHING IS SAID FAUCHELEVENT +3764-168670-0005-1671: I HAVE PERMISSION TO BRING YOU IN BUT BEFORE BRINGING YOU IN YOU MUST BE GOT OUT +3764-168670-0006-1672: THAT'S WHERE THE DIFFICULTY LIES +3764-168670-0007-1673: IT IS EASY ENOUGH WITH THE CHILD YOU WILL CARRY HER OUT +3764-168670-0008-1674: AND SHE WILL HOLD HER TONGUE I ANSWER FOR THAT +3764-168670-0009-1675: FAUCHELEVENT GRUMBLED MORE TO HIMSELF THAN TO JEAN VALJEAN +3764-168670-0010-1676: YOU UNDERSTAND FATHER MADELEINE THE GOVERNMENT WILL NOTICE IT +3764-168670-0011-1677: JEAN VALJEAN STARED HIM STRAIGHT IN THE EYE AND THOUGHT THAT HE WAS RAVING +3764-168670-0012-1678: FAUCHELEVENT WENT ON +3764-168670-0013-1679: IT IS TO MORROW THAT I AM TO BRING YOU IN THE (PRIORESS->PRIORS) EXPECTS YOU +3764-168670-0014-1680: THEN HE EXPLAINED TO JEAN VALJEAN THAT THIS WAS HIS RECOMPENSE FOR A SERVICE WHICH HE (FAUCHELEVENT->FOR CHAUVELIN) WAS TO RENDER TO THE COMMUNITY +3764-168670-0015-1681: THAT THE NUN WHO HAD DIED THAT MORNING HAD REQUESTED TO BE BURIED IN THE COFFIN WHICH HAD SERVED HER FOR A BED AND INTERRED IN THE VAULT UNDER THE ALTAR OF THE CHAPEL +3764-168670-0016-1682: THAT THE (PRIORESS->PRIOR REST) AND THE VOCAL MOTHERS INTENDED TO FULFIL THE WISH OF THE DECEASED +3764-168670-0017-1683: THAT HE (FAUCHELEVENT->FOR SCHLEVENT) WAS TO NAIL UP THE COFFIN IN THE CELL (RAISE->RAISED) THE STONE IN THE CHAPEL AND (LOWER->BLOW) THE CORPSE INTO THE VAULT +3764-168670-0018-1684: AND THEN THAT THERE WAS ANOTHER THE EMPTY COFFIN +3764-168670-0019-1685: WHAT IS THAT EMPTY COFFIN +3764-168670-0020-1686: ASKED JEAN VALJEAN FAUCHELEVENT REPLIED +3764-168670-0021-1687: WHAT COFFIN WHAT ADMINISTRATION +3764-168670-0022-1688: FAUCHELEVENT WHO WAS SEATED SPRANG UP AS THOUGH A (BOMB->BALM) HAD BURST UNDER HIS CHAIR YOU +3764-168670-0023-1689: YOU KNOW FAUCHELEVENT WHAT YOU HAVE SAID MOTHER CRUCIFIXION IS DEAD +3764-168670-0024-1690: AND I ADD AND FATHER MADELEINE IS BURIED (AH->*) +3764-168670-0025-1691: YOU ARE NOT LIKE OTHER MEN FATHER MADELEINE +3764-168670-0026-1692: THIS OFFERS THE MEANS BUT GIVE ME SOME INFORMATION IN THE FIRST PLACE +3764-168670-0027-1693: HOW LONG IS THE COFFIN SIX FEET +3764-168670-0028-1694: IT IS A CHAMBER ON THE GROUND FLOOR WHICH HAS A GRATED WINDOW OPENING ON THE GARDEN WHICH IS CLOSED ON THE OUTSIDE BY A SHUTTER AND TWO DOORS ONE LEADS INTO THE CONVENT THE OTHER INTO THE CHURCH (WHAT->A WATCH) CHURCH +3764-168670-0029-1695: THE CHURCH IN THE STREET (*->THOUGH) THE CHURCH WHICH ANY ONE CAN ENTER +3764-168670-0030-1696: HAVE YOU THE KEYS TO THOSE TWO DOORS +3764-168670-0031-1697: (*->AND) NO I HAVE THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CONVENT THE PORTER HAS THE KEY TO THE DOOR WHICH COMMUNICATES WITH THE CHURCH +3764-168670-0032-1698: ONLY TO ALLOW THE (UNDERTAKER'S->UNDERTAKERS) MEN TO ENTER WHEN THEY COME TO GET THE COFFIN +3764-168670-0033-1699: WHO NAILS UP THE COFFIN I DO +3764-168670-0034-1700: WHO SPREADS THE (PALL->POOL) OVER IT +3764-168670-0035-1701: NOT ANOTHER MAN EXCEPT THE POLICE DOCTOR CAN ENTER THE (DEAD ROOM->BEDROOM) THAT IS EVEN WRITTEN ON THE WALL +3764-168670-0036-1702: COULD YOU HIDE ME IN THAT ROOM TO NIGHT WHEN EVERY ONE IS ASLEEP +3764-168670-0037-1703: ABOUT THREE O'CLOCK IN THE AFTERNOON +3764-168670-0038-1704: I SHALL BE HUNGRY I WILL BRING YOU SOMETHING +3764-168670-0039-1705: YOU CAN COME AND NAIL ME UP IN THE COFFIN AT TWO 
O'CLOCK +3764-168670-0040-1706: FAUCHELEVENT RECOILED AND CRACKED HIS FINGER JOINTS BUT THAT IS IMPOSSIBLE +3764-168670-0041-1707: BAH IMPOSSIBLE TO TAKE A HAMMER AND DRIVE SOME NAILS IN A PLANK +3764-168670-0042-1708: JEAN VALJEAN HAD BEEN IN WORSE (STRAITS->STRAIT) THAN THIS +3764-168670-0043-1709: ANY MAN WHO HAS BEEN A PRISONER UNDERSTANDS HOW TO CONTRACT HIMSELF TO FIT THE DIAMETER OF THE ESCAPE +3764-168670-0044-1710: WHAT DOES NOT A MAN UNDERGO FOR THE SAKE OF A CURE +3764-168670-0045-1711: TO HAVE HIMSELF NAILED UP IN A CASE AND CARRIED OFF LIKE A BALE OF GOODS TO LIVE FOR A LONG TIME IN A BOX TO FIND AIR WHERE THERE IS NONE TO ECONOMIZE HIS BREATH FOR HOURS TO KNOW HOW TO STIFLE WITHOUT DYING THIS WAS ONE OF JEAN VALJEAN'S GLOOMY TALENTS +3764-168670-0046-1712: YOU SURELY MUST HAVE A GIMLET YOU WILL MAKE A FEW HOLES HERE AND THERE AROUND MY MOUTH AND YOU WILL NAIL THE TOP PLANK ON LOOSELY GOOD AND WHAT IF YOU SHOULD HAPPEN TO COUGH OR TO SNEEZE +3764-168670-0047-1713: A MAN WHO IS MAKING HIS ESCAPE DOES NOT COUGH OR SNEEZE +3764-168670-0048-1714: WHO IS THERE WHO HAS NOT SAID TO A CAT DO COME IN +3764-168670-0049-1715: THE (OVER PRUDENT CATS->OVERPRUDENT CARTS) AS THEY ARE AND BECAUSE THEY ARE CATS SOMETIMES INCUR MORE DANGER THAN THE AUDACIOUS +3764-168670-0050-1716: BUT JEAN VALJEAN'S COOLNESS PREVAILED OVER HIM IN SPITE OF HIMSELF HE GRUMBLED +3764-168670-0051-1717: IF YOU ARE SURE OF COMING OUT OF THE COFFIN ALL RIGHT I AM SURE OF GETTING YOU OUT OF THE GRAVE +3764-168670-0052-1718: AN OLD FELLOW OF THE OLD SCHOOL THE GRAVE DIGGER PUTS THE CORPSES IN THE GRAVE AND I PUT THE GRAVE DIGGER IN MY POCKET +3764-168670-0053-1719: I SHALL FOLLOW THAT IS MY BUSINESS +3764-168670-0054-1720: THE (HEARSE HALTS->HOUSEHOLTS) THE (UNDERTAKER'S->UNDERTAKERS) MEN (KNOT->NOT) A ROPE AROUND YOUR COFFIN AND LOWER YOU DOWN +3764-168670-0055-1721: THE (PRIEST SAYS->PRIESTS AS) THE PRAYERS MAKES THE SIGN OF THE CROSS SPRINKLES THE HOLY WATER AND TAKES HIS DEPARTURE +3764-168670-0056-1722: ONE OF TWO THINGS WILL HAPPEN HE WILL EITHER BE SOBER OR HE WILL NOT BE SOBER +3764-168670-0057-1723: THAT IS SETTLED FATHER FAUCHELEVENT ALL WILL GO WELL +3764-168671-0000-1724: ON THE FOLLOWING DAY AS THE SUN WAS DECLINING THE VERY RARE PASSERS BY ON THE BOULEVARD DU (MAINE->MIN) PULLED OFF THEIR HATS TO AN OLD FASHIONED HEARSE ORNAMENTED WITH SKULLS CROSS BONES AND TEARS +3764-168671-0001-1725: THIS HEARSE CONTAINED A COFFIN COVERED WITH A WHITE CLOTH OVER WHICH SPREAD A LARGE BLACK CROSS LIKE A HUGE CORPSE WITH DROOPING ARMS +3764-168671-0002-1726: (A MOURNING->THE MORNING) COACH IN WHICH COULD BE SEEN A PRIEST IN HIS SURPLICE AND A CHOIR BOY IN HIS RED CAP FOLLOWED +3764-168671-0003-1727: BEHIND IT CAME AN OLD MAN IN THE GARMENTS OF A LABORER WHO LIMPED ALONG +3764-168671-0004-1728: THE GRAVE DIGGERS BEING THUS BOUND TO SERVICE IN THE EVENING IN SUMMER AND AT NIGHT (IN->AND) WINTER IN THIS CEMETERY THEY WERE SUBJECTED TO A SPECIAL DISCIPLINE +3764-168671-0005-1729: THESE GATES THEREFORE SWUNG INEXORABLY ON THEIR HINGES AT THE INSTANT WHEN THE SUN DISAPPEARED BEHIND THE DOME OF THE (INVALIDES->INVALIDE) +3764-168671-0006-1730: DAMPNESS WAS INVADING IT THE FLOWERS WERE DESERTING IT +3764-168671-0007-1731: THE BOURGEOIS DID NOT CARE MUCH ABOUT BEING BURIED IN THE (VAUGIRARD->ROGER) IT HINTED AT POVERTY (PERE LACHAISE->PALACE) IF YOU PLEASE +3764-168671-0008-1732: TO BE BURIED IN (PERE LACHAISE->PERFELASHES) IS EQUIVALENT TO HAVING FURNITURE OF MAHOGANY IT IS RECOGNIZED AS ELEGANT +3764-168671-0009-1733: THE 
INTERMENT OF MOTHER CRUCIFIXION IN THE VAULT UNDER THE ALTAR THE EXIT OF COSETTE THE INTRODUCTION OF JEAN VALJEAN (TO->INTO) THE DEAD ROOM ALL HAD BEEN EXECUTED WITHOUT DIFFICULTY AND THERE HAD BEEN NO HITCH LET US REMARK IN PASSING THAT THE BURIAL OF MOTHER CRUCIFIXION UNDER THE ALTAR OF THE CONVENT IS A PERFECTLY VENIAL OFFENCE IN OUR SIGHT +3764-168671-0010-1734: IT IS ONE OF THE FAULTS WHICH RESEMBLE A DUTY +3764-168671-0011-1735: THE NUNS HAD COMMITTED IT NOT ONLY WITHOUT DIFFICULTY BUT EVEN WITH THE APPLAUSE OF THEIR OWN CONSCIENCES +3764-168671-0012-1736: IN THE CLOISTER WHAT IS CALLED THE GOVERNMENT IS ONLY AN INTERMEDDLING WITH AUTHORITY AN INTERFERENCE WHICH IS ALWAYS QUESTIONABLE +3764-168671-0013-1737: MAKE AS MANY (LAWS->NOISE) AS YOU PLEASE MEN BUT KEEP THEM FOR YOURSELVES +3764-168671-0014-1738: A PRINCE IS NOTHING IN THE PRESENCE OF A PRINCIPLE +3764-168671-0015-1739: FAUCHELEVENT LIMPED ALONG BEHIND THE HEARSE IN A VERY CONTENTED FRAME OF MIND +3764-168671-0016-1740: JEAN VALJEAN'S COMPOSURE WAS ONE OF THOSE POWERFUL TRANQUILLITIES WHICH ARE CONTAGIOUS +3764-168671-0017-1741: WHAT REMAINED TO BE DONE WAS A MERE NOTHING +3764-168671-0018-1742: HE PLAYED WITH FATHER (MESTIENNE->MISTIENNE) +3764-168671-0019-1743: HE DID WHAT HE LIKED WITH HIM HE MADE HIM DANCE ACCORDING TO HIS WHIM +3764-168671-0020-1744: THE PERMISSION FOR INTERMENT MUST BE EXHIBITED +3764-168671-0021-1745: HE WAS A SORT OF (LABORING->LABOURING) MAN WHO WORE A WAISTCOAT WITH LARGE POCKETS AND CARRIED A MATTOCK UNDER HIS ARM +3764-168671-0022-1746: THE MAN REPLIED THE GRAVE DIGGER +3764-168671-0023-1747: THE (GRAVE->BRAVE) DIGGER YES +3764-168671-0024-1748: YOU I +3764-168671-0025-1749: FATHER (MESTIENNE->MISSION) IS THE GRAVE DIGGER HE WAS +3764-168671-0026-1750: FAUCHELEVENT HAD EXPECTED ANYTHING BUT THIS THAT A GRAVE DIGGER COULD DIE +3764-168671-0027-1751: IT IS TRUE NEVERTHELESS THAT GRAVE DIGGERS DO DIE THEMSELVES +3764-168671-0028-1752: HE HAD HARDLY THE STRENGTH TO STAMMER +3764-168671-0029-1753: BUT HE PERSISTED FEEBLY FATHER (MESTIENNE->MESSIAN) IS THE GRAVE DIGGER +3764-168671-0030-1754: DO YOU KNOW WHO LITTLE FATHER (LENOIR->NOIR) IS HE IS A JUG OF RED WINE +3764-168671-0031-1755: BUT YOU ARE A JOLLY FELLOW TOO +3764-168671-0032-1756: ARE YOU NOT COMRADE WE'LL GO AND HAVE A DRINK TOGETHER PRESENTLY +3764-168671-0033-1757: THE MAN REPLIED +3764-168671-0034-1758: HE LIMPED MORE OUT OF ANXIETY THAN FROM INFIRMITY +3764-168671-0035-1759: THE GRAVE DIGGER WALKED ON IN FRONT OF HIM +3764-168671-0036-1760: FAUCHELEVENT PASSED THE UNEXPECTED (GRIBIER->CLAVIER) ONCE MORE IN REVIEW +3764-168671-0037-1761: FAUCHELEVENT WHO WAS ILLITERATE BUT VERY SHARP UNDERSTOOD THAT HE HAD TO DEAL WITH A FORMIDABLE SPECIES OF MAN WITH A FINE TALKER HE MUTTERED +3764-168671-0038-1762: (SO->MISS OH) FATHER (MESTIENNE->MESS TEEN) IS DEAD +3764-168671-0039-1763: THE MAN REPLIED COMPLETELY +3764-168671-0040-1764: THE GOOD GOD CONSULTED HIS NOTE BOOK WHICH SHOWS WHEN THE TIME IS UP IT WAS FATHER MESTIENNE'S TURN (FATHER MESTIENNE->FOR THE MESSION) DIED +3764-168671-0041-1765: STAMMERED FAUCHELEVENT IT IS MADE +3764-168671-0042-1766: YOU ARE A PEASANT I AM A PARISIAN +3764-168671-0043-1767: FAUCHELEVENT THOUGHT I AM LOST +3764-168671-0044-1768: THEY WERE ONLY A FEW TURNS OF THE WHEEL DISTANT FROM THE SMALL ALLEY LEADING TO THE (NUNS->NUN'S) CORNER +3764-168671-0045-1769: AND HE ADDED WITH THE SATISFACTION OF A SERIOUS MAN WHO IS TURNING A PHRASE WELL +3764-168671-0046-1770: FORTUNATELY THE SOIL WHICH WAS LIGHT AND WET WITH THE 
WINTER RAINS CLOGGED THE WHEELS AND RETARDED ITS SPEED +3764-168671-0047-1771: MY FATHER WAS A PORTER AT THE (PRYTANEUM->BRITTANNIUM) TOWN HALL +3764-168671-0048-1772: BUT HE HAD REVERSES HE HAD (LOSSES ON CHANGE->LOSES UNCHANGED) I WAS OBLIGED TO RENOUNCE THE PROFESSION OF AUTHOR BUT I AM STILL A PUBLIC WRITER +3764-168671-0049-1773: (*->BUT) SO YOU ARE NOT A GRAVE DIGGER THEN +3764-168671-0050-1774: RETURNED FAUCHELEVENT CLUTCHING AT THIS BRANCH FEEBLE AS IT WAS +3764-168671-0051-1775: HERE A REMARK BECOMES NECESSARY +3764-168671-0052-1776: (*->A) FAUCHELEVENT WHATEVER HIS ANGUISH OFFERED A DRINK BUT HE DID NOT EXPLAIN HIMSELF ON ONE POINT WHO WAS TO PAY +3764-168671-0053-1777: THE GRAVE DIGGER WENT ON WITH (A->THE) SUPERIOR SMILE +3764-168671-0054-1778: ONE MUST EAT +3997-180294-0000-1800: THE DUKE COMES EVERY MORNING THEY WILL TELL HIM WHEN HE COMES THAT I AM ASLEEP AND PERHAPS HE WILL WAIT UNTIL I (WAKE->AWAKE) +3997-180294-0001-1801: YES BUT IF I SHOULD ALREADY ASK FOR SOMETHING WHAT +3997-180294-0002-1802: WELL DO IT FOR ME FOR I SWEAR TO YOU (THAT I->THY) DON'T LOVE YOU AS THE OTHERS HAVE LOVED YOU +3997-180294-0003-1803: THERE ARE BOLTS (ON->IN) THE DOOR WRETCH +3997-180294-0004-1804: I DON'T KNOW HOW IT IS BUT IT SEEMS TO ME AS IF I DO +3997-180294-0005-1805: NOW GO I CAN'T KEEP MY EYES OPEN +3997-180294-0006-1806: IT (SEEMED->SEEMS) TO ME AS IF THIS SLEEPING CITY (BELONGED->BELONGS) TO ME I SEARCHED MY MEMORY FOR THE NAMES OF THOSE WHOSE HAPPINESS I HAD ONCE ENVIED AND I COULD NOT RECALL ONE WITHOUT FINDING MYSELF THE HAPPIER +3997-180294-0007-1807: EDUCATION FAMILY FEELING THE SENSE OF DUTY THE FAMILY ARE STRONG SENTINELS BUT THERE ARE NO SENTINELS SO VIGILANT AS NOT TO BE DECEIVED BY A GIRL OF SIXTEEN TO WHOM NATURE BY THE VOICE OF THE MAN SHE LOVES GIVES THE FIRST (COUNSELS->COUNSEL) OF LOVE ALL THE MORE (ARDENT->ARDENTS) BECAUSE THEY SEEM SO PURE +3997-180294-0008-1808: THE MORE (A->*) GIRL BELIEVES IN GOODNESS THE MORE EASILY WILL SHE GIVE WAY IF NOT TO HER LOVER AT LEAST TO LOVE FOR (BEING->BE) WITHOUT MISTRUST SHE IS WITHOUT FORCE AND TO WIN HER LOVE (IS->AS) A TRIUMPH THAT CAN BE GAINED BY ANY YOUNG (MAN->MEN) OF FIVE AND TWENTY SEE HOW YOUNG GIRLS ARE WATCHED AND GUARDED +3997-180294-0009-1809: THEN HOW SURELY MUST THEY DESIRE THE WORLD WHICH IS HIDDEN FROM THEM HOW (SURELY->TRULY) MUST THEY FIND IT TEMPTING HOW SURELY MUST THEY (LISTEN->LISTENED) TO THE FIRST VOICE WHICH COMES TO TELL ITS SECRETS THROUGH THEIR BARS AND BLESS THE HAND WHICH (*->HE) IS THE FIRST TO RAISE A CORNER OF THE (MYSTERIOUS->MYSTERY) VEIL +3997-180294-0010-1810: WITH THEM THE BODY HAS WORN OUT THE SOUL THE SENSES (HAVE->HALF) BURNED UP THE HEART DISSIPATION HAS BLUNTED THE FEELINGS +3997-180294-0011-1811: THEY LOVE BY PROFESSION AND NOT BY INSTINCT +3997-180294-0012-1812: WHEN A CREATURE WHO HAS ALL HER PAST TO REPROACH HERSELF WITH IS TAKEN ALL AT ONCE BY A PROFOUND SINCERE IRRESISTIBLE LOVE OF WHICH SHE HAD NEVER FELT HERSELF CAPABLE WHEN SHE HAS CONFESSED HER LOVE HOW ABSOLUTELY THE MAN WHOM SHE LOVES DOMINATES HER +3997-180294-0013-1813: THEY KNOW NOT WHAT PROOF TO GIVE +3997-180294-0014-1814: IN ORDER TO DISTURB THE (LABOURERS->LABORERS) IN THE (FIELD->FIELDS) WAS ONE DAY DEVOURED BY A WOLF BECAUSE THOSE WHOM HE HAD SO OFTEN DECEIVED NO LONGER BELIEVED IN HIS CRIES FOR HELP +3997-180294-0015-1815: (IT->THIS) IS THE SAME WITH THESE UNHAPPY WOMEN WHEN (THEY LOVE->HE LOVED) SERIOUSLY +3997-180294-0016-1816: BUT WHEN THE MAN WHO INSPIRES THIS REDEEMING LOVE IS GREAT ENOUGH IN SOUL TO RECEIVE IT 
WITHOUT REMEMBERING THE PAST WHEN HE GIVES HIMSELF UP TO IT WHEN IN SHORT HE LOVES AS HE IS LOVED THIS MAN DRAINS AT ONE DRAUGHT ALL EARTHLY EMOTIONS AND AFTER SUCH A LOVE HIS HEART WILL BE CLOSED TO EVERY OTHER +3997-180294-0017-1817: BUT TO RETURN TO THE FIRST DAY OF MY (LIAISON->LEAR SONG) +3997-180294-0018-1818: WHEN I REACHED HOME I WAS IN A STATE OF (MAD->MADGE) GAIETY +3997-180294-0019-1819: THE WOMAN BECOMES THE MAN'S MISTRESS AND LOVES HIM +3997-180294-0020-1820: HOW WHY +3997-180294-0021-1821: MY WHOLE BEING WAS EXALTED INTO JOY AT THE MEMORY OF THE WORDS WE HAD EXCHANGED DURING THAT FIRST NIGHT +3997-180294-0022-1822: HERE ARE MY ORDERS TO NIGHT AT THE VAUDEVILLE +3997-180294-0023-1823: (COME->CALM) DURING THE THIRD (ENTR'ACTE->ENTRACT) +3997-180294-0024-1824: THE BOXES FILLED ONE AFTER ANOTHER +3997-180294-0025-1825: ONLY ONE (REMAINED->REMAINS) EMPTY THE STAGE BOX +3997-180294-0026-1826: AT THE BEGINNING OF THE THIRD ACT I HEARD THE DOOR OF THE BOX ON WHICH MY EYES HAD BEEN ALMOST CONSTANTLY FIXED OPEN AND MARGUERITE APPEARED +3997-180294-0027-1827: (DID->DOES) SHE (LOVE->LOVED) ME ENOUGH TO BELIEVE THAT THE MORE BEAUTIFUL SHE LOOKED THE HAPPIER I SHOULD BE +3997-180294-0028-1828: WHAT IS THE MATTER WITH YOU TO NIGHT SAID MARGUERITE RISING AND COMING TO THE BACK OF THE BOX AND KISSING ME ON THE FOREHEAD +3997-180294-0029-1829: (YOU->HE) SHOULD GO TO BED SHE REPLIED WITH THAT (IRONICAL->IRONIC) AIR WHICH WENT SO WELL WITH HER DELICATE AND WITTY FACE +3997-180294-0030-1830: WHERE AT HOME +3997-180294-0031-1831: YOU STILL LOVE ME CAN YOU ASK +3997-180294-0032-1832: BECAUSE YOU DON'T LIKE SEEING HIM +3997-180294-0033-1833: (NONETHELESS->NONE THE LESS) I WAS VERY UNHAPPY ALL THE REST OF THE EVENING AND WENT AWAY VERY SADLY AFTER HAVING SEEN PRUDENCE THE COUNT AND MARGUERITE GET INTO THE CARRIAGE WHICH WAS (WAITING->WINNING) FOR THEM AT THE DOOR +3997-180297-0000-1834: I HAVE NOT COME TO HINDER YOU FROM LEAVING PARIS +3997-180297-0001-1835: YOU (IN->AND) THE WAY MARGUERITE BUT HOW +3997-180297-0002-1836: WELL YOU MIGHT HAVE HAD A WOMAN HERE SAID PRUDENCE AND IT WOULD HARDLY HAVE BEEN AMUSING FOR HER TO SEE TWO MORE ARRIVE +3997-180297-0003-1837: DURING THIS REMARK MARGUERITE LOOKED AT ME ATTENTIVELY +3997-180297-0004-1838: MY DEAR PRUDENCE I ANSWERED YOU DO NOT KNOW WHAT YOU ARE SAYING +3997-180297-0005-1839: YES BUT BESIDES NOT WISHING TO PUT YOU OUT I WAS SURE THAT IF YOU CAME AS FAR AS MY DOOR YOU WOULD WANT TO COME UP AND AS I COULD NOT LET YOU I DID NOT WISH TO LET YOU GO AWAY BLAMING ME FOR SAYING NO +3997-180297-0006-1840: BECAUSE I AM WATCHED AND THE LEAST SUSPICION MIGHT (DO->TO) ME THE GREATEST HARM +3997-180297-0007-1841: IS THAT REALLY THE ONLY REASON +3997-180297-0008-1842: IF THERE WERE ANY OTHER I WOULD TELL YOU FOR WE ARE NOT TO HAVE ANY SECRETS FROM ONE ANOTHER NOW +3997-180297-0009-1843: (HONESTLY->ON THE SUIT) DO YOU CARE FOR ME A LITTLE A GREAT DEAL +3997-180297-0010-1844: I FANCIED FOR A MOMENT THAT I MIGHT GIVE MYSELF THAT HAPPINESS FOR SIX MONTHS YOU WOULD NOT HAVE IT YOU INSISTED ON KNOWING THE MEANS +3997-180297-0011-1845: WELL GOOD HEAVENS THE MEANS WERE EASY ENOUGH TO GUESS +3997-180297-0012-1846: I LISTENED AND I GAZED AT MARGUERITE WITH ADMIRATION +3997-180297-0013-1847: WHEN (I->THEY) THOUGHT THAT THIS MARVELLOUS CREATURE WHOSE FEET I HAD ONCE LONGED TO KISS WAS WILLING TO LET ME TAKE MY PLACE IN HER THOUGHTS (MY PART->BY PARTS) IN HER LIFE AND THAT I WAS NOT YET CONTENT WITH WHAT SHE GAVE ME I ASKED IF MAN'S DESIRE (HAS->HAD) INDEED LIMITS WHEN 
SATISFIED AS PROMPTLY AS MINE HAD BEEN IT REACHED AFTER SOMETHING FURTHER +3997-180297-0014-1848: TRULY SHE CONTINUED WE POOR CREATURES OF CHANCE HAVE FANTASTIC (DESIRES->DESIRE) AND INCONCEIVABLE LOVES +3997-180297-0015-1849: WE ARE NOT ALLOWED TO HAVE HEARTS UNDER PENALTY OF BEING HOOTED DOWN AND OF RUINING OUR CREDIT +3997-180297-0016-1850: WE NO LONGER BELONG TO OURSELVES +3997-180297-0017-1851: WE STAND FIRST IN THEIR SELF ESTEEM LAST IN THEIR ESTEEM +3997-180297-0018-1852: NEVER (DO THEY->DID HE) GIVE YOU ADVICE WHICH IS NOT LUCRATIVE +3997-180297-0019-1853: IT MEANS LITTLE ENOUGH TO THEM THAT WE SHOULD HAVE TEN LOVERS EXTRA AS LONG AS THEY GET DRESSES OR A BRACELET OUT OF THEM AND THAT THEY CAN DRIVE (IN OUR CARRIAGE->AND ARE PARISH) FROM TIME TO TIME OR COME TO OUR BOX AT THE (THEATRE->FUTURE) +3997-180297-0020-1854: SUCH A MAN I FOUND IN THE DUKE BUT THE DUKE IS OLD AND (*->THE) OLD AGE NEITHER PROTECTS NOR CONSOLES +3997-180297-0021-1855: I THOUGHT I COULD ACCEPT THE LIFE WHICH HE OFFERED ME (BUT->OR) WHAT WOULD YOU HAVE +3997-180297-0022-1856: WHAT I LOVED IN YOU WAS NOT THE MAN WHO WAS BUT THE MAN WHO WAS GOING TO BE +3997-180297-0023-1857: MARGUERITE (TIRED->HIRED) OUT WITH THIS LONG CONFESSION THREW HERSELF BACK ON THE SOFA AND TO STIFLE A SLIGHT COUGH (PUT->PULL) UP HER HANDKERCHIEF TO HER LIPS AND FROM THAT TO HER EYES +3997-180297-0024-1858: MARGUERITE DO WITH ME AS YOU WILL I AM YOUR SLAVE YOUR DOG BUT IN THE NAME OF HEAVEN TEAR UP THE LETTER WHICH I WROTE TO YOU AND DO NOT MAKE ME LEAVE YOU TO MORROW IT WOULD KILL ME +3997-180297-0025-1859: MARGUERITE DREW THE LETTER FROM HER BOSOM AND HANDING IT TO ME WITH A SMILE OF INFINITE SWEETNESS SAID +3997-180297-0026-1860: HERE IT IS I HAVE BROUGHT IT BACK +3997-180297-0027-1861: I (TORE THE->TOILED A) LETTER INTO FRAGMENTS AND KISSED WITH TEARS THE HAND THAT GAVE IT TO ME +3997-180297-0028-1862: LOOK HERE PRUDENCE DO YOU KNOW WHAT HE WANTS SAID MARGUERITE +3997-180297-0029-1863: HE WANTS YOU TO FORGIVE HIM +3997-180297-0030-1864: ONE HAS (TO->TWO) BUT HE WANTS MORE THAN THAT WHAT THEN +3997-180297-0031-1865: I EMBRACED MARGUERITE UNTIL SHE WAS ALMOST STIFLED +3997-182399-0000-1779: (OL MISTAH->ALL MISTER) BUZZARD GRINNED +3997-182399-0001-1780: THIS SOUNDED LIKE ANOTHER STORY +3997-182399-0002-1781: HE WAS CURIOUS ABOUT THAT BLACK HEADED COUSIN OF (OL MISTAH->ALL MISTER) BUZZARD VERY CURIOUS INDEED +3997-182399-0003-1782: ANYWAY HE WOULD FIND OUT +3997-182399-0004-1783: PLEASE MISTER (BUZZARD->BOZARD) PLEASE TELL US THE STORY HE BEGGED +3997-182399-0005-1784: NOW (OL MISTAH->ALL MISTER) BUZZARD IS NATURALLY GOOD NATURED AND ACCOMMODATING AND WHEN PETER BEGGED SO HARD HE JUST COULDN'T FIND IT IN HIS HEART TO REFUSE +3997-182399-0006-1785: WAY BACK IN THE DAYS WHEN GRANDPAP BUZZARD HAD HIS (LIL->LITTLE) FALLING (OUT->ON) WITH (OL->OLD) KING EAGLE AND DONE (FLY->FLIES) SO HIGH HE (SCO'TCH->SCORCHED) THE FEATHERS (OFFEN->OFF IN) HIS (HAID->HEAD) HE HAD A COUSIN DID (GRANDPAP->GRANDPA) BUZZARD AND THIS COUSIN WAS (JES->JUST) NATURALLY LAZY AND NO COUNT +3997-182399-0007-1786: LIKE MOST NO COUNT PEOPLE HE USED TO MAKE A REGULAR (NUISANCE->NOTIONS) OF HISSELF POKING HIS NOSE INTO (EV'YBODY'S->EVERYBODY'S) BUSINESS AND NEVER TENDING TO HIS OWN +3997-182399-0008-1787: WASN'T ANYTHING GOING ON THAT THIS TRIFLING MEMBER OF THE BUZZARD (FAM'LY->FAMILY) DIDN'T FIND OUT ABOUT (AND->A) MEDDLE IN HE COULD ASK (MO->MORE) QUESTIONS THAN PETER RABBIT CAN (AN->AND) ANYBODY THAT CAN DO THAT HAS GOT TO ASK A LOT +3997-182399-0009-1788: EVERYBODY 
LOOKED AT PETER AND LAUGHED +3997-182399-0010-1789: SO WE (UNS SIT->UNSTEAD) ON THE CHIMNEY TOPS WHENEVER (OL->OLD) JACK FROST GETS (TO->THE) STRAYING DOWN WHERE HE HAVE NO BUSINESS +3997-182399-0011-1790: ONE DAY THIS NO COUNT TRIFLING COUSIN OF (GRANDPAP->GRANDPA) BUZZARD GET COLD IN HIS FEET +3997-182399-0012-1791: IT WAS ON (A LIL OL->THE LITTLE OLD) HOUSE A (LIL OL->LITTLE OLD) TUMBLE DOWN HOUSE +3997-182399-0013-1792: WHY HE (JES->JUST) STRETCH HIS (FOOL HAID->FULL HEAD) AS FAR DOWN (THAT->THE) CHIMNEY AS HE CAN (AN->AND) LISTEN (AN->AND) LISTEN +3997-182399-0014-1793: BUT HE DON'T MIND THAT +3997-182399-0015-1794: (WILL YO' ALLS->WELL YOU ALL) PLEASE SPEAK A (LIL->LITTLE) LOUDER HE (HOLLER DOWN->HOLLERED ON) THE CHIMNEY (JES->JUST) LIKE THAT +3997-182399-0016-1795: YES (SAH->SAD) SHE (SHO'LY->SURELY YOU) WAS (PLUMB->PLUM) SCARED +3997-182399-0017-1796: THEY (LIKE->LIKED) TO CHOKE THAT NO (COUNT BUZZARD->COMPASSER) TO DEATH +3997-182399-0018-1797: WHEN HE GET HOME HE TRY (AN->AND) TRY TO BRUSH THAT (SOOT->SUIT) OFF BUT IT DONE GET INTO THE SKIN (AN->AND) IT STAY THERE +3997-182399-0019-1798: A LITTLE SIGH OF SATISFACTION WENT (AROUND->ROUND) THE CIRCLE OF LISTENERS +3997-182399-0020-1799: IT WAS JUST AS GOOD AS ONE OF GRANDFATHER (FROG'S->FROGS) +4198-12259-0000-203: DRAW REACH FILL MIX GIVE IT ME WITHOUT WATER +4198-12259-0001-204: SO MY FRIEND SO WHIP ME OFF THIS GLASS NEATLY BRING ME HITHER SOME CLARET A FULL WEEPING GLASS TILL IT RUN OVER +4198-12259-0002-205: A CESSATION AND TRUCE WITH THIRST +4198-12259-0003-206: YOU HAVE (CATCHED->CAST) A COLD GAMMER YEA FORSOOTH SIR +4198-12259-0004-207: BY THE (BELLY->VALLEY) OF (SANCT->SAINT) BUFF LET US TALK OF OUR DRINK I NEVER DRINK (BUT AT->WITHOUT) MY HOURS LIKE THE POPE'S MULE +4198-12259-0005-208: WHICH WAS FIRST (THIRST OR->THOSE) DRINKING +4198-12259-0006-209: WHAT IT SEEMS I DO NOT DRINK BUT (BY->BUY) AN ATTORNEY +4198-12259-0007-210: DRINK ALWAYS AND YOU SHALL NEVER DIE +4198-12259-0008-211: IF I DRINK NOT I AM A GROUND DRY GRAVELLED AND SPENT I AM (STARK->START) DEAD WITHOUT DRINK AND MY SOUL READY TO FLY INTO SOME MARSH (AMONGST->A MONTH'S) FROGS THE SOUL NEVER DWELLS IN A DRY PLACE DROUTH (KILLS->KILL) IT +4198-12259-0009-212: HE DRINKS (IN VAIN->THEIR VEIN) THAT (FEELS->FILLS) NOT THE PLEASURE OF IT +4198-12259-0010-213: IT IS ENOUGH TO BREAK BOTH (GIRDS->GUARDS) AND (PETREL->PETEL) +4198-12259-0011-214: WHAT DIFFERENCE IS THERE BETWEEN A BOTTLE AND A FLAGON +4198-12259-0012-215: BRAVELY AND WELL PLAYED UPON THE WORDS +4198-12259-0013-216: OUR FATHERS DRANK LUSTILY AND EMPTIED (THEIR CANS->THE AKANS) +4198-12259-0014-217: WELL (CACKED->CAGLED) WELL SUNG +4198-12259-0015-218: COME LET US DRINK WILL YOU SEND NOTHING TO THE RIVER +4198-12259-0016-219: I (DRINK->DRANK) NO MORE THAN (A SPONGE->HIS SPINES) +4198-12259-0017-220: I DRINK LIKE A (TEMPLAR KNIGHT->TENT LAWN NIGHT) +4198-12259-0018-221: AND I (TANQUAM SPONSUS->TEN QUALMS BONSES) +4198-12259-0019-222: AND I SICUT (TERRA SINE->TERRACE IN) AQUA +4198-12259-0020-223: GIVE ME A (SYNONYMON->SNYM) FOR A (GAMMON->GAMIN) OF BACON +4198-12259-0021-224: IT IS THE COMPULSORY OF DRINKERS IT IS A (PULLEY->POLY) +4198-12259-0022-225: A LITTLE RAIN (ALLAYS->A LAYS) A GREAT DEAL OF WIND LONG TIPPLING BREAKS (THE->THAT) THUNDER +4198-12259-0023-226: BUT IF THERE CAME SUCH LIQUOR (FROM->FOR) MY (BALLOCK WOULD->BALLAK WILL) YOU NOT WILLINGLY THEREAFTER SUCK THE (UDDER->UTTER) WHENCE IT ISSUED +4198-12259-0024-227: HERE PAGE FILL +4198-12259-0025-228: I APPEAL FROM THIRST AND DISCLAIM ITS 
(JURISDICTION->JURIS DIXON) +4198-12259-0026-229: I WAS WONT (HERETOFORE->HERE TOFORE) TO DRINK OUT ALL BUT NOW I LEAVE NOTHING +4198-12259-0027-230: (HEYDAY->HEY THEE) HERE (ARE TRIPES->A TRITE) FIT FOR (OUR SPORT->OURSPORT) AND IN EARNEST EXCELLENT (GODEBILLIOS->GO TO BE YOURS) OF THE DUN OX YOU KNOW WITH THE BLACK (STREAK->STREET) +4198-12259-0028-231: (O->OH) FOR GOD'S SAKE LET US (LASH->LAST) THEM SOUNDLY YET (THRIFTILY->DRIFTILY) +4198-12259-0029-232: SPARROWS (WILL->WOULD) NOT EAT UNLESS YOU (BOB->BOBBED) THEM ON THE TAIL NOR CAN I DRINK IF I BE NOT FAIRLY SPOKE TO +4198-12259-0030-233: (HO->OH) THIS (WILL BANG IT SOUNDLY->WAS BENNETT'S ARMY) +4198-12259-0031-234: BUT THIS (SHALL BANISH IT->OUR BANACY) UTTERLY +4198-12259-0032-235: LET US WIND OUR HORNS BY THE SOUND OF FLAGONS AND BOTTLES AND CRY ALOUD THAT WHOEVER HATH LOST HIS THIRST COME (NOT->NIGH) HITHER TO SEEK IT +4198-12259-0033-236: THE GREAT GOD MADE THE PLANETS AND WE MAKE THE PLATTERS NEAT +4198-12259-0034-237: APPETITE COMES WITH EATING SAYS (ANGESTON->ANGERSON) BUT THE (THIRST->DOZ) GOES AWAY WITH DRINKING +4198-12259-0035-238: I HAVE A REMEDY AGAINST THIRST QUITE CONTRARY TO THAT WHICH IS GOOD AGAINST (THE BITING->ABIDING) OF A MAD DOG +4198-12259-0036-239: (WHITE WINE->WHITEWAY) HERE WINE BOYS +4198-12259-0037-240: (O LACHRYMA CHRISTI->OH LACK REMAR CHRISTIE) IT IS OF THE BEST GRAPE +4198-12259-0038-241: (I'FAITH->I FAITH) PURE GREEK GREEK O THE FINE WHITE WINE +4198-12259-0039-242: THERE IS NO ENCHANTMENT NOR CHARM THERE EVERY ONE OF YOU HATH SEEN IT +4198-12259-0040-243: MY (PRENTICESHIP->PRENTICE IT) IS OUT (I AM->I'M) A FREE MAN AT THIS TRADE +4198-12259-0041-244: (I SHOULD SAY->AS YOU SEE) MASTER (PAST->PASS) +4198-12259-0042-245: (O->OH) THE DRINKERS THOSE THAT ARE A DRY (O POOR->OH PORT) THIRSTY SOULS +4198-12259-0043-246: CLEAR OFF NEAT SUPERNACULUM +4198-12281-0000-187: ALTHOUGH THE PLAGUE WAS THERE IN THE MOST PART OF ALL THE HOUSES THEY NEVERTHELESS ENTERED EVERYWHERE THEN PLUNDERED AND CARRIED AWAY ALL THAT WAS WITHIN AND YET FOR ALL THIS NOT ONE OF THEM TOOK ANY HURT WHICH IS A MOST WONDERFUL CASE +4198-12281-0001-188: I BESEECH YOU THINK UPON IT +4198-12281-0002-189: NEVERTHELESS AT ALL (ADVENTURES->VENTURES) THEY RANG THE BELLS (AD CAPITULUM CAPITULANTES->AT CAPITULAM CAPITULAT DAYS) +4198-12281-0003-190: BY THE VIRTUE OF GOD WHY DO NOT YOU SING (PANNIERS->PENNYERS) FAREWELL VINTAGE IS DONE +4198-12281-0004-191: BY THE BELLY OF (SANCT->SAINT) JAMES WHAT SHALL WE POOR DEVILS DRINK THE WHILE +4198-12281-0005-192: LORD GOD (DA MIHI POTUM->THOU ME HE POT EM) +4198-12281-0006-193: LET HIM BE CARRIED TO PRISON FOR TROUBLING THE DIVINE SERVICE +4198-12281-0007-194: WHEREFORE IS IT THAT OUR DEVOTIONS WERE INSTITUTED TO BE SHORT IN THE TIME OF HARVEST AND VINTAGE AND LONG IN THE ADVENT (AND->IN) ALL THE WINTER +4198-12281-0008-195: HARK YOU MY MASTERS YOU THAT LOVE THE WINE (COP'S->COPSE) BODY FOLLOW ME FOR (SANCT ANTHONY->SAINT AUNT ANY) BURN ME AS FREELY AS A FAGGOT (IF->*) THEY GET LEAVE TO TASTE ONE DROP OF THE LIQUOR THAT WILL NOT NOW COME AND FIGHT FOR RELIEF OF THE VINE +4198-12281-0009-196: TO OTHERS AGAIN HE UNJOINTED THE (SPONDYLES->SPAWN MULES) OR KNUCKLES OF THE NECK (DISFIGURED->THIS FIGURED) THEIR CHAPS GASHED THEIR FACES MADE THEIR CHEEKS HANG FLAPPING ON THEIR CHIN AND SO SWINGED AND (BALAMMED->BLAMMED) THEM THAT THEY FELL DOWN BEFORE HIM LIKE HAY BEFORE (A MOWER->HIM OVER) +4198-12281-0010-197: TO SOME (WITH A->WOULD THEY) SMART (SOUSE->SOUS) ON (THE EPIGASTER->THEIR EBERGASTER) HE 
(WOULD->WILL) MAKE (THEIR MIDRIFF SWAG->THEM MIDRIFTS WAG) THEN REDOUBLING THE BLOW GAVE THEM SUCH A (HOMEPUSH->HOME PUSH) ON THE NAVEL THAT HE MADE THEIR PUDDINGS TO GUSH OUT +4198-12281-0011-198: BELIEVE THAT IT WAS THE MOST HORRIBLE SPECTACLE THAT EVER (ONE->WON) SAW +4198-12281-0012-199: (O->ALL) THE HOLY LADY (NYTOUCH->KNIGHT) SAID ONE THE GOOD (SANCTESS->SANCTIS) O OUR LADY OF (SUCCOURS->SECURS) SAID ANOTHER HELP HELP +4198-12281-0013-200: SOME DIED WITHOUT SPEAKING OTHERS SPOKE WITHOUT DYING SOME DIED IN SPEAKING OTHERS SPOKE (IN->AND) DYING +4198-12281-0014-201: CAN YOU TELL WITH WHAT INSTRUMENTS THEY DID IT +4198-12281-0015-202: IN THE MEANTIME (FRIAR->FRIED) JOHN WITH HIS FORMIDABLE (BATON->BUT TIME) OF THE CROSS GOT TO THE BREACH WHICH THE ENEMIES HAD MADE AND THERE STOOD TO SNATCH UP THOSE THAT ENDEAVOURED TO ESCAPE +4198-61336-0000-247: IT IS SIGNIFICANT TO NOTE IN THIS CONNECTION THAT THE NEW KING WAS AN UNSWERVING ADHERENT OF THE CULT OF (ASHUR->AYESHA) BY THE (ADHERENTS->ADHERENCE) OF WHICH HE WAS PROBABLY STRONGLY SUPPORTED +4198-61336-0001-248: AT THE BEGINNING OF HIS REIGN THERE WAS MUCH SOCIAL DISCONTENT AND SUFFERING +4198-61336-0002-249: WELL MIGHT (SHARDURIS->SHOW DORIS) EXCLAIM IN THE WORDS OF THE PROPHET WHERE IS THE KING OF (ARPAD->ARBED) +4198-61336-0003-250: (TIGLATH PILESER->DICK LAUGHED PLEASURE) HOWEVER CROSSED THE (EUPHRATES->EUPHATEES) AND MOVING NORTHWARD DELIVERED AN UNEXPECTED ATTACK ON THE (URARTIAN->GRACIAN) ARMY (IN QUMMUKH->AND KUMAK) +4198-61336-0004-251: A FIERCE BATTLE ENSUED AND ONE OF (ITS->HIS) DRAMATIC INCIDENTS WAS A SINGLE COMBAT BETWEEN THE RIVAL KINGS +4198-61336-0005-252: AN ATTEMPT WAS MADE TO CAPTURE KING (SHARDURIS->SHORDURUS) WHO (LEAPT->LEAPED) FROM HIS CHARIOT AND MADE HASTY ESCAPE ON HORSEBACK HOTLY PURSUED IN THE GATHERING DARKNESS BY AN ASSYRIAN CONTINGENT OF CAVALRY +4198-61336-0006-253: DESPITE THE (BLOW->BLUE) DEALT AGAINST (URARTU->URITU) ASSYRIA DID NOT IMMEDIATELY REGAIN POSSESSION OF NORTH SYRIA +4198-61336-0007-254: THE SHIFTY (MATI ILU->MAN TO ILIU) EITHER CHERISHED THE HOPE THAT (SHARDURIS->SHALL DORRIS) WOULD RECOVER STRENGTH AND AGAIN (INVADE->IN VAIN) NORTH (SYRIA->ASSYRIA) OR THAT HE MIGHT HIMSELF ESTABLISH AN EMPIRE IN THAT REGION +4198-61336-0008-255: (TIGLATH PILESER->T GLASS BE LEISURE) HAD THEREFORE TO MARCH WESTWARD AGAIN +4198-61336-0009-256: FOR THREE YEARS HE CONDUCTED VIGOROUS CAMPAIGNS IN THE WESTERN LAND WHERE HE MET WITH VIGOROUS RESISTANCE +4198-61336-0010-257: (ARPAD->OUR PAD) WAS CAPTURED AND (MATI ILU->MET TO ILL YOU) DEPOSED AND PROBABLY PUT TO DEATH +4198-61336-0011-258: ONCE AGAIN THE HEBREWS CAME INTO CONTACT WITH (ASSYRIA->THE SYRIA) +4198-61336-0012-259: (ITS FALL MAY->IT'S FOR ME) NOT (HAVE->HAV) BEEN UNCONNECTED WITH THE TREND OF EVENTS IN ASSYRIA DURING THE CLOSING YEARS OF THE MIDDLE EMPIRE +4198-61336-0013-260: (JEHOASH->JO ASH) THE GRANDSON OF (JEHU->JEHOV) HAD ACHIEVED SUCCESSES IN CONFLICT WITH DAMASCUS +4198-61336-0014-261: SIX MONTHS (AFTERWARDS->AFTERWARD) HE WAS ASSASSINATED BY (SHALLUM->CELEM) +4198-61336-0015-262: THIS USURPER HELD SWAY AT SAMARIA FOR ONLY A MONTH +4198-61336-0016-263: NO RESISTANCE WAS POSSIBLE ON THE PART OF (MENAHEM->MANY HIM) THE USURPER (WHO WAS->WHOSE) PROBABLY READY TO WELCOME THE ASSYRIAN CONQUEROR SO THAT BY ARRANGING AN ALLIANCE HE MIGHT SECURE HIS OWN POSITION +4198-61336-0017-264: (TIGLATH PILESER->TAKE LAST PLEASE HER) NEXT OPERATED AGAINST THE (MEDIAN->MEDIUM) AND OTHER HILL TRIBES IN THE (NORTH EAST->NORTHEAST) +4198-61336-0018-265: HE OVERTHREW 
BUILDINGS DESTROYED ORCHARDS AND TRANSPORTED TO NINEVEH THOSE OF THE INHABITANTS HE HAD NOT PUT TO THE SWORD WITH ALL THE LIVE STOCK HE COULD LAY HANDS ON +4198-61336-0019-266: (THUS->THIS) WAS (URARTU->URA TO) CRIPPLED AND HUMILIATED IT NEVER REGAINED ITS (FORMER->FORM OF) PRESTIGE AMONG THE NORTHERN STATES +4198-61336-0020-267: IN THE FOLLOWING YEAR (TIGLATH PILESER->TIC LAUGH BELLEGER) RETURNED TO SYRIA +4198-61336-0021-268: (MENAHEM->MANY HIM) KING OF ISRAEL HAD DIED AND WAS SUCCEEDED BY HIS SON (PEKAHIAH->PECAH) +4198-61336-0022-269: (JUDAH->JULIA) HAD TAKEN ADVANTAGE OF THE DISTURBED CONDITIONS IN ISRAEL TO ASSERT ITS INDEPENDENCE +4198-61336-0023-270: HE CONDEMNED ISRAEL FOR ITS IDOLATRIES AND CRIED +4198-61336-0024-271: FOR (THUS->THIS) SAITH THE LORD UNTO THE HOUSE OF ISRAEL SEEK YE ME (AND->A) YE (SHALL->TO) LIVE HAVE YE OFFERED UNTO ME SACRIFICES AND OFFERINGS IN THE WILDERNESS FORTY YEARS (O->OR) HOUSE OF ISRAEL +4198-61336-0025-272: THE REMNANT OF THE PHILISTINES SHALL PERISH +4198-61336-0026-273: ISRAEL WAS ALSO DEALT WITH +4198-61336-0027-274: HE SWEPT THROUGH ISRAEL LIKE A HURRICANE +4198-61336-0028-275: THE (PHILISTINES->FURTHER STEAMS) AND THE ARABIANS OF THE DESERT WERE ALSO SUBDUED +4198-61336-0029-276: HE INVADED BABYLONIA +4198-61336-0030-277: (UKINZER->A KINDRED) TOOK REFUGE IN HIS CAPITAL SHAPIA WHICH HELD OUT SUCCESSFULLY ALTHOUGH THE SURROUNDING COUNTRY WAS RAVAGED AND DESPOILED +4294-14317-0000-1866: AS I THOUGHT THAT THIS WAS DUE TO SOME FAULT IN THE EARTH I WANTED TO MAKE THESE FIRST EXPERIMENTS BEFORE (I->AND) UNDERTOOK MY PERSEUS +4294-14317-0001-1867: WHEN I SAW (THAT->*) THIS BUST CAME OUT SHARP AND CLEAN I (SET->SAID) AT ONCE TO CONSTRUCT A LITTLE FURNACE IN THE WORKSHOP ERECTED FOR ME BY THE DUKE AFTER MY OWN PLANS AND DESIGN IN THE HOUSE WHICH THE DUKE HAD GIVEN ME +4294-14317-0002-1868: IT WAS AN EXTREMELY DIFFICULT TASK AND I WAS ANXIOUS TO OBSERVE ALL THE NICETIES OF ART WHICH I HAD LEARNED SO AS NOT TO LAPSE INTO SOME ERROR +4294-14317-0003-1869: I IN MY TURN FEEL THE SAME DESIRE AND HOPE TO PLAY MY PART LIKE THEM THEREFORE MY LORD GIVE ME THE LEAVE TO GO +4294-14317-0004-1870: BUT BEWARE OF LETTING (BANDINELLO->BEND NELLO) QUIT YOU RATHER BESTOW UPON HIM ALWAYS MORE THAN HE DEMANDS FOR IF HE GOES INTO FOREIGN PARTS HIS IGNORANCE IS SO PRESUMPTUOUS THAT HE IS JUST THE MAN TO DISGRACE OUR MOST ILLUSTRIOUS SCHOOL +4294-14317-0005-1871: I (ASK->ASKED) NO FURTHER REWARD FOR MY LABOURS UP TO THIS TIME THAN THE GRACIOUS FAVOUR OF YOUR MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0006-1872: THEN I THANKED HIM AND SAID I HAD NO GREATER DESIRE THAN TO SHOW THOSE ENVIOUS FOLK THAT I HAD IT IN ME TO EXECUTE THE PROMISED WORK +4294-14317-0007-1873: I HAD BETTER LOOK TO MY CONDUCT FOR IT HAD COME TO HIS EARS THAT I RELIED UPON HIS FAVOUR TO TAKE IN FIRST ONE MAN AND THEN ANOTHER +4294-14317-0008-1874: I BEGGED HIS MOST ILLUSTRIOUS EXCELLENCY TO NAME A SINGLE PERSON WHOM I HAD EVER TAKEN IN +4294-14317-0009-1875: I SAID MY LORD I THANK YOU AND BEG YOU TO CONDESCEND SO FAR AS TO LISTEN TO FOUR WORDS IT IS TRUE THAT HE LENT ME A PAIR OF OLD SCALES TWO (ANVILS->AMBILS) AND THREE LITTLE HAMMERS WHICH ARTICLES I BEGGED HIS (WORKMAN GIORGIO DA CORTONA->WORKMEN GEORGIO DECORTUNA) FIFTEEN DAYS AGO TO FETCH BACK +4294-14317-0010-1876: (GIORGIO->GEORGIO) CAME FOR THEM (HIMSELF->HIS HEALTH) +4294-14317-0011-1877: I HOPE TO PROVE ON WHAT ACCOUNT THAT SCOUNDREL TRIES TO BRING ME INTO DISGRACE +4294-14317-0012-1878: WHEN HE HAD HEARD THIS SPEECH THE DUKE ROSE UP IN ANGER AND SENT 
FOR BERNARDONE WHO WAS FORCED TO TAKE FLIGHT AS FAR AS VENICE HE AND ANTONIO (LANDI->LANDY) WITH HIM +4294-14317-0013-1879: YOU HAD BETTER PUT THIS TO THE PROOF AND I WILL GO AT ONCE TO THE (BARGELLO->BARGENO) +4294-14317-0014-1880: I AM WILLING TO ENTER INTO COMPETITION WITH THE ANCIENTS AND FEEL ABLE TO SURPASS THEM FOR SINCE THOSE EARLY DAYS IN WHICH I MADE THE MEDALS OF POPE CLEMENT I HAVE LEARNED SO MUCH THAT I CAN NOW PRODUCE FAR BETTER PIECES OF THE KIND I THINK I CAN ALSO OUTDO THE COINS I STRUCK FOR DUKE (ALESSANDRO->ALISANDRO) WHICH ARE STILL HELD IN HIGH ESTEEM IN LIKE MANNER I COULD MAKE FOR YOU LARGE PIECES OF GOLD AND SILVER PLATE AS I DID SO OFTEN FOR THAT NOBLE MONARCH KING FRANCIS OF FRANCE THANKS TO THE GREAT CONVENIENCES HE ALLOWED ME WITHOUT EVER LOSING TIME FOR THE EXECUTION OF COLOSSAL STATUES OR OTHER WORKS OF THE (SCULPTORS->SCULPTOR'S) CRAFT +4294-14317-0015-1881: AFTER SEVERAL MONTHS WERE WASTED AND (PIERO->PIERRE) WOULD NEITHER WORK NOR PUT MEN TO WORK UPON THE PIECE I MADE HIM GIVE IT BACK +4294-14317-0016-1882: AMONG ARTISTS CERTAIN (ENRAGED->ENRAGE) SCULPTORS LAUGHED AT ME AND CALLED ME THE NEW SCULPTOR +4294-14317-0017-1883: NOW I HOPE TO SHOW THEM THAT I AM AN OLD SCULPTOR IF GOD SHALL GRANT ME THE BOON OF FINISHING MY PERSEUS FOR THAT NOBLE PIAZZA OF HIS MOST ILLUSTRIOUS EXCELLENCY +4294-14317-0018-1884: HAVING THIS EXCELLENT RESOLVE IN HEART I REACHED MY HOME +4294-32859-0000-1942: WYLDER WAS RATHER SURLY AFTER THE LADIES HAD FLOATED AWAY FROM THE SCENE AND HE DRANK HIS LIQUOR DOGGEDLY +4294-32859-0001-1943: IT WAS HIS FANCY I SUPPOSE TO REVIVE CERTAIN SENTIMENTAL RELATIONS WHICH HAD IT MAY BE ONCE EXISTED BETWEEN HIM AND MISS LAKE AND HE WAS A PERSON OF THAT COMBATIVE TEMPERAMENT THAT MAGNIFIES AN OBJECT IN PROPORTION AS ITS PURSUIT IS THWARTED +4294-32859-0002-1944: THE STORY OF FRIDOLIN AND (RETZCH'S->WRETCHES) PRETTY (OUTLINES->OUTLINE) +4294-32859-0003-1945: SIT DOWN BESIDE ME AND I'LL TELL YOU THE STORY +4294-32859-0004-1946: HE ASSISTED AT IT BUT TOOK NO PART AND IN FACT WAS LISTENING TO THAT OTHER CONVERSATION WHICH SOUNDED WITH ITS PLEASANT GABBLE AND LAUGHTER LIKE A LITTLE MUSICAL TINKLE OF BELLS IN THE DISTANCE +4294-32859-0005-1947: BUT HONEST MARK FORGOT THAT YOUNG LADIES DO NOT ALWAYS COME OUT QUITE ALONE AND JUMP UNASSISTED INTO THEIR VEHICLES +4294-35475-0000-1885: BUT THE MIDDLE (SON->SUN) WAS LITTLE AND LORN HE WAS NEITHER DARK NOR FAIR HE WAS NEITHER HANDSOME NOR STRONG +4294-35475-0001-1886: THROWING HIMSELF ON HIS KNEES BEFORE THE KING HE CRIED (OH->O) ROYAL SIRE BESTOW UPON ME ALSO A SWORD AND A STEED THAT I MAY UP AND AWAY TO FOLLOW MY BRETHREN +4294-35475-0002-1887: BUT THE KING LAUGHED HIM TO SCORN THOU A SWORD HE QUOTH +4294-35475-0003-1888: IN SOOTH THOU SHALT HAVE ONE BUT IT SHALL BE ONE BEFITTING THY MAIDEN SIZE AND COURAGE (IF->IT) SO SMALL A WEAPON CAN BE FOUND IN ALL MY KINGDOM +4294-35475-0004-1889: FORTHWITH THE GRINNING (JESTER->GESTURE) BEGAN SHRIEKING WITH LAUGHTER SO THAT THE BELLS UPON HIS MOTLEY CAP WERE ALL SET A JANGLING +4294-35475-0005-1890: I DID BUT LAUGH TO THINK THE (SWORD->SORT) OF (ETHELRIED->EPLORRIED) HAD BEEN SO QUICKLY FOUND RESPONDED THE JESTER AND HE POINTED TO THE SCISSORS HANGING FROM THE TAILOR'S GIRDLE +4294-35475-0006-1891: ONE NIGHT AS HE LAY IN A DEEP FOREST (TOO->TWO) UNHAPPY TO SLEEP HE HEARD A NOISE NEAR AT HAND IN THE BUSHES +4294-35475-0007-1892: THOU SHALT HAVE THY LIBERTY HE CRIED EVEN THOUGH THOU SHOULDST (REND->RUN) ME IN PIECES THE MOMENT THOU ART FREE +4294-35475-0008-1893: (IT->HE) 
HAD (*->HID IT) SUDDENLY DISAPPEARED AND IN ITS PLACE STOOD A BEAUTIFUL FAIRY WITH FILMY WINGS WHICH SHONE LIKE RAINBOWS IN THE MOONLIGHT +4294-35475-0009-1894: AT THIS MOMENT THERE WAS A DISTANT RUMBLING AS OF THUNDER TIS THE OGRE CRIED THE FAIRY WE MUST HASTEN +4294-35475-0010-1895: SCISSORS GROW A GIANT'S HEIGHT AND SAVE US FROM THE OGRE'S MIGHT +4294-35475-0011-1896: HE COULD SEE THE OGRE STANDING POWERLESS TO HURT HIM ON THE OTHER SIDE OF THE CHASM AND GNASHING HIS TEETH EACH ONE OF WHICH WAS AS BIG AS A (MILLSTON->MILLSTONE) +4294-35475-0012-1897: THE SIGHT WAS SO TERRIBLE THAT HE TURNED ON HIS HEEL AND FLED AWAY AS FAST AS HIS FEET COULD CARRY HIM +4294-35475-0013-1898: THOU SHALT NOT BE LEFT A PRISONER IN THIS DISMAL SPOT WHILE I HAVE THE POWER TO HELP THEE +4294-35475-0014-1899: HE LIFTED THE SCISSORS AND WITH ONE STROKE DESTROYED THE WEB AND GAVE THE FLY (ITS FREEDOM->TO READ THEM) +4294-35475-0015-1900: A FAINT GLIMMER OF LIGHT ON THE OPPOSITE WALL SHOWS ME THE KEYHOLE +4294-35475-0016-1901: THE PRINCE SPENT ALL THE FOLLOWING TIME UNTIL MIDNIGHT TRYING TO THINK OF A SUITABLE VERSE TO SAY TO THE SCISSORS +4294-35475-0017-1902: AS HE UTTERED THE WORDS THE SCISSORS LEAPED OUT OF HIS HAND AND BEGAN TO CUT THROUGH THE WOODEN SHUTTERS AS EASILY AS THROUGH A CHEESE +4294-35475-0018-1903: IN (A->THE) VERY SHORT TIME THE PRINCE HAD CRAWLED THROUGH THE OPENING +4294-35475-0019-1904: WHILE HE STOOD LOOKING AROUND HIM IN BEWILDERMENT A FIREFLY (ALIGHTED->LIGHTED) ON HIS (ARM->HEART) FLASHING ITS LITTLE LANTERN IN THE PRINCE'S FACE IT CRIED THIS WAY MY FRIEND THE FLY SENT ME TO GUIDE YOU TO A PLACE OF SAFETY +4294-35475-0020-1905: WHAT IS TO BECOME OF ME CRIED THE POOR PEASANT +4294-35475-0021-1906: MY GRAIN MUST FALL (AND->IN) ROT IN THE FIELD FROM (OVERRIPENESS->OVER RIPENESS) BECAUSE I HAVE NOT THE STRENGTH TO RISE AND HARVEST IT THEN INDEED MUST WE ALL STARVE +4294-35475-0022-1907: THE (GRANDAME->GRAND DAME) WHOM HE SUPPLIED WITH FAGOTS THE MERCHANT WHOM HE RESCUED FROM ROBBERS THE KING'S (COUNCILLOR->COUNSELLOR) TO WHOM HE GAVE AID ALL BECAME HIS FRIENDS UP AND DOWN THE LAND (TO->*) BEGGAR OR LORD HOMELESS WANDERER OR HIGH BORN DAME HE GLADLY GAVE UNSELFISH SERVICE ALL UNSOUGHT AND SUCH AS HE HELPED STRAIGHTWAY BECAME HIS FRIENDS +4294-35475-0023-1908: TO HIM WHO COULD BRING HER BACK TO HER FATHER'S CASTLE SHOULD BE GIVEN THE THRONE AND KINGDOM AS WELL AS THE PRINCESS HERSELF SO FROM FAR AND NEAR INDEED FROM ALMOST EVERY COUNTRY UNDER THE SUN CAME KNIGHTS AND PRINCES TO FIGHT THE OGRE +4294-35475-0024-1909: AMONG THOSE WHO DREW BACK WERE (ETHELRIED'S->EPILRIED'S) BROTHERS THE THREE THAT WERE DARK AND THE THREE THAT WERE FAIR +4294-35475-0025-1910: BUT (ETHELRIED HEEDED->ETHELRED HE DID) NOT THEIR TAUNTS +4294-35475-0026-1911: SO THEY ALL CRIED OUT LONG AND LOUD LONG LIVE THE PRINCE PRINCE (CISEAUX->ISAU) +4294-9934-0000-1912: HE FELT (WHAT->WITH) THE EARTH MAY POSSIBLY FEEL AT THE MOMENT WHEN IT IS TORN OPEN WITH THE IRON IN ORDER THAT GRAIN MAY BE DEPOSITED WITHIN IT IT FEELS ONLY THE WOUND THE QUIVER OF THE GERM (AND->*) THE JOY OF THE FRUIT ONLY (ARRIVE->ARRIVES) LATER +4294-9934-0001-1913: HE HAD BUT JUST ACQUIRED A FAITH MUST HE THEN (REJECT IT->REJECTED) ALREADY +4294-9934-0002-1914: HE AFFIRMED TO HIMSELF THAT HE WOULD NOT HE DECLARED TO HIMSELF THAT HE WOULD NOT DOUBT AND HE BEGAN TO DOUBT IN SPITE OF HIMSELF +4294-9934-0003-1915: TO STAND BETWEEN TWO RELIGIONS FROM ONE OF WHICH YOU HAVE NOT AS YET EMERGED AND ANOTHER INTO WHICH YOU HAVE NOT YET ENTERED IS INTOLERABLE AND 
TWILIGHT IS PLEASING ONLY TO BAT LIKE SOULS +4294-9934-0004-1916: MARIUS WAS CLEAR EYED AND HE REQUIRED THE TRUE LIGHT +4294-9934-0005-1917: THE HALF LIGHTS OF DOUBT PAINED HIM +4294-9934-0006-1918: WHATEVER MAY HAVE BEEN HIS DESIRE TO REMAIN WHERE HE WAS HE COULD NOT (HALT->HELP) THERE HE WAS IRRESISTIBLY CONSTRAINED TO CONTINUE TO ADVANCE TO EXAMINE TO THINK TO MARCH FURTHER +4294-9934-0007-1919: HE FEARED AFTER HAVING TAKEN SO MANY STEPS WHICH HAD BROUGHT HIM NEARER TO HIS FATHER TO NOW TAKE A STEP WHICH SHOULD ESTRANGE HIM FROM THAT FATHER +4294-9934-0008-1920: HIS DISCOMFORT WAS AUGMENTED BY ALL THE REFLECTIONS WHICH OCCURRED TO HIM +4294-9934-0009-1921: IN THE TROUBLED STATE OF HIS CONSCIENCE HE NO LONGER THOUGHT OF CERTAIN SERIOUS SIDES OF EXISTENCE +4294-9934-0010-1922: THEY SOON ELBOWED HIM ABRUPTLY +4294-9934-0011-1923: REQUEST (COURFEYRAC->COURFEREK) TO COME AND TALK WITH ME SAID MARIUS +4294-9934-0012-1924: WHAT IS TO BECOME OF YOU SAID COURFEYRAC +4294-9934-0013-1925: WHAT ARE YOU GOING TO DO I DO NOT KNOW +4294-9934-0014-1926: SILVER GOLD HERE IT IS +4294-9934-0015-1927: YOU WILL THEN HAVE ONLY A PAIR OF TROUSERS A WAISTCOAT A HAT AND A COAT AND MY BOOTS +4294-9934-0016-1928: THAT WILL BE ENOUGH +4294-9934-0017-1929: NO IT IS NOT GOOD WHAT (WILL YOU->WE) DO AFTER THAT +4294-9934-0018-1930: DO YOU KNOW GERMAN NO +4294-9934-0019-1931: IT IS BADLY PAID WORK BUT ONE CAN LIVE BY IT +4294-9934-0020-1932: THE CLOTHES DEALER WAS SENT FOR +4294-9934-0021-1933: HE PAID TWENTY FRANCS FOR THE CAST OFF GARMENTS THEY WENT TO THE (WATCHMAKER'S->WATCHMAKERS) +4294-9934-0022-1934: HE BOUGHT THE WATCH FOR FORTY FIVE FRANCS +4294-9934-0023-1935: HELLO I HAD FORGOTTEN THAT SAID MARIUS +4294-9934-0024-1936: THE LANDLORD PRESENTED HIS BILL WHICH HAD TO BE PAID ON THE SPOT +4294-9934-0025-1937: I HAVE TEN FRANCS LEFT SAID MARIUS +4294-9934-0026-1938: THAT WILL BE SWALLOWING A TONGUE VERY FAST OR A HUNDRED SOUS VERY SLOWLY +4294-9934-0027-1939: ONE MORNING ON HIS RETURN FROM THE (LAW->LAST) SCHOOL MARIUS FOUND A LETTER FROM HIS AUNT AND THE SIXTY (PISTOLES->PISTOL) THAT IS TO SAY SIX HUNDRED FRANCS IN GOLD (IN->AND) A SEALED BOX +4294-9934-0028-1940: MARIUS SENT BACK (THE->FOR) THIRTY LOUIS TO HIS AUNT WITH (A->THE) RESPECTFUL LETTER IN WHICH HE STATED THAT HE HAD SUFFICIENT MEANS OF SUBSISTENCE AND THAT HE SHOULD BE ABLE THENCEFORTH TO SUPPLY ALL HIS NEEDS +4294-9934-0029-1941: AT THAT MOMENT HE HAD THREE FRANCS LEFT +4350-10919-0000-2716: HE PERCEIVED THAT IT WAS NO GOOD TALKING TO THE OLD MAN AND THAT THE PRINCIPAL PERSON IN THE HOUSE WAS THE MOTHER +4350-10919-0001-2717: BEFORE HER HE DECIDED TO SCATTER HIS PEARLS +4350-10919-0002-2718: THE PRINCESS WAS DISTRACTED AND DID NOT KNOW WHAT TO DO SHE FELT SHE HAD SINNED AGAINST KITTY +4350-10919-0003-2719: WELL DOCTOR DECIDE OUR (FATE->PHAETON) SAID THE PRINCESS TELL ME EVERYTHING +4350-10919-0004-2720: IS (THERE->THEIR) HOPE SHE MEANT TO SAY BUT HER LIPS QUIVERED AND SHE COULD NOT UTTER THE QUESTION WELL DOCTOR +4350-10919-0005-2721: AS YOU PLEASE THE PRINCESS WENT OUT WITH A SIGH +4350-10919-0006-2722: THE FAMILY DOCTOR RESPECTFULLY CEASED IN THE MIDDLE OF HIS OBSERVATIONS +4350-10919-0007-2723: AND THERE ARE INDICATIONS (MALNUTRITION->MALTRICIAN) NERVOUS EXCITABILITY AND SO ON +4350-10919-0008-2724: THE QUESTION (STANDS->SENDS) THUS IN PRESENCE OF INDICATIONS OF (TUBERCULOUS->TUBERK AT THIS) PROCESS WHAT IS TO BE DONE TO MAINTAIN NUTRITION +4350-10919-0009-2725: YES (THAT'S AN->I CAN) UNDERSTOOD THING RESPONDED THE CELEBRATED PHYSICIAN AGAIN 
GLANCING AT HIS WATCH +4350-10919-0010-2726: BEG PARDON IS THE (YAUSKY->YOKE) BRIDGE DONE YET OR SHALL I HAVE TO DRIVE (AROUND->HER ON) +4350-10919-0011-2727: HE ASKED AH IT IS +4350-10919-0012-2728: OH WELL THEN I CAN DO IT IN TWENTY MINUTES +4350-10919-0013-2729: AND (HOW->*) ABOUT (A TOUR->IT TO) ABROAD ASKED THE FAMILY DOCTOR +4350-10919-0014-2730: WHAT IS WANTED IS (*->THE) MEANS OF IMPROVING NUTRITION AND NOT FOR LOWERING IT +4350-10919-0015-2731: THE FAMILY DOCTOR LISTENED ATTENTIVELY AND RESPECTFULLY +4350-10919-0016-2732: BUT IN (FAVOR->FAVOUR) OF FOREIGN TRAVEL I WOULD URGE THE CHANGE OF HABITS THE REMOVAL FROM CONDITIONS CALLING UP REMINISCENCES +4350-10919-0017-2733: AND THEN THE MOTHER WISHES IT HE ADDED +4350-10919-0018-2734: AH WELL (IN->*) THAT (CASE->HAS) TO BE SURE LET THEM GO ONLY THOSE GERMAN (QUACKS->CLACKS) ARE MISCHIEVOUS +4350-10919-0019-2735: OH TIME'S UP ALREADY AND HE WENT TO THE DOOR +4350-10919-0020-2736: THE CELEBRATED DOCTOR ANNOUNCED TO THE PRINCESS A FEELING OF WHAT WAS DUE FROM HIM DICTATED HIS DOING SO THAT HE OUGHT TO SEE THE PATIENT ONCE MORE +4350-10919-0021-2737: (OH->O) NO ONLY A FEW DETAILS PRINCESS COME THIS WAY +4350-10919-0022-2738: AND THE MOTHER ACCOMPANIED BY THE DOCTOR WENT INTO THE DRAWING ROOM TO KITTY +4350-10919-0023-2739: WHEN THE DOCTOR CAME IN SHE FLUSHED CRIMSON AND HER EYES FILLED WITH TEARS +4350-10919-0024-2740: SHE ANSWERED HIM AND ALL AT ONCE GOT UP FURIOUS +4350-10919-0025-2741: EXCUSE ME DOCTOR BUT THERE IS REALLY NO OBJECT IN THIS +4350-10919-0026-2742: THIS IS THE THIRD TIME YOU'VE ASKED ME THE SAME THING +4350-10919-0027-2743: THE CELEBRATED DOCTOR DID NOT TAKE (OFFENSE->OFFENCE) +4350-10919-0028-2744: NERVOUS IRRITABILITY HE SAID TO THE PRINCESS WHEN (KITTY->KATY) HAD LEFT THE ROOM HOWEVER I HAD FINISHED +4350-10919-0029-2745: AND THE DOCTOR BEGAN SCIENTIFICALLY EXPLAINING TO THE PRINCESS AS AN EXCEPTIONALLY INTELLIGENT WOMAN THE CONDITION OF THE YOUNG PRINCESS AND CONCLUDED BY INSISTING ON THE DRINKING OF THE WATERS WHICH WERE CERTAINLY HARMLESS +4350-10919-0030-2746: (AT->BUT) THE QUESTION SHOULD THEY GO ABROAD THE DOCTOR PLUNGED INTO DEEP MEDITATION AS THOUGH RESOLVING A WEIGHTY PROBLEM +4350-10919-0031-2747: FINALLY HIS DECISION WAS PRONOUNCED THEY WERE TO GO ABROAD BUT TO PUT NO FAITH IN FOREIGN QUACKS AND TO APPLY TO HIM IN ANY NEED +4350-10919-0032-2748: IT SEEMED AS THOUGH SOME PIECE OF GOOD FORTUNE HAD COME TO PASS AFTER THE DOCTOR HAD GONE +4350-10919-0033-2749: THE MOTHER WAS MUCH MORE CHEERFUL WHEN SHE WENT BACK TO HER DAUGHTER AND KITTY PRETENDED TO BE MORE CHEERFUL +4350-9170-0000-2750: EDUCATED PEOPLE OF THE UPPER CLASSES ARE TRYING TO STIFLE THE (EVER GROWING->EVERGREWING) SENSE OF THE NECESSITY OF TRANSFORMING THE EXISTING SOCIAL ORDER +4350-9170-0001-2751: (THIS IS->MISSUS) ABSOLUTELY INCORRECT +4350-9170-0002-2752: IN THE SOCIAL CONCEPTION OF LIFE IT IS SUPPOSED THAT SINCE THE AIM OF LIFE IS FOUND IN GROUPS OF INDIVIDUALS INDIVIDUALS WILL VOLUNTARILY SACRIFICE THEIR OWN INTERESTS FOR THE (INTERESTS->INTEREST) OF THE GROUP +4350-9170-0003-2753: THE CHAMPIONS OF THE SOCIAL CONCEPTION OF LIFE USUALLY TRY TO CONNECT THE IDEA OF AUTHORITY THAT IS OF VIOLENCE WITH THE IDEA OF MORAL INFLUENCE BUT THIS CONNECTION IS QUITE IMPOSSIBLE +4350-9170-0004-2754: THE MAN WHO (IS->WAS) CONTROLLED BY MORAL INFLUENCE ACTS IN ACCORDANCE WITH HIS OWN DESIRES +4350-9170-0005-2755: THE BASIS OF AUTHORITY IS BODILY VIOLENCE +4350-9170-0006-2756: THE POSSIBILITY OF APPLYING BODILY VIOLENCE TO PEOPLE IS PROVIDED ABOVE ALL BY AN 
ORGANIZATION OF ARMED MEN TRAINED TO ACT IN UNISON (IN->AND) SUBMISSION TO ONE WILL +4350-9170-0007-2757: THESE BANDS OF ARMED MEN SUBMISSIVE TO A SINGLE WILL ARE WHAT CONSTITUTE THE ARMY +4350-9170-0008-2758: THE ARMY HAS ALWAYS BEEN AND STILL IS THE BASIS OF POWER +4350-9170-0009-2759: POWER IS ALWAYS IN THE HANDS OF THOSE WHO CONTROL THE ARMY AND ALL MEN IN POWER FROM THE ROMAN CAESARS TO THE RUSSIAN AND GERMAN EMPERORS TAKE MORE INTEREST IN THEIR ARMY THAN IN ANYTHING AND COURT POPULARITY IN THE ARMY KNOWING THAT IF THAT IS ON THEIR SIDE THEIR POWER IS SECURE +4350-9170-0010-2760: INDEED IT COULD NOT BE OTHERWISE +4350-9170-0011-2761: ONLY UNDER THOSE CONDITIONS COULD THE SOCIAL ORGANIZATION BE JUSTIFIED +4350-9170-0012-2762: BUT SINCE THIS IS NOT THE CASE AND ON THE CONTRARY MEN (IN->AND) POWER ARE ALWAYS FAR FROM BEING SAINTS THROUGH THE VERY FACT OF THEIR POSSESSION OF POWER THE SOCIAL ORGANIZATION BASED ON POWER HAS NO JUSTIFICATION +4350-9170-0013-2763: EVEN IF THERE WAS ONCE A TIME WHEN OWING TO THE LOW (STANDARD->STANDARDS) OF MORALS (AND->WHEN) THE DISPOSITION OF MEN TO VIOLENCE THE EXISTENCE OF AN AUTHORITY TO RESTRAIN SUCH VIOLENCE WAS AN ADVANTAGE BECAUSE THE VIOLENCE OF (*->THE) GOVERNMENT WAS LESS THAN THE VIOLENCE OF INDIVIDUALS ONE CANNOT BUT SEE THAT THIS ADVANTAGE COULD NOT BE LASTING +4350-9170-0014-2764: BETWEEN THE MEMBERS OF ONE STATE SUBJECT TO A SINGLE AUTHORITY THE (STRIFE->STRIPE) BETWEEN (*->THE) INDIVIDUALS (SEEMS->SEEMED) STILL LESS AND (THE->A) LIFE OF THE STATE SEEMS EVEN MORE SECURE +4350-9170-0015-2765: IT WAS PRODUCED ON ONE HAND BY THE NATURAL GROWTH OF POPULATION AND ON THE OTHER BY (STRUGGLE AND->STRUGGLING) CONQUEST +4350-9170-0016-2766: AFTER CONQUEST THE POWER OF THE EMPEROR PUTS AN END TO INTERNAL DISSENSIONS AND SO THE STATE CONCEPTION OF LIFE JUSTIFIES ITSELF +4350-9170-0017-2767: BUT THIS JUSTIFICATION IS NEVER MORE THAN TEMPORARY +4350-9170-0018-2768: INTERNAL DISSENSIONS DISAPPEAR ONLY IN PROPORTION TO THE DEGREE OF OPPRESSION EXERTED BY THE AUTHORITY OVER THE (DISSENTIENT->DISINDIAN) INDIVIDUALS +4350-9170-0019-2769: (GOVERNMENT->GOVERNOR) AUTHORITY EVEN IF IT DOES SUPPRESS PRIVATE VIOLENCE ALWAYS INTRODUCES INTO THE LIFE OF MEN FRESH FORMS OF VIOLENCE WHICH TEND TO BECOME GREATER AND GREATER IN PROPORTION TO THE DURATION AND STRENGTH OF THE GOVERNMENT +4350-9170-0020-2770: AND THEREFORE THE OPPRESSION OF THE OPPRESSED ALWAYS GOES ON GROWING UP TO THE FURTHEST LIMIT BEYOND WHICH IT CANNOT GO WITHOUT KILLING THE GOOSE WITH THE (GOLDEN EGGS->GOLD KNIFE'S) +4350-9170-0021-2771: THE MOST CONVINCING EXAMPLE OF THIS IS TO BE FOUND IN THE CONDITION OF THE WORKING CLASSES OF OUR EPOCH WHO ARE IN REALITY NO BETTER THAN THE SLAVES OF ANCIENT (TIMES->TIME) SUBDUED BY CONQUEST +4350-9170-0022-2772: SO IT (HAS->IS) ALWAYS (BEEN->THEN) +4350-9170-0023-2773: FOOTNOTE THE FACT THAT IN AMERICA THE ABUSES OF AUTHORITY (EXIST->EXISTS) IN SPITE OF THE SMALL NUMBER OF THEIR TROOPS NOT ONLY FAILS TO DISPROVE THIS POSITION BUT POSITIVELY CONFIRMS IT +4350-9170-0024-2774: THE UPPER CLASSES KNOW THAT AN ARMY OF FIFTY THOUSAND WILL SOON BE INSUFFICIENT AND NO LONGER RELYING ON (PINKERTON'S->PINKERTIN'S) MEN THEY FEEL THAT THE SECURITY OF THEIR POSITION DEPENDS ON THE INCREASED STRENGTH OF THE ARMY +4350-9170-0025-2775: THE REASON TO WHICH HE GAVE EXPRESSION IS ESSENTIALLY THE SAME AS THAT WHICH MADE THE FRENCH KINGS AND THE POPES ENGAGE SWISS AND SCOTCH GUARDS AND MAKES THE RUSSIAN AUTHORITIES OF TO DAY SO CAREFULLY DISTRIBUTE THE RECRUITS SO THAT THE REGIMENTS FROM 
THE (FRONTIERS->FRONTIER THEY) ARE STATIONED IN CENTRAL DISTRICTS AND THE REGIMENTS FROM THE (CENTER->CENTRE) ARE STATIONED ON THE FRONTIERS +4350-9170-0026-2776: THE MEANING OF (CAPRIVI'S->CAPRIVY) SPEECH PUT INTO (PLAIN->PLAY AND) LANGUAGE IS THAT FUNDS ARE NEEDED NOT TO RESIST FOREIGN FOES BUT TO BUY UNDER OFFICERS TO BE READY TO ACT AGAINST THE ENSLAVED TOILING MASSES +4350-9170-0027-2777: AND THIS ABNORMAL ORDER OF (THINGS->THANKS) IS MAINTAINED BY THE ARMY +4350-9170-0028-2778: BUT THERE IS NOT ONLY ONE GOVERNMENT THERE ARE OTHER GOVERNMENTS (EXPLOITING->EXPLODING) THEIR SUBJECTS BY (VIOLENCE IN->VIOLENT AND) THE SAME WAY AND (*->ARE) ALWAYS READY TO POUNCE DOWN ON ANY OTHER GOVERNMENT AND CARRY OFF THE FRUITS OF THE TOIL OF ITS (ENSLAVED->ENSLAVE) SUBJECTS +4350-9170-0029-2779: AND SO EVERY GOVERNMENT NEEDS AN ARMY ALSO TO PROTECT ITS BOOTY FROM ITS (NEIGHBOR->NEIGHBOUR) BRIGANDS +4350-9170-0030-2780: THIS INCREASE IS CONTAGIOUS AS MONTESQUIEU POINTED OUT (ONE->A) HUNDRED FIFTY YEARS AGO +4350-9170-0031-2781: EVERY INCREASE IN THE ARMY OF ONE STATE WITH THE AIM OF SELF DEFENSE AGAINST ITS SUBJECTS BECOMES A (SOURCE->SORT) OF DANGER FOR NEIGHBORING STATES AND CALLS FOR A SIMILAR INCREASE IN THEIR ARMIES +4350-9170-0032-2782: THE DESPOTISM OF (A->THE) GOVERNMENT ALWAYS INCREASES WITH THE STRENGTH OF THE ARMY AND ITS EXTERNAL SUCCESSES AND THE AGGRESSIVENESS OF A GOVERNMENT INCREASES WITH ITS INTERNAL DESPOTISM +4350-9170-0033-2783: THE RIVALRY OF THE EUROPEAN STATES (IN->AND) CONSTANTLY INCREASING THEIR FORCES HAS REDUCED THEM TO THE NECESSITY OF HAVING RECOURSE TO UNIVERSAL MILITARY SERVICE SINCE BY THAT MEANS THE GREATEST POSSIBLE NUMBER OF SOLDIERS IS OBTAINED AT THE LEAST POSSIBLE EXPENSE +4350-9170-0034-2784: AND BY THIS MEANS ALL CITIZENS ARE UNDER ARMS TO SUPPORT THE INIQUITIES PRACTICED UPON THEM ALL CITIZENS HAVE BECOME THEIR OWN (OPPRESSORS->IMPRESSORS) +4350-9170-0035-2785: THIS INCONSISTENCY HAS BECOME OBVIOUS (IN->AND) UNIVERSAL MILITARY SERVICE +4350-9170-0036-2786: IN FACT THE WHOLE SIGNIFICANCE OF THE SOCIAL CONCEPTION OF LIFE CONSISTS IN MAN'S RECOGNITION OF THE BARBARITY OF STRIFE BETWEEN INDIVIDUALS AND THE TRANSITORINESS OF PERSONAL LIFE ITSELF AND THE TRANSFERENCE OF THE AIM OF LIFE (TO->THE) GROUPS OF PERSONS +4350-9170-0037-2787: BUT WITH UNIVERSAL MILITARY SERVICE IT COMES TO PASS THAT MEN AFTER MAKING EVERY SACRIFICE TO GET RID OF THE CRUELTY OF STRIFE AND THE INSECURITY OF EXISTENCE ARE CALLED UPON TO FACE ALL THE PERILS THEY HAD MEANT TO AVOID +4350-9170-0038-2788: BUT INSTEAD OF DOING THAT THEY (EXPOSE THE->EXPOSED TO) INDIVIDUALS TO THE SAME NECESSITY OF STRIFE SUBSTITUTING STRIFE WITH INDIVIDUALS OF OTHER STATES FOR STRIFE WITH NEIGHBORS +4350-9170-0039-2789: THE TAXES RAISED FROM THE PEOPLE FOR WAR PREPARATIONS ABSORB THE GREATER PART OF THE PRODUCE OF LABOR WHICH THE ARMY OUGHT TO DEFEND +4350-9170-0040-2790: THE DANGER OF WAR EVER READY TO BREAK OUT RENDERS ALL REFORMS OF LIFE SOCIAL LIFE VAIN AND FRUITLESS +4350-9170-0041-2791: BUT THE FATAL SIGNIFICANCE OF UNIVERSAL MILITARY SERVICE AS THE MANIFESTATION OF THE CONTRADICTION INHERENT IN THE SOCIAL CONCEPTION OF LIFE IS NOT ONLY APPARENT IN THAT +4350-9170-0042-2792: (GOVERNMENTS->GOVERNMENT) ASSERT THAT ARMIES ARE NEEDED ABOVE ALL FOR EXTERNAL DEFENSE BUT THAT IS NOT TRUE +4350-9170-0043-2793: (THEY ARE->THERE) NEEDED PRINCIPALLY AGAINST THEIR SUBJECTS AND EVERY MAN UNDER UNIVERSAL MILITARY SERVICE BECOMES AN ACCOMPLICE (IN->AND) ALL (THE->THAT) ACTS OF VIOLENCE OF THE GOVERNMENT AGAINST THE CITIZENS 
WITHOUT ANY CHOICE OF HIS OWN +4350-9170-0044-2794: AND FOR THE SAKE OF WHAT AM I MAKING THEM +4350-9170-0045-2795: I (AM EXPECTED->UNEXPECTED) FOR THE SAKE OF (THE->A) STATE TO MAKE THESE SACRIFICES TO RENOUNCE EVERYTHING THAT CAN BE PRECIOUS TO MAN PEACE FAMILY SECURITY AND HUMAN DIGNITY +4350-9170-0046-2796: EXCEPT FOR THE STATE THEY SAY WE SHOULD BE EXPOSED TO THE ATTACKS OF EVIL DISPOSED PERSONS IN OUR OWN COUNTRY +4350-9170-0047-2797: WE (KNOW->*) NOW (*->KNOW) THAT THREATS AND PUNISHMENTS CANNOT DIMINISH THEIR NUMBER THAT THAT CAN ONLY BE DONE BY CHANGE OF ENVIRONMENT AND MORAL INFLUENCE +4350-9170-0048-2798: SO THAT (THE->THIS) JUSTIFICATION OF STATE VIOLENCE ON THE GROUND OF THE PROTECTION IT GIVES US FROM EVIL (DISPOSED->DISPOS) PERSONS EVEN IF (IT->I) HAD SOME FOUNDATION THREE OR FOUR CENTURIES AGO HAS NONE WHATEVER NOW +4350-9170-0049-2799: EXCEPT FOR THE STATE THEY TELL US WE SHOULD NOT HAVE ANY RELIGION EDUCATION CULTURE MEANS OF COMMUNICATION AND SO ON +4350-9170-0050-2800: WITHOUT THE STATE MEN WOULD NOT HAVE BEEN ABLE TO FORM THE SOCIAL INSTITUTIONS NEEDED FOR DOING (ANY THING->ANYTHING) +4350-9170-0051-2801: THIS ARGUMENT TOO WAS WELL FOUNDED ONLY SOME CENTURIES AGO +4350-9170-0052-2802: THE GREAT EXTENSION OF MEANS OF COMMUNICATION AND INTERCHANGE OF IDEAS HAS MADE MEN COMPLETELY ABLE TO DISPENSE WITH STATE AID IN FORMING SOCIETIES ASSOCIATIONS CORPORATIONS AND CONGRESSES FOR SCIENTIFIC ECONOMIC AND POLITICAL OBJECTS +4350-9170-0053-2803: WITHOUT GOVERNMENTS NATIONS WOULD BE ENSLAVED BY THEIR NEIGHBORS +4350-9170-0054-2804: THE GOVERNMENT THEY TELL US WITH ITS ARMY IS NECESSARY TO DEFEND US FROM NEIGHBORING STATES WHO MIGHT ENSLAVE US +4350-9170-0055-2805: AND IF (DEFENSE->DEFENCE) AGAINST BARBAROUS NATIONS IS MEANT ONE THOUSANDTH PART OF THE TROOPS NOW UNDER ARMS WOULD BE AMPLY SUFFICIENT FOR THAT PURPOSE +4350-9170-0056-2806: THE POWER OF THE STATE FAR FROM BEING A SECURITY AGAINST THE ATTACKS OF OUR NEIGHBORS EXPOSES US ON THE CONTRARY TO MUCH GREATER DANGER OF SUCH ATTACKS +4350-9170-0057-2807: EVEN LOOKING AT IT PRACTICALLY WEIGHING THAT IS TO SAY ALL THE BURDENS LAID ON HIM BY THE (STATE->STATES) NO MAN CAN FAIL TO SEE THAT FOR HIM PERSONALLY TO COMPLY WITH (*->THE) STATE DEMANDS AND SERVE IN THE ARMY WOULD IN THE MAJORITY OF CASES BE MORE DISADVANTAGEOUS THAN TO REFUSE TO DO SO +4350-9170-0058-2808: TO RESIST WOULD NEED INDEPENDENT THOUGHT AND EFFORT OF WHICH EVERY MAN IS NOT CAPABLE +4350-9170-0059-2809: SO MUCH FOR THE ADVANTAGES (AND->OF) DISADVANTAGES OF BOTH LINES OF CONDUCT FOR A MAN OF THE WEALTHY (CLASSES AN->CLASS AND) OPPRESSOR +4350-9170-0060-2810: FOR A MAN OF THE POOR WORKING CLASS THE ADVANTAGES AND DISADVANTAGES WILL BE THE SAME BUT WITH A GREAT INCREASE OF DISADVANTAGES +4852-28311-0000-2098: SAY YOU KNOW (SUMTHIN->SOMETHING) +4852-28311-0001-2099: CHRIS LOOKED FROM (A NICKEL PLATED FLASHLIGHT->MENDICULATED FLASH LIKE) TO A CAR JACK AND SPARK PLUG +4852-28311-0002-2100: (KNOW WHO->NO ONE) NEEDS A JOB (BAD->BAN) THAT'S (JAKEY->JAKIE) HARRIS +4852-28311-0003-2101: O K HE SAID +4852-28311-0004-2102: ONLY WHY DIDN'T YOU ASK HIM YOURSELF +4852-28311-0005-2103: MIKE BECAME UNEASY AND FISHED (AN ELASTIC->IT MOLASTIC) BAND OUT OF HIS POCKET MADE A FLICK OF PAPER AND SENT IT SOARING OUT (INTO M->IN EM) STREET +4852-28311-0006-2104: WELL HE ADMITTED I DID +4852-28311-0007-2105: CHRIS ASKED (AND->HIM) FOR THE FIRST TIME THAT DAY THE HEAVY WEIGHT HE CARRIED WITHIN HIM LIFTED AND LIGHTENED A LITTLE +4852-28311-0008-2106: (THINK HE->THINKING) REALLY NEEDS IT HE 
PURSUED +4852-28311-0009-2107: HE WOULD HAVE LIKED TO GET THE JOB FOR (JAKEY->JAKIE) WHO NEEDED IT BUT SOMEHOW THE TASK OF FACING MISTER WICKER ESPECIALLY NOW THAT THE LIGHT WAS GOING AND DUSK (EDGING->EDGED) INTO THE STREETS WAS NOT WHAT (CHRIS HAD->CHRISTEN) INTENDED FOR ENDING THE AFTERNOON +4852-28311-0010-2108: MIKE'S EXPRESSION CHANGED AT (ONCE->ONE'S) TO ONE OF TRIUMPH BUT (CHRIS->BRUCE) WAS ONLY (PARTLY->PARTIALLY) ENCOURAGED +4852-28311-0011-2109: (BETCHA AREN'T->BETTER AND) GOIN AFTER ALL (CHRIS->THIS) TURNED (*->TO) ON HIM +4852-28311-0012-2110: MIKE WAS STANDING ON THE CORNER +4852-28311-0013-2111: (AW SHUCKS->AH SHOCKS) +4852-28311-0014-2112: CHRIS STARTED OFF ONCE MORE PASSING (THE->A) BLEAK LITTLE VICTORIAN CHURCH PERCHED ON THE HILL ABOVE MISTER WICKER'S HOUSE +4852-28311-0015-2113: AN EMPTY LOT CUT (*->IN) INTO BY CHURCH LANE GAVE A LOOK OF ISOLATION TO THE (L->ALE) SHAPED BRICK BUILDING THAT SERVED MISTER (WICKER AS->WICKER'S) BOTH HOUSE AND PLACE OF BUSINESS +4852-28311-0016-2114: (THE->NO) LONGER (WING->WINGED) TOWARD THE BACK (HAD->GOT) A BACK DOOR THAT OPENED (ONTO->ON A) WATER STREET THE SPACE BETWEEN THE HOUSE AND WISCONSIN (AVENUE->AVIGUE) HAD BEEN MADE INTO A NEAT OBLONG FLOWER GARDEN FENCED OFF FROM THE SIDEWALK BY BOX (SHRUBS->SHRUGS) AND (A->THE) WHITE PICKET FENCE +4852-28311-0017-2115: A LIVID YELLOW STAINED THE HORIZON BEYOND THE FACTORIES (AND GRAY->IN GLAY) CLOUDS LOWERED AND TUMBLED ABOVE +4852-28311-0018-2116: THE AIR WAS GROWING CHILL AND (CHRIS->CHRIST) DECIDED TO FINISH (HIS->THE) JOB +4852-28311-0019-2117: ALL AT ONCE (HE->YOU) WONDERED HOW HIS MOTHER WAS AND EVERYTHING IN HIM PINCHED AND TIGHTENED ITSELF +4852-28311-0020-2118: AT THE FOOT OF THE HILL HE REACHED THE HOUSE +4852-28311-0021-2119: THERE WERE THREE THINGS THAT ALWAYS CAUGHT HIS EYE AMID THE LITTER OF DUSTY PIECES +4852-28311-0022-2120: ON THE LEFT THE COIL OF ROPE IN THE CENTER THE MODEL OF A SAILING SHIP IN A GREEN GLASS BOTTLE AND ON THE RIGHT THE WOODEN STATUE OF A NEGRO BOY (IN->AND) BAGGY TROUSERS TURKISH JACKET AND WHITE TURBAN +4852-28311-0023-2121: BUT THE NAME STILL SHOWED AT THE PROW AND MANY A TIME CHRIS SAFE AT HOME IN BED HAD SAILED IMAGINARY VOYAGES IN THE MIRABELLE +4852-28311-0024-2122: (HE HAD->HE'D) NEVER SEEN (ANYONE->ANY ONE) GO INTO MISTER (WICKER'S->HOOKER'S) SHOP NOW HE THOUGHT OF IT +4852-28311-0025-2123: HOW THEN DID HE (*->TO) LIVE AND WHAT DID HE EVER SELL +4852-28311-0026-2124: A SUDDEN CAR HORN (WOKE HIM->WALKING) FROM (HIS->THIS) DREAM +4852-28312-0000-2125: OF THE MANY TIMES HE HAD EXAMINED MISTER WICKER'S WINDOW AND (PORED->POURED) OVER THE ROPE THE SHIP AND THE NUBIAN BOY HE HAD NEVER GONE INTO MISTER (WICKER'S->ROOKER'S) SHOP +4852-28312-0001-2126: SO NOW ALONE UNTIL (SOMEONE->SOME ONE) SHOULD ANSWER THE BELL (HE->THEY) LOOKED EAGERLY IF UNEASILY AROUND HIM +4852-28312-0002-2127: WHAT WITH THE ONE WINDOW AND THE LOWERING DAY OUTSIDE THE LONG NARROW SHOP WAS (SOMBER->SOMBRE) +4852-28312-0003-2128: HEAVY HAND (HEWN->YOU AND) BEAMS CROSSED IT FROM ONE SIDE TO THE OTHER +4852-28312-0004-2129: MISTER (WICKER'S->OCCUR'S) BACK BEING TOWARD THE SOURCE OF LIGHT CHRIS COULD NOT SEE HIS FACE +4852-28312-0005-2130: THE DOUBLE FANS OF MINUTE WRINKLES BREAKING FROM EYE (CORNER TO->CORNERED A) TEMPLE (AND JOINING->ENJOINING) WITH THOSE OVER THE (CHEEKBONES->SHEEP BONES) WERE DRAWN INTO THE HORIZONTAL LINES ACROSS THE DOMED FOREHEAD +4852-28312-0006-2131: LITTLE TUFTS OF WHITE (FUZZ->FUZ) ABOVE THE EARS WERE ALL THAT REMAINED OF THE ANTIQUARIAN'S HAIR BUT WHAT DREW AND 
HELD CHRIS'S GAZE WERE THE OLD MAN'S EYES +4852-28312-0007-2132: (CHRIS->CRIS) BLINKED AND LOOKED AGAIN YES THEY WERE STILL THERE +4852-28312-0008-2133: CHRIS SWALLOWED AND HIS VOICE CAME BACK TO HIM +4852-28312-0009-2134: YES SIR HE SAID +4852-28312-0010-2135: I SAW YOUR SIGN AND I KNOW A BOY WHO NEEDS THE JOB +4852-28312-0011-2136: HE'S A SCHOOLMATE OF MINE +4852-28312-0012-2137: (JAKEY HARRIS HIS->J HARRIS'S) NAME (IS AND->ISN'T) HE REALLY NEEDS THE JOB +4852-28312-0013-2138: I I JUST WONDERED IF THE PLACE WAS STILL OPEN +4852-28312-0014-2139: WHAT HE SAW WAS A FRESH CHEEKED LAD TALL FOR THIRTEEN STURDY WITH SINCERITY AND GOOD (HUMOR->HUMOUR) IN HIS FACE AND SOMETHING (SENSITIVE->SCENTED) AND APPEALING ABOUT HIS EYES +4852-28312-0015-2140: HE GUESSED THERE (*->IT) MUST BE A LIVELY FIRE IN THAT (ROOM->RUM) BEYOND +4852-28312-0016-2141: WOULD THAT INTERFERE WITH (JAKEY'S->JAKIE GIGS) GETTING THE JOB SIR +4852-28312-0017-2142: BUT EVEN AS HE SLOWLY TURNED THE THOUGHT PIERCED HIS MIND WHY (HAD HE->DO YOU) NOT (SEEN->SEE) THE REFLECTION OF THE (HEADLIGHTS->HEAD LIGHTS) OF THE CARS MOVING UP AROUND THE CORNER OF (WATER STREET AND UP->WALL AT HER STREET NOT) THE HILL TOWARD THE (TRAFFIC->EFFIC) SIGNALS +4852-28312-0018-2143: THE (ROOM->ROOMS) SEEMED OVERLY STILL +4852-28312-0019-2144: THEN IN THAT SECOND HE TURNED AND FACED ABOUT +4852-28312-0020-2145: THE WIDE BOW WINDOW WAS THERE BEFORE HIM THE THREE OBJECTS HE LIKED BEST SHOWING FROSTY IN THE MOONLIGHT THAT POURED IN FROM ACROSS THE WATER +4852-28312-0021-2146: ACROSS THE WATER WHERE WAS THE (FREEWAY->FREE WAY) +4852-28312-0022-2147: IT WAS NO LONGER THERE NOR WERE THE HIGH WALLS AND SMOKESTACKS OF FACTORIES TO BE SEEN +4852-28312-0023-2148: THE WAREHOUSES WERE STILL THERE +4852-28312-0024-2149: (FLABBERGASTED AND->FLABRA GASTED IN) BREATHLESS CHRIS WAS UNAWARE THAT HE HAD MOVED CLOSER TO PEER OUT THE WINDOW IN EVERY DIRECTION +4852-28312-0025-2150: NO ELECTRIC SIGNS NO LAMPLIT STREETS +4852-28312-0026-2151: WHERE THE PEOPLE'S (DRUGSTORE->DRUG STORE) HAD STOOD BUT (A->*) HALF (*->AN) HOUR BEFORE ROSE THE ROOFS OF WHAT WAS EVIDENTLY AN INN +4852-28312-0027-2152: A COURTYARD WAS (SPARSELY->FIRSTLY) LIT BY A FLARING (TORCH OR->TORTURE) TWO SHOWING (A->THE) SWINGING SIGN HUNG ON (A->THE) POST +4852-28312-0028-2153: THE (POST WAS->POSTS) PLANTED AT THE EDGE OF (WHAT->IT) WAS NOW A BROAD AND MUDDY ROAD +4852-28312-0029-2154: A COACH (WITH ITS TOP->WHICH HAD STOPPED) PILED HIGH WITH (LUGGAGE->LEGGED) STAMPED (TO A->O) HALT BESIDE THE FLAGGED COURTYARD +4852-28312-0030-2155: THEY MOVED INTO THE INN THE COACH RATTLED OFF TO THE STABLE +4852-28312-0031-2156: (MY->BY) WINDOW (HAS->AS) A POWER FOR THOSE FEW WHO ARE TO SEE +4852-28319-0000-2070: THE LEARNING (OF->AND) MAGIC WAS BY NO MEANS EASY +4852-28319-0001-2071: HE HAD TOLD HIS MASTER AT ONCE (ABOUT->HE GOT) SIMON GOSLER HIS (HORDE->HOARD) OF MONEY AND HIS HIDING PLACES FOR IT +4852-28319-0002-2072: CHRIS THEREFORE THREW HIMSELF (INTO->AND) ALL THE PRELIMINARIES OF HIS TASK +4852-28319-0003-2073: ONE AFTERNOON WHEN HE (*->HAD) RETURNED AFTER A REST TO MISTER WICKER'S STUDY HE SAW THAT THERE WAS SOMETHING NEW IN THE ROOM A BOWL WITH A (GOLDFISH->GOLD FISH) IN IT STOOD ON THE TABLE BUT MISTER WICKER WAS NOT TO BE SEEN +4852-28319-0004-2074: WHAT (SHALL->SHOULD ALL) I DO FIRST +4852-28319-0005-2075: HOW (YOU HAVE IMPROVED->OFTEN PROVED) MY BOY (HE->IT) EXCLAIMED (IT->*) IS NOW TIME FOR YOU TO TRY (AND THIS->MISSUS) IS (AS->*) GOOD A CHANGE (AS->IS) ANY +4852-28319-0006-2076: SUPPOSE (I->A) CHANGE AND 
CAN'T (CHANGE->CHANCE) BACK +4852-28319-0007-2077: MISTER WICKER WAITED PATIENTLY BESIDE HIM FOR A FEW MOMENTS FOR CHRIS TO GET UP HIS COURAGE +4852-28319-0008-2078: (THEN AS->THAT IS) NOTHING HAPPENED WITH A VOICE LIKE A WHIP MISTER WICKER SAID START AT ONCE +4852-28319-0009-2079: THE SENSATION SPREAD FASTER AND FASTER +4852-28319-0010-2080: HIS HEAD SWAM AND HE FELT FAINT (AND->IN) A LITTLE SICK BUT HE PERSISTED THROUGH THE FINAL WORDS +4852-28319-0011-2081: HE THOUGHT NOT WITHOUT A FEELING OF PRIDE AND COMMENCED (*->THE) EXPERIMENTING WITH HIS TAIL AND FINS WITH SUCH ENTHUSIASM AND DELIGHT THAT SOME LITTLE TIME ELAPSED BEFORE MISTER WICKER'S VOICE BOOMED CLOSE BY +4852-28319-0012-2082: SEVENTY FOUR BOOK ONE THE RETURN +4852-28319-0013-2083: THE (FIGURE'S->FIGURES) SHOES CARVED IN SOME EASTERN STYLE HAD CURVED UP POINTING TOES +4852-28319-0014-2084: THEN ALL AT ONCE THE IDEA CAME TO CHRIS +4852-28319-0015-2085: IF HE WAS TO BE A MAGICIAN COULD HE MAKE THIS BOY COME TO LIFE +4852-28319-0016-2086: (HE->IT) SQUATTED ON HIS HAUNCHES (EXAMINING->EXAMINED) THE CARVED WOODEN FIGURE ATTENTIVELY AND FELT CONVINCED THAT ONCE ALIVE THE BOY WOULD BE AN IDEAL AND HAPPY COMPANION +4852-28319-0017-2087: BUT HOW DID ONE (*->A) CHANGE INANIMATE TO ANIMATE +4852-28319-0018-2088: (CHRIS->GRIS) GOT UP AND STOLE BACK TO MISTER WICKER'S DOOR +4852-28319-0019-2089: HE HEARD (THE->THAT) MAGICIAN GOING UP THE SPIRAL STAIRCASE TO HIS ROOM ABOVE AND AFTER CHANGING HIMSELF TO A MOUSE TO SLIP UNDER THE DOOR AND SEE THAT THE ROOM WAS REALLY EMPTY (CHRIS RESUMED HIS->MISTER JUNE'S) PROPER SHAPE AND OPENED THE DOORS OF THE CUPBOARD AT THE FAR END OF THE ROOM +4852-28319-0020-2090: THE AFTERNOON (RAINY->RAINING) BEFORE INCREASED IN STORM +4852-28319-0021-2091: (DUSK CAME->THUS GAINED) TWO HOURS BEFORE ITS TIME THUNDER (SNARLED->SNARLS) IN THE SKY +4852-28319-0022-2092: CERTAIN ELEMENTS WERE TO BE MIXED AND POURED AT THE PROPER TIME +4852-28319-0023-2093: MISTER WICKER BEGAN MOVING ABOUT UPSTAIRS THE (FLOORBOARDS->FOREBOARDS) CREAKED AND STILL CHRIS COULD NOT LEAVE UNTIL THE (POTION->FORTUNE) FUMED AND GLOWED +4852-28319-0024-2094: WITH INFINITE CAUTION CHRIS CLOSED THE DOOR SILENTLY BEHIND HIM AND RUNNING LIGHTLY FORWARD REACHED THE FIGURE (OF->AT) THE NEGRO BOY +4852-28319-0025-2095: IT WAS AS IF THE STIFFNESS MELTED +4852-28319-0026-2096: UNDER HIS EYES (THE WOODEN FOLDS->WELLS) OF CLOTH BECAME RICH SILK EMBROIDERY GLEAMED IN ITS REALITY UPON THE COAT AND OH THE FACE +4852-28319-0027-2097: THE WOODEN GRIN LOOSENED THE LARGE EYES TURNED THE HAND HOLDING THE HARD BOUQUET OF CARVED FLOWERS MOVED (AND LET->*) THE BOUQUET FALL +4852-28330-0000-2044: THEY WENT DOWN TO THEIR QUARTERS FIRST +4852-28330-0001-2045: GUESS MISTER FINNEY WENT TO HIS QUARTERS I DON'T REMEMBER SEEING HIM CROSS THE DECK OR COME OVER THAT WAY AT ALL +4852-28330-0002-2046: NEXT NED CILLEY WAS RELIEVED (AT->TO) THE HELM BY (ELBERT->ELBER) JONES WHO TOOK OVER NED WENT ON DOWN +4852-28330-0003-2047: IT LOOKS TO ME AS IF IT COULD (HAVE BEEN->BIT OF IN) ONE OF SEVERAL PEOPLE AND I'LL BE SWITCHED IF I KNOW WHO I'LL KEEP MY EYES (OPEN->UP AND) +4852-28330-0004-2048: THE MIRABELLE WAS (NEARING TAHITI->NEAR INDEEDY) +4852-28330-0005-2049: (WE'VE WATER->REVOLTA) AND FRESH STORES TO TAKE ON THERE +4852-28330-0006-2050: CHRIS LOST NO TIME AS SOON AS HE COULD DO IT WITHOUT BEING NOTICED (IN->AND) HURRYING DOWN TO HIS CABIN +4852-28330-0007-2051: CERTAINLY MY BOY BOOMED OUT THE CAPTAIN (HIS->AS) BLUE EYES ABRUPTLY KEEN AND PENETRATING +4852-28330-0008-2052: MISTER 
FINNEY (WILL->WOULD) BE SOME TIME ON DECK WE CANNOT BE (OVERHEARD IN->OWNED HEARD AND) HERE +4852-28330-0009-2053: HIS FACE (FROZE->ROSE) WITH NERVOUSNESS THAT THIS MIGHT (*->DO) NOT DO AS AN ANSWER AND HE STOOD STIFF AND STILL BEFORE CAPTAIN BLIZZARD +4852-28330-0010-2054: THE CAPTAIN SAT FORWARD IN HIS CHAIR LOOKING AT HIM FOR A LONG MOMENT CONSIDERING +4852-28330-0011-2055: THEN HE SAID WELL I DO NOT CARE FOR IT I CANNOT SAY (I->THAT) DO +4852-28330-0012-2056: (THIS->THE) SHIP IS MORE TO ME THAN (WIFE OR->MY FULL) MOTHER OR FAMILY +4852-28330-0013-2057: (HE->AND) PAUSED FINGERING HIS LOWER LIP AND LOOKING SIDEWAYS IN A REFLECTIVE FASHION AT (CHRIS->CRIS) STANDING BEFORE HIM +4852-28330-0014-2058: WE SHALL SAY NO MORE BUT I TRUST YOU UNDERSTAND THE RESPONSIBILITY YOU HAVE +4852-28330-0015-2059: (THIS->THE) SHIP ITS CARGO (AND->IN) ITS MEN WILL BE IN YOUR HANDS +4852-28330-0016-2060: YES SIR I THINK I CAN DO IT SAFELY OR I SHOULD NOT TRY SIR +4852-28330-0017-2061: CAPTAIN BLIZZARD'S ROUND PINK (FACE->FACED) CREASED IN (HIS->ITS) WINNING SMILE +4852-28330-0018-2062: HE THEN WENT ON TO DESCRIBE WHAT ELSE WAS TO FOLLOW THE COVERING OF THE SHIP WITH LEAVES TO MAKE IT BLEND WITH ITS SURROUNDINGS +4852-28330-0019-2063: (*->THE) CAMOUFLAGE WAS NOT A WORD THE CAPTAIN OR (ANYONE->ANY ONE) ELSE OF HIS TIME (YET->HE HAD) UNDERSTOOD +4852-28330-0020-2064: WHAT CAN BE SAID DURING THAT TIME SIR CHRIS THOUGHT TO ASK +4852-28330-0021-2065: I AM SOMEWHAT SKILLED IN (MEDICAMENTS->MEDICMENTS) I HAVE TO BE AS (*->A) CAPTAIN OF (A->*) SHIP AND THE CREW KNOW IT +4852-28330-0022-2066: I SHALL SAY THAT YOU ARE IN MY OWN CABIN SO THAT I CAN CARE FOR YOU +4852-28330-0023-2067: NOT SINCE HE HAD LEFT MISTER WICKER (HAD->AND) CHRIS FELT SUCH CONFIDENCE AS HE DID IN THE WORDS AND ACTIONS OF CAPTAIN BLIZZARD +4852-28330-0024-2068: HE KNEW NOW THAT HIS ABSENCE FOR AS LONG AS HE HAD (*->HAD) TO BE AWAY WOULD BE COVERED UP (AND->IN) SATISFACTORILY ACCOUNTED FOR +4852-28330-0025-2069: THEIR CONVERSATION HAD TAKEN SOME LITTLE WHILE +533-1066-0000-796: (WHEN->ONE) CHURCHYARDS YAWN +533-1066-0001-797: I KNEW WELL ENOUGH THAT HE MIGHT BE CARRIED (THOUSANDS->THOUSAND) OF MILES (IN THE->INTO) BOX CAR LOCKED IN PERHAPS WITHOUT WATER OR (FOOD->FOOT) +533-1066-0002-798: I AM SURE I KISSED (LIDDY->LADY) AND (I HAVE->I'VE) HAD TERRIBLE MOMENTS SINCE WHEN I (SEEM->SEEMED) TO REMEMBER KISSING MISTER JAMIESON TOO IN THE EXCITEMENT +533-1066-0003-799: FORTUNATELY WARNER (AND->ON) THE (DETECTIVES->DETECTIVE) WERE KEEPING BACHELOR (HALL IN THE->HOLLAND) LODGE +533-1066-0004-800: OUT OF DEFERENCE TO (LIDDY->LADY) THEY WASHED (THEIR->HER) DISHES ONCE A DAY AND THEY (CONCOCTED->CONCLUDED) QUEER (MESSES->MASSES) ACCORDING TO THEIR SEVERAL ABILITIES +533-1066-0005-801: MISS (INNES->EAMES) HE SAID STOPPING ME AS I WAS ABOUT TO GO TO MY ROOM UP STAIRS HOW ARE YOUR NERVES (TONIGHT->TO NIGHT) +533-1066-0006-802: I HAVE NONE I SAID HAPPILY +533-1066-0007-803: I MEAN HE PERSISTED DO YOU FEEL AS THOUGH YOU COULD GO THROUGH WITH SOMETHING RATHER UNUSUAL +533-1066-0008-804: THE MOST UNUSUAL THING I CAN THINK OF WOULD BE A PEACEFUL NIGHT +533-1066-0009-805: SOMETHING IS GOING TO OCCUR HE SAID +533-1066-0010-806: PUT ON HEAVY SHOES AND SOME (OLD->ALL) DARK CLOTHES AND MAKE UP YOUR MIND NOT TO BE SURPRISED AT ANYTHING +533-1066-0011-807: (LIDDY->LEAVY) WAS SLEEPING (THE->*) SLEEP OF THE JUST WHEN I WENT (UP STAIRS->UPSTAIRS) AND I HUNTED OUT MY THINGS CAUTIOUSLY +533-1066-0012-808: (THEY->YOU) WERE TALKING (CONFIDENTIALLY->TO FILIANTLY) TOGETHER BUT WHEN I 
CAME DOWN THEY CEASED +533-1066-0013-809: (THERE->THEY) WERE A FEW PREPARATIONS TO BE MADE (THE->*) LOCKS TO BE GONE OVER WINTERS TO BE (INSTRUCTED->INSTRUCTIVE) AS TO RENEWED VIGILANCE AND THEN AFTER EXTINGUISHING THE (HALL->WHOLE) LIGHT WE CREPT IN THE DARKNESS THROUGH THE FRONT DOOR AND INTO THE NIGHT +533-1066-0014-810: I ASKED NO QUESTIONS +533-1066-0015-811: (ONCE->WAS) ONLY SOMEBODY SPOKE AND THEN IT WAS AN EMPHATIC (BIT->FIT) OF PROFANITY FROM DOCTOR STEWART WHEN HE RAN INTO A WIRE FENCE +533-1066-0016-812: I (HARDLY->ARE TO) KNOW WHAT I EXPECTED +533-1066-0017-813: THE DOCTOR WAS PUFFING SOMEWHAT WHEN WE FINALLY CAME TO A HALT +533-1066-0018-814: I CONFESS THAT JUST AT THAT MINUTE EVEN SUNNYSIDE SEEMED A CHEERFUL SPOT +533-1066-0019-815: IN SPITE OF MYSELF I DREW MY BREATH IN SHARPLY +533-1066-0020-816: IT WAS ALEX (ARMED WITH->I'M THE) TWO LONG HANDLED SPADES +533-1066-0021-817: THE DOCTOR KEPT A (KEEN LOOKOUT->KIN LOOK OUT) BUT NO ONE APPEARED +533-1066-0022-818: THERE'S ONE THING SURE I'LL NOT BE SUSPECTED OF COMPLICITY +533-1066-0023-819: (A->THE) DOCTOR IS GENERALLY SUPPOSED TO BE (*->A) HANDIER AT (BURYING->BERING) FOLKS THAN (AT DIGGING->A TIGGING) THEM UP +533-1066-0024-820: I HELD ON TO HIM FRANTICALLY AND SOMEHOW I GOT (THERE AND->TERRANT) LOOKED DOWN +533-131556-0000-821: BUT HOW AM I TO (GET->IT) OVER THE (TEN->TOWN) OR TWELVE DAYS THAT MUST YET ELAPSE BEFORE THEY GO +533-131556-0001-822: FOR NONE COULD (INJURE->ENDURE) ME AS HE HAS DONE OH +533-131556-0002-823: THE (WORD STARES->WORDS TEARS) ME IN THE FACE LIKE A GUILTY CONFESSION BUT IT IS TRUE I HATE HIM I HATE HIM +533-131556-0003-824: I SOMETIMES THINK I OUGHT TO GIVE HIM CREDIT FOR THE GOOD FEELING HE (SIMULATES->SIMILATES) SO WELL AND THEN AGAIN I THINK IT IS MY DUTY TO SUSPECT HIM UNDER THE PECULIAR CIRCUMSTANCES IN WHICH I AM PLACED +533-131556-0004-825: I HAVE DONE WELL TO RECORD (THEM SO MINUTELY->HIM SUMINUTELY) +533-131556-0005-826: (THEY->THE YEAR) HAD (BETAKEN->TAKEN) THEMSELVES TO THEIR WORK I (LESS TO->LEST) DIVERT MY MIND THAN TO (DEPRECATE->THE PROCATE) CONVERSATION HAD PROVIDED MYSELF WITH (A->THE) BOOK +533-131556-0006-827: I AM TOO (WELL->*) ACQUAINTED WITH YOUR CHARACTER AND CONDUCT TO FEEL ANY REAL FRIENDSHIP FOR YOU AND AS I AM WITHOUT YOUR TALENT FOR DISSIMULATION I CANNOT ASSUME THE APPEARANCE OF IT +533-131556-0007-828: (UPON->UP AND) PERUSING THIS SHE TURNED SCARLET AND BIT HER LIP +533-131556-0008-829: YOU MAY GO MILICENT AND SHE'LL (FOLLOW IN A WHILE MILICENT->FOLLOWING AWHILE MELLICENT) WENT +533-131556-0009-830: (WILL YOU->OLIO) OBLIGE ME (HELEN->ALAN) CONTINUED SHE +533-131556-0010-831: (AH->HA) YOU ARE SUSPICIOUS +533-131556-0011-832: IF I WERE SUSPICIOUS I REPLIED I SHOULD HAVE DISCOVERED YOUR INFAMY LONG BEFORE +533-131556-0012-833: (I->*) ENJOY (A MOONLIGHT->EVENLENTH) RAMBLE AS WELL AS YOU I ANSWERED STEADILY FIXING MY EYES (UPON HER->UP ON EARTH) AND (THE SHRUBBERY->FREDERI) HAPPENS TO BE ONE OF MY (FAVOURITE->FAVORITE) RESORTS +533-131556-0013-834: SHE COLOURED AGAIN EXCESSIVELY AND REMAINED SILENT PRESSING HER FINGER AGAINST HER TEETH AND GAZING INTO THE FIRE +533-131556-0014-835: I (WATCHED HER->WATCH FOR) A FEW MOMENTS (WITH A->TO THE) FEELING OF MALEVOLENT GRATIFICATION THEN MOVING TOWARDS THE DOOR I CALMLY ASKED IF SHE HAD ANYTHING MORE TO SAY +533-131556-0015-836: YES YES +533-131556-0016-837: SUPPOSE I DO +533-131556-0017-838: SHE PAUSED IN EVIDENT DISCONCERTION AND PERPLEXITY MINGLED WITH ANGER SHE DARED NOT SHOW +533-131556-0018-839: I CANNOT RENOUNCE WHAT IS DEARER 
THAN LIFE SHE MUTTERED IN A LOW HURRIED TONE +533-131556-0019-840: IF YOU ARE GENEROUS HERE IS A FITTING OPPORTUNITY FOR THE EXERCISE OF YOUR MAGNANIMITY IF YOU ARE PROUD HERE AM I YOUR RIVAL (READY->RATHER) TO (ACKNOWLEDGE->ANNOUNCE) MYSELF YOUR (DEBTOR->DEPTOR) FOR AN ACT OF (THE->*) MOST NOBLE FORBEARANCE +533-131556-0020-841: I SHALL NOT TELL HIM +533-131556-0021-842: GIVE ME NO THANKS IT IS NOT FOR YOUR SAKE THAT I REFRAIN +533-131556-0022-843: AND (MILICENT->MELLICENT) WILL (YOU->IT) TELL HER +533-131556-0023-844: I (WOULD->WILL) NOT FOR MUCH THAT (SHE->YOU) SHOULD (KNOW THE->NOT) INFAMY AND DISGRACE OF HER RELATION +533-131556-0024-845: YOU USE (HARD->OUR) WORDS MISSUS HUNTINGDON BUT I CAN PARDON YOU +533-131556-0025-846: HOW DARE YOU MENTION HIS NAME TO ME +533-131562-0000-847: IT SEEMS VERY INTERESTING LOVE SAID HE LIFTING HIS HEAD AND TURNING TO (WHERE I STOOD->HER EYES TOO) WRINGING MY (HANDS->HAND) IN SILENT (RAGE->RATE) AND ANGUISH BUT IT'S RATHER LONG (I'LL->I) LOOK AT IT SOME OTHER TIME AND MEANWHILE I'LL TROUBLE YOU FOR YOUR KEYS MY DEAR WHAT (KEYS->CASE) +533-131562-0001-848: (THE KEYS->IT ACCUSE) OF YOUR CABINET (DESK DRAWERS->DESKED RAOUL) AND WHATEVER ELSE YOU POSSESS SAID HE RISING AND HOLDING OUT HIS HAND +533-131562-0002-849: THE KEY OF MY (DESK->VES) IN FACT WAS AT THAT MOMENT IN (THE LOCK->LOVE) AND THE OTHERS WERE ATTACHED TO IT +533-131562-0003-850: NOW THEN SNEERED HE WE MUST HAVE A CONFISCATION OF PROPERTY +533-131562-0004-851: AND (PUTTING->PUT IN) THE KEYS INTO HIS POCKET HE WALKED INTO THE LIBRARY +533-131562-0005-852: THAT AND ALL REPLIED THE MASTER AND THE THINGS WERE CLEARED AWAY +533-131562-0006-853: MISTER HUNTINGDON THEN WENT (UP STAIRS->UPSTAIRS) +533-131562-0007-854: MUTTERED HE STARTING BACK SHE'S (THE->*) VERY DEVIL FOR (*->A) SPITE +533-131562-0008-855: I DIDN'T SAY (I'D->I'VE) BROKEN IT DID I RETURNED HE +533-131562-0009-856: I SHALL PUT YOU (UPON->UP IN) A SMALL (MONTHLY ALLOWANCE->MOUTHFULLY ALLOW US) IN FUTURE FOR YOUR OWN PRIVATE EXPENSES AND YOU NEEDN'T TROUBLE YOURSELF ANY MORE ABOUT MY CONCERNS I SHALL LOOK OUT FOR A STEWARD MY DEAR I WON'T EXPOSE YOU TO THE TEMPTATION +533-131562-0010-857: AND AS FOR THE (HOUSEHOLD->HOUSE OR) MATTERS MISSUS (GREAVES->GREEBS) MUST BE VERY PARTICULAR IN KEEPING HER ACCOUNTS WE MUST GO (UPON->UP IN) AN ENTIRELY NEW PLAN +533-131562-0011-858: WHAT GREAT DISCOVERY HAVE YOU MADE NOW MISTER (HUNTINGDON->HONDYNON) +533-131562-0012-859: (HAVE->IF) I (ATTEMPTED->ATTENDED) TO DEFRAUD YOU +533-131562-0013-860: NOT IN MONEY MATTERS EXACTLY IT SEEMS BUT (IT'S BEST->IS FAST) TO KEEP OUT OF THE WAY OF TEMPTATION +533-131562-0014-861: HERE (BENSON->BUILTON) ENTERED (WITH->*) THE CANDLES AND THERE FOLLOWED (A BRIEF->THE ROOF) INTERVAL OF SILENCE I SITTING (STILL IN->STEALING) MY CHAIR AND HE STANDING WITH HIS BACK TO THE FIRE SILENTLY TRIUMPHING IN MY DESPAIR +533-131562-0015-862: I KNOW THAT DAY AFTER DAY SUCH FEELINGS (WILL->TO) RETURN UPON ME +533-131562-0016-863: I (TRY->TRIED) TO LOOK TO HIM AND RAISE MY HEART TO HEAVEN BUT IT WILL (CLEAVE->CLIFF) TO THE DUST +533-131564-0000-768: VAIN HOPE I FEAR +533-131564-0001-769: (MISTER->MISS) AND MISSUS (HATTERSLEY->HALTERSLEY) HAVE BEEN (STAYING AT THE GROVE->SEEING IT TO GROW) A FORTNIGHT AND AS (MISTER->MISSUS) HARGRAVE IS STILL ABSENT AND (THE->*) WEATHER WAS REMARKABLY FINE I NEVER PASSED (A->THE) DAY WITHOUT SEEING MY TWO FRIENDS (MILICENT->MILLSON) AND (ESTHER->ASSER) EITHER THERE OR HERE +533-131564-0002-770: NO UNLESS YOU CAN TELL (ME->YOU) WHEN TO EXPECT HIM HOME 
+533-131564-0003-771: I CAN'T (YOU DON'T WANT->EVEN WANTS) HIM DO YOU +533-131564-0004-772: IT IS A RESOLUTION YOU (OUGHT TO HAVE FORMED->ARE REFORMED) LONG AGO +533-131564-0005-773: WE ALL HAVE A BIT OF A LIKING FOR HIM AT THE BOTTOM OF OUR (HEARTS->HEART) THOUGH WE CAN'T RESPECT HIM +533-131564-0006-774: NO I'D RATHER BE LIKE MYSELF (BAD AS->THAT WAS) I AM +533-131564-0007-775: NEVER MIND MY PLAIN SPEAKING SAID I IT IS FROM THE BEST OF MOTIVES +533-131564-0008-776: BUT TELL ME SHOULD YOU WISH (YOUR SONS->YOURSELVES) TO BE LIKE MISTER HUNTINGDON OR EVEN LIKE YOURSELF +533-131564-0009-777: OH NO (I COULDN'T->ECHOLYN) STAND THAT +533-131564-0010-778: FIRE AND FURY +533-131564-0011-779: NOW DON'T (BURST->FORCE) INTO A TEMPEST AT THAT +533-131564-0012-780: BUT HANG IT THAT'S NOT MY FAULT +533-131564-0013-781: NOT (YEARS->EARS) FOR SHE'S ONLY FIVE AND TWENTY +533-131564-0014-782: WHAT (WOULD->DID) YOU MAKE OF ME AND THE CHILDREN TO BE SURE THAT (WORRY HER TO->WERE HE HURT) DEATH BETWEEN THEM +533-131564-0015-783: I KNOW THEY ARE BLESS THEM +533-131564-0016-784: (HE FOLLOWED->IF ALL OF) ME INTO THE LIBRARY +533-131564-0017-785: I SOUGHT OUT AND PUT INTO HIS HANDS TWO OF (MILICENT'S LETTERS->MILICENT SLATTERS) ONE (DATED->DID IT) FROM LONDON AND WRITTEN DURING ONE OF HIS (WILDEST->WALLA'S) SEASONS OF RECKLESS DISSIPATION THE OTHER IN THE COUNTRY DURING (A LUCID->ELUSIVE) INTERVAL +533-131564-0018-786: THE FORMER WAS FULL OF TROUBLE AND ANGUISH NOT ACCUSING HIM BUT DEEPLY REGRETTING HIS CONNECTION WITH HIS PROFLIGATE COMPANIONS ABUSING MISTER (GRIMSBY AND->GRIM'S BEING) OTHERS INSINUATING BITTER THINGS AGAINST MISTER HUNTINGDON AND MOST (INGENIOUSLY THROWING->INGENUOUSLY THREW IN) THE BLAME OF HER HUSBAND'S MISCONDUCT ON (TO->THE) OTHER (MEN'S->MAN'S) SHOULDERS +533-131564-0019-787: I'VE BEEN A CURSED RASCAL GOD KNOWS SAID HE AS HE GAVE IT (A HEARTY->EARTHLY) SQUEEZE BUT YOU SEE IF I DON'T MAKE AMENDS FOR IT (D N->THEN) ME IF I DON'T +533-131564-0020-788: IF YOU (INTEND TO REFORM->INSENT WITH FORM) INVOKE GOD'S BLESSING (HIS->IS A) MERCY (AND HIS AID NOT HIS CURSE->IN THIS APE NOR DISCOURSE) +533-131564-0021-789: GOD HELP ME THEN FOR (I'M->I AM) SURE I (NEED IT->NEEDED) +533-131564-0022-790: (WHERE'S->WHERE IS) MILICENT +533-131564-0023-791: NAY NOT I SAID HE TURNING (HER ROUND->AROUND) AND PUSHING (HER->IT) TOWARDS ME +533-131564-0024-792: MILICENT FLEW TO THANK ME (OVERFLOWING WITH->OVERWHELMING ITS) GRATITUDE +533-131564-0025-793: CRIED SHE I COULDN'T HAVE INFLUENCED HIM I'M SURE BY ANYTHING THAT I COULD HAVE SAID +533-131564-0026-794: YOU NEVER TRIED ME (MILLY->MERELY) SAID HE +533-131564-0027-795: AFTER THAT THEY WILL REPAIR TO THEIR COUNTRY HOME +5442-32873-0000-1365: CAPTAIN LAKE DID NOT LOOK AT ALL LIKE A LONDON DANDY NOW +5442-32873-0001-1366: THERE WAS A VERY NATURAL SAVAGERY AND DEJECTION THERE AND A WILD (LEER->YARD) IN HIS YELLOW EYES RACHEL SAT DOWN +5442-32873-0002-1367: (A->AND) SLAVE ONLY THINK A SLAVE +5442-32873-0003-1368: OH FRIGHTFUL FRIGHTFUL IS IT A DREAM +5442-32873-0004-1369: OH FRIGHTFUL (FRIGHTFUL->DREADFUL) +5442-32873-0005-1370: STANLEY STANLEY IT WOULD BE MERCY TO KILL ME SHE BROKE OUT AGAIN +5442-32873-0006-1371: BRIGHT AND NATTY (WERE THE CHINTZ->WITH A CHIN'S) CURTAINS AND THE LITTLE TOILET SET OUT NOT INELEGANTLY AND HER PET PIPING GOLDFINCH ASLEEP ON HIS PERCH WITH HIS BIT OF SUGAR BETWEEN THE (WIRES->WIVES) OF HIS CAGE HER PILLOW SO WHITE AND UNPRESSED WITH ITS LITTLE EDGING OF LACE +5442-32873-0007-1372: WHEN HE CAME BACK TO THE DRAWING ROOM A (TOILET->TALLED) 
BOTTLE OF (EAU DE COLOGNE->OVERCLONE) IN HIS HAND WITH HER LACE HANDKERCHIEF HE BATHED HER (TEMPLES->TEMPLE) AND FOREHEAD +5442-32873-0008-1373: THERE WAS NOTHING VERY BROTHERLY IN HIS LOOK AS HE PEERED INTO (HER->A) PALE SHARP FEATURES DURING THE PROCESS +5442-32873-0009-1374: THERE DON'T MIND ME SHE SAID SHARPLY AND GETTING UP SHE LOOKED DOWN AT HER DRESS AND THIN SHOES AND SEEMING TO RECOLLECT HERSELF SHE TOOK THE CANDLE HE HAD JUST SET DOWN AND WENT SWIFTLY TO HER ROOM +5442-32873-0010-1375: AND SHE THREW BACK HER VEIL AND GOING HURRIEDLY TO THE TOILET MECHANICALLY SURVEYED HERSELF (IN->FROM) THE GLASS +5442-32873-0011-1376: (RACHEL LAKE RACHEL LAKE->RIGIDLY LEGALLY) WHAT ARE YOU NOW +5442-32873-0012-1377: I'LL STAY HERE THAT IS IN THE DRAWING ROOM SHE ANSWERED AND THE FACE WAS WITHDRAWN +5442-32873-0013-1378: (HE SLACKENED HIS PACE->HIS CLACK IN THE SPACE) AND (TAPPED->TOP) SHARPLY AT THE LITTLE WINDOW OF (THAT->THE) MODEST POST OFFICE AT WHICH THE YOUNG LADIES IN THE PONY CARRIAGE HAD PULLED UP THE DAY BEFORE AND WITHIN WHICH LUKE (WAGGOT->RAGGED) WAS WONT TO SLEEP IN A SORT OF WOODEN BOX THAT FOLDED UP AND APPEARED TO BE A CHEST OF DRAWERS ALL DAY +5442-32873-0014-1379: (LUKE->LOOK) TOOK CARE OF MISTER LARKIN'S (DOGS->DOG) AND GROOMED MISTER WYLDER'S HORSE AND CLEANED UP HIS (DOG->DOOR) CART FOR MARK BEING CLOSE ABOUT MONEY AND FINDING THAT THE THING WAS TO BE DONE MORE CHEAPLY THAT WAY PUT UP HIS HORSE AND (DOG CART->DORCART) IN THE POST OFFICE PREMISES AND SO EVADED THE LIVERY CHARGES OF THE BRANDON ARMS +5442-32873-0015-1380: BUT LUKE WAS (NOT->KNOWN) THERE AND CAPTAIN LAKE RECOLLECTING HIS HABITS AND HIS HAUNT HURRIED ON TO THE SILVER LION WHICH HAS ITS GABLE TOWARDS THE COMMON ONLY ABOUT A HUNDRED STEPS AWAY FOR DISTANCES ARE NOT GREAT IN (GYLINGDEN->GILINGDEN) +5442-32873-0016-1381: HERE WERE THE (FLOW->FLOOR) OF SOUL (AND->UN) OF STOUT LONG PIPES LONG YARNS AND TOLERABLY LONG CREDITS AND THE HUMBLE (SCAPEGRACES->CAVE BRACES) OF THE TOWN RESORTED THITHER FOR THE PLEASURES OF A CLUB LIFE AND OFTEN REVELLED DEEP INTO THE SMALL HOURS OF THE MORNING +5442-32873-0017-1382: LOSE NO TIME (AND->BUT) I'LL GIVE YOU HALF A CROWN +5442-32873-0018-1383: LUKE STUCK ON HIS GREASY (WIDEAWAKE->WIDE AWAKE) AND IN A FEW MINUTES MORE THE (DOG->DOOR) CART WAS (TRUNDLED->TUMBLED) OUT INTO THE LANE AND THE HORSE HARNESSED WENT BETWEEN THE SHAFTS WITH THAT WONDERFUL CHEERFULNESS WITH WHICH (THEY->THEIR) BEAR TO BE CALLED UP (UNDER->AND THE) STARTLING CIRCUMSTANCES (AT->AND) UNSEASONABLE HOURS +5442-32873-0019-1384: IF I THOUGHT YOU'D FAIL ME NOW (TAMAR->TO MORROW) I SHOULD NEVER COME BACK GOOD NIGHT (TAMAR->TO MORROW) +5442-41168-0000-1385: THE ACT SAID THAT IN CASE OF DIFFERENCE OF OPINION THERE MUST BE A BALLOT +5442-41168-0001-1386: HE WENT UP TO THE TABLE AND STRIKING IT WITH HIS FINGER RING HE SHOUTED LOUDLY A BALLOT +5442-41168-0002-1387: HE WAS SHOUTING FOR THE VERY (COURSE SERGEY->COARSE SURGY) IVANOVITCH HAD PROPOSED BUT IT WAS EVIDENT THAT HE HATED HIM AND ALL HIS PARTY AND THIS FEELING OF HATRED SPREAD THROUGH THE WHOLE PARTY AND ROUSED IN OPPOSITION TO IT THE SAME VINDICTIVENESS THOUGH IN A MORE SEEMLY FORM ON THE OTHER SIDE +5442-41168-0003-1388: SHOUTS WERE RAISED AND FOR A MOMENT ALL WAS CONFUSION SO THAT THE MARSHAL OF THE PROVINCE HAD TO CALL FOR (ORDER->ODO) A BALLOT +5442-41168-0004-1389: WE SHED OUR BLOOD FOR OUR COUNTRY +5442-41168-0005-1390: THE CONFIDENCE OF THE MONARCH (*->BUT) NO CHECKING THE ACCOUNTS OF THE (MARSHAL HE'S->MARTIAN IS) NOT A CASHIER BUT THAT'S NOT THE POINT 
+5442-41168-0006-1391: (VOTES->VAULTS) PLEASE BEASTLY +5442-41168-0007-1392: THEY EXPRESSED THE MOST IMPLACABLE HATRED +5442-41168-0008-1393: LEVIN DID NOT IN THE LEAST UNDERSTAND WHAT WAS THE MATTER AND HE (MARVELED->MARVELLED) AT THE PASSION WITH WHICH IT WAS DISPUTED WHETHER OR NOT THE DECISION ABOUT (FLEROV->FLARE OFF) SHOULD BE PUT TO THE VOTE +5442-41168-0009-1394: HE FORGOT AS (SERGEY IVANOVITCH->SO GIVANOVITCH) EXPLAINED TO HIM AFTERWARDS THIS (SYLLOGISM->DILIGION) THAT IT WAS NECESSARY FOR THE PUBLIC GOOD TO GET RID OF THE MARSHAL OF THE PROVINCE THAT TO GET (RID OF->INTO) THE (MARSHAL->MARTIAN) IT WAS NECESSARY TO HAVE A MAJORITY OF VOTES THAT TO GET A MAJORITY OF (VOTES->BOATS) IT WAS NECESSARY TO SECURE (FLEROV'S->FLORO'S) RIGHT TO VOTE THAT TO SECURE THE RECOGNITION OF (FLEROV'S->FLORA'S) RIGHT TO VOTE THEY MUST DECIDE ON THE INTERPRETATION TO BE PUT ON THE ACT +5442-41168-0010-1395: BUT LEVIN FORGOT ALL THAT AND IT WAS PAINFUL TO HIM TO SEE ALL THESE EXCELLENT PERSONS FOR WHOM HE HAD A RESPECT IN SUCH AN UNPLEASANT AND VICIOUS STATE OF EXCITEMENT +5442-41168-0011-1396: TO ESCAPE FROM THIS PAINFUL FEELING HE WENT AWAY INTO THE OTHER ROOM WHERE THERE WAS NOBODY EXCEPT THE WAITERS AT THE REFRESHMENT BAR +5442-41168-0012-1397: HE PARTICULARLY LIKED THE WAY ONE (GRAY WHISKERED->GREY WHISKIRT) WAITER WHO SHOWED HIS (SCORN->CORN) FOR THE OTHER YOUNGER ONES AND WAS (JEERED->JERED) AT BY THEM WAS TEACHING THEM HOW TO FOLD UP NAPKINS PROPERLY +5442-41168-0013-1398: LEVIN ADVANCED BUT UTTERLY FORGETTING WHAT HE WAS TO DO AND MUCH EMBARRASSED HE TURNED TO SERGEY IVANOVITCH WITH THE QUESTION WHERE AM I TO PUT IT +5442-41168-0014-1399: (SERGEY IVANOVITCH->SOJOURNOVITCH) FROWNED +5442-41168-0015-1400: THAT IS A MATTER FOR EACH MAN'S OWN DECISION HE SAID SEVERELY +5442-41168-0016-1401: HAVING PUT IT IN HE RECOLLECTED THAT HE OUGHT TO HAVE THRUST HIS LEFT HAND TOO AND SO HE THRUST IT (IN->*) THOUGH TOO LATE AND STILL MORE OVERCOME WITH CONFUSION HE BEAT A HASTY RETREAT INTO THE BACKGROUND +5442-41168-0017-1402: A HUNDRED AND TWENTY SIX FOR ADMISSION NINETY EIGHT AGAINST +5442-41168-0018-1403: SANG (OUT->ALL) THE VOICE OF THE SECRETARY WHO COULD NOT PRONOUNCE THE LETTER R +5442-41168-0019-1404: THEN THERE WAS A LAUGH (A BUTTON->OF BOTTOM) AND TWO (NUTS->KNOTS) WERE FOUND IN THE BOX +5442-41168-0020-1405: BUT THE OLD PARTY DID NOT CONSIDER THEMSELVES CONQUERED +5442-41168-0021-1406: (IN REPLY SNETKOV->INTERPLIES NEDCOV) SPOKE OF THE TRUST (THE->AND) NOBLEMEN OF THE PROVINCE HAD PLACED IN HIM THE (AFFECTION->EFFECT ON) THEY HAD SHOWN HIM WHICH HE DID NOT DESERVE AS HIS ONLY MERIT HAD BEEN HIS ATTACHMENT TO THE NOBILITY TO WHOM HE HAD DEVOTED TWELVE YEARS OF SERVICE +5442-41168-0022-1407: THIS EXPRESSION IN THE MARSHAL'S FACE WAS PARTICULARLY TOUCHING TO LEVIN BECAUSE ONLY THE DAY (BEFORE->FOR) HE HAD BEEN AT HIS HOUSE ABOUT HIS (TRUSTEE->TRUSTY) BUSINESS AND HAD SEEN HIM IN ALL HIS GRANDEUR A KIND HEARTED FATHERLY MAN +5442-41168-0023-1408: IF THERE ARE MEN YOUNGER AND MORE DESERVING THAN I LET THEM SERVE +5442-41168-0024-1409: AND THE MARSHAL DISAPPEARED THROUGH A SIDE DOOR +5442-41168-0025-1410: (THEY->THERE) WERE TO PROCEED IMMEDIATELY TO THE ELECTION +5442-41168-0026-1411: (TWO->DO) NOBLE GENTLEMEN WHO HAD A WEAKNESS FOR STRONG DRINK HAD BEEN MADE DRUNK BY THE PARTISANS OF SNETKOV AND (A->THE) THIRD HAD BEEN ROBBED OF HIS UNIFORM +5442-41168-0027-1412: ON LEARNING THIS THE NEW PARTY HAD MADE HASTE DURING THE DISPUTE ABOUT (FLEROV->FLAREFF) TO SEND SOME OF THEIR MEN IN A SLEDGE TO CLOTHE THE 
STRIPPED GENTLEMAN AND TO BRING ALONG ONE OF THE INTOXICATED TO THE MEETING +5442-41169-0000-1413: LEVIN DID NOT CARE TO EAT AND HE WAS NOT SMOKING HE DID NOT WANT TO JOIN HIS OWN FRIENDS THAT IS (SERGEY->SOJI) IVANOVITCH STEPAN ARKADYEVITCH SVIAZHSKY AND THE REST BECAUSE VRONSKY IN (HIS EQUERRY'S->AN EQUERRIES) UNIFORM WAS STANDING WITH THEM IN EAGER CONVERSATION +5442-41169-0001-1414: HE WENT TO THE WINDOW AND SAT DOWN SCANNING THE GROUPS AND LISTENING TO WHAT WAS BEING SAID AROUND HIM +5442-41169-0002-1415: HE'S SUCH A BLACKGUARD +5442-41169-0003-1416: I HAVE TOLD HIM SO BUT IT MAKES NO DIFFERENCE ONLY THINK OF IT +5442-41169-0004-1417: THESE PERSONS WERE UNMISTAKABLY SEEKING A PLACE WHERE THEY COULD TALK WITHOUT BEING OVERHEARD +5442-41169-0005-1418: SHALL WE GO ON YOUR EXCELLENCY FINE CHAMPAGNE +5442-41169-0006-1419: (LAST YEAR->MASTER) AT OUR DISTRICT (MARSHAL->MARTIAL) NIKOLAY IVANOVITCH'S +5442-41169-0007-1420: OH STILL JUST THE SAME ALWAYS AT A LOSS THE LANDOWNER ANSWERED WITH A RESIGNED SMILE BUT WITH AN EXPRESSION OF SERENITY AND CONVICTION THAT SO IT MUST BE +5442-41169-0008-1421: WHY WHAT IS (THERE->THAT) TO UNDERSTAND +5442-41169-0009-1422: (THERE'S->THERE IS) NO MEANING IN IT AT ALL +5442-41169-0010-1423: THEN (TOO->DO) ONE MUST KEEP UP CONNECTIONS +5442-41169-0011-1424: IT'S A MORAL OBLIGATION OF A SORT +5442-41169-0012-1425: AND THEN TO TELL THE TRUTH THERE'S ONE'S OWN (INTERESTS->INTEREST) +5442-41169-0013-1426: (THEY'RE->THEIR) PROPRIETORS OF A SORT BUT (WE'RE->WE ARE) THE LANDOWNERS +5442-41169-0014-1427: THAT IT MAY BE BUT STILL IT OUGHT TO BE TREATED A LITTLE MORE RESPECTFULLY +5442-41169-0015-1428: IF (WE'RE->WE ARE) LAYING OUT A GARDEN PLANNING ONE BEFORE THE HOUSE YOU KNOW AND THERE (YOU'VE->YOU HAVE) A TREE (THAT'S->THAT) STOOD (FOR->IN) CENTURIES IN THE VERY SPOT OLD AND GNARLED IT MAY BE AND YET YOU DON'T CUT DOWN THE OLD FELLOW TO MAKE ROOM FOR THE (FLOWERBEDS->FLOWER BEDS) BUT LAY OUT YOUR BEDS SO AS TO TAKE ADVANTAGE OF THE TREE +5442-41169-0016-1429: WELL AND HOW IS YOUR LAND DOING +5442-41169-0017-1430: BUT ONE'S WORK IS THROWN IN FOR NOTHING +5442-41169-0018-1431: OH WELL ONE DOES IT WHAT WOULD YOU HAVE +5442-41169-0019-1432: AND (WHAT'S->ONCE) MORE THE LANDOWNER WENT ON LEANING HIS ELBOWS ON THE WINDOW AND CHATTING ON MY SON I MUST TELL YOU HAS NO TASTE FOR IT +5442-41169-0020-1433: SO THERE'LL BE NO ONE TO KEEP IT UP AND YET ONE DOES IT +5442-41169-0021-1434: WE WALKED ABOUT THE FIELDS AND THE GARDEN NO SAID HE STEPAN (VASSILIEVITCH->WISLOVITCH) EVERYTHING'S WELL LOOKED AFTER BUT YOUR (GARDEN'S->GARDENS) NEGLECTED +5442-41169-0022-1435: TO MY THINKING I'D (CUT->GOT) DOWN (THAT LIME->THE LINE) TREE +5442-41169-0023-1436: HERE (YOU'VE->YOU HAVE) THOUSANDS OF LIMES AND EACH WOULD MAKE TWO GOOD BUNDLES OF BARK +5442-41169-0024-1437: YOU'RE MARRIED (I'VE->I) HEARD SAID THE LANDOWNER +5442-41169-0025-1438: YES (IT'S RATHER->AND JOHN IS) STRANGE HE WENT ON +5442-41169-0026-1439: THE LANDOWNER CHUCKLED UNDER HIS WHITE (MUSTACHES->MOUSTACHES) +5442-41169-0027-1440: WHY DON'T WE (CUT->GO) DOWN OUR (PARKS->BOX) FOR TIMBER +5442-41169-0028-1441: SAID LEVIN RETURNING TO A THOUGHT THAT HAD STRUCK HIM +5442-41169-0029-1442: THERE'S (A->THE) CLASS INSTINCT TOO OF WHAT ONE OUGHT AND (OUGHTN'T->OUGHT NOT KNOWN) TO DO +5442-41169-0030-1443: THERE'S THE PEASANTS TOO I WONDER AT THEM SOMETIMES ANY GOOD PEASANT TRIES TO TAKE ALL THE LAND HE CAN +5442-41169-0031-1444: WITHOUT A RETURN TOO (AT->ADD) A SIMPLE LOSS +5484-24317-0000-571: WHEN HE CAME FROM THE BATH 
(PROCLUS->PROCLASS) VISITED HIM AGAIN +5484-24317-0001-572: BUT (HERMON->HARMON) WAS NOT IN THE MOOD TO SHARE A JOYOUS REVEL AND HE FRANKLY SAID SO ALTHOUGH IMMEDIATELY AFTER HIS RETURN HE HAD ACCEPTED THE INVITATION TO THE FESTIVAL WHICH THE WHOLE FELLOWSHIP OF ARTISTS WOULD GIVE THE FOLLOWING DAY (IN->AN) HONOUR OF THE (SEVENTIETH->SEVENTEENTH) BIRTHDAY OF THE OLD SCULPTOR (EUPHRANOR->EUPHRANER) +5484-24317-0002-573: SHE WOULD APPEAR HERSELF AT DESSERT AND THE BANQUET MUST THEREFORE BEGIN AT AN UNUSUALLY EARLY HOUR +5484-24317-0003-574: SO THE ARTIST FOUND HIMSELF OBLIGED TO RELINQUISH HIS OPPOSITION +5484-24317-0004-575: THE BANQUET WAS TO BEGIN IN A FEW HOURS YET HE COULD NOT LET THE DAY PASS WITHOUT SEEING DAPHNE AND TELLING HER THE WORDS OF THE ORACLE +5484-24317-0005-576: HE LONGED WITH ARDENT YEARNING FOR THE SOUND OF HER VOICE AND STILL MORE TO UNBURDEN HIS SORELY TROUBLED SOUL TO HER +5484-24317-0006-577: SINCE HIS RETURN FROM THE ORACLE THE FEAR THAT THE (RESCUED->RESCUE) DEMETER MIGHT YET BE THE WORK OF (MYRTILUS->MERTOLUS) HAD AGAIN MASTERED HIM +5484-24317-0007-578: THE APPROVAL AS WELL AS (THE->A) DOUBTS WHICH IT (AROUSED->ARISED) IN OTHERS STRENGTHENED HIS OPINION ALTHOUGH EVEN NOW HE COULD NOT SUCCEED IN BRINGING IT INTO HARMONY WITH THE FACTS +5484-24317-0008-579: THEN HE WENT DIRECTLY TO THE (NEIGHBOURING->NEIGHBORING) PALACE THE QUEEN MIGHT HAVE APPEARED ALREADY AND IT WOULD NOT DO TO KEEP HER WAITING +5484-24317-0009-580: HITHERTO THE MERCHANT HAD BEEN INDUCED IT IS TRUE TO ADVANCE LARGE SUMS OF MONEY TO THE QUEEN BUT THE LOYAL DEVOTION WHICH HE SHOWED TO HER ROYAL HUSBAND HAD RENDERED IT IMPOSSIBLE TO GIVE HIM EVEN A HINT OF THE CONSPIRACY +5484-24317-0010-581: WHEN (HERMON ENTERED->HERMAN ANSWERED) THE RESIDENCE OF THE (GRAMMATEUS->GRAMMATIUS) IN THE PALACE THE GUESTS HAD ALREADY ASSEMBLED +5484-24317-0011-582: (THE PLACE->THEY PLACED) BY (HERMON'S->HERMANN'S) SIDE WHICH (ALTHEA->ALTHIE) HAD CHOSEN FOR HERSELF WOULD THEN BE GIVEN UP TO (ARSINOE->ARSENO) +5484-24317-0012-583: TRUE AN INTERESTING CONVERSATION STILL HAD POWER TO CHARM HIM BUT OFTEN DURING ITS CONTINUANCE THE FULL CONSCIOUSNESS OF HIS MISFORTUNE FORCED ITSELF UPON HIS MIND FOR THE MAJORITY OF THE SUBJECTS DISCUSSED BY THE ARTISTS CAME TO THEM THROUGH THE MEDIUM OF SIGHT AND REFERRED TO NEW CREATIONS OF ARCHITECTURE SCULPTURE AND PAINTING FROM WHOSE ENJOYMENT (HIS->IS) BLINDNESS (DEBARRED->DEBARED) HIM +5484-24317-0013-584: A STRANGER OUT OF HIS OWN SPHERE HE (FELT->FELL) CHILLED AMONG THESE CLOSELY UNITED MEN AND WOMEN TO WHOM NO TIE BOUND HIM SAVE THE PRESENCE OF THE SAME HOST +5484-24317-0014-585: (CRATES->CREEDS) HAD REALLY BEEN INVITED IN ORDER TO WIN HIM OVER TO THE QUEEN'S CAUSE BUT CHARMING FAIR HAIRED (NICO->NIGO) HAD BEEN COMMISSIONED BY THE CONSPIRATORS TO PERSUADE HIM TO SING (ARSINOE'S->ARSENO'S) PRAISES AMONG HIS PROFESSIONAL ASSOCIATES +5484-24317-0015-586: HIS SON HAD BEEN (THIS->THE) ROYAL (DAME'S->JAMES'S) FIRST HUSBAND AND SHE HAD DESERTED HIM TO MARRY (LYSIMACHUS->LISSMACHUS) THE AGED KING OF THRACE +5484-24317-0016-587: THE KING'S SISTER THE OBJECT OF HIS LOVE CRIED (HERMON->HARMON) INCREDULOUSLY +5484-24317-0017-588: WE WOMEN ARE ONLY AS OLD AS WE LOOK AND THE (LEECHES AND TIRING WOMEN->LEECH HAS ENTIRE AND WOMAN) OF THIS BEAUTY OF FORTY PRACTISE ARTS WHICH GIVE HER THE APPEARANCE OF TWENTY FIVE YET PERHAPS THE KING VALUES HER INTELLECT MORE THAN HER PERSON AND THE WISDOM OF A HUNDRED SERPENTS IS CERTAINLY UNITED IN THIS WOMAN'S HEAD +5484-24317-0018-589: THE THREE MOST TRUSTWORTHY 
ONES (ARE HERE AMYNTAS->I HEAR I MEANTIS) THE LEECH CHRYSIPPUS (AND->IN) THE ADMIRABLE (PROCLUS->PROCLASS) +5484-24317-0019-590: LET US HOPE THAT YOU WILL MAKE THIS THREE LEAVED CLOVER THE LUCK PROMISING (FOUR LEAVED->FOLIEVED) ONE +5484-24317-0020-591: YOUR UNCLE TOO HAS OFTEN WITH (PRAISEWORTHY->PRAISED WORTHY) GENEROSITY HELPED (ARSINOE->AUSTENO) IN MANY (AN->*) EMBARRASSMENT +5484-24317-0021-592: HOW LONG HE KEPT YOU WAITING (FOR->FROM) THE FIRST WORD CONCERNING A WORK WHICH JUSTLY TRANSPORTED THE WHOLE CITY WITH DELIGHT +5484-24317-0022-593: WHEN HE DID FINALLY SUMMON YOU HE SAID THINGS WHICH MUST HAVE WOUNDED YOU +5484-24317-0023-594: THAT IS GOING TOO FAR REPLIED (HERMON->HARMON) +5484-24317-0024-595: HE WINKED AT HER AND MADE A SIGNIFICANT GESTURE AS HE SPOKE AND THEN INFORMED THE BLIND ARTIST HOW GRACIOUSLY (ARSINOE->ARSENO) HAD REMEMBERED HIM WHEN SHE HEARD OF THE REMEDY BY WHOSE AID MANY A WONDERFUL CURE OF BLIND (EYES->EYE) HAD BEEN MADE IN (RHODES->ROADS) +5484-24317-0025-596: THE ROYAL LADY HAD INQUIRED ABOUT HIM AND HIS SUFFERINGS WITH ALMOST SISTERLY INTEREST AND ALTHEA EAGERLY CONFIRMED THE STATEMENT +5484-24317-0026-597: (HERMON->HERMAN) LISTENED TO THE (PAIR IN->PARENT) SILENCE +5484-24317-0027-598: THE (RHODIAN->RADIAN) WAS JUST BEGINNING TO PRAISE (ARSINOE->ARSENAL) ALSO AS A SPECIAL FRIEND AND CONNOISSEUR OF THE SCULPTOR'S ART WHEN CRATES (HERMON'S->HERMANN'S) FELLOW STUDENT ASKED THE BLIND ARTIST IN BEHALF OF HIS BEAUTIFUL COMPANION WHY HIS DEMETER WAS PLACED UPON A PEDESTAL (WHICH->WITCH) TO OTHERS AS WELL AS HIMSELF SEEMED TOO HIGH FOR THE SIZE OF THE STATUE +5484-24317-0028-599: YET WHAT MATTERED IT EVEN IF THESE MISERABLE PEOPLE CONSIDERED THEMSELVES DECEIVED AND POINTED THE FINGER OF SCORN AT HIM +5484-24317-0029-600: A WOMAN WHO YEARNS FOR THE REGARD OF ALL MEN AND MAKES LOVE A TOY EASILY LESSENS THE DEMANDS SHE IMPOSES UPON INDIVIDUALS +5484-24317-0030-601: ONLY EVEN THOUGH LOVE HAS WHOLLY DISAPPEARED SHE STILL CLAIMS CONSIDERATION AND (ALTHEA->ALTHIA) DID NOT WISH TO LOSE (HERMON'S->HARMON'S) REGARD +5484-24317-0031-602: HOW INDIFFERENT YOU LOOK BUT I TELL YOU HER DEEP BLUE EYES FLASHED AS SHE SPOKE THAT SO LONG AS YOU (WERE->WAS) STILL A GENUINE CREATING ARTIST THE CASE WAS DIFFERENT +5484-24317-0032-603: THOUGH SO LOUD A DENIAL IS WRITTEN ON YOUR FACE I PERSIST IN MY CONVICTION AND THAT NO IDLE DELUSION (ENSNARES->AND SNATHS) ME I CAN PROVE +5484-24317-0033-604: IT WAS NAY IT COULD HAVE BEEN NOTHING ELSE THAT VERY SPIDER +5484-24318-0000-605: NOT A SOUND IF YOU VALUE YOUR LIVES +5484-24318-0001-606: TO OFFER RESISTANCE WOULD HAVE BEEN MADNESS FOR EVEN (HERMON->HERMANN) PERCEIVED BY THE LOUD CLANKING OF WEAPONS AROUND THEM (THE->THEY) GREATLY SUPERIOR POWER OF THE ENEMY AND THEY WERE ACTING BY THE ORDERS OF THE KING TO THE PRISON NEAR THE PLACE OF EXECUTION +5484-24318-0002-607: WAS HE TO BE LED TO THE EXECUTIONER'S BLOCK +5484-24318-0003-608: WHAT PLEASURE HAD LIFE TO OFFER HIM THE BLIND MAN WHO WAS ALREADY DEAD TO HIS ART +5484-24318-0004-609: OUGHT HE NOT TO GREET (THIS->HIS) SUDDEN END AS A (BOON->BOOM) FROM THE IMMORTALS +5484-24318-0005-610: DID IT NOT SPARE HIM A HUMILIATION AS GREAT AND PAINFUL AS COULD BE IMAGINED +5484-24318-0006-611: WHATEVER MIGHT AWAIT HIM HE DESIRED NO BETTER FATE +5484-24318-0007-612: IF HE HAD PASSED INTO ANNIHILATION HE (HERMON->HERMAN) WISHED TO FOLLOW HIM THITHER AND ANNIHILATION CERTAINLY MEANT REDEMPTION FROM PAIN AND MISERY +5484-24318-0008-613: BUT IF HE WERE DESTINED TO MEET HIS (MYRTILUS->BURTLES) AND HIS MOTHER IN 
THE WORLD BEYOND THE GRAVE WHAT HAD HE NOT TO TELL THEM HOW SURE HE WAS (OF->A) FINDING A JOYFUL RECEPTION THERE FROM BOTH +5484-24318-0009-614: THE POWER WHICH DELIVERED HIM OVER TO DEATH JUST AT THAT MOMENT WAS NOT NEMESIS NO IT WAS A KINDLY DEITY +5484-24318-0010-615: YET IT WAS NO ILLUSION THAT DECEIVED HIM +5484-24318-0011-616: AGAIN HE HEARD THE BELOVED VOICE AND THIS TIME IT ADDRESSED NOT ONLY HIM BUT WITH THE UTMOST HASTE THE COMMANDER OF THE SOLDIERS +5484-24318-0012-617: SOMETIMES WITH TOUCHING ENTREATY SOMETIMES WITH IMPERIOUS COMMAND SHE PROTESTED AFTER GIVING HIM HER NAME THAT THIS MATTER COULD BE NOTHING BUT AN UNFORTUNATE MISTAKE +5484-24318-0013-618: LASTLY WITH EARNEST WARMTH SHE BESOUGHT HIM BEFORE TAKING THE PRISONERS AWAY TO PERMIT HER TO SPEAK TO THE COMMANDING GENERAL PHILIPPUS HER FATHER'S GUEST WHO SHE WAS CERTAIN WAS IN THE PALACE +5484-24318-0014-619: CRIED (HERMON->HERMANN) IN GRATEFUL AGITATION BUT SHE WOULD NOT LISTEN TO HIM AND (FOLLOWED->FOLLOW) THE SOLDIER WHOM THE CAPTAIN DETAILED TO GUIDE HER INTO THE PALACE +5484-24318-0015-620: TO MORROW YOU SHALL CONFESS TO ME WHO TREACHEROUSLY DIRECTED YOU TO THIS DANGEROUS PATH +5484-24318-0016-621: DAPHNE AGAIN PLEADED FOR THE LIBERATION OF THE PRISONERS BUT (PHILIPPUS->PHILIP WAS) SILENCED HER WITH (THE->A) GRAVE EXCLAMATION THE ORDER OF THE KING +5484-24318-0017-622: AS SOON AS THE CAPTIVE ARTIST WAS ALONE WITH THE WOMAN HE LOVED HE CLASPED HER HAND POURING FORTH INCOHERENT WORDS OF THE MOST ARDENT GRATITUDE AND WHEN HE FELT HER WARMLY (RETURN->RETURNED) THE PRESSURE HE COULD NOT RESTRAIN THE DESIRE TO CLASP HER TO HIS HEART +5484-24318-0018-623: IN SPITE OF HIS DEEP (MENTAL->MANTLE) DISTRESS HE COULD HAVE SHOUTED ALOUD IN HIS DELIGHT AND GRATITUDE +5484-24318-0019-624: HE MIGHT NOW HAVE BEEN PERMITTED TO BIND FOREVER TO HIS LIFE THE WOMAN WHO HAD JUST RESCUED HIM FROM THE GREATEST DANGER BUT THE CONFESSION HE MUST MAKE TO HIS FELLOW ARTISTS IN THE (PALAESTRA->PELLESTRA) THE FOLLOWING MORNING STILL SEALED HIS LIPS YET IN THIS HOUR HE FELT THAT HE WAS UNITED TO HER AND OUGHT NOT TO CONCEAL WHAT AWAITED HIM SO OBEYING A STRONG IMPULSE HE EXCLAIMED YOU KNOW THAT I LOVE YOU +5484-24318-0020-625: I LOVE YOU AND HAVE LOVED YOU ALWAYS +5484-24318-0021-626: (DAPHNE->JAPHANE) EXCLAIMED TENDERLY WHAT MORE IS NEEDED +5484-24318-0022-627: BUT (HERMON->HARMON) WITH DROOPING HEAD MURMURED TO MORROW I SHALL NO LONGER BE WHAT I AM NOW +5484-24318-0023-628: THEN (DAPHNE->JAPANE) RAISED HER FACE TO HIS ASKING SO THE (DEMETER->DEMEANOR) IS THE WORK OF (MYRTILUS->MYRTOLIS) +5484-24318-0024-629: WHAT A TERRIBLE ORDEAL AGAIN AWAITS YOU +5484-24318-0025-630: AND I FOOL BLINDED ALSO IN MIND COULD BE VEXED WITH YOU FOR IT +5484-24318-0026-631: BRING THIS BEFORE YOUR MIND AND EVERYTHING ELSE THAT YOU MUST ACCEPT WITH IT IF YOU CONSENT (WHEN->WITH) THE TIME ARRIVES TO BECOME MINE CONCEAL (AND PALLIATE->IN PALE YET) NOTHING +5484-24318-0027-632: (SO ARCHIAS->SARKAIUS) INTENDED TO LEAVE THE CITY ON ONE OF HIS OWN SHIPS THAT VERY DAY +5484-24318-0028-633: (HE->SHE) HIMSELF ON THE WAY TO EXPOSE HIMSELF TO THE MALICE AND MOCKERY OF THE WHOLE CITY +5484-24318-0029-634: HIS HEART CONTRACTED PAINFULLY AND HIS SOLICITUDE ABOUT HIS UNCLE'S FATE INCREASED WHEN PHILIPPUS INFORMED HIM THAT THE CONSPIRATORS HAD BEEN ARRESTED AT THE BANQUET AND HEADED BY (AMYNTAS->A MEANTESSE) THE (RHODIAN->RODIAN) CHRYSIPPUS AND (PROCLUS->PROCLAUS) HAD PERISHED BY THE EXECUTIONER'S SWORD AT SUNRISE +5484-24318-0030-635: BESIDES HE KNEW THAT THE OBJECT OF HIS LOVE WOULD 
NOT PART FROM HIM WITHOUT GRANTING HIM ONE LAST WORD +5484-24318-0031-636: ON THE WAY HIS HEART THROBBED ALMOST TO BURSTING +5484-24318-0032-637: EVEN (DAPHNE'S->THESE) IMAGE AND WHAT THREATENED HER FATHER AND HER WITH HIM (RECEDED->WAS SEATED) FAR INTO THE BACKGROUND +5484-24318-0033-638: HE WAS APPEARING BEFORE HIS COMPANIONS ONLY TO GIVE TRUTH ITS JUST DUE +5484-24318-0034-639: THE EGYPTIAN OBEYED AND HIS MASTER CROSSED THE WIDE SPACE STREWN WITH SAND AND APPROACHED THE STAGE WHICH HAD BEEN ERECTED FOR THE (FESTAL->FEAST OF) PERFORMANCES EVEN HAD HIS EYES RETAINED THE POWER OF SIGHT HIS BLOOD WAS (COURSING->COARSING) SO (WILDLY->WIDELY) THROUGH HIS VEINS THAT HE MIGHT PERHAPS HAVE BEEN UNABLE TO DISTINGUISH THE STATUES AROUND HIM AND THE THOUSANDS OF SPECTATORS WHO CROWDED CLOSELY TOGETHER RICHLY GARLANDED THEIR CHEEKS GLOWING WITH ENTHUSIASM SURROUNDED THE ARENA (HERMON->HERMANN) +5484-24318-0035-640: SHOUTED HIS FRIEND (SOTELES IN->SARTUOUS AND) JOYFUL SURPRISE IN THE MIDST OF (THIS->HIS) PAINFUL WALK (HERMON->HERE ON) +5484-24318-0036-641: EVEN WHILE HE BELIEVED HIMSELF TO BE THE CREATOR OF THE DEMETER HE HAD BEEN SERIOUSLY TROUBLED BY THE PRAISE OF SO MANY CRITICS BECAUSE IT HAD EXPOSED HIM TO THE SUSPICION OF HAVING BECOME FAITHLESS TO HIS ART AND HIS NATURE +5484-24318-0037-642: HONOUR TO (MYRTILUS->MARTILLUS) AND HIS ART BUT HE TRUSTED THIS NOBLE (FESTAL->FEAST AN) ASSEMBLAGE WOULD PARDON THE UNINTENTIONAL DECEPTION AND AID HIS PRAYER FOR RECOVERY +5764-299665-0000-405: AFTERWARD IT WAS SUPPOSED THAT HE WAS SATISFIED WITH THE BLOOD OF OXEN (LAMBS->LAMPS) AND DOVES AND THAT IN EXCHANGE FOR OR (ON->IN) ACCOUNT OF THESE SACRIFICES (THIS->THESE) GOD GAVE (RAIN->REIN) SUNSHINE AND HARVEST +5764-299665-0001-406: WHETHER HE WAS THE CREATOR OF YOURSELF AND MYSELF +5764-299665-0002-407: (WHETHER->WEATHER) ANY PRAYER WAS EVER ANSWERED +5764-299665-0003-408: WHY DID HE CREATE THE (INTELLECTUALLY->INTELLECTUAL) INFERIOR +5764-299665-0004-409: WHY DID HE CREATE THE DEFORMED AND HELPLESS WHY DID HE CREATE THE CRIMINAL THE IDIOTIC THE INSANE +5764-299665-0005-410: ARE THE FAILURES UNDER OBLIGATION TO THEIR CREATOR +5764-299665-0006-411: (IS HE RESPONSIBLE->HIS IRRESPONSIBLE) FOR ALL THE WARS THAT HAVE BEEN (WAGED->RAGED) FOR ALL THE INNOCENT BLOOD THAT HAS BEEN SHED +5764-299665-0007-412: (IS->IF) HE RESPONSIBLE FOR THE CENTURIES OF SLAVERY FOR THE BACKS THAT HAVE BEEN SCARRED WITH (THE->A) LASH FOR THE (BABES->BABE) THAT HAVE BEEN SOLD FROM THE BREASTS OF MOTHERS FOR THE FAMILIES THAT HAVE BEEN SEPARATED AND DESTROYED +5764-299665-0008-413: IS (THIS GOD->THE SCOTT) RESPONSIBLE FOR RELIGIOUS PERSECUTION FOR THE INQUISITION FOR THE (THUMB->TEMP) SCREW AND RACK AND FOR ALL THE INSTRUMENTS OF TORTURE +5764-299665-0009-414: DID THIS (GOD ALLOW->GOT THE LOW) THE CRUEL AND VILE TO DESTROY THE BRAVE AND VIRTUOUS +5764-299665-0010-415: DID HE (ALLOW->ALONE) TYRANTS TO SHED (THE->A) BLOOD OF PATRIOTS +5764-299665-0011-416: CAN WE CONCEIVE OF A DEVIL BASE ENOUGH TO PREFER HIS ENEMIES TO HIS FRIENDS +5764-299665-0012-417: HOW CAN WE ACCOUNT FOR THE WILD BEASTS THAT (DEVOUR->THE FOUR) HUMAN BEINGS FOR THE (FANGED->FACT) SERPENTS WHOSE BITE IS DEATH +5764-299665-0013-418: HOW CAN WE ACCOUNT FOR A WORLD (WHERE LIFE FEEDS->WERE LIE FEATS) ON LIFE +5764-299665-0014-419: (DID->THE) INFINITE WISDOM INTENTIONALLY (PRODUCE THE->PRODUCED A) MICROSCOPIC BEASTS THAT (FEED->FEAT) UPON THE OPTIC (NERVE->NERVES) THINK OF BLINDING A MAN TO SATISFY THE APPETITE OF A MICROBE +5764-299665-0015-420: FEAR (BUILDS->BIDS) THE 
ALTAR AND OFFERS THE SACRIFICE +5764-299665-0016-421: FEAR ERECTS THE (CATHEDRAL->KITRAL) AND BOWS THE HEAD OF MAN IN WORSHIP +5764-299665-0017-422: LIPS RELIGIOUS AND FEARFUL TREMBLINGLY REPEAT THIS PASSAGE THOUGH HE SLAY ME YET WILL I TRUST HIM +5764-299665-0018-423: CAN WE SAY THAT HE CARED FOR THE CHILDREN OF MEN +5764-299665-0019-424: CAN WE SAY THAT HIS MERCY (ENDURETH->AND DURRED) FOREVER +5764-299665-0020-425: DO WE PROVE HIS GOODNESS BY SHOWING THAT HE HAS OPENED THE EARTH AND SWALLOWED (THOUSANDS->THOUSAND) OF HIS HELPLESS CHILDREN (OR->ALL) THAT WITH THE VOLCANOES HE HAS OVERWHELMED THEM WITH RIVERS OF FIRE +5764-299665-0021-426: WAS (THERE->THEIR) GOODNESS WAS (THERE->THEIR) WISDOM IN THIS +5764-299665-0022-427: (OUGHT->ALL) THE SUPERIOR (RACES->RAYS) TO THANK GOD THAT THEY ARE NOT THE INFERIOR +5764-299665-0023-428: MOST PEOPLE (CLING TO->CLINKED THROUGH) THE SUPERNATURAL +5764-299665-0024-429: IF THEY GIVE UP ONE GOD THEY IMAGINE ANOTHER +5764-299665-0025-430: WHAT IS THIS POWER +5764-299665-0026-431: MAN ADVANCES (AND->A) NECESSARILY ADVANCES (THROUGH->TO) EXPERIENCE +5764-299665-0027-432: A MAN WISHING TO GO TO A CERTAIN PLACE (COMES->COME) TO WHERE THE ROAD DIVIDES +5764-299665-0028-433: HE (HAS->IS) TRIED THAT ROAD AND KNOWS THAT IT IS THE WRONG ROAD +5764-299665-0029-434: A CHILD (CHARMED->SHOWN) BY THE BEAUTY OF THE FLAME (GRASPS->GRASPED) IT WITH (ITS->HIS) DIMPLED HAND +5764-299665-0030-435: THE POWER THAT (WORKS->WORK) FOR RIGHTEOUSNESS (HAS->HAD) TAUGHT THE CHILD A LESSON +5764-299665-0031-436: IT IS A RESULT +5764-299665-0032-437: IT IS INSISTED BY THESE THEOLOGIANS AND BY MANY OF THE (SO->SOUL) CALLED PHILOSOPHERS THAT THIS MORAL SENSE THIS SENSE OF DUTY OF OBLIGATION WAS IMPORTED AND THAT CONSCIENCE IS AN EXOTIC +5764-299665-0033-438: WE LIVE TOGETHER IN FAMILIES TRIBES AND NATIONS +5764-299665-0034-439: THEY ARE PRAISED ADMIRED AND RESPECTED +5764-299665-0035-440: THEY ARE REGARDED AS GOOD THAT IS TO SAY (AS->S) MORAL +5764-299665-0036-441: THE MEMBERS WHO ADD TO THE MISERY OF THE FAMILY THE TRIBE (OR->OF) THE NATION ARE CONSIDERED BAD MEMBERS +5764-299665-0037-442: THE GREATEST OF HUMAN BEINGS (HAS->HAD) SAID CONSCIENCE IS BORN OF LOVE +5764-299665-0038-443: AS PEOPLE ADVANCE THE REMOTE CONSEQUENCES ARE PERCEIVED +5764-299665-0039-444: THE IMAGINATION IS CULTIVATED +5764-299665-0040-445: A MAN (PUTS->BUT) HIMSELF IN THE PLACE OF ANOTHER +5764-299665-0041-446: THE SENSE OF DUTY BECOMES STRONGER MORE IMPERATIVE +5764-299665-0042-447: MAN JUDGES HIMSELF +5764-299665-0043-448: IN ALL THIS THERE IS NOTHING SUPERNATURAL +5764-299665-0044-449: MAN HAS DECEIVED HIMSELF +5764-299665-0045-450: (HAS CHRISTIANITY DONE GOOD->HESTERITY DONEGOOD) +5764-299665-0046-451: WHEN THE CHURCH HAD (CONTROL WERE->CONTROLLED WHERE) MEN MADE BETTER AND HAPPIER +5764-299665-0047-452: WHAT HAS RELIGION DONE FOR HUNGARY OR AUSTRIA +5764-299665-0048-453: (COULD->GOOD) THESE COUNTRIES HAVE BEEN WORSE WITHOUT RELIGION +5764-299665-0049-454: COULD THEY HAVE BEEN WORSE HAD THEY HAD ANY OTHER RELIGION THAN CHRISTIANITY +5764-299665-0050-455: WHAT DID CHRISTIANITY DO (FOR->FAULT) THEM +5764-299665-0051-456: THEY HATED PLEASURE +5764-299665-0052-457: THEY MUFFLED ALL THE BELLS OF GLADNESS +5764-299665-0053-458: THE RELIGION OF THE PURITAN WAS AN (UNADULTERATED->AN ADULTERATED) CURSE +5764-299665-0054-459: THE PURITAN BELIEVED THE BIBLE TO BE THE (WORD->WORTH) OF GOD AND THIS BELIEF HAS ALWAYS MADE THOSE WHO HELD IT CRUEL AND WRETCHED +5764-299665-0055-460: LET ME REFER TO JUST ONE FACT SHOWING THE 
INFLUENCE OF A BELIEF IN THE BIBLE ON HUMAN BEINGS +5764-299665-0056-461: THE QUEEN RECEIVED THE BIBLE KISSED IT AND PLEDGED HERSELF TO DILIGENTLY READ THEREIN +5764-299665-0057-462: IN OTHER WORDS IT WAS JUST AS FIENDISH JUST AS (INFAMOUS->IN FAMOUS) AS THE (CATHOLIC SPIRIT->CATTLE EXPERIOR) +5764-299665-0058-463: (HAS THE BIBLE->HESDAY BUT) MADE THE PEOPLE OF GEORGIA KIND AND MERCIFUL +5764-299665-0059-464: (*->WHO) RELIGION HAS BEEN TRIED AND IN ALL COUNTRIES IN ALL TIMES (HAS->THUS) FAILED +5764-299665-0060-465: RELIGION (HAS->HATH) ALWAYS BEEN THE ENEMY OF SCIENCE OF INVESTIGATION AND THOUGHT +5764-299665-0061-466: RELIGION (HAS->IS) NEVER MADE (MAN->MEN) FREE +5764-299665-0062-467: (IT HAS->HE JUST) NEVER MADE MAN MORAL TEMPERATE INDUSTRIOUS AND HONEST +5764-299665-0063-468: (ARE CHRISTIANS MORE->AH CHRISTIAN SMALL) TEMPERATE NEARER VIRTUOUS NEARER HONEST THAN SAVAGES +5764-299665-0064-469: CAN WE CURE DISEASE BY SUPPLICATION +5764-299665-0065-470: CAN WE RECEIVE VIRTUE OR (HONOR->HUNGER) AS ALMS +5764-299665-0066-471: RELIGION RESTS ON THE IDEA THAT NATURE HAS A MASTER AND THAT THIS MASTER WILL LISTEN TO PRAYER THAT (THIS->HIS) MASTER PUNISHES AND REWARDS THAT HE LOVES PRAISE AND FLATTERY AND HATES THE BRAVE AND FREE +5764-299665-0067-472: WE MUST HAVE (CORNER->CORN THE) STONES +5764-299665-0068-473: THE STRUCTURE MUST HAVE (A BASEMENT->ABASEMENT) +5764-299665-0069-474: IF WE BUILD WE MUST BEGIN AT THE BOTTOM +5764-299665-0070-475: I HAVE (A->IT) THEORY AND I HAVE FOUR CORNER STONES +5764-299665-0071-476: THE FIRST STONE (IS THAT MATTER->EAST AT MAZAR) SUBSTANCE CANNOT BE DESTROYED CANNOT BE ANNIHILATED +5764-299665-0072-477: IF (THESE CORNER->THIS CORN THE) STONES ARE FACTS IT FOLLOWS AS A NECESSITY THAT MATTER AND FORCE ARE FROM (AND->END) TO ETERNITY THAT THEY CAN NEITHER BE INCREASED NOR DIMINISHED +5764-299665-0073-478: IT FOLLOWS THAT NOTHING HAS BEEN OR CAN BE CREATED THAT THERE NEVER HAS BEEN OR CAN BE A CREATOR +5764-299665-0074-479: IT (FOLLOWS->FOLLOWED) THAT THERE COULD NOT HAVE BEEN ANY INTELLIGENCE (ANY DESIGN->AND A DESIGNED) BACK OF MATTER AND FORCE +5764-299665-0075-480: I SAY WHAT I THINK +5764-299665-0076-481: EVERY EVENT HAS PARENTS +5764-299665-0077-482: THAT WHICH (HAS->HATH) NOT HAPPENED COULD NOT +5764-299665-0078-483: IN THE INFINITE (CHAIN THERE IS->CHANGE WRISTS) AND THERE CAN BE NO BROKEN NO MISSING LINK +5764-299665-0079-484: WE NOW KNOW THAT OUR FIRST PARENTS WERE NOT FOREIGNERS +5764-299665-0080-485: WE NOW KNOW IF WE KNOW ANYTHING THAT THE UNIVERSE IS NATURAL AND THAT (MEN->MAN) AND WOMEN HAVE BEEN NATURALLY PRODUCED +5764-299665-0081-486: WE KNOW THE PATHS THAT LIFE HAS (TRAVELED->TRAVELLED) +5764-299665-0082-487: WE KNOW THE FOOTSTEPS OF ADVANCE THEY HAVE BEEN (TRACED->PRAISED) +5764-299665-0083-488: (FOR->FOUR) THOUSANDS OF YEARS MEN AND WOMEN HAVE BEEN (TRYING->CRYING) TO REFORM THE WORLD +5764-299665-0084-489: WHY HAVE THE (REFORMERS FAILED->REFORMED FAITH) +5764-299665-0085-490: THEY DEPEND ON THE (LORD ON LUCK->LOT UNLUCK) AND CHARITY +5764-299665-0086-491: THEY (LIVE->LEAVE) BY FRAUD AND VIOLENCE AND BEQUEATH THEIR VICES TO THEIR CHILDREN +5764-299665-0087-492: FAILURE SEEMS TO BE THE (TRADEMARK->TRADE MARK) OF NATURE WHY +5764-299665-0088-493: NATURE (PRODUCES->PRODUCED) WITHOUT PURPOSE SUSTAINS WITHOUT INTENTION AND DESTROYS WITHOUT THOUGHT +5764-299665-0089-494: (MUST THE WORLD->MISTER BUILD) FOREVER (REMAIN THE->REMAINED A) VICTIM OF IGNORANT PASSION +5764-299665-0090-495: WHY SHOULD MEN AND WOMEN HAVE CHILDREN THAT THEY CANNOT TAKE CARE 
OF CHILDREN THAT ARE (BURDENS->A BURDEN) AND CURSES WHY +5764-299665-0091-496: PASSION (IS->EAST) AND (ALWAYS->ALL THESE) HAS BEEN DEAF +5764-299665-0092-497: LAW CAN PUNISH (BUT->THAT) IT CAN NEITHER REFORM CRIMINALS NOR PREVENT CRIME +5764-299665-0093-498: (THIS->THESE) CANNOT BE DONE BY TALK OR EXAMPLE +5764-299665-0094-499: THIS IS THE SOLUTION OF THE WHOLE QUESTION +5764-299665-0095-500: THIS (FREES WOMAN->FREEZE WOMEN) +5764-299665-0096-501: (POVERTY->BAVARY) AND CRIME WILL BE CHILDLESS +5764-299665-0097-502: IT IS FAR BETTER TO BE FREE TO LEAVE THE (FORTS->FAULTS) AND BARRICADES OF FEAR TO STAND ERECT AND (FACE->FAITH) THE FUTURE WITH (A SMILE->US MIND) +6070-63485-0000-2599: (THEY'RE->THERE) DONE (FOR->FAR) SAID THE SCHOOLMASTER IN A (LOW KEY->LOKIE) TO THE (CHOUETTE->SWEAT) OUT WITH YOUR VITRIOL AND MIND YOUR EYE +6070-63485-0001-2600: THE TWO MONSTERS TOOK OFF THEIR SHOES AND MOVED STEALTHILY ALONG KEEPING IN THE SHADOWS OF THE HOUSES +6070-63485-0002-2601: BY MEANS OF THIS STRATAGEM THEY FOLLOWED SO CLOSELY THAT ALTHOUGH WITHIN A FEW STEPS OF (SARAH AND->SEREN) TOM THEY DID NOT HEAR THEM +6070-63485-0003-2602: SARAH AND HER BROTHER HAVING AGAIN PASSED BY THE (TAPIS FRANC->TAPPY FRANK) ARRIVED CLOSE TO THE DILAPIDATED HOUSE WHICH WAS PARTLY IN RUINS AND ITS (OPENED->OPEN) CELLARS FORMED A KIND OF GULF ALONG WHICH THE STREET RAN IN THAT DIRECTION +6070-63485-0004-2603: IN AN INSTANT THE SCHOOLMASTER WITH A LEAP RESEMBLING IN STRENGTH AND AGILITY THE SPRING OF A TIGER SEIZED SEYTON WITH ONE HAND BY THE THROAT AND EXCLAIMED YOUR MONEY OR I WILL FLING YOU INTO THIS HOLE +6070-63485-0005-2604: NO SAID THE OLD BRUTE (GRUMBLINGLY->TREMBLINGLY) NO NOT ONE RING WHAT A SHAME +6070-63485-0006-2605: TOM SEYTON DID NOT LOSE HIS PRESENCE OF MIND DURING THIS SCENE RAPIDLY AND UNEXPECTEDLY AS IT HAD OCCURRED +6070-63485-0007-2606: (OH AH->UH) TO LAY A TRAP TO CATCH US REPLIED THE THIEF +6070-63485-0008-2607: THEN ADDRESSING THOMAS (SEYTON->SETTON) YOU KNOW THE (PLAIN->PLANE) OF SAINT DENIS +6070-63485-0009-2608: DID YOU SEE IN THE CABARET WE (HAVE->HAD) JUST LEFT FOR I KNOW YOU AGAIN THE MAN WHOM THE CHARCOAL MAN CAME TO SEEK +6070-63485-0010-2609: CRIED THE SCHOOLMASTER A THOUSAND FRANCS AND I'LL KILL HIM +6070-63485-0011-2610: WRETCH I DO NOT (SEEK->SEE) HIS LIFE REPLIED SARAH TO THE SCHOOLMASTER +6070-63485-0012-2611: LET'S GO AND MEET HIM +6070-63485-0013-2612: OLD BOY IT WILL PAY FOR LOOKING AFTER +6070-63485-0014-2613: WELL MY WIFE SHALL BE THERE SAID THE SCHOOLMASTER YOU WILL TELL HER WHAT YOU WANT AND I SHALL SEE +6070-63485-0015-2614: IN THE PLAIN OF SAINT DENIS +6070-63485-0016-2615: BETWEEN SAINT (OUEN->JOIN) AND THE ROAD OF LA (REVOLTE->REVOLT) AT THE END OF THE ROAD AGREED +6070-63485-0017-2616: HE HAD FORGOTTEN THE ADDRESS OF THE SELF STYLED (FAN PAINTER->PAMPAINTER) +6070-63485-0018-2617: THE (FIACRE->FIACCHUS) STARTED +6070-86744-0000-2569: (FRANZ->FRANCE) WHO SEEMED ATTRACTED BY SOME INVISIBLE INFLUENCE TOWARDS THE COUNT IN WHICH TERROR WAS STRANGELY MINGLED FELT AN EXTREME RELUCTANCE TO PERMIT HIS FRIEND TO BE EXPOSED ALONE TO THE SINGULAR FASCINATION THAT THIS MYSTERIOUS PERSONAGE SEEMED TO EXERCISE OVER HIM AND THEREFORE MADE NO OBJECTION TO ALBERT'S REQUEST BUT AT ONCE ACCOMPANIED HIM TO THE DESIRED SPOT AND AFTER A SHORT DELAY THE COUNT JOINED THEM IN THE SALON +6070-86744-0001-2570: MY VERY GOOD FRIEND (AND->AN) EXCELLENT (NEIGHBOR->NEIGHBOUR) REPLIED THE COUNT WITH A SMILE YOU REALLY EXAGGERATE MY TRIFLING EXERTIONS +6070-86744-0002-2571: MY FATHER THE COMTE DE MORCERF 
ALTHOUGH (OF->A) SPANISH ORIGIN POSSESSES CONSIDERABLE INFLUENCE BOTH AT THE COURT OF FRANCE AND MADRID AND I (UNHESITATINGLY->AM HESITATINGLY) PLACE THE BEST SERVICES OF MYSELF AND ALL TO WHOM MY LIFE IS DEAR AT YOUR DISPOSAL +6070-86744-0003-2572: I CAN SCARCELY CREDIT IT +6070-86744-0004-2573: THEN IT IS SETTLED SAID THE COUNT AND I GIVE YOU MY SOLEMN ASSURANCE THAT I ONLY WAITED (*->IN) AN OPPORTUNITY LIKE THE PRESENT TO REALIZE PLANS THAT I HAVE LONG MEDITATED +6070-86744-0005-2574: SHALL WE MAKE A POSITIVE APPOINTMENT FOR A PARTICULAR DAY AND HOUR INQUIRED THE COUNT ONLY LET ME WARN YOU THAT I AM PROVERBIAL FOR MY PUNCTILIOUS EXACTITUDE IN KEEPING MY ENGAGEMENTS DAY FOR DAY HOUR FOR HOUR SAID ALBERT THAT WILL SUIT ME TO A DOT +6070-86744-0006-2575: SO BE IT THEN REPLIED THE COUNT AND EXTENDING HIS HAND TOWARDS (A->THE) CALENDAR SUSPENDED NEAR THE CHIMNEY PIECE HE SAID TO DAY IS THE TWENTY FIRST OF FEBRUARY AND DRAWING OUT HIS WATCH (ADDED->I DID) IT IS EXACTLY HALF PAST TEN O'CLOCK NOW PROMISE ME TO REMEMBER THIS AND EXPECT ME (THE->THAT) TWENTY FIRST OF MAY AT THE SAME HOUR IN THE FORENOON +6070-86744-0007-2576: I RESIDE IN MY FATHER'S HOUSE BUT OCCUPY A PAVILION AT THE FARTHER SIDE OF THE (COURT YARD->COURTYARD) ENTIRELY SEPARATED FROM THE MAIN BUILDING +6070-86744-0008-2577: NOW THEN SAID THE COUNT RETURNING HIS TABLETS TO HIS POCKET MAKE YOURSELF PERFECTLY EASY THE HAND OF YOUR TIME (PIECE->PEACE) WILL NOT BE MORE ACCURATE IN MARKING THE TIME THAN MYSELF +6070-86744-0009-2578: THAT DEPENDS WHEN DO YOU LEAVE +6070-86744-0010-2579: FOR FRANCE NO FOR VENICE I SHALL REMAIN IN ITALY FOR ANOTHER YEAR OR TWO +6070-86744-0011-2580: THEN WE SHALL NOT MEET IN PARIS +6070-86744-0012-2581: I FEAR I SHALL NOT HAVE THAT (HONOR->HONOUR) +6070-86744-0013-2582: WELL SINCE WE MUST PART SAID THE COUNT HOLDING OUT A HAND TO EACH OF THE YOUNG MEN ALLOW ME TO WISH YOU BOTH A SAFE AND PLEASANT JOURNEY +6070-86744-0014-2583: WHAT IS THE MATTER ASKED ALBERT OF FRANZ WHEN THEY HAD RETURNED TO THEIR OWN APARTMENTS YOU (SEEM->SEE) MORE THAN COMMONLY THOUGHTFUL +6070-86744-0015-2584: I WILL CONFESS TO YOU ALBERT REPLIED FRANZ THE COUNT IS A VERY SINGULAR PERSON AND THE APPOINTMENT YOU HAVE MADE TO MEET HIM IN PARIS FILLS ME WITH A THOUSAND APPREHENSIONS +6070-86744-0016-2585: DID YOU EVER MEET HIM PREVIOUSLY TO COMING HITHER +6070-86744-0017-2586: UPON MY (HONOR->HONOUR) THEN LISTEN TO ME +6070-86744-0018-2587: HE DWELT WITH CONSIDERABLE FORCE AND ENERGY ON THE ALMOST MAGICAL HOSPITALITY HE HAD RECEIVED FROM THE COUNT AND THE MAGNIFICENCE OF HIS ENTERTAINMENT IN THE (GROTTO->DRATTO) OF THE THOUSAND AND ONE NIGHTS HE RECOUNTED WITH CIRCUMSTANTIAL EXACTITUDE ALL THE PARTICULARS OF THE SUPPER THE HASHISH THE STATUES THE DREAM AND HOW AT HIS AWAKENING THERE REMAINED NO PROOF OR TRACE OF ALL THESE EVENTS SAVE THE SMALL YACHT SEEN IN THE DISTANT HORIZON DRIVING UNDER FULL SAIL TOWARD PORTO VECCHIO +6070-86744-0019-2588: THEN HE DETAILED THE CONVERSATION OVERHEARD BY HIM AT THE COLOSSEUM BETWEEN THE COUNT AND VAMPA IN WHICH THE COUNT HAD PROMISED TO OBTAIN THE RELEASE OF THE BANDIT PEPPINO AN ENGAGEMENT WHICH AS OUR READERS ARE AWARE HE MOST FAITHFULLY FULFILLED +6070-86744-0020-2589: BUT SAID FRANZ THE CORSICAN BANDITS THAT WERE AMONG THE CREW OF HIS VESSEL +6070-86744-0021-2590: WHY REALLY THE THING SEEMS TO ME SIMPLE ENOUGH +6070-86744-0022-2591: TALKING OF COUNTRIES REPLIED FRANZ OF WHAT (COUNTRY IS->COUNTRIES) THE COUNT WHAT IS HIS NATIVE (TONGUE->TONG) WHENCE DOES HE DERIVE HIS IMMENSE FORTUNE AND WHAT WERE 
THOSE EVENTS OF HIS EARLY LIFE A LIFE AS MARVELLOUS AS UNKNOWN THAT (HAVE->HATH) TINCTURED HIS SUCCEEDING YEARS WITH SO DARK AND GLOOMY A MISANTHROPY +6070-86744-0023-2592: CERTAINLY THESE ARE QUESTIONS THAT IN YOUR PLACE I SHOULD LIKE TO HAVE ANSWERED +6070-86744-0024-2593: MY DEAR (FRANZ->FRIENDS) REPLIED ALBERT WHEN UPON RECEIPT OF MY LETTER YOU FOUND THE NECESSITY OF ASKING THE COUNT'S ASSISTANCE YOU PROMPTLY WENT TO HIM SAYING MY FRIEND ALBERT DE MORCERF IS IN DANGER HELP ME TO DELIVER HIM +6070-86744-0025-2594: WHAT ARE HIS MEANS OF EXISTENCE WHAT IS HIS BIRTHPLACE OF WHAT (COUNTRY IS->COUNTRIES) HE A NATIVE +6070-86744-0026-2595: I CONFESS HE ASKED ME NONE NO HE MERELY CAME AND FREED ME FROM THE HANDS OF (SIGNOR->SENOR) VAMPA WHERE I CAN ASSURE YOU IN SPITE OF ALL MY OUTWARD APPEARANCE OF EASE AND UNCONCERN I DID NOT VERY PARTICULARLY CARE TO REMAIN +6070-86744-0027-2596: AND THIS TIME IT MUST BE CONFESSED THAT CONTRARY TO THE USUAL STATE OF AFFAIRS IN DISCUSSIONS BETWEEN THE YOUNG MEN THE EFFECTIVE ARGUMENTS WERE ALL ON ALBERT'S SIDE +6070-86744-0028-2597: WELL SAID FRANZ WITH A SIGH DO AS YOU PLEASE MY DEAR VISCOUNT FOR YOUR ARGUMENTS ARE BEYOND MY POWERS OF REFUTATION +6070-86744-0029-2598: AND NOW MY DEAR (FRANZ->FRANCE) LET US TALK OF SOMETHING ELSE +6070-86745-0000-2549: THEN SHOULD ANYTHING APPEAR TO (MERIT->MARRIAGE) A MORE MINUTE EXAMINATION ALBERT DE MORCERF COULD FOLLOW UP HIS RESEARCHES BY MEANS OF A SMALL GATE SIMILAR TO THAT CLOSE TO THE CONCIERGE'S DOOR AND WHICH MERITS A PARTICULAR DESCRIPTION +6070-86745-0001-2550: SHRUBS AND CREEPING PLANTS COVERED THE WINDOWS AND HID FROM THE GARDEN AND COURT THESE TWO APARTMENTS THE ONLY ROOMS INTO WHICH AS THEY WERE ON THE GROUND FLOOR THE PRYING EYES OF THE CURIOUS COULD PENETRATE +6070-86745-0002-2551: AT A QUARTER TO TEN A (VALET->VALLEY) ENTERED HE COMPOSED WITH A LITTLE GROOM NAMED JOHN AND WHO ONLY SPOKE ENGLISH ALL ALBERT'S ESTABLISHMENT ALTHOUGH THE COOK OF THE HOTEL WAS ALWAYS AT HIS SERVICE AND ON GREAT OCCASIONS THE COUNT'S CHASSEUR ALSO +6070-86745-0003-2552: WAIT THEN DURING THE DAY TELL ROSA THAT WHEN I LEAVE THE OPERA I WILL SUP WITH HER AS SHE WISHES +6070-86745-0004-2553: VERY WELL AT HALF PAST TEN +6070-86745-0005-2554: IS THE COUNTESS UP YET +6070-86745-0006-2555: THE VALET LEFT THE ROOM +6070-86745-0007-2556: GOOD MORNING (LUCIEN->LUCIAN) GOOD MORNING SAID ALBERT YOUR PUNCTUALITY REALLY ALARMS ME +6070-86745-0008-2557: YOU WHOM I EXPECTED LAST YOU ARRIVE AT FIVE MINUTES TO TEN WHEN THE TIME FIXED WAS HALF PAST +6070-86745-0009-2558: NO NO MY DEAR FELLOW DO NOT CONFOUND OUR PLANS +6070-86745-0010-2559: YES HE HAS NOT MUCH TO COMPLAIN OF (BOURGES->BOURGE) IS THE CAPITAL OF CHARLES (SEVEN->THE SEVENTH) +6070-86745-0011-2560: IT IS FOR THAT REASON YOU SEE ME SO EARLY +6070-86745-0012-2561: I RETURNED HOME AT DAYBREAK AND STROVE TO SLEEP BUT MY HEAD ACHED AND I GOT UP TO HAVE A RIDE FOR AN HOUR +6070-86745-0013-2562: (PESTE->PESTS) I WILL DO NOTHING OF THE KIND THE MOMENT THEY COME FROM GOVERNMENT YOU WOULD FIND THEM EXECRABLE +6070-86745-0014-2563: BESIDES THAT DOES NOT CONCERN THE HOME BUT THE FINANCIAL DEPARTMENT +6070-86745-0015-2564: ABOUT WHAT ABOUT THE PAPERS +6070-86745-0016-2565: IN THE ENTIRE POLITICAL WORLD OF WHICH YOU ARE ONE OF THE LEADERS +6070-86745-0017-2566: THEY SAY THAT IT IS QUITE FAIR AND THAT SOWING SO MUCH RED YOU OUGHT TO (REAP->READ) A LITTLE BLUE +6070-86745-0018-2567: COME COME THAT IS NOT BAD SAID (LUCIEN->LUCIAN) +6070-86745-0019-2568: WITH (YOUR TALENTS YOU->THE OR TALONS HE) WOULD 
MAKE YOUR FORTUNE IN THREE OR FOUR YEARS +6128-63240-0000-503: THE GENTLEMAN HAD NOT EVEN NEEDED TO SIT DOWN TO BECOME INTERESTED APPARENTLY HE HAD TAKEN UP THE VOLUME FROM A TABLE AS SOON AS HE CAME IN AND STANDING THERE AFTER A SINGLE GLANCE ROUND THE APARTMENT HAD LOST HIMSELF IN ITS PAGES +6128-63240-0001-504: THAT HAS AN UNFLATTERING SOUND FOR ME SAID THE YOUNG MAN +6128-63240-0002-505: SHE IS WILLING TO RISK THAT +6128-63240-0003-506: JUST AS I AM THE VISITOR INQUIRED PRESENTING HIMSELF WITH RATHER A (WORK A DAY->WORKADAY) ASPECT +6128-63240-0004-507: HE WAS TALL AND LEAN AND DRESSED THROUGHOUT IN BLACK HIS SHIRT COLLAR WAS LOW AND WIDE AND THE TRIANGLE OF LINEN A LITTLE CRUMPLED EXHIBITED BY THE OPENING OF HIS WAISTCOAT WAS ADORNED BY A PIN CONTAINING A SMALL RED STONE +6128-63240-0005-508: IN SPITE OF THIS DECORATION THE YOUNG MAN LOOKED POOR AS (POOR->FAR) AS A YOUNG MAN COULD (LOOK->LIVE) WHO HAD SUCH A FINE HEAD AND SUCH MAGNIFICENT EYES +6128-63240-0006-509: THOSE OF (BASIL->BAZA) RANSOM (WERE->WENT) DARK DEEP AND GLOWING HIS HEAD HAD A CHARACTER OF ELEVATION WHICH FAIRLY ADDED TO HIS (STATURE->STATUE) IT WAS A HEAD TO BE SEEN ABOVE THE LEVEL OF A CROWD ON SOME JUDICIAL BENCH OR POLITICAL PLATFORM OR EVEN ON A BRONZE MEDAL +6128-63240-0007-510: THESE THINGS THE EYES ESPECIALLY WITH THEIR SMOULDERING FIRE MIGHT HAVE INDICATED THAT HE WAS TO BE A GREAT AMERICAN STATESMAN OR ON THE OTHER HAND THEY MIGHT SIMPLY HAVE PROVED THAT HE CAME FROM CAROLINA OR ALABAMA +6128-63240-0008-511: AND YET THE READER WHO LIKES A COMPLETE IMAGE WHO DESIRES TO READ WITH THE SENSES AS WELL AS WITH THE REASON IS ENTREATED NOT TO FORGET THAT HE PROLONGED HIS (CONSONANTS->COUNTENANCE) AND SWALLOWED HIS (VOWELS->VOWALS) THAT HE WAS GUILTY OF (ELISIONS->ELYGIANS) AND INTERPOLATIONS WHICH WERE EQUALLY UNEXPECTED AND THAT HIS DISCOURSE WAS PERVADED BY SOMETHING SULTRY AND VAST SOMETHING ALMOST AFRICAN IN ITS RICH BASKING TONE SOMETHING THAT SUGGESTED THE TEEMING (EXPANSE->EXPOUNDS) OF THE COTTON FIELD +6128-63240-0009-512: AND HE TOOK UP HIS HAT VAGUELY A SOFT BLACK HAT WITH A LOW CROWN AND AN IMMENSE STRAIGHT BRIM +6128-63240-0010-513: WELL SO IT IS THEY ARE ALL WITCHES AND WIZARDS MEDIUMS AND SPIRIT (RAPPERS->WRAPPERS) AND (ROARING->ROWING) RADICALS +6128-63240-0011-514: IF YOU ARE GOING TO DINE WITH HER YOU HAD BETTER KNOW IT OH MURDER +6128-63240-0012-515: HE (LOOKED AT->LIFTED) MISSUS LUNA WITH INTELLIGENT INCREDULITY +6128-63240-0013-516: SHE WAS ATTRACTIVE AND IMPERTINENT ESPECIALLY THE LATTER +6128-63240-0014-517: HAVE YOU BEEN IN EUROPE +6128-63240-0015-518: NO I HAVEN'T BEEN ANYWHERE +6128-63240-0016-519: SHE HATES IT SHE WOULD LIKE TO ABOLISH IT +6128-63240-0017-520: THIS LAST REMARK HE MADE AT A VENTURE FOR HE HAD NATURALLY NOT DEVOTED ANY SUPPOSITION WHATEVER TO MISSUS (LUNA->LINA) +6128-63240-0018-521: ARE YOU VERY AMBITIOUS YOU LOOK AS IF YOU WERE +6128-63240-0019-522: AND MISSUS (LUNA->LENA) ADDED THAT NOW SHE WAS BACK SHE DIDN'T KNOW WHAT SHE SHOULD DO +6128-63240-0020-523: ONE DIDN'T EVEN KNOW WHAT ONE HAD COME BACK FOR +6128-63240-0021-524: BESIDES OLIVE DIDN'T WANT HER IN BOSTON AND DIDN'T GO THROUGH THE FORM OF SAYING SO +6128-63240-0022-525: THAT WAS ONE COMFORT WITH OLIVE SHE NEVER (WENT->WON) THROUGH ANY FORMS +6128-63240-0023-526: SHE STOOD THERE LOOKING CONSCIOUSLY AND RATHER SERIOUSLY (AT->AND) MISTER RANSOM A SMILE OF EXCEEDING FAINTNESS PLAYED ABOUT HER LIPS IT WAS JUST PERCEPTIBLE ENOUGH TO LIGHT UP THE NATIVE GRAVITY OF HER FACE +6128-63240-0024-527: HER VOICE WAS LOW AND 
AGREEABLE A CULTIVATED VOICE AND SHE EXTENDED A SLENDER WHITE HAND TO HER VISITOR (WHO->HER) REMARKED WITH SOME SOLEMNITY HE FELT A CERTAIN GUILT OF PARTICIPATION IN MISSUS (LUNA'S->LUNAR'S) INDISCRETION THAT HE WAS INTENSELY HAPPY TO MAKE HER ACQUAINTANCE +6128-63240-0025-528: HE OBSERVED THAT MISS CHANCELLOR'S HAND WAS AT ONCE (COLD->CALLED) AND LIMP SHE MERELY PLACED IT IN HIS WITHOUT EXERTING THE SMALLEST PRESSURE +6128-63240-0026-529: I SHALL BE BACK VERY LATE (WE ARE GOING TO A THEATRE->WILL DON'T YOU THEATER) PARTY THAT'S WHY WE DINE SO EARLY +6128-63240-0027-530: MISSUS (LUNA'S->LUNDY'S) FAMILIARITY EXTENDED EVEN TO HER SISTER SHE REMARKED TO MISS CHANCELLOR THAT SHE LOOKED AS IF SHE WERE GOT UP FOR A SEA VOYAGE +6128-63241-0000-557: POOR RANSOM ANNOUNCED THIS FACT TO HIMSELF AS IF HE HAD MADE A GREAT DISCOVERY BUT IN REALITY HE HAD NEVER BEEN SO (BOEOTIAN->BE OTIAN) AS AT THAT MOMENT +6128-63241-0001-558: THE WOMEN HE HAD HITHERTO KNOWN HAD BEEN MAINLY OF HIS OWN SOFT (CLIME->CLIMB) AND IT WAS NOT OFTEN THEY EXHIBITED THE TENDENCY HE DETECTED AND (CURSORILY->CURSORY) DEPLORED IN MISSUS LUNA'S SISTER +6128-63241-0002-559: RANSOM WAS PLEASED WITH THE VISION OF THAT REMEDY IT MUST BE REPEATED THAT HE WAS VERY PROVINCIAL +6128-63241-0003-560: HE WAS SORRY FOR HER BUT HE SAW IN A FLASH THAT NO ONE COULD HELP HER THAT WAS WHAT MADE HER TRAGIC +6128-63241-0004-561: SHE COULD NOT DEFEND HERSELF AGAINST A RICH ADMIRATION A KIND OF TENDERNESS OF ENVY OF ANY ONE WHO HAD BEEN SO HAPPY AS TO HAVE THAT OPPORTUNITY +6128-63241-0005-562: HIS FAMILY WAS RUINED THEY HAD LOST THEIR SLAVES THEIR PROPERTY THEIR FRIENDS AND RELATIONS THEIR HOME HAD TASTED OF ALL THE CRUELTY OF DEFEAT +6128-63241-0006-563: THE STATE OF MISSISSIPPI SEEMED TO HIM THE STATE OF DESPAIR SO (HE->HIS) SURRENDERED THE REMNANTS OF HIS PATRIMONY TO HIS MOTHER AND SISTERS AND AT NEARLY THIRTY YEARS OF AGE ALIGHTED FOR THE FIRST TIME IN NEW YORK IN THE COSTUME OF HIS PROVINCE WITH FIFTY DOLLARS IN HIS POCKET AND (A GNAWING->ENNARING) HUNGER IN HIS HEART +6128-63241-0007-564: IT WAS IN THE FEMALE LINE AS (BASIL->BALES AT) RANSOM HAD WRITTEN IN ANSWERING HER LETTER WITH A GOOD DEAL OF FORM AND FLOURISH HE SPOKE AS IF THEY HAD BEEN ROYAL HOUSES +6128-63241-0008-565: IF IT HAD BEEN POSSIBLE TO SEND MISSUS (RANSOM->RANDOM) MONEY OR EVEN CLOTHES SHE WOULD HAVE LIKED THAT BUT SHE HAD NO MEANS OF ASCERTAINING (HOW->HER) SUCH AN OFFERING WOULD BE TAKEN +6128-63241-0009-566: (OLIVE->OLIV) HAD A FEAR OF EVERYTHING BUT HER GREATEST FEAR WAS OF BEING AFRAID +6128-63241-0010-567: SHE HAD ERECTED IT INTO A SORT OF RULE OF CONDUCT THAT WHENEVER SHE SAW A RISK SHE WAS TO TAKE IT AND SHE HAD FREQUENT HUMILIATIONS AT FINDING HERSELF SAFE AFTER ALL +6128-63241-0011-568: SHE WAS PERFECTLY SAFE AFTER WRITING TO (BASIL->BASE OR) RANSOM AND INDEED IT WAS DIFFICULT TO SEE WHAT HE COULD HAVE DONE TO HER EXCEPT THANK HER HE WAS ONLY EXCEPTIONALLY SUPERLATIVE FOR HER LETTER AND ASSURE HER THAT HE WOULD COME AND SEE HER THE FIRST TIME HIS BUSINESS HE WAS BEGINNING TO GET A LITTLE SHOULD TAKE HIM TO BOSTON +6128-63241-0012-569: HE WAS TOO SIMPLE TOO MISSISSIPPIAN FOR THAT SHE WAS ALMOST DISAPPOINTED +6128-63241-0013-570: OF ALL THINGS IN THE WORLD CONTENTION WAS MOST SWEET TO HER (THOUGH->THE) WHY IT IS HARD TO IMAGINE FOR IT ALWAYS COST HER TEARS HEADACHES A DAY OR TWO IN BED ACUTE EMOTION AND IT WAS VERY POSSIBLE (BASIL->BEESER) RANSOM WOULD NOT CARE TO (CONTEND->COMPEND) +6128-63244-0000-531: MISS CHANCELLOR HERSELF HAD THOUGHT SO MUCH ON THE VITAL SUBJECT 
WOULD NOT SHE MAKE A FEW REMARKS AND GIVE THEM SOME OF HER EXPERIENCES +6128-63244-0001-532: HOW DID THE LADIES (ON->AND) BEACON STREET FEEL ABOUT THE (BALLOT->BALLOTT) +6128-63244-0002-533: PERHAPS SHE COULD SPEAK FOR THEM MORE THAN FOR SOME OTHERS +6128-63244-0003-534: WITH (HER->*) IMMENSE (SYMPATHY->SMATHY) FOR REFORM SHE FOUND HERSELF SO OFTEN WISHING THAT (REFORMERS->REFUSE) WERE A LITTLE DIFFERENT +6128-63244-0004-535: (OLIVE->I HAVE) HATED TO HEAR THAT FINE AVENUE (TALKED->TALKS) ABOUT AS IF IT (WERE->WAS) SUCH A REMARKABLE PLACE AND TO LIVE THERE (WERE->WHERE) A PROOF OF WORLDLY GLORY +6128-63244-0005-536: ALL SORTS OF INFERIOR PEOPLE (LIVED->IF) THERE AND SO BRILLIANT A WOMAN AS MISSUS (FARRINDER->FARINGDER) WHO LIVED AT (ROXBURY->BRAXBURY) OUGHT NOT TO (MIX THINGS->MAKE SPACE) UP +6128-63244-0006-537: SHE KNEW HER PLACE IN THE BOSTON (HIERARCHY->HIRAKEE) AND IT WAS NOT WHAT MISSUS (FARRINDER->BARRING JUST) SUPPOSED SO THAT THERE WAS A WANT OF PERSPECTIVE IN TALKING TO HER AS IF SHE HAD BEEN (A REPRESENTATIVE->UNREPRESENTATIVE) OF THE ARISTOCRACY +6128-63244-0007-538: SHE WISHED TO WORK IN ANOTHER FIELD SHE HAD LONG BEEN PREOCCUPIED WITH THE ROMANCE OF THE PEOPLE +6128-63244-0008-539: THIS MIGHT SEEM ONE OF THE MOST ACCESSIBLE OF PLEASURES BUT IN POINT OF FACT SHE HAD NOT FOUND IT SO +6128-63244-0009-540: CHARLIE WAS A YOUNG MAN IN A (WHITE->WORLD) OVERCOAT AND A PAPER COLLAR IT WAS FOR HIM IN THE (LAST ANALYSIS->LASTIS) THAT (THEY->THE) CARED MUCH THE MOST +6128-63244-0010-541: (OLIVE->OUT OF) CHANCELLOR (*->I) WONDERED HOW MISSUS (FARRINDER->THORNDER) WOULD TREAT (THAT->THEIR) BRANCH OF THE QUESTION +6128-63244-0011-542: IF IT BE NECESSARY WE ARE PREPARED TO TAKE CERTAIN STEPS TO CONCILIATE THE SHRINKING +6128-63244-0012-543: OUR MOVEMENT IS FOR ALL IT APPEALS TO THE MOST DELICATE LADIES +6128-63244-0013-544: (RAISE->FOR IT IS) THE STANDARD AMONG THEM AND BRING ME (A->YOUR) THOUSAND NAMES +6128-63244-0014-545: (I->AND) LOOK AFTER THE DETAILS AS WELL AS THE BIG (CURRENTS->CURRANTS) MISSUS (FARRINDER->FARRENDER) ADDED IN A TONE AS EXPLANATORY AS COULD BE EXPECTED OF SUCH A WOMAN AND WITH A SMILE OF WHICH (THE->THIS) SWEETNESS WAS THRILLING TO HER LISTENER +6128-63244-0015-546: SAID (OLIVE->OLDEST) CHANCELLOR WITH A FACE WHICH SEEMED TO PLEAD FOR A (REMISSION OF->REMISSIONOUS) RESPONSIBILITY +6128-63244-0016-547: I (WANT->WARNED) TO BE NEAR TO THEM TO HELP THEM +6128-63244-0017-548: IT WAS ONE THING TO CHOOSE FOR HERSELF BUT NOW THE GREAT REPRESENTATIVE OF THE (ENFRANCHISEMENT->ENCOMCHISEMENT) OF THEIR SEX FROM EVERY FORM OF BONDAGE HAD CHOSEN FOR HER +6128-63244-0018-549: THE UNHAPPINESS OF WOMEN +6128-63244-0019-550: THEY WERE (HER->HIS) SISTERS THEY WERE HER OWN AND THE DAY OF THEIR DELIVERY HAD DAWNED +6128-63244-0020-551: THIS WAS THE ONLY SACRED CAUSE THIS WAS THE GREAT (THE JUST REVOLUTION->DRESSED REVELATION) IT (MUST->WAS) TRIUMPH IT (MUST->WAS) SWEEP EVERYTHING BEFORE IT IT MUST EXACT FROM THE OTHER THE BRUTAL (BLOOD STAINED->BLOODSTAINED) RAVENING RACE THE LAST PARTICLE OF EXPIATION +6128-63244-0021-552: (THEY->THERE) WOULD BE NAMES OF WOMEN WEAK INSULTED PERSECUTED BUT DEVOTED IN EVERY PULSE OF THEIR BEING TO THE CAUSE AND ASKING NO BETTER FATE THAN TO DIE FOR IT +6128-63244-0022-553: IT WAS NOT CLEAR TO THIS INTERESTING GIRL IN WHAT MANNER SUCH A SACRIFICE (AS->OF) THIS LAST WOULD BE REQUIRED OF HER BUT SHE (SAW THE->SOLDOM) MATTER THROUGH A KIND OF SUNRISE MIST OF (EMOTION->THE NATION) WHICH MADE DANGER AS ROSY (AS->IS) SUCCESS +6128-63244-0023-554: WHEN MISS 
(BIRDSEYE->BIRD'S EYE) APPROACHED IT TRANSFIGURED HER FAMILIAR (HER COMICAL->HYCOMICAL) SHAPE AND MADE THE POOR LITTLE HUMANITARY HACK SEEM ALREADY A MARTYR +6128-63244-0024-555: (OLIVE->ONLY IF) CHANCELLOR LOOKED AT HER WITH LOVE REMEMBERED THAT SHE HAD NEVER IN HER LONG (UNREWARDED->IN REWARDED) WEARY LIFE HAD A THOUGHT (OR->OF) AN IMPULSE FOR HERSELF +6128-63244-0025-556: (*->IF) SHE HAD BEEN CONSUMED BY THE PASSION OF SYMPATHY IT HAD (CRUMPLED->CRUMBLED) HER INTO AS MANY CREASES AS AN OLD GLAZED DISTENDED GLOVE +6432-63722-0000-2431: (BUT SCUSE->BUTCHUSE) ME (DIDN'T YO FIGGER ON DOIN->THEN YOU'LL FAGONNE DOING) SOME (DETECTIN AN GIVE->DETECTIVE AND GIVIN) UP (FISHIN->FISHING) +6432-63722-0001-2432: AND SHAG WITH THE FREEDOM OF AN OLD SERVANT STOOD LOOKING AT HIS MASTER AS IF NOT QUITE UNDERSTANDING THE NEW TWIST THE AFFAIRS HAD TAKEN +6432-63722-0002-2433: I'M (GOING OFF FISHING->GOIN OUR FISHIN) I MAY NOT CATCH ANYTHING (I->AND) MAY NOT WANT TO AFTER I GET THERE +6432-63722-0003-2434: GET READY (SHAG->SHAGG) YES (SAH->A) COLONEL +6432-63722-0004-2435: AND HAVING PUT HIMSELF IN A FAIR WAY AS HE HOPED TO SOLVE SOME OF THE PROBLEMS CONNECTED WITH THE DARCY CASE COLONEL ASHLEY WENT DOWN TO POLICE HEADQUARTERS TO LEARN MORE FACTS IN (*->THE) CONNECTION WITH THE MURDER OF THE EAST INDIAN +6432-63722-0005-2436: (PINKUS->PICK US) AND DONOVAN HAVEN'T THEY CARROLL YEP +6432-63722-0006-2437: (CARROLL->KAL) WAS TOO MUCH ENGAGED IN WATCHING THE BLUE SMOKE (CURL->GIRL) LAZILY UPWARD FROM HIS CIGAR JUST THEN TO SAY MORE +6432-63722-0007-2438: ARE YOU GOING TO WORK ON THAT CASE COLONEL +6432-63722-0008-2439: BUT HE HADN'T ANY MORE TO DO WITH IT COLONEL THAN THAT CAT +6432-63722-0009-2440: PERHAPS NOT ADMITTED COLONEL ASHLEY +6432-63722-0010-2441: WE'VE GOT OUR MAN AND THAT'S ALL WE WANT +6432-63722-0011-2442: YOU'RE ON THE DARCY CASE THEY TELL ME IN A WAY YES +6432-63722-0012-2443: I'M WORKING IN THE (INTERESTS->INTEREST) OF THE YOUNG MAN +6432-63722-0013-2444: IT'S JUST ONE OF THEM COINCIDENCES LIKE +6432-63722-0014-2445: BUSTED HIS HEAD IN WITH A HEAVY CANDLESTICK ONE OF A PAIR +6432-63722-0015-2446: GAD (EXCLAIMED->EXPLAINED) THE COLONEL +6432-63722-0016-2447: THE VERY PAIR I WAS GOING TO BUY +6432-63722-0017-2448: LOOK HERE (COLONEL->CAROL) DO YOU KNOW ANYTHING ABOUT THIS +6432-63722-0018-2449: AND THE DETECTIVE'S PROFESSIONAL INSTINCTS GOT THE UPPER HAND OF HIS FRIENDLINESS NOT THE LEAST IN THE WORLD NOT AS MUCH AS YOU DO WAS THE COOL ANSWER +6432-63722-0019-2450: I HAPPENED TO SEE THOSE CANDLESTICKS IN THE WINDOW OF (SINGA PHUT'S->SINGAFUT'S) SHOP THE OTHER DAY AND I MADE UP MY MIND TO BUY THEM WHEN I HAD A CHANCE +6432-63722-0020-2451: NOW I'M AFRAID I WON'T BUT HOW DID IT HAPPEN +6432-63722-0021-2452: (PHUT->FIVE) I DON'T KNOW WHETHER THAT'S HIS FIRST OR HIS LAST NAME ANYHOW HE HAD A PARTNER NAMED (SHERE->TO SHARE) ALI +6432-63722-0022-2453: ANYHOW HE (AND PHUT DIDN'T->INFECTED) GET ALONG VERY WELL IT SEEMS +6432-63722-0023-2454: (NEIGHBORS->LABORS) OFTEN HEARD (EM SCRAPPIN->HIM SCRAP IN) A LOT AND THIS AFTERNOON THEY WENT AT IT AGAIN HOT AND HEAVY +6432-63722-0024-2455: (TOWARD->TO OUR) DARK A MAN WENT IN TO BUY A LAMP +6432-63722-0025-2456: HE FOUND THE PLACE WITHOUT A LIGHT IN IT STUMBLED OVER SOMETHING ON THE FLOOR AND THERE WAS (ALI'S->ALWAYS) BODY WITH THE HEAD BUSTED IN AND THIS HEAVY CANDLESTICK NEAR IT +6432-63722-0026-2457: SURE HELD SO TIGHT WE COULD HARDLY GET IT OUT +6432-63722-0027-2458: MAYBE THE FIGHT WAS ABOUT WHO OWNED THE WATCH FOR THE (DAGOS->DAG WAS) TALKED IN THEIR 
FOREIGN LINGO AND NONE OF THE (NEIGHBORS->NEIGHBOURS) COULD TELL WHAT THEY WERE (SAYIN->SAYING) I SEE +6432-63722-0028-2459: AND THE WATCH HAVE YOU IT YES IT'S HERE +6432-63722-0029-2460: THAT'S THE WATCH ANNOUNCED THE (HEADQUARTERS->HEADQUARTER) DETECTIVE REACHING IN FOR IT GOING (YET->AT) SEE +6432-63722-0030-2461: YOU'RE NOT (AS SQUEAMISH->A SCREAMY) AS ALL THAT ARE YOU JUST BECAUSE IT WAS IN A DEAD MAN'S (HAND->HANDS) AND (IN->*) A WOMAN'S +6432-63722-0031-2462: AND DONOVAN'S VOICE WAS PLAINLY (SKEPTICAL->SCEPTICAL) +6432-63722-0032-2463: YES IT MAY HAVE SOME ROUGH EDGES ON IT +6432-63722-0033-2464: AND I'VE READ ENOUGH ABOUT GERMS TO KNOW THE DANGER I'D ADVISE YOU TO BE CAREFUL +6432-63722-0034-2465: IF YOU DON'T MIND I SHOULD LIKE TO EXAMINE THIS A BIT +6432-63722-0035-2466: BEFORE THE BIG WIND IN IRELAND SUGGESTED THONG WITH A NOD (AT->OF) HIS IRISH (COMPATRIOT->CAMPATRIOT) SLIGHTLY (LAUGHED->THEY'LL HAVE) THE COLONEL +6432-63722-0036-2467: THAT'S RIGHT AGREED THE COLONEL AS HE CONTINUED TO MOVE HIS MAGNIFYING GLASS OVER THE SURFACE OF THE STILL TICKING WATCH +6432-63722-0037-2468: (AND->IN) A CLOSE OBSERVER MIGHT HAVE OBSERVED THAT HE DID NOT TOUCH HIS BARE FINGERS TO THE TIMEPIECE BUT POKED IT ABOUT AND TOUCHED IT HERE AND THERE WITH THE END OF A (LEADPENCIL->LEAD PENCIL) +6432-63722-0038-2469: AND (DONOVAN->DONALIN) TAKE (A->HER) FRIEND'S ADVICE AND DON'T BE TOO FREE WITH THAT WATCH TOO FREE WITH IT +6432-63722-0039-2470: ASKED THE SURPRISED DETECTIVE YES +6432-63722-0040-2471: DON'T SCRATCH YOURSELF ON IT WHATEVER YOU DO WHY NOT +6432-63722-0041-2472: SIMPLY BECAUSE THIS WATCH +6432-63722-0042-2473: SOME ONE OUT HERE TO SEE YOU +6432-63722-0043-2474: ALL RIGHT BE THERE IN A SECOND +6432-63722-0044-2475: (SINGA PHUT->SHING AFOOT) WAS THE PANTING ANSWER +6432-63722-0045-2476: I WANT TO TALK OVER DARCY'S CASE WITH YOU THE COLONEL HAD SAID AND THE (TWO->JEW) HAD TALKED HAD THOUGHT HAD TALKED AGAIN AND NOW WERE SILENT FOR A TIME +6432-63722-0046-2477: WHAT ARE THE (CHANCES->CHURCHES) OF GETTING HIM OFF LEGALLY IF WE GO AT IT FROM A NEGATIVE STANDPOINT ASKED THE COLONEL +6432-63722-0047-2478: RATHER A HYPOTHETICAL QUESTION COLONEL BUT I SHOULD SAY IT MIGHT BE A FIFTY FIFTY PROPOSITION +6432-63722-0048-2479: AT BEST HE WOULD GET OFF (WITH A->FOR THE) SCOTCH VERDICT OF NOT PROVEN BUT HE DOESN'T WANT THAT NOR DO I +6432-63722-0049-2480: AND YOU I DON'T WANT IT EITHER +6432-63722-0050-2481: BUT I WANT TO KNOW JUST WHERE WE STAND NOW I KNOW +6432-63722-0051-2482: BUT I NEED TO DO A LITTLE MORE SMOKING OUT FIRST NOW I WANT TO THINK +6432-63722-0052-2483: IF YOU'LL EXCUSE ME I'LL PRETEND I'M FISHING AND I MAY CATCH SOMETHING +6432-63722-0053-2484: IN FACT I HAVE A FEELING THAT (I'LL->I) LAND MY FISH +6432-63722-0054-2485: (I'D->I) RECOMMEND HIM TO YOU INSTEAD OF BLACKSTONE THANKS LAUGHED KENNETH +6432-63722-0055-2486: WHAT IS IT PERHAPS I CAN HELP YOU +6432-63722-0056-2487: THE OLD ADAGE OF TWO HEADS YOU KNOW +6432-63722-0057-2488: YES (IT->IT'S) STILL HOLDS GOOD +6432-63722-0058-2489: NO ALIMONY (REPEATED->REPLIED) THE COLONEL PUZZLED YES JUST THAT +6432-63722-0059-2490: AND THERE'S NO REASON YOU SHOULDN'T KNOW +6432-63723-0000-2491: CHUCKLED THE COLONEL AS HE SKILFULLY PLAYED THE LUCKLESS TROUT NOW STRUGGLING TO GET LOOSE FROM THE HOOK +6432-63723-0001-2492: AND WHEN THE FISH WAS LANDED PANTING ON THE GRASS AND SHAG HAD BEEN ROUSED FROM HIS SLUMBER TO SLIP (THE->A) NOW LIMP FISH INTO THE (CREEL->CREOLE) COLONEL ASHLEY GAVE A SIGH OF RELIEF AND REMARKED I THINK I SEE IT NOW 
+6432-63723-0002-2493: THE REASON SHE ASKED NO ALIMONY INQUIRED KENNETH +6432-63723-0003-2494: NO I WASN'T THINKING OF THAT +6432-63723-0004-2495: HOWEVER DON'T THINK I'M NOT INTERESTED IN YOUR CASE I'VE (FISHED->FINISHED) ENOUGH FOR TO DAY +6432-63723-0005-2496: WELL I DON'T KNOW THAT YOU CAN +6432-63723-0006-2497: IT ISN'T GENERALLY KNOWN WENT ON THE LAWYER THAT THE HOTEL KEEPER'S WIFE HAS LEFT HIM +6432-63723-0007-2498: IT WAS ONE OF WHAT AT FIRST MIGHT BE CALLED REFINED CRUELTY ON HER HUSBAND'S PART DEGENERATING GRADUALLY INTO THAT OF (THE->A) BASER SORT +6432-63723-0008-2499: YOU DON'T MEAN THAT (LARCH->LARGE) STRUCK HER THAT THERE WAS PHYSICAL ABUSE DO YOU ASKED THE COLONEL THAT'S WHAT HE DID +6432-63723-0009-2500: THE COLONEL DID NOT DISCLOSE THE FACT THAT IT WAS NO NEWS TO HIM +6432-63723-0010-2501: AARON GRAFTON'S STATEMENT WAS BEING (UNEXPECTEDLY->UNEXPECTED GREAT) CONFIRMED +6432-63723-0011-2502: HE REMEMBERED THAT CYNTHIA AND GRAFTON HAD ONCE BEEN IN LOVE WITH EACH OTHER +6432-63723-0012-2503: SHE SAID HE HAD STRUCK HER MORE THAN ONCE AND SHE COULD STAND IT NO LONGER +6432-63723-0013-2504: BECAUSE (LARCH->LARGE) MADE NO (DEFENSE->DEFENCE) +6432-63723-0014-2505: (LARCH->LARGE) BY REFUSING TO APPEAR PRACTICALLY ADMITTED THE CHARGES AGAINST HIM AND DID NOT OPPOSE THE SEPARATION +6432-63723-0015-2506: SO I HAD TO LET HER HAVE HER WAY AND WE DID NOT ASK THE (COURT->CORP) FOR MONEY THOUGH I HAD NO SUCH SQUEAMISH FEELINGS WHEN IT CAME TO MY COUNSEL FEE +6432-63723-0016-2507: NO BUT HE WILL OR (I'LL SUE HIM->ELSE UM) AND GET JUDGMENT OH HE'LL PAY ALL RIGHT +6432-63723-0017-2508: AND IT TAKES ALL SORTS OF PERSONS TO MAKE IT UP +6432-63723-0018-2509: STILL I WOULD LIKE TO KNOW +6432-63723-0019-2510: THE MURDER OF MISSUS DARCY HAD SOME TIME AGO BEEN SHIFTED OFF THE FRONT PAGE THOUGH IT WOULD GET BACK THERE WHEN THE YOUNG JEWELER WAS TRIED +6432-63723-0020-2511: IT HAD A DOUBLE REPUTATION SO TO SPEAK +6432-63723-0021-2512: GRAVE AND EVEN REVEREND (*->THE) CONVENTIONS ASSEMBLED IN ITS (BALLROOM AND->BALL ROOM IN) POLITICIANS OF THE UPPER IF NOT BETTER CLASS WERE FREQUENTLY SEEN IN ITS DINING ROOM OR CAFE +6432-63723-0022-2513: (LARCH->LARGE) HIMSELF WAS A PECULIAR CHARACTER +6432-63723-0023-2514: IN A SMALLER PLACE HE WOULD HAVE BEEN CALLED A SALOON KEEPER +6432-63723-0024-2515: AND IT WAS THIS MAN RICH (IT WAS->EVER) SAID HANDSOME CERTAINLY THAT CYNTHIA RATCHFORD HAD MARRIED +6432-63723-0025-2516: TO THIS WAS THE ANSWER WHISPERED MONEY +6432-63723-0026-2517: AND IN A WAY IT WAS TRUE +6432-63723-0027-2518: SHE ALSO SAW AN OPPORTUNITY OF PAYING OLD DEBTS AND REAPING SOME REVENGES +6432-63723-0028-2519: AFTER THE MARRIAGE WHICH WAS A BRILLIANT AND GAY ONE IF NOT HAPPY THE (LARCH->LARGE) HOTEL IT COULD HARDLY BE CALLED A HOME BECAME THE SCENE OF MANY FESTIVE OCCASIONS +6432-63723-0029-2520: THEN IT WAS SAID OF (LARCH->LARGE) THAT SOON AFTER THE ECHOES OF THE WEDDING CHIMES HAD DIED AWAY HE HAD BEGUN TO TREAT HIS WIFE WITH (*->A) REFINED CRUELTY THAT HIDDEN AWAY FROM THE PUBLIC UNDERNEATH HIS HABITUAL MANNER THERE WAS THE RAWNESS OF THE BRUTE +6432-63723-0030-2521: BUT IT WAS NOTICED THAT THE OLDER AND MORE CONSERVATIVE FAMILIES WERE LESS OFTEN REPRESENTED AND WHEN THEY WERE IT WAS BY SOME OF THE YOUNGER MEMBERS WHOSE REPUTATIONS WERE ALREADY (SMIRCHED->SMARCHED) OR WHO HAD NOT YET ACQUIRED ANY AND WERE WILLING TO TAKE A CHANCE +6432-63723-0031-2522: IT WOULDN'T DO YOU KNOW AFTER THAT STORY CAME OUT FOR ME AND THE VICE CHANCELLOR WHO SAT IN (THE->A) CASE AS WELL AS OTHER JUDGES AND MEMBERS OF 
THE BAR TO BE SEEN THERE KENNETH EXPLAINED TO THE COLONEL +6432-63723-0032-2523: MEANWHILE COLONEL ASHLEY WAS A VERY BUSY MAN AND TO NO ONE DID HE TELL VERY MUCH ABOUT HIS ACTIVITIES HE SAW DARCY FREQUENTLY AT THE JAIL AND TO THAT YOUNG MAN'S PLEADINGS THAT SOMETHING (*->TO) BE DONE ALWAYS RETURNED THE ANSWER +6432-63723-0033-2524: DON'T WORRY IT WILL COME OUT ALL RIGHT +6432-63723-0034-2525: I'M GOING (TO RECTIFY->DIRECTIFY) THEM BUT (IT->I) WILL TAKE TIME +6432-63723-0035-2526: (IT'S->HIS) HARD FOR MISS MASON TOO ALTHOUGH SHE'S BEARING UP LIKE A MAJOR +6432-63723-0036-2527: SO KING (GOT->GOD) BAIL WHO PUT IT UP +6432-63723-0037-2528: IT WAS (HIGH->I) LARCH +6432-63723-0038-2529: THEY TOOK HARRY AWAY A WHILE AGO +6432-63723-0039-2530: BUT HIS ARE PRETTY UNCERTAIN SHOES TO BE IN JUST THE SAME +6432-63723-0040-2531: ONLY THAT I DARCY HESITATED AND GREW RED +6432-63723-0041-2532: GOOD EVENING COLONEL HE CALLED GENIALLY WILL YOU JOIN ME IN A WELSH RABBIT +6432-63723-0042-2533: THANK YOU NO +6432-63723-0043-2534: I'M AFRAID MY DIGESTION ISN'T QUITE UP TO THAT AS I'VE HAD TO CUT OUT MY FISHING OF LATE +6432-63723-0044-2535: NOW AS TO CERTAIN MATTERS IN THE STORE ON THE MORNING OF THE MURDER +6432-63723-0045-2536: (THE->THEY) STOPPED (CLOCKS->CLUXED) FOR INSTANCE HAVE YOU ANY THEORY +6432-63723-0046-2537: THERE WERE THREE OF THEM THE CENTER FIGURE BEING THAT OF HARRY KING AND HE WAS VERY MUCH INTOXICATED +6432-63723-0047-2538: THAT IS NOT ALWAYS BUT SOMETIMES IT HAPPENED TO BE SO NOW +6432-63723-0048-2539: I BEG YOUR PARDON HE SAID IN THE CULTURED TONES HE KNEW SO WELL HOW TO USE YET OF WHICH HE MADE SO LITTLE USE OF LATE +6432-63723-0049-2540: I SAID WHERE HAVE YOU BEEN REMARKED THE OTHER WE'VE MISSED YOU +6432-63723-0050-2541: I SAID I WAS GOLFING HE WENT ON EXCEEDINGLY DISTINCTLY THOUGH WITH AN EFFORT +6432-63723-0051-2542: WHY (POLONIUS->BONIUS) SOME ONE ASKED +6432-63723-0052-2543: BECAUSE DEAR FRIEND REPLIED KING SOFTLY HE SOMEWHAT RESEMBLES A CERTAIN PERSON HERE WHO TALKS TOO MUCH BUT WHO IS NOT SO WISE AS HE THINKS +6432-63723-0053-2544: THERE WAS A RATTLE OF (COINS ON->COIN DOWN) THE MAHOGANY BAR AS KING SOUGHT TO DISENTANGLE A SINGLE BILL FROM THE (WADDED->WATERED) UP CURRENCY IN HIS POCKET +6432-63723-0054-2545: IT'S (IT'S->*) AN ODD COIN AN OLD ROMAN ONE THAT MISSUS DARCY HAD IN HER PRIVATE COLLECTION KEPT IN THE JEWELRY STORE SAFE WAS THE WHISPERED ANSWER +6432-63723-0055-2546: I WENT OVER THEM (*->NEAR) THE (OTHER->*) DAY AND NOTICED SOME WERE MISSING THOUGH I SAW THEM ALL WHEN I PAID A VISIT TO HER JUST A SHORT TIME BEFORE SHE WAS KILLED +6432-63723-0056-2547: THAT WAS HERS WENT ON THE JEWELER +6432-63723-0057-2548: NOW HARRY KING HAS IT EXCLAIMED COLONEL ASHLEY +6938-70848-0000-1216: EVEN THE SUN CAME OUT PALE AND WATERY AT NOON +6938-70848-0001-1217: THE (COLDS->GOLDS) AND RHEUMATISM OF THE RAINY MONTHS VANISHED +6938-70848-0002-1218: (ASKED A->AS TO) WORKER LAST SUNDAY YOU DID IT WHEN THE YUNKERS +6938-70848-0003-1219: WELL DIDN'T THEY SHOOT US ONE MAN EXHIBITED HIS ARM IN A SLING +6938-70848-0004-1220: HAVEN'T I GOT SOMETHING TO REMEMBER THEM BY THE DEVILS +6938-70848-0005-1221: WHO ARE YOU TO DESTROY THE LEGAL GOVERNMENT (WHO IS LENIN->WITH LANY) A GERMAN +6938-70848-0006-1222: WHO ARE YOU A COUNTER (REVOLUTIONIST A PROVOCATOR->REVOLITIONIST APPROPATOR) THEY (BELLOWED->BELOVED) AT HIM +6938-70848-0007-1223: YOU CALL YOURSELVES THE PEOPLE OF (RUSSIA BUT YOU'RE->A SHEPHERD YOU ARE) NOT THE PEOPLE OF RUSSIA +6938-70848-0008-1224: (THE PEASANTS ARE THE->TO PIECE AND OTHER) PEOPLE 
OF RUSSIA WAIT UNTIL THE PEASANTS +6938-70848-0009-1225: WE KNOW WHAT THE PEASANTS WILL SAY AREN'T THEY (WORKINGMEN->WORKING MEN) LIKE OURSELVES +6938-70848-0010-1226: (THESE MEN ESPECIALLY->THIS MAN HAS SPECIALLY) WELCOMED THE CALL TO A CONGRESS OF PEASANTS +6938-70848-0011-1227: (THESE->THIS) LAST (WERE->WHERE) THE YOUNG GENERATION WHO HAD BEEN SERVING IN THE ARMY +6938-70848-0012-1228: WHEREUPON THE OLD (EXECUTIVE->EXECUTED) COMMITTEE LEFT THE HALL +6938-70848-0013-1229: DOWN WITH HIM THEY SHRIEKED +6938-70848-0014-1230: FEARFUL TUMULT (CRIES->CHRIST) DOWN WITH THE (BOLSHEVIKI->PULCHEVIKI) +6938-70848-0015-1231: UPON MY RETURN I VISITED (SMOLNY->MORLEY) NO SUCH ACCUSATION WAS MADE AGAINST ME THERE AFTER A BRIEF CONVERSATION I LEFT AND (THAT'S ALL->THAT SOUL) LET (ANY ONE->ANYONE) PRESENT MAKE SUCH AN ACCUSATION +6938-70848-0016-1232: MEANWHILE THE QUESTION OF THE (STATUS->STRATORS) OF THE (EXECUTIVE->EXECUTED) COMMITTEE WAS AGITATING ALL MINDS +6938-70848-0017-1233: BY (DECLARING THE->DECLINING THEIR) ASSEMBLY EXTRAORDINARY CONFERENCE IT HAD BEEN PLANNED TO (BLOCK->PLOT) THE (REELECTION->RE ELECTION) OF THE EXECUTIVE COMMITTEE +6938-70848-0018-1234: BUT THIS (WORKED->WORTH) BOTH WAYS THE (LEFT SOCIALIST REVOLUTIONISTS->LAST SOCIALLY REVOLUTION IS) DECIDED THAT IF THE CONGRESS HAD NO POWER OVER THE (EXECUTIVE->EXECUTING) COMMITTEE THEN THE EXECUTIVE COMMITTEE HAD NO POWER OVER THE CONGRESS +6938-70848-0019-1235: ON THE TWENTY SEVENTH OCCURRED THE DEBATE ON THE LAND QUESTION WHICH REVEALED THE DIFFERENCES BETWEEN THE (AGRARIAN->AGRIAN) PROGRAMME OF THE BOLSHEVIKI AND THE LEFT SOCIALIST REVOLUTIONARIES +6938-70848-0020-1236: THE (CONSTITUENT->CONSTITUTE) ASSEMBLY WILL NOT DARE TO BREAK WITH THE WILL OF THE PEOPLE +6938-70848-0021-1237: FOLLOWED HIM LENIN LISTENED TO NOW WITH ABSORBING INTENSITY +6938-70848-0022-1238: THE FIRST STAGE WAS (THE->A) CRUSHING OF AUTOCRACY AND (THE CRUSHING->A CRASHING) OF THE POWER OF THE INDUSTRIAL (CAPITALISTS->CAPITALIST) AND (LAND OWNERS->THE LANDOWNERS) WHOSE INTERESTS ARE CLOSELY RELATED +6938-70848-0023-1239: (THE DUMAS AND ZEMSTVOS->DID YOU ME SEND THEMSELVES) WERE DROPPED +6938-70848-0024-1240: HE KNEW THAT AN AGREEMENT WITH THE BOLSHEVIKI WAS BEING DISCUSSED BUT HE DID NOT KNOW THAT IT HAD BEEN CONCLUDED +6938-70848-0025-1241: HE SPOKE TO THE (RUMP->WRONG) CONVENTION +6938-70848-0026-1242: THE (VILLAGES->RELIGIOUS) WILL SAVE US IN THE END +6938-70848-0027-1243: BUT THE PRESENT (MOVEMENT->MOMENT) IS INTERNATIONAL AND THAT IS WHY IT IS INVINCIBLE +6938-70848-0028-1244: THE (WILL->WHEEL) OF MILLIONS OF WORKERS IS (NOW->SO) CONCENTRATED IN (THIS->THE) HALL +6938-70848-0029-1245: A NEW HUMANITY WILL BE BORN OF THIS WAR +6938-70848-0030-1246: I GREET YOU WITH THE (CHRISTENING->CHRISTIANNING) OF A NEW RUSSIAN LIFE AND FREEDOM +7018-75788-0000-135: THEN I TOOK UP A GREAT STONE FROM AMONG THE TREES AND COMING UP TO HIM SMOTE HIM THEREWITH ON THE HEAD WITH ALL MY MIGHT AND CRUSHED IN HIS SKULL AS HE LAY DEAD DRUNK +7018-75788-0001-136: BEHOLD A SHIP WAS MAKING FOR THE ISLAND THROUGH THE DASHING SEA AND CLASHING WAVES +7018-75788-0002-137: HEARING THIS I WAS SORE TROUBLED REMEMBERING WHAT I HAD BEFORE SUFFERED FROM THE APE KIND +7018-75788-0003-138: UPON THIS HE BROUGHT ME A COTTON BAG AND (GIVING->GIVEN) IT TO ME SAID TAKE THIS BAG AND FILL IT WITH PEBBLES FROM THE BEACH AND GO FORTH WITH A COMPANY OF THE TOWNSFOLK TO WHOM I WILL GIVE A CHARGE RESPECTING THEE +7018-75788-0004-139: DO AS THEY DO AND (BELIKE->BE LIKE) THOU SHALT GAIN WHAT MAY FURTHER THY RETURN 
VOYAGE TO THY NATIVE LAND +7018-75788-0005-140: THEN HE CARRIED ME TO THE BEACH WHERE I FILLED MY BAG WITH PEBBLES LARGE AND SMALL AND PRESENTLY WE SAW A COMPANY OF FOLK ISSUE FROM THE TOWN EACH BEARING A BAG LIKE MINE FILLED WITH PEBBLES +7018-75788-0006-141: TO THESE HE COMMITTED ME COMMENDING ME TO THEIR CARE AND SAYING THIS MAN IS A STRANGER SO TAKE HIM WITH YOU AND TEACH HIM HOW TO GATHER THAT HE MAY GET HIS DAILY BREAD AND YOU WILL EARN YOUR REWARD AND RECOMPENSE IN HEAVEN +7018-75788-0007-142: NOW SLEEPING UNDER THESE TREES WERE MANY (APES->IPES) WHICH WHEN THEY SAW US ROSE AND FLED FROM US AND SWARMED UP AMONG THE BRANCHES WHEREUPON MY COMPANIONS BEGAN TO PELT THEM WITH WHAT THEY HAD IN THEIR BAGS AND THE APES FELL TO PLUCKING OF THE FRUIT OF THE TREES AND CASTING THEM AT THE FOLK +7018-75788-0008-143: WE (WEIGHED->WADE) ANCHOR AND SHAHRAZAD PERCEIVED THE DAWN OF DAY AND CEASED SAYING HER PERMITTED SAY +7018-75788-0009-144: WHEN IT WAS THE FIVE HUNDRED AND FIFTY NINTH NIGHT +7018-75788-0010-145: AND CEASED NOT SAILING TILL WE ARRIVED SAFELY AT (BASSORAH->PUSSARA) +7018-75788-0011-146: THERE I ABODE A LITTLE AND THEN WENT ON TO (BAGHDAD->BAGDAD) WHERE I ENTERED MY QUARTER AND FOUND MY HOUSE AND (FOREGATHERED->FOR GATHERED) WITH MY FAMILY AND SALUTED MY FRIENDS WHO GAVE ME JOY OF MY SAFE RETURN AND I LAID UP ALL MY GOODS AND VALUABLES IN MY STOREHOUSES +7018-75788-0012-147: AFTER WHICH I RETURNED TO MY OLD MERRY WAY OF LIFE AND FORGOT ALL I HAD SUFFERED IN THE GREAT PROFIT AND GAIN I HAD MADE +7018-75788-0013-148: NEXT MORNING AS SOON AS IT WAS LIGHT HE PRAYED THE DAWN PRAYER AND AFTER BLESSING MOHAMMED THE CREAM OF ALL CREATURES BETOOK HIMSELF TO THE HOUSE OF (SINDBAD->SINBAD) THE SEAMAN AND WISHED HIM A GOOD DAY +7018-75788-0014-149: HERE I FOUND A GREAT SHIP READY FOR SEA AND FULL OF MERCHANTS AND NOTABLES WHO HAD WITH THEM GOODS OF PRICE SO I EMBARKED MY BALES THEREIN +7018-75788-0015-150: (HAPLY->HAPPILY) AMONGST YOU IS ONE RIGHTEOUS WHOSE PRAYERS THE LORD WILL ACCEPT +7018-75788-0016-151: PRESENTLY THE SHIP STRUCK THE MOUNTAIN AND BROKE UP AND ALL AND EVERYTHING ON BOARD OF HER WERE PLUNGED INTO THE SEA +7018-75788-0017-152: BUT (IT BURNETH->AT BERNETH) IN THEIR BELLIES SO THEY CAST IT UP AGAIN AND IT CONGEALETH ON THE SURFACE OF THE WATER WHEREBY ITS COLOR AND QUANTITIES ARE CHANGED AND AT LAST THE WAVES CAST IT ASHORE AND THE TRAVELLERS AND MERCHANTS WHO KNOW IT (COLLECT IT->COLLECTED) AND SELL IT +7018-75788-0018-153: EACH THAT DIED WE WASHED AND SHROUDED IN SOME OF THE CLOTHES AND LINEN CAST ASHORE BY THE TIDES AND AFTER A LITTLE THE REST OF MY FELLOWS PERISHED ONE BY ONE TILL I HAD BURIED THE LAST OF THE PARTY AND (ABODE->A BOAT) ALONE ON THE ISLAND WITH BUT A LITTLE PROVISION LEFT I WHO WAS WONT TO HAVE SO MUCH +7018-75788-0019-154: BUT THERE IS MAJESTY AND THERE IS NO MIGHT SAVE IN ALLAH THE GLORIOUS THE GREAT +7018-75789-0000-155: WHEN IT WAS THE FIVE HUNDRED AND SIXTY FIRST NIGHT +7018-75789-0001-156: THEN (SIGHING->SIGNED) FOR MYSELF I SET TO WORK COLLECTING A NUMBER OF PIECES OF CHINESE AND (COMORIN ALOES->CORMOR AND ALLIES) WOOD AND I BOUND THEM TOGETHER WITH ROPES FROM THE WRECKAGE THEN I CHOSE OUT FROM THE BROKEN UP (SHIPS->SHIP) STRAIGHT PLANKS OF EVEN SIZE AND FIXED THEM FIRMLY UPON THE (ALOES->ALLIES) WOOD MAKING ME A BOAT RAFT A LITTLE NARROWER THAN THE CHANNEL OF THE STREAM AND I TIED IT TIGHTLY AND FIRMLY AS THOUGH IT WERE NAILED +7018-75789-0002-157: LAND AFTER LAND SHALT THOU (SEEK AND FIND->SEE CONFINED) BUT NO OTHER LIFE ON THY WISH SHALL WAIT FRET NOT 
THY SOUL IN THY THOUGHTS (O->ARE) NIGHT (ALL->OR) WOES SHALL END OR SOONER OR LATE +7018-75789-0003-158: I (ROWED->RIDE) MY CONVEYANCE INTO THE PLACE WHICH WAS INTENSELY DARK AND THE CURRENT CARRIED (*->ME) THE RAFT WITH IT DOWN THE UNDERGROUND CHANNEL +7018-75789-0004-159: AND I THREW MYSELF DOWN UPON MY FACE ON THE RAFT BY REASON OF THE NARROWNESS OF THE CHANNEL WHILST THE STREAM CEASED NOT TO CARRY ME ALONG KNOWING NOT NIGHT FROM DAY FOR THE EXCESS OF THE GLOOM WHICH ENCOMPASSED ME ABOUT (AND->IN) MY TERROR AND CONCERN FOR MYSELF LEST I SHOULD PERISH +7018-75789-0005-160: WHEN I AWOKE AT LAST I FOUND MYSELF IN THE LIGHT OF HEAVEN AND OPENING MY EYES I SAW MYSELF IN A BROAD STREAM AND THE RAFT MOORED TO AN ISLAND IN THE MIDST OF A NUMBER OF INDIANS AND ABYSSINIANS +7018-75789-0006-161: BUT I WAS DELIGHTED AT MY ESCAPE FROM THE RIVER +7018-75789-0007-162: WHEN THEY SAW I UNDERSTOOD THEM NOT AND MADE THEM NO ANSWER ONE OF THEM CAME FORWARD AND SAID TO ME IN ARABIC PEACE BE WITH THEE O MY BROTHER +7018-75789-0008-163: O MY BROTHER ANSWERED HE WE ARE HUSBANDMEN AND (TILLERS->TELLERS) OF THE SOIL WHO CAME OUT TO WATER OUR FIELDS AND PLANTATIONS AND FINDING THEE ASLEEP ON THIS RAFT LAID HOLD OF IT AND MADE IT FAST BY US AGAINST THOU (SHOULDST->SHOULDEST) AWAKE AT THY LEISURE +7018-75789-0009-164: I ANSWERED FOR ALLAH'S SAKE (O->AM) MY LORD ERE I SPEAK GIVE ME SOMEWHAT TO EAT FOR I AM STARVING AND AFTER ASK ME WHAT THOU WILT +7018-75789-0010-165: WHEN IT WAS THE FIVE HUNDRED AND SIXTY SECOND NIGHT +7018-75789-0011-166: SHE SAID IT HATH (REACHED->RAGED) ME O AUSPICIOUS KING THAT (SINDBAD->SINBAD) THE SEAMAN CONTINUED WHEN I LANDED AND FOUND MYSELF AMONGST THE INDIANS AND ABYSSINIANS AND HAD TAKEN SOME REST THEY CONSULTED AMONG THEMSELVES AND SAID TO ONE ANOTHER THERE IS NO HELP FOR IT BUT WE CARRY HIM WITH US AND PRESENT HIM TO OUR KING THAT HE MAY ACQUAINT HIM WITH HIS ADVENTURES +7018-75789-0012-167: SO I CONSORTED WITH THE CHIEF OF THE ISLANDERS AND THEY PAID ME THE UTMOST RESPECT +7018-75789-0013-168: SO I ROSE WITHOUT STAY OR DELAY AND KISSED THE KING'S HAND AND ACQUAINTED HIM WITH MY LONGING TO SET OUT WITH THE MERCHANTS FOR THAT I PINED AFTER MY PEOPLE AND MINE OWN LAND +7018-75789-0014-169: QUOTH HE THOU ART THINE OWN MASTER YET IF IT BE THY WILL TO ABIDE WITH US (ON OUR->HONOUR) HEAD AND EYES BE IT FOR THOU GLADDENEST US WITH THY COMPANY +7018-75789-0015-170: BY ALLAH O MY LORD ANSWERED I THOU HAST INDEED OVERWHELMED ME WITH THY FAVOURS AND WELL DOINGS BUT I WEARY FOR A SIGHT OF MY FRIENDS AND FAMILY AND NATIVE COUNTRY +7018-75789-0016-171: THEN I TOOK LEAVE OF HIM AND OF ALL MY INTIMATES AND ACQUAINTANCES IN THE ISLAND AND EMBARKED WITH THE MERCHANTS AFORESAID +7018-75789-0017-172: HE ASKED ME WHENCE THEY CAME AND I SAID TO HIM BY ALLAH (O->A) COMMANDER OF THE FAITHFUL I KNOW NOT THE NAME OF THE CITY NOR THE WAY THITHER +7018-75789-0018-173: FOR STATE PROCESSIONS A THRONE IS SET FOR HIM UPON A HUGE ELEPHANT ELEVEN CUBITS HIGH AND UPON THIS HE SITTETH HAVING HIS GREAT LORDS AND OFFICERS AND GUESTS STANDING IN TWO RANKS ON HIS RIGHT HAND AND ON HIS LEFT +7018-75789-0019-174: HIS LETTER HATH SHOWN ME THIS AND AS FOR THE MIGHTINESS OF HIS DOMINION THOU HAST TOLD US WHAT THOU HAST (EYE->I) WITNESSED +7018-75789-0020-175: PRESENTLY MY FRIENDS CAME TO ME AND I DISTRIBUTED PRESENTS AMONG MY FAMILY AND GAVE ALMS AND LARGESSE AFTER WHICH I YIELDED MYSELF TO JOYANCE AND ENJOYMENT MIRTH AND (MERRY MAKING->MERRYMAKING) AND FORGOT ALL THAT I HAD SUFFERED +7018-75789-0021-176: SUCH THEN O MY BROTHERS 
IS THE HISTORY OF WHAT (BEFEL->BEFELL) ME IN MY SIXTH VOYAGE AND TO MORROW INSHALLAH +7018-75789-0022-177: I WILL TELL YOU THE STORY OF MY SEVENTH AND LAST VOYAGE WHICH IS STILL MORE WONDROUS AND MARVELLOUS THAN THAT OF THE FIRST SIX +7018-75789-0023-178: WHEN IT WAS THE FIVE HUNDRED AND SIXTY THIRD NIGHT +7018-75789-0024-179: SHE SAID IT HATH REACHED ME O AUSPICIOUS KING THAT WHEN (SINDBAD->SINBAD) THE (SEAMAN->SIMON) HAD (RELATED->RELIGHTED) THE HISTORY OF WHAT (BEFEL->BEFELL) HIM IN HIS SIXTH VOYAGE AND ALL THE COMPANY HAD DISPERSED (SINDBAD->SINBAD) THE LANDSMAN WENT HOME AND SLEPT AS OF WONT +7018-75789-0025-180: THE SEVENTH VOYAGE OF (SINDBAD->SINBAD) THE (SEAMAN->SALMON) +7018-75789-0026-181: (KNOW->NO) O COMPANY THAT AFTER MY RETURN FROM MY SIXTH VOYAGE WHICH BROUGHT ME ABUNDANT PROFIT I RESUMED MY FORMER LIFE (IN->AND) ALL POSSIBLE JOYANCE AND ENJOYMENT AND MIRTH AND MAKING MERRY DAY AND NIGHT AND I TARRIED SOME TIME IN THIS SOLACE AND SATISFACTION TILL MY SOUL BEGAN ONCE MORE TO LONG TO SAIL THE SEAS AND SEE FOREIGN COUNTRIES AND COMPANY WITH MERCHANTS AND (HEAR->HERE) NEW THINGS +7018-75789-0027-182: SO HAVING MADE UP MY MIND I PACKED UP IN BALES A QUANTITY OF PRECIOUS STUFFS SUITED FOR SEA TRADE AND REPAIRED WITH THEM FROM BAGHDAD CITY TO (BASSORAH->BASSORA) TOWN WHERE I FOUND A SHIP READY FOR SEA AND IN HER A COMPANY OF CONSIDERABLE MERCHANTS +7018-75789-0028-183: BUT THE CAPTAIN AROSE AND (TIGHTENING->TIGHTENED IN) HIS GIRDLE TUCKED UP HIS SKIRTS AND AFTER TAKING REFUGE WITH ALLAH FROM SATAN THE (STONED CLOMB->STONE CLIMBED) TO THE MAST HEAD WHENCE HE LOOKED OUT RIGHT AND LEFT AND GAZING AT THE PASSENGERS AND CREW FELL TO (BUFFETING->BUFFET IN) HIS FACE AND PLUCKING OUT HIS BEARD +7018-75789-0029-184: THIS HE (SET->SAID) IN A SAUCER WETTED WITH A LITTLE WATER AND AFTER WAITING A SHORT TIME SMELT AND TASTED IT AND THEN HE TOOK OUT OF THE CHEST A BOOKLET WHEREIN HE READ (AWHILE->A WHILE) AND SAID WEEPING KNOW O YE PASSENGERS THAT IN THIS BOOK IS A MARVELLOUS MATTER DENOTING THAT WHOSO (COMETH HITHER->COME THITHER) SHALL SURELY DIE WITHOUT HOPE OF ESCAPE FOR THAT THIS OCEAN IS CALLED THE SEA OF THE CLIME OF THE KING WHEREIN IS (THE->A) SEPULCHRE OF OUR LORD SOLOMON SON OF DAVID ON BOTH BE PEACE +7018-75789-0030-185: A SECOND FISH (MADE->READ) ITS APPEARANCE (THAN->AND) WHICH WE HAD SEEN (NAUGHT->NOUGHT) MORE MONSTROUS +7018-75789-0031-186: WHEN SUDDENLY A VIOLENT SQUALL OF WIND AROSE AND SMOTE THE SHIP WHICH ROSE OUT OF THE WATER AND SETTLED UPON A GREAT REEF THE HAUNT OF SEA MONSTERS WHERE IT BROKE UP AND FELL ASUNDER INTO PLANKS AND ALL AND EVERYTHING ON BOARD WERE PLUNGED INTO THE SEA +7105-2330-0000-2310: UNFORTUNATELY THERE COULD BE NO DOUBT (OR->OUR) MISCONCEPTION AS (TO PLATTERBAFF'S->THE PLATTERBUFF'S) GUILT +7105-2330-0001-2311: HE HAD NOT ONLY (PLEADED->PLAYED IT) GUILTY BUT HAD EXPRESSED HIS INTENTION OF REPEATING HIS ESCAPADE IN OTHER DIRECTIONS AS SOON AS CIRCUMSTANCES PERMITTED THROUGHOUT THE TRIAL HE WAS BUSY EXAMINING A SMALL MODEL OF THE FREE TRADE HALL IN MANCHESTER +7105-2330-0002-2312: (THE JURY->VERY CHEERY) COULD NOT POSSIBLY FIND THAT THE PRISONER HAD NOT DELIBERATELY AND INTENTIONALLY BLOWN UP THE ALBERT HALL THE QUESTION WAS COULD THEY FIND ANY (EXTENUATING->EXTINUATING) CIRCUMSTANCES WHICH WOULD PERMIT OF AN ACQUITTAL +7105-2330-0003-2313: OF COURSE ANY SENTENCE (WHICH->REACHED) THE LAW MIGHT FEEL COMPELLED TO INFLICT WOULD BE FOLLOWED BY AN IMMEDIATE PARDON BUT IT WAS HIGHLY DESIRABLE FROM THE GOVERNMENT'S POINT OF VIEW THAT THE NECESSITY FOR SUCH AN 
EXERCISE OF CLEMENCY SHOULD NOT ARISE +7105-2330-0004-2314: (A HEADLONG->I HAD LONG) PARDON (ON->AND) THE EVE OF A (BYE ELECTION->BIOLECTION) WITH THREATS OF A HEAVY VOTING (DEFECTION->AFFECTION) IF IT WERE WITHHELD OR EVEN DELAYED WOULD NOT NECESSARILY BE A SURRENDER BUT IT WOULD LOOK LIKE ONE +7105-2330-0005-2315: HENCE (THE->THEIR) ANXIETY IN THE CROWDED COURT AND IN THE LITTLE GROUPS GATHERED ROUND THE TAPE MACHINES IN WHITEHALL AND (DOWNING->DAWNING) STREET (AND OTHER->ANOTHER) AFFECTED CENTRES +7105-2330-0006-2316: (THE JURY RETURNED->THEIR CHEERY TURN) FROM CONSIDERING THEIR VERDICT THERE WAS A FLUTTER AN EXCITED MURMUR A (DEATHLIKE->DEATH LIKE) HUSH +7105-2330-0007-2317: THE (FOREMAN->FOUR MEN) DELIVERED HIS MESSAGE +7105-2330-0008-2318: THE (JURY->CHERRY) FIND THE PRISONER GUILTY OF BLOWING UP THE ALBERT HALL +7105-2330-0009-2319: (THE JURY->THEY JERRY) WISH TO ADD A (RIDER->WRITER) DRAWING ATTENTION TO THE FACT THAT A (BY ELECTION->BILL) IS (PENDING->SPENDING) IN THE PARLIAMENTARY DIVISION OF NEMESIS ON HAND +7105-2330-0010-2320: AND (MAY->MADE) THE (LORD->LARD) HAVE MERCY ON THE (POLL->POLE) A (JUNIOR COUNSEL->GENIOR CONSUL) EXCLAIMED IRREVERENTLY +7105-2330-0011-2321: FIFTEEN HUNDRED SAID THE PRIME MINISTER WITH A SHUDDER IT'S TOO HORRIBLE TO THINK OF +7105-2330-0012-2322: OUR MAJORITY LAST TIME WAS ONLY A THOUSAND AND SEVEN +7105-2330-0013-2323: SEVEN THIRTY AMENDED THE PRIME MINISTER WE MUST AVOID ANY APPEARANCE OF PRECIPITANCY +7105-2330-0014-2324: NOT LATER THAN SEVEN THIRTY THEN SAID THE CHIEF (ORGANISER->ORGANIZER) I HAVE PROMISED THE AGENT DOWN THERE THAT HE SHALL BE ABLE TO DISPLAY POSTERS ANNOUNCING PLATTERBAFF IS OUT BEFORE THE (POLL->POLE) OPENS +7105-2330-0015-2325: HE SAID IT WAS (OUR->HER) ONLY CHANCE OF GETTING A TELEGRAM (RADPROP->REDRUP) IS (IN->INN) TO NIGHT +7105-2330-0016-2326: (DESPITE->THIS SPITE) THE EARLINESS OF THE HOUR A SMALL CROWD HAD GATHERED IN THE STREET OUTSIDE AND THE HORRIBLE MENACING (TRELAWNEY->TREEONER) REFRAIN OF THE FIFTEEN HUNDRED VOTING MEN CAME IN A STEADY MONOTONOUS CHANT +7105-2330-0017-2327: HE EXCLAIMED WON'T GO +7105-2330-0018-2328: HE SAYS HE NEVER HAS LEFT PRISON WITHOUT A (BRASS BAND->BREASTPAND) TO PLAY HIM OUT AND HE'S NOT GOING TO GO WITHOUT ONE NOW +7105-2330-0019-2329: SAID THE PRIME MINISTER WE CAN HARDLY BE SUPPOSED TO SUPPLY A (RELEASED->RELISSE) PRISONER WITH A BRASS BAND HOW ON EARTH COULD WE (DEFEND IT->DEFENDED) ON THE ESTIMATES +7105-2330-0020-2330: (ANYWAY HE->AND AWAY YOU) WON'T GO UNLESS HE HAS A BAND +7105-2330-0021-2331: (POLL->PAUL) OPENS IN FIVE MINUTES +7105-2330-0022-2332: (IS PLATTERBAFF->HIS FURTHER BATH) OUT YET +7105-2330-0023-2333: IN HEAVEN'S NAME WHY +7105-2330-0024-2334: THE CHIEF (ORGANISER->ORGANIZER) RANG OFF +7105-2330-0025-2335: THIS IS NOT A MOMENT FOR STANDING ON DIGNITY HE OBSERVED BLUNTLY (MUSICIANS->MEASIANS) MUST BE SUPPLIED AT ONCE +7105-2330-0026-2336: CAN'T YOU GET (A->US) STRIKE PERMIT ASKED THE (ORGANISER->ORGANIZER) +7105-2330-0027-2337: I'LL TRY SAID THE HOME SECRETARY AND WENT TO THE TELEPHONE +7105-2330-0028-2338: EIGHT O'CLOCK STRUCK THE CROWD OUTSIDE CHANTED WITH AN INCREASING VOLUME OF SOUND (WILL VOTE->REVOTE) THE OTHER WAY +7105-2330-0029-2339: (A TELEGRAM WAS->I TELEGRAMAS) BROUGHT IN +7105-2330-0030-2340: IT WAS FROM THE CENTRAL (COMMITTEE->COMEDY) ROOMS AT NEMESIS +7105-2330-0031-2341: WITHOUT A BAND HE WOULD NOT GO AND THEY HAD NO (BAND->BEND) +7105-2330-0032-2342: A QUARTER PAST TEN HALF PAST +7105-2330-0033-2343: HAVE YOU ANY BAND INSTRUMENTS OF AN EASY NATURE TO PLAY 
+7105-2330-0034-2344: DEMANDED THE CHIEF (ORGANISER->ORGANIZER) OF THE PRISON GOVERNOR DRUMS (CYMBALS->SYMBOLS) THOSE SORT OF THINGS +7105-2330-0035-2345: THE (WARDERS->ORDERS) HAVE A PRIVATE BAND OF THEIR OWN SAID THE GOVERNOR BUT OF COURSE I COULDN'T ALLOW THE MEN THEMSELVES +7105-2330-0036-2346: (LEND US->BLENDEST) THE INSTRUMENTS SAID THE CHIEF (ORGANISER->ORGANIZER) +7105-2330-0037-2347: (THE->THEIR) POPULAR SONG OF THE MOMENT REPLIED THE AGITATOR AFTER A MOMENT'S REFLECTION +7105-2330-0038-2348: IT WAS A TUNE THEY HAD ALL HEARD HUNDREDS OF TIMES SO THERE (WAS->IS) NO DIFFICULTY IN TURNING OUT A PASSABLE IMITATION OF IT TO THE IMPROVISED (STRAINS->TRAINS) OF (I DIDN'T->EITHERN) WANT TO DO IT THE PRISONER STRODE FORTH TO FREEDOM +7105-2330-0039-2349: THE WORD OF THE (SONG->SUN) HAD REFERENCE IT WAS UNDERSTOOD (TO THE->THAT) INCARCERATING GOVERNMENT AND NOT TO THE DESTROYER OF THE ALBERT HALL +7105-2330-0040-2350: (THE SEAT->THIS HEAT) WAS LOST AFTER ALL BY A NARROW (MAJORITY->MATURITY) +7105-2330-0041-2351: THE LOCAL TRADE UNIONISTS TOOK OFFENCE AT THE FACT OF (CABINET MINISTERS->CABINETS) HAVING PERSONALLY ACTED AS (STRIKE BREAKERS->STRIKEBREAKERS) AND EVEN THE RELEASE OF (PLATTERBAFF->PLATTERBUFF) FAILED TO PACIFY THEM +7105-2340-0000-2272: WITH THAT NOTORIOUS FAILING OF HIS HE WAS NOT (THE->A) SORT OF PERSON ONE WANTED IN ONE'S HOUSE +7105-2340-0001-2273: WELL THE FAILING STILL EXISTS DOESN'T IT SAID (HER->THE) HUSBAND OR (*->A) DO YOU SUPPOSE A REFORM OF CHARACTER IS ENTAILED ALONG WITH THE ESTATE +7105-2340-0002-2274: BESIDES (CYNICISM->SYNICISM) APART (HIS->IS) BEING RICH (WILL->WE'LL) MAKE A DIFFERENCE IN THE WAY PEOPLE WILL LOOK AT HIS (FAILING->FEELING) +7105-2340-0003-2275: WHEN A MAN IS ABSOLUTELY WEALTHY NOT MERELY WELL TO DO ALL SUSPICION OF (SORDID->SARDID) MOTIVE (NATURALLY->NATURAL) DISAPPEARS THE THING BECOMES MERELY A (TIRESOME->PARASAN) MALADY +7105-2340-0004-2276: (WILFRID PIGEONCOTE->WILFRED DIGESON COLT) HAD SUDDENLY BECOME HEIR TO HIS UNCLE SIR WILFRID (PIGEONCOTE->PIGEON COAT) ON THE DEATH OF HIS COUSIN MAJOR (WILFRID PIGEONCOTE->WILFRED PIGEONOTE) WHO HAD SUCCUMBED (TO->*) THE (*->DAY) AFTER EFFECTS OF (A POLO->APOLLO) ACCIDENT +7105-2340-0005-2277: (A WILFRID PIGEONCOTE->OF WILFRED BEECH AND COURT) HAD COVERED HIMSELF WITH (HONOURS->HONORS) IN THE COURSE OF MARLBOROUGH'S CAMPAIGNS AND THE NAME (WILFRID->LOYAL FRED) HAD BEEN (A BAPTISMAL->ABOVE THE SMALL) WEAKNESS IN THE FAMILY EVER SINCE THE NEW HEIR TO THE FAMILY DIGNITY AND ESTATES WAS A YOUNG MAN OF ABOUT FIVE AND TWENTY WHO WAS KNOWN MORE BY (REPUTATION->REPETITION) THAN BY PERSON TO (A WIDE->AVIDE) CIRCLE OF COUSINS AND KINSFOLK +7105-2340-0006-2278: AND THE REPUTATION WAS AN UNPLEASANT ONE +7105-2340-0007-2279: FROM HIS LATE (SCHOOLDAYS->SCHOOL DAYS) ONWARD HE HAD BEEN POSSESSED BY AN ACUTE AND OBSTINATE FORM OF (KLEPTOMANIA->CLEFTOMANIA) HE HAD THE ACQUISITIVE INSTINCT OF THE COLLECTOR WITHOUT ANY OF THE COLLECTOR'S DISCRIMINATION +7105-2340-0008-2280: (THE->THIS) SEARCH USUALLY (PRODUCED->PRODUCE) A LARGE AND VARIED YIELD THIS IS FUNNY SAID PETER (PIGEONCOTE->PIGEON BOAT) TO HIS WIFE (SOME->THEM) HALF HOUR AFTER THEIR CONVERSATION (HERE'S->HERE IS) A TELEGRAM FROM (WILFRID->MILFRED) SAYING HE'S PASSING THROUGH HERE IN HIS MOTOR AND WOULD LIKE TO STOP AND PAY US HIS RESPECTS +7105-2340-0009-2281: SIGNED (WILFRID PIGEONCOTE->WILFRED PEACH AND COLT) +7105-2340-0010-2282: I SUPPOSE (HE'S->THIS) BRINGING US A PRESENT FOR THE SILVER WEDDING GOOD GRACIOUS +7105-2340-0011-2283: THE TALK FLITTED NERVOUSLY AND 
HURRIEDLY FROM ONE IMPERSONAL TOPIC TO ANOTHER +7105-2340-0012-2284: IN THE DRAWING ROOM AFTER DINNER THEIR NERVOUSNESS AND AWKWARDNESS INCREASED +7105-2340-0013-2285: OH WE HAVEN'T SHOWN YOU THE (SILVER->SILVERY) WEDDING PRESENTS SAID MISSUS PETER SUDDENLY AS THOUGH STRUCK BY A BRILLIANT IDEA (FOR->OF HER) ENTERTAINING THE GUEST HERE THEY ALL ARE +7105-2340-0014-2286: SUCH NICE USEFUL GIFTS A FEW (DUPLICATES->DEPLICATES) OF COURSE +7105-2340-0015-2287: SEVEN (CREAM->QUEEN) JUGS PUT IN PETER +7105-2340-0016-2288: WE FEEL THAT WE MUST LIVE (ON CREAM->UNCREAM) FOR THE REST OF OUR LIVES +7105-2340-0017-2289: OF COURSE SOME OF THEM CAN BE CHANGED +7105-2340-0018-2290: I PUT IT DOWN BY THE (CLARET JUG->CLARGA) SAID (WILFRID->WILFRIED) BUSY WITH ANOTHER OBJECT +7105-2340-0019-2291: (VIGILANCE->EACH A LENS) WAS NOT COMPLETELY CROWNED WITH A SENSE OF VICTORY +7105-2340-0020-2292: AFTER THEY HAD SAID GOOD NIGHT TO THEIR VISITOR MISSUS PETER EXPRESSED HER CONVICTION THAT HE HAD TAKEN SOMETHING +7105-2340-0021-2293: HOW ON EARTH ARE WE TO KNOW SAID PETER THE MEAN PIG HASN'T BROUGHT US A PRESENT AND I'M HANGED IF HE SHALL CARRY ONE OFF +7105-2340-0022-2294: (IT'S->IS) THE ONLY THING TO DO +7105-2340-0023-2295: (WILFRID->WILFRED) WAS (LATE->LAID) IN COMING DOWN TO BREAKFAST AND HIS MANNER SHOWED PLAINLY THAT SOMETHING WAS AMISS +7105-2340-0024-2296: (IT'S->IS) AN UNPLEASANT THING TO HAVE TO SAY HE BLURTED OUT PRESENTLY BUT I'M AFRAID YOU MUST HAVE A THIEF AMONG YOUR SERVANTS SOMETHING'S BEEN TAKEN OUT OF MY PORTMANTEAU +7105-2340-0025-2297: IT WAS A LITTLE PRESENT FROM MY MOTHER AND MYSELF FOR YOUR SILVER WEDDING +7105-2340-0026-2298: I SHOULD HAVE GIVEN IT TO YOU LAST NIGHT AFTER DINNER ONLY IT HAPPENED TO BE A (CREAM->QUEEN) JUG AND YOU SEEMED ANNOYED AT HAVING SO MANY DUPLICATES SO I FELT RATHER AWKWARD (ABOUT->OF A) GIVING YOU ANOTHER +7105-2340-0027-2299: (THE->THIS) SNATCHER HAD BEEN AN ORPHAN (THESE->THIS) MANY YEARS +7105-2340-0028-2300: LADY (ERNESTINE PIGEONCOTE->ERNESTON BEECH AND COLD) HIS MOTHER MOVED IN CIRCLES WHICH WERE ENTIRELY BEYOND THEIR COMPASS OR AMBITIONS AND THE (SON->SUN) WOULD PROBABLY ONE DAY BE AN AMBASSADOR +7105-2340-0029-2301: HUSBAND AND WIFE LOOKED BLANKLY AND DESPERATELY AT ONE ANOTHER +7105-2340-0030-2302: IT WAS MISSUS PETER WHO ARRIVED FIRST AT AN INSPIRATION HOW DREADFUL TO THINK THERE ARE THIEVES IN THE HOUSE WE KEEP THE DRAWING ROOM LOCKED UP AT NIGHT OF COURSE BUT ANYTHING MIGHT BE CARRIED OFF WHILE WE ARE AT BREAKFAST +7105-2340-0031-2303: SHE ROSE AND WENT OUT HURRIEDLY AS THOUGH TO ASSURE HERSELF THAT THE DRAWING ROOM WAS NOT BEING STRIPPED OF ITS SILVERWARE AND RETURNED A MOMENT LATER BEARING A CREAM (JUG->CHUG) IN HER HANDS +7105-2340-0032-2304: THE (PIGEONCOTES->PIGEON CORDS) HAD TURNED PALER THAN EVER MISSUS PETER HAD A FINAL INSPIRATION +7105-2340-0033-2305: (PETER->EITHER) DASHED OUT OF THE ROOM WITH GLAD RELIEF HE HAD LIVED SO LONG DURING THE LAST FEW MINUTES THAT A GOLDEN WEDDING SEEMED WITHIN MEASURABLE DISTANCE +7105-2340-0034-2306: MISSUS (PETER->BEATER) TURNED TO HER GUEST WITH CONFIDENTIAL (COYNESS->KINDNESS) +7105-2340-0035-2307: (PETER'S->PETER IS) LITTLE WEAKNESS (IT RUNS->EACH ONE'S) IN THE FAMILY GOOD LORD +7105-2340-0036-2308: DO YOU MEAN TO SAY HE'S A (KLEPTOMANIAC->CLAPTOMANIA) LIKE COUSIN SNATCHER +7105-2340-0037-2309: (BRAVE->PRETTY) LITTLE WOMAN SAID PETER WITH A GASP OF RELIEF I COULD NEVER HAVE DONE IT +7902-96591-0000-0: (I AM->AND) FROM THE CUTTER LYING OFF THE COAST +7902-96591-0001-1: DON'T CRY HE SAID I WAS OBLIGED TO COME 
+7902-96591-0002-2: AND AND YOU HAVE NOT FOUND OUT ANYTHING CAME IN QUICK FRIGHTENED TONES +7902-96591-0003-3: I WISH YOU WOULD BELIEVE ME THAT I AM IN AS GREAT TROUBLE ABOUT IT AS YOU ARE +7902-96591-0004-4: THAT MY FATHER SIR RISDON (GRAEME HAS->GRAHAME) SMUGGLED GOODS HERE +7902-96591-0005-5: HE COULD NOT HELP IT HE HATES THE SMUGGLERS YOU SHALL NOT TELL +7902-96591-0006-6: PRAY PRAY SAY YOU WILL NOT (ARCHY->ARCHIE) WAS SILENT +7902-96591-0007-7: THEN AS (ARCHY->ARCHIE) STOOD IN THE DARK LITERALLY AGHAST WITH ASTONISHMENT HE HEARD THE FAINT RUSTLING ONCE MORE AND AGAIN ALL WAS SILENT +7902-96591-0008-8: HE LAUGHED BUT IT WAS A CURIOUS KIND OF LAUGH FULL OF VEXATION INJURED (AMOUR PROPRE->AMORE A PROPER) AS THE FRENCH CALL OUR LOVE OF OUR OWN DIGNITY OF WHICH (ARCHIBALD RAYSTOKE->ARQUEBALD RAY STROKE) IN THE FULL FLUSH OF HIS YOUNG BELIEF IN HIS IMPORTANCE AS A BRITISH OFFICER HAD A PRETTY GOOD STOCK +7902-96591-0009-9: (IT->AND) ALL COMES OF DRESSING UP IN THIS STUPID WAY LIKE A ROUGH FISHER LAD +7902-96591-0010-10: COLD WATER CAME ON THIS IDEA DIRECTLY AS HE RECALLED THE FACT THAT THE DARKNESS WAS INTENSE AND CELIA COULD NOT HAVE SEEN HIM +7902-96591-0011-11: I'LL SOON SHOW THEM THAT I AM NOT GOING TO BE PLAYED WITH +7902-96591-0012-12: FOR IT SUDDENLY OCCURRED TO HIM THAT HE WAS NOT ONLY A PRISONER BUT A PRISONER IN THE POWER OF A VERY RECKLESS SET OF PEOPLE WHO WOULD STOP AT NOTHING +7902-96591-0013-13: NO HE THOUGHT TO HIMSELF I DON'T BELIEVE THEY WOULD KILL ME BUT THEY WOULD KNOCK ME ABOUT +7902-96591-0014-14: THE (KICK HE->KICKIE) HAD RECEIVED WAS A FORETASTE OF WHAT HE MIGHT EXPECT AND AFTER A LITTLE CONSIDERATION HE CAME TO THE CONCLUSION THAT HIS DUTY WAS TO ESCAPE AND GET BACK TO THE CUTTER AS QUICKLY AS HE COULD +7902-96591-0015-15: TO DO THIS HE MUST SCHEME LIE HID TILL MORNING (THEN->THAN) MAKE FOR THE NEAREST POINT AND SIGNAL FOR HELP UNLESS A BOAT'S CREW WERE ALREADY SEARCHING FOR HIM HOW TO ESCAPE +7902-96591-0016-16: THE WINDOW WAS BARRED BUT HE WENT TO IT AND TRIED THE BARS ONE BY ONE TO FIND THEM ALL SOLIDLY FITTED INTO THE STONE SILL +7902-96591-0017-17: NEXT MOMENT AS HE FELT HIS WAY ABOUT HIS HAND TOUCHED AN OLD FASHIONED MARBLE MANTELPIECE FIREPLACE CHIMNEY +7902-96591-0018-18: YES IF OTHER WAYS FAILED HE COULD ESCAPE UP THE CHIMNEY +7902-96591-0019-19: NO THAT WAS TOO BAD HE (COULD NOT->CANNOT) DO THAT +7902-96591-0020-20: SYMPATHY AND PITY FOR THE DWELLERS IN THE (HOZE->HOSE) WERE COMPLETELY GONE NOW AND HE SET HIS TEETH FAST AND MENTALLY CALLED HIMSELF A WEAK IDIOT FOR EVER THINKING ABOUT SUCH PEOPLE +7902-96591-0021-21: A NARROW TABLE AGAINST THE WALL IN TWO PLACES +7902-96591-0022-22: HE WENT AND TRIED TO FORCE HIS HEAD THROUGH RECALLING AS HE DID THAT WHERE A PERSON'S HEAD WOULD GO THE REST OF THE BODY WOULD PASS +7902-96591-0023-23: BUT THERE WAS NO CHANCE FOR HIS BODY THERE THE HEAD WOULD NOT GO FIRST +7902-96591-0024-24: A FELLOW WHO WAS SHUT UP IN (PRISON->PRISONED) FOR LIFE MIGHT DO IT HE SAID BUT NOT IN A CASE LIKE THIS +7902-96592-0000-25: SURE (YOU'VE LOOKED->YOU LOOK) ROUND EVERYWHERE BOY YES FATHER QUITE +7902-96592-0001-26: I'M GOING HOME TO BREAKFAST +7902-96592-0002-27: SHALL I COME (TOO->TO) FATHER NO +7902-96592-0003-28: STOP HERE TILL SIR RISDON COMES DOWN AND TELL HIM I'M VERY SORRY THAT WE SHOULD HAVE CLEARED OUT LAST NIGHT ONLY A BORN FOOL SAW JERRY (NANDY'S LOBSTER BOAT->ANDY'S LOBSTERBOAT) COMING INTO THE COVE AND CAME RUNNING TO SAY IT WAS A PARTY FROM THE CUTTER YES FATHER +7902-96592-0004-29: TELL HIM NOT TO BE UNEASY TIS ALL RIGHT 
AND I'LL HAVE EVERYTHING CLEAR AWAY TO NIGHT +7902-96592-0005-30: THE DULL SOUND OF DEPARTING STEPS AND A LOW WHISTLING SOUND COMING DOWN THROUGH THE SKYLIGHT WINDOW INTO THE CABIN WHERE (ARCHY RAYSTOKE->ARCHIE RAYSTROKE) LAY WITH HIS HEAVY EYELIDS PRESSED DOWN BY SLEEP +7902-96592-0006-31: WHAT A QUEER DREAM HE THOUGHT TO HIMSELF +7902-96592-0007-32: BUT HOW QUEER FOR MISTER (GURR->GIRT) TO BE TALKING LIKE THAT TO ANDREW (TEAL->TEALE) THE BOY WHO (HELPED->HELPS) THE COOK +7902-96592-0008-33: AND WHY DID ANDY CALL MISTER (GURR FATHER->GERFATHER) +7902-96592-0009-34: THERE WAS AN INTERVAL OF THINKING OVER THIS (KNOTTY->NAUGHTY) QUESTION DURING WHICH THE LOW WHISTLING WENT ON +7902-96592-0010-35: AND (I'M HUNGRY->UNHUNGRY) TOO (TIME I->TELL IT) WAS UP I SUPPOSE +7902-96592-0011-36: NO HE WAS NOT DREAMING FOR HE WAS LOOKING OUT ON THE SEA OVER WHICH A FAINT MIST HUNG LIKE WREATHS OF SMOKE +7902-96592-0012-37: WHAT DID THEY SAY FALSE ALARM TELL SIR (RISDON->RISDEN) THEY WOULD CLEAR ALL AWAY TO NIGHT SEE IF ANYTHING HAD BEEN LEFT ABOUT LOBSTER BOAT +7902-96592-0013-38: ONCE OUT OF THAT ROOM HE COULD (RAN->RUN) AND BY DAYLIGHT THE SMUGGLERS (DARE->DARED) NOT HUNT HIM DOWN +7902-96592-0014-39: OH THOSE BARS HE MENTALLY EXCLAIMED AND HE WAS ADVANCING (TOWARD->TOWARDS) THEM WHEN JUST AS HE DREW NEAR THERE WAS A RUSTLING NOISE UNDER THE WINDOW A COUPLE OF HANDS SEIZED THE BARS THERE WAS A SCRATCHING OF BOOT TOES AGAINST STONE WORK AND RAM'S FACE APPEARED TO GAZE INTO THE ROOM BY INTENTION BUT INTO THE ASTONISHED COUNTENANCE OF THE YOUNG MIDSHIPMAN INSTEAD +7902-96592-0015-40: (RAM->ROOM) WAS THE FIRST TO RECOVER FROM HIS SURPRISE +7902-96592-0016-41: HULLO HE SAID WHO ARE YOU +7902-96592-0017-42: GO ROUND AND OPEN THE DOOR I WAS SHUT IN LAST NIGHT BY MISTAKE +7902-96592-0018-43: I SAW YOU LAST NIGHT AND WONDERED WHOSE BOY (YOU->HE) WAS +7902-96592-0019-44: IT WAS (YOU->YOUR) FATHER KICKED FOR SHIRKING AND MY WELL I HARDLY KNOWED YOU +7902-96592-0020-45: NONSENSE +7902-96592-0021-46: WON'T DO SAID RAM GRINNING +7902-96592-0022-47: THINK I DON'T KNOW YOU MISTER (ORFICER->ORFASTER) +7902-96592-0023-48: (WON'T->WELL) DO SAID RAM QUICKLY I KNOW YOU +7902-96592-0024-49: (BEEN PLAYING->COMPLYING) THE SPY THAT'S WHAT YOU'VE BEEN DOING WHO LOCKED YOU IN +7902-96592-0025-50: (ARCHY->ARCHIE) STEPPED BACK TO THE DOOR LISTENING BUT THERE WAS NOT A SOUND +7902-96592-0026-51: HE HAS GONE TO GIVE THE ALARM THOUGHT THE PRISONER AND HE LOOKED EXCITEDLY ROUND FOR A WAY OF ESCAPE +7902-96592-0027-52: NOTHING BUT THE CHIMNEY PRESENTED ITSELF +7902-96592-0028-53: A HAPPY INSPIRATION HAD COME AND PLACING ONE HAND UPON HIS (BREAST->CHEST) HE THRUST IN THE OTHER GAVE A TUG AND DREW OUT HIS LITTLE CURVED DIRK GLANCED AT THE EDGE RAN TO THE WINDOW AND BEGAN TO CUT (AT->IT) ONE OF THE BARS (LABOUR->LABOR) IN VAIN +7902-96592-0029-54: HE DIVIDED THE PAINT AND PRODUCED A FEW SQUEAKS AND GRATING SOUNDS AS HE (REALISED->REALIZED) THAT THE ATTEMPT WAS MADNESS +7902-96592-0030-55: THE RESULT WAS NOT VERY SATISFACTORY BUT SUFFICIENTLY SO TO MAKE HIM ESSAY THE BAR OF THE WINDOW ONCE MORE PRODUCING A GRATING (EAR ASSAILING->IRRES SELLING) SOUND AS HE FOUND THAT NOW HE DID MAKE A LITTLE IMPRESSION SO LITTLE THOUGH THAT THE PROBABILITY WAS IF HE KEPT ON WORKING WELL FOR TWENTY FOUR HOURS HE WOULD NOT GET THROUGH +7902-96592-0031-56: BUT AT THE END OF FIVE MINUTES HE STOPPED AND THRUST BACK THE DIRK INTO ITS SHEATH +7902-96592-0032-57: NO I CAN'T PART WITH THAT HA HA (HA->*) LAUGHED THE BOY JEERINGLY +7902-96592-0033-58: BUT (I'LL->ALL) 
YES I'LL GIVE YOU A GUINEA IF YOU WILL LET ME OUT +7902-96592-0034-59: (GUINEA SAID->GUINEAS OF) THE BOY THINK (I'D->I'LL) DO IT FOR A GUINEA WELL THEN (TWO->TOO) +7902-96592-0035-60: BE QUICK THERE'S A GOOD FELLOW I WANT TO GET AWAY AT ONCE +7902-96592-0036-61: NOT YOU ONLY A SHAM +7902-96592-0037-62: WHY YOUR CLOTHES DON'T FIT YOU AND YOUR CAP'S PUT ON ALL (SKEW REW->SKIRO) +7902-96592-0038-63: NEVER MIND ABOUT THAT LET ME OUT OF THIS PLACE +7902-96592-0039-64: I TOLD YOU A FISHER BOY CRIED (ARCHY->ARCHIE) IMPATIENTLY BUT TRYING NOT TO OFFEND HIS VISITOR WHO POSSESSED THE POWER OF CONFERRING FREEDOM BY SPEAKING SHARPLY +7902-96592-0040-65: NOT YOU LOOK LIKE A WILD BEAST IN A CAGE LIKE A MONKEY YOU INSOLENT +7902-96592-0041-66: (ARCHY->ARCHIE) CHECKED HIMSELF AND THE BOY LAUGHED +7902-96592-0042-67: IT WAS YOUR TURN YESTERDAY IT'S MINE TO DAY WHAT A GAME +7902-96592-0043-68: YOU LAUGHED AND (FLEERED->FLARED) AT ME WHEN I WAS ON THE CUTTER'S DECK +7902-96592-0044-69: I SAY YOU DO LOOK (*->LIKE) A (RUM UN->ROMAN) JUST LIKE A BIG MONKEY IN A SHOW +7902-96592-0045-70: RAM SHOWED HIS WHITE TEETH AS HE BURST OUT WITH A LONG LOW FIT OF LAUGHTER +7902-96592-0046-71: YOU (ROPE'S END->HOPES AND) ME HE SAID +7902-96592-0047-72: WHY I COULD TIE YOU UP IN A KNOT AND HEAVE YOU OFF THE CLIFF ANY DAY WHAT A GAME +7902-96592-0048-73: BIT OF (A MIDDY->AMITY) FED ON (*->A) SALT TACK AND (WEEVILLY->WEEVILY) BISCUIT TALK OF GIVING ME (ROPE'S END->ROPES AND) +7902-96592-0049-74: ONCE MORE WILL YOU COME AND LET ME OUT NO +7902-96592-0050-75: TO HIS ASTONISHMENT THE BOY DID NOT FLINCH BUT THRUST HIS OWN ARMS THROUGH PLACING (THEM->HIM) ABOUT THE MIDDY'S WAIST CLENCHING HIS (HANDS->HAND) BEHIND AND UTTERING A SHARP WHISTLE +7902-96594-0000-76: (SEEMED IN GOOD SPIRITS->SEEMING AT SPEAR'S) LAST NIGHT MISTER (GURR->GARR) EH +7902-96594-0001-77: YES SIR BUT HE MAY TURN UP ON THE CLIFF AT ANY MOMENT +7902-96594-0002-78: YES MEN QUITE READY YES SIR +7902-96594-0003-79: (THAT'S RIGHT->THE THREAT) OF COURSE (WELL ARMED->WILL ALARMED) +7902-96594-0004-80: SOON AS THE SIGNAL COMES WE SHALL PUSH OFF +7902-96594-0005-81: AWKWARD (BIT O->BITTER) COUNTRY SIR SIX MILES ROW BEFORE YOU CAN FIND A PLACE TO LAND +7902-96594-0006-82: SO SHALL WE YET SIR +7902-96594-0007-83: YOU DON'T THINK MISTER (GURR->GORE) THAT THEY WOULD DARE TO INJURE HIM IF HE WAS SO UNLUCKY AS TO BE CAUGHT +7902-96594-0008-84: WELL SIR SAID THE MASTER HESITATING SMUGGLERS ARE SMUGGLERS +7902-96594-0009-85: CERTAINLY SIR SMUGGLERS ARE SMUGGLERS (INDEED->INDE) +7902-96594-0010-86: (BEG->THEY) PARDON SIR DIDN'T MEAN ANY HARM +7902-96594-0011-87: I'M GETTING VERY ANXIOUS ABOUT MISTER (RAYSTOKE->RAYSTROKE) START AT ONCE SIR +7902-96594-0012-88: NO WAIT ANOTHER (*->AND) HALF HOUR +7902-96594-0013-89: VERY ILL ADVISED THING TO DO +7902-96594-0014-90: (THEN->THAT) I MUST REQUEST THAT YOU WILL NOT MAKE IT AGAIN VERY TRUE +7902-96594-0015-91: (AWK WARD->AWKWARD) MISTER (GURR->GARR) AWKWARD +7902-96594-0016-92: YES SIR OF COURSE +7902-96594-0017-93: SAY (AWK WARD->AWKWARD) IN (*->THE) FUTURE NOT (AWK'ARD->UPWARD) +7902-96594-0018-94: I MEAN (ALL ALONE->OUR OWN) BY MYSELF SIR +7902-96594-0019-95: WHAT FOR THERE (AREN'T A->ARE TO) PUBLIC HOUSE FOR TEN MILES DIDN'T MEAN THAT +7902-96594-0020-96: THEN WHAT DID YOU MEAN SPEAK OUT AND DON'T DO THE DOUBLE SHUFFLE ALL OVER MY CLEAN DECK NO SIR +7902-96594-0021-97: (HOPPING->HAVING) ABOUT (LIKE A->THE GOOD) CAT ON HOT BRICKS +7902-96594-0022-98: NOW THEN WHY DO YOU WANT TO GO ASHORE +7902-96594-0023-99: (BEG->THEY) PARDON DIDN'T 
MEAN (NOWT->NOW) SIR SAID THE SAILOR TOUCHING HIS FORELOCK +7902-96594-0024-100: YES SIR SAID THE MAN HUMBLY SHALL I GO AT ONCE SIR +7902-96594-0025-101: NO WAIT +7902-96594-0026-102: (KEEP A->HE WAS) SHARP LOOK OUT ON THE CLIFF TO SEE IF MISTER (RAYSTOKE->RAYSTROKE) IS MAKING SIGNALS FOR A BOAT +7902-96594-0027-103: HE SWUNG ROUND WALKED (AFT->OFF) AND BEGAN SWEEPING (THE SHORE->ASHORE) AGAIN WITH HIS GLASS WHILE THE MASTER AND DICK EXCHANGED GLANCES WHICH MEANT A GREAT DEAL +7902-96594-0028-104: AT LAST THE LITTLE LIEUTENANT COULD BEAR THE ANXIETY NO LONGER +7902-96594-0029-105: (PIPE->PEG) AWAY (THE MEN TO->THEM INTO) THAT BOAT THERE HE SAID AND AS THE CREW SPRANG IN +7902-96594-0030-106: (NOW->NO) MISTER GURR HE SAID I'M ONLY GOING TO SAY ONE THING TO YOU IN THE WAY OF INSTRUCTIONS YES SIR +7902-96594-0031-107: BEG PARDON SIR SAID THE MASTER DEPRECATINGLY +7902-96594-0032-108: STEADY MY (LADS->LAD) STEADY CRIED THE MASTER KEEP STROKE AND THEN HE BEGAN TO MAKE PLANS AS TO HIS FIRST PROCEEDINGS ON GETTING ASHORE +7902-96595-0000-109: SAY (MESTER GURR->MISTER GIRK) SAID DICK AFTER ONE OF THESE SEARCHES HE WOULDN'T RUN AWAY WHAT +7902-96595-0001-110: MISTER RAYSTOKE SIR DON'T BE A FOOL +7902-96595-0002-111: WHAT (CHUCKED HIM OFF->SAID TO MORVE) YONDER +7902-96595-0003-112: (GURR->GIRK) GLANCED ROUND TO SEE IF THE MEN WERE LOOKING AND THEN SAID RATHER HUSKILY (BUT->BE) KINDLY +7902-96595-0004-113: AH EJACULATED DICK SADLY +7902-96595-0005-114: SAY (MESTER GURR SIR->MISTER GURSER) WHICH THANKFUL I AM (TO->FOR) YOU FOR SPEAKING SO BUT YOU DON'T REALLY THINK AS HE HAS COME TO HARM +7902-96595-0006-115: I HOPE NOT DICK I (HOPE NOT->OPEN IT) BUT SMUGGLERS DON'T STAND AT ANYTHING SOMETIMES +7902-96595-0007-116: I DO ASSURE YOU THERE'S NOTHING HERE BUT WHAT YOU MAY SEE +7902-96595-0008-117: IF (YOU'D->YOU) LET ME FINISH YOU'D KNOW SAID (GURR GRUFFLY->GRIGGLY) ONE OF OUR BOYS IS MISSING SEEN (HIM->EM) UP HERE +7902-96595-0009-118: BOY (BOUT->ABOUT) SEVENTEEN WITH A RED CAP NO SIR INDEED I'VE NOT +7902-96595-0010-119: DON'T KNOW AS HE HAS BEEN SEEN ABOUT HERE DO YOU SAID (GURR->GIRL) LOOKING AT HER SEARCHINGLY NO SIR +7902-96595-0011-120: IF SHE KNEW EVIL HAD COME TO THE POOR LAD HER FACE WOULD TELL TALES LIKE PRINT +7902-96595-0012-121: I (SAID A LAD BOUT->STOOD ALOUD ABOUT) SEVENTEEN (IN->AND) A RED (CAP LIKE->CAPLICH) YOURS SAID (GURR->GREW) VERY SHORTLY +7902-96595-0013-122: THE MAN SHOOK HIS HEAD AND STARED AS IF HE DIDN'T HALF UNDERSTAND THE DRIFT OF WHAT WAS SAID +7902-96595-0014-123: HERE MY LAD WHERE'S YOUR MASTER +7902-96595-0015-124: EH I SAY (WHERE'S->WAS) YOUR MASTER +7902-96595-0016-125: (GURR->GERT) TURNED AWAY IMPATIENTLY AGAIN AND (SIGNING->SUNNING) TO HIS MEN TO FOLLOW THEY ALL BEGAN TO TRAMP UP (THE->A) STEEP (TRACK->CHECK) LEADING TOWARD THE (HOZE->HOSE) WITH THE (RABBITS->RABBIT'S) SCUTTLING AWAY AMONG THE (FURZE->FIRS) AND SHOWING THEIR WHITE COTTONY TAILS FOR A MOMENT AS THEY DARTED DOWN INTO THEIR HOLES +7902-96595-0017-126: I DUNNO MUTTERED DICK AND A (MAN->MEN) CAN'T BE SURE +7902-96595-0018-127: (GURR->GER) SALUTED AND STATED HIS BUSINESS WHILE THE BARONET WHO HAD TURNED (SALLOWER->SALARY) AND MORE CAREWORN THAN HIS LOT DREW A BREATH (*->OF) FULL OF RELIEF ONE OF YOUR SHIP BOYS HE SAID +7902-96595-0019-128: A LAD LOOKING LIKE A COMMON SAILOR AND WEARING A RED CAP NO SAID SIR RISDON +7902-96595-0020-129: I HAVE SEEN NO ONE ANSWERING TO THE DESCRIPTION HERE +7902-96595-0021-130: (BEG PARDON SIR BUT CAN YOU->BIG PARTISER BECAME) AS (A->*) GENTLEMAN ASSURE ME THAT HE IS NOT 
HERE CERTAINLY SAID SIR RISDON +7902-96595-0022-131: SURELY CRIED SIR RISDON EXCITEDLY +7902-96595-0023-132: SIR (RISDON->RICHMOND) WAS SILENT +7902-96595-0024-133: LADY (GRAEME->GRAHAM) LOOKED GHASTLY +7902-96595-0025-134: YOU DO NOT KNOW NO +7975-280057-0000-1008: THESE HATREDS WERE SOON TO MAKE TROUBLE FOR ME OF WHICH I HAD NEVER DREAMED +7975-280057-0001-1009: HENRY WASHINGTON YOUNGER MY FATHER REPRESENTED JACKSON COUNTY THREE TIMES IN THE LEGISLATURE AND WAS ALSO (*->A) JUDGE OF THE COUNTY COURT +7975-280057-0002-1010: MY MOTHER WHO WAS (BURSHEBA FRISTOE->PERCEIVER FOR STOVE) OF INDEPENDENCE WAS (THE->A) DAUGHTER OF RICHARD (FRISTOE->FRISTOW) WHO FOUGHT UNDER GENERAL ANDREW JACKSON AT NEW ORLEANS JACKSON COUNTY HAVING BEEN SO NAMED (AT->AND) MY GRANDFATHER (FRISTOE'S INSISTENCE->FIRST DOZE INSISTANTS) +7975-280057-0003-1011: I CANNOT REMEMBER WHEN I DID NOT KNOW HOW TO SHOOT +7975-280057-0004-1012: MY BROTHER JAMES WAS BORN JANUARY (FIFTEENTH->FIFTEEN) EIGHTEEN FORTY EIGHT JOHN (IN->AND) EIGHTEEN FIFTY ONE AND ROBERT IN DECEMBER EIGHTEEN FIFTY THREE +7975-280057-0005-1013: MY ELDEST BROTHER RICHARD DIED IN EIGHTEEN SIXTY +7975-280057-0006-1014: MY FATHER WAS IN THE EMPLOY OF THE UNITED STATES GOVERNMENT AND HAD THE (MAIL->MALE) CONTRACT FOR FIVE HUNDRED MILES +7975-280057-0007-1015: HE HAD STARTED BACK TO HARRISONVILLE IN A BUGGY BUT WAS WAYLAID ONE MILE SOUTH OF (WESTPORT->WESTWARD) A SUBURB OF KANSAS CITY AND BRUTALLY MURDERED FALLING OUT OF HIS BUGGY INTO THE ROAD WITH THREE MORTAL BULLET WOUNDS +7975-280057-0008-1016: (MISSUS->MISS) WASHINGTON (WELLS->WALES) AND HER SON SAMUEL ON THE ROAD HOME FROM KANSAS CITY TO (LEE'S->LEE) SUMMIT RECOGNIZED THE BODY AS THAT OF MY FATHER +7975-280057-0009-1017: (MISSUS WELLS STAYED->MUST WELL STAY) TO GUARD THE REMAINS (WHILE->WHETHER) HER SON CARRIED THE NEWS OF THE MURDER TO COLONEL PEABODY OF THE FEDERAL COMMAND WHO WAS THEN IN CAMP AT KANSAS CITY +7975-280057-0010-1018: (MISSUS MC CORKLE->MISS MICROCLE) JUMPED FROM THE WINDOW OF THE HOUSE AND ESCAPED +7975-280057-0011-1019: AS THE RAIDERS (LEFT->LIVED) ONE OF THEM SHOUTED +7975-280057-0012-1020: NOW (OLD->*) LADY CALL ON YOUR PROTECTORS WHY DON'T YOU CALL (ON COLE->AND CO) YOUNGER NOW +7975-280057-0013-1021: EVERY KNOT REPRESENTED A HUMAN LIFE +7975-280057-0014-1022: BUT SHE FAILED TO (FIND THE->FAMILY) COMFORT SHE SOUGHT FOR ANNOYANCES CONTINUED IN A MORE AGGRAVATED FORM +7975-280057-0015-1023: TWO MONTHS AFTER (THIS->THE) INCIDENT THE SAME PERSECUTORS AGAIN ENTERED OUR HOME IN THE (DEAD->DAY) OF THE NIGHT AND AT THE POINT OF A PISTOL TRIED TO FORCE MY MOTHER TO SET FIRE TO HER OWN HOME +7975-280057-0016-1024: I HAVE ALWAYS FELT THAT THE EXPOSURE TO WHICH SHE WAS SUBJECTED ON THIS CRUEL JOURNEY TOO HARD EVEN FOR A MAN TO TAKE WAS (THE->A) DIRECT CAUSE OF HER DEATH +7975-280057-0017-1025: FROM HARRISONVILLE SHE WENT TO (WAVERLY->WAVERLEY) WHERE SHE WAS (HOUNDED->HANDY) CONTINUALLY +7975-280057-0018-1026: ONE OF THE CONDITIONS UPON WHICH HER LIFE WAS SPARED WAS THAT SHE WOULD REPORT (AT LEXINGTON->IT LESSINGTON) WEEKLY +7975-280057-0019-1027: ONE OF MY OLD SCHOOL TEACHERS WHOM I HAVE NEVER SEEN SINCE THE SPRING (OR->OF) SUMMER OF EIGHTEEN SIXTY TWO IS STEPHEN B ELKINS SENATOR FROM WEST VIRGINIA +7975-280057-0020-1028: WHEN I WAS (TAKEN->TAKING) PRISONER I EXPECTED TO BE SHOT WITHOUT CEREMONY +7975-280063-0000-1058: WE TOOK THE OATH PERHAPS THREE HUNDRED OF US DOWN ON LUTHER MASON'S FARM A FEW MILES FROM WHERE I NOW (WRITE->RIDE) WHERE COLONEL (HAYS->HAYES) HAD ENCAMPED AFTER INDEPENDENCE 
+7975-280063-0001-1059: (BOONE MUIR->BOOM YOU) AND MYSELF (MET->MAKE) COFFEE AND THE REST BELOW ROSE HILL ON GRAND RIVER +7975-280063-0002-1060: ACCORDINGLY I WAS SHORTLY AWAKENED TO ACCOMPANY HIM (TO LONE->THE LONG) JACK WHERE HE WOULD PERSONALLY MAKE KNOWN THE SITUATION TO THE OTHER COLONELS +7975-280063-0003-1061: FOSTER HAD NEARLY ONE THOUSAND (CAVALRYMEN->CAVERNMENT) AND TWO PIECES OF (RABB'S->RABBS) INDIANA BATTERY THAT HAD ALREADY MADE FOR ITSELF A NAME FOR HARD FIGHTING +7975-280063-0004-1062: (COME IN->COMMONED) COLONEL (HAYS->HAYES) EXCLAIMED COLONEL (COCKRELL->COCKLE) +7975-280063-0005-1063: I THINK HE'LL BE (RATHER TOUGH MEAT->READY TO HAVE MEET) FOR BREAKFAST I REPLIED HE MIGHT BE ALL (RIGHT->RIPE) FOR DINNER +7975-280063-0006-1064: (JACKMAN->JACK WENT) WITH A PARTY OF THIRTY SEASONED MEN CHARGED THE INDIANA GUNS AND CAPTURED THEM BUT MAJOR FOSTER LED A GALLANT CHARGE AGAINST THE INVADERS AND (RECAPTURED->RE CAPTURED) THE PIECES +7975-280063-0007-1065: WE WERE OUT OF AMMUNITION AND WERE HELPLESS HAD THE FIGHT BEEN PRESSED +7975-280063-0008-1066: THEY DID MARK MY CLOTHES IN ONE OR TWO PLACES HOWEVER +7975-280063-0009-1067: MAJOR FOSTER IN A LETTER TO (JUDGE->JOE) GEORGE (M BENNETT->I INVITED) OF (MINNEAPOLIS->MANY APOLIS) SAID +7975-280063-0010-1068: I WAS TOLD BY SOME OF OUR MEN FROM THE WESTERN BORDER OF THE STATE THAT THEY RECOGNIZED (THE->A) DARING YOUNG (RIDER AS COLE->RATTERAS COAL) YOUNGER +7975-280063-0011-1069: ABOUT NINE THIRTY A M I WAS SHOT DOWN +7975-280063-0012-1070: THE (WOUNDED->WOUNDS) OF BOTH FORCES WERE GATHERED UP AND WERE PLACED IN HOUSES +7975-280076-0000-1029: ALTHOUGH EVERY BOOK (PURPORTING->REPORTING) TO NARRATE THE LIVES OF THE YOUNGER BROTHERS (HAS->IS) TOLD OF THE LIBERTY ROBBERY AND IMPLIED THAT WE HAD A PART IN IT THE YOUNGERS WERE NOT SUSPECTED AT THAT TIME NOR FOR A LONG TIME AFTERWARD +7975-280076-0001-1030: (IT->HE) WAS CLAIMED BY PEOPLE OF LIBERTY THAT THEY POSITIVELY RECOGNIZED AMONG THE ROBBERS (OLL SHEPHERD RED MONKERS->ALL SHEPARD REDMOCKERS) AND BUD PENCE WHO HAD SEEN SERVICE WITH (QUANTRELL->QUANTREL) +7975-280076-0002-1031: THIS (RAID->RAY) WAS ACCOMPANIED BY (BLOODSHED JUDGE->BLOTCHYARD JOSE) MC (LAIN->LANE) THE BANKER BEING SHOT THOUGH NOT FATALLY +7975-280076-0003-1032: (NO->THOUGH) WARRANT WAS ISSUED FOR THE YOUNGERS BUT SUBSEQUENT HISTORIANS HAVE INFERENTIALLY AT LEAST ACCUSED US OF TAKING PART BUT AS I SAID BEFORE THERE IS NO TRUTH IN THE ACCUSATION +7975-280076-0004-1033: JUNE THIRD EIGHTEEN SEVENTY ONE (OBOCOCK BROTHERS->OBEY BROTHER'S) BANK AT (CORYDON IOWA->CROYDEN HOUR) WAS ROBBED OF FORTY THOUSAND DOLLARS BY SEVEN MEN IN BROAD DAYLIGHT +7975-280076-0005-1034: IT WAS (CHARGED->CHARGE) THAT (ARTHUR MC COY->OFTEN MA KOY) OR A (C MC->SEA MAC) COY AND MYSELF HAD BEEN PARTICIPANTS IN THE GAD'S HILL AFFAIR AND THE TWO STAGE ROBBERIES +7975-280076-0006-1035: THE PARTS OF THIS LETTER NOW RELEVANT ARE AS FOLLOWS +7975-280076-0007-1036: YOU MAY USE THIS LETTER IN YOUR OWN WAY +7975-280076-0008-1037: I WILL GIVE YOU THIS OUTLINE AND SKETCH OF MY WHEREABOUTS AND ACTIONS AT THE TIME OF CERTAIN ROBBERIES WITH WHICH I AM CHARGED +7975-280076-0009-1038: (AT->IT'S) THE TIME OF THE (GALLATIN->GALLOP AND) BANK ROBBERY I WAS GATHERING CATTLE (IN ELLIS->AND ILLIS) COUNTY TEXAS (CATTLE THAT I BOUGHT->CATTLETTA BROUGHT) FROM (PLEAS->PLACE) TAYLOR AND RECTOR +7975-280076-0010-1039: THIS CAN BE PROVED BY BOTH OF THEM ALSO BY (SHERIFF BARKLEY->SIR PARKLEY) AND FIFTY OTHER RESPECTABLE MEN OF THAT COUNTY +7975-280076-0011-1040: I BROUGHT THE CATTLE 
(TO->THE) KANSAS (THAT->SET) FALL AND REMAINED IN SAINT CLAIR COUNTY UNTIL FEBRUARY +7975-280076-0012-1041: I THEN WENT TO (ARKANSAS->OUR CONSOLE) AND (RETURNED->RETURN) TO SAINT CLAIR COUNTY ABOUT THE FIRST OF MAY +7975-280076-0013-1042: (I->AND) WENT TO KANSAS WHERE OUR CATTLE (WERE IN->BURNED) WOODSON COUNTY AT COLONEL (RIDGE'S->RICHES) +7975-280076-0014-1043: DURING (THE->*) SUMMER I WAS EITHER IN SAINT CLAIR (*->OR) JACKSON OR KANSAS BUT AS THERE WAS NO ROBBERY COMMITTED THAT SUMMER IT MAKES NO DIFFERENCE WHERE I WAS +7975-280076-0015-1044: (I->AND) WENT THROUGH INDEPENDENCE AND FROM THERE TO ACE (WEBB'S->WEBBS) +7975-280076-0016-1045: THERE I TOOK DINNER AND THEN WENT TO DOCTOR (L->OLD) W (TWYMAN'S->TWIMMAN) +7975-280076-0017-1046: OUR BUSINESS THERE WAS TO SEE E P WEST HE WAS NOT AT HOME BUT THE FAMILY WILL REMEMBER THAT WE WERE THERE +7975-280076-0018-1047: WE CROSSED ON THE BRIDGE (STAYED->STATE) IN THE CITY ALL NIGHT AND THE NEXT MORNING WE RODE UP (THROUGH->TO) THE CITY +7975-280076-0019-1048: (I MET->AMID) SEVERAL OF MY FRIENDS AMONG THEM WAS BOB (HUDSPETH->HUSBATH) +7975-280076-0020-1049: WE WERE NOT ON GOOD TERMS AT THE TIME NOR HAVE WE BEEN FOR SEVERAL YEARS +7975-280076-0021-1050: POOR JOHN HE HAS BEEN HUNTED DOWN AND SHOT LIKE A WILD BEAST AND NEVER WAS A BOY MORE INNOCENT +7975-280076-0022-1051: DOCTOR L (LEWIS->LOUIS) WAS HIS PHYSICIAN +7975-280076-0023-1052: THERE WERE FIFTY OR (A->*) HUNDRED PERSONS THERE WHO WILL TESTIFY IN ANY COURT THAT JOHN AND I WERE THERE +7975-280076-0024-1053: (HELVIN->HELVAN) FICKLE AND WIFE OF (GREENTON->GREENSON) VALLEY WERE ATTENDING THE SPRINGS AT THAT TIME AND EITHER OF THEM WILL TESTIFY TO THE ABOVE FOR JOHN AND I (SAT->SET) IN FRONT OF MISTER SMITH WHILE HE WAS PREACHING AND WAS IN HIS COMPANY FOR A FEW MOMENTS TOGETHER WITH HIS WIFE AND MISTER AND (MISSUS->MISS) FICKLE AFTER (*->THE) SERVICE +7975-280076-0025-1054: ABOUT THE LAST OF DECEMBER EIGHTEEN SEVENTY THREE I ARRIVED IN (CARROLL->CAROL) PARISH LOUISIANA +7975-280076-0026-1055: I STAYED THERE UNTIL THE EIGHTH OF FEBRUARY EIGHTEEN SEVENTY FOUR +7975-280076-0027-1056: I HAD NOT HEARD OF THAT WHEN I WROTE THE LETTER OF EIGHTEEN SEVENTY FOUR AND TO CORRECT ANY MISAPPREHENSION THAT MIGHT BE CREATED BY OMITTING IT I WILL SAY THAT AT (THAT->THE) TIME I WAS AT (NEOSHO->NEOSH O) KANSAS WITH A DROVE OF CATTLE WHICH I SOLD TO MAJOR (RAY->WRAYE) +7975-280076-0028-1057: IT WAS IMMEDIATELY FOLLOWING THE ROCK ISLAND ROBBERY AT (ADAIR->EIGHT AIR) IOWA THAT (THERE->THEIR) FIRST APPEARED A (DELIBERATE->DELIVERED) ENLISTMENT OF SOME LOCAL PAPERS (IN->AND) MISSOURI TO CONNECT US WITH THIS ROBBERY +7975-280084-0000-1090: I URGED ON THE BOYS (THAT->AT) WHATEVER HAPPENED WE SHOULD NOT SHOOT ANY ONE +7975-280084-0001-1091: WHEN MILLER AND I CROSSED THE BRIDGE THE THREE WERE ON SOME DRY (GOODS->GOOD) BOXES AT THE CORNER NEAR THE BANK AND AS SOON AS (THEY->I) SAW US WENT RIGHT INTO THE BANK INSTEAD OF WAITING FOR US TO GET THERE +7975-280084-0002-1092: WHEN WE CAME UP I TOLD MILLER TO SHUT THE BANK DOOR WHICH THEY HAD LEFT OPEN IN THEIR HURRY +7975-280084-0003-1093: J (S ALLEN->HELEN) WHOSE (HARDWARE STORE WAS->HARD WORKED ALWAYS) NEAR TRIED TO GO INTO THE BANK BUT MILLER ORDERED HIM AWAY AND HE RAN (AROUND->ROUND) THE CORNER SHOUTING +7975-280084-0004-1094: GET YOUR GUNS BOYS THEY'RE ROBBING THE BANK +7975-280084-0005-1095: AND I (CALLED->CALL) TO HIM TO GET INSIDE AT THE SAME TIME FIRING A PISTOL SHOT IN THE AIR AS (A->THE) SIGNAL TO THE THREE BOYS AT THE BRIDGE THAT WE HAD BEEN DISCOVERED 
+7975-280084-0006-1096: ALMOST AT THIS INSTANT I HEARD A PISTOL SHOT IN THE BANK +7975-280084-0007-1097: (CHADWELL->CHED WILL) WOODS AND JIM RODE UP AND (JOINED US->JARNDYCE) SHOUTING TO (*->THE) PEOPLE IN THE STREET TO GET INSIDE AND FIRING THEIR PISTOLS TO EMPHASIZE THEIR COMMANDS +7975-280084-0008-1098: IF ANY OF OUR PARTY SHOT HIM IT MUST HAVE BEEN WOODS +7975-280084-0009-1099: MEANTIME THE STREET WAS GETTING UNCOMFORTABLY HOT +7975-280084-0010-1100: EVERY TIME I SAW ANY ONE WITH A BEAD ON ME I WOULD DROP OFF MY HORSE AND (TRY->TROT) TO DRIVE THE SHOOTER INSIDE BUT I COULD NOT SEE IN EVERY DIRECTION +7975-280084-0011-1101: DOCTOR (WHEELER->WHALER) WHO HAD GONE UPSTAIRS IN THE HOTEL SHOT MILLER AND HE LAY DYING IN THE STREET +7975-280084-0012-1102: CHANGING HIS PISTOL TO HIS LEFT HAND BOB RAN OUT AND MOUNTED MILLER'S MARE +7975-280084-0013-1103: WHAT KEPT YOU SO LONG I ASKED PITTS +7975-280084-0014-1104: AS TO THE REST OF THE AFFAIR INSIDE THE BANK I TAKE THE ACCOUNT OF A (NORTHFIELD->NORTH FIELD) NARRATOR +7975-280084-0015-1105: WHERE'S THE MONEY OUTSIDE THE SAFE BOB ASKED +7975-280084-0016-1106: THE SHUTTERS WERE CLOSED AND THIS CAUSED BUNKER AN INSTANT'S DELAY THAT WAS ALMOST FATAL (PITTS->FITZ) CHASED HIM WITH A BULLET +7975-280084-0017-1107: THE FIRST ONE (MISSED HIM->MISTING) BUT THE SECOND WENT THROUGH HIS RIGHT SHOULDER +7975-280085-0000-1071: THAT NIGHT IT STARTED TO RAIN AND WE WORE OUT OUR HORSES +7975-280085-0001-1072: FRIDAY WE MOVED TOWARD WATERVILLE AND FRIDAY NIGHT WE CAMPED BETWEEN (ELYSIAN->THE LUCIEN) AND GERMAN LAKE +7975-280085-0002-1073: (BOB'S SHATTERED ELBOW WAS->BOB SATURDAIL BOWS) REQUIRING FREQUENT ATTENTION AND THAT NIGHT WE MADE ONLY NINE MILES AND MONDAY MONDAY NIGHT AND TUESDAY WE SPENT IN A DESERTED FARM HOUSE CLOSE TO (MANKATO->MANKADO) +7975-280085-0003-1074: THAT (DAY->THEY) A MAN NAMED (DUNNING->DARNING) DISCOVERED US AND WE TOOK HIM PRISONER +7975-280085-0004-1075: FINALLY WE ADMINISTERED TO HIM AN OATH NOT TO BETRAY OUR WHEREABOUTS UNTIL WE HAD TIME TO MAKE OUR ESCAPE AND HE AGREED NOT TO +7975-280085-0005-1076: NO SOONER HOWEVER WAS HE RELEASED THAN HE MADE (POSTHASTE->POST TASTE) INTO (MANKATO->MANKE) TO ANNOUNCE OUR PRESENCE AND IN A FEW MINUTES ANOTHER POSSE WAS LOOKING FOR US +7975-280085-0006-1077: THE WHISTLE ON THE (OIL->ORE) MILL BLEW AND WE FEARED THAT IT WAS A SIGNAL THAT HAD BEEN AGREED UPON TO ALARM THE TOWN IN CASE WE WERE OBSERVED BUT WE WERE NOT MOLESTED +7975-280085-0007-1078: HE HAD TO SLEEP WITH (IT->A) PILLOWED ON MY BREAST JIM BEING ALSO (*->A) CRIPPLED WITH A WOUND IN HIS SHOULDER AND WE COULD NOT GET MUCH SLEEP +7975-280085-0008-1079: BUT THEY SOON AFTER GOT CLOSE ENOUGH SO THAT ONE OF THEM BROKE MY WALKING STICK WITH A SHOT +7975-280085-0009-1080: WE WERE (IN SIGHT->INSIDE) OF OUR LONG (SOUGHT->SOWED) HORSES WHEN THEY CUT US OFF FROM THE ANIMALS AND OUR LAST HOPE WAS GONE +7975-280085-0010-1081: SIX (STEPPED->STEPS) TO THE FRONT SHERIFF (GLISPIN->CLISPIN) COLONEL T L (VOUGHT->WALT) B (M->AND) RICE G (A->*) BRADFORD C A (POMEROY->POMROY) AND S J SEVERSON +7975-280085-0011-1082: FORMING (IN->A) LINE FOUR PACES APART HE ORDERED THEM TO ADVANCE RAPIDLY AND CONCENTRATE THE FIRE OF THE WHOLE LINE THE INSTANT THE ROBBERS WERE DISCOVERED +7975-280085-0012-1083: MAKE FOR THE HORSES I SAID EVERY MAN FOR HIMSELF +7975-280085-0013-1084: THERE IS NO USE STOPPING TO PICK UP A COMRADE HERE (FOR->TILL) WE CAN'T GET HIM THROUGH THE LINE JUST (CHARGE->SHARS) THEM AND MAKE IT IF WE CAN +7975-280085-0014-1085: I GOT UP AS (THE->A) SIGNAL FOR THE 
CHARGE AND WE FIRED ONE VOLLEY +7975-280085-0015-1086: ONE OF THE FELLOWS IN THE OUTER (LINE->LAND) NOT BRAVE ENOUGH HIMSELF TO JOIN THE VOLUNTEERS WHO HAD COME IN TO BEAT US OUT WAS NOT DISPOSED TO BELIEVE IN THE SURRENDER AND HAD HIS GUN LEVELLED ON BOB IN SPITE OF THE HANDKERCHIEF WHICH WAS WAVING AS A FLAG OF TRUCE +7975-280085-0016-1087: (SHERIFF->SURE OF) GLISPIN OF (WATONWAN->WATERWAM) COUNTY WHO WAS TAKING BOB'S PISTOL FROM HIM WAS ALSO SHOUTING TO THE FELLOW +7975-280085-0017-1088: INCLUDING THOSE RECEIVED IN AND ON THE WAY FROM (NORTHFIELD->NORTH FIELD) I HAD ELEVEN (WOUNDS->ONES) +7975-280085-0018-1089: (AND SHERIFF->IN CHEER OF) GLISPIN'S ORDER NOT TO SHOOT WAS THE BEGINNING OF THE (PROTECTORATE->PROTECTOR) THAT MINNESOTA PEOPLE ESTABLISHED OVER US +8131-117016-0000-1303: CAPTAIN (MURDOCH->MURDOCK) +8131-117016-0001-1304: BUT MARSPORT HAD FLOURISHED ENOUGH TO KILL IT OFF +8131-117016-0002-1305: SOME OF MARS LAWS DATED FROM THE TIME WHEN (LAW ENFORCEMENT->LAWN FORCEMENT) HAD BEEN HAMPERED BY LACK OF MEN RATHER THAN BY THE TYPE OF MEN +8131-117016-0003-1306: THE (STONEWALL->STONE WALL) GANG NUMBERED PERHAPS FIVE HUNDRED +8131-117016-0004-1307: EVEN (DERELICTS AND->DEAR ALEXAM) FAILURES HAD TO EAT THERE WERE (STORES->STORIES) AND SHOPS THROUGHOUT THE DISTRICT WHICH EKED OUT SOME KIND OF A MARGINAL LIVING +8131-117016-0005-1308: THEY WERE SAFE FROM PROTECTION (RACKETEERS->RAGATIRS) THERE NONE BOTHERED TO COME SO FAR OUT +8131-117016-0006-1309: THE SHOPKEEPERS AND SOME OF THE LESS UNFORTUNATE PEOPLE THERE HAD PROTESTED LOUD ENOUGH TO REACH CLEAR BACK TO EARTH +8131-117016-0007-1310: CAPTAIN (MURDOCH->MURDOCK) WAS AN UNKNOWN FACTOR AND NOW WAS ASKING FOR MORE MEN +8131-117016-0008-1311: THE PRESSURE WAS ENOUGH TO GET THEM FOR HIM +8131-117016-0009-1312: GORDON REPORTED FOR WORK WITH A SENSE OF THE BOTTOM FALLING OUT MIXED WITH A VAGUE RELIEF +8131-117016-0010-1313: I'VE GOT A FREE HAND AND WE'RE GOING TO RUN THIS THE WAY WE WOULD ON EARTH +8131-117016-0011-1314: YOUR JOB IS TO PROTECT THE CITIZENS HERE AND THAT MEANS (EVERYONE->EVERY ONE) NOT BREAKING THE LAWS WHETHER YOU FEEL LIKE IT OR NOT NO GRAFT +8131-117016-0012-1315: THE FIRST MAN MAKING A (SHAKEDOWN->SHAKE DOWN) WILL GET THE SAME TREATMENT WE'RE GOING TO USE ON THE (STONEWALL->STONE WALL) BOYS YOU'LL GET DOUBLE PAY HERE AND YOU CAN LIVE ON IT +8131-117016-0013-1316: HE PICKED OUT FIVE OF THE MEN INCLUDING GORDON YOU FIVE WILL COME WITH ME +8131-117016-0014-1317: THE REST OF YOU CAN (TEAM->TEEM) UP ANY WAY YOU WANT (TONIGHT->TO NIGHT) PICK ANY (ROUTE->ROUGH) THAT'S OPEN (OKAY MEN->OH CAME AND) LET'S GO +8131-117016-0015-1318: (BRUCE->BRUSH) GORDON GRINNED SLOWLY AS HE SWUNG THE STICK AND (MURDOCH'S->MARDOC'S) EYES FELL ON HIM EARTH COP +8131-117016-0016-1319: TWO YEARS GORDON ADMITTED +8131-117016-0017-1320: FOR A SECOND GORDON CURSED HIMSELF +8131-117016-0018-1321: HE BEGAN WONDERING ABOUT SECURITY THEN +8131-117016-0019-1322: NOBODY HAD TRIED TO GET IN TOUCH WITH HIM +8131-117016-0020-1323: THERE WAS A CRUDE LIGHTING SYSTEM HERE PUT UP BY THE CITIZENS AT THE FRONT OF EACH BUILDING A DIM (PHOSPHOR->PHOSPHER) BULB GLOWED WHEN DARKNESS FELL THEY WOULD HAVE NOTHING ELSE TO SEE BY +8131-117016-0021-1324: MOVING IN TWO GROUPS OF THREES (AT->IT) OPPOSITE SIDES OF THE STREET THEY BEGAN THEIR BEAT +8131-117016-0022-1325: THERE WAS NO CHANCE TO SAVE THE CITIZEN WHO WAS DYING FROM LACK OF AIR +8131-117016-0023-1326: GORDON FELT THE SOLID PLEASURE OF THE FINELY TURNED CLUB IN HIS HANDS +8131-117016-0024-1327: GORDON'S EYES POPPED AT 
THAT +8131-117016-0025-1328: HE SWALLOWED THE SENTIMENT HIS OWN CLUB WAS MOVING NOW +8131-117016-0026-1329: THE OTHER (FOUR COPS->FUPS) HAD COME IN RELUCTANTLY +8131-117016-0027-1330: HE BROUGHT HIM TO THE GROUND WITH A SINGLE BLOW ACROSS THE KIDNEYS +8131-117016-0028-1331: THEY (ROUNDED->ROUTED) UP THE MEN OF THE GANG AND ONE OF THE (COPS->CUPS) STARTED OFF +8131-117016-0029-1332: TO FIND A PHONE AND CALL THE WAGON +8131-117016-0030-1333: WE'RE NOT USING WAGONS (MURDOCH->MURDOCK) TOLD HIM (LINE->LYING) THEM UP +8131-117016-0031-1334: IF THEY TRIED TO RUN THEY WERE HIT FROM BEHIND (IF->THAT) THEY STOOD STILL THEY WERE CLUBBED CAREFULLY +8131-117016-0032-1335: (MURDOCH->MURDOCK) INDICATED ONE WHO STOOD WITH HIS (SHOULDERS->SHOULDER) SHAKING AND TEARS RUNNING DOWN HIS CHEEKS +8131-117016-0033-1336: THE CAPTAIN'S FACE WAS AS SICK AS (GORDON->GORDON'S) FELT +8131-117016-0034-1337: I WANT THE NAME OF EVERY MAN IN THE GANG YOU CAN REMEMBER HE TOLD THE MAN +8131-117016-0035-1338: COLONEL THEY'D KILL ME I DON'T KNOW +8131-117016-0036-1339: (MURDOCH->MURDOCK) TOOK HIS NOD AS EVIDENCE ENOUGH AND TURNED TO THE WRETCHED (TOUGHS->TUFTS) +8131-117016-0037-1340: IF HE SHOULD TURN UP DEAD I'LL KNOW YOU BOYS ARE RESPONSIBLE AND I'LL FIND YOU +8131-117016-0038-1341: TROUBLE BEGAN BREWING SHORTLY AFTER THOUGH +8131-117016-0039-1342: (MURDOCH SENT->MARDOX SAT) ONE OF THE MEN TO PICK UP A SECOND SQUAD OF SIX AND THEN A THIRD +8131-117016-0040-1343: (IN->AND) THE THIRD ONE BRUCE GORDON SPOTTED ONE OF THE MEN (WHO'D->WHO HAD) BEEN BEATEN BEFORE +8131-117016-0041-1344: GET A STRETCHER AND TAKE HIM WHEREVER HE BELONGS HE ORDERED +8131-117016-0042-1345: BUT THE CAPTAIN STIRRED FINALLY SIGHING +8131-117016-0043-1346: (NO->NOW) THE (COPS THEY'RE->CAPS ARE) GIVING ME (WE'RE->WERE) COVERED GORDON +8131-117016-0044-1347: BUT THE (STONEWALL->STERN WALL) GANG IS (BACKING WAYNE->BACK IN WAIN) +8131-117016-0045-1348: BUT IT'S GOING TO BE TOUGH ON THEM +8131-117016-0046-1349: BRUCE (GORDON->GORD AND) GRIMACED I'VE GOT A YELLOW TICKET FROM SECURITY +8131-117016-0047-1350: (MURDOCH->MURDOCK) BLINKED HE DROPPED HIS EYES SLOWLY +8131-117016-0048-1351: WHAT MAKES YOU THINK (WAYNE->WAIN) WILL BE RE ELECTED +8131-117016-0049-1352: NOBODY WANTS HIM EXCEPT A GANG OF (CROOKS->COOKS) AND THOSE IN POWER +8131-117016-0050-1353: EVER SEE A MARTIAN ELECTION +8131-117016-0051-1354: NO (YOU'RE A FIRSTER->YOU ARE FIRSTTER) HE CAN'T LOSE +8131-117016-0052-1355: AND THEN HELL IS GOING TO POP AND THIS WHOLE (PLANET->PLAN IT) MAY BE BLOWN WIDE OPEN +8131-117016-0053-1356: (IT->YET) FITTED WITH THE (DIRE->DIA) PREDICTIONS OF SECURITY AND WITH (THE->A) SPYING GORDON WAS GOING TO DO ACCORDING TO THEM +8131-117016-0054-1357: HE WAS GETTING EVEN FATTER NOW THAT HE WAS EATING BETTER FOOD FROM THE FAIR RESTAURANT AROUND THE CORNER +8131-117016-0055-1358: (COST EM->COSTUM) MORE BUT THEY'D BE RESPECTABLE +8131-117016-0056-1359: BECAUSE (IZZY->IZZIE) IS ALWAYS HONEST ACCORDING TO HOW HE SEES IT +8131-117016-0057-1360: BUT YOU GOT EARTH IDEAS OF THE STUFF LIKE I HAD ONCE +8131-117016-0058-1361: THE GROUPS GREW MORE EXPERIENCED AND (MURDOCH->MURDOCK) WAS TRAINING A NEW SQUAD EVERY NIGHT +8131-117016-0059-1362: IT WASN'T EXACTLY LEGAL BUT NOTHING WAS HERE +8131-117016-0060-1363: THIS COULD LEAD TO ABUSES AS HE'D SEEN ON EARTH +8131-117016-0061-1364: BUT (THERE->THEIR) PROBABLY WOULDN'T BE TIME FOR IT IF MAYOR (WAYNE->WAIN) WAS RE ELECTED +8131-117017-0000-1270: IT WAS NIGHT OUTSIDE AND THE (PHOSPHOR BULBS->PHOSPHOBS) AT THE CORNERS GLOWED DIMLY GIVING HIM 
BARELY ENOUGH LIGHT BY WHICH TO LOCATE THE WAY TO THE EXTEMPORIZED PRECINCT HOUSE +8131-117017-0001-1271: IT HAD PROBABLY BEEN YEARS SINCE ANY HAD DARED RISK IT AFTER THE SUN WENT DOWN +8131-117017-0002-1272: AND THE SLOW DOUBTFUL RESPECT ON THE FACES OF THE CITIZENS AS THEY NODDED TO HIM WAS EVEN MORE PROOF THAT (HALEY'S->HALELY) SYSTEM WAS WORKING +8131-117017-0003-1273: GORDON HIT THE SIGNAL SWITCH AND THE (MARSPEAKER->MARSH SPEAKER) LET OUT A SHRILL WHISTLE +8131-117017-0004-1274: (GUNS->GUN) SUDDENLY SEEMED TO BE FLOURISHING EVERYWHERE +8131-117017-0005-1275: YOU CAN'T DO IT TO ME +8131-117017-0006-1276: (I'M->I AM) REFORMED I'M GOING STRAIGHT +8131-117017-0007-1277: YOU DAMNED (COPS->COPSE) CAN'T (O'NEILL->O'NEIA) WAS BLUBBERING +8131-117017-0008-1278: ONE LOOK WAS ENOUGH THE WORK PAPERS HAD THE (TELLTALE->TELL TALE) OVER THICKENING OF THE SIGNATURE (THAT->THEY) HAD SHOWED UP ON OTHER PAPERS OBVIOUSLY FORGERIES +8131-117017-0009-1279: SOME TURNED AWAY AS GORDON AND THE OTHER (COP->COPP) WENT TO WORK BUT MOST OF THEM WEREN'T SQUEAMISH +8131-117017-0010-1280: WHEN IT WAS OVER THE TWO PICKED UP THEIR WHIMPERING CAPTIVE +8131-117017-0011-1281: JENKINS THE OTHER COP HAD BEEN HOLDING THE WALLET +8131-117017-0012-1282: MUST (OF->HAVE) BEEN MAKING A BIG CONTACT IN SOMETHING FIFTY FIFTY +8131-117017-0013-1283: THERE MUST HAVE BEEN OVER TWO THOUSAND CREDITS IN THE WALLET +8131-117017-0014-1284: WHEN GORDON AND JENKINS CAME BACK (MURDOCH->MERDOCK) TOSSED THE MONEY TO THEM SPLIT IT +8131-117017-0015-1285: WHATEVER COMES TO HAND (GOV'NOR->GOVERNOR) +8131-117017-0016-1286: LIKE THIS SOCIAL CALL GORDON ASKED HIM +8131-117017-0017-1287: THE LITTLE MAN SHOOK HIS HEAD HIS ANCIENT EIGHTEEN YEAR OLD FACE TURNING SOBER (NOPE->NOTE) +8131-117017-0018-1288: YOU (OWE->ARE) ME SOME BILLS (GOV'NOR->GUV'NER) +8131-117017-0019-1289: ELEVEN HUNDRED FIFTY CREDITS +8131-117017-0020-1290: YOU DIDN'T PAY UP YOUR PLEDGE TO THE (CAMPAIGN->CAPTAIN) FUND SO I (HADDA->HAD A) FILL IN +8131-117017-0021-1291: A THOUSAND (INTEREST->INTERESTS) AT TEN PER CENT A WEEK STANDARD RIGHT +8131-117017-0022-1292: GORDON HAD HEARD OF THE FRIENDLY INTEREST CHARGED ON THE SIDE HERE BUT HE SHOOK HIS HEAD WRONG (IZZY->IS HE) +8131-117017-0023-1293: (HUH IZZY->HOW AS HE) TURNED IT OVER AND SHOOK HIS HEAD +8131-117017-0024-1294: NOW SHOW ME WHERE I SIGNED ANY AGREEMENT SAYING I'D PAY YOU BACK +8131-117017-0025-1295: FOR A SECOND (IZZY'S->IZZIE'S) FACE WENT BLANK THEN HE CHUCKLED +8131-117017-0026-1296: HE (PULLED->POURED) OUT THE BILLS AND HANDED THEM OVER +8131-117017-0027-1297: THANKS (IZZY->IS HE) THANKS YOURSELF +8131-117017-0028-1298: THE KID POCKETED THE MONEY CHEERFULLY NODDING +8131-117017-0029-1299: THE LITTLE GUY KNEW MARS AS FEW OTHERS DID APPARENTLY FROM ALL SIDES +8131-117017-0030-1300: AND IF ANY OF THE OTHER (COPS->CUPS) HAD PRIVATE RACKETS OF THEIR OWN (IZZY->IS HE) WAS UNDOUBTEDLY THE MAN TO FIND IT OUT AND (USE->USED) THE INFORMATION WITH A BEAT SUCH AS THAT EVEN GOING HALVES AND WITH ALL THE GRAFT (TO->AT) THE UPPER BRACKETS HE'D STILL BE ABLE TO MAKE HIS PILE IN A MATTER OF MONTHS +8131-117017-0031-1301: THE CAPTAIN LOOKED COMPLETELY BEATEN AS HE CAME INTO THE ROOM AND DROPPED (ONTO->INTO) THE BENCH +8131-117017-0032-1302: GO ON (ACCEPT DAMN IT->EXCEPT DEAR MIN) +8131-117029-0000-1247: THERE WAS A MAN COMING FROM EARTH ON A SECOND SHIP WHO WOULD SEE HIM +8131-117029-0001-1248: THE LITTLE PUBLISHER WAS BACK AT THE CRUSADER AGAIN +8131-117029-0002-1249: ONLY GORDON AND SHEILA WERE LEFT +8131-117029-0003-1250: CREDIT HAD 
BEEN ESTABLISHED AGAIN AND THE BUSINESSES WERE OPEN +8131-117029-0004-1251: GORDON CAME TO A ROW OF TEMPORARY BUBBLES INDIVIDUAL DWELLINGS BUILT LIKE THE DOME BUT OPAQUE FOR PRIVACY +8131-117029-0005-1252: THEY HAD BEEN LUCKY +8131-117029-0006-1253: (SCHULBERG'S->SHOALBURG'S) VOLUNTEERS WERE OFFICIAL NOW +8131-117029-0007-1254: (FATS->FAT'S) PLACE WAS STILL OPEN THOUGH THE CROOKED TABLES HAD BEEN REMOVED GORDON DROPPED TO A STOOL SLIPPING OFF HIS HELMET +8131-117029-0008-1255: HE REACHED AUTOMATICALLY FOR THE GLASS OF ETHER (NEEDLED->NEEDLE) BEER +8131-117029-0009-1256: THOUGHT (YOU'D->YE'D) BE IN THE CHIPS +8131-117029-0010-1257: THAT'S MARS GORDON ECHOED THE (OTHER'S COMMENT->OTHERS COMMENTS) WHY DON'T YOU PULL OFF THE PLANET FATS YOU COULD GO BACK TO EARTH I'D GUESS THE OTHER NODDED +8131-117029-0011-1258: (GUESS->GES) A MAN GETS USED TO ANYTHING HELL MAYBE I CAN HIRE SOME BUMS TO SIT AROUND AND WHOOP IT UP WHEN THE SHIPS COME IN AND (BILL->BUILD) THIS (AS->IS) A REAL OLD MARTIAN DEN OF SIN +8131-117029-0012-1259: THERE WAS A GRIN ON THE OTHER'S FACE +8131-117029-0013-1260: FINALLY GOT OUR ORDERS FOR YOU IT'S MERCURY +8131-117029-0014-1261: WE SENT TWENTY OTHERS THE SAME WAY AND THEY FAILED +8131-117029-0015-1262: LET'S (SAY YOU'VE->SAVE) SHIFTED SOME OF THE MISERY AROUND A BIT AND GIVEN THEM A CHANCE TO DO BETTER +8131-117029-0016-1263: YOU CAN'T STAY HERE +8131-117029-0017-1264: THERE'S A ROCKET WAITING TO (TRANSSHIP->TRANSHIP) YOU TO THE MOON ON THE WAY TO MERCURY RIGHT NOW GORDON SIGHED +8131-117029-0018-1265: AND (I'VE->I) PAID HER THE PAY WE OWE YOU FROM THE TIME YOU (BEGAN->BEGIN) USING YOUR BADGE SHE'S OUT SHOPPING +8131-117029-0019-1266: BUT HIS OLD EYES WERE GLINTING +8131-117029-0020-1267: DID YOU THINK WE'D LET YOU GO WITHOUT SEEING YOU OFF (COBBER->COPPER) HE ASKED +8131-117029-0021-1268: I I OH DRAT IT I'M GETTING OLD (IZZY->IS HE) YOU TELL HIM +8131-117029-0022-1269: HE GRABBED GORDON'S HAND AND WADDLED DOWN THE LANDING PLANK (IZZY->IZZIE) SHOOK HIS HEAD +8188-269288-0000-2881: (ANNIE->ANY) COLCHESTER HAD BEGUN TO MAKE FRIENDS WITH (LESLIE->LISLEY) +8188-269288-0001-2882: LESLIE DETERMINED TO (TRY FOR->TRIFLE) HONORS IN ENGLISH LANGUAGE AND LITERATURE +8188-269288-0002-2883: HER TASTES ALL LAY IN THIS DIRECTION HER IDEA BEING BY AND BY TO FOLLOW HER MOTHER'S PROFESSION OF JOURNALISM FOR WHICH SHE ALREADY SHOWED CONSIDERABLE APTITUDE +8188-269288-0003-2884: SHE HAD NO IDEA OF ALLOWING HERSELF TO BREAK DOWN +8188-269288-0004-2885: WHAT DO YOU MEAN REPLIED LESLIE +8188-269288-0005-2886: (WHY->WHAT) YOU WILL BE PARTING FROM ME YOU KNOW +8188-269288-0006-2887: I (WON'T->WOULD) BE THE CONSTANT WORRY (AND->IN) PLAGUE OF YOUR LIFE +8188-269288-0007-2888: IT IS THIS IF BY ANY CHANCE YOU DON'T LEAVE SAINT (WODE'S->WORDS) ANNIE I HOPE YOU WILL ALLOW ME TO BE YOUR (ROOMFELLOW->ROOM FELLOW) AGAIN NEXT TERM +8188-269288-0008-2889: SAID ANNIE A FLASH OF LIGHT COMING INTO HER EYES AND THEN LEAVING THEM +8188-269288-0009-2890: BUT SHE ADDED ABRUPTLY YOU SPEAK OF SOMETHING WHICH MUST NOT TAKE PLACE +8188-269288-0010-2891: I MUST PASS (IN HONORS->AN HONOURS) IF I DON'T I SHALL DIE +8188-269288-0011-2892: A FEW MOMENTS LATER THERE CAME A TAP AT THE DOOR +8188-269288-0012-2893: LESLIE OPENED THE DOOR +8188-269288-0013-2894: JANE (HERIOT->HEARET) STOOD WITHOUT +8188-269288-0014-2895: THESE (LETTERS->LITTLE) HAVE JUST COME FOR YOU AND (ANNIE->ANY) COLCHESTER SHE SAID AND AS I WAS COMING UPSTAIRS I THOUGHT I WOULD LEAVE THEM WITH YOU +8188-269288-0015-2896: (LESLIE->LIZLY) THANKED HER AND 
EAGERLY GRASPED THE LITTLE PARCEL +8188-269288-0016-2897: HER EYES SHONE WITH PLEASURE AT THE ANTICIPATION OF THE DELIGHTFUL TIME SHE WOULD HAVE REVELING IN THE HOME NEWS THE OTHER LETTER WAS DIRECTED TO (ANNIE->ANY) COLCHESTER +8188-269288-0017-2898: HERE IS A LETTER FOR YOU ANNIE CRIED LESLIE +8188-269288-0018-2899: HER FACE GREW SUDDENLY WHITE AS DEATH WHAT IS IT DEAR +8188-269288-0019-2900: I HAVE BEEN (STARVING->STARLING) OR RATHER I HAVE BEEN THIRSTING +8188-269288-0020-2901: WELL READ IT IN PEACE SAID (LESLIE->LINLESILY) I WON'T DISTURB YOU +8188-269288-0021-2902: I AM TRULY GLAD IT HAS COME +8188-269288-0022-2903: (LESLIE->LISALLY) SEATED HERSELF WITH HER BACK TO HER COMPANION AND OPENED HER (OWN->ON) LETTERS +8188-269288-0023-2904: DON'T NOTICE ME REPLIED ANNIE +8188-269288-0024-2905: I MUST GO INTO THE GROUNDS THE AIR IS STIFLING +8188-269288-0025-2906: BUT THEY ARE JUST SHUTTING UP +8188-269288-0026-2907: I SHALL GO I KNOW A WAY +8188-269288-0027-2908: JUST AFTER MIDNIGHT SHE ROSE WITH A SIGH TO PREPARE FOR BED +8188-269288-0028-2909: SHE LOOKED ROUND THE ROOM +8188-269288-0029-2910: NOW I REMEMBER SHE GOT A LETTER WHICH UPSET HER VERY MUCH AND WENT OUT +8188-269288-0030-2911: (LESLIE->LIZLY) WENT TO THE WINDOW AND FLUNG IT OPEN SHE PUT HER HEAD OUT AND TRIED TO PEER INTO THE DARKNESS BUT THE MOON HAD ALREADY SET AND SHE COULD NOT SEE MORE THAN A COUPLE OF YARDS IN FRONT OF HER +8188-269288-0031-2912: SHE IS A VERY QUEER ERRATIC CREATURE AND THAT LETTER THERE (WAS->IS) BAD NEWS IN THAT LETTER +8188-269288-0032-2913: WHAT (CAN SHE->CAN'T YOU) BE DOING OUT BY HERSELF +8188-269288-0033-2914: (LESLIE LEFT->THIS LILY LIT) THE ROOM BUT SHE HAD SCARCELY GONE A DOZEN (PACES->PLACES) DOWN THE CORRIDOR BEFORE SHE MET (ANNIE->ANY) RETURNING +8188-269288-0034-2915: (ANNIE'S->AND HIS) EYES WERE VERY BRIGHT HER CHEEKS WERE NO LONGER PALE AND THERE WAS A BRILLIANT COLOR IN THEM +8188-269288-0035-2916: SHE DID NOT TAKE THE LEAST NOTICE OF (LESLIE->LIZZLING) BUT GOING INTO THE ROOM SHUT THE DOOR +8188-269288-0036-2917: DON'T BEGIN SAID ANNIE +8188-269288-0037-2918: DON'T BEGIN WHAT DO YOU MEAN +8188-269288-0038-2919: I MEAN THAT I DON'T WANT YOU TO BEGIN TO ASK QUESTIONS +8188-269288-0039-2920: I WALKED UP AND DOWN AS FAST AS EVER I COULD OUTSIDE IN ORDER TO MAKE MYSELF SLEEPY +8188-269288-0040-2921: DON'T TALK TO ME LESLIE DON'T SAY A SINGLE WORD +8188-269288-0041-2922: I SHALL GO OFF TO SLEEP THAT IS ALL I CARE FOR +8188-269288-0042-2923: DON'T SAID ANNIE +8188-269288-0043-2924: NOW DRINK THIS AT ONCE SHE SAID IN A VOICE OF AUTHORITY IF YOU REALLY WISH TO SLEEP +8188-269288-0044-2925: (ANNIE STARED->ANY STEERED) VACANTLY AT THE (COCOA THEN SHE UTTERED->COOKER DIDN'T) A LAUGH +8188-269288-0045-2926: DRINK THAT SHE SAID +8188-269288-0046-2927: DO YOU WANT TO KILL ME DON'T TALK ANY MORE +8188-269288-0047-2928: I AM SLEEPY I SHALL SLEEP +8188-269288-0048-2929: SHE GOT INTO BED AS SHE SPOKE AND WRAPPED THE CLOTHES TIGHTLY ROUND HER +8188-269288-0049-2930: (CAN'T->COULD) YOU MANAGE WITH A CANDLE JUST FOR ONCE +8188-269288-0050-2931: CERTAINLY SAID (LESLIE->IT EASILY) +8188-269288-0051-2932: SHE TURNED OFF THE LIGHT AND LIT A CANDLE WHICH (SHE->HE) PUT BEHIND HER SCREEN THEN PREPARED TO GET INTO BED +8188-269288-0052-2933: (ANNIE'S->ANY) MANNER WAS VERY MYSTERIOUS +8188-269288-0053-2934: (ANNIE->AND HE) DID NOT MEAN TO (CONFIDE->CONFINE) IN (ANYONE->ANY ONE) THAT NIGHT AND THE KINDEST THING WAS TO LEAVE HER ALONE +8188-269288-0054-2935: (TIRED->TIE IT) OUT (LESLIE->LIZZLY) HERSELF DROPPED ASLEEP 
+8188-269288-0055-2936: ANNIE IS THAT YOU SHE CALLED OUT +8188-269288-0056-2937: THERE WAS NO REPLY BUT THE SOUND OF HURRYING STEPS CAME QUICKER AND QUICKER NOW AND THEN (THEY WERE->THEIR) INTERRUPTED BY A GROAN +8188-269288-0057-2938: OH THIS WILL KILL ME MY HEART WILL BREAK THIS WILL KILL ME +8188-269290-0000-2823: THE (GUILD->GOLD) OF SAINT ELIZABETH +8188-269290-0001-2824: IMMEDIATELY AFTER DINNER THAT EVENING LESLIE RAN UP TO HER ROOM TO MAKE PREPARATIONS FOR HER VISIT TO EAST HALL +8188-269290-0002-2825: I'M NOT COMING SAID ANNIE +8188-269290-0003-2826: EVERY STUDENT IS TO BE (IN->AN) EAST HALL AT HALF PAST EIGHT +8188-269290-0004-2827: IT DOESN'T MATTER REPLIED ANNIE WHETHER IT IS AN ORDER OR NOT I'M NOT COMING SAY NOTHING ABOUT ME PLEASE +8188-269290-0005-2828: IT BURNED AS IF WITH FEVER +8188-269290-0006-2829: YOU DON'T KNOW WHAT A TRIAL IT IS FOR ME TO HAVE YOU HERE +8188-269290-0007-2830: I WANT TO BE ALONE GO +8188-269290-0008-2831: I KNOW YOU DON'T QUITE MEAN WHAT YOU SAY SAID LESLIE BUT OF COURSE IF YOU REALLY WISH ME +8188-269290-0009-2832: YOU (FRET->FRITTEN) ME BEYOND ENDURANCE +8188-269290-0010-2833: WRAPPING A PRETTY BLUE SHAWL (ROUND HER HEAD AND->AROUND A HIDDEN) SHOULDERS SHE TURNED TO ANNIE +8188-269290-0011-2834: LESLIE WAS JUST CLOSING THE DOOR BEHIND HER WHEN (ANNIE->ANY) CALLED AFTER HER +8188-269290-0012-2835: I TOOK IT OUT SAID (LESLIE->LIZZIE) TOOK IT OUT +8188-269290-0013-2836: HAVE THE GOODNESS TO FIND IT AND PUT IT BACK +8188-269290-0014-2837: BUT DON'T LOCK ME OUT PLEASE ANNIE +8188-269290-0015-2838: OH I WON'T (LOCK->LOOK) YOU OUT SHE SAID BUT I MUST HAVE THE KEY +8188-269290-0016-2839: JANE (HERIOT'S->HERETT'S) VOICE WAS HEARD IN THE PASSAGE +8188-269290-0017-2840: AS SHE WALKED (DOWN->ROUND) THE CORRIDOR SHE HEARD IT BEING TURNED (IN->TO) THE LOCK +8188-269290-0018-2841: WHAT CAN THIS MEAN SHE SAID TO HERSELF +8188-269290-0019-2842: OH I WON'T PRESS YOU REPLIED JANE +8188-269290-0020-2843: OH I SHALL NEVER DO THAT REPLIED LESLIE +8188-269290-0021-2844: YOU SEE ALL THE GIRLS EXCEPT (EILEEN->AILEEN) AND MARJORIE LAUGH AT HER AND THAT SEEMS TO ME TO MAKE HER WORSE +8188-269290-0022-2845: SOME DAY JANE YOU MUST SEE HER +8188-269290-0023-2846: IF YOU (ARE->*) IN LONDON DURING THE SUMMER YOU MUST COME AND (PAY US->PASS) A VISIT WILL YOU +8188-269290-0024-2847: THAT IS IF YOU CARE TO CONFIDE IN ME +8188-269290-0025-2848: I BELIEVE POOR ANNIE IS DREADFULLY UNHAPPY +8188-269290-0026-2849: THAT'S JUST (IT JANE->A CHANT) THAT IS WHAT (FRIGHTENS->BRIGHTENS) ME SHE REFUSES TO COME +8188-269290-0027-2850: REFUSES TO COME SHE CRIED +8188-269290-0028-2851: (SHE WILL->SHE'LL) GET (INTO->IN) AN AWFUL SCRAPE +8188-269290-0029-2852: I AM SURE SHE IS ILL SHE WORKS TOO HARD AND SHE BUT THERE I DON'T KNOW THAT I OUGHT TO SAY ANY MORE +8188-269290-0030-2853: I'LL WAIT FOR YOU HERE SAID LESLIE +8188-269290-0031-2854: DO COME (ANNIE->ANY) DO +8188-269290-0032-2855: SCARCELY LIKELY REPLIED LESLIE SHE TOLD ME SHE WAS DETERMINED NOT TO COME TO THE MEETING +8188-269290-0033-2856: BUT (MARJORIE->MARGERY) AND (EILEEN->AILEEN) HAD ALREADY DEPARTED AND LESLIE AND JANE FOUND THEMSELVES AMONG THE LAST STUDENTS TO ARRIVE AT THE GREAT EAST HALL +8188-269290-0034-2857: MISS (LAUDERDALE->LAURDALE) WAS STANDING WITH THE OTHER TUTORS AND (PRINCIPALS->PRINCIPLES) OF THE DIFFERENT HALLS ON A RAISED PLATFORM +8188-269290-0035-2858: THEN A (ROLL->RAW) CALL WAS GONE THROUGH BY ONE OF THE TUTORS THE ONLY (ABSENTEE->ABSENTE) WAS (ANNIE->ANY) COLCHESTER +8188-269290-0036-2859: THE PHYSICAL PART OF 
(YOUR->THE OLD) TRAINING AND ALSO THE MENTAL PART ARE ABUNDANTLY SUPPLIED IN THIS GREAT HOUSE OF LEARNING SHE CONTINUED BUT THE SPIRITUAL PART IT SEEMS TO ME OUGHT NOW TO BE STRENGTHENED +8188-269290-0037-2860: (HEAR HEAR->HAIR HAIR) AND ONCE AGAIN (HEAR->HAIR) +8188-269290-0038-2861: SHE UTTERED (HER STRANGE->A STRAIN) REMARK STANDING UP +8188-269290-0039-2862: MARJORIE AND (EILEEN->AILEEN) WERE CLOSE TO HER +8188-269290-0040-2863: I WILL TALK WITH YOU (BELLE ACHESON->BELL ARCHISON) PRESENTLY SHE SAID +8188-269290-0041-2864: THE NAMES OF (*->THE) PROPOSED MEMBERS ARE TO BE SUBMITTED TO ME BEFORE THIS DAY WEEK +8188-269290-0042-2865: AM I MY BROTHER'S KEEPER +8188-269290-0043-2866: YOU ASK SHE CONTINUED +8188-269290-0044-2867: GOD (ANSWERS TO->ADDEST) EACH OF YOU YOU ARE +8188-269290-0045-2868: THE WORLD (SAYS->TEETH) NO I AM NOT BUT GOD SAYS (YES->IS) YOU ARE +8188-269290-0046-2869: ALL MEN ARE YOUR BROTHERS +8188-269290-0047-2870: FOR ALL WHO SIN ALL WHO SUFFER YOU ARE TO (A CERTAIN->EXERT AN) EXTENT RESPONSIBLE +8188-269290-0048-2871: AFTER THE ADDRESS THE GIRLS THEMSELVES WERE ENCOURAGED TO SPEAK AND A VERY ANIMATED DISCUSSION FOLLOWED +8188-269290-0049-2872: IT WAS PAST TEN O'CLOCK WHEN SHE LEFT THE HALL +8188-269290-0050-2873: JUST AS SHE WAS DOING SO MISS FRERE CAME UP +8188-269290-0051-2874: (ANNIE->ANY) COLCHESTER (IS->AS) YOUR (ROOMFELLOW->ROOM FELLOW) IS SHE NOT SHE SAID +8188-269290-0052-2875: I SEE BY YOUR FACE MISS GILROY THAT YOU ARE DISTRESSED ABOUT SOMETHING ARE YOU KEEPING ANYTHING BACK +8188-269290-0053-2876: (I AM->I'M) AFRAID I AM REPLIED (LESLIE DISTRESS->LIZZIE DISTRESSED) NOW IN HER TONE +8188-269290-0054-2877: I MUST SEE HER MYSELF EARLY IN THE MORNING AND I AM QUITE SURE THAT NOTHING WILL SATISFY MISS (LAUDERDALE->LAURAIL) EXCEPT A VERY AMPLE APOLOGY AND A FULL EXPLANATION OF THE REASON WHY SHE ABSENTED HERSELF +8188-269290-0055-2878: EXCUSES MAKE NO DIFFERENCE +8188-269290-0056-2879: THE GIRL WHO BREAKS THE RULES (HAS->HAVE) TO BE PUNISHED +8188-269290-0057-2880: I WILL TELL HER +8188-274364-0000-2811: THE COMMONS ALSO VOTED THAT THE NEW CREATED PEERS OUGHT TO HAVE NO VOICE IN THIS TRIAL BECAUSE THE ACCUSATION BEING AGREED TO WHILE THEY WERE COMMONERS (THEIR->THEY) CONSENT TO IT WAS IMPLIED WITH THAT OF ALL THE COMMONS OF ENGLAND +8188-274364-0001-2812: IN THE GOVERNMENT OF IRELAND HIS ADMINISTRATION HAD BEEN EQUALLY (PROMOTIVE->PROMOTED) OF HIS MASTER'S INTEREST AND THAT OF THE SUBJECTS COMMITTED TO HIS CARE +8188-274364-0002-2813: THE CASE OF LORD (MOUNTNORRIS->MONTORAS) OF ALL THOSE WHICH WERE COLLECTED WITH SO MUCH INDUSTRY IS THE MOST FLAGRANT AND THE LEAST EXCUSABLE +8188-274364-0003-2814: THE COURT WHICH CONSISTED OF THE (CHIEF OFFICERS->CHEAP OFFICIALS) OF THE ARMY FOUND THE CRIME TO BE (CAPITAL->CAPT ON) AND CONDEMNED THAT NOBLEMAN TO LOSE HIS HEAD +8188-274364-0004-2815: WHERE THE TOKEN BY WHICH I (SHOULD->SHALL) DISCOVER IT +8188-274364-0005-2816: IT IS NOW (*->A) FULL TWO HUNDRED AND FORTY YEARS SINCE TREASONS WERE DEFINED AND SO LONG HAS IT BEEN SINCE ANY MAN WAS TOUCHED TO THIS EXTENT UPON THIS CRIME BEFORE MYSELF +8188-274364-0006-2817: LET US NOT TO (OUR OWN DESTRUCTION->UNDERSTRUCTION) AWAKE THOSE SLEEPING LIONS BY RATTLING UP A COMPANY OF OLD RECORDS WHICH HAVE LAIN FOR SO MANY AGES BY THE (WALL->WAR) FORGOTTEN AND NEGLECTED +8188-274364-0007-2818: (HOWEVER->HERBERT) THESE (GENTLEMEN AT->GENTLEMAN OF) THE BAR (SAY->SO) THEY SPEAK FOR THE (COMMONWEALTH->CONWEALTH) AND THEY BELIEVE SO YET UNDER (FAVOR->FAVOUR) IT IS I WHO IN THIS PARTICULAR SPEAK 
FOR THE (COMMONWEALTH->CORNWEALTH) +8188-274364-0008-2819: MY LORDS I HAVE NOW TROUBLED YOUR LORDSHIPS A GREAT DEAL LONGER THAN I SHOULD HAVE DONE +8188-274364-0009-2820: YOUNG (VANE->VAIN) FALLING UPON THIS PAPER OF NOTES DEEMED THE MATTER OF THE UTMOST IMPORTANCE AND IMMEDIATELY COMMUNICATED IT TO (PYM->POEM) WHO NOW PRODUCED THE PAPER BEFORE THE HOUSE OF COMMONS +8188-274364-0010-2821: THE KING PROPOSES THIS DIFFICULTY BUT HOW CAN I UNDERTAKE OFFENSIVE (WAR->FOR) IF I HAVE NO MORE MONEY +8188-274364-0011-2822: YOUR MAJESTY HAVING TRIED THE AFFECTIONS OF YOUR PEOPLE YOU ARE (ABSOLVED->ABSORBED) AND LOOSE FROM ALL RULES OF GOVERNMENT AND MAY DO WHAT POWER WILL ADMIT +8280-266249-0000-339: OLD MISTER DINSMORE HAD ACCEPTED A PRESSING INVITATION FROM HIS GRANDDAUGHTER AND HER HUSBAND TO JOIN THE PARTY AND WITH THE ADDITION OF SERVANTS IT WAS A LARGE ONE +8280-266249-0001-340: AS THEY WERE IN NO HASTE AND THE CONFINEMENT OF A RAILROAD CAR WOULD BE VERY IRKSOME TO THE YOUNGER CHILDREN IT HAD BEEN DECIDED TO MAKE THE JOURNEY BY WATER +8280-266249-0002-341: THERE WERE NO SAD LEAVE TAKINGS TO MAR THEIR PLEASURE THE CHILDREN WERE IN WILD SPIRITS AND ALL SEEMED CHEERFUL AND HAPPY AS THEY SAT OR STOOD UPON THE DECK WATCHING THE RECEDING SHORE AS THE VESSEL STEAMED OUT OF THE HARBOR +8280-266249-0003-342: AT LENGTH THE LAND HAD QUITE DISAPPEARED NOTHING COULD BE SEEN BUT THE SKY OVERHEAD AND A VAST EXPANSE OF WATER ALL (AROUND->ROUND) AND THE PASSENGERS FOUND LEISURE TO TURN THEIR ATTENTION UPON EACH OTHER +8280-266249-0004-343: THERE ARE SOME NICE LOOKING PEOPLE ON BOARD REMARKED MISTER TRAVILLA IN AN UNDERTONE TO HIS WIFE +8280-266249-0005-344: (BESIDE->BESIDES) OURSELVES ADDED COUSIN (RONALD->RANALD) LAUGHING +8280-266249-0006-345: YES SHE ANSWERED THAT LITTLE GROUP YONDER A YOUNG MINISTER AND HIS WIFE AND CHILD I SUPPOSE +8280-266249-0007-346: AND WHAT A DEAR LITTLE FELLOW HE IS JUST ABOUT THE AGE OF OUR (HAROLD->HERALD) I SHOULD JUDGE +8280-266249-0008-347: DO YOU SON WAS THE SMILING REJOINDER +8280-266249-0009-348: HE CERTAINLY LOOKS LIKE A VERY NICE LITTLE BOY +8280-266249-0010-349: SUPPOSE YOU AND HE SHAKE HANDS FRANK +8280-266249-0011-350: I DO INDEED (THOUGH->THE) PROBABLY COMPARATIVELY FEW ARE AWARE THAT TOBACCO IS THE CAUSE OF THEIR AILMENTS +8280-266249-0012-351: DOUBTLESS THAT IS THE CASE REMARKED MISTER DINSMORE +8280-266249-0013-352: WITH ALL MY HEART IF YOU WILL STEP INTO THE (GENTLEMEN'S->GENTLEMAN'S) CABIN WHERE THERE'S A LIGHT +8280-266249-0014-353: HE LED THE WAY THE OTHERS ALL FOLLOWING AND TAKING OUT A SLIP OF PAPER READ FROM IT IN A DISTINCT TONE LOUD ENOUGH TO BE HEARD BY THOSE (*->ALL) ABOUT HIM WITHOUT DISTURBING THE OTHER PASSENGERS +8280-266249-0015-354: ONE DROP OF NICOTINE (EXTRACT OF->EXTRACTED) TOBACCO PLACED ON THE TONGUE OF (A->THE) DOG WILL KILL HIM IN A MINUTE THE HUNDREDTH PART OF (A->THE) GRAIN (PICKED->PRICKED) UNDER THE SKIN OF A MAN'S ARM WILL PRODUCE NAUSEA AND FAINTING +8280-266249-0016-355: THE HALF DOZEN CIGARS WHICH MOST SMOKERS (USE->YEARS) A DAY CONTAIN SIX OR SEVEN GRAINS ENOUGH IF CONCENTRATED AND ABSORBED TO KILL THREE MEN AND A POUND OF TOBACCO ACCORDING TO ITS QUALITY CONTAINS FROM ONE QUARTER TO ONE AND A QUARTER OUNCES +8280-266249-0017-356: IS IT STRANGE THEN THAT SMOKERS AND CHEWERS HAVE A THOUSAND AILMENTS +8280-266249-0018-357: THAT THE FRENCH (POLYTECHNIC->POLY TECHNIC AT) INSTITUTE HAD TO PROHIBIT ITS (*->THE) USE ON ACCOUNT OF ITS EFFECTS (ON->UPON) THE MIND +8280-266249-0019-358: (NOTICE->NOTICED) THE MULTITUDE OF SUDDEN DEATHS AND SEE 
HOW MANY ARE SMOKERS AND CHEWERS +8280-266249-0020-359: (IN->AND) A SMALL COUNTRY TOWN SEVEN OF THESE MYSTERIOUS PROVIDENCES OCCURRED WITHIN THE CIRCUIT OF A MILE ALL DIRECTLY TRACEABLE TO TOBACCO AND ANY PHYSICIAN ON A FEW MOMENTS REFLECTION CAN MATCH THIS FACT BY HIS OWN OBSERVATION +8280-266249-0021-360: AND THEN SUCH POWERFUL ACIDS PRODUCE INTENSE IRRITATION AND THIRST THIRST WHICH WATER DOES NOT QUENCH +8280-266249-0022-361: HENCE A RESORT TO CIDER AND BEER +8280-266249-0023-362: NO SIR WHAT (KNOW->NO) YE NOT THAT YOUR BODY IS THE TEMPLE OF THE HOLY GHOST WHICH IS IN YOU WHICH YE HAVE OF GOD AND YE ARE NOT YOUR OWN +8280-266249-0024-363: FOR (YE->YOU) ARE BOUGHT WITH A PRICE THEREFORE GLORIFY GOD IN YOUR BODY AND IN YOUR SPIRIT WHICH ARE GOD'S +8280-266249-0025-364: WE CERTAINLY HAVE NO RIGHT TO INJURE OUR BODIES EITHER BY NEGLECT OR SELF INDULGENCE +8280-266249-0026-365: AND AGAIN I BESEECH YOU THEREFORE BRETHREN BY THE MERCIES OF GOD THAT YE PRESENT YOUR BODIES A LIVING SACRIFICE (HOLY->WHOLLY) ACCEPTABLE UNTO GOD WHICH IS YOUR REASONABLE SERVICE +8280-266249-0027-366: IT MUST (REQUIRE->REQUIRRE) A GOOD DEAL OF RESOLUTION FOR ONE WHO HAS BECOME FOND OF THE INDULGENCE TO GIVE IT UP REMARKED MISTER (DALY->DALEY) +8280-266249-0028-367: NO DOUBT NO DOUBT RETURNED MISTER (LILBURN->LOWBOURNE) BUT IF (THY RIGHT EYE->I WRITE I) OFFEND THEE PLUCK IT (OUT->UP) AND CAST IT FROM (THEE->ME) FOR IT IS PROFITABLE FOR THEE THAT ONE OF THY MEMBERS SHOULD PERISH AND NOT THAT THY WHOLE BODY SHOULD BE CAST INTO HELL +8280-266249-0029-368: THERE WAS A PAUSE BROKEN BY YOUNG HORACE WHO HAD BEEN WATCHING A GROUP OF MEN GATHERED ABOUT A TABLE AT THE FURTHER END OF THE ROOM +8280-266249-0030-369: THEY ARE GAMBLING YONDER AND I'M AFRAID THAT YOUNG FELLOW IS BEING BADLY FLEECED BY (THAT->THE) MIDDLE AGED MAN OPPOSITE +8280-266249-0031-370: THE EYES OF THE WHOLE PARTY WERE AT ONCE TURNED IN THAT DIRECTION +8280-266249-0032-371: NO SIR HE IS NOT HERE +8280-266249-0033-372: (AND->AS) THE DOOR WAS SLAMMED VIOLENTLY (TO->TOO) +8280-266249-0034-373: NOW THE VOICE CAME FROM THE SKYLIGHT OVERHEAD APPARENTLY AND WITH A FIERCE IMPRECATION THE IRATE GAMESTER RUSHED UPON DECK AND RAN HITHER AND THITHER IN SEARCH OF HIS TORMENTOR +8280-266249-0035-374: HIS VICTIM WHO HAD BEEN LOOKING ON DURING THE LITTLE SCENE AND LISTENING TO THE MYSTERIOUS VOICE (IN->AND) SILENT WIDE EYED WONDER AND FEAR NOW (ROSE->AROSE) HASTILY HIS FACE (DEATHLY->DEFTLY) PALE WITH TREMBLING HANDS GATHERED UP THE MONEY HE HAD STAKED AND HURRYING (INTO->TO) HIS (STATE ROOM->STATEROOM) LOCKED HIMSELF IN +8280-266249-0036-375: WHAT DOES IT MEAN CRIED ONE +8280-266249-0037-376: A (VENTRILOQUIST ABOARD->VENTILLA QUESTED BORN) OF COURSE RETURNED ANOTHER LET'S FOLLOW AND SEE THE FUN +8280-266249-0038-377: I WONDER WHICH OF US IT IS REMARKED THE FIRST LOOKING HARD AT OUR PARTY I DON'T KNOW BUT COME ON +8280-266249-0039-378: THAT FELLOW NICK WARD IS A NOTED (BLACKLEG AND->BLACK LAG IN) RUFFIAN HAD HIS NOSE BROKEN IN A FIGHT AND IS SENSITIVE ON THE SUBJECT WAS CHEATING OF COURSE +8280-266249-0040-379: WHO ASKED THE MATE I'VE SEEN (NONE->NON) UP HERE THOUGH THERE ARE SOME IN THE STEERAGE +8280-266249-0041-380: THEY HEARD HIM IN SILENCE WITH A COOL PHLEGMATIC INDIFFERENCE MOST EXASPERATING TO ONE IN HIS PRESENT MOOD +8280-266249-0042-381: A MAN OF GIANT SIZE AND HERCULEAN STRENGTH HAD LAID ASIDE HIS PIPE AND SLOWLY RISING TO HIS FEET SEIZED THE SCOUNDREL IN HIS POWERFUL GRASP +8280-266249-0043-382: LET ME GO YELLED WARD MAKING A DESPERATE EFFORT TO FREE HIS ARMS 
+8280-266249-0044-383: I DINKS NO I (DINKS->THINK) I (DEACH->DID) YOU VON (LESSON->MESS') RETURNED HIS CAPTOR NOT RELAXING HIS GRASP IN THE LEAST +8280-266249-0045-384: THE GERMAN RELEASED HIS PRISONER AND THE LATTER (SLUNK->SUNK) AWAY WITH MUTTERED THREATS AND IMPRECATIONS UPON THE HEAD OF HIS TORMENTOR +8280-266249-0046-385: MISTER LILBURN AND MISTER (DALY->DALEY) EACH (AT->HAD) A DIFFERENT TIME SOUGHT OUT THE YOUNG MAN (WARD'S->WORDS) INTENDED VICTIM AND TRIED TO INFLUENCE HIM FOR GOOD +8280-266249-0047-386: YET THERE WAS GAMBLING AGAIN THE SECOND NIGHT BETWEEN WARD AND SEVERAL OTHERS OF HIS (PROFESSION->PROFESSIONS) +8280-266249-0048-387: THEY KEPT IT UP TILL AFTER MIDNIGHT +8280-266249-0049-388: THEN MISTER (LILBURN->LOWBORN) WAKING FROM HIS FIRST SLEEP IN A STATEROOM NEAR BY THOUGHT HE WOULD BREAK IT UP ONCE MORE +8280-266249-0050-389: AN INTENSE VOICELESS EXCITEMENT POSSESSED THE PLAYERS FOR THE GAME WAS A CLOSE ONE AND (THE STAKES->MISTAKES) WERE VERY HEAVY +8280-266249-0051-390: THEY BENT EAGERLY OVER THE BOARD EACH WATCHING WITH FEVERISH ANXIETY HIS COMPANION'S MOVEMENTS EACH CASTING NOW AND AGAIN A GLOATING EYE UPON THE HEAP OF GOLD AND (GREENBACKS->GREEN BACKS) THAT LAY BETWEEN THEM AND AT TIMES HALF STRETCHING OUT HIS HAND TO CLUTCH IT +8280-266249-0052-391: A DEEP (GROAN->GROUND) STARTLED THEM AND THEY SPRANG TO THEIR FEET PALE AND TREMBLING WITH SUDDEN TERROR EACH HOLDING HIS BREATH AND STRAINING HIS EAR TO CATCH A REPETITION OF THE DREAD SOUND +8280-266249-0053-392: BUT (ALL WAS->ALWAYS) SILENT AND AFTER A MOMENT OF ANXIOUS WAITING THEY SAT DOWN TO THEIR GAME AGAIN TRYING TO CONCEAL AND SHAKE OFF THEIR FEARS (WITH A->FOR THE) FORCED UNNATURAL LAUGH +8280-266249-0054-393: IT CAME FROM UNDER THE TABLE GASPED (WARD->TOWARD) LOOK WHAT'S THERE (LOOK->LOOKED) YOURSELF +8280-266249-0055-394: WHAT CAN IT HAVE BEEN THEY ASKED EACH OTHER +8280-266249-0056-395: OH NONSENSE WHAT FOOLS WE ARE +8280-266249-0057-396: IT WAS THE LAST GAME OF CARDS FOR THAT TRIP +8280-266249-0058-397: THE CAPTAIN COMING IN SHORTLY AFTER THE SUDDEN FLIGHT OF THE GAMBLERS TOOK CHARGE OF THE MONEY AND THE NEXT DAY RESTORED IT TO THE OWNERS +8280-266249-0059-398: TO ELSIE'S OBSERVANT EYES IT PRESENTLY BECAME EVIDENT THAT THE (DALYS WERE IN->DAILIES RAN) VERY (STRAITENED->STRAIGHT AND) CIRCUMSTANCES +8280-266249-0060-399: OH HOW KIND HOW VERY KIND MISSUS (DALY->DALEY) SAID WITH TEARS OF JOY AND GRATITUDE WE HAVE HARDLY KNOWN HOW WE SHOULD MEET THE MOST NECESSARY EXPENSES OF THIS TRIP BUT HAVE BEEN TRYING TO CAST OUR CARE UPON THE LORD ASKING HIM TO PROVIDE +8280-266249-0061-400: AND HOW WONDERFULLY HE HAS ANSWERED OUR PETITIONS +8280-266249-0062-401: ELSIE ANSWERED PRESSING HER HAND AFFECTIONATELY (ART->ARE) WE NOT SISTERS IN CHRIST +8280-266249-0063-402: YE ARE ALL THE CHILDREN OF GOD BY FAITH IN CHRIST JESUS +8280-266249-0064-403: YE ARE ALL (ONE IN->WINE AND) CHRIST JESUS +8280-266249-0065-404: WE (FEEL->SEE ON) MY HUSBAND AND I THAT WE ARE ONLY THE STEWARDS OF HIS BOUNTY AND (THAT->*) BECAUSE HE HAS SAID INASMUCH AS YE HAVE DONE IT UNTO ONE OF THE LEAST OF THESE MY BRETHREN (YE->YOU) HAVE DONE IT UNTO ME IT IS THE GREATEST PRIVILEGE AND DELIGHT TO DO ANYTHING FOR HIS PEOPLE +8461-258277-0000-1649: WHEN IT WAS THE SEVEN HUNDRED AND EIGHTEENTH NIGHT +8461-258277-0001-1650: BUT HE ANSWERED NEEDS (MUST I->MY THY) HAVE (ZAYNAB ALSO->THY NABBS SO) NOW SUDDENLY THERE CAME A RAP AT THE DOOR AND THE MAID SAID WHO IS AT THE DOOR +8461-258277-0002-1651: THE KNOCKER REPLIED (KAMAR->COME ON) DAUGHTER (OF 
AZARIAH->VAZARRE) THE JEW SAY ME IS ALI OF CAIRO WITH YOU +8461-258277-0003-1652: REPLIED THE BROKER'S DAUGHTER O THOU DAUGHTER OF A DOG +8461-258277-0004-1653: AND HAVING THUS (ISLAMISED->ISLAMMISED) SHE ASKED HIM (DO->TWO) MEN IN THE FAITH OF (AL ISLAM GIVE->ALICELA GAVE) MARRIAGE PORTIONS TO WOMEN OR (DO->TWO) WOMEN DOWER MEN +8461-258277-0005-1654: AND SHE THREW DOWN THE JEW'S HEAD BEFORE HIM +8461-258277-0006-1655: NOW THE (CAUSE->COURSE) OF HER SLAYING HER SIRE WAS AS FOLLOWS +8461-258277-0007-1656: THEN HE (SET->SAT) OUT REJOICING TO RETURN TO THE BARRACK OF THE FORTY +8461-258277-0008-1657: SO HE ATE AND FELL DOWN SENSELESS FOR THE SWEETMEATS WERE DRUGGED WITH (BHANG->BANG) WHEREUPON THE KAZI BUNDLED HIM INTO THE SACK AND MADE OFF WITH (HIM->THEM) CHARGER AND CHEST AND ALL TO THE BARRACK OF THE (FORTY->FORTE) +8461-258277-0009-1658: PRESENTLY (HASAN SHUMAN->HER SON SCHUMANN) CAME OUT OF A (CLOSET->CLOTH) AND SAID TO HIM HAST THOU GOTTEN THE GEAR O ALI +8461-258277-0010-1659: SO HE TOLD HIM WHAT HAD BEFALLEN HIM AND ADDED IF I KNOW WHITHER THE RASCAL IS GONE AND WHERE TO FIND THE KNAVE I WOULD PAY HIM OUT +8461-258277-0011-1660: KNOWEST THOU WHITHER HE WENT +8461-258277-0012-1661: ANSWERED HASAN I KNOW WHERE HE IS AND OPENING THE DOOR OF THE CLOSET SHOWED HIM THE SWEETMEAT (SELLER->CELLAR) WITHIN DRUGGED AND SENSELESS +8461-258277-0013-1662: SO I WENT ROUND ABOUT THE HIGHWAYS OF THE CITY TILL I MET A SWEETMEAT (SELLER->CELLAR) AND BUYING HIS CLOTHES AND (STOCK IN->STOCKING) TRADE AND GEAR FOR TEN (DINARS->HOURS) DID WHAT WAS DONE +8461-258277-0014-1663: QUOTH (AL->A) RASHID WHOSE HEAD IS THIS +8461-258277-0015-1664: SO ALI RELATED TO HIM ALL THAT (HAD->THAT) PASSED FROM FIRST (TO->*) LAST AND THE CALIPH SAID I HAD NOT THOUGHT THOU WOULDST KILL HIM FOR THAT HE WAS A SORCERER +8461-258277-0016-1665: HE REPLIED I HAVE FORTY LADS BUT THEY ARE IN CAIRO +8461-278226-0000-1633: AND LAURA HAD HER OWN PET PLANS +8461-278226-0001-1634: SHE MEANT TO BE SCRUPULOUSLY CONSCIENTIOUS IN THE ADMINISTRATION OF (HER TALENTS->ITALIANS) AND SOMETIMES AT CHURCH ON A SUNDAY WHEN THE (SERMON->SAME) WAS PARTICULARLY AWAKENING SHE MENTALLY DEBATED (THE->A) SERIOUS QUESTION AS TO WHETHER NEW BONNETS AND A PAIR OF (JOUVIN'S->JUBAND'S) GLOVES DAILY WERE NOT (SINFUL->SENT FOR) BUT I THINK SHE DECIDED THAT THE NEW BONNETS AND GLOVES WERE ON THE WHOLE A PARDONABLE WEAKNESS AS BEING GOOD FOR TRADE +8461-278226-0002-1635: ONE MORNING LAURA TOLD HER HUSBAND WITH A GAY LAUGH THAT SHE WAS GOING TO (VICTIMIZE->VICTIMISE) HIM BUT HE WAS TO PROMISE TO BE PATIENT AND BEAR WITH HER FOR ONCE IN A WAY +8461-278226-0003-1636: I WANT TO SEE ALL THE PICTURES THE MODERN PICTURES ESPECIALLY +8461-278226-0004-1637: I REMEMBER ALL THE (RUBENSES AT->REUBEN SAYS THAT) THE LOUVRE FOR I SAW THEM THREE YEARS AGO WHEN I WAS STAYING IN PARIS WITH GRANDPAPA +8461-278226-0005-1638: SHE RETURNED IN A LITTLE MORE THAN TEN MINUTES IN THE FRESHEST TOILETTE ALL PALE SHIMMERING BLUE LIKE THE SPRING SKY WITH (PEARL GREY->POOR GRAY) GLOVES AND BOOTS AND PARASOL AND A BONNET THAT SEEMED MADE OF AZURE BUTTERFLIES +8461-278226-0006-1639: (IT->HE) WAS DRAWING TOWARDS THE CLOSE OF THIS DELIGHTFUL HONEYMOON TOUR AND IT WAS A BRIGHT SUNSHINY MORNING EARLY IN FEBRUARY BUT FEBRUARY IN PARIS IS SOMETIMES BETTER THAN APRIL IN LONDON +8461-278226-0007-1640: BUT SHE FIXED UPON A PICTURE WHICH SHE SAID SHE PREFERRED TO ANYTHING SHE HAD SEEN IN THE GALLERY +8461-278226-0008-1641: PHILIP JOCELYN WAS EXAMINING SOME PICTURES ON THE OTHER SIDE OF THE ROOM WHEN HIS 
WIFE MADE (THIS->THE) DISCOVERY +8461-278226-0009-1642: HOW I WISH YOU COULD GET ME A COPY OF THAT PICTURE PHILIP LAURA SAID ENTREATINGLY +8461-278226-0010-1643: I SHOULD SO LIKE ONE TO HANG IN MY MORNING ROOM (AT JOCELYN'S ROCK->A JOSTLING STROKE) +8461-278226-0011-1644: SHE TURNED TO THE (FRENCH ARTIST->FRENCHARD THIS) PRESENTLY AND ASKED (HIM->THEM) WHERE THE ELDER MISTER (KERSTALL->COASTON) LIVED AND IF THERE WAS ANY POSSIBILITY OF SEEING HIM +8461-278226-0012-1645: THEY HAVE SAID THAT HE IS EVEN A LITTLE IMBECILE THAT HE DOES NOT REMEMBER HIMSELF OF THE MOST COMMON EVENTS OF HIS LIFE +8461-278226-0013-1646: BUT THERE ARE SOME OTHERS WHO SAY THAT HIS MEMORY HAS NOT ALTOGETHER FAILED AND THAT HE (IS->*) STILL ENOUGH HARSHLY CRITICAL TOWARDS THE WORKS OF OTHERS +8461-278226-0014-1647: I DON'T THINK YOU WILL HAVE ANY DIFFICULTY IN FINDING THE HOUSE +8461-278226-0015-1648: YOU (WILL BE DOING->WERE BETWEEN) ME SUCH A FAVOUR (PHILIP->FELLOW) IF YOU'LL SAY YES +8461-281231-0000-1594: HIS FOLLOWERS (RUSHED->RUSH) FORWARD TO WHERE HE LAY AND THEIR UNITED FORCE COMPELLING THE BLACK (KNIGHT->NIGHT) TO PAUSE THEY DRAGGED (THEIR->THE) WOUNDED LEADER WITHIN THE WALLS +8461-281231-0001-1595: IT WAS ON THEIR JOURNEY TO THAT TOWN THAT THEY WERE OVERTAKEN ON THE ROAD BY (CEDRIC->SEDRIC) AND HIS PARTY IN WHOSE COMPANY THEY WERE AFTERWARDS CARRIED CAPTIVE TO THE (CASTLE->COUNCIL) OF (TORQUILSTONE->TORCHLESTONE) +8461-281231-0002-1596: (AS HE->I SEE) LAY UPON HIS BED (RACKED->WRAPPED) WITH PAIN AND MENTAL AGONY AND FILLED WITH THE FEAR OF RAPIDLY APPROACHING DEATH HE HEARD A VOICE ADDRESS HIM +8461-281231-0003-1597: WHAT ART THOU HE EXCLAIMED IN TERROR +8461-281231-0004-1598: LEAVE ME AND SEEK THE SAXON (WITCH ULRICA->WHICH OIKA) WHO WAS MY TEMPTRESS LET HER AS WELL AS I (TASTE->CASE) THE TORTURES WHICH ANTICIPATE HELL +8461-281231-0005-1599: EXCLAIMED THE NORMAN (HO->OH) +8461-281231-0006-1600: (REMEMBEREST->REMEMBER AS) THOU THE MAGAZINE OF FUEL THAT IS (STORED->STOLE) BENEATH THESE APARTMENTS WOMAN +8461-281231-0007-1601: THEY ARE FAST RISING AT LEAST SAID (ULRICA->EUREKA) AND A SIGNAL SHALL SOON WAVE (TO WARN->TOWARD) THE BESIEGERS TO PRESS HARD UPON THOSE WHO WOULD EXTINGUISH THEM +8461-281231-0008-1602: MEANWHILE THE BLACK KNIGHT HAD LED HIS FORCES AGAIN TO THE ATTACK AND SO VIGOROUS WAS THEIR ASSAULT THAT BEFORE LONG THE GATE OF THE CASTLE ALONE SEPARATED THEM FROM THOSE WITHIN +8461-281231-0009-1603: THE DEFENDERS (FINDING->FIND IN) THE CASTLE TO BE ON FIRE NOW DETERMINED TO SELL THEIR LIVES AS (DEARLY->DAILY) AS THEY COULD AND HEADED BY (DE BRACY->THE BRACES) THEY THREW OPEN THE GATE AND WERE AT ONCE INVOLVED IN A TERRIFIC CONFLICT WITH THOSE OUTSIDE +8461-281231-0010-1604: THE BLACK (KNIGHT->NIGHT) WITH (PORTENTOUS->POTENT OF) STRENGTH (FORCED->FORCE) HIS WAY INWARD IN DESPITE OF (DE BRACY->THE BRAZY) AND HIS FOLLOWERS +8461-281231-0011-1605: TWO OF THE FOREMOST (INSTANTLY FELL->THING) AND THE REST GAVE WAY NOTWITHSTANDING ALL (THEIR LEADERS->THE LEADER'S) EFFORTS TO STOP THEM +8461-281231-0012-1606: THE BLACK (KNIGHT->NIGHT) WAS SOON ENGAGED IN DESPERATE COMBAT WITH THE NORMAN CHIEF AND (THE VAULTED->DEVOTED) ROOF OF THE HALL (RUNG->RANG) WITH (THEIR->THE) FURIOUS BLOWS +8461-281231-0013-1607: AT LENGTH (DE BRACY->THE BRACEY) FELL +8461-281231-0014-1608: TELL ME THY NAME (OR->O) WORK THY PLEASURE ON ME +8461-281231-0015-1609: YET FIRST LET ME SAY SAID (DE BRACY->DEBRACY) WHAT IT IMPORTS THEE TO KNOW +8461-281231-0016-1610: EXCLAIMED THE BLACK KNIGHT PRISONER AND PERISH +8461-281231-0017-1611: 
THE LIFE OF EVERY MAN IN THE CASTLE SHALL ANSWER IT IF A HAIR OF HIS HEAD BE SINGED SHOW ME HIS CHAMBER +8461-281231-0018-1612: RAISING THE WOUNDED MAN WITH EASE THE BLACK KNIGHT RUSHED WITH (HIM->THEM) TO THE (POSTERN->PASSING) GATE AND HAVING THERE DELIVERED HIS BURDEN TO THE CARE OF TWO (YEOMEN->YOUNG MEN) HE AGAIN ENTERED THE CASTLE TO ASSIST IN THE RESCUE OF (THE OTHER->THEIR) PRISONERS +8461-281231-0019-1613: BUT IN OTHER PARTS THE BESIEGERS PURSUED THE DEFENDERS OF THE CASTLE FROM CHAMBER TO CHAMBER AND SATIATED IN (THEIR->THE) BLOOD THE VENGEANCE WHICH HAD LONG ANIMATED THEM AGAINST THE SOLDIERS OF THE TYRANT (FRONT->FROM) DE BOEUF +8461-281231-0020-1614: AS THE FIRE (COMMENCED->COMMANDS) TO SPREAD RAPIDLY THROUGH ALL PARTS OF THE CASTLE (ULRICA->OR RICHA) APPEARED ON ONE OF THE TURRETS +8461-281231-0021-1615: BEFORE LONG THE TOWERING FLAMES HAD SURMOUNTED EVERY OBSTRUCTION AND ROSE TO THE EVENING SKIES (ONE->WHEN) HUGE AND BURNING BEACON (SEEN->SEEMED) FAR AND WIDE THROUGH THE ADJACENT COUNTRY (TOWER->TOWERED) AFTER TOWER CRASHED DOWN WITH BLAZING ROOF AND RAFTER +8461-281231-0022-1616: AT LENGTH WITH A TERRIFIC CRASH THE WHOLE (TURRET->TOWER) GAVE WAY AND SHE PERISHED IN THE FLAMES WHICH HAD CONSUMED HER TYRANT +8461-281231-0023-1617: WHEN THE OUTLAWS (HAD->ARE) DIVIDED THE SPOILS WHICH THEY HAD TAKEN FROM THE CASTLE OF (TORQUILSTONE->TORCHLESTONE) CEDRIC PREPARED TO TAKE HIS DEPARTURE +8461-281231-0024-1618: HE LEFT THE GALLANT BAND OF FORESTERS SORROWING DEEPLY FOR HIS LOST FRIEND THE LORD OF (CONINGSBURGH->CONNINGSBURG) AND HE AND HIS FOLLOWERS HAD SCARCE DEPARTED WHEN A PROCESSION MOVED SLOWLY FROM UNDER THE GREENWOOD BRANCHES IN THE DIRECTION WHICH HE HAD TAKEN IN THE CENTRE OF WHICH WAS THE CAR IN WHICH THE BODY OF (ATHELSTANE->ADDLESTEIN) WAS LAID +8461-281231-0025-1619: (DE BRACY->DEBRACY) BOWED LOW AND IN SILENCE THREW HIMSELF UPON A HORSE AND GALLOPED OFF THROUGH THE (WOOD->WOODS) +8461-281231-0026-1620: HERE IS A BUGLE WHICH AN ENGLISH YEOMAN HAS ONCE WORN I PRAY YOU TO KEEP IT AS A MEMORIAL OF YOUR GALLANT BEARING +8461-281231-0027-1621: SO SAYING HE MOUNTED HIS STRONG WAR HORSE AND RODE OFF THROUGH THE FOREST +8461-281231-0028-1622: DURING ALL THIS TIME (ISAAC->MISERC) OF YORK SAT MOURNFULLY APART GRIEVING FOR THE LOSS OF HIS (DEARLY->STILL IN) LOVED DAUGHTER REBECCA +8461-281231-0029-1623: AND WITH THIS EPISTLE (THE UNHAPPY->THEN HAPPY) OLD MAN SET OUT TO PROCURE HIS DAUGHTER'S LIBERATION +8461-281231-0030-1624: THE TEMPLAR IS FLED SAID (DE BRACY->THE BRACEE) IN ANSWER TO THE PRINCE'S EAGER QUESTIONS (FRONT->FROM) DE BOEUF YOU WILL NEVER SEE MORE AND HE ADDED IN A LOW AND EMPHATIC TONE (RICHARD->WRETCHED) IS (IN->AN) ENGLAND I HAVE SEEN HIM AND SPOKEN WITH HIM +8461-281231-0031-1625: HE (APPEALED->APPEARED) TO (DE BRACY->THE BRACELET) TO ASSIST HIM IN THIS PROJECT AND BECAME AT ONCE DEEPLY SUSPICIOUS OF THE (KNIGHT'S->NIGHT'S) LOYALTY TOWARDS HIM WHEN HE DECLINED TO LIFT HAND AGAINST THE MAN WHO HAD SPARED HIS OWN LIFE +8461-281231-0032-1626: BEFORE REACHING HIS DESTINATION HE WAS (TOLD->STOLE) THAT LUCAS (DE BEAUMANOIR->THE BURMANOIS) THE GRAND MASTER OF THE ORDER OF THE TEMPLARS WAS THEN ON VISIT TO THE PRECEPTORY +8461-281231-0033-1627: HE HAD NOT UNTIL THEN BEEN INFORMED (OF->TO) THE PRESENCE OF THE JEWISH MAIDEN IN THE ABODE OF THE TEMPLARS AND GREAT WAS HIS FURY AND INDIGNATION ON LEARNING THAT SHE WAS AMONGST THEM +8461-281231-0034-1628: POOR ISAAC WAS HURRIED OFF ACCORDINGLY AND EXPELLED FROM THE PRECEPTORY ALL HIS ENTREATIES AND EVEN HIS OFFERS UNHEARD AND 
DISREGARDED +8461-281231-0035-1629: THE ASSURANCE THAT SHE POSSESSED SOME FRIEND IN THIS AWFUL ASSEMBLY GAVE (HER->A) COURAGE TO LOOK AROUND AND TO MARK INTO WHOSE PRESENCE SHE HAD BEEN CONDUCTED +8461-281231-0036-1630: SHE GAZED ACCORDINGLY UPON A SCENE WHICH MIGHT WELL HAVE STRUCK TERROR INTO A BOLDER HEART THAN HERS +8461-281231-0037-1631: AT HIS FEET WAS PLACED (A->THE) TABLE OCCUPIED BY TWO SCRIBES WHOSE DUTY (IT->*) WAS TO RECORD THE PROCEEDINGS OF THE DAY +8461-281231-0038-1632: THE PRECEPTORS OF WHOM THERE WERE FOUR PRESENT OCCUPIED SEATS BEHIND (THEIR->THE) SUPERIORS AND BEHIND THEM STOOD THE ESQUIRES OF THE ORDER ROBED IN WHITE + +SUBSTITUTIONS: count ref -> hyp +36 IN -> AND +36 A -> THE +35 THE -> A +27 AND -> IN +11 AN -> AND +10 DICKIE -> DICKY +9 THIS -> THE +9 THEIR -> THE +9 THAT -> THE +9 ANNIE -> ANY +8 I -> AND +7 TO -> THE +7 THE -> THEIR +7 MURDOCH -> MURDOCK +7 MAN -> MEN +6 THE -> THEY +6 THE -> THAT +6 SIGURD -> CIGAR +6 LARCH -> LARGE +6 HER -> A +6 HAS -> IS +6 DE -> THE +6 ARCHY -> ARCHIE +5 WILFRID -> WILFRED +5 WERE -> WHERE +5 THESE -> THIS +5 THERE -> THEIR +5 THE -> TO +5 THE -> THIS +5 SINDBAD -> SINBAD +5 SHARRKAN -> SHARKAN +5 ORGANISER -> ORGANIZER +5 ON -> AND +5 OH -> O +5 OF -> A +5 KINE -> KIND +5 IT -> HE +5 IN -> AN +5 IM -> HIM +5 AT -> IT +5 ANYONE -> ANY +5 AND -> AN +4 YOU -> HE +4 WITH -> WAS +4 WAS -> IS +4 UPON -> UP +4 THIS -> HIS +4 THEY -> THERE +4 OL -> OLD +4 O -> OH +4 N'T -> NOT +4 LIL -> LITTLE +4 KNOW -> NO +4 IZZY -> IS +4 IT'S -> IS +4 IS -> HIS +4 IS -> AS +4 I -> I'M +4 HIS -> IS +4 HERMON -> HARMON +4 HAS -> HAD +4 AS -> IS +3 ZARATHUSTRA -> THEIR +3 YOU'VE -> YOU +3 YOU'RE -> YOU +3 WOULD -> WILL +3 WILL -> WOULD +3 WHEN -> WITH +3 WHEN -> AND +3 WHAT -> BUT +3 WE'RE -> WE +3 TRY -> TRIED +3 TOO -> TO +3 TO -> A +3 THIS -> THESE +3 THEY -> THEIR +3 THEY -> THE +3 THEIR -> THEY +3 THAT -> AT +3 SET -> SAID +3 SANCT -> SAINT +3 ROUND -> AROUND +3 REGIN -> RIGAN +3 RAYSTOKE -> RAYSTROKE +3 OR -> OF +3 ON -> IN +3 OL -> ALL +3 O -> OF +3 MISTAH -> MISTER +3 MISSUS -> MISS +3 KNIGHT -> NIGHT +3 JES -> JUST +3 IT -> A +3 IS -> WAS +3 INTERESTS -> INTEREST +3 I'M -> I +3 I -> I'VE +3 HIS -> THIS +3 HIM -> THEM +3 HERMON -> HERMANN +3 HERMON -> HERMAN +3 HEAR -> HERE +3 HEAR -> HAIR +3 HATH -> HAD +3 HAD -> AND +3 GOING -> GOIN +3 FAUVENT -> VENT +3 EILEEN -> AILEEN +3 DO -> TO +3 DALY -> DALEY +3 BUT -> THAT +3 BEFEL -> BEFELL +3 AT -> TO +3 AT -> AND +3 AT -> A +3 AROUND -> ROUND +3 ARE -> A +3 AND -> A +3 A -> TO +3 A -> IT +3 A -> HER +2 ZAU -> ZAO +2 ZAU -> ZA +2 ZAU -> THOU +2 YOUR -> THE +2 YOU'RE -> YO +2 YOU -> YOUR +2 YO'LL -> YOU'LL +2 YO -> YOU +2 YER -> YOU +2 YE -> YOU +2 WITH -> THE +2 WITH -> FOR +2 WINTER -> WINDOW +2 WILD -> WHITE +2 WHO -> WHOSE +2 WHILE -> WHETHER +2 WHEN -> ONE +2 WERE -> WAS +2 WAYNE -> WAIN +2 WATCHED -> WATCH +2 UP -> UPSTAIRS +2 UNDERTAKER'S -> UNDERTAKERS +2 TRIBE -> TIME +2 TORQUILSTONE -> TORCHLESTONE +2 TONIGHT -> TO +2 TO -> TOO +2 TO -> INTO +2 TIDINGS -> HIDINGS +2 THUS -> THIS +2 THROUGH -> TO +2 THOUSANDS -> THOUSAND +2 THOUGH -> THE +2 THEY -> HE +2 THERE -> THEY +2 THEN -> THAT +2 THEM -> HIM +2 THE -> THEN +2 THE -> NO +2 THE -> IT +2 THE -> DE +2 THAT'S -> THAT +2 TAMAR -> TO +2 SYRUP -> SERF +2 STONEWALL -> STONE +2 STOKER -> STOCKER +2 STAYED -> STATE +2 SON -> SUN +2 SOMEONE -> SOME +2 SO -> TO +2 SHELL -> SHELLFISH +2 SHE -> YOU +2 SHE -> SHE'LL +2 SHAWS -> SHORES +2 SHALL -> SHOULD +2 SELLER -> CELLAR +2 SEEN -> SEE +2 SEEK -> SEE +2 SEE -> SEEM +2 ROOMFELLOW -> ROOM +2 
RETURN -> RETURNED +2 RAISE -> RAISED +2 PROCLUS -> PROCLASS +2 PRIORESS -> PRIORS +2 PRIEST -> PRIESTS +2 POLL -> POLE +2 PIGEONCOTE -> PIGEON +2 PIGEONCOTE -> BEECH +2 ONE -> WHEN +2 ONCE -> WAS +2 OFF -> OF +2 OF -> HAVE +2 O -> WHO +2 NOUGHT -> NOT +2 NORTHFIELD -> NORTH +2 NOR -> NO +2 NO -> KNOW +2 NEIGHBOR -> NEIGHBOUR +2 NEAREST -> NURSE +2 MY -> BY +2 MUST -> WAS +2 MISSUS -> MISTER +2 MINE -> MY +2 MESTER -> MISTER +2 MERIT -> MARRIAGE +2 MENAHEM -> MANY +2 MEN -> MAN +2 MARSHAL -> MARTIAN +2 LUCIEN -> LUCIAN +2 LOVE -> LOVED +2 LIKED -> LIKE +2 LIDDY -> LADY +2 LEVER -> LOVER +2 LESLIE -> LIZZIE +2 LESLIE -> LIZLY +2 LA -> NEWBURG +2 KEEP -> HE +2 JURY -> CHEERY +2 JULIEN -> JULIAN +2 JAKEY -> JAKIE +2 IZZY -> IZZIE +2 ITS -> HIS +2 IT -> THIS +2 IT -> I +2 IT -> AT +2 IS -> EAST +2 INTO -> IN +2 IN'T -> IN +2 IN -> TO +2 ILU -> TO +2 IF -> OF +2 I'VE -> I +2 I'LL -> I +2 I'D -> I +2 I -> THEY +2 I -> AS +2 I -> A +2 HOZE -> HOSE +2 HOW -> HER +2 HORSTIUS -> HORSES +2 HOO'S -> WHO'S +2 HONOR -> HONOUR +2 HO -> OH +2 HIS -> THE +2 HERMON'S -> HERMANN'S +2 HERE'S -> HERE +2 HERE -> HEAR +2 HER -> THE +2 HER -> HIS +2 HER -> FOR +2 HE -> YOU +2 HE -> IT +2 HE -> HIS +2 HE -> HE'D +2 HAYS -> HAYES +2 HAVE -> HATH +2 HAS -> HATH +2 HANDS -> HAND +2 HAID -> HEAD +2 HAD -> AT +2 GURR -> GIRK +2 GURR -> GARR +2 GUNNAR -> GUNNER +2 GRANDPAP -> GRANDPA +2 GOD -> GONE +2 GIVING -> GIVEN +2 GIORGIO -> GEORGIO +2 GENTLEMEN -> GENTLEMAN +2 FRONT -> FROM +2 FROM -> FOR +2 FRANZ -> FRANCE +2 FOALS -> FOLDS +2 FOAL -> FULL +2 FESTAL -> FEAST +2 FAVOR -> FAVOUR +2 FAUCHELEVENT -> FOR +2 FAFNIR -> FAFNER +2 EYE -> I +2 EXECUTIVE -> EXECUTED +2 EUNUCH -> EUNUCHS +2 ETERNAL -> HAD +2 END -> AND +2 E'S -> HE +2 DONE -> TURNED +2 DOG -> DOOR +2 DOEST -> DOST +2 DO -> TWO +2 DEFENSE -> DEFENCE +2 DE -> DEBRACY +2 DAY -> THEY +2 CREAM -> QUEEN +2 CRAWFISH -> CROFISH +2 COUNTRY -> COUNTRIES +2 CORNER -> CORN +2 COPS -> CUPS +2 CONFECTIONARY -> CONFECTIONERY +2 CINDERLAD -> CINDER +2 CHRIS -> CRIS +2 CALIFORNIAN -> CALIFORNIA +2 BRYNHILD -> BURNEHELD +2 BROTHERS -> BROTHER'S +2 BRAHMAN -> BRAM +2 BEING -> BEEN +2 BEG -> THEY +2 AWK -> AWKWARD +2 AWHILE -> A +2 AT -> THAT +2 AT -> OF +2 ARSINOE -> ARSENO +2 ARCHBISHOP -> ARCHBISH +2 AND -> OF +2 AN -> IN +2 ALL -> OUR +2 ALL -> ON +2 AL -> A +2 AIN'T -> AND +2 ADD -> AT +2 A -> US +2 A -> OF +2 A -> I +2 A -> HAVE +2 A -> AND +1 ZEMSTVOS -> SEND +1 ZAYNAB -> THY +1 ZAU -> ZAWAIN +1 ZARATHUSTRA -> TAKE +1 ZARATHUSTRA -> GUESTRA +1 ZARATHUSTRA -> ARE +1 YUSS -> YES +1 YOUR -> YOURSELVES +1 YOUR -> YOU +1 YOUR -> HE +1 YOU'LL -> YOU +1 YOU'D -> YOU +1 YOU'D -> YE'D +1 YOU -> YOURSELVES +1 YOU -> YE +1 YOU -> TALONS +1 YOU -> SHE +1 YOU -> OFTEN +1 YOU -> IT +1 YOU -> EVEN +1 YOU -> EUGEN +1 YO' -> YOU +1 YO -> YOU'LL +1 YET -> HE +1 YET -> AT +1 YES'M -> YES +1 YES -> IS +1 YEOMEN -> YOUNG +1 YEARS -> EARS +1 YAUSKY -> YOKE +1 YAHWEH -> YONWAY +1 WUNNERED -> ONE +1 WROTE -> ONES +1 WRITE -> RIDE +1 WRIT -> WRITE +1 WOUNDS -> ONES +1 WOUNDED -> WOUNDS +1 WOULD -> HAD +1 WOULD -> DID +1 WOTTETH -> WHATETH +1 WOT -> WHAT +1 WORTH -> WORSE +1 WORSHIPPERS -> WORSHIPPED +1 WORSHIP'S -> WORSHIP +1 WORRY -> WERE +1 WORLD -> WOOLWRIGHT +1 WORKS -> WORK +1 WORKMAN -> WORKMEN +1 WORKINGMEN -> WORKING +1 WORKED -> WORTH +1 WORK -> WORKADAY +1 WORD -> WORTH +1 WORD -> WORDS +1 WOOD -> WOODS +1 WONDERED -> WANTED +1 WONDER -> WANDER +1 WON'T -> WOULD +1 WON'T -> WELL +1 WOMEN -> AND +1 WOMAN -> WOMEN +1 WOKE -> WALKING +1 WOE -> WON'T +1 WODE'S -> WORDS +1 WITH -> WOULD +1 
WITH -> WHICH +1 WITH -> TO +1 WITH -> MY +1 WITH -> ITS +1 WITH -> IT +1 WITH -> BUT +1 WITCH -> WHICH +1 WISHT -> WISHED +1 WIRES -> WIVES +1 WING -> WINGED +1 WINE -> WHITE +1 WILLY -> BILLY +1 WILL -> WILT +1 WILL -> WHEEL +1 WILL -> WERE +1 WILL -> WELL +1 WILL -> WE'LL +1 WILL -> WE +1 WILL -> WAS +1 WILL -> TO +1 WILL -> REVOTE +1 WILL -> OLIO +1 WILKSES -> WILKES +1 WILKS -> WILKES +1 WILFRID -> WILFRIED +1 WILFRID -> MILFRED +1 WILFRID -> LOYAL +1 WILDLY -> WIDELY +1 WILDEST -> WALLA'S +1 WILDERNESS -> WORDERNESS +1 WILDERNESS -> WEDDINGS +1 WILDERNESS -> WEARINESS +1 WILDERNESS -> LINEN +1 WIFE -> WI +1 WIFE -> MY +1 WIELD -> WHEEL +1 WIDEAWAKE -> WIDE +1 WICKER'S -> ROOKER'S +1 WICKER'S -> OCCUR'S +1 WICKER'S -> HOOKER'S +1 WICKER -> WICKER'S +1 WI -> WITH +1 WHY -> WHAT +1 WHO'D -> WHO +1 WHO -> WITH +1 WHO -> ONE +1 WHO -> HER +1 WHITE -> WORLD +1 WHITE -> WIDE +1 WHITE -> WHITEWAY +1 WHISKERED -> WHISKIRT +1 WHILOME -> WILHELM +1 WHICH -> WITCH +1 WHICH -> SPEECH +1 WHICH -> REACHED +1 WHICH -> PITCHED +1 WHETHER -> WEATHER +1 WHEREABOUTS -> WHEREABOUT +1 WHERE'S -> WHERE +1 WHERE'S -> WAS +1 WHERE -> WITH +1 WHERE -> WERE +1 WHERE -> THERE +1 WHERE -> HER +1 WHER -> WERE +1 WHENEVER -> THE +1 WHEN -> WHO +1 WHEN -> WENT +1 WHEN -> IN +1 WHEELER -> WHALER +1 WHEEL -> BE +1 WHATE'ER -> WHATEVER +1 WHAT'S -> ONCE +1 WHAT -> WOULD +1 WHAT -> WITH +1 WHAT -> IT +1 WHAT -> FOR +1 WHAT -> AT +1 WHAT -> A +1 WHACKS -> WAX +1 WESTPORT -> WESTWARD +1 WERE -> WITH +1 WERE -> WENT +1 WERE -> RAN +1 WERE -> BURNED +1 WENT -> WON +1 WENT -> WHEN +1 WELLS -> WELL +1 WELLS -> WALES +1 WELL -> WILL +1 WELL -> WHY +1 WELL -> WHILE +1 WEIGHED -> WADE +1 WEEVILLY -> WEEVILY +1 WEEDS -> READS +1 WEBB'S -> WEBBS +1 WE'VE -> REVOLTA +1 WE'RE -> WERE +1 WE -> WILL +1 WE -> WASTED +1 WAYNE -> IN +1 WAVERLY -> WAVERLEY +1 WATONWAN -> WATERWAM +1 WATER -> WALL +1 WATCHMAKER'S -> WATCHMAKERS +1 WAS -> WITH +1 WAS -> WIS +1 WAS -> WHICH +1 WAS -> WHEREAS +1 WAS -> MUST +1 WAS -> ALWAYS +1 WARM -> WRONG +1 WARDERS -> ORDERS +1 WARD'S -> WORDS +1 WARD -> TOWARD +1 WAR -> FOR +1 WANTED -> WATER +1 WANT -> WARNED +1 WANDERER -> WONDER +1 WALL -> WAR +1 WALK -> BUCK +1 WAKE -> AWAKE +1 WAITING -> WINNING +1 WAITIN -> WAITING +1 WAGGOT -> RAGGED +1 WAGGING -> WORKING +1 WAGED -> RAGED +1 WADED -> WAITED +1 WADDED -> WATERED +1 VOWELS -> VOWALS +1 VOUGHT -> WALT +1 VOTES -> VAULTS +1 VOTES -> BOATS +1 VOMITING -> WARMITTING +1 VOLVITUR -> VOLVETER +1 VOICED -> OUTWARDS +1 VIOLENCE -> VIOLENT +1 VILLAGES -> RELIGIOUS +1 VILLAGERS -> VILLAGES +1 VIL -> VILLE +1 VIGILANCE -> EACH +1 VICTIMIZE -> VICTIMISE +1 VESTRY -> VETERY +1 VERY -> WHERE +1 VERSES -> VERSEST +1 VERILY -> VERY +1 VENTRILOQUIST -> VENTILLA +1 VEHEMENTLY -> TO +1 VAVASOUR -> VAVASOR +1 VAULT -> WALL +1 VAUGIRARD -> ROGER +1 VAST -> VATS +1 VASSILIEVITCH -> WISLOVITCH +1 VANE -> VAIN +1 VALET -> VALLEY +1 VAIN -> VEIN +1 UTTER -> OTHER +1 USUALLY -> USUAL +1 USE -> YEARS +1 USE -> USED +1 US -> UP +1 US -> TO +1 US -> STARED +1 URGED -> ADDED +1 URARTU -> URITU +1 URARTU -> URA +1 URARTIAN -> GRACIAN +1 UPON -> A +1 UP -> STREET +1 UNTO -> TO +1 UNTO -> INTO +1 UNTO -> AND +1 UNS -> UNSTEAD +1 UNREWARDED -> IN +1 UNOCCUPIED -> ON +1 UNLESS -> AND +1 UNIQUE -> EUIK +1 UNHESITATINGLY -> AM +1 UNHAPPY -> HAPPY +1 UNEXPECTEDLY -> UNEXPECTED +1 UNDER -> AND +1 UNCLE -> AND +1 UNADULTERATED -> AN +1 UN -> ONE +1 ULTIMATELY -> ULTIMATE +1 ULRICA -> OR +1 ULRICA -> OIKA +1 ULRICA -> EUREKA +1 UKINZER -> A +1 UDDER -> UTTER +1 TYRANNY -> SURNING +1 
TWYMAN'S -> TWIMMAN +1 TWO -> TOO +1 TWO -> TO +1 TWO -> JEW +1 TWO -> DO +1 TWELVEMONTH -> TWELVE +1 TURRET -> TOWER +1 TURNS -> TURNED +1 TUBERCULOUS -> TUBERK +1 TRYING -> CRYING +1 TRY -> TROT +1 TRY -> TRIFLE +1 TRUSTEE -> TRUSTY +1 TRUNDLED -> TUMBLED +1 TRULY -> JULIE +1 TRIVET -> TRIBUT +1 TRIPES -> TRITE +1 TRIFLE -> TRAVEL +1 TRIBES -> TRIUMPHS +1 TRIBES -> TRINES +1 TRELAWNEY -> TREEONER +1 TREASURE -> TREASURES +1 TRAVELED -> TRAVELLED +1 TRANSSHIP -> TRANSHIP +1 TRAINING -> TRAINED +1 TRAINDAWG -> TRAIN +1 TRAFFIC -> EFFIC +1 TRADITIONS -> JUDICINES +1 TRADEMARK -> TRADE +1 TRACK -> CHECK +1 TRACED -> PRAISED +1 TOWER -> TOWERED +1 TOWARDS -> DOOR +1 TOWARD -> TOWARDS +1 TOWARD -> TO +1 TOUR -> TO +1 TOUGHS -> TUFTS +1 TOUGH -> TO +1 TOUCHED -> TOUCH +1 TORMENT -> TOM +1 TORE -> TOILED +1 TORCH -> TORTURE +1 TOP -> STOPPED +1 TOOK -> INTO +1 TOO -> TWO +1 TOO -> SHIMMERTS +1 TOO -> DO +1 TONGUE -> TONG +1 TOMORROW -> TO +1 TOMB -> TWO +1 TOM -> TUMBLED +1 TOLERBLE -> TOLERABLE +1 TOLD -> STOWED +1 TOLD -> STOLE +1 TOILET -> TALLED +1 TO -> WITH +1 TO -> TWO +1 TO -> TURNED +1 TO -> TRITES +1 TO -> TOWARD +1 TO -> THROUGH +1 TO -> THEATER +1 TO -> THAT +1 TO -> SHE +1 TO -> REFORMED +1 TO -> O +1 TO -> MADE +1 TO -> IN +1 TO -> HURT +1 TO -> FOR +1 TO -> DIRECTIFY +1 TO -> DID +1 TO -> CHOOSE +1 TO -> AT +1 TITLE -> TOWN +1 TIRING -> ENTIRE +1 TIRESOME -> PARASAN +1 TIRED -> TIE +1 TIRED -> HIRED +1 TIMES -> TIME +1 TIME -> TELL +1 TILLERS -> TELLERS +1 TILL -> TO +1 TIGLATH -> TIC +1 TIGLATH -> TAKE +1 TIGLATH -> T +1 TIGLATH -> DICK +1 TIGHTENING -> TIGHTENED +1 TIGER -> TYER +1 TIGER -> DRAGGER +1 THY -> THINE +1 THY -> THEIR +1 THY -> I +1 THY -> DAGGULE +1 THUS -> DOES +1 THUS -> AND +1 THUMB -> TEMP +1 THROWING -> THREW +1 THRIFTILY -> DRIFTILY +1 THREE -> THIRD +1 THOUGH -> THAT +1 THOUGH -> THAN +1 THOU -> THOUGH +1 THOU -> NOW +1 THOU -> DONE +1 THIS -> US +1 THIS -> THAT +1 THIS -> MISSUS +1 THIS -> ITS +1 THIS -> DISTAGGER +1 THIRST -> THOSE +1 THIRST -> DOZ +1 THINK -> THINKING +1 THINGS -> THANKS +1 THINGS -> SPACE +1 THIN -> FLIND +1 THEY'RE -> THEY +1 THEY'RE -> THERE +1 THEY'RE -> THEIR +1 THEY'RE -> HER +1 THEY'RE -> ARE +1 THEY -> YOU +1 THEY -> I +1 THEY -> DECLINE +1 THESE -> THE +1 THESE -> HIS +1 THESE -> DISTRACT +1 THERE'S -> THERE +1 THERE -> WRISTS +1 THERE -> THERE'S +1 THERE -> THEN +1 THERE -> THAT +1 THERE -> TERRANT +1 THEN -> WHEN +1 THEN -> THAN +1 THEN -> DIDN'T +1 THEN -> AND +1 THEM -> NO +1 THEIR -> THEM +1 THEIR -> HER +1 THEIR -> DEAR +1 THEE -> ME +1 THEATRE -> FUTURE +1 THE -> YEARS +1 THE -> WELLS +1 THE -> VERY +1 THE -> UPON +1 THE -> THESE +1 THE -> THEM +1 THE -> OUR +1 THE -> OTHER +1 THE -> OPINION +1 THE -> MISTAKES +1 THE -> LOVE +1 THE -> LEDA +1 THE -> KIND +1 THE -> JACO +1 THE -> INSECTORS +1 THE -> HIS +1 THE -> HER +1 THE -> HALF +1 THE -> FREDERI +1 THE -> FOR +1 THE -> DRESSED +1 THE -> DID +1 THE -> DEVOTED +1 THE -> DEBRAMIN +1 THE -> BUT +1 THE -> BUILD +1 THE -> ASHORE +1 THE -> AND +1 THE -> ABIDING +1 THAT'S -> THE +1 THAT'S -> I +1 THAT'LL -> THOU +1 THAT -> WITH +1 THAT -> TILL +1 THAT -> THY +1 THAT -> THEY +1 THAT -> THEIR +1 THAT -> THAN +1 THAT -> STEP +1 THAT -> SET +1 THAT -> NOT +1 THAT -> MY +1 THAT -> LITTLE +1 THAT -> LET +1 THAT -> INTER +1 THAT -> DAT +1 THAT -> BROUGHT +1 THAN -> THEN +1 THAN -> AND +1 TERRA -> TERRACE +1 TEND -> INTEREST +1 TEN -> TOWN +1 TEMPTETH -> TEMPTED +1 TEMPLES -> TEMPLE +1 TEMPLAR -> TENT +1 TELLTALE -> TELLS +1 TELLTALE -> TELL +1 TELEGRAM -> TELEGRAMAS +1 TEAM -> TEEM +1 
TEAL -> TEALE +1 TEA -> TUNO +1 TATTLERS -> TEDLER +1 TASTE -> CASE +1 TASKMASTER -> TAX +1 TARDY -> TIDY +1 TAPPED -> TOP +1 TAPIS -> TAPPY +1 TANQUAM -> TEN +1 TALMASH -> THOMMISH +1 TALKED -> TALKS +1 TALK -> TALKED +1 TALENTS -> OR +1 TAKEN -> TAKING +1 TAKE -> THEY +1 TAHITI -> INDEEDY +1 T'OTHER -> THE +1 SYRUP -> CYRUP +1 SYRIA -> ASSYRIA +1 SYNONYMON -> SNYM +1 SYMPATHY -> SMATHY +1 SYLLOGISM -> DILIGION +1 SYDNEY -> SIDNEY +1 SWORD -> SORT +1 SWELP -> SWAB +1 SWARTHY -> SWALLTY +1 SWAG -> WAG +1 SURELY -> TRULY +1 SUPPOSE -> S'POSE +1 SUMTHIN -> SOMETHING +1 SULPHURIC -> SUFFERG +1 SUFFICIENT -> SUSPICION +1 SUFFICES -> SURFACES +1 SUE -> UM +1 SUDDEN -> CERTAIN +1 SUCKED -> SACKED +1 SUCCOURS -> SECURS +1 SUB -> SUBTRINE +1 STRUGGLE -> STRUGGLING +1 STRUCK -> UP +1 STRUCK -> MIGHT +1 STRIKE -> STRIKEBREAKERS +1 STRIFE -> STRIPE +1 STREET -> DISTRICT +1 STREET -> AT +1 STREAK -> STREET +1 STRANGEST -> STRANGERS +1 STRANGE -> STRAIN +1 STRAITS -> STRAIT +1 STRAITENED -> STRAIGHT +1 STRAINS -> TRAINS +1 STRAIGHT -> STRAYED +1 STORES -> STORIES +1 STORED -> STOLE +1 STORE -> WORKED +1 STOOD -> TOO +1 STONEWALL -> STERN +1 STONED -> STONE +1 STOCK -> STOCKING +1 STILLNESS -> WHICH +1 STILL -> STEALING +1 STILL -> SO +1 STICK -> STICKET +1 STEWPAN -> STEWPANT +1 STEW -> DO +1 STERN -> STERNMOST +1 STEPPED -> STEPS +1 STEP -> SPATANI +1 STEEVER -> STEVER +1 STEERING -> STIRRING +1 STAYING -> SEEING +1 STAYED -> STAY +1 STATUS -> STRATORS +1 STATURE -> STATUE +1 STATES -> ESTATES +1 STATED -> IS +1 STATE -> STATES +1 STATE -> STATEROOM +1 STAS -> STARS +1 STARVING -> STARLING +1 STARK -> START +1 STARES -> TEARS +1 STARED -> STEERED +1 STANDSTILL -> FAN +1 STANDS -> SENDS +1 STANDARD -> STANDARDS +1 SQUEAMISH -> SCREAMY +1 SPONSUS -> QUALMS +1 SPONGE -> SPINES +1 SPONDYLES -> SPAWN +1 SPIRIT -> EXPERIOR +1 SPILLING -> SPINNING +1 SPICE -> SPIES +1 SPARSELY -> FIRSTLY +1 SPAKE -> TAKE +1 SPAKE -> BEING +1 SPAKE -> BEG +1 SPADDLE -> SPADEL +1 SOUTHERN -> SUDDEN +1 SOUSE -> SOUS +1 SOURCE -> SORT +1 SOUGHT -> THOUGHT +1 SOUGHT -> SOWED +1 SOTELES -> SARTUOUS +1 SORDID -> SARDID +1 SOOT -> SUIT +1 SONG -> SUN +1 SON -> SONG +1 SOME -> THEM +1 SOMBER -> SOMBRE +1 SOFA -> SILVER +1 SOCIALIST -> SOCIALLY +1 SO -> SUMINUTELY +1 SO -> SOUL +1 SO -> SOPHIA +1 SO -> SARKAIUS +1 SO -> MISS +1 SNARLED -> SNARLS +1 SMOLNY -> MORLEY +1 SMOKER -> MOTOR +1 SMIRCHED -> SMARCHED +1 SMILE -> MIND +1 SMELL -> SMAR +1 SMALL -> ALL +1 SLUNK -> SUNK +1 SLIGHTLY -> SAT +1 SLACKENED -> CLACK +1 SLAB -> FLAP +1 SKYLARKS -> SKYLECKS +1 SKIRT -> GOOD +1 SKEW -> SKIRO +1 SKEPTICAL -> SCEPTICAL +1 SIXES -> SIX +1 SIRE -> SIRES +1 SIR -> SO +1 SIR -> BECAME +1 SINUHIT -> IT +1 SINGS -> SANGS +1 SINGA -> SINGAFUT'S +1 SINGA -> SHING +1 SING -> SINGING +1 SINFUL -> SENT +1 SINE -> IN +1 SIMULATES -> SIMILATES +1 SILVER -> SILVERY +1 SILLY -> DITTY +1 SILENCE -> SCIENCE +1 SIGURD -> SIR +1 SIGURD -> CIGARS +1 SIGURD -> CIGARET +1 SIGNOR -> SENOR +1 SIGNING -> SUNNING +1 SIGHING -> SIGNED +1 SIEVE -> SEA +1 SIDE -> THOUGHT +1 SIDE -> SORT +1 SIDE -> SIGHT +1 SHUT -> AT +1 SHUMAN -> SON +1 SHUCKS -> SHOCKS +1 SHRUBS -> SHRUGS +1 SHOULDST -> SHOULDEST +1 SHOULDERS -> SHOULDER +1 SHOULD -> YOU +1 SHOULD -> SHALL +1 SHOT -> HAD +1 SHO'LY -> SURELY +1 SHIPS -> THE +1 SHIPS -> SHIP +1 SHET -> SHUT +1 SHERIFF -> SURE +1 SHERIFF -> SIR +1 SHERIFF -> CHEER +1 SHERE -> TO +1 SHERBURN'S -> SHERBURNE'S +1 SHEPHERD -> SHEPARD +1 SHEETS -> SEATS +1 SHED -> SHARED +1 SHE'LL -> YE'LL +1 SHE -> SEA +1 SHE -> HE +1 SHATTERED -> SATURDAIL +1 
SHARDURIS -> SHOW +1 SHARDURIS -> SHORDURUS +1 SHARDURIS -> SHALL +1 SHALLUM -> CELEM +1 SHALL -> TO +1 SHALL -> OUR +1 SHAKEDOWN -> SHAKE +1 SHAG -> SHAGG +1 SEYTON -> SETTON +1 SEWING -> SOON +1 SEVERE -> SAVOUR +1 SEVERAL -> SEVEREND +1 SEVENTIETH -> SEVENTEENTH +1 SEVEN -> THE +1 SET -> SAT +1 SERMON -> SAME +1 SERGEY -> SURGY +1 SERGEY -> SOJOURNOVITCH +1 SERGEY -> SOJI +1 SERGEY -> SO +1 SENTENCED -> INTENSE +1 SENT -> SAT +1 SENSITIVE -> SCENTED +1 SEEST -> CEASE +1 SEEN -> SEEMED +1 SEEMS -> SEEMED +1 SEEMED -> SEEMS +1 SEEMED -> SEEMING +1 SEEM -> SEEMED +1 SEEM -> SEE +1 SEEKEST -> SEEK'ST +1 SEDUCETH -> SEDUCE +1 SEAT -> HEAT +1 SEAMEN -> SEAMAN +1 SEAMAN -> SIMON +1 SEAMAN -> SALMON +1 SEA -> SEAS +1 SCULPTORS -> SCULPTOR'S +1 SCRUTINISED -> DISTRESS +1 SCRIBES -> GRIBES +1 SCRAPPIN -> SCRAP +1 SCORN -> CORN +1 SCO'TCH -> SCORCHED +1 SCHULBERG'S -> SHOALBURG'S +1 SCHOOLDAYS -> SCHOOL +1 SCHOOL -> SCHOOLGIRLS +1 SCHOOL -> SCHOOLBOY +1 SCAPEGRACES -> CAVE +1 SCAPED -> ESCAPED +1 SCAPE -> ESCAPE +1 SAYS -> TEETH +1 SAYS -> SAY +1 SAYS -> AS +1 SAYIN -> SAYING +1 SAY -> SO +1 SAY -> SEE +1 SAY -> SAVE +1 SAW -> SOLDOM +1 SATURDAY -> SAID +1 SAT -> SET +1 SARAH -> SEREN +1 SANS -> SONSPIER +1 SANITARY -> SENATORY +1 SANG -> IN +1 SAND -> SANDWARES +1 SANCTESS -> SANCTIS +1 SANCHO -> SANCHA +1 SAN -> SAMPANCISCO +1 SALONE -> SALON +1 SALLOWER -> SALARY +1 SAILS -> SAILORS +1 SAIL -> SO +1 SAIL -> SAILOR'S +1 SAID -> TO +1 SAID -> STOOD +1 SAID -> SET +1 SAID -> PSALMS +1 SAID -> OF +1 SAH -> SAD +1 SAH -> A +1 SAGITTAIRE -> SAGATURE +1 S -> HELEN +1 RYO -> RIO +1 RUSSIA -> A +1 RUSHED -> RUSH +1 RUNS -> ONE'S +1 RUNG -> RANG +1 RUN -> WARM +1 RUN -> RAN +1 RUN -> ENOUGH +1 RUMP -> WRONG +1 RUM -> ROMAN +1 RULER -> SPOTIC +1 RUBENSES -> REUBEN +1 RUBBERS -> WRAPPERS +1 ROXBURY -> BRAXBURY +1 ROWED -> RIDE +1 ROUTE -> ROUGH +1 ROUNDED -> ROUTED +1 ROTHS -> WORSE +1 ROSE -> AROSE +1 ROSAMUN -> ROSAMOND +1 ROPE'S -> ROPES +1 ROPE'S -> HOPES +1 ROOTS -> ROOFS +1 ROOM -> RUM +1 ROOM -> ROOMS +1 RONALD -> RANALD +1 ROMANCE -> ROMANS +1 ROLL -> RAW +1 ROCK -> STROKE +1 ROARING -> ROWING +1 ROAD -> RULED +1 RISDON -> RISDEN +1 RISDON -> RICHMOND +1 RINGMASTER -> RING +1 RIGOROUS -> RECKLESS +1 RIGHT -> WRITE +1 RIGHT -> THREAT +1 RIGHT -> RIPE +1 RIDGE'S -> RICHES +1 RIDER -> WRITER +1 RIDER -> RATTERAS +1 RID -> INTO +1 RICHARD -> WRETCHED +1 RHODIAN -> RODIAN +1 RHODIAN -> RADIAN +1 RHODES -> ROADS +1 REVOLUTIONISTS -> REVOLUTION +1 REVOLUTIONIST -> REVOLITIONIST +1 REVOLTE -> REVOLT +1 REVEREND -> ROBIN +1 REVELATION -> RELATION +1 RETZCH'S -> WRETCHES +1 RETURNED -> TURN +1 RETURNED -> RETURN +1 RETURN -> RETURNING +1 RESUMED -> JUNE'S +1 RESTORETH -> RESTORE +1 RESTIVE -> WRETS +1 RESOLVED -> WE +1 RESK -> REST +1 RESISTING -> FIND +1 RESINOUS -> ZENOUS +1 RESCUED -> RESCUE +1 REQUIRE -> REQUIRRE +1 REQUEST -> QUEST +1 REPUTATION -> REPETITION +1 REPLY -> NEDCOV +1 REPEATED -> REPLIED +1 REND -> RUN +1 REMISSION -> REMISSIONOUS +1 REMEMBEREST -> REMEMBER +1 REMAINED -> REMAINS +1 REMAIN -> REMAINED +1 RELEASED -> RELISSE +1 RELATED -> RELIGHTED +1 REJECT -> REJECTED +1 REIGNS -> REIGN +1 REIGNED -> RAINED +1 REGULATION -> REGULATING +1 REGIN -> REGAN +1 REGARDING -> GUARDING +1 REG'LER -> REGULAR +1 REFORMERS -> REFUSE +1 REFORMERS -> REFORMED +1 REFORM -> FORM +1 REELECTION -> RE +1 RED -> REDMOCKERS +1 RECTUM -> RECTIM +1 RECEDED -> WAS +1 RECAPTURED -> RE +1 REAP -> READ +1 REALISED -> REALIZED +1 READY -> RATHER +1 REACHED -> RAGED +1 RAYSTOKE -> RAY +1 RAY -> WRAYE +1 RATTLING -> 
RIDERING +1 RATHER -> READY +1 RATHER -> JOHN +1 RASCALS -> RASCAL +1 RAPSCALLIONS -> RATCALIONS +1 RAPPERS -> WRAPPERS +1 RANSOM -> RANDOM +1 RAN -> RUN +1 RAMSES -> RANSES +1 RAM -> ROOM +1 RAISE -> FOR +1 RAINY -> RAINING +1 RAIN -> REIN +1 RAID -> RAY +1 RAGE -> RATE +1 RADPROP -> REDRUP +1 RACKETEERS -> RAGATIRS +1 RACKED -> WRAPPED +1 RACHEL -> RIGIDLY +1 RACES -> RAYS +1 RABBITS -> RABBIT'S +1 RABB'S -> RABBS +1 QUOTH -> QUOMAN +1 QUMMUKH -> KUMAK +1 QUITE -> ACQUAINTED +1 QUICK -> QUICKLY +1 QUEST -> PRICE +1 QUANTRELL -> QUANTREL +1 QUANTITIES -> QUALITIES +1 QUACKS -> CLACKS +1 PYM -> POEM +1 PUTTING -> PUT +1 PUTTEL -> PATTERN +1 PUTS -> BUT +1 PUT -> WHENEVER +1 PUT -> PULL +1 PURPORTING -> REPORTING +1 PUMP -> PUMPED +1 PULLEY -> POLY +1 PULLED -> POURED +1 PUDDLES -> POTTLES +1 PSALM -> THE +1 PRYTANEUM -> BRITTANNIUM +1 PRUDENT -> CARTS +1 PROTECTORATE -> PROTECTOR +1 PROPRE -> A +1 PROMOTIVE -> PROMOTED +1 PROHIBITION -> PROBES +1 PROFESSION -> PROFESSIONS +1 PRODUCES -> PRODUCED +1 PRODUCED -> PRODUCE +1 PRODUCE -> PRODUCED +1 PROCLUS -> PROCLAUS +1 PROAS -> PROVIDES +1 PROAS -> PROTS +1 PROAS -> PERHAPS +1 PROA -> PROW +1 PROA -> PRAYER +1 PRISON -> PRISONED +1 PRIORESS -> PYRIUS +1 PRIORESS -> PRIOR +1 PRIORESS -> PIRATES +1 PRIORESS -> PIRASS +1 PRINCIPALS -> PRINCIPLES +1 PRIMER -> PRIMARY +1 PRIEST -> PREACH +1 PRETTY -> BERTIE +1 PRESTIGE -> PRESAGE +1 PRESERVE -> PRESENT +1 PRENTICESHIP -> PRENTICE +1 PRECEPTORS -> PERCEPTORS +1 PREACHED -> PREACH +1 PRAYERS -> PRAY +1 PRAM -> IN +1 PRAISEWORTHY -> PRAISED +1 PRACTITIONER -> PETITIONERS +1 POVERTY -> DISPOVERTY +1 POVERTY -> BAVARY +1 POURED -> PUT +1 POUCHES -> PIUCHES +1 POTUM -> HE +1 POTION -> FORTUNE +1 POTASSIUM -> POTASSIAN +1 POSTHASTE -> POST +1 POSTERN -> PASSING +1 POST -> POSTS +1 PORTENTOUS -> POTENT +1 PORED -> POURED +1 POPULACE -> POPULOUS +1 POOR -> PORT +1 POOR -> FAR +1 POMEROY -> POMROY +1 POLYTECHNIC -> POLY +1 POLONIUS -> BONIUS +1 POLL -> PAUL +1 PLUMB -> PLUM +1 PLEASANT -> PRESENT +1 PLEAS -> PLACE +1 PLEADED -> PLAYED +1 PLATTERBAFF'S -> PLATTERBUFF'S +1 PLATTERBAFF -> PLATTERBUFF +1 PLATTERBAFF -> FURTHER +1 PLATED -> LIKE +1 PLANET -> PLAN +1 PLAIN -> PLAY +1 PLAIN -> PLANE +1 PLACE -> PLATES +1 PLACE -> PLACED +1 PLACE -> PACE +1 PITTS -> FITZ +1 PITHUM -> PITTHAM +1 PISTOLES -> PISTOL +1 PIPE -> PEG +1 PINKUS -> PICK +1 PINKERTON'S -> PINKERTIN'S +1 PILESER -> LAUGHED +1 PILESER -> LAUGH +1 PILESER -> LAST +1 PILESER -> GLASS +1 PIKES -> PIPES +1 PIGEONCOTES -> PIGEON +1 PIGEONCOTE -> PIGEONOTE +1 PIGEONCOTE -> PEACH +1 PIGEONCOTE -> DIGESON +1 PIERO -> PIERRE +1 PIECE -> PEACE +1 PICTURE -> PITCHER +1 PICKED -> PRICKED +1 PHUT -> FIVE +1 PHUT -> AFOOT +1 PHOSPHOR -> PHOSPHOBS +1 PHOSPHOR -> PHOSPHER +1 PHILISTINES -> FURTHER +1 PHILIPPUS -> PHILIP +1 PHILIP -> FELLOW +1 PETREL -> PETEL +1 PETER'S -> PETER +1 PETER -> EITHER +1 PETER -> BEATER +1 PESTE -> PESTS +1 PERSPIRED -> POISPIED +1 PERNOUNCE -> PRONOUNCE +1 PERFECTLY -> PERFECTUALLY +1 PERE -> PERFELASHES +1 PERE -> PALACE +1 PERAMBULATOR'S -> PRIME +1 PEONAGE -> PINIONS +1 PENDING -> SPENDING +1 PEKAHIAH -> PECAH +1 PEASANTS -> PIECE +1 PEAS -> PEASE +1 PEARL -> POOR +1 PAY -> PASS +1 PAWNBROKER -> BROKER +1 PAUSED -> PASSED +1 PATIENCE -> PATENTS +1 PASTES -> PACE +1 PAST -> PASS +1 PASSES -> PAUSES +1 PASSED -> PASS +1 PARTS -> PART +1 PARTLY -> PARTIALLY +1 PARTINGS -> PARTING +1 PART -> PARTS +1 PARRICIDES -> PARASITES +1 PARR -> PAR +1 PARKS -> BOX +1 PARDON -> PARTISER +1 PANNIERS -> PENNYERS +1 PALLIATE -> PALE +1 PALL -> 
POOL +1 PALAESTRA -> PELLESTRA +1 PAIR -> PARENT +1 PADDLING -> PADDLIN +1 PACES -> PLACES +1 PACE -> THE +1 P -> PATUM +1 OWNERS -> LANDOWNERS +1 OWN -> ON +1 OWE -> ARE +1 OW'M -> AM +1 OW -> HOW +1 OVERRIPENESS -> OVER +1 OVERHEARD -> OWNED +1 OVERFULL -> OVER +1 OVERFLOWING -> OVERWHELMING +1 OVER -> OVERPRUDENT +1 OVER -> OF +1 OUTLINES -> OUTLINE +1 OUTER -> OUTER'S +1 OUT -> YET +1 OUT -> UP +1 OUT -> SOUTH +1 OUT -> OUTGAZE +1 OUT -> ON +1 OUT -> DOUBT +1 OUT -> ALL +1 OUT -> ALBEA +1 OUR -> UNDERSTRUCTION +1 OUR -> OURSPORT +1 OUR -> OURSAN +1 OUR -> I'LL +1 OUR -> HER +1 OUR -> ARE +1 OUNCES -> OZ +1 OUGHTN'T -> OUGHT +1 OUGHT -> ARE +1 OUGHT -> ALL +1 OUEN -> JOIN +1 OTHER'S -> OTHERS +1 ORIENTAL -> OF +1 ORFICER -> ORFASTER +1 ORDER -> ODO +1 ORBIS -> ORBUS +1 OR -> OUR +1 OR -> OPPOSITION +1 OR -> OH +1 OR -> O +1 OR -> I +1 OR -> FULL +1 OR -> AS +1 OR -> ARE +1 OR -> AND +1 OR -> ALL +1 OPPRESSORS -> IMPRESSORS +1 OPENED -> OPEN +1 OPEN -> UP +1 OPE -> LOVE +1 ONTO -> ON +1 ONTO -> INTO +1 ONLY -> OWING +1 ONLY -> OLD +1 ONE -> WORLD +1 ONE -> WON +1 ONE -> WINE +1 ONE -> A +1 ONCE -> ONE'S +1 ON'T -> ON +1 ON -> WHEN +1 ON -> UPON +1 ON -> UNTO +1 ON -> UNLUCK +1 ON -> UNCREAM +1 ON -> UNCHANGED +1 ON -> TILL +1 ON -> HONOUR +1 ON -> DOWN +1 ON -> DOING +1 OME -> HOME +1 OLL -> ALL +1 OLIVE -> OUT +1 OLIVE -> ONLY +1 OLIVE -> OLIV +1 OLIVE -> OLDEST +1 OLIVE -> I +1 OLD -> O +1 OLD -> ALL +1 OKAY -> OH +1 OIL -> ORE +1 OH -> UH +1 OFFICES -> OFFICERS +1 OFFICERS -> OFFICIALS +1 OFFICER -> OFFICERS +1 OFFENSE -> OFFENCE +1 OFFEN -> OFF +1 OFF -> UP +1 OFF -> OUR +1 OFF -> MORVE +1 OF -> WHAT +1 OF -> VAZARRE +1 OF -> TO +1 OF -> THIS +1 OF -> THE +1 OF -> THAT +1 OF -> ON +1 OF -> O +1 OF -> INTO +1 OF -> IN +1 OF -> HER +1 OF -> AT +1 OF -> ASSUME +1 OF -> AND +1 OBOCOCK -> OBEY +1 OBJECT -> SUBJECT +1 O'NIGHTS -> A +1 O'NEILL -> O'NEIA +1 O -> OR +1 O -> OITERNAL +1 O -> ARE +1 O -> AM +1 O -> ALL +1 O -> A +1 NYTOUCH -> KNIGHT +1 NUZHAT -> UZHAT +1 NUZHAT -> NUZHA'S +1 NUZHAT -> KNOWSAT +1 NUTS -> KNOTS +1 NUNS -> NUN'S +1 NUISANCE -> NOTIONS +1 NU'UMAN -> NUMAN +1 NOWT -> NOW +1 NOW -> SO +1 NOW -> ON +1 NOW -> NO +1 NOW -> NABRAMAN +1 NOTTINGHAM -> NINE +1 NOTICE -> NOTICED +1 NOT -> REPEAT +1 NOT -> PERCHED +1 NOT -> NOW +1 NOT -> NOR +1 NOT -> NIGH +1 NOT -> KNOWN +1 NOT -> IT +1 NORTH -> NORTHEAST +1 NOPE -> NOTE +1 NONETHELESS -> NONE +1 NONE -> NON +1 NO -> THOUGH +1 NO -> NOW +1 NICO -> NIGO +1 NICKEL -> FLASH +1 NEXTER -> NEXT +1 NEW -> YOU +1 NEW -> NEWER +1 NERVE -> NERVES +1 NEOSHO -> NEOSH +1 NEIGHBOURING -> NEIGHBORING +1 NEIGHBORS -> NEIGHBOURS +1 NEIGHBORS -> LABORS +1 NEEDLED -> NEEDLE +1 NEED -> NEEDED +1 NEARING -> NEAR +1 NEAREST -> NEWS +1 NEARER -> IN +1 NEARED -> NEAR +1 NAUGHT -> NOUGHT +1 NATURALLY -> NATURAL +1 NATURAL -> NATURALLY +1 NARRATIVES -> NARRATIVE +1 NANDY'S -> ANDY'S +1 MYSTERIOUS -> MYSTERY +1 MYRTILUS -> MYRTOLIS +1 MYRTILUS -> MERTOLUS +1 MYRTILUS -> MARTILLUS +1 MYRTILUS -> BURTLES +1 MUSTACHES -> MOUSTACHES +1 MUST -> MY +1 MUST -> MISTS +1 MUST -> MISTER +1 MUSKETS -> MASKETS +1 MUSICIANS -> MEASIANS +1 MURDOCH'S -> MARDOC'S +1 MURDOCH -> MERDOCK +1 MURDOCH -> MARDOX +1 MUIR -> YOU +1 MUG -> MUCH +1 MUD -> MATTER +1 MOWER -> OVER +1 MOVEMENT -> MOMENT +1 MOUTHWHAT -> MOUTH +1 MOUTHS -> MOTHS +1 MOUTH -> MOUSE +1 MOURNING -> MORNING +1 MOUNTNORRIS -> MONTORAS +1 MOUNT -> MOUND +1 MOTIONLESS -> MUCH +1 MOTHER -> PRESENTERS +1 MOST -> PROVERB +1 MOST -> PERCY +1 MOSES -> ROSES +1 MOSES -> OF +1 MOSES -> MOVES +1 MORTIFICATIONTHAT 
-> MORTIFICATION +1 MORTEM -> MODE +1 MORE -> SMALL +1 MOPED -> MIRKED +1 MOOR -> MORE +1 MONTHLY -> MOUTHFULLY +1 MONSEIGNEUR -> MONSIEUR +1 MOMMOL -> MAMMA +1 MO -> MORE +1 MIXTURE -> MIXED +1 MIX -> MAKE +1 MISTER -> MISSUS +1 MISTER -> MISS +1 MISTER -> MIDSER +1 MISTER -> BISHOIS +1 MISSUS -> MUST +1 MISSED -> MISTING +1 MISCHIEVOUS -> MACHIEVOUS +1 MINNIE -> MANY +1 MINNEAPOLIS -> MANY +1 MINISTER -> MEANESTER +1 MINIONETTE -> MEAN +1 MINE -> MIND +1 MINE -> MIKE +1 MIND -> MINE +1 MIMICK -> MIMIC +1 MILLY -> MERELY +1 MILLSTON -> MILLSTONE +1 MILICENT'S -> MILICENT +1 MILICENT -> MILLSON +1 MILICENT -> MELLICENT +1 MILE -> MILES +1 MILDEWED -> MELTED +1 MIHI -> ME +1 MIDRIFF -> MIDRIFTS +1 MIDIAN -> MILLION +1 MIDIAN -> MENDIAN +1 MIDDLING -> MIDDLIN +1 METHINKETH -> METHINK +1 MET -> MAKE +1 MESTIENNE -> THE +1 MESTIENNE -> MISTIENNE +1 MESTIENNE -> MISSION +1 MESTIENNE -> MESSIAN +1 MESTIENNE -> MESS +1 MESSES -> MASSES +1 MERRY -> MERRYMAKING +1 MERNEPTAH -> PATH +1 MERLONUS -> MERLINUS +1 MERLONUS -> MERLINA'S +1 MENTAL -> MANTLE +1 MEND -> MEAN +1 MEN'S -> MAN'S +1 MEN -> MAYN'T +1 MEN -> INTO +1 MEN -> CAME +1 MEDICAMENTS -> MEDICMENTS +1 MEDICAL -> MEDICA +1 MEDIAN -> MEDIUM +1 MEAT -> TO +1 MEAT -> ME +1 MEAT -> HAVE +1 ME -> YOU +1 MC -> MICROCLE +1 MC -> MAC +1 MC -> MA +1 MAY -> ME +1 MAY -> MARGAR +1 MAY -> MADE +1 MATTERS -> MATTER +1 MATTER -> MAZAR +1 MATI -> MET +1 MATI -> MAN +1 MATEY -> MATE +1 MATERIALS -> MATURES +1 MATE -> MADE +1 MASTER -> MASSR +1 MASKED -> MASSED +1 MARVELED -> MARVELLED +1 MARSPEAKER -> MARSH +1 MARSHAL -> MARTIAL +1 MARRIAGE -> MARES +1 MARMALADES -> MARMAL +1 MARJORIE -> MARGERY +1 MANKATO -> MANKE +1 MANKATO -> MANKADO +1 MANASSEH -> MANETTE +1 MAN -> MAIN +1 MAMIE -> MAY +1 MALNUTRITION -> MALTRICIAN +1 MAKES -> MATRON +1 MAKE -> MADE +1 MAKAN -> MACAN +1 MAJORITY -> MATURITY +1 MAINE -> MIN +1 MAIL -> MALE +1 MAID -> MATE +1 MADE -> READ +1 MAD -> MADGE +1 MABILLON -> MARVALAN +1 M -> NOT +1 M -> I +1 M -> EM +1 M -> AND +1 M -> AM +1 LYSIMACHUS -> LISSMACHUS +1 LUNA'S -> LUNDY'S +1 LUNA'S -> LUNAR'S +1 LUNA -> LINA +1 LUNA -> LENA +1 LUKE -> LOOK +1 LUGGAGE -> LEGGED +1 LOWER -> BLOW +1 LOW -> LOKIE +1 LOVE -> LAW +1 LOUD -> ALL +1 LOST -> ASKED +1 LOSSES -> LOSES +1 LORD'S -> LORDS +1 LORD -> LOT +1 LORD -> LARD +1 LOQUACITY -> LEQUESTITY +1 LOOKOUT -> LOOK +1 LOOKED -> LOOK +1 LOOKED -> LIFTED +1 LOOK -> WORK +1 LOOK -> LOOKED +1 LOOK -> LIVE +1 LONGER -> LONG +1 LONESOMENESS -> LONESOME +1 LONE -> LONG +1 LOCK -> LOOK +1 LOBSTER -> LOBSTERBOAT +1 LOBSTER -> LOBS +1 LL -> CHILLED +1 LIZABETH -> ELIZABETH +1 LIVES -> IS +1 LIVED -> IF +1 LIVE -> LEAVE +1 LISTEN -> LISTENED +1 LINE -> LYING +1 LINE -> LAND +1 LIMPED -> LIMP +1 LIME -> LINE +1 LILBURN -> LOWBOURNE +1 LILBURN -> LOWBORN +1 LIKELY -> LIKE +1 LIKE -> THE +1 LIKE -> NIGHTLY +1 LIKE -> LIKED +1 LIGHT -> LIGHTFOOTED +1 LIFE -> LIE +1 LIE -> LIKE +1 LIDDY -> LEAVY +1 LIAISON -> LEAR +1 LEXINGTON -> LESSINGTON +1 LEWIS -> LOUIS +1 LEVITICUS -> LUVIDICUS +1 LETTERS -> SLATTERS +1 LETTERS -> LITTLE +1 LETTERS -> LETTER +1 LETTERS -> LET +1 LET -> THEM +1 LET -> NEKHLUD +1 LET -> LEFT +1 LET -> LATER +1 LET -> LATE +1 LESSON -> MESS' +1 LESS -> US +1 LESS -> LEST +1 LESLIE -> THIS +1 LESLIE -> LIZZLY +1 LESLIE -> LIZZLING +1 LESLIE -> LISLEY +1 LESLIE -> LISALLY +1 LESLIE -> LINLESILY +1 LESLIE -> IT +1 LENOIR -> NOIR +1 LEND -> BLENDEST +1 LEMON -> LINENSHIPS +1 LEGS -> OR +1 LEFT -> LIVED +1 LEFT -> LILY +1 LEFT -> LIFTED +1 LEFT -> LET +1 LEFT -> LAST +1 LEER -> YARD +1 LEECHES 
-> LEECH +1 LEE'S -> LEE +1 LEAVE -> LE +1 LEARN -> LEARNED +1 LEAPT -> LEAPED +1 LEADPENCIL -> LEAD +1 LEADERSHIP -> LEISURESHIP +1 LEADERS -> LEADER'S +1 LAWS -> NOISE +1 LAW -> LAWN +1 LAW -> LAST +1 LAW -> AND +1 LAUGHED -> THEY'LL +1 LAUDERDALE -> LAURDALE +1 LAUDERDALE -> LAURAIL +1 LATH -> LAST +1 LATE -> LAID +1 LAST -> MASTER +1 LAST -> LASTIS +1 LASH -> LAST +1 LANDI -> LANDY +1 LAND -> THE +1 LAMBS -> LAMPS +1 LAKE -> LEGALLY +1 LAIN -> LANE +1 LAID -> LATE +1 LADY -> LADIES +1 LADS -> LAD +1 LAD -> ABOUT +1 LACHRYMA -> LACK +1 LABOURERS -> LABORERS +1 LABOUR -> LABOR +1 LABORING -> LABOURING +1 L -> OLD +1 L -> ALE +1 KNOW -> NOT +1 KNOTTY -> NAUGHTY +1 KNOT -> NOT +1 KNOBBLY -> NOBLY +1 KNIGHT'S -> NIGHT'S +1 KNIGHT -> LAWN +1 KLEPTOMANIAC -> CLAPTOMANIA +1 KLEPTOMANIA -> CLEFTOMANIA +1 KITTY -> KATY +1 KITE -> KIND +1 KITE -> DESERVED +1 KING -> MACKING +1 KING -> GIMERNETTE +1 KINDER -> KIND +1 KILLS -> KILL +1 KICK -> KICKIE +1 KEYS -> CASE +1 KEYS -> ACCUSE +1 KETTLE -> CATTLE +1 KERSTALL -> COASTON +1 KENITES -> KENNITES +1 KEEN -> KIN +1 KEDEM -> KIDDAM +1 KAMAR -> COME +1 JUSTIFIED -> IT'S +1 JUST -> REVELATION +1 JUST -> JETS +1 JUST -> JESTS +1 JUST -> JEST +1 JUST -> IS +1 JURY -> JERRY +1 JURY -> CHERRY +1 JURISDICTION -> JURIS +1 JUNIOR -> GENIOR +1 JUMPS -> JUMPED +1 JUG -> CHUG +1 JUDGMENT -> YOU +1 JUDGMENT -> JOINTMENT +1 JUDGE -> JOSE +1 JUDGE -> JOE +1 JUDAH -> JULIA +1 JOUVIN'S -> JUBAND'S +1 JOSHUA -> JOHNSHAW +1 JOKINGLY -> CHOKINGLY +1 JOINED -> JARNDYCE +1 JOCELYN'S -> JOSTLING +1 JIS -> GIT +1 JESTER -> GESTURE +1 JEHU -> JEHOV +1 JEHOASH -> JO +1 JEERED -> JERED +1 JEDGE -> JUDGE +1 JANSENIST -> GENTLEST +1 JANEERO -> GENERO +1 JANE -> CHANT +1 JAKEY'S -> JAKIE +1 JAKEY -> J +1 JACKMAN -> JACK +1 JACKAL -> WILL +1 JACKAL -> JACKO +1 JACKAL -> JACK +1 IZZY'S -> IZZIE'S +1 IZZY -> AS +1 IVANOVITCH -> GIVANOVITCH +1 ITS -> TO +1 ITS -> IT'S +1 ITS -> HAD +1 IT'S -> HIS +1 IT'S -> AND +1 IT -> YET +1 IT -> UP +1 IT -> TRUE +1 IT -> TO +1 IT -> STEPS +1 IT -> RODE +1 IT -> OR +1 IT -> MIN +1 IT -> ITS +1 IT -> IT'S +1 IT -> IT'LL +1 IT -> EVER +1 IT -> EACH +1 IT -> DEAR +1 IT -> ARMY +1 IT -> AND +1 ISRAELITES -> ISRAIT +1 ISLAMISED -> ISLAMMISED +1 ISLAM -> GAVE +1 ISAAC -> MISERC +1 IS -> TREBRANT +1 IS -> SAKE +1 IS -> LANY +1 IS -> ISN'T +1 IS -> IF +1 IS -> HERE'S +1 IS -> HAS +1 IS -> FOR +1 IS -> ENDOWED +1 IS -> DOES +1 IRONICAL -> IRONIC +1 IOWA -> HOUR +1 INVALIDES -> INVALIDE +1 INVADE -> IN +1 INTO -> TO +1 INTO -> AND +1 INTEREST -> INTERESTS +1 INTEND -> INSENT +1 INTELLECTUALLY -> INTELLECTUAL +1 INSTRUCTED -> INSTRUCTIVE +1 INSTANTLY -> THING +1 INSISTENCE -> DOZE +1 INSCRIPTIONS -> SCRIPS +1 INQUIRE -> ACQUIRE +1 INNES -> EAMES +1 INJURE -> ENDURE +1 INGENIOUSLY -> INGENUOUSLY +1 INFAMOUS -> IN +1 INDEED -> INDE +1 INCOMPARABLE -> INN +1 IN'T -> INTO +1 IN -> WHEN +1 IN -> WHEEL +1 IN -> THEM +1 IN -> THEIR +1 IN -> ON +1 IN -> OF +1 IN -> NEAT +1 IN -> JEST +1 IN -> INTO +1 IN -> INTERPLIES +1 IN -> INSIDE +1 IN -> INN +1 IN -> HEARD +1 IN -> FROM +1 IN -> FREE +1 IN -> BENEATH +1 IN -> AWHILE +1 IN -> AT +1 IN -> A +1 IMPROVISED -> PROVISED +1 IMPROVE -> PROVE +1 IMPLY -> SE +1 IM -> QUEST +1 IF -> THAT +1 IF -> IT +1 IDEA -> RIVER +1 ICES -> ISIS +1 ICES -> IISES +1 ICE -> EYES +1 I'M -> UNHUNGRY +1 I'LL -> ELSE +1 I'LL -> ALL +1 I'FAITH -> I +1 I'D -> I'VE +1 I'D -> I'LL +1 I -> TO +1 I -> THY +1 I -> THAT +1 I -> SUIT +1 I -> OUGHT +1 I -> IT +1 I -> IOPIUS +1 I -> HER +1 I -> EYES +1 I -> EVER +1 I -> EITHERN +1 I -> ECHOLYN +1 I -> 
AMID +1 I -> AH +1 HURT -> HIDE +1 HUNTINGDON -> HONDYNON +1 HUNTERS -> HANDLES +1 HUMOURS -> HUMANS +1 HUMOR -> HUMOUR +1 HUH -> HOW +1 HUDSPETH -> HUSBATH +1 HOWEVER -> SAMURED +1 HOWEVER -> HOURSERVES +1 HOWEVER -> HERBERT +1 HOW -> OH +1 HOUSEHOLD -> HOUSE +1 HOUR -> I +1 HOUNDED -> HANDY +1 HOSPITABLY -> HALF +1 HORDE -> HOARD +1 HOPPING -> HAVING +1 HOPE -> OPEN +1 HOO'LL -> HE'LL +1 HOO -> HE +1 HONOURS -> HONORS +1 HONOUR -> HONOR +1 HONORS -> HONOURS +1 HONOR -> HUNGER +1 HONESTLY -> ON +1 HONEST -> I +1 HOMEPUSH -> HOME +1 HOME -> WHOLE +1 HOME -> HE +1 HOLY -> WHOLLY +1 HOLLER -> HOLLERED +1 HOLD -> OR +1 HOLD -> OH +1 HOLD -> HER +1 HOF -> WHOLE +1 HITHER -> THITHER +1 HIS -> ITS +1 HIS -> INTO +1 HIS -> IN +1 HIS -> HE +1 HIS -> DISCOURSE +1 HIS -> AS +1 HIS -> AN +1 HIS -> A +1 HINDFELL -> HINFIELD +1 HIMSELF -> HIS +1 HIM -> TO +1 HIM -> EM +1 HIJAZ -> HI +1 HIGHS -> HIES +1 HIGH -> I +1 HIERARCHY -> HIRAKEE +1 HI -> AY +1 HEYDAY -> HEY +1 HEWN -> YOU +1 HERMON'S -> HARMON'S +1 HERMON -> HERE +1 HERIOT'S -> HERETT'S +1 HERIOT -> HEARET +1 HERETOFORE -> HERE +1 HERE -> YOU +1 HERDSMEN -> HERDSMAN +1 HER -> TO +1 HER -> ON +1 HER -> ITALIANS +1 HER -> IT +1 HER -> HYCOMICAL +1 HER -> HERSELF +1 HER -> HE +1 HER -> AROUND +1 HEPTARCHIES -> HEPTARK +1 HEN -> INCOUPS +1 HELVIN -> HELVAN +1 HELPED -> HELPS +1 HELM -> HAIL +1 HELEN -> ALAN +1 HEELED -> HEALED +1 HEEDED -> HE +1 HEDGES -> HATCHES +1 HEBREW -> SEA +1 HEARTS -> HEART +1 HEARSE -> HOUSEHOLTS +1 HEARD -> IN +1 HEADQUARTERS -> HEADQUARTER +1 HEADLONG -> HAD +1 HEADLIGHTS -> HEAD +1 HEAD -> HIDDEN +1 HE'S -> THIS +1 HE'S -> IS +1 HE -> YES +1 HE -> THEY +1 HE -> THE +1 HE -> SHE +1 HE -> SEE +1 HE -> IRRESPONSIBLE +1 HE -> IF +1 HE -> AWAY +1 HE -> AND +1 HAYES -> HASAN'S +1 HAW -> HAWHAT +1 HAVEN -> HAIRY +1 HAVE -> PROVED +1 HAVE -> IF +1 HAVE -> HAV +1 HAVE -> HALF +1 HAVE -> HAIR +1 HAVE -> HAD +1 HAVE -> EH +1 HAVE -> BIT +1 HATTERSLEY -> HALTERSLEY +1 HASAN -> HER +1 HAS -> THUS +1 HAS -> JUST +1 HAS -> HESTERITY +1 HAS -> HESDAY +1 HAS -> HAVE +1 HAS -> AS +1 HARVEY'SWHICH -> HARVEST +1 HARRIS -> HARRIS'S +1 HAROLD -> HERALD +1 HARKNESS -> HARKINS +1 HARE -> HAIR +1 HARDWARE -> HARD +1 HARDLY -> ARE +1 HARD -> OUR +1 HARD -> HEART +1 HAPLY -> HAPPILY +1 HAND -> HANDS +1 HALT -> HELP +1 HALL -> WHOLE +1 HALL -> HOLLAND +1 HALF -> HAPPENED +1 HALEY'S -> HALELY +1 HADDA -> HAD +1 HAD -> THAT +1 HAD -> IN +1 HAD -> GOT +1 HAD -> ENDING +1 HAD -> DO +1 HAD -> ARE +1 GYLINGDEN -> GILINGDEN +1 GURR -> GURSER +1 GURR -> GRIGGLY +1 GURR -> GREW +1 GURR -> GORE +1 GURR -> GIRT +1 GURR -> GIRL +1 GURR -> GERT +1 GURR -> GERFATHER +1 GURR -> GER +1 GUNS -> GUN +1 GULLET -> GALLANT +1 GUISE -> GUY'S +1 GUIRUN'S -> GUNDERING +1 GUINEA -> GUINEAS +1 GUILD -> GOLD +1 GUEST -> GUESTS +1 GUESS -> GES +1 GUDRUN -> GUNDRAIN +1 GRUMBLINGLY -> TREMBLINGLY +1 GROVE -> GROW +1 GROTTO -> DRATTO +1 GROAN -> GROUND +1 GRIMSBY -> GRIM'S +1 GRIBIER -> CLAVIER +1 GREY -> GRAY +1 GREENTON -> GREENSON +1 GREENBACKS -> GREEN +1 GREAVES -> GREEBS +1 GREAT -> GREEN +1 GRAY -> GREY +1 GRAY -> GLAY +1 GRAVE -> BRAVE +1 GRATITUDE -> CREDITUDE +1 GRASPS -> GRASPED +1 GRAPPLE -> GRANTEL +1 GRANDAME -> GRAND +1 GRAMMATEUS -> GRAMMATIUS +1 GRAM -> GRAHAM +1 GRAEME -> GRAHAME +1 GRAEME -> GRAHAM +1 GRACIOUS -> GRECIOUS +1 GOWN -> GUN +1 GOVERNMENTS -> GOVERNMENT +1 GOVERNMENT -> GOVERNOR +1 GOV'NOR -> GUV'NER +1 GOV'NOR -> GOVERNOR +1 GOT -> GOD +1 GOT -> CUT +1 GOT -> COURT +1 GORDON -> GORDON'S +1 GORDON -> GORD +1 GOODS -> GOOD +1 GOOD -> SPEAR'S +1 GONE 
-> DISCOUR +1 GOLDFISH -> GOLD +1 GOLDEN -> GOLD +1 GOING -> YOU +1 GOING -> GO +1 GOES -> WAS +1 GODEBILLIOS -> GO +1 GOD -> SCOTT +1 GOD -> GOT +1 GOBEY'S -> GOBYS +1 GOBEY'S -> GOBY'S +1 GOBEY'S -> GOBIES +1 GOAL -> GOLD +1 GLISPIN -> CLISPIN +1 GLAD -> GRINDING +1 GIVEN -> GIVING +1 GIVE -> GIVIN +1 GIVE -> GAVE +1 GIT -> GET +1 GIRTHING -> GIRDING +1 GIRTHED -> GIRDED +1 GIRL -> GO +1 GIRDS -> GUARDS +1 GET -> IT +1 GET -> GIT +1 GET -> GERT +1 GEORGE'SWHICH -> GEORGE'S +1 GEORGE'S -> GEORGE +1 GENTLEMEN'S -> GENTLEMAN'S +1 GAUTHIER -> GATHIER +1 GAULS -> GULFS +1 GARDEN'S -> GARDENS +1 GAMMON -> GAMIN +1 GALLATIN -> GALLOP +1 G'YIRLS -> IS +1 FUZZ -> FUZ +1 FURZE -> FIRS +1 FULL -> POOL +1 FULL -> FOR +1 FULL -> FOOT +1 FROZE -> ROSE +1 FRONTIERS -> FRONTIER +1 FROG'S -> FROGS +1 FRO -> FROM +1 FRISTOE'S -> FIRST +1 FRISTOE -> FRISTOW +1 FRISTOE -> FOR +1 FRIGHTFUL -> DREADFUL +1 FRIGHTENS -> BRIGHTENS +1 FRIEND -> FRANJAMIN +1 FRIAR -> FRIED +1 FRET -> FRITTEN +1 FRENCH -> FRENCHARD +1 FREEWAY -> FREE +1 FREES -> FREEZE +1 FREEDOM -> READ +1 FRANZ -> FRIENDS +1 FRANCOIS -> FROSOIS +1 FRANC -> FRANK +1 FOURTEENTHAT'S -> FOURTEEN +1 FOUR -> FUPS +1 FOUR -> FULL +1 FOUR -> FOREMOTHER +1 FOUR -> FOLIEVED +1 FOUNDED -> FOUND +1 FOUGHT -> THOUGHT +1 FORTY -> FORTE +1 FORTS -> FAULTS +1 FORMER -> FORM +1 FOREMAN -> FOUR +1 FOREGATHERED -> FOR +1 FORCED -> FORCE +1 FOR -> WERE +1 FOR -> TILL +1 FOR -> OR +1 FOR -> OF +1 FOR -> IN +1 FOR -> FROM +1 FOR -> FOUR +1 FOR -> FAULT +1 FOR -> FAR +1 FOR -> ABOVE +1 FOOLS -> FOOD'S +1 FOOL -> FULL +1 FOOD -> FOOT +1 FONTEVRAULT -> FONTREVAL +1 FOLLOWS -> FOLLOWED +1 FOLLOWED -> FOLLOW +1 FOLLOWED -> ALL +1 FOLLOW -> FOLLOWING +1 FOE -> FOLK +1 FOALS -> FOOLS +1 FOAL -> FOOL +1 FLY -> FLIES +1 FLUTTERING -> REFLECTING +1 FLOWERBEDS -> FLOWER +1 FLOW -> FLOOR +1 FLOSSY -> FLOSSIE +1 FLOORBOARDS -> FOREBOARDS +1 FLEROV'S -> FLORO'S +1 FLEROV'S -> FLORA'S +1 FLEROV -> FLAREFF +1 FLEROV -> FLARE +1 FLEERED -> FLARED +1 FLAVOR -> FLAVOUR +1 FLAVOR -> FLAVORITE +1 FLATTERER -> SLACKER +1 FLATTERED -> FURTHER +1 FLATHEADS -> FLAT +1 FLABBERGASTED -> FLABRA +1 FISHING -> FISHIN +1 FISHIN -> FISHING +1 FISHED -> FINISHED +1 FIRSTER -> FIRSTTER +1 FINICAL -> PHYNICAL +1 FINELY -> FINALLY +1 FINDING -> FIND +1 FIND -> FAMILY +1 FILTRATES -> FUR +1 FILTRATE -> FEDERATE +1 FIGURE'S -> FIGURES +1 FIGGER -> FAGONNE +1 FIFTEENTH -> FIFTEEN +1 FIELD -> FIELDS +1 FIACRE -> FIACCHUS +1 FELT -> FELL +1 FEELS -> FILLS +1 FEEL -> SEE +1 FEEDS -> FEATS +1 FEED -> FEAT +1 FAVOURITE -> FAVORITE +1 FAVOUR -> FAVOR +1 FAVORITE -> FAVOURITE +1 FAVORABLE -> FAVOURABLE +1 FAUVENT -> VUENT +1 FAUVENT -> PROVENCE +1 FAUVENT -> FUVENT +1 FAUVENT -> FOR +1 FAUVENT -> FERVENT +1 FAUVENT -> FAVAN +1 FAUCES -> FOSSES +1 FATS -> FAT'S +1 FATHER'S -> FATHERS +1 FATHER'S -> FATHER +1 FATHER -> FUND +1 FATHER -> FOUNDER +1 FATHER -> FOR +1 FATHER -> EITHER +1 FATE -> PHAETON +1 FAT -> BAT +1 FAST -> FAT +1 FARRINDER -> THORNDER +1 FARRINDER -> FARRENDER +1 FARRINDER -> FARINGDER +1 FARRINDER -> BARRING +1 FARE -> HE +1 FANNY -> THEN +1 FANNY -> ANY +1 FANGED -> FACT +1 FAN -> PAMPAINTER +1 FAM'LY -> FAMILY +1 FALLING -> FOLLOWING +1 FALL -> FOR +1 FAITH -> FIT +1 FAIR -> HER +1 FAILING -> FEELING +1 FAILED -> FAITH +1 FAFNIR'S -> FASTENER'S +1 FAFNIR'S -> FAFNER'S +1 FAFNIR -> STAFF +1 FACE -> FAITH +1 FACE -> FACED +1 EYES -> EYE +1 EXTRACT -> EXTRACTED +1 EXTRA -> SIR +1 EXTENUATING -> EXTINUATING +1 EXPOSE -> EXPOSED +1 EXPLOITING -> EXPLODING +1 EXPERIENCE -> SIGNING +1 EXPEL -> 
EXPELLED +1 EXPECTED -> INSPECTOR +1 EXPANSE -> EXPOUNDS +1 EXIST -> EXISTS +1 EXECUTIVE -> EXECUTING +1 EXCLAIMED -> EXPLAINED +1 EXCITING -> THE +1 EXAMINING -> EXAMINED +1 EXACKLY -> EXACTLY +1 EVIL -> EVEN +1 EVERYONE -> EVERY +1 EVER -> EVERGREWING +1 EVAPORATE -> THE +1 EV'YBODY'S -> EVERYBODY'S +1 EUSEBIUS -> EUSIDIUS +1 EUSEBIUS -> EUSIBIUS +1 EUPHRATES -> EUPHATEES +1 EUPHRANOR -> EUPHRANER +1 ETHEREAL -> ASSYRIAL +1 ETHER -> THEM +1 ETHELRIED'S -> EPILRIED'S +1 ETHELRIED -> ETHELRED +1 ETHELRIED -> EPLORRIED +1 ET -> AT +1 ESTHER -> ASSER +1 ESPECIALLY -> SPENT +1 ESPECIALLY -> HAS +1 ERNESTINE -> ERNESTON +1 ERE'S -> YES +1 ERE -> IF +1 EQUERRY'S -> EQUERRIES +1 EPIGASTER -> EBERGASTER +1 EPHRAIM -> FROM +1 ENTRUSTED -> AND +1 ENTR'ACTE -> ENTRACT +1 ENTERED -> ANSWERED +1 ENSUED -> ENSUIT +1 ENSNARES -> AND +1 ENSLAVED -> ENSLAVE +1 ENRAGED -> ENRAGE +1 ENOUGH -> UP +1 ENJOY -> ENJOYED +1 ENFRANCHISEMENT -> ENCOMCHISEMENT +1 ENFORCEMENT -> FORCEMENT +1 ENDURETH -> AND +1 ENDURE -> INDURE +1 EMOTION -> THE +1 EMETIC -> AMATIC +1 EMBRUN -> EMBRON +1 EM -> HIM +1 ELYSIAN -> THE +1 ELLIS -> ILLIS +1 ELISIONS -> ELYGIANS +1 ELEXANDER -> IT +1 ELEXANDER -> ALEXANDER +1 ELDER -> OTHER +1 ELBOW -> BOWS +1 ELBERT -> ELBER +1 ELASTIC -> MOLASTIC +1 ELASTIC -> ELECTIC +1 EIGHTH -> EIGHTHS +1 EGGS -> KNIFE'S +1 EDGING -> EDGED +1 EAU -> OVERCLONE +1 EAST -> EACH +1 EARTH -> ART +1 EARLIEST -> ARIAD +1 EAR -> IRRES +1 EAD -> HEAD +1 E'LL -> YOU +1 E'ER -> ERE +1 E -> DOG +1 DUSK -> THUS +1 DUPLICATES -> DEPLICATES +1 DUNNING -> DARNING +1 DUMAS -> YOU +1 DUM -> DOOM +1 DULL -> DOLL +1 DUKE -> DUPE +1 DUKE -> DO +1 DUDS -> DERDS +1 DRUGSTORE -> DRUG +1 DRINK -> DRINKIN +1 DRINK -> DRANK +1 DRINK -> BRING +1 DRAWERS -> RAOUL +1 DRAW -> DRAWN +1 DRAUGHT -> DROP +1 DOWNING -> DAWNING +1 DOWN -> ROUND +1 DOWN -> ON +1 DOUBT -> OUT +1 DOTH -> DIRTS +1 DOST -> THOSE +1 DOSE -> DAYS +1 DONOVAN -> DONALIN +1 DON'T -> WANTS +1 DON'T -> DO +1 DOM -> DON +1 DOGS -> DOG +1 DOG -> DORCART +1 DOG -> DARK +1 DOES -> DOESN'T +1 DOCTOR -> DOCTRIPAR +1 DOCK -> DOCKYARD +1 DOAN -> DON'T +1 DO -> TOO +1 DO -> DON'T +1 DO -> DIEU +1 DO -> DID +1 DIVIDED -> DIVIDE +1 DITCHFIELD -> DITZFIELD +1 DISTRUSTED -> DISTRUDGED +1 DISTRESS -> DISTRESSED +1 DISTICHS -> DISTINCTS +1 DISSENTIENT -> DISINDIAN +1 DISPOSED -> DISPOS +1 DISNEY -> DIDNY +1 DISINFECTING -> DISINFECT +1 DISFIGURED -> THIS +1 DISASTROUS -> DISASTERATE +1 DISAGREE -> DISAGREED +1 DIRE -> DIA +1 DINKS -> THINK +1 DINARS -> HOURS +1 DILUTE -> DELUDE +1 DIGGING -> TIGGING +1 DIE -> GUY +1 DIDN'T -> THEN +1 DID -> THE +1 DID -> DOES +1 DICKIE -> THE +1 DICKIE -> DICKET +1 DEVOUR -> THE +1 DETERMINED -> TO +1 DETECTIVES -> DETECTIVE +1 DETECTIN -> DETECTIVE +1 DESTINIES -> DEBTS +1 DESSERTS -> DESERTS +1 DESPOTIC -> THAT +1 DESPITE -> THIS +1 DESK -> VES +1 DESK -> DESKED +1 DESIRES -> DESIRE +1 DESIGN -> A +1 DERELICTS -> DEAR +1 DEPRECATE -> THE +1 DEODORIZING -> NO +1 DEMETER -> DEMEANOR +1 DELMONICO -> DOMONICO +1 DELIBERATE -> DELIVERED +1 DEFEND -> DEFENDED +1 DEFECTION -> AFFECTION +1 DEEPENED -> DEEP +1 DECLARING -> DECLINING +1 DEBTOR -> DEPTOR +1 DEBARRED -> DEBARED +1 DEATHLY -> DEFTLY +1 DEATHLIKE -> DEATH +1 DEARLY -> STILL +1 DEARLY -> DAILY +1 DEANS -> DEAN +1 DEAD -> DAY +1 DEAD -> BEDROOM +1 DEACH -> DID +1 DE -> TO +1 DAYS -> STAYS +1 DATED -> DID +1 DAT -> DAT'S +1 DARKAND -> DARK +1 DARE -> DARED +1 DAPHNE'S -> THESE +1 DAPHNE -> JAPHANE +1 DAPHNE -> JAPANE +1 DANDAN -> TAN +1 DANCER -> DANCERS +1 DAMN -> DEAR +1 DAME'S -> JAMES'S +1 
DALYS -> DAILIES +1 DAGOS -> DAG +1 DA -> THOU +1 DA -> DECORTUNA +1 D -> THEY +1 D -> THEN +1 CYNICISM -> SYNICISM +1 CYMBALS -> SYMBOLS +1 CUT -> GOT +1 CUT -> GO +1 CUSTOM -> COTTOM +1 CURSORILY -> CURSORY +1 CURRENTS -> CURRANTS +1 CURL -> GIRL +1 CUP -> CUPIED +1 CUISINE -> COUISINE +1 CRUX -> CREW +1 CRUSHING -> CRASHING +1 CRUMPLED -> CRUMBLED +1 CRUMBLY -> CRAMBLY +1 CROOKS -> COOKS +1 CRIES -> CHRIST +1 CREEL -> CREOLE +1 CRAWFISH -> CROPPISH +1 CRAWFISH -> CROPFISH +1 CRAWFISH -> CRAW +1 CRATES -> CREEDS +1 CRAB -> CRABS +1 COYNESS -> KINDNESS +1 COY -> KOY +1 COXCOMB -> ACCOUNT +1 COWLEY'S -> CARLIS +1 COURT -> COURTYARD +1 COURT -> CORP +1 COURSING -> COARSING +1 COURSE -> COARSE +1 COURFEYRAC -> COURFEREK +1 COUNT -> COMPASSER +1 COUNSELS -> COUNSEL +1 COUNSEL -> CONSUL +1 COUNCILLOR -> COUNSELLOR +1 COULD -> GOOD +1 COULD -> COULDN'T +1 COULD -> CANNOT +1 COST -> COSTUM +1 CORYDON -> CROYDEN +1 CORNER -> CORNERED +1 CORAL -> COAL +1 COQUETTE -> POCKET +1 COPS -> COPSE +1 COPS -> CAPS +1 COP'S -> COPSE +1 COP -> COPP +1 COOL -> U +1 CONTROL -> CONTROLLED +1 CONTINUAL -> CONTINUOUS +1 CONTEND -> CONTENDED +1 CONTEND -> COMPEND +1 CONSTITUENT -> CONSTITUTE +1 CONSTANT -> CAN'T +1 CONSONANTS -> COUNTENANCE +1 CONSOMME -> CONSUM +1 CONINGSBURGH -> CONNINGSBURG +1 CONFIRMATION -> CONFIRMATESON +1 CONFIDENTIALLY -> TO +1 CONFIDE -> CONFINE +1 CONFICERE -> CONFERS +1 CONFECTIONARY -> CONFECTIONERIES +1 CONCOCTED -> CONCLUDED +1 COMPATRIOT -> CAMPATRIOT +1 COMORIN -> CORMOR +1 COMMUNITY -> CUNITY +1 COMMONWEALTH -> CORNWEALTH +1 COMMONWEALTH -> CONWEALTH +1 COMMITTEE -> COMEDY +1 COMMISSARY -> COMMISSORY +1 COMMENT -> COMMENTS +1 COMMENCED -> COMMANDS +1 COMING -> COMIN +1 COMING -> CARMINALS +1 COMETH -> COME +1 COMEST -> COMES +1 COMES -> COME +1 COME -> COMMONED +1 COME -> COMMANDER +1 COME -> CALM +1 COLONEL -> CAROL +1 COLLECT -> COLLECTED +1 COLE -> CO +1 COLDS -> GOLDS +1 COLD -> CALLED +1 COINS -> COIN +1 COD -> COT +1 COCOA -> COOKER +1 COCKRELL -> COCKLE +1 COBBER -> COPPER +1 COALESCED -> COLLETS +1 CLUMB -> CLIMB +1 CLOSET -> CLOTH +1 CLOSEST -> CITIZENS +1 CLOMB -> CLIMBED +1 CLOCKS -> CLUXED +1 CLING -> CLINKED +1 CLIME -> CLIMB +1 CLEVERLY -> LEVILY +1 CLEAVE -> CLIFF +1 CLAWS -> CLOTHS +1 CLASSES -> CLASS +1 CLARET -> CLARGA +1 CLAIRVAUX -> CLERVAL +1 CISEAUX -> ISAU +1 CINDERLAD -> SOONER +1 CINDERLAD -> SIR +1 CINDERLAD -> SAINTO +1 CINDERLAD -> SAID +1 CINDERELLA -> CINORLA +1 CHUCKED -> SAID +1 CHRISTIANS -> CHRISTIAN +1 CHRISTIANITY -> DONEGOOD +1 CHRISTI -> REMAR +1 CHRISTENING -> CHRISTIANNING +1 CHRIS -> THIS +1 CHRIS -> MISTER +1 CHRIS -> GRIS +1 CHRIS -> CHRISTEN +1 CHRIS -> CHRIST +1 CHRIS -> BRUCE +1 CHOUETTE -> SWEAT +1 CHONODEMAIRE -> CHATEAU +1 CHLORATE -> LOW +1 CHLORATE -> CHLORIDE +1 CHINTZ -> CHIN'S +1 CHILLS -> CHILL +1 CHIEF -> CHEAP +1 CHIDE -> CHID +1 CHEEKE -> CHEEK +1 CHEEKBONES -> SHEEP +1 CHEEK -> CHEEKS +1 CHARMED -> SHOWN +1 CHARMED -> CHUMMED +1 CHARLEY'S -> CHARLIE'S +1 CHARGED -> CHARGE +1 CHARGE -> SHARS +1 CHARACTERISTIC -> CARE +1 CHANGE -> CHANCE +1 CHANCES -> CHURCHES +1 CHALONS -> CALON +1 CHAIN -> CHANGE +1 CHAFING -> CHIEFING +1 CHADWELL -> CHED +1 CERTAIN -> CERTAINLY +1 CERTAIN -> AN +1 CENTER -> CENTRE +1 CEDRIC -> SEDRIC +1 CAVALRYMEN -> CAVERNMENT +1 CAUSE -> COURSE +1 CATTLE -> CATTLETTA +1 CATHOLIC -> CATTLE +1 CATHEDRAL -> KITRAL +1 CATCHED -> CAST +1 CASTLE -> COUNCIL +1 CASTETH -> CAST +1 CASE -> HAS +1 CASE -> CAVES +1 CARROLL -> KAL +1 CARROLL -> CAROL +1 CARRIED -> CHARACTERED +1 CARRIAGE -> PARISH +1 CAPRIVI'S -> 
CAPRIVY +1 CAPITULUM -> CAPITULAM +1 CAPITULANTES -> CAPITULAT +1 CAPITALISTS -> CAPITALIST +1 CAPITAL -> CAPT +1 CAP -> CAPLICH +1 CANS -> AKANS +1 CAN'T -> COULD +1 CAN -> CAN'T +1 CAMPAIGN -> CAPTAIN +1 CAME -> GAINED +1 CALLETH -> CAUGHT +1 CALLEST -> COLLARST +1 CALLED -> CALL +1 CAIN -> GAME +1 CAGE -> CARED +1 CACKED -> CAGLED +1 CABINET -> CABINETS +1 CA'M -> CALM +1 C -> SEA +1 BYE -> BIOLECTION +1 BY -> THE +1 BY -> BUY +1 BY -> BUT +1 BY -> BILL +1 BUZZARD -> BOZARD +1 BUTTON -> BOTTOM +1 BUTTERFLY -> BUT +1 BUT -> WITHOUT +1 BUT -> WITH +1 BUT -> WHICH +1 BUT -> SHEPHERD +1 BUT -> OR +1 BUT -> MIGHT +1 BUT -> IT +1 BUT -> BY +1 BUT -> BUTCHUSE +1 BUT -> BE +1 BUSINESSWHICH -> BUSINESS +1 BURYING -> BERING +1 BURST -> FORCE +1 BURSHEBA -> PERCEIVER +1 BURNETH -> BERNETH +1 BURDENS -> A +1 BULK -> BAG +1 BUILDS -> BIDS +1 BUFFETING -> BUFFET +1 BRYNHILD'S -> BRUNHOLD'S +1 BRYNHILD -> BURNHILD +1 BRUCE -> BRUSH +1 BROUGHT -> POURED +1 BROTHER -> BROTHERS +1 BRILLIANT -> BUOYANT +1 BRIEF -> ROOF +1 BREED -> BREATHE +1 BREATHLESS -> BREATH +1 BREAST -> CHEST +1 BRAVE -> PRETTY +1 BRAU -> BROW +1 BRASS -> BREASTPAND +1 BRAHMAN -> PROMIN +1 BRAHMAN -> GRAMMEN +1 BRAHMAN -> GRAMMAR +1 BRAHMAN -> BRAMMER +1 BRAHMAN -> BRAMIN +1 BRAHMAN -> BRAMID +1 BRAHMAN -> BRAMIAN +1 BRAHMAN -> BRAMEN +1 BRACY -> BRAZY +1 BRACY -> BRACEY +1 BRACY -> BRACES +1 BRACY -> BRACELET +1 BRACY -> BRACEE +1 BOUT -> ABOUT +1 BOURGES -> BOURGE +1 BOTTOMED -> BOTTOM +1 BOTTLED -> BOTHERED +1 BOONE -> BOOM +1 BOON -> BOOM +1 BOMB -> BALM +1 BOLT -> BOLTED +1 BOLSHEVIKI -> PULCHEVIKI +1 BOIL -> BY +1 BOIL -> BOY +1 BOEOTIAN -> BE +1 BOB'S -> BOB +1 BOB -> BOBBED +1 BLOW -> BLUE +1 BLOOMIN -> ROOM +1 BLOODSHED -> BLOTCHYARD +1 BLOOD -> BLOODSTAINED +1 BLOKES -> LOSS +1 BLOKE -> LOCTICE +1 BLODGETT -> OBLIGE +1 BLODGETT -> BLODGET +1 BLODGETT -> ALEXANDER +1 BLOCK -> PLOT +1 BLESSED -> BLEST +1 BLANKETED -> BLANKET +1 BLACKLEG -> BLACK +1 BLACKBURN -> BLACKBIRD +1 BIT -> FIT +1 BIT -> BITTER +1 BISQUE -> THIS +1 BIRDSEYE -> BIRD'S +1 BIN -> BEEN +1 BILL -> BUILD +1 BIBLICAL -> PABRICAL +1 BIBLICAL -> BIBOCO +1 BHANG -> BANG +1 BEULAH -> BOOLA +1 BETTER -> BY +1 BETIDE -> BE +1 BETHUNE -> BESOON +1 BETCHA -> BETTER +1 BETAKEN -> TAKEN +1 BET -> BADE +1 BEST -> FAST +1 BESSY -> BUSY +1 BESSY -> BESSIE +1 BESIDE -> BESIDES +1 BENSON -> BUILTON +1 BENOIT -> BENOIS +1 BENNETT -> INVITED +1 BELONGED -> BELONGS +1 BELLY -> VALLEY +1 BELLOWED -> BELOVED +1 BELLE -> BELL +1 BELL -> BELT +1 BELIKE -> BE +1 BEING -> BE +1 BEGUN -> BEGAN +1 BEGGING -> PEGGING +1 BEGAN -> BEGIN +1 BEG -> BIG +1 BEFORE -> FOR +1 BEFORE -> AND +1 BEFAL -> BEFALL +1 BEEN -> THEN +1 BEEN -> OF +1 BEEN -> MONSHAT +1 BEEN -> COMPLYING +1 BECAME -> MICHANG +1 BEAUMANOIR -> BURMANOIS +1 BEARING -> AND +1 BEALE'S -> BEAT'S +1 BEALE -> BEER +1 BEALE -> BEECH +1 BEALE -> BEE +1 BEALE -> BEARD +1 BE -> BETWEEN +1 BAXTER -> BAXT +1 BATON -> BUT +1 BASSORAH -> PUSSARA +1 BASSORAH -> BASSORA +1 BASIL -> BEESER +1 BASIL -> BAZA +1 BASIL -> BASE +1 BASIL -> BALES +1 BARKLEY -> PARKLEY +1 BARIUM -> BURIUM +1 BARGELLO -> BARGENO +1 BARELY -> VARIOUS +1 BAPTISMAL -> THE +1 BANYAN -> BEN +1 BANYAN -> BANION +1 BANYAN -> BANDON +1 BANYAN -> BAN +1 BANISH -> BANACY +1 BANG -> BENNETT'S +1 BANDINELLO -> BEND +1 BAND -> BEND +1 BALLROOM -> BALL +1 BALLOT -> BALLOTT +1 BALLOCK -> BALLAK +1 BALAMMED -> BLAMMED +1 BAILEY -> BAILIQUE +1 BAGHDAD -> BAGDAD +1 BAGHDAD -> ADAD +1 BAG -> PEG +1 BADAWI -> BADARI +1 BAD -> THAT +1 BAD -> BAN +1 BACKING -> BACK +1 BABES -> BABE +1 
AWK'ARD -> UPWARD +1 AW -> AH +1 AVIDITY -> ALDITY +1 AVENUE -> AVIGUE +1 AVE -> HAVE +1 AUNT -> AUNTS +1 AUNT -> AREN'T +1 ATUM -> OUTS +1 ATUM -> ATOM +1 ATTENTION -> ATTENTIONS +1 ATTEMPTED -> ATTENDED +1 ATHELSTANE -> ADDLESTEIN +1 AT -> SAYS +1 AT -> SAID +1 AT -> OUT +1 AT -> IT'S +1 AT -> IN +1 AT -> HAD +1 AT -> BUT +1 AT -> ADD +1 ASSYRIA -> THE +1 ASSAILING -> SELLING +1 ASLEEP -> LEAP +1 ASKS -> ASK +1 ASKED -> I +1 ASKED -> AS +1 ASKED -> AIR +1 ASK -> ASKED +1 ASIA -> AS +1 ASHUR -> AYESHA +1 AS -> WAS +1 AS -> S +1 AS -> OF +1 AS -> IF +1 AS -> I +1 AS -> HAS +1 AS -> COAL +1 AS -> ALBERT +1 AS -> A +1 ARTIST -> THIS +1 ARTHUR -> OFTEN +1 ART -> ARE +1 ARSTS -> ASKED +1 ARSINOE'S -> ARSENO'S +1 ARSINOE -> AUSTENO +1 ARSINOE -> ARSENAL +1 ARRIVE -> ARRIVES +1 ARRANGED -> RANGED +1 ARPAD -> OUR +1 ARPAD -> ARBED +1 AROUSED -> ARISED +1 AROUND -> HER +1 ARMED -> I'M +1 ARMED -> ALARMED +1 ARM -> HEART +1 ARKANSAS -> OUR +1 AREN'T -> ARE +1 AREN'T -> AND +1 ARE -> RETURNED +1 ARE -> I +1 ARE -> HE +1 ARE -> DON'T +1 ARE -> AND +1 ARE -> AH +1 ARDENT -> ARDENTS +1 ARCHIBALD -> ARQUEBALD +1 APPROVE -> IMPROVE +1 APPEALED -> APPEARED +1 APPARENTLY -> A +1 APOMORPHINE -> EPIMORPHIN +1 APES -> IPES +1 ANYWAY -> AND +1 ANYONE'S -> ANY +1 ANY -> ANYTHING +1 ANY -> ANYONE +1 ANY -> AND +1 ANVILS -> AMBILS +1 ANTOLIAN -> INTOLLIUM +1 ANTIDOTES -> AND +1 ANTHONY -> AUNT +1 ANSWERS -> ADDEST +1 ANOTHER -> NOW +1 ANNOYED -> ANNOY +1 ANNIE'S -> ANY +1 ANNIE'S -> AND +1 ANNIE -> AND +1 ANGUISH -> ENGLISH +1 ANGESTON -> ANGERSON +1 ANDS -> ENDS +1 ANDBUT -> AND +1 AND -> YOU'LL +1 AND -> WHEN +1 AND -> UN +1 AND -> ROOM +1 AND -> ON +1 AND -> MISSUS +1 AND -> ME +1 AND -> LAG +1 AND -> IT +1 AND -> INTO +1 AND -> INFECTED +1 AND -> I'M +1 AND -> HIM +1 AND -> HER +1 AND -> HE +1 AND -> HAS +1 AND -> GASTED +1 AND -> ENJOINING +1 AND -> END +1 AND -> CONFINED +1 AND -> BUT +1 AND -> BEING +1 AND -> AS +1 AND -> ANY +1 AND -> ANOTHER +1 AND -> ANNE +1 AND -> ALEXAM +1 AN -> THAT +1 AN -> OUR +1 AN -> NOW +1 AN -> IT +1 AN -> CAN +1 AN -> ADULGE +1 AMYNTAS -> I +1 AMYNTAS -> A +1 AMOUR -> AMORE +1 AMONGST -> A +1 AM -> UNEXPECTED +1 ALWAYS -> ALL +1 ALTHEA -> ALTHIE +1 ALTHEA -> ALTHIA +1 ALSO -> ONCE +1 ALSO -> NABBS +1 ALREADY -> ALREAD +1 ALONGER -> ALONG +1 ALONE -> OWN +1 ALOES -> AND +1 ALOES -> ALLIES +1 ALLS -> ALL +1 ALLOWED -> ALLOW +1 ALLOWANCE -> ALLOW +1 ALLOW -> THE +1 ALLOW -> ALONE +1 ALLIGATOR -> ADDIER +1 ALLIED -> ALID +1 ALLAYS -> A +1 ALL -> SOUL +1 ALL -> OR +1 ALL -> ONLY +1 ALL -> ALWAYS +1 ALKALOIDS -> IKOLOITS +1 ALKALOIDS -> ACHELIDES +1 ALKALOID -> AKALOID +1 ALIGHTED -> LIGHTED +1 ALI'S -> ALWAYS +1 ALF -> A +1 ALESSANDRO -> ALISANDRO +1 ALCOHOL -> ALCOHOLBA +1 ALCOHOL -> AKELET +1 AL -> ARMANQUIN +1 AL -> ALICELA +1 AIN'T -> HAIN'T +1 AIN'T -> END +1 AIN'T -> ANNE +1 AIN'T -> AM +1 AIN'T -> A +1 AID -> APE +1 AH -> HA +1 AGRARIAN -> AGRIAN +1 AGONE -> GONE +1 AFTERWARDS -> AFTERWARD +1 AFTER -> TO +1 AFTER -> OUTDREW +1 AFT -> OFF +1 AFT -> APT +1 AFFLICTION -> AFFLICATION +1 AFFECTION -> EFFECT +1 ADVENTURES -> VENTURES +1 ADN'T -> HADN'T +1 ADHERENTS -> ADHERENCE +1 ADDED -> I +1 ADD -> ADDISMA +1 ADAIR -> EIGHT +1 AD -> AT +1 ACKNOWLEDGE -> ANNOUNCE +1 ACHESON -> ARCHISON +1 ACCEPT -> EXCEPT +1 ABSTAIN -> ABSTAINED +1 ABSOLVED -> ABSORBED +1 ABSENTEE -> ABSENTE +1 ABOUT -> WERE +1 ABOUT -> OF +1 ABOUT -> HE +1 ABOUT -> BUT +1 ABODE -> A +1 ABOARD -> QUESTED +1 ABOARD -> ABROAD +1 A -> YOUR +1 A -> WAS +1 A -> UNREPRESENTATIVE +1 A -> THEY +1 A -> SOME +1 A -> SO +1 
A -> OLY +1 A -> MUST +1 A -> MENDICULATED +1 A -> MELLICENT +1 A -> KIT +1 A -> IF +1 A -> HIS +1 A -> HIM +1 A -> GOOD +1 A -> EXERT +1 A -> EVENLENTH +1 A -> ESPECIALTY +1 A -> ENNARING +1 A -> ELUSIVE +1 A -> EARTHLY +1 A -> CLIFF +1 A -> AVIDE +1 A -> AT +1 A -> AROUND +1 A -> ARE +1 A -> APPROPATOR +1 A -> APOLLO +1 A -> AMITY +1 A -> AMERD +1 A -> ALOUD +1 A -> ALAD +1 A -> ABOVE +1 A -> ABASEMENT + +DELETIONS: count ref +20 THE +15 IT +15 IN +15 A +13 IS +10 AND +9 TO +8 OF +8 I +6 WAS +4 THAT +4 OTHER +4 HIM +4 AL +3 YOU +3 US +3 OR +3 MAKAN +3 HE +3 HAD +3 BRAHMAN +3 AT +3 ARE +3 AM +2 YARD +2 WIDE +2 WARD +2 VE +2 THIS +2 STAIRS +2 ROUND +2 ROOM +2 NOT +2 NEWBERG +2 MET +2 LACHAISE +2 HIS +2 HAVE +2 FISH +2 ELECTION +2 DIDN'T +2 DE +2 BY +2 BRACY +2 AS +2 AN +2 ALL +2 AH +1 ZAU +1 YOUTH +1 YOU'VE +1 YER +1 YELLS +1 YEAR +1 YE +1 WROTE +1 WORLD +1 WOODEN +1 WITH +1 WINE +1 WILL +1 WHILE +1 WERE +1 WELL +1 WEAL +1 WE'LL +1 WATER +1 WARN +1 WANT +1 WANDERERS +1 VOTE +1 VAULTED +1 UTTERED +1 UN +1 TURNED +1 TURBULENT +1 TRIBE +1 TIGER +1 THUS +1 THOSE +1 THING +1 THEY'RE +1 THEN +1 THEATRE +1 TALENTS +1 STAKES +1 STAINED +1 SPORT +1 SPIRITS +1 SPECIALTY +1 SOUNDLY +1 SOUL +1 SONS +1 SONNY +1 SNETKOV +1 SMALL +1 SIT +1 SIR +1 SIGHT +1 SIDE +1 SHUT +1 SHRUBBERY +1 SHORE +1 SHE +1 SET +1 SERVES +1 SCUSE +1 S +1 REW +1 REVOLUTION +1 RESPONSIBLE +1 REPRESENTATIVE +1 RECTIFY +1 RACHEL +1 PROVOCATOR +1 PROA +1 PRECENTORS +1 POSITION +1 POLO +1 PLAYING +1 PIGSKIN +1 PHUT'S +1 PHUT +1 PEER +1 PARR +1 PAINTER +1 OWN +1 OUR +1 ONE +1 ONCE +1 OLD +1 OFF +1 ODD +1 OBSERVED +1 O +1 N'T +1 N +1 MUCH +1 MOONLIGHT +1 MONKERS +1 MINUTELY +1 MINISTERS +1 MINE +1 MILICENT +1 MIDDY +1 MEET +1 MAKING +1 MAKE +1 M +1 LUCK +1 LUCID +1 LONG +1 LOCK +1 LL +1 LIKE +1 LET +1 LENIN +1 LEAVED +1 LAKE +1 KNOW +1 KINE +1 KID +1 KEY +1 JUST +1 JUG +1 JOINING +1 JIM +1 JILT +1 JACKAL +1 IVANOVITCH +1 IT'S +1 IMPROVED +1 ILLS +1 IF +1 HUNGRY +1 HOW +1 HOUSE +1 HITHER +1 HER +1 HEARTY +1 HAWED +1 HAS +1 HALTS +1 HA +1 GRUFFLY +1 GROWING +1 GOOD +1 GOAL +1 GO +1 GNAWING +1 GIVE +1 GIRLS +1 GAZE +1 FULL +1 FRANCISCO +1 FORMED +1 FOR +1 FOOTED +1 FOLDS +1 FLASHLIGHT +1 FIND +1 FELL +1 FAUVENT +1 FATHER +1 EXTEND +1 EXPECTED +1 ETERNAL +1 EM +1 EAST +1 E'S +1 DOUBT +1 DONE +1 DOING +1 DOIN +1 DESTRUCTION +1 DAY +1 DARE +1 CURSE +1 CROST +1 CREAM +1 COULDN'T +1 CORTONA +1 CORKLE +1 COPS +1 COOPS +1 COMICAL +1 COLOGNE +1 COLE +1 CLIMB +1 CLEVER +1 CLEFT +1 CHIPS +1 CHANGE +1 CELL +1 CATS +1 CART +1 CAN +1 BUZZARD +1 BUT +1 BULBS +1 BREAKERS +1 BOY +1 BOUT +1 BOUGHT +1 BOTH +1 BOAT +1 BITING +1 BIBLE +1 BE +1 BASEMENT +1 BAND +1 AZARIAH +1 ATTENTION +1 ASKED +1 ARCHIAS +1 ANCESTORS +1 ANALYSIS +1 ALLEN +1 ADULT + +INSERTIONS: count hyp +25 THE +23 A +21 IN +19 AND +18 IT +15 OF +13 TO +10 IS +10 HE +8 THAT +8 ONE +7 ON +7 AT +7 ARE +6 LAD +5 HAVE +5 HAD +4 OUT +4 OR +4 AN +3 YOU +3 WHICH +3 WAS +3 WALL +3 US +3 NOT +3 NIGHT +3 MORROW +3 MEN +3 ME +3 LIKE +3 HER +3 BUT +3 AS +3 ALL +2 WHO +2 WHILE +2 TURNED +2 THIS +2 STILL +2 MASTER +2 LESS +2 ITS +2 IF +2 HIM +2 FISH +2 FIELD +2 FELLOW +2 DID +2 DAYS +2 COLT +2 BOAT +2 BE +2 AM +1 YOURS +1 YORK +1 YET +1 YEAR +1 WORTHY +1 WOMAN +1 WILL +1 WHAT +1 WENT +1 WAY +1 WATCH +1 WAIN +1 VOTES +1 VAIN +1 TWO +1 TURN +1 TRADES +1 TOUSTRA +1 TOO +1 TOFORE +1 TIME +1 TIDE +1 THOUGH +1 THEY +1 THESE +1 THERE'S +1 THEMSELVES +1 THEM +1 THEE +1 THAT'S +1 TELL +1 TEEN +1 TECHNIC +1 TASTE +1 TALE +1 SYRIA +1 SUITED +1 SUIT +1 STROKE +1 STRIKE +1 STRAW +1 STOVE +1 STORE +1 STICK +1 STEAMS +1 
SPITE +1 SPITABLY +1 SPIRITS +1 SPECIALLY +1 SPEAKER +1 SPARED +1 SPACE +1 SONG +1 SOFT +1 SO +1 SNATHS +1 SMALL +1 SHARE +1 SEVENTH +1 SENT +1 SEATED +1 SCHUMANN +1 SCHLEVENT +1 SAKE +1 ROAR +1 RIPENESS +1 RICHA +1 REWARDED +1 REST +1 RESIST +1 REPARATE +1 RELATOR'S +1 READ +1 RAGE +1 PUSH +1 PROPER +1 PROCATE +1 POT +1 POINT +1 PLEASURE +1 PLEASE +1 PENCIL +1 PEM +1 PAD +1 OUR +1 OTIAN +1 OTHER +1 ORIENTOUINE +1 ONE'S +1 OLD +1 OH +1 OFF +1 OCCUPIED +1 O +1 NIGHT'S +1 NEW +1 NELLO +1 NEEDS +1 NEAR +1 NATION +1 MULES +1 MONTHS +1 MONTH'S +1 MIND +1 MESSION +1 MER +1 MEET +1 MEANTIS +1 MEANTESSE +1 MARK +1 LUCIEN +1 LOW +1 LONG +1 LIT +1 LIGHTS +1 LENS +1 LEISURE +1 LAYS +1 LADY +1 KNOWN +1 KNOW +1 KINDRED +1 KEI +1 JUST +1 JEMOSIS +1 JEALOUS +1 JARS +1 JACK +1 INSISTANTS +1 INCLINE +1 IMAGINE +1 ILL +1 ILIU +1 I +1 HOPE +1 HO +1 HIS +1 HID +1 HESITATINGLY +1 HEALTH +1 HEADS +1 HAM +1 GUY +1 GREAT +1 GOT +1 GOD +1 GIGS +1 GATHERED +1 FULL +1 FRIENDLY +1 FRED +1 FOUR +1 FORE +1 FOR +1 FLY +1 FILIANTLY +1 FIGURED +1 FAMOUS +1 FAITH +1 EYE +1 EXTRAVE +1 ETS +1 ENRICHED +1 EM +1 ELECTION +1 EASILY +1 EARTH +1 EARLS +1 DURRED +1 DOWN +1 DORRIS +1 DORIS +1 DO +1 DIXON +1 DESIGNED +1 DEAL +1 DE +1 DAY +1 DAME +1 COURT +1 CORDS +1 CONSOLE +1 COMPARABLE +1 COLD +1 COGNIZED +1 COAT +1 CHRISTIE +1 CHAUVELIN +1 CAPTURED +1 BURDEN +1 BRACES +1 BOU +1 BORN +1 BONSES +1 BONES +1 BLODGET +1 BELLEGER +1 BEDS +1 BATH +1 BACKS +1 AWAKE +1 ASH +1 ARISING +1 APPOINT +1 APOLIS +1 ANY +1 ALONG +1 ALLIES +1 AIR +1 ADULTERATED + +PER-WORD STATS: word corr tot_errs count_in_ref count_in_hyp +THE 3013 260 3134 3152 +A 1041 215 1145 1152 +AND 1714 199 1788 1839 +IN 731 150 808 804 +TO 1402 110 1444 1470 +IT 615 89 660 659 +IS 379 78 415 421 +I 810 74 853 841 +OF 1357 69 1386 1397 +THAT 651 65 682 685 +HE 673 58 693 711 +AT 250 55 279 276 +YOU 496 51 513 530 +THIS 223 50 246 250 +HER 268 42 289 289 +THEY 224 39 239 248 +ON 263 39 281 284 +HIS 474 39 493 494 +WAS 637 38 653 659 +AN 104 38 125 121 +THEIR 97 37 112 119 +HAD 361 35 375 382 +FOR 420 33 431 442 +AS 324 32 339 341 +ARE 147 31 159 166 +OR 93 29 109 106 +BUT 356 29 370 371 +WITH 369 28 385 381 +ALL 226 28 236 244 +O 25 26 41 35 +HAVE 221 24 233 233 +HAS 83 24 102 88 +NOT 392 22 401 405 +ANY 73 22 76 92 +WILL 152 21 166 159 +HIM 296 21 305 308 +ONE 209 20 216 222 +WERE 154 19 166 161 +THERE 162 19 174 169 +WHEN 151 18 162 158 +OUR 63 18 70 74 +OH 29 17 35 40 +INTO 113 16 117 125 +DO 144 16 153 151 +THEN 146 15 153 154 +SO 204 15 211 212 +OUT 158 15 166 165 +NO 186 15 190 197 +MEN 61 15 66 71 +UP 142 14 145 153 +THESE 43 14 51 49 +IF 162 14 167 171 +DE 7 14 18 10 +WHAT 183 13 192 187 +US 52 13 58 59 +THEM 141 13 144 151 +MISTER 70 13 74 79 +GURR 1 13 14 1 +BRAHMAN 6 13 19 6 +WHICH 187 12 191 195 +TOO 35 12 41 41 +SAID 247 12 252 254 +LIKE 85 12 89 93 +DICKIE 11 12 23 11 +WOULD 124 11 129 130 +WHO 146 11 151 152 +WHERE 53 11 57 60 +MAN 106 11 114 109 +LESLIE 12 11 23 12 +JUST 51 11 57 56 +ITS 52 11 57 58 +IT'S 17 11 24 21 +HERMON 0 11 11 0 +BY 193 11 199 198 +AM 58 11 62 65 +TWO 58 10 62 64 +SHE 285 10 292 288 +SEE 75 10 77 83 +OLD 54 10 57 61 +NOW 112 10 116 118 +I'M 30 10 34 36 +HERE 66 10 69 73 +FULL 14 10 18 20 +FAUVENT 0 10 10 0 +DID 93 10 95 101 +DICKY 0 10 0 10 +BE 311 10 313 319 +ANNIE 12 10 22 12 +SIGURD 0 9 9 0 +ROUND 13 9 18 17 +ROOM 33 9 37 38 +OFF 47 9 53 50 +MURDOCH 0 9 9 0 +MISSUS 25 9 31 28 +ME 259 9 260 267 +LET 58 9 64 61 +KNOW 89 9 95 92 +AROUND 16 9 20 21 +ZAU 0 8 8 0 +YOUR 99 8 104 102 +WILFRID 1 8 9 1 +SET 26 8 31 29 +OTHER 65 8 69 69 +MY 
246 8 248 252 +MUST 72 8 77 75 +LAD 6 8 7 13 +KIND 19 8 19 27 +HEAR 15 8 21 17 +FROM 177 8 179 183 +CHRIS 15 8 23 15 +AL 8 8 16 8 +WELL 81 7 85 84 +WE 149 7 151 154 +UPON 66 7 71 68 +TURNED 30 7 31 36 +THY 24 7 28 27 +THUS 8 7 13 10 +THOUGH 35 7 39 38 +THOU 65 7 68 69 +SIR 37 7 40 41 +PIGEONCOTE 0 7 7 0 +OL 0 7 7 0 +MURDOCK 0 7 0 7 +LOOK 30 7 33 34 +LAST 45 7 47 50 +IZZY 0 7 7 0 +HATH 14 7 17 18 +GOT 37 7 40 41 +FOUR 20 7 24 23 +BRACY 0 7 7 0 +BEEN 129 7 133 132 +ASKED 42 7 46 45 +AIN'T 4 7 11 4 +ZARATHUSTRA 0 6 6 0 +WHITE 15 6 18 18 +WHILE 19 6 22 22 +WALL 3 6 4 8 +THEY'RE 5 6 11 5 +SHALL 58 6 62 60 +PRIORESS 1 6 7 1 +ONCE 52 6 56 54 +NIGHT 49 6 49 55 +MINE 10 6 15 11 +M 1 6 7 1 +LITTLE 91 6 91 97 +LEFT 33 6 38 34 +LARGE 10 6 10 16 +LARCH 1 6 7 1 +KINE 0 6 6 0 +IM 0 6 6 0 +I'VE 8 6 10 12 +I'LL 20 6 24 22 +HOW 77 6 81 79 +GOOD 65 6 67 69 +GOD 25 6 29 27 +FATHER 46 6 51 47 +DOG 5 6 9 7 +COME 68 6 71 71 +CINDERLAD 1 6 7 1 +CIGAR 1 6 1 7 +ARCHY 0 6 6 0 +ARCHIE 0 6 0 6 +ANYONE 1 6 6 2 +AH 6 6 9 9 +ABOUT 75 6 79 77 +YOU'RE 4 5 9 4 +YOU'LL 3 5 4 7 +YO 0 5 3 2 +YET 32 5 34 35 +YES 41 5 42 45 +WILFRED 0 5 0 5 +WENT 74 5 76 77 +TRY 12 5 17 12 +TIME 82 5 83 86 +THAT'S 19 5 23 20 +THAN 72 5 74 75 +STILL 39 5 41 42 +SINDBAD 0 5 5 0 +SINBAD 0 5 0 5 +SHARRKAN 0 5 5 0 +SHARKAN 0 5 0 5 +SEEMED 16 5 18 19 +SEA 14 5 15 18 +RUN 6 5 9 8 +RETURNED 17 5 19 20 +OVER 47 5 49 50 +ORGANIZER 0 5 0 5 +ORGANISER 0 5 5 0 +OLIVE 2 5 7 2 +N'T 0 5 5 0 +MISS 15 5 15 20 +MESTIENNE 0 5 5 0 +MADE 76 5 77 80 +LOVE 26 5 29 28 +KNIGHT 4 5 8 5 +INTEREST 6 5 7 10 +HEAD 40 5 41 44 +HAIR 6 5 6 11 +GOING 34 5 39 34 +GO 60 5 61 64 +EM 0 5 2 3 +DON'T 71 5 73 74 +DAY 58 5 61 60 +CRAWFISH 2 5 7 2 +COPS 0 5 5 0 +BEING 36 5 39 38 +YOU'VE 3 4 7 3 +YE 11 4 14 12 +WORLD 12 4 14 14 +WILDERNESS 4 4 8 4 +WIDE 6 4 8 8 +WICKER'S 5 4 8 6 +WHEEL 1 4 2 4 +WE'RE 3 4 7 3 +UNTO 7 4 10 8 +TOWARD 7 4 9 9 +TILL 23 4 24 26 +TIGLATH 0 4 4 0 +TAKE 44 4 45 47 +STREET 13 4 15 15 +STATE 22 4 24 24 +SON 15 4 18 16 +SOME 76 4 77 79 +SMALL 15 4 17 17 +SIDE 18 4 22 18 +SHOULD 70 4 72 72 +SERGEY 1 4 5 1 +SEEM 6 4 8 8 +SAYS 25 4 28 26 +SAY 80 4 83 81 +SAT 8 4 9 11 +RETURN 15 4 18 16 +REGIN 0 4 4 0 +READ 8 4 8 12 +RAYSTOKE 1 4 5 1 +PUT 38 4 40 40 +POURED 2 4 3 5 +PLACE 36 4 39 37 +PILESER 0 4 4 0 +OUGHT 14 4 16 16 +ONLY 71 4 73 73 +MYRTILUS 0 4 4 0 +MIND 23 4 24 26 +MILICENT 3 4 6 4 +MET 8 4 11 9 +MAY 40 4 43 41 +MASTER 22 4 23 25 +MANY 27 4 27 31 +MAKE 64 4 66 66 +MAKAN 4 4 8 4 +LONG 51 4 52 54 +LIL 0 4 4 0 +LETTERS 3 4 7 3 +LESS 7 4 9 9 +LAW 2 4 5 3 +LADY 14 4 15 17 +JURY 0 4 4 0 +JACKAL 4 4 8 4 +INTERESTS 2 4 5 3 +I'D 9 4 13 9 +HONOUR 2 4 3 5 +HONOR 0 4 3 1 +HOME 32 4 34 34 +HARMON 0 4 0 4 +HALF 22 4 23 25 +GONE 16 4 17 19 +GOLD 6 4 6 10 +GET 50 4 53 51 +FISH 5 4 7 7 +FIND 23 4 25 25 +FARRINDER 0 4 4 0 +FAITH 8 4 9 11 +EYE 9 4 11 11 +END 13 4 15 15 +EAST 6 4 8 8 +DOWN 66 4 68 68 +DONE 36 4 39 37 +DOES 14 4 15 17 +DIDN'T 18 4 21 19 +DEAR 14 4 14 18 +DAYS 12 4 13 15 +COURT 10 4 12 12 +COULD 111 4 114 112 +CASE 18 4 20 20 +BEG 5 4 8 6 +BEALE 6 4 10 6 +BASIL 0 4 4 0 +BANYAN 0 4 4 0 +ARSINOE 0 4 4 0 +ALWAYS 33 4 34 36 +ALLOW 6 4 8 8 +ADD 6 4 9 7 +YER 0 3 3 0 +YEARS 19 3 20 21 +YARD 0 3 2 1 +WRITE 0 3 1 2 +WORTH 1 3 2 3 +WORK 25 3 26 27 +WORDS 18 3 18 21 +WON'T 11 3 13 12 +WINE 6 3 8 7 +WHOLE 22 3 22 25 +WHETHER 11 3 12 13 +WELLS 0 3 2 1 +WAYNE 0 3 3 0 +WATER 20 3 22 21 +WATCH 12 3 12 15 +WARD 3 3 6 3 +WAIN 0 3 0 3 +VOTES 1 3 3 2 +VERY 83 3 84 85 +VENT 0 3 0 3 +VAIN 4 3 5 6 +UN 0 3 2 1 +ULRICA 0 3 3 0 +TRIED 19 3 19 22 +TRIBE 1 3 4 1 +TIGER 10 3 13 10 +THROUGH 35 3 37 36 
+THOUGHT 39 3 39 42 +THOSE 37 3 38 39 +THERE'S 10 3 11 12 +TELL 52 3 52 55 +SYRUP 1 3 4 1 +SUN 7 3 7 10 +SUIT 2 3 2 5 +STONEWALL 0 3 3 0 +STONE 11 3 11 14 +STAYED 1 3 4 1 +SPAKE 2 3 5 2 +SOUL 10 3 11 12 +SORT 12 3 12 15 +SONG 1 3 2 3 +SHUT 8 3 10 9 +SHERIFF 1 3 4 1 +SHE'LL 1 3 2 3 +SHARDURIS 0 3 3 0 +SENT 9 3 10 11 +SEEN 29 3 32 29 +SEAMAN 3 3 5 4 +SCHOOL 6 3 8 7 +SANCT 0 3 3 0 +SAINT 19 3 19 22 +S 1 3 3 2 +RIGHT 30 3 33 30 +RIGAN 0 3 0 3 +RAYSTROKE 0 3 0 3 +RAY 0 3 1 2 +RATHER 14 3 16 15 +RAN 8 3 9 10 +RAISE 3 3 6 3 +QUEST 0 3 1 2 +PRODUCED 6 3 7 8 +PROCLUS 0 3 3 0 +PROAS 0 3 3 0 +PROA 0 3 3 0 +PRIEST 3 3 6 3 +POOR 23 3 25 24 +POLL 0 3 3 0 +PIGEON 1 3 1 4 +PHUT 0 3 3 0 +PETER 13 3 15 14 +PASS 6 3 6 9 +PACE 0 3 1 2 +OWN 43 3 45 44 +OPEN 14 3 15 16 +ONE'S 5 3 5 8 +OFFICERS 2 3 3 4 +NUZHAT 1 3 4 1 +NOUGHT 0 3 2 1 +NORTH 3 3 4 5 +NOR 19 3 21 20 +NEW 28 3 30 29 +NEAREST 2 3 5 2 +NEAR 16 3 16 19 +MUCH 40 3 41 42 +MOSES 5 3 8 5 +MORROW 6 3 6 9 +MORE 98 3 99 100 +MISTAH 0 3 3 0 +MEAT 3 3 6 3 +MC 1 3 4 1 +MATTER 21 3 22 23 +MATE 1 3 2 3 +MARSHAL 3 3 6 3 +MARRIAGE 5 3 6 7 +LUCIEN 0 3 2 1 +LOW 12 3 13 14 +LOOKED 23 3 25 24 +LINE 5 3 7 6 +LIKED 7 3 9 8 +LIDDY 0 3 3 0 +LATE 8 3 9 10 +JUDGE 5 3 7 6 +JES 0 3 3 0 +JAKIE 0 3 0 3 +JAKEY 0 3 3 0 +JACK 3 3 3 6 +IN'T 0 3 3 0 +HOWEVER 13 3 16 13 +HOLD 7 3 10 7 +HO 1 3 3 2 +HERMON'S 0 3 3 0 +HERMANN 0 3 0 3 +HERMAN 0 3 0 3 +HERE'S 0 3 2 1 +HEART 28 3 28 31 +HAYES 0 3 1 2 +HARD 14 3 16 15 +HANDS 13 3 15 14 +HAND 39 3 40 41 +GRAY 1 3 3 2 +GOIN 1 3 1 4 +GOBEY'S 0 3 3 0 +GIVING 6 3 8 7 +GIVEN 12 3 13 14 +GIVE 44 3 47 44 +GIT 0 3 1 2 +GIRL 10 3 11 12 +FURTHER 6 3 6 9 +FRANZ 5 3 8 5 +FOLLOWED 8 3 10 9 +FOLDS 0 3 1 2 +FOALS 1 3 4 1 +FOAL 2 3 5 2 +FIELD 3 3 4 5 +FELLOW 13 3 13 16 +FAVOUR 3 3 4 5 +FAVOR 0 3 2 1 +FAFNIR 0 3 3 0 +EYES 34 3 35 36 +EXECUTIVE 2 3 5 2 +EVER 26 3 27 28 +ETERNAL 0 3 3 0 +ELECTION 2 3 4 3 +EILEEN 0 3 3 0 +EACH 18 3 18 21 +E'S 0 3 3 0 +DRINK 21 3 24 21 +DOUBT 7 3 9 8 +DOST 3 3 4 5 +DOOR 30 3 30 33 +DALY 0 3 3 0 +DALEY 0 3 0 3 +CUT 10 3 12 11 +CREAM 3 3 6 3 +CORNER 10 3 13 10 +CORN 0 3 0 3 +CONFECTIONARY 1 3 4 1 +CLIMB 0 3 1 2 +CHANGE 7 3 9 8 +CERTAIN 14 3 16 15 +CATTLE 4 3 5 6 +CAN'T 15 3 16 17 +CAN 78 3 80 79 +BRYNHILD 0 3 3 0 +BROTHERS 6 3 8 7 +BOAT 7 3 8 9 +BLODGETT 0 3 3 0 +BIT 9 3 11 10 +BEFELL 0 3 0 3 +BEFEL 0 3 3 0 +BEECH 0 3 0 3 +AWHILE 1 3 3 2 +AUNT 4 3 6 5 +AREN'T 1 3 3 2 +AILEEN 0 3 0 3 +ZAO 0 2 0 2 +ZA 0 2 0 2 +YOURSELVES 2 2 2 4 +YOU'D 3 2 5 3 +YO'LL 0 2 2 0 +YEAR 6 2 7 7 +WROTE 2 2 4 2 +WRONG 4 2 4 6 +WRAPPERS 0 2 0 2 +WOUNDS 1 2 2 2 +WORSE 10 2 10 12 +WORKING 8 2 8 10 +WORKED 1 2 2 2 +WORD 13 2 15 13 +WONDER 4 2 5 5 +WON 1 2 1 3 +WOMEN 14 2 15 15 +WOMAN 18 2 19 19 +WITCH 1 2 2 2 +WINTER 3 2 5 3 +WINDOW 17 2 17 19 +WILKES 0 2 0 2 +WILD 7 2 9 7 +WIFE 14 2 16 14 +WI 0 2 1 1 +WHY 46 2 47 47 +WHOSE 16 2 16 18 +WHO'S 0 2 0 2 +WHERE'S 2 2 4 2 +WHENEVER 3 2 4 4 +WE'LL 1 2 2 2 +WATCHED 2 2 4 2 +WARM 0 2 1 1 +WAR 4 2 5 5 +WANTED 7 2 8 8 +WANT 24 2 26 24 +WAITING 6 2 7 7 +VILLAGES 0 2 1 1 +VE 0 2 2 0 +VALLEY 2 2 2 4 +UTTER 1 2 2 2 +USE 11 2 13 11 +URARTU 0 2 2 0 +UPSTAIRS 3 2 3 5 +UNEXPECTED 3 2 3 5 +UNDERTAKERS 0 2 0 2 +UNDERTAKER'S 0 2 2 0 +TURN 8 2 8 10 +TUMBLED 1 2 1 3 +TRULY 4 2 5 5 +TRIFLE 0 2 1 1 +TRIBES 4 2 6 4 +TOWN 14 2 14 16 +TOWER 3 2 4 4 +TOWARDS 12 2 13 13 +TORQUILSTONE 0 2 2 0 +TORCHLESTONE 0 2 0 2 +TOP 2 2 3 3 +TONIGHT 0 2 2 0 +TOM 8 2 9 9 +TOLD 24 2 26 24 +TIRED 5 2 7 5 +TIDINGS 0 2 2 0 +THOUSANDS 3 2 5 3 +THOUSAND 16 2 16 18 +THIRST 7 2 9 7 +THINK 45 2 46 46 +THINGS 19 2 21 19 +THING 20 2 21 21 +THEE 26 2 27 27 +THEATRE 0 2 
2 0 +TEN 17 2 18 18 +TELLTALE 0 2 2 0 +TASTE 5 2 6 6 +TAMAR 0 2 2 0 +TALKED 4 2 5 5 +TALENTS 1 2 3 1 +TAKEN 21 2 22 22 +SYRIA 3 2 4 4 +SURELY 7 2 8 8 +SUDDEN 7 2 8 8 +STRUCK 11 2 13 11 +STROKE 2 2 2 4 +STRIKE 1 2 2 2 +STRAIGHT 4 2 5 5 +STORE 2 2 3 3 +STOOD 21 2 22 22 +STOLE 3 2 3 5 +STOKER 0 2 2 0 +STOCKER 0 2 0 2 +STICK 4 2 5 5 +STERN 0 2 1 1 +STEPS 7 2 7 9 +STEP 2 2 3 3 +STATES 6 2 7 7 +STARED 3 2 4 4 +STAIRS 1 2 3 1 +SPIRITS 1 2 2 2 +SPACE 2 2 2 4 +SOUGHT 4 2 6 4 +SOMEONE 0 2 2 0 +SINGA 0 2 2 0 +SILVER 6 2 7 7 +SIGNING 0 2 1 1 +SIGHT 7 2 8 8 +SHORES 0 2 0 2 +SHIPS 2 2 4 2 +SHEPHERD 0 2 1 1 +SHELLFISH 0 2 0 2 +SHELL 1 2 3 1 +SHAWS 0 2 2 0 +SERF 0 2 0 2 +SELLER 0 2 2 0 +SEEMS 12 2 13 13 +SEEK 8 2 10 8 +SAKE 7 2 7 9 +SAIL 2 2 4 2 +SAH 0 2 2 0 +RUM 0 2 1 1 +ROSE 11 2 12 12 +ROPE'S 0 2 2 0 +ROOMFELLOW 0 2 2 0 +RISDON 5 2 7 5 +RIDER 0 2 2 0 +RIDE 5 2 5 7 +RHODIAN 0 2 2 0 +REVOLUTION 2 2 3 3 +REVELATION 0 2 1 1 +REST 19 2 19 21 +REMAINED 7 2 8 8 +REFORMERS 0 2 2 0 +REFORMED 1 2 1 3 +READY 12 2 13 13 +REACHED 11 2 12 12 +RE 2 2 2 4 +RAISED 6 2 6 8 +RAGED 0 2 0 2 +RAGE 1 2 2 2 +RACHEL 1 2 3 1 +QUEEN 5 2 5 7 +PRODUCE 5 2 6 6 +PROCLASS 0 2 0 2 +PRIORS 0 2 0 2 +PRIESTS 1 2 1 3 +PRETTY 7 2 8 8 +PRESENT 16 2 16 18 +PREACH 0 2 0 2 +PRAISED 2 2 2 4 +POVERTY 1 2 3 1 +POST 5 2 6 6 +POOL 0 2 0 2 +POLY 0 2 0 2 +POLE 0 2 0 2 +PLATTERBAFF 1 2 3 1 +PLAIN 3 2 5 3 +PIECE 3 2 4 4 +PHOSPHOR 0 2 2 0 +PHILIP 2 2 3 3 +PERE 0 2 2 0 +PEG 0 2 0 2 +PASSED 9 2 10 10 +PARTS 4 2 5 5 +PART 20 2 21 21 +PARR 0 2 2 0 +ONTO 0 2 2 0 +ONES 4 2 4 6 +OFTEN 21 2 21 23 +NURSE 0 2 0 2 +NORTHFIELD 0 2 2 0 +NONE 12 2 13 13 +NIGHT'S 1 2 1 3 +NEWBURG 0 2 0 2 +NEWBERG 0 2 2 0 +NEIGHBOUR 1 2 1 3 +NEIGHBORS 3 2 5 3 +NEIGHBOR 0 2 2 0 +NATURALLY 6 2 7 7 +NATURAL 6 2 7 7 +MOUTH 6 2 7 7 +MOST 42 2 44 42 +MIN 0 2 0 2 +MIGHT 43 2 43 45 +MIDIAN 0 2 2 0 +MESTER 0 2 2 0 +MERLONUS 0 2 2 0 +MERIT 1 2 3 1 +MENAHEM 0 2 2 0 +MELLICENT 0 2 0 2 +MEET 8 2 9 9 +MEAN 20 2 20 22 +MATI 0 2 2 0 +MARTIAN 2 2 2 4 +MANKATO 0 2 2 0 +LUNA'S 1 2 3 1 +LUNA 1 2 3 1 +LUCIAN 0 2 0 2 +LOVER 3 2 3 5 +LOVED 7 2 7 9 +LORD 17 2 19 17 +LOCK 2 2 4 2 +LOBSTER 10 2 12 10 +LL 0 2 2 0 +LIZZIE 0 2 0 2 +LIZLY 0 2 0 2 +LIVED 5 2 6 6 +LIVE 16 2 17 17 +LILBURN 1 2 3 1 +LIFTED 2 2 2 4 +LIE 3 2 4 4 +LEVER 1 2 3 1 +LEAVE 21 2 22 22 +LAWN 0 2 0 2 +LAUGHED 10 2 11 11 +LAUDERDALE 0 2 2 0 +LAND 19 2 20 20 +LAKE 4 2 6 4 +LAID 8 2 9 9 +LACHAISE 0 2 2 0 +LA 1 2 3 1 +L 2 2 4 2 +KNOWN 10 2 10 12 +KITE 0 2 2 0 +KING 43 2 45 43 +KEYS 3 2 5 3 +KEEP 14 2 16 14 +JULIEN 0 2 2 0 +JULIAN 0 2 0 2 +JUG 2 2 4 2 +JUDGMENT 7 2 9 7 +JEST 0 2 0 2 +IZZIE 0 2 0 2 +IVANOVITCH 3 2 5 3 +INN 4 2 4 6 +IMPROVE 0 2 1 1 +ILU 0 2 2 0 +ICES 1 2 3 1 +HURT 5 2 6 6 +HOZE 0 2 2 0 +HOUSE 36 2 37 37 +HOUR 15 2 16 16 +HOSE 0 2 0 2 +HORSTIUS 0 2 2 0 +HORSES 4 2 4 6 +HOPE 15 2 16 16 +HOO'S 0 2 2 0 +HONOURS 0 2 1 1 +HONORS 1 2 2 2 +HITHER 5 2 7 5 +HIDINGS 0 2 0 2 +HI 0 2 1 1 +HERMANN'S 0 2 0 2 +HELEN 0 2 1 1 +HEARD 27 2 28 28 +HE'S 7 2 9 7 +HE'D 4 2 4 6 +HAYS 0 2 2 0 +HALL 12 2 14 12 +HAID 0 2 2 0 +HA 2 2 3 3 +GUY 1 2 1 3 +GUNNER 0 2 0 2 +GUNNAR 0 2 2 0 +GUN 2 2 2 4 +GREY 0 2 1 1 +GREEN 2 2 2 4 +GREAT 39 2 40 40 +GRANDPAP 1 2 3 1 +GRANDPA 0 2 0 2 +GRAHAM 0 2 0 2 +GRAEME 0 2 2 0 +GOVERNOR 2 2 2 4 +GOVERNMENT 19 2 20 20 +GOV'NOR 0 2 2 0 +GORDON 19 2 21 19 +GOAL 1 2 3 1 +GIRK 0 2 0 2 +GIORGIO 0 2 2 0 +GERT 0 2 0 2 +GEORGIO 0 2 0 2 +GEORGE'S 0 2 1 1 +GENTLEMEN 3 2 5 3 +GENTLEMAN 7 2 7 9 +GAVE 32 2 32 34 +GARR 0 2 0 2 +FRONT 11 2 13 11 +FRISTOE 0 2 2 0 +FREE 12 2 12 14 +FRANCE 3 2 3 5 +FORM 9 2 9 11 +FORCE 7 2 7 9 +FOOT 1 2 1 3 +FOOLS 1 2 2 
2 +FOOL 3 2 4 4 +FOLLOWING 10 2 10 12 +FOLLOW 9 2 10 10 +FLY 4 2 5 5 +FLEROV'S 0 2 2 0 +FLEROV 0 2 2 0 +FLAVOR 0 2 2 0 +FIT 7 2 7 9 +FISHING 2 2 3 3 +FISHIN 0 2 1 1 +FESTAL 0 2 2 0 +FELL 14 2 15 15 +FEAST 0 2 0 2 +FAVOURITE 0 2 1 1 +FAVORITE 0 2 1 1 +FAUCHELEVENT 22 2 24 22 +FATHER'S 5 2 7 5 +FAT 1 2 2 2 +FAST 11 2 12 12 +FAR 21 2 21 23 +FANNY 3 2 5 3 +FAN 2 2 3 3 +FAMILY 18 2 18 20 +FAFNIR'S 0 2 2 0 +FAFNER 0 2 0 2 +FACE 31 2 33 31 +EXPECTED 6 2 8 6 +EXECUTED 1 2 1 3 +EVEN 46 2 46 48 +EUSEBIUS 0 2 2 0 +EUNUCHS 0 2 0 2 +EUNUCH 9 2 11 9 +ETHELRIED 0 2 2 0 +ESPECIALLY 5 2 7 5 +ERE 1 2 2 2 +ENOUGH 30 2 31 31 +ENDURE 2 2 3 3 +ELEXANDER 0 2 2 0 +ELASTIC 0 2 2 0 +EITHER 8 2 8 10 +EARTH 19 2 20 20 +DUKE 10 2 12 10 +DOING 9 2 10 10 +DOEST 0 2 2 0 +DISTRESS 3 2 4 4 +DETECTIVE 2 2 2 4 +DESK 0 2 2 0 +DEFENSE 2 2 4 2 +DEFENCE 2 2 2 4 +DEBRACY 0 2 0 2 +DEARLY 0 2 2 0 +DEAD 19 2 21 19 +DAT 0 2 1 1 +DARK 11 2 11 13 +DARE 4 2 6 4 +DAPHNE 2 2 4 2 +DA 0 2 2 0 +D 0 2 2 0 +CUPS 0 2 0 2 +CROFISH 0 2 0 2 +CRIS 0 2 0 2 +COURSE 15 2 16 16 +COUNTRY 15 2 17 15 +COUNTRIES 4 2 4 6 +COUNSEL 1 2 2 2 +COULDN'T 6 2 7 7 +COPSE 0 2 0 2 +CONTEND 0 2 2 0 +CONFECTIONERY 0 2 0 2 +COMMONWEALTH 0 2 2 0 +COMING 14 2 16 14 +COMES 14 2 15 15 +COLT 0 2 0 2 +COLE 0 2 2 0 +COLD 5 2 6 6 +COAL 0 2 0 2 +CLIFF 4 2 4 6 +CINDER 0 2 0 2 +CHRIST 3 2 3 5 +CHLORATE 0 2 2 0 +CHEERY 0 2 0 2 +CHEEK 0 2 1 1 +CHARMED 0 2 2 0 +CHARGE 7 2 8 8 +CELLAR 0 2 0 2 +CAST 9 2 9 11 +CARROLL 1 2 3 1 +CAROL 0 2 0 2 +CAME 65 2 66 66 +CALM 0 2 0 2 +CALLED 23 2 24 24 +CALIFORNIAN 0 2 2 0 +CALIFORNIA 0 2 0 2 +BUZZARD 7 2 9 7 +BURNEHELD 0 2 0 2 +BUILD 1 2 1 3 +BRUCE 2 2 3 3 +BROUGHT 10 2 11 11 +BROTHER'S 1 2 1 3 +BRAVE 5 2 6 6 +BRAM 0 2 0 2 +BRACES 0 2 0 2 +BOY 26 2 27 27 +BOUT 0 2 2 0 +BOTTOM 5 2 5 7 +BOOM 1 2 1 3 +BOIL 3 2 5 3 +BOB 4 2 5 5 +BLOW 3 2 4 4 +BLODGET 0 2 0 2 +BILL 2 2 3 3 +BIBLICAL 2 2 4 2 +BETTER 28 2 29 29 +BESSY 7 2 9 7 +BEND 1 2 1 3 +BELL 2 2 3 3 +BEGAN 15 2 16 16 +BEFORE 54 2 56 54 +BECAME 9 2 10 10 +BASSORAH 0 2 2 0 +BANG 0 2 1 1 +BAND 7 2 9 7 +BAN 0 2 0 2 +BAGHDAD 4 2 6 4 +BAG 4 2 5 5 +BAD 8 2 10 8 +AWKWARD 3 2 3 5 +AWK 0 2 2 0 +AWAKE 4 2 4 6 +ATUM 0 2 2 0 +ATTENTION 5 2 7 5 +ASSYRIA 2 2 3 3 +ASK 16 2 17 17 +ART 12 2 13 13 +ARSENO 0 2 0 2 +ARPAD 0 2 2 0 +ARMED 2 2 4 2 +ARCHBISHOP 0 2 2 0 +ARCHBISH 0 2 0 2 +ANOTHER 30 2 31 31 +ANNIE'S 0 2 2 0 +ANNE 1 2 1 3 +AMYNTAS 0 2 2 0 +ALTHEA 1 2 3 1 +ALSO 31 2 33 31 +ALONG 15 2 15 17 +ALONE 9 2 10 10 +ALOES 0 2 2 0 +ALLIES 0 2 0 2 +ALKALOIDS 0 2 2 0 +ALEXANDER 0 2 0 2 +ALCOHOL 3 2 5 3 +AIR 10 2 10 12 +AFTER 95 2 97 95 +AFT 0 2 2 0 +AFFECTION 0 2 1 1 +ADDED 12 2 13 13 +ABOVE 9 2 9 11 +ABOARD 1 2 3 1 +ZENOUS 0 1 0 1 +ZEMSTVOS 0 1 1 0 +ZAYNAB 0 1 1 0 +ZAWAIN 0 1 0 1 +YUSS 0 1 1 0 +YOUTH 3 1 4 3 +YOURS 1 1 1 2 +YOUNG 39 1 39 40 +YORK 3 1 3 4 +YONWAY 0 1 0 1 +YOKE 0 1 0 1 +YO' 0 1 1 0 +YES'M 0 1 1 0 +YEOMEN 0 1 1 0 +YELLS 0 1 1 0 +YE'LL 0 1 0 1 +YE'D 0 1 0 1 +YAUSKY 0 1 1 0 +YAHWEH 0 1 1 0 +WUNNERED 0 1 1 0 +WRITER 1 1 1 2 +WRIT 0 1 1 0 +WRISTS 0 1 0 1 +WRETS 0 1 0 1 +WRETCHES 0 1 0 1 +WRETCHED 2 1 2 3 +WRAYE 0 1 0 1 +WRAPPED 1 1 1 2 +WOUNDED 5 1 6 5 +WOTTETH 0 1 1 0 +WOT 1 1 2 1 +WORTHY 1 1 1 2 +WORSHIPPERS 0 1 1 0 +WORSHIPPED 0 1 0 1 +WORSHIP'S 0 1 1 0 +WORSHIP 4 1 4 5 +WORRY 2 1 3 2 +WORKS 4 1 5 4 +WORKMEN 0 1 0 1 +WORKMAN 0 1 1 0 +WORKINGMEN 0 1 1 0 +WORKADAY 0 1 0 1 +WORDERNESS 0 1 0 1 +WOOLWRIGHT 0 1 0 1 +WOODS 2 1 2 3 +WOODEN 5 1 6 5 +WOOD 3 1 4 3 +WONDERED 7 1 8 7 +WOKE 0 1 1 0 +WOE 1 1 2 1 +WODE'S 0 1 1 0 +WIVES 0 1 0 1 +WITHOUT 51 1 51 52 +WISLOVITCH 0 1 0 1 +WISHT 0 1 1 0 +WISHED 5 1 5 6 +WIS 0 1 0 1 
+WIRES 0 1 1 0 +WINNING 1 1 1 2 +WINGED 0 1 0 1 +WING 0 1 1 0 +WILT 4 1 4 5 +WILLY 0 1 1 0 +WILKSES 0 1 1 0 +WILKS 0 1 1 0 +WILHELM 0 1 0 1 +WILFRIED 0 1 0 1 +WILDLY 1 1 2 1 +WILDEST 0 1 1 0 +WIELD 0 1 1 0 +WIDELY 0 1 0 1 +WIDEAWAKE 0 1 1 0 +WICKER 6 1 7 6 +WHOLLY 2 1 2 3 +WHO'D 0 1 1 0 +WHITEWAY 0 1 0 1 +WHISKIRT 0 1 0 1 +WHISKERED 0 1 1 0 +WHILOME 0 1 1 0 +WHEREAS 0 1 0 1 +WHEREABOUTS 2 1 3 2 +WHEREABOUT 0 1 0 1 +WHER 0 1 1 0 +WHEELER 0 1 1 0 +WHATEVER 10 1 10 11 +WHATETH 0 1 0 1 +WHATE'ER 0 1 1 0 +WHAT'S 5 1 6 5 +WHALER 0 1 0 1 +WHACKS 0 1 1 0 +WESTWARD 1 1 1 2 +WESTPORT 0 1 1 0 +WEIGHED 0 1 1 0 +WEEVILY 0 1 0 1 +WEEVILLY 0 1 1 0 +WEEDS 0 1 1 0 +WEDDINGS 0 1 0 1 +WEBBS 0 1 0 1 +WEBB'S 0 1 1 0 +WEATHER 5 1 5 6 +WEARINESS 0 1 0 1 +WEAL 0 1 1 0 +WE'VE 2 1 3 2 +WAY 62 1 62 63 +WAX 0 1 0 1 +WAVERLY 0 1 1 0 +WAVERLEY 0 1 0 1 +WATONWAN 0 1 1 0 +WATERWAM 0 1 0 1 +WATERED 0 1 0 1 +WATCHMAKERS 0 1 0 1 +WATCHMAKER'S 0 1 1 0 +WASTED 1 1 1 2 +WARNED 0 1 0 1 +WARN 1 1 2 1 +WARMITTING 0 1 0 1 +WARDERS 0 1 1 0 +WARD'S 0 1 1 0 +WANTS 5 1 5 6 +WANDERERS 0 1 1 0 +WANDERER 1 1 2 1 +WANDER 0 1 0 1 +WALT 0 1 0 1 +WALLA'S 0 1 0 1 +WALKING 5 1 5 6 +WALK 4 1 5 4 +WALES 0 1 0 1 +WAKE 2 1 3 2 +WAITIN 0 1 1 0 +WAITED 5 1 5 6 +WAGGOT 0 1 1 0 +WAGGING 0 1 1 0 +WAGED 0 1 1 0 +WAG 0 1 0 1 +WADED 0 1 1 0 +WADE 0 1 0 1 +WADDED 0 1 1 0 +VUENT 0 1 0 1 +VOWELS 0 1 1 0 +VOWALS 0 1 0 1 +VOUGHT 0 1 1 0 +VOTE 3 1 4 3 +VOMITING 0 1 1 0 +VOLVITUR 0 1 1 0 +VOLVETER 0 1 0 1 +VOICED 0 1 1 0 +VIOLENT 1 1 1 2 +VIOLENCE 15 1 16 15 +VILLE 0 1 0 1 +VILLAGERS 0 1 1 0 +VIL 0 1 1 0 +VIGILANCE 1 1 2 1 +VICTIMIZE 0 1 1 0 +VICTIMISE 0 1 0 1 +VETERY 0 1 0 1 +VESTRY 0 1 1 0 +VES 0 1 0 1 +VERSEST 0 1 0 1 +VERSES 4 1 5 4 +VERILY 2 1 3 2 +VENTURES 0 1 0 1 +VENTRILOQUIST 0 1 1 0 +VENTILLA 0 1 0 1 +VEIN 0 1 0 1 +VEHEMENTLY 0 1 1 0 +VAZARRE 0 1 0 1 +VAVASOUR 0 1 1 0 +VAVASOR 0 1 0 1 +VAULTS 0 1 0 1 +VAULTED 0 1 1 0 +VAULT 8 1 9 8 +VAUGIRARD 0 1 1 0 +VATS 0 1 0 1 +VAST 2 1 3 2 +VASSILIEVITCH 0 1 1 0 +VARIOUS 2 1 2 3 +VANE 0 1 1 0 +VALET 1 1 2 1 +UZHAT 0 1 0 1 +UTTERED 3 1 4 3 +USUALLY 5 1 6 5 +USUAL 3 1 3 4 +USED 9 1 9 10 +URITU 0 1 0 1 +URGED 1 1 2 1 +URARTIAN 0 1 1 0 +URA 0 1 0 1 +UPWARD 1 1 1 2 +UNSTEAD 0 1 0 1 +UNS 0 1 1 0 +UNREWARDED 0 1 1 0 +UNREPRESENTATIVE 0 1 0 1 +UNOCCUPIED 0 1 1 0 +UNLUCK 0 1 0 1 +UNLESS 7 1 8 7 +UNIQUE 0 1 1 0 +UNHUNGRY 0 1 0 1 +UNHESITATINGLY 0 1 1 0 +UNHAPPY 4 1 5 4 +UNEXPECTEDLY 1 1 2 1 +UNDERSTRUCTION 0 1 0 1 +UNDER 36 1 37 36 +UNCREAM 0 1 0 1 +UNCLE 6 1 7 6 +UNCHANGED 0 1 0 1 +UNADULTERATED 0 1 1 0 +UM 0 1 0 1 +ULTIMATELY 0 1 1 0 +ULTIMATE 0 1 0 1 +UKINZER 0 1 1 0 +UH 0 1 0 1 +UDDER 0 1 1 0 +U 0 1 0 1 +TYRANNY 0 1 1 0 +TYER 0 1 0 1 +TWYMAN'S 0 1 1 0 +TWIMMAN 0 1 0 1 +TWELVEMONTH 0 1 1 0 +TWELVE 3 1 3 4 +TURRET 0 1 1 0 +TURNS 1 1 2 1 +TURBULENT 0 1 1 0 +TUNO 0 1 0 1 +TUFTS 1 1 1 2 +TUBERK 0 1 0 1 +TUBERCULOUS 0 1 1 0 +TRYING 7 1 8 7 +TRUSTY 0 1 0 1 +TRUSTEE 0 1 1 0 +TRUNDLED 0 1 1 0 +TRUE 15 1 15 16 +TROT 0 1 0 1 +TRIVET 0 1 1 0 +TRIUMPHS 0 1 0 1 +TRITES 0 1 0 1 +TRITE 0 1 0 1 +TRIPES 0 1 1 0 +TRINES 0 1 0 1 +TRIBUT 0 1 0 1 +TREMBLINGLY 1 1 1 2 +TRELAWNEY 0 1 1 0 +TREEONER 0 1 0 1 +TREBRANT 0 1 0 1 +TREASURES 1 1 1 2 +TREASURE 1 1 2 1 +TRAVELLED 1 1 1 2 +TRAVELED 0 1 1 0 +TRAVEL 1 1 1 2 +TRANSSHIP 0 1 1 0 +TRANSHIP 0 1 0 1 +TRAINS 0 1 0 1 +TRAINING 4 1 5 4 +TRAINED 1 1 1 2 +TRAINDAWG 0 1 1 0 +TRAIN 0 1 0 1 +TRAFFIC 0 1 1 0 +TRADITIONS 1 1 2 1 +TRADES 0 1 0 1 +TRADEMARK 0 1 1 0 +TRADE 7 1 7 8 +TRACK 2 1 3 2 +TRACED 0 1 1 0 +TOWERED 0 1 0 1 +TOUSTRA 0 1 0 1 +TOUR 1 1 2 1 +TOUGHS 0 1 1 0 +TOUGH 2 1 3 2 +TOUCHED 4 1 5 4 
+TOUCH 3 1 3 4 +TORTURE 1 1 1 2 +TORMENT 0 1 1 0 +TORE 0 1 1 0 +TORCH 0 1 1 0 +TOOK 34 1 35 34 +TONGUE 4 1 5 4 +TONG 0 1 0 1 +TOMORROW 0 1 1 0 +TOMB 1 1 2 1 +TOLERBLE 0 1 1 0 +TOLERABLE 1 1 1 2 +TOILET 2 1 3 2 +TOILED 0 1 0 1 +TOFORE 0 1 0 1 +TITLE 0 1 1 0 +TIRING 0 1 1 0 +TIRESOME 0 1 1 0 +TIMES 9 1 10 9 +TILLERS 0 1 1 0 +TIGHTENING 0 1 1 0 +TIGHTENED 1 1 1 2 +TIGGING 0 1 0 1 +TIE 2 1 2 3 +TIDY 0 1 0 1 +TIDE 1 1 1 2 +TIC 0 1 0 1 +THUMB 3 1 4 3 +THROWING 2 1 3 2 +THRIFTILY 0 1 1 0 +THREW 8 1 8 9 +THREE 36 1 37 36 +THREAT 0 1 0 1 +THORNDER 0 1 0 1 +THOMMISH 0 1 0 1 +THITHER 5 1 5 6 +THIRD 11 1 11 12 +THINKING 4 1 4 5 +THINE 1 1 1 2 +THIN 3 1 4 3 +THEY'LL 0 1 0 1 +THEMSELVES 17 1 17 18 +THEATER 0 1 0 1 +THAT'LL 0 1 1 0 +THANKS 5 1 5 6 +TERRANT 0 1 0 1 +TERRACE 0 1 0 1 +TERRA 0 1 1 0 +TENT 0 1 0 1 +TEND 1 1 2 1 +TEMPTETH 0 1 1 0 +TEMPTED 0 1 0 1 +TEMPLES 1 1 2 1 +TEMPLE 2 1 2 3 +TEMPLAR 1 1 2 1 +TEMP 0 1 0 1 +TELLS 0 1 0 1 +TELLERS 0 1 0 1 +TELEGRAMAS 0 1 0 1 +TELEGRAM 2 1 3 2 +TEETH 4 1 4 5 +TEEN 0 1 0 1 +TEEM 0 1 0 1 +TEDLER 0 1 0 1 +TECHNIC 0 1 0 1 +TEARS 6 1 6 7 +TEAM 0 1 1 0 +TEALE 0 1 0 1 +TEAL 0 1 1 0 +TEA 1 1 2 1 +TAX 0 1 0 1 +TATTLERS 0 1 1 0 +TASKMASTER 0 1 1 0 +TARDY 0 1 1 0 +TAPPY 0 1 0 1 +TAPPED 0 1 1 0 +TAPIS 0 1 1 0 +TANQUAM 0 1 1 0 +TAN 0 1 0 1 +TALONS 0 1 0 1 +TALMASH 0 1 1 0 +TALLED 0 1 0 1 +TALKS 1 1 1 2 +TALK 14 1 15 14 +TALE 2 1 2 3 +TAKING 11 1 11 12 +TAHITI 0 1 1 0 +T'OTHER 0 1 1 0 +T 1 1 1 2 +SYNONYMON 0 1 1 0 +SYNICISM 0 1 0 1 +SYMPATHY 3 1 4 3 +SYMBOLS 0 1 0 1 +SYLLOGISM 0 1 1 0 +SYDNEY 2 1 3 2 +SWORD 11 1 12 11 +SWELP 0 1 1 0 +SWEAT 0 1 0 1 +SWARTHY 0 1 1 0 +SWALLTY 0 1 0 1 +SWAG 0 1 1 0 +SWAB 0 1 0 1 +SUSPICION 4 1 4 5 +SURNING 0 1 0 1 +SURGY 0 1 0 1 +SURFACES 0 1 0 1 +SURE 18 1 18 19 +SUPPOSE 10 1 11 10 +SUNNING 0 1 0 1 +SUNK 0 1 0 1 +SUMTHIN 0 1 1 0 +SUMINUTELY 0 1 0 1 +SULPHURIC 0 1 1 0 +SUITED 1 1 1 2 +SUFFICIENT 2 1 3 2 +SUFFICES 0 1 1 0 +SUFFERG 0 1 0 1 +SUE 0 1 1 0 +SUCKED 0 1 1 0 +SUCCOURS 0 1 1 0 +SUBTRINE 0 1 0 1 +SUBJECT 6 1 6 7 +SUB 0 1 1 0 +STRUGGLING 1 1 1 2 +STRUGGLE 0 1 1 0 +STRIPE 0 1 0 1 +STRIKEBREAKERS 0 1 0 1 +STRIFE 5 1 6 5 +STREAK 1 1 2 1 +STRAYED 0 1 0 1 +STRAW 1 1 1 2 +STRATORS 0 1 0 1 +STRANGEST 0 1 1 0 +STRANGERS 1 1 1 2 +STRANGE 4 1 5 4 +STRAITS 0 1 1 0 +STRAITENED 0 1 1 0 +STRAIT 0 1 0 1 +STRAINS 0 1 1 0 +STRAIN 1 1 1 2 +STOWED 0 1 0 1 +STOVE 1 1 1 2 +STORIES 3 1 3 4 +STORES 1 1 2 1 +STORED 0 1 1 0 +STOPPED 5 1 5 6 +STONED 0 1 1 0 +STOCKING 0 1 0 1 +STOCK 2 1 3 2 +STIRRING 1 1 1 2 +STILLNESS 2 1 3 2 +STICKET 0 1 0 1 +STEWPANT 0 1 0 1 +STEWPAN 1 1 2 1 +STEW 0 1 1 0 +STEVER 0 1 0 1 +STERNMOST 0 1 0 1 +STEPPED 1 1 2 1 +STEEVER 0 1 1 0 +STEERING 0 1 1 0 +STEERED 0 1 0 1 +STEAMS 0 1 0 1 +STEALING 0 1 0 1 +STAYS 0 1 0 1 +STAYING 1 1 2 1 +STAY 6 1 6 7 +STATUS 0 1 1 0 +STATURE 0 1 1 0 +STATUE 2 1 2 3 +STATEROOM 1 1 1 2 +STATED 2 1 3 2 +STAS 0 1 1 0 +STARVING 1 1 2 1 +START 3 1 3 4 +STARS 0 1 0 1 +STARLING 0 1 0 1 +STARK 0 1 1 0 +STARES 0 1 1 0 +STANDSTILL 0 1 1 0 +STANDS 2 1 3 2 +STANDARDS 0 1 0 1 +STANDARD 3 1 4 3 +STAKES 0 1 1 0 +STAINED 1 1 2 1 +STAFF 0 1 0 1 +SQUEAMISH 2 1 3 2 +SPOTIC 0 1 0 1 +SPORT 1 1 2 1 +SPONSUS 0 1 1 0 +SPONGE 0 1 1 0 +SPONDYLES 0 1 1 0 +SPITE 10 1 10 11 +SPITABLY 0 1 0 1 +SPIRIT 5 1 6 5 +SPINNING 1 1 1 2 +SPINES 0 1 0 1 +SPILLING 0 1 1 0 +SPIES 0 1 0 1 +SPICE 0 1 1 0 +SPENT 3 1 3 4 +SPENDING 0 1 0 1 +SPEECH 5 1 5 6 +SPECIALTY 0 1 1 0 +SPECIALLY 0 1 0 1 +SPEAR'S 0 1 0 1 +SPEAKER 1 1 1 2 +SPAWN 1 1 1 2 +SPATANI 0 1 0 1 +SPARSELY 0 1 1 0 +SPARED 2 1 2 3 +SPADEL 0 1 0 1 +SPADDLE 0 1 1 0 +SOWED 0 1 0 1 +SOUTHERN 2 1 3 2 
+SOUTH 1 1 1 2 +SOUSE 0 1 1 0 +SOUS 1 1 1 2 +SOURCE 1 1 2 1 +SOUNDLY 1 1 2 1 +SOTELES 0 1 1 0 +SORDID 0 1 1 0 +SOPHIA 0 1 0 1 +SOOT 0 1 1 0 +SOONER 4 1 4 5 +SOON 22 1 22 23 +SONSPIER 0 1 0 1 +SONS 0 1 1 0 +SONNY 0 1 1 0 +SOMETHING 29 1 29 30 +SOMBRE 0 1 0 1 +SOMBER 0 1 1 0 +SOLDOM 0 1 0 1 +SOJOURNOVITCH 0 1 0 1 +SOJI 0 1 0 1 +SOFT 5 1 5 6 +SOFA 1 1 2 1 +SOCIALLY 0 1 0 1 +SOCIALIST 1 1 2 1 +SNYM 0 1 0 1 +SNETKOV 1 1 2 1 +SNATHS 0 1 0 1 +SNARLS 0 1 0 1 +SNARLED 0 1 1 0 +SMOLNY 0 1 1 0 +SMOKER 2 1 3 2 +SMIRCHED 0 1 1 0 +SMILE 8 1 9 8 +SMELL 0 1 1 0 +SMATHY 0 1 0 1 +SMARCHED 0 1 0 1 +SMAR 0 1 0 1 +SLUNK 0 1 1 0 +SLIGHTLY 1 1 2 1 +SLATTERS 0 1 0 1 +SLACKER 0 1 0 1 +SLACKENED 0 1 1 0 +SLAB 1 1 2 1 +SKYLECKS 0 1 0 1 +SKYLARKS 0 1 1 0 +SKIRT 0 1 1 0 +SKIRO 0 1 0 1 +SKEW 0 1 1 0 +SKEPTICAL 0 1 1 0 +SIXES 0 1 1 0 +SIX 17 1 17 18 +SIT 4 1 5 4 +SIRES 0 1 0 1 +SIRE 3 1 4 3 +SINUHIT 0 1 1 0 +SINGS 2 1 3 2 +SINGING 2 1 2 3 +SINGAFUT'S 0 1 0 1 +SING 3 1 4 3 +SINFUL 0 1 1 0 +SINE 0 1 1 0 +SIMULATES 0 1 1 0 +SIMON 1 1 1 2 +SIMILATES 0 1 0 1 +SILVERY 0 1 0 1 +SILLY 2 1 3 2 +SILENCE 8 1 9 8 +SIGNOR 0 1 1 0 +SIGNED 2 1 2 3 +SIGHING 1 1 2 1 +SIEVE 0 1 1 0 +SIDNEY 0 1 0 1 +SHUMAN 0 1 1 0 +SHUCKS 0 1 1 0 +SHRUGS 0 1 0 1 +SHRUBS 1 1 2 1 +SHRUBBERY 0 1 1 0 +SHOWN 4 1 4 5 +SHOW 16 1 16 17 +SHOULDST 2 1 3 2 +SHOULDEST 0 1 0 1 +SHOULDERS 3 1 4 3 +SHOULDER 2 1 2 3 +SHOT 9 1 10 9 +SHORE 1 1 2 1 +SHORDURUS 0 1 0 1 +SHOCKS 0 1 0 1 +SHOALBURG'S 0 1 0 1 +SHO'LY 0 1 1 0 +SHIP 17 1 17 18 +SHING 0 1 0 1 +SHIMMERTS 0 1 0 1 +SHET 0 1 1 0 +SHERE 0 1 1 0 +SHERBURNE'S 0 1 0 1 +SHERBURN'S 0 1 1 0 +SHEPARD 0 1 0 1 +SHEETS 0 1 1 0 +SHEEP 0 1 0 1 +SHED 4 1 5 4 +SHATTERED 0 1 1 0 +SHARS 0 1 0 1 +SHARED 0 1 0 1 +SHARE 2 1 2 3 +SHALLUM 0 1 1 0 +SHAKEDOWN 0 1 1 0 +SHAKE 2 1 2 3 +SHAGG 0 1 0 1 +SHAG 2 1 3 2 +SEYTON 2 1 3 2 +SEWING 0 1 1 0 +SEVEREND 0 1 0 1 +SEVERE 1 1 2 1 +SEVERAL 9 1 10 9 +SEVENTIETH 0 1 1 0 +SEVENTH 3 1 3 4 +SEVENTEENTH 0 1 0 1 +SEVEN 11 1 12 11 +SETTON 0 1 0 1 +SERVES 0 1 1 0 +SERMON 0 1 1 0 +SEREN 0 1 0 1 +SENTENCED 0 1 1 0 +SENSITIVE 1 1 2 1 +SENOR 1 1 1 2 +SENDS 0 1 0 1 +SEND 4 1 4 5 +SENATORY 0 1 0 1 +SELLING 0 1 0 1 +SEEST 1 1 2 1 +SEEMING 1 1 1 2 +SEEKEST 0 1 1 0 +SEEK'ST 0 1 0 1 +SEEING 8 1 8 9 +SEDUCETH 0 1 1 0 +SEDUCE 0 1 0 1 +SEDRIC 0 1 0 1 +SECURS 0 1 0 1 +SEATS 1 1 1 2 +SEATED 5 1 5 6 +SEAT 0 1 1 0 +SEAS 1 1 1 2 +SEAMEN 0 1 1 0 +SE 0 1 0 1 +SCUSE 0 1 1 0 +SCULPTORS 1 1 2 1 +SCULPTOR'S 1 1 1 2 +SCRUTINISED 0 1 1 0 +SCRIPS 0 1 0 1 +SCRIBES 1 1 2 1 +SCREAMY 0 1 0 1 +SCRAPPIN 0 1 1 0 +SCRAP 0 1 0 1 +SCOTT 0 1 0 1 +SCORN 2 1 3 2 +SCORCHED 0 1 0 1 +SCO'TCH 0 1 1 0 +SCIENCE 1 1 1 2 +SCHUMANN 0 1 0 1 +SCHULBERG'S 0 1 1 0 +SCHOOLGIRLS 0 1 0 1 +SCHOOLDAYS 0 1 1 0 +SCHOOLBOY 0 1 0 1 +SCHLEVENT 0 1 0 1 +SCEPTICAL 0 1 0 1 +SCENTED 0 1 0 1 +SCAPEGRACES 0 1 1 0 +SCAPED 0 1 1 0 +SCAPE 0 1 1 0 +SAYING 17 1 17 18 +SAYIN 0 1 1 0 +SAW 28 1 29 28 +SAVOUR 0 1 0 1 +SAVE 7 1 7 8 +SATURDAY 0 1 1 0 +SATURDAIL 0 1 0 1 +SARTUOUS 0 1 0 1 +SARKAIUS 0 1 0 1 +SARDID 0 1 0 1 +SARAH 2 1 3 2 +SANS 0 1 1 0 +SANITARY 1 1 2 1 +SANGS 0 1 0 1 +SANG 1 1 2 1 +SANDWARES 0 1 0 1 +SAND 2 1 3 2 +SANCTIS 0 1 0 1 +SANCTESS 0 1 1 0 +SANCHO 8 1 9 8 +SANCHA 0 1 0 1 +SAN 4 1 5 4 +SAMURED 0 1 0 1 +SAMPANCISCO 0 1 0 1 +SAME 22 1 22 23 +SALONE 0 1 1 0 +SALON 1 1 1 2 +SALMON 0 1 0 1 +SALLOWER 0 1 1 0 +SALARY 0 1 0 1 +SAINTO 0 1 0 1 +SAILS 0 1 1 0 +SAILORS 0 1 0 1 +SAILOR'S 0 1 0 1 +SAGITTAIRE 0 1 1 0 +SAGATURE 0 1 0 1 +SAD 2 1 2 3 +SACKED 0 1 0 1 +S'POSE 3 1 3 4 +RYO 0 1 1 0 +RUSSIA 2 1 3 2 +RUSHED 3 1 4 3 +RUSH 0 1 0 1 +RUNS 0 1 1 0 +RUNG 1 1 2 1 +RUMP 0 1 1 0 
+RULER 0 1 1 0 +RULED 0 1 0 1 +RUBENSES 0 1 1 0 +RUBBERS 0 1 1 0 +ROXBURY 0 1 1 0 +ROWING 0 1 0 1 +ROWED 0 1 1 0 +ROUTED 0 1 0 1 +ROUTE 0 1 1 0 +ROUNDED 0 1 1 0 +ROUGH 3 1 3 4 +ROTHS 0 1 1 0 +ROSES 0 1 0 1 +ROSAMUN 0 1 1 0 +ROSAMOND 0 1 0 1 +ROPES 1 1 1 2 +ROOTS 1 1 2 1 +ROOMS 2 1 2 3 +ROOKER'S 0 1 0 1 +ROOFS 1 1 1 2 +ROOF 2 1 2 3 +RONALD 0 1 1 0 +ROMANS 0 1 0 1 +ROMANCE 2 1 3 2 +ROMAN 2 1 2 3 +ROLL 1 1 2 1 +ROGER 0 1 0 1 +RODIAN 0 1 0 1 +RODE 6 1 6 7 +ROCK 3 1 4 3 +ROBIN 0 1 0 1 +ROARING 0 1 1 0 +ROAR 1 1 1 2 +ROADS 0 1 0 1 +ROAD 15 1 16 15 +RIVER 11 1 11 12 +RISDEN 0 1 0 1 +RIPENESS 0 1 0 1 +RIPE 0 1 0 1 +RIO 0 1 0 1 +RINGMASTER 0 1 1 0 +RING 6 1 6 7 +RIGOROUS 0 1 1 0 +RIGIDLY 0 1 0 1 +RIDGE'S 0 1 1 0 +RIDERING 0 1 0 1 +RID 4 1 5 4 +RICHMOND 0 1 0 1 +RICHES 0 1 0 1 +RICHARD 4 1 5 4 +RICHA 0 1 0 1 +RHODES 0 1 1 0 +REWARDED 0 1 0 1 +REW 0 1 1 0 +REVOTE 0 1 0 1 +REVOLUTIONISTS 0 1 1 0 +REVOLUTIONIST 0 1 1 0 +REVOLTE 0 1 1 0 +REVOLTA 0 1 0 1 +REVOLT 0 1 0 1 +REVOLITIONIST 0 1 0 1 +REVEREND 14 1 15 14 +REUBEN 0 1 0 1 +RETZCH'S 0 1 1 0 +RETURNING 6 1 6 7 +RESUMED 1 1 2 1 +RESTORETH 0 1 1 0 +RESTORE 0 1 0 1 +RESTIVE 0 1 1 0 +RESPONSIBLE 4 1 5 4 +RESOLVED 0 1 1 0 +RESK 0 1 1 0 +RESISTING 1 1 2 1 +RESIST 3 1 3 4 +RESINOUS 0 1 1 0 +RESCUED 2 1 3 2 +RESCUE 1 1 1 2 +REQUIRRE 0 1 0 1 +REQUIRE 3 1 4 3 +REQUEST 3 1 4 3 +REPUTATION 2 1 3 2 +REPRESENTATIVE 1 1 2 1 +REPORTING 0 1 0 1 +REPLY 1 1 2 1 +REPLIED 39 1 39 40 +REPETITION 1 1 1 2 +REPEATED 5 1 6 5 +REPEAT 1 1 1 2 +REPARATE 0 1 0 1 +REND 0 1 1 0 +REMISSIONOUS 0 1 0 1 +REMISSION 0 1 1 0 +REMEMBEREST 0 1 1 0 +REMEMBER 13 1 13 14 +REMAR 0 1 0 1 +REMAINS 2 1 2 3 +REMAIN 4 1 5 4 +RELISSE 0 1 0 1 +RELIGIOUS 2 1 2 3 +RELIGHTED 0 1 0 1 +RELEASED 2 1 3 2 +RELATOR'S 0 1 0 1 +RELATION 2 1 2 3 +RELATED 3 1 4 3 +REJECTED 0 1 0 1 +REJECT 0 1 1 0 +REIN 0 1 0 1 +REIGNS 0 1 1 0 +REIGNED 0 1 1 0 +REIGN 1 1 1 2 +REGULATION 0 1 1 0 +REGULATING 0 1 0 1 +REGULAR 1 1 1 2 +REGARDING 2 1 3 2 +REGAN 0 1 0 1 +REG'LER 0 1 1 0 +REFUSE 2 1 2 3 +REFORM 5 1 6 5 +REFLECTING 0 1 0 1 +REELECTION 0 1 1 0 +REDRUP 0 1 0 1 +REDMOCKERS 0 1 0 1 +RED 9 1 10 9 +RECTUM 0 1 1 0 +RECTIM 0 1 0 1 +RECTIFY 0 1 1 0 +RECKLESS 2 1 2 3 +RECEDED 0 1 1 0 +RECAPTURED 0 1 1 0 +REAP 0 1 1 0 +REALIZED 0 1 0 1 +REALISED 0 1 1 0 +READS 0 1 0 1 +RAYS 0 1 0 1 +RAW 0 1 0 1 +RATTLING 2 1 3 2 +RATTERAS 0 1 0 1 +RATE 2 1 2 3 +RATCALIONS 0 1 0 1 +RASCALS 0 1 1 0 +RASCAL 3 1 3 4 +RAPSCALLIONS 0 1 1 0 +RAPPERS 0 1 1 0 +RAOUL 0 1 0 1 +RANSOM 7 1 8 7 +RANSES 0 1 0 1 +RANGED 0 1 0 1 +RANG 2 1 2 3 +RANDOM 0 1 0 1 +RANALD 0 1 0 1 +RAMSES 0 1 1 0 +RAM 3 1 4 3 +RAINY 1 1 2 1 +RAINING 0 1 0 1 +RAINED 0 1 0 1 +RAIN 2 1 3 2 +RAID 0 1 1 0 +RAGGED 0 1 0 1 +RAGATIRS 0 1 0 1 +RADPROP 0 1 1 0 +RADIAN 0 1 0 1 +RACKETEERS 0 1 1 0 +RACKED 0 1 1 0 +RACES 0 1 1 0 +RABBS 0 1 0 1 +RABBITS 0 1 1 0 +RABBIT'S 0 1 0 1 +RABB'S 0 1 1 0 +QUOTH 4 1 5 4 +QUOMAN 0 1 0 1 +QUMMUKH 0 1 1 0 +QUITE 15 1 16 15 +QUICKLY 5 1 5 6 +QUICK 4 1 5 4 +QUESTED 0 1 0 1 +QUANTRELL 0 1 1 0 +QUANTREL 0 1 0 1 +QUANTITIES 1 1 2 1 +QUALMS 0 1 0 1 +QUALITIES 1 1 1 2 +QUACKS 1 1 2 1 +PYRIUS 0 1 0 1 +PYM 0 1 1 0 +PUTTING 0 1 1 0 +PUTTEL 0 1 1 0 +PUTS 2 1 3 2 +PUSSARA 0 1 0 1 +PUSH 1 1 1 2 +PURPORTING 0 1 1 0 +PUMPED 0 1 0 1 +PUMP 0 1 1 0 +PULLEY 0 1 1 0 +PULLED 3 1 4 3 +PULL 1 1 1 2 +PULCHEVIKI 0 1 0 1 +PUDDLES 0 1 1 0 +PSALMS 0 1 0 1 +PSALM 0 1 1 0 +PRYTANEUM 0 1 1 0 +PRUDENT 0 1 1 0 +PROW 1 1 1 2 +PROVOCATOR 0 1 1 0 +PROVISED 0 1 0 1 +PROVIDES 0 1 0 1 +PROVERB 0 1 0 1 +PROVENCE 0 1 0 1 +PROVED 2 1 2 3 +PROVE 5 1 5 6 +PROTS 0 1 0 1 +PROTECTORATE 0 1 1 0 +PROTECTOR 1 1 1 2 
+PROPRE 0 1 1 0 +PROPER 2 1 2 3 +PRONOUNCE 1 1 1 2 +PROMOTIVE 0 1 1 0 +PROMOTED 0 1 0 1 +PROMIN 0 1 0 1 +PROHIBITION 0 1 1 0 +PROFESSIONS 0 1 0 1 +PROFESSION 3 1 4 3 +PRODUCES 0 1 1 0 +PROCLAUS 0 1 0 1 +PROCATE 0 1 0 1 +PROBES 0 1 0 1 +PRISONED 0 1 0 1 +PRISON 4 1 5 4 +PRIOR 0 1 0 1 +PRINCIPLES 0 1 0 1 +PRINCIPALS 0 1 1 0 +PRIMER 0 1 1 0 +PRIME 3 1 3 4 +PRIMARY 0 1 0 1 +PRICKED 0 1 0 1 +PRICE 2 1 2 3 +PRESTIGE 1 1 2 1 +PRESERVE 1 1 2 1 +PRESENTERS 0 1 0 1 +PRESAGE 0 1 0 1 +PRENTICESHIP 0 1 1 0 +PRENTICE 0 1 0 1 +PRECEPTORS 1 1 2 1 +PRECENTORS 0 1 1 0 +PREACHED 0 1 1 0 +PRAYERS 2 1 3 2 +PRAYER 6 1 6 7 +PRAY 5 1 5 6 +PRAM 0 1 1 0 +PRAISEWORTHY 0 1 1 0 +PRACTITIONER 0 1 1 0 +POUCHES 0 1 1 0 +POTUM 0 1 1 0 +POTTLES 0 1 0 1 +POTION 0 1 1 0 +POTENT 0 1 0 1 +POTASSIUM 0 1 1 0 +POTASSIAN 0 1 0 1 +POT 0 1 0 1 +POSTS 0 1 0 1 +POSTHASTE 0 1 1 0 +POSTERN 0 1 1 0 +POSITION 6 1 7 6 +PORTENTOUS 0 1 1 0 +PORT 1 1 1 2 +PORED 0 1 1 0 +POPULOUS 0 1 0 1 +POPULACE 0 1 1 0 +POMROY 0 1 0 1 +POMEROY 0 1 1 0 +POLYTECHNIC 0 1 1 0 +POLONIUS 0 1 1 0 +POLO 0 1 1 0 +POISPIED 0 1 0 1 +POINT 10 1 10 11 +POEM 0 1 0 1 +POCKET 8 1 8 9 +PLUMB 0 1 1 0 +PLUM 0 1 0 1 +PLOT 1 1 1 2 +PLEASURE 8 1 8 9 +PLEASE 15 1 15 16 +PLEASANT 8 1 9 8 +PLEAS 0 1 1 0 +PLEADED 2 1 3 2 +PLAYING 0 1 1 0 +PLAYED 5 1 5 6 +PLAY 4 1 4 5 +PLATTERBUFF'S 0 1 0 1 +PLATTERBUFF 0 1 0 1 +PLATTERBAFF'S 0 1 1 0 +PLATES 0 1 0 1 +PLATED 0 1 1 0 +PLANET 1 1 2 1 +PLANE 0 1 0 1 +PLAN 1 1 1 2 +PLACES 3 1 3 4 +PLACED 10 1 10 11 +PIUCHES 0 1 0 1 +PITTS 1 1 2 1 +PITTHAM 0 1 0 1 +PITHUM 0 1 1 0 +PITCHER 1 1 1 2 +PITCHED 0 1 0 1 +PISTOLES 0 1 1 0 +PISTOL 5 1 5 6 +PIRATES 0 1 0 1 +PIRASS 0 1 0 1 +PIPES 1 1 1 2 +PIPE 3 1 4 3 +PINKUS 0 1 1 0 +PINKERTON'S 0 1 1 0 +PINKERTIN'S 0 1 0 1 +PINIONS 0 1 0 1 +PIKES 1 1 2 1 +PIGSKIN 0 1 1 0 +PIGEONOTE 0 1 0 1 +PIGEONCOTES 0 1 1 0 +PIERRE 0 1 0 1 +PIERO 0 1 1 0 +PICTURE 4 1 5 4 +PICKED 2 1 3 2 +PICK 5 1 5 6 +PHYNICAL 0 1 0 1 +PHUT'S 0 1 1 0 +PHOSPHOBS 0 1 0 1 +PHOSPHER 0 1 0 1 +PHILISTINES 1 1 2 1 +PHILIPPUS 2 1 3 2 +PHAETON 0 1 0 1 +PETREL 0 1 1 0 +PETITIONERS 0 1 0 1 +PETER'S 1 1 2 1 +PETEL 0 1 0 1 +PESTS 0 1 0 1 +PESTE 0 1 1 0 +PERSPIRED 0 1 1 0 +PERNOUNCE 0 1 1 0 +PERHAPS 17 1 17 18 +PERFELASHES 0 1 0 1 +PERFECTUALLY 0 1 0 1 +PERFECTLY 4 1 5 4 +PERCY 0 1 0 1 +PERCHED 1 1 1 2 +PERCEPTORS 0 1 0 1 +PERCEIVER 0 1 0 1 +PERAMBULATOR'S 0 1 1 0 +PEONAGE 0 1 1 0 +PENNYERS 0 1 0 1 +PENDING 0 1 1 0 +PENCIL 1 1 1 2 +PEM 0 1 0 1 +PELLESTRA 0 1 0 1 +PEKAHIAH 0 1 1 0 +PEGGING 0 1 0 1 +PEER 2 1 3 2 +PECAH 0 1 0 1 +PEASE 0 1 0 1 +PEASANTS 5 1 6 5 +PEAS 1 1 2 1 +PEARL 0 1 1 0 +PEACH 0 1 0 1 +PEACE 8 1 8 9 +PAY 10 1 11 10 +PAWNBROKER 1 1 2 1 +PAUSES 0 1 0 1 +PAUSED 2 1 3 2 +PAUL 0 1 0 1 +PATUM 0 1 0 1 +PATTERN 0 1 0 1 +PATIENCE 2 1 3 2 +PATH 1 1 1 2 +PATENTS 0 1 0 1 +PASTES 0 1 1 0 +PAST 9 1 10 9 +PASSING 3 1 3 4 +PASSES 0 1 1 0 +PARTLY 1 1 2 1 +PARTISER 0 1 0 1 +PARTINGS 0 1 1 0 +PARTING 2 1 2 3 +PARTIALLY 0 1 0 1 +PARRICIDES 0 1 1 0 +PARKS 0 1 1 0 +PARKLEY 0 1 0 1 +PARISH 1 1 1 2 +PARENT 0 1 0 1 +PARDON 9 1 10 9 +PARASITES 0 1 0 1 +PARASAN 0 1 0 1 +PAR 0 1 0 1 +PANNIERS 0 1 1 0 +PAMPAINTER 0 1 0 1 +PALLIATE 0 1 1 0 +PALL 0 1 1 0 +PALE 8 1 8 9 +PALAESTRA 0 1 1 0 +PALACE 6 1 6 7 +PAIR 5 1 6 5 +PAINTER 0 1 1 0 +PADDLING 0 1 1 0 +PADDLIN 0 1 0 1 +PAD 0 1 0 1 +PACES 1 1 2 1 +PABRICAL 0 1 0 1 +P 1 1 2 1 +OZ 0 1 0 1 +OWNERS 1 1 2 1 +OWNED 2 1 2 3 +OWING 1 1 1 2 +OWE 1 1 2 1 +OW'M 0 1 1 0 +OW 0 1 1 0 +OVERWHELMING 0 1 0 1 +OVERRIPENESS 0 1 1 0 +OVERPRUDENT 0 1 0 1 +OVERHEARD 2 1 3 2 +OVERFULL 0 1 1 0 +OVERFLOWING 0 1 1 0 +OVERCLONE 0 1 0 1 +OUTWARDS 0 1 0 
1 +OUTS 0 1 0 1 +OUTLINES 0 1 1 0 +OUTLINE 1 1 1 2 +OUTGAZE 0 1 0 1 +OUTER'S 0 1 0 1 +OUTER 1 1 2 1 +OUTDREW 0 1 0 1 +OURSPORT 0 1 0 1 +OURSAN 0 1 0 1 +OUNCES 2 1 3 2 +OUGHTN'T 0 1 1 0 +OUEN 0 1 1 0 +OTIAN 0 1 0 1 +OTHERS 17 1 17 18 +OTHER'S 1 1 2 1 +ORIENTOUINE 0 1 0 1 +ORIENTAL 0 1 1 0 +ORFICER 0 1 1 0 +ORFASTER 0 1 0 1 +ORE 0 1 0 1 +ORDERS 8 1 8 9 +ORDER 19 1 20 19 +ORBUS 0 1 0 1 +ORBIS 0 1 1 0 +OPPRESSORS 0 1 1 0 +OPPOSITION 2 1 2 3 +OPINION 3 1 3 4 +OPENED 10 1 11 10 +OPE 0 1 1 0 +ON'T 0 1 1 0 +OME 0 1 1 0 +OLY 0 1 0 1 +OLL 0 1 1 0 +OLIV 0 1 0 1 +OLIO 0 1 0 1 +OLDEST 1 1 1 2 +OKAY 0 1 1 0 +OITERNAL 0 1 0 1 +OIL 2 1 3 2 +OIKA 0 1 0 1 +OFFICIALS 0 1 0 1 +OFFICES 0 1 1 0 +OFFICER 2 1 3 2 +OFFENSE 0 1 1 0 +OFFENCE 2 1 2 3 +OFFEN 0 1 1 0 +ODO 0 1 0 1 +ODD 2 1 3 2 +OCCUR'S 0 1 0 1 +OCCUPIED 2 1 2 3 +OBSERVED 4 1 5 4 +OBOCOCK 0 1 1 0 +OBLIGE 1 1 1 2 +OBJECT 6 1 7 6 +OBEY 1 1 1 2 +O'NIGHTS 0 1 1 0 +O'NEILL 0 1 1 0 +O'NEIA 0 1 0 1 +NYTOUCH 0 1 1 0 +NUZHA'S 0 1 0 1 +NUTS 0 1 1 0 +NUNS 2 1 3 2 +NUN'S 0 1 0 1 +NUMAN 0 1 0 1 +NUISANCE 0 1 1 0 +NU'UMAN 0 1 1 0 +NOWT 0 1 1 0 +NOTTINGHAM 0 1 1 0 +NOTIONS 0 1 0 1 +NOTICED 3 1 3 4 +NOTICE 6 1 7 6 +NOTE 3 1 3 4 +NORTHEAST 0 1 0 1 +NOPE 0 1 1 0 +NONETHELESS 0 1 1 0 +NON 0 1 0 1 +NOISE 5 1 5 6 +NOIR 0 1 0 1 +NOBLY 0 1 0 1 +NINE 6 1 6 7 +NIGO 0 1 0 1 +NIGHTLY 0 1 0 1 +NIGH 1 1 1 2 +NICO 0 1 1 0 +NICKEL 0 1 1 0 +NEXTER 0 1 1 0 +NEXT 10 1 10 11 +NEWS 7 1 7 8 +NEWER 0 1 0 1 +NERVES 1 1 1 2 +NERVE 0 1 1 0 +NEOSHO 0 1 1 0 +NEOSH 0 1 0 1 +NELLO 0 1 0 1 +NEKHLUD 0 1 0 1 +NEIGHBOURS 1 1 1 2 +NEIGHBOURING 0 1 1 0 +NEIGHBORING 2 1 2 3 +NEEDS 8 1 8 9 +NEEDLED 0 1 1 0 +NEEDLE 0 1 0 1 +NEEDED 9 1 9 10 +NEED 8 1 9 8 +NEDCOV 0 1 0 1 +NEAT 3 1 3 4 +NEARING 0 1 1 0 +NEARER 3 1 4 3 +NEARED 0 1 1 0 +NAUGHTY 0 1 0 1 +NAUGHT 0 1 1 0 +NATION 2 1 2 3 +NARRATIVES 1 1 2 1 +NARRATIVE 1 1 1 2 +NANDY'S 0 1 1 0 +NABRAMAN 0 1 0 1 +NABBS 0 1 0 1 +N 0 1 1 0 +MYSTERY 0 1 0 1 +MYSTERIOUS 5 1 6 5 +MYRTOLIS 0 1 0 1 +MUSTACHES 0 1 1 0 +MUSKETS 0 1 1 0 +MUSICIANS 0 1 1 0 +MURDOCH'S 0 1 1 0 +MULES 0 1 0 1 +MUIR 0 1 1 0 +MUG 0 1 1 0 +MUD 0 1 1 0 +MOWER 0 1 1 0 +MOVES 0 1 0 1 +MOVEMENT 1 1 2 1 +MOUTHWHAT 0 1 1 0 +MOUTHS 0 1 1 0 +MOUTHFULLY 0 1 0 1 +MOUSTACHES 0 1 0 1 +MOUSE 1 1 1 2 +MOURNING 2 1 3 2 +MOUNTNORRIS 0 1 1 0 +MOUNT 0 1 1 0 +MOUND 0 1 0 1 +MOTOR 1 1 1 2 +MOTIONLESS 0 1 1 0 +MOTHS 0 1 0 1 +MOTHER 51 1 52 51 +MORVE 0 1 0 1 +MORTIFICATIONTHAT 0 1 1 0 +MORTIFICATION 0 1 0 1 +MORTEM 0 1 1 0 +MORNING 21 1 21 22 +MORLEY 0 1 0 1 +MOPED 0 1 1 0 +MOOR 2 1 3 2 +MOONLIGHT 2 1 3 2 +MONTORAS 0 1 0 1 +MONTHS 6 1 6 7 +MONTHLY 0 1 1 0 +MONTH'S 0 1 0 1 +MONSIEUR 0 1 0 1 +MONSHAT 0 1 0 1 +MONSEIGNEUR 5 1 6 5 +MONKERS 0 1 1 0 +MOMMOL 0 1 1 0 +MOMENT 24 1 24 25 +MOLASTIC 0 1 0 1 +MODE 0 1 0 1 +MO 0 1 1 0 +MIXTURE 0 1 1 0 +MIXED 2 1 2 3 +MIX 2 1 3 2 +MISTS 0 1 0 1 +MISTING 0 1 0 1 +MISTIENNE 0 1 0 1 +MISTAKES 0 1 0 1 +MISSION 0 1 0 1 +MISSED 4 1 5 4 +MISERC 0 1 0 1 +MISCHIEVOUS 1 1 2 1 +MIRKED 0 1 0 1 +MINUTELY 0 1 1 0 +MINNIE 1 1 2 1 +MINNEAPOLIS 0 1 1 0 +MINISTERS 0 1 1 0 +MINISTER 6 1 7 6 +MINIONETTE 0 1 1 0 +MIMICK 0 1 1 0 +MIMIC 0 1 0 1 +MILLY 0 1 1 0 +MILLSTONE 0 1 0 1 +MILLSTON 0 1 1 0 +MILLSON 0 1 0 1 +MILLION 0 1 0 1 +MILICENT'S 0 1 1 0 +MILFRED 0 1 0 1 +MILES 6 1 6 7 +MILE 4 1 5 4 +MILDEWED 0 1 1 0 +MIKE 2 1 2 3 +MIHI 0 1 1 0 +MIDSER 0 1 0 1 +MIDRIFTS 0 1 0 1 +MIDRIFF 0 1 1 0 +MIDDY 0 1 1 0 +MIDDLING 0 1 1 0 +MIDDLIN 0 1 0 1 +MICROCLE 0 1 0 1 +MICHANG 0 1 0 1 +METHINKETH 0 1 1 0 +METHINK 0 1 0 1 +MESSION 0 1 0 1 +MESSIAN 0 1 0 1 +MESSES 0 1 1 0 +MESS' 0 1 0 1 +MESS 0 1 0 1 +MERTOLUS 0 1 0 1 
+MERRYMAKING 0 1 0 1 +MERRY 2 1 3 2 +MERNEPTAH 0 1 1 0 +MERLINUS 0 1 0 1 +MERLINA'S 0 1 0 1 +MERELY 6 1 6 7 +MERDOCK 0 1 0 1 +MER 0 1 0 1 +MENTAL 2 1 3 2 +MENDICULATED 0 1 0 1 +MENDIAN 0 1 0 1 +MEND 0 1 1 0 +MEN'S 0 1 1 0 +MELTED 1 1 1 2 +MEDIUM 1 1 1 2 +MEDICMENTS 0 1 0 1 +MEDICAMENTS 0 1 1 0 +MEDICAL 1 1 2 1 +MEDICA 0 1 0 1 +MEDIAN 0 1 1 0 +MEASIANS 0 1 0 1 +MEANTIS 0 1 0 1 +MEANTESSE 0 1 0 1 +MEANESTER 0 1 0 1 +MAZAR 0 1 0 1 +MAYN'T 0 1 0 1 +MATURITY 0 1 0 1 +MATURES 0 1 0 1 +MATTERS 3 1 4 3 +MATRON 0 1 0 1 +MATEY 0 1 1 0 +MATERIALS 1 1 2 1 +MASSR 0 1 0 1 +MASSES 1 1 1 2 +MASSED 0 1 0 1 +MASKETS 0 1 0 1 +MASKED 0 1 1 0 +MARVELLED 0 1 0 1 +MARVELED 0 1 1 0 +MARVALAN 0 1 0 1 +MARTILLUS 0 1 0 1 +MARTIAL 0 1 0 1 +MARSPEAKER 0 1 1 0 +MARSH 1 1 1 2 +MARMALADES 1 1 2 1 +MARMAL 0 1 0 1 +MARK 4 1 4 5 +MARJORIE 2 1 3 2 +MARGERY 0 1 0 1 +MARGAR 0 1 0 1 +MARES 0 1 0 1 +MARDOX 0 1 0 1 +MARDOC'S 0 1 0 1 +MANTLE 0 1 0 1 +MANKE 0 1 0 1 +MANKADO 0 1 0 1 +MANETTE 0 1 0 1 +MANASSEH 0 1 1 0 +MAN'S 13 1 13 14 +MAMMA 1 1 1 2 +MAMIE 0 1 1 0 +MALTRICIAN 0 1 0 1 +MALNUTRITION 0 1 1 0 +MALE 0 1 0 1 +MAKING 17 1 18 17 +MAKES 10 1 11 10 +MAJORITY 5 1 6 5 +MAINE 0 1 1 0 +MAIN 1 1 1 2 +MAIL 0 1 1 0 +MAID 2 1 3 2 +MADGE 0 1 0 1 +MAD 1 1 2 1 +MACKING 0 1 0 1 +MACHIEVOUS 0 1 0 1 +MACAN 0 1 0 1 +MAC 0 1 0 1 +MABILLON 0 1 1 0 +MA 0 1 0 1 +LYSIMACHUS 0 1 1 0 +LYING 2 1 2 3 +LUVIDICUS 0 1 0 1 +LUNDY'S 0 1 0 1 +LUNAR'S 0 1 0 1 +LUKE 3 1 4 3 +LUGGAGE 0 1 1 0 +LUCK 2 1 3 2 +LUCID 0 1 1 0 +LOYAL 2 1 2 3 +LOWER 2 1 3 2 +LOWBOURNE 0 1 0 1 +LOWBORN 0 1 0 1 +LOUIS 3 1 3 4 +LOUD 7 1 8 7 +LOT 6 1 6 7 +LOST 11 1 12 11 +LOSSES 0 1 1 0 +LOSS 3 1 3 4 +LOSES 0 1 0 1 +LORDS 2 1 2 3 +LORD'S 0 1 1 0 +LOQUACITY 0 1 1 0 +LOOKOUT 0 1 1 0 +LONGER 15 1 16 15 +LONESOMENESS 0 1 1 0 +LONESOME 1 1 1 2 +LONE 0 1 1 0 +LOKIE 0 1 0 1 +LOCTICE 0 1 0 1 +LOBSTERBOAT 0 1 0 1 +LOBS 0 1 0 1 +LIZZLY 0 1 0 1 +LIZZLING 0 1 0 1 +LIZABETH 0 1 1 0 +LIVES 5 1 6 5 +LIT 2 1 2 3 +LISTENED 4 1 4 5 +LISTEN 10 1 11 10 +LISSMACHUS 0 1 0 1 +LISLEY 0 1 0 1 +LISALLY 0 1 0 1 +LINLESILY 0 1 0 1 +LINENSHIPS 0 1 0 1 +LINEN 2 1 2 3 +LINA 0 1 0 1 +LIMPED 3 1 4 3 +LIMP 3 1 3 4 +LIME 0 1 1 0 +LILY 0 1 0 1 +LIKELY 2 1 3 2 +LIGHTS 1 1 1 2 +LIGHTFOOTED 0 1 0 1 +LIGHTED 0 1 0 1 +LIGHT 18 1 19 18 +LIFE 54 1 55 54 +LIAISON 0 1 1 0 +LEXINGTON 0 1 1 0 +LEWIS 0 1 1 0 +LEVITICUS 0 1 1 0 +LEVILY 0 1 0 1 +LETTER 22 1 22 23 +LEST 3 1 3 4 +LESSON 1 1 2 1 +LESSINGTON 0 1 0 1 +LEQUESTITY 0 1 0 1 +LENS 0 1 0 1 +LENOIR 0 1 1 0 +LENIN 1 1 2 1 +LEND 1 1 2 1 +LENA 0 1 0 1 +LEMON 0 1 1 0 +LEISURESHIP 0 1 0 1 +LEISURE 3 1 3 4 +LEGS 2 1 3 2 +LEGGED 0 1 0 1 +LEGALLY 1 1 1 2 +LEER 0 1 1 0 +LEECHES 0 1 1 0 +LEECH 1 1 1 2 +LEE'S 0 1 1 0 +LEE 0 1 0 1 +LEDA 0 1 0 1 +LEAVY 0 1 0 1 +LEAVED 1 1 2 1 +LEARNED 6 1 6 7 +LEARN 4 1 5 4 +LEAR 0 1 0 1 +LEAPT 0 1 1 0 +LEAPED 1 1 1 2 +LEAP 1 1 1 2 +LEADPENCIL 0 1 1 0 +LEADERSHIP 0 1 1 0 +LEADERS 1 1 2 1 +LEADER'S 0 1 0 1 +LEAD 2 1 2 3 +LE 0 1 0 1 +LAYS 0 1 0 1 +LAWS 2 1 3 2 +LAURDALE 0 1 0 1 +LAURAIL 0 1 0 1 +LAUGH 9 1 9 10 +LATH 0 1 1 0 +LATER 8 1 8 9 +LASTIS 0 1 0 1 +LASH 1 1 2 1 +LARD 0 1 0 1 +LANY 0 1 0 1 +LANE 2 1 2 3 +LANDY 0 1 0 1 +LANDOWNERS 1 1 1 2 +LANDI 0 1 1 0 +LAMPS 0 1 0 1 +LAMBS 0 1 1 0 +LAIN 1 1 2 1 +LAG 0 1 0 1 +LADS 1 1 2 1 +LADIES 6 1 6 7 +LACK 2 1 2 3 +LACHRYMA 0 1 1 0 +LABOURING 0 1 0 1 +LABOURERS 0 1 1 0 +LABOUR 0 1 1 0 +LABORS 0 1 0 1 +LABORING 1 1 2 1 +LABORERS 0 1 0 1 +LABOR 1 1 1 2 +KUMAK 0 1 0 1 +KOY 0 1 0 1 +KNOWSAT 0 1 0 1 +KNOTTY 0 1 1 0 +KNOTS 0 1 0 1 +KNOT 2 1 3 2 +KNOBBLY 0 1 1 0 +KNIGHT'S 0 1 1 0 +KNIFE'S 0 1 0 1 +KLEPTOMANIAC 0 1 1 0 
+KLEPTOMANIA 0 1 1 0 +KITTY 3 1 4 3 +KITRAL 0 1 0 1 +KIT 0 1 0 1 +KINDRED 1 1 1 2 +KINDNESS 7 1 7 8 +KINDER 1 1 2 1 +KIN 0 1 0 1 +KILLS 0 1 1 0 +KILL 14 1 14 15 +KIDDAM 0 1 0 1 +KID 1 1 2 1 +KICKIE 0 1 0 1 +KICK 0 1 1 0 +KEY 4 1 5 4 +KETTLE 0 1 1 0 +KERSTALL 0 1 1 0 +KENNITES 0 1 0 1 +KENITES 1 1 2 1 +KEI 0 1 0 1 +KEEN 1 1 2 1 +KEDEM 0 1 1 0 +KATY 0 1 0 1 +KAMAR 0 1 1 0 +KAL 0 1 0 1 +JUSTIFIED 1 1 2 1 +JURISDICTION 0 1 1 0 +JURIS 0 1 0 1 +JUNIOR 0 1 1 0 +JUNE'S 0 1 0 1 +JUMPS 1 1 2 1 +JUMPED 1 1 1 2 +JULIE 0 1 0 1 +JULIA 0 1 0 1 +JUDICINES 0 1 0 1 +JUDAH 1 1 2 1 +JUBAND'S 0 1 0 1 +JOUVIN'S 0 1 1 0 +JOSTLING 0 1 0 1 +JOSHUA 0 1 1 0 +JOSE 0 1 0 1 +JOKINGLY 0 1 1 0 +JOINTMENT 0 1 0 1 +JOINING 0 1 1 0 +JOINED 1 1 2 1 +JOIN 5 1 5 6 +JOHNSHAW 0 1 0 1 +JOHN 9 1 9 10 +JOE 0 1 0 1 +JOCELYN'S 0 1 1 0 +JO 0 1 0 1 +JIS 0 1 1 0 +JIM 4 1 5 4 +JILT 0 1 1 0 +JEW 1 1 1 2 +JETS 0 1 0 1 +JESTS 0 1 0 1 +JESTER 1 1 2 1 +JERRY 1 1 1 2 +JERED 0 1 0 1 +JEMOSIS 0 1 0 1 +JEHU 0 1 1 0 +JEHOV 0 1 0 1 +JEHOASH 0 1 1 0 +JEERED 0 1 1 0 +JEDGE 0 1 1 0 +JEALOUS 0 1 0 1 +JARS 0 1 0 1 +JARNDYCE 0 1 0 1 +JAPHANE 0 1 0 1 +JAPANE 0 1 0 1 +JANSENIST 0 1 1 0 +JANEERO 0 1 1 0 +JANE 6 1 7 6 +JAMES'S 0 1 0 1 +JAKEY'S 0 1 1 0 +JACO 0 1 0 1 +JACKO 0 1 0 1 +JACKMAN 0 1 1 0 +J 2 1 2 3 +IZZY'S 0 1 1 0 +IZZIE'S 0 1 0 1 +ITALIANS 0 1 0 1 +IT'LL 1 1 1 2 +ISRAIT 0 1 0 1 +ISRAELITES 0 1 1 0 +ISN'T 2 1 2 3 +ISLAMMISED 0 1 0 1 +ISLAMISED 0 1 1 0 +ISLAM 0 1 1 0 +ISIS 0 1 0 1 +ISAU 0 1 0 1 +ISAAC 1 1 2 1 +IRRESPONSIBLE 0 1 0 1 +IRRES 0 1 0 1 +IRONICAL 0 1 1 0 +IRONIC 0 1 0 1 +IPES 0 1 0 1 +IOWA 1 1 2 1 +IOPIUS 0 1 0 1 +INVITED 1 1 1 2 +INVALIDES 0 1 1 0 +INVALIDE 0 1 0 1 +INVADE 0 1 1 0 +INTOLLIUM 0 1 0 1 +INTERPLIES 0 1 0 1 +INTER 0 1 0 1 +INTENSE 3 1 3 4 +INTEND 1 1 2 1 +INTELLECTUALLY 0 1 1 0 +INTELLECTUAL 0 1 0 1 +INSTRUCTIVE 0 1 0 1 +INSTRUCTED 0 1 1 0 +INSTANTLY 0 1 1 0 +INSPECTOR 0 1 0 1 +INSISTENCE 0 1 1 0 +INSISTANTS 0 1 0 1 +INSIDE 4 1 4 5 +INSENT 0 1 0 1 +INSECTORS 0 1 0 1 +INSCRIPTIONS 1 1 2 1 +INQUIRE 1 1 2 1 +INNES 0 1 1 0 +INJURE 2 1 3 2 +INGENUOUSLY 0 1 0 1 +INGENIOUSLY 0 1 1 0 +INFECTED 0 1 0 1 +INFAMOUS 0 1 1 0 +INDURE 0 1 0 1 +INDEEDY 0 1 0 1 +INDEED 13 1 14 13 +INDE 0 1 0 1 +INCOUPS 0 1 0 1 +INCOMPARABLE 0 1 1 0 +INCLINE 1 1 1 2 +IMPROVISED 1 1 2 1 +IMPROVED 0 1 1 0 +IMPRESSORS 0 1 0 1 +IMPLY 1 1 2 1 +IMAGINE 6 1 6 7 +ILLS 0 1 1 0 +ILLIS 0 1 0 1 +ILL 5 1 5 6 +ILIU 0 1 0 1 +IKOLOITS 0 1 0 1 +IISES 0 1 0 1 +IDEA 10 1 11 10 +ICE 0 1 1 0 +I'FAITH 0 1 1 0 +HYCOMICAL 0 1 0 1 +HUSBATH 0 1 0 1 +HUNTINGDON 4 1 5 4 +HUNTERS 0 1 1 0 +HUNGRY 1 1 2 1 +HUNGER 2 1 2 3 +HUMOURS 0 1 1 0 +HUMOUR 1 1 1 2 +HUMOR 0 1 1 0 +HUMANS 0 1 0 1 +HUH 0 1 1 0 +HUDSPETH 0 1 1 0 +HOUSEHOLTS 0 1 0 1 +HOUSEHOLD 0 1 1 0 +HOURSERVES 0 1 0 1 +HOURS 12 1 12 13 +HOUNDED 0 1 1 0 +HOSPITABLY 0 1 1 0 +HORDE 0 1 1 0 +HOPPING 0 1 1 0 +HOPES 0 1 0 1 +HOOKER'S 0 1 0 1 +HOO'LL 0 1 1 0 +HOO 0 1 1 0 +HONESTLY 0 1 1 0 +HONEST 5 1 6 5 +HONDYNON 0 1 0 1 +HOMEPUSH 0 1 1 0 +HOLY 5 1 6 5 +HOLLERED 0 1 0 1 +HOLLER 0 1 1 0 +HOLLAND 0 1 0 1 +HOF 0 1 1 0 +HOARD 0 1 0 1 +HIRED 0 1 0 1 +HIRAKEE 0 1 0 1 +HINFIELD 0 1 0 1 +HINDFELL 0 1 1 0 +HIMSELF 51 1 52 51 +HIJAZ 0 1 1 0 +HIGHS 0 1 1 0 +HIGH 7 1 8 7 +HIES 0 1 0 1 +HIERARCHY 0 1 1 0 +HIDE 2 1 2 3 +HIDDEN 3 1 3 4 +HID 2 1 2 3 +HEYDAY 0 1 1 0 +HEY 1 1 1 2 +HEWN 0 1 1 0 +HESTERITY 0 1 0 1 +HESITATINGLY 0 1 0 1 +HESDAY 0 1 0 1 +HERSELF 35 1 35 36 +HERIOT'S 0 1 1 0 +HERIOT 0 1 1 0 +HERETT'S 0 1 0 1 +HERETOFORE 0 1 1 0 +HERDSMEN 0 1 1 0 +HERDSMAN 0 1 0 1 +HERBERT 0 1 0 1 +HERALD 0 1 0 1 +HEPTARK 0 1 0 1 +HEPTARCHIES 0 1 1 0 +HEN 0 1 1 0 
+HELVIN 0 1 1 0 +HELVAN 0 1 0 1 +HELPS 0 1 0 1 +HELPED 2 1 3 2 +HELP 17 1 17 18 +HELM 1 1 2 1 +HEELED 0 1 1 0 +HEEDED 0 1 1 0 +HEDGES 0 1 1 0 +HEBREW 1 1 2 1 +HEAT 1 1 1 2 +HEARTY 2 1 3 2 +HEARTS 4 1 5 4 +HEARSE 3 1 4 3 +HEARET 0 1 0 1 +HEALTH 1 1 1 2 +HEALED 1 1 1 2 +HEADS 4 1 4 5 +HEADQUARTERS 1 1 2 1 +HEADQUARTER 0 1 0 1 +HEADLONG 0 1 1 0 +HEADLIGHTS 0 1 1 0 +HE'LL 2 1 2 3 +HAWHAT 0 1 0 1 +HAWED 0 1 1 0 +HAW 0 1 1 0 +HAVING 22 1 22 23 +HAVEN 0 1 1 0 +HAV 0 1 0 1 +HATTERSLEY 0 1 1 0 +HATCHES 0 1 0 1 +HASAN'S 0 1 0 1 +HASAN 1 1 2 1 +HARVEY'SWHICH 0 1 1 0 +HARVEST 3 1 3 4 +HARRIS'S 0 1 0 1 +HARRIS 1 1 2 1 +HAROLD 0 1 1 0 +HARMON'S 0 1 0 1 +HARKNESS 0 1 1 0 +HARKINS 0 1 0 1 +HARE 0 1 1 0 +HARDWARE 0 1 1 0 +HARDLY 9 1 10 9 +HAPPY 7 1 7 8 +HAPPILY 1 1 1 2 +HAPPENED 10 1 10 11 +HAPLY 0 1 1 0 +HANDY 0 1 0 1 +HANDLES 0 1 0 1 +HAM 0 1 0 1 +HALTS 0 1 1 0 +HALTERSLEY 0 1 0 1 +HALT 4 1 5 4 +HALEY'S 0 1 1 0 +HALELY 0 1 0 1 +HAIRY 0 1 0 1 +HAIN'T 1 1 1 2 +HAIL 1 1 1 2 +HADN'T 1 1 1 2 +HADDA 0 1 1 0 +GYLINGDEN 0 1 1 0 +GUY'S 0 1 0 1 +GUV'NER 0 1 0 1 +GURSER 0 1 0 1 +GUNS 2 1 3 2 +GUNDRAIN 0 1 0 1 +GUNDERING 0 1 0 1 +GULLET 0 1 1 0 +GULFS 0 1 0 1 +GUISE 0 1 1 0 +GUIRUN'S 0 1 1 0 +GUINEAS 0 1 0 1 +GUINEA 2 1 3 2 +GUILD 0 1 1 0 +GUESTS 2 1 2 3 +GUESTRA 0 1 0 1 +GUEST 3 1 4 3 +GUESS 4 1 5 4 +GUDRUN 0 1 1 0 +GUARDS 1 1 1 2 +GUARDING 0 1 0 1 +GRUMBLINGLY 0 1 1 0 +GRUFFLY 0 1 1 0 +GROWING 2 1 3 2 +GROW 1 1 1 2 +GROVE 0 1 1 0 +GROUND 6 1 6 7 +GROTTO 0 1 1 0 +GROAN 1 1 2 1 +GRIS 0 1 0 1 +GRINDING 0 1 0 1 +GRIMSBY 0 1 1 0 +GRIM'S 0 1 0 1 +GRIGGLY 0 1 0 1 +GRIBIER 0 1 1 0 +GRIBES 0 1 0 1 +GREW 4 1 4 5 +GREENTON 0 1 1 0 +GREENSON 0 1 0 1 +GREENBACKS 0 1 1 0 +GREEBS 0 1 0 1 +GRECIOUS 0 1 0 1 +GREAVES 0 1 1 0 +GRAVE 16 1 17 16 +GRATITUDE 4 1 5 4 +GRASPS 0 1 1 0 +GRASPED 1 1 1 2 +GRAPPLE 0 1 1 0 +GRANTEL 0 1 0 1 +GRANDAME 0 1 1 0 +GRAND 5 1 5 6 +GRAMMEN 0 1 0 1 +GRAMMATIUS 0 1 0 1 +GRAMMATEUS 0 1 1 0 +GRAMMAR 0 1 0 1 +GRAM 0 1 1 0 +GRAHAME 0 1 0 1 +GRACIOUS 2 1 3 2 +GRACIAN 0 1 0 1 +GOWN 0 1 1 0 +GOVERNMENTS 2 1 3 2 +GORE 0 1 0 1 +GORDON'S 2 1 2 3 +GORD 0 1 0 1 +GOODS 4 1 5 4 +GOLDS 0 1 0 1 +GOLDFISH 0 1 1 0 +GOLDEN 2 1 3 2 +GOES 6 1 7 6 +GODEBILLIOS 0 1 1 0 +GOBYS 0 1 0 1 +GOBY'S 0 1 0 1 +GOBIES 0 1 0 1 +GNAWING 0 1 1 0 +GLISPIN 1 1 2 1 +GLAY 0 1 0 1 +GLASS 9 1 9 10 +GLAD 4 1 5 4 +GIVIN 0 1 0 1 +GIVANOVITCH 0 1 0 1 +GIRTHING 0 1 1 0 +GIRTHED 0 1 1 0 +GIRT 0 1 0 1 +GIRLS 5 1 6 5 +GIRDS 0 1 1 0 +GIRDING 0 1 0 1 +GIRDED 0 1 0 1 +GIMERNETTE 0 1 0 1 +GILINGDEN 0 1 0 1 +GIGS 0 1 0 1 +GESTURE 1 1 1 2 +GES 0 1 0 1 +GERFATHER 0 1 0 1 +GER 0 1 0 1 +GEORGE'SWHICH 0 1 1 0 +GEORGE 2 1 2 3 +GENTLEST 0 1 0 1 +GENTLEMEN'S 0 1 1 0 +GENTLEMAN'S 0 1 0 1 +GENIOR 0 1 0 1 +GENERO 0 1 0 1 +GAZE 3 1 4 3 +GAUTHIER 0 1 1 0 +GAULS 0 1 1 0 +GATHIER 0 1 0 1 +GATHERED 8 1 8 9 +GASTED 0 1 0 1 +GARDENS 0 1 0 1 +GARDEN'S 0 1 1 0 +GAMMON 0 1 1 0 +GAMIN 0 1 0 1 +GAME 6 1 6 7 +GALLOP 0 1 0 1 +GALLATIN 0 1 1 0 +GALLANT 3 1 3 4 +GAINED 1 1 1 2 +G'YIRLS 0 1 1 0 +FUZZ 0 1 1 0 +FUZ 0 1 0 1 +FUVENT 0 1 0 1 +FUTURE 3 1 3 4 +FURZE 0 1 1 0 +FUR 1 1 1 2 +FUPS 0 1 0 1 +FUND 1 1 1 2 +FROZE 0 1 1 0 +FROSOIS 0 1 0 1 +FRONTIERS 1 1 2 1 +FRONTIER 0 1 0 1 +FROGS 2 1 2 3 +FROG'S 0 1 1 0 +FRO 1 1 2 1 +FRITTEN 0 1 0 1 +FRISTOW 0 1 0 1 +FRISTOE'S 0 1 1 0 +FRIGHTFUL 3 1 4 3 +FRIGHTENS 0 1 1 0 +FRIENDS 17 1 17 18 +FRIENDLY 4 1 4 5 +FRIEND 13 1 14 13 +FRIED 0 1 0 1 +FRIAR 0 1 1 0 +FRET 1 1 2 1 +FRENCHARD 0 1 0 1 +FRENCH 4 1 5 4 +FREEZE 0 1 0 1 +FREEWAY 0 1 1 0 +FREES 0 1 1 0 +FREEDOM 5 1 6 5 +FREDERI 0 1 0 1 +FRED 0 1 0 1 +FRANK 1 1 1 2 +FRANJAMIN 0 1 0 1 +FRANCOIS 0 1 1 0 
+FRANCISCO 4 1 5 4 +FRANC 0 1 1 0 +FOURTEENTHAT'S 0 1 1 0 +FOURTEEN 3 1 3 4 +FOUNDER 0 1 0 1 +FOUNDED 1 1 2 1 +FOUND 37 1 37 38 +FOUGHT 1 1 2 1 +FOSSES 0 1 0 1 +FORTY 12 1 13 12 +FORTUNE 6 1 6 7 +FORTS 0 1 1 0 +FORTE 0 1 0 1 +FORMER 3 1 4 3 +FORMED 1 1 2 1 +FOREMOTHER 0 1 0 1 +FOREMAN 0 1 1 0 +FOREGATHERED 0 1 1 0 +FOREBOARDS 0 1 0 1 +FORE 0 1 0 1 +FORCEMENT 0 1 0 1 +FORCED 3 1 4 3 +FOOTED 0 1 1 0 +FOOD'S 0 1 0 1 +FOOD 3 1 4 3 +FONTREVAL 0 1 0 1 +FONTEVRAULT 0 1 1 0 +FOLLOWS 5 1 6 5 +FOLK 4 1 4 5 +FOLIEVED 0 1 0 1 +FOE 0 1 1 0 +FLUTTERING 1 1 2 1 +FLOWERBEDS 0 1 1 0 +FLOWER 1 1 1 2 +FLOW 0 1 1 0 +FLOSSY 0 1 1 0 +FLOSSIE 0 1 0 1 +FLORO'S 0 1 0 1 +FLORA'S 0 1 0 1 +FLOORBOARDS 0 1 1 0 +FLOOR 3 1 3 4 +FLIND 0 1 0 1 +FLIES 0 1 0 1 +FLEERED 0 1 1 0 +FLAVOUR 0 1 0 1 +FLAVORITE 0 1 0 1 +FLATTERER 0 1 1 0 +FLATTERED 0 1 1 0 +FLATHEADS 0 1 1 0 +FLAT 0 1 0 1 +FLASHLIGHT 0 1 1 0 +FLASH 2 1 2 3 +FLAREFF 0 1 0 1 +FLARED 0 1 0 1 +FLARE 0 1 0 1 +FLAP 0 1 0 1 +FLABRA 0 1 0 1 +FLABBERGASTED 0 1 1 0 +FIVE 20 1 20 21 +FITZ 0 1 0 1 +FISHED 1 1 2 1 +FIRSTTER 0 1 0 1 +FIRSTLY 0 1 0 1 +FIRSTER 0 1 1 0 +FIRST 54 1 54 55 +FIRS 0 1 0 1 +FINISHED 3 1 3 4 +FINICAL 0 1 1 0 +FINELY 1 1 2 1 +FINDING 7 1 8 7 +FINALLY 6 1 6 7 +FILTRATES 0 1 1 0 +FILTRATE 0 1 1 0 +FILLS 1 1 1 2 +FILIANTLY 0 1 0 1 +FIGURES 0 1 0 1 +FIGURED 0 1 0 1 +FIGURE'S 0 1 1 0 +FIGGER 0 1 1 0 +FIFTEENTH 0 1 1 0 +FIFTEEN 7 1 7 8 +FIELDS 2 1 2 3 +FIACRE 0 1 1 0 +FIACCHUS 0 1 0 1 +FERVENT 0 1 0 1 +FELT 18 1 19 18 +FEELS 1 1 2 1 +FEELING 10 1 10 11 +FEEL 12 1 13 12 +FEEDS 0 1 1 0 +FEED 0 1 1 0 +FEDERATE 0 1 0 1 +FEATS 0 1 0 1 +FEAT 0 1 0 1 +FAVOURABLE 0 1 0 1 +FAVORABLE 0 1 1 0 +FAVAN 0 1 0 1 +FAULTS 2 1 2 3 +FAULT 5 1 5 6 +FAUCES 0 1 1 0 +FATS 1 1 2 1 +FATHERS 1 1 1 2 +FATE 4 1 5 4 +FAT'S 0 1 0 1 +FASTENER'S 0 1 0 1 +FARRENDER 0 1 0 1 +FARINGDER 0 1 0 1 +FARE 0 1 1 0 +FANGED 0 1 1 0 +FAMOUS 1 1 1 2 +FAM'LY 0 1 1 0 +FALLING 5 1 6 5 +FALL 4 1 5 4 +FAIR 12 1 13 12 +FAILING 2 1 3 2 +FAILED 7 1 8 7 +FAGONNE 0 1 0 1 +FAFNER'S 0 1 0 1 +FACT 14 1 14 15 +FACED 2 1 2 3 +EXTRAVE 0 1 0 1 +EXTRACTED 0 1 0 1 +EXTRACT 2 1 3 2 +EXTRA 1 1 2 1 +EXTINUATING 0 1 0 1 +EXTENUATING 0 1 1 0 +EXTEND 0 1 1 0 +EXPOUNDS 0 1 0 1 +EXPOSED 3 1 3 4 +EXPOSE 2 1 3 2 +EXPLOITING 0 1 1 0 +EXPLODING 0 1 0 1 +EXPLAINED 4 1 4 5 +EXPERIOR 0 1 0 1 +EXPERIENCE 3 1 4 3 +EXPELLED 1 1 1 2 +EXPEL 0 1 1 0 +EXPANSE 1 1 2 1 +EXISTS 1 1 1 2 +EXIST 1 1 2 1 +EXERT 1 1 1 2 +EXECUTING 0 1 0 1 +EXCLAIMED 14 1 15 14 +EXCITING 0 1 1 0 +EXCEPT 11 1 11 12 +EXAMINING 2 1 3 2 +EXAMINED 3 1 3 4 +EXACTLY 9 1 9 10 +EXACKLY 0 1 1 0 +EVIL 4 1 5 4 +EVERYONE 0 1 1 0 +EVERYBODY'S 0 1 0 1 +EVERY 38 1 38 39 +EVERGREWING 0 1 0 1 +EVENLENTH 0 1 0 1 +EVAPORATE 2 1 3 2 +EV'YBODY'S 0 1 1 0 +EUSIDIUS 0 1 0 1 +EUSIBIUS 0 1 0 1 +EUREKA 0 1 0 1 +EUPHRATES 0 1 1 0 +EUPHRANOR 0 1 1 0 +EUPHRANER 0 1 0 1 +EUPHATEES 0 1 0 1 +EUIK 0 1 0 1 +EUGEN 0 1 0 1 +ETS 0 1 0 1 +ETHEREAL 1 1 2 1 +ETHER 2 1 3 2 +ETHELRIED'S 0 1 1 0 +ETHELRED 0 1 0 1 +ET 2 1 3 2 +ESTHER 0 1 1 0 +ESTATES 1 1 1 2 +ESPECIALTY 0 1 0 1 +ESCAPED 1 1 1 2 +ESCAPE 12 1 12 13 +ERNESTON 0 1 0 1 +ERNESTINE 0 1 1 0 +ERE'S 0 1 1 0 +EQUERRY'S 0 1 1 0 +EQUERRIES 0 1 0 1 +EPLORRIED 0 1 0 1 +EPIMORPHIN 0 1 0 1 +EPILRIED'S 0 1 0 1 +EPIGASTER 0 1 1 0 +EPHRAIM 0 1 1 0 +ENTRUSTED 0 1 1 0 +ENTRACT 0 1 0 1 +ENTR'ACTE 0 1 1 0 +ENTIRE 2 1 2 3 +ENTERED 10 1 11 10 +ENSUIT 0 1 0 1 +ENSUED 2 1 3 2 +ENSNARES 0 1 1 0 +ENSLAVED 2 1 3 2 +ENSLAVE 1 1 1 2 +ENRICHED 0 1 0 1 +ENRAGED 0 1 1 0 +ENRAGE 0 1 0 1 +ENNARING 0 1 0 1 +ENJOYED 0 1 0 1 +ENJOY 3 1 4 3 +ENJOINING 0 1 0 1 +ENGLISH 4 1 4 5 +ENFRANCHISEMENT 
0 1 1 0 +ENFORCEMENT 0 1 1 0 +ENDURETH 0 1 1 0 +ENDS 0 1 0 1 +ENDOWED 0 1 0 1 +ENDING 1 1 1 2 +ENCOMCHISEMENT 0 1 0 1 +EMOTION 1 1 2 1 +EMETIC 0 1 1 0 +EMBRUN 0 1 1 0 +EMBRON 0 1 0 1 +ELYSIAN 0 1 1 0 +ELYGIANS 0 1 0 1 +ELUSIVE 0 1 0 1 +ELSE 12 1 12 13 +ELLIS 0 1 1 0 +ELIZABETH 1 1 1 2 +ELISIONS 0 1 1 0 +ELECTIC 0 1 0 1 +ELDER 1 1 2 1 +ELBOW 0 1 1 0 +ELBERT 0 1 1 0 +ELBER 0 1 0 1 +EITHERN 0 1 0 1 +EIGHTHS 0 1 0 1 +EIGHTH 2 1 3 2 +EIGHT 9 1 9 10 +EH 2 1 2 3 +EGGS 1 1 2 1 +EFFIC 0 1 0 1 +EFFECT 1 1 1 2 +EDGING 2 1 3 2 +EDGED 0 1 0 1 +ECHOLYN 0 1 0 1 +EBERGASTER 0 1 0 1 +EAU 0 1 1 0 +EASILY 5 1 5 6 +EARTHLY 1 1 1 2 +EARS 3 1 3 4 +EARLS 0 1 0 1 +EARLIEST 0 1 1 0 +EAR 1 1 2 1 +EAMES 0 1 0 1 +EAD 0 1 1 0 +E'LL 0 1 1 0 +E'ER 0 1 1 0 +E 1 1 2 1 +DUSK 1 1 2 1 +DURRED 0 1 0 1 +DUPLICATES 1 1 2 1 +DUPE 0 1 0 1 +DUNNING 0 1 1 0 +DUMAS 0 1 1 0 +DUM 0 1 1 0 +DULL 2 1 3 2 +DUDS 0 1 1 0 +DRUGSTORE 0 1 1 0 +DRUG 0 1 0 1 +DROP 3 1 3 4 +DRINKIN 0 1 0 1 +DRIFTILY 0 1 0 1 +DRESSED 1 1 1 2 +DREADFUL 2 1 2 3 +DRAWN 1 1 1 2 +DRAWERS 1 1 2 1 +DRAW 1 1 2 1 +DRAUGHT 2 1 3 2 +DRATTO 0 1 0 1 +DRANK 2 1 2 3 +DRAGGER 0 1 0 1 +DOZE 0 1 0 1 +DOZ 0 1 0 1 +DOWNING 0 1 1 0 +DOTH 1 1 2 1 +DOSE 0 1 1 0 +DORRIS 0 1 0 1 +DORIS 0 1 0 1 +DORCART 0 1 0 1 +DOOM 0 1 0 1 +DONOVAN 1 1 2 1 +DONEGOOD 0 1 0 1 +DONALIN 0 1 0 1 +DON 5 1 5 6 +DOMONICO 0 1 0 1 +DOM 0 1 1 0 +DOLL 0 1 0 1 +DOIN 0 1 1 0 +DOGS 1 1 2 1 +DOESN'T 3 1 3 4 +DOCTRIPAR 0 1 0 1 +DOCTOR 24 1 25 24 +DOCKYARD 0 1 0 1 +DOCK 0 1 1 0 +DOAN 0 1 1 0 +DIXON 4 1 4 5 +DIVIDED 2 1 3 2 +DIVIDE 0 1 0 1 +DITZFIELD 0 1 0 1 +DITTY 0 1 0 1 +DITCHFIELD 0 1 1 0 +DISTRUSTED 0 1 1 0 +DISTRUDGED 0 1 0 1 +DISTRICT 2 1 2 3 +DISTRESSED 2 1 2 3 +DISTRACT 0 1 0 1 +DISTINCTS 0 1 0 1 +DISTICHS 0 1 1 0 +DISTAGGER 0 1 0 1 +DISSENTIENT 0 1 1 0 +DISPOVERTY 0 1 0 1 +DISPOSED 2 1 3 2 +DISPOS 0 1 0 1 +DISNEY 0 1 1 0 +DISINFECTING 0 1 1 0 +DISINFECT 0 1 0 1 +DISINDIAN 0 1 0 1 +DISFIGURED 0 1 1 0 +DISCOURSE 2 1 2 3 +DISCOUR 0 1 0 1 +DISASTROUS 0 1 1 0 +DISASTERATE 0 1 0 1 +DISAGREED 0 1 0 1 +DISAGREE 0 1 1 0 +DIRTS 0 1 0 1 +DIRECTIFY 0 1 0 1 +DIRE 0 1 1 0 +DINKS 1 1 2 1 +DINARS 0 1 1 0 +DILUTE 0 1 1 0 +DILIGION 0 1 0 1 +DIGGING 0 1 1 0 +DIGESON 0 1 0 1 +DIEU 0 1 0 1 +DIE 9 1 10 9 +DIDNY 0 1 0 1 +DICKET 0 1 0 1 +DICK 5 1 5 6 +DIA 0 1 0 1 +DEVOUR 0 1 1 0 +DEVOTED 3 1 3 4 +DETERMINED 4 1 5 4 +DETECTIVES 0 1 1 0 +DETECTIN 0 1 1 0 +DESTRUCTION 0 1 1 0 +DESTINIES 0 1 1 0 +DESSERTS 1 1 2 1 +DESPOTIC 0 1 1 0 +DESPITE 2 1 3 2 +DESKED 0 1 0 1 +DESIRES 2 1 3 2 +DESIRE 9 1 9 10 +DESIGNED 2 1 2 3 +DESIGN 1 1 2 1 +DESERVED 0 1 0 1 +DESERTS 0 1 0 1 +DERELICTS 0 1 1 0 +DERDS 0 1 0 1 +DEPTOR 0 1 0 1 +DEPRECATE 0 1 1 0 +DEPLICATES 0 1 0 1 +DEODORIZING 0 1 1 0 +DEMETER 3 1 4 3 +DEMEANOR 0 1 0 1 +DELUDE 0 1 0 1 +DELMONICO 0 1 1 0 +DELIVERED 4 1 4 5 +DELIBERATE 0 1 1 0 +DEFTLY 0 1 0 1 +DEFENDED 0 1 0 1 +DEFEND 3 1 4 3 +DEFECTION 0 1 1 0 +DEEPENED 0 1 1 0 +DEEP 9 1 9 10 +DECORTUNA 0 1 0 1 +DECLINING 1 1 1 2 +DECLINE 0 1 0 1 +DECLARING 0 1 1 0 +DEBTS 1 1 1 2 +DEBTOR 0 1 1 0 +DEBRAMIN 0 1 0 1 +DEBARRED 0 1 1 0 +DEBARED 0 1 0 1 +DEATHLY 0 1 1 0 +DEATHLIKE 0 1 1 0 +DEATH 16 1 16 17 +DEANS 0 1 1 0 +DEAN 0 1 0 1 +DEAL 11 1 11 12 +DEACH 0 1 1 0 +DAWNING 0 1 0 1 +DATED 1 1 2 1 +DAT'S 0 1 0 1 +DARNING 0 1 0 1 +DARKAND 0 1 1 0 +DARED 3 1 3 4 +DAPHNE'S 0 1 1 0 +DANDAN 2 1 3 2 +DANCERS 0 1 0 1 +DANCER 0 1 1 0 +DAMN 0 1 1 0 +DAME'S 0 1 1 0 +DAME 1 1 1 2 +DALYS 0 1 1 0 +DAILY 2 1 2 3 +DAILIES 0 1 0 1 +DAGOS 0 1 1 0 +DAGGULE 0 1 0 1 +DAG 0 1 0 1 +CYRUP 0 1 0 1 +CYNICISM 0 1 1 0 +CYMBALS 0 1 1 0 +CUSTOM 1 1 2 1 +CURSORY 0 1 0 1 +CURSORILY 0 1 1 0 
+CURSE 1 1 2 1 +CURRENTS 0 1 1 0 +CURRANTS 0 1 0 1 +CURL 0 1 1 0 +CUPIED 0 1 0 1 +CUP 0 1 1 0 +CUNITY 0 1 0 1 +CUISINE 0 1 1 0 +CRYING 1 1 1 2 +CRUX 0 1 1 0 +CRUSHING 1 1 2 1 +CRUMPLED 1 1 2 1 +CRUMBLY 0 1 1 0 +CRUMBLED 0 1 0 1 +CROYDEN 0 1 0 1 +CROST 0 1 1 0 +CROPPISH 0 1 0 1 +CROPFISH 0 1 0 1 +CROOKS 0 1 1 0 +CRIES 3 1 4 3 +CREW 5 1 5 6 +CREOLE 0 1 0 1 +CREEL 0 1 1 0 +CREEDS 0 1 0 1 +CREDITUDE 0 1 0 1 +CRAW 0 1 0 1 +CRATES 1 1 2 1 +CRASHING 0 1 0 1 +CRAMBLY 0 1 0 1 +CRABS 0 1 0 1 +CRAB 6 1 7 6 +COYNESS 0 1 1 0 +COY 1 1 2 1 +COXCOMB 0 1 1 0 +COWLEY'S 0 1 1 0 +COURTYARD 2 1 2 3 +COURSING 0 1 1 0 +COURFEYRAC 1 1 2 1 +COURFEREK 0 1 0 1 +COUNTENANCE 2 1 2 3 +COUNT 17 1 18 17 +COUNSELS 0 1 1 0 +COUNSELLOR 0 1 0 1 +COUNCILLOR 0 1 1 0 +COUNCIL 0 1 0 1 +COUISINE 0 1 0 1 +COTTOM 0 1 0 1 +COT 0 1 0 1 +COSTUM 0 1 0 1 +COST 1 1 2 1 +CORYDON 0 1 1 0 +CORTONA 0 1 1 0 +CORP 0 1 0 1 +CORNWEALTH 0 1 0 1 +CORNERED 0 1 0 1 +CORMOR 0 1 0 1 +CORKLE 0 1 1 0 +CORDS 0 1 0 1 +CORAL 0 1 1 0 +COQUETTE 1 1 2 1 +COPPER 2 1 2 3 +COPP 0 1 0 1 +COP'S 0 1 1 0 +COP 2 1 3 2 +COOPS 0 1 1 0 +COOL 4 1 5 4 +COOKS 0 1 0 1 +COOKER 0 1 0 1 +CONWEALTH 0 1 0 1 +CONTROLLED 1 1 1 2 +CONTROL 1 1 2 1 +CONTINUOUS 0 1 0 1 +CONTINUAL 0 1 1 0 +CONTENDED 0 1 0 1 +CONSUM 0 1 0 1 +CONSUL 0 1 0 1 +CONSTITUTE 1 1 1 2 +CONSTITUENT 0 1 1 0 +CONSTANT 3 1 4 3 +CONSONANTS 0 1 1 0 +CONSOMME 0 1 1 0 +CONSOLE 0 1 0 1 +CONNINGSBURG 0 1 0 1 +CONINGSBURGH 0 1 1 0 +CONFIRMATION 0 1 1 0 +CONFIRMATESON 0 1 0 1 +CONFINED 0 1 0 1 +CONFINE 0 1 0 1 +CONFIDENTIALLY 0 1 1 0 +CONFIDE 1 1 2 1 +CONFICERE 0 1 1 0 +CONFERS 0 1 0 1 +CONFECTIONERIES 0 1 0 1 +CONCOCTED 1 1 2 1 +CONCLUDED 2 1 2 3 +COMPLYING 0 1 0 1 +COMPEND 0 1 0 1 +COMPATRIOT 0 1 1 0 +COMPASSER 0 1 0 1 +COMPARABLE 0 1 0 1 +COMORIN 0 1 1 0 +COMMUNITY 4 1 5 4 +COMMONED 0 1 0 1 +COMMITTEE 5 1 6 5 +COMMISSORY 0 1 0 1 +COMMISSARY 1 1 2 1 +COMMENTS 0 1 0 1 +COMMENT 1 1 2 1 +COMMENCED 1 1 2 1 +COMMANDS 2 1 2 3 +COMMANDER 2 1 2 3 +COMIN 0 1 0 1 +COMICAL 0 1 1 0 +COMETH 0 1 1 0 +COMEST 0 1 1 0 +COMEDY 0 1 0 1 +COLONEL 27 1 28 27 +COLOGNE 0 1 1 0 +COLLETS 0 1 0 1 +COLLECTED 1 1 1 2 +COLLECT 0 1 1 0 +COLLARST 0 1 0 1 +COLDS 0 1 1 0 +COINS 1 1 2 1 +COIN 2 1 2 3 +COGNIZED 0 1 0 1 +COD 0 1 1 0 +COCOA 0 1 1 0 +COCKRELL 0 1 1 0 +COCKLE 0 1 0 1 +COBBER 0 1 1 0 +COAT 2 1 2 3 +COASTON 0 1 0 1 +COARSING 0 1 0 1 +COARSE 0 1 0 1 +COALESCED 0 1 1 0 +CO 0 1 0 1 +CLUXED 0 1 0 1 +CLUMB 0 1 1 0 +CLOTHS 0 1 0 1 +CLOTH 2 1 2 3 +CLOSET 1 1 2 1 +CLOSEST 0 1 1 0 +CLOMB 0 1 1 0 +CLOCKS 0 1 1 0 +CLISPIN 0 1 0 1 +CLINKED 0 1 0 1 +CLING 0 1 1 0 +CLIME 1 1 2 1 +CLIMBED 0 1 0 1 +CLEVERLY 0 1 1 0 +CLEVER 2 1 3 2 +CLERVAL 0 1 0 1 +CLEFTOMANIA 0 1 0 1 +CLEFT 1 1 2 1 +CLEAVE 0 1 1 0 +CLAWS 0 1 1 0 +CLAVIER 0 1 0 1 +CLASSES 3 1 4 3 +CLASS 6 1 6 7 +CLARGA 0 1 0 1 +CLARET 1 1 2 1 +CLAPTOMANIA 0 1 0 1 +CLAIRVAUX 0 1 1 0 +CLACKS 0 1 0 1 +CLACK 0 1 0 1 +CITIZENS 6 1 6 7 +CISEAUX 0 1 1 0 +CINORLA 0 1 0 1 +CINDERELLA 0 1 1 0 +CIGARS 1 1 1 2 +CIGARET 0 1 0 1 +CHURCHES 0 1 0 1 +CHUMMED 0 1 0 1 +CHUG 0 1 0 1 +CHUCKED 0 1 1 0 +CHRISTIE 0 1 0 1 +CHRISTIANS 0 1 1 0 +CHRISTIANNING 0 1 0 1 +CHRISTIANITY 2 1 3 2 +CHRISTIAN 0 1 0 1 +CHRISTI 0 1 1 0 +CHRISTENING 0 1 1 0 +CHRISTEN 0 1 0 1 +CHOUETTE 0 1 1 0 +CHOOSE 3 1 3 4 +CHONODEMAIRE 0 1 1 0 +CHOKINGLY 0 1 0 1 +CHLORIDE 0 1 0 1 +CHIPS 1 1 2 1 +CHINTZ 0 1 1 0 +CHIN'S 0 1 0 1 +CHILLS 0 1 1 0 +CHILLED 1 1 1 2 +CHILL 1 1 1 2 +CHIEFING 0 1 0 1 +CHIEF 8 1 9 8 +CHIDE 0 1 1 0 +CHID 0 1 0 1 +CHEST 3 1 3 4 +CHERRY 0 1 0 1 +CHEER 0 1 0 1 +CHEEKS 4 1 4 5 +CHEEKE 0 1 1 0 +CHEEKBONES 0 1 1 0 +CHED 0 1 0 1 +CHECK 0 1 0 1 +CHEAP 
1 1 1 2 +CHAUVELIN 0 1 0 1 +CHATEAU 0 1 0 1 +CHARLIE'S 0 1 0 1 +CHARLEY'S 0 1 1 0 +CHARGED 4 1 5 4 +CHARACTERISTIC 0 1 1 0 +CHARACTERED 0 1 0 1 +CHANT 1 1 1 2 +CHANCES 0 1 1 0 +CHANCE 11 1 11 12 +CHALONS 0 1 1 0 +CHAIN 0 1 1 0 +CHAFING 1 1 2 1 +CHADWELL 0 1 1 0 +CERTAINLY 13 1 13 14 +CENTRE 1 1 1 2 +CENTER 2 1 3 2 +CELL 1 1 2 1 +CELEM 0 1 0 1 +CEDRIC 1 1 2 1 +CEASE 0 1 0 1 +CAVES 0 1 0 1 +CAVERNMENT 0 1 0 1 +CAVE 0 1 0 1 +CAVALRYMEN 0 1 1 0 +CAUSE 6 1 7 6 +CAUGHT 5 1 5 6 +CATTLETTA 0 1 0 1 +CATS 1 1 2 1 +CATHOLIC 0 1 1 0 +CATHEDRAL 0 1 1 0 +CATCHED 0 1 1 0 +CASTLE 8 1 9 8 +CASTETH 0 1 1 0 +CARTS 0 1 0 1 +CART 2 1 3 2 +CARRIED 11 1 12 11 +CARRIAGE 2 1 3 2 +CARMINALS 0 1 0 1 +CARLIS 0 1 0 1 +CARED 4 1 4 5 +CARE 18 1 18 19 +CAPTURED 2 1 2 3 +CAPTAIN 17 1 17 18 +CAPT 0 1 0 1 +CAPS 0 1 0 1 +CAPRIVY 0 1 0 1 +CAPRIVI'S 0 1 1 0 +CAPLICH 0 1 0 1 +CAPITULUM 0 1 1 0 +CAPITULAT 0 1 0 1 +CAPITULANTES 0 1 1 0 +CAPITULAM 0 1 0 1 +CAPITALISTS 0 1 1 0 +CAPITALIST 0 1 0 1 +CAPITAL 2 1 3 2 +CAP 4 1 5 4 +CANS 0 1 1 0 +CANNOT 21 1 21 22 +CAMPATRIOT 0 1 0 1 +CAMPAIGN 0 1 1 0 +CALON 0 1 0 1 +CALLETH 0 1 1 0 +CALLEST 0 1 1 0 +CALL 13 1 13 14 +CAIN 0 1 1 0 +CAGLED 0 1 0 1 +CAGE 7 1 8 7 +CACKED 0 1 1 0 +CABINETS 0 1 0 1 +CABINET 1 1 2 1 +CA'M 0 1 1 0 +C 2 1 3 2 +BYE 0 1 1 0 +BUY 4 1 4 5 +BUTTON 0 1 1 0 +BUTTERFLY 0 1 1 0 +BUTCHUSE 0 1 0 1 +BUSY 5 1 5 6 +BUSINESSWHICH 0 1 1 0 +BUSINESS 12 1 12 13 +BURYING 0 1 1 0 +BURTLES 0 1 0 1 +BURST 2 1 3 2 +BURSHEBA 0 1 1 0 +BURNHILD 0 1 0 1 +BURNETH 0 1 1 0 +BURNED 2 1 2 3 +BURMANOIS 0 1 0 1 +BURIUM 0 1 0 1 +BURDENS 1 1 2 1 +BURDEN 1 1 1 2 +BUOYANT 0 1 0 1 +BULK 1 1 2 1 +BULBS 0 1 1 0 +BUILTON 0 1 0 1 +BUILDS 0 1 1 0 +BUFFETING 0 1 1 0 +BUFFET 0 1 0 1 +BUCK 1 1 1 2 +BRYNHILD'S 0 1 1 0 +BRUSH 3 1 3 4 +BRUNHOLD'S 0 1 0 1 +BROW 0 1 0 1 +BROTHER 17 1 18 17 +BROKER 0 1 0 1 +BRITTANNIUM 0 1 0 1 +BRING 12 1 12 13 +BRILLIANT 4 1 5 4 +BRIGHTENS 0 1 0 1 +BRIEF 2 1 3 2 +BREED 0 1 1 0 +BREATHLESS 1 1 2 1 +BREATHE 0 1 0 1 +BREATH 6 1 6 7 +BREASTPAND 0 1 0 1 +BREAST 1 1 2 1 +BREAKERS 0 1 1 0 +BRAZY 0 1 0 1 +BRAXBURY 0 1 0 1 +BRAU 0 1 1 0 +BRASS 1 1 2 1 +BRAMMER 0 1 0 1 +BRAMIN 0 1 0 1 +BRAMID 0 1 0 1 +BRAMIAN 0 1 0 1 +BRAMEN 0 1 0 1 +BRACEY 0 1 0 1 +BRACELET 1 1 1 2 +BRACEE 0 1 0 1 +BOZARD 0 1 0 1 +BOX 10 1 10 11 +BOWS 1 1 1 2 +BOURGES 0 1 1 0 +BOURGE 0 1 0 1 +BOUGHT 3 1 4 3 +BOU 0 1 0 1 +BOTTOMED 0 1 1 0 +BOTTLED 0 1 1 0 +BOTHERED 1 1 1 2 +BOTH 16 1 17 16 +BORN 7 1 7 8 +BOONE 0 1 1 0 +BOON 1 1 2 1 +BOOLA 0 1 0 1 +BONSES 0 1 0 1 +BONIUS 0 1 0 1 +BONES 2 1 2 3 +BOMB 0 1 1 0 +BOLTED 1 1 1 2 +BOLT 0 1 1 0 +BOLSHEVIKI 2 1 3 2 +BOEOTIAN 0 1 1 0 +BOBBED 0 1 0 1 +BOB'S 1 1 2 1 +BOATS 0 1 0 1 +BLUE 7 1 7 8 +BLOTCHYARD 0 1 0 1 +BLOOMIN 0 1 1 0 +BLOODSTAINED 0 1 0 1 +BLOODSHED 0 1 1 0 +BLOOD 7 1 8 7 +BLOKES 0 1 1 0 +BLOKE 0 1 1 0 +BLOCK 1 1 2 1 +BLEST 0 1 0 1 +BLESSED 5 1 6 5 +BLENDEST 0 1 0 1 +BLANKETED 0 1 1 0 +BLANKET 0 1 0 1 +BLAMMED 0 1 0 1 +BLACKLEG 0 1 1 0 +BLACKBURN 1 1 2 1 +BLACKBIRD 0 1 0 1 +BLACK 13 1 13 14 +BITTER 3 1 3 4 +BITING 0 1 1 0 +BISQUE 0 1 1 0 +BISHOIS 0 1 0 1 +BIRDSEYE 0 1 1 0 +BIRD'S 0 1 0 1 +BIOLECTION 0 1 0 1 +BIN 1 1 2 1 +BILLY 0 1 0 1 +BIG 8 1 8 9 +BIDS 0 1 0 1 +BIBOCO 0 1 0 1 +BIBLE 3 1 4 3 +BHANG 0 1 1 0 +BEULAH 0 1 1 0 +BETWEEN 21 1 21 22 +BETIDE 1 1 2 1 +BETHUNE 0 1 1 0 +BETCHA 0 1 1 0 +BETAKEN 0 1 1 0 +BET 0 1 1 0 +BEST 18 1 19 18 +BESSIE 0 1 0 1 +BESOON 0 1 0 1 +BESIDES 9 1 9 10 +BESIDE 3 1 4 3 +BERTIE 0 1 0 1 +BERNETH 0 1 0 1 +BERING 0 1 0 1 +BENSON 0 1 1 0 +BENOIT 0 1 1 0 +BENOIS 0 1 0 1 +BENNETT'S 0 1 0 1 +BENNETT 0 1 1 0 +BENEATH 3 1 3 4 +BEN 0 1 0 1 +BELT 0 1 0 
1 +BELOVED 1 1 1 2 +BELONGS 1 1 1 2 +BELONGED 0 1 1 0 +BELLY 2 1 3 2 +BELLOWED 0 1 1 0 +BELLEGER 0 1 0 1 +BELLE 0 1 1 0 +BELIKE 0 1 1 0 +BEGUN 2 1 3 2 +BEGIN 9 1 9 10 +BEGGING 1 1 2 1 +BEFALL 0 1 0 1 +BEFAL 0 1 1 0 +BEESER 0 1 0 1 +BEER 2 1 2 3 +BEE 0 1 0 1 +BEDS 1 1 1 2 +BEDROOM 1 1 1 2 +BEAUMANOIR 0 1 1 0 +BEATER 0 1 0 1 +BEAT'S 0 1 0 1 +BEARING 5 1 6 5 +BEARD 1 1 1 2 +BEALE'S 0 1 1 0 +BAZA 0 1 0 1 +BAXTER 0 1 1 0 +BAXT 0 1 0 1 +BAVARY 0 1 0 1 +BATON 0 1 1 0 +BATH 1 1 1 2 +BAT 1 1 1 2 +BASSORA 0 1 0 1 +BASEMENT 0 1 1 0 +BASE 1 1 1 2 +BARRING 0 1 0 1 +BARKLEY 0 1 1 0 +BARIUM 0 1 1 0 +BARGENO 0 1 0 1 +BARGELLO 0 1 1 0 +BARELY 1 1 2 1 +BAPTISMAL 0 1 1 0 +BANISH 0 1 1 0 +BANION 0 1 0 1 +BANDON 0 1 0 1 +BANDINELLO 0 1 1 0 +BANACY 0 1 0 1 +BALM 0 1 0 1 +BALLROOM 0 1 1 0 +BALLOTT 0 1 0 1 +BALLOT 3 1 4 3 +BALLOCK 0 1 1 0 +BALLAK 0 1 0 1 +BALL 0 1 0 1 +BALES 2 1 2 3 +BALAMMED 0 1 1 0 +BAILIQUE 0 1 0 1 +BAILEY 1 1 2 1 +BAGDAD 0 1 0 1 +BADE 1 1 1 2 +BADAWI 0 1 1 0 +BADARI 0 1 0 1 +BACKS 1 1 1 2 +BACKING 0 1 1 0 +BACK 51 1 51 52 +BABES 0 1 1 0 +BABE 0 1 0 1 +AZARIAH 0 1 1 0 +AYESHA 0 1 0 1 +AY 0 1 0 1 +AWK'ARD 0 1 1 0 +AWAY 39 1 39 40 +AW 0 1 1 0 +AVIGUE 0 1 0 1 +AVIDITY 0 1 1 0 +AVIDE 0 1 0 1 +AVENUE 2 1 3 2 +AVE 0 1 1 0 +AUSTENO 0 1 0 1 +AUNTS 0 1 0 1 +ATTENTIONS 0 1 0 1 +ATTENDED 0 1 0 1 +ATTEMPTED 2 1 3 2 +ATOM 0 1 0 1 +ATHELSTANE 0 1 1 0 +ASSYRIAL 0 1 0 1 +ASSUME 1 1 1 2 +ASSER 0 1 0 1 +ASSAILING 0 1 1 0 +ASLEEP 9 1 10 9 +ASKS 0 1 1 0 +ASIA 0 1 1 0 +ASHUR 0 1 1 0 +ASHORE 4 1 4 5 +ASH 1 1 1 2 +ARTIST 5 1 6 5 +ARTHUR 0 1 1 0 +ARSTS 0 1 1 0 +ARSINOE'S 0 1 1 0 +ARSENO'S 0 1 0 1 +ARSENAL 0 1 0 1 +ARRIVES 1 1 1 2 +ARRIVE 3 1 4 3 +ARRANGED 1 1 2 1 +ARQUEBALD 0 1 0 1 +AROUSED 0 1 1 0 +AROSE 2 1 2 3 +ARMY 19 1 19 20 +ARMANQUIN 0 1 0 1 +ARM 4 1 5 4 +ARKANSAS 0 1 1 0 +ARISING 0 1 0 1 +ARISED 0 1 0 1 +ARIAD 0 1 0 1 +ARDENTS 0 1 0 1 +ARDENT 2 1 3 2 +ARCHISON 0 1 0 1 +ARCHIBALD 0 1 1 0 +ARCHIAS 0 1 1 0 +ARBED 0 1 0 1 +APT 1 1 1 2 +APPROVE 0 1 1 0 +APPROPATOR 0 1 0 1 +APPOINT 0 1 0 1 +APPEARED 8 1 8 9 +APPEALED 0 1 1 0 +APPARENTLY 4 1 5 4 +APOMORPHINE 0 1 1 0 +APOLLO 0 1 0 1 +APOLIS 0 1 0 1 +APES 1 1 2 1 +APE 1 1 1 2 +ANYWAY 1 1 2 1 +ANYTHING 31 1 31 32 +ANYONE'S 0 1 1 0 +ANVILS 0 1 1 0 +ANTOLIAN 0 1 1 0 +ANTIDOTES 0 1 1 0 +ANTHONY 0 1 1 0 +ANSWERS 1 1 2 1 +ANSWERED 26 1 26 27 +ANNOYED 2 1 3 2 +ANNOY 0 1 0 1 +ANNOUNCE 1 1 1 2 +ANGUISH 3 1 4 3 +ANGESTON 0 1 1 0 +ANGERSON 0 1 0 1 +ANDY'S 0 1 0 1 +ANDS 0 1 1 0 +ANDBUT 0 1 1 0 +ANCESTORS 0 1 1 0 +ANALYSIS 0 1 1 0 +AMOUR 0 1 1 0 +AMORE 0 1 0 1 +AMONGST 4 1 5 4 +AMITY 0 1 0 1 +AMID 1 1 1 2 +AMERD 0 1 0 1 +AMBILS 0 1 0 1 +AMATIC 0 1 0 1 +ALTHIE 0 1 0 1 +ALTHIA 0 1 0 1 +ALREADY 15 1 16 15 +ALREAD 0 1 0 1 +ALOUD 2 1 2 3 +ALONGER 0 1 1 0 +ALLS 0 1 1 0 +ALLOWED 6 1 7 6 +ALLOWANCE 0 1 1 0 +ALLIGATOR 1 1 2 1 +ALLIED 0 1 1 0 +ALLEN 0 1 1 0 +ALLAYS 0 1 1 0 +ALKALOID 0 1 1 0 +ALISANDRO 0 1 0 1 +ALIGHTED 1 1 2 1 +ALID 0 1 0 1 +ALICELA 0 1 0 1 +ALI'S 0 1 1 0 +ALF 0 1 1 0 +ALEXAM 0 1 0 1 +ALESSANDRO 0 1 1 0 +ALE 0 1 0 1 +ALDITY 0 1 0 1 +ALCOHOLBA 0 1 0 1 +ALBERT 10 1 10 11 +ALBEA 0 1 0 1 +ALARMED 1 1 1 2 +ALAN 0 1 0 1 +ALAD 0 1 0 1 +AKELET 0 1 0 1 +AKANS 0 1 0 1 +AKALOID 0 1 0 1 +AID 4 1 5 4 +AGRIAN 0 1 0 1 +AGRARIAN 0 1 1 0 +AGONE 0 1 1 0 +AFTERWARDS 5 1 6 5 +AFTERWARD 2 1 2 3 +AFOOT 0 1 0 1 +AFFLICTION 0 1 1 0 +AFFLICATION 0 1 0 1 +ADVENTURES 2 1 3 2 +ADULTERATED 0 1 0 1 +ADULT 0 1 1 0 +ADULGE 0 1 0 1 +ADN'T 0 1 1 0 +ADHERENTS 0 1 1 0 +ADHERENCE 0 1 0 1 +ADDLESTEIN 0 1 0 1 +ADDISMA 0 1 0 1 +ADDIER 0 1 0 1 +ADDEST 0 1 0 1 +ADAIR 0 1 1 0 +ADAD 0 1 0 1 +AD 0 1 1 0 +ACQUIRE 0 1 0 
1 +ACQUAINTED 3 1 3 4 +ACKNOWLEDGE 1 1 2 1 +ACHESON 0 1 1 0 +ACHELIDES 0 1 0 1 +ACCUSE 0 1 0 1 +ACCOUNT 7 1 7 8 +ACCEPT 4 1 5 4 +ABSTAINED 0 1 0 1 +ABSTAIN 0 1 1 0 +ABSORBED 4 1 4 5 +ABSOLVED 0 1 1 0 +ABSENTEE 0 1 1 0 +ABSENTE 0 1 0 1 +ABROAD 4 1 4 5 +ABODE 2 1 3 2 +ABIDING 0 1 0 1 +ABASEMENT 0 1 0 1 +ZEAL 1 0 1 1 +ZAMAN 4 0 4 4 +YUNKERS 1 0 1 1 +YOURSELF 9 0 9 9 +YOUNGEST 1 0 1 1 +YOUNGERS 2 0 2 2 +YOUNGER 8 0 8 8 +YONDER 5 0 5 5 +YOLKS 1 0 1 1 +YIELDED 1 0 1 1 +YIELD 1 0 1 1 +YESTERDAY 3 0 3 3 +YEP 1 0 1 1 +YEOMAN 1 0 1 1 +YELLOW 4 0 4 4 +YELLED 1 0 1 1 +YELL 1 0 1 1 +YEARNS 1 0 1 1 +YEARNING 1 0 1 1 +YEA 1 0 1 1 +YAWN 1 0 1 1 +YARNS 1 0 1 1 +YARDS 1 0 1 1 +YACHT 1 0 1 1 +WYLDER'S 1 0 1 1 +WYLDER 1 0 1 1 +WRITTEN 5 0 5 5 +WRITING 1 0 1 1 +WRINKLES 1 0 1 1 +WRINGING 1 0 1 1 +WRIGGLING 1 0 1 1 +WRETCH 3 0 3 3 +WRECKAGE 1 0 1 1 +WREATHS 1 0 1 1 +WRAPPING 1 0 1 1 +WOUND 2 0 2 2 +WOULDST 1 0 1 1 +WOULDN'T 9 0 9 9 +WORST 1 0 1 1 +WORN 3 0 3 3 +WORLDLY 1 0 1 1 +WORKSHOP 1 0 1 1 +WORKHOUSE 1 0 1 1 +WORKERS 1 0 1 1 +WORKER 1 0 1 1 +WORE 3 0 3 3 +WOODSON 1 0 1 1 +WONT 4 0 4 4 +WONDROUS 1 0 1 1 +WONDERS 1 0 1 1 +WONDERING 1 0 1 1 +WONDERFULLY 1 0 1 1 +WONDERFUL 6 0 6 6 +WOMAN'S 2 0 2 2 +WOLF 1 0 1 1 +WOES 1 0 1 1 +WIZARDS 1 0 1 1 +WITTY 1 0 1 1 +WITNESSED 1 0 1 1 +WITHIN 11 0 11 11 +WITHHELD 2 0 2 2 +WITHDRAWN 2 0 2 2 +WITHAL 2 0 2 2 +WITCHES 1 0 1 1 +WIT 1 0 1 1 +WISTFUL 1 0 1 1 +WISHING 3 0 3 3 +WISHES 3 0 3 3 +WISH 15 0 15 15 +WISELY 1 0 1 1 +WISE 3 0 3 3 +WISDOM 4 0 4 4 +WISCONSIN 1 0 1 1 +WIRE 1 0 1 1 +WINTERS 1 0 1 1 +WINKED 1 0 1 1 +WINGS 2 0 2 2 +WINDOWS 1 0 1 1 +WIND 6 0 6 6 +WIN 2 0 2 2 +WILLINGLY 1 0 1 1 +WILLING 4 0 4 4 +WILFUL 1 0 1 1 +WIDOWER 1 0 1 1 +WIDOW 1 0 1 1 +WIDEN 1 0 1 1 +WICKED 2 0 2 2 +WHOSO 1 0 1 1 +WHOOP 1 0 1 1 +WHOMSOEVER 1 0 1 1 +WHOM 20 0 20 20 +WHOEVER 2 0 2 2 +WHITHER 3 0 3 3 +WHITEHALL 1 0 1 1 +WHISTLING 2 0 2 2 +WHISTLE 3 0 3 3 +WHISPERED 2 0 2 2 +WHIPPINGS 1 0 1 1 +WHIP 2 0 2 2 +WHIMPERING 1 0 1 1 +WHIM 1 0 1 1 +WHILST 1 0 1 1 +WHEREVER 2 0 2 2 +WHEREUPON 3 0 3 3 +WHEREIN 2 0 2 2 +WHEREFORE 2 0 2 2 +WHEREBY 2 0 2 2 +WHENCE 5 0 5 5 +WHEELS 1 0 1 1 +WHATSOEVER 1 0 1 1 +WETTED 1 0 1 1 +WET 3 0 3 3 +WESTERN 2 0 2 2 +WEST 3 0 3 3 +WEREN'T 1 0 1 1 +WEPT 2 0 2 2 +WENCH 1 0 1 1 +WELSH 1 0 1 1 +WELCOMED 1 0 1 1 +WELCOME 4 0 4 4 +WEIGHTY 1 0 1 1 +WEIGHT 1 0 1 1 +WEIGHING 1 0 1 1 +WEEPING 3 0 3 3 +WEEKS 1 0 1 1 +WEEKLY 1 0 1 1 +WEEK 6 0 6 6 +WEDNESDAY 2 0 2 2 +WEDDING 7 0 7 7 +WEB 1 0 1 1 +WEARY 4 0 4 4 +WEARING 2 0 2 2 +WEAPONS 1 0 1 1 +WEAPON 1 0 1 1 +WEALTHY 3 0 3 3 +WEALTH 3 0 3 3 +WEAKNESS 4 0 4 4 +WEAK 2 0 2 2 +WE'D 1 0 1 1 +WAZIR 5 0 5 5 +WAYS 5 0 5 5 +WAYLAID 1 0 1 1 +WAVING 1 0 1 1 +WAVES 2 0 2 2 +WAVE 1 0 1 1 +WATERY 1 0 1 1 +WATERVILLE 1 0 1 1 +WATERS 1 0 1 1 +WATCHING 7 0 7 7 +WASN'T 3 0 3 3 +WASHINGTON 2 0 2 2 +WASHED 4 0 4 4 +WASH 4 0 4 4 +WARS 2 0 2 2 +WARRANT 1 0 1 1 +WARNING 1 0 1 1 +WARNER 1 0 1 1 +WARN'T 2 0 2 2 +WARMTH 1 0 1 1 +WARMLY 1 0 1 1 +WARMEST 1 0 1 1 +WAREHOUSES 1 0 1 1 +WANTON 1 0 1 1 +WANTING 1 0 1 1 +WALLS 4 0 4 4 +WALLET 2 0 2 2 +WALKED 10 0 10 10 +WAKING 2 0 2 2 +WAKED 2 0 2 2 +WAITERS 1 0 1 1 +WAITER 1 0 1 1 +WAIT 11 0 11 11 +WAISTCOAT 3 0 3 3 +WAIST 1 0 1 1 +WAGONS 1 0 1 1 +WAGON 1 0 1 1 +WADDLED 1 0 1 1 +W 1 0 1 1 +VRONSKY 1 0 1 1 +VOYAGES 1 0 1 1 +VOYAGE 7 0 7 7 +VOWS 2 0 2 2 +VOW 1 0 1 1 +VOTING 2 0 2 2 +VOTED 1 0 1 1 +VON 1 0 1 1 +VOLUNTEERS 2 0 2 2 +VOLUNTARILY 1 0 1 1 +VOLUMINOUS 1 0 1 1 +VOLUME 2 0 2 2 +VOLLEY 1 0 1 1 +VOLCANOES 1 0 1 1 +VOICELESS 1 0 1 1 +VOICE 20 0 20 20 +VOCAL 2 0 2 2 +VITRIOL 1 0 1 1 +VITAL 1 0 1 1 +VISITOR 4 0 
4 4 +VISITING 1 0 1 1 +VISITED 2 0 2 2 +VISIT 8 0 8 8 +VISION 1 0 1 1 +VISCOUNT 1 0 1 1 +VIRTUOUS 2 0 2 2 +VIRTUE 2 0 2 2 +VIRGINIA 1 0 1 1 +VIOLENTLY 1 0 1 1 +VINTAGE 2 0 2 2 +VINE 1 0 1 1 +VINDICTIVENESS 1 0 1 1 +VILLAGE 2 0 2 2 +VILE 1 0 1 1 +VIGOROUS 3 0 3 3 +VIGILANT 1 0 1 1 +VIEWS 1 0 1 1 +VIEW 1 0 1 1 +VICTORY 1 0 1 1 +VICTORIAN 1 0 1 1 +VICTIMS 1 0 1 1 +VICTIM 3 0 3 3 +VICIOUS 3 0 3 3 +VICES 1 0 1 1 +VICE 1 0 1 1 +VEXED 1 0 1 1 +VEXATION 1 0 1 1 +VESSEL 2 0 2 2 +VERSE 1 0 1 1 +VERDICT 3 0 3 3 +VENTURE 2 0 2 2 +VENICE 2 0 2 2 +VENIAL 1 0 1 1 +VENGEANCE 1 0 1 1 +VEINS 1 0 1 1 +VEILS 1 0 1 1 +VEIL 2 0 2 2 +VEHICLES 1 0 1 1 +VEGETABLES 1 0 1 1 +VEGETABLE 1 0 1 1 +VECCHIO 1 0 1 1 +VAUDEVILLE 1 0 1 1 +VARIES 1 0 1 1 +VARIED 1 0 1 1 +VANITY 1 0 1 1 +VANISHED 1 0 1 1 +VAMPA 2 0 2 2 +VALUES 2 0 2 2 +VALUE 3 0 3 3 +VALUABLES 1 0 1 1 +VALJEAN'S 3 0 3 3 +VALJEAN 7 0 7 7 +VAGUELY 1 0 1 1 +VAGUE 1 0 1 1 +VACATION 1 0 1 1 +VACANTLY 1 0 1 1 +UTTERLY 3 0 3 3 +UTTERING 1 0 1 1 +UTMOST 5 0 5 5 +USURPER 2 0 2 2 +USING 3 0 3 3 +USEST 1 0 1 1 +USEFUL 1 0 1 1 +URGE 1 0 1 1 +UPSET 1 0 1 1 +UPRIGHT 1 0 1 1 +UPPER 5 0 5 5 +UNWEPT 1 0 1 1 +UNUSUALLY 1 0 1 1 +UNUSUAL 2 0 2 2 +UNTIL 16 0 16 16 +UNSWERVING 1 0 1 1 +UNSOUGHT 1 0 1 1 +UNSELFISH 1 0 1 1 +UNSEASONABLE 1 0 1 1 +UNREASONABLE 1 0 1 1 +UNPRESSED 1 0 1 1 +UNPLEASANT 3 0 3 3 +UNPITIED 1 0 1 1 +UNNATURAL 1 0 1 1 +UNMISTAKABLY 1 0 1 1 +UNLUCKY 2 0 2 2 +UNLIKELY 1 0 1 1 +UNKNOWN 2 0 2 2 +UNKIND 1 0 1 1 +UNJOINTED 1 0 1 1 +UNIVERSE 1 0 1 1 +UNIVERSAL 5 0 5 5 +UNITED 5 0 5 5 +UNISON 1 0 1 1 +UNIONISTS 1 0 1 1 +UNION 1 0 1 1 +UNINTENTIONAL 1 0 1 1 +UNIFORM 2 0 2 2 +UNHEARD 1 0 1 1 +UNHAPPINESS 1 0 1 1 +UNGRATEFUL 3 0 3 3 +UNFORTUNATELY 2 0 2 2 +UNFORTUNATE 2 0 2 2 +UNFLATTERING 1 0 1 1 +UNEASY 4 0 4 4 +UNEASILY 1 0 1 1 +UNDOUBTEDLY 1 0 1 1 +UNDERTOOK 1 0 1 1 +UNDERTONE 1 0 1 1 +UNDERTAKE 1 0 1 1 +UNDERSTOOD 6 0 6 6 +UNDERSTANDS 1 0 1 1 +UNDERSTANDING 5 0 5 5 +UNDERSTAND 7 0 7 7 +UNDERNEATH 1 0 1 1 +UNDERGROUND 1 0 1 1 +UNDERGO 1 0 1 1 +UNCONNECTED 1 0 1 1 +UNCONCERN 1 0 1 1 +UNCOMMON 1 0 1 1 +UNCOMFORTABLY 2 0 2 2 +UNCOMFORTABLE 1 0 1 1 +UNCLE'S 2 0 2 2 +UNCERTAIN 2 0 2 2 +UNBURDEN 1 0 1 1 +UNAWARE 1 0 1 1 +UNASSISTED 1 0 1 1 +UNALTERABLE 1 0 1 1 +UNABLE 1 0 1 1 +UGLY 1 0 1 1 +TYRANTS 1 0 1 1 +TYRANT 2 0 2 2 +TYPE 1 0 1 1 +TWIST 1 0 1 1 +TWILIGHT 1 0 1 1 +TWICE 2 0 2 2 +TWENTY 16 0 16 16 +TWAS 1 0 1 1 +TWAIN 2 0 2 2 +TUTORS 2 0 2 2 +TUTOR 1 0 1 1 +TURRETS 1 0 1 1 +TURNING 6 0 6 6 +TURKISH 1 0 1 1 +TURK 1 0 1 1 +TURBAN 1 0 1 1 +TUNE 1 0 1 1 +TUMULT 1 0 1 1 +TUMBLE 1 0 1 1 +TUG 1 0 1 1 +TUESDAY 1 0 1 1 +TUCKED 1 0 1 1 +TUBE 2 0 2 2 +TRUTH 10 0 10 10 +TRUSTWORTHY 1 0 1 1 +TRUSTED 1 0 1 1 +TRUST 3 0 3 3 +TRUNK 1 0 1 1 +TRUCE 2 0 2 2 +TROUT 1 0 1 1 +TROUSERS 2 0 2 2 +TROUBLING 1 0 1 1 +TROUBLED 6 0 6 6 +TROUBLE 8 0 8 8 +TROOPS 2 0 2 2 +TROLL 1 0 1 1 +TRIUMPHING 1 0 1 1 +TRIUMPH 3 0 3 3 +TRIP 2 0 2 2 +TRIM 1 0 1 1 +TRIFLING 3 0 3 3 +TRIES 3 0 3 3 +TRIBUTE 2 0 2 2 +TRIANGLE 1 0 1 1 +TRIAL 3 0 3 3 +TREND 1 0 1 1 +TREMBLING 2 0 2 2 +TREMBLE 1 0 1 1 +TREES 3 0 3 3 +TREE 9 0 9 9 +TREATMENT 1 0 1 1 +TREATED 1 0 1 1 +TREAT 3 0 3 3 +TREASONS 1 0 1 1 +TREACHEROUSLY 1 0 1 1 +TRAVILLA 1 0 1 1 +TRAVELLERS 2 0 2 2 +TRAP 2 0 2 2 +TRANSPORTED 2 0 2 2 +TRANSPARENT 1 0 1 1 +TRANSLATED 1 0 1 1 +TRANSITORINESS 1 0 1 1 +TRANSFORMING 1 0 1 1 +TRANSFIGURED 1 0 1 1 +TRANSFERENCE 1 0 1 1 +TRANQUILLITIES 1 0 1 1 +TRAMP 2 0 2 2 +TRAGIC 1 0 1 1 +TRADITIONAL 1 0 1 1 +TRACEABLE 1 0 1 1 +TRACE 2 0 2 2 +TOY 1 0 1 1 +TOWNSFOLK 1 0 1 1 +TOWERS 1 0 1 1 +TOWERING 1 0 1 1 +TOUCHING 4 0 4 4 +TOTING 1 0 
1 1 +TOSSING 1 0 1 1 +TOSSED 1 0 1 1 +TORTURES 1 0 1 1 +TORN 2 0 2 2 +TORMENTOR 2 0 2 2 +TOPS 1 0 1 1 +TOPIC 1 0 1 1 +TONGUES 1 0 1 1 +TONES 2 0 2 2 +TONE 6 0 6 6 +TOMBS 1 0 1 1 +TOMATO 1 0 1 1 +TOLERABLY 1 0 1 1 +TOKEN 1 0 1 1 +TOILING 1 0 1 1 +TOILETTE 1 0 1 1 +TOIL 1 0 1 1 +TOGETHER 11 0 11 11 +TOES 2 0 2 2 +TOBACCO 7 0 7 7 +TOASTED 2 0 2 2 +TOAST 1 0 1 1 +TIS 4 0 4 4 +TIPPLING 1 0 1 1 +TINY 1 0 1 1 +TINKLE 1 0 1 1 +TINCTURED 1 0 1 1 +TIMEPIECE 1 0 1 1 +TIME'S 1 0 1 1 +TIMBER 1 0 1 1 +TIGHTLY 2 0 2 2 +TIGHT 1 0 1 1 +TIED 2 0 2 2 +TIDES 1 0 1 1 +TICKLING 1 0 1 1 +TICKING 1 0 1 1 +TICKET 1 0 1 1 +TIBER 1 0 1 1 +THYSELF 3 0 3 3 +THYME 1 0 1 1 +THWARTED 1 0 1 1 +THURSDAY 1 0 1 1 +THUNDER 3 0 3 3 +THRUST 6 0 6 6 +THROWN 1 0 1 1 +THROW 2 0 2 2 +THROUGHOUT 3 0 3 3 +THRONE 2 0 2 2 +THROBBED 1 0 1 1 +THROAT 2 0 2 2 +THRILLING 1 0 1 1 +THREES 1 0 1 1 +THREATS 3 0 3 3 +THREATENED 1 0 1 1 +THREAD 2 0 2 2 +THRACE 1 0 1 1 +THOUSANDTH 1 0 1 1 +THOUGHTS 5 0 5 5 +THOUGHTFUL 2 0 2 2 +THOROUGHLY 1 0 1 1 +THORNTON 4 0 4 4 +THONG 1 0 1 1 +THOMAS 2 0 2 2 +THIRTY 7 0 7 7 +THIRTEEN 1 0 1 1 +THIRSTY 1 0 1 1 +THIRSTING 1 0 1 1 +THINKS 2 0 2 2 +THIEVES 1 0 1 1 +THIEF 2 0 2 2 +THICKENING 1 0 1 1 +THICK 4 0 4 4 +THEY'D 2 0 2 2 +THEREWITH 1 0 1 1 +THEREIN 3 0 3 3 +THEREFORE 12 0 12 12 +THEREAFTER 1 0 1 1 +THERE'LL 1 0 1 1 +THEORY 2 0 2 2 +THEOLOGIANS 1 0 1 1 +THENCEFORTH 1 0 1 1 +THENCE 1 0 1 1 +THANKFUL 2 0 2 2 +THANKED 2 0 2 2 +THANK 7 0 7 7 +TEXAS 1 0 1 1 +TESTING 1 0 1 1 +TESTIMONY 1 0 1 1 +TESTIFY 2 0 2 2 +TERROR 5 0 5 5 +TERRIFIC 2 0 2 2 +TERRIBLE 4 0 4 4 +TERMS 1 0 1 1 +TERM 2 0 2 2 +TERENTIUS 1 0 1 1 +TENDING 1 0 1 1 +TENDERNESS 1 0 1 1 +TENDERLY 1 0 1 1 +TENDER 3 0 3 3 +TENDENCY 1 0 1 1 +TENACITY 1 0 1 1 +TEMPTRESS 1 0 1 1 +TEMPTING 1 0 1 1 +TEMPTATION 4 0 4 4 +TEMPT 1 0 1 1 +TEMPORARY 2 0 2 2 +TEMPLARS 2 0 2 2 +TEMPEST 1 0 1 1 +TEMPERATURE 1 0 1 1 +TEMPERATE 2 0 2 2 +TEMPERAMENT 1 0 1 1 +TEMPER 1 0 1 1 +TELLING 2 0 2 2 +TELEPHONE 1 0 1 1 +TEEMING 1 0 1 1 +TECHNICAL 1 0 1 1 +TEASPOONFUL 1 0 1 1 +TEARING 1 0 1 1 +TEAR 3 0 3 3 +TEACHING 1 0 1 1 +TEACHERS 1 0 1 1 +TEACH 2 0 2 2 +TAYLOR 1 0 1 1 +TAXES 1 0 1 1 +TAUNTS 1 0 1 1 +TAUGHT 1 0 1 1 +TASTES 1 0 1 1 +TASTED 2 0 2 2 +TASK 3 0 3 3 +TARRIED 1 0 1 1 +TAPE 1 0 1 1 +TAP 1 0 1 1 +TANNER 1 0 1 1 +TALL 2 0 2 2 +TALKING 5 0 5 5 +TALKER 1 0 1 1 +TALES 1 0 1 1 +TALENT 1 0 1 1 +TAKINGS 1 0 1 1 +TAKES 3 0 3 3 +TAINTED 1 0 1 1 +TAILS 2 0 2 2 +TAILOR'S 1 0 1 1 +TAIL 3 0 3 3 +TAGGING 1 0 1 1 +TACK 1 0 1 1 +TABLETS 2 0 2 2 +TABLES 2 0 2 2 +TABLE 7 0 7 7 +SYSTEM 2 0 2 2 +SYRINGE 1 0 1 1 +SYMPTOMS 1 0 1 1 +SYMPATHIES 1 0 1 1 +SWUNG 3 0 3 3 +SWITCHED 1 0 1 1 +SWITCH 1 0 1 1 +SWISS 1 0 1 1 +SWINGING 1 0 1 1 +SWINGED 1 0 1 1 +SWIMS 1 0 1 1 +SWIFTLY 1 0 1 1 +SWEPT 1 0 1 1 +SWELL 1 0 1 1 +SWEETNESS 2 0 2 2 +SWEETMEATS 2 0 2 2 +SWEETMEAT 2 0 2 2 +SWEET 3 0 3 3 +SWEEPING 1 0 1 1 +SWEEP 1 0 1 1 +SWEAR 5 0 5 5 +SWAYING 1 0 1 1 +SWAY 1 0 1 1 +SWARMED 2 0 2 2 +SWAM 1 0 1 1 +SWALLOWING 1 0 1 1 +SWALLOWED 4 0 4 4 +SVIAZHSKY 1 0 1 1 +SUSTAINS 1 0 1 1 +SUSPICIOUS 3 0 3 3 +SUSPENDED 1 0 1 1 +SUSPECTED 6 0 6 6 +SUSPECT 1 0 1 1 +SUSAN'S 1 0 1 1 +SURVEYED 1 0 1 1 +SURROUNDINGS 1 0 1 1 +SURROUNDING 1 0 1 1 +SURROUNDED 1 0 1 1 +SURRENDERING 1 0 1 1 +SURRENDERED 1 0 1 1 +SURRENDER 2 0 2 2 +SURPRISED 2 0 2 2 +SURPRISE 4 0 4 4 +SURPLICE 1 0 1 1 +SURPASS 1 0 1 1 +SURMOUNTED 1 0 1 1 +SURLY 1 0 1 1 +SURFACE 3 0 3 3 +SUPPRESS 1 0 1 1 +SUPPOSITION 1 0 1 1 +SUPPOSED 6 0 6 6 +SUPPORTED 1 0 1 1 +SUPPORT 1 0 1 1 +SUPPLY 2 0 2 2 +SUPPLIED 3 0 3 3 +SUPPLICATION 1 0 1 1 +SUPPER 1 0 1 1 +SUPERNATURAL 2 0 2 2 
+SUPERNACULUM 1 0 1 1 +SUPERLATIVE 1 0 1 1 +SUPERIORS 1 0 1 1 +SUPERIOR 4 0 4 4 +SUP 1 0 1 1 +SUNSHINY 1 0 1 1 +SUNSHINE 1 0 1 1 +SUNRISE 2 0 2 2 +SUNNYSIDE 1 0 1 1 +SUNG 2 0 2 2 +SUNDAY 4 0 4 4 +SUMS 2 0 2 2 +SUMMONED 1 0 1 1 +SUMMON 1 0 1 1 +SUMMIT 1 0 1 1 +SUMMER 6 0 6 6 +SULTRY 1 0 1 1 +SULTAN 2 0 2 2 +SUITS 1 0 1 1 +SUITABLE 2 0 2 2 +SUICIDE 1 0 1 1 +SUGGESTED 2 0 2 2 +SUGAR 9 0 9 9 +SUFFOLK 1 0 1 1 +SUFFICIENTLY 2 0 2 2 +SUFFERINGS 1 0 1 1 +SUFFERING 2 0 2 2 +SUFFERED 3 0 3 3 +SUFFER 1 0 1 1 +SUDDENLY 8 0 8 8 +SUCK 1 0 1 1 +SUCH 44 0 44 44 +SUCCUMBED 1 0 1 1 +SUCCESSIVELY 1 0 1 1 +SUCCESSIVE 1 0 1 1 +SUCCESSFULLY 1 0 1 1 +SUCCESSES 2 0 2 2 +SUCCESS 2 0 2 2 +SUCCEEDING 1 0 1 1 +SUCCEEDED 3 0 3 3 +SUCCEED 1 0 1 1 +SUBURB 1 0 1 1 +SUBSTITUTING 1 0 1 1 +SUBSTANCES 1 0 1 1 +SUBSTANCE 3 0 3 3 +SUBSISTENCE 1 0 1 1 +SUBSIDED 1 0 1 1 +SUBSEQUENT 1 0 1 1 +SUBORDINATED 1 0 1 1 +SUBMITTED 2 0 2 2 +SUBMISSIVE 1 0 1 1 +SUBMISSION 1 0 1 1 +SUBJECTS 6 0 6 6 +SUBJECTED 3 0 3 3 +SUBDUED 2 0 2 2 +STYLED 1 0 1 1 +STYLE 1 0 1 1 +STURDY 1 0 1 1 +STUPID 2 0 2 2 +STUMBLED 1 0 1 1 +STUFFS 1 0 1 1 +STUFF 1 0 1 1 +STUDYING 1 0 1 1 +STUDY 1 0 1 1 +STUDENTS 1 0 1 1 +STUDENT 2 0 2 2 +STUCK 2 0 2 2 +STRUGGLES 1 0 1 1 +STRUCTURE 1 0 1 1 +STROVE 1 0 1 1 +STRONGLY 1 0 1 1 +STRONGER 1 0 1 1 +STRONG 12 0 12 12 +STRODE 1 0 1 1 +STRIPPED 2 0 2 2 +STRIKING 1 0 1 1 +STRIDES 1 0 1 1 +STRICTLY 1 0 1 1 +STREWN 1 0 1 1 +STRETCHING 1 0 1 1 +STRETCHER 1 0 1 1 +STRETCH 1 0 1 1 +STRENUOUSLY 1 0 1 1 +STRENGTHENED 2 0 2 2 +STRENGTH 12 0 12 12 +STREETS 2 0 2 2 +STREAM 4 0 4 4 +STRAYING 1 0 1 1 +STRATAGEM 1 0 1 1 +STRANGER 3 0 3 3 +STRANGELY 1 0 1 1 +STRAINING 1 0 1 1 +STRAIGHTWAY 1 0 1 1 +STRAIGHTFORWARD 1 0 1 1 +STOUT 1 0 1 1 +STORY 9 0 9 9 +STORMED 1 0 1 1 +STORM 1 0 1 1 +STOREHOUSES 1 0 1 1 +STOPPING 3 0 3 3 +STOP 5 0 5 5 +STOOL 2 0 2 2 +STONES 4 0 4 4 +STOMACH 3 0 3 3 +STIRRED 1 0 1 1 +STIR 1 0 1 1 +STILE 1 0 1 1 +STIFLING 1 0 1 1 +STIFLED 2 0 2 2 +STIFLE 3 0 3 3 +STIFFNESS 1 0 1 1 +STIFF 2 0 2 2 +STEWART 1 0 1 1 +STEWARDS 1 0 1 1 +STEWARD 1 0 1 1 +STEPHEN 1 0 1 1 +STEPAN 2 0 2 2 +STENOGRAPHIC 1 0 1 1 +STEMS 1 0 1 1 +STEERAGE 2 0 2 2 +STEEP 1 0 1 1 +STEEL 1 0 1 1 +STEED 1 0 1 1 +STEAMED 1 0 1 1 +STEAMBOAT 2 0 2 2 +STEALTHILY 1 0 1 1 +STEAL 1 0 1 1 +STEADY 3 0 3 3 +STEADILY 1 0 1 1 +STATUES 3 0 3 3 +STATIONED 2 0 2 2 +STATION 2 0 2 2 +STATESMAN 1 0 1 1 +STATEMENT 3 0 3 3 +STAT 1 0 1 1 +STARVE 1 0 1 1 +STARTLING 1 0 1 1 +STARTLED 1 0 1 1 +STARTING 1 0 1 1 +STARTED 10 0 10 10 +STARCHY 1 0 1 1 +STANLEY 2 0 2 2 +STANDPOINT 1 0 1 1 +STANDING 10 0 10 10 +STAND 7 0 7 7 +STAMPED 1 0 1 1 +STAMMERED 1 0 1 1 +STAMMER 1 0 1 1 +STAKED 1 0 1 1 +STAKE 1 0 1 1 +STAIRCASE 1 0 1 1 +STAGE 5 0 5 5 +STABLE 1 0 1 1 +SQUIRE 3 0 3 3 +SQUEEZE 1 0 1 1 +SQUEAKS 1 0 1 1 +SQUATTED 1 0 1 1 +SQUARE 2 0 2 2 +SQUALL 1 0 1 1 +SQUALID 1 0 1 1 +SQUAD 2 0 2 2 +SPYING 1 0 1 1 +SPY 1 0 1 1 +SPRINKLES 1 0 1 1 +SPRINGS 1 0 1 1 +SPRING 4 0 4 4 +SPRIG 1 0 1 1 +SPREADS 1 0 1 1 +SPREAD 4 0 4 4 +SPRANG 3 0 3 3 +SPOTTED 1 0 1 1 +SPOT 6 0 6 6 +SPOON 1 0 1 1 +SPOKEN 2 0 2 2 +SPOKE 15 0 15 15 +SPOILS 1 0 1 1 +SPLIT 2 0 2 2 +SPITEFUL 1 0 1 1 +SPIT 1 0 1 1 +SPIRITUAL 1 0 1 1 +SPIRAL 1 0 1 1 +SPINSTER 1 0 1 1 +SPIDER 1 0 1 1 +SPHERE 1 0 1 1 +SPELL 1 0 1 1 +SPEEDILY 1 0 1 1 +SPEED 1 0 1 1 +SPECULATED 1 0 1 1 +SPECTATORS 1 0 1 1 +SPECTACLE 1 0 1 1 +SPECIES 1 0 1 1 +SPECIAL 3 0 3 3 +SPEAKS 1 0 1 1 +SPEAKING 7 0 7 7 +SPEAK 15 0 15 15 +SPASM 1 0 1 1 +SPARROWS 1 0 1 1 +SPARK 1 0 1 1 +SPARING 1 0 1 1 +SPARE 1 0 1 1 +SPANKER 1 0 1 1 +SPANISH 1 0 1 1 +SPADES 1 0 1 1 +SOWING 1 0 1 1 +SOUP 1 0 
1 1 +SOUNDS 2 0 2 2 +SOUNDED 3 0 3 3 +SOUND 12 0 12 12 +SOULS 2 0 2 2 +SORTS 4 0 4 4 +SORRY 3 0 3 3 +SORROWING 1 0 1 1 +SORROW 1 0 1 1 +SORELY 1 0 1 1 +SORE 1 0 1 1 +SORCERER 1 0 1 1 +SOOTH 1 0 1 1 +SOMEWHAT 5 0 5 5 +SOMETIMES 14 0 14 14 +SOMETHING'S 1 0 1 1 +SOMEHOW 3 0 3 3 +SOMEBODY 3 0 3 3 +SOLVE 1 0 1 1 +SOLUTION 4 0 4 4 +SOLUBLE 2 0 2 2 +SOLOMON 1 0 1 1 +SOLIDS 1 0 1 1 +SOLIDLY 1 0 1 1 +SOLID 1 0 1 1 +SOLICITUDE 1 0 1 1 +SOLEMNLY 1 0 1 1 +SOLEMNITY 1 0 1 1 +SOLEMN 1 0 1 1 +SOLDIERS 3 0 3 3 +SOLDIER 1 0 1 1 +SOLD 4 0 4 4 +SOLACE 1 0 1 1 +SOJOURN 2 0 2 2 +SOIL 2 0 2 2 +SOFTLY 2 0 2 2 +SODA 1 0 1 1 +SOCIETY 1 0 1 1 +SOCIETIES 1 0 1 1 +SOCIAL 12 0 12 12 +SOBERLY 1 0 1 1 +SOBER 4 0 4 4 +SOARING 1 0 1 1 +SOAK 1 0 1 1 +SNOOZING 1 0 1 1 +SNEEZE 2 0 2 2 +SNEERED 1 0 1 1 +SNEAKY 1 0 1 1 +SNATCHER 2 0 2 2 +SNATCH 1 0 1 1 +SNAKE 1 0 1 1 +SMUGGLERS 7 0 7 7 +SMUGGLED 1 0 1 1 +SMOULDERING 1 0 1 1 +SMOTE 2 0 2 2 +SMOKING 3 0 3 3 +SMOKESTACKS 1 0 1 1 +SMOKERS 3 0 3 3 +SMOKED 2 0 2 2 +SMOKE 2 0 2 2 +SMITH 1 0 1 1 +SMILING 2 0 2 2 +SMILED 1 0 1 1 +SMELT 1 0 1 1 +SMART 1 0 1 1 +SMALLEST 1 0 1 1 +SMALLER 1 0 1 1 +SLUMBER 2 0 2 2 +SLOWLY 6 0 6 6 +SLOW 3 0 3 3 +SLIPPING 1 0 1 1 +SLIPPER 1 0 1 1 +SLIP 3 0 3 3 +SLING 1 0 1 1 +SLIGHT 1 0 1 1 +SLICES 2 0 2 2 +SLEPT 3 0 3 3 +SLENDER 2 0 2 2 +SLEEVES 1 0 1 1 +SLEEPY 2 0 2 2 +SLEEPS 2 0 2 2 +SLEEPING 6 0 6 6 +SLEEPER 1 0 1 1 +SLEEP 15 0 15 15 +SLEDGE 1 0 1 1 +SLAYING 1 0 1 1 +SLAY 1 0 1 1 +SLAVES 2 0 2 2 +SLAVERY 1 0 1 1 +SLAVE 3 0 3 3 +SLAPPED 1 0 1 1 +SLAMMED 1 0 1 1 +SLAIN 2 0 2 2 +SKYLIGHT 2 0 2 2 +SKY 3 0 3 3 +SKULLS 1 0 1 1 +SKULL 1 0 1 1 +SKIRTS 1 0 1 1 +SKIRMISH 1 0 1 1 +SKIN 5 0 5 5 +SKIMMING 1 0 1 1 +SKILLED 1 0 1 1 +SKILFULLY 1 0 1 1 +SKIES 1 0 1 1 +SKETCH 1 0 1 1 +SIZE 5 0 5 5 +SIXTY 7 0 7 7 +SIXTH 5 0 5 5 +SIXTEEN 2 0 2 2 +SITUATION 1 0 1 1 +SITTING 3 0 3 3 +SITTETH 1 0 1 1 +SISTERS 4 0 4 4 +SISTERLY 1 0 1 1 +SISTER 8 0 8 8 +SINNED 1 0 1 1 +SINKS 1 0 1 1 +SINGULAR 2 0 2 2 +SINGLE 8 0 8 8 +SINGER 1 0 1 1 +SINGED 1 0 1 1 +SINCERITY 1 0 1 1 +SINCERE 1 0 1 1 +SINCE 17 0 17 17 +SIN 2 0 2 2 +SIMPLY 3 0 3 3 +SIMPLE 4 0 4 4 +SIMMERING 1 0 1 1 +SIMILAR 2 0 2 2 +SILVERWARE 1 0 1 1 +SILL 1 0 1 1 +SILK 1 0 1 1 +SILENTLY 2 0 2 2 +SILENT 9 0 9 9 +SILENCED 1 0 1 1 +SILAS 1 0 1 1 +SIGNS 2 0 2 2 +SIGNIFIES 1 0 1 1 +SIGNIFIED 1 0 1 1 +SIGNIFICANT 2 0 2 2 +SIGNIFICANCE 2 0 2 2 +SIGNATURE 1 0 1 1 +SIGNALS 2 0 2 2 +SIGNAL 7 0 7 7 +SIGN 4 0 4 4 +SIGHED 1 0 1 1 +SIGH 5 0 5 5 +SIFTED 1 0 1 1 +SIDEWAYS 1 0 1 1 +SIDEWALK 1 0 1 1 +SIDES 4 0 4 4 +SICUT 1 0 1 1 +SICK 2 0 2 2 +SHUTTING 1 0 1 1 +SHUTTERS 2 0 2 2 +SHUTTER 1 0 1 1 +SHUFFLE 1 0 1 1 +SHUDDER 1 0 1 1 +SHRUNK 1 0 1 1 +SHROUDED 1 0 1 1 +SHRINKING 1 0 1 1 +SHRILL 1 0 1 1 +SHRIEKING 1 0 1 1 +SHRIEKED 1 0 1 1 +SHOWS 2 0 2 2 +SHOWING 7 0 7 7 +SHOWED 9 0 9 9 +SHOUTS 2 0 2 2 +SHOUTING 4 0 4 4 +SHOUTED 4 0 4 4 +SHOULDN'T 1 0 1 1 +SHORTLY 5 0 5 5 +SHORTER 1 0 1 1 +SHORT 8 0 8 8 +SHOPS 1 0 1 1 +SHOPPY 1 0 1 1 +SHOPPING 1 0 1 1 +SHOPKEEPERS 1 0 1 1 +SHOP 6 0 6 6 +SHOOTER 1 0 1 1 +SHOOT 6 0 6 6 +SHOOK 5 0 5 5 +SHONE 2 0 2 2 +SHOES 5 0 5 5 +SHOCKED 2 0 2 2 +SHIRTS 1 0 1 1 +SHIRT 1 0 1 1 +SHIRKING 1 0 1 1 +SHIMMERING 1 0 1 1 +SHIFTY 1 0 1 1 +SHIFTED 2 0 2 2 +SHERRY 3 0 3 3 +SHERBURN 1 0 1 1 +SHELLS 4 0 4 4 +SHELF 1 0 1 1 +SHEILA 1 0 1 1 +SHEET 2 0 2 2 +SHEATH 1 0 1 1 +SHE'S 5 0 5 5 +SHAWL 1 0 1 1 +SHARPNESS 1 0 1 1 +SHARPLY 4 0 4 4 +SHARPENED 1 0 1 1 +SHARP 5 0 5 5 +SHAPIA 1 0 1 1 +SHAPES 1 0 1 1 +SHAPED 1 0 1 1 +SHAPE 3 0 3 3 +SHAME 2 0 2 2 +SHAM 1 0 1 1 +SHALT 7 0 7 7 +SHAKING 1 0 1 1 +SHAHRAZAD 3 0 3 3 +SHAFTS 1 0 1 1 +SHADOWS 1 0 
1 1 +SHADOW 5 0 5 5 +SEX 1 0 1 1 +SEVERSON 1 0 1 1 +SEVERELY 1 0 1 1 +SEVENTY 7 0 7 7 +SEVENTEEN 4 0 4 4 +SETTLED 4 0 4 4 +SETTLE 2 0 2 2 +SERVING 1 0 1 1 +SERVICES 1 0 1 1 +SERVICE 15 0 15 15 +SERVED 3 0 3 3 +SERVE 7 0 7 7 +SERVANTS 4 0 4 4 +SERVANT 4 0 4 4 +SERPENTS 2 0 2 2 +SERPENT 1 0 1 1 +SERIOUSLY 3 0 3 3 +SERIOUS 5 0 5 5 +SERENITY 1 0 1 1 +SEPULTURE 1 0 1 1 +SEPULCHRE 1 0 1 1 +SEPARATION 3 0 3 3 +SEPARATING 1 0 1 1 +SEPARATED 3 0 3 3 +SEPARATE 2 0 2 2 +SENTINELS 2 0 2 2 +SENTIMENTAL 1 0 1 1 +SENTIMENT 1 0 1 1 +SENTENCE 2 0 2 2 +SENSIBLY 1 0 1 1 +SENSES 2 0 2 2 +SENSELESS 2 0 2 2 +SENSE 9 0 9 9 +SENSATION 1 0 1 1 +SENATOR 1 0 1 1 +SELL 4 0 4 4 +SELF 6 0 6 6 +SEIZED 3 0 3 3 +SEES 1 0 1 1 +SEEMLY 1 0 1 1 +SEEKING 1 0 1 1 +SECURITY 7 0 7 7 +SECURE 5 0 5 5 +SECRETS 3 0 3 3 +SECRETLY 1 0 1 1 +SECRETARY 2 0 2 2 +SECRET 3 0 3 3 +SECONDS 1 0 1 1 +SECOND 15 0 15 15 +SEASONS 1 0 1 1 +SEASONED 1 0 1 1 +SEARCHINGLY 1 0 1 1 +SEARCHING 1 0 1 1 +SEARCHES 1 0 1 1 +SEARCHED 2 0 2 2 +SEARCH 6 0 6 6 +SEALED 2 0 2 2 +SCUTTLING 1 0 1 1 +SCUM 1 0 1 1 +SCULPTURE 1 0 1 1 +SCULPTOR 3 0 3 3 +SCRUPULOUSLY 1 0 1 1 +SCREW 1 0 1 1 +SCREEN 1 0 1 1 +SCREAM 1 0 1 1 +SCRATCHING 1 0 1 1 +SCRATCH 1 0 1 1 +SCRAPING 1 0 1 1 +SCRAPE 1 0 1 1 +SCOUNDREL 2 0 2 2 +SCOTCH 2 0 2 2 +SCISSORS 5 0 5 5 +SCIENTIFICALLY 1 0 1 1 +SCIENTIFIC 1 0 1 1 +SCHOOLMATE 1 0 1 1 +SCHOOLMASTER 5 0 5 5 +SCHOLARS 1 0 1 1 +SCHEME 1 0 1 1 +SCENES 2 0 2 2 +SCENE 6 0 6 6 +SCATTER 1 0 1 1 +SCARRED 1 0 1 1 +SCARLET 1 0 1 1 +SCARED 1 0 1 1 +SCARCELY 4 0 4 4 +SCARCE 1 0 1 1 +SCANNING 1 0 1 1 +SCALES 1 0 1 1 +SAXON 2 0 2 2 +SAWYER 3 0 3 3 +SAVAGES 1 0 1 1 +SAVAGERY 1 0 1 1 +SAUCER 1 0 1 1 +SATURATED 1 0 1 1 +SATISFY 3 0 3 3 +SATISFIED 3 0 3 3 +SATISFACTORY 2 0 2 2 +SATISFACTORILY 1 0 1 1 +SATISFACTION 6 0 6 6 +SATIATED 1 0 1 1 +SATANICAL 1 0 1 1 +SATAN 1 0 1 1 +SANCTUARY 1 0 1 1 +SAMUEL 1 0 1 1 +SAMARIA 1 0 1 1 +SALUTED 2 0 2 2 +SALTS 1 0 1 1 +SALT 2 0 2 2 +SALOON 1 0 1 1 +SAITH 1 0 1 1 +SAINTS 3 0 3 3 +SAILOR 2 0 2 2 +SAILING 2 0 2 2 +SAILED 1 0 1 1 +SAFETY 2 0 2 2 +SAFELY 2 0 2 2 +SAFE 8 0 8 8 +SADLY 4 0 4 4 +SACRIFICES 3 0 3 3 +SACRIFICE 5 0 5 5 +SACRED 1 0 1 1 +SACRAMENT 1 0 1 1 +SACK 1 0 1 1 +RUSTLING 2 0 2 2 +RUSTLE 1 0 1 1 +RUSSIAN 3 0 3 3 +RUNNING 3 0 3 3 +RUMBLING 1 0 1 1 +RULES 2 0 2 2 +RULE 5 0 5 5 +RUINS 1 0 1 1 +RUINING 1 0 1 1 +RUINED 1 0 1 1 +RUFFIAN 1 0 1 1 +ROYAL 7 0 7 7 +ROW 2 0 2 2 +ROVER 1 0 1 1 +ROUSED 3 0 3 3 +ROUSE 1 0 1 1 +ROT 1 0 1 1 +ROSY 2 0 2 2 +ROSEMARY 2 0 2 2 +ROSA 1 0 1 1 +ROPE 3 0 3 3 +ROLLED 1 0 1 1 +ROCKS 1 0 1 1 +ROCKET 1 0 1 1 +ROBERT 1 0 1 1 +ROBED 1 0 1 1 +ROBBING 1 0 1 1 +ROBBERY 5 0 5 5 +ROBBERS 3 0 3 3 +ROBBERIES 2 0 2 2 +ROBBED 2 0 2 2 +ROASTING 1 0 1 1 +ROASTED 1 0 1 1 +ROAST 1 0 1 1 +ROARED 1 0 1 1 +ROADSIDE 1 0 1 1 +RIVERS 1 0 1 1 +RIVALRY 1 0 1 1 +RIVAL 2 0 2 2 +RISK 3 0 3 3 +RISING 5 0 5 5 +RISEN 1 0 1 1 +RISE 3 0 3 3 +RIP 2 0 2 2 +RINDS 1 0 1 1 +RIGOR 1 0 1 1 +RIGHTEOUSNESS 1 0 1 1 +RIGHTEOUS 1 0 1 1 +RIDICULOUS 1 0 1 1 +RIDDEN 1 0 1 1 +RICHLY 1 0 1 1 +RICHER 1 0 1 1 +RICH 7 0 7 7 +RICE 1 0 1 1 +RHEUMATISM 1 0 1 1 +REWARDS 1 0 1 1 +REWARD 4 0 4 4 +REVOLUTIONARIES 1 0 1 1 +REVIVE 1 0 1 1 +REVIEW 1 0 1 1 +REVERSES 1 0 1 1 +REVENGES 1 0 1 1 +REVENGE 1 0 1 1 +REVELLED 1 0 1 1 +REVELING 1 0 1 1 +REVEL 1 0 1 1 +REVEALED 1 0 1 1 +RETREAT 1 0 1 1 +RETARDED 1 0 1 1 +RETAINED 2 0 2 2 +RESULT 2 0 2 2 +RESTS 1 0 1 1 +RESTRAIN 2 0 2 2 +RESTORED 2 0 2 2 +RESTAURANTS 1 0 1 1 +RESTAURANT 3 0 3 3 +RESPONSIBILITY 2 0 2 2 +RESPONDED 2 0 2 2 +RESPECTS 1 0 1 1 +RESPECTIVE 2 0 2 2 +RESPECTING 1 0 1 1 +RESPECTFULLY 3 0 3 3 
+RESPECTFUL 1 0 1 1 +RESPECTED 1 0 1 1 +RESPECTABLE 3 0 3 3 +RESPECT 4 0 4 4 +RESORTS 1 0 1 1 +RESORTED 1 0 1 1 +RESORT 1 0 1 1 +RESOLVING 1 0 1 1 +RESOLVE 1 0 1 1 +RESOLUTIONS 1 0 1 1 +RESOLUTION 2 0 2 2 +RESISTANCE 3 0 3 3 +RESIGNED 1 0 1 1 +RESIDUE 3 0 3 3 +RESIDENCE 2 0 2 2 +RESIDE 1 0 1 1 +RESERVOIR 1 0 1 1 +RESERVE 1 0 1 1 +RESEMBLING 1 0 1 1 +RESEMBLES 1 0 1 1 +RESEMBLE 1 0 1 1 +RESEARCHES 1 0 1 1 +REQUIRING 1 0 1 1 +REQUIRES 1 0 1 1 +REQUIRED 3 0 3 3 +REQUESTED 2 0 2 2 +REPUTATIONS 1 0 1 1 +REPROACH 2 0 2 2 +REPRESENTED 3 0 3 3 +REPORTED 1 0 1 1 +REPORT 2 0 2 2 +REPEATING 1 0 1 1 +REPAST 1 0 1 1 +REPARATION 1 0 1 1 +REPAIRED 2 0 2 2 +REPAIR 1 0 1 1 +RENOUNCE 3 0 3 3 +RENEWED 2 0 2 2 +RENDERS 1 0 1 1 +RENDERED 1 0 1 1 +RENDER 1 0 1 1 +REMOVED 3 0 3 3 +REMOVE 3 0 3 3 +REMOVAL 1 0 1 1 +REMOTE 1 0 1 1 +REMORSEFUL 1 0 1 1 +REMONSTRANCE 1 0 1 1 +REMNANTS 1 0 1 1 +REMNANT 1 0 1 1 +REMINISCENCES 1 0 1 1 +REMEMBERING 2 0 2 2 +REMEMBERED 4 0 4 4 +REMEDY 4 0 4 4 +REMARKS 1 0 1 1 +REMARKED 10 0 10 10 +REMARKABLY 1 0 1 1 +REMARKABLE 2 0 2 2 +REMARK 6 0 6 6 +RELYING 1 0 1 1 +RELUCTANTLY 2 0 2 2 +RELUCTANCE 1 0 1 1 +RELINQUISH 1 0 1 1 +RELIGIONS 1 0 1 1 +RELIGION 11 0 11 11 +RELIEVED 1 0 1 1 +RELIEF 6 0 6 6 +RELIED 1 0 1 1 +RELIC 1 0 1 1 +RELEVANT 1 0 1 1 +RELEASE 2 0 2 2 +RELAXING 1 0 1 1 +RELATIONS 2 0 2 2 +REJOINED 1 0 1 1 +REJOINDER 1 0 1 1 +REJOICING 1 0 1 1 +REJOICED 3 0 3 3 +REGRETTING 1 0 1 1 +REGISTER 1 0 1 1 +REGION 1 0 1 1 +REGIMENTS 2 0 2 2 +REGARDED 2 0 2 2 +REGARD 2 0 2 2 +REGAINED 1 0 1 1 +REGAIN 2 0 2 2 +REFUTATION 1 0 1 1 +REFUSING 2 0 2 2 +REFUSES 2 0 2 2 +REFUSED 1 0 1 1 +REFUGE 3 0 3 3 +REFRESHMENT 1 0 1 1 +REFRAIN 2 0 2 2 +REFORMS 1 0 1 1 +REFLECTIVE 1 0 1 1 +REFLECTIONS 1 0 1 1 +REFLECTION 3 0 3 3 +REFINED 2 0 2 2 +REFERRED 2 0 2 2 +REFERENCE 3 0 3 3 +REFER 1 0 1 1 +REEF 1 0 1 1 +REDUCED 2 0 2 2 +REDOUBLING 1 0 1 1 +REDEMPTION 1 0 1 1 +REDEEMING 1 0 1 1 +RECTOR 1 0 1 1 +RECRUITS 1 0 1 1 +RECOVERY 1 0 1 1 +RECOVERED 1 0 1 1 +RECOVER 3 0 3 3 +RECOURSE 1 0 1 1 +RECOUNTED 1 0 1 1 +RECORDS 1 0 1 1 +RECORD 2 0 2 2 +RECOMPENSE 2 0 2 2 +RECOMMEND 2 0 2 2 +RECOLLECTING 1 0 1 1 +RECOLLECTED 1 0 1 1 +RECOLLECT 1 0 1 1 +RECOILED 1 0 1 1 +RECOGNIZED 5 0 5 5 +RECOGNITION 2 0 2 2 +RECKON 4 0 4 4 +RECITING 1 0 1 1 +RECITER 2 0 2 2 +RECITED 3 0 3 3 +RECITE 2 0 2 2 +RECIPE 2 0 2 2 +RECEPTION 1 0 1 1 +RECENTLY 1 0 1 1 +RECEIVED 9 0 9 9 +RECEIVE 4 0 4 4 +RECEIPT 1 0 1 1 +RECEDING 1 0 1 1 +RECALLING 1 0 1 1 +RECALLED 1 0 1 1 +RECALL 1 0 1 1 +REBECCA 1 0 1 1 +REASONABLE 2 0 2 2 +REASON 11 0 11 11 +REAR 1 0 1 1 +REAPING 1 0 1 1 +REALM 1 0 1 1 +REALLY 18 0 18 18 +REALIZE 1 0 1 1 +REALITY 3 0 3 3 +REAL 3 0 3 3 +READERS 1 0 1 1 +READER 1 0 1 1 +REACHING 2 0 2 2 +REACH 4 0 4 4 +RAWNESS 1 0 1 1 +RAVING 1 0 1 1 +RAVENING 1 0 1 1 +RAVAGED 1 0 1 1 +RATTLED 1 0 1 1 +RATTLE 1 0 1 1 +RATCHFORD 1 0 1 1 +RASHID 1 0 1 1 +RARE 1 0 1 1 +RAPIDLY 4 0 4 4 +RAP 1 0 1 1 +RANKS 1 0 1 1 +RAMBLER 1 0 1 1 +RAMBLE 1 0 1 1 +RAM'S 1 0 1 1 +RAISING 2 0 2 2 +RAINS 1 0 1 1 +RAINBOWS 1 0 1 1 +RAILROAD 1 0 1 1 +RAIDERS 1 0 1 1 +RAFTER 1 0 1 1 +RAFT 6 0 6 6 +RADICALS 1 0 1 1 +RADIANT 1 0 1 1 +RACKETS 1 0 1 1 +RACK 1 0 1 1 +RACE 2 0 2 2 +RABBIT 5 0 5 5 +R 1 0 1 1 +QUOTED 1 0 1 1 +QUIXOTE 5 0 5 5 +QUIVERED 1 0 1 1 +QUIVER 1 0 1 1 +QUIT 1 0 1 1 +QUIETLY 4 0 4 4 +QUIET 1 0 1 1 +QUICKER 3 0 3 3 +QUICKENETH 1 0 1 1 +QUESTIONS 6 0 6 6 +QUESTIONED 1 0 1 1 +QUESTIONABLE 1 0 1 1 +QUESTION 15 0 15 15 +QUENCH 1 0 1 1 +QUEER 4 0 4 4 +QUEENS 1 0 1 1 +QUEEN'S 1 0 1 1 +QUARTERS 3 0 3 3 +QUARTER 7 0 7 7 +QUART 1 0 1 1 +QUARRELS 1 0 1 1 +QUANTITY 3 
0 3 3 +QUALITY 1 0 1 1 +PUZZLED 2 0 2 2 +PUSHING 1 0 1 1 +PUSHED 1 0 1 1 +PURSUIT 1 0 1 1 +PURSUED 3 0 3 3 +PURSUANCE 1 0 1 1 +PURPOSES 1 0 1 1 +PURPOSE 5 0 5 5 +PURITAN 2 0 2 2 +PURIFY 1 0 1 1 +PURE 4 0 4 4 +PURCHASED 1 0 1 1 +PUNISHMENTS 1 0 1 1 +PUNISHMENT 1 0 1 1 +PUNISHES 1 0 1 1 +PUNISHED 1 0 1 1 +PUNISH 1 0 1 1 +PUNCTUALITY 1 0 1 1 +PUNCTILIOUS 1 0 1 1 +PULSE 1 0 1 1 +PULP 1 0 1 1 +PUFFING 1 0 1 1 +PUFFED 1 0 1 1 +PUDDINGS 1 0 1 1 +PUBLISHER 1 0 1 1 +PUBLIC 5 0 5 5 +PRYING 2 0 2 2 +PRUDENCE 4 0 4 4 +PROVOKE 1 0 1 1 +PROVISION 1 0 1 1 +PROVINCIAL 1 0 1 1 +PROVINCE 4 0 4 4 +PROVIDENCES 1 0 1 1 +PROVIDENCE 1 0 1 1 +PROVIDED 2 0 2 2 +PROVIDE 1 0 1 1 +PROVERBIAL 1 0 1 1 +PROVEN 1 0 1 1 +PROUD 2 0 2 2 +PROTESTED 2 0 2 2 +PROTECTS 1 0 1 1 +PROTECTORS 1 0 1 1 +PROTECTION 2 0 2 2 +PROTECT 2 0 2 2 +PROSPEROUS 1 0 1 1 +PROPRIETORS 1 0 1 1 +PROPOSITION 1 0 1 1 +PROPOSES 1 0 1 1 +PROPOSED 3 0 3 3 +PROPOSALS 1 0 1 1 +PROPORTION 3 0 3 3 +PROPHET 1 0 1 1 +PROPERTY 2 0 2 2 +PROPERLY 2 0 2 2 +PROOF 5 0 5 5 +PRONOUNCED 1 0 1 1 +PROMPTLY 3 0 3 3 +PROMPT 1 0 1 1 +PROMISING 1 0 1 1 +PROMISED 7 0 7 7 +PROMISE 4 0 4 4 +PROLONGED 1 0 1 1 +PROJECT 1 0 1 1 +PROHIBITED 1 0 1 1 +PROHIBIT 1 0 1 1 +PROGRESS 1 0 1 1 +PROGRAMME 1 0 1 1 +PROFUSION 1 0 1 1 +PROFOUND 1 0 1 1 +PROFLIGATE 1 0 1 1 +PROFITABLY 1 0 1 1 +PROFITABLE 1 0 1 1 +PROFIT 2 0 2 2 +PROFESSIONAL 2 0 2 2 +PROFANITY 1 0 1 1 +PROFANE 1 0 1 1 +PRODUCTIONS 1 0 1 1 +PRODUCING 1 0 1 1 +PROCURE 2 0 2 2 +PROCOPIUS 1 0 1 1 +PROCESSIONS 1 0 1 1 +PROCESSION 1 0 1 1 +PROCESS 6 0 6 6 +PROCEEDINGS 2 0 2 2 +PROCEEDED 1 0 1 1 +PROCEED 2 0 2 2 +PROCEDURE 1 0 1 1 +PROBLEMS 1 0 1 1 +PROBLEM 1 0 1 1 +PROBABLY 7 0 7 7 +PROBABLE 1 0 1 1 +PROBABILITY 1 0 1 1 +PRIVILEGE 1 0 1 1 +PRIVATE 6 0 6 6 +PRIVACY 1 0 1 1 +PRISONERS 3 0 3 3 +PRISONER 13 0 13 13 +PRINT 1 0 1 1 +PRINCIPLE 3 0 3 3 +PRINCIPALLY 1 0 1 1 +PRINCIPAL 1 0 1 1 +PRINCESS 11 0 11 11 +PRINCES 1 0 1 1 +PRINCE'S 2 0 2 2 +PRINCE 7 0 7 7 +PRIDE 2 0 2 2 +PREVIOUSLY 1 0 1 1 +PREVENTED 1 0 1 1 +PREVENT 1 0 1 1 +PREVAILING 1 0 1 1 +PREVAILED 1 0 1 1 +PRETTILY 1 0 1 1 +PRETTIEST 1 0 1 1 +PRETEXT 1 0 1 1 +PRETENDED 1 0 1 1 +PRETEND 3 0 3 3 +PRESUMPTUOUS 1 0 1 1 +PRESSURE 4 0 4 4 +PRESSING 3 0 3 3 +PRESSED 2 0 2 2 +PRESS 2 0 2 2 +PRESERVING 5 0 5 5 +PRESERVES 3 0 3 3 +PRESERVED 2 0 2 2 +PRESENTS 4 0 4 4 +PRESENTLY 12 0 12 12 +PRESENTING 1 0 1 1 +PRESENTED 3 0 3 3 +PRESENCE 9 0 9 9 +PREPARING 3 0 3 3 +PREPARED 7 0 7 7 +PREPARE 1 0 1 1 +PREPARATIONS 5 0 5 5 +PREOCCUPIED 1 0 1 1 +PREMISES 1 0 1 1 +PRELIMINARIES 1 0 1 1 +PREFERRED 1 0 1 1 +PREFER 2 0 2 2 +PREDICTIONS 1 0 1 1 +PRECIPITANCY 1 0 1 1 +PRECIOUS 2 0 2 2 +PRECINCT 1 0 1 1 +PRECEPTORY 2 0 2 2 +PRECAUTION 1 0 1 1 +PREACHING 2 0 2 2 +PREACHER 1 0 1 1 +PRAYED 3 0 3 3 +PRAISES 1 0 1 1 +PRAISE 3 0 3 3 +PRACTISE 1 0 1 1 +PRACTICED 1 0 1 1 +PRACTICE 1 0 1 1 +PRACTICALLY 2 0 2 2 +POWERS 3 0 3 3 +POWERLESS 1 0 1 1 +POWERFUL 3 0 3 3 +POWER 27 0 27 27 +POWDERED 1 0 1 1 +POURS 1 0 1 1 +POURING 1 0 1 1 +POUR 1 0 1 1 +POUND 3 0 3 3 +POUNCE 1 0 1 1 +POTS 1 0 1 1 +POTASSIC 1 0 1 1 +POTASH 1 0 1 1 +POSTERS 1 0 1 1 +POSSIBLY 3 0 3 3 +POSSIBLE 12 0 12 12 +POSSIBILITY 2 0 2 2 +POSSESSION 2 0 2 2 +POSSESSES 1 0 1 1 +POSSESSED 5 0 5 5 +POSSESS 1 0 1 1 +POSSE 1 0 1 1 +POSITIVELY 3 0 3 3 +POSITIVE 1 0 1 1 +PORTO 1 0 1 1 +PORTMANTEAU 1 0 1 1 +PORTIONS 2 0 2 2 +PORTION 2 0 2 2 +PORTER 2 0 2 2 +POPULATION 1 0 1 1 +POPULARITY 1 0 1 1 +POPULAR 1 0 1 1 +POPPED 1 0 1 1 +POPES 2 0 2 2 +POPE'S 1 0 1 1 +POPE 1 0 1 1 +POP 1 0 1 1 +PONY 1 0 1 1 +POLLY'S 3 0 3 3 +POLLY 19 0 19 19 +POLITICIANS 1 0 1 1 
+POLITICAL 3 0 3 3 +POLICE 5 0 5 5 +POKING 1 0 1 1 +POKED 1 0 1 1 +POISONS 1 0 1 1 +POISONING 3 0 3 3 +POINTING 2 0 2 2 +POINTED 3 0 3 3 +POETRY 2 0 2 2 +POCKETS 1 0 1 1 +POCKETED 1 0 1 1 +PO 1 0 1 1 +PLUNGED 3 0 3 3 +PLUNDERED 1 0 1 1 +PLUG 1 0 1 1 +PLUCKING 2 0 2 2 +PLUCK 1 0 1 1 +PLEDGED 1 0 1 1 +PLEDGE 1 0 1 1 +PLEASURES 2 0 2 2 +PLEASING 2 0 2 2 +PLEASED 4 0 4 4 +PLEASANTER 1 0 1 1 +PLEADINGS 1 0 1 1 +PLEAD 1 0 1 1 +PLAYERS 1 0 1 1 +PLATTERS 1 0 1 1 +PLATFORM 2 0 2 2 +PLATE 2 0 2 2 +PLASTER 1 0 1 1 +PLANTS 1 0 1 1 +PLANTED 1 0 1 1 +PLANTATIONS 2 0 2 2 +PLANS 5 0 5 5 +PLANNING 2 0 2 2 +PLANNED 1 0 1 1 +PLANKS 2 0 2 2 +PLANK 3 0 3 3 +PLANETS 1 0 1 1 +PLAINLY 3 0 3 3 +PLAGUE 2 0 2 2 +PLACING 2 0 2 2 +PITY 4 0 4 4 +PITIFULNESS 1 0 1 1 +PIT 1 0 1 1 +PISTOLS 1 0 1 1 +PIPING 1 0 1 1 +PIOUS 1 0 1 1 +PINT 1 0 1 1 +PINK 1 0 1 1 +PINED 1 0 1 1 +PINCHED 2 0 2 2 +PINCH 1 0 1 1 +PIN 1 0 1 1 +PILLOWED 1 0 1 1 +PILLOW 1 0 1 1 +PILED 1 0 1 1 +PILE 1 0 1 1 +PIG 1 0 1 1 +PIERCED 1 0 1 1 +PIECES 9 0 9 9 +PICTURES 3 0 3 3 +PICKET 1 0 1 1 +PIAZZA 1 0 1 1 +PHYSIOLOGICAL 1 0 1 1 +PHYSICIAN 3 0 3 3 +PHYSICAL 2 0 2 2 +PHRASE 1 0 1 1 +PHONE 1 0 1 1 +PHLEGMATIC 1 0 1 1 +PHILOSOPHERS 1 0 1 1 +PHELPS 2 0 2 2 +PHARMACY 1 0 1 1 +PEYTON 1 0 1 1 +PETITIONS 1 0 1 1 +PETERS 1 0 1 1 +PET 2 0 2 2 +PERVADED 1 0 1 1 +PERUSING 1 0 1 1 +PERUSAL 1 0 1 1 +PERSUADED 1 0 1 1 +PERSUADE 1 0 1 1 +PERSPECTIVE 1 0 1 1 +PERSONS 8 0 8 8 +PERSONALLY 5 0 5 5 +PERSONAL 2 0 2 2 +PERSONAGE 1 0 1 1 +PERSON'S 1 0 1 1 +PERSON 16 0 16 16 +PERSISTED 3 0 3 3 +PERSIST 1 0 1 1 +PERSEUS 2 0 2 2 +PERSECUTORS 1 0 1 1 +PERSECUTION 1 0 1 1 +PERSECUTED 1 0 1 1 +PERSECUTE 2 0 2 2 +PERPLEXITY 1 0 1 1 +PERPETUALLY 1 0 1 1 +PERMITTED 5 0 5 5 +PERMIT 4 0 4 4 +PERMISSION 2 0 2 2 +PERMANENT 1 0 1 1 +PERISHED 3 0 3 3 +PERISH 4 0 4 4 +PERIODS 1 0 1 1 +PERIOD 2 0 2 2 +PERILS 1 0 1 1 +PERFORMANCES 1 0 1 1 +PERFORM 2 0 2 2 +PERFECTION 2 0 2 2 +PERFECT 5 0 5 5 +PERCH 1 0 1 1 +PERCEPTIBLE 1 0 1 1 +PERCEIVED 7 0 7 7 +PERCEIVE 1 0 1 1 +PER 1 0 1 1 +PEPPINO 1 0 1 1 +PEPPER 2 0 2 2 +PEOPLE'S 3 0 3 3 +PEOPLE 44 0 44 44 +PENNY 1 0 1 1 +PENETRATING 1 0 1 1 +PENETRATE 1 0 1 1 +PENCE 1 0 1 1 +PENALTY 1 0 1 1 +PELT 1 0 1 1 +PEERS 1 0 1 1 +PEERED 1 0 1 1 +PEDESTAL 1 0 1 1 +PECULIAR 2 0 2 2 +PEBBLES 3 0 3 3 +PEASANT 3 0 3 3 +PEARLS 1 0 1 1 +PEALS 1 0 1 1 +PEAL 1 0 1 1 +PEACEFUL 2 0 2 2 +PEABODY 1 0 1 1 +PAYS 1 0 1 1 +PAYING 1 0 1 1 +PAVILION 1 0 1 1 +PAVEMENT 1 0 1 1 +PAUSE 4 0 4 4 +PATRIOTS 1 0 1 1 +PATRIOTISM 1 0 1 1 +PATRIOT 1 0 1 1 +PATRIMONY 1 0 1 1 +PATRIARCHS 1 0 1 1 +PATIENTLY 1 0 1 1 +PATIENT'S 1 0 1 1 +PATIENT 2 0 2 2 +PATHS 1 0 1 1 +PASTE 1 0 1 1 +PASSION 5 0 5 5 +PASSERS 2 0 2 2 +PASSENGERS 4 0 4 4 +PASSAGE 2 0 2 2 +PASSABLE 1 0 1 1 +PARTY 13 0 13 13 +PARTNER 1 0 1 1 +PARTISANS 1 0 1 1 +PARTIES 1 0 1 1 +PARTICULARS 1 0 1 1 +PARTICULARLY 5 0 5 5 +PARTICULAR 4 0 4 4 +PARTICLE 2 0 2 2 +PARTICIPATION 1 0 1 1 +PARTICIPANTS 1 0 1 1 +PARTAKE 1 0 1 1 +PARSLEY 1 0 1 1 +PARLIAMENTARY 1 0 1 1 +PARK 1 0 1 1 +PARISIAN 1 0 1 1 +PARIS 5 0 5 5 +PARENTS 2 0 2 2 +PARDONABLE 1 0 1 1 +PARCEL 2 0 2 2 +PARASOL 1 0 1 1 +PARALLEL 1 0 1 1 +PARADISE 1 0 1 1 +PAPERS 4 0 4 4 +PAPER 6 0 6 6 +PAPA 4 0 4 4 +PANZA 1 0 1 1 +PANTING 2 0 2 2 +PANS 1 0 1 1 +PANEL 1 0 1 1 +PAN 3 0 3 3 +PALINGS 1 0 1 1 +PALESTINE 2 0 2 2 +PALER 1 0 1 1 +PAINTING 1 0 1 1 +PAINT 1 0 1 1 +PAINFULLY 1 0 1 1 +PAINFUL 5 0 5 5 +PAINED 1 0 1 1 +PAIN 3 0 3 3 +PAID 9 0 9 9 +PAGES 4 0 4 4 +PAGE 2 0 2 2 +PADDLE 2 0 2 2 +PACKED 1 0 1 1 +PACIFY 1 0 1 1 +PACIFIC 2 0 2 2 +OXEN 1 0 1 1 +OX 1 0 1 1 +OVERWHELMED 2 0 2 2 +OVERTURNING 1 0 1 1 
+OVERTHREW 1 0 1 1 +OVERTAKEN 1 0 1 1 +OVERLY 1 0 1 1 +OVERHEAD 2 0 2 2 +OVERCOME 2 0 2 2 +OVERCOAT 1 0 1 1 +OVEN 2 0 2 2 +OVAL 1 0 1 1 +OUTWARD 1 0 1 1 +OUTSIDE 8 0 8 8 +OUTLAWS 1 0 1 1 +OUTFIT 1 0 1 1 +OUTDO 1 0 1 1 +OURSELVES 4 0 4 4 +OTTO 1 0 1 1 +OTTER 1 0 1 1 +OTHO 1 0 1 1 +OTHERWISE 1 0 1 1 +OSTRICH 1 0 1 1 +ORTHODOX 1 0 1 1 +ORPHAN 1 0 1 1 +ORNERY 1 0 1 1 +ORNERIEST 1 0 1 1 +ORNAMENTED 1 0 1 1 +ORNAMENTAL 1 0 1 1 +ORLEANS 1 0 1 1 +ORISON 1 0 1 1 +ORIGIN 1 0 1 1 +ORGANIZATION 3 0 3 3 +ORDINARY 1 0 1 1 +ORDERED 5 0 5 5 +ORDEAL 1 0 1 1 +ORDAINED 1 0 1 1 +ORCHARDS 1 0 1 1 +ORANGE 1 0 1 1 +ORACLE 2 0 2 2 +OPTIC 1 0 1 1 +OPPRESSOR 1 0 1 1 +OPPRESSION 3 0 3 3 +OPPRESSED 2 0 2 2 +OPPOSITE 3 0 3 3 +OPPOSE 1 0 1 1 +OPPORTUNITY 4 0 4 4 +OPERATIONS 1 0 1 1 +OPERATED 1 0 1 1 +OPERA 1 0 1 1 +OPENS 3 0 3 3 +OPENING 7 0 7 7 +OPAQUE 1 0 1 1 +ONWARDS 1 0 1 1 +ONWARD 1 0 1 1 +ONION 2 0 2 2 +ONESELF 1 0 1 1 +OMITTING 1 0 1 1 +OMAR 2 0 2 2 +OLDISH 1 0 1 1 +OLDER 1 0 1 1 +OGRE'S 1 0 1 1 +OGRE 3 0 3 3 +OGLING 1 0 1 1 +OFFICIAL 1 0 1 1 +OFFICE 4 0 4 4 +OFFERS 3 0 3 3 +OFFERINGS 1 0 1 1 +OFFERING 1 0 1 1 +OFFERED 3 0 3 3 +OFFER 2 0 2 2 +OFFENSIVE 1 0 1 1 +OFFEND 2 0 2 2 +OCEAN 1 0 1 1 +OCCURRED 5 0 5 5 +OCCUR 1 0 1 1 +OCCUPY 3 0 3 3 +OCCASIONS 2 0 2 2 +OCCASION 5 0 5 5 +OBVIOUSLY 1 0 1 1 +OBVIOUS 1 0 1 1 +OBTAINED 1 0 1 1 +OBTAIN 3 0 3 3 +OBSTRUCTION 1 0 1 1 +OBSTINATE 1 0 1 1 +OBSTINACY 1 0 1 1 +OBSERVER 1 0 1 1 +OBSERVE 1 0 1 1 +OBSERVATIONS 2 0 2 2 +OBSERVATION 1 0 1 1 +OBSERVANT 1 0 1 1 +OBLONG 1 0 1 1 +OBLIGED 3 0 3 3 +OBLIGATION 3 0 3 3 +OBJECTS 2 0 2 2 +OBJECTION 2 0 2 2 +OBEYING 1 0 1 1 +OBEYED 3 0 3 3 +OBEDIENTLY 1 0 1 1 +OBEDIENT 2 0 2 2 +OATH 4 0 4 4 +OAR 1 0 1 1 +O'CLOCK 9 0 9 9 +NUTRITION 2 0 2 2 +NUN 1 0 1 1 +NUMBERED 1 0 1 1 +NUMBER 5 0 5 5 +NUBIAN 1 0 1 1 +NOWHERE 2 0 2 2 +NOTWITHSTANDING 1 0 1 1 +NOTORIOUS 1 0 1 1 +NOTION 1 0 1 1 +NOTHING 28 0 28 28 +NOTES 1 0 1 1 +NOTED 2 0 2 2 +NOTABLES 1 0 1 1 +NOSE 3 0 3 3 +NORTHWARD 1 0 1 1 +NORTHERN 1 0 1 1 +NORMAN 2 0 2 2 +NOON 1 0 1 1 +NONSENSE 2 0 2 2 +NODDING 1 0 1 1 +NODDED 3 0 3 3 +NOD 2 0 2 2 +NOBODY 6 0 6 6 +NOBLEMEN 1 0 1 1 +NOBLEMAN 1 0 1 1 +NOBLE 6 0 6 6 +NOBILITY 1 0 1 1 +NIPPER 1 0 1 1 +NINTH 1 0 1 1 +NINEVEH 1 0 1 1 +NINETY 2 0 2 2 +NINETEENTH 1 0 1 1 +NINETEEN 1 0 1 1 +NIMBLENESS 1 0 1 1 +NIKOLAY 1 0 1 1 +NIGHTS 2 0 2 2 +NICOTINE 1 0 1 1 +NICK 1 0 1 1 +NICHOLAS 1 0 1 1 +NICETIES 1 0 1 1 +NICE 3 0 3 3 +NEVERTHELESS 3 0 3 3 +NEVER 61 0 61 61 +NERVOUSNESS 2 0 2 2 +NERVOUSLY 1 0 1 1 +NERVOUS 2 0 2 2 +NEMESIS 3 0 3 3 +NEITHER 8 0 8 8 +NEIGHBOURHOOD 1 0 1 1 +NEGRO 2 0 2 2 +NEGLECTING 1 0 1 1 +NEGLECTED 2 0 2 2 +NEGLECT 1 0 1 1 +NEGATIVE 1 0 1 1 +NEEDN'T 1 0 1 1 +NED 3 0 3 3 +NECK 1 0 1 1 +NECESSITY 7 0 7 7 +NECESSARY 10 0 10 10 +NECESSARILY 2 0 2 2 +NEATLY 2 0 2 2 +NEARLY 2 0 2 2 +NAY 2 0 2 2 +NAVEL 1 0 1 1 +NAUSEA 1 0 1 1 +NATURED 2 0 2 2 +NATURE 11 0 11 11 +NATTY 1 0 1 1 +NATIVE 6 0 6 6 +NATIONS 3 0 3 3 +NARROWNESS 1 0 1 1 +NARROWER 1 0 1 1 +NARROW 5 0 5 5 +NARRATOR 1 0 1 1 +NARRATE 1 0 1 1 +NARCOTIC 1 0 1 1 +NAPKINS 1 0 1 1 +NAPKIN 1 0 1 1 +NAMES 5 0 5 5 +NAMED 5 0 5 5 +NAME'S 1 0 1 1 +NAME 21 0 21 21 +NAILS 2 0 2 2 +NAILED 2 0 2 2 +NAIL 5 0 5 5 +MYSELF 27 0 27 27 +MUTTERED 5 0 5 5 +MUSICAL 1 0 1 1 +MUSIC 2 0 2 2 +MURMURED 1 0 1 1 +MURMUR 1 0 1 1 +MURDERED 1 0 1 1 +MURDER 5 0 5 5 +MULTITUDE 1 0 1 1 +MULE 1 0 1 1 +MUFFLED 1 0 1 1 +MUDDY 1 0 1 1 +MUCOUS 1 0 1 1 +MOVING 6 0 6 6 +MOVEMENTS 2 0 2 2 +MOVED 7 0 7 7 +MOVE 1 0 1 1 +MOURNFULLY 1 0 1 1 +MOUNTED 2 0 2 2 +MOUNTAINS 1 0 1 1 +MOUNTAIN 1 0 1 1 +MOTLEY 1 0 1 1 +MOTIVES 1 0 1 1 +MOTIVE 1 0 
1 1 +MOTHERS 5 0 5 5 +MOTHER'S 3 0 3 3 +MOSTLY 2 0 2 2 +MORTIS 1 0 1 1 +MORTAR 1 0 1 1 +MORTAL 1 0 1 1 +MORPHINE 2 0 2 2 +MOREOVER 1 0 1 1 +MORE'N 1 0 1 1 +MORCERF 3 0 3 3 +MORALS 1 0 1 1 +MORAL 8 0 8 8 +MOPPED 1 0 1 1 +MOORED 1 0 1 1 +MOONFLOWERS 1 0 1 1 +MOON 2 0 2 2 +MOOD 2 0 2 2 +MONTH 1 0 1 1 +MONTESQUIEU 1 0 1 1 +MONSTROUS 1 0 1 1 +MONSTERS 2 0 2 2 +MONOTONOUS 1 0 1 1 +MONKEY 3 0 3 3 +MONEY 16 0 16 16 +MONDAY 2 0 2 2 +MONASTERY 1 0 1 1 +MONARCH 2 0 2 2 +MOMENTS 6 0 6 6 +MOMENT'S 1 0 1 1 +MOLESTED 1 0 1 1 +MOHAMMED 1 0 1 1 +MODEST 1 0 1 1 +MODERN 2 0 2 2 +MODERATE 2 0 2 2 +MODEL 2 0 2 2 +MOCKERY 1 0 1 1 +MOB 1 0 1 1 +MOANING 2 0 2 2 +MIXING 1 0 1 1 +MISTRUST 1 0 1 1 +MISTRESSES 1 0 1 1 +MISTRESS 6 0 6 6 +MISTAKE 2 0 2 2 +MIST 2 0 2 2 +MISSOURI 1 0 1 1 +MISSISSIPPIAN 1 0 1 1 +MISSISSIPPI 1 0 1 1 +MISSING 3 0 3 3 +MISFORTUNE 1 0 1 1 +MISERY 3 0 3 3 +MISERABLE 2 0 2 2 +MISCONDUCT 1 0 1 1 +MISCONCEPTION 1 0 1 1 +MISCHIEF 1 0 1 1 +MISAPPREHENSION 1 0 1 1 +MISANTHROPY 1 0 1 1 +MIRTH 2 0 2 2 +MIRACULOUS 1 0 1 1 +MIRACLES 3 0 3 3 +MIRABELLE 2 0 2 2 +MINUTES 11 0 11 11 +MINUTE 6 0 6 6 +MINOR 1 0 1 1 +MINNESOTA 1 0 1 1 +MINISTERED 1 0 1 1 +MINIMS 1 0 1 1 +MINIATURE 1 0 1 1 +MINGLED 2 0 2 2 +MINDS 1 0 1 1 +MINDED 1 0 1 1 +MINCE 1 0 1 1 +MILTON 4 0 4 4 +MILLIONS 1 0 1 1 +MILLER'S 1 0 1 1 +MILLER 4 0 4 4 +MILL 1 0 1 1 +MILITARY 5 0 5 5 +MIKE'S 1 0 1 1 +MIGHTY 3 0 3 3 +MIGHTINESS 1 0 1 1 +MIDST 3 0 3 3 +MIDSHIPMAN 1 0 1 1 +MIDNIGHT 3 0 3 3 +MIDDY'S 1 0 1 1 +MIDDLE 5 0 5 5 +MICROSCOPIC 1 0 1 1 +MICROBE 1 0 1 1 +METALLIC 1 0 1 1 +MESTIENNE'S 1 0 1 1 +MESSAGE 1 0 1 1 +MESELF 1 0 1 1 +MERITS 1 0 1 1 +MERE 3 0 3 3 +MERCY 5 0 5 5 +MERCURY 2 0 2 2 +MERCIFUL 1 0 1 1 +MERCIES 1 0 1 1 +MERCHANTS 6 0 6 6 +MERCHANT 3 0 3 3 +MENTION 1 0 1 1 +MENTALLY 3 0 3 3 +MENACING 1 0 1 1 +MEMORY 4 0 4 4 +MEMORIAL 1 0 1 1 +MEMBRANE 1 0 1 1 +MEMBERS 7 0 7 7 +MEMBER 1 0 1 1 +MELANCHOLY 1 0 1 1 +MEETING 4 0 4 4 +MEDIUMS 1 0 1 1 +MEDITATION 1 0 1 1 +MEDITATED 1 0 1 1 +MEDICINE 1 0 1 1 +MEDDLE 1 0 1 1 +MEDALS 1 0 1 1 +MEDAL 1 0 1 1 +MECHANICALLY 1 0 1 1 +MECHANICAL 1 0 1 1 +MEASURABLE 1 0 1 1 +MEANWHILE 4 0 4 4 +MEANTIME 2 0 2 2 +MEANT 10 0 10 10 +MEANS 23 0 23 23 +MEANING 2 0 2 2 +MAYOR 1 0 1 1 +MAYBE 4 0 4 4 +MATTOCK 1 0 1 1 +MATTERED 1 0 1 1 +MATERIALLY 1 0 1 1 +MATERIAL 1 0 1 1 +MATCH 1 0 1 1 +MASTERY 1 0 1 1 +MASTERS 1 0 1 1 +MASTERPIECE 1 0 1 1 +MASTERED 1 0 1 1 +MASTER'S 1 0 1 1 +MAST 1 0 1 1 +MASON'S 1 0 1 1 +MASON 1 0 1 1 +MARY'S 2 0 2 2 +MARY 2 0 2 2 +MARVELLOUS 4 0 4 4 +MARTYR 1 0 1 1 +MARTIN 1 0 1 1 +MARSPORT 1 0 1 1 +MARSHAL'S 1 0 1 1 +MARS 3 0 3 3 +MARRY 3 0 3 3 +MARRIED 4 0 4 4 +MARLBOROUGH'S 1 0 1 1 +MARKS 1 0 1 1 +MARKING 1 0 1 1 +MARIUS 6 0 6 6 +MARIA 1 0 1 1 +MARGUERITE 11 0 11 11 +MARGINAL 1 0 1 1 +MARGARET'S 3 0 3 3 +MARGARET 14 0 14 14 +MARE 1 0 1 1 +MARCH 2 0 2 2 +MARBLE 2 0 2 2 +MAR 1 0 1 1 +MANTELPIECE 1 0 1 1 +MANNER 9 0 9 9 +MANIFESTATION 1 0 1 1 +MANCHESTER 1 0 1 1 +MANAGE 1 0 1 1 +MAMMOTH 1 0 1 1 +MALICE 1 0 1 1 +MALEVOLENT 1 0 1 1 +MALADY 1 0 1 1 +MAKER 1 0 1 1 +MAJOR 5 0 5 5 +MAJESTY 2 0 2 2 +MAINTAINED 1 0 1 1 +MAINTAIN 1 0 1 1 +MAINLY 1 0 1 1 +MAIDEN 3 0 3 3 +MAHOGANY 2 0 2 2 +MAGNIFYING 1 0 1 1 +MAGNIFIES 1 0 1 1 +MAGNIFICENT 2 0 2 2 +MAGNIFICENCE 1 0 1 1 +MAGNANIMITY 1 0 1 1 +MAGICIAN 2 0 2 2 +MAGICAL 1 0 1 1 +MAGIC 1 0 1 1 +MAGAZINE 1 0 1 1 +MADRID 1 0 1 1 +MADNESS 2 0 2 2 +MADELEINE 3 0 3 3 +MADAME 1 0 1 1 +MACHINES 1 0 1 1 +MACHINERY 1 0 1 1 +LYNCHES 1 0 1 1 +LUTHER 1 0 1 1 +LUSTILY 1 0 1 1 +LURKING 1 0 1 1 +LUMP 1 0 1 1 +LUCRATIVE 1 0 1 1 +LUCKY 2 0 2 2 +LUCKLESS 1 0 1 1 +LUCAS 1 0 1 1 
+LOYALTY 2 0 2 2 +LOWERING 3 0 3 3 +LOWERED 1 0 1 1 +LOVES 6 0 6 6 +LOVERS 2 0 2 2 +LOVELY 3 0 3 3 +LOUVRE 1 0 1 1 +LOUISIANA 1 0 1 1 +LOUDLY 1 0 1 1 +LOUDER 1 0 1 1 +LOSING 3 0 3 3 +LOSE 6 0 6 6 +LORN 1 0 1 1 +LORDSHIPS 1 0 1 1 +LOOSENED 1 0 1 1 +LOOSELY 1 0 1 1 +LOOSE 3 0 3 3 +LOOKS 5 0 5 5 +LOOKING 21 0 21 21 +LONGING 3 0 3 3 +LONGED 2 0 2 2 +LONELY 1 0 1 1 +LONDON 4 0 4 4 +LODGE 1 0 1 1 +LOCKS 1 0 1 1 +LOCKED 4 0 4 4 +LOCATE 1 0 1 1 +LOCAL 4 0 4 4 +LOBSTERS 2 0 2 2 +LOADING 1 0 1 1 +LIVING 5 0 5 5 +LIVID 1 0 1 1 +LIVERY 1 0 1 1 +LIVELY 2 0 2 2 +LIVELONG 1 0 1 1 +LIVELIHOOD 1 0 1 1 +LITTER 1 0 1 1 +LITERATURE 1 0 1 1 +LITERALLY 1 0 1 1 +LISTENING 5 0 5 5 +LISTENERS 1 0 1 1 +LISTENER 1 0 1 1 +LIQUOR 4 0 4 4 +LIQUID 1 0 1 1 +LIPS 6 0 6 6 +LIP 3 0 3 3 +LIONS 1 0 1 1 +LION 1 0 1 1 +LINK 1 0 1 1 +LINGO 1 0 1 1 +LINGER 1 0 1 1 +LINES 2 0 2 2 +LINCOLN 1 0 1 1 +LIMITS 1 0 1 1 +LIMIT 1 0 1 1 +LIMES 1 0 1 1 +LIKING 1 0 1 1 +LIKES 1 0 1 1 +LIGHTLY 1 0 1 1 +LIGHTING 1 0 1 1 +LIGHTENED 1 0 1 1 +LIGATURES 1 0 1 1 +LIFTING 1 0 1 1 +LIFT 1 0 1 1 +LIFETIME 1 0 1 1 +LIEUTENANT 1 0 1 1 +LIES 4 0 4 4 +LIBRARY 2 0 2 2 +LIBERTY 3 0 3 3 +LIBERATION 2 0 2 2 +LEVIN 6 0 6 6 +LEVELLED 1 0 1 1 +LEVEL 1 0 1 1 +LETTING 1 0 1 1 +LETS 1 0 1 1 +LET'S 4 0 4 4 +LESSONS 1 0 1 1 +LESSENS 1 0 1 1 +LESSEN 2 0 2 2 +LENT 1 0 1 1 +LENGTH 4 0 4 4 +LEGISLATURE 1 0 1 1 +LEGAL 2 0 2 2 +LEG 1 0 1 1 +LED 4 0 4 4 +LEAVING 5 0 5 5 +LEAVES 1 0 1 1 +LEAST 15 0 15 15 +LEARNS 1 0 1 1 +LEARNING 4 0 4 4 +LEANING 1 0 1 1 +LEAN 1 0 1 1 +LEAF 1 0 1 1 +LEADS 1 0 1 1 +LEADING 3 0 3 3 +LEADER 2 0 2 2 +LAZY 1 0 1 1 +LAZILY 1 0 1 1 +LAYING 3 0 3 3 +LAY 14 0 14 14 +LAWYER 1 0 1 1 +LAURA 3 0 3 3 +LAUGHTER 3 0 3 3 +LAUGHS 1 0 1 1 +LAUGHING 5 0 5 5 +LATTER 2 0 2 2 +LATIN 1 0 1 1 +LATELY 2 0 2 2 +LASTLY 2 0 2 2 +LASTING 1 0 1 1 +LARKIN'S 1 0 1 1 +LARGESSE 1 0 1 1 +LAPSE 1 0 1 1 +LAP 2 0 2 2 +LANTERN 1 0 1 1 +LANGUAGE 2 0 2 2 +LANDSMAN 1 0 1 1 +LANDOWNER 4 0 4 4 +LANDLORD 1 0 1 1 +LANDING 1 0 1 1 +LANDED 3 0 3 3 +LAMPLIT 1 0 1 1 +LAMP 1 0 1 1 +LAME 1 0 1 1 +LADY'S 1 0 1 1 +LACE 2 0 2 2 +LABOURS 1 0 1 1 +LABORER 1 0 1 1 +KNUCKLES 1 0 1 1 +KNOWS 8 0 8 8 +KNOWING 3 0 3 3 +KNOWEST 3 0 3 3 +KNOWED 1 0 1 1 +KNOCKER 1 0 1 1 +KNOCK 1 0 1 1 +KNIGHTS 2 0 2 2 +KNIGHTHOOD 1 0 1 1 +KNEW 16 0 16 16 +KNEES 3 0 3 3 +KNAVE 1 0 1 1 +KITCHEN 3 0 3 3 +KISSING 2 0 2 2 +KISSED 6 0 6 6 +KISS 2 0 2 2 +KINSFOLK 1 0 1 1 +KINGS 8 0 8 8 +KINGDOM 3 0 3 3 +KING'S 7 0 7 7 +KINDS 1 0 1 1 +KINDLY 3 0 3 3 +KINDEST 1 0 1 1 +KILLING 1 0 1 1 +KILLED 4 0 4 4 +KIDNEYS 1 0 1 1 +KICKED 1 0 1 1 +KHORASAN 2 0 2 2 +KHAN 1 0 1 1 +KEYHOLE 1 0 1 1 +KEPT 9 0 9 9 +KENT 2 0 2 2 +KENNETH 3 0 3 3 +KEEPING 5 0 5 5 +KEEPER'S 1 0 1 1 +KEEPER 2 0 2 2 +KEENLY 1 0 1 1 +KAZI 1 0 1 1 +KANSAS 7 0 7 7 +K 1 0 1 1 +JUSTLY 1 0 1 1 +JUSTINIAN 1 0 1 1 +JUSTIFIES 1 0 1 1 +JUSTIFICATION 3 0 3 3 +JUNE 1 0 1 1 +JUMPING 2 0 2 2 +JUMP 1 0 1 1 +JUICES 1 0 1 1 +JUICE 1 0 1 1 +JUGS 1 0 1 1 +JUDICIAL 1 0 1 1 +JUDGES 2 0 2 2 +JUDGED 1 0 1 1 +JOYOUS 1 0 1 1 +JOYFUL 2 0 2 2 +JOYANCE 2 0 2 2 +JOY 7 0 7 7 +JOURNEYED 1 0 1 1 +JOURNEY 8 0 8 8 +JOURNALISM 1 0 1 1 +JOSEPH 1 0 1 1 +JONES 1 0 1 1 +JOLLY 1 0 1 1 +JOINTS 1 0 1 1 +JOCELYN 1 0 1 1 +JOBS 1 0 1 1 +JOB 7 0 7 7 +JOANNA'S 1 0 1 1 +JEWISH 2 0 2 2 +JEWELRY 1 0 1 1 +JEWELER 2 0 2 2 +JEW'S 1 0 1 1 +JESUS 2 0 2 2 +JERK 1 0 1 1 +JERICHO 1 0 1 1 +JENKINS 2 0 2 2 +JEHOVAH 3 0 3 3 +JEERINGLY 1 0 1 1 +JEAN 10 0 10 10 +JANUARY 1 0 1 1 +JANGLING 1 0 1 1 +JANE'S 1 0 1 1 +JAMS 2 0 2 2 +JAMIESON 1 0 1 1 +JAMES 3 0 3 3 +JAM 2 0 2 2 +JAIL 1 0 1 1 +JACKSON 4 0 4 4 +JACKET 1 0 1 1 +IVANOVITCH'S 1 0 1 1 
+ITSELF 7 0 7 7 +ITALY 1 0 1 1 +ISSUED 2 0 2 2 +ISSUE 2 0 2 2 +ISRAEL'S 2 0 2 2 +ISRAEL 7 0 7 7 +ISOLATION 1 0 1 1 +ISLANDERS 1 0 1 1 +ISLAND 5 0 5 5 +IRRITATION 1 0 1 1 +IRRITABILITY 1 0 1 1 +IRREVERENTLY 1 0 1 1 +IRREVERENCE 1 0 1 1 +IRRESISTIBLY 1 0 1 1 +IRRESISTIBLE 1 0 1 1 +IRON 7 0 7 7 +IRKSOME 1 0 1 1 +IRISH 1 0 1 1 +IRELAND 2 0 2 2 +IRATE 1 0 1 1 +INWARD 1 0 1 1 +INVOLVED 1 0 1 1 +INVOKE 1 0 1 1 +INVITATION 2 0 2 2 +INVISIBLE 1 0 1 1 +INVINCIBLE 1 0 1 1 +INVETERATE 1 0 1 1 +INVESTIGATION 2 0 2 2 +INVENTING 1 0 1 1 +INVADING 1 0 1 1 +INVADERS 1 0 1 1 +INVADED 1 0 1 1 +INTRODUCTION 1 0 1 1 +INTRODUCING 1 0 1 1 +INTRODUCES 1 0 1 1 +INTRODUCED 1 0 1 1 +INTOXICATED 2 0 2 2 +INTOLERABLE 1 0 1 1 +INTIMATES 1 0 1 1 +INTERVIEWS 1 0 1 1 +INTERVAL 3 0 3 3 +INTERRUPTED 1 0 1 1 +INTERRED 2 0 2 2 +INTERPRETATION 1 0 1 1 +INTERPOLATIONS 1 0 1 1 +INTERNATIONAL 1 0 1 1 +INTERNAL 3 0 3 3 +INTERMISSION 1 0 1 1 +INTERMENT 2 0 2 2 +INTERMEDDLING 1 0 1 1 +INTERFERENCE 1 0 1 1 +INTERFERE 1 0 1 1 +INTERESTING 6 0 6 6 +INTERESTED 3 0 3 3 +INTERCOURSE 1 0 1 1 +INTERCHANGE 1 0 1 1 +INTENTIONALLY 2 0 2 2 +INTENTION 4 0 4 4 +INTENSITY 1 0 1 1 +INTENSELY 2 0 2 2 +INTENDED 5 0 5 5 +INTELLIGENT 2 0 2 2 +INTELLIGENCE 2 0 2 2 +INTELLECT 1 0 1 1 +INSULTED 1 0 1 1 +INSUFFICIENT 1 0 1 1 +INSTRUMENTS 4 0 4 4 +INSTRUCTIONS 1 0 1 1 +INSTITUTIONS 1 0 1 1 +INSTITUTED 1 0 1 1 +INSTITUTE 1 0 1 1 +INSTINCTS 1 0 1 1 +INSTINCT 3 0 3 3 +INSTEAD 4 0 4 4 +INSTANT'S 1 0 1 1 +INSTANT 5 0 5 5 +INSTANCE 1 0 1 1 +INSPIRES 1 0 1 1 +INSPIRATION 3 0 3 3 +INSOLUBLE 1 0 1 1 +INSOLENT 1 0 1 1 +INSISTING 1 0 1 1 +INSISTED 2 0 2 2 +INSIST 1 0 1 1 +INSINUATING 1 0 1 1 +INSHALLAH 1 0 1 1 +INSECURITY 1 0 1 1 +INSCRIPTION 1 0 1 1 +INSANE 1 0 1 1 +INQUISITION 1 0 1 1 +INQUIRIES 1 0 1 1 +INQUIRED 5 0 5 5 +INNOCENT 3 0 3 3 +INNKEEPER 2 0 2 2 +INJURIES 1 0 1 1 +INJURED 1 0 1 1 +INIQUITIES 1 0 1 1 +INHERENT 1 0 1 1 +INHABITANTS 1 0 1 1 +INHABIT 1 0 1 1 +INGREDIENTS 1 0 1 1 +INFORMED 5 0 5 5 +INFORMATION 3 0 3 3 +INFORM 1 0 1 1 +INFLUENCES 1 0 1 1 +INFLUENCED 1 0 1 1 +INFLUENCE 10 0 10 10 +INFLICT 1 0 1 1 +INFLATE 1 0 1 1 +INFIRMITY 1 0 1 1 +INFIRM 1 0 1 1 +INFINITELY 1 0 1 1 +INFINITE 4 0 4 4 +INFERIOR 3 0 3 3 +INFERENTIALLY 1 0 1 1 +INFAMY 2 0 2 2 +INEXORABLY 1 0 1 1 +INEVITABLE 1 0 1 1 +INELEGANTLY 1 0 1 1 +INDUSTRY 1 0 1 1 +INDUSTRIOUS 1 0 1 1 +INDUSTRIAL 1 0 1 1 +INDULGENT 1 0 1 1 +INDULGENCE 2 0 2 2 +INDUCED 1 0 1 1 +INDIVIDUALS 9 0 9 9 +INDIVIDUAL 1 0 1 1 +INDISPOSITION 1 0 1 1 +INDISCRETION 1 0 1 1 +INDIGNATION 1 0 1 1 +INDIFFERENT 2 0 2 2 +INDIFFERENCE 1 0 1 1 +INDICATIONS 2 0 2 2 +INDICATED 2 0 2 2 +INDIANS 2 0 2 2 +INDIANA 2 0 2 2 +INDIAN 1 0 1 1 +INDESCRIBABLE 1 0 1 1 +INDEPENDENT 2 0 2 2 +INDEPENDENCE 4 0 4 4 +INDECISION 1 0 1 1 +INCUR 1 0 1 1 +INCREDULOUSLY 1 0 1 1 +INCREDULITY 1 0 1 1 +INCREASING 2 0 2 2 +INCREASES 2 0 2 2 +INCREASED 5 0 5 5 +INCREASE 5 0 5 5 +INCORRECT 1 0 1 1 +INCONSISTENCY 1 0 1 1 +INCONCEIVABLE 1 0 1 1 +INCOHERENT 1 0 1 1 +INCLUDING 2 0 2 2 +INCLUDE 1 0 1 1 +INCLINED 1 0 1 1 +INCLINATION 1 0 1 1 +INCIDENTS 1 0 1 1 +INCIDENT 1 0 1 1 +INCARCERATING 1 0 1 1 +INASMUCH 1 0 1 1 +INANIMATE 1 0 1 1 +IMPULSE 3 0 3 3 +IMPROVISE 1 0 1 1 +IMPROVISATION 1 0 1 1 +IMPROVING 1 0 1 1 +IMPROVIDENT 1 0 1 1 +IMPRESSION 1 0 1 1 +IMPRECATIONS 1 0 1 1 +IMPRECATION 1 0 1 1 +IMPOSSIBLE 4 0 4 4 +IMPOSING 1 0 1 1 +IMPOSES 1 0 1 1 +IMPORTS 1 0 1 1 +IMPORTED 1 0 1 1 +IMPORTANT 1 0 1 1 +IMPORTANCE 3 0 3 3 +IMPLIES 1 0 1 1 +IMPLIED 2 0 2 2 +IMPLACABLE 1 0 1 1 +IMPIOUS 1 0 1 1 +IMPERTINENT 1 0 1 1 +IMPERSONAL 1 0 1 1 +IMPERIOUS 1 0 1 1 +IMPERATIVE 1 
0 1 1 +IMPATIENTLY 2 0 2 2 +IMPATIENT 2 0 2 2 +IMMORTALS 1 0 1 1 +IMMENSE 3 0 3 3 +IMMEDIATELY 9 0 9 9 +IMMEDIATE 1 0 1 1 +IMITATION 1 0 1 1 +IMITATE 1 0 1 1 +IMBECILE 1 0 1 1 +IMAGINED 1 0 1 1 +IMAGINATION 1 0 1 1 +IMAGINARY 1 0 1 1 +IMAGE 2 0 2 2 +ILLUSTRIOUS 4 0 4 4 +ILLUSION 1 0 1 1 +ILLITERATE 1 0 1 1 +IGNORED 1 0 1 1 +IGNORANT 2 0 2 2 +IGNORANCE 1 0 1 1 +IGNOMY 1 0 1 1 +IDOLATRIES 1 0 1 1 +IDLE 1 0 1 1 +IDIOTIC 1 0 1 1 +IDIOT 1 0 1 1 +IDEAS 2 0 2 2 +IDEAL 1 0 1 1 +HYPOTHETICAL 1 0 1 1 +HYPODERMICALLY 1 0 1 1 +HYPODERMIC 1 0 1 1 +HYDROCHLORIC 2 0 2 2 +HUSTLED 1 0 1 1 +HUSTLE 1 0 1 1 +HUSKILY 1 0 1 1 +HUSH 1 0 1 1 +HUSBANDMEN 1 0 1 1 +HUSBAND'S 3 0 3 3 +HUSBAND 9 0 9 9 +HURRYING 3 0 3 3 +HURRY 1 0 1 1 +HURRIEDLY 3 0 3 3 +HURRIED 3 0 3 3 +HURRICANE 1 0 1 1 +HUNTED 2 0 2 2 +HUNT 1 0 1 1 +HUNGARY 1 0 1 1 +HUNG 2 0 2 2 +HUNDREDTH 1 0 1 1 +HUNDREDS 1 0 1 1 +HUNDRED 29 0 29 29 +HUMILIATIONS 1 0 1 1 +HUMILIATION 1 0 1 1 +HUMILIATED 1 0 1 1 +HUMBLY 1 0 1 1 +HUMBLE 1 0 1 1 +HUMANITY 1 0 1 1 +HUMANITARY 1 0 1 1 +HUMAN 6 0 6 6 +HULLO 1 0 1 1 +HUGELY 1 0 1 1 +HUGE 3 0 3 3 +HOWL 1 0 1 1 +HOUSES 4 0 4 4 +HOTLY 1 0 1 1 +HOTEL 4 0 4 4 +HOT 5 0 5 5 +HOST 3 0 3 3 +HOSPITALITY 1 0 1 1 +HORSEBACK 1 0 1 1 +HORSE 10 0 10 10 +HORRIBLE 3 0 3 3 +HORNS 1 0 1 1 +HORN 1 0 1 1 +HORIZONTAL 1 0 1 1 +HORIZON 2 0 2 2 +HORACE 1 0 1 1 +HOPPER 1 0 1 1 +HOPING 1 0 1 1 +HOPELESS 1 0 1 1 +HOPEFUL 1 0 1 1 +HOPED 2 0 2 2 +HOOTED 1 0 1 1 +HOOK 1 0 1 1 +HONEYMOON 1 0 1 1 +HONEY 1 0 1 1 +HOMEWARD 1 0 1 1 +HOMELESS 1 0 1 1 +HOLLOW 2 0 2 2 +HOLES 2 0 2 2 +HOLE 1 0 1 1 +HOLDS 1 0 1 1 +HOLDING 6 0 6 6 +HITHERTO 2 0 2 2 +HITCH 1 0 1 1 +HIT 3 0 3 3 +HISTORY 4 0 4 4 +HISTORIANS 1 0 1 1 +HISSELF 1 0 1 1 +HIRE 1 0 1 1 +HINTS 1 0 1 1 +HINTED 1 0 1 1 +HINT 3 0 3 3 +HINGES 1 0 1 1 +HINDER 1 0 1 1 +HILL 7 0 7 7 +HIGHWAYS 1 0 1 1 +HIGHLY 1 0 1 1 +HIGHEST 1 0 1 1 +HIGHER 1 0 1 1 +HIGGINS 1 0 1 1 +HIDING 1 0 1 1 +HIDEOUS 1 0 1 1 +HESITATING 1 0 1 1 +HESITATED 2 0 2 2 +HERS 4 0 4 4 +HERO 1 0 1 1 +HERCULEAN 1 0 1 1 +HERBS 1 0 1 1 +HENRY 3 0 3 3 +HENCE 4 0 4 4 +HELSTONE 1 0 1 1 +HELPLESSLY 1 0 1 1 +HELPLESS 3 0 3 3 +HELMET 2 0 2 2 +HELLO 1 0 1 1 +HELL 4 0 4 4 +HELD 13 0 13 13 +HEIR 2 0 2 2 +HEIGHT 1 0 1 1 +HEEL 1 0 1 1 +HEED 1 0 1 1 +HEDGE 1 0 1 1 +HEBREWS 4 0 4 4 +HEAVY 11 0 11 11 +HEAVIEST 1 0 1 1 +HEAVENS 1 0 1 1 +HEAVEN'S 1 0 1 1 +HEAVEN 6 0 6 6 +HEAVE 1 0 1 1 +HEARTILY 1 0 1 1 +HEARTIEST 1 0 1 1 +HEARTED 2 0 2 2 +HEARING 2 0 2 2 +HEAP 2 0 2 2 +HEALTHY 1 0 1 1 +HEADED 4 0 4 4 +HEADACHES 1 0 1 1 +HAY 1 0 1 1 +HAVEN'T 4 0 4 4 +HAUNT 3 0 3 3 +HAUNCHES 1 0 1 1 +HAUGHTINESS 1 0 1 1 +HATTON 1 0 1 1 +HATS 1 0 1 1 +HATREDS 1 0 1 1 +HATRED 2 0 2 2 +HATES 3 0 3 3 +HATED 3 0 3 3 +HATE 4 0 4 4 +HAT 3 0 3 3 +HASTY 2 0 2 2 +HASTILY 2 0 2 2 +HASTENED 1 0 1 1 +HASTEN 1 0 1 1 +HASTE 5 0 5 5 +HAST 7 0 7 7 +HASN'T 1 0 1 1 +HASHISH 1 0 1 1 +HARSHLY 2 0 2 2 +HARRY 3 0 3 3 +HARRISONVILLE 2 0 2 2 +HARNESSED 1 0 1 1 +HARMONY 1 0 1 1 +HARMLESS 1 0 1 1 +HARM 6 0 6 6 +HARK 1 0 1 1 +HARGRAVE 1 0 1 1 +HARBOR 1 0 1 1 +HAPPINESS 5 0 5 5 +HAPPIEST 1 0 1 1 +HAPPIER 3 0 3 3 +HAPPENS 1 0 1 1 +HAPPEN 3 0 3 3 +HANGING 1 0 1 1 +HANGED 1 0 1 1 +HANG 3 0 3 3 +HANDSOME 4 0 4 4 +HANDLED 1 0 1 1 +HANDKERCHIEF 3 0 3 3 +HANDING 1 0 1 1 +HANDIER 1 0 1 1 +HANDED 1 0 1 1 +HAMPERED 1 0 1 1 +HAMMERS 1 0 1 1 +HAMMER 1 0 1 1 +HALVES 1 0 1 1 +HALTING 1 0 1 1 +HALLS 1 0 1 1 +HALFPENNY 1 0 1 1 +HALE 6 0 6 6 +HAIRED 1 0 1 1 +HAG 1 0 1 1 +HACK 1 0 1 1 +HABITUAL 1 0 1 1 +HABITS 2 0 2 2 +GUT 3 0 3 3 +GUSH 1 0 1 1 +GULPED 1 0 1 1 +GULF 1 0 1 1 +GUILTY 5 0 5 5 +GUILT 2 0 2 2 +GUIDE 3 0 3 3 +GUESSED 1 0 
1 1 +GUARDED 1 0 1 1 +GUARD 1 0 1 1 +GRUMBLED 2 0 2 2 +GRUFFISH 1 0 1 1 +GROWTH 1 0 1 1 +GROWN 1 0 1 1 +GROUPS 6 0 6 6 +GROUP 3 0 3 3 +GROUNDS 1 0 1 1 +GROOMED 1 0 1 1 +GROOM 1 0 1 1 +GRINNING 3 0 3 3 +GRINNED 2 0 2 2 +GRIN 2 0 2 2 +GRIMACED 1 0 1 1 +GRIEVING 1 0 1 1 +GREET 2 0 2 2 +GREENWOOD 1 0 1 1 +GREENHORNS 1 0 1 1 +GREEK 2 0 2 2 +GREATLY 1 0 1 1 +GREATEST 6 0 6 6 +GREATER 6 0 6 6 +GREASY 1 0 1 1 +GRAVITY 1 0 1 1 +GRAVES 1 0 1 1 +GRAVELLED 1 0 1 1 +GRATING 2 0 2 2 +GRATIFICATION 1 0 1 1 +GRATEFUL 2 0 2 2 +GRATED 1 0 1 1 +GRASS 2 0 2 2 +GRASP 2 0 2 2 +GRAPE 1 0 1 1 +GRANTING 1 0 1 1 +GRANT 2 0 2 2 +GRANDSON 1 0 1 1 +GRANDPAPA 1 0 1 1 +GRANDFATHER 2 0 2 2 +GRANDEUR 1 0 1 1 +GRANDDAUGHTER 1 0 1 1 +GRAINS 1 0 1 1 +GRAIN 4 0 4 4 +GRAFTON'S 1 0 1 1 +GRAFTON 1 0 1 1 +GRAFT 2 0 2 2 +GRADUALLY 1 0 1 1 +GRACIOUSLY 2 0 2 2 +GRACE 1 0 1 1 +GRABBED 2 0 2 2 +GOVERNMENT'S 1 0 1 1 +GOTTEN 1 0 1 1 +GOSLER 1 0 1 1 +GOOSE 1 0 1 1 +GOODNESS 5 0 5 5 +GOLFING 1 0 1 1 +GOLDFINCH 1 0 1 1 +GOD'S 3 0 3 3 +GNASHING 1 0 1 1 +GNARLED 1 0 1 1 +GLOWING 2 0 2 2 +GLOWED 3 0 3 3 +GLOVES 3 0 3 3 +GLOVE 1 0 1 1 +GLORY 2 0 2 2 +GLORIOUS 1 0 1 1 +GLORIFY 1 0 1 1 +GLOOMY 2 0 2 2 +GLOOM 1 0 1 1 +GLOATING 1 0 1 1 +GLISPIN'S 1 0 1 1 +GLINTING 1 0 1 1 +GLIMPSE 1 0 1 1 +GLIMMER 1 0 1 1 +GLIDING 1 0 1 1 +GLEAMED 1 0 1 1 +GLAZED 1 0 1 1 +GLANCING 1 0 1 1 +GLANCES 2 0 2 2 +GLANCED 2 0 2 2 +GLANCE 3 0 3 3 +GLADNESS 2 0 2 2 +GLADLY 1 0 1 1 +GLADDENEST 1 0 1 1 +GLADDENED 1 0 1 1 +GIVES 7 0 7 7 +GIRDLE 2 0 2 2 +GIMLET 1 0 1 1 +GILROY 1 0 1 1 +GIFTS 1 0 1 1 +GIFTED 1 0 1 1 +GIANT'S 1 0 1 1 +GIANT 1 0 1 1 +GHOSTS 1 0 1 1 +GHOST 2 0 2 2 +GHASTLY 2 0 2 2 +GETTING 12 0 12 12 +GETS 3 0 3 3 +GERMS 1 0 1 1 +GERMAN 7 0 7 7 +GERM 1 0 1 1 +GEORGIA 1 0 1 1 +GEORGES 1 0 1 1 +GENUINE 1 0 1 1 +GENTLY 1 0 1 1 +GENTLE 1 0 1 1 +GENIUS 1 0 1 1 +GENIALLY 1 0 1 1 +GENEROUS 2 0 2 2 +GENEROSITY 1 0 1 1 +GENERATION 1 0 1 1 +GENERALLY 3 0 3 3 +GENERAL 7 0 7 7 +GEAR 2 0 2 2 +GAZING 2 0 2 2 +GAZED 3 0 3 3 +GAY 2 0 2 2 +GATHERING 2 0 2 2 +GATHER 1 0 1 1 +GATES 1 0 1 1 +GATE 4 0 4 4 +GASPED 2 0 2 2 +GASP 1 0 1 1 +GASHED 1 0 1 1 +GARNISHMENT 1 0 1 1 +GARMENTS 2 0 2 2 +GARLANDED 1 0 1 1 +GARLAND 1 0 1 1 +GARDEN 7 0 7 7 +GAPS 1 0 1 1 +GAP 1 0 1 1 +GANG 5 0 5 5 +GAMMER 1 0 1 1 +GAMESTER 1 0 1 1 +GAMBLING 3 0 3 3 +GAMBLERS 1 0 1 1 +GALLOPED 1 0 1 1 +GALLERY 1 0 1 1 +GALL 1 0 1 1 +GAIN 3 0 3 3 +GAILY 1 0 1 1 +GAIETY 1 0 1 1 +GAD'S 1 0 1 1 +GAD 1 0 1 1 +GABLE 1 0 1 1 +GABBLE 1 0 1 1 +G 1 0 1 1 +FUSS 2 0 2 2 +FURY 2 0 2 2 +FURTHEST 1 0 1 1 +FURNITURE 1 0 1 1 +FURNISHED 1 0 1 1 +FURNACE 1 0 1 1 +FURIOUS 2 0 2 2 +FUNNY 3 0 3 3 +FUNDS 1 0 1 1 +FUN 2 0 2 2 +FUMED 1 0 1 1 +FULLY 1 0 1 1 +FULFILLED 1 0 1 1 +FULFIL 1 0 1 1 +FUGITIVES 1 0 1 1 +FUEL 1 0 1 1 +FRY 1 0 1 1 +FRUITS 4 0 4 4 +FRUITLESS 1 0 1 1 +FRUIT 7 0 7 7 +FROWNED 1 0 1 1 +FROWN 1 0 1 1 +FROSTY 1 0 1 1 +FROST 1 0 1 1 +FRIGHTENED 3 0 3 3 +FRIGHTEN 1 0 1 1 +FRIENDSHIP 2 0 2 2 +FRIENDLINESS 1 0 1 1 +FRIEND'S 1 0 1 1 +FRIDOLIN 1 0 1 1 +FRIDAY 2 0 2 2 +FRESHEST 1 0 1 1 +FRESH 5 0 5 5 +FRERE 1 0 1 1 +FREQUENTLY 2 0 2 2 +FREQUENT 2 0 2 2 +FREELY 2 0 2 2 +FREED 2 0 2 2 +FRAUD 1 0 1 1 +FRANTICALLY 1 0 1 1 +FRANKNESS 1 0 1 1 +FRANKLY 1 0 1 1 +FRANCS 6 0 6 6 +FRANCIS 1 0 1 1 +FRAME 1 0 1 1 +FRAGMENTS 1 0 1 1 +FOUNDATION 1 0 1 1 +FOSTER 3 0 3 3 +FORWARDS 3 0 3 3 +FORWARD 5 0 5 5 +FORTUNES 1 0 1 1 +FORTUNATELY 5 0 5 5 +FORTNIGHT 1 0 1 1 +FORTHWITH 1 0 1 1 +FORTH 4 0 4 4 +FORSOOTH 1 0 1 1 +FORMS 2 0 2 2 +FORMING 2 0 2 2 +FORMIDABLE 2 0 2 2 +FORMERLY 2 0 2 2 +FORGOTTEN 4 0 4 4 +FORGOT 7 0 7 7 +FORGIVE 2 0 2 2 +FORGETTING 1 0 1 1 
+FORGET 2 0 2 2 +FORGERIES 1 0 1 1 +FOREVER 3 0 3 3 +FORETASTE 1 0 1 1 +FORESTERS 1 0 1 1 +FOREST 2 0 2 2 +FORESHADOWED 1 0 1 1 +FORENOON 1 0 1 1 +FOREMOST 1 0 1 1 +FORELOCK 1 0 1 1 +FOREIGNERS 1 0 1 1 +FOREIGN 6 0 6 6 +FOREHEAD 4 0 4 4 +FOREFINGER 1 0 1 1 +FORCES 3 0 3 3 +FORBIDDEN 1 0 1 1 +FORBID 1 0 1 1 +FORBEARANCE 1 0 1 1 +FORBEAR 1 0 1 1 +FOOTSTEPS 1 0 1 1 +FOOTNOTE 1 0 1 1 +FOOLISH 3 0 3 3 +FOND 2 0 2 2 +FOLLY 1 0 1 1 +FOLLOWERS 3 0 3 3 +FOLLOWER 1 0 1 1 +FOLKS 3 0 3 3 +FOLDED 2 0 2 2 +FOLD 1 0 1 1 +FOGGY 1 0 1 1 +FOES 2 0 2 2 +FLYING 1 0 1 1 +FLUTTER 1 0 1 1 +FLUSHED 2 0 2 2 +FLUSH 1 0 1 1 +FLUNG 2 0 2 2 +FLUID 2 0 2 2 +FLOWERS 4 0 4 4 +FLOURISHING 1 0 1 1 +FLOURISHED 1 0 1 1 +FLOURISH 1 0 1 1 +FLOCKS 1 0 1 1 +FLOATED 1 0 1 1 +FLITTED 1 0 1 1 +FLIRTATION 1 0 1 1 +FLING 1 0 1 1 +FLINCH 1 0 1 1 +FLIGHT 5 0 5 5 +FLICK 1 0 1 1 +FLEW 1 0 1 1 +FLEECED 1 0 1 1 +FLEE 1 0 1 1 +FLED 4 0 4 4 +FLATTERY 1 0 1 1 +FLASK 1 0 1 1 +FLASHING 1 0 1 1 +FLASHED 1 0 1 1 +FLARING 1 0 1 1 +FLAPPING 1 0 1 1 +FLAMES 2 0 2 2 +FLAME 5 0 5 5 +FLAGRANT 1 0 1 1 +FLAGONS 1 0 1 1 +FLAGON 1 0 1 1 +FLAGGED 1 0 1 1 +FLAG 1 0 1 1 +FIXING 1 0 1 1 +FIXED 5 0 5 5 +FIX 1 0 1 1 +FITTING 2 0 2 2 +FITTED 2 0 2 2 +FITS 1 0 1 1 +FISHER 2 0 2 2 +FIRMLY 2 0 2 2 +FIRM 1 0 1 1 +FIRING 2 0 2 2 +FIREPLACE 1 0 1 1 +FIREMAN 3 0 3 3 +FIREFLY 1 0 1 1 +FIRED 1 0 1 1 +FIRE 15 0 15 15 +FINS 1 0 1 1 +FINNEY 2 0 2 2 +FINISHING 2 0 2 2 +FINISH 3 0 3 3 +FINGERS 1 0 1 1 +FINGERING 1 0 1 1 +FINGER 6 0 6 6 +FINEST 1 0 1 1 +FINE 10 0 10 10 +FINANCIAL 1 0 1 1 +FINAL 2 0 2 2 +FIN 1 0 1 1 +FILTER 1 0 1 1 +FILMY 1 0 1 1 +FILLED 5 0 5 5 +FILL 4 0 4 4 +FIGURE 3 0 3 3 +FIGHTING 1 0 1 1 +FIGHT 5 0 5 5 +FIFTY 14 0 14 14 +FIERCE 2 0 2 2 +FIENDS 1 0 1 1 +FIENDISH 1 0 1 1 +FICKLE 2 0 2 2 +FEW 26 0 26 26 +FEVERISH 4 0 4 4 +FEVER 1 0 1 1 +FETTERS 1 0 1 1 +FETCHED 1 0 1 1 +FETCH 7 0 7 7 +FESTIVE 1 0 1 1 +FESTIVAL 1 0 1 1 +FEROCIOUS 1 0 1 1 +FENDER 1 0 1 1 +FENCED 1 0 1 1 +FENCE 4 0 4 4 +FEMALE 1 0 1 1 +FELLOWSHIP 1 0 1 1 +FELLOWS 2 0 2 2 +FEET 9 0 9 9 +FEELINGS 3 0 3 3 +FEEBLY 1 0 1 1 +FEEBLE 2 0 2 2 +FEE 1 0 1 1 +FEDERAL 1 0 1 1 +FED 1 0 1 1 +FEBRUARY 5 0 5 5 +FEATURES 1 0 1 1 +FEATHERS 1 0 1 1 +FEATHER 1 0 1 1 +FEARS 1 0 1 1 +FEARLESS 1 0 1 1 +FEARING 1 0 1 1 +FEARFUL 2 0 2 2 +FEARED 4 0 4 4 +FEAR 13 0 13 13 +FAVOURS 1 0 1 1 +FATTY 1 0 1 1 +FATTER 1 0 1 1 +FATIGUE 2 0 2 2 +FATHERLY 1 0 1 1 +FATALLY 1 0 1 1 +FATAL 2 0 2 2 +FASTER 2 0 2 2 +FASHIONS 1 0 1 1 +FASHIONED 2 0 2 2 +FASHION 2 0 2 2 +FASCINATION 1 0 1 1 +FARTHEST 1 0 1 1 +FARTHER 3 0 3 3 +FARMS 1 0 1 1 +FARM 3 0 3 3 +FAREWELL 1 0 1 1 +FARED 1 0 1 1 +FANTASTIC 1 0 1 1 +FANS 1 0 1 1 +FANCY 4 0 4 4 +FANCIFUL 1 0 1 1 +FANCIED 1 0 1 1 +FANATICS 1 0 1 1 +FAMILIES 3 0 3 3 +FAMILIARITY 1 0 1 1 +FAMILIAR 2 0 2 2 +FAME 2 0 2 2 +FALSE 1 0 1 1 +FALLEN 1 0 1 1 +FAITHLESS 1 0 1 1 +FAITHFULLY 2 0 2 2 +FAITHFUL 3 0 3 3 +FAIRY 2 0 2 2 +FAIRLY 3 0 3 3 +FAINTNESS 1 0 1 1 +FAINTING 2 0 2 2 +FAINT 4 0 4 4 +FAILURES 3 0 3 3 +FAILURE 1 0 1 1 +FAILS 1 0 1 1 +FAIL 3 0 3 3 +FAGOTS 1 0 1 1 +FAGGOT 1 0 1 1 +FACTS 3 0 3 3 +FACTORIES 2 0 2 2 +FACTOR 1 0 1 1 +FACING 3 0 3 3 +FACES 2 0 2 2 +FABULOUS 1 0 1 1 +EYELIDS 1 0 1 1 +EYED 4 0 4 4 +EXTREMELY 4 0 4 4 +EXTREME 2 0 2 2 +EXTRAORDINARY 2 0 2 2 +EXTINGUISHING 1 0 1 1 +EXTINGUISH 1 0 1 1 +EXTERNAL 2 0 2 2 +EXTENT 2 0 2 2 +EXTENSION 1 0 1 1 +EXTENDING 2 0 2 2 +EXTENDED 2 0 2 2 +EXTEMPORIZED 1 0 1 1 +EXPRESSLY 1 0 1 1 +EXPRESSION 4 0 4 4 +EXPRESSED 3 0 3 3 +EXPOSURE 1 0 1 1 +EXPOSES 1 0 1 1 +EXPLANATORY 1 0 1 1 +EXPLANATION 1 0 1 1 +EXPLAINING 1 0 1 1 +EXPLAIN 1 0 1 1 +EXPIATION 1 0 1 1 
+EXPERIMENTS 1 0 1 1 +EXPERIMENTING 1 0 1 1 +EXPERIENCES 1 0 1 1 +EXPERIENCED 1 0 1 1 +EXPENSES 2 0 2 2 +EXPENSE 2 0 2 2 +EXPENDED 1 0 1 1 +EXPEDIENT 1 0 1 1 +EXPECTS 1 0 1 1 +EXPECT 4 0 4 4 +EXOTIC 1 0 1 1 +EXIT 1 0 1 1 +EXISTING 1 0 1 1 +EXISTENCE 5 0 5 5 +EXISTED 1 0 1 1 +EXHIBITED 4 0 4 4 +EXERTIONS 1 0 1 1 +EXERTING 1 0 1 1 +EXERTED 1 0 1 1 +EXERCISES 1 0 1 1 +EXERCISE 3 0 3 3 +EXECUTIONER'S 2 0 2 2 +EXECUTION 2 0 2 2 +EXECUTE 1 0 1 1 +EXECRABLE 1 0 1 1 +EXCUSES 1 0 1 1 +EXCUSE 3 0 3 3 +EXCUSABLE 1 0 1 1 +EXCLAMATION 1 0 1 1 +EXCLAIMING 1 0 1 1 +EXCLAIM 1 0 1 1 +EXCITEMENT 4 0 4 4 +EXCITEDLY 2 0 2 2 +EXCITED 2 0 2 2 +EXCITE 1 0 1 1 +EXCITABILITY 1 0 1 1 +EXCHANGED 2 0 2 2 +EXCHANGE 1 0 1 1 +EXCESSIVELY 1 0 1 1 +EXCESS 1 0 1 1 +EXCEPTIONALLY 2 0 2 2 +EXCEPTION 1 0 1 1 +EXCELLENT 5 0 5 5 +EXCELLENCY 4 0 4 4 +EXCEEDINGLY 1 0 1 1 +EXCEEDING 3 0 3 3 +EXASPERATING 1 0 1 1 +EXAMPLE 3 0 3 3 +EXAMINE 2 0 2 2 +EXAMINATION 3 0 3 3 +EXALTED 1 0 1 1 +EXAGGERATE 1 0 1 1 +EXACTITUDE 2 0 2 2 +EXACT 1 0 1 1 +EVIDENTLY 4 0 4 4 +EVIDENT 3 0 3 3 +EVIDENCE 2 0 2 2 +EVERYWHERE 4 0 4 4 +EVERYTHING'S 1 0 1 1 +EVERYTHING 15 0 15 15 +EVERYBODY 6 0 6 6 +EVENTS 4 0 4 4 +EVENT 1 0 1 1 +EVENING 9 0 9 9 +EVE 2 0 2 2 +EVAPORATION 2 0 2 2 +EVAPORATING 1 0 1 1 +EVADED 1 0 1 1 +EUROPEAN 1 0 1 1 +EUROPE 1 0 1 1 +EUNUCH'S 1 0 1 1 +ETERNITY 1 0 1 1 +ESTRANGE 1 0 1 1 +ESTIMATES 1 0 1 1 +ESTEEM 3 0 3 3 +ESTATE 1 0 1 1 +ESTABLISHMENT 1 0 1 1 +ESTABLISHED 2 0 2 2 +ESTABLISH 1 0 1 1 +ESSENTIALLY 1 0 1 1 +ESSENTIAL 1 0 1 1 +ESSENCE 1 0 1 1 +ESSAY 1 0 1 1 +ESQUIRES 1 0 1 1 +ESPECIAL 1 0 1 1 +ESCAPADE 1 0 1 1 +ERROR 2 0 2 2 +ERRATIC 1 0 1 1 +ERRANT 1 0 1 1 +ERECTS 1 0 1 1 +ERECTED 3 0 3 3 +ERECT 1 0 1 1 +EQUIVALENT 1 0 1 1 +EQUALLY 2 0 2 2 +EPOCH 1 0 1 1 +EPISTLES 1 0 1 1 +EPISTLE 1 0 1 1 +ENVYING 1 0 1 1 +ENVY 3 0 3 3 +ENVIRONMENT 1 0 1 1 +ENVIOUS 1 0 1 1 +ENVIED 1 0 1 1 +ENVELOPE 1 0 1 1 +ENTREATY 1 0 1 1 +ENTREATINGLY 1 0 1 1 +ENTREATIES 1 0 1 1 +ENTREATED 1 0 1 1 +ENTIRELY 3 0 3 3 +ENTHUSIASM 3 0 3 3 +ENTERTAINMENT 1 0 1 1 +ENTERTAINING 1 0 1 1 +ENTERTAIN 1 0 1 1 +ENTER 5 0 5 5 +ENTAILED 1 0 1 1 +ENLISTMENT 1 0 1 1 +ENJOYMENT 3 0 3 3 +ENGRAVED 1 0 1 1 +ENGLAND 3 0 3 3 +ENGAGEMENTS 1 0 1 1 +ENGAGEMENT 1 0 1 1 +ENGAGED 2 0 2 2 +ENGAGE 1 0 1 1 +ENERGY 1 0 1 1 +ENEMY 3 0 3 3 +ENEMIES 2 0 2 2 +ENDURANCE 1 0 1 1 +ENDEAVOURED 1 0 1 1 +ENCOURAGED 2 0 2 2 +ENCOUNTERED 1 0 1 1 +ENCOMPASSED 1 0 1 1 +ENCHANTMENT 2 0 2 2 +ENCHANTED 3 0 3 3 +ENCAMPMENT 1 0 1 1 +ENCAMPED 1 0 1 1 +EMPTY 8 0 8 8 +EMPTIES 1 0 1 1 +EMPTIED 2 0 2 2 +EMPRESSES 1 0 1 1 +EMPLOYED 2 0 2 2 +EMPLOY 1 0 1 1 +EMPIRE 3 0 3 3 +EMPHATIC 2 0 2 2 +EMPHASIZE 1 0 1 1 +EMPERORS 2 0 2 2 +EMPEROR 1 0 1 1 +EMOTIONS 2 0 2 2 +EMIR 2 0 2 2 +EMERGED 1 0 1 1 +EMBROIDERY 1 0 1 1 +EMBRACES 1 0 1 1 +EMBRACED 1 0 1 1 +EMBARRASSMENT 1 0 1 1 +EMBARRASSED 1 0 1 1 +EMBARKED 2 0 2 2 +ELSIE'S 1 0 1 1 +ELSIE 1 0 1 1 +ELKINS 1 0 1 1 +ELEVENTH 1 0 1 1 +ELEVEN 4 0 4 4 +ELEVATION 1 0 1 1 +ELEPHANT 1 0 1 1 +ELEMENTS 1 0 1 1 +ELEGANT 1 0 1 1 +ELECTRIC 1 0 1 1 +ELECTED 2 0 2 2 +ELDEST 1 0 1 1 +ELBOWS 1 0 1 1 +ELBOWED 1 0 1 1 +ELAPSED 1 0 1 1 +ELAPSE 1 0 1 1 +EKED 1 0 1 1 +EJACULATED 1 0 1 1 +EIGHTEENTH 3 0 3 3 +EIGHTEEN 10 0 10 10 +EGYPTIAN 6 0 6 6 +EGYPT 5 0 5 5 +EGG 1 0 1 1 +EFFORTS 1 0 1 1 +EFFORT 4 0 4 4 +EFFECTS 2 0 2 2 +EFFECTIVE 1 0 1 1 +EELS 1 0 1 1 +EDWARD 1 0 1 1 +EDUCATION 2 0 2 2 +EDUCATED 1 0 1 1 +EDGES 1 0 1 1 +EDGE 2 0 2 2 +ECONOMIZE 1 0 1 1 +ECONOMICAL 1 0 1 1 +ECONOMIC 1 0 1 1 +ECHOES 1 0 1 1 +ECHOED 1 0 1 1 +ECCLESIASTICS 1 0 1 1 +EATING 2 0 2 2 +EAT 12 0 12 12 +EASY 9 0 9 9 +EASTERN 2 
0 2 2 +EASIEST 1 0 1 1 +EASE 4 0 4 4 +EARTHEN 1 0 1 1 +EARNEST 9 0 9 9 +EARNED 1 0 1 1 +EARN 2 0 2 2 +EARLY 8 0 8 8 +EARLINESS 1 0 1 1 +EAGLE 4 0 4 4 +EAGERLY 4 0 4 4 +EAGER 2 0 2 2 +DYING 6 0 6 6 +DWELT 1 0 1 1 +DWELLS 1 0 1 1 +DWELLINGS 1 0 1 1 +DWELLERS 1 0 1 1 +DWELL 1 0 1 1 +DWARF 2 0 2 2 +DUTY 10 0 10 10 +DUTIES 2 0 2 2 +DUSTY 1 0 1 1 +DUST 2 0 2 2 +DURING 20 0 20 20 +DURATION 2 0 2 2 +DUNNO 1 0 1 1 +DUN 1 0 1 1 +DUE 5 0 5 5 +DU 1 0 1 1 +DRY 6 0 6 6 +DRUNK 2 0 2 2 +DRUMS 1 0 1 1 +DRUGGED 2 0 2 2 +DROWNING 1 0 1 1 +DROWN 1 0 1 1 +DROVE 1 0 1 1 +DROUTH 1 0 1 1 +DROPS 1 0 1 1 +DROPPED 8 0 8 8 +DROOPING 2 0 2 2 +DRIVING 1 0 1 1 +DRIVEN 1 0 1 1 +DRIVE 5 0 5 5 +DRINKS 1 0 1 1 +DRINKING 4 0 4 4 +DRINKERS 2 0 2 2 +DRIFT 1 0 1 1 +DRIED 1 0 1 1 +DREW 7 0 7 7 +DRESSING 1 0 1 1 +DRESSES 1 0 1 1 +DRESS 1 0 1 1 +DREAMING 2 0 2 2 +DREAMED 1 0 1 1 +DREAM 4 0 4 4 +DREADFULLY 1 0 1 1 +DREAD 3 0 3 3 +DRAWING 9 0 9 9 +DRAT 1 0 1 1 +DRAMATIC 1 0 1 1 +DRAINS 1 0 1 1 +DRAINED 1 0 1 1 +DRAIN 1 0 1 1 +DRAGONS 1 0 1 1 +DRAGON 1 0 1 1 +DRAGGED 1 0 1 1 +DRAG 1 0 1 1 +DOZEN 2 0 2 2 +DOWNSTAIRS 1 0 1 1 +DOWNS 2 0 2 2 +DOWER 1 0 1 1 +DOVES 1 0 1 1 +DOUBTS 2 0 2 2 +DOUBTLESS 2 0 2 2 +DOUBTFUL 1 0 1 1 +DOUBLE 5 0 5 5 +DOT 1 0 1 1 +DOORS 3 0 3 3 +DONOVAN'S 1 0 1 1 +DOMINION 1 0 1 1 +DOMINATES 1 0 1 1 +DOMED 1 0 1 1 +DOME 2 0 2 2 +DOLLARS 2 0 2 2 +DOINGS 1 0 1 1 +DOGGEDLY 1 0 1 1 +DODGING 1 0 1 1 +DIVISION 1 0 1 1 +DIVINE 1 0 1 1 +DIVIDES 1 0 1 1 +DIVERT 1 0 1 1 +DISTURBING 1 0 1 1 +DISTURBED 1 0 1 1 +DISTURBANCE 1 0 1 1 +DISTURB 2 0 2 2 +DISTRICTS 1 0 1 1 +DISTRIBUTED 1 0 1 1 +DISTRIBUTE 1 0 1 1 +DISTRACTED 2 0 2 2 +DISTINGUISH 2 0 2 2 +DISTINCTLY 1 0 1 1 +DISTINCTIVE 1 0 1 1 +DISTINCT 1 0 1 1 +DISTENDED 1 0 1 1 +DISTANT 5 0 5 5 +DISTANCES 1 0 1 1 +DISTANCE 3 0 3 3 +DISTAFF 1 0 1 1 +DISSIPATION 2 0 2 2 +DISSIMULATION 1 0 1 1 +DISSENTERING 1 0 1 1 +DISSENSIONS 2 0 2 2 +DISREGARDED 1 0 1 1 +DISPUTED 1 0 1 1 +DISPUTE 1 0 1 1 +DISPROVE 1 0 1 1 +DISPOSITION 2 0 2 2 +DISPOSAL 1 0 1 1 +DISPLEASED 1 0 1 1 +DISPLAY 1 0 1 1 +DISPERSED 2 0 2 2 +DISPENSED 1 0 1 1 +DISPENSE 1 0 1 1 +DISMAL 1 0 1 1 +DISHONEST 1 0 1 1 +DISHES 7 0 7 7 +DISH 2 0 2 2 +DISGUST 1 0 1 1 +DISGRACE 4 0 4 4 +DISENTANGLE 1 0 1 1 +DISEASE 1 0 1 1 +DISCUSSIONS 1 0 1 1 +DISCUSSION 1 0 1 1 +DISCUSSED 3 0 3 3 +DISCRIMINATION 1 0 1 1 +DISCRETION 1 0 1 1 +DISCOVERY 3 0 3 3 +DISCOVERIES 1 0 1 1 +DISCOVERED 4 0 4 4 +DISCOVER 2 0 2 2 +DISCOURSES 1 0 1 1 +DISCOURAGEMENTS 1 0 1 1 +DISCONTENT 1 0 1 1 +DISCONCERTION 1 0 1 1 +DISCOMFORT 1 0 1 1 +DISCLOSURES 1 0 1 1 +DISCLOSE 1 0 1 1 +DISCLAIM 1 0 1 1 +DISCIPLINE 1 0 1 1 +DISCERNING 1 0 1 1 +DISAPPOINTED 2 0 2 2 +DISAPPEARS 1 0 1 1 +DISAPPEARED 5 0 5 5 +DISAPPEAR 1 0 1 1 +DISADVANTAGES 3 0 3 3 +DISADVANTAGEOUS 1 0 1 1 +DIRTY 2 0 2 2 +DIRK 2 0 2 2 +DIRECTLY 3 0 3 3 +DIRECTIONS 1 0 1 1 +DIRECTION 7 0 7 7 +DIRECTED 3 0 3 3 +DIRECT 2 0 2 2 +DIP 2 0 2 2 +DINSMORE 2 0 2 2 +DINNERS 1 0 1 1 +DINNER 6 0 6 6 +DINING 1 0 1 1 +DINERS 1 0 1 1 +DINE 2 0 2 2 +DIMPLED 1 0 1 1 +DIMLY 1 0 1 1 +DIMINISHED 1 0 1 1 +DIMINISH 1 0 1 1 +DIM 2 0 2 2 +DILIGENTLY 1 0 1 1 +DILAPIDATED 1 0 1 1 +DIGNITY 4 0 4 4 +DIGGERS 2 0 2 2 +DIGGER 10 0 10 10 +DIGESTION 3 0 3 3 +DIFFICULTY 7 0 7 7 +DIFFICULT 2 0 2 2 +DIFFERENT 7 0 7 7 +DIFFERENCES 1 0 1 1 +DIFFERENCE 7 0 7 7 +DIFFER 1 0 1 1 +DIED 13 0 13 13 +DIDST 1 0 1 1 +DICTATED 1 0 1 1 +DICE 1 0 1 1 +DIAMETER 1 0 1 1 +DIALOGUE 1 0 1 1 +DEVOURED 1 0 1 1 +DEVOTIONS 1 0 1 1 +DEVOTION 1 0 1 1 +DEVILS 2 0 2 2 +DEVIL 4 0 4 4 +DEVICE 1 0 1 1 +DEVELOPED 1 0 1 1 +DETECTIVE'S 1 0 1 1 +DETECTION 1 0 1 1 +DETECTED 1 0 1 1 
+DETAILS 2 0 2 2 +DETAILED 3 0 3 3 +DESTROYS 1 0 1 1 +DESTROYER 1 0 1 1 +DESTROYED 4 0 4 4 +DESTROY 3 0 3 3 +DESTINED 1 0 1 1 +DESTINATION 1 0 1 1 +DESSERT 2 0 2 2 +DESPOTISM 2 0 2 2 +DESPOILED 1 0 1 1 +DESPISED 1 0 1 1 +DESPISE 1 0 1 1 +DESPERATELY 1 0 1 1 +DESPERATE 2 0 2 2 +DESPAIR 2 0 2 2 +DESIRED 2 0 2 2 +DESIRABLE 1 0 1 1 +DESERVING 1 0 1 1 +DESERVES 1 0 1 1 +DESERVE 2 0 2 2 +DESERTING 1 0 1 1 +DESERTED 2 0 2 2 +DESERT 1 0 1 1 +DESCRIPTION 2 0 2 2 +DESCRIBED 1 0 1 1 +DESCRIBE 1 0 1 1 +DESCEND 1 0 1 1 +DERIVE 1 0 1 1 +DEPRESSION 1 0 1 1 +DEPRECATINGLY 1 0 1 1 +DEPOSITED 1 0 1 1 +DEPOSED 1 0 1 1 +DEPLORED 1 0 1 1 +DEPENDS 2 0 2 2 +DEPENDENCE 1 0 1 1 +DEPEND 1 0 1 1 +DEPARTURE 2 0 2 2 +DEPARTMENT 2 0 2 2 +DEPARTING 1 0 1 1 +DEPARTED 3 0 3 3 +DENY 1 0 1 1 +DENOUNCED 1 0 1 1 +DENOTING 1 0 1 1 +DENIS 2 0 2 2 +DENIAL 1 0 1 1 +DEN 1 0 1 1 +DEMANDS 3 0 3 3 +DEMANDED 1 0 1 1 +DELUSION 2 0 2 2 +DELIVERY 1 0 1 1 +DELIVERER 1 0 1 1 +DELIVER 2 0 2 2 +DELIGHTFUL 2 0 2 2 +DELIGHTED 2 0 2 2 +DELIGHT 7 0 7 7 +DELICIOUSLY 1 0 1 1 +DELICATE 3 0 3 3 +DELIBERATELY 1 0 1 1 +DELAYED 1 0 1 1 +DELAY 3 0 3 3 +DEJECTION 1 0 1 1 +DEITY 1 0 1 1 +DEGREE 1 0 1 1 +DEGENERATING 1 0 1 1 +DEFYING 1 0 1 1 +DEFRAUD 1 0 1 1 +DEFORMED 2 0 2 2 +DEFINED 1 0 1 1 +DEFIANT 1 0 1 1 +DEFERENCE 1 0 1 1 +DEFENDING 1 0 1 1 +DEFENDERS 2 0 2 2 +DEFEAT 1 0 1 1 +DEEPLY 3 0 3 3 +DEEMED 1 0 1 1 +DECORATION 1 0 1 1 +DECLINED 1 0 1 1 +DECLARED 1 0 1 1 +DECKS 1 0 1 1 +DECK 6 0 6 6 +DECISION 3 0 3 3 +DECIDED 5 0 5 5 +DECIDE 2 0 2 2 +DECEPTION 1 0 1 1 +DECEMBER 2 0 2 2 +DECEIVED 5 0 5 5 +DECEIVE 1 0 1 1 +DECEASED 1 0 1 1 +DECAY 1 0 1 1 +DEBATED 1 0 1 1 +DEBATE 2 0 2 2 +DEATHS 1 0 1 1 +DEARER 1 0 1 1 +DEALT 2 0 2 2 +DEALER 1 0 1 1 +DEAF 1 0 1 1 +DAZED 1 0 1 1 +DAYLIGHT 2 0 2 2 +DAYBREAK 2 0 2 2 +DAY'S 1 0 1 1 +DAWNED 2 0 2 2 +DAWN 4 0 4 4 +DAVID 2 0 2 2 +DAUNTED 1 0 1 1 +DAUGHTER'S 1 0 1 1 +DAUGHTER 10 0 10 10 +DASHING 1 0 1 1 +DASHED 1 0 1 1 +DARTED 1 0 1 1 +DARKNESS 7 0 7 7 +DARING 1 0 1 1 +DARCY'S 1 0 1 1 +DARCY 6 0 6 6 +DANGERS 1 0 1 1 +DANGEROUS 2 0 2 2 +DANGER 11 0 11 11 +DANDY 1 0 1 1 +DANCE 2 0 2 2 +DAMPNESS 1 0 1 1 +DAMNED 1 0 1 1 +DAMES 1 0 1 1 +DAMASCUS 4 0 4 4 +DAM 1 0 1 1 +DADDY 1 0 1 1 +CYNTHIA 2 0 2 2 +CUTTER'S 1 0 1 1 +CUTTER 3 0 3 3 +CUSHION 1 0 1 1 +CURVED 2 0 2 2 +CURTAINS 2 0 2 2 +CURSES 1 0 1 1 +CURSED 2 0 2 2 +CURRENT 1 0 1 1 +CURRENCY 1 0 1 1 +CURIOUS 4 0 4 4 +CURED 1 0 1 1 +CURE 4 0 4 4 +CURATE 2 0 2 2 +CUPBOARD 2 0 2 2 +CULTURED 1 0 1 1 +CULTURE 1 0 1 1 +CULTIVATED 2 0 2 2 +CULT 1 0 1 1 +CUBITS 1 0 1 1 +CRYSTALLINE 1 0 1 1 +CRY 2 0 2 2 +CRUSHED 1 0 1 1 +CRUSADER 1 0 1 1 +CRUELTY 4 0 4 4 +CRUEL 4 0 4 4 +CRUDE 1 0 1 1 +CRUCIFIXION 9 0 9 9 +CROWNED 1 0 1 1 +CROWN 3 0 3 3 +CROWDED 2 0 2 2 +CROWD 5 0 5 5 +CROSSED 5 0 5 5 +CROSS 8 0 8 8 +CROOKED 1 0 1 1 +CROAKING 1 0 1 1 +CRITICS 1 0 1 1 +CRITICAL 2 0 2 2 +CRIPPLED 2 0 2 2 +CRIMSON 1 0 1 1 +CRIMINALS 1 0 1 1 +CRIMINAL 1 0 1 1 +CRIME 5 0 5 5 +CRIED 21 0 21 21 +CRICKETS 1 0 1 1 +CREPT 1 0 1 1 +CREEPY 1 0 1 1 +CREEPING 1 0 1 1 +CREDITS 3 0 3 3 +CREDIT 5 0 5 5 +CREATURES 2 0 2 2 +CREATURE 4 0 4 4 +CREATOR 4 0 4 4 +CREATIONS 1 0 1 1 +CREATING 1 0 1 1 +CREATED 3 0 3 3 +CREATE 3 0 3 3 +CREASES 1 0 1 1 +CREASED 1 0 1 1 +CREAKED 1 0 1 1 +CRAYFISH 3 0 3 3 +CRAWLED 2 0 2 2 +CRASHED 1 0 1 1 +CRASH 1 0 1 1 +CRAFT 1 0 1 1 +CRACKERS 1 0 1 1 +CRACKED 2 0 2 2 +COWARDS 1 0 1 1 +COWARD 1 0 1 1 +COVERING 1 0 1 1 +COVERED 5 0 5 5 +COVER 1 0 1 1 +COVE 1 0 1 1 +COUSINS 1 0 1 1 +COUSIN 10 0 10 10 +COURAGE 4 0 4 4 +COUPLETS 1 0 1 1 +COUPLE 2 0 2 2 +COUNTY 9 0 9 9 +COUNTESS 1 0 1 1 +COUNTER 1 0 1 1 
+COUNT'S 2 0 2 2 +COUNSELLED 1 0 1 1 +COUGHING 2 0 2 2 +COUGH 3 0 3 3 +COTTONY 1 0 1 1 +COTTON 3 0 3 3 +COSTUME 1 0 1 1 +COSETTE 2 0 2 2 +CORSICAN 1 0 1 1 +CORRIDOR 2 0 2 2 +CORRESPONDENCE 1 0 1 1 +CORRECT 1 0 1 1 +CORPSES 1 0 1 1 +CORPSE 3 0 3 3 +CORPORATIONS 1 0 1 1 +CORNERS 1 0 1 1 +CORDIAL 1 0 1 1 +COPY 1 0 1 1 +COOLNESS 2 0 2 2 +COOKING 1 0 1 1 +COOK 4 0 4 4 +CONVINCING 1 0 1 1 +CONVINCED 1 0 1 1 +CONVICTION 3 0 3 3 +CONVEYANCE 1 0 1 1 +CONVERTS 1 0 1 1 +CONVERSATION 10 0 10 10 +CONVENTIONS 1 0 1 1 +CONVENTION 1 0 1 1 +CONVENT 4 0 4 4 +CONVENIENCES 1 0 1 1 +CONTRIVE 1 0 1 1 +CONTRARY 5 0 5 5 +CONTRADICTION 1 0 1 1 +CONTRACTED 1 0 1 1 +CONTRACT 3 0 3 3 +CONTINUED 11 0 11 11 +CONTINUE 3 0 3 3 +CONTINUATION 1 0 1 1 +CONTINUANCE 1 0 1 1 +CONTINUALLY 1 0 1 1 +CONTINGENT 1 0 1 1 +CONTENTS 1 0 1 1 +CONTENTION 1 0 1 1 +CONTENTED 1 0 1 1 +CONTENT 1 0 1 1 +CONTEMPORARY 2 0 2 2 +CONTAINS 1 0 1 1 +CONTAINING 2 0 2 2 +CONTAINED 1 0 1 1 +CONTAIN 1 0 1 1 +CONTAGIOUS 2 0 2 2 +CONTACT 3 0 3 3 +CONSUMED 2 0 2 2 +CONSULTED 3 0 3 3 +CONSULTATIONS 1 0 1 1 +CONSTRUCT 1 0 1 1 +CONSTRAINED 1 0 1 1 +CONSTANTLY 3 0 3 3 +CONSTANTIUS 1 0 1 1 +CONSPIRATORS 2 0 2 2 +CONSPIRACY 1 0 1 1 +CONSORTED 1 0 1 1 +CONSOLES 1 0 1 1 +CONSISTS 2 0 2 2 +CONSISTENCY 1 0 1 1 +CONSISTED 1 0 1 1 +CONSIDERING 2 0 2 2 +CONSIDERED 3 0 3 3 +CONSIDERATION 2 0 2 2 +CONSIDERABLE 6 0 6 6 +CONSIDER 1 0 1 1 +CONSERVATIVE 2 0 2 2 +CONSEQUENCES 1 0 1 1 +CONSEQUENCE 1 0 1 1 +CONSENTED 1 0 1 1 +CONSENT 2 0 2 2 +CONSCIOUSNESS 2 0 2 2 +CONSCIOUSLY 1 0 1 1 +CONSCIENTIOUS 1 0 1 1 +CONSCIENCES 1 0 1 1 +CONSCIENCE 3 0 3 3 +CONQUEST 3 0 3 3 +CONQUEROR 1 0 1 1 +CONQUERING 1 0 1 1 +CONQUERED 2 0 2 2 +CONQUER 1 0 1 1 +CONNOISSEUR 1 0 1 1 +CONNECTIONS 1 0 1 1 +CONNECTION 4 0 4 4 +CONNECTED 1 0 1 1 +CONNECT 2 0 2 2 +CONJECTURES 1 0 1 1 +CONGRESSES 1 0 1 1 +CONGRESS 3 0 3 3 +CONGEALETH 1 0 1 1 +CONFUSION 4 0 4 4 +CONFOUND 1 0 1 1 +CONFLICT 2 0 2 2 +CONFISCATION 1 0 1 1 +CONFIRMS 1 0 1 1 +CONFIRMED 2 0 2 2 +CONFINEMENT 1 0 1 1 +CONFIDENTIAL 1 0 1 1 +CONFIDENCE 3 0 3 3 +CONFESSION 4 0 4 4 +CONFESSED 2 0 2 2 +CONFESS 9 0 9 9 +CONFERRING 1 0 1 1 +CONFERENCE 1 0 1 1 +CONFECTIONS 1 0 1 1 +CONFECTIONER 1 0 1 1 +CONDUCTED 2 0 2 2 +CONDUCT 4 0 4 4 +CONDITIONS 4 0 4 4 +CONDITION 4 0 4 4 +CONDESCEND 1 0 1 1 +CONDEMNED 2 0 2 2 +CONCLUSION 2 0 2 2 +CONCILIATE 1 0 1 1 +CONCIERGE'S 1 0 1 1 +CONCERNS 1 0 1 1 +CONCERNING 1 0 1 1 +CONCERN 2 0 2 2 +CONCEPTION 5 0 5 5 +CONCENTRATED 2 0 2 2 +CONCENTRATE 1 0 1 1 +CONCEIVE 1 0 1 1 +CONCEITED 1 0 1 1 +CONCEAL 3 0 3 3 +COMTE 1 0 1 1 +COMRADE 3 0 3 3 +COMPULSORY 1 0 1 1 +COMPREHENDED 1 0 1 1 +COMPOUND 1 0 1 1 +COMPOSURE 1 0 1 1 +COMPOSITION 1 0 1 1 +COMPOSED 1 0 1 1 +COMPLY 1 0 1 1 +COMPLIMENT 1 0 1 1 +COMPLICITY 1 0 1 1 +COMPLETELY 6 0 6 6 +COMPLETED 1 0 1 1 +COMPLETE 1 0 1 1 +COMPLAIN 1 0 1 1 +COMPETITION 1 0 1 1 +COMPELLING 1 0 1 1 +COMPELLED 1 0 1 1 +COMPASS 1 0 1 1 +COMPARATIVELY 1 0 1 1 +COMPANY 13 0 13 13 +COMPANIONS 3 0 3 3 +COMPANION'S 1 0 1 1 +COMPANION 4 0 4 4 +COMMUNICATION 2 0 2 2 +COMMUNICATES 2 0 2 2 +COMMUNICATED 1 0 1 1 +COMMUNICANTS 1 0 1 1 +COMMONS 3 0 3 3 +COMMONLY 1 0 1 1 +COMMONERS 1 0 1 1 +COMMON 3 0 3 3 +COMMITTED 4 0 4 4 +COMMISSIONED 1 0 1 1 +COMMISSION 1 0 1 1 +COMMENDING 1 0 1 1 +COMMENDED 1 0 1 1 +COMMANDING 2 0 2 2 +COMMANDED 2 0 2 2 +COMMAND 2 0 2 2 +COMFORTABLE 2 0 2 2 +COMFORT 2 0 2 2 +COMBATIVE 1 0 1 1 +COMBAT 2 0 2 2 +COMB 1 0 1 1 +COLOURED 1 0 1 1 +COLOUR 2 0 2 2 +COLOSSEUM 1 0 1 1 +COLOSSAL 1 0 1 1 +COLOR 2 0 2 2 +COLONELS 1 0 1 1 +COLLECTOR'S 1 0 1 1 +COLLECTOR 1 0 1 1 +COLLECTION 1 0 1 1 
+COLLECTING 2 0 2 2 +COLLAR 2 0 2 2 +COLIC 1 0 1 1 +COLCHESTER 5 0 5 5 +COINCIDENCES 1 0 1 1 +COIL 1 0 1 1 +COFFIN 20 0 20 20 +COFFEE 1 0 1 1 +COCKING 1 0 1 1 +COAST 2 0 2 2 +COACH 3 0 3 3 +CLUTCHING 1 0 1 1 +CLUTCH 1 0 1 1 +CLUNG 1 0 1 1 +CLUBBED 1 0 1 1 +CLUB 3 0 3 3 +CLOVER 1 0 1 1 +CLOUDS 1 0 1 1 +CLOTHES 8 0 8 8 +CLOTHE 1 0 1 1 +CLOSING 2 0 2 2 +CLOSES 1 0 1 1 +CLOSER 1 0 1 1 +CLOSELY 5 0 5 5 +CLOSED 4 0 4 4 +CLOSE 14 0 14 14 +CLOISTER 3 0 3 3 +CLOGGED 2 0 2 2 +CLERK 2 0 2 2 +CLERICAL 2 0 2 2 +CLENCHING 1 0 1 1 +CLEMENT 1 0 1 1 +CLEMENCY 1 0 1 1 +CLEARLY 1 0 1 1 +CLEARER 1 0 1 1 +CLEARED 2 0 2 2 +CLEAR 7 0 7 7 +CLEANED 2 0 2 2 +CLEAN 4 0 4 4 +CLASPED 1 0 1 1 +CLASP 1 0 1 1 +CLASHING 1 0 1 1 +CLAPPED 1 0 1 1 +CLANKING 1 0 1 1 +CLAIR 3 0 3 3 +CLAIMS 1 0 1 1 +CLAIMED 1 0 1 1 +CIVILIZED 1 0 1 1 +CIVILITY 1 0 1 1 +CIVILITIES 1 0 1 1 +CITY 16 0 16 16 +CITIZEN 1 0 1 1 +CIRCUMSTANTIAL 1 0 1 1 +CIRCUMSTANCES 6 0 6 6 +CIRCULAR 1 0 1 1 +CIRCUIT 1 0 1 1 +CIRCLES 1 0 1 1 +CIRCLE 2 0 2 2 +CILLEY 1 0 1 1 +CIDER 1 0 1 1 +CHURCHYARDS 1 0 1 1 +CHURCH 14 0 14 14 +CHUCKLED 3 0 3 3 +CHRYSIPPUS 2 0 2 2 +CHRISTMAS 1 0 1 1 +CHRIS'S 1 0 1 1 +CHOSEN 3 0 3 3 +CHOSE 2 0 2 2 +CHOP 1 0 1 1 +CHOKE 1 0 1 1 +CHOIR 2 0 2 2 +CHOICE 1 0 1 1 +CHIRP 1 0 1 1 +CHINESE 1 0 1 1 +CHIN 1 0 1 1 +CHIMNEY 7 0 7 7 +CHIMES 1 0 1 1 +CHILDREN 13 0 13 13 +CHILDLESS 1 0 1 1 +CHILDHOOD 1 0 1 1 +CHILD'S 1 0 1 1 +CHILD 6 0 6 6 +CHEWERS 2 0 2 2 +CHERISHED 1 0 1 1 +CHEFS 1 0 1 1 +CHEESE 1 0 1 1 +CHEERFULNESS 1 0 1 1 +CHEERFULLY 1 0 1 1 +CHEERFUL 4 0 4 4 +CHEEKED 1 0 1 1 +CHECKING 1 0 1 1 +CHECKED 1 0 1 1 +CHEATING 1 0 1 1 +CHEAPLY 1 0 1 1 +CHATTING 1 0 1 1 +CHASSEUR 1 0 1 1 +CHASM 1 0 1 1 +CHASED 1 0 1 1 +CHARMS 1 0 1 1 +CHARMING 3 0 3 3 +CHARM 3 0 3 3 +CHARLIE 1 0 1 1 +CHARLES 2 0 2 2 +CHARITY 1 0 1 1 +CHARIOT 1 0 1 1 +CHARGES 2 0 2 2 +CHARGER 1 0 1 1 +CHARCOAL 1 0 1 1 +CHARACTER 6 0 6 6 +CHAPTERS 1 0 1 1 +CHAPS 1 0 1 1 +CHAPLET 1 0 1 1 +CHAPEL 6 0 6 6 +CHAP 1 0 1 1 +CHANTED 1 0 1 1 +CHANNEL 3 0 3 3 +CHANGING 2 0 2 2 +CHANGED 4 0 4 4 +CHANCELLOR'S 1 0 1 1 +CHANCELLOR 6 0 6 6 +CHAMPIONS 1 0 1 1 +CHAMPAGNE 1 0 1 1 +CHAMBERLAIN 6 0 6 6 +CHAMBER 5 0 5 5 +CHAIR 5 0 5 5 +CETERA 2 0 2 2 +CESSATION 1 0 1 1 +CERTIFIED 1 0 1 1 +CEREMONY 1 0 1 1 +CENTURY 3 0 3 3 +CENTURIES 5 0 5 5 +CENTRES 1 0 1 1 +CENTRAL 6 0 6 6 +CENT 1 0 1 1 +CEMETERY 1 0 1 1 +CELLARS 1 0 1 1 +CELIA 1 0 1 1 +CELERY 1 0 1 1 +CELEBRATED 3 0 3 3 +CEASED 7 0 7 7 +CAVALRY 1 0 1 1 +CAUTIOUSLY 1 0 1 1 +CAUTION 1 0 1 1 +CAUSED 1 0 1 1 +CATCHING 1 0 1 1 +CATCH 6 0 6 6 +CAT 3 0 3 3 +CASTRATO 2 0 2 2 +CASTING 2 0 2 2 +CASKET 1 0 1 1 +CASHIER 1 0 1 1 +CASES 2 0 2 2 +CARVED 3 0 3 3 +CARTHUSIANS 1 0 1 1 +CARS 1 0 1 1 +CARRY 7 0 7 7 +CARROT 1 0 1 1 +CARPET 1 0 1 1 +CARPENTER 1 0 1 1 +CAROLINA 1 0 1 1 +CARGO 1 0 1 1 +CAREWORN 2 0 2 2 +CARESSES 1 0 1 1 +CAREFULLY 3 0 3 3 +CAREFUL 4 0 4 4 +CARDS 1 0 1 1 +CARDINALS 1 0 1 1 +CARBONATE 1 0 1 1 +CARAVAN 1 0 1 1 +CAR 5 0 5 5 +CAPTURE 3 0 3 3 +CAPTOR 1 0 1 1 +CAPTIVE 3 0 3 3 +CAPTAIN'S 1 0 1 1 +CAPERING 1 0 1 1 +CAPERED 1 0 1 1 +CAPABLE 2 0 2 2 +CAPABILITIES 1 0 1 1 +CAP'S 1 0 1 1 +CANVAS 1 0 1 1 +CANST 1 0 1 1 +CANONIZED 1 0 1 1 +CANOE 1 0 1 1 +CANE 1 0 1 1 +CANDLESTICKS 1 0 1 1 +CANDLESTICK 2 0 2 2 +CANDLES 1 0 1 1 +CANDLE 3 0 3 3 +CANAL 1 0 1 1 +CAMPED 1 0 1 1 +CAMPAIGNS 2 0 2 2 +CAMP 1 0 1 1 +CAMOUFLAGE 1 0 1 1 +CAMEL 2 0 2 2 +CALMLY 2 0 2 2 +CALLS 1 0 1 1 +CALLING 2 0 2 2 +CALIPH 1 0 1 1 +CALENDAR 1 0 1 1 +CALCULATE 1 0 1 1 +CAIRO 2 0 2 2 +CAFE 1 0 1 1 +CAESARS 1 0 1 1 +CADET 1 0 1 1 +CABLE'S 1 0 1 1 +CABIN 4 0 4 4 +CABARET 1 0 1 1 +BUYING 2 0 2 2 
+BUTTERFLIES 1 0 1 1 +BUTTER 5 0 5 5 +BUSTED 2 0 2 2 +BUST 1 0 1 1 +BUSINESSES 1 0 1 1 +BUSHY 2 0 2 2 +BUSHES 1 0 1 1 +BURSTING 1 0 1 1 +BURNING 1 0 1 1 +BURN 1 0 1 1 +BURIED 7 0 7 7 +BURIAL 1 0 1 1 +BURGUNDY 1 0 1 1 +BUNKER 1 0 1 1 +BUNDLES 2 0 2 2 +BUNDLED 1 0 1 1 +BUMS 1 0 1 1 +BULLOCK 1 0 1 1 +BULLET 2 0 2 2 +BULB 1 0 1 1 +BUILT 1 0 1 1 +BUILDINGS 1 0 1 1 +BUILDING 3 0 3 3 +BUGLE 1 0 1 1 +BUGGY 2 0 2 2 +BUFF 1 0 1 1 +BUD 1 0 1 1 +BUCKLEY 1 0 1 1 +BUBBLES 1 0 1 1 +BRUTE 2 0 2 2 +BRUTALLY 1 0 1 1 +BRUTAL 1 0 1 1 +BRUSHED 2 0 2 2 +BRUISING 1 0 1 1 +BROTHERLY 1 0 1 1 +BROTH 1 0 1 1 +BRONZE 1 0 1 1 +BROKER'S 1 0 1 1 +BROKEN 10 0 10 10 +BROKE 7 0 7 7 +BROAD 3 0 3 3 +BRITISH 1 0 1 1 +BRINGING 3 0 3 3 +BRINGETH 2 0 2 2 +BRIM 1 0 1 1 +BRIGHT 5 0 5 5 +BRIGANDS 1 0 1 1 +BRIDGE 4 0 4 4 +BRIDE 3 0 3 3 +BRICKS 1 0 1 1 +BRICK 1 0 1 1 +BREWING 1 0 1 1 +BRETHREN 3 0 3 3 +BRED 1 0 1 1 +BREATHING 1 0 1 1 +BREASTS 1 0 1 1 +BREAKS 3 0 3 3 +BREAKING 3 0 3 3 +BREAKFAST 4 0 4 4 +BREAK 6 0 6 6 +BREAD 3 0 3 3 +BREACH 1 0 1 1 +BRAVELY 2 0 2 2 +BRANDON 1 0 1 1 +BRANCHES 2 0 2 2 +BRANCH 3 0 3 3 +BRAG 1 0 1 1 +BRADFORD 1 0 1 1 +BRACKETS 1 0 1 1 +BOYS 8 0 8 8 +BOXES 2 0 2 2 +BOWL 1 0 1 1 +BOWED 2 0 2 2 +BOW 3 0 3 3 +BOURGEOIS 1 0 1 1 +BOUQUET 2 0 2 2 +BOUNTY 1 0 1 1 +BOUND 5 0 5 5 +BOULEVARD 1 0 1 1 +BOTTLES 1 0 1 1 +BOTTLE 4 0 4 4 +BOSTON 3 0 3 3 +BOSOM 3 0 3 3 +BORROWED 1 0 1 1 +BORED 1 0 1 1 +BORE 2 0 2 2 +BORDERS 1 0 1 1 +BORDER 1 0 1 1 +BOOTY 1 0 1 1 +BOOTS 2 0 2 2 +BOOT 1 0 1 1 +BOOMED 2 0 2 2 +BOOKS 1 0 1 1 +BOOKLET 1 0 1 1 +BOOK 8 0 8 8 +BONNETS 2 0 2 2 +BONNET 1 0 1 1 +BONDAGE 2 0 2 2 +BOLTS 1 0 1 1 +BOLDER 1 0 1 1 +BOILING 3 0 3 3 +BOILER 1 0 1 1 +BOILED 2 0 2 2 +BOEUF 2 0 2 2 +BODY 13 0 13 13 +BODILY 3 0 3 3 +BODIES 2 0 2 2 +BOAT'S 1 0 1 1 +BOAST 1 0 1 1 +BOARDS 1 0 1 1 +BOARDING 1 0 1 1 +BOARD 5 0 5 5 +BLURTED 1 0 1 1 +BLUNTLY 1 0 1 1 +BLUNTED 1 0 1 1 +BLUBBERING 1 0 1 1 +BLOWS 1 0 1 1 +BLOWN 2 0 2 2 +BLOWING 2 0 2 2 +BLOSSOM 1 0 1 1 +BLOOM 1 0 1 1 +BLIZZARD'S 1 0 1 1 +BLIZZARD 2 0 2 2 +BLINKED 2 0 2 2 +BLINDNESS 1 0 1 1 +BLINDING 1 0 1 1 +BLINDED 1 0 1 1 +BLIND 5 0 5 5 +BLEW 1 0 1 1 +BLESSINGS 1 0 1 1 +BLESSING 2 0 2 2 +BLESS 3 0 3 3 +BLEND 1 0 1 1 +BLEAK 1 0 1 1 +BLAZING 1 0 1 1 +BLANKLY 1 0 1 1 +BLANK 1 0 1 1 +BLAMING 1 0 1 1 +BLAMED 1 0 1 1 +BLAME 2 0 2 2 +BLADE 1 0 1 1 +BLACKSTONE 1 0 1 1 +BLACKGUARD 1 0 1 1 +BITE 1 0 1 1 +BISHOPS 1 0 1 1 +BISHOP 4 0 4 4 +BISCUIT 1 0 1 1 +BIRTHPLACE 1 0 1 1 +BIRTHDAY 1 0 1 1 +BIRTH 1 0 1 1 +BIRD 1 0 1 1 +BIRCH 1 0 1 1 +BIND 1 0 1 1 +BILLS 2 0 2 2 +BILIOUS 1 0 1 1 +BIGGER 1 0 1 1 +BEYOND 7 0 7 7 +BEWILDERMENT 1 0 1 1 +BEWARE 2 0 2 2 +BETWIXT 1 0 1 1 +BETRAY 1 0 1 1 +BETOOK 1 0 1 1 +BETIDETH 1 0 1 1 +BESTOW 3 0 3 3 +BESS 1 0 1 1 +BESPAKE 1 0 1 1 +BESOUGHT 1 0 1 1 +BESIEGERS 2 0 2 2 +BESEECH 2 0 2 2 +BERNARDONE 1 0 1 1 +BERNARD 4 0 4 4 +BEQUEATH 2 0 2 2 +BENT 2 0 2 2 +BENJAMIN 1 0 1 1 +BENCH 3 0 3 3 +BELOW 2 0 2 2 +BELONG 1 0 1 1 +BELLS 4 0 4 4 +BELLIES 1 0 1 1 +BELIEVING 1 0 1 1 +BELIEVES 1 0 1 1 +BELIEVED 6 0 6 6 +BELIEVE 16 0 16 16 +BELIEF 5 0 5 5 +BEINGS 3 0 3 3 +BEHOLDING 1 0 1 1 +BEHOLD 5 0 5 5 +BEHIND 16 0 16 16 +BEHAVED 3 0 3 3 +BEHALF 1 0 1 1 +BEGUILED 1 0 1 1 +BEGINNING 6 0 6 6 +BEGGED 9 0 9 9 +BEGGAR 1 0 1 1 +BEFITTING 1 0 1 1 +BEFALLEN 1 0 1 1 +BEDOUIN 1 0 1 1 +BED 14 0 14 14 +BECOMES 6 0 6 6 +BECOME 15 0 15 15 +BECKY 1 0 1 1 +BECAUSE 34 0 34 34 +BEAVER 1 0 1 1 +BEAUTY 4 0 4 4 +BEAUTIFULLY 1 0 1 1 +BEAUTIFUL 8 0 8 8 +BEATEN 2 0 2 2 +BEAT 6 0 6 6 +BEASTS 4 0 4 4 +BEASTLY 1 0 1 1 +BEAST 2 0 2 2 +BEAR 8 0 8 8 +BEAMS 2 0 2 2 +BEAD 1 0 1 1 +BEACON 2 0 2 2 +BEACH 
2 0 2 2 +BAY 1 0 1 1 +BATTLE 2 0 2 2 +BATTERY 1 0 1 1 +BATHING 1 0 1 1 +BATHED 1 0 1 1 +BASKING 1 0 1 1 +BASKETS 1 0 1 1 +BASIS 2 0 2 2 +BASIN 1 0 1 1 +BASER 1 0 1 1 +BASED 2 0 2 2 +BARS 5 0 5 5 +BARRIER 1 0 1 1 +BARRICADES 1 0 1 1 +BARRED 1 0 1 1 +BARRACK 2 0 2 2 +BARONET 1 0 1 1 +BARKING 1 0 1 1 +BARK 1 0 1 1 +BARE 1 0 1 1 +BARBAROUS 1 0 1 1 +BARBARITY 1 0 1 1 +BAR 7 0 7 7 +BAPTIST 1 0 1 1 +BANQUET 3 0 3 3 +BANKER 1 0 1 1 +BANK 9 0 9 9 +BANDS 1 0 1 1 +BANDITS 1 0 1 1 +BANDIT 1 0 1 1 +BALSAM 1 0 1 1 +BALE 1 0 1 1 +BAKING 1 0 1 1 +BAIL 1 0 1 1 +BAH 1 0 1 1 +BAGS 2 0 2 2 +BAGGY 1 0 1 1 +BAGGAGE 1 0 1 1 +BADLY 3 0 3 3 +BADGE 1 0 1 1 +BACON 1 0 1 1 +BACKGROUND 3 0 3 3 +BACHELOR 1 0 1 1 +BABYLONIA 1 0 1 1 +B 3 0 3 3 +AZURE 1 0 1 1 +AWOKE 1 0 1 1 +AWKWARDNESS 1 0 1 1 +AWKWARDLY 1 0 1 1 +AWFUL 4 0 4 4 +AWE 1 0 1 1 +AWARE 2 0 2 2 +AWAKENING 2 0 2 2 +AWAKENED 1 0 1 1 +AWAITS 1 0 1 1 +AWAITED 1 0 1 1 +AWAIT 1 0 1 1 +AVOIDED 1 0 1 1 +AVOID 4 0 4 4 +AVERAGE 2 0 2 2 +AUTOMATICALLY 1 0 1 1 +AUTOCRACY 1 0 1 1 +AUTHORITY 10 0 10 10 +AUTHORITIES 1 0 1 1 +AUTHOR 1 0 1 1 +AUTHENTIC 1 0 1 1 +AUSTRIA 1 0 1 1 +AUSPICIOUS 3 0 3 3 +AUGMENTED 1 0 1 1 +AUGHT 2 0 2 2 +AUDACIOUS 1 0 1 1 +ATTRACTIVE 3 0 3 3 +ATTRACTED 2 0 2 2 +ATTORNEY 1 0 1 1 +ATTENTIVELY 4 0 4 4 +ATTENDING 1 0 1 1 +ATTEMPTING 1 0 1 1 +ATTEMPT 2 0 2 2 +ATTAINED 1 0 1 1 +ATTACKS 3 0 3 3 +ATTACKED 1 0 1 1 +ATTACK 2 0 2 2 +ATTACHMENT 1 0 1 1 +ATTACHED 1 0 1 1 +ATMOSPHERE 1 0 1 1 +ATE 1 0 1 1 +ASUNDER 1 0 1 1 +ASTONISHMENT 2 0 2 2 +ASTONISHED 1 0 1 1 +ASSYRIAN 2 0 2 2 +ASSUREDLY 1 0 1 1 +ASSURE 8 0 8 8 +ASSURANCE 2 0 2 2 +ASSOCIATIONS 1 0 1 1 +ASSOCIATES 1 0 1 1 +ASSISTED 1 0 1 1 +ASSISTANT 1 0 1 1 +ASSISTANCE 3 0 3 3 +ASSIST 3 0 3 3 +ASSERT 2 0 2 2 +ASSEMBLY 3 0 3 3 +ASSEMBLED 2 0 2 2 +ASSEMBLAGE 1 0 1 1 +ASSAULT 1 0 1 1 +ASSASSINATED 1 0 1 1 +ASSAILED 1 0 1 1 +ASS 3 0 3 3 +ASPECT 1 0 1 1 +ASKING 5 0 5 5 +ASIDE 5 0 5 5 +ASHLEY 5 0 5 5 +ASCERTAINING 1 0 1 1 +ASCERTAIN 1 0 1 1 +ASCENSION 1 0 1 1 +ARTS 1 0 1 1 +ARTISTS 4 0 4 4 +ARTICLES 1 0 1 1 +ARTFUL 1 0 1 1 +ARRIVED 4 0 4 4 +ARRIVAL 1 0 1 1 +ARRESTED 1 0 1 1 +ARRANGING 1 0 1 1 +ARRANGEMENTS 1 0 1 1 +ARMS 9 0 9 9 +ARMIES 2 0 2 2 +ARKADYEVITCH 1 0 1 1 +ARK 1 0 1 1 +ARISTOCRACY 1 0 1 1 +ARISE 1 0 1 1 +ARGUMENTS 3 0 3 3 +ARGUMENT 1 0 1 1 +ARGUED 1 0 1 1 +ARENA 1 0 1 1 +ARCHITECTURE 1 0 1 1 +ARCHBISHOPS 1 0 1 1 +ARABIC 1 0 1 1 +ARABIANS 1 0 1 1 +AQUA 1 0 1 1 +APTITUDE 1 0 1 1 +APRIL 1 0 1 1 +APPROVAL 1 0 1 1 +APPROACHING 1 0 1 1 +APPROACHED 3 0 3 3 +APPREHENSIONS 1 0 1 1 +APPOINTMENT 2 0 2 2 +APPLYING 1 0 1 1 +APPLY 3 0 3 3 +APPLAUSE 1 0 1 1 +APPETITE 2 0 2 2 +APPEARING 1 0 1 1 +APPEARANCE 7 0 7 7 +APPEAR 6 0 6 6 +APPEALS 1 0 1 1 +APPEALING 1 0 1 1 +APPEAL 1 0 1 1 +APPARITION 1 0 1 1 +APPARENT 1 0 1 1 +APOLOGY 1 0 1 1 +APERTURE 1 0 1 1 +APARTMENTS 3 0 3 3 +APARTMENT 1 0 1 1 +APART 4 0 4 4 +ANYWHERE 1 0 1 1 +ANYHOW 2 0 2 2 +ANYBODY 2 0 2 2 +ANXIOUS 3 0 3 3 +ANXIETY 5 0 5 5 +ANTONIO 1 0 1 1 +ANTIQUARIAN'S 1 0 1 1 +ANTICIPATION 1 0 1 1 +ANTICIPATE 1 0 1 1 +ANSWERING 2 0 2 2 +ANSWER 15 0 15 15 +ANON 1 0 1 1 +ANNOYANCES 1 0 1 1 +ANNOYANCE 1 0 1 1 +ANNOUNCING 1 0 1 1 +ANNOUNCED 3 0 3 3 +ANNIHILATION 2 0 2 2 +ANNIHILATED 1 0 1 1 +ANIMATED 2 0 2 2 +ANIMATE 1 0 1 1 +ANIMALS 4 0 4 4 +ANGER 2 0 2 2 +ANEW 1 0 1 1 +ANDY 1 0 1 1 +ANDREW 2 0 2 2 +ANCIENTS 1 0 1 1 +ANCIENT 2 0 2 2 +ANCHOR 1 0 1 1 +AMUSING 1 0 1 1 +AMPLY 1 0 1 1 +AMPLE 1 0 1 1 +AMOUNT 1 0 1 1 +AMONG 18 0 18 18 +AMMUNITION 1 0 1 1 +AMISS 1 0 1 1 +AMIABLE 1 0 1 1 +AMERICAN 1 0 1 1 +AMERICA 1 0 1 1 +AMENDS 1 0 1 1 +AMENDMENT 1 0 1 1 +AMENDED 1 0 1 1 +AMBITIOUS 
1 0 1 1 +AMBITIONS 1 0 1 1 +AMBASSADOR 1 0 1 1 +ALTOGETHER 2 0 2 2 +ALTHOUGH 10 0 10 10 +ALTERED 1 0 1 1 +ALTER 1 0 1 1 +ALTAR 10 0 10 10 +ALONGSIDE 1 0 1 1 +ALMS 2 0 2 2 +ALMOST 11 0 11 11 +ALMIGHTY 1 0 1 1 +ALLOWING 1 0 1 1 +ALLOWANCES 1 0 1 1 +ALLIANCE 1 0 1 1 +ALLEY 1 0 1 1 +ALLAH'S 1 0 1 1 +ALLAH 9 0 9 9 +ALIVE 2 0 2 2 +ALIMONY 2 0 2 2 +ALIMENTARY 1 0 1 1 +ALIKE 2 0 2 2 +ALI 4 0 4 4 +ALEX 1 0 1 1 +ALBERT'S 3 0 3 3 +ALAS 1 0 1 1 +ALARMS 1 0 1 1 +ALARM 3 0 3 3 +ALABAMA 1 0 1 1 +AIM 4 0 4 4 +AILS 1 0 1 1 +AILMENTS 2 0 2 2 +AHEAD 2 0 2 2 +AGREES 1 0 1 1 +AGREEMENT 2 0 2 2 +AGREED 6 0 6 6 +AGREEABLE 2 0 2 2 +AGREE 1 0 1 1 +AGONY 1 0 1 1 +AGO 10 0 10 10 +AGITATOR 1 0 1 1 +AGITATION 1 0 1 1 +AGITATING 1 0 1 1 +AGILITY 1 0 1 1 +AGHAST 1 0 1 1 +AGGRESSIVENESS 1 0 1 1 +AGGRAVATIONS 1 0 1 1 +AGGRAVATED 1 0 1 1 +AGES 1 0 1 1 +AGENT 3 0 3 3 +AGED 2 0 2 2 +AGE 4 0 4 4 +AGAINST 27 0 27 27 +AGAIN 56 0 56 56 +AFTERNOON 6 0 6 6 +AFRICAN 1 0 1 1 +AFRAID 9 0 9 9 +AFORESAID 1 0 1 1 +AFIRE 1 0 1 1 +AFFORD 2 0 2 2 +AFFIRMED 1 0 1 1 +AFFECTIONS 1 0 1 1 +AFFECTIONATELY 1 0 1 1 +AFFECTED 2 0 2 2 +AFFAIRS 2 0 2 2 +AFFAIR 2 0 2 2 +ADVISEDLY 1 0 1 1 +ADVISED 1 0 1 1 +ADVISE 1 0 1 1 +ADVICE 2 0 2 2 +ADVENTURE 3 0 3 3 +ADVENT 1 0 1 1 +ADVANTAGES 2 0 2 2 +ADVANTAGE 4 0 4 4 +ADVANCING 1 0 1 1 +ADVANCES 2 0 2 2 +ADVANCED 1 0 1 1 +ADVANCE 5 0 5 5 +ADRIFT 1 0 1 1 +ADORNED 1 0 1 1 +ADORN 1 0 1 1 +ADORED 1 0 1 1 +ADMITTED 4 0 4 4 +ADMIT 1 0 1 1 +ADMISSION 1 0 1 1 +ADMIRED 1 0 1 1 +ADMIRATION 5 0 5 5 +ADMIRABLE 1 0 1 1 +ADMINISTRATION 3 0 3 3 +ADMINISTERED 1 0 1 1 +ADJACENT 1 0 1 1 +ADHERENT 1 0 1 1 +ADDRESSING 1 0 1 1 +ADDRESSED 1 0 1 1 +ADDRESS 3 0 3 3 +ADDITION 1 0 1 1 +ADAGE 1 0 1 1 +ACUTE 2 0 2 2 +ACTS 4 0 4 4 +ACTORS 1 0 1 1 +ACTIVITIES 1 0 1 1 +ACTIONS 2 0 2 2 +ACTION 2 0 2 2 +ACTING 2 0 2 2 +ACTED 1 0 1 1 +ACT 7 0 7 7 +ACROSS 8 0 8 8 +ACQUITTAL 1 0 1 1 +ACQUIT 1 0 1 1 +ACQUISITIVE 1 0 1 1 +ACQUIRED 2 0 2 2 +ACQUAINTANCES 1 0 1 1 +ACQUAINTANCE 2 0 2 2 +ACQUAINT 1 0 1 1 +ACKNOWLEDGMENT 1 0 1 1 +ACIDS 1 0 1 1 +ACID 4 0 4 4 +ACHIEVED 1 0 1 1 +ACHED 1 0 1 1 +ACE 1 0 1 1 +ACCUSTOMED 2 0 2 2 +ACCUSING 3 0 3 3 +ACCUSED 1 0 1 1 +ACCUSATION 4 0 4 4 +ACCURATE 1 0 1 1 +ACCOUNTS 2 0 2 2 +ACCOUNTED 1 0 1 1 +ACCORDINGLY 5 0 5 5 +ACCORDING 7 0 7 7 +ACCORDANCE 1 0 1 1 +ACCORD 1 0 1 1 +ACCOMPLISHMENTS 1 0 1 1 +ACCOMPLISHED 1 0 1 1 +ACCOMPLICE 2 0 2 2 +ACCOMPANY 1 0 1 1 +ACCOMPANIED 3 0 3 3 +ACCOMMODATING 1 0 1 1 +ACCIDENTS 1 0 1 1 +ACCIDENT 1 0 1 1 +ACCESSION 1 0 1 1 +ACCESSIBLE 1 0 1 1 +ACCESS 1 0 1 1 +ACCEPTED 4 0 4 4 +ACCEPTABLE 1 0 1 1 +ABYSSINIANS 2 0 2 2 +ABUSING 1 0 1 1 +ABUSES 2 0 2 2 +ABUSE 1 0 1 1 +ABUNDANTLY 1 0 1 1 +ABUNDANT 1 0 1 1 +ABSORBING 2 0 2 2 +ABSORB 1 0 1 1 +ABSOLUTELY 3 0 3 3 +ABSOLUTE 1 0 1 1 +ABSENTED 1 0 1 1 +ABSENT 1 0 1 1 +ABSENCE 4 0 4 4 +ABRUPTLY 3 0 3 3 +ABOLISH 1 0 1 1 +ABNORMAL 1 0 1 1 +ABLE 11 0 11 11 +ABILITY 1 0 1 1 +ABILITIES 1 0 1 1 +ABIDE 1 0 1 1 +ABBOT 1 0 1 1 +ABACK 1 0 1 1 +AARON 1 0 1 1 diff --git a/log/modified_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model-2023-04-04-09-26-31 b/log/modified_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model-2023-04-04-09-26-31 new file mode 100644 index 0000000000000000000000000000000000000000..3455f937e0ec5ace8dfa018ff06ee6783c495c54 --- /dev/null +++ b/log/modified_beam_search/log-decode-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model-2023-04-04-09-26-31 @@ -0,0 +1,35 @@ +2023-04-04 
09:26:31,976 INFO [decode.py:649] Decoding started +2023-04-04 09:26:31,976 INFO [decode.py:655] Device: cuda:0 +2023-04-04 09:26:31,978 INFO [decode.py:665] {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.3', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1c9950559223ec24d187f56bc424c3b43904bed3', 'k2-git-date': 'Thu Jan 26 22:00:26 2023', 'lhotse-version': '1.13.0.dev+git.ca98c73.dirty', 'torch-version': '2.0.0+cu117', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'surt', 'icefall-git-sha1': '51e6a8a-dirty', 'icefall-git-date': 'Fri Mar 17 11:23:13 2023', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r7n04', 'IP address': '10.1.7.4'}, 'epoch': 30, 'iter': 0, 'avg': 9, 'use_averaged_model': True, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'lang_dir': PosixPath('data/lang_bpe_500'), 'decoding_method': 'modified_beam_search', 'beam_size': 4, 'beam': 20.0, 'ngram_lm_scale': 0.01, 'max_contexts': 4, 'max_states': 8, 'context_size': 2, 'max_sym_per_frame': 1, 'num_paths': 200, 'nbest_scale': 0.5, 'num_encoder_layers': '2,2,2,2,2', 'feedforward_dims': '768,768,768,768,768', 'nhead': '8,8,8,8,8', 'encoder_dims': '256,256,256,256,256', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '192,192,192,192,192', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'short_chunk_size': 50, 'num_left_chunks': 4, 'decode_chunk_len': 32, 'full_libri': True, 'manifest_dir': PosixPath('data/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'res_dir': PosixPath('pruned_transducer_stateless7_streaming/exp/v2/modified_beam_search'), 'suffix': 'epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model', 'blank_id': 0, 'unk_id': 2, 'vocab_size': 500} +2023-04-04 09:26:31,979 INFO [decode.py:667] About to create model +2023-04-04 09:26:32,322 INFO [zipformer.py:405] At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-04-04 09:26:32,330 INFO [decode.py:738] Calculating the averaged model over epoch range from 21 (excluded) to 30 +2023-04-04 09:26:34,928 INFO [decode.py:772] Number of model parameters: 20697573 +2023-04-04 09:26:34,928 INFO [asr_datamodule.py:454] About to get test-clean cuts +2023-04-04 09:26:34,931 INFO [asr_datamodule.py:461] About to get test-other cuts +2023-04-04 09:26:42,421 INFO [decode.py:560] batch 0/?, cuts processed until now is 36 +2023-04-04 09:28:20,169 INFO [decode.py:560] batch 20/?, cuts processed until now is 1038 +2023-04-04 09:29:48,445 INFO [decode.py:560] batch 40/?, cuts processed until now is 2296 +2023-04-04 09:30:18,365 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/modified_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt +2023-04-04 09:30:18,575 INFO [utils.py:560] [test-clean-beam_size_4] %WER 3.41% [1792 / 52576, 213 ins, 129 del, 1450 sub ] +2023-04-04 09:30:18,740 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/modified_beam_search/errs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt +2023-04-04 09:30:18,741 INFO [decode.py:599] +For test-clean, WER of different settings are: +beam_size_4 3.41 best for test-clean + +2023-04-04 09:30:24,057 INFO [decode.py:560] batch 0/?, cuts processed until now is 43 +2023-04-04 09:31:28,835 INFO [zipformer.py:2441] attn_weights_entropy = tensor([1.5981, 1.5829, 1.6027, 1.3932, 1.3421, 1.3843, 0.4039, 0.7473], + device='cuda:0'), covar=tensor([0.0687, 0.0691, 0.0417, 0.0690, 0.1242, 0.0840, 0.1421, 0.1122], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0356, 0.0360, 0.0384, 0.0463, 0.0389, 0.0338, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-04 09:31:56,538 INFO [decode.py:560] batch 20/?, cuts processed until now is 1198 +2023-04-04 09:33:26,353 INFO [decode.py:560] batch 40/?, cuts processed until now is 2642 +2023-04-04 09:33:50,079 INFO [decode.py:574] The transcripts are stored in pruned_transducer_stateless7_streaming/exp/v2/modified_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt +2023-04-04 09:33:50,162 INFO [utils.py:560] [test-other-beam_size_4] %WER 8.94% [4681 / 52343, 512 ins, 424 del, 3745 sub ] +2023-04-04 09:33:50,334 INFO [decode.py:585] Wrote detailed error stats to pruned_transducer_stateless7_streaming/exp/v2/modified_beam_search/errs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt +2023-04-04 09:33:50,334 INFO [decode.py:599] +For test-other, WER of different settings are: +beam_size_4 8.94 best for test-other + +2023-04-04 09:33:50,335 INFO [decode.py:803] Done! 
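
As a quick sanity check on the figures in the decode log above: the reported %WER is simply (insertions + deletions + substitutions) divided by the number of reference words. The short sketch below (plain Python; the helper name and hard-coded counts are illustrative, taken from the log lines, and are not an icefall API) reproduces the test-clean and test-other numbers printed by `decode.py`.

```python
# Minimal sketch: reproduce the %WER figures from the decode log above.
# WER = (insertions + deletions + substitutions) / number of reference words.
# The function name is ours; the counts are copied from the log lines
# "[test-clean-beam_size_4] %WER 3.41% [1792 / 52576, 213 ins, 129 del, 1450 sub]"
# and "[test-other-beam_size_4] %WER 8.94% [4681 / 52343, 512 ins, 424 del, 3745 sub]".

def wer_percent(num_ref_words: int, ins: int, dels: int, subs: int) -> float:
    """Return the word error rate as a percentage."""
    return 100.0 * (ins + dels + subs) / num_ref_words

print(f"test-clean: {wer_percent(52576, ins=213, dels=129, subs=1450):.2f}%")  # 3.41%
print(f"test-other: {wer_percent(52343, ins=512, dels=424, subs=3745):.2f}%")  # 8.94%
```

These match the greedy/fast/modified beam search comparison in the Performance Record table, where modified beam search gives the lowest WER on both test sets.
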
diff --git a/log/modified_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f9f3675c6d5c3e720c01d0241e886cf0f48eb4f --- /dev/null +++ b/log/modified_beam_search/recogs-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,5240 @@ +1089-134686-0000-1733: ref=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOUR', 'FATTENED', 'SAUCE'] +1089-134686-0000-1733: hyp=['HE', 'HOPED', 'THERE', 'WOULD', 'BE', 'STEW', 'FOR', 'DINNER', 'TURNIPS', 'AND', 'CARROTS', 'AND', 'BRUISED', 'POTATOES', 'AND', 'FAT', 'MUTTON', 'PIECES', 'TO', 'BE', 'LADLED', 'OUT', 'IN', 'THICK', 'PEPPERED', 'FLOWER', 'FATTENED', 'SAUCE'] +1089-134686-0001-1734: ref=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0001-1734: hyp=['STUFF', 'IT', 'INTO', 'YOU', 'HIS', 'BELLY', 'COUNSELLED', 'HIM'] +1089-134686-0002-1735: ref=['AFTER', 'EARLY', 'NIGHTFALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0002-1735: hyp=['AFTER', 'EARLY', 'NIGHT', 'FALL', 'THE', 'YELLOW', 'LAMPS', 'WOULD', 'LIGHT', 'UP', 'HERE', 'AND', 'THERE', 'THE', 'SQUALID', 'QUARTER', 'OF', 'THE', 'BROTHELS'] +1089-134686-0003-1736: ref=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0003-1736: hyp=['HELLO', 'BERTIE', 'ANY', 'GOOD', 'IN', 'YOUR', 'MIND'] +1089-134686-0004-1737: ref=['NUMBER', 'TEN', 'FRESH', 'NELLY', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0004-1737: hyp=['NUMBER', 'TEN', 'FRESH', 'NELLIE', 'IS', 'WAITING', 'ON', 'YOU', 'GOOD', 'NIGHT', 'HUSBAND'] +1089-134686-0005-1738: ref=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0005-1738: hyp=['THE', 'MUSIC', 'CAME', 'NEARER', 'AND', 'HE', 'RECALLED', 'THE', 'WORDS', 'THE', 'WORDS', 'OF', "SHELLEY'S", 'FRAGMENT', 'UPON', 'THE', 'MOON', 'WANDERING', 'COMPANIONLESS', 'PALE', 'FOR', 'WEARINESS'] +1089-134686-0006-1739: ref=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] +1089-134686-0006-1739: hyp=['THE', 'DULL', 'LIGHT', 'FELL', 'MORE', 'FAINTLY', 'UPON', 'THE', 'PAGE', 'WHEREON', 'ANOTHER', 'EQUATION', 'BEGAN', 'TO', 'UNFOLD', 'ITSELF', 'SLOWLY', 'AND', 'TO', 'SPREAD', 'ABROAD', 'ITS', 'WIDENING', 'TAIL'] +1089-134686-0007-1740: ref=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0007-1740: hyp=['A', 'COLD', 'LUCID', 'INDIFFERENCE', 'REIGNED', 'IN', 'HIS', 'SOUL'] +1089-134686-0008-1741: ref=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOUR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 'INDIFFERENT', 'KNOWLEDGE', 'OF', 'HIMSELF'] +1089-134686-0008-1741: hyp=['THE', 'CHAOS', 'IN', 'WHICH', 'HIS', 'ARDOR', 'EXTINGUISHED', 'ITSELF', 'WAS', 'A', 'COLD', 
'INDIFFERENT', 'KNOWLEDGE', 'OF', 'HIMSELF'] +1089-134686-0009-1742: ref=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0009-1742: hyp=['AT', 'MOST', 'BY', 'AN', 'ALMS', 'GIVEN', 'TO', 'A', 'BEGGAR', 'WHOSE', 'BLESSING', 'HE', 'FLED', 'FROM', 'HE', 'MIGHT', 'HOPE', 'WEARILY', 'TO', 'WIN', 'FOR', 'HIMSELF', 'SOME', 'MEASURE', 'OF', 'ACTUAL', 'GRACE'] +1089-134686-0010-1743: ref=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0010-1743: hyp=['WELL', 'NOW', 'ENNIS', 'I', 'DECLARE', 'YOU', 'HAVE', 'A', 'HEAD', 'AND', 'SO', 'HAS', 'MY', 'STICK'] +1089-134686-0011-1744: ref=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODALITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0011-1744: hyp=['ON', 'SATURDAY', 'MORNINGS', 'WHEN', 'THE', 'SODELITY', 'MET', 'IN', 'THE', 'CHAPEL', 'TO', 'RECITE', 'THE', 'LITTLE', 'OFFICE', 'HIS', 'PLACE', 'WAS', 'A', 'CUSHIONED', 'KNEELING', 'DESK', 'AT', 'THE', 'RIGHT', 'OF', 'THE', 'ALTAR', 'FROM', 'WHICH', 'HE', 'LED', 'HIS', 'WING', 'OF', 'BOYS', 'THROUGH', 'THE', 'RESPONSES'] +1089-134686-0012-1745: ref=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0012-1745: hyp=['HER', 'EYES', 'SEEMED', 'TO', 'REGARD', 'HIM', 'WITH', 'MILD', 'PITY', 'HER', 'HOLINESS', 'A', 'STRANGE', 'LIGHT', 'GLOWING', 'FAINTLY', 'UPON', 'HER', 'FRAIL', 'FLESH', 'DID', 'NOT', 'HUMILIATE', 'THE', 'SINNER', 'WHO', 'APPROACHED', 'HER'] +1089-134686-0013-1746: ref=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0013-1746: hyp=['IF', 'EVER', 'HE', 'WAS', 'IMPELLED', 'TO', 'CAST', 'SIN', 'FROM', 'HIM', 'AND', 'TO', 'REPENT', 'THE', 'IMPULSE', 'THAT', 'MOVED', 'HIM', 'WAS', 'THE', 'WISH', 'TO', 'BE', 'HER', 'KNIGHT'] +1089-134686-0014-1747: ref=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0014-1747: hyp=['HE', 'TRIED', 'TO', 'THINK', 'HOW', 'IT', 'COULD', 'BE'] +1089-134686-0015-1748: ref=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOLROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0015-1748: hyp=['BUT', 'THE', 'DUSK', 'DEEPENING', 'IN', 'THE', 'SCHOOLROOM', 'COVERED', 'OVER', 'HIS', 'THOUGHTS', 'THE', 'BELL', 'RANG'] +1089-134686-0016-1749: ref=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 'CATECHISM', 'DEDALUS'] +1089-134686-0016-1749: hyp=['THEN', 'YOU', 'CAN', 'ASK', 'HIM', 'QUESTIONS', 'ON', 'THE', 'CATECHISM', 'DAEDALUS'] +1089-134686-0017-1750: ref=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0017-1750: hyp=['STEPHEN', 'LEANING', 'BACK', 'AND', 'DRAWING', 'IDLY', 'ON', 'HIS', 'SCRIBBLER', 
'LISTENED', 'TO', 'THE', 'TALK', 'ABOUT', 'HIM', 'WHICH', 'HERON', 'CHECKED', 'FROM', 'TIME', 'TO', 'TIME', 'BY', 'SAYING'] +1089-134686-0018-1751: ref=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0018-1751: hyp=['IT', 'WAS', 'STRANGE', 'TOO', 'THAT', 'HE', 'FOUND', 'AN', 'ARID', 'PLEASURE', 'IN', 'FOLLOWING', 'UP', 'TO', 'THE', 'END', 'THE', 'RIGID', 'LINES', 'OF', 'THE', 'DOCTRINES', 'OF', 'THE', 'CHURCH', 'AND', 'PENETRATING', 'INTO', 'OBSCURE', 'SILENCES', 'ONLY', 'TO', 'HEAR', 'AND', 'FEEL', 'THE', 'MORE', 'DEEPLY', 'HIS', 'OWN', 'CONDEMNATION'] +1089-134686-0019-1752: ref=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0019-1752: hyp=['THE', 'SENTENCE', 'OF', 'SAINT', 'JAMES', 'WHICH', 'SAYS', 'THAT', 'HE', 'WHO', 'OFFENDS', 'AGAINST', 'ONE', 'COMMANDMENT', 'BECOMES', 'GUILTY', 'OF', 'ALL', 'HAD', 'SEEMED', 'TO', 'HIM', 'FIRST', 'A', 'SWOLLEN', 'PHRASE', 'UNTIL', 'HE', 'HAD', 'BEGUN', 'TO', 'GROPE', 'IN', 'THE', 'DARKNESS', 'OF', 'HIS', 'OWN', 'STATE'] +1089-134686-0020-1753: ref=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'AMASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0020-1753: hyp=['IF', 'A', 'MAN', 'HAD', 'STOLEN', 'A', 'POUND', 'IN', 'HIS', 'YOUTH', 'AND', 'HAD', 'USED', 'THAT', 'POUND', 'TO', 'A', 'MASS', 'A', 'HUGE', 'FORTUNE', 'HOW', 'MUCH', 'WAS', 'HE', 'OBLIGED', 'TO', 'GIVE', 'BACK', 'THE', 'POUND', 'HE', 'HAD', 'STOLEN', 'ONLY', 'OR', 'THE', 'POUND', 'TOGETHER', 'WITH', 'THE', 'COMPOUND', 'INTEREST', 'ACCRUING', 'UPON', 'IT', 'OR', 'ALL', 'HIS', 'HUGE', 'FORTUNE'] +1089-134686-0021-1754: ref=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0021-1754: hyp=['IF', 'A', 'LAYMAN', 'IN', 'GIVING', 'BAPTISM', 'POUR', 'THE', 'WATER', 'BEFORE', 'SAYING', 'THE', 'WORDS', 'IS', 'THE', 'CHILD', 'BAPTIZED'] +1089-134686-0022-1755: ref=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 'LAND'] +1089-134686-0022-1755: hyp=['HOW', 'COMES', 'IT', 'THAT', 'WHILE', 'THE', 'FIRST', 'BEATITUDE', 'PROMISES', 'THE', 'KINGDOM', 'OF', 'HEAVEN', 'TO', 'THE', 'POOR', 'OF', 'HEART', 'THE', 'SECOND', 'BEATITUDE', 'PROMISES', 'ALSO', 'TO', 'THE', 'MEEK', 'THAT', 'THEY', 'SHALL', 'POSSESS', 'THE', 'LAND'] +1089-134686-0023-1756: ref=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 
'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0023-1756: hyp=['WHY', 'WAS', 'THE', 'SACRAMENT', 'OF', 'THE', 'EUCHARIST', 'INSTITUTED', 'UNDER', 'THE', 'TWO', 'SPECIES', 'OF', 'BREAD', 'AND', 'WINE', 'IF', 'JESUS', 'CHRIST', 'BE', 'PRESENT', 'BODY', 'AND', 'BLOOD', 'SOUL', 'AND', 'DIVINITY', 'IN', 'THE', 'BREAD', 'ALONE', 'AND', 'IN', 'THE', 'WINE', 'ALONE'] +1089-134686-0024-1757: ref=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0024-1757: hyp=['IF', 'THE', 'WINE', 'CHANGE', 'INTO', 'VINEGAR', 'AND', 'THE', 'HOST', 'CRUMBLE', 'INTO', 'CORRUPTION', 'AFTER', 'THEY', 'HAVE', 'BEEN', 'CONSECRATED', 'IS', 'JESUS', 'CHRIST', 'STILL', 'PRESENT', 'UNDER', 'THEIR', 'SPECIES', 'AS', 'GOD', 'AND', 'AS', 'MAN'] +1089-134686-0025-1758: ref=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0025-1758: hyp=['A', 'GENTLE', 'KICK', 'FROM', 'THE', 'TALL', 'BOY', 'IN', 'THE', 'BENCH', 'BEHIND', 'URGED', 'STEPHEN', 'TO', 'ASK', 'A', 'DIFFICULT', 'QUESTION'] +1089-134686-0026-1759: ref=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0026-1759: hyp=['THE', 'RECTOR', 'DID', 'NOT', 'ASK', 'FOR', 'A', 'CATECHISM', 'TO', 'HEAR', 'THE', 'LESSON', 'FROM'] +1089-134686-0027-1760: ref=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0027-1760: hyp=['HE', 'CLASPED', 'HIS', 'HANDS', 'ON', 'THE', 'DESK', 'AND', 'SAID'] +1089-134686-0028-1761: ref=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOUR', 'OF', 'SAINT', 'FRANCIS', 'XAVIER', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0028-1761: hyp=['THE', 'RETREAT', 'WILL', 'BEGIN', 'ON', 'WEDNESDAY', 'AFTERNOON', 'IN', 'HONOR', 'OF', 'SAINT', 'FRANCIS', 'ZAVIOUR', 'WHOSE', 'FEAST', 'DAY', 'IS', 'SATURDAY'] +1089-134686-0029-1762: ref=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0029-1762: hyp=['ON', 'FRIDAY', 'CONFESSION', 'WILL', 'BE', 'HEARD', 'ALL', 'THE', 'AFTERNOON', 'AFTER', 'BEADS'] +1089-134686-0030-1763: ref=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0030-1763: hyp=['BEWARE', 'OF', 'MAKING', 'THAT', 'MISTAKE'] +1089-134686-0031-1764: ref=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0031-1764: hyp=["STEPHEN'S", 'HEART', 'BEGAN', 'SLOWLY', 'TO', 'FOLD', 'AND', 'FADE', 'WITH', 'FEAR', 'LIKE', 'A', 'WITHERING', 'FLOWER'] +1089-134686-0032-1765: ref=['HE', 'IS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] +1089-134686-0032-1765: hyp=['HE', 'HAS', 'CALLED', 'AS', 'YOU', 'KNOW', 'THE', 'APOSTLE', 'OF', 'THE', 'INDIES'] +1089-134686-0033-1766: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0033-1766: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZAVIER'] +1089-134686-0034-1767: ref=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 'SHAKING', 'HIS', 'CLASPED', 'HANDS', 'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0034-1767: hyp=['THE', 'RECTOR', 'PAUSED', 'AND', 'THEN', 
'SHAKING', 'HIS', 'CLASPED', 'HANDS', 'BEFORE', 'HIM', 'WENT', 'ON'] +1089-134686-0035-1768: ref=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0035-1768: hyp=['HE', 'HAD', 'THE', 'FAITH', 'IN', 'HIM', 'THAT', 'MOVES', 'MOUNTAINS'] +1089-134686-0036-1769: ref=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'XAVIER'] +1089-134686-0036-1769: hyp=['A', 'GREAT', 'SAINT', 'SAINT', 'FRANCIS', 'ZEVIER'] +1089-134686-0037-1770: ref=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134686-0037-1770: hyp=['IN', 'THE', 'SILENCE', 'THEIR', 'DARK', 'FIRE', 'KINDLED', 'THE', 'DUSK', 'INTO', 'A', 'TAWNY', 'GLOW'] +1089-134691-0000-1707: ref=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0000-1707: hyp=['HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: ref=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0001-1708: hyp=['FOR', 'A', 'FULL', 'HOUR', 'HE', 'HAD', 'PACED', 'UP', 'AND', 'DOWN', 'WAITING', 'BUT', 'HE', 'COULD', 'WAIT', 'NO', 'LONGER'] +1089-134691-0002-1709: ref=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0002-1709: hyp=['HE', 'SET', 'OFF', 'ABRUPTLY', 'FOR', 'THE', 'BULL', 'WALKING', 'RAPIDLY', 'LEST', 'HIS', "FATHER'S", 'SHRILL', 'WHISTLE', 'MIGHT', 'CALL', 'HIM', 'BACK', 'AND', 'IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HAD', 'ROUNDED', 'THE', 'CURVE', 'AT', 'THE', 'POLICE', 'BARRACK', 'AND', 'WAS', 'SAFE'] +1089-134691-0003-1710: ref=['THE', 'UNIVERSITY'] +1089-134691-0003-1710: hyp=['THE', 'UNIVERSITY'] +1089-134691-0004-1711: ref=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0004-1711: hyp=['PRIDE', 'AFTER', 'SATISFACTION', 'UPLIFTED', 'HIM', 'LIKE', 'LONG', 'SLOW', 'WAVES'] +1089-134691-0005-1712: ref=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0005-1712: hyp=['WHOSE', 'FEET', 'ARE', 'AS', 'THE', 'FEET', 'OF', 'HEARTS', 'AND', 'UNDERNEATH', 'THE', 'EVERLASTING', 'ARMS'] +1089-134691-0006-1713: ref=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0006-1713: hyp=['THE', 'PRIDE', 'OF', 'THAT', 'DIM', 'IMAGE', 'BROUGHT', 'BACK', 'TO', 'HIS', 'MIND', 'THE', 'DIGNITY', 'OF', 'THE', 'OFFICE', 'HE', 'HAD', 'REFUSED'] +1089-134691-0007-1714: ref=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0007-1714: hyp=['SOON', 'THE', 'WHOLE', 'BRIDGE', 'WAS', 'TREMBLING', 'AND', 'RESOUNDING'] +1089-134691-0008-1715: ref=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 'WITH', 'EASE', 'AND', 'INDIFFERENCE', 'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0008-1715: hyp=['THE', 'UNCOUTH', 'FACES', 'PASSED', 'HIM', 'TWO', 'BY', 'TWO', 'STAINED', 'YELLOW', 'OR', 'RED', 'OR', 'LIVID', 'BY', 'THE', 'SEA', 'AND', 'AS', 'HE', 'STROVE', 'TO', 'LOOK', 'AT', 'THEM', 
'WITH', 'EASE', 'AND', 'INDIFFERENCE', 'A', 'FAINT', 'STAIN', 'OF', 'PERSONAL', 'SHAME', 'AND', 'COMMISERATION', 'ROSE', 'TO', 'HIS', 'OWN', 'FACE'] +1089-134691-0009-1716: ref=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0009-1716: hyp=['ANGRY', 'WITH', 'HIMSELF', 'HE', 'TRIED', 'TO', 'HIDE', 'HIS', 'FACE', 'FROM', 'THEIR', 'EYES', 'BY', 'GAZING', 'DOWN', 'SIDEWAYS', 'INTO', 'THE', 'SHALLOW', 'SWIRLING', 'WATER', 'UNDER', 'THE', 'BRIDGE', 'BUT', 'HE', 'STILL', 'SAW', 'A', 'REFLECTION', 'THEREIN', 'OF', 'THEIR', 'TOP', 'HEAVY', 'SILK', 'HATS', 'AND', 'HUMBLE', 'TAPE', 'LIKE', 'COLLARS', 'AND', 'LOOSELY', 'HANGING', 'CLERICAL', 'CLOTHES', 'BROTHER', 'HICKEY'] +1089-134691-0010-1717: ref=['BROTHER', 'MAC', 'ARDLE', 'BROTHER', 'KEOGH'] +1089-134691-0010-1717: hyp=['BROTHER', 'MICARTLE', 'BROTHER', 'KIEV'] +1089-134691-0011-1718: ref=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0011-1718: hyp=['THEIR', 'PIETY', 'WOULD', 'BE', 'LIKE', 'THEIR', 'NAMES', 'LIKE', 'THEIR', 'FACES', 'LIKE', 'THEIR', 'CLOTHES', 'AND', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'THEIR', 'HUMBLE', 'AND', 'CONTRITE', 'HEARTS', 'IT', 'MIGHT', 'BE', 'PAID', 'A', 'FAR', 'RICHER', 'TRIBUTE', 'OF', 'DEVOTION', 'THAN', 'HIS', 'HAD', 'EVER', 'BEEN', 'A', 'GIFT', 'TENFOLD', 'MORE', 'ACCEPTABLE', 'THAN', 'HIS', 'ELABORATE', 'ADORATION'] +1089-134691-0012-1719: ref=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0012-1719: hyp=['IT', 'WAS', 'IDLE', 'FOR', 'HIM', 'TO', 'MOVE', 'HIMSELF', 'TO', 'BE', 'GENEROUS', 'TOWARDS', 'THEM', 'TO', 'TELL', 'HIMSELF', 'THAT', 'IF', 'HE', 'EVER', 'CAME', 'TO', 'THEIR', 'GATES', 'STRIPPED', 'OF', 'HIS', 'PRIDE', 'BEATEN', 'AND', 'IN', "BEGGAR'S", 'WEEDS', 'THAT', 'THEY', 'WOULD', 'BE', 'GENEROUS', 'TOWARDS', 'HIM', 'LOVING', 'HIM', 'AS', 'THEMSELVES'] +1089-134691-0013-1720: ref=['IDLE', 'AND', 'EMBITTERING', 'FINALLY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 'TO', 'LOVE', 'OUR', 'NEIGHBOUR', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0013-1720: hyp=['IDLE', 'AND', 'EMBITTERING', 'FINALLY', 'TO', 'ARGUE', 'AGAINST', 'HIS', 'OWN', 'DISPASSIONATE', 'CERTITUDE', 'THAT', 'THE', 'COMMANDMENT', 'OF', 'LOVE', 'BADE', 'US', 'NOT', 
'TO', 'LOVE', 'OUR', 'NEIGHBOUR', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'AMOUNT', 'AND', 'INTENSITY', 'OF', 'LOVE', 'BUT', 'TO', 'LOVE', 'HIM', 'AS', 'OURSELVES', 'WITH', 'THE', 'SAME', 'KIND', 'OF', 'LOVE'] +1089-134691-0014-1721: ref=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'A', 'CHORD'] +1089-134691-0014-1721: hyp=['THE', 'PHRASE', 'AND', 'THE', 'DAY', 'AND', 'THE', 'SCENE', 'HARMONIZED', 'IN', 'ACCORD'] +1089-134691-0015-1722: ref=['WORDS', 'WAS', 'IT', 'THEIR', 'COLOURS'] +1089-134691-0015-1722: hyp=['WORDS', 'WAS', 'IT', 'THEIR', 'COLORS'] +1089-134691-0016-1723: ref=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0016-1723: hyp=['THEY', 'WERE', 'VOYAGING', 'ACROSS', 'THE', 'DESERTS', 'OF', 'THE', 'SKY', 'A', 'HOST', 'OF', 'NOMADS', 'ON', 'THE', 'MARCH', 'VOYAGING', 'HIGH', 'OVER', 'IRELAND', 'WESTWARD', 'BOUND'] +1089-134691-0017-1724: ref=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOODBEGIRT', 'AND', 'CITADELLED', 'AND', 'OF', 'ENTRENCHED', 'AND', 'MARSHALLED', 'RACES'] +1089-134691-0017-1724: hyp=['THE', 'EUROPE', 'THEY', 'HAD', 'COME', 'FROM', 'LAY', 'OUT', 'THERE', 'BEYOND', 'THE', 'IRISH', 'SEA', 'EUROPE', 'OF', 'STRANGE', 'TONGUES', 'AND', 'VALLEYED', 'AND', 'WOOD', 'BEGIRT', 'AND', 'CITADELED', 'AND', 'OF', 'ENTRENCHED', 'AND', 'MARSHALLED', 'RACES'] +1089-134691-0018-1725: ref=['AGAIN', 'AGAIN'] +1089-134691-0018-1725: hyp=['AGAIN', 'AGAIN'] +1089-134691-0019-1726: ref=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0019-1726: hyp=['A', 'VOICE', 'FROM', 'BEYOND', 'THE', 'WORLD', 'WAS', 'CALLING'] +1089-134691-0020-1727: ref=['HELLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DEDALUS'] +1089-134691-0020-1727: hyp=['HELLO', 'STEPHANOS', 'HERE', 'COMES', 'THE', 'DAEDALUS'] +1089-134691-0021-1728: ref=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSEPLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0021-1728: hyp=['THEIR', 'DIVING', 'STONE', 'POISED', 'ON', 'ITS', 'RUDE', 'SUPPORTS', 'AND', 'ROCKING', 'UNDER', 'THEIR', 'PLUNGES', 'AND', 'THE', 'ROUGH', 'HEWN', 'STONES', 'OF', 'THE', 'SLOPING', 'BREAKWATER', 'OVER', 'WHICH', 'THEY', 'SCRAMBLED', 'IN', 'THEIR', 'HORSE', 'PLAY', 'GLEAMED', 'WITH', 'COLD', 'WET', 'LUSTRE'] +1089-134691-0022-1729: ref=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] +1089-134691-0022-1729: hyp=['HE', 'STOOD', 'STILL', 'IN', 'DEFERENCE', 'TO', 'THEIR', 'CALLS', 'AND', 'PARRIED', 'THEIR', 'BANTER', 'WITH', 'EASY', 'WORDS'] +1089-134691-0023-1730: ref=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0023-1730: hyp=['IT', 'WAS', 'A', 'PAIN', 'TO', 'SEE', 'THEM', 'AND', 'A', 'SWORD', 'LIKE', 'PAIN', 'TO', 'SEE', 'THE', 'SIGNS', 'OF', 'ADOLESCENCE', 'THAT', 'MADE', 'REPELLENT', 'THEIR', 'PITIABLE', 'NAKEDNESS'] +1089-134691-0024-1731: ref=['STEPHANOS', 
'DEDALOS'] +1089-134691-0024-1731: hyp=['STEPHANOS', 'DE', 'LOSS'] +1089-134691-0025-1732: ref=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAZEWRAPPED', 'CITY'] +1089-134691-0025-1732: hyp=['A', 'MOMENT', 'BEFORE', 'THE', 'GHOST', 'OF', 'THE', 'ANCIENT', 'KINGDOM', 'OF', 'THE', 'DANES', 'HAD', 'LOOKED', 'FORTH', 'THROUGH', 'THE', 'VESTURE', 'OF', 'THE', 'HAYES', 'WRAPPED', 'CITY'] +1188-133604-0000-1771: ref=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBEIN', 'TURNER', 'AND', 'TINTORET', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0000-1771: hyp=['YOU', 'WILL', 'FIND', 'ME', 'CONTINUALLY', 'SPEAKING', 'OF', 'FOUR', 'MEN', 'TITIAN', 'HOLBINE', 'TURNER', 'AND', 'TINTARETTE', 'IN', 'ALMOST', 'THE', 'SAME', 'TERMS'] +1188-133604-0001-1772: ref=['THEY', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'CHIAROSCURISTS'] +1188-133604-0001-1772: hyp=['THE', 'UNITE', 'EVERY', 'QUALITY', 'AND', 'SOMETIMES', 'YOU', 'WILL', 'FIND', 'ME', 'REFERRING', 'TO', 'THEM', 'AS', 'COLORISTS', 'SOMETIMES', 'AS', 'KIERRASCURISTS'] +1188-133604-0002-1773: ref=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CHIAROSCURIST', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0002-1773: hyp=['BY', 'BEING', 'STUDIOUS', 'OF', 'COLOUR', 'THEY', 'ARE', 'STUDIOUS', 'OF', 'DIVISION', 'AND', 'WHILE', 'THE', 'CUIRASCURISTS', 'DEVOTES', 'HIMSELF', 'TO', 'THE', 'REPRESENTATION', 'OF', 'DEGREES', 'OF', 'FORCE', 'IN', 'ONE', 'THING', 'UNSEPARATED', 'LIGHT', 'THE', 'COLORISTS', 'HAVE', 'FOR', 'THEIR', 'FUNCTION', 'THE', 'ATTAINMENT', 'OF', 'BEAUTY', 'BY', 'ARRANGEMENT', 'OF', 'THE', 'DIVISIONS', 'OF', 'LIGHT'] +1188-133604-0003-1774: ref=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0003-1774: hyp=['MY', 'FIRST', 'AND', 'PRINCIPAL', 'REASON', 'WAS', 'THAT', 'THEY', 'ENFORCED', 'BEYOND', 'ALL', 'RESISTANCE', 'ON', 'ANY', 'STUDENT', 'WHO', 'MIGHT', 'ATTEMPT', 'TO', 'COPY', 'THEM', 'THIS', 'METHOD', 'OF', 'LAYING', 'PORTIONS', 'OF', 'DISTINCT', 'HUE', 'SIDE', 'BY', 'SIDE'] +1188-133604-0004-1775: ref=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 'AT', 'THE', 'EDGE'] +1188-133604-0004-1775: hyp=['SOME', 'OF', 'THE', 'TOUCHES', 'INDEED', 'WHEN', 'THE', 'TINT', 'HAS', 'BEEN', 'MIXED', 'WITH', 'MUCH', 'WATER', 'HAVE', 'BEEN', 'LAID', 'IN', 'LITTLE', 'DROPS', 'OR', 'PONDS', 'SO', 'THAT', 'THE', 'PIGMENT', 'MIGHT', 'CRYSTALLIZE', 'HARD', 'AT', 'THE', 'EDGE'] +1188-133604-0005-1776: ref=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', "CARPACCIO'S", 
'ONE', 'OF', 'HIS', 'SERIES', 'OF', 'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0005-1776: hyp=['IT', 'IS', 'THE', 'HEAD', 'OF', 'A', 'PARROT', 'WITH', 'A', 'LITTLE', 'FLOWER', 'IN', 'HIS', 'BEAK', 'FROM', 'A', 'PICTURE', 'OF', 'CARPATIUS', 'ONE', 'OF', 'HIS', 'SERIES', 'OF', 'THE', 'LIFE', 'OF', 'SAINT', 'GEORGE'] +1188-133604-0006-1777: ref=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0006-1777: hyp=['THEN', 'HE', 'COMES', 'TO', 'THE', 'BEAK', 'OF', 'IT'] +1188-133604-0007-1778: ref=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TWO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GRAY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0007-1778: hyp=['THE', 'BROWN', 'GROUND', 'BENEATH', 'IS', 'LEFT', 'FOR', 'THE', 'MOST', 'PART', 'ONE', 'TOUCH', 'OF', 'BLACK', 'IS', 'PUT', 'FOR', 'THE', 'HOLLOW', 'TOO', 'DELICATE', 'LINES', 'OF', 'DARK', 'GREY', 'DEFINE', 'THE', 'OUTER', 'CURVE', 'AND', 'ONE', 'LITTLE', 'QUIVERING', 'TOUCH', 'OF', 'WHITE', 'DRAWS', 'THE', 'INNER', 'EDGE', 'OF', 'THE', 'MANDIBLE'] +1188-133604-0008-1779: ref=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'RED', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'IN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0008-1779: hyp=['FOR', 'BELIEVE', 'ME', 'THE', 'FINAL', 'PHILOSOPHY', 'OF', 'ART', 'CAN', 'ONLY', 'RATIFY', 'THEIR', 'OPINION', 'THAT', 'THE', 'BEAUTY', 'OF', 'A', 'COCK', 'ROBIN', 'IS', 'TO', 'BE', 'READ', 'AND', 'OF', 'A', 'GRASS', 'PLOT', 'TO', 'BE', 'GREEN', 'AND', 'THE', 'BEST', 'SKILL', 'OF', 'ART', 'IS', 'AN', 'INSTANTLY', 'SEIZING', 'ON', 'THE', 'MANIFOLD', 'DELICIOUSNESS', 'OF', 'LIGHT', 'WHICH', 'YOU', 'CAN', 'ONLY', 'SEIZE', 'BY', 'PRECISION', 'OF', 'INSTANTANEOUS', 'TOUCH'] +1188-133604-0009-1780: ref=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'THAT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'INCLOSED', 'PROPERLY', 'AND', 'HARMONIZED', 'WITH', 'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0009-1780: hyp=['NOW', 'YOU', 'WILL', 'SEE', 'IN', 'THESE', 'STUDIES', 'THAT', 'THE', 'MOMENT', 'THE', 'WHITE', 'IS', 'ENCLOSED', 'PROPERLY', 'AND', 'HARMONIZE', 'WITH', 'THE', 'OTHER', 'HUES', 'IT', 'BECOMES', 'SOMEHOW', 'MORE', 'PRECIOUS', 'AND', 'PEARLY', 'THAN', 'THE', 'WHITE', 'PAPER', 'AND', 'THAT', 'I', 'AM', 'NOT', 'AFRAID', 'TO', 'LEAVE', 'A', 'WHOLE', 'FIELD', 'OF', 'UNTREATED', 'WHITE', 'PAPER', 'ALL', 'ROUND', 'IT', 'BEING', 'SURE', 'THAT', 'EVEN', 'THE', 'LITTLE', 'DIAMONDS', 'IN', 'THE', 'ROUND', 'WINDOW', 'WILL', 'TELL', 'AS', 'JEWELS', 'IF', 'THEY', 'ARE', 'GRADATED', 'JUSTLY'] +1188-133604-0010-1781: ref=['BUT', 'IN', 'THIS', 
'VIGNETTE', 'COPIED', 'FROM', 'TURNER', 'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0010-1781: hyp=['BUT', 'IN', 'THIS', 'VINEYARD', 'COPIED', 'FROM', 'TURNER', 'YOU', 'HAVE', 'THE', 'TWO', 'PRINCIPLES', 'BROUGHT', 'OUT', 'PERFECTLY'] +1188-133604-0011-1782: ref=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAT', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVORITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0011-1782: hyp=['THEY', 'ARE', 'BEYOND', 'ALL', 'OTHER', 'WORKS', 'THAN', 'I', 'KNOW', 'EXISTING', 'DEPENDENT', 'FOR', 'THEIR', 'EFFECT', 'ON', 'LOW', 'SUBDUED', 'TONES', 'THEIR', 'FAVORITE', 'CHOICE', 'IN', 'TIME', 'OF', 'DAY', 'BEING', 'EITHER', 'DAWN', 'OR', 'TWILIGHT', 'AND', 'EVEN', 'THEIR', 'BRIGHTEST', 'SUNSETS', 'PRODUCED', 'CHIEFLY', 'OUT', 'OF', 'GRAY', 'PAPER'] +1188-133604-0012-1783: ref=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORIST', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0012-1783: hyp=['IT', 'MAY', 'BE', 'THAT', 'A', 'GREAT', 'COLORLESS', 'WILL', 'USE', 'HIS', 'UTMOST', 'FORCE', 'OF', 'COLOR', 'AS', 'A', 'SINGER', 'HIS', 'FULL', 'POWER', 'OF', 'VOICE', 'BUT', 'LOUD', 'OR', 'LOW', 'THE', 'VIRTUE', 'IS', 'IN', 'BOTH', 'CASES', 'ALWAYS', 'IN', 'REFINEMENT', 'NEVER', 'IN', 'LOUDNESS'] +1188-133604-0013-1784: ref=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0013-1784: hyp=['IT', 'MUST', 'REMEMBER', 'BE', 'ONE', 'OR', 'THE', 'OTHER'] +1188-133604-0014-1785: ref=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOL', 'IS', 'AN', 'EASY', 'ONE'] +1188-133604-0014-1785: hyp=['DO', 'NOT', 'THEREFORE', 'THINK', 'THAT', 'THE', 'GOTHIC', 'SCHOOLS', 'AN', 'EASY', 'ONE'] +1188-133604-0015-1786: ref=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0015-1786: hyp=['THE', 'LAW', 'OF', 'THAT', 'SCHOOL', 'IS', 'THAT', 'EVERYTHING', 'SHALL', 'BE', 'SEEN', 'CLEARLY', 'OR', 'AT', 'LEAST', 'ONLY', 'IN', 'SUCH', 'MIST', 'OR', 'FAINTNESS', 'AS', 'SHALL', 'BE', 'DELIGHTFUL', 'AND', 'I', 'HAVE', 'NO', 'DOUBT', 'THAT', 'THE', 'BEST', 'INTRODUCTION', 'TO', 'IT', 'WOULD', 'BE', 'THE', 'ELEMENTARY', 'PRACTICE', 'OF', 'PAINTING', 'EVERY', 'STUDY', 'ON', 'A', 'GOLDEN', 'GROUND'] +1188-133604-0016-1787: ref=['THIS', 'AT', 'ONCE', 'COMPELS', 'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', "JEWELER'S", 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0016-1787: hyp=['THIS', 'AT', 'ONCE', 'COMPELS', 'YOU', 'TO', 'UNDERSTAND', 'THAT', 'THE', 'WORK', 'IS', 'TO', 'BE', 
'IMAGINATIVE', 'AND', 'DECORATIVE', 'THAT', 'IT', 'REPRESENTS', 'BEAUTIFUL', 'THINGS', 'IN', 'THE', 'CLEAREST', 'WAY', 'BUT', 'NOT', 'UNDER', 'EXISTING', 'CONDITIONS', 'AND', 'THAT', 'IN', 'FACT', 'YOU', 'ARE', 'PRODUCING', 'JEWELLERS', 'WORK', 'RATHER', 'THAN', 'PICTURES'] +1188-133604-0017-1788: ref=['THAT', 'A', 'STYLE', 'IS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0017-1788: hyp=['THAT', 'A', 'STYLE', 'WAS', 'RESTRAINED', 'OR', 'SEVERE', 'DOES', 'NOT', 'MEAN', 'THAT', 'IT', 'IS', 'ALSO', 'ERRONEOUS'] +1188-133604-0018-1789: ref=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0018-1789: hyp=['IN', 'ALL', 'EARLY', 'GOTHIC', 'ART', 'INDEED', 'YOU', 'WILL', 'FIND', 'FAILURE', 'OF', 'THIS', 'KIND', 'ESPECIALLY', 'DISTORTION', 'AND', 'RIGIDITY', 'WHICH', 'ARE', 'IN', 'MANY', 'RESPECTS', 'PAINFULLY', 'TO', 'BE', 'COMPARED', 'WITH', 'THE', 'SPLENDID', 'REPOSE', 'OF', 'CLASSIC', 'ART'] +1188-133604-0019-1790: ref=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'AND', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0019-1790: hyp=['THE', 'LARGE', 'LETTER', 'CONTAINS', 'INDEED', 'ENTIRELY', 'FEEBLE', 'AND', 'ILL', 'DRAWN', 'FIGURES', 'THAT', 'IS', 'MERELY', 'CHILDISH', 'IN', 'FAILING', 'WORK', 'OF', 'AN', 'INFERIOR', 'HAND', 'IT', 'IS', 'NOT', 'CHARACTERISTIC', 'OF', 'GOTHIC', 'OR', 'ANY', 'OTHER', 'SCHOOL'] +1188-133604-0020-1791: ref=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0020-1791: hyp=['BUT', 'OBSERVE', 'YOU', 'CAN', 'ONLY', 'DO', 'THIS', 'ON', 'ONE', 'CONDITION', 'THAT', 'OF', 'STRIVING', 'ALSO', 'TO', 'CREATE', 'IN', 'REALITY', 'THE', 'BEAUTY', 'WHICH', 'YOU', 'SEEK', 'IN', 'IMAGINATION'] +1188-133604-0021-1792: ref=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PURIST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0021-1792: hyp=['IT', 'WILL', 'BE', 'WHOLLY', 'IMPOSSIBLE', 'FOR', 'YOU', 'TO', 'RETAIN', 'THE', 'TRANQUILLITY', 'OF', 'TEMPER', 'AND', 'FELICITY', 'OF', 'FAITH', 'NECESSARY', 'FOR', 'NOBLE', 'PUREST', 'PAINTING', 'UNLESS', 'YOU', 'ARE', 'ACTIVELY', 'ENGAGED', 'IN', 'PROMOTING', 'THE', 'FELICITY', 'AND', 'PEACE', 'OF', 'PRACTICAL', 'LIFE'] +1188-133604-0022-1793: ref=['YOU', 'MUST', 'LOOK', 'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATHE', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0022-1793: hyp=['YOU', 'MUST', 'LOOK', 'AT', 'HIM', 'IN', 'THE', 'FACE', 'FIGHT', 'HIM', 'CONQUER', 'HIM', 'WITH', 'WHAT', 'SCATH', 'YOU', 'MAY', 'YOU', 'NEED', 'NOT', 'THINK', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'HIM'] +1188-133604-0023-1794: ref=['THE', 
'COLORIST', 'SAYS', 'FIRST', 'OF', 'ALL', 'AS', 'MY', 'DELICIOUS', 'PAROQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'IS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0023-1794: hyp=['THE', 'CHOLERIST', 'SAYS', 'FIRST', 'OF', 'ALL', 'AS', 'MY', 'DELICIOUS', 'PARAQUET', 'WAS', 'RUBY', 'SO', 'THIS', 'NASTY', 'VIPER', 'SHALL', 'BE', 'BLACK', 'AND', 'THEN', 'AS', 'THE', 'QUESTION', 'CAN', 'I', 'ROUND', 'HIM', 'OFF', 'EVEN', 'THOUGH', 'HE', 'IS', 'BLACK', 'AND', 'MAKE', 'HIM', 'SLIMY', 'AND', 'YET', 'SPRINGY', 'AND', 'CLOSE', 'DOWN', 'CLOTTED', 'LIKE', 'A', 'POOL', 'OF', 'BLACK', 'BLOOD', 'ON', 'THE', 'EARTH', 'ALL', 'THE', 'SAME'] +1188-133604-0024-1795: ref=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0024-1795: hyp=['NOTHING', 'WILL', 'BE', 'MORE', 'PRECIOUS', 'TO', 'YOU', 'I', 'THINK', 'IN', 'THE', 'PRACTICAL', 'STUDY', 'OF', 'ART', 'THAN', 'THE', 'CONVICTION', 'WHICH', 'WILL', 'FORCE', 'ITSELF', 'ON', 'YOU', 'MORE', 'AND', 'MORE', 'EVERY', 'HOUR', 'OF', 'THE', 'WAY', 'ALL', 'THINGS', 'ARE', 'BOUND', 'TOGETHER', 'LITTLE', 'AND', 'GREAT', 'IN', 'SPIRIT', 'AND', 'IN', 'MATTER'] +1188-133604-0025-1796: ref=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'AND', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0025-1796: hyp=['YOU', 'KNOW', 'I', 'HAVE', 'JUST', 'BEEN', 'TELLING', 'YOU', 'HOW', 'THIS', 'SCHOOL', 'OF', 'MATERIALISM', 'IN', 'CLAY', 'INVOLVED', 'ITSELF', 'AT', 'LAST', 'IN', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0026-1797: ref=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'AND', 'THE', 'PYTHON'] +1188-133604-0026-1797: hyp=['HERE', 'IS', 'AN', 'EQUALLY', 'TYPICAL', 'GREEK', 'SCHOOL', 'LANDSCAPE', 'BY', 'WILSON', 'LOST', 'WHOLLY', 'IN', 'GOLDEN', 'MIST', 'THE', 'TREES', 'SO', 'SLIGHTLY', 'DRAWN', 'THAT', 'YOU', "DON'T", 'KNOW', 'IF', 'THEY', 'ARE', 'TREES', 'OR', 'TOWERS', 'AND', 'NO', 'CARE', 'FOR', 'COLOR', 'WHATSOEVER', 'PERFECTLY', 'DECEPTIVE', 'AND', 'MARVELLOUS', 'EFFECT', 'OF', 'SUNSHINE', 'THROUGH', 'THE', 'MIST', 'APOLLO', 'IN', 'THE', 'PYTHON'] +1188-133604-0027-1798: ref=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 'THE', 'DISTANCE'] +1188-133604-0027-1798: hyp=['NOW', 'HERE', 'IS', 'RAPHAEL', 'EXACTLY', 'BETWEEN', 'THE', 'TWO', 'TREES', 'STILL', 'DRAWN', 'LEAF', 'BY', 'LEAF', 'WHOLLY', 'FORMAL', 'BUT', 'BEAUTIFUL', 'MIST', 'COMING', 'GRADUALLY', 'INTO', 
'THE', 'DISTANCE'] +1188-133604-0028-1799: ref=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0028-1799: hyp=['WELL', 'THEN', 'LAST', 'HERE', 'IS', "TURNER'S", 'GREEK', 'SCHOOL', 'OF', 'THE', 'HIGHEST', 'CLASS', 'AND', 'YOU', 'DEFINE', 'HIS', 'ART', 'ABSOLUTELY', 'AS', 'FIRST', 'THE', 'DISPLAYING', 'INTENSELY', 'AND', 'WITH', 'THE', 'STERNEST', 'INTELLECT', 'OF', 'NATURAL', 'FORM', 'AS', 'IT', 'IS', 'AND', 'THEN', 'THE', 'ENVELOPMENT', 'OF', 'IT', 'WITH', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: ref=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'AND', 'FIRE'] +1188-133604-0029-1800: hyp=['ONLY', 'THERE', 'ARE', 'TWO', 'SORTS', 'OF', 'CLOUD', 'IN', 'FIRE'] +1188-133604-0030-1801: ref=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0030-1801: hyp=['HE', 'KNOWS', 'THEM', 'BOTH'] +1188-133604-0031-1802: ref=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0031-1802: hyp=["THERE'S", 'ONE', 'AND', "THERE'S", 'ANOTHER', 'THE', 'DUDLEY', 'AND', 'THE', 'FLINT'] +1188-133604-0032-1803: ref=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0032-1803: hyp=['IT', 'IS', 'ONLY', 'A', 'PENCIL', 'OUTLINE', 'BY', 'EDWARD', 'BURNE', 'JONES', 'IN', 'ILLUSTRATION', 'OF', 'THE', 'STORY', 'OF', 'PSYCHE', 'IT', 'IS', 'THE', 'INTRODUCTION', 'OF', 'PSYCHE', 'AFTER', 'ALL', 'HER', 'TROUBLES', 'INTO', 'HEAVEN'] +1188-133604-0033-1804: ref=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0033-1804: hyp=['EVERY', 'PLANT', 'IN', 'THE', 'GRASS', 'IS', 'SET', 'FORMALLY', 'GROWS', 'PERFECTLY', 'AND', 'MAY', 'BE', 'REALIZED', 'COMPLETELY'] +1188-133604-0034-1805: ref=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRYSTAL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0034-1805: hyp=['EXQUISITE', 'ORDER', 'AND', 'UNIVERSAL', 'WITH', 'ETERNAL', 'LIFE', 'AND', 'LIGHT', 'THIS', 'IS', 'THE', 'FAITH', 'AND', 'EFFORT', 'OF', 'THE', 'SCHOOLS', 'OF', 'CRISTEL', 'AND', 'YOU', 'MAY', 'DESCRIBE', 'AND', 'COMPLETE', 'THEIR', 'WORK', 'QUITE', 'LITERALLY', 'BY', 'TAKING', 'ANY', 'VERSES', 'OF', 'CHAUCER', 'IN', 'HIS', 'TENDER', 'MOOD', 'AND', 'OBSERVING', 'HOW', 'HE', 'INSISTS', 'ON', 'THE', 'CLEARNESS', 'AND', 'BRIGHTNESS', 'FIRST', 'AND', 'THEN', 'ON', 'THE', 'ORDER'] +1188-133604-0035-1806: ref=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0035-1806: hyp=['THUS', 'IN', "CHAUCER'S", 'DREAM'] +1188-133604-0036-1807: ref=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 
'DIGNIFIED', 'AND', 'BEAUTIFUL'] +1188-133604-0036-1807: hyp=['IN', 'BOTH', 'THESE', 'HIGH', 'MYTHICAL', 'SUBJECTS', 'THE', 'SURROUNDING', 'NATURE', 'THOUGH', 'SUFFERING', 'IS', 'STILL', 'DIGNIFIED', 'AND', 'BEAUTIFUL'] +1188-133604-0037-1808: ref=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBEIN', 'OR', 'DUERER'] +1188-133604-0037-1808: hyp=['EVERY', 'LINE', 'IN', 'WHICH', 'THE', 'MASTER', 'TRACES', 'IT', 'EVEN', 'WHERE', 'SEEMINGLY', 'NEGLIGENT', 'IS', 'LOVELY', 'AND', 'SET', 'DOWN', 'WITH', 'A', 'MEDITATIVE', 'CALMNESS', 'WHICH', 'MAKES', 'THESE', 'TWO', 'ETCHINGS', 'CAPABLE', 'OF', 'BEING', 'PLACED', 'BESIDE', 'THE', 'MOST', 'TRANQUIL', 'WORK', 'OF', 'HOLBINE', 'OR', 'DURE'] +1188-133604-0038-1809: ref=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0038-1809: hyp=['BUT', 'NOW', 'HERE', 'IS', 'A', 'SUBJECT', 'OF', 'WHICH', 'YOU', 'WILL', 'WONDER', 'AT', 'FIRST', 'WHY', 'TURNER', 'DREW', 'IT', 'AT', 'ALL'] +1188-133604-0039-1810: ref=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'AND', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0039-1810: hyp=['IT', 'HAS', 'NO', 'BEAUTY', 'WHATSOEVER', 'NO', 'SPECIALTY', 'OF', 'PICTURESQUENESS', 'IN', 'ALL', 'ITS', 'LINES', 'ARE', 'CRAMPED', 'AND', 'POOR'] +1188-133604-0040-1811: ref=['THE', 'CRAMPNESS', 'AND', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0040-1811: hyp=['THE', 'CRAMPNESS', 'IN', 'THE', 'POVERTY', 'ARE', 'ALL', 'INTENDED'] +1188-133604-0041-1812: ref=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATERMILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0041-1812: hyp=['IT', 'IS', 'A', 'GLEANER', 'BRINGING', 'DOWN', 'HER', 'ONE', 'SHEAF', 'OF', 'CORN', 'TO', 'AN', 'OLD', 'WATER', 'MILL', 'ITSELF', 'MOSSY', 'AND', 'RENT', 'SCARCELY', 'ABLE', 'TO', 'GET', 'ITS', 'STONES', 'TO', 'TURN'] +1188-133604-0042-1813: ref=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0042-1813: hyp=['THE', 'SCENE', 'IS', 'ABSOLUTELY', 'ARCADIAN'] +1188-133604-0043-1814: ref=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0043-1814: hyp=['SEE', 'THAT', 'YOUR', 'LIVES', 'BE', 'IN', 'NOTHING', 'WORSE', 'THAN', 'A', "BOY'S", 'CLIMBING', 'FOR', 'HIS', 'ENTANGLED', 'KITE'] +1188-133604-0044-1815: ref=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 'SHEEP', 'LIVE', 'THE', 'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'AND', 'THE', 'GIER', 'EAGLE'] +1188-133604-0044-1815: hyp=['IT', 'WILL', 'BE', 'WELL', 'FOR', 'YOU', 'IF', 'YOU', 'JOIN', 'NOT', 'WITH', 'THOSE', 'WHO', 'INSTEAD', 'OF', 'KITES', 'FLY', 'FALCONS', 'WHO', 'INSTEAD', 'OF', 'OBEYING', 'THE', 'LAST', 'WORDS', 'OF', 'THE', 'GREAT', 'CLOUD', 'SHEPHERD', 'TO', 'FEED', 'HIS', 
'SHEEP', 'LIVE', 'THE', 'LIVES', 'HOW', 'MUCH', 'LESS', 'THAN', 'VANITY', 'OF', 'THE', 'WAR', 'WOLF', 'AND', 'THE', 'GEAR', 'EAGLE'] +121-121726-0000-2558: ref=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0000-2558: hyp=['ALSO', 'A', 'POPULAR', 'CONTRIVANCE', 'WHEREBY', 'LOVE', 'MAKING', 'MAY', 'BE', 'SUSPENDED', 'BUT', 'NOT', 'STOPPED', 'DURING', 'THE', 'PICNIC', 'SEASON'] +121-121726-0001-2559: ref=['HARANGUE', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0001-2559: hyp=['HURRY', 'THE', 'TIRESOME', 'PRODUCT', 'OF', 'A', 'TIRELESS', 'TONGUE'] +121-121726-0002-2560: ref=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0002-2560: hyp=['ANGOR', 'PAIN', 'PAINFUL', 'TO', 'HEAR'] +121-121726-0003-2561: ref=['HAY', 'FEVER', 'A', 'HEART', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0003-2561: hyp=['HEY', 'FEVER', 'A', 'HARD', 'TROUBLE', 'CAUSED', 'BY', 'FALLING', 'IN', 'LOVE', 'WITH', 'A', 'GRASS', 'WIDOW'] +121-121726-0004-2562: ref=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0004-2562: hyp=['HEAVEN', 'A', 'GOOD', 'PLACE', 'TO', 'BE', 'RAISED', 'TO'] +121-121726-0005-2563: ref=['HEDGE', 'A', 'FENCE'] +121-121726-0005-2563: hyp=['HEDGE', 'A', 'FENCE'] +121-121726-0006-2564: ref=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0006-2564: hyp=['HEREDITY', 'THE', 'CAUSE', 'OF', 'ALL', 'OUR', 'FAULTS'] +121-121726-0007-2565: ref=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0007-2565: hyp=['HORSE', 'SENSE', 'A', 'DEGREE', 'OF', 'WISDOM', 'THAT', 'KEEPS', 'ONE', 'FROM', 'BETTING', 'ON', 'THE', 'RACES'] +121-121726-0008-2566: ref=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0008-2566: hyp=['HOSE', "MAN'S", 'EXCUSE', 'FOR', 'WETTING', 'THE', 'WALK'] +121-121726-0009-2567: ref=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0009-2567: hyp=['HOTEL', 'A', 'PLACE', 'WHERE', 'A', 'GUEST', 'OFTEN', 'GIVES', 'UP', 'GOOD', 'DOLLARS', 'FOR', 'POOR', 'QUARTERS'] +121-121726-0010-2568: ref=['HOUSECLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0010-2568: hyp=['HOUSE', 'CLEANING', 'A', 'DOMESTIC', 'UPHEAVAL', 'THAT', 'MAKES', 'IT', 'EASY', 'FOR', 'THE', 'GOVERNMENT', 'TO', 'ENLIST', 'ALL', 'THE', 'SOLDIERS', 'IT', 'NEEDS'] +121-121726-0011-2569: ref=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0011-2569: hyp=['HUSBAND', 'THE', 'NEXT', 'THING', 'TO', 'A', 'WIFE'] +121-121726-0012-2570: ref=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0012-2570: hyp=['HUSSY', 'WOMAN', 'AND', 'BOND', 'TIE'] +121-121726-0013-2571: ref=['TIED', 'TO', 'A', 'WOMAN'] +121-121726-0013-2571: hyp=['TIED', 'TO', 'A', 'WOMAN'] +121-121726-0014-2572: ref=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-121726-0014-2572: hyp=['HYPOCRITE', 'A', 'HORSE', 'DEALER'] +121-123852-0000-2615: ref=['THOSE', 'PRETTY', 'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOMETIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0000-2615: 
hyp=['THOSE', 'PRETTY', 'WRONGS', 'THAT', 'LIBERTY', 'COMMITS', 'WHEN', 'I', 'AM', 'SOMETIME', 'ABSENT', 'FROM', 'THY', 'HEART', 'THY', 'BEAUTY', 'AND', 'THY', 'YEARS', 'FULL', 'WELL', 'BEFITS', 'FOR', 'STILL', 'TEMPTATION', 'FOLLOWS', 'WHERE', 'THOU', 'ART'] +121-123852-0001-2616: ref=['AY', 'ME'] +121-123852-0001-2616: hyp=['I', 'ME'] +121-123852-0002-2617: ref=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', "REMOV'D", 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0002-2617: hyp=['NO', 'MATTER', 'THEN', 'ALTHOUGH', 'MY', 'FOOT', 'DID', 'STAND', 'UPON', 'THE', 'FARTHEST', 'EARTH', 'REMOVED', 'FROM', 'THEE', 'FOR', 'NIMBLE', 'THOUGHT', 'CAN', 'JUMP', 'BOTH', 'SEA', 'AND', 'LAND', 'AS', 'SOON', 'AS', 'THINK', 'THE', 'PLACE', 'WHERE', 'HE', 'WOULD', 'BE', 'BUT', 'AH'] +121-123852-0003-2618: ref=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'THOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NOUGHT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGES', 'OF', "EITHER'S", 'WOE'] +121-123852-0003-2618: hyp=['THOUGHT', 'KILLS', 'ME', 'THAT', 'I', 'AM', 'NOT', 'BOUGHT', 'TO', 'LEAP', 'LARGE', 'LENGTHS', 'OF', 'MILES', 'WHEN', 'THOU', 'ART', 'GONE', 'BUT', 'THAT', 'SO', 'MUCH', 'OF', 'EARTH', 'AND', 'WATER', 'WROUGHT', 'I', 'MUST', 'ATTEND', "TIME'S", 'LEISURE', 'WITH', 'MY', 'MOAN', 'RECEIVING', 'NAUGHT', 'BY', 'ELEMENTS', 'SO', 'SLOW', 'BUT', 'HEAVY', 'TEARS', 'BADGERS', 'OF', "EITHER'S", 'WOE'] +121-123852-0004-2619: ref=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', "PIERC'D", 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEA', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123852-0004-2619: hyp=['MY', 'HEART', 'DOTH', 'PLEAD', 'THAT', 'THOU', 'IN', 'HIM', 'DOST', 'LIE', 'A', 'CLOSET', 'NEVER', 'PIERCED', 'WITH', 'CRYSTAL', 'EYES', 'BUT', 'THE', 'DEFENDANT', 'DOTH', 'THAT', 'PLEAD', 'DENY', 'AND', 'SAYS', 'IN', 'HIM', 'THY', 'FAIR', 'APPEARANCE', 'LIES'] +121-123859-0000-2573: ref=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', "STEEL'D", 'SENSE', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0000-2573: hyp=['YOU', 'ARE', 'MY', 'ALL', 'THE', 'WORLD', 'AND', 'I', 'MUST', 'STRIVE', 'TO', 'KNOW', 'MY', 'SHAMES', 'AND', 'PRAISES', 'FROM', 'YOUR', 'TONGUE', 'NONE', 'ELSE', 'TO', 'ME', 'NOR', 'I', 'TO', 'NONE', 'ALIVE', 'THAT', 'MY', 'STEELED', 'SCENTS', 'OR', 'CHANGES', 'RIGHT', 'OR', 'WRONG'] +121-123859-0001-2574: ref=['O', 'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 'MY', 'SEEING', 'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEING', 'AND', 'TO', 'HIS', 'PALATE', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', "POISON'D", 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0001-2574: hyp=['OH', 'TIS', 'THE', 'FIRST', 'TIS', 'FLATTERY', 'IN', 
'MY', 'SEEING', 'AND', 'MY', 'GREAT', 'MIND', 'MOST', 'KINGLY', 'DRINKS', 'IT', 'UP', 'MINE', 'EYE', 'WELL', 'KNOWS', 'WHAT', 'WITH', 'HIS', 'GUST', 'IS', 'GREEN', 'AND', 'TO', 'HIS', 'PALLET', 'DOTH', 'PREPARE', 'THE', 'CUP', 'IF', 'IT', 'BE', 'POISONED', 'TIS', 'THE', 'LESSER', 'SIN', 'THAT', 'MINE', 'EYE', 'LOVES', 'IT', 'AND', 'DOTH', 'FIRST', 'BEGIN'] +121-123859-0002-2575: ref=['BUT', 'RECKONING', 'TIME', 'WHOSE', "MILLION'D", 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', "SHARP'ST", 'INTENTS', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', "O'ER", 'INCERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0002-2575: hyp=['BUT', 'RECKONING', 'TIME', 'WHOSE', 'MILLIONED', 'ACCIDENTS', 'CREEP', 'IN', 'TWIXT', 'VOWS', 'AND', 'CHANGE', 'DECREES', 'OF', 'KINGS', 'TAN', 'SACRED', 'BEAUTY', 'BLUNT', 'THE', 'SHARPEST', 'INTENSE', 'DIVERT', 'STRONG', 'MINDS', 'TO', 'THE', 'COURSE', 'OF', 'ALTERING', 'THINGS', 'ALAS', 'WHY', 'FEARING', 'OF', "TIME'S", 'TYRANNY', 'MIGHT', 'I', 'NOT', 'THEN', 'SAY', 'NOW', 'I', 'LOVE', 'YOU', 'BEST', 'WHEN', 'I', 'WAS', 'CERTAIN', 'OR', 'IN', 'CERTAINTY', 'CROWNING', 'THE', 'PRESENT', 'DOUBTING', 'OF', 'THE', 'REST'] +121-123859-0003-2576: ref=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0003-2576: hyp=['LOVE', 'IS', 'A', 'BABE', 'THEN', 'MIGHT', 'I', 'NOT', 'SAY', 'SO', 'TO', 'GIVE', 'FULL', 'GROWTH', 'TO', 'THAT', 'WHICH', 'STILL', 'DOTH', 'GROW'] +121-123859-0004-2577: ref=['SO', 'I', 'RETURN', "REBUK'D", 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-123859-0004-2577: hyp=['SO', 'I', 'RETURNED', 'REBUKED', 'TO', 'MY', 'CONTENT', 'AND', 'GAIN', 'BY', 'ILL', 'THRICE', 'MORE', 'THAN', 'I', 'HAVE', 'SPENT'] +121-127105-0000-2578: ref=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0000-2578: hyp=['IT', 'WAS', 'THIS', 'OBSERVATION', 'THAT', 'DREW', 'FROM', 'DOUGLAS', 'NOT', 'IMMEDIATELY', 'BUT', 'LATER', 'IN', 'THE', 'EVENING', 'A', 'REPLY', 'THAT', 'HAD', 'THE', 'INTERESTING', 'CONSEQUENCE', 'TO', 'WHICH', 'I', 'CALL', 'ATTENTION'] +121-127105-0001-2579: ref=['SOMEONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0001-2579: hyp=['SOME', 'ONE', 'ELSE', 'TOLD', 'A', 'STORY', 'NOT', 'PARTICULARLY', 'EFFECTIVE', 'WHICH', 'I', 'SAW', 'HE', 'WAS', 'NOT', 'FOLLOWING'] +121-127105-0002-2580: ref=['CRIED', 'ONE', 'OF', 'THE', 'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0002-2580: hyp=['CRIED', 'ONE', 'OF', 'THE', 'WOMEN', 'HE', 'TOOK', 'NO', 'NOTICE', 'OF', 'HER', 'HE', 'LOOKED', 'AT', 'ME', 'BUT', 'AS', 'IF', 'INSTEAD', 'OF', 'ME', 'HE', 'SAW', 'WHAT', 'HE', 'SPOKE', 'OF'] +121-127105-0003-2581: ref=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 
'AFTER', 'WHICH', 'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0003-2581: hyp=['THERE', 'WAS', 'A', 'UNANIMOUS', 'GROAN', 'AT', 'THIS', 'AND', 'MUCH', 'REPROACH', 'AFTER', 'WHICH', 'IN', 'HIS', 'PREOCCUPIED', 'WAY', 'HE', 'EXPLAINED'] +121-127105-0004-2582: ref=['THE', "STORY'S", 'WRITTEN'] +121-127105-0004-2582: hyp=['THE', 'STORIES', 'WRITTEN'] +121-127105-0005-2583: ref=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0005-2583: hyp=['I', 'COULD', 'WRITE', 'TO', 'MY', 'MAN', 'AND', 'ENCLOSE', 'THE', 'KEY', 'HE', 'COULD', 'SEND', 'DOWN', 'THE', 'PACKET', 'AS', 'HE', 'FINDS', 'IT'] +121-127105-0006-2584: ref=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0006-2584: hyp=['THE', 'OTHERS', 'RESENTED', 'POSTPONEMENT', 'BUT', 'IT', 'WAS', 'JUST', 'HIS', 'SCRUPLES', 'THAT', 'CHARMED', 'ME'] +121-127105-0007-2585: ref=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'OH', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0007-2585: hyp=['TO', 'THIS', 'HIS', 'ANSWER', 'WAS', 'PROMPT', 'OH', 'THANK', 'GOD', 'NO', 'AND', 'IS', 'THE', 'RECORD', 'YOURS'] +121-127105-0008-2586: ref=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'S"] +121-127105-0008-2586: hyp=['HE', 'HUNG', 'FIRE', 'AGAIN', 'A', "WOMAN'S"] +121-127105-0009-2587: ref=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0009-2587: hyp=['SHE', 'HAS', 'BEEN', 'DEAD', 'THESE', 'TWENTY', 'YEARS'] +121-127105-0010-2588: ref=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0010-2588: hyp=['SHE', 'SENT', 'ME', 'THE', 'PAGES', 'IN', 'QUESTION', 'BEFORE', 'SHE', 'DIED'] +121-127105-0011-2589: ref=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0011-2589: hyp=['SHE', 'WAS', 'THE', 'MOST', 'AGREEABLE', 'WOMAN', "I'VE", 'EVER', 'KNOWN', 'IN', 'HER', 'POSITION', 'SHE', 'WOULD', 'HAVE', 'BEEN', 'WORTHY', 'OF', 'ANY', 'WHATEVER'] +121-127105-0012-2590: ref=['IT', "WASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0012-2590: hyp=["TWASN'T", 'SIMPLY', 'THAT', 'SHE', 'SAID', 'SO', 'BUT', 'THAT', 'I', 'KNEW', 'SHE', "HADN'T", 'I', 'WAS', 'SURE', 'I', 'COULD', 'SEE'] +121-127105-0013-2591: ref=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0013-2591: hyp=["YOU'LL", 'EASILY', 'JUDGE', 'WHY', 'WHEN', 'YOU', 'HEAR', 'BECAUSE', 'THE', 'THING', 'HAD', 'BEEN', 'SUCH', 'A', 'SCARE', 'HE', 'CONTINUED', 'TO', 'FIX', 'ME'] +121-127105-0014-2592: ref=['YOU', 'ARE', 'ACUTE'] +121-127105-0014-2592: hyp=['YOU', 'ARE', 'ACUTE'] +121-127105-0015-2593: ref=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0015-2593: hyp=['HE', 'QUITTED', 'THE', 'FIRE', 'AND', 'DROPPED', 'BACK', 'INTO', 'HIS', 'CHAIR'] +121-127105-0016-2594: ref=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0016-2594: hyp=['PROBABLY', 'NOT', 'TILL', 'THE', 'SECOND', 'POST'] +121-127105-0017-2595: ref=['IT', 'WAS', 'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0017-2595: 
hyp=['IT', 'WAS', 'ALMOST', 'THE', 'TONE', 'OF', 'HOPE', 'EVERYBODY', 'WILL', 'STAY'] +121-127105-0018-2596: ref=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0018-2596: hyp=['CRIED', 'THE', 'LADIES', 'WHOSE', 'DEPARTURE', 'HAD', 'BEEN', 'FIXED'] +121-127105-0019-2597: ref=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0019-2597: hyp=['MISSUS', 'GRIFFIN', 'HOWEVER', 'EXPRESSED', 'THE', 'NEED', 'FOR', 'A', 'LITTLE', 'MORE', 'LIGHT'] +121-127105-0020-2598: ref=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "MORE'S", 'THE', 'PITY', 'THEN'] +121-127105-0020-2598: hyp=['WHO', 'WAS', 'IT', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'THE', 'STORY', 'WILL', 'TELL', 'I', 'TOOK', 'UPON', 'MYSELF', 'TO', 'REPLY', 'OH', 'I', "CAN'T", 'WAIT', 'FOR', 'THE', 'STORY', 'THE', 'STORY', "WON'T", 'TELL', 'SAID', 'DOUGLAS', 'NOT', 'IN', 'ANY', 'LITERAL', 'VULGAR', 'WAY', "MORE'S", 'THE', 'PITY', 'THEN'] +121-127105-0021-2599: ref=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0021-2599: hyp=["WON'T", 'YOU', 'TELL', 'DOUGLAS'] +121-127105-0022-2600: ref=['WELL', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0022-2600: hyp=['FOR', 'IF', 'I', "DON'T", 'KNOW', 'WHO', 'SHE', 'WAS', 'IN', 'LOVE', 'WITH', 'I', 'KNOW', 'WHO', 'HE', 'WAS'] +121-127105-0023-2601: ref=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0023-2601: hyp=['LET', 'ME', 'SAY', 'HERE', 'DISTINCTLY', 'TO', 'HAVE', 'DONE', 'WITH', 'IT', 'THAT', 'THIS', 'NARRATIVE', 'FROM', 'AN', 'EXACT', 'TRANSCRIPT', 'OF', 'MY', 'OWN', 'MADE', 'MUCH', 'LATER', 'IS', 'WHAT', 'I', 'SHALL', 'PRESENTLY', 'GIVE'] +121-127105-0024-2602: ref=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0024-2602: hyp=['POOR', 'DOUGLAS', 'BEFORE', 'HIS', 'DEATH', 'WHEN', 'IT', 'WAS', 'IN', 'SIGHT', 'COMMITTED', 'TO', 'ME', 'THE', 'MANUSCRIPT', 'THAT', 'REACHED', 'HIM', 'ON', 'THE', 'THIRD', 'OF', 'THESE', 'DAYS', 'AND', 'THAT', 'ON', 'THE', 'SAME', 'SPOT', 'WITH', 'IMMENSE', 'EFFECT', 'HE', 'BEGAN', 'TO', 'READ', 'TO', 'OUR', 'HUSHED', 'LITTLE', 'CIRCLE', 'ON', 'THE', 'NIGHT', 'OF', 'THE', 'FOURTH'] +121-127105-0025-2603: ref=['THE', 'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 'THEY', 'DEPARTED', 'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0025-2603: hyp=['THE', 'DEPARTING', 'LADIES', 'WHO', 'HAD', 'SAID', 'THEY', 'WOULD', 'STAY', "DIDN'T", 'OF', 'COURSE', 'THANK', 'HEAVEN', 'STAY', 
'THEY', 'DEPARTED', 'IN', 'CONSEQUENCE', 'OF', 'ARRANGEMENTS', 'MADE', 'IN', 'A', 'RAGE', 'OF', 'CURIOSITY', 'AS', 'THEY', 'PROFESSED', 'PRODUCED', 'BY', 'THE', 'TOUCHES', 'WITH', 'WHICH', 'HE', 'HAD', 'ALREADY', 'WORKED', 'US', 'UP'] +121-127105-0026-2604: ref=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0026-2604: hyp=['THE', 'FIRST', 'OF', 'THESE', 'TOUCHES', 'CONVEYED', 'THAT', 'THE', 'WRITTEN', 'STATEMENT', 'TOOK', 'UP', 'THE', 'TALE', 'AT', 'A', 'POINT', 'AFTER', 'IT', 'HAD', 'IN', 'A', 'MANNER', 'BEGUN'] +121-127105-0027-2605: ref=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0027-2605: hyp=['HE', 'HAD', 'FOR', 'HIS', 'OWN', 'TOWN', 'RESIDENCE', 'A', 'BIG', 'HOUSE', 'FILLED', 'WITH', 'THE', 'SPOILS', 'OF', 'TRAVEL', 'AND', 'THE', 'TROPHIES', 'OF', 'THE', 'CHASE', 'BUT', 'IT', 'WAS', 'TO', 'HIS', 'COUNTRY', 'HOME', 'AN', 'OLD', 'FAMILY', 'PLACE', 'IN', 'ESSEX', 'THAT', 'HE', 'WISHED', 'HER', 'IMMEDIATELY', 'TO', 'PROCEED'] +121-127105-0028-2606: ref=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0028-2606: hyp=['THE', 'AWKWARD', 'THING', 'WAS', 'THAT', 'THEY', 'HAD', 'PRACTICALLY', 'NO', 'OTHER', 'RELATIONS', 'AND', 'THAT', 'HIS', 'OWN', 'AFFAIRS', 'TOOK', 'UP', 'ALL', 'HIS', 'TIME'] +121-127105-0029-2607: ref=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0029-2607: hyp=['THERE', 'WERE', 'PLENTY', 'OF', 'PEOPLE', 'TO', 'HELP', 'BUT', 'OF', 'COURSE', 'THE', 'YOUNG', 'LADY', 'WHO', 'SHOULD', 'GO', 'DOWN', 'AS', 'GOVERNESS', 'WOULD', 'BE', 'IN', 'SUPREME', 'AUTHORITY'] +121-127105-0030-2608: ref=['I', "DON'T", 'ANTICIPATE'] +121-127105-0030-2608: hyp=['I', "DON'T", 'ANTICIPATE'] +121-127105-0031-2609: ref=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0031-2609: hyp=['SHE', 'WAS', 'YOUNG', 'UNTRIED', 'NERVOUS', 'IT', 'WAS', 'A', 'VISION', 'OF', 'SERIOUS', 'DUTIES', 'AND', 'LITTLE', 'COMPANY', 'OF', 'REALLY', 'GREAT', 'LONELINESS'] +121-127105-0032-2610: ref=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0032-2610: hyp=['YES', 'BUT', "THAT'S", 'JUST', 'THE', 'BEAUTY', 'OF', 'HER', 'PASSION'] +121-127105-0033-2611: ref=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 'IT'] +121-127105-0033-2611: hyp=['IT', 'WAS', 'THE', 'BEAUTY', 'OF', 'IT'] +121-127105-0034-2612: ref=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0034-2612: hyp=['IT', 'SOUNDED', 'DULL', 'IT', 'SOUNDED', 'STRANGE', 'AND', 'ALL', 'THE', 'MORE', 'SO', 'BECAUSE', 'OF', 'HIS', 'MAIN', 'CONDITION', 'WHICH', 'WAS'] +121-127105-0035-2613: ref=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 
'MENTIONED', 'TO', 'ME', 'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0035-2613: hyp=['SHE', 'PROMISED', 'TO', 'DO', 'THIS', 'AND', 'SHE', 'MENTIONED', 'TO', 'ME', 'THAT', 'WHEN', 'FOR', 'A', 'MOMENT', 'DISBURDENED', 'DELIGHTED', 'HE', 'HELD', 'HER', 'HAND', 'THANKING', 'HER', 'FOR', 'THE', 'SACRIFICE', 'SHE', 'ALREADY', 'FELT', 'REWARDED'] +121-127105-0036-2614: ref=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +121-127105-0036-2614: hyp=['BUT', 'WAS', 'THAT', 'ALL', 'HER', 'REWARD', 'ONE', 'OF', 'THE', 'LADIES', 'ASKED'] +1221-135766-0000-1305: ref=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0000-1305: hyp=['HOW', 'STRANGE', 'IT', 'SEEMED', 'TO', 'THE', 'SAD', 'WOMAN', 'AS', 'SHE', 'WATCHED', 'THE', 'GROWTH', 'AND', 'THE', 'BEAUTY', 'THAT', 'BECAME', 'EVERY', 'DAY', 'MORE', 'BRILLIANT', 'AND', 'THE', 'INTELLIGENCE', 'THAT', 'THREW', 'ITS', 'QUIVERING', 'SUNSHINE', 'OVER', 'THE', 'TINY', 'FEATURES', 'OF', 'THIS', 'CHILD'] +1221-135766-0001-1306: ref=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONOURED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOR', 'EVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0001-1306: hyp=['GOD', 'AS', 'A', 'DIRECT', 'CONSEQUENCE', 'OF', 'THE', 'SIN', 'WHICH', 'MAN', 'THUS', 'PUNISHED', 'HAD', 'GIVEN', 'HER', 'A', 'LOVELY', 'CHILD', 'WHOSE', 'PLACE', 'WAS', 'ON', 'THAT', 'SAME', 'DISHONOURED', 'BOSOM', 'TO', 'CONNECT', 'HER', 'PARENT', 'FOREVER', 'WITH', 'THE', 'RACE', 'AND', 'DESCENT', 'OF', 'MORTALS', 'AND', 'TO', 'BE', 'FINALLY', 'A', 'BLESSED', 'SOUL', 'IN', 'HEAVEN'] +1221-135766-0002-1307: ref=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0002-1307: hyp=['YET', 'THESE', 'THOUGHTS', 'AFFECTED', 'HESTER', 'PRYNNE', 'LESS', 'WITH', 'HOPE', 'THAN', 'APPREHENSION'] +1221-135766-0003-1308: ref=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'CO', 'EXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0003-1308: hyp=['THE', 'CHILD', 'HAD', 'A', 'NATIVE', 'GRACE', 'WHICH', 'DOES', 'NOT', 'INVARIABLY', 'COEXIST', 'WITH', 'FAULTLESS', 'BEAUTY', 'ITS', 'ATTIRE', 'HOWEVER', 'SIMPLE', 'ALWAYS', 'IMPRESSED', 'THE', 'BEHOLDER', 'AS', 'IF', 'IT', 'WERE', 'THE', 'VERY', 'GARB', 'THAT', 'PRECISELY', 'BECAME', 'IT', 'BEST'] +1221-135766-0004-1309: ref=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 'OF', 'HER', 'INNER', 'LIFE'] +1221-135766-0004-1309: hyp=['THIS', 'OUTWARD', 'MUTABILITY', 'INDICATED', 'AND', 'DID', 'NOT', 'MORE', 'THAN', 'FAIRLY', 'EXPRESS', 'THE', 'VARIOUS', 'PROPERTIES', 
'OF', 'HER', 'INNER', 'LIFE'] +1221-135766-0005-1310: ref=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0005-1310: hyp=['HESTER', 'COULD', 'ONLY', 'ACCOUNT', 'FOR', 'THE', "CHILD'S", 'CHARACTER', 'AND', 'EVEN', 'THEN', 'MOST', 'VAGUELY', 'AND', 'IMPERFECTLY', 'BY', 'RECALLING', 'WHAT', 'SHE', 'HERSELF', 'HAD', 'BEEN', 'DURING', 'THAT', 'MOMENTOUS', 'PERIOD', 'WHILE', 'PEARL', 'WAS', 'IMBIBING', 'HER', 'SOUL', 'FROM', 'THE', 'SPIRITUAL', 'WORLD', 'AND', 'HER', 'BODILY', 'FRAME', 'FROM', 'ITS', 'MATERIAL', 'OF', 'EARTH'] +1221-135766-0006-1311: ref=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0006-1311: hyp=['THEY', 'WERE', 'NOW', 'ILLUMINATED', 'BY', 'THE', 'MORNING', 'RADIANCE', 'OF', 'A', 'YOUNG', "CHILD'S", 'DISPOSITION', 'BUT', 'LATER', 'IN', 'THE', 'DAY', 'OF', 'EARTHLY', 'EXISTENCE', 'MIGHT', 'BE', 'PROLIFIC', 'OF', 'THE', 'STORM', 'AND', 'WHIRLWIND'] +1221-135766-0007-1312: ref=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0007-1312: hyp=['HESTER', 'PRYNNE', 'NEVERTHELESS', 'THE', 'LOVING', 'MOTHER', 'OF', 'THIS', 'ONE', 'CHILD', 'RAN', 'LITTLE', 'RISK', 'OF', 'ERRING', 'ON', 'THE', 'SIDE', 'OF', 'UNDUE', 'SEVERITY'] +1221-135766-0008-1313: ref=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0008-1313: hyp=['MINDFUL', 'HOWEVER', 'OF', 'HER', 'OWN', 'ERRORS', 'AND', 'MISFORTUNES', 'SHE', 'EARLY', 'SOUGHT', 'TO', 'IMPOSE', 'A', 'TENDER', 'BUT', 'STRICT', 'CONTROL', 'OVER', 'THE', 'INFANT', 'IMMORTALITY', 'THAT', 'WAS', 'COMMITTED', 'TO', 'HER', 'CHARGE'] +1221-135766-0009-1314: ref=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 'RULED', 'THE', 'MOMENT'] +1221-135766-0009-1314: hyp=['AS', 'TO', 'ANY', 'OTHER', 'KIND', 'OF', 'DISCIPLINE', 'WHETHER', 'ADDRESSED', 'TO', 'HER', 'MIND', 'OR', 'HEART', 'LITTLE', 'PEARL', 'MIGHT', 'OR', 'MIGHT', 'NOT', 'BE', 'WITHIN', 'ITS', 'REACH', 'IN', 'ACCORDANCE', 'WITH', 'THE', 'CAPRICE', 'THAT', 'ROLLED', 'THE', 'MOMENT'] +1221-135766-0010-1315: ref=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 'MALICIOUS', 'BUT', 'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0010-1315: hyp=['IT', 'WAS', 'A', 'LOOK', 'SO', 'INTELLIGENT', 'YET', 'INEXPLICABLE', 'PERVERSE', 'SOMETIMES', 'SO', 
'MALICIOUS', 'BUT', 'GENERALLY', 'ACCOMPANIED', 'BY', 'A', 'WILD', 'FLOW', 'OF', 'SPIRITS', 'THAT', 'HESTER', 'COULD', 'NOT', 'HELP', 'QUESTIONING', 'AT', 'SUCH', 'MOMENTS', 'WHETHER', 'PEARL', 'WAS', 'A', 'HUMAN', 'CHILD'] +1221-135766-0011-1316: ref=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0011-1316: hyp=['BEHOLDING', 'IT', 'HESTER', 'WAS', 'CONSTRAINED', 'TO', 'RUSH', 'TOWARDS', 'THE', 'CHILD', 'TO', 'PURSUE', 'THE', 'LITTLE', 'ELF', 'IN', 'THE', 'FLIGHT', 'WHICH', 'SHE', 'INVARIABLY', 'BEGAN', 'TO', 'SNATCH', 'HER', 'TO', 'HER', 'BOSOM', 'WITH', 'A', 'CLOSE', 'PRESSURE', 'AND', 'EARNEST', 'KISSES', 'NOT', 'SO', 'MUCH', 'FROM', 'OVERFLOWING', 'LOVE', 'AS', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'PEARL', 'WAS', 'FLESH', 'AND', 'BLOOD', 'AND', 'NOT', 'UTTERLY', 'DELUSIVE'] +1221-135766-0012-1317: ref=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0012-1317: hyp=['BROODING', 'OVER', 'ALL', 'THESE', 'MATTERS', 'THE', 'MOTHER', 'FELT', 'LIKE', 'ONE', 'WHO', 'HAS', 'EVOKED', 'A', 'SPIRIT', 'BUT', 'BY', 'SOME', 'IRREGULARITY', 'IN', 'THE', 'PROCESS', 'OF', 'CONJURATION', 'HAS', 'FAILED', 'TO', 'WIN', 'THE', 'MASTER', 'WORD', 'THAT', 'SHOULD', 'CONTROL', 'THIS', 'NEW', 'AND', 'INCOMPREHENSIBLE', 'INTELLIGENCE'] +1221-135766-0013-1318: ref=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INFANTILE', 'WORLD'] +1221-135766-0013-1318: hyp=['PEARL', 'WAS', 'A', 'BORN', 'OUTCAST', 'OF', 'THE', 'INVENTILE', 'WORLD'] +1221-135766-0014-1319: ref=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0014-1319: hyp=['PEARL', 'SAW', 'AND', 'GAZED', 'INTENTLY', 'BUT', 'NEVER', 'SOUGHT', 'TO', 'MAKE', 'ACQUAINTANCE'] +1221-135766-0015-1320: ref=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135766-0015-1320: hyp=['IF', 'SPOKEN', 'TO', 'SHE', 'WOULD', 'NOT', 'SPEAK', 'AGAIN'] +1221-135767-0000-1280: ref=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 'WORN', 'ON', 'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONOURABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0000-1280: hyp=['HESTER', 'PRYNNE', 'WENT', 'ONE', 'DAY', 'TO', 'THE', 'MANSION', 'OF', 'GOVERNOR', 'BELLINGHAM', 'WITH', 'A', 'PAIR', 'OF', 'GLOVES', 'WHICH', 'SHE', 'HAD', 'FRINGED', 'AND', 'EMBROIDERED', 'TO', 'HIS', 'ORDER', 'AND', 'WHICH', 'WERE', 'TO', 'BE', 
'WORN', 'ON', 'SOME', 'GREAT', 'OCCASION', 'OF', 'STATE', 'FOR', 'THOUGH', 'THE', 'CHANCES', 'OF', 'A', 'POPULAR', 'ELECTION', 'HAD', 'CAUSED', 'THIS', 'FORMER', 'RULER', 'TO', 'DESCEND', 'A', 'STEP', 'OR', 'TWO', 'FROM', 'THE', 'HIGHEST', 'RANK', 'HE', 'STILL', 'HELD', 'AN', 'HONORABLE', 'AND', 'INFLUENTIAL', 'PLACE', 'AMONG', 'THE', 'COLONIAL', 'MAGISTRACY'] +1221-135767-0001-1281: ref=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0001-1281: hyp=['ANOTHER', 'AND', 'FAR', 'MORE', 'IMPORTANT', 'REASON', 'THAN', 'THE', 'DELIVERY', 'OF', 'A', 'PAIR', 'OF', 'EMBROIDERED', 'GLOVES', 'IMPELLED', 'HESTER', 'AT', 'THIS', 'TIME', 'TO', 'SEEK', 'AN', 'INTERVIEW', 'WITH', 'A', 'PERSONAGE', 'OF', 'SO', 'MUCH', 'POWER', 'AND', 'ACTIVITY', 'IN', 'THE', 'AFFAIRS', 'OF', 'THE', 'SETTLEMENT'] +1221-135767-0002-1282: ref=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0002-1282: hyp=['AT', 'THAT', 'EPOCH', 'OF', 'PRISTINE', 'SIMPLICITY', 'HOWEVER', 'MATTERS', 'OF', 'EVEN', 'SLIGHTER', 'PUBLIC', 'INTEREST', 'AND', 'OF', 'FAR', 'LESS', 'INTRINSIC', 'WEIGHT', 'THAN', 'THE', 'WELFARE', 'OF', 'HESTER', 'AND', 'HER', 'CHILD', 'WERE', 'STRANGELY', 'MIXED', 'UP', 'WITH', 'THE', 'DELIBERATIONS', 'OF', 'LEGISLATORS', 'AND', 'ACTS', 'OF', 'STATE'] +1221-135767-0003-1283: ref=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0003-1283: hyp=['THE', 'PERIOD', 'WAS', 'HARDLY', 'IF', 'AT', 'ALL', 'EARLIER', 'THAN', 'THAT', 'OF', 'OUR', 'STORY', 'WHEN', 'A', 'DISPUTE', 'CONCERNING', 'THE', 'RIGHT', 'OF', 'PROPERTY', 'IN', 'A', 'PIG', 'NOT', 'ONLY', 'CAUSED', 'A', 'FIERCE', 'AND', 'BITTER', 'CONTEST', 'IN', 'THE', 'LEGISLATIVE', 'BODY', 'OF', 'THE', 'COLONY', 'BUT', 'RESULTED', 'IN', 'AN', 'IMPORTANT', 'MODIFICATION', 'OF', 'THE', 'FRAMEWORK', 'ITSELF', 'OF', 'THE', 'LEGISLATURE'] +1221-135767-0004-1284: ref=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 'ALREADY', 'OF', 'A', 'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0004-1284: hyp=['WE', 'HAVE', 'SPOKEN', 'OF', "PEARL'S", 'RICH', 'AND', 'LUXURIANT', 'BEAUTY', 'A', 'BEAUTY', 'THAT', 'SHONE', 'WITH', 'DEEP', 'AND', 'VIVID', 'TINTS', 'A', 'BRIGHT', 'COMPLEXION', 'EYES', 'POSSESSING', 'INTENSITY', 'BOTH', 'OF', 'DEPTH', 'AND', 'GLOW', 'AND', 'HAIR', 
'ALREADY', 'OF', 'A', 'DEEP', 'GLOSSY', 'BROWN', 'AND', 'WHICH', 'IN', 'AFTER', 'YEARS', 'WOULD', 'BE', 'NEARLY', 'AKIN', 'TO', 'BLACK'] +1221-135767-0005-1285: ref=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0005-1285: hyp=['IT', 'WAS', 'THE', 'SCARLET', 'LETTER', 'IN', 'ANOTHER', 'FORM', 'THE', 'SCARLET', 'LETTER', 'ENDOWED', 'WITH', 'LIFE'] +1221-135767-0006-1286: ref=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0006-1286: hyp=['THE', 'MOTHER', 'HERSELF', 'AS', 'IF', 'THE', 'RED', 'IGNOMINY', 'WERE', 'SO', 'DEEPLY', 'SCORCHED', 'INTO', 'HER', 'BRAIN', 'THAT', 'ALL', 'HER', 'CONCEPTIONS', 'ASSUMED', 'ITS', 'FORM', 'HAD', 'CAREFULLY', 'WROUGHT', 'OUT', 'THE', 'SIMILITUDE', 'LAVISHING', 'MANY', 'HOURS', 'OF', 'MORBID', 'INGENUITY', 'TO', 'CREATE', 'AN', 'ANALOGY', 'BETWEEN', 'THE', 'OBJECT', 'OF', 'HER', 'AFFECTION', 'AND', 'THE', 'EMBLEM', 'OF', 'HER', 'GUILT', 'AND', 'TORTURE'] +1221-135767-0007-1287: ref=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0007-1287: hyp=['BUT', 'IN', 'TRUTH', 'PEARL', 'WAS', 'THE', 'ONE', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'AND', 'ONLY', 'IN', 'CONSEQUENCE', 'OF', 'THAT', 'IDENTITY', 'HAD', 'HESTER', 'CONTRIVED', 'SO', 'PERFECTLY', 'TO', 'REPRESENT', 'THE', 'SCARLET', 'LETTER', 'IN', 'HER', 'APPEARANCE'] +1221-135767-0008-1288: ref=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0008-1288: hyp=['COME', 'THEREFORE', 'AND', 'LET', 'US', 'FLING', 'MUD', 'AT', 'THEM'] +1221-135767-0009-1289: ref=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0009-1289: hyp=['BUT', 'PEARL', 'WHO', 'WAS', 'A', 'DAUNTLESS', 'CHILD', 'AFTER', 'FROWNING', 'STAMPING', 'HER', 'FOOT', 'AND', 'SHAKING', 'HER', 'LITTLE', 'HAND', 'WITH', 'A', 'VARIETY', 'OF', 'THREATENING', 'GESTURES', 'SUDDENLY', 'MADE', 'A', 'RUSH', 'AT', 'THE', 'KNOT', 'OF', 'HER', 'ENEMIES', 'AND', 'PUT', 'THEM', 'ALL', 'TO', 'FLIGHT'] +1221-135767-0010-1290: ref=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0010-1290: hyp=['SHE', 'SCREAMED', 'AND', 'SHOUTED', 'TOO', 'WITH', 'A', 'TERRIFIC', 'VOLUME', 'OF', 'SOUND', 'WHICH', 'DOUBTLESS', 'CAUSED', 'THE', 'HEARTS', 'OF', 'THE', 'FUGITIVES', 'TO', 'QUAKE', 'WITHIN', 'THEM'] +1221-135767-0011-1291: ref=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 
'TO', 'THE', 'QUAINT', 'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0011-1291: hyp=['IT', 'WAS', 'FURTHER', 'DECORATED', 'WITH', 'STRANGE', 'AND', 'SEEMINGLY', 'CABALISTIC', 'FIGURES', 'AND', 'DIAGRAMS', 'SUITABLE', 'TO', 'THE', 'QUAINT', 'TASTE', 'OF', 'THE', 'AGE', 'WHICH', 'HAD', 'BEEN', 'DRAWN', 'IN', 'THE', 'STUCCO', 'WHEN', 'NEWLY', 'LAID', 'ON', 'AND', 'HAD', 'NOW', 'GROWN', 'HARD', 'AND', 'DURABLE', 'FOR', 'THE', 'ADMIRATION', 'OF', 'AFTER', 'TIMES'] +1221-135767-0012-1292: ref=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0012-1292: hyp=['THEY', 'APPROACHED', 'THE', 'DOOR', 'WHICH', 'WAS', 'OF', 'AN', 'ARCHED', 'FORM', 'AND', 'FLANKED', 'ON', 'EACH', 'SIDE', 'BY', 'A', 'NARROW', 'TOWER', 'OR', 'PROJECTION', 'OF', 'THE', 'EDIFICE', 'IN', 'BOTH', 'OF', 'WHICH', 'WERE', 'LATTICE', 'WINDOWS', 'THE', 'WOODEN', 'SHUTTERS', 'TO', 'CLOSE', 'OVER', 'THEM', 'AT', 'NEED'] +1221-135767-0013-1293: ref=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANT', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0013-1293: hyp=['LIFTING', 'THE', 'IRON', 'HAMMER', 'THAT', 'HUNG', 'AT', 'THE', 'PORTAL', 'HESTER', 'PRYNNE', 'GAVE', 'A', 'SUMMONS', 'WHICH', 'WAS', 'ANSWERED', 'BY', 'ONE', 'OF', 'THE', "GOVERNOR'S", 'BOND', 'SERVANTS', 'A', 'FREE', 'BORN', 'ENGLISHMAN', 'BUT', 'NOW', 'A', 'SEVEN', 'YEARS', 'SLAVE'] +1221-135767-0014-1294: ref=['YEA', 'HIS', 'HONOURABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0014-1294: hyp=['YEA', 'HIS', 'HONOURABLE', 'WORSHIP', 'IS', 'WITHIN', 'BUT', 'HE', 'HATH', 'A', 'GODLY', 'MINISTER', 'OR', 'TWO', 'WITH', 'HIM', 'AND', 'LIKEWISE', 'A', 'LEECH'] +1221-135767-0015-1295: ref=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0015-1295: hyp=['YE', 'MAY', 'NOT', 'SEE', 'HIS', 'WORSHIP', 'NOW'] +1221-135767-0016-1296: ref=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIR', 'ESTATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0016-1296: hyp=['WITH', 'MANY', 'VARIATIONS', 'SUGGESTED', 'BY', 'THE', 'NATURE', 'OF', 'HIS', 'BUILDING', 'MATERIALS', 'DIVERSITY', 'OF', 'CLIMATE', 'AND', 'A', 'DIFFERENT', 'MODE', 'OF', 'SOCIAL', 'LIFE', 'GOVERNOR', 'BELLINGHAM', 'HAD', 'PLANNED', 'HIS', 'NEW', 'HABITATION', 'AFTER', 'THE', 'RESIDENCES', 'OF', 'GENTLEMEN', 'OF', 'FAIREST', 'STATE', 'IN', 'HIS', 'NATIVE', 'LAND'] +1221-135767-0017-1297: ref=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 
'TANKARD', 'AT', 'THE', 'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0017-1297: hyp=['ON', 'THE', 'TABLE', 'IN', 'TOKEN', 'THAT', 'THE', 'SENTIMENT', 'OF', 'OLD', 'ENGLISH', 'HOSPITALITY', 'HAD', 'NOT', 'BEEN', 'LEFT', 'BEHIND', 'STOOD', 'A', 'LARGE', 'PEWTER', 'TANKARD', 'AT', 'THE', 'BOTTOM', 'OF', 'WHICH', 'HAD', 'HESTER', 'OR', 'PEARL', 'PEEPED', 'INTO', 'IT', 'THEY', 'MIGHT', 'HAVE', 'SEEN', 'THE', 'FROTHY', 'REMNANT', 'OF', 'A', 'RECENT', 'DRAUGHT', 'OF', 'ALE'] +1221-135767-0018-1298: ref=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOUR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0018-1298: hyp=['LITTLE', 'PEARL', 'WHO', 'WAS', 'AS', 'GREATLY', 'PLEASED', 'WITH', 'THE', 'GLEAMING', 'ARMOUR', 'AS', 'SHE', 'HAD', 'BEEN', 'WITH', 'THE', 'GLITTERING', 'FRONTISPIECE', 'OF', 'THE', 'HOUSE', 'SPENT', 'SOME', 'TIME', 'LOOKING', 'INTO', 'THE', 'POLISHED', 'MIRROR', 'OF', 'THE', 'BREASTPLATE'] +1221-135767-0019-1299: ref=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK', 'LOOK'] +1221-135767-0019-1299: hyp=['MOTHER', 'CRIED', 'SHE', 'I', 'SEE', 'YOU', 'HERE', 'LOOK', 'LOOK'] +1221-135767-0020-1300: ref=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0020-1300: hyp=['IN', 'TRUTH', 'SHE', 'SEEMED', 'ABSOLUTELY', 'HIDDEN', 'BEHIND', 'IT'] +1221-135767-0021-1301: ref=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATURE', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0021-1301: hyp=['PEARL', 'ACCORDINGLY', 'RAN', 'TO', 'THE', 'BOW', 'WINDOW', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'HALL', 'AND', 'LOOKED', 'ALONG', 'THE', 'VISTA', 'OF', 'A', 'GARDEN', 'WALK', 'CARPETED', 'WITH', 'CLOSELY', 'SHAVEN', 'GRASS', 'AND', 'BORDERED', 'WITH', 'SOME', 'RUDE', 'AND', 'IMMATEUR', 'ATTEMPT', 'AT', 'SHRUBBERY'] +1221-135767-0022-1302: ref=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALREADY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0022-1302: hyp=['BUT', 'THE', 'PROPRIETOR', 'APPEARED', 'ALL', 'READY', 'TO', 'HAVE', 'RELINQUISHED', 'AS', 'HOPELESS', 'THE', 'EFFORT', 'TO', 'PERPETUATE', 'ON', 'THIS', 'SIDE', 'OF', 'THE', 'ATLANTIC', 'IN', 'A', 'HARD', 'SOIL', 'AND', 'AMID', 'THE', 'CLOSE', 'STRUGGLE', 'FOR', 'SUBSISTENCE', 'THE', 'NATIVE', 'ENGLISH', 'TASTE', 'FOR', 'ORNAMENTAL', 'GARDENING'] +1221-135767-0023-1303: ref=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 
'BACK', 'OF', 'A', 'BULL'] +1221-135767-0023-1303: hyp=['THERE', 'WERE', 'A', 'FEW', 'ROSE', 'BUSHES', 'HOWEVER', 'AND', 'A', 'NUMBER', 'OF', 'APPLE', 'TREES', 'PROBABLY', 'THE', 'DESCENDANTS', 'OF', 'THOSE', 'PLANTED', 'BY', 'THE', 'REVEREND', 'MISTER', 'BLACKSTONE', 'THE', 'FIRST', 'SETTLER', 'OF', 'THE', 'PENINSULA', 'THAT', 'HALF', 'MYTHOLOGICAL', 'PERSONAGE', 'WHO', 'RIDES', 'THROUGH', 'OUR', 'EARLY', 'ANNALS', 'SEATED', 'ON', 'THE', 'BACK', 'OF', 'A', 'BULL'] +1221-135767-0024-1304: ref=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1221-135767-0024-1304: hyp=['PEARL', 'SEEING', 'THE', 'ROSE', 'BUSHES', 'BEGAN', 'TO', 'CRY', 'FOR', 'A', 'RED', 'ROSE', 'AND', 'WOULD', 'NOT', 'BE', 'PACIFIED'] +1284-1180-0000-829: ref=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEE', 'PANTS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0000-829: hyp=['HE', 'WORE', 'BLUE', 'SILK', 'STOCKINGS', 'BLUE', 'KNEE', 'PANS', 'WITH', 'GOLD', 'BUCKLES', 'A', 'BLUE', 'RUFFLED', 'WAIST', 'AND', 'A', 'JACKET', 'OF', 'BRIGHT', 'BLUE', 'BRAIDED', 'WITH', 'GOLD'] +1284-1180-0001-830: ref=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0001-830: hyp=['HIS', 'HAT', 'HAD', 'A', 'PEAKED', 'CROWN', 'AND', 'A', 'FLAT', 'BRIM', 'AND', 'AROUND', 'THE', 'BRIM', 'WAS', 'A', 'ROW', 'OF', 'TINY', 'GOLDEN', 'BELLS', 'THAT', 'TINKLED', 'WHEN', 'HE', 'MOVED'] +1284-1180-0002-831: ref=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURNOVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0002-831: hyp=['INSTEAD', 'OF', 'SHOES', 'THE', 'OLD', 'MAN', 'WORE', 'BOOTS', 'WITH', 'TURN', 'OVER', 'TOPS', 'AND', 'HIS', 'BLUE', 'COAT', 'HAD', 'WIDE', 'CUFFS', 'OF', 'GOLD', 'BRAID'] +1284-1180-0003-832: ref=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0003-832: hyp=['FOR', 'A', 'LONG', 'TIME', 'HE', 'HAD', 'WISHED', 'TO', 'EXPLORE', 'THE', 'BEAUTIFUL', 'LAND', 'OF', 'OZ', 'IN', 'WHICH', 'THEY', 'LIVED'] +1284-1180-0004-833: ref=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0004-833: hyp=['WHEN', 'THEY', 'WERE', 'OUTSIDE', 'UNC', 'SIMPLY', 'LATCHED', 'THE', 'DOOR', 'AND', 'STARTED', 'UP', 'THE', 'PATH'] +1284-1180-0005-834: ref=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANYONE', 'CAME', 'SO', 'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0005-834: hyp=['NO', 'ONE', 'WOULD', 'DISTURB', 'THEIR', 'LITTLE', 'HOUSE', 'EVEN', 'IF', 'ANY', 'ONE', 'CAME', 'SO', 'FAR', 'INTO', 'THE', 'THICK', 'FOREST', 'WHILE', 'THEY', 'WERE', 'GONE'] +1284-1180-0006-835: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] +1284-1180-0006-835: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'MOUNTAIN', 'THAT', 'SEPARATED', 'THE', 'COUNTRY', 'OF', 'THE', 'MUNCHKINS', 'FROM', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'THE', 'PATH', 'DIVIDED'] 
+1284-1180-0007-836: ref=['HE', 'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THEIR', 'NEAREST', 'NEIGHBOR'] +1284-1180-0007-836: hyp=['HE', 'KNEW', 'IT', 'WOULD', 'TAKE', 'THEM', 'TO', 'THE', 'HOUSE', 'OF', 'THE', 'CROOKED', 'MAGICIAN', 'WHOM', 'HE', 'HAD', 'NEVER', 'SEEN', 'BUT', 'WHO', 'WAS', 'THERE', 'NEAREST', 'NEIGHBOUR'] +1284-1180-0008-837: ref=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOON', 'UNC', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0008-837: hyp=['ALL', 'THE', 'MORNING', 'THEY', 'TRUDGED', 'UP', 'THE', 'MOUNTAIN', 'PATH', 'AND', 'AT', 'NOON', 'UNC', 'AND', 'OJO', 'SAT', 'ON', 'A', 'FALLEN', 'TREE', 'TRUNK', 'AND', 'ATE', 'THE', 'LAST', 'OF', 'THE', 'BREAD', 'WHICH', 'THE', 'OLD', 'MUNCHKIN', 'HAD', 'PLACED', 'IN', 'HIS', 'POCKET'] +1284-1180-0009-838: ref=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0009-838: hyp=['THEN', 'THEY', 'STARTED', 'ON', 'AGAIN', 'AND', 'TWO', 'HOURS', 'LATER', 'CAME', 'IN', 'SIGHT', 'OF', 'THE', 'HOUSE', 'OF', 'DOCTOR', 'PIPT'] +1284-1180-0010-839: ref=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'AND', 'A', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0010-839: hyp=['UNC', 'KNOCKED', 'AT', 'THE', 'DOOR', 'OF', 'THE', 'HOUSE', 'AND', 'A', 'CHUBBY', 'PLEASANT', 'FACED', 'WOMAN', 'DRESSED', 'ALL', 'IN', 'BLUE', 'OPENED', 'IT', 'AND', 'GREETED', 'THE', 'VISITORS', 'WITH', 'A', 'SMILE'] +1284-1180-0011-840: ref=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0011-840: hyp=['I', 'AM', 'MY', 'DEAR', 'AND', 'ALL', 'STRANGERS', 'ARE', 'WELCOME', 'TO', 'MY', 'HOME'] +1284-1180-0012-841: ref=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0012-841: hyp=['WE', 'HAVE', 'COME', 'FROM', 'A', 'FAR', 'LONELIER', 'PLACE', 'THAN', 'THIS', 'A', 'LONELIER', 'PLACE'] +1284-1180-0013-842: ref=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0013-842: hyp=['AND', 'YOU', 'MUST', 'BE', 'OJO', 'THE', 'UNLUCKY', 'SHE', 'ADDED'] +1284-1180-0014-843: ref=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0014-843: hyp=['OJO', 'HAD', 'NEVER', 'EATEN', 'SUCH', 'A', 'FINE', 'MEAL', 'IN', 'ALL', 'HIS', 'LIFE'] +1284-1180-0015-844: ref=['WE', 'ARE', 'TRAVELING', 'REPLIED', 'OJO', 'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0015-844: hyp=['WE', 'ARE', 'TRAVELING', 'REPLIED', 'OJO', 'AND', 'WE', 'STOPPED', 'AT', 'YOUR', 'HOUSE', 'JUST', 'TO', 'REST', 'AND', 'REFRESH', 'OURSELVES'] +1284-1180-0016-845: ref=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0016-845: hyp=['THE', 'WOMAN', 'SEEMED', 'THOUGHTFUL'] +1284-1180-0017-846: ref=['AT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 
'STEAMING', 'AT', 'A', 'GREAT', 'RATE'] +1284-1180-0017-846: hyp=['THAT', 'ONE', 'END', 'STOOD', 'A', 'GREAT', 'FIREPLACE', 'IN', 'WHICH', 'A', 'BLUE', 'LOG', 'WAS', 'BLAZING', 'WITH', 'A', 'BLUE', 'FLAME', 'AND', 'OVER', 'THE', 'FIRE', 'HUNG', 'FOUR', 'KETTLES', 'IN', 'A', 'ROW', 'ALL', 'BUBBLING', 'AND', 'STEAMING', 'AT', 'A', 'GREAT', 'RATE'] +1284-1180-0018-847: ref=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0018-847: hyp=['IT', 'TAKES', 'ME', 'SEVERAL', 'YEARS', 'TO', 'MAKE', 'THIS', 'MAGIC', 'POWDER', 'BUT', 'AT', 'THIS', 'MOMENT', 'I', 'AM', 'PLEASED', 'TO', 'SAY', 'IT', 'IS', 'NEARLY', 'DONE', 'YOU', 'SEE', 'I', 'AM', 'MAKING', 'IT', 'FOR', 'MY', 'GOOD', 'WIFE', 'MARGOLOTTE', 'WHO', 'WANTS', 'TO', 'USE', 'SOME', 'OF', 'IT', 'FOR', 'A', 'PURPOSE', 'OF', 'HER', 'OWN'] +1284-1180-0019-848: ref=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MOMBI', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0019-848: hyp=['YOU', 'MUST', 'KNOW', 'SAID', 'MARGOLOTTE', 'WHEN', 'THEY', 'WERE', 'ALL', 'SEATED', 'TOGETHER', 'ON', 'THE', 'BROAD', 'WINDOW', 'SEAT', 'THAT', 'MY', 'HUSBAND', 'FOOLISHLY', 'GAVE', 'AWAY', 'ALL', 'THE', 'POWDER', 'OF', 'LIFE', 'HE', 'FIRST', 'MADE', 'TO', 'OLD', 'MUMBIE', 'THE', 'WITCH', 'WHO', 'USED', 'TO', 'LIVE', 'IN', 'THE', 'COUNTRY', 'OF', 'THE', 'GILLIKINS', 'TO', 'THE', 'NORTH', 'OF', 'HERE'] +1284-1180-0020-849: ref=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'CAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0020-849: hyp=['THE', 'FIRST', 'LOT', 'WE', 'TESTED', 'ON', 'OUR', 'GLASS', 'HAT', 'WHICH', 'NOT', 'ONLY', 'BEGAN', 'TO', 'LIVE', 'BUT', 'HAS', 'LIVED', 'EVER', 'SINCE'] +1284-1180-0021-850: ref=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'MAY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0021-850: hyp=['I', 'THINK', 'THE', 'NEXT', 'GLASS', 'CAT', 'THE', 'MAGICIAN', 'MAKES', 'WILL', 'HAVE', 'NEITHER', 'BRAINS', 'NOR', 'HEART', 'FOR', 'THEN', 'IT', 'WILL', 'NOT', 'OBJECT', 'TO', 'CATCHING', 'MICE', 'AND', 'MAY', 'PROVE', 'OF', 'SOME', 'USE', 'TO', 'US'] +1284-1180-0022-851: ref=["I'M", 'AFRAID', 'I', "DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0022-851: hyp=["I'M", 'AFRAID', 'I', "DON'T", 'KNOW', 'MUCH', 'ABOUT', 'THE', 'LAND', 'OF', 'OZ'] +1284-1180-0023-852: ref=['YOU', 'SEE', "I'VE", 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0023-852: hyp=['YOU', 'SEE', 'I', 'HAVE', 'LIVED', 'ALL', 'MY', 'LIFE', 'WITH', 'UNC', 'NUNKIE', 'THE', 'SILENT', 'ONE', 'AND', 'THERE', 'WAS', 'NO', 'ONE', 'TO', 'TELL', 'ME', 'ANYTHING'] +1284-1180-0024-853: ref=['THAT', 'IS', 'ONE', 
'REASON', 'YOU', 'ARE', 'OJO', 'THE', 'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'A', 'SYMPATHETIC', 'TONE'] +1284-1180-0024-853: hyp=['THAT', 'IS', 'ONE', 'REASON', 'YOU', 'ARE', 'OJO', 'THE', 'UNLUCKY', 'SAID', 'THE', 'WOMAN', 'IN', 'SYMPATHETIC', 'TONE'] +1284-1180-0025-854: ref=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOTTE', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0025-854: hyp=['I', 'THINK', 'I', 'MUST', 'SHOW', 'YOU', 'MY', 'PATCHWORK', 'GIRL', 'SAID', 'MARGOLOTTE', 'LAUGHING', 'AT', 'THE', "BOY'S", 'ASTONISHMENT', 'FOR', 'SHE', 'IS', 'RATHER', 'DIFFICULT', 'TO', 'EXPLAIN'] +1284-1180-0026-855: ref=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COOK', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0026-855: hyp=['BUT', 'FIRST', 'I', 'WILL', 'TELL', 'YOU', 'THAT', 'FOR', 'MANY', 'YEARS', 'I', 'HAVE', 'LONGED', 'FOR', 'A', 'SERVANT', 'TO', 'HELP', 'ME', 'WITH', 'THE', 'HOUSEWORK', 'AND', 'TO', 'COPE', 'THE', 'MEALS', 'AND', 'WASH', 'THE', 'DISHES'] +1284-1180-0027-856: ref=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0027-856: hyp=['YET', 'THAT', 'TASK', 'WAS', 'NOT', 'SO', 'EASY', 'AS', 'YOU', 'MAY', 'SUPPOSE'] +1284-1180-0028-857: ref=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLORS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0028-857: hyp=['A', 'BED', 'QUILT', 'MADE', 'OF', 'PATCHES', 'OF', 'DIFFERENT', 'KINDS', 'AND', 'COLLARS', 'OF', 'CLOTH', 'ALL', 'NEATLY', 'SEWED', 'TOGETHER'] +1284-1180-0029-858: ref=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0029-858: hyp=['SOMETIMES', 'IT', 'IS', 'CALLED', 'A', 'CRAZY', 'QUILT', 'BECAUSE', 'THE', 'PATCHES', 'AND', 'COLORS', 'ARE', 'SO', 'MIXED', 'UP'] +1284-1180-0030-859: ref=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLORS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 'MUNCHKINS', 'ARE'] +1284-1180-0030-859: hyp=['WHEN', 'I', 'FOUND', 'IT', 'I', 'SAID', 'TO', 'MYSELF', 'THAT', 'IT', 'WOULD', 'DO', 'NICELY', 'FOR', 'MY', 'SERVANT', 'GIRL', 'FOR', 'WHEN', 'SHE', 'WAS', 'BROUGHT', 'TO', 'LIFE', 'SHE', 'WOULD', 'NOT', 'BE', 'PROUD', 'NOR', 'HAUGHTY', 'AS', 'THE', 'GLASS', 'CAT', 'IS', 'FOR', 'SUCH', 'A', 'DREADFUL', 'MIXTURE', 'OF', 'COLOURS', 'WOULD', 'DISCOURAGE', 'HER', 'FROM', 'TRYING', 'TO', 'BE', 'AS', 'DIGNIFIED', 'AS', 'THE', 'BLUE', 'MUNCHKINS', 'ARE'] +1284-1180-0031-860: ref=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0031-860: hyp=['AT', 'THE', 'EMERALD', 'CITY', 'WHERE', 'OUR', 'PRINCESS', 'OZMA', 'LIVES', 'GREEN', 'IS', 'THE', 'POPULAR', 'COLOR'] +1284-1180-0032-861: ref=['I', 'WILL', 'SHOW', 'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] 
+1284-1180-0032-861: hyp=['I', 'WILL', 'SHOW', 'YOU', 'WHAT', 'A', 'GOOD', 'JOB', 'I', 'DID', 'AND', 'SHE', 'WENT', 'TO', 'A', 'TALL', 'CUPBOARD', 'AND', 'THREW', 'OPEN', 'THE', 'DOORS'] +1284-1181-0000-807: ref=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0000-807: hyp=['OJO', 'EXAMINED', 'THIS', 'CURIOUS', 'CONTRIVANCE', 'WITH', 'WONDER'] +1284-1181-0001-808: ref=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0001-808: hyp=['MARGOLOTTE', 'HAD', 'FIRST', 'MADE', 'THE', "GIRL'S", 'FORM', 'FROM', 'THE', 'PATCHWORK', 'QUILT', 'AND', 'THEN', 'SHE', 'HAD', 'DRESSED', 'IT', 'WITH', 'A', 'PATCHWORK', 'SKIRT', 'AND', 'AN', 'APRON', 'WITH', 'POCKETS', 'IN', 'IT', 'USING', 'THE', 'SAME', 'GAY', 'MATERIAL', 'THROUGHOUT'] +1284-1181-0002-809: ref=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0002-809: hyp=['THE', 'HEAD', 'OF', 'THE', 'PATCHWORK', 'GIRL', 'WAS', 'THE', 'MOST', 'CURIOUS', 'PART', 'OF', 'HER'] +1284-1181-0003-810: ref=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0003-810: hyp=['THE', 'HAIR', 'WAS', 'OF', 'BROWN', 'YARN', 'AND', 'HUNG', 'DOWN', 'ON', 'HER', 'NECK', 'IN', 'SEVERAL', 'NEAT', 'BRAIDS'] +1284-1181-0004-811: ref=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0004-811: hyp=['GOLD', 'IS', 'THE', 'MOST', 'COMMON', 'METAL', 'IN', 'THE', 'LAND', 'OF', 'OZ', 'AND', 'IS', 'USED', 'FOR', 'MANY', 'PURPOSES', 'BECAUSE', 'IT', 'IS', 'SOFT', 'AND', 'PLIABLE'] +1284-1181-0005-812: ref=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0005-812: hyp=['NO', 'I', 'FORGOT', 'ALL', 'ABOUT', 'THE', 'BRAINS', 'EXCLAIMED', 'THE', 'WOMAN'] +1284-1181-0006-813: ref=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0006-813: hyp=['WELL', 'THAT', 'MAY', 'BE', 'TRUE', 'AGREED', 'MARGOLOTTE', 'BUT', 'ON', 'THE', 'CONTRARY', 'A', 'SERVANT', 'WITH', 'TOO', 'MUCH', 'BRAINS', 'IS', 'SURE', 'TO', 'BECOME', 'INDEPENDENT', 'AND', 'HIGH', 'AND', 'MIGHTY', 'AND', 'FEEL', 'ABOVE', 'HER', 'WORK'] +1284-1181-0007-814: ref=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0007-814: hyp=['SHE', 'POURED', 'INTO', 'THE', 'DISH', 'A', 'QUANTITY', 'FROM', 'EACH', 'OF', 'THESE', 'BOTTLES'] +1284-1181-0008-815: ref=['I', 'THINK', 'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0008-815: hyp=['I', 'THINK', 'THAT', 'WILL', 'DO', 'SHE', 'CONTINUED', 'FOR', 'THE', 'OTHER', 'QUALITIES', 'ARE', 'NOT', 'NEEDED', 'IN', 'A', 'SERVANT'] +1284-1181-0009-816: ref=['SHE', 'RAN', 'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0009-816: hyp=['SHE', 'RAN', 
'TO', 'HER', "HUSBAND'S", 'SIDE', 'AT', 'ONCE', 'AND', 'HELPED', 'HIM', 'LIFT', 'THE', 'FOUR', 'KETTLES', 'FROM', 'THE', 'FIRE'] +1284-1181-0010-817: ref=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0010-817: hyp=['THEIR', 'CONTENTS', 'HAD', 'ALL', 'BOILED', 'AWAY', 'LEAVING', 'IN', 'THE', 'BOTTOM', 'OF', 'EACH', 'KETTLE', 'A', 'FEW', 'GRAINS', 'OF', 'FINE', 'WHITE', 'POWDER'] +1284-1181-0011-818: ref=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALL', 'TOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0011-818: hyp=['VERY', 'CAREFULLY', 'THE', 'MAGICIAN', 'REMOVED', 'THIS', 'POWDER', 'PLACING', 'IT', 'ALTOGETHER', 'IN', 'A', 'GOLDEN', 'DISH', 'WHERE', 'HE', 'MIXED', 'IT', 'WITH', 'A', 'GOLDEN', 'SPOON'] +1284-1181-0012-819: ref=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0012-819: hyp=['NO', 'ONE', 'SAW', 'HIM', 'DO', 'THIS', 'FOR', 'ALL', 'WERE', 'LOOKING', 'AT', 'THE', 'POWDER', 'OF', 'LIFE', 'BUT', 'SOON', 'THE', 'WOMAN', 'REMEMBERED', 'WHAT', 'SHE', 'HAD', 'BEEN', 'DOING', 'AND', 'CAME', 'BACK', 'TO', 'THE', 'CUPBOARD'] +1284-1181-0013-820: ref=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0013-820: hyp=['OJO', 'BECAME', 'A', 'BIT', 'UNEASY', 'AT', 'THIS', 'FOR', 'HE', 'HAD', 'ALREADY', 'PUT', 'QUITE', 'A', 'LOT', 'OF', 'THE', 'CLEVERNESS', 'POWDER', 'IN', 'THE', 'DISH', 'BUT', 'HE', 'DARED', 'NOT', 'INTERFERE', 'AND', 'SO', 'HE', 'COMFORTED', 'HIMSELF', 'WITH', 'THE', 'THOUGHT', 'THAT', 'ONE', 'CANNOT', 'HAVE', 'TOO', 'MUCH', 'CLEVERNESS'] +1284-1181-0014-821: ref=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0014-821: hyp=['HE', 'SELECTED', 'A', 'SMALL', 'GOLD', 'BOTTLE', 'WITH', 'A', 'PEPPER', 'BOX', 'TOP', 'SO', 'THAT', 'THE', 'POWDER', 'MIGHT', 'BE', 'SPRINKLED', 'ON', 'ANY', 'OBJECT', 'THROUGH', 'THE', 'SMALL', 'HOLES'] +1284-1181-0015-822: ref=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0015-822: hyp=['MOST', 'PEOPLE', 'TALK', 'TOO', 'MUCH', 'SO', 'IT', 'IS', 'A', 'RELIEF', 'TO', 'FIND', 'ONE', 'WHO', 'TALKS', 'TOO', 'LITTLE'] +1284-1181-0016-823: ref=['I', 'AM', 'NOT', 'ALLOWED', 'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0016-823: hyp=['I', 'AM', 'NOT', 'ALLOWED', 'TO', 'PERFORM', 'MAGIC', 'EXCEPT', 'FOR', 'MY', 'OWN', 'AMUSEMENT', 'HE', 'TOLD', 'HIS', 'VISITORS', 'AS', 'HE', 'LIGHTED', 'A', 'PIPE', 'WITH', 'A', 'CROOKED', 'STEM', 'AND', 'BEGAN', 'TO', 'SMOKE'] +1284-1181-0017-824: ref=['THE', 'WIZARD', 
'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0017-824: hyp=['THE', 'WIZARD', 'OF', 'OZ', 'WHO', 'USED', 'TO', 'BE', 'A', 'HUMBUG', 'AND', 'KNEW', 'NO', 'MAGIC', 'AT', 'ALL', 'HAS', 'BEEN', 'TAKING', 'LESSONS', 'OF', 'GLINDA', 'AND', "I'M", 'TOLD', 'HE', 'IS', 'GETTING', 'TO', 'BE', 'A', 'PRETTY', 'GOOD', 'WIZARD', 'BUT', 'HE', 'IS', 'MERELY', 'THE', 'ASSISTANT', 'OF', 'THE', 'GREAT', 'SORCERESS'] +1284-1181-0018-825: ref=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0018-825: hyp=['IT', 'TRULY', 'IS', 'ASSERTED', 'THE', 'MAGICIAN'] +1284-1181-0019-826: ref=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0019-826: hyp=['I', 'NOW', 'USE', 'THEM', 'AS', 'ORNAMENTAL', 'STATUARY', 'IN', 'MY', 'GARDEN'] +1284-1181-0020-827: ref=['DEAR', 'ME', 'WHAT', 'A', 'CHATTERBOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'UNC', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0020-827: hyp=['DEAR', 'ME', 'WHAT', 'A', 'CHATTER', 'BOX', "YOU'RE", 'GETTING', 'TO', 'BE', 'YOUNG', 'REMARKED', 'THE', 'MAGICIAN', 'WHO', 'WAS', 'PLEASED', 'WITH', 'THE', 'COMPLIMENT'] +1284-1181-0021-828: ref=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENTS'] +1284-1181-0021-828: hyp=['ASKED', 'THE', 'VOICE', 'IN', 'SCORNFUL', 'ACCENTS'] +1284-134647-0000-862: ref=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0000-862: hyp=['THE', 'GRATEFUL', 'APPLAUSE', 'OF', 'THE', 'CLERGY', 'HAS', 'CONSECRATED', 'THE', 'MEMORY', 'OF', 'A', 'PRINCE', 'WHO', 'INDULGED', 'THEIR', 'PASSIONS', 'AND', 'PROMOTED', 'THEIR', 'INTEREST'] +1284-134647-0001-863: ref=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0001-863: hyp=['THE', 'EDICT', 'OF', 'MILAN', 'THE', 'GREAT', 'CHARTER', 'OF', 'TOLERATION', 'HAD', 'CONFIRMED', 'TO', 'EACH', 'INDIVIDUAL', 'OF', 'THE', 'ROMAN', 'WORLD', 'THE', 'PRIVILEGE', 'OF', 'CHOOSING', 'AND', 'PROFESSING', 'HIS', 'OWN', 'RELIGION'] +1284-134647-0002-864: ref=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SECTS', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0002-864: hyp=['BUT', 'THIS', 'INESTIMABLE', 'PRIVILEGE', 'WAS', 'SOON', 'VIOLATED', 'WITH', 'THE', 'KNOWLEDGE', 'OF', 'TRUTH', 'THE', 'EMPEROR', 'IMBIBED', 'THE', 'MAXIMS', 'OF', 'PERSECUTION', 'AND', 'THE', 'SEX', 'WHICH', 'DISSENTED', 'FROM', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'AFFLICTED', 'AND', 'OPPRESSED', 'BY', 'THE', 'TRIUMPH', 'OF', 'CHRISTIANITY'] +1284-134647-0003-865: ref=['CONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 
'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0003-865: hyp=['KONSTANTINE', 'EASILY', 'BELIEVED', 'THAT', 'THE', 'HERETICS', 'WHO', 'PRESUMED', 'TO', 'DISPUTE', 'HIS', 'OPINIONS', 'OR', 'TO', 'OPPOSE', 'HIS', 'COMMANDS', 'WERE', 'GUILTY', 'OF', 'THE', 'MOST', 'ABSURD', 'AND', 'CRIMINAL', 'OBSTINACY', 'AND', 'THAT', 'A', 'SEASONABLE', 'APPLICATION', 'OF', 'MODERATE', 'SEVERITIES', 'MIGHT', 'SAVE', 'THOSE', 'UNHAPPY', 'MEN', 'FROM', 'THE', 'DANGER', 'OF', 'AN', 'EVERLASTING', 'CONDEMNATION'] +1284-134647-0004-866: ref=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELT', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0004-866: hyp=['SOME', 'OF', 'THE', 'PENAL', 'REGULATIONS', 'WERE', 'COPIED', 'FROM', 'THE', 'EDICTS', 'OF', 'DIOCLETIAN', 'AND', 'THIS', 'METHOD', 'OF', 'CONVERSION', 'WAS', 'APPLAUDED', 'BY', 'THE', 'SAME', 'BISHOPS', 'WHO', 'HAD', 'FELLED', 'THE', 'HAND', 'OF', 'OPPRESSION', 'AND', 'PLEADED', 'FOR', 'THE', 'RIGHTS', 'OF', 'HUMANITY'] +1284-134647-0005-867: ref=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'INFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0005-867: hyp=['THEY', 'ASSERTED', 'WITH', 'CONFIDENCE', 'AND', 'ALMOST', 'WITH', 'EXULTATION', 'THAT', 'THE', 'APOSTOLICAL', 'SUCCESSION', 'WAS', 'INTERRUPTED', 'THAT', 'ALL', 'THE', 'BISHOPS', 'OF', 'EUROPE', 'AND', 'ASIA', 'WERE', 'IN', 'EFFECTED', 'BY', 'THE', 'CONTAGION', 'OF', 'GUILT', 'AND', 'SCHISM', 'AND', 'THAT', 'THE', 'PREROGATIVES', 'OF', 'THE', 'CATHOLIC', 'CHURCH', 'WERE', 'CONFINED', 'TO', 'THE', 'CHOSEN', 'PORTION', 'OF', 'THE', 'AFRICAN', 'BELIEVERS', 'WHO', 'ALONE', 'HAD', 'PRESERVED', 'INVIOLATE', 'THE', 'INTEGRITY', 'OF', 'THEIR', 'FAITH', 'AND', 'DISCIPLINE'] +1284-134647-0006-868: ref=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0006-868: hyp=['BISHOPS', 'VIRGINS', 'AND', 'EVEN', 'SPOTLESS', 'INFANTS', 'WERE', 'SUBJECTED', 'TO', 'THE', 'DISGRACE', 'OF', 'A', 'PUBLIC', 'PENANCE', 'BEFORE', 'THEY', 'COULD', 'BE', 'ADMITTED', 'TO', 'THE', 'COMMUNION', 'OF', 'THE', 'DONATISTS'] +1284-134647-0007-869: ref=['PROSCRIBED', 'BY', 'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 'THE', 'EMPIRE', 'THE', 'DONATISTS', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'IN', 'NUMIDIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1284-134647-0007-869: hyp=['PROSCRIBED', 'BY', 
'THE', 'CIVIL', 'AND', 'ECCLESIASTICAL', 'POWERS', 'OF', 'THE', 'EMPIRE', 'THE', 'DONATIST', 'STILL', 'MAINTAINED', 'IN', 'SOME', 'PROVINCES', 'PARTICULARLY', 'IN', 'NUMIDIA', 'THEIR', 'SUPERIOR', 'NUMBERS', 'AND', 'FOUR', 'HUNDRED', 'BISHOPS', 'ACKNOWLEDGED', 'THE', 'JURISDICTION', 'OF', 'THEIR', 'PRIMATE'] +1320-122612-0000-120: ref=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'AND', 'THRIVING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0000-120: hyp=['SINCE', 'THE', 'PERIOD', 'OF', 'OUR', 'TALE', 'THE', 'ACTIVE', 'SPIRIT', 'OF', 'THE', 'COUNTRY', 'HAS', 'SURROUNDED', 'IT', 'WITH', 'A', 'BELT', 'OF', 'RICH', 'ENTHRIBING', 'SETTLEMENTS', 'THOUGH', 'NONE', 'BUT', 'THE', 'HUNTER', 'OR', 'THE', 'SAVAGE', 'IS', 'EVER', 'KNOWN', 'EVEN', 'NOW', 'TO', 'PENETRATE', 'ITS', 'WILD', 'RECESSES'] +1320-122612-0001-121: ref=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0001-121: hyp=['THE', 'DEWS', 'WERE', 'SUFFERED', 'TO', 'EXHALE', 'AND', 'THE', 'SUN', 'HAD', 'DISPERSED', 'THE', 'MISTS', 'AND', 'WAS', 'SHEDDING', 'A', 'STRONG', 'AND', 'CLEAR', 'LIGHT', 'IN', 'THE', 'FOREST', 'WHEN', 'THE', 'TRAVELLERS', 'RESUMED', 'THEIR', 'JOURNEY'] +1320-122612-0002-122: ref=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0002-122: hyp=['AFTER', 'PROCEEDING', 'A', 'FEW', 'MILES', 'THE', 'PROGRESS', 'OF', 'HAWKEYE', 'WHO', 'LED', 'THE', 'ADVANCE', 'BECAME', 'MORE', 'DELIBERATE', 'AND', 'WATCHFUL'] +1320-122612-0003-123: ref=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0003-123: hyp=['HE', 'OFTEN', 'STOPPED', 'TO', 'EXAMINE', 'THE', 'TREES', 'NOR', 'DID', 'HE', 'CROSS', 'A', 'RIVULET', 'WITHOUT', 'ATTENTIVELY', 'CONSIDERING', 'THE', 'QUANTITY', 'THE', 'VELOCITY', 'AND', 'THE', 'COLOR', 'OF', 'ITS', 'WATERS'] +1320-122612-0004-124: ref=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHGOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0004-124: hyp=['DISTRUSTING', 'HIS', 'OWN', 'JUDGMENT', 'HIS', 'APPEALS', 'TO', 'THE', 'OPINION', 'OF', 'CHINGACHOOK', 'WERE', 'FREQUENT', 'AND', 'EARNEST'] +1320-122612-0005-125: ref=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCAROONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0005-125: hyp=['YET', 'HERE', 'ARE', 'WE', 'WITHIN', 'A', 'SHORT', 'RANGE', 'OF', 'THE', 'SCARONS', 'AND', 'NOT', 'A', 'SIGN', 'OF', 'A', 'TRAIL', 'HAVE', 'WE', 'CROSSED'] +1320-122612-0006-126: ref=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0006-126: hyp=['LET', 'US', 'RETRACE', 'OUR', 'STEPS', 'AND', 'EXAMINE', 'AS', 'WE', 'GO', 'WITH', 'KEENER', 'EYES'] +1320-122612-0007-127: ref=['CHINGACHGOOK', 'HAD', 'CAUGHT', 'THE', 
'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 'BADE', 'HIM', 'SPEAK'] +1320-122612-0007-127: hyp=['CHINGACHOOK', 'HAD', 'CAUGHT', 'THE', 'LOOK', 'AND', 'MOTIONING', 'WITH', 'HIS', 'HAND', 'HE', 'BADE', 'HIM', 'SPEAK'] +1320-122612-0008-128: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0008-128: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'FOLLOWED', 'THE', 'UNEXPECTED', 'MOVEMENT', 'AND', 'READ', 'THEIR', 'SUCCESS', 'IN', 'THE', 'AIR', 'OF', 'TRIUMPH', 'THAT', 'THE', 'YOUTH', 'ASSUMED'] +1320-122612-0009-129: ref=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0009-129: hyp=['IT', 'WOULD', 'HAVE', 'BEEN', 'MORE', 'WONDERFUL', 'HAD', 'HE', 'SPOKEN', 'WITHOUT', 'A', 'BIDDING'] +1320-122612-0010-130: ref=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'HAIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0010-130: hyp=['SEE', 'SAID', 'UNCAS', 'POINTING', 'NORTH', 'AND', 'SOUTH', 'AT', 'THE', 'EVIDENT', 'MARKS', 'OF', 'THE', 'BROAD', 'TRAIL', 'ON', 'EITHER', 'SIDE', 'OF', 'HIM', 'THE', 'DARK', 'HAIR', 'HAS', 'GONE', 'TOWARD', 'THE', 'FOREST'] +1320-122612-0011-131: ref=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLEW', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0011-131: hyp=['IF', 'A', 'ROCK', 'OR', 'A', 'RIVULET', 'OR', 'A', 'BIT', 'OF', 'EARTH', 'HARDER', 'THAN', 'COMMON', 'SEVERED', 'THE', 'LINKS', 'OF', 'THE', 'CLUE', 'THEY', 'FOLLOWED', 'THE', 'TRUE', 'EYE', 'OF', 'THE', 'SCOUT', 'RECOVERED', 'THEM', 'AT', 'A', 'DISTANCE', 'AND', 'SELDOM', 'RENDERED', 'THE', 'DELAY', 'OF', 'A', 'SINGLE', 'MOMENT', 'NECESSARY'] +1320-122612-0012-132: ref=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0012-132: hyp=['EXTINGUISHED', 'BRANDS', 'WERE', 'LYING', 'AROUND', 'A', 'SPRING', 'THE', 'OFFALS', 'OF', 'A', 'DEER', 'WERE', 'SCATTERED', 'ABOUT', 'THE', 'PLACE', 'AND', 'THE', 'TREES', 'BORE', 'EVIDENT', 'MARKS', 'OF', 'HAVING', 'BEEN', 'BROWSED', 'BY', 'THE', 'HORSES'] +1320-122612-0013-133: ref=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0013-133: hyp=['A', 'CIRCLE', 'OF', 'A', 'FEW', 'HUNDRED', 'FEET', 'IN', 'CIRCUMFERENCE', 'WAS', 'DRAWN', 'AND', 'EACH', 'OF', 'THE', 'PARTY', 'TOOK', 'A', 'SEGMENT', 'FOR', 'HIS', 'PORTION'] +1320-122612-0014-134: ref=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0014-134: hyp=['THE', 'EXAMINATION', 'HOWEVER', 'RESULTED', 'IN', 'NO', 'DISCOVERY'] +1320-122612-0015-135: ref=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 
'THE', 'MOIST', 'ALLUVION'] +1320-122612-0015-135: hyp=['THE', 'WHOLE', 'PARTY', 'CROWDED', 'TO', 'THE', 'SPOT', 'WHERE', 'UNCAS', 'POINTED', 'OUT', 'THE', 'IMPRESSION', 'OF', 'A', 'MOCCASIN', 'IN', 'THE', 'MOIST', 'ALLUVIAN'] +1320-122612-0016-136: ref=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122612-0016-136: hyp=['RUN', 'BACK', 'UNCAS', 'AND', 'BRING', 'ME', 'THE', 'SIZE', 'OF', 'THE', "SINGER'S", 'FOOT'] +1320-122617-0000-78: ref=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0000-78: hyp=['NOTWITHSTANDING', 'THE', 'HIGH', 'RESOLUTION', 'OF', 'HAWKEYE', 'HE', 'FULLY', 'COMPREHENDED', 'ALL', 'THE', 'DIFFICULTIES', 'AND', 'DANGER', 'HE', 'WAS', 'ABOUT', 'TO', 'INCUR'] +1320-122617-0001-79: ref=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0001-79: hyp=['IN', 'HIS', 'RETURN', 'TO', 'THE', 'CAMP', 'HIS', 'ACUTE', 'AND', 'PRACTISED', 'INTELLECTS', 'WERE', 'INTENTLY', 'ENGAGED', 'IN', 'DEVISING', 'MEANS', 'TO', 'COUNTERACT', 'A', 'WATCHFULNESS', 'AND', 'SUSPICION', 'ON', 'THE', 'PART', 'OF', 'HIS', 'ENEMIES', 'THAT', 'HE', 'KNEW', 'WERE', 'IN', 'NO', 'DEGREE', 'INFERIOR', 'TO', 'HIS', 'OWN'] +1320-122617-0002-80: ref=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BALAAM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SKEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0002-80: hyp=['IN', 'OTHER', 'WORDS', 'WHILE', 'HE', 'HAD', 'IMPLICIT', 'FAITH', 'IN', 'THE', 'ABILITY', 'OF', "BAYLIM'S", 'ASS', 'TO', 'SPEAK', 'HE', 'WAS', 'SOMEWHAT', 'SCEPTICAL', 'ON', 'THE', 'SUBJECT', 'OF', 'A', "BEAR'S", 'SINGING', 'AND', 'YET', 'HE', 'HAD', 'BEEN', 'ASSURED', 'OF', 'THE', 'LATTER', 'ON', 'THE', 'TESTIMONY', 'OF', 'HIS', 'OWN', 'EXQUISITE', 'ORGANS'] +1320-122617-0003-81: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0003-81: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'AIR', 'AND', 'MANNER', 'THAT', 'BETRAYED', 'TO', 'THE', 'SCOUT', 'THE', 'UTTER', 'CONFUSION', 'OF', 'THE', 'STATE', 'OF', 'HIS', 'MIND'] +1320-122617-0004-82: ref=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0004-82: hyp=['THE', 'INGENIOUS', 'HAWKEYE', 'WHO', 'RECALLED', 'THE', 'HASTY', 'MANNER', 'IN', 'WHICH', 'THE', 'OTHER', 'HAD', 'ABANDONED', 'HIS', 'POST', 'AT', 'THE', 'BEDSIDE', 'OF', 'THE', 'SICK', 'WOMAN', 'WAS', 'NOT', 'WITHOUT', 'HIS', 'SUSPICIONS', 'CONCERNING', 'THE', 'SUBJECT', 'OF', 'SO', 'MUCH', 'SOLEMN', 'DELIBERATION'] +1320-122617-0005-83: ref=['THE', 'BEAR', 
'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0005-83: hyp=['THE', 'BEAR', 'SHOOK', 'HIS', 'SHAGGY', 'SIDES', 'AND', 'THEN', 'A', 'WELL', 'KNOWN', 'VOICE', 'REPLIED'] +1320-122617-0006-84: ref=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0006-84: hyp=['CAN', 'THESE', 'THINGS', 'BE', 'RETURNED', 'DAVID', 'BREATHING', 'MORE', 'FREELY', 'AS', 'THE', 'TRUTH', 'BEGAN', 'TO', 'DAWN', 'UPON', 'HIM'] +1320-122617-0007-85: ref=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0007-85: hyp=['COME', 'COME', 'RETURNED', 'HAWKEYE', 'UNCASING', 'HIS', 'HONEST', 'COUNTENANCE', 'THE', 'BETTER', 'TO', 'ASSURE', 'THE', 'WAVERING', 'CONFIDENCE', 'OF', 'HIS', 'COMPANION', 'YOU', 'MAY', 'SEE', 'A', 'SKIN', 'WHICH', 'IF', 'IT', 'BE', 'NOT', 'AS', 'WHITE', 'AS', 'ONE', 'OF', 'THE', 'GENTLE', 'ONES', 'HAS', 'NO', 'TINGE', 'OF', 'RED', 'TO', 'IT', 'THAT', 'THE', 'WINDS', 'OF', 'THE', 'HEAVEN', 'AND', 'THE', 'SUN', 'HAVE', 'NOT', 'BESTOWED', 'NOW', 'LET', 'US', 'TO', 'BUSINESS'] +1320-122617-0008-86: ref=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0008-86: hyp=['THE', 'YOUNG', 'MAN', 'IS', 'IN', 'BONDAGE', 'AND', 'MUCH', 'I', 'FEAR', 'HIS', 'DEATH', 'IS', 'DECREED'] +1320-122617-0009-87: ref=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0009-87: hyp=['I', 'GREATLY', 'MOURN', 'THAT', 'ONE', 'SO', 'WELL', 'DISPOSED', 'SHOULD', 'DIE', 'IN', 'HIS', 'IGNORANCE', 'AND', 'I', 'HAVE', 'SOUGHT', 'A', 'GOODLY', 'HYMN', 'CAN', 'YOU', 'LEAD', 'ME', 'TO', 'HIM'] +1320-122617-0010-88: ref=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0010-88: hyp=['THE', 'TASK', 'WILL', 'NOT', 'BE', 'DIFFICULT', 'RETURNED', 'DAVID', 'HESITATING', 'THOUGH', 'I', 'GREATLY', 'FEAR', 'YOUR', 'PRESENCE', 'WOULD', 'RATHER', 'INCREASE', 'THAN', 'MITIGATE', 'HIS', 'UNHAPPY', 'FORTUNES'] +1320-122617-0011-89: ref=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0011-89: hyp=['THE', 'LODGE', 'IN', 'WHICH', 'UNCAS', 'WAS', 'CONFINED', 'WAS', 'IN', 'THE', 'VERY', 'CENTER', 'OF', 'THE', 'VILLAGE', 'AND', 'IN', 'A', 'SITUATION', 'PERHAPS', 'MORE', 'DIFFICULT', 'THAN', 'ANY', 'OTHER', 'TO', 'APPROACH', 'OR', 'LEAVE', 'WITHOUT', 'OBSERVATION'] +1320-122617-0012-90: ref=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 
'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0012-90: hyp=['FOUR', 'OR', 'FIVE', 'OF', 'THE', 'LATTER', 'ONLY', 'LINGERED', 'ABOUT', 'THE', 'DOOR', 'OF', 'THE', 'PRISON', 'OF', 'UNCAS', 'WARY', 'BUT', 'CLOSE', 'OBSERVERS', 'OF', 'THE', 'MANNER', 'OF', 'THEIR', 'CAPTIVE'] +1320-122617-0013-91: ref=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'IN', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0013-91: hyp=['DELIVERED', 'IN', 'A', 'STRONG', 'TONE', 'OF', 'ASSENT', 'ANNOUNCED', 'THE', 'GRATIFICATION', 'THE', 'SAVAGE', 'WOULD', 'RECEIVE', 'AND', 'WITNESSING', 'SUCH', 'AN', 'EXHIBITION', 'OF', 'WEAKNESS', 'IN', 'AN', 'ENEMY', 'SO', 'LONG', 'HATED', 'AND', 'SO', 'MUCH', 'FEARED'] +1320-122617-0014-92: ref=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJURER', 'TO', 'ENTER'] +1320-122617-0014-92: hyp=['THEY', 'DREW', 'BACK', 'A', 'LITTLE', 'FROM', 'THE', 'ENTRANCE', 'AND', 'MOTIONED', 'TO', 'THE', 'SUPPOSED', 'CONJUROR', 'TO', 'ENTER'] +1320-122617-0015-93: ref=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEAT', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0015-93: hyp=['BUT', 'THE', 'BEAR', 'INSTEAD', 'OF', 'OBEYING', 'MAINTAINED', 'THE', 'SEED', 'IT', 'HAD', 'TAKEN', 'AND', 'GROWLED'] +1320-122617-0016-94: ref=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0016-94: hyp=['THE', 'CUNNING', 'MAN', 'IS', 'AFRAID', 'THAT', 'HIS', 'BREATH', 'WILL', 'BLOW', 'UPON', 'HIS', 'BROTHERS', 'AND', 'TAKE', 'AWAY', 'THEIR', 'COURAGE', 'TOO', 'CONTINUED', 'DAVID', 'IMPROVING', 'THE', 'HINT', 'HE', 'RECEIVED', 'THEY', 'MUST', 'STAND', 'FURTHER', 'OFF'] +1320-122617-0017-95: ref=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0017-95: hyp=['THEN', 'AS', 'IF', 'SATISFIED', 'OF', 'THEIR', 'SAFETY', 'THE', 'SCOUT', 'LEFT', 'HIS', 'POSITION', 'AND', 'SLOWLY', 'ENTERED', 'THE', 'PLACE'] +1320-122617-0018-96: ref=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSED', 'OF', 'COOKERY'] +1320-122617-0018-96: hyp=['IT', 'WAS', 'SILENT', 'AND', 'GLOOMY', 'BEING', 'TENANTED', 'SOLELY', 'BY', 'THE', 'CAPTIVE', 'AND', 'LIGHTED', 'BY', 'THE', 'DYING', 'EMBERS', 'OF', 'A', 'FIRE', 'WHICH', 'HAD', 'BEEN', 'USED', 'FOR', 'THE', 'PURPOSE', 'OF', 'COOKERY'] +1320-122617-0019-97: ref=['UNCAS', 'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WITHES'] +1320-122617-0019-97: hyp=['UNCAS', 'OCCUPIED', 'A', 'DISTANT', 'CORNER', 'IN', 'A', 'RECLINING', 'ATTITUDE', 'BEING', 'RIGIDLY', 'BOUND', 'BOTH', 'HANDS', 'AND', 'FEET', 'BY', 'STRONG', 'AND', 'PAINFUL', 'WIDTHS'] +1320-122617-0020-98: ref=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 
'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0020-98: hyp=['THE', 'SCOUT', 'WHO', 'HAD', 'LEFT', 'DAVID', 'AT', 'THE', 'DOOR', 'TO', 'ASCERTAIN', 'THEY', 'WERE', 'NOT', 'OBSERVED', 'THOUGHT', 'IT', 'PRUDENT', 'TO', 'PRESERVE', 'HIS', 'DISGUISE', 'UNTIL', 'ASSURED', 'OF', 'THEIR', 'PRIVACY'] +1320-122617-0021-99: ref=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THIS', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0021-99: hyp=['WHAT', 'SHALL', 'WE', 'DO', 'WITH', 'THE', 'MINGOES', 'AT', 'THE', 'DOOR', 'THEY', 'COUNT', 'SIX', 'AND', 'THE', 'SINGER', 'IS', 'AS', 'GOOD', 'AS', 'NOTHING'] +1320-122617-0022-100: ref=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THEY', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0022-100: hyp=['THE', 'DELAWARES', 'ARE', 'CHILDREN', 'OF', 'THE', 'TORTOISE', 'AND', 'THE', 'OUTSTRIP', 'THE', 'DEER'] +1320-122617-0023-101: ref=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0023-101: hyp=['UNCAS', 'WHO', 'HAD', 'ALREADY', 'APPROACHED', 'THE', 'DOOR', 'IN', 'READINESS', 'TO', 'LEAD', 'THE', 'WAY', 'NOW', 'RECOILED', 'AND', 'PLACED', 'HIMSELF', 'ONCE', 'MORE', 'IN', 'THE', 'BOTTOM', 'OF', 'THE', 'LODGE'] +1320-122617-0024-102: ref=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0024-102: hyp=['BUT', 'HAWKEYE', 'WHO', 'WAS', 'TOO', 'MUCH', 'OCCUPIED', 'WITH', 'HIS', 'OWN', 'THOUGHTS', 'TO', 'NOTE', 'THE', 'MOVEMENT', 'CONTINUED', 'SPEAKING', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'HIS', 'COMPANION'] +1320-122617-0025-103: ref=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0025-103: hyp=['SO', 'UNCAS', 'YOU', 'HAD', 'BETTER', 'TAKE', 'THE', 'LEAD', 'WHILE', 'I', 'WILL', 'PUT', 'ON', 'THE', 'SKIN', 'AGAIN', 'AND', 'TRUST', 'TO', 'CUNNING', 'FOR', 'WANT', 'OF', 'SPEED'] +1320-122617-0026-104: ref=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'IN', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0026-104: hyp=['WELL', 'WHAT', "CAN'T", 'BE', 'DONE', 'BY', 'MAIN', 'COURAGE', 'AND', 'WAR', 'MUST', 'BE', 'DONE', 'BY', 'CIRCUMVENTION'] +1320-122617-0027-105: ref=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0027-105: hyp=['AS', 'SOON', 'AS', 'THESE', 'DISPOSITIONS', 'WERE', 'MADE', 'THE', 'SCOUT', 'TURNED', 'TO', 'DAVID', 'AND', 'GAVE', 'HIM', 'HIS', 'PARTING', 'INSTRUCTIONS'] +1320-122617-0028-106: ref=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 
'THE', 'GREATEST', 'STRAITS'] +1320-122617-0028-106: hyp=['MY', 'PURSUITS', 'ARE', 'PEACEFUL', 'AND', 'MY', 'TEMPER', 'I', 'HUMBLY', 'TRUST', 'IS', 'GREATLY', 'GIVEN', 'TO', 'MERCY', 'AND', 'LOVE', 'RETURNED', 'DAVID', 'A', 'LITTLE', 'NETTLED', 'AT', 'SO', 'DIRECT', 'AN', 'ATTACK', 'ON', 'HIS', 'MANHOOD', 'BUT', 'THERE', 'ARE', 'NONE', 'WHO', 'CAN', 'SAY', 'THAT', 'I', 'HAVE', 'EVER', 'FORGOTTEN', 'MY', 'FAITH', 'IN', 'THE', 'LORD', 'EVEN', 'IN', 'THE', 'GREATEST', 'STRAITS'] +1320-122617-0029-107: ref=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSER', 'WILL', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0029-107: hyp=['IF', 'YOU', 'ARE', 'NOT', 'THEN', 'KNOCKED', 'ON', 'THE', 'HEAD', 'YOUR', 'BEING', 'A', 'NON', 'COMPOSSIBLE', 'PROTECT', 'YOU', 'AND', "YOU'LL", 'THEN', 'HAVE', 'A', 'GOOD', 'REASON', 'TO', 'EXPECT', 'TO', 'DIE', 'IN', 'YOUR', 'BED'] +1320-122617-0030-108: ref=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0030-108: hyp=['SO', 'CHOOSE', 'FOR', 'YOURSELF', 'TO', 'MAKE', 'A', 'RUSH', 'OR', 'TARRY', 'HERE'] +1320-122617-0031-109: ref=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0031-109: hyp=['BRAVELY', 'AND', 'GENEROUSLY', 'HAS', 'HE', 'BATTLED', 'IN', 'MY', 'BEHALF', 'AND', 'THIS', 'AND', 'MORE', 'WILL', 'I', 'DARE', 'IN', 'HIS', 'SERVICE'] +1320-122617-0032-110: ref=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0032-110: hyp=['KEEP', 'SILENT', 'AS', 'LONG', 'AS', 'MAY', 'BE', 'AND', 'IT', 'WOULD', 'BE', 'WISE', 'WHEN', 'YOU', 'DO', 'SPEAK', 'TO', 'BREAK', 'OUT', 'SUDDENLY', 'IN', 'ONE', 'OF', 'YOUR', 'SHOUTINGS', 'WHICH', 'WILL', 'SERVE', 'TO', 'REMIND', 'THE', 'INDIANS', 'THAT', 'YOU', 'ARE', 'NOT', 'ALTOGETHER', 'AS', 'RESPONSIBLE', 'AS', 'MEN', 'SHOULD', 'BE'] +1320-122617-0033-111: ref=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'AS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0033-111: hyp=['IF', 'HOWEVER', 'THEY', 'TAKE', 'YOUR', 'SCALP', 'AS', 'I', 'TRUST', 'AND', 'BELIEVE', 'THEY', 'WILL', 'NOT', 'DEPEND', 'ON', 'IT', 'UNCAS', 'AND', 'I', 'WILL', 'NOT', 'FORGET', 'THE', 'DEED', 'BUT', 'REVENGE', 'IT', 'IS', 'BECOMES', 'TRUE', 'WARRIORS', 'AND', 'TRUSTY', 'FRIENDS'] +1320-122617-0034-112: ref=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 'REVENGE'] +1320-122617-0034-112: hyp=['HOLD', 'SAID', 'DAVID', 'PERCEIVING', 'THAT', 'WITH', 'THIS', 'ASSURANCE', 'THEY', 'WERE', 'ABOUT', 'TO', 'LEAVE', 'HIM', 'I', 'AM', 'AN', 'UNWORTHY', 'AND', 'HUMBLE', 'FOLLOWER', 'OF', 'ONE', 'WHO', 'TAUGHT', 'NOT', 'THE', 'DAMNABLE', 'PRINCIPLE', 'OF', 
'REVENGE'] +1320-122617-0035-113: ref=['THEN', 'HEAVING', 'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0035-113: hyp=['THEN', 'HEAVING', 'A', 'HEAVY', 'SIGH', 'PROBABLY', 'AMONG', 'THE', 'LAST', 'HE', 'EVER', 'DREW', 'IN', 'PINING', 'FOR', 'A', 'CONDITION', 'HE', 'HAD', 'SO', 'LONG', 'ABANDONED', 'HE', 'ADDED', 'IT', 'IS', 'WHAT', 'I', 'WOULD', 'WISH', 'TO', 'PRACTISE', 'MYSELF', 'AS', 'ONE', 'WITHOUT', 'A', 'CROSS', 'OF', 'BLOOD', 'THOUGH', 'IT', 'IS', 'NOT', 'ALWAYS', 'EASY', 'TO', 'DEAL', 'WITH', 'AN', 'INDIAN', 'AS', 'YOU', 'WOULD', 'WITH', 'A', 'FELLOW', 'CHRISTIAN'] +1320-122617-0036-114: ref=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'IS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0036-114: hyp=['GOD', 'BLESS', 'YOU', 'FRIEND', 'I', 'DO', 'BELIEVE', 'YOUR', 'SCENT', 'HAS', 'NOT', 'GREATLY', 'WRONG', 'WHEN', 'THE', 'MATTER', 'IS', 'DULY', 'CONSIDERED', 'AND', 'KEEPING', 'ETERNITY', 'BEFORE', 'THE', 'EYES', 'THOUGH', 'MUCH', 'DEPENDS', 'ON', 'THE', 'NATURAL', 'GIFTS', 'AND', 'THE', 'FORCE', 'OF', 'TEMPTATION'] +1320-122617-0037-115: ref=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0037-115: hyp=['THE', 'DELAWARE', 'DOG', 'HE', 'SAID', 'LEANING', 'FORWARD', 'AND', 'PEERING', 'THROUGH', 'THE', 'DIM', 'LIGHT', 'TO', 'CATCH', 'THE', 'EXPRESSION', 'OF', 'THE', "OTHER'S", 'FEATURES', 'IS', 'HE', 'AFRAID'] +1320-122617-0038-116: ref=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0038-116: hyp=['WILL', 'THE', 'HURONS', 'HEAR', 'HIS', 'GROANS'] +1320-122617-0039-117: ref=['THE', 'MOHICAN', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0039-117: hyp=['THE', 'MOHICANS', 'STARTED', 'ON', 'HIS', 'FEET', 'AND', 'SHOOK', 'HIS', 'SHAGGY', 'COVERING', 'AS', 'THOUGH', 'THE', 'ANIMAL', 'HE', 'COUNTERFEITED', 'WAS', 'ABOUT', 'TO', 'MAKE', 'SOME', 'DESPERATE', 'EFFORT'] +1320-122617-0040-118: ref=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0040-118: hyp=['HE', 'HAD', 'NO', 'OCCASION', 'TO', 'DELAY', 'FOR', 'AT', 'THE', 'NEXT', 'INSTANT', 'A', 'BURST', 'OF', 'CRIES', 'FILLED', 'THE', 'OUTER', 'AIR', 'AND', 'RAN', 'ALONG', 'THE', 'WHOLE', 'EXTENT', 'OF', 'THE', 'VILLAGE'] +1320-122617-0041-119: ref=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 'PROPORTIONS'] +1320-122617-0041-119: hyp=['UNCAS', 'CAST', 'HIS', 'SKIN', 'AND', 'STEPPED', 'FORTH', 'IN', 'HIS', 'OWN', 'BEAUTIFUL', 
'PROPORTIONS'] +1580-141083-0000-1949: ref=['I', 'WILL', 'ENDEAVOUR', 'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0000-1949: hyp=['I', 'WILL', 'ENDEAVOUR', 'IN', 'MY', 'STATEMENT', 'TO', 'AVOID', 'SUCH', 'TERMS', 'AS', 'WOULD', 'SERVE', 'TO', 'LIMIT', 'THE', 'EVENTS', 'TO', 'ANY', 'PARTICULAR', 'PLACE', 'OR', 'GIVE', 'A', 'CLUE', 'AS', 'TO', 'THE', 'PEOPLE', 'CONCERNED'] +1580-141083-0001-1950: ref=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0001-1950: hyp=['I', 'HAD', 'ALWAYS', 'KNOWN', 'HIM', 'TO', 'BE', 'RESTLESS', 'IN', 'HIS', 'MANNER', 'BUT', 'ON', 'THIS', 'PARTICULAR', 'OCCASION', 'HE', 'WAS', 'IN', 'SUCH', 'A', 'STATE', 'OF', 'UNCONTROLLABLE', 'AGITATION', 'THAT', 'IT', 'WAS', 'CLEAR', 'SOMETHING', 'VERY', 'UNUSUAL', 'HAD', 'OCCURRED'] +1580-141083-0002-1951: ref=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0002-1951: hyp=['MY', "FRIEND'S", 'TEMPER', 'HAD', 'NOT', 'IMPROVED', 'SINCE', 'HE', 'HAD', 'BEEN', 'DEPRIVED', 'OF', 'THE', 'CONGENIAL', 'SURROUNDINGS', 'OF', 'BAKER', 'STREET'] +1580-141083-0003-1952: ref=['WITHOUT', 'HIS', 'SCRAPBOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0003-1952: hyp=['WITHOUT', 'HIS', 'SCRAP', 'BOOKS', 'HIS', 'CHEMICALS', 'AND', 'HIS', 'HOMELY', 'UNTIDINESS', 'HE', 'WAS', 'AN', 'UNCOMFORTABLE', 'MAN'] +1580-141083-0004-1953: ref=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0004-1953: hyp=['I', 'HAD', 'TO', 'READ', 'IT', 'OVER', 'CAREFULLY', 'AS', 'THE', 'TEXT', 'MUST', 'BE', 'ABSOLUTELY', 'CORRECT'] +1580-141083-0005-1954: ref=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0005-1954: hyp=['I', 'WAS', 'ABSENT', 'RATHER', 'MORE', 'THAN', 'AN', 'HOUR'] +1580-141083-0006-1955: ref=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANNISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0006-1955: hyp=['THE', 'ONLY', 'DUPLICATE', 'WHICH', 'EXISTED', 'SO', 'FAR', 'AS', 'I', 'KNEW', 'WAS', 'THAT', 'WHICH', 'BELONGED', 'TO', 'MY', 'SERVANT', 'BANISTER', 'A', 'MAN', 'WHO', 'HAS', 'LOOKED', 'AFTER', 'MY', 'ROOM', 'FOR', 'TEN', 'YEARS', 'AND', 'WHOSE', 'HONESTY', 'IS', 'ABSOLUTELY', 'ABOVE', 'SUSPICION'] +1580-141083-0007-1956: ref=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPERS'] +1580-141083-0007-1956: hyp=['THE', 'MOMENT', 'I', 'LOOKED', 'AT', 'MY', 'TABLE', 'I', 'WAS', 'AWARE', 'THAT', 'SOMEONE', 'HAD', 'RUMMAGED', 'AMONG', 'MY', 'PAPERS'] +1580-141083-0008-1957: ref=['THE', 'PROOF', 'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 'ALL', 'TOGETHER'] +1580-141083-0008-1957: hyp=['THE', 'PROOF', 
'WAS', 'IN', 'THREE', 'LONG', 'SLIPS', 'I', 'HAD', 'LEFT', 'THEM', 'ALTOGETHER'] +1580-141083-0009-1958: ref=['THE', 'ALTERNATIVE', 'WAS', 'THAT', 'SOMEONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0009-1958: hyp=['THEY', 'ALL', 'TURNED', 'OF', 'WAS', 'THAT', 'SOME', 'ONE', 'PASSING', 'HAD', 'OBSERVED', 'THE', 'KEY', 'IN', 'THE', 'DOOR', 'HAD', 'KNOWN', 'THAT', 'I', 'WAS', 'OUT', 'AND', 'HAD', 'ENTERED', 'TO', 'LOOK', 'AT', 'THE', 'PAPERS'] +1580-141083-0010-1959: ref=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0010-1959: hyp=['I', 'GAVE', 'HIM', 'A', 'LITTLE', 'BRANDY', 'AND', 'LEFT', 'HIM', 'COLLAPSED', 'IN', 'A', 'CHAIR', 'WHILE', 'I', 'MADE', 'A', 'MOST', 'CAREFUL', 'EXAMINATION', 'OF', 'THE', 'ROOM'] +1580-141083-0011-1960: ref=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0011-1960: hyp=['A', 'BROKEN', 'TIP', 'OF', 'LEAD', 'WAS', 'LYING', 'THERE', 'ALSO'] +1580-141083-0012-1961: ref=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0012-1961: hyp=['NOT', 'ONLY', 'THIS', 'BUT', 'ON', 'THE', 'TABLE', 'I', 'FOUND', 'A', 'SMALL', 'BALL', 'OF', 'BLACK', 'DOUGH', 'OR', 'CLAY', 'WITH', 'SPECKS', 'OF', 'SOMETHING', 'WHICH', 'LOOKS', 'LIKE', 'SAWDUST', 'IN', 'IT'] +1580-141083-0013-1962: ref=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0013-1962: hyp=['ABOVE', 'ALL', 'THINGS', 'I', 'DESIRE', 'TO', 'SETTLE', 'THE', 'MATTER', 'QUIETLY', 'AND', 'DISCREETLY'] +1580-141083-0014-1963: ref=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0014-1963: hyp=['TO', 'THE', 'BEST', 'OF', 'MY', 'BELIEF', 'THEY', 'WERE', 'ROLLED', 'UP'] +1580-141083-0015-1964: ref=['DID', 'ANYONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0015-1964: hyp=['DID', 'ANY', 'ONE', 'KNOW', 'THAT', 'THESE', 'PROOFS', 'WOULD', 'BE', 'THERE', 'NO', 'ONE', 'SAVE', 'THE', 'PRINTER'] +1580-141083-0016-1965: ref=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0016-1965: hyp=['I', 'WAS', 'IN', 'SUCH', 'A', 'HURRY', 'TO', 'COME', 'TO', 'YOU', 'YOU', 'LEFT', 'YOUR', 'DOOR', 'OPEN'] +1580-141083-0017-1966: ref=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0017-1966: hyp=['SO', 'IT', 'SEEMS', 'TO', 'ME'] +1580-141083-0018-1967: ref=['NOW', 'MISTER', 'SOAMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0018-1967: hyp=['NOW', 'MISTER', 'SOLMES', 'AT', 'YOUR', 'DISPOSAL'] +1580-141083-0019-1968: ref=['ABOVE', 'WERE', 'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0019-1968: hyp=['ABOVE', 'WERE', 'THREE', 'STUDENTS', 'ONE', 'ON', 'EACH', 'STORY'] +1580-141083-0020-1969: ref=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NECK', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] +1580-141083-0020-1969: hyp=['THEN', 'HE', 'APPROACHED', 'IT', 'AND', 'STANDING', 'ON', 'TIPTOE', 'WITH', 'HIS', 'NET', 'CRANED', 'HE', 'LOOKED', 'INTO', 'THE', 'ROOM'] 
+1580-141083-0021-1970: ref=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 'THE', 'ONE', 'PANE', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0021-1970: hyp=['THERE', 'IS', 'NO', 'OPENING', 'EXCEPT', 'THE', 'ONE', 'PAIN', 'SAID', 'OUR', 'LEARNED', 'GUIDE'] +1580-141083-0022-1971: ref=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0022-1971: hyp=['I', 'AM', 'AFRAID', 'THERE', 'ARE', 'NO', 'SIGNS', 'HERE', 'SAID', 'HE'] +1580-141083-0023-1972: ref=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0023-1972: hyp=['ONE', 'COULD', 'HARDLY', 'HOPE', 'FOR', 'ANY', 'UPON', 'SO', 'DRY', 'A', 'DAY'] +1580-141083-0024-1973: ref=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0024-1973: hyp=['YOU', 'LEFT', 'HIM', 'IN', 'A', 'CHAIR', 'YOU', 'SAY', 'WHICH', 'CHAIR', 'BY', 'THE', 'WINDOW', 'THERE'] +1580-141083-0025-1974: ref=['THE', 'MAN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0025-1974: hyp=['THE', 'MEN', 'ENTERED', 'AND', 'TOOK', 'THE', 'PAPERS', 'SHEET', 'BY', 'SHEET', 'FROM', 'THE', 'CENTRAL', 'TABLE'] +1580-141083-0026-1975: ref=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOAMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0026-1975: hyp=['AS', 'A', 'MATTER', 'OF', 'FACT', 'HE', 'COULD', 'NOT', 'SAID', 'SOLMES', 'FOR', 'I', 'ENTERED', 'BY', 'THE', 'SIDE', 'DOOR'] +1580-141083-0027-1976: ref=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0027-1976: hyp=['HOW', 'LONG', 'WOULD', 'IT', 'TAKE', 'HIM', 'TO', 'DO', 'THAT', 'USING', 'EVERY', 'POSSIBLE', 'CONTRACTION', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'NOT', 'LESS'] +1580-141083-0028-1977: ref=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0028-1977: hyp=['THEN', 'HE', 'TOSSED', 'IT', 'DOWN', 'AND', 'SEIZED', 'THE', 'NEXT'] +1580-141083-0029-1978: ref=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0029-1978: hyp=['HE', 'WAS', 'IN', 'THE', 'MIDST', 'OF', 'THAT', 'WHEN', 'YOUR', 'RETURN', 'CAUSED', 'HIM', 'TO', 'MAKE', 'A', 'VERY', 'HURRIED', 'RETREAT', 'VERY', 'HURRIED', 'SINCE', 'HE', 'HAD', 'NOT', 'TIME', 'TO', 'REPLACE', 'THE', 'PAPERS', 'WHICH', 'WOULD', 'TELL', 'YOU', 'THAT', 'HE', 'HAD', 'BEEN', 'THERE'] +1580-141083-0030-1979: ref=['MISTER', 'SOAMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] +1580-141083-0030-1979: hyp=['MISTER', 'SOLMES', 'WAS', 'SOMEWHAT', 'OVERWHELMED', 'BY', 'THIS', 'FLOOD', 'OF', 'INFORMATION'] +1580-141083-0031-1980: ref=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0031-1980: hyp=['HOLMES', 'HELD', 'OUT', 'A', 'SMALL', 'CHIP', 'WITH', 'THE', 'LETTERS', 'N', 'N', 'AND', 'A', 'SPACE', 'OF', 'CLEAR', 'WOOD', 'AFTER', 'THEM', 'YOU', 'SEE'] +1580-141083-0032-1981: ref=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] 
+1580-141083-0032-1981: hyp=['WATSON', 'I', 'HAVE', 'ALWAYS', 'DONE', 'YOU', 'AN', 'INJUSTICE', 'THERE', 'ARE', 'OTHERS'] +1580-141083-0033-1982: ref=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0033-1982: hyp=['I', 'WAS', 'HOPING', 'THAT', 'IF', 'THE', 'PAPER', 'ON', 'WHICH', 'HE', 'WROTE', 'WAS', 'THIN', 'SOME', 'TRACE', 'OF', 'IT', 'MIGHT', 'COME', 'THROUGH', 'UPON', 'THIS', 'POLISHED', 'SURFACE', 'NO', 'I', 'SEE', 'NOTHING'] +1580-141083-0034-1983: ref=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0034-1983: hyp=['AS', 'HOLMES', 'DREW', 'THE', 'CURTAIN', 'I', 'WAS', 'AWARE', 'FROM', 'SOME', 'LITTLE', 'RIGIDITY', 'AND', 'AN', 'ALERTNESS', 'OF', 'HIS', 'ATTITUDE', 'THAT', 'HE', 'WAS', 'PREPARED', 'FOR', 'AN', 'EMERGENCY'] +1580-141083-0035-1984: ref=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HALLOA', "WHAT'S", 'THIS'] +1580-141083-0035-1984: hyp=['HOLMES', 'TURNED', 'AWAY', 'AND', 'STOOPED', 'SUDDENLY', 'TO', 'THE', 'FLOOR', 'HULLO', 'WHAT', 'IS', 'THIS'] +1580-141083-0036-1985: ref=['HOLMES', 'HELD', 'IT', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0036-1985: hyp=['HOLMES', 'HUTTED', 'OUT', 'ON', 'HIS', 'OPEN', 'PALM', 'IN', 'THE', 'GLARE', 'OF', 'THE', 'ELECTRIC', 'LIGHT'] +1580-141083-0037-1986: ref=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0037-1986: hyp=['WHAT', 'COULD', 'HE', 'DO', 'HE', 'CAUGHT', 'UP', 'EVERYTHING', 'WHICH', 'WOULD', 'BETRAY', 'HIM', 'AND', 'HE', 'RUSHED', 'INTO', 'YOUR', 'BEDROOM', 'TO', 'CONCEAL', 'HIMSELF'] +1580-141083-0038-1987: ref=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STAIR', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0038-1987: hyp=['I', 'UNDERSTAND', 'YOU', 'TO', 'SAY', 'THAT', 'THERE', 'ARE', 'THREE', 'STUDENTS', 'WHO', 'USE', 'THIS', 'STARE', 'AND', 'ARE', 'IN', 'THE', 'HABIT', 'OF', 'PASSING', 'YOUR', 'DOOR', 'YES', 'THERE', 'ARE'] +1580-141083-0039-1988: ref=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0039-1988: hyp=['AND', 'THEY', 'ARE', 'ALL', 'IN', 'FOR', 'THIS', 'EXAMINATION', 'YES'] +1580-141083-0040-1989: ref=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0040-1989: hyp=['ONE', 'HARDLY', 'LIKES', 'TO', 'THROW', 'SUSPICION', 'WHERE', 'THERE', 'ARE', 'NO', 'PROOFS'] +1580-141083-0041-1990: ref=['LET', 'US', 'HEAR', 'THE', 'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0041-1990: hyp=['LET', 'US', 'SEE', 'THE', 'SUSPICIONS', 'I', 'WILL', 'LOOK', 'AFTER', 'THE', 'PROOFS'] +1580-141083-0042-1991: ref=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0042-1991: hyp=['MY', 'SCHOLAR', 'HAS', 'BEEN', 'LEFT', 'A', 'VERY', 'POOR', 'BUT', 'HE', 'IS', 'HARD', 'WORKING', 'AND', 'INDUSTRIOUS', 
'HE', 'WILL', 'DO', 'WELL'] +1580-141083-0043-1992: ref=['THE', 'TOP', 'FLOOR', 'BELONGS', 'TO', 'MILES', 'MC', 'LAREN'] +1580-141083-0043-1992: hyp=['THE', 'TOP', 'FLOOR', 'BELONGS', 'TO', 'MYLES', 'MC', 'LAREN'] +1580-141083-0044-1993: ref=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0044-1993: hyp=['I', 'DARE', 'NOT', 'GO', 'SO', 'FAR', 'AS', 'THAT', 'BUT', 'OF', 'THE', 'THREE', 'HE', 'IS', 'PERHAPS', 'THE', 'LEAST', 'UNLIKELY'] +1580-141083-0045-1994: ref=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0045-1994: hyp=['HE', 'WAS', 'STILL', 'SUFFERING', 'FROM', 'THIS', 'SUDDEN', 'DISTURBANCE', 'OF', 'THE', 'QUIET', 'ROUTINE', 'OF', 'HIS', 'LIFE'] +1580-141083-0046-1995: ref=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0046-1995: hyp=['BUT', 'I', 'HAVE', 'OCCASIONALLY', 'DONE', 'THE', 'SAME', 'THING', 'AT', 'OTHER', 'TIMES'] +1580-141083-0047-1996: ref=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0047-1996: hyp=['DID', 'YOU', 'LOOK', 'AT', 'THESE', 'PAPERS', 'ON', 'THE', 'TABLE'] +1580-141083-0048-1997: ref=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0048-1997: hyp=['HOW', 'CAME', 'YOU', 'TO', 'LEAVE', 'THE', 'KEY', 'IN', 'THE', 'DOOR'] +1580-141083-0049-1998: ref=['ANYONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0049-1998: hyp=['ANY', 'ONE', 'IN', 'THE', 'ROOM', 'COULD', 'GET', 'OUT', 'YES', 'SIR'] +1580-141083-0050-1999: ref=['I', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0050-1999: hyp=['I', 'HAVE', 'REALLY', "DON'T", 'THINK', 'HE', 'KNEW', 'MUCH', 'ABOUT', 'IT', 'MISTER', 'HOLMES'] +1580-141083-0051-2000: ref=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0051-2000: hyp=['ONLY', 'FOR', 'A', 'MINUTE', 'OR', 'SO'] +1580-141083-0052-2001: ref=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0052-2001: hyp=['OH', 'I', 'WOULD', 'NOT', 'VENTURE', 'TO', 'SAY', 'SIR'] +1580-141083-0053-2002: ref=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141083-0053-2002: hyp=['YOU', "HAVEN'T", 'SEEN', 'ANY', 'OF', 'THEM', 'NO', 'SIR'] +1580-141084-0000-2003: ref=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0000-2003: hyp=['IT', 'WAS', 'THE', 'INDIAN', 'WHOSE', 'DARK', 'SILHOUETTE', 'APPEARED', 'SUDDENLY', 'UPON', 'HIS', 'BLIND'] +1580-141084-0001-2004: ref=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0001-2004: hyp=['HE', 'WAS', 'PACING', 'SWIFTLY', 'UP', 'AND', 'DOWN', 'HIS', 'ROOM'] +1580-141084-0002-2005: ref=['THIS', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0002-2005: hyp=['THE', 'SET', 'OF', 'ROOMS', 'IS', 'QUITE', 'THE', 'OLDEST', 'IN', 'THE', 'COLLEGE', 'AND', 'IT', 'IS', 'NOT', 'UNUSUAL', 'FOR', 'VISITORS', 'TO', 'GO', 'OVER', 'THEM'] +1580-141084-0003-2006: ref=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHRIST'S", 'DOOR'] +1580-141084-0003-2006: hyp=['NO', 'NAMES', 'PLEASE', 'SAID', 'HOLMES', 'AS', 'WE', 'KNOCKED', 'AT', "GILCHER'S", 'DOOR'] 
+1580-141084-0004-2007: ref=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0004-2007: hyp=['OF', 'COURSE', 'HE', 'DID', 'NOT', 'REALIZE', 'THAT', 'IT', 'WAS', 'I', 'WHO', 'WAS', 'KNOCKING', 'BUT', 'NONE', 'THE', 'LESS', 'HIS', 'CONDUCT', 'WAS', 'VERY', 'UNCOURTEOUS', 'AND', 'INDEED', 'UNDER', 'THE', 'CIRCUMSTANCES', 'RATHER', 'SUSPICIOUS'] +1580-141084-0005-2008: ref=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0005-2008: hyp=['THAT', 'IS', 'VERY', 'IMPORTANT', 'SAID', 'HOLMES'] +1580-141084-0006-2009: ref=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0006-2009: hyp=['YOU', "DON'T", 'SEEM', 'TO', 'REALIZE', 'THE', 'POSITION'] +1580-141084-0007-2010: ref=['TO', 'MORROW', 'IS', 'THE', 'EXAMINATION'] +1580-141084-0007-2010: hyp=['TO', 'MORROW', 'WAS', 'THE', 'EXAMINATION'] +1580-141084-0008-2011: ref=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0008-2011: hyp=['I', 'CANNOT', 'ALLOW', 'THE', 'EXAMINATION', 'TO', 'BE', 'HELD', 'IF', 'ONE', 'OF', 'THE', 'PAPERS', 'HAS', 'BEEN', 'TAMPERED', 'WITH', 'THE', 'SITUATION', 'MUST', 'BE', 'FACED'] +1580-141084-0009-2012: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0009-2012: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'I', 'MAY', 'BE', 'IN', 'A', 'POSITION', 'THEN', 'TO', 'INDICATE', 'SOME', 'COURSE', 'OF', 'ACTION'] +1580-141084-0010-2013: ref=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0010-2013: hyp=['I', 'WILL', 'TAKE', 'THE', 'BLACK', 'CLAY', 'WITH', 'ME', 'ALSO', 'THE', 'PENCIL', 'CUTTINGS', 'GOOD', 'BYE'] +1580-141084-0011-2014: ref=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0011-2014: hyp=['WHEN', 'WE', 'WERE', 'OUT', 'IN', 'THE', 'DARKNESS', 'OF', 'THE', 'QUADRANGLE', 'WE', 'AGAIN', 'LOOKED', 'UP', 'AT', 'THE', 'WINDOWS'] +1580-141084-0012-2015: ref=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0012-2015: hyp=['THE', 'FOUL', 'MOUTHED', 'FELLOW', 'AT', 'THE', 'TOP'] +1580-141084-0013-2016: ref=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0013-2016: hyp=['HE', 'IS', 'THE', 'ONE', 'WITH', 'THE', 'WORST', 'RECORD'] +1580-141084-0014-2017: ref=['WHY', 'BANNISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0014-2017: hyp=['WHY', 'BANISTER', 'THE', 'SERVANT', "WHAT'S", 'HIS', 'GAME', 'IN', 'THE', 'MATTER'] +1580-141084-0015-2018: ref=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0015-2018: hyp=['HE', 'IMPRESSED', 'ME', 'AS', 'BEING', 'A', 'PERFECTLY', 'HONEST', 'MAN'] +1580-141084-0016-2019: ref=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'IN', 'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0016-2019: hyp=['MY', 'FRIEND', 'DID', 'NOT', 'APPEAR', 'TO', 'BE', 'DEPRESSED', 'BY', 'HIS', 'FAILURE', 'BUT', 'SHRUGGED', 'HIS', 'SHOULDERS', 'AND', 
'HALF', 'HUMOROUS', 'RESIGNATION'] +1580-141084-0017-2020: ref=['NO', 'GOOD', 'MY', 'DEAR', 'WATSON'] +1580-141084-0017-2020: hyp=['NO', 'GOOD', 'MY', 'DEAR', 'WATSON'] +1580-141084-0018-2021: ref=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0018-2021: hyp=['I', 'THINK', 'SO', 'YOU', 'HAVE', 'FORMED', 'A', 'CONCLUSION'] +1580-141084-0019-2022: ref=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0019-2022: hyp=['YES', 'MY', 'DEAR', 'WATSON', 'I', 'HAVE', 'SOLVED', 'THE', 'MYSTERY'] +1580-141084-0020-2023: ref=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0020-2023: hyp=['LOOK', 'AT', 'THAT', 'HE', 'HELD', 'OUT', 'HIS', 'HAND'] +1580-141084-0021-2024: ref=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0021-2024: hyp=['ON', 'THE', 'PALM', 'WERE', 'THREE', 'LITTLE', 'PYRAMIDS', 'OF', 'BLACK', 'DOUGHY', 'CLAY'] +1580-141084-0022-2025: ref=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0022-2025: hyp=['AND', 'ONE', 'MORE', 'THIS', 'MORNING'] +1580-141084-0023-2026: ref=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0023-2026: hyp=['IN', 'A', 'FEW', 'HOURS', 'THE', 'EXAMINATION', 'WOULD', 'COMMENCE', 'AND', 'HE', 'WAS', 'STILL', 'IN', 'THE', 'DILEMMA', 'BETWEEN', 'MAKING', 'THE', 'FACTS', 'PUBLIC', 'AND', 'ALLOWING', 'THE', 'CULPRIT', 'TO', 'COMPETE', 'FOR', 'THE', 'VALUABLE', 'SCHOLARSHIP'] +1580-141084-0024-2027: ref=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TWO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0024-2027: hyp=['HE', 'COULD', 'HARDLY', 'STAND', 'STILL', 'SO', 'GREAT', 'WAS', 'HIS', 'MENTAL', 'AGITATION', 'AND', 'HE', 'RAN', 'TOWARDS', 'HOLMES', 'WITH', 'TOO', 'EAGER', 'HANDS', 'OUTSTRETCHED', 'THANK', 'HEAVEN', 'THAT', 'YOU', 'HAVE', 'COME'] +1580-141084-0025-2028: ref=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0025-2028: hyp=['YOU', 'KNOW', 'HIM', 'I', 'THINK', 'SO'] +1580-141084-0026-2029: ref=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0026-2029: hyp=['IF', 'THIS', 'MATTER', 'IS', 'NOT', 'TO', 'BECOME', 'PUBLIC', 'WE', 'MUST', 'GIVE', 'OURSELVES', 'CERTAIN', 'POWERS', 'AND', 'RESOLVE', 'OURSELVES', 'INTO', 'A', 'SMALL', 'PRIVATE', 'COURT', 'MARTIAL'] +1580-141084-0027-2030: ref=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0027-2030: hyp=['NO', 'SIR', 'CERTAINLY', 'NOT'] +1580-141084-0028-2031: ref=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0028-2031: hyp=['THERE', 'WAS', 'NO', 'MAN', 'SIR'] +1580-141084-0029-2032: ref=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANNISTER', 'IN', 'THE', 'FARTHER', 'CORNER'] +1580-141084-0029-2032: hyp=['HIS', 'TROUBLED', 'BLUE', 'EYES', 'GLANCED', 'AT', 'EACH', 'OF', 'US', 'AND', 'FINALLY', 'RESTED', 'WITH', 'AN', 'EXPRESSION', 'OF', 'BLANK', 'DISMAY', 'UPON', 'BANISTER', 'IN', 'THE', 'FARTHER', 
'CORNER'] +1580-141084-0030-2033: ref=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 'HOLMES'] +1580-141084-0030-2033: hyp=['JUST', 'CLOSE', 'THE', 'DOOR', 'SAID', 'HOLMES'] +1580-141084-0031-2034: ref=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILCHRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0031-2034: hyp=['WE', 'WANT', 'TO', 'KNOW', 'MISTER', 'GILGRIST', 'HOW', 'YOU', 'AN', 'HONOURABLE', 'MAN', 'EVER', 'CAME', 'TO', 'COMMIT', 'SUCH', 'AN', 'ACTION', 'AS', 'THAT', 'OF', 'YESTERDAY'] +1580-141084-0032-2035: ref=['FOR', 'A', 'MOMENT', 'GILCHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0032-2035: hyp=['FOR', 'A', 'MOMENT', 'GO', 'CHRIST', 'WITH', 'UPRAISED', 'HAND', 'TRIED', 'TO', 'CONTROL', 'HIS', 'WRITHING', 'FEATURES'] +1580-141084-0033-2036: ref=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0033-2036: hyp=['COME', 'COME', 'SAID', 'HOLMES', 'KINDLY', 'IT', 'IS', 'HUMAN', 'TO', 'ERR', 'AND', 'AT', 'LEAST', 'NO', 'ONE', 'CAN', 'ACCUSE', 'YOU', 'OF', 'BEING', 'A', 'CALLOUS', 'CRIMINAL'] +1580-141084-0034-2037: ref=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'NO', 'INJUSTICE'] +1580-141084-0034-2037: hyp=['WELL', 'WELL', "DON'T", 'TROUBLE', 'TO', 'ANSWER', 'LISTEN', 'AND', 'SEE', 'THAT', 'I', 'DO', 'YOU', 'KNOW', 'INJUSTICE'] +1580-141084-0035-2038: ref=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0035-2038: hyp=['HE', 'COULD', 'EXAMINE', 'THE', 'PAPERS', 'IN', 'HIS', 'OWN', 'OFFICE'] +1580-141084-0036-2039: ref=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0036-2039: hyp=['THE', 'INDIAN', 'I', 'ALSO', 'THOUGHT', 'NOTHING', 'OF'] +1580-141084-0037-2040: ref=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0037-2040: hyp=['WHEN', 'I', 'APPROACHED', 'YOUR', 'ROOM', 'I', 'EXAMINED', 'THE', 'WINDOW'] +1580-141084-0038-2041: ref=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0038-2041: hyp=['NO', 'ONE', 'LESS', 'THAN', 'THAT', 'WOULD', 'HAVE', 'A', 'CHANCE'] +1580-141084-0039-2042: ref=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0039-2042: hyp=['I', 'ENTERED', 'AND', 'I', 'TOOK', 'YOU', 'INTO', 'MY', 'CONFIDENCE', 'AS', 'TO', 'THE', 'SUGGESTIONS', 'OF', 'THE', 'SIDE', 'TABLE'] +1580-141084-0040-2043: ref=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'AWARE', 'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0040-2043: hyp=['HE', 'RETURNED', 'CARRYING', 'HIS', 'JUMPING', 'SHOES', 'WHICH', 'ARE', 'PROVIDED', 'AS', 'YOU', 'ARE', 'AWARE', 'WITH', 'SEVERAL', 'SHARP', 'SPIKES'] +1580-141084-0041-2044: ref=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] +1580-141084-0041-2044: hyp=['NO', 'HARM', 'WOULD', 'HAVE', 'BEEN', 'DONE', 'HAD', 'IT', 'NOT', 'BEEN', 'THAT', 'AS', 'HE', 'PASSED', 'YOUR', 'DOOR', 'HE', 'PERCEIVED', 'THE', 'KEY', 'WHICH', 'HAD', 
'BEEN', 'LEFT', 'BY', 'THE', 'CARELESSNESS', 'OF', 'YOUR', 'SERVANT'] +1580-141084-0042-2045: ref=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0042-2045: hyp=['A', 'SUDDEN', 'IMPULSE', 'CAME', 'OVER', 'HIM', 'TO', 'ENTER', 'AND', 'SEE', 'IF', 'THEY', 'WERE', 'INDEED', 'THE', 'PROOFS'] +1580-141084-0043-2046: ref=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0043-2046: hyp=['HE', 'PUT', 'HIS', 'SHOES', 'ON', 'THE', 'TABLE'] +1580-141084-0044-2047: ref=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0044-2047: hyp=['GLOVES', 'SAID', 'THE', 'YOUNG', 'MAN'] +1580-141084-0045-2048: ref=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0045-2048: hyp=['SUDDENLY', 'HE', 'HEARD', 'HIM', 'AT', 'THE', 'VERY', 'DOOR', 'THERE', 'WAS', 'NO', 'POSSIBLE', 'ESCAPE'] +1580-141084-0046-2049: ref=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILCHRIST'] +1580-141084-0046-2049: hyp=['HAVE', 'I', 'TOLD', 'THE', 'TRUTH', 'MISTER', 'GILGRIST'] +1580-141084-0047-2050: ref=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOAMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0047-2050: hyp=['I', 'HAVE', 'A', 'LETTER', 'HERE', 'MISTER', 'SOLMES', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'EARLY', 'THIS', 'MORNING', 'IN', 'THE', 'MIDDLE', 'OF', 'A', 'RESTLESS', 'NIGHT'] +1580-141084-0048-2051: ref=['IT', 'WILL', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0048-2051: hyp=['IT', 'WOULD', 'BE', 'CLEAR', 'TO', 'YOU', 'FROM', 'WHAT', 'I', 'HAVE', 'SAID', 'THAT', 'ONLY', 'YOU', 'COULD', 'HAVE', 'LET', 'THIS', 'YOUNG', 'MAN', 'OUT', 'SINCE', 'YOU', 'WERE', 'LEFT', 'IN', 'THE', 'ROOM', 'AND', 'MUST', 'HAVE', 'LOCKED', 'THE', 'DOOR', 'WHEN', 'YOU', 'WENT', 'OUT'] +1580-141084-0049-2052: ref=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0049-2052: hyp=['IT', 'WAS', 'SIMPLE', 'ENOUGH', 'SIR', 'IF', 'YOU', 'ONLY', 'HAD', 'KNOWN', 'BUT', 'WITH', 'ALL', 'YOUR', 'CLEVERNESS', 'IT', 'WAS', 'IMPOSSIBLE', 'THAT', 'YOU', 'COULD', 'KNOW'] +1580-141084-0050-2053: ref=['IF', 'MISTER', 'SOAMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1580-141084-0050-2053: hyp=['IF', 'MISTER', 'SOLMES', 'SAW', 'THEM', 'THE', 'GAME', 'WAS', 'UP'] +1995-1826-0000-750: ref=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 'ENTHUSIASM'] +1995-1826-0000-750: hyp=['IN', 'THE', 'DEBATE', 'BETWEEN', 'THE', 'SENIOR', 'SOCIETIES', 'HER', 'DEFENCE', 'OF', 'THE', 'FIFTEENTH', 'AMENDMENT', 'HAD', 'BEEN', 'NOT', 'ONLY', 'A', 'NOTABLE', 'BIT', 'OF', 'REASONING', 'BUT', 'DELIVERED', 'WITH', 'REAL', 'ENTHUSIASM'] +1995-1826-0001-751: ref=['THE', 'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 
'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0001-751: hyp=['THE', 'SOUTH', 'SHE', 'HAD', 'NOT', 'THOUGHT', 'OF', 'SERIOUSLY', 'AND', 'YET', 'KNOWING', 'OF', 'ITS', 'DELIGHTFUL', 'HOSPITALITY', 'AND', 'MILD', 'CLIMATE', 'SHE', 'WAS', 'NOT', 'AVERSE', 'TO', 'CHARLESTON', 'OR', 'NEW', 'ORLEANS'] +1995-1826-0002-752: ref=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0002-752: hyp=['JOHN', 'TAYLOR', 'WHO', 'HAD', 'SUPPORTED', 'HER', 'THROUGH', 'COLLEGE', 'WAS', 'INTERESTED', 'IN', 'COTTON'] +1995-1826-0003-753: ref=['BETTER', 'GO', 'HE', 'HAD', 'COUNSELLED', 'SENTENTIOUSLY'] +1995-1826-0003-753: hyp=['BETTER', 'GO', 'HE', 'HAD', 'COUNSEL', 'SENTENTIOUSLY'] +1995-1826-0004-754: ref=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0004-754: hyp=['MIGHT', 'LEARN', 'SOMETHING', 'USEFUL', 'DOWN', 'THERE'] +1995-1826-0005-755: ref=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0005-755: hyp=['BUT', 'JOHN', "THERE'S", 'NO', 'SOCIETY', 'JUST', 'ELEMENTARY', 'WORK'] +1995-1826-0006-756: ref=['BEEN', 'LOOKING', 'UP', 'TOOMS', 'COUNTY'] +1995-1826-0006-756: hyp=['BEEN', 'LOOKING', 'UP', 'TOMBS', 'COUNTY'] +1995-1826-0007-757: ref=['FIND', 'SOME', 'CRESSWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0007-757: hyp=['FIVE', 'SOME', 'CRUSTWELLS', 'THERE', 'BIG', 'PLANTATIONS', 'RATED', 'AT', 'TWO', 'HUNDRED', 'AND', 'FIFTY', 'THOUSAND', 'DOLLARS'] +1995-1826-0008-758: ref=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0008-758: hyp=['SOME', 'OTHERS', 'TOO', 'BIG', 'COTTON', 'COUNTY'] +1995-1826-0009-759: ref=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0009-759: hyp=['YOU', 'OUGHT', 'TO', 'KNOW', 'JOHN', 'IF', 'I', 'TEACH', 'NEGROES', "I'LL", 'SCARCELY', 'SEE', 'MUCH', 'OF', 'PEOPLE', 'IN', 'MY', 'OWN', 'CLASS'] +1995-1826-0010-760: ref=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0010-760: hyp=['AT', 'ANY', 'RATE', 'I', 'SAY', 'GO'] +1995-1826-0011-761: ref=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0011-761: hyp=['HERE', 'SHE', 'WAS', 'TEACHING', 'DIRTY', 'CHILDREN', 'AND', 'THE', 'SMELL', 'OF', 'CONFUSED', 'ODORS', 'AND', 'BODILY', 'PERSPIRATION', 'WAS', 'TO', 'HER', 'AT', 'TIMES', 'UNBEARABLE'] +1995-1826-0012-762: ref=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GREAT', 'PHILANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0012-762: hyp=['SHE', 'WANTED', 'A', 'GLANCE', 'OF', 'THE', 'NEW', 'BOOKS', 'AND', 'PERIODICALS', 'AND', 'TALK', 'OF', 'GRATEFUL', 'ANTHROPIES', 'AND', 'REFORMS'] +1995-1826-0013-763: ref=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 'WAS', 'THINKING', 'TODAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0013-763: hyp=['SO', 'FOR', 'THE', 'HUNDREDTH', 'TIME', 'SHE', 'WAS', 'THINKING', 'TO', 'DAY', 'AS', 'SHE', 'WALKED', 'ALONE', 'UP', 'THE', 'LANE', 'BACK', 'OF', 'THE', 'BARN', 'AND', 'THEN', 'SLOWLY', 'DOWN', 'THROUGH', 'THE', 'BOTTOMS'] +1995-1826-0014-764: ref=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0014-764: 
hyp=['COTTON', 'SHE', 'PAUSED'] +1995-1826-0015-765: ref=['SHE', 'HAD', 'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'AND', 'SIGHT'] +1995-1826-0015-765: hyp=['SHE', 'HAD', 'ALMOST', 'FORGOTTEN', 'THAT', 'IT', 'WAS', 'HERE', 'WITHIN', 'TOUCH', 'IN', 'SIGHT'] +1995-1826-0016-766: ref=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0016-766: hyp=['THE', 'GLIMMERING', 'SEA', 'OF', 'DELICATE', 'LEAVES', 'WHISPERED', 'AND', 'MURMURED', 'BEFORE', 'HER', 'STRETCHING', 'AWAY', 'TO', 'THE', 'NORTHWARD'] +1995-1826-0017-767: ref=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0017-767: hyp=['THERE', 'MIGHT', 'BE', 'A', 'BIT', 'OF', 'POETRY', 'HERE', 'AND', 'THERE', 'BUT', 'MOST', 'OF', 'THIS', 'PLACE', 'WAS', 'SUCH', 'DESPERATE', 'PROSE'] +1995-1826-0018-768: ref=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0018-768: hyp=['HER', 'REGARD', 'SHIFTED', 'TO', 'THE', 'GREEN', 'STALKS', 'AND', 'LEAVES', 'AGAIN', 'AND', 'SHE', 'STARTED', 'TO', 'MOVE', 'AWAY'] +1995-1826-0019-769: ref=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0019-769: hyp=['COTTON', 'IS', 'A', 'WONDERFUL', 'THING', 'IS', 'IT', 'NOT', 'BOYS', 'SHE', 'SAID', 'RATHER', 'PRIMLY'] +1995-1826-0020-770: ref=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARK', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0020-770: hyp=['MISS', 'TAYLOR', 'DID', 'NOT', 'KNOW', 'MUCH', 'ABOUT', 'COTTON', 'BUT', 'AT', 'LEAST', 'ONE', 'MORE', 'REMARKED', 'SEEMED', 'CALLED', 'FOR'] +1995-1826-0021-771: ref=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0021-771: hyp=["DON'T", 'KNOW', 'WELL', 'OF', 'ALL', 'THINGS', 'INWARDLY', 'COMMENTED', 'MISS', 'TAYLOR', 'LITERALLY', 'BORN', 'IN', 'COTTON', 'AND', 'OH', 'WELL', 'AS', 'MUCH', 'AS', 'TO', 'ASK', "WHAT'S", 'THE', 'USE', 'SHE', 'TURNED', 'AGAIN', 'TO', 'GO'] +1995-1826-0022-772: ref=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0022-772: hyp=['I', 'SUPPOSE', 'THOUGH', "IT'S", 'TOO', 'EARLY', 'FOR', 'THEM', 'THEN', 'CAME', 'THE', 'EXPLOSION'] +1995-1826-0023-773: ref=['GOOBERS', "DON'T", 'GROW', 'ON', 'THE', 'TOPS', 'OF', 'VINES', 'BUT', 'UNDERGROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] +1995-1826-0023-773: hyp=['GOULD', 'WAS', "DON'T", 'GROW', 'ON', 'THE', 'TOPSY', 'BANDS', 'BUT', 'ON', 'THE', 'GROUND', 'ON', 'THE', 'ROOTS', 'LIKE', 'YAMS', 'IS', 'THAT', 'SO'] +1995-1826-0024-774: ref=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HARKENED'] +1995-1826-0024-774: hyp=['THE', 'GOLDEN', 'FLEECE', "IT'S", 'THE', 'SILVER', 'FLEECE', 'HE', 'HEARKENED'] +1995-1826-0025-775: ref=['SOME', 'TIME', "YOU'LL", 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0025-775: hyp=['SOMETIME', 'YOU', 'TELL', 'ME', 'PLEASE', "WON'T", 'YOU'] +1995-1826-0026-776: ref=['NOW', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 
'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1826-0026-776: hyp=['THOU', 'FOR', 'ONE', 'LITTLE', 'HALF', 'HOUR', 'SHE', 'HAD', 'BEEN', 'A', 'WOMAN', 'TALKING', 'TO', 'A', 'BOY', 'NO', 'NOT', 'EVEN', 'THAT', 'SHE', 'HAD', 'BEEN', 'TALKING', 'JUST', 'TALKING', 'THERE', 'WERE', 'NO', 'PERSONS', 'IN', 'THE', 'CONVERSATION', 'JUST', 'THINGS', 'ONE', 'THING', 'COTTON'] +1995-1836-0000-735: ref=['THE', 'HON', 'CHARLES', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0000-735: hyp=['THE', 'HON', 'SMITH', 'MISS', "SARAH'S", 'BROTHER', 'WAS', 'WALKING', 'SWIFTLY', 'UPTOWN', 'FROM', 'MISTER', "EASTERLY'S", 'WALL', 'STREET', 'OFFICE', 'AND', 'HIS', 'FACE', 'WAS', 'PALE'] +1995-1836-0001-736: ref=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0001-736: hyp=['AT', 'LAST', 'THE', 'COTTON', 'COMBINE', 'WAS', 'TO', 'ALL', 'APPEARANCES', 'AN', 'ASSURED', 'FACT', 'AND', 'HE', 'WAS', 'SLATED', 'FOR', 'THE', 'SENATE'] +1995-1836-0002-737: ref=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0002-737: hyp=['WHY', 'SHOULD', 'HE', 'NOT', 'BE', 'AS', 'OTHER', 'MEN'] +1995-1836-0003-738: ref=['SHE', 'WAS', 'NOT', 'HERSELF', 'A', 'NOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0003-738: hyp=['SHE', 'WAS', 'NOT', 'HERSELF', 'UNNOTABLY', 'INTELLIGENT', 'WOMAN', 'SHE', 'GREATLY', 'ADMIRED', 'INTELLIGENCE', 'OR', 'WHATEVER', 'LOOKED', 'TO', 'HER', 'LIKE', 'INTELLIGENCE', 'IN', 'OTHERS'] +1995-1836-0004-739: ref=['AS', 'SHE', 'AWAITED', 'HER', 'GUESTS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 'FEW', 'TONIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CREIGHTON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CREIGHTON', 'MISTER', 'AND', 'MISSUS', 'VANDERPOOL', 'MISTER', 'HARRY', 'CRESSWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0004-739: hyp=['AS', 'SHE', 'AWAITED', 'HER', 'GUESS', 'SHE', 'SURVEYED', 'THE', 'TABLE', 'WITH', 'BOTH', 'SATISFACTION', 'AND', 'DISQUIETUDE', 'FOR', 'HER', 'SOCIAL', 'FUNCTIONS', 'WERE', 'FEW', 'TO', 'NIGHT', 'THERE', 'WERE', 'SHE', 'CHECKED', 'THEM', 'OFF', 'ON', 'HER', 'FINGERS', 'SIR', 'JAMES', 'CRIGHTON', 'THE', 'RICH', 'ENGLISH', 'MANUFACTURER', 'AND', 'LADY', 'CRIGHTON', 'MISTER', 'AND', 'MISSUS', 'VAN', 'DERPOOL', 'MISTER', 'HARRY', 'CRESSWELL', 'AND', 'HIS', 'SISTER', 'JOHN', 'TAYLOR', 'AND', 'HIS', 'SISTER', 'AND', 'MISTER', 'CHARLES', 'SMITH', 'WHOM', 'THE', 'EVENING', 'PAPERS', 'MENTIONED', 'AS', 'LIKELY', 'TO', 'BE', 'UNITED', 'STATES', 'SENATOR', 'FROM', 'NEW', 
'JERSEY', 'A', 'SELECTION', 'OF', 'GUESTS', 'THAT', 'HAD', 'BEEN', 'DETERMINED', 'UNKNOWN', 'TO', 'THE', 'HOSTESS', 'BY', 'THE', 'MEETING', 'OF', 'COTTON', 'INTERESTS', 'EARLIER', 'IN', 'THE', 'DAY'] +1995-1836-0005-740: ref=['MISSUS', 'GREY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0005-740: hyp=['MISSUS', 'GRAY', 'HAD', 'MET', 'SOUTHERNERS', 'BEFORE', 'BUT', 'NOT', 'INTIMATELY', 'AND', 'SHE', 'ALWAYS', 'HAD', 'IN', 'MIND', 'VIVIDLY', 'THEIR', 'CRUELTY', 'TO', 'POOR', 'NEGROES', 'A', 'SUBJECT', 'SHE', 'MADE', 'A', 'POINT', 'OF', 'INTRODUCING', 'FORTHWITH'] +1995-1836-0006-741: ref=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESSWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0006-741: hyp=['SHE', 'WAS', 'THEREFORE', 'MOST', 'AGREEABLY', 'SURPRISED', 'TO', 'HEAR', 'MISTER', 'CRESWELL', 'EXPRESS', 'HIMSELF', 'SO', 'CORDIALLY', 'AS', 'APPROVING', 'OF', 'NEGRO', 'EDUCATION'] +1995-1836-0007-742: ref=['BUT', 'YOU', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0007-742: hyp=['DO', 'BELIEVE', 'IN', 'SOME', 'EDUCATION', 'ASKED', 'MARY', 'TAYLOR'] +1995-1836-0008-743: ref=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HIGHEST', 'CAPACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0008-743: hyp=['I', 'BELIEVE', 'IN', 'THE', 'TRAINING', 'OF', 'PEOPLE', 'TO', 'THEIR', 'HAS', 'CAPACITY', 'THE', 'ENGLISHMAN', 'HERE', 'HEARTILY', 'SECONDED', 'HIM'] +1995-1836-0009-744: ref=['BUT', 'CRESSWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0009-744: hyp=['BUT', 'CRASWELL', 'ADDED', 'SIGNIFICANTLY', 'CAPACITY', 'DIFFERS', 'ENORMOUSLY', 'BETWEEN', 'RACES'] +1995-1836-0010-745: ref=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'OF', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GREY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0010-745: hyp=['THE', 'VANDERPOOLS', 'WERE', 'SURE', 'THIS', 'AND', 'THE', 'ENGLISHMAN', 'INSTANCING', 'INDIA', 'BECAME', 'QUITE', 'ELOQUENT', 'MISSUS', 'GRAY', 'WAS', 'MYSTIFIED', 'BUT', 'HARDLY', 'DARED', 'ADMIT', 'IT', 'THE', 'GENERAL', 'TREND', 'OF', 'THE', 'CONVERSATION', 'SEEMED', 'TO', 'BE', 'THAT', 'MOST', 'INDIVIDUALS', 'NEEDED', 'TO', 'BE', 'SUBMITTED', 'TO', 'THE', 'SHARPEST', 'SCRUTINY', 'BEFORE', 'BEING', 'ALLOWED', 'MUCH', 'EDUCATION', 'AND', 'AS', 'FOR', 'THE', 'LOWER', 'RACES', 'IT', 'WAS', 'SIMPLY', 'CRIMINAL', 'TO', 'OPEN', 'SUCH', 'USELESS', 'OPPORTUNITIES', 'TO', 'THEM'] +1995-1836-0011-746: ref=['POSITIVELY', 'HEROIC', 'ADDED', 'CRESSWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0011-746: hyp=['POSITIVELY', 'HEROIC', 'ADDED', 'CRASWELL', 'AVOIDING', 'HIS', "SISTER'S", 'EYES'] +1995-1836-0012-747: ref=['BUT', "WE'RE", 'NOT', 'ER', 'EXACTLY', 'WELCOMED'] 
+1995-1836-0012-747: hyp=['BUT', 'WE', 'ARE', 'NOT', 'A', 'EXACTLY', 'WELCOME'] +1995-1836-0013-748: ref=['MARY', 'TAYLOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GREY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0013-748: hyp=['MARY', 'TAYLOR', 'HOWEVER', 'RELATED', 'THE', 'TALE', 'OF', 'ZORA', 'TO', 'MISSUS', "GRAY'S", 'PRIVATE', 'EAR', 'LATER'] +1995-1836-0014-749: ref=['FORTUNATELY', 'SAID', 'MISTER', 'VANDERPOOL', 'NORTHERNERS', 'AND', 'SOUTHERNERS', 'ARE', 'ARRIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1836-0014-749: hyp=['FORTUNATELY', 'SAID', 'MISTER', 'VAN', 'DERPOOL', 'NOR', 'THE', 'NOSE', 'AND', 'SOUTHERNERS', 'ALL', 'RIVING', 'AT', 'A', 'BETTER', 'MUTUAL', 'UNDERSTANDING', 'ON', 'MOST', 'OF', 'THESE', 'MATTERS'] +1995-1837-0000-777: ref=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', "ZORA'S", 'MUST', 'BE', 'RUINED'] +1995-1837-0000-777: hyp=['HE', 'KNEW', 'THE', 'SILVER', 'FLEECE', 'HIS', 'AND', 'ZORAS', 'MUST', 'BE', 'RUINED'] +1995-1837-0001-778: ref=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0001-778: hyp=['IT', 'WAS', 'THE', 'FIRST', 'GREAT', 'SORROW', 'OF', 'HIS', 'LIFE', 'IT', 'WAS', 'NOT', 'SO', 'MUCH', 'THE', 'LOSS', 'OF', 'THE', 'COTTON', 'ITSELF', 'BUT', 'THE', 'FANTASY', 'THE', 'HOPES', 'THE', 'DREAMS', 'BUILT', 'AROUND', 'IT'] +1995-1837-0002-779: ref=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0002-779: hyp=['AH', 'THE', 'SWAMP', 'THE', 'CRUEL', 'SWAMP'] +1995-1837-0003-780: ref=['THE', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0003-780: hyp=['WHO', 'REVELATION', 'OF', 'HIS', 'LOVE', 'LIGHTED', 'AND', 'BRIGHTENED', 'SLOWLY', 'TILL', 'IT', 'FLAMED', 'LIKE', 'A', 'SUNRISE', 'OVER', 'HIM', 'AND', 'LEFT', 'HIM', 'IN', 'BURNING', 'WONDER'] +1995-1837-0004-781: ref=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0004-781: hyp=['HE', 'PANTED', 'TO', 'KNOW', 'IF', 'SHE', 'TOO', 'KNEW', 'OR', 'KNEW', 'AND', 'CARED', 'NOT', 'OR', 'CARED', 'AND', 'KNEW', 'NOT'] +1995-1837-0005-782: ref=['SHE', 'WAS', 'SO', 'STRANGE', 'AND', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0005-782: hyp=['SHE', 'WAS', 'SO', 'STRANGE', 'AND', 'HUMAN', 'A', 'CREATURE'] +1995-1837-0006-783: ref=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0006-783: hyp=['THE', 'WORLD', 'WAS', 'WATER', 'VEILED', 'IN', 'MISTS'] +1995-1837-0007-784: ref=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] +1995-1837-0007-784: hyp=['THEN', 'OF', 'A', 'SUDDEN', 'AT', 'MIDDAY', 'THE', 'SUN', 'SHOT', 'OUT', 'HOT', 'AND', 'STILL', 'NO', 'BREATH', 'OF', 'AIR', 'STIRRED', 'THE', 'SKY', 'WAS', 'LIKE', 'BLUE', 'STEEL', 'THE', 'EARTH', 'STEAMED'] +1995-1837-0008-785: ref=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0008-785: hyp=['WHERE', 'WAS', 'THE', 'USE', 'OF', 'IMAGINING'] +1995-1837-0009-786: ref=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 'WITH', 'THE', 'DYKES', 'A', 'WEEK', 'AGO', 
'AND', 'NOW'] +1995-1837-0009-786: hyp=['THE', 'LAGOON', 'HAD', 'BEEN', 'LEVEL', 'WITH', 'THE', 'DIKES', 'A', 'WEEK', 'AGO', 'AND', 'NOW'] +1995-1837-0010-787: ref=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0010-787: hyp=['PERHAPS', 'SHE', 'TOO', 'MIGHT', 'BE', 'THERE', 'WAITING', 'WEEPING'] +1995-1837-0011-788: ref=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0011-788: hyp=['HE', 'STARTED', 'AT', 'THE', 'THOUGHT', 'HE', 'HURRIED', 'FORTH', 'SADLY'] +1995-1837-0012-789: ref=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0012-789: hyp=['HE', 'SPLASHED', 'AND', 'STAMPED', 'ALONG', 'FARTHER', 'AND', 'FARTHER', 'ONWARD', 'UNTIL', 'HE', 'NEARED', 'THE', 'RAMPART', 'OF', 'THE', 'CLEARING', 'AND', 'PUT', 'FOOT', 'UPON', 'THE', 'TREE', 'BRIDGE'] +1995-1837-0013-790: ref=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0013-790: hyp=['THEN', 'HE', 'LOOKED', 'DOWN', 'THE', 'LAGOON', 'WAS', 'DRY'] +1995-1837-0014-791: ref=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0014-791: hyp=['HE', 'STOOD', 'A', 'MOMENT', 'BEWILDERED', 'THEN', 'TURNED', 'AND', 'RUSHED', 'UPON', 'THE', 'ISLAND', 'A', 'GREAT', 'SHEET', 'OF', 'DAZZLING', 'SUNLIGHT', 'SWEPT', 'THE', 'PLACE', 'AND', 'BENEATH', 'LAY', 'A', 'MIGHTY', 'MASS', 'OF', 'OLIVE', 'GREEN', 'THICK', 'TALL', 'WET', 'AND', 'WILLOWY'] +1995-1837-0015-792: ref=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOLLS'] +1995-1837-0015-792: hyp=['THE', 'SQUARES', 'OF', 'COTTON', 'SHARP', 'EDGED', 'HEAVY', 'WERE', 'JUST', 'ABOUT', 'TO', 'BURST', 'TO', 'BOWLS'] +1995-1837-0016-793: ref=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'A', 'TREE'] +1995-1837-0016-793: hyp=['FOR', 'ONE', 'LONG', 'MOMENT', 'HE', 'PAUSED', 'STUPID', 'AGAPE', 'WITH', 'UTTER', 'AMAZEMENT', 'THEN', 'LEANED', 'DIZZILY', 'AGAINST', 'THE', 'TREE'] +1995-1837-0017-794: ref=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0017-794: hyp=['HE', 'GAZED', 'ABOUT', 'PERPLEXED', 'ASTONISHED'] +1995-1837-0018-795: ref=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0018-795: hyp=['HERE', 'LAY', 'THE', 'READING', 'OF', 'THE', 'RIDDLE', 'WITH', 'INFINITE', 'WORK', 'AND', 'PAIN', 'SOME', 'ONE', 'HAD', 'DUG', 'A', 'CANAL', 'FROM', 'THE', 'LAGOON', 'TO', 'THE', 'CREEK', 'INTO', 'WHICH', 'THE', 'FORMER', 'HAD', 'DRAINED', 'BY', 'A', 'LONG', 'AND', 'CROOKED', 'WAY', 'THUS', 'ALLOWING', 'IT', 'TO', 'EMPTY', 'DIRECTLY'] +1995-1837-0019-796: ref=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 'THOUGHT', 'WAS', 'UPPERMOST', 'ZORA'] +1995-1837-0019-796: hyp=['HE', 'SAT', 'DOWN', 'WEAK', 'BEWILDERED', 'AND', 'ONE', 
'THOUGHT', 'WAS', 'UPPERMOST', 'SORA'] +1995-1837-0020-797: ref=['THE', 'YEARS', 'OF', 'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0020-797: hyp=['THE', 'YEARS', 'OF', 'THE', 'DAYS', 'OF', 'HER', 'DYING', 'WERE', 'TEN'] +1995-1837-0021-798: ref=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0021-798: hyp=['THE', 'HOPE', 'AND', 'DREAM', 'OF', 'HARVEST', 'WAS', 'UPON', 'THE', 'LAND'] +1995-1837-0022-799: ref=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0022-799: hyp=['UP', 'IN', 'THE', 'SICK', 'ROOM', 'ZORA', 'LAY', 'ON', 'THE', 'LITTLE', 'WHITE', 'BED'] +1995-1837-0023-800: ref=['THE', 'NET', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEAKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0023-800: hyp=['THE', 'NET', 'AND', 'WEB', 'OF', 'ENDLESS', 'THINGS', 'HAD', 'BEEN', 'CRAWLING', 'AND', 'CREEPING', 'AROUND', 'HER', 'SHE', 'HAD', 'STRUGGLED', 'IN', 'DUMB', 'SPEECHLESS', 'TERROR', 'AGAINST', 'SOME', 'MIGHTY', 'GRASPING', 'THAT', 'STROVE', 'FOR', 'HER', 'LIFE', 'WITH', 'GNARLED', 'AND', 'CREEPING', 'FINGERS', 'BUT', 'NOW', 'AT', 'LAST', 'WEEKLY', 'SHE', 'OPENED', 'HER', 'EYES', 'AND', 'QUESTIONED'] +1995-1837-0024-801: ref=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0024-801: hyp=['FOR', 'A', 'WHILE', 'SHE', 'LAY', 'IN', 'HER', 'CHAIR', 'IN', 'HAPPY', 'DREAMY', 'PLEASURE', 'AT', 'SUN', 'AND', 'BIRD', 'AND', 'TREE'] +1995-1837-0025-802: ref=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'ROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0025-802: hyp=['SHE', 'ROSE', 'WITH', 'A', 'FLEETING', 'GLANCE', 'GATHERED', 'THE', 'SHAWL', 'AROUND', 'HER', 'THEN', 'GLIDING', 'FORWARD', 'WAVERING', 'TREMULOUS', 'SLIPPED', 'ACROSS', 'THE', 'ROAD', 'AND', 'INTO', 'THE', 'SWAMP'] +1995-1837-0026-803: ref=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0026-803: hyp=['SHE', 'HAD', 'BEEN', 'BORN', 'WITHIN', 'ITS', 'BORDERS', 'WITHIN', 'ITS', 'BORDERS', 'SHE', 'HAD', 'LIVED', 'AND', 'GROWN', 'AND', 'WITHIN', 'ITS', 'BORDER', 'SHE', 'HAD', 'MET', 'HER', 'LOVE'] +1995-1837-0027-804: ref=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0027-804: hyp=['ON', 'SHE', 'HURRIED', 'UNTIL', 'SWEEPING', 'DOWN', 'TO', 'THE', 'LAGOON', 'AND', 'THE', 'ISLAND', 'LO', 'THE', 'COTTON', 'LAY', 'BEFORE', 'HER'] +1995-1837-0028-805: ref=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0028-805: hyp=['THE', 'CHAIR', 'WAS', 'EMPTY', 'BUT', 'HE', 'KNEW'] +1995-1837-0029-806: ref=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +1995-1837-0029-806: hyp=['HE', 'DARTED', 'THROUGH', 'THE', 'TREES', 'AND', 'PAUSED', 'A', 'TALL', 'MAN', 
'STRONGLY', 'BUT', 'SLIMLY', 'MADE'] +2094-142345-0000-308: ref=['IT', 'IS', 'A', 'VERY', 'FINE', 'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'THE', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0000-308: hyp=['IT', 'IS', 'A', 'VERY', 'FINE', 'OLD', 'PLACE', 'OF', 'RED', 'BRICK', 'SOFTENED', 'BY', 'A', 'PALE', 'POWDERY', 'LICHEN', 'WHICH', 'HAS', 'DISPERSED', 'ITSELF', 'WITH', 'HAPPY', 'IRREGULARITY', 'SO', 'AS', 'TO', 'BRING', 'THE', 'RED', 'BRICK', 'INTO', 'TERMS', 'OF', 'FRIENDLY', 'COMPANIONSHIP', 'WITH', 'A', 'LIMESTONE', 'ORNAMENTS', 'SURROUNDING', 'THE', 'THREE', 'GABLES', 'THE', 'WINDOWS', 'AND', 'THE', 'DOOR', 'PLACE'] +2094-142345-0001-309: ref=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0001-309: hyp=['BUT', 'THE', 'WINDOWS', 'ARE', 'PATCHED', 'WITH', 'WOODEN', 'PANES', 'AND', 'THE', 'DOOR', 'I', 'THINK', 'IS', 'LIKE', 'THE', 'GATE', 'IT', 'IS', 'NEVER', 'OPENED'] +2094-142345-0002-310: ref=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'A', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0002-310: hyp=['FOR', 'IT', 'IS', 'A', 'SOLID', 'HEAVY', 'HANDSOME', 'DOOR', 'AND', 'MUST', 'ONCE', 'HAVE', 'BEEN', 'IN', 'THE', 'HABIT', 'OF', 'SHUTTING', 'WITH', 'A', 'SONOROUS', 'BANG', 'BEHIND', 'THE', 'LIVERIED', 'LACKEY', 'WHO', 'HAD', 'JUST', 'SEEN', 'HIS', 'MASTER', 'AND', 'MISTRESS', 'OFF', 'THE', 'GROUNDS', 'IN', 'A', 'CARRIAGE', 'AND', 'PAIR'] +2094-142345-0003-311: ref=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0003-311: hyp=['A', 'LARGE', 'OPEN', 'FIREPLACE', 'WITH', 'RUSTY', 'DOGS', 'IN', 'IT', 'AND', 'A', 'BARE', 'BOARDED', 'FLOOR', 'AT', 'THE', 'FAR', 'END', 'FLEECES', 'OF', 'WOOL', 'STACKED', 'UP', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'FLOOR', 'SOME', 'EMPTY', 'CORN', 'BAGS'] +2094-142345-0004-312: ref=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0004-312: hyp=['AND', 'WHAT', 'THROUGH', 'THE', 'LEFT', 'HAND', 'WINDOW'] +2094-142345-0005-313: ref=['SEVERAL', 'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0005-313: hyp=['SEVERAL', 'CLOTHES', 'HORSES', 'A', 'PILLION', 'A', 'SPINNING', 'WHEEL', 'AND', 'AN', 'OLD', 'BOX', 'WIDE', 'OPEN', 'AND', 'STUFFED', 'FULL', 'OF', 'COLOURED', 'RAGS'] +2094-142345-0006-314: ref=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] 
+2094-142345-0006-314: hyp=['AT', 'THE', 'EDGE', 'OF', 'THIS', 'BOX', 'THERE', 'LIES', 'A', 'GREAT', 'WOODEN', 'DOLL', 'WHICH', 'SO', 'FAR', 'AS', 'MUTILATION', 'IS', 'CONCERNED', 'BEARS', 'A', 'STRONG', 'RESEMBLANCE', 'TO', 'THE', 'FINEST', 'GREEK', 'SCULPTURE', 'AND', 'ESPECIALLY', 'IN', 'THE', 'TOTAL', 'LOSS', 'OF', 'ITS', 'NOSE'] +2094-142345-0007-315: ref=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0007-315: hyp=['THE', 'HISTORY', 'OF', 'THE', 'HOUSE', 'IS', 'PLAIN', 'NOW'] +2094-142345-0008-316: ref=['BUT', 'THERE', 'IS', 'ALWAYS', 'A', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BILLED', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0008-316: hyp=['BUT', 'THERE', 'IS', 'ALWAYS', 'A', 'STRONGER', 'SENSE', 'OF', 'LIFE', 'WHEN', 'THE', 'SUN', 'IS', 'BRILLIANT', 'AFTER', 'RAIN', 'AND', 'NOW', 'HE', 'IS', 'POURING', 'DOWN', 'HIS', 'BEAMS', 'AND', 'MAKING', 'SPARKLES', 'AMONG', 'THE', 'WET', 'STRAW', 'AND', 'LIGHTING', 'UP', 'EVERY', 'PATCH', 'OF', 'VIVID', 'GREEN', 'MOSS', 'ON', 'THE', 'RED', 'TILES', 'OF', 'THE', 'COW', 'SHED', 'AND', 'TURNING', 'EVEN', 'THE', 'MUDDY', 'WATER', 'THAT', 'IS', 'HURRYING', 'ALONG', 'THE', 'CHANNEL', 'TO', 'THE', 'DRAIN', 'INTO', 'A', 'MIRROR', 'FOR', 'THE', 'YELLOW', 'BUILD', 'DUCKS', 'WHO', 'ARE', 'SEIZING', 'THE', 'OPPORTUNITY', 'OF', 'GETTING', 'A', 'DRINK', 'WITH', 'AS', 'MUCH', 'BODY', 'IN', 'IT', 'AS', 'POSSIBLE'] +2094-142345-0009-317: ref=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WHITTAW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTON', 'GOSSIP'] +2094-142345-0009-317: hyp=['FOR', 'THE', 'GREAT', 'BARN', 'DOORS', 'ARE', 'THROWN', 'WIDE', 'OPEN', 'AND', 'MEN', 'ARE', 'BUSY', 'THERE', 'MENDING', 'THE', 'HARNESS', 'UNDER', 'THE', 'SUPERINTENDENCE', 'OF', 'MISTER', 'GOBY', 'THE', 'WIDOW', 'OTHERWISE', 'SADDLER', 'WHO', 'ENTERTAINS', 'THEM', 'WITH', 'THE', 'LATEST', 'TREDDLESTONE', 'GOSSIP'] +2094-142345-0010-318: ref=['HETTY', 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SURFACES', 'FOR', 'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0010-318: hyp=["HETTY'S", 'SORREL', 'OFTEN', 'TOOK', 'THE', 'OPPORTUNITY', 'WHEN', 'HER', "AUNT'S", 'BACK', 'WAS', 'TURNED', 'OF', 'LOOKING', 'AT', 
'THE', 'PLEASING', 'REFLECTION', 'OF', 'HERSELF', 'IN', 'THOSE', 'POLISHED', 'SERVICES', 'FOR', 'THE', 'OAK', 'TABLE', 'WAS', 'USUALLY', 'TURNED', 'UP', 'LIKE', 'A', 'SCREEN', 'AND', 'WAS', 'MORE', 'FOR', 'ORNAMENT', 'THAN', 'FOR', 'USE', 'AND', 'SHE', 'COULD', 'SEE', 'HERSELF', 'SOMETIMES', 'IN', 'THE', 'GREAT', 'ROUND', 'PEWTER', 'DISHES', 'THAT', 'WERE', 'RANGED', 'ON', 'THE', 'SHELVES', 'ABOVE', 'THE', 'LONG', 'DEAL', 'DINNER', 'TABLE', 'OR', 'IN', 'THE', 'HOBS', 'OF', 'THE', 'GRATE', 'WHICH', 'ALWAYS', 'SHONE', 'LIKE', 'JASPER'] +2094-142345-0011-319: ref=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0011-319: hyp=['DO', 'NOT', 'SUPPOSE', 'HOWEVER', 'THAT', 'MISSUS', 'POYSER', 'WAS', 'ELDERLY', 'OR', 'SHREWISH', 'IN', 'HER', 'APPEARANCE', 'SHE', 'WAS', 'A', 'GOOD', 'LOOKING', 'WOMAN', 'NOT', 'MORE', 'THAN', 'EIGHT', 'AND', 'THIRTY', 'OF', 'FAIR', 'COMPLEXION', 'AND', 'SANDY', 'HAIR', 'WELL', 'SHAPEN', 'LIGHT', 'FOOTED'] +2094-142345-0012-320: ref=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINAH', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', "DINAH'S", 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0012-320: hyp=['THE', 'FAMILY', 'LIKENESS', 'BETWEEN', 'HER', 'AND', 'HER', 'NIECE', 'DINA', 'MORRIS', 'WITH', 'THE', 'CONTRAST', 'BETWEEN', 'HER', 'KEENNESS', 'AND', 'DYNAS', 'SERAPHIC', 'GENTLENESS', 'OF', 'EXPRESSION', 'MIGHT', 'HAVE', 'SERVED', 'A', 'PAINTER', 'AS', 'AN', 'EXCELLENT', 'SUGGESTION', 'FOR', 'A', 'MARTHA', 'AND', 'MARY'] +2094-142345-0013-321: ref=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EARSHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0013-321: hyp=['HER', 'TONGUE', 'WAS', 'NOT', 'LESS', 'KEEN', 'THAN', 'HER', 'EYE', 'AND', 'WHENEVER', 'A', 'DAMSEL', 'CAME', 'WITHIN', 'EAR', 'SHOT', 'SEEMED', 'TO', 'TAKE', 'UP', 'AN', 'UNFINISHED', 'LECTURE', 'AS', 'A', 'BARREL', 'ORGAN', 'TAKES', 'UP', 'A', 'TUNE', 'PRECISELY', 'AT', 'THE', 'POINT', 'WHERE', 'IT', 'HAD', 'LEFT', 'OFF'] +2094-142345-0014-322: ref=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WHITTAWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0014-322: hyp=['THE', 'FACT', 'THAT', 'IT', 'WAS', 'CHURNING', 'DAY', 'WAS', 'ANOTHER', 'REASON', 'WHY', 'IT', 'WAS', 'INCONVENIENT', 'TO', 'HAVE', 'THE', 'WIDOWS', 'AND', 'WHY', 'CONSEQUENTLY', 'MISSUS', 'POYSER', 'SHOULD', 'SCOLD', 'MOLLY', 'THE', 'HOUSEMAID', 'WITH', 'UNUSUAL', 'SEVERITY'] +2094-142345-0015-323: ref=['TO', 'ALL', 'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 
'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0015-323: hyp=['TO', 'ALL', 'APPEARANCE', 'MOLLY', 'HAD', 'GOT', 'THROUGH', 'HER', 'AFTER', 'DINNER', 'WORK', 'IN', 'AN', 'EXEMPLARY', 'MANNER', 'HAD', 'CLEANED', 'HERSELF', 'WITH', 'GREAT', 'DISPATCH', 'AND', 'NOW', 'CAME', 'TO', 'ASK', 'SUBMISSIVELY', 'IF', 'SHE', 'SHOULD', 'SIT', 'DOWN', 'TO', 'HER', 'SPINNING', 'TILL', 'MILKING', 'TIME'] +2094-142345-0016-324: ref=['SPINNING', 'INDEED'] +2094-142345-0016-324: hyp=['SPINNING', 'INDEED'] +2094-142345-0017-325: ref=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', 'GALLOWSNESS'] +2094-142345-0017-325: hyp=['I', 'NEVER', 'KNEW', 'YOUR', 'EQUALS', 'FOR', 'GALLOWSNESS'] +2094-142345-0018-326: ref=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0018-326: hyp=['WHO', 'TAUGHT', 'YOU', 'TO', 'SCRUB', 'A', 'FLOOR', 'I', 'SHOULD', 'LIKE', 'TO', 'KNOW'] +2094-142345-0019-327: ref=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WHITTAWS', 'INDEED'] +2094-142345-0019-327: hyp=['COMB', 'THE', 'WOOL', 'FOR', 'THE', 'WIDOWS', 'INDEED'] +2094-142345-0020-328: ref=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0020-328: hyp=["THAT'S", 'WHAT', "YOU'D", 'LIKE', 'TO', 'BE', 'DOING', 'IS', 'IT'] +2094-142345-0021-329: ref=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0021-329: hyp=["THAT'S", 'THE', 'WAY', 'WITH', 'YOU', "THAT'S", 'THE', 'ROAD', "YOU'D", 'ALL', 'LIKE', 'TO', 'GO', 'HEADLONGS', 'TO', 'RUIN'] +2094-142345-0022-330: ref=['MISTER', "OTTLEY'S", 'INDEED'] +2094-142345-0022-330: hyp=['MISTER', 'OAKLEIGHS', 'INDEED'] +2094-142345-0023-331: ref=["YOU'RE", 'A', 'RARE', 'UN', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', "IT'S", 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0023-331: hyp=['YOU', 'ARE', 'A', 'RARE', 'AND', 'FOR', 'SITTING', 'DOWN', 'TO', 'YOUR', 'WORK', 'A', 'LITTLE', 'WHILE', 'AFTER', 'ITS', 'TIME', 'TO', 'PUT', 'BY'] +2094-142345-0024-332: ref=['MUNNY', 'MY', "IRON'S", 'TWITE', 'TOLD', 'PEASE', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0024-332: hyp=['MONEY', 'MY', 'IRONS', 'TWITE', 'COLD', 'PIECE', 'PUT', 'IT', 'DOWN', 'TO', 'WARM'] +2094-142345-0025-333: ref=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0025-333: hyp=['COLD', 'IS', 'IT', 'MY', 'DARLING', 'BLESS', 'YOUR', 'SWEET', 'FACE'] +2094-142345-0026-334: ref=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0026-334: hyp=["SHE'S", 'GOING', 'TO', 'PUT', 'THE', 'IRONING', 'THINGS', 'AWAY'] +2094-142345-0027-335: ref=['MUNNY', 'I', 'TOULD', 'IKE', 'TO', 'DO', 'INTO', 'DE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'DE', 'WHITTAWD'] +2094-142345-0027-335: hyp=['MONEY', 'I', 'DID', 'LIKE', 'TO', 'DO', 'INTO', 'THE', 'BARN', 'TO', 'TOMMY', 'TO', 'SEE', 'THE', 'WIDOWED'] +2094-142345-0028-336: ref=['NO', 'NO', 'NO', 'TOTTY', 'UD', 'GET', 'HER', 'FEET', 'WET', 'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0028-336: hyp=['NO', 'NO', 'NO', 'NO', 'TODDY', 'HAD', 'GET', 'HER', 'FEET', 'WET', 'SAID', 'MISSUS', 'POYSER', 'CARRYING', 'AWAY', 'HER', 'IRON'] +2094-142345-0029-337: ref=['DID', 'EVER', 'ANYBODY', 'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0029-337: hyp=['DID', 'EVER', 'ANYBODY', 
'SEE', 'THE', 'LIKE', 'SCREAMED', 'MISSUS', 'POYSER', 'RUNNING', 'TOWARDS', 'THE', 'TABLE', 'WHEN', 'HER', 'EYE', 'HAD', 'FALLEN', 'ON', 'THE', 'BLUE', 'STREAM'] +2094-142345-0030-338: ref=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0030-338: hyp=['TOTTY', 'HOWEVER', 'HAD', 'DESCENDED', 'FROM', 'HER', 'CHAIR', 'WITH', 'GREAT', 'SWIFTNESS', 'AND', 'WAS', 'ALREADY', 'IN', 'RETREAT', 'TOWARDS', 'THE', 'DAIRY', 'WITH', 'A', 'SORT', 'OF', 'WADDLING', 'RUN', 'AND', 'AN', 'AMOUNT', 'OF', 'FAT', 'ON', 'THE', 'NAPE', 'OF', 'HER', 'NECK', 'WHICH', 'MADE', 'HER', 'LOOK', 'LIKE', 'THE', 'METAMORPHOSIS', 'OF', 'A', 'WHITE', 'SUCKLING', 'PIG'] +2094-142345-0031-339: ref=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0031-339: hyp=['AND', 'SHE', 'WAS', 'VERY', 'FOND', 'OF', 'YOU', 'TOO', 'AUNT', 'RACHEL'] +2094-142345-0032-340: ref=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0032-340: hyp=['I', 'OFTEN', 'HEARD', 'HER', 'TALK', 'OF', 'YOU', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'WAY'] +2094-142345-0033-341: ref=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0033-341: hyp=['WHEN', 'SHE', 'HAD', 'THAT', 'BAD', 'ILLNESS', 'AND', 'I', 'WAS', 'ONLY', 'ELEVEN', 'YEARS', 'OLD', 'SHE', 'USED', 'TO', 'SAY', "YOU'LL", 'HAVE', 'A', 'FRIEND', 'ON', 'EARTH', 'IN', 'YOUR', 'AUNT', 'RACHEL', 'IF', "I'M", 'TAKEN', 'FROM', 'YOU', 'FOR', 'SHE', 'HAS', 'A', 'KIND', 'HEART', 'AND', "I'M", 'SURE', "I'VE", 'FOUND', 'IT', 'SO'] +2094-142345-0034-342: ref=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', "I'VE", 'GOT', 'LOTS', 'O', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELLING', 'AS', "ISN'T", 'MADE', 'UP'] +2094-142345-0034-342: hyp=['AND', "THERE'S", 'LINEN', 'IN', 'THE', 'HOUSE', 'AS', 'I', 'COULD', 'WELL', 'SPARE', 'YOU', 'FOR', 'I', 'GOT', 'LOTS', 'OF', 'SHEETING', 'AND', 'TABLE', 'CLOTHING', 'AND', 'TOWELINGS', "ISN'T", 'MADE', 'UP'] +2094-142345-0035-343: ref=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AUNT', 'SAID', 'DINAH'] +2094-142345-0035-343: hyp=['BUT', 'NOT', 'MORE', 'THAN', "WHAT'S", 'IN', 'THE', 'BIBLE', 'AND', 'SAID', 'DINAH'] +2094-142345-0036-344: ref=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 'FAMILIES'] +2094-142345-0036-344: hyp=['NAY', 'DEAR', 'AUNT', 'YOU', 'NEVER', 'HEARD', 'ME', 'SAY', 'THAT', 'ALL', 'PEOPLE', 'ARE', 'CALLED', 'TO', 'FORSAKE', 'THEIR', 'WORK', 'AND', 'THEIR', 'FAMILIES'] +2094-142345-0037-345: ref=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] 
+2094-142345-0037-345: hyp=['WE', 'CAN', 'ALL', 'BE', 'SERVANTS', 'OF', 'GOD', 'WHEREVER', 'OUR', 'LOT', 'IS', 'CAST', 'BUT', 'HE', 'GIVES', 'US', 'DIFFERENT', 'SORTS', 'OF', 'WORK', 'ACCORDING', 'AS', 'HE', 'FITS', 'US', 'FOR', 'IT', 'AND', 'CALLS', 'US', 'TO', 'IT'] +2094-142345-0038-346: ref=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THAN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0038-346: hyp=['I', 'CAN', 'NO', 'MORE', 'HELP', 'SPENDING', 'MY', 'LIFE', 'IN', 'TRYING', 'TO', 'DO', 'WHAT', 'I', 'CAN', 'FOR', 'THE', 'SOULS', 'OF', 'OTHERS', 'THEN', 'YOU', 'COULD', 'HELP', 'RUNNING', 'IF', 'YOU', 'HEARD', 'LITTLE', 'TOTTY', 'CRYING', 'AT', 'THE', 'OTHER', 'END', 'OF', 'THE', 'HOUSE', 'THE', 'VOICE', 'WOULD', 'GO', 'TO', 'YOUR', 'HEART', 'YOU', 'WOULD', 'THINK', 'THE', 'DEAR', 'CHILD', 'WAS', 'IN', 'TROUBLE', 'OR', 'IN', 'DANGER', 'AND', 'YOU', "COULDN'T", 'REST', 'WITHOUT', 'RUNNING', 'TO', 'HELP', 'HER', 'AND', 'COMFORT', 'HER'] +2094-142345-0039-347: ref=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', "I'VE", 'DONE'] +2094-142345-0039-347: hyp=["I'VE", 'STRONG', 'ASSURANCE', 'THAT', 'NO', 'EVIL', 'WILL', 'HAPPEN', 'TO', 'YOU', 'AND', 'MY', 'UNCLE', 'AND', 'THE', 'CHILDREN', 'FROM', 'ANYTHING', 'I', 'HAVE', 'DONE'] +2094-142345-0040-348: ref=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0040-348: hyp=['I', "DIDN'T", 'PREACH', 'WITHOUT', 'DIRECTION'] +2094-142345-0041-349: ref=['DIRECTION'] +2094-142345-0041-349: hyp=['DIRECTION'] +2094-142345-0042-350: ref=['I', 'HANNA', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0042-350: hyp=['I', 'HAD', 'A', 'COMMON', 'PATIENCE', 'WITH', 'YOU'] +2094-142345-0043-351: ref=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0043-351: hyp=['BY', 'THIS', 'TIME', 'THE', 'TWO', 'GENTLEMEN', 'HAD', 'REACHED', 'THE', 'PALINGS', 'AND', 'HAD', 'GOT', 'DOWN', 'FROM', 'THEIR', 'HORSES', 'IT', 'WAS', 'PLAIN', 'THEY', 'MEANT', 'TO', 'COME', 'IN'] +2094-142345-0044-352: ref=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0044-352: hyp=['SAID', 'MISTER', 'IRWINE', 'WITH', 'HIS', 'STATELY', 'CORDIALITY'] +2094-142345-0045-353: ref=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0045-353: hyp=['OH', 'SIR', "DON'T", 'MENTION', 'IT', 'SAID', 'MISSUS', 'POYSER'] +2094-142345-0046-354: ref=['I', 'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0046-354: hyp=['I', 'DELIGHT', 'IN', 'YOUR', 'KITCHEN'] +2094-142345-0047-355: ref=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0047-355: hyp=['POYSER', 'IS', 'NOT', 'AT', 'HOME', 'IS', 'HE'] +2094-142345-0048-356: ref=['SAID', 'CAPTAIN', 'DONNITHORNE', 'SEATING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0048-356: 
hyp=['SAID', 'CAPTAIN', 'DONNITHORNE', 'SITTING', 'HIMSELF', 'WHERE', 'HE', 'COULD', 'SEE', 'ALONG', 'THE', 'SHORT', 'PASSAGE', 'TO', 'THE', 'OPEN', 'DAIRY', 'DOOR'] +2094-142345-0049-357: ref=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSETER', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0049-357: hyp=['NO', 'SIR', 'HE', "ISN'T", "HE'S", 'GONE', 'TO', 'ROSSITUR', 'TO', 'SEE', 'MISTER', 'WEST', 'THE', 'FACTOR', 'ABOUT', 'THE', 'WOOL'] +2094-142345-0050-358: ref=['BUT', "THERE'S", 'FATHER', 'THE', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0050-358: hyp=['BUT', "THERE'S", 'FATHER', 'IN', 'BARN', 'SIR', 'IF', "HE'D", 'BE', 'OF', 'ANY', 'USE'] +2094-142345-0051-359: ref=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0051-359: hyp=['NO', 'THANK', 'YOU', "I'LL", 'JUST', 'LOOK', 'AT', 'THE', 'WHELPS', 'AND', 'LEAVE', 'A', 'MESSAGE', 'ABOUT', 'THEM', 'WITH', 'YOUR', 'SHEPHERD'] +2094-142345-0052-360: ref=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0052-360: hyp=['I', 'MUST', 'COME', 'ANOTHER', 'DAY', 'AND', 'SEE', 'YOUR', 'HUSBAND', 'I', 'WANT', 'TO', 'HAVE', 'A', 'CONSULTATION', 'WITH', 'HIM', 'ABOUT', 'HORSES'] +2094-142345-0053-361: ref=['FOR', 'IF', "HE'S", 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0053-361: hyp=['FOR', 'IF', 'IS', 'ANYWHERE', 'ON', 'THE', 'FARM', 'WE', 'CAN', 'SEND', 'FOR', 'HIM', 'IN', 'A', 'MINUTE'] +2094-142345-0054-362: ref=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0054-362: hyp=['OH', 'SIR', 'SAID', 'MISSUS', 'POYSER', 'RATHER', 'ALARMED', 'YOU', "WOULDN'T", 'LIKE', 'IT', 'AT', 'ALL'] +2094-142345-0055-363: ref=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0055-363: hyp=['BUT', 'YOU', 'KNOW', 'MORE', 'ABOUT', 'THAT', 'THAN', 'I', 'DO', 'SIR'] +2094-142345-0056-364: ref=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0056-364: hyp=['I', 'THINK', 'I', 'SHOULD', 'BE', 'DOING', 'YOU', 'A', 'SERVICE', 'TO', 'TURN', 'YOU', 'OUT', 'OF', 'SUCH', 'A', 'PLACE'] +2094-142345-0057-365: ref=['I', 'KNOW', 'HIS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] +2094-142345-0057-365: hyp=['I', 'KNOWS', 'FARM', 'IS', 'IN', 'BETTER', 'ORDER', 'THAN', 'ANY', 'OTHER', 'WITHIN', 'TEN', 'MILES', 'OF', 'US', 'AND', 'AS', 'FOR', 'THE', 'KITCHEN', 'HE', 'ADDED', 'SMILING', 'I', "DON'T", 'BELIEVE', "THERE'S", 'ONE', 'IN', 'THE', 'KINGDOM', 'TO', 'BEAT', 'IT'] +2094-142345-0058-366: ref=['BY', 'THE', 'BY', "I'VE", 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DAIRY', 'MISSUS', 'POYSER'] +2094-142345-0058-366: hyp=['BY', 'THE', 'BY', 'I', 'HAVE', 'NEVER', 'SEEN', 'YOUR', 'DAIRY', 'I', 'MUST', 'SEE', 'YOUR', 'DEARIE', 'MISSUS', 'POYSER'] +2094-142345-0059-367: ref=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 
'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] +2094-142345-0059-367: hyp=['THIS', 'MISSUS', 'POYSER', 'SAID', 'BLUSHING', 'AND', 'BELIEVING', 'THAT', 'THE', 'CAPTAIN', 'WAS', 'REALLY', 'INTERESTED', 'IN', 'HER', 'MILK', 'PANS', 'AND', 'WOULD', 'ADJUST', 'HIS', 'OPINION', 'OF', 'HER', 'TO', 'THE', 'APPEARANCE', 'OF', 'HER', 'DAIRY'] +2094-142345-0060-368: ref=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2094-142345-0060-368: hyp=['OH', "I'VE", 'NO', 'DOUBT', "IT'S", 'IN', 'CAPITAL', 'ORDER'] +2300-131720-0000-1816: ref=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0000-1816: hyp=['THE', 'PARIS', 'PLANT', 'LIKE', 'THAT', 'AT', 'THE', 'CRYSTAL', 'PALACE', 'WAS', 'A', 'TEMPORARY', 'EXHIBIT'] +2300-131720-0001-1817: ref=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIADUCT'] +2300-131720-0001-1817: hyp=['THE', 'LONDON', 'PLANT', 'WAS', 'LESS', 'TEMPORARY', 'BUT', 'NOT', 'PERMANENT', 'SUPPLYING', 'BEFORE', 'IT', 'WAS', 'TORN', 'OUT', 'NO', 'FEWER', 'THAN', 'THREE', 'THOUSAND', 'LAMPS', 'IN', 'HOTELS', 'CHURCHES', 'STORES', 'AND', 'DWELLINGS', 'IN', 'THE', 'VICINITY', 'OF', 'HOLBORN', 'VIEDUC'] +2300-131720-0002-1818: ref=['THERE', 'MESSRS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0002-1818: hyp=['THERE', 'MESSIERS', 'JOHNSON', 'AND', 'HAMMER', 'PUT', 'INTO', 'PRACTICE', 'MANY', 'OF', 'THE', 'IDEAS', 'NOW', 'STANDARD', 'IN', 'THE', 'ART', 'AND', 'SECURED', 'MUCH', 'USEFUL', 'DATA', 'FOR', 'THE', 'WORK', 'IN', 'NEW', 'YORK', 'OF', 'WHICH', 'THE', 'STORY', 'HAS', 'JUST', 'BEEN', 'TOLD'] +2300-131720-0003-1819: ref=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0003-1819: hyp=['THE', 'DYNAMO', 'ELECTRIC', 'MACHINE', 'THOUGH', 'SMALL', 'WAS', 'ROBUST', 'FOR', 'UNDER', 'ALL', 'THE', 'VARYING', 'SPEEDS', 'OF', 'WATER', 'POWER', 'AND', 'THE', 'VICISSITUDES', 'OF', 'THE', 'PLANT', 'TO', 'WHICH', 'IT', 'BELONGED', 'IT', 'CONTINUED', 'IN', 'ACTIVE', 'USE', 'UNTIL', 'EIGHTEEN', 'NINETY', 'NINE', 'SEVENTEEN', 'YEARS'] +2300-131720-0004-1820: ref=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0004-1820: hyp=['OWING', 'TO', 'HIS', 'INSISTENCE', 'ON', 'LOW', 'PRESSURE', 'DIRECT', 'CURRENT', 'FOR', 'USE', 'IN', 'DENSELY', 'POPULATED', 'DISTRICTS', 'AS', 'THE', 'ONLY', 'SAFE', 'AND', 'TRULY', 'UNIVERSAL', 
'PROFITABLE', 'WAY', 'OF', 'DELIVERING', 'ELECTRICAL', 'ENERGY', 'TO', 'THE', 'CONSUMERS', 'EDISON', 'HAS', 'BEEN', 'FREQUENTLY', 'SPOKEN', 'OF', 'AS', 'AN', 'OPPONENT', 'OF', 'THE', 'ALTERNATING', 'CURRENT'] +2300-131720-0005-1821: ref=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0005-1821: hyp=['WHY', 'IF', 'WE', 'ERECT', 'A', 'STATION', 'AT', 'THE', 'FALLS', 'IT', 'IS', 'A', 'GREAT', 'ECONOMY', 'TO', 'GET', 'IT', 'UP', 'TO', 'THE', 'CITY'] +2300-131720-0006-1822: ref=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0006-1822: hyp=['THERE', 'SEEMS', 'NO', 'GOOD', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'IT', 'WILL', 'CHANGE'] +2300-131720-0007-1823: ref=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEEP', 'THEM', 'HE', 'IS', 'IDIOSYNCRATICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0007-1823: hyp=['BROAD', 'AS', 'THE', 'PRAIRIES', 'AND', 'FREE', 'IN', 'THOUGHT', 'AS', 'THE', 'WINDS', 'THAT', 'SWEPT', 'THEM', 'HE', 'IS', 'IDIOS', 'AND', 'CRADICALLY', 'OPPOSED', 'TO', 'LOOSE', 'AND', 'WASTEFUL', 'METHODS', 'TO', 'PLANS', 'OF', 'EMPIRE', 'THAT', 'NEGLECT', 'THE', 'POOR', 'AT', 'THE', 'GATE'] +2300-131720-0008-1824: ref=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0008-1824: hyp=['EVERYTHING', 'HE', 'HAS', 'DONE', 'HAS', 'BEEN', 'AIMED', 'AT', 'THE', 'CONSERVATION', 'OF', 'ENERGY', 'THE', 'CONTRACTION', 'OF', 'SPACE', 'THE', 'INTENSIFICATION', 'OF', 'CULTURE'] +2300-131720-0009-1825: ref=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'IT', 'SERIOUSLY'] +2300-131720-0009-1825: hyp=['FOR', 'SOME', 'YEARS', 'IT', 'WAS', 'NOT', 'FOUND', 'FEASIBLE', 'TO', 'OPERATE', 'MOTORS', 'ON', 'ALTERNATING', 'CURRENT', 'CIRCUITS', 'AND', 'THAT', 'REASON', 'WAS', 'OFTEN', 'URGED', 'AGAINST', 'ITS', 'SERIOUSLY'] +2300-131720-0010-1826: ref=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0010-1826: hyp=['IT', 'COULD', 'NOT', 'BE', 'USED', 'FOR', 'ELECTROPLATING', 'OR', 'DEPOSITION', 'NOR', 'COULD', 'IT', 'CHARGE', 'STORAGE', 'BATTERIES', 'ALL', 'OF', 'WHICH', 'ARE', 'EASILY', 'WITHIN', 'THE', 'ABILITY', 'OF', 'THE', 'DIRECT', 'CURRENT'] +2300-131720-0011-1827: ref=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] +2300-131720-0011-1827: hyp=['BUT', 'WHEN', 'IT', 'CAME', 'TO', 'BE', 'A', 'QUESTION', 'OF', 'LIGHTING', 'A', 'SCATTERED', 'SUBURB', 'A', 'GROUP', 'OF', 'DWELLINGS', 'ON', 'THE', 'OUTSKIRTS', 'A', 'REMOTE', 'COUNTRY', 'RESIDENCE', 'OR', 'A', 'FARM', 'HOUSE', 'THE', 
'ALTERNATING', 'CURRENT', 'IN', 'ALL', 'ELEMENTS', 'SAVE', 'ITS', 'DANGER', 'WAS', 'AND', 'IS', 'IDEAL'] +2300-131720-0012-1828: ref=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODDY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0012-1828: hyp=['EDISON', 'WAS', 'INTOLERANT', 'OF', 'SHAM', 'AND', 'SHODY', 'AND', 'NOTHING', 'WOULD', 'SATISFY', 'HIM', 'THAT', 'COULD', 'NOT', 'STAND', 'CROSS', 'EXAMINATION', 'BY', 'MICROSCOPE', 'TEST', 'TUBE', 'AND', 'GALVANOMETER'] +2300-131720-0013-1829: ref=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERNED', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVAILABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0013-1829: hyp=['UNLESS', 'HE', 'COULD', 'SECURE', 'AN', 'ENGINE', 'OF', 'SMOOTHER', 'RUNNING', 'AND', 'MORE', 'EXACTLY', 'GOVERN', 'AND', 'REGULATED', 'THAN', 'THOSE', 'AVAILABLE', 'FOR', 'HIS', 'DYNAMO', 'AND', 'LAMP', 'EDISON', 'REALIZED', 'THAT', 'HE', 'WOULD', 'FIND', 'IT', 'ALMOST', 'IMPOSSIBLE', 'TO', 'GIVE', 'A', 'STEADY', 'LIGHT'] +2300-131720-0014-1830: ref=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0014-1830: hyp=['MISTER', 'EDISON', 'WAS', 'A', 'LEADER', 'FAR', 'AHEAD', 'OF', 'THE', 'TIME'] +2300-131720-0015-1831: ref=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOAD', 'WITH', 'A', 'FRICTION', 'BRAKE', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0015-1831: hyp=['HE', 'OBTAINED', 'THE', 'DESIRED', 'SPEED', 'AND', 'LOWED', 'WITH', 'A', 'FRICTION', 'BREAK', 'ALSO', 'REGULATOR', 'OF', 'SPEED', 'BUT', 'WAITED', 'FOR', 'AN', 'INDICATOR', 'TO', 'VERIFY', 'IT'] +2300-131720-0016-1832: ref=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICATE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0016-1832: hyp=['THEN', 'AGAIN', 'THERE', 'WAS', 'NO', 'KNOWN', 'WAY', 'TO', 'LUBRICADE', 'AN', 'ENGINE', 'FOR', 'CONTINUOUS', 'RUNNING', 'AND', 'MISTER', 'EDISON', 'INFORMED', 'ME', 'THAT', 'AS', 'A', 'MARINE', 'ENGINE', 'STARTED', 'BEFORE', 'THE', 'SHIP', 'LEFT', 'NEW', 'YORK', 'AND', 'CONTINUED', 'RUNNING', 'UNTIL', 'IT', 'REACHED', 'ITS', 'HOME', 'PORT', 'SO', 'AN', 'ENGINE', 'FOR', 'HIS', 'PURPOSES', 'MUST', 'PRODUCE', 'LIGHT', 'AT', 'ALL', 'TIMES'] +2300-131720-0017-1833: ref=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 'THE', 'MULTIPLE', 'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0017-1833: hyp=['EDISON', 'HAD', 'INSTALLED', 'HIS', 'HISTORIC', 'FIRST', 'GREAT', 'CENTRAL', 'STATION', 'SYSTEM', 'IN', 'NEW', 'YORK', 'ON', 'THE', 'MULTIPLE', 
'ARC', 'SYSTEM', 'COVERED', 'BY', 'HIS', 'FEEDER', 'AND', 'MAIN', 'INVENTION', 'WHICH', 'RESULTED', 'IN', 'A', 'NOTABLE', 'SAVING', 'IN', 'THE', 'COST', 'OF', 'CONDUCTORS', 'AS', 'AGAINST', 'A', 'STRAIGHT', 'TWO', 'WIRE', 'SYSTEM', 'THROUGHOUT', 'OF', 'THE', 'TREE', 'KIND'] +2300-131720-0018-1834: ref=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICTS', 'OF', 'LARGE', 'CITIES'] +2300-131720-0018-1834: hyp=['HE', 'SOON', 'FORESAW', 'THAT', 'STILL', 'GREATER', 'ECONOMY', 'WOULD', 'BE', 'NECESSARY', 'FOR', 'COMMERCIAL', 'SUCCESS', 'NOT', 'ALONE', 'FOR', 'THE', 'LARGER', 'TERRITORY', 'OPENING', 'BUT', 'FOR', 'THE', 'COMPACT', 'DISTRICT', 'OF', 'LARGE', 'CITIES'] +2300-131720-0019-1835: ref=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0019-1835: hyp=['THE', 'STRONG', 'POSITION', 'HELD', 'BY', 'THE', 'EDISON', 'SYSTEM', 'UNDER', 'THE', 'STRENUOUS', 'COMPETITION', 'THAT', 'WAS', 'ALREADY', 'SPRINGING', 'UP', 'WAS', 'ENORMOUSLY', 'IMPROVED', 'BY', 'THE', 'INTRODUCTION', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'AND', 'IT', 'GAVE', 'AN', 'IMMEDIATE', 'IMPETUS', 'TO', 'INCANDESCENT', 'LIGHTING'] +2300-131720-0020-1836: ref=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'POWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0020-1836: hyp=['IT', 'WAS', 'SPECIALLY', 'SUITED', 'FOR', 'A', 'TRIAL', 'PLANT', 'ALSO', 'IN', 'THE', 'EARLY', 'DAYS', 'WHEN', 'A', 'YIELD', 'OF', 'SIX', 'OR', 'EIGHT', 'LAMPS', 'TO', 'THE', 'HORSE', 'BOWER', 'WAS', 'CONSIDERED', 'SUBJECT', 'FOR', 'CONGRATULATION'] +2300-131720-0021-1837: ref=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AND', 'EQUIP', 'CENTRAL', 'STATIONS'] +2300-131720-0021-1837: hyp=['THE', 'STREET', 'CONDUCTORS', 'WERE', 'OF', 'THE', 'OVERHEAD', 'POLE', 'LINE', 'CONSTRUCTION', 'AND', 'WERE', 'INSTALLED', 'BY', 'THE', 'CONSTRUCTION', 'COMPANY', 'THAT', 'HAD', 'BEEN', 'ORGANIZED', 'BY', 'EDISON', 'TO', 'BUILD', 'AN', 'EQUIP', 'CENTRAL', 'STATIONS'] +2300-131720-0022-1838: ref=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'INVENTORS', 'AT', 'PRACTICALLY', 'THE', 'SAME', 'TIME'] +2300-131720-0022-1838: hyp=['MEANWHILE', 'HE', 'HAD', 'CALLED', 'UPON', 'ME', 'TO', 'MAKE', 'A', 'REPORT', 'OF', 'THE', 'THREE', 'WIRE', 'SYSTEM', 'KNOWN', 'IN', 'ENGLAND', 'AS', 'THE', 'HOPKINSON', 'BOTH', 'DOCTOR', 'JOHN', 'HOPKINSON', 'AND', 'MISTER', 'EDISON', 'BEING', 'INDEPENDENT', 'IN', 'VENORS', 'AT', 'PRACTICALLY', 'THE', 'SAME', 'TIME'] +2300-131720-0023-1839: ref=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 
'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0023-1839: hyp=['I', 'THINK', 'HE', 'WAS', 'PERHAPS', 'MORE', 'APPRECIATIVE', 'THAN', 'I', 'WAS', 'OF', 'THE', 'DISCIPLINE', 'OF', 'THE', 'EDISON', 'CONSTRUCTION', 'DEPARTMENT', 'AND', 'THOUGHT', 'IT', 'WOULD', 'BE', 'WELL', 'FOR', 'US', 'TO', 'WAIT', 'UNTIL', 'THE', 'MORNING', 'OF', 'THE', 'FOURTH', 'BEFORE', 'WE', 'STARTED', 'UP'] +2300-131720-0024-1840: ref=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0024-1840: hyp=['BUT', 'THE', 'PLANT', 'RAN', 'AND', 'IT', 'WAS', 'THE', 'FIRST', 'THREE', 'WIRE', 'STATION', 'IN', 'THIS', 'COUNTRY'] +2300-131720-0025-1841: ref=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IN', 'PRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'WHEN', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0025-1841: hyp=['THEY', 'WERE', 'LATER', 'USED', 'AS', 'RESERVE', 'MACHINES', 'AND', 'FINALLY', 'WITH', 'THE', 'ENGINE', 'RETIRED', 'FROM', 'SERVICE', 'AS', 'PART', 'OF', 'THE', 'COLLECTION', 'OF', 'EDISONIA', 'BUT', 'THEY', 'REMAIN', 'IMPRACTICALLY', 'AS', 'GOOD', 'CONDITION', 'AS', 'ONE', 'INSTALLED', 'IN', 'EIGHTEEN', 'EIGHTY', 'THREE'] +2300-131720-0026-1842: ref=['THE', 'ARC', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0026-1842: hyp=['THE', 'ARK', 'LAMP', 'INSTALLED', 'OUTSIDE', 'A', "CUSTOMER'S", 'PREMISES', 'OR', 'IN', 'A', 'CIRCUIT', 'FOR', 'PUBLIC', 'STREET', 'LIGHTING', 'BURNED', 'SO', 'MANY', 'HOURS', 'NIGHTLY', 'SO', 'MANY', 'NIGHTS', 'IN', 'THE', 'MONTH', 'AND', 'WAS', 'PAID', 'FOR', 'AT', 'THAT', 'RATE', 'SUBJECT', 'TO', 'REBATE', 'FOR', 'HOURS', 'WHEN', 'THE', 'LAMP', 'MIGHT', 'BE', 'OUT', 'THROUGH', 'ACCIDENT'] +2300-131720-0027-1843: ref=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0027-1843: hyp=['EDISON', 'HELD', 'THAT', 'THE', 'ELECTRICITY', 'SOLD', 'MUST', 'BE', 'MEASURED', 'JUST', 'LIKE', 'GAS', 'OR', 'WATER', 'AND', 'HE', 'PROCEEDED', 'TO', 'DEVELOP', 'A', 'METER'] +2300-131720-0028-1844: ref=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 'THOUGHT', 'THE', 'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0028-1844: hyp=['THERE', 'WAS', 'INFINITE', 'SCEPTICISM', 'AROUND', 'HIM', 'ON', 'THE', 'SUBJECT', 'AND', 
'WHILE', 'OTHER', 'INVENTORS', 'WERE', 'ALSO', 'GIVING', 'THE', 'SUBJECT', 'THEIR', 'THOUGHT', 'THE', 'PUBLIC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'ANYTHING', 'SO', 'UTTERLY', 'INTANGIBLE', 'AS', 'ELECTRICITY', 'THAT', 'COULD', 'NOT', 'BE', 'SEEN', 'OR', 'WEIGHED', 'AND', 'ONLY', 'GAVE', 'SECONDARY', 'EVIDENCE', 'OF', 'ITSELF', 'AT', 'THE', 'EXACT', 'POINT', 'OF', 'USE', 'COULD', 'NOT', 'BE', 'BROUGHT', 'TO', 'ACCURATE', 'REGISTRATION'] +2300-131720-0029-1845: ref=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0029-1845: hyp=['HENCE', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'NO', 'LONGER', 'USED', 'DESPITE', 'ITS', 'EXCELLENT', 'QUALITIES'] +2300-131720-0030-1846: ref=['THE', 'PRINCIPLE', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0030-1846: hyp=['THE', 'PRINCIPAL', 'EMPLOYED', 'IN', 'THE', 'EDISON', 'ELECTROLYTIC', 'METER', 'IS', 'THAT', 'WHICH', 'EXEMPLIFIES', 'THE', 'POWER', 'OF', 'ELECTRICITY', 'TO', 'DECOMPOSE', 'A', 'CHEMICAL', 'SUBSTANCE'] +2300-131720-0031-1847: ref=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0031-1847: hyp=['ASSOCIATED', 'WITH', 'THIS', 'SIMPLE', 'FORM', 'OF', 'APPARATUS', 'WERE', 'VARIOUS', 'INGENIOUS', 'DETAILS', 'AND', 'REFINEMENTS', 'TO', 'SECURE', 'REGULARITY', 'OF', 'OPERATION', 'FREEDOM', 'FROM', 'INACCURACY', 'AND', 'IMMUNITY', 'FROM', 'SUCH', 'TAMPERING', 'AS', 'WOULD', 'PERMIT', 'THEFT', 'OF', 'CURRENT', 'OR', 'DAMAGE'] +2300-131720-0032-1848: ref=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0032-1848: hyp=['THE', 'STANDARD', 'EDISON', 'METER', 'PRACTICE', 'WAS', 'TO', 'REMOVE', 'THE', 'CELLS', 'ONCE', 'A', 'MONTH', 'TO', 'THE', 'METER', 'ROOM', 'OF', 'THE', 'CENTRAL', 'STATION', 'COMPANY', 'FOR', 'EXAMINATION', 'ANOTHER', 'SET', 'BEING', 'SUBSTITUTED'] +2300-131720-0033-1849: ref=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METER', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0033-1849: hyp=['IN', 'DECEMBER', 'EIGHTEEN', 'EIGHTY', 'EIGHT', 'MISTER', 'W', 'J', 'JENKS', 'READ', 'AN', 'INTERESTING', 'PAPER', 'BEFORE', 'THE', 'AMERICAN', 'INSTITUTE', 'OF', 'ELECTRICAL', 'ENGINEERS', 'ON', 'THE', 'SIX', 'YEARS', 'OF', 'PRACTICAL', 'EXPERIENCE', 'HAD', 'UP', 'TO', 'THAT', 'TIME', 'WITH', 'THE', 'METRE', 'THEN', 'MORE', 'GENERALLY', 'IN', 'USE', 'THAN', 'ANY', 'OTHER'] +2300-131720-0034-1850: ref=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] 
+2300-131720-0034-1850: hyp=['THE', 'OTHERS', 'HAVING', 'BEEN', 'IN', 'OPERATION', 'TOO', 'SHORT', 'A', 'TIME', 'TO', 'SHOW', 'DEFINITE', 'RESULTS', 'ALTHOUGH', 'THEY', 'ALSO', 'WENT', 'QUICKLY', 'TO', 'A', 'DIVIDEND', 'BASIS'] +2300-131720-0035-1851: ref=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METER'] +2300-131720-0035-1851: hyp=['IN', 'THIS', 'CONNECTION', 'IT', 'SHOULD', 'BE', 'MENTIONED', 'THAT', 'THE', 'ASSOCIATION', 'OF', 'EDISON', 'ILLUMINATING', 'COMPANIES', 'IN', 'THE', 'SAME', 'YEAR', 'ADOPTED', 'RESOLUTIONS', 'UNANIMOUSLY', 'TO', 'THE', 'EFFECT', 'THAT', 'THE', 'EDISON', 'METER', 'WAS', 'ACCURATE', 'AND', 'THAT', 'ITS', 'USE', 'WAS', 'NOT', 'EXPENSIVE', 'FOR', 'STATIONS', 'ABOVE', 'ONE', 'THOUSAND', 'LIGHTS', 'AND', 'THAT', 'THE', 'BEST', 'FINANCIAL', 'RESULTS', 'WERE', 'INVARIABLY', 'SECURED', 'IN', 'A', 'STATION', 'SELLING', 'CURRENT', 'BY', 'METER'] +2300-131720-0036-1852: ref=['THE', 'METER', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0036-1852: hyp=['THE', 'METRE', 'CONTINUED', 'IN', 'GENERAL', 'SERVICE', 'DURING', 'EIGHTEEN', 'NINETY', 'NINE', 'AND', 'PROBABLY', 'UP', 'TO', 'THE', 'CLOSE', 'OF', 'THE', 'CENTURY'] +2300-131720-0037-1853: ref=['HE', 'WEIGHED', 'AND', 'REWEIGHED', 'THE', 'METER', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0037-1853: hyp=['HE', 'WEIGHED', 'AND', 'REWAIED', 'THE', 'METRE', 'PLATES', 'AND', 'PURSUED', 'EVERY', 'LINE', 'OF', 'INVESTIGATION', 'IMAGINABLE', 'BUT', 'ALL', 'IN', 'VAIN'] +2300-131720-0038-1854: ref=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0038-1854: hyp=['HE', 'FELT', 'HE', 'WAS', 'UP', 'AGAINST', 'IT', 'AND', 'THAT', 'PERHAPS', 'ANOTHER', 'KIND', 'OF', 'A', 'JOB', 'WOULD', 'SUIT', 'HIM', 'BETTER'] +2300-131720-0039-1855: ref=['THE', 'PROBLEM', 'WAS', 'SOLVED'] +2300-131720-0039-1855: hyp=['THE', 'PROBLEM', 'WAS', 'SOLVED'] +2300-131720-0040-1856: ref=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0040-1856: hyp=['WE', 'WERE', 'MORE', 'INTERESTED', 'IN', 'THE', 'TECHNICAL', 'CONDITION', 'OF', 'THE', 'STATION', 'THAN', 'IN', 'THE', 'COMMERCIAL', 'PART'] +2300-131720-0041-1857: ref=['WE', 'HAD', 'METERS', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 'LIQUID'] +2300-131720-0041-1857: hyp=['WE', 'HAD', 'METRES', 'IN', 'WHICH', 'THERE', 'WERE', 'TWO', 'BOTTLES', 'OF', 'LIQUID'] +237-126133-0000-2407: ref=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHED', 'AMONG', 'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 
'ALL', 'TIME'] +237-126133-0000-2407: hyp=['HERE', 'SHE', 'WOULD', 'STAY', 'COMFORTED', 'AND', 'SOOTHE', 'AMONG', 'THE', 'LOVELY', 'PLANTS', 'AND', 'RICH', 'EXOTICS', 'REJOICING', 'THE', 'HEART', 'OF', 'OLD', 'TURNER', 'THE', 'GARDENER', 'WHO', 'SINCE', "POLLY'S", 'FIRST', 'RAPTUROUS', 'ENTRANCE', 'HAD', 'TAKEN', 'HER', 'INTO', 'HIS', 'GOOD', 'GRACES', 'FOR', 'ALL', 'TIME'] +237-126133-0001-2408: ref=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0001-2408: hyp=['EVERY', 'CHANCE', 'SHE', 'COULD', 'STEAL', 'AFTER', 'PRACTICE', 'HOURS', 'WERE', 'OVER', 'AND', 'AFTER', 'THE', 'CLAMOROUS', 'DEMANDS', 'OF', 'THE', 'BOYS', 'UPON', 'HER', 'TIME', 'WERE', 'FULLY', 'SATISFIED', 'WAS', 'SEIZED', 'TO', 'FLY', 'ON', 'THE', 'WINGS', 'OF', 'THE', 'WIND', 'TO', 'THE', 'FLOWERS'] +237-126133-0002-2409: ref=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0002-2409: hyp=['THEN', 'DEAR', 'SAID', 'MISSUS', 'WHITNEY', 'YOU', 'MUST', 'BE', 'KINDER', 'TO', 'HER', 'THAN', 'EVER', 'THINK', 'WHAT', 'IT', 'WOULD', 'BE', 'FOR', 'ONE', 'OF', 'YOU', 'TO', 'BE', 'AWAY', 'FROM', 'HOME', 'EVEN', 'AMONG', 'FRIENDS'] +237-126133-0003-2410: ref=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0003-2410: hyp=['SOMEHOW', 'OF', 'ALL', 'THE', 'DAYS', 'WHEN', 'THE', 'HOME', 'FEELING', 'WAS', 'THE', 'STRONGEST', 'THIS', 'DAY', 'IT', 'SEEMED', 'AS', 'IF', 'SHE', 'COULD', 'BEAR', 'IT', 'NO', 'LONGER'] +237-126133-0004-2411: ref=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0004-2411: hyp=['IF', 'SHE', 'COULD', 'ONLY', 'SEE', 'PHRONSIE', 'FOR', 'JUST', 'ONE', 'MOMENT'] +237-126133-0005-2412: ref=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0005-2412: hyp=['OH', "SHE'S", 'ALWAYS', 'AT', 'THE', 'PIANO', 'SAID', 'VAN', 'SHE', 'MUST', 'BE', 'THERE', 'NOW', 'SOMEWHERE', 'AND', 'THEN', 'SOMEBODY', 'LAUGHED'] +237-126133-0006-2413: ref=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0006-2413: hyp=['AT', 'THIS', 'THE', 'BUNDLE', 'OPENED', 'SUDDENLY', 'AND', 'OUT', 'POPPED', 'PHRONSIE'] +237-126133-0007-2414: ref=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] +237-126133-0007-2414: hyp=['BUT', 'POLLY', "COULDN'T", 'SPEAK', 'AND', 'IF', 'JASPER', "HADN'T", 'CAUGHT', 'HER', 'JUST', 'IN', 'TIME', 'SHE', 'WOULD', 'HAVE', 'TUMBLED', 'OVER', 'BACKWARD', 'FROM', 'THE', 'STOOL', 'PHRONSIE', 'AND', 'ALL'] +237-126133-0008-2415: ref=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0008-2415: hyp=['ASKED', 'PHRONSIE', 'WITH', 'HER', 'LITTLE', 'FACE', 'CLOSE', 'TO', "POLLY'S", 'OWN'] +237-126133-0009-2416: 
ref=['NOW', "YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0009-2416: hyp=['NOW', "YOU'LL", 'STAY', 'CRIED', 'VAN', 'SAY', 'POLLY', "WON'T", 'YOU'] +237-126133-0010-2417: ref=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0010-2417: hyp=['OH', 'YOU', 'ARE', 'THE', 'DEAREST', 'AND', 'BEST', 'MISTER', 'KING', 'I', 'EVER', 'SAW', 'BUT', 'HOW', 'DID', 'YOU', 'MAKE', 'MAMMY', 'LET', 'HER', 'COME'] +237-126133-0011-2418: ref=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'IN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0011-2418: hyp=["ISN'T", 'HE', 'SPLENDID', 'CRIED', 'JASPER', 'AN', 'INTENSE', 'PRIDE', 'SWELLING', 'UP', 'FATHER', 'KNEW', 'HOW', 'TO', 'DO', 'IT'] +237-126133-0012-2419: ref=['THERE', 'THERE', 'HE', 'SAID', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0012-2419: hyp=['THERE', 'THERE', 'HE', 'SAT', 'SOOTHINGLY', 'PATTING', 'HER', 'BROWN', 'FUZZY', 'HEAD'] +237-126133-0013-2420: ref=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0013-2420: hyp=['I', 'KNOW', 'GASPED', 'POLLY', 'CONTROLLING', 'HER', 'SOBS', 'I', "WON'T", 'ONLY', 'I', "CAN'T", 'THANK', 'YOU'] +237-126133-0014-2421: ref=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0014-2421: hyp=['ASKED', 'PHRONSIE', 'IN', 'INTENSE', 'INTEREST', 'SLIPPING', 'DOWN', 'OUT', 'OF', "POLLY'S", 'ARMS', 'AND', 'CROWDING', 'UP', 'CLOSE', 'TO', "JASPER'S", 'SIDE'] +237-126133-0015-2422: ref=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0015-2422: hyp=['YES', 'ALL', 'ALONE', 'BY', 'HIMSELF', 'ASSERTED', 'JASPER', 'VEHEMENTLY', 'AND', 'WINKING', 'FURIOUSLY', 'TO', 'THE', 'OTHERS', 'TO', 'STOP', 'THEIR', 'LAUGHING', 'HE', 'DID', 'NOW', 'TRULY', 'PHRONSIE'] +237-126133-0016-2423: ref=['OH', 'NO', 'JASPER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0016-2423: hyp=['OH', 'NO', 'JAPSER', 'I', 'MUST', 'GO', 'BY', 'MY', 'VERY', 'OWN', 'SELF'] +237-126133-0017-2424: ref=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0017-2424: hyp=['THERE', 'JAP', "YOU'VE", 'CAUGHT', 'IT', 'LAUGHED', 'PERCY', 'WHILE', 'THE', 'OTHERS', 'SCREAMED', 'AT', 'THE', 'SIGHT', 'OF', "JASPER'S", 'FACE'] +237-126133-0018-2425: ref=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0018-2425: hyp=["DON'T", 'MIND', 'IT', 'POLLY', 'WHISPERED', 'JASPER', "TWASN'T", 'HER', 'FAULT'] +237-126133-0019-2426: ref=['DEAR', 'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0019-2426: hyp=['DEAR', 'ME', 'EJACULATED', 'THE', 'OLD', 'GENTLEMAN', 'IN', 'THE', 'UTMOST', 'AMAZEMENT', 'AND', 'SUCH', 'A', 'TIME', 'AS', "I'VE", 'HAD', 'TO', 'GET', 'HER', 'HERE', 'TOO'] +237-126133-0020-2427: ref=['HOW', 'DID', 'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0020-2427: hyp=['HOW', 'DID', 
'HER', 'MOTHER', 'EVER', 'LET', 'HER', 'GO'] +237-126133-0021-2428: ref=['SHE', 'ASKED', 'IMPULSIVELY', 'I', "DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0021-2428: hyp=['SHE', 'ASKED', 'IMPULSIVELY', 'I', "DIDN'T", 'BELIEVE', 'YOU', 'COULD', 'PERSUADE', 'HER', 'FATHER'] +237-126133-0022-2429: ref=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0022-2429: hyp=['I', "DIDN'T", 'HAVE', 'ANY', 'FEARS', 'IF', 'I', 'WORKED', 'IT', 'RIGHTLY', 'SAID', 'THE', 'OLD', 'GENTLEMAN', 'COMPLACENTLY'] +237-126133-0023-2430: ref=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0023-2430: hyp=['HE', 'CRIED', 'IN', 'HIGH', 'DUDGEON', 'JUST', 'AS', 'IF', 'HE', 'OWNED', 'THE', 'WHOLE', 'OF', 'THE', 'PEPPERS', 'AND', 'COULD', 'DISPOSE', 'OF', 'THEM', 'ALL', 'TO', 'SUIT', 'HIS', 'FANCY'] +237-126133-0024-2431: ref=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0024-2431: hyp=['AND', 'THE', 'OLD', 'GENTLEMAN', 'WAS', 'SO', 'DELIGHTED', 'WITH', 'HIS', 'SUCCESS', 'THAT', 'HE', 'HAD', 'TO', 'BURST', 'OUT', 'INTO', 'A', 'SERIES', 'OF', 'SHORT', 'HAPPY', 'BITS', 'OF', 'LAUGHTER', 'THAT', 'OCCUPIED', 'QUITE', 'A', 'SPACE', 'OF', 'TIME'] +237-126133-0025-2432: ref=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-126133-0025-2432: hyp=['AT', 'LAST', 'HE', 'CAME', 'OUT', 'OF', 'THEM', 'AND', 'WIPED', 'HIS', 'FACE', 'VIGOROUSLY'] +237-134493-0000-2388: ref=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERGSON', 'DIED'] +237-134493-0000-2388: hyp=['IT', 'IS', 'SIXTEEN', 'YEARS', 'SINCE', 'JOHN', 'BERKES', 'AND', 'DIED'] +237-134493-0001-2389: ref=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0001-2389: hyp=['HIS', 'WIFE', 'NOW', 'LIES', 'BESIDE', 'HIM', 'AND', 'THE', 'WHITE', 'SHAFT', 'THAT', 'MARKS', 'THEIR', 'GRAVES', 'GLEAMS', 'ACROSS', 'THE', 'WHEAT', 'FIELDS'] +237-134493-0002-2390: ref=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHECKER', 'BOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'DARK', 'AND', 'LIGHT'] +237-134493-0002-2390: hyp=['FROM', 'THE', 'NORWEGIAN', 'GRAVEYARD', 'ONE', 'LOOKS', 'OUT', 'OVER', 'A', 'VAST', 'CHEQUER', 'BOARD', 'MARKED', 'OFF', 'IN', 'SQUARES', 'OF', 'WHEAT', 'AND', 'CORN', 'LIGHT', 'AND', 'DARK', 'AND', 'LIGHT'] +237-134493-0003-2391: ref=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAYLY', 'PAINTED', 'FARMHOUSES', 'THE', 'GILDED', 'WEATHER', 'VANES', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] +237-134493-0003-2391: hyp=['FROM', 'THE', 'GRAVEYARD', 'GATE', 'ONE', 'CAN', 'COUNT', 'A', 'DOZEN', 'GAILY', 'PAINTED', 'FARMHOUSES', 'THE', 'GILDED', 'WEATHER', 'VEINS', 'ON', 'THE', 'BIG', 'RED', 'BARNS', 'WINK', 'AT', 'EACH', 'OTHER', 'ACROSS', 'THE', 'GREEN', 'AND', 'BROWN', 'AND', 'YELLOW', 'FIELDS'] 
+237-134493-0004-2392: ref=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0004-2392: hyp=['THE', 'AIR', 'AND', 'THE', 'EARTH', 'ARE', 'CURIOUSLY', 'MATED', 'AND', 'INTERMINGLED', 'AS', 'IF', 'THE', 'ONE', 'WERE', 'THE', 'BREATH', 'OF', 'THE', 'OTHER'] +237-134493-0005-2393: ref=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GRAY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0005-2393: hyp=['HE', 'WAS', 'A', 'SPLENDID', 'FIGURE', 'OF', 'A', 'BOY', 'TALL', 'AND', 'STRAIGHT', 'AS', 'A', 'YOUNG', 'PINE', 'TREE', 'WITH', 'A', 'HANDSOME', 'HEAD', 'AND', 'STORMY', 'GRAY', 'EYES', 'DEEPLY', 'SET', 'UNDER', 'A', 'SERIOUS', 'BROW'] +237-134493-0006-2394: ref=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0006-2394: hyp=["THAT'S", 'NOT', 'MUCH', 'OF', 'A', 'JOB', 'FOR', 'AN', 'ATHLETE', 'HERE', "I'VE", 'BEEN', 'TO', 'TOWN', 'AND', 'BACK'] +237-134493-0007-2395: ref=['ALEXANDRA', 'LETS', 'YOU', 'SLEEP', 'LATE'] +237-134493-0007-2395: hyp=['ALEXANDRA', "THAT'S", 'YOU', 'SLEEP', 'LATE'] +237-134493-0008-2396: ref=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0008-2396: hyp=['SHE', 'GATHERED', 'UP', 'HER', 'REINS'] +237-134493-0009-2397: ref=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'EMIL', 'COAXED'] +237-134493-0009-2397: hyp=['PLEASE', 'WAIT', 'FOR', 'ME', 'MARIE', 'AMYL', 'COAXED'] +237-134493-0010-2398: ref=['I', 'NEVER', 'SEE', "LOU'S", 'SCYTHE', 'OVER', 'HERE'] +237-134493-0010-2398: hyp=['I', 'NEVER', 'SEE', 'LOOSE', 'SCYTHE', 'OVER', 'HERE'] +237-134493-0011-2399: ref=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ATHLETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0011-2399: hyp=['HOW', 'BROWN', "YOU'VE", 'GOT', 'SINCE', 'YOU', 'CAME', 'HOME', 'I', 'WISH', 'I', 'HAD', 'AN', 'ADETE', 'TO', 'MOW', 'MY', 'ORCHARD'] +237-134493-0012-2400: ref=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PICK', 'CHERRIES'] +237-134493-0012-2400: hyp=['I', 'GET', 'WET', 'TO', 'MY', 'KNEES', 'WHEN', 'I', 'GO', 'DOWN', 'TO', 'PIC', 'CHERRIES'] +237-134493-0013-2401: ref=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0013-2401: hyp=['INDEED', 'HE', 'HAD', 'LOOKED', 'AWAY', 'WITH', 'THE', 'PURPOSE', 'OF', 'NOT', 'SEEING', 'IT'] +237-134493-0014-2402: ref=['THEY', 'THINK', "YOU'RE", 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0014-2402: hyp=['THEY', 'THINK', 'YOU', 'ARE', 'PROUD', 'BECAUSE', "YOU'VE", 'BEEN', 'AWAY', 'TO', 'SCHOOL', 'OR', 'SOMETHING'] +237-134493-0015-2403: ref=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0015-2403: hyp=['THERE', 'WAS', 'SOMETHING', 'INDIVIDUAL', 'ABOUT', 'THE', 'GREAT', 'FARM', 'A', 'MOST', 'UNUSUAL', 'TRIMNESS', 'AND', 'CARE', 'FOR', 'DETAIL'] +237-134493-0016-2404: ref=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'OSAGE', 'ORANGE', 'HEDGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] 
+237-134493-0016-2404: hyp=['ON', 'EITHER', 'SIDE', 'OF', 'THE', 'ROAD', 'FOR', 'A', 'MILE', 'BEFORE', 'YOU', 'REACHED', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'STOOD', 'TALL', 'O', 'SAGE', 'ORANGE', 'HEDGES', 'THEIR', 'GLOSSY', 'GREEN', 'MARKING', 'OFF', 'THE', 'YELLOW', 'FIELDS'] +237-134493-0017-2405: ref=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGSON'] +237-134493-0017-2405: hyp=['ANY', 'ONE', 'THEREABOUTS', 'WOULD', 'HAVE', 'TOLD', 'YOU', 'THAT', 'THIS', 'WAS', 'ONE', 'OF', 'THE', 'RICHEST', 'FARMS', 'ON', 'THE', 'DIVIDE', 'AND', 'THAT', 'THE', 'FARMER', 'WAS', 'A', 'WOMAN', 'ALEXANDRA', 'BERGIN'] +237-134493-0018-2406: ref=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEEHIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134493-0018-2406: hyp=['THERE', 'IS', 'EVEN', 'A', 'WHITE', 'ROW', 'OF', 'BEEHIVES', 'IN', 'THE', 'ORCHARD', 'UNDER', 'THE', 'WALNUT', 'TREES'] +237-134500-0000-2345: ref=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0000-2345: hyp=['FRANK', 'READ', 'ENGLISH', 'SLOWLY', 'AND', 'THE', 'MORE', 'HE', 'READ', 'ABOUT', 'THIS', 'DIVORCE', 'CASE', 'THE', 'ANGRIER', 'HE', 'GREW'] +237-134500-0001-2346: ref=['MARIE', 'SIGHED'] +237-134500-0001-2346: hyp=['MARIE', 'SIGHED'] +237-134500-0002-2347: ref=['A', 'BRISK', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0002-2347: hyp=['A', 'BRACE', 'WIND', 'HAD', 'COME', 'UP', 'AND', 'WAS', 'DRIVING', 'PUFFY', 'WHITE', 'CLOUDS', 'ACROSS', 'THE', 'SKY'] +237-134500-0003-2348: ref=['THE', 'ORCHARD', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0003-2348: hyp=['THE', 'ARCHER', 'WAS', 'SPARKLING', 'AND', 'RIPPLING', 'IN', 'THE', 'SUN'] +237-134500-0004-2349: ref=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0004-2349: hyp=['THAT', 'INVITATION', 'DECIDED', 'HER'] +237-134500-0005-2350: ref=['OH', 'BUT', "I'M", 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0005-2350: hyp=['OH', 'BUT', 'I', 'AM', 'GLAD', 'TO', 'GET', 'THIS', 'PLACE', 'MOWED'] +237-134500-0006-2351: ref=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0006-2351: hyp=['JUST', 'SMELL', 'THE', 'WILD', 'ROSES', 'THEY', 'ARE', 'ALWAYS', 'SO', 'SPICY', 'AFTER', 'A', 'RAIN'] +237-134500-0007-2352: ref=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0007-2352: hyp=['WE', 'NEVER', 'HAD', 'SO', 'MANY', 'OF', 'THEM', 'IN', 'HERE', 'BEFORE'] +237-134500-0008-2353: ref=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 'TOO'] +237-134500-0008-2353: hyp=['I', 'SUPPOSE', "IT'S", 'THE', 'WET', 'SEASON', 'WILL', 'YOU', 'HAVE', 'TO', 'CUT', 'THEM', 'TOO'] +237-134500-0009-2354: ref=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0009-2354: hyp=['I', 'SUPPOSE', "THAT'S", 'THE', 'WET', 'SEASON', 'TOO', 'THEN'] +237-134500-0010-2355: ref=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] +237-134500-0010-2355: hyp=["IT'S", 'EXCITING', 'TO', 'SEE', 'EVERYTHING', 'GROWING', 'SO', 'FAST', 'AND', 'TO', 'GET', 'THE', 'GRASS', 'CUT'] 
+237-134500-0011-2356: ref=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0011-2356: hyp=["AREN'T", 'YOU', 'SPLASHED', 'LOOK', 'AT', 'THE', 'SPIDER', 'WEBS', 'ALL', 'OVER', 'THE', 'GRASS'] +237-134500-0012-2357: ref=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0012-2357: hyp=['IN', 'A', 'FEW', 'MOMENTS', 'HE', 'HEARD', 'THE', 'CHERRIES', 'DROPPING', 'SMARTLY', 'INTO', 'THE', 'PAIL', 'AND', 'HE', 'BEGAN', 'TO', 'SWING', 'HIS', 'SCYTHE', 'WITH', 'THAT', 'LONG', 'EVEN', 'STROKE', 'THAT', 'FEW', 'AMERICAN', 'BOYS', 'EVER', 'LEARN'] +237-134500-0013-2358: ref=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'BRANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'CAUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0013-2358: hyp=['MARIE', 'PICKED', 'CHERRIES', 'AND', 'SANG', 'SOFTLY', 'TO', 'HERSELF', 'STRIPPING', 'ONE', 'GLITTERING', 'RANCH', 'AFTER', 'ANOTHER', 'SHIVERING', 'WHEN', 'SHE', 'THOUGHT', 'A', 'SHOWER', 'OF', 'RAINDROPS', 'ON', 'HER', 'NECK', 'AND', 'HAIR'] +237-134500-0014-2359: ref=['AND', 'EMIL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0014-2359: hyp=['AND', 'AMYL', 'MOWED', 'HIS', 'WAY', 'SLOWLY', 'DOWN', 'TOWARD', 'THE', 'CHERRY', 'TREES'] +237-134500-0015-2360: ref=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHABATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0015-2360: hyp=['THAT', 'SUMMER', 'THE', 'RAINS', 'HAD', 'BEEN', 'SO', 'MANY', 'AND', 'OPPORTUNE', 'THAT', 'IT', 'WAS', 'ALMOST', 'MORE', 'THAN', 'SHEBATA', 'AND', 'HIS', 'MAN', 'COULD', 'DO', 'TO', 'KEEP', 'UP', 'WITH', 'THE', 'CORN', 'THE', 'ORCHARD', 'WAS', 'A', 'NEGLECTED', 'WILDERNESS'] +237-134500-0016-2361: ref=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0016-2361: hyp=['I', "DON'T", 'KNOW', 'ALL', 'OF', 'THEM', 'BUT', 'I', 'KNOW', 'LINDENS', 'ARE'] +237-134500-0017-2362: ref=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0017-2362: hyp=['IF', 'I', 'FEEL', 'THAT', 'WAY', 'I', 'FEEL', 'THAT', 'WAY'] +237-134500-0018-2363: ref=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0018-2363: hyp=['HE', 'REACHED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'AND', 'BEGAN', 'TO', 'PICK', 'THE', 'SWEET', 'INSIPID', 'FRUIT', 'LONG', 'IVORY', 'COLORED', 'BERRIES', 'TIPPED', 'WITH', 'FAINT', 'PINK', 'LIKE', 'WHITE', 'CORAL', 'THAT', 'FALL', 'TO', 'THE', 'GROUND', 'UNHEEDED', 'ALL', 'SUMMER', 'THROUGH'] +237-134500-0019-2364: ref=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0019-2364: hyp=['HE', 'DROPPED', 'A', 'HANDFUL', 'INTO', 'HER', 'LAP'] +237-134500-0020-2365: ref=['YES', "DON'T", 'YOU'] +237-134500-0020-2365: hyp=['YES', "DON'T", 'YOU'] 
+237-134500-0021-2366: ref=['OH', 'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAID', 'AND', 'SCHOOL', 'TEACHERY'] +237-134500-0021-2366: hyp=['OH', 'EVER', 'SO', 'MUCH', 'ONLY', 'HE', 'SEEMS', 'KIND', 'OF', 'STAY', 'AT', 'IN', 'SCHOOL', 'TEACHERY'] +237-134500-0022-2367: ref=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0022-2367: hyp=['WHEN', 'SHE', 'USED', 'TO', 'TELL', 'ME', 'ABOUT', 'HIM', 'I', 'ALWAYS', 'WONDERED', 'WHETHER', 'SHE', "WASN'T", 'A', 'LITTLE', 'IN', 'LOVE', 'WITH', 'HIM'] +237-134500-0023-2368: ref=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'CARL'] +237-134500-0023-2368: hyp=['IT', 'WOULD', 'SERVE', 'YOU', 'ALL', 'RIGHT', 'IF', 'SHE', 'WALKED', 'OFF', 'WITH', 'KARL'] +237-134500-0024-2369: ref=['I', 'LIKE', 'TO', 'TALK', 'TO', 'CARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0024-2369: hyp=['I', 'LIKE', 'TO', 'TALK', 'TO', 'KARL', 'ABOUT', 'NEW', 'YORK', 'AND', 'WHAT', 'A', 'FELLOW', 'CAN', 'DO', 'THERE'] +237-134500-0025-2370: ref=['OH', 'EMIL'] +237-134500-0025-2370: hyp=['OH', 'AMY', 'ILL'] +237-134500-0026-2371: ref=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0026-2371: hyp=['SURELY', 'YOU', 'ARE', 'NOT', 'THINKING', 'OF', 'GOING', 'OFF', 'THERE'] +237-134500-0027-2372: ref=["MARIE'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0027-2372: hyp=["MARIE'S", 'FACE', 'FELL', 'UNDER', 'HIS', 'BROODING', 'GAZE'] +237-134500-0028-2373: ref=["I'M", 'SURE', 'ALEXANDRA', 'HOPES', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0028-2373: hyp=['I', 'AM', 'SURE', 'ALEXANDER', 'HELPS', 'YOU', 'WILL', 'STAY', 'ON', 'HERE', 'SHE', 'MURMURED'] +237-134500-0029-2374: ref=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0029-2374: hyp=['I', "DON'T", 'WANT', 'TO', 'STAND', 'AROUND', 'AND', 'LOOK', 'ON'] +237-134500-0030-2375: ref=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0030-2375: hyp=['I', 'WANT', 'TO', 'BE', 'DOING', 'SOMETHING', 'ON', 'MY', 'OWN', 'ACCOUNT'] +237-134500-0031-2376: ref=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLE', 'CLOTH'] +237-134500-0031-2376: hyp=['SOMETIMES', 'I', "DON'T", 'WANT', 'TO', 'DO', 'ANYTHING', 'AT', 'ALL', 'AND', 'SOMETIMES', 'I', 'WANT', 'TO', 'PULL', 'THE', 'FOUR', 'CORNERS', 'OF', 'THE', 'DIVIDE', 'TOGETHER', 'HE', 'THREW', 'OUT', 'HIS', 'ARM', 'AND', 'BROUGHT', 'IT', 'BACK', 'WITH', 'A', 'JERK', 'SO', 'LIKE', 'A', 'TABLECLOTH'] +237-134500-0032-2377: ref=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MEN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 'AND', 'DOWN'] +237-134500-0032-2377: hyp=['I', 'GET', 'TIRED', 'OF', 'SEEING', 'MAN', 'AND', 'HORSES', 'GOING', 'UP', 'AND', 'DOWN', 'UP', 'AND', 'DOWN'] +237-134500-0033-2378: ref=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 'SAID', 'SADLY'] +237-134500-0033-2378: hyp=['I', 'WISH', 'YOU', "WEREN'T", 'SO', 'RESTLESS', 'AND', "DIDN'T", 'GET', 'SO', 'WORKED', 'UP', 'OVER', 'THINGS', 'SHE', 
'SAID', 'SADLY'] +237-134500-0034-2379: ref=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] +237-134500-0034-2379: hyp=['THANK', 'YOU', 'HE', 'RETURNED', 'SHORTLY'] +237-134500-0035-2380: ref=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0035-2380: hyp=['AND', 'YOU', 'NEVER', 'USED', 'TO', 'BE', 'CROSS', 'TO', 'ME'] +237-134500-0036-2381: ref=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARIE'] +237-134500-0036-2381: hyp=['I', "CAN'T", 'PLAY', 'WITH', 'YOU', 'LIKE', 'A', 'LITTLE', 'BOY', 'ANY', 'MORE', 'HE', 'SAID', 'SLOWLY', "THAT'S", 'WHAT', 'YOU', 'MISS', 'MARIE'] +237-134500-0037-2382: ref=['BUT', 'EMIL', 'IF', 'I', 'UNDERSTAND', 'THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0037-2382: hyp=['BUT', 'AM', 'ILL', 'IF', 'I', 'UNDERSTAND', 'IN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER', 'WE', 'CAN', 'NEVER', 'DO', 'NICE', 'THINGS', 'TOGETHER', 'ANY', 'MORE'] +237-134500-0038-2383: ref=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0038-2383: hyp=['AND', 'ANYHOW', "THERE'S", 'NOTHING', 'TO', 'UNDERSTAND'] +237-134500-0039-2384: ref=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0039-2384: hyp=['THAT', "WON'T", 'LAST', 'IT', 'WILL', 'GO', 'AWAY', 'AND', 'THINGS', 'WILL', 'BE', 'JUST', 'AS', 'THEY', 'USED', 'TO'] +237-134500-0040-2385: ref=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0040-2385: hyp=['I', 'PRAY', 'FOR', 'YOU', 'BUT', "THAT'S", 'NOT', 'THE', 'SAME', 'AS', 'IF', 'YOU', 'PRAYED', 'YOURSELF'] +237-134500-0041-2386: ref=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0041-2386: hyp=['I', "CAN'T", 'PRAY', 'TO', 'HAVE', 'THE', 'THINGS', 'I', 'WANT', 'HE', 'SAID', 'SLOWLY', 'AND', 'I', "WON'T", 'PRAY', 'NOT', 'TO', 'HAVE', 'THEM', 'NOT', 'IF', "I'M", 'DAMNED', 'FOR', 'IT'] +237-134500-0042-2387: ref=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +237-134500-0042-2387: hyp=['THEN', 'ALL', 'OUR', 'GOOD', 'TIMES', 'ARE', 'OVER'] +260-123286-0000-200: ref=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0000-200: hyp=['SATURDAY', 'AUGUST', 'FIFTEENTH', 'THE', 'SEA', 'UNBROKEN', 'ALL', 'ROUND', 'NO', 'LAND', 'IN', 'SIGHT'] +260-123286-0001-201: ref=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0001-201: hyp=['THE', 'HORIZON', 'SEEMS', 'EXTREMELY', 'DISTANT'] +260-123286-0002-202: ref=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 'RESUMED', 'ITS', 'SWAY'] +260-123286-0002-202: hyp=['ALL', 'MY', 'DANGER', 'AND', 'SUFFERINGS', 'WERE', 'NEEDED', 'TO', 'STRIKE', 'A', 'SPARK', 'OF', 'HUMAN', 'FEELING', 'OUT', 'OF', 'HIM', 'BUT', 'NOW', 'THAT', 'I', 'AM', 'WELL', 'HIS', 'NATURE', 'HAS', 'RESUMED', 'ITS', 'SWAY'] +260-123286-0003-203: ref=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] 
+260-123286-0003-203: hyp=['YOU', 'SEEM', 'ANXIOUS', 'MY', 'UNCLE', 'I', 'SAID', 'SEEING', 'HIM', 'CONTINUALLY', 'WITH', 'HIS', 'GLASS', 'TO', 'HIS', 'EYE', 'ANXIOUS'] +260-123286-0004-204: ref=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0004-204: hyp=['ONE', 'MIGHT', 'BE', 'WITH', 'LESS', 'REASON', 'THAN', 'NOW'] +260-123286-0005-205: ref=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0005-205: hyp=['I', 'AM', 'NOT', 'COMPLAINING', 'THAT', 'THE', 'RATE', 'IS', 'SLOW', 'BUT', 'THAT', 'THE', 'SEA', 'IS', 'SO', 'WIDE'] +260-123286-0006-206: ref=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0006-206: hyp=['WE', 'ARE', 'LOSING', 'TIME', 'AND', 'THE', 'FACT', 'IS', 'I', 'HAVE', 'NOT', 'COME', 'ALL', 'THIS', 'WAY', 'TO', 'TAKE', 'A', 'LITTLE', 'SAIL', 'UPON', 'A', 'POND', 'ON', 'A', 'RAFT'] +260-123286-0007-207: ref=['HE', 'CALLED', 'THIS', 'SEA', 'A', 'POND', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0007-207: hyp=['HE', 'CALLED', 'THIS', 'SEA', 'UPON', 'AND', 'OUR', 'LONG', 'VOYAGE', 'TAKING', 'A', 'LITTLE', 'SAIL'] +260-123286-0008-208: ref=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0008-208: hyp=['THEREFORE', "DON'T", 'TALK', 'TO', 'ME', 'ABOUT', 'VIEWS', 'AND', 'PROSPECTS'] +260-123286-0009-209: ref=['I', 'TAKE', 'THIS', 'AS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0009-209: hyp=['I', 'TAKE', 'THIS', 'IS', 'MY', 'ANSWER', 'AND', 'I', 'LEAVE', 'THE', 'PROFESSOR', 'TO', 'BITE', 'HIS', 'LIPS', 'WITH', 'IMPATIENCE'] +260-123286-0010-210: ref=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0010-210: hyp=['SUNDAY', 'AUGUST', 'SIXTEENTH'] +260-123286-0011-211: ref=['NOTHING', 'NEW', 'WEATHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0011-211: hyp=['NOTHING', 'NEW', 'WHETHER', 'UNCHANGED', 'THE', 'WIND', 'FRESHENS'] +260-123286-0012-212: ref=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'TO', 'FEAR'] +260-123286-0012-212: hyp=['BUT', 'THERE', 'SEEMED', 'NO', 'REASON', 'OF', 'FEAR'] +260-123286-0013-213: ref=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0013-213: hyp=['THE', 'SHADOW', 'OF', 'THE', 'RAFT', 'WAS', 'CLEARLY', 'OUTLINED', 'UPON', 'THE', 'SURFACE', 'OF', 'THE', 'WAVES'] +260-123286-0014-214: ref=['TRULY', 'THIS', 'SEA', 'IS', 'OF', 'INFINITE', 'WIDTH'] +260-123286-0014-214: hyp=['TRULY', 'THE', 'SEA', 'IS', 'OF', 'INFINITE', 'WIDTH'] +260-123286-0015-215: ref=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0015-215: hyp=['IT', 'MUST', 'BE', 'AS', 'WIDE', 'AS', 'THE', 'MEDITERRANEAN', 'OR', 'THE', 'ATLANTIC', 'AND', 'WHY', 'NOT'] +260-123286-0016-216: ref=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0016-216: hyp=['THESE', 'THOUGHTS', 'AGITATED', 'ME', 'ALL', 'DAY', 'AND', 'MY', 'IMAGINATION', 'SCARCELY', 'CALMED', 'DOWN', 'AFTER', 'SEVERAL', 'HOURS', 'SLEEP'] +260-123286-0017-217: ref=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] 
+260-123286-0017-217: hyp=['I', 'SHUDDER', 'AS', 'I', 'RECALL', 'THESE', 'MONSTERS', 'TO', 'MY', 'REMEMBRANCE'] +260-123286-0018-218: ref=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0018-218: hyp=['I', 'SAW', 'AT', 'THE', 'HAMBURG', 'MUSEUM', 'THE', 'SKELETON', 'OF', 'ONE', 'OF', 'THESE', 'CREATURES', 'THIRTY', 'FEET', 'IN', 'LENGTH'] +260-123286-0019-219: ref=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PICK', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0019-219: hyp=['I', 'SUPPOSE', 'PROFESSOR', 'LIEDENBROCK', 'WAS', 'OF', 'MY', 'OPINION', 'TOO', 'AND', 'EVEN', 'SHARED', 'MY', 'FEARS', 'FOR', 'AFTER', 'HAVING', 'EXAMINED', 'THE', 'PIG', 'HIS', 'EYES', 'TRAVERSED', 'THE', 'OCEAN', 'FROM', 'SIDE', 'TO', 'SIDE'] +260-123286-0020-220: ref=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0020-220: hyp=['TUESDAY', 'AUGUST', 'EIGHTEENTH'] +260-123286-0021-221: ref=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0021-221: hyp=['DURING', 'HIS', 'WATCH', 'I', 'SLEPT'] +260-123286-0022-222: ref=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0022-222: hyp=['TWO', 'HOURS', 'AFTERWARDS', 'A', 'TERRIBLE', 'SHOCK', 'AWOKE', 'ME'] +260-123286-0023-223: ref=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0023-223: hyp=['THE', 'RAFT', 'WAS', 'HEAVED', 'UP', 'ON', 'A', 'WATERY', 'MOUNTAIN', 'AND', 'PITCHED', 'DOWN', 'AGAIN', 'AT', 'A', 'DISTANCE', 'OF', 'TWENTY', 'FATHOMS'] +260-123286-0024-224: ref=["THERE'S", 'A', 'WHALE', 'A', 'WHALE', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0024-224: hyp=["THERE'S", 'A', 'WAIL', 'A', 'WELL', 'CRIED', 'THE', 'PROFESSOR'] +260-123286-0025-225: ref=['FLIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0025-225: hyp=['FIGHT', 'WAS', 'OUT', 'OF', 'THE', 'QUESTION', 'NOW', 'THE', 'REPTILES', 'ROSE', 'THEY', 'WHEELED', 'AROUND', 'OUR', 'LITTLE', 'RAFT', 'WITH', 'A', 'RAPIDITY', 'GREATER', 'THAN', 'THAT', 'OF', 'EXPRESS', 'TRAINS'] +260-123286-0026-226: ref=['TWO', 'MONSTERS', 'ONLY', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'ARE', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0026-226: hyp=['TWO', 'MONSTERS', 'OMER', 'WERE', 'CREATING', 'ALL', 'THIS', 'COMMOTION', 'AND', 'BEFORE', 'MY', 'EYES', 'OUR', 'TWO', 'REPTILES', 'OF', 'THE', 'PRIMITIVE', 'WORLD'] +260-123286-0027-227: ref=['I', 'CAN', 'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ICHTHYOSAURUS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'COAL', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0027-227: hyp=['I', 'CAN', 'DISTINGUISH', 'THE', 'EYE', 'OF', 'THE', 'ITHUSORIS', 'GLOWING', 'LIKE', 'A', 'RED', 'HOT', 'CO', 'AND', 'AS', 'LARGE', 'AS', 'A', "MAN'S", 'HEAD'] +260-123286-0028-228: ref=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0028-228: hyp=['ITS', 'JAW', 'IS', 'ENORMOUS', 'AND', 'ACCORDING', 'TO', 
'NATURALISTS', 'IT', 'IS', 'ARMED', 'WITH', 'NO', 'LESS', 'THAN', 'ONE', 'HUNDRED', 'AND', 'EIGHTY', 'TWO', 'TEETH'] +260-123286-0029-229: ref=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0029-229: hyp=['THOSE', 'HUGE', 'CREATURES', 'ATTACKED', 'EACH', 'OTHER', 'WITH', 'THE', 'GREATEST', 'ANIMOSITY'] +260-123286-0030-230: ref=['SUDDENLY', 'THE', 'ICHTHYOSAURUS', 'AND', 'THE', 'PLESIOSAURUS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WHIRLPOOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0030-230: hyp=['SUDDENLY', 'THE', 'IDEAS', 'AND', 'THE', 'PLESIOSAURUS', 'DISAPPEAR', 'BELOW', 'LEAVING', 'A', 'WAR', 'POOL', 'EDDYING', 'IN', 'THE', 'WATER'] +260-123286-0031-231: ref=['AS', 'FOR', 'THE', 'ICHTHYOSAURUS', 'HAS', 'HE', 'RETURNED', 'TO', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123286-0031-231: hyp=['AS', 'FOR', 'THE', 'ITHUSORIS', 'HAS', 'HE', 'RETURNED', 'HIS', 'SUBMARINE', 'CAVERN'] +260-123288-0000-232: ref=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0000-232: hyp=['THE', 'ROARINGS', 'BECOME', 'LOST', 'IN', 'THE', 'DISTANCE'] +260-123288-0001-233: ref=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THAT', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0001-233: hyp=['THE', 'WEATHER', 'IF', 'WE', 'MAY', 'USE', 'THE', 'TERM', 'WILL', 'CHANGE', 'BEFORE', 'LONG'] +260-123288-0002-234: ref=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPOURS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SALINE', 'WATERS'] +260-123288-0002-234: hyp=['THE', 'ATMOSPHERE', 'IS', 'CHARGED', 'WITH', 'VAPORS', 'PERVADED', 'WITH', 'THE', 'ELECTRICITY', 'GENERATED', 'BY', 'THE', 'EVAPORATION', 'OF', 'SAILING', 'WATERS'] +260-123288-0003-235: ref=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'THROUGH', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'HAS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0003-235: hyp=['THE', 'ELECTRIC', 'LIGHT', 'CAN', 'SCARCELY', 'PENETRATE', 'TO', 'THE', 'DENSE', 'CURTAIN', 'WHICH', 'IS', 'DROPPED', 'OVER', 'THE', 'THEATRE', 'ON', 'WHICH', 'THE', 'BATTLE', 'OF', 'THE', 'ELEMENTS', 'IS', 'ABOUT', 'TO', 'BE', 'WAGED'] +260-123288-0004-236: ref=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0004-236: hyp=['THE', 'AIR', 'IS', 'HEAVY', 'THE', 'SEA', 'IS', 'CALM'] +260-123288-0005-237: ref=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MIST', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GREY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0005-237: hyp=['FROM', 'TIME', 'TO', 'TIME', 'A', 'FLEECY', 'TUFT', 'OF', 'MISTS', 'WITH', 'YET', 'SOME', 'GLEAMING', 'LIGHT', 'LEFT', 'UPON', 'IT', 'DROPS', 'DOWN', 'UPON', 'THE', 'DENSE', 'FLOOR', 'OF', 'GRAY', 'AND', 'LOSES', 'ITSELF', 'IN', 'THE', 'OPAQUE', 'AND', 'IMPENETRABLE', 'MASS'] +260-123288-0006-238: ref=['THE', 'ATMOSPHERE', 'IS', 'EVIDENTLY', 'CHARGED', 'AND', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0006-238: hyp=['THE', 'ATMOSPHERE', 'AS', 'EVIDENTLY', 'CHARGED', 'IN', 'SURCHARGED', 'WITH', 'ELECTRICITY'] +260-123288-0007-239: ref=['THE', 'WIND', 'NEVER', 'LULLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 
'RUSHING', 'STORMS'] +260-123288-0007-239: hyp=['THE', 'WIND', 'NEVER', 'LOLLS', 'BUT', 'TO', 'ACQUIRE', 'INCREASED', 'STRENGTH', 'THE', 'VAST', 'BANK', 'OF', 'HEAVY', 'CLOUDS', 'IS', 'A', 'HUGE', 'RESERVOIR', 'OF', 'FEARFUL', 'WINDY', 'GUSTS', 'AND', 'RUSHING', 'STORMS'] +260-123288-0008-240: ref=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0008-240: hyp=["THERE'S", 'A', 'HEAVY', 'STORM', 'COMING', 'ON', 'I', 'CRIED', 'POINTING', 'TOWARDS', 'THE', 'HORIZON'] +260-123288-0009-241: ref=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0009-241: hyp=['THOSE', 'CLOUDS', 'SEEM', 'AS', 'IF', 'THEY', 'WERE', 'GOING', 'TO', 'CRUSH', 'THE', 'SEA'] +260-123288-0010-242: ref=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMBENT', 'SAINT', "ELMO'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0010-242: hyp=['ON', 'THE', 'MAST', 'ALREADY', 'I', 'SEE', 'THE', 'LIGHT', 'PLAY', 'OF', 'A', 'LAMENT', 'SAINT', "ABLE'S", 'FIRE', 'THE', 'OUTSTRETCHED', 'SAIL', 'CATCHES', 'NOT', 'A', 'BREATH', 'OF', 'WIND', 'AND', 'HANGS', 'LIKE', 'A', 'SHEET', 'OF', 'LEAD'] +260-123288-0011-243: ref=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'THE', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0011-243: hyp=['BUT', 'IF', 'WE', 'HAVE', 'NOW', 'CEASED', 'TO', 'ADVANCE', 'WHY', 'DO', 'WE', 'YET', 'LEAVE', 'THAT', 'SAIL', 'LOOSE', 'WHICH', 'AT', 'THE', 'FIRST', 'SHOCK', 'OF', 'A', 'TEMPEST', 'MAY', 'CAPSIZE', 'US', 'IN', 'A', 'MOMENT'] +260-123288-0012-244: ref=['THAT', 'WILL', 'BE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0012-244: hyp=['THAT', 'WILL', 'BE', 'THE', 'SAFEST', 'NO', 'NO', 'NEVER'] +260-123288-0013-245: ref=['THE', 'PILED', 'UP', 'VAPOURS', 'CONDENSE', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MISTS', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0013-245: hyp=['THEY', 'PILED', 'UP', 'VAPORS', 'CONTENSED', 'INTO', 'WATER', 'AND', 'THE', 'AIR', 'PUT', 'INTO', 'VIOLENT', 'ACTION', 'TO', 'SUPPLY', 'THE', 'VACUUM', 'LEFT', 'BY', 'THE', 'CONDENSATION', 'OF', 'THE', 'MIST', 'ROUSES', 'ITSELF', 'INTO', 'A', 'WHIRLWIND'] +260-123288-0014-246: ref=['HANS', 'STIRS', 'NOT'] +260-123288-0014-246: hyp=['HANS', 'STIRS', 'NOT'] +260-123288-0015-247: ref=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'EMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0015-247: hyp=['FROM', 'THE', 'UNDER', 'SURFACE', 'OF', 'THE', 'CLOUDS', 'THERE', 'ARE', 'CONTINUAL', 'OMISSIONS', 'OF', 'LURID', 'LIGHT', 'ELECTRIC', 'MATTER', 'IS', 'IN', 'CONTINUAL', 'EVOLUTION', 'FROM', 'THEIR', 'COMPONENT', 'MOLECULES', 'THE', 'GASEOUS', 'ELEMENTS', 'OF', 'THE', 'AIR', 'NEED', 'TO', 'BE', 'SLAKED', 'WITH', 'MOISTURE', 'FOR', 'INNUMERABLE', 
'COLUMNS', 'OF', 'WATER', 'RUSH', 'UPWARDS', 'INTO', 'THE', 'AIR', 'AND', 'FALL', 'BACK', 'AGAIN', 'IN', 'WHITE', 'FOAM'] +260-123288-0016-248: ref=['I', 'REFER', 'TO', 'THE', 'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0016-248: hyp=['I', 'REFER', 'TO', 'THE', 'THERMOMETER', 'IT', 'INDICATES', 'THE', 'FIGURE', 'IS', 'OBLITERATED'] +260-123288-0017-249: ref=['IS', 'THE', 'ATMOSPHERIC', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'THIS', 'DENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0017-249: hyp=['IS', 'THE', 'ATMOSPHERE', 'CONDITION', 'HAVING', 'ONCE', 'REACHED', 'ITS', 'DENSITY', 'TO', 'BECOME', 'FINAL'] +260-123288-0018-250: ref=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0018-250: hyp=['THE', 'RAFT', 'BEARS', 'ON', 'STILL', 'TO', 'THE', 'SOUTH', 'EAST'] +260-123288-0019-251: ref=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0019-251: hyp=['AT', 'NOON', 'THE', 'VIOLENCE', 'OF', 'THE', 'STORM', 'REDOUBLES'] +260-123288-0020-252: ref=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0020-252: hyp=['EACH', 'OF', 'US', 'IS', 'LASHED', 'TO', 'SOME', 'PART', 'OF', 'THE', 'RAFT'] +260-123288-0021-253: ref=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0021-253: hyp=['THE', 'WAVES', 'RISE', 'ABOVE', 'OUR', 'HEADS'] +260-123288-0022-254: ref=['THEY', 'SEEM', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0022-254: hyp=['THEY', 'SEEMED', 'TO', 'BE', 'WE', 'ARE', 'LOST', 'BUT', 'I', 'AM', 'NOT', 'SURE'] +260-123288-0023-255: ref=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0023-255: hyp=['HE', 'NODS', 'HIS', 'CONSENT'] +260-123288-0024-256: ref=['THE', 'FIREBALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIPPED', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0024-256: hyp=['THE', 'FIRE', 'BALL', 'HALF', 'OF', 'IT', 'WHITE', 'HALF', 'AZURE', 'BLUE', 'AND', 'THE', 'SIZE', 'OF', 'A', 'TEN', 'INCH', 'SHELL', 'MOVED', 'SLOWLY', 'ABOUT', 'THE', 'RAFT', 'BUT', 'REVOLVING', 'ON', 'ITS', 'OWN', 'AXIS', 'WITH', 'ASTONISHING', 'VELOCITY', 'AS', 'IF', 'WHIP', 'ROUND', 'BY', 'THE', 'FORCE', 'OF', 'THE', 'WHIRLWIND'] +260-123288-0025-257: ref=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] +260-123288-0025-257: hyp=['HERE', 'IT', 'COMES', 'THERE', 'IT', 'GLIDES', 'NOW', 'IT', 'IS', 'UP', 'THE', 'RAGGED', 'STUMP', 'OF', 'THE', 'MAST', 'THENCE', 'IT', 'LIGHTLY', 'LEAPS', 'ON', 'THE', 'PROVISION', 'BAG', 'DESCENDS', 'WITH', 'A', 'LIGHT', 'BOUND', 'AND', 'JUST', 'SKIMS', 'THE', 'POWDER', 'MAGAZINE', 'HORRIBLE'] +260-123288-0026-258: ref=['WE', 'SHALL', 'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0026-258: hyp=['WE', 'SHALL', 
'BE', 'BLOWN', 'UP', 'BUT', 'NO', 'THE', 'DAZZLING', 'DISK', 'OF', 'MYSTERIOUS', 'LIGHT', 'NIMBLY', 'LEAPS', 'ASIDE', 'IT', 'APPROACHES', 'HANS', 'WHO', 'FIXES', 'HIS', 'BLUE', 'EYE', 'UPON', 'IT', 'STEADILY', 'IT', 'THREATENS', 'THE', 'HEAD', 'OF', 'MY', 'UNCLE', 'WHO', 'FALLS', 'UPON', 'HIS', 'KNEES', 'WITH', 'HIS', 'HEAD', 'DOWN', 'TO', 'AVOID', 'IT'] +260-123288-0027-259: ref=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0027-259: hyp=['A', 'SUFFOCATING', 'SMELL', 'OF', 'NITROGEN', 'FILLS', 'THE', 'AIR', 'IT', 'ENTERS', 'THE', 'THROAT', 'IT', 'FILLS', 'THE', 'LUNGS'] +260-123288-0028-260: ref=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123288-0028-260: hyp=['WE', 'SUFFER', 'STIFLING', 'PAINS'] +260-123440-0000-179: ref=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0000-179: hyp=['AND', 'HOW', 'ODD', 'THE', 'DIRECTIONS', 'WILL', 'LOOK'] +260-123440-0001-180: ref=['POOR', 'ALICE'] +260-123440-0001-180: hyp=['POOR', 'ALICE'] +260-123440-0002-181: ref=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0002-181: hyp=['IT', 'WAS', 'THE', 'WHITE', 'RABBIT', 'RETURNING', 'SPLENDIDLY', 'DRESSED', 'WITH', 'A', 'PAIR', 'OF', 'WHITE', 'KID', 'GLOVES', 'IN', 'ONE', 'HAND', 'AND', 'A', 'LARGE', 'FAN', 'IN', 'THE', 'OTHER', 'HE', 'CAME', 'TROTTING', 'ALONG', 'IN', 'A', 'GREAT', 'HURRY', 'MUTTERING', 'TO', 'HIMSELF', 'AS', 'HE', 'CAME', 'OH', 'THE', 'DUCHESS', 'THE', 'DUCHESS'] +260-123440-0003-182: ref=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0003-182: hyp=['OH', "WON'T", 'SHE', 'BE', 'SAVAGE', 'IF', "I'VE", 'KEPT', 'HER', 'WAITING'] +260-123440-0004-183: ref=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0004-183: hyp=['ALICE', 'TOOK', 'UP', 'THE', 'FAN', 'AND', 'GLOVES', 'AND', 'AS', 'THE', 'HALL', 'WAS', 'VERY', 'HOT', 'SHE', 'KEPT', 'FANNING', 'HERSELF', 'ALL', 'THE', 'TIME', 'SHE', 'WENT', 'ON', 'TALKING', 'DEAR', 'DEAR', 'HOW', 'QUEER', 'EVERYTHING', 'IS', 'TO', 'DAY'] +260-123440-0005-184: ref=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0005-184: hyp=['AND', 'YESTERDAY', 'THINGS', 'WENT', 'ON', 'JUST', 'AS', 'USUAL'] +260-123440-0006-185: ref=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0006-185: hyp=['I', 'WONDER', 'IF', "I'VE", 'BEEN', 'CHANGED', 'IN', 'THE', 'NIGHT'] +260-123440-0007-186: ref=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'A', 'LITTLE', 'DIFFERENT'] +260-123440-0007-186: hyp=['I', 'ALMOST', 'THINK', 'I', 'CAN', 'REMEMBER', 'FEELING', 'A', 'LITTLE', 'DIFFERENT'] +260-123440-0008-187: ref=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0008-187: hyp=["I'LL", 'TRY', 'IF', 'I', 'KNOW', 'ALL', 'THE', 'THINGS', 'I', 'USED', 'TO', 'KNOW'] +260-123440-0009-188: ref=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] 
+260-123440-0009-188: hyp=['I', 'SHALL', 'NEVER', 'GET', 'TO', 'TWENTY', 'AT', 'THAT', 'RATE'] +260-123440-0010-189: ref=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0010-189: hyp=['HOW', 'CHEERFULLY', 'HE', 'SEEMS', 'TO', 'GRIN', 'HOW', 'NEATLY', 'SPREAD', 'HIS', 'CLAWS', 'AND', 'WELCOME', 'LITTLE', 'FISHES', 'IN', 'WITH', 'GENTLY', 'SMILING', 'JAWS'] +260-123440-0011-190: ref=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0011-190: hyp=['NO', "I'VE", 'MADE', 'UP', 'MY', 'MIND', 'ABOUT', 'IT', 'IF', "I'M", 'MABEL', "I'LL", 'STAY', 'DOWN', 'HERE'] +260-123440-0012-191: ref=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0012-191: hyp=["IT'LL", 'BE', 'NO', 'USE', 'THEIR', 'PUTTING', 'THEIR', 'HEADS', 'DOWN', 'AND', 'SAYING', 'COME', 'UP', 'AGAIN', 'DEAR'] +260-123440-0013-192: ref=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0013-192: hyp=['I', 'AM', 'SO', 'VERY', 'TIRED', 'OF', 'BEING', 'ALL', 'ALONE', 'HERE'] +260-123440-0014-193: ref=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0014-193: hyp=['AND', 'I', 'DECLARE', "IT'S", 'TOO', 'BAD', 'THAT', 'IT', 'IS'] +260-123440-0015-194: ref=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0015-194: hyp=['I', 'WISH', 'I', "HADN'T", 'CRIED', 'SO', 'MUCH', 'SAID', 'ALICE', 'AS', 'SHE', 'SWAM', 'ABOUT', 'TRYING', 'TO', 'FIND', 'HER', 'WAY', 'OUT'] +260-123440-0016-195: ref=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0016-195: hyp=['I', 'SHALL', 'BE', 'PUNISHED', 'FOR', 'IT', 'NOW', 'I', 'SUPPOSE', 'BY', 'BEING', 'DROWNED', 'IN', 'MY', 'OWN', 'TEARS'] +260-123440-0017-196: ref=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0017-196: hyp=['THAT', 'WILL', 'BE', 'A', 'QUEER', 'THING', 'TO', 'BE', 'SURE'] +260-123440-0018-197: ref=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'O', 'MOUSE'] +260-123440-0018-197: hyp=['I', 'AM', 'VERY', 'TIRED', 'OF', 'SWIMMING', 'ABOUT', 'HERE', 'OH', 'MOUSE'] +260-123440-0019-198: ref=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0019-198: hyp=['CRIED', 'ALICE', 'AGAIN', 'FOR', 'THIS', 'TIME', 'THE', 'MOUSE', 'WAS', 'BRISTLING', 'ALL', 'OVER', 'AND', 'SHE', 'FELT', 'CERTAIN', 'IT', 'MUST', 'BE', 'REALLY', 'OFFENDED'] +260-123440-0020-199: ref=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 'NOT', 'WE', 'INDEED'] +260-123440-0020-199: hyp=['WE', "WON'T", 'TALK', 'ABOUT', 'HER', 'ANY', 'MORE', 'IF', "YOU'D", 'RATHER', 'NOT', 'WE', 'INDEED'] +2830-3979-0000-1120: ref=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', "LUTHER'S", 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] +2830-3979-0000-1120: hyp=['WE', 'WANT', 'YOU', 'TO', 'HELP', 'US', 'PUBLISH', 'SOME', 'LEADING', 'WORK', 'OF', "LUTHER'S", 'FOR', 'THE', 'GENERAL', 'AMERICAN', 'MARKET', 'WILL', 'YOU', 'DO', 'IT'] 
+2830-3979-0001-1121: ref=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAMLINE', 'HIM', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TODAY', 'TO', 'AMERICANS'] +2830-3979-0001-1121: hyp=['THE', 'CONDITION', 'IS', 'THAT', 'I', 'WILL', 'BE', 'PERMITTED', 'TO', 'MAKE', 'LUTHER', 'TALK', 'AMERICAN', 'STREAM', 'LINE', 'HYMN', 'SO', 'TO', 'SPEAK', 'BECAUSE', 'YOU', 'WILL', 'NEVER', 'GET', 'PEOPLE', 'WHETHER', 'IN', 'OR', 'OUTSIDE', 'THE', 'LUTHERAN', 'CHURCH', 'ACTUALLY', 'TO', 'READ', 'LUTHER', 'UNLESS', 'WE', 'MAKE', 'HIM', 'TALK', 'AS', 'HE', 'WOULD', 'TALK', 'TO', 'DAY', 'TO', 'AMERICANS'] +2830-3979-0002-1122: ref=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0002-1122: hyp=['LET', 'US', 'BEGIN', 'WITH', 'THAT', 'HIS', 'COMMENTARY', 'ON', 'GALLATIONS'] +2830-3979-0003-1123: ref=['THE', 'UNDERTAKING', 'WHICH', 'SEEMED', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0003-1123: hyp=['THE', 'UNDERTAKING', 'WHICH', 'SEEMS', 'SO', 'ATTRACTIVE', 'WHEN', 'VIEWED', 'AS', 'A', 'LITERARY', 'TASK', 'PROVED', 'A', 'MOST', 'DIFFICULT', 'ONE', 'AND', 'AT', 'TIMES', 'BECAME', 'OPPRESSIVE'] +2830-3979-0004-1124: ref=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0004-1124: hyp=['IT', 'WAS', 'WRITTEN', 'IN', 'LATIN'] +2830-3979-0005-1125: ref=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0005-1125: hyp=['THE', 'WORK', 'HAD', 'TO', 'BE', 'CONDENSED'] +2830-3979-0006-1126: ref=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ON', 'GALATIANS'] +2830-3979-0006-1126: hyp=['A', 'WORD', 'SHOULD', 'NOW', 'BE', 'SAID', 'ABOUT', 'THE', 'ORIGIN', 'OF', "LUTHER'S", 'COMMENTARY', 'ANGULATIONS'] +2830-3979-0007-1127: ref=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'EDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALATIANS', 'FOR', 'INSTANCE'] +2830-3979-0007-1127: hyp=['MUCH', 'LATER', 'WHEN', 'A', 'FRIEND', 'OF', 'HIS', 'WAS', 'PREPARING', 'AN', 'ADDITION', 'OF', 'ALL', 'HIS', 'LATIN', 'WORKS', 'HE', 'REMARKED', 'TO', 'HIS', 'HOME', 'CIRCLE', 'IF', 'I', 'HAD', 'MY', 'WAY', 'ABOUT', 'IT', 'THEY', 'WOULD', 'REPUBLISH', 'ONLY', 'THOSE', 'OF', 'MY', 'BOOKS', 'WHICH', 'HAVE', 'DOCTRINE', 'MY', 'GALLATIONS', 'FOR', 'INSTANCE'] +2830-3979-0008-1128: ref=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALATIANS', 'AND', 'ROERER', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 'THE', 'PRINTER'] +2830-3979-0008-1128: hyp=['IN', 'OTHER', 'WORDS', 'THESE', 'THREE', 'MEN', 'TOOK', 'DOWN', 'THE', 'LECTURES', 'WHICH', 'LUTHER', 'ADDRESSED', 'TO', 'HIS', 'STUDENTS', 'IN', 'THE', 'COURSE', 'OF', 'GALLATIONS', 'AND', 'ROAR', 'PREPARED', 'THE', 'MANUSCRIPT', 'FOR', 'THE', 'PRINTER'] +2830-3979-0009-1129: ref=['IT', 'PRESENTS', 'LIKE', 'NO', 'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 
'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0009-1129: hyp=['IT', 'PRESENTS', 'LIKE', 'NO', 'OTHER', 'OF', "LUTHER'S", 'WRITINGS', 'THE', 'CENTRAL', 'THOUGHT', 'OF', 'CHRISTIANITY', 'THE', 'JUSTIFICATION', 'OF', 'THE', 'SINNER', 'FOR', 'THE', 'SAKE', 'OF', "CHRIST'S", 'MERITS', 'ALONE'] +2830-3979-0010-1130: ref=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0010-1130: hyp=['BUT', 'THE', 'ESSENCE', 'OF', "LUTHER'S", 'LECTURES', 'IS', 'THERE'] +2830-3979-0011-1131: ref=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0011-1131: hyp=['THE', 'LORD', 'WHO', 'HAS', 'GIVEN', 'US', 'POWER', 'TO', 'TEACH', 'AND', 'TO', 'HEAR', 'LET', 'HIM', 'ALSO', 'GIVE', 'US', 'THE', 'POWER', 'TO', 'SERVE', 'AND', 'TO', 'DO', 'LUKE', 'TWO'] +2830-3979-0012-1132: ref=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOREVER'] +2830-3979-0012-1132: hyp=['THE', 'WORD', 'OF', 'OUR', 'GOD', 'SHALL', 'STAND', 'FOR', 'EVER'] +2830-3980-0000-1043: ref=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0000-1043: hyp=['IN', 'EVERY', 'WAY', 'THEY', 'SOUGHT', 'TO', 'UNDERMINE', 'THE', 'AUTHORITY', 'OF', 'SAINT', 'PAUL'] +2830-3980-0001-1044: ref=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0001-1044: hyp=['THEY', 'SAID', 'TO', 'THE', 'GALATIANS', 'YOU', 'HAVE', 'NO', 'RIGHT', 'TO', 'THINK', 'HIGHLY', 'OF', 'PAUL'] +2830-3980-0002-1045: ref=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0002-1045: hyp=['HE', 'WAS', 'THE', 'LAST', 'TO', 'TURN', 'TO', 'CHRIST'] +2830-3980-0003-1046: ref=['PAUL', 'CAME', 'LATER', 'AND', 'IS', 'BENEATH', 'US'] +2830-3980-0003-1046: hyp=['PAUL', 'CAME', 'LATER', 'IN', 'HIS', 'BENEATH', 'US'] +2830-3980-0004-1047: ref=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0004-1047: hyp=['INDEED', 'HE', 'PERSECUTED', 'THE', 'CHURCH', 'OF', 'CHRIST', 'FOR', 'A', 'LONG', 'TIME'] +2830-3980-0005-1048: ref=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0005-1048: hyp=['DO', 'YOU', 'SUPPOSE', 'THAT', 'GOD', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'FEW', 'LUTHERAN', 'HERETICS', 'WOULD', 'DISOWN', 'HIS', 'ENTIRE', 'CHURCH'] +2830-3980-0006-1049: ref=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'AND', 'MINISTRY'] +2830-3980-0006-1049: hyp=['AGAINST', 'THESE', 'BOASTING', 'FALSE', 'APOSTLES', 'PAUL', 'BOLDLY', 'DEFENDS', 'HIS', 'APOSTOLIC', 'AUTHORITY', 'AND', 'MINISTRY'] +2830-3980-0007-1050: ref=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0007-1050: hyp=['AS', 'THE', 'AMBASSADOR', 'OF', 'A', 'GOVERNMENT', 'IS', 'HONORED', 'FOR', 'HIS', 'OFFICE', 'AND', 'NOT', 'FOR', 'HIS', 'PRIVATE', 'PERSON', 'SO', 'THE', 'MINISTER', 'OF', 'CHRIST', 'SHOULD', 'EXALT', 'HIS', 'OFFICE', 'IN', 'ORDER', 
'TO', 'GAIN', 'AUTHORITY', 'AMONG', 'MEN'] +2830-3980-0008-1051: ref=['PAUL', 'TAKES', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] +2830-3980-0008-1051: hyp=['POLITICS', 'PRIDE', 'IN', 'HIS', 'MINISTRY', 'NOT', 'TO', 'HIS', 'OWN', 'PRAISE', 'BUT', 'TO', 'THE', 'PRAISE', 'OF', 'GOD'] +2830-3980-0009-1052: ref=['PAUL', 'AN', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0009-1052: hyp=['PAUL', 'AND', 'APOSTLE', 'NOT', 'OF', 'MEN', 'ET', 'CETERA'] +2830-3980-0010-1053: ref=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0010-1053: hyp=['EITHER', 'HE', 'CALLS', 'MINISTERS', 'THROUGH', 'THE', 'AGENCY', 'OF', 'MEN', 'OR', 'HE', 'CALLS', 'THEM', 'DIRECTLY', 'AS', 'HE', 'CALLED', 'THE', 'PROPHETS', 'AND', 'APOSTLES'] +2830-3980-0011-1054: ref=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0011-1054: hyp=['PAUL', 'DECLARES', 'THAT', 'THE', 'FALSE', 'APOSTLES', 'WERE', 'CALLED', 'OR', 'SENT', 'NEITHER', 'BY', 'MEN', 'NOR', 'BY', 'MAN'] +2830-3980-0012-1055: ref=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0012-1055: hyp=['THE', 'MOST', 'THEY', 'COULD', 'CLAIM', 'IS', 'THAT', 'THEY', 'WERE', 'SENT', 'BY', 'OTHERS'] +2830-3980-0013-1056: ref=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0013-1056: hyp=['HE', 'MENTIONS', 'THE', 'APOSTLES', 'FIRST', 'BECAUSE', 'THEY', 'WERE', 'APPOINTED', 'DIRECTLY', 'BY', 'GOD'] +2830-3980-0014-1057: ref=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0014-1057: hyp=['THE', 'CALL', 'IS', 'NOT', 'TO', 'BE', 'TAKEN', 'LIGHTLY'] +2830-3980-0015-1058: ref=['FOR', 'A', 'PERSON', 'TO', 'POSSESS', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0015-1058: hyp=['FOR', 'A', 'PERSON', 'TO', 'POSSESSED', 'KNOWLEDGE', 'IS', 'NOT', 'ENOUGH'] +2830-3980-0016-1059: ref=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0016-1059: hyp=['IT', 'SPOILS', "ONE'S", 'BEST', 'WORK'] +2830-3980-0017-1060: ref=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0017-1060: hyp=['WHEN', 'I', 'WAS', 'A', 'YOUNG', 'MAN', 'I', 'THOUGHT', 'PAUL', 'WAS', 'MAKING', 'TOO', 'MUCH', 'OF', 'HIS', 'CALL'] +2830-3980-0018-1061: ref=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0018-1061: hyp=['I', 'DID', 'NOT', 'THEN', 'REALIZE', 'THE', 'IMPORTANCE', 'OF', 'THE', 'MINISTRY'] +2830-3980-0019-1062: ref=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0019-1062: hyp=['I', 'KNEW', 'NOTHING', 'OF', 'THE', 'DOCTRINE', 'OF', 'FAITH', 'BECAUSE', 'WE', 'WERE', 'TAUGHT', 'SOPHISTRY', 'INSTEAD', 'OF', 'CERTAINTY', 'AND', 'NOBODY', 'UNDERSTOOD', 'SPIRITUAL', 'BOASTING'] +2830-3980-0020-1063: ref=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'HOLY', 'PRIDE'] +2830-3980-0020-1063: hyp=['THIS', 'IS', 'NO', 'SINFUL', 'PRIDE', 'IT', 'IS', 'WHOLLY', 'PRIDE'] +2830-3980-0021-1064: ref=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 
'DEAD'] +2830-3980-0021-1064: hyp=['AND', 'GOD', 'THE', 'FATHER', 'WHO', 'RAISED', 'HIM', 'FROM', 'THE', 'DEAD'] +2830-3980-0022-1065: ref=['THE', 'CLAUSE', 'SEEMS', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] +2830-3980-0022-1065: hyp=['THE', 'CLAS', 'SEEMS', 'SUPERFLUOUS', 'ON', 'FIRST', 'SIGHT'] +2830-3980-0023-1066: ref=['THESE', 'PERVERTERS', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0023-1066: hyp=['THESE', 'PERVERTED', 'OF', 'THE', 'RIGHTEOUSNESS', 'OF', 'CHRIST', 'RESIST', 'THE', 'FATHER', 'AND', 'THE', 'SON', 'AND', 'THE', 'WORKS', 'OF', 'THEM', 'BOTH'] +2830-3980-0024-1067: ref=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0024-1067: hyp=['IN', 'THIS', 'WHOLE', 'EPISTLE', 'PAUL', 'TREATS', 'OF', 'THE', 'RESURRECTION', 'OF', 'CHRIST'] +2830-3980-0025-1068: ref=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0025-1068: hyp=['BY', 'HIS', 'RESURRECTION', 'CHRIST', 'WON', 'THE', 'VICTORY', 'OVER', 'LAW', 'SIN', 'FLESH', 'WORLD', 'DEVIL', 'DEATH', 'HELL', 'AND', 'EVERY', 'EVIL'] +2830-3980-0026-1069: ref=['VERSE', 'TWO'] +2830-3980-0026-1069: hyp=['FIRST', 'TOO'] +2830-3980-0027-1070: ref=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0027-1070: hyp=['AND', 'ALL', 'THE', 'BRETHREN', 'WHICH', 'ARE', 'WITH', 'ME'] +2830-3980-0028-1071: ref=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0028-1071: hyp=['THIS', 'SHOULD', 'GO', 'FAR', 'IN', 'SHUTTING', 'THE', 'MOUTHS', 'OF', 'THE', 'FALSE', 'APOSTLES'] +2830-3980-0029-1072: ref=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0029-1072: hyp=['ALTHOUGH', 'THE', 'BRETHREN', 'WITH', 'ME', 'ARE', 'NOT', 'APOSTLES', 'LIKE', 'MYSELF', 'YET', 'THEY', 'ARE', 'ALL', 'OF', 'ONE', 'MIND', 'WITH', 'ME', 'THINK', 'WRITE', 'AND', 'TEACH', 'AS', 'I', 'DO'] +2830-3980-0030-1073: ref=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0030-1073: hyp=['THEY', 'DO', 'NOT', 'GO', 'WHERE', 'THE', 'ENEMIES', 'OF', 'THE', 'GOSPEL', 'PREDOMINATE', 'THEY', 'GO', 'WHERE', 'THE', 'CHRISTIANS', 'ARE'] +2830-3980-0031-1074: ref=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0031-1074: hyp=['WHY', 'DO', 'THEY', 'NOT', 'INVADE', 'THE', 'CATHOLIC', 'PROVINCES', 'AND', 'PREACH', 'THEIR', 'DOCTRINE', 'TO', 'GODLESS', 'PRINCES', 'BISHOPS', 'AND', 'DOCTORS', 'AS', 'WE', 'HAVE', 'DONE', 'BY', 'THE', 'HELP', 'OF', 'GOD'] +2830-3980-0032-1075: ref=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'EYE', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0032-1075: hyp=['WE', 'LOOK', 'FOR', 'THAT', 'REWARD', 'WHICH', 'I', 'HATH', 'NOT', 'SEEN', 'NOR', 'EAR', 'HEARD', 'NEITHER', 'HATH', 'ENTERED', 'INTO', 'THE', 'HEART', 'OF', 'MAN'] +2830-3980-0033-1076: ref=['NOT', 'ALL', 'THE', 
'GALATIANS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0033-1076: hyp=['NOT', 'ALL', 'THE', 'GALLATIONS', 'HAD', 'BECOME', 'PERVERTED'] +2830-3980-0034-1077: ref=['THESE', 'MEANS', 'CANNOT', 'BE', 'CONTAMINATED'] +2830-3980-0034-1077: hyp=['THESE', 'MEANS', 'CANNOT', 'BE', 'CONTAMINATED'] +2830-3980-0035-1078: ref=['THEY', 'REMAIN', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0035-1078: hyp=['THEY', 'REMAINED', 'DIVINE', 'REGARDLESS', 'OF', "MEN'S", 'OPINION'] +2830-3980-0036-1079: ref=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0036-1079: hyp=['WHEREVER', 'THE', 'MEANS', 'OF', 'GRACE', 'ARE', 'FOUND', 'THERE', 'IS', 'THE', 'HOLY', 'CHURCH', 'EVEN', 'THOUGH', 'ANTICHRIST', 'REIGNS', 'THERE'] +2830-3980-0037-1080: ref=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0037-1080: hyp=['SO', 'MUCH', 'FOR', 'THE', 'TITLE', 'OF', 'THE', 'EPISTLE', 'NOW', 'FOLLOWS', 'THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'VERSE', 'THREE'] +2830-3980-0038-1081: ref=['GRACE', 'BE', 'TO', 'YOU', 'AND', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0038-1081: hyp=['GRACE', 'BE', 'TO', 'YOU', 'IN', 'PEACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0039-1082: ref=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0039-1082: hyp=['THE', 'TERMS', 'OF', 'GRACE', 'AND', 'PEACE', 'ARE', 'COMMON', 'TERMS', 'WITH', 'PAUL', 'AND', 'ARE', 'NOW', 'PRETTY', 'WELL', 'UNDERSTOOD'] +2830-3980-0040-1083: ref=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0040-1083: hyp=['THE', 'GREETING', 'OF', 'THE', 'APOSTLE', 'IS', 'REFRESHING'] +2830-3980-0041-1084: ref=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0041-1084: hyp=['GRACE', 'INVOLVES', 'THE', 'REMISSION', 'OF', 'SINS', 'PEACE', 'AND', 'A', 'HAPPY', 'CONSCIENCE'] +2830-3980-0042-1085: ref=['THE', 'WORLD', 'BRANDS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0042-1085: hyp=['THE', 'WORLD', 'BRINGS', 'THIS', 'A', 'PERNICIOUS', 'DOCTRINE'] +2830-3980-0043-1086: ref=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0043-1086: hyp=['EXPERIENCE', 'PROVES', 'THIS'] +2830-3980-0044-1087: ref=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0044-1087: hyp=['HOWEVER', 'THE', 'GRACE', 'AND', 'PEACE', 'OF', 'GOD', 'WILL'] +2830-3980-0045-1088: ref=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0045-1088: hyp=['MEN', 'SHOULD', 'NOT', 'SPECULATE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0046-1089: ref=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] +2830-3980-0046-1089: hyp=['WAS', 'IT', 'NOT', 'ENOUGH', 'TO', 'SAY', 'FROM', 'GOD', 'THE', 'FATHER'] +2830-3980-0047-1090: ref=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'AND', 'TO', 'COMPREHEND', 'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0047-1090: hyp=['TO', 'DO', 'SO', 'IS', 'TO', 'LOSE', 'GOD', 'ALTOGETHER', 'BECAUSE', 'GOD', 'BECOMES', 'INTOLERABLE', 'WHEN', 'WE', 'SEEK', 'TO', 'MEASURE', 'INTO', 'COMPREHEND', 
'HIS', 'INFINITE', 'MAJESTY'] +2830-3980-0048-1091: ref=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0048-1091: hyp=['HE', 'CAME', 'DOWN', 'TO', 'EARTH', 'LIVED', 'AMONG', 'MEN', 'SUFFERED', 'WAS', 'CRUCIFIED', 'AND', 'THEN', 'HE', 'DIED', 'STANDING', 'CLEARLY', 'BEFORE', 'US', 'SO', 'THAT', 'OUR', 'HEARTS', 'AND', 'EYES', 'MAY', 'FASTEN', 'UPON', 'HIM'] +2830-3980-0049-1092: ref=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0049-1092: hyp=['EMBRACE', 'HIM', 'AND', 'FORGET', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD'] +2830-3980-0050-1093: ref=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0050-1093: hyp=['DID', 'NOT', 'CHRIST', 'HIMSELF', 'SAY', 'I', 'AM', 'THE', 'WAY', 'AND', 'THE', 'TRUTH', 'AND', 'THE', 'LIFE', 'NO', 'MAN', 'COMETH', 'UNTO', 'THE', 'FATHER', 'BUT', 'BY', 'ME'] +2830-3980-0051-1094: ref=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0051-1094: hyp=['WHEN', 'YOU', 'ARGUE', 'ABOUT', 'THE', 'NATURE', 'OF', 'GOD', 'APART', 'FROM', 'THE', 'QUESTION', 'OF', 'JUSTIFICATION', 'YOU', 'MAY', 'BE', 'AS', 'PROFOUND', 'AS', 'YOU', 'LIKE'] +2830-3980-0052-1095: ref=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0052-1095: hyp=['WE', 'ARE', 'TO', 'HEAR', 'CHRIST', 'WHO', 'HAS', 'BEEN', 'APPOINTED', 'BY', 'THE', 'FATHER', 'AS', 'OUR', 'DIVINE', 'TEACHER'] +2830-3980-0053-1096: ref=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0053-1096: hyp=['AT', 'THE', 'SAME', 'TIME', 'PAUL', 'CONFIRMS', 'OUR', 'CREED', 'THAT', 'CHRIST', 'IS', 'VERY', 'GOD'] +2830-3980-0054-1097: ref=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'TO', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0054-1097: hyp=['THAT', 'CHRIST', 'IS', 'VERY', 'GOD', 'IS', 'APPARENT', 'IN', 'THAT', 'PAUL', 'ASCRIBES', 'TO', 'HIM', 'DIVINE', 'POWERS', 'EQUALLY', 'WITH', 'THE', 'FATHER', 'AS', 'FOR', 'INSTANCE', 'THE', 'POWER', 'DOES', 'DISPENSE', 'GRACE', 'AND', 'PEACE'] +2830-3980-0055-1098: ref=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0055-1098: hyp=['TO', 'BESTOW', 'PEACE', 'AND', 'GRACE', 'LIES', 'IN', 'THE', 'PROVINCE', 'OF', 'GOD', 'WHO', 'ALONE', 'CAN', 'CREATE', 'THESE', 'BLESSINGS', 'THE', 'ANGELS', 'CANNOT'] +2830-3980-0056-1099: ref=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0056-1099: hyp=['OTHERWISE', 'PAUL', 'SHOULD', 'HAVE', 'WRITTEN', 'GRACE', 'FROM', 'GOD', 'THE', 'FATHER', 'AND', 'PEACE', 'FROM', 'OUR', 'LORD', 'JESUS', 'CHRIST'] +2830-3980-0057-1100: ref=['THE', 'ARIANS', 'TOOK', 'CHRIST', 'FOR', 'A', 'NOBLE', 'AND', 
'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0057-1100: hyp=['THE', 'ARIANS', 'TOOK', 'CHRIST', 'FOR', 'A', 'NOBLE', 'AND', 'PERFECT', 'CREATURE', 'SUPERIOR', 'EVEN', 'TO', 'THE', 'ANGELS', 'BECAUSE', 'BY', 'HIM', 'GOD', 'CREATED', 'HEAVEN', 'AND', 'EARTH'] +2830-3980-0058-1101: ref=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0058-1101: hyp=['MOHAMMED', 'ALSO', 'SPEAKS', 'HIGHLY', 'OF', 'CHRIST'] +2830-3980-0059-1102: ref=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0059-1102: hyp=['PAUL', 'STICKS', 'TO', 'HIS', 'THEME'] +2830-3980-0060-1103: ref=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0060-1103: hyp=['HE', 'NEVER', 'LOSES', 'SIGHT', 'OF', 'THE', 'PURPOSE', 'OF', 'HIS', 'EPISTLE'] +2830-3980-0061-1104: ref=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASCHAL', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0061-1104: hyp=['NOT', 'GOLD', 'OR', 'SILVER', 'OR', 'PASSION', 'LAMBS', 'OR', 'AN', 'ANGEL', 'BUT', 'HIMSELF', 'WHAT', 'FOR'] +2830-3980-0062-1105: ref=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'OUR', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0062-1105: hyp=['NOT', 'FOR', 'A', 'CROWN', 'OR', 'A', 'KINGDOM', 'OR', 'A', 'GOODNESS', 'BUT', 'FOR', 'OUR', 'SINS'] +2830-3980-0063-1106: ref=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0063-1106: hyp=['UNDERSCORE', 'THESE', 'WORDS', 'FOR', 'THEY', 'ARE', 'FULL', 'OF', 'COMFORT', 'FOR', 'SORE', 'CONSCIENCES'] +2830-3980-0064-1107: ref=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0064-1107: hyp=['HOW', 'MAY', 'WE', 'OBTAIN', 'REMISSION', 'OF', 'OUR', 'SINS'] +2830-3980-0065-1108: ref=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0065-1108: hyp=['PAUL', 'ANSWERS', 'THE', 'MAN', 'WHO', 'IS', 'NAMED', 'JESUS', 'CHRIST', 'AND', 'THE', 'SON', 'OF', 'GOD', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0066-1109: ref=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0066-1109: hyp=['SINCE', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS', 'IT', 'STANDS', 'TO', 'REASON', 'THAT', 'THEY', 'CANNOT', 'BE', 'PUT', 'AWAY', 'BY', 'OUR', 'OWN', 'EFFORTS'] +2830-3980-0067-1110: ref=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0067-1110: hyp=['THIS', 'SENTENCE', 'ALSO', 'DEFINES', 'OUR', 'SINS', 'AS', 'GREAT', 'SO', 'GREAT', 'IN', 'FACT', 'THAT', 'THE', 'WHOLE', 'WORLD', 'COULD', 'NOT', 'MAKE', 'AMENDS', 'FOR', 'A', 'SINGLE', 'SIN'] +2830-3980-0068-1111: ref=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0068-1111: hyp=['THE', 'GREATNESS', 'OF', 'THE', 'RANSOM', 'CHRIST', 'THE', 'SON', 'OF', 'GOD', 'INDICATES', 'THIS'] +2830-3980-0069-1112: ref=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0069-1112: hyp=['THE', 'VICIOUS', 'CHARACTER', 'OF', 'SIN', 'IS', 'BROUGHT', 
'OUT', 'BY', 'THE', 'WORDS', 'WHO', 'GAVE', 'HIMSELF', 'FOR', 'OUR', 'SINS'] +2830-3980-0070-1113: ref=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0070-1113: hyp=['BUT', 'WE', 'ARE', 'CARELESS', 'WE', 'MAKE', 'LIGHT', 'OF', 'SIN'] +2830-3980-0071-1114: ref=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'SIN'] +2830-3980-0071-1114: hyp=['WE', 'THINK', 'THAT', 'BY', 'SOME', 'LITTLE', 'WORK', 'OR', 'MERIT', 'WE', 'CAN', 'DISMISS', 'IN'] +2830-3980-0072-1115: ref=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0072-1115: hyp=['THIS', 'PASSAGE', 'THEN', 'BEARS', 'OUT', 'THE', 'FACT', 'THAT', 'ALL', 'MEN', 'ARE', 'SOLD', 'UNDER', 'SIN'] +2830-3980-0073-1116: ref=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKEN', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'UNTO', 'GOD', 'WITH', 'A', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0073-1116: hyp=['THIS', 'ATTITUDE', 'SPRINGS', 'FROM', 'A', 'FALSE', 'CONCEPTION', 'OF', 'SIN', 'THE', 'CONCEPTION', 'THAT', 'SIN', 'IS', 'A', 'SMALL', 'MATTER', 'EASILY', 'TAKING', 'CARE', 'OF', 'BY', 'GOOD', 'WORKS', 'THAT', 'WE', 'MUST', 'PRESENT', 'OURSELVES', 'INTO', 'GOD', 'WITH', 'GOOD', 'CONSCIENCE', 'THAT', 'WE', 'MUST', 'FEEL', 'NO', 'SIN', 'BEFORE', 'WE', 'MAY', 'FEEL', 'THAT', 'CHRIST', 'WAS', 'GIVEN', 'FOR', 'OUR', 'SINS'] +2830-3980-0074-1117: ref=['THIS', 'ATTITUDE', 'IS', 'UNIVERSAL', 'AND', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0074-1117: hyp=['THE', 'SATITUDE', 'IS', 'UNIVERSAL', 'IN', 'PARTICULARLY', 'DEVELOPED', 'IN', 'THOSE', 'WHO', 'CONSIDER', 'THEMSELVES', 'BETTER', 'THAN', 'OTHERS'] +2830-3980-0075-1118: ref=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0075-1118: hyp=['BUT', 'THE', 'REAL', 'SIGNIFICANCE', 'AND', 'COMFORT', 'OF', 'THE', 'WORDS', 'FOR', 'OUR', 'SINS', 'IS', 'LOST', 'UPON', 'THEM'] +2830-3980-0076-1119: ref=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2830-3980-0076-1119: hyp=['ON', 'THE', 'OTHER', 'HAND', 'WE', 'ARE', 'NOT', 'TO', 'REGARD', 'THEM', 'AS', 'SO', 'TERRIBLE', 'THAT', 'WE', 'MUST', 'DESPAIR'] +2961-960-0000-497: ref=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0000-497: hyp=['HE', 'PASSES', 'ABRUPTLY', 'FROM', 'PERSONS', 'TO', 'IDEAS', 'AND', 'NUMBERS', 'AND', 'FROM', 'IDEAS', 'AND', 'NUMBERS', 'TO', 'PERSONS', 'FROM', 'THE', 'HEAVENS', 'TO', 'MAN', 'FROM', 'ASTRONOMY', 'TO', 'PHYSIOLOGY', 'HE', 'CONFUSES', 'OR', 'RATHER', 'DOES', 'NOT', 'DISTINGUISH', 'SUBJECT', 'AND', 'OBJECT', 'FIRST', 'AND', 'FINAL', 'CAUSES', 'AND', 'IS', 'DREAMING', 
'OF', 'GEOMETRICAL', 'FIGURES', 'LOST', 'IN', 'A', 'FLUX', 'OF', 'SENSE'] +2961-960-0001-498: ref=['THE', 'INFLUENCE', 'WITH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0001-498: hyp=['THE', 'INFLUENCE', 'WHICH', 'THE', 'TIMAEUS', 'HAS', 'EXERCISED', 'UPON', 'POSTERITY', 'IS', 'DUE', 'PARTLY', 'TO', 'A', 'MISUNDERSTANDING'] +2961-960-0002-499: ref=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATONISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'AND', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0002-499: hyp=['IN', 'THE', 'SUPPOSED', 'DEPTHS', 'OF', 'THIS', 'DIALOGUE', 'THE', 'NEO', 'PLATINISTS', 'FOUND', 'HIDDEN', 'MEANINGS', 'IN', 'CONNECTIONS', 'WITH', 'THE', 'JEWISH', 'AND', 'CHRISTIAN', 'SCRIPTURES', 'AND', 'OUT', 'OF', 'THEM', 'THEY', 'ELICITED', 'DOCTRINES', 'QUITE', 'AT', 'VARIANCE', 'WITH', 'THE', 'SPIRIT', 'OF', 'PLATO'] +2961-960-0003-500: ref=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CHARACTERISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0003-500: hyp=['THEY', 'WERE', 'ABSORBED', 'IN', 'HIS', 'THEOLOGY', 'AND', 'WERE', 'UNDER', 'THE', 'DOMINION', 'OF', 'HIS', 'NAME', 'WHILE', 'THAT', 'WHICH', 'WAS', 'TRULY', 'GREAT', 'AND', 'TRULY', 'CORRECTORISTIC', 'IN', 'HIM', 'HIS', 'EFFORT', 'TO', 'REALIZE', 'AND', 'CONNECT', 'ABSTRACTIONS', 'WAS', 'NOT', 'UNDERSTOOD', 'BY', 'THEM', 'AT', 'ALL'] +2961-960-0004-501: ref=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMENTATORS', 'ON', 'THE', 'TIMAEUS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEO', 'PLATONISTS'] +2961-960-0004-501: hyp=['THERE', 'IS', 'NO', 'DANGER', 'OF', 'THE', 'MODERN', 'COMMON', 'TEACHERS', 'ON', 'THE', 'TIMAEUS', 'FALLING', 'INTO', 'THE', 'ABSURDITIES', 'OF', 'THE', 'NEW', 'PLATANISTS'] +2961-960-0005-502: ref=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0005-502: hyp=['IN', 'THE', 'PRESENT', 'DAY', 'WE', 'ARE', 'WELL', 'AWARE', 'THAT', 'AN', 'ANCIENT', 'PHILOSOPHER', 'IS', 'TO', 'BE', 'INTERPRETED', 'FROM', 'HIMSELF', 'AND', 'BY', 'THE', 'CONTEMPORARY', 'HISTORY', 'OF', 'THOUGHT'] +2961-960-0006-503: ref=['THE', 'FANCIES', 'OF', 'THE', 'NEO', 'PLATONISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0006-503: hyp=['THE', 'FANCIES', 'OF', 'THE', 'NEW', 'PLATANISTS', 'ARE', 'ONLY', 'INTERESTING', 'TO', 'US', 'BECAUSE', 'THEY', 'EXHIBIT', 'A', 'PHASE', 'OF', 'THE', 'HUMAN', 'MIND', 'WHICH', 'PREVAILED', 'WIDELY', 'IN', 'THE', 'FIRST', 'CENTURIES', 'OF', 'THE', 'CHRISTIAN', 'ERA', 'AND', 'IS', 'NOT', 'WHOLLY', 'EXTINCT', 'IN', 'OUR', 'OWN', 'DAY'] +2961-960-0007-504: ref=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 
'PLATO', 'AND', 'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0007-504: hyp=['BUT', 'THEY', 'HAVE', 'NOTHING', 'TO', 'DO', 'WITH', 'THE', 'INTERPRETATION', 'OF', 'PLATO', 'AND', 'IN', 'SPIRIT', 'THEY', 'ARE', 'OPPOSED', 'TO', 'HIM'] +2961-960-0008-505: ref=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALOGUES', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TWO', 'TRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0008-505: hyp=['WE', 'DO', 'NOT', 'KNOW', 'HOW', 'PLATO', 'WOULD', 'HAVE', 'ARRANGED', 'HIS', 'OWN', 'DIALECTS', 'OR', 'WHETHER', 'THE', 'THOUGHT', 'OF', 'ARRANGING', 'ANY', 'OF', 'THEM', 'BESIDES', 'THE', 'TUTRILOGIES', 'WHICH', 'HE', 'HAS', 'EXPRESSLY', 'CONNECTED', 'WAS', 'EVER', 'PRESENT', 'TO', 'HIS', 'MIND'] +2961-960-0009-506: ref=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0009-506: hyp=['THE', 'DIALOGUE', 'IS', 'PRIMARILY', 'CONCERNED', 'WITH', 'THE', 'ANIMAL', 'CREATION', 'INCLUDING', 'UNDER', 'THIS', 'TERM', 'THE', 'HEAVENLY', 'BODIES', 'AND', 'WITH', 'MAN', 'ONLY', 'AS', 'ONE', 'AMONG', 'THE', 'ANIMALS'] +2961-960-0010-507: ref=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'DEFINED', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0010-507: hyp=['BUT', 'HE', 'HAS', 'NOT', 'AS', 'YET', 'THE', 'FIND', 'THIS', 'INTERMEDIATE', 'TERRITORY', 'WHICH', 'LIES', 'SOMEWHERE', 'BETWEEN', 'MEDICINE', 'AND', 'MATHEMATICS', 'AND', 'HE', 'WOULD', 'HAVE', 'FELT', 'THAT', 'THERE', 'WAS', 'AS', 'GREAT', 'AN', 'IMPIETY', 'IN', 'RANKING', 'THEORIES', 'OF', 'PHYSICS', 'FIRST', 'IN', 'THE', 'ORDER', 'OF', 'KNOWLEDGE', 'AS', 'IN', 'PLACING', 'THE', 'BODY', 'BEFORE', 'THE', 'SOUL'] +2961-960-0011-508: ref=['WITH', 'HERACLEITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'ANAXAGORAS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGOREANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 'NUMBER'] +2961-960-0011-508: hyp=['WITH', 'HERACLITUS', 'HE', 'ACKNOWLEDGES', 'THE', 'PERPETUAL', 'FLUX', 'LIKE', 'AN', 'EXAGGERUS', 'HE', 'ASSERTS', 'THE', 'PREDOMINANCE', 'OF', 'MIND', 'ALTHOUGH', 'ADMITTING', 'AN', 'ELEMENT', 'OF', 'NECESSITY', 'WHICH', 'REASON', 'IS', 'INCAPABLE', 'OF', 'SUBDUING', 'LIKE', 'THE', 'PYTHAGORIANS', 'HE', 'SUPPOSES', 'THE', 'MYSTERY', 'OF', 'THE', 'WORLD', 'TO', 'BE', 'CONTAINED', 'IN', 'NUMBER'] +2961-960-0012-509: ref=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRE', 'SOCRATIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMAEUS'] +2961-960-0012-509: hyp=['MANY', 'IF', 'NOT', 'ALL', 'THE', 'ELEMENTS', 'OF', 'THE', 'PRIESTHOO', 'CRADIC', 'PHILOSOPHY', 'ARE', 'INCLUDED', 'IN', 'THE', 'TIMEUS'] +2961-960-0013-510: ref=['IT', 'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 
'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0013-510: hyp=['IT', 'IS', 'PROBABLE', 'THAT', 'THE', 'RELATION', 'OF', 'THE', 'IDEAS', 'TO', 'GOD', 'OR', 'OF', 'GOD', 'TO', 'THE', 'WORLD', 'WAS', 'DIFFERENTLY', 'CONCEIVED', 'BY', 'HIM', 'AT', 'DIFFERENT', 'TIMES', 'OF', 'HIS', 'LIFE'] +2961-960-0014-511: ref=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0014-511: hyp=['THE', 'IDEAS', 'ALSO', 'REMAIN', 'BUT', 'THEY', 'HAVE', 'BECOME', 'TYPES', 'IN', 'NATURE', 'FORMS', 'OF', 'MEN', 'ANIMALS', 'BIRDS', 'FISHES'] +2961-960-0015-512: ref=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TIMAEUS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0015-512: hyp=['THE', 'STYLE', 'AND', 'PLAN', 'OF', 'THE', 'TENEAS', 'DIFFER', 'GREATLY', 'FROM', 'THAT', 'OF', 'ANY', 'OTHER', 'OF', 'THE', 'PLATONIC', 'DIALOGUES'] +2961-960-0016-513: ref=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MASTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'PHAEDRUS', 'OR', 'SYMPOSIUM'] +2961-960-0016-513: hyp=['BUT', 'PLATO', 'HAS', 'NOT', 'THE', 'SAME', 'MASTERY', 'OVER', 'HIS', 'INSTRUMENT', 'WHICH', 'HE', 'EXHIBITS', 'IN', 'THE', 'FEATURES', 'OR', 'SIMPOS', 'HIM'] +2961-960-0017-514: ref=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'THE', 'INTRODUCTION', 'IN', 'WHICH', 'HE', 'IS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0017-514: hyp=['NOTHING', 'CAN', 'EXCEED', 'THE', 'BEAUTY', 'OR', 'ART', 'OF', 'INTRODUCTION', 'IN', 'WHICH', 'HIS', 'USING', 'WORDS', 'AFTER', 'HIS', 'ACCUSTOMED', 'MANNER'] +2961-960-0018-515: ref=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0018-515: hyp=['BUT', 'IN', 'THE', 'REST', 'OF', 'THE', 'WORK', 'THE', 'POWER', 'OF', 'LANGUAGE', 'SEEMS', 'TO', 'FAIL', 'HIM', 'AND', 'THE', 'DRAMATIC', 'FORM', 'IS', 'WHOLLY', 'GIVEN', 'UP'] +2961-960-0019-516: ref=['HE', 'COULD', 'WRITE', 'IN', 'ONE', 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'AND', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0019-516: hyp=['HE', 'COULD', 'WRITE', 'IN', "ONE'S", 'STYLE', 'BUT', 'NOT', 'IN', 'ANOTHER', 'THE', 'GREEK', 'LANGUAGE', 'HAD', 'NOT', 'AS', 'YET', 'BEEN', 'FASHIONED', 'BY', 'ANY', 'POET', 'OR', 'PHILOSOPHER', 'TO', 'DESCRIBE', 'PHYSICAL', 'PHENOMENA'] +2961-960-0020-517: ref=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAEUS', 'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0020-517: hyp=['AND', 'HENCE', 'WE', 'FIND', 'THE', 'SAME', 'SORT', 'OF', 'CLUMSINESS', 'IN', 'THE', 'TIMAIRS', 'OF', 'PLATO', 'WHICH', 'CHARACTERIZES', 'THE', 'PHILOSOPHICAL', 'POEM', 'OF', 'LUCRETIUS'] +2961-960-0021-518: ref=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 'OF', 'APPOSITION', 'AND', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] 
+2961-960-0021-518: hyp=['THERE', 'IS', 'A', 'WANT', 'OF', 'FLOW', 'AND', 'OFTEN', 'A', 'DEFECT', 'OF', 'RHYTHM', 'THE', 'MEANING', 'IS', 'SOMETIMES', 'OBSCURE', 'AND', 'THERE', 'IS', 'A', 'GREATER', 'USE', 'OF', 'APPOSITION', 'IN', 'MORE', 'OF', 'REPETITION', 'THAN', 'OCCURS', 'IN', "PLATO'S", 'EARLIER', 'WRITINGS'] +2961-960-0022-519: ref=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-960-0022-519: hyp=['PLATO', 'HAD', 'NOT', 'THE', 'COMMAND', 'OF', 'HIS', 'MATERIALS', 'WHICH', 'WOULD', 'HAVE', 'ENABLED', 'HIM', 'TO', 'PRODUCE', 'A', 'PERFECT', 'WORK', 'OF', 'ART'] +2961-961-0000-520: ref=['SOCRATES', 'BEGINS', 'THE', 'TIMAEUS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0000-520: hyp=['SOCRATES', 'BEGINS', 'TO', 'TEARS', 'WITH', 'A', 'SUMMARY', 'OF', 'THE', 'REPUBLIC'] +2961-961-0001-521: ref=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0001-521: hyp=['AND', 'NOW', 'HE', 'DESIRES', 'TO', 'SEE', 'THE', 'IDEAL', 'STATE', 'SET', 'IN', 'MOTION', 'HE', 'WOULD', 'LIKE', 'TO', 'KNOW', 'HOW', 'SHE', 'BEHAVED', 'IN', 'SOME', 'GREAT', 'STRUGGLE'] +2961-961-0002-522: ref=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TIMAEUS', 'CITIZEN', 'OF', 'LOCRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'AND', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITIAS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHO', 'IS', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0002-522: hyp=['AND', 'THEREFORE', 'TO', 'YOU', 'I', 'TURN', 'TO', 'ME', 'AS', 'CITIZEN', 'OF', 'LOCHRIS', 'WHO', 'ARE', 'AT', 'ONCE', 'A', 'PHILOSOPHER', 'IN', 'A', 'STATESMAN', 'AND', 'TO', 'YOU', 'CRITIUS', 'WHOM', 'ALL', 'ATHENIANS', 'KNOW', 'TO', 'BE', 'SIMILARLY', 'ACCOMPLISHED', 'AND', 'TO', 'HERMOCRATES', 'WHOSE', 'ALSO', 'FITTED', 'BY', 'NATURE', 'AND', 'EDUCATION', 'TO', 'SHARE', 'IN', 'OUR', 'DISCOURSE'] +2961-961-0003-523: ref=['I', 'WILL', 'IF', 'TIMAEUS', 'APPROVES', 'I', 'APPROVE'] +2961-961-0003-523: hyp=['I', 'WILL', 'IF', 'TO', 'ME', 'AS', 'IT', 'PROVES', 'I', 'APPROVE'] +2961-961-0004-524: ref=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', "SOLON'S", 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'DROPIDAS', 'MY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIAS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0004-524: hyp=['LISTEN', 'THEN', 'SOCRATES', 'TO', 'A', 'TALE', 'OF', 'SILENCE', 'WHO', 'BEING', 'THE', 'FRIEND', 'OF', 'TROPIDAS', 'BY', 'GREAT', 'GRANDFATHER', 'TOLD', 'IT', 'TO', 'MY', 'GRANDFATHER', 'CRITIUS', 'AND', 'HE', 'TOLD', 'ME'] +2961-961-0005-525: ref=['SOME', 'POEMS', 'OF', 'SOLON', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0005-525: hyp=['SOME', 'POEMS', 'OF', 'SOLEMN', 'WERE', 'RECITED', 'BY', 'THE', 'BOYS'] +2961-961-0006-526: ref=['AND', 'WHAT', 'WAS', 'THE', 'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0006-526: hyp=['AND', 'WHAT', 'WAS', 'THE', 'SUBJECT', 'OF', 'THE', 'POEM', 'SAID', 'THE', 'PERSON', 'WHO', 'MADE', 'THE', 'REMARK'] +2961-961-0007-527: ref=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 
'EVER', 'ENGAGED'] +2961-961-0007-527: hyp=['THE', 'SUBJECT', 'WAS', 'A', 'VERY', 'NOBLE', 'ONE', 'HE', 'DESCRIBED', 'THE', 'MOST', 'FAMOUS', 'ACTION', 'IN', 'WHICH', 'THE', 'ATHENIAN', 'PEOPLE', 'WERE', 'EVER', 'ENGAGED'] +2961-961-0008-528: ref=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAS', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0008-528: hyp=['BUT', 'THE', 'MEMORY', 'OF', 'THEIR', 'EXPLOITS', 'HAD', 'PASSED', 'AWAY', 'OWING', 'TO', 'THE', 'LAPSE', 'OF', 'TIME', 'AND', 'THE', 'EXTINCTION', 'OF', 'THE', 'ACTORS'] +2961-961-0009-529: ref=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0009-529: hyp=['TELL', 'US', 'SAID', 'THE', 'OTHER', 'THE', 'WHOLE', 'STORY', 'AND', 'WHERE', 'SOLON', 'HEARD', 'THE', 'STORY'] +2961-961-0010-530: ref=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0010-530: hyp=['BUT', 'IN', 'EGYPT', 'THE', 'TRADITIONS', 'OF', 'OUR', 'OWN', 'AND', 'OTHER', 'LANDS', 'ARE', 'BY', 'US', 'REGISTERED', 'FOR', 'EVER', 'IN', 'OUR', 'TEMPLES'] +2961-961-0011-531: ref=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNALS', 'SOLON', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0011-531: hyp=['THE', 'GENEALOGIES', 'WHICH', 'YOU', 'HAVE', 'RECITED', 'TO', 'US', 'OUT', 'OF', 'YOUR', 'OWN', 'ANNAL', 'SOLEMN', 'ARE', 'A', 'MERE', "CHILDREN'S", 'STORY'] +2961-961-0012-532: ref=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0012-532: hyp=['FOR', 'IN', 'THE', 'TIMES', 'BEFORE', 'THE', 'GREAT', 'FLOOD', 'ATHENS', 'WAS', 'THE', 'GREATEST', 'AND', 'BEST', 'OF', 'CITIES', 'AND', 'DID', 'THE', 'NOBLEST', 'DEEDS', 'AND', 'HAD', 'THE', 'BEST', 'CONSTITUTION', 'OF', 'ANY', 'UNDER', 'THE', 'FACE', 'OF', 'HEAVEN'] +2961-961-0013-533: ref=['SOLON', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0013-533: hyp=['SOLEMN', 'MARVELLED', 'AND', 'DESIRED', 'TO', 'BE', 'INFORMED', 'OF', 'THE', 'PARTICULARS'] +2961-961-0014-534: ref=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUNDED', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'SHE', 'FOUNDED', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0014-534: hyp=['NINE', 'THOUSAND', 'YEARS', 'HAVE', 'ELAPSED', 'SINCE', 'SHE', 'FOUND', 'IT', 'YOURS', 'AND', 'EIGHT', 'THOUSAND', 'SINCE', 'YOU', 'FOUND', 'IT', 'OURS', 'AS', 'OUR', 'ANNALS', 'RECORD'] +2961-961-0015-535: ref=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0015-535: hyp=['MANY', 'LAWS', 'EXIST', 'AMONG', 'US', 'WHICH', 'ARE', 'THE', 'COUNTERPART', 'OF', 'YOURS', 'AS', 'THEY', 'WERE', 'IN', 'THE', 'OLDEN', 'TIME'] +2961-961-0016-536: ref=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'THEM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0016-536: hyp=['I', 'WILL', 'BRIEFLY', 'DESCRIBE', 'HIM', 'TO', 'YOU', 'AND', 'YOU', 'SHALL', 'READ', 'THE', 'ACCOUNT', 
'OF', 'THEM', 'AT', 'YOUR', 'LEISURE', 'IN', 'THE', 'SACRED', 'REGISTERS'] +2961-961-0017-537: ref=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MAN'] +2961-961-0017-537: hyp=['OBSERVE', 'AGAIN', 'WHAT', 'CARE', 'THE', 'LAW', 'TOOK', 'IN', 'THE', 'PURSUIT', 'OF', 'WISDOM', 'SEARCHING', 'OUT', 'THE', 'DEEP', 'THINGS', 'OF', 'THE', 'WORLD', 'AND', 'APPLYING', 'THEM', 'TO', 'THE', 'USE', 'OF', 'MEN'] +2961-961-0018-538: ref=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0018-538: hyp=['THE', 'MOST', 'FAMOUS', 'OF', 'THEM', 'ALL', 'WAS', 'THE', 'OVERTHROW', 'OF', 'THE', 'ISLAND', 'OF', 'ATLANTIS'] +2961-961-0019-539: ref=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTHER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0019-539: hyp=['FOR', 'AT', 'THE', 'PERIL', 'OF', 'HER', 'OWN', 'EXISTENCE', 'AND', 'WHEN', 'THE', 'OTTER', 'HELLENES', 'HAD', 'DESERTED', 'HER', 'SHE', 'REPELLED', 'THE', 'INVADER', 'AND', 'OF', 'HER', 'OWN', 'ACCORD', 'GAVE', 'LIBERTY', 'TO', 'ALL', 'THE', 'NATIONS', 'WITHIN', 'THE', 'PILLARS'] +2961-961-0020-540: ref=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0020-540: hyp=['THIS', 'IS', 'THE', 'EXPLANATION', 'OF', 'THE', 'SHALLOWS', 'WHICH', 'ARE', 'FOUND', 'IN', 'THAT', 'PART', 'OF', 'THE', 'ATLANTIC', 'OCEAN'] +2961-961-0021-541: ref=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0021-541: hyp=['BUT', 'I', 'WOULD', 'NOT', 'SPEAK', 'AT', 'THE', 'TIME', 'BECAUSE', 'I', 'WANTED', 'TO', 'REFRESH', 'MY', 'MEMORY'] +2961-961-0022-542: ref=['THEN', 'NOW', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MAN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORD', 'SPOKE'] +2961-961-0022-542: hyp=['THEN', 'THOU', 'LET', 'ME', 'EXPLAIN', 'TO', 'YOU', 'THE', 'ORDER', 'OF', 'OUR', 'ENTERTAINMENT', 'FIRST', 'TIMAEUS', 'WHO', 'IS', 'A', 'NATURAL', 'PHILOSOPHER', 'WILL', 'SPEAK', 'OF', 'THE', 'ORIGIN', 'OF', 'THE', 'WORLD', 'GOING', 'DOWN', 'TO', 'THE', 'CREATION', 'OF', 'MEN', 'AND', 'THEN', 'I', 'SHALL', 'RECEIVE', 'THE', 'MEN', 'WHOM', 'HE', 'HAS', 'CREATED', 'AND', 'SOME', 'OF', 'WHOM', 'WILL', 'HAVE', 'BEEN', 'EDUCATED', 'BY', 'YOU', 'AND', 'INTRODUCE', 'THEM', 'TO', 'YOU', 'AS', 'THE', 'LOST', 'ATHENIAN', 'CITIZENS', 'OF', 'WHOM', 'THE', 'EGYPTIAN', 'RECORDS', 'SPOKE'] +3570-5694-0000-2433: ref=['BUT', 'ALREADY', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 'ANTEDATING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALISED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 
'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM'] +3570-5694-0000-2433: hyp=['BUT', 'ALREADY', 'AT', 'A', 'POINT', 'IN', 'ECONOMIC', 'EVOLUTION', 'FAR', 'ANTETING', 'THE', 'EMERGENCE', 'OF', 'THE', 'LADY', 'SPECIALIZED', 'CONSUMPTION', 'OF', 'GOODS', 'AS', 'AN', 'EVIDENCE', 'OF', 'PECUNIARY', 'STRENGTH', 'HAD', 'BEGUN', 'TO', 'WORK', 'OUT', 'IN', 'A', 'MORE', 'OR', 'LESS', 'CELEBRATE', 'SYSTEM'] +3570-5694-0001-2434: ref=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0001-2434: hyp=['THE', 'UTILITY', 'OF', 'CONSUMPTION', 'AS', 'AN', 'EVIDENCE', 'OF', 'WEALTH', 'IS', 'TO', 'BE', 'CLASSED', 'AS', 'A', 'DERIVATIVE', 'GROWTH'] +3570-5694-0002-2435: ref=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'TO', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOUR', 'AND', 'NOT', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULNESS', 'OF', 'LIFE'] +3570-5694-0002-2435: hyp=['SUCH', 'CONSUMPTION', 'AS', 'FALLS', 'THROUGH', 'THE', 'WOMEN', 'IS', 'MERELY', 'INCIDENTAL', 'TO', 'THEIR', 'WORK', 'IT', 'IS', 'A', 'MEANS', 'TO', 'THEIR', 'CONTINUED', 'LABOR', 'AND', 'NOT', 'TO', 'CONSUMPTION', 'DIRECTED', 'TO', 'THEIR', 'OWN', 'COMFORT', 'AND', 'FULLNESS', 'OF', 'LIFE'] +3570-5694-0003-2436: ref=['WITH', 'A', 'FURTHER', 'ADVANCE', 'IN', 'CULTURE', 'THIS', 'TABU', 'MAY', 'CHANGE', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'A', 'TABU', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0003-2436: hyp=['WITH', 'A', 'FURTHER', 'ADVANCE', 'AND', 'CULTURE', 'THIS', 'TABOU', 'MAY', 'CHANGED', 'INTO', 'SIMPLE', 'CUSTOM', 'OF', 'A', 'MORE', 'OR', 'LESS', 'RIGOROUS', 'CHARACTER', 'BUT', 'WHATEVER', 'BE', 'THE', 'THEORETICAL', 'BASIS', 'OF', 'THE', 'DISTINCTION', 'WHICH', 'IS', 'MAINTAINED', 'WHETHER', 'IT', 'BE', 'AT', 'A', 'BOOT', 'OR', 'A', 'LARGER', 'CONVENTIONALITY', 'THE', 'FEATURES', 'OF', 'THE', 'CONVENTIONAL', 'SCHEME', 'OF', 'CONSUMPTION', 'DO', 'NOT', 'CHANGE', 'EASILY'] +3570-5694-0004-2437: ref=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0004-2437: hyp=['IN', 'THE', 'NATURE', 'OF', 'THINGS', 'LUXURIES', 'AND', 'THE', 'COMFORTS', 'OF', 'LIFE', 'BELONG', 'TO', 'THE', 'LEISURE', 'CLASS'] +3570-5694-0005-2438: ref=['UNDER', 'THE', 'TABU', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0005-2438: hyp=['UNDER', 'THE', 'TABOO', 'CERTAIN', 'VICTUALS', 'AND', 'MORE', 'PARTICULARLY', 'CERTAIN', 'BEVERAGES', 'ARE', 'STRICTLY', 'RESERVED', 'FOR', 'THE', 'USE', 'OF', 'THE', 'SUPERIOR', 'CLASS'] +3570-5694-0006-2439: ref=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 
'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0006-2439: hyp=['DRUNKENNESS', 'AND', 'THE', 'OTHER', 'PATHOLOGICAL', 'CONSEQUENCES', 'OF', 'THE', 'FREE', 'USE', 'OF', 'STIMULANTS', 'THEREFORE', 'TEND', 'IN', 'THEIR', 'TURN', 'TO', 'BECOME', 'HONORIFIC', 'AS', 'BEING', 'A', 'MARK', 'AT', 'THE', 'SECOND', 'REMOVE', 'OF', 'THE', 'SUPERIOR', 'STATUS', 'OF', 'THOSE', 'WHO', 'ARE', 'ABLE', 'TO', 'AFFORD', 'THE', 'INDULGENCE'] +3570-5694-0007-2440: ref=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0007-2440: hyp=['IT', 'HAS', 'EVEN', 'HAPPENED', 'THAT', 'THE', 'NAME', 'FOR', 'CERTAIN', 'DISEASED', 'CONDITIONS', 'OF', 'THE', 'BODY', 'ARISING', 'FROM', 'SUCH', 'AN', 'ORIGIN', 'HAS', 'PASSED', 'INTO', 'EVERYDAY', 'SPEECH', 'AS', 'A', 'SYNONYM', 'FOR', 'NOBLE', 'OR', 'GENTLE'] +3570-5694-0008-2441: ref=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0008-2441: hyp=['THE', 'CONSUMPTION', 'OF', 'LUXURIES', 'IN', 'THE', 'TRUE', 'SENSE', 'IS', 'A', 'CONSUMPTION', 'DIRECTED', 'TO', 'THE', 'COMFORT', 'OF', 'THE', 'CONSUMER', 'HIMSELF', 'AND', 'IS', 'THEREFORE', 'A', 'MARK', 'OF', 'THE', 'MASTER'] +3570-5694-0009-2442: ref=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0009-2442: hyp=['WITH', 'MANY', 'QUALIFICATIONS', 'WITH', 'MORE', 'QUALIFICATIONS', 'AS', 'THE', 'PATRIARCHAL', 'TRADITION', 'HAS', 'GRADUALLY', 'WEAKENED', 'THE', 'GENERAL', 'RULE', 'IS', 'FELT', 'TO', 'BE', 'RIGHT', 'AND', 'BINDING', 'THAT', 'WOMEN', 'SHOULD', 'CONSUME', 'ONLY', 'FOR', 'THE', 'BENEFIT', 'OF', 'THEIR', 'MASTERS'] +3570-5694-0010-2443: ref=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0010-2443: hyp=['THE', 'OBJECTION', 'OF', 'COURSE', 'PRESENTS', 'ITSELF', 'THAT', 'EXPENDITURE', 'ON', "WOMEN'S", 'DRESS', 'AND', 'HOUSEHOLD', 'PARAPHERNALIA', 'IS', 'AN', 'OBVIOUS', 'EXCEPTION', 'TO', 'THIS', 'RULE', 'BUT', 'IT', 'WILL', 'APPEAR', 'IN', 'THE', 'SEQUEL', 'THAT', 'THIS', 'EXCEPTION', 'IS', 'MUCH', 'MORE', 'OBVIOUS', 'THAN', 'SUBSTANTIAL'] +3570-5694-0011-2444: ref=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'BUT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0011-2444: hyp=['THE', 'CUSTOM', 'OF', 'FESTIVE', 'GATHERINGS', 'PROBABLY', 'ORIGINATED', 'IN', 'MOTIVES', 'OF', 'CONVIVIALITY', 'AND', 'RELIGION', 'THESE', 'MOTIVES', 'ARE', 'ALSO', 'PRESENT', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'THAT', 'THEY', 'DO', 'NOT', 'CONTINUE', 'TO', 
'BE', 'THE', 'SOLE', 'MOTIVES'] +3570-5694-0012-2445: ref=['THERE', 'IS', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRADES'] +3570-5694-0012-2445: hyp=['THERE', 'IS', 'A', 'MORE', 'OR', 'LESS', 'ELABORATE', 'SYSTEM', 'OF', 'RANK', 'AND', 'GRATES'] +3570-5694-0013-2446: ref=['THIS', 'DIFFERENTIATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0013-2446: hyp=['THIS', 'DIFFERENTIATION', 'IS', 'FURTHERED', 'BY', 'THE', 'INHERITANCE', 'OF', 'WEALTH', 'AND', 'THE', 'CONSEQUENT', 'INHERITANCE', 'OF', 'GENTILITY'] +3570-5694-0014-2447: ref=['MANY', 'OF', 'THESE', 'AFFILIATED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESSER', 'MEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0014-2447: hyp=['MANY', 'OF', 'THESE', 'ARE', 'FILIATED', 'GENTLEMEN', 'OF', 'LEISURE', 'ARE', 'AT', 'THE', 'SAME', 'TIME', 'LESS', 'AMEN', 'OF', 'SUBSTANCE', 'IN', 'THEIR', 'OWN', 'RIGHT', 'SO', 'THAT', 'SOME', 'OF', 'THEM', 'ARE', 'SCARCELY', 'AT', 'ALL', 'OTHERS', 'ONLY', 'PARTIALLY', 'TO', 'BE', 'RATED', 'AS', 'VICARIOUS', 'CONSUMERS'] +3570-5694-0015-2448: ref=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0015-2448: hyp=['SO', 'MANY', 'OF', 'THEM', 'HOWEVER', 'AS', 'MAKE', 'UP', 'THE', 'RETAINER', 'AND', 'HANGERS', 'ON', 'OF', 'THE', 'PATRON', 'MAY', 'BE', 'CLASSED', 'AS', 'VICARIOUS', 'CONSUMER', 'WITHOUT', 'QUALIFICATION'] +3570-5694-0016-2449: ref=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0016-2449: hyp=['MANY', 'OF', 'THESE', 'AGAIN', 'AND', 'ALSO', 'MANY', 'OF', 'THE', 'OTHER', 'ARISTOCRACY', 'OF', 'LESS', 'DEGREE', 'HAVE', 'IN', 'TURN', 'ATTACHED', 'TO', 'THEIR', 'PERSONS', 'A', 'MORE', 'OR', 'LESS', 'COMPREHENSIVE', 'GROUP', 'OF', 'VICARIOUS', 'CONSUMER', 'IN', 'THE', 'PERSONS', 'OF', 'THEIR', 'WIVES', 'AND', 'CHILDREN', 'THEIR', 'SERVANTS', 'RETAINERS', 'ET', 'CETERA'] +3570-5694-0017-2450: ref=['THE', 'WEARING', 'OF', 'UNIFORMS', 'OR', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0017-2450: hyp=['THE', 'WEARING', 'OF', 'UNIFORMS', 'ARE', 'LIVERIES', 'IMPLIES', 'A', 'CONSIDERABLE', 'DEGREE', 'OF', 'DEPENDENCE', 'AND', 'MAY', 'EVEN', 'BE', 'SAID', 'TO', 'BE', 'A', 'MARK', 'OF', 'SERVITUDE', 'REAL', 'OR', 'OSTENSIBLE'] +3570-5694-0018-2451: ref=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 'AND', 'THE', 'IGNOBLE'] +3570-5694-0018-2451: hyp=['THE', 'WEARERS', 'OF', 'UNIFORMS', 'AND', 'LIVERIES', 'MAY', 'BE', 'ROUGHLY', 'DIVIDED', 'INTO', 'TWO', 'CLASSES', 'THE', 'FREE', 'AND', 'THE', 'SERVILE', 'OR', 'THE', 'NOBLE', 
'AND', 'THE', 'IGNOBLE'] +3570-5694-0019-2452: ref=['BUT', 'THE', 'GENERAL', 'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0019-2452: hyp=['BUT', 'THE', 'GENERAL', 'DISTINCTION', 'IS', 'NOT', 'ON', 'THAT', 'ACCOUNT', 'TO', 'BE', 'OVERLOOKED'] +3570-5694-0020-2453: ref=['SO', 'THOSE', 'OFFICES', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCOUTREMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0020-2453: hyp=['SO', 'THOSE', 'OFFICERS', 'WHICH', 'ARE', 'BY', 'RIGHT', 'THE', 'PROPER', 'EMPLOYMENT', 'OF', 'THE', 'LEISURE', 'CLASS', 'ARE', 'NOBLE', 'SUCH', 'AS', 'GOVERNMENT', 'FIGHTING', 'HUNTING', 'THE', 'CARE', 'OF', 'ARMS', 'AND', 'ACCOUTREMENTS', 'AND', 'THE', 'LIKE', 'IN', 'SHORT', 'THOSE', 'WHICH', 'MAY', 'BE', 'CLASSED', 'AS', 'OSTENSIBLY', 'PREDATORY', 'EMPLOYMENTS'] +3570-5694-0021-2454: ref=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'THE', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0021-2454: hyp=['WHENEVER', 'AS', 'IN', 'THESE', 'CASES', 'THE', 'MENIAL', 'SERVICE', 'IN', 'QUESTION', 'HAS', 'TO', 'DO', 'DIRECTLY', 'WITH', 'A', 'PRIMARY', 'LEISURE', 'EMPLOYMENTS', 'OF', 'FIGHTING', 'AND', 'HUNTING', 'IT', 'EASILY', 'ACQUIRES', 'A', 'REFLECTED', 'HONORIFIC', 'CHARACTER'] +3570-5694-0022-2455: ref=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5694-0022-2455: hyp=['THE', 'LIVERY', 'BECOMES', 'OBNOXIOUS', 'TO', 'NEARLY', 'ALL', 'WHO', 'ARE', 'REQUIRED', 'TO', 'WEAR', 'IT'] +3570-5695-0000-2456: ref=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0000-2456: hyp=['IN', 'A', 'GENERAL', 'WAY', 'THOUGH', 'NOT', 'WHOLLY', 'NOR', 'CONSISTENTLY', 'THESE', 'TWO', 'GROUPS', 'COINCIDE'] +3570-5695-0001-2457: ref=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'THE', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] +3570-5695-0001-2457: hyp=['THE', 'DEPENDENT', 'WHO', 'WAS', 'FIRST', 'DELEGATED', 'FOR', 'THESE', 'DUTIES', 'WAS', 'THE', 'WIFE', 'OR', 'THE', 'CHIEF', 'WIFE', 'AND', 'AS', 'WOULD', 'BE', 'EXPECTED', 'IN', 'A', 'LATER', 'DEVELOPMENT', 'OF', 'THE', 'INSTITUTION', 'WHEN', 'THE', 'NUMBER', 'OF', 'PERSONS', 'BY', 'WHOM', 'THESE', 'DUTIES', 'ARE', 'CUSTOMARILY', 'PERFORMED', 'GRADUALLY', 'NARROWS', 'THE', 'WIFE', 'REMAINS', 'THE', 'LAST'] +3570-5695-0002-2458: ref=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VICARIOUS', 'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0002-2458: hyp=['BUT', 'AS', 'WE', 'DESCEND', 'THE', 'SOCIAL', 'SCALE', 'THE', 'POINT', 'IS', 'PRESENTLY', 'REACHED', 'WHERE', 'THE', 'DUTIES', 'OF', 'VIPEROUS', 
'LEISURE', 'AND', 'CONSUMPTION', 'DEVOLVE', 'UPON', 'THE', 'WIFE', 'ALONE'] +3570-5695-0003-2459: ref=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0003-2459: hyp=['IN', 'THE', 'COMMUNITIES', 'OF', 'THE', 'WESTERN', 'CULTURE', 'THIS', 'POINT', 'IS', 'AT', 'PRESENT', 'FOUND', 'AMONG', 'THE', 'LOWER', 'MIDDLE', 'CLASS'] +3570-5695-0004-2460: ref=['IF', 'BEAUTY', 'OR', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0004-2460: hyp=['IF', 'BEAUTY', 'OR', 'COMFORT', 'IS', 'ACHIEVED', 'AND', 'IT', 'IS', 'A', 'MORE', 'OR', 'LESS', 'FORTUITOUS', 'CIRCUMSTANCE', 'IF', 'THEY', 'ARE', 'THEY', 'MUST', 'BE', 'ACHIEVED', 'BY', 'MEANS', 'AND', 'METHODS', 'THAT', 'COMMEND', 'THEMSELVES', 'TO', 'THE', 'GREAT', 'ECONOMIC', 'LAW', 'OF', 'WASTED', 'EFFORT'] +3570-5695-0005-2461: ref=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0005-2461: hyp=['THE', 'MAN', 'OF', 'THE', 'HOUSEHOLD', 'ALSO', 'CAN', 'DO', 'SOMETHING', 'IN', 'THIS', 'DIRECTION', 'AND', 'INDEED', 'HE', 'COMMONLY', 'DOES', 'BUT', 'WITH', 'A', 'STILL', 'LOWER', 'DESCENT', 'INTO', 'THE', 'LEVELS', 'OF', 'INDIGENCE', 'ALONG', 'THE', 'MARGIN', 'OF', 'THE', 'SLUMS', 'THE', 'MAN', 'AND', 'PRESENTLY', 'ALSO', 'THE', 'CHILDREN', 'VIRTUALLY', 'CEASE', 'TO', 'CONSUME', 'VALUABLE', 'GOODS', 'FOR', 'APPEARANCES', 'AND', 'THE', 'WOMAN', 'REMAINS', 'VIRTUALLY', 'THE', 'SOLE', 'EXPONENT', 'OF', 'THE', "HOUSEHOLD'S", 'PECUNIARY', 'DECENCY'] +3570-5695-0006-2462: ref=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENSE', 'OF', 'PECUNIARY', 'DECENCY', 'IS', 'PUT', 'AWAY'] +3570-5695-0006-2462: hyp=['VERY', 'MUCH', 'OF', 'SQUALOR', 'AND', 'DISCOMFORT', 'WILL', 'BE', 'ENDURED', 'BEFORE', 'THE', 'LAST', 'TRINKET', 'OR', 'THE', 'LAST', 'PRETENCE', 'OF', 'PECUNIARY', 'DECENCIES', 'PUT', 'AWAY'] +3570-5695-0007-2463: ref=['THERE', 'IS', 'NO', 'CLASS', 'AND', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0007-2463: hyp=['THERE', 'IS', 'NO', 'CLASS', 'AND', 'NO', 'COUNTRY', 'THAT', 'HAS', 'YIELDED', 'SO', 'ABJECTLY', 'BEFORE', 'THE', 'PRESSURE', 'OF', 'PHYSICAL', 'WANT', 'AS', 'TO', 'DENY', 'THEMSELVES', 'ALL', 'GRATIFICATION', 'OF', 'THIS', 'HIGHER', 'OR', 'SPIRITUAL', 'NEED'] +3570-5695-0008-2464: ref=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'AFFECT'] +3570-5695-0008-2464: 
hyp=['THE', 'QUESTION', 'IS', 'WHICH', 'OF', 'THE', 'TWO', 'METHODS', 'WILL', 'MOST', 'EFFECTIVELY', 'REACH', 'THE', 'PERSONS', 'WHOSE', 'CONVICTIONS', 'IT', 'IS', 'DESIRED', 'TO', 'EFFECT'] +3570-5695-0009-2465: ref=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0009-2465: hyp=['EACH', 'WILL', 'THEREFORE', 'SERVE', 'ABOUT', 'EQUALLY', 'WELL', 'DURING', 'THE', 'EARLIER', 'STAGES', 'OF', 'SOCIAL', 'GROWTH'] +3570-5695-0010-2466: ref=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0010-2466: hyp=['THE', 'MODERN', 'ORGANIZATION', 'OF', 'INDUSTRY', 'WORKS', 'IN', 'THE', 'SAME', 'DIRECTION', 'ALSO', 'BY', 'ANOTHER', 'LINE'] +3570-5695-0011-2467: ref=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0011-2467: hyp=['IT', 'IS', 'EVIDENT', 'THEREFORE', 'THAT', 'THE', 'PRESENT', 'TREND', 'OF', 'THE', 'DEVELOPMENT', 'IS', 'IN', 'THE', 'DIRECTION', 'OF', 'HEIGHTENING', 'THE', 'UTILITY', 'OF', 'CONSPICUOUS', 'CONSUMPTION', 'AS', 'COMPARED', 'WITH', 'LEISURE'] +3570-5695-0012-2468: ref=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONTACT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0012-2468: hyp=['IT', 'IS', 'ALSO', 'NOTICEABLE', 'THAT', 'THE', 'SERVICEABILITY', 'OF', 'CONSUMPTION', 'AS', 'A', 'MEANS', 'OF', 'REPUTE', 'AS', 'WELL', 'AS', 'THE', 'INSISTENCE', 'ON', 'IT', 'AS', 'AN', 'ELEMENT', 'OF', 'DECENCY', 'IS', 'AT', 'ITS', 'BEST', 'IN', 'THOSE', 'PORTIONS', 'OF', 'THE', 'COMMUNITY', 'WHERE', 'THE', 'HUMAN', 'CONDUCT', 'OF', 'THE', 'INDIVIDUAL', 'IS', 'WIDEST', 'AND', 'THE', 'MOBILITY', 'OF', 'THE', 'POPULATION', 'IS', 'GREATEST'] +3570-5695-0013-2469: ref=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0013-2469: hyp=['CONSUMPTION', 'BECOMES', 'A', 'LARGER', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'LIVING', 'IN', 'THE', 'CITY', 'THAN', 'IN', 'THE', 'COUNTRY'] +3570-5695-0014-2470: ref=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACE', 'IS', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBORHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0014-2470: hyp=['AMONG', 'THE', 'COUNTRY', 'POPULATION', 'ITS', 'PLACES', 'TO', 'SOME', 'EXTENT', 'TAKEN', 'BY', 'SAVINGS', 'AND', 'HOME', 'COMFORTS', 'KNOWN', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'NEIGHBOURHOOD', 'GOSSIP', 'SUFFICIENTLY', 'TO', 'SERVE', 'THE', 'LIKE', 'GENERAL', 'PURPOSE', 'OF', 'PECUNIARY', 'REPUTE'] +3570-5695-0015-2471: ref=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 
'OF', 'WORKMEN'] +3570-5695-0015-2471: hyp=['THE', 'RESULT', 'IS', 'A', 'GREAT', 'MOBILITY', 'OF', 'THE', 'LABOR', 'EMPLOYED', 'IN', 'PRINTING', 'PERHAPS', 'GREATER', 'THAN', 'IN', 'ANY', 'OTHER', 'EQUALLY', 'WELL', 'DEFINED', 'AND', 'CONSIDERABLE', 'BODY', 'OF', 'WORKMEN'] +3570-5696-0000-2472: ref=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0000-2472: hyp=['UNDER', 'THE', 'SIMPLE', 'TEST', 'OF', 'EFFECTIVENESS', 'FOR', 'ADVERTISING', 'WE', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'LEISURE', 'AND', 'THE', 'CONSPICUOUS', 'CONSUMPTION', 'OF', 'GOODS', 'DIVIDING', 'THE', 'FIELD', 'OF', 'PECUNIARY', 'EMULATION', 'PRETTY', 'EVENLY', 'BETWEEN', 'THEM', 'AT', 'THE', 'OUTSET'] +3570-5696-0001-2473: ref=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'HOLD', 'A', 'RANK', 'VERY', 'MUCH', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'QUASI', 'PEACEABLE', 'CULTURE'] +3570-5696-0001-2473: hyp=['BUT', 'THE', 'ACTUAL', 'COURSE', 'OF', 'DEVELOPMENT', 'HAS', 'BEEN', 'SOMEWHAT', 'DIFFERENT', 'FROM', 'THIS', 'IDEAL', 'SCHEME', 'LEISURE', 'HELD', 'THE', 'FIRST', 'PLACE', 'AT', 'THE', 'START', 'AND', 'CAME', 'TO', 'ALL', 'THE', 'RANK', 'VERIMENT', 'ABOVE', 'WASTEFUL', 'CONSUMPTION', 'OF', 'GOODS', 'BOTH', 'AS', 'A', 'DIRECT', 'EXPONENT', 'OF', 'WEALTH', 'AND', 'AS', 'AN', 'ELEMENT', 'IN', 'THE', 'STANDARD', 'OF', 'DECENCY', 'DURING', 'THE', 'COURSE', 'I', 'PEACEABLE', 'CULTURE'] +3570-5696-0002-2474: ref=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0002-2474: hyp=['OTHER', 'CIRCUMSTANCES', 'PERMITTING', 'THAT', 'INSTINCT', 'DISPOSES', 'MEN', 'TO', 'LOOK', 'WITH', 'FAVOR', 'UPON', 'PRODUCTIVE', 'EFFICIENCY', 'AND', 'ON', 'WHATEVER', 'IS', 'OF', 'HUMAN', 'USE'] +3570-5696-0003-2475: ref=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'EFFECTED', 'BY', 'A', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MANY', 'AND', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALKERS', 'MAY', 'NOT', 'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0003-2475: hyp=['A', 'RECONCILIATION', 'BETWEEN', 'THE', 'TWO', 'CONFLICTING', 'REQUIREMENTS', 'IS', 'AFFECTED', 'BY', 'RESORT', 'TO', 'MAKE', 'BELIEVE', 'MEN', 'IN', 'INTRICATE', 'POLITE', 'OBSERVANCES', 'AND', 'SOCIAL', 'DUTIES', 'OF', 'A', 'CEREMONIAL', 'NATURE', 'ARE', 'DEVELOPED', 'MANY', 'ORGANIZATIONS', 'ARE', 'FOUNDED', 'WITH', 'SOME', 'SPECIOUS', 'OBJECT', 'OF', 
'AMELIORATION', 'EMBODIED', 'IN', 'THEIR', 'OFFICIAL', 'STYLE', 'AND', 'TITLE', 'THERE', 'IS', 'MUCH', 'COMING', 'AND', 'GOING', 'AND', 'A', 'DEAL', 'OF', 'TALK', 'TO', 'THE', 'END', 'THAT', 'THE', 'TALK', 'IS', 'NOT', 'HAVE', 'OCCASION', 'TO', 'REFLECT', 'ON', 'WHAT', 'IS', 'THE', 'EFFECTUAL', 'ECONOMIC', 'VALUE', 'OF', 'THEIR', 'TRAFFIC'] +3570-5696-0004-2476: ref=['THE', 'SALIENT', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0004-2476: hyp=['THE', 'SAILORED', 'FEATURES', 'OF', 'THIS', 'DEVELOPMENT', 'OF', 'DOMESTIC', 'SERVICE', 'HAVE', 'ALREADY', 'BEEN', 'INDICATED'] +3570-5696-0005-2477: ref=['THROUGHOUT', 'THE', 'ENTIRE', 'EVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0005-2477: hyp=['THROUGHOUT', 'THE', 'ENTIRE', 'EVOLUTION', 'OF', 'CONSPICUOUS', 'EXPENDITURE', 'WHETHER', 'OF', 'GOODS', 'OR', 'OF', 'SERVICES', 'OR', 'HUMAN', 'LIFE', 'RUNS', 'THE', 'OBVIOUS', 'IMPLICATION', 'THAT', 'IN', 'ORDER', 'TO', 'EFFECTUALLY', 'MEND', 'THE', "CONSUMER'S", 'GOOD', 'FAME', 'IT', 'MUST', 'BE', 'AN', 'EXPENDITURE', 'OF', 'SUPERFLUITIES'] +3570-5696-0006-2478: ref=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERYDAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0006-2478: hyp=['AS', 'USED', 'IN', 'THE', 'SPEECH', 'OF', 'EVERY', 'DAY', 'LIFE', 'THE', 'WORD', 'CARRIES', 'AN', 'UNDERTONE', 'OF', 'DEPRECATION'] +3570-5696-0007-2479: ref=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0007-2479: hyp=['THE', 'USE', 'OF', 'THE', 'WORD', 'WASTE', 'AS', 'A', 'TECHNICAL', 'TERM', 'THEREFORE', 'IMPLIES', 'NO', 'DEPRECATION', 'OF', 'THE', 'MOTIVES', 'OR', 'OF', 'THE', 'ENDS', 'SOUGHT', 'BY', 'THE', 'CONSUMER', 'UNDER', 'THIS', 'CANON', 'OF', 'CONSPICUOUS', 'WASTE'] +3570-5696-0008-2480: ref=['BUT', 'IT', 'IS', 'ON', 'OTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTE', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0008-2480: hyp=['BUT', 'IT', 'IS', 'ANOTHER', 'GROUNDS', 'WORTH', 'NOTING', 'THAT', 'THE', 'TERM', 'WASTES', 'IN', 'THE', 'LANGUAGE', 'OF', 'EVERYDAY', 'LIFE', 'IMPLIES', 'DEPRECATION', 'OF', 'WHAT', 'IS', 'CHARACTERIZED', 'AS', 'WASTEFUL'] +3570-5696-0009-2481: ref=['IN', 'STRICT', 'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0009-2481: hyp=['IN', 'STRICT', 'ACCURACY', 'NOTHING', 'SHOULD', 'BE', 'INCLUDED', 'UNDER', 'THE', 'HEAD', 'OF', 'CONSPICUOUS', 'WASTE', 'BUT', 'SUCH', 'EXPENDITURE', 'AS', 'IS', 'INCURRED', 'ON', 'THE', 'GROUND', 'OF', 'AN', 'INVIDIOUS', 'PECUNIARY', 'COMPARISON'] +3570-5696-0010-2482: ref=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 'UP', 'OF', 'USE', 'AND', 
'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3570-5696-0010-2482: hyp=['AN', 'ARTICLE', 'MAY', 'BE', 'USEFUL', 'AND', 'WASTEFUL', 'BOTH', 'AND', 'ITS', 'UTILITY', 'TO', 'THE', 'CONSUMER', 'MAY', 'BE', 'MADE', 'UP', 'OF', 'USE', 'AND', 'WASTE', 'IN', 'THE', 'MOST', 'VARYING', 'PROPORTIONS'] +3575-170457-0000-369: ref=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0000-369: hyp=['AND', 'OFTEN', 'HAS', 'MY', 'MOTHER', 'SAID', 'WHILE', 'ON', 'HER', 'LAP', 'I', 'LAID', 'MY', 'HEAD', 'SHE', 'FEARED', 'FOR', 'TIME', 'I', 'WAS', 'NOT', 'MADE', 'BUT', 'FOR', 'ETERNITY'] +3575-170457-0001-370: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0001-370: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DENIED', 'EACH', "OTHER'S", 'SOCIETY'] +3575-170457-0002-371: ref=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0002-371: hyp=['WHY', 'ARE', 'WE', 'TO', 'BE', 'DIVIDED'] +3575-170457-0003-372: ref=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'IN', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0003-372: hyp=['SURELY', 'IT', 'MUST', 'BE', 'BECAUSE', 'WE', 'ARE', 'IN', 'DANGER', 'OF', 'LOVING', 'EACH', 'OTHER', 'TOO', 'WELL', 'OF', 'LOSING', 'SIGHT', 'OF', 'THE', 'CREATOR', 'AND', 'IDOLATRY', 'OF', 'THE', 'CREATURE'] +3575-170457-0004-373: ref=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0004-373: hyp=['WE', 'USED', 'TO', 'DISPUTE', 'ABOUT', 'POLITICS', 'AND', 'RELIGION'] +3575-170457-0005-374: ref=['SHE', 'A', 'TORY', 'AND', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DISSENT', 'AND', 'RADICALISM'] +3575-170457-0005-374: hyp=['SHE', 'ATTORIAN', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'ALWAYS', 'IN', 'A', 'MINORITY', 'OF', 'ONE', 'IN', 'OUR', 'HOUSE', 'OF', 'VIOLENT', 'DESCENT', 'AND', 'RADICALISM'] +3575-170457-0006-375: ref=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0006-375: hyp=['HER', 'FEEBLE', 'HEALTH', 'GAVE', 'HER', 'HER', 'YIELDING', 'MANNER', 'FOR', 'SHE', 'COULD', 'NEVER', 'OPPOSE', 'ANY', 'ONE', 'WITHOUT', 'GATHERING', 'UP', 'ALL', 'HER', 'STRENGTH', 'FOR', 'THE', 'STRUGGLE'] +3575-170457-0007-376: ref=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0007-376: hyp=['HE', 'SPOKE', 'FRENCH', 'PERFECTLY', 'I', 'HAVE', 'BEEN', 'TOLD', 'WHEN', 'NEED', 'WAS', 'BUT', 'DELIGHTED', 'USUALLY', 'IN', 'TALKING', 'THE', 'BROADEST', 'YORKSHIRE'] +3575-170457-0008-377: ref=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0008-377: hyp=['AND', 'SO', 'LIFE', 'AND', 'DEATH', 'HAVE', 'DISPERSED', 'THE', 'CIRCLE', 'OF', 'VIOLENT', 'RADICALS', 'AND', 'DISSENTERS', 'INTO', 'WHICH', 
'TWENTY', 'YEARS', 'AGO', 'THE', 'LITTLE', 'QUIET', 'RESOLUTE', "CLERGYMAN'S", 'DAUGHTER', 'WAS', 'RECEIVED', 'AND', 'BY', 'WHOM', 'SHE', 'WAS', 'TRULY', 'LOVED', 'AND', 'HONOURED'] +3575-170457-0009-378: ref=['JANUARY', 'AND', 'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SOUTHEY'] +3575-170457-0009-378: hyp=['JANUARY', 'AND', 'FEBRUARY', 'OF', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'HAD', 'PASSED', 'AWAY', 'AND', 'STILL', 'THERE', 'WAS', 'NO', 'REPLY', 'FROM', 'SALVI'] +3575-170457-0010-379: ref=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0010-379: hyp=['I', 'AM', 'NOT', 'DEPRECIATING', 'IT', 'WHEN', 'I', 'SAY', 'THAT', 'IN', 'THESE', 'TIMES', 'IT', 'IS', 'NOT', 'RARE'] +3575-170457-0011-380: ref=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0011-380: hyp=['BUT', 'IT', 'IS', 'NOT', 'WITH', 'A', 'VIEW', 'TO', 'DISTINCTION', 'THAT', 'YOU', 'SHOULD', 'CULTIVATE', 'THIS', 'TALENT', 'IF', 'YOU', 'CONSULT', 'YOUR', 'OWN', 'HAPPINESS'] +3575-170457-0012-381: ref=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0012-381: hyp=['YOU', 'WILL', 'SAY', 'THAT', 'A', 'WOMAN', 'HAS', 'NO', 'NEED', 'OF', 'SUCH', 'A', 'CAUTION', 'THERE', 'CAN', 'BE', 'NO', 'PERIL', 'IN', 'IT', 'FOR', 'HER'] +3575-170457-0013-382: ref=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0013-382: hyp=['THE', 'MORE', 'SHE', 'IS', 'ENGAGED', 'IN', 'HER', 'PROPER', 'DUTIES', 'THE', 'LESS', 'LEISURE', 'WILL', 'SHE', 'HAVE', 'FOR', 'IT', 'EVEN', 'AS', 'AN', 'ACCOMPLISHMENT', 'AND', 'A', 'RECREATION'] +3575-170457-0014-383: ref=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0014-383: hyp=['TO', 'THOSE', 'DUTIES', 'YOU', 'HAVE', 'NOT', 'YET', 'BEEN', 'CALLED', 'AND', 'WHEN', 'YOU', 'ARE', 'YOU', 'WILL', 'BE', 'LESS', 'EAGER', 'FOR', 'CELEBRITY'] +3575-170457-0015-384: ref=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0015-384: hyp=['BUT', 'DO', 'NOT', 'SUPPOSE', 'THAT', 'I', 'DISPARAGE', 'THE', 'GIFT', 'WHICH', 'YOU', 'POSSESS', 'NOR', 'THAT', 'I', 'WOULD', 'DISCOURAGE', 'YOU', 'FROM', 'EXERCISING', 'IT', 'I', 'ONLY', 'EXHORT', 'YOU', 'SO', 'TO', 'THINK', 'OF', 'IT', 'AND', 'SO', 'TO', 'USE', 'IT', 'AS', 'TO', 'RENDER', 'IT', 'CONDUCIVE', 'TO', 'YOUR', 'OWN', 'PERMANENT', 'GOOD'] +3575-170457-0016-385: ref=['FAREWELL', 'MADAM'] +3575-170457-0016-385: hyp=['FAREWELL', 'MADAM'] +3575-170457-0017-386: ref=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 
'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SOUTHEY'] +3575-170457-0017-386: hyp=['THOUGH', 'I', 'MAY', 'BE', 'BUT', 'AN', 'UNGRACIOUS', 'ADVISER', 'YOU', 'WILL', 'ALLOW', 'ME', 'THEREFORE', 'TO', 'SUBSCRIBE', 'MYSELF', 'WITH', 'THE', 'BEST', 'WISHES', 'FOR', 'YOUR', 'HAPPINESS', 'HERE', 'AND', 'HEREAFTER', 'YOUR', 'TRUE', 'FRIEND', 'ROBERT', 'SELVEY'] +3575-170457-0018-387: ref=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0018-387: hyp=['SIR', 'MARCH', 'SIXTEENTH'] +3575-170457-0019-388: ref=['I', 'HAD', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDERATE', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0019-388: hyp=['I', 'HAVE', 'NOT', 'VENTURED', 'TO', 'HOPE', 'FOR', 'SUCH', 'A', 'REPLY', 'SO', 'CONSIDER', 'IT', 'IN', 'ITS', 'TONE', 'SO', 'NOBLE', 'IN', 'ITS', 'SPIRIT'] +3575-170457-0020-389: ref=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0020-389: hyp=['I', 'KNOW', 'THE', 'FIRST', 'LETTER', 'I', 'WROTE', 'TO', 'YOU', 'WAS', 'ALL', 'SENSELESS', 'TRASH', 'FROM', 'BEGINNING', 'TO', 'END', 'BUT', 'I', 'AM', 'NOT', 'ALTOGETHER', 'THE', 'IDLE', 'DREAMING', 'BEING', 'IT', 'WOULD', 'SEEM', 'TO', 'DENOTE'] +3575-170457-0021-390: ref=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0021-390: hyp=['I', 'THOUGHT', 'IT', 'THEREFORE', 'MY', 'DUTY', 'WHEN', 'I', 'LEFT', 'SCHOOL', 'TO', 'BECOME', 'A', 'GOVERNESS'] +3575-170457-0022-391: ref=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0022-391: hyp=['IN', 'THE', 'EVENINGS', 'I', 'CONFESS', 'I', 'DO', 'THINK', 'BUT', 'I', 'NEVER', 'TROUBLE', 'ANY', 'ONE', 'ELSE', 'WITH', 'MY', 'THOUGHTS'] +3575-170457-0023-392: ref=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0023-392: hyp=['I', 'CAREFULLY', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PREOCCUPATION', 'AND', 'ECCENTRICITY', 'WHICH', 'MIGHT', 'LEAD', 'THOSE', 'I', 'LIVE', 'AMONGST', 'TO', 'SUSPECT', 'THE', 'NATURE', 'OF', 'MY', 'PURSUITS'] +3575-170457-0024-393: ref=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'OR', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 'PRIVATION'] +3575-170457-0024-393: hyp=['I', "DON'T", 'ALWAYS', 'SUCCEED', 'FOR', 'SOMETIMES', 'WHEN', "I'M", 'TEACHING', 'OR', 'SEWING', 'I', 'WOULD', 'RATHER', 'BE', 'READING', 'A', 'WRITING', 'BUT', 'I', 'TRY', 'TO', 'DENY', 'MYSELF', 'AND', 'MY', "FATHER'S", 'APPROBATION', 'AMPLY', 'REWARDED', 'ME', 'FOR', 'THE', 'PRIVATION'] +3575-170457-0025-394: ref=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0025-394: hyp=['AGAIN', 'I', 'THANK', 'YOU', 'THIS', 'INCIDENT', 'I', 'SUPPOSE', 'WILL', 'BE', 'RENEWED', 'NO', 
'MORE', 'IF', 'I', 'LIVE', 'TO', 'BE', 'AN', 'OLD', 'WOMAN', 'I', 'SHALL', 'REMEMBER', 'IT', 'THIRTY', 'YEARS', 'HENCE', 'AS', 'A', 'BRIGHT', 'DREAM'] +3575-170457-0026-395: ref=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED', 'C', 'B'] +3575-170457-0026-395: hyp=['P', 'S', 'PRAY', 'SIR', 'EXCUSE', 'ME', 'FOR', 'WRITING', 'TO', 'YOU', 'A', 'SECOND', 'TIME', 'I', 'COULD', 'NOT', 'HELP', 'WRITING', 'PARTLY', 'TO', 'TELL', 'YOU', 'HOW', 'THANKFUL', 'I', 'AM', 'FOR', 'YOUR', 'KINDNESS', 'AND', 'PARTLY', 'TO', 'LET', 'YOU', 'KNOW', 'THAT', 'YOUR', 'ADVICE', 'SHALL', 'NOT', 'BE', 'WASTED', 'HOWEVER', 'SORROWFULLY', 'AND', 'RELUCTANTLY', 'IT', 'MAY', 'BE', 'AT', 'FIRST', 'FOLLOWED'] +3575-170457-0027-396: ref=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', "SOUTHEY'S", 'REPLY'] +3575-170457-0027-396: hyp=['I', 'CANNOT', 'DENY', 'MYSELF', 'THE', 'GRATIFICATION', 'OF', 'INSERTING', 'SO', 'THESE', 'REPLY'] +3575-170457-0028-397: ref=['KESWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAM'] +3575-170457-0028-397: hyp=['KEZWICK', 'MARCH', 'TWENTY', 'SECOND', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'DEAR', 'MADAME'] +3575-170457-0029-398: ref=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0029-398: hyp=['YOUR', 'LETTER', 'HAS', 'GIVEN', 'ME', 'GREAT', 'PLEASURE', 'AND', 'I', 'SHOULD', 'NOT', 'FORGIVE', 'MYSELF', 'IF', 'I', 'DID', 'NOT', 'TELL', 'YOU', 'SO'] +3575-170457-0030-399: ref=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0030-399: hyp=['OF', 'THIS', 'SECOND', 'LETTER', 'ALSO', 'SHE', 'SPOKE', 'AND', 'TOLD', 'ME', 'THAT', 'IT', 'CONTAINED', 'AN', 'INVITATION', 'FOR', 'HER', 'TO', 'GO', 'AND', 'SEE', 'THE', 'POET', 'IF', 'EVER', 'SHE', 'VISITED', 'THE', 'LAKES'] +3575-170457-0031-400: ref=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0031-400: hyp=['ON', 'AUGUST', 'TWENTY', 'SEVENTH', 'EIGHTEEN', 'THIRTY', 'SEVEN', 'SHE', 'WRITES'] +3575-170457-0032-401: ref=['COME', 'COME', 'I', 'AM', 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0032-401: hyp=['COME', 'COME', "I'M", 'GETTING', 'REALLY', 'TIRED', 'OF', 'YOUR', 'ABSENCE'] +3575-170457-0033-402: ref=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'ROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISS', 'E', 'IS', 'COME', 'OH', 'DEAR'] +3575-170457-0033-402: hyp=['SATURDAY', 'AFTER', 'SATURDAY', 'COMES', 'AROUND', 'AND', 'I', 'CAN', 'HAVE', 'NO', 'HOPE', 'OF', 'HEARING', 'YOUR', 'KNOCK', 'AT', 'THE', 'DOOR', 'AND', 'THEN', 'BEING', 'TOLD', 'THAT', 'MISS', 'EA', 'IS', 'COME', 'OH', 'DEAR'] +3575-170457-0034-403: ref=['IN', 'THIS', 'MONOTONOUS', 'LIFE', 'OF', 'MINE', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0034-403: hyp=['IN', 'THIS', 
'MONOTONOUS', 'LIFE', 'OF', 'MIND', 'THAT', 'WAS', 'A', 'PLEASANT', 'EVENT'] +3575-170457-0035-404: ref=['I', 'WISH', 'IT', 'WOULD', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0035-404: hyp=['I', 'WISH', 'YOU', 'WERE', 'RECUR', 'AGAIN', 'BUT', 'IT', 'WILL', 'TAKE', 'TWO', 'OR', 'THREE', 'INTERVIEWS', 'BEFORE', 'THE', 'STIFFNESS', 'THE', 'ESTRANGEMENT', 'OF', 'THIS', 'LONG', 'SEPARATION', 'WILL', 'WEAR', 'AWAY'] +3575-170457-0036-405: ref=['MY', 'EYES', 'FILL', 'WITH', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WANDERING', 'IN', 'THOUGHT', 'AND', 'DEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMITTEN', 'AT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'IN', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0036-405: hyp=['MY', 'EYES', 'FELL', 'WITH', 'TEARS', 'WHEN', 'I', 'CONTRAST', 'THE', 'BLISS', 'OF', 'SUCH', 'A', 'STATE', 'BRIGHTENED', 'BY', 'HOPES', 'OF', 'THE', 'FUTURE', 'WITH', 'THE', 'MELANCHOLY', 'STATE', 'I', 'NOW', 'LIVE', 'IN', 'UNCERTAIN', 'THAT', 'I', 'EVER', 'FELT', 'TRUE', 'CONTRITION', 'WANDERING', 'IN', 'THOUGHT', 'INDEED', 'LONGING', 'FOR', 'HOLINESS', 'WHICH', 'I', 'SHALL', 'NEVER', 'NEVER', 'OBTAIN', 'SMITTEN', 'THAT', 'TIMES', 'TO', 'THE', 'HEART', 'WITH', 'THE', 'CONVICTION', 'THAT', 'GHASTLY', 'CALVINISTIC', 'DOCTRINES', 'ARE', 'TRUE', 'DARKENED', 'AND', 'SHORT', 'BY', 'THE', 'VERY', 'SHADOWS', 'OF', 'SPIRITUAL', 'DEATH'] +3575-170457-0037-406: ref=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOTBED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'DIRECTION'] +3575-170457-0037-406: hyp=['IF', 'CHRISTIAN', 'PERFECTION', 'BE', 'NECESSARY', 'TO', 'SALVATION', 'I', 'SHALL', 'NEVER', 'BE', 'SAVED', 'MY', 'HEART', 'IS', 'A', 'VERY', 'HOT', 'BED', 'FOR', 'SINFUL', 'THOUGHTS', 'AND', 'WHEN', 'I', 'DECIDE', 'ON', 'AN', 'ACTION', 'I', 'SCARCELY', 'REMEMBER', 'TO', 'LOOK', 'TO', 'MY', 'REDEEMER', 'FOR', 'A', 'DIRECTION'] +3575-170457-0038-407: ref=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHRISTIAN', 'FAITH', 'MY', 'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0038-407: hyp=['AND', 'MEANTIME', 'I', 'KNOW', 'THE', 'GREATNESS', 'OF', 'JEHOVAH', 'I', 'ACKNOWLEDGE', 'THE', 'PERFECTION', 'OF', 'HIS', 'WORD', 'I', 'ADORE', 'THE', 'PURITY', 'OF', 'THE', 'CHRISTIAN', 'FAITH', 'MY', 'THEORY', 'IS', 'RIGHT', 'MY', 'PRACTICE', 'HORRIBLY', 'WRONG'] +3575-170457-0039-408: ref=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] 
+3575-170457-0039-408: hyp=['THE', 'CHRISTMAS', 'HOLIDAYS', 'CAME', 'AND', 'SHE', 'AND', 'ANNE', 'RETURNED', 'TO', 'THE', 'PARSONAGE', 'AND', 'TO', 'THAT', 'HAPPY', 'HOME', 'CIRCLE', 'IN', 'WHICH', 'ALONE', 'THEIR', 'NATURES', 'EXPANDED', 'AMONGST', 'ALL', 'OTHER', 'PEOPLE', 'THEY', 'SHRIVELLED', 'UP', 'MORE', 'OR', 'LESS'] +3575-170457-0040-409: ref=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0040-409: hyp=['INDEED', 'THERE', 'WERE', 'ONLY', 'ONE', 'OR', 'TWO', 'STRANGERS', 'WHO', 'COULD', 'BE', 'ADMITTED', 'AMONG', 'THE', 'SISTERS', 'WITHOUT', 'PRODUCING', 'THE', 'SAME', 'RESULT'] +3575-170457-0041-410: ref=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'IT', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0041-410: hyp=['SHE', 'WAS', 'GONE', 'OUT', 'INTO', 'THE', 'VILLAGE', 'ON', 'SOME', 'ERRAND', 'WHEN', 'AS', 'SHE', 'WAS', 'DESCENDING', 'THE', 'STEEP', 'STREET', 'HER', 'FOOT', 'SLIPPED', 'ON', 'THE', 'ICE', 'AND', 'SHE', 'FELL', 'HE', 'WAS', 'DARK', 'AND', 'NO', 'ONE', 'SAW', 'HER', 'MISCHANCE', 'TILL', 'AFTER', 'A', 'TIME', 'HER', 'GROANS', 'ATTRACTED', 'THE', 'ATTENTION', 'OF', 'A', 'PASSER', 'BY'] +3575-170457-0042-411: ref=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'OUR', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0042-411: hyp=['UNFORTUNATELY', 'THE', 'FRACTURE', 'COULD', 'NOT', 'BE', 'SET', 'TILL', 'SIX', "O'CLOCK", 'THE', 'NEXT', 'MORNING', 'AS', 'NO', 'SURGEON', 'WAS', 'TO', 'BE', 'HAD', 'BEFORE', 'THAT', 'TIME', 'AND', 'SHE', 'NOW', 'LIES', 'AT', 'HER', 'HOUSE', 'IN', 'A', 'VERY', 'DOUBTFUL', 'AND', 'DANGEROUS', 'STATE'] +3575-170457-0043-412: ref=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'IT', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 'BEST'] +3575-170457-0043-412: hyp=['HOWEVER', 'REMEMBERING', 'WHAT', 'YOU', 'TOLD', 'ME', 'NAMELY', 'THAT', 'YOU', 'HAD', 'COMMENDED', 'THE', 'MATTER', 'TO', 'A', 'HIGHER', 'DECISION', 'THAN', 'OURS', 'AND', 'THAT', 'YOU', 'WERE', 'RESOLVED', 'TO', 'SUBMIT', 'WITH', 'RESIGNATION', 'TO', 'THAT', 'DECISION', 'WHATEVER', 'IT', 'MIGHT', 'BE', 'I', 'HOLD', 'IT', 'MY', 'DUTY', 'TO', 'YIELD', 'ALSO', 'AND', 'TO', 'BE', 'SILENT', 'AND', 'MAY', 'BE', 'ALL', 'FOR', 'THE', 'BEST'] +3575-170457-0044-413: ref=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0044-413: hyp=['AFTER', 'THIS', 'DISAPPOINTMENT', 'I', 'NEVER', 'DARE', 'RECKON', 'WITH', 'CERTAINTY', 'ON', 
'THE', 'ENJOYMENT', 'OF', 'A', 'PLEASURE', 'AGAIN', 'IT', 'SEEMS', 'AS', 'IF', 'SOME', 'FATALITY', 'STOOD', 'BETWEEN', 'YOU', 'AND', 'ME'] +3575-170457-0045-414: ref=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 'FROM', 'THE', 'CONTAMINATION', 'OF', 'TOO', 'INTIMATE', 'SOCIETY'] +3575-170457-0045-414: hyp=['I', 'AM', 'NOT', 'GOOD', 'ENOUGH', 'FOR', 'YOU', 'AND', 'YOU', 'MUST', 'BE', 'KEPT', 'FROM', 'THE', 'CONTAMINATION', 'OF', 'TWO', 'INTIMATE', 'SOCIETY'] +3575-170457-0046-415: ref=['A', 'GOOD', 'NEIGHBOUR', 'OF', 'THE', 'BRONTES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', "DRUGGIST'S", 'SHOP', 'IN', 'HAWORTH', 'AND', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRESS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLDS', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0046-415: hyp=['A', 'GOOD', 'NEIGHBOR', 'OF', 'THE', 'BRONTES', 'A', 'CLEVER', 'INTELLIGENT', 'YORKSHIRE', 'WOMAN', 'WHO', 'KEEPS', 'A', 'DRUGGIST', 'SHOP', 'IN', 'HAWORTH', 'FROM', 'HER', 'OCCUPATION', 'HER', 'EXPERIENCE', 'AND', 'EXCELLENT', 'SENSE', 'HOLDS', 'THE', 'POSITION', 'OF', 'VILLAGE', 'DOCTRIS', 'AND', 'NURSE', 'AND', 'AS', 'SUCH', 'HAS', 'BEEN', 'A', 'FRIEND', 'IN', 'MANY', 'A', 'TIME', 'OF', 'TRIAL', 'AND', 'SICKNESS', 'AND', 'DEATH', 'IN', 'THE', 'HOUSEHOLD', 'ROUND', 'TOLD', 'ME', 'A', 'CHARACTERISTIC', 'LITTLE', 'INCIDENT', 'CONNECTED', 'WITH', "TABBY'S", 'FRACTURED', 'LEG'] +3575-170457-0047-416: ref=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0047-416: hyp=['TABBY', 'HAD', 'LIVED', 'WITH', 'THEM', 'FOR', 'TEN', 'OR', 'TWELVE', 'YEARS', 'AND', 'WAS', 'AS', 'CHARLOTTE', 'EXPRESSED', 'IT', 'ONE', 'OF', 'THE', 'FAMILY'] +3575-170457-0048-417: ref=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0048-417: hyp=['HE', 'REFUSED', 'AT', 'FIRST', 'TO', 'LISTEN', 'TO', 'THE', 'CAREFUL', 'ADVICE', 'IT', 'WAS', 'REPUGNANT', 'TO', 'HIS', 'LIBERAL', 'NATURE'] +3575-170457-0049-418: ref=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0049-418: hyp=['THIS', 'DECISION', 'WAS', 'COMMUNICATED', 'TO', 'THE', 'GIRLS'] +3575-170457-0050-419: ref=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'AND', 'AGE'] +3575-170457-0050-419: hyp=['TABBY', 'HAD', 'TENDED', 'THEM', 'IN', 'THEIR', 'CHILDHOOD', 'THEY', 'AND', 'NONE', 'OTHER', 'SHOULD', 'TEND', 'HER', 'IN', 'HER', 'INFIRMITY', 'IN', 'AGE'] +3575-170457-0051-420: ref=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0051-420: hyp=['AT', 'TEA', 'TIME', 'THEY', 'WERE', 'SAD', 'AND', 'SILENT', 'AND', 'THE', 'MEAL', 'WENT', 'AWAY', 'UNTOUCHED', 'BY', 'ANY', 'OF', 'THE', 'THREE'] +3575-170457-0052-421: ref=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0052-421: hyp=['SHE', 'HAD', 'ANOTHER', 'WEIGHT', 'ON', 'HER', 
'MIND', 'THIS', 'CHRISTMAS'] +3575-170457-0053-422: ref=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'THE', 'JEALOUS', 'VIGILANCE', 'OF', 'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0053-422: hyp=['BUT', 'ANNE', 'HAD', 'BEGUN', 'TO', 'SUFFER', 'JUST', 'BEFORE', 'THE', 'HOLIDAYS', 'AND', 'CHARLOTTE', 'WATCHED', 'OVER', 'HER', 'YOUNGER', 'SISTERS', 'WITH', 'A', 'JEALOUS', 'VIGILANCE', 'OF', 'SOME', 'WILD', 'CREATURE', 'THAT', 'CHANGES', 'HER', 'VERY', 'NATURE', 'IF', 'DANGER', 'THREATENS', 'HER', 'YOUNG'] +3575-170457-0054-423: ref=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0054-423: hyp=['STUNG', 'BY', 'ANXIETY', 'FOR', 'THIS', 'LITTLE', 'SISTER', 'SHE', 'UPBRAIDED', 'MISS', 'W', 'FOR', 'HER', 'FANCIED', 'INDIFFERENCE', 'TO', "ANNE'S", 'STATE', 'OF', 'HEALTH'] +3575-170457-0055-424: ref=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0055-424: hyp=['STILL', 'HER', 'HEART', 'HAD', 'RECEIVED', 'A', 'SHOCK', 'IN', 'THE', 'PERCEPTION', 'OF', "ANNE'S", 'DELICACY', 'AND', 'ALL', 'THESE', 'HOLIDAYS', 'SHE', 'WATCHED', 'OVER', 'HER', 'WITH', 'THE', 'LONGING', 'FOND', 'ANXIETY', 'WHICH', 'IS', 'SO', 'FULL', 'OF', 'SUDDEN', 'PANGS', 'OF', 'FEAR'] +3575-170457-0056-425: ref=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3575-170457-0056-425: hyp=['I', 'DOUBT', 'WHETHER', 'BRANWELL', 'WAS', 'MAINTAINING', 'HIMSELF', 'AT', 'THIS', 'TIME'] +3729-6852-0000-1660: ref=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SILVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0000-1660: hyp=['TO', 'CELEBRATE', 'THE', 'ARRIVAL', 'OF', 'HER', 'SON', 'SYLVIA', 'GAVE', 'A', 'SPLENDID', 'SUPPER', 'TO', 'WHICH', 'SHE', 'HAD', 'INVITED', 'ALL', 'HER', 'RELATIVES', 'AND', 'IT', 'WAS', 'A', 'GOOD', 'OPPORTUNITY', 'FOR', 'ME', 'TO', 'MAKE', 'THEIR', 'ACQUAINTANCE'] +3729-6852-0001-1661: ref=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 'AN', 'INSECT'] +3729-6852-0001-1661: hyp=['WITHOUT', 'SAYING', 'IT', 'POSITIVELY', 'SHE', 'MADE', 'ME', 'UNDERSTAND', 'THAT', 'BEING', 'HERSELF', 'AN', 'ILLUSTRIOUS', 'MEMBER', 'OF', 'THE', 'REPUBLIC', 'OF', 'LETTERS', 'SHE', 'WAS', 'WELL', 'AWARE', 'THAT', 'SHE', 'WAS', 'SPEAKING', 'TO', 'AN', 'INSECT'] +3729-6852-0002-1662: ref=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBE', 'CONTI', 'AND', 'I', 'HAD', 'OCCASION', 'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0002-1662: hyp=['IN', 'ORDER', 'TO', 'PLEASE', 'HER', 'I', 'SPOKE', 'TO', 'HER', 'OF', 'THE', 'ABBEY', 'KANTI', 'AND', 'I', 'HAD', 'OCCASION', 
'TO', 'QUOTE', 'TWO', 'LINES', 'OF', 'THAT', 'PROFOUND', 'WRITER'] +3729-6852-0003-1663: ref=['MADAM', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCEVRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SCEURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0003-1663: hyp=['MADAME', 'CORRECTED', 'ME', 'WITH', 'A', 'PATRONIZING', 'AIR', 'FOR', 'MY', 'PRONUNCIATION', 'OF', 'THE', 'WORD', 'SCAFFRA', 'WHICH', 'MEANS', 'DIVIDED', 'SAYING', 'THAT', 'IT', 'OUGHT', 'TO', 'BE', 'PRONOUNCED', 'SKURA', 'AND', 'SHE', 'ADDED', 'THAT', 'I', 'OUGHT', 'TO', 'BE', 'VERY', 'GLAD', 'TO', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'ON', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'ARRIVAL', 'IN', 'PARIS', 'TELLING', 'ME', 'THAT', 'IT', 'WOULD', 'BE', 'AN', 'IMPORTANT', 'DAY', 'IN', 'MY', 'LIFE'] +3729-6852-0004-1664: ref=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERYONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0004-1664: hyp=['HER', 'FACE', 'WAS', 'AN', 'ENIGMA', 'FOR', 'IT', 'INSPIRED', 'EVERY', 'ONE', 'WITH', 'THE', 'WARMEST', 'SYMPATHY', 'AND', 'YET', 'IF', 'YOU', 'EXAMINED', 'IT', 'ATTENTIVELY', 'THERE', 'WAS', 'NOT', 'ONE', 'BEAUTIFUL', 'FEATURE', 'SHE', 'COULD', 'NOT', 'BE', 'CALLED', 'HANDSOME', 'BUT', 'NO', 'ONE', 'COULD', 'HAVE', 'THOUGHT', 'HER', 'UGLY'] +3729-6852-0005-1665: ref=['SILVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARIVAUX', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0005-1665: hyp=['SYLVIA', 'WAS', 'THE', 'ADORATION', 'OF', 'FRANCE', 'AND', 'HER', 'TALENT', 'WAS', 'THE', 'REAL', 'SUPPORT', 'OF', 'ALL', 'THE', 'COMEDIES', 'WHICH', 'THE', 'GREATEST', 'AUTHORS', 'WROTE', 'FOR', 'HER', 'ESPECIALLY', 'OF', 'THE', 'PLAYS', 'OF', 'MARIVAUX', 'FOR', 'WITHOUT', 'HER', 'HIS', 'COMEDIES', 'WOULD', 'NEVER', 'HAVE', 'GONE', 'TO', 'POSTERITY'] +3729-6852-0006-1666: ref=['SILVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 'BY', 'THEIR', 'VIRTUE'] +3729-6852-0006-1666: hyp=['SYLVIA', 'DID', 'NOT', 'THINK', 'THAT', 'HER', 'GOOD', 'CONDUCT', 'WAS', 'A', 'MERIT', 'FOR', 'SHE', 'KNEW', 'THAT', 'SHE', 'WAS', 'VIRTUOUS', 'ONLY', 'BECAUSE', 'HER', 'SELF', 'LOVE', 'COMPELLED', 'HER', 'TO', 'BE', 'SO', 'AND', 'SHE', 'NEVER', 'EXHIBITED', 'ANY', 'PRIDE', 'OR', 'ASSUMED', 'ANY', 'SUPERIORITY', 'TOWARDS', 'HER', 
'THEATRICAL', 'SISTERS', 'ALTHOUGH', 'SATISFIED', 'TO', 'SHINE', 'BY', 'THEIR', 'TALENT', 'OR', 'THEIR', 'BEAUTY', 'THEY', 'CARED', 'LITTLE', 'ABOUT', 'RENDERING', 'THEMSELVES', 'CONSPICUOUS', 'BY', 'THEIR', 'VIRTUE'] +3729-6852-0007-1667: ref=['TWO', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARIVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0007-1667: hyp=['TWO', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'I', 'SAW', 'HER', 'PERFORM', 'THE', 'CHARACTER', 'OF', 'MARIANNE', 'IN', 'THE', 'COMEDY', 'OF', 'MARAVAUX', 'AND', 'IN', 'SPITE', 'OF', 'HER', 'AGE', 'AND', 'DECLINING', 'HEALTH', 'THE', 'ILLUSION', 'WAS', 'COMPLETE'] +3729-6852-0008-1668: ref=['SHE', 'WAS', 'HONOURABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SAUVEUR', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTAIN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'THE', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0008-1668: hyp=['SHE', 'WAS', 'HONOURABLY', 'BURIED', 'IN', 'THE', 'CHURCH', 'OF', 'SAINT', 'SEVERE', 'WITHOUT', 'THE', 'SLIGHTEST', 'OPPOSITION', 'FROM', 'THE', 'VENERABLE', 'PRIEST', 'WHO', 'FAR', 'FROM', 'SHARING', 'THE', 'ANTI', 'CHRISTIAN', 'INTOLERANCY', 'OF', 'THE', 'CLERGY', 'IN', 'GENERAL', 'SAID', 'THAT', 'HER', 'PROFESSION', 'AS', 'AN', 'ACTRESS', 'HAD', 'NOT', 'HINDERED', 'HER', 'FROM', 'BEING', 'A', 'GOOD', 'CHRISTIAN', 'AND', 'THAT', 'THE', 'EARTH', 'WAS', 'A', 'COMMON', 'MOTHER', 'OF', 'ALL', 'HUMAN', 'BEINGS', 'AS', 'JESUS', 'CHRIST', 'HAD', 'BEEN', 'THE', 'SAVIOUR', 'OF', 'ALL', 'MANKIND'] +3729-6852-0009-1669: ref=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SILVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SILVIA', 'DIES'] +3729-6852-0009-1669: hyp=['YOU', 'WILL', 'FORGIVE', 'ME', 'DEAR', 'READER', 'IF', 'I', 'HAVE', 'MADE', 'YOU', 'ATTEND', 'THE', 'FUNERAL', 'OF', 'SYLVIA', 'TEN', 'YEARS', 'BEFORE', 'HER', 'DEATH', 'BELIEVE', 'ME', 'I', 'HAVE', 'NO', 'INTENTION', 'OF', 'PERFORMING', 'A', 'MIRACLE', 'YOU', 'MAY', 'CONSOLE', 'YOURSELF', 'WITH', 'THE', 'IDEA', 'THAT', 'I', 'SHALL', 'SPARE', 'YOU', 'THAT', 'UNPLEASANT', 'TASK', 'WHEN', 'POOR', 'SYLVIA', 'DIES'] +3729-6852-0010-1670: ref=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0010-1670: hyp=['I', 'NEVER', 'HAD', 'ANY', 'FAMILY'] +3729-6852-0011-1671: ref=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0011-1671: hyp=['I', 'HAD', 'A', 'NAME', 'I', 'BELIEVE', 'IN', 'MY', 'YOUNG', 'DAYS', 'BUT', 'I', 'HAVE', 'FORGOTTEN', 'IT', 'SINCE', 'I', 'HAVE', 'BEEN', 'IN', 'SERVICE'] +3729-6852-0012-1672: ref=['I', 'SHALL', 'CALL', 'YOU', 'ESPRIT'] +3729-6852-0012-1672: hyp=['I', 'SHALL', 'CALL', 'YOU', 
'A', 'SPREE'] +3729-6852-0013-1673: ref=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0013-1673: hyp=['YOU', 'DO', 'ME', 'A', 'GREAT', 'HONOUR'] +3729-6852-0014-1674: ref=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 'SIR'] +3729-6852-0014-1674: hyp=['HERE', 'GO', 'AND', 'GET', 'ME', 'CHANGE', 'FOR', 'A', 'LOUIS', 'I', 'HAVE', 'IT', 'SIR'] +3729-6852-0015-1675: ref=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0015-1675: hyp=['AT', 'YOUR', 'SERVICE', 'SIR'] +3729-6852-0016-1676: ref=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'ENQUIRIES'] +3729-6852-0016-1676: hyp=['MADAME', 'QUINSON', 'BESIDES', 'CAN', 'ANSWER', 'YOUR', 'INQUIRIES'] +3729-6852-0017-1677: ref=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOU', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0017-1677: hyp=['I', 'SEE', 'A', 'QUANTITY', 'OF', 'CHAIRS', 'FOR', 'HIRE', 'AT', 'THE', 'RATE', 'OF', 'ONE', 'SOUS', 'MEN', 'READING', 'THE', 'NEWSPAPER', 'UNDER', 'THE', 'SHADE', 'OF', 'THE', 'TREES', 'GIRLS', 'AND', 'MEN', 'BREAKFASTING', 'EITHER', 'ALONE', 'OR', 'IN', 'COMPANY', 'WAITERS', 'WHO', 'WERE', 'RAPIDLY', 'GOING', 'UP', 'AND', 'DOWN', 'A', 'NARROW', 'STAIRCASE', 'HIDDEN', 'UNDER', 'THE', 'FOLIAGE'] +3729-6852-0018-1678: ref=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'ENQUIRE', 'MY', 'WISHES'] +3729-6852-0018-1678: hyp=['I', 'SIT', 'DOWN', 'AT', 'A', 'SMALL', 'TABLE', 'A', 'WAITER', 'COMES', 'IMMEDIATELY', 'TO', 'INQUIRE', 'MY', 'WISHES'] +3729-6852-0019-1679: ref=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0019-1679: hyp=['I', 'TELL', 'HIM', 'TO', 'GIVE', 'ME', 'SOME', 'COFFEE', 'IF', 'IT', 'IS', 'GOOD'] +3729-6852-0020-1680: ref=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'OF', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0020-1680: hyp=['THEN', 'TURNING', 'TOWARDS', 'ME', 'HE', 'SAYS', 'THAT', 'I', 'LOOK', 'LIKE', 'A', 'FOREIGNER', 'AND', 'WHEN', 'I', 'SAY', 'THAT', 'I', 'AM', 'AN', 'ITALIAN', 'HE', 'BEGINS', 'TO', 'SPEAK', 'TO', 'ME', 'OF', 'THE', 'COURT', 'THE', 'CITY', 'OF', 'THE', 'THEATRES', 'AND', 'AT', 'LAST', 'HE', 'OFFERS', 'TO', 'ACCOMPANY', 'ME', 'EVERYWHERE'] +3729-6852-0021-1681: ref=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0021-1681: hyp=['I', 'THANK', 'HIM', 'AND', 'TAKE', 'MY', 'LEAVE'] +3729-6852-0022-1682: ref=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 'WHY'] +3729-6852-0022-1682: hyp=['I', 'ADDRESS', 'HIM', 'IN', 'ITALIAN', 'AND', 'HE', 'ANSWERS', 'VERY', 'WITTILY', 'BUT', 'HIS', 'WAY', 'OF', 'SPEAKING', 'MAKES', 'ME', 'SMILE', 'AND', 'I', 'TELL', 'HIM', 'WHY'] +3729-6852-0023-1683: ref=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 
'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0023-1683: hyp=['MY', 'REMARK', 'PLEASES', 'HIM', 'BUT', 'I', 'SOON', 'PROVE', 'TO', 'HIM', 'THAT', 'IT', 'IS', 'NOT', 'THE', 'RIGHT', 'WAY', 'TO', 'SPEAK', 'HOWEVER', 'PERFECT', 'MAY', 'HAVE', 'BEEN', 'THE', 'LANGUAGE', 'OF', 'THAT', 'ANCIENT', 'WRITER'] +3729-6852-0024-1684: ref=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0024-1684: hyp=['I', 'SEE', 'A', 'CROWD', 'IN', 'ONE', 'CORNER', 'OF', 'THE', 'GARDEN', 'EVERYBODY', 'STANDING', 'STILL', 'AND', 'LOOKING', 'UP'] +3729-6852-0025-1685: ref=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0025-1685: hyp=['IS', 'THERE', 'NOT', 'A', 'MERIDIAN', 'EVERYWHERE'] +3729-6852-0026-1686: ref=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0026-1686: hyp=['YES', 'BUT', 'THE', 'MERIDIAN', 'OF', 'THE', 'PALAIS', 'ROYAL', 'IS', 'THE', 'MOST', 'EXACT'] +3729-6852-0027-1687: ref=['THAT', 'IS', 'TRUE', 'BADAUDERIE'] +3729-6852-0027-1687: hyp=['THAT', 'IS', 'TRUE', "BADR'D", 'GREE'] +3729-6852-0028-1688: ref=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0028-1688: hyp=['ALL', 'THESE', 'HONEST', 'PERSONS', 'ARE', 'WAITING', 'THEIR', 'TURN', 'TO', 'GET', 'THEIR', 'SNUFF', 'BOXES', 'FILLED'] +3729-6852-0029-1689: ref=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'CIVET', 'CAT'] +3729-6852-0029-1689: hyp=['IT', 'IS', 'SOLD', 'EVERYWHERE', 'BUT', 'FOR', 'THE', 'LAST', 'THREE', 'WEEKS', 'NOBODY', 'WILL', 'USE', 'ANY', 'SNUFF', 'BUT', 'THAT', 'SOLD', 'AT', 'THE', 'SAVEETTE', 'CAT'] +3729-6852-0030-1690: ref=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0030-1690: hyp=['IS', 'IT', 'BETTER', 'THAN', 'ANYWHERE', 'ELSE'] +3729-6852-0031-1691: ref=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0031-1691: hyp=['BUT', 'HOW', 'DID', 'SHE', 'MANAGE', 'TO', 'RENDER', 'IT', 'SO', 'FASHIONABLE'] +3729-6852-0032-1692: ref=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0032-1692: hyp=['SIMPLY', 'BY', 'STOPPING', 'HER', 'CARRIAGE', 'TWO', 'OR', 'THREE', 'TIMES', 'BEFORE', 'THE', 'SHOP', 'TO', 'HAVE', 'HER', 'SNUFF', 'BOX', 'FILLED', 'AND', 'BY', 'SAYING', 'ALOUD', 'TO', 'THE', 'YOUNG', 'GIRL', 'WHO', 'HANDED', 'BACK', 'THE', 'BOX', 'THAT', 'HER', 'SNUFF', 'WAS', 'THE', 'VERY', 'BEST', 'IN', 'PARIS'] +3729-6852-0033-1693: ref=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 'OR', 'A', 'FALSE', 'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 
'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0033-1693: hyp=['YOU', 'ARE', 'NOW', 'IN', 'THE', 'ONLY', 'COUNTRY', 'IN', 'THE', 'WORLD', 'WHERE', 'WIT', 'CAN', 'MAKE', 'A', 'FORTUNE', 'BY', 'SELLING', 'EITHER', 'A', 'GENUINE', 'OR', 'A', 'FALSE', 'ARTICLE', 'IN', 'THE', 'FIRST', 'CASE', 'IT', 'RECEIVES', 'THE', 'WELCOME', 'OF', 'INTELLIGENT', 'AND', 'TALENTED', 'PEOPLE', 'AND', 'IN', 'THE', 'SECOND', 'FOOLS', 'ARE', 'ALWAYS', 'READY', 'TO', 'REWARD', 'IT', 'FOR', 'SILLINESS', 'IS', 'TRULY', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'PEOPLE', 'HERE', 'AND', 'HOWEVER', 'WONDERFUL', 'IT', 'MAY', 'APPEAR', 'SILLINESS', 'IS', 'THE', 'DAUGHTER', 'OF', 'WIT'] +3729-6852-0034-1694: ref=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0034-1694: hyp=['LET', 'A', 'MAN', 'RUN', 'AND', 'EVERYBODY', 'WILL', 'RUN', 'AFTER', 'HIM', 'THE', 'CROWD', 'WILL', 'NOT', 'STOP', 'UNLESS', 'THE', 'MAN', 'IS', 'PROVED', 'TO', 'BE', 'MAD', 'BUT', 'TO', 'PROVE', 'IT', 'IS', 'INDEED', 'A', 'DIFFICULT', 'TASK', 'BECAUSE', 'WE', 'HAVE', 'A', 'CROWD', 'OF', 'MEN', 'WHO', 'MAD', 'FROM', 'THEIR', 'BIRTH', 'ARE', 'STILL', 'CONSIDERED', 'WISE'] +3729-6852-0035-1695: ref=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0035-1695: hyp=['IT', 'SEEMS', 'TO', 'ME', 'I', 'REPLIED', 'THAT', 'SUCH', 'APPROVAL', 'SUCH', 'RATIFICATION', 'OF', 'THE', 'OPINION', 'EXPRESSED', 'BY', 'THE', 'KING', 'THE', 'PRINCES', 'OF', 'THE', 'BLOOD', 'ET', 'CETERA', 'IS', 'RATHER', 'A', 'PROOF', 'OF', 'THE', 'AFFECTION', 'FELT', 'FOR', 'THEM', 'BY', 'THE', 'NATION', 'FOR', 'THE', 'FRENCH', 'CARRY', 'THAT', 'AFFECTION', 'TO', 'SUCH', 'AN', 'EXTENT', 'THAT', 'THEY', 'BELIEVE', 'THEM', 'INFALLIBLE'] +3729-6852-0036-1696: ref=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LE', 'ROI'] +3729-6852-0036-1696: hyp=['WHEN', 'THE', 'KING', 'COMES', 'TO', 'PARIS', 'EVERYBODY', 'CALLS', 'OUT', 'VIVE', 'LAUROI'] +3729-6852-0037-1697: ref=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0037-1697: hyp=['SHE', 'INTRODUCED', 'ME', 'TO', 'ALL', 'HER', 'GUESTS', 'AND', 'GAVE', 'ME', 'SOME', 'PARTICULARS', 'RESPECTING', 'EVERY', 'ONE', 'OF', 'THEM'] +3729-6852-0038-1698: ref=['WHAT', 'SIR', 'I', 'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0038-1698: hyp=['WHAT', 'SIR', 'I', 'SAID', 'TO', 'HIM', 'AM', 'I', 'FORTUNATE', 'ENOUGH', 'TO', 'SEE', 'YOU'] +3729-6852-0039-1699: ref=['HE', 'HIMSELF', 'RECITED', 'THE', 'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0039-1699: hyp=['HE', 'HIMSELF', 'RECITED', 'THE', 
'SAME', 'PASSAGE', 'IN', 'FRENCH', 'AND', 'POLITELY', 'POINTED', 'OUT', 'THE', 'PARTS', 'IN', 'WHICH', 'HE', 'THOUGHT', 'THAT', 'I', 'HAD', 'IMPROVED', 'ON', 'THE', 'ORIGINAL'] +3729-6852-0040-1700: ref=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0040-1700: hyp=['FOR', 'THE', 'FIRST', 'DAY', 'SIR', 'I', 'THINK', 'THAT', 'WHAT', 'YOU', 'HAVE', 'DONE', 'GIVES', 'GREAT', 'HOPES', 'OF', 'YOU', 'AND', 'WITHOUT', 'ANY', 'DOUBT', 'YOU', 'WILL', 'MAKE', 'RAPID', 'PROGRESS'] +3729-6852-0041-1701: ref=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0041-1701: hyp=['I', 'BELIEVE', 'IT', 'SIR', 'AND', 'THAT', 'IS', 'WHAT', 'I', 'FEAR', 'THEREFORE', 'THE', 'PRINCIPAL', 'OBJECT', 'OF', 'MY', 'VISIT', 'HERE', 'IS', 'TO', 'DEVOTE', 'MYSELF', 'ENTIRELY', 'TO', 'THE', 'STUDY', 'OF', 'THE', 'FRENCH', 'LANGUAGE'] +3729-6852-0042-1702: ref=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0042-1702: hyp=['I', 'AM', 'A', 'VERY', 'UNPLEASANT', 'PUPIL', 'ALWAYS', 'ASKING', 'QUESTIONS', 'CURIOUS', 'TROUBLESOME', 'INSATIABLE', 'AND', 'EVEN', 'SUPPOSING', 'THAT', 'I', 'COULD', 'MEET', 'WITH', 'THE', 'TEACHER', 'I', 'REQUIRE', 'I', 'AM', 'AFRAID', 'I', 'AM', 'NOT', 'RICH', 'ENOUGH', 'TO', 'PAY', 'HIM'] +3729-6852-0043-1703: ref=['I', 'RESIDE', 'IN', 'THE', 'MARAIS', 'RUE', 'DE', 'DOUZE', 'PORTES'] +3729-6852-0043-1703: hyp=['I', 'RESIDE', 'IN', 'THE', 'MARAY', 'GRUE', 'DE', 'DUSPORT'] +3729-6852-0044-1704: ref=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0044-1704: hyp=['I', 'WILL', 'MAKE', 'YOU', 'TRANSLATE', 'THEM', 'INTO', 'FRENCH', 'AND', 'YOU', 'NEED', 'NOT', 'BE', 'AFRAID', 'OF', 'MY', 'FINDING', 'YOU', 'INSATIABLE'] +3729-6852-0045-1705: ref=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WAS', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANYONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0045-1705: hyp=['HE', 'HAD', 'A', 'GOOD', 'APPETITE', 'COULD', 'TELL', 'A', 'GOOD', 'STORY', 'WITHOUT', 'LAUGHING', 'WITH', 'CELEBRATED', 'FOR', 'HIS', 'WITTY', 'REPARTEES', 'AND', 'HIS', 'SOCIABLE', 'MANNERS', 'BUT', 'HE', 'SPENT', 'HIS', 'LIFE', 'AT', 'HOME', 'SELDOM', 'GOING', 'OUT', 'AND', 'SEEING', 'HARDLY', 'ANY', 'ONE', 'BECAUSE', 'HE', 'ALWAYS', 'HAD', 'A', 'PIPE', 'IN', 'HIS', 'MOUTH', 'AND', 'WAS', 'SURROUNDED', 'BY', 'AT', 'LEAST', 'TWENTY', 'CATS', 'WITH', 'WHICH', 'HE', 'WOULD', 'AMUSE', 'HIMSELF', 'ALL', 'DAY'] +3729-6852-0046-1706: ref=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 
'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 'ACCOUNTS'] +3729-6852-0046-1706: hyp=['HIS', 'HOUSEKEEPER', 'HAD', 'THE', 'MANAGEMENT', 'OF', 'EVERYTHING', 'SHE', 'NEVER', 'ALLOWED', 'HIM', 'TO', 'BE', 'IN', 'NEED', 'OF', 'ANYTHING', 'AND', 'SHE', 'GAVE', 'NO', 'ACCOUNT', 'OF', 'HIS', 'MONEY', 'WHICH', 'SHE', 'KEPT', 'ALTOGETHER', 'BECAUSE', 'HE', 'NEVER', 'ASKED', 'HER', 'TO', 'RENDER', 'ANY', 'ACCOUNTS'] +4077-13751-0000-1258: ref=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMALLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0000-1258: hyp=['ON', 'THE', 'SIXTH', 'OF', 'APRIL', 'EIGHTEEN', 'THIRTY', 'THE', 'CHURCH', 'OF', 'JESUS', 'CHRIST', 'OF', 'LATTER', 'DAY', 'SAINTS', 'WAS', 'FORMERLY', 'ORGANIZED', 'AND', 'THUS', 'TOOK', 'ON', 'A', 'LEGAL', 'EXISTENCE'] +4077-13751-0001-1259: ref=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0001-1259: hyp=['ITS', 'ORIGIN', 'WAS', 'SMALL', 'A', 'GERM', 'AN', 'INSIGNIFICANT', 'SEED', 'HARDLY', 'TO', 'BE', 'THOUGHT', 'OF', 'AS', 'LIKELY', 'TO', 'AROUSE', 'OPPOSITION'] +4077-13751-0002-1260: ref=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TODAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0002-1260: hyp=['INSTEAD', 'OF', 'BUT', 'SIX', 'REGULARLY', 'AFFILIATED', 'MEMBERS', 'AND', 'AT', 'MOST', 'TWO', 'SCORE', 'OF', 'ADHERENTS', 'THE', 'ORGANIZATION', 'NUMBERS', 'TO', 'DAY', 'MANY', 'HUNDRED', 'THOUSAND', 'SOULS'] +4077-13751-0003-1261: ref=['IN', 'PLACE', 'OF', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0003-1261: hyp=['IN', 'PLACE', 'HAVE', 'A', 'SINGLE', 'HAMLET', 'IN', 'THE', 'SMALLEST', 'CORNER', 'OF', 'WHICH', 'THE', 'MEMBERS', 'COULD', 'HAVE', 'CONGREGATED', 'THERE', 'NOW', 'ARE', 'ABOUT', 'SEVENTY', 'STAKES', 'OF', 'ZION', 'AND', 'ABOUT', 'SEVEN', 'HUNDRED', 'ORGANIZED', 'WARDS', 'EACH', 'WARD', 'AND', 'STAKE', 'WITH', 'ITS', 'FULL', 'COMPLEMENT', 'OF', 'OFFICERS', 'AND', 'PRIESTHOOD', 'ORGANIZATIONS'] +4077-13751-0004-1262: ref=['THE', 'PRACTISE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0004-1262: hyp=['THE', 'PRACTICE', 'OF', 'GATHERING', 'ITS', 'PROSELYTES', 'INTO', 'ONE', 'PLACE', 'PREVENTS', 'THE', 'BUILDING', 'UP', 'AND', 'STRENGTHENING', 'OF', 'FOREIGN', 'BRANCHES', 'AND', 'INASMUCH', 'AS', 'EXTENSIVE', 'AND', 'STRONG', 'ORGANIZATIONS', 'ARE', 'SELDOM', 'MET', 'WITH', 'ABROAD', 
'VERY', 'ERRONEOUS', 'IDEAS', 'EXIST', 'CONCERNING', 'THE', 'STRENGTH', 'OF', 'THE', 'CHURCH'] +4077-13751-0005-1263: ref=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEEDS', 'HAS', 'ATTAINED', 'THE', 'PROPORTIONS', 'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'AN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FOR', 'TRUTH'] +4077-13751-0005-1263: hyp=['NEVERTHELESS', 'THE', 'MUSTARD', 'SEED', 'AMONG', 'THE', 'SMALLEST', 'OF', 'ALL', 'SEATS', 'HAS', 'ATTAINED', 'THAT', 'PROPORTIONS', 'OF', 'A', 'TREE', 'AND', 'THE', 'BIRDS', 'OF', 'THE', 'AIR', 'ARE', 'NESTING', 'IN', 'ITS', 'BRANCHES', 'THE', 'ACORN', 'IS', 'NOW', 'IN', 'OAK', 'OFFERING', 'PROTECTION', 'AND', 'THE', 'SWEETS', 'OF', 'SATISFACTION', 'TO', 'EVERY', 'EARNEST', 'PILGRIM', 'JOURNEYING', 'ITS', 'WAY', 'FIR', 'TRUTH'] +4077-13751-0006-1264: ref=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELYTING', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THERE', 'ESTABLISHED'] +4077-13751-0006-1264: hyp=['THEIR', 'EYES', 'WERE', 'FROM', 'THE', 'FIRST', 'TURNED', 'IN', 'ANTICIPATION', 'TOWARD', 'THE', 'EVENING', 'SUN', 'NOT', 'MERELY', 'THAT', 'THE', 'WORK', 'OF', 'PROSELY', 'SHOULD', 'BE', 'CARRIED', 'ON', 'IN', 'THE', 'WEST', 'BUT', 'THAT', 'THE', 'HEADQUARTERS', 'OF', 'THE', 'CHURCH', 'SHOULD', 'BE', 'THEIR', 'ESTABLISHED'] +4077-13751-0007-1265: ref=['THE', 'BOOK', 'OF', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0007-1265: hyp=['THE', 'BOOK', 'O', 'MORMON', 'HAD', 'TAUGHT', 'THE', 'PEOPLE', 'THE', 'TRUE', 'ORIGIN', 'AND', 'DESTINY', 'OF', 'THE', 'AMERICAN', 'INDIANS', 'AND', 'TOWARD', 'THIS', 'DARK', 'SKINNED', 'REMNANT', 'OF', 'A', 'ONCE', 'MIGHTY', 'PEOPLE', 'THE', 'MISSIONARIES', 'OF', 'MORMONISM', 'EARLY', 'TURNED', 'THEIR', 'EYES', 'AND', 'WITH', 'THEIR', 'EYES', 'WENT', 'THEIR', 'HEARTS', 'AND', 'THEIR', 'HOPES'] +4077-13751-0008-1266: ref=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GENERALLY', 'REGARDED', 'THE', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'OF', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 'OWN', 'TRADITIONS'] +4077-13751-0008-1266: hyp=['IT', 'IS', 'NOTABLE', 'THAT', 'THE', 'INDIAN', 'TRIBES', 'HAVE', 'GERALLY', 'REGARDED', 'THEIR', 'RELIGION', 'OF', 'THE', 'LATTER', 'DAY', 'SAINTS', 'WITH', 'FAVOR', 'SEEING', 'IN', 'THE', 'BOOK', 'OF', 'MORMON', 'STRIKING', 'AGREEMENT', 'WITH', 'THEIR', 'OWN', 'TRADITIONS'] +4077-13751-0009-1267: ref=['THE', 'FIRST', 'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'KIRTLAND', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] +4077-13751-0009-1267: hyp=['THE', 'FIRST', 
'WELL', 'ESTABLISHED', 'SEAT', 'OF', 'THE', 'CHURCH', 'WAS', 'IN', 'THE', 'PRETTY', 'LITTLE', 'TOWN', 'OF', 'CURTLIN', 'OHIO', 'ALMOST', 'WITHIN', 'SIGHT', 'OF', 'LAKE', 'ERIE', 'AND', 'HERE', 'SOON', 'ROSE', 'THE', 'FIRST', 'TEMPLE', 'OF', 'MODERN', 'TIMES'] +4077-13751-0010-1268: ref=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0010-1268: hyp=['TO', 'THE', 'FERVENT', 'LATTER', 'DAY', 'SAINT', 'A', 'TEMPLE', 'IS', 'NOT', 'SIMPLY', 'A', 'CHURCH', 'BUILDING', 'A', 'HOUSE', 'FOR', 'RELIGIOUS', 'ASSEMBLY'] +4077-13751-0011-1269: ref=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTER', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIANS'] +4077-13751-0011-1269: hyp=['SOON', 'THOUSANDS', 'OF', 'CONVERTS', 'HAD', 'RENTED', 'OR', 'PURCHASED', 'HOMES', 'IN', 'MISSOURI', 'INDEPENDENCE', 'JACKSON', 'COUNTY', 'BEING', 'THEIR', 'CENTRE', 'BUT', 'FROM', 'THE', 'FIRST', 'THEY', 'WERE', 'UNPOPULAR', 'AMONG', 'THE', 'MISSOURIENS'] +4077-13751-0012-1270: ref=['THE', 'LIEUTENANT', 'GOVERNOR', 'LILBURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFESTED', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0012-1270: hyp=['THE', 'LIEUTENANT', 'GOVERNOR', 'LITTLE', 'BURN', 'W', 'BOGGS', 'AFTERWARD', 'GOVERNOR', 'WAS', 'A', 'PRONOUNCED', 'MORMON', 'HATER', 'AND', 'THROUGHOUT', 'THE', 'PERIOD', 'OF', 'THE', 'TROUBLES', 'HE', 'MANIFEST', 'HIS', 'SYMPATHY', 'WITH', 'THE', 'PERSECUTORS'] +4077-13751-0013-1271: ref=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0013-1271: hyp=['THEIR', 'SUFFERINGS', 'HAVE', 'NEVER', 'YET', 'BEEN', 'FITLY', 'CHRONICLED', 'BY', 'HUMAN', 'SCRIBE'] +4077-13751-0014-1272: ref=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CALDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0014-1272: hyp=['MAKING', 'THEIR', 'WAY', 'ACROSS', 'THE', 'RIVER', 'MOST', 'OF', 'THE', 'REFUGEES', 'FOUND', 'SHELTER', 'AMONG', 'THE', 'MORE', 'HOSPITABLE', 'PEOPLE', 'OF', 'CLAY', 'COUNTY', 'AND', 'AFTERWARD', 'ESTABLISHED', 'THEMSELVES', 'IN', 'CAULDWELL', 'COUNTY', 'THEREIN', 'FOUNDING', 'THE', 'CITY', 'OF', 'FAR', 'WEST'] +4077-13751-0015-1273: ref=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0015-1273: hyp=['A', 'SMALL', 'SETTLEMENT', 'HAD', 'BEEN', 'FOUNDED', 'BY', 'MORMON', 'FAMILIES', 'ON', 'SHOAL', 'CREEK', 'AND', 'HERE', 'ON', 'THE', 'THIRTIETH', 'OF', 'OCTOBER', 'EIGHTEEN', 'THIRTY', 'EIGHT', 'A', 'COMPANY', 'OF', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'FELL', 'UPON', 'THE', 'HAPLESS', 'SETTLERS', 'AND', 'BUTCHERED', 'A', 'SCORE'] +4077-13751-0016-1274: ref=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 
'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGNED', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0016-1274: hyp=['BE', 'IT', 'SAID', 'TO', 'THE', 'HONOR', 'OF', 'SOME', 'OF', 'THE', 'OFFICERS', 'ENTRUSTED', 'WITH', 'THE', 'TERRIBLE', 'COMMISSION', 'THAT', 'WHEN', 'THEY', 'LEARNED', 'ITS', 'TRUE', 'SIGNIFICANCE', 'THEY', 'RESIGN', 'THEIR', 'AUTHORITY', 'RATHER', 'THAN', 'HAVE', 'ANYTHING', 'TO', 'DO', 'WITH', 'WHAT', 'THEY', 'DESIGNATED', 'A', 'COLD', 'BLOODED', 'BUTCHERY'] +4077-13751-0017-1275: ref=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0017-1275: hyp=['OH', 'WHAT', 'A', 'RECORD', 'TO', 'READ', 'WHAT', 'A', 'PICTURE', 'TO', 'GAZE', 'UPON', 'HOW', 'AWFUL', 'THE', 'FACT'] +4077-13751-0018-1276: ref=['AMERICAN', 'SCHOOL', 'BOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PAPAL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'VAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0018-1276: hyp=['AMERICAN', 'SCHOOLBOYS', 'READ', 'WITH', 'EMOTIONS', 'OF', 'HORROR', 'OF', 'THE', 'ALBIGENSES', 'DRIVEN', 'BEATEN', 'AND', 'KILLED', 'WITH', 'A', 'PEPPEL', 'LEGATE', 'DIRECTING', 'THE', 'BUTCHERY', 'AND', 'OF', 'THE', 'FAUDOIS', 'HUNTED', 'AND', 'HOUNDED', 'LIKE', 'BEASTS', 'AS', 'THE', 'EFFECT', 'OF', 'A', 'ROYAL', 'DECREE', 'AND', 'THEY', 'YET', 'SHALL', 'READ', 'IN', 'THE', 'HISTORY', 'OF', 'THEIR', 'OWN', 'COUNTRY', 'OF', 'SCENES', 'AS', 'TERRIBLE', 'AS', 'THESE', 'IN', 'THE', 'EXHIBITION', 'OF', 'INJUSTICE', 'AND', 'INHUMAN', 'HATE'] +4077-13751-0019-1277: ref=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0019-1277: hyp=['WHO', 'BEGAN', 'THE', 'QUARREL', 'WAS', 'IT', 'THE', 'MORMONS'] +4077-13751-0020-1278: ref=['AS', 'A', 'SAMPLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0020-1278: hyp=['AS', 'THE', 'SABLE', 'OF', 'THE', 'PRESS', 'COMMENTS', 'AGAINST', 'THE', 'BRUTALITY', 'OF', 'THE', 'MISSOURIANS', 'I', 'QUOTE', 'A', 'PARAGRAPH', 'FROM', 'THE', 'QUINCEY', 'ARGUS', 'MARCH', 'SIXTEENTH', 'EIGHTEEN', 'THIRTY', 'NINE'] +4077-13751-0021-1279: ref=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] +4077-13751-0021-1279: hyp=['IT', 'WILL', 'BE', 'OBSERVED', 'THAT', 'AN', 'ORGANIZED', 'MOB', 'AIDED', 'BY', 'MANY', 'OF', 'THE', 'CIVIL', 'AND', 'MILITARY', 'OFFICERS', 'OF', 'MISSOURI', 'WITH', 'GOVERNOR', 'BOGGS', 'AT', 
'THEIR', 'HEAD', 'HAVE', 'BEEN', 'THE', 'PROMINENT', 'ACTORS', 'IN', 'THIS', 'BUSINESS', 'INCITED', 'TOO', 'IT', 'APPEARS', 'AGAINST', 'THE', 'MORMONS', 'BY', 'POLITICAL', 'HATRED', 'AND', 'BY', 'THE', 'ADDITIONAL', 'MOTIVES', 'OF', 'PLUNDER', 'AND', 'REVENGE'] +4077-13754-0000-1241: ref=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0000-1241: hyp=['THE', 'ARMY', 'FOUND', 'THE', 'PEOPLE', 'IN', 'POVERTY', 'AND', 'LEFT', 'THEM', 'IN', 'COMPARATIVE', 'WEALTH'] +4077-13754-0001-1242: ref=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0001-1242: hyp=['BUT', 'A', 'WORD', 'FURTHER', 'CONCERNING', 'THE', 'EXPEDITION', 'IN', 'GENERAL'] +4077-13754-0002-1243: ref=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'BUCHANAN', 'ORDERED', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'INTERESTS', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0002-1243: hyp=['IT', 'WAS', 'THROUGH', "FLOYD'S", 'ADVICE', 'THAT', 'YOU', 'CANNOT', 'ORDER', 'THE', 'MILITARY', 'EXPEDITION', 'TO', 'UTAH', 'OSTENSIBLY', 'TO', 'INSTALL', 'CERTAIN', 'FEDERAL', 'OFFICIALS', 'AND', 'TO', 'REPRESS', 'AN', 'ALLEGED', 'INFANTILE', 'REBELLION', 'WHICH', 'IN', 'FACT', 'HAD', 'NEVER', 'COME', 'INTO', 'EXISTENCE', 'BUT', 'IN', 'REALITY', 'TO', 'FURTHER', 'THE', 'ENTRANCE', 'OF', 'THE', 'SECESSIONISTS'] +4077-13754-0003-1244: ref=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREATER', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0003-1244: hyp=['MOREOVER', 'HAD', 'THE', 'PEOPLE', 'BEEN', 'INCLINED', 'TO', 'REBELLION', 'WHAT', 'GREAT', 'OPPORTUNITY', 'COULD', 'THEY', 'HAVE', 'WISHED'] +4077-13754-0004-1245: ref=['ALREADY', 'A', 'NORTH', 'AND', 'A', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'A', 'WEST'] +4077-13754-0004-1245: hyp=['ALREADY', 'A', 'NORTH', 'AND', 'THE', 'SOUTH', 'WERE', 'TALKED', 'OF', 'WHY', 'NOT', 'SET', 'UP', 'ALSO', 'WEST'] +4077-13754-0005-1246: ref=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 'TO', 'FURNISH', 'MEN', 'AND', 'ARMS', 'FOR', 'THEIR', "COUNTRY'S", 'NEED'] +4077-13754-0005-1246: hyp=['THEY', 'KNEW', 'NO', 'NORTH', 'NO', 'SOUTH', 'NO', 'EAST', 'NO', 'WEST', 'THEY', 'STOOD', 'POSITIVELY', 'BY', 'THE', 'CONSTITUTION', 'AND', 'WOULD', 'HAVE', 'NOTHING', 'TO', 'DO', 'IN', 'THE', 'BLOODY', 'STRIFE', 'BETWEEN', 'BROTHERS', 'UNLESS', 'INDEED', 'THEY', 'WERE', 'SUMMONED', 'BY', 'THE', 'AUTHORITY', 'TO', 'WHICH', 'THEY', 'HAD', 'ALREADY', 'ONCE', 'LOYALLY', 'RESPONDED', 'TO', 'FURNISH', 'MEN', 'IN', 'ARMS', 'FOR', 'THE', "COUNTRY'S", 'NEED'] +4077-13754-0006-1247: ref=['WHAT', 'THE', 'LATTER', 'DAY', 'SAINTS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTISE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 
'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0006-1247: hyp=['WHAT', 'THE', 'LATTER', 'DAY', 'SAYS', 'CALL', 'CELESTIAL', 'MARRIAGE', 'IS', 'CHARACTERISTIC', 'OF', 'THE', 'CHURCH', 'AND', 'IS', 'IN', 'VERY', 'GENERAL', 'PRACTICE', 'BUT', 'OF', 'CELESTIAL', 'MARRIAGE', 'PLURALITY', 'OF', 'WIVES', 'WAS', 'AN', 'INCIDENT', 'NEVER', 'AN', 'ESSENTIAL'] +4077-13754-0007-1248: ref=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATE', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BROTHER', 'AND', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0007-1248: hyp=['WE', 'BELIEVE', 'IN', 'A', 'LITERAL', 'RESURRECTION', 'AND', 'AN', 'ACTUAL', 'HEREAFTER', 'IN', 'WHICH', 'FUTURE', 'STATES', 'SHALL', 'BE', 'RECOGNIZED', 'EVERY', 'SANCTIFIED', 'AND', 'AUTHORIZED', 'RELATIONSHIP', 'EXISTING', 'HERE', 'ON', 'EARTH', 'OF', 'PARENT', 'AND', 'CHILD', 'BRETHREN', 'SISTER', 'HUSBAND', 'AND', 'WIFE'] +4077-13754-0008-1249: ref=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'TO', 'FIND', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0008-1249: hyp=['IT', 'HAS', 'BEEN', 'MY', 'PRIVILEGE', 'TO', 'TREAD', 'THE', 'SOIL', 'OF', 'MANY', 'LANDS', 'TO', 'OBSERVE', 'THE', 'CUSTOMS', 'AND', 'STUDY', 'THE', 'HABITS', 'OF', 'MORE', 'NATIONS', 'THAN', 'ONE', 'AND', 'I', 'HAVE', 'YET', 'DEFINED', 'THE', 'PLACE', 'AND', 'MEET', 'THE', 'PEOPLE', 'WHERE', 'AND', 'WITH', 'WHOM', 'THE', 'PURITY', 'OF', 'MAN', 'AND', 'WOMAN', 'IS', 'HELD', 'MORE', 'PRECIOUS', 'THAN', 'AMONG', 'THE', 'MALIGNED', 'MORMONS', 'IN', 'THE', 'MOUNTAIN', 'VALLEYS', 'OF', 'THE', 'WEST'] +4077-13754-0009-1250: ref=['AT', 'THE', 'INCEPTION', 'OF', 'PLURAL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTISE'] +4077-13754-0009-1250: hyp=['AT', 'THE', 'INCEPTION', 'OF', 'PEARL', 'MARRIAGE', 'AMONG', 'THE', 'LATTER', 'DAY', 'SAINTS', 'THERE', 'WAS', 'NO', 'LAW', 'NATIONAL', 'OR', 'STATE', 'AGAINST', 'ITS', 'PRACTICE'] +4077-13754-0010-1251: ref=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'THE', 'PURPOSE', 'OF', 'SUPPRESSING', 'PLURAL', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 'DEAD', 'LETTER'] +4077-13754-0010-1251: hyp=['IN', 'EIGHTEEN', 'SIXTY', 'TWO', 'A', 'LAW', 'WAS', 'ENACTED', 'WITH', 'A', 'PURPOSE', 'OF', 'SUPPRESSING', 'POOR', 'MARRIAGE', 'AND', 'AS', 'HAD', 'BEEN', 'PREDICTED', 'IN', 'THE', 'NATIONAL', 'SENATE', 'PRIOR', 'TO', 'ITS', 'PASSAGE', 'IT', 'LAY', 'FOR', 'MANY', 'YEARS', 'A', 'DEAD', 'LETTER'] +4077-13754-0011-1252: ref=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'IN', 'UTAH', 'WHO', 'WERE', 'NOT', 'MORMONS', 'NOR', 'LOVERS', 'OF', 'MORMONISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] 
+4077-13754-0011-1252: hyp=['FEDERAL', 'JUDGES', 'AND', 'UNITED', 'STATES', 'ATTORNEYS', 'IN', 'NEW', 'TOP', 'WHO', 'WERE', 'NOT', 'MORE', "MEN'S", 'NOR', 'LOVERS', 'OF', 'MORMONISM', 'REFUSED', 'TO', 'ENTERTAIN', 'COMPLAINTS', 'OR', 'PROSECUTE', 'CASES', 'UNDER', 'THE', 'LAW', 'BECAUSE', 'OF', 'ITS', 'MANIFEST', 'INJUSTICE', 'AND', 'INADEQUACY'] +4077-13754-0012-1253: ref=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'AYE', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0012-1253: hyp=['THIS', 'MEANT', 'THAT', 'FOR', 'AN', 'ALLEGED', 'MISDEMEANOUR', 'FOR', 'WHICH', 'CONGRESS', 'PRESCRIBED', 'A', 'MAXIMUM', 'PENALTY', 'OF', 'SIX', 'MONTHS', 'IMPRISONMENT', 'AND', 'A', 'FINE', 'OF', 'THREE', 'HUNDRED', 'DOLLARS', 'A', 'MAN', 'MIGHT', 'BE', 'IMPRISONED', 'FOR', 'LIFE', 'I', 'FOR', 'MANY', 'TERMS', 'OF', 'A', "MAN'S", 'NATURAL', 'LIFE', 'DID', 'THE', "COURT'S", 'POWER', 'TO', 'ENFORCE', 'ITS', 'SENTENCES', 'EXTEND', 'SO', 'FAR', 'AND', 'MIGHT', 'BE', 'FINED', 'MILLIONS', 'OF', 'DOLLARS'] +4077-13754-0013-1254: ref=['BEFORE', 'THIS', 'TRAVESTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'BROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MEET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCES', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0013-1254: hyp=['BEFORE', 'THIS', 'TRAVASTY', 'ON', 'THE', 'ADMINISTRATION', 'OF', 'LAW', 'COULD', 'BE', 'BROUGHT', 'BEFORE', 'THE', 'COURT', 'OF', 'LAST', 'RESORT', 'AND', 'THERE', 'MET', 'WITH', 'THE', 'REVERSAL', 'AND', 'REBUKE', 'IT', 'DESERVED', 'MEN', 'WERE', 'IMPRISONED', 'UNDER', 'SENTENCE', 'OF', 'MANY', 'YEARS', 'DURATION'] +4077-13754-0014-1255: ref=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0014-1255: hyp=['THE', 'PEOPLE', 'CONTESTED', 'THESE', 'MEASURES', 'ONE', 'BY', 'ONE', 'IN', 'THE', 'COURTS', 'PRESENTING', 'IN', 'CASE', 'AFTER', 'CASE', 'THE', 'DIFFERENT', 'PHASES', 'OF', 'THE', 'SUBJECT', 'AND', 'URGING', 'THE', 'UNCONSTITUTIONALITY', 'OF', 'THE', 'MEASURE'] +4077-13754-0015-1256: ref=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ESCHEATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', "COURT'S", 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 'PROPERTY'] +4077-13754-0015-1256: hyp=['THEN', 'THE', 'CHURCH', 'WAS', 'DISINCORPORATED', 'AND', 'ITS', 'PROPERTY', 'BOTH', 'REAL', 'AND', 'PERSONAL', 'CONFISCATED', 'AND', 'ISTIATED', 'TO', 'THE', 'GOVERNMENT', 'OF', 'THE', 'UNITED', 'STATES', 'AND', 'ALTHOUGH', 'THE', 'PERSONAL', 'PROPERTY', 
'WAS', 'SOON', 'RESTORED', 'REAL', 'ESTATE', 'OF', 'GREAT', 'VALUE', 'LONG', 'LAY', 'IN', 'THE', 'HANDS', 'OF', 'THE', 'COURTS', 'RECEIVER', 'AND', 'THE', 'MORMON', 'CHURCH', 'HAD', 'TO', 'PAY', 'THE', 'NATIONAL', 'GOVERNMENT', 'HIGH', 'RENTAL', 'ON', 'ITS', 'OWN', 'PROPERTY'] +4077-13754-0016-1257: ref=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4077-13754-0016-1257: hyp=['AND', 'SO', 'THE', 'STORY', 'OF', 'MORMONISM', 'RUNS', 'ON', 'ITS', 'FINALE', 'HAS', 'NOT', 'YET', 'BEEN', 'WRITTEN', 'THE', 'CURRENT', 'PRESS', 'PRESENTS', 'CONTINUOUSLY', 'NEW', 'STAGES', 'OF', 'ITS', 'PROGRESS', 'NEW', 'DEVELOPMENTS', 'OF', 'ITS', 'PLAN'] +4446-2271-0000-1133: ref=['MAINHALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0000-1133: hyp=['MAIN', 'HALL', 'LIKED', 'ALEXANDER', 'BECAUSE', 'HE', 'WAS', 'AN', 'ENGINEER'] +4446-2271-0001-1134: ref=['HE', 'HAD', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0001-1134: hyp=['WE', 'NOT', 'PRECONCEIVED', 'IDEAS', 'ABOUT', 'EVERYTHING', 'AND', 'HIS', 'IDEA', 'ABOUT', 'AMERICANS', 'WAS', 'THAT', 'THEY', 'SHOULD', 'BE', 'ENGINEERS', 'OR', 'MECHANICS'] +4446-2271-0002-1135: ref=["IT'S", 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0002-1135: hyp=['ITS', 'TREMENDOUSLY', 'WELL', 'PUT', 'ON', 'TOO'] +4446-2271-0003-1136: ref=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0003-1136: hyp=["IT'S", 'BEEN', 'ON', 'ONLY', 'TWO', 'WEEKS', 'AND', "I'VE", 'BEEN', 'HALF', 'A', 'DOZEN', 'TIMES', 'ALREADY'] +4446-2271-0004-1137: ref=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAINHALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0004-1137: hyp=['DO', 'YOU', 'KNOW', 'ALEXANDER', 'MAIN', 'HALL', 'LOOKED', 'WITH', 'PERPLEXITY', 'UP', 'INTO', 'THE', 'TOP', 'OF', 'THE', 'HANSOM', 'AND', 'RUBBED', 'HIS', 'PINK', 'CHEEK', 'WITH', 'HIS', 'GLOVED', 'FINGER', 'DO', 'YOU', 'KNOW', 'I', 'SOMETIMES', 'THINK', 'OF', 'TAKING', 'TO', 'CRITICISM', 'SERIOUSLY', 'MYSELF'] +4446-2271-0005-1138: ref=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', "SHE'S", 'AT', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0005-1138: hyp=['SHE', 'SAVES', 'HER', 'HAND', 'TOO', 'SHE', 'SAID', 'HER', 'BEST', 'IN', 'THE', 'SECOND', 'ACT'] +4446-2271-0006-1139: ref=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0006-1139: hyp=["HE'S", 'BEEN', 'WANTING', 'TO', 'MARRY', 'HILDA', 'THESE', 'THREE', 'YEARS', 'AND', 'MORE'] +4446-2271-0007-1140: ref=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0007-1140: hyp=['SHE', "DOESN'T", 'TAKE', 'UP', 'WITH', 'ANYBODY', 'YOU', 'KNOW'] +4446-2271-0008-1141: ref=['IRENE', 'BURGOYNE', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0008-1141: hyp=['IRENE', 'WERE', 'GOING', 'ONE', 'OF', 'HER', 'FAMILY', 'TOLD', 'ME', 
'IN', 'CONFIDENCE', 'THAT', 'THERE', 'WAS', 'A', 'ROMANCE', 'SOMEWHERE', 'BACK', 'IN', 'THE', 'BEGINNING'] +4446-2271-0009-1142: ref=['MAINHALL', 'VOUCHED', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0009-1142: hyp=['MEANHAVED', 'FOR', 'HER', 'CONSTANCY', 'WITH', 'A', 'LOFTINESS', 'THAT', 'MADE', 'ALEXANDER', 'SMILE', 'EVEN', 'WHILE', 'A', 'KIND', 'OF', 'RAPID', 'EXCITEMENT', 'WAS', 'TINGLING', 'THROUGH', 'HIM'] +4446-2271-0010-1143: ref=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0010-1143: hyp=["HE'S", 'ANOTHER', "WHO'S", 'AWFULLY', 'KEEN', 'ABOUT', 'HER', 'LET', 'ME', 'INTRODUCE', 'YOU'] +4446-2271-0011-1144: ref=['SIR', 'HARRY', 'TOWNE', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0011-1144: hyp=['SIR', 'HARRY', 'TOWN', 'MISTER', 'BARTLEY', 'ALEXANDER', 'THE', 'AMERICAN', 'ENGINEER'] +4446-2271-0012-1145: ref=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', "GIRL'S", 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0012-1145: hyp=['I', 'SAY', 'SIR', 'HARRY', 'THE', 'LITTLE', 'GIRLS', 'GOING', 'FAMOUSLY', 'TO', 'NIGHT', "ISN'T", 'SHE'] +4446-2271-0013-1146: ref=['DO', 'YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'A', 'BIT', 'CONSCIOUS', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0013-1146: hyp=['YOU', 'KNOW', 'I', 'THOUGHT', 'THE', 'DANCE', 'OF', 'GOOD', 'CONSCIOUS', 'TO', 'NIGHT', 'FOR', 'THE', 'FIRST', 'TIME'] +4446-2271-0014-1147: ref=['WESTMERE', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0014-1147: hyp=['WESTMARE', 'AND', 'I', 'WERE', 'BACK', 'AFTER', 'THE', 'FIRST', 'ACT', 'AND', 'WE', 'THOUGHT', 'SHE', 'SEEMED', 'QUITE', 'UNCERTAIN', 'OF', 'HERSELF'] +4446-2271-0015-1148: ref=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0015-1148: hyp=['A', 'LITTLE', 'ATTACK', 'OF', 'NERVES', 'POSSIBLY'] +4446-2271-0016-1149: ref=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'A', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0016-1149: hyp=['HE', 'WAS', 'BEGINNING', 'TO', 'FEEL', 'THE', 'KEEN', 'INTEREST', 'IN', 'THE', 'SLENDER', 'BAREFOOT', 'DONKEY', 'GIRL', 'WHO', 'SLIPPED', 'IN', 'AND', 'OUT', 'OF', 'THE', 'PLAY', 'SINGING', 'LIKE', 'SOME', 'ONE', 'WINDING', 'THROUGH', 'A', 'HILLY', 'FIELD'] +4446-2271-0017-1150: ref=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THAT', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0017-1150: hyp=['ONE', 'NIGHT', 'WHEN', 'HE', 'AND', 'WINIFRED', 'WERE', 'SITTING', 'TOGETHER', 'ON', 'THE', 'BRIDGE', 'HE', 'TOLD', 'HER', 'THE', 'THINGS', 'HAD', 'HAPPENED', 'WHILE', 'HE', 'WAS', 'STUDYING', 'ABROAD', 'THAT', 'HE', 'WAS', 'SORRY', 'FOR', 'ONE', 'THING', 'IN', 'PARTICULAR', 'AND', 'HE', 'ASKED', 'HER', 'WHETHER', 'SHE', 'THOUGHT', 'SHE', 'OUGHT', 'TO', 'KNOW', 'ABOUT', 'THEM'] +4446-2271-0018-1151: 
ref=['SHE', 'CONSIDERED', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THOUGH', 'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0018-1151: hyp=['SHE', 'CONSIDERED', 'FOR', 'A', 'MOMENT', 'AND', 'THEN', 'SAID', 'NO', 'I', 'THINK', 'NOT', 'THE', 'WAY', 'I', 'AM', 'GLAD', 'YOU', 'ASK', 'ME'] +4446-2271-0019-1152: ref=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0019-1152: hyp=['AFTER', 'THAT', 'IT', 'WAS', 'EASY', 'TO', 'FORGET', 'ACTUALLY', 'TO', 'FORGET'] +4446-2271-0020-1153: ref=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0020-1153: hyp=['OF', 'COURSE', 'HE', 'REFLECTED', 'SHE', 'ALWAYS', 'HAD', 'THAT', 'COMBINATION', 'OF', 'SOMETHING', 'HOMELY', 'AND', 'SENSIBLE', 'AND', 'SOMETHING', 'UTTERLY', 'WILD', 'AND', 'DAFT'] +4446-2271-0021-1154: ref=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0021-1154: hyp=['SHE', 'MUST', 'CARE', 'ABOUT', 'THE', 'THEATRE', 'A', 'GREAT', 'DEAL', 'MORE', 'THAN', 'SHE', 'USED', 'TO'] +4446-2271-0022-1155: ref=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SINCE'] +4446-2271-0022-1155: hyp=["I'M", 'GLAD', "SHE'S", 'HELD', 'HER', 'OWN', 'SEN'] +4446-2271-0023-1156: ref=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0023-1156: hyp=['AFTER', 'ALL', 'WE', 'WERE', 'AWFULLY', 'YOUNG'] +4446-2271-0024-1157: ref=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2271-0024-1157: hyp=['I', "SHOULDN'T", 'WONDER', 'IF', 'SHE', 'COULD', 'LAUGH', 'ABOUT', 'IT', 'WITH', 'ME', 'NOW'] +4446-2273-0000-1158: ref=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0000-1158: hyp=['HILDA', 'WAS', 'VERY', 'NICE', 'TO', 'HIM', 'AND', 'HE', 'SAT', 'ON', 'THE', 'EDGE', 'OF', 'HIS', 'CHAIR', 'FLUSHED', 'WITH', 'HIS', 'CONVERSATIONAL', 'EFFORTS', 'AND', 'MOVING', 'HIS', 'CHIN', 'ABOUT', 'NERVOUSLY', 'OVER', 'HIS', 'HIGH', 'COLLAR'] +4446-2273-0001-1159: ref=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0001-1159: hyp=['THEY', 'ASKED', 'HIM', 'TO', 'COME', 'TO', 'SEE', 'THEM', 'IN', 'CHELSEA', 'AND', 'THEY', 'SPOKE', 'VERY', 'TENDERLY', 'OF', 'HILDA'] +4446-2273-0002-1160: ref=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0002-1160: hyp=['LAMB', "WOULDN'T", 'CARE', 'A', 'GREAT', 'DEAL', 'ABOUT', 'MANY', 'OF', 'THEM', 'I', 'FANCY'] +4446-2273-0003-1161: ref=['WHEN', 'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0003-1161: hyp=['WHEN', 'BARTLEY', 'ARRIVED', 'AT', 'BEDFORD', 'SQUARE', 'ON', 'SUNDAY', 'EVENING', 'MARIE', 'THE', 'PRETTY', 'LITTLE', 'FRENCH', 'GIRL', 'MET', 'HIM', 'AT', 'THE', 'DOOR', 'AND', 'CONDUCTED', 'HIM', 'UPSTAIRS'] +4446-2273-0004-1162: ref=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 
'ENGLISH', 'COOKERY'] +4446-2273-0004-1162: hyp=['I', 'SHOULD', 'NEVER', 'HAVE', 'ASKED', 'YOU', 'IF', 'MOLLY', 'HAD', 'BEEN', 'HERE', 'FOR', 'I', 'REMEMBER', 'YOU', "DON'T", 'LIKE', 'ENGLISH', 'COOKERY'] +4446-2273-0005-1163: ref=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0005-1163: hyp=['I', "HAVEN'T", 'HAD', 'A', 'CHANCE', 'YET', 'TO', 'TELL', 'YOU', 'WHAT', 'A', 'JOLLY', 'LITTLE', 'PLACE', 'I', 'THINK', 'THIS', 'IS'] +4446-2273-0006-1164: ref=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VILLA', "D'ESTE", 'YOU', 'SEE'] +4446-2273-0006-1164: hyp=['THEY', 'ARE', 'ALL', 'SKETCHES', 'MADE', 'ABOUT', 'THE', 'VILIDESSEA', 'YOU', 'SEE'] +4446-2273-0007-1165: ref=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAINHALL'] +4446-2273-0007-1165: hyp=['THOSE', 'FELLOWS', 'ARE', 'ALL', 'VERY', 'LOYAL', 'EVEN', 'MAIN', 'HALL'] +4446-2273-0008-1166: ref=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0008-1166: hyp=["I'VE", 'MANAGED', 'TO', 'SAVE', 'SOMETHING', 'EVERY', 'YEAR', 'AND', 'THAT', 'WITH', 'HELPING', 'MY', 'THREE', 'SISTERS', 'NOW', 'AND', 'THEN', 'AND', 'TIDING', 'POOR', 'COUSIN', 'MIKE', 'OVER', 'BAD', 'SEASONS'] +4446-2273-0009-1167: ref=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0009-1167: hyp=["IT'S", 'NOT', 'PARTICULARLY', 'RARE', 'SHE', 'SAID', 'BUT', 'SOME', 'OF', 'IT', 'WAS', 'MY', "MOTHER'S"] +4446-2273-0010-1168: ref=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0010-1168: hyp=['THERE', 'WAS', 'WATERCRESS', 'SOUP', 'AND', 'SOLE', 'AND', 'A', 'DELIGHTFUL', 'OMELETTE', 'STUFFED', 'WITH', 'MUSHROOMS', 'AND', 'TRUFFLES', 'AND', 'TWO', 'SMALL', 'RARE', 'DUCKLINGS', 'AND', 'ARTICHOKES', 'AND', 'A', 'DRY', 'YELLOW', 'RHONE', 'WINE', 'OF', 'WHICH', 'BARTLEY', 'HAD', 'ALWAYS', 'BEEN', 'VERY', 'FOND'] +4446-2273-0011-1169: ref=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0011-1169: hyp=['THERE', 'IS', 'NOTHING', 'ELSE', 'THAT', 'LOOKS', 'SO', 'JOLLY'] +4446-2273-0012-1170: ref=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0012-1170: hyp=['THANK', 'YOU', 'BUT', 'I', "DON'T", 'LIKE', 'IT', 'SO', 'WELL', 'AS', 'THIS'] +4446-2273-0013-1171: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0013-1171: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'PARIS', 'MUCH', 'THESE', 'LATE', 'YEARS'] +4446-2273-0014-1172: ref=['THERE', 'ARE', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] +4446-2273-0014-1172: hyp=['THERE', 'ARE', 'A', 'FEW', 'CHANGES', 'IN', 'THE', 'OLD', 'QUARTER'] +4446-2273-0015-1173: ref=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0015-1173: hyp=["DON'T", 'I', 'THOUGH', "I'M", 'SO', 'SORRY', 'TO', 'HEAR', 'IT', 'HOW', 'DID', 'HER', 'SON', 'TURN', 'OUT'] +4446-2273-0016-1174: ref=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 'AND', 'HER', 'BLUE', 
'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BAINS', 'DE', 'MER'] +4446-2273-0016-1174: hyp=['HER', 'HAIR', 'IS', 'STILL', 'LIKE', 'FLAX', 'AND', 'HER', 'BLUE', 'EYES', 'ARE', 'JUST', 'LIKE', 'A', "BABY'S", 'AND', 'SHE', 'HAS', 'THE', 'SAME', 'THREE', 'FRECKLES', 'ON', 'HER', 'LITTLE', 'NOSE', 'AND', 'TALKS', 'ABOUT', 'GOING', 'BACK', 'TO', 'HER', 'BANDOMERE'] +4446-2273-0017-1175: ref=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0017-1175: hyp=['HOW', 'JOLLY', 'IT', 'WAS', 'BEING', 'YOUNG', 'HILDA'] +4446-2273-0018-1176: ref=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0018-1176: hyp=['DO', 'YOU', 'REMEMBER', 'THAT', 'FIRST', 'WALK', 'WE', 'TOOK', 'TOGETHER', 'IN', 'PARIS'] +4446-2273-0019-1177: ref=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0019-1177: hyp=['COME', "WE'LL", 'HAVE', 'OUR', 'COFFEE', 'IN', 'THE', 'OTHER', 'ROOM', 'AND', 'YOU', 'CAN', 'SMOKE'] +4446-2273-0020-1178: ref=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0020-1178: hyp=['I', 'THINK', 'WE', 'DID', 'SHE', 'ANSWERED', 'DEMURELY'] +4446-2273-0021-1179: ref=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANCS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0021-1179: hyp=['WHAT', 'SHE', 'WANTED', 'FROM', 'US', 'WAS', 'NEITHER', 'OUR', 'FLOWERS', 'NOR', 'OUR', 'FRANKS', 'BUT', 'JUST', 'OUR', 'YOUTH'] +4446-2273-0022-1180: ref=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0022-1180: hyp=['THEY', 'WERE', 'BOTH', 'REMEMBERING', 'WHAT', 'THE', 'WOMAN', 'HAD', 'SAID', 'WHEN', 'SHE', 'TOOK', 'THE', 'MONEY', 'GOD', 'GIVE', 'YOU', 'A', 'HAPPY', 'LOVE'] +4446-2273-0023-1181: ref=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0023-1181: hyp=['THE', 'STRANGE', 'WOMAN', 'AND', 'HER', 'PASSIONATE', 'SENTENCE', 'THAT', 'RANG', 'OUT', 'SO', 'SHARPLY', 'HAD', 'FRIGHTENED', 'THEM', 'BOTH'] +4446-2273-0024-1182: ref=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0024-1182: hyp=['BARTLEY', 'STARTED', 'WHEN', 'HILDA', 'RANG', 'THE', 'LITTLE', 'BELL', 'BESIDE', 'HER', 'DEAR', 'ME', 'WHY', 'DID', 'YOU', 'DO', 'THAT'] +4446-2273-0025-1183: ref=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0025-1183: hyp=['IT', 'WAS', 'VERY', 'JOLLY', 'HE', 'MURMURED', 'LAZILY', 'AS', 'MARIE', 'CAME', 'IN', 'TO', 'TAKE', 'AWAY', 'THE', 'COFFEE'] +4446-2273-0026-1184: ref=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] +4446-2273-0026-1184: hyp=['HAVE', 'I', 'TOLD', 'YOU', 'ABOUT', 'MY', 'NEW', 'PLAY'] +4446-2273-0027-1185: ref=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0027-1185: hyp=['WHEN', 'SHE', 'FINISHED', 'ALEXANDER', 'SHOOK', 'HIMSELF', 'OUT', 'OF', 'A', 'REVERIE'] +4446-2273-0028-1186: ref=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 
'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0028-1186: hyp=['NONSENSE', 'OF', 'COURSE', 'I', "CAN'T", 'REALLY', 'SING', 'EXCEPT', 'THE', 'WAY', 'MY', 'MOTHER', 'AND', 'GRANDMOTHER', 'DID', 'BEFORE', 'ME'] +4446-2273-0029-1187: ref=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0029-1187: hyp=["IT'S", 'REALLY', 'TOO', 'WARM', 'IN', 'THIS', 'ROOM', 'TO', 'SING', "DON'T", 'YOU', 'FEEL', 'IT'] +4446-2273-0030-1188: ref=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0030-1188: hyp=['ALEXANDER', 'WENT', 'OVER', 'AND', 'OPENED', 'THE', 'WINDOW', 'FOR', 'HER'] +4446-2273-0031-1189: ref=['THERE', 'JUST', 'IN', 'FRONT'] +4446-2273-0031-1189: hyp=['THERE', 'JUST', 'IN', 'FRONT'] +4446-2273-0032-1190: ref=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0032-1190: hyp=['HE', 'STOOD', 'A', 'LITTLE', 'BEHIND', 'HER', 'AND', 'TRIED', 'TO', 'STEADY', 'HIMSELF', 'AS', 'HE', 'SAID', "IT'S", 'SOFT', 'AND', 'MISTY', 'SEE', 'HOW', 'WHITE', 'THE', 'STARS', 'ARE'] +4446-2273-0033-1191: ref=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPOKE'] +4446-2273-0033-1191: hyp=['FOR', 'A', 'LONG', 'TIME', 'NEITHER', 'HILDA', 'NOR', 'BARTLEY', 'SPOKE'] +4446-2273-0034-1192: ref=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0034-1192: hyp=['HE', 'FELT', 'A', 'TREMOR', 'RUN', 'THROUGH', 'THE', 'SLENDER', 'YELLOW', 'FIGURE', 'IN', 'FRONT', 'OF', 'HIM'] +4446-2273-0035-1193: ref=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0035-1193: hyp=['BARTLEY', 'LEANED', 'OVER', 'HER', 'SHOULDER', 'WITHOUT', 'TOUCHING', 'HER', 'AND', 'WHISPERED', 'IN', 'HER', 'EAR', 'YOU', 'ARE', 'GIVING', 'ME', 'A', 'CHANCE', 'YES'] +4446-2273-0036-1194: ref=['ALEXANDER', 'UNCLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2273-0036-1194: hyp=['ALEXANDER', 'CLENCHED', 'THE', 'TWO', 'HANDS', 'AT', 'HIS', 'SIDES'] +4446-2275-0000-1195: ref=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'UP', 'THE', 'MERSEY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0000-1195: hyp=['THE', 'STOP', 'AT', 'QUEENSTOWN', 'THE', 'TEDIOUS', 'PASSAGE', 'OF', 'THE', 'MERCY', 'WERE', 'THINGS', 'THAT', 'HE', 'NOTED', 'DIMLY', 'THROUGH', 'HIS', 'GROWING', 'IMPATIENCE'] +4446-2275-0001-1196: ref=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0001-1196: hyp=['SHE', 'BLUSHED', 'AND', 'SMILED', 'AND', 'FUMBLED', 'HIS', 'CARD', 'IN', 'HER', 'CONFUSION', 'BEFORE', 'SHE', 'RAN', 'UPSTAIRS'] +4446-2275-0002-1197: ref=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0002-1197: hyp=['ALEXANDER', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'HALLWAY', 'BUTTONING', 'AND', 'UNBUTTONING', 'HIS', 'OVERCOAT', 'UNTIL', 'SHE', 'RETURNED', 'AND', 'TOOK', 'HIM', 'UP', 'TO', "HILDA'S", 'LIVING', 'ROOM'] +4446-2275-0003-1198: ref=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 
'HE', 'ENTERED'] +4446-2275-0003-1198: hyp=['THE', 'ROOM', 'WAS', 'EMPTY', 'WHEN', 'HE', 'ENTERED'] +4446-2275-0004-1199: ref=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0004-1199: hyp=['ALEXANDER', 'DID', 'NOT', 'SIT', 'DOWN'] +4446-2275-0005-1200: ref=['I', 'FELT', 'IT', 'IN', 'MY', 'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0005-1200: hyp=['I', 'FELT', 'IT', 'IN', 'MY', 'BONES', 'WHEN', 'I', 'WOKE', 'THIS', 'MORNING', 'THAT', 'SOMETHING', 'SPLENDID', 'WAS', 'GOING', 'TO', 'TURN', 'UP'] +4446-2275-0006-1201: ref=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0006-1201: hyp=['I', 'THOUGHT', 'IT', 'MIGHT', 'BE', 'SISTER', 'KATE', 'OR', 'COUSIN', 'MIKE', 'WOULD', 'BE', 'HAPPENING', 'ALONG'] +4446-2275-0007-1202: ref=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0007-1202: hyp=['SHE', 'PUSHED', 'HIM', 'TOWARD', 'THE', 'BIG', 'CHAIR', 'BY', 'THE', 'FIRE', 'AND', 'SAT', 'DOWN', 'ON', 'A', 'STOOL', 'AT', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'HEARTH', 'HER', 'KNEES', 'DRAWN', 'UP', 'TO', 'HER', 'CHIN', 'LAUGHING', 'LIKE', 'A', 'HAPPY', 'LITTLE', 'GIRL'] +4446-2275-0008-1203: ref=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0008-1203: hyp=['WHEN', 'DID', 'YOU', 'COME', 'BARTLEY', 'AND', 'HOW', 'DID', 'IT', 'HAPPEN', 'YOU', "HAVEN'T", 'SPOKEN', 'A', 'WORD'] +4446-2275-0009-1204: ref=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0009-1204: hyp=['I', 'GOT', 'IN', 'ABOUT', 'TEN', 'MINUTES', 'AGO'] +4446-2275-0010-1205: ref=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0010-1205: hyp=['ALEXANDER', 'LEANED', 'FORWARD', 'AND', 'WARMED', 'HIS', 'HANDS', 'BEFORE', 'THE', 'BLAZE'] +4446-2275-0011-1206: ref=['BARTLEY', 'BENT', 'LOWER', 'OVER', 'THE', 'FIRE'] +4446-2275-0011-1206: hyp=['BARTLEY', 'BENT', 'LOWERED', 'OVER', 'THE', 'FIRE'] +4446-2275-0012-1207: ref=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0012-1207: hyp=['SHE', 'LOOKED', 'AT', 'HIS', 'HEAVY', 'SHOULDERS', 'AND', 'BIG', 'DETERMINED', 'HEAD', 'THRUST', 'FORWARD', 'LIKE', 'A', 'CATAPULT', 'IN', 'LEASH'] +4446-2275-0013-1208: ref=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0013-1208: hyp=["I'LL", 'DO', 'ANYTHING', 'YOU', 'WISH', 'ME', 'TO', 'BARTLEY', 'SHE', 'SAID', 'TREMULOUSLY'] +4446-2275-0014-1209: ref=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0014-1209: hyp=['I', "CAN'T", 'STAND', 'SEEING', 'YOU', 'MISERABLE'] +4446-2275-0015-1210: ref=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 'HEAVY'] +4446-2275-0015-1210: hyp=['HE', 'PULLED', 'UP', 'A', 'WINDOW', 'AS', 'IF', 'THE', 'AIR', 'WERE', 'HEAVY'] +4446-2275-0016-1211: ref=['HILDA', 'WATCHED', 'HIM', 'FROM', 'HER', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0016-1211: hyp=['HILDA', 'WATCHED', 'HIM', 'FROM', 
'THE', 'CORNER', 'TREMBLING', 'AND', 'SCARCELY', 'BREATHING', 'DARK', 'SHADOWS', 'GROWING', 'ABOUT', 'HER', 'EYES', 'IT'] +4446-2275-0017-1212: ref=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] +4446-2275-0017-1212: hyp=['BUT', "IT'S", 'WORSE', 'NOW', "IT'S", 'UNBEARABLE'] +4446-2275-0018-1213: ref=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0018-1213: hyp=['I', 'GET', 'NOTHING', 'BUT', 'MISERY', 'OUT', 'OF', 'EITHER'] +4446-2275-0019-1214: ref=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0019-1214: hyp=['THE', 'WORLD', 'IS', 'ALL', 'THERE', 'JUST', 'AS', 'IT', 'USED', 'TO', 'BE', 'BUT', 'I', "CAN'T", 'GET', 'AT', 'IT', 'ANY', 'MORE'] +4446-2275-0020-1215: ref=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0020-1215: hyp=['IT', 'WAS', 'MYSELF', 'I', 'WAS', 'DEFYING', 'HILDA'] +4446-2275-0021-1216: ref=["HILDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0021-1216: hyp=["HELDA'S", 'FACE', 'QUIVERED', 'BUT', 'SHE', 'WHISPERED', 'YES', 'I', 'THINK', 'IT', 'MUST', 'HAVE', 'BEEN'] +4446-2275-0022-1217: ref=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0022-1217: hyp=['BUT', 'WHY', "DIDN'T", 'YOU', 'TELL', 'ME', 'WHEN', 'YOU', 'WERE', 'HERE', 'IN', 'THE', 'SUMMER'] +4446-2275-0023-1218: ref=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0023-1218: hyp=['ALEXANDER', 'GROANED', 'I', 'MEANT', 'TO', 'BUT', 'SOMEHOW', 'I', "COULDN'T"] +4446-2275-0024-1219: ref=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0024-1219: hyp=['SHE', 'PRESSED', 'HIS', 'HAND', 'GENTLY', 'IN', 'GRATITUDE'] +4446-2275-0025-1220: ref=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0025-1220: hyp=["WEREN'T", 'YOU', 'HAPPY', 'THEN', 'AT', 'ALL'] +4446-2275-0026-1221: ref=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0026-1221: hyp=['SHE', 'CLOSED', 'HER', 'EYES', 'AND', 'TOOK', 'A', 'DEEP', 'BREATH', 'AS', 'IF', 'TO', 'DRAW', 'IN', 'AGAIN', 'THE', 'FRAGRANCE', 'OF', 'THOSE', 'DAYS'] +4446-2275-0027-1222: ref=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0027-1222: hyp=['HE', 'MOVED', 'UNEASILY', 'AND', 'HIS', 'CHAIR', 'CREAKED'] +4446-2275-0028-1223: ref=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0028-1223: hyp=['YES', 'YES', 'SHE', 'HURRIED', 'PULLING', 'HER', 'HAND', 'GENTLY', 'AWAY', 'FROM', 'HIM'] +4446-2275-0029-1224: ref=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0029-1224: hyp=['PLEASE', 'TELL', 'ME', 'ONE', 'THING', 'BARTLEY', 'AT', 'LEAST', 'TELL', 'ME', 'THAT', 'YOU', 'BELIEVE', 'I', 'THOUGHT', 'I', 'WAS', 'MAKING', 'YOU', 'HAPPY'] +4446-2275-0030-1225: ref=['YES', 'HILDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0030-1225: hyp=['YES', 'HELDA', 'I', 'KNOW', 'THAT', 'HE', 'SAID', 'SIMPLY'] +4446-2275-0031-1226: ref=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0031-1226: hyp=['I', 'UNDERSTAND', 'BARTLEY', 'I', 'WAS', 'WRONG'] +4446-2275-0032-1227: ref=['BUT', 'I', "DIDN'T", 'KNOW', 
"YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0032-1227: hyp=['BUT', 'I', "DIDN'T", 'KNOW', "YOU'VE", 'ONLY', 'TO', 'TELL', 'ME', 'NOW'] +4446-2275-0033-1228: ref=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0033-1228: hyp=['WHAT', 'I', 'MEAN', 'IS', 'THAT', 'I', 'WANT', 'YOU', 'TO', 'PROMISE', 'NEVER', 'TO', 'SEE', 'ME', 'AGAIN', 'NO', 'MATTER', 'HOW', 'OFTEN', 'I', 'COME', 'NO', 'MATTER', 'HOW', 'HARD', 'I', 'BEG'] +4446-2275-0034-1229: ref=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0034-1229: hyp=['KEEP', 'AWAY', 'IF', 'YOU', 'WISH', 'WHEN', 'HAVE', 'I', 'EVER', 'FOLLOWED', 'YOU'] +4446-2275-0035-1230: ref=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0035-1230: hyp=['ALEXANDER', 'ROSE', 'AND', 'SHOOK', 'HIMSELF', 'ANGRILY', 'YES', 'I', 'KNOW', "I'M", 'COWARDLY'] +4446-2275-0036-1231: ref=['HE', 'TOOK', 'HER', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0036-1231: hyp=['HE', 'TOOK', 'A', 'ROUGHLY', 'IN', 'HIS', 'ARMS', 'DO', 'YOU', 'KNOW', 'WHAT', 'I', 'MEAN'] +4446-2275-0037-1232: ref=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0037-1232: hyp=['OH', 'BARTLEY', 'WHAT', 'AM', 'I', 'TO', 'DO'] +4446-2275-0038-1233: ref=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0038-1233: hyp=['I', 'WILL', 'ASK', 'THE', 'LEAST', 'IMAGINABLE', 'BUT', 'I', 'MUST', 'HAVE', 'SOMETHING'] +4446-2275-0039-1234: ref=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0039-1234: hyp=['I', 'MUST', 'KNOW', 'ABOUT', 'YOU'] +4446-2275-0040-1235: ref=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0040-1235: hyp=['THE', 'SIGHT', 'OF', 'YOU', 'BARTLEY', 'TO', 'SEE', 'YOU', 'LIVING', 'AND', 'HAPPY', 'AND', 'SUCCESSFUL', 'CAN', 'I', 'NEVER', 'MAKE', 'YOU', 'UNDERSTAND', 'WHAT', 'THAT', 'MEANS', 'TO', 'ME'] +4446-2275-0041-1236: ref=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0041-1236: hyp=['YOU', 'SEE', 'LOVING', 'SOME', 'ONE', 'AS', 'I', 'LOVE', 'YOU', 'MAKES', 'THE', 'WHOLE', 'WORLD', 'DIFFERENT'] +4446-2275-0042-1237: ref=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0042-1237: hyp=['AND', 'THEN', 'YOU', 'CAME', 'BACK', 'NOT', 'CARING', 'VERY', 'MUCH', 'BUT', 'IT', 'MADE', 'NO', 'DIFFERENCE'] +4446-2275-0043-1238: ref=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0043-1238: hyp=['BARTLEY', 'BENT', 'OVER', 'AND', 'TOOK', 'HER', 'IN', 'HIS', 'ARMS', 'KISSING', 'HER', 'MOUTH', 'AND', 'HER', 'WET', 'TIRED', 'EYES'] +4446-2275-0044-1239: ref=["DON'T", 'CRY', "DON'T", 'CRY', 'HE', 'WHISPERED'] +4446-2275-0044-1239: hyp=['I', "DON'T", 'CRY', "DON'T", 'CRY', 'HE', 'WHISPERED'] +4446-2275-0045-1240: ref=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TONIGHT'] +4446-2275-0045-1240: hyp=["WE'VE", 'TORTURED', 'EACH', 'OTHER', 'ENOUGH', 'FOR', 'TO', 'NIGHT'] +4507-16021-0000-1469: ref=['CHAPTER', 'ONE', 'ORIGIN'] 
+4507-16021-0000-1469: hyp=['CHAPTER', 'ONE', 'ORIGIN'] +4507-16021-0001-1470: ref=['IT', 'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEGRE', 'FOR', 'WHICH', 'READ', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRENNE', 'FOR', 'WHICH', 'READ', 'HUNGER'] +4507-16021-0001-1470: hyp=['IT', 'ENGENDERS', 'A', 'WHOLE', 'WORLD', 'LA', 'PEG', 'FOR', 'WHICH', 'RED', 'THEFT', 'AND', 'A', 'HELL', 'LA', 'PEGRIN', 'FOR', 'WHICH', 'RED', 'HUNGER'] +4507-16021-0002-1471: ref=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0002-1471: hyp=['THUS', 'IDLENESS', 'IS', 'THE', 'MOTHER'] +4507-16021-0003-1472: ref=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0003-1472: hyp=['SHE', 'HAS', 'A', 'SON', 'THEFT', 'AND', 'A', 'DAUGHTER', 'HUNGER'] +4507-16021-0004-1473: ref=['WHAT', 'IS', 'SLANG'] +4507-16021-0004-1473: hyp=['WHAT', 'IS', 'SLANG'] +4507-16021-0005-1474: ref=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0005-1474: hyp=['WE', 'HAVE', 'NEVER', 'UNDERSTOOD', 'THIS', 'SORT', 'OF', 'OBJECTIONS'] +4507-16021-0006-1475: ref=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0006-1475: hyp=['SLANG', 'IS', 'ODIOUS'] +4507-16021-0007-1476: ref=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0007-1476: hyp=['SLANG', 'MAKES', 'ONE', 'SHUDDER'] +4507-16021-0008-1477: ref=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0008-1477: hyp=['WHO', 'DENIES', 'THAT', 'OF', 'COURSE', 'IT', 'DOES'] +4507-16021-0009-1478: ref=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'WHEN', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0009-1478: hyp=['WHEN', 'IT', 'IS', 'A', 'QUESTION', 'OF', 'PROBING', 'A', 'WOUND', 'A', 'GULF', 'A', 'SOCIETY', 'SINCE', 'ONE', 'HAS', 'IT', 'BEEN', 'CONSIDERED', 'WRONG', 'TO', 'GO', 'TOO', 'FAR', 'TO', 'GO', 'TO', 'THE', 'BOTTOM'] +4507-16021-0010-1479: ref=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'AND', 'FULFILLED', 'MERITS'] +4507-16021-0010-1479: hyp=['WE', 'HAVE', 'ALWAYS', 'THOUGHT', 'THAT', 'IT', 'WAS', 'SOMETIMES', 'A', 'COURAGEOUS', 'ACT', 'AND', 'AT', 'LEAST', 'A', 'SIMPLE', 'AND', 'USEFUL', 'DEED', 'WORTHY', 'OF', 'THE', 'SYMPATHETIC', 'ATTENTION', 'WHICH', 'DUTY', 'ACCEPTED', 'IN', 'FULFILLED', 'MERITS'] +4507-16021-0011-1480: ref=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0011-1480: hyp=['WHY', 'SHOULD', 'ONE', 'NOT', 'EXPLORE', 'EVERYTHING', 'AND', 'STUDY', 'EVERYTHING'] +4507-16021-0012-1481: ref=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0012-1481: hyp=['WHY', 'SHOULD', 'ONE', 'HALT', 'ON', 'THE', 'WAY'] +4507-16021-0013-1482: ref=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0013-1482: hyp=['NOTHING', 'IS', 'MORE', 'LUGUBRIOUS', 'THAN', 'THE', 'CONTEMPLATION', 'THUS', 'IN', 'ITS', 'NUDITY', 'IN', 'THE', 'BROAD', 'LIGHT', 'OF', 'THOUGHT', 'OF', 'THE', 'HORRIBLE', 'SWARMING', 'OF', 'SLANG'] +4507-16021-0014-1483: ref=['NOW', 'WHEN', 'HAS', 'HORROR', 'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0014-1483: hyp=['NO', 'WHEN', 'HAS', 'HORROR', 
'EVER', 'EXCLUDED', 'STUDY'] +4507-16021-0015-1484: ref=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0015-1484: hyp=['SINCE', 'WHEN', 'HAS', 'MALADY', 'BANISHED', 'MEDICINE'] +4507-16021-0016-1485: ref=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TARANTULA', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'OH', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0016-1485: hyp=['CAN', 'ONE', 'IMAGINE', 'A', 'NATURALIST', 'REFUSING', 'TO', 'STUDY', 'THE', 'VIPER', 'THE', 'BAT', 'THE', 'SCORPION', 'THE', 'CENTIPEDE', 'THE', 'TERENTIAL', 'AND', 'ONE', 'WHO', 'WOULD', 'CAST', 'THEM', 'BACK', 'INTO', 'THEIR', 'DARKNESS', 'SAYING', 'OH', 'HOW', 'UGLY', 'THAT', 'IS'] +4507-16021-0017-1486: ref=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0017-1486: hyp=['HE', 'WOULD', 'BE', 'LIKE', 'A', 'PHILOLOGIST', 'REFUSING', 'TO', 'EXAMINE', 'A', 'FACT', 'IN', 'LANGUAGE', 'A', 'PHILOSOPHER', 'HESITATING', 'TO', 'SCRUTINIZE', 'A', 'FACT', 'IN', 'HUMANITY'] +4507-16021-0018-1487: ref=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0018-1487: hyp=['WHAT', 'IS', 'SLANG', 'PROPERLY', 'SPEAKING'] +4507-16021-0019-1488: ref=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0019-1488: hyp=['IT', 'IS', 'THE', 'LANGUAGE', 'OF', 'WRETCHEDNESS'] +4507-16021-0020-1489: ref=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0020-1489: hyp=['WE', 'MAY', 'BE', 'STOPPED', 'THE', 'FACT', 'MAY', 'BE', 'PUT', 'TO', 'US', 'IN', 'GENERAL', 'TERMS', 'WHICH', 'IS', 'ONE', 'WAY', 'OF', 'ATTENUATING', 'IT', 'WE', 'MAY', 'BE', 'TOLD', 'THAT', 'ALL', 'TRADES', 'PROFESSIONS', 'IT', 'MAY', 'BE', 'ADDED', 'ALL', 'THE', 'ACCIDENTS', 'OF', 'THE', 'SOCIAL', 'HIERARCHY', 'AND', 'ALL', 'FORMS', 'OF', 'INTELLIGENCE', 'HAVE', 'THEIR', 'OWN', 'SLANG'] +4507-16021-0021-1490: ref=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIRDRESSER', 'WHO', 'SAYS', 'MY', 'MEALYBACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0021-1490: hyp=['THE', 'PAINTER', 'WHO', 'SAYS', 'MY', 'GRINDER', 'THE', 'NOTARY', 'WHO', 'SAYS', 'MY', 'SKIP', 'THE', 'GUTTER', 'THE', 'HAIR', 'DRESSER', 'WHO', 'SAYS', 'MY', 'MEALLY', 'BACK', 'THE', 'COBBLER', 'WHO', 'SAYS', 'MY', 'CUB', 'TALKS', 'SLANG'] +4507-16021-0022-1491: ref=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 'AS', 'WELL', 'AS', 'OF', 'THE', 'PRECIEUSES'] +4507-16021-0022-1491: hyp=['THERE', 'IS', 'THE', 'SLANG', 'OF', 'THE', 'AFFECTED', 'LADY', 'AS', 'WELL', 'AS', 'OF', 'THE', 'PURSUS'] +4507-16021-0023-1492: ref=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0023-1492: hyp=['THE', 'SUGAR', 'MANUFACTURER', 'WHO', 'SAYS', 'LOAF', 'CLARIFIED', 'LUMPS', 'BASTARD', 'COMMON', 
'BURNT', 'THIS', 'HONEST', 'MANUFACTURER', 'TALKS', 'SLANG'] +4507-16021-0024-1493: ref=['ALGEBRA', 'MEDICINE', 'BOTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0024-1493: hyp=['ALGEBRA', 'MEDICINE', 'BARTANY', 'HAVE', 'EACH', 'THEIR', 'SLANG'] +4507-16021-0025-1494: ref=['TO', 'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0025-1494: hyp=['TO', 'MEET', 'THE', 'NEEDS', 'OF', 'THIS', 'CONFLICT', 'WRETCHEDNESS', 'HAS', 'INVENTED', 'A', 'LANGUAGE', 'OF', 'COMBAT', 'WHICH', 'IS', 'SLANG'] +4507-16021-0026-1495: ref=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0026-1495: hyp=['TO', 'KEEP', 'AFLOAT', 'AND', 'TO', 'RESCUE', 'FROM', 'OBLIVION', 'TO', 'HOLD', 'ABOVE', 'THE', 'GULF', 'WERE', 'IT', 'BUT', 'A', 'FRAGMENT', 'OF', 'SOME', 'LANGUAGE', 'WHICH', 'MAN', 'HAS', 'SPOKEN', 'AND', 'WHICH', 'WOULD', 'OTHERWISE', 'BE', 'LOST', 'THAT', 'IS', 'TO', 'SAY', 'ONE', 'OF', 'THE', 'ELEMENTS', 'GOOD', 'OR', 'BAD', 'OF', 'WHICH', 'CIVILIZATION', 'IS', 'COMPOSED', 'OR', 'BY', 'WHICH', 'IT', 'IS', 'COMPLICATED', 'TO', 'EXTEND', 'THE', 'RECORDS', 'OF', 'SOCIAL', 'OBSERVATION', 'IS', 'TO', 'SERVE', 'CIVILIZATION', 'ITSELF'] +4507-16021-0027-1496: ref=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0027-1496: hyp=['PHOENICIAN', 'VERY', 'GOOD'] +4507-16021-0028-1497: ref=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0028-1497: hyp=['EVEN', 'DIALECT', 'LET', 'THAT', 'PASS'] +4507-16021-0029-1498: ref=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0029-1498: hyp=['TO', 'THIS', 'WE', 'REPLY', 'IN', 'ONE', 'WORD', 'ONLY'] +4507-16021-0030-1499: ref=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0030-1499: hyp=['ASSUREDLY', 'IF', 'THE', 'TONGUE', 'WHICH', 'A', 'NATION', 'OR', 'A', 'PROVINCE', 'HAS', 'SPOKEN', 'IS', 'WORTHY', 'OF', 'INTEREST', 'THE', 'LANGUAGE', 'WHICH', 'HAS', 'BEEN', 'SPOKEN', 'BY', 'A', 'MISERY', 'IS', 'STILL', 'MORE', 'WORTHY', 'OF', 'ATTENTION', 'AND', 'STUDY'] +4507-16021-0031-1500: ref=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICE', 'IS', 'PERMITTED'] +4507-16021-0031-1500: hyp=['AND', 'THEN', 'WE', 'INSIST', 'UPON', 'IT', 'THE', 'STUDY', 'OF', 'SOCIAL', 'DEFORMITIES', 'AND', 'INFIRMITIES', 'AND', 'THE', 'TASK', 'OF', 'POINTING', 'THEM', 'OUT', 'WITH', 'A', 'VIEW', 'TO', 'REMEDY', 'IS', 'NOT', 'A', 'BUSINESS', 'IN', 'WHICH', 'CHOICE', 'IS', 'PERMITTED'] +4507-16021-0032-1501: ref=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 
'A', 'BROTHER', 'AND', 'AS', 'A', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WHERE', 'CRAWL', 'PELL', 'MELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'AND', 'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0032-1501: hyp=['HE', 'MUST', 'DESCEND', 'WITH', 'HIS', 'HEART', 'FULL', 'OF', 'CHARITY', 'AND', 'SEVERITY', 'AT', 'THE', 'SAME', 'TIME', 'AS', 'A', 'BROTHER', 'AND', 'AS', 'HE', 'JUDGE', 'TO', 'THOSE', 'IMPENETRABLE', 'CASEMATES', 'WERE', 'CRAWL', 'PELL', 'MELL', 'THOSE', 'WHO', 'BLEED', 'AND', 'THOSE', 'WHO', 'DEAL', 'THE', 'BLOW', 'THOSE', 'WHO', 'WEEP', 'AND', 'THOSE', 'WHO', 'CURSE', 'THOSE', 'WHO', 'FAST', 'IN', 'THOSE', 'WHO', 'DEVOUR', 'THOSE', 'WHO', 'ENDURE', 'EVIL', 'AND', 'THOSE', 'WHO', 'INFLICT', 'IT'] +4507-16021-0033-1502: ref=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0033-1502: hyp=['DO', 'WE', 'REALLY', 'KNOW', 'THE', 'MOUNTAIN', 'WELL', 'WHEN', 'WE', 'ARE', 'NOT', 'ACQUAINTED', 'WITH', 'THE', 'CAVERN'] +4507-16021-0034-1503: ref=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0034-1503: hyp=['THEY', 'CONSTITUTE', 'TWO', 'DIFFERENT', 'ORDERS', 'OF', 'FACTS', 'WHICH', 'CORRESPOND', 'TO', 'EACH', 'OTHER', 'WHICH', 'ARE', 'ALWAYS', 'INTERLACED', 'AND', 'WHICH', 'OFTEN', 'BRING', 'FORTH', 'RESULTS'] +4507-16021-0035-1504: ref=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0035-1504: hyp=['TRUE', 'HISTORY', 'BEING', 'A', 'MIXTURE', 'OF', 'ALL', 'THINGS', 'THE', 'TRUE', 'HISTORIAN', 'MINGLES', 'IN', 'EVERYTHING'] +4507-16021-0036-1505: ref=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0036-1505: hyp=['FACTS', 'FORM', 'ONE', 'OF', 'THESE', 'AND', 'IDEAS', 'THE', 'OTHER'] +4507-16021-0037-1506: ref=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0037-1506: hyp=['THERE', 'IT', 'CLOTHES', 'ITSELF', 'IN', 'WORD', 'MASKS', 'IN', 'METAPHOR', 'RAGS'] +4507-16021-0038-1507: ref=['IN', 'THIS', 'GUISE', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0038-1507: hyp=['IN', 'THIS', 'SKIES', 'IT', 'BECOMES', 'HORRIBLE'] +4507-16021-0039-1508: ref=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0039-1508: hyp=['ONE', 'PERCEIVES', 'WITHOUT', 'UNDERSTANDING', 'IT', 'A', 'HIDEOUS', 'MURMUR', 'SOUNDING', 'ALMOST', 'LIKE', 'HUMAN', 'ACCENTS', 'BUT', 'MORE', 'NEARLY', 'RESEMBLING', 'A', 'HOWL', 'THAN', 'AN', 'ARTICULATE', 'WORD'] +4507-16021-0040-1509: ref=['ONE', 'THINKS', 'ONE', 'HEARS', 'HYDRAS', 'TALKING'] +4507-16021-0040-1509: hyp=['ONE', 'THINKS', 'ONE', 'HEARS', 'HYDRAST', 'TALKING'] +4507-16021-0041-1510: ref=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0041-1510: hyp=['IT', 'IS', 'UNINTELLIGIBLE', 'IN', 'THE', 'DARK'] +4507-16021-0042-1511: ref=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'IN', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 
'AMALGAMATED', 'COMPOSE', 'SLANG'] +4507-16021-0042-1511: hyp=['IT', 'IS', 'BLACK', 'IN', 'MISFORTUNE', 'IT', 'IS', 'BLACKER', 'STILL', 'AND', 'CRIME', 'THESE', 'TWO', 'BLACKNESSES', 'AMALGAMATED', 'COMPOSED', 'SLANG'] +4507-16021-0043-1512: ref=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0043-1512: hyp=['THE', 'EARTH', 'IS', 'NOT', 'DEVOID', 'OF', 'RESEMBLANCE', 'TO', 'A', 'JAIL'] +4507-16021-0044-1513: ref=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0044-1513: hyp=['LOOK', 'CLOSELY', 'AT', 'LIFE'] +4507-16021-0045-1514: ref=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0045-1514: hyp=['IT', 'IS', 'SO', 'MADE', 'THAT', 'EVERYWHERE', 'WE', 'FEEL', 'THE', 'SENSE', 'OF', 'PUNISHMENT'] +4507-16021-0046-1515: ref=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'OR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0046-1515: hyp=['EACH', 'DAY', 'HAS', 'ITS', 'OWN', 'GREAT', 'GRIEF', 'FOR', 'ITS', 'LITTLE', 'CARE'] +4507-16021-0047-1516: ref=['YESTERDAY', 'YOU', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIATRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0047-1516: hyp=['YESTERDAY', 'YOU', 'WERE', 'TREMBLING', 'FOR', 'A', 'HEALTH', 'THAT', 'IS', 'DEAR', 'TO', 'YOU', 'TO', 'DAY', 'YOU', 'FEAR', 'FOR', 'YOUR', 'OWN', 'TO', 'MORROW', 'IT', 'WILL', 'BE', 'ANXIETY', 'ABOUT', 'MONEY', 'THE', 'DAY', 'AFTER', 'TO', 'MORROW', 'THE', 'DIETRIBE', 'OF', 'A', 'SLANDERER', 'THE', 'DAY', 'AFTER', 'THAT', 'THE', 'MISFORTUNE', 'OF', 'SOME', 'FRIEND', 'THEN', 'THE', 'PREVAILING', 'WEATHER', 'THEN', 'SOMETHING', 'THAT', 'HAS', 'BEEN', 'BROKEN', 'OR', 'LOST', 'THEN', 'A', 'PLEASURE', 'WITH', 'WHICH', 'YOUR', 'CONSCIENCE', 'AND', 'YOUR', 'VERTEBRAL', 'COLUMN', 'REPROACH', 'YOU', 'AGAIN', 'THE', 'COURSE', 'OF', 'PUBLIC', 'AFFAIRS'] +4507-16021-0048-1517: ref=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'IT', 'GOES', 'ON'] +4507-16021-0048-1517: hyp=['THIS', 'WITHOUT', 'RECKONING', 'IN', 'THE', 'PAINS', 'OF', 'THE', 'HEART', 'AND', 'SO', 'TO', 'GOES', 'ON'] +4507-16021-0049-1518: ref=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0049-1518: hyp=['THERE', 'IS', 'HARDLY', 'ONE', 'DAY', 'OUT', 'OF', 'A', 'HUNDRED', 'WHICH', 'IS', 'WHOLLY', 'JOYOUS', 'AND', 'SUNNY'] +4507-16021-0050-1519: ref=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'HAPPY'] +4507-16021-0050-1519: hyp=['AND', 'YOU', 'BELONG', 'TO', 'THAT', 'SMALL', 'CLASS', 'WHO', 'ARE', 'A', 'HAPPY'] +4507-16021-0051-1520: ref=['IN', 'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0051-1520: hyp=['IN', 'THIS', 'WORLD', 'EVIDENTLY', 'THE', 'VESTIBULE', 'OF', 'ANOTHER', 'THERE', 'ARE', 'NO', 'FORTUNATE'] +4507-16021-0052-1521: ref=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 
'AND', 'THE', 'SHADY'] +4507-16021-0052-1521: hyp=['THE', 'REAL', 'HUMAN', 'DIVISION', 'IS', 'THIS', 'THE', 'LUMINOUS', 'AND', 'THE', 'SHADY'] +4507-16021-0053-1522: ref=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 'THE', 'OBJECT'] +4507-16021-0053-1522: hyp=['TO', 'DIMINISH', 'THE', 'NUMBER', 'OF', 'THE', 'SHADY', 'TO', 'AUGMENT', 'THE', 'NUMBER', 'OF', 'THE', 'LUMINOUS', 'THAT', 'IS', 'THE', 'OBJECT'] +4507-16021-0054-1523: ref=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0054-1523: hyp=['THAT', 'IS', 'WHY', 'WE', 'CRY', 'EDUCATION', 'SCIENCE'] +4507-16021-0055-1524: ref=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'LIGHT', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0055-1524: hyp=['TO', 'TEACH', 'READING', 'MEANS', 'TO', 'WRITE', 'THE', 'FIRE', 'EVERY', 'SYLLABLE', 'SPELLED', 'OUT', 'SPARKLES'] +4507-16021-0056-1525: ref=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0056-1525: hyp=['HOWEVER', 'HE', 'WHO', 'SAYS', 'LIGHT', 'DOES', 'NOT', 'NECESSARILY', 'SAY', 'JOY'] +4507-16021-0057-1526: ref=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0057-1526: hyp=['PEOPLE', 'SUFFER', 'IN', 'THE', 'LIGHT', 'EXCESS', 'BURNS'] +4507-16021-0058-1527: ref=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0058-1527: hyp=['THE', 'FLAME', 'IS', 'THE', 'ENEMY', 'OF', 'THE', 'WING'] +4507-16021-0059-1528: ref=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4507-16021-0059-1528: hyp=['TO', 'BURN', 'WITHOUT', 'CEASING', 'TO', 'FLY', 'THEREIN', 'LIES', 'THE', 'MARVEL', 'OF', 'GENIUS'] +4970-29093-0000-2093: ref=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTOR', 'LIBRARY'] +4970-29093-0000-2093: hyp=["YOU'LL", 'NEVER', 'DIG', 'IT', 'OUT', 'OF', 'THE', 'ASTRO', 'LIBRARY'] +4970-29093-0001-2094: ref=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0001-2094: hyp=['TO', 'THE', 'YOUNG', 'AMERICAN', 'HERE', 'OR', 'ELSEWHERE', 'THE', 'PATHS', 'TO', 'FORTUNE', 'ARE', 'INNUMERABLE', 'AND', 'ALL', 'OPEN', 'THERE', 'IS', 'INVITATION', 'IN', 'THE', 'AIR', 'AND', 'SUCCESS', 'IN', 'ALL', 'HIS', 'WIDE', 'HORIZON'] +4970-29093-0002-2095: ref=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0002-2095: hyp=['HE', 'HAS', 'NO', 'TRADITIONS', 'TO', 'BIND', 'HIM', 'OR', 'GUIDE', 'HIM', 'AND', 'HIS', 'IMPULSE', 'IS', 'TO', 'BREAK', 'AWAY', 'FROM', 'THE', 'OCCUPATION', 'HIS', 'FATHER', 'HAS', 'FOLLOWED', 'AND', 'MAKE', 'A', 'NEW', 'WAY', 'FOR', 'HIMSELF'] +4970-29093-0003-2096: ref=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILLFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 'DOCTOR', 'KANE'] +4970-29093-0003-2096: hyp=['THE', 'MODEST', 'FELLOW', 'WOULD', 'HAVE', 
'LIKED', 'FAME', 'THRUST', 'UPON', 'HIM', 'FOR', 'SOME', 'WORTHY', 'ACHIEVEMENT', 'IT', 'MIGHT', 'BE', 'FOR', 'A', 'BOOK', 'OR', 'FOR', 'THE', 'SKILFUL', 'MANAGEMENT', 'OF', 'SOME', 'GREAT', 'NEWSPAPER', 'OR', 'FOR', 'SOME', 'DARING', 'EXPEDITION', 'LIKE', 'THAT', 'OF', 'LIEUTENANT', 'STRAIN', 'OR', 'DOCTOR', 'KANE'] +4970-29093-0004-2097: ref=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0004-2097: hyp=['HE', 'WAS', 'UNABLE', 'TO', 'DECIDE', 'EXACTLY', 'WHAT', 'IT', 'SHOULD', 'BE'] +4970-29093-0005-2098: ref=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROWS', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BUL', 'BUL', 'SINGS', 'ON', 'THE', 'OFF', 'NIGHTS'] +4970-29093-0005-2098: hyp=['SOMETIMES', 'HE', 'THOUGHT', 'HE', 'WOULD', 'LIKE', 'TO', 'STAND', 'IN', 'A', 'CONSPICUOUS', 'PULPIT', 'AND', 'HUMBLY', 'PREACH', 'THE', 'GOSPEL', 'OF', 'REPENTANCE', 'AND', 'IT', 'EVEN', 'CROSSED', 'HIS', 'MIND', 'THAT', 'IT', 'WOULD', 'BE', 'NOBLE', 'TO', 'GIVE', 'HIMSELF', 'TO', 'A', 'MISSIONARY', 'LIFE', 'TO', 'SOME', 'BENIGHTED', 'REGION', 'WHERE', 'THE', 'DATE', 'PALM', 'GROVES', 'AND', 'THE', "NIGHTINGALE'S", 'VOICE', 'IS', 'IN', 'TUNE', 'AND', 'THE', 'BULBUL', 'SINGS', 'ON', 'THE', 'OFF', 'NIGHTS'] +4970-29093-0006-2099: ref=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'IN', 'THE', 'ANTE', 'ROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLE', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'FEES'] +4970-29093-0006-2099: hyp=['LAW', 'SEEMED', 'TO', 'HIM', 'WELL', 'ENOUGH', 'AS', 'A', 'SCIENCE', 'BUT', 'HE', 'NEVER', 'COULD', 'DISCOVER', 'A', 'PRACTICAL', 'CASE', 'WHERE', 'IT', 'APPEARED', 'TO', 'HIM', 'WORTH', 'WHILE', 'TO', 'GO', 'TO', 'LAW', 'AND', 'ALL', 'THE', 'CLIENTS', 'WHO', 'STOPPED', 'WITH', 'THIS', 'NEW', 'CLERK', 'AND', 'THE', 'ANTE', 'ROOM', 'OF', 'THE', 'LAW', 'OFFICE', 'WHERE', 'HE', 'WAS', 'WRITING', 'PHILIP', 'INVARIABLY', 'ADVISED', 'TO', 'SETTLE', 'NO', 'MATTER', 'HOW', 'BUT', 'SETTLED', 'GREATLY', 'TO', 'THE', 'DISGUST', 'OF', 'HIS', 'EMPLOYER', 'WHO', 'KNEW', 'THAT', 'JUSTICE', 'BETWEEN', 'MAN', 'AND', 'MAN', 'COULD', 'ONLY', 'BE', 'ATTAINED', 'BY', 'THE', 'RECOGNIZED', 'PROCESSES', 'WITH', 'THE', 'ATTENDANT', 'BEES'] +4970-29093-0007-2100: ref=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0007-2100: hyp=['IT', 'IS', 'SUCH', 'A', 'NOBLE', 'AMBITION', 'THAT', 'IT', 'IS', 'A', 'PITY', 'IT', 'HAS', 'USUALLY', 'SUCH', 'A', 'SHALLOW', 'FOUNDATION'] +4970-29093-0008-2101: ref=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 
'THE', 'LADDER'] +4970-29093-0008-2101: hyp=['HE', 'WANTED', 'TO', 'BEGIN', 'AT', 'THE', 'TOP', 'OF', 'THE', 'LADDER'] +4970-29093-0009-2102: ref=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 'NURSED', 'HIS', 'GENIUS'] +4970-29093-0009-2102: hyp=['PHILIP', 'THEREFORE', 'READ', 'DILIGENTLY', 'IN', 'THE', 'ASTOR', 'LIBRARY', 'PLANNED', 'LITERARY', 'WORKS', 'THAT', 'SHOULD', 'COMPEL', 'ATTENTION', 'AND', 'NURSED', 'HIS', 'GENIUS'] +4970-29093-0010-2103: ref=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THEN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPEVINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0010-2103: hyp=['HE', 'HAD', 'NO', 'FRIEND', 'WISE', 'ENOUGH', 'TO', 'TELL', 'HIM', 'TO', 'STEP', 'INTO', 'THE', 'DORKING', 'CONVENTION', 'THAN', 'IN', 'SESSION', 'MAKE', 'A', 'SKETCH', 'OF', 'THE', 'MEN', 'AND', 'WOMEN', 'ON', 'THE', 'PLATFORM', 'AND', 'TAKE', 'IT', 'TO', 'THE', 'EDITOR', 'OF', 'THE', 'DAILY', 'GRAPE', 'VINE', 'AND', 'SEE', 'WHAT', 'HE', 'COULD', 'GET', 'A', 'LINE', 'FOR', 'IT'] +4970-29093-0011-2104: ref=['O', 'VERY', 'WELL', 'SAID', 'GRINGO', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0011-2104: hyp=['OH', 'VERY', 'WELL', 'SAID', 'GREENOW', 'TURNING', 'AWAY', 'WITH', 'A', 'SHADE', 'OF', 'CONTEMPT', "YOU'LL", 'FIND', 'IF', 'YOU', 'ARE', 'GOING', 'INTO', 'LITERATURE', 'AND', 'NEWSPAPER', 'WORK', 'THAT', 'YOU', "CAN'T", 'AFFORD', 'A', 'CONSCIENCE', 'LIKE', 'THAT'] +4970-29093-0012-2105: ref=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THANKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0012-2105: hyp=['BUT', 'PHILIP', 'DID', 'AFFORD', 'IT', 'AND', 'HE', 'WROTE', 'THINKING', 'HIS', 'FRIENDS', 'AND', 'DECLINING', 'BECAUSE', 'HE', 'SAID', 'THE', 'POLITICAL', 'SCHEME', 'WOULD', 'FAIL', 'AND', 'OUGHT', 'TO', 'FAIL'] +4970-29093-0013-2106: ref=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0013-2106: hyp=['AND', 'HE', 'WENT', 'BACK', 'TO', 'HIS', 'BOOKS', 'AND', 'TO', 'HIS', 'WAITING', 'FOR', 'AN', 'OPENING', 'LARGE', 'ENOUGH', 'FOR', 'HIS', 'DIGNIFIED', 'ENTRANCE', 'INTO', 'THE', 'LITERARY', 'WORLD'] +4970-29093-0014-2107: ref=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'CAN', 'GO', 'AS', 'ONE'] +4970-29093-0014-2107: hyp=['WELL', "I'M", 'GOING', 'AS', 'AN', 'ENGINEER', 'YOU', 'COULD', 'GO', 'AS', 'ONE'] +4970-29093-0015-2108: ref=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 'FIGURES'] +4970-29093-0015-2108: hyp=['YOU', 'CAN', 'BEGIN', 'BY', 'CARRYING', 'A', 'ROD', 'AND', 'PUTTING', 'DOWN', 'THE', 'FIGURES'] +4970-29093-0016-2109: ref=['NO', 'ITS', 'NOT', 'TOO', 'SOON'] +4970-29093-0016-2109: hyp=['NO', "IT'S", 'NOT', 'TOO', 'SOON'] +4970-29093-0017-2110: ref=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 
'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0017-2110: hyp=["I'VE", 'BEEN', 'READY', 'TO', 'GO', 'ANYWHERE', 'FOR', 'SIX', 'MONTHS'] +4970-29093-0018-2111: ref=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURE', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0018-2111: hyp=['THE', 'TWO', 'YOUNG', 'MEN', 'WHO', 'WERE', 'BY', 'THIS', 'TIME', 'FULL', 'OF', 'THE', 'ADVENTURER', 'WENT', 'DOWN', 'TO', 'THE', 'WALL', 'STREET', 'OFFICE', 'OF', "HENRY'S", 'UNCLE', 'AND', 'HAD', 'A', 'TALK', 'WITH', 'THAT', 'WILY', 'OPERATOR'] +4970-29093-0019-2112: ref=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0019-2112: hyp=['THE', 'NIGHT', 'WAS', 'SPENT', 'IN', 'PACKING', 'UP', 'AND', 'WRITING', 'LETTERS', 'FOR', 'PHILIP', 'WOULD', 'NOT', 'TAKE', 'SUCH', 'AN', 'IMPORTANT', 'STEP', 'WITHOUT', 'INFORMING', 'HIS', 'FRIENDS'] +4970-29093-0020-2113: ref=['WHY', "IT'S", 'IN', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0020-2113: hyp=['WHY', "IT'S", 'A', 'MISSOURI', 'SOMEWHERE', 'ON', 'THE', 'FRONTIER', 'I', 'THINK', "WE'LL", 'GET', 'A', 'MAP'] +4970-29093-0021-2114: ref=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0021-2114: hyp=['I', 'WAS', 'AFRAID', 'IT', 'WAS', 'NEARER', 'HOME'] +4970-29093-0022-2115: ref=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0022-2115: hyp=['HE', 'KNEW', 'HIS', 'UNCLE', 'WOULD', 'BE', 'GLAD', 'TO', 'HEAR', 'THAT', 'HE', 'HAD', 'AT', 'LAST', 'TURNED', 'HIS', 'THOUGHTS', 'TO', 'A', 'PRACTICAL', 'MATTER'] +4970-29093-0023-2116: ref=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29093-0023-2116: hyp=['HE', 'WELL', 'KNEW', 'THE', 'PERILS', 'OF', 'THE', 'FRONTIER', 'THE', 'SAVAGE', 'STATE', 'OF', 'SOCIETY', 'THE', 'LURKING', 'INDIANS', 'AND', 'THE', 'DANGERS', 'OF', 'FEVER'] +4970-29095-0000-2054: ref=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0000-2054: hyp=['SHE', 'WAS', 'TIRED', 'OF', 'OTHER', 'THINGS'] +4970-29095-0001-2055: ref=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SANG', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0001-2055: hyp=['SHE', 'TRIED', 'THIS', 'MORNING', 'AN', 'AIR', 'OR', 'TWO', 'UPON', 'THE', 'PIANO', 'SAYING', 'A', 'SIMPLE', 'SONG', 'IN', 'A', 'SWEET', 'BUT', 'SLIGHTLY', 'METALLIC', 'VOICE', 'AND', 'THEN', 'SEATING', 'HERSELF', 'BY', 'THE', 'OPEN', 'WINDOW', 'READ', "PHILIP'S", 'LETTER'] +4970-29095-0002-2056: ref=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0002-2056: hyp=['WELL', 'MOTHER', 'SAID', 'THE', 'YOUNG', 'STUDENT', 'LOOKING', 'UP', 'WITH', 'A', 'SHADE', 'OF', 'IMPATIENCE'] +4970-29095-0003-2057: ref=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 
'FOR', 'THE', 'PIANO', 'AND', 'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0003-2057: hyp=['I', 'HOPE', 'THEE', 'TOLD', 'THE', 'ELDERS', 'THAT', 'FATHER', 'AND', 'I', 'ARE', 'RESPONSIBLE', 'FOR', 'THE', 'PIANO', 'AND', 'THAT', 'MUCH', 'AS', 'THEE', 'LOVES', 'MUSIC', 'THEE', 'IS', 'NEVER', 'IN', 'THE', 'ROOM', 'WHEN', 'IT', 'IS', 'PLAYED'] +4970-29095-0004-2058: ref=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0004-2058: hyp=['I', 'HEARD', 'FATHER', 'TELL', 'COUSIN', 'ABNER', 'THAT', 'HE', 'WAS', 'WHIPPED', 'SO', 'OFTEN', 'FOR', 'WHISTLING', 'WHEN', 'HE', 'WAS', 'A', 'BOY', 'THAT', 'HE', 'WAS', 'DETERMINED', 'TO', 'HAVE', 'WHAT', 'COMPENSATION', 'HE', 'COULD', 'GET', 'NOW'] +4970-29095-0005-2059: ref=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0005-2059: hyp=['THY', 'WAYS', 'GREATLY', 'TRY', 'ME', 'RUTH', 'AND', 'ALL', 'THY', 'RELATIONS'] +4970-29095-0006-2060: ref=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0006-2060: hyp=['IS', 'THY', 'FATHER', 'WILLING', 'THEE', 'SHOULD', 'GO', 'AWAY', 'TO', 'A', 'SCHOOL', 'OF', 'THE', "WORLD'S", 'PEOPLE'] +4970-29095-0007-2061: ref=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0007-2061: hyp=['I', 'HAVE', 'NOT', 'ASKED', 'HIM', 'RUTH', 'REPLIED', 'WITH', 'A', 'LOOK', 'THAT', 'MIGHT', 'IMPLY', 'THAT', 'SHE', 'WAS', 'ONE', 'OF', 'THOSE', 'DETERMINED', 'LITTLE', 'BODIES', 'WHO', 'FIRST', 'MADE', 'UP', 'HER', 'OWN', 'MIND', 'AND', 'THEN', 'COMPELLED', 'OTHERS', 'TO', 'MAKE', 'UP', 'THEIRS', 'IN', 'ACCORDANCE', 'WITH', 'HERS'] +4970-29095-0008-2062: ref=['MOTHER', "I'M", 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0008-2062: hyp=['MOTHER', 'I', 'AM', 'GOING', 'TO', 'STUDY', 'MEDICINE'] +4970-29095-0009-2063: ref=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0009-2063: hyp=['MARGARET', 'BOLTON', 'ALMOST', 'LOST', 'FOR', 'A', 'MOMENT', 'HER', 'HABITUAL', 'PLACIDITY'] +4970-29095-0010-2064: ref=['THEE', 'STUDY', 'MEDICINE'] +4970-29095-0010-2064: hyp=['THE', 'STUDY', 'MEDICINE'] +4970-29095-0011-2065: ref=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0011-2065: hyp=['DOES', 'THEE', 'THINK', 'THEE', 'COULD', 'STAND', 'IT', 'SIX', 'MONTHS'] +4970-29095-0012-2066: ref=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0012-2066: hyp=['AND', 'BESIDES', 'SUPPOSE', 'THEE', 'DOES', 'LEARN', 'MEDICINE'] +4970-29095-0013-2067: ref=['I', 'WILL', 'PRACTICE', 'IT'] +4970-29095-0013-2067: hyp=['I', 'WILL', 'PRACTISE', 'IT'] +4970-29095-0014-2068: ref=['WHERE', 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0014-2068: hyp=["WHERE'S", 'THEE', 'AND', 'THY', 'FAMILY', 'ARE', 'KNOWN'] +4970-29095-0015-2069: ref=['IF', 'I', 'CAN', 'GET', 'PATIENTS'] +4970-29095-0015-2069: hyp=['IF', 'I', 'CAN', 
'GET', 'PATIENCE'] +4970-29095-0016-2070: ref=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'INTENT', 'AND', 'FLUSHED', 'IT', 'WAS', 'OUT', 'NOW'] +4970-29095-0016-2070: hyp=['RUTH', 'SAT', 'QUITE', 'STILL', 'FOR', 'A', 'TIME', 'WITH', 'FACE', 'AND', 'TENT', 'AND', 'FLUSHED', 'IT', 'WAS', 'OUT', 'NOW'] +4970-29095-0017-2071: ref=['THE', 'SIGHT', 'SEERS', 'RETURNED', 'IN', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0017-2071: hyp=['THE', 'SIGHTSEERS', 'RETURNED', 'IN', 'HIGH', 'SPIRITS', 'FROM', 'THE', 'CITY'] +4970-29095-0018-2072: ref=['RUTH', 'ASKED', 'THE', 'ENTHUSIASTS', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MAUSOLEUM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0018-2072: hyp=['RUTH', 'ASKED', 'THE', 'ENTHUSIASTS', 'IF', 'THEY', 'WOULD', 'LIKE', 'TO', 'LIVE', 'IN', 'SUCH', 'A', 'SOUNDING', 'MUZZLEEM', 'WITH', 'ITS', 'GREAT', 'HALLS', 'AND', 'ECHOING', 'ROOMS', 'AND', 'NO', 'COMFORTABLE', 'PLACE', 'IN', 'IT', 'FOR', 'THE', 'ACCOMMODATION', 'OF', 'ANY', 'BODY'] +4970-29095-0019-2073: ref=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0019-2073: hyp=['AND', 'THEN', 'THERE', 'WAS', 'BROAD', 'STREET'] +4970-29095-0020-2074: ref=['THERE', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIAN', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'POINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0020-2074: hyp=['THERE', 'ARE', 'CERTAINLY', 'WAS', 'NO', 'END', 'TO', 'IT', 'AND', 'EVEN', 'RUTH', 'WAS', 'PHILADELPHIA', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'A', 'STREET', 'OUGHT', 'NOT', 'TO', 'HAVE', 'ANY', 'END', 'OR', 'ARCHITECTURAL', 'BLINT', 'UPON', 'WHICH', 'THE', 'WEARY', 'EYE', 'COULD', 'REST'] +4970-29095-0021-2075: ref=['BUT', 'NEITHER', 'SAINT', 'GIRARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MINT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESSED', 'THE', 'VISITORS', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0021-2075: hyp=['BUT', 'NEITHER', 'SAINT', 'GERARD', 'NOR', 'BROAD', 'STREET', 'NEITHER', 'WONDERS', 'OF', 'THE', 'MINT', 'NOR', 'THE', 'GLORIES', 'OF', 'THE', 'HALL', 'WHERE', 'THE', 'GHOSTS', 'OF', 'OUR', 'FATHERS', 'SIT', 'ALWAYS', 'SIGNING', 'THE', 'DECLARATION', 'IMPRESS', 'THE', 'VISITOR', 'SO', 'MUCH', 'AS', 'THE', 'SPLENDORS', 'OF', 'THE', 'CHESTNUT', 'STREET', 'WINDOWS', 'AND', 'THE', 'BARGAINS', 'ON', 'EIGHTH', 'STREET'] +4970-29095-0022-2076: ref=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0022-2076: hyp=['IS', 'THEE', 'GOING', 'TO', 'THE', 'YEARLY', 'MEETING', 'RUTH', 'ASKED', 'ONE', 'OF', 'THE', 'GIRLS'] +4970-29095-0023-2077: ref=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THAT', 'DEMURE', 'PERSON'] +4970-29095-0023-2077: hyp=['I', 'HAVE', 'NOTHING', 'TO', 'WEAR', 'REPLIED', 'THE', 'DEMURE', 'PERSON'] +4970-29095-0024-2078: ref=['IT', 'HAS', 'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'AT', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0024-2078: hyp=['IT', 'HAS', 
'OCCUPIED', 'MOTHER', 'A', 'LONG', 'TIME', 'TO', 'FIND', 'THE', 'SHOPS', 'THE', 'EXACT', 'SHADE', 'FOR', 'HER', 'NEW', 'BONNET'] +4970-29095-0025-2079: ref=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0025-2079: hyp=['AND', 'THEE', "WON'T", 'GO', 'WHY', 'SHOULD', 'I'] +4970-29095-0026-2080: ref=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HEAR', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0026-2080: hyp=['IF', 'I', 'GO', 'TO', 'MEETING', 'AT', 'ALL', 'I', 'LIKE', 'BEST', 'TO', 'SIT', 'IN', 'THE', 'QUIET', 'OLD', 'HOUSE', 'IN', 'GERMANTOWN', 'WHERE', 'THE', 'WINDOWS', 'ARE', 'ALL', 'OPEN', 'AND', 'I', 'CAN', 'SEE', 'THE', 'TREES', 'AND', 'HERE', 'THE', 'STIR', 'OF', 'THE', 'LEAVES'] +4970-29095-0027-2081: ref=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LINE', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0027-2081: hyp=["IT'S", 'SUCH', 'A', 'CRUSH', 'AT', 'THE', 'YEARLY', 'MEETING', 'AT', 'ARCH', 'STREET', 'AND', 'THEN', "THERE'S", 'THE', 'ROW', 'OF', 'SLEEK', 'LOOKING', 'YOUNG', 'MEN', 'WHO', 'LIE', 'IN', 'THE', 'CURBSTONE', 'AND', 'STARE', 'AT', 'US', 'AS', 'WE', 'COME', 'OUT'] +4970-29095-0028-2082: ref=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WEDNESDAY', 'MEETING', 'HUMPH', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0028-2082: hyp=['HE', "DOESN'T", 'SAY', 'BUT', "IT'S", 'ON', 'THE', 'FRONTIER', 'AND', 'ON', 'THE', 'MAP', 'EVERYTHING', 'BEYOND', 'IT', 'IS', 'MARKED', 'INDIANS', 'AND', 'DESERT', 'AND', 'LOOKS', 'AS', 'DESOLATE', 'AS', 'A', 'WIND', 'ZAY', 'MEETING', 'IT', 'WAS', 'TIME', 'FOR', 'HIM', 'TO', 'DO', 'SOMETHING'] +4970-29095-0029-2083: ref=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICK', 'A', 'POOS'] +4970-29095-0029-2083: hyp=['IS', 'HE', 'GOING', 'TO', 'START', 'A', 'DAILY', 'NEWSPAPER', 'AMONG', 'THE', 'KICKAPOOS'] +4970-29095-0030-2084: ref=['FATHER', "THEE'S", 'UNJUST', 'TO', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0030-2084: hyp=['FATHER', 'THESE', 'UNJUST', 'TO', 'PHILIP', "HE'S", 'GOING', 'INTO', 'BUSINESS'] +4970-29095-0031-2085: ref=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'THEE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0031-2085: hyp=['HE', "DOESN'T", 'SAY', 'EXACTLY', 'WHAT', 'IT', 'IS', 'SAID', 'RUTH', 'A', 'LITTLE', 'DUBIOUSLY', 'BUT', "IT'S", 'SOMETHING', 'ABOUT', 'LAND', 'AND', 'RAILROADS', 'AND', 'HE', 'KNOWS', 'FATHER', 'THAT', 'FORTUNES', 'ARE', 'MADE', 'NOBODY', 'KNOWS', 'EXACTLY', 'HOW', 'IN', 'A', 'NEW', 'COUNTRY'] +4970-29095-0032-2086: ref=['BUT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0032-2086: hyp=['THAT', 'PHILIP', 'IS', 'HONEST', 'AND', 'HE', 'HAS', 'TALENT', 'ENOUGH', 'IF', 'HE', 
'WILL', 'STOP', 'SCRIBBLING', 'TO', 'MAKE', 'HIS', 'WAY'] +4970-29095-0033-2087: ref=['WHAT', 'A', 'BOX', 'WOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUT', 'IN', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 'DISABILITIES'] +4970-29095-0033-2087: hyp=['WHAT', 'A', 'BOXWOMEN', 'ARE', 'PUT', 'INTO', 'MEASURED', 'FOR', 'IT', 'AND', 'PUTTING', 'YOUNG', 'IF', 'WE', 'GO', 'ANYWHERE', "IT'S", 'IN', 'A', 'BOX', 'VEILED', 'AND', 'PINIONED', 'AND', 'SHUT', 'IN', 'BY', 'DISABILITIES'] +4970-29095-0034-2088: ref=['WHY', 'SHOULD', 'I', 'RUST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'INACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0034-2088: hyp=['WHY', 'SHOULD', 'I', 'REST', 'AND', 'BE', 'STUPID', 'AND', 'SIT', 'IN', 'AN', 'ACTION', 'BECAUSE', 'I', 'AM', 'A', 'GIRL'] +4970-29095-0035-2089: ref=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0035-2089: hyp=['AND', 'IF', 'I', 'HAD', 'A', 'FORTUNE', 'WOULD', 'THEE', 'WANT', 'ME', 'TO', 'LEAD', 'A', 'USELESS', 'LIFE'] +4970-29095-0036-2090: ref=['HAS', 'THEE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'THEE', 'WANTS'] +4970-29095-0036-2090: hyp=['HAS', 'THE', 'CONSULTED', 'THY', 'MOTHER', 'ABOUT', 'A', 'CAREER', 'I', 'SUPPOSE', 'IT', 'IS', 'A', 'CAREER', 'OF', 'THEE', 'WANTS'] +4970-29095-0037-2091: ref=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0037-2091: hyp=['BUT', 'THAT', 'WISE', 'AND', 'PLACID', 'WOMAN', 'UNDERSTOOD', 'THE', 'SWEET', 'REBEL', 'A', 'GREAT', 'DEAL', 'BETTER', 'THAN', 'RUTH', 'UNDERSTOOD', 'HERSELF'] +4970-29095-0038-2092: ref=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'A', 'WAY', 'FOR', 'HIM'] +4970-29095-0038-2092: hyp=['RUTH', 'WAS', 'GLAD', 'TO', 'HEAR', 'THAT', 'PHILIP', 'HAD', 'MADE', 'A', 'PUSH', 'INTO', 'THE', 'WORLD', 'AND', 'SHE', 'WAS', 'SURE', 'THAT', 'HIS', 'TALENT', 'AND', 'COURAGE', 'WOULD', 'MAKE', 'AWAY', 'FOR', 'HIM'] +4992-23283-0000-2140: ref=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0000-2140: hyp=['BUT', 'THE', 'MORE', 'FORGETFULNESS', 'HAD', 'THEN', 'PREVAILED', 'THE', 'MORE', 'POWERFUL', 'WAS', 'THE', 'FORCE', 'OF', 'REMEMBRANCE', 'WHEN', 'SHE', 'AWOKE'] +4992-23283-0001-2141: ref=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0001-2141: hyp=['MISS', "MILNER'S", 'HEALTH', 'IS', 'NOT', 'GOOD'] +4992-23283-0002-2142: ref=['SAID', 'MISSUS', 'HORTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0002-2142: hyp=['SAID', 'MISSUS', 'WHARTON', 'A', 'FEW', 'MINUTES', 'AFTER'] +4992-23283-0003-2143: ref=['SO', 'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0003-2143: hyp=['SO', 'THERE', 'IS', 'TO', 'ME', 'ADDED', 'SANDFORD', 'WITH', 'A', 'SARCASTIC', 'SNEER'] +4992-23283-0004-2144: ref=['AND', 'YET', 'YOU', 'MUST', 'OWN', 'HER', 'BEHAVIOUR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0004-2144: hyp=['AND', 'YET', 'YOU', 'MUST', 
'OWN', 'HER', 'BEHAVIOR', 'HAS', 'WARRANTED', 'THEM', 'HAS', 'IT', 'NOT', 'BEEN', 'IN', 'THIS', 'PARTICULAR', 'INCOHERENT', 'AND', 'UNACCOUNTABLE'] +4992-23283-0005-2145: ref=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0005-2145: hyp=['NOT', 'THAT', 'I', 'KNOW', 'OF', 'NOT', 'ONE', 'MORE', 'THAT', 'I', 'KNOW', 'OF', 'HE', 'REPLIED', 'WITH', 'ASTONISHMENT', 'AT', 'WHAT', 'SHE', 'HAD', 'INSINUATED', 'AND', 'YET', 'WITH', 'A', 'PERFECT', 'ASSURANCE', 'THAT', 'SHE', 'WAS', 'IN', 'THE', 'WRONG'] +4992-23283-0006-2146: ref=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0006-2146: hyp=['PERHAPS', 'I', 'AM', 'MISTAKEN', 'ANSWERED', 'SHE'] +4992-23283-0007-2147: ref=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0007-2147: hyp=['TO', 'ASK', 'ANY', 'MORE', 'QUESTIONS', 'OF', 'YOU', 'I', 'BELIEVE', 'WOULD', 'BE', 'UNFAIR'] +4992-23283-0008-2148: ref=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0008-2148: hyp=['HE', 'SEEMED', 'TO', 'WAIT', 'FOR', 'HER', 'REPLY', 'BUT', 'AS', 'SHE', 'MADE', 'NONE', 'HE', 'PROCEEDED'] +4992-23283-0009-2149: ref=['OH', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSON', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'ENTRUSTING'] +4992-23283-0009-2149: hyp=['OH', 'MY', 'LORD', 'CRIED', 'MISS', 'WOODLEY', 'WITH', 'A', 'MOST', 'FORCIBLE', 'ACCENT', 'YOU', 'ARE', 'THE', 'LAST', 'PERSONAL', 'ON', 'EARTH', 'SHE', 'WOULD', 'PARDON', 'ME', 'FOR', 'INTRUSTING'] +4992-23283-0010-2150: ref=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0010-2150: hyp=['BUT', 'IN', 'SUCH', 'A', 'CASE', 'MISS', "MILNER'S", 'ELECTION', 'OF', 'A', 'HUSBAND', 'SHALL', 'NOT', 'DIRECT', 'MINE'] +4992-23283-0011-2151: ref=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0011-2151: hyp=['IF', 'SHE', 'DOES', 'NOT', 'KNOW', 'HOW', 'TO', 'ESTIMATE', 'HER', 'OWN', 'VALUE', 'I', 'DO'] +4992-23283-0012-2152: ref=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'A', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0012-2152: hyp=['INDEPENDENT', 'OF', 'HER', 'FORTUNE', 'SHE', 'HAS', 'BEAUTY', 'TO', 'CAPTIVATE', 'THE', 'HEART', 'OF', 'ANY', 'MAN', 'AND', 'WITH', 'ALL', 'HER', 'FOLLIES', 'SHE', 'HAS', 'A', 'FRANKNESS', 'IN', 'HER', 'MANNER', 'AN', 'UNAFFECTED', 'WISDOM', 'IN', 'HER', 'THOUGHTS', 'OF', 'VIVACITY', 'IN', 'HER', 'CONVERSATION', 'AND', 'WITHAL', 'A', 'SOFTNESS', 'IN', 'HER', 'DEMEANOUR', 'THAT', 'MIGHT', 'ALONE', 'ENGAGE', 'THE', 'AFFECTIONS', 'OF', 'A', 'MAN', 'OF', 'THE', 'NICEST', 'SENTIMENTS', 'AND', 'THE', 'STRONGEST', 'UNDERSTANDING'] +4992-23283-0013-2153: 
ref=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0013-2153: hyp=['MY', 'LORD', 'MISS', "MILNER'S", 'TASTE', 'IS', 'NOT', 'A', 'DEPRAVED', 'ONE', 'IT', 'IS', 'BUT', 'TOO', 'REFINED'] +4992-23283-0014-2154: ref=['WHAT', 'CAN', 'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0014-2154: hyp=['WHAT', 'CAN', 'YOU', 'MEAN', 'BY', 'THAT', 'MISS', 'WOODLEY', 'YOU', 'TALK', 'MYSTERIOUSLY'] +4992-23283-0015-2155: ref=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0015-2155: hyp=['IS', 'SHE', 'NOT', 'AFRAID', 'THAT', 'I', 'WILL', 'THWART', 'HER', 'INCLINATIONS'] +4992-23283-0016-2156: ref=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0016-2156: hyp=['AGAIN', 'HE', 'SEARCHED', 'HIS', 'OWN', 'THOUGHTS', 'NOR', 'INEFFECTUALLY', 'AS', 'BEFORE'] +4992-23283-0017-2157: ref=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MILITATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0017-2157: hyp=['MISS', 'WOODLEY', 'WAS', 'TOO', 'LITTLE', 'VERSED', 'IN', 'THE', 'SUBJECT', 'TO', 'KNOW', 'THIS', 'WOULD', 'HAVE', 'BEEN', 'NOT', 'TO', 'LOVE', 'AT', 'ALL', 'AT', 'LEAST', 'NOT', 'TO', 'THE', 'EXTENT', 'OF', 'BREAKING', 'THROUGH', 'ENGAGEMENTS', 'AND', 'ALL', 'THE', 'VARIOUS', 'OBSTACLES', 'THAT', 'STILL', 'MITIGATED', 'AGAINST', 'THEIR', 'UNION'] +4992-23283-0018-2158: ref=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0018-2158: hyp=['TO', 'RELIEVE', 'HER', 'FROM', 'BOTH', 'HE', 'LAID', 'HIS', 'HAND', 'WITH', 'FORCE', 'UPON', 'HIS', 'HEART', 'AND', 'SAID', 'DO', 'YOU', 'BELIEVE', 'ME'] +4992-23283-0019-2159: ref=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0019-2159: hyp=['I', 'WILL', 'MAKE', 'NO', 'UNJUST', 'USE', 'OF', 'WHAT', 'I', 'KNOW', 'HE', 'REPLIED', 'WITH', 'FIRMNESS', 'I', 'BELIEVE', 'YOU', 'MY', 'LORD'] +4992-23283-0020-2160: ref=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-23283-0020-2160: hyp=['I', 'HAVE', 'NEVER', 'YET', 'HOWEVER', 'BEEN', 'VANQUISHED', 'BY', 'THEM', 'AND', 'EVEN', 'UPON', 'THIS', 'OCCASION', 'MY', 'REASON', 'SHALL', 'COMBAT', 'THEM', 'TO', 'THE', 'LAST', 'AND', 'MY', 'REASON', 'SHALL', 'FAIL', 'ME', 'BEFORE', 'I', 'DO', 'WRONG'] +4992-41797-0000-2117: ref=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AN', 'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0000-2117: hyp=['YES', 'DEAD', 'THESE', 'FOUR', 'YEARS', 'AND', 'A', 'GOOD', 'JOB', 'FOR', 'HER', 'TOO'] +4992-41797-0001-2118: ref=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'INTO', 'JAIL', 'AND', 'THE', 'MURDERERS', 'AND', 'THE', 'WIFE', 'BEATERS', "I'VE", 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 
'PUNISHMENT', 'FOR', 'A', 'WIFE', 'BEATER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'O', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIREBUGS', "CAN'T", 'THINK', 'O', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'CENDENARIES', 'AN', 'THE', 'BREAKERS', 'O', 'THE', 'PEACE', 'AN', 'WHAT', 'NOT', 'AN', 'YET', 'THE', 'LAW', 'HAS', 'NOTHIN', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HEN', 'LORD'] +4992-41797-0001-2118: hyp=['WELL', 'AS', 'I', 'SAY', "IT'S", 'AN', 'AWFUL', 'QUEER', 'WORLD', 'THEY', 'CLAP', 'ALL', 'THE', 'BURGLARS', 'AND', 'JAIL', 'THE', 'MURDERERS', 'IN', 'THE', 'WHITE', 'BEATERS', 'I', 'ALLERS', 'THOUGHT', 'A', 'GENTLE', 'REPROOF', 'WOULD', 'BE', 'ENOUGH', 'PUNISHMENT', 'FOR', 'A', 'WIFE', 'PETER', 'CAUSE', 'HE', 'PROBABLY', 'HAS', 'A', 'LOT', 'OF', 'PROVOCATION', 'THAT', 'NOBODY', 'KNOWS', 'AND', 'THE', 'FIRE', 'BUGS', "CAN'T", 'THINK', 'OF', 'THE', 'RIGHT', 'NAME', 'SOMETHING', 'LIKE', 'SENDIARIES', 'AND', 'THE', 'BREAKERS', 'OF', 'THE', 'PEACE', 'AND', 'WHAT', 'NOT', 'AND', 'YET', 'THE', 'LAW', 'HAS', 'NOTHING', 'TO', 'SAY', 'TO', 'A', 'MAN', 'LIKE', 'HANDLED'] +4992-41797-0002-2119: ref=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0002-2119: hyp=['GRANDFATHER', 'WAS', 'ALEXANDER', 'CAREY', 'L', 'D', 'DOCTOR', 'OF', 'LAWS', 'THAT', 'IS'] +4992-41797-0003-2120: ref=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0003-2120: hyp=['MISTER', 'POPHAM', 'LAID', 'DOWN', 'HIS', 'BRUSH'] +4992-41797-0004-2121: ref=['I', 'SWAN', 'TO', 'MAN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TIMES', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0004-2121: hyp=['I', 'SWAY', 'INTO', 'MEN', 'HE', 'EJACULATED', 'IF', 'YOU', "DON'T", 'WORK', 'HARD', 'YOU', "CAN'T", 'KEEP', 'UP', 'WITH', 'THE', 'TUBS', 'DOCTOR', 'OF', 'LAWS'] +4992-41797-0005-2122: ref=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', "HE'D", 'OUGHTER', 'SENCE', 'HE', 'WAS', 'BORN'] +4992-41797-0005-2122: hyp=['DONE', 'HE', "AIN'T", 'DONE', 'A', 'THING', 'HE', 'ORDERS', 'SINCE', 'HE', 'WAS', 'BORN'] +4992-41797-0006-2123: ref=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMANDMENTS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0006-2123: hyp=['HE', 'KEEPS', 'THE', 'THOU', 'SHALT', 'NOT', 'COMMAND', 'ITS', 'FIRST', 'RATE', 'HEN', 'LORD', 'DOES'] +4992-41797-0007-2124: ref=['HE', 'GIVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SO', 'T', 'HE', 'COULD', 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0007-2124: hyp=['HE', 'GAVE', 'UP', 'HIS', 'POSITION', 'AND', 'SHUT', 'THE', 'FAMILY', 'UP', 'IN', 'THAT', 'TOMB', 'OF', 'A', 'HOUSE', 'SEWED', 'HE', "COULDN'T", 'STUDY', 'HIS', 'BOOKS'] +4992-41797-0008-2125: ref=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 'LORDS'] +4992-41797-0008-2125: hyp=['MISTER', 'POPHAM', 'EXAGGERATED', 'NOTHING', 'BUT', 'ON', 'THE', 'CONTRARY', 'LEFT', 'MUCH', 'UNSAID', 'IN', 'HIS', 'NARRATIVE', 'OF', 'THE', 'FAMILY', 'AT', 'THE', 'HOUSE', 'OF', 'LORDS'] +4992-41797-0009-2126: ref=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'PH', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] 
+4992-41797-0009-2126: hyp=['HENRY', 'LORD', 'WITH', 'THE', 'DEGREE', 'OF', 'P', 'D', 'TO', 'HIS', 'CREDIT', 'HAD', 'BEEN', 'PROFESSOR', 'OF', 'ZOOLOGY', 'AT', 'A', 'NEW', 'ENGLAND', 'COLLEGE', 'BUT', 'HAD', 'RESIGNED', 'HIS', 'POST', 'IN', 'ORDER', 'TO', 'WRITE', 'A', 'SERIES', 'OF', 'SCIENTIFIC', 'TEXT', 'BOOKS'] +4992-41797-0010-2127: ref=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0010-2127: hyp=['ALWAYS', 'IRRITABLE', 'COLD', 'INDIFFERENT', 'HE', 'HAD', 'GROWN', 'RAPIDLY', 'MORE', 'SO', 'AS', 'YEARS', 'WENT', 'ON'] +4992-41797-0011-2128: ref=['WHATEVER', 'APPEALED', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0011-2128: hyp=['WHATEVER', 'APPEAL', 'TO', 'HER', 'SENSE', 'OF', 'BEAUTY', 'WAS', 'STRAIGHTWAY', 'TRANSFERRED', 'TO', 'PAPER', 'OR', 'CANVAS'] +4992-41797-0012-2129: ref=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THINGS'] +4992-41797-0012-2129: hyp=['SHE', 'IS', 'WILD', 'TO', 'KNOW', 'HOW', 'TO', 'DO', 'THINGS'] +4992-41797-0013-2130: ref=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0013-2130: hyp=['SHE', 'MAKES', 'EFFORT', 'AFTER', 'EFFORT', 'TREMBLING', 'WITH', 'EAGERNESS', 'AND', 'WHEN', 'SHE', 'FAILS', 'TO', 'REPRODUCE', 'WHAT', 'SHE', 'SEES', 'SHE', 'WORKS', 'HERSELF', 'INTO', 'A', 'FRENZY', 'OF', 'GRIEF', 'AND', 'DISAPPOINTMENT'] +4992-41797-0014-2131: ref=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0014-2131: hyp=['WHEN', 'SHE', 'COULD', 'NOT', 'MAKE', 'A', 'RABBIT', 'OR', 'A', 'BIRD', 'LOOK', 'REAL', 'ON', 'PAPER', 'SHE', 'SEARCHED', 'IN', 'HER', "FATHER'S", 'BOOKS', 'FOR', 'PICTURES', 'OF', 'ITS', 'BONES'] +4992-41797-0015-2132: ref=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0015-2132: hyp=['CYRIL', 'THERE', 'MUST', 'BE', 'SOME', 'BETTER', 'WAY', 'OF', 'DOING', 'I', 'JUST', 'DRAW', 'THE', 'OUTLINE', 'OF', 'AN', 'ANIMAL', 'AND', 'THEN', 'I', 'PUT', 'HAIRS', 'OR', 'FEATHERS', 'ON', 'IT', 'THEY', 'HAVE', 'NO', 'BODIES'] +4992-41797-0016-2133: ref=['THEY', "COULDN'T", 'RUN', 'NOR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0016-2133: hyp=['THEY', "COULDN'T", 'RUN', 'OR', 'MOVE', "THEY'RE", 'JUST', 'PASTEBOARD'] +4992-41797-0017-2134: ref=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0017-2134: hyp=['HE', "WOULDN'T", 'SEARCH', 'SO', "DON'T", 'WORRY', 'REPLIED', 'CYRIL', 'QUIETLY', 'AND', 'THE', 'TWO', 'LOOKED', 'AT', 'EACH', 'OTHER', 'AND', 'KNEW', 'THAT', 'IT', 'WAS', 'SO'] +4992-41797-0018-2135: ref=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0018-2135: hyp=['THERE', 'IN', 'THE', 'CEDAR', 'HOLLOW', 'THEN', 'LIVED', 
'OLIVE', 'LORD', 'AN', 'ANGRY', 'RESENTFUL', 'LITTLE', 'CREATURE', 'WEIGHED', 'DOWN', 'BY', 'A', 'FIERCE', 'SENSE', 'OF', 'INJURY'] +4992-41797-0019-2136: ref=["OLIVE'S", 'MOURNFUL', 'BLACK', 'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0019-2136: hyp=['ALL', 'OF', 'HIS', 'MOURNFUL', 'BLACK', 'EYES', 'MET', "NANCY'S", 'SPARKLING', 'BROWN', 'ONES'] +4992-41797-0020-2137: ref=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLAITS', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0020-2137: hyp=["NANCY'S", 'CURLY', 'CHESTNUT', 'CROP', 'SHONE', 'IN', 'THE', 'SUN', 'AND', "OLIVE'S", 'THICK', 'BLACK', 'PLATES', 'LOOKED', 'BLACKER', 'BY', 'CONTRAST'] +4992-41797-0021-2138: ref=["SHE'S", 'WONDERFUL', 'MORE', 'WONDERFUL', 'THAN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0021-2138: hyp=['SHE', 'IS', 'WONDERFUL', 'MORE', 'WONDERFUL', 'IN', 'ANYBODY', "WE'VE", 'EVER', 'SEEN', 'ANYWHERE', 'AND', 'SHE', 'DRAWS', 'BETTER', 'THAN', 'THE', 'TEACHER', 'IN', 'CHARLESTOWN'] +4992-41797-0022-2139: ref=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41797-0022-2139: hyp=["SHE'S", 'OLDER', 'THAN', 'I', 'AM', 'BUT', 'SO', 'TINY', 'AND', 'SAD', 'AND', 'SHY', 'THAT', 'SHE', 'SEEMS', 'LIKE', 'A', 'CHILD'] +4992-41806-0000-2161: ref=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0000-2161: hyp=['NATTY', 'HARMON', 'TRIED', 'THE', 'KITCHEN', 'PUMP', 'SECRETLY', 'SEVERAL', 'TIMES', 'DURING', 'THE', 'EVENING', 'FOR', 'THE', 'WATER', 'HAD', 'TO', 'RUN', 'UP', 'HILL', 'ALL', 'THE', 'WAY', 'FROM', 'THE', 'WELL', 'TO', 'THE', 'KITCHEN', 'SINK', 'AND', 'HE', 'BELIEVED', 'THIS', 'TO', 'BE', 'A', 'CONTINUAL', 'MIRACLE', 'THAT', 'MIGHT', 'GIVE', 'OUT', 'AT', 'ANY', 'MOMENT'] +4992-41806-0001-2162: ref=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0001-2162: hyp=['TO', 'NIGHT', 'THERE', 'WAS', 'NO', 'NEED', 'OF', 'EXTRA', 'HEAT', 'AND', 'THERE', 'WERE', 'GREAT', 'CEREMONIES', 'TO', 'BE', 'OBSERVED', 'IN', 'LIGHTING', 'THE', 'FIRES', 'ON', 'THE', 'HEARTHSTONES'] +4992-41806-0002-2163: ref=['THEY', 'BEGAN', 'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NATTY', 'AND', 'RUFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LALLIE', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0002-2163: hyp=['THEY', 'BEGAN', 'WITH', 'THE', 'ONE', 'IN', 'THE', 'FAMILY', 'SITTING', 'ROOM', 'COLONEL', 'WHEELER', 'RALPH', 'THURSTON', 'MISTER', 'AND', 'MISSUS', 'BILL', 'HARMON', 'WITH', 'NANNIE', 'AND', 'RUFFUS', 'MISTER', 'AND', 'MISSUS', 'POPHAM', 'WITH', 'DIGBY', 'AND', 'LILY', 'JOY', 'ALL', 'STANDING', 'IN', 'ADMIRING', 'GROUPS', 'AND', 'THRILLING', 
'WITH', 'DELIGHT', 'AT', 'THE', 'ORDER', 'OF', 'EVENTS'] +4992-41806-0003-2164: ref=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] +4992-41806-0003-2164: hyp=['KATHLEEN', 'WAVED', 'THE', 'TORCH', 'TO', 'AND', 'FRO', 'AS', 'SHE', 'RECITED', 'SOME', 'BEAUTIFUL', 'LINES', 'WRITTEN', 'FOR', 'SOME', 'SUCH', 'PURPOSE', 'AS', 'THAT', 'WHICH', 'CALLED', 'THEM', 'TOGETHER', 'TO', 'NIGHT'] +4992-41806-0004-2165: ref=['BURN', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0004-2165: hyp=['BURNE', 'FIRE', 'BURN', 'FLICKER', 'FLICKER', 'FLAME'] +4992-41806-0005-2166: ref=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0005-2166: hyp=['NEXT', 'CAME', "OLIVE'S", 'TURN', 'TO', 'HELP', 'IN', 'THE', 'CEREMONIES'] +4992-41806-0006-2167: ref=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORACE', 'TIBI', 'SPLENDET', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0006-2167: hyp=['RALPH', 'THURSTON', 'HAD', 'FOUND', 'A', 'LINE', 'OF', 'LATIN', 'FOR', 'THEM', 'IN', 'HIS', 'BELOVED', 'HORNS', 'TIBBY', 'SPLENDID', 'FOCUS', 'FOR', 'YOU', 'THE', 'HEARTH', 'FIRE', 'SHINES'] +4992-41806-0007-2168: ref=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0007-2168: hyp=['OLIVE', 'HAD', 'PAINTED', 'THE', 'MOTTO', 'ON', 'A', 'LONG', 'NARROW', 'PANEL', 'OF', 'CANVAS', 'AND', 'GIVING', 'IT', 'TO', 'MISTER', 'POPHAM', 'STOOD', 'BY', 'THE', 'FIRESIDE', 'WHILE', 'HE', 'DEFTLY', 'FITTED', 'IT', 'INTO', 'THE', 'PLACE', 'PREPARED', 'FOR', 'IT'] +4992-41806-0008-2169: ref=['OLIVE', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0008-2169: hyp=['ALAP', 'HAS', 'ANOTHER', 'LOVELY', 'GIFT', 'FOR', 'THE', 'YELLOW', 'HOUSE', 'SAID', 'MOTHER', 'CAREY', 'RISING', 'AND', 'TO', 'CARRY', 'OUT', 'THE', 'NEXT', 'PART', 'OF', 'THE', 'PROGRAMME', 'WE', 'SHALL', 'HAVE', 'TO', 'GO', 'IN', 'PROCESSION', 'UPSTAIRS', 'TO', 'MY', 'BEDROOM'] +4992-41806-0009-2170: ref=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0009-2170: hyp=['EXCLAIMED', 'BILL', 'HARMON', 'TO', 'HIS', 'WIFE', 'AS', 'THEY', 'WENT', 'THROUGH', 'THE', 'LIGHTED', 'HALL'] +4992-41806-0010-2171: ref=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0010-2171: hyp=["AIN'T", 'THEY', 'THE', 'GREATEST'] +4992-41806-0011-2172: ref=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHERS', 'HELPED', 'SERVE', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0011-2172: hyp=['MOTHER', 'CAREY', 'POURED', 'COFFEE', 'NANCY', 'CHOCOLATE', 'AND', 'THE', 'OTHER', 'SELF', 'SERVED', 'THE', 'SANDWICHES', 'AND', 'CAKE', 'DOUGHNUTS', 'AND', 'TARTS'] +4992-41806-0012-2173: ref=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 
'GREEN', 'FELT'] +4992-41806-0012-2173: hyp=['AT', 'THAT', 'MOMENT', 'THE', 'GENTLEMAN', 'ENTERED', 'BEARING', 'A', 'HUGE', 'OBJECT', 'CONCEALED', 'BY', 'A', 'PIECE', 'OF', 'GREEN', 'FILT'] +4992-41806-0013-2174: ref=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0013-2174: hyp=['APPROACHING', 'THE', 'DINING', 'TABLE', 'HE', 'CAREFULLY', 'PLACED', 'THE', 'ARTICLE', 'IN', 'THE', 'CENTRE', 'AND', 'REMOVED', 'THE', 'CLOTH'] +4992-41806-0014-2175: ref=['THINKS', 'I', 'TO', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'OSH', 'POPHAM', "COULDN'T", 'MEND', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AN', "I'VE", 'SPENT', 'ELEVEN', "EVENIN'S", 'PUTTIN', 'HIM', 'TOGETHER'] +4992-41806-0014-2175: hyp=['THINKS', 'OUT', 'OF', 'MYSELF', 'I', 'NEVER', 'SEEN', 'ANYTHING', 'I', 'PAPA', "GOOD'N", 'MEND', 'IF', 'HE', 'TOOK', 'TIME', 'ENOUGH', 'AND', 'GLUE', 'ENOUGH', 'SO', 'I', 'CARRIED', 'THIS', 'LITTLE', 'FELLER', 'HOME', 'IN', 'A', 'BUSHEL', 'BASKET', 'ONE', 'NIGHT', 'LAST', 'MONTH', 'AND', "I'VE", 'SPENT', 'ELEVEN', 'EVENINGS', 'PUTTING', 'HIM', 'TOGETHER'] +4992-41806-0015-2176: ref=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBAND', 'PRIVATELY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OVER', 'HIS', 'HEAD', 'MOST', 'OF', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0015-2176: hyp=['MISSUS', 'HARMON', 'THOUGHT', 'HE', 'SANG', 'TOO', 'MUCH', 'AND', 'TOLD', 'HER', 'HUSBA', 'PRIVATELY', 'THAT', 'IF', 'HE', 'WAS', 'A', 'CANARY', 'BIRD', 'SHE', 'SHOULD', 'WANT', 'TO', 'KEEP', 'A', 'TABLE', 'COVER', 'OF', 'HIS', 'EDMOST', 'TO', 'THE', 'TIME', 'BUT', 'HE', 'WAS', 'IMMENSELY', 'POPULAR', 'WITH', 'THE', 'REST', 'OF', 'HIS', 'AUDIENCE'] +4992-41806-0016-2177: ref=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0016-2177: hyp=['THE', 'FACE', 'OF', 'THE', 'MAHOGANY', 'SHONE', 'WITH', 'DELIGHT', 'AND', 'WHY', 'NOT', 'WHEN', 'IT', 'WAS', 'DOING', 'EVERYTHING', 'ALMOST', 'EVERYTHING', 'WITHIN', 'THE', 'SCOPE', 'OF', 'A', 'PIANO', 'AND', 'YET', 'THE', 'FAMILY', 'HAD', 'ENJOYED', 'WEEKS', 'OF', 'GOOD', 'NOURISHING', 'MEALS', 'ON', 'WHAT', 'HAD', 'BEEN', 'SAVED', 'BY', 'ITS', 'EXERTIONS'] +4992-41806-0017-2178: ref=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 'CORN', 'EARS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CASTS', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +4992-41806-0017-2178: hyp=['WE', 'SHUT', 'OUR', 'EYES', 'THE', 'FLOWERS', 'BLOOM', 'ON', 'WE', 'MURMUR', 'BUT', 'THE', 'CORNIERS', 'FILL', 'WE', 'CHOOSE', 'THE', 'SHADOW', 'BUT', 'THE', 'SUN', 'THAT', 'CASTS', 'IT', 'SHINES', 'BEHIND', 'US', 'STILL'] +5105-28233-0000-1649: ref=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0000-1649: hyp=['LENGTH', 'OF', 'SERVICE', 'FOURTEEN', 
'YEARS', 'THREE', 'MONTHS', 'AND', 'FIVE', 'DAYS'] +5105-28233-0001-1650: ref=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] +5105-28233-0001-1650: hyp=['HE', 'SEEMED', 'BORN', 'TO', 'PLEASE', 'WITHOUT', 'BEING', 'CONSCIOUS', 'OF', 'THE', 'POWER', 'HE', 'POSSESSED'] +5105-28233-0002-1651: ref=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0002-1651: hyp=['IT', 'MUST', 'BE', 'OWNED', 'AND', 'NO', 'ONE', 'WAS', 'MORE', 'READY', 'TO', 'CONFESS', 'IT', 'THAN', 'HIMSELF', 'THAT', 'HIS', 'LITERARY', 'ATTAINMENTS', 'WERE', 'BY', 'NO', 'MEANS', 'OF', 'A', 'HIGH', 'ORDER'] +5105-28233-0003-1652: ref=['WE', "DON'T", 'SPIN', 'TOPS', 'IS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0003-1652: hyp=['WE', "DON'T", 'SPEND', 'TOPS', 'AS', 'A', 'FAVORITE', 'SAYING', 'AMONGST', 'ARTILLERY', 'OFFICERS', 'INDICATING', 'THAT', 'THEY', 'DO', 'NOT', 'SHIRK', 'THEIR', 'DUTY', 'BY', 'FRIVOLOUS', 'PURSUITS', 'BUT', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'SERVADAC', 'BEING', 'NATURALLY', 'IDLE', 'WAS', 'VERY', 'MUCH', 'GIVEN', 'TO', 'SPINNING', 'TOPS'] +5105-28233-0004-1653: ref=['ONCE', 'IN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'INTRENCHMENT'] +5105-28233-0004-1653: hyp=['ONCE', 'AN', 'ACTION', 'HE', 'WAS', 'LEADING', 'A', 'DETACHMENT', 'OF', 'INFANTRY', 'THROUGH', 'AN', 'ENTRENCHMENT'] +5105-28233-0005-1654: ref=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0005-1654: hyp=['SOMETIMES', 'HE', 'WOULD', 'WANDER', 'ON', 'FOOT', 'UPON', 'THE', 'SANDY', 'SHORE', 'AND', 'SOMETIMES', 'HE', 'WOULD', 'ENJOY', 'A', 'RIDE', 'ALONG', 'THE', 'SUMMIT', 'OF', 'THE', 'CLIFF', 'ALTOGETHER', 'BEING', 'IN', 'NO', 'HURRY', 'AT', 'ALL', 'TO', 'BRING', 'HIS', 'TASK', 'TO', 'AN', 'END'] +5105-28233-0006-1655: ref=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MONTMARTRE'] +5105-28233-0006-1655: hyp=['NO', 'CATHEDRAL', 'NOT', 'EVEN', 'BURGOS', 'ITSELF', 'COULD', 'VIE', 'WITH', 'THE', 'CHURCH', 'AT', 'MOUNT', 'MARSHRA'] +5105-28233-0007-1656: ref=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARRONDISSEMENT', 'OF', 'PARIS', 'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRE', 'WITHOUT', 'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0007-1656: hyp=['BEN', "ZOOF'S", 'MOST', 'AMBITIOUS', 'DESIRE', 'WAS', 'TO', 'INDUCE', 'THE', 'CAPTAIN', 'TO', 'GO', 'WITH', 'HIM', 'AND', 'END', 'HIS', 'DAYS', 'IN', 'HIS', 
'MUCH', 'LOVED', 'HOME', 'AND', 'SO', 'INCESSANTLY', 'WERE', "SERVADAC'S", 'EARS', 'BESIEGED', 'WITH', 'DESCRIPTIONS', 'OF', 'THE', 'UNPARALLELED', 'BEAUTIES', 'AND', 'ADVANTAGES', 'OF', 'THIS', 'EIGHTEENTH', 'ARE', 'UNDISSIMA', 'OF', 'PARIS', 'THAT', 'HE', 'COULD', 'SCARCELY', 'HEAR', 'THE', 'NAME', 'OF', 'MONTMARTRA', 'WITHOUT', 'A', 'CONSCIOUS', 'THRILL', 'OF', 'AVERSION'] +5105-28233-0008-1657: ref=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0008-1657: hyp=['WHEN', 'A', 'PRIVATE', 'IN', 'THE', 'EIGHTH', 'CAVALRY', 'HE', 'HAD', 'BEEN', 'ON', 'THE', 'POINT', 'OF', 'QUITTING', 'THE', 'ARMY', 'AT', 'TWENTY', 'EIGHT', 'YEARS', 'OF', 'AGE', 'BUT', 'UNEXPECTEDLY', 'HE', 'HAD', 'BEEN', 'APPOINTED', 'ORDERLY', 'TO', 'CAPTAIN', 'SERVADAC'] +5105-28233-0009-1658: ref=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZOOF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONORS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0009-1658: hyp=['THE', 'BOND', 'OF', 'UNION', 'THUS', 'EFFECTED', 'COULD', 'NEVER', 'BE', 'SEVERED', 'AND', 'ALTHOUGH', 'BEN', "ZEF'S", 'ACHIEVEMENTS', 'HAD', 'FAIRLY', 'EARNED', 'HIM', 'THE', 'RIGHT', 'OF', 'RETIREMENT', 'HE', 'FIRMLY', 'DECLINED', 'ALL', 'HONOURS', 'OR', 'ANY', 'PENSION', 'THAT', 'MIGHT', 'PART', 'HIM', 'FROM', 'HIS', 'SUPERIOR', 'OFFICER'] +5105-28233-0010-1659: ref=['UNLIKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28233-0010-1659: hyp=['I', 'MAKE', 'HIS', 'MASTER', 'HE', 'MADE', 'NO', 'PRETENSION', 'TO', 'ANY', 'GIFT', 'OF', 'POETIC', 'POWER', 'BUT', 'HIS', 'INEXHAUSTIBLE', 'MEMORY', 'MADE', 'HIM', 'A', 'LIVING', 'ENCYCLOPAEDIA', 'AND', 'FOR', 'HIS', 'STOCK', 'OF', 'ANECDOTES', 'AND', "TROOPER'S", 'TALES', 'HE', 'WAS', 'MATCHLESS'] +5105-28240-0000-1624: ref=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0000-1624: hyp=['FAST', 'AS', 'HIS', 'LEGS', 'COULD', 'CARRY', 'HIM', 'SERVADAC', 'HAD', 'MADE', 'HIS', 'WAY', 'TO', 'THE', 'TOP', 'OF', 'THE', 'CLIFF'] +5105-28240-0001-1625: ref=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 'THE', 'WATER'] +5105-28240-0001-1625: hyp=['IT', 'WAS', 'QUITE', 'TRUE', 'THAT', 'A', 'VESSEL', 'WAS', 'IN', 'SIGHT', 'HARDLY', 'MORE', 'THAN', 'SIX', 'MILES', 'FROM', 'THE', 'SHORE', 'BUT', 'OWING', 'TO', 'THE', 'INCREASE', 'IN', 'THE', "EARTH'S", 'CONVEXITY', 'AND', 'THE', 'CONSEQUENT', 'LIMITATION', 'OF', 'THE', 'RANGE', 'OF', 'VISION', 'THE', 'RIGGING', 'OF', 'THE', 'TOPMASTS', 'ALONE', 'WAS', 'VISIBLE', 'ABOVE', 
'THE', 'WATER'] +5105-28240-0002-1626: ref=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0002-1626: hyp=['EXCLAIMED', 'SERVADAC', 'KEEPING', 'HIS', 'EYE', 'UNMOVED', 'AT', 'HIS', 'TELESCOPE'] +5105-28240-0003-1627: ref=['SHE', 'IS', 'UNDER', 'SAIL', 'BUT', 'SHE', 'IS', 'COUNT', "TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0003-1627: hyp=['SHE', 'IS', 'UNDER', 'SALE', 'BUT', 'SHE', 'IS', 'COUNT', "TIMASCHEFF'S", 'YACHT', 'HE', 'WAS', 'RIGHT'] +5105-28240-0004-1628: ref=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0004-1628: hyp=['IF', 'THE', 'COUNT', 'WERE', 'ON', 'BOARD', 'A', 'STRANGE', 'FATALITY', 'WAS', 'BRINGING', 'HIM', 'TO', 'THE', 'PRESENCE', 'OF', 'HIS', 'RIVAL'] +5105-28240-0005-1629: ref=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0005-1629: hyp=['HE', 'RECKONED', 'THEREFORE', 'NOT', 'ONLY', 'UPON', 'ASCERTAINING', 'THE', 'EXTENT', 'OF', 'THE', 'LATE', 'CATASTROPHE', 'BUT', 'UPON', 'LEARNING', 'ITS', 'CAUSE'] +5105-28240-0006-1630: ref=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0006-1630: hyp=['THE', 'WIND', 'BEING', 'ADVERSE', 'THE', 'DOBRYNA', 'DID', 'NOT', 'MAKE', 'VERY', 'RAPID', 'PROGRESS', 'BUT', 'AS', 'THE', 'WEATHER', 'IN', 'SPITE', 'OF', 'A', 'FEW', 'CLOUDS', 'REMAINED', 'CALM', 'AND', 'THE', 'SEA', 'WAS', 'QUITE', 'SMOOTH', 'SHE', 'WAS', 'ENABLED', 'TO', 'HOLD', 'A', 'STEADY', 'COURSE'] +5105-28240-0007-1631: ref=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0007-1631: hyp=['SERVADAC', 'TOOK', 'IT', 'FOR', 'GRANTED', 'THAT', 'THE', 'DOBRYNA', 'WAS', 'ENDEAVORING', 'TO', 'PUT', 'IN'] +5105-28240-0008-1632: ref=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'ENSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0008-1632: hyp=['A', 'NARROW', 'CHANNEL', 'FORMED', 'A', 'PASSAGE', 'THROUGH', 'THE', 'RIDGE', 'OF', 'ROCKS', 'THAT', 'PROTECTED', 'IT', 'FROM', 'THE', 'OPEN', 'SEA', 'AND', 'WHICH', 'EVEN', 'IN', 'THE', 'ROUGHEST', 'WEATHER', 'WOULD', 'INSURE', 'THE', 'CALMNESS', 'OF', 'ITS', 'WATERS'] +5105-28240-0009-1633: ref=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0009-1633: hyp=['SLIGHTLY', 'CHANGING', 'HER', 'COURSE', 'SHE', 'FIRST', 'STRUCK', 'HER', 'MAINSAIL', 'AND', 'IN', 'ORDER', 'TO', 'FACILITATE', 'THE', 'MOVEMENTS', 'OF', 'HER', 'HELMSMAN', 'SOON', 'CARRIED', 'NOTHING', 'BUT', 'HER', 'TWO', 'TOPSAILS', 'BRIGANTINE', 'AND', 'JIB'] +5105-28240-0010-1634: ref=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARDS', 'HIM'] +5105-28240-0010-1634: hyp=['CAPTAIN', 'SERVADAC', 'HASTENED', 'TOWARD', 'HIM'] 
+5105-28240-0011-1635: ref=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] +5105-28240-0011-1635: hyp=['I', 'LEFT', 'YOU', 'ON', 'A', 'CONTINENT', 'AND', 'HERE', 'I', 'HAVE', 'THE', 'HONOR', 'OF', 'FINDING', 'YOU', 'ON', 'AN', 'ISLAND'] +5105-28240-0012-1636: ref=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0012-1636: hyp=['NEVER', 'MIND', 'NOW', 'INTERPOSED', 'THE', 'CAPTAIN', 'WE', 'WILL', 'TALK', 'OF', 'THAT', 'BY', 'AND', 'BY'] +5105-28240-0013-1637: ref=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0013-1637: hyp=['NOTHING', 'MORE', 'THAN', 'YOU', 'KNOW', 'YOURSELF'] +5105-28240-0014-1638: ref=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0014-1638: hyp=['ARE', 'YOU', 'CERTAIN', 'THAT', 'THIS', 'IS', 'THE', 'MEDITERRANEAN'] +5105-28240-0015-1639: ref=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0015-1639: hyp=['FOR', 'SOME', 'MOMENTS', 'HE', 'SEEMED', 'PERFECTLY', 'STUPEFIED', 'AND', 'THEN', 'RECOVERING', 'HIMSELF', 'HE', 'BEGAN', 'TO', 'OVERWHELM', 'THE', 'COUNT', 'WITH', 'A', 'TORRENT', 'OF', 'QUESTIONS'] +5105-28240-0016-1640: ref=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0016-1640: hyp=['TO', 'ALL', 'THESE', 'INQUIRIES', 'THE', 'COUNT', 'RESPONDED', 'IN', 'THE', 'AFFIRMATIVE'] +5105-28240-0017-1641: ref=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0017-1641: hyp=['SOME', 'MYSTERIOUS', 'FORCE', 'SEEMED', 'TO', 'HAVE', 'BROUGHT', 'ABOUT', 'A', 'CONVULSION', 'OF', 'THE', 'ELEMENTS'] +5105-28240-0018-1642: ref=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0018-1642: hyp=['YOU', 'WILL', 'TAKE', 'ME', 'ON', 'BOARD', 'COUNT', 'WILL', 'YOU', 'NOT'] +5105-28240-0019-1643: ref=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'ROUND', 'THE', 'WORLD'] +5105-28240-0019-1643: hyp=['MY', 'YACHT', 'IS', 'AT', 'YOUR', 'SERVICE', 'SIR', 'EVEN', 'SHOULD', 'YOU', 'REQUIRE', 'TO', 'MAKE', 'A', 'TOUR', 'AROUND', 'THE', 'WORLD'] +5105-28240-0020-1644: ref=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0020-1644: hyp=['THE', 'COUNT', 'SHOOK', 'HIS', 'HEAD'] +5105-28240-0021-1645: ref=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] +5105-28240-0021-1645: hyp=['BEFORE', 'STARTING', 'IT', 'WAS', 'INDISPENSABLE', 'THAT', 'THE', 'ENGINE', 'OF', 'THE', 'DOBRYNA', 'SHOULD', 'BE', 'REPAIRED', 'TO', 'SAIL', 'UNDER', 'CANVAS', 'ONLY', 'WOULD', 'IN', 'CONTRARY', 'WINDS', 'AND', 'ROUGH', 'SEAS', 'BE', 'BOTH', 'TEDIOUS', 'AND', 'DIFFICULT'] +5105-28240-0022-1646: ref=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] +5105-28240-0022-1646: hyp=['IT', 'WAS', 'ON', 'THE', 'LAST', 'DAY', 'OF', 'JANUARY', 'THAT', 'THE', 'REPAIRS', 'OF', 'THE', 'SCHOONER', 'WERE', 'COMPLETED'] 
+5105-28240-0023-1647: ref=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0023-1647: hyp=['A', 'SLIGHT', 'DIMINUTION', 'IN', 'THE', 'EXCESSIVELY', 'HIGH', 'TEMPERATURE', 'WHICH', 'HAD', 'PREVAILED', 'FOR', 'THE', 'LAST', 'FEW', 'WEEKS', 'WAS', 'THE', 'ONLY', 'APPARENT', 'CHANGE', 'IN', 'THE', 'GENERAL', 'ORDER', 'OF', 'THINGS', 'BUT', 'WHETHER', 'THIS', 'WAS', 'TO', 'BE', 'ATTRIBUTED', 'TO', 'ANY', 'ALTERATION', 'IN', 'THE', "EARTH'S", 'ORBIT', 'WAS', 'A', 'QUESTION', 'WHICH', 'WOULD', 'STILL', 'REQUIRE', 'SEVERAL', 'DAYS', 'TO', 'DECIDE'] +5105-28240-0024-1648: ref=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28240-0024-1648: hyp=['DOUBTS', 'NOW', 'AROSE', 'AND', 'SOME', 'DISCUSSION', 'FOLLOWED', 'WHETHER', 'OR', 'NOT', 'IT', 'WAS', 'DESIRABLE', 'FOR', 'BEN', 'ZOOF', 'TO', 'ACCOMPANY', 'HIS', 'MASTER'] +5105-28241-0000-1604: ref=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0000-1604: hyp=['HER', 'SEA', 'GOING', 'QUALITIES', 'WERE', 'EXCELLENT', 'AND', 'WOULD', 'HAVE', 'AMPLY', 'SUFFICED', 'FOR', 'A', 'CIRCUMNAVIGATION', 'OF', 'THE', 'GLOBE'] +5105-28241-0001-1605: ref=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FAR', 'THE', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0001-1605: hyp=['AFTER', 'AN', 'APPRENTICESHIP', 'ON', 'A', 'MERCHANT', 'SHIP', 'HE', 'HAD', 'ENTERED', 'THE', 'IMPERIAL', 'NAVY', 'AND', 'HAD', 'ALREADY', 'REACHED', 'THE', 'RANK', 'OF', 'LIEUTENANT', 'WHEN', 'THE', 'COUNT', 'APPOINTED', 'HIM', 'TO', 'THE', 'CHARGE', 'OF', 'HIS', 'OWN', 'PRIVATE', 'YACHT', 'IN', 'WHICH', 'HE', 'WAS', 'ACCUSTOMED', 'TO', 'SPEND', 'BY', 'FARTHER', 'GREATER', 'PART', 'OF', 'HIS', 'TIME', 'THROUGHOUT', 'THE', 'WINTER', 'GENERALLY', 'CRUISING', 'IN', 'THE', 'MEDITERRANEAN', 'WHILST', 'IN', 'THE', 'SUMMER', 'HE', 'VISITED', 'MORE', 'NORTHERN', 'WATERS'] +5105-28241-0002-1606: ref=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] +5105-28241-0002-1606: hyp=['THE', 'LATE', 'ASTOUNDING', 'EVENTS', 'HOWEVER', 'HAD', 'RENDERED', 'PROCOPE', 'MANIFESTLY', 'UNEASY', 'AND', 'NOT', 'THE', 'LESS', 'SO', 'FROM', 'HIS', 'CONSCIOUSNESS', 'THAT', 'THE', 'COUNT', 'SECRETLY', 'PARTOOK', 'OF', 'HIS', 'OWN', 'ANXIETY'] +5105-28241-0003-1607: ref=['STEAM', 'UP', 'AND', 
'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARDS'] +5105-28241-0003-1607: hyp=['STEAM', 'UP', 'AND', 'CANVAS', 'SPREAD', 'THE', 'SCHOONER', 'STARTED', 'EASTWARDS'] +5105-28241-0004-1608: ref=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLES', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0004-1608: hyp=['ALTHOUGH', 'ONLY', 'A', 'MODERATE', 'BREEZE', 'WAS', 'BLOWING', 'THE', 'SEA', 'WAS', 'ROUGH', 'A', 'CIRCUMSTANCE', 'TO', 'BE', 'ACCOUNTED', 'FOR', 'ONLY', 'BY', 'THE', 'DIMINUTION', 'IN', 'THE', 'FORCE', 'OF', 'THE', "EARTH'S", 'ATTRACTION', 'RENDERING', 'THE', 'LIQUID', 'PARTICLE', 'SO', 'BUOYANT', 'THAT', 'BY', 'THE', 'MERE', 'EFFECT', 'OF', 'OSCILLATION', 'THEY', 'WERE', 'CARRIED', 'TO', 'A', 'HEIGHT', 'THAT', 'WAS', 'QUITE', 'UNPRECEDENTED'] +5105-28241-0005-1609: ref=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0005-1609: hyp=['FOR', 'A', 'FEW', 'MILES', 'SHE', 'FOLLOWED', 'THE', 'LINE', 'HITHERTO', 'PRESUMABLY', 'OCCUPIED', 'BY', 'THE', 'COAST', 'OF', 'ALGERIA', 'BUT', 'NO', 'LAND', 'APPEARED', 'TO', 'THE', 'SOUTH'] +5105-28241-0006-1610: ref=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0006-1610: hyp=['THE', 'LOG', 'AND', 'THE', 'COMPASS', 'THEREFORE', 'WERE', 'ABLE', 'TO', 'BE', 'CALLED', 'UPON', 'TO', 'DO', 'THE', 'WORK', 'OF', 'THE', 'SEXTANT', 'WHICH', 'HAD', 'BECOME', 'UTTERLY', 'USELESS'] +5105-28241-0007-1611: ref=['THERE', 'IS', 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0007-1611: hyp=["THERE'S", 'NO', 'FEAR', 'OF', 'THAT', 'SIR'] +5105-28241-0008-1612: ref=['THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ONTO', 'THE', 'SUN'] +5105-28241-0008-1612: hyp=['THAT', 'THE', 'EARTH', 'HAS', 'UNDOUBTEDLY', 'ENTERED', 'UPON', 'A', 'NEW', 'ORBIT', 'BUT', 'SHE', 'IS', 'NOT', 'INCURRING', 'ANY', 'PROBABLE', 'RISK', 'OF', 'BEING', 'PRECIPITATED', 'ON', 'TO', 'THE', 'SUN'] +5105-28241-0009-1613: ref=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0009-1613: hyp=['AND', 'WHAT', 'DEMONSTRATION', 'DO', 'YOU', 'OFFER', 'ASKED', 'SERVADAC', 'EAGERLY', 'THAT', 'IT', 'WILL', 'NOT', 'HAPPEN'] +5105-28241-0010-1614: ref=['OCEAN', 'REIGNED', 'SUPREME'] +5105-28241-0010-1614: hyp=['OCEAN', 'RAINED', 'SUPREME'] +5105-28241-0011-1615: ref=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 'THOUGHTS', 'SPED', 'AWAY', 'TO', 'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0011-1615: hyp=['ALL', 'THE', 'IMAGES', 'OF', 'HIS', 'PAST', 'LIFE', 'FLOATED', 'UPON', 'HIS', 'MEMORY', 'HIS', 
'THOUGHTS', 'SPED', 'AWAY', 'TO', 'HIS', 'NATIVE', 'FRANCE', 'ONLY', 'TO', 'RETURN', 'AGAIN', 'TO', 'WONDER', 'WHETHER', 'THE', 'DEPTHS', 'OF', 'OCEAN', 'WOULD', 'REVEAL', 'ANY', 'TRACES', 'OF', 'THE', 'ALGERIAN', 'METROPOLIS'] +5105-28241-0012-1616: ref=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0012-1616: hyp=['IS', 'IT', 'NOT', 'IMPOSSIBLE', 'HE', 'MURMURED', 'ALOUD', 'THAT', 'ANY', 'CITY', 'SHOULD', 'DISAPPEAR', 'SO', 'COMPLETELY'] +5105-28241-0013-1617: ref=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0013-1617: hyp=['WOULD', 'NOT', 'THE', 'LOFTIEST', 'EMINENCES', 'OF', 'THE', 'CITY', 'AT', 'LEAST', 'BE', 'VISIBLE'] +5105-28241-0014-1618: ref=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0014-1618: hyp=['ANOTHER', 'CIRCUMSTANCE', 'WAS', 'MOST', 'REMARKABLE'] +5105-28241-0015-1619: ref=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATER'] +5105-28241-0015-1619: hyp=['TO', 'THE', 'SURPRISE', 'OF', 'ALL', 'AND', 'ESPECIALLY', 'OF', 'LIEUTENANT', 'PROCOPE', 'THE', 'LINE', 'INDICATED', 'A', 'BOTTOM', 'AT', 'A', 'NEARLY', 'UNIFORM', 'DEPTH', 'OF', 'FROM', 'FOUR', 'TO', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'THE', 'SOUNDING', 'WAS', 'PERSEVERED', 'WITH', 'CONTINUOUSLY', 'FOR', 'MORE', 'THAN', 'TWO', 'HOURS', 'OVER', 'A', 'CONSIDERABLE', 'AREA', 'THE', 'DIFFERENCES', 'OF', 'LEVEL', 'WERE', 'INSIGNIFICANT', 'NOT', 'CORRESPONDING', 'IN', 'ANY', 'DEGREE', 'TO', 'WHAT', 'WOULD', 'BE', 'EXPECTED', 'OVER', 'THE', 'SITE', 'OF', 'A', 'CITY', 'THAT', 'HAD', 'BEEN', 'TERRACED', 'LIKE', 'THE', 'SEATS', 'OF', 'AN', 'AMPHITHEATRE'] +5105-28241-0016-1620: ref=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0016-1620: hyp=['YOU', 'MUST', 'SEE', 'LIEUTENANT', 'I', 'SHOULD', 'THINK', 'THAT', 'WE', 'ARE', 'NOT', 'SO', 'NEAR', 'THE', 'COAST', 'OF', 'ALGERIA', 'AS', 'YOU', 'IMAGINED'] +5105-28241-0017-1621: ref=['AFTER', 'PONDERING', 'AWHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0017-1621: hyp=['AFTER', 'PONDERING', 'A', 'WHILE', 'HE', 'SAID', 'IF', 'WE', 'WERE', 'FARTHER', 'AWAY', 'I', 'SHOULD', 'EXPECT', 'TO', 'FIND', 'A', 'DEPTH', 'OF', 'TWO', 'OR', 'THREE', 'HUNDRED', 'FATHOMS', 'INSTEAD', 'OF', 'FIVE', 'FATHOMS', 'FIVE', 'FATHOMS'] +5105-28241-0018-1622: ref=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 
'TYPE'] +5105-28241-0018-1622: hyp=['ITS', 'DEPTH', 'REMAINED', 'INVARIABLE', 'STILL', 'FOUR', 'OR', 'AT', 'MOST', 'FIVE', 'FATHOMS', 'AND', 'ALTHOUGH', 'ITS', 'BOTTOM', 'WAS', 'ASSIDUOUSLY', 'DREDGED', 'IT', 'WAS', 'ONLY', 'TO', 'PROVE', 'IT', 'BARREN', 'OF', 'MARINE', 'PRODUCTION', 'OF', 'ANY', 'TYPE'] +5105-28241-0019-1623: ref=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'IN', 'DISAPPOINTMENT', 'TOWARDS', 'THE', 'NORTH'] +5105-28241-0019-1623: hyp=['NOTHING', 'WAS', 'TO', 'BE', 'DONE', 'BUT', 'TO', 'PUT', 'ABOUT', 'AND', 'RETURN', 'AND', 'DISAPPOINTMENT', 'TOWARD', 'THE', 'NORTH'] +5142-33396-0000-898: ref=['AT', 'ANOTHER', 'TIME', 'HARALD', 'ASKED'] +5142-33396-0000-898: hyp=['AT', 'ANOTHER', 'TIME', 'HAROLD', 'ASKED'] +5142-33396-0001-899: ref=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0001-899: hyp=['WHAT', 'IS', 'YOUR', 'COUNTRY', 'OLAF', 'HAVE', 'YOU', 'ALWAYS', 'BEEN', 'A', 'THRALL', 'THE', "THRALL'S", 'EYES', 'FLASHED'] +5142-33396-0002-900: ref=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0002-900: hyp=['TWO', 'HUNDRED', 'WARRIORS', 'FEASTED', 'IN', 'HIS', 'HALL', 'AND', 'FOLLOWED', 'HIM', 'TO', 'BATTLE'] +5142-33396-0003-901: ref=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0003-901: hyp=['THE', 'REST', 'OF', 'YOU', 'OFF', 'A', 'VIKING', 'HE', 'HAD', 'THREE', 'SHIPS'] +5142-33396-0004-902: ref=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0004-902: hyp=['THESE', 'HE', 'GAVE', 'TO', 'THREE', 'OF', 'MY', 'BROTHERS'] +5142-33396-0005-903: ref=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0005-903: hyp=['BUT', 'I', 'STAYED', 'THAT', 'SPRING', 'AND', 'BUILT', 'ME', 'A', 'BOAT'] +5142-33396-0006-904: ref=['I', 'MADE', 'HER', 'FOR', 'ONLY', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0006-904: hyp=['I', 'MADE', 'HER', 'FALLING', 'TWENTY', 'OARS', 'BECAUSE', 'I', 'THOUGHT', 'FEW', 'MEN', 'WOULD', 'FOLLOW', 'ME', 'FOR', 'I', 'WAS', 'YOUNG', 'FIFTEEN', 'YEARS', 'OLD'] +5142-33396-0007-905: ref=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0007-905: hyp=['AT', 'THE', 'PROW', 'I', 'CARVED', 'THE', 'HEAD', 'WITH', 'OPEN', 'MOUTH', 'AND', 'FORKED', 'TONGUE', 'THRUST', 'OUT'] +5142-33396-0008-906: ref=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0008-906: hyp=['I', 'PAINTED', 'THE', 'EYES', 'RED', 'FOR', 'ANGER'] +5142-33396-0009-907: ref=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0009-907: hyp=['THERE', 'STAND', 'SO', 'I', 'SAID', 'AND', 'GLARE', 'AND', 'HISS', 'AT', 'MY', 'FOES'] +5142-33396-0010-908: ref=['IN', 'THE', 'STERN', 'I', 'CURVED', 'THE', 'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0010-908: hyp=['IN', 'THE', 'STERN', 'I', 'CARVED', 'THE', 'TAIL', 'UP', 'ALMOST', 'AS', 'HIGH', 'AS', 'THE', 'HEAD'] +5142-33396-0011-909: ref=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] +5142-33396-0011-909: hyp=['THERE', 'SHE', 'SAT', 'ON', 'THE', 'ROLLERS', 'AS', 'FAIR', 'A', 'SHIP', 'AS', 'I', 'EVER', 'SAW'] 
+5142-33396-0012-910: ref=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', 'WILL', 'WINTER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0012-910: hyp=['THEN', 'I', 'WILL', 'GET', 'ME', 'A', 'FARM', 'AND', "WE'LL", 'WINNER', 'IN', 'THAT', 'LAND', 'NOW', 'WHO', 'WILL', 'FOLLOW', 'ME'] +5142-33396-0013-911: ref=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MEN', 'SAID'] +5142-33396-0013-911: hyp=['HE', 'IS', 'BUT', 'A', 'BOY', 'THE', 'MAN', 'SAID'] +5142-33396-0014-912: ref=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0014-912: hyp=['THIRTY', 'MEN', 'ONE', 'AFTER', 'ANOTHER', 'RAISED', 'THEIR', 'HORNS', 'AND', 'SAID'] +5142-33396-0015-913: ref=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0015-913: hyp=['AS', 'OUR', 'BOAT', 'FLASHED', 'DOWN', 'THE', 'ROLLERS', 'INTO', 'THE', 'WATER', 'I', 'MADE', 'THIS', 'SONG', 'AND', 'SANG', 'IT'] +5142-33396-0016-914: ref=['SO', 'WE', 'HARRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0016-914: hyp=['SO', 'WE', 'HARRIED', 'THE', 'COAST', 'OF', 'NORWAY'] +5142-33396-0017-915: ref=['WE', 'ATE', 'AT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0017-915: hyp=['WE', 'ATE', 'IT', 'MANY', "MEN'S", 'TABLES', 'UNINVITED'] +5142-33396-0018-916: ref=['MY', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOLD'] +5142-33396-0018-916: hyp=['I', "DRAGON'S", 'BELLY', 'IS', 'NEVER', 'FULL', 'AND', 'ON', 'BOARD', 'WENT', 'THE', 'GOLD'] +5142-33396-0019-917: ref=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0019-917: hyp=['OH', 'IT', 'IS', 'BETTER', 'TO', 'LIVE', 'ON', 'THE', 'SEA', 'AND', 'LET', 'OTHER', 'MEN', 'RAISE', 'YOUR', 'CROPS', 'AND', 'COOK', 'YOUR', 'MEALS'] +5142-33396-0020-918: ref=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', 'SHIP', 'SMELLS', 'OF', 'FROLIC'] +5142-33396-0020-918: hyp=['A', 'HOUSE', 'SMELLS', 'OF', 'SMOKE', 'A', "SHIP'S", 'MILLS', 'OF', 'FROLIC'] +5142-33396-0021-919: ref=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0021-919: hyp=['UP', 'AND', 'DOWN', 'THE', 'WATER', 'WE', 'WENT', 'TO', 'GET', 'MUCH', 'WEALTH', 'AND', 'MUCH', 'FROLIC'] +5142-33396-0022-920: ref=['WHAT', 'OF', 'THE', 'FARM', 'OLAF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0022-920: hyp=['WHAT', 'IS', 'THE', 'FARM', 'OLAF', 'NOT', 'YET', 'I', 'ANSWERED', 'VIKING', 'IS', 'BETTER', 'FOR', 'SUMMER'] +5142-33396-0023-921: ref=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0023-921: hyp=['IT', 'WAS', 'SO', 'DARK', 'THAT', 'I', 'COULD', 'SEE', 'NOTHING', 'BUT', 'A', 'FEW', 'SPARKS', 'ON', 'THE', 'HEARTH'] +5142-33396-0024-922: ref=['I', 'STOOD', 'WITH', 'MY', 'BACK', 'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0024-922: hyp=['I', 'STOOD', 'WITH', 'MY', 'BACK', 'TO', 'THE', 'WALL', 'FOR', 'I', 'WANTED', 'NO', 'SWORD', 'REACHING', 'OUT', 'OF', 'THE', 'DARK', 'FOR', 'ME'] +5142-33396-0025-923: ref=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 'FIRE'] +5142-33396-0025-923: hyp=['COME', 'COME', 'I', 'CALLED', 'WHEN', 'NO', 'ONE', 'OBEYED', 'A', 
'FIRE'] +5142-33396-0026-924: ref=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOST'] +5142-33396-0026-924: hyp=['MY', 'MEN', 'LAUGHED', 'YES', 'A', 'STINGY', 'HOSE'] +5142-33396-0027-925: ref=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'HAD', 'NOT', 'EXPECTED', 'US'] +5142-33396-0027-925: hyp=['HE', 'ACTS', 'AS', 'THOUGH', 'HE', 'IS', 'NOT', 'EXPECTED', 'US'] +5142-33396-0028-926: ref=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0028-926: hyp=['ON', 'A', 'BENCH', 'IN', 'A', 'FAR', 'CORNER', 'WERE', 'A', 'DOZEN', 'PEOPLE', 'HUDDLED', 'TOGETHER'] +5142-33396-0029-927: ref=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0029-927: hyp=['BRING', 'IN', 'THE', 'TABLE', 'WE', 'ARE', 'HUNGRY'] +5142-33396-0030-928: ref=['THE', 'THRALLS', 'WERE', 'BRINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0030-928: hyp=['THE', 'THRALLS', 'WERE', 'RINGING', 'IN', 'A', 'GREAT', 'POT', 'OF', 'MEAT'] +5142-33396-0031-929: ref=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0031-929: hyp=['THEY', 'SET', 'UP', 'A', 'CRANE', 'OVER', 'THE', 'FIRE', 'AND', 'HUNG', 'THE', 'POT', 'UPON', 'IT', 'AND', 'WE', 'SAT', 'AND', 'WATCHED', 'IT', 'BOIL', 'WHILE', 'WE', 'JOKED', 'AT', 'LAST', 'THE', 'SUPPER', 'BEGAN'] +5142-33396-0032-930: ref=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASKET', 'LOADS', 'OF', 'BREAD', 'INTO', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0032-930: hyp=['THE', 'FARMER', 'SAT', 'GLOOMILY', 'ON', 'THE', 'BENCH', 'AND', 'WOULD', 'NOT', 'EAT', 'AND', 'YOU', 'CANNOT', 'WONDER', 'FOR', 'HE', 'SAW', 'US', 'PUTTING', 'POTFULS', 'OF', 'HIS', 'GOOD', 'BEEF', 'AND', 'BASCULADES', 'OF', 'BREAD', 'AND', 'OUR', 'BIG', 'MOUTHS'] +5142-33396-0033-931: ref=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0033-931: hyp=['YOU', 'WOULD', 'NOT', 'EAT', 'WITH', 'US', 'YOU', 'CANNOT', 'SAY', 'NO', 'TO', 'HALF', 'OF', 'MY', 'ALE', 'I', 'DRINK', 'THIS', 'TO', 'YOUR', 'HEALTH'] +5142-33396-0034-932: ref=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SENT', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0034-932: hyp=['THEN', 'I', 'DRANK', 'HALF', 'OF', 'THE', 'HORNFUL', 'AND', 'SET', 'THE', 'REST', 'ACROSS', 'THE', 'FIRE', 'TO', 'THE', 'FARMER', 'HE', 'TOOK', 'IT', 'AND', 'SMILED', 'SAYING'] +5142-33396-0035-933: ref=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0035-933: hyp=['DID', 'YOU', 'EVER', 'HAVE', 'SUCH', 'A', 'LORDLY', 'GUEST', 'BEFORE', 'I', 'WENT', 'ON'] +5142-33396-0036-934: ref=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALONE'] +5142-33396-0036-934: hyp=['SO', 'I', 'WILL', 'GIVE', 'OUT', 'THIS', 'LAW', 'THAT', 'MY', 'MEN', 'SHALL', 'NEVER', 'LEAVE', 'YOU', 'ALONE'] +5142-33396-0037-935: ref=['HAKON', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0037-935: hyp=['HAWKIN', 'THERE', 'SHALL', 'BE', 'YOUR', 'CONSTANT', 
'COMPANION', 'FRIEND', 'FARMER'] +5142-33396-0038-936: ref=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 'SLEEPING'] +5142-33396-0038-936: hyp=['HE', 'SHALL', 'NOT', 'LEAVE', 'YOU', 'DAY', 'OR', 'NIGHT', 'WHETHER', 'YOU', 'ARE', 'WORKING', 'OR', 'PLAYING', 'OR', 'SLEEPING'] +5142-33396-0039-937: ref=['I', 'NAMED', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0039-937: hyp=['I', 'NAME', 'NINE', 'OTHERS', 'AND', 'SAID'] +5142-33396-0040-938: ref=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0040-938: hyp=['AND', 'THESE', 'SHALL', 'FOLLOW', 'YOUR', 'THRALLS', 'IN', 'THE', 'SAME', 'WAY'] +5142-33396-0041-939: ref=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0041-939: hyp=['SO', 'I', 'SET', 'GUARDS', 'OVER', 'EVERY', 'ONE', 'IN', 'THAT', 'HOUSE'] +5142-33396-0042-940: ref=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0042-940: hyp=['SO', 'NO', 'TALES', 'GOT', 'OUT', 'TO', 'THE', 'NEIGHBORS', 'BESIDES', 'IT', 'WAS', 'A', 'LONELY', 'PLACE', 'AND', 'BY', 'GOOD', 'LUCK', 'NO', 'ONE', 'CAME', 'THAT', 'WAY'] +5142-33396-0043-941: ref=['THEIR', 'EYES', 'DANCED', 'BIG', 'THORLEIF', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0043-941: hyp=['THEIR', 'EYES', 'DANCED', 'BIG', 'TORE', 'LEAF', 'STOOD', 'UP', 'AND', 'STRETCHED', 'HIMSELF'] +5142-33396-0044-942: ref=['I', 'AM', 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0044-942: hyp=["I'M", 'STIFF', 'WITH', 'LONG', 'SITTING', 'HE', 'SAID', 'I', 'ITCH', 'FOR', 'A', 'FIGHT', 'I', 'TURNED', 'TO', 'THE', 'FARMER'] +5142-33396-0045-943: ref=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0045-943: hyp=['THIS', 'IS', 'OUR', 'LAST', 'FEAST', 'WITH', 'YOU', 'I', 'SAID'] +5142-33396-0046-944: ref=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0046-944: hyp=['BY', 'THE', 'BEARD', 'OF', 'ODIN', 'I', 'CRIED', 'YOU', 'HAVE', 'TAKEN', 'OUR', 'JOKE', 'LIKE', 'A', 'MAN'] +5142-33396-0047-945: ref=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0047-945: hyp=['MY', 'MEN', 'POUNDED', 'THE', 'TABLE', 'WITH', 'THEIR', 'FISTS'] +5142-33396-0048-946: ref=['BY', 'THE', 'HAMMER', 'OF', 'THOR', 'SHOUTED', 'GRIM', 'HERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0048-946: hyp=['BY', 'THE', 'HAMMER', 'AUTHOR', 'SHOUTED', 'GRIM', 'THERE', 'IS', 'NO', 'STINGY', 'COWARD'] +5142-33396-0049-947: ref=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0049-947: hyp=['HERE', 'FRIEND', 'TAKE', 'IT', 'AND', 'HE', 'THRUST', 'IT', 'INTO', 'THE', "FARMER'S", 'HAND'] +5142-33396-0050-948: ref=['MAY', 'YOU', 'DRINK', "HEART'S", 'EASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] +5142-33396-0050-948: hyp=['MAY', 'YOU', 'DRINK', 'HEARTSEASE', 'FROM', 'IT', 'FOR', 'MANY', 'YEARS'] +5142-33396-0051-949: ref=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIF', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 'DRINK', 'WITH', 'YOU', 'SOMETIME', 'IN', 'VALHALLA'] +5142-33396-0051-949: hyp=['AND', 'WITH', 'IT', 'I', 'LEAVE', 'YOU', 'A', 'NAME', 'SIFT', 'THE', 'FRIENDLY', 'I', 'SHALL', 'HOPE', 'TO', 
'DRINK', 'WITH', 'YOU', 'SOME', 'TIME', 'IN', 'VALHALLA'] +5142-33396-0052-950: ref=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIF', 'THE', 'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0052-950: hyp=['HERE', 'IS', 'A', 'RING', 'FOR', 'SIFT', 'THE', 'FRIENDLY', 'AND', 'HERE', 'IS', 'A', 'BRACELET', 'AND', 'A', 'SWORD', 'WOULD', 'NOT', 'BE', 'ASHAMED', 'TO', 'HANG', 'AT', 'YOUR', 'SIDE'] +5142-33396-0053-951: ref=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO', 'HIM'] +5142-33396-0053-951: hyp=['I', 'TOOK', 'FIVE', 'GREAT', 'BRACELETS', 'OF', 'GOLD', 'FROM', 'OUR', 'TREASURE', 'CHEST', 'AND', 'GAVE', 'THEM', 'TO', 'HIM'] +5142-33396-0054-952: ref=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0054-952: hyp=['THAT', 'IS', 'THE', 'BEST', 'WAY', 'TO', 'DECIDE', 'FOR', 'THE', 'SPEAR', 'WILL', 'ALWAYS', 'POINT', 'SOMEWHERE', 'AND', 'ONE', 'THING', 'IS', 'AS', 'GOOD', 'AS', 'ANOTHER'] +5142-33396-0055-953: ref=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0055-953: hyp=['THAT', 'TIME', 'IT', 'POINTED', 'US', 'INTO', 'YOUR', "FATHER'S", 'SHIPS'] +5142-33396-0056-954: ref=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0056-954: hyp=['HERE', 'THEY', 'SAID', 'IS', 'A', 'RASCAL', 'WHO', 'HAS', 'BEEN', 'HARRYING', 'OUR', 'COASTS'] +5142-33396-0057-955: ref=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0057-955: hyp=['WE', 'SUNK', 'HIS', 'SHIP', 'AND', 'MEN', 'BUT', 'HIM', 'WE', 'BROUGHT', 'TO', 'YOU'] +5142-33396-0058-956: ref=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'SCOWLED', 'AT', 'ME'] +5142-33396-0058-956: hyp=['A', 'ROBBER', 'VIKING', 'SAID', 'THE', 'KING', 'AND', 'HE', 'SCOWLED', 'AT', 'ME'] +5142-33396-0059-957: ref=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0059-957: hyp=['YES', 'AND', 'WITH', 'ALL', 'YOUR', 'FINGERS', 'IT', 'TOOK', 'YOU', 'A', 'YEAR', 'TO', 'CATCH', 'ME', 'THE', 'KING', 'FROWNED', 'MORE', 'ANGRILY'] +5142-33396-0060-958: ref=['TAKE', 'HIM', 'OUT', 'THORKEL', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0060-958: hyp=['TAKE', 'HIM', 'OUT', 'TORQUAL', 'AND', 'LET', 'HIM', 'TASTE', 'YOUR', 'SWORD'] +5142-33396-0061-959: ref=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0061-959: hyp=['YOUR', 'MOTHER', 'THE', 'QUEEN', 'WAS', 'STANDING', 'BY'] +5142-33396-0062-960: ref=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0062-960: hyp=['NOW', 'SHE', 'PUT', 'HER', 'HAND', 'ON', 'HIS', 'ARM', 'AND', 'SMILED', 'AND', 'SAID'] +5142-33396-0063-961: ref=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0063-961: hyp=['AND', 'WOULD', 'HE', 'NOT', 'BE', 'A', 'GOOD', 'GIFT', 'FOR', 'OUR', 'BABY'] +5142-33396-0064-962: ref=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'THEN', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMILED'] +5142-33396-0064-962: hyp=['YOUR', 'FATHER', 'THOUGHT', 'A', 'MOMENT', 'AND', 'LOOKED', 'AT', 'YOUR', 'MOTHER', 'AND', 'SMILED'] +5142-33396-0065-963: 
ref=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'THORKEL', 'WELL', 'LET', 'HIM', 'GO', 'THORKEL'] +5142-33396-0065-963: hyp=['SOFT', 'HEART', 'HE', 'SAID', 'GENTLY', 'TO', 'HER', 'THEN', 'TO', 'TORCOAL', 'WELL', 'LET', 'HIM', 'GO', 'TORKLE'] +5142-33396-0066-964: ref=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 'FROWNING'] +5142-33396-0066-964: hyp=['THEN', 'HE', 'TURNED', 'TO', 'ME', 'AGAIN', 'FROWNING'] +5142-33396-0067-965: ref=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', 'WE', 'HAVE', 'CAUGHT', 'YOU', 'WE', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0067-965: hyp=['BUT', 'YOUNG', 'SHARP', 'TONGUE', 'NOW', 'THAT', "WE'VE", 'CAUGHT', 'YOU', 'WILL', 'PUT', 'YOU', 'INTO', 'A', 'TRAP', 'THAT', 'YOU', 'CANNOT', 'GET', 'OUT', 'OF'] +5142-33396-0068-966: ref=['SO', 'I', 'LIVED', 'AND', 'NOW', 'AM', 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-33396-0068-966: hyp=['SO', 'I', 'LIVED', 'AND', 'NOW', 'AM', 'YOUR', 'TOOTH', 'THRALL', 'WELL', 'IT', 'IS', 'THE', 'LUCK', 'OF', 'WAR'] +5142-36377-0000-870: ref=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DUMAS', 'THE', 'ELDER'] +5142-36377-0000-870: hyp=['IT', 'WAS', 'ONE', 'OF', 'THE', 'MASTERLY', 'AND', 'CHARMING', 'STORIES', 'OF', 'DE', 'MAU', 'THE', 'ELDER'] +5142-36377-0001-871: ref=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0001-871: hyp=['IN', 'FIVE', 'MINUTES', 'I', 'WAS', 'IN', 'A', 'NEW', 'WORLD', 'AND', 'MY', 'MELANCHOLY', 'ROOM', 'WAS', 'FULL', 'OF', 'THE', 'LIVELIEST', 'FRENCH', 'COMPANY'] +5142-36377-0002-872: ref=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0002-872: hyp=['THE', 'SOUND', 'OF', 'AN', 'IMPERATIVE', 'AND', 'UNCOMPROMISING', 'BELL', 'RECALLED', 'ME', 'IN', 'DUE', 'TIME', 'TO', 'THE', 'REGIONS', 'OF', 'REALITY'] +5142-36377-0003-873: ref=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0003-873: hyp=['AMBROSE', 'MET', 'ME', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'STAIRS', 'AND', 'SHOWED', 'ME', 'THE', 'WAY', 'TO', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0004-874: ref=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0004-874: hyp=['SHE', 'SIGNED', 'TO', 'ME', 'WITH', 'A', 'GHOSTLY', 'SOLEMNITY', 'TO', 'TAKE', 'THE', 'VACANT', 'PLACE', 'ON', 'THE', 'LEFT', 'OF', 'HER', 'FATHER'] +5142-36377-0005-875: ref=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0005-875: hyp=['THE', 'DOOR', 'OPENED', 'AGAIN', 'WHILE', 'I', 'WAS', 'STILL', 'STUDYING', 'THE', 'TWO', 'BROTHERS', 'WITHOUT', 'I', 'HONESTLY', 'CONFESS', 'BEING', 'VERY', 'FAVORABLY', 'IMPRESSED', 'BY', 'EITHER', 'OF', 'THEM'] +5142-36377-0006-876: ref=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0006-876: hyp=['A', 'NEW', 'MEMBER', 'OF', 'THE', 'FAMILY', 'CIRCLE', 'WHO', 'INSTANTLY', 
'ATTRACTED', 'MY', 'ATTENTION', 'ENTERED', 'THE', 'ROOM'] +5142-36377-0007-877: ref=['A', 'LITTLE', 'CRACKED', 'THAT', 'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0007-877: hyp=['A', 'LITTLE', 'CRACKED', 'THAT', 'IN', 'THE', 'POPULAR', 'PHRASE', 'WAS', 'MY', 'IMPRESSION', 'OF', 'THE', 'STRANGER', 'WHO', 'NOW', 'MADE', 'HIS', 'APPEARANCE', 'IN', 'THE', 'SUPPER', 'ROOM'] +5142-36377-0008-878: ref=['MISTER', 'MEADOWCROFT', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEWCOMER', 'TO', 'ME', 'WITH', 'A', 'SIDE', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0008-878: hyp=['MISTER', 'MEDICROFT', 'THE', 'ELDER', 'HAVING', 'NOT', 'SPOKEN', 'ONE', 'WORD', 'THUS', 'FAR', 'HIMSELF', 'INTRODUCED', 'THE', 'NEWCOMER', 'TO', 'ME', 'WITH', 'A', 'SIDE', 'GLANCE', 'AT', 'HIS', 'SONS', 'WHICH', 'HAD', 'SOMETHING', 'LIKE', 'DEFIANCE', 'IN', 'IT', 'A', 'GLANCE', 'WHICH', 'AS', 'I', 'WAS', 'SORRY', 'TO', 'NOTICE', 'WAS', 'RETURNED', 'WITH', 'THE', 'DEFIANCE', 'ON', 'THEIR', 'SIDE', 'BY', 'THE', 'TWO', 'YOUNG', 'MEN'] +5142-36377-0009-879: ref=['PHILIP', 'LEFRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'JAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0009-879: hyp=['PHILIP', 'LE', 'FRANK', 'THIS', 'IS', 'MY', 'OVERLOOKER', 'MISTER', 'YAGO', 'SAID', 'THE', 'OLD', 'MAN', 'FORMALLY', 'PRESENTING', 'US'] +5142-36377-0010-880: ref=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGE', 'OF', 'SCENE'] +5142-36377-0010-880: hyp=['HE', 'IS', 'NOT', 'WELL', 'HE', 'HAS', 'COME', 'OVER', 'THE', 'OCEAN', 'FOR', 'REST', 'AND', 'CHANGES', 'SCENE'] +5142-36377-0011-881: ref=['MISTER', 'JAGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0011-881: hyp=['THIS', 'GIAGO', 'IS', 'AN', 'AMERICAN', 'PHILIP'] +5142-36377-0012-882: ref=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISTER', 'JAGO', 'SIT', 'TOGETHER'] +5142-36377-0012-882: hyp=['MAKE', 'ACQUAINTANCE', 'WITH', 'MISS', 'GIAGO', 'SIT', 'TOGETHER'] +5142-36377-0013-883: ref=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'JAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'TO', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0013-883: hyp=['THEY', 'POINTEDLY', 'DREW', 'BACK', 'FROM', 'JOHN', 'YAGO', 'AS', 'HE', 'APPROACHED', 'THE', 'EMPTY', 'CHAIR', 'NEXT', 'ME', 'AND', 'MOVED', 'ROUND', 'TO', 'THE', 'OPPOSITE', 'SIDE', 'OF', 'THE', 'TABLE'] +5142-36377-0014-884: ref=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 
'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0014-884: hyp=['A', 'PRETTY', 'GIRL', 'AND', 'SO', 'FAR', 'AS', 'I', 'COULD', 'JUDGE', 'BY', 'APPEARANCES', 'A', 'GOOD', 'GIRL', 'TOO', 'DESCRIBING', 'HER', 'GENERALLY', 'I', 'MAY', 'SAY', 'THAT', 'SHE', 'HAD', 'A', 'SMALL', 'HEAD', 'WELL', 'CARRIED', 'AND', 'WELL', 'SET', 'ON', 'HER', 'SHOULDERS', 'BRIGHT', 'GRAY', 'EYES', 'THAT', 'LOOKED', 'AT', 'YOU', 'HONESTLY', 'AND', 'MEANT', 'WHAT', 'THEY', 'LOOKED', 'A', 'TRIM', 'SLIGHT', 'LITTLE', 'FIGURE', 'TOO', 'SLIGHT', 'FOR', 'OUR', 'ENGLISH', 'NOTIONS', 'OF', 'BEAUTY', 'A', 'STRONG', 'AMERICAN', 'ACCENT', 'AND', 'A', 'RARE', 'THING', 'IN', 'AMERICA', 'A', 'PLEASANTLY', 'TONED', 'VOICE', 'WHICH', 'MADE', 'THE', 'ACCENT', 'AGREEABLE', 'TO', 'ENGLISH', 'EARS'] +5142-36377-0015-885: ref=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0015-885: hyp=['OUR', 'FIRST', 'IMPRESSIONS', 'OF', 'PEOPLE', 'ARE', 'IN', 'NINE', 'CASES', 'OUT', 'OF', 'TEN', 'THE', 'RIGHT', 'IMPRESSIONS'] +5142-36377-0016-886: ref=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0016-886: hyp=['FOR', 'ONCE', 'IN', 'A', 'WAY', 'I', 'PROVED', 'A', 'TRUE', 'PROPHET'] +5142-36377-0017-887: ref=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0017-887: hyp=['THE', 'ONLY', 'CHEERFUL', 'CONVERSATION', 'WAS', 'THE', 'CONVERSATION', 'ACROSS', 'THE', 'TABLE', 'BETWEEN', 'NAOMI', 'AND', 'ME'] +5142-36377-0018-888: ref=['HE', 'LOOKED', 'UP', 'AT', 'NAOMI', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0018-888: hyp=['HE', 'LOOKED', 'UP', 'AND', 'NOW', 'ON', 'ME', 'DOUBTINGLY', 'FROM', 'HIS', 'PLATE', 'AND', 'LOOKED', 'DOWN', 'AGAIN', 'SLOWLY', 'WITH', 'A', 'FROWN'] +5142-36377-0019-889: ref=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0019-889: hyp=['WHEN', 'I', 'ADDRESSED', 'HIM', 'HE', 'ANSWERED', 'CONSTRAINEDLY'] +5142-36377-0020-890: ref=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0020-890: hyp=['A', 'MORE', 'DREARY', 'AND', 'MORE', 'DISUNITED', 'FAMILY', 'PARTY', 'I', 'NEVER', 'SAT', 'AT', 'THE', 'TABLE', 'WITH'] +5142-36377-0021-891: ref=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'A', 'SENSE', 'OF', 'PROPRIETY', 'AND', 'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'IN', 'NAOMI', 'AND', 'MY', 'OTHER', 'INTEREST', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 'SUPPER'] +5142-36377-0021-891: hyp=['ENVY', 'HATRED', 'MALICE', 'AND', 'UNCHARITABLENESS', 'ARE', 'NEVER', 'SO', 'ESSENTIALLY', 'DETESTABLE', 'TO', 'MY', 'MIND', 'AS', 'WHEN', 'THEY', 'ARE', 'ANIMATED', 'BY', 'THE', 'SENSE', 'OF', 'PROPRIETY', 'AND', 'WORK', 'UNDER', 'THE', 'SURFACE', 'BUT', 'FOR', 'MY', 'INTEREST', 'TO', 'NAY', 'OWE', 'ME', 'AND', 'MY', 'OTHER', 'INTERESTS', 'IN', 'THE', 'LITTLE', 'LOVE', 'LOOKS', 'WHICH', 'I', 'NOW', 'AND', 'THEN', 'SURPRISED', 'PASSING', 'BETWEEN', 'HER', 'AND', 'AMBROSE', 'I', 'SHOULD', 'NEVER', 'HAVE', 'SAT', 'THROUGH', 'THAT', 
'SUPPER'] +5142-36377-0022-892: ref=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0022-892: hyp=['I', 'WISH', 'YOU', 'GOOD', 'NIGHT', 'SHE', 'LAID', 'HER', 'BONY', 'HANDS', 'ON', 'THE', 'BACK', 'OF', 'MISTER', "MEADOWCROFT'S", 'INVALID', 'CHAIR', 'CUT', 'HIM', 'SHORT', 'IN', 'HIS', 'FAREWELL', 'SALUTATION', 'TO', 'ME', 'AND', 'WHEELED', 'HIM', 'OUT', 'TO', 'HIS', 'BED', 'AS', 'IF', 'SHE', 'WERE', 'WHEELING', 'HIM', 'OUT', 'TO', 'HIS', 'GRAVE'] +5142-36377-0023-893: ref=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHN', 'JAGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0023-893: hyp=['YOU', 'WERE', 'QUITE', 'RIGHT', 'TO', 'SAY', 'NO', 'AMBROSE', 'BEGAN', 'NEVER', 'SMOKE', 'WITH', 'JOHNNIAGO', 'HIS', 'CIGARS', 'WILL', 'POISON', 'YOU'] +5142-36377-0024-894: ref=['NAOMI', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0024-894: hyp=['THEY', 'ONLY', 'SHOOK', 'HER', 'FOREFINGER', 'REPROACHFULLY', 'AT', 'THEM', 'AS', 'IF', 'THE', 'TWO', 'STURDY', 'YOUNG', 'FARMERS', 'HAD', 'BEEN', 'TWO', 'CHILDREN'] +5142-36377-0025-895: ref=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WITH', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36377-0025-895: hyp=['SILAS', 'SLUNK', 'AWAY', 'WITHOUT', 'A', 'WORD', 'OF', 'PROTEST', 'AMBROSE', 'STOOD', 'HIS', 'GROUND', 'EVIDENTLY', 'BENT', 'ON', 'MAKING', 'HIS', 'PEACE', 'WHEN', 'NAOMI', 'BEFORE', 'HE', 'LEFT', 'HER', 'SEEING', 'THAT', 'I', 'WAS', 'IN', 'THE', 'WAY', 'I', 'WALKED', 'ASIDE', 'TOWARD', 'A', 'GLASS', 'DOOR', 'AT', 'THE', 'LOWER', 'END', 'OF', 'THE', 'ROOM'] +5142-36586-0000-967: ref=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0000-967: hyp=['IT', 'IS', 'MANIFEST', 'THAT', 'MAN', 'IS', 'NOW', 'SUBJECT', 'TO', 'MUCH', 'VARIABILITY'] +5142-36586-0001-968: ref=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0001-968: hyp=['SO', 'IT', 'IS', 'WITH', 'THE', 'LOWER', 'ANIMALS'] +5142-36586-0002-969: ref=['THE', 'VARIABILITY', 'OF', 'MULTIPLE', 'PARTS'] +5142-36586-0002-969: hyp=['THE', 'VARIABILITY', 'OF', 'MULTIPLE', 'PARTS'] +5142-36586-0003-970: ref=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0003-970: hyp=['BUT', 'THIS', 'SUBJECT', 'WILL', 'BE', 'MORE', 'PROPERLY', 'DISCUSSED', 'WHEN', 'WE', 'TREAT', 'OF', 'THE', 'DIFFERENT', 'RACES', 'OF', 'MANKIND'] +5142-36586-0004-971: ref=['EFFECTS', 'OF', 'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36586-0004-971: hyp=['EFFECTS', 'OF', 'THE', 'INCREASED', 'USE', 'AND', 'DISUSE', 'OF', 'PARTS'] +5142-36600-0000-896: ref=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 'MAN'] +5142-36600-0000-896: hyp=['CHAPTER', 'SEVEN', 'ON', 'THE', 'RACES', 'OF', 
'MAN'] +5142-36600-0001-897: ref=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'AS', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 'WHETHER', 'SUCH', 'DIFFERENCES', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5142-36600-0001-897: hyp=['IN', 'DETERMINING', 'WHETHER', 'TWO', 'OR', 'MORE', 'ALLIED', 'FORMS', 'OUGHT', 'TO', 'BE', 'RANKED', 'A', 'SPECIES', 'OR', 'VARIETIES', 'NATURALISTS', 'ARE', 'PRACTICALLY', 'GUIDED', 'BY', 'THE', 'FOLLOWING', 'CONSIDERATIONS', 'NAMELY', 'THE', 'AMOUNT', 'OF', 'DIFFERENCE', 'BETWEEN', 'THEM', 'AND', 'WHETHER', 'SUCH', 'DIFFERENCE', 'IS', 'RELATE', 'TO', 'FEW', 'OR', 'MANY', 'POINTS', 'OF', 'STRUCTURE', 'AND', 'WHETHER', 'THEY', 'ARE', 'OF', 'PHYSIOLOGICAL', 'IMPORTANCE', 'BUT', 'MORE', 'ESPECIALLY', 'WHETHER', 'THEY', 'ARE', 'CONSTANT'] +5639-40744-0000-137: ref=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THEY', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THEIR', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0000-137: hyp=['ELEVEN', "O'CLOCK", 'HAD', 'STRUCK', 'IT', 'WAS', 'A', 'FINE', 'CLEAR', 'NIGHT', 'THERE', 'WERE', 'THE', 'ONLY', 'PERSONS', 'ON', 'THE', 'ROAD', 'AND', 'THEY', 'SAUNTERED', 'LEISURELY', 'ALONG', 'TO', 'AVOID', 'PAYING', 'THE', 'PRICE', 'OF', 'FATIGUE', 'FOR', 'THE', 'RECREATION', 'PROVIDED', 'FOR', 'THE', 'TOLEDANS', 'IN', 'THE', 'VALLEY', 'OR', 'ON', 'THE', 'BANKS', 'OF', 'THEIR', 'RIVER'] +5639-40744-0001-138: ref=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HIDALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFAL', 'HIS', 'FAMILY'] +5639-40744-0001-138: hyp=['SECURE', 'AS', 'HE', 'THOUGHT', 'IN', 'THE', 'CAREFUL', 'ADMINISTRATION', 'OF', 'JUSTICE', 'IN', 'THAT', 'CITY', 'AND', 'THE', 'CHARACTER', 'OF', 'ITS', 'WELL', 'DISPOSED', 'INHABITANTS', 'THE', 'GOOD', 'HIDALGO', 'WAS', 'FAR', 'FROM', 'THINKING', 'THAT', 'ANY', 'DISASTER', 'COULD', 'BEFALL', 'HIS', 'FAMILY'] +5639-40744-0002-139: ref=['RODOLFO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0002-139: hyp=['RUDOLPHO', 'AND', 'HIS', 'COMPANIONS', 'WITH', 'THEIR', 'FACES', 'MUFFLED', 'IN', 'THEIR', 'CLOAKS', 'STARED', 'RUDELY', 'AND', 'INSOLENTLY', 'AT', 'THE', 'MOTHER', 'THE', 'DAUGHTER', 'AND', 'THE', 'SERVANT', 'MAID'] +5639-40744-0003-140: ref=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 'HIS', 'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RODOLFO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARASITES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 
'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'LEOCADIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0003-140: hyp=['IN', 'A', 'MOMENT', 'HE', 'COMMUNICATED', 'HIS', 'THOUGHTS', 'TO', 'HIS', 'COMPANIONS', 'AND', 'IN', 'THE', 'NEXT', 'MOMENT', 'THEY', 'RESOLVED', 'TO', 'TURN', 'BACK', 'AND', 'CARRY', 'HER', 'OFF', 'TO', 'PLEASE', 'RUDOLPHO', 'FOR', 'THE', 'RICH', 'WHO', 'ARE', 'OPEN', 'HANDED', 'ALWAYS', 'FIND', 'PARRICIDES', 'READY', 'TO', 'ENCOURAGE', 'THEIR', 'BAD', 'PROPENSITIES', 'AND', 'THUS', 'TO', 'CONCEIVE', 'THIS', 'WICKED', 'DESIGN', 'TO', 'COMMUNICATE', 'IT', 'APPROVE', 'IT', 'RESOLVE', 'ON', 'RAVISHING', 'LEOCADIA', 'AND', 'TO', 'CARRY', 'THAT', 'DESIGN', 'INTO', 'EFFECT', 'WAS', 'THE', 'WORK', 'OF', 'A', 'MOMENT'] +5639-40744-0004-141: ref=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0004-141: hyp=['THEY', 'DREW', 'THEIR', 'SWORDS', 'HID', 'THEIR', 'FACES', 'IN', 'THE', 'FLAPS', 'OF', 'THEIR', 'CLOAKS', 'TURNED', 'BACK', 'AND', 'SOON', 'CAME', 'IN', 'FRONT', 'OF', 'THE', 'LITTLE', 'PARTY', 'WHO', 'HAD', 'NOT', 'YET', 'DONE', 'GIVING', 'THANKS', 'TO', 'GOD', 'FOR', 'THEIR', 'ESCAPE', 'FROM', 'THOSE', 'AUDACIOUS', 'MEN'] +5639-40744-0005-142: ref=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0005-142: hyp=['FINALLY', 'THE', 'ONE', 'PARTY', 'WENT', 'OFF', 'EXULTING', 'AND', 'THE', 'OTHER', 'WAS', 'LEFT', 'IN', 'DESOLATION', 'AND', 'WOE'] +5639-40744-0006-143: ref=['RODOLFO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'AND', "LEOCADIA'S", 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0006-143: hyp=['RODOLPHO', 'ARRIVED', 'AT', 'HIS', 'OWN', 'HOUSE', 'WITHOUT', 'ANY', 'IMPEDIMENT', 'A', 'LEOCADIUS', 'PARENTS', 'REACHED', 'THEIRS', 'HEART', 'BROKEN', 'AND', 'DESPAIRING'] +5639-40744-0007-144: ref=['MEANWHILE', 'RODOLFO', 'HAD', 'LEOCADIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0007-144: hyp=['MEANWHILE', 'RUDOLPHO', 'HAD', 'LOCALIA', 'SAFE', 'IN', 'HIS', 'CUSTODY', 'AND', 'IN', 'HIS', 'OWN', 'APARTMENT'] +5639-40744-0008-145: ref=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0008-145: hyp=['WHO', 'TOUCHES', 'ME', 'AM', 'I', 'IN', 'BED'] +5639-40744-0009-146: ref=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0009-146: hyp=['MOTHER', 'DEAR', 'FATHER', 'DO', 'YOU', 'HEAR', 'ME'] +5639-40744-0010-147: ref=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] +5639-40744-0010-147: hyp=['IT', 'IS', 'THE', 'ONLY', 'AMENDS', 'I', 'ASK', 'OF', 'YOU', 'FOR', 'THE', 'WRONG', 'YOU', 'HAVE', 'DONE', 'ME'] +5639-40744-0011-148: ref=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0011-148: hyp=['SHE', 'FOUND', 'THE', 'DOOR', 'BUT', 'IT', 'WAS', 'LOCKED', 'OUTSIDE'] +5639-40744-0012-149: ref=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 
'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOUR', 'OF', 'SOME', 'DAMASK', 'HANGINGS', 'IN', 'THE', 'ROOM'] +5639-40744-0012-149: hyp=['SHE', 'SUCCEEDED', 'IN', 'OPENING', 'THE', 'WINDOW', 'AND', 'THE', 'MOONLIGHT', 'SHONE', 'IN', 'SO', 'BRIGHTLY', 'THAT', 'SHE', 'COULD', 'DISTINGUISH', 'THE', 'COLOR', 'OF', 'SOME', 'DAMASK', 'HANGING', 'IN', 'THE', 'ROOM'] +5639-40744-0013-150: ref=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'RATHER', 'THAN', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0013-150: hyp=['SHE', 'SAW', 'THAT', 'THE', 'BED', 'WAS', 'GILDED', 'AND', 'SO', 'RICH', 'THAT', 'IT', 'SEEMED', 'THAT', 'OF', 'A', 'PRINCE', 'THE', 'RATHER', 'THAT', 'OF', 'A', 'PRIVATE', 'GENTLEMAN'] +5639-40744-0014-151: ref=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'SHE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0014-151: hyp=['AMONG', 'OTHER', 'THINGS', 'ON', 'WHICH', 'HE', 'CAST', 'HER', 'EYES', 'WAS', 'A', 'SMALL', 'CRUCIFIX', 'OF', 'SOLID', 'SILVER', 'STANDING', 'ON', 'A', 'CABINET', 'NEAR', 'THE', 'WINDOW'] +5639-40744-0015-152: ref=['THIS', 'PERSON', 'WAS', 'RODOLFO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOT', 'THINKING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0015-152: hyp=['THIS', 'PERSON', 'WAS', 'RIDOLPHO', 'WHO', 'THOUGH', 'HE', 'HAD', 'GONE', 'TO', 'LOOK', 'FOR', 'HIS', 'FRIENDS', 'HAD', 'CHANGED', 'HIS', 'MIND', 'IN', 'THAT', 'RESPECT', 'NOTHING', 'IT', 'ADVISABLE', 'TO', 'ACQUAINT', 'THEM', 'WITH', 'WHAT', 'HAD', 'PASSED', 'BETWEEN', 'HIM', 'AND', 'THE', 'GIRL'] +5639-40744-0016-153: ref=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'HER', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0016-153: hyp=['ON', 'THE', 'CONTRARY', 'HE', 'RESOLVED', 'TO', 'TELL', 'THEM', 'THAT', 'REPENTING', 'OF', 'HIS', 'VIOLENCE', 'AND', 'MOVED', 'BY', 'A', 'TEARS', 'HE', 'HAD', 'ONLY', 'CARRIED', 'HER', 'HALF', 'WAY', 'TOWARDS', 'HIS', 'HOUSE', 'AND', 'THEN', 'LET', 'HER', 'GO'] +5639-40744-0017-154: ref=['CHOKING', 'WITH', 'EMOTION', 'LEOCADI', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0017-154: hyp=['CHOKING', 'WITH', 'EMOTION', 'LUCADIA', 'MADE', 'A', 'SIGN', 'TO', 'HER', 'PARENTS', 'THAT', 'SHE', 'WISHED', 'TO', 'BE', 'ALONE', 'WITH', 'THEM'] +5639-40744-0018-155: ref=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WERE', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAS', 'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0018-155: hyp=['THAT', 'WOULD', 'BE', 'VERY', 'WELL', 'MY', 'CHILD', 'REPLIED', 'HER', 'FATHER', 'IF', 'YOUR', 'PLAN', 'WERE', 'NOT', 'LIABLE', 'TO', 'BE', 'FRUSTRATED', 'BY', 'ORDINARY', 'CUNNING', 'BUT', 'NO', 'DOUBT', 'THIS', 'IMAGE', 'HAD', 
'BEEN', 'ALREADY', 'MISSED', 'BY', 'ITS', 'OWNER', 'AND', 'HE', 'WILL', 'HAVE', 'SET', 'IT', 'DOWN', 'FOR', 'CERTAIN', 'THAT', 'IT', 'WAS', 'TAKEN', 'OUT', 'OF', 'THE', 'ROOM', 'BY', 'THE', 'PERSON', 'HE', 'LOCKED', 'UP', 'THERE'] +5639-40744-0019-156: ref=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0019-156: hyp=['WHAT', 'YOU', 'HAD', 'BEST', 'DO', 'MY', 'CHILD', 'IS', 'TO', 'KEEP', 'IT', 'AND', 'PRAY', 'TO', 'IT', 'THAT', 'SINCE', 'IT', 'WAS', 'A', 'WITNESS', 'TO', 'YOUR', 'UNDOING', 'IT', 'WILL', 'DEIGN', 'TO', 'VINDICATE', 'YOUR', 'CAUSE', 'BY', 'ITS', 'RIGHTEOUS', 'JUDGMENT'] +5639-40744-0020-157: ref=['THUS', 'DID', 'THIS', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'HER', 'FEELINGS'] +5639-40744-0020-157: hyp=['THUS', 'DID', 'THE', 'HUMANE', 'AND', 'RIGHT', 'MINDED', 'FATHER', 'COMFORT', 'HIS', 'UNHAPPY', 'DAUGHTER', 'AND', 'HER', 'MOTHER', 'EMBRACING', 'HER', 'AGAIN', 'DID', 'ALL', 'SHE', 'COULD', 'TO', 'SOOTHE', 'A', 'FEELINGS'] +5639-40744-0021-158: ref=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0021-158: hyp=['SHE', 'MEANWHILE', 'PASSED', 'HER', 'LIFE', 'WITH', 'HER', 'PARENTS', 'IN', 'THE', 'STRICTEST', 'RETIREMENT', 'NEVER', 'LETTING', 'HERSELF', 'BE', 'SEEN', 'BUT', 'SHUNNING', 'EVERY', 'EYE', 'LEST', 'IT', 'SHOULD', 'READ', 'HER', 'MISFORTUNE', 'IN', 'HER', 'FACE'] +5639-40744-0022-159: ref=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AND', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0022-159: hyp=['TIME', 'ROLLED', 'ON', 'THE', 'HOUR', 'OF', 'HER', 'DELIVERY', 'ARRIVED', 'IT', 'TOOK', 'PLACE', 'IN', 'THE', 'UTMOST', 'SECRECY', 'HER', 'MOTHER', 'TAKING', 'UPON', 'HER', 'THE', 'OFFICE', 'OF', 'MIDWIFE', 'AS', 'SHE', 'GAVE', 'BIRTH', 'TO', 'A', 'SON', 'ONE', 'OF', 'THE', 'MOST', 'BEAUTIFUL', 'EVER', 'SEEN'] +5639-40744-0023-160: ref=['WHEN', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSINGS', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0023-160: hyp=['AND', 'THE', 'BOY', 'WALKED', 'THROUGH', 'THE', 'STREETS', 'BLESSINGS', 'WERE', 'SHOWERED', 'UPON', 'HIM', 'BY', 'ALL', 'WHO', 'SAW', 'HIM', 'BLESSING', 'UPON', 'HIS', 'BEAUTY', 'UPON', 'THE', 'MOTHER', 'THAT', 'BORE', 'HIM', 'UPON', 'THE', 'FATHER', 'THAT', 'BEGOT', 'HIM', 'UPON', 'THOSE', 'WHO', 'BROUGHT', 'HIM', 'UP', 'SO', 'WELL'] +5639-40744-0024-161: ref=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 
'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 'HORSEMEN'] +5639-40744-0024-161: hyp=['ONE', 'DAY', 'WHEN', 'THE', 'BOY', 'WAS', 'SENT', 'BY', 'HIS', 'GRANDFATHER', 'WITH', 'A', 'MESSAGE', 'TO', 'A', 'RELATION', 'HE', 'PASSED', 'ALONG', 'A', 'STREET', 'IN', 'WHICH', 'THERE', 'WAS', 'A', 'GREAT', 'CONCOURSE', 'OF', 'HORSEMEN'] +5639-40744-0025-162: ref=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0025-162: hyp=['THE', 'BED', 'SHE', 'TOO', 'WELL', 'REMEMBERED', 'WAS', 'THERE', 'AND', 'ABOVE', 'ALL', 'THE', 'CABINET', 'ON', 'WHICH', 'HAD', 'STOOD', 'THE', 'IMAGE', 'SHE', 'HAD', 'TAKEN', 'AWAY', 'WAS', 'STILL', 'ON', 'THE', 'SAME', 'SPOT'] +5639-40744-0026-163: ref=['LUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DURING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0026-163: hyp=['LOUIS', 'WAS', 'OUT', 'OF', 'DANGER', 'IN', 'A', 'FORTNIGHT', 'IN', 'A', 'MONTH', 'HE', 'ROSE', 'FROM', 'HIS', 'BED', 'AND', 'DREWING', 'ALL', 'THAT', 'TIME', 'HE', 'WAS', 'VISITED', 'DAILY', 'BY', 'HIS', 'MOTHER', 'AND', 'GRANDMOTHER', 'AND', 'TREATED', 'BY', 'THE', 'MASTER', 'AND', 'MISTRESS', 'OF', 'THE', 'HOUSE', 'AS', 'IF', 'HE', 'WAS', 'THEIR', 'OWN', 'CHILD'] +5639-40744-0027-164: ref=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'AS', 'NATURAL', 'AS', 'CRUELTY', 'IS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LEOCADIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0027-164: hyp=['THUS', 'SAYING', 'AND', 'PRESSING', 'THE', 'CRUCIFIX', 'TO', 'HER', 'BREAST', 'SHE', 'FELL', 'FAINTING', 'INTO', 'THE', 'ARMS', 'OF', 'DONA', 'ESTAFFANIA', 'WHO', 'AS', 'A', 'GENTLEWOMAN', 'TO', 'WHOSE', 'SEX', 'PITY', 'IS', 'THE', 'NATURAL', 'AS', 'CRUELTY', 'AS', 'TO', 'MAN', 'INSTANTLY', 'PRESSED', 'HER', 'LIPS', 'TO', 'THOSE', 'OF', 'THE', 'FAINTING', 'GIRL', 'SHEDDING', 'OVER', 'HER', 'SO', 'MANY', 'TEARS', 'THAT', 'THERE', 'NEEDED', 'NO', 'OTHER', 'SPRINKLING', 'OF', 'WATER', 'TO', 'RECOVER', 'LOCATIA', 'FROM', 'HER', 'SWOON'] +5639-40744-0028-165: ref=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DONA', 'ESTAFANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THAT', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0028-165: hyp=['I', 'HAVE', 'GREAT', 'THINGS', 'TO', 'TELL', 'YOU', 'SENOR', 'SAID', 'DORNEST', 'DA', 'FANIA', 'TO', 'HER', 'HUSBAND', 'THE', 'CREAM', 'AND', 'SUBSTANCE', 'OF', 'WHICH', 'IS', 'THIS', 'THE', 'FAINTING', 'GIRL', 'BEFORE', 'YOU', 'IS', 'YOUR', 'DAUGHTER', 'AND', 'THE', 'BOY', 'IS', 'YOUR', 'GRANDSON'] +5639-40744-0029-166: ref=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 
'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0029-166: hyp=['THIS', 'TRUTH', 'WHICH', 'I', 'HAVE', 'LEARNED', 'FROM', 'HER', 'LIPS', 'IS', 'CONFIRMED', 'BY', 'HIS', 'FACE', 'IN', 'WHICH', 'WE', 'HAVE', 'BOTH', 'BEHELD', 'THAT', 'OF', 'OUR', 'SON'] +5639-40744-0030-167: ref=['JUST', 'THEN', 'LEOCADIA', 'CAME', 'TO', 'HERSELF', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINED', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', "LEOCADIA'S", 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0030-167: hyp=['JUST', 'THEN', 'LEOKADIA', 'CAME', 'TO', 'HERSELF', 'AND', 'EMBRACING', 'THE', 'CROSS', 'SEEMED', 'CHANGED', 'INTO', 'A', 'SEA', 'OF', 'TEARS', 'AND', 'THE', 'GENTLEMAN', 'REMAINING', 'IN', 'UTTER', 'BEWILDERMENT', 'UNTIL', 'HIS', 'WIFE', 'HAD', 'REPEATED', 'TO', 'HIM', 'FROM', 'BEGINNING', 'TO', 'END', 'LEUCEDES', 'WHOLE', 'STORY', 'AND', 'HE', 'BELIEVED', 'IT', 'THROUGH', 'THE', 'BLESSED', 'DISPENSATION', 'OF', 'HEAVEN', 'WHICH', 'HAD', 'CONFIRMED', 'IT', 'BY', 'SO', 'MANY', 'CONVINCING', 'TESTIMONIES'] +5639-40744-0031-168: ref=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'AND', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STROLL', 'WITH', 'RODOLFO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ABDUCTION', 'OF', 'A', 'GIRL', 'WHOM', 'RODOLFO', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0031-168: hyp=['SO', 'PERSUASIVE', 'WERE', 'HER', 'ENTREATIES', 'AND', 'SO', 'STRONG', 'HER', 'ASSURANCES', 'THAT', 'NO', 'HARM', 'WHATEVER', 'COULD', 'RESULT', 'TO', 'THEM', 'FROM', 'THE', 'INFORMATION', 'SHE', 'SOUGHT', 'THEY', 'WERE', 'INDUCED', 'TO', 'CONFESS', 'THAT', 'ONE', "SUMMER'S", 'NIGHT', 'THE', 'SAME', 'SHE', 'HAD', 'MENTIONED', 'THEMSELVES', 'IN', 'ANOTHER', 'FRIEND', 'BEING', 'OUT', 'ON', 'A', 'STROLL', 'WITH', 'RODOLPHO', 'THEY', 'HAD', 'BEEN', 'CONCERNED', 'IN', 'THE', 'ADOCTION', 'OF', 'A', 'GIRL', 'WHOM', 'UDOLPHO', 'CARRIED', 'OFF', 'WHILST', 'THE', 'REST', 'OF', 'THEM', 'DETAINED', 'HER', 'FAMILY', 'WHO', 'MADE', 'A', 'GREAT', 'OUTCRY', 'AND', 'WOULD', 'HAVE', 'DEFENDED', 'HER', 'IF', 'THEY', 'COULD'] +5639-40744-0032-169: ref=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0032-169: hyp=['FOR', "GOD'S", 'SAKE', 'MY', 'LADY', 'MOTHER', 'GIVE', 'ME', 'A', 'WIFE', 'WHO', 'WOULD', 'BE', 'AN', 'AGREEABLE', 'COMPANION', 'NOT', 'ONE', 'WHO', 'WILL', 'DISGUST', 'ME', 'SO', 'THAT', 'WE', 'MAY', 
'BOTH', 'BEAR', 'EVENLY', 'AND', 'WITH', 'MUTUAL', 'GOOD', 'WILL', 'THE', 'YOKE', 'IMPOSED', 'ON', 'US', 'BY', 'HEAVEN', 'INSTEAD', 'OF', 'PULLING', 'THIS', 'WAY', 'AND', 'THAT', 'WAY', 'AND', 'FRETTING', 'EACH', 'OTHER', 'TO', 'DEATH'] +5639-40744-0033-170: ref=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'AND', 'ANIMATED', 'SHE', 'LED', 'HER', 'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0033-170: hyp=['HER', 'BEARING', 'WAS', 'GRACEFUL', 'AND', 'ANIMATED', 'SHE', 'LED', 'HER', 'SON', 'BY', 'THE', 'HAND', 'AND', 'BEFORE', 'HER', 'WALKED', 'TWO', 'MAIDS', 'WITH', 'WAX', 'LIGHTS', 'AND', 'SILVER', 'CANDLESTICKS'] +5639-40744-0034-171: ref=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0034-171: hyp=['ALL', 'ROSE', 'TO', 'DO', 'HER', 'REVERENCE', 'AS', 'IF', 'SOMETHING', 'FROM', 'HEAVEN', 'HAD', 'MIRACULOUSLY', 'APPEARED', 'BEFORE', 'THEM', 'BUT', 'GAZING', 'ON', 'HER', 'ENTRANCED', 'WITH', 'ADMIRATION', 'NOT', 'ONE', 'OF', 'THEM', 'WAS', 'ABLE', 'TO', 'ADDRESS', 'A', 'SINGLE', 'WORD', 'TO', 'HER'] +5639-40744-0035-172: ref=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOUR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYED', 'ESTAFANIA'] +5639-40744-0035-172: hyp=['SHE', 'REFLECTED', 'HOW', 'NEAR', 'SHE', 'STOOD', 'TO', 'THE', 'CRISIS', 'WHICH', 'WAS', 'TO', 'DETERMINE', 'WHETHER', 'SHE', 'WAS', 'TO', 'BE', 'BLESSED', 'OR', 'UNHAPPY', 'FOR', 'EVER', 'AND', 'RACKED', 'BY', 'THE', 'INTENSITY', 'OF', 'HER', 'EMOTIONS', 'SHE', 'SUDDENLY', 'CHANGED', 'COLOR', 'HER', 'HEAD', 'DROPPED', 'AND', 'SHE', 'FELL', 'FORWARD', 'IN', 'A', 'SWOON', 'INTO', 'THE', 'ARMS', 'OF', 'THE', 'DISMAYEDESTAFHANIA'] +5639-40744-0036-173: ref=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0036-173: hyp=['HIS', 'MOTHER', 'HAD', 'LEFT', 'HER', 'TO', 'HIM', 'AS', 'BEING', 'HER', 'DESTINED', 'PROTECTOR', 'BUT', 'WHEN', 'SHE', 'SAW', 'THAT', 'HE', 'TOO', 'WAS', 'INSENSIBLE', 'SHE', 'WAS', 'NEAR', 'MAKING', 'A', 'THIRD', 'AND', 'WOULD', 'HAVE', 'DONE', 'SO', 'HAD', 'HE', 'NOT', 'COME', 'TO', 'HIMSELF'] +5639-40744-0037-174: ref=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 'FOR', 'YOU', 'AND', 'THE', 'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0037-174: hyp=['KNOW', 'THEN', 'SON', 'OF', 'MY', 'HEART', 'THAT', 'THIS', 'FAINTING', 'LADY', 'IS', 'YOUR', 'REAL', 'BRIDE', 'I', 'SAY', 'REAL', 'BECAUSE', 'SHE', 'IS', 'THE', 'ONE', 'WHOM', 'YOUR', 'FATHER', 'AND', 'I', 'HAVE', 'CHOSEN', 'FOR', 'YOU', 'AND', 'A', 
'PORTRAIT', 'WAS', 'A', 'PRETENCE'] +5639-40744-0038-175: ref=['JUST', 'AT', 'THE', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THEIR', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'LEOCADIA', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 'HEARTS', 'OF', 'ALL'] +5639-40744-0038-175: hyp=['JUST', 'AT', 'A', 'MOMENT', 'WHEN', 'THE', 'TEARS', 'OF', 'THE', 'PITYING', 'BEHOLDERS', 'FLOWED', 'FASTEST', 'AND', 'THERE', 'EJACULATIONS', 'WERE', 'MOST', 'EXPRESSIVE', 'OF', 'DESPAIR', 'THE', 'OCCASIONA', 'GAVE', 'SIGNS', 'OF', 'RECOVERY', 'AND', 'BROUGHT', 'BACK', 'GLADNESS', 'TO', 'THE', 'HEARTS', 'OF', 'ALL'] +5639-40744-0039-176: ref=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "RODOLFO'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0039-176: hyp=['WHEN', 'SHE', 'CAME', 'TO', 'HER', 'SENSES', 'AND', 'BLUSHING', 'TO', 'FIND', 'HERSELF', 'IN', "GODOLPH'S", 'ARMS', 'WOULD', 'HAVE', 'DISENGAGED', 'HERSELF', 'NO', 'SENORA', 'HE', 'SAID', 'THAT', 'MUST', 'NOT', 'BE', 'STRIVE', 'NOT', 'TO', 'WITHDRAW', 'FROM', 'THE', 'ARMS', 'OF', 'HIM', 'WHO', 'HOLDS', 'YOU', 'IN', 'HIS', 'SOUL'] +5639-40744-0040-177: ref=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'WHEN', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0040-177: hyp=['THIS', 'WAS', 'DONE', 'FOR', 'THE', 'EVENT', 'TOOK', 'PLACE', 'AT', 'A', 'TIME', 'WITH', 'THE', 'CONSENT', 'OF', 'THE', 'PARTIES', 'WAS', 'SUFFICIENT', 'FOR', 'THE', 'CELEBRATION', 'OF', 'A', 'MARRIAGE', 'WITHOUT', 'ANY', 'OF', 'THE', 'PRELIMINARY', 'FORMALITIES', 'WHICH', 'ARE', 'NOW', 'SO', 'PROPERLY', 'REQUIRED'] +5639-40744-0041-178: ref=['NOR', 'WAS', 'RODOLFO', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LEOCADIA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5639-40744-0041-178: hyp=['NOR', 'WAS', 'RIDOLPHAL', 'LESS', 'SURPRISED', 'THAN', 'THEY', 'AND', 'THE', 'BETTER', 'TO', 'ASSURE', 'HIMSELF', 'OF', 'SO', 'WONDERFUL', 'A', 'FACT', 'HE', 'BEGGED', 'LOU', 'KATYA', 'TO', 'GIVE', 'HIM', 'SOME', 'TOKEN', 'WHICH', 'SHOULD', 'MAKE', 'PERFECTLY', 'CLEAR', 'TO', 'HIM', 'THAT', 'WHICH', 'INDEED', 'HE', 'DID', 'NOT', 'DOUBT', 'SINCE', 'IT', 'WAS', 'AUTHENTICATED', 'BY', 'HIS', 'PARENTS'] +5683-32865-0000-2483: ref=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0000-2483: hyp=['YOU', 'KNOW', 'CAPTAIN', 'LAKE'] +5683-32865-0001-2484: ref=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0001-2484: hyp=['SAID', 'LORD', 'CHELFORD', 'ADDRESSING', 'ME'] +5683-32865-0002-2485: ref=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0002-2485: hyp=['HE', 'HAD', 'HIS', 'HAND', 'UPON', "LAKE'S", 'SHOULDER'] +5683-32865-0003-2486: ref=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] 
+5683-32865-0003-2486: hyp=['THEY', 'ARE', 'COUSINS', 'YOU', 'KNOW', 'WE', 'ARE', 'ALL', 'COUSINS'] +5683-32865-0004-2487: ref=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0004-2487: hyp=['WHATEVER', 'LORD', 'CHELFORD', 'SAID', 'MISS', 'BRANDON', 'RECEIVED', 'IT', 'VERY', 'GRACIOUSLY', 'AND', 'EVEN', 'WITH', 'A', 'MOMENTARY', 'SMILE'] +5683-32865-0005-2488: ref=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LAKE', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0005-2488: hyp=['BUT', 'HER', 'GREETING', 'TO', 'CAPTAIN', 'LEEK', 'WAS', 'MORE', 'THAN', 'USUALLY', 'HAUGHTY', 'AND', 'FROZEN', 'AND', 'HER', 'FEATURES', 'I', 'FANCIED', 'PARTICULARLY', 'PROUD', 'AND', 'PALE'] +5683-32865-0006-2489: ref=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0006-2489: hyp=['AT', 'DINNER', 'LAKE', 'WAS', 'EASY', 'AND', 'AMUSING'] +5683-32865-0007-2490: ref=["I'M", 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WYLDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0007-2490: hyp=['I', 'AM', 'GLAD', 'YOU', 'LIKE', 'IT', 'SAYS', 'WILDER', 'CHUCKLING', 'BENIGNANTLY', 'ON', 'IT', 'OVER', 'HIS', 'SHOULDER'] +5683-32865-0008-2491: ref=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0008-2491: hyp=['I', 'BELIEVE', 'I', 'HAVE', 'A', 'LITTLE', 'TASTE', 'THAT', 'WAY', 'THOSE', 'ARE', 'ALL', 'REAL', 'YOU', 'KNOW', 'THOSE', 'JEWELS'] +5683-32865-0009-2492: ref=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0009-2492: hyp=['AND', 'HE', 'PLACED', 'IT', 'IN', 'THAT', "GENTLEMAN'S", 'FINGERS', 'WHO', 'NOW', 'TOOK', 'HIS', 'TURN', 'AT', 'THE', 'LAMP', 'AND', 'CONTEMPLATED', 'THE', 'LITTLE', 'PARALLELLOGRAM', 'WITH', 'A', 'GLEAM', 'OF', 'SLY', 'AMUSEMENT'] +5683-32865-0010-2493: ref=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0010-2493: hyp=['I', 'WAS', 'THINKING', "IT'S", 'VERY', 'LIKE', 'THE', 'ACE', 'OF', 'HEARTS', 'ANSWERED', 'THE', 'CAPTAIN', 'SOFTLY', 'SMILING', 'ON'] +5683-32865-0011-2494: ref=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0011-2494: hyp=['WHEREUPON', 'LAKE', 'LAUGHED', 'QUIETLY', 'STILL', 'LOOKING', 'ON', 'THE', 'ACE', 'OF', 'HEARTS', 'WITH', 'HIS', 'SLY', 'EYES'] +5683-32865-0012-2495: ref=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0012-2495: hyp=['AND', 'WYLDER', 'LAUGHED', 'TOO', 'MORE', 'SUDDENLY', 'AND', 'NOISILY', 'THAN', 'THE', 'HUMOUR', 'OF', 'THE', 'JOKE', 'SEEMED', 'QUITE', 'TO', 'CALL', 
'FOR', 'AND', 'GLANCED', 'A', 'GRIM', 'LOOK', 'FROM', 'THE', 'CORNERS', 'OF', 'HIS', 'EYES', 'ON', 'LAKE', 'BUT', 'THE', 'GALLANT', 'CAPTAIN', 'DID', 'NOT', 'SEEM', 'TO', 'PERCEIVE', 'IT', 'AND', 'AFTER', 'A', 'FEW', 'SECONDS', 'MORE', 'HE', 'HANDED', 'IT', 'VERY', 'INNOCENTLY', 'BACK', 'TO', 'MISSUS', 'DOROTHY', 'ONLY', 'REMARKING'] +5683-32865-0013-2496: ref=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0013-2496: hyp=['DO', 'YOU', 'KNOW', 'LAKE', 'OH', 'I', 'REALLY', "CAN'T", 'TELL', 'BUT', "HE'LL", 'SOON', 'TIRE', 'OF', 'COUNTRY', 'LIFE'] +5683-32865-0014-2497: ref=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0014-2497: hyp=["HE'S", 'NOT', 'A', 'MAN', 'FOR', 'COUNTRY', 'QUARTERS'] +5683-32865-0015-2498: ref=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0015-2498: hyp=['I', 'HAD', 'A', 'HORRID', 'DREAM', 'ABOUT', 'HIM', 'LAST', 'NIGHT', 'THAT'] +5683-32865-0016-2499: ref=['OH', 'I', 'KNOW', "THAT'S", 'LORNE', 'BRANDON'] +5683-32865-0016-2499: hyp=['OH', 'I', 'KNOW', "THAT'S", 'LORN', 'BRANDON'] +5683-32865-0017-2500: ref=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32865-0017-2500: hyp=['ALL', 'THE', 'TIME', 'HE', 'WAS', 'TALKING', 'TO', 'ME', 'HIS', 'ANGRY', 'LITTLE', 'EYES', 'WERE', 'FOLLOWING', 'LAKE'] +5683-32866-0000-2527: ref=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0000-2527: hyp=['MISS', 'LAKE', 'DECLINED', 'THE', 'CARRIAGE', 'TO', 'NIGHT'] +5683-32866-0001-2528: ref=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0001-2528: hyp=['AND', 'HE', 'ADDED', 'SOMETHING', 'STILL', 'LESS', 'COMPLIMENTARY'] +5683-32866-0002-2529: ref=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0002-2529: hyp=['BUT', "DON'T", 'THESE', 'VERY', 'WISE', 'THINGS', 'SOMETIMES', 'TURN', 'OUT', 'VERY', 'FOOLISHLY'] +5683-32866-0003-2530: ref=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0003-2530: hyp=['IN', 'THE', 'MEANTIME', 'I', 'HAD', 'FORMED', 'A', 'NEW', 'IDEA', 'OF', 'HER'] +5683-32866-0004-2531: ref=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERAL', 'SHIP'] +5683-32866-0004-2531: hyp=['BY', 'THIS', 'TIME', 'LORD', 'CHELFORD', 'AND', 'WYLDER', 'RETURNED', 'AND', 'DISGUSTED', 'RATHER', 'WITH', 'MYSELF', 'I', 'RUMINATED', 'ON', 'MY', 'WANT', 'OF', 'GENERALSHIP'] +5683-32866-0005-2532: ref=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0005-2532: hyp=['AND', 'HE', 'MADE', 'A', 'LITTLE', 'DIP', 'OF', 'HIS', 'CANE', 'TOWARDS', 'BRANDON', 'HALL', 'OVER', 'HIS', 'SHOULDER'] +5683-32866-0006-2533: ref=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0006-2533: hyp=['YES', 'SO', 'THEY', 'SAID', 'BUT', 'THAT', 'WOULD', 'I', 'THINK', 'HAVE', 'BEEN', 'WORSE'] +5683-32866-0007-2534: ref=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', "HE'S", 'BEELZEBUB', 'AT', 'ONCE'] +5683-32866-0007-2534: hyp=['IF', 'A', "FELLOW'S", 'BEEN', 'A', 'LITTLE', 'BIT', 'WILD', 'HE', 'IS', 'BEELZEBUB', 'AT', 'ONCE'] +5683-32866-0008-2535: 
ref=["BRACTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0008-2535: hyp=["BROCKTON'S", 'A', 'VERY', 'GOOD', 'FELLOW', 'I', 'CAN', 'ASSURE', 'YOU'] +5683-32866-0009-2536: ref=['I', "DON'T", 'KNOW', 'AND', "CAN'T", 'SAY', 'HOW', 'YOU', 'FINE', 'GENTLEMEN', 'DEFINE', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0009-2536: hyp=['I', "DON'T", 'KNOW', 'ONE', "CAN'T", 'SAY', 'HOW', 'YOU', 'FIND', 'GENTLEMEN', 'TO', 'FIND', 'WICKEDNESS', 'ONLY', 'AS', 'AN', 'OBSCURE', 'FEMALE', 'I', 'SPEAK', 'ACCORDING', 'TO', 'MY', 'LIGHTS', 'AND', 'HE', 'IS', 'GENERALLY', 'THOUGHT', 'THE', 'WICKEDEST', 'MAN', 'IN', 'THIS', 'COUNTY'] +5683-32866-0010-2537: ref=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0010-2537: hyp=['WELL', 'YOU', 'KNOW', 'RADIE', 'WOMEN', 'LIKE', 'WICKED', 'FELLOWS', 'IT', 'IS', 'CONTRAST', 'I', 'SUPPOSE', 'BUT', 'THEY', 'DO', 'AND', "I'M", 'SURE', 'FROM', 'WHAT', 'BRACTON', 'HAS', 'SAID', 'TO', 'ME', 'I', 'KNOW', 'HIM', 'INTIMATELY', 'THAT', 'DORCAS', 'LIKES', 'HIM', 'AND', 'I', "CAN'T", 'CONCEIVE', 'WHY', 'THEY', 'ARE', 'NOT', 'MARRIED'] +5683-32866-0011-2538: ref=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0011-2538: hyp=['THEIR', 'WALK', 'CONTINUED', 'SILENT', 'FOR', 'THE', 'GREATER', 'PART', 'NEITHER', 'WAS', 'QUITE', 'SATISFIED', 'WITH', 'THE', 'OTHER', 'BUT', 'RACHEL', 'AT', 'LAST', 'SAID'] +5683-32866-0012-2539: ref=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0012-2539: hyp=['NOW', "THAT'S", 'IMPOSSIBLE', 'RADIE', 'FOR', 'I', 'REALLY', "DON'T", 'THINK', 'I', 'ONCE', 'THOUGHT', 'OF', 'HIM', 'ALL', 'THIS', 'EVENING', 'EXCEPT', 'JUST', 'WHILE', 'WE', 'WERE', 'TALKING'] +5683-32866-0013-2540: ref=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0013-2540: hyp=['THERE', 'WAS', 'A', 'BRIGHT', 'MOONLIGHT', 'BROKEN', 'BY', 'THE', 'SHADOWS', 'OF', 'OVERHANGING', 'BOUGHS', 'AND', 'WITHERED', 'LEAVES', 'AND', 'THE', 'MOTTLED', 'LIGHTS', 'AND', 'SHADOWS', 'GLIDED', 'ODDLY', 'ACROSS', 'HIS', 'PALE', 'FEATURES'] +5683-32866-0014-2541: ref=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 'MORNING'] +5683-32866-0014-2541: hyp=["DON'T", 'INSULT', 'ME', 'STANLEY', 'BY', 'TALKING', 'AGAIN', 'AS', 'YOU', 'DID', 'THIS', 'MORNING'] +5683-32866-0015-2542: ref=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUNT'] +5683-32866-0015-2542: hyp=['WHAT', 'I', 'SAY', 'IS', 'ALTOGETHER', 'ON', 'YOUR', 'OWN', 'ACCOUNT'] +5683-32866-0016-2543: ref=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'AYE', 'AND', 
'TOO', 'DEEP'] +5683-32866-0016-2543: hyp=['MARK', 'MY', 'WORDS', "YOU'LL", 'FIND', 'HIM', 'TOO', 'STRONG', 'FOR', 'YOU', 'I', 'AND', 'TOO', 'DEEP'] +5683-32866-0017-2544: ref=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0017-2544: hyp=['I', 'AM', 'VERY', 'UNEASY', 'ABOUT', 'IT', 'WHATEVER', 'IT', 'IS', 'I', "CAN'T", 'HELP', 'IT'] +5683-32866-0018-2545: ref=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0018-2545: hyp=['TO', 'MY', 'MIND', 'THERE', 'HAS', 'ALWAYS', 'BEEN', 'SOMETHING', 'INEXPRESSIBLY', 'AWFUL', 'IN', 'FAMILY', 'FEUDS'] +5683-32866-0019-2546: ref=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STEADY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVE', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'A', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'A', 'SPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0019-2546: hyp=['THE', 'MYSTERY', 'OF', 'THEIR', 'ORIGIN', 'THEIR', 'CAPACITY', 'FOR', 'EVOLVING', 'LATENT', 'FACULTIES', 'OF', 'CRIME', 'AND', 'THE', 'STUDY', 'VITALITY', 'WITH', 'WHICH', 'THEY', 'SURVIVED', 'THE', 'HEARSE', 'AND', 'SPEAK', 'THEIR', 'DEEP', 'MOUTHED', 'MALIGNITIES', 'IN', 'EVERY', 'NEW', 'BORN', 'GENERATION', 'HAVE', 'ASSOCIATED', 'THEM', 'SOMEHOW', 'IN', 'MY', 'MIND', 'WITH', 'THE', 'SPELL', 'OF', 'LIFE', 'EXCEEDING', 'AND', 'DISTINCT', 'FROM', 'HUMAN', 'AND', 'ESPECIAL', 'SATANIC', 'ACTION'] +5683-32866-0020-2547: ref=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0020-2547: hyp=['THE', 'FLOOR', 'MORE', 'THAN', 'ANYTHING', 'ELSE', 'SHOWED', 'THE', 'GREAT', 'AGE', 'OF', 'THE', 'ROOM'] +5683-32866-0021-2548: ref=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0021-2548: hyp=['MY', 'BED', 'WAS', 'UNEXCEPTIONABLY', 'COMFORTABLE', 'BUT', 'IN', 'MY', 'THEN', 'MOOD', 'I', 'COULD', 'HAVE', 'WISHED', 'IT', 'A', 'GREAT', 'DEAL', 'MORE', 'MODERN'] +5683-32866-0022-2549: ref=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0022-2549: hyp=['ITS', 'CURTAINS', 'WERE', 'OF', 'THICK', 'AND', 'FADED', 'TAPESTRY'] +5683-32866-0023-2550: ref=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0023-2550: hyp=['ALL', 'THE', 'FURNITURE', 'BELONGED', 'TO', 'OTHER', 'TIMES'] +5683-32866-0024-2551: ref=['I', "SHAN'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0024-2551: hyp=['I', "SHA'N'T", 'TROUBLE', 'YOU', 'ABOUT', 'MY', 'TRAIN', 'OF', 'THOUGHTS', 'OR', 'FANCIES', 'BUT', 'I', 'BEGAN', 'TO', 'FEEL', 'VERY', 'LIKE', 'A', 'GENTLEMAN', 'IN', 'A', 'GHOST', 'STORY', 'WATCHING', 'EXPERIMENTALLY', 'IN', 'A', 'HAUNTED', 'CHAMBER'] +5683-32866-0025-2552: ref=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0025-2552: hyp=['I', 'DID', 'NOT', 'EVEN', 'TAKE', 'THE', 
'PRECAUTION', 'OF', 'SMOKING', 'UP', 'THE', 'CHIMNEY'] +5683-32866-0026-2553: ref=['I', 'BOLDLY', 'LIGHTED', 'MY', 'CHEROOT'] +5683-32866-0026-2553: hyp=['I', 'BOLDLY', 'LIGHTED', 'MY', 'TROUT'] +5683-32866-0027-2554: ref=['A', 'COLD', 'BRIGHT', 'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0027-2554: hyp=['A', 'COLD', 'BRIGHT', 'MOON', 'WAS', 'SHINING', 'WITH', 'CLEAR', 'SHARP', 'LIGHTS', 'AND', 'SHADOWS'] +5683-32866-0028-2555: ref=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0028-2555: hyp=['THE', 'SOMBRE', 'OLD', 'TREES', 'LIKE', 'GIGANTIC', 'HEARSE', 'PLUMES', 'BLACK', 'AND', 'AWFUL'] +5683-32866-0029-2556: ref=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0029-2556: hyp=['SOMEHOW', 'I', 'HAD', 'GROWN', 'NERVOUS'] +5683-32866-0030-2557: ref=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLED', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32866-0030-2557: hyp=['A', 'LITTLE', 'BIT', 'OF', 'PLASTER', 'TUMBLED', 'DOWN', 'THE', 'CHIMNEY', 'AND', 'STARTLED', 'ME', 'CONFOUNDEDLY'] +5683-32879-0000-2501: ref=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0000-2501: hyp=['IT', 'WAS', 'NOT', 'VERY', 'MUCH', 'PAST', 'ELEVEN', 'THAT', 'MORNING', 'WHEN', 'THE', 'PONY', 'CARRIAGE', 'FROM', 'BRANDON', 'DREW', 'UP', 'BEFORE', 'THE', 'LITTLE', 'GARDEN', 'WICKET', 'OF', "REDMAN'S", 'FARM'] +5683-32879-0001-2502: ref=['WELL', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0001-2502: hyp=['WHILE', 'SHE', 'WAS', 'BETTER', 'THOUGH', 'SHE', 'HAD', 'HAD', 'A', 'BAD', 'NIGHT'] +5683-32879-0002-2503: ref=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0002-2503: hyp=['SO', 'THERE', 'CAME', 'A', 'STEP', 'AND', 'A', 'LITTLE', 'RUSTLING', 'OF', 'FEMININE', 'DRAPERIES', 'THE', 'SMALL', 'DOOR', 'OPENED', 'AND', 'RACHEL', 'ENTERED', 'WITH', 'HER', 'HAND', 'EXTENDED', 'AND', 'A', 'PALE', 'SMILE', 'OF', 'WELCOME'] +5683-32879-0003-2504: ref=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0003-2504: hyp=['WOMEN', 'CAN', 'HIDE', 'THEIR', 'PAIN', 'BETTER', 'THAN', 'WE', 'MEN', 'AND', 'BEAR', 'IT', 'BETTER', 'TOO', 'EXCEPT', 'WHEN', 'SHAME', 'DROPS', 'FIRE', 'INTO', 'THE', 'DREADFUL', 'CHALICE'] +5683-32879-0004-2505: ref=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0004-2505: hyp=['BUT', 'POOR', 'RACHEL', 'LAKE', 'HAD', 'MORE', 'THAN', 'THAT', 'STOICAL', 'HYPOCRISY', 'WHICH', 'ENABLES', 'THE', 'TORTURED', 'SPIRITS', 'OF', 'HER', 'SEX', 'TO', 'LIFT', 'A', 'PALE', 'FACE', 'THROUGH', 'THE', 'FLAMES', 'AND', 'SMILE'] +5683-32879-0005-2506: ref=['THIS', 'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0005-2506: hyp=['THIS', 
'TRANSIENT', 'SPRING', 'AND', 'LIGHTING', 'UP', 'ARE', 'BEAUTIFUL', 'A', 'GLAMOUR', 'BEGUILING', 'OUR', 'SENSES'] +5683-32879-0006-2507: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0006-2507: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'SWEETNESS', 'AND', 'FONDNESS', 'IN', 'HER', 'TONES', 'AND', 'MANNER', 'WHICH', 'WAS', 'NEW', 'TO', 'RACHEL', 'AND', 'COMFORTING', 'AND', 'SHE', 'RETURNED', 'THE', 'GREETING', 'AS', 'KINDLY', 'AND', 'FELT', 'MORE', 'LIKE', 'HER', 'FORMER', 'SELF'] +5683-32879-0007-2508: ref=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0007-2508: hyp=["RACHEL'S", 'PALE', 'AND', 'SHARPENED', 'FEATURES', 'AND', 'DILATED', 'EYE', 'STRUCK', 'HER', 'WITH', 'A', 'PAINFUL', 'SURPRISE'] +5683-32879-0008-2509: ref=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0008-2509: hyp=['YOU', 'HAVE', 'BEEN', 'SO', 'ILL', 'MY', 'POOR', 'RACHEL'] +5683-32879-0009-2510: ref=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0009-2510: hyp=['ILL', 'AND', 'TROUBLED', 'DEAR', 'TROUBLED', 'IN', 'MIND', 'AND', 'MISERABLY', 'NERVOUS'] +5683-32879-0010-2511: ref=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0010-2511: hyp=['POOR', 'RACHEL', 'HER', 'NATURE', 'RECOILED', 'FROM', 'DECEIT', 'AND', 'SHE', 'TOLD', 'AT', 'ALL', 'EVENTS', 'AS', 'MUCH', 'OF', 'THE', 'TRUTH', 'AS', 'SHE', 'DARED'] +5683-32879-0011-2512: ref=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'OF', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0011-2512: hyp=['SHE', 'SPOKE', 'WITH', 'A', 'SUDDEN', 'ENERGY', 'WHICH', 'PARTOOK', 'A', 'FEAR', 'AND', 'PASSION', 'AND', 'FLUSHED', 'HER', 'THIN', 'CHEEK', 'AND', 'MADE', 'HER', 'LANGUID', 'EYES', 'FLASH'] +5683-32879-0012-2513: ref=['THANK', 'YOU', 'RACHEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0012-2513: hyp=['THANK', 'YOU', 'RACHAEL', 'MY', 'COUSIN', 'RACHEL', 'MY', 'ONLY', 'FRIEND'] +5683-32879-0013-2514: ref=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WYLDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'IS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0013-2514: hyp=['CHELFORD', 'HAD', 'A', 'NOTE', 'FROM', 'MISTER', 'WILDER', 'THIS', 'MORNING', 'ANOTHER', 'NOTE', 'HIS', 'COMING', 'DELAYED', 'AND', 'SOMETHING', 'OF', 'HIS', 'HAVING', 'TO', 'SEE', 'SOME', 'PERSON', 'WHO', 'WAS', 'ABROAD', 'CONTINUED', 'DORCAS', 'AFTER', 'A', 'LITTLE', 'PAUSE'] +5683-32879-0014-2515: ref=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0014-2515: hyp=['YES', 'SOMETHING', 'EVERYTHING', 'SAID', 'RACHEL', 'HURRIEDLY', 'LOOKING', 'FROWNINGLY', 'AT', 'A', 'FLOWER', 'WHICH', 'SHE', 'WAS', 'TWIRLING', 'IN', 'HER', 'FINGERS'] +5683-32879-0015-2516: ref=['YES', 
'SAID', 'RACHEL'] +5683-32879-0015-2516: hyp=['YES', 'SAID', 'RACHEL'] +5683-32879-0016-2517: ref=['AND', 'THE', 'WAN', 'ORACLE', 'HAVING', 'SPOKEN', 'SHE', 'SATE', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0016-2517: hyp=['AND', 'THE', 'WAN', 'ORACLE', 'HAVING', 'SPOKEN', 'SHE', 'SAT', 'DOWN', 'IN', 'THE', 'SAME', 'SORT', 'OF', 'ABSTRACTION', 'AGAIN', 'BESIDE', 'DORCAS', 'AND', 'SHE', 'LOOKED', 'FULL', 'IN', 'HER', "COUSIN'S", 'EYES'] +5683-32879-0017-2518: ref=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0017-2518: hyp=['OF', 'MARK', 'WYLDER', 'I', 'SAY', 'THIS', 'HIS', 'NAME', 'HAS', 'BEEN', 'FOR', 'YEARS', 'HATEFUL', 'TO', 'ME', 'AND', 'RECENTLY', 'IT', 'HAS', 'BECOME', 'FRIGHTFUL', 'AND', 'YOU', 'WILL', 'PROMISE', 'ME', 'SIMPLY', 'THIS', 'THAT', 'YOU', 'WILL', 'NEVER', 'ASK', 'ME', 'TO', 'SPEAK', 'AGAIN', 'ABOUT', 'HIM'] +5683-32879-0018-2519: ref=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0018-2519: hyp=['IT', 'IS', 'AN', 'ANTIPATHY', 'AN', 'ANTIPATHY', 'I', 'CANNOT', 'GET', 'OVER', 'DEAR', 'DORCAS', 'YOU', 'MAY', 'THINK', 'IT', 'A', 'MADNESS', 'BUT', "DON'T", 'BLAME', 'ME'] +5683-32879-0019-2520: ref=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0019-2520: hyp=['I', 'HAVE', 'VERY', 'FEW', 'TO', 'LOVE', 'ME', 'NOW', 'AND', 'I', 'THOUGHT', 'YOU', 'MIGHT', 'LOVE', 'ME', 'AS', 'I', 'HAVE', 'BEGUN', 'TO', 'LOVE', 'YOU'] +5683-32879-0020-2521: ref=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0020-2521: hyp=['AND', 'SHE', 'THREW', 'HER', 'ARMS', 'ROUND', 'HER', "COUSIN'S", 'NECK', 'AND', 'BRAVE', 'RACHEL', 'AT', 'LAST', 'BURST', 'INTO', 'TEARS'] +5683-32879-0021-2522: ref=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0021-2522: hyp=['DORCAS', 'IN', 'HER', 'STRANGE', 'WAY', 'WAS', 'MOVED'] +5683-32879-0022-2523: ref=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0022-2523: hyp=['I', 'LIKE', 'YOU', 'STILL', 'RACHEL', "I'M", 'SURE', "I'LL", 'ALWAYS', 'LIKE', 'YOU'] +5683-32879-0023-2524: ref=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0023-2524: hyp=['YOU', 'RESEMBLE', 'ME', 'RACHEL', 'YOU', 'ARE', 'FEARLESS', 'AND', 'INFLEXIBLE', 'AND', 'GENEROUS'] +5683-32879-0024-2525: ref=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0024-2525: hyp=['YES', 'RACHEL', 'I', 'DO', 'LOVE', 'YOU'] +5683-32879-0025-2526: ref=['THANK', 'YOU', 'DORCAS', 'DEAR'] +5683-32879-0025-2526: hyp=['THANK', 'YOU', 'DORCAS', 'DEAR'] +61-70968-0000-2179: ref=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0000-2179: hyp=['HE', 'BEGAN', 'A', 'CONFUSED', 'COMPLAINT', 'AGAINST', 'THE', 'WIZARD', 'WHO', 
'HAD', 'VANISHED', 'BEHIND', 'THE', 'CURTAIN', 'ON', 'THE', 'LEFT'] +61-70968-0001-2180: ref=['GIVE', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MUMMERIES', 'CHILD'] +61-70968-0001-2180: hyp=['KIVED', 'NOT', 'SO', 'EARNEST', 'A', 'MIND', 'TO', 'THESE', 'MUMMERIES', 'CHILD'] +61-70968-0002-2181: ref=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] +61-70968-0002-2181: hyp=['A', 'GOLDEN', 'FORTUNE', 'AND', 'A', 'HAPPY', 'LIFE'] +61-70968-0003-2182: ref=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0003-2182: hyp=['HE', 'WAS', 'LIKE', 'UNTO', 'MY', 'FATHER', 'IN', 'A', 'WAY', 'AND', 'YET', 'WAS', 'NOT', 'MY', 'FATHER'] +61-70968-0004-2183: ref=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0004-2183: hyp=['ALSO', 'THERE', 'WAS', 'A', 'STRIPLING', 'PAGE', 'WHO', 'TURNED', 'INTO', 'A', 'MAID'] +61-70968-0005-2184: ref=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0005-2184: hyp=['THIS', 'WAS', 'SO', 'SWEET', 'A', 'LADY', 'SIR', 'AND', 'IN', 'SOME', 'MANNER', 'I', 'DO', 'THINK', 'SHE', 'DIED'] +61-70968-0006-2185: ref=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0006-2185: hyp=['BUT', 'THEN', 'THE', 'PICTURE', 'WAS', 'GONE', 'AS', 'QUICKLY', 'AS', 'IT', 'CAME'] +61-70968-0007-2186: ref=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0007-2186: hyp=['SISTER', 'NELL', 'DO', 'YOU', 'HEAR', 'THESE', 'MARVELS'] +61-70968-0008-2187: ref=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0008-2187: hyp=['TAKE', 'YOUR', 'PLACE', 'AND', 'LET', 'US', 'SEE', 'WHAT', 'THE', 'CRYSTAL', 'CAN', 'SHOW', 'TO', 'YOU'] +61-70968-0009-2188: ref=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0009-2188: hyp=['LIKE', 'AS', 'NOT', 'YOUNG', 'MASTER', 'THOUGH', 'I', 'AM', 'AN', 'OLD', 'MAN'] +61-70968-0010-2189: ref=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0010-2189: hyp=['FORTHWITH', 'ALL', 'RAN', 'TO', 'THE', 'OPENING', 'OF', 'THE', 'TENT', 'TO', 'SEE', 'WHAT', 'MIGHT', 'BE', 'AMISS', 'BUT', 'MASTER', 'WILL', 'WHO', 'PEEPED', 'OUT', 'FIRST', 'NEEDED', 'NO', 'MORE', 'THAN', 'ONE', 'GLANCE'] +61-70968-0011-2190: ref=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0011-2190: hyp=['HE', 'GAVE', 'WAY', 'TO', 'THE', 'OTHERS', 'VERY', 'READILY', 'AND', 'RETREATED', 'UNPERCEIVED', 'BY', 'THE', 'SQUIRE', 'AND', 'MISTRESS', 'FITZOOTH', 'TO', 'THE', 'REAR', 'OF', 'THE', 'TENT'] +61-70968-0012-2191: ref=['CRIES', 'OF', 'A', 'NOTTINGHAM', 'A', 'NOTTINGHAM'] +61-70968-0012-2191: hyp=['CRIES', 'OF', 'UNNOTTINGHAM', 'A', 'NOTTINGHAM'] +61-70968-0013-2192: ref=['BEFORE', 'THEM', 'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLESS', 'AND', 'TERRIFIED'] +61-70968-0013-2192: hyp=['BEFORE', 'THEM', 'FLED', 'THE', 'STROLLER', 'AND', 'HIS', 'THREE', 'SONS', 'CAPLICE', 'AND', 'TERRIFIED'] +61-70968-0014-2193: ref=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 
'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'A', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] +61-70968-0014-2193: hyp=['WHAT', 'IS', 'THE', 'TUMULT', 'AND', 'RIOTING', 'CRIED', 'OUT', 'THE', 'SQUIRE', 'AUTHORITATIVELY', 'AND', 'HE', 'BLEW', 'TWICE', 'ON', 'THE', 'SILVER', 'WHISTLE', 'WHICH', 'HUNG', 'AT', 'HIS', 'BELT'] +61-70968-0015-2194: ref=['NAY', 'WE', 'REFUSED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0015-2194: hyp=['NAY', 'WE', 'WERE', 'FREEZED', 'THEIR', 'REQUEST', 'MOST', 'POLITELY', 'MOST', 'NOBLE', 'SAID', 'THE', 'LITTLE', 'STROLLER'] +61-70968-0016-2195: ref=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0016-2195: hyp=['AND', 'THEN', 'THEY', 'BECAME', 'VEXED', 'AND', 'WOULD', 'HAVE', 'SNATCHED', 'YOUR', 'PURSE', 'FROM', 'US'] +61-70968-0017-2196: ref=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0017-2196: hyp=['I', 'COULD', 'NOT', 'SEE', 'MY', 'BOY', 'INJURED', 'EXCELLENCE', 'FOR', 'BUT', 'DOING', 'HIS', 'DUTY', 'AS', 'ONE', 'OF', "CUMBERLAND'S", 'SONS'] +61-70968-0018-2197: ref=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0018-2197: hyp=['SO', 'I', 'DID', 'PUSH', 'THIS', 'FELLOW'] +61-70968-0019-2198: ref=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AND', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0019-2198: hyp=['IT', 'IS', 'ENOUGH', 'SAID', 'GEORGE', 'GAMEWELL', 'SHARPLY', 'AS', 'HE', 'TURNED', 'UPON', 'THE', 'CROWD'] +61-70968-0020-2199: ref=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0020-2199: hyp=['SHAME', 'ON', 'YOU', 'CITIZENS', 'CRIED', 'HE', 'I', 'BLUSH', 'FOR', 'MY', 'FELLOWS', 'OF', 'NOTTINGHAM'] +61-70968-0021-2200: ref=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0021-2200: hyp=['SURELY', 'WE', 'CAN', 'SUBMIT', 'WITH', 'GOOD', 'GRACE'] +61-70968-0022-2201: ref=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0022-2201: hyp=['TIS', 'FINE', 'FOR', 'YOU', 'TO', 'TALK', 'OLD', 'MAN', 'ANSWERED', 'THE', 'LEAN', 'SULLEN', 'APPRENTICE'] +61-70968-0023-2202: ref=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0023-2202: hyp=['BUT', 'I', 'WRESTLED', 'WITH', 'THIS', 'FELLOW', 'AND', 'DO', 'KNOW', 'THAT', 'HE', 'PLAYED', 'UNFAIRLY', 'IN', 'THE', 'SECOND', 'BOUT'] +61-70968-0024-2203: ref=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENCE', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PURSE', 'IN', 'CONSOLATION'] +61-70968-0024-2203: hyp=['SPOKE', 'THE', 'SQUIRE', 'LOSING', 'ALL', 'PATIENT', 'AND', 'IT', 'WAS', 'TO', 'YOU', 'THAT', 'I', 'GAVE', 'ANOTHER', 'PERSON', 'CONSOLATION'] +61-70968-0025-2204: ref=['COME', 'TO', 'ME', 'MEN', 'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0025-2204: hyp=['COME', 'TO', 'ME', 'MEN', 'HERE', 'HERE', 'HE', 'RAISED', 'HIS', 'VOICE', 'STILL', 'LOUDER'] +61-70968-0026-2205: ref=['THE', 'STROLLERS', 'TOOK', 'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HEARTY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0026-2205: hyp=['THE', 'STROLLERS', 'TOOK', 
'THEIR', 'PART', 'IN', 'IT', 'WITH', 'HEARTY', 'ZEST', 'NOW', 'THAT', 'THEY', 'HAD', 'SOME', 'CHANCE', 'OF', 'BEATING', 'OFF', 'THEIR', 'FOES'] +61-70968-0027-2206: ref=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DID', 'THESE', 'TWO', 'COMPORT', 'THEMSELVES'] +61-70968-0027-2206: hyp=['ROBIN', 'AND', 'THE', 'LITTLE', 'TUMBLER', 'BETWEEN', 'THEM', 'TRIED', 'TO', 'FORCE', 'THE', 'SQUIRE', 'TO', 'STAND', 'BACK', 'AND', 'VERY', 'VALIANTLY', 'DID', 'THESE', 'TWO', 'COMPORT', 'THEMSELVES'] +61-70968-0028-2207: ref=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'THE', 'NOTTINGHAM', 'APPRENTICE', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0028-2207: hyp=['THE', 'HEAD', 'AND', 'CHIEF', 'OF', 'THE', 'RIOT', 'THE', 'NOTTINGHAM', 'APPRENTICED', 'WITH', 'CLENCHED', 'FISTS', 'THREATENED', 'MONTFICHET'] +61-70968-0029-2208: ref=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0029-2208: hyp=['THE', 'SQUIRE', 'HELPED', 'TO', 'THRUST', 'THEM', 'ALL', 'IN', 'AND', 'ENTERED', 'SWIFTLY', 'HIMSELF'] +61-70968-0030-2209: ref=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0030-2209: hyp=['NOW', 'BE', 'SILENT', 'ON', 'YOUR', 'LIVES', 'HE', 'BEGAN', 'BUT', 'THE', 'CAPTURED', 'APPRENTICE', 'SET', 'UP', 'AN', 'INSTANT', 'SHOUT'] +61-70968-0031-2210: ref=['SILENCE', 'YOU', 'KNAVE', 'CRIED', 'MONTFICHET'] +61-70968-0031-2210: hyp=['SILENCE', 'YOU', 'NAVE', 'CRIED', 'MONTFICHET'] +61-70968-0032-2211: ref=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0032-2211: hyp=['HE', 'FELT', 'FOR', 'AND', 'FOUND', 'THE', "WIZARD'S", 'BLACK', 'CLOTH', 'THE', 'SQUIRE', 'WAS', 'QUITE', 'OUT', 'OF', 'BREATH'] +61-70968-0033-2212: ref=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0033-2212: hyp=['THRUSTING', 'OPEN', 'THE', 'PROPER', 'ENTRANCE', 'OF', 'THE', 'TENT', 'ROBIN', 'SUDDENLY', 'RUSHED', 'FORTH', 'WITH', 'HIS', 'BURDEN', 'WITH', 'A', 'GREAT', 'SHOUT'] +61-70968-0034-2213: ref=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0034-2213: hyp=['A', 'MONTFICHET', 'A', 'MONTFICHET', 'GAMEWELL', 'TO', 'THE', 'RESCUE'] +61-70968-0035-2214: ref=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', "ROBIN'S", 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0035-2214: hyp=['TAKING', 'ADVANTAGE', 'OF', 'THIS', 'THE', "SQUIRE'S", 'FEW', 'MEN', 'REDOUBLED', 'THEIR', 'EFFORTS', 'AND', 'ENCOURAGED', 'BY', 'ROBINS', 'AND', 'THE', 'LITTLE', "STROLLER'S", 'CRIES', 'FOUGHT', 'THEIR', 'WAY', 'TO', 'HIM'] +61-70968-0036-2215: ref=['GEORGE', 'MONTFICHET', 'WILL', 'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0036-2215: hyp=['GEORGE', 'MONTFICHET', 'WILL', 'NEVER', 'FORGET', 'THIS', 'DAY'] +61-70968-0037-2216: ref=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0037-2216: hyp=['WHAT', 'IS', 'YOUR', 'NAME', 'LORDING', 'ASKED', 'THE', 'LITTLE', 'STROLLER', 'PRESENTLY'] +61-70968-0038-2217: ref=['ROBIN', 'FITZOOTH'] 
+61-70968-0038-2217: hyp=['ROBIN', 'FITZOOTH'] +61-70968-0039-2218: ref=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0039-2218: hyp=['AND', 'MINE', 'IS', 'WILL', 'STUTELEY', 'SHALL', 'WE', 'BE', 'COMRADES'] +61-70968-0040-2219: ref=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 'ANSWERED', 'ROBIN'] +61-70968-0040-2219: hyp=['RIGHT', 'WILLINGLY', 'FOR', 'BETWEEN', 'US', 'WE', 'HAVE', 'WON', 'THE', 'BATTLE', 'ANSWERED', 'ROBIN'] +61-70968-0041-2220: ref=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0041-2220: hyp=['I', 'LIKE', 'YOU', 'WILL', 'YOU', 'ARE', 'THE', 'SECOND', 'WILL', 'THAT', 'I', 'HAVE', 'MET', 'AND', 'LIKED', 'WITHIN', 'TWO', 'DAYS', 'IS', 'THERE', 'A', 'SIGN', 'IN', 'THAT'] +61-70968-0042-2221: ref=['MONTFICHET', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0042-2221: hyp=['MARTFICHERE', 'CALLED', 'OUT', 'FOR', 'ROBIN', 'TO', 'GIVE', 'HIM', 'AN', 'ARM'] +61-70968-0043-2222: ref=['FRIENDS', 'SAID', 'MONTFICHET', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0043-2222: hyp=['FRIENDS', 'SAID', 'MONTFICHE', 'FAINTLY', 'TO', 'THE', 'WRESTLERS', 'BEAR', 'US', 'ESCORT', 'SO', 'FAR', 'AS', 'THE', "SHERIFF'S", 'HOUSE'] +61-70968-0044-2223: ref=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0044-2223: hyp=['IT', 'WILL', 'NOT', 'BE', 'SAFE', 'FOR', 'YOU', 'TO', 'STAY', 'HERE', 'NOW'] +61-70968-0045-2224: ref=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'AND', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0045-2224: hyp=['PRAY', 'FOLLOW', 'US', 'WITH', 'MINE', 'IN', 'MY', 'LORD', "SHERIFF'S", 'MEN'] +61-70968-0046-2225: ref=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0046-2225: hyp=['NOTTINGHAM', 'CASTLE', 'WAS', 'REACHED', 'AND', 'ADMITTANCE', 'WAS', 'DEMANDED'] +61-70968-0047-2226: ref=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0047-2226: hyp=['MASTER', 'MONCEUX', 'THE', 'SHERIFF', 'OF', 'NOTTINGHAM', 'WAS', 'MIGHTILY', 'PUT', 'ABOUT', 'WHEN', 'TOLD', 'OF', 'THE', 'RIOTING'] +61-70968-0048-2227: ref=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0048-2227: hyp=['AND', 'HENRY', 'MIGHT', 'RETURN', 'TO', 'ENGLAND', 'AT', 'ANY', 'MOMENT'] +61-70968-0049-2228: ref=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0049-2228: hyp=['HAVE', 'YOUR', 'WILL', 'CHILD', 'IF', 'THE', 'BOY', 'ALSO', 'WILLS', 'IT', 'MONTFICHET', 'ANSWERED', 'FEELING', 'TOO', 'ILL', 'TO', 'OPPOSE', 'ANYTHING', 'VERY', 'STRONGLY', 'JUST', 'THEN'] +61-70968-0050-2229: ref=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 'HIS', 'ARM'] +61-70968-0050-2229: hyp=['HE', 'MADE', 'AN', 'EFFORT', 'TO', 'HIDE', 'HIS', 'CONDITION', 'FROM', 'THEM', 'ALL', 'AND', 'ROBIN', 'FELT', 'HIS', 'FINGERS', 'TIGHTEN', 'UPON', 'HIS', 'ARM'] +61-70968-0051-2230: ref=['BEG', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0051-2230: 
hyp=['BEGGED', 'ME', 'A', 'ROOM', 'OF', 'THE', 'SHERIFF', 'CHILD', 'QUICKLY'] +61-70968-0052-2231: ref=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'SLEEVE'] +61-70968-0052-2231: hyp=['BUT', 'WHO', 'IS', 'THIS', 'FELLOW', 'PLUCKING', 'AT', 'YOUR', 'STEVE'] +61-70968-0053-2232: ref=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0053-2232: hyp=['HE', 'IS', 'MY', 'ESQUIRE', 'EXCELLENCY', 'RETURNED', 'ROBIN', 'WITH', 'DIGNITY'] +61-70968-0054-2233: ref=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0054-2233: hyp=['MISTRESS', 'FITZOOTH', 'HAD', 'BEEN', 'CARRIED', 'OFF', 'BY', 'THE', "SHERIFF'S", 'DAUGHTER', 'AND', 'HER', 'MAIDS', 'AS', 'SOON', 'AS', 'THEY', 'HAD', 'ENTERED', 'THE', 'HOUSE', 'SO', 'THAT', 'ROBIN', 'ALONE', 'HAD', 'THE', 'CARE', 'OF', 'MONTFICHET'] +61-70968-0055-2234: ref=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0055-2234: hyp=['ROBIN', 'WAS', 'GLAD', 'WHEN', 'AT', 'LENGTH', 'THEY', 'WERE', 'LEFT', 'TO', 'THEIR', 'OWN', 'DEVICES'] +61-70968-0056-2235: ref=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0056-2235: hyp=['THE', 'WINE', 'DID', 'CERTAINLY', 'BRING', 'BACK', 'THE', 'COLOR', 'TO', 'THE', "SQUIRE'S", 'CHEEKS'] +61-70968-0057-2236: ref=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAMEWELL', 'LAD', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0057-2236: hyp=['THESE', 'ESCAPADES', 'ARE', 'NOT', 'FOR', 'OLD', 'GAME', 'WELL', 'LED', 'HIS', 'DAY', 'HAS', 'COME', 'TO', 'TWILIGHT'] +61-70968-0058-2237: ref=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0058-2237: hyp=['WILL', 'YOU', 'FORGIVE', 'ME', 'NOW'] +61-70968-0059-2238: ref=['IT', 'WILL', 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0059-2238: hyp=["IT'LL", 'BE', 'NO', 'DISAPPOINTMENT', 'TO', 'ME'] +61-70968-0060-2239: ref=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0060-2239: hyp=['NO', 'THANKS', 'I', 'AM', 'GLAD', 'TO', 'GIVE', 'YOU', 'SUCH', 'EASY', 'HAPPINESS'] +61-70968-0061-2240: ref=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0061-2240: hyp=['YOU', 'ARE', 'A', 'WORTHY', 'LEECH', 'WILL', 'PRESENTLY', 'WHISPERED', 'ROBIN', 'THE', 'WINE', 'HAS', 'WORKED', 'A', 'MARVEL'] +61-70968-0062-2241: ref=['AY', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70968-0062-2241: hyp=['I', 'AND', 'SHOW', 'YOU', 'SOME', 'PRETTY', 'TRICKS'] +61-70970-0000-2242: ref=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0000-2242: hyp=['YOUNG', 'FITZOOTH', 'HAD', 'BEEN', 'COMMANDED', 'TO', 'HIS', "MOTHER'S", 'CHAMBER', 'SO', 'SOON', 'AS', 'HE', 'HAD', 'COME', 'OUT', 'FROM', 'HIS', 'CONVERSE', 'WITH', 'THE', 'SQUIRE'] +61-70970-0001-2243: ref=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0001-2243: hyp=['THERE', 'BEFELL', 'AN', 'ANXIOUS', 'INTERVIEW', 'MISTRESS', 'FITZOOTH', 
'ARGUING', 'FOR', 'AND', 'AGAINST', 'THE', "SQUIRE'S", 'PROJECT', 'IN', 'A', 'BREATH'] +61-70970-0002-2244: ref=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0002-2244: hyp=['MOST', 'OF', 'ALL', 'ROBIN', 'THOUGHT', 'OF', 'HIS', 'FATHER', 'WHAT', 'WOULD', 'HE', 'COUNSEL'] +61-70970-0003-2245: ref=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0003-2245: hyp=['IF', 'FOR', 'A', 'WHIM', 'YOU', 'BEGGAR', 'YOURSELF', 'I', 'CANNOT', 'STAY', 'YOU'] +61-70970-0004-2246: ref=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WEAR', "MONTFICHET'S", 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', "NE'ER", 'DISGRACE', 'OUR', 'SCUTCHEON', 'I', 'WARRANT', 'ME'] +61-70970-0004-2246: hyp=['BUT', 'TAKE', 'IT', 'WHILST', 'I', 'LIVE', 'AND', 'WHERE', "MONTFICHET'S", 'SHIELD', 'IN', 'THE', 'DAYS', 'WHEN', 'MY', 'EYES', 'CAN', 'BE', 'REJOICED', 'BY', 'SO', 'BRAVE', 'A', 'SIGHT', 'FOR', 'YOU', 'WILL', 'NEVER', 'DISGRACE', 'OUR', 'STATUNE', 'I', 'WARRANT', 'ME'] +61-70970-0005-2247: ref=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0005-2247: hyp=['THE', 'LAD', 'HAD', 'CHECKED', 'HIM', 'THEN'] +61-70970-0006-2248: ref=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0006-2248: hyp=['NEVER', 'THAT', 'SIR', 'HE', 'HAD', 'SAID'] +61-70970-0007-2249: ref=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0007-2249: hyp=['HE', 'WAS', 'IN', 'DEEP', 'CONVERSE', 'WITH', 'THE', 'CLERK', 'AND', 'ENTERED', 'THE', 'HALL', 'HOLDING', 'HIM', 'BY', 'THE', 'ARM'] +61-70970-0008-2250: ref=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0008-2250: hyp=['NOW', 'TO', 'BED', 'BOY'] +61-70970-0009-2251: ref=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0009-2251: hyp=['TIS', 'LATE', 'AND', 'I', 'GO', 'MYSELF', 'WITHIN', 'A', 'SHORT', 'SPACE'] +61-70970-0010-2252: ref=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'E', 'E', 'N'] +61-70970-0010-2252: hyp=['DISMISS', 'YOUR', 'SQUIRE', 'ROBIN', 'AND', 'BID', 'ME', 'GOOD', 'EEN'] +61-70970-0011-2253: ref=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0011-2253: hyp=['AS', 'ANY', 'IN', 'ENGLAND', 'I', 'WOULD', 'SAY', 'SAID', 'GAMEWELL', 'PROUDLY', 'THAT', 'IS', 'IN', 'HIS', 'DAY'] +61-70970-0012-2254: ref=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0012-2254: hyp=['YET', 'HE', 'WILL', 'TEACH', 'YOU', 'A', 'FEW', 'TRICKS', 'WHEN', 'MORNING', 'IS', 'COME'] +61-70970-0013-2255: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0013-2255: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'ALTER', 'HIS', 'SLEEPING', 'ROOM', 'TO', 'ONE', 'NEARER', 'TO', "GAMEWELL'S", 'CHAMBER'] +61-70970-0014-2256: ref=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] +61-70970-0014-2256: hyp=['PRESENTLY', 'HE', 'CROSSED', 'THE', 'FLOOR', 'OF', 'HIS', 'ROOM', 'WITH', 'DECIDED', 'STEP'] +61-70970-0015-2257: ref=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 
'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0015-2257: hyp=['WILL', 'CRIED', 'HE', 'SOFTLY', 'AND', 'STUTELEY', 'WHO', 'HAD', 'CHOSEN', 'HIS', 'COUCH', 'ACROSS', 'THE', 'DOOR', 'OF', 'HIS', 'YOUNG', "MASTER'S", 'CHAMBER', 'SPRANG', 'UP', 'AT', 'ONCE', 'IN', 'ANSWER'] +61-70970-0016-2258: ref=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0016-2258: hyp=['WE', 'WILL', 'GO', 'OUT', 'TOGETHER', 'TO', 'THE', 'BOWER', 'THERE', 'IS', 'A', 'WAY', 'DOWN', 'TO', 'THE', 'COURT', 'FROM', 'MY', 'WINDOW'] +61-70970-0017-2259: ref=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0017-2259: hyp=['REST', 'AND', 'BE', 'STILL', 'UNTIL', 'I', 'WARN', 'YOU'] +61-70970-0018-2260: ref=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0018-2260: hyp=['THE', 'HOURS', 'PASSED', 'WEARILY', 'BY', 'AND', 'MOVEMENT', 'COULD', 'YET', 'BE', 'HEARD', 'ABOUT', 'THE', 'HALL'] +61-70970-0019-2261: ref=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0019-2261: hyp=['AT', 'LAST', 'ALL', 'WAS', 'QUIET', 'AND', 'BLACK', 'IN', 'THE', 'COURTYARD', 'OF', 'GAMEWELL'] +61-70970-0020-2262: ref=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0020-2262: hyp=['WILL', 'WHISPERED', 'ROBIN', 'OPENING', 'HIS', 'DOOR', 'AS', 'HE', 'SPOKE', 'ARE', 'YOU', 'READY'] +61-70970-0021-2263: ref=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0021-2263: hyp=['THEY', 'THEN', 'RENEWED', 'THEIR', 'JOURNEY', 'AND', 'UNDER', 'THE', 'BETTER', 'LIGHT', 'MADE', 'A', 'SAFE', 'CROSSING', 'OF', 'THE', 'STABLE', 'ROOFS'] +61-70970-0022-2264: ref=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0022-2264: hyp=['ROBIN', 'ENTERED', 'THE', 'HUT', 'DRAGGING', 'THE', 'UNWILLING', 'ESQUIRE', 'AFTER', 'HIM'] +61-70970-0023-2265: ref=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0023-2265: hyp=['BE', 'NOT', 'SO', 'FOOLISH', 'FRIEND', 'SAID', 'FITZOOTH', 'CROSSLY'] +61-70970-0024-2266: ref=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0024-2266: hyp=['THEY', 'MOVED', 'THEREAFTER', 'CAUTIOUSLY', 'ABOUT', 'THE', 'HUT', 'GROPING', 'BEFORE', 'AND', 'ABOUT', 'THEM', 'TO', 'FIND', 'SOMETHING', 'TO', 'SHOW', 'THAT', 'WARRENTON', 'HAD', 'FULFILLED', 'HIS', 'MISSION'] +61-70970-0025-2267: ref=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0025-2267: hyp=['THEY', 'WERE', 'UPON', 'THE', 'VERGE', 'OF', 'AN', 'OPEN', 'TRAP', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'HUT', 'AND', 'STUTELEY', 'HAD', 'TRIPPED', 'OVER', 'THE', 'EDGE', 'OF', 'THE', 'REVERSED', 'FLAP', 'MOUTH', 'OF', 'THIS', 'PIT'] +61-70970-0026-2268: ref=["FITZOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] 
+61-70970-0026-2268: hyp=["FITZOOTH'S", 'HAND', 'RESTED', 'AT', 'LAST', 'UPON', 'THE', 'TOP', 'RUNG', 'OF', 'A', 'LADDER', 'AND', 'SLOWLY', 'THE', 'TRUTH', 'CAME', 'TO', 'HIM'] +61-70970-0027-2269: ref=['ROBIN', 'CAREFULLY', 'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0027-2269: hyp=['ROBIN', 'CAREFULLY', 'DESCENDED', 'THE', 'LADDER', 'AND', 'FOUND', 'HIMSELF', 'SOON', 'UPON', 'FIRM', 'ROCKY', 'GROUND'] +61-70970-0028-2270: ref=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0028-2270: hyp=['STUTELEY', 'WAS', 'BY', 'HIS', 'SIDE', 'IN', 'A', 'FLASH', 'AND', 'THEN', 'THEY', 'BOTH', 'BEGAN', 'FEELING', 'ABOUT', 'THEM', 'TO', 'ASCERTAIN', 'THE', 'SHAPE', 'AND', 'CHARACTER', 'OF', 'THIS', 'VAULT'] +61-70970-0029-2271: ref=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0029-2271: hyp=['FROM', 'THE', 'BLACKNESS', 'BEHIND', 'THE', 'LIGHT', 'THEY', 'HEARD', 'A', 'VOICE', "WARRENTON'S"] +61-70970-0030-2272: ref=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0030-2272: hyp=['SAVE', 'ME', 'MASTERS', 'BUT', 'YOU', 'STARTLED', 'ME', 'RARELY'] +61-70970-0031-2273: ref=['CRIED', 'HE', 'WAVING', 'THE', 'LANTHORN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0031-2273: hyp=['CRIED', 'HE', 'WAVING', 'THE', 'LANTERN', 'BEFORE', 'HIM', 'TO', 'MAKE', 'SURE', 'THAT', 'THESE', 'WERE', 'NO', 'GHOSTS', 'IN', 'FRONT', 'OF', 'HIM'] +61-70970-0032-2274: ref=['ENQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICIONS', 'STILL', 'UPON', 'HIM'] +61-70970-0032-2274: hyp=['INQUIRED', 'ROBIN', 'WITH', 'HIS', 'SUSPICION', 'STILL', 'UPON', 'HIM'] +61-70970-0033-2275: ref=['TRULY', 'SUCH', 'A', 'HORSE', 'SHOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0033-2275: hyp=['TRULY', 'SUCH', 'A', 'HORSE', 'WOULD', 'BE', 'WORTH', 'MUCH', 'IN', 'NOTTINGHAM', 'FAIR'] +61-70970-0034-2276: ref=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0034-2276: hyp=['NAY', 'NAY', 'LORDING', 'ANSWERED', 'WARRENTON', 'WITH', 'A', 'HALF', 'LAUGH'] +61-70970-0035-2277: ref=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "GEOFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0035-2277: hyp=['WARRENTON', 'SPOKE', 'THUS', 'WITH', 'SIGNIFICANCE', 'TO', 'SHOW', 'ROBIN', 'THAT', 'HE', 'WAS', 'NOT', 'TO', 'THINK', "JEFFREY'S", 'CLAIMS', 'TO', 'THE', 'ESTATE', 'WOULD', 'BE', 'PASSED', 'BY'] +61-70970-0036-2278: ref=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBORING', 'THEM'] +61-70970-0036-2278: hyp=['ROBIN', 'FITZOOTH', 'SAW', 'THAT', 'HIS', 'DOUBTS', 'OF', 'WARRENTON', 'HAD', 'BEEN', 'UNFAIR', 'AND', 'HE', 'BECAME', 'ASHAMED', 'OF', 'HIMSELF', 'FOR', 'HARBOURING', 'THEM'] +61-70970-0037-2279: ref=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'A', 'GOOD', 'FELLOWSHIP', 'WAS', 'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0037-2279: hyp=['HIS', 'TONES', 'RANG', 'PLEASANTLY', 'ON', "WARRENTON'S", 'EARS', 'AND', 'FORTHWITH', 'THE', 'GOOD', 'FELLOWSHIP', 'WAS', 
'HERALDED', 'BETWEEN', 'THEM'] +61-70970-0038-2280: ref=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGE', 'WAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANCE', 'TO', 'SHERWOOD', 'AND', 'THAT', 'GEOFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 'FOREST'] +61-70970-0038-2280: hyp=['THE', 'OLD', 'SERVANT', 'TOLD', 'HIM', 'QUIETLY', 'AS', 'THEY', 'CREPT', 'BACK', 'TO', 'GAMEWELL', 'THAT', 'THIS', 'PASSAGEWAY', 'LED', 'FROM', 'THE', 'HUT', 'IN', 'THE', 'PLEASANTS', 'TO', 'SHERWOOD', 'AND', 'THAT', 'JEFFREY', 'FOR', 'THE', 'TIME', 'WAS', 'HIDING', 'WITH', 'THE', 'OUTLAWS', 'IN', 'THE', 'FOREST'] +61-70970-0039-2281: ref=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0039-2281: hyp=['HE', 'IMPLORES', 'US', 'TO', 'BE', 'DISCREET', 'AS', 'THE', 'GRAVE', 'IN', 'THIS', 'MATTER', 'FOR', 'IN', 'SOOTH', 'HIS', 'LIFE', 'IS', 'IN', 'THE', 'HOLLOW', 'OF', 'OUR', 'HANDS'] +61-70970-0040-2282: ref=['THEY', 'REGAINED', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +61-70970-0040-2282: hyp=['THEY', 'REGAIN', 'THEIR', 'APARTMENT', 'APPARENTLY', 'WITHOUT', 'DISTURBING', 'THE', 'HOUSEHOLD', 'OF', 'GAMEWELL'] +672-122797-0000-1529: ref=['OUT', 'IN', 'THE', 'WOODS', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0000-1529: hyp=['OUT', 'IN', 'THE', 'WOODS', 'STOOD', 'A', 'NICE', 'LITTLE', 'FIR', 'TREE'] +672-122797-0001-1530: ref=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FIRS'] +672-122797-0001-1530: hyp=['THE', 'PLACE', 'HE', 'HAD', 'WAS', 'A', 'VERY', 'GOOD', 'ONE', 'THE', 'SUN', 'SHONE', 'ON', 'HIM', 'AS', 'TO', 'FRESH', 'AIR', 'THERE', 'WAS', 'ENOUGH', 'OF', 'THAT', 'AND', 'ROUND', 'HIM', 'GREW', 'MANY', 'LARGE', 'SIZED', 'COMRADES', 'PINES', 'AS', 'WELL', 'AS', 'FURS'] +672-122797-0002-1531: ref=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'AND', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0002-1531: hyp=['HE', 'DID', 'NOT', 'THINK', 'OF', 'THE', 'WARM', 'SUN', 'AND', 'OF', 'THE', 'FRESH', 'AIR', 'HE', 'DID', 'NOT', 'CARE', 'FOR', 'THE', 'LITTLE', 'COTTAGE', 'CHILDREN', 'THAT', 'RAN', 'ABOUT', 'IN', 'PRATTLED', 'WHEN', 'THEY', 'WERE', 'IN', 'THE', 'WOODS', 'LOOKING', 'FOR', 'WILD', 'STRAWBERRIES'] +672-122797-0003-1532: ref=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0003-1532: hyp=['BUT', 'THIS', 'WAS', 'WHAT', 'THE', 'TREE', 'COULD', 'NOT', 'BEAR', 'TO', 'HEAR'] +672-122797-0004-1533: ref=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 'LITTLE', 'TREE'] +672-122797-0004-1533: hyp=['IN', 'WINTER', 'WHEN', 'THE', 'SNOW', 'LAY', 'GLITTERING', 'ON', 'THE', 'GROUND', 'A', 'HARE', 'WOULD', 'OFTEN', 'COME', 'LEAPING', 'ALONG', 'AND', 'JUMP', 'RIGHT', 'OVER', 'THE', 
'LITTLE', 'TREE'] +672-122797-0005-1534: ref=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0005-1534: hyp=['OH', 'THAT', 'MADE', 'HIM', 'SO', 'ANGRY'] +672-122797-0006-1535: ref=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 'WORLD'] +672-122797-0006-1535: hyp=['TO', 'GROW', 'AND', 'GROW', 'TO', 'GET', 'OLDER', 'AND', 'BE', 'TALL', 'THOUGHT', 'THE', 'TREE', 'THAT', 'AFTER', 'ALL', 'IS', 'THE', 'MOST', 'DELIGHTFUL', 'THING', 'IN', 'THE', 'WORLD'] +672-122797-0007-1536: ref=['IN', 'AUTUMN', 'THE', 'WOOD', 'CUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0007-1536: hyp=['IN', 'AUTUMN', 'THE', 'WOODCUTTERS', 'ALWAYS', 'CAME', 'AND', 'FELLED', 'SOME', 'OF', 'THE', 'LARGEST', 'TREES'] +672-122797-0008-1537: ref=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZE', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNISED', 'AND', 'THEN', 'THEY', 'WERE', 'LAID', 'IN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0008-1537: hyp=['THIS', 'HAPPENED', 'EVERY', 'YEAR', 'AND', 'THE', 'YOUNG', 'FIR', 'TREE', 'THAT', 'HAD', 'NOW', 'GROWN', 'TO', 'A', 'VERY', 'COMELY', 'SIZED', 'TREMBLED', 'AT', 'THE', 'SIGHT', 'FOR', 'THE', 'MAGNIFICENT', 'GREAT', 'TREES', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'NOISE', 'AND', 'CRACKING', 'THE', 'BRANCHES', 'WERE', 'LOPPED', 'OFF', 'AND', 'THE', 'TREES', 'LOOKED', 'LONG', 'AND', 'BARE', 'THEY', 'WERE', 'HARDLY', 'TO', 'BE', 'RECOGNIZED', 'AND', 'THEN', 'THEY', 'WERE', 'LAID', 'IN', 'CARTS', 'AND', 'THE', 'HORSES', 'DRAGGED', 'THEM', 'OUT', 'OF', 'THE', 'WOOD'] +672-122797-0009-1538: ref=['HAVE', 'YOU', 'NOT', 'MET', 'THEM', 'ANYWHERE'] +672-122797-0009-1538: hyp=['HAVE', 'YOU', 'NOT', 'MET', 'THE', 'MANY', 'WHERE'] +672-122797-0010-1539: ref=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0010-1539: hyp=['REJOICE', 'IN', 'THY', 'GROWTH', 'SAID', 'THE', 'SUNBEAMS'] +672-122797-0011-1540: ref=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0011-1540: hyp=['AND', 'THEN', 'WHAT', 'HAPPENS', 'THEN'] +672-122797-0012-1541: ref=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0012-1541: hyp=['I', 'WOULD', 'FAIN', 'KNOW', 'IF', 'I', 'AM', 'DESTINED', 'FOR', 'SO', 'GLORIOUS', 'A', 'CAREER', 'CRIED', 'THE', 'TREE', 'REJOICING'] +672-122797-0013-1542: ref=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] +672-122797-0013-1542: hyp=['I', 'AM', 'NOW', 'TALL', 'AND', 'MY', 'BRANCHES', 'SPREAD', 'LIKE', 'THE', 'OTHERS', 'THAT', 'WERE', 'CARRIED', 'OFF', 'LAST', 'YEAR', 'OH'] +672-122797-0014-1543: ref=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0014-1543: hyp=['WERE', 'I', 'BUT', 'ALREADY', 'ON', 'THE', 'CART'] +672-122797-0015-1544: ref=['WERE', 'I', 'IN', 'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOR', 'AND', 'MAGNIFICENCE'] +672-122797-0015-1544: hyp=['WHERE', 'I', 'IN', 
'THE', 'WARM', 'ROOM', 'WITH', 'ALL', 'THE', 'SPLENDOUR', 'AND', 'MAGNIFICENCE'] +672-122797-0016-1545: ref=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0016-1545: hyp=['YES', 'THEN', 'SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'WILL', 'SURELY', 'FOLLOW', 'OR', 'WHEREFORE', 'SHOULD', 'THEY', 'THUS', 'ORNAMENT', 'ME'] +672-122797-0017-1546: ref=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0017-1546: hyp=['SOMETHING', 'BETTER', 'SOMETHING', 'STILL', 'GRANDER', 'MUST', 'FOLLOW', 'BUT', 'WHAT'] +672-122797-0018-1547: ref=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'AIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0018-1547: hyp=['REJOICE', 'IN', 'OUR', 'PRESENCE', 'SAID', 'THE', 'HEIR', 'AND', 'THE', 'SUNLIGHT'] +672-122797-0019-1548: ref=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0019-1548: hyp=['REJOICE', 'IN', 'THY', 'OWN', 'FRESH', 'YOUTH'] +672-122797-0020-1549: ref=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0020-1549: hyp=['BUT', 'THE', 'TREE', 'DID', 'NOT', 'REJOICE', 'AT', 'ALL', 'HE', 'GREW', 'AND', 'GREW', 'AND', 'WAS', 'GREEN', 'BOTH', 'WINTER', 'AND', 'SUMMER'] +672-122797-0021-1550: ref=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0021-1550: hyp=['AND', 'TOWARDS', 'CHRISTMAS', 'HE', 'WAS', 'ONE', 'OF', 'THE', 'FIRST', 'THAT', 'WAS', 'CUT', 'DOWN'] +672-122797-0022-1551: ref=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0022-1551: hyp=['THE', 'AXE', 'STRUCK', 'DEEP', 'INTO', 'THE', 'VERY', 'PITH', 'THE', 'TREE', 'FELL', 'TO', 'THE', 'EARTH', 'WITH', 'A', 'SIGH', 'HE', 'FELT', 'A', 'PANG', 'IT', 'WAS', 'LIKE', 'A', 'SWOON', 'HE', 'COULD', 'NOT', 'THINK', 'OF', 'HAPPINESS', 'FOR', 'HE', 'WAS', 'SORROWFUL', 'AT', 'BEING', 'SEPARATED', 'FROM', 'HIS', 'HOME', 'FROM', 'THE', 'PLACE', 'WHERE', 'HE', 'HAD', 'SPRUNG', 'UP'] +672-122797-0023-1552: ref=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANYMORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0023-1552: hyp=['HE', 'WELL', 'KNEW', 'THAT', 'HE', 'SHOULD', 'NEVER', 'SEE', 'HIS', 'DEAR', 'OLD', 'COMRADES', 'THE', 'LITTLE', 'BUSHES', 'AND', 'FLOWERS', 'AROUND', 'HIM', 'ANY', 'MORE', 'PERHAPS', 'NOT', 'EVEN', 'THE', 'BIRDS'] +672-122797-0024-1553: ref=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0024-1553: hyp=['THE', 'DEPARTURE', 'WAS', 'NOT', 'AT', 'ALL', 'AGREEABLE'] +672-122797-0025-1554: ref=['THE', 'TREE', 'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 'UNLOADED', 'IN', 'A', 'COURT', 'YARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0025-1554: hyp=['THE', 'TREE', 'ONLY', 'CAME', 'TO', 'HIMSELF', 'WHEN', 'HE', 'WAS', 
'UNLOADED', 'IN', 'A', 'COURTYARD', 'WITH', 'THE', 'OTHER', 'TREES', 'AND', 'HEARD', 'A', 'MAN', 'SAY', 'THAT', 'ONE', 'IS', 'SPLENDID', 'WE', "DON'T", 'WANT', 'THE', 'OTHERS'] +672-122797-0026-1555: ref=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0026-1555: hyp=['THERE', 'TOO', 'WERE', 'LARGE', 'EASY', 'CHAIRS', 'SILKEN', 'SOFAS', 'LARGE', 'TABLES', 'FULL', 'OF', 'PICTURE', 'BOOKS', 'AND', 'FULL', 'OF', 'TOYS', 'WORTH', 'HUNDREDS', 'AND', 'HUNDREDS', 'OF', 'CROWNS', 'AT', 'LEAST', 'THE', 'CHILDREN', 'SAID', 'SO'] +672-122797-0027-1556: ref=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0027-1556: hyp=['THE', 'SERVANTS', 'AS', 'WELL', 'AS', 'THE', 'YOUNG', 'LADIES', 'DECORATED', 'IT'] +672-122797-0028-1557: ref=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0028-1557: hyp=['THIS', 'EVENING', 'THEY', 'ALL', 'SAID'] +672-122797-0029-1558: ref=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0029-1558: hyp=['HOW', 'IT', 'WILL', 'SHINE', 'THIS', 'EVENING'] +672-122797-0030-1559: ref=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0030-1559: hyp=['PERHAPS', 'THE', 'OTHER', 'TREES', 'FROM', 'THE', 'FOREST', 'WILL', 'COME', 'TO', 'LOOK', 'AT', 'ME'] +672-122797-0031-1560: ref=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0031-1560: hyp=['IT', 'BLAZED', 'UP', 'FAMOUSLY', 'HELP', 'HELP'] +672-122797-0032-1561: ref=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0032-1561: hyp=['CRIED', 'THE', 'YOUNG', 'LADIES', 'AND', 'THEY', 'QUICKLY', 'PUT', 'OUT', 'THE', 'FIRE'] +672-122797-0033-1562: ref=['A', 'STORY'] +672-122797-0033-1562: hyp=['A', 'STORY'] +672-122797-0034-1563: ref=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0034-1563: hyp=['A', 'STORY', 'CRIED', 'THE', 'CHILDREN', 'DRAWING', 'A', 'LITTLE', 'FAT', 'MAN', 'TOWARDS', 'THE', 'TREE'] +672-122797-0035-1564: ref=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0035-1564: hyp=['BUT', 'I', 'SHALL', 'TELL', 'ONLY', 'ONE', 'STORY'] +672-122797-0036-1565: ref=['HUMPY', 'DUMPY', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0036-1565: hyp=['HUMPY', "DON'T", 'BE', 'FELL', 'DOWNSTAIRS', 'AND', 'YET', 'HE', 'MARRIED', 'THE', 'PRINCESS'] +672-122797-0037-1566: ref=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0037-1566: hyp=["THAT'S", 'THE', 'WAY', 'OF', 'THE', 'WORLD'] +672-122797-0038-1567: ref=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] +672-122797-0038-1567: hyp=['THOUGHT', 'THE', 'FIR', 'TREE', 'AND', 'BELIEVED', 'IT', 'ALL', 'BECAUSE', 'THE', 'MAN', 'WHO', 'TOLD', 'THE', 'STORY', 'WAS', 'SO', 'GOOD', 'LOOKING', 'WELL', 'WELL'] +672-122797-0039-1568: ref=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0039-1568: hyp=['I', "WON'T", 'TREMBLE', 'TO', 'MORROW', 'THOUGHT', 'THE', 'FIR', 'TREE'] +672-122797-0040-1569: ref=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] 
+672-122797-0040-1569: hyp=['AND', 'THE', 'WHOLE', 'NIGHT', 'THE', 'TREE', 'STOOD', 'STILL', 'AND', 'IN', 'DEEP', 'THOUGHT'] +672-122797-0041-1570: ref=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0041-1570: hyp=['IN', 'THE', 'MORNING', 'THE', 'SERVANT', 'AND', 'THE', 'HOUSEMAID', 'CAME', 'IN'] +672-122797-0042-1571: ref=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0042-1571: hyp=['BUT', 'THEY', 'DRAGGED', 'HIM', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'UP', 'THE', 'STAIRS', 'INTO', 'THE', 'LOFT', 'AND', 'HERE', 'IN', 'A', 'DARK', 'CORNER', 'WHERE', 'NO', 'DAYLIGHT', 'COULD', 'ENTER', 'THEY', 'LEFT', 'HIM'] +672-122797-0043-1572: ref=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0043-1572: hyp=["WHAT'S", 'THE', 'MEANING', 'OF', 'THIS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0044-1573: ref=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0044-1573: hyp=['AND', 'HE', 'LEANED', 'AGAINST', 'THE', 'WALL', 'LOST', 'IN', 'REVERIE'] +672-122797-0045-1574: ref=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0045-1574: hyp=['TIME', 'ENOUGH', 'HAD', 'HE', 'TOO', 'FOR', 'HIS', 'REFLECTIONS', 'FOR', 'DAYS', 'AND', 'NIGHTS', 'PASSED', 'ON', 'AND', 'NOBODY', 'CAME', 'UP', 'AND', 'WHEN', 'AT', 'LAST', 'SOMEBODY', 'DID', 'COME', 'IT', 'WAS', 'ONLY', 'TO', 'PUT', 'SOME', 'GREAT', 'TRUNKS', 'IN', 'A', 'CORNER', 'OUT', 'OF', 'THE', 'WAY'] +672-122797-0046-1575: ref=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0046-1575: hyp=['TIS', 'NOW', 'WINTER', 'OUT', 'OF', 'DOORS', 'THOUGHT', 'THE', 'TREE'] +672-122797-0047-1576: ref=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0047-1576: hyp=['HOW', 'KIND', 'MAN', 'IS', 'AFTER', 'ALL'] +672-122797-0048-1577: ref=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0048-1577: hyp=['IF', 'IT', 'ONLY', 'WERE', 'NOT', 'SO', 'DARK', 'HERE', 'AND', 'SO', 'TERRIBLY', 'LONELY'] +672-122797-0049-1578: ref=['SQUEAK', 'SQUEAK'] +672-122797-0049-1578: hyp=['SQUEAK', 'SQUI'] +672-122797-0050-1579: ref=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0050-1579: hyp=['THEY', 'SNUFFED', 'ABOUT', 'THE', 'FIR', 'TREE', 'AND', 'RUSTLED', 'AMONG', 'THE', 'BRANCHES'] +672-122797-0051-1580: ref=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0051-1580: hyp=['I', 'AM', 'BY', 'NO', 'MEANS', 'OLD', 'SAID', 'THE', 'FIR', 'TREE'] +672-122797-0052-1581: ref=["THERE'S", 'MANY', 'A', 'ONE', 'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0052-1581: hyp=["THERE'S", 'MANY', 'A', 'ONE', 'CONSIDERABLY', 'OLDER', 'THAN', 'I', 'AM'] +672-122797-0053-1582: ref=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0053-1582: hyp=['THEY', 'WERE', 'SO', 'EXTREMELY', 'CURIOUS'] +672-122797-0054-1583: ref=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 'SAID', 'THE', 'TREE'] +672-122797-0054-1583: hyp=['I', 'KNOW', 'NO', 'SUCH', 'PLACE', 
'SAID', 'THE', 'TREE'] +672-122797-0055-1584: ref=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0055-1584: hyp=['AND', 'THEN', 'HE', 'TOLD', 'ALL', 'ABOUT', 'HIS', 'YOUTH', 'AND', 'THE', 'LITTLE', 'MICE', 'HAD', 'NEVER', 'HEARD', 'THE', 'LIKE', 'BEFORE', 'AND', 'THEY', 'LISTENED', 'AND', 'SAID'] +672-122797-0056-1585: ref=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0056-1585: hyp=['SAID', 'THE', 'FIR', 'TREE', 'THINKING', 'OVER', 'WHAT', 'HE', 'HAD', 'HIMSELF', 'RELATED'] +672-122797-0057-1586: ref=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0057-1586: hyp=['YES', 'IN', 'REALITY', 'THOSE', 'WERE', 'HAPPY', 'TIMES'] +672-122797-0058-1587: ref=['WHO', 'IS', 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0058-1587: hyp=['WHO', "IT'S", 'HUMPY', 'DUMPY', 'ASKED', 'THE', 'MICE'] +672-122797-0059-1588: ref=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0059-1588: hyp=['ONLY', 'THAT', 'ONE', 'ANSWERED', 'THE', 'TREE'] +672-122797-0060-1589: ref=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0060-1589: hyp=['IT', 'IS', 'A', 'VERY', 'STUPID', 'STORY'] +672-122797-0061-1590: ref=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0061-1590: hyp=["DON'T", 'YOU', 'KNOW', 'ONE', 'ABOUT', 'BACON', 'AND', 'TALLOW', 'CANDLES', "CAN'T", 'YOU', 'TELL', 'ANY', 'LARDER', 'STORIES'] +672-122797-0062-1591: ref=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0062-1591: hyp=['NO', 'SAID', 'THE', 'TREE'] +672-122797-0063-1592: ref=['THEN', 'GOOD', 'BYE', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0063-1592: hyp=['THEN', 'GOOD', 'BYE', 'SAID', 'THE', 'RATS', 'AND', 'THEY', 'WENT', 'HOME'] +672-122797-0064-1593: ref=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0064-1593: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'MICE', 'STAYED', 'AWAY', 'ALSO', 'AND', 'THE', 'TREE', 'SIGHED', 'AFTER', 'ALL', 'IT', 'WAS', 'VERY', 'PLEASANT', 'WHEN', 'THE', 'SLEEK', 'LITTLE', 'MICE', 'SAT', 'ROUND', 'ME', 'AND', 'LISTENED', 'TO', 'WHAT', 'I', 'TOLD', 'THEM'] +672-122797-0065-1594: ref=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0065-1594: hyp=['NOW', 'THAT', 'TOO', 'IS', 'OVER'] +672-122797-0066-1595: ref=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0066-1595: hyp=['WHY', 'ONE', 'MORNING', 'THERE', 'CAME', 'A', 'QUANTITY', 'OF', 'PEOPLE', 'AND', 'SET', 'TO', 'WORK', 'IN', 'THE', 'LOFT'] +672-122797-0067-1596: ref=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 'SHONE'] +672-122797-0067-1596: hyp=['THE', 'TRUNKS', 'WERE', 'MOVED', 'THE', 'TREE', 'WAS', 'PULLED', 'OUT', 'AND', 'THROWN', 'RATHER', 'HARD', 'IT', 'IS', 'TRUE', 'DOWN', 'ON', 'THE', 'FLOOR', 'BUT', 'A', 'MAN', 'DREW', 'HIM', 'TOWARDS', 'THE', 'STAIRS', 'WHERE', 'THE', 'DAYLIGHT', 
'SHONE'] +672-122797-0068-1597: ref=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0068-1597: hyp=['BUT', 'IT', 'WAS', 'NOT', 'THE', 'FIR', 'TREE', 'THAT', 'THEY', 'MEANT'] +672-122797-0069-1598: ref=['IT', 'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0069-1598: hyp=['IT', 'WAS', 'IN', 'A', 'CORNER', 'THAT', 'HE', 'LAY', 'AMONG', 'WEEDS', 'AND', 'NETTLES'] +672-122797-0070-1599: ref=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0070-1599: hyp=['THE', 'GOLDEN', 'STAR', 'OF', 'TINSEL', 'WAS', 'STILL', 'ON', 'THE', 'TOP', 'OF', 'THE', 'TREE', 'AND', 'GLITTERED', 'IN', 'THE', 'SUNSHINE'] +672-122797-0071-1600: ref=['IN', 'THE', 'COURT', 'YARD', 'SOME', 'OF', 'THE', 'MERRY', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0071-1600: hyp=['IN', 'THE', 'COURT', 'YARD', 'SOME', 'OF', 'THE', 'MARRIED', 'CHILDREN', 'WERE', 'PLAYING', 'WHO', 'HAD', 'DANCED', 'AT', 'CHRISTMAS', 'ROUND', 'THE', 'FIR', 'TREE', 'AND', 'WERE', 'SO', 'GLAD', 'AT', 'THE', 'SIGHT', 'OF', 'HIM'] +672-122797-0072-1601: ref=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0072-1601: hyp=['AND', 'THE', "GARDENER'S", 'BOY', 'CHOPPED', 'THE', 'TREE', 'INTO', 'SMALL', 'PIECES', 'THERE', 'WAS', 'A', 'WHOLE', 'HEAP', 'LYING', 'THERE'] +672-122797-0073-1602: ref=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'IT', 'SIGHED', 'SO', 'DEEPLY'] +672-122797-0073-1602: hyp=['THE', 'WOOD', 'FLAMED', 'UP', 'SPLENDIDLY', 'UNDER', 'THE', 'LARGE', 'BREWING', 'COPPER', 'AND', 'ITS', 'SIDE', 'SO', 'DEEPLY'] +672-122797-0074-1603: ref=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +672-122797-0074-1603: hyp=['HOWEVER', 'THAT', 'WAS', 'OVER', 'NOW', 'THE', 'TREE', 'GONE', 'THE', 'STORY', 'AT', 'AN', 'END'] +6829-68769-0000-1858: ref=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHECK'] +6829-68769-0000-1858: hyp=['KENNETH', 'AND', 'BETH', 'REFRAINED', 'FROM', 'TELLING', 'THE', 'OTHER', 'GIRLS', 'OR', 'UNCLE', 'JOHN', 'OF', 'OLD', 'WILL', "ROGERS'S", 'VISIT', 'BUT', 'THEY', 'GOT', 'MISTER', 'WATSON', 'IN', 'THE', 'LIBRARY', 'AND', 'QUESTIONED', 'HIM', 'CLOSELY', 'ABOUT', 'THE', 'PENALTY', 'FOR', 'FORGING', 'A', 'CHEQUE'] +6829-68769-0001-1859: ref=['IT', 'WAS', 'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0001-1859: hyp=['IT', 'WAS', 'A', 'SERIOUS', 'CRIME', 'INDEED', 'MISTER', 'WATSON', 'TOLD', 'THEM', 'AND', 'TOM', 'GATES', 'BADE', 'FAIR', 'TO', 'SERVE', 'A', 'LENGTHY', 'TERM', 'IN', 'THE', "STATE'S", 'PRISON', 'AS', 'A', 'CONSEQUENCE', 'OF', 'HIS', 'RASH', 'ACT'] +6829-68769-0002-1860: ref=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 
'LAWYER'] +6829-68769-0002-1860: hyp=['I', "CAN'T", 'SEE', 'IT', 'IN', 'THAT', 'LIGHT', 'SAID', 'THE', 'OLD', 'LAWYER'] +6829-68769-0003-1861: ref=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0003-1861: hyp=['IT', 'WAS', 'A', 'DELIBERATE', 'THEFT', 'FROM', 'HIS', 'EMPLOYERS', 'TO', 'PROTECT', 'A', 'GIRL', 'HE', 'LOVED'] +6829-68769-0004-1862: ref=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'CASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'MERE', 'BLUFF'] +6829-68769-0004-1862: hyp=['BUT', 'THEY', 'COULD', 'NOT', 'HAVE', 'PROVEN', 'A', 'GASE', 'AGAINST', 'LUCY', 'IF', 'SHE', 'WAS', 'INNOCENT', 'AND', 'ALL', 'THEIR', 'THREATS', 'OF', 'ARRESTING', 'HER', 'WERE', 'PROBABLY', 'A', 'MERE', 'BLUFF'] +6829-68769-0005-1863: ref=['HE', 'WAS', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0005-1863: hyp=['HE', 'WAS', 'A', 'SOFT', 'HEARTED', 'AND', 'IMPETUOUS', 'SAID', 'BETH', 'AND', 'BEING', 'IN', 'LOVE', 'HE', "DIDN'T", 'STOP', 'TO', 'COUNT', 'THE', 'COST'] +6829-68769-0006-1864: ref=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0006-1864: hyp=['IF', 'THE', 'PROSECUTION', 'WERE', 'WITHDRAWN', 'AND', 'THE', 'CASE', 'SETTLED', 'WITH', 'THE', 'VICTIM', 'OF', 'THE', 'FORGED', 'CHECK', 'THEN', 'THE', 'YOUNG', 'MAN', 'WOULD', 'BE', 'ALLOWED', 'HIS', 'FREEDOM'] +6829-68769-0007-1865: ref=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'IF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0007-1865: hyp=['BUT', 'UNDER', 'THE', 'CIRCUMSTANCES', 'I', 'DOUBT', 'OF', 'SUCH', 'AN', 'ARRANGEMENT', 'COULD', 'BE', 'MADE'] +6829-68769-0008-1866: ref=['FAIRVIEW', 'WAS', 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'JAIL'] +6829-68769-0008-1866: hyp=['FAIR', "VIEW'S", 'TWELVE', 'MILES', 'AWAY', 'BUT', 'BY', 'TEN', "O'CLOCK", 'THEY', 'DREW', 'UP', 'AT', 'THE', 'COUNTY', 'DRALE'] +6829-68769-0009-1867: ref=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0009-1867: hyp=['THEY', 'WERE', 'RECEIVED', 'IN', 'THE', 'LITTLE', 'OFFICE', 'BY', 'A', 'MAN', 'NAMED', 'MARKHAM', 'WHO', 'WAS', 'THE', 'JAILER'] +6829-68769-0010-1868: ref=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0010-1868: hyp=['WE', 'WISH', 'TO', 'TALK', 'WITH', 'HIM', 'ANSWERED', 'KENNETH', 'TALK'] +6829-68769-0011-1869: ref=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0011-1869: hyp=["I'M", 'RUNNING', 'FOR', 'REPRESENTATIVE', 'ON', 'THE', 'REPUBLICAN', 'TICKET', 'SAID', 'KENNETH', 'QUIETLY'] +6829-68769-0012-1870: ref=['OH', 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 'ALTERING', 'HIS', 'DEMEANOR'] +6829-68769-0012-1870: hyp=["I'LL", 'SAY', "THAT'S", 'DIFFERENT', 'OBSERVED', 'MARKHAM', 'ALTERING', 'HIS', 'DEMEANOUR'] +6829-68769-0013-1871: ref=['MAY', 'WE', 'SEE', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] +6829-68769-0013-1871: hyp=['MAYBE', 'SEA', 'GATES', 'AT', 'ONCE', 'ASKED', 'KENNETH'] 
+6829-68769-0014-1872: ref=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'A', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0014-1872: hyp=['THEY', 'FOLLOWED', 'THE', 'JAILER', 'ALONG', 'THE', 'SUCCESSION', 'OF', 'PASSAGES'] +6829-68769-0015-1873: ref=['SOMETIMES', "I'M", 'THAT', 'YEARNING', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AN', 'I', 'DUNNO', 'WHICH', 'IS', 'WORST', 'DYIN', 'ONE', 'WAY', 'OR', 'ANOTHER'] +6829-68769-0015-1873: hyp=['SOMETIMES', 'ON', 'THAT', 'YEARNIN', 'FOR', 'A', 'SMOKE', "I'M", 'NEARLY', 'CRAZY', 'AND', 'I', "DON'T", 'KNOW', 'WHICH', 'IS', 'WORSE', 'DYIN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER'] +6829-68769-0016-1874: ref=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0016-1874: hyp=['HE', 'UNLOCKED', 'THE', 'DOOR', 'AND', 'CALLED', "HERE'S", 'VISITORS', 'TOM'] +6829-68769-0017-1875: ref=['WORSE', 'TOM', 'WORSE', 'N', 'EVER', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0017-1875: hyp=['HORSE', 'TOM', 'WORSE', 'THAN', 'ARROW', 'REPLIED', 'THE', 'JAILER', 'GLOOMILY'] +6829-68769-0018-1876: ref=['MISS', 'DE', 'GRAF', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0018-1876: hyp=['MISTER', 'GRAFT', 'SAID', 'KENNETH', 'NOTICING', 'THE', "BOY'S", 'FACE', 'CRITICALLY', 'AS', 'HE', 'STOOD', 'WHERE', 'THE', 'LIGHT', 'FROM', 'THE', 'PASSAGE', 'FELL', 'UPON', 'IT'] +6829-68769-0019-1877: ref=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0019-1877: hyp=['SORRY', 'WE', "HAVEN'T", 'ANY', 'RECEPTION', 'ROOM', 'IN', 'THE', 'JAIL'] +6829-68769-0020-1878: ref=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'BENCH', 'HERE'] +6829-68769-0020-1878: hyp=['SIT', 'DOWN', 'PLEASE', 'SAID', 'GATES', 'IN', 'A', 'CHEERFUL', 'AND', 'PLEASANT', 'VOICE', "THERE'S", 'A', 'PINCH', 'HERE'] +6829-68769-0021-1879: ref=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0021-1879: hyp=['A', 'FRESH', 'WHOLESOME', 'LOOKING', 'BOY', 'WAS', 'TOM', 'GATES', 'WITH', 'STEADY', 'GRAY', 'EYES', 'AN', 'INTELLIGENT', 'FOREHEAD', 'BUT', 'A', 'SENSITIVE', 'RATHER', 'WEAK', 'MOUTH'] +6829-68769-0022-1880: ref=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'ARE', 'INTERESTED', 'IN', 'IT'] +6829-68769-0022-1880: hyp=['WE', 'HAVE', 'HEARD', 'SOMETHING', 'OF', 'YOUR', 'STORY', 'SAID', 'KENNETH', 'AND', 'OUR', 'INTERESTED', 'IN', 'IT'] +6829-68769-0023-1881: ref=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID'] +6829-68769-0023-1881: hyp=['I', "DIDN'T", 'STOP', 'TO', 'THINK', 'WHETHER', 'IT', 'WAS', 'FOOLISH', 'OR', 'NOT', 'I', 'DID', 'IT', 'AND', "I'M", 'GLAD', 'I', 'DID', 'IT'] +6829-68769-0024-1882: ref=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] +6829-68769-0024-1882: hyp=['OLD', 'WILL', 'IS', 'A', 'FINE', 'FELLOW', 'BUT', 'POOR', 'AND', 'HELPLESS', 'SINCE', 'MISSUS', 'ROGERS', 'HAD', 'HER', 'ACCIDENT'] +6829-68769-0025-1883: ref=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 
'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0025-1883: hyp=['THEN', 'ROGERS', "WOULDN'T", 'DO', 'ANYTHING', 'BUT', 'LEAD', 'HER', 'AROUND', 'AND', 'WAIT', 'UPON', 'HER', 'AND', 'THE', 'PLACE', 'WENT', 'TO', 'RACK', 'AND', 'RUIN'] +6829-68769-0026-1884: ref=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] +6829-68769-0026-1884: hyp=['HE', 'SPOKE', 'SIMPLY', 'BUT', 'PACED', 'UP', 'AND', 'DOWN', 'THE', 'NARROW', 'CELL', 'IN', 'FRONT', 'OF', 'THEM'] +6829-68769-0027-1885: ref=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0027-1885: hyp=['WHOSE', 'NAME', 'DID', 'YOU', 'SIGN', 'TO', 'THE', 'CHECK', 'ASKED', 'KENNETH'] +6829-68769-0028-1886: ref=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCERN'] +6829-68769-0028-1886: hyp=['HE', 'IS', 'SUPPOSED', 'TO', 'SIGN', 'ALL', 'THE', 'CHECKS', 'OF', 'THE', 'CONCERN'] +6829-68769-0029-1887: ref=["IT'S", 'A', 'STOCK', 'COMPANY', 'AND', 'RICH'] +6829-68769-0029-1887: hyp=["IT'S", 'A', 'STOCK', 'COMPANY', 'IN', 'RICH'] +6829-68769-0030-1888: ref=['I', 'WAS', 'BOOKKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0030-1888: hyp=['I', 'WAS', 'BITKEEPER', 'SO', 'IT', 'WAS', 'EASY', 'TO', 'GET', 'A', 'BLANK', 'CHECK', 'AND', 'FORGE', 'THE', 'SIGNATURE'] +6829-68769-0031-1889: ref=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'THEM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0031-1889: hyp=['AS', 'REGARDS', 'MY', 'ROBBING', 'THE', 'COMPANY', "I'LL", 'SAY', 'THAT', 'I', 'SAVED', 'HIM', 'A', 'HEAVY', 'LOSS', 'ONE', 'DAY'] +6829-68769-0032-1890: ref=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0032-1890: hyp=['I', 'DISCOVERED', 'AND', 'PUT', 'OUT', 'A', 'FIRE', 'THAT', 'WOULD', 'HAVE', 'DESTROYED', 'THE', 'WHOLE', 'PLANT', 'BUT', 'MARSHALL', 'NEVER', 'EVEN', 'THANKED', 'ME'] +6829-68769-0033-1891: ref=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0033-1891: hyp=['IT', 'WAS', 'BETTER', 'FOR', 'HIM', 'TO', 'THINK', 'THE', 'GIRL', 'UNFEELING', 'THAN', 'TO', 'KNOW', 'THE', 'TRUTH'] +6829-68769-0034-1892: ref=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHALL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0034-1892: hyp=["I'M", 'GOING', 'TO', 'SEE', 'MISTER', 'MARSHAL', 'SAID', 'KENNETH', 'AND', 'DISCOVER', 'WHAT', 'I', 'CAN', 'DO', 'TO', 'ASSIST', 'YOU', 'THANK', 'YOU', 'SIR'] +6829-68769-0035-1893: ref=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0035-1893: hyp=['IT', "WON'T", 'BE', 'MUCH', 'BUT', "I'M", 'GRATEFUL', 'TO', 'FIND', 'A', 'FRIEND'] +6829-68769-0036-1894: ref=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0036-1894: hyp=['THEY', 'LEFT', 'HIM', 'THEN', 'FOR', 'THE', 'JAILER', 'ARRIVED', 'TO', 'UNLOCK', 'THE', 'DOOR', 'AND', 'ESCORT', 'THEM', 'TO', 'THE', 'OFFICE'] +6829-68769-0037-1895: ref=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 'DAY'] +6829-68769-0037-1895: hyp=["I'VE", 'SEEN', 'LOTS', 'OF', 'THAT', 'KIND', 'IN', 'MY', 
'DAY'] +6829-68769-0038-1896: ref=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0038-1896: hyp=['AND', 'IT', 'RUINS', 'A', "MAN'S", 'DISPOSITION'] +6829-68769-0039-1897: ref=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0039-1897: hyp=['HE', 'LOOKED', 'UP', 'RATHER', 'UNGRACIOUSLY', 'BUT', 'MOTIONED', 'THEM', 'TO', 'BE', 'SEATED'] +6829-68769-0040-1898: ref=['SOME', 'GIRL', 'HAS', 'BEEN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0040-1898: hyp=['SOME', 'GIRL', 'HAS', 'BEEN', 'IN', 'HERE', 'TWICE', 'TO', 'INTERVIEW', 'MY', 'MEN', 'AND', 'I', 'HAVE', 'REFUSED', 'TO', 'ADMIT', 'HER'] +6829-68769-0041-1899: ref=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0041-1899: hyp=["I'M", 'NOT', 'ELECTIONEERING', 'JUST', 'NOW'] +6829-68769-0042-1900: ref=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0042-1900: hyp=['OH', 'WELL', 'SIR', 'WHAT', 'ABOUT', 'HIM'] +6829-68769-0043-1901: ref=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'IN', "STATE'S", 'PRISON'] +6829-68769-0043-1901: hyp=['AND', 'HE', 'DESERVES', 'A', 'TERM', 'AND', "STATE'S", 'PRISON'] +6829-68769-0044-1902: ref=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'IN', 'ANNOYANCE'] +6829-68769-0044-1902: hyp=['IT', 'HAS', 'COST', 'ME', 'TWICE', 'SIXTY', 'DOLLARS', 'AN', 'ANNOYANCE'] +6829-68769-0045-1903: ref=["I'LL", 'PAY', 'ALL', 'THE', 'COSTS', 'BESIDES'] +6829-68769-0045-1903: hyp=["I'LL", 'PAY', 'ALL', 'THE', 'COST', 'BESIDES'] +6829-68769-0046-1904: ref=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0046-1904: hyp=["YOU'RE", 'FOOLISH', 'WHY', 'SHOULD', 'YOU', 'DO', 'ALL', 'THIS'] +6829-68769-0047-1905: ref=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHALL'] +6829-68769-0047-1905: hyp=['I', 'HAVE', 'MY', 'OWN', 'REASONS', 'MISTER', 'MARSHAL'] +6829-68769-0048-1906: ref=['GIVE', 'ME', 'A', 'CHECK', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'QUASH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0048-1906: hyp=['GIVE', 'ME', 'A', 'CHECK', 'FOR', 'A', 'HUNDRED', 'AND', 'FIFTY', 'AND', "I'LL", 'TURN', 'OVER', 'TO', 'YOU', 'THE', 'FORGED', 'CHECK', 'AND', 'QUASH', 'FURTHER', 'PROCEEDINGS'] +6829-68769-0049-1907: ref=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0049-1907: hyp=['HE', 'DETESTED', 'THE', 'GRASPING', 'DISPOSITION', 'THAT', 'WOULD', 'ENDEAVOUR', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'HIS', 'EVIDENT', 'DESIRE', 'TO', 'HELP', 'YOUNG', 'GATES'] +6829-68769-0050-1908: ref=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0050-1908: hyp=['BETH', 'UNEASY', 'AT', 'HIS', 'SILENCE', 'NUDGED', 'HIM'] +6829-68769-0051-1909: ref=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0051-1909: hyp=['THERE', 'WAS', 'A', 'GRIM', 'SMILE', 'OF', 'AMUSEMENT', 'ON', 'HIS', 'SHREWD', 'FACE'] +6829-68769-0052-1910: ref=['HE', 'MIGHT', 'HAVE', 'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0052-1910: hyp=['HE', 'MIGHT', 'HAVE', 'HAD', 'THAT', 'FORGED', 'CHECK', 'FOR', 'THE', 'FACE', 'OF', 'IT', 'IF', "HE'D", 'BEEN', 'SHARP'] +6829-68769-0053-1911: ref=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 
'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68769-0053-1911: hyp=['AND', 'TO', 'THINK', 'WE', 'CAN', 'SAVE', 'ALL', 'THAT', 'MISERY', 'AND', 'DESPAIR', 'BY', 'THE', 'PAYMENT', 'OF', 'A', 'HUNDRED', 'AND', 'FIFTY', 'DOLLARS'] +6829-68771-0000-1912: ref=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', "FORBES'S", 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0000-1912: hyp=['SO', 'TO', 'THE', 'SURPRISE', 'OF', 'THE', 'DEMOCRATIC', 'COMMITTEE', 'AND', 'ALL', 'HIS', 'FRIENDS', 'MISTER', 'HOPKINS', 'ANNOUNCED', 'THAT', 'HE', 'WOULD', 'OPPOSE', 'FORTS', 'AGGRESSIVE', 'CAMPAIGN', 'WITH', 'AN', 'EQUAL', 'AGGRESSIVENESS', 'AND', 'SPEND', 'AS', 'MANY', 'DOLLARS', 'IN', 'DOING', 'SO', 'AS', 'MIGHT', 'BE', 'NECESSARY'] +6829-68771-0001-1913: ref=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMEN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVASS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0001-1913: hyp=['ONE', 'OF', 'MISTER', "HOPKINS'S", 'FIRST', 'TASKS', 'AFTER', 'CALLING', 'HIS', 'FAITHFUL', 'HENCHMAN', 'AROUND', 'HIM', 'WAS', 'TO', 'MAKE', 'A', 'CAREFUL', 'CANVAS', 'OF', 'THE', 'VOTERS', 'OF', 'HIS', 'DISTRICT', 'TO', 'SEE', 'WHAT', 'WAS', 'STILL', 'TO', 'BE', 'ACCOMPLISHED'] +6829-68771-0002-1914: ref=['THE', 'WEAK', 'KNEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'ANOTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0002-1914: hyp=['THE', 'WEAK', 'NEED', 'CONTINGENCY', 'MUST', 'BE', 'STRENGTHENED', 'AND', 'FORTIFIED', 'AND', 'A', 'COUPLE', 'OF', 'HUNDRED', 'VOTES', 'IN', 'ONE', 'WAY', 'OR', 'THE', 'OTHER', 'SECURED', 'FROM', 'THE', 'OPPOSITION'] +6829-68771-0003-1915: ref=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0003-1915: hyp=['THE', 'DEMOCRATIC', 'COMMITTEE', 'FIGURED', 'OUT', 'A', 'WAY', 'TO', 'DO', 'THIS'] +6829-68771-0004-1916: ref=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELECT', 'HOPKINS'] +6829-68771-0004-1916: hyp=['UNDER', 'ORDINARY', 'CONDITIONS', 'REYNOLDS', 'WAS', 'SURE', 'TO', 'BE', 'ELECTED', 'BUT', 'THE', 'COMMITTEE', 'PROPOSED', 'TO', 'SACRIFICE', 'HIM', 'IN', 'ORDER', 'TO', 'ELEC', 'HOPKINS'] +6829-68771-0005-1917: ref=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0005-1917: hyp=['THE', 'ONLY', 'THING', 'NECESSARY', 'WAS', 'TO', 'FIX', 'SETH', 'REYNOLDS', 'AND', 'THIS', 'HOPKINS', 'ARRANGED', 'PERSONALLY'] +6829-68771-0006-1918: ref=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0006-1918: hyp=['AND', 'THIS', 'WAS', 'WHY', 'KENNETH', 'AND', 'BETH', 'DISCOVERED', 'HIM', 'CONVERSING', 'WITH', 'THE', 'YOUNG', 'WOMAN', 'IN', 'THE', 'BUGGY'] +6829-68771-0007-1919: ref=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 
'COMING', 'RECEPTION', 'TO', 'THE', "WOMAN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANTS', 'OF', 'THE', 'OTHER', 'VEHICLE', 'WERE'] +6829-68771-0007-1919: hyp=['THE', 'DESCRIPTION', 'SHE', 'GAVE', 'OF', 'THE', 'COMING', 'RECEPTION', 'TO', 'THE', "WOMEN'S", 'POLITICAL', 'LEAGUE', 'WAS', 'SO', 'HUMOROUS', 'AND', 'DIVERTING', 'THAT', 'THEY', 'WERE', 'BOTH', 'LAUGHING', 'HEARTILY', 'OVER', 'THE', 'THING', 'WHEN', 'THE', 'YOUNG', 'PEOPLE', 'PASSED', 'THEM', 'AND', 'THUS', 'MISTER', 'HOPKINS', 'FAILED', 'TO', 'NOTICE', 'WHO', 'THE', 'OCCUPANTS', 'OF', 'THE', 'OTHER', 'VEHICLE', 'WERE'] +6829-68771-0008-1920: ref=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0008-1920: hyp=['THESE', 'WOMEN', 'WERE', 'FLATTERED', 'BY', 'THE', 'ATTENTION', 'OF', 'THE', 'YOUNG', 'LADY', 'AND', 'HAD', 'PROMISED', 'TO', 'ASSIST', 'IN', 'ELECTING', 'MISTER', 'FORBES'] +6829-68771-0009-1921: ref=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0009-1921: hyp=['LOUISE', 'HOPED', 'FOR', 'EXCELLENT', 'RESULTS', 'FROM', 'THIS', 'ORGANIZATION', 'AND', 'WISHED', 'THE', 'ENTERTAINMENT', 'TO', 'BE', 'SO', 'EFFECTIVE', 'IN', 'WINNING', 'THEIR', 'GOOD', 'WILL', 'THAT', 'THEY', 'WOULD', 'WORK', 'EARNESTLY', 'FOR', 'THE', 'CAUSE', 'IN', 'WHICH', 'THEY', 'WERE', 'ENLISTED'] +6829-68771-0010-1922: ref=['THE', 'FAIRVIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0010-1922: hyp=['THE', 'FAIR', 'VIEW', 'BAND', 'WAS', 'ENGAGED', 'TO', 'DISCOURSE', 'AS', 'MUCH', 'HARMONY', 'AS', 'IT', 'COULD', 'PRODUCE', 'AND', 'THE', 'RESOURCES', 'OF', 'THE', 'GREAT', 'HOUSE', 'WERE', 'TAXED', 'TO', 'ENTERTAIN', 'THE', 'GUESTS'] +6829-68771-0011-1923: ref=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0011-1923: hyp=['TABLES', 'WERE', 'SPREAD', 'ON', 'THE', 'LAWN', 'AND', 'A', 'DAINTY', 'BUT', 'SUBSTANTIAL', 'REPAST', 'WAS', 'TO', 'BE', 'SERVED'] +6829-68771-0012-1924: ref=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WHERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'INVITED', 'GUESTS'] +6829-68771-0012-1924: hyp=['THIS', 'WAS', 'THE', 'FIRST', 'OCCASION', 'WITHIN', 'A', 'GENERATION', 'WHEN', 'SUCH', 'AN', 'ENTERTAINMENT', 'HAD', 'BEEN', 'GIVEN', 'AT', 'ELMHURST', 'AND', 'THE', 'ONLY', 'ONE', 'WITHIN', 'THE', 'MEMORY', 'OF', 'MAN', 'WERE', 'THE', 'NEIGHBORS', 'AND', 'COUNTRY', 'PEOPLE', 'HAD', 'BEEN', 'THE', 'INVITED', 'GUESTS'] +6829-68771-0013-1925: ref=['THE', 'ATTENDANCE', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 
'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FETE'] +6829-68771-0013-1925: hyp=['THE', 'ATTENDANTS', 'WAS', 'UNEXPECTEDLY', 'LARGE', 'AND', 'THE', 'GIRLS', 'WERE', 'DELIGHTED', 'FORESEEING', 'GREAT', 'SUCCESS', 'FOR', 'THEIR', 'FIGHT'] +6829-68771-0014-1926: ref=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANTS', 'BETH', 'SAID', 'LOUISE', 'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0014-1926: hyp=['WE', 'OUGHT', 'TO', 'HAVE', 'MORE', 'ATTENDANCE', 'BETH', 'SAID', 'LOUISE', 'APPROACHING', 'HER', 'COUSIN'] +6829-68771-0015-1927: ref=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0015-1927: hyp=["WON'T", 'YOU', 'RUN', 'INTO', 'THE', 'HOUSE', 'AND', 'SEE', 'IF', 'MARTHA', "CAN'T", 'SPARE', 'ONE', 'OR', 'TWO', 'MORE', 'MAIDS'] +6829-68771-0016-1928: ref=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THE', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'ESPECIAL', 'FAVORITE'] +6829-68771-0016-1928: hyp=['SHE', 'WAS', 'VERY', 'FOND', 'OF', 'THE', 'YOUNG', 'LADIES', 'WHOM', 'SHE', 'HAD', 'KNOWN', 'WHEN', 'AUNT', 'JANE', 'WAS', 'THEIR', 'MISTRESS', 'HERE', 'AND', 'BETH', 'WAS', 'HER', 'SPECIAL', 'FAVOURITE'] +6829-68771-0017-1929: ref=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'AND', 'BETH', 'FOLLOWED'] +6829-68771-0017-1929: hyp=['THE', 'HOUSEKEEPER', 'LED', 'THE', 'WAY', 'IN', 'BETH', 'FOLLOWED'] +6829-68771-0018-1930: ref=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'A', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0018-1930: hyp=['FOR', 'A', 'MOMENT', 'BETH', 'STOOD', 'STARING', 'WHILE', 'THE', 'NEW', 'MAID', 'REGARDED', 'HER', 'WITH', 'COMPOSURE', 'AND', 'OF', 'SLIGHT', 'SMILE', 'UPON', 'HER', 'BEAUTIFUL', 'FACE'] +6829-68771-0019-1931: ref=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0019-1931: hyp=['SHE', 'WAS', 'DRESSED', 'IN', 'THE', 'REGULATION', 'COSTUME', 'OF', 'THE', 'MAIDS', 'AT', 'ELMHURST', 'A', 'PLAIN', 'BLACK', 'GOWN', 'WITH', 'A', 'WHITE', 'APRON', 'AND', 'CAP'] +6829-68771-0020-1932: ref=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0020-1932: hyp=['THEN', 'SHE', 'GAVE', 'A', 'LITTLE', 'LAUGH', 'AND', 'REPLIED', 'NO', 'MISS', 'BETH', "I'M", 'ELIZABETH', 'PARSONS'] +6829-68771-0021-1933: ref=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0021-1933: hyp=['BUT', 'IT', "CAN'T", 'BE', 'PROTESTED', 'THE', 'GIRL'] +6829-68771-0022-1934: ref=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0022-1934: hyp=['I', 'ATTEND', 'TO', 'THE', 'HOUSEHOLD', 'MENDING', 'YOU', 'KNOW', 'AND', 'CARE', 'FOR', 'THE', 'LINEN'] +6829-68771-0023-1935: ref=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 'HOME'] +6829-68771-0023-1935: hyp=['YOU', 'SPEAK', 'LIKE', 'AN', 'EDUCATED', 'PERSON', 'SAID', 'BETH', 'WONDERINGLY', 'WHERE', 'IS', 'YOUR', 'HOME'] +6829-68771-0024-1936: ref=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAID', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 
'VISITOR'] +6829-68771-0024-1936: hyp=['FOR', 'THE', 'FIRST', 'TIME', 'THE', 'MAIDS', 'SEEMED', 'A', 'LITTLE', 'CONFUSED', 'AND', 'HER', 'GAZE', 'WANDERED', 'FROM', 'THE', 'FACE', 'OF', 'HER', 'VISITOR'] +6829-68771-0025-1937: ref=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCKED', 'SLOWLY', 'BACK', 'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0025-1937: hyp=['SHE', 'SAT', 'DOWN', 'IN', 'A', 'ROCKING', 'CHAIR', 'AND', 'CLASPING', 'HER', 'HANDS', 'IN', 'HER', 'LAP', 'ROCKED', 'SLOWLY', 'BACK', 'AND', 'FORTH', "I'M", 'SORRY', 'SAID', 'BETH'] +6829-68771-0026-1938: ref=['ELIZA', 'PARSONS', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0026-1938: hyp=['ELIZA', 'PARSONS', 'SHOOK', 'HER', 'HEAD'] +6829-68771-0027-1939: ref=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0027-1939: hyp=['THEY', 'THEY', 'EXCITE', 'ME', 'IN', 'SOME', 'WAY', 'AND', 'I', 'I', "CAN'T", 'BEAR', 'THEM', 'YOU', 'MUST', 'EXCUSE', 'ME'] +6829-68771-0028-1940: ref=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0028-1940: hyp=['SHE', 'EVEN', 'SEEMED', 'MILDLY', 'AMUSED', 'AT', 'THE', 'ATTENTION', 'SHE', 'ATTRACTED'] +6829-68771-0029-1941: ref=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'IN', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0029-1941: hyp=['BETH', 'WAS', 'A', 'BEAUTIFUL', 'GIRL', 'THE', 'HANDSOMEST', 'OF', 'THE', 'THREE', 'COUSINS', 'BY', 'FAR', 'YET', 'ELIZA', 'SURPASSED', 'HER', 'A', 'NATURAL', 'CHARM', 'AND', 'SEEMED', 'WELL', 'AWARE', 'OF', 'THE', 'FACT'] +6829-68771-0030-1942: ref=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0030-1942: hyp=['HER', 'MANNER', 'WAS', 'NEITHER', 'INDEPENDENT', 'NOR', 'ASSERTIVE', 'BUT', 'RATHER', 'ONE', 'OF', 'WELL', 'BRED', 'COMPOSURE', 'AND', 'CALM', 'RELIANCE'] +6829-68771-0031-1943: ref=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0031-1943: hyp=['HER', 'EYES', 'WANDERED', 'TO', 'THE', "MAID'S", 'HANDS'] +6829-68771-0032-1944: ref=['HOWEVER', 'HER', 'FEATURES', 'AND', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0032-1944: hyp=['HOWEVER', 'HER', 'FEATURES', 'IN', 'FORM', 'MIGHT', 'REPRESS', 'ANY', 'EVIDENCE', 'OF', 'NERVOUSNESS', 'THESE', 'HANDS', 'TOLD', 'A', 'DIFFERENT', 'STORY'] +6829-68771-0033-1945: ref=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0033-1945: hyp=['SHE', 'ROSE', 'QUICKLY', 'TO', 'HER', 'FEET', 'WITH', 'AN', 'IMPETUOUS', 'GESTURE', 'THAT', 'MADE', 'HER', 'VISITOR', 'CATCH', 'HER', 'BREATH'] +6829-68771-0034-1946: ref=['I', 'WISH', 'I', 'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0034-1946: hyp=['I', 'WISH', 'I', 'KNEW', 'MYSELF', 'SHE', 'CRIED', 'FIERCELY'] +6829-68771-0035-1947: ref=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 'ESCAPE', 'YOU'] +6829-68771-0035-1947: hyp=['WILL', 'YOU', 'LEAVE', 'ME', 'ALONE', 'IN', 'MY', 'OWN', 'ROOM', 'OR', 'MUST', 'I', 'GO', 'AWAY', 'TO', 
'ESCAPE', 'YOU'] +6829-68771-0036-1948: ref=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6829-68771-0036-1948: hyp=['ELIZA', 'CLOSED', 'THE', 'DOOR', 'BEHIND', 'HER', 'WITH', 'A', 'DECIDED', 'SLAM', 'AND', 'A', 'KEY', 'CLICKED', 'IN', 'THE', 'LOCK'] +6930-75918-0000-0: ref=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0000-0: hyp=['CONCORD', 'RETURNED', 'TO', 'ITS', 'PLACE', 'AMIDST', 'THE', 'TENTS'] +6930-75918-0001-1: ref=['THE', 'ENGLISH', 'FORWARDED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0001-1: hyp=['THE', 'ENGLISH', 'FOOTED', 'TO', 'THE', 'FRENCH', 'BASKETS', 'OF', 'FLOWERS', 'OF', 'WHICH', 'THEY', 'HAD', 'MADE', 'A', 'PLENTIFUL', 'PROVISION', 'TO', 'GREET', 'THE', 'ARRIVAL', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'THE', 'FRENCH', 'IN', 'RETURN', 'INVITED', 'THE', 'ENGLISH', 'TO', 'A', 'SUPPER', 'WHICH', 'WAS', 'TO', 'BE', 'GIVEN', 'THE', 'NEXT', 'DAY'] +6930-75918-0002-2: ref=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0002-2: hyp=['CONGRATULATIONS', 'WERE', 'POURED', 'IN', 'UPON', 'THE', 'PRINCESS', 'EVERYWHERE', 'DURING', 'HER', 'JOURNEY'] +6930-75918-0003-3: ref=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0003-3: hyp=['FROM', 'THE', 'RESPECT', 'PAID', 'HER', 'ON', 'ALL', 'SIDES', 'SHE', 'SEEMED', 'LIKE', 'A', 'QUEEN', 'AND', 'FROM', 'THE', 'ADORATION', 'WITH', 'WHICH', 'SHE', 'WAS', 'TREATED', 'BY', 'TWO', 'OR', 'THREE', 'SHE', 'APPEARED', 'AN', 'OBJECT', 'OF', 'WORSHIP', 'THE', 'QUEEN', 'MOTHER', 'GAVE', 'THE', 'FRENCH', 'THE', 'MOST', 'AFFECTIONATE', 'RECEPTION', 'FRANCE', 'WAS', 'HER', 'NATIVE', 'COUNTRY', 'AND', 'SHE', 'HAD', 'SUFFERED', 'TOO', 'MUCH', 'UNHAPPINESS', 'IN', 'ENGLAND', 'FOR', 'ENGLAND', 'TO', 'HAVE', 'MADE', 'HER', 'FORGET', 'FRANCE'] +6930-75918-0004-4: ref=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 'RECEIVED', 'AND', 'WHERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'BEFORE', 'THEM'] +6930-75918-0004-4: hyp=['SHE', 'TAUGHT', 'HER', 'DAUGHTER', 'THEN', 'BY', 'HER', 'OWN', 'AFFECTION', 'FOR', 'IT', 'THAT', 'LOVE', 'FOR', 'A', 'COUNTRY', 'WHERE', 'THEY', 'HAD', 'BOTH', 'BEEN', 'HOSPITABLY', 'RECEIVED', 'AND', 'WERE', 'A', 'BRILLIANT', 'FUTURE', 'OPENED', 'FOR', 'THEM'] +6930-75918-0005-5: ref=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 
'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 'LIMBS'] +6930-75918-0005-5: hyp=['THE', 'COUNT', 'HAD', 'THROWN', 'HIMSELF', 'BACK', 'ON', 'HIS', 'SEAT', 'LEANING', 'HIS', 'SHOULDERS', 'AGAINST', 'THE', 'PARTITION', 'OF', 'THE', 'TENT', 'AND', 'REMAINED', 'THUS', 'HIS', 'FACE', 'BURIED', 'IN', 'HIS', 'HANDS', 'WITH', 'HEAVING', 'CHEST', 'AND', 'RESTLESS', 'LIMBS'] +6930-75918-0006-6: ref=['THIS', 'HAS', 'INDEED', 'BEEN', 'A', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0006-6: hyp=['THIS', 'HAS', 'INDEED', 'BEEN', 'A', 'HARASSING', 'DAY', 'CONTINUED', 'THE', 'YOUNG', 'MAN', 'HIS', 'EYES', 'FIXED', 'UPON', 'HIS', 'FRIEND'] +6930-75918-0007-7: ref=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0007-7: hyp=['YOU', 'WILL', 'BE', 'FRANK', 'WITH', 'ME', 'I', 'ALWAYS', 'AM'] +6930-75918-0008-8: ref=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0008-8: hyp=['CAN', 'YOU', 'IMAGINE', 'WHY', 'BUCKINGHAM', 'HAS', 'BEEN', 'SO', 'VIOLENT', 'I', 'SUSPECT'] +6930-75918-0009-9: ref=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0009-9: hyp=['IT', 'IS', 'YOU', 'WHO', 'ARE', 'MISTAKEN', 'RAOUL', 'I', 'HAVE', 'READ', 'HIS', 'DISTRESS', 'IN', 'HIS', 'EYES', 'IN', 'HIS', 'EVERY', 'GESTURE', 'AND', 'ACTION', 'THE', 'WHOLE', 'DAY'] +6930-75918-0010-10: ref=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0010-10: hyp=['I', 'CAN', 'PERCEIVE', 'LOVE', 'CLEARLY', 'ENOUGH'] +6930-75918-0011-11: ref=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0011-11: hyp=['I', 'AM', 'CONVINCED', 'OF', 'WHAT', 'I', 'SAY', 'SAID', 'THE', 'COUNT'] +6930-75918-0012-12: ref=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0012-12: hyp=['IT', 'IS', 'ANNOYANCE', 'THEN'] +6930-75918-0013-13: ref=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0013-13: hyp=['IN', 'THOSE', 'VERY', 'TERMS', 'I', 'EVEN', 'ADDED', 'MORE'] +6930-75918-0014-14: ref=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BEAR', 'ON', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0014-14: hyp=['BUT', 'CONTINUED', 'RAOUL', 'NOT', 'INTERRUPTED', 'BY', 'THIS', 'MOVEMENT', 'OF', 'HIS', 'FRIEND', 'HEAVEN', 'BE', 'PRAISED', 'THE', 'FRENCH', 'WHO', 'ARE', 'PRONOUNCED', 'TO', 'BE', 'THOUGHTLESS', 'AND', 'INDISCREET', 'RECKLESS', 'EVEN', 'ARE', 'CAPABLE', 'OF', 'BRINGING', 'A', 'CALM', 'AND', 'SOUND', 'JUDGMENT', 'TO', 'BEAR', 'ON', 'MATTERS', 'OF', 'SUCH', 'HIGH', 'IMPORTANCE'] +6930-75918-0015-15: ref=['THUS', 'IT', 'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', "COUNTRY'S", 'OUR', "MASTER'S", 'AND', 'OUR', 'OWN'] +6930-75918-0015-15: hyp=['THUS', 'IT', 'IS', 'THAT', 'THE', 'HONOR', 'OF', 'THREE', 'IS', 'SAVED', 'OUR', 'COUNTRY', 'OUR', 'MASTERS', 'AND', 'OUR', 'OWN'] +6930-75918-0016-16: ref=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 
'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0016-16: hyp=['YES', 'I', 'NEED', 'REPOSE', 'MANY', 'THINGS', 'HAVE', 'AGITATED', 'ME', 'TO', 'DAY', 'BOTH', 'IN', 'MIND', 'AND', 'BODY', 'WHEN', 'YOU', 'RETURN', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'THE', 'SAME', 'MAN'] +6930-75918-0017-17: ref=['BUT', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0017-17: hyp=['BUT', 'IN', 'THIS', 'FRIENDLY', 'PRESSURE', 'RAOUL', 'COULD', 'DETECT', 'THE', 'NERVOUS', 'AGITATION', 'OF', 'A', 'GREAT', 'INTERNAL', 'CONFLICT'] +6930-75918-0018-18: ref=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0018-18: hyp=['THE', 'NIGHT', 'WAS', 'CLEAR', 'STARLIT', 'AND', 'SPLENDID', 'THE', 'TEMPEST', 'HAD', 'PASSED', 'AWAY', 'AND', 'THE', 'SWEET', 'INFLUENCES', 'OF', 'THE', 'EVENING', 'HAD', 'RESTORED', 'LIFE', 'PEACE', 'AND', 'SECURITY', 'EVERYWHERE'] +6930-75918-0019-19: ref=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0019-19: hyp=['UPON', 'THE', 'LARGE', 'SQUARE', 'IN', 'FRONT', 'OF', 'THE', 'HOTEL', 'THE', 'SHADOWS', 'OF', 'THE', 'TENTS', 'INTERSECTED', 'BY', 'THE', 'GOLDEN', 'MOONBEAMS', 'FORMED', 'AS', 'IT', 'WERE', 'A', 'HUGE', 'MOSAIC', 'OF', 'JET', 'AND', 'YELLOW', 'FLAGSTONES'] +6930-75918-0020-20: ref=['BRAGELONNE', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-75918-0020-20: hyp=['BRAGOLON', 'WATCHED', 'FOR', 'SOME', 'TIME', 'THE', 'CONDUCT', 'OF', 'THE', 'TWO', 'LOVERS', 'LISTENED', 'TO', 'THE', 'LOUD', 'AND', 'UNCIVIL', 'SLUMBERS', 'OF', 'MANICAMP', 'WHO', 'SNORED', 'AS', 'IMPERIOUSLY', 'AS', 'THOUGH', 'HE', 'WAS', 'WEARING', 'HIS', 'BLUE', 'AND', 'GOLD', 'INSTEAD', 'OF', 'HIS', 'VIOLET', 'SUIT'] +6930-76324-0000-21: ref=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0000-21: hyp=['GOLIATH', 'MAKES', 'ANOTHER', 'DISCOVERY'] +6930-76324-0001-22: ref=['THEY', 'WERE', 'CERTAINLY', 'NO', 'NEARER', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0001-22: hyp=['THERE', 'WERE', 'CERTAINLY', 'NO', 'NEAR', 'THE', 'SOLUTION', 'OF', 'THEIR', 'PROBLEM'] +6930-76324-0002-23: ref=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0002-23: hyp=['THE', 'POOR', 'LITTLE', 'THINGS', 'CRIED', 'CYNTHIA', 'THINK', 'OF', 'THEM', 'HAVING', 'BEEN', 'TURNED', 'TO', 'THE', 'WALL', 'ALL', 'THESE', 'YEARS'] +6930-76324-0003-24: ref=['NOW', 'WHAT', 'WAS', 'THE', 'SENSE', 'OF', 'IT', 'TWO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0003-24: hyp=['NOW', 'WHAT', 'IS', 'THE', 'SENSE', 'OF', 'IT', 'TOO', 'INNOCENT', 'BABIES', 'LIKE', 'THAT'] +6930-76324-0004-25: ref=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 
'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0004-25: hyp=['BUT', 'JOYCE', 'HAD', 'NOT', 'BEEN', 'LISTENING', 'ALL', 'AT', 'ONCE', 'SHE', 'PUT', 'DOWN', 'HER', 'CANDLE', 'ON', 'THE', 'TABLE', 'AND', 'FACED', 'HER', 'COMPANION'] +6930-76324-0005-26: ref=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0005-26: hyp=['THE', 'TWIN', 'BROTHER', 'DID', 'SOMETHING', 'SHE', "DIDN'T", 'LIKE', 'AND', 'SHE', 'TURNED', 'HIS', 'PICTURE', 'TO', 'THE', 'WALL'] +6930-76324-0006-27: ref=['HERS', 'HAPPENED', 'TO', 'BE', 'IN', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'THAT'] +6930-76324-0006-27: hyp=['HERS', 'HAPPENED', 'TO', 'BE', 'ON', 'THE', 'SAME', 'FRAME', 'TOO', 'BUT', 'SHE', 'EVIDENTLY', "DIDN'T", 'CARE', 'ABOUT', 'IT'] +6930-76324-0007-28: ref=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SPRAGUE'] +6930-76324-0007-28: hyp=['NOW', 'WHAT', 'HAVE', 'YOU', 'TO', 'SAY', 'CYNTHIA', 'SPRAGUE'] +6930-76324-0008-29: ref=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0008-29: hyp=['I', 'THOUGHT', 'WE', 'WERE', 'STUMPED', 'AGAIN', 'WHEN', 'I', 'FIRST', 'SAW', 'THAT', 'PICTURE', 'BUT', "IT'S", 'BEEN', 'OF', 'SOME', 'USE', 'AFTER', 'ALL'] +6930-76324-0009-30: ref=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0009-30: hyp=['DO', 'YOU', 'SUPPOSE', 'THE', 'MINIATURE', 'WAS', 'A', 'COPY', 'OF', 'THE', 'SAME', 'THING'] +6930-76324-0010-31: ref=['WHAT', 'IN', 'THE', 'WORLD', 'IS', 'THAT', 'QUERIED', 'JOYCE'] +6930-76324-0010-31: hyp=['WHEN', 'IN', 'THE', 'WORLD', 'IS', 'IT', 'QUERIED', 'JOYCE'] +6930-76324-0011-32: ref=['THEY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'CYN'] +6930-76324-0011-32: hyp=['MAY', 'WORRY', 'ME', 'TERRIBLY', 'AND', 'BESIDES', "I'D", 'LIKE', 'TO', 'SEE', 'WHAT', 'THIS', 'LOVELY', 'FURNITURE', 'LOOKS', 'LIKE', 'WITHOUT', 'SUCH', 'QUANTITIES', 'OF', 'DUST', 'ALL', 'OVER', 'IT', 'GOOD', 'SCHEME', 'SIN'] +6930-76324-0012-33: ref=["WE'LL", 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HAVE', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0012-33: hyp=['WILL', 'COME', 'IN', 'HERE', 'THIS', 'AFTERNOON', 'WITH', 'OLD', 'CLOTHES', 'ON', 'AND', 'HALF', 'A', 'REGULAR', 'HOUSE', 'CLEANING'] +6930-76324-0013-34: ref=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0013-34: hyp=['IT', "CAN'T", 'HURT', 'ANYTHING', "I'M", 'SURE', 'FOR', 'WE', "WON'T", 'DISTURB', 'THINGS', 'AT', 'ALL'] +6930-76324-0014-35: ref=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0014-35: hyp=['THIS', 'THOUGHT', 'HOWEVER', 'DID', 'NOT', 'ENTER', 'THE', 'HEADS', 'OF', 'THE', 'ENTHUSIASTIC', 'PAIR'] +6930-76324-0015-36: ref=['SMUGGLING', 'THE', 'HOUSE', 'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISK', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0015-36: hyp=['SMUGGLING', 'THE', 'HOUSE', 
'CLEANING', 'PARAPHERNALIA', 'INTO', 'THE', 'CELLAR', 'WINDOW', 'UNOBSERVED', 'THAT', 'AFTERNOON', 'PROVED', 'NO', 'EASY', 'TASK', 'FOR', 'CYNTHIA', 'HAD', 'ADDED', 'A', 'WHISKED', 'BROOM', 'AND', 'DUST', 'PAN', 'TO', 'THE', 'OUTFIT'] +6930-76324-0016-37: ref=['THE', 'LURE', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKILY', 'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0016-37: hyp=['THE', 'LOWER', 'PROVED', 'TOO', 'MUCH', 'FOR', 'HIM', 'AND', 'HE', 'CAME', 'SPORTING', 'AFTER', 'IT', 'AS', 'FRISKLY', 'AS', 'A', 'YOUNG', 'KITTEN', 'MUCH', 'TO', "CYNTHIA'S", 'DELIGHT', 'WHEN', 'SHE', 'CAUGHT', 'SIGHT', 'OF', 'HIM'] +6930-76324-0017-38: ref=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0017-38: hyp=['OH', 'LET', 'HIM', 'COME', 'ALONG', 'SHE', 'URGED', 'I', 'DO', 'LOVE', 'TO', 'SEE', 'HIM', 'ABOUT', 'THAT', 'OLD', 'HOUSE'] +6930-76324-0018-39: ref=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0018-39: hyp=['HE', 'MAKES', 'IT', 'SORT', 'OF', 'COZIER'] +6930-76324-0019-40: ref=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0019-40: hyp=['NOW', "LET'S", 'DUST', 'THE', 'FURNITURE', 'AND', 'PICTURES'] +6930-76324-0020-41: ref=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0020-41: hyp=['YET', 'LITTLE', 'AS', 'IT', 'WAS', 'IT', 'HAD', 'ALREADY', 'MADE', 'A', 'VAST', 'DIFFERENCE', 'IN', 'THE', 'ASPECT', 'OF', 'THE', 'ROOM'] +6930-76324-0021-42: ref=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0021-42: hyp=['SURFACE', 'DUST', 'AT', 'LEAST', 'HAD', 'BEEN', 'REMOVED', 'AND', 'THE', 'FINE', 'OLD', 'FURNITURE', 'GAVE', 'A', 'HINT', 'OF', 'ITS', 'REAL', 'ELEGANCE', 'AND', 'POLISH'] +6930-76324-0022-43: ref=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0022-43: hyp=['THEN', 'SHE', 'SUDDENLY', 'REMARKED'] +6930-76324-0023-44: ref=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0023-44: hyp=['AND', 'MY', 'POCKET', 'MONEY', 'IS', 'GETTING', 'LOW', 'AGAIN', 'AND', 'YOU', "HAVEN'T", 'ANY', 'LEFT', 'AS', 'USUAL'] +6930-76324-0024-45: ref=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLE', 'LIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0024-45: hyp=['THEY', 'SAY', 'ILLUMINATION', 'BY', 'CANDLELIGHT', 'IS', 'THE', 'PRETTIEST', 'IN', 'THE', 'WORLD'] +6930-76324-0025-46: ref=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0025-46: hyp=['WHY', "IT'S", 'GOLIATH', 'AS', 'USUAL', 'THEY', 'BOTH', 'CRIED', 'PEERING', 'IN'] +6930-76324-0026-47: ref=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0026-47: hyp=["ISN'T", 'HE', 'THE', 'GREATEST', 'FOR', 'GETTING', 'INTO', 'ODD', 'CORNERS'] +6930-76324-0027-48: ref=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'AN', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0027-48: hyp=['FORGETTING', 'ALL', 'THEIR', 'WEARINESS', 'THEY', 'SEIZED', 'THEIR', 
'CANDLES', 'AND', 'SCURRIED', 'THROUGH', 'THE', 'HOUSE', 'FINDING', 'ON', 'OCCASIONAL', 'PAPER', 'TUCKED', 'AWAY', 'IN', 'SOME', 'ODD', 'CORNER'] +6930-76324-0028-49: ref=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] +6930-76324-0028-49: hyp=['WELL', "I'M", 'CONVINCED', 'THAT', 'THE', 'BOARDED', 'UP', 'HOUSE', 'MYSTERY', 'HAPPENED', 'NOT', 'EARLIER', 'THAN', 'APRIL', 'SIXTEENTH', 'EIGHTEEN', 'SIXTY', 'ONE', 'AND', 'PROBABLY', 'NOT', 'MUCH', 'LATER'] +6930-81414-0000-50: ref=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0000-50: hyp=['NO', 'WORDS', 'WERE', 'SPOKEN', 'NO', 'LANGUAGE', 'WAS', 'UTTERED', 'SAVE', 'THAT', 'OF', 'WAILING', 'AND', 'HISSING', 'AND', 'THAT', 'SOMEHOW', 'WAS', 'INDISTINCT', 'AS', 'IF', 'IT', 'EXISTED', 'IN', 'FANCY', 'AND', 'NOT', 'IN', 'REALITY'] +6930-81414-0001-51: ref=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFAR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0001-51: hyp=['I', 'HEARD', 'A', 'NOISE', 'BEHIND', 'I', 'TURNED', 'AND', 'SAW', 'KAFFIR', 'HIS', 'BLACK', 'EYES', 'SHINING', 'WHILE', 'IN', 'HIS', 'HAND', 'HE', 'HELD', 'A', 'GLEAMING', 'KNIFE', 'HE', 'LIFTED', 'IT', 'ABOVE', 'HIS', 'HEAD', 'AS', 'IF', 'TO', 'STRIKE', 'BUT', 'I', 'HAD', 'THE', 'STRENGTH', 'OF', 'TEN', 'MEN', 'AND', 'I', 'HURLED', 'HIM', 'FROM', 'ME'] +6930-81414-0002-52: ref=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0002-52: hyp=['ONWARD', 'SAID', 'A', 'DISTANT', 'VOICE'] +6930-81414-0003-53: ref=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0003-53: hyp=['NO', 'SOUND', 'BROKE', 'THE', 'STILLNESS', 'OF', 'THE', 'NIGHT'] +6930-81414-0004-54: ref=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0004-54: hyp=['THE', 'STORY', 'OF', 'ITS', 'EVIL', 'INFLUENCE', 'CAME', 'BACK', 'TO', 'ME', 'AND', 'IN', 'MY', 'BEWILDERED', 'CONDITION', 'I', 'WONDERED', 'WHETHER', 'THERE', 'WAS', 'NOT', 'SOME', 'TRUTH', 'IN', 'WHAT', 'HAD', 'BEEN', 'SAID'] +6930-81414-0005-55: ref=['WHAT', 'WAS', 'THAT'] +6930-81414-0005-55: hyp=['WHAT', 'WAS', 'THAT'] +6930-81414-0006-56: ref=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0006-56: hyp=['WHAT', 'THEN', 'A', 'HUMAN', 'HAND', 'LARGE', 'AND', 'SHAPELY', 'APPEARED', 'DISTINCTLY', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'POND'] +6930-81414-0007-57: ref=['NOTHING', 'MORE', 'NOT', 'EVEN', 'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0007-57: hyp=['NOTHING', 'MORE', 'NOT', 'EVEN', 'THE', 'WRIST', 'TO', 'WHICH', 'IT', 'MIGHT', 'BE', 'ATTACHED'] +6930-81414-0008-58: ref=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 
'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0008-58: hyp=['IT', 'DID', 'NOT', 'BECKON', 'OR', 'INDEED', 'MOVE', 'AT', 'ALL', 'IT', 'WAS', 'AS', 'STILL', 'AS', 'THE', 'HAND', 'OF', 'DEATH'] +6930-81414-0009-59: ref=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'A', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFAR'] +6930-81414-0009-59: hyp=['I', 'AWOKE', 'TO', 'CONSCIOUSNESS', 'FIGHTING', 'AT', 'FIRST', 'IT', 'SEEMED', 'AS', 'IF', 'I', 'WAS', 'FIGHTING', 'WITH', 'THE', 'PHANTOM', 'BUT', 'GRADUALLY', 'MY', 'OPPONENT', 'BECAME', 'MORE', 'REAL', 'TO', 'ME', 'IT', 'WAS', 'KAFFIR'] +6930-81414-0010-60: ref=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0010-60: hyp=['A', 'SOUND', 'OF', 'VOICES', 'A', 'FLASH', 'OF', 'LIGHT'] +6930-81414-0011-61: ref=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0011-61: hyp=['A', 'FEELING', 'OF', 'FREEDOM', 'AND', 'I', 'WAS', 'AWAKE', 'WHERE'] +6930-81414-0012-62: ref=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFAR'] +6930-81414-0012-62: hyp=['SAID', 'ANOTHER', 'VOICE', 'WHICH', 'I', 'RECOGNIZED', 'AS', "VOLTAIRE'S", 'KAFFIR'] +6930-81414-0013-63: ref=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0013-63: hyp=['I', 'HAD', 'SCARCELY', 'KNOWN', 'WHAT', 'I', 'HAD', 'BEEN', 'SAYING', 'OR', 'DOING', 'UP', 'TO', 'THIS', 'TIME', 'BUT', 'AS', 'HE', 'SPOKE', 'I', 'LOOKED', 'AT', 'MY', 'HAND'] +6930-81414-0014-64: ref=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLOURED'] +6930-81414-0014-64: hyp=['IN', 'THE', 'LIGHT', 'OF', 'THE', 'MOON', 'I', 'SAW', 'A', 'KNIFE', 'RED', 'WITH', 'BLOOD', 'AND', 'MY', 'HAND', 'TOO', 'WAS', 'ALSO', 'DISCOLORED'] +6930-81414-0015-65: ref=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0015-65: hyp=['I', 'DO', 'NOT', 'KNOW', 'I', 'AM', 'DAZED', 'BEWILDERED'] +6930-81414-0016-66: ref=['BUT', 'THAT', 'IS', "KAFFAR'S", 'KNIFE'] +6930-81414-0016-66: hyp=['BUT', 'THAT', 'IS', "KAFFIR'S", 'KNIFE'] +6930-81414-0017-67: ref=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVENING'] +6930-81414-0017-67: hyp=['I', 'KNOW', 'HE', 'HAD', 'IT', 'THIS', 'VERY', 'EVENING'] +6930-81414-0018-68: ref=['I', 'REMEMBER', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0018-68: hyp=['I', 'REMEMBERED', 'SAYING', 'HAVE', 'WE', 'BEEN', 'TOGETHER'] +6930-81414-0019-69: ref=['VOLTAIRE', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0019-69: hyp=['VOLTAIRE', 'PICKED', 'UP', 'SOMETHING', 'FROM', 'THE', 'GROUND', 'AND', 'LOOKED', 'AT', 'IT'] +6930-81414-0020-70: ref=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0020-70: hyp=['I', 'SAY', 'YOU', 'DO', 'KNOW', 'WHAT', 'THIS', 'MEANS', 'AND', 'YOU', 'MUST', 'TELL', 'US'] +6930-81414-0021-71: ref=['A', 'TERRIBLE', 'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0021-71: hyp=['A', 'TERRIBLE', 'THOUGHT', 'FLASHED', 'INTO', 'MY', 'MIND'] +6930-81414-0022-72: ref=['I', 'HAD', 'AGAIN', 'BEEN', 'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0022-72: hyp=['I', 'HAD', 'AGAIN', 'BEEN', 
'ACTING', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'THIS', "MAN'S", 'POWER'] +6930-81414-0023-73: ref=['PERCHANCE', 'TOO', "KAFFAR'S", 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0023-73: hyp=['PERCHANCE', 'TOO', "KAFFIR'S", 'DEATH', 'MIGHT', 'SERVE', 'HIM', 'IN', 'GOOD', 'STEAD'] +6930-81414-0024-74: ref=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0024-74: hyp=['MY', 'TONGUE', 'REFUSED', 'TO', 'ARTICULATE', 'MY', 'POWER', 'OF', 'SPEECH', 'LEFT', 'ME'] +6930-81414-0025-75: ref=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0025-75: hyp=['MY', 'POSITION', 'WAS', 'TOO', 'TERRIBLE'] +6930-81414-0026-76: ref=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0026-76: hyp=['MY', 'OVERWROUGHT', 'NERVES', 'YIELDED', 'AT', 'LAST'] +6930-81414-0027-77: ref=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +6930-81414-0027-77: hyp=['FOR', 'SOME', 'TIME', 'AFTER', 'THAT', 'I', 'REMEMBERED', 'NOTHING', 'DISTINCTLY'] +7021-79730-0000-1399: ref=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0000-1399: hyp=['THE', 'THREE', 'MODES', 'OF', 'MANAGEMENT'] +7021-79730-0001-1400: ref=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0001-1400: hyp=['TO', 'SUPPOSE', 'THAT', 'THE', 'OBJECT', 'OF', 'THIS', 'WORK', 'IS', 'TO', 'AID', 'IN', 'EFFECTING', 'SUCH', 'A', 'SUBSTITUTION', 'AS', 'THAT', 'IS', 'ENTIRELY', 'TO', 'MISTAKE', 'ITS', 'NATURE', 'AND', 'DESIGN'] +7021-79730-0002-1401: ref=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0002-1401: hyp=['BY', 'REASON', 'AND', 'AFFECTION'] +7021-79730-0003-1402: ref=['AS', 'THE', 'CHAISE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOOR', 'STEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELICATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0003-1402: hyp=['AS', 'THE', 'CHASE', 'DRIVES', 'AWAY', 'MARY', 'STANDS', 'BEWILDERED', 'AND', 'PERPLEXED', 'ON', 'THE', 'DOORSTEP', 'HER', 'MIND', 'IN', 'A', 'TUMULT', 'OF', 'EXCITEMENT', 'IN', 'WHICH', 'HATRED', 'OF', 'THE', 'DOCTOR', 'DISTRUST', 'AND', 'SUSPICION', 'OF', 'HER', 'MOTHER', 'DISAPPOINTMENT', 'VEXATION', 'AND', 'ILL', 'HUMOR', 'SURGE', 'AND', 'SWELL', 'AMONG', 'THOSE', 'DELEGATE', 'ORGANIZATIONS', 'ON', 'WHICH', 'THE', 'STRUCTURE', 'AND', 'DEVELOPMENT', 'OF', 'THE', 'SOUL', 'SO', 'CLOSELY', 'DEPEND', 'DOING', 'PERHAPS', 'AN', 'IRREPARABLE', 'INJURY'] +7021-79730-0004-1403: ref=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHAISE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'UPON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0004-1403: hyp=['THE', 'MOTHER', 'AS', 'SOON', 'AS', 'THE', 'CHASE', 'IS', 'SO', 'FAR', 'TURNED', 'THAT', 'MARY', 'CAN', 'NO', 
'LONGER', 'WATCH', 'THE', 'EXPRESSION', 'OF', 'HER', 'COUNTENANCE', 'GOES', 'AWAY', 'FROM', 'THE', 'DOOR', 'WITH', 'A', 'SMILE', 'OF', 'COMPLACENCY', 'AND', 'SATISFACTION', 'ON', 'HER', 'FACE', 'AT', 'THE', 'INGENUITY', 'AND', 'SUCCESS', 'OF', 'HER', 'LITTLE', 'ARTIFICE'] +7021-79730-0005-1404: ref=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0005-1404: hyp=['SO', 'YOU', 'WILL', 'BE', 'A', 'GOOD', 'GIRL', 'I', 'KNOW', 'AND', 'NOT', 'MAKE', 'ANY', 'TROUBLE', 'BUT', 'WILL', 'STAY', 'AT', 'HOME', 'CONTENTEDLY', "WON'T", 'YOU'] +7021-79730-0006-1405: ref=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'RELIES', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0006-1405: hyp=['THE', 'MOTHER', 'IN', 'MANAGING', 'THE', 'CASE', 'IN', 'THIS', 'WAY', 'REALIZE', 'PARTLY', 'ON', 'CONVINCING', 'THE', 'REASON', 'OF', 'THE', 'CHILD', 'AND', 'PARTLY', 'ON', 'AN', 'APPEAL', 'TO', 'HER', 'AFFECTION'] +7021-79730-0007-1406: ref=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0007-1406: hyp=['IF', 'YOU', 'SHOULD', 'NOT', 'BE', 'A', 'GOOD', 'GIRL', 'BUT', 'SHOULD', 'SHOW', 'SIGNS', 'OF', 'MAKING', 'US', 'ANY', 'TROUBLE', 'I', 'SHALL', 'HAVE', 'TO', 'SEND', 'YOU', 'OUT', 'SOMEWHERE', 'TO', 'THE', 'BACK', 'PART', 'OF', 'THE', 'HOUSE', 'UNTIL', 'WE', 'ARE', 'GONE'] +7021-79730-0008-1407: ref=['BUT', 'THIS', 'LAST', 'SUPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0008-1407: hyp=['BUT', 'THIS', 'LAST', 'SUPPOSITION', 'IS', 'ALMOST', 'ALWAYS', 'UNNECESSARY', 'FOR', 'IF', 'MARY', 'HAS', 'BEEN', 'HABITUALLY', 'MANAGED', 'ON', 'THIS', 'PRINCIPLE', 'SHE', 'WILL', 'NOT', 'MAKE', 'ANY', 'TROUBLE'] +7021-79730-0009-1408: ref=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CAN', 'NOT', 'BE', 'OVERRATED'] +7021-79730-0009-1408: hyp=['IT', 'IS', 'INDEED', 'TRUE', 'THAT', 'THE', 'IMPORTANCE', 'OF', 'TACT', 'AND', 'SKILL', 'IN', 'THE', 'TRAINING', 'OF', 'THE', 'YOUNG', 'AND', 'OF', 'CULTIVATING', 'THEIR', 'REASON', 'AND', 'SECURING', 'THEIR', 'AFFECTION', 'CANNOT', 'BE', 'OVERRATED'] +7021-79740-0000-1384: ref=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0000-1384: hyp=['TO', 'SUCH', 'PERSONS', 'THESE', 'INDIRECT', 'MODES', 'OF', 'TRAINING', 'CHILDREN', 'IN', 'HABITS', 'OF', 'SUBORDINATION', 'TO', 'THEIR', 'WILL', 'OR', 'RATHER', 'OF', 'YIELDING', 'TO', 'THEIR', 'INFLUENCE', 'ARE', 'SPECIALLY', 'USEFUL'] +7021-79740-0001-1385: ref=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0001-1385: hyp=['DELLA', 'HAD', 'A', 'YOUNG', 'SISTER', 
'NAMED', 'MARIA', 'AND', 'A', 'COUSIN', 'WHOSE', 'NAME', 'WAS', 'JANE'] +7021-79740-0002-1386: ref=['NOW', 'DELIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 'INFLUENCE', 'AND', 'ASCENDENCY', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0002-1386: hyp=['NOW', 'GALLIA', 'CONTRIVED', 'TO', 'OBTAIN', 'A', 'GREAT', 'INFLUENCE', 'AND', 'A', 'SCENE', 'OVER', 'THE', 'MINDS', 'OF', 'THE', 'CHILDREN', 'BY', 'MEANS', 'OF', 'THESE', 'DOLLS'] +7021-79740-0003-1387: ref=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0003-1387: hyp=['TO', 'GIVE', 'AN', 'IDEA', 'OF', 'THESE', 'CONVERSATIONS', 'I', 'WILL', 'REPORT', 'ONE', 'OF', 'THEM', 'IN', 'FULL'] +7021-79740-0004-1388: ref=['YOU', 'HAVE', 'COME', 'ANDELLA', 'ANDELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DOLL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0004-1388: hyp=['YOU', 'HAVE', 'COME', 'AMDELLA', 'AND', 'DELLA', 'WAS', 'THE', 'NAME', 'OF', "JANE'S", 'DAL', 'TO', 'MAKE', 'ROSALIE', 'A', 'VISIT'] +7021-79740-0005-1389: ref=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0005-1389: hyp=['I', 'AM', 'VERY', 'GLAD'] +7021-79740-0006-1390: ref=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANDELLA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0006-1390: hyp=['I', 'EXPECT', 'YOU', 'HAVE', 'BEEN', 'A', 'VERY', 'GOOD', 'GIRL', 'ANNE', 'DELA', 'SINCE', 'YOU', 'WERE', 'HERE', 'LAST'] +7021-79740-0007-1391: ref=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0007-1391: hyp=['THEN', 'TURNING', 'TO', 'JANE', 'SHE', 'ASKED', 'IN', 'A', 'SOMEWHAT', 'ALTERED', 'TONE', 'HAS', 'SHE', 'BEEN', 'A', 'GOOD', 'GIRL', 'JANE'] +7021-79740-0008-1392: ref=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0008-1392: hyp=['FOR', 'INSTANCE', 'ONE', 'DAY', 'THE', 'CHILDREN', 'HAD', 'BEEN', 'PLAYING', 'UPON', 'THE', 'PIAZZA', 'WITH', 'BLOCKS', 'AND', 'OTHER', 'PLAYTHINGS', 'AND', 'FINALLY', 'HAD', 'GONE', 'INTO', 'THE', 'HOUSE', 'LEAVING', 'ALL', 'THE', 'THINGS', 'ON', 'THE', 'FLOOR', 'OF', 'THE', 'PIAZZA', 'INSTEAD', 'OF', 'PUTTING', 'THEM', 'AWAY', 'IN', 'THEIR', 'PLACES', 'AS', 'THEY', 'OUGHT', 'TO', 'HAVE', 'DONE'] +7021-79740-0009-1393: ref=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOR'] +7021-79740-0009-1393: hyp=['THEY', 'WERE', 'NOW', 'PLAYING', 'WITH', 'THEIR', 'DOLLS', 'IN', 'THE', 'PARLOR'] +7021-79740-0010-1394: ref=['DELIA', 'CAME', 'TO', 'THE', 'PARLOR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDELLA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] +7021-79740-0010-1394: hyp=['DAHLIA', 'CAME', 'TO', 'THE', 'PARLOUR', 'AND', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'MYSTERY', 'BECKONED', 'THE', 'CHILDREN', 'ASIDE', 'AND', 'SAID', 'TO', 'THEM', 'IN', 'A', 'WHISPER', 'LEAVE', 'ANDDELA', 'AND', 'ROSALIE', 'HERE', 'AND', "DON'T", 'SAY', 'A', 'WORD', 'TO', 'THEM'] 
+7021-79740-0011-1395: ref=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0011-1395: hyp=['SO', 'SAYING', 'SHE', 'LED', 'THE', 'WAY', 'ON', 'TIPTOE', 'FOLLOWED', 'BY', 'THE', 'CHILDREN', 'OUT', 'OF', 'THE', 'ROOM', 'AND', 'ROUND', 'BY', 'A', 'CIRCUITOUS', 'ROUTE', 'TO', 'THE', 'PIAZZA', 'THERE'] +7021-79740-0012-1396: ref=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0012-1396: hyp=['SAID', 'SHE', 'POINTING', 'TO', 'THE', 'PLAYTHINGS', 'SEE'] +7021-79740-0013-1397: ref=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANY', 'THING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0013-1397: hyp=['PUT', 'THESE', 'PLAYTHINGS', 'ALL', 'AWAY', 'QUICK', 'AND', 'CAREFULLY', 'AND', 'WE', 'WILL', 'NOT', 'LET', 'THEM', 'KNOW', 'ANYTHING', 'ABOUT', 'YOUR', 'LEAVING', 'THEM', 'OUT'] +7021-79740-0014-1398: ref=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79740-0014-1398: hyp=['AND', 'THIS', 'METHOD', 'OF', 'TREATING', 'THE', 'CASE', 'WAS', 'MUCH', 'MORE', 'EFFECTUAL', 'IN', 'MAKING', 'THEM', 'DISPOSED', 'TO', 'AVOID', 'COMMITTING', 'A', 'SIMILAR', 'FAULT', 'ANOTHER', 'TIME', 'THAN', 'ANY', 'DIRECT', 'REBUKES', 'OR', 'EXPRESSIONS', 'OF', 'DISPLEASURE', 'ADDRESSED', 'PERSONALLY', 'TO', 'THEM', 'WOULD', 'HAVE', 'BEEN'] +7021-79759-0000-1378: ref=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0000-1378: hyp=['NATURE', 'OF', 'THE', 'EFFECT', 'PRODUCED', 'BY', 'EARLY', 'IMPRESSIONS'] +7021-79759-0001-1379: ref=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0001-1379: hyp=['THAT', 'IS', 'COMPARATIVELY', 'NOTHING'] +7021-79759-0002-1380: ref=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0002-1380: hyp=['THEY', 'ARE', 'CHIEFLY', 'FORMED', 'FROM', 'COMBINATIONS', 'OF', 'THE', 'IMPRESSIONS', 'MADE', 'IN', 'CHILDHOOD'] +7021-79759-0003-1381: ref=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0003-1381: hyp=['VAST', 'IMPORTANCE', 'AND', 'INFLUENCE', 'OF', 'THIS', 'MENTAL', 'FURNISHING'] +7021-79759-0004-1382: ref=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AN', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'IN', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESSES', 'UPON', 'THE', 'MIND'] +7021-79759-0004-1382: hyp=['WITHOUT', 'GOING', 'TO', 'ANY', 'SUCH', 'EXTREME', 'AS', 'THIS', 'WE', 'CAN', 'EASILY', 'SEE', 'ON', 'REFLECTION', 'HOW', 'VAST', 'AN', 'INFLUENCE', 'ON', 'THE', 'IDEAS', 'AND', 'CONCEPTIONS', 'AS', 'WELL', 'AS', 'ON', 'THE', 'PRINCIPLES', 'OF', 'ACTION', 'AND', 'MATURE', 'YEARS', 'MUST', 'BE', 'EXERTED', 'BY', 'THE', 'NATURE', 'AND', 'CHARACTER', 'OF', 
'THE', 'IMAGES', 'WHICH', 'THE', 'PERIOD', 'OF', 'INFANCY', 'AND', 'CHILDHOOD', 'IMPRESS', 'UPON', 'THE', 'MIND'] +7021-79759-0005-1383: ref=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-79759-0005-1383: hyp=['THE', 'PAIN', 'PRODUCED', 'BY', 'AN', 'ACT', 'OF', 'HASTY', 'AND', 'ANGRY', 'VIOLENCE', 'TO', 'WHICH', 'A', 'FATHER', 'SUBJECTS', 'HIS', 'SON', 'MAY', 'SOON', 'PASS', 'AWAY', 'BUT', 'THE', 'MEMORY', 'OF', 'IT', 'DOES', 'NOT', 'PASS', 'AWAY', 'WITH', 'THE', 'PAIN'] +7021-85628-0000-1409: ref=['BUT', 'ANDERS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0000-1409: hyp=['BUT', 'ANDREWS', 'CARED', 'NOTHING', 'ABOUT', 'THAT'] +7021-85628-0001-1410: ref=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0001-1410: hyp=['HE', 'MADE', 'A', 'BOW', 'SO', 'DEEP', 'THAT', 'HIS', 'BACK', 'CAME', 'NEAR', 'BREAKING', 'AND', 'HE', 'WAS', 'DUMBFOUNDED', 'I', 'CAN', 'TELL', 'YOU', 'WHEN', 'HE', 'SAW', 'IT', 'WAS', 'NOBODY', 'BUT', 'ANDERS'] +7021-85628-0002-1411: ref=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0002-1411: hyp=['HE', 'WAS', 'SUCH', 'A', 'BIG', 'BOY', 'THAT', 'HE', 'WORE', 'HIGH', 'BOOTS', 'AND', 'CARRIED', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: ref=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACK', 'KNIFE'] +7021-85628-0003-1412: hyp=['NOW', 'THIS', 'KNIFE', 'WAS', 'A', 'SPLENDID', 'ONE', 'THOUGH', 'HALF', 'THE', 'BLADE', 'WAS', 'GONE', 'AND', 'THE', 'HANDLE', 'WAS', 'A', 'LITTLE', 'CRACKED', 'AND', 'ANDERS', 'KNEW', 'THAT', 'ONE', 'IS', 'ALMOST', 'A', 'MAN', 'AS', 'SOON', 'AS', 'ONE', 'HAS', 'A', 'JACKKNIFE'] +7021-85628-0004-1413: ref=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDERS'] +7021-85628-0004-1413: hyp=['YES', 'WHY', 'NOT', 'THOUGHT', 'ANDREWS'] +7021-85628-0005-1414: ref=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0005-1414: hyp=['SEEING', 'THAT', 'I', 'AM', 'SO', 'FINE', 'I', 'MAY', 'AS', 'WELL', 'GO', 'AND', 'VISIT', 'THE', 'KING'] +7021-85628-0006-1415: ref=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDERS'] +7021-85628-0006-1415: hyp=['I', 'AM', 'GOING', 'TO', 'THE', 'COURT', 'BALL', 'ANSWERED', 'ANDRES'] +7021-85628-0007-1416: ref=['AND', 'SHE', 'TOOK', 'ANDERS', 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] +7021-85628-0007-1416: hyp=['AND', 'SHE', 'TOOK', "ANDREW'S", 'HAND', 'AND', 'WALKED', 'WITH', 'HIM', 'UP', 'THE', 'BROAD', 'MARBLE', 'STAIRS', 'WHERE', 'SOLDIERS', 'WERE', 'POSTED', 'AT', 'EVERY', 'THIRD', 'STEP', 'AND', 'THROUGH', 'THE', 'MAGNIFICENT', 'HALLS', 'WHERE', 'COURTIERS', 'IN', 'SILK', 'AND', 'VELVET', 
'STOOD', 'BOWING', 'WHEREVER', 'HE', 'WENT'] +7021-85628-0008-1417: ref=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] +7021-85628-0008-1417: hyp=['FOR', 'LIKE', 'AS', 'NOT', 'THEY', 'MUST', 'HAVE', 'THOUGHT', 'HIM', 'A', 'PRINCE', 'WHEN', 'THEY', 'SAW', 'HIS', 'FINE', 'CAP'] +7021-85628-0009-1418: ref=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0009-1418: hyp=['AT', 'THE', 'FARTHER', 'END', 'OF', 'THE', 'LARGEST', 'HALL', 'A', 'TABLE', 'WAS', 'SET', 'WITH', 'GOLDEN', 'CUPS', 'AND', 'GOLDEN', 'PLATES', 'IN', 'LONG', 'ROWS'] +7021-85628-0010-1419: ref=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0010-1419: hyp=['ON', 'HUGE', 'SILVER', 'PLATTERS', 'WERE', 'PYRAMIDS', 'OF', 'TARTS', 'AND', 'CAKES', 'AND', 'RED', 'WINE', 'SPARKLED', 'IN', 'GLITTERING', 'DECANTERS'] +7021-85628-0011-1420: ref=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDERS', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0011-1420: hyp=['THE', 'PRINCESS', 'SAT', 'DOWN', 'UNDER', 'A', 'BLUE', 'CANOPY', 'WITH', 'BOUQUETS', 'OF', 'ROSES', 'AND', 'SHE', 'LET', 'ANDRE', 'SIT', 'IN', 'A', 'GOLDEN', 'CHAIR', 'BY', 'HER', 'SIDE'] +7021-85628-0012-1421: ref=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0012-1421: hyp=['BUT', 'YOU', 'MUST', 'NOT', 'EAT', 'WITH', 'YOUR', 'CAP', 'ON', 'YOUR', 'HEAD', 'SHE', 'SAID', 'AND', 'WAS', 'GOING', 'TO', 'TAKE', 'IT', 'OFF'] +7021-85628-0013-1422: ref=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0013-1422: hyp=['THE', 'PRINCESS', 'CERTAINLY', 'WAS', 'BEAUTIFUL', 'AND', 'HE', 'WOULD', 'HAVE', 'DEARLY', 'LIKED', 'TO', 'BE', 'KISSED', 'BY', 'HER', 'BUT', 'THE', 'CAP', 'WHICH', 'HIS', 'MOTHER', 'HAD', 'MADE', 'HE', 'WOULD', 'NOT', 'GIVE', 'UP', 'ON', 'ANY', 'CONDITION'] +7021-85628-0014-1423: ref=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0014-1423: hyp=['HE', 'ONLY', 'SHOOK', 'HIS', 'HEAD'] +7021-85628-0015-1424: ref=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'AROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0015-1424: hyp=['WELL', 'BUT', 'NOW', 'SAID', 'THE', 'PRINCESS', 'AND', 'SHE', 'FILLED', 'HIS', 'POCKETS', 'WITH', 'CAKES', 'AND', 'PUT', 'HER', 'OWN', 'HEAVY', 'GOLD', 'CHAIN', 'AROUND', 'HIS', 'NECK', 'AND', 'BENT', 'DOWN', 'AND', 'KISSED', 'HIM'] +7021-85628-0016-1425: ref=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] +7021-85628-0016-1425: hyp=['THAT', 'IS', 'A', 'VERY', 'FINE', 'CAP', 'YOU', 'HAVE', 'HE', 'SAID'] +7021-85628-0017-1426: ref=['SO', 'IT', 'IS', 'SAID', 'ANDERS'] +7021-85628-0017-1426: hyp=['SO', 'IT', 'IS', 'SAID', 'ANDREWS'] +7021-85628-0018-1427: ref=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 
'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0018-1427: hyp=['AND', 'IT', 'IS', 'MADE', 'OF', "MOTHER'S", 'BEST', 'YARN', 'AND', 'SHE', 'KNITTED', 'IT', 'HERSELF', 'AND', 'EVERYBODY', 'WANTS', 'TO', 'GET', 'IT', 'AWAY', 'FROM', 'ME'] +7021-85628-0019-1428: ref=['WITH', 'ONE', 'JUMP', 'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0019-1428: hyp=['WITH', 'ONE', 'JUMP', 'ANDERS', 'GOT', 'OUT', 'OF', 'HIS', 'CHAIR'] +7021-85628-0020-1429: ref=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0020-1429: hyp=['HE', 'DARTED', 'LIKE', 'AN', 'ARROW', 'THROUGH', 'ALL', 'THE', 'HALLS', 'DOWN', 'ALL', 'THE', 'STAIRS', 'AND', 'ACROSS', 'THE', 'YARD'] +7021-85628-0021-1430: ref=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0021-1430: hyp=['HE', 'STILL', 'HELD', 'ON', 'TO', 'IT', 'WITH', 'BOTH', 'HANDS', 'AS', 'HE', 'RUSHED', 'INTO', 'HIS', "MOTHER'S", 'COTTAGE'] +7021-85628-0022-1431: ref=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0022-1431: hyp=['AND', 'ALL', 'HIS', 'BROTHERS', 'AND', 'SISTERS', 'STOOD', 'ROUND', 'AND', 'LISTENED', 'WITH', 'THEIR', 'MOUTHS', 'OPEN'] +7021-85628-0023-1432: ref=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDERS', 'WAS', 'A', 'STUPID'] +7021-85628-0023-1432: hyp=['BUT', 'WHEN', 'HIS', 'BIG', 'BROTHER', 'HEARD', 'THAT', 'HE', 'HAD', 'REFUSED', 'TO', 'GIVE', 'HIS', 'CAP', 'FOR', 'A', "KING'S", 'GOLDEN', 'CROWN', 'HE', 'SAID', 'THAT', 'ANDERS', 'WAS', 'A', 'STUPID'] +7021-85628-0024-1433: ref=['ANDERS', 'FACE', 'GREW', 'RED'] +7021-85628-0024-1433: hyp=["ANDREW'S", 'FACE', 'GREW', 'RED'] +7021-85628-0025-1434: ref=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0025-1434: hyp=['BUT', 'HIS', 'MOTHER', 'HUGGED', 'HIM', 'CLOSE'] +7021-85628-0026-1435: ref=['NO', 'MY', 'LITTLE', 'SON', 'SHE', 'SAID'] +7021-85628-0026-1435: hyp=['NO', 'MY', 'LITTLE', 'FUN', 'SHE', 'SAID'] +7021-85628-0027-1436: ref=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7021-85628-0027-1436: hyp=['IF', 'YOU', 'DRESSED', 'IN', 'SILK', 'AND', 'GOLD', 'FROM', 'TOP', 'TO', 'TOE', 'YOU', 'COULD', 'NOT', 'LOOK', 'ANY', 'NICER', 'THAN', 'IN', 'YOUR', 'LITTLE', 'RED', 'CAP'] +7127-75946-0000-467: ref=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0000-467: hyp=['AT', 'THE', 'CONCLUSION', 'OF', 'THE', 'BANQUET', 'WHICH', 'WAS', 'SERVED', 'AT', 'FIVE', "O'CLOCK", 'THE', 'KING', 'ENTERED', 'HIS', 'CABINET', 'WHERE', 'HIS', 'TAILORS', 'WERE', 'AWAITING', 'HIM', 'FOR', 'THE', 'PURPOSE', 'OF', 'TRYING', 'ON', 'THE', 
'CELEBRATED', 'COSTUME', 'REPRESENTING', 'SPRING', 'WHICH', 'WAS', 'THE', 'RESULT', 'OF', 'SO', 'MUCH', 'IMAGINATION', 'AND', 'HAD', 'COST', 'SO', 'MANY', 'EFFORTS', 'OF', 'THOUGHT', 'TO', 'THE', 'DESIGNERS', 'AND', 'ORNAMENT', 'WORKERS', 'OF', 'THE', 'COURT'] +7127-75946-0001-468: ref=['AH', 'VERY', 'WELL'] +7127-75946-0001-468: hyp=['AH', 'VERY', 'WELL'] +7127-75946-0002-469: ref=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'AU', 'COURANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0002-469: hyp=['LET', 'HIM', 'COME', 'IN', 'THEN', 'SAID', 'THE', 'KING', 'AND', 'AS', 'IF', 'COLBERT', 'HAD', 'BEEN', 'LISTENING', 'AT', 'THE', 'DOOR', 'FOR', 'THE', 'PURPOSE', 'OF', 'KEEPING', 'HIMSELF', 'OKARRANT', 'WITH', 'THE', 'CONVERSATION', 'HE', 'ENTERED', 'AS', 'SOON', 'AS', 'THE', 'KING', 'HAD', 'PRONOUNCED', 'HIS', 'NAME', 'TO', 'THE', 'TWO', 'COURTIERS'] +7127-75946-0003-470: ref=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'AIGNAN', 'AND', 'VILLEROY', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0003-470: hyp=['GENTLEMEN', 'TO', 'YOUR', 'POSTS', 'WHEREUPON', 'SAINT', 'DAN', 'AND', 'VILLEROI', 'TOOK', 'THEIR', 'LEAVE'] +7127-75946-0004-471: ref=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0004-471: hyp=['CERTAINLY', 'SIRE', 'BUT', 'I', 'MUST', 'HAVE', 'MONEY', 'TO', 'DO', 'THAT', 'WHAT'] +7127-75946-0005-472: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUIS'] +7127-75946-0005-472: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'INQUIRED', 'LOUISE'] +7127-75946-0006-473: ref=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0006-473: hyp=['HE', 'HAS', 'GIVEN', 'THEM', 'WITH', 'TOO', 'MUCH', 'GRACE', 'NOT', 'TO', 'HAVE', 'OTHERS', 'STILL', 'TO', 'GIVE', 'IF', 'THEY', 'ARE', 'REQUIRED', 'WHICH', 'IS', 'THE', 'CASE', 'AT', 'THE', 'PRESENT', 'MOMENT'] +7127-75946-0007-474: ref=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0007-474: hyp=['IT', 'IS', 'NECESSARY', 'THEREFORE', 'THAT', 'HE', 'SHOULD', 'COMPLY', 'THE', 'KING', 'FROWNED'] +7127-75946-0008-475: ref=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0008-475: hyp=['DOES', 'YOUR', 'MAJESTY', 'THEN', 'NO', 'LONGER', 'BELIEVE', 'THE', 'DISLOYAL', 'ATTEMPT'] +7127-75946-0009-476: ref=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0009-476: hyp=['NOT', 'AT', 'ALL', 'YOU', 'ARE', 'ON', 'THE', 'CONTRARY', 'MOST', 'AGREEABLE', 'TO', 'ME'] +7127-75946-0010-477: ref=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0010-477: hyp=['YOUR', "MAJESTY'S", 'PLAN', 'THEN', 'IN', 'THIS', 'AFFAIR', 'IS'] +7127-75946-0011-478: ref=['YOU', 'WILL', 'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0011-478: hyp=['YOU', 'WILL', 'TAKE', 'THEM', 'FROM', 'MY', 'PRIVATE', 'TREASURE'] +7127-75946-0012-479: ref=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 
'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0012-479: hyp=['THE', 'NEWS', 'CIRCULATED', 'WITH', 'THE', 'RAPIDITY', 'OF', 'LIGHTNING', 'DURING', 'ITS', 'PROGRESS', 'IT', 'KINDLED', 'EVERY', 'VARIETY', 'OF', 'COQUETRY', 'DESIRE', 'AND', 'WILD', 'AMBITION'] +7127-75946-0013-480: ref=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILETTE', 'BY', 'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0013-480: hyp=['THE', 'KING', 'HAD', 'COMPLETED', 'HIS', 'TOILET', 'BY', 'NINE', "O'CLOCK", 'HE', 'APPEARED', 'IN', 'AN', 'OPEN', 'CARRIAGE', 'DECORATED', 'WITH', 'BRANCHES', 'OF', 'TREES', 'AND', 'FLOWERS'] +7127-75946-0014-481: ref=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DIAS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATER', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0014-481: hyp=['THE', 'QUEENS', 'HAD', 'TAKEN', 'THEIR', 'SEATS', 'UPON', 'A', 'MAGNIFICENT', 'DAIS', 'OR', 'PLATFORM', 'ERECTED', 'UPON', 'THE', 'BORDERS', 'OF', 'THE', 'LAKE', 'IN', 'A', 'THEATRE', 'OF', 'WONDERFUL', 'ELEGANCE', 'OF', 'CONSTRUCTION'] +7127-75946-0015-482: ref=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRING', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0015-482: hyp=['SUDDENLY', 'FOR', 'THE', 'PURPOSE', 'OF', 'RESTORING', 'PEACE', 'AND', 'ORDER', 'SPRANG', 'ACCOMPANIED', 'BY', 'HIS', 'WHOLE', 'COURT', 'MADE', 'HIS', 'APPEARANCE'] +7127-75946-0016-483: ref=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0016-483: hyp=['THE', 'SEASONS', 'ALLIES', 'OF', 'SPRING', 'FOLLOWED', 'HIM', 'CLOSELY', 'TO', 'FORM', 'A', 'QUADRILLE', 'WHICH', 'AFTER', 'MANY', 'WORDS', 'OF', 'MORE', 'OR', 'LESS', 'FLATTERING', 'IMPORT', 'WAS', 'THE', 'COMMENCEMENT', 'OF', 'THE', 'DANCE'] +7127-75946-0017-484: ref=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLORED', 'SILKEN', 'HOSE', 'OF', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0017-484: hyp=['HIS', 'LEGS', 'THE', 'BEST', 'SHAPED', 'AT', 'COURT', 'WERE', 'DISPLAYED', 'TO', 'GREAT', 'ADVANTAGE', 'IN', 'FLESH', 'COLOURED', 'SILKEN', 'HOSE', 'A', 'SILK', 'SO', 'FINE', 'AND', 'SO', 'TRANSPARENT', 'THAT', 'IT', 'SEEMED', 'ALMOST', 'LIKE', 'FLESH', 'ITSELF'] +7127-75946-0018-485: ref=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEM', 'TO', 'SOAR', 'ALONG'] +7127-75946-0018-485: hyp=['THERE', 'WAS', 'SOMETHING', 'IN', 'HIS', 'CARRIAGE', 'WHICH', 'RESEMBLED', 'THE', 'BUOYANT', 'MOVEMENTS', 'OF', 'AN', 'IMMORTAL', 'AND', 'HE', 'DID', 'NOT', 'DANCE', 'SO', 'MUCH', 'AS', 'SEEMED', 'TO', 'SOAR', 'ALONG'] +7127-75946-0019-486: ref=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0019-486: hyp=['YES', 'IT', 'IS', 'SUPPRESSED'] +7127-75946-0020-487: ref=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HAVING', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] 
+7127-75946-0020-487: hyp=['FAR', 'FROM', 'IT', 'SIRE', 'YOUR', 'MAJESTY', 'HEAVEN', 'GIVEN', 'NO', 'DIRECTIONS', 'ABOUT', 'IT', 'THE', 'MUSICIANS', 'HAVE', 'RETAINED', 'IT'] +7127-75946-0021-488: ref=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] +7127-75946-0021-488: hyp=['YES', 'SIRE', 'AND', 'READY', 'DRESSED', 'FOR', 'THE', 'BALLET'] +7127-75946-0022-489: ref=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0022-489: hyp=['SIRE', 'HE', 'SAID', 'YOUR', "MAJESTY'S", 'MOST', 'DEVOTED', 'SERVANT', 'APPROACHES', 'TO', 'PERFORM', 'A', 'SERVICE', 'ON', 'THIS', 'OCCASION', 'WITH', 'SIMILAR', 'ZEAL', 'THAT', 'HE', 'HAS', 'ALREADY', 'SHOWN', 'ON', 'THE', 'FIELD', 'OF', 'BATTLE'] +7127-75946-0023-490: ref=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0023-490: hyp=['THE', 'KING', 'SEEMED', 'ONLY', 'PLEASED', 'WITH', 'EVERY', 'ONE', 'PRESENT'] +7127-75946-0024-491: ref=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0024-491: hyp=['MONSIEUR', 'WAS', 'THE', 'ONLY', 'ONE', 'WHO', 'DID', 'NOT', 'UNDERSTAND', 'ANYTHING', 'ABOUT', 'THE', 'MATTER'] +7127-75946-0025-492: ref=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0025-492: hyp=['THE', 'BALLET', 'BEGAN', 'THE', 'EFFECT', 'WAS', 'MORE', 'THAN', 'BEAUTIFUL'] +7127-75946-0026-493: ref=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THE', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATER', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0026-493: hyp=['WHEN', 'THE', 'MUSIC', 'BY', 'ITS', 'BURSTS', 'OF', 'MELODY', 'CARRIED', 'AWAY', 'THESE', 'ILLUSTRIOUS', 'DANCERS', 'WHEN', 'THIS', 'SIMPLE', 'UNTUTORED', 'PANTOMIME', 'OF', 'THAT', 'PERIOD', 'ONLY', 'THE', 'MORE', 'NATURAL', 'ON', 'ACCOUNT', 'OF', 'THE', 'VERY', 'INDIFFERENT', 'ACTING', 'OF', 'THE', 'AUGUST', 'ACTORS', 'HAD', 'REACHED', 'ITS', 'CULMINATING', 'POINT', 'OF', 'TRIUMPH', 'THE', 'THEATRE', 'SHOOK', 'WITH', 'TUMULTUOUS', 'APPLAUSE'] +7127-75946-0027-494: ref=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGEMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0027-494: hyp=['DISDAINFUL', 'OF', 'A', 'SUCCESS', 'OF', 'WHICH', 'MADAME', 'SHOWED', 'NO', 'ACKNOWLEDGMENT', 'HE', 'THOUGHT', 'OF', 'NOTHING', 'BUT', 'BOLDLY', 'REGAINING', 'THE', 'MARKED', 'PREFERENCE', 'OF', 'THE', 'PRINCESS'] +7127-75946-0028-495: ref=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 'SO', 'THAT', 'HIS', 'LIMBS', 'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0028-495: hyp=['BY', 'DEGREES', 'ALL', 'HIS', 'HAPPINESS', 'ALL', 'HIS', 'BRILLIANCY', 'SUBSIDED', 'INTO', 'REGRET', 'AND', 'UNEASINESS', 
'SO', 'THAT', 'HIS', 'LIMBS', 'LOST', 'THEIR', 'POWER', 'HIS', 'ARMS', 'HUNG', 'HEAVILY', 'BY', 'HIS', 'SIDES', 'AND', 'HIS', 'HEAD', 'DROOPED', 'AS', 'THOUGH', 'HE', 'WAS', 'STUPEFIED'] +7127-75946-0029-496: ref=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILLE', 'CAST', 'A', 'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75946-0029-496: hyp=['THE', 'KING', 'WHO', 'HAD', 'FROM', 'THIS', 'MOMENT', 'BECOME', 'IN', 'REALITY', 'THE', 'PRINCIPAL', 'DANCER', 'IN', 'THE', 'QUADRILLE', 'CAST', 'A', 'LOOK', 'UPON', 'HIS', 'VANQUISHED', 'RIVAL'] +7127-75947-0000-426: ref=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDIMMED'] +7127-75947-0000-426: hyp=['EVERY', 'ONE', 'COULD', 'OBSERVE', 'HIS', 'AGITATION', 'AND', 'PROSTRATION', 'A', 'PROSTRATION', 'WHICH', 'WAS', 'INDEED', 'THE', 'MORE', 'REMARKABLE', 'SINCE', 'PEOPLE', 'WERE', 'NOT', 'ACCUSTOMED', 'TO', 'SEE', 'HIM', 'WITH', 'HIS', 'ARMS', 'HANGING', 'LISTLESSLY', 'BY', 'HIS', 'SIDE', 'HIS', 'HEAD', 'BEWILDERED', 'AND', 'HIS', 'EYES', 'WITH', 'ALL', 'THEIR', 'BRIGHT', 'INTELLIGENCE', 'BEDEMNED'] +7127-75947-0001-427: ref=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0001-427: hyp=['UPON', 'THIS', 'MADAME', 'DEIGNED', 'TO', 'TURN', 'HER', 'EYES', 'LANGUISHINGLY', 'TOWARDS', 'THE', 'COMTE', 'OBSERVING'] +7127-75947-0002-428: ref=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0002-428: hyp=['DO', 'YOU', 'THINK', 'SO', 'SHE', 'REPLIED', 'WITH', 'INDIFFERENCE'] +7127-75947-0003-429: ref=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0003-429: hyp=['YES', 'THE', 'CHARACTER', 'WHICH', 'YOUR', 'ROYAL', 'HIGHNESS', 'ASSUMED', 'IS', 'IN', 'PERFECT', 'HARMONY', 'WITH', 'YOUR', 'OWN'] +7127-75947-0004-430: ref=['EXPLAIN', 'YOURSELF'] +7127-75947-0004-430: hyp=['EXPLAIN', 'YOURSELF'] +7127-75947-0005-431: ref=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0005-431: hyp=['I', 'ALLUDE', 'TO', 'THE', 'GODDESS'] +7127-75947-0006-432: ref=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0006-432: hyp=['THE', 'PRINCESS', 'INQUIRED', 'NO'] +7127-75947-0007-433: ref=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0007-433: hyp=['SHE', 'THEN', 'ROSE', 'HUMMING', 'THE', 'AIR', 'TO', 'WHICH', 'SHE', 'WAS', 'PRESENTLY', 'GOING', 'TO', 'DANCE'] +7127-75947-0008-434: ref=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0008-434: hyp=['THE', 'ARROW', 'PIERCED', 'HIS', 'HEART', 'AND', 'WOUNDED', 'HIM', 'MORTALLY'] +7127-75947-0009-435: ref=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 'RETURNED', 'TO', 'THE', 'THEATER', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 
'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THE', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0009-435: hyp=['A', 'QUARTER', 'OF', 'AN', 'HOUR', 'AFTERWARDS', 'HE', 'RETURNED', 'TO', 'THE', 'THEATRE', 'BUT', 'IT', 'WILL', 'BE', 'READILY', 'BELIEVED', 'THAT', 'IT', 'WAS', 'ONLY', 'A', 'POWERFUL', 'EFFORT', 'OF', 'REASON', 'OVER', 'HIS', 'GREAT', 'EXCITEMENT', 'THAT', 'ENABLED', 'HIM', 'TO', 'GO', 'BACK', 'OR', 'PERHAPS', 'FOR', 'LOVE', 'IS', 'THUS', 'STRANGELY', 'CONSTITUTED', 'HE', 'FOUND', 'IT', 'IMPOSSIBLE', 'EVEN', 'TO', 'REMAIN', 'MUCH', 'LONGER', 'SEPARATED', 'FROM', 'THEIR', 'PRESENCE', 'OF', 'ONE', 'WHO', 'HAD', 'BROKEN', 'HIS', 'HEART'] +7127-75947-0010-436: ref=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0010-436: hyp=['WHEN', 'SHE', 'PERCEIVED', 'THE', 'YOUNG', 'MAN', 'SHE', 'ROSE', 'LIKE', 'A', 'WOMAN', 'SURPRISED', 'IN', 'THE', 'MIDST', 'OF', 'IDEAS', 'SHE', 'WAS', 'DESIROUS', 'OF', 'CONCEALING', 'FROM', 'HERSELF'] +7127-75947-0011-437: ref=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0011-437: hyp=['REMAIN', 'I', 'IMPLORE', 'YOU', 'THE', 'EVENING', 'IS', 'MOST', 'LOVELY'] +7127-75947-0012-438: ref=['INDEED', 'AH'] +7127-75947-0012-438: hyp=['INDEED', 'A'] +7127-75947-0013-439: ref=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0013-439: hyp=['I', 'REMEMBER', 'NOW', 'AND', 'I', 'CONGRATULATE', 'MYSELF', 'DO', 'YOU', 'LOVE', 'ANY', 'ONE'] +7127-75947-0014-440: ref=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0014-440: hyp=['FORGIVE', 'ME', 'I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'AM', 'SAYING', 'A', 'THOUSAND', 'TIMES', 'FORGIVE', 'ME', 'MADAME', 'WAS', 'RIGHT', 'QUITE', 'RIGHT', 'THIS', 'BRUTAL', 'EXILE', 'HAS', 'COMPLETELY', 'TURNED', 'MY', 'BRAIN'] +7127-75947-0015-441: ref=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0015-441: hyp=['THERE', 'CANNOT', 'BE', 'A', 'DOUBT', 'HE', 'RECEIVED', 'YOU', 'KINDLY', 'FOR', 'IN', 'FACT', 'YOU', 'RETURNED', 'WITHOUT', 'HIS', 'PERMISSION'] +7127-75947-0016-442: ref=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0016-442: hyp=['OH', 'MADEMOISELLE', 'WHY', 'HAVE', 'I', 'NOT', 'A', 'DEVOTED', 'SISTER', 'OR', 'A', 'TRUE', 'FRIEND', 'SUCH', 'AS', 'YOURSELF'] +7127-75947-0017-443: ref=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0017-443: hyp=['WHAT', 'ALREADY', 'HERE', 'THEY', 'SAID', 'TO', 'HER'] +7127-75947-0018-444: ref=['I', 'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERE'] +7127-75947-0018-444: hyp=['I', 'HAVE', 'BEEN', 'HERE', 'THIS', 'QUARTER', 'OF', 'AN', 'HOUR', 'REPLIED', 'LA', 'VALLIERS'] +7127-75947-0019-445: ref=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0019-445: hyp=['DID', 'NOT', 'THE', 'DANCING', 'AMUSE', 'YOU', 'NO'] +7127-75947-0020-446: ref=['NO', 'MORE', 'THAN', 
'THE', 'DANCING'] +7127-75947-0020-446: hyp=['NO', 'MORE', 'THAN', 'THE', 'DANCING'] +7127-75947-0021-447: ref=['LA', 'VALLIERE', 'IS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONNAY', 'CHARENTE'] +7127-75947-0021-447: hyp=['LA', 'VALLIERS', 'IS', 'QUITE', 'A', 'POETESS', 'SAID', 'TONIET'] +7127-75947-0022-448: ref=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0022-448: hyp=['I', 'AM', 'A', 'WOMAN', 'AND', 'THERE', 'ARE', 'FEW', 'LIKE', 'ME', 'WHOEVER', 'LOVES', 'ME', 'FLATTERS', 'ME', 'WHOEVER', 'FLATTERS', 'ME', 'PLEASES', 'ME', 'AND', 'WHOEVER', 'PLEASES', 'WELL', 'SAID', 'MONTALAIS', 'YOU', 'DO', 'NOT', 'FINISH'] +7127-75947-0023-449: ref=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'LAUGHING', 'LOUDLY'] +7127-75947-0023-449: hyp=['IT', 'IS', 'TOO', 'DIFFICULT', 'REPLIED', 'MADEMOISELLE', 'DETONICHAUCHANT', 'LAUGHING', 'LOUDLY'] +7127-75947-0024-450: ref=['LOOK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'OAKS'] +7127-75947-0024-450: hyp=['LUCK', 'YONDER', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'MOON', 'SLOWLY', 'RISING', 'SILVERING', 'THE', 'TOPMOST', 'BRANCHES', 'OF', 'THE', 'CHESTNUTS', 'AND', 'THE', 'YOKES'] +7127-75947-0025-451: ref=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0025-451: hyp=['EXQUISITE', 'SOFT', 'TURF', 'OF', 'THE', 'WOODS', 'THE', 'HAPPINESS', 'WHICH', 'YOUR', 'FRIENDSHIP', 'CONFERS', 'UPON', 'ME'] +7127-75947-0026-452: ref=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0026-452: hyp=['WELL', 'SAID', 'MADEMOISELLE', 'DE', 'TONE', 'I', 'ALSO', 'THINK', 'A', 'GOOD', 'DEAL', 'BUT', 'I', 'TAKE', 'CARE'] +7127-75947-0027-453: ref=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THINKS', 'ATHENAIS', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0027-453: hyp=['TO', 'SAY', 'NOTHING', 'SAID', 'MONTALAIS', 'SO', 'THAT', 'WHEN', 'MADEMOISELLE', 'DE', 'TO', 'NECHERANT', 'THINKS', 'ETHNEE', 'IS', 'THE', 'ONLY', 'ONE', 'WHO', 'KNOWS', 'IT'] +7127-75947-0028-454: ref=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ATHENAIS', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0028-454: hyp=['QUICK', 'QUICK', 'THEN', 'AMONG', 'THE', 'HIGH', 'REED', 'GRASS', 'SAID', 'MONTALAIS', 'STOOP', 'ETHINAY', 'YOU', 'ARE', 'SO', 'TALL'] +7127-75947-0029-455: ref=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0029-455: hyp=['THE', 'YOUNG', 'GIRLS', 'HAD', 'INDEED', 'MADE', 'THEMSELVES', 'SMALL', 'INDEED', 'INVISIBLE'] +7127-75947-0030-456: ref=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 'COUNT'] +7127-75947-0030-456: hyp=['SHE', 'WAS', 'HERE', 'JUST', 'NOW', 'SAID', 'THE', 'COUNT'] +7127-75947-0031-457: ref=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0031-457: hyp=['YOU', 'ARE', 'POSITIVE', 'THEN'] +7127-75947-0032-458: ref=['YES', 'BUT', 'PERHAPS', 'I', 'FRIGHTENED', 'HER', 'IN', 'WHAT', 'WAY'] +7127-75947-0032-458: hyp=['YES', 'BUT', 'PERHAPS', 'I', 
'FRIGHTENED', 'HER', 'AND', 'WHAT', 'WAY'] +7127-75947-0033-459: ref=['HOW', 'IS', 'IT', 'LA', 'VALLIERE', 'SAID', 'MADEMOISELLE', 'DE', 'TONNAY', 'CHARENTE', 'THAT', 'THE', 'VICOMTE', 'DE', 'BRAGELONNE', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0033-459: hyp=['HOW', 'IS', 'IT', 'LA', 'VALLIERS', 'SAID', 'MADEMOISELLE', 'DE', 'TINACHANT', 'THAT', 'THE', 'VICOMTE', 'DE', 'BRAGELONE', 'SPOKE', 'OF', 'YOU', 'AS', 'LOUISE'] +7127-75947-0034-460: ref=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0034-460: hyp=['IT', 'SEEMS', 'THE', 'KING', 'WILL', 'NOT', 'CONSENT', 'TO', 'IT'] +7127-75947-0035-461: ref=['GOOD', 'GRACIOUS', 'HAS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0035-461: hyp=['GOOD', 'GRACIOUS', 'AS', 'THE', 'KING', 'ANY', 'RIGHT', 'TO', 'INTERFERE', 'IN', 'MATTERS', 'OF', 'THAT', 'KIND'] +7127-75947-0036-462: ref=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0036-462: hyp=['I', 'GIVE', 'MY', 'CONSENT'] +7127-75947-0037-463: ref=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0037-463: hyp=['OH', 'I', 'AM', 'SPEAKING', 'SERIOUSLY', 'REPLIED', 'MONTALAIS', 'AND', 'MY', 'OPINION', 'IN', 'THIS', 'CASE', 'IS', 'QUITE', 'AS', 'GOOD', 'AS', 'THE', "KING'S", 'I', 'SUPPOSE', 'IS', 'IT', 'NOT', 'LOUISE'] +7127-75947-0038-464: ref=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0038-464: hyp=['LET', 'US', 'RUN', 'THEN', 'SAID', 'ALL', 'THREE', 'AND', 'GRACEFULLY', 'LIFTING', 'UP', 'THE', 'LONG', 'SKIRTS', 'OF', 'THEIR', 'SILK', 'DRESSES', 'THEY', 'LIGHTLY', 'RAN', 'ACROSS', 'THE', 'OPEN', 'SPACE', 'BETWEEN', 'THE', 'LAKE', 'AND', 'THE', 'THICKEST', 'COVERT', 'OF', 'THE', 'PARK'] +7127-75947-0039-465: ref=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTED', 'CAVALIERS'] +7127-75947-0039-465: hyp=['IN', 'FACT', 'THE', 'SOUND', 'OF', "MADAME'S", 'AND', 'THE', "QUEEN'S", 'CARRIAGES', 'COULD', 'BE', 'HEARD', 'IN', 'THE', 'DISTANCE', 'UPON', 'THE', 'HARD', 'DRY', 'GROUND', 'OF', 'THE', 'ROADS', 'FOLLOWED', 'BY', 'THE', 'MOUNTAIN', 'CAVALIERS'] +7127-75947-0040-466: ref=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7127-75947-0040-466: hyp=['IN', 'THIS', 'WAY', 'THE', 'FETE', 'OF', 'THE', 'WHOLE', 'COURT', 'WAS', 'A', 'FETE', 'ALSO', 'FOR', 'THE', 'MYSTERIOUS', 'INHABITANTS', 'OF', 'THE', 'FOREST', 'FOR', 'CERTAINLY', 'THE', 'DEER', 'IN', 'THE', 'BRAKE', 'THE', 'PHEASANT', 'ON', 'THE', 'BRANCH', 'THE', 'FOX', 'IN', 'ITS', 'HOLE', 'WERE', 'ALL', 'LISTENING'] +7176-88083-0000-707: ref=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0000-707: 
hyp=['ALL', 'ABOUT', 'HIM', 'WAS', 'A', 'TUMULT', 'OF', 'BRIGHT', 'AND', 'BROKEN', 'COLOR', 'SCATTERED', 'IN', 'BROAD', 'SPLASHES'] +7176-88083-0001-708: ref=['THE', 'MERGANSER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'IN', 'BLACK', 'AND', 'WHITE', 'AND', 'A', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0001-708: hyp=['THE', 'MERGANCER', 'HAD', 'A', 'CRESTED', 'HEAD', 'OF', 'IRIDESCENT', 'GREEN', 'BLACK', 'A', 'BROAD', 'COLLAR', 'OF', 'LUSTROUS', 'WHITE', 'BLACK', 'BACK', 'BLACK', 'AND', 'WHITE', 'WINGS', 'WHITE', 'BELLY', 'SIDES', 'FINELY', 'PENCILLED', 'AND', 'BLACK', 'AND', 'WHITE', 'AND', 'HER', 'BREAST', 'OF', 'RICH', 'CHESTNUT', 'RED', 'STREAKED', 'WITH', 'BLACK'] +7176-88083-0002-709: ref=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SAW', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0002-709: hyp=['HIS', 'FEET', 'WERE', 'RED', 'HIS', 'LONG', 'NARROW', 'BEAK', 'WITH', 'ITS', 'SAW', 'TOOTHED', 'EDGES', 'AND', 'SHARP', 'HOOKED', 'TIP', 'WAS', 'BRIGHT', 'RED'] +7176-88083-0003-710: ref=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0003-710: hyp=['BUT', 'HERE', 'HE', 'WAS', 'AT', 'A', 'TERRIBLE', 'DISADVANTAGE', 'AS', 'COMPARED', 'WITH', 'THE', 'OWLS', 'HAWKS', 'AND', 'EAGLES', 'HE', 'HAD', 'NO', 'RENDING', 'CLAWS'] +7176-88083-0004-711: ref=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0004-711: hyp=['BUT', 'SUDDENLY', 'STRAIGHT', 'AND', 'SWIFT', 'AS', 'A', 'DIVING', 'CORMORANT', 'HE', 'SHOT', 'DOWN', 'INTO', 'THE', 'TORRENT', 'AND', 'DISAPPEARED', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0005-712: ref=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0005-712: hyp=['ONCE', 'FAIRLY', 'A', 'WING', 'HOWEVER', 'HE', 'WHEELED', 'AND', 'MADE', 'BACK', 'HURRIEDLY', 'FOR', 'HIS', 'PERCH'] +7176-88083-0006-713: ref=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0006-713: hyp=['IT', 'MIGHT', 'HAVE', 'SEEMED', 'THAT', 'A', 'TROUT', 'OF', 'THIS', 'SIZE', 'WAS', 'A', 'FAIRLY', 'SUBSTANTIAL', 'MEAL'] +7176-88083-0007-714: ref=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0007-714: hyp=['BUT', 'SUCH', 'WAS', 'HIS', 'KEENNESS', 'THAT', 'EVEN', 'WHILE', 'THE', 'WIDE', 'FLUKES', 'OF', 'HIS', 'ENGORGED', 'VICTIM', 'WERE', 'STILL', 'STICKING', 'OUT', 'AT', 'THE', 'CORNERS', 'OF', 'HIS', 'BEAK', 'HIS', 'FIERCE', 'RED', 'EYES', 'WERE', 'ONCE', 'MORE', 'PEERING', 'DOWNWARD', 'INTO', 'THE', 'TORRENT', 'IN', 'SEARCH', 'OF', 'FRESH', 'PREY'] +7176-88083-0008-715: ref=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] 
+7176-88083-0008-715: hyp=['IN', 'DESPAIR', 'HE', 'HURLED', 'HIMSELF', 'DOWNWARD', 'TOO', 'SOON'] +7176-88083-0009-716: ref=['THE', 'GREAT', 'HAWK', 'FOLLOWED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] +7176-88083-0009-716: hyp=['THE', 'GREAT', 'HAWK', 'FOWLED', 'HURRIEDLY', 'TO', 'RETRIEVE', 'HIS', 'PREY', 'FROM', 'THE', 'GROUND'] +7176-88083-0010-717: ref=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0010-717: hyp=['THE', 'CAT', 'GROWLED', 'SOFTLY', 'PICKED', 'UP', 'THE', 'PRIZE', 'IN', 'HER', 'JAWS', 'AND', 'TROTTED', 'INTO', 'THE', 'BUSHES', 'TO', 'DEVOUR', 'IT'] +7176-88083-0011-718: ref=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0011-718: hyp=['IN', 'FACT', 'HE', 'HAD', 'JUST', 'FINISHED', 'IT', 'THE', 'LAST', 'OF', 'THE', "TROUT'S", 'TAIL', 'HAD', 'JUST', 'VANISHED', 'WITH', 'A', 'SPASM', 'DOWN', 'HIS', 'STRAINED', 'GULLET', 'WHEN', 'THE', 'BAFFLED', 'HAWK', 'CAUGHT', 'SIGHT', 'OF', 'HIM', 'AND', 'SWOOPED'] +7176-88083-0012-719: ref=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0012-719: hyp=['THE', 'HAWK', 'ALIGHTED', 'ON', 'THE', 'DEAD', 'BRANCH', 'AND', 'SAT', 'UPRIGHT', 'MOTIONLESS', 'AS', 'IF', 'SURPRISED'] +7176-88083-0013-720: ref=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0013-720: hyp=['LIKE', 'HIS', 'UNFORTUNATE', 'LITTLE', 'COUSIN', 'THE', 'TEAL', 'HE', 'TOO', 'HAD', 'FELT', 'THE', 'FEAR', 'OF', 'DEATH', 'SMITTEN', 'INTO', 'HIS', 'HEART', 'AND', 'WAS', 'HEADING', 'DESPERATELY', 'FOR', 'THE', 'REFUGE', 'OF', 'SOME', 'DARK', 'OVERHANGING', 'BANK', 'DEEP', 'FRINGED', 'WITH', 'WEEDS', 'WHERE', 'THE', 'DREADFUL', 'EYE', 'OF', 'THE', 'HAWK', 'SHOULD', 'NOT', 'DISCERN', 'HIM'] +7176-88083-0014-721: ref=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0014-721: hyp=['THE', 'HAWK', 'SAT', 'UPON', 'THE', 'BRANCH', 'AND', 'WATCHED', 'HIS', 'QUARRY', 'SWIMMING', 'BENEATH', 'THE', 'SURFACE'] +7176-88083-0015-722: ref=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TOP'] +7176-88083-0015-722: hyp=['ALMOST', 'INSTANTLY', 'HE', 'WAS', 'FORCED', 'TO', 'THE', 'TOP'] +7176-88083-0016-723: ref=['STRAIGHTWAY', 'THE', 'HAWK', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0016-723: hyp=['STRAIGHTWAY', 'THE', 'HOT', 'GLIDED', 'FROM', 'HIS', 'PERCH', 'AND', 'DARTED', 'AFTER', 'HIM'] +7176-88083-0017-724: ref=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0017-724: hyp=['BUT', 'AT', 'THIS', 'POINT', 'IN', 'THE', 'RAPIDS', 'IT', 'WAS', 'IMPOSSIBLE', 'FOR', 'HIM', 'TO', 'STAY', 'DOWN'] +7176-88083-0018-725: ref=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 
'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0018-725: hyp=['BUT', 'THIS', 'FREQUENTER', 'OF', 'THE', 'HEIGHTS', 'OF', 'AIR', 'FOR', 'ALL', 'HIS', 'SAVAGE', 'VALOR', 'WAS', 'TROUBLED', 'AT', 'THE', 'LEAPING', 'WAVES', 'AND', 'THE', 'TOSSING', 'FOAM', 'OF', 'THESE', 'MAD', 'RAPIDS', 'HE', 'DID', 'NOT', 'UNDERSTAND', 'THEM'] +7176-88083-0019-726: ref=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0019-726: hyp=['AS', 'HE', 'FLEW', 'HIS', 'DOWN', 'REACHING', 'CLUTCHING', 'TALONS', 'WERE', 'NOT', 'HALF', 'A', 'YARD', 'ABOVE', 'THE', "FUGITIVE'S", 'HEAD'] +7176-88083-0020-727: ref=['WHERE', 'THE', 'WAVES', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0020-727: hyp=['WHERE', 'THE', 'WAY', 'IS', 'FOR', 'AN', 'INSTANT', 'SANK', 'THEY', 'CAME', 'CLOSER', 'BUT', 'NOT', 'QUITE', 'WITHIN', 'GRASPING', 'REACH'] +7176-88083-0021-728: ref=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0021-728: hyp=['BUT', 'AS', 'BEFORE', 'THE', 'LEAPING', 'WAVES', 'OF', 'THE', 'RAPIDS', 'WERE', 'TOO', 'MUCH', 'FOR', 'HIS', 'PURSUER', 'AND', 'HE', 'WAS', 'ABLE', 'TO', 'FLAP', 'HIS', 'WAY', 'ONWARD', 'IN', 'A', 'CLOUD', 'OF', 'FOAM', 'WHILE', 'DOOM', 'HUNG', 'LOW', 'ABOVE', 'HIS', 'HEAD', 'YET', 'HESITATED', 'TO', 'STRIKE'] +7176-88083-0022-729: ref=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0022-729: hyp=['THE', 'HAWK', 'EMBITTERED', 'BY', 'THE', 'LOSS', 'OF', 'HIS', 'FIRST', 'QUARRY', 'HAD', 'BECOME', 'AS', 'DOGGED', 'IN', 'PURSUIT', 'AS', 'A', 'WEASEL', 'NOT', 'TO', 'BE', 'SHAKEN', 'OFF', 'OR', 'EVADED', 'OR', 'DECEIVED'] +7176-88083-0023-730: ref=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0023-730: hyp=['HE', 'HAD', 'A', 'LOT', 'OF', 'LINE', 'OUT', 'AND', 'THE', 'PLACE', 'WAS', 'NONE', 'TOO', 'FREE', 'FOR', 'A', 'LONG', 'CAST', 'BUT', 'HE', 'WAS', 'IMPATIENT', 'TO', 'DROP', 'HIS', 'FLIES', 'AGAIN', 'ON', 'THE', 'SPOT', 'WHERE', 'THE', 'BIG', 'FISH', 'WAS', 'FEEDING'] +7176-88083-0024-731: ref=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0024-731: hyp=['THE', 'LAST', 'DROP', 'FLY', 'AS', 'LUCK', 'WOULD', 'HAVE', 'IT', 'CAUGHT', 'JUST', 'IN', 'THE', 'CORNER', 'OF', 'THE', "HAWK'S", 'ANGRILY', 'OPEN', 'BEAK', 'HOOKING', 'ITSELF', 'FIRMLY'] +7176-88083-0025-732: ref=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 
'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0025-732: hyp=['AT', 'THE', 'SUDDEN', 'SHARP', 'STING', 'OF', 'IT', 'THE', 'GREAT', 'BIRD', 'TURNED', 'HIS', 'HEAD', 'AND', 'NOTICED', 'FOR', 'THE', 'FIRST', 'TIME', 'THE', 'FISHERMAN', 'STANDING', 'ON', 'THE', 'BANK'] +7176-88083-0026-733: ref=['THE', 'DRAG', 'UPON', 'HIS', 'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0026-733: hyp=['THE', 'DRAG', 'UPON', 'HIS', 'BEAK', 'AND', 'THE', 'LIGHT', 'CHECK', 'UPON', 'HIS', 'WINGS', 'WERE', 'INEXPLICABLE', 'TO', 'HIM', 'AND', 'APPALLING'] +7176-88083-0027-734: ref=['THEN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-88083-0027-734: hyp=['THAN', 'THE', 'LEADER', 'PARTED', 'FROM', 'THE', 'LINE'] +7176-92135-0000-661: ref=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SYMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0000-661: hyp=['HE', 'IS', 'A', 'WELCOME', 'FIGURE', 'AT', 'THE', 'GARDEN', 'PARTIES', 'OF', 'THE', 'ELECT', 'WHO', 'ARE', 'ALWAYS', 'READY', 'TO', 'ENCOURAGE', 'HIM', 'BY', 'ACCEPTING', 'FREE', 'SEATS', 'FOR', 'HIS', 'PLAY', 'ACTOR', 'MANAGERS', 'NOD', 'TO', 'HIM', 'EDITORS', 'ALLOW', 'HIM', 'TO', 'CONTRIBUTE', 'WITHOUT', 'CHARGE', 'TO', 'A', 'SYMPOSIUM', 'ON', 'THE', 'PRICE', 'OF', 'GOLF', 'BALLS'] +7176-92135-0001-662: ref=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0001-662: hyp=['IN', 'SHORT', 'HE', 'BECOMES', 'A', 'PROMINENT', 'FIGURE', 'IN', 'LONDON', 'SOCIETY', 'AND', 'IF', 'HE', 'IS', 'NOT', 'CAREFUL', 'SOMEBODY', 'WILL', 'SAY', 'SO'] +7176-92135-0002-663: ref=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0002-663: hyp=['BUT', 'EVEN', 'THE', 'UNSUCCESSFUL', 'DRAMATIST', 'HAS', 'HIS', 'MOMENTS'] +7176-92135-0003-664: ref=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0003-664: hyp=['YOUR', 'PLAY', 'MUST', 'BE', 'NOT', 'MERELY', 'A', 'GOOD', 'PLAY', 'BUT', 'A', 'SUCCESSFUL', 'ONE'] +7176-92135-0004-665: ref=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0004-665: hyp=['FRANKLY', 'I', 'CANNOT', 'ALWAYS', 'SAY'] +7176-92135-0005-666: ref=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0005-666: hyp=['BUT', 'SUPPOSE', 'YOU', 'SAID', "I'M", 'FOND', 'OF', 'WRITING', 'MY', 'PEOPLE', 'ALWAYS', 'SAY', 'MY', 'LETTERS', 'HOME', 'ARE', 'GOOD', 'ENOUGH', 'FOR', 'PUNCH'] +7176-92135-0006-667: ref=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', "I'D", 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] +7176-92135-0006-667: hyp=["I'VE", 'GOT', 'A', 'LITTLE', 'IDEA', 'FOR', 'A', 'PLAY', 'ABOUT', 'A', 'MAN', 'AND', 'A', 'WOMAN', 'AND', 'ANOTHER', 'WOMAN', 'AND', 'BUT', 'PERHAPS', 'I', 'BETTER', 'KEEP', 'THE', 'PLOT', 'A', 'SECRET', 'FOR', 'THE', 'MOMENT'] 
+7176-92135-0007-668: ref=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0007-668: hyp=['ANYHOW', "IT'S", 'JOLLY', 'EXCITING', 'AND', 'I', 'CAN', 'DO', 'THE', 'DIALOGUE', 'ALL', 'RIGHT'] +7176-92135-0008-669: ref=['LEND', 'ME', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0008-669: hyp=['LEND', 'ME', 'YOUR', 'EAR', 'FOR', 'TEN', 'MINUTES', 'AND', 'YOU', 'SHALL', 'LEARN', 'JUST', 'WHAT', 'STAGECRAFT', 'IS'] +7176-92135-0009-670: ref=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0009-670: hyp=['AND', 'I', 'SHOULD', 'BEGIN', 'WITH', 'A', 'SHORT', 'HOMILY', 'ON', 'SOLILOQUY'] +7176-92135-0010-671: ref=['HAM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0010-671: hyp=['HIM', 'TO', 'BE', 'OR', 'NOT', 'TO', 'BE'] +7176-92135-0011-672: ref=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLILOQUY', 'IS', 'PLAIN'] +7176-92135-0011-672: hyp=['NOW', 'THE', 'OBJECT', 'OF', 'THIS', 'SOLOQUY', 'IS', 'PLAIN'] +7176-92135-0012-673: ref=['INDEED', 'IRRESOLUTION', 'BEING', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENT', 'WORKING', 'OF', 'THE', 'JAW', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0012-673: hyp=['INDEED', 'IRRESOLUTION', 'MEAN', 'THE', 'KEYNOTE', 'OF', "HAMLET'S", 'SOLILOQUY', 'A', 'CLEVER', 'PLAYER', 'COULD', 'TO', 'SOME', 'EXTENT', 'INDICATE', 'THE', 'WHOLE', 'THIRTY', 'LINES', 'BY', 'A', 'SILENT', 'WORKING', 'OF', 'THE', 'JOB', 'BUT', 'AT', 'THE', 'SAME', 'TIME', 'IT', 'WOULD', 'BE', 'IDLE', 'TO', 'DENY', 'THAT', 'HE', 'WOULD', 'MISS', 'THE', 'FINER', 'SHADES', 'OF', 'THE', "DRAMATIST'S", 'MEANING'] +7176-92135-0013-674: ref=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0013-674: hyp=['WE', 'MODERNS', 'HOWEVER', 'SEE', 'THE', 'ABSURDITY', 'OF', 'IT'] +7176-92135-0014-675: ref=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TECHNIQUE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0014-675: hyp=['IF', 'IT', 'BE', 'GRANTED', 'FIRST', 'THAT', 'THE', 'THOUGHTS', 'OF', 'A', 'CERTAIN', 'CHARACTER', 'SHOULD', 'BE', 'KNOWN', 'TO', 'THE', 'AUDIENCE', 'AND', 'SECONDLY', 'THAT', 'SOLILOQUY', 'OR', 'THE', 'HABIT', 'OF', 'THINKING', 'ALOUD', 'IS', 'IN', 'OPPOSITION', 'TO', 'MODERN', 'STAGE', 'TYPENIQUE', 'HOW', 'SHALL', 'A', 'SOLILOQUY', 'BE', 'AVOIDED', 'WITHOUT', 'DAMAGE', 'TO', 'THE', 'PLAY'] +7176-92135-0015-676: ref=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'TO', 'THE', 'END', 'WHEN', 'OPHELIA', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0015-676: hyp=['AND', 'SO', 'ON', 'TILL', 'YOU', 'GET', 'TO', 'THE', 'END', 'ONE', 'OF', 'WILLIAM', 'MIGHT', 'SAY', 'AH', 'YES', 'OR', 'SOMETHING', 'NON', 'COMMITTAL', 'OF', 'THAT', 'SORT'] +7176-92135-0016-677: ref=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 
'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0016-677: hyp=['THIS', 'WOULD', 'BE', 'AN', 'EASY', 'WAY', 'OF', 'DOING', 'IT', 'BUT', 'IT', 'WOULD', 'NOT', 'BE', 'THE', 'BEST', 'WAY', 'FOR', 'THE', 'REASON', 'THAT', 'IT', 'IS', 'TOO', 'EASY', 'TO', 'CALL', 'ATTENTION', 'TO', 'ITSELF'] +7176-92135-0017-678: ref=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0017-678: hyp=['IN', 'THE', 'OLD', 'BADLY', 'MADE', 'PLAY', 'IT', 'WAS', 'FREQUENTLY', 'NECESSARY', 'FOR', 'ONE', 'OF', 'THE', 'CHARACTERS', 'TO', 'TAKE', 'THE', 'AUDIENCE', 'INTO', 'HIS', 'CONFIDENCE'] +7176-92135-0018-679: ref=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0018-679: hyp=['IN', 'THE', 'MODERN', 'WELL', 'CONSTRUCTED', 'PLAY', 'HE', 'SIMPLY', 'RINGS', 'UP', 'AN', 'IMAGINARY', 'CONFEDERATE', 'AND', 'TELLS', 'HIM', 'WHAT', 'HE', 'IS', 'GOING', 'TO', 'DO', 'COULD', 'ANYTHING', 'BE', 'MORE', 'NATURAL'] +7176-92135-0019-680: ref=['I', 'WANT', 'DOUBLE', 'NINE', 'HAL', 'LO'] +7176-92135-0019-680: hyp=['I', 'WANT', 'DOUBLE', 'NINE', 'HELLO'] +7176-92135-0020-681: ref=['DOUBLE', 'NINE', 'TWO', 'THREE', 'ELSINORE', 'DOUBLE', 'NINE', 'YES', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0020-681: hyp=['DOUBLE', 'NINE', 'TO', 'THREE', 'ELSINOR', 'DOUBLE', 'NOT', 'YES', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLE', 'SPEAKING'] +7176-92135-0021-682: ref=['I', 'SAY', "I'VE", 'BEEN', 'WONDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0021-682: hyp=['I', 'SAY', "I'VE", 'BEEN', 'WANDERING', 'ABOUT', 'THIS', 'BUSINESS'] +7176-92135-0022-683: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0022-683: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER', 'IN', 'THE', 'MIND', 'TO', 'SUFFER', 'THE', 'SLINGS', 'AND', 'ARROWS', 'WHAT', 'NO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: ref=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HALLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0023-684: hyp=['YOU', 'GAVE', 'ME', 'DOUBLE', 'FIVE', 'I', 'WANT', 'DOUBLE', 'NINE', 'HELLO', 'IS', 'THAT', 'YOU', 'HORATIO', 'HAMLET', 'SPEAKING'] +7176-92135-0024-685: ref=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0024-685: hyp=['TO', 'BE', 'OR', 'NOT', 'TO', 'BE', 'THAT', 'IS', 'THE', 'QUESTION', 'WHETHER', 'TIS', 'NOBLER'] +7176-92135-0025-686: ref=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPEN', 'TO', 'BE', 'THE', 'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PET', 'FALCON', 'MONGOOSE', 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0025-686: hyp=['IT', 'IS', 'TO', 'LET', 'HAMLET', 'IF', 'THAT', 'HAPPENED', 'TO', 'BE', 'THE', 
'NAME', 'OF', 'YOUR', 'CHARACTER', 'ENTER', 'WITH', 'A', 'SMALL', 'DOG', 'PET', 'FALCON', "MONGOO'S", 'TAME', 'BEAR', 'OR', 'WHATEVER', 'ANIMAL', 'IS', 'MOST', 'IN', 'KEEPING', 'WITH', 'THE', 'PART', 'AND', 'CONFIDE', 'IN', 'THIS', 'ANIMAL', 'SUCH', 'SORROWS', 'HOPES', 'OR', 'SECRET', 'HISTORY', 'AS', 'THE', 'AUDIENCE', 'HAS', 'GOT', 'TO', 'KNOW'] +7176-92135-0026-687: ref=['ENTER', 'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOAR', 'HOUND'] +7176-92135-0026-687: hyp=['INTER', 'HAMLET', 'WITH', 'HIS', 'FAVOURITE', 'BOREHOUND'] +7176-92135-0027-688: ref=['LADY', 'LARKSPUR', 'STARTS', 'SUDDENLY', 'AND', 'TURNS', 'TOWARDS', 'HIM'] +7176-92135-0027-688: hyp=['LADY', 'LARKSBURG', 'START', 'SUDDENLY', 'AND', 'TURNS', 'TOWARDS', 'HIM'] +7176-92135-0028-689: ref=['LARKSPUR', 'BIT', 'ME', 'AGAIN', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0028-689: hyp=['LARKSPER', 'BID', 'ME', 'AGAIN', 'THIS', 'MORNING', 'FOR', 'THE', 'THIRD', 'TIME'] +7176-92135-0029-690: ref=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOONS'] +7176-92135-0029-690: hyp=['I', 'WANT', 'TO', 'GET', 'AWAY', 'FROM', 'IT', 'ALL', 'SWOON'] +7176-92135-0030-691: ref=['ENTER', 'LORD', 'ARTHUR', 'FLUFFINOSE'] +7176-92135-0030-691: hyp=['ENTERED', 'LORD', 'ARTHUR', "FLAPHANO'S"] +7176-92135-0031-692: ref=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0031-692: hyp=['AND', 'THERE', 'YOU', 'ARE', 'YOU', 'WILL', 'OF', 'COURSE', 'APPRECIATE', 'THAT', 'THE', 'UNFINISHED', 'SENTENCES', 'NOT', 'ONLY', 'SAVE', 'TIME', 'BUT', 'ALSO', 'MAKE', 'THE', 'MANOEUVRING', 'VERY', 'MUCH', 'MORE', 'NATURAL'] +7176-92135-0032-693: ref=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0032-693: hyp=['HOW', 'YOU', 'MAY', 'BE', 'WONDERING', 'ARE', 'YOU', 'TO', 'BEGIN', 'YOUR', 'MASTERPIECE'] +7176-92135-0033-694: ref=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0033-694: hyp=['RELAPSES', 'INTO', 'SILENCE', 'FOR', 'THE', 'REST', 'OF', 'THE', 'EVENING'] +7176-92135-0034-695: ref=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'OH', 'REGGIE', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0034-695: hyp=['THE', 'DUCHESS', 'OF', 'SOUTHBRIDGE', 'TO', 'LORD', 'REGGIE', 'OH', 'READY', 'WHAT', 'DID', 'YOU', 'SAY'] +7176-92135-0035-696: ref=['THEN', 'LORD', 'TUPPENY', 'WELL', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0035-696: hyp=['THEN', 'LORD', 'TUPPENNY', 'WHAT', 'ABOUT', 'AUCTION'] +7176-92135-0036-697: ref=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAVING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0036-697: hyp=['THE', 'CROWD', 'DRIFTS', 'OFF', 'LEAPING', 'THE', 'HERO', 'AND', 'HEROINE', 'ALONE', 'IN', 'THE', 'MIDDLE', 'OF', 'THE', 'STAGE', 'AND', 'THEN', 'YOU', 'CAN', 'BEGIN'] +7176-92135-0037-698: ref=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] +7176-92135-0037-698: hyp=['THEN', 'IS', 'THE', 'TIME', 'TO', 'INTRODUCE', 'A', 'MEAL', 'ON', 'THE', 'STAGE'] +7176-92135-0038-699: ref=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVES', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HAWTREY', 'OR', 'OWEN', 'NARES', 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 
'AND', 'ME'] +7176-92135-0038-699: hyp=['A', 'STAGE', 'MEAL', 'IS', 'POPULAR', 'BECAUSE', 'IT', 'PROVED', 'TO', 'THE', 'AUDIENCE', 'THAT', 'THE', 'ACTORS', 'EVEN', 'WHEN', 'CALLED', 'CHARLES', 'HALTREE', 'OR', 'OWEN', "NEAR'S", 'ARE', 'REAL', 'PEOPLE', 'JUST', 'LIKE', 'YOU', 'AND', 'ME'] +7176-92135-0039-700: ref=['TEA', 'PLEASE', 'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0039-700: hyp=['T', 'PLEASE', 'MATTHEWS', 'BUTLER', 'IMPASSIVELY'] +7176-92135-0040-701: ref=['HOSTESS', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'A', 'MOMENT', 'THEN', 'HANDS', 'HIM', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'THUS', 'DECEIVING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0040-701: hyp=['HOSTES', 'REPLACES', 'LUMP', 'AND', 'INCLINES', 'EMPTY', 'TEAPOT', 'OVER', 'TRAY', 'FOR', 'MOMENT', 'THEN', 'HANDSOME', 'A', 'CUP', 'PAINTED', 'BROWN', 'INSIDE', 'LUSTY', 'SEEING', 'THE', 'GENTLEMAN', 'WITH', 'THE', 'TELESCOPE', 'IN', 'THE', 'UPPER', 'CIRCLE'] +7176-92135-0041-702: ref=['RE', 'ENTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'REMOVE', 'THE', 'TEA', 'THINGS', 'HOSTESS', 'TO', 'GUEST'] +7176-92135-0041-702: hyp=['REINTER', 'BUTLER', 'AND', 'THREE', 'FOOTMEN', 'WHO', 'MOVED', 'THE', 'TEA', 'THINGS', 'HOSTESS', 'TWO', 'GUEST'] +7176-92135-0042-703: ref=['IN', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STAGE', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0042-703: hyp=['AND', 'NOVELS', 'THE', 'HERO', 'HAS', 'OFTEN', 'PUSHED', 'HIS', 'MEALS', 'AWAY', 'UNTASTED', 'BUT', 'NO', 'STEED', 'HERO', 'WOULD', 'DO', 'ANYTHING', 'SO', 'UNNATURAL', 'AS', 'THIS'] +7176-92135-0043-704: ref=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'BREAD', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUEST', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MO', 'OLD', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', 'ACTOR', 'IS', 'BETTER', 'SCHOOLED', 'THAN', 'THIS'] +7176-92135-0043-704: hyp=['TWO', 'BITES', 'ARE', 'MADE', 'AND', 'THE', 'ABRET', 'IS', 'CRUMBLED', 'WITH', 'AN', 'AIR', 'OF', 'GREAT', 'EAGERNESS', 'INDEED', 'ONE', 'FEELS', 'THAT', 'IN', 'REAL', 'LIFE', 'THE', 'GUESTS', 'WOULD', 'CLUTCH', 'HOLD', 'OF', 'THE', 'FOOTMAN', 'AND', 'SAY', 'HALF', 'A', 'MOLE', 'CHAP', 'I', "HAVEN'T", 'NEARLY', 'FINISHED', 'BUT', 'THE', "ACTOR'S", 'BETTER', 'SCHOOLED', 'THAN', 'THIS'] +7176-92135-0044-705: ref=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0044-705: hyp=['BUT', 'IT', 'IS', 'THE', 'CIGARETTE', 'WHICH', 'CHIEFLY', 'HAS', 'BROUGHT', 'THE', 'MODERN', 'DRAMA', 'TO', 'ITS', 'PRESENT', 'STATE', 'OF', 'PERFECTION'] +7176-92135-0045-706: ref=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'CIGARETTE', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7176-92135-0045-706: hyp=['LORD', 'JOHN', 'TAKING', 'OUT', 'GOLD', 'SICK', 'RED', 'CASE', 'FROM', 'HIS', 'LEFT', 'HAND', 'UPPER', 'WAISTCOAT', 'POCKET'] +7729-102255-0000-261: ref=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0000-261: hyp=['THE', 'BOGUS', 'LEGISLATURE', 'NUMBERED', 'THIRTY', 'SIX', 'MEMBERS'] +7729-102255-0001-262: ref=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 
'FIFTY', 'FIVE'] +7729-102255-0001-262: hyp=['THIS', 'WAS', 'AT', 'THE', 'MARCH', 'ELECTION', 'EIGHTEEN', 'FIFTY', 'FIVE'] +7729-102255-0002-263: ref=['THAT', "SUMMER'S", 'EMIGRATION', 'HOWEVER', 'BEING', 'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGED', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0002-263: hyp=['THAT', "SUMMER'S", 'IMMIGRATION', 'HOWEVER', 'BEING', 'MAINLY', 'FROM', 'THE', 'FREE', 'STATES', 'GREATLY', 'CHANGED', 'THE', 'RELATIVE', 'STRENGTH', 'OF', 'THE', 'TWO', 'PARTIES'] +7729-102255-0003-264: ref=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0003-264: hyp=['FOR', 'GENERAL', 'SERVICE', 'THEREFORE', 'REQUIRING', 'NO', 'SPECIAL', 'EFFORT', 'THE', 'NUMERICAL', 'STRENGTH', 'OF', 'THE', 'FACTIONS', 'WAS', 'ABOUT', 'EQUAL', 'WHILE', 'ON', 'EXTRAORDINARY', 'OCCASIONS', 'THE', 'TWO', 'THOUSAND', 'BORDER', 'RUFFIAN', 'RESERVE', 'LYING', 'A', 'LITTLE', 'FARTHER', 'BACK', 'FROM', 'THE', 'STATE', 'LINE', 'COULD', 'AT', 'ANY', 'TIME', 'EASILY', 'TURN', 'THE', 'SCALE'] +7729-102255-0004-265: ref=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0004-265: hyp=['THE', 'FREE', 'STATE', 'MEN', 'HAD', 'ONLY', 'THEIR', 'CONVICTIONS', 'THEIR', 'INTELLIGENCE', 'THEIR', 'COURAGE', 'AND', 'THE', 'MORAL', 'SUPPORT', 'OF', 'THE', 'NORTH', 'THE', 'CONSPIRACY', 'HAD', 'ITS', 'SECRET', 'COMBINATION', 'THE', 'TERRITORIAL', 'OFFICIALS', 'THE', 'LEGISLATURE', 'THE', 'BOGUS', 'LAWS', 'THE', 'COURTS', 'THE', 'MILITIA', 'OFFICERS', 'THE', 'PRESIDENT', 'AND', 'THE', 'ARMY'] +7729-102255-0005-266: ref=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0005-266: hyp=['THIS', 'WAS', 'A', 'FORMIDABLE', 'ARRAY', 'OF', 'ADVANTAGES', 'SLAVERY', 'WAS', 'PLAYING', 'WITH', 'LOADED', 'DICE'] +7729-102255-0006-267: ref=['COMING', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOLDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0006-267: hyp=['COMMON', 'BY', 'WAY', 'OF', 'THE', 'MISSOURI', 'RIVER', 'TOWNS', 'HE', 'FELL', 'FIRST', 'AMONG', 'BORDER', 'RUFFIAN', 'COMPANIONSHIP', 'AND', 'INFLUENCES', 'AND', 'PERHAPS', 'HAVING', 'HIS', 'INCLINATIONS', 'ALREADY', 'MOULDED', 'BY', 'HIS', 'WASHINGTON', 'INSTRUCTIONS', 'HIS', 'EARLY', 'IMPRESSIONS', 'WERE', 'DECIDEDLY', 'ADVERSE', 'TO', 'THE', 'FREE', 'STATE', 'CAUSE'] +7729-102255-0007-268: ref=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTPORT', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 
'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0007-268: hyp=['HIS', 'RECEPTION', 'SPEECH', 'AT', 'WESTWARD', 'IN', 'WHICH', 'HE', 'MAINTAINED', 'THE', 'LEGALITY', 'OF', 'THE', 'LEGISLATURE', 'AND', 'HIS', 'DETERMINATION', 'TO', 'ENFORCE', 'THEIR', 'LAWS', 'DELIGHTED', 'HIS', 'PRO', 'SLAVERY', 'AUDITORS'] +7729-102255-0008-269: ref=['ALL', 'THE', 'TERRITORIAL', 'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHANNON', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONISTS', 'SUPPORTING', 'THE', 'TOPEKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LECOMPTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0008-269: hyp=['ALL', 'THE', 'TERRITORIAL', 'DIGNITARIES', 'WERE', 'PRESENT', 'GOVERNOR', 'SHAN', 'AND', 'PRESIDED', 'JOHN', 'CALHOUN', 'THE', 'SURVEYOR', 'GENERAL', 'MADE', 'THE', 'PRINCIPAL', 'SPEECH', 'A', 'DENUNCIATION', 'OF', 'THE', 'ABOLITIONIST', 'SUPPORTING', 'THE', 'TOPEKA', 'MOVEMENT', 'CHIEF', 'JUSTICE', 'LE', 'COMTE', 'DIGNIFIED', 'THE', 'OCCASION', 'WITH', 'APPROVING', 'REMARKS'] +7729-102255-0009-270: ref=['ALL', 'DISSENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEAVENWORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0009-270: hyp=['ALL', 'DESCENT', 'ALL', 'NON', 'COMPLIANCE', 'ALL', 'HESITATION', 'ALL', 'MERE', 'SILENCE', 'EVEN', 'WERE', 'IN', 'THEIR', 'STRONGHOLD', 'TOWNS', 'LIKE', 'LEVINWORTH', 'BRANDED', 'AS', 'ABOLITIONISM', 'DECLARED', 'TO', 'BE', 'HOSTILITY', 'TO', 'THE', 'PUBLIC', 'WELFARE', 'AND', 'PUNISHED', 'WITH', 'PROSCRIPTION', 'PERSONAL', 'VIOLENCE', 'EXPULSION', 'AND', 'FREQUENTLY', 'DEATH'] +7729-102255-0010-271: ref=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0010-271: hyp=['OF', 'THE', 'LYNCHINGS', 'THE', 'MOBS', 'AND', 'THE', 'MURDERS', 'IT', 'WOULD', 'BE', 'IMPOSSIBLE', 'EXCEPT', 'IN', 'A', 'VERY', 'EXTENDED', 'WORK', 'TO', 'NOTE', 'THE', 'FREQUENT', 'AND', 'ATROCIOUS', 'DETAILS'] +7729-102255-0011-272: ref=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'WERE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0011-272: hyp=['THE', 'PRESENT', 'CHAPTERS', 'CAN', 'ONLY', 'TOUCH', 'UPON', 'THE', 'MORE', 'SALIENT', 'MOVEMENTS', 'OF', 'THE', 'CIVIL', 'WAR', 'IN', 'KANSAS', 'WHICH', 'HAPPILY', 'ARE', 'NOT', 'SANGUINARY', 'IF', 'HOWEVER', 'THE', 'INDIVIDUAL', 'AND', 'MORE', 'ISOLATED', 'CASES', 'OF', 'BLOODSHED', 'COULD', 'BE', 'DESCRIBED', 'THEY', 'WOULD', 'SHOW', 'A', 'STARTLING', 'AGGREGATE', 'OF', 'BARBARITY', 'AND', 'A', 'LOSS', 'OF', 'LIFE', 'FOR', "OPINION'S", 'SAKE'] +7729-102255-0012-273: ref=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] 
+7729-102255-0012-273: hyp=['SEVERAL', 'HUNDRED', 'FREE', 'STATE', 'MEN', 'PROMPTLY', 'RESPONDED', 'TO', 'THE', 'SUMMONS'] +7729-102255-0013-274: ref=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0013-274: hyp=['IT', 'WAS', 'IN', 'FACT', 'THE', 'BEST', 'WEAPON', 'OF', 'ITS', 'DAY'] +7729-102255-0014-275: ref=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0014-275: hyp=['THE', 'LEADERS', 'OF', 'THE', 'CONSPIRACY', 'BECAME', 'DISTRUSTFUL', 'OF', 'THEIR', 'POWER', 'TO', 'CRUSH', 'THE', 'TOWN'] +7729-102255-0015-276: ref=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', 'SHARPS', 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEAVENWORTH'] +7729-102255-0015-276: hyp=['ONE', 'OF', 'HIS', 'MILITIA', 'GENERALS', 'SUGGESTED', 'THAT', 'THE', 'GOVERNOR', 'SHOULD', 'REQUIRE', 'THE', 'OUTLAWS', 'AT', 'LAWRENCE', 'AND', 'ELSEWHERE', 'TO', 'SURRENDER', 'THE', "SHARP'S", 'RIFLES', 'ANOTHER', 'WROTE', 'ASKING', 'HIM', 'TO', 'CALL', 'OUT', 'THE', 'GOVERNMENT', 'TROOPS', 'AT', 'FORT', 'LEVINWORTH'] +7729-102255-0016-277: ref=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LEGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0016-277: hyp=['THE', 'GOVERNOR', 'ON', 'HIS', 'PART', 'BECOMING', 'DOUBTFUL', 'OF', 'THE', 'LEGALITY', 'OF', 'EMPLOYING', 'MISSOURI', 'MILITIA', 'TO', 'ENFORCE', 'KANSAS', 'LAWS', 'WAS', 'ALSO', 'EAGER', 'TO', 'SECURE', 'THE', 'HELP', 'OF', 'FEDERAL', 'TROOPS'] +7729-102255-0017-278: ref=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0017-278: hyp=['SHERIFF', 'JONES', 'HAD', 'HIS', 'POCKETS', 'ALWAYS', 'FULL', 'OF', 'WRITS', 'ISSUED', 'IN', 'THE', 'SPIRIT', 'OF', 'PERSECUTION', 'BUT', 'WAS', 'OFTEN', 'BAFFLED', 'BY', 'THE', 'SHARP', 'WITS', 'AND', 'READY', 'RESOURCES', 'OF', 'THE', 'FREE', 'STATE', 'PEOPLE', 'AND', 'SOMETIMES', 'DEFIED', 'OUTRIGHT'] +7729-102255-0018-279: ref=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0018-279: hyp=['LITTLE', 'BY', 'LITTLE', 'HOWEVER', 'THE', 'LATTER', 'BECAME', 'HEMMED', 'AND', 'BOUND', 'IN', 'THE', 'MESHES', 'OF', 'THE', 'VARIOUS', 'DEVICES', 'AND', 'PROCEEDINGS', 'WHICH', 'THE', 'TERRITORIAL', 'OFFICIALS', 'EVOLVED', 'FROM', 'THE', 'BOGUS', 'LAWS'] +7729-102255-0019-280: ref=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMPTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] +7729-102255-0019-280: hyp=['TO', 'EMBARRASS', 'THIS', 'DAMAGING', 'EXPOSURE', 'JUDGE', 'LECOMTE', 'ISSUED', 'A', 'WRIT', 'AGAINST', 'THE', 'EX', 'GOVERNOR', 'ON', 'A', 'FRIVOLOUS', 'CHARGE', 'OF', 'CONTEMPT'] 
+7729-102255-0020-281: ref=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 'MADE', 'AND', 'REEDER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0020-281: hyp=['THE', 'INCIDENT', 'WAS', 'NOT', 'VIOLENT', 'NOR', 'EVEN', 'DRAMATIC', 'NO', 'POSSE', 'WAS', 'SUMMONED', 'NO', 'FURTHER', 'EFFORT', 'MADE', 'AND', 'READER', 'FEARING', 'PERSONAL', 'VIOLENCE', 'SOON', 'FLED', 'IN', 'DISGUISE'] +7729-102255-0021-282: ref=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONISTS', 'AND', 'OUTLAWS'] +7729-102255-0021-282: hyp=['BUT', 'THE', 'AFFAIR', 'WAS', 'MAGNIFIED', 'AS', 'A', 'CROWNING', 'PROOF', 'THAT', 'THE', 'FREE', 'STATE', 'MEN', 'WERE', 'INSURRECTIONOUS', 'AND', 'OUTLAWS'] +7729-102255-0022-283: ref=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUTING', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0022-283: hyp=['FROM', 'THESE', 'AGAIN', 'SPRANG', 'BARRICADED', 'AND', 'FORTIFIED', 'DWELLINGS', 'CAMPS', 'AND', 'SCOUT', 'PARTIES', 'FINALLY', 'CULMINATING', 'IN', 'ROVING', 'GUERRILLA', 'BANDS', 'HALF', 'PARTISAN', 'HALF', 'PREDATORY'] +7729-102255-0023-284: ref=['THEIR', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0023-284: hyp=['THERE', 'ARE', 'DISTINCTIVE', 'CHARACTERS', 'HOWEVER', 'DISPLAY', 'ONE', 'BROAD', 'AND', 'UNFAILING', 'DIFFERENCE'] +7729-102255-0024-285: ref=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0024-285: hyp=['THE', 'FREE', 'STATE', 'MEN', 'CLUNG', 'TO', 'THEIR', 'PRAIRIE', 'TOWNS', 'AND', 'PRAIRIE', 'RAVINES', 'WITH', 'ALL', 'THE', 'OBSTINACY', 'AND', 'COURAGE', 'OF', 'TRUE', 'DEFENDERS', 'OF', 'THEIR', 'HOMES', 'AND', 'FIRESIDES'] +7729-102255-0025-286: ref=['THEIR', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0025-286: hyp=['THERE', 'ASSUMED', 'CHARACTER', 'CHANGED', 'WITH', 'THEIR', 'CHANGING', 'OPPORTUNITIES', 'OR', 'NECESSITIES'] +7729-102255-0026-287: ref=['IN', 'THE', 'SHOOTING', 'OF', 'SHERIFF', 'JONES', 'IN', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'BEEDER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENSES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0026-287: hyp=['IN', 'THE', 'SHOOTING', 'OF', "SHERIFF'S", 'JONES', 'AND', 'LAWRENCE', 'AND', 'IN', 'THE', 'REFUSAL', 'OF', 'EX', 'GOVERNOR', 'READER', 'TO', 'ALLOW', 'THE', 'DEPUTY', 'MARSHAL', 'TO', 'ARREST', 'HIM', 'THEY', 'DISCOVERED', 'GRAVE', 'OFFENCES', 'AGAINST', 'THE', 'TERRITORIAL', 'AND', 'THE', 'UNITED', 'STATES', 'LAWS'] +7729-102255-0027-288: ref=['FOOTNOTE', 'SUMNER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] +7729-102255-0027-288: hyp=['FOOTNOTE', 'SUMMER', 'TO', 'SHANNON', 'MAY', 'TWELFTH', 'EIGHTEEN', 'FIFTY', 'SIX'] +7729-102255-0028-289: ref=['PRIVATE', 'PERSONS', 'WHO', 'HAD', 'LEASED', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 
'PREVENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0028-289: hyp=['PRIVATE', 'PERSONS', 'WHO', 'AT', 'LEAST', 'THE', 'FREE', 'STATE', 'HOTEL', 'VAINLY', 'BESOUGHT', 'THE', 'VARIOUS', 'AUTHORITIES', 'TO', 'PRESENT', 'THE', 'DESTRUCTION', 'OF', 'THEIR', 'PROPERTY'] +7729-102255-0029-290: ref=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 'REFUSED', 'TO', 'YIELD'] +7729-102255-0029-290: hyp=['TEN', 'DAYS', 'WERE', 'CONSUMED', 'IN', 'THESE', 'NEGOTIATIONS', 'BUT', 'THE', 'SPIRIT', 'OF', 'VENGEANCE', 'REFUSED', 'TO', 'YIELD'] +7729-102255-0030-291: ref=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0030-291: hyp=['HE', 'SUMMONED', 'HALF', 'A', 'DOZEN', 'CITIZENS', 'TO', 'JOIN', 'HIS', 'POSSE', 'WHO', 'FOLLOWED', 'OBEYED', 'AND', 'ASSISTED', 'HIM'] +7729-102255-0031-292: ref=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TWO', 'ARRESTS'] +7729-102255-0031-292: hyp=['HE', 'CONTINUED', 'HIS', 'PRETENDED', 'SEARCH', 'AND', 'TO', 'GIVE', 'COLOR', 'TO', 'HIS', 'ERRAND', 'MADE', 'TO', 'ARREST'] +7729-102255-0032-293: ref=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0032-293: hyp=['THE', 'FREE', 'STATE', 'HOTEL', 'A', 'STONE', 'BUILDING', 'IN', 'DIMENSIONS', 'FIFTY', 'BY', 'SEVENTY', 'FEET', 'THREE', 'STORIES', 'HIGH', 'AND', 'HANDSOMELY', 'FURNISHED', 'PREVIOUSLY', 'OCCUPIED', 'ONLY', 'FOR', 'LODGING', 'ROOMS', 'ON', 'THAT', 'DAY', 'FOR', 'THE', 'FIRST', 'TIME', 'OPENED', 'ITS', 'TABLE', 'ACCOMMODATIONS', 'TO', 'THE', 'PUBLIC', 'AND', 'PROVIDED', 'A', 'FREE', 'DINNER', 'IN', 'HONOR', 'OF', 'THE', 'OCCASION'] +7729-102255-0033-294: ref=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0033-294: hyp=['AS', 'HE', 'HAD', 'PROMISED', 'TO', 'PROTECT', 'THE', 'HOTEL', 'THE', 'REASSURED', 'CITIZENS', 'BEGAN', 'TO', 'LAUGH', 'AT', 'THEIR', 'OWN', 'FEARS'] +7729-102255-0034-295: ref=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0034-295: hyp=['TO', 'THEIR', 'SORROW', 'THEY', 'WERE', 'SOON', 'UNDECEIVED'] +7729-102255-0035-296: ref=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0035-296: hyp=['THE', 'MILITARY', 'FORCE', 'PARTLY', 'RABBLE', 'PARTLY', 'ORGANIZED', 'HAD', 'MEANWHILE', 'MOVED', 'INTO', 'THE', 'TOWN'] +7729-102255-0036-297: ref=['HE', 'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0036-297: hyp=['HE', 'PLANTED', 'A', 'COMPANY', 'BEFORE', 'THE', 'HOTEL', 'AND', 'DEMANDED', 'A', 'SURRENDER', 'OF', 'THE', 'ARMS', 'BELONGING', 'TO', 'THE', 'FREE', 'STATE', 'MILITARY', 'COMPANIES'] +7729-102255-0037-298: ref=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 
'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0037-298: hyp=['HALF', 'AN', 'HOUR', 'LATER', 'TURNING', 'A', 'DEAF', 'EAR', 'TO', 'ALL', 'REMONSTRANCE', 'HE', 'GAVE', 'THE', 'PROPRIETORS', 'UNTIL', 'FIVE', "O'CLOCK", 'TO', 'REMOVE', 'THEIR', 'FAMILIES', 'AND', 'PERSONAL', 'PROPERTY', 'FROM', 'THE', 'FREE', 'STATE', 'HOTEL'] +7729-102255-0038-299: ref=['ATCHISON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0038-299: hyp=['ATTITSON', 'WHO', 'HAD', 'BEEN', 'HARANGUING', 'THE', 'MOB', 'PLANTED', 'HIS', 'TWO', 'GUNS', 'BEFORE', 'THE', 'BUILDING', 'AND', 'TRAINED', 'THEM', 'UPON', 'IT'] +7729-102255-0039-300: ref=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0039-300: hyp=['THE', 'INMATES', 'BEING', 'REMOVED', 'AT', 'THE', 'APPOINTED', 'HOUR', 'A', 'FEW', 'CANNON', 'BALLS', 'WERE', 'FIRED', 'THROUGH', 'THE', 'STONE', 'WALLS'] +7729-102255-0040-301: ref=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'EMIGRANT', 'AID', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONORABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0040-301: hyp=['IN', 'THIS', 'INCIDENT', 'CONTRASTING', 'THE', 'CREATIVE', 'AND', 'THE', 'DESTRUCTIVE', 'SPIRIT', 'OF', 'THE', 'FACTIONS', 'THE', 'IMMIGRANT', 'AIDS', 'SOCIETY', 'OF', 'MASSACHUSETTS', 'FINDS', 'ITS', 'MOST', 'HONORABLE', 'AND', 'TRIUMPHANT', 'VINDICATION'] +7729-102255-0041-302: ref=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGE', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0041-302: hyp=['THE', 'WHOLE', 'PROCEEDING', 'WAS', 'SO', 'CHILDISH', 'THE', 'MISERABLE', 'PLOT', 'SO', 'TRANSPARENT', 'THE', 'OUTRAGED', 'SO', 'GROSS', 'AS', 'TO', 'BRING', 'DISGUST', 'TO', 'THE', 'BETTER', 'CLASS', 'OF', 'BORDER', 'RUFFIANS', 'WHO', 'WERE', 'WITNESSES', 'AND', 'ACCESSORIES'] +7729-102255-0042-303: ref=['RELOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0042-303: hyp=['RE', 'LOCATED', 'FOOTNOTE', 'GOVERNOR', 'ROBINSON', 'BEING', 'ON', 'HIS', 'WAY', 'EAST', 'THE', 'STEAMBOAT', 'ON', 'WHICH', 'HE', 'WAS', 'TRAVELLING', 'STOPPED', 'AT', 'LEXINGTON', 'MISSOURI'] +7729-102255-0043-304: ref=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LAND', 'TO', 'WESTPORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'AND', 'LEAVENWORTH'] +7729-102255-0043-304: hyp=['IN', 'A', 'FEW', 'DAYS', 'AN', 'OFFICER', 'CAME', 'WITH', 'A', 'REQUISITION', 'FROM', 'GOVERNOR', 'SHANNON', 'AND', 'TOOK', 'THE', 'PRISONER', 'BY', 'LANDA', 'WEST', 'PORT', 'AND', 'AFTERWARDS', 'FROM', 'THERE', 'TO', 'KANSAS', 'CITY', 'IN', 'LEVINWORTH'] +7729-102255-0044-305: ref=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 
'KICKAPOO', 'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEAVENWORTH', 'UNDER', 'A', 'REIGN', 'OF', 'TERROR'] +7729-102255-0044-305: hyp=['HERE', 'HE', 'WAS', 'PLACED', 'IN', 'THE', 'CUSTODY', 'OF', 'CAPTAIN', 'MARTIN', 'OF', 'THE', 'KICKAPOO', 'RANGERS', 'WHO', 'PROVED', 'A', 'KIND', 'JAILER', 'AND', 'MATERIALLY', 'ASSISTED', 'IN', 'PROTECTING', 'HIM', 'FROM', 'THE', 'DANGEROUS', 'INTENTIONS', 'OF', 'THE', 'MOB', 'WHICH', 'AT', 'THAT', 'TIME', 'HELD', 'LEVIN', 'WORTH', 'UNDER', 'THE', 'REIGN', 'OF', 'TERROR'] +7729-102255-0045-306: ref=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0045-306: hyp=['CAPTAIN', 'MARTIN', 'SAID', 'I', 'SHALL', 'GIVE', 'YOU', 'A', 'PISTOL', 'TO', 'HELP', 'PROTECT', 'YOURSELF', 'IF', 'WORSE', 'COMES', 'TO', 'WORST'] +7729-102255-0046-307: ref=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +7729-102255-0046-307: hyp=['IN', 'THE', 'EARLY', 'MORNING', 'OF', 'THE', 'NEXT', 'DAY', 'MAY', 'TWENTY', 'NINTH', 'A', 'COMPANY', 'OF', 'DRAGOONS', 'WITH', 'ONE', 'EMPTY', 'SADDLE', 'CAME', 'DOWN', 'FROM', 'THE', 'FORT', 'AND', 'WHILE', 'THE', 'PRO', 'SLAVERY', 'MEN', 'STILL', 'SLEPT', 'THE', 'PRISONER', 'AND', 'HIS', 'ESCORT', 'WERE', 'ON', 'THEIR', 'WAY', 'ACROSS', 'THE', 'PRAIRIES', 'TO', 'LECOMPTON', 'IN', 'THE', 'CHARGE', 'OF', 'OFFICERS', 'OF', 'THE', 'UNITED', 'STATES', 'ARMY'] +8224-274381-0000-1451: ref=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0000-1451: hyp=['THOUGH', 'THROWN', 'INTO', 'PRISON', 'FOR', 'THIS', 'ENTERPRISE', 'AND', 'DETAINED', 'SOME', 'TIME', 'HE', 'WAS', 'NOT', 'DISCOURAGED', 'BUT', 'STILL', 'CONTINUED', 'BY', 'HIS', 'COUNTENANCE', 'AND', 'PROTECTION', 'TO', 'INFUSE', 'SPIRIT', 'INTO', 'THE', 'DISTRESSED', 'ROYALISTS'] +8224-274381-0001-1452: ref=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MERCHISTON', 'SON', 'OF', 'THE', 'FAMOUS', 'INVENTOR', 'OF', 'THE', 'LOGARITHMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0001-1452: hyp=['AMONG', 'OTHER', 'PERSONS', 'OF', 'DISTINCTION', 'WHO', 'UNITED', 'THEMSELVES', 'TO', 'HIM', 'WAS', 'LORD', 'NAPIER', 'OF', 'MURCHISON', 'SON', 'OF', 'THE', 'FAMOUS', 'INVENTOR', 'OF', 'THE', 'LOGARTHEMS', 'THE', 'PERSON', 'TO', 'WHOM', 'THE', 'TITLE', 'OF', 'A', 'GREAT', 'MAN', 'IS', 'MORE', 'JUSTLY', 'DUE', 'THAN', 'TO', 'ANY', 'OTHER', 'WHOM', 'HIS', 'COUNTRY', 'EVER', 'PRODUCED'] +8224-274381-0002-1453: ref=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 
'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCULCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 'PERHAPS', 'INCLINED'] +8224-274381-0002-1453: hyp=['WHILE', 'THE', 'FORMER', 'FORETOLD', 'THAT', 'THE', 'SCOTTISH', 'COVENANTERS', 'WERE', 'SECRETLY', 'FORMING', 'A', 'UNION', 'WITH', 'THE', 'ENGLISH', 'PARLIAMENT', 'AND', 'INCALCATED', 'THE', 'NECESSITY', 'OF', 'PREVENTING', 'THEM', 'BY', 'SOME', 'VIGOROUS', 'UNDERTAKING', 'THE', 'LATTER', 'STILL', 'INSISTED', 'THAT', 'EVERY', 'SUCH', 'ATTEMPT', 'WOULD', 'PRECIPITATE', 'THEM', 'INTO', 'MEASURES', 'TO', 'WHICH', 'OTHERWISE', 'THEY', 'WERE', 'NOT', 'PERHAPS', 'INCLINED'] +8224-274381-0003-1454: ref=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNSELS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0003-1454: hyp=['THE', "KING'S", 'EARS', 'WERE', 'NOW', 'OPEN', 'TO', "MONTROSE'S", 'COUNCILS', 'WHO', 'PROPOSED', 'NONE', 'BUT', 'THE', 'BOLDEST', 'AND', 'MOST', 'DARING', 'AGREEABLY', 'TO', 'THE', 'DESPERATE', 'STATE', 'OF', 'THE', 'ROYAL', 'CAUSE', 'IN', 'SCOTLAND'] +8224-274381-0004-1455: ref=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELCHO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0004-1455: hyp=['FIVE', 'HUNDRED', 'MEN', 'MORE', 'WHO', 'HAD', 'BEEN', 'LEVIED', 'BY', 'THE', 'COVENANTERS', 'WERE', 'PERSUADED', 'TO', 'EMBRACE', 'THE', 'ROYAL', 'CAUSE', 'AND', 'WITH', 'THIS', 'COMBINED', 'FORCE', 'HE', 'HASTENED', 'TO', 'ATTACK', 'LORD', 'ELKO', 'WHO', 'LAY', 'AT', 'PERTH', 'WITH', 'AN', 'ARMY', 'OF', 'SIX', 'THOUSAND', 'MEN', 'ASSEMBLED', 'UPON', 'THE', 'FIRST', 'NEWS', 'OF', 'THE', 'IRISH', 'INVASION'] +8224-274381-0005-1456: ref=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARDS', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLEY', 'AND', 'THE', 'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0005-1456: hyp=['DREADING', 'THE', 'SUPERIOR', 'POWER', 'OF', 'ARGYLE', 'WHO', 'HAVING', 'JOINED', 'HIS', 'VASSALS', 'TO', 'A', 'FORCE', 'LEVIED', 'BY', 'THE', 'PUBLIC', 'WAS', 'APPROACHING', 'WITH', 'A', 'CONSIDERABLE', 'ARMY', 'MONTROSE', 'HASTENED', 'NORTHWARD', 'IN', 'ORDER', 'TO', 'ROUSE', 'AGAIN', 'THE', 'MARQUIS', 'OF', 'HUNTLY', 'AND', 'THE', 'GORDONS', 'WHO', 'HAVING', 'BEFORE', 'HASTILY', 'TAKEN', 'ARMS', 'HAD', 'BEEN', 'INSTANTLY', 'SUPPRESSED', 'BY', 'THE', 'COVENANTERS'] +8224-274381-0006-1457: ref=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 
'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0006-1457: hyp=['THIS', "NOBLEMAN'S", 'CHARACTER', 'THOUGH', 'CELEBRATED', 'FOR', 'POLITICAL', 'COURAGE', 'AND', 'CONDUCT', 'WAS', 'VERY', 'LOW', 'FOR', 'MILITARY', 'PROWESS', 'AND', 'AFTER', 'SOME', 'SKIRMISHES', 'IN', 'WHICH', 'HE', 'WAS', 'WORSTED', 'HE', 'HERE', 'ALLOWED', 'MONTROSE', 'TO', 'ESCAPE', 'HIM'] +8224-274381-0007-1458: ref=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0007-1458: hyp=['BY', 'QUICK', 'MARCHES', 'THROUGH', 'THESE', 'INACCESSIBLE', 'MOUNTAINS', 'THAT', 'GENERAL', 'FREED', 'HIMSELF', 'FROM', 'THE', 'SUPERIOR', 'FORCES', 'OF', 'THE', 'COVENANTERS'] +8224-274381-0008-1459: ref=['WITH', 'THESE', 'AND', 'SOME', 'REENFORCEMENTS', 'OF', 'THE', 'ATHOLEMEN', 'AND', 'MACDONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0008-1459: hyp=['WITH', 'THESE', 'AND', 'SOME', 'REINFORCEMENTS', 'OF', 'THE', 'ETHEL', 'MEN', 'AND', 'MC', 'DONALDS', 'WHOM', 'HE', 'HAD', 'RECALLED', 'MONTROSE', 'FELL', 'SUDDENLY', 'UPON', "ARGYLE'S", 'COUNTRY', 'AND', 'LET', 'LOOSE', 'UPON', 'IT', 'ALL', 'THE', 'RAGE', 'OF', 'WAR', 'CARRYING', 'OFF', 'THE', 'CATTLE', 'BURNING', 'THE', 'HOUSES', 'AND', 'PUTTING', 'THE', 'INHABITANTS', 'TO', 'THE', 'SWORD'] +8224-274381-0009-1460: ref=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'ARGYLE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INNERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0009-1460: hyp=['THIS', 'SEVERITY', 'BY', 'WHICH', 'MONTROSE', 'SULLIED', 'HIS', 'VICTORIES', 'WAS', 'THE', 'RESULT', 'OF', 'PRIVATE', 'ANIMOSITY', 'AGAINST', 'THE', 'CHIEFTAIN', 'AS', 'MUCH', 'AS', 'OF', 'ZEAL', 'FOR', 'THE', 'PUBLIC', 'CAUSE', 'OUR', 'GUILE', 'COLLECTING', 'THREE', 'THOUSAND', 'MEN', 'MARCHED', 'IN', 'QUEST', 'OF', 'THE', 'ENEMY', 'WHO', 'HAD', 'RETIRED', 'WITH', 'THEIR', 'PLUNDER', 'AND', 'HE', 'LAY', 'AT', 'INERLOCHY', 'SUPPOSING', 'HIMSELF', 'STILL', 'AT', 'A', 'CONSIDERABLE', 'DISTANCE', 'FROM', 'THEM'] +8224-274381-0010-1461: ref=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'INNERLOCHY', 'AND', 'PRESENTED', 'HIMSELF', 'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'AFFRIGHTENED', 'COVENANTERS'] +8224-274381-0010-1461: hyp=['BY', 'A', 'QUICK', 'AND', 'UNEXPECTED', 'MARCH', 'MONTROSE', 'HASTENED', 'TO', 'IN', 'A', 'LOCKY', 'AND', 'PRESENTED', 'HIMSELF', 'IN', 'ORDER', 'OF', 'BATTLE', 'BEFORE', 'THE', 'SURPRISED', 'BUT', 'NOT', 'A', 'FRIGHTENED', 'COVENANTERS'] +8224-274381-0011-1462: ref=['HIS', 'CONDUCT', 'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0011-1462: hyp=['HIS', 'CONDUCT', 
'AND', 'PRESENCE', 'OF', 'MIND', 'IN', 'THIS', 'EMERGENCE', 'APPEARED', 'CONSPICUOUS'] +8224-274381-0012-1463: ref=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0012-1463: hyp=['MONTROSE', 'WEAK', 'IN', 'CAVALRY', 'HERE', 'LINED', 'HIS', 'TROOPS', 'OF', 'HORSE', 'WITH', 'INFANTRY', 'AND', 'AFTER', 'PUTTING', 'THE', "ENEMY'S", 'HORSE', 'TO', 'ROUT', 'FELL', 'WITH', 'UNITED', 'FORCE', 'UPON', 'THEIR', 'FOOT', 'WHO', 'WERE', 'ENTIRELY', 'CUT', 'IN', 'PIECES', 'THOUGH', 'WITH', 'THE', 'LOSS', 'OF', 'THE', 'GALLANT', 'LORD', 'GORDON', 'ON', 'THE', 'PART', 'OF', 'THE', 'ROYALISTS'] +8224-274381-0013-1464: ref=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0013-1464: hyp=['FROM', 'THE', 'SAME', 'MEN', 'NEW', 'REGIMENTS', 'AND', 'NEW', 'COMPANIES', 'WERE', 'FORMED', 'DIFFERENT', 'OFFICERS', 'APPOINTED', 'AND', 'THE', 'WHOLE', 'MILITARY', 'FORCE', 'PUT', 'INTO', 'SUCH', 'HANDS', 'AS', 'THE', 'INDEPENDENTS', 'COULD', 'RELY', 'ON'] +8224-274381-0014-1465: ref=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'UNWARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0014-1465: hyp=['BESIDES', 'MEMBERS', 'OF', 'PARLIAMENT', 'WHO', 'WERE', 'EXCLUDED', 'MANY', 'OFFICERS', 'UNWILLING', 'TO', 'SERVE', 'UNDER', 'THE', 'NEW', 'GENERALS', 'THREW', 'UP', 'THEIR', 'COMMISSIONS', 'AND', 'THEN', 'WARILY', 'FACILITATED', 'THE', 'PROJECT', 'OF', 'PUTTING', 'THE', 'ARMY', 'ENTIRELY', 'INTO', 'THE', 'HANDS', 'OF', 'THAT', 'FACTION'] +8224-274381-0015-1466: ref=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0015-1466: hyp=['THOUGH', 'THE', 'DISCIPLINE', 'OF', 'THE', 'FORMER', 'PARLIAMENTARY', 'ARMY', 'WAS', 'NOT', 'CONTEMPTIBLE', 'A', 'MORE', 'EXACT', 'PLAN', 'WAS', 'INTRODUCED', 'AND', 'RIGOROUSLY', 'EXECUTED', 'BY', 'THESE', 'NEW', 'COMMANDERS'] +8224-274381-0016-1467: ref=['VALOR', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0016-1467: hyp=['VALOUR', 'INDEED', 'WAS', 'VERY', 'GENERALLY', 'DIFFUSED', 'OVER', 'THE', 'ONE', 'PARTY', 'AS', 'WELL', 'AS', 'THE', 'OTHER', 'DURING', 'THIS', 'PERIOD', 'DISCIPLINE', 'ALSO', 'WAS', 
'ATTAINED', 'BY', 'THE', 'FORCES', 'OF', 'THE', 'PARLIAMENT', 'BUT', 'THE', 'PERFECTION', 'OF', 'THE', 'MILITARY', 'ART', 'IN', 'CONCERTING', 'THE', 'GENERAL', 'PLANS', 'OF', 'ACTION', 'AND', 'THE', 'OPERATIONS', 'OF', 'THE', 'FIELD', 'SEEMS', 'STILL', 'ON', 'BOTH', 'SIDES', 'TO', 'HAVE', 'BEEN', 'IN', 'A', 'GREAT', 'MEASURE', 'WANTING'] +8224-274381-0017-1468: ref=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WHERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274381-0017-1468: hyp=['HISTORIANS', 'AT', 'LEAST', 'PERHAPS', 'FROM', 'THEIR', 'OWN', 'IGNORANCE', 'AND', 'INEXPERIENCE', 'HAVE', 'NOT', 'REMARKED', 'ANY', 'THING', 'BUT', 'A', 'HEADLONG', 'IMPETUOUS', 'CONDUCT', 'EACH', 'PARTY', 'HURRYING', 'TO', 'A', 'BATTLE', 'WERE', 'VALOR', 'AND', 'FORTUNE', 'CHIEFLY', 'DETERMINED', 'THE', 'SUCCESS'] +8224-274384-0000-1437: ref=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', 'ALBANS', 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0000-1437: hyp=['HE', 'PASSED', 'THROUGH', 'HENLEY', 'SAINT', "ALBAN'S", 'AND', 'CAME', 'SO', 'NEAR', 'TO', 'LONDON', 'AS', 'HARROW', 'ON', 'THE', 'HILL'] +8224-274384-0001-1438: ref=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0001-1438: hyp=['THE', 'SCOTTISH', 'GENERALS', 'AND', 'COMMISSIONERS', 'AFFECTED', 'GREAT', 'SURPRISE', 'ON', 'THE', 'APPEARANCE', 'OF', 'THE', 'KING', 'AND', 'THOUGH', 'THEY', 'PAID', 'HIM', 'ALL', 'THE', 'EXTERIOR', 'RESPECT', 'DUE', 'TO', 'HIS', 'DIGNITY', 'THEY', 'INSTANTLY', 'SET', 'A', 'GUARD', 'UPON', 'HIM', 'UNDER', 'COLOR', 'OF', 'PROTECTION', 'AND', 'MADE', 'HIM', 'IN', 'REALITY', 'A', 'PRISONER'] +8224-274384-0002-1439: ref=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0002-1439: hyp=['THEY', 'INFORMED', 'THE', 'ENGLISH', 'PARLIAMENT', 'OF', 'THIS', 'UNEXPECTED', 'INCIDENT', 'AND', 'ASSURED', 'THEM', 'THAT', 'THEY', 'HAD', 'ENTERED', 'INTO', 'NO', 'PRIVATE', 'TREATY', 'WITH', 'THE', 'KING'] +8224-274384-0003-1440: ref=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0003-1440: hyp=['OR', 'HATH', 'HE', 'GIVEN', 'US', 'ANY', 'GIFT'] +8224-274384-0004-1441: ref=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] +8224-274384-0004-1441: hyp=['AND', 'THE', 'MEN', 'OF', 'ISRAEL', 'ANSWERED', 'THE', 'MEN', 'OF', 'JUDAH', 'AND', 'SAID', 'WE', 'HAVE', 'TEN', 'PARTS', 'IN', 'THE', 'KING', 'AND', 'WE', 'HAVE', 'ALSO', 'MORE', 'RIGHT', 'IN', 'DAVID', 'THAN', 'YE', 'WHY', 'THEN', 'DID', 'YE', 'DESPISE', 'US', 'THAT', 
'OUR', 'ADVICE', 'SHOULD', 'NOT', 'BE', 'FIRST', 'HAD', 'IN', 'BRINGING', 'BACK', 'OUR', 'KING'] +8224-274384-0005-1442: ref=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 'ORDERED', 'THIS', 'PSALM', 'TO', 'BE', 'SUNG'] +8224-274384-0005-1442: hyp=['ANOTHER', 'PREACHER', 'AFTER', 'REPROACHING', 'HIM', 'TO', 'HIS', 'FACE', 'WITH', 'HIS', 'MISGOVERNMENT', 'ORDERED', 'THIS', 'SUM', 'TO', 'BE', 'SUNG'] +8224-274384-0006-1443: ref=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0006-1443: hyp=['THE', 'KING', 'STOOD', 'UP', 'AND', 'CALLED', 'FOR', 'THAT', 'PSALM', 'WHICH', 'BEGINS', 'WITH', 'THESE', 'WORDS'] +8224-274384-0007-1444: ref=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0007-1444: hyp=['HAVE', 'MERCY', 'LORD', 'ON', 'ME', 'I', 'PRAY', 'FOR', 'MEN', 'WOULD', 'ME', 'DEVOUR'] +8224-274384-0008-1445: ref=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALLEN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0008-1445: hyp=['THE', 'GOOD', 'NATURED', 'AUDIENCE', 'IN', 'PITY', 'TO', 'FALL', 'IN', 'MAJESTY', 'SHOWED', 'FOR', 'ONCE', 'GREATER', 'DEFERENCE', 'TO', 'THE', 'KING', 'THAN', 'TO', 'THE', 'MINISTER', 'AND', 'SUNG', 'THE', 'PSALM', 'WHICH', 'THE', 'FORMER', 'HAD', 'CALLED', 'FOR'] +8224-274384-0009-1446: ref=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0009-1446: hyp=['THE', 'PARLIAMENT', 'AND', 'THE', 'SCOTS', 'LAID', 'THEIR', 'PROPOSALS', 'BEFORE', 'THE', 'KING'] +8224-274384-0010-1447: ref=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0010-1447: hyp=['BEFORE', 'THE', 'SETTLEMENT', 'OF', 'TERMS', 'THE', 'ADMINISTRATION', 'MUST', 'BE', 'POSSESSED', 'ENTIRELY', 'BY', 'THE', 'PARLIAMENTS', 'OF', 'BOTH', 'KINGDOMS', 'AND', 'HOW', 'INCOMPATIBLE', 'THAT', 'SCHEME', 'WITH', 'THE', 'LIBERTY', 'OF', 'THE', 'KING', 'IS', 'EASILY', 'IMAGINED'] +8224-274384-0011-1448: ref=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0011-1448: hyp=['THE', 'ENGLISH', 'IT', 'IS', 'EVIDENT', 'HAD', 'THEY', 'NOT', 'BEEN', 'PREVIOUSLY', 'ASSURED', 'OF', 'RECEIVING', 'THE', 'KING', 'WOULD', 'NEVER', 'HAVE', 'PARTED', 'WITH', 'SO', 'CONSIDERABLE', 'A', 'SUM', 'AND', 'WHILE', 'THEY', 'WEAKENED', 'THEMSELVES', 'BY', 'THE', 'SAME', 'MEASURE', 'HAVE', 'STRENGTHENED', 'A', 'PEOPLE', 'WITH', 'WHOM', 'THEY', 'MUST', 'AFTERWARDS', 'HAVE', 'SO', 'MATERIAL', 'AN', 'INTEREST', 'TO', 'DISCUSS'] +8224-274384-0012-1449: ref=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 
'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0012-1449: hyp=['IF', 'ANY', 'STILL', 'RETAINED', 'RANCOR', 'AGAINST', 'HIM', 'IN', 'HIS', 'PRESENT', 'CONDITION', 'THEY', 'PASSED', 'IN', 'SILENCE', 'WHILE', 'HIS', 'WELL', 'WISHERS', 'MORE', 'GENEROUS', 'THAN', 'PRUDENT', 'ACCOMPANIED', 'HIS', 'MARCH', 'WITH', 'TEARS', 'WITH', 'ACCLAMATIONS', 'AND', 'WITH', 'PRAYERS', 'FOR', 'HIS', 'SAFETY'] +8224-274384-0013-1450: ref=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8224-274384-0013-1450: hyp=['HIS', 'DEATH', 'IN', 'THIS', 'CONJUNCTURE', 'WAS', 'A', 'PUBLIC', 'MISFORTUNE'] +8230-279154-0000-617: ref=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0000-617: hyp=['THE', 'ANALYSIS', 'OF', 'KNOWLEDGE', 'WILL', 'OCCUPY', 'US', 'UNTIL', 'THE', 'END', 'OF', 'THE', 'THIRTEENTH', 'LECTURE', 'AND', 'IS', 'THE', 'MOST', 'DIFFICULT', 'PART', 'OF', 'OUR', 'WHOLE', 'ENTERPRISE'] +8230-279154-0001-618: ref=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'CORRELATES', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0001-618: hyp=['WHAT', 'IS', 'CALLED', 'PERCEPTION', 'DIFFERS', 'FROM', 'SENSATION', 'BY', 'THE', 'FACT', 'THAT', 'THE', 'SENSATIONAL', 'INGREDIENTS', 'BRING', 'UP', 'HABITUAL', 'ASSOCIATES', 'IMAGES', 'AND', 'EXPECTATIONS', 'OF', 'THEIR', 'USUAL', 'COROLLETS', 'ALL', 'OF', 'WHICH', 'ARE', 'SUBJECTIVELY', 'INDISTINGUISHABLE', 'FROM', 'THE', 'SENSATION'] +8230-279154-0002-619: ref=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERYONE', 'WOULD', 'AGREE', 'THAT', 'IS', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0002-619: hyp=['WHETHER', 'OR', 'NOT', 'THIS', 'PRINCIPLE', 'IS', 'LIABLE', 'TO', 'EXCEPTIONS', 'EVERY', 'ONE', 'WOULD', 'AGREE', 'THAT', 'IT', 'HAS', 'A', 'BROAD', 'MEASURE', 'OF', 'TRUTH', 'THOUGH', 'THE', 'WORD', 'EXACTLY', 'MIGHT', 'SEEM', 'AN', 'OVERSTATEMENT', 'AND', 'IT', 'MIGHT', 'SEEM', 'MORE', 'CORRECT', 'TO', 'SAY', 'THAT', 'IDEAS', 'APPROXIMATELY', 'REPRESENT', 'IMPRESSIONS'] +8230-279154-0003-620: ref=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0003-620: hyp=['AND', 'WHAT', 'SORT', 'OF', 'EVIDENCE', 'IS', 'LOGICALLY', 'POSSIBLE'] +8230-279154-0004-621: ref=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'A', 'POPULATION', 'THAT', 'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0004-621: hyp=['THERE', 'IS', 'NO', 'LOGICAL', 'IMPOSSIBILITY', 'IN', 'THE', 'HYPOTHESIS', 'THAT', 'THE', 'WORLD', 'SPRANG', 'INTO', 'BEING', 'FIVE', 'MINUTES', 'AGO', 'EXACTLY', 'AS', 'IT', 'THEN', 'WAS', 'WITH', 'THE', 'POPULATION', 
'THAT', 'REMEMBERED', 'A', 'WHOLLY', 'UNREAL', 'PAST'] +8230-279154-0005-622: ref=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0005-622: hyp=['ALL', 'THAT', 'I', 'AM', 'DOING', 'IS', 'TO', 'USE', 'ITS', 'LOGICAL', 'TENABILITY', 'AS', 'A', 'HELP', 'IN', 'THE', 'ANALYSIS', 'OF', 'WHAT', 'OCCURS', 'WHEN', 'WE', 'REMEMBER'] +8230-279154-0006-623: ref=['THE', 'BEHAVIOURIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOUR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0006-623: hyp=['THE', 'BEHAVIOURIST', 'WHO', 'ATTEMPTS', 'TO', 'MAKE', 'PSYCHOLOGY', 'A', 'RECORD', 'OF', 'BEHAVIOR', 'HAS', 'TO', 'TRUST', 'HIS', 'MEMORY', 'IN', 'MAKING', 'THE', 'RECORD'] +8230-279154-0007-624: ref=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIOURIST', 'FEELS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0007-624: hyp=['HABIT', 'IS', 'A', 'CONCEPT', 'INVOLVING', 'THE', 'OCCURRENCE', 'OF', 'SIMILAR', 'EVENTS', 'AT', 'DIFFERENT', 'TIMES', 'IF', 'THE', 'BEHAVIOURIST', 'FILLS', 'CONFIDENT', 'THAT', 'THERE', 'IS', 'SUCH', 'A', 'PHENOMENON', 'AS', 'HABIT', 'THAT', 'CAN', 'ONLY', 'BE', 'BECAUSE', 'HE', 'TRUSTS', 'HIS', 'MEMORY', 'WHEN', 'IT', 'ASSURES', 'HIM', 'THAT', 'THERE', 'HAVE', 'BEEN', 'OTHER', 'TIMES'] +8230-279154-0008-625: ref=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0008-625: hyp=['BUT', 'I', 'DO', 'NOT', 'THINK', 'SUCH', 'AN', 'INFERENCE', 'IS', 'WARRANTED'] +8230-279154-0009-626: ref=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0009-626: hyp=['OUR', 'CONFIDENCE', 'OR', 'LACK', 'OF', 'CONFIDENCE', 'IN', 'THE', 'ACCURACY', 'OF', 'A', 'MEMORY', 'IMAGE', 'MUST', 'IN', 'FUNDAMENTAL', 'CASES', 'BE', 'BASED', 'UPON', 'A', 'CHARACTERISTIC', 'OF', 'THE', 'IMAGE', 'ITSELF', 'SINCE', 'WE', 'CANNOT', 'EVOKE', 'THE', 'PAST', 'BODILY', 'AND', 'COMPARE', 'IT', 'WITH', 'THE', 'PRESENT', 'IMAGE'] +8230-279154-0010-627: ref=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0010-627: hyp=['WE', 'SOMETIMES', 'HAVE', 'IMAGES', 'THAT', 'ARE', 'BY', 'NO', 'MEANS', 'PECULIARLY', 'VAGUE', 'WHICH', 'YET', 'WE', 'DO', 'NOT', 'TRUST', 'FOR', 'EXAMPLE', 'UNDER', 'THE', 'INFLUENCE', 'OF', 'FATIGUE', 'WE', 'MAY', 'SEE', 'A', "FRIEND'S", 'FACE', 'VIVIDLY', 'AND', 'CLEARLY', 'BUT', 'HORRIBLY', 'DISTORTED'] +8230-279154-0011-628: ref=['SOME', 'IMAGES', 'LIKE', 'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0011-628: hyp=['SOME', 'IMAGES', 'LIKE', 
'SOME', 'SENSATIONS', 'FEEL', 'VERY', 'FAMILIAR', 'WHILE', 'OTHERS', 'FEEL', 'STRANGE'] +8230-279154-0012-629: ref=['FAMILIARITY', 'IS', 'A', 'FEELING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0012-629: hyp=['FAMILIARITY', 'IS', 'A', 'FILLING', 'CAPABLE', 'OF', 'DEGREES'] +8230-279154-0013-630: ref=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0013-630: hyp=['IN', 'AN', 'IMAGE', 'OF', 'A', 'WELL', 'KNOWN', 'FACE', 'FOR', 'EXAMPLE', 'SOME', 'PARTS', 'MAY', 'FEEL', 'MORE', 'FAMILIAR', 'THAN', 'OTHERS', 'WHEN', 'THIS', 'HAPPENS', 'WE', 'HAVE', 'MORE', 'BELIEF', 'IN', 'THE', 'ACCURACY', 'OF', 'THE', 'FAMILIAR', 'PARTS', 'THAN', 'IN', 'THAT', 'OF', 'THE', 'UNFAMILIAR', 'PARTS'] +8230-279154-0014-631: ref=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0014-631: hyp=['I', 'COME', 'NOW', 'TO', 'THE', 'OTHER', 'CHARACTERISTIC', 'WHICH', 'MEMORY', 'IMAGES', 'MUST', 'HAVE', 'IN', 'ORDER', 'TO', 'ACCOUNT', 'FOR', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: ref=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0015-632: hyp=['THEY', 'MUST', 'HAVE', 'SOME', 'CHARACTERISTIC', 'WHICH', 'MAKES', 'US', 'REGARD', 'THEM', 'AS', 'REFERRING', 'TO', 'MORE', 'OR', 'LESS', 'REMOTE', 'PORTIONS', 'OF', 'THE', 'PAST'] +8230-279154-0016-633: ref=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0016-633: hyp=['IN', 'ACTUAL', 'FACT', 'THERE', 'ARE', 'DOUBTLESS', 'VARIOUS', 'FACTORS', 'THAT', 'CONCUR', 'IN', 'GIVING', 'US', 'THE', 'FEELING', 'OF', 'GREATER', 'OR', 'LESS', 'REMOTENESS', 'IN', 'SOME', 'REMEMBERED', 'EVENT'] +8230-279154-0017-634: ref=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FEELING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0017-634: hyp=['THERE', 'MAY', 'BE', 'A', 'SPECIFIC', 'FEELING', 'WHICH', 'COULD', 'BE', 'CALLED', 'THE', 'FILLING', 'OF', 'PASTNESS', 'ESPECIALLY', 'WHERE', 'IMMEDIATE', 'MEMORY', 'IS', 'CONCERNED'] +8230-279154-0018-635: ref=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] +8230-279154-0018-635: hyp=['THERE', 'IS', 'OF', 'COURSE', 'A', 'DIFFERENCE', 'BETWEEN', 'KNOWING', 'THE', 'TEMPORAL', 'RELATION', 'OF', 'A', 'REMEMBERED', 'EVENT', 'TO', 'THE', 'PRESENT', 'AND', 'KNOWING', 'THE', 'TIME', 'ORDER', 'OF', 'TWO', 'REMEMBERED', 'EVENTS'] +8230-279154-0019-636: ref=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 
'SUCH', 'FEELINGS', 'MUST', 'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0019-636: hyp=['IT', 'WOULD', 'SEEM', 'THAT', 'ONLY', 'RATHER', 'RECENT', 'EVENTS', 'CAN', 'BE', 'PLACED', 'AT', 'ALL', 'ACCURATELY', 'BY', 'MEANS', 'OF', 'FEELINGS', 'GIVING', 'THEIR', 'TEMPORAL', 'RELATION', 'TO', 'THE', 'PRESENT', 'BUT', 'IT', 'IS', 'CLEAR', 'THAT', 'SUCH', 'FEELINGS', 'MUST', 'PLAY', 'AN', 'ESSENTIAL', 'PART', 'IN', 'THE', 'PROCESS', 'OF', 'DATING', 'REMEMBERED', 'EVENTS'] +8230-279154-0020-637: ref=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0020-637: hyp=['IF', 'WE', 'HAD', 'RETAINED', 'THE', 'SUBJECT', 'OR', 'ACT', 'IN', 'KNOWLEDGE', 'THE', 'WHOLE', 'PROBLEM', 'OF', 'MEMORY', 'WOULD', 'HAVE', 'BEEN', 'COMPARATIVELY', 'SIMPLE'] +8230-279154-0021-638: ref=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0021-638: hyp=['REMEMBERING', 'HAS', 'TO', 'BE', 'A', 'PRESENT', 'OCCURRENCE', 'IN', 'SOME', 'WAY', 'RESEMBLING', 'OR', 'RELATED', 'TO', 'WHAT', 'IS', 'REMEMBERED'] +8230-279154-0022-639: ref=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0022-639: hyp=['SOME', 'POINTS', 'MAY', 'BE', 'TAKEN', 'AS', 'FIXED', 'AND', 'SUCH', 'AS', 'ANY', 'THEORY', 'OF', 'MEMORY', 'MUST', 'ARRIVE', 'AT'] +8230-279154-0023-640: ref=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0023-640: hyp=['IN', 'THIS', 'CASE', 'AS', 'IN', 'MOST', 'OTHERS', 'WHAT', 'MAY', 'BE', 'TAKEN', 'AS', 'CERTAIN', 'IN', 'ADVANCE', 'IS', 'RATHER', 'VAGUE'] +8230-279154-0024-641: ref=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0024-641: hyp=['THE', 'FIRST', 'OF', 'OUR', 'VAGUE', 'BUT', 'INDUBITABLE', 'DATA', 'IS', 'THAT', 'THERE', 'IS', 'KNOWLEDGE', 'OF', 'THE', 'PAST'] +8230-279154-0025-642: ref=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0025-642: hyp=['WE', 'MIGHT', 'PROVISIONALLY', 'THOUGH', 'PERHAPS', 'NOT', 'QUITE', 'CORRECTLY', 'DEFINE', 'MEMORY', 'AS', 'THAT', 'WAY', 'OF', 'KNOWING', 'ABOUT', 'THE', 'PAST', 'WHICH', 'HAS', 'NO', 'ANALOGUE', 'IN', 'OUR', 'KNOWLEDGE', 'OF', 'THE', 'FUTURE', 'SUCH', 'A', 'DEFINITION', 'WOULD', 'AT', 'LEAST', 'SERVE', 'TO', 'MARK', 'THE', 'PROBLEM', 'WITH', 'WHICH', 'WE', 'ARE', 'CONCERNED', 'THOUGH', 'SOME', 'EXPECTATIONS', 'MAY', 'DESERVE', 'TO', 'RANK', 'WITH', 'MEMORY', 'AS', 'REGARDS', 'IMMEDIACY'] +8230-279154-0026-643: ref=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 
'IN', 'THEORY'] +8230-279154-0026-643: hyp=['THIS', 'DISTINCTION', 'IS', 'VITAL', 'TO', 'THE', 'UNDERSTANDING', 'OF', 'MEMORY', 'BUT', 'IT', 'IS', 'NOT', 'SO', 'EASY', 'TO', 'CARRY', 'OUT', 'IN', 'PRACTICE', 'AS', 'IT', 'IS', 'TO', 'DRAW', 'IN', 'THEORY'] +8230-279154-0027-644: ref=['A', 'GRAMOPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMOPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0027-644: hyp=['A', 'GRAMMAPHONE', 'BY', 'THE', 'HELP', 'OF', 'SUITABLE', 'RECORDS', 'MIGHT', 'RELATE', 'TO', 'US', 'THE', 'INCIDENTS', 'OF', 'ITS', 'PAST', 'AND', 'PEOPLE', 'ARE', 'NOT', 'SO', 'DIFFERENT', 'FROM', 'GRAMOPHONES', 'AS', 'THEY', 'LIKE', 'TO', 'BELIEVE'] +8230-279154-0028-645: ref=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0028-645: hyp=['I', 'CAN', 'SET', 'TO', 'WORK', 'NOW', 'TO', 'REMEMBER', 'THINGS', 'I', 'NEVER', 'REMEMBERED', 'BEFORE', 'SUCH', 'AS', 'WHAT', 'I', 'HAD', 'TO', 'EAT', 'FOR', 'BREAKFAST', 'THIS', 'MORNING', 'AND', 'IT', 'CAN', 'HARDLY', 'BE', 'WHOLLY', 'HABIT', 'THAT', 'ENABLES', 'ME', 'TO', 'DO', 'THIS'] +8230-279154-0029-646: ref=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0029-646: hyp=['THE', 'FACT', 'THAT', 'A', 'MAN', 'CAN', 'RECITE', 'A', 'POEM', 'DOES', 'NOT', 'SHOW', 'THAT', 'HE', 'REMEMBERS', 'ANY', 'PREVIOUS', 'OCCASION', 'ON', 'WHICH', 'HE', 'HAS', 'RECITED', 'OR', 'READ', 'IT'] +8230-279154-0030-647: ref=["SEMON'S", 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0030-647: hyp=['SIMMONS', 'TWO', 'BOOKS', 'MENTIONED', 'IN', 'AN', 'EARLIER', 'LECTURE', 'DO', 'NOT', 'TOUCH', 'KNOWLEDGE', 'MEMORY', 'AT', 'ALL', 'CLOSELY'] +8230-279154-0031-648: ref=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0031-648: hyp=['THEY', 'GIVE', 'LAWS', 'ACCORDING', 'TO', 'WHICH', 'IMAGES', 'OF', 'PAST', 'OCCURRENCES', 'COME', 'INTO', 'OUR', 'MINDS', 'BUT', 'DO', 'NOT', 'DISCUSS', 'OUR', 'BELIEF', 'THAT', 'THESE', 'IMAGES', 'REFER', 'TO', 'PAST', 'OCCURRENCES', 'WHICH', 'IS', 'WHAT', 'CONSTITUTES', 'KNOWLEDGE', 'MEMORY'] +8230-279154-0032-649: ref=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0032-649: hyp=['IT', 'IS', 'THIS', 'THAT', 'IS', 'OF', 'INTEREST', 'TO', 'THEORY', 'OF', 'KNOWLEDGE'] +8230-279154-0033-650: ref=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0033-650: hyp=['IT', 'IS', 'BY', 'NO', 'MEANS', 'ALWAYS', 'RELIABLE', 'ALMOST', 'EVERYBODY', 'HAS', 'AT', 'SOME', 
'TIME', 'EXPERIENCED', 'THE', 'WELL', 'KNOWN', 'ILLUSION', 'THAT', 'ALL', 'THAT', 'IS', 'HAPPENING', 'NOW', 'HAPPENED', 'BEFORE', 'AT', 'SOME', 'TIME'] +8230-279154-0034-651: ref=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEADS', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0034-651: hyp=['WHENEVER', 'THE', 'SENSE', 'OF', 'FAMILIARITY', 'OCCURS', 'WITHOUT', 'A', 'DEFINITE', 'OBJECT', 'IT', 'LEAVES', 'US', 'TO', 'SEARCH', 'THE', 'ENVIRONMENT', 'UNTIL', 'WE', 'ARE', 'SATISFIED', 'THAT', 'WE', 'HAVE', 'FOUND', 'THE', 'APPROPRIATE', 'OBJECT', 'WHICH', 'LEADS', 'US', 'TO', 'THE', 'JUDGMENT', 'THIS', 'IS', 'FAMILIAR'] +8230-279154-0035-652: ref=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0035-652: hyp=['THUS', 'NO', 'KNOWLEDGE', 'AS', 'TO', 'THE', 'PAST', 'IS', 'TO', 'BE', 'DERIVED', 'FROM', 'THE', 'FEELING', 'OF', 'FAMILIARITY', 'ALONE'] +8230-279154-0036-653: ref=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0036-653: hyp=['A', 'FURTHER', 'STAGE', 'IS', 'RECOGNITION'] +8230-279154-0037-654: ref=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0037-654: hyp=['RECOGNITION', 'IN', 'THIS', 'SENSE', 'DOES', 'NOT', 'NECESSARILY', 'INVOLVE', 'MORE', 'THAN', 'A', 'HABIT', 'OF', 'ASSOCIATION', 'THE', 'KIND', 'OF', 'OBJECT', 'WE', 'ARE', 'SEEING', 'AT', 'THE', 'MOMENT', 'IS', 'ASSOCIATED', 'WITH', 'THE', 'WORD', 'CAT', 'OR', 'WITH', 'AN', 'AUDITORY', 'IMAGE', 'OF', 'PURRING', 'OR', 'WHATEVER', 'OTHER', 'CHARACTERISTIC', 'WE', 'MAY', 'HAPPEN', 'TO', 'RECOGNIZE', 'IN', 'THE', 'CAT', 'OF', 'THE', 'MOMENT'] +8230-279154-0038-655: ref=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 'THE', 'WORD'] +8230-279154-0038-655: hyp=['WE', 'ARE', 'OF', 'COURSE', 'IN', 'FACT', 'ABLE', 'TO', 'JUDGE', 'WHEN', 'WE', 'RECOGNIZE', 'AN', 'OBJECT', 'THAT', 'WE', 'HAVE', 'SEEN', 'IT', 'BEFORE', 'BUT', 'THIS', 'JUDGMENT', 'IS', 'SOMETHING', 'OVER', 'AND', 'ABOVE', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'AND', 'MAY', 'VERY', 'PROBABLY', 'BE', 'IMPOSSIBLE', 'TO', 'ANIMALS', 'THAT', 'NEVERTHELESS', 'HAVE', 'THE', 'EXPERIENCE', 'OF', 'RECOGNITION', 'IN', 'THIS', 'FIRST', 'SENSE', 'OF', 'THE', 'WORD'] +8230-279154-0039-656: ref=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 'NOT'] +8230-279154-0039-656: hyp=['THIS', 'KNOWLEDGE', 'IS', 'MEMORY', 'IN', 'ONE', 'SENSE', 'THOUGH', 'IN', 'ANOTHER', 'IT', 'IS', 
'NOT'] +8230-279154-0040-657: ref=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0040-657: hyp=['THERE', 'ARE', 'HOWEVER', 'SEVERAL', 'POINTS', 'IN', 'WHICH', 'SUCH', 'AN', 'ACCOUNT', 'OF', 'RECOGNITION', 'IS', 'INADEQUATE', 'TO', 'BEGIN', 'WITH', 'IT', 'MIGHT', 'SEEM', 'AT', 'FIRST', 'SIGHT', 'MORE', 'CORRECT', 'TO', 'DEFINE', 'RECOGNITION', 'AS', 'I', 'HAVE', 'SEEN', 'THIS', 'BEFORE', 'THAN', 'AS', 'THIS', 'HAS', 'EXISTED', 'BEFORE'] +8230-279154-0041-658: ref=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0041-658: hyp=['THE', 'DEFINITION', 'OF', 'MY', 'EXPERIENCE', 'IS', 'DIFFICULT', 'BROADLY', 'SPEAKING', 'IT', 'IS', 'EVERYTHING', 'THAT', 'IS', 'CONNECTED', 'WITH', 'WHAT', 'I', 'AM', 'EXPERIENCING', 'NOW', 'BY', 'CERTAIN', 'LINKS', 'OF', 'WHICH', 'THE', 'VARIOUS', 'FORMS', 'OF', 'MEMORY', 'ARE', 'AMONG', 'THE', 'MOST', 'IMPORTANT'] +8230-279154-0042-659: ref=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0042-659: hyp=['THUS', 'IF', 'I', 'RECOGNIZE', 'A', 'THING', 'THE', 'OCCASION', 'OF', 'ITS', 'PREVIOUS', 'EXISTENCE', 'IN', 'VIRTUE', 'OF', 'WHICH', 'I', 'RECOGNIZE', 'IT', 'FORMS', 'PART', 'OF', 'MY', 'EXPERIENCE', 'BY', 'DEFINITION', 'RECOGNITION', 'WILL', 'BE', 'ONE', 'OF', 'THE', 'MARKS', 'BY', 'WHICH', 'MY', 'EXPERIENCE', 'IS', 'SINGLED', 'OUT', 'FROM', 'THE', 'REST', 'OF', 'THE', 'WORLD'] +8230-279154-0043-660: ref=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'A', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 'OCCURRENCE', 'AS', 'RECOGNITION'] +8230-279154-0043-660: hyp=['OF', 'COURSE', 'THE', 'WORDS', 'THIS', 'HAS', 'EXISTED', 'BEFORE', 'ARE', 'A', 'VERY', 'INADEQUATE', 'TRANSLATION', 'OF', 'WHAT', 'ACTUALLY', 'HAPPENS', 'WHEN', 'WE', 'FORM', 'A', 'JUDGMENT', 'OF', 'RECOGNITION', 'BUT', 'THAT', 'IS', 'UNAVOIDABLE', 'WORDS', 'ARE', 'FRAMED', 'TO', 'EXPRESS', 'A', 'LEVEL', 'OF', 'THOUGHT', 'WHICH', 'IS', 'BY', 'NO', 'MEANS', 'PRIMITIVE', 'AND', 'ARE', 'QUITE', 'INCAPABLE', 'OF', 'EXPRESSING', 'SUCH', 'AN', 'ELEMENTARY', 'OCCURRENCE', 'AS', 'RECOGNITION'] +8455-210777-0000-972: ref=['I', 'REMAINED', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 
'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0000-972: hyp=['I', 'REMAINED', 'THERE', 'ALONE', 'FOR', 'MANY', 'HOURS', 'BUT', 'I', 'MUST', 'ACKNOWLEDGE', 'THAT', 'BEFORE', 'I', 'LEFT', 'THE', 'CHAMBERS', 'I', 'HAD', 'GRADUALLY', 'BROUGHT', 'MYSELF', 'TO', 'LOOK', 'AT', 'THE', 'MATTER', 'IN', 'ANOTHER', 'LIGHT'] +8455-210777-0001-973: ref=['HAD', 'EVA', 'CRASWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNIT', 'AND', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0001-973: hyp=['HAD', 'EITHER', 'CRUSWELLER', 'NOT', 'BEEN', 'GOOD', 'LOOKING', 'HAD', 'JACK', 'BEEN', 'STILL', 'AT', 'COLLEGE', 'HAD', 'SIR', 'KENNINGTON', 'OVAL', 'REMAINED', 'IN', 'ENGLAND', 'HAD', 'MISTER', 'BUNNITT', 'IN', 'THE', 'BAR', 'KEEPER', 'NOT', 'SUCCEEDED', 'IN', 'STOPPING', 'MY', 'CARRIAGE', 'ON', 'THE', 'HILL', 'SHOULD', 'I', 'HAVE', 'SUCCEEDED', 'IN', 'ARRANGING', 'FOR', 'THE', 'FINAL', 'DEPARTURE', 'OF', 'MY', 'OLD', 'FRIEND'] +8455-210777-0002-974: ref=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0002-974: hyp=['ON', 'ARRIVING', 'AT', 'HOME', 'AT', 'MY', 'OWN', 'RESIDENCE', 'I', 'FOUND', 'THAT', 'OUR', 'SALON', 'WAS', 'FILLED', 'WITH', 'A', 'BRILLIANT', 'COMPANY'] +8455-210777-0003-975: ref=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0003-975: hyp=['AS', 'I', 'SPOKE', 'I', 'MADE', 'HIM', 'A', 'GRACIOUS', 'BOW', 'AND', 'I', 'THINK', 'I', 'SHOWED', 'HIM', 'BY', 'MY', 'MODE', 'OF', 'ADDRESS', 'THAT', 'I', 'DID', 'NOT', 'BEAR', 'ANY', 'GRUDGE', 'AS', 'TO', 'MY', 'INDIVIDUAL', 'SELF'] +8455-210777-0004-976: ref=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0004-976: hyp=['I', 'HAVE', 'COME', 'TO', 'YOUR', 'SHORES', 'MISTER', 'PRESIDENT', 'WITH', 'THE', 'PURPOSE', 'OF', 'SEEING', 'HOW', 'THINGS', 'ARE', 'PROGRESSING', 'IN', 'THIS', 'DISTANT', 'QUARTER', 'OF', 'THE', 'WORLD'] +8455-210777-0005-977: ref=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0005-977: hyp=['WE', 'HAVE', 'OUR', 'LITTLE', 'STRUGGLES', 'HERE', 'AS', 'ELSEWHERE', 'AND', 'ALL', 'THINGS', 'CANNOT', 'BE', 'DONE', 'BY', 'ROSE', 'WATER'] +8455-210777-0006-978: ref=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLEAX', 'SAID', 'MY', 'WIFE'] +8455-210777-0006-978: hyp=['WE', 'ARE', 'QUITE', 'SATISFIED', 'NOW', 'CAPTAIN', 'BATTLE', 'AXE', 'SAID', 'MY', 'WIFE'] +8455-210777-0007-979: ref=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0007-979: hyp=['QUITE', 'SATISFIED', 'SAID', 'EVA'] +8455-210777-0008-980: ref=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 
'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0008-980: hyp=['THE', 'LADIES', 'IN', 'COMPLIANCE', 'WITH', 'THAT', 'SOFTNESS', 'OF', 'HEART', 'WHICH', 'IS', 'THEIR', 'CHARACTERISTIC', 'ARE', 'ON', 'ONE', 'SIDE', 'AND', 'THE', 'MEN', 'BY', 'WHOM', 'THE', 'WORLD', 'HAS', 'TO', 'BE', 'MANAGED', 'ARE', 'ON', 'THE', 'OTHER'] +8455-210777-0009-981: ref=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] +8455-210777-0009-981: hyp=['NO', 'DOUBT', 'IN', 'PROCESS', 'OF', 'TIME', 'THE', 'LADIES', 'WILL', 'FOLLOW'] +8455-210777-0010-982: ref=['THEIR', 'MASTERS', 'SAID', 'MISSUS', 'NEVERBEND'] +8455-210777-0010-982: hyp=['THEIR', 'MASTERS', 'SAID', 'MISSUS', 'NEVERBEND'] +8455-210777-0011-983: ref=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLEAX', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0011-983: hyp=['I', 'DID', 'NOT', 'MEAN', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'TO', 'TOUCH', 'UPON', 'PUBLIC', 'SUBJECTS', 'AT', 'SUCH', 'A', 'MOMENT', 'AS', 'THIS'] +8455-210777-0012-984: ref=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0012-984: hyp=['MISSUS', 'NEVERBEND', 'YOU', 'MUST', 'INDEED', 'BE', 'PROUD', 'OF', 'YOUR', 'SON'] +8455-210777-0013-985: ref=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0013-985: hyp=['JACK', 'HAD', 'BEEN', 'STANDING', 'IN', 'THE', 'FAR', 'CORNER', 'OF', 'THE', 'ROOM', 'TALKING', 'TO', 'EVA', 'AND', 'WAS', 'NOW', 'REDUCED', 'TO', 'SILENCE', 'BY', 'HIS', 'PRAISES'] +8455-210777-0014-986: ref=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0014-986: hyp=['SIR', 'KENNINGTON', 'OVAL', 'IS', 'A', 'VERY', 'FINE', 'PLAYER', 'SAID', 'MY', 'WIFE'] +8455-210777-0015-987: ref=['I', 'AND', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRASWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0015-987: hyp=['I', 'AM', 'MY', 'WIFE', 'AND', 'SON', 'AND', 'THE', 'TWO', 'CRESTWELLERS', 'AND', 'THREE', 'OR', 'FOUR', 'OTHERS', 'AGREED', 'TO', 'DINE', 'ON', 'BOARD', 'THE', 'SHIP', 'ON', 'THE', 'NEXT'] +8455-210777-0016-988: ref=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVOURED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BEFIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0016-988: hyp=['THIS', 'I', 'FELT', 'WAS', 'PAID', 'TO', 'ME', 'AS', 'BEING', 'PRESIDENT', 'OF', 'THE', 'REPUBLIC', 'AND', 'I', 'ENDEAVOURED', 'TO', 'BEHAVE', 'MYSELF', 'WITH', 'SUCH', 'MINGLED', 'HUMILITY', 'AND', 'DIGNITY', 'AS', 'MIGHT', 'BE', 'FIT', 'THE', 'OCCASION', 'BUT', 'I', 'COULD', 'NOT', 'BUT', 'FEEL', 'THAT', 'SOMETHING', 'WAS', 'WANTING', 'TO', 'THE', 'SIMPLICITY', 'OF', 'MY', 'ORDINARY', 'LIFE'] +8455-210777-0017-989: ref=['MY', 'WIFE', 'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMEN', 'A', 'VERY', 'GOOD', 'DINNER'] +8455-210777-0017-989: hyp=['MY', 'WIFE', 'ON', 'THE', 'SPUR', 'OF', 'THE', 'MOMENT', 'MANAGED', 'TO', 'GIVE', 'THE', 'GENTLEMAN', 'A', 'VERY', 'GOOD', 'DINNER'] 
+8455-210777-0018-990: ref=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'HER'] +8455-210777-0018-990: hyp=['THIS', 'SHE', 'SAID', 'WAS', 'TRUE', 'HOSPITALITY', 'AND', 'I', 'AM', 'NOT', 'SURE', 'THAT', 'I', 'DID', 'NOT', 'AGREE', 'WITH', 'THAT'] +8455-210777-0019-991: ref=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0019-991: hyp=['THEN', 'THERE', 'WERE', 'THREE', 'OR', 'FOUR', 'LEADING', 'MEN', 'OF', 'THE', 'COMMUNITY', 'WITH', 'THEIR', 'WIVES', 'WHO', 'WERE', 'FOR', 'THE', 'MOST', 'PART', 'THE', 'FATHERS', 'AND', 'MOTHERS', 'OF', 'THE', 'YOUNG', 'LADIES'] +8455-210777-0020-992: ref=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0020-992: hyp=['OH', 'YES', 'SAID', 'JACK', 'AND', "I'M", 'NOWHERE'] +8455-210777-0021-993: ref=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0021-993: hyp=['BUT', 'I', 'MEAN', 'TO', 'HAVE', 'MY', 'INNINGS', 'BEFORE', 'LONG'] +8455-210777-0022-994: ref=['OF', 'WHAT', 'MISSUS', 'NEVERBEND', 'HAD', 'GONE', 'THROUGH', 'IN', 'PROVIDING', 'BIRDS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0022-994: hyp=['OF', 'WHAT', 'MISSUS', 'NEVERBEND', 'HAD', 'GONE', 'THROUGH', 'IN', 'PROVIDING', 'BIRDS', 'BEASTS', 'AND', 'FISHES', 'NOT', 'TO', 'TALK', 'OF', 'TARTS', 'AND', 'JELLIES', 'FOR', 'THE', 'DINNER', 'OF', 'THAT', 'DAY', 'NO', 'ONE', 'BUT', 'MYSELF', 'CAN', 'HAVE', 'ANY', 'IDEA', 'BUT', 'IT', 'MUST', 'BE', 'ADMITTED', 'THAT', 'SHE', 'ACCOMPLISHED', 'HER', 'TASK', 'WITH', 'THOROUGH', 'SUCCESS'] +8455-210777-0023-995: ref=['WE', 'SAT', 'WITH', 'THE', 'OFFICERS', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0023-995: hyp=['WE', 'SAT', 'WITH', 'THE', 'OFFICER', 'SOME', 'LITTLE', 'TIME', 'AFTER', 'DINNER', 'AND', 'THEN', 'WENT', 'ASHORE'] +8455-210777-0024-996: ref=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0024-996: hyp=['HOW', 'MUCH', 'OF', 'EVIL', 'OF', 'REAL', 'ACCOMPLISHED', 'EVIL', 'HAD', 'THERE', 'NOT', 'OCCURRED', 'TO', 'ME', 'DURING', 'THE', 'LAST', 'FEW', 'DAYS'] +8455-210777-0025-997: ref=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0025-997: hyp=['WHAT', 'COULD', 'I', 'DO', 'NOW', 'BUT', 'JUST', 'LAY', 'MYSELF', 'DOWN', 'AND', 'DIE'] +8455-210777-0026-998: ref=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0026-998: hyp=['AND', 'THE', 'DEATH', 'OF', 'WHICH', 'I', 'DREAMT', 'COULD', 'NOT', 'ALAS'] +8455-210777-0027-999: ref=['WHEN', 'THIS', 'CAPTAIN', 'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FARTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0027-999: hyp=['WHEN', 'THIS', 'CAPTAIN', 
'SHOULD', 'HAVE', 'TAKEN', 'HIMSELF', 'AND', 'HIS', 'VESSEL', 'BACK', 'TO', 'ENGLAND', 'I', 'WOULD', 'RETIRE', 'TO', 'A', 'SMALL', 'FARM', 'WHICH', 'I', 'POSSESSED', 'AT', 'THE', 'FURTHEST', 'SIDE', 'OF', 'THE', 'ISLAND', 'AND', 'THERE', 'IN', 'SECLUSION', 'WOULD', 'I', 'END', 'MY', 'DAYS'] +8455-210777-0028-1000: ref=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0028-1000: hyp=['JACK', 'WOULD', 'BECOME', "EVA'S", 'HAPPY', 'HUSBAND', 'AND', 'WOULD', 'REMAIN', 'AMIDST', 'THE', 'HURRIED', 'DUTIES', 'OF', 'THE', 'EAGER', 'WORLD'] +8455-210777-0029-1001: ref=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0029-1001: hyp=['THINKING', 'OF', 'ALL', 'THIS', 'I', 'WENT', 'TO', 'SLEEP'] +8455-210777-0030-1002: ref=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVED', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0030-1002: hyp=['MISTER', 'NEVERBEND', 'BEGAN', 'THE', 'CAPTAIN', 'AND', 'I', 'OBSERVE', 'THAT', 'UP', 'TO', 'THAT', 'MOMENT', 'HE', 'HAD', 'GENERALLY', 'ADDRESSED', 'ME', 'AS', 'PRESIDENT', 'IT', 'CANNOT', 'BE', 'DENIED', 'THAT', 'WE', 'HAVE', 'COME', 'HERE', 'ON', 'AN', 'UNPLEASANT', 'MISSION'] +8455-210777-0031-1003: ref=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'IN', 'ENGLAND', 'STANDS', 'SO', 'HIGH'] +8455-210777-0031-1003: hyp=['YOU', 'HAVE', 'RECEIVED', 'US', 'WITH', 'ALL', 'THAT', 'COURTESY', 'AND', 'HOSPITALITY', 'FOR', 'WHICH', 'YOUR', 'CHARACTER', 'AND', 'IN', 'ENGLAND', 'STAND', 'SO', 'HIGH'] +8455-210777-0032-1004: ref=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0032-1004: hyp=['IT', 'IS', 'A', 'DUTY', 'SAID', 'I'] +8455-210777-0033-1005: ref=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0033-1005: hyp=['BUT', 'YOUR', 'POWER', 'IS', 'SO', 'SUPERIOR', 'TO', 'ANY', 'THAT', 'I', 'CAN', 'ADVANCE', 'AS', 'TO', 'MAKE', 'US', 'HERE', 'FEEL', 'THAT', 'THERE', 'IS', 'NO', 'DISGRACE', 'IN', 'YIELDING', 'TO', 'IT'] +8455-210777-0034-1006: ref=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TREBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0034-1006: hyp=['NOT', 'A', 'DOUBT', 'BUT', 'HAD', 'YOUR', 'FORCE', 'BEEN', 'ONLY', 'DOUBLE', 'OR', 'TROUBLE', 'OUR', 'OWN', 'I', 'SHOULD', 'HAVE', 'FOUND', 'IT', 'MY', 'DUTY', 'TO', 'STRUGGLE', 'WITH', 'YOU'] +8455-210777-0035-1007: ref=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINANDO', 'BROWN'] +8455-210777-0035-1007: hyp=['THAT', 'IS', 'ALL', 'QUITE', 'TRUE', 'MISTER', 'NEVERBEND', 'SAID', 'SIR', 'FERDINAND', 'OBROWN'] +8455-210777-0036-1008: ref=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'IN', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0036-1008: 
hyp=['I', 'CAN', 'AFFORD', 'TO', 'SMILE', 'BECAUSE', 'I', 'AM', 'ABSOLUTELY', 'POWERLESS', 'BEFORE', 'YOU', 'BUT', 'I', 'DO', 'NOT', 'THE', 'LESS', 'FEEL', 'THAT', 'IN', 'A', 'MATTER', 'OF', 'WHICH', 'THE', 'PROGRESS', 'OF', 'THE', 'WORLD', 'IS', 'CONCERNED', 'I', 'OR', 'RATHER', 'WE', 'HAVE', 'BEEN', 'PUT', 'DOWN', 'BY', 'BRUTE', 'FORCE'] +8455-210777-0037-1009: ref=['YOU', 'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0037-1009: hyp=['YOU', 'HAVE', 'COME', 'TO', 'US', 'THREATENING', 'US', 'WITH', 'ABSOLUTE', 'DESTRUCTION'] +8455-210777-0038-1010: ref=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0038-1010: hyp=['THEREFORE', 'I', 'FEEL', 'MYSELF', 'QUITE', 'ABLE', 'AS', 'PRESIDENT', 'OF', 'THIS', 'REPUBLIC', 'TO', 'RECEIVE', 'YOU', 'WITH', 'A', 'COURTESY', 'DUE', 'TO', 'THE', 'SERVANTS', 'OF', 'A', 'FRIENDLY', 'ALLY'] +8455-210777-0039-1011: ref=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0039-1011: hyp=['I', 'CAN', 'ASSURE', 'YOU', 'HE', 'HAS', 'NOT', 'EVEN', 'ALLOWED', 'ME', 'TO', 'SEE', 'THE', 'TRIGGER', 'SINCE', 'I', 'HAVE', 'BEEN', 'ON', 'BOARD'] +8455-210777-0040-1012: ref=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'HE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0040-1012: hyp=['THEN', 'SAID', 'SIR', 'FERDINANDO', 'THERE', 'IS', 'NOTHING', 'FOR', 'IT', 'BUT', 'THAT', 'WE', 'MUST', 'TAKE', 'YOU', 'WITH', 'HIM'] +8455-210777-0041-1013: ref=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0041-1013: hyp=['THERE', 'CAME', 'UPON', 'ME', 'A', 'SUDDEN', 'SHOCK', 'WHEN', 'I', 'HEARD', 'THESE', 'WORDS', 'WHICH', 'EXCEEDED', 'ANYTHING', 'WHICH', 'I', 'HAD', 'YET', 'FELT'] +8455-210777-0042-1014: ref=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLEAX'] +8455-210777-0042-1014: hyp=['YOU', 'HEAR', 'WHAT', 'SIR', 'FERDINANDO', 'BROWN', 'HAS', 'SAID', 'REPLIED', 'CAPTAIN', 'BATTLE', 'AXE'] +8455-210777-0043-1015: ref=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0043-1015: hyp=['BUT', 'WHAT', 'IS', 'THE', 'DELICATE', 'MISSION', 'I', 'ASKED'] +8455-210777-0044-1016: ref=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0044-1016: hyp=['I', 'WAS', 'TO', 'BE', 'TAKEN', 'AWAY', 'AND', 'CARRIED', 'TO', 'ENGLAND', 'OR', 'ELSEWHERE', 'OR', 'DROWNED', 'UPON', 'THE', 'VOYAGE', 'IT', 'MATTERED', 'NOT', 'WHICH'] +8455-210777-0045-1017: ref=['THEN', 'THE', 'REPUBLIC', 'OF', 'BRITANNULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] +8455-210777-0045-1017: hyp=['THEN', 'THE', 'REPUBLIC', 'OF', 'BRITAIN', 'NULA', 'WAS', 'TO', 'BE', 'DECLARED', 'AS', 'NON', 'EXISTENT', 'AND', 'THE', 'BRITISH', 'FLAG', 'WAS', 'TO', 'BE', 'EXALTED', 'AND', 'A', 'BRITISH', 'GOVERNOR', 'INSTALLED', 'IN', 'THE', 'EXECUTIVE', 'CHAMBERS'] 
+8455-210777-0046-1018: ref=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', "IT'S", 'THERE', 'SAID', 'CAPTAIN', 'BATTLEAX', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0046-1018: hyp=['YOU', 'MAY', 'BE', 'QUITE', 'SURE', 'TO', 'THERE', 'SAID', 'CAPTAIN', 'BATTLE', 'AXE', 'AND', 'THAT', 'I', 'CAN', 'SO', 'USE', 'IT', 'AS', 'TO', 'HALF', 'OBLITERATE', 'YOUR', 'TOWN', 'WITHIN', 'TWO', 'MINUTES', 'OF', 'MY', 'RETURN', 'ON', 'BOARD'] +8455-210777-0047-1019: ref=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0047-1019: hyp=['YOU', 'PROPOSE', 'TO', 'KIDNAP', 'ME', 'I', 'SAID'] +8455-210777-0048-1020: ref=['WHAT', 'WOULD', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0048-1020: hyp=['WHAT', 'WILL', 'BECOME', 'OF', 'YOUR', 'GUN', 'WERE', 'I', 'TO', 'KIDNAP', 'YOU'] +8455-210777-0049-1021: ref=['LIEUTENANT', 'CROSSTREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0049-1021: hyp=['LIEUTENANT', 'CROSS', 'TREES', 'IS', 'A', 'VERY', 'GALLANT', 'OFFICER'] +8455-210777-0050-1022: ref=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0050-1022: hyp=['ONE', 'OF', 'US', 'ALWAYS', 'REMAINS', 'ON', 'BOARD', 'WHILE', 'THE', 'OTHER', 'IS', 'ON', 'SHORE'] +8455-210777-0051-1023: ref=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'CENTRED', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0051-1023: hyp=['WHAT', 'WORLD', 'WIDE', 'INIQUITY', 'SUCH', 'A', 'SPEECH', 'AS', 'THAT', 'DISCLOSES', 'SAID', 'I', 'STILL', 'TURNING', 'MYSELF', 'TO', 'THE', 'CAPTAIN', 'FOR', 'THOUGH', 'I', 'WOULD', 'HAVE', 'CRUSHED', 'THEM', 'BOTH', 'BY', 'MY', 'WORDS', 'HAD', 'IT', 'BEEN', 'POSSIBLE', 'MY', 'DISLIKE', 'SENATE', 'ITSELF', 'ON', 'SIR', 'FERDINANDO'] +8455-210777-0052-1024: ref=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0052-1024: hyp=['YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'SUGGEST', 'SAID', 'HE', 'THAT', 'THAT', 'IS', 'A', 'MATTER', 'OF', 'OPINION'] +8455-210777-0053-1025: ref=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0053-1025: hyp=['WERE', 'I', 'TO', 'COMPLY', 'WITH', 'YOUR', 'ORDERS', 'WITHOUT', 'EXPRESSING', 'MY', 'OWN', 'OPINION', 'I', 'SHOULD', 'SEEM', 'TO', 'HAVE', 'DONE', 'SO', 'WILLINGLY', 'HEREAFTER'] +8455-210777-0054-1026: ref=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0054-1026: hyp=['THE', 'LETTER', 'RAN', 'AS', 'FOLLOWS'] +8455-210777-0055-1027: ref=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITANNULA'] +8455-210777-0055-1027: hyp=['SIR', 'I', 'HAVE', 'IT', 'IN', 'COMMAND', 'TO', 'INFORM', 'YOUR', 'EXCELLENCY', 'THAT', 'YOU', 'HAVE', 'BEEN', 'APPOINTED', 'GOVERNOR', 'OF', 'THE', 'CROWN', 'COLONY', 'WHICH', 'IS', 'CALLED', 'BRITAIN', 'NULA'] +8455-210777-0056-1028: ref=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 
'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0056-1028: hyp=['THE', 'PECULIAR', 'CIRCUMSTANCES', 'OF', 'THE', 'COLONY', 'ARE', 'WITHIN', 'YOUR', "EXCELLENCY'S", 'KNOWLEDGE'] +8455-210777-0057-1029: ref=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITANNULISTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATIVE', 'ASSEMBLY', 'AND', 'HENCE', 'HAVE', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0057-1029: hyp=['BUT', 'IN', 'THEIR', 'SELECTION', 'OF', 'A', 'CONSTITUTION', 'THE', 'BRITON', 'ULYSTS', 'HAVE', 'UNFORTUNATELY', 'ALLOWED', 'THEMSELVES', 'BUT', 'ONE', 'DELIBERATE', 'ASSEMBLY', 'AND', 'HENCE', 'HAS', 'SPRUNG', 'THEIR', 'PRESENT', 'DIFFICULTIES'] +8455-210777-0058-1030: ref=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0058-1030: hyp=['IT', 'IS', 'FOUNDED', 'ON', 'THE', 'ACKNOWLEDGED', 'WEAKNESS', 'OF', 'THOSE', 'WHO', 'SURVIVE', 'THAT', 'PERIOD', 'OF', 'LIFE', 'AT', 'WHICH', 'MEN', 'CEASE', 'TO', 'WORK'] +8455-210777-0059-1031: ref=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNMENT'] +8455-210777-0059-1031: hyp=['BUT', 'IT', 'IS', 'SURMISED', 'THAT', 'YOU', 'WILL', 'FIND', 'DIFFICULTIES', 'IN', 'THE', 'WAY', 'OF', 'YOUR', 'ENTERING', 'AT', 'ONCE', 'UPON', 'YOUR', 'GOVERNOR'] +8455-210777-0060-1032: ref=['THE', 'JOHN', 'BRIGHT', 'IS', 'ARMED', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITANNULA', 'SHOULD', 'PREVAIL'] +8455-210777-0060-1032: hyp=['THE', 'JOHN', 'BRIGHT', 'IS', 'ARM', 'WITH', 'A', 'WEAPON', 'OF', 'GREAT', 'POWER', 'AGAINST', 'WHICH', 'IT', 'IS', 'IMPOSSIBLE', 'THAT', 'THE', 'PEOPLE', 'OF', 'BRITAIN', 'EULO', 'SHOULD', 'PREVAIL'] +8455-210777-0061-1033: ref=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTH', 'WEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBEND', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0061-1033: hyp=['YOU', 'WILL', 'CARRY', 'OUT', 'WITH', 'YOU', 'ONE', 'HUNDRED', 'MEN', 'OF', 'THE', 'NORTH', 'NORTHWEST', 'BIRMINGHAM', 'REGIMENT', 'WHICH', 'WILL', 'PROBABLY', 'SUFFICE', 'FOR', 'YOUR', 'OWN', 'SECURITY', 'AS', 'IT', 'IS', 'THOUGHT', 'THAT', 'IF', 'MISTER', 'NEVERBIN', 'BE', 'WITHDRAWN', 'THE', 'PEOPLE', 'WILL', 'REVERT', 'EASILY', 'TO', 'THEIR', 'OLD', 'HABITS', 'OF', 'OBEDIENCE'] +8455-210777-0062-1034: ref=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0062-1034: hyp=['WHEN', 'DO', 'YOU', 'INTEND', 'THAT', 'THE', 'JOHN', 'BRIGHT', 'SHALL', 'START'] +8455-210777-0063-1035: ref=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0063-1035: hyp=['TO', 'DAY', 'I', 'SHOUTED'] +8455-210777-0064-1036: ref=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 'THE', 'GOVERNMENT'] +8455-210777-0064-1036: hyp=['AND', 'I', 'HAVE', 'NO', 'ONE', 'READY', 'TO', 'WHOM', 'I', 'CAN', 'GIVE', 'UP', 'THE', 'ARCHIVES', 'OF', 'THE', 'GOVERNMENT'] +8455-210777-0065-1037: ref=['I', 'SHALL', 'BE', 'HAPPY', 
'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0065-1037: hyp=['I', 'SHALL', 'BE', 'HAPPY', 'TO', 'TAKE', 'CHARGE', 'OF', 'THEM', 'SAID', 'SIR', 'FERDINANDO'] +8455-210777-0066-1038: ref=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0066-1038: hyp=['THEY', 'OF', 'COURSE', 'MUST', 'ALL', 'BE', 'ALTERED'] +8455-210777-0067-1039: ref=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] +8455-210777-0067-1039: hyp=['OR', 'OF', 'THE', 'HABITS', 'OF', 'OUR', 'PEOPLE', 'IT', 'IS', 'QUITE', 'IMPOSSIBLE'] +8455-210777-0068-1040: ref=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0068-1040: hyp=['YOUR', 'POWER', 'IS', 'SUFFICIENT', 'I', 'SAID'] +8455-210777-0069-1041: ref=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'BATTLEAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0069-1041: hyp=['IF', 'YOU', 'WILL', 'GIVE', 'US', 'YOUR', 'PROMISE', 'TO', 'MEET', 'CAPTAIN', 'ADELAX', 'HERE', 'AT', 'THIS', 'TIME', 'TO', 'MORROW', 'WE', 'WILL', 'STRETCH', 'A', 'POINT', 'AND', 'DELAY', 'THE', 'DEPARTURE', 'OF', 'THE', 'JOHN', 'BRIGHT', 'FOR', 'TWENTY', 'FOUR', 'HOURS'] +8455-210777-0070-1042: ref=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8455-210777-0070-1042: hyp=['AND', 'THIS', 'PLAN', 'WAS', 'ADOPTED', 'TOO', 'IN', 'ORDER', 'TO', 'EXTRACT', 'FROM', 'ME', 'A', 'PROMISE', 'THAT', 'I', 'WOULD', 'DEPART', 'IN', 'PEACE'] +8463-287645-0000-543: ref=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0000-543: hyp=['THIS', 'WAS', 'WHAT', 'DID', 'THE', 'MISCHIEF', 'SO', 'FAR', 'AS', 'THE', 'RUNNING', 'AWAY', 'WAS', 'CONCERNED'] +8463-287645-0001-544: ref=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0001-544: hyp=['IT', 'IS', 'HARDLY', 'NECESSARY', 'TO', 'SAY', 'MORE', 'OF', 'THEM', 'HERE'] +8463-287645-0002-545: ref=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'HE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAN', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALMENT', 'WHILE', 'UNDER', 'HOLLAN', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0002-545: hyp=['FROM', 'THE', 'MANNER', 'IN', 'WHICH', 'SHE', 'EXPRESSED', 'HIMSELF', 'WITH', 'REGARD', 'TO', 'ROBERT', 'HOLLAND', 'NO', 'MAN', 'IN', 'THE', 'WHOLE', 'RANGE', 'OF', 'HIS', 'RECOLLECTIONS', 'WILL', 'BE', 'LONGER', 'REMEMBERED', 'THAN', 'HE', 'HIS', 'ENTHRALIMENT', 'WHILE', 'UNDER', 'HOLLAND', 'WILL', 'HARDLY', 'EVER', 'BE', 'FORGOTTEN'] +8463-287645-0003-546: ref=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAN'] +8463-287645-0003-546: hyp=['OF', 'THIS', 'PARTY', 'EDWARD', 'A', 'BOY', 'OF', 'SEVENTEEN', 'CALLED', 'FORTH', 'MUCH', 'SYMPATHY', 'HE', 'TOO', 'WAS', 'CLAIMED', 'BY', 'HOLLAND'] +8463-287645-0004-547: ref=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] +8463-287645-0004-547: hyp=['JOHN', 'WESLEY', 'COMBASH', 'JACOB', 'TAYLOR', 'AND', 'THOMAS', 'EDWARD', 'SKINNER'] 
+8463-287645-0005-548: ref=['A', 'FEW', 'YEARS', 'BACK', 'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'COLD', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0005-548: hyp=['A', 'FEW', 'YEARS', 'BACK', 'ONE', 'OF', 'THEIR', 'SLAVES', 'A', 'COACHMAN', 'WAS', 'KEPT', 'ON', 'THE', 'COACH', 'BOX', 'ONE', 'CALLED', 'NIGHT', 'WHEN', 'THEY', 'WERE', 'OUT', 'AT', 'A', 'BALL', 'UNTIL', 'HE', 'BECAME', 'ALMOST', 'FROZEN', 'TO', 'DEATH', 'IN', 'FACT', 'HE', 'DID', 'DIE', 'IN', 'THE', 'INFIRMARY', 'FROM', 'THE', 'EFFECTS', 'OF', 'THE', 'FROST', 'ABOUT', 'ONE', 'WEEK', 'AFTERWARDS'] +8463-287645-0006-549: ref=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0006-549: hyp=['THE', 'DOCTOR', 'WHO', 'ATTENDED', 'THE', 'INJURED', 'CREATURE', 'IN', 'THIS', 'CASE', 'WAS', 'SIMPLY', 'TOLD', 'THAT', 'SHE', 'SLIPPED', 'AND', 'FELL', 'DOWN', 'THE', 'STAIRS', 'AS', 'SHE', 'WAS', 'COMING', 'DOWN'] +8463-287645-0007-550: ref=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UP', 'STAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HIT', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0007-550: hyp=['ANOTHER', 'CASE', 'SAID', 'JOHN', 'WESLEY', 'WAS', 'A', 'LITTLE', 'GIRL', 'HALF', 'GROWN', 'WHO', 'WAS', 'WASHING', 'WINDOWS', 'UPSTAIRS', 'ONE', 'DAY', 'AND', 'UNLUCKILY', 'FELL', 'ASLEEP', 'IN', 'THE', 'WINDOW', 'AND', 'IN', 'THIS', 'POSITION', 'WAS', 'FOUND', 'BY', 'HER', 'MISTRESS', 'IN', 'A', 'RAGE', 'THE', 'MISTRESS', 'HID', 'HER', 'A', 'HEAVY', 'SLAP', 'KNOCKED', 'HER', 'OUT', 'OF', 'THE', 'WINDOW', 'AND', 'SHE', 'FELL', 'TO', 'THE', 'PAVEMENT', 'AND', 'DIED', 'IN', 'A', 'FEW', 'HOURS', 'FROM', 'THE', 'EFFECTS', 'THEREOF'] +8463-287645-0008-551: ref=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0008-551: hyp=['AS', 'USUAL', 'NOTHING', 'WAS', 'DONE', 'IN', 'THE', 'WAY', 'OF', 'PUNISHMENT'] +8463-287645-0009-552: ref=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0009-552: hyp=['I', 'NEVER', 'KNEW', 'OF', 'BUT', 'ONE', 'MAN', 'WHO', 'COULD', 'EVER', 'PLEASE', 'HIM'] +8463-287645-0010-553: ref=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0010-553: hyp=['HE', 'WORKED', 'ME', 'VERY', 'HARD', 'HE', 'WANTED', 'TO', 'BE', 'BEATING', 'ME', 'ALL', 'THE', 'TIME'] +8463-287645-0011-554: ref=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] +8463-287645-0011-554: hyp=['SHE', 'WAS', 'A', 'LARGE', 'HOMELY', 'WOMAN', 'THEY', 'WERE', 'COMMON', 'WHITE', 'PEOPLE', 'WITH', 'NO', 'REPUTATION', 'IN', 'THE', 'COMMUNITY'] 
+8463-287645-0012-555: ref=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0012-555: hyp=['SUBSTANTIALLY', 'THIS', 'WAS', "JACOB'S", 'UNVARNISHED', 'DESCRIPTION', 'OF', 'HIS', 'MASTER', 'AND', 'MISTRESS'] +8463-287645-0013-556: ref=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0013-556: hyp=['AS', 'TO', 'HIS', 'AGE', 'AND', 'ALSO', 'THE', 'NAME', 'OF', 'HIS', 'MASTER', "JACOB'S", 'STATEMENT', 'VARIED', 'SOMEWHAT', 'FROM', 'THE', 'ADVERTISEMENT'] +8463-287645-0014-557: ref=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-287645-0014-557: hyp=['OF', 'STARTING', 'I', "DIDN'T", 'KNOW', 'THE', 'WAY', 'TO', 'COME'] +8463-294825-0000-558: ref=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0000-558: hyp=["IT'S", 'ALMOST', 'BEYOND', 'CONJECTURE'] +8463-294825-0001-559: ref=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHERWORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0001-559: hyp=['THIS', 'REALITY', 'BEGINS', 'TO', 'EXPLAIN', 'THE', 'DARK', 'POWER', 'AND', 'OTHER', 'WORLDLY', 'FASCINATION', 'OF', 'TWENTY', 'THOUSAND', 'LEAGUES', 'UNDER', 'THE', 'SEAS'] +8463-294825-0002-560: ref=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0002-560: hyp=['FIRST', 'AS', 'A', 'PARIS', 'STOCKBROKER', 'LATER', 'AS', 'A', 'CELEBRATED', 'AUTHOR', 'AND', 'YACHTSMAN', 'HE', 'WENT', 'ON', 'FREQUENT', 'VOYAGES', 'TO', 'BRITAIN', 'AMERICA', 'THE', 'MEDITERRANEAN'] +8463-294825-0003-561: ref=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTURISTIC', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0003-561: hyp=['NEMO', 'BUILDS', 'A', 'FABULOUS', 'FUTUREISTIC', 'SUBMARINE', 'THE', 'NAUTILUS', 'THEN', 'CONDUCTS', 'AN', 'UNDERWATER', 'CAMPAIGN', 'OF', 'VENGEANCE', 'AGAINST', 'HIS', 'IMPERIALIST', 'OPPRESSOR'] +8463-294825-0004-562: ref=['IN', 'ALL', 'THE', 'NOVEL', 'HAD', 'A', 'DIFFICULT', 'GESTATION'] +8463-294825-0004-562: hyp=['IN', 'ALL', 'THE', 'NOVEL', 'HEAD', 'A', 'DIFFICULT', 'GESTATION'] +8463-294825-0005-563: ref=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0005-563: hyp=['OTHER', 'SUBTLETIES', 'OCCUR', 'INSIDE', 'EACH', 'EPISODE', 'THE', 'TEXTURES', 'SPARKLING', 'WITH', 'WIT', 'INFORMATION', 'AND', 'INSIGHT'] +8463-294825-0006-564: ref=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINED', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0006-564: hyp=['HIS', 'SPECIFICATIONS', 'FOR', 'AN', 'OPEN', 'SEA', 'SUBMARINE', 'AND', 'A', 'SELF', 'CONTAINING', 'DIVING', 'SUIT', 'WERE', 'DECADES', 'BEFORE', 'THEIR', 'TIME', 'YET', 'MODERN', 'TECHNOLOGY', 'BEARS', 'THEM', 'OUT', 'TRIUMPHANTLY'] +8463-294825-0007-565: ref=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 
'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERNE'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0007-565: hyp=['EVEN', 'THE', 'SUPPORTING', 'CAST', 'IS', 'SHREWDLY', 'DRAWN', 'PROFESSOR', 'ARONNAX', 'THE', 'CAREER', 'SCIENTIST', 'CAUGHT', 'IN', 'AN', 'ETHICAL', 'CONFLICT', 'CONSEIL', 'THE', 'COMPULSIVE', 'CLASSIFIER', 'WHO', 'SUPPLIES', 'HUMOROUS', 'TAG', 'LINES', 'FOR', "VERN'S", 'FAST', 'FACTS', 'THE', 'HARPOONER', 'NED', 'LAND', 'A', 'CREATURE', 'OF', 'CONSTANT', 'APPETITES', 'MAN', 'AS', 'HEROIC', 'ANIMAL'] +8463-294825-0008-566: ref=['BUT', 'MUCH', 'OF', 'THE', "NOVEL'S", 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0008-566: hyp=['BUT', 'MUCH', 'OF', 'THE', 'NOVELS', 'BROODING', 'POWER', 'COMES', 'FROM', 'CAPTAIN', 'NEMO'] +8463-294825-0009-567: ref=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', "HE'S", 'A', 'FIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'ARE', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCAUST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0009-567: hyp=['THIS', 'COMPULSION', 'LEADS', 'NEMO', 'INTO', 'UGLY', 'CONTRADICTIONS', 'HE', 'IS', 'A', 'FRIGHTER', 'FOR', 'FREEDOM', 'YET', 'ALL', 'WHO', 'BOARD', 'HIS', 'SHIP', 'OR', 'IMPRISONED', 'THERE', 'FOR', 'GOOD', 'HE', 'WORKS', 'TO', 'SAVE', 'LIVES', 'BOTH', 'HUMAN', 'AND', 'ANIMAL', 'YET', 'HE', 'HIMSELF', 'CREATES', 'A', 'HOLOCOST', 'HE', 'DETESTS', 'IMPERIALISM', 'YET', 'HE', 'LAYS', 'PERSONAL', 'CLAIM', 'TO', 'THE', 'SOUTH', 'POLE'] +8463-294825-0010-568: ref=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0010-568: hyp=['AND', 'IN', 'THIS', 'LAST', 'ACTION', 'HE', 'FALLS', 'INTO', 'THE', 'CLASSIC', 'SIN', 'OF', 'PRIDE'] +8463-294825-0011-569: ref=["HE'S", 'SWIFTLY', 'PUNISHED'] +8463-294825-0011-569: hyp=['HE', 'IS', 'SWIFTLY', 'PUNISHED'] +8463-294825-0012-570: ref=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0012-570: hyp=['THE', 'NAUTILUS', 'NEARLY', 'PERISHES', 'IN', 'THE', 'ANTARCTIC', 'AND', 'NEMO', 'SINKS', 'INTO', 'A', 'GROWING', 'DEPRESSION'] +8463-294825-0013-571: ref=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'BEEBE', 'POLAR', 'TRAVELER', 'SIR', 'ERNEST', 'SHACKLETON'] +8463-294825-0013-571: hyp=['FOR', 'MANY', 'THEN', 'THIS', 'BOOK', 'HAS', 'BEEN', 'A', 'SOURCE', 'OF', 'FASCINATION', 'SURELY', 'ONE', 'OF', 'THE', 'MOST', 'INFLUENTIAL', 'NOVELS', 'EVER', 'WRITTEN', 'AN', 'INSPIRATION', 'FOR', 'SUCH', 'SCIENTISTS', 'AND', 'DISCOVERERS', 'AS', 'ENGINEER', 'SIMON', 'LAKE', 'OCEANOGRAPHER', 'WILLIAM', 'B', 'POLAR', 'TRAVELLERS', 'ARE', 'ERNEST', 'SHACKLETON'] +8463-294825-0014-572: ref=['FATHOM', 'SIX', 'FEET'] +8463-294825-0014-572: hyp=['FATHOM', 'SIX', 'FEET'] +8463-294825-0015-573: ref=['GRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] 
+8463-294825-0015-573: hyp=['GRAHAM', 'ROUGHLY', 'WON', 'TWENTY', 'EIGHTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: ref=['MILLIGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSAND', 'OF', 'AN', 'OUNCE'] +8463-294825-0016-574: hyp=['MILAGRAM', 'ROUGHLY', 'ONE', 'TWENTY', 'EIGHT', 'THOUSANDTH', 'OF', 'AN', 'OUNCE'] +8463-294825-0017-575: ref=['LITER', 'ROUGHLY', 'ONE', 'QUART'] +8463-294825-0017-575: hyp=['LATER', 'ROUGHLY', 'WON', 'COURT'] +8463-294825-0018-576: ref=['METER', 'ROUGHLY', 'ONE', 'YARD', 'THREE', 'INCHES'] +8463-294825-0018-576: hyp=['METER', 'ROUGHLY', 'ONE', 'YARD', 'THREE', 'INCHES'] +8463-294825-0019-577: ref=['MILLIMETER', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294825-0019-577: hyp=['MILLIMETRE', 'ROUGHLY', 'ONE', 'TWENTY', 'FIFTH', 'OF', 'AN', 'INCH'] +8463-294828-0000-578: ref=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0000-578: hyp=['CHAPTER', 'THREE', 'AS', 'MASTER', 'WISHES'] +8463-294828-0001-579: ref=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'NO', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTHWEST', 'PASSAGE'] +8463-294828-0001-579: hyp=['THREE', 'SECONDS', 'BEFORE', 'THE', 'ARRIVAL', 'OF', 'J', 'B', "HOBSON'S", 'LETTER', 'I', 'KNOW', 'MORE', 'DREAMED', 'OF', 'CHASING', 'THE', 'UNICORN', 'THAN', 'OF', 'TRYING', 'FOR', 'THE', 'NORTHWEST', 'PASSAGE'] +8463-294828-0002-580: ref=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'A', 'REST'] +8463-294828-0002-580: hyp=['EVEN', 'SO', 'I', 'HAD', 'JUST', 'RETURNED', 'FROM', 'AN', 'ARDUOUS', 'JOURNEY', 'EXHAUSTED', 'AND', 'BADLY', 'NEEDING', 'ARREST'] +8463-294828-0003-581: ref=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0003-581: hyp=['I', 'WANTED', 'NOTHING', 'MORE', 'THAN', 'TO', 'SEE', 'MY', 'COUNTRY', 'AGAIN', 'MY', 'FRIENDS', 'MY', 'MODEST', 'QUARTERS', 'BY', 'THE', 'BATTANICAL', 'GARDENS', 'MY', 'DEARLY', 'BELOVED', 'COLLECTIONS'] +8463-294828-0004-582: ref=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0004-582: hyp=['BUT', 'NOW', 'NOTHING', 'COULD', 'HOLD', 'ME', 'BACK'] +8463-294828-0005-583: ref=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0005-583: hyp=['CONSEIL', 'WAS', 'MY', 'MANSERVANT'] +8463-294828-0006-584: ref=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0006-584: hyp=['FROM', 'RUBBING', 'SHOULDERS', 'WITH', 'SCIENTISTS', 'IN', 'OUR', 'LITTLE', 'UNIVERSE', 'BY', 'THE', 'BOTANICAL', 'GARDENS', 'THE', 'BOY', 'HAD', 'COME', 'TO', 'KNOW', 'A', 'THING', 'OR', 'TWO'] +8463-294828-0007-585: ref=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'THE', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALEEN', 'WHALE'] +8463-294828-0007-585: hyp=['CLASSIFYING', 'WAS', 'EVERYTHING', 'TO', 'HIM', 'SO', 'HE', 'KNEW', 'NOTHING', 'ELSE', 'WELL', 'VERSED', 'IN', 'A', 'THEORY', 'OF', 'CLASSIFICATION', 'HE', 'WAS', 'POORLY', 'VERSED', 
'IN', 'ITS', 'PRACTICAL', 'APPLICATION', 'AND', 'I', 'DOUBT', 'THAT', 'HE', 'COULD', 'TELL', 'A', 'SPERM', 'WHALE', 'FROM', 'A', 'BALINE', 'WHALE'] +8463-294828-0008-586: ref=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LAD'] +8463-294828-0008-586: hyp=['AND', 'YET', 'WHAT', 'A', 'FINE', 'GALLANT', 'LAD'] +8463-294828-0009-587: ref=['NOT', 'ONCE', 'DID', 'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'A', 'JOURNEY'] +8463-294828-0009-587: hyp=['NOT', 'ONCE', 'DID', 'HE', 'COMMENT', 'ON', 'THE', 'LENGTH', 'OR', 'THE', 'HARDSHIPS', 'OF', 'THE', 'JOURNEY'] +8463-294828-0010-588: ref=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUITCASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0010-588: hyp=['NEVER', 'DID', 'HE', 'OBJECT', 'TO', 'BUCKLING', 'UP', 'HIS', 'SUIT', 'CASE', 'FOR', 'ANY', 'COUNTRY', 'WHATEVER', 'CHINA', 'OR', 'THE', 'CONGO', 'NO', 'MATTER', 'HOW', 'FAR', 'OFF', 'IT', 'WAS'] +8463-294828-0011-589: ref=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0011-589: hyp=['HE', 'WENT', 'HERE', 'THERE', 'AND', 'EVERYWHERE', 'IN', 'PERFECT', 'CONTENTMENT'] +8463-294828-0012-590: ref=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0012-590: hyp=['PLEASE', 'FORGIVE', 'ME', 'FOR', 'THIS', 'UNDERHANDED', 'WAY', 'OF', 'ADMITTING', 'THAT', 'I', 'HAD', 'TURNED', 'FORTY'] +8463-294828-0013-591: ref=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TIRESOME'] +8463-294828-0013-591: hyp=['HE', 'WAS', 'A', 'FANATIC', 'ON', 'FORMALITY', 'AND', 'HE', 'ONLY', 'ADDRESSED', 'ME', 'IN', 'THE', 'THIRD', 'PERSON', 'TO', 'THE', 'POINT', 'WHERE', 'IT', 'GOT', 'TO', 'HIRESUME'] +8463-294828-0014-592: ref=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0014-592: hyp=['THERE', 'WAS', 'GOOD', 'REASON', 'TO', 'STOP', 'AND', 'THINK', 'EVEN', 'FOR', 'THE', "WORLD'S", 'MOST', 'EMOTIONLESS', 'MAN'] +8463-294828-0015-593: ref=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TIME', 'CONSEIL', 'APPEARED'] +8463-294828-0015-593: hyp=['CONSEIL', 'I', 'CALLED', 'A', 'THIRD', 'TON', 'CONSEIL', 'APPEARED'] +8463-294828-0016-594: ref=['DID', 'MASTER', 'SUMMON', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0016-594: hyp=['DEAD', 'MASTER', 'SUMMONED', 'ME', 'HE', 'SAID', 'ENTERING'] +8463-294828-0017-595: ref=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0017-595: hyp=['PACK', 'AS', 'MUCH', 'INTO', 'MY', 'TRUNK', 'AS', 'YOU', 'CAN', 'MY', 'TRAVELING', 'KIT', 'MY', 'SUITS', 'SHIRTS', 'AND', 'SOCKS', "DON'T", 'BOTHER', 'COUNTING', 'JUST', 'SQUEEZE', 'IT', 'ALL', 'IN', 'AND', 'HURRY'] +8463-294828-0018-596: ref=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0018-596: hyp=["WE'LL", 'DEAL', 'WITH', 'THEM', 'LATER', 'WHAT'] +8463-294828-0019-597: ref=['ANYHOW', "WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 'FRANCE'] +8463-294828-0019-597: hyp=['ANYHOW', "WE'LL", 'LEAVE', 'INSTRUCTIONS', 'TO', 'SHIP', 'THE', 'WHOLE', 'MENAGERIE', 'TO', 
'FRANCE'] +8463-294828-0020-598: ref=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0020-598: hyp=['YES', 'WE', 'ARE', 'CERTAINLY', 'I', 'REPLIED', 'EVASIVELY', 'BUT', 'AFTER', 'WE', 'MAKE', 'A', 'DETOUR'] +8463-294828-0021-599: ref=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0021-599: hyp=['A', 'ROUTE', 'SLIGHTLY', 'LESS', 'DIRECT', "THAT'S", 'ALL'] +8463-294828-0022-600: ref=["WE'RE", 'LEAVING', 'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0022-600: hyp=['WERE', 'LEAVING', 'ON', 'THE', 'ABRAHAM', 'LINCOLN'] +8463-294828-0023-601: ref=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0023-601: hyp=['YOU', 'SEE', 'MY', 'FRIEND', "IT'S", 'AN', 'ISSUE', 'OF', 'THE', 'MONSTER', 'THE', 'NOTORIOUS', 'NARWHALE'] +8463-294828-0024-602: ref=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0024-602: hyp=['WE', "DON'T", 'KNOW', 'WHERE', 'IT', 'WILL', 'TAKE', 'US'] +8463-294828-0025-603: ref=['BUT', "WE'RE", 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0025-603: hyp=['BUT', 'WERE', 'GOING', 'JUST', 'THE', 'SAME'] +8463-294828-0026-604: ref=['WE', 'HAVE', 'A', 'COMMANDER', "WHO'S", 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0026-604: hyp=['WE', 'HAVE', 'A', 'COMMANDER', 'WHOSE', 'GAME', 'FOR', 'ANYTHING'] +8463-294828-0027-605: ref=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0027-605: hyp=['I', 'LEFT', 'INSTRUCTIONS', 'FOR', 'SHIPPING', 'MY', 'CONTAINERS', 'OF', 'STUFFED', 'ANIMALS', 'AND', 'DRIED', 'PLANTS', 'TO', 'PARIS', 'FRANCE'] +8463-294828-0028-606: ref=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABIRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0028-606: hyp=['I', 'OPENED', 'A', 'LINE', 'OF', 'CREDIT', 'SUFFICIENT', 'TO', 'COVER', 'THE', 'BABRUSA', 'AND', 'CONSEIL', 'AT', 'MY', 'HEELS', 'I', 'JUMPED', 'INTO', 'A', 'CARRIAGE'] +8463-294828-0029-607: ref=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0029-607: hyp=['OUR', 'BAGGAGE', 'WAS', 'IMMEDIATELY', 'CARRIED', 'TO', 'THE', 'DECK', 'OF', 'THE', 'FRIGATE', 'I', 'RUSHED', 'ABOARD'] +8463-294828-0030-608: ref=['I', 'ASKED', 'FOR', 'COMMANDER', 'FARRAGUT'] +8463-294828-0030-608: hyp=['I', 'ASKED', 'FOR', 'COMMANDER', 'FERRAGUT'] +8463-294828-0031-609: ref=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTERDECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0031-609: hyp=['ONE', 'OF', 'THE', 'SAILORS', 'LED', 'ME', 'TO', 'THE', 'AFTER', 'DECK', 'WHERE', 'I', 'STOOD', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'SMART', 'LOOKING', 'OFFICER', 'WHO', 'EXTENDED', 'HIS', 'HAND', 'TO', 'ME'] +8463-294828-0032-610: ref=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] +8463-294828-0032-610: hyp=['IN', 'PERSON', 'WELCOME', 'ABOARD', 'PROFESSOR', 'YOUR', 'CABIN', 'IS', 'WAITING', 'FOR', 'YOU'] +8463-294828-0033-611: ref=['I', 'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', 'OFFICERS', 'MESS'] +8463-294828-0033-611: hyp=['I', 
'WAS', 'WELL', 'SATISFIED', 'WITH', 'MY', 'CABIN', 'WHICH', 'WAS', 'LOCATED', 'IN', 'THE', 'STERN', 'AND', 'OPENED', 'INTO', 'THE', "OFFICER'S", 'MASTS'] +8463-294828-0034-612: ref=["WE'LL", 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0034-612: hyp=['WILL', 'BE', 'QUITE', 'COMFORTABLE', 'HERE', 'I', 'TOLD', 'CONSEIL'] +8463-294828-0035-613: ref=['AND', 'SO', 'IF', "I'D", 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0035-613: hyp=['AND', 'SO', 'IF', 'I', 'HAD', 'BEEN', 'DELAYED', 'BY', 'A', 'QUARTER', 'OF', 'AN', 'HOUR', 'OR', 'EVEN', 'LESS', 'THE', 'FRIGATE', 'WOULD', 'HAVE', 'GONE', 'WITHOUT', 'ME', 'AND', 'I', 'WOULD', 'HAVE', 'MISSED', 'OUT', 'ON', 'THIS', 'UNEARTHLY', 'EXTRAORDINARY', 'AND', 'INCONCEIVABLE', 'EXPEDITION', 'WHOSE', 'TRUE', 'STORY', 'MIGHT', 'WELL', 'MEET', 'WITH', 'SOME', 'SKEPTICISM'] +8463-294828-0036-614: ref=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0036-614: hyp=['THE', 'WHARVES', 'OF', 'BROOKLYN', 'AND', 'EVERY', 'PART', 'OF', 'NEW', 'YORK', 'BORDERING', 'THE', 'EAST', 'RIVER', 'WERE', 'CROWDED', 'WITH', 'CURIOSITY', 'SEEKERS'] +8463-294828-0037-615: ref=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0037-615: hyp=['DEPARTING', 'FROM', 'FIVE', 'HUNDRED', 'THOUSAND', 'THROATS', 'THREE', 'CHEERS', 'BURST', 'FORTH', 'IN', 'SUCCESSION'] +8463-294828-0038-616: ref=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8463-294828-0038-616: hyp=['THOUSANDS', 'OF', 'HANDKERCHIEFS', 'WERE', 'WAVING', 'ABOVE', 'THESE', 'TIGHTLY', 'PACKED', 'MASSES', 'HAILING', 'THE', 'ABRAHAM', 'LINCOLN', 'UNTIL', 'IT', 'REACHED', 'THE', 'WATERS', 'OF', 'THE', 'HUDSON', 'RIVER', 'AT', 'THE', 'TIP', 'OF', 'THE', 'LONG', 'PENINSULA', 'THAT', 'FORMS', 'NEW', 'YORK', 'CITY'] +8555-284447-0000-2299: ref=['THEN', 'HE', 'RUSHED', 'DOWN', 'STAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'IF', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0000-2299: hyp=['THEN', 'HE', 'RUSHED', 'DOWNSTAIRS', 'INTO', 'THE', 'COURTYARD', 'SHOUTING', 'LOUDLY', 'FOR', 'HIS', 'SOLDIERS', 'AND', 'THREATENING', 'TO', 'PATCH', 'EVERYBODY', 'IN', 'HIS', 'DOMINIONS', 'AT', 'THE', 'SAILORMAN', 'WAS', 'NOT', 'RECAPTURED'] +8555-284447-0001-2300: ref=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', "I'VE", 'HAD', 'MY', 'COFFEE', 'AND', 'OATMEAL', "I'LL", 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 'PATCH', 'HIM'] +8555-284447-0001-2300: hyp=['HOLD', 'HIM', 'FAST', 'MY', 'MEN', 'AND', 'AS', 'SOON', 'AS', "I'VE", 'HAD', 'MY', 'COFFEE', 'AN', 'OATMEAL', 'I', 'WILL', 'TAKE', 'HIM', 'TO', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'AND', 
'PAT', 'HIM'] +8555-284447-0002-2301: ref=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'O', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', "I'VE", 'HAD', "CONSID'BLE", 'EXERCISE', 'THIS', 'MORNIN', 'AND', "I'M", 'ALL', 'READY', 'FOR', 'BREAKFAS'] +8555-284447-0002-2301: hyp=['I', "WOULDN'T", 'MIND', 'A', 'CUP', 'OF', 'COFFEE', 'MYSELF', 'SAID', "CAP'N", 'BILL', 'I', 'HAVE', 'HAD', 'CONSIDERABLE', 'EXERCISE', 'THIS', 'MORNIN', 'AN', "I'M", 'ALREADY', 'FOR', 'BREAKFAST'] +8555-284447-0003-2302: ref=['BUT', "CAP'N", 'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0003-2302: hyp=['BUT', "CAP'N", 'BILL', 'MADE', 'NO', 'SUCH', 'ATTEMPT', 'KNOWING', 'IT', 'WOULD', 'BE', 'USELESS'] +8555-284447-0004-2303: ref=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0004-2303: hyp=['AS', 'SOON', 'AS', 'THEY', 'ENTERED', 'THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'THE', 'BOOLOOROO', 'GAVE', 'A', 'YELL', 'OF', 'DISAPPOINTMENT'] +8555-284447-0005-2304: ref=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0005-2304: hyp=['THE', 'ROOM', 'OF', 'THE', 'GREAT', 'KNIFE', 'WAS', 'HIGH', 'AND', 'BIG', 'AND', 'AROUND', 'IT', 'RAN', 'ROWS', 'OF', 'BENCHES', 'FOR', 'THE', 'SPECTATORS', 'TO', 'SIT', 'UPON'] +8555-284447-0006-2305: ref=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THRONE', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'FOR', 'THE', 'SNUBNOSED', 'PRINCESSES'] +8555-284447-0006-2305: hyp=['IN', 'ONE', 'PLACE', 'AT', 'THE', 'HEAD', 'OF', 'THE', 'ROOM', 'WAS', 'A', 'RAISED', 'PLATFORM', 'FOR', 'THE', 'ROYAL', 'FAMILY', 'WITH', 'ELEGANT', 'THROWN', 'CHAIRS', 'FOR', 'THE', 'KING', 'AND', 'QUEEN', 'AND', 'SIX', 'SMALLER', 'BUT', 'RICHLY', 'UPHOLSTERED', 'CHAIRS', 'WITH', 'A', 'SNUB', 'NOSED', 'PRINCESSES'] +8555-284447-0007-2306: ref=['THEREFORE', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANYONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0007-2306: hyp=['THEY', 'ARE', 'FOR', 'HER', 'MAJESTY', 'PAID', 'NO', 'ATTENTION', 'TO', 'ANY', 'ONE', 'AND', 'NO', 'ONE', 'PAID', 'ANY', 'ATTENTION', 'TO', 'HER'] +8555-284447-0008-2307: ref=['RICH', 'JEWELS', 'OF', 'BLUE', 'STONES', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THEY', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0008-2307: hyp=['RICH', 'JEWELS', 'OF', 'BLUESTS', 'GLITTERED', 'UPON', 'THEIR', 'PERSONS', 'AND', 'THE', 'ROYAL', 'LADIES', 'WERE', 'FULLY', 'AS', 'GORGEOUS', 'AS', 'THEY', 'WERE', 'HAUGHTY', 'AND', 'OVERBEARING'] +8555-284447-0009-2308: ref=['MORNIN', 'GIRLS', 'HOPE', 'YE', 'FEEL', 'AS', 'WELL', 'AS', 'YE', 'LOOK'] +8555-284447-0009-2308: hyp=['MORNING', 'GIRLS', 'OH', 'BE', 'BILL', 'AS', 'WELL', 'AS', 'YOU', 'LOOK'] +8555-284447-0010-2309: ref=['CONTROL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANYONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', "YOU'LL", 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0010-2309: 
hyp=['CONTROLL', 'YOURSELVES', 'MY', 'DEARS', 'REPLIED', 'THE', 'BOOLOOROO', 'THE', 'WORST', 'PUNISHMENT', 'I', 'KNOW', 'HOW', 'TO', 'INFLICT', 'ON', 'ANY', 'ONE', 'THIS', 'PRISONER', 'IS', 'ABOUT', 'TO', 'SUFFER', 'YOU', 'WILL', 'SEE', 'A', 'VERY', 'PRETTY', 'PATCHING', 'MY', 'ROYAL', 'DAUGHTERS'] +8555-284447-0011-2310: ref=['SUPPOSE', "IT'S", 'A', 'FRIEND'] +8555-284447-0011-2310: hyp=['SUPPOSE', "IT'S", 'OF', 'BRAND'] +8555-284447-0012-2311: ref=['THE', 'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0012-2311: hyp=['THE', 'CAPTAIN', 'SHOOK', 'HIS', 'HEAD'] +8555-284447-0013-2312: ref=['WHY', 'YOU', 'SAID', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THIS', 'BILLYGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0013-2312: hyp=['WHY', 'YOU', 'SIT', 'TO', 'FETCH', 'THE', 'FIRST', 'LIVING', 'CREATURE', 'WE', 'MET', 'AND', 'THAT', 'WAS', 'THE', 'SPILLIGOAT', 'REPLIED', 'THE', 'CAPTAIN', 'PANTING', 'HARD', 'AS', 'HE', 'HELD', 'FAST', 'TO', 'ONE', 'OF', 'THE', "GOAT'S", 'HORNS'] +8555-284447-0014-2313: ref=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0014-2313: hyp=['THE', 'IDEA', 'OF', 'PATCHING', "CAP'N", 'BILL', 'TO', 'A', 'GOAT', 'WAS', 'VASTLY', 'AMUSING', 'TO', 'HIM', 'AND', 'THE', 'MORE', 'HE', 'THOUGHT', 'OF', 'IT', 'THE', 'MORE', 'HE', 'ROARED', 'WITH', 'LAUGHTER'] +8555-284447-0015-2314: ref=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', "THEY'RE", 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'GOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'EM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0015-2314: hyp=['THEY', 'LOOK', 'SOMETHING', 'ALIKE', 'YOU', 'KNOW', 'SUGGESTED', 'THE', 'CAPTAIN', 'OF', 'THE', 'GUARDS', 'LOOKING', 'FROM', 'ONE', 'TO', 'THE', 'OTHER', 'DOUBTFULLY', 'AND', 'THEY', 'NEARLY', 'THE', 'SAME', 'SIZE', 'IF', 'YOU', 'STAND', 'THE', 'BOAT', 'ON', 'HIS', 'HIND', 'LEGS', "THEY'VE", 'BOTH', 'GOT', 'THE', 'SAME', 'STYLE', 'OF', 'WHISKERS', 'AND', "THEY'RE", 'BOTH', 'OF', 'THEM', 'OBSTINATE', 'AND', 'DANGEROUS', 'SO', 'THEY', 'OUGHT', 'TO', 'MAKE', 'A', 'GOOD', 'PATCH', 'SPLENDID'] +8555-284447-0016-2315: ref=['FINE', 'GLORIOUS'] +8555-284447-0016-2315: hyp=['FINE', 'GLORIOUS'] +8555-284447-0017-2316: ref=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0017-2316: hyp=['WHEN', 'THIS', 'HAD', 'BEEN', 'ACCOMPLISHED', 'THE', 'BOOLOOROO', 'LEANED', 'OVER', 'TO', 'TRY', 'TO', 'DISCOVER', 'WHY', 'THE', 'FRAME', 'ROLLED', 'AWAY', 'SEEMINGLY', 'OF', 'ITS', 'OWN', 'ACCORD', 'AND', 'HE', 'WAS', 'THE', 'MORE', 'PUZZLED', 'BECAUSE', 'IT', 'HAD', 'NEVER', 'DONE', 'SUCH', 'A', 'THING', 'BEFORE'] +8555-284447-0018-2317: ref=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 
'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0018-2317: hyp=['AT', 'ONCE', 'THE', 'GOAT', 'GAVE', 'A', 'LEAP', 'ESCAPED', 'FROM', 'THE', 'SOLDIERS', 'AND', 'WITH', 'BOWED', 'HEAD', 'RUSHED', 'UPON', 'THE', 'BOOLOOROO'] +8555-284447-0019-2318: ref=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUTTED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', 'KING', 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0019-2318: hyp=['BEFORE', 'ANY', 'COULD', 'STOP', 'HIM', 'HE', 'BUDDED', 'HIS', 'MAJESTY', 'SO', 'FURIOUSLY', 'THAT', 'THE', "KING'S", 'SOARED', 'FAR', 'INTO', 'THE', 'AIR', 'AND', 'TUMBLED', 'IN', 'A', 'HEAP', 'AMONG', 'THE', 'BENCHES', 'WHERE', 'HE', 'LAY', 'MOANING', 'AND', 'GROANING'] +8555-284447-0020-2319: ref=['THE', "GOAT'S", 'WARLIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0020-2319: hyp=['THE', 'GOATS', 'WORE', 'LIKE', 'SPIRIT', 'WAS', 'ROUSED', 'BY', 'THIS', 'SUCCESSFUL', 'ATTACK'] +8555-284447-0021-2320: ref=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERYONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0021-2320: hyp=['THEN', 'THEY', 'SPED', 'IN', 'GREAT', 'HASTE', 'FOR', 'THE', 'DOOR', 'AND', 'THE', 'GOAT', 'GAVE', 'A', 'FINAL', 'BUTT', 'THAT', 'SENT', 'THE', 'ROW', 'OF', 'ROYAL', 'LADIES', 'ALL', 'DIVING', 'INTO', 'THE', 'CORRIDOR', 'IN', 'ANOTHER', 'TANGLE', 'WHEREUPON', 'THEY', 'SHRIEKED', 'IN', 'A', 'MANNER', 'THAT', 'TERRIFIED', 'EVERY', 'ONE', 'WITHIN', 'SOUND', 'OF', 'THEIR', 'VOICES'] +8555-284447-0022-2321: ref=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MATE', 'AS', 'SAVED', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0022-2321: hyp=['I', 'HAD', 'A', 'NOTION', 'IT', 'WAS', 'YOU', 'MADE', 'TO', 'SEE', 'ME', 'FROM', 'THE', 'KNIFE'] +8555-284447-0023-2322: ref=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEIN', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', "I'M", 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUS', 'ONE', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', "FEELIN'S"] +8555-284447-0023-2322: hyp=['I', "COULDN'T", 'SHIVER', 'MUCH', 'BEING', 'BOUND', 'SO', 'TIGHT', 'BUT', 'WHEN', "I'M", 'LOOSE', 'I', 'MEAN', 'TO', 'HAVE', 'JUST', 'SWUNG', 'GOOD', 'SHIVER', 'TO', 'RELIEVE', 'MY', 'FEELINS'] +8555-284447-0024-2323: ref=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284447-0024-2323: hyp=['COME', 'AND', 'GET', 'THE', 'BOOLOOROO', 'SHE', 'SAID', 'GOING', 'TOWARD', 'THE', 'BENCHES'] +8555-284449-0000-2324: ref=['SO', 'THEY', 'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLASTS', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'ASSEMBLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0000-2324: hyp=['SO', 'THEY', 'WERE', 'QUITE', 'WILLING', 'TO', 'OBEY', 'THE', 'ORDERS', 'OF', 'THEIR', 'GIRL', 'QUEEN', 'AND', 'IN', 'A', 'SHORT', 'TIME', 'THE', 'BLAST', 'OF', 'TRUMPETS', 'AND', 'ROLL', 'OF', 'DRUMS', 'AND', 'CLASHING', 'OF', 'CYMBALS', 
'TOLD', 'TROT', 'AND', "CAP'N", 'BILL', 'THAT', 'THE', 'BLUE', 'BANDS', 'HAD', 'A', 'SIMPLED', 'BEFORE', 'THE', 'PALACE'] +8555-284449-0001-2325: ref=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0001-2325: hyp=['THEN', 'THEY', 'ALL', 'MARCHED', 'OUT', 'A', 'LITTLE', 'WAY', 'INTO', 'THE', 'FIELDS', 'AND', 'FOUND', 'THAT', 'THE', 'ARMY', 'OF', 'PINKIES', 'HAD', 'ALREADY', 'FORMED', 'AND', 'WAS', 'ADVANCING', 'STEADILY', 'TOWARD', 'THEM'] +8555-284449-0002-2326: ref=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORALIE', 'AND', 'CAPTAIN', 'TINTINT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0002-2326: hyp=['AT', 'THE', 'HEAD', 'OF', 'THE', 'PINKIES', 'WERE', 'GHIP', 'GHISIZZLE', 'AND', 'BUTTON', 'BRIGHT', 'WHO', 'HAD', 'THE', 'PARROT', 'ON', 'HIS', 'SHOULDER', 'AND', 'THEY', 'WERE', 'SUPPORTED', 'BY', 'CAPTAIN', 'CORLIE', 'AND', 'CAPTAIN', 'TINTANT', 'AND', 'ROSALIE', 'THE', 'WITCH'] +8555-284449-0003-2327: ref=['WHEN', 'THE', 'BLUESKINS', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVORITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0003-2327: hyp=['WHEN', 'THE', 'BLUESKIN', 'SAW', 'GHIP', 'GHISIZZLE', 'THEY', 'RAISED', 'ANOTHER', 'GREAT', 'SHOUT', 'FOR', 'HE', 'WAS', 'THE', 'FAVORITE', 'OF', 'THE', 'SOLDIERS', 'AND', 'VERY', 'POPULAR', 'WITH', 'ALL', 'THE', 'PEOPLE'] +8555-284449-0004-2328: ref=['SINCE', 'LAST', 'THURSDAY', 'I', 'GHIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0004-2328: hyp=['SINCE', 'LAST', 'THURSDAY', 'I', 'GIP', 'GHISIZZLE', 'HAVE', 'BEEN', 'THE', 'LAWFUL', 'BOOLOOROO', 'OF', 'THE', 'BLUE', 'COUNTRY', 'BUT', 'NOW', 'THAT', 'YOU', 'ARE', 'CONQUERED', 'BY', 'QUEEN', 'TROT', 'I', 'SUPPOSE', 'I', 'AM', 'CONQUERED', 'TOO', 'AND', 'YOU', 'HAVE', 'NO', 'BOOLOOROO', 'AT', 'ALL'] +8555-284449-0005-2329: ref=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0005-2329: hyp=['WHEN', 'HE', 'FINISHED', 'SHE', 'SAID', 'CHEERFULLY'] +8555-284449-0006-2330: ref=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0006-2330: hyp=["DON'T", 'WORRY', 'SIZZLE', 'DEAR', "IT'LL", 'ALL', 'COME', 'RIGHT', 'PRETTY', 'SOON'] +8555-284449-0007-2331: ref=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AN', 'ENJOY', 'THE', 'GRAND', 'FEAST', "THAT'S", 'BEING', 'COOKED', "I'M", 'NEARLY', 'STARVED', 'MYSELF', 'FOR', 'THIS', 'CONQUERIN', 'KINGDOMS', 'IS', 'HARD', 'WORK'] +8555-284449-0007-2331: hyp=['NOW', 'THEN', "LET'S", 'ENTER', 'THE', 'CITY', 'AND', 'ENJOY', 'THE', 'GREAT', 'FEAST', 'ITS', 'BEING', 'COOKED', "I'M", 'NEARLY', 'STORM', 'MYSELF', 'FOR', 'THIS', 'CONQUERING', "KINGDOM'S", 'IS', 'HARD', 'WORK'] +8555-284449-0008-2332: ref=['THEN', 'SHE', 'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0008-2332: hyp=['THEN', 'SHE', 
'GAVE', 'ROSALIE', 'BACK', 'HER', 'MAGIC', 'RING', 'THANKING', 'THE', 'KIND', 'WITCH', 'FOR', 'ALL', 'SHE', 'HAD', 'DONE', 'FOR', 'THEM'] +8555-284449-0009-2333: ref=['YOU', 'ARE', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0009-2333: hyp=['YOU', 'ARE', 'A', 'MATE', 'REPLIED', 'THE', 'SAILOR'] +8555-284449-0010-2334: ref=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0010-2334: hyp=['IT', 'WILL', 'BE', 'SUCH', 'A', 'SATISFACTION'] +8555-284449-0011-2335: ref=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0011-2335: hyp=['THE', 'GUARDS', 'HAD', 'A', 'TERRIBLE', 'STRUGGLE', 'WITH', 'THE', 'GOAT', 'WHICH', 'WAS', 'LOOSE', 'IN', 'THE', 'ROOM', 'AND', 'STILL', 'WANTED', 'TO', 'FIGHT', 'BUT', 'FINALLY', 'THEY', 'SUBDUED', 'THE', 'ANIMAL', 'AND', 'THEN', 'THEY', 'TOOK', 'THE', 'BOOLOOROO', 'OUT', 'OF', 'THE', 'FRAME', 'HE', 'WAS', 'TIED', 'IN', 'AND', 'BROUGHT', 'BOTH', 'HIM', 'AND', 'THE', 'GOAT', 'BEFORE', 'QUEEN', 'TROT', 'WHO', 'AWAITED', 'THEM', 'IN', 'THE', 'THRONE', 'ROOM', 'OF', 'THE', 'PALACE'] +8555-284449-0012-2336: ref=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONORABLE', 'GOAT', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0012-2336: hyp=["I'LL", 'GLADLY', 'DO', 'THAT', 'PROMISED', 'THE', 'NEW', 'BOOLOOROO', 'AND', "I'LL", 'FEED', 'THE', 'HONED', 'ALL', 'THE', 'SHAVINGS', 'AND', 'LEATHER', 'AND', 'TIN', 'CANS', 'HE', 'CAN', 'EAT', 'BESIDES', 'THE', 'GRASS'] +8555-284449-0013-2337: ref=['SCUSE', 'ME', 'SAID', 'TROT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0013-2337: hyp=['EXCUSE', 'ME', 'SAID', 'SHOT', 'I', 'NEGLECTED', 'TO', 'TELL', 'YOU', 'THAT', "YOU'RE", 'NOT', 'THE', 'BOOLOOROO', 'ANY', 'MORE'] +8555-284449-0014-2338: ref=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0014-2338: hyp=['THE', 'FORMER', 'BOOLOOROO', 'GROANED'] +8555-284449-0015-2339: ref=["I'LL", 'NOT', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0015-2339: hyp=['HOW', 'NOW', 'BE', 'WICKED', 'ANY', 'MORE', 'SIGHED', 'THE', 'OLD', 'BOOLOOROO', "I'LL", 'REFORM'] +8555-284449-0016-2340: ref=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0016-2340: hyp=['AS', 'A', 'PRIVATE', 'CITIZEN', 'I', 'SHALL', 'BE', 'A', 'MODEL', 'OF', 'DEPORTMENT', 'BECAUSE', 'IT', 'WOULD', 'BE', 'DANGEROUS', 'TO', 'BE', 'OTHERWISE'] +8555-284449-0017-2341: ref=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] 
+8555-284449-0017-2341: hyp=['WHEN', 'FIRST', 'THEY', 'ENTERED', 'THE', 'THRONE', 'ROOM', 'THEY', 'TRIED', 'TO', 'BE', 'AS', 'HAUGHTY', 'AND', 'SCORNFUL', 'AS', 'EVER', 'BUT', 'THE', 'BLUES', 'WHO', 'WERE', 'ASSEMBLED', 'THERE', 'ALL', 'LAUGHED', 'AT', 'THEM', 'AND', 'JEERED', 'THEM', 'FOR', 'THERE', 'WAS', 'NOT', 'A', 'SINGLE', 'PERSON', 'IN', 'ALL', 'THE', 'BLUE', 'COUNTRY', 'WHO', 'LOVED', 'THE', 'PRINCESSES', 'THE', 'LEAST', 'LITTLE', 'BIT'] +8555-284449-0018-2342: ref=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0018-2342: hyp=['SO', 'GHIP', 'GHISIZZLE', 'ORDERED', 'THE', 'CAPTAIN', 'TO', 'TAKE', 'A', 'FILE', 'OF', 'SOLDIERS', 'AND', 'ESCORT', 'THE', 'RAVING', 'BEAUTIES', 'TO', 'THEIR', 'NEW', 'HOME'] +8555-284449-0019-2343: ref=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'AND', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0019-2343: hyp=['THAT', 'EVENING', 'TROT', 'GAVE', 'A', 'GRAND', 'BALL', 'IN', 'THE', 'PALACE', 'TO', 'WHICH', 'THE', 'MOST', 'IMPORTANT', 'OF', 'THE', 'PINKIES', 'IN', 'THE', 'BLUESKINS', 'WERE', 'INVITED'] +8555-284449-0020-2344: ref=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-284449-0020-2344: hyp=['THE', 'COMBINED', 'BANDS', 'OF', 'BOTH', 'THE', 'COUNTRIES', 'PLAYED', 'THE', 'MUSIC', 'AND', 'A', 'FINE', 'SUPPER', 'WAS', 'SERVED'] +8555-292519-0000-2283: ref=['BRIGHTER', 'THAN', 'EARLY', "DAWN'S", 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0000-2283: hyp=['BRIGHTER', 'THAN', 'EARLY', 'DAWNS', 'MOST', 'BRILLIANT', 'DYE', 'ARE', 'BLOWN', 'CLEAR', 'BANDS', 'OF', 'COLOR', 'THROUGH', 'THE', 'SKY', 'THAT', 'SWIRL', 'AND', 'SWEEP', 'AND', 'MEET', 'TO', 'BREAK', 'AND', 'FOAM', 'LIKE', 'RAINBOW', 'VEILS', 'UPON', 'A', "BUBBLE'S", 'DOME'] +8555-292519-0001-2284: ref=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', 'GARDENS', 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'LILY', 'PLANTS'] +8555-292519-0001-2284: hyp=['GUIDED', 'BY', 'YOU', 'HOW', 'WE', 'MIGHT', 'STROLL', 'TOWARDS', 'DEATH', 'OUR', 'ONLY', 'MUSIC', 'ONE', "ANOTHER'S", 'BREATH', 'THROUGH', "GARDEN'S", 'INTIMATE', 'WITH', 'HOLLYHOCKS', 'WHERE', 'IS', 'SILENT', 'POPPIES', 'BURN', 'BETWEEN', 'THE', 'ROCKS', 'BY', 'POOLS', 'WHERE', 'BIRCHES', 'BEND', 'TO', 'CONFIDANTS', 'ABOVE', 'GREEN', 'WATERS', 'SCUMMED', 'WITH', 'THE', 'LILY', 'PLANTS'] +8555-292519-0002-2285: ref=['VENICE'] +8555-292519-0002-2285: hyp=['VENICE'] +8555-292519-0003-2286: ref=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEACHED', "KING'S", 'GALLEY', 'WHOSE', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0003-2286: hyp=['IN', 'A', 'SUNSET', 'GLOWING', 'OF', 'CRIMSON', 'AND', 'GOLD', 'SHE', 'LIES', 'THE', 
'GLORY', 'OF', 'THE', 'WORLD', 'A', 'BEECHED', "KING'S", 'GALLEY', 'WHO', 'SAILS', 'ARE', 'FURLED', 'WHO', 'IS', 'HUNG', 'WITH', 'TAPESTRIES', 'RICH', 'AND', 'OLD'] +8555-292519-0004-2287: ref=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0004-2287: hyp=['THE', 'PITY', 'THAT', 'WE', 'MUST', 'COME', 'AND', 'GO'] +8555-292519-0005-2288: ref=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 'GLOW'] +8555-292519-0005-2288: hyp=['WHILE', 'THE', 'OLD', 'GOLD', 'AND', 'THE', 'MARBLE', 'STAYS', 'FOREVER', 'GLEAMING', 'ITS', 'SOFT', 'STRONG', 'BLAZE', 'CALM', 'IN', 'THE', 'EARLY', 'EVENING', 'GLOW'] +8555-292519-0006-2289: ref=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0006-2289: hyp=['THE', 'PLEASANT', 'GRAVEYARD', 'OF', 'MY', 'SOUL', 'WITH', 'SENTIMENTAL', 'CYPRESS', 'TREES', 'AND', 'FLOWERS', 'IS', 'FILLED', 'THAT', 'I', 'MAY', 'STROLL', 'IN', 'MEDITATION', 'AT', 'MY', 'EASE'] +8555-292519-0007-2290: ref=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0007-2290: hyp=['IT', 'IS', 'MY', 'HEART', 'HUNG', 'IN', 'THE', 'SKY', 'AND', 'NO', 'CLOUDS', 'EVER', 'FLOAT', 'BETWEEN', 'THE', 'GRAVE', 'FLOWERS', 'AND', 'MY', 'HEART', 'ON', 'HIGH'] +8555-292519-0008-2291: ref=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MEN', 'THE', 'GRINNING', 'MEN', 'PASS'] +8555-292519-0008-2291: hyp=['OVER', 'THE', 'TRACK', 'LINED', 'CITY', 'STREET', 'THE', 'YOUNG', 'MAN', 'THE', 'GRINNING', 'MAN', 'PASS'] +8555-292519-0009-2292: ref=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WANDER', 'IN', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'YE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0009-2292: hyp=['HO', 'YE', 'SAILS', 'THAT', 'SEEM', 'TO', 'WANDER', 'AND', 'DREAM', 'FILLED', 'MEADOWS', 'SAY', 'IS', 'THE', 'SHORE', 'WHERE', 'I', 'STAND', 'THE', 'ONLY', 'FIELD', 'OF', 'STRUGGLE', 'OR', 'ARE', 'YE', 'HIT', 'AND', 'BATTERED', 'OUT', 'THERE', 'BY', 'WAVES', 'AND', 'WIND', 'GUSTS', 'AS', 'YE', 'TACK', 'OVER', 'A', 'CLASHING', 'SEA', 'OF', 'WATERY', 'ECHOES'] +8555-292519-0010-2293: ref=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0010-2293: hyp=['OLD', 'DANCES', 'ARE', 'SIMPLIFIED', 'OF', 'THEIR', 'YEARNING', 'BLEACHED', 'BY', 'TIME'] +8555-292519-0011-2294: ref=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0011-2294: hyp=['HE', 'HAD', 'GOT', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0012-2295: ref=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0012-2295: hyp=['THROUGH', 'THE', 'BLACK', 'NIGHT', 'RAIN', 'HE', 'SANG', 'TO', 'HER', 'WINDOW', 'BARS'] +8555-292519-0013-2296: ref=['THAT', 'WAS', 'BUT', 'RUSTLING', 'OF', 'DRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0013-2296: hyp=['THAT', 'WAS', 'BUT', 'RUSTLING', 'OF', 'TRIPPING', 'PLANTS', 'IN', 'THE', 'DARK'] +8555-292519-0014-2297: ref=['SHE', 'WAS', 'ALONE', 
'THAT', 'NIGHT'] +8555-292519-0014-2297: hyp=['SHE', 'WAS', 'ALONE', 'THAT', 'NIGHT'] +8555-292519-0015-2298: ref=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +8555-292519-0015-2298: hyp=['HE', 'HAD', 'BROKEN', 'INTO', 'HER', 'COURTYARD'] +908-157963-0000-1321: ref=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] +908-157963-0000-1321: hyp=['TO', 'FADE', 'AWAY', 'LIKE', 'MORNING', 'BEAUTY', 'FROM', 'HER', 'MORTAL', 'DAY', 'DOWN', 'BY', 'THE', 'RIVER', 'OF', 'ADONNA', 'HER', 'SOFT', 'VOICE', 'IS', 'HEARD', 'AND', 'THUS', 'HER', 'GENTLE', 'LAMENTATION', 'FALLS', 'LIKE', 'MORNING', 'DEW'] +908-157963-0001-1322: ref=['O', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0001-1322: hyp=['OH', 'LIFE', 'OF', 'THIS', 'OUR', 'SPRING'] +908-157963-0002-1323: ref=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0002-1323: hyp=['WHY', 'FADES', 'THE', 'LOTUS', 'OF', 'THE', 'WATER'] +908-157963-0003-1324: ref=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0003-1324: hyp=['WHY', 'FADE', 'THESE', 'CHILDREN', 'OF', 'THE', 'SPRING'] +908-157963-0004-1325: ref=['THEL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', 'INFANTS', 'FACE'] +908-157963-0004-1325: hyp=['FELL', 'IS', 'LIKE', 'A', 'WATRY', 'BOW', 'AND', 'LIKE', 'A', 'PARTING', 'CLOUD', 'LIKE', 'A', 'REFLECTION', 'IN', 'A', 'GLASS', 'LIKE', 'SHADOWS', 'IN', 'THE', 'WATER', 'LIKE', 'DREAMS', 'OF', 'INFANTS', 'LIKE', 'A', 'SMILE', 'UPON', 'AN', "INFANT'S", 'FACE'] +908-157963-0005-1326: ref=['LIKE', 'THE', 'DOVES', 'VOICE', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0005-1326: hyp=['LIKE', 'THE', "DOVE'S", 'BOYS', 'LIKE', 'TRANSIENT', 'DAY', 'LIKE', 'MUSIC', 'IN', 'THE', 'AIR', 'AH'] +908-157963-0006-1327: ref=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0006-1327: hyp=['AND', 'GENTLE', 'SLEEP', 'THE', 'SLEEP', 'OF', 'DEATH', 'AND', 'GENTLY', 'HEAR', 'THE', 'VOICE', 'OF', 'HIM', 'THAT', 'WALKETH', 'IN', 'THE', 'GARDEN', 'IN', 'THE', 'EVENING', 'TIME'] +908-157963-0007-1328: ref=['THE', 'LILLY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERD', 'THE', 'LOVELY', 'MAID', 'AND', 'SAID', 'I', 'AM', 'A', 'WATRY', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PERCHES', 'ON', 'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEW', 'BORN', 'LILY', 'FLOWER'] +908-157963-0007-1328: hyp=['THE', 'LILY', 'OF', 'THE', 'VALLEY', 'BREATHING', 'IN', 'THE', 'HUMBLE', 'GRASS', 'ANSWERED', 'THE', 'LOVELY', 'MAIDEN', 'SAID', 'I', 'AM', 'A', 'WATCHERY', 'WEED', 'AND', 'I', 'AM', 'VERY', 'SMALL', 'AND', 'LOVE', 'TO', 'DWELL', 'IN', 'LOWLY', 'VALES', 'SO', 'WEAK', 'THE', 'GILDED', 'BUTTERFLY', 'SCARCE', 'PURCHASE', 'ON', 
'MY', 'HEAD', 'YET', 'I', 'AM', 'VISITED', 'FROM', 'HEAVEN', 'AND', 'HE', 'THAT', 'SMILES', 'ON', 'ALL', 'WALKS', 'IN', 'THE', 'VALLEY', 'AND', 'EACH', 'MORN', 'OVER', 'ME', 'SPREADS', 'HIS', 'HAND', 'SAYING', 'REJOICE', 'THOU', 'HUMBLE', 'GRASS', 'THOU', 'NEWBORN', 'LILY', 'FLOWER'] +908-157963-0008-1329: ref=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALL', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', 'SUMMERS', 'HEAT', 'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULD', 'THEL', 'COMPLAIN'] +908-157963-0008-1329: hyp=['THOU', 'GENTLE', 'MAID', 'OF', 'SILENT', 'VALLEYS', 'AND', 'OF', 'MODEST', 'BROOKS', 'FOR', 'THOU', 'SHALT', 'BE', 'CLOTHED', 'IN', 'LIGHT', 'AND', 'FED', 'WITH', 'MORNING', 'MANNA', 'TILL', "SUMMER'S", 'HEAT', 'MELTS', 'THEE', 'BESIDE', 'THE', 'FOUNTAINS', 'AND', 'THE', 'SPRINGS', 'TO', 'FLOURISH', 'IN', 'ETERNAL', 'VALES', 'THEY', 'WHY', 'SHOULDST', 'THOU', 'COMPLAIN'] +908-157963-0009-1330: ref=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VALES', 'OF', 'HAR', 'UTTER', 'A', 'SIGH'] +908-157963-0009-1330: hyp=['WHY', 'SHOULD', 'THE', 'MISTRESS', 'OF', 'THE', 'VEILS', 'OF', 'HAR', 'UTTER', 'A', 'SIGH'] +908-157963-0010-1331: ref=['SHE', 'CEASD', 'AND', 'SMILD', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0010-1331: hyp=['SHE', 'CEASED', 'AND', 'SMILED', 'IN', 'TEARS', 'THEN', 'SAT', 'DOWN', 'IN', 'HER', 'SILVER', 'SHRINE'] +908-157963-0011-1332: ref=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0011-1332: hyp=['WHICH', 'THOU', 'DOST', 'SCATTER', 'ON', 'EVERY', 'LITTLE', 'BLADE', 'OF', 'GRASS', 'THAT', 'SPRINGS', 'REVIVES', 'THE', 'MILKED', 'COW', 'AND', 'TAMES', 'THE', 'FIRE', 'BREATHING', 'STEED'] +908-157963-0012-1333: ref=['BUT', 'THEL', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0012-1333: hyp=['BUT', 'THOUGH', 'IS', 'LIKE', 'A', 'FAINT', 'CLOUD', 'KINDLED', 'AT', 'THE', 'RISING', 'SUN', 'I', 'VANISH', 'FROM', 'MY', 'PEARLY', 'THRONE', 'AND', 'WHO', 'SHALL', 'FIND', 'MY', 'PLACE'] +908-157963-0013-1334: ref=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THRO', 'THE', 'HUMID', 'AIR'] +908-157963-0013-1334: hyp=['AND', 'WHY', 'IT', 'SCATTERS', 'ITS', 'BRIGHT', 'BEAUTY', 'THROUGH', 'THE', 'HUMAN', 'AIR'] +908-157963-0014-1335: ref=['DESCEND', 'O', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'THEL'] +908-157963-0014-1335: hyp=['DESCEND', 'A', 'LITTLE', 'CLOUD', 'AND', 'HOVER', 'BEFORE', 'THE', 'EYES', 'OF', 'FELL'] +908-157963-0015-1336: ref=['O', 'LITTLE', 'CLOUD', 'THE', 'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'THEL', 'IS', 'LIKE', 'TO', 'THEE'] +908-157963-0015-1336: hyp=['O', 'LITTLE', 'CLOUD', 'THE', 'VIRGIN', 'SAID', 'I', 'CHARGE', 'THEE', 'TO', 'TELL', 'ME', 'WHY', 'THOU', 'COMPLAINEST', 'NOW', 'WHEN', 'IN', 'ONE', 'HOUR', 'THOU', 'FADE', 'AWAY', 'THEN', 'WE', 'SHALL', 'SEEK', 'THEE', 'BUT', 'NOT', 'FIND', 'AH', 'FELL', 'IS', 'LIKE', 
'TO', 'THEE'] +908-157963-0016-1337: ref=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0016-1337: hyp=['I', 'PASS', 'AWAY', 'YET', 'I', 'COMPLAIN', 'AND', 'NO', 'ONE', 'HEARS', 'MY', 'VOICE'] +908-157963-0017-1338: ref=['THE', 'CLOUD', 'THEN', 'SHEWD', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', "EMERG'D"] +908-157963-0017-1338: hyp=['THE', 'CLOUD', 'THEN', 'SHOWED', 'HIS', 'GOLDEN', 'HEAD', 'AND', 'HIS', 'BRIGHT', 'FORM', 'EMERGED'] +908-157963-0018-1339: ref=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0018-1339: hyp=['AND', 'FEAREST', 'THOU', 'BECAUSE', 'I', 'VANISH', 'AND', 'AM', 'SEEN', 'NO', 'MORE'] +908-157963-0019-1340: ref=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'HOLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DEW', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0019-1340: hyp=['IT', 'IS', 'TO', 'TENFOLD', 'LIFE', 'TO', 'LOVE', 'TO', 'PEACE', 'AND', 'RAPTURES', 'WHOLLY', 'UNSEEN', 'DESCENDING', 'WEIGH', 'MY', 'LIGHT', 'WINGS', 'UPON', 'BALMY', 'FLOWERS', 'AND', 'COURT', 'THE', 'FAIR', 'EYED', 'DO', 'TO', 'TAKE', 'ME', 'TO', 'HER', 'SHINING', 'TENT', 'THE', 'WEEPING', 'VIRGIN', 'TREMBLING', 'KNEELS', 'BEFORE', 'THE', 'RISEN', 'SUN'] +908-157963-0020-1341: ref=['TILL', 'WE', 'ARISE', "LINK'D", 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0020-1341: hyp=['TILL', 'WE', 'ARISE', 'LINKED', 'IN', 'A', 'GOLDEN', 'BAND', 'AND', 'NEVER', 'PART', 'BUT', 'WALK', 'UNITED', 'BEARING', 'FOOD', 'TO', 'ALL', 'OUR', 'TENDER', 'FLOWERS'] +908-157963-0021-1342: ref=['LIVES', 'NOT', 'ALONE', 'NOR', 'OR', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0021-1342: hyp=['LIVES', 'NOT', 'ALONE', 'NOR', 'OF', 'ITSELF', 'FEAR', 'NOT', 'AND', 'I', 'WILL', 'CALL', 'THE', 'WEAK', 'WORM', 'FROM', 'ITS', 'LOWLY', 'BED', 'AND', 'THOU', 'SHALT', 'HEAR', 'ITS', 'VOICE'] +908-157963-0022-1343: ref=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0022-1343: hyp=['COME', 'FORTH', 'WORM', 'AND', 'THE', 'SILENT', 'VALLEY', 'TO', 'THY', 'PENSIVE', 'QUEEN'] +908-157963-0023-1344: ref=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', 'LILLYS', 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUD', 'SAILD', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0023-1344: hyp=['THE', 'HELPLESS', 'WORM', 'AROSE', 'AND', 'SAT', 'UPON', 'THE', "LILY'S", 'LEAF', 'AND', 'THE', 'BRIGHT', 'CLOUDS', 'SAILED', 'ON', 'TO', 'FIND', 'HIS', 'PARTNER', 'IN', 'THE', 'VALE'] +908-157963-0024-1345: ref=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0024-1345: hyp=['IMAGE', 'OF', 'WEAKNESS', 'ART', 'THOU', 'BUT', 'A', 'WORM'] +908-157963-0025-1346: ref=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', 'MOTHERS', 'SMILES'] +908-157963-0025-1346: hyp=['I', 'SEE', 'THEY', 'LAY', 'HELPLESS', 'AND', 'NAKED', 'WEEPING', 'AND', 'NONE', 'TO', 'ANSWER', 'NONE', 'TO', 'CHERISH', 'THEE', 'WITH', 
'MOTHERS', 'SMILES'] +908-157963-0026-1347: ref=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0026-1347: hyp=['AND', 'SAYS', 'THOU', 'MOTHER', 'OF', 'MY', 'CHILDREN', 'I', 'HAVE', 'LOVED', 'THEE', 'AND', 'I', 'HAVE', 'GIVEN', 'THEE', 'A', 'CROWN', 'THAT', 'NONE', 'CAN', 'TAKE', 'AWAY'] +908-157963-0027-1348: ref=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0027-1348: hyp=['AND', 'LAY', 'ME', 'DOWN', 'IN', 'THY', 'COLD', 'BED', 'AND', 'LEAVE', 'MY', 'SHINING', 'LOT'] +908-157963-0028-1349: ref=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWRING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0028-1349: hyp=['OR', 'AN', 'EYE', 'OF', 'GIFTS', 'AND', 'GRACES', 'SHOWERING', 'FRUITS', 'AND', 'COINED', 'GOLD'] +908-157963-0029-1350: ref=['WHY', 'A', 'TONGUE', "IMPRESS'D", 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0029-1350: hyp=['WHY', 'A', 'TONGUE', 'IMPRESSED', 'WITH', 'HONEY', 'FROM', 'EVERY', 'WIND'] +908-157963-0030-1351: ref=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-157963-0030-1351: hyp=['WHY', 'AN', 'EAR', 'A', 'WHIRLPOOL', 'FIERCE', 'TO', 'DRAW', 'CREATIONS', 'IN'] +908-31957-0000-1352: ref=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0000-1352: hyp=['ALL', 'IS', 'SAID', 'WITHOUT', 'A', 'WORD'] +908-31957-0001-1353: ref=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'PRODIGAL', 'INWARD', 'JOY'] +908-31957-0001-1353: hyp=['I', 'SIT', 'BENEATH', 'THY', 'LOOKS', 'AS', 'CHILDREN', 'DO', 'IN', 'THE', 'NOON', 'SUN', 'WITH', 'SOULS', 'THAT', 'TREMBLE', 'THROUGH', 'THEIR', 'HAPPY', 'EYELIDS', 'FROM', 'AN', 'UNAVERRED', 'YET', 'CHRONICAL', 'INWARD', 'JOY'] +908-31957-0002-1354: ref=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0002-1354: hyp=['I', 'DID', 'NOT', 'WRONG', 'MYSELF', 'SO', 'BUT', 'I', 'PLACED', 'A', 'WRONG', 'ON', 'THEE'] +908-31957-0003-1355: ref=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BRAKE', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0003-1355: hyp=['WHEN', 'CALLED', 'BEFORE', 'I', 'TOLD', 'HOW', 'HASTILY', 'I', 'DROPPED', 'MY', 'FLOWERS', 'OR', 'BREAK', 'OFF', 'FROM', 'A', 'GAME'] +908-31957-0004-1356: ref=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0004-1356: hyp=['SHALL', 'I', 'NEVER', 'MISS', 'HOME', 'TALK', 'AND', 'BLESSING', 'AND', 'THE', 'COMMON', 'KISS', 'THAT', 'COMES', 'TO', 'EACH', 'IN', 'TURN', 'NOR', 'COUNT', 'IT', 'STRANGE', 'WHEN', 'I', 'LOOK', 'UP', 'TO', 'DROP', 'ON', 'A', 'NEW', 'RANGE', 'OF', 'WALLS', 'AND', 'FLOORS', 'ANOTHER', 'HOME', 'THAN', 'THIS'] +908-31957-0005-1357: ref=['ALAS', 'I', 'HAVE', 'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0005-1357: hyp=['ALAS', 'I', 'HAVE', 'GRIEVED', 'SO', 'I', 'AM', 'HARD', 'TO', 'LOVE'] +908-31957-0006-1358: ref=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 
'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0006-1358: hyp=['OPEN', 'THY', 'HEART', 'WIDE', 'AND', 'FOLD', 'WITHIN', 'THE', 'WET', 'WINGS', 'OF', 'THY', 'DOVE'] +908-31957-0007-1359: ref=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0007-1359: hyp=['COULD', 'IT', 'MEAN', 'TO', 'LAST', 'A', 'LOVE', 'SET', 'PENDULOUS', 'BETWEEN', 'SORROW', 'AND', 'SORROW'] +908-31957-0008-1360: ref=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEARED', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0008-1360: hyp=['NAY', 'I', 'RATHER', 'THRILLED', 'DISTRUSTING', 'EVERY', 'LIGHT', 'THAT', 'SEEMED', 'TO', 'GILD', 'THE', 'ONWARD', 'PATH', 'AND', 'FEAR', 'TO', 'OVERLEAN', 'A', 'FINGER', 'EVEN'] +908-31957-0009-1361: ref=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0009-1361: hyp=['AND', 'THOUGH', 'I', 'HAVE', 'GROWN', 'SERENE', 'AND', 'STRONG', 'SINCE', 'THEN', 'I', 'THINK', 'THAT', 'GOD', 'HAS', 'WILLED', 'A', 'STILL', 'RENEWABLE', 'FEAR'] +908-31957-0010-1362: ref=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0010-1362: hyp=['O', 'LOVE', 'O', 'TROTH'] +908-31957-0011-1363: ref=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0011-1363: hyp=['AND', 'LOVE', 'BE', 'FALSE'] +908-31957-0012-1364: ref=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0012-1364: hyp=['IF', 'HE', 'TO', 'KEEP', 'ONE', 'OATH', 'MUST', 'LOSE', 'ONE', 'JOY', 'BY', 'HIS', "LIFE'S", 'STAR', 'FORETOLD'] +908-31957-0013-1365: ref=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGELS', 'SPEAK'] +908-31957-0013-1365: hyp=['SLOW', 'TO', 'WORLD', 'GREETINGS', 'QUICK', 'WITH', 'ITS', 'O', 'LIST', 'WHEN', 'THE', 'ANGEL', 'SPEAK'] +908-31957-0014-1366: ref=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0014-1366: hyp=['A', 'RING', 'OF', 'AMETHYST', 'I', 'COULD', 'NOT', 'WEAR', 'HERE', 'PLAINER', 'TO', 'MY', 'SIGHT', 'THAN', 'THAT', 'FIRST', 'KISS'] +908-31957-0015-1367: ref=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', "LOVE'S", 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PRECEDE', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IN', 'PERFECT', 'PURPLE', 'STATE', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0015-1367: hyp=['THAT', 'WAS', 'THE', 'CHRISM', 'OF', 'LOVE', 'WHICH', 'LOVES', 'OWN', 'CROWN', 'WITH', 'SANCTIFYING', 'SWEETNESS', 'DID', 'PROCEED', 'THE', 'THIRD', 'UPON', 'MY', 'LIPS', 'WAS', 'FOLDED', 'DOWN', 'IMPERFECT', 'PURPLE', 'STAINED', 'SINCE', 'WHEN', 'INDEED', 'I', 'HAVE', 'BEEN', 'PROUD', 'AND', 'SAID', 'MY', 'LOVE', 'MY', 'OWN'] +908-31957-0016-1368: ref=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0016-1368: hyp=['DEAREST', 'TEACH', 'ME', 'SO', 'TO', 'POUR', 'OUT', 'GRATITUDE', 'AS', 'THOU', 'DOST', 'GOOD'] +908-31957-0017-1369: ref=['MUSSULMANS', 'AND', 'GIAOURS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0017-1369: hyp=['MUSSELMENS', 'AND', 'GUY', 'ORS', 'THROW', 'KERCHIEFS', 'AT', 'A', 'SMILE', 'AND', 'HAVE', 
'NO', 'RUTH', 'FOR', 'ANY', 'WEEPING'] +908-31957-0018-1370: ref=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0018-1370: hyp=['BUT', 'THOU', 'ART', 'NOT', 'SUCH', 'A', 'LOVER', 'MY', 'BELOVED'] +908-31957-0019-1371: ref=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0019-1371: hyp=['THOU', 'CANST', 'WAIT', 'THROUGH', 'SORROW', 'AND', 'SICKNESS', 'TO', 'BRING', 'SOULS', 'TO', 'TOUCH', 'AND', 'THINK', 'IT', 'SOON', 'WHEN', 'OTHERS', 'CRY', 'TOO', 'LATE'] +908-31957-0020-1372: ref=['I', 'THANK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0020-1372: hyp=['I', 'THINK', 'ALL', 'WHO', 'HAVE', 'LOVED', 'ME', 'IN', 'THEIR', 'HEARTS', 'WITH', 'THANKS', 'AND', 'LOVE', 'FROM', 'MINE'] +908-31957-0021-1373: ref=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0021-1373: hyp=['OH', 'TO', 'SHOOT', 'MY', "SOUL'S", 'FULL', 'MEANING', 'INTO', 'FUTURE', 'YEARS', 'THAT', 'THEY', 'SHOULD', 'LEND', 'IT', 'UTTERANCE', 'AND', 'SALUTE', 'LOVE', 'THAT', 'ENDURES', 'FROM', 'LIFE', 'THAT', 'DISAPPEARS'] +908-31957-0022-1374: ref=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPEARLED'] +908-31957-0022-1374: hyp=['THEN', 'I', 'LONG', 'TRIED', 'BY', 'NATURAL', 'ILLS', 'RECEIVED', 'THE', 'COMFORT', 'FAST', 'WHILE', 'BUDDING', 'AT', 'THY', 'SIGHT', 'MY', "PILGRIM'S", 'STAFF', 'GAVE', 'OUT', 'GREEN', 'LEAVES', 'WITH', 'MORNING', 'DEWS', 'IMPELLED'] +908-31957-0023-1375: ref=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0023-1375: hyp=['I', 'LOVE', 'THEE', 'FREELY', 'AS', 'MEN', 'STRIVE', 'FOR', 'RIGHT', 'I', 'LOVE', 'THEE', 'PURELY', 'AS', 'THEY', 'TURN', 'FROM', 'PRAISE'] +908-31957-0024-1376: ref=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GRIEFS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0024-1376: hyp=['I', 'LOVE', 'THEE', 'WITH', 'THE', 'PASSION', 'PUT', 'TO', 'USE', 'IN', 'MY', 'OLD', 'GREEDS', 'AND', 'WITH', 'MY', "CHILDHOOD'S", 'FAITH'] +908-31957-0025-1377: ref=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] +908-31957-0025-1377: hyp=['I', 'LOVE', 'THEE', 'WITH', 'A', 'LOVE', 'I', 'SEEMED', 'TO', 'LOSE', 'WITH', 'MY', 'LOST', 'SAINTS', 'I', 'LOVE', 'THEE', 'WITH', 'THE', 'BREATH', 'SMILES', 'TEARS', 'OF', 'ALL', 'MY', 'LIFE', 'AND', 'IF', 'GOD', 'CHOOSE', 'I', 'SHALL', 'BUT', 'LOVE', 'THEE', 'BETTER', 'AFTER', 'DEATH'] diff --git a/log/modified_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new 
file mode 100644 index 0000000000000000000000000000000000000000..24bf00e08ecf13e594dccb98c7bdb1f5db00cb75 --- /dev/null +++ b/log/modified_beam_search/recogs-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,5878 @@ +1688-142285-0000-1948: ref=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 'STEEL', 'ANON'] +1688-142285-0000-1948: hyp=["THERE'S", 'IRON', 'THEY', 'SAY', 'IN', 'ALL', 'OUR', 'BLOOD', 'AND', 'A', 'GRAIN', 'OR', 'TWO', 'PERHAPS', 'IS', 'GOOD', 'BUT', 'HIS', 'HE', 'MAKES', 'ME', 'HARSHLY', 'FEEL', 'HAS', 'GOT', 'A', 'LITTLE', 'TOO', 'MUCH', 'OF', 'STEEL', 'ANON'] +1688-142285-0001-1949: ref=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUEST', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0001-1949: hyp=['MARGARET', 'SAID', 'MISTER', 'HALE', 'AS', 'HE', 'RETURNED', 'FROM', 'SHOWING', 'HIS', 'GUESTS', 'DOWNSTAIRS', 'I', 'COULD', 'NOT', 'HELP', 'WATCHING', 'YOUR', 'FACE', 'WITH', 'SOME', 'ANXIETY', 'WHEN', 'MISTER', 'THORNTON', 'MADE', 'HIS', 'CONFESSION', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY'] +1688-142285-0002-1950: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0002-1950: hyp=['YOU', "DON'T", 'MEAN', 'THAT', 'YOU', 'THOUGHT', 'ME', 'SO', 'SILLY'] +1688-142285-0003-1951: ref=['I', 'REALLY', 'LIKED', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0003-1951: hyp=['I', 'REALLY', 'LIKE', 'THAT', 'ACCOUNT', 'OF', 'HIMSELF', 'BETTER', 'THAN', 'ANYTHING', 'ELSE', 'HE', 'SAID'] +1688-142285-0004-1952: ref=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKED', 'BEST', 'OF', 'ALL'] +1688-142285-0004-1952: hyp=['HIS', 'STATEMENT', 'OF', 'HAVING', 'BEEN', 'A', 'SHOP', 'BOY', 'WAS', 'THE', 'THING', 'I', 'LIKE', 'BEST', 'OF', 'ALL'] +1688-142285-0005-1953: ref=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'OF', 'BEING', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0005-1953: hyp=['YOU', 'WHO', 'WERE', 'ALWAYS', 'ACCUSING', 'PEOPLE', 'HAVE', 'BEEN', 'SHOPPY', 'AT', 'HELSTONE'] +1688-142285-0006-1954: ref=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'IN', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0006-1954: hyp=['I', "DON'T", 'THINK', 'MISTER', 'HALE', 'YOU', 'HAVE', 'DONE', 'QUITE', 'RIGHT', 'INTRODUCING', 'SUCH', 'A', 'PERSON', 'TO', 'US', 'WITHOUT', 'TELLING', 'US', 'WHAT', 'HE', 'HAD', 'BEEN'] +1688-142285-0007-1955: ref=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PARTS', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0007-1955: hyp=['I', 'REALLY', 'WAS', 'VERY', 'MUCH', 'AFRAID', 'OF', 'SHOWING', 'HIM', 'HOW', 'MUCH', 'SHOCKED', 'I', 'WAS', 'AT', 'SOME', 'PART', 'OF', 'WHAT', 'HE', 'SAID'] +1688-142285-0008-1956: ref=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0008-1956: hyp=['HIS', 'FATHER', 'DYING', 'IN', 'MISERABLE', 'CIRCUMSTANCES'] +1688-142285-0009-1957: 
ref=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0009-1957: hyp=['WHY', 'IT', 'MIGHT', 'HAVE', 'BEEN', 'IN', 'THE', 'WORKHOUSE'] +1688-142285-0010-1958: ref=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0010-1958: hyp=['HIS', 'FATHER', 'SPECULATED', 'WILDLY', 'FAILED', 'AND', 'THEN', 'KILLED', 'HIMSELF', 'BECAUSE', 'HE', 'COULD', 'NOT', 'BEAR', 'THE', 'DISGRACE'] +1688-142285-0011-1959: ref=['ALL', 'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0011-1959: hyp=['ALL', 'HIS', 'FORMER', 'FRIENDS', 'SHRUNK', 'FROM', 'THE', 'DISCLOSURES', 'THAT', 'HAD', 'TO', 'BE', 'MADE', 'OF', 'HIS', 'DISHONEST', 'GAMBLING', 'WILD', 'HOPELESS', 'STRUGGLES', 'MADE', 'WITH', 'OTHER', "PEOPLE'S", 'MONEY', 'TO', 'REGAIN', 'HIS', 'OWN', 'MODERATE', 'PORTION', 'OF', 'WEALTH'] +1688-142285-0012-1960: ref=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0012-1960: hyp=['NO', 'ONE', 'CAME', 'FORWARDS', 'TO', 'HELP', 'THE', 'MOTHER', 'AND', 'THIS', 'BOY'] +1688-142285-0013-1961: ref=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISSUS', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TARDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0013-1961: hyp=['AT', 'LEAST', 'NO', 'FRIEND', 'CAME', 'FORWARDS', 'IMMEDIATELY', 'AND', 'MISTER', 'THORNTON', 'IS', 'NOT', 'ONE', 'I', 'FANCY', 'TO', 'WAIT', 'TILL', 'TIDY', 'KINDNESS', 'COMES', 'TO', 'FIND', 'HER', 'OUT'] +1688-142285-0014-1962: ref=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0014-1962: hyp=['SO', 'THEY', 'LEFT', 'MILTON'] +1688-142285-0015-1963: ref=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0015-1963: hyp=['HOW', 'TAINTED', 'ASKED', 'HER', 'FATHER'] +1688-142285-0016-1964: ref=['OH', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0016-1964: hyp=['O', 'PAPA', 'BY', 'THAT', 'TESTING', 'EVERYTHING', 'BY', 'THE', 'STANDARD', 'OF', 'WEALTH'] +1688-142285-0017-1965: ref=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0017-1965: hyp=['WHEN', 'HE', 'SPOKE', 'OF', 'THE', 'MECHANICAL', 'POWERS', 'HE', 'EVIDENTLY', 'LOOKED', 'UPON', 'THEM', 'ONLY', 'AS', 'NEW', 'WAYS', 'OF', 'EXTENDING', 'TRADE', 'AND', 'MAKING', 'MONEY'] +1688-142285-0018-1966: ref=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THEY', 'WERE', 'POOR', 'BECAUSE', 'THEY', 'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0018-1966: hyp=['AND', 'THE', 'POOR', 'MEN', 'AROUND', 'HIM', 'THERE', 'WERE', 'POOR', 'BECAUSE', 'THEY', 'WERE', 'VICIOUS', 'OUT', 'OF', 'THE', 'PALE', 'OF', 'HIS', 'SYMPATHIES', 'BECAUSE', 'THEY', 'HAD', 'NOT', 'HIS', 'IRON', 'NATURE', 'AND', 'THE', 'CAPABILITIES', 'THAT', 'IT', 'GIVES', 'HIM', 'FOR', 'BEING', 'RICH'] +1688-142285-0019-1967: ref=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 
'THAT'] +1688-142285-0019-1967: hyp=['NOT', 'VICIOUS', 'HE', 'NEVER', 'SAID', 'THAT'] +1688-142285-0020-1968: ref=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0020-1968: hyp=['IMPROVIDENT', 'AND', 'SELF', 'INDULGENT', 'WERE', 'HIS', 'WORDS'] +1688-142285-0021-1969: ref=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0021-1969: hyp=['MARGARET', 'WAS', 'COLLECTING', 'HER', "MOTHER'S", 'WORKING', 'MATERIALS', 'AND', 'PREPARING', 'TO', 'GO', 'TO', 'BED'] +1688-142285-0022-1970: ref=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0022-1970: hyp=['JUST', 'AS', 'SHE', 'WAS', 'LEAVING', 'THE', 'ROOM', 'SHE', 'HESITATED', 'SHE', 'WAS', 'INCLINED', 'TO', 'MAKE', 'AN', 'ACKNOWLEDGMENT', 'WHICH', 'SHE', 'THOUGHT', 'WOULD', 'PLEASE', 'HER', 'FATHER', 'BUT', 'WHICH', 'TO', 'BE', 'FULL', 'AND', 'TRUE', 'MUST', 'INCLUDE', 'A', 'LITTLE', 'ANNOYANCE'] +1688-142285-0023-1971: ref=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0023-1971: hyp=['HOWEVER', 'OUT', 'IT', 'CAME'] +1688-142285-0024-1972: ref=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0024-1972: hyp=['PAPA', 'I', 'DO', 'THINK', 'MISTER', 'THORNTON', 'A', 'VERY', 'REMARKABLE', 'MAN', 'BUT', 'PERSONALLY', 'I', "DON'T", 'LIKE', 'HIM', 'AT', 'ALL'] +1688-142285-0025-1973: ref=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0025-1973: hyp=['AND', 'I', 'DO', 'SAID', 'HER', 'FATHER', 'LAUGHING'] +1688-142285-0026-1974: ref=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0026-1974: hyp=['PERSONALLY', 'AS', 'YOU', 'CALL', 'IT', 'AND', 'ALL'] +1688-142285-0027-1975: ref=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0027-1975: hyp=['I', "DON'T", 'SET', 'HIM', 'UP', 'FOR', 'A', 'HERO', 'OR', 'ANYTHING', 'OF', 'THAT', 'KIND'] +1688-142285-0028-1976: ref=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0028-1976: hyp=['BUT', 'GOOD', 'NIGHT', 'CHILD'] +1688-142285-0029-1977: ref=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0029-1977: hyp=['THERE', 'WERE', 'SEVERAL', 'OTHER', 'SIGNS', 'OF', 'SOMETHING', 'WRONG', 'ABOUT', 'MISSUS', 'HALE'] +1688-142285-0030-1978: ref=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'HER', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 'UPON', 'HER', 'SYMPATHY'] +1688-142285-0030-1978: hyp=['SHE', 'AND', 'DIXON', 'HELD', 'MYSTERIOUS', 'CONSULTATIONS', 'IN', 'HER', 'BEDROOM', 'FROM', 'WHICH', 'DIXON', 'WOULD', 'COME', 'OUT', 'CRYING', 'AND', 'CROSS', 'AS', 'WAS', 'A', 'CUSTOM', 'WHEN', 'ANY', 'DISTRESS', 'OF', 'HER', 'MISTRESS', 'CALLED', 'UPON', 'HER', 'SYMPATHY'] +1688-142285-0031-1979: ref=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LEFT', 'IT', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 
'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'ENDURE', 'SEVERE', 'BODILY', 'SUFFERING'] +1688-142285-0031-1979: hyp=['ONCE', 'MARGARET', 'HAD', 'GONE', 'INTO', 'THE', 'CHAMBER', 'SOON', 'AFTER', 'DIXON', 'LIFTED', 'AND', 'FOUND', 'HER', 'MOTHER', 'ON', 'HER', 'KNEES', 'AND', 'AS', 'MARGARET', 'STOLE', 'OUT', 'SHE', 'CAUGHT', 'A', 'FEW', 'WORDS', 'WHICH', 'WERE', 'EVIDENTLY', 'A', 'PRAYER', 'FOR', 'STRENGTH', 'AND', 'PATIENCE', 'TO', 'INDURE', 'SEVERE', 'BODILY', 'SUFFERING'] +1688-142285-0032-1980: ref=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0032-1980: hyp=['BUT', 'THOUGH', 'SHE', 'RECEIVED', 'CARESSES', 'AND', 'FOND', 'WORDS', 'BACK', 'AGAIN', 'IN', 'SUCH', 'PROFUSION', 'AS', 'WOULD', 'HAVE', 'GLADDENED', 'HER', 'FORMERLY', 'YET', 'SHE', 'FELT', 'THAT', 'THERE', 'WAS', 'A', 'SECRET', 'WITHHELD', 'FROM', 'HER', 'AND', 'SHE', 'BELIEVED', 'IT', 'BORE', 'SERIOUS', 'REFERENCE', 'TO', 'HER', "MOTHER'S", 'HEALTH'] +1688-142285-0033-1981: ref=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0033-1981: hyp=['SHE', 'LAY', 'AWAKE', 'VERY', 'LONG', 'THIS', 'NIGHT', 'PLANNING', 'HOW', 'TO', 'LESSEN', 'THE', 'EVIL', 'INFLUENCE', 'OF', 'THEIR', 'MILTON', 'LIFE', 'ON', 'HER', 'MOTHER'] +1688-142285-0034-1982: ref=['A', 'SERVANT', 'TO', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'HER', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTION', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0034-1982: hyp=['A', 'SERVANT', 'GIVE', 'DIXON', 'PERMANENT', 'ASSISTANCE', 'SHOULD', 'BE', 'GOT', 'IF', 'SHE', 'GAVE', 'UP', 'THE', 'WHOLE', 'TIME', 'TO', 'THE', 'SEARCH', 'AND', 'THEN', 'AT', 'ANY', 'RATE', 'HER', 'MOTHER', 'MIGHT', 'HAVE', 'ALL', 'THE', 'PERSONAL', 'ATTENTIONS', 'SHE', 'REQUIRED', 'AND', 'HAD', 'BEEN', 'ACCUSTOMED', 'TO', 'HER', 'WHOLE', 'LIFE'] +1688-142285-0035-1983: ref=['VISITING', 'REGISTER', 'OFFICES', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0035-1983: hyp=['VISITING', 'REGISTER', 'OFFICERS', 'SEEING', 'ALL', 'MANNER', 'OF', 'UNLIKELY', 'PEOPLE', 'AND', 'VERY', 'FEW', 'IN', 'THE', 'LEAST', 'LIKELY', 'ABSORBED', "MARGARET'S", 'TIME', 'AND', 'THOUGHTS', 'FOR', 'SEVERAL', 'DAYS'] +1688-142285-0036-1984: ref=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 'SPEAK', 'TO', 'HER'] +1688-142285-0036-1984: hyp=['ONE', 'AFTERNOON', 'SHE', 'MET', 'BESSY', 'HIGGINS', 'IN', 'THE', 'STREET', 'AND', 'STOPPED', 'TO', 'SPEAK', 'TO', 'HER'] +1688-142285-0037-1985: ref=['WELL', 'BESSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0037-1985: hyp=['WELL', 'BUSY', 'HOW', 'ARE', 'YOU'] +1688-142285-0038-1986: ref=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YO', 'KNOW', 'WHAT', 'THAT', 'MEANS'] 
+1688-142285-0038-1986: hyp=['BETTER', 'AND', 'NOT', 'BETTER', 'IF', 'YOU', 'KNOW', 'WHAT', 'THAT', 'MEANS'] +1688-142285-0039-1987: ref=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0039-1987: hyp=['NOT', 'EXACTLY', 'REPLIED', 'MARGARET', 'SMILING'] +1688-142285-0040-1988: ref=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BY', 'COUGHING', "O'NIGHTS", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'O', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'O', 'BEULAH', 'AND', 'WHEN', 'I', 'THINK', "I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0040-1988: hyp=["I'M", 'BETTER', 'IN', 'NOT', 'BEING', 'TORN', 'TO', 'PIECES', 'BUT', 'COUGHING', 'A', "NIGHT'S", 'BUT', "I'M", 'WEARY', 'AND', 'TIRED', 'OF', 'MILTON', 'AND', 'LONGING', 'TO', 'GET', 'AWAY', 'TO', 'THE', 'LAND', 'OF', 'BOOLA', 'AND', 'WHEN', 'I', 'THINK', "I'M", 'FARTHER', 'AND', 'FARTHER', 'OFF', 'MY', 'HEART', 'SINKS', 'AND', "I'M", 'NO', 'BETTER', "I'M", 'WORSE'] +1688-142285-0041-1989: ref=['MARGARET', 'TURNED', 'ROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0041-1989: hyp=['MARGARET', 'TURNED', 'AROUND', 'TO', 'WALK', 'ALONGSIDE', 'OF', 'THE', 'GIRL', 'IN', 'HER', 'FEEBLE', 'PROGRESS', 'HOMEWARD'] +1688-142285-0042-1990: ref=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0042-1990: hyp=['BUT', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'SHE', 'DID', 'NOT', 'SPEAK'] +1688-142285-0043-1991: ref=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0043-1991: hyp=['AT', 'LAST', 'SHE', 'SAID', 'IN', 'A', 'LOW', 'VOICE'] +1688-142285-0044-1992: ref=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0044-1992: hyp=['BESSY', 'DO', 'YOU', 'WISH', 'TO', 'DIE'] +1688-142285-0045-1993: ref=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0045-1993: hyp=['BESSY', 'WAS', 'SILENT', 'IN', 'HER', 'TURN', 'FOR', 'A', 'MINUTE', 'OR', 'TWO', 'THEN', 'SHE', 'REPLIED'] +1688-142285-0046-1994: ref=['NOUGHT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0046-1994: hyp=['NOT', 'WORSE', 'THAN', 'MANY', 'OTHERS', 'I', 'RECKON'] +1688-142285-0047-1995: ref=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0047-1995: hyp=['BUT', 'WHAT', 'WAS', 'IT'] +1688-142285-0048-1996: ref=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'AT', 'MILTON'] +1688-142285-0048-1996: hyp=['YOU', 'KNOW', "I'M", 'A', 'STRANGER', 'HERE', 'SO', 'PERHAPS', "I'M", 'NOT', 'SO', 'QUICK', 'AT', 'UNDERSTANDING', 'WHAT', 'YOU', 'MEAN', 'AS', 'IF', "I'D", 'LIVED', 'ALL', 'MY', 'LIFE', 'IN', 'MILTON'] +1688-142285-0049-1997: ref=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0049-1997: hyp=['I', 'HAD', 'FORGOTTEN', 'WHAT', 'I', 'SAID', 'FOR', 'THE', 'TIME', 'CONTINUED', 'MARGARET', 'QUIETLY'] +1688-142285-0050-1998: ref=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 'LESS', 'BUSY', 'MAY', 'I', 'GO', 'WITH', 'YOU', 'NOW'] +1688-142285-0050-1998: hyp=['I', 'SHOULD', 'HAVE', 'THOUGHT', 'OF', 'IT', 'AGAIN', 'WHEN', 'I', 'WAS', 'LESS', 'BUSY', 'MARGAR', 'WITH', 'YOU', 'NOW'] +1688-142285-0051-1999: ref=['THE', 'SHARPNESS', 'IN', 
'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0051-1999: hyp=['THE', 'SHARPNESS', 'IN', 'HER', 'EYE', 'TURNED', 'TO', 'A', 'WISTFUL', 'LONGING', 'AS', 'SHE', 'MET', "MARGARET'S", 'SOFT', 'AND', 'FRIENDLY', 'GAZE'] +1688-142285-0052-2000: ref=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 'COURT', 'OPENING', 'OUT', 'OF', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0052-2000: hyp=['AS', 'THEY', 'TURNED', 'UP', 'INTO', 'A', 'SMALL', 'COURT', 'OPENING', 'OUT', 'INTO', 'A', 'SQUALID', 'STREET', 'BESSY', 'SAID'] +1688-142285-0053-2001: ref=["YO'LL", 'NOT', 'BE', 'DAUNTED', 'IF', "FATHER'S", 'AT', 'HOME', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0053-2001: hyp=["YOU'LL", 'NOT', 'BE', 'DAUNTED', 'IF', 'FATHER', 'SAID', 'HE', 'AND', 'SPEAKS', 'A', 'BIT', 'GRUFFISH', 'AT', 'FIRST'] +1688-142285-0054-2002: ref=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0054-2002: hyp=['BUT', 'NICHOLAS', 'WAS', 'NOT', 'AT', 'HOME', 'WHEN', 'THEY', 'ENTERED'] +1688-142285-0055-2003: ref=['GASPED', 'BESSY', 'AT', 'LAST'] +1688-142285-0055-2003: hyp=['GASPED', 'BESSIE', 'AT', 'LAST'] +1688-142285-0056-2004: ref=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0056-2004: hyp=['BESSY', 'TOOK', 'A', 'LONG', 'AND', 'FEVERISH', 'DRAUGHT', 'AND', 'THEN', 'FELL', 'BACK', 'AND', 'SHUT', 'HER', 'EYES'] +1688-142285-0057-2005: ref=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0057-2005: hyp=['MARGARET', 'BENT', 'OVER', 'AND', 'SAID', 'BESSY', "DON'T", 'BE', 'IMPATIENT', 'WITH', 'YOUR', 'LIFE', 'WHATEVER', 'IT', 'IS', 'OR', 'MAY', 'HAVE', 'BEEN'] +1688-142285-0058-2006: ref=['REMEMBER', 'WHO', 'GAVE', 'IT', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0058-2006: hyp=['REMEMBER', 'WHO', 'GAVE', 'IT', 'TO', 'YOU', 'AND', 'MADE', 'IT', 'WHAT', 'IT', 'IS'] +1688-142285-0059-2007: ref=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACHED', 'TO'] +1688-142285-0059-2007: hyp=['NOW', "I'LL", 'NOT', 'HAVE', 'MY', 'WENCH', 'PREACH', 'TOO'] +1688-142285-0060-2008: ref=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0060-2008: hyp=['BUT', 'SURELY', 'SAID', 'MARGARET', 'FACING', 'ROUND', 'YOU', 'BELIEVE', 'IN', 'WHAT', 'I', 'SAID', 'THAT', 'GOD', 'GAVE', 'HER', 'LIFE', 'AND', 'ORDERED', 'WHAT', 'KIND', 'OF', 'LIFE', 'IT', 'WAS', 'TO', 'BE'] +1688-142285-0061-2009: ref=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0061-2009: hyp=['I', 'BELIEVE', 'WHAT', 'I', 'SEE', 'AND', 'NO', 'MORE'] +1688-142285-0062-2010: ref=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0062-2010: hyp=["THAT'S", 'WHAT', 'I', 'BELIEVE', 'YOUNG', 'WOMAN'] +1688-142285-0063-2011: ref=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0063-2011: hyp=['I', "DON'T", 'BELIEVE', 'ALL', 'I', 'HEAR', 'NO', 'NOT', 'BY', 'A', 'BIG', 'DEAL'] +1688-142285-0064-2012: ref=['BUT', "HOO'S", 'COME', 'AT', 'LAST', 'AND', "HOO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HOO'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HOO', 
'KNOWS', 'NOUGHT', 'ABOUT'] +1688-142285-0064-2012: hyp=['BUT', "WHO'S", 'COME', 'AT', 'LAST', 'AND', "WHO'S", 'WELCOME', 'AS', 'LONG', 'AS', "HE'LL", 'KEEP', 'FROM', 'PREACHING', 'ON', 'WHAT', 'HE', 'KNOWS', 'NOT', 'ABOUT'] +1688-142285-0065-2013: ref=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0065-2013: hyp=["IT'S", 'SIMPLE', 'AND', 'NOT', 'FAR', 'TO', 'FETCH', 'NOR', 'HARD', 'TO', 'WORK'] +1688-142285-0066-2014: ref=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 'MARGARET'] +1688-142285-0066-2014: hyp=['BUT', 'THE', 'GIRL', 'ONLY', 'PLEADED', 'THE', 'MORE', 'WITH', 'MARGARET'] +1688-142285-0067-2015: ref=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0067-2015: hyp=["DON'T", 'THINK', 'HARDLY', 'ON', 'HIM', "HE'S", 'A', 'GOOD', 'MAN', 'HE', 'IS'] +1688-142285-0068-2016: ref=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MOPED', 'WI', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'FATHER', 'IS', 'NOT', 'THERE'] +1688-142285-0068-2016: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'SHALL', 'BE', 'MIRKED', 'WITH', 'SORROW', 'EVEN', 'IN', 'THE', 'CITY', 'OF', 'GOD', 'IF', 'EITHER', 'IS', 'NOT', 'THERE'] +1688-142285-0069-2017: ref=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEK', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0069-2017: hyp=['THE', 'FEVERISH', 'COLOUR', 'CAME', 'INTO', 'HER', 'CHEEKS', 'AND', 'THE', 'FEVERISH', 'FLAME', 'INTO', 'HER', 'EYE'] +1688-142285-0070-2018: ref=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0070-2018: hyp=['BUT', 'YOU', 'WILL', 'BE', 'THERE', 'FATHER', 'YOU', 'SHALL', 'OH', 'MY', 'HEART'] +1688-142285-0071-2019: ref=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0071-2019: hyp=['SHE', 'PUT', 'HER', 'HAND', 'TO', 'IT', 'AND', 'BECAME', 'GHASTLY', 'PALE'] +1688-142285-0072-2020: ref=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0072-2020: hyp=['MARGARET', 'HELD', 'HER', 'IN', 'HER', 'ARMS', 'AND', 'PUT', 'THE', 'WEARY', 'HEAD', 'TO', 'REST', 'UPON', 'HER', 'BOSOM'] +1688-142285-0073-2021: ref=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0073-2021: hyp=['PRESENTLY', 'THE', 'SPASM', 'THAT', 'FORESHADOWED', 'DEATH', 'HAD', 'PASSED', 'AWAY', 'AND', 'BESSY', 'ROUSED', 'HERSELF', 'AND', 'SAID'] +1688-142285-0074-2022: ref=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'AT', "MARGARET'S", 'GOWN', "YO'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YO', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0074-2022: hyp=["I'LL", 'GO', 'TO', 'BED', "IT'S", 'BEST', 'PLACE', 'BUT', 'CATCHING', 'THAT', "MARGARET'S", 'GUN', "YOU'LL", 'COME', 'AGAIN', 'I', 'KNOW', 'YOU', 'WILL', 'BUT', 'JUST', 'SAY', 'IT'] +1688-142285-0075-2023: ref=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0075-2023: hyp=['I', 'WILL', 'COME', 'TO', 'MORROW', 'SAID', 'MARGARET'] +1688-142285-0076-2024: ref=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 'THOUGHTFUL'] +1688-142285-0076-2024: hyp=['MARGARET', 'WENT', 'AWAY', 'VERY', 'SAD', 'AND', 'THOUGHTFUL'] +1688-142285-0077-2025: ref=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 'HOME'] +1688-142285-0077-2025: hyp=['SHE', 'WAS', 'LATE', 'FOR', 'TEA', 'AT', 
'HOME'] +1688-142285-0078-2026: ref=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0078-2026: hyp=['HAVE', 'YOU', 'MET', 'WITH', 'A', 'SERVANT', 'DEAR'] +1688-142285-0079-2027: ref=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0079-2027: hyp=['NO', 'MAMMA', 'THAT', 'ANNE', 'BUCKLEY', 'WOULD', 'NEVER', 'HAVE', 'DONE'] +1688-142285-0080-2028: ref=['SUPPOSE', 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] +1688-142285-0080-2028: hyp=["S'POSE", 'I', 'TRY', 'SAID', 'MISTER', 'HALE'] +1688-142285-0081-2029: ref=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEIR', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0081-2029: hyp=['EVERYBODY', 'ELSE', 'HAS', 'HAD', 'THEY', 'TURN', 'AT', 'THIS', 'GREAT', 'DIFFICULTY', 'NOW', 'LET', 'ME', 'TRY'] +1688-142285-0082-2030: ref=['I', 'MAY', 'BE', 'THE', 'CINDERELLA', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0082-2030: hyp=['I', 'MAY', 'BE', 'THE', 'CINORLA', 'TO', 'PUT', 'ON', 'THE', 'SLIPPER', 'AFTER', 'ALL'] +1688-142285-0083-2031: ref=['WHAT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0083-2031: hyp=['BUT', 'WOULD', 'YOU', 'DO', 'PAPA', 'HOW', 'WOULD', 'YOU', 'SET', 'ABOUT', 'IT'] +1688-142285-0084-2032: ref=['WHY', 'I', 'WOULD', 'APPLY', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0084-2032: hyp=['WHY', 'I', 'WOULD', 'APPLY', 'IT', 'TO', 'SOME', 'GOOD', 'HOUSE', 'MOTHER', 'TO', 'RECOMMEND', 'ME', 'ONE', 'KNOWN', 'TO', 'HERSELF', 'OR', 'HER', 'SERVANTS'] +1688-142285-0085-2033: ref=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0085-2033: hyp=['VERY', 'GOOD', 'BUT', 'WE', 'MUST', 'FIRST', 'CATCH', 'OUR', 'HOUSE', 'MOTHER'] +1688-142285-0086-2034: ref=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0086-2034: hyp=['THE', 'MOTHER', 'OF', 'WHOM', 'HE', 'SPOKE', 'TO', 'US', 'SAID', 'MARGARET'] +1688-142285-0087-2035: ref=['MISSUS', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0087-2035: hyp=['MISTER', 'THORNTON', 'THE', 'ONLY', 'MOTHER', 'HE', 'HAS', 'I', 'BELIEVE', 'SAID', 'MISTER', 'HALE', 'QUIETLY'] +1688-142285-0088-2036: ref=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0088-2036: hyp=['I', 'SHALL', 'LIKE', 'TO', 'SEE', 'HER', 'SHE', 'MUST', 'BE', 'AN', 'UNCOMMON', 'PERSON', 'HER', 'MOTHER', 'ADDED'] +1688-142285-0089-2037: ref=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0089-2037: hyp=['PERHAPS', 'SHE', 'MAY', 'HAVE', 'A', 'RELATION', 'WHO', 'MIGHT', 'SUIT', 'US', 'AND', 'BE', 'GLAD', 'OF', 'OUR', 'PLACE'] +1688-142285-0090-2038: ref=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0090-2038: hyp=['SHE', 'SOUNDED', 'TO', 'BE', 'SUCH', 'A', 'CAREFUL', 'ECONOMICAL', 'PERSON', 'THAT', 'I', 'SHOULD', 'LIKE', 'ANY', 'ONE', 'OUT', 'OF', 'THE', 'SAME', 'FAMILY'] +1688-142285-0091-2039: ref=['MY', 'DEAR', 'SAID', 'MISTER', 'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0091-2039: hyp=['MY', 'DEAR', 'SAID', 'MISTER', 
'HALE', 'ALARMED', 'PRAY', "DON'T", 'GO', 'OFF', 'ON', 'THAT', 'IDEA'] +1688-142285-0092-2040: ref=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0092-2040: hyp=['I', 'AM', 'SURE', 'AT', 'ANY', 'RATE', 'SHE', 'WOULD', 'NOT', 'LIKE', 'STRANGERS', 'TO', 'KNOW', 'ANYTHING', 'ABOUT', 'IT'] +1688-142285-0093-2041: ref=['TAKE', 'NOTICE', 'THAT', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', "DON'T", 'AGREE', 'TO', 'THOUGH', "YOU'RE", 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0093-2041: hyp=['TAKE', 'NOTICE', 'THAT', 'THIS', 'IS', 'NOT', 'MY', 'KIND', 'OF', 'HAUGHTINESS', 'PAPA', 'IF', 'I', 'HAVE', 'ANY', 'AT', 'ALL', 'WHICH', 'I', "DON'T", 'AGREE', 'TO', 'THOUGH', 'YOU', 'ALWAYS', 'ACCUSING', 'ME', 'OF', 'IT'] +1688-142285-0094-2042: ref=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0094-2042: hyp=['I', "DON'T", 'KNOW', 'POSITIVELY', 'THAT', 'IT', 'IS', 'HERS', 'EITHER', 'BUT', 'FROM', 'LITTLE', 'THINGS', 'I', 'HAVE', 'GATHERED', 'FROM', 'HIM', 'I', 'FANCY', 'SO'] +1688-142285-0095-2043: ref=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1688-142285-0095-2043: hyp=['THEY', 'CARED', 'TOO', 'LITTLE', 'TO', 'ASK', 'IN', 'WHAT', 'MANNER', 'HER', 'SON', 'HAD', 'SPOKEN', 'ABOUT', 'HER'] +1998-15444-0000-2204: ref=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'OR', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'IF', 'CALLED', 'ON', 'TO', 'DO', 'SO'] +1998-15444-0000-2204: hyp=['IF', 'CALLED', 'TO', 'A', 'CASE', 'SUPPOSED', 'AS', 'SUSPECTED', 'TO', 'BE', 'ONE', 'OF', 'POISONING', 'THE', 'MEDICAL', 'MAN', 'HAS', 'TWO', 'DUTIES', 'TO', 'PERFORM', 'TO', 'SAVE', 'THE', "PATIENT'S", 'LIFE', 'AND', 'TO', 'PLACE', 'HIMSELF', 'IN', 'A', 'POSITION', 'TO', 'GIVE', 'EVIDENCE', 'OF', 'CALLED', 'UNTO', 'DO', 'SO'] +1998-15444-0001-2205: ref=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'WAS', 'LAST', 'TAKEN'] +1998-15444-0001-2205: hyp=['HE', 'SHOULD', 'MAKE', 'INQUIRIES', 'AS', 'TO', 'SYMPTOMS', 'AND', 'TIME', 'AT', 'WHICH', 'FOOD', 'OR', 'MEDICINE', 'MUST', 'LAST', 'TAKEN'] +1998-15444-0002-2206: ref=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTIS', 'MARKS', 'OF', 'VIOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0002-2206: hyp=['HE', 'SHOULD', 'NOTICE', 'THE', 'POSITION', 'AND', 'TEMPERATURE', 'OF', 'THE', 'BODY', 'THE', 'CONDITION', 'OF', 'RIGOR', 'MORTIS', 'MARKS', 'OF', 'VIOLENCE', 'APPEARANCE', 'OF', 'LIPS', 'AND', 'MOUTH'] +1998-15444-0003-2207: ref=['IN', 'MAKING', 'A', 'POST', 'MORTEM', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] +1998-15444-0003-2207: hyp=['IN', 'MAKING', 'A', 'POST', 'MODE', 'OF', 'EXAMINATION', 'THE', 'ALIMENTARY', 'CANAL', 'SHOULD', 'BE', 'REMOVED', 'AND', 'PRESERVED', 'FOR', 'FURTHER', 'INVESTIGATION'] +1998-15444-0004-2208: ref=['THE', 'GUT', 'AND', 'THE', 'GULLET', 
'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPILLING', 'ITS', 'CONTENTS'] +1998-15444-0004-2208: hyp=['THE', 'GUT', 'AND', 'THE', 'GALLANT', 'BEING', 'CUT', 'ACROSS', 'BETWEEN', 'THESE', 'LIGATURES', 'THE', 'STOMACH', 'MAY', 'BE', 'REMOVED', 'ENTIRE', 'WITHOUT', 'SPINNING', 'ITS', 'CONTENTS'] +1998-15444-0005-2209: ref=['IF', 'THE', 'MEDICAL', 'PRACTITIONER', 'IS', 'IN', 'DOUBT', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOMEONE', 'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0005-2209: hyp=['IF', 'THE', 'MEDICA', 'PETITIONERS', 'ENDOWED', 'ON', 'ANY', 'POINT', 'HE', 'SHOULD', 'OBTAIN', 'TECHNICAL', 'ASSISTANCE', 'FROM', 'SOME', 'ONE', 'WHO', 'HAS', 'PAID', 'ATTENTION', 'TO', 'THE', 'SUBJECT'] +1998-15444-0006-2210: ref=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0006-2210: hyp=['IN', 'A', 'CASE', 'OF', 'ATTEMPTED', 'SUICIDE', 'BY', 'POISONING', 'IS', 'IT', 'THE', 'DUTY', 'OF', 'THE', 'DOCTOR', 'TO', 'INFORM', 'THE', 'POLICE'] +1998-15444-0007-2211: ref=['THE', 'BEST', 'EMETIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0007-2211: hyp=['THE', 'BEST', 'AMATIC', 'IS', 'THAT', 'WHICH', 'IS', 'AT', 'HAND'] +1998-15444-0008-2212: ref=['THE', 'DOSE', 'FOR', 'AN', 'ADULT', 'IS', 'TEN', 'MINIMS'] +1998-15444-0008-2212: hyp=['THE', 'DAYS', 'WERE', 'ADULGE', 'IS', 'TEN', 'MINIMS'] +1998-15444-0009-2213: ref=['APOMORPHINE', 'IS', 'NOT', 'ALLIED', 'IN', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NARCOTIC', 'POISONING'] +1998-15444-0009-2213: hyp=['EPIMORPHIN', 'IS', 'NOT', 'ALID', 'AND', 'PHYSIOLOGICAL', 'ACTION', 'TO', 'MORPHINE', 'AND', 'MAY', 'BE', 'GIVEN', 'IN', 'CASES', 'OF', 'NARCOTIC', 'POISONING'] +1998-15444-0010-2214: ref=['TICKLING', 'THE', 'FAUCES', 'WITH', 'A', 'FEATHER', 'MAY', 'EXCITE', 'VOMITING'] +1998-15444-0010-2214: hyp=['TICKLING', 'THE', 'FOSSES', 'WITH', 'THE', 'FEATHER', 'MAY', 'EXCITE', 'WARMITTING'] +1998-15444-0011-2215: ref=['IN', 'USING', 'THE', 'ELASTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SUCKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0011-2215: hyp=['IN', 'USING', 'THE', 'ELECTIC', 'STOMACH', 'TUBE', 'SOME', 'FLUID', 'SHOULD', 'BE', 'INTRODUCED', 'INTO', 'THE', 'STOMACH', 'BEFORE', 'ATTEMPTING', 'TO', 'EMPTY', 'IT', 'OR', 'A', 'PORTION', 'OF', 'THE', 'MUCOUS', 'MEMBRANE', 'MAY', 'BE', 'SACKED', 'INTO', 'THE', 'APERTURE'] +1998-15444-0012-2216: ref=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0012-2216: hyp=['THE', 'TUBE', 'SHOULD', 'BE', 'EXAMINED', 'TO', 'SEE', 'THAT', 'IT', 'IS', 'NOT', 'BROKEN', 'OR', 'CRACKED', 'AS', 'ACCIDENTS', 'HAVE', 'HAPPENED', 'FROM', 'NEGLECTING', 'THIS', 'PRECAUTION'] +1998-15444-0013-2217: ref=['ANTIDOTES', 'ARE', 'USUALLY', 'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'BY', 'MOUTH', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] +1998-15444-0013-2217: hyp=['AND', 'HE', 'VOTES', 'A', 'USUALLY', 'GIVEN', 'HYPODERMICALLY', 'OR', 'IF', 'THE', 'MOUSE', 'IN', 'THE', 'FORM', 'OF', 'TABLETS'] 
+1998-15444-0014-2218: ref=['IN', 'THE', 'ABSENCE', 'OF', 'A', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTUM'] +1998-15444-0014-2218: hyp=['IN', 'THE', 'ABSENCE', 'OF', 'THE', 'HYPODERMIC', 'SYRINGE', 'THE', 'REMEDY', 'MAY', 'BE', 'GIVEN', 'BY', 'THE', 'RECTIM'] +1998-15444-0015-2219: ref=['NOTICE', 'THE', 'SMELL', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0015-2219: hyp=['NOTICE', 'THE', 'SMAR', 'COLOUR', 'AND', 'GENERAL', 'APPEARANCE', 'OF', 'THE', 'MATTER', 'SUBMITTED', 'FOR', 'EXAMINATION'] +1998-15444-0016-2220: ref=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'ALKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STAS', 'OTTO'] +1998-15444-0016-2220: hyp=['FOR', 'THE', 'SEPARATION', 'OF', 'AN', 'AKALOID', 'THE', 'FOLLOWING', 'IS', 'THE', 'PROCESS', 'OF', 'STARS', 'OTTO'] +1998-15444-0017-2221: ref=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'ALKALOIDS', 'ARE', 'SOLUBLE', 'IN', 'ALCOHOL', 'AND', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0017-2221: hyp=['THIS', 'PROCESS', 'IS', 'BASED', 'UPON', 'THE', 'PRINCIPLE', 'THAT', 'THE', 'SALTS', 'OF', 'THE', 'ACHELIDES', 'ARE', 'SOLUBLE', 'IN', 'AKELET', 'WATER', 'AND', 'INSOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: ref=['THE', 'PURE', 'ALKALOIDS', 'WITH', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CRYSTALLINE', 'FORM', 'ARE', 'SOLUBLE', 'IN', 'ETHER'] +1998-15444-0018-2222: hyp=['THE', 'PURE', 'IKOLOITS', 'WAS', 'THE', 'EXCEPTION', 'OF', 'MORPHINE', 'IN', 'ITS', 'CRYSTALLINE', 'FORM', 'A', 'SOLUBLE', 'BENEATH', 'THEM'] +1998-15444-0019-2223: ref=['TWO', 'COOL', 'THE', 'MIXTURE', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FILTRATES'] +1998-15444-0019-2223: hyp=['TWO', 'U', 'THE', 'MIXED', 'AND', 'FILTER', 'WASH', 'THE', 'RESIDUE', 'WITH', 'STRONG', 'ALCOHOL', 'AND', 'MIX', 'THE', 'FUR', 'TRADES'] +1998-15444-0020-2224: ref=['THE', 'RESIDUE', 'MAY', 'BE', 'SET', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'IF', 'SUSPECTED', 'EXPEL', 'THE', 'ALCOHOL', 'BY', 'CAREFUL', 'EVAPORATION'] +1998-15444-0020-2224: hyp=['THE', 'RESIDUE', 'MAY', 'BE', 'SET', 'ASIDE', 'FOR', 'THE', 'DETECTION', 'OF', 'THE', 'METALLIC', 'POISONS', 'OF', 'SUSPECTED', 'EXPELLED', 'THE', 'ALCOHOLBA', 'CAREFUL', 'EVAPORATION'] +1998-15444-0021-2225: ref=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'RESINOUS', 'AND', 'FATTY', 'MATTERS', 'SEPARATE'] +1998-15444-0021-2225: hyp=['ON', 'THE', 'EVAPORATION', 'OF', 'THE', 'ALCOHOL', 'THE', 'ZENOUS', 'AND', 'FATTY', 'MATTER', 'SEPARATE'] +1998-15444-0022-2226: ref=['EVAPORATE', 'THE', 'FILTRATE', 'TO', 'A', 'SYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0022-2226: hyp=['EVAPORATE', 'THE', 'FEDERATE', 'TO', 'A', 'CYRUP', 'AND', 'EXTRACT', 'WITH', 'SUCCESSIVE', 'PORTIONS', 'OF', 'ABSOLUTE', 'ALCOHOL'] +1998-15444-0023-2227: ref=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'EVAPORATE'] +1998-15444-0023-2227: hyp=['SEPARATE', 'THE', 'ETHEREAL', 'SOLUTION', 'AND', 'THE', 'REPARATE'] +1998-15444-0024-2228: ref=['FIVE', 'A', 'PART', 'OF', 'THIS', 'ETHEREAL', 'SOLUTION', 'IS', 'POURED', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOWED', 'TO', 'EVAPORATE'] +1998-15444-0024-2228: hyp=['FIVE', 'A', 'PART', 'OF', 'THIS', 'ASSYRIAL', 'SOLUTION', 'IS', 'PUT', 'INTO', 'A', 'WATCH', 'GLASS', 'AND', 'ALLOW', 
'TO', 'EVAPORATE'] +1998-15444-0025-2229: ref=['TO', 'PURIFY', 'IT', 'ADD', 'A', 'SMALL', 'QUANTITY', 'OF', 'DILUTE', 'SULPHURIC', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BULK', 'ADD', 'A', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 'SODA'] +1998-15444-0025-2229: hyp=['TO', 'PURIFY', 'IT', 'ADDISMA', 'QUANTITY', 'OF', 'DELUDE', 'SUFFERG', 'ACID', 'AND', 'AFTER', 'EVAPORATING', 'TO', 'THREE', 'QUARTERS', 'OF', 'ITS', 'BULK', 'ADD', 'SATURATED', 'SOLUTION', 'OF', 'CARBONATE', 'OF', 'POTASH', 'OR', 'SODA'] +1998-15444-0026-2230: ref=['BOIL', 'THE', 'FINELY', 'DIVIDED', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTH', 'ITS', 'BULK', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'CHLORATE', 'UNTIL', 'THE', 'SOLIDS', 'ARE', 'REDUCED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0026-2230: hyp=['BY', 'THE', 'FINALLY', 'DIVIDE', 'SUBSTANCE', 'WITH', 'ABOUT', 'ONE', 'EIGHTHS', 'ITS', 'BAG', 'OF', 'PURE', 'HYDROCHLORIC', 'ACID', 'ADD', 'FROM', 'TIME', 'TO', 'TIME', 'POTASSIC', 'LOW', 'RAGE', 'UNTIL', 'THE', 'SOLIDS', 'ARE', 'REDUCED', 'TO', 'A', 'STRAW', 'YELLOW', 'FLUID'] +1998-15444-0027-2231: ref=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WITH', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIUM', 'CHLORATE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BARIUM'] +1998-15444-0027-2231: hyp=['THE', 'RESIDUE', 'OF', 'THE', 'MATERIAL', 'AFTER', 'DIGESTION', 'WAS', 'HYDROCHLORIC', 'ACID', 'AND', 'POTASSIAN', 'CHLORIDE', 'MAY', 'HAVE', 'TO', 'BE', 'EXAMINED', 'FOR', 'SILVER', 'LEAD', 'AND', 'BURIUM'] +1998-29454-0000-2157: ref=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0000-2157: hyp=['A', 'THOUSAND', 'BLESSINGS', 'FROM', 'A', 'GRATEFUL', 'HEART'] +1998-29454-0001-2158: ref=['PERUSAL', 'SAID', 'THE', 'PAWNBROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PERNOUNCE', 'IT'] +1998-29454-0001-2158: hyp=['PERUSAL', 'SET', 'UPON', 'BROKER', "THAT'S", 'THE', 'WAY', 'TO', 'PRONOUNCE', 'IT'] +1998-29454-0002-2159: ref=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THAT', 'TREASURE', 'IS', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'UNLESS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAS', 'A', 'SECRET', 'PANEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0002-2159: hyp=['HIS', 'BOOKS', 'TOLD', 'HIM', 'THE', 'TREASURES', 'BEST', 'HIDDEN', 'UNDER', 'LOOSE', 'BOARDS', 'AND', 'AS', 'OF', 'COURSE', 'YOUR', 'HOUSE', 'HAD', 'A', 'SECRET', 'PANEL', 'WHICH', 'HIS', 'HAD', 'NOT'] +1998-29454-0003-2160: ref=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRUMBLY', 'SURFACE', 'OF', 'THE', 'LATH', 'AND', 'PLASTER'] +1998-29454-0003-2160: hyp=['HE', 'GOT', 'IT', 'UP', 'AND', 'PUSHED', 'HIS', 'TREASURES', 'AS', 'FAR', 'IN', 'AS', 'HE', 'COULD', 'ALONG', 'THE', 'ROUGH', 'CRAMBLY', 'SURFACE', 'OF', 'THE', 'LAST', 'AND', 'PLASTER'] +1998-29454-0004-2161: ref=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SLIGHTLY', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 'HALFPENNY', 'AND', 'LIMPED', 'OFF', 'OBEDIENTLY'] +1998-29454-0004-2161: hyp=['WHEN', 'DICKIE', 'CAME', 'DOWN', 'HIS', 'AUNT', 'SAT', 'HE', 'SLAPPED', 'HIM', 'AND', 'HE', 'TOOK', 'THE', 'HALFPENNY', 'AND', 'LIMP', 'OF', 'OBEDIENTLY'] +1998-29454-0005-2162: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0005-2162: hyp=['HE', 'HAD', 'NEVER', 'SEEN', 'ONE', 'BEFORE', 'AND', 
'IT', 'INTERESTED', 'HIM', 'EXTREMELY'] +1998-29454-0006-2163: ref=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0006-2163: hyp=['HE', 'LOOKED', 'ABOUT', 'HIM', 'AND', 'KNEW', 'THAT', 'HE', 'DID', 'NOT', 'AT', 'ALL', 'KNOW', 'WHERE', 'HE', 'WAS'] +1998-29454-0007-2164: ref=["WHAT'S", 'UP', 'MATEY', 'LOST', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0007-2164: hyp=["WHAT'S", 'UP', 'MATE', 'ASKED', 'YOUR', 'WAY', 'DICKIE', 'EXPLAINED'] +1998-29454-0008-2165: ref=['WHEN', 'HE', 'SAID', 'AVE', 'I', 'BIN', 'ASLEEP'] +1998-29454-0008-2165: hyp=['WHEN', 'HE', 'SAID', 'HAVE', 'I', 'BEEN', 'ASLEEP'] +1998-29454-0009-2166: ref=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0009-2166: hyp=['HERE', 'WE', 'ARE', 'SAID', 'THE', 'MAN'] +1998-29454-0010-2167: ref=['NOT', 'EXACKLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0010-2167: hyp=['NOT', 'EXACTLY', 'SAID', 'THE', 'MAN', 'BUT', "IT'S", 'ALL', 'RIGHT'] +1998-29454-0011-2168: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MAN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKIE', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0011-2168: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'MEN', 'ASKED', 'DICKIE', 'IF', 'HE', 'COULD', 'WALK', 'A', 'LITTLE', 'WAY', 'AND', 'WHEN', 'DICKY', 'SAID', 'HE', 'COULD', 'THEY', 'SET', 'OUT', 'IN', 'THE', 'MOST', 'FRIENDLY', 'WAY', 'SIDE', 'BY', 'SIDE'] +1998-29454-0012-2169: ref=['AND', 'THE', 'TEA', 'AND', 'ALL', 'AN', 'THE', 'EGG'] +1998-29454-0012-2169: hyp=['AND', 'THE', 'TUNO', 'AND', 'THE', 'EGG'] +1998-29454-0013-2170: ref=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0013-2170: hyp=['AND', 'THIS', 'IS', 'THE', 'PRETTIEST', 'PLACE', 'EVER', 'I', 'SEE'] +1998-29454-0014-2171: ref=['I', 'SHALL', 'CATCH', 'IT', 'A', 'FAIR', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0014-2171: hyp=['I', 'SHALL', 'CATCH', 'IT', 'IF', 'HER', 'TREAT', 'AS', 'IT', 'IS'] +1998-29454-0015-2172: ref=['SHE', 'WAS', 'WAITIN', 'FOR', 'THE', 'WOOD', 'TO', 'BOIL', 'THE', 'KETTLE', 'WHEN', 'I', 'COME', 'OUT', 'MOTHER'] +1998-29454-0015-2172: hyp=['SHE', 'WAS', 'WAITING', 'FOR', 'THE', 'WOOD', 'TO', 'BOY', 'THE', 'CATTLE', 'WHEN', 'TO', 'COME', 'OUT', 'MOTHER'] +1998-29454-0016-2173: ref=["AIN'T", 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0016-2173: hyp=['AND', 'BAD', 'WHEN', "SHE'S", 'IN', 'A', 'GOOD', 'TEMPER'] +1998-29454-0017-2174: ref=['THAT', "AIN'T", 'WHAT', "SHE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0017-2174: hyp=['THAT', 'ANNE', 'BUT', "YE'LL", 'BE', 'IN', 'WHEN', 'YOU', 'GETS', 'BACK'] +1998-29454-0018-2175: ref=['I', 'GOT', 'TO', 'STICK', 'IT', 'SAID', 'DICKIE', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0018-2175: hyp=['I', 'GOT', 'A', 'STICKET', 'SAID', 'DICKY', 'SADLY', "I'D", 'BEST', 'BE', 'GETTING', 'HOME'] +1998-29454-0019-2176: ref=['I', "WOULDN'T", 'GO', 'OME', 'NOT', 'IF', 'I', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0019-2176: hyp=['I', "WOULDN'T", 'GO', 'HOME', 'NOT', 'IF', 'EVER', 'WAS', 'YOU', 'SAID', 'THE', 'MAN'] +1998-29454-0020-2177: ref=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0020-2177: hyp=['NO', 'SAID', 'DICKIE', 'OH', 'NO', 'NO', 'I', 'NEVER'] +1998-29454-0021-2178: ref=['I', "AIN'T", 'IT', 'YER', 'HAVE', 'I', 'LIKE', 'WHAT', 'YER', 'AUNT', 'DO'] 
+1998-29454-0021-2178: hyp=['AND', 'A', 'DEAR', 'HAVE', 'I', 'LIKE', 'WHAT', 'YOU', "AREN'T", 'TO'] +1998-29454-0022-2179: ref=['WELL', "THAT'LL", 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MAN', 'I', 'AM'] +1998-29454-0022-2179: hyp=['WELL', 'THOU', 'SHOW', 'YOU', 'THE', 'SORT', 'OF', 'MEN', 'I', 'AM'] +1998-29454-0023-2180: ref=['THE', "MAN'S", 'MANNER', 'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0023-2180: hyp=['THE', "MAN'S", 'MANNER', 'WAS', 'SO', 'KIND', 'AND', 'HEARTY', 'THE', 'WHOLE', 'ADVENTURE', 'WAS', 'SO', 'WONDERFUL', 'AND', 'NEW', 'IS', 'IT', 'COUNTRY', 'WHERE', 'YOU', 'GOING'] +1998-29454-0024-2181: ref=['THE', 'SUN', 'SHOT', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'IN', 'THE', 'HEDGE'] +1998-29454-0024-2181: hyp=['THE', 'SUN', 'HAD', 'LONG', 'GOLDEN', 'BEAMS', 'THROUGH', 'THE', 'GAPS', 'AND', 'THE', 'HEDGE'] +1998-29454-0025-2182: ref=['A', 'BIRD', 'PAUSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0025-2182: hyp=['A', 'BIRD', 'PASSED', 'IN', 'ITS', 'FLIGHT', 'ON', 'A', 'BRANCH', 'QUITE', 'CLOSE', 'AND', 'CLUNG', 'THERE', 'SWAYING'] +1998-29454-0026-2183: ref=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'A', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0026-2183: hyp=['HE', 'TOOK', 'OUT', 'OF', 'HIS', 'POCKET', 'A', 'NEW', 'ENVELOPE', 'AND', 'NEW', 'SHEET', 'OF', 'PAPER', 'AND', 'A', 'NEW', 'PENCIL', 'READY', 'SHARPENED', 'BY', 'MACHINERY'] +1998-29454-0027-2184: ref=['AN', 'I', 'ASKS', 'YOU', 'LET', 'ME', 'COME', 'ALONGER', 'YOU', 'GOT', 'THAT'] +1998-29454-0027-2184: hyp=['AND', 'I', 'ASK', 'YOU', 'LET', 'ME', 'COME', 'ALONG', 'ARE', 'YOU', 'GOT', 'THAT'] +1998-29454-0028-2185: ref=['GET', 'IT', 'WROTE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0028-2185: hyp=['GERT', 'RODE', 'DOWN', 'THEN', 'DONE'] +1998-29454-0029-2186: ref=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0029-2186: hyp=['THEN', 'HE', 'FOLDED', 'IT', 'AND', 'PUT', 'IT', 'IN', 'HIS', 'POCKET'] +1998-29454-0030-2187: ref=['NOW', "WE'RE", 'SQUARE', 'HE', 'SAID'] +1998-29454-0030-2187: hyp=['NOW', 'WE', 'ARE', 'SQUARE', 'HE', 'SAID'] +1998-29454-0031-2188: ref=['THEY', 'COULD', 'PUT', 'A', 'MAN', 'AWAY', 'FOR', 'LESS', 'THAN', 'THAT'] +1998-29454-0031-2188: hyp=['THEY', 'COULD', 'PUT', 'A', 'MEN', 'AWAY', 'FOR', 'US', 'THAN', 'THAT'] +1998-29454-0032-2189: ref=['I', 'SEE', 'THAT', 'THERE', 'IN', 'A', 'BOOK', 'SAID', 'DICKIE', 'CHARMED'] +1998-29454-0032-2189: hyp=['I', 'SEE', 'THAT', 'THEN', 'A', 'BOOK', 'SAID', 'DICKET', 'CHUMMED'] +1998-29454-0033-2190: ref=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'I', 'WUNNERED', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0033-2190: hyp=['HE', 'REWARD', 'THE', 'WAKE', 'THE', 'LAST', 'OF', 'THE', 'ENGLISH', 'AND', 'A', 'ONE', 'AT', 'WHAT', 'IT', 'STOOD', 'FOR'] +1998-29454-0034-2191: ref=['WILD', 'ONES', "AIN'T", 'ALF', 'THE', 'SIZE', 'I', 'LAY'] +1998-29454-0034-2191: hyp=['WHITE', 'ONES', 'AND', 'A', 'HALF', 'SIZE', 'I', 'LAY'] +1998-29454-0035-2192: ref=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0035-2192: hyp=['ADVENTURES', 'I', 'SHOULD', 'THINK', 'SO'] +1998-29454-0036-2193: ref=['AH', 'SAID', 'DICKIE', 'AND', 'A', 'FULL', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0036-2193: hyp=['AH', 
'SAID', 'DICKY', 'AND', 'A', 'FOOT', 'SILENCE', 'FELL', 'BETWEEN', 'THEM'] +1998-29454-0037-2194: ref=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MUD', 'OFF', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0037-2194: hyp=['THAT', 'WAS', 'CHARMING', 'BUT', 'IT', 'WAS', 'PLEASANT', 'TOO', 'TO', 'WASH', 'THE', 'MATTER', 'ON', 'THE', 'WET', 'GRASS'] +1998-29454-0038-2195: ref=['DICKIE', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] +1998-29454-0038-2195: hyp=['DICKY', 'ALWAYS', 'REMEMBERED', 'THAT', 'MOMENT'] +1998-29454-0039-2196: ref=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', "REG'LER", 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0039-2196: hyp=['SO', 'YOU', 'SHALL', 'SAID', 'MISTER', 'BEALE', 'A', 'REGULAR', 'WASH', 'ALL', 'OVER', 'THIS', 'VERY', 'NIGHT', 'I', 'ALWAYS', 'LIKE', 'A', 'WASH', 'MESELF'] +1998-29454-0040-2197: ref=['SOME', 'BLOKES', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0040-2197: hyp=['SOME', 'LOSS', 'THINK', 'IT', 'PAYS', 'TO', 'BE', 'DIRTY', 'BUT', 'IT', "DON'T"] +1998-29454-0041-2198: ref=['IF', "YOU'RE", 'CLEAN', 'THEY', 'SAY', 'HONEST', 'POVERTY', 'AN', 'IF', "YOU'RE", 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0041-2198: hyp=['IF', 'YO', 'CLEAN', 'THEY', 'SAY', 'I', 'DISPOVERTY', 'AN', 'IF', 'YO', 'DIRTY', 'THEY', 'SAY', 'SERVE', 'YOU', 'RIGHT'] +1998-29454-0042-2199: ref=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0042-2199: hyp=['YOU', 'ARE', 'GOOD', 'SAID', 'DICKIE', 'I', 'DO', 'LIKE', 'YOU'] +1998-29454-0043-2200: ref=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'OW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0043-2200: hyp=['I', 'KNOW', 'YOU', 'WILL', 'SAID', 'DICKIE', 'WITH', 'ENTHUSIASM', 'I', 'KNOW', 'HOW', 'GOOD', 'YOU', 'ARE'] +1998-29454-0044-2201: ref=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0044-2201: hyp=['BLESS', 'ME', 'SAID', 'MISTER', 'BEALE', 'UNCOMFORTABLY', 'WELL', 'THERE'] +1998-29454-0045-2202: ref=['STEP', 'OUT', 'SONNY', 'OR', "WE'LL", 'NEVER', 'GET', 'THERE', 'THIS', 'SIDE', 'CHRISTMAS'] +1998-29454-0045-2202: hyp=['SPATANI', 'ALBEA', 'NEVER', 'GET', 'THERE', 'THIS', 'SORT', 'OF', 'CHRISTMAS'] +1998-29454-0046-2203: ref=['WELL', "YOU'LL", 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29454-0046-2203: hyp=['WELL', 'YOU', 'KNOW', 'ALL', 'ABOUT', 'IT', 'PRESENTLY'] +1998-29455-0000-2232: ref=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THERE', 'WERE', 'LOUD', 'VOICED', 'ARGUMENTS', 'OR', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0000-2232: hyp=['THE', 'SINGING', 'AND', 'LAUGHING', 'WENT', 'ON', 'LONG', 'AFTER', 'HE', 'HAD', 'FALLEN', 'ASLEEP', 'AND', 'IF', 'LATER', 'IN', 'THE', 'EVENING', 'THEY', 'WERE', 'ALL', 'OUTWARDS', 'ARGUMENTS', 'OR', 'A', 'QUARRELS', 'EVEN', 'DICKIE', 'DID', 'NOT', 'HEAR', 'THEM'] +1998-29455-0001-2233: ref=["WHAT'S", 'ALL', 'THAT', 'THERE', 'DICKIE', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'KNOBBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 'TIED', 'ON', 'TO', 'THE', "PERAMBULATOR'S", 'FRONT'] +1998-29455-0001-2233: hyp=["WHAT'S", 'ON', 'THAT', 'THERE', 'DICKY', 'ASKED', 'POINTING', 'TO', 'THE', 'ODD', 'NOBLY', 'BUNDLES', 'OF', 'ALL', 'SORTS', 'AND', 'SHAPES', 'TIED', 'ON', 'TO', 'THE', 'PRIME', 
"RELATOR'S", 'FRONT'] +1998-29455-0002-2234: ref=['TELL', 'YER', 'WHAT', 'MATE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', "I'D", 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0002-2234: hyp=['TELL', 'YOU', 'WHAT', 'MADE', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'I', 'TOOK', 'A', 'FANCY', 'TO', 'YOU'] +1998-29455-0003-2235: ref=['SWELP', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0003-2235: hyp=['SWAB', 'ME', 'HE', 'SAID', 'HELPLESSLY'] +1998-29455-0004-2236: ref=['OH', 'LOOK', 'SAID', 'DICKIE', 'THE', 'FLOWERS'] +1998-29455-0004-2236: hyp=['OH', 'LOOK', 'SAID', 'DICKY', 'THE', 'FLOWERS'] +1998-29455-0005-2237: ref=["THEY'RE", 'ONLY', 'WEEDS', 'SAID', 'BEALE'] +1998-29455-0005-2237: hyp=['THEY', 'ONLY', 'READS', 'SAID', 'BEALE'] +1998-29455-0006-2238: ref=['BUT', 'I', 'SHALL', 'HAVE', 'THEM', 'WHILE', "THEY'RE", 'ALIVE', 'SAID', 'DICKIE', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PAWNBROKER', 'ABOUT', 'THE', 'MOONFLOWERS'] +1998-29455-0006-2238: hyp=['BUT', 'I', 'SHOULD', 'HAVE', 'THEM', 'WHETHER', 'ALIVE', 'SAID', 'DICKY', 'AS', 'HE', 'HAD', 'SAID', 'TO', 'THE', 'PAWNBROKER', 'BUT', 'THE', 'MOONFLOWERS'] +1998-29455-0007-2239: ref=['HI', 'THERE', 'GOES', 'A', 'RABBIT'] +1998-29455-0007-2239: hyp=['AY', 'THERE', 'WAS', 'A', 'RABBIT'] +1998-29455-0008-2240: ref=['SEE', 'IM', 'CROST', 'THE', 'ROAD', 'THERE', 'SEE', 'HIM'] +1998-29455-0008-2240: hyp=['SEEM', 'QUEST', 'ROAD', 'THERE', 'SEEM'] +1998-29455-0009-2241: ref=['HOW', 'BEAUTIFUL', 'SAID', 'DICKIE', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0009-2241: hyp=['HOW', 'BEAUTIFUL', 'SAID', 'DICKY', 'WRIGGLING', 'WITH', 'DELIGHT'] +1998-29455-0010-2242: ref=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0010-2242: hyp=['THIS', 'LIFE', 'OF', 'THE', 'RABBIT', 'AS', 'DESCRIBED', 'BY', 'MISTER', 'BEALE', 'WAS', 'THE', "CHILD'S", 'FIRST', 'GLIMPSE', 'OF', 'FREEDOM', "I'D", 'LIKE', 'TO', 'BE', 'A', 'RABBIT'] +1998-29455-0011-2243: ref=["OW'M", 'I', 'TO', 'WHEEL', 'THE', 'BLOOMIN', 'PRAM', 'IF', 'YOU', 'GOES', 'ON', 'LIKE', 'AS', 'IF', 'YOU', 'WAS', 'A', 'BAG', 'OF', 'EELS'] +1998-29455-0011-2243: hyp=['AM', 'I', 'TO', 'BE', 'AT', 'THE', 'ROOM', 'IN', 'PEM', 'IF', 'YOUR', 'GOES', 'ON', 'LIKE', 'AS', 'IF', 'YOU', 'WAS', 'A', 'PEG', 'OF', 'EELS'] +1998-29455-0012-2244: ref=['I', 'LIKE', 'YOU', 'NEXTER', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXTER', 'NEXT', 'DOOR'] +1998-29455-0012-2244: hyp=['I', 'LIKE', 'YOU', 'NEXT', 'TO', 'MY', 'OWN', 'DADDY', 'AND', 'MISTER', 'BAXT', 'THE', 'NEXT', 'DOOR'] +1998-29455-0013-2245: ref=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0013-2245: hyp=["THAT'S", 'ALL', 'RIGHT', 'SAID', 'MISTER', 'BEALE', 'AWKWARDLY'] +1998-29455-0014-2246: ref=['DICKIE', 'QUICK', 'TO', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0014-2246: hyp=['DICKY', 'QUICKLY', 'IMITATE', 'TOUCHED', 'HIS'] +1998-29455-0015-2247: ref=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0015-2247: hyp=['POOR', 'LITTLE', 'MAN', 'SAID', 'THE', 'LADY', 'YOU', 'MISS', 'YOUR', 'MOTHER', "DON'T", 'YOU'] +1998-29455-0016-2248: ref=['OH', 'WELL', 'DONE', 'LITTLE', 'UN', 'SAID', 'MISTER', 'BEALE', 'TO', 'HIMSELF'] +1998-29455-0016-2248: hyp=['OH', 'WELL', 'DONE', 'LITTLE', 'ONE', 'SAID', 'MISTER', 'BEECH', 'TO', 'HIMSELF'] +1998-29455-0017-2249: ref=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 
'BY', 'A', 'PENNY', 'AND', 'OH', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0017-2249: hyp=['THE', 'TWO', 'TRAVELLERS', 'WERE', 'LEFT', 'FACING', 'EACH', 'OTHER', 'THE', 'RICHER', 'BY', 'A', 'PENNY', 'AND', 'O', 'WONDERFUL', 'GOOD', 'FORTUNE', 'A', 'WHOLE', 'HALF', 'CROWN'] +1998-29455-0018-2250: ref=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', "ERE'S", 'THE', 'STEEVER'] +1998-29455-0018-2250: hyp=['NO', 'I', 'NEVER', 'SAID', 'DICKIE', 'YES', 'THE', 'STEVER'] +1998-29455-0019-2251: ref=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 'BEALE', 'RADIANT', 'WITH', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'A', 'KID', 'DONE'] +1998-29455-0019-2251: hyp=['YOU', 'STICK', 'TO', 'THAT', 'SAID', 'BEARD', 'RADIANT', 'WAS', 'DELIGHT', "YOU'RE", 'A', 'FAIR', 'MASTERPIECE', 'YOU', 'ARE', 'YOU', 'EARNED', 'IT', 'HONEST', 'IF', 'EVER', 'KIT', 'DONE'] +1998-29455-0020-2252: ref=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0020-2252: hyp=['THEY', 'WENT', 'ON', 'UP', 'THE', 'HILL', 'AS', 'HAPPY', 'AS', 'ANY', 'ONE', 'NEED', 'WISH', 'TO', 'BE'] +1998-29455-0021-2253: ref=['PLEASE', 'DO', 'NOT', 'BE', 'TOO', 'SHOCKED'] +1998-29455-0021-2253: hyp=['PLEASE', "DON'T", 'REPEAT', 'TOO', 'SHOCKED'] +1998-29455-0022-2254: ref=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0022-2254: hyp=['REMEMBER', 'THAT', 'NEITHER', 'OF', 'THEM', 'KNEW', 'ANY', 'BETTER'] +1998-29455-0023-2255: ref=['TO', 'THE', 'ELDER', 'TRAMP', 'LIES', 'AND', 'BEGGING', 'WERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0023-2255: hyp=['TO', 'THE', 'OTHER', 'TRAMP', 'LIES', 'IN', 'PEGGING', 'WHERE', 'NATURAL', 'MEANS', 'OF', 'LIVELIHOOD'] +1998-29455-0024-2256: ref=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WITH', 'THE', 'GREEN', 'CURTAINS', 'URGED', 'DICKIE'] +1998-29455-0024-2256: hyp=['BUT', 'YOU', 'SAID', 'THE', 'BED', 'WAS', 'THE', 'GREEN', 'CURTAINS', 'ADDED', 'THE', 'KEI'] +1998-29455-0025-2257: ref=['WHICH', 'THIS', "AIN'T", 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0025-2257: hyp=['WHICH', 'THIS', 'END', 'NOT', 'BY', 'NO', 'MEANS'] +1998-29455-0026-2258: ref=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WILD', 'THINGS', 'IN', 'THE', 'HEDGES', 'THE', 'BARKING', 'OF', 'DOGS', 'IN', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0026-2258: hyp=['THE', 'NIGHT', 'IS', 'FULL', 'OF', 'INTERESTING', 'LITTLE', 'SOUNDS', 'THAT', 'WILL', 'NOT', 'AT', 'FIRST', 'LET', 'YOU', 'SLEEP', 'THE', 'RUSTLE', 'OF', 'LITTLE', 'WHITE', 'THINGS', 'ON', 'THE', 'HATCHES', 'THE', 'BARKING', 'OF', 'DOGS', 'AND', 'DISTANT', 'FARMS', 'THE', 'CHIRP', 'OF', 'CRICKETS', 'AND', 'THE', 'CROAKING', 'OF', 'FROGS'] +1998-29455-0027-2259: ref=['THE', 'NEW', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 'FROM', 'WHOM', 'IT', 'WAS', 'WORTH', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEALE', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0027-2259: hyp=['THEN', 'YOU', 'GAME', 'OF', 'BEGGING', 'AND', 'INVENTING', 'STORIES', 'TO', 'INTEREST', 'THE', 'PEOPLE', 
'FROM', 'WHOM', 'IT', 'WAS', 'WORSE', 'WHILE', 'TO', 'BEG', 'WENT', 'ON', 'GAILY', 'DAY', 'BY', 'DAY', 'AND', 'WEEK', 'BY', 'WEEK', 'AND', 'DICKIE', 'BY', 'CONSTANT', 'PRACTICE', 'GREW', 'SO', 'CLEVER', 'AT', 'TAKING', 'HIS', 'PART', 'IN', 'THE', 'ACTING', 'THAT', 'MISTER', 'BEER', 'WAS', 'QUITE', 'DAZED', 'WITH', 'ADMIRATION'] +1998-29455-0028-2260: ref=['BLESSED', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 'AND', 'OVER', 'AGAIN'] +1998-29455-0028-2260: hyp=['BLEST', 'IF', 'I', 'EVER', 'SEE', 'SUCH', 'A', 'NIPPER', 'HE', 'SAID', 'OVER', 'AND', 'OVER', 'AGAIN'] +1998-29455-0029-2261: ref=['CLEVER', 'AS', 'A', 'TRAINDAWG', 'E', 'IS', 'AN', 'ALL', 'OUTER', 'IS', 'OWN', 'EAD'] +1998-29455-0029-2261: hyp=['CLEVER', 'AS', 'A', 'TRAIN', 'DOG', 'IS', 'IN', 'OUR', "OUTER'S", 'OWN', 'HEAD'] +1998-29455-0030-2262: ref=['I', "AIN'T", 'SURE', 'AS', 'I', "ADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ANDS', 'LIKE', 'YOU', 'JIM'] +1998-29455-0030-2262: hyp=['I', 'AM', 'SURE', 'AS', 'I', "HADN'T", 'BETTER', 'STICK', 'TO', 'THE', 'ROAD', 'AND', 'KEEP', 'AWAY', 'FROM', 'OLD', 'ENDS', 'LIKE', 'EUGEN'] +1998-29455-0031-2263: ref=['I', 'OPE', "E'S", 'CLEVER', 'ENOUGH', 'TO', 'DO', 'WOT', "E'S", 'TOLD', 'KEEP', 'IS', 'MUG', 'SHUT', "THAT'S", 'ALL'] +1998-29455-0031-2263: hyp=['IOPIUS', 'LOVE', 'ENOUGH', 'TO', 'DO', 'WHAT', 'HE', 'STOWED', 'HE', 'WAS', 'MUCH', 'AT', "THAT'S", 'ALL'] +1998-29455-0032-2264: ref=['IF', "E'S", 'STRAIGHT', "E'LL", 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', "AIN'T", "I'LL", 'DO', 'FOR', 'IM', 'SEE'] +1998-29455-0032-2264: hyp=['IF', 'HE', 'STRAYED', 'YOU', 'DO', 'FOR', 'ME', 'AND', 'IF', 'HE', "AIN'T", "I'LL", 'DO', 'FOR', 'HIM', 'SEE'] +1998-29455-0033-2265: ref=['SEE', 'THAT', 'BLOKE', 'JUST', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YUSS', 'SAID', 'DICKIE'] +1998-29455-0033-2265: hyp=['SEE', 'THAT', 'LOCTICE', 'NOW', 'SAID', 'MISTER', 'BEALE', 'YES', 'SAID', 'DICKIE'] +1998-29455-0034-2266: ref=['WELL', 'YOU', 'NEVER', 'SEE', 'IM'] +1998-29455-0034-2266: hyp=['WELL', 'YOU', 'NEVER', 'SEE', 'HIM'] +1998-29455-0035-2267: ref=['IF', 'ANY', 'ONE', 'ARSTS', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'IM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'IM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'IM'] +1998-29455-0035-2267: hyp=['IF', 'ANY', 'ONE', 'ASKED', 'YOU', 'IF', 'YOU', 'EVER', 'SEE', 'HIM', 'YOU', 'NEVER', 'SET', 'EYES', 'ON', 'HIM', 'IN', 'ALL', 'YOUR', 'BORN', 'NOT', 'TO', 'REMEMBER', 'HIM'] +1998-29455-0036-2268: ref=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEALE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0036-2268: hyp=['DICKIE', 'WAS', 'FULL', 'OF', 'QUESTIONS', 'BUT', 'MISTER', 'BEE', 'HAD', 'NO', 'ANSWERS', 'FOR', 'THEM'] +1998-29455-0037-2269: ref=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEALE'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0037-2269: hyp=['NOR', 'WAS', 'IT', 'SUNDAY', 'ON', 'WHICH', 'THEY', 'TOOK', 'A', 'REST', 'AND', 'WASHED', 'THEIR', 'SHIRTS', 'ACCORDING', 'TO', 'MISTER', "BEAT'S", 'RULE', 'OF', 'LIFE'] +1998-29455-0038-2270: ref=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WHERE', 'THE', 'SKYLARKS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'VERY', 'STRONG', 'AND', 'GRAY'] +1998-29455-0038-2270: hyp=['THEY', 'DID', 'NOT', 'STAY', 'THERE', 'BUT', 
'WALKED', 'OUT', 'ACROSS', 'THE', 'DOWNS', 'WITH', 'THE', 'SKYLECKS', 'WERE', 'SINGING', 'AND', 'ON', 'A', 'DIP', 'OF', 'THE', 'DOWNS', 'CAME', 'UPON', 'GREAT', 'STONE', 'WALLS', 'AND', 'TOWERS', 'WHERE', 'STRONG', 'AND', 'GRAY'] +1998-29455-0039-2271: ref=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKIE'] +1998-29455-0039-2271: hyp=["WHAT'S", 'THAT', 'THERE', 'SAID', 'DICKY'] +2033-164914-0000-661: ref=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] +2033-164914-0000-661: hyp=['REPLIED', 'HE', 'OF', 'A', 'TRUTH', 'I', 'HEARD', 'HIM', 'NOT', 'AND', 'I', 'WOT', 'HIM', 'NOT', 'AND', 'FOLKS', 'ARE', 'ALL', 'SLEEPING'] +2033-164914-0001-662: ref=['BUT', 'SHE', 'SAID', 'WHOMSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0001-662: hyp=['BUT', 'SHE', 'SAID', 'WHOMSOEVER', 'THOU', 'SEEST', 'AWAKE', 'HE', 'IS', 'THE', 'RECITER'] +2033-164914-0002-663: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0002-663: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'ART', 'THOU', 'HE', 'WHO', 'REPEATED', 'POETRY', 'BUT', 'NOW', 'AND', 'MY', 'LADY', 'HEARD', 'HIM'] +2033-164914-0003-664: ref=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0003-664: hyp=['REJOINED', 'THE', 'EUNUCH', 'WHO', 'THEN', 'WAS', 'THE', 'RECITER', 'POINT', 'HIM', 'OUT', 'TO', 'ME'] +2033-164914-0004-665: ref=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0004-665: hyp=['BY', 'ALLAH', 'REPLIED', 'THE', 'FIREMAN', 'I', 'TELL', 'THEE', 'THE', 'TRUTH'] +2033-164914-0005-666: ref=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOTH', 'ZAU', 'AL', 'MAKAN'] +2033-164914-0005-666: hyp=['TELL', 'ME', 'WHAT', 'HAPPENED', 'QUOMAN'] +2033-164914-0006-667: ref=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0006-667: hyp=['WHAT', 'AILS', 'THEE', 'THEN', 'THAT', 'THOU', 'MUST', 'NEEDS', 'RECITE', 'VERSES', 'SEEING', 'THAT', 'WE', 'ARE', 'TIRED', 'OUT', 'WITH', 'WALKING', 'AND', 'WATCHING', 'AND', 'ALL', 'THE', 'FOLK', 'ARE', 'ASLEEP', 'FOR', 'THEY', 'REQUIRE', 'SLEEP', 'TO', 'REST', 'THEM', 'OF', 'THEIR', 'FATIGUE'] +2033-164914-0007-668: ref=['AND', 'HE', 'ALSO', 'IMPROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTICHS'] +2033-164914-0007-668: hyp=['AND', 'HE', 'ALSO', 'PROVISED', 'THE', 'TWO', 'FOLLOWING', 'DISTINCTS'] +2033-164914-0008-669: ref=['WHEN', 'NUZHAT', 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MIND', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 'HER', 'BROTHER', 'AND', 'THEIR', 'WHILOME', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'AT', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0008-669: hyp=['WHEN', "NUZHA'S", 'AL', 'ZAMAN', 'HEARD', 'THE', 'FIRST', 'IMPROVISATION', 'SHE', 'CALLED', 'TO', 'MINE', 'HER', 'FATHER', 'AND', 'HER', 'MOTHER', 'AND', 'HER', 'BROTHER', 'AND', 'THEIR', 'WILHELM', 'HOME', 'THEN', 'SHE', 'WEPT', 'AND', 'CRIED', 'TO', 'THE', 'EUNUCH', 'AND', 'SAID', 'TO', 'HIM', 'WOE', 'TO', 'THEE'] +2033-164914-0009-670: ref=['HE', 'WHO', 'RECITED', 'THE', 'FIRST', 'TIME', 
'HATH', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'I', 'HEARD', 'HIM', 'HARD', 'BY'] +2033-164914-0009-670: hyp=['HE', 'WHO', 'RECITED', 'THE', 'FIRST', 'TIME', 'HATH', 'RECITED', 'A', 'SECOND', 'TIME', 'AND', 'HEARD', 'HIM', 'HEART', 'BY'] +2033-164914-0010-671: ref=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0010-671: hyp=['BY', 'ALLAH', 'AN', 'THOU', 'FETCH', 'HIM', 'NOT', 'TO', 'ME', 'I', 'WILL', 'ASSUREDLY', 'ROUSE', 'THE', 'CHAMBERLAIN', 'ON', 'THEE', 'AND', 'HE', 'SHALL', 'BEAT', 'THEE', 'AND', 'CAST', 'THEE', 'OUT'] +2033-164914-0011-672: ref=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0011-672: hyp=['BUT', 'TAKE', 'THESE', 'HUNDRED', 'DINERS', 'AND', 'GIVE', 'THEM', 'TO', 'THE', 'SINGER', 'AND', 'BRING', 'HIM', 'TO', 'ME', 'GENTLY', 'AND', 'DO', 'HIM', 'NO', 'HURT'] +2033-164914-0012-673: ref=['RETURN', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0012-673: hyp=['RETURNED', 'QUICKLY', 'AND', 'LINGER', 'NOT'] +2033-164914-0013-674: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0013-674: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'THIRD', 'NIGHT'] +2033-164914-0014-675: ref=['BUT', 'THE', 'EUNUCH', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0014-675: hyp=['BUT', 'THE', 'EUNUCHS', 'SAID', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'TILL', 'THOU', 'SHOW', 'ME', 'WHO', 'IT', 'WAS', 'THAT', 'RECITED', 'THE', 'VERSES', 'FOR', 'I', 'DREAD', 'RETURNING', 'TO', 'MY', 'LADY', 'WITHOUT', 'HIM'] +2033-164914-0015-676: ref=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZAU', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'I', 'KNOW', 'HIM', 'NOT'] +2033-164914-0015-676: hyp=['NOW', 'WHEN', 'THE', 'FIREMAN', 'HEARD', 'THESE', 'WORDS', 'HE', 'FEARED', 'FOR', 'ZA', 'AL', 'MAKAN', 'AND', 'WEPT', 'WITH', 'EXCEEDING', 'WEEPING', 'AND', 'SAID', 'TO', 'THE', 'EUNUCH', 'BY', 'ALLAH', 'IT', 'WAS', 'NOT', 'I', 'AND', 'THEY', 'KNOW', 'HIM', 'NOT'] +2033-164914-0016-677: ref=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'MEET', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0016-677: hyp=['SO', 'GO', 'THOU', 'TO', 'THY', 'STATION', 'AND', 'IF', 'THOU', 'AGAIN', 'ANY', 'ONE', 'AFTER', 'THIS', 'HOUR', 'RECITING', 'AUGHT', 'OF', 'POETRY', 'WHETHER', 'HE', 'BE', 'NEAR', 'OR', 'FAR', 'IT', 'WILL', 'BE', 'I', 'OR', 'SOME', 'ONE', 'I', 'KNOW', 'AND', 'THOU', 'SHALT', 'NOT', 'LEARN', 'OF', 'HIM', 'BUT', 'BY', 'ME'] +2033-164914-0017-678: ref=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'A', 'ROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 
'MISTRESS', 'WITHOUT', 'TIDINGS'] +2033-164914-0017-678: hyp=['THEN', 'HE', 'KISSED', 'THE', "EUNUCH'S", 'HEAD', 'AND', 'SPAKE', 'HIM', 'FAIR', 'TILL', 'HE', 'WENT', 'AWAY', 'BUT', 'THE', 'CASTRATO', 'FETCHED', 'AROUND', 'AND', 'RETURNING', 'SECRETLY', 'CAME', 'AND', 'STOOD', 'BEHIND', 'THE', 'FIREMAN', 'FEARING', 'TO', 'GO', 'BACK', 'TO', 'HIS', 'MISTRESS', 'WITHOUT', 'HIDINGS'] +2033-164914-0018-679: ref=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'IGNOMY', "WHATE'ER", 'THE', 'BITTER', 'CUP', 'I', 'DRAIN', 'FAR', 'BE', 'FRO', 'ME', 'THAT', 'LAND', 'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0018-679: hyp=['I', 'SAY', 'WHAT', 'MADE', 'MY', 'IGNOMY', 'WHATEVER', 'THE', 'BITTER', 'CUPIED', 'DRAIN', 'FAR', 'BE', 'FROM', 'ME', 'THE', 'LAND', 'TO', 'FLEE', 'NOR', 'WILL', 'I', 'BOW', 'TO', 'THOSE', 'WHO', 'BLAME', 'AND', 'FOR', 'SUCH', 'LOVE', 'WOULD', 'DEAL', 'ME', 'SHAME'] +2033-164914-0019-680: ref=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZAU', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0019-680: hyp=['THEN', 'SAID', 'THE', 'EUNUCH', 'TO', 'ZA', 'AL', 'MAKAN', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'LORD'] +2033-164914-0020-681: ref=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0020-681: hyp=['O', 'MY', 'LORD', 'CONTINUED', 'THE', 'EUNUCH', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THAT', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164914-0021-682: ref=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRACIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURN', 'IN', 'WEAL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0021-682: hyp=['WE', 'WILL', 'DO', 'THEE', 'NO', 'UPRIGHT', 'O', 'MY', 'SON', 'NOR', 'WRONG', 'THEE', 'IN', 'AUGHT', 'BUT', 'OUR', 'OBJECT', 'IS', 'THAT', 'THOU', 'BEND', 'THY', 'GRECIOUS', 'STEPS', 'WITH', 'ME', 'TO', 'MY', 'MISTRESS', 'TO', 'RECEIVE', 'HER', 'ANSWER', 'AND', 'RETURNING', 'WHEEL', 'AND', 'SAFETY', 'AND', 'THOU', 'SHALT', 'HAVE', 'A', 'HANDSOME', 'PRESENT', 'AS', 'ONE', 'WHO', 'BRINGETH', 'GOOD', 'NEWS'] +2033-164914-0022-683: ref=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'ZAU', 'AL', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSES', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADY', 'IS', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THY', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164914-0022-683: hyp=['THEN', 'THE', 'EUNUCH', 'WENT', 'OUT', 'TO', 'ZAO', 'MAKAN', 'AND', 'SAID', 'TO', 'HIM', 'RECITE', 'WHAT', 'VERSEST', 'THOU', 'KNOWEST', 'FOR', 'MY', 'LADIES', 'HERE', 'HARD', 'BY', 'LISTENING', 'TO', 'THEE', 'AND', 'AFTER', 'I', 'WILL', 'ASK', 'THEE', 'OF', 'THY', 'NAME', 'AND', 'THINE', 'NATIVE', 'COUNTRY', 'AND', 'THY', 'CONDITION'] +2033-164915-0000-643: ref=['AND', 'ALSO', 'THESE'] +2033-164915-0000-643: hyp=['AND', 'ALSO', 'THESE'] +2033-164915-0001-644: ref=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 
'A', 'FAINTING', 'FIT'] +2033-164915-0001-644: hyp=['THEN', 'SHE', 'THREW', 'HERSELF', 'UPON', 'HIM', 'AND', 'HE', 'GATHERED', 'HER', 'TO', 'HIS', 'BOSOM', 'AND', 'THE', 'TWAIN', 'FELL', 'DOWN', 'IN', 'A', 'FAINTING', 'FIT'] +2033-164915-0002-645: ref=['WHEN', 'THE', 'EUNUCH', 'SAW', 'THIS', 'CASE', 'HE', 'WONDERED', 'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0002-645: hyp=['WHEN', 'THE', 'EUNUCHS', 'SAW', 'THESE', 'CAVES', 'HE', 'WONDERED', 'AT', 'THEM', 'AND', 'THROWING', 'OVER', 'THEM', 'SOMEWHAT', 'TO', 'COVER', 'THEM', 'WAITED', 'TILL', 'THEY', 'SHOULD', 'RECOVER'] +2033-164915-0003-646: ref=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'NUZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MASTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0003-646: hyp=['AFTER', 'A', 'WHILE', 'THEY', 'CAME', 'TO', 'THEMSELVES', 'AND', 'UZHAT', 'AL', 'ZAMAN', 'REJOICED', 'WITH', 'EXCEEDING', 'JOY', 'OPPRESSION', 'AND', 'DEPRESSION', 'LEFT', 'HER', 'AND', 'GLADNESS', 'TOOK', 'THE', 'MASTERY', 'OF', 'HER', 'AND', 'SHE', 'REPEATED', 'THESE', 'VERSES'] +2033-164915-0004-647: ref=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADAWI', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARRKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'A', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARRKAN'] +2033-164915-0004-647: hyp=['ACCORDINGLY', 'SHE', 'TOLD', 'HIM', 'ALL', 'THAT', 'HAD', 'COME', 'TO', 'HER', 'SINCE', 'THEIR', 'SEPARATION', 'AT', 'THE', 'KHAN', 'AND', 'WHAT', 'HAD', 'HAPPENED', 'TO', 'HER', 'WITH', 'THE', 'BADARI', 'HOW', 'THE', 'MERCHANT', 'HAD', 'BOUGHT', 'HER', 'OF', 'HIM', 'AND', 'HAD', 'TAKEN', 'HER', 'TO', 'HER', 'BROTHER', 'SHARKAN', 'AND', 'HAD', 'SOLD', 'HER', 'TO', 'HIM', 'HOW', 'HE', 'HAD', 'FREED', 'HER', 'AT', 'THE', 'TIME', 'OF', 'BUYING', 'HOW', 'HE', 'HAD', 'MADE', 'HER', 'MARRIAGE', 'CONTRACT', 'WITH', 'HER', 'AND', 'HAD', 'GONE', 'IN', 'TO', 'HER', 'AND', 'HOW', 'THE', 'KING', 'THEIR', 'SIRE', 'HAD', 'SENT', 'AND', 'ASKED', 'FOR', 'HER', 'FROM', 'SHARKAN'] +2033-164915-0005-648: ref=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0005-648: hyp=['BUT', 'NOW', 'GO', 'TO', 'THY', 'MASTER', 'AND', 'BRING', 'HIM', 'QUICKLY', 'TO', 'ME'] +2033-164915-0006-649: ref=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRTHING', 'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0006-649: hyp=['THE', 'CHAMBERLAIN', 'CALLED', 'THE', 'CASTRATO', 'AND', 'CHARGED', 'HIM', 'TO', 'DO', 'ACCORDINGLY', 'SO', 'HE', 'REPLIED', 'I', 'HEAR', 'AND', 'I', 
'OBEY', 'AND', 'HE', 'TOOK', 'HIS', 'PAGES', 'WITH', 'HIM', 'AND', 'WENT', 'OUT', 'IN', 'SEARCH', 'OF', 'THE', 'STOCKER', 'TILL', 'HE', 'FOUND', 'HIM', 'IN', 'THE', 'REAR', 'OF', 'THE', 'CARAVAN', 'GIRDING', 'HIS', 'ASS', 'AND', 'PREPARING', 'FOR', 'FLIGHT'] +2033-164915-0007-650: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOKER', 'GIRTHED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'OH', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] +2033-164915-0007-650: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'THE', 'STOCKER', 'GIRDED', 'HIS', 'ASS', 'FOR', 'FLIGHT', 'AND', 'BESPAKE', 'HIMSELF', 'SAYING', 'O', 'WOULD', 'I', 'KNEW', 'WHAT', 'IS', 'BECOME', 'OF', 'HIM'] +2033-164915-0008-651: ref=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'ET', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0008-651: hyp=['I', 'BELIEVE', 'HE', 'HATH', 'DENOUNCED', 'ME', 'TO', 'THE', 'EUNUCH', 'HENCE', 'THESE', 'PAGES', 'AT', 'ABOUT', 'ME', 'AND', 'HE', 'HATH', 'MADE', 'ME', 'AN', 'ACCOMPLICE', 'IN', 'HIS', 'CRIME'] +2033-164915-0009-652: ref=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THESE', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0009-652: hyp=['WHY', 'DIDST', 'THOU', 'SAY', 'I', 'NEVER', 'REPEATED', 'THIS', 'COUPLETS', 'NOR', 'DO', 'I', 'KNOW', 'WHO', 'REPEATED', 'THEM', 'WHEN', 'IT', 'WAS', 'THY', 'COMPANION'] +2033-164915-0010-653: ref=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BETIDE', 'THEE'] +2033-164915-0010-653: hyp=['BUT', 'NOW', 'I', 'WILL', 'NOT', 'LEAVE', 'THEE', 'BETWEEN', 'THIS', 'PLACE', 'AND', 'BAGHDAD', 'AND', 'WHAT', 'BETIDETH', 'THY', 'COMRADE', 'SHALL', 'BE', 'TIDE', 'THEE'] +2033-164915-0011-654: ref=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'COMING', 'ILLS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0011-654: hyp=['TWAS', 'AS', 'I', 'FEARED', 'THE', 'CARMINALS', 'DISCERNING', 'BUT', 'UNTO', 'ALLAH', 'WE', 'ARE', 'ALL', 'RETURNING'] +2033-164915-0012-655: ref=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'THE', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0012-655: hyp=['THEN', 'THE', 'EUNUCH', 'CRIED', 'UPON', 'HIS', 'PAGES', 'SAYING', 'TAKE', 'HIM', 'OFF', 'THE', 'ASS'] +2033-164915-0013-656: ref=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARRKAN', 'SON', 'OF', 'OMAR', 'BIN', 'AL', "NU'UMAN", 'LORD', 'OF', 'BAGHDAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0013-656: hyp=['AND', 'HE', 'ANSWERED', 'I', 'AM', 'THE', 'CHAMBERLAIN', 'OF', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'KING', 'SHARKAN', 'SONG', 'OF', 'OMAR', 'BIN', 'AL', 'NUMAN', 'LORD', 'OF', 'ADAD', 'AND', 'OF', 'THE', 'LAND', 'OF', 'KHORASAN', 'AND', 'I', 'BRING', 'TRIBUTE', 'AND', 'PRESENTS', 'FROM', 'HIM', 'TO', 'HIS', 'FATHER', 'IN', 'BAGHDAD'] +2033-164915-0014-657: ref=['SO', 'FARE', 'YE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFAL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'DANDAN'] +2033-164915-0014-657: hyp=['SOPHIA', 
'HE', 'FORWARDS', 'NO', 'HARM', 'SHALL', 'BEFALL', 'YOU', 'TILL', 'YOU', 'JOIN', 'HIS', 'GRAND', 'WAZIR', 'TAN'] +2033-164915-0015-658: ref=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0015-658: hyp=['THEN', 'HE', 'BADE', 'HIM', 'BE', 'SEATED', 'AND', 'QUESTIONED', 'HIM', 'AND', 'HE', 'REPLIED', 'THAT', 'HE', 'WAS', 'CHAMBERLAIN', 'TO', 'THE', 'EMIR', 'OF', 'DAMASCUS', 'AND', 'WAS', 'BOUND', 'TO', 'KING', 'OMAR', 'WITH', 'PRESENTS', 'AND', 'THE', 'TRIBUTE', 'OF', 'SYRIA'] +2033-164915-0016-659: ref=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARRKAN', 'AND', 'MAKE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0016-659: hyp=['SO', 'IT', 'WAS', 'AGREED', 'THAT', 'WE', 'GO', 'TO', 'DAMASCUS', 'AND', 'FETCH', 'THENCE', 'THE', "KING'S", 'SON', 'SHARKAN', 'AND', 'MADE', 'HIM', 'SULTAN', 'OVER', 'HIS', "FATHER'S", 'REALM'] +2033-164915-0017-660: ref=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'ZAU', 'AL', 'MAKAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIGHS', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HIJAZ', 'AND', 'NONE', 'WOTTETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164915-0017-660: hyp=['AND', 'AMONGST', 'THEM', 'WERE', 'SOME', 'WHO', 'WOULD', 'HAVE', 'CHOSEN', 'THE', 'CADET', 'THOU', 'A', 'MACAN', 'FOR', 'QUOTH', 'THEY', 'HIS', 'NAME', 'BE', 'LIGHT', 'OF', 'THE', 'PLACE', 'AND', 'HE', 'HATH', 'A', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'HIES', 'THE', 'DELIGHT', 'OF', 'THE', 'TIME', 'BUT', 'THEY', 'SET', 'OUT', 'FIVE', 'YEARS', 'AGO', 'FOR', 'AL', 'HI', 'JARS', 'AND', 'NONE', 'WHATETH', 'WHAT', 'IS', 'BECOME', 'OF', 'THEM'] +2033-164916-0000-684: ref=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WONDER', 'OF', 'WONDERS'] +2033-164916-0000-684: hyp=['SO', 'HE', 'TURNED', 'TO', 'THE', 'WAZIR', 'DANDAN', 'AND', 'SAID', 'TO', 'HIM', 'VERILY', 'YOUR', 'TALE', 'IS', 'A', 'WANDER', 'OF', 'WONDERS'] +2033-164916-0001-685: ref=['KNOW', 'O', 'CHIEF', 'WAZIR', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'THAT', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORETH', 'TO', 'YOU', 'ZAU', 'AL', 'MAKAN', 'AND', 'HIS', 'SISTER', 'NUZHAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0001-685: hyp=['NO', 'O', 'CHIEF', 'WAZIR', 'THAT', 'HERE', 'WHERE', 'YOU', 'HAVE', 'ENCOUNTERED', 'ME', 'ALLAH', 'HATH', 'GIVEN', 'YOU', 'REST', 'FROM', 'FATIGUE', 'AND', 'BRINGETH', 'YOU', 'YOUR', 'DESIRE', 'AFTER', 'THE', 'EASIEST', 'OF', 'FASHIONS', 'FOR', 'LET', 'HIS', 'ALMIGHTY', 'WILL', 'RESTORE', 'IT', 'TO', 'YOU', 'THOU', 'ARMANQUIN', 'AND', 'HE', 'SISTER', 'KNOWSAT', 'AL', 'ZAMAN', 'WHEREBY', 'WE', 'WILL', 'SETTLE', 'THE', 'MATTER', 'AS', 'WE', 'EASILY', 'CAN'] +2033-164916-0002-686: ref=['WHEN', 'THE', 'MINISTER', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 
'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFEL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0002-686: hyp=['WHEN', 'THE', 'MEANESTER', 'HEARD', 'THESE', 'WORDS', 'HE', 'REJOICED', 'WITH', 'GREAT', 'JOY', 'AND', 'SAID', 'O', 'CHAMBERLAIN', 'TELL', 'ME', 'THE', 'TALE', 'OF', 'THE', 'TWAIN', 'AND', 'WHAT', 'BEFELL', 'THEM', 'AND', 'THE', 'CAUSE', 'OF', 'THEIR', 'LONG', 'ABSENCE'] +2033-164916-0003-687: ref=['ZAU', 'AL', 'MAKAN', 'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THIS', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SET', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0003-687: hyp=['ZAWAIN', 'BOWED', 'HIS', 'HEAD', 'AWHILE', 'AND', 'THEN', 'SAID', 'I', 'ACCEPT', 'THE', 'POSITION', 'FOR', 'INDEED', 'THERE', 'WAS', 'NO', 'REFUSING', 'AND', 'HE', 'WAS', 'CERTIFIED', 'THAT', 'THE', 'CHAMBERLAIN', 'HAD', 'COUNSELLED', 'HIM', 'WELL', 'AND', 'WISELY', 'AND', 'SAID', 'TO', 'HIM', 'ON', 'THE', 'RIGHT', 'WAY'] +2033-164916-0004-688: ref=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARRKAN'] +2033-164916-0004-688: hyp=['THEN', 'HE', 'ADDED', 'O', 'MY', 'UNCLE', 'HOW', 'SHALL', 'I', 'DO', 'WITH', 'MY', 'BROTHER', 'SHARKAN'] +2033-164916-0005-689: ref=['AFTER', 'AWHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'FULL', 'TIDE', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0005-689: hyp=['AFTER', 'A', 'WHILE', 'THE', 'DUST', 'DISPERSED', 'AND', 'THERE', 'APPEARED', 'UNDER', 'IT', 'THE', 'ARMY', 'OF', 'BAGHDAD', 'AND', 'KHORASAN', 'A', 'CONQUERING', 'HOST', 'LIKE', 'THE', 'POOL', 'TIDE', 'SEA', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'TO', 'SAY', 'HER', 'PERMITTED', 'SAY'] +2033-164916-0006-690: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0006-690: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVENTY', 'EIGHTH', 'NIGHT'] +2033-164916-0007-691: ref=['AND', 'IN', 'IT', 'ALL', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0007-691: hyp=['ANY', 'NEAT', 'OR', 'REJOICED', 'AT', 'THE', 'ACCESSION', 'OF', 'THE', 'LIGHT', 'OF', 'THE', 'PLACE'] +2033-164916-0008-692: ref=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAU', 'AL', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRE', 'SANS', 'PEER'] +2033-164916-0008-692: hyp=['LASTLY', 'THE', 'MINISTER', 'WENT', 'IN', 'AND', 'KISSED', 'THE', 'GROUND', 'BEFORE', 'ZAO', 'MAKAN', 'WHO', 'ROSE', 'TO', 'MEET', 'HIM', 'SAYING', 'WELCOME', 'O', 'WAZIR', 'AND', 'SIRES', 'SONSPIER'] +2033-164916-0009-693: ref=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'A', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0009-693: hyp=['MOREOVER', 'THE', 'SULTAN', 'COMMANDED', 'HIS', 'WAZIR', 'DANDAN', 'CALL', 'AT', 'TEN', 'DAYS', 'HALT', 'OF', 'THE', 'ARMY', 'THAT', 'HE', 
'MIGHT', 'BE', 'PRIVATE', 'WITH', 'HIM', 'AND', 'LEARN', 'FROM', 'HIM', 'HOW', 'AND', 'WHEREFORE', 'HIS', 'FATHER', 'HAD', 'BEEN', 'SLAIN'] +2033-164916-0010-694: ref=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2033-164916-0010-694: hyp=['HE', 'THEN', 'REPAIRED', 'TO', 'THE', 'HEART', 'OF', 'THE', 'ENCAMPMENT', 'AND', 'ORDERED', 'THAT', 'THE', 'HOST', 'TO', 'HALT', 'TEN', 'DAYS'] +2414-128291-0000-2689: ref=['WHAT', 'HATH', 'HAPPENED', 'UNTO', 'ME'] +2414-128291-0000-2689: hyp=['WHAT', 'HATH', 'HAPPENED', 'TO', 'ME'] +2414-128291-0001-2690: ref=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WARM', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THE', 'NEIGHBOURHOOD'] +2414-128291-0001-2690: hyp=['HE', 'ASKED', 'HIMSELF', 'SOMETHING', 'WRONG', 'AND', 'LIVING', 'QUICKENETH', 'ME', 'IT', 'MUST', 'BE', 'IN', 'THAT', 'NEIGHBOURHOOD'] +2414-128291-0002-2691: ref=['WHEN', 'HOWEVER', 'ZARATHUSTRA', 'WAS', 'QUITE', 'NIGH', 'UNTO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'THAT', 'A', 'HUMAN', 'VOICE', 'SPAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KINE', 'AND', 'APPARENTLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0002-2691: hyp=['WHO', 'READ', 'HOWEVER', 'THEIR', 'TWO', 'STRAW', 'WAS', 'QUITE', 'NIGH', 'AND', 'TO', 'THEM', 'THEN', 'DID', 'HE', 'HEAR', 'PLAINLY', 'WITH', 'HUMAN', 'VOICE', 'TAKE', 'IN', 'THE', 'MIDST', 'OF', 'THE', 'KIND', 'AND', 'A', 'FRIENDLY', 'ALL', 'OF', 'THEM', 'HAD', 'TURNED', 'THEIR', 'HEADS', 'TOWARDS', 'THE', 'SPEAKER'] +2414-128291-0003-2692: ref=['WHAT', 'DO', 'I', 'HERE', 'SEEK'] +2414-128291-0003-2692: hyp=['FOR', 'DIEU', 'I', 'HERE', 'SEEK'] +2414-128291-0004-2693: ref=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', 'SEEKEST', 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0004-2693: hyp=['ANSWERED', 'HE', 'THE', 'SAME', 'THAT', 'THOU', "SEEK'ST", 'THOU', 'MISCHIEF', 'MAKER', 'THAT', 'IS', 'TO', 'SAY', 'HAPPINESS', 'UPON', 'EARTH'] +2414-128291-0005-2694: ref=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALREADY', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WERE', 'THEY', 'ABOUT', 'TO', 'GIVE', 'ME', 'THEIR', 'ANSWER'] +2414-128291-0005-2694: hyp=['FOR', 'I', 'TELL', 'THEE', 'THAT', 'I', 'HAVE', 'ALREAD', 'TALKED', 'HALF', 'A', 'MORNING', 'UNTO', 'THEM', 'AND', 'JUST', 'NOW', 'WHERE', 'THEY', 'WERE', 'TO', 'GIVE', 'ME', 'THE', 'ANSWER'] +2414-128291-0006-2695: ref=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICTION'] +2414-128291-0006-2695: hyp=['HE', 'WOULD', 'NOT', 'BE', 'RID', 'OF', 'HIS', 'AFFLICATION'] +2414-128291-0007-2696: ref=['WHO', 'HATH', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0007-2696: hyp=['WHO', 'HAD', 'NOT', 'AT', 'PRESENT', 'HIS', 'HEART', 'HIS', 'MOUTH', 'AND', 'HIS', 'EYES', 'FULL', 'OF', 'DISGUST'] +2414-128291-0008-2697: ref=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0008-2697: hyp=['THOU', 'ALSO', 'THOU', 'ALSO'] +2414-128291-0009-2698: ref=['BUT', 'BEHOLD', 'THESE', 'KINE'] +2414-128291-0009-2698: hyp=['MIGHT', 'BEHOLD', 'HIS', 'KIND'] +2414-128291-0010-2699: ref=['THE', 'KINE', 'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0010-2699: hyp=['THE', 'KIND', 'HOWEVER', 'GAZED', 'AT', 'IT', 'ALL', 'AND', 'WONDERED'] +2414-128291-0011-2700: ref=['WANTON', 'AVIDITY', 'BILIOUS', 'ENVY', 
'CAREWORN', 'REVENGE', 'POPULACE', 'PRIDE', 'ALL', 'THESE', 'STRUCK', 'MINE', 'EYE'] +2414-128291-0011-2700: hyp=['WANTON', 'ALDITY', 'BILIOUS', 'ENVY', 'CAREWORN', 'REVENGE', 'POPULOUS', 'PRIDE', 'ALL', 'DISTRACT', 'MIGHT', 'EYE'] +2414-128291-0012-2701: ref=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'THAT', 'THE', 'POOR', 'ARE', 'BLESSED'] +2414-128291-0012-2701: hyp=['IT', 'IS', 'NO', 'LONGER', 'TRUE', 'LITTLE', 'POOR', 'A', 'BLESSED'] +2414-128291-0013-2702: ref=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 'THE', 'KINE', 'AND', 'WHY', 'IS', 'IT', 'NOT', 'WITH', 'THE', 'RICH'] +2414-128291-0013-2702: hyp=['THE', 'KINGDOM', 'OF', 'HEAVEN', 'HOWEVER', 'IS', 'WITH', 'THE', 'KIND', 'AND', 'WHY', 'IS', 'IT', 'NOT', 'WITH', 'A', 'RICH'] +2414-128291-0014-2703: ref=['WHY', 'DOST', 'THOU', 'TEMPT', 'ME'] +2414-128291-0014-2703: hyp=['WHY', 'THOSE', 'THOU', 'TEMPT', 'ME'] +2414-128291-0015-2704: ref=['ANSWERED', 'THE', 'OTHER'] +2414-128291-0015-2704: hyp=['ANSWERED', 'HER'] +2414-128291-0016-2705: ref=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0016-2705: hyp=['THOU', 'KNOWEST', 'IT', 'THYSELF', 'BETTER', 'EVEN', 'THAN', 'I'] +2414-128291-0017-2706: ref=['THUS', 'SPAKE', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'PERSPIRED', 'WITH', 'HIS', 'WORDS', 'SO', 'THAT', 'THE', 'KINE', 'WONDERED', 'ANEW'] +2414-128291-0017-2706: hyp=['DOES', 'BEG', 'THE', 'PEACEFUL', 'ONE', 'AND', 'PUFFED', 'HIMSELF', 'AND', 'POISPIED', 'WITH', 'HIS', 'WORDS', 'TO', 'INTER', 'KIND', 'WONDERED', 'ANEW'] +2414-128291-0018-2707: ref=['THOU', 'DOEST', 'VIOLENCE', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUNT', 'WHEN', 'THOU', 'USEST', 'SUCH', 'SEVERE', 'WORDS'] +2414-128291-0018-2707: hyp=['THOU', 'DOST', 'VIOLENCE', 'TO', 'THYSELF', 'THOU', 'PREACHER', 'ON', 'THE', 'MOUND', 'AND', 'THOU', 'USEST', 'SUCH', 'SAVOUR', 'WORDS'] +2414-128291-0019-2708: ref=['THEY', 'ALSO', 'ABSTAIN', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0019-2708: hyp=['THEY', 'ALSO', 'ABSTAINED', 'FROM', 'ALL', 'HEAVY', 'THOUGHTS', 'WHICH', 'INFLATE', 'THE', 'HEART'] +2414-128291-0020-2709: ref=['WELL'] +2414-128291-0020-2709: hyp=['WELL'] +2414-128291-0021-2710: ref=['SAID', 'ZARATHUSTRA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MINE', 'ANIMALS', 'MINE', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEIR', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0021-2710: hyp=['SAID', 'GUESTRA', 'THOU', 'SHOULDST', 'ALSO', 'SEE', 'MINE', 'ANIMALS', 'MY', 'EAGLE', 'AND', 'MY', 'SERPENT', 'THEY', 'ARE', 'LIKE', 'DO', 'NOT', 'AT', 'PRESENT', 'EXIST', 'ON', 'EARTH'] +2414-128291-0022-2711: ref=['AND', 'TALK', 'TO', 'MINE', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0022-2711: hyp=['AND', 'TALKED', 'TO', 'MY', 'ANIMALS', 'OF', 'THE', 'HAPPINESS', 'OF', 'ANIMALS'] +2414-128291-0023-2712: ref=['NOW', 'HOWEVER', 'TAKE', 'LEAVE', 'AT', 'ONCE', 'OF', 'THY', 'KINE', 'THOU', 'STRANGE', 'ONE'] +2414-128291-0023-2712: hyp=['NOW', 'HOWEVER', 'THEY', 'LEAVE', 'IT', 'WAS', 'OF', 'THEIR', 'KIND', 'THOU', 'STRANGE', 'WORLD'] +2414-128291-0024-2713: ref=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0024-2713: hyp=['THOU', 'AMIABLE', 'ONE'] +2414-128291-0025-2714: ref=['FOR', 'THEY', 'ARE', 'THY', 'WARMEST', 'FRIENDS', 'AND', 'PRECEPTORS'] +2414-128291-0025-2714: hyp=['FOR', 'THEY', 'ARE', 'THY', 'WARMEST', 'FRIENDS', 'AND', 'PERCEPTORS'] +2414-128291-0026-2715: ref=['THOU', 'EVIL', 'FLATTERER'] +2414-128291-0026-2715: hyp=['THOU', 
'EVEN', 'SLACKER'] +2414-128292-0000-2618: ref=['WHITHER', 'HATH', 'MY', 'LONESOMENESS', 'GONE', 'SPAKE', 'HE'] +2414-128292-0000-2618: hyp=['WHITHER', 'HAD', 'MY', 'LONESOME', 'DISCOUR', 'SPAKE', 'HE'] +2414-128292-0001-2619: ref=['MY', 'SHADOW', 'CALLETH', 'ME'] +2414-128292-0001-2619: hyp=['MY', 'SHADOW', 'CAUGHT', 'ME'] +2414-128292-0002-2620: ref=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0002-2620: hyp=['WHAT', 'MATTER', 'ABOUT', 'MY', 'SHADOW'] +2414-128292-0003-2621: ref=['LET', 'IT', 'RUN', 'AFTER', 'ME', 'I', 'RUN', 'AWAY', 'FROM', 'IT'] +2414-128292-0003-2621: hyp=['NEKHLUD', 'TRUE', 'ENOUGH', 'TO', 'ME', 'I', 'RAN', 'AWAY', 'FROM', 'IT'] +2414-128292-0004-2622: ref=['THUS', 'SPAKE', 'ZARATHUSTRA', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0004-2622: hyp=['THUS', 'BEING', 'THEIR', 'TOO', 'STRIKE', 'TO', 'HIS', 'HEART', 'AND', 'RAN', 'AWAY'] +2414-128292-0005-2623: ref=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0005-2623: hyp=['VERILY', 'MY', 'FOLLY', 'HATH', 'GROWN', 'BIG', 'IN', 'THE', 'MOUNTAINS'] +2414-128292-0006-2624: ref=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', 'FOOLS', 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0006-2624: hyp=['NOW', 'DO', 'I', 'HEAR', 'SIX', 'OLD', "FOOD'S", 'LEGS', 'RATTLING', 'BEHIND', 'ONE', 'ANOTHER'] +2414-128292-0007-2625: ref=['BUT', 'DOTH', 'ZARATHUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'HIS', 'SHADOW'] +2414-128292-0007-2625: hyp=['BY', 'DIRTS', 'ARE', 'TOUSTRA', 'NEED', 'TO', 'BE', 'FRIGHTENED', 'BY', 'A', 'SHADOW'] +2414-128292-0008-2626: ref=['ALSO', 'METHINKETH', 'THAT', 'AFTER', 'ALL', 'IT', 'HATH', 'LONGER', 'LEGS', 'THAN', 'MINE'] +2414-128292-0008-2626: hyp=['ALSO', 'METHINK', 'IT', 'THAT', 'AFTER', 'ALL', 'IT', 'HAD', 'LONG', 'OR', 'LESS', 'THAN', 'MINE'] +2414-128292-0009-2627: ref=['FOR', 'WHEN', 'ZARATHUSTRA', 'SCRUTINISED', 'HIM', 'WITH', 'HIS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'AS', 'BY', 'A', 'SUDDEN', 'APPARITION', 'SO', 'SLENDER', 'SWARTHY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'DID', 'THIS', 'FOLLOWER', 'APPEAR'] +2414-128292-0009-2627: hyp=['FOR', 'WHEN', 'THEIR', 'DISTRESS', 'COGNIZED', 'HIM', 'IT', 'IS', 'GLANCE', 'HE', 'WAS', 'FRIGHTENED', 'ALBERT', 'A', 'CERTAIN', 'APPARITION', 'SO', 'SLENDER', 'SWALLTY', 'HOLLOW', 'AND', 'WORN', 'OUT', 'DID', 'HIS', 'FOLLOWER', 'APPEAR'] +2414-128292-0010-2628: ref=['ASKED', 'ZARATHUSTRA', 'VEHEMENTLY', 'WHAT', 'DOEST', 'THOU', 'HERE'] +2414-128292-0010-2628: hyp=['I', 'TAKE', 'TO', 'EXTRAVE', 'IMAGINE', 'WHAT', 'DOST', 'THOU', 'HEAR'] +2414-128292-0011-2629: ref=['AND', 'WHY', 'CALLEST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0011-2629: hyp=['AND', 'WHY', 'COLLARST', 'THOU', 'THYSELF', 'MY', 'SHADOW'] +2414-128292-0012-2630: ref=['THOU', 'ART', 'NOT', 'PLEASING', 'UNTO', 'ME'] +2414-128292-0012-2630: hyp=['THOU', 'ART', 'NOT', 'PLEASING', 'INTO', 'ME'] +2414-128292-0013-2631: ref=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0013-2631: hyp=['MUST', 'I', 'EVER', 'BE', 'ON', 'THE', 'WAY'] +2414-128292-0014-2632: ref=['O', 'EARTH', 'THOU', 'HAST', 'BECOME', 'TOO', 'ROUND', 'FOR', 'ME'] +2414-128292-0014-2632: hyp=['O', 'ART', 'THOU', 'HAST', 'BECOME', 'TO', 'ROUND', 'FOR', 'ME'] +2414-128292-0015-2633: ref=['WHEN', 'THE', 'DEVIL', 'CASTETH', 'HIS', 'SKIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0015-2633: hyp=['WITH', 'THE', 'DEVIL', 'CAST', 'AT', 'HIS', 'SKIN', 'DOTH', 'NOT', 'HIS', 'NAME', 'ALSO', 'FALL', 
'AWAY', 'IT', 'IS', 'ALSO', 'SKIN'] +2414-128292-0016-2634: ref=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0016-2634: hyp=['THE', 'DEVIL', 'HIMSELF', 'IS', 'PERHAPS', 'SKIN'] +2414-128292-0017-2635: ref=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0017-2635: hyp=['SOMETIMES', 'I', 'MEANT', 'TO', 'LIE', 'AND', 'BEHOLD'] +2414-128292-0018-2636: ref=['THEN', 'ONLY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] +2414-128292-0018-2636: hyp=['THEN', 'OLD', 'LADY', 'DID', 'I', 'HIT', 'THE', 'TRUTH'] +2414-128292-0019-2637: ref=['HOW', 'HAVE', 'I', 'STILL', 'INCLINATION'] +2414-128292-0019-2637: hyp=['HOW', 'HAIR', 'I', 'STILL', 'INCLINATION'] +2414-128292-0020-2638: ref=['HAVE', 'I', 'STILL', 'A', 'GOAL'] +2414-128292-0020-2638: hyp=['EH', 'I', 'STILL', 'A', 'GOLD'] +2414-128292-0021-2639: ref=['A', 'HAVEN', 'TOWARDS', 'WHICH', 'MY', 'SAIL', 'IS', 'SET'] +2414-128292-0021-2639: hyp=['A', 'HAIRY', 'DOOR', 'SPEECH', 'MY', "SAILOR'S", 'SAKE'] +2414-128292-0022-2640: ref=['FOR', 'IT', 'DO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'SOUGHT', 'BUT', 'HAVE', 'NOT', 'FOUND', 'IT'] +2414-128292-0022-2640: hyp=['FOR', 'IT', 'TOO', 'I', 'ASK', 'AND', 'SEEK', 'AND', 'HAVE', 'THOUGHT', 'IT', 'HATH', 'NOT', 'FOUND', 'IT'] +2414-128292-0023-2641: ref=['O', 'ETERNAL', 'EVERYWHERE', 'O', 'ETERNAL', 'NOWHERE', 'O', 'ETERNAL', 'IN', 'VAIN'] +2414-128292-0023-2641: hyp=['OITERNAL', 'EVERYWHERE', 'WHO', 'HAD', 'TURNED', 'OUT', 'NOWHERE', 'WHO', 'HAD', 'TURNED', 'OUT', 'IN', 'VAIN'] +2414-128292-0024-2642: ref=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0024-2642: hyp=['THOU', 'ART', 'MY', 'SHADOW'] +2414-128292-0025-2643: ref=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0025-2643: hyp=['SAID', 'HE', 'AT', 'LAST', 'SADLY'] +2414-128292-0026-2644: ref=['THY', 'DANGER', 'IS', 'NOT', 'SMALL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WANDERER'] +2414-128292-0026-2644: hyp=['THY', 'DANGER', 'HIS', 'PERCHED', 'ALL', 'THOU', 'FREE', 'SPIRIT', 'AND', 'WONDER'] +2414-128292-0027-2645: ref=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOY', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0027-2645: hyp=['THEY', 'SLEEP', 'QUIETLY', 'THEY', 'ENJOYED', 'THEIR', 'NEW', 'SECURITY'] +2414-128292-0028-2646: ref=['BEWARE', 'LEST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FAITH', 'CAPTURE', 'THEE', 'A', 'HARD', 'RIGOROUS', 'DELUSION'] +2414-128292-0028-2646: hyp=['BEWARE', 'LEST', 'IN', 'THE', 'END', 'A', 'NARROW', 'FIT', 'CAPTURE', 'THEE', 'A', 'HARD', 'RECKLESS', 'DELUSION'] +2414-128292-0029-2647: ref=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCETH', 'AND', 'TEMPTETH', 'THEE'] +2414-128292-0029-2647: hyp=['FOR', 'NOW', 'EVERYTHING', 'THAT', 'IS', 'NARROW', 'AND', 'FIXED', 'SEDUCE', 'IT', 'AND', 'TEMPTED', 'THEE'] +2414-128292-0030-2648: ref=['THOU', 'HAST', 'LOST', 'THY', 'GOAL'] +2414-128292-0030-2648: hyp=['THOU', 'HAST', 'LOST', 'DAGGULE'] +2414-128292-0031-2649: ref=['THOU', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'THOU', 'TIRED', 'BUTTERFLY'] +2414-128292-0031-2649: hyp=['THOUGH', 'POOR', 'ROVER', 'AND', 'RAMBLER', 'NOW', 'TIRED', 'BUT', 'TO', 'FLY'] +2414-128292-0032-2650: ref=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'AND', 'A', 'HOME', 'THIS', 'EVENING'] +2414-128292-0032-2650: hyp=['WILT', 'THOU', 'HAVE', 'A', 'REST', 'IN', 'THE', 'WHOLE', 'THIS', 'EVENING'] +2414-159411-0000-2653: ref=['ONCE', 'UPON', 'A', 'TIME', 'A', 'BRAHMAN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'HAD', 'BEEN', 'SHUT', 'UP', 
'BY', 'THE', 'VILLAGERS', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0000-2653: hyp=['ONCE', 'UPON', 'HER', 'TIME', 'A', 'BRAHMAN', 'WHO', 'WAS', 'WALKING', 'ALONG', 'THE', 'ROAD', 'CAME', 'UPON', 'AN', 'IRON', 'CAGE', 'IN', 'WHICH', 'A', 'GREAT', 'TIGER', 'AT', 'MONSHAT', 'UP', 'BY', 'THE', 'VILLAGES', 'WHO', 'CAUGHT', 'HIM'] +2414-159411-0001-2654: ref=['THE', 'BRAHMAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0001-2654: hyp=['THE', 'BRAMIAN', 'ANSWERED', 'NO', 'I', 'WILL', 'NOT', 'FOR', 'IF', 'I', 'LET', 'YOU', 'OUT', 'OF', 'THE', 'CAGE', 'YOU', 'WILL', 'EAT', 'ME'] +2414-159411-0002-2655: ref=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0002-2655: hyp=['OH', 'FATHER', 'OF', 'MERCY', 'ANSWERED', 'THE', 'TIGER', 'IN', 'TRUTH', 'THAT', 'I', 'WILL', 'NOT'] +2414-159411-0003-2656: ref=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'DRINK', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0003-2656: hyp=['I', 'WILL', 'NEVER', 'BE', 'SO', 'UNGRATEFUL', 'ONLY', 'LET', 'ME', 'OUT', 'THAT', 'I', 'MAY', 'BRING', 'SOME', 'WATER', 'AND', 'RETURN'] +2414-159411-0004-2657: ref=['THEN', 'THE', 'BRAHMAN', 'TOOK', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'DONE', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0004-2657: hyp=['AND', 'IN', 'THE', 'BRAM', 'INTO', 'PITY', 'ON', 'HIM', 'AND', 'OPENED', 'THE', 'CAGE', 'DOOR', 'BUT', 'NO', 'SOONER', 'HAD', 'HE', 'TURNED', 'SO', 'THAN', 'THE', 'TIGER', 'JUMPING', 'OUT', 'SAID', 'NOW', 'I', 'WILL', 'EAT', 'YOU', 'FIRST', 'AND', 'DRINK', 'THE', 'WATER', 'AFTERWARDS'] +2414-159411-0005-2658: ref=['SO', 'THE', 'BRAHMAN', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANYAN', 'TREE', 'AND', 'THE', 'BRAHMAN', 'SAID', 'TO', 'IT', 'BANYAN', 'TREE', 'BANYAN', 'TREE', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0005-2658: hyp=['SO', 'THE', 'BRAMID', 'AND', 'THE', 'TIGER', 'WALKED', 'ON', 'TILL', 'THEY', 'CAME', 'TO', 'A', 'BANDON', 'TREE', 'AND', 'THE', 'BRAMEN', 'SAID', 'TO', 'IT', 'BANION', 'TREE', 'BAN', 'AND', 'TREE', 'HERE', 'AND', 'GIVE', 'JOINTMENT'] +2414-159411-0006-2659: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BANYAN', 'TREE'] +2414-159411-0006-2659: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'BEN', 'TREE'] +2414-159411-0007-2660: ref=['THIS', 'TIGER', 'SAID', 'THE', 'BRAHMAN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HURT', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LET', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] +2414-159411-0007-2660: hyp=['DISTAGGER', 'SAID', 'DE', 'BRAHMAN', 'BEGGED', 'ME', 'TO', 'LET', 'HIM', 'OUT', 'OF', 'HIS', 'CAGE', 'TO', 'DRINK', 'A', 'LITTLE', 'WATER', 'AND', 'HE', 'PROMISED', 'NOT', 'TO', 'HIDE', 'ME', 'IF', 'I', 'DID', 'SO', 'BUT', 'NOW', 'THAT', 'I', 'HAVE', 'LEFT', 'HIM', 'OUT', 'HE', 'WISHES', 'TO', 'EAT', 'ME'] +2414-159411-0008-2661: ref=['IS', 'IT', 'JUST', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NO'] +2414-159411-0008-2661: hyp=['IT', 'IS', 'JEALOUS', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'I', 'KNOW'] +2414-159411-0009-2662: ref=['LET', 
'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'AN', 'UNGRATEFUL', 'RACE'] +2414-159411-0009-2662: hyp=['LATE', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'ARE', 'IN', 'UNGRATEFUL', 'RACE'] +2414-159411-0010-2663: ref=['SIR', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAHMAN', 'HEAR', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0010-2663: hyp=['SO', 'CAMEL', 'SIR', 'CAMEL', 'CRIED', 'THE', 'BRAMIN', 'HERE', 'AND', 'GIVE', 'JUDGMENT'] +2414-159411-0011-2664: ref=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0011-2664: hyp=['AT', 'A', 'LITTLE', 'DISTANCE', 'THEY', 'FOUND', 'A', 'BULLOCK', 'LYING', 'BY', 'THE', 'ROADSIDE'] +2414-159411-0012-2665: ref=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0012-2665: hyp=['IS', 'IT', 'FAIR', 'THAT', 'HE', 'SHOULD', 'DO', 'SO', 'OR', 'NOT'] +2414-159411-0013-2666: ref=['LET', 'THE', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0013-2666: hyp=['LATER', 'TIGER', 'EAT', 'THE', 'MAN', 'FOR', 'MEN', 'HAVE', 'NO', 'PITY'] +2414-159411-0014-2667: ref=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'HAD', 'GIVEN', 'JUDGMENT', 'AGAINST', 'THE', 'BRAHMAN', 'BUT', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'DETERMINED', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0014-2667: hyp=['THREE', 'OUT', 'OF', 'THE', 'SIX', 'IN', 'GIVING', 'JUDGMENT', 'AGAINST', 'THE', 'BRAHMAN', 'WHICH', 'STILL', 'HE', 'DID', 'NOT', 'LOSE', 'ALL', 'HOPE', 'AND', 'TO', 'TURN', 'MIND', 'TO', 'ASK', 'THE', 'OTHER', 'THREE'] +2414-159411-0015-2668: ref=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'JUDGMENT', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0015-2668: hyp=['ON', 'WHAT', 'MUST', 'I', 'GIVE', 'YOU', 'TELL', 'ME', 'ASKED', 'THE', 'EAGLE'] +2414-159411-0016-2669: ref=['THE', 'BRAHMAN', 'STATED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'THEY', 'CLIMB', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0016-2669: hyp=['THE', 'BRAM', 'IS', 'SUITED', 'THE', 'CASE', 'AND', 'THE', 'EAGLE', 'ANSWERED', 'WHENEVER', 'MEN', 'SEE', 'ME', 'THEY', 'TRY', 'TO', 'SHOOT', 'ME', 'DECLINE', 'THE', 'ROCKS', 'AND', 'STEAL', 'AWAY', 'MY', 'LITTLE', 'ONES'] +2414-159411-0017-2670: ref=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'THE', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMAN'] +2414-159411-0017-2670: hyp=['THEN', 'THE', 'TIGER', 'BEGAN', 'TO', 'ROAR', 'AND', 'SAID', 'JUDGMENT', 'OF', 'ALL', 'IS', 'AGAINST', 'YOU', 'O', 'BRAHMAN'] +2414-159411-0018-2671: ref=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLIGATOR', 'AND', 'THE', 'BRAHMAN', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVORABLE', 'VERDICT'] +2414-159411-0018-2671: hyp=['AFTER', 'THIS', 'THEY', 'SAW', 'AN', 'ALLIGATOR', 'AND', 'THE', 'BRAMMER', 'RELATED', 'THE', 'MATTER', 'TO', 'HIM', 'HOPING', 'FOR', 'A', 'MORE', 'FAVOURABLE', 'VERDICT'] +2414-159411-0019-2672: ref=['BUT', 'THE', 'ALLIGATOR', 'SAID', 'WHENEVER', 'I', 'PUT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', 'MEN', 'TORMENT', 'ME', 'AND', 'TRY', 'TO', 'KILL', 'ME'] +2414-159411-0019-2672: hyp=['WITH', 'THE', 'ADDIER', 'TO', 'THE', 'SUIT', 'WHENEVER', 'APPOINT', 'MY', 'NOSE', 'OUT', 'OF', 'THE', 'WATER', "MAYN'T", 'TOM', 'AND', 'ME', 'AND', 'TRIED', 'TO', 'KILL', 'ME'] +2414-159411-0020-2673: ref=['THE', 'BRAHMAN', 'GAVE', 'HIMSELF', 'UP', 'AS', 'LOST', 'BUT', 'AGAIN', 'HE', 
'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0020-2673: hyp=['NO', 'GRAMMEN', 'GAVE', 'HIMSELF', 'UP', 'AS', 'LOST', 'BUT', 'AGAIN', 'HE', 'PRAYED', 'THE', 'TIGER', 'TO', 'HAVE', 'PATIENCE', 'AND', 'LET', 'HIM', 'ASK', 'THE', 'OPINION', 'OF', 'THE', 'SIXTH', 'JUDGE'] +2414-159411-0021-2674: ref=['NOW', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0021-2674: hyp=['ON', 'THE', 'SIXTH', 'WAS', 'A', 'JACKAL'] +2414-159411-0022-2675: ref=['THE', 'BRAHMAN', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACKAL', 'UNCLE', 'JACKAL', 'SAY', 'WHAT', 'IS', 'YOUR', 'JUDGMENT'] +2414-159411-0022-2675: hyp=['THE', 'GRAMMAR', 'TOLD', 'HIS', 'STORY', 'AND', 'SAID', 'TO', 'HIM', 'UNCLE', 'JACKO', 'AND', 'WILL', 'JACK', 'HO', 'SAY', 'WHAT', 'IS', 'YOUR', 'JUDGMENT'] +2414-159411-0023-2676: ref=['SHOW', 'ME', 'THE', 'PLACE'] +2414-159411-0023-2676: hyp=['SHOW', 'ME', 'THE', 'PACE'] +2414-159411-0024-2677: ref=['WHEN', 'THEY', 'GOT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NOW', 'BRAHMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0024-2677: hyp=['AND', 'THE', 'COURT', 'THERE', 'THE', 'JACKAL', 'SAID', 'NABRAMAN', 'SHOW', 'ME', 'EXACTLY', 'WHERE', 'YOU', 'STOOD'] +2414-159411-0025-2678: ref=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'THE', 'JACKAL'] +2414-159411-0025-2678: hyp=['EXACTLY', 'THERE', 'WAS', 'IT', 'ASKED', 'JACO'] +2414-159411-0026-2679: ref=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'BRAHMAN'] +2414-159411-0026-2679: hyp=['EXACTLY', 'HERE', 'REPLIED', 'THE', 'PROMIN'] +2414-159411-0027-2680: ref=['WHERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0027-2680: hyp=['THERE', 'WAS', 'THE', 'TIGER', 'THEN'] +2414-159411-0028-2681: ref=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'TIGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0028-2681: hyp=['WHY', 'I', 'STOOD', 'SO', 'SAID', 'THE', 'DRAGGER', 'JUMPING', 'INTO', 'THE', 'CAGE', 'AND', 'MY', 'HEAD', 'WAS', 'ON', 'THIS', 'SIDE'] +2414-159411-0029-2682: ref=['VERY', 'GOOD', 'SAID', 'THE', 'JACKAL', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0029-2682: hyp=['VERY', 'GOOD', 'SAID', 'THE', 'JACK', 'HOPE', 'BUT', 'I', 'CANNOT', 'JUDGE', 'WITHOUT', 'UNDERSTANDING', 'THE', 'WHOLE', 'MATTER', 'EXACTLY'] +2414-159411-0030-2683: ref=['SHUT', 'AND', 'BOLTED', 'SAID', 'THE', 'BRAHMAN'] +2414-159411-0030-2683: hyp=['SHUT', 'AND', 'BOLTED', 'SAID', 'DEBRAMIN'] +2414-159411-0031-2684: ref=['THEN', 'SHUT', 'AND', 'BOLT', 'IT', 'SAID', 'THE', 'JACKAL'] +2414-159411-0031-2684: hyp=['THEN', 'SHUT', 'AND', 'BOLTED', 'SAID', 'TO', 'JACKAL'] +2414-159411-0032-2685: ref=['WHEN', 'THE', 'BRAHMAN', 'HAD', 'DONE', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TIGER'] +2414-159411-0032-2685: hyp=['WHEN', 'THE', 'BRAHMAN', 'HAD', 'TURNED', 'THIS', 'THE', 'JACKAL', 'SAID', 'OH', 'YOU', 'WICKED', 'AND', 'UNGRATEFUL', 'TYER'] +2414-159411-0033-2686: ref=['WHEN', 'THE', 'GOOD', 'BRAHMAN', 'OPENED', 'YOUR', 'CAGE', 'DOOR', 'IS', 'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'YOU', 'WOULD', 'MAKE'] +2414-159411-0033-2686: hyp=['WITH', 'A', 'GOOD', 'BRAHMAN', 'OPENED', 'YOU', 'CARED', 'DOOR', 'IS', 'TO', 'EAT', 'HIM', 'THE', 'ONLY', 'RETURN', 'HE', 'WOULD', 'MAKE'] +2414-159411-0034-2687: ref=['PROCEED', 'ON', 'YOUR', 'JOURNEY', 'FRIEND', 'BRAHMAN'] +2414-159411-0034-2687: hyp=['PROCEED', 'ON', 'YOUR', 
'JOURNEY', 'FRANJAMIN'] +2414-159411-0035-2688: ref=['YOUR', 'ROAD', 'LIES', 'THAT', 'WAY', 'AND', 'MINE', 'THIS'] +2414-159411-0035-2688: hyp=['HE', 'RULED', 'LIES', 'THAT', 'WAY', 'IN', 'MIND', 'THIS'] +2414-165385-0000-2651: ref=['THUS', 'ACCOMPLISHED', 'HE', 'EXCITED', 'THE', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'COQUETTE', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'FLUTTERING', 'COXCOMB', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 'CIVILIZED', 'MONKEY'] +2414-165385-0000-2651: hyp=['AND', "THERE'S", 'ACCOMPLISHED', 'HE', 'EXCITED', 'ADMIRATION', 'OF', 'EVERY', 'SILLY', 'POCKET', 'AND', 'THE', 'ENVY', 'OF', 'EVERY', 'REFLECTING', 'ACCOUNT', 'BUT', 'BY', 'ALL', 'YOUNG', 'GENTLEMEN', 'AND', 'LADIES', 'OF', 'UNDERSTANDING', 'HE', 'WAS', 'HEARTILY', 'DESPISED', 'AS', 'A', 'MERE', 'CIVILIZED', 'MONKEY'] +2414-165385-0001-2652: ref=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'SENTENCED', 'TO', 'INHABIT', 'THE', 'BODY', 'OF', 'THAT', 'FINICAL', 'GRINNING', 'AND', 'MISCHIEVOUS', 'LITTLE', 'MIMICK', 'WITH', 'FOUR', 'LEGS', 'WHICH', 'YOU', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2414-165385-0001-2652: hyp=['THAT', 'HIS', 'SOUL', 'MIGHT', 'AFTERWARDS', 'OCCUPY', 'SUCH', 'A', 'STATION', 'AS', 'WOULD', 'BE', 'MOST', 'SUITABLE', 'TO', 'HIS', 'CHARACTER', 'IT', 'WAS', 'INTENSE', 'TO', 'INHABIT', 'A', 'BODY', 'OF', 'THAT', 'PHYNICAL', 'GRINNING', 'AND', 'MACHIEVOUS', 'LITTLE', 'MIMIC', 'WITH', 'FULL', 'LEGS', 'WHICH', 'SHE', 'NOW', 'BEHOLD', 'BEFORE', 'YOU'] +2609-156975-0000-2367: ref=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0000-2367: hyp=['THEN', 'MOSES', 'WAS', 'AFRAID', 'AND', 'SAID', 'SURELY', 'THE', 'THING', 'IS', 'KNOWN'] +2609-156975-0001-2368: ref=['HOLD', 'ON', 'HOLD', 'FAST', 'HOLD', 'OUT', 'PATIENCE', 'IS', 'GENIUS'] +2609-156975-0001-2368: hyp=['OR', 'ON', 'HER', 'FAST', 'OH', 'DOUBT', 'PATENTS', 'AS', 'GENIUS'] +2609-156975-0002-2369: ref=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MAKES', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'US', 'DARE', 'TO', 'DO', 'OUR', 'DUTY', 'AS', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0002-2369: hyp=['LET', 'US', 'HAVE', 'FAITH', 'THAT', 'RIGHT', 'MATRON', 'MIGHT', 'AND', 'IN', 'THAT', 'FAITH', 'LET', 'STARED', 'TO', 'DO', 'OUR', 'DUTY', 'IF', 'WE', 'UNDERSTAND', 'IT', 'LINCOLN'] +2609-156975-0003-2370: ref=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0003-2370: hyp=['THE', 'EGYPTIAN', 'BACKGROUND', 'OF', 'THE', 'BONDAGE'] +2609-156975-0004-2371: ref=['EVERY', 'ONE', 'WHO', 'IS', 'TURBULENT', 'HAS', 'BEEN', 'FOUND', 'BY', 'KING', 'MERNEPTAH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'BIBLICAL', 'NARRATIVES', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 'HEBREWS', 'IN', 'EGYPT', 'IS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PICTURE', 'WHICH', 'THE', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'OF', 'THE', 'PERIOD'] +2609-156975-0004-2371: hyp=['EVERY', 'ONE', 'WHOSE', 'TREBRANT', 'HAS', 'BEEN', 'FOUND', 'BY', 'GIMERNETTE', 'PATH', 'THE', 'TESTIMONY', 'OF', 'THE', 'OLDEST', 'PABRICAL', 'NARRATIVE', 'REGARDING', 'THE', 'SOJOURN', 'OF', 'THE', 'HEBREWS', 'IN', 'EGYPT', 'IS', 'ALSO', 'IN', 'PERFECT', 'ACCORD', 'WITH', 'THE', 'PITCHER', 'WHICH', 'IT', 'CONTEMPORARY', 'EGYPTIAN', 'INSCRIPTIONS', 'GIVE', 'THIS', 'PERIOD'] 
+2609-156975-0005-2372: ref=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFERENCE', 'TO', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0005-2372: hyp=['THE', 'ABSENCE', 'OF', 'DETAILED', 'REFERENCE', 'TO', 'THE', 'HEBREWS', 'IS', 'THEREFORE', 'PERFECTLY', 'NATURAL'] +2609-156975-0006-2373: ref=['IT', 'SEEMS', 'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'OF', 'THE', 'TRIBES', 'WHICH', 'ULTIMATELY', 'COALESCED', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0006-2373: hyp=['IT', 'SEEMS', 'PROBABLE', 'THAT', 'NOT', 'ALL', 'BUT', 'ONLY', 'PART', 'IN', 'THE', 'TRIBES', 'WHICH', 'ULTIMATE', 'COLLETS', 'INTO', 'THE', 'HEBREW', 'NATION', 'FOUND', 'THEIR', 'WAY', 'TO', 'EGYPT'] +2609-156975-0007-2374: ref=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THE', 'TRADITIONAL', 'FATHER', 'OF', 'EPHRAIM', 'AND', 'MANASSEH', 'IMPLY', 'THAT', 'THESE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'IN', 'THIS', 'OPENING', 'SCENE', 'IN', "ISRAEL'S", 'HISTORY'] +2609-156975-0007-2374: hyp=['THE', 'STORIES', 'REGARDING', 'JOSEPH', 'THEIR', 'TRADITIONAL', 'FOUNDER', 'THAT', 'FROM', 'IN', 'MANETTE', 'SE', 'INCLINE', 'THAT', 'THESE', 'STRONG', 'CENTRAL', 'TRIBES', 'POSSIBLY', 'TOGETHER', 'WITH', 'THE', 'SOUTHERN', 'TRINES', 'OF', 'BENJAMIN', 'AND', 'JUDAH', 'WERE', 'THE', 'CHIEF', 'ACTORS', 'OPENING', 'SCENE', 'IN', "ISRAEL'S", 'HISTORY'] +2609-156975-0008-2375: ref=['THE', 'BIBLICAL', 'NARRATIVES', 'APPARENTLY', 'DISAGREE', 'REGARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0008-2375: hyp=['THE', 'BIBOCO', 'NARRATIVES', 'APPARENTLY', 'DISAGREED', 'GUARDING', 'THE', 'DURATION', 'OF', 'THE', 'SOJOURN', 'IN', 'EGYPT'] +2609-156975-0009-2376: ref=['THE', 'LATER', 'TRADITIONS', 'TEND', 'TO', 'EXTEND', 'THE', 'PERIOD'] +2609-156975-0009-2376: hyp=['THE', 'LATER', 'JUDICINES', 'INTEREST', 'IN', 'THE', 'PERIOD'] +2609-156975-0010-2377: ref=['HERE', 'WERE', 'FOUND', 'SEVERAL', 'INSCRIPTIONS', 'BEARING', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'P', 'ATUM', 'HOUSE', 'OF', 'THE', 'GOD', 'ATUM'] +2609-156975-0010-2377: hyp=['YOU', 'WERE', 'FOUND', 'SEVEREND', 'SCRIPS', 'AND', 'SPARED', 'THE', 'EGYPTIAN', 'NAME', 'OF', 'THE', 'CITY', 'PATUM', 'OUTS', 'OF', 'THE', 'GOD', 'ATOM'] +2609-156975-0011-2378: ref=['A', 'CONTEMPORARY', 'INSCRIPTION', 'ALSO', 'STATES', 'THAT', 'HE', 'FOUNDED', 'NEAR', 'PITHUM', 'THE', 'HOUSE', 'OF', 'RAMSES', 'A', 'CITY', 'WITH', 'A', 'ROYAL', 'RESIDENCE', 'AND', 'TEMPLES'] +2609-156975-0011-2378: hyp=['A', 'CONTEMPORARY', 'INSCRIPTION', 'ONCE', 'ESTATES', 'THAT', 'HE', 'FOUND', 'A', 'NEAR', 'PITTHAM', 'THE', 'HOUSE', 'OF', 'RANSES', 'A', 'CITY', 'WITH', 'THE', 'ROYAL', 'RESIDENCE', 'IN', 'TEMPLES'] +2609-156975-0012-2379: ref=['THAT', 'THE', 'HEBREWS', 'WERE', 'RESTIVE', 'UNDER', 'THIS', 'TYRANNY', 'WAS', 'NATURAL', 'INEVITABLE'] +2609-156975-0012-2379: hyp=['THAT', 'THE', 'HEBREWS', 'WERE', 'WRETS', 'OF', 'UNDER', 'THIS', 'SURNING', 'WAS', 'NATURALLY', 'INEVITABLE'] +2609-156975-0013-2380: ref=['WAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'EXPECTED', 'FROM', 'A', 'DESPOTIC', 'RULER', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0013-2380: hyp=['WHEREAS', 'ANY', 'OTHER', 'PROCEDURE', 'TO', 'BE', 'INSPECTOR', 'FROM', 'IT', 'THAT', 'SPOTIC', 'ROAR', 'OF', 'THAT', 'LAND', 'AND', 'DAY'] +2609-156975-0014-2381: ref=['THE', 'MAKING', 'OF', 'A', 'LOYAL', 'PATRIOT'] 
+2609-156975-0014-2381: hyp=['THE', 'MAKING', 'OF', 'THE', 'LOYAL', 'PATRIOT'] +2609-156975-0015-2382: ref=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AND', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0015-2382: hyp=['THE', 'STORY', 'OF', 'MOSES', 'BIRTH', 'AN', 'EARLY', 'CHILDHOOD', 'IS', 'ONE', 'OF', 'THE', 'MOST', 'INTERESTING', 'CHAPTERS', 'IN', 'BIBLICAL', 'HISTORY'] +2609-156975-0016-2383: ref=['WAS', 'MOSES', 'JUSTIFIED', 'IN', 'RESISTING', 'THE', 'EGYPTIAN', 'TASKMASTER'] +2609-156975-0016-2383: hyp=['WIS', 'MOVES', "IT'S", 'JEST', 'FIND', 'AN', 'RESIST', 'IN', 'THE', 'EGYPTIAN', 'TAX', 'MASTER'] +2609-156975-0017-2384: ref=['IS', 'PEONAGE', 'ALWAYS', 'DISASTROUS', 'NOT', 'ONLY', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0017-2384: hyp=['HIS', 'PINIONS', 'ALWAYS', 'DISASTERATE', 'NOT', 'OWING', 'TO', 'ITS', 'VICTIMS', 'BUT', 'ALSO', 'TO', 'THE', 'GOVERNMENT', 'IMPOSING', 'IT'] +2609-156975-0018-2385: ref=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'OF', 'MIDIAN'] +2609-156975-0018-2385: hyp=['NATURALLY', 'HE', 'WENT', 'TO', 'THE', 'LAND', 'A', 'MILLION'] +2609-156975-0019-2386: ref=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'HAD', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLACE', 'OF', 'REFUGE', 'FOR', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0019-2386: hyp=['THE', 'WILDERNESS', 'TO', 'THE', 'EAST', 'OF', 'EGYPT', 'AND', 'FOR', 'CENTURIES', 'BEEN', 'THE', 'PLATES', 'OF', 'REFUGE', 'OR', 'EGYPTIAN', 'FUGITIVES'] +2609-156975-0020-2387: ref=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'B', 'C'] +2609-156975-0020-2387: hyp=['FROM', 'ABOUT', 'TWO', 'THOUSAND', 'B', 'C'] +2609-156975-0021-2388: ref=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMEN', 'WHO', 'RECEIVED', 'HIM', 'HOSPITABLY'] +2609-156975-0021-2388: hyp=['ON', 'THE', 'BORDERS', 'OF', 'THE', 'WILDERNESS', 'HE', 'FOUND', 'CERTAIN', 'BEDOUIN', 'HERDSMAN', 'WHO', 'RECEIVED', 'HIM', 'HALF', 'SPITABLY'] +2609-156975-0022-2389: ref=['THESE', 'SAND', 'WANDERERS', 'SENT', 'HIM', 'ON', 'FROM', 'TRIBE', 'TO', 'TRIBE', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KEDEM', 'EAST', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'A', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0022-2389: hyp=['THESE', 'SANDWARES', 'SENT', 'HIM', 'ON', 'FROM', 'TIME', 'TO', 'TIME', 'UNTIL', 'HE', 'REACHED', 'THE', 'LAND', 'OF', 'KIDDAM', 'EACH', 'OF', 'THE', 'DEAD', 'SEA', 'WHERE', 'HE', 'REMAINED', 'FOR', 'A', 'YEAR', 'AND', 'A', 'HALF'] +2609-156975-0023-2390: ref=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'IN', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'BECAME', 'IN', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0023-2390: hyp=['LATER', 'HE', 'FOUND', 'HIS', 'WAY', 'TO', 'THE', 'COURT', 'OF', 'ONE', 'OF', 'THE', 'LOCAL', 'KINGS', 'AND', 'CENTRAL', 'PALESTINE', 'WHERE', 'HE', 'MARRIED', 'AND', 'MICHANG', 'IN', 'THE', 'TIME', 'A', 'PROSPEROUS', 'LOCAL', 'PRINCE'] +2609-156975-0024-2391: ref=['THE', 'SCHOOL', 'OF', 'THE', 'WILDERNESS'] +2609-156975-0024-2391: hyp=['THE', 'SCHOOL', 'OF', 'THE', 'WEARINESS'] +2609-156975-0025-2392: ref=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'TO', 'THAT', 'OF', 'SINUHIT'] +2609-156975-0025-2392: hyp=['THE', 'STORY', 'OF', 'MOSES', 'IS', 'IN', 'MANY', 'WAYS', 'CLOSELY', 'PARALLEL', 'DID', 'NOT', 
'ASSUME', 'IT'] +2609-156975-0026-2393: ref=['THE', 'PRIEST', 'OF', 'THE', 'SUB', 'TRIBE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0026-2393: hyp=['THE', 'PRIEST', 'THE', 'SUBTRINE', 'OF', 'THE', 'KENITES', 'RECEIVED', 'HIM', 'INTO', 'HIS', 'HOME', 'AND', 'GAVE', 'HIM', 'HIS', 'DAUGHTER', 'IN', 'MARRIAGE'] +2609-156975-0027-2394: ref=['NOTE', 'THE', 'CHARACTERISTIC', 'ORIENTAL', 'IDEA', 'OF', 'MARRIAGE'] +2609-156975-0027-2394: hyp=['NOTE', 'THE', 'CARE', 'OF', 'RIVER', 'STICK', 'ORIENTOUINE', 'OF', 'MARES'] +2609-156975-0028-2395: ref=['HERE', 'MOSES', 'LEARNED', 'THE', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINING', 'AS', 'THE', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0028-2395: hyp=['HERE', 'MOSES', 'LEARNED', 'THAT', 'LESSONS', 'THAT', 'WERE', 'ESSENTIAL', 'FOR', 'HIS', 'TRAINED', 'IN', 'AS', 'A', 'LEADER', 'AND', 'DELIVERER', 'OF', 'HIS', 'PEOPLE'] +2609-156975-0029-2396: ref=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAIN', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'THE', 'SOUTHERN', 'TRIBES', 'TO', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0029-2396: hyp=['AFTER', 'THE', 'CAPTURE', 'OF', 'JERICHO', 'CERTAINLY', 'OF', 'THEM', 'WENT', 'UP', 'WITH', 'A', 'SUDDEN', 'TRIUMPHS', 'SHE', 'CONQUER', 'SOUTHERN', 'PALESTINE'] +2609-156975-0030-2397: ref=['MANY', 'MODERN', 'SCHOLARS', 'DRAW', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YAHWEH', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GOD', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'SCRIBES', 'JEHOVAH'] +2609-156975-0030-2397: hyp=['MANY', 'MODERN', 'SCHOLARS', 'DRAWN', 'THE', 'CONCLUSION', 'FROM', 'THE', 'BIBLICAL', 'NARRATIVE', 'THAT', 'IT', 'WAS', 'FROM', 'THE', 'KENNITES', 'THAT', 'MOSES', 'FIRST', 'LEARNED', 'OF', 'YONWAY', 'OR', 'AS', 'THE', 'DISTINCTIVE', 'NAME', 'OF', "ISRAEL'S", 'GONE', 'WAS', 'TRANSLATED', 'BY', 'LATER', 'JEWISH', 'GRIBES', 'JEHOVAH'] +2609-156975-0031-2398: ref=['DO', 'THE', 'EARLIEST', 'HEBREW', 'TRADITIONS', 'IMPLY', 'THAT', 'THE', 'ANCESTORS', 'OF', 'THE', 'ISRAELITES', 'WERE', 'WORSHIPPERS', 'OF', 'JEHOVAH'] +2609-156975-0031-2398: hyp=['DO', 'THE', 'ARIAD', 'SEA', 'BOU', 'TRADITIONS', 'IMPLY', 'THAT', 'INSECTORS', 'OF', 'THE', 'ISRAIT', 'WERE', 'WORSHIPPED', 'OF', 'JEHOVAH'] +2609-156975-0032-2399: ref=['THE', 'TITLE', 'OF', 'HIS', 'FATHER', 'IN', 'LAW', 'IMPLIES', 'THAT', 'THIS', 'PRIEST', 'MINISTERED', 'AT', 'SOME', 'WILDERNESS', 'SANCTUARY'] +2609-156975-0032-2399: hyp=['THE', 'TOWN', 'OF', 'HIS', 'FUND', 'THEM', 'AND', 'ALL', 'IMPLIES', 'AT', 'THIS', 'PREACH', 'MINISTERED', 'AT', 'SOME', 'LINEN', 'AT', 'SANCTUARY'] +2609-156975-0033-2400: ref=['MOSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MIDIAN', 'PRIEST', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'AND', 'CONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0033-2400: hyp=['ROSES', 'IN', 'THE', 'HOME', 'OF', 'THE', 'MENDIAN', 'PRIESTS', 'WAS', 'BROUGHT', 'INTO', 'DIRECT', 'AND', 'CONSTANT', 'CONTACT', 'WITH', 'THE', 'JEHOVAH', 'WORSHIP'] +2609-156975-0034-2401: ref=['THE', 'CRUEL', 'FATE', 'OF', 'HIS', 'PEOPLE', 'AND', 'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0034-2401: hyp=['THE', 'CRUEL', 'FATE', 'OF', 'THIS', 'PEOPLE', 'AND', 
'THE', 'PAINFUL', 'EXPERIENCE', 'IN', 'EGYPT', 'THAT', 'HAD', 'DRIVEN', 'HIM', 'INTO', 'THE', 'WILDERNESS', 'PREPARED', 'HIS', 'MIND', 'TO', 'RECEIVE', 'THIS', 'TRAINING'] +2609-156975-0035-2402: ref=['HIS', 'QUEST', 'WAS', 'FOR', 'A', 'JUST', 'AND', 'STRONG', 'GOD', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0035-2402: hyp=['HIS', 'PRICE', 'WAS', 'FOR', 'A', 'JETS', 'AND', 'STRONG', 'GOD', 'ABLE', 'TO', 'DELIVER', 'THE', 'OPPRESSED'] +2609-156975-0036-2403: ref=['THE', 'WILDERNESS', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEPENED', 'HIS', 'SENSE', 'OF', 'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'A', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DESTINIES', 'OF', 'MEN'] +2609-156975-0036-2403: hyp=['THE', 'WEDDINGS', 'WITH', 'ITS', 'LURKING', 'FOES', 'AND', 'THE', 'EVER', 'PRESENT', 'DREAD', 'OF', 'HUNGER', 'AND', 'THIRST', 'DEEP', 'INTO', 'SENSE', 'OF', 'NEED', 'AND', 'OF', 'DEPENDENCE', 'UPON', 'THE', 'POWER', 'ABLE', 'TO', 'GUIDE', 'THE', 'DEBTS', 'NEEDS', 'OF', 'MEN'] +2609-156975-0037-2404: ref=['THE', 'PEASANTS', 'OF', 'THE', 'VAST', 'ANTOLIAN', 'PLAIN', 'IN', 'CENTRAL', 'ASIA', 'MINOR', 'STILL', 'CALL', 'EVERY', 'LIFE', 'GIVING', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0037-2404: hyp=['THE', 'PEASANTS', 'OF', 'THE', 'VATS', 'INTOLLIUM', 'PLAIN', 'OF', 'CENTRAL', 'AS', 'A', 'MINOR', 'SO', 'CALL', 'EVERY', 'LIFE', 'GIVEN', 'SPRING', 'GOD', 'HATH', 'GIVEN'] +2609-156975-0038-2405: ref=['THE', 'CONSTANT', 'NECESSITY', 'OF', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WILDERNESS', 'AND', 'OF', 'DEFENDING', 'THE', 'FLOCKS', 'ENTRUSTED', 'TO', 'MOSES', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEADERSHIP', 'AND', 'ACTION'] +2609-156975-0038-2405: hyp=['THEY', "CAN'T", 'SENT', 'THE', 'NECESSITY', 'A', 'MEETING', 'THE', 'DANGERS', 'OF', 'THE', 'WORDERNESS', 'AND', 'THE', 'DEFENDING', 'THE', 'FLOCKS', 'AND', 'TRITES', 'OF', 'JEMOSIS', 'CARE', 'DEVELOPED', 'HIS', 'COURAGE', 'AND', 'POWER', 'OF', 'LEISURESHIP', 'AND', 'ACTION'] +2609-157645-0000-2352: ref=['EVIDENTLY', 'THE', 'INTENTION', 'WAS', 'TO', 'MAKE', 'THINGS', 'PLEASANT', 'FOR', 'THE', 'ROYAL', 'FOE', 'OF', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0000-2352: hyp=['EVIDENTLY', 'THE', 'INTENTION', 'WHICH', 'MADE', 'THINGS', 'PRESENT', 'FOR', 'THE', 'ROYAL', 'FOLK', 'A', 'TOBACCO', 'DURING', 'HIS', 'VISIT'] +2609-157645-0001-2353: ref=['THE', 'PROHIBITION', 'IN', 'THE', 'REGULATION', 'QUOTED', 'OF', 'SMOKING', 'IN', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'IT', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0001-2353: hyp=['THE', 'PROBES', 'AND', 'THE', 'REGULATING', 'QUOTED', 'HER', 'SMOKING', 'AND', 'SAINT', "MARY'S", 'CHURCH', 'REFERRED', 'TO', 'MAY', 'BE', 'NOTED', 'TO', 'THE', 'ACT', 'WHICH', 'WAS', 'HELD', 'THEREIN'] +2609-157645-0002-2354: ref=['SOMETIMES', 'TOBACCO', 'WAS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECTING', 'OR', 'DEODORIZING', 'PURPOSES'] +2609-157645-0002-2354: hyp=['SOMETIMES', 'TOBACCO', 'IS', 'USED', 'IN', 'CHURCH', 'FOR', 'DISINFECT', 'AND', 'NO', 'DEAL', 'ARISING', 'PURPOSES'] +2609-157645-0003-2355: ref=['BLACKBURN', 'ARCHBISHOP', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0003-2355: hyp=['BLACKBIRD', 'ARCHBISH', 'OF', 'YORK', 'WAS', 'A', 'GREAT', 'SMOKER'] +2609-157645-0004-2356: ref=['ON', 'ONE', 'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NOTTINGHAM', 'FOR', 'A', 'CONFIRMATION'] +2609-157645-0004-2356: hyp=['ON', 
'ONE', 'OCCASION', 'HE', 'WAS', 'AT', 'SAINT', "MARY'S", 'CHURCH', 'NINE', 'IN', 'HAM', 'FOR', 'A', 'CONFIRMATESON'] +2609-157645-0005-2357: ref=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTOR', 'PARR', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'WHAT', 'MISTER', 'DISNEY', 'PREVENTED', 'ARCHBISHOP', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VESTRY', 'AT', 'HATTON'] +2609-157645-0005-2357: hyp=['ANOTHER', 'EIGHTEENTH', 'CENTURY', 'CLERICAL', 'WORTHY', 'THE', 'FAMOUS', 'DOCTRIPAR', 'AN', 'INVETERATE', 'SMOKER', 'WAS', 'ACCUSTOMED', 'TO', 'DO', 'AT', 'MIDSER', 'DIDNY', 'PREVENTED', 'ARCHBISH', 'OF', 'BLACKBURN', 'FROM', 'DOING', 'HE', 'SMOKED', 'IN', 'HIS', 'VETERY', 'AT', 'HATTON'] +2609-157645-0006-2358: ref=['PARR', 'WAS', 'SUCH', 'A', 'CONTINUAL', 'SMOKER', 'THAT', 'ANYONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'IF', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'HAD', 'TO', 'LEARN', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0006-2358: hyp=['PAR', 'WITH', 'SUCH', 'A', 'CONTINUOUS', 'MOTOR', 'THAT', 'ANY', 'ONE', 'WHO', 'CAME', 'INTO', 'HIS', 'COMPANY', 'IF', 'HE', 'HAD', 'NEVER', 'SMOKED', 'BEFORE', 'AND', 'TO', 'LEARNED', 'THE', 'USE', 'OF', 'A', 'PIPE', 'AS', 'A', 'MEANS', 'OF', 'SELF', 'DEFENCE'] +2609-157645-0007-2359: ref=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITCHFIELD', 'HE', 'HAD', 'AN', 'EXTRA', 'PIPE', 'AND', 'JOSHUA', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0007-2359: hyp=['ONE', 'SUNDAY', 'SAYS', 'MISTER', 'DITZFIELD', 'HE', 'ENDING', 'THAT', 'SIR', 'PIPE', 'AND', 'JOHNSHAW', 'THE', 'CLERK', 'TOLD', 'HIM', 'THAT', 'THE', 'PEOPLE', 'WERE', 'GETTING', 'IMPATIENT'] +2609-157645-0008-2360: ref=['LET', 'THEM', 'SING', 'ANOTHER', 'PSALM', 'SAID', 'THE', 'CURATE'] +2609-157645-0008-2360: hyp=['THEM', 'TO', 'THEM', 'SINGING', 'NOW', 'THE', 'PSALMS', 'SAKE', 'THE', 'CURATE'] +2609-157645-0009-2361: ref=['THEY', 'HAVE', 'SIR', 'REPLIED', 'THE', 'CLERK'] +2609-157645-0009-2361: hyp=['THEY', 'HAVE', 'SIR', 'REPLIED', 'THE', 'CLERK'] +2609-157645-0010-2362: ref=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0010-2362: hyp=['THEN', 'LET', 'THEM', 'SING', 'THE', 'HUNDRED', 'AND', 'NINETEENTH', 'REPLIED', 'THE', 'CURATE'] +2609-157645-0011-2363: ref=['SIX', 'ARMS', 'THE', 'NEAREST', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AS', 'MANY', 'TOBACCO', 'POUCHES', 'TO', 'THE', 'MAN', 'OF', 'OFFICE'] +2609-157645-0011-2363: hyp=['SIX', 'ARMS', 'THE', 'NURSE', 'WITHIN', 'REACH', 'PRESENTED', 'WITH', 'AN', 'OBEDIENT', 'START', 'AND', 'AS', 'MANY', 'TOBACCO', 'PIUCHES', 'TO', 'THE', 'MEN', 'OF', 'OFFICE'] +2609-157645-0012-2364: ref=['DAVID', 'DEANS', 'HOWEVER', 'DID', 'NOT', 'AT', 'ALL', 'APPROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0012-2364: hyp=['DAVID', 'DEAN', 'SAMURED', 'DID', 'NOT', 'AT', 'ALL', 'IMPROVE', 'THIS', 'IRREVERENCE'] +2609-157645-0013-2365: ref=['GOING', 'TO', 'CHURCH', 'AT', 'HAYES', 'IN', 'THOSE', 'DAYS', 'MUST', 'HAVE', 'BEEN', 'QUITE', 'AN', 'EXCITING', 'EXPERIENCE'] +2609-157645-0013-2365: hyp=['GO', 'INTO', 'CHURCH', 'THAT', "HASAN'S", 'DAYS', 'MISTS', 'HAVE', 'BEEN', 'ACQUAINTED', 'AND', 'THE', 'SIGNING', 'SPIRITS'] +2609-157645-0014-2366: ref=['WHEN', 'THESE', 'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'THAT', 'I', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'CUSTOM', 'THEY', 
'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-157645-0014-2366: hyp=['WHEN', 'THESE', 'MEN', 'IN', 'THE', 'COURSE', 'OF', 'MY', 'REMONSTRANCE', 'FOUND', 'OUT', 'THAT', 'WAS', 'NOT', 'GOING', 'TO', 'CONTINUE', 'THE', 'COTTOM', 'THEY', 'NO', 'LONGER', 'CARED', 'TO', 'BE', 'COMMUNICANTS'] +2609-169640-0000-2406: ref=['PROAS', 'IN', 'THAT', 'QUARTER', 'WERE', 'USUALLY', 'DISTRUSTED', 'BY', 'SHIPS', 'IT', 'IS', 'TRUE', 'BUT', 'THE', 'SEA', 'IS', 'FULL', 'OF', 'THEM', 'AND', 'FAR', 'MORE', 'ARE', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0000-2406: hyp=['PERHAPS', 'IN', 'THAT', 'QUARTER', 'WERE', 'USUAL', 'DISTRUDGED', 'BY', 'THE', 'STEPS', 'THAT', 'IS', 'TRUE', 'BUT', 'THE', 'SEAS', 'FOR', 'THEM', 'AND', 'FAR', 'MORE', 'ARE', 'INNOCENT', 'THAN', 'ARE', 'GUILTY', 'OF', 'ANY', 'ACTS', 'OF', 'VIOLENCE'] +2609-169640-0001-2407: ref=['AN', 'HOUR', 'AFTER', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TO', 'A', 'LIGHT', 'AIR', 'THAT', 'JUST', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0001-2407: hyp=['NOW', 'I', 'OUTDREW', 'THE', 'SUN', 'HAD', 'SET', 'THE', 'WIND', 'FELL', 'TURNED', 'LIGHT', 'AIR', 'DAT', 'JESTS', 'KEPT', 'STEERAGE', 'WAY', 'ON', 'THE', 'SHIP'] +2609-169640-0002-2408: ref=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAST', 'BUT', 'SHE', 'MINDED', 'HER', 'HELM', 'AS', 'A', 'LIGHT', 'FOOTED', 'GIRL', 'TURNS', 'IN', 'A', 'LIVELY', 'DANCE'] +2609-169640-0002-2408: hyp=['FORTUNATELY', 'THE', 'JOHN', 'WAS', 'NOT', 'ONLY', 'FAT', 'BUT', 'SEA', 'MINDED', 'HER', 'HAIL', 'AS', 'THE', 'LIGHTFOOTED', 'GIRL', 'TURNED', 'TO', 'THE', 'LIVELY', 'DANCE'] +2609-169640-0003-2409: ref=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STEERING', 'SHIP', 'MOST', 'ESPECIALLY', 'IN', 'MODERATE', 'WEATHER'] +2609-169640-0003-2409: hyp=['I', 'NEVER', 'WAS', 'IN', 'A', 'BETTER', 'STIRRING', 'SHIP', 'PERCY', 'SPENT', 'FREE', 'AND', 'MODERATE', 'WEATHER'] +2609-169640-0004-2410: ref=['MISTER', 'MARBLE', 'HE', 'I', 'DO', 'BELIEVE', 'WAS', 'FAIRLY', 'SNOOZING', 'ON', 'THE', 'HEN', 'COOPS', 'BEING', 'LIKE', 'THE', 'SAILS', 'AS', 'ONE', 'MIGHT', 'SAY', 'BARELY', 'ASLEEP'] +2609-169640-0004-2410: hyp=['MISTER', 'MARBLE', 'HE', 'OUGHT', 'TO', 'BELIEVE', 'WAS', 'FAIRLY', 'SNOOZING', 'ON', 'THE', 'INCOUPS', 'BEING', 'LIKE', 'THE', 'SAILORS', 'AS', 'ONE', 'MIGHT', 'SAY', 'VARIOUS', 'LEAP'] +2609-169640-0005-2411: ref=['AT', 'THAT', 'MOMENT', 'I', 'HEARD', 'A', 'NOISE', 'ONE', 'FAMILIAR', 'TO', 'SEAMEN', 'THAT', 'OF', 'AN', 'OAR', 'FALLING', 'IN', 'A', 'BOAT'] +2609-169640-0005-2411: hyp=['AT', 'THAT', 'MOMENT', 'I', 'IN', 'A', 'NOISE', 'WHEN', 'FAMILIAR', 'TO', 'SEAMAN', 'THAT', 'OF', 'AN', 'OAR', 'FOLLOWING', 'IN', 'A', 'BOAT'] +2609-169640-0006-2412: ref=['I', 'SANG', 'OUT', 'SAIL', 'HO', 'AND', 'CLOSE', 'ABOARD'] +2609-169640-0006-2412: hyp=['AS', 'IN', 'YET', 'SO', 'HO', 'AND', 'CLOSE', 'ABROAD'] +2609-169640-0007-2413: ref=['HE', 'WAS', 'TOO', 'MUCH', 'OF', 'A', 'SEAMAN', 'TO', 'REQUIRE', 'A', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'WHAT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0007-2413: hyp=['HE', 'WAS', 'SHIMMERTS', 'OF', 'THE', 'SEAMAN', 'TO', 'REQUIRE', 'SECOND', 'LOOK', 'IN', 'ORDER', 'TO', 'ASCERTAIN', 'BUT', 'WAS', 'TO', 'BE', 'DONE'] +2609-169640-0008-2414: ref=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'US', 'A', 'MOMENT', 'OF', 'BREATHING', 'TIME'] +2609-169640-0008-2414: hyp=['ALTHOUGH', 'THEY', 'WENT', 'THREE', 'FEET', 'TO', 'OUR', 'TWO', 'THIS', 'GAVE', 'UP', 
'SOME', 'MOMENT', 'OF', 'BREATHING', 'TIME'] +2609-169640-0009-2415: ref=['AS', 'OUR', 'SHEETS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'LEISURE', 'TO', 'LOOK', 'ABOUT'] +2609-169640-0009-2415: hyp=['AS', 'OUR', 'SEATS', 'WERE', 'ALL', 'FLYING', 'FORWARD', 'AND', 'REMAINED', 'SO', 'FOR', 'A', 'FEW', 'MINUTES', 'IT', 'GAVE', 'ME', 'A', 'LEISURE', 'TO', 'WORK', 'ABOUT'] +2609-169640-0010-2416: ref=['I', 'SOON', 'SAW', 'BOTH', 'PROAS', 'AND', 'GLAD', 'ENOUGH', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'NEARER'] +2609-169640-0010-2416: hyp=['I', 'SOON', 'SAW', 'BOTH', 'PROTS', 'AND', 'GRINDING', 'UP', 'WAS', 'I', 'TO', 'PERCEIVE', 'THAT', 'THEY', 'HAD', 'NOT', 'APPROACHED', 'MATERIALLY', 'IN', 'NEW', 'YORK'] +2609-169640-0011-2417: ref=['MISTER', 'KITE', 'OBSERVED', 'THIS', 'ALSO', 'AND', 'REMARKED', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCALS', 'ABACK'] +2609-169640-0011-2417: hyp=['BISHOIS', 'DESERVED', 'THIS', 'ALSO', 'AND', 'REMARKED', 'THAT', 'OUR', 'MOVEMENTS', 'HAD', 'BEEN', 'SO', 'PROMPT', 'AS', 'TO', 'TAKE', 'THE', 'RASCAL', 'WAS', 'ABACK'] +2609-169640-0012-2418: ref=['A', 'BREATHLESS', 'STILLNESS', 'SUCCEEDED'] +2609-169640-0012-2418: hyp=['A', 'BREATH', 'WHICH', 'STILL', 'IN', 'ITS', 'SUCCEEDED'] +2609-169640-0013-2419: ref=['THE', 'PROAS', 'DID', 'NOT', 'ALTER', 'THEIR', 'COURSE', 'BUT', 'NEARED', 'US', 'FAST'] +2609-169640-0013-2419: hyp=['THE', 'PROVIDES', 'DID', 'NOT', 'ALTER', 'THE', 'COURSE', 'BUT', 'NEAR', 'TO', 'ITS', 'FAST'] +2609-169640-0014-2420: ref=['I', 'HEARD', 'THE', 'RATTLING', 'OF', 'THE', 'BOARDING', 'PIKES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0014-2420: hyp=['I', 'HEARD', 'THE', 'RIDERING', 'OF', 'THE', 'BOARDING', 'PIPES', 'TOO', 'AS', 'THEY', 'WERE', 'CUT', 'ADRIFT', 'FROM', 'THE', 'SPANKER', 'BOOM', 'AND', 'FELL', 'UPON', 'THE', 'DECKS'] +2609-169640-0015-2421: ref=['KITE', 'WENT', 'AFT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MUSKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0015-2421: hyp=['KIND', 'WENT', 'APT', 'AND', 'RETURNED', 'WITH', 'THREE', 'OR', 'FOUR', 'MASKETS', 'AND', 'AS', 'MANY', 'PIKES'] +2609-169640-0016-2422: ref=['THE', 'STILLNESS', 'THAT', 'REIGNED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEATH'] +2609-169640-0016-2422: hyp=['THE', 'STILLNESS', 'THAT', 'RAINED', 'ON', 'BOTH', 'SIDES', 'WAS', 'LIKE', 'THAT', 'OF', 'DEATH'] +2609-169640-0017-2423: ref=['THE', 'JOHN', 'BEHAVED', 'BEAUTIFULLY', 'AND', 'CAME', 'ROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0017-2423: hyp=['THE', 'JOHN', 'BEHAVED', 'BEAUTIFULLY', 'HE', 'CAME', 'AROUND', 'LIKE', 'A', 'TOP'] +2609-169640-0018-2424: ref=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'THEM', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRAPPLE'] +2609-169640-0018-2424: hyp=['THE', 'QUESTION', 'WAS', 'NOW', 'WHETHER', 'WE', 'COULD', 'PASS', 'NO', 'OR', 'NOT', 'BEFORE', 'THEY', 'GOT', 'NEAR', 'ENOUGH', 'TO', 'GRANTEL'] +2609-169640-0019-2425: ref=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTLY', 'WELL', 'IN', 'THIS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 'SILENCE', 'AND', 'THE', 'CLOSEST', 'ATTENTION', 'TO', 'HIS', 'ORDERS'] +2609-169640-0019-2425: hyp=['THE', 'CAPTAIN', 'BEHAVED', 'PERFECTUALLY', 'WELL', 'IN', 'ITS', 'CRITICAL', 'INSTANT', 'COMMANDING', 'A', 'DEAD', 
'SCIENCE', 'IN', 'THE', 'CITIZENS', 'TO', 'HIS', 'ORDERS'] +2609-169640-0020-2426: ref=['NOT', 'A', 'SOUL', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0020-2426: hyp=['NOW', 'SO', 'ON', 'BOARD', 'THE', 'JOHN', 'WAS', 'HURT'] +2609-169640-0021-2427: ref=['ON', 'OUR', 'SIDE', 'WE', 'GAVE', 'THE', 'GENTLEMEN', 'THE', 'FOUR', 'SIXES', 'TWO', 'AT', 'THE', 'NEAREST', 'AND', 'TWO', 'AT', 'THE', 'STERN', 'MOST', 'PROA', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'DISTANT'] +2609-169640-0021-2427: hyp=['WHEN', 'OURSAN', 'WE', 'GAVE', 'THE', 'GENTLEMAN', 'THE', 'FOUR', 'SIX', 'TO', 'OUT', 'THE', 'NEWS', 'AND', 'TWO', 'AT', 'THE', 'STERNMOST', 'PROVERB', 'WHICH', 'WAS', 'STILL', 'NEAR', 'A', "CABLE'S", 'LENGTH', 'OF', 'DISTANT'] +2609-169640-0022-2428: ref=['THEY', 'WERE', 'LIKE', 'THE', 'YELLS', 'OF', 'FIENDS', 'IN', 'ANGUISH'] +2609-169640-0022-2428: hyp=['THEY', 'WERE', 'NIGHTLY', 'YEARS', 'OF', 'FIENDS', 'IN', 'ENGLISH'] +2609-169640-0023-2429: ref=['I', 'DOUBT', 'IF', 'WE', 'TOUCHED', 'A', 'MAN', 'IN', 'THE', 'NEAREST', 'PROA'] +2609-169640-0023-2429: hyp=['AND', 'OUT', 'IF', 'WE', 'TOUCH', 'THE', 'MAIN', 'IN', 'THE', 'NURSE', 'PRAYER'] +2609-169640-0024-2430: ref=['IN', 'THIS', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ALL', 'HER', 'CANVAS', 'BEING', 'FULL', 'LEAVING', 'THE', 'PROA', 'MOTIONLESS', 'IN', 'HER', 'WAKE'] +2609-169640-0024-2430: hyp=['AND', 'THAT', 'STATE', 'THE', 'SHIP', 'PASSED', 'AHEAD', 'ON', 'FOR', 'A', 'CANVAS', 'BEEN', 'FOR', 'LEAVING', 'THE', 'PROW', 'MUCH', 'ENRICHED', 'IN', 'HER', 'WAKE'] +3005-163389-0000-1108: ref=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURN'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0000-1108: hyp=['THEY', 'SWARMED', 'UP', 'IN', 'FRONT', 'OF', "SHERBURNE'S", 'PALINGS', 'AS', 'THICK', 'AS', 'THEY', 'COULD', 'JAM', 'TOGETHER', 'AND', 'YOU', "COULDN'T", 'HEAR', 'YOURSELF', 'THINK', 'FOR', 'THE', 'NOISE'] +3005-163389-0001-1109: ref=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEAR', 'DOWN', 'THE', 'FENCE'] +3005-163389-0001-1109: hyp=['SOME', 'SUNG', 'OUT', 'TEAR', 'DOWN', 'THE', 'FENCE', 'TEAR', 'DOWN', 'THE', 'FENCE'] +3005-163389-0002-1110: ref=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0002-1110: hyp=['THE', 'STILLNESS', 'WAS', 'AWFUL', 'CREEPY', 'AND', 'UNCOMFORTABLE'] +3005-163389-0003-1111: ref=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUT', 'GAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0003-1111: hyp=['SHERBURN', 'RUN', 'HIS', 'EYE', 'SLOW', 'ALONG', 'THE', 'CROWD', 'AND', 'WHEREVER', 'IT', 'STRUCK', 'THE', 'PEOPLE', 'TRIED', 'A', 'LITTLE', 'TO', 'OUTGAZE', 'HIM', 'BUT', 'THEY', "COULDN'T", 'THEY', 'DROPPED', 'THEIR', 'EYES', 'AND', 'LOOKED', 'SNEAKY'] +3005-163389-0004-1112: ref=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0004-1112: hyp=['THE', 'AVERAGE', "MAN'S", 'A', 'COWARD'] +3005-163389-0005-1113: ref=['BECAUSE', "THEY'RE", 'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARKAND', "IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0005-1113: hyp=['BECAUSE', "THEY'RE", 'AFRAID', 'THE', "MAN'S", 'FRIENDS', 'WILL', 'SHOOT', 'THEM', 'IN', 'THE', 'BACK', 'IN', 'THE', 'DARK', 'AND', 
"IT'S", 'JUST', 'WHAT', 'THEY', 'WOULD', 'DO'] +3005-163389-0006-1114: ref=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASKED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0006-1114: hyp=['SO', 'THEY', 'ALWAYS', 'ACQUIT', 'AND', 'THEN', 'A', 'MAN', 'GOES', 'IN', 'THE', 'NIGHT', 'WITH', 'A', 'HUNDRED', 'MASSED', 'COWARDS', 'AT', 'HIS', 'BACK', 'AND', 'LYNCHES', 'THE', 'RASCAL'] +3005-163389-0007-1115: ref=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0007-1115: hyp=['YOU', "DIDN'T", 'WANT', 'TO', 'COME'] +3005-163389-0008-1116: ref=['BUT', 'A', 'MOB', 'WITHOUT', 'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0008-1116: hyp=['BUT', 'A', 'MOB', 'WITHOUT', 'ANY', 'MAN', 'AT', 'THE', 'HEAD', 'OF', 'IT', 'IS', 'BENEATH', 'PITIFULNESS'] +3005-163389-0009-1117: ref=['NOW', 'LEAVE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0009-1117: hyp=['NOW', 'LE', 'AND', 'TAKE', 'YOUR', 'HALF', 'A', 'MAN', 'WITH', 'YOU', 'TOSSING', 'HIS', 'GUN', 'UP', 'ACROSS', 'HIS', 'LEFT', 'ARM', 'AND', 'COCKING', 'IT', 'WHEN', 'HE', 'SAYS', 'THIS'] +3005-163389-0010-1118: ref=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKNESS', 'HE', 'HEELED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0010-1118: hyp=['THE', 'CROWD', 'WASHED', 'BACK', 'SUDDEN', 'AND', 'THEN', 'BROKE', 'ALL', 'APART', 'AND', 'WENT', 'TEARING', 'OFF', 'EVERY', 'WHICH', 'WAY', 'AND', 'BUCK', 'HARKINS', 'HE', 'HEALED', 'IT', 'AFTER', 'THEM', 'LOOKING', 'TOLERABLE', 'CHEAP'] +3005-163389-0011-1119: ref=['YOU', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0011-1119: hyp=['HE', "CAN'T", 'BE', 'TOO', 'CAREFUL'] +3005-163389-0012-1120: ref=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'THE', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'STANDSTILL'] +3005-163389-0012-1120: hyp=['THEY', 'ARGUED', 'AND', 'TRIED', 'TO', 'KEEP', 'HIM', 'OUT', 'BUT', 'HE', "WOULDN'T", 'LISTEN', 'AND', 'A', 'WHOLE', 'SHOW', 'COME', 'TO', 'A', 'FAN', 'STILL'] +3005-163389-0013-1121: ref=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGUN', 'TO', 'SCREAM'] +3005-163389-0013-1121: hyp=['AND', 'ONE', 'OR', 'TWO', 'WOMEN', 'BEGAN', 'TO', 'SCREAM'] +3005-163389-0014-1122: ref=['SO', 'THEN', 'THE', 'RINGMASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0014-1122: hyp=['SO', 'THEN', 'A', 'RING', 'MASTER', 'HE', 'MADE', 'A', 'LITTLE', 'SPEECH', 'AND', 'SAID', 'HE', 'HOPED', 'THERE', "WOULDN'T", 'BE', 'NO', 'DISTURBANCE', 'AND', 'IF', 'THE', 'MAN', 'WOULD', 'PROMISE', 'HE', "WOULDN'T", 'MAKE', 'NO', 'MORE', 'TROUBLE', 'HE', 'WOULD', 'LET', 'HIM', 'RIDE', 'IF', 'HE', 'THOUGHT', 'HE', 'COULD', 'STAY', 'ON', 'THE', 'HORSE'] +3005-163389-0015-1123: ref=['IT', "WARN'T", 'FUNNY', 'TO', 'ME', 'THOUGH', 'I', 'WAS', 'ALL', 'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0015-1123: hyp=['IT', "WARN'T", 'FUNNY', 'TO', 
'ME', 'THOUGH', 'I', 'WAS', 'ALL', 'OF', 'A', 'TREMBLE', 'TO', 'SEE', 'HIS', 'DANGER'] +3005-163389-0016-1124: ref=['AND', 'THE', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFIRE', 'TOO'] +3005-163389-0016-1124: hyp=['AND', 'A', 'HORSE', 'A', 'GOING', 'LIKE', 'A', 'HOUSE', 'AFIRE', 'TOO'] +3005-163389-0017-1125: ref=['HE', 'SHED', 'THEM', 'SO', 'THICK', 'THEY', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0017-1125: hyp=['HE', 'SHARED', 'THEM', 'SO', 'THICK', 'THEY', 'KIND', 'OF', 'CLOGGED', 'UP', 'THE', 'AIR', 'AND', 'ALTOGETHER', 'HE', 'SHED', 'SEVENTEEN', 'SUITS'] +3005-163389-0018-1126: ref=['WHY', 'IT', 'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163389-0018-1126: hyp=['WHY', 'IT', 'WAS', 'ONE', 'OF', 'HIS', 'OWN', 'MEN'] +3005-163390-0000-1185: ref=['ANDBUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0000-1185: hyp=['AND', 'BUT', 'NEVER', 'MIND', 'THE', 'REST', 'OF', 'HIS', 'OUTFIT', 'IT', 'WAS', 'JUST', 'WILD', 'BUT', 'IT', 'WAS', 'AWFUL', 'FUNNY'] +3005-163390-0001-1186: ref=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAW', 'HAWED', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0001-1186: hyp=['THE', 'PEOPLE', 'MOST', 'KILLED', 'THEMSELVES', 'LAUGHING', 'AND', 'WHEN', 'THE', 'KING', 'GOT', 'DONE', 'CAPERING', 'AND', 'CAPERED', 'OFF', 'BEHIND', 'THE', 'SCENES', 'THEY', 'ROARED', 'AND', 'CLAPPED', 'AND', 'STORMED', 'AND', 'HAWHAT', 'TILL', 'HE', 'COME', 'BACK', 'AND', 'DONE', 'IT', 'OVER', 'AGAIN', 'AND', 'AFTER', 'THAT', 'THEY', 'MADE', 'HIM', 'DO', 'IT', 'ANOTHER', 'TIME'] +3005-163390-0002-1187: ref=['TWENTY', 'PEOPLE', 'SINGS', 'OUT'] +3005-163390-0002-1187: hyp=['TWENTY', 'PEOPLE', 'SANGS', 'OUT'] +3005-163390-0003-1188: ref=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0003-1188: hyp=['THE', 'DUKE', 'SAYS', 'YES'] +3005-163390-0004-1189: ref=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0004-1189: hyp=['EVERYBODY', 'SINGS', 'OUT', 'SOLD'] +3005-163390-0005-1190: ref=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0005-1190: hyp=['BUT', 'A', 'BIG', 'FINE', 'LOOKING', 'MAN', 'JUMPS', 'UP', 'ON', 'A', 'BENCH', 'AND', 'SHOUTS', 'HOLD', 'ON'] +3005-163390-0006-1191: ref=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0006-1191: hyp=['JUST', 'A', 'WORD', 'GENTLEMEN', 'THEY', 'STOPPED', 'TO', 'LISTEN'] +3005-163390-0007-1192: ref=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'OF', 'THE', 'TOWN'] +3005-163390-0007-1192: hyp=['WHAT', 'WE', 'WANT', 'IS', 'TO', 'GO', 'OUT', 'OF', 'HERE', 'QUIET', 'AND', 'TALK', 'THIS', 'SHOW', 'UP', 'AND', 'SELL', 'THE', 'REST', 'O', 'THE', 'TOWN'] +3005-163390-0008-1193: ref=['YOU', 'BET', 'IT', 'IS', 'THE', 'JEDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] +3005-163390-0008-1193: hyp=['YE', 'BADE', 'IT', 'IS', 'THE', 'JUDGE', 'IS', 'RIGHT', 'EVERYBODY', 'SINGS', 'OUT'] +3005-163390-0009-1194: ref=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 
'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0009-1194: hyp=['WE', 'STRUCK', 'THE', 'RAFT', 'AT', 'THE', 'SAME', 'TIME', 'AND', 'IN', 'LESS', 'THAN', 'TWO', 'SECONDS', 'WE', 'WAS', 'GLIDING', 'DOWN', 'STREAM', 'ALL', 'DARK', 'AND', 'STILL', 'AND', 'EDGING', 'TOWARDS', 'THE', 'MIDDLE', 'OF', 'THE', 'RIVER', 'NOBODY', 'SAYING', 'A', 'WORD'] +3005-163390-0010-1195: ref=['WE', 'NEVER', 'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0010-1195: hyp=['WE', 'NEVER', 'SHOWED', 'A', 'LIGHT', 'TILL', 'WE', 'WAS', 'ABOUT', 'TEN', 'MILE', 'BELOW', 'THE', 'VILLAGE'] +3005-163390-0011-1196: ref=['GREENHORNS', 'FLATHEADS'] +3005-163390-0011-1196: hyp=['GREENHORNS', 'FLAT', 'HEADS'] +3005-163390-0012-1197: ref=['NO', 'I', 'SAYS', 'IT', "DON'T"] +3005-163390-0012-1197: hyp=['NO', 'I', 'SAY', 'IS', 'IT', "DON'T"] +3005-163390-0013-1198: ref=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'THE', 'BREED', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0013-1198: hyp=['WELL', 'IT', "DON'T", 'BECAUSE', "IT'S", 'IN', 'DE', 'BREATHE', 'I', 'RECKON', "THEY'RE", 'ALL', 'ALIKE'] +3005-163390-0014-1199: ref=['WELL', "THAT'S", 'WHAT', "I'M", 'A', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RAPSCALLIONS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', 'IS', 'DAT', 'SO'] +3005-163390-0014-1199: hyp=['WELL', "THAT'S", 'WHAT', 'I', 'MUST', 'SAYING', 'ALL', 'KINGS', 'IS', 'MOSTLY', 'RATCALIONS', 'AS', 'FUR', 'AS', 'I', 'CAN', 'MAKE', 'OUT', "HERE'S", "DAT'S", 'SO'] +3005-163390-0015-1200: ref=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'THIRD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARCHIES', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'IN', 'OLD', 'TIMES', 'AND', 'RAISE', 'CAIN'] +3005-163390-0015-1200: hyp=['AND', 'LOOK', 'AT', 'CHARLES', 'SECOND', 'AND', 'LOUIS', 'FOURTEEN', 'AND', 'LOUIS', 'FIFTEEN', 'AND', 'JAMES', 'SECOND', 'AND', 'EDWARD', 'SECOND', 'AND', 'RICHARD', 'THIRD', 'AND', 'FORTY', 'MORE', 'BESIDES', 'ALL', 'THEM', 'SAXON', 'HEPTARK', 'IS', 'THAT', 'USED', 'TO', 'RIP', 'AROUND', 'SO', 'WHEN', 'OLD', 'TIMES', 'AND', 'RAISED', 'GAME'] +3005-163390-0016-1201: ref=['MY', 'YOU', 'OUGHT', 'TO', 'SEEN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0016-1201: hyp=['MY', 'YOU', 'OUGHT', 'TO', 'SEE', 'AN', 'OLD', 'HENRY', 'THE', 'EIGHT', 'WHEN', 'HE', 'WAS', 'IN', 'BLOOM', 'HE', 'WAS', 'A', 'BLOSSOM'] +3005-163390-0017-1202: ref=['RING', 'UP', 'FAIR', 'ROSAMUN'] +3005-163390-0017-1202: hyp=['RING', 'UP', 'FAIR', 'ROSAMOND'] +3005-163390-0018-1203: ref=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GET', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0018-1203: hyp=['WELL', 'HENRY', 'HE', 'TAKES', 'A', 'NOTION', 'HE', 'WANTS', 'TO', 'GIT', 'UP', 'SOME', 'TROUBLE', 'WITH', 'THIS', 'COUNTRY'] +3005-163390-0019-1204: ref=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTHWHAT', 'THEN'] +3005-163390-0019-1204: hyp=["S'POSE", 'HE', 'OPENED', 'HIS', 'MOUTH', 'WHAT', 'THEN'] +3005-163390-0020-1205: ref=['ALL', 'I', 'SAY', 'IS', 'KINGS', 'IS', 'KINGS', 'AND', 'YOU', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0020-1205: hyp=['ALL', 'I', 'SAY', 'IS', 
'KINGS', 'IS', 'KINGS', 'AN', 'YOU', 'GOT', 'TO', 'MAKE', 'ALLOWANCES'] +3005-163390-0021-1206: ref=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0021-1206: hyp=['TAKE', 'THEM', 'ALL', 'AROUND', "THEY'RE", 'A', 'MIGHTY', 'ORNERY', 'LOT', "IT'S", 'THE', 'WAY', "THEY'RE", 'RAISED'] +3005-163390-0022-1207: ref=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0022-1207: hyp=['WELL', 'THEY', 'ALL', 'DO', 'JIM'] +3005-163390-0023-1208: ref=['NOW', 'DE', 'DUKE', "HE'S", 'A', 'TOLERBLE', 'LIKELY', 'MAN', 'IN', 'SOME', 'WAYS'] +3005-163390-0023-1208: hyp=['NOW', 'TO', 'DO', "HE'S", 'A', 'TOLERABLE', 'LIKE', 'THE', 'MAN', 'IN', 'SOME', 'WAYS'] +3005-163390-0024-1209: ref=['THIS', "ONE'S", 'A', 'MIDDLING', 'HARD', 'LOT', 'FOR', 'A', 'DUKE'] +3005-163390-0024-1209: hyp=['THIS', "ONE'S", 'A', 'MIDDLIN', 'HARD', 'LOT', 'FOR', 'A', 'DUPE'] +3005-163390-0025-1210: ref=['WHEN', 'I', 'WAKED', 'UP', 'JUST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0025-1210: hyp=['WHEN', 'I', 'WAKED', 'UP', 'JEST', 'AT', 'DAYBREAK', 'HE', 'WAS', 'SITTING', 'THERE', 'WITH', 'HIS', 'HEAD', 'DOWN', 'BETWIXT', 'HIS', 'KNEES', 'MOANING', 'AND', 'MOURNING', 'TO', 'HIMSELF'] +3005-163390-0026-1211: ref=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0026-1211: hyp=['IT', "DON'T", 'SEEM', 'NATURAL', 'BUT', 'I', 'RECKON', "IT'S", 'SO'] +3005-163390-0027-1212: ref=['HE', 'WAS', 'OFTEN', 'MOANING', 'AND', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'LIZABETH'] +3005-163390-0027-1212: hyp=['HE', 'WAS', 'OFTEN', 'MOANING', 'IN', 'MOURNING', 'THAT', 'WAY', 'NIGHTS', 'WHEN', 'HE', 'JUDGED', 'I', 'WAS', 'ASLEEP', 'AND', 'SAYING', 'PO', 'LITTLE', 'ELIZABETH'] +3005-163390-0028-1213: ref=['DOAN', 'YOU', 'HEAR', 'ME', 'SHET', 'DE', 'DO'] +3005-163390-0028-1213: hyp=["DON'T", 'YOU', 'HEAR', 'ME', 'SHUT', 'DE', 'DO'] +3005-163390-0029-1214: ref=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0029-1214: hyp=['I', 'LAY', 'I', 'MAKE', 'YOU', 'MINE'] +3005-163390-0030-1215: ref=['JIS', 'AS', 'LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163390-0030-1215: hyp=['GIT', 'AS', 'LOUD', 'AS', 'I', 'COULD', 'YELL'] +3005-163391-0000-1127: ref=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0000-1127: hyp=['WHICH', 'WAS', 'SOUND', 'ENOUGH', 'JUDGMENT', 'BUT', 'YOU', 'TAKE', 'THE', 'AVERAGE', 'MAN', 'AND', 'HE', "WOULDN'T", 'WAIT', 'FOR', 'HIM', 'TO', 'HOWL'] +3005-163391-0001-1128: ref=['THE', "KING'S", 'DUDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AND', 'STARCHY'] +3005-163391-0001-1128: hyp=['THE', "KING'S", 'DERDS', 'WAS', 'ALL', 'BLACK', 'AND', 'HE', 'DID', 'LOOK', 'REAL', 'SWELL', 'AN', 'STARCHY'] +3005-163391-0002-1129: ref=['WHY', 'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', 'HE', 'HAD', 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LEVITICUS', 'HIMSELF'] +3005-163391-0002-1129: hyp=['WHY', 
'BEFORE', 'HE', 'LOOKED', 'LIKE', 'THE', 'ORNERIEST', 'OLD', 'RIP', 'THAT', 'EVER', 'WAS', 'BUT', 'NOW', 'WHEN', "HE'D", 'TAKE', 'OFF', 'HIS', 'NEW', 'WHITE', 'BEAVER', 'AND', 'MAKE', 'A', 'BOW', 'AND', 'DO', 'A', 'SMILE', 'HE', 'LOOKED', 'THAT', 'GRAND', 'AND', 'GOOD', 'AND', 'PIOUS', 'THAT', "YOU'D", 'SAY', "HE'D", 'WALKED', 'RIGHT', 'OUT', 'OF', 'THE', 'ARK', 'AND', 'MAYBE', 'WAS', 'OLD', 'LUVIDICUS', 'HIMSELF'] +3005-163391-0003-1130: ref=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0003-1130: hyp=['JIM', 'CLEANED', 'UP', 'THE', 'CANOE', 'AND', 'I', 'GOT', 'MY', 'PADDLE', 'READY'] +3005-163391-0004-1131: ref=['WHER', 'YOU', 'BOUND', 'FOR', 'YOUNG', 'MAN'] +3005-163391-0004-1131: hyp=['WERE', 'YOU', 'BOUND', 'FOR', 'YOUNG', 'MAN'] +3005-163391-0005-1132: ref=['GIT', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0005-1132: hyp=['GET', 'ABOARD', 'SAYS', 'THE', 'KING'] +3005-163391-0006-1133: ref=['I', 'DONE', 'SO', 'AND', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0006-1133: hyp=['I', 'DONE', 'SO', 'AN', 'THEN', 'WE', 'ALL', 'THREE', 'STARTED', 'ON', 'AGAIN'] +3005-163391-0007-1134: ref=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0007-1134: hyp=['THE', 'YOUNG', 'CHAP', 'WAS', 'MIGHTY', 'THANKFUL', 'SAID', 'IT', 'WAS', 'TOUGH', 'WORK', 'TOTING', 'HIS', 'BAGGAGE', 'SUCH', 'WEATHER'] +3005-163391-0008-1135: ref=['HE', 'ASKED', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'THE', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILE', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0008-1135: hyp=['THE', 'AIR', 'THE', 'KING', 'WHERE', 'HE', 'WAS', 'GOING', 'AND', 'THE', 'KING', 'TOLD', 'HIM', "HE'D", 'COME', 'DOWN', 'A', 'RIVER', 'AND', 'LANDED', 'AT', 'THE', 'OTHER', 'VILLAGE', 'THIS', 'MORNING', 'AND', 'NOW', 'HE', 'WAS', 'GOING', 'UP', 'A', 'FEW', 'MILES', 'TO', 'SEE', 'AN', 'OLD', 'FRIEND', 'ON', 'A', 'FARM', 'UP', 'THERE', 'THE', 'YOUNG', 'FELLOW', 'SAYS'] +3005-163391-0009-1136: ref=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLING', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0009-1136: hyp=['BUT', 'THEN', 'I', 'SAYS', 'AGAIN', 'NO', 'I', 'RECKON', 'IT', "AIN'T", 'HIM', 'OR', 'ELSE', 'HE', "WOULDN'T", 'BE', 'PADDLIN', 'UP', 'THE', 'RIVER', 'YOU', "AIN'T", 'HIM', 'ARE', 'YOU'] +3005-163391-0010-1137: ref=['NO', 'MY', "NAME'S", 'BLODGETT', 'ELEXANDER', 'BLODGETT', 'REVEREND', 'ELEXANDER', 'BLODGETT', 'I', "S'POSE", 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'O', 'THE', "LORD'S", 'POOR', 'SERVANTS'] +3005-163391-0010-1137: hyp=['NO', 'MY', "NAME'S", 'OBLIGE', 'IT', 'ALEXANDER', 'BLODGET', 'REVEREND', 'ALEXANDER', 'BLODGET', 'I', "S'POSE", 'I', 'MUST', 'SAY', 'AS', "I'M", 'ONE', 'OF', 'THE', 'LORDS', 'POOR', 'SERVANTS'] +3005-163391-0011-1138: ref=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', "GEORGE'S", "G'YIRLS", 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 
'LIVE'] +3005-163391-0011-1138: hyp=['YOU', 'SEE', 'HE', 'WAS', 'PRETTY', 'OLD', 'AND', 'GEORGE', 'IS', 'GUY', 'EARLS', 'WAS', 'TOO', 'YOUNG', 'TO', 'BE', 'MUCH', 'COMPANY', 'FOR', 'HIM', 'EXCEPT', 'MARY', 'JANE', 'THE', 'RED', 'HEADED', 'ONE', 'AND', 'SO', 'HE', 'WAS', 'KINDER', 'LONESOME', 'AFTER', 'GEORGE', 'AND', 'HIS', 'WIFE', 'DIED', 'AND', "DIDN'T", 'SEEM', 'TO', 'CARE', 'MUCH', 'TO', 'LIVE'] +3005-163391-0012-1139: ref=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'A', 'LIVED', 'TO', 'SEE', 'HIS', 'BROTHERS', 'POOR', 'SOUL'] +3005-163391-0012-1139: hyp=['TOO', 'BAD', 'TOO', 'BAD', 'HE', "COULDN'T", 'HAVE', 'LIVED', 'TO', 'SEE', 'HIS', "BROTHER'S", 'POOR', 'SOUL'] +3005-163391-0013-1140: ref=["I'M", 'GOING', 'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FOR', 'RYO', 'JANEERO', 'WHERE', 'MY', 'UNCLE', 'LIVES'] +3005-163391-0013-1140: hyp=["I'M", 'GOIN', 'IN', 'A', 'SHIP', 'NEXT', 'WEDNESDAY', 'FOR', 'RIO', 'GENERO', 'WHERE', 'MY', 'UNCLE', 'IS'] +3005-163391-0014-1141: ref=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHT', 'I', 'WAS', 'A', 'GOING'] +3005-163391-0014-1141: hyp=['BUT', "IT'LL", 'BE', 'LOVELY', 'WISHED', 'I', 'WAS', 'A', 'GOIN'] +3005-163391-0015-1142: ref=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', "FOURTEENTHAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HARE', 'LIP', 'POOR', 'THINGS'] +3005-163391-0015-1142: hyp=['MARY', "JANE'S", 'NINETEEN', "SUSAN'S", 'FIFTEEN', 'AND', "JOANNA'S", 'ABOUT', 'FOURTEEN', "THAT'S", 'THE', 'ONE', 'THAT', 'GIVES', 'HERSELF', 'TO', 'GOOD', 'WORKS', 'AND', 'HAS', 'A', 'HAIR', 'LIP', 'POOR', 'THINGS'] +3005-163391-0016-1143: ref=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0016-1143: hyp=['WELL', 'THEY', 'COULD', 'BE', 'WORSE', 'OFF'] +3005-163391-0017-1144: ref=['OLD', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0017-1144: hyp=['O', 'PETER', 'HAD', 'FRIENDS', 'AND', 'THEY', "AIN'T", 'GOING', 'TO', 'LET', 'THEM', 'COME', 'TO', 'NO', 'HARM'] +3005-163391-0018-1145: ref=['BLAMED', 'IF', 'HE', "DIDN'T", 'INQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'IN', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKSES', 'AND', 'ABOUT', "PETER'S", 'BUSINESSWHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'SWHICH", 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', "HARVEY'SWHICH", 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0018-1145: hyp=['BLAMED', 'IF', 'HE', "DIDN'T", 'ACQUIRE', 'ABOUT', 'EVERYBODY', 'AND', 'EVERYTHING', 'AND', 'THAT', 'BLESSED', 'TOWN', 'AND', 'ALL', 'ABOUT', 'THE', 'WILKES', 'AND', 'ABOUT', "PETER'S", 'BUSINESS', 'WHICH', 'WAS', 'A', 'TANNER', 'AND', 'ABOUT', "GEORGE'S", 'WHICH', 'WAS', 'A', 'CARPENTER', 'AND', 'ABOUT', 'HARVEST', 'WHICH', 'WAS', 'A', 'DISSENTERING', 'MINISTER', 'AND', 'SO', 'ON', 'AND', 'SO', 'ON', 'THEN', 'HE', 'SAYS'] +3005-163391-0019-1146: ref=['WHEN', "THEY'RE", 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0019-1146: hyp=['WHEN', 'HER', 'DEEP', 'THEY', "WON'T", 'STOP', 'FOR', 'A', 'HAIL'] +3005-163391-0020-1147: ref=['WAS', 'PETER', 'WILKS', 'WELL', 'OFF'] +3005-163391-0020-1147: hyp=['WAS', 'PETER', 'WILKES', 'WELL', 'OFF'] +3005-163391-0021-1148: ref=['WHEN', 'WE', 'STRUCK', 'THE', 'BOAT', 'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 'SHE', 'GOT', 'OFF'] +3005-163391-0021-1148: hyp=['WHEN', 'WASTED', 'UP', 'THE', 'BOAT', 
'SHE', 'WAS', 'ABOUT', 'DONE', 'LOADING', 'AND', 'PRETTY', 'SOON', 'SHE', 'GOT', 'OFF'] +3005-163391-0022-1149: ref=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0022-1149: hyp=['NOW', 'HUSTLE', 'BACK', 'RIGHT', 'OFF', 'AND', 'FETCH', 'THE', 'DUKE', 'UP', 'HERE', 'AND', 'THE', 'NEW', 'CARPET', 'BAGS'] +3005-163391-0023-1150: ref=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0023-1150: hyp=['SO', 'THEN', 'THEY', 'WAITED', 'FOR', 'A', 'STEAMBOAT'] +3005-163391-0024-1151: ref=['BUT', 'THE', 'KING', 'WAS', "CA'M", 'HE', 'SAYS'] +3005-163391-0024-1151: hyp=['THAT', 'THE', 'KING', 'WAS', 'CALM', 'HE', 'SAYS'] +3005-163391-0025-1152: ref=['THEY', 'GIVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WHAT', 'D', 'I', 'TELL', 'YOU'] +3005-163391-0025-1152: hyp=['THEY', 'GAVE', 'A', 'GLANCE', 'AT', 'ONE', 'ANOTHER', 'AND', 'NODDED', 'THEIR', 'HEADS', 'AS', 'MUCH', 'AS', 'TO', 'SAY', 'WOULD', 'THEY', 'TELL', 'YOU'] +3005-163391-0026-1153: ref=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163391-0026-1153: hyp=['THEN', 'ONE', 'OF', 'THEM', 'SAYS', 'KIND', 'OF', 'SOFT', 'AND', 'GENTLE'] +3005-163399-0000-1154: ref=['PHELPS', 'WAS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0000-1154: hyp=['PHELPS', 'IS', 'ONE', 'OF', 'THESE', 'LITTLE', 'ONE', 'HORSE', 'COTTON', 'PLANTATIONS', 'AND', 'THEY', 'ALL', 'LOOK', 'ALIKE'] +3005-163399-0001-1155: ref=['I', 'WENT', 'AROUND', 'AND', 'CLUMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0001-1155: hyp=['I', 'WENT', 'AROUND', 'AND', 'CLIMB', 'OVER', 'THE', 'BACK', 'STILE', 'BY', 'THE', 'ASH', 'HOPPER', 'AND', 'STARTED', 'FOR', 'THE', 'KITCHEN'] +3005-163399-0002-1156: ref=['I', 'OUT', 'WITH', 'A', "YES'M", 'BEFORE', 'I', 'THOUGHT'] +3005-163399-0002-1156: hyp=['AH', 'OUT', 'WITH', 'A', 'YES', 'AND', 'FORE', 'I', 'THOUGHT'] +3005-163399-0003-1157: ref=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0003-1157: hyp=['SO', 'THEN', 'SHE', 'STARTED', 'FOR', 'THE', 'HOUSE', 'LEADING', 'ME', 'BY', 'THE', 'HAND', 'AND', 'THE', 'CHILDREN', 'TAGGING', 'AFTER'] +3005-163399-0004-1158: ref=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOMED', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0004-1158: hyp=['WHEN', 'WE', 'GOT', 'THERE', 'SHE', 'SET', 'ME', 'DOWN', 'IN', 'A', 'SPLIT', 'BOTTOM', 'CHAIR', 'AND', 'SET', 'HERSELF', 'DOWN', 'ON', 'A', 'LITTLE', 'LOW', 'STOOL', 'IN', 'FRONT', 'OF', 'ME', 'HOLDING', 'BOTH', 'OF', 'MY', 'HANDS', 'AND', 'SAYS'] +3005-163399-0005-1159: ref=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0005-1159: hyp=['WELL', "IT'S", 'LUCKY', 'BECAUSE', 'SOMETIMES', 'PEOPLE', 'DO', 'GET', 'HURT'] +3005-163399-0006-1160: ref=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 'BAPTIST'] +3005-163399-0006-1160: hyp=['AND', 'I', 'THINK', 'HE', 'DIED', 'AFTERWARDS', 'HE', 'WAS', 'A', 'BAPTIST'] +3005-163399-0007-1161: ref=['YES', 'IT', 'WAS', 'MORTIFICATIONTHAT', 'WAS', 
'IT'] +3005-163399-0007-1161: hyp=['YES', 'IT', 'WAS', 'MORTIFICATION', 'THAT', 'WAS', 'IT'] +3005-163399-0008-1162: ref=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH', 'YOU'] +3005-163399-0008-1162: hyp=['YOUR', "UNCLE'S", 'BEEN', 'UP', 'TO', 'THE', 'TOWN', 'EVERY', 'DAY', 'TO', 'FETCH', 'YOU'] +3005-163399-0009-1163: ref=['YOU', 'MUST', 'A', 'MET', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0009-1163: hyp=['YOU', 'MUST', 'AMERD', 'HIM', 'ON', 'THE', 'ROAD', "DIDN'T", 'YOU', 'OLDISH', 'MAN', 'WITH', 'A'] +3005-163399-0010-1164: ref=['WHY', 'CHILD', 'IT', 'LL', 'BE', 'STOLE'] +3005-163399-0010-1164: hyp=['WHY', 'CHILD', "IT'LL", 'BE', 'STOLE'] +3005-163399-0011-1165: ref=['IT', 'WAS', 'KINDER', 'THIN', 'ICE', 'BUT', 'I', 'SAYS'] +3005-163399-0011-1165: hyp=['IT', 'WAS', 'KIND', 'OR', 'THIN', 'EYES', 'BUT', 'I', 'SAYS'] +3005-163399-0012-1166: ref=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMP', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0012-1166: hyp=['I', 'HAD', 'MY', 'MIND', 'ON', 'THE', 'CHILDREN', 'ALL', 'THE', 'TIME', 'I', 'WANTED', 'TO', 'GET', 'THEM', 'OUT', 'TO', 'ONE', 'SIDE', 'AND', 'PUMPED', 'THEM', 'A', 'LITTLE', 'AND', 'FIND', 'OUT', 'WHO', 'I', 'WAS'] +3005-163399-0013-1167: ref=['PRETTY', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILLS', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0013-1167: hyp=['BERTIE', 'SOON', 'SHE', 'MADE', 'THE', 'COLD', 'CHILL', 'STREAK', 'ALL', 'DOWN', 'MY', 'BACK', 'BECAUSE', 'SHE', 'SAYS'] +3005-163399-0014-1168: ref=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0014-1168: hyp=['I', 'SEE', 'IT', "WARN'T", 'A', 'BIT', 'OF', 'USE', 'TO', 'TRY', 'TO', 'GO', 'AHEAD', "I'D", 'GOT', 'TO', 'THROW', 'UP', 'MY', 'HAND'] +3005-163399-0015-1169: ref=['SO', 'I', 'SAYS', 'TO', 'MYSELF', "HERE'S", 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'RESK', 'THE', 'TRUTH'] +3005-163399-0015-1169: hyp=['SO', 'I', 'SAYS', 'TO', 'MYSELF', 'HERE', 'IS', 'ANOTHER', 'PLACE', 'WHERE', 'I', 'GOT', 'TO', 'REST', 'THE', 'TRUTH'] +3005-163399-0016-1170: ref=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0016-1170: hyp=['I', 'OPENED', 'MY', 'MOUTH', 'TO', 'BEGIN', 'BUT', 'SHE', 'GRABBED', 'ME', 'AND', 'HUSTLED', 'ME', 'IN', 'BEHIND', 'THE', 'BED', 'AND', 'SAYS', 'HERE', 'HE', 'COMES'] +3005-163399-0017-1171: ref=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0017-1171: hyp=['CHILDREN', "DON'T", 'YOU', 'SAY', 'A', 'WORD'] +3005-163399-0018-1172: ref=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0018-1172: hyp=['I', 'SEE', 'I', 'WAS', 'IN', 'A', 'FIX', 'NOW'] +3005-163399-0019-1173: ref=['MISSUS', 'PHELPS', 'SHE', 'JUMPS', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0019-1173: hyp=['MISSUS', 'PHELPS', 'SHE', 'JUMPED', 'FOR', 'HIM', 'AND', 'SAYS'] +3005-163399-0020-1174: ref=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0020-1174: hyp=['HAS', 'HE', 'COME', 'NO', 'SAYS', 'HER', 'HUSBAND'] +3005-163399-0021-1175: ref=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] 
+3005-163399-0021-1175: hyp=['I', "CAN'T", 'IMAGINE', 'SAYS', 'THE', 'OLD', 'GENTLEMAN', 'AND', 'I', 'MUST', 'SAY', 'IT', 'MAKES', 'ME', 'DREADFUL', 'UNEASY'] +3005-163399-0022-1176: ref=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0022-1176: hyp=['UNEASY', 'SHE', 'SAYS', "I'M", 'READY', 'TO', 'GO', 'DISTRACTED'] +3005-163399-0023-1177: ref=['HE', 'MUST', 'A', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0023-1177: hyp=['HE', 'MUST', 'HAVE', 'COME', 'AND', "YOU'VE", 'MISSED', 'HIM', 'ALONG', 'THE', 'ROAD'] +3005-163399-0024-1178: ref=['OH', "DON'T", 'DISTRESS', 'ME', 'ANY', "MORE'N", "I'M", 'ALREADY', 'DISTRESSED'] +3005-163399-0024-1178: hyp=['OH', "DON'T", 'DISTRESS', 'ME', 'ANY', "MORE'N", "I'M", 'ALREADY', 'DISTRESSED'] +3005-163399-0025-1179: ref=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "AIN'T", 'THAT', 'SOMEBODY', 'COMING'] +3005-163399-0025-1179: hyp=['WHY', 'SILAS', 'LOOK', 'YONDER', 'UP', 'THE', 'ROAD', "HAIN'T", 'THAT', 'SOMEBODY', 'COMIN'] +3005-163399-0026-1180: ref=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0026-1180: hyp=['THE', 'OLD', 'GENTLEMAN', 'STARED', 'AND', 'SAYS'] +3005-163399-0027-1181: ref=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0027-1181: hyp=['I', "HAIN'T", 'NO', 'IDEA', 'WHO', 'IS', 'IT'] +3005-163399-0028-1182: ref=["IT'S", 'TOM', 'SAWYER'] +3005-163399-0028-1182: hyp=['IS', 'TOM', 'SAWYER'] +3005-163399-0029-1183: ref=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'IT', 'STAYED', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0029-1183: hyp=['BEING', 'TOM', 'SAWYER', 'WAS', 'EASY', 'AND', 'COMFORTABLE', 'AND', 'ITS', 'STATE', 'EASY', 'AND', 'COMFORTABLE', 'TILL', 'BY', 'AND', 'BY', 'I', 'HEAR', 'A', 'STEAMBOAT', 'COUGHING', 'ALONG', 'DOWN', 'THE', 'RIVER'] +3005-163399-0030-1184: ref=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'THAT', 'BOAT'] +3005-163399-0030-1184: hyp=['THEN', 'I', 'SAYS', 'TO', 'MYSELF', "S'POSE", 'TOM', 'SAWYER', 'COMES', 'DOWN', 'ON', 'MY', 'BOAT'] +3080-5032-0000-312: ref=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0000-312: hyp=['BUT', 'I', 'AM', 'HUGELY', 'PLEASED', 'THAT', 'YOU', 'HAVE', 'SEEN', 'MY', 'LADY'] +3080-5032-0001-313: ref=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0001-313: hyp=['I', 'KNEW', 'YOU', 'COULD', 'NOT', 'CHOOSE', 'BUT', 'LIKE', 'HER', 'BUT', 'YET', 'LET', 'ME', 'TELL', 'YOU', 'YOU', 'HAVE', 'SEEN', 'BUT', 'THE', 'WORST', 'OF', 'HER'] +3080-5032-0002-314: ref=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'HER', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0002-314: hyp=['HER', 'CONVERSATION', 'HAS', 'MORE', 'CHARMS', 'THAN', 'CAN', 'BE', 'IN', 'MERE', 'BEAUTY', 'AND', 'A', 'HUMOUR', 'AND', 'DISPOSITION', 'WOULD', 'MAKE', 'A', 'DEFORMED', 'PERSON', 'APPEAR', 'LOVELY'] +3080-5032-0003-315: ref=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0003-315: hyp=['WHY', 'DID', 'YOU', 'NOT', 'SEND', 'ME', 'THAT', 'NEWS', 'AND', 'A', 'GARLAND'] +3080-5032-0004-316: ref=['WELL', 'THE', 
'BEST', "ON'T", 'IS', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0004-316: hyp=['WHY', 'THE', 'BEST', 'ON', 'IT', 'IS', 'THAT', 'I', 'HAVE', 'A', 'SQUIRE', 'NOW', 'THAT', 'IS', 'AS', 'GOOD', 'AS', 'A', 'KNIGHT'] +3080-5032-0005-317: ref=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'AND', 'UPON', 'SO', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGEST'] +3080-5032-0005-317: hyp=['IN', 'EARNEST', 'WE', 'HAVE', 'HAD', 'SUCH', 'A', 'SKIRMISH', 'IN', 'A', 'POINT', 'SO', 'FOOLISH', 'AN', 'OCCASION', 'AS', 'I', 'CANNOT', 'TELL', 'WHICH', 'IS', 'STRANGERS'] +3080-5032-0006-318: ref=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THREE', 'S', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'AND', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LAID', 'TO', 'MY', 'CHARGE'] +3080-5032-0006-318: hyp=['ALL', 'THE', 'PEOPLE', 'THAT', 'I', 'HAD', 'EVER', 'IN', 'MY', 'LIFE', 'REFUSED', 'WERE', 'BROUGHT', 'AGAIN', 'UPON', 'THE', 'STAGE', 'LIKE', 'RICHARD', 'THE', 'THIRD', 'GHOSTS', 'TO', 'REPROACH', 'ME', 'WITHAL', 'IN', 'ALL', 'THE', 'KINDNESS', 'HIS', 'DISCOVERIES', 'COULD', 'MAKE', 'I', 'HAD', 'FOR', 'YOU', 'WAS', 'LATE', 'TO', 'MY', 'CHARGE'] +3080-5032-0007-319: ref=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0007-319: hyp=['MY', 'BEST', 'QUALITIES', 'IF', 'I', 'HAVE', 'ANY', 'THAT', 'ARE', 'GOOD', 'SERVED', 'BUT', 'FOR', 'AGGRAVATIONS', 'OF', 'MY', 'FAULT', 'AND', 'I', 'WAS', 'ALLOWED', 'TO', 'HAVE', 'WIT', 'AND', 'UNDERSTANDING', 'AND', 'DISCRETION', 'IN', 'OTHER', 'THINGS', 'THAT', 'IT', 'MIGHT', 'APPEAR', 'I', 'HAD', 'NONE', 'IN', 'THIS'] +3080-5032-0008-320: ref=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0008-320: hyp=['TIS', 'A', 'STRANGE', 'CHANGE', 'AND', 'I', 'AM', 'VERY', 'SORRY', 'FOR', 'IT', 'BUT', "I'LL", 'SWEAR', 'I', 'KNOW', 'NOT', 'HOW', 'TO', 'HELP', 'IT'] +3080-5032-0009-321: ref=['MISTER', 'FISH', 'IS', 'THE', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAT', 'ANYBODY', 'MAY', 'PRETEND', 'A', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'BUT', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOUR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0009-321: hyp=['MISTER', 'FISH', 'IS', 'A', 'SQUIRE', 'OF', 'DAMES', 'AND', 'HAS', 'SO', 'MANY', 'MISTRESSES', 'THAN', 'ANYBODY', 'MAY', 'PRETEND', 'TO', 'SHARE', 'IN', 'HIM', 'AND', 'BE', 'BELIEVED', 'BUT', 'THOUGH', 'I', 'HAVE', 'THE', 'HONOR', 'TO', 'BE', 'HIS', 'NEAR', 'NEIGHBOUR', 'TO', 'SPEAK', 'FREELY', 'I', 'CANNOT', 'BRAG', 'MUCH', 'THAT', 'HE', 'MAKES', 'ANY', 'COURT', 'TO', 'ME', 'AND', 'I', 'KNOW', 'NO', 'YOUNG', 'WOMAN', 'IN', 'THE', 'COUNTRY', 'THAT', 'HE', 'DOES', 'NOT', 'VISIT', 'OFTEN'] +3080-5032-0010-322: ref=['I', 'THINK', 
'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0010-322: hyp=['I', 'THINK', 'MY', 'YOUNGEST', 'BROTHER', 'COMES', 'DOWN', 'WITH', 'HIM'] +3080-5032-0011-323: ref=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUTS', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0011-323: hyp=['I', 'CAN', 'NO', 'SOONER', 'GIVE', 'YOU', 'SOME', 'LITTLE', 'HINTS', 'WHEREABOUT', 'THEY', 'LIVE', 'BUT', 'YOU', 'KNOW', 'THEM', 'PRESENTLY', 'AND', 'I', 'MEANT', 'YOU', 'SHOULD', 'BE', 'BEHOLDING', 'TO', 'ME', 'FOR', 'YOUR', 'ACQUAINTANCE'] +3080-5032-0012-324: ref=['BUT', 'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0012-324: hyp=['BUT', 'IT', 'SEEMS', 'THIS', 'GENTLEMAN', 'IS', 'NOT', 'SO', 'EASY', 'ACCESS', 'BUT', 'YOU', 'MAY', 'ACKNOWLEDGE', 'SOMETHING', 'DUE', 'TO', 'ME', 'IF', 'I', 'INCLINE', 'HIM', 'TO', 'LOOK', 'GRACIOUSLY', 'UPON', 'YOU', 'AND', 'THEREFORE', 'THERE', 'IS', 'NOT', 'MUCH', 'HARM', 'DONE'] +3080-5032-0013-325: ref=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0013-325: hyp=['I', 'HAVE', 'MISSED', 'FOUR', 'FITS', 'AND', 'HAVE', 'HAD', 'BUT', 'FIVE', 'AND', 'HAVE', 'RECOVERED', 'SO', 'MUCH', 'STRENGTH', 'AS', 'MADE', 'ME', 'VENTURE', 'TO', 'MEET', 'YOUR', 'LETTER', 'ON', 'WEDNESDAY', 'A', 'MILE', 'FROM', 'HOME'] +3080-5032-0014-326: ref=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0014-326: hyp=['BUT', 'BESIDES', 'I', 'CAN', 'GIVE', 'YOU', 'OTHERS'] +3080-5032-0015-327: ref=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNT', 'AND', 'SUCH', 'AS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'AND', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'WITH', 'THEIR', 'GOOD', 'NATURE', 'AND', 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0015-327: hyp=['I', 'AM', 'HERE', 'MUCH', 'MORE', 'OUT', 'OF', "PEOPLE'S", 'WAY', 'THAN', 'IN', 'TOWN', 'WHERE', 'MY', 'AUNTS', 'IN', 'SUCH', 'HAS', 'PRETEND', 'AN', 'INTEREST', 'IN', 'ME', 'AND', 'A', 'POWER', 'OVER', 'ME', 'DO', 'SO', 'PERSECUTE', 'ME', 'MY', 'DEAR', 'GOOD', 'NATURE', "YOU'LL", 'TAKE', 'IT', 'SO', 'ILL', 'THAT', 'THEY', 'ARE', 'NOT', 'ACCEPTED', 'AS', 'I', 'WOULD', 'LIVE', 'IN', 'A', 'HOLLOW', 'TREE', 'TO', 'AVOID', 'THEM'] +3080-5032-0016-328: ref=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0016-328: hyp=['YOU', 'WILL', 'THINK', 'HIM', 'ALTERED', 'AND', 'IF', 'IT', 'BE', 'POSSIBLE', 'MORE', 'MELANCHOLY', 'THAN', 'HE', 'WAS'] +3080-5032-0017-329: ref=['IF', 'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'SCAPE', 'IT'] +3080-5032-0017-329: hyp=['IF', 'MARRIAGE', 'AGREES', 'NO', 'BETTER', 'WITH', 'OTHER', 'PEOPLE', 'THAN', 'IT', 'DOES', 
'WITH', 'HIM', 'I', 'SHALL', 'PRAY', 'THAT', 'ALL', 'MY', 'FRIENDS', 'MAY', 'ESCAPE', 'IT'] +3080-5032-0018-330: ref=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'AT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMOURS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 'ENVYING', 'THEIR', 'NEIGHBOURS'] +3080-5032-0018-330: hyp=['WELL', 'IN', 'EARNEST', 'IF', 'I', 'WERE', 'A', 'PRINCE', 'THAT', 'LADY', 'SHOULD', 'BE', 'MY', 'MISTRESS', 'BUT', 'I', 'CAN', 'GIVE', 'NO', 'RULE', 'TO', 'ANY', 'ONE', 'ELSE', 'AND', 'PERHAPS', 'THOSE', 'THAT', 'ARE', 'IN', 'NO', 'DANGER', 'OF', 'LOSING', 'THEIR', 'HEARTS', 'TO', 'HER', 'MAY', 'BE', 'INFINITELY', 'TAKEN', 'WITH', 'ONE', 'I', 'SHOULD', 'NOT', 'VALUE', 'AT', 'ALL', 'FOR', 'SO', 'SAYS', 'THE', 'JUSTINIAN', 'WISE', 'PROVIDENCE', 'HAS', 'ORDAINED', 'IT', 'THAT', 'BY', 'THEIR', 'DIFFERENT', 'HUMANS', 'EVERYBODY', 'MIGHT', 'FIND', 'SOMETHING', 'TO', 'PLEASE', 'THEMSELVES', 'WITHAL', 'WITHOUT', 'ENVYING', 'THEIR', 'NEIGHBOURS'] +3080-5032-0019-331: ref=['THE', 'MATTER', 'IS', 'NOT', 'GREAT', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', "IN'T", 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0019-331: hyp=['THE', 'MATTER', 'IS', 'NOT', 'GREEN', 'FOR', 'I', 'CONFESS', 'I', 'DO', 'NATURALLY', 'HATE', 'THE', 'NOISE', 'AND', 'TALK', 'OF', 'THE', 'WORLD', 'AND', 'SHOULD', 'BE', 'BEST', 'PLEASED', 'NEVER', 'TO', 'BE', 'KNOWN', 'IN', 'UPON', 'ANY', 'OCCASION', 'WHATSOEVER', 'YET', 'SINCE', 'IT', 'CAN', 'NEVER', 'BE', 'WHOLLY', 'AVOIDED', 'ONE', 'MUST', 'SATISFY', 'ONESELF', 'BY', 'DOING', 'NOTHING', 'THAT', 'ONE', 'NEED', 'CARE', 'WHO', 'KNOWS'] +3080-5032-0020-332: ref=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0020-332: hyp=['IF', 'I', 'HAD', 'A', 'PICTURE', 'THAT', 'WERE', 'FIT', 'FOR', 'YOU', 'YOU', 'SHOULD', 'HAVE', 'IT'] +3080-5032-0021-333: ref=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0021-333: hyp=['HOW', 'CAN', 'YOU', 'TALK', 'OF', 'DEFYING', 'FORTUNE', 'NOBODY', 'LIVES', 'WITHOUT', 'IT', 'AND', 'THEREFORE', 'WHY', 'SHOULD', 'YOU', 'IMAGINE', 'YOU', 'COULD'] +3080-5032-0022-334: ref=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] +3080-5032-0022-334: hyp=['I', 'KNOW', 'NOT', 'HOW', 'MY', 'BROTHER', 'COMES', 'TO', 'BE', 'SO', 'WELL', 'INFORMED', 'AS', 'YOU', 'SAY', 'BUT', 'I', 'AM', 'CERTAIN', 'HE', 'KNOWS', 'THE', 'UTMOST', 'OF', 'THE', 'INJURIES', 'YOU', 'HAVE', 'RECEIVED', 'FROM', 'HER'] 
+3080-5032-0023-335: ref=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0023-335: hyp=['WE', 'HAVE', 'HAD', 'ANOTHER', 'DEBATE', 'BUT', 'MUCH', 'MORE', 'CALMLY'] +3080-5032-0024-336: ref=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0024-336: hyp=['AND', 'BESIDES', 'THERE', 'WAS', 'A', 'TIME', 'WHEN', 'WE', 'OURSELVES', 'WERE', 'INDIFFERENT', 'TO', 'ONE', 'ANOTHER', 'DID', 'I', 'DO', 'SO', 'THEN', 'OR', 'HAVE', 'I', 'LEARNED', 'IT', 'SINCE'] +3080-5032-0025-337: ref=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEKE', 'MIGHT', 'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VERILY', 'BELIEVE', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0025-337: hyp=['I', 'HAVE', 'BEEN', 'STUDYING', 'HOW', 'TOM', 'CHEEK', 'MIGHT', 'COME', 'BY', 'HIS', 'INTELLIGENCE', 'AND', 'I', 'VERY', 'BELIEVE', 'HE', 'HAS', 'IT', 'FROM', 'MY', 'COUSIN', 'PETERS'] +3080-5032-0026-338: ref=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THESE', 'CIVILITIES', 'OF', 'YOUR', "FATHER'S", 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5032-0026-338: hyp=['HOW', 'KINDLY', 'DO', 'I', 'TAKE', 'THE', 'CIVILITIES', 'OF', 'YOUR', 'FATHERS', 'IN', 'EARNEST', 'YOU', 'CANNOT', 'IMAGINE', 'HOW', 'HIS', 'LETTER', 'PLEASED', 'ME'] +3080-5040-0000-278: ref=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0000-278: hyp=['WOULD', 'IT', 'WOULD', 'LEAVE', 'ME', 'AND', 'THEN', 'I', 'COULD', 'BELIEVE', 'I', 'SHALL', 'NOT', 'ALWAYS', 'HAVE', 'OCCASION', 'FOR', 'IT'] +3080-5040-0001-279: ref=['MY', 'POOR', 'LADY', 'VAVASOUR', 'IS', 'CARRIED', 'TO', 'THE', 'TOWER', 'AND', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'AND', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0001-279: hyp=['MY', 'POOR', 'LADY', 'VAVASOR', 'IS', 'CHARACTERED', 'A', 'TOWER', 'IN', 'HER', 'GREAT', 'BELLY', 'COULD', 'NOT', 'EXCUSE', 'HER', 'BECAUSE', 'SHE', 'WAS', 'ACQUAINTED', 'BY', 'SOMEBODY', 'THAT', 'THERE', 'WAS', 'A', 'PLOT', 'AGAINST', 'THE', 'PROTECTOR', 'ANNE', 'DID', 'NOT', 'DISCOVER', 'IT'] +3080-5040-0002-280: ref=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'TALMASH'] +3080-5040-0002-280: hyp=['SHE', 'HAS', 'TOLD', 'NOW', 'ALL', 'THAT', 'WAS', 'TOLD', 'HER', 'BUT', 'VOWS', 'SHE', 'WILL', 'NEVER', 'SAY', 'FROM', 'WHENCE', 'SHE', 'HAD', 'IT', 'WE', 'SHALL', 'SEE', 'WHETHER', 'HER', 'RESOLUTIONS', 'ARE', 'AS', 'UNALTERABLE', 'AS', 'THOSE', 'OF', 'MY', 'LADY', 'THOMMISH'] +3080-5040-0003-281: ref=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0003-281: hyp=['I', 'WONDER', 'HOW', 'SHE', 'BEHAVED', 'HERSELF', 'WHEN', 'SHE', 'WAS', 'MARRIED'] +3080-5040-0004-282: ref=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 
'WHO', 'HAD', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', "IN'T", 'BUT', 'THEMSELVES'] +3080-5040-0004-282: hyp=['I', 'NEVER', 'SAW', 'ANY', 'ONE', 'YET', 'THAT', 'DID', 'NOT', 'LOOK', 'SIMPLY', 'AND', 'OUT', 'OF', 'COUNTENANCE', 'NOR', 'EVER', 'KNEW', 'A', 'WEDDING', 'WELL', 'DESIGNED', 'BUT', 'ONE', 'AND', 'THAT', 'WAS', 'OF', 'TWO', 'PERSONS', 'WHO', 'AT', 'TIME', 'ENOUGH', 'I', 'CONFESS', 'TO', 'CONTRIVE', 'IT', 'AND', 'NOBODY', 'TO', 'PLEASE', 'IN', 'BUT', 'THEMSELVES'] +3080-5040-0005-283: ref=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] +3080-5040-0005-283: hyp=['THE', 'TRUTH', 'IS', 'I', 'COULD', 'NOT', 'ENDURE', 'TO', 'BE', 'MISSUS', 'BRIDE', 'IN', 'A', 'PUBLIC', 'WEDDING', 'TO', 'BE', 'MADE', 'THE', 'HAPPIEST', 'PERSON', 'ON', 'EARTH'] +3080-5040-0006-284: ref=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0006-284: hyp=['DO', 'NOT', 'TAKE', 'IT', 'ILL', 'FOR', 'I', 'WOULD', 'ENDURE', 'IT', 'IF', 'I', 'COULD', 'RATHER', 'THAN', 'FAIL', 'BUT', 'IN', 'EARNEST', 'I', 'DO', 'NOT', 'THINK', 'IT', 'WERE', 'POSSIBLE', 'FOR', 'ME'] +3080-5040-0007-285: ref=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTERS', 'TO', 'ME', 'NOR', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0007-285: hyp=['YET', 'IN', 'EARNEST', 'YOUR', 'FATHER', 'WILL', 'NOT', 'FIND', 'MY', 'BROTHER', 'PEYTON', 'WANTING', 'IN', 'CIVILITY', 'THOUGH', 'HE', 'IS', 'NOT', 'A', 'MAN', 'OF', 'MUCH', 'COMPLIMENT', 'UNLESS', 'IT', 'BE', 'IN', 'HIS', 'LETTER', 'TO', 'ME', 'NO', 'AN', 'UNREASONABLE', 'PERSON', 'IN', 'ANYTHING', 'SO', 'HE', 'WILL', 'ALLOW', 'HIM', 'OUT', 'OF', 'HIS', 'KINDNESS', 'TO', 'HIS', 'WIFE', 'TO', 'SET', 'A', 'HIGHER', 'VALUE', 'UPON', 'HER', 'SISTER', 'THAN', 'SHE', 'DESERVES'] +3080-5040-0008-286: ref=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'AGONE', 'THAN', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0008-286: hyp=['MY', 'AUNT', 'TOLD', 'ME', 'NO', 'LONGER', 'GONE', 'THAN', 'YESTERDAY', 'THAT', 'I', 'WAS', 'THE', 'MOST', 'WILFUL', 'WOMAN', 'THAT', 'EVER', 'SHE', 'KNEW', 'AND', 'HAD', 'AN', 'OBSTINACY', 'OF', 'SPIRIT', 'NOTHING', 'COULD', 'OVERCOME', 'TAKE', 'HEED'] +3080-5040-0009-287: ref=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0009-287: hyp=['YOU', 'SEE', 'I', 'GIVE', 'YOU', 'FAIR', 'WARNING'] +3080-5040-0010-288: ref=['BY', 'THE', 'NEXT', 'I', 'SHALL', 'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0010-288: hyp=['BY', 'THE', 'NEXT', 'I', 'SHALL', 'BE', 'GONE', 'INTO', 'KENT', 'AND', 'MY', 'OTHER', 'JOURNEY', 'IS', 
'LAID', 'ASIDE', 'WHICH', 'I', 'AM', 'NOT', 'DISPLEASED', 'AT', 'BECAUSE', 'IT', 'WOULD', 'HAVE', 'BROKEN', 'OUR', 'INTERCOURSE', 'VERY', 'MUCH'] +3080-5040-0011-289: ref=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', "COWLEY'S", 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0011-289: hyp=['HERE', 'ARE', 'SOME', 'VERSES', 'OF', 'CARLIS', 'TELL', 'ME', 'HOW', 'YOU', 'LIKE', 'THEM'] +3080-5040-0012-290: ref=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLK', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] +3080-5040-0012-290: hyp=['I', 'TOLD', 'YOU', 'IN', 'MY', 'LAST', 'THAT', 'MY', 'SUFFOLK', 'JOURNEY', 'WAS', 'LAID', 'ASIDE', 'AND', 'THAT', 'INTO', 'KENT', 'HASTENED'] +3080-5040-0013-291: ref=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'IT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0013-291: hyp=['IF', 'I', 'DROWN', 'BY', 'THE', 'WAY', 'THIS', 'WILL', 'BE', 'MY', 'LAST', 'LETTER', 'AND', 'LIKE', 'A', 'WILL', 'I', 'BEQUEATH', 'ALL', 'MY', 'KINDNESS', 'TO', 'YOU', 'IN', 'IT', 'WITH', 'A', 'CHARGE', 'NEVER', 'TO', 'BESTOW', 'AT', 'ALL', 'UPON', 'ANOTHER', 'MISTRESS', 'LEST', 'MY', 'GHOST', 'RISE', 'AGAIN', 'AND', 'HAUNT', 'YOU'] +3080-5040-0014-292: ref=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0014-292: hyp=['INDEED', 'I', 'LIKE', 'HIM', 'EXTREMELY', 'AND', 'HE', 'IS', 'COMMENDED', 'TO', 'ME', 'BY', 'PEOPLE', 'THAT', 'KNOW', 'HIM', 'VERY', 'WELL', 'AND', 'ARE', 'ABLE', 'TO', 'JUDGE', 'FOR', 'A', 'MOST', 'EXCELLENT', 'SERVANT', 'AND', 'FAITHFUL', 'AS', 'POSSIBLE'] +3080-5040-0015-293: ref=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0015-293: hyp=['BECAUSE', 'YOU', 'FIND', 'FAULT', 'WITH', 'MY', 'OTHER', 'LETTERS', 'THIS', 'IS', 'LIKE', 'TO', 'BE', 'SHORTER', 'THAN', 'THEY', 'I', 'DID', 'NOT', 'INTEND', 'IT', 'SO', 'THOUGH', 'I', 'CAN', 'ASSURE', 'YOU'] +3080-5040-0016-294: ref=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0016-294: hyp=['I', 'DO', 'NOT', 'FIND', 'IT', 'THOUGH', 'I', 'AM', 'TOLD', 'I', 'WAS', 'SO', 'EXTREMELY', 'WHEN', 'I', 'BELIEVED', 'YOU', 'LOVED', 'ME'] +3080-5040-0017-295: ref=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0017-295: hyp=['BUT', 'I', 'AM', 'CALLED', 'UPON'] +3080-5040-0018-296: ref=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0018-296: hyp=['DIRECTED', 'FOR', 'YOUR', 'MASTER'] +3080-5040-0019-297: ref=['I', 'SEE', 'YOU', 'CAN', 'CHIDE', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0019-297: hyp=['I', 'SEE', 'YOU', 'CAN', 'CHID', 'WHEN', 'YOU', 'PLEASE', 'AND', 'WITH', 'AUTHORITY', 'BUT', 'I', 'DESERVE', 'IT', 'I', 'CONFESS', 'AND', 'ALL', 'I', 
'CAN', 'SAY', 'FOR', 'MYSELF', 'IS', 'THAT', 'MY', 'FAULT', 'PROCEEDED', 'FROM', 'A', 'VERY', 'GOOD', 'PRINCIPLE', 'IN', 'ME'] +3080-5040-0020-298: ref=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'ON', 'ONE', 'SIDE', 'OF', 'OUR', 'MOUTHS', 'THAN', "T'OTHER", 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0020-298: hyp=['WE', 'DARE', 'NOT', 'LET', 'OUR', 'TONGUES', 'LIE', 'MORE', 'AND', 'ONE', 'SIDE', 'OF', 'OUR', 'MOTHS', 'THAN', 'THE', 'OTHER', 'FOR', 'FEAR', 'OF', 'OVERTURNING', 'IT'] +3080-5040-0021-299: ref=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'ERE', 'THIS', 'THAT', 'I', 'SCAPED', 'DROWNING'] +3080-5040-0021-299: hyp=['YOU', 'ARE', 'SATISFIED', 'I', 'HOPE', 'IF', 'THIS', 'THAT', 'I', 'ESCAPED', 'DROWNING'] +3080-5040-0022-300: ref=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRIT', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0022-300: hyp=['BUT', 'I', 'AM', 'TROUBLED', 'MUCH', 'YOU', 'SHOULD', 'MAKE', 'SO', 'ILL', 'A', 'JOURNEY', 'TO', 'SO', 'LITTLE', 'PURPOSE', 'INDEED', 'I', 'WRITE', 'BY', 'THE', 'FIRST', 'POST', 'AFTER', 'MY', 'ARRIVAL', 'HERE', 'AND', 'CANNOT', 'IMAGINE', 'HOW', 'YOU', 'CAME', 'TO', 'MISS', 'OF', 'MY', 'LETTERS'] +3080-5040-0023-301: ref=['HOW', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0023-301: hyp=['OH', 'WELCOME', 'YOU', 'WILL', 'BE', 'BUT', 'ALAS'] +3080-5040-0024-302: ref=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AN', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0024-302: hyp=['FOR', 'MY', 'LIFE', 'I', 'CANNOT', 'BEAT', 'INTO', 'THEIR', 'HEADS', 'A', 'PASSION', 'THAT', 'MUST', 'BE', 'SUBJECT', 'TO', 'NO', 'DECAY', 'AND', 'EVEN', 'PERFECT', 'KINDNESS', 'THAT', 'MUST', 'LAST', 'PERPETUALLY', 'WITHOUT', 'THE', 'LEAST', 'INTERMISSION'] +3080-5040-0025-303: ref=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0025-303: hyp=['THEY', 'LAUGH', 'TO', 'HEAR', 'ME', 'SAY', 'THAT', 'ONE', 'UNKIND', 'WORD', 'WOULD', 'DESTROY', 'ALL', 'THE', 'SATISFACTION', 'OF', 'MY', 'LIFE', 'AND', 'THAT', 'I', 'SHOULD', 'EXPECT', 'OUR', 'KINDNESS', 'SHOULD', 'INCREASE', 'EVERY', 'DAY', 'IF', 'IT', 'WERE', 'POSSIBLE', 'BUT', 'NEVER', 'LESSEN'] +3080-5040-0026-304: ref=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', 'OUR', 'PRAYERS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0026-304: hyp=['WE', 'GO', 'ABROAD', 'ALL', 'DAY', 'AND', 'PLAY', 'ALL', 'NIGHT', 'AND', 'SAY', "I'LL", 'PRAY', 'AS', 'WHEN', 'WE', 'HAVE', 'TIME'] +3080-5040-0027-305: ref=['WELL', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'A', 'TWELVEMONTH', 'TO', 'GAIN', 'ALL', 'THAT', 'THE', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0027-305: hyp=['WHILE', 'IN', 'SOBER', 'EARNEST', 'NOW', 'I', 'WOULD', 'NOT', 'LIVE', 'THUS', 'A', 'TWELVE', 'MONTHS', 'TO', 'GAIN', 'ALL', 'THAT', 'KING', 'HAS', 'LOST', 'UNLESS', 'IT', 'WERE', 'TO', 
'GIVE', 'IT', 'HIM', 'AGAIN'] +3080-5040-0028-306: ref=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0028-306: hyp=['WILL', 'YOU', 'BE', 'SO', 'GOOD', 'NATURED'] +3080-5040-0029-307: ref=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', "E'ER", 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0029-307: hyp=['HE', 'HAS', 'ONE', 'SON', 'AND', 'TIS', 'THE', 'FINEST', 'BOY', 'THAT', 'ERE', 'YOU', 'SAW', 'AND', 'HAS', 'A', 'NOBLE', 'SPIRIT', 'BUT', 'YET', 'STANDS', 'IN', 'THAT', 'AWE', 'OF', 'HIS', 'FATHER', 'THAT', 'ONE', 'WORD', 'FROM', 'HIM', 'IS', 'AS', 'MUCH', 'AS', 'TWENTY', 'WHIPPINGS'] +3080-5040-0030-308: ref=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOU', 'THUS', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0030-308: hyp=['YOU', 'MUST', 'GIVE', 'ME', 'LEAVE', 'TO', 'ENTERTAIN', 'YOURSELVES', 'WITH', 'DISCOURSES', 'OF', 'THE', 'FAMILY', 'FOR', 'I', 'CAN', 'TELL', 'YOU', 'NOTHING', 'ELSE', 'FROM', 'HENCE'] +3080-5040-0031-309: ref=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'WOULD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NOR', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'OF', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0031-309: hyp=['NOT', 'TO', 'KNOW', 'WHEN', 'YOU', 'HAD', 'COME', 'HOME', 'I', 'CAN', 'ASSURE', 'YOU', 'NO', 'FOR', 'ANY', 'OTHER', 'OCCASION', 'ON', 'MY', 'OWN', 'BUT', 'WITH', 'A', 'COUSIN', 'OF', 'MINE', 'THAT', 'HAD', 'LONG', 'DESIGNED', 'TO', 'MAKE', 'HERSELF', 'SPORT', 'WITH', 'HIM', 'AND', 'DID', 'NOT', 'MISS', 'OF', 'HER', 'AIM'] +3080-5040-0032-310: ref=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PASSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0032-310: hyp=['IN', 'MY', 'LIFE', 'I', 'NEVER', 'HEARD', 'SO', 'RIDICULOUS', 'A', 'DISCOURSE', 'AS', 'HE', 'MADE', 'US', 'AND', 'NO', 'OLD', 'WOMAN', 'WHO', 'PAUSES', 'FOR', 'A', 'WITCH', 'COULD', 'HAVE', 'BEEN', 'MORE', 'PUZZLED', 'TO', 'SEEK', 'WHAT', 'TO', 'SAY', 'TO', 'REASONABLE', 'PEOPLE', 'THAN', 'HE', 'WAS'] +3080-5040-0033-311: ref=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'OF', 'THIS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEAS', 'COD', 'WITH', 'NINE', 'PEAS', "IN'T", 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'AND', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] +3080-5040-0033-311: hyp=['EVER', 'SINCE', 'THIS', 'ADVENTURE', 'I', 'HAVE', 'HAD', 'SO', 'GREAT', 'A', 'BELIEF', 'IN', 'ALL', 'THINGS', 'OF', 'THIS', 'NATURE', 'THAT', 'I', 'COULD', 'NOT', 'FORBEAR', 'LAYING', 'A', 'PEASE', 'COT', 'WITH', 'NINE', 'PEAS', 'INTO', 'UNDER', 'MY', 'DOOR', 'YESTERDAY', 'IT', 'WAS', 'INFORMED', 'BY', 'IT', 'THAT', 'MY', "HUSBAND'S", 'NAME', 'SHOULD', 'BE', 'THOMAS', 'HOW', 'DO', 'YOU', 'LIKE', 'THAT'] +3331-159605-0000-695: ref=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HER', 'SKIRT', 
'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PUTTEL', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0000-695: hyp=['SHE', 'PULLED', 'HER', 'HAIR', 'DOWN', 'TURNED', 'HIS', 'GOOD', 'BACK', 'PUT', 'HER', 'FEET', 'ON', 'THE', 'FENDER', 'AND', 'TOOK', 'PATTERN', 'INTO', 'HER', 'LAP', 'ALL', 'OF', 'WHICH', 'ARRANGEMENTS', 'SIGNIFIED', 'THAT', 'SOMETHING', 'VERY', 'IMPORTANT', 'HAD', 'GOT', 'TO', 'BE', 'THOUGHT', 'OVER', 'AND', 'SETTLED'] +3331-159605-0001-696: ref=['THE', 'MORE', 'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0001-696: hyp=['THE', 'MORE', 'PROPOSALS', 'THE', 'MORE', 'CREDIT'] +3331-159605-0002-697: ref=['I', 'VE', 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0002-697: hyp=["I'VE", 'TRIED', 'IT', 'AND', 'LIKED', 'IT', 'AND', 'MAYBE', 'THIS', 'IS', 'THE', 'CONSEQUENCE', 'OF', 'THAT', "NIGHT'S", 'FUN'] +3331-159605-0003-698: ref=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0003-698: hyp=['JUST', 'SUPPOSE', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'DOES', 'ASK', 'ME', 'AND', 'I', 'SAY', 'YES'] +3331-159605-0004-699: ref=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0004-699: hyp=['WHAT', 'A', 'SPITEFUL', 'THING', 'I', 'AM'] +3331-159605-0005-700: ref=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0005-700: hyp=['I', 'COULD', 'DO', 'SO', 'MUCH', 'FOR', 'ALL', 'AT', 'HOME', 'HOW', 'I', 'SHOULD', 'ENJOY', 'THAT'] +3331-159605-0006-701: ref=['LET', 'ME', 'SEE', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0006-701: hyp=['LET', 'ME', 'SEE', 'HOW', 'CAN', 'I', 'BEGIN'] +3331-159605-0007-702: ref=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0007-702: hyp=['HE', 'HAS', 'KNOWN', 'HER', 'ALL', 'HER', 'LIFE', 'AND', 'HAS', 'A', 'GOOD', 'INFLUENCE', 'OVER', 'HER'] +3331-159605-0008-703: ref=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THAT', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0008-703: hyp=['NOW', 'AS', 'POLLY', 'WAS', 'BY', 'NO', 'MEANS', 'A', 'PERFECT', 'CREATURE', 'I', 'AM', 'FREE', 'TO', 'CONFESS', 'THAT', 'THE', 'OLD', 'TEMPTATION', 'ASSAILED', 'HER', 'MORE', 'THAN', 'ONCE', 'THE', 'WEEK', 'FOR', 'WHEN', 'THE', 'FIRST', 'EXCITEMENT', 'OF', 'THE', 'DODGING', 'REFORM', 'HAD', 'SUBSIDED', 'SHE', 'MISSED', 'THE', 'PLEASANT', 'LITTLE', 'INTERVIEWS', 'THAT', 'USED', 'TO', 'PUT', 'A', 'CERTAIN', 'FLAVOUR', 'OF', 'ROMANCE', 'INTO', 'HER', 'DULL', 'HARD', 'WORKING', 'DAYS'] +3331-159605-0009-704: ref=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'ACCOMPLISHMENTS', 'OR', 'POSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] +3331-159605-0009-704: hyp=['I', "DON'T", 'THINK', 'IT', 'WAS', 'HIS', 'WEALTH', 'THE', 
'ACCOMPLISHMENTS', 'OPPOSITION', 'THAT', 'MOST', 'ATTRACTED', 'POLLY', 'THOUGH', 'THESE', 'DOUBTLESS', 'POSSESSED', 'A', 'GREATER', 'INFLUENCE', 'THAN', 'SHE', 'SUSPECTED'] +3331-159605-0010-705: ref=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'WITH', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0010-705: hyp=['IT', 'WAS', 'THAT', 'INDESCRIBABLE', 'SOMETHING', 'WHICH', 'WOMEN', 'ARE', 'QUICK', 'TO', 'SEE', 'AND', 'FEEL', 'IN', 'MEN', 'WHO', 'HAVE', 'BEEN', 'BLESSED', 'THE', 'WISE', 'AND', 'GOOD', 'MOTHERS'] +3331-159605-0011-706: ref=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 'FOUND', 'THAT', 'THIS', 'SIDE', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERY', 'ONE'] +3331-159605-0011-706: hyp=['THIS', 'HAD', 'AN', 'ESPECIAL', 'CHARM', 'TO', 'POLLY', 'FOR', 'SHE', 'SOON', 'FOUND', 'THAT', 'THIS', 'SIGHT', 'OF', 'HIS', 'CHARACTER', 'WAS', 'NOT', 'SHOWN', 'TO', 'EVERY', 'ONE'] +3331-159605-0012-707: ref=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FLATTERED', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0012-707: hyp=['LATELY', 'THIS', 'HAD', 'CHANGED', 'ESPECIALLY', 'TOWARDS', 'POLLY', 'AND', 'IT', 'FURTHER', 'HER', 'MORE', 'THAN', 'SHE', 'WOULD', 'CONFESS', 'EVEN', 'TO', 'HERSELF'] +3331-159605-0013-708: ref=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'GRATITUDE'] +3331-159605-0013-708: hyp=['AT', 'FIRST', 'SHE', 'TRIED', 'TO', 'THINK', 'SHE', 'COULD', 'BUT', 'UNFORTUNATELY', 'HEARTS', 'ARE', 'SO', 'CONTRARY', 'THAT', 'THEY', "WON'T", 'BE', 'OBEDIENT', 'TO', 'REASON', 'WILL', 'OR', 'EVEN', 'CREDITUDE'] +3331-159605-0014-709: ref=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LOVE', 'WHICH', 'IS', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0014-709: hyp=['POLLY', 'FELT', 'A', 'VERY', 'CORDIAL', 'FRIENDSHIP', 'FOR', 'MISTER', 'SYDNEY', 'BUT', 'NOT', 'ONE', 'PARTICLE', 'OF', 'THE', 'LAW', 'PITCHED', 'THE', 'ONLY', 'COIN', 'IN', 'WHICH', 'LOVE', 'CAN', 'BE', 'TRULY', 'PAID'] +3331-159605-0015-710: ref=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', 'GUISE', 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'WITH', 'A', 'SPICE', 'OF', 'THE', 'OLD', 'EVE', 'IN', 'HER', 'COMPOSITION'] +3331-159605-0015-710: hyp=['THIS', 'FINISHED', "POLLY'S", 'INDECISION', 'AND', 'AFTER', 'THAT', 'NIGHT', 'SHE', 'NEVER', 'ALLOWED', 'HERSELF', 'TO', 'DWELL', 'UPON', 'THE', 'PLEASANT', 'TEMPTATION', 'WHICH', 'CAME', 'IN', 'A', "GUY'S", 'PARTICULARLY', 'ATTRACTIVE', 'TO', 'A', 'YOUNG', 'GIRL', 'BUT', 'THE', 'SPIES', 'OF', 'THE', 'OLD', 'EVE', 'AND', 'HER', 'COMPOSITION'] +3331-159605-0016-711: ref=['WHEN', 'SATURDAY', 'CAME', 'POLLY', 'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', 'COULD', "N'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHAWS', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THOUGH', 'IT', 'WAS', 'CALLING', 'TIME'] +3331-159605-0016-711: hyp=['WHEN', 'SAID', 'CAME', 'POLLY', 
'STARTED', 'AS', 'USUAL', 'FOR', 'A', 'VISIT', 'TO', 'BECKY', 'AND', 'BESS', 'BUT', "COULDN'T", 'RESIST', 'STOPPING', 'AT', 'THE', 'SHORES', 'TO', 'LEAVE', 'A', 'LITTLE', 'PARCEL', 'FOR', 'FAN', 'THAT', 'WAS', 'CALLING', 'TIME'] +3331-159605-0017-712: ref=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DOG', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'DO', 'HER', 'DUTY'] +3331-159605-0017-712: hyp=['A', 'FOOLISH', 'LITTLE', 'SPEECH', 'TO', 'MAKE', 'TO', 'A', 'DARK', 'BUT', 'YOU', 'SEE', 'POLLY', 'WAS', 'ONLY', 'A', 'TENDER', 'HEARTED', 'GIRL', 'TRYING', 'TO', 'DO', 'HER', 'DUTY'] +3331-159605-0018-713: ref=['TAKE', 'HOLD', 'OF', 'MASTER', "CHARLEY'S", 'HAND', 'MISS', 'MAMIE', 'AND', 'WALK', 'PRETTY', 'LIKE', 'WILLY', 'AND', 'FLOSSY', 'SAID', 'THE', 'MAID'] +3331-159605-0018-713: hyp=['TAKE', 'HOLD', 'OF', 'MASSR', "CHARLIE'S", 'HAND', 'MISS', 'MAY', 'AND', 'BUCK', 'PRETTY', 'LIKE', 'BILLY', 'AND', 'FLOSSIE', 'SAID', 'THE', 'MATE'] +3331-159605-0019-714: ref=['AT', 'A', 'STREET', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOL', 'BOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0019-714: hyp=['A', 'DISTRICT', 'CORNER', 'A', 'BLACK', 'EYED', 'SCHOOLBOY', 'WAS', 'PARTING', 'FROM', 'A', 'ROSY', 'FACED', 'SCHOOL', 'GIRL', 'WHOSE', 'MUSIC', 'ROLL', 'HE', 'WAS', 'RELUCTANTLY', 'SURRENDERING'] +3331-159605-0020-715: ref=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'A', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'THAT', 'SHE', 'HAD', "N'T", 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0020-715: hyp=['HOW', 'HE', 'GOT', 'THERE', 'WAS', 'NEVER', 'VERY', 'CLEAR', 'TO', 'POLLY', 'BUT', 'THERE', 'HE', 'WAS', 'FLUSHED', 'AND', 'A', 'LITTLE', 'OUT', 'OF', 'BREATH', 'BUT', 'LOOKING', 'SO', 'GLAD', 'TO', 'SEE', 'HER', 'TILL', 'SHE', 'HAD', 'NOT', 'THE', 'HEART', 'TO', 'BE', 'STIFF', 'AND', 'COOL', 'AS', 'SHE', 'HAD', 'FULLY', 'INTENDED', 'TO', 'BE', 'WHEN', 'THEY', 'MET'] +3331-159605-0021-716: ref=['SHE', 'REALLY', 'COULD', "N'T", 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0021-716: hyp=['SHE', 'REALLY', 'COULD', 'NOT', 'HELP', 'IT', 'IT', 'WAS', 'SO', 'PLEASANT', 'TO', 'SEE', 'HIM', 'AGAIN', 'JUST', 'WHEN', 'SHE', 'WAS', 'FEELING', 'SO', 'LONELY'] +3331-159605-0022-717: ref=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'ROTHS', 'ANSWERED', 'POLLY'] +3331-159605-0022-717: hyp=['THAT', 'IS', 'THE', 'WAY', 'I', 'GET', 'TO', 'THE', 'WORSE', 'ANSWERED', 'POLLY'] +3331-159605-0023-718: ref=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'SO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0023-718: hyp=['SHE', 'DID', 'NOT', 'MEAN', 'TO', 'TELL', 'BUT', 'HIS', 'FRANKNESS', 'WAS', 'TO', 'AGREEABLE', 'SHE', 'FORGOT', 'HERSELF'] +3331-159605-0024-719: ref=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', 'DOES', 'IMPROVE', 'SHE', 'TRIES', 'TO', 'MEND', 'HER', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] 
+3331-159605-0024-719: hyp=['BUT', 'I', 'KNOW', 'HER', 'BETTER', 'AND', 'I', 'ASSURE', 'YOU', 'THAT', 'SHE', "DOESN'T", 'PROVE', 'SHE', 'TRIES', 'TO', 'MEAN', 'TO', 'FAULTS', 'THOUGH', 'SHE', "WON'T", 'OWN', 'IT', 'AND', 'WILL', 'SURPRISE', 'YOU', 'SOME', 'DAY', 'BY', 'THE', 'AMOUNT', 'OF', 'HEART', 'AND', 'SENSE', 'AND', 'GOODNESS', 'SHE', 'HAS', 'GOT'] +3331-159605-0025-720: ref=['THANK', 'YOU', 'NO'] +3331-159605-0025-720: hyp=['THANK', 'YOU', 'NO'] +3331-159605-0026-721: ref=['HOW', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0026-721: hyp=['HER', 'LOVELY', 'THE', 'PARK', 'LOOKS', 'SHE', 'SAID', 'IN', 'GREAT', 'CONFUSION'] +3331-159605-0027-722: ref=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0027-722: hyp=['ASKED', 'THE', 'ARTFUL', 'YOUNG', 'MAN', 'LAYING', 'A', 'TRAP', 'INTO', 'WHICH', 'POLLY', 'IMMEDIATELY', 'FELL'] +3331-159605-0028-723: ref=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'RESOLVED', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0028-723: hyp=['HE', 'WAS', 'QUICKER', 'TO', 'TAKE', 'A', 'HINT', 'THAN', 'SHE', 'HAD', 'EXPECTED', 'AND', 'BEING', 'BOTH', 'PROUD', 'AND', 'GENEROUS', 'WE', 'SOFT', 'TO', 'SETTLE', 'THE', 'MATTER', 'AT', 'ONCE', 'FOR', "POLLY'S", 'SAKE', 'AS', 'WELL', 'AS', 'HIS', 'OWN'] +3331-159605-0029-724: ref=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BRILLIANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', 'I', 'M', 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOYED'] +3331-159605-0029-724: hyp=['SO', 'WHEN', 'SHE', 'MADE', 'HER', 'LAST', 'BUOYANT', 'REMARK', 'HE', 'SAID', 'QUIETLY', 'WATCHING', 'HER', 'FACE', 'KEENLY', 'ALL', 'THE', 'WHILE', 'I', 'THOUGHT', 'SO', 'WELL', "I'M", 'GOING', 'OUT', 'OF', 'TOWN', 'ON', 'BUSINESS', 'FOR', 'SEVERAL', 'WEEKS', 'SO', 'YOU', 'CAN', 'ENJOY', 'YOUR', 'LITTLE', 'BIT', 'OF', 'COUNTRY', 'WITHOUT', 'BEING', 'ANNOYED', 'BY', 'ME', 'ANNOY', 'IT'] +3331-159605-0030-725: ref=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUETTE', 'IN', 'HER', 'AND', 'I', 'VE', 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TOO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WHITE', 'LIE', 'CLEVERLY'] +3331-159605-0030-725: hyp=['SHE', 'THOUGHT', 'SHE', 'HAD', 'A', 'GOOD', 'DEAL', 'OF', 'THE', 'COQUETTE', 'IN', 'HER', 'AND', "I'VE", 'NO', 'DOUBT', 'THAT', 'WITH', 'TIME', 'AND', 'TRAINING', 'SHE', 'WOULD', 'HAVE', 'BECOME', 'A', 'VERY', 'DANGEROUS', 'LITTLE', 'PERSON', 'BUT', 'NOW', 'SHE', 'WAS', 'FAR', 'TO', 'TRANSPARENT', 'AND', 'STRAIGHTFORWARD', 'BY', 'NATURE', 'EVEN', 'TO', 'TELL', 'A', 'WIDE', 'LIKE', 'LEVILY'] +3331-159605-0031-726: ref=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 'LOVER', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 
'ANSWER', 'AND', 'ACCEPTED', 'IT', 'IN', 'SILENCE'] +3331-159605-0031-726: hyp=['HE', 'WAS', 'GONE', 'BEFORE', 'SHE', 'COULD', 'DO', 'ANYTHING', 'BUT', 'LOOK', 'UP', 'AT', 'HIM', 'WITH', 'A', 'REMORSEFUL', 'FACE', 'AND', 'SHE', 'WALKED', 'ON', 'FEELING', 'THAT', 'THE', 'FIRST', 'AND', 'PERHAPS', 'THE', 'ONLY', 'LOVER', 'SHE', 'WOULD', 'EVER', 'HAVE', 'HAD', 'READ', 'HIS', 'ANSWER', 'AND', 'ACCEPTED', 'IN', 'SILENCE'] +3331-159605-0032-727: ref=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVORITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FROM', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 'AFTER', 'ALL'] +3331-159605-0032-727: hyp=['POLLY', 'DID', 'NOT', 'RETURN', 'TO', 'HER', 'FAVOURITE', 'WALK', 'TILL', 'SHE', 'LEARNED', 'FOR', 'MINNIE', 'THAT', 'UNCLE', 'HAD', 'REALLY', 'LEFT', 'TOWN', 'AND', 'THEN', 'SHE', 'FOUND', 'THAT', 'HIS', 'FRIENDLY', 'COMPANY', 'AND', 'CONVERSATION', 'WAS', 'WHAT', 'HAD', 'MADE', 'THE', 'WAY', 'SO', 'PLEASANT', 'AFTER', 'ALL'] +3331-159605-0033-728: ref=['WAGGING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0033-728: hyp=['WORKING', 'TO', 'AND', 'FRO', 'AS', 'USUAL', "WHAT'S", 'THE', 'NEWS', 'WITH', 'YOU'] +3331-159605-0034-729: ref=['PERHAPS', 'SHE', 'LL', 'JILT', 'HIM'] +3331-159605-0034-729: hyp=['PERHAPS', "SHE'LL", 'CHILLED', 'HIM'] +3331-159605-0035-730: ref=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0035-730: hyp=['UTTERLY', 'DONE', 'WITH', 'AND', 'LAID', 'UPON', 'THE', 'SHELF'] +3331-159605-0036-731: ref=['MINNIE', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'PUDDLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'RUBBERS'] +3331-159605-0036-731: hyp=['MANY', 'SAID', 'THE', 'OTHER', 'DAY', 'SHE', 'WISHED', 'SHE', 'WAS', 'A', 'PIGEON', 'SO', 'SHE', 'COULD', 'PADDLE', 'IN', 'THE', 'POTTLES', 'AND', 'NOT', 'FUSS', 'ABOUT', 'WRAPPERS'] +3331-159605-0037-732: ref=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', "N'T", 'HE', 'PROPOSED'] +3331-159605-0037-732: hyp=['NOW', "DON'T", 'BE', 'AFFECTED', 'POLLY', 'BUT', 'JUST', 'TELL', 'ME', 'LIKE', 'A', 'DEAR', 'HAS', 'NOT', 'HE', 'PROPOSED'] +3331-159605-0038-733: ref=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0038-733: hyp=["DON'T", 'YOU', 'THINK', 'HE', 'MEANS', 'TO'] +3331-159605-0039-734: ref=['TRULY', 'TRULY', 'FAN'] +3331-159605-0039-734: hyp=['TRULY', 'JULIE', 'FAN'] +3331-159605-0040-735: ref=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0040-735: hyp=['I', "DON'T", 'MEAN', 'TO', 'BE', 'PRYING', 'BUT', 'I', 'REALLY', 'THOUGHT', 'HE', 'DID'] +3331-159605-0041-736: ref=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0041-736: hyp=['WELL', 'I', 'ALWAYS', 'MEANT', 'TO', 'TRY', 'IT', 'IF', 'I', 'GOT', 'A', 'CHANCE', 'AND', 'I', 'HAVE'] +3331-159605-0042-737: ref=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0042-737: hyp=['I', 'JUST', 'GAVE', 'HIM', 'A', 'HINT', 'AND', 'HE', 'TOOK', 'IT'] +3331-159605-0043-738: ref=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 'BROKEN', 'OR', 'MIND', 'WHAT', 'SILLY', 'TATTLERS', 'SAY'] 
+3331-159605-0043-738: hyp=['HE', 'MEANT', 'TO', 'GO', 'AWAY', 'BEFORE', 'THAT', 'SO', "DON'T", 'THINK', 'HIS', 'HEART', 'IS', 'BROKEN', 'OH', 'MIND', 'WHAT', 'DITTY', 'TEDLER', 'SAY'] +3331-159605-0044-739: ref=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0044-739: hyp=['HE', 'UNDERSTOOD', 'AND', 'BEING', 'A', 'GENTLEMAN', 'MADE', 'NO', 'FUSS'] +3331-159605-0045-740: ref=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0045-740: hyp=['BUT', 'POLLY', 'IT', 'WOULD', 'HAVE', 'BEEN', 'A', 'GRAND', 'THING', 'FOR', 'YOU'] +3331-159605-0046-741: ref=['I', 'M', 'ODD', 'YOU', 'KNOW', 'AND', 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 'ALL', 'MY', 'DAYS'] +3331-159605-0046-741: hyp=["I'M", 'NOT', 'YOU', 'KNOW', "I'M", 'PREFER', 'TO', 'BE', 'AN', 'INDEPENDENT', 'SPINSTER', 'AND', 'TEACH', 'MUSIC', 'ALL', 'MY', 'DAYS'] +3331-159609-0000-742: ref=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SUFFICES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0000-742: hyp=['NEVER', 'MIND', 'WHAT', 'THE', 'BUSINESS', 'WAS', 'IT', 'SURFACES', 'TO', 'SAY', 'THAT', 'IT', 'WAS', 'A', 'GOOD', 'BEGINNING', 'FOR', 'A', 'YOUNG', 'MAN', 'LIKE', 'TOM', 'WHO', 'HAVING', 'BEEN', 'BORN', 'AND', 'BRED', 'IN', 'THE', 'MOST', 'CONSERVATIVE', 'CLASS', 'OF', 'THE', 'MOST', 'CONCEITED', 'CITY', 'IN', 'NEW', 'ENGLAND', 'NEEDED', 'JUST', 'THE', 'HEALTHY', 'HEARTY', 'SOCIAL', 'INFLUENCES', 'OF', 'THE', 'WEST', 'TO', 'WIDEN', 'HIS', 'VIEWS', 'AND', 'MAKE', 'A', 'MAN', 'OF', 'HIM'] +3331-159609-0001-743: ref=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANCE', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BOTTLED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0001-743: hyp=['FORTUNATELY', 'EVERY', 'ONE', 'WAS', 'SO', 'BUSY', 'WITH', 'THE', 'NECESSARY', 'PREPARATIONS', 'THAT', 'THERE', 'WAS', 'NO', 'TIME', 'FOR', 'ROMANS', 'OF', 'ANY', 'SORT', 'AND', 'THE', 'FOUR', 'YOUNG', 'PEOPLE', 'WORKED', 'TOGETHER', 'AS', 'SOBERLY', 'AND', 'SENSIBLY', 'AS', 'IF', 'ALL', 'SORTS', 'OF', 'EMOTIONS', 'WERE', 'NOT', 'BOTHERED', 'UP', 'IN', 'THEIR', 'RESPECTIVE', 'HEARTS'] +3331-159609-0002-744: ref=['PITY', 'THAT', 'THE', 'END', 'SHOULD', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WENT', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THOUGH', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 'PARTINGS', 'THAT', 'MUST', 'BE'] +3331-159609-0002-744: hyp=['PITY', 'THAT', 'THE', 'END', 'SHOULD', 'COME', 'SO', 'SOON', 'BUT', 'THE', 'HOUR', 'DID', 'ITS', 'WORK', 'AND', 'WHEN', 'ITS', 'WAY', 'LEAVING', 'A', 'CLEARER', 'ATMOSPHERE', 'BEHIND', 'THAN', 'THE', 'YOUNG', 'FOLKS', 'DID', 'NOT', 'SEE', 'IT', 'THEN', 'FOR', 'THEIR', 'EYES', 'WERE', 'DIM', 'BECAUSE', 'OF', 'THE', 
'PARTING', 'STEP', 'MUST', 'BE'] +3331-159609-0003-745: ref=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TILL', 'HIS', 'VACATION', 'CAME', 'AND', 'A', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0003-745: hyp=['IF', 'IT', 'HAD', 'NOT', 'BEEN', 'FOR', 'TWO', 'THINGS', 'I', 'FEAR', 'SHE', 'NEVER', 'WOULD', 'HAVE', 'STOOD', 'A', 'SUMMER', 'IN', 'TOWN', 'BUT', 'SYDNEY', 'OFTEN', 'CALLED', 'TO', 'HIS', 'VACATION', 'CAME', 'AND', 'THE', 'VOLUMINOUS', 'CORRESPONDENCE', 'WITH', 'POLLY', 'BEGUILED', 'THE', 'LONG', 'DAYS'] +3331-159609-0004-746: ref=['TOM', 'WROTE', 'ONCE', 'A', 'WEEK', 'TO', 'HIS', 'MOTHER', 'BUT', 'THE', 'LETTERS', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0004-746: hyp=['TUMBLED', 'ONES', 'A', 'WEEK', 'TO', 'HIS', 'MOTHER', 'BUT', 'THEY', 'LET', 'US', 'WERE', 'SHORT', 'AND', 'NOT', 'VERY', 'SATISFACTORY', 'FOR', 'MEN', 'NEVER', 'DO', 'TELL', 'THE', 'INTERESTING', 'LITTLE', 'THINGS', 'THAT', 'WOMEN', 'BEST', 'LIKE', 'TO', 'HEAR'] +3331-159609-0005-747: ref=['NO', 'I', 'M', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DULL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRIFLE', 'BLUE'] +3331-159609-0005-747: hyp=['NO', 'I', 'AM', 'ONLY', 'TIRED', 'HAD', 'A', 'GOOD', 'DEAL', 'TO', 'DO', 'LATELY', 'AND', 'THE', 'DOLL', 'WEATHER', 'MAKES', 'ME', 'JUST', 'A', 'TRAVEL', 'BLUE'] +3331-159609-0006-748: ref=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', 'IT', 'IS', 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0006-748: hyp=['FORGIVE', 'ME', 'POLLY', 'BUT', 'I', "CAN'T", 'HELP', 'SAYING', 'IT', 'FOR', 'THIS', 'THERE', 'AND', 'I', 'WANT', 'TO', 'BE', 'AS', 'TRUE', 'TO', 'YOU', 'AS', 'YOU', 'WERE', 'TO', 'ME', 'IF', 'I', 'CAN'] +3331-159609-0007-749: ref=['I', 'TRY', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0007-749: hyp=['I', 'TRIED', 'NOT', 'TO', 'DECEIVE', 'MYSELF', 'BUT', 'IT', 'DOES', 'SEEM', 'AS', 'IF', 'THERE', 'WAS', 'A', 'CHANCE', 'OF', 'HAPPINESS', 'FOR', 'ME'] +3331-159609-0008-750: ref=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0008-750: hyp=['THANK', 'HEAVEN', 'FOR', 'THAT'] +3331-159609-0009-751: ref=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0009-751: hyp=['CRIED', 'POLLY', 'WITH', 'THE', 'HEARTIEST', 'SATISFACTION', 'IN', 'HER', 'VOICE'] +3331-159609-0010-752: ref=['POOR', 'POLLY', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0010-752: hyp=['POOR', 'POLLY', 'WAS', 'SO', 'TAKEN', 'BY', 'SURPRISE', 'THAT', 'SHE', 'HAD', 'NOT', 'A', 'WORD', 'TO', 'SAY'] +3331-159609-0011-753: ref=['NONE', 'WERE', 'NEEDED', 'HER', 'TELLTALE', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SOFA', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'WHEN', 'THE', 'HUNTERS', 'ARE', 'AFTER', 'IT'] +3331-159609-0011-753: hyp=['NONE', 'WERE', 'NEEDED', 'HER', 'TELLS', 'HER', 'FACE', 'ANSWERED', 'FOR', 'HER', 'AS', 
'WELL', 'AS', 'THE', 'IMPULSE', 'WHICH', 'MADE', 'HER', 'HIDE', 'HER', 'HEAD', 'IN', 'THE', 'SILVER', 'CUSHION', 'LIKE', 'A', 'FOOLISH', 'OSTRICH', 'AND', 'THE', 'HANDLES', 'ARE', 'AFTER', 'IT'] +3331-159609-0012-754: ref=['ONCE', 'OR', 'TWICE', 'BUT', 'SORT', 'OF', 'JOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0012-754: hyp=['ONCE', 'OR', 'TWICE', 'THAT', 'SORT', 'OF', 'CHOKINGLY', 'AND', 'I', 'THOUGHT', 'IT', 'WAS', 'ONLY', 'SOME', 'LITTLE', 'FLIRTATION'] +3331-159609-0013-755: ref=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0013-755: hyp=['IT', 'WAS', 'SO', 'STUPID', 'OF', 'ME', 'NOT', 'TO', 'GUESS', 'BEFORE'] +3331-159609-0014-756: ref=['IT', 'WAS', 'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'IN', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEW', 'REVELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0014-756: hyp=['IT', 'WAS', 'SO', 'TENDER', 'EARNEST', 'AND', 'DEFIANT', 'THAT', 'FANNY', 'FORGOT', 'THE', 'DEFENCE', 'OF', 'HER', 'OWN', 'LOVER', 'AND', 'ADMIRATION', 'OF', "POLLY'S", 'LOYALTY', 'TO', 'HERS', 'FOR', 'THIS', 'FAITHFUL', 'ALL', 'ABSORBING', 'LOVE', 'WAS', 'A', 'NEWER', 'RELATION', 'TO', 'FANNY', 'WHO', 'WAS', 'USED', 'TO', 'HEARING', 'HER', 'FRIENDS', 'BOAST', 'OF', 'TWO', 'OR', 'THREE', 'LOVERS', 'A', 'YEAR', 'AND', 'CALCULATE', 'THEIR', 'RESPECTIVE', 'VALUES', 'WITH', 'ALMOST', 'AS', 'MUCH', 'COOLNESS', 'AS', 'THE', 'YOUNG', 'MEN', 'DISCUSSED', 'THE', 'FORTUNES', 'OF', 'THE', 'GIRLS', 'THEY', 'WISHED', 'FOR', 'BUT', 'COULD', 'NOT', 'AFFORD', 'TO', 'MARRY'] +3331-159609-0015-757: ref=['I', 'HOPE', 'MARIA', 'BAILEY', 'IS', 'ALL', 'HE', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', "N'T", 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0015-757: hyp=['I', 'HOPE', 'MARIA', 'BAILEY', 'IS', 'ONLY', 'THINKS', 'HER', 'SHE', 'ADDED', 'SOFTLY', 'FOR', 'I', 'COULD', 'NOT', 'BEAR', 'TO', 'HAVE', 'HIM', 'DISAPPOINTED', 'AGAIN'] +3331-159609-0016-758: ref=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0016-758: hyp=['SAID', 'FANNY', 'TURNING', 'HOPEFUL', 'ALL', 'AT', 'ONCE'] +3331-159609-0017-759: ref=['SUPPOSE', 'I', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', 'THERE', 'IS', 'ANYTHING', 'TO', 'TELL'] +3331-159609-0017-759: hyp=['SUPPOSE', 'HER', 'SAY', 'A', 'WORD', 'TO', 'TOM', 'JUST', 'INQUIRE', 'AFTER', 'HIS', 'HEART', 'IN', 'A', 'GENERAL', 'WAY', 'YOU', 'KNOW', 'AND', 'GIVE', 'HIM', 'A', 'CHANCE', 'TO', 'TELL', 'ME', 'IF', "THERE'S", 'ANYTHING', 'TO', 'TELL'] +3331-159609-0018-760: ref=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0018-760: hyp=['BEAR', 'IT', 'PEOPLE', 'ALWAYS', 'DO', 'BEAR', 'THINGS', 'SOMEHOW', 'ANSWERED', 'POLLY', 'LOOKING', 'AS', 
'IF', 'SENTENCE', 'HAD', 'BEEN', 'PASSED', 'UPON', 'HER'] +3331-159609-0019-761: ref=['IT', 'WAS', 'A', 'VERY', 'DIFFERENT', 'WINTER', 'FROM', 'THE', 'LAST', 'FOR', 'BOTH', 'THE', 'GIRLS'] +3331-159609-0019-761: hyp=['IT', 'WAS', 'A', 'VERY', 'DIFFERENT', 'WINDOW', 'FROM', 'THE', 'LAST', 'ABOVE', 'THE', 'GIRLS'] +3331-159609-0020-762: ref=['IF', 'FANNY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BETTER', 'THAN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SYDNEY', 'AT', 'LEAST', 'FOR', 'HE', 'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0020-762: hyp=['IF', 'ANY', 'WANTED', 'TO', 'SHOW', 'HIM', 'WHAT', 'SHE', 'COULD', 'DO', 'TOWARD', 'MAKING', 'A', 'PLEASANT', 'HOME', 'SHE', 'CERTAINLY', 'SUCCEEDED', 'BY', 'THEN', 'SHE', 'SUSPECTED', 'FOR', 'IN', 'SPITE', 'OF', 'MANY', 'FAILURES', 'AND', 'DISCOURAGEMENTS', 'BEHIND', 'THE', 'SCENES', 'THE', 'LITTLE', 'HOUSE', 'BECAME', 'A', 'MOST', 'ATTRACTIVE', 'PLACE', 'TO', 'MISTER', 'SIDNEY', 'AT', 'LEAST', 'FOR', 'HE', 'WAS', 'MORE', 'THE', 'HOUSE', 'FRIEND', 'THAN', 'EVER', 'AND', 'SEEMED', 'DETERMINED', 'TO', 'PROVE', 'THAT', 'CHANGE', 'OF', 'FORTUNE', 'MADE', 'NO', 'DIFFERENCE', 'TO', 'HIM'] +3331-159609-0021-763: ref=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'WHEN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OVER', 'BOOK', 'OR', 'SEWING', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHAWS'] +3331-159609-0021-763: hyp=['SHE', 'KEPT', 'MUCH', 'AT', 'HOME', 'IN', 'THE', "DAY'S", 'WORK', 'WAS', 'DONE', 'FINDING', 'IT', 'PLEASANTER', 'TO', 'SIT', 'DREAMING', 'OF', 'A', 'BOOK', 'OR', 'SOON', 'ALONE', 'THAN', 'TO', 'EXERT', 'HERSELF', 'EVEN', 'TO', 'GO', 'TO', 'THE', 'SHORES'] +3331-159609-0022-764: ref=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINTER', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'AND', 'WONDERED', 'AT', 'IT', 'MOST'] +3331-159609-0022-764: hyp=['POLLY', 'WAS', 'NOT', 'AT', 'ALL', 'LIKE', 'HERSELF', 'THAT', 'WINDOW', 'AND', 'THOSE', 'NEAREST', 'TO', 'HER', 'SAW', 'AND', 'WANTED', 'AT', 'IT', 'MOST'] +3331-159609-0023-765: ref=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILEY', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'UTTER', 'DARKNESS'] +3331-159609-0023-765: hyp=['FOR', 'NED', 'WAS', 'SO', 'ABSORBED', 'IN', 'BUSINESS', 'THAT', 'HE', 'IGNORED', 'THE', 'WHOLE', 'BAILIQUE', 'QUESTION', 'AND', 'LEFT', 'THEM', 'IN', 'OTHER', 'DARKNESS'] +3331-159609-0024-766: ref=['FANNY', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'BROUGHT', 'TIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0024-766: hyp=['THEN', 'HE', 'CAME', 'WALKING', 'IN', 'UPON', 'HER', 'ONE', 'DAY', 'LOOKING', 'AS', 'IF', 'SHE', 'POURED', 'HIDINGS', 'OF', 'SUCH', 'GREAT', 'JOY', 'THAT', 'SHE', 'HARDLY', 'KNEW', 'HOW', 'TO', 'TELL', 'THEM'] +3331-159609-0025-767: ref=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 
'THAN', 'ANY', 'WE', 'READ'] +3331-159609-0025-767: hyp=['BUT', 'IF', 'WORK', 'BASKETS', 'WERE', 'GIFTED', 'WITH', 'POWERS', 'OF', 'SPEECH', 'THEY', 'COULD', 'TELL', 'STORIES', 'MORE', 'TRUE', 'AND', 'TENDER', 'THAN', 'ANY', 'WE', 'READ'] +3528-168656-0000-864: ref=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0000-864: hyp=['SHE', 'HAD', 'EVEN', 'BEEN', 'IN', 'SOCIETY', 'BEFORE', 'THE', 'REVOLUTION'] +3528-168656-0001-865: ref=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0001-865: hyp=['IT', 'WAS', 'HER', 'PLEASURE', 'AND', 'HER', 'VANITY', 'TO', 'DRAG', 'IN', 'THESE', 'NAMES', 'ON', 'EVERY', 'PRETEXT'] +3528-168656-0002-866: ref=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FRANCOIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'MONSEIGNEUR', 'SAINT', 'JULIEN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'MONSEIGNEUR', 'SAINT', 'EUSEBIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0002-866: hyp=['EVERY', 'YEAR', 'SHE', 'SOLEMNLY', 'RENEWED', 'HER', 'VOWS', 'AND', 'AT', 'THE', 'MOMENT', 'OF', 'TAKING', 'THE', 'OATH', 'SHE', 'SAID', 'TO', 'THE', 'PRIEST', 'MONSEIGNEUR', 'SAINT', 'FROSOIS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'JULIAN', 'MONSEIGNEUR', 'SAINT', 'JULIAN', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'EUSIDIUS', 'MONSIEUR', 'SAINT', 'EUSIBIUS', 'GAVE', 'IT', 'TO', 'MONSEIGNEUR', 'SAINT', 'PROCOPIUS', 'ET', 'CETERA', 'ET', 'CETERA'] +3528-168656-0003-867: ref=['AND', 'THE', 'SCHOOL', 'GIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THEIR', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'VOCAL', 'MOTHERS', 'FROWN'] +3528-168656-0003-867: hyp=['AND', 'THE', 'SCHOOLGIRLS', 'WOULD', 'BEGIN', 'TO', 'LAUGH', 'NOT', 'IN', 'THEIR', 'SLEEVES', 'BUT', 'UNDER', 'THE', 'VEILS', 'CHARMING', 'LITTLE', 'STIFLED', 'LAUGHS', 'WHICH', 'MADE', 'THE', 'VOCAL', 'MOTHERS', 'FROWN'] +3528-168656-0004-868: ref=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0004-868: hyp=['IT', 'WAS', 'A', 'CENTURY', 'WHICH', 'SPOKE', 'THROUGH', 'HER', 'BUT', 'IT', 'WAS', 'THE', 'EIGHTEENTH', 'CENTURY'] +3528-168656-0005-869: ref=['THE', 'RULE', 'OF', 'FONTEVRAULT', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0005-869: hyp=['THE', 'RULE', 'OF', 'FONTREVAL', 'DID', 'NOT', 'FORBID', 'THIS'] +3528-168656-0006-870: ref=['SHE', 'WOULD', 'NOT', 'SHOW', 'THIS', 'OBJECT', 'TO', 'ANYONE'] +3528-168656-0006-870: hyp=['SHE', 'WOULD', 'NOT', 'SHOW', 'THE', 'SUBJECT', 'TO', 'ANY', 'ONE'] +3528-168656-0007-871: ref=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'UNOCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0007-871: hyp=['THUS', 'IT', 'FURNISHED', 'A', 'SUBJECT', 'OF', 'COMMENT', 'FOR', 'ALL', 'THOSE', 'WHO', 'WERE', 'ON', 'OCCUPIED', 'OR', 'BORED', 'IN', 'THE', 'CONVENT'] +3528-168656-0008-872: ref=['SOME', 'UNIQUE', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0008-872: hyp=['SOME', 'EUIK', 'CHAPLET', 'SOME', 'AUTHENTIC', 'RELIC'] +3528-168656-0009-873: ref=['THEY', 'LOST', 'THEMSELVES', 'IN', 'CONJECTURES'] +3528-168656-0009-873: 
hyp=['THEY', 'LOST', 'THEMSELVES', 'IN', 'CONJECTURES'] +3528-168656-0010-874: ref=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0010-874: hyp=['WHEN', 'THE', 'POOR', 'OLD', 'WOMAN', 'DIED', 'THEY', 'RUSHED', 'TO', 'HER', 'CUPBOARD', 'MORE', 'HASTILY', 'THAN', 'WAS', 'FITTING', 'PERHAPS', 'AND', 'OPENED', 'IT'] +3528-168656-0011-875: ref=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCER', 'IS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0011-875: hyp=['HE', 'IS', 'RESISTING', 'FLUTTERING', 'HIS', 'TINY', 'WINGS', 'AND', 'STILL', 'MAKING', 'AN', 'EFFORT', 'TO', 'FLY', 'BUT', 'THE', 'DANCERS', 'LAUGHING', 'WITH', 'A', 'SATANICAL', 'AIR'] +3528-168656-0012-876: ref=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168656-0012-876: hyp=['MORAL', 'LOVE', 'CONQUERED', 'BY', 'THE', 'COLIC'] +3528-168669-0000-877: ref=['THE', 'PRIORESS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0000-877: hyp=['THE', 'PRIORESS', 'RETURNED', 'AND', 'SEATED', 'HERSELF', 'ONCE', 'MORE', 'ON', 'HER', 'CHAIR'] +3528-168669-0001-878: ref=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0001-878: hyp=['WE', 'WILL', 'PRESENT', 'A', 'STENOGRAPHIC', 'REPORT', 'OF', 'THE', 'DIALOGUE', 'WHICH', 'THEN', 'ENSUED', 'TO', 'THE', 'BEST', 'OF', 'OUR', 'ABILITY'] +3528-168669-0002-879: ref=['FATHER', 'FAUVENT'] +3528-168669-0002-879: hyp=['FATHER', 'VUENT'] +3528-168669-0003-880: ref=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0003-880: hyp=['REVEREND', 'MOTHER', 'DO', 'YOU', 'KNOW', 'THE', 'CHAPEL'] +3528-168669-0004-881: ref=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0004-881: hyp=['AND', 'YOU', 'HAVE', 'BEEN', 'IN', 'THE', 'CHOIR', 'IN', 'PURSUANCE', 'OF', 'YOUR', 'DUTIES', 'TWO', 'OR', 'THREE', 'TIMES'] +3528-168669-0005-882: ref=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0005-882: hyp=['THERE', 'IS', 'A', 'STONE', 'TO', 'BE', 'RAISED', 'HEAVY'] +3528-168669-0006-883: ref=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'SIDE', 'OF', 'THE', 'ALTAR'] +3528-168669-0006-883: hyp=['THE', 'SLAB', 'OF', 'THE', 'PAVEMENT', 'WHICH', 'IS', 'AT', 'THE', 'THOUGHT', 'OF', 'THE', 'ALTAR'] +3528-168669-0007-884: ref=['THE', 'SLAB', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0007-884: hyp=['THE', 'FLAP', 'WHICH', 'CLOSES', 'THE', 'VAULT', 'YES'] +3528-168669-0008-885: ref=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0008-885: hyp=['IT', 'WOULD', 'BE', 'A', 'GOOD', 'THING', 'TO', 'HAVE', 'TWO', 'MEN', 'FOR', 'IT'] +3528-168669-0009-886: ref=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0009-886: hyp=['A', 'WOMAN', 'IS', 'NEVER', 'A', 'MAN'] +3528-168669-0010-887: ref=['BECAUSE', 'DOM', 'MABILLON', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLONUS', 'HORSTIUS', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', 'MERLONUS', 'HORSTIUS', 'NEITHER', 'DO', 'I'] +3528-168669-0010-887: hyp=['BECAUSE', 
'DON', 'MARVALAN', 'GIVES', 'FOUR', 'HUNDRED', 'AND', 'SEVENTEEN', 'EPISTLES', 'OF', 'SAINT', 'BERNARD', 'WHILE', 'MERLINUS', 'HORSES', 'ONLY', 'GIVES', 'THREE', 'HUNDRED', 'AND', 'SIXTY', 'SEVEN', 'I', 'DO', 'NOT', 'DESPISE', "MERLINA'S", 'HORSES', 'NEITHER', 'DO', 'I'] +3528-168669-0011-888: ref=['MERIT', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCK', 'YARD'] +3528-168669-0011-888: hyp=['MARRIAGE', 'CONSISTS', 'IN', 'WORKING', 'ACCORDING', 'TO', "ONE'S", 'STRENGTH', 'A', 'CLOISTER', 'IS', 'NOT', 'A', 'DOCKYARD'] +3528-168669-0012-889: ref=['AND', 'A', 'WOMAN', 'IS', 'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0012-889: hyp=['AND', 'A', 'WOMAN', 'IS', 'NOT', 'A', 'MAN', 'BUT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'ONE', 'THOUGH'] +3528-168669-0013-890: ref=['AND', 'CAN', 'YOU', 'GET', 'A', 'LEVER'] +3528-168669-0013-890: hyp=['AND', 'CAN', 'YOU', 'GET', 'A', 'LOVER'] +3528-168669-0014-891: ref=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0014-891: hyp=['THERE', 'IS', 'A', 'RING', 'IN', 'THE', 'STONE'] +3528-168669-0015-892: ref=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0015-892: hyp=['I', 'WILL', 'PUT', 'THE', 'LEVER', 'THROUGH', 'IT'] +3528-168669-0016-893: ref=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0016-893: hyp=['THAT', 'IS', 'GOOD', 'REVEREND', 'MOTHER', 'I', 'WILL', 'OPEN', 'THE', 'VAULT'] +3528-168669-0017-894: ref=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0017-894: hyp=['WILL', 'THAT', 'BE', 'ALL', 'NO'] +3528-168669-0018-895: ref=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0018-895: hyp=['GIVE', 'ME', 'YOUR', 'ORDERS', 'VERY', 'REVEREND', 'MOTHER'] +3528-168669-0019-896: ref=['FAUVENT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0019-896: hyp=['FOR', 'THAT', 'WE', 'HAVE', 'CONFIDENCE', 'IN', 'YOU'] +3528-168669-0020-897: ref=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0020-897: hyp=['I', 'AM', 'HERE', 'TO', 'DO', 'ANYTHING', 'YOU', 'WISH'] +3528-168669-0021-898: ref=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'REVEREND', 'MOTHER'] +3528-168669-0021-898: hyp=['AND', 'TO', 'HOLD', 'YOUR', 'PEACE', 'ABOUT', 'EVERYTHING', 'YES', 'ROBIN', 'MOTHER'] +3528-168669-0022-899: ref=['WHEN', 'THE', 'VAULT', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0022-899: hyp=['WHEN', 'THE', 'WALL', 'IS', 'OPEN', 'I', 'WILL', 'CLOSE', 'IT', 'AGAIN'] +3528-168669-0023-900: ref=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0023-900: hyp=['BUT', 'BEFORE', 'THAT', 'WHAT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: ref=['FATHER', 'FAUVENT', 'REVEREND', 'MOTHER'] +3528-168669-0024-901: hyp=['FATHER', 'FERVENT', 'REVEREND', 'MOTHER'] +3528-168669-0025-902: ref=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0025-902: hyp=['YOU', 'KNOW', 'THAT', 'A', 'MOTHER', 'DIED', 'THIS', 'MORNING'] +3528-168669-0026-903: ref=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0026-903: hyp=['NO', 'DID', 'YOU', 'NOT', 'HEAR', 'THE', 'BELL'] +3528-168669-0027-904: ref=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] +3528-168669-0027-904: hyp=['NOTHING', 'CAN', 'BE', 'HEARD', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN', 'REALLY'] +3528-168669-0028-905: ref=['AND', 'THEN', 'THE', 
'WIND', 'IS', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0028-905: hyp=['AND', 'THEN', 'THE', 'WIND', 'DOES', 'NOT', 'BLOWING', 'IN', 'MY', 'DIRECTION', 'THIS', 'MORNING'] +3528-168669-0029-906: ref=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0029-906: hyp=['IT', 'WAS', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0030-907: ref=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BETHUNE', 'A', 'JANSENIST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0030-907: hyp=['THREE', 'YEARS', 'AGO', 'MADAME', 'DE', 'BESOON', 'A', 'GENTLEST', 'TURNED', 'ORTHODOX', 'MERELY', 'FROM', 'HAVING', 'SEEN', 'MOTHER', 'CRUCIFIXION', 'AT', 'PRAYER', 'AH'] +3528-168669-0031-908: ref=['THE', 'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0031-908: hyp=['THE', 'MOTHERS', 'HAVE', 'TAKEN', 'HER', 'TO', 'THE', 'DEAD', 'ROOM', 'WHICH', 'OPENS', 'ON', 'THE', 'CHURCH', 'I', 'KNOW'] +3528-168669-0032-909: ref=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'MORE', 'OFTEN'] +3528-168669-0032-909: hyp=['A', 'FINE', 'SIGHT', 'IT', 'WOULD', 'BE', 'TO', 'SEE', 'A', 'MAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'MORE', 'OFTEN'] +3528-168669-0033-910: ref=['HEY', 'MORE', 'OFTEN'] +3528-168669-0033-910: hyp=['HEY', 'MORE', 'OFTEN'] +3528-168669-0034-911: ref=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0034-911: hyp=['WHAT', 'DO', 'YOU', 'SAY'] +3528-168669-0035-912: ref=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0035-912: hyp=['I', 'SAY', 'MORE', 'OFTEN', 'MORE', 'OFTEN', 'THAN', 'WHAT'] +3528-168669-0036-913: ref=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0036-913: hyp=['REVEREND', 'MOTHER', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN', 'THAN', 'WHAT', 'I', 'SAID', 'MORE', 'OFTEN'] +3528-168669-0037-914: ref=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0037-914: hyp=['BUT', 'I', 'DID', 'NOT', 'SAY', 'MORE', 'OFTEN'] +3528-168669-0038-915: ref=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0038-915: hyp=['AT', 'THAT', 'MOMENT', 'NINE', "O'CLOCK", 'STRUCK'] +3528-168669-0039-916: ref=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PRIORESS'] +3528-168669-0039-916: hyp=['AT', 'NINE', "O'CLOCK", 'IN', 'THE', 'MORNING', 'AND', 'AT', 'ALL', 'HOURS', 'PRAISED', 'AND', 'ADORED', 'TO', 'BE', 'THE', 'MOST', 'HOLY', 'SACRAMENT', 'OF', 'THE', 'ALTAR', 'SAID', 'THE', 'PIRATES'] +3528-168669-0040-917: ref=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0040-917: hyp=['IT', 'CUT', 'MORE', 'OFTEN', 'SHORT'] +3528-168669-0041-918: ref=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0041-918: hyp=['FAUCHELEVENT', 'MOPPED', 'HIS', 'FOREHEAD'] +3528-168669-0042-919: ref=['IN', 'HER', 'LIFETIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0042-919: hyp=['IN', 'HER', 'LIFETIME', 'MOTHER', 'CRUCIFIXION', 'MADE', 'CONVERTS', 'AFTER', 'HER', 'DEATH', 'SHE', 'WILL', 'PERFORM', 'MIRACLES', 'SHE', 'WILL'] +3528-168669-0043-920: ref=['FATHER', 'FAUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] 
+3528-168669-0043-920: hyp=['FATHER', 'FUVENT', 'THE', 'COMMUNITY', 'HAS', 'BEEN', 'BLESSED', 'IN', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0044-921: ref=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0044-921: hyp=['SHE', 'RETAINED', 'HER', 'CONSCIOUSNESS', 'TO', 'THE', 'VERY', 'LAST', 'MOMENT'] +3528-168669-0045-922: ref=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0045-922: hyp=['SHE', 'GAVE', 'US', 'HER', 'LAST', 'COMMANDS'] +3528-168669-0046-923: ref=['IF', 'YOU', 'HAD', 'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HER', 'CELL', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0046-923: hyp=['IF', 'YOU', 'HAD', 'A', 'LITTLE', 'MORE', 'FAITH', 'AND', 'IF', 'YOU', 'COULD', 'HAVE', 'BEEN', 'IN', 'HERSELF', 'SHE', 'WOULD', 'HAVE', 'CURED', 'YOUR', 'LEG', 'MERELY', 'BY', 'TOUCHING', 'IT', 'SHE', 'SMILED'] +3528-168669-0047-924: ref=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0047-924: hyp=['THERE', 'WAS', 'SOMETHING', 'OF', 'PARADISE', 'IN', 'THAT', 'DEATH'] +3528-168669-0048-925: ref=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0048-925: hyp=['FAUCHELEVENT', 'THOUGHT', 'THAT', 'IT', 'WAS', 'AN', 'ORISON', 'WHICH', 'SHE', 'WAS', 'FINISHING'] +3528-168669-0049-926: ref=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0049-926: hyp=['FAUCHELEVENT', 'HELD', 'HIS', 'PEACE', 'SHE', 'WENT', 'ON'] +3528-168669-0050-927: ref=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0050-927: hyp=['I', 'HAVE', 'CONSULTED', 'UPON', 'THIS', 'POINT', 'MANY', 'ECCLESIASTICS', 'LABORING', 'IN', 'OUR', 'LORD', 'WHO', 'OCCUPY', 'THEMSELVES', 'IN', 'THE', 'EXERCISES', 'OF', 'THE', 'CLERICAL', 'LIFE', 'AND', 'WHO', 'BEAR', 'WONDERFUL', 'FRUIT'] +3528-168669-0051-928: ref=['FORTUNATELY', 'THE', 'PRIORESS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0051-928: hyp=['FORTUNATELY', 'THE', 'PIRASS', 'COMPLETELY', 'ABSORBED', 'IN', 'HER', 'OWN', 'THOUGHTS', 'DID', 'NOT', 'HEAR', 'IT'] +3528-168669-0052-929: ref=['SHE', 'CONTINUED', 'FATHER', 'FAUVENT'] +3528-168669-0052-929: hyp=['SHE', 'CONTINUED', 'FATHER', 'PROVENCE'] +3528-168669-0053-930: ref=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0053-930: hyp=['YES', 'REVEREND', 'MOTHER'] +3528-168669-0054-931: ref=['SAINT', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TOMB', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARRICIDES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 'WAS', 'DONE'] +3528-168669-0054-931: hyp=['SAINT', 'TERENTIUS', 'BISHOP', 'OF', 'PORT', 'WHERE', 'THE', 'MOUTH', 'OF', 'THE', 'TIBER', 'EMPTIES', 'INTO', 'THE', 'SEA', 'REQUESTED', 'THAT', 'ON', 'HIS', 'TWO', 'MIGHT', 'BE', 'ENGRAVED', 'THE', 'SIGN', 'WHICH', 'WAS', 'PLACED', 'ON', 'THE', 'GRAVES', 'OF', 'PARASITES', 'IN', 'THE', 'HOPE', 'THAT', 'PASSERS', 'BY', 'WOULD', 'SPIT', 'ON', 'HIS', 'TOMB', 'THIS', 'WAS', 'DONE'] +3528-168669-0055-932: 
ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0055-932: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'SO', 'BE', 'IT'] +3528-168669-0056-933: ref=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0056-933: hyp=['FOR', 'THAT', 'MATTER', 'NO', 'REVEREND', 'MOTHER'] +3528-168669-0057-934: ref=['FATHER', 'FAUVENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0057-934: hyp=['FATHER', 'VENT', 'MOTHER', 'CRUCIFIXION', 'WILL', 'BE', 'INTERRED', 'IN', 'THE', 'COFFIN', 'IN', 'WHICH', 'SHE', 'HAS', 'SLEPT', 'FOR', 'THE', 'LAST', 'TWENTY', 'YEARS', 'THAT', 'IS', 'JUST'] +3528-168669-0058-935: ref=['IT', 'IS', 'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0058-935: hyp=['IT', 'IS', 'A', 'CONTINUATION', 'OF', 'HER', 'SLUMBER'] +3528-168669-0059-936: ref=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0059-936: hyp=['SO', 'I', 'SHALL', 'HAVE', 'TO', 'NAIL', 'UP', 'THAT', 'COFFIN', 'YES'] +3528-168669-0060-937: ref=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'COMMUNITY'] +3528-168669-0060-937: hyp=['I', 'AM', 'AT', 'THE', 'ORDERS', 'OF', 'THE', 'VERY', 'REVEREND', 'CUNITY'] +3528-168669-0061-938: ref=['THE', 'FOUR', 'MOTHER', 'PRECENTORS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0061-938: hyp=['THE', 'FOREMOTHER', 'PRESENTERS', 'WILL', 'ASSIST', 'YOU'] +3528-168669-0062-939: ref=['NO', 'IN', 'LOWERING', 'THE', 'COFFIN'] +3528-168669-0062-939: hyp=['NO', 'IN', 'LOWERING', 'THE', 'COFFIN'] +3528-168669-0063-940: ref=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0063-940: hyp=['WHERE', 'INTO', 'THE', 'VAULT'] +3528-168669-0064-941: ref=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0064-941: hyp=['FAUCHELEVENT', 'STARTED', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR'] +3528-168669-0065-942: ref=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0065-942: hyp=['UNDER', 'THE', 'ALTAR', 'BUT'] +3528-168669-0066-943: ref=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0066-943: hyp=['YOU', 'WILL', 'HAVE', 'AN', 'IRON', 'BAR', 'YES', 'BUT'] +3528-168669-0067-944: ref=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0067-944: hyp=['YOU', 'WILL', 'RAISE', 'THE', 'STONE', 'WITH', 'THE', 'BAR', 'BY', 'MEANS', 'OF', 'THE', 'RING', 'BUT'] +3528-168669-0068-945: ref=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0068-945: hyp=['THE', 'DEAD', 'MUST', 'BE', 'OBEYED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL', 'NOT', 'TO', 'GO', 'TO', 'PROFANE', 'EARTH', 'TO', 'REMAIN', 'THERE', 'IN', 'DEATH', 'WHERE', 'SHE', 'PRAYED', 'WHILE', 'LIVING', 'SUCH', 'WAS', 'THE', 'LAST', 'WISH', 'OF', 'MOTHER', 'CRUCIFIXION'] +3528-168669-0069-946: ref=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0069-946: hyp=['SHE', 'ASKED', 'IT', 'OF', 'US', 'THAT', 'IS', 'TO', 'SAY', 'COMMANDED', 'US'] +3528-168669-0070-947: ref=['BUT', 'IT', 'IS', 'FORBIDDEN'] +3528-168669-0070-947: 
hyp=['BUT', 'IT', 'IS', 'FORBIDDEN'] +3528-168669-0071-948: ref=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0071-948: hyp=['OH', 'I', 'AM', 'A', 'STONE', 'IN', 'YOUR', 'WALLS'] +3528-168669-0072-949: ref=['THINK', 'FATHER', 'FAUVENT', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0072-949: hyp=['THINK', 'FATHER', 'IF', 'SHE', 'WERE', 'TO', 'WORK', 'MIRACLES', 'HERE'] +3528-168669-0073-950: ref=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0073-950: hyp=['WHAT', 'A', 'GLORY', 'OF', 'GOD', 'FOR', 'THE', 'COMMUNITY', 'AND', 'MIRACLES', 'ISSUE', 'FROM', 'TOMBS'] +3528-168669-0074-951: ref=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'COMMISSION'] +3528-168669-0074-951: hyp=['BUT', 'REVEREND', 'MOTHER', 'IF', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'COMMISSION'] +3528-168669-0075-952: ref=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0075-952: hyp=['BUT', 'THE', 'COMMISSARY', 'OF', 'POLICE'] +3528-168669-0076-953: ref=['CHONODEMAIRE', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GAULS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0076-953: hyp=['CHATEAU', 'DE', 'MER', 'ONE', 'OF', 'THE', 'SEVEN', 'GERMAN', 'KINGS', 'WHO', 'ENTERED', 'AMONG', 'THE', 'GULFS', 'UNDER', 'THE', 'EMPIRE', 'OF', 'CONSTANTIUS', 'EXPRESSLY', 'RECOGNIZED', 'THE', 'RIGHT', 'OF', 'NUNS', 'TO', 'BE', 'BURIED', 'IN', 'RELIGION', 'THAT', 'IS', 'TO', 'SAY', 'BENEATH', 'THE', 'ALTAR'] +3528-168669-0077-954: ref=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0077-954: hyp=['THE', 'WORLD', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'THE', 'CROSS'] +3528-168669-0078-955: ref=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CRUX', 'DUM', 'VOLVITUR', 'ORBIS'] +3528-168669-0078-955: hyp=['MARTIN', 'THE', 'ELEVENTH', 'GENERAL', 'OF', 'THE', 'CARTHUSIANS', 'GAVE', 'TO', 'HIS', 'ORDER', 'THIS', 'DEVICE', 'STAT', 'CREW', 'DOOM', 'VOLVETER', 'ORBUS'] +3528-168669-0079-956: ref=['THE', 'PRIORESS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVERFULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LOQUACITY', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0079-956: hyp=['THE', 'PYRIUS', 'WHO', 'WAS', 'USUALLY', 'SUBJECTED', 'TO', 'THE', 'BARRIER', 'OF', 'SILENCE', 'AND', 'WHOSE', 'RESERVOIR', 'WAS', 'OVER', 'FULL', 'ROSE', 'AND', 'EXCLAIMED', 'WITH', 'THE', 'LEQUESTITY', 'OF', 'A', 'DAM', 'WHICH', 'HAS', 'BROKEN', 'AWAY'] +3528-168669-0080-957: ref=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIT', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0080-957: hyp=['I', 'HAVE', 'ON', 'MY', 'RIGHT', 'BENOIS', 'AND', 'ON', 'MY', 'LEFT', 'BERNARD', 'WHO', 'WAS', 'BERNARD'] +3528-168669-0081-958: ref=['THE', 'FIRST', 'ABBOT', 'OF', 'CLAIRVAUX'] +3528-168669-0081-958: hyp=['THE', 'FIRST', 'ABBOT', 'OF', 'CLERVAL'] +3528-168669-0082-959: ref=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 'FOUR', 'EMPERORS', 
'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0082-959: hyp=['HIS', 'ORDER', 'HAS', 'PRODUCED', 'FORTY', 'POPES', 'TWO', 'HUNDRED', 'CARDINALS', 'FIFTY', 'PATRIARCHS', 'SIXTEEN', 'HUNDRED', 'ARCHBISHOPS', 'FOUR', 'THOUSAND', 'SIX', 'HUNDRED', 'BISHOPS', 'FOUR', 'EMPERORS', 'TWELVE', 'EMPRESSES', 'FORTY', 'SIX', 'KINGS', 'FORTY', 'ONE', 'QUEENS', 'THREE', 'THOUSAND', 'SIX', 'HUNDRED', 'CANONIZED', 'SAINTS', 'AND', 'HAS', 'BEEN', 'IN', 'EXISTENCE', 'FOR', 'FOURTEEN', 'HUNDRED', 'YEARS'] +3528-168669-0083-960: ref=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 'SANITARY', 'DEPARTMENT'] +3528-168669-0083-960: hyp=['ON', 'ONE', 'SIDE', 'SAINT', 'BERNARD', 'ON', 'THE', 'OTHER', 'THE', 'AGENT', 'OF', 'THE', 'SENATORY', 'DEPARTMENT'] +3528-168669-0084-961: ref=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSARY', 'OF', 'POLICE', 'SUCH', 'IS', 'THE', 'AGE', 'SILENCE', 'FAUVENT'] +3528-168669-0084-961: hyp=['GOD', 'SUBORDINATED', 'TO', 'THE', 'COMMISSORY', 'OF', 'POLICE', 'SUCH', 'WAS', 'THE', 'AGE', 'SILENCE', 'FAVAN'] +3528-168669-0085-962: ref=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'TO', 'SEPULTURE'] +3528-168669-0085-962: hyp=['NO', 'ONE', 'DOUBTS', 'THE', 'RIGHT', 'OF', 'THE', 'MONASTERY', 'CHOOSE', 'SEPULTURE'] +3528-168669-0086-963: ref=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0086-963: hyp=['ONLY', 'FANATICS', 'AND', 'THOSE', 'IN', 'ERROR', 'DENY', 'IT'] +3528-168669-0087-964: ref=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0087-964: hyp=['WE', 'LIVE', 'IN', 'TIMES', 'OF', 'TERRIBLE', 'CONFUSION'] +3528-168669-0088-965: ref=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0088-965: hyp=['WE', 'ARE', 'IGNORANT', 'AND', 'IMPIOUS'] +3528-168669-0089-966: ref=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0089-966: hyp=['AND', 'THEN', 'RELIGION', 'IS', 'ATTACKED', 'WHY'] +3528-168669-0090-967: ref=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGITTAIRE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALONE', 'BISHOP', 'OF', 'EMBRUN', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MOMMOL'] +3528-168669-0090-967: hyp=['BECAUSE', 'THERE', 'HAVE', 'BEEN', 'BAD', 'PRIESTS', 'BECAUSE', 'SAGATURE', 'BISHOP', 'OF', 'GAP', 'WAS', 'THE', 'BROTHER', 'OF', 'SALON', 'BISHOP', 'OF', 'EMBRON', 'AND', 'BECAUSE', 'BOTH', 'OF', 'THEM', 'FOLLOWED', 'MAMMA'] +3528-168669-0091-968: ref=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0091-968: hyp=['THEY', 'PERSECUTE', 'THE', 'SAINTS'] +3528-168669-0092-969: ref=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0092-969: hyp=['THEY', 'SHUT', 'THEIR', 'EYES', 'TO', 'THE', 'TRUTH', 'DARKNESS', 'IS', 'THE', 'RULE'] +3528-168669-0093-970: ref=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0093-970: hyp=['THE', 'MOST', 'FEROCIOUS', 'BEASTS', 'ARE', 'BEASTS', 'WHICH', 'ARE', 'BLIND'] +3528-168669-0094-971: ref=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0094-971: hyp=['OH', 'HOW', 'WICKED', 'PEOPLE', 'ARE'] +3528-168669-0095-972: ref=['BY', 'ORDER', 'OF', 'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0095-972: hyp=['BY', 'ORDER', 'OF', 
'THE', 'KING', 'SIGNIFIES', 'TO', 'DAY', 'BY', 'ORDER', 'OF', 'THE', 'REVOLUTION'] +3528-168669-0096-973: ref=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0096-973: hyp=['ONE', 'NO', 'LONGER', 'KNOWS', 'WHAT', 'IS', 'DUE', 'TO', 'THE', 'LIVING', 'OR', 'TO', 'THE', 'DEAD', 'A', 'HOLY', 'DEATH', 'IS', 'PROHIBITED'] +3528-168669-0097-974: ref=['GAUTHIER', 'BISHOP', 'OF', 'CHALONS', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0097-974: hyp=['GATHIER', 'BISHOP', 'OF', 'CALON', 'HELD', 'HIS', 'OWN', 'IN', 'THIS', 'MATTER', 'AGAINST', 'OTHO', 'DUKE', 'OF', 'BURGUNDY'] +3528-168669-0098-975: ref=['THE', 'PRIORESS', 'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0098-975: hyp=['THE', 'PRIORS', 'TOOK', 'BREATH', 'THEN', 'TURNED', 'TO', 'FAUCHELEVENT'] +3528-168669-0099-976: ref=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0099-976: hyp=['YOU', 'WILL', 'CLOSE', 'THE', 'COFFIN', 'THE', 'SISTERS', 'WILL', 'CARRY', 'IT', 'TO', 'THE', 'CHAPEL'] +3528-168669-0100-977: ref=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0100-977: hyp=['THE', 'OFFICE', 'FOR', 'THE', 'DEAD', 'WILL', 'THEN', 'BE', 'SAID'] +3528-168669-0101-978: ref=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0101-978: hyp=['BUT', 'SHE', 'WILL', 'HEAR', 'SHE', 'WILL', 'NOT', 'LISTEN'] +3528-168669-0102-979: ref=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0102-979: hyp=['BESIDES', 'WHAT', 'THE', 'CLOISTER', 'KNOWS', 'THE', 'WORLD', 'LEARNS', 'NOT'] +3528-168669-0103-980: ref=['A', 'PAUSE', 'ENSUED'] +3528-168669-0103-980: hyp=['A', 'PAUSE', 'ENSUIT'] +3528-168669-0104-981: ref=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BELL'] +3528-168669-0104-981: hyp=['YOU', 'WILL', 'REMOVE', 'YOUR', 'BELT'] +3528-168669-0105-982: ref=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0105-982: hyp=['HAS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'PAID', 'HIS', 'VISIT'] +3528-168669-0106-983: ref=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0106-983: hyp=['HE', 'WILL', 'PAY', 'IT', 'AT', 'FOUR', "O'CLOCK", 'TO', 'DAY'] +3528-168669-0107-984: ref=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0107-984: hyp=['THE', 'PEAL', 'WHICH', 'ORDERS', 'THE', 'DOCTOR', 'FOR', 'THE', 'DEAD', 'TO', 'BE', 'SUMMONED', 'HAS', 'ALREADY', 'BEEN', 'RUNG'] +3528-168669-0108-985: ref=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0108-985: hyp=['BUT', 'YOU', 'DO', 'NOT', 'UNDERSTAND', 'ANY', 'OF', 'THE', 'PEALS'] +3528-168669-0109-986: ref=['THAT', 'IS', 'WELL', 'FATHER', 'FAUVENT'] +3528-168669-0109-986: hyp=['THAT', 'IS', 'WELL', 'FATHER', 'VENT'] +3528-168669-0110-987: ref=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0110-987: hyp=['WHERE', 'WILL', 'YOU', 'OBTAIN', 'IT'] +3528-168669-0111-988: ref=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0111-988: hyp=['I', 'HAVE', 'MY', 'HEAP', 'OF', 'OLD', 'IRON', 'AT', 'THE', 'BOTTOM', 'OF', 'THE', 'GARDEN'] +3528-168669-0112-989: ref=['REVEREND', 'MOTHER', 'WHAT'] +3528-168669-0112-989: 
hyp=['REVEREND', 'MOTHER', 'WHAT'] +3528-168669-0113-990: ref=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0113-990: hyp=['IF', 'YOU', 'WERE', 'EVER', 'TO', 'HAVE', 'ANY', 'OTHER', 'JOBS', 'OF', 'THIS', 'SORT', 'MY', 'BROTHER', 'IS', 'THE', 'STRONG', 'MAN', 'FOR', 'YOU', 'A', 'PERFECT', 'TURK'] +3528-168669-0114-991: ref=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0114-991: hyp=['YOU', 'WILL', 'DO', 'IT', 'AS', 'SPEEDILY', 'AS', 'POSSIBLE'] +3528-168669-0115-992: ref=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] +3528-168669-0115-992: hyp=['I', 'CANNOT', 'WORK', 'VERY', 'FAST', 'I', 'AM', 'INFIRM', 'THAT', 'IS', 'WHY', 'I', 'REQUIRE', 'AN', 'ASSISTANT', 'I', 'LIMP'] +3528-168669-0116-993: ref=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0116-993: hyp=['EVERYTHING', 'MUST', 'HAVE', 'BEEN', 'COMPLETED', 'A', 'GOOD', 'QUARTER', 'OF', 'AN', 'HOUR', 'BEFORE', 'THAT'] +3528-168669-0117-994: ref=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0117-994: hyp=['I', 'WILL', 'DO', 'ANYTHING', 'TO', 'PROVE', 'MY', 'ZEAL', 'TOWARDS', 'THE', 'COMMUNITY', 'THESE', 'ARE', 'MY', 'ORDERS', 'I', 'AM', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN'] +3528-168669-0118-995: ref=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0118-995: hyp=['AT', 'ELEVEN', "O'CLOCK", 'EXACTLY', 'I', 'AM', 'TO', 'BE', 'IN', 'THE', 'CHAPEL'] +3528-168669-0119-996: ref=['MOTHER', 'ASCENSION', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0119-996: hyp=['MOTHER', 'ASCENSION', 'WILL', 'BE', 'THERE', 'TWO', 'MEN', 'WOULD', 'BE', 'BETTER'] +3528-168669-0120-997: ref=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LEVER'] +3528-168669-0120-997: hyp=['HOWEVER', 'NEVER', 'MIND', 'I', 'SHALL', 'HAVE', 'MY', 'LOVER'] +3528-168669-0121-998: ref=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0121-998: hyp=['AFTER', 'WHICH', 'THERE', 'WILL', 'BE', 'NO', 'TRACE', 'OF', 'ANYTHING'] +3528-168669-0122-999: ref=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0122-999: hyp=['THE', 'GOVERNMENT', 'WILL', 'HAVE', 'NO', 'SUSPICION'] +3528-168669-0123-1000: ref=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0123-1000: hyp=['THE', 'EMPTY', 'COFFIN', 'REMAINS', 'THIS', 'PRODUCED', 'A', 'PAUSE'] +3528-168669-0124-1001: ref=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'FAUVENT'] +3528-168669-0124-1001: hyp=['WHAT', 'IS', 'TO', 'BE', 'DONE', 'WITH', 'THAT', 'COFFIN', 'FATHER', 'VENT'] +3528-168669-0125-1002: ref=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0125-1002: hyp=['IT', 'WILL', 'BE', 'GIVEN', 'TO', 'THE', 'EARTH', 'EMPTY'] +3528-168669-0126-1003: ref=['AH', 'THE', 'DE', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0126-1003: hyp=['AH', 'LEDA', 'EXCLAIMED', 'FAUCHELEVENT'] +3528-168669-0127-1004: ref=['THE', 'VIL', 'STUCK', 'FAST', 'IN', 'HIS', 'THROAT'] +3528-168669-0127-1004: hyp=['THE', 'VILLE', 'STUCK', 'FAST', 'IN', 'HIS', 'THROAT'] 
+3528-168669-0128-1005: ref=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0128-1005: hyp=['HE', 'MADE', 'HASTE', 'TO', 'IMPROVISE', 'AN', 'EXPEDIENT', 'TO', 'MAKE', 'HER', 'FORGET', 'THE', 'OATH'] +3528-168669-0129-1006: ref=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVEREND', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0129-1006: hyp=['I', 'WILL', 'PUT', 'EARTH', 'IN', 'THE', 'COFFIN', 'REVEREND', 'MOTHER', 'THAT', 'WILL', 'PRODUCE', 'THE', 'EFFECT', 'OF', 'A', 'CORPSE'] +3528-168669-0130-1007: ref=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] +3528-168669-0130-1007: hyp=['I', 'WILL', 'MAKE', 'THAT', 'MY', 'SPECIAL', 'BUSINESS'] +3538-142836-0000-1567: ref=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONARY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0000-1567: hyp=['GENERAL', 'OBSERVATIONS', 'ON', 'PRESERVES', 'CONFECTIONERY', 'ICES', 'AND', 'DESSERT', 'DISHES'] +3538-142836-0001-1568: ref=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUANTITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0001-1568: hyp=['THE', 'EXPENSE', 'OF', 'PRESERVING', 'THEM', 'WITH', 'SUGAR', 'IS', 'A', 'SERIOUS', 'OBJECTION', 'FOR', 'EXCEPT', 'THE', 'SUGAR', 'IS', 'USED', 'IN', 'CONSIDERABLE', 'QUALITIES', 'THE', 'SUCCESS', 'IS', 'VERY', 'UNCERTAIN'] +3538-142836-0002-1569: ref=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MILDEWED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0002-1569: hyp=['FRUIT', 'GATHERED', 'IN', 'WET', 'OR', 'FOGGY', 'WEATHER', 'WILL', 'SOON', 'BE', 'MELTED', 'AND', 'BE', 'OF', 'NO', 'SERVICE', 'FOR', 'PRESERVES'] +3538-142836-0003-1570: ref=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0003-1570: hyp=['BUT', 'TO', 'DISTINGUISH', 'THESE', 'PROPERLY', 'REQUIRES', 'VERY', 'GREAT', 'ATTENTION', 'AND', 'CONSIDERABLE', 'EXPERIENCE'] +3538-142836-0004-1571: ref=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SYRUP', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SYRUP', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0004-1571: hyp=['IF', 'YOU', 'DIP', 'THE', 'FINGER', 'INTO', 'THE', 'SERF', 'AND', 'APPLY', 'IT', 'TO', 'THE', 'THUMB', 'THE', 'TENACITY', 'OF', 'THE', 'SERF', 'WILL', 'ON', 'SEPARATING', 'THE', 'FINGER', 'AND', 'THUMB', 'AFFORD', 'A', 'THREAD', 'WHICH', 'SHORTLY', 'BREAKS', 'THIS', 'IS', 'THE', 'LITTLE', 'THREAD'] +3538-142836-0005-1572: ref=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0005-1572: hyp=['LET', 'IT', 'BOIL', 'UP', 'AGAIN', 'THEN', 'TAKE', 'IT', 'OFF', 'AND', 'REMOVE', 'CAREFULLY', 'THE', 'SCUM', 'THAT', 'HAS', 'RISEN'] +3538-142836-0006-1573: ref=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 'LIKE', 'OIL'] +3538-142836-0006-1573: hyp=['IT', 'IS', 'CONSIDERED', 'TO', 'BE', 'SUFFICIENTLY', 'BOILED', 'WHEN', 'SOME', 'TAKEN', 'UP', 'IN', 'A', 'SPOON', 'POURS', 'OUT', 'LIKE', 'OIL'] 
+3538-142836-0007-1574: ref=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESERVE', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'HAS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0007-1574: hyp=['BEFORE', 'SUGAR', 'WAS', 'IN', 'USE', 'HONEY', 'WAS', 'EMPLOYED', 'TO', 'PRESENT', 'MANY', 'VEGETABLE', 'PRODUCTIONS', 'THOUGH', 'THIS', 'SUBSTANCE', 'IS', 'NOW', 'GIVEN', 'WAY', 'TO', 'THE', 'JUICE', 'OF', 'THE', 'SUGAR', 'CANE'] +3538-142836-0008-1575: ref=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0008-1575: hyp=['FOURTEEN', 'NINETY', 'NINE'] +3538-142836-0009-1576: ref=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0009-1576: hyp=['BOIL', 'THEM', 'UP', 'THREE', 'DAYS', 'SUCCESSIVELY', 'SKIMMING', 'EACH', 'TIME', 'AND', 'THEY', 'WILL', 'THEN', 'BE', 'FINISHED', 'AND', 'IN', 'A', 'STATE', 'FIT', 'TO', 'BE', 'PUT', 'INTO', 'POTS', 'FOR', 'USE'] +3538-142836-0010-1577: ref=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'AN', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0010-1577: hyp=['THE', 'REASON', 'WHY', 'THE', 'FRUIT', 'IS', 'EMPTIED', 'OUT', 'OF', 'THE', 'PRESERVING', 'PAN', 'INTO', 'OUR', 'EARTHEN', 'PAN', 'IS', 'THAT', 'THE', 'ACID', 'OF', 'THE', 'FRUIT', 'ACTS', 'UPON', 'THE', 'COPPER', 'OF', 'WHICH', 'THE', 'PRESERVING', 'PANS', 'ARE', 'USUALLY', 'MADE'] +3538-142836-0011-1578: ref=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WILL', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0011-1578: hyp=['FROM', 'THIS', 'EXAMPLE', 'THE', 'PROCESS', 'OF', 'PRESERVING', 'FRUITS', 'BY', 'SYRUP', 'WOULD', 'BE', 'EASILY', 'COMPREHENDED'] +3538-142836-0012-1579: ref=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SIEVE', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'ARE', 'TURNED'] +3538-142836-0012-1579: hyp=['THEY', 'SHOULD', 'BE', 'DRIED', 'IN', 'THE', 'STOVE', 'OR', 'OVEN', 'ON', 'A', 'SEA', 'AND', 'TURNED', 'EVERY', 'SIX', 'OR', 'EIGHT', 'HOURS', 'FRESH', 'POWDERED', 'SUGAR', 'BEING', 'SIFTED', 'OVER', 'THEM', 'EVERY', 'TIME', 'THEY', 'RETURNED'] +3538-142836-0013-1580: ref=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LEMON', 'CHIPS', 'ARE', 'PRESERVED'] +3538-142836-0013-1580: hyp=['IN', 'THIS', 'WAY', 'IT', 'IS', 'ALSO', 'THAT', 'ORANGE', 'AND', 'LINENSHIPS', 'ARE', 'PRESERVED'] +3538-142836-0014-1581: ref=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PASTES', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'REQUEST'] +3538-142836-0014-1581: hyp=['MARMALADES', 'JAMS', 'AND', 'FRUIT', 'PACE', 'ARE', 'OF', 'THE', 'SAME', 'NATURE', 'AND', 'ARE', 'NOW', 'IN', 'VERY', 'GENERAL', 'QUEST'] +3538-142836-0015-1582: ref=['MARMALADES', 'AND', 'JAMS', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEY', 'ARE', 'PRESERVES', 'OF', 'A', 'HALF', 'LIQUID', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RINDS', 'WITH', 'SUGAR'] 
+3538-142836-0015-1582: hyp=['MARMAL', 'ETS', 'AND', 'JAMS', 'DIFFER', 'LITTLE', 'FROM', 'EACH', 'OTHER', 'THEIR', 'PRESERVES', 'OF', 'HALF', 'LIQUID', 'CONSISTENCY', 'MADE', 'BY', 'BOILING', 'THE', 'PULP', 'OF', 'FRUITS', 'AND', 'SOMETIMES', 'PART', 'OF', 'THE', 'RINDS', 'WITH', 'SUGAR'] +3538-142836-0016-1583: ref=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0016-1583: hyp=['THAT', 'THEY', 'MAY', 'KEEP', 'IT', 'IS', 'NECESSARY', 'NOT', 'TO', 'BE', 'SPARING', 'OF', 'SUGAR', 'FIFTEEN', 'O', 'THREE'] +3538-142836-0017-1584: ref=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 'FIRE', 'BUT', 'ON', 'A', 'TRIVET', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0017-1584: hyp=['IN', 'ALL', 'THE', 'OPERATIONS', 'FOR', 'PRESERVE', 'MAKING', 'WHEN', 'THE', 'PRESERVING', 'PAN', 'IS', 'USED', 'IT', 'SHOULD', 'NOT', 'BE', 'PLACED', 'ON', 'THE', 'FIRE', 'BUT', 'ON', 'A', 'TRIBUT', 'UNLESS', 'THE', 'JAM', 'IS', 'MADE', 'ON', 'A', 'HOT', 'PLATE', 'WHEN', 'THIS', 'IS', 'NOT', 'NECESSARY'] +3538-142836-0018-1585: ref=['CONFECTIONARY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0018-1585: hyp=['CONFECTIONERY', 'FIFTEEN', 'O', 'EIGHT'] +3538-142836-0019-1586: ref=['IN', 'SPEAKING', 'OF', 'CONFECTIONARY', 'IT', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VARIOUS', 'FRUITS', 'FLOWERS', 'HERBS', 'ROOTS', 'AND', 'JUICES', 'WHICH', 'WHEN', 'BOILED', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFICERE', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONARY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'CUISINE'] +3538-142836-0019-1586: hyp=['IN', 'SPEAKING', 'OF', 'CONFECTIONERIES', 'SHOULD', 'BE', 'REMARKED', 'THAT', 'ALL', 'THE', 'VARIOUS', 'PREPARATIONS', 'ABOVE', 'NAMED', 'COME', 'STRICTLY', 'SPEAKING', 'UNDER', 'THAT', 'HEAD', 'FOR', 'THE', 'VARIOUS', 'FRUITS', 'FLOWERS', 'HERBS', 'ROOFS', 'AND', 'JUICES', 'WHICH', 'ONE', 'BOILED', 'WITH', 'SUGAR', 'WERE', 'FORMERLY', 'EMPLOYED', 'IN', 'PHARMACY', 'AS', 'WELL', 'AS', 'FOR', 'SWEETMEATS', 'WERE', 'CALLED', 'CONFECTIONS', 'FROM', 'THE', 'LATIN', 'WORD', 'CONFERS', 'TO', 'MAKE', 'UP', 'BUT', 'THE', 'TERM', 'CONFECTIONARY', 'EMBRACES', 'A', 'VERY', 'LARGE', 'CLASS', 'INDEED', 'OF', 'SWEET', 'FOOD', 'MANY', 'KINDS', 'OF', 'WHICH', 'SHOULD', 'NOT', 'BE', 'ATTEMPTED', 'IN', 'THE', 'ORDINARY', 'COUISINE'] +3538-142836-0020-1587: ref=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0020-1587: hyp=['THE', 'THOUSAND', 'AND', 'ONE', 'ORNAMENTAL', 'DISHES', 'THAT', 'ADORN', 'THE', 'TABLES', 'OF', 'THE', 'WEALTHY', 'SHOULD', 'BE', 'PURCHASED', 'FROM', 'THE', 'CONFECTIONER', 'THEY', 'CANNOT', 'PROFITABLY', 'BE', 'MADE', 'AT', 'HOME'] +3538-142836-0021-1588: ref=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGNS', 'OF', 'OUR', 
'TWO', 'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESSERTS'] +3538-142836-0021-1588: hyp=['HOWEVER', 'AS', 'LATE', 'AS', 'THE', 'REIGN', 'OF', 'OUR', 'TWO', 'LAST', 'GEORGES', 'FABULOUS', 'SUMS', 'WERE', 'OFTEN', 'EXPENDED', 'UPON', 'FANCIFUL', 'DESERTS'] +3538-142836-0022-1589: ref=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0022-1589: hyp=['THE', 'SHAPE', 'OF', 'THE', 'DISHES', 'VARIES', 'AT', 'DIFFERENT', 'PERIODS', 'THE', 'PREVAILING', 'FASHION', 'AT', 'PRESENT', 'BEING', 'OVAL', 'AND', 'CIRCULAR', 'DISHES', 'ON', 'STEMS'] +3538-142836-0023-1590: ref=['ICES'] +3538-142836-0023-1590: hyp=['ISIS'] +3538-142836-0024-1591: ref=['AT', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'ICES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0024-1591: hyp=['A', 'DESSERTS', 'OR', 'AT', 'SOME', 'EVENING', 'PARTIES', 'IISES', 'ARE', 'SCARCELY', 'TO', 'BE', 'DISPENSED', 'WITH'] +3538-142836-0025-1592: ref=['THE', 'SPADDLE', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0025-1592: hyp=['THE', 'SPADEL', 'IS', 'GENERALLY', 'MADE', 'OF', 'COPPER', 'KEPT', 'BRIGHT', 'AND', 'CLEAN'] +3538-142836-0026-1593: ref=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-142836-0026-1593: hyp=['THEY', 'SHOULD', 'BE', 'TAKEN', 'IMMEDIATELY', 'AFTER', 'THE', 'REPAST', 'OR', 'SOME', 'HOURS', 'AFTER', 'BECAUSE', 'THE', 'TAKING', 'OF', 'THESE', 'SUBSTANCES', 'DURING', 'THE', 'PROCESS', 'OF', 'DIGESTION', 'IS', 'APT', 'TO', 'PROVOKE', 'INDISPOSITION'] +3538-163619-0000-1500: ref=['THERE', 'WAS', 'ONCE', 'ON', 'A', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WIFE'] +3538-163619-0000-1500: hyp=['THERE', 'WAS', 'ONCE', 'TILL', 'THE', 'TIME', 'A', 'WIDOWER', 'WHO', 'HAD', 'A', 'SON', 'AND', 'A', 'DAUGHTER', 'BY', 'HIS', 'FIRST', 'WI'] +3538-163619-0001-1501: ref=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0001-1501: hyp=['FROM', 'THE', 'VERY', 'DAY', 'THAT', 'THE', 'NEW', 'WIFE', 'CAME', 'INTO', 'THE', 'HOUSE', 'THERE', 'WAS', 'NO', 'PEACE', 'FOR', 'THE', "MAN'S", 'CHILDREN', 'AND', 'NOT', 'A', 'CORNER', 'TO', 'BE', 'FOUND', 'WHERE', 'THEY', 'COULD', 'GET', 'ANY', 'REST', 'SO', 'THE', 'BOY', 'THOUGHT', 'THAT', 'THE', 'BEST', 'THING', 'HE', 'COULD', 'DO', 'WAS', 'TO', 'GO', 'OUT', 'INTO', 'THE', 'WORLD', 'AND', 'TRY', 'TO', 'EARN', 'HIS', 'OWN', 'BREAD'] +3538-163619-0002-1502: ref=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0002-1502: hyp=['BUT', 'HIS', 'SISTER', 'WHO', 'WAS', 'STILL', 'AT', 'HOME', 'FARED', 'WORSE', 'AND', 'WORSE'] +3538-163619-0003-1503: ref=['KISS', 'ME', 'GIRL', 'SAID', 'THE', 'HEAD'] +3538-163619-0003-1503: 
hyp=['KISS', 'ME', 'GO', 'SAID', 'THE', 'HEAD'] +3538-163619-0004-1504: ref=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0004-1504: hyp=['WHEN', 'THE', 'KING', 'ENTERED', 'AND', 'SAW', 'IT', 'HE', 'STOOD', 'STILL', 'AS', 'IF', 'HE', 'WERE', 'IN', 'FETTERS', 'AND', 'COULD', 'NOT', 'STIR', 'FROM', 'THE', 'SPOT', 'FOR', 'THE', 'PICTURE', 'SEEMED', 'TO', 'HIM', 'SO', 'BEAUTIFUL'] +3538-163619-0005-1505: ref=['THE', 'YOUTH', 'PROMISED', 'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0005-1505: hyp=['THESE', 'PROMISED', 'TO', 'MAKE', 'ALL', 'THE', 'HASTE', 'HE', 'COULD', 'AND', 'SET', 'FORTH', 'FROM', 'THE', "KING'S", 'PALACE'] +3538-163619-0006-1506: ref=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0006-1506: hyp=['AT', 'LAST', 'THEY', 'CAME', 'IN', 'SIGHT', 'OF', 'LAND'] +3538-163619-0007-1507: ref=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0007-1507: hyp=['WELL', 'IF', 'MY', 'BROTHER', 'SAYS', 'SO', 'I', 'MUST', 'DO', 'IT', 'SAID', 'THE', "MAN'S", 'DAUGHTER', 'AND', 'SHE', 'FLUNG', 'HER', 'CASKET', 'INTO', 'THE', 'SEA'] +3538-163619-0008-1508: ref=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0008-1508: hyp=['WHAT', 'IS', 'MY', 'BROTHER', 'SAYING', 'ASKED', 'HIS', 'SISTER', 'AGAIN'] +3538-163619-0009-1509: ref=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0009-1509: hyp=['ON', 'THE', 'FIRST', 'THURSDAY', 'NIGHT', 'AFTER', 'THIS', 'A', 'BEAUTIFUL', 'MAIDEN', 'CAME', 'INTO', 'THE', 'KITCHEN', 'OF', 'THE', 'PALACE', 'AND', 'BEGGED', 'THE', 'KITCHEN', 'MAID', 'WHO', 'SLEPT', 'THERE', 'TO', 'LEND', 'HER', 'A', 'BRUSH'] +3538-163619-0010-1510: ref=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0010-1510: hyp=['SHE', 'BEGGED', 'VERY', 'PRETTILY', 'AND', 'GOT', 'IT', 'AND', 'THEN', 'SHE', 'BRUSHED', 'HER', 'HAIR', 'AND', 'THE', 'GOLD', 'DROPPED', 'FROM', 'IT'] +3538-163619-0011-1511: ref=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHER', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0011-1511: hyp=['OUT', 'ON', 'THEE', 'UGLY', 'BUSHY', 'BRIDE', 'SLEEPING', 'SO', 'SOFT', 'BY', 'THE', 'YOUNG', "KING'S", 'SIDE', 'ON', 'SAND', 'AND', 'STONES', 'MY', 'BED', 'I', 'MAKE', 'AND', 'MY', 'BROTHERS', 'SLEEPS', 'WITH', 'THE', 'COLD', 'SNAKE', 'UNPITIED', 'AND', 'UNWEPT'] +3538-163619-0012-1512: ref=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0012-1512: hyp=['I', 'SHALL', 'COME', 'TWICE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN', 'SAID', 'SHE'] +3538-163619-0013-1513: ref=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 
'HER', 'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'DAY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0013-1513: hyp=['THIS', 'TIME', 'ALSO', 'AS', 'BEFORE', 'SHE', 'BORROWED', 'A', 'BRUSH', 'AND', 'BRUSHED', 'HER', 'HAIR', 'WITH', 'IT', 'AND', 'THE', 'GOLD', 'DROPPED', 'DOWN', 'AS', 'SHE', 'DID', 'IT', 'AND', 'AGAIN', 'SHE', 'SENT', 'THE', 'DOG', 'OUT', 'THREE', 'TIMES', 'AND', 'WHEN', 'THEY', 'DAWNED', 'SHE', 'DEPARTED', 'BUT', 'AS', 'SHE', 'WAS', 'GOING', 'SHE', 'SAID', 'AS', 'SHE', 'HAD', 'SAID', 'BEFORE', 'I', 'SHALL', 'COME', 'ONCE', 'MORE', 'AND', 'THEN', 'NEVER', 'AGAIN'] +3538-163619-0014-1514: ref=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSELF'] +3538-163619-0014-1514: hyp=['NO', 'ONE', 'CAN', 'TELL', 'HOW', 'DELIGHTED', 'THE', 'KING', 'WAS', 'TO', 'GET', 'RID', 'OF', 'THAT', 'HIDEOUS', 'BUSHY', 'BRIDE', 'AND', 'GET', 'A', 'QUEEN', 'WHO', 'WAS', 'BRIGHT', 'AND', 'BEAUTIFUL', 'AS', 'DAY', 'ITSELF'] +3538-163622-0000-1515: ref=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOALS', 'ASKED', 'THE', 'KING'] +3538-163622-0000-1515: hyp=['WILT', 'THOU', 'SERVE', 'ME', 'AND', 'WATCH', 'MY', 'SEVEN', 'FOLDS', 'ASKED', 'THE', 'KING'] +3538-163622-0001-1516: ref=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'THAT', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0001-1516: hyp=['THE', 'YOUTH', 'THOUGHT', 'THAT', 'IT', 'WAS', 'VERY', 'EASY', 'WORK', 'TO', 'WATCH', 'THE', 'FOALS', 'AND', 'HE', 'COULD', 'DO', 'IT', 'WELL', 'ENOUGH'] +3538-163622-0002-1517: ref=['HAST', 'THOU', 'WATCHED', 'FAITHFULLY', 'AND', 'WELL', 'THE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0002-1517: hyp=['HAST', 'THOU', 'WATCH', 'FAITHFULLY', 'AND', 'WELL', 'THE', 'WHOLE', 'DAY', 'LONG', 'SAID', 'THE', 'KING', 'WHEN', 'THE', 'LAD', 'CAME', 'INTO', 'HIS', 'PRESENCE', 'IN', 'THE', 'EVENING'] +3538-163622-0003-1518: ref=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0003-1518: hyp=['YES', 'THAT', 'I', 'HAVE', 'SAID', 'THE', 'YOUTH'] +3538-163622-0004-1519: ref=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0004-1519: hyp=['HE', 'HAD', 'GONE', 'OUT', 'ONCE', 'TO', 'SEEK', 'A', 'PLACE', 'HE', 'SAID', 'BUT', 'NEVER', 'WOULD', 'HE', 'DO', 'SUCH', 'A', 'THING', 'AGAIN'] +3538-163622-0005-1520: ref=['THEN', 'THE', 'KING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0005-1520: hyp=['THE', 'MACKING', 'PROMISED', 'HIM', 'THE', 'SAME', 'PUNISHMENT', 'AND', 'THE', 'SAME', 'REWARD', 'THAT', 'HE', 'HAD', 'PROMISED', 'HIS', 'BROTHER'] +3538-163622-0006-1521: ref=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOALS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'A', 'CLEFT', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 
'OLD', 'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0006-1521: hyp=['WHEN', 'HE', 'HAD', 'RUN', 'AFTER', 'THE', 'FOOLS', 'FOR', 'A', 'LONG', 'LONG', 'TIME', 'AND', 'WAS', 'HOT', 'AND', 'TIRED', 'HE', 'PASSED', 'BY', 'CLIFF', 'IN', 'THE', 'ROCK', 'WHERE', 'AN', 'OLD', 'WOMAN', 'WAS', 'SITTING', 'SPINNING', 'WITH', 'A', 'DISTAFF', 'AND', 'SHE', 'CALLED', 'TO', 'HIM'] +3538-163622-0007-1522: ref=['COME', 'HITHER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0007-1522: hyp=['COMMANDER', 'COME', 'HITHER', 'MY', 'HANDSOME', 'SON', 'AND', 'LET', 'ME', 'COMB', 'YOUR', 'HAIR'] +3538-163622-0008-1523: ref=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOALS', 'RUN', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0008-1523: hyp=['THE', 'YOUTH', 'LIKED', 'THE', 'THOUGHT', 'OF', 'THIS', 'LET', 'THE', 'FOLDS', 'WARM', 'WHERE', 'THEY', 'CHOSE', 'AND', 'SEATED', 'HIMSELF', 'IN', 'THE', 'CLEFT', 'OF', 'THE', 'ROCK', 'BY', 'THE', 'SIDE', 'OF', 'THE', 'OLD', 'HAG'] +3538-163622-0009-1524: ref=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0009-1524: hyp=['SO', 'THERE', 'HE', 'SAT', 'WITH', 'HIS', 'HEAD', 'ON', 'HER', 'LAP', 'TAKING', 'HIS', 'EASE', 'THE', 'LIVELONG', 'DAY'] +3538-163622-0010-1525: ref=['ON', 'THE', 'THIRD', 'DAY', 'CINDERLAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0010-1525: hyp=['ON', 'THE', 'THIRD', 'DAY', 'SAID', 'THE', 'LAD', 'WANTED', 'TO', 'SET', 'OUT'] +3538-163622-0011-1526: ref=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'AND', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0011-1526: hyp=['THE', 'TWO', 'BROTHERS', 'LAUGHED', 'AT', 'HIM', 'AND', 'HIS', 'FATHER', 'AND', 'MOTHER', 'BEGGED', 'HIM', 'NOT', 'TO', 'GO', 'BUT', 'ALL', 'TO', 'NO', 'PURPOSE', 'AND', 'CINDERLAD', 'SET', 'OUT', 'ON', 'HIS', 'WAY'] +3538-163622-0012-1527: ref=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'CINDERLAD'] +3538-163622-0012-1527: hyp=['I', 'AM', 'WALKING', 'ABOUT', 'IN', 'SEARCH', 'OF', 'A', 'PLACE', 'SAID', 'SAINTO', 'LAD'] +3538-163622-0013-1528: ref=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDERLAD'] +3538-163622-0013-1528: hyp=['I', 'WOULD', 'MUCH', 'RATHER', 'HAVE', 'THE', 'PRINCESS', 'SAID', 'CINDER', 'LAD'] +3538-163622-0014-1529: ref=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0014-1529: hyp=['AND', 'THUS', 'THEY', 'JOURNEYED', 'ONWARDS', 'A', 'LONG', 'LONG', 'WAY'] +3538-163622-0015-1530: ref=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0015-1530: hyp=['WHEN', 'THEY', 'HAD', 'GONE', 'THUS', 'FOR', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FULL', 'AGAIN', 'ASKED', 'DOST', 'THOU', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0016-1531: ref=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDERLAD'] +3538-163622-0016-1531: hyp=['YES', 'NOW', 'I', 'SEE', 'SOMETHING', 'THAT', 'IS', 'WHITE', 'SAID', 'CINDER', 'LAD'] +3538-163622-0017-1532: ref=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 
'A', 'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0017-1532: hyp=['IT', 'LOOKS', 'LIKE', 'THE', 'TRUNK', 'OF', 'A', 'GREAT', 'THICK', 'BIRCH', 'TREE'] +3538-163622-0018-1533: ref=['CINDERLAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DRAUGHT', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WIELD', 'THE', 'SWORD', 'WITH', 'PERFECT', 'EASE'] +3538-163622-0018-1533: hyp=['SOONER', 'LAD', 'TRIED', 'BUT', 'COULD', 'NOT', 'DO', 'IT', 'SO', 'HE', 'HAD', 'TO', 'TAKE', 'A', 'DROP', 'FROM', 'THE', 'PITCHER', 'AND', 'THEN', 'ONE', 'MORE', 'AND', 'AFTER', 'THAT', 'STILL', 'ANOTHER', 'AND', 'THEN', 'HE', 'WAS', 'ABLE', 'TO', 'WHEEL', 'THE', 'SWORD', 'WITH', 'PERFECT', 'EASE'] +3538-163622-0019-1534: ref=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'HAS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0019-1534: hyp=['FOR', 'WE', 'ARE', 'BROTHERS', 'OF', 'THE', 'PRINCESS', 'WHOM', 'THOU', 'ART', 'TO', 'HAVE', 'WHEN', 'THOU', 'CANST', 'TELL', 'THE', 'KING', 'WHAT', 'WE', 'EAT', 'AND', 'DRINK', 'BUT', 'THERE', 'IS', 'A', 'MIGHTY', 'TROLL', 'WHO', 'IS', 'CAST', 'A', 'SPELL', 'OVER', 'US'] +3538-163622-0020-1535: ref=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'A', 'LONG', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0020-1535: hyp=['WHEN', 'THEY', 'HAD', 'TRAVELLED', 'ALONG', 'A', 'LONG', 'WAY', 'THE', 'FOAL', 'SAID', 'DOST', 'THOU', 'SEE', 'ANYTHING'] +3538-163622-0021-1536: ref=['AND', 'NOW', 'INQUIRED', 'THE', 'FOAL', 'SEEST', 'THOU', 'NOTHING', 'NOW'] +3538-163622-0021-1536: hyp=['AND', 'NOW', 'INQUIRED', 'THE', 'FULL', 'CEASE', 'DONE', 'NOTHING', 'NOW'] +3538-163622-0022-1537: ref=['NOW', 'THEN', 'SAID', 'THE', 'FOAL', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0022-1537: hyp=['NOW', 'THEN', 'SAID', 'THE', 'FOOL', 'DOST', 'THOU', 'NOT', 'SEE', 'ANYTHING', 'NOW'] +3538-163622-0023-1538: ref=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0023-1538: hyp=['THAT', 'IS', 'A', 'RIVER', 'SAID', 'THE', 'FOAL', 'AND', 'WE', 'HAVE', 'TO', 'CROSS', 'IT'] +3538-163622-0024-1539: ref=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'CINDERLAD'] +3538-163622-0024-1539: hyp=['I', 'HAVE', 'DONE', 'MY', 'BEST', 'REPLIED', 'SIR', 'LAD'] +3538-163624-0000-1540: ref=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0000-1540: hyp=['ONCE', 'UPON', 'A', 'TIME', 'THERE', 'WAS', 'A', 'KING', 'IN', 'THE', 'NORTH', 'WHO', 'HAD', 'WON', 'MANY', 'WARS', 'BUT', 'NOW', 'HE', 'WAS', 'OLD'] +3538-163624-0001-1541: ref=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'FOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0001-1541: hyp=['THE', 'OLD', 'KING', 'WENT', 'OUT', 'AND', 'THOUGHT', 'BRAVELY', 'BUT', 'AT', 'LAST', 'HIS', 'SWORD', 'BROKE', 'AND', 'HE', 'WAS', 'WOUNDED', 'AND', 'HIS', 'MEN', 'FLED'] +3538-163624-0002-1542: ref=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 
'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0002-1542: hyp=['BUT', 'IN', 'THE', 'NIGHT', 'WHEN', 'THE', 'BATTLE', 'WAS', 'OVER', 'HIS', 'YOUNG', 'WIFE', 'CAME', 'OUT', 'AND', 'SEARCHED', 'FOR', 'HIM', 'AMONG', 'THE', 'SLAIN', 'AND', 'AT', 'LAST', 'SHE', 'FOUND', 'HIM', 'AND', 'ASKED', 'WHETHER', 'HE', 'MIGHT', 'BE', 'HEALED'] +3538-163624-0003-1543: ref=['SO', 'HE', 'ASKED', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 'SHE', 'SAID'] +3538-163624-0003-1543: hyp=['SO', 'YES', 'THE', 'QUEEN', 'HOW', 'DO', 'YOU', 'KNOW', 'IN', 'THE', 'DARK', 'OF', 'NIGHT', 'WHETHER', 'THE', 'HOURS', 'ARE', 'WEARING', 'TO', 'THE', 'MORNING', 'AND', 'SHE', 'SAID'] +3538-163624-0004-1544: ref=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0004-1544: hyp=['THEN', 'THE', 'OLD', 'MAN', 'SAID', 'DRIVE', 'ALL', 'THE', 'HORSES', 'INTO', 'THE', 'RIVER', 'AND', 'CHOOSE', 'THE', 'ONE', 'THAT', 'SWIMS', 'ACROSS'] +3538-163624-0005-1545: ref=['HE', 'IS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0005-1545: hyp=['HE', 'HAS', 'NO', 'BIGGER', 'THAN', 'OTHER', 'DRAGONS', 'SAID', 'THE', 'TUTOR', 'AND', 'IF', 'YOU', 'WERE', 'AS', 'BRAVE', 'AS', 'YOUR', 'FATHER', 'YOU', 'WOULD', 'NOT', 'FEAR', 'HIM'] +3538-163624-0006-1546: ref=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0006-1546: hyp=['THEN', 'THE', 'PERSON', 'WHO', 'HAD', 'KILLED', 'OTTER', 'WENT', 'DOWN', 'AND', 'CAUGHT', 'THE', 'DWARF', 'WHO', 'OWNED', 'ALL', 'THE', 'TREASURE', 'AND', 'TOOK', 'IT', 'FROM', 'HIM'] +3538-163624-0007-1547: ref=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0007-1547: hyp=['ONLY', 'ONE', 'RING', 'WAS', 'LEFT', 'WHICH', 'THE', 'DWARF', 'WORE', 'AND', 'EVEN', 'THAT', 'WAS', 'TAKEN', 'FROM', 'HIM'] +3538-163624-0008-1548: ref=['SO', 'REGIN', 'MADE', 'A', 'SWORD', 'AND', 'SIGURD', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'ON', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0008-1548: hyp=['SO', 'RIGAN', 'MADE', 'A', 'SWORD', 'AND', 'CIGAR', 'TRIED', 'IT', 'WITH', 'A', 'BLOW', 'AND', 'A', 'LUMP', 'OF', 'IRON', 'AND', 'THE', 'SWORD', 'BROKE'] +3538-163624-0009-1549: ref=['THEN', 'SIGURD', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGIN'] +3538-163624-0009-1549: hyp=['THEN', 'CIGAR', 'WENT', 'TO', 'HIS', 'MOTHER', 'AND', 'ASKED', 'FOR', 'THE', 'BROKEN', 'PIECES', 'OF', 'HIS', "FATHER'S", 'BLADE', 'AND', 'GAVE', 'THEM', 'TO', 'REGAN'] +3538-163624-0010-1550: ref=['SO', 'SIGURD', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0010-1550: hyp=['SO', 'CIGARS', 'SAID', 'THAT', 'SWORD', 'WOULD', 'DO'] +3538-163624-0011-1551: ref=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 
'DEEP', 'VALLEY'] +3538-163624-0011-1551: hyp=['THEN', 'HE', 'SAW', 'THE', 'TRACK', 'WHICH', 'THE', 'DRAGON', 'HAD', 'MADE', 'WHEN', 'HE', 'WENT', 'TO', 'A', 'CLIFF', 'TO', 'DRINK', 'AND', 'THE', 'TRACK', 'WAS', 'AS', 'IF', 'A', 'GREAT', 'RIVER', 'HAD', 'ROLLED', 'ALONG', 'AND', 'LEFT', 'A', 'DEEP', 'VALLEY'] +3538-163624-0012-1552: ref=['BUT', 'SIGURD', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0012-1552: hyp=['BUT', 'CIGARET', 'WAITED', 'TILL', 'HALF', 'OF', 'HIM', 'HAD', 'CRAWLED', 'OVER', 'THE', 'PIT', 'AND', 'THEN', 'HE', 'THRUST', 'THE', 'SWORD', 'GRAHAM', 'RIGHT', 'INTO', 'HIS', 'VERY', 'HEART'] +3538-163624-0013-1553: ref=['SIGURD', 'SAID', 'I', 'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0013-1553: hyp=['CIGAR', 'SAID', 'I', 'WOULD', 'TOUCH', 'NONE', 'OF', 'IT', 'IF', 'BY', 'LOSING', 'IT', 'I', 'SHOULD', 'NEVER', 'DIE'] +3538-163624-0014-1554: ref=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'NO', 'BRAVE', 'MAN', 'LETS', 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0014-1554: hyp=['BUT', 'ALL', 'MEN', 'DIE', 'AND', 'KNOW', 'BRAVE', 'MAN', 'LETS', 'DEATH', 'FRIGHTEN', 'HIM', 'FROM', 'HIS', 'DESIRE'] +3538-163624-0015-1555: ref=['DIE', 'THOU', 'FAFNIR', 'AND', 'THEN', 'FAFNIR', 'DIED'] +3538-163624-0015-1555: hyp=['GUY', 'THOU', 'FAFNER', 'AND', 'THEN', 'STAFF', 'DIED'] +3538-163624-0016-1556: ref=['THEN', 'SIGURD', 'RODE', 'BACK', 'AND', 'MET', 'REGIN', 'AND', 'REGIN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNIR'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0016-1556: hyp=['THEN', 'CIGAR', 'RODE', 'BACK', 'AND', 'MET', 'RIGAN', 'AND', 'RIGAN', 'ASKED', 'HIM', 'TO', 'ROAST', "FAFNER'S", 'HEART', 'AND', 'LET', 'HIM', 'TASTE', 'OF', 'IT'] +3538-163624-0017-1557: ref=['SO', 'SIGURD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNIR', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0017-1557: hyp=['SO', 'SIR', 'GOD', 'PUT', 'THE', 'HEART', 'OF', 'FAFNER', 'ON', 'A', 'STAKE', 'AND', 'ROASTED', 'IT'] +3538-163624-0018-1558: ref=['THERE', 'IS', 'SIGURD', 'ROASTING', "FAFNIR'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0018-1558: hyp=['THERE', 'IS', 'CIGAR', 'ROASTING', "FASTENER'S", 'HEART', 'FOR', 'ANOTHER', 'WHEN', 'HE', 'SHOULD', 'TASTE', 'OF', 'IT', 'HIMSELF', 'AND', 'LEARN', 'ALL', 'WISDOM'] +3538-163624-0019-1559: ref=['THAT', 'LET', 'HIM', 'DO', 'AND', 'THEN', 'RIDE', 'OVER', 'HINDFELL', 'TO', 'THE', 'PLACE', 'WHERE', 'BRYNHILD', 'SLEEPS'] +3538-163624-0019-1559: hyp=['THAT', 'LET', 'HIM', 'DO', 'THEN', 'RIDE', 'OVER', 'HINFIELD', 'TO', 'THE', 'PLACE', 'WHERE', 'BURNHILD', 'SLEEPS'] +3538-163624-0020-1560: ref=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMEST', 'FOR', 'HER', 'WAKING', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0020-1560: hyp=['THERE', 'MUST', 'SHE', 'SLEEP', 'TILL', 'THOU', 'COMES', 'FOR', 'HER', 'WAKING', 'WHO', 'RISE', 'UP', 'AND', 'RIDE', 'FOR', 'NOW', 'SURE', 'SHE', 'WILL', 'SWEAR', 'THE', 'VOW', 'FEARLESS', 'OF', 'BREAKING'] +3538-163624-0021-1561: ref=['THEN', 'HE', 'TOOK', 'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0021-1561: hyp=['THEN', 'HE', 
'TOOK', 'THE', 'HELMET', 'OFF', 'THE', 'HEAD', 'OF', 'THE', 'SLEEPER', 'AND', 'BEHOLD', 'SHE', 'WAS', 'A', 'MOST', 'BEAUTIFUL', 'LADY'] +3538-163624-0022-1562: ref=['THEN', 'SIGURD', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0022-1562: hyp=['THEN', 'CIGAR', 'RODE', 'AWAY', 'AND', 'HE', 'CAME', 'TO', 'THE', 'HOUSE', 'OF', 'A', 'KING', 'WHO', 'HAD', 'A', 'FAIR', 'DAUGHTER'] +3538-163624-0023-1563: ref=['THEN', "BRYNHILD'S", 'FATHER', 'TOLD', 'GUNNAR', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNAR', 'SET', 'HIS', 'HORSE', 'AT', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 'FACE', 'IT'] +3538-163624-0023-1563: hyp=['WHEN', "BRUNHOLD'S", 'FATHER', 'TOLD', 'GUNNER', 'THAT', 'SHE', 'WOULD', 'MARRY', 'NONE', 'BUT', 'HIM', 'WHO', 'COULD', 'RIDE', 'THE', 'FLAME', 'IN', 'FRONT', 'OF', 'HER', 'ENCHANTED', 'TOWER', 'AND', 'THITHER', 'THEY', 'RODE', 'AND', 'GUNNER', 'SET', 'HIS', 'HORSE', 'TO', 'THE', 'FLAME', 'BUT', 'HE', 'WOULD', 'NOT', 'FACE', 'IT'] +3538-163624-0024-1564: ref=['FOR', 'ONE', 'DAY', 'WHEN', 'BRYNHILD', 'AND', 'GUDRUN', 'WERE', 'BATHING', 'BRYNHILD', 'WADED', 'FARTHEST', 'OUT', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', "GUIRUN'S", 'SUPERIOR'] +3538-163624-0024-1564: hyp=['FOR', 'ONE', 'DAY', 'WHEN', 'BURNEHELD', 'AND', 'GUNDRAIN', 'WERE', 'BATHING', 'BURNEHELD', 'WAITED', 'FARTHEST', 'SOUTH', 'INTO', 'THE', 'RIVER', 'AND', 'SAID', 'SHE', 'DID', 'THAT', 'TO', 'SHOW', 'SHE', 'WAS', 'GUNDERING', 'SUPERIOR'] +3538-163624-0025-1565: ref=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0025-1565: hyp=['FOR', 'HER', 'HUSBAND', 'SHE', 'SAID', 'HAD', 'RIDDEN', 'THROUGH', 'THE', 'FLAME', 'WHEN', 'NO', 'OTHER', 'MAN', 'DARED', 'FACE', 'IT'] +3538-163624-0026-1566: ref=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILL', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +3538-163624-0026-1566: hyp=['NOT', 'LONG', 'TO', 'WAIT', 'HE', 'SAID', 'TILL', 'THE', 'BITTER', 'SWORD', 'STANDS', 'FAST', 'IN', 'MY', 'HEART', 'AND', 'THOU', 'WILT', 'NOT', 'LIVE', 'LONG', 'WHEN', 'I', 'AM', 'DEAD'] +367-130732-0000-1466: ref=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0000-1466: hyp=['LOBSTERS', 'AND', 'LOBSTERS'] +367-130732-0001-1467: ref=['WHEN', 'IS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0001-1467: hyp=['WHEN', 'AS', 'A', 'LOBSTER', 'NOT', 'A', 'LOBSTER', 'WHEN', 'IT', 'IS', 'A', 'CRAYFISH'] +367-130732-0002-1468: ref=['THIS', 'QUESTION', 'AND', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMER', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSTER', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] +367-130732-0002-1468: hyp=['THIS', 'QUESTION', 'AND', 'ANSWER', 'MIGHT', 'WELL', 'GO', 'INTO', 'THE', 'PRIMARY', 'OF', 'INFORMATION', 'FOR', 'THOSE', 'WHO', 'COME', 'THE', 'SAN', 'FRANCISCO', 'FROM', 'THE', 'EAST', 'FOR', 'WHAT', 'IS', 'CALLED', 'A', 'LOBSTER', 'IN', 'SAN', 'FRANCISCO', 'IS', 'NOT', 'A', 'LOBSTER', 'AT', 'ALL', 'BUT', 'A', 'CRAYFISH'] 
+367-130732-0003-1469: ref=['THE', 'PACIFIC', 'CRAYFISH', 'HOWEVER', 'SERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTEND', 'THAT', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'IN', 'FLAVOR', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIAN', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIAN'] +367-130732-0003-1469: hyp=['THE', 'PACIFIC', 'CRAYFISH', 'HOURSERVES', 'EVERY', 'PURPOSE', 'AND', 'WHILE', 'MANY', 'CONTENDED', 'ITS', 'MEAT', 'IS', 'NOT', 'SO', 'DELICATE', 'AND', 'FLAVORITE', 'AS', 'THAT', 'OF', 'ITS', 'EASTERN', 'COUSIN', 'THE', 'CALIFORNIA', 'WILL', 'AS', 'STRENUOUSLY', 'INSIST', 'THAT', 'IT', 'IS', 'BETTER', 'BUT', 'OF', 'COURSE', 'SOMETHING', 'MUST', 'ALWAYS', 'BE', 'ALLOWED', 'FOR', 'THE', 'PATRIOTISM', 'OF', 'THE', 'CALIFORNIA'] +367-130732-0004-1470: ref=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0004-1470: hyp=['A', 'BOOK', 'COULD', 'BE', 'WRITTEN', 'ABOUT', 'THIS', 'RESTAURANT', 'AND', 'THEN', 'ALL', 'WOULD', 'NOT', 'BE', 'TOLD', 'FOR', 'ALL', 'ITS', 'SECRETS', 'CAN', 'NEVER', 'BE', 'KNOWN'] +367-130732-0005-1471: ref=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WORLD', 'WIDE', 'FAME', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'A', 'LA', 'NEWBERG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'DELMONICO'] +367-130732-0005-1471: hyp=['IT', 'WAS', 'HERE', 'THAT', 'MOST', 'MAGNIFICENT', 'DINNERS', 'WERE', 'ARRANGED', 'IT', 'WAS', 'HERE', 'THAT', 'EXTRAORDINARY', 'DISHES', 'WERE', 'CONCOCTED', 'BY', 'CHEFS', 'OF', 'WOOLWRIGHT', 'FAME', 'IT', 'WAS', 'HERE', 'THAT', 'LOBSTER', 'ALAD', 'NEWBURG', 'REACHED', 'ITS', 'HIGHEST', 'PERFECTION', 'AND', 'THIS', 'IS', 'THE', 'RECIPE', 'THAT', 'WAS', 'FOLLOWED', 'WHEN', 'HE', 'WAS', 'PREPARED', 'IN', 'THE', 'DOMONICO'] +367-130732-0006-1472: ref=['LOBSTER', 'A', 'LA', 'NEWBERG'] +367-130732-0006-1472: hyp=['LOBSTER', 'OLY', 'NEWBURG'] +367-130732-0007-1473: ref=['ONE', 'POUND', 'OF', 'LOBSTER', 'MEAT', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0007-1473: hyp=['ONE', 'POUND', 'OF', 'LOBS', 'TO', 'ME', 'ONE', 'TEASPOONFUL', 'OF', 'BUTTER', 'ONE', 'HALF', 'PINT', 'OF', 'CREAM', 'YOLKS', 'OF', 'FOUR', 'EGGS', 'ONE', 'WINE', 'GLASS', 'OF', 'SHERRY', 'LOBSTER', 'FAT'] +367-130732-0008-1474: ref=['PUT', 'THIS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0008-1474: hyp=['PUT', 'THIS', 'IN', 'A', 'DOUBLE', 'BOILER', 'AND', 'LET', 'COOK', 'UNTIL', 'THICK', 'STIRRING', 'CONSTANTLY'] +367-130732-0009-1475: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'THIN', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0009-1475: hyp=['SERVE', 'IN', 'A', 'CHIEFING', 'DISH', 'WITH', 'FLIND', 'SLICES', 'OF', 'DRY', 'TOAST'] +367-130732-0010-1476: ref=['KING', 'OF', 'SHELL', 'FISH'] +367-130732-0010-1476: hyp=['KING', 'OF', 'SHELLFISH'] +367-130732-0011-1477: ref=['ONE', 'HAS', 'TO', 'COME', 'TO', 
'SAN', 'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELL', 'FISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0011-1477: hyp=['ONE', 'HAS', 'TO', 'COME', 'TO', 'SAN', 'FRANCISCO', 'TO', 'PARTAKE', 'OF', 'THE', 'KING', 'OF', 'SHELLFISH', 'THE', 'MAMMOTH', 'PACIFIC', 'CRAB'] +367-130732-0012-1478: ref=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAN', 'FRANCISCO'] +367-130732-0012-1478: hyp=['I', 'SAY', 'COME', 'TO', 'SAN', 'FRANCISCO', 'ADVISEDLY', 'FOR', 'WHILE', 'THE', 'CRAB', 'IS', 'FOUND', 'ALL', 'ALONG', 'THE', 'COAST', 'IT', 'IS', 'PREPARED', 'NOWHERE', 'SO', 'DELICIOUSLY', 'AS', 'IN', 'SAMPANCISCO'] +367-130732-0013-1479: ref=["GOBEY'S", 'PASSED', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'AND', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'IN', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0013-1479: hyp=['GOBYS', 'PASS', 'WITH', 'THE', 'FIRE', 'AND', 'THE', 'LITTLE', 'RESTAURANT', 'BEARING', 'HIS', 'NAME', 'IN', 'CHARGE', 'OF', 'HIS', 'WIDOW', 'AND', 'UNION', 'SQUARE', 'AVENUE', 'HAS', 'NOT', 'ATTAINED', 'THE', 'FAME', 'OF', 'THE', 'OLD', 'PLACE'] +367-130732-0014-1480: ref=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', "GOBEY'S", 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESTIGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0014-1480: hyp=['IT', 'IS', 'POSSIBLE', 'THAT', 'SHE', 'KNOWS', 'THE', 'SECRET', 'OF', 'PREPARING', 'CRAB', 'AS', 'IT', 'WAS', 'PREPARED', 'IN', 'THE', 'GOBIES', 'OF', 'BEFORE', 'THE', 'FIRE', 'BUT', 'HIS', 'PRESAGE', 'DID', 'NOT', 'DESCEND', 'TO', 'HER'] +367-130732-0015-1481: ref=["GOBEY'S", 'CRAB', 'STEW'] +367-130732-0015-1481: hyp=["GOBY'S", 'CRABS', 'DO'] +367-130732-0016-1482: ref=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'OF', 'THE', 'FAT', 'FROM', 'THE', 'SHELL'] +367-130732-0016-1482: hyp=['TAKE', 'THE', 'MEAT', 'OF', 'ONE', 'LARGE', 'CRAB', 'SCRAPING', 'OUT', 'ALL', 'THE', 'BAT', 'FROM', 'THE', 'SHELL'] +367-130732-0017-1483: ref=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0017-1483: hyp=['SOAK', 'THE', 'CRAB', 'MEAT', 'IN', 'THE', 'SHERRY', 'TWO', 'HOURS', 'BEFORE', 'COOKING'] +367-130732-0018-1484: ref=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0018-1484: hyp=['CHOP', 'FINE', 'THE', 'ONION', 'SWEET', 'PEPPER', 'AND', 'TOMATO', 'WITH', 'THE', 'ROSEMARY'] +367-130732-0019-1485: ref=['HEAT', 'THIS', 'IN', 'A', 'STEWPAN', 'AND', 'WHEN', 'SIMMERING', 'ADD', 'THE', 'SHERRY', 'AND', 'CRAB', 'MEAT', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0019-1485: hyp=['HEAT', 'THIS', 'IN', 'A', 'STEWPANT', 'AND', 'WENT', 'SIMMERING', 'AT', 'THE', 'SHERRY', 'AND', 'CRAB', 'ME', 'AND', 'LET', 'ALL', 'COOK', 'TOGETHER', 'WITH', 'A', 'SLOW', 'FIRE', 'FOR', 'EIGHT', 'MINUTES'] +367-130732-0020-1486: ref=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] +367-130732-0020-1486: hyp=['SERVE', 'IN', 'A', 'CHAFING', 'DISH', 'WITH', 'TOASTED', 'CRACKERS', 'OR', 'THIN', 'SLICES', 'OF', 'TOASTED', 'BREAD'] 
+367-130732-0021-1487: ref=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0021-1487: hyp=['LOBSTER', 'IN', 'MINIATURE'] +367-130732-0022-1488: ref=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'HOF', 'BRAU', 'HAS', 'BEEN', 'MAKING', 'A', 'SPECIALTY', 'OF', 'THEM'] +367-130732-0022-1488: hyp=['SO', 'FAR', 'IT', 'HAS', 'BEEN', 'USED', 'MOSTLY', 'FOR', 'GARNISHMENT', 'OF', 'OTHER', 'DISHES', 'AND', 'IT', 'IS', 'ONLY', 'RECENTLY', 'THAT', 'THE', 'WHOLE', 'BROW', 'HAS', 'BEEN', 'MAKING', 'ESPECIALTY', 'OF', 'THEM'] +367-130732-0023-1489: ref=['ALL', 'OF', 'THE', 'BETTER', 'CLASS', 'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0023-1489: hyp=['ALL', 'THE', 'BETTER', 'CLASS', 'RESTAURANTS', 'HOWEVER', 'WILL', 'SERVE', 'THEM', 'IF', 'YOU', 'ORDER', 'THEM'] +367-130732-0024-1490: ref=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0024-1490: hyp=['THIS', 'IS', 'THE', 'RECIPE', 'FOR', 'EIGHT', 'PEOPLE', 'AND', 'IT', 'IS', 'WELL', 'IT', 'WORTH', 'TRYING', 'IF', 'YOU', 'ARE', 'GIVING', 'A', 'DINNER', 'OF', 'IMPORTANCE'] +367-130732-0025-1491: ref=['BISQUE', 'OF', 'CRAWFISH'] +367-130732-0025-1491: hyp=['THIS', 'OF', 'CROFISH'] +367-130732-0026-1492: ref=['TAKE', 'THIRTY', 'CRAWFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLAWS', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0026-1492: hyp=['TAKE', 'THIRTY', 'CROPFISH', 'FROM', 'WHICH', 'REMOVE', 'THE', 'GUT', 'CONTAINING', 'THE', 'GALL', 'IN', 'THE', 'FOLLOWING', 'MANNER', 'TAKE', 'FIRM', 'HOLD', 'OF', 'THE', 'CRAWFISH', 'WITH', 'THE', 'LEFT', 'HAND', 'SO', 'AS', 'TO', 'AVOID', 'BEING', 'PINCHED', 'BY', 'ITS', 'CLOTHS', 'WITH', 'THE', 'THUMB', 'AND', 'FOREFINGER', 'OF', 'THE', 'RIGHT', 'HAND', 'PINCH', 'THE', 'EXTREME', 'END', 'OF', 'THE', 'CENTRAL', 'FIN', 'OF', 'THE', 'TAIL', 'AND', 'WITH', 'A', 'SUDDEN', 'JERK', 'THE', 'GUT', 'WILL', 'BE', 'WITHDRAWN'] +367-130732-0027-1493: ref=['MINCE', 'OR', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AN', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'ADD', 'A', 'BAY', 'LEAF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MINIONETTE', 'PEPPER', 'AND', 'TWO', 'OUNCES', 'OF', 'BUTTER'] +367-130732-0027-1493: hyp=['MINCE', 'ARE', 'CUT', 'INTO', 'SMALL', 'DICE', 'A', 'CARROT', 'AND', 'ONION', 'ONE', 'HEAD', 'OF', 'CELERY', 'AND', 'A', 'FEW', 'PARSLEY', 'ROOTS', 'AND', 'TO', 'THESE', 'AT', 'A', 'BAY', 'LEAF', 'A', 'SPRIG', 'OF', 'THYME', 'A', 'LITTLE', 'MEAN', 'ON', 'IT', 'PEPPER', 'AND', 'TWO', 'OZ', 'OF', 'BUTTER'] +367-130732-0028-1494: ref=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 'TEN', 'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CRAWFISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0028-1494: hyp=['PUT', 'THESE', 'INGREDIENTS', 'INTO', 'A', 'STEWPAN', 'AND', 'FRY', 'THEM', 'TEN', 
'MINUTES', 'THEN', 'THROW', 'IN', 'THE', 'CROPPISH', 'AND', 'POUR', 'ON', 'THEM', 'HALF', 'A', 'BOTTLE', 'OF', 'FRENCH', 'WHITE', 'WINE'] +367-130732-0029-1495: ref=['ALLOW', 'THIS', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSOMME', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0029-1495: hyp=['ALLOW', 'US', 'TO', 'BOIL', 'AND', 'THEN', 'ADD', 'A', 'QUART', 'OF', 'STRONG', 'CONSUM', 'AND', 'LET', 'ALL', 'CONTINUE', 'BOILING', 'FOR', 'HALF', 'AN', 'HOUR'] +367-130732-0030-1496: ref=['PICK', 'OUT', 'THE', 'CRAWFISH', 'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0030-1496: hyp=['PICK', 'OUT', 'THE', 'CRAW', 'FISH', 'AND', 'STRAIN', 'THE', 'BROTH', 'THROUGH', 'A', 'NAPKIN', 'BY', 'PRESSURE', 'INTO', 'A', 'BASIN', 'IN', 'ORDER', 'TO', 'EXTRACT', 'ALL', 'THE', 'ESSENCE', 'FROM', 'THE', 'VEGETABLES'] +367-130732-0031-1497: ref=['PICK', 'THE', 'SHELLS', 'OFF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CRAWFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0031-1497: hyp=['PICK', 'THE', 'SHELLS', 'OF', 'TWENTY', 'FIVE', 'OF', 'THE', 'CROFISH', 'TAILS', 'TRIM', 'THEM', 'NEATLY', 'AND', 'SET', 'THEM', 'ASIDE', 'UNTIL', 'WANTED'] +367-130732-0032-1498: ref=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HALF', 'OF', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0032-1498: hyp=['RESERVE', 'SOME', 'OF', 'THE', 'SPAWN', 'ALSO', 'HAPPENED', 'THE', 'BODY', 'SHELLS', 'WITH', 'WHICH', 'TO', 'MAKE', 'THE', 'CRAWFISH', 'BUTTER', 'TO', 'FINISH', 'THE', 'SOUP'] +367-130732-0033-1499: ref=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'ON', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'CORAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-130732-0033-1499: hyp=['THIS', 'BUTTER', 'IS', 'MADE', 'AS', 'FOLLOWS', 'PLACE', 'THE', 'SHELLS', 'IN', 'A', 'BAKING', 'SHEET', 'IN', 'THE', 'OVEN', 'TO', 'DRY', 'LET', 'THE', 'SHELLS', 'COOL', 'AND', 'THEN', 'POUND', 'THEM', 'IN', 'A', 'MORTAR', 'WITH', 'A', 'LITTLE', 'LOBSTER', 'COAL', 'AND', 'FOUR', 'OUNCES', 'OF', 'FRESH', 'BUTTER', 'THOROUGHLY', 'BRUISING', 'THE', 'WHOLE', 'TOGETHER', 'SO', 'AS', 'TO', 'MAKE', 'A', 'FINE', 'PASTE'] +367-293981-0000-1445: ref=['I', 'SWEAR', 'IT', 'ANSWERED', 'SANCHO'] +367-293981-0000-1445: hyp=['I', 'SWEAR', 'ANSWERED', 'SANCHO'] +367-293981-0001-1446: ref=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', "ANYONE'S", 'GOOD', 'NAME'] +367-293981-0001-1446: hyp=['I', 'SAY', 'SO', 'CONTINUED', 'DON', 'QUIXOTE', 'BECAUSE', 'I', 'HATE', 'TAKING', 'AWAY', 'ANY', "ONE'S", 'GOOD', 'NAME'] +367-293981-0002-1447: ref=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 'END', 'OF', 'YOUR', "WORSHIP'S", 'DAYS', 'AND', 'GOD', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TOMORROW'] +367-293981-0002-1447: hyp=['I', 'SAY', 'REPLIED', 'SANCHO', 'THAT', 'I', 'SWEAR', 'TO', 'HOLD', 'MY', 'TONGUE', 'ABOUT', 'IT', 'TILL', 'THE', 
'END', 'OF', 'YOUR', 'WORSHIP', 'STAYS', 'AND', 'GONE', 'GRANT', 'I', 'MAY', 'BE', 'ABLE', 'TO', 'LET', 'IT', 'OUT', 'TO', 'MORROW'] +367-293981-0003-1448: ref=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THAT', 'INCOMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WHACKS', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0003-1448: hyp=['THOUGH', 'YOUR', 'WORSHIP', 'WAS', 'NOT', 'SO', 'BADLY', 'OFF', 'HAVING', 'IN', 'YOUR', 'ARMS', 'THE', 'INN', 'COMPARABLE', 'BEAUTY', 'YOU', 'SPOKE', 'OF', 'BUT', 'I', 'WHAT', 'DID', 'I', 'HAVE', 'EXCEPT', 'THE', 'HEAVIEST', 'WAX', 'THAT', 'I', 'THINK', 'I', 'HAD', 'IN', 'ALL', 'MY', 'LIFE'] +367-293981-0004-1449: ref=['UNLUCKY', 'ME', 'AND', 'THE', 'MOTHER', 'THAT', 'BORE', 'ME'] +367-293981-0004-1449: hyp=['UNLUCKY', 'ME', 'INTO', 'THE', 'MOTHER', 'THAT', 'BORE', 'ME'] +367-293981-0005-1450: ref=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0005-1450: hyp=["DIDN'T", 'I', 'SAY', 'SO', 'WORSE', 'LUCK', 'TO', 'MY', 'LINE', 'SAID', 'SANCHO'] +367-293981-0006-1451: ref=['IT', 'CANNOT', 'BE', 'THE', 'MOOR', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANYONE'] +367-293981-0006-1451: hyp=['IT', 'CANNOT', 'BE', 'THE', 'MORE', 'ANSWERED', 'DON', 'QUIXOTE', 'FOR', 'THOSE', 'UNDER', 'ENCHANTMENT', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'BY', 'ANYONE'] +367-293981-0007-1452: ref=['IF', 'THEY', "DON'T", 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0007-1452: hyp=['IF', 'THEY', 'DO', 'NOT', 'LET', 'THEMSELVES', 'BE', 'SEEN', 'THEY', 'LET', 'THEMSELVES', 'BE', 'FELT', 'SAID', 'SANCHO', 'IF', 'NOT', 'LET', 'MY', 'SHOULDERS', 'SPEAK', 'TO', 'THE', 'POINT'] +367-293981-0008-1453: ref=['MINE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUFFICIENT', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0008-1453: hyp=['MIKE', 'COULD', 'SPEAK', 'TOO', 'SAID', 'DON', 'QUIXOTE', 'BUT', 'THAT', 'IS', 'NOT', 'A', 'SUSPICION', 'OF', 'REASON', 'FOR', 'BELIEVING', 'THAT', 'WHAT', 'WE', 'SEE', 'IS', 'THE', 'ENCHANTED', 'MOOR'] +367-293981-0009-1454: ref=['THE', 'OFFICER', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'IT', 'GOOD', 'MAN'] +367-293981-0009-1454: hyp=['THE', 'OFFICERS', 'TURNED', 'TO', 'HIM', 'AND', 'SAID', 'WELL', 'HOW', 'GOES', 'A', 'GOOD', 'MAN'] +367-293981-0010-1455: ref=['SANCHO', 'GOT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SENOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'THE', 'FAVOUR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WINE', 'FOR', 'IT', 'IS', 'WANTED', 'TO', 'CURE', 'ONE', 'OF', 'THE', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0010-1455: hyp=['SANCHA', 'CUT', 'UP', 'WITH', 'PAIN', 'ENOUGH', 'IN', 'HIS', 'BONES', 'AND', 
'WENT', 'AFTER', 'THE', 'INNKEEPER', 'IN', 'THE', 'DARK', 'AND', 'MEETING', 'THE', 'OFFICER', 'WHO', 'WAS', 'LOOKING', 'TO', 'SEE', 'WHAT', 'HAD', 'BECOME', 'OF', 'HIS', 'ENEMY', 'HE', 'SAID', 'TO', 'HIM', 'SENOR', 'WHOEVER', 'YOU', 'ARE', 'DO', 'US', 'TO', 'FAVOR', 'AND', 'KINDNESS', 'TO', 'GIVE', 'US', 'A', 'LITTLE', 'ROSEMARY', 'OIL', 'SALT', 'AND', 'WHITE', 'FOR', 'IT', 'IS', 'WATER', 'TO', 'CURE', 'ONE', 'OF', 'OUR', 'BEST', 'KNIGHTS', 'ERRANT', 'ON', 'EARTH', 'WHO', 'LIES', 'ON', 'YONDER', 'BED', 'WOUNDED', 'BY', 'THE', 'HANDS', 'OF', 'THE', 'ENCHANTED', 'MOOR', 'THAT', 'IS', 'IN', 'THIS', 'INN'] +367-293981-0011-1456: ref=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATERIALS', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'AND', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0011-1456: hyp=['TO', 'BE', 'BRIEF', 'HE', 'TOOK', 'THE', 'MATURES', 'OF', 'WHICH', 'HE', 'MADE', 'A', 'COMPOUND', 'MIXING', 'THEM', 'ALL', 'OF', 'BOILING', 'THEM', 'A', 'GOOD', 'WHILE', 'IT', 'UNTIL', 'IT', 'SEEMED', 'TO', 'HIM', 'THEY', 'HAD', 'COME', 'TO', 'PERFECTION'] +367-293981-0012-1457: ref=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LEFT', 'IN', 'THE', 'PIGSKIN', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0012-1457: hyp=['SANCHO', 'PANZA', 'WHO', 'ALSO', 'REGARDED', 'THE', 'AMENDMENT', 'OF', 'HIS', 'MASTER', 'AS', 'MIRACULOUS', 'BEGGED', 'HIM', 'TO', 'GIVE', 'HIM', 'WHAT', 'WAS', 'LET', 'AN', 'OPINION', 'WHICH', 'WAS', 'NO', 'SMALL', 'QUANTITY'] +367-293981-0013-1458: ref=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GULPED', 'DOWN', 'AND', 'DRAINED', 'OFF', 'VERY', 'LITTLE', 'LESS', 'THAN', 'HIS', 'MASTER'] +367-293981-0013-1458: hyp=['DON', 'QUIXOTE', 'CONSENTED', 'AND', 'HE', 'TAKING', 'IT', 'WITH', 'BOTH', 'HANDS', 'IN', 'GOOD', 'FAITH', 'AND', 'WITH', 'A', 'BETTER', 'WILL', 'GULPED', 'IT', 'DOWN', 'AND', 'DRAINED', 'UP', 'VERY', 'LITTLE', 'LESS', 'THAN', 'HIS', 'MASTER'] +367-293981-0014-1459: ref=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', 'WOE', 'BETIDE', 'ME', 'AND', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0014-1459: hyp=['IF', 'YOUR', 'WORSHIP', 'KNEW', 'THAT', 'RETURNED', 'SANCHO', "WON'T", 'BETIDE', 'ME', 'IN', 'ALL', 'MY', 'KINDRED', 'WHY', 'DID', 'YOU', 'LET', 'ME', 'TASTE', 'IT'] +367-293981-0015-1460: ref=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'AND', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0015-1460: hyp=['SEARCH', 'YOUR', 'MEMORY', 'AND', 'IF', 'YOU', 'FIND', 'ANYTHING', 'OF', 'THIS', 'KIND', 'YOU', 'NEED', 'ONLY', 'TELL', 'ME', 'OF', 'IT', 'AND', 'I', 'PROMISE', 'YOU', 'BY', 'THE', 'ORDER', 'OF', 'KNIGHTHOOD', 'WHICH', 'I', 'HAVE', 'RECEIVED', 'TO', 'PROCURE', 'YOU', 'SATISFACTION', 'IN', 'REPARATION', 'TO', 'THE', 'UTMOST', 'OF', 'YOUR', 'DESIRE'] +367-293981-0016-1461: ref=['THEN', 'THIS', 'IS', 'AN', 'INN', 'SAID', 'DON', 'QUIXOTE'] +367-293981-0016-1461: hyp=['THEN', 'THIS', 'IS', 'AN', 'INN', 'SAID', 
'DON', 'QUIXOTE'] +367-293981-0017-1462: ref=['AND', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0017-1462: hyp=['IN', 'A', 'VERY', 'RESPECTABLE', 'ONE', 'SAID', 'THE', 'INNKEEPER'] +367-293981-0018-1463: ref=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKETED', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'ATTENTIVELY', 'WAS', 'PERSUADED', 'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'HIS', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0018-1463: hyp=['THE', 'CRIES', 'OF', 'THE', 'POOR', 'BLANKET', 'WRETCH', 'WERE', 'SO', 'LOUD', 'THAT', 'THEY', 'REACHED', 'THE', 'EARS', 'OF', 'HIS', 'MASTER', 'WHO', 'HALTING', 'TO', 'LISTEN', 'ATTENTIVELY', 'WAS', 'PERSUADED', 'THAT', 'SOME', 'NEW', 'ADVENTURE', 'WAS', 'COMING', 'UNTIL', 'HE', 'CLEARLY', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'THE', 'SQUIRE', 'WHO', 'UTTERED', 'THEM'] +367-293981-0019-1464: ref=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0019-1464: hyp=['HE', 'SAW', 'HIM', 'RISING', 'AND', 'FALLING', 'IN', 'THE', 'AIR', 'WITH', 'SUCH', 'GRACE', 'AND', 'NIMBLENESS', 'THAT', 'HAD', 'HIS', 'RAGE', 'ALLOWED', 'HIM', 'IT', 'IS', 'MY', 'BELIEF', 'HE', 'WOULD', 'HAVE', 'LAUGHED'] +367-293981-0020-1465: ref=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINK', 'IT', 'NOT', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'OF', 'IT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +367-293981-0020-1465: hyp=['SANCHO', 'TOOK', 'IT', 'AND', 'AS', 'HE', 'WAS', 'RAISING', 'IT', 'TO', 'HIS', 'MOUTH', 'HE', 'WAS', 'STOPPED', 'BY', 'THE', 'CRIES', 'OF', 'HIS', 'MASTER', 'EXCLAIMING', 'SANCHO', 'MY', 'SON', 'DRINK', 'NOT', 'WATER', 'DRINKIN', 'UP', 'MY', 'SON', 'FOR', 'IT', 'WILL', 'KILL', 'THEE', 'SEE', 'HERE', 'I', 'HAVE', 'THE', 'BLESSED', 'BALSAM', 'AND', 'HE', 'HELD', 'UP', 'THE', 'FLASK', 'OF', 'LIQUOR', 'AND', 'WITH', 'DRINKING', 'TWO', 'DROPS', 'WHAT', 'THOU', 'WILT', 'CERTAINLY', 'BE', 'RESTORED'] +3764-168670-0000-1666: ref=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0000-1666: hyp=['THE', 'STRIDES', 'OF', 'A', 'LAME', 'MAN', 'ARE', 'LIKE', 'THE', 'OGLING', 'GLANCES', 'OF', 'A', 'ONE', 'EYED', 'MAN', 'THEY', 'DO', 'NOT', 'REACH', 'THEIR', 'GOAL', 'VERY', 'PROMPTLY'] +3764-168670-0001-1667: ref=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0001-1667: hyp=['COSETTE', 'HAD', 'WAKED', 'UP'] +3764-168670-0002-1668: ref=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0002-1668: hyp=['JEAN', 'VALJEAN', 'HAD', 'PLACED', 'HER', 'NEAR', 'THE', 'FIRE'] +3764-168670-0003-1669: ref=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 'HOUSE', 'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0003-1669: hyp=['YOU', 'WILL', 'WAIT', 'FOR', 'ME', 'AT', 'A', "LADY'S", 
'HOUSE', 'I', 'SHALL', 'COME', 'TO', 'FETCH', 'YOU'] +3764-168670-0004-1670: ref=['EVERYTHING', 'IS', 'ARRANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0004-1670: hyp=['EVERYTHING', 'IS', 'RANGED', 'AND', 'NOTHING', 'IS', 'SAID', 'FAUCHELEVENT'] +3764-168670-0005-1671: ref=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0005-1671: hyp=['I', 'HAVE', 'PERMISSION', 'TO', 'BRING', 'YOU', 'IN', 'BUT', 'BEFORE', 'BRINGING', 'YOU', 'IN', 'YOU', 'MUST', 'BE', 'GOT', 'OUT'] +3764-168670-0006-1672: ref=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0006-1672: hyp=["THAT'S", 'WHERE', 'THE', 'DIFFICULTY', 'LIES'] +3764-168670-0007-1673: ref=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 'OUT'] +3764-168670-0007-1673: hyp=['IT', 'IS', 'EASY', 'ENOUGH', 'WITH', 'THE', 'CHILD', 'YOU', 'WILL', 'CARRY', 'HER', 'OUT'] +3764-168670-0008-1674: ref=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0008-1674: hyp=['AND', 'SHE', 'WILL', 'HOLD', 'HER', 'TONGUE', 'I', 'ANSWER', 'FOR', 'THAT'] +3764-168670-0009-1675: ref=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0009-1675: hyp=['FAUCHELEVENT', 'GRUMBLED', 'MORE', 'TO', 'HIMSELF', 'THAN', 'TO', 'JEAN', 'VALJEAN'] +3764-168670-0010-1676: ref=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0010-1676: hyp=['YOU', 'UNDERSTAND', 'FATHER', 'MADELEINE', 'THE', 'GOVERNMENT', 'WILL', 'NOTICE', 'IT'] +3764-168670-0011-1677: ref=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0011-1677: hyp=['JEAN', 'VALJEAN', 'STARED', 'HIM', 'STRAIGHT', 'IN', 'THE', 'EYE', 'AND', 'THOUGHT', 'THAT', 'HE', 'WAS', 'RAVING'] +3764-168670-0012-1678: ref=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0012-1678: hyp=['FAUCHELEVENT', 'WENT', 'ON'] +3764-168670-0013-1679: ref=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORESS', 'EXPECTS', 'YOU'] +3764-168670-0013-1679: hyp=['IT', 'IS', 'TO', 'MORROW', 'THAT', 'I', 'AM', 'TO', 'BRING', 'YOU', 'IN', 'THE', 'PRIORS', 'EXPECTS', 'YOU'] +3764-168670-0014-1680: ref=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0014-1680: hyp=['THEN', 'HE', 'EXPLAINED', 'TO', 'JEAN', 'VALJEAN', 'THAT', 'THIS', 'WAS', 'HIS', 'RECOMPENSE', 'FOR', 'A', 'SERVICE', 'WHICH', 'HE', 'FOR', 'CHAUVELIN', 'WAS', 'TO', 'RENDER', 'TO', 'THE', 'COMMUNITY'] +3764-168670-0015-1681: ref=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0015-1681: hyp=['THAT', 'THE', 'NUN', 'WHO', 'HAD', 'DIED', 'THAT', 'MORNING', 'HAD', 'REQUESTED', 'TO', 'BE', 'BURIED', 'IN', 'THE', 'COFFIN', 'WHICH', 'HAD', 'SERVED', 'HER', 'FOR', 'A', 'BED', 'AND', 'INTERRED', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CHAPEL'] +3764-168670-0016-1682: ref=['THAT', 'THE', 'PRIORESS', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 
'THE', 'DECEASED'] +3764-168670-0016-1682: hyp=['THAT', 'THE', 'PRIOR', 'REST', 'AND', 'THE', 'VOCAL', 'MOTHERS', 'INTENDED', 'TO', 'FULFIL', 'THE', 'WISH', 'OF', 'THE', 'DECEASED'] +3764-168670-0017-1683: ref=['THAT', 'HE', 'FAUCHELEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISE', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'LOWER', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0017-1683: hyp=['THAT', 'HE', 'FOR', 'SCHLEVENT', 'WAS', 'TO', 'NAIL', 'UP', 'THE', 'COFFIN', 'IN', 'THE', 'CELL', 'RAISED', 'THE', 'STONE', 'IN', 'THE', 'CHAPEL', 'AND', 'BLOW', 'THE', 'CORPSE', 'INTO', 'THE', 'VAULT'] +3764-168670-0018-1684: ref=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] +3764-168670-0018-1684: hyp=['AND', 'THEN', 'THAT', 'THERE', 'WAS', 'ANOTHER', 'THE', 'EMPTY', 'COFFIN'] +3764-168670-0019-1685: ref=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0019-1685: hyp=['WHAT', 'IS', 'THAT', 'EMPTY', 'COFFIN'] +3764-168670-0020-1686: ref=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0020-1686: hyp=['ASKED', 'JEAN', 'VALJEAN', 'FAUCHELEVENT', 'REPLIED'] +3764-168670-0021-1687: ref=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0021-1687: hyp=['WHAT', 'COFFIN', 'WHAT', 'ADMINISTRATION'] +3764-168670-0022-1688: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BOMB', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0022-1688: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'SEATED', 'SPRANG', 'UP', 'AS', 'THOUGH', 'A', 'BALM', 'HAD', 'BURST', 'UNDER', 'HIS', 'CHAIR', 'YOU'] +3764-168670-0023-1689: ref=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0023-1689: hyp=['YOU', 'KNOW', 'FAUCHELEVENT', 'WHAT', 'YOU', 'HAVE', 'SAID', 'MOTHER', 'CRUCIFIXION', 'IS', 'DEAD'] +3764-168670-0024-1690: ref=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED', 'AH'] +3764-168670-0024-1690: hyp=['AND', 'I', 'ADD', 'AND', 'FATHER', 'MADELEINE', 'IS', 'BURIED'] +3764-168670-0025-1691: ref=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0025-1691: hyp=['YOU', 'ARE', 'NOT', 'LIKE', 'OTHER', 'MEN', 'FATHER', 'MADELEINE'] +3764-168670-0026-1692: ref=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0026-1692: hyp=['THIS', 'OFFERS', 'THE', 'MEANS', 'BUT', 'GIVE', 'ME', 'SOME', 'INFORMATION', 'IN', 'THE', 'FIRST', 'PLACE'] +3764-168670-0027-1693: ref=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0027-1693: hyp=['HOW', 'LONG', 'IS', 'THE', 'COFFIN', 'SIX', 'FEET'] +3764-168670-0028-1694: ref=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'WHAT', 'CHURCH'] +3764-168670-0028-1694: hyp=['IT', 'IS', 'A', 'CHAMBER', 'ON', 'THE', 'GROUND', 'FLOOR', 'WHICH', 'HAS', 'A', 'GRATED', 'WINDOW', 'OPENING', 'ON', 'THE', 'GARDEN', 'WHICH', 'IS', 'CLOSED', 'ON', 'THE', 'OUTSIDE', 'BY', 'A', 'SHUTTER', 'AND', 'TWO', 'DOORS', 'ONE', 'LEADS', 'INTO', 'THE', 'CONVENT', 'THE', 'OTHER', 'INTO', 'THE', 'CHURCH', 'A', 'WATCH', 'CHURCH'] +3764-168670-0029-1695: ref=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 
'CAN', 'ENTER'] +3764-168670-0029-1695: hyp=['THE', 'CHURCH', 'IN', 'THE', 'STREET', 'THOUGH', 'THE', 'CHURCH', 'WHICH', 'ANY', 'ONE', 'CAN', 'ENTER'] +3764-168670-0030-1696: ref=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0030-1696: hyp=['HAVE', 'YOU', 'THE', 'KEYS', 'TO', 'THOSE', 'TWO', 'DOORS'] +3764-168670-0031-1697: ref=['NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0031-1697: hyp=['AND', 'NO', 'I', 'HAVE', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CONVENT', 'THE', 'PORTER', 'HAS', 'THE', 'KEY', 'TO', 'THE', 'DOOR', 'WHICH', 'COMMUNICATES', 'WITH', 'THE', 'CHURCH'] +3764-168670-0032-1698: ref=['ONLY', 'TO', 'ALLOW', 'THE', "UNDERTAKER'S", 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0032-1698: hyp=['ONLY', 'TO', 'ALLOW', 'THE', 'UNDERTAKERS', 'MEN', 'TO', 'ENTER', 'WHEN', 'THEY', 'COME', 'TO', 'GET', 'THE', 'COFFIN'] +3764-168670-0033-1699: ref=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0033-1699: hyp=['WHO', 'NAILS', 'UP', 'THE', 'COFFIN', 'I', 'DO'] +3764-168670-0034-1700: ref=['WHO', 'SPREADS', 'THE', 'PALL', 'OVER', 'IT'] +3764-168670-0034-1700: hyp=['WHO', 'SPREADS', 'THE', 'POOL', 'OVER', 'IT'] +3764-168670-0035-1701: ref=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'DEAD', 'ROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0035-1701: hyp=['NOT', 'ANOTHER', 'MAN', 'EXCEPT', 'THE', 'POLICE', 'DOCTOR', 'CAN', 'ENTER', 'THE', 'BEDROOM', 'THAT', 'IS', 'EVEN', 'WRITTEN', 'ON', 'THE', 'WALL'] +3764-168670-0036-1702: ref=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0036-1702: hyp=['COULD', 'YOU', 'HIDE', 'ME', 'IN', 'THAT', 'ROOM', 'TO', 'NIGHT', 'WHEN', 'EVERY', 'ONE', 'IS', 'ASLEEP'] +3764-168670-0037-1703: ref=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0037-1703: hyp=['ABOUT', 'THREE', "O'CLOCK", 'IN', 'THE', 'AFTERNOON'] +3764-168670-0038-1704: ref=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0038-1704: hyp=['I', 'SHALL', 'BE', 'HUNGRY', 'I', 'WILL', 'BRING', 'YOU', 'SOMETHING'] +3764-168670-0039-1705: ref=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0039-1705: hyp=['YOU', 'CAN', 'COME', 'AND', 'NAIL', 'ME', 'UP', 'IN', 'THE', 'COFFIN', 'AT', 'TWO', "O'CLOCK"] +3764-168670-0040-1706: ref=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0040-1706: hyp=['FAUCHELEVENT', 'RECOILED', 'AND', 'CRACKED', 'HIS', 'FINGER', 'JOINTS', 'BUT', 'THAT', 'IS', 'IMPOSSIBLE'] +3764-168670-0041-1707: ref=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0041-1707: hyp=['BAH', 'IMPOSSIBLE', 'TO', 'TAKE', 'A', 'HAMMER', 'AND', 'DRIVE', 'SOME', 'NAILS', 'IN', 'A', 'PLANK'] +3764-168670-0042-1708: ref=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAITS', 'THAN', 'THIS'] +3764-168670-0042-1708: hyp=['JEAN', 'VALJEAN', 'HAD', 'BEEN', 'IN', 'WORSE', 'STRAIT', 'THAN', 'THIS'] +3764-168670-0043-1709: ref=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 
'CONTRACT', 'HIMSELF', 'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0043-1709: hyp=['ANY', 'MAN', 'WHO', 'HAS', 'BEEN', 'A', 'PRISONER', 'UNDERSTANDS', 'HOW', 'TO', 'CONTRACT', 'HIMSELF', 'TO', 'FIT', 'THE', 'DIAMETER', 'OF', 'THE', 'ESCAPE'] +3764-168670-0044-1710: ref=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0044-1710: hyp=['WHAT', 'DOES', 'NOT', 'A', 'MAN', 'UNDERGO', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'CURE'] +3764-168670-0045-1711: ref=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0045-1711: hyp=['TO', 'HAVE', 'HIMSELF', 'NAILED', 'UP', 'IN', 'A', 'CASE', 'AND', 'CARRIED', 'OFF', 'LIKE', 'A', 'BALE', 'OF', 'GOODS', 'TO', 'LIVE', 'FOR', 'A', 'LONG', 'TIME', 'IN', 'A', 'BOX', 'TO', 'FIND', 'AIR', 'WHERE', 'THERE', 'IS', 'NONE', 'TO', 'ECONOMIZE', 'HIS', 'BREATH', 'FOR', 'HOURS', 'TO', 'KNOW', 'HOW', 'TO', 'STIFLE', 'WITHOUT', 'DYING', 'THIS', 'WAS', 'ONE', 'OF', 'JEAN', "VALJEAN'S", 'GLOOMY', 'TALENTS'] +3764-168670-0046-1712: ref=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0046-1712: hyp=['YOU', 'SURELY', 'MUST', 'HAVE', 'A', 'GIMLET', 'YOU', 'WILL', 'MAKE', 'A', 'FEW', 'HOLES', 'HERE', 'AND', 'THERE', 'AROUND', 'MY', 'MOUTH', 'AND', 'YOU', 'WILL', 'NAIL', 'THE', 'TOP', 'PLANK', 'ON', 'LOOSELY', 'GOOD', 'AND', 'WHAT', 'IF', 'YOU', 'SHOULD', 'HAPPEN', 'TO', 'COUGH', 'OR', 'TO', 'SNEEZE'] +3764-168670-0047-1713: ref=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0047-1713: hyp=['A', 'MAN', 'WHO', 'IS', 'MAKING', 'HIS', 'ESCAPE', 'DOES', 'NOT', 'COUGH', 'OR', 'SNEEZE'] +3764-168670-0048-1714: ref=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0048-1714: hyp=['WHO', 'IS', 'THERE', 'WHO', 'HAS', 'NOT', 'SAID', 'TO', 'A', 'CAT', 'DO', 'COME', 'IN'] +3764-168670-0049-1715: ref=['THE', 'OVER', 'PRUDENT', 'CATS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0049-1715: hyp=['THE', 'OVERPRUDENT', 'CARTS', 'AS', 'THEY', 'ARE', 'AND', 'BECAUSE', 'THEY', 'ARE', 'CATS', 'SOMETIMES', 'INCUR', 'MORE', 'DANGER', 'THAN', 'THE', 'AUDACIOUS'] +3764-168670-0050-1716: ref=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0050-1716: hyp=['BUT', 'JEAN', "VALJEAN'S", 'COOLNESS', 'PREVAILED', 'OVER', 'HIM', 'IN', 'SPITE', 'OF', 'HIMSELF', 'HE', 'GRUMBLED'] +3764-168670-0051-1717: ref=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 'YOU', 'OUT', 'OF', 'THE', 'GRAVE'] +3764-168670-0051-1717: hyp=['IF', 'YOU', 'ARE', 'SURE', 'OF', 'COMING', 'OUT', 'OF', 'THE', 'COFFIN', 'ALL', 'RIGHT', 'I', 'AM', 'SURE', 'OF', 'GETTING', 
'YOU', 'OUT', 'OF', 'THE', 'GRAVE'] +3764-168670-0052-1718: ref=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0052-1718: hyp=['AN', 'OLD', 'FELLOW', 'OF', 'THE', 'OLD', 'SCHOOL', 'THE', 'GRAVE', 'DIGGER', 'PUTS', 'THE', 'CORPSES', 'IN', 'THE', 'GRAVE', 'AND', 'I', 'PUT', 'THE', 'GRAVE', 'DIGGER', 'IN', 'MY', 'POCKET'] +3764-168670-0053-1719: ref=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0053-1719: hyp=['I', 'SHALL', 'FOLLOW', 'THAT', 'IS', 'MY', 'BUSINESS'] +3764-168670-0054-1720: ref=['THE', 'HEARSE', 'HALTS', 'THE', "UNDERTAKER'S", 'MEN', 'KNOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0054-1720: hyp=['THE', 'HOUSEHOLTS', 'THE', 'UNDERTAKERS', 'MEN', 'NOT', 'A', 'ROPE', 'AROUND', 'YOUR', 'COFFIN', 'AND', 'LOWER', 'YOU', 'DOWN'] +3764-168670-0055-1721: ref=['THE', 'PRIEST', 'SAYS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0055-1721: hyp=['THE', 'PRIESTS', 'AS', 'THE', 'PRAYERS', 'MAKES', 'THE', 'SIGN', 'OF', 'THE', 'CROSS', 'SPRINKLES', 'THE', 'HOLY', 'WATER', 'AND', 'TAKES', 'HIS', 'DEPARTURE'] +3764-168670-0056-1722: ref=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0056-1722: hyp=['ONE', 'OF', 'TWO', 'THINGS', 'WILL', 'HAPPEN', 'HE', 'WILL', 'EITHER', 'BE', 'SOBER', 'OR', 'HE', 'WILL', 'NOT', 'BE', 'SOBER'] +3764-168670-0057-1723: ref=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168670-0057-1723: hyp=['THAT', 'IS', 'SETTLED', 'FATHER', 'FAUCHELEVENT', 'ALL', 'WILL', 'GO', 'WELL'] +3764-168671-0000-1724: ref=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MAINE', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSS', 'BONES', 'AND', 'TEARS'] +3764-168671-0000-1724: hyp=['ON', 'THE', 'FOLLOWING', 'DAY', 'AS', 'THE', 'SUN', 'WAS', 'DECLINING', 'THE', 'VERY', 'RARE', 'PASSERS', 'BY', 'ON', 'THE', 'BOULEVARD', 'DU', 'MIN', 'PULLED', 'OFF', 'THEIR', 'HATS', 'TO', 'AN', 'OLD', 'FASHIONED', 'HEARSE', 'ORNAMENTED', 'WITH', 'SKULLS', 'CROSS', 'BONES', 'AND', 'TEARS'] +3764-168671-0001-1725: ref=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0001-1725: hyp=['THIS', 'HEARSE', 'CONTAINED', 'A', 'COFFIN', 'COVERED', 'WITH', 'A', 'WHITE', 'CLOTH', 'OVER', 'WHICH', 'SPREAD', 'A', 'LARGE', 'BLACK', 'CROSS', 'LIKE', 'A', 'HUGE', 'CORPSE', 'WITH', 'DROOPING', 'ARMS'] +3764-168671-0002-1726: ref=['A', 'MOURNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0002-1726: hyp=['THE', 'MORNING', 'COACH', 'IN', 'WHICH', 'COULD', 'BE', 'SEEN', 'A', 'PRIEST', 'IN', 'HIS', 'SURPLICE', 'AND', 'A', 'CHOIR', 'BOY', 'IN', 'HIS', 'RED', 'CAP', 'FOLLOWED'] +3764-168671-0003-1727: ref=['BEHIND', 'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] 
+3764-168671-0003-1727: hyp=['BEHIND', 'IT', 'CAME', 'AN', 'OLD', 'MAN', 'IN', 'THE', 'GARMENTS', 'OF', 'A', 'LABORER', 'WHO', 'LIMPED', 'ALONG'] +3764-168671-0004-1728: ref=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'IN', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0004-1728: hyp=['THE', 'GRAVE', 'DIGGERS', 'BEING', 'THUS', 'BOUND', 'TO', 'SERVICE', 'IN', 'THE', 'EVENING', 'IN', 'SUMMER', 'AND', 'AT', 'NIGHT', 'AND', 'WINTER', 'IN', 'THIS', 'CEMETERY', 'THEY', 'WERE', 'SUBJECTED', 'TO', 'A', 'SPECIAL', 'DISCIPLINE'] +3764-168671-0005-1729: ref=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 'DOME', 'OF', 'THE', 'INVALIDES'] +3764-168671-0005-1729: hyp=['THESE', 'GATES', 'THEREFORE', 'SWUNG', 'INEXORABLY', 'ON', 'THEIR', 'HINGES', 'AT', 'THE', 'INSTANT', 'WHEN', 'THE', 'SUN', 'DISAPPEARED', 'BEHIND', 'THE', 'DOME', 'OF', 'THE', 'INVALIDE'] +3764-168671-0006-1730: ref=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0006-1730: hyp=['DAMPNESS', 'WAS', 'INVADING', 'IT', 'THE', 'FLOWERS', 'WERE', 'DESERTING', 'IT'] +3764-168671-0007-1731: ref=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'VAUGIRARD', 'IT', 'HINTED', 'AT', 'POVERTY', 'PERE', 'LACHAISE', 'IF', 'YOU', 'PLEASE'] +3764-168671-0007-1731: hyp=['THE', 'BOURGEOIS', 'DID', 'NOT', 'CARE', 'MUCH', 'ABOUT', 'BEING', 'BURIED', 'IN', 'THE', 'ROGER', 'IT', 'HINTED', 'AT', 'POVERTY', 'PALACE', 'IF', 'YOU', 'PLEASE'] +3764-168671-0008-1732: ref=['TO', 'BE', 'BURIED', 'IN', 'PERE', 'LACHAISE', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0008-1732: hyp=['TO', 'BE', 'BURIED', 'IN', 'PERFELASHES', 'IS', 'EQUIVALENT', 'TO', 'HAVING', 'FURNITURE', 'OF', 'MAHOGANY', 'IT', 'IS', 'RECOGNIZED', 'AS', 'ELEGANT'] +3764-168671-0009-1733: ref=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'TO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0009-1733: hyp=['THE', 'INTERMENT', 'OF', 'MOTHER', 'CRUCIFIXION', 'IN', 'THE', 'VAULT', 'UNDER', 'THE', 'ALTAR', 'THE', 'EXIT', 'OF', 'COSETTE', 'THE', 'INTRODUCTION', 'OF', 'JEAN', 'VALJEAN', 'INTO', 'THE', 'DEAD', 'ROOM', 'ALL', 'HAD', 'BEEN', 'EXECUTED', 'WITHOUT', 'DIFFICULTY', 'AND', 'THERE', 'HAD', 'BEEN', 'NO', 'HITCH', 'LET', 'US', 'REMARK', 'IN', 'PASSING', 'THAT', 'THE', 'BURIAL', 'OF', 'MOTHER', 'CRUCIFIXION', 'UNDER', 'THE', 'ALTAR', 'OF', 'THE', 'CONVENT', 'IS', 'A', 'PERFECTLY', 'VENIAL', 'OFFENCE', 'IN', 'OUR', 'SIGHT'] +3764-168671-0010-1734: ref=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0010-1734: hyp=['IT', 'IS', 'ONE', 'OF', 'THE', 'FAULTS', 'WHICH', 'RESEMBLE', 'A', 'DUTY'] +3764-168671-0011-1735: ref=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 
'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0011-1735: hyp=['THE', 'NUNS', 'HAD', 'COMMITTED', 'IT', 'NOT', 'ONLY', 'WITHOUT', 'DIFFICULTY', 'BUT', 'EVEN', 'WITH', 'THE', 'APPLAUSE', 'OF', 'THEIR', 'OWN', 'CONSCIENCES'] +3764-168671-0012-1736: ref=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'IS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0012-1736: hyp=['IN', 'THE', 'CLOISTER', 'WHAT', 'IS', 'CALLED', 'THE', 'GOVERNMENT', 'IS', 'ONLY', 'AN', 'INTERMEDDLING', 'WITH', 'AUTHORITY', 'AN', 'INTERFERENCE', 'WHICH', 'IS', 'ALWAYS', 'QUESTIONABLE'] +3764-168671-0013-1737: ref=['MAKE', 'AS', 'MANY', 'LAWS', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0013-1737: hyp=['MAKE', 'AS', 'MANY', 'NOISE', 'AS', 'YOU', 'PLEASE', 'MEN', 'BUT', 'KEEP', 'THEM', 'FOR', 'YOURSELVES'] +3764-168671-0014-1738: ref=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0014-1738: hyp=['A', 'PRINCE', 'IS', 'NOTHING', 'IN', 'THE', 'PRESENCE', 'OF', 'A', 'PRINCIPLE'] +3764-168671-0015-1739: ref=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0015-1739: hyp=['FAUCHELEVENT', 'LIMPED', 'ALONG', 'BEHIND', 'THE', 'HEARSE', 'IN', 'A', 'VERY', 'CONTENTED', 'FRAME', 'OF', 'MIND'] +3764-168671-0016-1740: ref=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0016-1740: hyp=['JEAN', "VALJEAN'S", 'COMPOSURE', 'WAS', 'ONE', 'OF', 'THOSE', 'POWERFUL', 'TRANQUILLITIES', 'WHICH', 'ARE', 'CONTAGIOUS'] +3764-168671-0017-1741: ref=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0017-1741: hyp=['WHAT', 'REMAINED', 'TO', 'BE', 'DONE', 'WAS', 'A', 'MERE', 'NOTHING'] +3764-168671-0018-1742: ref=['HE', 'PLAYED', 'WITH', 'FATHER', 'MESTIENNE'] +3764-168671-0018-1742: hyp=['HE', 'PLAYED', 'WITH', 'FATHER', 'MISTIENNE'] +3764-168671-0019-1743: ref=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0019-1743: hyp=['HE', 'DID', 'WHAT', 'HE', 'LIKED', 'WITH', 'HIM', 'HE', 'MADE', 'HIM', 'DANCE', 'ACCORDING', 'TO', 'HIS', 'WHIM'] +3764-168671-0020-1744: ref=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0020-1744: hyp=['THE', 'PERMISSION', 'FOR', 'INTERMENT', 'MUST', 'BE', 'EXHIBITED'] +3764-168671-0021-1745: ref=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABORING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0021-1745: hyp=['HE', 'WAS', 'A', 'SORT', 'OF', 'LABOURING', 'MAN', 'WHO', 'WORE', 'A', 'WAISTCOAT', 'WITH', 'LARGE', 'POCKETS', 'AND', 'CARRIED', 'A', 'MATTOCK', 'UNDER', 'HIS', 'ARM'] +3764-168671-0022-1746: ref=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0022-1746: hyp=['THE', 'MAN', 'REPLIED', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0023-1747: ref=['THE', 'GRAVE', 'DIGGER', 'YES'] +3764-168671-0023-1747: hyp=['THE', 'BRAVE', 'DIGGER', 'YES'] +3764-168671-0024-1748: ref=['YOU', 'I'] +3764-168671-0024-1748: hyp=['YOU', 'I'] +3764-168671-0025-1749: ref=['FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER', 'HE', 'WAS'] +3764-168671-0025-1749: hyp=['FATHER', 'MISSION', 'IS', 'THE', 'GRAVE', 
'DIGGER', 'HE', 'WAS'] +3764-168671-0026-1750: ref=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0026-1750: hyp=['FAUCHELEVENT', 'HAD', 'EXPECTED', 'ANYTHING', 'BUT', 'THIS', 'THAT', 'A', 'GRAVE', 'DIGGER', 'COULD', 'DIE'] +3764-168671-0027-1751: ref=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0027-1751: hyp=['IT', 'IS', 'TRUE', 'NEVERTHELESS', 'THAT', 'GRAVE', 'DIGGERS', 'DO', 'DIE', 'THEMSELVES'] +3764-168671-0028-1752: ref=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0028-1752: hyp=['HE', 'HAD', 'HARDLY', 'THE', 'STRENGTH', 'TO', 'STAMMER'] +3764-168671-0029-1753: ref=['BUT', 'HE', 'PERSISTED', 'FEEBLY', 'FATHER', 'MESTIENNE', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0029-1753: hyp=['BUT', 'HE', 'PERSISTED', 'FEEBLY', 'FATHER', 'MESSIAN', 'IS', 'THE', 'GRAVE', 'DIGGER'] +3764-168671-0030-1754: ref=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'LENOIR', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0030-1754: hyp=['DO', 'YOU', 'KNOW', 'WHO', 'LITTLE', 'FATHER', 'NOIR', 'IS', 'HE', 'IS', 'A', 'JUG', 'OF', 'RED', 'WINE'] +3764-168671-0031-1755: ref=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0031-1755: hyp=['BUT', 'YOU', 'ARE', 'A', 'JOLLY', 'FELLOW', 'TOO'] +3764-168671-0032-1756: ref=['ARE', 'YOU', 'NOT', 'COMRADE', "WE'LL", 'GO', 'AND', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0032-1756: hyp=['ARE', 'YOU', 'NOT', 'COMRADE', "WE'LL", 'GO', 'AND', 'HAVE', 'A', 'DRINK', 'TOGETHER', 'PRESENTLY'] +3764-168671-0033-1757: ref=['THE', 'MAN', 'REPLIED'] +3764-168671-0033-1757: hyp=['THE', 'MAN', 'REPLIED'] +3764-168671-0034-1758: ref=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0034-1758: hyp=['HE', 'LIMPED', 'MORE', 'OUT', 'OF', 'ANXIETY', 'THAN', 'FROM', 'INFIRMITY'] +3764-168671-0035-1759: ref=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0035-1759: hyp=['THE', 'GRAVE', 'DIGGER', 'WALKED', 'ON', 'IN', 'FRONT', 'OF', 'HIM'] +3764-168671-0036-1760: ref=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'GRIBIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0036-1760: hyp=['FAUCHELEVENT', 'PASSED', 'THE', 'UNEXPECTED', 'CLAVIER', 'ONCE', 'MORE', 'IN', 'REVIEW'] +3764-168671-0037-1761: ref=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0037-1761: hyp=['FAUCHELEVENT', 'WHO', 'WAS', 'ILLITERATE', 'BUT', 'VERY', 'SHARP', 'UNDERSTOOD', 'THAT', 'HE', 'HAD', 'TO', 'DEAL', 'WITH', 'A', 'FORMIDABLE', 'SPECIES', 'OF', 'MAN', 'WITH', 'A', 'FINE', 'TALKER', 'HE', 'MUTTERED'] +3764-168671-0038-1762: ref=['SO', 'FATHER', 'MESTIENNE', 'IS', 'DEAD'] +3764-168671-0038-1762: hyp=['MISS', 'OH', 'FATHER', 'MESS', 'TEEN', 'IS', 'DEAD'] +3764-168671-0039-1763: ref=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0039-1763: hyp=['THE', 'MAN', 'REPLIED', 'COMPLETELY'] +3764-168671-0040-1764: ref=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 'WAS', 'FATHER', "MESTIENNE'S", 'TURN', 'FATHER', 'MESTIENNE', 'DIED'] +3764-168671-0040-1764: hyp=['THE', 'GOOD', 'GOD', 'CONSULTED', 'HIS', 'NOTE', 'BOOK', 'WHICH', 'SHOWS', 'WHEN', 'THE', 'TIME', 'IS', 'UP', 'IT', 
'WAS', 'FATHER', "MESTIENNE'S", 'TURN', 'FOR', 'THE', 'MESSION', 'DIED'] +3764-168671-0041-1765: ref=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0041-1765: hyp=['STAMMERED', 'FAUCHELEVENT', 'IT', 'IS', 'MADE'] +3764-168671-0042-1766: ref=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0042-1766: hyp=['YOU', 'ARE', 'A', 'PEASANT', 'I', 'AM', 'A', 'PARISIAN'] +3764-168671-0043-1767: ref=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0043-1767: hyp=['FAUCHELEVENT', 'THOUGHT', 'I', 'AM', 'LOST'] +3764-168671-0044-1768: ref=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 'LEADING', 'TO', 'THE', 'NUNS', 'CORNER'] +3764-168671-0044-1768: hyp=['THEY', 'WERE', 'ONLY', 'A', 'FEW', 'TURNS', 'OF', 'THE', 'WHEEL', 'DISTANT', 'FROM', 'THE', 'SMALL', 'ALLEY', 'LEADING', 'TO', 'THE', "NUN'S", 'CORNER'] +3764-168671-0045-1769: ref=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0045-1769: hyp=['AND', 'HE', 'ADDED', 'WITH', 'THE', 'SATISFACTION', 'OF', 'A', 'SERIOUS', 'MAN', 'WHO', 'IS', 'TURNING', 'A', 'PHRASE', 'WELL'] +3764-168671-0046-1770: ref=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0046-1770: hyp=['FORTUNATELY', 'THE', 'SOIL', 'WHICH', 'WAS', 'LIGHT', 'AND', 'WET', 'WITH', 'THE', 'WINTER', 'RAINS', 'CLOGGED', 'THE', 'WHEELS', 'AND', 'RETARDED', 'ITS', 'SPEED'] +3764-168671-0047-1771: ref=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'PRYTANEUM', 'TOWN', 'HALL'] +3764-168671-0047-1771: hyp=['MY', 'FATHER', 'WAS', 'A', 'PORTER', 'AT', 'THE', 'BRITTANNIUM', 'TOWN', 'HALL'] +3764-168671-0048-1772: ref=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSSES', 'ON', 'CHANGE', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0048-1772: hyp=['BUT', 'HE', 'HAD', 'REVERSES', 'HE', 'HAD', 'LOSES', 'UNCHANGED', 'I', 'WAS', 'OBLIGED', 'TO', 'RENOUNCE', 'THE', 'PROFESSION', 'OF', 'AUTHOR', 'BUT', 'I', 'AM', 'STILL', 'A', 'PUBLIC', 'WRITER'] +3764-168671-0049-1773: ref=['SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0049-1773: hyp=['BUT', 'SO', 'YOU', 'ARE', 'NOT', 'A', 'GRAVE', 'DIGGER', 'THEN'] +3764-168671-0050-1774: ref=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0050-1774: hyp=['RETURNED', 'FAUCHELEVENT', 'CLUTCHING', 'AT', 'THIS', 'BRANCH', 'FEEBLE', 'AS', 'IT', 'WAS'] +3764-168671-0051-1775: ref=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0051-1775: hyp=['HERE', 'A', 'REMARK', 'BECOMES', 'NECESSARY'] +3764-168671-0052-1776: ref=['FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0052-1776: hyp=['A', 'FAUCHELEVENT', 'WHATEVER', 'HIS', 'ANGUISH', 'OFFERED', 'A', 'DRINK', 'BUT', 'HE', 'DID', 'NOT', 'EXPLAIN', 'HIMSELF', 'ON', 'ONE', 'POINT', 'WHO', 'WAS', 'TO', 'PAY'] +3764-168671-0053-1777: ref=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'A', 'SUPERIOR', 'SMILE'] +3764-168671-0053-1777: hyp=['THE', 'GRAVE', 'DIGGER', 'WENT', 'ON', 'WITH', 'THE', 'SUPERIOR', 'SMILE'] +3764-168671-0054-1778: ref=['ONE', 'MUST', 'EAT'] 
+3764-168671-0054-1778: hyp=['ONE', 'MUST', 'EAT'] +3997-180294-0000-1800: ref=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'WAKE'] +3997-180294-0000-1800: hyp=['THE', 'DUKE', 'COMES', 'EVERY', 'MORNING', 'THEY', 'WILL', 'TELL', 'HIM', 'WHEN', 'HE', 'COMES', 'THAT', 'I', 'AM', 'ASLEEP', 'AND', 'PERHAPS', 'HE', 'WILL', 'WAIT', 'UNTIL', 'I', 'AWAKE'] +3997-180294-0001-1801: ref=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0001-1801: hyp=['YES', 'BUT', 'IF', 'I', 'SHOULD', 'ALREADY', 'ASK', 'FOR', 'SOMETHING', 'WHAT'] +3997-180294-0002-1802: ref=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THAT', 'I', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] +3997-180294-0002-1802: hyp=['WELL', 'DO', 'IT', 'FOR', 'ME', 'FOR', 'I', 'SWEAR', 'TO', 'YOU', 'THY', "DON'T", 'LOVE', 'YOU', 'AS', 'THE', 'OTHERS', 'HAVE', 'LOVED', 'YOU'] +3997-180294-0003-1803: ref=['THERE', 'ARE', 'BOLTS', 'ON', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0003-1803: hyp=['THERE', 'ARE', 'BOLTS', 'IN', 'THE', 'DOOR', 'WRETCH'] +3997-180294-0004-1804: ref=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0004-1804: hyp=['I', "DON'T", 'KNOW', 'HOW', 'IT', 'IS', 'BUT', 'IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'I', 'DO'] +3997-180294-0005-1805: ref=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0005-1805: hyp=['NOW', 'GO', 'I', "CAN'T", 'KEEP', 'MY', 'EYES', 'OPEN'] +3997-180294-0006-1806: ref=['IT', 'SEEMED', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGED', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0006-1806: hyp=['IT', 'SEEMS', 'TO', 'ME', 'AS', 'IF', 'THIS', 'SLEEPING', 'CITY', 'BELONGS', 'TO', 'ME', 'I', 'SEARCHED', 'MY', 'MEMORY', 'FOR', 'THE', 'NAMES', 'OF', 'THOSE', 'WHOSE', 'HAPPINESS', 'I', 'HAD', 'ONCE', 'ENVIED', 'AND', 'I', 'COULD', 'NOT', 'RECALL', 'ONE', 'WITHOUT', 'FINDING', 'MYSELF', 'THE', 'HAPPIER'] +3997-180294-0007-1807: ref=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNSELS', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENT', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0007-1807: hyp=['EDUCATION', 'FAMILY', 'FEELING', 'THE', 'SENSE', 'OF', 'DUTY', 'THE', 'FAMILY', 'ARE', 'STRONG', 'SENTINELS', 'BUT', 'THERE', 'ARE', 'NO', 'SENTINELS', 'SO', 'VIGILANT', 'AS', 'NOT', 'TO', 'BE', 'DECEIVED', 'BY', 'A', 'GIRL', 'OF', 'SIXTEEN', 'TO', 'WHOM', 'NATURE', 'BY', 'THE', 'VOICE', 'OF', 'THE', 'MAN', 'SHE', 'LOVES', 'GIVES', 'THE', 'FIRST', 'COUNSEL', 'OF', 'LOVE', 'ALL', 'THE', 'MORE', 'ARDENTS', 'BECAUSE', 'THEY', 'SEEM', 'SO', 'PURE'] +3997-180294-0008-1808: ref=['THE', 'MORE', 'A', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BEING', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 
'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 'IS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MAN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0008-1808: hyp=['THE', 'MORE', 'GIRL', 'BELIEVES', 'IN', 'GOODNESS', 'THE', 'MORE', 'EASILY', 'WILL', 'SHE', 'GIVE', 'WAY', 'IF', 'NOT', 'TO', 'HER', 'LOVER', 'AT', 'LEAST', 'TO', 'LOVE', 'FOR', 'BE', 'WITHOUT', 'MISTRUST', 'SHE', 'IS', 'WITHOUT', 'FORCE', 'AND', 'TO', 'WIN', 'HER', 'LOVE', 'AS', 'A', 'TRIUMPH', 'THAT', 'CAN', 'BE', 'GAINED', 'BY', 'ANY', 'YOUNG', 'MEN', 'OF', 'FIVE', 'AND', 'TWENTY', 'SEE', 'HOW', 'YOUNG', 'GIRLS', 'ARE', 'WATCHED', 'AND', 'GUARDED'] +3997-180294-0009-1809: ref=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'SURELY', 'MUST', 'THEY', 'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTEN', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERIOUS', 'VEIL'] +3997-180294-0009-1809: hyp=['THEN', 'HOW', 'SURELY', 'MUST', 'THEY', 'DESIRE', 'THE', 'WORLD', 'WHICH', 'IS', 'HIDDEN', 'FROM', 'THEM', 'HOW', 'TRULY', 'MUST', 'THEY', 'FIND', 'IT', 'TEMPTING', 'HOW', 'SURELY', 'MUST', 'THEY', 'LISTENED', 'TO', 'THE', 'FIRST', 'VOICE', 'WHICH', 'COMES', 'TO', 'TELL', 'ITS', 'SECRETS', 'THROUGH', 'THEIR', 'BARS', 'AND', 'BLESS', 'THE', 'HAND', 'WHICH', 'HE', 'IS', 'THE', 'FIRST', 'TO', 'RAISE', 'A', 'CORNER', 'OF', 'THE', 'MYSTERY', 'VEIL'] +3997-180294-0010-1810: ref=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HAVE', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0010-1810: hyp=['WITH', 'THEM', 'THE', 'BODY', 'HAS', 'WORN', 'OUT', 'THE', 'SOUL', 'THE', 'SENSES', 'HALF', 'BURNED', 'UP', 'THE', 'HEART', 'DISSIPATION', 'HAS', 'BLUNTED', 'THE', 'FEELINGS'] +3997-180294-0011-1811: ref=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0011-1811: hyp=['THEY', 'LOVE', 'BY', 'PROFESSION', 'AND', 'NOT', 'BY', 'INSTINCT'] +3997-180294-0012-1812: ref=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0012-1812: hyp=['WHEN', 'A', 'CREATURE', 'WHO', 'HAS', 'ALL', 'HER', 'PAST', 'TO', 'REPROACH', 'HERSELF', 'WITH', 'IS', 'TAKEN', 'ALL', 'AT', 'ONCE', 'BY', 'A', 'PROFOUND', 'SINCERE', 'IRRESISTIBLE', 'LOVE', 'OF', 'WHICH', 'SHE', 'HAD', 'NEVER', 'FELT', 'HERSELF', 'CAPABLE', 'WHEN', 'SHE', 'HAS', 'CONFESSED', 'HER', 'LOVE', 'HOW', 'ABSOLUTELY', 'THE', 'MAN', 'WHOM', 'SHE', 'LOVES', 'DOMINATES', 'HER'] +3997-180294-0013-1813: ref=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0013-1813: hyp=['THEY', 'KNOW', 'NOT', 'WHAT', 'PROOF', 'TO', 'GIVE'] +3997-180294-0014-1814: ref=['IN', 'ORDER', 'TO', 'DISTURB', 'THE', 'LABOURERS', 'IN', 'THE', 'FIELD', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 
'CRIES', 'FOR', 'HELP'] +3997-180294-0014-1814: hyp=['IN', 'ORDER', 'TO', 'DISTURB', 'THE', 'LABORERS', 'IN', 'THE', 'FIELDS', 'WAS', 'ONE', 'DAY', 'DEVOURED', 'BY', 'A', 'WOLF', 'BECAUSE', 'THOSE', 'WHOM', 'HE', 'HAD', 'SO', 'OFTEN', 'DECEIVED', 'NO', 'LONGER', 'BELIEVED', 'IN', 'HIS', 'CRIES', 'FOR', 'HELP'] +3997-180294-0015-1815: ref=['IT', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'THEY', 'LOVE', 'SERIOUSLY'] +3997-180294-0015-1815: hyp=['THIS', 'IS', 'THE', 'SAME', 'WITH', 'THESE', 'UNHAPPY', 'WOMEN', 'WHEN', 'HE', 'LOVED', 'SERIOUSLY'] +3997-180294-0016-1816: ref=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DRAUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0016-1816: hyp=['BUT', 'WHEN', 'THE', 'MAN', 'WHO', 'INSPIRES', 'THIS', 'REDEEMING', 'LOVE', 'IS', 'GREAT', 'ENOUGH', 'IN', 'SOUL', 'TO', 'RECEIVE', 'IT', 'WITHOUT', 'REMEMBERING', 'THE', 'PAST', 'WHEN', 'HE', 'GIVES', 'HIMSELF', 'UP', 'TO', 'IT', 'WHEN', 'IN', 'SHORT', 'HE', 'LOVES', 'AS', 'HE', 'IS', 'LOVED', 'THIS', 'MAN', 'DRAINS', 'AT', 'ONE', 'DRAUGHT', 'ALL', 'EARTHLY', 'EMOTIONS', 'AND', 'AFTER', 'SUCH', 'A', 'LOVE', 'HIS', 'HEART', 'WILL', 'BE', 'CLOSED', 'TO', 'EVERY', 'OTHER'] +3997-180294-0017-1817: ref=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'LIAISON'] +3997-180294-0017-1817: hyp=['BUT', 'TO', 'RETURN', 'TO', 'THE', 'FIRST', 'DAY', 'OF', 'MY', 'LEAR', 'SONG'] +3997-180294-0018-1818: ref=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MAD', 'GAIETY'] +3997-180294-0018-1818: hyp=['WHEN', 'I', 'REACHED', 'HOME', 'I', 'WAS', 'IN', 'A', 'STATE', 'OF', 'MADGE', 'GAIETY'] +3997-180294-0019-1819: ref=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0019-1819: hyp=['THE', 'WOMAN', 'BECOMES', 'THE', "MAN'S", 'MISTRESS', 'AND', 'LOVES', 'HIM'] +3997-180294-0020-1820: ref=['HOW', 'WHY'] +3997-180294-0020-1820: hyp=['HOW', 'WHY'] +3997-180294-0021-1821: ref=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0021-1821: hyp=['MY', 'WHOLE', 'BEING', 'WAS', 'EXALTED', 'INTO', 'JOY', 'AT', 'THE', 'MEMORY', 'OF', 'THE', 'WORDS', 'WE', 'HAD', 'EXCHANGED', 'DURING', 'THAT', 'FIRST', 'NIGHT'] +3997-180294-0022-1822: ref=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0022-1822: hyp=['HERE', 'ARE', 'MY', 'ORDERS', 'TO', 'NIGHT', 'AT', 'THE', 'VAUDEVILLE'] +3997-180294-0023-1823: ref=['COME', 'DURING', 'THE', 'THIRD', "ENTR'ACTE"] +3997-180294-0023-1823: hyp=['CALM', 'DURING', 'THE', 'THIRD', 'ENTRACT'] +3997-180294-0024-1824: ref=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0024-1824: hyp=['THE', 'BOXES', 'FILLED', 'ONE', 'AFTER', 'ANOTHER'] +3997-180294-0025-1825: ref=['ONLY', 'ONE', 'REMAINED', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0025-1825: hyp=['ONLY', 'ONE', 'REMAINS', 'EMPTY', 'THE', 'STAGE', 'BOX'] +3997-180294-0026-1826: ref=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 
'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0026-1826: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'THE', 'THIRD', 'ACT', 'I', 'HEARD', 'THE', 'DOOR', 'OF', 'THE', 'BOX', 'ON', 'WHICH', 'MY', 'EYES', 'HAD', 'BEEN', 'ALMOST', 'CONSTANTLY', 'FIXED', 'OPEN', 'AND', 'MARGUERITE', 'APPEARED'] +3997-180294-0027-1827: ref=['DID', 'SHE', 'LOVE', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0027-1827: hyp=['DOES', 'SHE', 'LOVED', 'ME', 'ENOUGH', 'TO', 'BELIEVE', 'THAT', 'THE', 'MORE', 'BEAUTIFUL', 'SHE', 'LOOKED', 'THE', 'HAPPIER', 'I', 'SHOULD', 'BE'] +3997-180294-0028-1828: ref=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0028-1828: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'WITH', 'YOU', 'TO', 'NIGHT', 'SAID', 'MARGUERITE', 'RISING', 'AND', 'COMING', 'TO', 'THE', 'BACK', 'OF', 'THE', 'BOX', 'AND', 'KISSING', 'ME', 'ON', 'THE', 'FOREHEAD'] +3997-180294-0029-1829: ref=['YOU', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONICAL', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0029-1829: hyp=['HE', 'SHOULD', 'GO', 'TO', 'BED', 'SHE', 'REPLIED', 'WITH', 'THAT', 'IRONIC', 'AIR', 'WHICH', 'WENT', 'SO', 'WELL', 'WITH', 'HER', 'DELICATE', 'AND', 'WITTY', 'FACE'] +3997-180294-0030-1830: ref=['WHERE', 'AT', 'HOME'] +3997-180294-0030-1830: hyp=['WHERE', 'AT', 'HOME'] +3997-180294-0031-1831: ref=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0031-1831: hyp=['YOU', 'STILL', 'LOVE', 'ME', 'CAN', 'YOU', 'ASK'] +3997-180294-0032-1832: ref=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0032-1832: hyp=['BECAUSE', 'YOU', "DON'T", 'LIKE', 'SEEING', 'HIM'] +3997-180294-0033-1833: ref=['NONETHELESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WAITING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180294-0033-1833: hyp=['NONE', 'THE', 'LESS', 'I', 'WAS', 'VERY', 'UNHAPPY', 'ALL', 'THE', 'REST', 'OF', 'THE', 'EVENING', 'AND', 'WENT', 'AWAY', 'VERY', 'SADLY', 'AFTER', 'HAVING', 'SEEN', 'PRUDENCE', 'THE', 'COUNT', 'AND', 'MARGUERITE', 'GET', 'INTO', 'THE', 'CARRIAGE', 'WHICH', 'WAS', 'WINNING', 'FOR', 'THEM', 'AT', 'THE', 'DOOR'] +3997-180297-0000-1834: ref=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0000-1834: hyp=['I', 'HAVE', 'NOT', 'COME', 'TO', 'HINDER', 'YOU', 'FROM', 'LEAVING', 'PARIS'] +3997-180297-0001-1835: ref=['YOU', 'IN', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0001-1835: hyp=['YOU', 'AND', 'THE', 'WAY', 'MARGUERITE', 'BUT', 'HOW'] +3997-180297-0002-1836: ref=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0002-1836: hyp=['WELL', 'YOU', 'MIGHT', 'HAVE', 'HAD', 'A', 'WOMAN', 'HERE', 'SAID', 'PRUDENCE', 'AND', 'IT', 'WOULD', 'HARDLY', 'HAVE', 'BEEN', 'AMUSING', 'FOR', 'HER', 'TO', 'SEE', 'TWO', 'MORE', 'ARRIVE'] +3997-180297-0003-1837: ref=['DURING', 'THIS', 
'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] +3997-180297-0003-1837: hyp=['DURING', 'THIS', 'REMARK', 'MARGUERITE', 'LOOKED', 'AT', 'ME', 'ATTENTIVELY'] +3997-180297-0004-1838: ref=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0004-1838: hyp=['MY', 'DEAR', 'PRUDENCE', 'I', 'ANSWERED', 'YOU', 'DO', 'NOT', 'KNOW', 'WHAT', 'YOU', 'ARE', 'SAYING'] +3997-180297-0005-1839: ref=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0005-1839: hyp=['YES', 'BUT', 'BESIDES', 'NOT', 'WISHING', 'TO', 'PUT', 'YOU', 'OUT', 'I', 'WAS', 'SURE', 'THAT', 'IF', 'YOU', 'CAME', 'AS', 'FAR', 'AS', 'MY', 'DOOR', 'YOU', 'WOULD', 'WANT', 'TO', 'COME', 'UP', 'AND', 'AS', 'I', 'COULD', 'NOT', 'LET', 'YOU', 'I', 'DID', 'NOT', 'WISH', 'TO', 'LET', 'YOU', 'GO', 'AWAY', 'BLAMING', 'ME', 'FOR', 'SAYING', 'NO'] +3997-180297-0006-1840: ref=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'DO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0006-1840: hyp=['BECAUSE', 'I', 'AM', 'WATCHED', 'AND', 'THE', 'LEAST', 'SUSPICION', 'MIGHT', 'TO', 'ME', 'THE', 'GREATEST', 'HARM'] +3997-180297-0007-1841: ref=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0007-1841: hyp=['IS', 'THAT', 'REALLY', 'THE', 'ONLY', 'REASON'] +3997-180297-0008-1842: ref=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0008-1842: hyp=['IF', 'THERE', 'WERE', 'ANY', 'OTHER', 'I', 'WOULD', 'TELL', 'YOU', 'FOR', 'WE', 'ARE', 'NOT', 'TO', 'HAVE', 'ANY', 'SECRETS', 'FROM', 'ONE', 'ANOTHER', 'NOW'] +3997-180297-0009-1843: ref=['HONESTLY', 'DO', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0009-1843: hyp=['ON', 'THE', 'SUIT', 'DO', 'YOU', 'CARE', 'FOR', 'ME', 'A', 'LITTLE', 'A', 'GREAT', 'DEAL'] +3997-180297-0010-1844: ref=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0010-1844: hyp=['I', 'FANCIED', 'FOR', 'A', 'MOMENT', 'THAT', 'I', 'MIGHT', 'GIVE', 'MYSELF', 'THAT', 'HAPPINESS', 'FOR', 'SIX', 'MONTHS', 'YOU', 'WOULD', 'NOT', 'HAVE', 'IT', 'YOU', 'INSISTED', 'ON', 'KNOWING', 'THE', 'MEANS'] +3997-180297-0011-1845: ref=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0011-1845: hyp=['WELL', 'GOOD', 'HEAVENS', 'THE', 'MEANS', 'WERE', 'EASY', 'ENOUGH', 'TO', 'GUESS'] +3997-180297-0012-1846: ref=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0012-1846: hyp=['I', 'LISTENED', 'AND', 'I', 'GAZED', 'AT', 'MARGUERITE', 'WITH', 'ADMIRATION'] +3997-180297-0013-1847: ref=['WHEN', 'I', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'MY', 'PART', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', 
"MAN'S", 'DESIRE', 'HAS', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0013-1847: hyp=['WHEN', 'THEY', 'THOUGHT', 'THAT', 'THIS', 'MARVELLOUS', 'CREATURE', 'WHOSE', 'FEET', 'I', 'HAD', 'ONCE', 'LONGED', 'TO', 'KISS', 'WAS', 'WILLING', 'TO', 'LET', 'ME', 'TAKE', 'MY', 'PLACE', 'IN', 'HER', 'THOUGHTS', 'BY', 'PARTS', 'IN', 'HER', 'LIFE', 'AND', 'THAT', 'I', 'WAS', 'NOT', 'YET', 'CONTENT', 'WITH', 'WHAT', 'SHE', 'GAVE', 'ME', 'I', 'ASKED', 'IF', "MAN'S", 'DESIRE', 'HAD', 'INDEED', 'LIMITS', 'WHEN', 'SATISFIED', 'AS', 'PROMPTLY', 'AS', 'MINE', 'HAD', 'BEEN', 'IT', 'REACHED', 'AFTER', 'SOMETHING', 'FURTHER'] +3997-180297-0014-1848: ref=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRES', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0014-1848: hyp=['TRULY', 'SHE', 'CONTINUED', 'WE', 'POOR', 'CREATURES', 'OF', 'CHANCE', 'HAVE', 'FANTASTIC', 'DESIRE', 'AND', 'INCONCEIVABLE', 'LOVES'] +3997-180297-0015-1849: ref=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0015-1849: hyp=['WE', 'ARE', 'NOT', 'ALLOWED', 'TO', 'HAVE', 'HEARTS', 'UNDER', 'PENALTY', 'OF', 'BEING', 'HOOTED', 'DOWN', 'AND', 'OF', 'RUINING', 'OUR', 'CREDIT'] +3997-180297-0016-1850: ref=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0016-1850: hyp=['WE', 'NO', 'LONGER', 'BELONG', 'TO', 'OURSELVES'] +3997-180297-0017-1851: ref=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0017-1851: hyp=['WE', 'STAND', 'FIRST', 'IN', 'THEIR', 'SELF', 'ESTEEM', 'LAST', 'IN', 'THEIR', 'ESTEEM'] +3997-180297-0018-1852: ref=['NEVER', 'DO', 'THEY', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0018-1852: hyp=['NEVER', 'DID', 'HE', 'GIVE', 'YOU', 'ADVICE', 'WHICH', 'IS', 'NOT', 'LUCRATIVE'] +3997-180297-0019-1853: ref=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'IN', 'OUR', 'CARRIAGE', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'THEATRE'] +3997-180297-0019-1853: hyp=['IT', 'MEANS', 'LITTLE', 'ENOUGH', 'TO', 'THEM', 'THAT', 'WE', 'SHOULD', 'HAVE', 'TEN', 'LOVERS', 'EXTRA', 'AS', 'LONG', 'AS', 'THEY', 'GET', 'DRESSES', 'OR', 'A', 'BRACELET', 'OUT', 'OF', 'THEM', 'AND', 'THAT', 'THEY', 'CAN', 'DRIVE', 'AND', 'ARE', 'PARISH', 'FROM', 'TIME', 'TO', 'TIME', 'OR', 'COME', 'TO', 'OUR', 'BOX', 'AT', 'THE', 'FUTURE'] +3997-180297-0020-1854: ref=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0020-1854: hyp=['SUCH', 'A', 'MAN', 'I', 'FOUND', 'IN', 'THE', 'DUKE', 'BUT', 'THE', 'DUKE', 'IS', 'OLD', 'AND', 'THE', 'OLD', 'AGE', 'NEITHER', 'PROTECTS', 'NOR', 'CONSOLES'] +3997-180297-0021-1855: ref=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'BUT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0021-1855: hyp=['I', 'THOUGHT', 'I', 'COULD', 'ACCEPT', 'THE', 'LIFE', 'WHICH', 'HE', 'OFFERED', 'ME', 'OR', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +3997-180297-0022-1856: ref=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 
'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] +3997-180297-0022-1856: hyp=['WHAT', 'I', 'LOVED', 'IN', 'YOU', 'WAS', 'NOT', 'THE', 'MAN', 'WHO', 'WAS', 'BUT', 'THE', 'MAN', 'WHO', 'WAS', 'GOING', 'TO', 'BE'] +3997-180297-0023-1857: ref=['MARGUERITE', 'TIRED', 'OUT', 'WITH', 'THIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PUT', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0023-1857: hyp=['MARGUERITE', 'HIRED', 'OUT', 'WITH', 'THIS', 'LONG', 'CONFESSION', 'THREW', 'HERSELF', 'BACK', 'ON', 'THE', 'SOFA', 'AND', 'TO', 'STIFLE', 'A', 'SLIGHT', 'COUGH', 'PULL', 'UP', 'HER', 'HANDKERCHIEF', 'TO', 'HER', 'LIPS', 'AND', 'FROM', 'THAT', 'TO', 'HER', 'EYES'] +3997-180297-0024-1858: ref=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0024-1858: hyp=['MARGUERITE', 'DO', 'WITH', 'ME', 'AS', 'YOU', 'WILL', 'I', 'AM', 'YOUR', 'SLAVE', 'YOUR', 'DOG', 'BUT', 'IN', 'THE', 'NAME', 'OF', 'HEAVEN', 'TEAR', 'UP', 'THE', 'LETTER', 'WHICH', 'I', 'WROTE', 'TO', 'YOU', 'AND', 'DO', 'NOT', 'MAKE', 'ME', 'LEAVE', 'YOU', 'TO', 'MORROW', 'IT', 'WOULD', 'KILL', 'ME'] +3997-180297-0025-1859: ref=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0025-1859: hyp=['MARGUERITE', 'DREW', 'THE', 'LETTER', 'FROM', 'HER', 'BOSOM', 'AND', 'HANDING', 'IT', 'TO', 'ME', 'WITH', 'A', 'SMILE', 'OF', 'INFINITE', 'SWEETNESS', 'SAID'] +3997-180297-0026-1860: ref=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0026-1860: hyp=['HERE', 'IT', 'IS', 'I', 'HAVE', 'BROUGHT', 'IT', 'BACK'] +3997-180297-0027-1861: ref=['I', 'TORE', 'THE', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0027-1861: hyp=['I', 'TOILED', 'A', 'LETTER', 'INTO', 'FRAGMENTS', 'AND', 'KISSED', 'WITH', 'TEARS', 'THE', 'HAND', 'THAT', 'GAVE', 'IT', 'TO', 'ME'] +3997-180297-0028-1862: ref=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0028-1862: hyp=['LOOK', 'HERE', 'PRUDENCE', 'DO', 'YOU', 'KNOW', 'WHAT', 'HE', 'WANTS', 'SAID', 'MARGUERITE'] +3997-180297-0029-1863: ref=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0029-1863: hyp=['HE', 'WANTS', 'YOU', 'TO', 'FORGIVE', 'HIM'] +3997-180297-0030-1864: ref=['ONE', 'HAS', 'TO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0030-1864: hyp=['ONE', 'HAS', 'TWO', 'BUT', 'HE', 'WANTS', 'MORE', 'THAN', 'THAT', 'WHAT', 'THEN'] +3997-180297-0031-1865: ref=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-180297-0031-1865: hyp=['I', 'EMBRACED', 'MARGUERITE', 'UNTIL', 'SHE', 'WAS', 'ALMOST', 'STIFLED'] +3997-182399-0000-1779: ref=['OL', 'MISTAH', 'BUZZARD', 'GRINNED'] +3997-182399-0000-1779: hyp=['ALL', 'MISTER', 'BUZZARD', 'GRINNED'] +3997-182399-0001-1780: ref=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0001-1780: hyp=['THIS', 'SOUNDED', 'LIKE', 'ANOTHER', 'STORY'] +3997-182399-0002-1781: ref=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 
'BLACK', 'HEADED', 'COUSIN', 'OF', 'OL', 'MISTAH', 'BUZZARD', 'VERY', 'CURIOUS', 'INDEED'] +3997-182399-0002-1781: hyp=['HE', 'WAS', 'CURIOUS', 'ABOUT', 'THAT', 'BLACK', 'HEADED', 'COUSIN', 'OF', 'ALL', 'MISTER', 'BUZZARD', 'VERY', 'CURIOUS', 'INDEED'] +3997-182399-0003-1782: ref=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0003-1782: hyp=['ANYWAY', 'HE', 'WOULD', 'FIND', 'OUT'] +3997-182399-0004-1783: ref=['PLEASE', 'MISTER', 'BUZZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0004-1783: hyp=['PLEASE', 'MISTER', 'BOZARD', 'PLEASE', 'TELL', 'US', 'THE', 'STORY', 'HE', 'BEGGED'] +3997-182399-0005-1784: ref=['NOW', 'OL', 'MISTAH', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BEGGED', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0005-1784: hyp=['NOW', 'ALL', 'MISTER', 'BUZZARD', 'IS', 'NATURALLY', 'GOOD', 'NATURED', 'AND', 'ACCOMMODATING', 'AND', 'WHEN', 'PETER', 'BEGGED', 'SO', 'HARD', 'HE', 'JUST', "COULDN'T", 'FIND', 'IT', 'IN', 'HIS', 'HEART', 'TO', 'REFUSE'] +3997-182399-0006-1785: ref=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', 'GRANDPAP', 'BUZZARD', 'HAD', 'HIS', 'LIL', 'FALLING', 'OUT', 'WITH', 'OL', 'KING', 'EAGLE', 'AND', 'DONE', 'FLY', 'SO', 'HIGH', 'HE', "SCO'TCH", 'THE', 'FEATHERS', 'OFFEN', 'HIS', 'HAID', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPAP', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JES', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0006-1785: hyp=['WAY', 'BACK', 'IN', 'THE', 'DAYS', 'WHEN', 'GRANDPAP', 'BUZZARD', 'HAD', 'HIS', 'LITTLE', 'FALLING', 'ON', 'WITH', 'OLD', 'KING', 'EAGLE', 'AND', 'DONE', 'FLIES', 'SO', 'HIGH', 'HE', 'SCORCHED', 'THE', 'FEATHERS', 'OFF', 'IN', 'HIS', 'HEAD', 'HE', 'HAD', 'A', 'COUSIN', 'DID', 'GRANDPA', 'BUZZARD', 'AND', 'THIS', 'COUSIN', 'WAS', 'JUST', 'NATURALLY', 'LAZY', 'AND', 'NO', 'COUNT'] +3997-182399-0007-1786: ref=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NUISANCE', 'OF', 'HISSELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EV'YBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0007-1786: hyp=['LIKE', 'MOST', 'NO', 'COUNT', 'PEOPLE', 'HE', 'USED', 'TO', 'MAKE', 'A', 'REGULAR', 'NOTIONS', 'OF', 'HISSELF', 'POKING', 'HIS', 'NOSE', 'INTO', "EVERYBODY'S", 'BUSINESS', 'AND', 'NEVER', 'TENDING', 'TO', 'HIS', 'OWN'] +3997-182399-0008-1787: ref=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', "FAM'LY", "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'AND', 'MEDDLE', 'IN', 'HE', 'COULD', 'ASK', 'MO', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AN', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0008-1787: hyp=["WASN'T", 'ANYTHING', 'GOING', 'ON', 'THAT', 'THIS', 'TRIFLING', 'MEMBER', 'OF', 'THE', 'BUZZARD', 'FAMILY', "DIDN'T", 'FIND', 'OUT', 'ABOUT', 'A', 'MEDDLE', 'IN', 'HE', 'COULD', 'ASK', 'MORE', 'QUESTIONS', 'THAN', 'PETER', 'RABBIT', 'CAN', 'AND', 'ANYBODY', 'THAT', 'CAN', 'DO', 'THAT', 'HAS', 'GOT', 'TO', 'ASK', 'A', 'LOT'] +3997-182399-0009-1788: ref=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0009-1788: hyp=['EVERYBODY', 'LOOKED', 'AT', 'PETER', 'AND', 'LAUGHED'] +3997-182399-0010-1789: ref=['SO', 'WE', 'UNS', 'SIT', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OL', 'JACK', 'FROST', 'GETS', 'TO', 'STRAYING', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0010-1789: hyp=['SO', 
'WE', 'UNSTEAD', 'ON', 'THE', 'CHIMNEY', 'TOPS', 'WHENEVER', 'OLD', 'JACK', 'FROST', 'GETS', 'THE', 'STRAYING', 'DOWN', 'WHERE', 'HE', 'HAVE', 'NO', 'BUSINESS'] +3997-182399-0011-1790: ref=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPAP', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0011-1790: hyp=['ONE', 'DAY', 'THIS', 'NO', 'COUNT', 'TRIFLING', 'COUSIN', 'OF', 'GRANDPA', 'BUZZARD', 'GET', 'COLD', 'IN', 'HIS', 'FEET'] +3997-182399-0012-1791: ref=['IT', 'WAS', 'ON', 'A', 'LIL', 'OL', 'HOUSE', 'A', 'LIL', 'OL', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0012-1791: hyp=['IT', 'WAS', 'ON', 'THE', 'LITTLE', 'OLD', 'HOUSE', 'A', 'LITTLE', 'OLD', 'TUMBLE', 'DOWN', 'HOUSE'] +3997-182399-0013-1792: ref=['WHY', 'HE', 'JES', 'STRETCH', 'HIS', 'FOOL', 'HAID', 'AS', 'FAR', 'DOWN', 'THAT', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AN', 'LISTEN', 'AN', 'LISTEN'] +3997-182399-0013-1792: hyp=['WHY', 'HE', 'JUST', 'STRETCH', 'HIS', 'FULL', 'HEAD', 'AS', 'FAR', 'DOWN', 'THE', 'CHIMNEY', 'AS', 'HE', 'CAN', 'AND', 'LISTEN', 'AND', 'LISTEN'] +3997-182399-0014-1793: ref=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0014-1793: hyp=['BUT', 'HE', "DON'T", 'MIND', 'THAT'] +3997-182399-0015-1794: ref=['WILL', "YO'", 'ALLS', 'PLEASE', 'SPEAK', 'A', 'LIL', 'LOUDER', 'HE', 'HOLLER', 'DOWN', 'THE', 'CHIMNEY', 'JES', 'LIKE', 'THAT'] +3997-182399-0015-1794: hyp=['WELL', 'YOU', 'ALL', 'PLEASE', 'SPEAK', 'A', 'LITTLE', 'LOUDER', 'HE', 'HOLLERED', 'ON', 'THE', 'CHIMNEY', 'JUST', 'LIKE', 'THAT'] +3997-182399-0016-1795: ref=['YES', 'SAH', 'SHE', "SHO'LY", 'WAS', 'PLUMB', 'SCARED'] +3997-182399-0016-1795: hyp=['YES', 'SAD', 'SHE', 'SURELY', 'YOU', 'WAS', 'PLUM', 'SCARED'] +3997-182399-0017-1796: ref=['THEY', 'LIKE', 'TO', 'CHOKE', 'THAT', 'NO', 'COUNT', 'BUZZARD', 'TO', 'DEATH'] +3997-182399-0017-1796: hyp=['THEY', 'LIKED', 'TO', 'CHOKE', 'THAT', 'NO', 'COMPASSER', 'TO', 'DEATH'] +3997-182399-0018-1797: ref=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AN', 'TRY', 'TO', 'BRUSH', 'THAT', 'SOOT', 'OFF', 'BUT', 'IT', 'DONE', 'GET', 'INTO', 'THE', 'SKIN', 'AN', 'IT', 'STAY', 'THERE'] +3997-182399-0018-1797: hyp=['WHEN', 'HE', 'GET', 'HOME', 'HE', 'TRY', 'AND', 'TRY', 'TO', 'BRUSH', 'THAT', 'SUIT', 'OFF', 'BUT', 'IT', 'DONE', 'GET', 'INTO', 'THE', 'SKIN', 'AND', 'IT', 'STAY', 'THERE'] +3997-182399-0019-1798: ref=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'AROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0019-1798: hyp=['A', 'LITTLE', 'SIGH', 'OF', 'SATISFACTION', 'WENT', 'ROUND', 'THE', 'CIRCLE', 'OF', 'LISTENERS'] +3997-182399-0020-1799: ref=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', "FROG'S"] +3997-182399-0020-1799: hyp=['IT', 'WAS', 'JUST', 'AS', 'GOOD', 'AS', 'ONE', 'OF', 'GRANDFATHER', 'FROGS'] +4198-12259-0000-203: ref=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0000-203: hyp=['DRAW', 'REACH', 'FILL', 'MIX', 'GIVE', 'IT', 'ME', 'WITHOUT', 'WATER'] +4198-12259-0001-204: ref=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0001-204: hyp=['SO', 'MY', 'FRIEND', 'SO', 'WHIP', 'ME', 'OFF', 'THIS', 'GLASS', 'NEATLY', 'BRING', 'ME', 'HITHER', 'SOME', 'CLARET', 'A', 'FULL', 'WEEPING', 'GLASS', 'TILL', 'IT', 'RUN', 'OVER'] +4198-12259-0002-205: ref=['A', 'CESSATION', 'AND', 'TRUCE', 'WITH', 'THIRST'] +4198-12259-0002-205: hyp=['A', 'CESSATION', 'AND', 'TRUCE', 'WITH', 
'THIRST'] +4198-12259-0003-206: ref=['YOU', 'HAVE', 'CATCHED', 'A', 'COLD', 'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0003-206: hyp=['YOU', 'HAVE', 'CAST', 'A', 'COLD', 'GAMMER', 'YEA', 'FORSOOTH', 'SIR'] +4198-12259-0004-207: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'BUT', 'AT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0004-207: hyp=['BY', 'THE', 'VALLEY', 'OF', 'SAINT', 'BUFF', 'LET', 'US', 'TALK', 'OF', 'OUR', 'DRINK', 'I', 'NEVER', 'DRINK', 'WITHOUT', 'MY', 'HOURS', 'LIKE', 'THE', "POPE'S", 'MULE'] +4198-12259-0005-208: ref=['WHICH', 'WAS', 'FIRST', 'THIRST', 'OR', 'DRINKING'] +4198-12259-0005-208: hyp=['WHICH', 'WAS', 'FIRST', 'THOSE', 'DRINKING'] +4198-12259-0006-209: ref=['WHAT', 'IT', 'SEEMS', 'I', 'DO', 'NOT', 'DRINK', 'BUT', 'BY', 'AN', 'ATTORNEY'] +4198-12259-0006-209: hyp=['WHAT', 'IT', 'SEEMS', 'I', 'DO', 'NOT', 'DRINK', 'BUT', 'BUY', 'AN', 'ATTORNEY'] +4198-12259-0007-210: ref=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0007-210: hyp=['DRINK', 'ALWAYS', 'AND', 'YOU', 'SHALL', 'NEVER', 'DIE'] +4198-12259-0008-211: ref=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'STARK', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'AMONGST', 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILLS', 'IT'] +4198-12259-0008-211: hyp=['IF', 'I', 'DRINK', 'NOT', 'I', 'AM', 'A', 'GROUND', 'DRY', 'GRAVELLED', 'AND', 'SPENT', 'I', 'AM', 'START', 'DEAD', 'WITHOUT', 'DRINK', 'AND', 'MY', 'SOUL', 'READY', 'TO', 'FLY', 'INTO', 'SOME', 'MARSH', 'A', "MONTH'S", 'FROGS', 'THE', 'SOUL', 'NEVER', 'DWELLS', 'IN', 'A', 'DRY', 'PLACE', 'DROUTH', 'KILL', 'IT'] +4198-12259-0009-212: ref=['HE', 'DRINKS', 'IN', 'VAIN', 'THAT', 'FEELS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0009-212: hyp=['HE', 'DRINKS', 'THEIR', 'VEIN', 'THAT', 'FILLS', 'NOT', 'THE', 'PLEASURE', 'OF', 'IT'] +4198-12259-0010-213: ref=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GIRDS', 'AND', 'PETREL'] +4198-12259-0010-213: hyp=['IT', 'IS', 'ENOUGH', 'TO', 'BREAK', 'BOTH', 'GUARDS', 'AND', 'PETEL'] +4198-12259-0011-214: ref=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0011-214: hyp=['WHAT', 'DIFFERENCE', 'IS', 'THERE', 'BETWEEN', 'A', 'BOTTLE', 'AND', 'A', 'FLAGON'] +4198-12259-0012-215: ref=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0012-215: hyp=['BRAVELY', 'AND', 'WELL', 'PLAYED', 'UPON', 'THE', 'WORDS'] +4198-12259-0013-216: ref=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THEIR', 'CANS'] +4198-12259-0013-216: hyp=['OUR', 'FATHERS', 'DRANK', 'LUSTILY', 'AND', 'EMPTIED', 'THE', 'AKANS'] +4198-12259-0014-217: ref=['WELL', 'CACKED', 'WELL', 'SUNG'] +4198-12259-0014-217: hyp=['WELL', 'CAGLED', 'WELL', 'SUNG'] +4198-12259-0015-218: ref=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0015-218: hyp=['COME', 'LET', 'US', 'DRINK', 'WILL', 'YOU', 'SEND', 'NOTHING', 'TO', 'THE', 'RIVER'] +4198-12259-0016-219: ref=['I', 'DRINK', 'NO', 'MORE', 'THAN', 'A', 'SPONGE'] +4198-12259-0016-219: hyp=['I', 'DRANK', 'NO', 'MORE', 'THAN', 'HIS', 'SPINES'] +4198-12259-0017-220: ref=['I', 'DRINK', 'LIKE', 'A', 'TEMPLAR', 'KNIGHT'] +4198-12259-0017-220: hyp=['I', 'DRINK', 'LIKE', 'A', 'TENT', 'LAWN', 'NIGHT'] +4198-12259-0018-221: ref=['AND', 'I', 
'TANQUAM', 'SPONSUS'] +4198-12259-0018-221: hyp=['AND', 'I', 'TEN', 'QUALMS', 'BONSES'] +4198-12259-0019-222: ref=['AND', 'I', 'SICUT', 'TERRA', 'SINE', 'AQUA'] +4198-12259-0019-222: hyp=['AND', 'I', 'SICUT', 'TERRACE', 'IN', 'AQUA'] +4198-12259-0020-223: ref=['GIVE', 'ME', 'A', 'SYNONYMON', 'FOR', 'A', 'GAMMON', 'OF', 'BACON'] +4198-12259-0020-223: hyp=['GIVE', 'ME', 'A', 'SNYM', 'FOR', 'A', 'GAMIN', 'OF', 'BACON'] +4198-12259-0021-224: ref=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'PULLEY'] +4198-12259-0021-224: hyp=['IT', 'IS', 'THE', 'COMPULSORY', 'OF', 'DRINKERS', 'IT', 'IS', 'A', 'POLY'] +4198-12259-0022-225: ref=['A', 'LITTLE', 'RAIN', 'ALLAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THE', 'THUNDER'] +4198-12259-0022-225: hyp=['A', 'LITTLE', 'RAIN', 'A', 'LAYS', 'A', 'GREAT', 'DEAL', 'OF', 'WIND', 'LONG', 'TIPPLING', 'BREAKS', 'THAT', 'THUNDER'] +4198-12259-0023-226: ref=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FROM', 'MY', 'BALLOCK', 'WOULD', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UDDER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0023-226: hyp=['BUT', 'IF', 'THERE', 'CAME', 'SUCH', 'LIQUOR', 'FOR', 'MY', 'BALLAK', 'WILL', 'YOU', 'NOT', 'WILLINGLY', 'THEREAFTER', 'SUCK', 'THE', 'UTTER', 'WHENCE', 'IT', 'ISSUED'] +4198-12259-0024-227: ref=['HERE', 'PAGE', 'FILL'] +4198-12259-0024-227: hyp=['HERE', 'PAGE', 'FILL'] +4198-12259-0025-228: ref=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURISDICTION'] +4198-12259-0025-228: hyp=['I', 'APPEAL', 'FROM', 'THIRST', 'AND', 'DISCLAIM', 'ITS', 'JURIS', 'DIXON'] +4198-12259-0026-229: ref=['I', 'WAS', 'WONT', 'HERETOFORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0026-229: hyp=['I', 'WAS', 'WONT', 'HERE', 'TOFORE', 'TO', 'DRINK', 'OUT', 'ALL', 'BUT', 'NOW', 'I', 'LEAVE', 'NOTHING'] +4198-12259-0027-230: ref=['HEYDAY', 'HERE', 'ARE', 'TRIPES', 'FIT', 'FOR', 'OUR', 'SPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GODEBILLIOS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREAK'] +4198-12259-0027-230: hyp=['HEY', 'THEE', 'HERE', 'A', 'TRITE', 'FIT', 'FOR', 'OURSPORT', 'AND', 'IN', 'EARNEST', 'EXCELLENT', 'GO', 'TO', 'BE', 'YOURS', 'OF', 'THE', 'DUN', 'OX', 'YOU', 'KNOW', 'WITH', 'THE', 'BLACK', 'STREET'] +4198-12259-0028-231: ref=['O', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LASH', 'THEM', 'SOUNDLY', 'YET', 'THRIFTILY'] +4198-12259-0028-231: hyp=['OH', 'FOR', "GOD'S", 'SAKE', 'LET', 'US', 'LAST', 'THEM', 'SOUNDLY', 'YET', 'DRIFTILY'] +4198-12259-0029-232: ref=['SPARROWS', 'WILL', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOB', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0029-232: hyp=['SPARROWS', 'WOULD', 'NOT', 'EAT', 'UNLESS', 'YOU', 'BOBBED', 'THEM', 'ON', 'THE', 'TAIL', 'NOR', 'CAN', 'I', 'DRINK', 'IF', 'I', 'BE', 'NOT', 'FAIRLY', 'SPOKE', 'TO'] +4198-12259-0030-233: ref=['HO', 'THIS', 'WILL', 'BANG', 'IT', 'SOUNDLY'] +4198-12259-0030-233: hyp=['OH', 'THIS', 'WAS', "BENNETT'S", 'ARMY'] +4198-12259-0031-234: ref=['BUT', 'THIS', 'SHALL', 'BANISH', 'IT', 'UTTERLY'] +4198-12259-0031-234: hyp=['BUT', 'THIS', 'OUR', 'BANACY', 'UTTERLY'] +4198-12259-0032-235: ref=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NOT', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0032-235: hyp=['LET', 'US', 'WIND', 'OUR', 'HORNS', 'BY', 
'THE', 'SOUND', 'OF', 'FLAGONS', 'AND', 'BOTTLES', 'AND', 'CRY', 'ALOUD', 'THAT', 'WHOEVER', 'HATH', 'LOST', 'HIS', 'THIRST', 'COME', 'NIGH', 'HITHER', 'TO', 'SEEK', 'IT'] +4198-12259-0033-236: ref=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0033-236: hyp=['THE', 'GREAT', 'GOD', 'MADE', 'THE', 'PLANETS', 'AND', 'WE', 'MAKE', 'THE', 'PLATTERS', 'NEAT'] +4198-12259-0034-237: ref=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGESTON', 'BUT', 'THE', 'THIRST', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0034-237: hyp=['APPETITE', 'COMES', 'WITH', 'EATING', 'SAYS', 'ANGERSON', 'BUT', 'THE', 'DOZ', 'GOES', 'AWAY', 'WITH', 'DRINKING'] +4198-12259-0035-238: ref=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'THE', 'BITING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0035-238: hyp=['I', 'HAVE', 'A', 'REMEDY', 'AGAINST', 'THIRST', 'QUITE', 'CONTRARY', 'TO', 'THAT', 'WHICH', 'IS', 'GOOD', 'AGAINST', 'ABIDING', 'OF', 'A', 'MAD', 'DOG'] +4198-12259-0036-239: ref=['WHITE', 'WINE', 'HERE', 'WINE', 'BOYS'] +4198-12259-0036-239: hyp=['WHITEWAY', 'HERE', 'WINE', 'BOYS'] +4198-12259-0037-240: ref=['O', 'LACHRYMA', 'CHRISTI', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0037-240: hyp=['OH', 'LACK', 'REMAR', 'CHRISTIE', 'IT', 'IS', 'OF', 'THE', 'BEST', 'GRAPE'] +4198-12259-0038-241: ref=["I'FAITH", 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0038-241: hyp=['I', 'FAITH', 'PURE', 'GREEK', 'GREEK', 'O', 'THE', 'FINE', 'WHITE', 'WINE'] +4198-12259-0039-242: ref=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0039-242: hyp=['THERE', 'IS', 'NO', 'ENCHANTMENT', 'NOR', 'CHARM', 'THERE', 'EVERY', 'ONE', 'OF', 'YOU', 'HATH', 'SEEN', 'IT'] +4198-12259-0040-243: ref=['MY', 'PRENTICESHIP', 'IS', 'OUT', 'I', 'AM', 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0040-243: hyp=['MY', 'PRENTICE', 'IT', 'IS', 'OUT', "I'M", 'A', 'FREE', 'MAN', 'AT', 'THIS', 'TRADE'] +4198-12259-0041-244: ref=['I', 'SHOULD', 'SAY', 'MASTER', 'PAST'] +4198-12259-0041-244: hyp=['AS', 'YOU', 'SEE', 'MASTER', 'PASS'] +4198-12259-0042-245: ref=['O', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'A', 'DRY', 'O', 'POOR', 'THIRSTY', 'SOULS'] +4198-12259-0042-245: hyp=['OH', 'THE', 'DRINKERS', 'THOSE', 'THAT', 'ARE', 'A', 'DRY', 'OH', 'PORT', 'THIRSTY', 'SOULS'] +4198-12259-0043-246: ref=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12259-0043-246: hyp=['CLEAR', 'OFF', 'NEAT', 'SUPERNACULUM'] +4198-12281-0000-187: ref=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0000-187: hyp=['ALTHOUGH', 'THE', 'PLAGUE', 'WAS', 'THERE', 'IN', 'THE', 'MOST', 'PART', 'OF', 'ALL', 'THE', 'HOUSES', 'THEY', 'NEVERTHELESS', 'ENTERED', 'EVERYWHERE', 'THEN', 'PLUNDERED', 'AND', 'CARRIED', 'AWAY', 'ALL', 'THAT', 'WAS', 'WITHIN', 'AND', 'YET', 'FOR', 'ALL', 'THIS', 'NOT', 'ONE', 'OF', 'THEM', 'TOOK', 'ANY', 'HURT', 'WHICH', 'IS', 'A', 'MOST', 'WONDERFUL', 'CASE'] +4198-12281-0001-188: ref=['I', 'BESEECH', 'YOU', 'THINK', 'UPON', 'IT'] +4198-12281-0001-188: hyp=['I', 'BESEECH', 'YOU', 'THINK', 
'UPON', 'IT'] +4198-12281-0002-189: ref=['NEVERTHELESS', 'AT', 'ALL', 'ADVENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'AD', 'CAPITULUM', 'CAPITULANTES'] +4198-12281-0002-189: hyp=['NEVERTHELESS', 'AT', 'ALL', 'VENTURES', 'THEY', 'RANG', 'THE', 'BELLS', 'AT', 'CAPITULAM', 'CAPITULAT', 'DAYS'] +4198-12281-0003-190: ref=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'PANNIERS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0003-190: hyp=['BY', 'THE', 'VIRTUE', 'OF', 'GOD', 'WHY', 'DO', 'NOT', 'YOU', 'SING', 'PENNYERS', 'FAREWELL', 'VINTAGE', 'IS', 'DONE'] +4198-12281-0004-191: ref=['BY', 'THE', 'BELLY', 'OF', 'SANCT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0004-191: hyp=['BY', 'THE', 'BELLY', 'OF', 'SAINT', 'JAMES', 'WHAT', 'SHALL', 'WE', 'POOR', 'DEVILS', 'DRINK', 'THE', 'WHILE'] +4198-12281-0005-192: ref=['LORD', 'GOD', 'DA', 'MIHI', 'POTUM'] +4198-12281-0005-192: hyp=['LORD', 'GOD', 'THOU', 'ME', 'HE', 'POT', 'EM'] +4198-12281-0006-193: ref=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0006-193: hyp=['LET', 'HIM', 'BE', 'CARRIED', 'TO', 'PRISON', 'FOR', 'TROUBLING', 'THE', 'DIVINE', 'SERVICE'] +4198-12281-0007-194: ref=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'AND', 'ALL', 'THE', 'WINTER'] +4198-12281-0007-194: hyp=['WHEREFORE', 'IS', 'IT', 'THAT', 'OUR', 'DEVOTIONS', 'WERE', 'INSTITUTED', 'TO', 'BE', 'SHORT', 'IN', 'THE', 'TIME', 'OF', 'HARVEST', 'AND', 'VINTAGE', 'AND', 'LONG', 'IN', 'THE', 'ADVENT', 'IN', 'ALL', 'THE', 'WINTER'] +4198-12281-0008-195: ref=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', "COP'S", 'BODY', 'FOLLOW', 'ME', 'FOR', 'SANCT', 'ANTHONY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'IF', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WILL', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0008-195: hyp=['HARK', 'YOU', 'MY', 'MASTERS', 'YOU', 'THAT', 'LOVE', 'THE', 'WINE', 'COPSE', 'BODY', 'FOLLOW', 'ME', 'FOR', 'SAINT', 'AUNT', 'ANY', 'BURN', 'ME', 'AS', 'FREELY', 'AS', 'A', 'FAGGOT', 'THEY', 'GET', 'LEAVE', 'TO', 'TASTE', 'ONE', 'DROP', 'OF', 'THE', 'LIQUOR', 'THAT', 'WILL', 'NOT', 'NOW', 'COME', 'AND', 'FIGHT', 'FOR', 'RELIEF', 'OF', 'THE', 'VINE'] +4198-12281-0009-196: ref=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPONDYLES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'DISFIGURED', 'THEIR', 'CHAPS', 'GASHED', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BALAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'A', 'MOWER'] +4198-12281-0009-196: hyp=['TO', 'OTHERS', 'AGAIN', 'HE', 'UNJOINTED', 'THE', 'SPAWN', 'MULES', 'OR', 'KNUCKLES', 'OF', 'THE', 'NECK', 'THIS', 'FIGURED', 'THEIR', 'CHAPS', 'GASHED', 'THEIR', 'FACES', 'MADE', 'THEIR', 'CHEEKS', 'HANG', 'FLAPPING', 'ON', 'THEIR', 'CHIN', 'AND', 'SO', 'SWINGED', 'AND', 'BLAMMED', 'THEM', 'THAT', 'THEY', 'FELL', 'DOWN', 'BEFORE', 'HIM', 'LIKE', 'HAY', 'BEFORE', 'HIM', 'OVER'] +4198-12281-0010-197: ref=['TO', 'SOME', 'WITH', 'A', 'SMART', 'SOUSE', 'ON', 'THE', 'EPIGASTER', 'HE', 'WOULD', 'MAKE', 'THEIR', 'MIDRIFF', 'SWAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOMEPUSH', 'ON', 'THE', 
'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 'OUT'] +4198-12281-0010-197: hyp=['TO', 'SOME', 'WOULD', 'THEY', 'SMART', 'SOUS', 'ON', 'THEIR', 'EBERGASTER', 'HE', 'WILL', 'MAKE', 'THEM', 'MIDRIFTS', 'WAG', 'THEN', 'REDOUBLING', 'THE', 'BLOW', 'GAVE', 'THEM', 'SUCH', 'A', 'HOME', 'PUSH', 'ON', 'THE', 'NAVEL', 'THAT', 'HE', 'MADE', 'THEIR', 'PUDDINGS', 'TO', 'GUSH', 'OUT'] +4198-12281-0011-198: ref=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'ONE', 'SAW'] +4198-12281-0011-198: hyp=['BELIEVE', 'THAT', 'IT', 'WAS', 'THE', 'MOST', 'HORRIBLE', 'SPECTACLE', 'THAT', 'EVER', 'WON', 'SAW'] +4198-12281-0012-199: ref=['O', 'THE', 'HOLY', 'LADY', 'NYTOUCH', 'SAID', 'ONE', 'THE', 'GOOD', 'SANCTESS', 'O', 'OUR', 'LADY', 'OF', 'SUCCOURS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0012-199: hyp=['ALL', 'THE', 'HOLY', 'LADY', 'KNIGHT', 'SAID', 'ONE', 'THE', 'GOOD', 'SANCTIS', 'O', 'OUR', 'LADY', 'OF', 'SECURS', 'SAID', 'ANOTHER', 'HELP', 'HELP'] +4198-12281-0013-200: ref=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'IN', 'DYING'] +4198-12281-0013-200: hyp=['SOME', 'DIED', 'WITHOUT', 'SPEAKING', 'OTHERS', 'SPOKE', 'WITHOUT', 'DYING', 'SOME', 'DIED', 'IN', 'SPEAKING', 'OTHERS', 'SPOKE', 'AND', 'DYING'] +4198-12281-0014-201: ref=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0014-201: hyp=['CAN', 'YOU', 'TELL', 'WITH', 'WHAT', 'INSTRUMENTS', 'THEY', 'DID', 'IT'] +4198-12281-0015-202: ref=['IN', 'THE', 'MEANTIME', 'FRIAR', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BATON', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVOURED', 'TO', 'ESCAPE'] +4198-12281-0015-202: hyp=['IN', 'THE', 'MEANTIME', 'FRIED', 'JOHN', 'WITH', 'HIS', 'FORMIDABLE', 'BUT', 'TIME', 'OF', 'THE', 'CROSS', 'GOT', 'TO', 'THE', 'BREACH', 'WHICH', 'THE', 'ENEMIES', 'HAD', 'MADE', 'AND', 'THERE', 'STOOD', 'TO', 'SNATCH', 'UP', 'THOSE', 'THAT', 'ENDEAVOURED', 'TO', 'ESCAPE'] +4198-61336-0000-247: ref=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'ASHUR', 'BY', 'THE', 'ADHERENTS', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0000-247: hyp=['IT', 'IS', 'SIGNIFICANT', 'TO', 'NOTE', 'IN', 'THIS', 'CONNECTION', 'THAT', 'THE', 'NEW', 'KING', 'WAS', 'AN', 'UNSWERVING', 'ADHERENT', 'OF', 'THE', 'CULT', 'OF', 'AYESHA', 'BY', 'THE', 'ADHERENCE', 'OF', 'WHICH', 'HE', 'WAS', 'PROBABLY', 'STRONGLY', 'SUPPORTED'] +4198-61336-0001-248: ref=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0001-248: hyp=['AT', 'THE', 'BEGINNING', 'OF', 'HIS', 'REIGN', 'THERE', 'WAS', 'MUCH', 'SOCIAL', 'DISCONTENT', 'AND', 'SUFFERING'] +4198-61336-0002-249: ref=['WELL', 'MIGHT', 'SHARDURIS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARPAD'] +4198-61336-0002-249: hyp=['WELL', 'MIGHT', 'SHOW', 'DORIS', 'EXCLAIM', 'IN', 'THE', 'WORDS', 'OF', 'THE', 'PROPHET', 'WHERE', 'IS', 'THE', 'KING', 'OF', 'ARBED'] +4198-61336-0003-250: ref=['TIGLATH', 'PILESER', 'HOWEVER', 'CROSSED', 'THE', 'EUPHRATES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'URARTIAN', 'ARMY', 'IN', 
'QUMMUKH'] +4198-61336-0003-250: hyp=['DICK', 'LAUGHED', 'PLEASURE', 'HOWEVER', 'CROSSED', 'THE', 'EUPHATEES', 'AND', 'MOVING', 'NORTHWARD', 'DELIVERED', 'AN', 'UNEXPECTED', 'ATTACK', 'ON', 'THE', 'GRACIAN', 'ARMY', 'AND', 'KUMAK'] +4198-61336-0004-251: ref=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'ITS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0004-251: hyp=['A', 'FIERCE', 'BATTLE', 'ENSUED', 'AND', 'ONE', 'OF', 'HIS', 'DRAMATIC', 'INCIDENTS', 'WAS', 'A', 'SINGLE', 'COMBAT', 'BETWEEN', 'THE', 'RIVAL', 'KINGS'] +4198-61336-0005-252: ref=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHARDURIS', 'WHO', 'LEAPT', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTINGENT', 'OF', 'CAVALRY'] +4198-61336-0005-252: hyp=['AN', 'ATTEMPT', 'WAS', 'MADE', 'TO', 'CAPTURE', 'KING', 'SHORDURUS', 'WHO', 'LEAPED', 'FROM', 'HIS', 'CHARIOT', 'AND', 'MADE', 'HASTY', 'ESCAPE', 'ON', 'HORSEBACK', 'HOTLY', 'PURSUED', 'IN', 'THE', 'GATHERING', 'DARKNESS', 'BY', 'AN', 'ASSYRIAN', 'CONTINGENT', 'OF', 'CAVALRY'] +4198-61336-0006-253: ref=['DESPITE', 'THE', 'BLOW', 'DEALT', 'AGAINST', 'URARTU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0006-253: hyp=['DESPITE', 'THE', 'BLUE', 'DEALT', 'AGAINST', 'URITU', 'ASSYRIA', 'DID', 'NOT', 'IMMEDIATELY', 'REGAIN', 'POSSESSION', 'OF', 'NORTH', 'SYRIA'] +4198-61336-0007-254: ref=['THE', 'SHIFTY', 'MATI', 'ILU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHARDURIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'INVADE', 'NORTH', 'SYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0007-254: hyp=['THE', 'SHIFTY', 'MAN', 'TO', 'ILIU', 'EITHER', 'CHERISHED', 'THE', 'HOPE', 'THAT', 'SHALL', 'DORRIS', 'WOULD', 'RECOVER', 'STRENGTH', 'AND', 'AGAIN', 'IN', 'VAIN', 'NORTH', 'ASSYRIA', 'OR', 'THAT', 'HE', 'MIGHT', 'HIMSELF', 'ESTABLISH', 'AN', 'EMPIRE', 'IN', 'THAT', 'REGION'] +4198-61336-0008-255: ref=['TIGLATH', 'PILESER', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0008-255: hyp=['T', 'GLASS', 'BE', 'LEISURE', 'HAD', 'THEREFORE', 'TO', 'MARCH', 'WESTWARD', 'AGAIN'] +4198-61336-0009-256: ref=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0009-256: hyp=['FOR', 'THREE', 'YEARS', 'HE', 'CONDUCTED', 'VIGOROUS', 'CAMPAIGNS', 'IN', 'THE', 'WESTERN', 'LAND', 'WHERE', 'HE', 'MET', 'WITH', 'VIGOROUS', 'RESISTANCE'] +4198-61336-0010-257: ref=['ARPAD', 'WAS', 'CAPTURED', 'AND', 'MATI', 'ILU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0010-257: hyp=['OUR', 'PAD', 'WAS', 'CAPTURED', 'AND', 'MET', 'TO', 'ILL', 'YOU', 'DEPOSED', 'AND', 'PROBABLY', 'PUT', 'TO', 'DEATH'] +4198-61336-0011-258: ref=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'ASSYRIA'] +4198-61336-0011-258: hyp=['ONCE', 'AGAIN', 'THE', 'HEBREWS', 'CAME', 'INTO', 'CONTACT', 'WITH', 'THE', 'SYRIA'] +4198-61336-0012-259: ref=['ITS', 'FALL', 'MAY', 'NOT', 'HAVE', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0012-259: hyp=["IT'S", 'FOR', 'ME', 'NOT', 'HAV', 'BEEN', 'UNCONNECTED', 'WITH', 'THE', 'TREND', 'OF', 
'EVENTS', 'IN', 'ASSYRIA', 'DURING', 'THE', 'CLOSING', 'YEARS', 'OF', 'THE', 'MIDDLE', 'EMPIRE'] +4198-61336-0013-260: ref=['JEHOASH', 'THE', 'GRANDSON', 'OF', 'JEHU', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0013-260: hyp=['JO', 'ASH', 'THE', 'GRANDSON', 'OF', 'JEHOV', 'HAD', 'ACHIEVED', 'SUCCESSES', 'IN', 'CONFLICT', 'WITH', 'DAMASCUS'] +4198-61336-0014-261: ref=['SIX', 'MONTHS', 'AFTERWARDS', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'SHALLUM'] +4198-61336-0014-261: hyp=['SIX', 'MONTHS', 'AFTERWARD', 'HE', 'WAS', 'ASSASSINATED', 'BY', 'CELEM'] +4198-61336-0015-262: ref=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0015-262: hyp=['THIS', 'USURPER', 'HELD', 'SWAY', 'AT', 'SAMARIA', 'FOR', 'ONLY', 'A', 'MONTH'] +4198-61336-0016-263: ref=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MENAHEM', 'THE', 'USURPER', 'WHO', 'WAS', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0016-263: hyp=['NO', 'RESISTANCE', 'WAS', 'POSSIBLE', 'ON', 'THE', 'PART', 'OF', 'MANY', 'HIM', 'THE', 'USURPER', 'WHOSE', 'PROBABLY', 'READY', 'TO', 'WELCOME', 'THE', 'ASSYRIAN', 'CONQUEROR', 'SO', 'THAT', 'BY', 'ARRANGING', 'AN', 'ALLIANCE', 'HE', 'MIGHT', 'SECURE', 'HIS', 'OWN', 'POSITION'] +4198-61336-0017-264: ref=['TIGLATH', 'PILESER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIAN', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTH', 'EAST'] +4198-61336-0017-264: hyp=['TAKE', 'LAST', 'PLEASE', 'HER', 'NEXT', 'OPERATED', 'AGAINST', 'THE', 'MEDIUM', 'AND', 'OTHER', 'HILL', 'TRIBES', 'IN', 'THE', 'NORTHEAST'] +4198-61336-0018-265: ref=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0018-265: hyp=['HE', 'OVERTHREW', 'BUILDINGS', 'DESTROYED', 'ORCHARDS', 'AND', 'TRANSPORTED', 'TO', 'NINEVEH', 'THOSE', 'OF', 'THE', 'INHABITANTS', 'HE', 'HAD', 'NOT', 'PUT', 'TO', 'THE', 'SWORD', 'WITH', 'ALL', 'THE', 'LIVE', 'STOCK', 'HE', 'COULD', 'LAY', 'HANDS', 'ON'] +4198-61336-0019-266: ref=['THUS', 'WAS', 'URARTU', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORMER', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0019-266: hyp=['THIS', 'WAS', 'URA', 'TO', 'CRIPPLED', 'AND', 'HUMILIATED', 'IT', 'NEVER', 'REGAINED', 'ITS', 'FORM', 'OF', 'PRESTIGE', 'AMONG', 'THE', 'NORTHERN', 'STATES'] +4198-61336-0020-267: ref=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIGLATH', 'PILESER', 'RETURNED', 'TO', 'SYRIA'] +4198-61336-0020-267: hyp=['IN', 'THE', 'FOLLOWING', 'YEAR', 'TIC', 'LAUGH', 'BELLEGER', 'RETURNED', 'TO', 'SYRIA'] +4198-61336-0021-268: ref=['MENAHEM', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PEKAHIAH'] +4198-61336-0021-268: hyp=['MANY', 'HIM', 'KING', 'OF', 'ISRAEL', 'HAD', 'DIED', 'AND', 'WAS', 'SUCCEEDED', 'BY', 'HIS', 'SON', 'PECAH'] +4198-61336-0022-269: ref=['JUDAH', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0022-269: hyp=['JULIA', 'HAD', 'TAKEN', 'ADVANTAGE', 'OF', 'THE', 'DISTURBED', 'CONDITIONS', 'IN', 'ISRAEL', 'TO', 'ASSERT', 'ITS', 'INDEPENDENCE'] +4198-61336-0023-270: ref=['HE', 'CONDEMNED', 
'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0023-270: hyp=['HE', 'CONDEMNED', 'ISRAEL', 'FOR', 'ITS', 'IDOLATRIES', 'AND', 'CRIED'] +4198-61336-0024-271: ref=['FOR', 'THUS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'AND', 'YE', 'SHALL', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'O', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0024-271: hyp=['FOR', 'THIS', 'SAITH', 'THE', 'LORD', 'UNTO', 'THE', 'HOUSE', 'OF', 'ISRAEL', 'SEEK', 'YE', 'ME', 'A', 'YE', 'TO', 'LIVE', 'HAVE', 'YE', 'OFFERED', 'UNTO', 'ME', 'SACRIFICES', 'AND', 'OFFERINGS', 'IN', 'THE', 'WILDERNESS', 'FORTY', 'YEARS', 'OR', 'HOUSE', 'OF', 'ISRAEL'] +4198-61336-0025-272: ref=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 'PERISH'] +4198-61336-0025-272: hyp=['THE', 'REMNANT', 'OF', 'THE', 'PHILISTINES', 'SHALL', 'PERISH'] +4198-61336-0026-273: ref=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0026-273: hyp=['ISRAEL', 'WAS', 'ALSO', 'DEALT', 'WITH'] +4198-61336-0027-274: ref=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0027-274: hyp=['HE', 'SWEPT', 'THROUGH', 'ISRAEL', 'LIKE', 'A', 'HURRICANE'] +4198-61336-0028-275: ref=['THE', 'PHILISTINES', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0028-275: hyp=['THE', 'FURTHER', 'STEAMS', 'AND', 'THE', 'ARABIANS', 'OF', 'THE', 'DESERT', 'WERE', 'ALSO', 'SUBDUED'] +4198-61336-0029-276: ref=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0029-276: hyp=['HE', 'INVADED', 'BABYLONIA'] +4198-61336-0030-277: ref=['UKINZER', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4198-61336-0030-277: hyp=['A', 'KINDRED', 'TOOK', 'REFUGE', 'IN', 'HIS', 'CAPITAL', 'SHAPIA', 'WHICH', 'HELD', 'OUT', 'SUCCESSFULLY', 'ALTHOUGH', 'THE', 'SURROUNDING', 'COUNTRY', 'WAS', 'RAVAGED', 'AND', 'DESPOILED'] +4294-14317-0000-1866: ref=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'I', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0000-1866: hyp=['AS', 'I', 'THOUGHT', 'THAT', 'THIS', 'WAS', 'DUE', 'TO', 'SOME', 'FAULT', 'IN', 'THE', 'EARTH', 'I', 'WANTED', 'TO', 'MAKE', 'THESE', 'FIRST', 'EXPERIMENTS', 'BEFORE', 'AND', 'UNDERTOOK', 'MY', 'PERSEUS'] +4294-14317-0001-1867: ref=['WHEN', 'I', 'SAW', 'THAT', 'THIS', 'BUST', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SET', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0001-1867: hyp=['WHEN', 'I', 'SAW', 'THIS', 'BUST', 'CAME', 'OUT', 'SHARP', 'AND', 'CLEAN', 'I', 'SAID', 'AT', 'ONCE', 'TO', 'CONSTRUCT', 'A', 'LITTLE', 'FURNACE', 'IN', 'THE', 'WORKSHOP', 'ERECTED', 'FOR', 'ME', 'BY', 'THE', 'DUKE', 'AFTER', 'MY', 'OWN', 'PLANS', 'AND', 'DESIGN', 'IN', 'THE', 'HOUSE', 'WHICH', 'THE', 'DUKE', 'HAD', 'GIVEN', 'ME'] +4294-14317-0002-1868: ref=['IT', 'WAS', 'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0002-1868: hyp=['IT', 'WAS', 
'AN', 'EXTREMELY', 'DIFFICULT', 'TASK', 'AND', 'I', 'WAS', 'ANXIOUS', 'TO', 'OBSERVE', 'ALL', 'THE', 'NICETIES', 'OF', 'ART', 'WHICH', 'I', 'HAD', 'LEARNED', 'SO', 'AS', 'NOT', 'TO', 'LAPSE', 'INTO', 'SOME', 'ERROR'] +4294-14317-0003-1869: ref=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0003-1869: hyp=['I', 'IN', 'MY', 'TURN', 'FEEL', 'THE', 'SAME', 'DESIRE', 'AND', 'HOPE', 'TO', 'PLAY', 'MY', 'PART', 'LIKE', 'THEM', 'THEREFORE', 'MY', 'LORD', 'GIVE', 'ME', 'THE', 'LEAVE', 'TO', 'GO'] +4294-14317-0004-1870: ref=['BUT', 'BEWARE', 'OF', 'LETTING', 'BANDINELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0004-1870: hyp=['BUT', 'BEWARE', 'OF', 'LETTING', 'BEND', 'NELLO', 'QUIT', 'YOU', 'RATHER', 'BESTOW', 'UPON', 'HIM', 'ALWAYS', 'MORE', 'THAN', 'HE', 'DEMANDS', 'FOR', 'IF', 'HE', 'GOES', 'INTO', 'FOREIGN', 'PARTS', 'HIS', 'IGNORANCE', 'IS', 'SO', 'PRESUMPTUOUS', 'THAT', 'HE', 'IS', 'JUST', 'THE', 'MAN', 'TO', 'DISGRACE', 'OUR', 'MOST', 'ILLUSTRIOUS', 'SCHOOL'] +4294-14317-0005-1871: ref=['I', 'ASK', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABOURS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0005-1871: hyp=['I', 'ASKED', 'NO', 'FURTHER', 'REWARD', 'FOR', 'MY', 'LABOURS', 'UP', 'TO', 'THIS', 'TIME', 'THAN', 'THE', 'GRACIOUS', 'FAVOUR', 'OF', 'YOUR', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0006-1872: ref=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0006-1872: hyp=['THEN', 'I', 'THANKED', 'HIM', 'AND', 'SAID', 'I', 'HAD', 'NO', 'GREATER', 'DESIRE', 'THAN', 'TO', 'SHOW', 'THOSE', 'ENVIOUS', 'FOLK', 'THAT', 'I', 'HAD', 'IT', 'IN', 'ME', 'TO', 'EXECUTE', 'THE', 'PROMISED', 'WORK'] +4294-14317-0007-1873: ref=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0007-1873: hyp=['I', 'HAD', 'BETTER', 'LOOK', 'TO', 'MY', 'CONDUCT', 'FOR', 'IT', 'HAD', 'COME', 'TO', 'HIS', 'EARS', 'THAT', 'I', 'RELIED', 'UPON', 'HIS', 'FAVOUR', 'TO', 'TAKE', 'IN', 'FIRST', 'ONE', 'MAN', 'AND', 'THEN', 'ANOTHER'] +4294-14317-0008-1874: ref=['I', 'BEGGED', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0008-1874: hyp=['I', 'BEGGED', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY', 'TO', 'NAME', 'A', 'SINGLE', 'PERSON', 'WHOM', 'I', 'HAD', 'EVER', 'TAKEN', 'IN'] +4294-14317-0009-1875: ref=['I', 'SAID', 'MY', 'LORD', 'I', 'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'ANVILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMAN', 'GIORGIO', 'DA', 'CORTONA', 'FIFTEEN', 'DAYS', 'AGO', 
'TO', 'FETCH', 'BACK'] +4294-14317-0009-1875: hyp=['I', 'SAID', 'MY', 'LORD', 'I', 'THANK', 'YOU', 'AND', 'BEG', 'YOU', 'TO', 'CONDESCEND', 'SO', 'FAR', 'AS', 'TO', 'LISTEN', 'TO', 'FOUR', 'WORDS', 'IT', 'IS', 'TRUE', 'THAT', 'HE', 'LENT', 'ME', 'A', 'PAIR', 'OF', 'OLD', 'SCALES', 'TWO', 'AMBILS', 'AND', 'THREE', 'LITTLE', 'HAMMERS', 'WHICH', 'ARTICLES', 'I', 'BEGGED', 'HIS', 'WORKMEN', 'GEORGIO', 'DECORTUNA', 'FIFTEEN', 'DAYS', 'AGO', 'TO', 'FETCH', 'BACK'] +4294-14317-0010-1876: ref=['GIORGIO', 'CAME', 'FOR', 'THEM', 'HIMSELF'] +4294-14317-0010-1876: hyp=['GEORGIO', 'CAME', 'FOR', 'THEM', 'HIS', 'HEALTH'] +4294-14317-0011-1877: ref=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] +4294-14317-0011-1877: hyp=['I', 'HOPE', 'TO', 'PROVE', 'ON', 'WHAT', 'ACCOUNT', 'THAT', 'SCOUNDREL', 'TRIES', 'TO', 'BRING', 'ME', 'INTO', 'DISGRACE'] +4294-14317-0012-1878: ref=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDI', 'WITH', 'HIM'] +4294-14317-0012-1878: hyp=['WHEN', 'HE', 'HAD', 'HEARD', 'THIS', 'SPEECH', 'THE', 'DUKE', 'ROSE', 'UP', 'IN', 'ANGER', 'AND', 'SENT', 'FOR', 'BERNARDONE', 'WHO', 'WAS', 'FORCED', 'TO', 'TAKE', 'FLIGHT', 'AS', 'FAR', 'AS', 'VENICE', 'HE', 'AND', 'ANTONIO', 'LANDY', 'WITH', 'HIM'] +4294-14317-0013-1879: ref=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGELLO'] +4294-14317-0013-1879: hyp=['YOU', 'HAD', 'BETTER', 'PUT', 'THIS', 'TO', 'THE', 'PROOF', 'AND', 'I', 'WILL', 'GO', 'AT', 'ONCE', 'TO', 'THE', 'BARGENO'] +4294-14317-0014-1880: ref=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALESSANDRO', 'WHICH', 'ARE', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 'FRANCIS', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', 'SCULPTORS', 'CRAFT'] +4294-14317-0014-1880: hyp=['I', 'AM', 'WILLING', 'TO', 'ENTER', 'INTO', 'COMPETITION', 'WITH', 'THE', 'ANCIENTS', 'AND', 'FEEL', 'ABLE', 'TO', 'SURPASS', 'THEM', 'FOR', 'SINCE', 'THOSE', 'EARLY', 'DAYS', 'IN', 'WHICH', 'I', 'MADE', 'THE', 'MEDALS', 'OF', 'POPE', 'CLEMENT', 'I', 'HAVE', 'LEARNED', 'SO', 'MUCH', 'THAT', 'I', 'CAN', 'NOW', 'PRODUCE', 'FAR', 'BETTER', 'PIECES', 'OF', 'THE', 'KIND', 'I', 'THINK', 'I', 'CAN', 'ALSO', 'OUTDO', 'THE', 'COINS', 'I', 'STRUCK', 'FOR', 'DUKE', 'ALISANDRO', 'WHICH', 'ARE', 'STILL', 'HELD', 'IN', 'HIGH', 'ESTEEM', 'IN', 'LIKE', 'MANNER', 'I', 'COULD', 'MAKE', 'FOR', 'YOU', 'LARGE', 'PIECES', 'OF', 'GOLD', 'AND', 'SILVER', 'PLATE', 'AS', 'I', 'DID', 'SO', 'OFTEN', 'FOR', 'THAT', 'NOBLE', 'MONARCH', 'KING', 
'FRANCIS', 'OF', 'FRANCE', 'THANKS', 'TO', 'THE', 'GREAT', 'CONVENIENCES', 'HE', 'ALLOWED', 'ME', 'WITHOUT', 'EVER', 'LOSING', 'TIME', 'FOR', 'THE', 'EXECUTION', 'OF', 'COLOSSAL', 'STATUES', 'OR', 'OTHER', 'WORKS', 'OF', 'THE', "SCULPTOR'S", 'CRAFT'] +4294-14317-0015-1881: ref=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERO', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0015-1881: hyp=['AFTER', 'SEVERAL', 'MONTHS', 'WERE', 'WASTED', 'AND', 'PIERRE', 'WOULD', 'NEITHER', 'WORK', 'NOR', 'PUT', 'MEN', 'TO', 'WORK', 'UPON', 'THE', 'PIECE', 'I', 'MADE', 'HIM', 'GIVE', 'IT', 'BACK'] +4294-14317-0016-1882: ref=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGED', 'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0016-1882: hyp=['AMONG', 'ARTISTS', 'CERTAIN', 'ENRAGE', 'SCULPTORS', 'LAUGHED', 'AT', 'ME', 'AND', 'CALLED', 'ME', 'THE', 'NEW', 'SCULPTOR'] +4294-14317-0017-1883: ref=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PERSEUS', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0017-1883: hyp=['NOW', 'I', 'HOPE', 'TO', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'AN', 'OLD', 'SCULPTOR', 'IF', 'GOD', 'SHALL', 'GRANT', 'ME', 'THE', 'BOON', 'OF', 'FINISHING', 'MY', 'PERSEUS', 'FOR', 'THAT', 'NOBLE', 'PIAZZA', 'OF', 'HIS', 'MOST', 'ILLUSTRIOUS', 'EXCELLENCY'] +4294-14317-0018-1884: ref=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-14317-0018-1884: hyp=['HAVING', 'THIS', 'EXCELLENT', 'RESOLVE', 'IN', 'HEART', 'I', 'REACHED', 'MY', 'HOME'] +4294-32859-0000-1942: ref=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0000-1942: hyp=['WYLDER', 'WAS', 'RATHER', 'SURLY', 'AFTER', 'THE', 'LADIES', 'HAD', 'FLOATED', 'AWAY', 'FROM', 'THE', 'SCENE', 'AND', 'HE', 'DRANK', 'HIS', 'LIQUOR', 'DOGGEDLY'] +4294-32859-0001-1943: ref=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0001-1943: hyp=['IT', 'WAS', 'HIS', 'FANCY', 'I', 'SUPPOSE', 'TO', 'REVIVE', 'CERTAIN', 'SENTIMENTAL', 'RELATIONS', 'WHICH', 'HAD', 'IT', 'MAY', 'BE', 'ONCE', 'EXISTED', 'BETWEEN', 'HIM', 'AND', 'MISS', 'LAKE', 'AND', 'HE', 'WAS', 'A', 'PERSON', 'OF', 'THAT', 'COMBATIVE', 'TEMPERAMENT', 'THAT', 'MAGNIFIES', 'AN', 'OBJECT', 'IN', 'PROPORTION', 'AS', 'ITS', 'PURSUIT', 'IS', 'THWARTED'] +4294-32859-0002-1944: ref=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', "RETZCH'S", 'PRETTY', 'OUTLINES'] +4294-32859-0002-1944: hyp=['THE', 'STORY', 'OF', 'FRIDOLIN', 'AND', 'WRETCHES', 'PRETTY', 'OUTLINE'] +4294-32859-0003-1945: ref=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0003-1945: hyp=['SIT', 'DOWN', 'BESIDE', 'ME', 'AND', "I'LL", 'TELL', 'YOU', 'THE', 'STORY'] +4294-32859-0004-1946: ref=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 
'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0004-1946: hyp=['HE', 'ASSISTED', 'AT', 'IT', 'BUT', 'TOOK', 'NO', 'PART', 'AND', 'IN', 'FACT', 'WAS', 'LISTENING', 'TO', 'THAT', 'OTHER', 'CONVERSATION', 'WHICH', 'SOUNDED', 'WITH', 'ITS', 'PLEASANT', 'GABBLE', 'AND', 'LAUGHTER', 'LIKE', 'A', 'LITTLE', 'MUSICAL', 'TINKLE', 'OF', 'BELLS', 'IN', 'THE', 'DISTANCE'] +4294-32859-0005-1947: ref=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] +4294-32859-0005-1947: hyp=['BUT', 'HONEST', 'MARK', 'FORGOT', 'THAT', 'YOUNG', 'LADIES', 'DO', 'NOT', 'ALWAYS', 'COME', 'OUT', 'QUITE', 'ALONE', 'AND', 'JUMP', 'UNASSISTED', 'INTO', 'THEIR', 'VEHICLES'] +4294-35475-0000-1885: ref=['BUT', 'THE', 'MIDDLE', 'SON', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0000-1885: hyp=['BUT', 'THE', 'MIDDLE', 'SUN', 'WAS', 'LITTLE', 'AND', 'LORN', 'HE', 'WAS', 'NEITHER', 'DARK', 'NOR', 'FAIR', 'HE', 'WAS', 'NEITHER', 'HANDSOME', 'NOR', 'STRONG'] +4294-35475-0001-1886: ref=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'OH', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0001-1886: hyp=['THROWING', 'HIMSELF', 'ON', 'HIS', 'KNEES', 'BEFORE', 'THE', 'KING', 'HE', 'CRIED', 'O', 'ROYAL', 'SIRE', 'BESTOW', 'UPON', 'ME', 'ALSO', 'A', 'SWORD', 'AND', 'A', 'STEED', 'THAT', 'I', 'MAY', 'UP', 'AND', 'AWAY', 'TO', 'FOLLOW', 'MY', 'BRETHREN'] +4294-35475-0002-1887: ref=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0002-1887: hyp=['BUT', 'THE', 'KING', 'LAUGHED', 'HIM', 'TO', 'SCORN', 'THOU', 'A', 'SWORD', 'HE', 'QUOTH'] +4294-35475-0003-1888: ref=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IF', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0003-1888: hyp=['IN', 'SOOTH', 'THOU', 'SHALT', 'HAVE', 'ONE', 'BUT', 'IT', 'SHALL', 'BE', 'ONE', 'BEFITTING', 'THY', 'MAIDEN', 'SIZE', 'AND', 'COURAGE', 'IT', 'SO', 'SMALL', 'A', 'WEAPON', 'CAN', 'BE', 'FOUND', 'IN', 'ALL', 'MY', 'KINGDOM'] +4294-35475-0004-1889: ref=['FORTHWITH', 'THE', 'GRINNING', 'JESTER', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0004-1889: hyp=['FORTHWITH', 'THE', 'GRINNING', 'GESTURE', 'BEGAN', 'SHRIEKING', 'WITH', 'LAUGHTER', 'SO', 'THAT', 'THE', 'BELLS', 'UPON', 'HIS', 'MOTLEY', 'CAP', 'WERE', 'ALL', 'SET', 'A', 'JANGLING'] +4294-35475-0005-1890: ref=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SWORD', 'OF', 'ETHELRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', "TAILOR'S", 'GIRDLE'] +4294-35475-0005-1890: hyp=['I', 'DID', 'BUT', 'LAUGH', 'TO', 'THINK', 'THE', 'SORT', 'OF', 'EPLORRIED', 'HAD', 'BEEN', 'SO', 'QUICKLY', 'FOUND', 'RESPONDED', 'THE', 'JESTER', 'AND', 'HE', 'POINTED', 'TO', 'THE', 'SCISSORS', 'HANGING', 'FROM', 'THE', 
"TAILOR'S", 'GIRDLE'] +4294-35475-0006-1891: ref=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 'FOREST', 'TOO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0006-1891: hyp=['ONE', 'NIGHT', 'AS', 'HE', 'LAY', 'IN', 'A', 'DEEP', 'FOREST', 'TWO', 'UNHAPPY', 'TO', 'SLEEP', 'HE', 'HEARD', 'A', 'NOISE', 'NEAR', 'AT', 'HAND', 'IN', 'THE', 'BUSHES'] +4294-35475-0007-1892: ref=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'REND', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0007-1892: hyp=['THOU', 'SHALT', 'HAVE', 'THY', 'LIBERTY', 'HE', 'CRIED', 'EVEN', 'THOUGH', 'THOU', 'SHOULDST', 'RUN', 'ME', 'IN', 'PIECES', 'THE', 'MOMENT', 'THOU', 'ART', 'FREE'] +4294-35475-0008-1893: ref=['IT', 'HAD', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0008-1893: hyp=['HE', 'HAD', 'HID', 'IT', 'SUDDENLY', 'DISAPPEARED', 'AND', 'IN', 'ITS', 'PLACE', 'STOOD', 'A', 'BEAUTIFUL', 'FAIRY', 'WITH', 'FILMY', 'WINGS', 'WHICH', 'SHONE', 'LIKE', 'RAINBOWS', 'IN', 'THE', 'MOONLIGHT'] +4294-35475-0009-1894: ref=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0009-1894: hyp=['AT', 'THIS', 'MOMENT', 'THERE', 'WAS', 'A', 'DISTANT', 'RUMBLING', 'AS', 'OF', 'THUNDER', 'TIS', 'THE', 'OGRE', 'CRIED', 'THE', 'FAIRY', 'WE', 'MUST', 'HASTEN'] +4294-35475-0010-1895: ref=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0010-1895: hyp=['SCISSORS', 'GROW', 'A', "GIANT'S", 'HEIGHT', 'AND', 'SAVE', 'US', 'FROM', 'THE', "OGRE'S", 'MIGHT'] +4294-35475-0011-1896: ref=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTON'] +4294-35475-0011-1896: hyp=['HE', 'COULD', 'SEE', 'THE', 'OGRE', 'STANDING', 'POWERLESS', 'TO', 'HURT', 'HIM', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'CHASM', 'AND', 'GNASHING', 'HIS', 'TEETH', 'EACH', 'ONE', 'OF', 'WHICH', 'WAS', 'AS', 'BIG', 'AS', 'A', 'MILLSTONE'] +4294-35475-0012-1897: ref=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0012-1897: hyp=['THE', 'SIGHT', 'WAS', 'SO', 'TERRIBLE', 'THAT', 'HE', 'TURNED', 'ON', 'HIS', 'HEEL', 'AND', 'FLED', 'AWAY', 'AS', 'FAST', 'AS', 'HIS', 'FEET', 'COULD', 'CARRY', 'HIM'] +4294-35475-0013-1898: ref=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0013-1898: hyp=['THOU', 'SHALT', 'NOT', 'BE', 'LEFT', 'A', 'PRISONER', 'IN', 'THIS', 'DISMAL', 'SPOT', 'WHILE', 'I', 'HAVE', 'THE', 'POWER', 'TO', 'HELP', 'THEE'] +4294-35475-0014-1899: ref=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'ITS', 'FREEDOM'] +4294-35475-0014-1899: hyp=['HE', 'LIFTED', 'THE', 'SCISSORS', 'AND', 'WITH', 'ONE', 'STROKE', 'DESTROYED', 'THE', 'WEB', 'AND', 'GAVE', 'THE', 'FLY', 'TO', 'READ', 'THEM'] 
+4294-35475-0015-1900: ref=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 'KEYHOLE'] +4294-35475-0015-1900: hyp=['A', 'FAINT', 'GLIMMER', 'OF', 'LIGHT', 'ON', 'THE', 'OPPOSITE', 'WALL', 'SHOWS', 'ME', 'THE', 'KEYHOLE'] +4294-35475-0016-1901: ref=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0016-1901: hyp=['THE', 'PRINCE', 'SPENT', 'ALL', 'THE', 'FOLLOWING', 'TIME', 'UNTIL', 'MIDNIGHT', 'TRYING', 'TO', 'THINK', 'OF', 'A', 'SUITABLE', 'VERSE', 'TO', 'SAY', 'TO', 'THE', 'SCISSORS'] +4294-35475-0017-1902: ref=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] +4294-35475-0017-1902: hyp=['AS', 'HE', 'UTTERED', 'THE', 'WORDS', 'THE', 'SCISSORS', 'LEAPED', 'OUT', 'OF', 'HIS', 'HAND', 'AND', 'BEGAN', 'TO', 'CUT', 'THROUGH', 'THE', 'WOODEN', 'SHUTTERS', 'AS', 'EASILY', 'AS', 'THROUGH', 'A', 'CHEESE'] +4294-35475-0018-1903: ref=['IN', 'A', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'HAD', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0018-1903: hyp=['IN', 'THE', 'VERY', 'SHORT', 'TIME', 'THE', 'PRINCE', 'HAD', 'CRAWLED', 'THROUGH', 'THE', 'OPENING'] +4294-35475-0019-1904: ref=['WHILE', 'HE', 'STOOD', 'LOOKING', 'AROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'ALIGHTED', 'ON', 'HIS', 'ARM', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0019-1904: hyp=['WHILE', 'HE', 'STOOD', 'LOOKING', 'AROUND', 'HIM', 'IN', 'BEWILDERMENT', 'A', 'FIREFLY', 'LIGHTED', 'ON', 'HIS', 'HEART', 'FLASHING', 'ITS', 'LITTLE', 'LANTERN', 'IN', 'THE', "PRINCE'S", 'FACE', 'IT', 'CRIED', 'THIS', 'WAY', 'MY', 'FRIEND', 'THE', 'FLY', 'SENT', 'ME', 'TO', 'GUIDE', 'YOU', 'TO', 'A', 'PLACE', 'OF', 'SAFETY'] +4294-35475-0020-1905: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0020-1905: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'ME', 'CRIED', 'THE', 'POOR', 'PEASANT'] +4294-35475-0021-1906: ref=['MY', 'GRAIN', 'MUST', 'FALL', 'AND', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVERRIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0021-1906: hyp=['MY', 'GRAIN', 'MUST', 'FALL', 'IN', 'ROT', 'IN', 'THE', 'FIELD', 'FROM', 'OVER', 'RIPENESS', 'BECAUSE', 'I', 'HAVE', 'NOT', 'THE', 'STRENGTH', 'TO', 'RISE', 'AND', 'HARVEST', 'IT', 'THEN', 'INDEED', 'MUST', 'WE', 'ALL', 'STARVE'] +4294-35475-0022-1907: ref=['THE', 'GRANDAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 'COUNCILLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 'TO', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0022-1907: hyp=['THE', 'GRAND', 'DAME', 'WHOM', 'HE', 'SUPPLIED', 'WITH', 'FAGOTS', 'THE', 'MERCHANT', 'WHOM', 'HE', 'RESCUED', 'FROM', 'ROBBERS', 'THE', "KING'S", 
'COUNSELLOR', 'TO', 'WHOM', 'HE', 'GAVE', 'AID', 'ALL', 'BECAME', 'HIS', 'FRIENDS', 'UP', 'AND', 'DOWN', 'THE', 'LAND', 'BEGGAR', 'OR', 'LORD', 'HOMELESS', 'WANDERER', 'OR', 'HIGH', 'BORN', 'DAME', 'HE', 'GLADLY', 'GAVE', 'UNSELFISH', 'SERVICE', 'ALL', 'UNSOUGHT', 'AND', 'SUCH', 'AS', 'HE', 'HELPED', 'STRAIGHTWAY', 'BECAME', 'HIS', 'FRIENDS'] +4294-35475-0023-1908: ref=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0023-1908: hyp=['TO', 'HIM', 'WHO', 'COULD', 'BRING', 'HER', 'BACK', 'TO', 'HER', "FATHER'S", 'CASTLE', 'SHOULD', 'BE', 'GIVEN', 'THE', 'THRONE', 'AND', 'KINGDOM', 'AS', 'WELL', 'AS', 'THE', 'PRINCESS', 'HERSELF', 'SO', 'FROM', 'FAR', 'AND', 'NEAR', 'INDEED', 'FROM', 'ALMOST', 'EVERY', 'COUNTRY', 'UNDER', 'THE', 'SUN', 'CAME', 'KNIGHTS', 'AND', 'PRINCES', 'TO', 'FIGHT', 'THE', 'OGRE'] +4294-35475-0024-1909: ref=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "ETHELRIED'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0024-1909: hyp=['AMONG', 'THOSE', 'WHO', 'DREW', 'BACK', 'WERE', "EPILRIED'S", 'BROTHERS', 'THE', 'THREE', 'THAT', 'WERE', 'DARK', 'AND', 'THE', 'THREE', 'THAT', 'WERE', 'FAIR'] +4294-35475-0025-1910: ref=['BUT', 'ETHELRIED', 'HEEDED', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0025-1910: hyp=['BUT', 'ETHELRED', 'HE', 'DID', 'NOT', 'THEIR', 'TAUNTS'] +4294-35475-0026-1911: ref=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'CISEAUX'] +4294-35475-0026-1911: hyp=['SO', 'THEY', 'ALL', 'CRIED', 'OUT', 'LONG', 'AND', 'LOUD', 'LONG', 'LIVE', 'THE', 'PRINCE', 'PRINCE', 'ISAU'] +4294-9934-0000-1912: ref=['HE', 'FELT', 'WHAT', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'AND', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVE', 'LATER'] +4294-9934-0000-1912: hyp=['HE', 'FELT', 'WITH', 'THE', 'EARTH', 'MAY', 'POSSIBLY', 'FEEL', 'AT', 'THE', 'MOMENT', 'WHEN', 'IT', 'IS', 'TORN', 'OPEN', 'WITH', 'THE', 'IRON', 'IN', 'ORDER', 'THAT', 'GRAIN', 'MAY', 'BE', 'DEPOSITED', 'WITHIN', 'IT', 'IT', 'FEELS', 'ONLY', 'THE', 'WOUND', 'THE', 'QUIVER', 'OF', 'THE', 'GERM', 'THE', 'JOY', 'OF', 'THE', 'FRUIT', 'ONLY', 'ARRIVES', 'LATER'] +4294-9934-0001-1913: ref=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECT', 'IT', 'ALREADY'] +4294-9934-0001-1913: hyp=['HE', 'HAD', 'BUT', 'JUST', 'ACQUIRED', 'A', 'FAITH', 'MUST', 'HE', 'THEN', 'REJECTED', 'ALREADY'] +4294-9934-0002-1914: ref=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0002-1914: hyp=['HE', 'AFFIRMED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'HE', 'DECLARED', 'TO', 'HIMSELF', 'THAT', 'HE', 'WOULD', 'NOT', 'DOUBT', 'AND', 'HE', 'BEGAN', 'TO', 'DOUBT', 'IN', 'SPITE', 'OF', 'HIMSELF'] +4294-9934-0003-1915: ref=['TO', 'STAND', 'BETWEEN', 'TWO', 
'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0003-1915: hyp=['TO', 'STAND', 'BETWEEN', 'TWO', 'RELIGIONS', 'FROM', 'ONE', 'OF', 'WHICH', 'YOU', 'HAVE', 'NOT', 'AS', 'YET', 'EMERGED', 'AND', 'ANOTHER', 'INTO', 'WHICH', 'YOU', 'HAVE', 'NOT', 'YET', 'ENTERED', 'IS', 'INTOLERABLE', 'AND', 'TWILIGHT', 'IS', 'PLEASING', 'ONLY', 'TO', 'BAT', 'LIKE', 'SOULS'] +4294-9934-0004-1916: ref=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0004-1916: hyp=['MARIUS', 'WAS', 'CLEAR', 'EYED', 'AND', 'HE', 'REQUIRED', 'THE', 'TRUE', 'LIGHT'] +4294-9934-0005-1917: ref=['THE', 'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0005-1917: hyp=['THE', 'HALF', 'LIGHTS', 'OF', 'DOUBT', 'PAINED', 'HIM'] +4294-9934-0006-1918: ref=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HALT', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0006-1918: hyp=['WHATEVER', 'MAY', 'HAVE', 'BEEN', 'HIS', 'DESIRE', 'TO', 'REMAIN', 'WHERE', 'HE', 'WAS', 'HE', 'COULD', 'NOT', 'HELP', 'THERE', 'HE', 'WAS', 'IRRESISTIBLY', 'CONSTRAINED', 'TO', 'CONTINUE', 'TO', 'ADVANCE', 'TO', 'EXAMINE', 'TO', 'THINK', 'TO', 'MARCH', 'FURTHER'] +4294-9934-0007-1919: ref=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0007-1919: hyp=['HE', 'FEARED', 'AFTER', 'HAVING', 'TAKEN', 'SO', 'MANY', 'STEPS', 'WHICH', 'HAD', 'BROUGHT', 'HIM', 'NEARER', 'TO', 'HIS', 'FATHER', 'TO', 'NOW', 'TAKE', 'A', 'STEP', 'WHICH', 'SHOULD', 'ESTRANGE', 'HIM', 'FROM', 'THAT', 'FATHER'] +4294-9934-0008-1920: ref=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0008-1920: hyp=['HIS', 'DISCOMFORT', 'WAS', 'AUGMENTED', 'BY', 'ALL', 'THE', 'REFLECTIONS', 'WHICH', 'OCCURRED', 'TO', 'HIM'] +4294-9934-0009-1921: ref=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0009-1921: hyp=['IN', 'THE', 'TROUBLED', 'STATE', 'OF', 'HIS', 'CONSCIENCE', 'HE', 'NO', 'LONGER', 'THOUGHT', 'OF', 'CERTAIN', 'SERIOUS', 'SIDES', 'OF', 'EXISTENCE'] +4294-9934-0010-1922: ref=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0010-1922: hyp=['THEY', 'SOON', 'ELBOWED', 'HIM', 'ABRUPTLY'] +4294-9934-0011-1923: ref=['REQUEST', 'COURFEYRAC', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0011-1923: hyp=['REQUEST', 'COURFEREK', 'TO', 'COME', 'AND', 'TALK', 'WITH', 'ME', 'SAID', 'MARIUS'] +4294-9934-0012-1924: ref=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'COURFEYRAC'] +4294-9934-0012-1924: hyp=['WHAT', 'IS', 'TO', 'BECOME', 'OF', 'YOU', 'SAID', 'COURFEYRAC'] +4294-9934-0013-1925: ref=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0013-1925: hyp=['WHAT', 'ARE', 'YOU', 'GOING', 'TO', 'DO', 'I', 'DO', 'NOT', 'KNOW'] +4294-9934-0014-1926: ref=['SILVER', 'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0014-1926: hyp=['SILVER', 
'GOLD', 'HERE', 'IT', 'IS'] +4294-9934-0015-1927: ref=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 'TROUSERS', 'A', 'WAISTCOAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0015-1927: hyp=['YOU', 'WILL', 'THEN', 'HAVE', 'ONLY', 'A', 'PAIR', 'OF', 'TROUSERS', 'A', 'WAISTCOAT', 'A', 'HAT', 'AND', 'A', 'COAT', 'AND', 'MY', 'BOOTS'] +4294-9934-0016-1928: ref=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0016-1928: hyp=['THAT', 'WILL', 'BE', 'ENOUGH'] +4294-9934-0017-1929: ref=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WILL', 'YOU', 'DO', 'AFTER', 'THAT'] +4294-9934-0017-1929: hyp=['NO', 'IT', 'IS', 'NOT', 'GOOD', 'WHAT', 'WE', 'DO', 'AFTER', 'THAT'] +4294-9934-0018-1930: ref=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0018-1930: hyp=['DO', 'YOU', 'KNOW', 'GERMAN', 'NO'] +4294-9934-0019-1931: ref=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] +4294-9934-0019-1931: hyp=['IT', 'IS', 'BADLY', 'PAID', 'WORK', 'BUT', 'ONE', 'CAN', 'LIVE', 'BY', 'IT'] +4294-9934-0020-1932: ref=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0020-1932: hyp=['THE', 'CLOTHES', 'DEALER', 'WAS', 'SENT', 'FOR'] +4294-9934-0021-1933: ref=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', "WATCHMAKER'S"] +4294-9934-0021-1933: hyp=['HE', 'PAID', 'TWENTY', 'FRANCS', 'FOR', 'THE', 'CAST', 'OFF', 'GARMENTS', 'THEY', 'WENT', 'TO', 'THE', 'WATCHMAKERS'] +4294-9934-0022-1934: ref=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0022-1934: hyp=['HE', 'BOUGHT', 'THE', 'WATCH', 'FOR', 'FORTY', 'FIVE', 'FRANCS'] +4294-9934-0023-1935: ref=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0023-1935: hyp=['HELLO', 'I', 'HAD', 'FORGOTTEN', 'THAT', 'SAID', 'MARIUS'] +4294-9934-0024-1936: ref=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0024-1936: hyp=['THE', 'LANDLORD', 'PRESENTED', 'HIS', 'BILL', 'WHICH', 'HAD', 'TO', 'BE', 'PAID', 'ON', 'THE', 'SPOT'] +4294-9934-0025-1937: ref=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0025-1937: hyp=['I', 'HAVE', 'TEN', 'FRANCS', 'LEFT', 'SAID', 'MARIUS'] +4294-9934-0026-1938: ref=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0026-1938: hyp=['THAT', 'WILL', 'BE', 'SWALLOWING', 'A', 'TONGUE', 'VERY', 'FAST', 'OR', 'A', 'HUNDRED', 'SOUS', 'VERY', 'SLOWLY'] +4294-9934-0027-1939: ref=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAW', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOLES', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'IN', 'A', 'SEALED', 'BOX'] +4294-9934-0027-1939: hyp=['ONE', 'MORNING', 'ON', 'HIS', 'RETURN', 'FROM', 'THE', 'LAST', 'SCHOOL', 'MARIUS', 'FOUND', 'A', 'LETTER', 'FROM', 'HIS', 'AUNT', 'AND', 'THE', 'SIXTY', 'PISTOL', 'THAT', 'IS', 'TO', 'SAY', 'SIX', 'HUNDRED', 'FRANCS', 'IN', 'GOLD', 'AND', 'A', 'SEALED', 'BOX'] +4294-9934-0028-1940: ref=['MARIUS', 'SENT', 'BACK', 'THE', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 'WITH', 'A', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUFFICIENT', 'MEANS', 'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0028-1940: hyp=['MARIUS', 'SENT', 'BACK', 'FOR', 'THIRTY', 'LOUIS', 'TO', 'HIS', 'AUNT', 
'WITH', 'THE', 'RESPECTFUL', 'LETTER', 'IN', 'WHICH', 'HE', 'STATED', 'THAT', 'HE', 'HAD', 'SUFFICIENT', 'MEANS', 'OF', 'SUBSISTENCE', 'AND', 'THAT', 'HE', 'SHOULD', 'BE', 'ABLE', 'THENCEFORTH', 'TO', 'SUPPLY', 'ALL', 'HIS', 'NEEDS'] +4294-9934-0029-1941: ref=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4294-9934-0029-1941: hyp=['AT', 'THAT', 'MOMENT', 'HE', 'HAD', 'THREE', 'FRANCS', 'LEFT'] +4350-10919-0000-2716: ref=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0000-2716: hyp=['HE', 'PERCEIVED', 'THAT', 'IT', 'WAS', 'NO', 'GOOD', 'TALKING', 'TO', 'THE', 'OLD', 'MAN', 'AND', 'THAT', 'THE', 'PRINCIPAL', 'PERSON', 'IN', 'THE', 'HOUSE', 'WAS', 'THE', 'MOTHER'] +4350-10919-0001-2717: ref=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 'HIS', 'PEARLS'] +4350-10919-0001-2717: hyp=['BEFORE', 'HER', 'HE', 'DECIDED', 'TO', 'SCATTER', 'HIS', 'PEARLS'] +4350-10919-0002-2718: ref=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0002-2718: hyp=['THE', 'PRINCESS', 'WAS', 'DISTRACTED', 'AND', 'DID', 'NOT', 'KNOW', 'WHAT', 'TO', 'DO', 'SHE', 'FELT', 'SHE', 'HAD', 'SINNED', 'AGAINST', 'KITTY'] +4350-10919-0003-2719: ref=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'FATE', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0003-2719: hyp=['WELL', 'DOCTOR', 'DECIDE', 'OUR', 'PHAETON', 'SAID', 'THE', 'PRINCESS', 'TELL', 'ME', 'EVERYTHING'] +4350-10919-0004-2720: ref=['IS', 'THERE', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0004-2720: hyp=['IS', 'THEIR', 'HOPE', 'SHE', 'MEANT', 'TO', 'SAY', 'BUT', 'HER', 'LIPS', 'QUIVERED', 'AND', 'SHE', 'COULD', 'NOT', 'UTTER', 'THE', 'QUESTION', 'WELL', 'DOCTOR'] +4350-10919-0005-2721: ref=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0005-2721: hyp=['AS', 'YOU', 'PLEASE', 'THE', 'PRINCESS', 'WENT', 'OUT', 'WITH', 'A', 'SIGH'] +4350-10919-0006-2722: ref=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0006-2722: hyp=['THE', 'FAMILY', 'DOCTOR', 'RESPECTFULLY', 'CEASED', 'IN', 'THE', 'MIDDLE', 'OF', 'HIS', 'OBSERVATIONS'] +4350-10919-0007-2723: ref=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALNUTRITION', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0007-2723: hyp=['AND', 'THERE', 'ARE', 'INDICATIONS', 'MALTRICIAN', 'NERVOUS', 'EXCITABILITY', 'AND', 'SO', 'ON'] +4350-10919-0008-2724: ref=['THE', 'QUESTION', 'STANDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERCULOUS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0008-2724: hyp=['THE', 'QUESTION', 'SENDS', 'THUS', 'IN', 'PRESENCE', 'OF', 'INDICATIONS', 'OF', 'TUBERK', 'AT', 'THIS', 'PROCESS', 'WHAT', 'IS', 'TO', 'BE', 'DONE', 'TO', 'MAINTAIN', 'NUTRITION'] +4350-10919-0009-2725: ref=['YES', "THAT'S", 'AN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0009-2725: hyp=['YES', 'I', 'CAN', 'UNDERSTOOD', 'THING', 'RESPONDED', 'THE', 'CELEBRATED', 'PHYSICIAN', 'AGAIN', 'GLANCING', 'AT', 'HIS', 'WATCH'] +4350-10919-0010-2726: ref=['BEG', 'PARDON', 'IS', 'THE', 'YAUSKY', 
'BRIDGE', 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'AROUND'] +4350-10919-0010-2726: hyp=['BEG', 'PARDON', 'IS', 'THE', 'YOKE', 'BRIDGE', 'DONE', 'YET', 'OR', 'SHALL', 'I', 'HAVE', 'TO', 'DRIVE', 'HER', 'ON'] +4350-10919-0011-2727: ref=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0011-2727: hyp=['HE', 'ASKED', 'AH', 'IT', 'IS'] +4350-10919-0012-2728: ref=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0012-2728: hyp=['OH', 'WELL', 'THEN', 'I', 'CAN', 'DO', 'IT', 'IN', 'TWENTY', 'MINUTES'] +4350-10919-0013-2729: ref=['AND', 'HOW', 'ABOUT', 'A', 'TOUR', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0013-2729: hyp=['AND', 'ABOUT', 'IT', 'TO', 'ABROAD', 'ASKED', 'THE', 'FAMILY', 'DOCTOR'] +4350-10919-0014-2730: ref=['WHAT', 'IS', 'WANTED', 'IS', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0014-2730: hyp=['WHAT', 'IS', 'WANTED', 'IS', 'THE', 'MEANS', 'OF', 'IMPROVING', 'NUTRITION', 'AND', 'NOT', 'FOR', 'LOWERING', 'IT'] +4350-10919-0015-2731: ref=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0015-2731: hyp=['THE', 'FAMILY', 'DOCTOR', 'LISTENED', 'ATTENTIVELY', 'AND', 'RESPECTFULLY'] +4350-10919-0016-2732: ref=['BUT', 'IN', 'FAVOR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0016-2732: hyp=['BUT', 'IN', 'FAVOUR', 'OF', 'FOREIGN', 'TRAVEL', 'I', 'WOULD', 'URGE', 'THE', 'CHANGE', 'OF', 'HABITS', 'THE', 'REMOVAL', 'FROM', 'CONDITIONS', 'CALLING', 'UP', 'REMINISCENCES'] +4350-10919-0017-2733: ref=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0017-2733: hyp=['AND', 'THEN', 'THE', 'MOTHER', 'WISHES', 'IT', 'HE', 'ADDED'] +4350-10919-0018-2734: ref=['AH', 'WELL', 'IN', 'THAT', 'CASE', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'QUACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0018-2734: hyp=['AH', 'WELL', 'THAT', 'HAS', 'TO', 'BE', 'SURE', 'LET', 'THEM', 'GO', 'ONLY', 'THOSE', 'GERMAN', 'CLACKS', 'ARE', 'MISCHIEVOUS'] +4350-10919-0019-2735: ref=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0019-2735: hyp=['OH', "TIME'S", 'UP', 'ALREADY', 'AND', 'HE', 'WENT', 'TO', 'THE', 'DOOR'] +4350-10919-0020-2736: ref=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0020-2736: hyp=['THE', 'CELEBRATED', 'DOCTOR', 'ANNOUNCED', 'TO', 'THE', 'PRINCESS', 'A', 'FEELING', 'OF', 'WHAT', 'WAS', 'DUE', 'FROM', 'HIM', 'DICTATED', 'HIS', 'DOING', 'SO', 'THAT', 'HE', 'OUGHT', 'TO', 'SEE', 'THE', 'PATIENT', 'ONCE', 'MORE'] +4350-10919-0021-2737: ref=['OH', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0021-2737: hyp=['O', 'NO', 'ONLY', 'A', 'FEW', 'DETAILS', 'PRINCESS', 'COME', 'THIS', 'WAY'] +4350-10919-0022-2738: ref=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0022-2738: hyp=['AND', 'THE', 'MOTHER', 'ACCOMPANIED', 'BY', 'THE', 'DOCTOR', 'WENT', 'INTO', 'THE', 'DRAWING', 'ROOM', 'TO', 'KITTY'] +4350-10919-0023-2739: ref=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 'FILLED', 'WITH', 'TEARS'] 
+4350-10919-0023-2739: hyp=['WHEN', 'THE', 'DOCTOR', 'CAME', 'IN', 'SHE', 'FLUSHED', 'CRIMSON', 'AND', 'HER', 'EYES', 'FILLED', 'WITH', 'TEARS'] +4350-10919-0024-2740: ref=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0024-2740: hyp=['SHE', 'ANSWERED', 'HIM', 'AND', 'ALL', 'AT', 'ONCE', 'GOT', 'UP', 'FURIOUS'] +4350-10919-0025-2741: ref=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0025-2741: hyp=['EXCUSE', 'ME', 'DOCTOR', 'BUT', 'THERE', 'IS', 'REALLY', 'NO', 'OBJECT', 'IN', 'THIS'] +4350-10919-0026-2742: ref=['THIS', 'IS', 'THE', 'THIRD', 'TIME', "YOU'VE", 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0026-2742: hyp=['THIS', 'IS', 'THE', 'THIRD', 'TIME', "YOU'VE", 'ASKED', 'ME', 'THE', 'SAME', 'THING'] +4350-10919-0027-2743: ref=['THE', 'CELEBRATED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENSE'] +4350-10919-0027-2743: hyp=['THE', 'CELEBRATED', 'DOCTOR', 'DID', 'NOT', 'TAKE', 'OFFENCE'] +4350-10919-0028-2744: ref=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KITTY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0028-2744: hyp=['NERVOUS', 'IRRITABILITY', 'HE', 'SAID', 'TO', 'THE', 'PRINCESS', 'WHEN', 'KATY', 'HAD', 'LEFT', 'THE', 'ROOM', 'HOWEVER', 'I', 'HAD', 'FINISHED'] +4350-10919-0029-2745: ref=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WERE', 'CERTAINLY', 'HARMLESS'] +4350-10919-0029-2745: hyp=['AND', 'THE', 'DOCTOR', 'BEGAN', 'SCIENTIFICALLY', 'EXPLAINING', 'TO', 'THE', 'PRINCESS', 'AS', 'AN', 'EXCEPTIONALLY', 'INTELLIGENT', 'WOMAN', 'THE', 'CONDITION', 'OF', 'THE', 'YOUNG', 'PRINCESS', 'AND', 'CONCLUDED', 'BY', 'INSISTING', 'ON', 'THE', 'DRINKING', 'OF', 'THE', 'WATERS', 'WHICH', 'WERE', 'CERTAINLY', 'HARMLESS'] +4350-10919-0030-2746: ref=['AT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0030-2746: hyp=['BUT', 'THE', 'QUESTION', 'SHOULD', 'THEY', 'GO', 'ABROAD', 'THE', 'DOCTOR', 'PLUNGED', 'INTO', 'DEEP', 'MEDITATION', 'AS', 'THOUGH', 'RESOLVING', 'A', 'WEIGHTY', 'PROBLEM'] +4350-10919-0031-2747: ref=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0031-2747: hyp=['FINALLY', 'HIS', 'DECISION', 'WAS', 'PRONOUNCED', 'THEY', 'WERE', 'TO', 'GO', 'ABROAD', 'BUT', 'TO', 'PUT', 'NO', 'FAITH', 'IN', 'FOREIGN', 'QUACKS', 'AND', 'TO', 'APPLY', 'TO', 'HIM', 'IN', 'ANY', 'NEED'] +4350-10919-0032-2748: ref=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0032-2748: hyp=['IT', 'SEEMED', 'AS', 'THOUGH', 'SOME', 'PIECE', 'OF', 'GOOD', 'FORTUNE', 'HAD', 'COME', 'TO', 'PASS', 'AFTER', 'THE', 'DOCTOR', 'HAD', 'GONE'] +4350-10919-0033-2749: ref=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 'CHEERFUL'] +4350-10919-0033-2749: hyp=['THE', 'MOTHER', 'WAS', 'MUCH', 'MORE', 
'CHEERFUL', 'WHEN', 'SHE', 'WENT', 'BACK', 'TO', 'HER', 'DAUGHTER', 'AND', 'KITTY', 'PRETENDED', 'TO', 'BE', 'MORE', 'CHEERFUL'] +4350-9170-0000-2750: ref=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVER', 'GROWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0000-2750: hyp=['EDUCATED', 'PEOPLE', 'OF', 'THE', 'UPPER', 'CLASSES', 'ARE', 'TRYING', 'TO', 'STIFLE', 'THE', 'EVERGREWING', 'SENSE', 'OF', 'THE', 'NECESSITY', 'OF', 'TRANSFORMING', 'THE', 'EXISTING', 'SOCIAL', 'ORDER'] +4350-9170-0001-2751: ref=['THIS', 'IS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0001-2751: hyp=['MISSUS', 'ABSOLUTELY', 'INCORRECT'] +4350-9170-0002-2752: ref=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTERESTS', 'FOR', 'THE', 'INTERESTS', 'OF', 'THE', 'GROUP'] +4350-9170-0002-2752: hyp=['IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IT', 'IS', 'SUPPOSED', 'THAT', 'SINCE', 'THE', 'AIM', 'OF', 'LIFE', 'IS', 'FOUND', 'IN', 'GROUPS', 'OF', 'INDIVIDUALS', 'INDIVIDUALS', 'WILL', 'VOLUNTARILY', 'SACRIFICE', 'THEIR', 'OWN', 'INTERESTS', 'FOR', 'THE', 'INTEREST', 'OF', 'THE', 'GROUP'] +4350-9170-0003-2753: ref=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0003-2753: hyp=['THE', 'CHAMPIONS', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'USUALLY', 'TRY', 'TO', 'CONNECT', 'THE', 'IDEA', 'OF', 'AUTHORITY', 'THAT', 'IS', 'OF', 'VIOLENCE', 'WITH', 'THE', 'IDEA', 'OF', 'MORAL', 'INFLUENCE', 'BUT', 'THIS', 'CONNECTION', 'IS', 'QUITE', 'IMPOSSIBLE'] +4350-9170-0004-2754: ref=['THE', 'MAN', 'WHO', 'IS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0004-2754: hyp=['THE', 'MAN', 'WHO', 'WAS', 'CONTROLLED', 'BY', 'MORAL', 'INFLUENCE', 'ACTS', 'IN', 'ACCORDANCE', 'WITH', 'HIS', 'OWN', 'DESIRES'] +4350-9170-0005-2755: ref=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0005-2755: hyp=['THE', 'BASIS', 'OF', 'AUTHORITY', 'IS', 'BODILY', 'VIOLENCE'] +4350-9170-0006-2756: ref=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'IN', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0006-2756: hyp=['THE', 'POSSIBILITY', 'OF', 'APPLYING', 'BODILY', 'VIOLENCE', 'TO', 'PEOPLE', 'IS', 'PROVIDED', 'ABOVE', 'ALL', 'BY', 'AN', 'ORGANIZATION', 'OF', 'ARMED', 'MEN', 'TRAINED', 'TO', 'ACT', 'IN', 'UNISON', 'AND', 'SUBMISSION', 'TO', 'ONE', 'WILL'] +4350-9170-0007-2757: ref=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0007-2757: hyp=['THESE', 'BANDS', 'OF', 'ARMED', 'MEN', 'SUBMISSIVE', 'TO', 'A', 'SINGLE', 'WILL', 'ARE', 'WHAT', 'CONSTITUTE', 'THE', 'ARMY'] +4350-9170-0008-2758: ref=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0008-2758: hyp=['THE', 'ARMY', 'HAS', 'ALWAYS', 'BEEN', 'AND', 'STILL', 'IS', 
'THE', 'BASIS', 'OF', 'POWER'] +4350-9170-0009-2759: ref=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0009-2759: hyp=['POWER', 'IS', 'ALWAYS', 'IN', 'THE', 'HANDS', 'OF', 'THOSE', 'WHO', 'CONTROL', 'THE', 'ARMY', 'AND', 'ALL', 'MEN', 'IN', 'POWER', 'FROM', 'THE', 'ROMAN', 'CAESARS', 'TO', 'THE', 'RUSSIAN', 'AND', 'GERMAN', 'EMPERORS', 'TAKE', 'MORE', 'INTEREST', 'IN', 'THEIR', 'ARMY', 'THAN', 'IN', 'ANYTHING', 'AND', 'COURT', 'POPULARITY', 'IN', 'THE', 'ARMY', 'KNOWING', 'THAT', 'IF', 'THAT', 'IS', 'ON', 'THEIR', 'SIDE', 'THEIR', 'POWER', 'IS', 'SECURE'] +4350-9170-0010-2760: ref=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0010-2760: hyp=['INDEED', 'IT', 'COULD', 'NOT', 'BE', 'OTHERWISE'] +4350-9170-0011-2761: ref=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0011-2761: hyp=['ONLY', 'UNDER', 'THOSE', 'CONDITIONS', 'COULD', 'THE', 'SOCIAL', 'ORGANIZATION', 'BE', 'JUSTIFIED'] +4350-9170-0012-2762: ref=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'IN', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0012-2762: hyp=['BUT', 'SINCE', 'THIS', 'IS', 'NOT', 'THE', 'CASE', 'AND', 'ON', 'THE', 'CONTRARY', 'MEN', 'AND', 'POWER', 'ARE', 'ALWAYS', 'FAR', 'FROM', 'BEING', 'SAINTS', 'THROUGH', 'THE', 'VERY', 'FACT', 'OF', 'THEIR', 'POSSESSION', 'OF', 'POWER', 'THE', 'SOCIAL', 'ORGANIZATION', 'BASED', 'ON', 'POWER', 'HAS', 'NO', 'JUSTIFICATION'] +4350-9170-0013-2763: ref=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARD', 'OF', 'MORALS', 'AND', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0013-2763: hyp=['EVEN', 'IF', 'THERE', 'WAS', 'ONCE', 'A', 'TIME', 'WHEN', 'OWING', 'TO', 'THE', 'LOW', 'STANDARDS', 'OF', 'MORALS', 'WHEN', 'THE', 'DISPOSITION', 'OF', 'MEN', 'TO', 'VIOLENCE', 'THE', 'EXISTENCE', 'OF', 'AN', 'AUTHORITY', 'TO', 'RESTRAIN', 'SUCH', 'VIOLENCE', 'WAS', 'AN', 'ADVANTAGE', 'BECAUSE', 'THE', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'WAS', 'LESS', 'THAN', 'THE', 'VIOLENCE', 'OF', 'INDIVIDUALS', 'ONE', 'CANNOT', 'BUT', 'SEE', 'THAT', 'THIS', 'ADVANTAGE', 'COULD', 'NOT', 'BE', 'LASTING'] +4350-9170-0014-2764: ref=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'SEEMS', 'STILL', 'LESS', 'AND', 'THE', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 'MORE', 'SECURE'] +4350-9170-0014-2764: hyp=['BETWEEN', 'THE', 'MEMBERS', 'OF', 'ONE', 'STATE', 'SUBJECT', 'TO', 'A', 'SINGLE', 'AUTHORITY', 'THE', 'STRIPE', 'BETWEEN', 
'THE', 'INDIVIDUALS', 'SEEMED', 'STILL', 'LESS', 'AND', 'A', 'LIFE', 'OF', 'THE', 'STATE', 'SEEMS', 'EVEN', 'MORE', 'SECURE'] +4350-9170-0015-2765: ref=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLE', 'AND', 'CONQUEST'] +4350-9170-0015-2765: hyp=['IT', 'WAS', 'PRODUCED', 'ON', 'ONE', 'HAND', 'BY', 'THE', 'NATURAL', 'GROWTH', 'OF', 'POPULATION', 'AND', 'ON', 'THE', 'OTHER', 'BY', 'STRUGGLING', 'CONQUEST'] +4350-9170-0016-2766: ref=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0016-2766: hyp=['AFTER', 'CONQUEST', 'THE', 'POWER', 'OF', 'THE', 'EMPEROR', 'PUTS', 'AN', 'END', 'TO', 'INTERNAL', 'DISSENSIONS', 'AND', 'SO', 'THE', 'STATE', 'CONCEPTION', 'OF', 'LIFE', 'JUSTIFIES', 'ITSELF'] +4350-9170-0017-2767: ref=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0017-2767: hyp=['BUT', 'THIS', 'JUSTIFICATION', 'IS', 'NEVER', 'MORE', 'THAN', 'TEMPORARY'] +4350-9170-0018-2768: ref=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISSENTIENT', 'INDIVIDUALS'] +4350-9170-0018-2768: hyp=['INTERNAL', 'DISSENSIONS', 'DISAPPEAR', 'ONLY', 'IN', 'PROPORTION', 'TO', 'THE', 'DEGREE', 'OF', 'OPPRESSION', 'EXERTED', 'BY', 'THE', 'AUTHORITY', 'OVER', 'THE', 'DISINDIAN', 'INDIVIDUALS'] +4350-9170-0019-2769: ref=['GOVERNMENT', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0019-2769: hyp=['GOVERNOR', 'AUTHORITY', 'EVEN', 'IF', 'IT', 'DOES', 'SUPPRESS', 'PRIVATE', 'VIOLENCE', 'ALWAYS', 'INTRODUCES', 'INTO', 'THE', 'LIFE', 'OF', 'MEN', 'FRESH', 'FORMS', 'OF', 'VIOLENCE', 'WHICH', 'TEND', 'TO', 'BECOME', 'GREATER', 'AND', 'GREATER', 'IN', 'PROPORTION', 'TO', 'THE', 'DURATION', 'AND', 'STRENGTH', 'OF', 'THE', 'GOVERNMENT'] +4350-9170-0020-2770: ref=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLDEN', 'EGGS'] +4350-9170-0020-2770: hyp=['AND', 'THEREFORE', 'THE', 'OPPRESSION', 'OF', 'THE', 'OPPRESSED', 'ALWAYS', 'GOES', 'ON', 'GROWING', 'UP', 'TO', 'THE', 'FURTHEST', 'LIMIT', 'BEYOND', 'WHICH', 'IT', 'CANNOT', 'GO', 'WITHOUT', 'KILLING', 'THE', 'GOOSE', 'WITH', 'THE', 'GOLD', "KNIFE'S"] +4350-9170-0021-2771: ref=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIMES', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0021-2771: hyp=['THE', 'MOST', 'CONVINCING', 'EXAMPLE', 'OF', 'THIS', 'IS', 'TO', 'BE', 'FOUND', 'IN', 'THE', 'CONDITION', 'OF', 'THE', 'WORKING', 'CLASSES', 'OF', 'OUR', 'EPOCH', 'WHO', 'ARE', 'IN', 'REALITY', 'NO', 'BETTER', 'THAN', 'THE', 'SLAVES', 'OF', 'ANCIENT', 'TIME', 'SUBDUED', 'BY', 'CONQUEST'] +4350-9170-0022-2772: 
ref=['SO', 'IT', 'HAS', 'ALWAYS', 'BEEN'] +4350-9170-0022-2772: hyp=['SO', 'IT', 'IS', 'ALWAYS', 'THEN'] +4350-9170-0023-2773: ref=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXIST', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0023-2773: hyp=['FOOTNOTE', 'THE', 'FACT', 'THAT', 'IN', 'AMERICA', 'THE', 'ABUSES', 'OF', 'AUTHORITY', 'EXISTS', 'IN', 'SPITE', 'OF', 'THE', 'SMALL', 'NUMBER', 'OF', 'THEIR', 'TROOPS', 'NOT', 'ONLY', 'FAILS', 'TO', 'DISPROVE', 'THIS', 'POSITION', 'BUT', 'POSITIVELY', 'CONFIRMS', 'IT'] +4350-9170-0024-2774: ref=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTON'S", 'MEN', 'THEY', 'FEEL', 'THAT', 'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0024-2774: hyp=['THE', 'UPPER', 'CLASSES', 'KNOW', 'THAT', 'AN', 'ARMY', 'OF', 'FIFTY', 'THOUSAND', 'WILL', 'SOON', 'BE', 'INSUFFICIENT', 'AND', 'NO', 'LONGER', 'RELYING', 'ON', "PINKERTIN'S", 'MEN', 'THEY', 'FEEL', 'THAT', 'THE', 'SECURITY', 'OF', 'THEIR', 'POSITION', 'DEPENDS', 'ON', 'THE', 'INCREASED', 'STRENGTH', 'OF', 'THE', 'ARMY'] +4350-9170-0025-2775: ref=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIERS', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTER', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0025-2775: hyp=['THE', 'REASON', 'TO', 'WHICH', 'HE', 'GAVE', 'EXPRESSION', 'IS', 'ESSENTIALLY', 'THE', 'SAME', 'AS', 'THAT', 'WHICH', 'MADE', 'THE', 'FRENCH', 'KINGS', 'AND', 'THE', 'POPES', 'ENGAGE', 'SWISS', 'AND', 'SCOTCH', 'GUARDS', 'AND', 'MAKES', 'THE', 'RUSSIAN', 'AUTHORITIES', 'OF', 'TO', 'DAY', 'SO', 'CAREFULLY', 'DISTRIBUTE', 'THE', 'RECRUITS', 'SO', 'THAT', 'THE', 'REGIMENTS', 'FROM', 'THE', 'FRONTIER', 'THEY', 'ARE', 'STATIONED', 'IN', 'CENTRAL', 'DISTRICTS', 'AND', 'THE', 'REGIMENTS', 'FROM', 'THE', 'CENTRE', 'ARE', 'STATIONED', 'ON', 'THE', 'FRONTIERS'] +4350-9170-0026-2776: ref=['THE', 'MEANING', 'OF', "CAPRIVI'S", 'SPEECH', 'PUT', 'INTO', 'PLAIN', 'LANGUAGE', 'IS', 'THAT', 'FUNDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0026-2776: hyp=['THE', 'MEANING', 'OF', 'CAPRIVY', 'SPEECH', 'PUT', 'INTO', 'PLAY', 'AND', 'LANGUAGE', 'IS', 'THAT', 'FUNDS', 'ARE', 'NEEDED', 'NOT', 'TO', 'RESIST', 'FOREIGN', 'FOES', 'BUT', 'TO', 'BUY', 'UNDER', 'OFFICERS', 'TO', 'BE', 'READY', 'TO', 'ACT', 'AGAINST', 'THE', 'ENSLAVED', 'TOILING', 'MASSES'] +4350-9170-0027-2777: ref=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THINGS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0027-2777: hyp=['AND', 'THIS', 'ABNORMAL', 'ORDER', 'OF', 'THANKS', 'IS', 'MAINTAINED', 'BY', 'THE', 'ARMY'] +4350-9170-0028-2778: ref=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 
'OTHER', 'GOVERNMENTS', 'EXPLOITING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENCE', 'IN', 'THE', 'SAME', 'WAY', 'AND', 'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVED', 'SUBJECTS'] +4350-9170-0028-2778: hyp=['BUT', 'THERE', 'IS', 'NOT', 'ONLY', 'ONE', 'GOVERNMENT', 'THERE', 'ARE', 'OTHER', 'GOVERNMENTS', 'EXPLODING', 'THEIR', 'SUBJECTS', 'BY', 'VIOLENT', 'AND', 'THE', 'SAME', 'WAY', 'AND', 'ARE', 'ALWAYS', 'READY', 'TO', 'POUNCE', 'DOWN', 'ON', 'ANY', 'OTHER', 'GOVERNMENT', 'AND', 'CARRY', 'OFF', 'THE', 'FRUITS', 'OF', 'THE', 'TOIL', 'OF', 'ITS', 'ENSLAVE', 'SUBJECTS'] +4350-9170-0029-2779: ref=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOR', 'BRIGANDS'] +4350-9170-0029-2779: hyp=['AND', 'SO', 'EVERY', 'GOVERNMENT', 'NEEDS', 'AN', 'ARMY', 'ALSO', 'TO', 'PROTECT', 'ITS', 'BOOTY', 'FROM', 'ITS', 'NEIGHBOUR', 'BRIGANDS'] +4350-9170-0030-2780: ref=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'ONE', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0030-2780: hyp=['THIS', 'INCREASE', 'IS', 'CONTAGIOUS', 'AS', 'MONTESQUIEU', 'POINTED', 'OUT', 'A', 'HUNDRED', 'FIFTY', 'YEARS', 'AGO'] +4350-9170-0031-2781: ref=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SOURCE', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0031-2781: hyp=['EVERY', 'INCREASE', 'IN', 'THE', 'ARMY', 'OF', 'ONE', 'STATE', 'WITH', 'THE', 'AIM', 'OF', 'SELF', 'DEFENSE', 'AGAINST', 'ITS', 'SUBJECTS', 'BECOMES', 'A', 'SORT', 'OF', 'DANGER', 'FOR', 'NEIGHBORING', 'STATES', 'AND', 'CALLS', 'FOR', 'A', 'SIMILAR', 'INCREASE', 'IN', 'THEIR', 'ARMIES'] +4350-9170-0032-2782: ref=['THE', 'DESPOTISM', 'OF', 'A', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0032-2782: hyp=['THE', 'DESPOTISM', 'OF', 'THE', 'GOVERNMENT', 'ALWAYS', 'INCREASES', 'WITH', 'THE', 'STRENGTH', 'OF', 'THE', 'ARMY', 'AND', 'ITS', 'EXTERNAL', 'SUCCESSES', 'AND', 'THE', 'AGGRESSIVENESS', 'OF', 'A', 'GOVERNMENT', 'INCREASES', 'WITH', 'ITS', 'INTERNAL', 'DESPOTISM'] +4350-9170-0033-2783: ref=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'IN', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0033-2783: hyp=['THE', 'RIVALRY', 'OF', 'THE', 'EUROPEAN', 'STATES', 'AND', 'CONSTANTLY', 'INCREASING', 'THEIR', 'FORCES', 'HAS', 'REDUCED', 'THEM', 'TO', 'THE', 'NECESSITY', 'OF', 'HAVING', 'RECOURSE', 'TO', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'SINCE', 'BY', 'THAT', 'MEANS', 'THE', 'GREATEST', 'POSSIBLE', 'NUMBER', 'OF', 'SOLDIERS', 'IS', 'OBTAINED', 'AT', 'THE', 'LEAST', 'POSSIBLE', 'EXPENSE'] +4350-9170-0034-2784: ref=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALL', 'CITIZENS', 'HAVE', 
'BECOME', 'THEIR', 'OWN', 'OPPRESSORS'] +4350-9170-0034-2784: hyp=['AND', 'BY', 'THIS', 'MEANS', 'ALL', 'CITIZENS', 'ARE', 'UNDER', 'ARMS', 'TO', 'SUPPORT', 'THE', 'INIQUITIES', 'PRACTICED', 'UPON', 'THEM', 'ALL', 'CITIZENS', 'HAVE', 'BECOME', 'THEIR', 'OWN', 'IMPRESSORS'] +4350-9170-0035-2785: ref=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'IN', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0035-2785: hyp=['THIS', 'INCONSISTENCY', 'HAS', 'BECOME', 'OBVIOUS', 'AND', 'UNIVERSAL', 'MILITARY', 'SERVICE'] +4350-9170-0036-2786: ref=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'TO', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0036-2786: hyp=['IN', 'FACT', 'THE', 'WHOLE', 'SIGNIFICANCE', 'OF', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'CONSISTS', 'IN', "MAN'S", 'RECOGNITION', 'OF', 'THE', 'BARBARITY', 'OF', 'STRIFE', 'BETWEEN', 'INDIVIDUALS', 'AND', 'THE', 'TRANSITORINESS', 'OF', 'PERSONAL', 'LIFE', 'ITSELF', 'AND', 'THE', 'TRANSFERENCE', 'OF', 'THE', 'AIM', 'OF', 'LIFE', 'THE', 'GROUPS', 'OF', 'PERSONS'] +4350-9170-0037-2787: ref=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0037-2787: hyp=['BUT', 'WITH', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'IT', 'COMES', 'TO', 'PASS', 'THAT', 'MEN', 'AFTER', 'MAKING', 'EVERY', 'SACRIFICE', 'TO', 'GET', 'RID', 'OF', 'THE', 'CRUELTY', 'OF', 'STRIFE', 'AND', 'THE', 'INSECURITY', 'OF', 'EXISTENCE', 'ARE', 'CALLED', 'UPON', 'TO', 'FACE', 'ALL', 'THE', 'PERILS', 'THEY', 'HAD', 'MEANT', 'TO', 'AVOID'] +4350-9170-0038-2788: ref=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSE', 'THE', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIFE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0038-2788: hyp=['BUT', 'INSTEAD', 'OF', 'DOING', 'THAT', 'THEY', 'EXPOSED', 'TO', 'INDIVIDUALS', 'TO', 'THE', 'SAME', 'NECESSITY', 'OF', 'STRIFE', 'SUBSTITUTING', 'STRIFE', 'WITH', 'INDIVIDUALS', 'OF', 'OTHER', 'STATES', 'FOR', 'STRIFE', 'WITH', 'NEIGHBORS'] +4350-9170-0039-2789: ref=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0039-2789: hyp=['THE', 'TAXES', 'RAISED', 'FROM', 'THE', 'PEOPLE', 'FOR', 'WAR', 'PREPARATIONS', 'ABSORB', 'THE', 'GREATER', 'PART', 'OF', 'THE', 'PRODUCE', 'OF', 'LABOR', 'WHICH', 'THE', 'ARMY', 'OUGHT', 'TO', 'DEFEND'] +4350-9170-0040-2790: ref=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0040-2790: hyp=['THE', 'DANGER', 'OF', 'WAR', 'EVER', 'READY', 'TO', 'BREAK', 'OUT', 'RENDERS', 'ALL', 'REFORMS', 'OF', 'LIFE', 'SOCIAL', 'LIFE', 'VAIN', 'AND', 'FRUITLESS'] +4350-9170-0041-2791: ref=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 
'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0041-2791: hyp=['BUT', 'THE', 'FATAL', 'SIGNIFICANCE', 'OF', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'AS', 'THE', 'MANIFESTATION', 'OF', 'THE', 'CONTRADICTION', 'INHERENT', 'IN', 'THE', 'SOCIAL', 'CONCEPTION', 'OF', 'LIFE', 'IS', 'NOT', 'ONLY', 'APPARENT', 'IN', 'THAT'] +4350-9170-0042-2792: ref=['GOVERNMENTS', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENSE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0042-2792: hyp=['GOVERNMENT', 'ASSERT', 'THAT', 'ARMIES', 'ARE', 'NEEDED', 'ABOVE', 'ALL', 'FOR', 'EXTERNAL', 'DEFENSE', 'BUT', 'THAT', 'IS', 'NOT', 'TRUE'] +4350-9170-0043-2793: ref=['THEY', 'ARE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'IN', 'ALL', 'THE', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0043-2793: hyp=['THERE', 'NEEDED', 'PRINCIPALLY', 'AGAINST', 'THEIR', 'SUBJECTS', 'AND', 'EVERY', 'MAN', 'UNDER', 'UNIVERSAL', 'MILITARY', 'SERVICE', 'BECOMES', 'AN', 'ACCOMPLICE', 'AND', 'ALL', 'THAT', 'ACTS', 'OF', 'VIOLENCE', 'OF', 'THE', 'GOVERNMENT', 'AGAINST', 'THE', 'CITIZENS', 'WITHOUT', 'ANY', 'CHOICE', 'OF', 'HIS', 'OWN'] +4350-9170-0044-2794: ref=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0044-2794: hyp=['AND', 'FOR', 'THE', 'SAKE', 'OF', 'WHAT', 'AM', 'I', 'MAKING', 'THEM'] +4350-9170-0045-2795: ref=['I', 'AM', 'EXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'THE', 'STATE', 'TO', 'MAKE', 'THESE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0045-2795: hyp=['I', 'UNEXPECTED', 'FOR', 'THE', 'SAKE', 'OF', 'A', 'STATE', 'TO', 'MAKE', 'THESE', 'SACRIFICES', 'TO', 'RENOUNCE', 'EVERYTHING', 'THAT', 'CAN', 'BE', 'PRECIOUS', 'TO', 'MAN', 'PEACE', 'FAMILY', 'SECURITY', 'AND', 'HUMAN', 'DIGNITY'] +4350-9170-0046-2796: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0046-2796: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'SAY', 'WE', 'SHOULD', 'BE', 'EXPOSED', 'TO', 'THE', 'ATTACKS', 'OF', 'EVIL', 'DISPOSED', 'PERSONS', 'IN', 'OUR', 'OWN', 'COUNTRY'] +4350-9170-0047-2797: ref=['WE', 'KNOW', 'NOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0047-2797: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'THREATS', 'AND', 'PUNISHMENTS', 'CANNOT', 'DIMINISH', 'THEIR', 'NUMBER', 'THAT', 'THAT', 'CAN', 'ONLY', 'BE', 'DONE', 'BY', 'CHANGE', 'OF', 'ENVIRONMENT', 'AND', 'MORAL', 'INFLUENCE'] +4350-9170-0048-2798: ref=['SO', 'THAT', 'THE', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOSED', 'PERSONS', 'EVEN', 'IF', 'IT', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0048-2798: hyp=['SO', 'THAT', 'THIS', 'JUSTIFICATION', 'OF', 'STATE', 'VIOLENCE', 'ON', 'THE', 'GROUND', 'OF', 'THE', 'PROTECTION', 
'IT', 'GIVES', 'US', 'FROM', 'EVIL', 'DISPOS', 'PERSONS', 'EVEN', 'IF', 'I', 'HAD', 'SOME', 'FOUNDATION', 'THREE', 'OR', 'FOUR', 'CENTURIES', 'AGO', 'HAS', 'NONE', 'WHATEVER', 'NOW'] +4350-9170-0049-2799: ref=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0049-2799: hyp=['EXCEPT', 'FOR', 'THE', 'STATE', 'THEY', 'TELL', 'US', 'WE', 'SHOULD', 'NOT', 'HAVE', 'ANY', 'RELIGION', 'EDUCATION', 'CULTURE', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'SO', 'ON'] +4350-9170-0050-2800: ref=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 'NEEDED', 'FOR', 'DOING', 'ANY', 'THING'] +4350-9170-0050-2800: hyp=['WITHOUT', 'THE', 'STATE', 'MEN', 'WOULD', 'NOT', 'HAVE', 'BEEN', 'ABLE', 'TO', 'FORM', 'THE', 'SOCIAL', 'INSTITUTIONS', 'NEEDED', 'FOR', 'DOING', 'ANYTHING'] +4350-9170-0051-2801: ref=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0051-2801: hyp=['THIS', 'ARGUMENT', 'TOO', 'WAS', 'WELL', 'FOUNDED', 'ONLY', 'SOME', 'CENTURIES', 'AGO'] +4350-9170-0052-2802: ref=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0052-2802: hyp=['THE', 'GREAT', 'EXTENSION', 'OF', 'MEANS', 'OF', 'COMMUNICATION', 'AND', 'INTERCHANGE', 'OF', 'IDEAS', 'HAS', 'MADE', 'MEN', 'COMPLETELY', 'ABLE', 'TO', 'DISPENSE', 'WITH', 'STATE', 'AID', 'IN', 'FORMING', 'SOCIETIES', 'ASSOCIATIONS', 'CORPORATIONS', 'AND', 'CONGRESSES', 'FOR', 'SCIENTIFIC', 'ECONOMIC', 'AND', 'POLITICAL', 'OBJECTS'] +4350-9170-0053-2803: ref=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0053-2803: hyp=['WITHOUT', 'GOVERNMENTS', 'NATIONS', 'WOULD', 'BE', 'ENSLAVED', 'BY', 'THEIR', 'NEIGHBORS'] +4350-9170-0054-2804: ref=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0054-2804: hyp=['THE', 'GOVERNMENT', 'THEY', 'TELL', 'US', 'WITH', 'ITS', 'ARMY', 'IS', 'NECESSARY', 'TO', 'DEFEND', 'US', 'FROM', 'NEIGHBORING', 'STATES', 'WHO', 'MIGHT', 'ENSLAVE', 'US'] +4350-9170-0055-2805: ref=['AND', 'IF', 'DEFENSE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0055-2805: hyp=['AND', 'IF', 'DEFENCE', 'AGAINST', 'BARBAROUS', 'NATIONS', 'IS', 'MEANT', 'ONE', 'THOUSANDTH', 'PART', 'OF', 'THE', 'TROOPS', 'NOW', 'UNDER', 'ARMS', 'WOULD', 'BE', 'AMPLY', 'SUFFICIENT', 'FOR', 'THAT', 'PURPOSE'] +4350-9170-0056-2806: ref=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 'OF', 'SUCH', 'ATTACKS'] +4350-9170-0056-2806: hyp=['THE', 'POWER', 'OF', 'THE', 'STATE', 'FAR', 'FROM', 'BEING', 'A', 'SECURITY', 'AGAINST', 'THE', 'ATTACKS', 'OF', 'OUR', 'NEIGHBORS', 'EXPOSES', 'US', 'ON', 'THE', 'CONTRARY', 'TO', 'MUCH', 'GREATER', 'DANGER', 
'OF', 'SUCH', 'ATTACKS'] +4350-9170-0057-2807: ref=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATE', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0057-2807: hyp=['EVEN', 'LOOKING', 'AT', 'IT', 'PRACTICALLY', 'WEIGHING', 'THAT', 'IS', 'TO', 'SAY', 'ALL', 'THE', 'BURDENS', 'LAID', 'ON', 'HIM', 'BY', 'THE', 'STATES', 'NO', 'MAN', 'CAN', 'FAIL', 'TO', 'SEE', 'THAT', 'FOR', 'HIM', 'PERSONALLY', 'TO', 'COMPLY', 'WITH', 'THE', 'STATE', 'DEMANDS', 'AND', 'SERVE', 'IN', 'THE', 'ARMY', 'WOULD', 'IN', 'THE', 'MAJORITY', 'OF', 'CASES', 'BE', 'MORE', 'DISADVANTAGEOUS', 'THAN', 'TO', 'REFUSE', 'TO', 'DO', 'SO'] +4350-9170-0058-2808: ref=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0058-2808: hyp=['TO', 'RESIST', 'WOULD', 'NEED', 'INDEPENDENT', 'THOUGHT', 'AND', 'EFFORT', 'OF', 'WHICH', 'EVERY', 'MAN', 'IS', 'NOT', 'CAPABLE'] +4350-9170-0059-2809: ref=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASSES', 'AN', 'OPPRESSOR'] +4350-9170-0059-2809: hyp=['SO', 'MUCH', 'FOR', 'THE', 'ADVANTAGES', 'OF', 'DISADVANTAGES', 'OF', 'BOTH', 'LINES', 'OF', 'CONDUCT', 'FOR', 'A', 'MAN', 'OF', 'THE', 'WEALTHY', 'CLASS', 'AND', 'OPPRESSOR'] +4350-9170-0060-2810: ref=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4350-9170-0060-2810: hyp=['FOR', 'A', 'MAN', 'OF', 'THE', 'POOR', 'WORKING', 'CLASS', 'THE', 'ADVANTAGES', 'AND', 'DISADVANTAGES', 'WILL', 'BE', 'THE', 'SAME', 'BUT', 'WITH', 'A', 'GREAT', 'INCREASE', 'OF', 'DISADVANTAGES'] +4852-28311-0000-2098: ref=['SAY', 'YOU', 'KNOW', 'SUMTHIN'] +4852-28311-0000-2098: hyp=['SAY', 'YOU', 'KNOW', 'SOMETHING'] +4852-28311-0001-2099: ref=['CHRIS', 'LOOKED', 'FROM', 'A', 'NICKEL', 'PLATED', 'FLASHLIGHT', 'TO', 'A', 'CAR', 'JACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0001-2099: hyp=['CHRIS', 'LOOKED', 'FROM', 'MENDICULATED', 'FLASH', 'LIKE', 'TO', 'A', 'CAR', 'JACK', 'AND', 'SPARK', 'PLUG'] +4852-28311-0002-2100: ref=['KNOW', 'WHO', 'NEEDS', 'A', 'JOB', 'BAD', "THAT'S", 'JAKEY', 'HARRIS'] +4852-28311-0002-2100: hyp=['NO', 'ONE', 'NEEDS', 'A', 'JOB', 'BAN', "THAT'S", 'JAKIE', 'HARRIS'] +4852-28311-0003-2101: ref=['O', 'K', 'HE', 'SAID'] +4852-28311-0003-2101: hyp=['O', 'K', 'HE', 'SAID'] +4852-28311-0004-2102: ref=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0004-2102: hyp=['ONLY', 'WHY', "DIDN'T", 'YOU', 'ASK', 'HIM', 'YOURSELF'] +4852-28311-0005-2103: ref=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'AN', 'ELASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'INTO', 'M', 'STREET'] +4852-28311-0005-2103: hyp=['MIKE', 'BECAME', 'UNEASY', 'AND', 'FISHED', 'IT', 'MOLASTIC', 'BAND', 'OUT', 'OF', 'HIS', 'POCKET', 'MADE', 'A', 'FLICK', 'OF', 'PAPER', 'AND', 'SENT', 'IT', 'SOARING', 'OUT', 'IN', 'EM', 'STREET'] +4852-28311-0006-2104: ref=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] 
+4852-28311-0006-2104: hyp=['WELL', 'HE', 'ADMITTED', 'I', 'DID'] +4852-28311-0007-2105: ref=['CHRIS', 'ASKED', 'AND', 'FOR', 'THE', 'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0007-2105: hyp=['CHRIS', 'ASKED', 'HIM', 'FOR', 'THE', 'FIRST', 'TIME', 'THAT', 'DAY', 'THE', 'HEAVY', 'WEIGHT', 'HE', 'CARRIED', 'WITHIN', 'HIM', 'LIFTED', 'AND', 'LIGHTENED', 'A', 'LITTLE'] +4852-28311-0008-2106: ref=['THINK', 'HE', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0008-2106: hyp=['THINKING', 'REALLY', 'NEEDS', 'IT', 'HE', 'PURSUED'] +4852-28311-0009-2107: ref=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKEY', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGING', 'INTO', 'THE', 'STREETS', 'WAS', 'NOT', 'WHAT', 'CHRIS', 'HAD', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0009-2107: hyp=['HE', 'WOULD', 'HAVE', 'LIKED', 'TO', 'GET', 'THE', 'JOB', 'FOR', 'JAKIE', 'WHO', 'NEEDED', 'IT', 'BUT', 'SOMEHOW', 'THE', 'TASK', 'OF', 'FACING', 'MISTER', 'WICKER', 'ESPECIALLY', 'NOW', 'THAT', 'THE', 'LIGHT', 'WAS', 'GOING', 'AND', 'DUSK', 'EDGED', 'INTO', 'THE', 'STREETS', 'WAS', 'NOT', 'WHAT', 'CHRISTEN', 'INTENDED', 'FOR', 'ENDING', 'THE', 'AFTERNOON'] +4852-28311-0010-2108: ref=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', 'ONCE', 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'CHRIS', 'WAS', 'ONLY', 'PARTLY', 'ENCOURAGED'] +4852-28311-0010-2108: hyp=["MIKE'S", 'EXPRESSION', 'CHANGED', 'AT', "ONE'S", 'TO', 'ONE', 'OF', 'TRIUMPH', 'BUT', 'BRUCE', 'WAS', 'ONLY', 'PARTIALLY', 'ENCOURAGED'] +4852-28311-0011-2109: ref=['BETCHA', "AREN'T", 'GOIN', 'AFTER', 'ALL', 'CHRIS', 'TURNED', 'ON', 'HIM'] +4852-28311-0011-2109: hyp=['BETTER', 'AND', 'GOIN', 'AFTER', 'ALL', 'THIS', 'TURNED', 'TO', 'ON', 'HIM'] +4852-28311-0012-2110: ref=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0012-2110: hyp=['MIKE', 'WAS', 'STANDING', 'ON', 'THE', 'CORNER'] +4852-28311-0013-2111: ref=['AW', 'SHUCKS'] +4852-28311-0013-2111: hyp=['AH', 'SHOCKS'] +4852-28311-0014-2112: ref=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'THE', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0014-2112: hyp=['CHRIS', 'STARTED', 'OFF', 'ONCE', 'MORE', 'PASSING', 'A', 'BLEAK', 'LITTLE', 'VICTORIAN', 'CHURCH', 'PERCHED', 'ON', 'THE', 'HILL', 'ABOVE', 'MISTER', "WICKER'S", 'HOUSE'] +4852-28311-0015-2113: ref=['AN', 'EMPTY', 'LOT', 'CUT', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'L', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', 'WICKER', 'AS', 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0015-2113: hyp=['AN', 'EMPTY', 'LOT', 'CUT', 'IN', 'INTO', 'BY', 'CHURCH', 'LANE', 'GAVE', 'A', 'LOOK', 'OF', 'ISOLATION', 'TO', 'THE', 'ALE', 'SHAPED', 'BRICK', 'BUILDING', 'THAT', 'SERVED', 'MISTER', "WICKER'S", 'BOTH', 'HOUSE', 'AND', 'PLACE', 'OF', 'BUSINESS'] +4852-28311-0016-2114: ref=['THE', 'LONGER', 'WING', 'TOWARD', 'THE', 'BACK', 'HAD', 'A', 'BACK', 'DOOR', 'THAT', 'OPENED', 'ONTO', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVENUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUBS', 'AND', 'A', 'WHITE', 
'PICKET', 'FENCE'] +4852-28311-0016-2114: hyp=['NO', 'LONGER', 'WINGED', 'TOWARD', 'THE', 'BACK', 'GOT', 'A', 'BACK', 'DOOR', 'THAT', 'OPENED', 'ON', 'A', 'WATER', 'STREET', 'THE', 'SPACE', 'BETWEEN', 'THE', 'HOUSE', 'AND', 'WISCONSIN', 'AVIGUE', 'HAD', 'BEEN', 'MADE', 'INTO', 'A', 'NEAT', 'OBLONG', 'FLOWER', 'GARDEN', 'FENCED', 'OFF', 'FROM', 'THE', 'SIDEWALK', 'BY', 'BOX', 'SHRUGS', 'AND', 'THE', 'WHITE', 'PICKET', 'FENCE'] +4852-28311-0017-2115: ref=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'AND', 'GRAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0017-2115: hyp=['A', 'LIVID', 'YELLOW', 'STAINED', 'THE', 'HORIZON', 'BEYOND', 'THE', 'FACTORIES', 'IN', 'GLAY', 'CLOUDS', 'LOWERED', 'AND', 'TUMBLED', 'ABOVE'] +4852-28311-0018-2116: ref=['THE', 'AIR', 'WAS', 'GROWING', 'CHILL', 'AND', 'CHRIS', 'DECIDED', 'TO', 'FINISH', 'HIS', 'JOB'] +4852-28311-0018-2116: hyp=['THE', 'AIR', 'WAS', 'GROWING', 'CHILL', 'AND', 'CHRIST', 'DECIDED', 'TO', 'FINISH', 'THE', 'JOB'] +4852-28311-0019-2117: ref=['ALL', 'AT', 'ONCE', 'HE', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'PINCHED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0019-2117: hyp=['ALL', 'AT', 'ONCE', 'YOU', 'WONDERED', 'HOW', 'HIS', 'MOTHER', 'WAS', 'AND', 'EVERYTHING', 'IN', 'HIM', 'PINCHED', 'AND', 'TIGHTENED', 'ITSELF'] +4852-28311-0020-2118: ref=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0020-2118: hyp=['AT', 'THE', 'FOOT', 'OF', 'THE', 'HILL', 'HE', 'REACHED', 'THE', 'HOUSE'] +4852-28311-0021-2119: ref=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0021-2119: hyp=['THERE', 'WERE', 'THREE', 'THINGS', 'THAT', 'ALWAYS', 'CAUGHT', 'HIS', 'EYE', 'AMID', 'THE', 'LITTER', 'OF', 'DUSTY', 'PIECES'] +4852-28311-0022-2120: ref=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTER', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'IN', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0022-2120: hyp=['ON', 'THE', 'LEFT', 'THE', 'COIL', 'OF', 'ROPE', 'IN', 'THE', 'CENTER', 'THE', 'MODEL', 'OF', 'A', 'SAILING', 'SHIP', 'IN', 'A', 'GREEN', 'GLASS', 'BOTTLE', 'AND', 'ON', 'THE', 'RIGHT', 'THE', 'WOODEN', 'STATUE', 'OF', 'A', 'NEGRO', 'BOY', 'AND', 'BAGGY', 'TROUSERS', 'TURKISH', 'JACKET', 'AND', 'WHITE', 'TURBAN'] +4852-28311-0023-2121: ref=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'HAD', 'SAILED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0023-2121: hyp=['BUT', 'THE', 'NAME', 'STILL', 'SHOWED', 'AT', 'THE', 'PROW', 'AND', 'MANY', 'A', 'TIME', 'CHRIS', 'SAFE', 'AT', 'HOME', 'IN', 'BED', 'HAD', 'SAILED', 'IMAGINARY', 'VOYAGES', 'IN', 'THE', 'MIRABELLE'] +4852-28311-0024-2122: ref=['HE', 'HAD', 'NEVER', 'SEEN', 'ANYONE', 'GO', 'INTO', 'MISTER', "WICKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0024-2122: hyp=["HE'D", 'NEVER', 'SEEN', 'ANY', 'ONE', 'GO', 'INTO', 'MISTER', "HOOKER'S", 'SHOP', 'NOW', 'HE', 'THOUGHT', 'OF', 'IT'] +4852-28311-0025-2123: ref=['HOW', 'THEN', 'DID', 'HE', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 'SELL'] +4852-28311-0025-2123: hyp=['HOW', 'THEN', 'DID', 'HE', 'TO', 'LIVE', 'AND', 'WHAT', 'DID', 'HE', 'EVER', 
'SELL'] +4852-28311-0026-2124: ref=['A', 'SUDDEN', 'CAR', 'HORN', 'WOKE', 'HIM', 'FROM', 'HIS', 'DREAM'] +4852-28311-0026-2124: hyp=['A', 'SUDDEN', 'CAR', 'HORN', 'WALKING', 'FROM', 'THIS', 'DREAM'] +4852-28312-0000-2125: ref=['OF', 'THE', 'MANY', 'TIMES', 'HE', 'HAD', 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'PORED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "WICKER'S", 'SHOP'] +4852-28312-0000-2125: hyp=['OF', 'THE', 'MANY', 'TIMES', 'HE', 'HAD', 'EXAMINED', 'MISTER', "WICKER'S", 'WINDOW', 'AND', 'POURED', 'OVER', 'THE', 'ROPE', 'THE', 'SHIP', 'AND', 'THE', 'NUBIAN', 'BOY', 'HE', 'HAD', 'NEVER', 'GONE', 'INTO', 'MISTER', "ROOKER'S", 'SHOP'] +4852-28312-0001-2126: ref=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOMEONE', 'SHOULD', 'ANSWER', 'THE', 'BELL', 'HE', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0001-2126: hyp=['SO', 'NOW', 'ALONE', 'UNTIL', 'SOME', 'ONE', 'SHOULD', 'ANSWER', 'THE', 'BELL', 'THEY', 'LOOKED', 'EAGERLY', 'IF', 'UNEASILY', 'AROUND', 'HIM'] +4852-28312-0002-2127: ref=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBER'] +4852-28312-0002-2127: hyp=['WHAT', 'WITH', 'THE', 'ONE', 'WINDOW', 'AND', 'THE', 'LOWERING', 'DAY', 'OUTSIDE', 'THE', 'LONG', 'NARROW', 'SHOP', 'WAS', 'SOMBRE'] +4852-28312-0003-2128: ref=['HEAVY', 'HAND', 'HEWN', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0003-2128: hyp=['HEAVY', 'HAND', 'YOU', 'AND', 'BEAMS', 'CROSSED', 'IT', 'FROM', 'ONE', 'SIDE', 'TO', 'THE', 'OTHER'] +4852-28312-0004-2129: ref=['MISTER', "WICKER'S", 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0004-2129: hyp=['MISTER', "OCCUR'S", 'BACK', 'BEING', 'TOWARD', 'THE', 'SOURCE', 'OF', 'LIGHT', 'CHRIS', 'COULD', 'NOT', 'SEE', 'HIS', 'FACE'] +4852-28312-0005-2130: ref=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNER', 'TO', 'TEMPLE', 'AND', 'JOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'CHEEKBONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0005-2130: hyp=['THE', 'DOUBLE', 'FANS', 'OF', 'MINUTE', 'WRINKLES', 'BREAKING', 'FROM', 'EYE', 'CORNERED', 'A', 'TEMPLE', 'ENJOINING', 'WITH', 'THOSE', 'OVER', 'THE', 'SHEEP', 'BONES', 'WERE', 'DRAWN', 'INTO', 'THE', 'HORIZONTAL', 'LINES', 'ACROSS', 'THE', 'DOMED', 'FOREHEAD'] +4852-28312-0006-2131: ref=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0006-2131: hyp=['LITTLE', 'TUFTS', 'OF', 'WHITE', 'FUZ', 'ABOVE', 'THE', 'EARS', 'WERE', 'ALL', 'THAT', 'REMAINED', 'OF', 'THE', "ANTIQUARIAN'S", 'HAIR', 'BUT', 'WHAT', 'DREW', 'AND', 'HELD', "CHRIS'S", 'GAZE', 'WERE', 'THE', 'OLD', "MAN'S", 'EYES'] +4852-28312-0007-2132: ref=['CHRIS', 'BLINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0007-2132: hyp=['CRIS', 'BLINKED', 'AND', 'LOOKED', 'AGAIN', 'YES', 'THEY', 'WERE', 'STILL', 'THERE'] +4852-28312-0008-2133: ref=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0008-2133: hyp=['CHRIS', 'SWALLOWED', 'AND', 'HIS', 'VOICE', 'CAME', 'BACK', 'TO', 'HIM'] +4852-28312-0009-2134: ref=['YES', 'SIR', 'HE', 
'SAID'] +4852-28312-0009-2134: hyp=['YES', 'SIR', 'HE', 'SAID'] +4852-28312-0010-2135: ref=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0010-2135: hyp=['I', 'SAW', 'YOUR', 'SIGN', 'AND', 'I', 'KNOW', 'A', 'BOY', 'WHO', 'NEEDS', 'THE', 'JOB'] +4852-28312-0011-2136: ref=["HE'S", 'A', 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0011-2136: hyp=["HE'S", 'A', 'SCHOOLMATE', 'OF', 'MINE'] +4852-28312-0012-2137: ref=['JAKEY', 'HARRIS', 'HIS', 'NAME', 'IS', 'AND', 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0012-2137: hyp=['J', "HARRIS'S", 'NAME', "ISN'T", 'HE', 'REALLY', 'NEEDS', 'THE', 'JOB'] +4852-28312-0013-2138: ref=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0013-2138: hyp=['I', 'I', 'JUST', 'WONDERED', 'IF', 'THE', 'PLACE', 'WAS', 'STILL', 'OPEN'] +4852-28312-0014-2139: ref=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SENSITIVE', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0014-2139: hyp=['WHAT', 'HE', 'SAW', 'WAS', 'A', 'FRESH', 'CHEEKED', 'LAD', 'TALL', 'FOR', 'THIRTEEN', 'STURDY', 'WITH', 'SINCERITY', 'AND', 'GOOD', 'HUMOUR', 'IN', 'HIS', 'FACE', 'AND', 'SOMETHING', 'SCENTED', 'AND', 'APPEALING', 'ABOUT', 'HIS', 'EYES'] +4852-28312-0015-2140: ref=['HE', 'GUESSED', 'THERE', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'ROOM', 'BEYOND'] +4852-28312-0015-2140: hyp=['HE', 'GUESSED', 'THERE', 'IT', 'MUST', 'BE', 'A', 'LIVELY', 'FIRE', 'IN', 'THAT', 'RUM', 'BEYOND'] +4852-28312-0016-2141: ref=['WOULD', 'THAT', 'INTERFERE', 'WITH', "JAKEY'S", 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0016-2141: hyp=['WOULD', 'THAT', 'INTERFERE', 'WITH', 'JAKIE', 'GIGS', 'GETTING', 'THE', 'JOB', 'SIR'] +4852-28312-0017-2142: ref=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'HAD', 'HE', 'NOT', 'SEEN', 'THE', 'REFLECTION', 'OF', 'THE', 'HEADLIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WATER', 'STREET', 'AND', 'UP', 'THE', 'HILL', 'TOWARD', 'THE', 'TRAFFIC', 'SIGNALS'] +4852-28312-0017-2142: hyp=['BUT', 'EVEN', 'AS', 'HE', 'SLOWLY', 'TURNED', 'THE', 'THOUGHT', 'PIERCED', 'HIS', 'MIND', 'WHY', 'DO', 'YOU', 'NOT', 'SEE', 'THE', 'REFLECTION', 'OF', 'THE', 'HEAD', 'LIGHTS', 'OF', 'THE', 'CARS', 'MOVING', 'UP', 'AROUND', 'THE', 'CORNER', 'OF', 'WALL', 'AT', 'HER', 'STREET', 'NOT', 'THE', 'HILL', 'TOWARD', 'THE', 'EFFIC', 'SIGNALS'] +4852-28312-0018-2143: ref=['THE', 'ROOM', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0018-2143: hyp=['THE', 'ROOMS', 'SEEMED', 'OVERLY', 'STILL'] +4852-28312-0019-2144: ref=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0019-2144: hyp=['THEN', 'IN', 'THAT', 'SECOND', 'HE', 'TURNED', 'AND', 'FACED', 'ABOUT'] +4852-28312-0020-2145: ref=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0020-2145: hyp=['THE', 'WIDE', 'BOW', 'WINDOW', 'WAS', 'THERE', 'BEFORE', 'HIM', 'THE', 'THREE', 'OBJECTS', 'HE', 'LIKED', 'BEST', 'SHOWING', 'FROSTY', 'IN', 'THE', 'MOONLIGHT', 'THAT', 'POURED', 'IN', 'FROM', 'ACROSS', 'THE', 'WATER'] +4852-28312-0021-2146: ref=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREEWAY'] +4852-28312-0021-2146: 
hyp=['ACROSS', 'THE', 'WATER', 'WHERE', 'WAS', 'THE', 'FREE', 'WAY'] +4852-28312-0022-2147: ref=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKESTACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0022-2147: hyp=['IT', 'WAS', 'NO', 'LONGER', 'THERE', 'NOR', 'WERE', 'THE', 'HIGH', 'WALLS', 'AND', 'SMOKESTACKS', 'OF', 'FACTORIES', 'TO', 'BE', 'SEEN'] +4852-28312-0023-2148: ref=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0023-2148: hyp=['THE', 'WAREHOUSES', 'WERE', 'STILL', 'THERE'] +4852-28312-0024-2149: ref=['FLABBERGASTED', 'AND', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0024-2149: hyp=['FLABRA', 'GASTED', 'IN', 'BREATHLESS', 'CHRIS', 'WAS', 'UNAWARE', 'THAT', 'HE', 'HAD', 'MOVED', 'CLOSER', 'TO', 'PEER', 'OUT', 'THE', 'WINDOW', 'IN', 'EVERY', 'DIRECTION'] +4852-28312-0025-2150: ref=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0025-2150: hyp=['NO', 'ELECTRIC', 'SIGNS', 'NO', 'LAMPLIT', 'STREETS'] +4852-28312-0026-2151: ref=['WHERE', 'THE', "PEOPLE'S", 'DRUGSTORE', 'HAD', 'STOOD', 'BUT', 'A', 'HALF', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0026-2151: hyp=['WHERE', 'THE', "PEOPLE'S", 'DRUG', 'STORE', 'HAD', 'STOOD', 'BUT', 'HALF', 'AN', 'HOUR', 'BEFORE', 'ROSE', 'THE', 'ROOFS', 'OF', 'WHAT', 'WAS', 'EVIDENTLY', 'AN', 'INN'] +4852-28312-0027-2152: ref=['A', 'COURTYARD', 'WAS', 'SPARSELY', 'LIT', 'BY', 'A', 'FLARING', 'TORCH', 'OR', 'TWO', 'SHOWING', 'A', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'A', 'POST'] +4852-28312-0027-2152: hyp=['A', 'COURTYARD', 'WAS', 'FIRSTLY', 'LIT', 'BY', 'A', 'FLARING', 'TORTURE', 'TWO', 'SHOWING', 'THE', 'SWINGING', 'SIGN', 'HUNG', 'ON', 'THE', 'POST'] +4852-28312-0028-2153: ref=['THE', 'POST', 'WAS', 'PLANTED', 'AT', 'THE', 'EDGE', 'OF', 'WHAT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0028-2153: hyp=['THE', 'POSTS', 'PLANTED', 'AT', 'THE', 'EDGE', 'OF', 'IT', 'WAS', 'NOW', 'A', 'BROAD', 'AND', 'MUDDY', 'ROAD'] +4852-28312-0029-2154: ref=['A', 'COACH', 'WITH', 'ITS', 'TOP', 'PILED', 'HIGH', 'WITH', 'LUGGAGE', 'STAMPED', 'TO', 'A', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0029-2154: hyp=['A', 'COACH', 'WHICH', 'HAD', 'STOPPED', 'PILED', 'HIGH', 'WITH', 'LEGGED', 'STAMPED', 'O', 'HALT', 'BESIDE', 'THE', 'FLAGGED', 'COURTYARD'] +4852-28312-0030-2155: ref=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0030-2155: hyp=['THEY', 'MOVED', 'INTO', 'THE', 'INN', 'THE', 'COACH', 'RATTLED', 'OFF', 'TO', 'THE', 'STABLE'] +4852-28312-0031-2156: ref=['MY', 'WINDOW', 'HAS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28312-0031-2156: hyp=['BY', 'WINDOW', 'AS', 'A', 'POWER', 'FOR', 'THOSE', 'FEW', 'WHO', 'ARE', 'TO', 'SEE'] +4852-28319-0000-2070: ref=['THE', 'LEARNING', 'OF', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0000-2070: hyp=['THE', 'LEARNING', 'AND', 'MAGIC', 'WAS', 'BY', 'NO', 'MEANS', 'EASY'] +4852-28319-0001-2071: ref=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'ABOUT', 'SIMON', 'GOSLER', 'HIS', 'HORDE', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] +4852-28319-0001-2071: hyp=['HE', 'HAD', 'TOLD', 'HIS', 'MASTER', 'AT', 'ONCE', 'HE', 'GOT', 'SIMON', 'GOSLER', 'HIS', 'HOARD', 'OF', 'MONEY', 'AND', 'HIS', 'HIDING', 'PLACES', 'FOR', 'IT'] 
+4852-28319-0002-2072: ref=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'INTO', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] +4852-28319-0002-2072: hyp=['CHRIS', 'THEREFORE', 'THREW', 'HIMSELF', 'AND', 'ALL', 'THE', 'PRELIMINARIES', 'OF', 'HIS', 'TASK'] +4852-28319-0003-2073: ref=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLDFISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] +4852-28319-0003-2073: hyp=['ONE', 'AFTERNOON', 'WHEN', 'HE', 'HAD', 'RETURNED', 'AFTER', 'A', 'REST', 'TO', 'MISTER', "WICKER'S", 'STUDY', 'HE', 'SAW', 'THAT', 'THERE', 'WAS', 'SOMETHING', 'NEW', 'IN', 'THE', 'ROOM', 'A', 'BOWL', 'WITH', 'A', 'GOLD', 'FISH', 'IN', 'IT', 'STOOD', 'ON', 'THE', 'TABLE', 'BUT', 'MISTER', 'WICKER', 'WAS', 'NOT', 'TO', 'BE', 'SEEN'] +4852-28319-0004-2074: ref=['WHAT', 'SHALL', 'I', 'DO', 'FIRST'] +4852-28319-0004-2074: hyp=['WHAT', 'SHOULD', 'ALL', 'I', 'DO', 'FIRST'] +4852-28319-0005-2075: ref=['HOW', 'YOU', 'HAVE', 'IMPROVED', 'MY', 'BOY', 'HE', 'EXCLAIMED', 'IT', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'AND', 'THIS', 'IS', 'AS', 'GOOD', 'A', 'CHANGE', 'AS', 'ANY'] +4852-28319-0005-2075: hyp=['HOW', 'OFTEN', 'PROVED', 'MY', 'BOY', 'IT', 'EXCLAIMED', 'IS', 'NOW', 'TIME', 'FOR', 'YOU', 'TO', 'TRY', 'MISSUS', 'IS', 'GOOD', 'A', 'CHANGE', 'IS', 'ANY'] +4852-28319-0006-2076: ref=['SUPPOSE', 'I', 'CHANGE', 'AND', "CAN'T", 'CHANGE', 'BACK'] +4852-28319-0006-2076: hyp=['SUPPOSE', 'A', 'CHANGE', 'AND', "CAN'T", 'CHANCE', 'BACK'] +4852-28319-0007-2077: ref=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0007-2077: hyp=['MISTER', 'WICKER', 'WAITED', 'PATIENTLY', 'BESIDE', 'HIM', 'FOR', 'A', 'FEW', 'MOMENTS', 'FOR', 'CHRIS', 'TO', 'GET', 'UP', 'HIS', 'COURAGE'] +4852-28319-0008-2078: ref=['THEN', 'AS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0008-2078: hyp=['THAT', 'IS', 'NOTHING', 'HAPPENED', 'WITH', 'A', 'VOICE', 'LIKE', 'A', 'WHIP', 'MISTER', 'WICKER', 'SAID', 'START', 'AT', 'ONCE'] +4852-28319-0009-2079: ref=['THE', 'SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0009-2079: hyp=['THE', 'SENSATION', 'SPREAD', 'FASTER', 'AND', 'FASTER'] +4852-28319-0010-2080: ref=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'AND', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0010-2080: hyp=['HIS', 'HEAD', 'SWAM', 'AND', 'HE', 'FELT', 'FAINT', 'IN', 'A', 'LITTLE', 'SICK', 'BUT', 'HE', 'PERSISTED', 'THROUGH', 'THE', 'FINAL', 'WORDS'] +4852-28319-0011-2081: ref=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0011-2081: hyp=['HE', 'THOUGHT', 'NOT', 'WITHOUT', 'A', 'FEELING', 'OF', 'PRIDE', 'AND', 'COMMENCED', 'THE', 'EXPERIMENTING', 'WITH', 'HIS', 'TAIL', 'AND', 'FINS', 'WITH', 'SUCH', 'ENTHUSIASM', 'AND', 'DELIGHT', 'THAT', 'SOME', 'LITTLE', 'TIME', 'ELAPSED', 'BEFORE', 'MISTER', "WICKER'S", 'VOICE', 'BOOMED', 'CLOSE', 'BY'] +4852-28319-0012-2082: 
ref=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] +4852-28319-0012-2082: hyp=['SEVENTY', 'FOUR', 'BOOK', 'ONE', 'THE', 'RETURN'] +4852-28319-0013-2083: ref=['THE', "FIGURE'S", 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0013-2083: hyp=['THE', 'FIGURES', 'SHOES', 'CARVED', 'IN', 'SOME', 'EASTERN', 'STYLE', 'HAD', 'CURVED', 'UP', 'POINTING', 'TOES'] +4852-28319-0014-2084: ref=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0014-2084: hyp=['THEN', 'ALL', 'AT', 'ONCE', 'THE', 'IDEA', 'CAME', 'TO', 'CHRIS'] +4852-28319-0015-2085: ref=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0015-2085: hyp=['IF', 'HE', 'WAS', 'TO', 'BE', 'A', 'MAGICIAN', 'COULD', 'HE', 'MAKE', 'THIS', 'BOY', 'COME', 'TO', 'LIFE'] +4852-28319-0016-2086: ref=['HE', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINING', 'THE', 'CARVED', 'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0016-2086: hyp=['IT', 'SQUATTED', 'ON', 'HIS', 'HAUNCHES', 'EXAMINED', 'THE', 'CARVED', 'WOODEN', 'FIGURE', 'ATTENTIVELY', 'AND', 'FELT', 'CONVINCED', 'THAT', 'ONCE', 'ALIVE', 'THE', 'BOY', 'WOULD', 'BE', 'AN', 'IDEAL', 'AND', 'HAPPY', 'COMPANION'] +4852-28319-0017-2087: ref=['BUT', 'HOW', 'DID', 'ONE', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0017-2087: hyp=['BUT', 'HOW', 'DID', 'ONE', 'A', 'CHANGE', 'INANIMATE', 'TO', 'ANIMATE'] +4852-28319-0018-2088: ref=['CHRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0018-2088: hyp=['GRIS', 'GOT', 'UP', 'AND', 'STOLE', 'BACK', 'TO', 'MISTER', "WICKER'S", 'DOOR'] +4852-28319-0019-2089: ref=['HE', 'HEARD', 'THE', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'CHRIS', 'RESUMED', 'HIS', 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0019-2089: hyp=['HE', 'HEARD', 'THAT', 'MAGICIAN', 'GOING', 'UP', 'THE', 'SPIRAL', 'STAIRCASE', 'TO', 'HIS', 'ROOM', 'ABOVE', 'AND', 'AFTER', 'CHANGING', 'HIMSELF', 'TO', 'A', 'MOUSE', 'TO', 'SLIP', 'UNDER', 'THE', 'DOOR', 'AND', 'SEE', 'THAT', 'THE', 'ROOM', 'WAS', 'REALLY', 'EMPTY', 'MISTER', "JUNE'S", 'PROPER', 'SHAPE', 'AND', 'OPENED', 'THE', 'DOORS', 'OF', 'THE', 'CUPBOARD', 'AT', 'THE', 'FAR', 'END', 'OF', 'THE', 'ROOM'] +4852-28319-0020-2090: ref=['THE', 'AFTERNOON', 'RAINY', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0020-2090: hyp=['THE', 'AFTERNOON', 'RAINING', 'BEFORE', 'INCREASED', 'IN', 'STORM'] +4852-28319-0021-2091: ref=['DUSK', 'CAME', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLED', 'IN', 'THE', 'SKY'] +4852-28319-0021-2091: hyp=['THUS', 'GAINED', 'TWO', 'HOURS', 'BEFORE', 'ITS', 'TIME', 'THUNDER', 'SNARLS', 'IN', 'THE', 'SKY'] +4852-28319-0022-2092: ref=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0022-2092: hyp=['CERTAIN', 'ELEMENTS', 'WERE', 'TO', 'BE', 'MIXED', 'AND', 'POURED', 'AT', 'THE', 'PROPER', 'TIME'] +4852-28319-0023-2093: ref=['MISTER', 'WICKER', 'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FLOORBOARDS', 'CREAKED', 'AND', 'STILL', 
'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'POTION', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0023-2093: hyp=['MISTER', 'WICKER', 'BEGAN', 'MOVING', 'ABOUT', 'UPSTAIRS', 'THE', 'FOREBOARDS', 'CREAKED', 'AND', 'STILL', 'CHRIS', 'COULD', 'NOT', 'LEAVE', 'UNTIL', 'THE', 'FORTUNE', 'FUMED', 'AND', 'GLOWED'] +4852-28319-0024-2094: ref=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'OF', 'THE', 'NEGRO', 'BOY'] +4852-28319-0024-2094: hyp=['WITH', 'INFINITE', 'CAUTION', 'CHRIS', 'CLOSED', 'THE', 'DOOR', 'SILENTLY', 'BEHIND', 'HIM', 'AND', 'RUNNING', 'LIGHTLY', 'FORWARD', 'REACHED', 'THE', 'FIGURE', 'AT', 'THE', 'NEGRO', 'BOY'] +4852-28319-0025-2095: ref=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0025-2095: hyp=['IT', 'WAS', 'AS', 'IF', 'THE', 'STIFFNESS', 'MELTED'] +4852-28319-0026-2096: ref=['UNDER', 'HIS', 'EYES', 'THE', 'WOODEN', 'FOLDS', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0026-2096: hyp=['UNDER', 'HIS', 'EYES', 'WELLS', 'OF', 'CLOTH', 'BECAME', 'RICH', 'SILK', 'EMBROIDERY', 'GLEAMED', 'IN', 'ITS', 'REALITY', 'UPON', 'THE', 'COAT', 'AND', 'OH', 'THE', 'FACE'] +4852-28319-0027-2097: ref=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'THE', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'AND', 'LET', 'THE', 'BOUQUET', 'FALL'] +4852-28319-0027-2097: hyp=['THE', 'WOODEN', 'GRIN', 'LOOSENED', 'THE', 'LARGE', 'EYES', 'TURNED', 'THE', 'HAND', 'HOLDING', 'THE', 'HARD', 'BOUQUET', 'OF', 'CARVED', 'FLOWERS', 'MOVED', 'THE', 'BOUQUET', 'FALL'] +4852-28330-0000-2044: ref=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0000-2044: hyp=['THEY', 'WENT', 'DOWN', 'TO', 'THEIR', 'QUARTERS', 'FIRST'] +4852-28330-0001-2045: ref=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0001-2045: hyp=['GUESS', 'MISTER', 'FINNEY', 'WENT', 'TO', 'HIS', 'QUARTERS', 'I', "DON'T", 'REMEMBER', 'SEEING', 'HIM', 'CROSS', 'THE', 'DECK', 'OR', 'COME', 'OVER', 'THAT', 'WAY', 'AT', 'ALL'] +4852-28330-0002-2046: ref=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'AT', 'THE', 'HELM', 'BY', 'ELBERT', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0002-2046: hyp=['NEXT', 'NED', 'CILLEY', 'WAS', 'RELIEVED', 'TO', 'THE', 'HELM', 'BY', 'ELBER', 'JONES', 'WHO', 'TOOK', 'OVER', 'NED', 'WENT', 'ON', 'DOWN'] +4852-28330-0003-2047: ref=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'HAVE', 'BEEN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'OPEN'] +4852-28330-0003-2047: hyp=['IT', 'LOOKS', 'TO', 'ME', 'AS', 'IF', 'IT', 'COULD', 'BIT', 'OF', 'IN', 'ONE', 'OF', 'SEVERAL', 'PEOPLE', 'AND', "I'LL", 'BE', 'SWITCHED', 'IF', 'I', 'KNOW', 'WHO', "I'LL", 'KEEP', 'MY', 'EYES', 'UP', 'AND'] +4852-28330-0004-2048: ref=['THE', 'MIRABELLE', 'WAS', 'NEARING', 'TAHITI'] +4852-28330-0004-2048: hyp=['THE', 'MIRABELLE', 'WAS', 'NEAR', 'INDEEDY'] +4852-28330-0005-2049: ref=["WE'VE", 'WATER', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0005-2049: hyp=['REVOLTA', 'AND', 'FRESH', 'STORES', 'TO', 'TAKE', 'ON', 'THERE'] +4852-28330-0006-2050: ref=['CHRIS', 
'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'IN', 'HURRYING', 'DOWN', 'TO', 'HIS', 'CABIN'] +4852-28330-0006-2050: hyp=['CHRIS', 'LOST', 'NO', 'TIME', 'AS', 'SOON', 'AS', 'HE', 'COULD', 'DO', 'IT', 'WITHOUT', 'BEING', 'NOTICED', 'AND', 'HURRYING', 'DOWN', 'TO', 'HIS', 'CABIN'] +4852-28330-0007-2051: ref=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'HIS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0007-2051: hyp=['CERTAINLY', 'MY', 'BOY', 'BOOMED', 'OUT', 'THE', 'CAPTAIN', 'AS', 'BLUE', 'EYES', 'ABRUPTLY', 'KEEN', 'AND', 'PENETRATING'] +4852-28330-0008-2052: ref=['MISTER', 'FINNEY', 'WILL', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OVERHEARD', 'IN', 'HERE'] +4852-28330-0008-2052: hyp=['MISTER', 'FINNEY', 'WOULD', 'BE', 'SOME', 'TIME', 'ON', 'DECK', 'WE', 'CANNOT', 'BE', 'OWNED', 'HEARD', 'AND', 'HERE'] +4852-28330-0009-2053: ref=['HIS', 'FACE', 'FROZE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0009-2053: hyp=['HIS', 'FACE', 'ROSE', 'WITH', 'NERVOUSNESS', 'THAT', 'THIS', 'MIGHT', 'DO', 'NOT', 'DO', 'AS', 'AN', 'ANSWER', 'AND', 'HE', 'STOOD', 'STIFF', 'AND', 'STILL', 'BEFORE', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0010-2054: ref=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0010-2054: hyp=['THE', 'CAPTAIN', 'SAT', 'FORWARD', 'IN', 'HIS', 'CHAIR', 'LOOKING', 'AT', 'HIM', 'FOR', 'A', 'LONG', 'MOMENT', 'CONSIDERING'] +4852-28330-0011-2055: ref=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'I', 'DO'] +4852-28330-0011-2055: hyp=['THEN', 'HE', 'SAID', 'WELL', 'I', 'DO', 'NOT', 'CARE', 'FOR', 'IT', 'I', 'CANNOT', 'SAY', 'THAT', 'DO'] +4852-28330-0012-2056: ref=['THIS', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'WIFE', 'OR', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0012-2056: hyp=['THE', 'SHIP', 'IS', 'MORE', 'TO', 'ME', 'THAN', 'MY', 'FULL', 'MOTHER', 'OR', 'FAMILY'] +4852-28330-0013-2057: ref=['HE', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CHRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0013-2057: hyp=['AND', 'PAUSED', 'FINGERING', 'HIS', 'LOWER', 'LIP', 'AND', 'LOOKING', 'SIDEWAYS', 'IN', 'A', 'REFLECTIVE', 'FASHION', 'AT', 'CRIS', 'STANDING', 'BEFORE', 'HIM'] +4852-28330-0014-2058: ref=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0014-2058: hyp=['WE', 'SHALL', 'SAY', 'NO', 'MORE', 'BUT', 'I', 'TRUST', 'YOU', 'UNDERSTAND', 'THE', 'RESPONSIBILITY', 'YOU', 'HAVE'] +4852-28330-0015-2059: ref=['THIS', 'SHIP', 'ITS', 'CARGO', 'AND', 'ITS', 'MEN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0015-2059: hyp=['THE', 'SHIP', 'ITS', 'CARGO', 'IN', 'ITS', 'MEN', 'WILL', 'BE', 'IN', 'YOUR', 'HANDS'] +4852-28330-0016-2060: ref=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0016-2060: hyp=['YES', 'SIR', 'I', 'THINK', 'I', 'CAN', 'DO', 'IT', 'SAFELY', 'OR', 'I', 'SHOULD', 'NOT', 'TRY', 'SIR'] +4852-28330-0017-2061: ref=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACE', 'CREASED', 'IN', 'HIS', 'WINNING', 'SMILE'] +4852-28330-0017-2061: hyp=['CAPTAIN', "BLIZZARD'S", 'ROUND', 'PINK', 'FACED', 'CREASED', 
'IN', 'ITS', 'WINNING', 'SMILE'] +4852-28330-0018-2062: ref=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0018-2062: hyp=['HE', 'THEN', 'WENT', 'ON', 'TO', 'DESCRIBE', 'WHAT', 'ELSE', 'WAS', 'TO', 'FOLLOW', 'THE', 'COVERING', 'OF', 'THE', 'SHIP', 'WITH', 'LEAVES', 'TO', 'MAKE', 'IT', 'BLEND', 'WITH', 'ITS', 'SURROUNDINGS'] +4852-28330-0019-2063: ref=['CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANYONE', 'ELSE', 'OF', 'HIS', 'TIME', 'YET', 'UNDERSTOOD'] +4852-28330-0019-2063: hyp=['THE', 'CAMOUFLAGE', 'WAS', 'NOT', 'A', 'WORD', 'THE', 'CAPTAIN', 'OR', 'ANY', 'ONE', 'ELSE', 'OF', 'HIS', 'TIME', 'HE', 'HAD', 'UNDERSTOOD'] +4852-28330-0020-2064: ref=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0020-2064: hyp=['WHAT', 'CAN', 'BE', 'SAID', 'DURING', 'THAT', 'TIME', 'SIR', 'CHRIS', 'THOUGHT', 'TO', 'ASK'] +4852-28330-0021-2065: ref=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICAMENTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'CAPTAIN', 'OF', 'A', 'SHIP', 'AND', 'THE', 'CREW', 'KNOW', 'IT'] +4852-28330-0021-2065: hyp=['I', 'AM', 'SOMEWHAT', 'SKILLED', 'IN', 'MEDICMENTS', 'I', 'HAVE', 'TO', 'BE', 'AS', 'A', 'CAPTAIN', 'OF', 'SHIP', 'AND', 'THE', 'CREW', 'KNOW', 'IT'] +4852-28330-0022-2066: ref=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0022-2066: hyp=['I', 'SHALL', 'SAY', 'THAT', 'YOU', 'ARE', 'IN', 'MY', 'OWN', 'CABIN', 'SO', 'THAT', 'I', 'CAN', 'CARE', 'FOR', 'YOU'] +4852-28330-0023-2067: ref=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'HAD', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0023-2067: hyp=['NOT', 'SINCE', 'HE', 'HAD', 'LEFT', 'MISTER', 'WICKER', 'AND', 'CHRIS', 'FELT', 'SUCH', 'CONFIDENCE', 'AS', 'HE', 'DID', 'IN', 'THE', 'WORDS', 'AND', 'ACTIONS', 'OF', 'CAPTAIN', 'BLIZZARD'] +4852-28330-0024-2068: ref=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'AND', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0024-2068: hyp=['HE', 'KNEW', 'NOW', 'THAT', 'HIS', 'ABSENCE', 'FOR', 'AS', 'LONG', 'AS', 'HE', 'HAD', 'HAD', 'TO', 'BE', 'AWAY', 'WOULD', 'BE', 'COVERED', 'UP', 'IN', 'SATISFACTORILY', 'ACCOUNTED', 'FOR'] +4852-28330-0025-2069: ref=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +4852-28330-0025-2069: hyp=['THEIR', 'CONVERSATION', 'HAD', 'TAKEN', 'SOME', 'LITTLE', 'WHILE'] +533-1066-0000-796: ref=['WHEN', 'CHURCHYARDS', 'YAWN'] +533-1066-0000-796: hyp=['ONE', 'CHURCHYARDS', 'YAWN'] +533-1066-0001-797: ref=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSANDS', 'OF', 'MILES', 'IN', 'THE', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOD'] +533-1066-0001-797: hyp=['I', 'KNEW', 'WELL', 'ENOUGH', 'THAT', 'HE', 'MIGHT', 'BE', 'CARRIED', 'THOUSAND', 'OF', 'MILES', 'INTO', 'BOX', 'CAR', 'LOCKED', 'IN', 'PERHAPS', 'WITHOUT', 'WATER', 'OR', 'FOOT'] +533-1066-0002-798: ref=['I', 'AM', 'SURE', 'I', 'KISSED', 'LIDDY', 'AND', 'I', 'HAVE', 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEM', 'TO', 'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] 
+533-1066-0002-798: hyp=['I', 'AM', 'SURE', 'I', 'KISSED', 'LADY', 'AND', "I'VE", 'HAD', 'TERRIBLE', 'MOMENTS', 'SINCE', 'WHEN', 'I', 'SEEMED', 'TO', 'REMEMBER', 'KISSING', 'MISTER', 'JAMIESON', 'TOO', 'IN', 'THE', 'EXCITEMENT'] +533-1066-0003-799: ref=['FORTUNATELY', 'WARNER', 'AND', 'THE', 'DETECTIVES', 'WERE', 'KEEPING', 'BACHELOR', 'HALL', 'IN', 'THE', 'LODGE'] +533-1066-0003-799: hyp=['FORTUNATELY', 'WARNER', 'ON', 'THE', 'DETECTIVE', 'WERE', 'KEEPING', 'BACHELOR', 'HOLLAND', 'LODGE'] +533-1066-0004-800: ref=['OUT', 'OF', 'DEFERENCE', 'TO', 'LIDDY', 'THEY', 'WASHED', 'THEIR', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCOCTED', 'QUEER', 'MESSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0004-800: hyp=['OUT', 'OF', 'DEFERENCE', 'TO', 'LADY', 'THEY', 'WASHED', 'HER', 'DISHES', 'ONCE', 'A', 'DAY', 'AND', 'THEY', 'CONCLUDED', 'QUEER', 'MASSES', 'ACCORDING', 'TO', 'THEIR', 'SEVERAL', 'ABILITIES'] +533-1066-0005-801: ref=['MISS', 'INNES', 'HE', 'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UP', 'STAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TONIGHT'] +533-1066-0005-801: hyp=['MISS', 'EAMES', 'HE', 'SAID', 'STOPPING', 'ME', 'AS', 'I', 'WAS', 'ABOUT', 'TO', 'GO', 'TO', 'MY', 'ROOM', 'UP', 'STAIRS', 'HOW', 'ARE', 'YOUR', 'NERVES', 'TO', 'NIGHT'] +533-1066-0006-802: ref=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0006-802: hyp=['I', 'HAVE', 'NONE', 'I', 'SAID', 'HAPPILY'] +533-1066-0007-803: ref=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0007-803: hyp=['I', 'MEAN', 'HE', 'PERSISTED', 'DO', 'YOU', 'FEEL', 'AS', 'THOUGH', 'YOU', 'COULD', 'GO', 'THROUGH', 'WITH', 'SOMETHING', 'RATHER', 'UNUSUAL'] +533-1066-0008-804: ref=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0008-804: hyp=['THE', 'MOST', 'UNUSUAL', 'THING', 'I', 'CAN', 'THINK', 'OF', 'WOULD', 'BE', 'A', 'PEACEFUL', 'NIGHT'] +533-1066-0009-805: ref=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0009-805: hyp=['SOMETHING', 'IS', 'GOING', 'TO', 'OCCUR', 'HE', 'SAID'] +533-1066-0010-806: ref=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'OLD', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0010-806: hyp=['PUT', 'ON', 'HEAVY', 'SHOES', 'AND', 'SOME', 'ALL', 'DARK', 'CLOTHES', 'AND', 'MAKE', 'UP', 'YOUR', 'MIND', 'NOT', 'TO', 'BE', 'SURPRISED', 'AT', 'ANYTHING'] +533-1066-0011-807: ref=['LIDDY', 'WAS', 'SLEEPING', 'THE', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UP', 'STAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0011-807: hyp=['LEAVY', 'WAS', 'SLEEPING', 'SLEEP', 'OF', 'THE', 'JUST', 'WHEN', 'I', 'WENT', 'UPSTAIRS', 'AND', 'I', 'HUNTED', 'OUT', 'MY', 'THINGS', 'CAUTIOUSLY'] +533-1066-0012-808: ref=['THEY', 'WERE', 'TALKING', 'CONFIDENTIALLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0012-808: hyp=['YOU', 'WERE', 'TALKING', 'TO', 'FILIANTLY', 'TOGETHER', 'BUT', 'WHEN', 'I', 'CAME', 'DOWN', 'THEY', 'CEASED'] +533-1066-0013-809: ref=['THERE', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'THE', 'LOCKS', 'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTED', 'AS', 'TO', 'RENEWED', 'VIGILANCE', 'AND', 'THEN', 'AFTER', 'EXTINGUISHING', 'THE', 'HALL', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 
'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0013-809: hyp=['THEY', 'WERE', 'A', 'FEW', 'PREPARATIONS', 'TO', 'BE', 'MADE', 'LOCKS', 'TO', 'BE', 'GONE', 'OVER', 'WINTERS', 'TO', 'BE', 'INSTRUCTIVE', 'AS', 'TO', 'RENEWED', 'VIGILANCE', 'AND', 'THEN', 'AFTER', 'EXTINGUISHING', 'THE', 'WHOLE', 'LIGHT', 'WE', 'CREPT', 'IN', 'THE', 'DARKNESS', 'THROUGH', 'THE', 'FRONT', 'DOOR', 'AND', 'INTO', 'THE', 'NIGHT'] +533-1066-0014-810: ref=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0014-810: hyp=['I', 'ASKED', 'NO', 'QUESTIONS'] +533-1066-0015-811: ref=['ONCE', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'BIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0015-811: hyp=['WAS', 'ONLY', 'SOMEBODY', 'SPOKE', 'AND', 'THEN', 'IT', 'WAS', 'AN', 'EMPHATIC', 'FIT', 'OF', 'PROFANITY', 'FROM', 'DOCTOR', 'STEWART', 'WHEN', 'HE', 'RAN', 'INTO', 'A', 'WIRE', 'FENCE'] +533-1066-0016-812: ref=['I', 'HARDLY', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0016-812: hyp=['I', 'ARE', 'TO', 'KNOW', 'WHAT', 'I', 'EXPECTED'] +533-1066-0017-813: ref=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'WHEN', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0017-813: hyp=['THE', 'DOCTOR', 'WAS', 'PUFFING', 'SOMEWHAT', 'WHEN', 'WE', 'FINALLY', 'CAME', 'TO', 'A', 'HALT'] +533-1066-0018-814: ref=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0018-814: hyp=['I', 'CONFESS', 'THAT', 'JUST', 'AT', 'THAT', 'MINUTE', 'EVEN', 'SUNNYSIDE', 'SEEMED', 'A', 'CHEERFUL', 'SPOT'] +533-1066-0019-815: ref=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0019-815: hyp=['IN', 'SPITE', 'OF', 'MYSELF', 'I', 'DREW', 'MY', 'BREATH', 'IN', 'SHARPLY'] +533-1066-0020-816: ref=['IT', 'WAS', 'ALEX', 'ARMED', 'WITH', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0020-816: hyp=['IT', 'WAS', 'ALEX', "I'M", 'THE', 'TWO', 'LONG', 'HANDLED', 'SPADES'] +533-1066-0021-817: ref=['THE', 'DOCTOR', 'KEPT', 'A', 'KEEN', 'LOOKOUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0021-817: hyp=['THE', 'DOCTOR', 'KEPT', 'A', 'KIN', 'LOOK', 'OUT', 'BUT', 'NO', 'ONE', 'APPEARED'] +533-1066-0022-818: ref=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0022-818: hyp=["THERE'S", 'ONE', 'THING', 'SURE', "I'LL", 'NOT', 'BE', 'SUSPECTED', 'OF', 'COMPLICITY'] +533-1066-0023-819: ref=['A', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'HANDIER', 'AT', 'BURYING', 'FOLKS', 'THAN', 'AT', 'DIGGING', 'THEM', 'UP'] +533-1066-0023-819: hyp=['THE', 'DOCTOR', 'IS', 'GENERALLY', 'SUPPOSED', 'TO', 'BE', 'A', 'HANDIER', 'AT', 'BERING', 'FOLKS', 'THAN', 'A', 'TIGGING', 'THEM', 'UP'] +533-1066-0024-820: ref=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'THERE', 'AND', 'LOOKED', 'DOWN'] +533-1066-0024-820: hyp=['I', 'HELD', 'ON', 'TO', 'HIM', 'FRANTICALLY', 'AND', 'SOMEHOW', 'I', 'GOT', 'TERRANT', 'LOOKED', 'DOWN'] +533-131556-0000-821: ref=['BUT', 'HOW', 'AM', 'I', 'TO', 'GET', 'OVER', 'THE', 'TEN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0000-821: hyp=['BUT', 'HOW', 'AM', 'I', 'TO', 'IT', 'OVER', 'THE', 'TOWN', 'OR', 'TWELVE', 'DAYS', 'THAT', 'MUST', 'YET', 'ELAPSE', 'BEFORE', 'THEY', 'GO'] +533-131556-0001-822: ref=['FOR', 'NONE', 'COULD', 'INJURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0001-822: hyp=['FOR', 'NONE', 'COULD', 
'ENDURE', 'ME', 'AS', 'HE', 'HAS', 'DONE', 'OH'] +533-131556-0002-823: ref=['THE', 'WORD', 'STARES', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0002-823: hyp=['THE', 'WORDS', 'TEARS', 'ME', 'IN', 'THE', 'FACE', 'LIKE', 'A', 'GUILTY', 'CONFESSION', 'BUT', 'IT', 'IS', 'TRUE', 'I', 'HATE', 'HIM', 'I', 'HATE', 'HIM'] +533-131556-0003-824: ref=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMULATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0003-824: hyp=['I', 'SOMETIMES', 'THINK', 'I', 'OUGHT', 'TO', 'GIVE', 'HIM', 'CREDIT', 'FOR', 'THE', 'GOOD', 'FEELING', 'HE', 'SIMILATES', 'SO', 'WELL', 'AND', 'THEN', 'AGAIN', 'I', 'THINK', 'IT', 'IS', 'MY', 'DUTY', 'TO', 'SUSPECT', 'HIM', 'UNDER', 'THE', 'PECULIAR', 'CIRCUMSTANCES', 'IN', 'WHICH', 'I', 'AM', 'PLACED'] +533-131556-0004-825: ref=['I', 'HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'THEM', 'SO', 'MINUTELY'] +533-131556-0004-825: hyp=['I', 'HAVE', 'DONE', 'WELL', 'TO', 'RECORD', 'HIM', 'SUMINUTELY'] +533-131556-0005-826: ref=['THEY', 'HAD', 'BETAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LESS', 'TO', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'DEPRECATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'A', 'BOOK'] +533-131556-0005-826: hyp=['THE', 'YEAR', 'HAD', 'TAKEN', 'THEMSELVES', 'TO', 'THEIR', 'WORK', 'I', 'LEST', 'DIVERT', 'MY', 'MIND', 'THAN', 'TO', 'THE', 'PROCATE', 'CONVERSATION', 'HAD', 'PROVIDED', 'MYSELF', 'WITH', 'THE', 'BOOK'] +533-131556-0006-827: ref=['I', 'AM', 'TOO', 'WELL', 'ACQUAINTED', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'THE', 'APPEARANCE', 'OF', 'IT'] +533-131556-0006-827: hyp=['I', 'AM', 'TOO', 'ACQUAINTED', 'WITH', 'YOUR', 'CHARACTER', 'AND', 'CONDUCT', 'TO', 'FEEL', 'ANY', 'REAL', 'FRIENDSHIP', 'FOR', 'YOU', 'AND', 'AS', 'I', 'AM', 'WITHOUT', 'YOUR', 'TALENT', 'FOR', 'DISSIMULATION', 'I', 'CANNOT', 'ASSUME', 'THE', 'APPEARANCE', 'OF', 'IT'] +533-131556-0007-828: ref=['UPON', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0007-828: hyp=['UP', 'AND', 'PERUSING', 'THIS', 'SHE', 'TURNED', 'SCARLET', 'AND', 'BIT', 'HER', 'LIP'] +533-131556-0008-829: ref=['YOU', 'MAY', 'GO', 'MILICENT', 'AND', "SHE'LL", 'FOLLOW', 'IN', 'A', 'WHILE', 'MILICENT', 'WENT'] +533-131556-0008-829: hyp=['YOU', 'MAY', 'GO', 'MILICENT', 'AND', "SHE'LL", 'FOLLOWING', 'AWHILE', 'MELLICENT', 'WENT'] +533-131556-0009-830: ref=['WILL', 'YOU', 'OBLIGE', 'ME', 'HELEN', 'CONTINUED', 'SHE'] +533-131556-0009-830: hyp=['OLIO', 'OBLIGE', 'ME', 'ALAN', 'CONTINUED', 'SHE'] +533-131556-0010-831: ref=['AH', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0010-831: hyp=['HA', 'YOU', 'ARE', 'SUSPICIOUS'] +533-131556-0011-832: ref=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0011-832: hyp=['IF', 'I', 'WERE', 'SUSPICIOUS', 'I', 'REPLIED', 'I', 'SHOULD', 'HAVE', 'DISCOVERED', 'YOUR', 'INFAMY', 'LONG', 'BEFORE'] +533-131556-0012-833: ref=['I', 'ENJOY', 'A', 'MOONLIGHT', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UPON', 
'HER', 'AND', 'THE', 'SHRUBBERY', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVOURITE', 'RESORTS'] +533-131556-0012-833: hyp=['ENJOY', 'EVENLENTH', 'RAMBLE', 'AS', 'WELL', 'AS', 'YOU', 'I', 'ANSWERED', 'STEADILY', 'FIXING', 'MY', 'EYES', 'UP', 'ON', 'EARTH', 'AND', 'FREDERI', 'HAPPENS', 'TO', 'BE', 'ONE', 'OF', 'MY', 'FAVORITE', 'RESORTS'] +533-131556-0013-834: ref=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0013-834: hyp=['SHE', 'COLOURED', 'AGAIN', 'EXCESSIVELY', 'AND', 'REMAINED', 'SILENT', 'PRESSING', 'HER', 'FINGER', 'AGAINST', 'HER', 'TEETH', 'AND', 'GAZING', 'INTO', 'THE', 'FIRE'] +533-131556-0014-835: ref=['I', 'WATCHED', 'HER', 'A', 'FEW', 'MOMENTS', 'WITH', 'A', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] +533-131556-0014-835: hyp=['I', 'WATCH', 'FOR', 'A', 'FEW', 'MOMENTS', 'TO', 'THE', 'FEELING', 'OF', 'MALEVOLENT', 'GRATIFICATION', 'THEN', 'MOVING', 'TOWARDS', 'THE', 'DOOR', 'I', 'CALMLY', 'ASKED', 'IF', 'SHE', 'HAD', 'ANYTHING', 'MORE', 'TO', 'SAY'] +533-131556-0015-836: ref=['YES', 'YES'] +533-131556-0015-836: hyp=['YES', 'YES'] +533-131556-0016-837: ref=['SUPPOSE', 'I', 'DO'] +533-131556-0016-837: hyp=['SUPPOSE', 'I', 'DO'] +533-131556-0017-838: ref=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0017-838: hyp=['SHE', 'PAUSED', 'IN', 'EVIDENT', 'DISCONCERTION', 'AND', 'PERPLEXITY', 'MINGLED', 'WITH', 'ANGER', 'SHE', 'DARED', 'NOT', 'SHOW'] +533-131556-0018-839: ref=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0018-839: hyp=['I', 'CANNOT', 'RENOUNCE', 'WHAT', 'IS', 'DEARER', 'THAN', 'LIFE', 'SHE', 'MUTTERED', 'IN', 'A', 'LOW', 'HURRIED', 'TONE'] +533-131556-0019-840: ref=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'READY', 'TO', 'ACKNOWLEDGE', 'MYSELF', 'YOUR', 'DEBTOR', 'FOR', 'AN', 'ACT', 'OF', 'THE', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0019-840: hyp=['IF', 'YOU', 'ARE', 'GENEROUS', 'HERE', 'IS', 'A', 'FITTING', 'OPPORTUNITY', 'FOR', 'THE', 'EXERCISE', 'OF', 'YOUR', 'MAGNANIMITY', 'IF', 'YOU', 'ARE', 'PROUD', 'HERE', 'AM', 'I', 'YOUR', 'RIVAL', 'RATHER', 'TO', 'ANNOUNCE', 'MYSELF', 'YOUR', 'DEPTOR', 'FOR', 'AN', 'ACT', 'OF', 'MOST', 'NOBLE', 'FORBEARANCE'] +533-131556-0020-841: ref=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0020-841: hyp=['I', 'SHALL', 'NOT', 'TELL', 'HIM'] +533-131556-0021-842: ref=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0021-842: hyp=['GIVE', 'ME', 'NO', 'THANKS', 'IT', 'IS', 'NOT', 'FOR', 'YOUR', 'SAKE', 'THAT', 'I', 'REFRAIN'] +533-131556-0022-843: ref=['AND', 'MILICENT', 'WILL', 'YOU', 'TELL', 'HER'] +533-131556-0022-843: hyp=['AND', 'MELLICENT', 'WILL', 'IT', 'TELL', 'HER'] +533-131556-0023-844: ref=['I', 'WOULD', 'NOT', 'FOR', 'MUCH', 'THAT', 'SHE', 'SHOULD', 'KNOW', 'THE', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] +533-131556-0023-844: hyp=['I', 'WILL', 'NOT', 'FOR', 'MUCH', 'THAT', 'YOU', 'SHOULD', 'NOT', 'INFAMY', 'AND', 'DISGRACE', 'OF', 'HER', 'RELATION'] 
+533-131556-0024-845: ref=['YOU', 'USE', 'HARD', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0024-845: hyp=['YOU', 'USE', 'OUR', 'WORDS', 'MISSUS', 'HUNTINGDON', 'BUT', 'I', 'CAN', 'PARDON', 'YOU'] +533-131556-0025-846: ref=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131556-0025-846: hyp=['HOW', 'DARE', 'YOU', 'MENTION', 'HIS', 'NAME', 'TO', 'ME'] +533-131562-0000-847: ref=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'TURNING', 'TO', 'WHERE', 'I', 'STOOD', 'WRINGING', 'MY', 'HANDS', 'IN', 'SILENT', 'RAGE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', "I'LL", 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'KEYS'] +533-131562-0000-847: hyp=['IT', 'SEEMS', 'VERY', 'INTERESTING', 'LOVE', 'SAID', 'HE', 'LIFTING', 'HIS', 'HEAD', 'AND', 'TURNING', 'TO', 'HER', 'EYES', 'TOO', 'WRINGING', 'MY', 'HAND', 'IN', 'SILENT', 'RATE', 'AND', 'ANGUISH', 'BUT', "IT'S", 'RATHER', 'LONG', 'I', 'LOOK', 'AT', 'IT', 'SOME', 'OTHER', 'TIME', 'AND', 'MEANWHILE', "I'LL", 'TROUBLE', 'YOU', 'FOR', 'YOUR', 'KEYS', 'MY', 'DEAR', 'WHAT', 'CASE'] +533-131562-0001-848: ref=['THE', 'KEYS', 'OF', 'YOUR', 'CABINET', 'DESK', 'DRAWERS', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0001-848: hyp=['IT', 'ACCUSE', 'OF', 'YOUR', 'CABINET', 'DESKED', 'RAOUL', 'AND', 'WHATEVER', 'ELSE', 'YOU', 'POSSESS', 'SAID', 'HE', 'RISING', 'AND', 'HOLDING', 'OUT', 'HIS', 'HAND'] +533-131562-0002-849: ref=['THE', 'KEY', 'OF', 'MY', 'DESK', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'THE', 'LOCK', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0002-849: hyp=['THE', 'KEY', 'OF', 'MY', 'VES', 'IN', 'FACT', 'WAS', 'AT', 'THAT', 'MOMENT', 'IN', 'LOVE', 'AND', 'THE', 'OTHERS', 'WERE', 'ATTACHED', 'TO', 'IT'] +533-131562-0003-850: ref=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0003-850: hyp=['NOW', 'THEN', 'SNEERED', 'HE', 'WE', 'MUST', 'HAVE', 'A', 'CONFISCATION', 'OF', 'PROPERTY'] +533-131562-0004-851: ref=['AND', 'PUTTING', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0004-851: hyp=['AND', 'PUT', 'IN', 'THE', 'KEYS', 'INTO', 'HIS', 'POCKET', 'HE', 'WALKED', 'INTO', 'THE', 'LIBRARY'] +533-131562-0005-852: ref=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THE', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0005-852: hyp=['THAT', 'AND', 'ALL', 'REPLIED', 'THE', 'MASTER', 'AND', 'THE', 'THINGS', 'WERE', 'CLEARED', 'AWAY'] +533-131562-0006-853: ref=['MISTER', 'HUNTINGDON', 'THEN', 'WENT', 'UP', 'STAIRS'] +533-131562-0006-853: hyp=['MISTER', 'HUNTINGDON', 'THEN', 'WENT', 'UPSTAIRS'] +533-131562-0007-854: ref=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'THE', 'VERY', 'DEVIL', 'FOR', 'SPITE'] +533-131562-0007-854: hyp=['MUTTERED', 'HE', 'STARTING', 'BACK', "SHE'S", 'VERY', 'DEVIL', 'FOR', 'A', 'SPITE'] +533-131562-0008-855: ref=['I', "DIDN'T", 'SAY', "I'D", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0008-855: hyp=['I', "DIDN'T", 'SAY', "I'VE", 'BROKEN', 'IT', 'DID', 'I', 'RETURNED', 'HE'] +533-131562-0009-856: ref=['I', 'SHALL', 'PUT', 'YOU', 'UPON', 'A', 'SMALL', 'MONTHLY', 'ALLOWANCE', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 
'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'THE', 'TEMPTATION'] +533-131562-0009-856: hyp=['I', 'SHALL', 'PUT', 'YOU', 'UP', 'IN', 'A', 'SMALL', 'MOUTHFULLY', 'ALLOW', 'US', 'IN', 'FUTURE', 'FOR', 'YOUR', 'OWN', 'PRIVATE', 'EXPENSES', 'AND', 'YOU', "NEEDN'T", 'TROUBLE', 'YOURSELF', 'ANY', 'MORE', 'ABOUT', 'MY', 'CONCERNS', 'I', 'SHALL', 'LOOK', 'OUT', 'FOR', 'A', 'STEWARD', 'MY', 'DEAR', 'I', "WON'T", 'EXPOSE', 'YOU', 'TO', 'THE', 'TEMPTATION'] +533-131562-0010-857: ref=['AND', 'AS', 'FOR', 'THE', 'HOUSEHOLD', 'MATTERS', 'MISSUS', 'GREAVES', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UPON', 'AN', 'ENTIRELY', 'NEW', 'PLAN'] +533-131562-0010-857: hyp=['AND', 'AS', 'FOR', 'THE', 'HOUSE', 'OR', 'MATTERS', 'MISSUS', 'GREEBS', 'MUST', 'BE', 'VERY', 'PARTICULAR', 'IN', 'KEEPING', 'HER', 'ACCOUNTS', 'WE', 'MUST', 'GO', 'UP', 'IN', 'AN', 'ENTIRELY', 'NEW', 'PLAN'] +533-131562-0011-858: ref=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HUNTINGDON'] +533-131562-0011-858: hyp=['WHAT', 'GREAT', 'DISCOVERY', 'HAVE', 'YOU', 'MADE', 'NOW', 'MISTER', 'HONDYNON'] +533-131562-0012-859: ref=['HAVE', 'I', 'ATTEMPTED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0012-859: hyp=['IF', 'I', 'ATTENDED', 'TO', 'DEFRAUD', 'YOU'] +533-131562-0013-860: ref=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', "IT'S", 'BEST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0013-860: hyp=['NOT', 'IN', 'MONEY', 'MATTERS', 'EXACTLY', 'IT', 'SEEMS', 'BUT', 'IS', 'FAST', 'TO', 'KEEP', 'OUT', 'OF', 'THE', 'WAY', 'OF', 'TEMPTATION'] +533-131562-0014-861: ref=['HERE', 'BENSON', 'ENTERED', 'WITH', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'A', 'BRIEF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STILL', 'IN', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0014-861: hyp=['HERE', 'BUILTON', 'ENTERED', 'THE', 'CANDLES', 'AND', 'THERE', 'FOLLOWED', 'THE', 'ROOF', 'INTERVAL', 'OF', 'SILENCE', 'I', 'SITTING', 'STEALING', 'MY', 'CHAIR', 'AND', 'HE', 'STANDING', 'WITH', 'HIS', 'BACK', 'TO', 'THE', 'FIRE', 'SILENTLY', 'TRIUMPHING', 'IN', 'MY', 'DESPAIR'] +533-131562-0015-862: ref=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'WILL', 'RETURN', 'UPON', 'ME'] +533-131562-0015-862: hyp=['I', 'KNOW', 'THAT', 'DAY', 'AFTER', 'DAY', 'SUCH', 'FEELINGS', 'TO', 'RETURN', 'UPON', 'ME'] +533-131562-0016-863: ref=['I', 'TRY', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLEAVE', 'TO', 'THE', 'DUST'] +533-131562-0016-863: hyp=['I', 'TRIED', 'TO', 'LOOK', 'TO', 'HIM', 'AND', 'RAISE', 'MY', 'HEART', 'TO', 'HEAVEN', 'BUT', 'IT', 'WILL', 'CLIFF', 'TO', 'THE', 'DUST'] +533-131564-0000-768: ref=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0000-768: hyp=['VAIN', 'HOPE', 'I', 'FEAR'] +533-131564-0001-769: ref=['MISTER', 'AND', 'MISSUS', 'HATTERSLEY', 'HAVE', 'BEEN', 'STAYING', 'AT', 'THE', 'GROVE', 'A', 'FORTNIGHT', 'AND', 'AS', 'MISTER', 'HARGRAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'THE', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'A', 'DAY', 'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILICENT', 'AND', 'ESTHER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0001-769: hyp=['MISS', 'AND', 'MISSUS', 'HALTERSLEY', 'HAVE', 'BEEN', 'SEEING', 'IT', 'TO', 'GROW', 'A', 'FORTNIGHT', 'AND', 
'AS', 'MISSUS', 'HARGRAVE', 'IS', 'STILL', 'ABSENT', 'AND', 'WEATHER', 'WAS', 'REMARKABLY', 'FINE', 'I', 'NEVER', 'PASSED', 'THE', 'DAY', 'WITHOUT', 'SEEING', 'MY', 'TWO', 'FRIENDS', 'MILLSON', 'AND', 'ASSER', 'EITHER', 'THERE', 'OR', 'HERE'] +533-131564-0002-770: ref=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'ME', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0002-770: hyp=['NO', 'UNLESS', 'YOU', 'CAN', 'TELL', 'YOU', 'WHEN', 'TO', 'EXPECT', 'HIM', 'HOME'] +533-131564-0003-771: ref=['I', "CAN'T", 'YOU', "DON'T", 'WANT', 'HIM', 'DO', 'YOU'] +533-131564-0003-771: hyp=['I', "CAN'T", 'EVEN', 'WANTS', 'HIM', 'DO', 'YOU'] +533-131564-0004-772: ref=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'OUGHT', 'TO', 'HAVE', 'FORMED', 'LONG', 'AGO'] +533-131564-0004-772: hyp=['IT', 'IS', 'A', 'RESOLUTION', 'YOU', 'ARE', 'REFORMED', 'LONG', 'AGO'] +533-131564-0005-773: ref=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEARTS', 'THOUGH', 'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0005-773: hyp=['WE', 'ALL', 'HAVE', 'A', 'BIT', 'OF', 'A', 'LIKING', 'FOR', 'HIM', 'AT', 'THE', 'BOTTOM', 'OF', 'OUR', 'HEART', 'THOUGH', 'WE', "CAN'T", 'RESPECT', 'HIM'] +533-131564-0006-774: ref=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'BAD', 'AS', 'I', 'AM'] +533-131564-0006-774: hyp=['NO', "I'D", 'RATHER', 'BE', 'LIKE', 'MYSELF', 'THAT', 'WAS', 'I', 'AM'] +533-131564-0007-775: ref=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0007-775: hyp=['NEVER', 'MIND', 'MY', 'PLAIN', 'SPEAKING', 'SAID', 'I', 'IT', 'IS', 'FROM', 'THE', 'BEST', 'OF', 'MOTIVES'] +533-131564-0008-776: ref=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOUR', 'SONS', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0008-776: hyp=['BUT', 'TELL', 'ME', 'SHOULD', 'YOU', 'WISH', 'YOURSELVES', 'TO', 'BE', 'LIKE', 'MISTER', 'HUNTINGDON', 'OR', 'EVEN', 'LIKE', 'YOURSELF'] +533-131564-0009-777: ref=['OH', 'NO', 'I', "COULDN'T", 'STAND', 'THAT'] +533-131564-0009-777: hyp=['OH', 'NO', 'ECHOLYN', 'STAND', 'THAT'] +533-131564-0010-778: ref=['FIRE', 'AND', 'FURY'] +533-131564-0010-778: hyp=['FIRE', 'AND', 'FURY'] +533-131564-0011-779: ref=['NOW', "DON'T", 'BURST', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0011-779: hyp=['NOW', "DON'T", 'FORCE', 'INTO', 'A', 'TEMPEST', 'AT', 'THAT'] +533-131564-0012-780: ref=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0012-780: hyp=['BUT', 'HANG', 'IT', "THAT'S", 'NOT', 'MY', 'FAULT'] +533-131564-0013-781: ref=['NOT', 'YEARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0013-781: hyp=['NOT', 'EARS', 'FOR', "SHE'S", 'ONLY', 'FIVE', 'AND', 'TWENTY'] +533-131564-0014-782: ref=['WHAT', 'WOULD', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WORRY', 'HER', 'TO', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0014-782: hyp=['WHAT', 'DID', 'YOU', 'MAKE', 'OF', 'ME', 'AND', 'THE', 'CHILDREN', 'TO', 'BE', 'SURE', 'THAT', 'WERE', 'HE', 'HURT', 'DEATH', 'BETWEEN', 'THEM'] +533-131564-0015-783: ref=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0015-783: hyp=['I', 'KNOW', 'THEY', 'ARE', 'BLESS', 'THEM'] +533-131564-0016-784: ref=['HE', 'FOLLOWED', 'ME', 'INTO', 'THE', 'LIBRARY'] +533-131564-0016-784: hyp=['IF', 'ALL', 'OF', 'ME', 'INTO', 'THE', 'LIBRARY'] +533-131564-0017-785: ref=['I', 'SOUGHT', 'OUT', 'AND', 'PUT', 'INTO', 'HIS', 'HANDS', 'TWO', 'OF', "MILICENT'S", 'LETTERS', 'ONE', 
'DATED', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', 'WILDEST', 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'A', 'LUCID', 'INTERVAL'] +533-131564-0017-785: hyp=['I', 'SOUGHT', 'OUT', 'AND', 'PUT', 'INTO', 'HIS', 'HANDS', 'TWO', 'OF', 'MILICENT', 'SLATTERS', 'ONE', 'DID', 'IT', 'FROM', 'LONDON', 'AND', 'WRITTEN', 'DURING', 'ONE', 'OF', 'HIS', "WALLA'S", 'SEASONS', 'OF', 'RECKLESS', 'DISSIPATION', 'THE', 'OTHER', 'IN', 'THE', 'COUNTRY', 'DURING', 'ELUSIVE', 'INTERVAL'] +533-131564-0018-786: ref=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', 'GRIMSBY', 'AND', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENIOUSLY', 'THROWING', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'TO', 'OTHER', "MEN'S", 'SHOULDERS'] +533-131564-0018-786: hyp=['THE', 'FORMER', 'WAS', 'FULL', 'OF', 'TROUBLE', 'AND', 'ANGUISH', 'NOT', 'ACCUSING', 'HIM', 'BUT', 'DEEPLY', 'REGRETTING', 'HIS', 'CONNECTION', 'WITH', 'HIS', 'PROFLIGATE', 'COMPANIONS', 'ABUSING', 'MISTER', "GRIM'S", 'BEING', 'OTHERS', 'INSINUATING', 'BITTER', 'THINGS', 'AGAINST', 'MISTER', 'HUNTINGDON', 'AND', 'MOST', 'INGENUOUSLY', 'THREW', 'IN', 'THE', 'BLAME', 'OF', 'HER', "HUSBAND'S", 'MISCONDUCT', 'ON', 'THE', 'OTHER', "MAN'S", 'SHOULDERS'] +533-131564-0019-787: ref=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'A', 'HEARTY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'D', 'N', 'ME', 'IF', 'I', "DON'T"] +533-131564-0019-787: hyp=["I'VE", 'BEEN', 'A', 'CURSED', 'RASCAL', 'GOD', 'KNOWS', 'SAID', 'HE', 'AS', 'HE', 'GAVE', 'IT', 'EARTHLY', 'SQUEEZE', 'BUT', 'YOU', 'SEE', 'IF', 'I', "DON'T", 'MAKE', 'AMENDS', 'FOR', 'IT', 'THEN', 'ME', 'IF', 'I', "DON'T"] +533-131564-0020-788: ref=['IF', 'YOU', 'INTEND', 'TO', 'REFORM', 'INVOKE', "GOD'S", 'BLESSING', 'HIS', 'MERCY', 'AND', 'HIS', 'AID', 'NOT', 'HIS', 'CURSE'] +533-131564-0020-788: hyp=['IF', 'YOU', 'INSENT', 'WITH', 'FORM', 'INVOKE', "GOD'S", 'BLESSING', 'IS', 'A', 'MERCY', 'IN', 'THIS', 'APE', 'NOR', 'DISCOURSE'] +533-131564-0021-789: ref=['GOD', 'HELP', 'ME', 'THEN', 'FOR', "I'M", 'SURE', 'I', 'NEED', 'IT'] +533-131564-0021-789: hyp=['GOD', 'HELP', 'ME', 'THEN', 'FOR', 'I', 'AM', 'SURE', 'I', 'NEEDED'] +533-131564-0022-790: ref=["WHERE'S", 'MILICENT'] +533-131564-0022-790: hyp=['WHERE', 'IS', 'MILICENT'] +533-131564-0023-791: ref=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'HER', 'ROUND', 'AND', 'PUSHING', 'HER', 'TOWARDS', 'ME'] +533-131564-0023-791: hyp=['NAY', 'NOT', 'I', 'SAID', 'HE', 'TURNING', 'AROUND', 'AND', 'PUSHING', 'IT', 'TOWARDS', 'ME'] +533-131564-0024-792: ref=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERFLOWING', 'WITH', 'GRATITUDE'] +533-131564-0024-792: hyp=['MILICENT', 'FLEW', 'TO', 'THANK', 'ME', 'OVERWHELMING', 'ITS', 'GRATITUDE'] +533-131564-0025-793: ref=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0025-793: hyp=['CRIED', 'SHE', 'I', "COULDN'T", 'HAVE', 'INFLUENCED', 'HIM', "I'M", 'SURE', 'BY', 'ANYTHING', 'THAT', 'I', 'COULD', 'HAVE', 'SAID'] +533-131564-0026-794: ref=['YOU', 'NEVER', 'TRIED', 'ME', 'MILLY', 'SAID', 'HE'] +533-131564-0026-794: hyp=['YOU', 'NEVER', 'TRIED', 'ME', 'MERELY', 
'SAID', 'HE'] +533-131564-0027-795: ref=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +533-131564-0027-795: hyp=['AFTER', 'THAT', 'THEY', 'WILL', 'REPAIR', 'TO', 'THEIR', 'COUNTRY', 'HOME'] +5442-32873-0000-1365: ref=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0000-1365: hyp=['CAPTAIN', 'LAKE', 'DID', 'NOT', 'LOOK', 'AT', 'ALL', 'LIKE', 'A', 'LONDON', 'DANDY', 'NOW'] +5442-32873-0001-1366: ref=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THERE', 'AND', 'A', 'WILD', 'LEER', 'IN', 'HIS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0001-1366: hyp=['THERE', 'WAS', 'A', 'VERY', 'NATURAL', 'SAVAGERY', 'AND', 'DEJECTION', 'THERE', 'AND', 'A', 'WILD', 'YARD', 'IN', 'HIS', 'YELLOW', 'EYES', 'RACHEL', 'SAT', 'DOWN'] +5442-32873-0002-1367: ref=['A', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0002-1367: hyp=['AND', 'SLAVE', 'ONLY', 'THINK', 'A', 'SLAVE'] +5442-32873-0003-1368: ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0003-1368: hyp=['OH', 'FRIGHTFUL', 'FRIGHTFUL', 'IS', 'IT', 'A', 'DREAM'] +5442-32873-0004-1369: ref=['OH', 'FRIGHTFUL', 'FRIGHTFUL'] +5442-32873-0004-1369: hyp=['OH', 'FRIGHTFUL', 'DREADFUL'] +5442-32873-0005-1370: ref=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0005-1370: hyp=['STANLEY', 'STANLEY', 'IT', 'WOULD', 'BE', 'MERCY', 'TO', 'KILL', 'ME', 'SHE', 'BROKE', 'OUT', 'AGAIN'] +5442-32873-0006-1371: ref=['BRIGHT', 'AND', 'NATTY', 'WERE', 'THE', 'CHINTZ', 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIRES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0006-1371: hyp=['BRIGHT', 'AND', 'NATTY', 'WITH', 'A', "CHIN'S", 'CURTAINS', 'AND', 'THE', 'LITTLE', 'TOILET', 'SET', 'OUT', 'NOT', 'INELEGANTLY', 'AND', 'HER', 'PET', 'PIPING', 'GOLDFINCH', 'ASLEEP', 'ON', 'HIS', 'PERCH', 'WITH', 'HIS', 'BIT', 'OF', 'SUGAR', 'BETWEEN', 'THE', 'WIVES', 'OF', 'HIS', 'CAGE', 'HER', 'PILLOW', 'SO', 'WHITE', 'AND', 'UNPRESSED', 'WITH', 'ITS', 'LITTLE', 'EDGING', 'OF', 'LACE'] +5442-32873-0007-1372: ref=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TOILET', 'BOTTLE', 'OF', 'EAU', 'DE', 'COLOGNE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLES', 'AND', 'FOREHEAD'] +5442-32873-0007-1372: hyp=['WHEN', 'HE', 'CAME', 'BACK', 'TO', 'THE', 'DRAWING', 'ROOM', 'A', 'TALLED', 'BOTTLE', 'OF', 'OVERCLONE', 'IN', 'HIS', 'HAND', 'WITH', 'HER', 'LACE', 'HANDKERCHIEF', 'HE', 'BATHED', 'HER', 'TEMPLE', 'AND', 'FOREHEAD'] +5442-32873-0008-1373: ref=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'HER', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0008-1373: hyp=['THERE', 'WAS', 'NOTHING', 'VERY', 'BROTHERLY', 'IN', 'HIS', 'LOOK', 'AS', 'HE', 'PEERED', 'INTO', 'A', 'PALE', 'SHARP', 'FEATURES', 'DURING', 'THE', 'PROCESS'] +5442-32873-0009-1374: ref=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 
'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0009-1374: hyp=['THERE', "DON'T", 'MIND', 'ME', 'SHE', 'SAID', 'SHARPLY', 'AND', 'GETTING', 'UP', 'SHE', 'LOOKED', 'DOWN', 'AT', 'HER', 'DRESS', 'AND', 'THIN', 'SHOES', 'AND', 'SEEMING', 'TO', 'RECOLLECT', 'HERSELF', 'SHE', 'TOOK', 'THE', 'CANDLE', 'HE', 'HAD', 'JUST', 'SET', 'DOWN', 'AND', 'WENT', 'SWIFTLY', 'TO', 'HER', 'ROOM'] +5442-32873-0010-1375: ref=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'IN', 'THE', 'GLASS'] +5442-32873-0010-1375: hyp=['AND', 'SHE', 'THREW', 'BACK', 'HER', 'VEIL', 'AND', 'GOING', 'HURRIEDLY', 'TO', 'THE', 'TOILET', 'MECHANICALLY', 'SURVEYED', 'HERSELF', 'FROM', 'THE', 'GLASS'] +5442-32873-0011-1376: ref=['RACHEL', 'LAKE', 'RACHEL', 'LAKE', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0011-1376: hyp=['RIGIDLY', 'LEGALLY', 'WHAT', 'ARE', 'YOU', 'NOW'] +5442-32873-0012-1377: ref=["I'LL", 'STAY', 'HERE', 'THAT', 'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0012-1377: hyp=["I'LL", 'STAY', 'HERE', 'THAT', 'IS', 'IN', 'THE', 'DRAWING', 'ROOM', 'SHE', 'ANSWERED', 'AND', 'THE', 'FACE', 'WAS', 'WITHDRAWN'] +5442-32873-0013-1378: ref=['HE', 'SLACKENED', 'HIS', 'PACE', 'AND', 'TAPPED', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THAT', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'WAGGOT', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0013-1378: hyp=['HIS', 'CLACK', 'IN', 'THE', 'SPACE', 'AND', 'TOP', 'SHARPLY', 'AT', 'THE', 'LITTLE', 'WINDOW', 'OF', 'THE', 'MODEST', 'POST', 'OFFICE', 'AT', 'WHICH', 'THE', 'YOUNG', 'LADIES', 'IN', 'THE', 'PONY', 'CARRIAGE', 'HAD', 'PULLED', 'UP', 'THE', 'DAY', 'BEFORE', 'AND', 'WITHIN', 'WHICH', 'LUKE', 'RAGGED', 'WAS', 'WONT', 'TO', 'SLEEP', 'IN', 'A', 'SORT', 'OF', 'WOODEN', 'BOX', 'THAT', 'FOLDED', 'UP', 'AND', 'APPEARED', 'TO', 'BE', 'A', 'CHEST', 'OF', 'DRAWERS', 'ALL', 'DAY'] +5442-32873-0014-1379: ref=['LUKE', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOGS', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOG', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DOG', 'CART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0014-1379: hyp=['LOOK', 'TOOK', 'CARE', 'OF', 'MISTER', "LARKIN'S", 'DOG', 'AND', 'GROOMED', 'MISTER', "WYLDER'S", 'HORSE', 'AND', 'CLEANED', 'UP', 'HIS', 'DOOR', 'CART', 'FOR', 'MARK', 'BEING', 'CLOSE', 'ABOUT', 'MONEY', 'AND', 'FINDING', 'THAT', 'THE', 'THING', 'WAS', 'TO', 'BE', 'DONE', 'MORE', 'CHEAPLY', 'THAT', 'WAY', 'PUT', 'UP', 'HIS', 'HORSE', 'AND', 'DORCART', 'IN', 'THE', 'POST', 'OFFICE', 'PREMISES', 'AND', 'SO', 'EVADED', 'THE', 'LIVERY', 'CHARGES', 'OF', 'THE', 'BRANDON', 'ARMS'] +5442-32873-0015-1380: ref=['BUT', 'LUKE', 'WAS', 'NOT', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 
'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GYLINGDEN'] +5442-32873-0015-1380: hyp=['BUT', 'LUKE', 'WAS', 'KNOWN', 'THERE', 'AND', 'CAPTAIN', 'LAKE', 'RECOLLECTING', 'HIS', 'HABITS', 'AND', 'HIS', 'HAUNT', 'HURRIED', 'ON', 'TO', 'THE', 'SILVER', 'LION', 'WHICH', 'HAS', 'ITS', 'GABLE', 'TOWARDS', 'THE', 'COMMON', 'ONLY', 'ABOUT', 'A', 'HUNDRED', 'STEPS', 'AWAY', 'FOR', 'DISTANCES', 'ARE', 'NOT', 'GREAT', 'IN', 'GILINGDEN'] +5442-32873-0016-1381: ref=['HERE', 'WERE', 'THE', 'FLOW', 'OF', 'SOUL', 'AND', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'SCAPEGRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0016-1381: hyp=['HERE', 'WERE', 'THE', 'FLOOR', 'OF', 'SOUL', 'UN', 'OF', 'STOUT', 'LONG', 'PIPES', 'LONG', 'YARNS', 'AND', 'TOLERABLY', 'LONG', 'CREDITS', 'AND', 'THE', 'HUMBLE', 'CAVE', 'BRACES', 'OF', 'THE', 'TOWN', 'RESORTED', 'THITHER', 'FOR', 'THE', 'PLEASURES', 'OF', 'A', 'CLUB', 'LIFE', 'AND', 'OFTEN', 'REVELLED', 'DEEP', 'INTO', 'THE', 'SMALL', 'HOURS', 'OF', 'THE', 'MORNING'] +5442-32873-0017-1382: ref=['LOSE', 'NO', 'TIME', 'AND', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0017-1382: hyp=['LOSE', 'NO', 'TIME', 'BUT', "I'LL", 'GIVE', 'YOU', 'HALF', 'A', 'CROWN'] +5442-32873-0018-1383: ref=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDEAWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOG', 'CART', 'WAS', 'TRUNDLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEY', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'UNDER', 'STARTLING', 'CIRCUMSTANCES', 'AT', 'UNSEASONABLE', 'HOURS'] +5442-32873-0018-1383: hyp=['LUKE', 'STUCK', 'ON', 'HIS', 'GREASY', 'WIDE', 'AWAKE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'MORE', 'THE', 'DOOR', 'CART', 'WAS', 'TUMBLED', 'OUT', 'INTO', 'THE', 'LANE', 'AND', 'THE', 'HORSE', 'HARNESSED', 'WENT', 'BETWEEN', 'THE', 'SHAFTS', 'WITH', 'THAT', 'WONDERFUL', 'CHEERFULNESS', 'WITH', 'WHICH', 'THEIR', 'BEAR', 'TO', 'BE', 'CALLED', 'UP', 'AND', 'THE', 'STARTLING', 'CIRCUMSTANCES', 'AND', 'UNSEASONABLE', 'HOURS'] +5442-32873-0019-1384: ref=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TAMAR', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TAMAR'] +5442-32873-0019-1384: hyp=['IF', 'I', 'THOUGHT', "YOU'D", 'FAIL', 'ME', 'NOW', 'TO', 'MORROW', 'I', 'SHOULD', 'NEVER', 'COME', 'BACK', 'GOOD', 'NIGHT', 'TO', 'MORROW'] +5442-41168-0000-1385: ref=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0000-1385: hyp=['THE', 'ACT', 'SAID', 'THAT', 'IN', 'CASE', 'OF', 'DIFFERENCE', 'OF', 'OPINION', 'THERE', 'MUST', 'BE', 'A', 'BALLOT'] +5442-41168-0001-1386: ref=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0001-1386: hyp=['HE', 'WENT', 'UP', 'TO', 'THE', 'TABLE', 'AND', 'STRIKING', 'IT', 'WITH', 'HIS', 'FINGER', 'RING', 'HE', 'SHOUTED', 'LOUDLY', 'A', 'BALLOT'] +5442-41168-0002-1387: ref=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COURSE', 'SERGEY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 
'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0002-1387: hyp=['HE', 'WAS', 'SHOUTING', 'FOR', 'THE', 'VERY', 'COARSE', 'SURGY', 'IVANOVITCH', 'HAD', 'PROPOSED', 'BUT', 'IT', 'WAS', 'EVIDENT', 'THAT', 'HE', 'HATED', 'HIM', 'AND', 'ALL', 'HIS', 'PARTY', 'AND', 'THIS', 'FEELING', 'OF', 'HATRED', 'SPREAD', 'THROUGH', 'THE', 'WHOLE', 'PARTY', 'AND', 'ROUSED', 'IN', 'OPPOSITION', 'TO', 'IT', 'THE', 'SAME', 'VINDICTIVENESS', 'THOUGH', 'IN', 'A', 'MORE', 'SEEMLY', 'FORM', 'ON', 'THE', 'OTHER', 'SIDE'] +5442-41168-0003-1388: ref=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ORDER', 'A', 'BALLOT'] +5442-41168-0003-1388: hyp=['SHOUTS', 'WERE', 'RAISED', 'AND', 'FOR', 'A', 'MOMENT', 'ALL', 'WAS', 'CONFUSION', 'SO', 'THAT', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'HAD', 'TO', 'CALL', 'FOR', 'ODO', 'A', 'BALLOT'] +5442-41168-0004-1389: ref=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0004-1389: hyp=['WE', 'SHED', 'OUR', 'BLOOD', 'FOR', 'OUR', 'COUNTRY'] +5442-41168-0005-1390: ref=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARSHAL', "HE'S", 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0005-1390: hyp=['THE', 'CONFIDENCE', 'OF', 'THE', 'MONARCH', 'BUT', 'NO', 'CHECKING', 'THE', 'ACCOUNTS', 'OF', 'THE', 'MARTIAN', 'IS', 'NOT', 'A', 'CASHIER', 'BUT', "THAT'S", 'NOT', 'THE', 'POINT'] +5442-41168-0006-1391: ref=['VOTES', 'PLEASE', 'BEASTLY'] +5442-41168-0006-1391: hyp=['VAULTS', 'PLEASE', 'BEASTLY'] +5442-41168-0007-1392: ref=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0007-1392: hyp=['THEY', 'EXPRESSED', 'THE', 'MOST', 'IMPLACABLE', 'HATRED'] +5442-41168-0008-1393: ref=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLEROV', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0008-1393: hyp=['LEVIN', 'DID', 'NOT', 'IN', 'THE', 'LEAST', 'UNDERSTAND', 'WHAT', 'WAS', 'THE', 'MATTER', 'AND', 'HE', 'MARVELLED', 'AT', 'THE', 'PASSION', 'WITH', 'WHICH', 'IT', 'WAS', 'DISPUTED', 'WHETHER', 'OR', 'NOT', 'THE', 'DECISION', 'ABOUT', 'FLARE', 'OFF', 'SHOULD', 'BE', 'PUT', 'TO', 'THE', 'VOTE'] +5442-41168-0009-1394: ref=['HE', 'FORGOT', 'AS', 'SERGEY', 'IVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'SYLLOGISM', 'THAT', 'IT', 'WAS', 'NECESSARY', 'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'VOTES', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURE', 'THE', 'RECOGNITION', 'OF', "FLEROV'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0009-1394: hyp=['HE', 'FORGOT', 'AS', 'SO', 'GIVANOVITCH', 'EXPLAINED', 'TO', 'HIM', 'AFTERWARDS', 'THIS', 'DILIGION', 'THAT', 'IT', 'WAS', 'NECESSARY', 
'FOR', 'THE', 'PUBLIC', 'GOOD', 'TO', 'GET', 'RID', 'OF', 'THE', 'MARSHAL', 'OF', 'THE', 'PROVINCE', 'THAT', 'TO', 'GET', 'INTO', 'THE', 'MARTIAN', 'IT', 'WAS', 'NECESSARY', 'TO', 'HAVE', 'A', 'MAJORITY', 'OF', 'VOTES', 'THAT', 'TO', 'GET', 'A', 'MAJORITY', 'OF', 'BOATS', 'IT', 'WAS', 'NECESSARY', 'TO', 'SECURE', "FLORO'S", 'RIGHT', 'TO', 'VOTE', 'THAT', 'TO', 'SECURE', 'THE', 'RECOGNITION', 'OF', "FLORA'S", 'RIGHT', 'TO', 'VOTE', 'THEY', 'MUST', 'DECIDE', 'ON', 'THE', 'INTERPRETATION', 'TO', 'BE', 'PUT', 'ON', 'THE', 'ACT'] +5442-41168-0010-1395: ref=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0010-1395: hyp=['BUT', 'LEVIN', 'FORGOT', 'ALL', 'THAT', 'AND', 'IT', 'WAS', 'PAINFUL', 'TO', 'HIM', 'TO', 'SEE', 'ALL', 'THESE', 'EXCELLENT', 'PERSONS', 'FOR', 'WHOM', 'HE', 'HAD', 'A', 'RESPECT', 'IN', 'SUCH', 'AN', 'UNPLEASANT', 'AND', 'VICIOUS', 'STATE', 'OF', 'EXCITEMENT'] +5442-41168-0011-1396: ref=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0011-1396: hyp=['TO', 'ESCAPE', 'FROM', 'THIS', 'PAINFUL', 'FEELING', 'HE', 'WENT', 'AWAY', 'INTO', 'THE', 'OTHER', 'ROOM', 'WHERE', 'THERE', 'WAS', 'NOBODY', 'EXCEPT', 'THE', 'WAITERS', 'AT', 'THE', 'REFRESHMENT', 'BAR'] +5442-41168-0012-1397: ref=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GRAY', 'WHISKERED', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'SCORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JEERED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0012-1397: hyp=['HE', 'PARTICULARLY', 'LIKED', 'THE', 'WAY', 'ONE', 'GREY', 'WHISKIRT', 'WAITER', 'WHO', 'SHOWED', 'HIS', 'CORN', 'FOR', 'THE', 'OTHER', 'YOUNGER', 'ONES', 'AND', 'WAS', 'JERED', 'AT', 'BY', 'THEM', 'WAS', 'TEACHING', 'THEM', 'HOW', 'TO', 'FOLD', 'UP', 'NAPKINS', 'PROPERLY'] +5442-41168-0013-1398: ref=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0013-1398: hyp=['LEVIN', 'ADVANCED', 'BUT', 'UTTERLY', 'FORGETTING', 'WHAT', 'HE', 'WAS', 'TO', 'DO', 'AND', 'MUCH', 'EMBARRASSED', 'HE', 'TURNED', 'TO', 'SERGEY', 'IVANOVITCH', 'WITH', 'THE', 'QUESTION', 'WHERE', 'AM', 'I', 'TO', 'PUT', 'IT'] +5442-41168-0014-1399: ref=['SERGEY', 'IVANOVITCH', 'FROWNED'] +5442-41168-0014-1399: hyp=['SOJOURNOVITCH', 'FROWNED'] +5442-41168-0015-1400: ref=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0015-1400: hyp=['THAT', 'IS', 'A', 'MATTER', 'FOR', 'EACH', "MAN'S", 'OWN', 'DECISION', 'HE', 'SAID', 'SEVERELY'] +5442-41168-0016-1401: ref=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'IN', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0016-1401: hyp=['HAVING', 'PUT', 'IT', 'IN', 'HE', 'RECOLLECTED', 'THAT', 'HE', 'OUGHT', 'TO', 'HAVE', 'THRUST', 'HIS', 
'LEFT', 'HAND', 'TOO', 'AND', 'SO', 'HE', 'THRUST', 'IT', 'THOUGH', 'TOO', 'LATE', 'AND', 'STILL', 'MORE', 'OVERCOME', 'WITH', 'CONFUSION', 'HE', 'BEAT', 'A', 'HASTY', 'RETREAT', 'INTO', 'THE', 'BACKGROUND'] +5442-41168-0017-1402: ref=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0017-1402: hyp=['A', 'HUNDRED', 'AND', 'TWENTY', 'SIX', 'FOR', 'ADMISSION', 'NINETY', 'EIGHT', 'AGAINST'] +5442-41168-0018-1403: ref=['SANG', 'OUT', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0018-1403: hyp=['SANG', 'ALL', 'THE', 'VOICE', 'OF', 'THE', 'SECRETARY', 'WHO', 'COULD', 'NOT', 'PRONOUNCE', 'THE', 'LETTER', 'R'] +5442-41168-0019-1404: ref=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'A', 'BUTTON', 'AND', 'TWO', 'NUTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] +5442-41168-0019-1404: hyp=['THEN', 'THERE', 'WAS', 'A', 'LAUGH', 'OF', 'BOTTOM', 'AND', 'TWO', 'KNOTS', 'WERE', 'FOUND', 'IN', 'THE', 'BOX'] +5442-41168-0020-1405: ref=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0020-1405: hyp=['BUT', 'THE', 'OLD', 'PARTY', 'DID', 'NOT', 'CONSIDER', 'THEMSELVES', 'CONQUERED'] +5442-41168-0021-1406: ref=['IN', 'REPLY', 'SNETKOV', 'SPOKE', 'OF', 'THE', 'TRUST', 'THE', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'IN', 'HIM', 'THE', 'AFFECTION', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0021-1406: hyp=['INTERPLIES', 'NEDCOV', 'SPOKE', 'OF', 'THE', 'TRUST', 'AND', 'NOBLEMEN', 'OF', 'THE', 'PROVINCE', 'HAD', 'PLACED', 'IN', 'HIM', 'THE', 'EFFECT', 'ON', 'THEY', 'HAD', 'SHOWN', 'HIM', 'WHICH', 'HE', 'DID', 'NOT', 'DESERVE', 'AS', 'HIS', 'ONLY', 'MERIT', 'HAD', 'BEEN', 'HIS', 'ATTACHMENT', 'TO', 'THE', 'NOBILITY', 'TO', 'WHOM', 'HE', 'HAD', 'DEVOTED', 'TWELVE', 'YEARS', 'OF', 'SERVICE'] +5442-41168-0022-1407: ref=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'BEFORE', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTEE', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0022-1407: hyp=['THIS', 'EXPRESSION', 'IN', 'THE', "MARSHAL'S", 'FACE', 'WAS', 'PARTICULARLY', 'TOUCHING', 'TO', 'LEVIN', 'BECAUSE', 'ONLY', 'THE', 'DAY', 'FOR', 'HE', 'HAD', 'BEEN', 'AT', 'HIS', 'HOUSE', 'ABOUT', 'HIS', 'TRUSTY', 'BUSINESS', 'AND', 'HAD', 'SEEN', 'HIM', 'IN', 'ALL', 'HIS', 'GRANDEUR', 'A', 'KIND', 'HEARTED', 'FATHERLY', 'MAN'] +5442-41168-0023-1408: ref=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0023-1408: hyp=['IF', 'THERE', 'ARE', 'MEN', 'YOUNGER', 'AND', 'MORE', 'DESERVING', 'THAN', 'I', 'LET', 'THEM', 'SERVE'] +5442-41168-0024-1409: ref=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0024-1409: hyp=['AND', 'THE', 'MARSHAL', 'DISAPPEARED', 'THROUGH', 'A', 'SIDE', 'DOOR'] +5442-41168-0025-1410: ref=['THEY', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0025-1410: hyp=['THERE', 'WERE', 'TO', 'PROCEED', 'IMMEDIATELY', 'TO', 'THE', 'ELECTION'] +5442-41168-0026-1411: ref=['TWO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'FOR', 
'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'A', 'THIRD', 'HAD', 'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0026-1411: hyp=['DO', 'NOBLE', 'GENTLEMEN', 'WHO', 'HAD', 'A', 'WEAKNESS', 'FOR', 'STRONG', 'DRINK', 'HAD', 'BEEN', 'MADE', 'DRUNK', 'BY', 'THE', 'PARTISANS', 'OF', 'SNETKOV', 'AND', 'THE', 'THIRD', 'HAD', 'BEEN', 'ROBBED', 'OF', 'HIS', 'UNIFORM'] +5442-41168-0027-1412: ref=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLEROV', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41168-0027-1412: hyp=['ON', 'LEARNING', 'THIS', 'THE', 'NEW', 'PARTY', 'HAD', 'MADE', 'HASTE', 'DURING', 'THE', 'DISPUTE', 'ABOUT', 'FLAREFF', 'TO', 'SEND', 'SOME', 'OF', 'THEIR', 'MEN', 'IN', 'A', 'SLEDGE', 'TO', 'CLOTHE', 'THE', 'STRIPPED', 'GENTLEMAN', 'AND', 'TO', 'BRING', 'ALONG', 'ONE', 'OF', 'THE', 'INTOXICATED', 'TO', 'THE', 'MEETING'] +5442-41169-0000-1413: ref=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SERGEY', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'HIS', "EQUERRY'S", 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0000-1413: hyp=['LEVIN', 'DID', 'NOT', 'CARE', 'TO', 'EAT', 'AND', 'HE', 'WAS', 'NOT', 'SMOKING', 'HE', 'DID', 'NOT', 'WANT', 'TO', 'JOIN', 'HIS', 'OWN', 'FRIENDS', 'THAT', 'IS', 'SOJI', 'IVANOVITCH', 'STEPAN', 'ARKADYEVITCH', 'SVIAZHSKY', 'AND', 'THE', 'REST', 'BECAUSE', 'VRONSKY', 'IN', 'AN', 'EQUERRIES', 'UNIFORM', 'WAS', 'STANDING', 'WITH', 'THEM', 'IN', 'EAGER', 'CONVERSATION'] +5442-41169-0001-1414: ref=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0001-1414: hyp=['HE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'SAT', 'DOWN', 'SCANNING', 'THE', 'GROUPS', 'AND', 'LISTENING', 'TO', 'WHAT', 'WAS', 'BEING', 'SAID', 'AROUND', 'HIM'] +5442-41169-0002-1415: ref=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0002-1415: hyp=["HE'S", 'SUCH', 'A', 'BLACKGUARD'] +5442-41169-0003-1416: ref=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0003-1416: hyp=['I', 'HAVE', 'TOLD', 'HIM', 'SO', 'BUT', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'ONLY', 'THINK', 'OF', 'IT'] +5442-41169-0004-1417: ref=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0004-1417: hyp=['THESE', 'PERSONS', 'WERE', 'UNMISTAKABLY', 'SEEKING', 'A', 'PLACE', 'WHERE', 'THEY', 'COULD', 'TALK', 'WITHOUT', 'BEING', 'OVERHEARD'] +5442-41169-0005-1418: ref=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0005-1418: hyp=['SHALL', 'WE', 'GO', 'ON', 'YOUR', 'EXCELLENCY', 'FINE', 'CHAMPAGNE'] +5442-41169-0006-1419: ref=['LAST', 'YEAR', 'AT', 'OUR', 'DISTRICT', 'MARSHAL', 'NIKOLAY', "IVANOVITCH'S"] +5442-41169-0006-1419: hyp=['MASTER', 'AT', 'OUR', 'DISTRICT', 'MARTIAL', 'NIKOLAY', "IVANOVITCH'S"] +5442-41169-0007-1420: ref=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 
'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0007-1420: hyp=['OH', 'STILL', 'JUST', 'THE', 'SAME', 'ALWAYS', 'AT', 'A', 'LOSS', 'THE', 'LANDOWNER', 'ANSWERED', 'WITH', 'A', 'RESIGNED', 'SMILE', 'BUT', 'WITH', 'AN', 'EXPRESSION', 'OF', 'SERENITY', 'AND', 'CONVICTION', 'THAT', 'SO', 'IT', 'MUST', 'BE'] +5442-41169-0008-1421: ref=['WHY', 'WHAT', 'IS', 'THERE', 'TO', 'UNDERSTAND'] +5442-41169-0008-1421: hyp=['WHY', 'WHAT', 'IS', 'THAT', 'TO', 'UNDERSTAND'] +5442-41169-0009-1422: ref=["THERE'S", 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0009-1422: hyp=['THERE', 'IS', 'NO', 'MEANING', 'IN', 'IT', 'AT', 'ALL'] +5442-41169-0010-1423: ref=['THEN', 'TOO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0010-1423: hyp=['THEN', 'DO', 'ONE', 'MUST', 'KEEP', 'UP', 'CONNECTIONS'] +5442-41169-0011-1424: ref=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] +5442-41169-0011-1424: hyp=["IT'S", 'A', 'MORAL', 'OBLIGATION', 'OF', 'A', 'SORT'] +5442-41169-0012-1425: ref=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTERESTS'] +5442-41169-0012-1425: hyp=['AND', 'THEN', 'TO', 'TELL', 'THE', 'TRUTH', "THERE'S", "ONE'S", 'OWN', 'INTEREST'] +5442-41169-0013-1426: ref=["THEY'RE", 'PROPRIETORS', 'OF', 'A', 'SORT', 'BUT', "WE'RE", 'THE', 'LANDOWNERS'] +5442-41169-0013-1426: hyp=['THEIR', 'PROPRIETORS', 'OF', 'A', 'SORT', 'BUT', 'WE', 'ARE', 'THE', 'LANDOWNERS'] +5442-41169-0014-1427: ref=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0014-1427: hyp=['THAT', 'IT', 'MAY', 'BE', 'BUT', 'STILL', 'IT', 'OUGHT', 'TO', 'BE', 'TREATED', 'A', 'LITTLE', 'MORE', 'RESPECTFULLY'] +5442-41169-0015-1428: ref=['IF', "WE'RE", 'LAYING', 'OUT', 'A', 'GARDEN', 'PLANNING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', "YOU'VE", 'A', 'TREE', "THAT'S", 'STOOD', 'FOR', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWERBEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0015-1428: hyp=['IF', 'WE', 'ARE', 'LAYING', 'OUT', 'A', 'GARDEN', 'PLANNING', 'ONE', 'BEFORE', 'THE', 'HOUSE', 'YOU', 'KNOW', 'AND', 'THERE', 'YOU', 'HAVE', 'A', 'TREE', 'THAT', 'STOOD', 'IN', 'CENTURIES', 'IN', 'THE', 'VERY', 'SPOT', 'OLD', 'AND', 'GNARLED', 'IT', 'MAY', 'BE', 'AND', 'YET', 'YOU', "DON'T", 'CUT', 'DOWN', 'THE', 'OLD', 'FELLOW', 'TO', 'MAKE', 'ROOM', 'FOR', 'THE', 'FLOWER', 'BEDS', 'BUT', 'LAY', 'OUT', 'YOUR', 'BEDS', 'SO', 'AS', 'TO', 'TAKE', 'ADVANTAGE', 'OF', 'THE', 'TREE'] +5442-41169-0016-1429: ref=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0016-1429: hyp=['WELL', 'AND', 'HOW', 'IS', 'YOUR', 'LAND', 'DOING'] +5442-41169-0017-1430: ref=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0017-1430: hyp=['BUT', "ONE'S", 'WORK', 'IS', 'THROWN', 'IN', 'FOR', 'NOTHING'] +5442-41169-0018-1431: ref=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0018-1431: hyp=['OH', 'WELL', 'ONE', 'DOES', 'IT', 'WHAT', 'WOULD', 'YOU', 'HAVE'] +5442-41169-0019-1432: ref=['AND', "WHAT'S", 'MORE', 'THE', 'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 
'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0019-1432: hyp=['AND', 'ONCE', 'MORE', 'THE', 'LANDOWNER', 'WENT', 'ON', 'LEANING', 'HIS', 'ELBOWS', 'ON', 'THE', 'WINDOW', 'AND', 'CHATTING', 'ON', 'MY', 'SON', 'I', 'MUST', 'TELL', 'YOU', 'HAS', 'NO', 'TASTE', 'FOR', 'IT'] +5442-41169-0020-1433: ref=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0020-1433: hyp=['SO', "THERE'LL", 'BE', 'NO', 'ONE', 'TO', 'KEEP', 'IT', 'UP', 'AND', 'YET', 'ONE', 'DOES', 'IT'] +5442-41169-0021-1434: ref=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'VASSILIEVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', "GARDEN'S", 'NEGLECTED'] +5442-41169-0021-1434: hyp=['WE', 'WALKED', 'ABOUT', 'THE', 'FIELDS', 'AND', 'THE', 'GARDEN', 'NO', 'SAID', 'HE', 'STEPAN', 'WISLOVITCH', "EVERYTHING'S", 'WELL', 'LOOKED', 'AFTER', 'BUT', 'YOUR', 'GARDENS', 'NEGLECTED'] +5442-41169-0022-1435: ref=['TO', 'MY', 'THINKING', "I'D", 'CUT', 'DOWN', 'THAT', 'LIME', 'TREE'] +5442-41169-0022-1435: hyp=['TO', 'MY', 'THINKING', "I'D", 'GOT', 'DOWN', 'THE', 'LINE', 'TREE'] +5442-41169-0023-1436: ref=['HERE', "YOU'VE", 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0023-1436: hyp=['HERE', 'YOU', 'HAVE', 'THOUSANDS', 'OF', 'LIMES', 'AND', 'EACH', 'WOULD', 'MAKE', 'TWO', 'GOOD', 'BUNDLES', 'OF', 'BARK'] +5442-41169-0024-1437: ref=["YOU'RE", 'MARRIED', "I'VE", 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0024-1437: hyp=["YOU'RE", 'MARRIED', 'I', 'HEARD', 'SAID', 'THE', 'LANDOWNER'] +5442-41169-0025-1438: ref=['YES', "IT'S", 'RATHER', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0025-1438: hyp=['YES', 'AND', 'JOHN', 'IS', 'STRANGE', 'HE', 'WENT', 'ON'] +5442-41169-0026-1439: ref=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MUSTACHES'] +5442-41169-0026-1439: hyp=['THE', 'LANDOWNER', 'CHUCKLED', 'UNDER', 'HIS', 'WHITE', 'MOUSTACHES'] +5442-41169-0027-1440: ref=['WHY', "DON'T", 'WE', 'CUT', 'DOWN', 'OUR', 'PARKS', 'FOR', 'TIMBER'] +5442-41169-0027-1440: hyp=['WHY', "DON'T", 'WE', 'GO', 'DOWN', 'OUR', 'BOX', 'FOR', 'TIMBER'] +5442-41169-0028-1441: ref=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0028-1441: hyp=['SAID', 'LEVIN', 'RETURNING', 'TO', 'A', 'THOUGHT', 'THAT', 'HAD', 'STRUCK', 'HIM'] +5442-41169-0029-1442: ref=["THERE'S", 'A', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', "OUGHTN'T", 'TO', 'DO'] +5442-41169-0029-1442: hyp=["THERE'S", 'THE', 'CLASS', 'INSTINCT', 'TOO', 'OF', 'WHAT', 'ONE', 'OUGHT', 'AND', 'OUGHT', 'NOT', 'KNOWN', 'TO', 'DO'] +5442-41169-0030-1443: ref=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0030-1443: hyp=["THERE'S", 'THE', 'PEASANTS', 'TOO', 'I', 'WONDER', 'AT', 'THEM', 'SOMETIMES', 'ANY', 'GOOD', 'PEASANT', 'TRIES', 'TO', 'TAKE', 'ALL', 'THE', 'LAND', 'HE', 'CAN'] +5442-41169-0031-1444: ref=['WITHOUT', 'A', 'RETURN', 'TOO', 'AT', 'A', 'SIMPLE', 'LOSS'] +5442-41169-0031-1444: hyp=['WITHOUT', 'A', 'RETURN', 'TOO', 'ADD', 'A', 'SIMPLE', 'LOSS'] +5484-24317-0000-571: ref=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLUS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0000-571: hyp=['WHEN', 'HE', 'CAME', 'FROM', 'THE', 'BATH', 'PROCLASS', 'VISITED', 'HIM', 'AGAIN'] +5484-24317-0001-572: 
ref=['BUT', 'HERMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'IN', 'HONOUR', 'OF', 'THE', 'SEVENTIETH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANOR'] +5484-24317-0001-572: hyp=['BUT', 'HARMON', 'WAS', 'NOT', 'IN', 'THE', 'MOOD', 'TO', 'SHARE', 'A', 'JOYOUS', 'REVEL', 'AND', 'HE', 'FRANKLY', 'SAID', 'SO', 'ALTHOUGH', 'IMMEDIATELY', 'AFTER', 'HIS', 'RETURN', 'HE', 'HAD', 'ACCEPTED', 'THE', 'INVITATION', 'TO', 'THE', 'FESTIVAL', 'WHICH', 'THE', 'WHOLE', 'FELLOWSHIP', 'OF', 'ARTISTS', 'WOULD', 'GIVE', 'THE', 'FOLLOWING', 'DAY', 'AN', 'HONOUR', 'OF', 'THE', 'SEVENTEENTH', 'BIRTHDAY', 'OF', 'THE', 'OLD', 'SCULPTOR', 'EUPHRANER'] +5484-24317-0002-573: ref=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0002-573: hyp=['SHE', 'WOULD', 'APPEAR', 'HERSELF', 'AT', 'DESSERT', 'AND', 'THE', 'BANQUET', 'MUST', 'THEREFORE', 'BEGIN', 'AT', 'AN', 'UNUSUALLY', 'EARLY', 'HOUR'] +5484-24317-0003-574: ref=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0003-574: hyp=['SO', 'THE', 'ARTIST', 'FOUND', 'HIMSELF', 'OBLIGED', 'TO', 'RELINQUISH', 'HIS', 'OPPOSITION'] +5484-24317-0004-575: ref=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0004-575: hyp=['THE', 'BANQUET', 'WAS', 'TO', 'BEGIN', 'IN', 'A', 'FEW', 'HOURS', 'YET', 'HE', 'COULD', 'NOT', 'LET', 'THE', 'DAY', 'PASS', 'WITHOUT', 'SEEING', 'DAPHNE', 'AND', 'TELLING', 'HER', 'THE', 'WORDS', 'OF', 'THE', 'ORACLE'] +5484-24317-0005-576: ref=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0005-576: hyp=['HE', 'LONGED', 'WITH', 'ARDENT', 'YEARNING', 'FOR', 'THE', 'SOUND', 'OF', 'HER', 'VOICE', 'AND', 'STILL', 'MORE', 'TO', 'UNBURDEN', 'HIS', 'SORELY', 'TROUBLED', 'SOUL', 'TO', 'HER'] +5484-24317-0006-577: ref=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUED', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MYRTILUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0006-577: hyp=['SINCE', 'HIS', 'RETURN', 'FROM', 'THE', 'ORACLE', 'THE', 'FEAR', 'THAT', 'THE', 'RESCUE', 'DEMETER', 'MIGHT', 'YET', 'BE', 'THE', 'WORK', 'OF', 'MERTOLUS', 'HAD', 'AGAIN', 'MASTERED', 'HIM'] +5484-24317-0007-578: ref=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'THE', 'DOUBTS', 'WHICH', 'IT', 'AROUSED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0007-578: hyp=['THE', 'APPROVAL', 'AS', 'WELL', 'AS', 'A', 'DOUBTS', 'WHICH', 'IT', 'ARISED', 'IN', 'OTHERS', 'STRENGTHENED', 'HIS', 'OPINION', 'ALTHOUGH', 'EVEN', 'NOW', 'HE', 'COULD', 'NOT', 'SUCCEED', 'IN', 'BRINGING', 'IT', 'INTO', 'HARMONY', 'WITH', 'THE', 'FACTS'] +5484-24317-0008-579: ref=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBOURING', 
'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] +5484-24317-0008-579: hyp=['THEN', 'HE', 'WENT', 'DIRECTLY', 'TO', 'THE', 'NEIGHBORING', 'PALACE', 'THE', 'QUEEN', 'MIGHT', 'HAVE', 'APPEARED', 'ALREADY', 'AND', 'IT', 'WOULD', 'NOT', 'DO', 'TO', 'KEEP', 'HER', 'WAITING'] +5484-24317-0009-580: ref=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0009-580: hyp=['HITHERTO', 'THE', 'MERCHANT', 'HAD', 'BEEN', 'INDUCED', 'IT', 'IS', 'TRUE', 'TO', 'ADVANCE', 'LARGE', 'SUMS', 'OF', 'MONEY', 'TO', 'THE', 'QUEEN', 'BUT', 'THE', 'LOYAL', 'DEVOTION', 'WHICH', 'HE', 'SHOWED', 'TO', 'HER', 'ROYAL', 'HUSBAND', 'HAD', 'RENDERED', 'IT', 'IMPOSSIBLE', 'TO', 'GIVE', 'HIM', 'EVEN', 'A', 'HINT', 'OF', 'THE', 'CONSPIRACY'] +5484-24317-0010-581: ref=['WHEN', 'HERMON', 'ENTERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATEUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0010-581: hyp=['WHEN', 'HERMAN', 'ANSWERED', 'THE', 'RESIDENCE', 'OF', 'THE', 'GRAMMATIUS', 'IN', 'THE', 'PALACE', 'THE', 'GUESTS', 'HAD', 'ALREADY', 'ASSEMBLED'] +5484-24317-0011-582: ref=['THE', 'PLACE', 'BY', "HERMON'S", 'SIDE', 'WHICH', 'ALTHEA', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSINOE'] +5484-24317-0011-582: hyp=['THEY', 'PLACED', 'BY', "HERMANN'S", 'SIDE', 'WHICH', 'ALTHIE', 'HAD', 'CHOSEN', 'FOR', 'HERSELF', 'WOULD', 'THEN', 'BE', 'GIVEN', 'UP', 'TO', 'ARSENO'] +5484-24317-0012-583: ref=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'HIS', 'BLINDNESS', 'DEBARRED', 'HIM'] +5484-24317-0012-583: hyp=['TRUE', 'AN', 'INTERESTING', 'CONVERSATION', 'STILL', 'HAD', 'POWER', 'TO', 'CHARM', 'HIM', 'BUT', 'OFTEN', 'DURING', 'ITS', 'CONTINUANCE', 'THE', 'FULL', 'CONSCIOUSNESS', 'OF', 'HIS', 'MISFORTUNE', 'FORCED', 'ITSELF', 'UPON', 'HIS', 'MIND', 'FOR', 'THE', 'MAJORITY', 'OF', 'THE', 'SUBJECTS', 'DISCUSSED', 'BY', 'THE', 'ARTISTS', 'CAME', 'TO', 'THEM', 'THROUGH', 'THE', 'MEDIUM', 'OF', 'SIGHT', 'AND', 'REFERRED', 'TO', 'NEW', 'CREATIONS', 'OF', 'ARCHITECTURE', 'SCULPTURE', 'AND', 'PAINTING', 'FROM', 'WHOSE', 'ENJOYMENT', 'IS', 'BLINDNESS', 'DEBARED', 'HIM'] +5484-24317-0013-584: ref=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELT', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TIE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0013-584: hyp=['A', 'STRANGER', 'OUT', 'OF', 'HIS', 'OWN', 'SPHERE', 'HE', 'FELL', 'CHILLED', 'AMONG', 'THESE', 'CLOSELY', 'UNITED', 'MEN', 'AND', 'WOMEN', 'TO', 'WHOM', 'NO', 'TIE', 'BOUND', 'HIM', 'SAVE', 'THE', 'PRESENCE', 'OF', 'THE', 'SAME', 'HOST'] +5484-24317-0014-585: 
ref=['CRATES', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 'CHARMING', 'FAIR', 'HAIRED', 'NICO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSINOE'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0014-585: hyp=['CREEDS', 'HAD', 'REALLY', 'BEEN', 'INVITED', 'IN', 'ORDER', 'TO', 'WIN', 'HIM', 'OVER', 'TO', 'THE', "QUEEN'S", 'CAUSE', 'BUT', 'CHARMING', 'FAIR', 'HAIRED', 'NIGO', 'HAD', 'BEEN', 'COMMISSIONED', 'BY', 'THE', 'CONSPIRATORS', 'TO', 'PERSUADE', 'HIM', 'TO', 'SING', "ARSENO'S", 'PRAISES', 'AMONG', 'HIS', 'PROFESSIONAL', 'ASSOCIATES'] +5484-24317-0015-586: ref=['HIS', 'SON', 'HAD', 'BEEN', 'THIS', 'ROYAL', "DAME'S", 'FIRST', 'HUSBAND', 'AND', 'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LYSIMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0015-586: hyp=['HIS', 'SON', 'HAD', 'BEEN', 'THE', 'ROYAL', "JAMES'S", 'FIRST', 'HUSBAND', 'AND', 'SHE', 'HAD', 'DESERTED', 'HIM', 'TO', 'MARRY', 'LISSMACHUS', 'THE', 'AGED', 'KING', 'OF', 'THRACE'] +5484-24317-0016-587: ref=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HERMON', 'INCREDULOUSLY'] +5484-24317-0016-587: hyp=['THE', "KING'S", 'SISTER', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'CRIED', 'HARMON', 'INCREDULOUSLY'] +5484-24317-0017-588: ref=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECHES', 'AND', 'TIRING', 'WOMEN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0017-588: hyp=['WE', 'WOMEN', 'ARE', 'ONLY', 'AS', 'OLD', 'AS', 'WE', 'LOOK', 'AND', 'THE', 'LEECH', 'HAS', 'ENTIRE', 'AND', 'WOMAN', 'OF', 'THIS', 'BEAUTY', 'OF', 'FORTY', 'PRACTISE', 'ARTS', 'WHICH', 'GIVE', 'HER', 'THE', 'APPEARANCE', 'OF', 'TWENTY', 'FIVE', 'YET', 'PERHAPS', 'THE', 'KING', 'VALUES', 'HER', 'INTELLECT', 'MORE', 'THAN', 'HER', 'PERSON', 'AND', 'THE', 'WISDOM', 'OF', 'A', 'HUNDRED', 'SERPENTS', 'IS', 'CERTAINLY', 'UNITED', 'IN', 'THIS', "WOMAN'S", 'HEAD'] +5484-24317-0018-589: ref=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'ARE', 'HERE', 'AMYNTAS', 'THE', 'LEECH', 'CHRYSIPPUS', 'AND', 'THE', 'ADMIRABLE', 'PROCLUS'] +5484-24317-0018-589: hyp=['THE', 'THREE', 'MOST', 'TRUSTWORTHY', 'ONES', 'I', 'HEAR', 'I', 'MEANTIS', 'THE', 'LEECH', 'CHRYSIPPUS', 'IN', 'THE', 'ADMIRABLE', 'PROCLASS'] +5484-24317-0019-590: ref=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOUR', 'LEAVED', 'ONE'] +5484-24317-0019-590: hyp=['LET', 'US', 'HOPE', 'THAT', 'YOU', 'WILL', 'MAKE', 'THIS', 'THREE', 'LEAVED', 'CLOVER', 'THE', 'LUCK', 'PROMISING', 'FOLIEVED', 'ONE'] +5484-24317-0020-591: ref=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISEWORTHY', 'GENEROSITY', 'HELPED', 'ARSINOE', 'IN', 'MANY', 'AN', 'EMBARRASSMENT'] +5484-24317-0020-591: hyp=['YOUR', 'UNCLE', 'TOO', 'HAS', 'OFTEN', 'WITH', 'PRAISED', 'WORTHY', 'GENEROSITY', 'HELPED', 'AUSTENO', 'IN', 'MANY', 'EMBARRASSMENT'] +5484-24317-0021-592: ref=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FOR', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 
'DELIGHT'] +5484-24317-0021-592: hyp=['HOW', 'LONG', 'HE', 'KEPT', 'YOU', 'WAITING', 'FROM', 'THE', 'FIRST', 'WORD', 'CONCERNING', 'A', 'WORK', 'WHICH', 'JUSTLY', 'TRANSPORTED', 'THE', 'WHOLE', 'CITY', 'WITH', 'DELIGHT'] +5484-24317-0022-593: ref=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0022-593: hyp=['WHEN', 'HE', 'DID', 'FINALLY', 'SUMMON', 'YOU', 'HE', 'SAID', 'THINGS', 'WHICH', 'MUST', 'HAVE', 'WOUNDED', 'YOU'] +5484-24317-0023-594: ref=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HERMON'] +5484-24317-0023-594: hyp=['THAT', 'IS', 'GOING', 'TOO', 'FAR', 'REPLIED', 'HARMON'] +5484-24317-0024-595: ref=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSINOE', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYES', 'HAD', 'BEEN', 'MADE', 'IN', 'RHODES'] +5484-24317-0024-595: hyp=['HE', 'WINKED', 'AT', 'HER', 'AND', 'MADE', 'A', 'SIGNIFICANT', 'GESTURE', 'AS', 'HE', 'SPOKE', 'AND', 'THEN', 'INFORMED', 'THE', 'BLIND', 'ARTIST', 'HOW', 'GRACIOUSLY', 'ARSENO', 'HAD', 'REMEMBERED', 'HIM', 'WHEN', 'SHE', 'HEARD', 'OF', 'THE', 'REMEDY', 'BY', 'WHOSE', 'AID', 'MANY', 'A', 'WONDERFUL', 'CURE', 'OF', 'BLIND', 'EYE', 'HAD', 'BEEN', 'MADE', 'IN', 'ROADS'] +5484-24317-0025-596: ref=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHEA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0025-596: hyp=['THE', 'ROYAL', 'LADY', 'HAD', 'INQUIRED', 'ABOUT', 'HIM', 'AND', 'HIS', 'SUFFERINGS', 'WITH', 'ALMOST', 'SISTERLY', 'INTEREST', 'AND', 'ALTHEA', 'EAGERLY', 'CONFIRMED', 'THE', 'STATEMENT'] +5484-24317-0026-597: ref=['HERMON', 'LISTENED', 'TO', 'THE', 'PAIR', 'IN', 'SILENCE'] +5484-24317-0026-597: hyp=['HERMAN', 'LISTENED', 'TO', 'THE', 'PARENT', 'SILENCE'] +5484-24317-0027-598: ref=['THE', 'RHODIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSINOE', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMON'S", 'FELLOW', 'STUDENT', 'ASKED', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WHICH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0027-598: hyp=['THE', 'RADIAN', 'WAS', 'JUST', 'BEGINNING', 'TO', 'PRAISE', 'ARSENAL', 'ALSO', 'AS', 'A', 'SPECIAL', 'FRIEND', 'AND', 'CONNOISSEUR', 'OF', 'THE', "SCULPTOR'S", 'ART', 'WHEN', 'CRATES', "HERMANN'S", 'FELLOW', 'STUDENT', 'ASKED', 'THE', 'BLIND', 'ARTIST', 'IN', 'BEHALF', 'OF', 'HIS', 'BEAUTIFUL', 'COMPANION', 'WHY', 'HIS', 'DEMETER', 'WAS', 'PLACED', 'UPON', 'A', 'PEDESTAL', 'WITCH', 'TO', 'OTHERS', 'AS', 'WELL', 'AS', 'HIMSELF', 'SEEMED', 'TOO', 'HIGH', 'FOR', 'THE', 'SIZE', 'OF', 'THE', 'STATUE'] +5484-24317-0028-599: ref=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 'HIM'] +5484-24317-0028-599: hyp=['YET', 'WHAT', 'MATTERED', 'IT', 'EVEN', 'IF', 'THESE', 'MISERABLE', 'PEOPLE', 'CONSIDERED', 'THEMSELVES', 'DECEIVED', 'AND', 'POINTED', 'THE', 'FINGER', 'OF', 'SCORN', 'AT', 
'HIM'] +5484-24317-0029-600: ref=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0029-600: hyp=['A', 'WOMAN', 'WHO', 'YEARNS', 'FOR', 'THE', 'REGARD', 'OF', 'ALL', 'MEN', 'AND', 'MAKES', 'LOVE', 'A', 'TOY', 'EASILY', 'LESSENS', 'THE', 'DEMANDS', 'SHE', 'IMPOSES', 'UPON', 'INDIVIDUALS'] +5484-24317-0030-601: ref=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHEA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HERMON'S", 'REGARD'] +5484-24317-0030-601: hyp=['ONLY', 'EVEN', 'THOUGH', 'LOVE', 'HAS', 'WHOLLY', 'DISAPPEARED', 'SHE', 'STILL', 'CLAIMS', 'CONSIDERATION', 'AND', 'ALTHIA', 'DID', 'NOT', 'WISH', 'TO', 'LOSE', "HARMON'S", 'REGARD'] +5484-24317-0031-602: ref=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 'AS', 'YOU', 'WERE', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0031-602: hyp=['HOW', 'INDIFFERENT', 'YOU', 'LOOK', 'BUT', 'I', 'TELL', 'YOU', 'HER', 'DEEP', 'BLUE', 'EYES', 'FLASHED', 'AS', 'SHE', 'SPOKE', 'THAT', 'SO', 'LONG', 'AS', 'YOU', 'WAS', 'STILL', 'A', 'GENUINE', 'CREATING', 'ARTIST', 'THE', 'CASE', 'WAS', 'DIFFERENT'] +5484-24317-0032-603: ref=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'ENSNARES', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0032-603: hyp=['THOUGH', 'SO', 'LOUD', 'A', 'DENIAL', 'IS', 'WRITTEN', 'ON', 'YOUR', 'FACE', 'I', 'PERSIST', 'IN', 'MY', 'CONVICTION', 'AND', 'THAT', 'NO', 'IDLE', 'DELUSION', 'AND', 'SNATHS', 'ME', 'I', 'CAN', 'PROVE'] +5484-24317-0033-604: ref=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24317-0033-604: hyp=['IT', 'WAS', 'NAY', 'IT', 'COULD', 'HAVE', 'BEEN', 'NOTHING', 'ELSE', 'THAT', 'VERY', 'SPIDER'] +5484-24318-0000-605: ref=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0000-605: hyp=['NOT', 'A', 'SOUND', 'IF', 'YOU', 'VALUE', 'YOUR', 'LIVES'] +5484-24318-0001-606: ref=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMON', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THE', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0001-606: hyp=['TO', 'OFFER', 'RESISTANCE', 'WOULD', 'HAVE', 'BEEN', 'MADNESS', 'FOR', 'EVEN', 'HERMANN', 'PERCEIVED', 'BY', 'THE', 'LOUD', 'CLANKING', 'OF', 'WEAPONS', 'AROUND', 'THEM', 'THEY', 'GREATLY', 'SUPERIOR', 'POWER', 'OF', 'THE', 'ENEMY', 'AND', 'THEY', 'WERE', 'ACTING', 'BY', 'THE', 'ORDERS', 'OF', 'THE', 'KING', 'TO', 'THE', 'PRISON', 'NEAR', 'THE', 'PLACE', 'OF', 'EXECUTION'] +5484-24318-0002-607: ref=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0002-607: hyp=['WAS', 'HE', 'TO', 'BE', 'LED', 'TO', 'THE', "EXECUTIONER'S", 'BLOCK'] +5484-24318-0003-608: ref=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0003-608: hyp=['WHAT', 'PLEASURE', 'HAD', 'LIFE', 'TO', 'OFFER', 'HIM', 
'THE', 'BLIND', 'MAN', 'WHO', 'WAS', 'ALREADY', 'DEAD', 'TO', 'HIS', 'ART'] +5484-24318-0004-609: ref=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 'THIS', 'SUDDEN', 'END', 'AS', 'A', 'BOON', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0004-609: hyp=['OUGHT', 'HE', 'NOT', 'TO', 'GREET', 'HIS', 'SUDDEN', 'END', 'AS', 'A', 'BOOM', 'FROM', 'THE', 'IMMORTALS'] +5484-24318-0005-610: ref=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0005-610: hyp=['DID', 'IT', 'NOT', 'SPARE', 'HIM', 'A', 'HUMILIATION', 'AS', 'GREAT', 'AND', 'PAINFUL', 'AS', 'COULD', 'BE', 'IMAGINED'] +5484-24318-0006-611: ref=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0006-611: hyp=['WHATEVER', 'MIGHT', 'AWAIT', 'HIM', 'HE', 'DESIRED', 'NO', 'BETTER', 'FATE'] +5484-24318-0007-612: ref=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMON', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0007-612: hyp=['IF', 'HE', 'HAD', 'PASSED', 'INTO', 'ANNIHILATION', 'HE', 'HERMAN', 'WISHED', 'TO', 'FOLLOW', 'HIM', 'THITHER', 'AND', 'ANNIHILATION', 'CERTAINLY', 'MEANT', 'REDEMPTION', 'FROM', 'PAIN', 'AND', 'MISERY'] +5484-24318-0008-613: ref=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'MYRTILUS', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'OF', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0008-613: hyp=['BUT', 'IF', 'HE', 'WERE', 'DESTINED', 'TO', 'MEET', 'HIS', 'BURTLES', 'AND', 'HIS', 'MOTHER', 'IN', 'THE', 'WORLD', 'BEYOND', 'THE', 'GRAVE', 'WHAT', 'HAD', 'HE', 'NOT', 'TO', 'TELL', 'THEM', 'HOW', 'SURE', 'HE', 'WAS', 'A', 'FINDING', 'A', 'JOYFUL', 'RECEPTION', 'THERE', 'FROM', 'BOTH'] +5484-24318-0009-614: ref=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0009-614: hyp=['THE', 'POWER', 'WHICH', 'DELIVERED', 'HIM', 'OVER', 'TO', 'DEATH', 'JUST', 'AT', 'THAT', 'MOMENT', 'WAS', 'NOT', 'NEMESIS', 'NO', 'IT', 'WAS', 'A', 'KINDLY', 'DEITY'] +5484-24318-0010-615: ref=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0010-615: hyp=['YET', 'IT', 'WAS', 'NO', 'ILLUSION', 'THAT', 'DECEIVED', 'HIM'] +5484-24318-0011-616: ref=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0011-616: hyp=['AGAIN', 'HE', 'HEARD', 'THE', 'BELOVED', 'VOICE', 'AND', 'THIS', 'TIME', 'IT', 'ADDRESSED', 'NOT', 'ONLY', 'HIM', 'BUT', 'WITH', 'THE', 'UTMOST', 'HASTE', 'THE', 'COMMANDER', 'OF', 'THE', 'SOLDIERS'] +5484-24318-0012-617: ref=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0012-617: hyp=['SOMETIMES', 'WITH', 'TOUCHING', 'ENTREATY', 'SOMETIMES', 'WITH', 'IMPERIOUS', 'COMMAND', 'SHE', 'PROTESTED', 'AFTER', 'GIVING', 'HIM', 'HER', 'NAME', 'THAT', 'THIS', 'MATTER', 'COULD', 'BE', 'NOTHING', 'BUT', 'AN', 'UNFORTUNATE', 'MISTAKE'] +5484-24318-0013-618: 
ref=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0013-618: hyp=['LASTLY', 'WITH', 'EARNEST', 'WARMTH', 'SHE', 'BESOUGHT', 'HIM', 'BEFORE', 'TAKING', 'THE', 'PRISONERS', 'AWAY', 'TO', 'PERMIT', 'HER', 'TO', 'SPEAK', 'TO', 'THE', 'COMMANDING', 'GENERAL', 'PHILIPPUS', 'HER', "FATHER'S", 'GUEST', 'WHO', 'SHE', 'WAS', 'CERTAIN', 'WAS', 'IN', 'THE', 'PALACE'] +5484-24318-0014-619: ref=['CRIED', 'HERMON', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOWED', 'THE', 'SOLDIER', 'WHOM', 'THE', 'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0014-619: hyp=['CRIED', 'HERMANN', 'IN', 'GRATEFUL', 'AGITATION', 'BUT', 'SHE', 'WOULD', 'NOT', 'LISTEN', 'TO', 'HIM', 'AND', 'FOLLOW', 'THE', 'SOLDIER', 'WHOM', 'THE', 'CAPTAIN', 'DETAILED', 'TO', 'GUIDE', 'HER', 'INTO', 'THE', 'PALACE'] +5484-24318-0015-620: ref=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0015-620: hyp=['TO', 'MORROW', 'YOU', 'SHALL', 'CONFESS', 'TO', 'ME', 'WHO', 'TREACHEROUSLY', 'DIRECTED', 'YOU', 'TO', 'THIS', 'DANGEROUS', 'PATH'] +5484-24318-0016-621: ref=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIPPUS', 'SILENCED', 'HER', 'WITH', 'THE', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0016-621: hyp=['DAPHNE', 'AGAIN', 'PLEADED', 'FOR', 'THE', 'LIBERATION', 'OF', 'THE', 'PRISONERS', 'BUT', 'PHILIP', 'WAS', 'SILENCED', 'HER', 'WITH', 'A', 'GRAVE', 'EXCLAMATION', 'THE', 'ORDER', 'OF', 'THE', 'KING'] +5484-24318-0017-622: ref=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'THE', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURN', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0017-622: hyp=['AS', 'SOON', 'AS', 'THE', 'CAPTIVE', 'ARTIST', 'WAS', 'ALONE', 'WITH', 'THE', 'WOMAN', 'HE', 'LOVED', 'HE', 'CLASPED', 'HER', 'HAND', 'POURING', 'FORTH', 'INCOHERENT', 'WORDS', 'OF', 'THE', 'MOST', 'ARDENT', 'GRATITUDE', 'AND', 'WHEN', 'HE', 'FELT', 'HER', 'WARMLY', 'RETURNED', 'THE', 'PRESSURE', 'HE', 'COULD', 'NOT', 'RESTRAIN', 'THE', 'DESIRE', 'TO', 'CLASP', 'HER', 'TO', 'HIS', 'HEART'] +5484-24318-0018-623: ref=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MENTAL', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0018-623: hyp=['IN', 'SPITE', 'OF', 'HIS', 'DEEP', 'MANTLE', 'DISTRESS', 'HE', 'COULD', 'HAVE', 'SHOUTED', 'ALOUD', 'IN', 'HIS', 'DELIGHT', 'AND', 'GRATITUDE'] +5484-24318-0019-624: ref=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PALAESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 
'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0019-624: hyp=['HE', 'MIGHT', 'NOW', 'HAVE', 'BEEN', 'PERMITTED', 'TO', 'BIND', 'FOREVER', 'TO', 'HIS', 'LIFE', 'THE', 'WOMAN', 'WHO', 'HAD', 'JUST', 'RESCUED', 'HIM', 'FROM', 'THE', 'GREATEST', 'DANGER', 'BUT', 'THE', 'CONFESSION', 'HE', 'MUST', 'MAKE', 'TO', 'HIS', 'FELLOW', 'ARTISTS', 'IN', 'THE', 'PELLESTRA', 'THE', 'FOLLOWING', 'MORNING', 'STILL', 'SEALED', 'HIS', 'LIPS', 'YET', 'IN', 'THIS', 'HOUR', 'HE', 'FELT', 'THAT', 'HE', 'WAS', 'UNITED', 'TO', 'HER', 'AND', 'OUGHT', 'NOT', 'TO', 'CONCEAL', 'WHAT', 'AWAITED', 'HIM', 'SO', 'OBEYING', 'A', 'STRONG', 'IMPULSE', 'HE', 'EXCLAIMED', 'YOU', 'KNOW', 'THAT', 'I', 'LOVE', 'YOU'] +5484-24318-0020-625: ref=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0020-625: hyp=['I', 'LOVE', 'YOU', 'AND', 'HAVE', 'LOVED', 'YOU', 'ALWAYS'] +5484-24318-0021-626: ref=['DAPHNE', 'EXCLAIMED', 'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0021-626: hyp=['JAPHANE', 'EXCLAIMED', 'TENDERLY', 'WHAT', 'MORE', 'IS', 'NEEDED'] +5484-24318-0022-627: ref=['BUT', 'HERMON', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0022-627: hyp=['BUT', 'HARMON', 'WITH', 'DROOPING', 'HEAD', 'MURMURED', 'TO', 'MORROW', 'I', 'SHALL', 'NO', 'LONGER', 'BE', 'WHAT', 'I', 'AM', 'NOW'] +5484-24318-0023-628: ref=['THEN', 'DAPHNE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMETER', 'IS', 'THE', 'WORK', 'OF', 'MYRTILUS'] +5484-24318-0023-628: hyp=['THEN', 'JAPANE', 'RAISED', 'HER', 'FACE', 'TO', 'HIS', 'ASKING', 'SO', 'THE', 'DEMEANOR', 'IS', 'THE', 'WORK', 'OF', 'MYRTOLIS'] +5484-24318-0024-629: ref=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0024-629: hyp=['WHAT', 'A', 'TERRIBLE', 'ORDEAL', 'AGAIN', 'AWAITS', 'YOU'] +5484-24318-0025-630: ref=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0025-630: hyp=['AND', 'I', 'FOOL', 'BLINDED', 'ALSO', 'IN', 'MIND', 'COULD', 'BE', 'VEXED', 'WITH', 'YOU', 'FOR', 'IT'] +5484-24318-0026-631: ref=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WHEN', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'AND', 'PALLIATE', 'NOTHING'] +5484-24318-0026-631: hyp=['BRING', 'THIS', 'BEFORE', 'YOUR', 'MIND', 'AND', 'EVERYTHING', 'ELSE', 'THAT', 'YOU', 'MUST', 'ACCEPT', 'WITH', 'IT', 'IF', 'YOU', 'CONSENT', 'WITH', 'THE', 'TIME', 'ARRIVES', 'TO', 'BECOME', 'MINE', 'CONCEAL', 'IN', 'PALE', 'YET', 'NOTHING'] +5484-24318-0027-632: ref=['SO', 'ARCHIAS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0027-632: hyp=['SARKAIUS', 'INTENDED', 'TO', 'LEAVE', 'THE', 'CITY', 'ON', 'ONE', 'OF', 'HIS', 'OWN', 'SHIPS', 'THAT', 'VERY', 'DAY'] +5484-24318-0028-633: ref=['HE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0028-633: hyp=['SHE', 'HIMSELF', 'ON', 'THE', 'WAY', 'TO', 'EXPOSE', 'HIMSELF', 'TO', 'THE', 'MALICE', 'AND', 'MOCKERY', 'OF', 'THE', 'WHOLE', 'CITY'] +5484-24318-0029-634: ref=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', 
"UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'AND', 'HEADED', 'BY', 'AMYNTAS', 'THE', 'RHODIAN', 'CHRYSIPPUS', 'AND', 'PROCLUS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0029-634: hyp=['HIS', 'HEART', 'CONTRACTED', 'PAINFULLY', 'AND', 'HIS', 'SOLICITUDE', 'ABOUT', 'HIS', "UNCLE'S", 'FATE', 'INCREASED', 'WHEN', 'PHILIPPUS', 'INFORMED', 'HIM', 'THAT', 'THE', 'CONSPIRATORS', 'HAD', 'BEEN', 'ARRESTED', 'AT', 'THE', 'BANQUET', 'AND', 'HEADED', 'BY', 'A', 'MEANTESSE', 'THE', 'RODIAN', 'CHRYSIPPUS', 'AND', 'PROCLAUS', 'HAD', 'PERISHED', 'BY', 'THE', "EXECUTIONER'S", 'SWORD', 'AT', 'SUNRISE'] +5484-24318-0030-635: ref=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0030-635: hyp=['BESIDES', 'HE', 'KNEW', 'THAT', 'THE', 'OBJECT', 'OF', 'HIS', 'LOVE', 'WOULD', 'NOT', 'PART', 'FROM', 'HIM', 'WITHOUT', 'GRANTING', 'HIM', 'ONE', 'LAST', 'WORD'] +5484-24318-0031-636: ref=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0031-636: hyp=['ON', 'THE', 'WAY', 'HIS', 'HEART', 'THROBBED', 'ALMOST', 'TO', 'BURSTING'] +5484-24318-0032-637: ref=['EVEN', "DAPHNE'S", 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'RECEDED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0032-637: hyp=['EVEN', 'THESE', 'IMAGE', 'AND', 'WHAT', 'THREATENED', 'HER', 'FATHER', 'AND', 'HER', 'WITH', 'HIM', 'WAS', 'SEATED', 'FAR', 'INTO', 'THE', 'BACKGROUND'] +5484-24318-0033-638: ref=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0033-638: hyp=['HE', 'WAS', 'APPEARING', 'BEFORE', 'HIS', 'COMPANIONS', 'ONLY', 'TO', 'GIVE', 'TRUTH', 'ITS', 'JUST', 'DUE'] +5484-24318-0034-639: ref=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FESTAL', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COURSING', 'SO', 'WILDLY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMON'] +5484-24318-0034-639: hyp=['THE', 'EGYPTIAN', 'OBEYED', 'AND', 'HIS', 'MASTER', 'CROSSED', 'THE', 'WIDE', 'SPACE', 'STREWN', 'WITH', 'SAND', 'AND', 'APPROACHED', 'THE', 'STAGE', 'WHICH', 'HAD', 'BEEN', 'ERECTED', 'FOR', 'THE', 'FEAST', 'OF', 'PERFORMANCES', 'EVEN', 'HAD', 'HIS', 'EYES', 'RETAINED', 'THE', 'POWER', 'OF', 'SIGHT', 'HIS', 'BLOOD', 'WAS', 'COARSING', 'SO', 'WIDELY', 'THROUGH', 'HIS', 'VEINS', 'THAT', 'HE', 'MIGHT', 'PERHAPS', 'HAVE', 'BEEN', 'UNABLE', 'TO', 'DISTINGUISH', 'THE', 'STATUES', 'AROUND', 'HIM', 'AND', 'THE', 'THOUSANDS', 'OF', 'SPECTATORS', 'WHO', 'CROWDED', 'CLOSELY', 'TOGETHER', 'RICHLY', 'GARLANDED', 'THEIR', 'CHEEKS', 'GLOWING', 'WITH', 'ENTHUSIASM', 'SURROUNDED', 'THE', 'ARENA', 'HERMANN'] +5484-24318-0035-640: ref=['SHOUTED', 'HIS', 'FRIEND', 'SOTELES', 'IN', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'THIS', 
'PAINFUL', 'WALK', 'HERMON'] +5484-24318-0035-640: hyp=['SHOUTED', 'HIS', 'FRIEND', 'SARTUOUS', 'AND', 'JOYFUL', 'SURPRISE', 'IN', 'THE', 'MIDST', 'OF', 'HIS', 'PAINFUL', 'WALK', 'HERE', 'ON'] +5484-24318-0036-641: ref=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0036-641: hyp=['EVEN', 'WHILE', 'HE', 'BELIEVED', 'HIMSELF', 'TO', 'BE', 'THE', 'CREATOR', 'OF', 'THE', 'DEMETER', 'HE', 'HAD', 'BEEN', 'SERIOUSLY', 'TROUBLED', 'BY', 'THE', 'PRAISE', 'OF', 'SO', 'MANY', 'CRITICS', 'BECAUSE', 'IT', 'HAD', 'EXPOSED', 'HIM', 'TO', 'THE', 'SUSPICION', 'OF', 'HAVING', 'BECOME', 'FAITHLESS', 'TO', 'HIS', 'ART', 'AND', 'HIS', 'NATURE'] +5484-24318-0037-642: ref=['HONOUR', 'TO', 'MYRTILUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FESTAL', 'ASSEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5484-24318-0037-642: hyp=['HONOUR', 'TO', 'MARTILLUS', 'AND', 'HIS', 'ART', 'BUT', 'HE', 'TRUSTED', 'THIS', 'NOBLE', 'FEAST', 'AN', 'ASSEMBLAGE', 'WOULD', 'PARDON', 'THE', 'UNINTENTIONAL', 'DECEPTION', 'AND', 'AID', 'HIS', 'PRAYER', 'FOR', 'RECOVERY'] +5764-299665-0000-405: ref=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMBS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'ON', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THIS', 'GOD', 'GAVE', 'RAIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0000-405: hyp=['AFTERWARD', 'IT', 'WAS', 'SUPPOSED', 'THAT', 'HE', 'WAS', 'SATISFIED', 'WITH', 'THE', 'BLOOD', 'OF', 'OXEN', 'LAMPS', 'AND', 'DOVES', 'AND', 'THAT', 'IN', 'EXCHANGE', 'FOR', 'OR', 'IN', 'ACCOUNT', 'OF', 'THESE', 'SACRIFICES', 'THESE', 'GOD', 'GAVE', 'REIN', 'SUNSHINE', 'AND', 'HARVEST'] +5764-299665-0001-406: ref=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0001-406: hyp=['WHETHER', 'HE', 'WAS', 'THE', 'CREATOR', 'OF', 'YOURSELF', 'AND', 'MYSELF'] +5764-299665-0002-407: ref=['WHETHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0002-407: hyp=['WEATHER', 'ANY', 'PRAYER', 'WAS', 'EVER', 'ANSWERED'] +5764-299665-0003-408: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUALLY', 'INFERIOR'] +5764-299665-0003-408: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'INTELLECTUAL', 'INFERIOR'] +5764-299665-0004-409: ref=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0004-409: hyp=['WHY', 'DID', 'HE', 'CREATE', 'THE', 'DEFORMED', 'AND', 'HELPLESS', 'WHY', 'DID', 'HE', 'CREATE', 'THE', 'CRIMINAL', 'THE', 'IDIOTIC', 'THE', 'INSANE'] +5764-299665-0005-410: ref=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0005-410: hyp=['ARE', 'THE', 'FAILURES', 'UNDER', 'OBLIGATION', 'TO', 'THEIR', 'CREATOR'] +5764-299665-0006-411: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'WAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0006-411: hyp=['HIS', 'IRRESPONSIBLE', 'FOR', 'ALL', 'THE', 'WARS', 'THAT', 'HAVE', 'BEEN', 'RAGED', 'FOR', 'ALL', 'THE', 'INNOCENT', 
'BLOOD', 'THAT', 'HAS', 'BEEN', 'SHED'] +5764-299665-0007-412: ref=['IS', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'THE', 'LASH', 'FOR', 'THE', 'BABES', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0007-412: hyp=['IF', 'HE', 'RESPONSIBLE', 'FOR', 'THE', 'CENTURIES', 'OF', 'SLAVERY', 'FOR', 'THE', 'BACKS', 'THAT', 'HAVE', 'BEEN', 'SCARRED', 'WITH', 'A', 'LASH', 'FOR', 'THE', 'BABE', 'THAT', 'HAVE', 'BEEN', 'SOLD', 'FROM', 'THE', 'BREASTS', 'OF', 'MOTHERS', 'FOR', 'THE', 'FAMILIES', 'THAT', 'HAVE', 'BEEN', 'SEPARATED', 'AND', 'DESTROYED'] +5764-299665-0008-413: ref=['IS', 'THIS', 'GOD', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'THUMB', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0008-413: hyp=['IS', 'THE', 'SCOTT', 'RESPONSIBLE', 'FOR', 'RELIGIOUS', 'PERSECUTION', 'FOR', 'THE', 'INQUISITION', 'FOR', 'THE', 'TEMP', 'SCREW', 'AND', 'RACK', 'AND', 'FOR', 'ALL', 'THE', 'INSTRUMENTS', 'OF', 'TORTURE'] +5764-299665-0009-414: ref=['DID', 'THIS', 'GOD', 'ALLOW', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0009-414: hyp=['DID', 'THIS', 'GOT', 'THE', 'LOW', 'THE', 'CRUEL', 'AND', 'VILE', 'TO', 'DESTROY', 'THE', 'BRAVE', 'AND', 'VIRTUOUS'] +5764-299665-0010-415: ref=['DID', 'HE', 'ALLOW', 'TYRANTS', 'TO', 'SHED', 'THE', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0010-415: hyp=['DID', 'HE', 'ALONE', 'TYRANTS', 'TO', 'SHED', 'A', 'BLOOD', 'OF', 'PATRIOTS'] +5764-299665-0011-416: ref=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0011-416: hyp=['CAN', 'WE', 'CONCEIVE', 'OF', 'A', 'DEVIL', 'BASE', 'ENOUGH', 'TO', 'PREFER', 'HIS', 'ENEMIES', 'TO', 'HIS', 'FRIENDS'] +5764-299665-0012-417: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'DEVOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FANGED', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0012-417: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'THE', 'WILD', 'BEASTS', 'THAT', 'THE', 'FOUR', 'HUMAN', 'BEINGS', 'FOR', 'THE', 'FACT', 'SERPENTS', 'WHOSE', 'BITE', 'IS', 'DEATH'] +5764-299665-0013-418: ref=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WHERE', 'LIFE', 'FEEDS', 'ON', 'LIFE'] +5764-299665-0013-418: hyp=['HOW', 'CAN', 'WE', 'ACCOUNT', 'FOR', 'A', 'WORLD', 'WERE', 'LIE', 'FEATS', 'ON', 'LIFE'] +5764-299665-0014-419: ref=['DID', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCE', 'THE', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEED', 'UPON', 'THE', 'OPTIC', 'NERVE', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0014-419: hyp=['THE', 'INFINITE', 'WISDOM', 'INTENTIONALLY', 'PRODUCED', 'A', 'MICROSCOPIC', 'BEASTS', 'THAT', 'FEAT', 'UPON', 'THE', 'OPTIC', 'NERVES', 'THINK', 'OF', 'BLINDING', 'A', 'MAN', 'TO', 'SATISFY', 'THE', 'APPETITE', 'OF', 'A', 'MICROBE'] +5764-299665-0015-420: ref=['FEAR', 'BUILDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFICE'] +5764-299665-0015-420: hyp=['FEAR', 'BIDS', 'THE', 'ALTAR', 'AND', 'OFFERS', 'THE', 'SACRIFICE'] +5764-299665-0016-421: ref=['FEAR', 'ERECTS', 'THE', 'CATHEDRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0016-421: hyp=['FEAR', 
'ERECTS', 'THE', 'KITRAL', 'AND', 'BOWS', 'THE', 'HEAD', 'OF', 'MAN', 'IN', 'WORSHIP'] +5764-299665-0017-422: ref=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0017-422: hyp=['LIPS', 'RELIGIOUS', 'AND', 'FEARFUL', 'TREMBLINGLY', 'REPEAT', 'THIS', 'PASSAGE', 'THOUGH', 'HE', 'SLAY', 'ME', 'YET', 'WILL', 'I', 'TRUST', 'HIM'] +5764-299665-0018-423: ref=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0018-423: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HE', 'CARED', 'FOR', 'THE', 'CHILDREN', 'OF', 'MEN'] +5764-299665-0019-424: ref=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'ENDURETH', 'FOREVER'] +5764-299665-0019-424: hyp=['CAN', 'WE', 'SAY', 'THAT', 'HIS', 'MERCY', 'AND', 'DURRED', 'FOREVER'] +5764-299665-0020-425: ref=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSANDS', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'OR', 'THAT', 'WITH', 'THE', 'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0020-425: hyp=['DO', 'WE', 'PROVE', 'HIS', 'GOODNESS', 'BY', 'SHOWING', 'THAT', 'HE', 'HAS', 'OPENED', 'THE', 'EARTH', 'AND', 'SWALLOWED', 'THOUSAND', 'OF', 'HIS', 'HELPLESS', 'CHILDREN', 'ALL', 'THAT', 'WITH', 'THE', 'VOLCANOES', 'HE', 'HAS', 'OVERWHELMED', 'THEM', 'WITH', 'RIVERS', 'OF', 'FIRE'] +5764-299665-0021-426: ref=['WAS', 'THERE', 'GOODNESS', 'WAS', 'THERE', 'WISDOM', 'IN', 'THIS'] +5764-299665-0021-426: hyp=['WAS', 'THEIR', 'GOODNESS', 'WAS', 'THEIR', 'WISDOM', 'IN', 'THIS'] +5764-299665-0022-427: ref=['OUGHT', 'THE', 'SUPERIOR', 'RACES', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0022-427: hyp=['ALL', 'THE', 'SUPERIOR', 'RAYS', 'TO', 'THANK', 'GOD', 'THAT', 'THEY', 'ARE', 'NOT', 'THE', 'INFERIOR'] +5764-299665-0023-428: ref=['MOST', 'PEOPLE', 'CLING', 'TO', 'THE', 'SUPERNATURAL'] +5764-299665-0023-428: hyp=['MOST', 'PEOPLE', 'CLINKED', 'THROUGH', 'THE', 'SUPERNATURAL'] +5764-299665-0024-429: ref=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0024-429: hyp=['IF', 'THEY', 'GIVE', 'UP', 'ONE', 'GOD', 'THEY', 'IMAGINE', 'ANOTHER'] +5764-299665-0025-430: ref=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0025-430: hyp=['WHAT', 'IS', 'THIS', 'POWER'] +5764-299665-0026-431: ref=['MAN', 'ADVANCES', 'AND', 'NECESSARILY', 'ADVANCES', 'THROUGH', 'EXPERIENCE'] +5764-299665-0026-431: hyp=['MAN', 'ADVANCES', 'A', 'NECESSARILY', 'ADVANCES', 'TO', 'EXPERIENCE'] +5764-299665-0027-432: ref=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COMES', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0027-432: hyp=['A', 'MAN', 'WISHING', 'TO', 'GO', 'TO', 'A', 'CERTAIN', 'PLACE', 'COME', 'TO', 'WHERE', 'THE', 'ROAD', 'DIVIDES'] +5764-299665-0028-433: ref=['HE', 'HAS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0028-433: hyp=['HE', 'IS', 'TRIED', 'THAT', 'ROAD', 'AND', 'KNOWS', 'THAT', 'IT', 'IS', 'THE', 'WRONG', 'ROAD'] +5764-299665-0029-434: ref=['A', 'CHILD', 'CHARMED', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPS', 'IT', 'WITH', 'ITS', 'DIMPLED', 'HAND'] +5764-299665-0029-434: hyp=['A', 'CHILD', 'SHOWN', 'BY', 'THE', 'BEAUTY', 'OF', 'THE', 'FLAME', 'GRASPED', 'IT', 'WITH', 'HIS', 'DIMPLED', 'HAND'] +5764-299665-0030-435: ref=['THE', 'POWER', 'THAT', 'WORKS', 'FOR', 
'RIGHTEOUSNESS', 'HAS', 'TAUGHT', 'THE', 'CHILD', 'A', 'LESSON'] +5764-299665-0030-435: hyp=['THE', 'POWER', 'THAT', 'WORK', 'FOR', 'RIGHTEOUSNESS', 'HAD', 'TAUGHT', 'THE', 'CHILD', 'A', 'LESSON'] +5764-299665-0031-436: ref=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0031-436: hyp=['IT', 'IS', 'A', 'RESULT'] +5764-299665-0032-437: ref=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SO', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0032-437: hyp=['IT', 'IS', 'INSISTED', 'BY', 'THESE', 'THEOLOGIANS', 'AND', 'BY', 'MANY', 'OF', 'THE', 'SOUL', 'CALLED', 'PHILOSOPHERS', 'THAT', 'THIS', 'MORAL', 'SENSE', 'THIS', 'SENSE', 'OF', 'DUTY', 'OF', 'OBLIGATION', 'WAS', 'IMPORTED', 'AND', 'THAT', 'CONSCIENCE', 'IS', 'AN', 'EXOTIC'] +5764-299665-0033-438: ref=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 'NATIONS'] +5764-299665-0033-438: hyp=['WE', 'LIVE', 'TOGETHER', 'IN', 'FAMILIES', 'TRIBES', 'AND', 'NATIONS'] +5764-299665-0034-439: ref=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0034-439: hyp=['THEY', 'ARE', 'PRAISED', 'ADMIRED', 'AND', 'RESPECTED'] +5764-299665-0035-440: ref=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'AS', 'MORAL'] +5764-299665-0035-440: hyp=['THEY', 'ARE', 'REGARDED', 'AS', 'GOOD', 'THAT', 'IS', 'TO', 'SAY', 'S', 'MORAL'] +5764-299665-0036-441: ref=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OR', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0036-441: hyp=['THE', 'MEMBERS', 'WHO', 'ADD', 'TO', 'THE', 'MISERY', 'OF', 'THE', 'FAMILY', 'THE', 'TRIBE', 'OF', 'THE', 'NATION', 'ARE', 'CONSIDERED', 'BAD', 'MEMBERS'] +5764-299665-0037-442: ref=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAS', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0037-442: hyp=['THE', 'GREATEST', 'OF', 'HUMAN', 'BEINGS', 'HAD', 'SAID', 'CONSCIENCE', 'IS', 'BORN', 'OF', 'LOVE'] +5764-299665-0038-443: ref=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0038-443: hyp=['AS', 'PEOPLE', 'ADVANCE', 'THE', 'REMOTE', 'CONSEQUENCES', 'ARE', 'PERCEIVED'] +5764-299665-0039-444: ref=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0039-444: hyp=['THE', 'IMAGINATION', 'IS', 'CULTIVATED'] +5764-299665-0040-445: ref=['A', 'MAN', 'PUTS', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0040-445: hyp=['A', 'MAN', 'BUT', 'HIMSELF', 'IN', 'THE', 'PLACE', 'OF', 'ANOTHER'] +5764-299665-0041-446: ref=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0041-446: hyp=['THE', 'SENSE', 'OF', 'DUTY', 'BECOMES', 'STRONGER', 'MORE', 'IMPERATIVE'] +5764-299665-0042-447: ref=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0042-447: hyp=['MAN', 'JUDGES', 'HIMSELF'] +5764-299665-0043-448: ref=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0043-448: hyp=['IN', 'ALL', 'THIS', 'THERE', 'IS', 'NOTHING', 'SUPERNATURAL'] +5764-299665-0044-449: ref=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0044-449: hyp=['MAN', 'HAS', 'DECEIVED', 'HIMSELF'] +5764-299665-0045-450: ref=['HAS', 'CHRISTIANITY', 'DONE', 'GOOD'] +5764-299665-0045-450: hyp=['HESTERITY', 'DONEGOOD'] +5764-299665-0046-451: ref=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROL', 'WERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] 
+5764-299665-0046-451: hyp=['WHEN', 'THE', 'CHURCH', 'HAD', 'CONTROLLED', 'WHERE', 'MEN', 'MADE', 'BETTER', 'AND', 'HAPPIER'] +5764-299665-0047-452: ref=['WHAT', 'HAS', 'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0047-452: hyp=['WHAT', 'HAS', 'RELIGION', 'DONE', 'FOR', 'HUNGARY', 'OR', 'AUSTRIA'] +5764-299665-0048-453: ref=['COULD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0048-453: hyp=['GOOD', 'THESE', 'COUNTRIES', 'HAVE', 'BEEN', 'WORSE', 'WITHOUT', 'RELIGION'] +5764-299665-0049-454: ref=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0049-454: hyp=['COULD', 'THEY', 'HAVE', 'BEEN', 'WORSE', 'HAD', 'THEY', 'HAD', 'ANY', 'OTHER', 'RELIGION', 'THAN', 'CHRISTIANITY'] +5764-299665-0050-455: ref=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FOR', 'THEM'] +5764-299665-0050-455: hyp=['WHAT', 'DID', 'CHRISTIANITY', 'DO', 'FAULT', 'THEM'] +5764-299665-0051-456: ref=['THEY', 'HATED', 'PLEASURE'] +5764-299665-0051-456: hyp=['THEY', 'HATED', 'PLEASURE'] +5764-299665-0052-457: ref=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0052-457: hyp=['THEY', 'MUFFLED', 'ALL', 'THE', 'BELLS', 'OF', 'GLADNESS'] +5764-299665-0053-458: ref=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'UNADULTERATED', 'CURSE'] +5764-299665-0053-458: hyp=['THE', 'RELIGION', 'OF', 'THE', 'PURITAN', 'WAS', 'AN', 'AN', 'ADULTERATED', 'CURSE'] +5764-299665-0054-459: ref=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORD', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0054-459: hyp=['THE', 'PURITAN', 'BELIEVED', 'THE', 'BIBLE', 'TO', 'BE', 'THE', 'WORTH', 'OF', 'GOD', 'AND', 'THIS', 'BELIEF', 'HAS', 'ALWAYS', 'MADE', 'THOSE', 'WHO', 'HELD', 'IT', 'CRUEL', 'AND', 'WRETCHED'] +5764-299665-0055-460: ref=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0055-460: hyp=['LET', 'ME', 'REFER', 'TO', 'JUST', 'ONE', 'FACT', 'SHOWING', 'THE', 'INFLUENCE', 'OF', 'A', 'BELIEF', 'IN', 'THE', 'BIBLE', 'ON', 'HUMAN', 'BEINGS'] +5764-299665-0056-461: ref=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0056-461: hyp=['THE', 'QUEEN', 'RECEIVED', 'THE', 'BIBLE', 'KISSED', 'IT', 'AND', 'PLEDGED', 'HERSELF', 'TO', 'DILIGENTLY', 'READ', 'THEREIN'] +5764-299665-0057-462: ref=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'INFAMOUS', 'AS', 'THE', 'CATHOLIC', 'SPIRIT'] +5764-299665-0057-462: hyp=['IN', 'OTHER', 'WORDS', 'IT', 'WAS', 'JUST', 'AS', 'FIENDISH', 'JUST', 'AS', 'IN', 'FAMOUS', 'AS', 'THE', 'CATTLE', 'EXPERIOR'] +5764-299665-0058-463: ref=['HAS', 'THE', 'BIBLE', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0058-463: hyp=['HESDAY', 'BUT', 'MADE', 'THE', 'PEOPLE', 'OF', 'GEORGIA', 'KIND', 'AND', 'MERCIFUL'] +5764-299665-0059-464: ref=['RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'HAS', 'FAILED'] +5764-299665-0059-464: hyp=['WHO', 'RELIGION', 'HAS', 'BEEN', 'TRIED', 'AND', 'IN', 'ALL', 'COUNTRIES', 'IN', 'ALL', 'TIMES', 'THUS', 'FAILED'] +5764-299665-0060-465: ref=['RELIGION', 'HAS', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 
'AND', 'THOUGHT'] +5764-299665-0060-465: hyp=['RELIGION', 'HATH', 'ALWAYS', 'BEEN', 'THE', 'ENEMY', 'OF', 'SCIENCE', 'OF', 'INVESTIGATION', 'AND', 'THOUGHT'] +5764-299665-0061-466: ref=['RELIGION', 'HAS', 'NEVER', 'MADE', 'MAN', 'FREE'] +5764-299665-0061-466: hyp=['RELIGION', 'IS', 'NEVER', 'MADE', 'MEN', 'FREE'] +5764-299665-0062-467: ref=['IT', 'HAS', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0062-467: hyp=['HE', 'JUST', 'NEVER', 'MADE', 'MAN', 'MORAL', 'TEMPERATE', 'INDUSTRIOUS', 'AND', 'HONEST'] +5764-299665-0063-468: ref=['ARE', 'CHRISTIANS', 'MORE', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0063-468: hyp=['AH', 'CHRISTIAN', 'SMALL', 'TEMPERATE', 'NEARER', 'VIRTUOUS', 'NEARER', 'HONEST', 'THAN', 'SAVAGES'] +5764-299665-0064-469: ref=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0064-469: hyp=['CAN', 'WE', 'CURE', 'DISEASE', 'BY', 'SUPPLICATION'] +5764-299665-0065-470: ref=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HONOR', 'AS', 'ALMS'] +5764-299665-0065-470: hyp=['CAN', 'WE', 'RECEIVE', 'VIRTUE', 'OR', 'HUNGER', 'AS', 'ALMS'] +5764-299665-0066-471: ref=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'THIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0066-471: hyp=['RELIGION', 'RESTS', 'ON', 'THE', 'IDEA', 'THAT', 'NATURE', 'HAS', 'A', 'MASTER', 'AND', 'THAT', 'THIS', 'MASTER', 'WILL', 'LISTEN', 'TO', 'PRAYER', 'THAT', 'HIS', 'MASTER', 'PUNISHES', 'AND', 'REWARDS', 'THAT', 'HE', 'LOVES', 'PRAISE', 'AND', 'FLATTERY', 'AND', 'HATES', 'THE', 'BRAVE', 'AND', 'FREE'] +5764-299665-0067-472: ref=['WE', 'MUST', 'HAVE', 'CORNER', 'STONES'] +5764-299665-0067-472: hyp=['WE', 'MUST', 'HAVE', 'CORN', 'THE', 'STONES'] +5764-299665-0068-473: ref=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'A', 'BASEMENT'] +5764-299665-0068-473: hyp=['THE', 'STRUCTURE', 'MUST', 'HAVE', 'ABASEMENT'] +5764-299665-0069-474: ref=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0069-474: hyp=['IF', 'WE', 'BUILD', 'WE', 'MUST', 'BEGIN', 'AT', 'THE', 'BOTTOM'] +5764-299665-0070-475: ref=['I', 'HAVE', 'A', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0070-475: hyp=['I', 'HAVE', 'IT', 'THEORY', 'AND', 'I', 'HAVE', 'FOUR', 'CORNER', 'STONES'] +5764-299665-0071-476: ref=['THE', 'FIRST', 'STONE', 'IS', 'THAT', 'MATTER', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0071-476: hyp=['THE', 'FIRST', 'STONE', 'EAST', 'AT', 'MAZAR', 'SUBSTANCE', 'CANNOT', 'BE', 'DESTROYED', 'CANNOT', 'BE', 'ANNIHILATED'] +5764-299665-0072-477: ref=['IF', 'THESE', 'CORNER', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'AND', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0072-477: hyp=['IF', 'THIS', 'CORN', 'THE', 'STONES', 'ARE', 'FACTS', 'IT', 'FOLLOWS', 'AS', 'A', 'NECESSITY', 'THAT', 'MATTER', 'AND', 'FORCE', 'ARE', 'FROM', 'END', 'TO', 'ETERNITY', 'THAT', 'THEY', 'CAN', 'NEITHER', 'BE', 'INCREASED', 'NOR', 'DIMINISHED'] +5764-299665-0073-478: ref=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'A', 'CREATOR'] 
+5764-299665-0073-478: hyp=['IT', 'FOLLOWS', 'THAT', 'NOTHING', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'CREATED', 'THAT', 'THERE', 'NEVER', 'HAS', 'BEEN', 'OR', 'CAN', 'BE', 'A', 'CREATOR'] +5764-299665-0074-479: ref=['IT', 'FOLLOWS', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'ANY', 'DESIGN', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0074-479: hyp=['IT', 'FOLLOWED', 'THAT', 'THERE', 'COULD', 'NOT', 'HAVE', 'BEEN', 'ANY', 'INTELLIGENCE', 'AND', 'A', 'DESIGNED', 'BACK', 'OF', 'MATTER', 'AND', 'FORCE'] +5764-299665-0075-480: ref=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0075-480: hyp=['I', 'SAY', 'WHAT', 'I', 'THINK'] +5764-299665-0076-481: ref=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0076-481: hyp=['EVERY', 'EVENT', 'HAS', 'PARENTS'] +5764-299665-0077-482: ref=['THAT', 'WHICH', 'HAS', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0077-482: hyp=['THAT', 'WHICH', 'HATH', 'NOT', 'HAPPENED', 'COULD', 'NOT'] +5764-299665-0078-483: ref=['IN', 'THE', 'INFINITE', 'CHAIN', 'THERE', 'IS', 'AND', 'THERE', 'CAN', 'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0078-483: hyp=['IN', 'THE', 'INFINITE', 'CHANGE', 'WRISTS', 'AND', 'THERE', 'CAN', 'BE', 'NO', 'BROKEN', 'NO', 'MISSING', 'LINK'] +5764-299665-0079-484: ref=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0079-484: hyp=['WE', 'NOW', 'KNOW', 'THAT', 'OUR', 'FIRST', 'PARENTS', 'WERE', 'NOT', 'FOREIGNERS'] +5764-299665-0080-485: ref=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0080-485: hyp=['WE', 'NOW', 'KNOW', 'IF', 'WE', 'KNOW', 'ANYTHING', 'THAT', 'THE', 'UNIVERSE', 'IS', 'NATURAL', 'AND', 'THAT', 'MAN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'NATURALLY', 'PRODUCED'] +5764-299665-0081-486: ref=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELED'] +5764-299665-0081-486: hyp=['WE', 'KNOW', 'THE', 'PATHS', 'THAT', 'LIFE', 'HAS', 'TRAVELLED'] +5764-299665-0082-487: ref=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'TRACED'] +5764-299665-0082-487: hyp=['WE', 'KNOW', 'THE', 'FOOTSTEPS', 'OF', 'ADVANCE', 'THEY', 'HAVE', 'BEEN', 'PRAISED'] +5764-299665-0083-488: ref=['FOR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'TRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0083-488: hyp=['FOUR', 'THOUSANDS', 'OF', 'YEARS', 'MEN', 'AND', 'WOMEN', 'HAVE', 'BEEN', 'CRYING', 'TO', 'REFORM', 'THE', 'WORLD'] +5764-299665-0084-489: ref=['WHY', 'HAVE', 'THE', 'REFORMERS', 'FAILED'] +5764-299665-0084-489: hyp=['WHY', 'HAVE', 'THE', 'REFORMED', 'FAITH'] +5764-299665-0085-490: ref=['THEY', 'DEPEND', 'ON', 'THE', 'LORD', 'ON', 'LUCK', 'AND', 'CHARITY'] +5764-299665-0085-490: hyp=['THEY', 'DEPEND', 'ON', 'THE', 'LOT', 'UNLUCK', 'AND', 'CHARITY'] +5764-299665-0086-491: ref=['THEY', 'LIVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0086-491: hyp=['THEY', 'LEAVE', 'BY', 'FRAUD', 'AND', 'VIOLENCE', 'AND', 'BEQUEATH', 'THEIR', 'VICES', 'TO', 'THEIR', 'CHILDREN'] +5764-299665-0087-492: ref=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADEMARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0087-492: hyp=['FAILURE', 'SEEMS', 'TO', 'BE', 'THE', 'TRADE', 'MARK', 'OF', 'NATURE', 'WHY'] +5764-299665-0088-493: ref=['NATURE', 'PRODUCES', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 
'WITHOUT', 'THOUGHT'] +5764-299665-0088-493: hyp=['NATURE', 'PRODUCED', 'WITHOUT', 'PURPOSE', 'SUSTAINS', 'WITHOUT', 'INTENTION', 'AND', 'DESTROYS', 'WITHOUT', 'THOUGHT'] +5764-299665-0089-494: ref=['MUST', 'THE', 'WORLD', 'FOREVER', 'REMAIN', 'THE', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0089-494: hyp=['MISTER', 'BUILD', 'FOREVER', 'REMAINED', 'A', 'VICTIM', 'OF', 'IGNORANT', 'PASSION'] +5764-299665-0090-495: ref=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'BURDENS', 'AND', 'CURSES', 'WHY'] +5764-299665-0090-495: hyp=['WHY', 'SHOULD', 'MEN', 'AND', 'WOMEN', 'HAVE', 'CHILDREN', 'THAT', 'THEY', 'CANNOT', 'TAKE', 'CARE', 'OF', 'CHILDREN', 'THAT', 'ARE', 'A', 'BURDEN', 'AND', 'CURSES', 'WHY'] +5764-299665-0091-496: ref=['PASSION', 'IS', 'AND', 'ALWAYS', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0091-496: hyp=['PASSION', 'EAST', 'AND', 'ALL', 'THESE', 'HAS', 'BEEN', 'DEAF'] +5764-299665-0092-497: ref=['LAW', 'CAN', 'PUNISH', 'BUT', 'IT', 'CAN', 'NEITHER', 'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0092-497: hyp=['LAW', 'CAN', 'PUNISH', 'THAT', 'IT', 'CAN', 'NEITHER', 'REFORM', 'CRIMINALS', 'NOR', 'PREVENT', 'CRIME'] +5764-299665-0093-498: ref=['THIS', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0093-498: hyp=['THESE', 'CANNOT', 'BE', 'DONE', 'BY', 'TALK', 'OR', 'EXAMPLE'] +5764-299665-0094-499: ref=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0094-499: hyp=['THIS', 'IS', 'THE', 'SOLUTION', 'OF', 'THE', 'WHOLE', 'QUESTION'] +5764-299665-0095-500: ref=['THIS', 'FREES', 'WOMAN'] +5764-299665-0095-500: hyp=['THIS', 'FREEZE', 'WOMEN'] +5764-299665-0096-501: ref=['POVERTY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0096-501: hyp=['BAVARY', 'AND', 'CRIME', 'WILL', 'BE', 'CHILDLESS'] +5764-299665-0097-502: ref=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FORTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FACE', 'THE', 'FUTURE', 'WITH', 'A', 'SMILE'] +5764-299665-0097-502: hyp=['IT', 'IS', 'FAR', 'BETTER', 'TO', 'BE', 'FREE', 'TO', 'LEAVE', 'THE', 'FAULTS', 'AND', 'BARRICADES', 'OF', 'FEAR', 'TO', 'STAND', 'ERECT', 'AND', 'FAITH', 'THE', 'FUTURE', 'WITH', 'US', 'MIND'] +6070-63485-0000-2599: ref=["THEY'RE", 'DONE', 'FOR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOW', 'KEY', 'TO', 'THE', 'CHOUETTE', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0000-2599: hyp=['THERE', 'DONE', 'FAR', 'SAID', 'THE', 'SCHOOLMASTER', 'IN', 'A', 'LOKIE', 'TO', 'THE', 'SWEAT', 'OUT', 'WITH', 'YOUR', 'VITRIOL', 'AND', 'MIND', 'YOUR', 'EYE'] +6070-63485-0001-2600: ref=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0001-2600: hyp=['THE', 'TWO', 'MONSTERS', 'TOOK', 'OFF', 'THEIR', 'SHOES', 'AND', 'MOVED', 'STEALTHILY', 'ALONG', 'KEEPING', 'IN', 'THE', 'SHADOWS', 'OF', 'THE', 'HOUSES'] +6070-63485-0002-2601: ref=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SARAH', 'AND', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] +6070-63485-0002-2601: hyp=['BY', 'MEANS', 'OF', 'THIS', 'STRATAGEM', 'THEY', 'FOLLOWED', 'SO', 'CLOSELY', 'THAT', 'ALTHOUGH', 'WITHIN', 'A', 'FEW', 'STEPS', 'OF', 'SEREN', 'TOM', 'THEY', 'DID', 'NOT', 'HEAR', 'THEM'] 
+6070-63485-0003-2602: ref=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPIS', 'FRANC', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPENED', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0003-2602: hyp=['SARAH', 'AND', 'HER', 'BROTHER', 'HAVING', 'AGAIN', 'PASSED', 'BY', 'THE', 'TAPPY', 'FRANK', 'ARRIVED', 'CLOSE', 'TO', 'THE', 'DILAPIDATED', 'HOUSE', 'WHICH', 'WAS', 'PARTLY', 'IN', 'RUINS', 'AND', 'ITS', 'OPEN', 'CELLARS', 'FORMED', 'A', 'KIND', 'OF', 'GULF', 'ALONG', 'WHICH', 'THE', 'STREET', 'RAN', 'IN', 'THAT', 'DIRECTION'] +6070-63485-0004-2603: ref=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SEYTON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0004-2603: hyp=['IN', 'AN', 'INSTANT', 'THE', 'SCHOOLMASTER', 'WITH', 'A', 'LEAP', 'RESEMBLING', 'IN', 'STRENGTH', 'AND', 'AGILITY', 'THE', 'SPRING', 'OF', 'A', 'TIGER', 'SEIZED', 'SEYTON', 'WITH', 'ONE', 'HAND', 'BY', 'THE', 'THROAT', 'AND', 'EXCLAIMED', 'YOUR', 'MONEY', 'OR', 'I', 'WILL', 'FLING', 'YOU', 'INTO', 'THIS', 'HOLE'] +6070-63485-0005-2604: ref=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'GRUMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0005-2604: hyp=['NO', 'SAID', 'THE', 'OLD', 'BRUTE', 'TREMBLINGLY', 'NO', 'NOT', 'ONE', 'RING', 'WHAT', 'A', 'SHAME'] +6070-63485-0006-2605: ref=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0006-2605: hyp=['TOM', 'SEYTON', 'DID', 'NOT', 'LOSE', 'HIS', 'PRESENCE', 'OF', 'MIND', 'DURING', 'THIS', 'SCENE', 'RAPIDLY', 'AND', 'UNEXPECTEDLY', 'AS', 'IT', 'HAD', 'OCCURRED'] +6070-63485-0007-2606: ref=['OH', 'AH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0007-2606: hyp=['UH', 'TO', 'LAY', 'A', 'TRAP', 'TO', 'CATCH', 'US', 'REPLIED', 'THE', 'THIEF'] +6070-63485-0008-2607: ref=['THEN', 'ADDRESSING', 'THOMAS', 'SEYTON', 'YOU', 'KNOW', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0008-2607: hyp=['THEN', 'ADDRESSING', 'THOMAS', 'SETTON', 'YOU', 'KNOW', 'THE', 'PLANE', 'OF', 'SAINT', 'DENIS'] +6070-63485-0009-2608: ref=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAVE', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0009-2608: hyp=['DID', 'YOU', 'SEE', 'IN', 'THE', 'CABARET', 'WE', 'HAD', 'JUST', 'LEFT', 'FOR', 'I', 'KNOW', 'YOU', 'AGAIN', 'THE', 'MAN', 'WHOM', 'THE', 'CHARCOAL', 'MAN', 'CAME', 'TO', 'SEEK'] +6070-63485-0010-2609: ref=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0010-2609: hyp=['CRIED', 'THE', 'SCHOOLMASTER', 'A', 'THOUSAND', 'FRANCS', 'AND', "I'LL", 'KILL', 'HIM'] +6070-63485-0011-2610: ref=['WRETCH', 'I', 'DO', 'NOT', 'SEEK', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0011-2610: hyp=['WRETCH', 'I', 'DO', 'NOT', 'SEE', 'HIS', 'LIFE', 'REPLIED', 'SARAH', 'TO', 'THE', 'SCHOOLMASTER'] +6070-63485-0012-2611: ref=["LET'S", 'GO', 'AND', 'MEET', 'HIM'] +6070-63485-0012-2611: hyp=["LET'S", 'GO', 'AND', 'MEET', 
'HIM'] +6070-63485-0013-2612: ref=['OLD', 'BOY', 'IT', 'WILL', 'PAY', 'FOR', 'LOOKING', 'AFTER'] +6070-63485-0013-2612: hyp=['OLD', 'BOY', 'IT', 'WILL', 'PAY', 'FOR', 'LOOKING', 'AFTER'] +6070-63485-0014-2613: ref=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0014-2613: hyp=['WELL', 'MY', 'WIFE', 'SHALL', 'BE', 'THERE', 'SAID', 'THE', 'SCHOOLMASTER', 'YOU', 'WILL', 'TELL', 'HER', 'WHAT', 'YOU', 'WANT', 'AND', 'I', 'SHALL', 'SEE'] +6070-63485-0015-2614: ref=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0015-2614: hyp=['IN', 'THE', 'PLAIN', 'OF', 'SAINT', 'DENIS'] +6070-63485-0016-2615: ref=['BETWEEN', 'SAINT', 'OUEN', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLTE', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0016-2615: hyp=['BETWEEN', 'SAINT', 'JOIN', 'AND', 'THE', 'ROAD', 'OF', 'LA', 'REVOLT', 'AT', 'THE', 'END', 'OF', 'THE', 'ROAD', 'AGREED'] +6070-63485-0017-2616: ref=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 'THE', 'SELF', 'STYLED', 'FAN', 'PAINTER'] +6070-63485-0017-2616: hyp=['HE', 'HAD', 'FORGOTTEN', 'THE', 'ADDRESS', 'OF', 'THE', 'SELF', 'STYLED', 'PAMPAINTER'] +6070-63485-0018-2617: ref=['THE', 'FIACRE', 'STARTED'] +6070-63485-0018-2617: hyp=['THE', 'FIACCHUS', 'STARTED'] +6070-86744-0000-2569: ref=['FRANZ', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0000-2569: hyp=['FRANCE', 'WHO', 'SEEMED', 'ATTRACTED', 'BY', 'SOME', 'INVISIBLE', 'INFLUENCE', 'TOWARDS', 'THE', 'COUNT', 'IN', 'WHICH', 'TERROR', 'WAS', 'STRANGELY', 'MINGLED', 'FELT', 'AN', 'EXTREME', 'RELUCTANCE', 'TO', 'PERMIT', 'HIS', 'FRIEND', 'TO', 'BE', 'EXPOSED', 'ALONE', 'TO', 'THE', 'SINGULAR', 'FASCINATION', 'THAT', 'THIS', 'MYSTERIOUS', 'PERSONAGE', 'SEEMED', 'TO', 'EXERCISE', 'OVER', 'HIM', 'AND', 'THEREFORE', 'MADE', 'NO', 'OBJECTION', 'TO', "ALBERT'S", 'REQUEST', 'BUT', 'AT', 'ONCE', 'ACCOMPANIED', 'HIM', 'TO', 'THE', 'DESIRED', 'SPOT', 'AND', 'AFTER', 'A', 'SHORT', 'DELAY', 'THE', 'COUNT', 'JOINED', 'THEM', 'IN', 'THE', 'SALON'] +6070-86744-0001-2570: ref=['MY', 'VERY', 'GOOD', 'FRIEND', 'AND', 'EXCELLENT', 'NEIGHBOR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0001-2570: hyp=['MY', 'VERY', 'GOOD', 'FRIEND', 'AN', 'EXCELLENT', 'NEIGHBOUR', 'REPLIED', 'THE', 'COUNT', 'WITH', 'A', 'SMILE', 'YOU', 'REALLY', 'EXAGGERATE', 'MY', 'TRIFLING', 'EXERTIONS'] +6070-86744-0002-2571: ref=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 'MORCERF', 'ALTHOUGH', 'OF', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 'UNHESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0002-2571: hyp=['MY', 'FATHER', 'THE', 'COMTE', 'DE', 
'MORCERF', 'ALTHOUGH', 'A', 'SPANISH', 'ORIGIN', 'POSSESSES', 'CONSIDERABLE', 'INFLUENCE', 'BOTH', 'AT', 'THE', 'COURT', 'OF', 'FRANCE', 'AND', 'MADRID', 'AND', 'I', 'AM', 'HESITATINGLY', 'PLACE', 'THE', 'BEST', 'SERVICES', 'OF', 'MYSELF', 'AND', 'ALL', 'TO', 'WHOM', 'MY', 'LIFE', 'IS', 'DEAR', 'AT', 'YOUR', 'DISPOSAL'] +6070-86744-0003-2572: ref=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0003-2572: hyp=['I', 'CAN', 'SCARCELY', 'CREDIT', 'IT'] +6070-86744-0004-2573: ref=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0004-2573: hyp=['THEN', 'IT', 'IS', 'SETTLED', 'SAID', 'THE', 'COUNT', 'AND', 'I', 'GIVE', 'YOU', 'MY', 'SOLEMN', 'ASSURANCE', 'THAT', 'I', 'ONLY', 'WAITED', 'IN', 'AN', 'OPPORTUNITY', 'LIKE', 'THE', 'PRESENT', 'TO', 'REALIZE', 'PLANS', 'THAT', 'I', 'HAVE', 'LONG', 'MEDITATED'] +6070-86744-0005-2574: ref=['SHALL', 'WE', 'MAKE', 'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0005-2574: hyp=['SHALL', 'WE', 'MAKE', 'A', 'POSITIVE', 'APPOINTMENT', 'FOR', 'A', 'PARTICULAR', 'DAY', 'AND', 'HOUR', 'INQUIRED', 'THE', 'COUNT', 'ONLY', 'LET', 'ME', 'WARN', 'YOU', 'THAT', 'I', 'AM', 'PROVERBIAL', 'FOR', 'MY', 'PUNCTILIOUS', 'EXACTITUDE', 'IN', 'KEEPING', 'MY', 'ENGAGEMENTS', 'DAY', 'FOR', 'DAY', 'HOUR', 'FOR', 'HOUR', 'SAID', 'ALBERT', 'THAT', 'WILL', 'SUIT', 'ME', 'TO', 'A', 'DOT'] +6070-86744-0006-2575: ref=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'A', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'ADDED', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THE', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0006-2575: hyp=['SO', 'BE', 'IT', 'THEN', 'REPLIED', 'THE', 'COUNT', 'AND', 'EXTENDING', 'HIS', 'HAND', 'TOWARDS', 'THE', 'CALENDAR', 'SUSPENDED', 'NEAR', 'THE', 'CHIMNEY', 'PIECE', 'HE', 'SAID', 'TO', 'DAY', 'IS', 'THE', 'TWENTY', 'FIRST', 'OF', 'FEBRUARY', 'AND', 'DRAWING', 'OUT', 'HIS', 'WATCH', 'I', 'DID', 'IT', 'IS', 'EXACTLY', 'HALF', 'PAST', 'TEN', "O'CLOCK", 'NOW', 'PROMISE', 'ME', 'TO', 'REMEMBER', 'THIS', 'AND', 'EXPECT', 'ME', 'THAT', 'TWENTY', 'FIRST', 'OF', 'MAY', 'AT', 'THE', 'SAME', 'HOUR', 'IN', 'THE', 'FORENOON'] +6070-86744-0007-2576: ref=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURT', 'YARD', 'ENTIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0007-2576: hyp=['I', 'RESIDE', 'IN', 'MY', "FATHER'S", 'HOUSE', 'BUT', 'OCCUPY', 'A', 'PAVILION', 'AT', 'THE', 'FARTHER', 'SIDE', 'OF', 'THE', 'COURTYARD', 'ENTIRELY', 'SEPARATED', 'FROM', 'THE', 'MAIN', 'BUILDING'] +6070-86744-0008-2577: ref=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'HIS', 'TABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 
'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PIECE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] +6070-86744-0008-2577: hyp=['NOW', 'THEN', 'SAID', 'THE', 'COUNT', 'RETURNING', 'HIS', 'TABLETS', 'TO', 'HIS', 'POCKET', 'MAKE', 'YOURSELF', 'PERFECTLY', 'EASY', 'THE', 'HAND', 'OF', 'YOUR', 'TIME', 'PEACE', 'WILL', 'NOT', 'BE', 'MORE', 'ACCURATE', 'IN', 'MARKING', 'THE', 'TIME', 'THAN', 'MYSELF'] +6070-86744-0009-2578: ref=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0009-2578: hyp=['THAT', 'DEPENDS', 'WHEN', 'DO', 'YOU', 'LEAVE'] +6070-86744-0010-2579: ref=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0010-2579: hyp=['FOR', 'FRANCE', 'NO', 'FOR', 'VENICE', 'I', 'SHALL', 'REMAIN', 'IN', 'ITALY', 'FOR', 'ANOTHER', 'YEAR', 'OR', 'TWO'] +6070-86744-0011-2580: ref=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] +6070-86744-0011-2580: hyp=['THEN', 'WE', 'SHALL', 'NOT', 'MEET', 'IN', 'PARIS'] +6070-86744-0012-2581: ref=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOR'] +6070-86744-0012-2581: hyp=['I', 'FEAR', 'I', 'SHALL', 'NOT', 'HAVE', 'THAT', 'HONOUR'] +6070-86744-0013-2582: ref=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0013-2582: hyp=['WELL', 'SINCE', 'WE', 'MUST', 'PART', 'SAID', 'THE', 'COUNT', 'HOLDING', 'OUT', 'A', 'HAND', 'TO', 'EACH', 'OF', 'THE', 'YOUNG', 'MEN', 'ALLOW', 'ME', 'TO', 'WISH', 'YOU', 'BOTH', 'A', 'SAFE', 'AND', 'PLEASANT', 'JOURNEY'] +6070-86744-0014-2583: ref=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEEM', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0014-2583: hyp=['WHAT', 'IS', 'THE', 'MATTER', 'ASKED', 'ALBERT', 'OF', 'FRANZ', 'WHEN', 'THEY', 'HAD', 'RETURNED', 'TO', 'THEIR', 'OWN', 'APARTMENTS', 'YOU', 'SEE', 'MORE', 'THAN', 'COMMONLY', 'THOUGHTFUL'] +6070-86744-0015-2584: ref=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0015-2584: hyp=['I', 'WILL', 'CONFESS', 'TO', 'YOU', 'ALBERT', 'REPLIED', 'FRANZ', 'THE', 'COUNT', 'IS', 'A', 'VERY', 'SINGULAR', 'PERSON', 'AND', 'THE', 'APPOINTMENT', 'YOU', 'HAVE', 'MADE', 'TO', 'MEET', 'HIM', 'IN', 'PARIS', 'FILLS', 'ME', 'WITH', 'A', 'THOUSAND', 'APPREHENSIONS'] +6070-86744-0016-2585: ref=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0016-2585: hyp=['DID', 'YOU', 'EVER', 'MEET', 'HIM', 'PREVIOUSLY', 'TO', 'COMING', 'HITHER'] +6070-86744-0017-2586: ref=['UPON', 'MY', 'HONOR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0017-2586: hyp=['UPON', 'MY', 'HONOUR', 'THEN', 'LISTEN', 'TO', 'ME'] +6070-86744-0018-2587: ref=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'GROTTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 
'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 'SAIL', 'TOWARD', 'PORTO', 'VECCHIO'] +6070-86744-0018-2587: hyp=['HE', 'DWELT', 'WITH', 'CONSIDERABLE', 'FORCE', 'AND', 'ENERGY', 'ON', 'THE', 'ALMOST', 'MAGICAL', 'HOSPITALITY', 'HE', 'HAD', 'RECEIVED', 'FROM', 'THE', 'COUNT', 'AND', 'THE', 'MAGNIFICENCE', 'OF', 'HIS', 'ENTERTAINMENT', 'IN', 'THE', 'DRATTO', 'OF', 'THE', 'THOUSAND', 'AND', 'ONE', 'NIGHTS', 'HE', 'RECOUNTED', 'WITH', 'CIRCUMSTANTIAL', 'EXACTITUDE', 'ALL', 'THE', 'PARTICULARS', 'OF', 'THE', 'SUPPER', 'THE', 'HASHISH', 'THE', 'STATUES', 'THE', 'DREAM', 'AND', 'HOW', 'AT', 'HIS', 'AWAKENING', 'THERE', 'REMAINED', 'NO', 'PROOF', 'OR', 'TRACE', 'OF', 'ALL', 'THESE', 'EVENTS', 'SAVE', 'THE', 'SMALL', 'YACHT', 'SEEN', 'IN', 'THE', 'DISTANT', 'HORIZON', 'DRIVING', 'UNDER', 'FULL', 'SAIL', 'TOWARD', 'PORTO', 'VECCHIO'] +6070-86744-0019-2588: ref=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLOSSEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0019-2588: hyp=['THEN', 'HE', 'DETAILED', 'THE', 'CONVERSATION', 'OVERHEARD', 'BY', 'HIM', 'AT', 'THE', 'COLOSSEUM', 'BETWEEN', 'THE', 'COUNT', 'AND', 'VAMPA', 'IN', 'WHICH', 'THE', 'COUNT', 'HAD', 'PROMISED', 'TO', 'OBTAIN', 'THE', 'RELEASE', 'OF', 'THE', 'BANDIT', 'PEPPINO', 'AN', 'ENGAGEMENT', 'WHICH', 'AS', 'OUR', 'READERS', 'ARE', 'AWARE', 'HE', 'MOST', 'FAITHFULLY', 'FULFILLED'] +6070-86744-0020-2589: ref=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0020-2589: hyp=['BUT', 'SAID', 'FRANZ', 'THE', 'CORSICAN', 'BANDITS', 'THAT', 'WERE', 'AMONG', 'THE', 'CREW', 'OF', 'HIS', 'VESSEL'] +6070-86744-0021-2590: ref=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0021-2590: hyp=['WHY', 'REALLY', 'THE', 'THING', 'SEEMS', 'TO', 'ME', 'SIMPLE', 'ENOUGH'] +6070-86744-0022-2591: ref=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRY', 'IS', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONGUE', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HAVE', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0022-2591: hyp=['TALKING', 'OF', 'COUNTRIES', 'REPLIED', 'FRANZ', 'OF', 'WHAT', 'COUNTRIES', 'THE', 'COUNT', 'WHAT', 'IS', 'HIS', 'NATIVE', 'TONG', 'WHENCE', 'DOES', 'HE', 'DERIVE', 'HIS', 'IMMENSE', 'FORTUNE', 'AND', 'WHAT', 'WERE', 'THOSE', 'EVENTS', 'OF', 'HIS', 'EARLY', 'LIFE', 'A', 'LIFE', 'AS', 'MARVELLOUS', 'AS', 'UNKNOWN', 'THAT', 'HATH', 'TINCTURED', 'HIS', 'SUCCEEDING', 'YEARS', 'WITH', 'SO', 'DARK', 'AND', 'GLOOMY', 'A', 'MISANTHROPY'] +6070-86744-0023-2592: ref=['CERTAINLY', 'THESE', 'ARE', 'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0023-2592: hyp=['CERTAINLY', 'THESE', 'ARE', 
'QUESTIONS', 'THAT', 'IN', 'YOUR', 'PLACE', 'I', 'SHOULD', 'LIKE', 'TO', 'HAVE', 'ANSWERED'] +6070-86744-0024-2593: ref=['MY', 'DEAR', 'FRANZ', 'REPLIED', 'ALBERT', 'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0024-2593: hyp=['MY', 'DEAR', 'FRIENDS', 'REPLIED', 'ALBERT', 'WHEN', 'UPON', 'RECEIPT', 'OF', 'MY', 'LETTER', 'YOU', 'FOUND', 'THE', 'NECESSITY', 'OF', 'ASKING', 'THE', "COUNT'S", 'ASSISTANCE', 'YOU', 'PROMPTLY', 'WENT', 'TO', 'HIM', 'SAYING', 'MY', 'FRIEND', 'ALBERT', 'DE', 'MORCERF', 'IS', 'IN', 'DANGER', 'HELP', 'ME', 'TO', 'DELIVER', 'HIM'] +6070-86744-0025-2594: ref=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRY', 'IS', 'HE', 'A', 'NATIVE'] +6070-86744-0025-2594: hyp=['WHAT', 'ARE', 'HIS', 'MEANS', 'OF', 'EXISTENCE', 'WHAT', 'IS', 'HIS', 'BIRTHPLACE', 'OF', 'WHAT', 'COUNTRIES', 'HE', 'A', 'NATIVE'] +6070-86744-0026-2595: ref=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SIGNOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0026-2595: hyp=['I', 'CONFESS', 'HE', 'ASKED', 'ME', 'NONE', 'NO', 'HE', 'MERELY', 'CAME', 'AND', 'FREED', 'ME', 'FROM', 'THE', 'HANDS', 'OF', 'SENOR', 'VAMPA', 'WHERE', 'I', 'CAN', 'ASSURE', 'YOU', 'IN', 'SPITE', 'OF', 'ALL', 'MY', 'OUTWARD', 'APPEARANCE', 'OF', 'EASE', 'AND', 'UNCONCERN', 'I', 'DID', 'NOT', 'VERY', 'PARTICULARLY', 'CARE', 'TO', 'REMAIN'] +6070-86744-0027-2596: ref=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0027-2596: hyp=['AND', 'THIS', 'TIME', 'IT', 'MUST', 'BE', 'CONFESSED', 'THAT', 'CONTRARY', 'TO', 'THE', 'USUAL', 'STATE', 'OF', 'AFFAIRS', 'IN', 'DISCUSSIONS', 'BETWEEN', 'THE', 'YOUNG', 'MEN', 'THE', 'EFFECTIVE', 'ARGUMENTS', 'WERE', 'ALL', 'ON', "ALBERT'S", 'SIDE'] +6070-86744-0028-2597: ref=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0028-2597: hyp=['WELL', 'SAID', 'FRANZ', 'WITH', 'A', 'SIGH', 'DO', 'AS', 'YOU', 'PLEASE', 'MY', 'DEAR', 'VISCOUNT', 'FOR', 'YOUR', 'ARGUMENTS', 'ARE', 'BEYOND', 'MY', 'POWERS', 'OF', 'REFUTATION'] +6070-86744-0029-2598: ref=['AND', 'NOW', 'MY', 'DEAR', 'FRANZ', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86744-0029-2598: hyp=['AND', 'NOW', 'MY', 'DEAR', 'FRANCE', 'LET', 'US', 'TALK', 'OF', 'SOMETHING', 'ELSE'] +6070-86745-0000-2549: ref=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MERIT', 'A', 'MORE', 'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0000-2549: hyp=['THEN', 'SHOULD', 'ANYTHING', 'APPEAR', 'TO', 'MARRIAGE', 'A', 'MORE', 
'MINUTE', 'EXAMINATION', 'ALBERT', 'DE', 'MORCERF', 'COULD', 'FOLLOW', 'UP', 'HIS', 'RESEARCHES', 'BY', 'MEANS', 'OF', 'A', 'SMALL', 'GATE', 'SIMILAR', 'TO', 'THAT', 'CLOSE', 'TO', 'THE', "CONCIERGE'S", 'DOOR', 'AND', 'WHICH', 'MERITS', 'A', 'PARTICULAR', 'DESCRIPTION'] +6070-86745-0001-2550: ref=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0001-2550: hyp=['SHRUBS', 'AND', 'CREEPING', 'PLANTS', 'COVERED', 'THE', 'WINDOWS', 'AND', 'HID', 'FROM', 'THE', 'GARDEN', 'AND', 'COURT', 'THESE', 'TWO', 'APARTMENTS', 'THE', 'ONLY', 'ROOMS', 'INTO', 'WHICH', 'AS', 'THEY', 'WERE', 'ON', 'THE', 'GROUND', 'FLOOR', 'THE', 'PRYING', 'EYES', 'OF', 'THE', 'CURIOUS', 'COULD', 'PENETRATE'] +6070-86745-0002-2551: ref=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALET', 'ENTERED', 'HE', 'COMPOSED', 'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0002-2551: hyp=['AT', 'A', 'QUARTER', 'TO', 'TEN', 'A', 'VALLEY', 'ENTERED', 'HE', 'COMPOSED', 'WITH', 'A', 'LITTLE', 'GROOM', 'NAMED', 'JOHN', 'AND', 'WHO', 'ONLY', 'SPOKE', 'ENGLISH', 'ALL', "ALBERT'S", 'ESTABLISHMENT', 'ALTHOUGH', 'THE', 'COOK', 'OF', 'THE', 'HOTEL', 'WAS', 'ALWAYS', 'AT', 'HIS', 'SERVICE', 'AND', 'ON', 'GREAT', 'OCCASIONS', 'THE', "COUNT'S", 'CHASSEUR', 'ALSO'] +6070-86745-0003-2552: ref=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0003-2552: hyp=['WAIT', 'THEN', 'DURING', 'THE', 'DAY', 'TELL', 'ROSA', 'THAT', 'WHEN', 'I', 'LEAVE', 'THE', 'OPERA', 'I', 'WILL', 'SUP', 'WITH', 'HER', 'AS', 'SHE', 'WISHES'] +6070-86745-0004-2553: ref=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0004-2553: hyp=['VERY', 'WELL', 'AT', 'HALF', 'PAST', 'TEN'] +6070-86745-0005-2554: ref=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0005-2554: hyp=['IS', 'THE', 'COUNTESS', 'UP', 'YET'] +6070-86745-0006-2555: ref=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0006-2555: hyp=['THE', 'VALET', 'LEFT', 'THE', 'ROOM'] +6070-86745-0007-2556: ref=['GOOD', 'MORNING', 'LUCIEN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0007-2556: hyp=['GOOD', 'MORNING', 'LUCIAN', 'GOOD', 'MORNING', 'SAID', 'ALBERT', 'YOUR', 'PUNCTUALITY', 'REALLY', 'ALARMS', 'ME'] +6070-86745-0008-2557: ref=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0008-2557: hyp=['YOU', 'WHOM', 'I', 'EXPECTED', 'LAST', 'YOU', 'ARRIVE', 'AT', 'FIVE', 'MINUTES', 'TO', 'TEN', 'WHEN', 'THE', 'TIME', 'FIXED', 'WAS', 'HALF', 'PAST'] +6070-86745-0009-2558: ref=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0009-2558: hyp=['NO', 'NO', 'MY', 'DEAR', 'FELLOW', 'DO', 'NOT', 'CONFOUND', 'OUR', 'PLANS'] +6070-86745-0010-2559: ref=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGES', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'SEVEN'] 
+6070-86745-0010-2559: hyp=['YES', 'HE', 'HAS', 'NOT', 'MUCH', 'TO', 'COMPLAIN', 'OF', 'BOURGE', 'IS', 'THE', 'CAPITAL', 'OF', 'CHARLES', 'THE', 'SEVENTH'] +6070-86745-0011-2560: ref=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0011-2560: hyp=['IT', 'IS', 'FOR', 'THAT', 'REASON', 'YOU', 'SEE', 'ME', 'SO', 'EARLY'] +6070-86745-0012-2561: ref=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0012-2561: hyp=['I', 'RETURNED', 'HOME', 'AT', 'DAYBREAK', 'AND', 'STROVE', 'TO', 'SLEEP', 'BUT', 'MY', 'HEAD', 'ACHED', 'AND', 'I', 'GOT', 'UP', 'TO', 'HAVE', 'A', 'RIDE', 'FOR', 'AN', 'HOUR'] +6070-86745-0013-2562: ref=['PESTE', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0013-2562: hyp=['PESTS', 'I', 'WILL', 'DO', 'NOTHING', 'OF', 'THE', 'KIND', 'THE', 'MOMENT', 'THEY', 'COME', 'FROM', 'GOVERNMENT', 'YOU', 'WOULD', 'FIND', 'THEM', 'EXECRABLE'] +6070-86745-0014-2563: ref=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0014-2563: hyp=['BESIDES', 'THAT', 'DOES', 'NOT', 'CONCERN', 'THE', 'HOME', 'BUT', 'THE', 'FINANCIAL', 'DEPARTMENT'] +6070-86745-0015-2564: ref=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0015-2564: hyp=['ABOUT', 'WHAT', 'ABOUT', 'THE', 'PAPERS'] +6070-86745-0016-2565: ref=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0016-2565: hyp=['IN', 'THE', 'ENTIRE', 'POLITICAL', 'WORLD', 'OF', 'WHICH', 'YOU', 'ARE', 'ONE', 'OF', 'THE', 'LEADERS'] +6070-86745-0017-2566: ref=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'REAP', 'A', 'LITTLE', 'BLUE'] +6070-86745-0017-2566: hyp=['THEY', 'SAY', 'THAT', 'IT', 'IS', 'QUITE', 'FAIR', 'AND', 'THAT', 'SOWING', 'SO', 'MUCH', 'RED', 'YOU', 'OUGHT', 'TO', 'READ', 'A', 'LITTLE', 'BLUE'] +6070-86745-0018-2567: ref=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIEN'] +6070-86745-0018-2567: hyp=['COME', 'COME', 'THAT', 'IS', 'NOT', 'BAD', 'SAID', 'LUCIAN'] +6070-86745-0019-2568: ref=['WITH', 'YOUR', 'TALENTS', 'YOU', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6070-86745-0019-2568: hyp=['WITH', 'THE', 'OR', 'TALONS', 'HE', 'WOULD', 'MAKE', 'YOUR', 'FORTUNE', 'IN', 'THREE', 'OR', 'FOUR', 'YEARS'] +6128-63240-0000-503: ref=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0000-503: hyp=['THE', 'GENTLEMAN', 'HAD', 'NOT', 'EVEN', 'NEEDED', 'TO', 'SIT', 'DOWN', 'TO', 'BECOME', 'INTERESTED', 'APPARENTLY', 'HE', 'HAD', 'TAKEN', 'UP', 'THE', 'VOLUME', 'FROM', 'A', 'TABLE', 'AS', 'SOON', 'AS', 'HE', 'CAME', 'IN', 'AND', 'STANDING', 'THERE', 'AFTER', 'A', 'SINGLE', 'GLANCE', 'ROUND', 'THE', 'APARTMENT', 'HAD', 'LOST', 'HIMSELF', 'IN', 'ITS', 'PAGES'] +6128-63240-0001-504: ref=['THAT', 'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0001-504: hyp=['THAT', 
'HAS', 'AN', 'UNFLATTERING', 'SOUND', 'FOR', 'ME', 'SAID', 'THE', 'YOUNG', 'MAN'] +6128-63240-0002-505: ref=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] +6128-63240-0002-505: hyp=['SHE', 'IS', 'WILLING', 'TO', 'RISK', 'THAT'] +6128-63240-0003-506: ref=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'RATHER', 'A', 'WORK', 'A', 'DAY', 'ASPECT'] +6128-63240-0003-506: hyp=['JUST', 'AS', 'I', 'AM', 'THE', 'VISITOR', 'INQUIRED', 'PRESENTING', 'HIMSELF', 'WITH', 'RATHER', 'A', 'WORKADAY', 'ASPECT'] +6128-63240-0004-507: ref=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRUMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0004-507: hyp=['HE', 'WAS', 'TALL', 'AND', 'LEAN', 'AND', 'DRESSED', 'THROUGHOUT', 'IN', 'BLACK', 'HIS', 'SHIRT', 'COLLAR', 'WAS', 'LOW', 'AND', 'WIDE', 'AND', 'THE', 'TRIANGLE', 'OF', 'LINEN', 'A', 'LITTLE', 'CRUMPLED', 'EXHIBITED', 'BY', 'THE', 'OPENING', 'OF', 'HIS', 'WAISTCOAT', 'WAS', 'ADORNED', 'BY', 'A', 'PIN', 'CONTAINING', 'A', 'SMALL', 'RED', 'STONE'] +6128-63240-0005-508: ref=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'POOR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LOOK', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0005-508: hyp=['IN', 'SPITE', 'OF', 'THIS', 'DECORATION', 'THE', 'YOUNG', 'MAN', 'LOOKED', 'POOR', 'AS', 'FAR', 'AS', 'A', 'YOUNG', 'MAN', 'COULD', 'LIVE', 'WHO', 'HAD', 'SUCH', 'A', 'FINE', 'HEAD', 'AND', 'SUCH', 'MAGNIFICENT', 'EYES'] +6128-63240-0006-509: ref=['THOSE', 'OF', 'BASIL', 'RANSOM', 'WERE', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATURE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0006-509: hyp=['THOSE', 'OF', 'BAZA', 'RANSOM', 'WENT', 'DARK', 'DEEP', 'AND', 'GLOWING', 'HIS', 'HEAD', 'HAD', 'A', 'CHARACTER', 'OF', 'ELEVATION', 'WHICH', 'FAIRLY', 'ADDED', 'TO', 'HIS', 'STATUE', 'IT', 'WAS', 'A', 'HEAD', 'TO', 'BE', 'SEEN', 'ABOVE', 'THE', 'LEVEL', 'OF', 'A', 'CROWD', 'ON', 'SOME', 'JUDICIAL', 'BENCH', 'OR', 'POLITICAL', 'PLATFORM', 'OR', 'EVEN', 'ON', 'A', 'BRONZE', 'MEDAL'] +6128-63240-0007-510: ref=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'A', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0007-510: hyp=['THESE', 'THINGS', 'THE', 'EYES', 'ESPECIALLY', 'WITH', 'THEIR', 'SMOULDERING', 'FIRE', 'MIGHT', 'HAVE', 'INDICATED', 'THAT', 'HE', 'WAS', 'TO', 'BE', 'A', 'GREAT', 'AMERICAN', 'STATESMAN', 'OR', 'ON', 'THE', 'OTHER', 'HAND', 'THEY', 'MIGHT', 'SIMPLY', 'HAVE', 'PROVED', 'THAT', 'HE', 'CAME', 'FROM', 'CAROLINA', 'OR', 'ALABAMA'] +6128-63240-0008-511: ref=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 
'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'CONSONANTS', 'AND', 'SWALLOWED', 'HIS', 'VOWELS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 'ELISIONS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPANSE', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0008-511: hyp=['AND', 'YET', 'THE', 'READER', 'WHO', 'LIKES', 'A', 'COMPLETE', 'IMAGE', 'WHO', 'DESIRES', 'TO', 'READ', 'WITH', 'THE', 'SENSES', 'AS', 'WELL', 'AS', 'WITH', 'THE', 'REASON', 'IS', 'ENTREATED', 'NOT', 'TO', 'FORGET', 'THAT', 'HE', 'PROLONGED', 'HIS', 'COUNTENANCE', 'AND', 'SWALLOWED', 'HIS', 'VOWALS', 'THAT', 'HE', 'WAS', 'GUILTY', 'OF', 'ELYGIANS', 'AND', 'INTERPOLATIONS', 'WHICH', 'WERE', 'EQUALLY', 'UNEXPECTED', 'AND', 'THAT', 'HIS', 'DISCOURSE', 'WAS', 'PERVADED', 'BY', 'SOMETHING', 'SULTRY', 'AND', 'VAST', 'SOMETHING', 'ALMOST', 'AFRICAN', 'IN', 'ITS', 'RICH', 'BASKING', 'TONE', 'SOMETHING', 'THAT', 'SUGGESTED', 'THE', 'TEEMING', 'EXPOUNDS', 'OF', 'THE', 'COTTON', 'FIELD'] +6128-63240-0009-512: ref=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0009-512: hyp=['AND', 'HE', 'TOOK', 'UP', 'HIS', 'HAT', 'VAGUELY', 'A', 'SOFT', 'BLACK', 'HAT', 'WITH', 'A', 'LOW', 'CROWN', 'AND', 'AN', 'IMMENSE', 'STRAIGHT', 'BRIM'] +6128-63240-0010-513: ref=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'RAPPERS', 'AND', 'ROARING', 'RADICALS'] +6128-63240-0010-513: hyp=['WELL', 'SO', 'IT', 'IS', 'THEY', 'ARE', 'ALL', 'WITCHES', 'AND', 'WIZARDS', 'MEDIUMS', 'AND', 'SPIRIT', 'WRAPPERS', 'AND', 'ROWING', 'RADICALS'] +6128-63240-0011-514: ref=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0011-514: hyp=['IF', 'YOU', 'ARE', 'GOING', 'TO', 'DINE', 'WITH', 'HER', 'YOU', 'HAD', 'BETTER', 'KNOW', 'IT', 'OH', 'MURDER'] +6128-63240-0012-515: ref=['HE', 'LOOKED', 'AT', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0012-515: hyp=['HE', 'LIFTED', 'MISSUS', 'LUNA', 'WITH', 'INTELLIGENT', 'INCREDULITY'] +6128-63240-0013-516: ref=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0013-516: hyp=['SHE', 'WAS', 'ATTRACTIVE', 'AND', 'IMPERTINENT', 'ESPECIALLY', 'THE', 'LATTER'] +6128-63240-0014-517: ref=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0014-517: hyp=['HAVE', 'YOU', 'BEEN', 'IN', 'EUROPE'] +6128-63240-0015-518: ref=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0015-518: hyp=['NO', 'I', "HAVEN'T", 'BEEN', 'ANYWHERE'] +6128-63240-0016-519: ref=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0016-519: hyp=['SHE', 'HATES', 'IT', 'SHE', 'WOULD', 'LIKE', 'TO', 'ABOLISH', 'IT'] +6128-63240-0017-520: ref=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'AT', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LUNA'] +6128-63240-0017-520: hyp=['THIS', 'LAST', 'REMARK', 'HE', 'MADE', 'AT', 'A', 'VENTURE', 'FOR', 'HE', 'HAD', 'NATURALLY', 'NOT', 'DEVOTED', 'ANY', 'SUPPOSITION', 'WHATEVER', 'TO', 'MISSUS', 'LINA'] +6128-63240-0018-521: ref=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 
'IF', 'YOU', 'WERE'] +6128-63240-0018-521: hyp=['ARE', 'YOU', 'VERY', 'AMBITIOUS', 'YOU', 'LOOK', 'AS', 'IF', 'YOU', 'WERE'] +6128-63240-0019-522: ref=['AND', 'MISSUS', 'LUNA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0019-522: hyp=['AND', 'MISSUS', 'LENA', 'ADDED', 'THAT', 'NOW', 'SHE', 'WAS', 'BACK', 'SHE', "DIDN'T", 'KNOW', 'WHAT', 'SHE', 'SHOULD', 'DO'] +6128-63240-0020-523: ref=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0020-523: hyp=['ONE', "DIDN'T", 'EVEN', 'KNOW', 'WHAT', 'ONE', 'HAD', 'COME', 'BACK', 'FOR'] +6128-63240-0021-524: ref=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BOSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0021-524: hyp=['BESIDES', 'OLIVE', "DIDN'T", 'WANT', 'HER', 'IN', 'BOSTON', 'AND', "DIDN'T", 'GO', 'THROUGH', 'THE', 'FORM', 'OF', 'SAYING', 'SO'] +6128-63240-0022-525: ref=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WENT', 'THROUGH', 'ANY', 'FORMS'] +6128-63240-0022-525: hyp=['THAT', 'WAS', 'ONE', 'COMFORT', 'WITH', 'OLIVE', 'SHE', 'NEVER', 'WON', 'THROUGH', 'ANY', 'FORMS'] +6128-63240-0023-526: ref=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AT', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0023-526: hyp=['SHE', 'STOOD', 'THERE', 'LOOKING', 'CONSCIOUSLY', 'AND', 'RATHER', 'SERIOUSLY', 'AND', 'MISTER', 'RANSOM', 'A', 'SMILE', 'OF', 'EXCEEDING', 'FAINTNESS', 'PLAYED', 'ABOUT', 'HER', 'LIPS', 'IT', 'WAS', 'JUST', 'PERCEPTIBLE', 'ENOUGH', 'TO', 'LIGHT', 'UP', 'THE', 'NATIVE', 'GRAVITY', 'OF', 'HER', 'FACE'] +6128-63240-0024-527: ref=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'WHO', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNA'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0024-527: hyp=['HER', 'VOICE', 'WAS', 'LOW', 'AND', 'AGREEABLE', 'A', 'CULTIVATED', 'VOICE', 'AND', 'SHE', 'EXTENDED', 'A', 'SLENDER', 'WHITE', 'HAND', 'TO', 'HER', 'VISITOR', 'HER', 'REMARKED', 'WITH', 'SOME', 'SOLEMNITY', 'HE', 'FELT', 'A', 'CERTAIN', 'GUILT', 'OF', 'PARTICIPATION', 'IN', 'MISSUS', "LUNAR'S", 'INDISCRETION', 'THAT', 'HE', 'WAS', 'INTENSELY', 'HAPPY', 'TO', 'MAKE', 'HER', 'ACQUAINTANCE'] +6128-63240-0025-528: ref=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'COLD', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0025-528: hyp=['HE', 'OBSERVED', 'THAT', 'MISS', "CHANCELLOR'S", 'HAND', 'WAS', 'AT', 'ONCE', 'CALLED', 'AND', 'LIMP', 'SHE', 'MERELY', 'PLACED', 'IT', 'IN', 'HIS', 'WITHOUT', 'EXERTING', 'THE', 'SMALLEST', 'PRESSURE'] +6128-63240-0026-529: ref=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WE', 'ARE', 'GOING', 'TO', 'A', 'THEATRE', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINE', 'SO', 'EARLY'] +6128-63240-0026-529: hyp=['I', 'SHALL', 'BE', 'BACK', 'VERY', 'LATE', 'WILL', "DON'T", 'YOU', 'THEATER', 'PARTY', "THAT'S", 'WHY', 'WE', 'DINE', 'SO', 'EARLY'] +6128-63240-0027-530: ref=['MISSUS', 
"LUNA'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOYAGE'] +6128-63240-0027-530: hyp=['MISSUS', "LUNDY'S", 'FAMILIARITY', 'EXTENDED', 'EVEN', 'TO', 'HER', 'SISTER', 'SHE', 'REMARKED', 'TO', 'MISS', 'CHANCELLOR', 'THAT', 'SHE', 'LOOKED', 'AS', 'IF', 'SHE', 'WERE', 'GOT', 'UP', 'FOR', 'A', 'SEA', 'VOYAGE'] +6128-63241-0000-557: ref=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BOEOTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0000-557: hyp=['POOR', 'RANSOM', 'ANNOUNCED', 'THIS', 'FACT', 'TO', 'HIMSELF', 'AS', 'IF', 'HE', 'HAD', 'MADE', 'A', 'GREAT', 'DISCOVERY', 'BUT', 'IN', 'REALITY', 'HE', 'HAD', 'NEVER', 'BEEN', 'SO', 'BE', 'OTIAN', 'AS', 'AT', 'THAT', 'MOMENT'] +6128-63241-0001-558: ref=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 'HIS', 'OWN', 'SOFT', 'CLIME', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORILY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0001-558: hyp=['THE', 'WOMEN', 'HE', 'HAD', 'HITHERTO', 'KNOWN', 'HAD', 'BEEN', 'MAINLY', 'OF', 'HIS', 'OWN', 'SOFT', 'CLIMB', 'AND', 'IT', 'WAS', 'NOT', 'OFTEN', 'THEY', 'EXHIBITED', 'THE', 'TENDENCY', 'HE', 'DETECTED', 'AND', 'CURSORY', 'DEPLORED', 'IN', 'MISSUS', "LUNA'S", 'SISTER'] +6128-63241-0002-559: ref=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0002-559: hyp=['RANSOM', 'WAS', 'PLEASED', 'WITH', 'THE', 'VISION', 'OF', 'THAT', 'REMEDY', 'IT', 'MUST', 'BE', 'REPEATED', 'THAT', 'HE', 'WAS', 'VERY', 'PROVINCIAL'] +6128-63241-0003-560: ref=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0003-560: hyp=['HE', 'WAS', 'SORRY', 'FOR', 'HER', 'BUT', 'HE', 'SAW', 'IN', 'A', 'FLASH', 'THAT', 'NO', 'ONE', 'COULD', 'HELP', 'HER', 'THAT', 'WAS', 'WHAT', 'MADE', 'HER', 'TRAGIC'] +6128-63241-0004-561: ref=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0004-561: hyp=['SHE', 'COULD', 'NOT', 'DEFEND', 'HERSELF', 'AGAINST', 'A', 'RICH', 'ADMIRATION', 'A', 'KIND', 'OF', 'TENDERNESS', 'OF', 'ENVY', 'OF', 'ANY', 'ONE', 'WHO', 'HAD', 'BEEN', 'SO', 'HAPPY', 'AS', 'TO', 'HAVE', 'THAT', 'OPPORTUNITY'] +6128-63241-0005-562: ref=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0005-562: hyp=['HIS', 'FAMILY', 'WAS', 'RUINED', 'THEY', 'HAD', 'LOST', 'THEIR', 'SLAVES', 'THEIR', 'PROPERTY', 'THEIR', 'FRIENDS', 'AND', 'RELATIONS', 'THEIR', 'HOME', 'HAD', 'TASTED', 'OF', 'ALL', 'THE', 'CRUELTY', 'OF', 'DEFEAT'] +6128-63241-0006-563: ref=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HE', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 
'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'A', 'GNAWING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0006-563: hyp=['THE', 'STATE', 'OF', 'MISSISSIPPI', 'SEEMED', 'TO', 'HIM', 'THE', 'STATE', 'OF', 'DESPAIR', 'SO', 'HIS', 'SURRENDERED', 'THE', 'REMNANTS', 'OF', 'HIS', 'PATRIMONY', 'TO', 'HIS', 'MOTHER', 'AND', 'SISTERS', 'AND', 'AT', 'NEARLY', 'THIRTY', 'YEARS', 'OF', 'AGE', 'ALIGHTED', 'FOR', 'THE', 'FIRST', 'TIME', 'IN', 'NEW', 'YORK', 'IN', 'THE', 'COSTUME', 'OF', 'HIS', 'PROVINCE', 'WITH', 'FIFTY', 'DOLLARS', 'IN', 'HIS', 'POCKET', 'AND', 'ENNARING', 'HUNGER', 'IN', 'HIS', 'HEART'] +6128-63241-0007-564: ref=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BASIL', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] +6128-63241-0007-564: hyp=['IT', 'WAS', 'IN', 'THE', 'FEMALE', 'LINE', 'AS', 'BALES', 'AT', 'RANSOM', 'HAD', 'WRITTEN', 'IN', 'ANSWERING', 'HER', 'LETTER', 'WITH', 'A', 'GOOD', 'DEAL', 'OF', 'FORM', 'AND', 'FLOURISH', 'HE', 'SPOKE', 'AS', 'IF', 'THEY', 'HAD', 'BEEN', 'ROYAL', 'HOUSES'] +6128-63241-0008-565: ref=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANSOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HOW', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0008-565: hyp=['IF', 'IT', 'HAD', 'BEEN', 'POSSIBLE', 'TO', 'SEND', 'MISSUS', 'RANDOM', 'MONEY', 'OR', 'EVEN', 'CLOTHES', 'SHE', 'WOULD', 'HAVE', 'LIKED', 'THAT', 'BUT', 'SHE', 'HAD', 'NO', 'MEANS', 'OF', 'ASCERTAINING', 'HER', 'SUCH', 'AN', 'OFFERING', 'WOULD', 'BE', 'TAKEN'] +6128-63241-0009-566: ref=['OLIVE', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0009-566: hyp=['OLIV', 'HAD', 'A', 'FEAR', 'OF', 'EVERYTHING', 'BUT', 'HER', 'GREATEST', 'FEAR', 'WAS', 'OF', 'BEING', 'AFRAID'] +6128-63241-0010-567: ref=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'RULE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0010-567: hyp=['SHE', 'HAD', 'ERECTED', 'IT', 'INTO', 'A', 'SORT', 'OF', 'RULE', 'OF', 'CONDUCT', 'THAT', 'WHENEVER', 'SHE', 'SAW', 'A', 'RISK', 'SHE', 'WAS', 'TO', 'TAKE', 'IT', 'AND', 'SHE', 'HAD', 'FREQUENT', 'HUMILIATIONS', 'AT', 'FINDING', 'HERSELF', 'SAFE', 'AFTER', 'ALL'] +6128-63241-0011-568: ref=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASIL', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0011-568: hyp=['SHE', 'WAS', 'PERFECTLY', 'SAFE', 'AFTER', 'WRITING', 'TO', 'BASE', 'OR', 'RANSOM', 'AND', 'INDEED', 'IT', 'WAS', 'DIFFICULT', 'TO', 'SEE', 'WHAT', 'HE', 'COULD', 'HAVE', 'DONE', 'TO', 'HER', 'EXCEPT', 
'THANK', 'HER', 'HE', 'WAS', 'ONLY', 'EXCEPTIONALLY', 'SUPERLATIVE', 'FOR', 'HER', 'LETTER', 'AND', 'ASSURE', 'HER', 'THAT', 'HE', 'WOULD', 'COME', 'AND', 'SEE', 'HER', 'THE', 'FIRST', 'TIME', 'HIS', 'BUSINESS', 'HE', 'WAS', 'BEGINNING', 'TO', 'GET', 'A', 'LITTLE', 'SHOULD', 'TAKE', 'HIM', 'TO', 'BOSTON'] +6128-63241-0012-569: ref=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0012-569: hyp=['HE', 'WAS', 'TOO', 'SIMPLE', 'TOO', 'MISSISSIPPIAN', 'FOR', 'THAT', 'SHE', 'WAS', 'ALMOST', 'DISAPPOINTED'] +6128-63241-0013-570: ref=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THOUGH', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BASIL', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'CONTEND'] +6128-63241-0013-570: hyp=['OF', 'ALL', 'THINGS', 'IN', 'THE', 'WORLD', 'CONTENTION', 'WAS', 'MOST', 'SWEET', 'TO', 'HER', 'THE', 'WHY', 'IT', 'IS', 'HARD', 'TO', 'IMAGINE', 'FOR', 'IT', 'ALWAYS', 'COST', 'HER', 'TEARS', 'HEADACHES', 'A', 'DAY', 'OR', 'TWO', 'IN', 'BED', 'ACUTE', 'EMOTION', 'AND', 'IT', 'WAS', 'VERY', 'POSSIBLE', 'BEESER', 'RANSOM', 'WOULD', 'NOT', 'CARE', 'TO', 'COMPEND'] +6128-63244-0000-531: ref=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0000-531: hyp=['MISS', 'CHANCELLOR', 'HERSELF', 'HAD', 'THOUGHT', 'SO', 'MUCH', 'ON', 'THE', 'VITAL', 'SUBJECT', 'WOULD', 'NOT', 'SHE', 'MAKE', 'A', 'FEW', 'REMARKS', 'AND', 'GIVE', 'THEM', 'SOME', 'OF', 'HER', 'EXPERIENCES'] +6128-63244-0001-532: ref=['HOW', 'DID', 'THE', 'LADIES', 'ON', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOT'] +6128-63244-0001-532: hyp=['HOW', 'DID', 'THE', 'LADIES', 'AND', 'BEACON', 'STREET', 'FEEL', 'ABOUT', 'THE', 'BALLOTT'] +6128-63244-0002-533: ref=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0002-533: hyp=['PERHAPS', 'SHE', 'COULD', 'SPEAK', 'FOR', 'THEM', 'MORE', 'THAN', 'FOR', 'SOME', 'OTHERS'] +6128-63244-0003-534: ref=['WITH', 'HER', 'IMMENSE', 'SYMPATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFORMERS', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0003-534: hyp=['WITH', 'IMMENSE', 'SMATHY', 'FOR', 'REFORM', 'SHE', 'FOUND', 'HERSELF', 'SO', 'OFTEN', 'WISHING', 'THAT', 'REFUSE', 'WERE', 'A', 'LITTLE', 'DIFFERENT'] +6128-63244-0004-535: ref=['OLIVE', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKED', 'ABOUT', 'AS', 'IF', 'IT', 'WERE', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0004-535: hyp=['I', 'HAVE', 'HATED', 'TO', 'HEAR', 'THAT', 'FINE', 'AVENUE', 'TALKS', 'ABOUT', 'AS', 'IF', 'IT', 'WAS', 'SUCH', 'A', 'REMARKABLE', 'PLACE', 'AND', 'TO', 'LIVE', 'THERE', 'WHERE', 'A', 'PROOF', 'OF', 'WORLDLY', 'GLORY'] +6128-63244-0005-536: ref=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'LIVED', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 'MISSUS', 'FARRINDER', 'WHO', 'LIVED', 'AT', 'ROXBURY', 'OUGHT', 'NOT', 'TO', 'MIX', 'THINGS', 'UP'] +6128-63244-0005-536: hyp=['ALL', 'SORTS', 'OF', 'INFERIOR', 'PEOPLE', 'IF', 'THERE', 'AND', 'SO', 'BRILLIANT', 'A', 'WOMAN', 'AS', 
'MISSUS', 'FARINGDER', 'WHO', 'LIVED', 'AT', 'BRAXBURY', 'OUGHT', 'NOT', 'TO', 'MAKE', 'SPACE', 'UP'] +6128-63244-0006-537: ref=['SHE', 'KNEW', 'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIERARCHY', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'FARRINDER', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'A', 'REPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0006-537: hyp=['SHE', 'KNEW', 'HER', 'PLACE', 'IN', 'THE', 'BOSTON', 'HIRAKEE', 'AND', 'IT', 'WAS', 'NOT', 'WHAT', 'MISSUS', 'BARRING', 'JUST', 'SUPPOSED', 'SO', 'THAT', 'THERE', 'WAS', 'A', 'WANT', 'OF', 'PERSPECTIVE', 'IN', 'TALKING', 'TO', 'HER', 'AS', 'IF', 'SHE', 'HAD', 'BEEN', 'UNREPRESENTATIVE', 'OF', 'THE', 'ARISTOCRACY'] +6128-63244-0007-538: ref=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0007-538: hyp=['SHE', 'WISHED', 'TO', 'WORK', 'IN', 'ANOTHER', 'FIELD', 'SHE', 'HAD', 'LONG', 'BEEN', 'PREOCCUPIED', 'WITH', 'THE', 'ROMANCE', 'OF', 'THE', 'PEOPLE'] +6128-63244-0008-539: ref=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0008-539: hyp=['THIS', 'MIGHT', 'SEEM', 'ONE', 'OF', 'THE', 'MOST', 'ACCESSIBLE', 'OF', 'PLEASURES', 'BUT', 'IN', 'POINT', 'OF', 'FACT', 'SHE', 'HAD', 'NOT', 'FOUND', 'IT', 'SO'] +6128-63244-0009-540: ref=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WHITE', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LAST', 'ANALYSIS', 'THAT', 'THEY', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0009-540: hyp=['CHARLIE', 'WAS', 'A', 'YOUNG', 'MAN', 'IN', 'A', 'WORLD', 'OVERCOAT', 'AND', 'A', 'PAPER', 'COLLAR', 'IT', 'WAS', 'FOR', 'HIM', 'IN', 'THE', 'LASTIS', 'THAT', 'THE', 'CARED', 'MUCH', 'THE', 'MOST'] +6128-63244-0010-541: ref=['OLIVE', 'CHANCELLOR', 'WONDERED', 'HOW', 'MISSUS', 'FARRINDER', 'WOULD', 'TREAT', 'THAT', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0010-541: hyp=['OUT', 'OF', 'CHANCELLOR', 'I', 'WONDERED', 'HOW', 'MISSUS', 'THORNDER', 'WOULD', 'TREAT', 'THEIR', 'BRANCH', 'OF', 'THE', 'QUESTION'] +6128-63244-0011-542: ref=['IF', 'IT', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CERTAIN', 'STEPS', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0011-542: hyp=['IF', 'IT', 'BE', 'NECESSARY', 'WE', 'ARE', 'PREPARED', 'TO', 'TAKE', 'CERTAIN', 'STEPS', 'TO', 'CONCILIATE', 'THE', 'SHRINKING'] +6128-63244-0012-543: ref=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0012-543: hyp=['OUR', 'MOVEMENT', 'IS', 'FOR', 'ALL', 'IT', 'APPEALS', 'TO', 'THE', 'MOST', 'DELICATE', 'LADIES'] +6128-63244-0013-544: ref=['RAISE', 'THE', 'STANDARD', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'A', 'THOUSAND', 'NAMES'] +6128-63244-0013-544: hyp=['FOR', 'IT', 'IS', 'THE', 'STANDARD', 'AMONG', 'THEM', 'AND', 'BRING', 'ME', 'YOUR', 'THOUSAND', 'NAMES'] +6128-63244-0014-545: ref=['I', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 'WELL', 'AS', 'THE', 'BIG', 'CURRENTS', 'MISSUS', 'FARRINDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THE', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0014-545: hyp=['AND', 'LOOK', 'AFTER', 'THE', 'DETAILS', 'AS', 
'WELL', 'AS', 'THE', 'BIG', 'CURRANTS', 'MISSUS', 'FARRENDER', 'ADDED', 'IN', 'A', 'TONE', 'AS', 'EXPLANATORY', 'AS', 'COULD', 'BE', 'EXPECTED', 'OF', 'SUCH', 'A', 'WOMAN', 'AND', 'WITH', 'A', 'SMILE', 'OF', 'WHICH', 'THIS', 'SWEETNESS', 'WAS', 'THRILLING', 'TO', 'HER', 'LISTENER'] +6128-63244-0015-546: ref=['SAID', 'OLIVE', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', 'REMISSION', 'OF', 'RESPONSIBILITY'] +6128-63244-0015-546: hyp=['SAID', 'OLDEST', 'CHANCELLOR', 'WITH', 'A', 'FACE', 'WHICH', 'SEEMED', 'TO', 'PLEAD', 'FOR', 'A', 'REMISSIONOUS', 'RESPONSIBILITY'] +6128-63244-0016-547: ref=['I', 'WANT', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0016-547: hyp=['I', 'WARNED', 'TO', 'BE', 'NEAR', 'TO', 'THEM', 'TO', 'HELP', 'THEM'] +6128-63244-0017-548: ref=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENFRANCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] +6128-63244-0017-548: hyp=['IT', 'WAS', 'ONE', 'THING', 'TO', 'CHOOSE', 'FOR', 'HERSELF', 'BUT', 'NOW', 'THE', 'GREAT', 'REPRESENTATIVE', 'OF', 'THE', 'ENCOMCHISEMENT', 'OF', 'THEIR', 'SEX', 'FROM', 'EVERY', 'FORM', 'OF', 'BONDAGE', 'HAD', 'CHOSEN', 'FOR', 'HER'] +6128-63244-0018-549: ref=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0018-549: hyp=['THE', 'UNHAPPINESS', 'OF', 'WOMEN'] +6128-63244-0019-550: ref=['THEY', 'WERE', 'HER', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0019-550: hyp=['THEY', 'WERE', 'HIS', 'SISTERS', 'THEY', 'WERE', 'HER', 'OWN', 'AND', 'THE', 'DAY', 'OF', 'THEIR', 'DELIVERY', 'HAD', 'DAWNED'] +6128-63244-0020-551: ref=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'THE', 'JUST', 'REVOLUTION', 'IT', 'MUST', 'TRIUMPH', 'IT', 'MUST', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOOD', 'STAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0020-551: hyp=['THIS', 'WAS', 'THE', 'ONLY', 'SACRED', 'CAUSE', 'THIS', 'WAS', 'THE', 'GREAT', 'DRESSED', 'REVELATION', 'IT', 'WAS', 'TRIUMPH', 'IT', 'WAS', 'SWEEP', 'EVERYTHING', 'BEFORE', 'IT', 'IT', 'MUST', 'EXACT', 'FROM', 'THE', 'OTHER', 'THE', 'BRUTAL', 'BLOODSTAINED', 'RAVENING', 'RACE', 'THE', 'LAST', 'PARTICLE', 'OF', 'EXPIATION'] +6128-63244-0021-552: ref=['THEY', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PULSE', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0021-552: hyp=['THERE', 'WOULD', 'BE', 'NAMES', 'OF', 'WOMEN', 'WEAK', 'INSULTED', 'PERSECUTED', 'BUT', 'DEVOTED', 'IN', 'EVERY', 'PULSE', 'OF', 'THEIR', 'BEING', 'TO', 'THE', 'CAUSE', 'AND', 'ASKING', 'NO', 'BETTER', 'FATE', 'THAN', 'TO', 'DIE', 'FOR', 'IT'] +6128-63244-0022-553: ref=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'AS', 'THIS', 'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SAW', 'THE', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MIST', 'OF', 'EMOTION', 'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'AS', 'SUCCESS'] +6128-63244-0022-553: hyp=['IT', 'WAS', 'NOT', 'CLEAR', 'TO', 'THIS', 'INTERESTING', 'GIRL', 'IN', 'WHAT', 'MANNER', 'SUCH', 'A', 'SACRIFICE', 'OF', 'THIS', 
'LAST', 'WOULD', 'BE', 'REQUIRED', 'OF', 'HER', 'BUT', 'SHE', 'SOLDOM', 'MATTER', 'THROUGH', 'A', 'KIND', 'OF', 'SUNRISE', 'MIST', 'OF', 'THE', 'NATION', 'WHICH', 'MADE', 'DANGER', 'AS', 'ROSY', 'IS', 'SUCCESS'] +6128-63244-0023-554: ref=['WHEN', 'MISS', 'BIRDSEYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'HER', 'FAMILIAR', 'HER', 'COMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITARY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MARTYR'] +6128-63244-0023-554: hyp=['WHEN', 'MISS', "BIRD'S", 'EYE', 'APPROACHED', 'IT', 'TRANSFIGURED', 'HER', 'FAMILIAR', 'HYCOMICAL', 'SHAPE', 'AND', 'MADE', 'THE', 'POOR', 'LITTLE', 'HUMANITARY', 'HACK', 'SEEM', 'ALREADY', 'A', 'MARTYR'] +6128-63244-0024-555: ref=['OLIVE', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'UNREWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OR', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0024-555: hyp=['ONLY', 'IF', 'CHANCELLOR', 'LOOKED', 'AT', 'HER', 'WITH', 'LOVE', 'REMEMBERED', 'THAT', 'SHE', 'HAD', 'NEVER', 'IN', 'HER', 'LONG', 'IN', 'REWARDED', 'WEARY', 'LIFE', 'HAD', 'A', 'THOUGHT', 'OF', 'AN', 'IMPULSE', 'FOR', 'HERSELF'] +6128-63244-0025-556: ref=['SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMPLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6128-63244-0025-556: hyp=['IF', 'SHE', 'HAD', 'BEEN', 'CONSUMED', 'BY', 'THE', 'PASSION', 'OF', 'SYMPATHY', 'IT', 'HAD', 'CRUMBLED', 'HER', 'INTO', 'AS', 'MANY', 'CREASES', 'AS', 'AN', 'OLD', 'GLAZED', 'DISTENDED', 'GLOVE'] +6432-63722-0000-2431: ref=['BUT', 'SCUSE', 'ME', "DIDN'T", 'YO', 'FIGGER', 'ON', 'DOIN', 'SOME', 'DETECTIN', 'AN', 'GIVE', 'UP', 'FISHIN'] +6432-63722-0000-2431: hyp=['BUTCHUSE', 'ME', 'THEN', "YOU'LL", 'FAGONNE', 'DOING', 'SOME', 'DETECTIVE', 'AND', 'GIVIN', 'UP', 'FISHING'] +6432-63722-0001-2432: ref=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0001-2432: hyp=['AND', 'SHAG', 'WITH', 'THE', 'FREEDOM', 'OF', 'AN', 'OLD', 'SERVANT', 'STOOD', 'LOOKING', 'AT', 'HIS', 'MASTER', 'AS', 'IF', 'NOT', 'QUITE', 'UNDERSTANDING', 'THE', 'NEW', 'TWIST', 'THE', 'AFFAIRS', 'HAD', 'TAKEN'] +6432-63722-0002-2433: ref=["I'M", 'GOING', 'OFF', 'FISHING', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'I', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0002-2433: hyp=["I'M", 'GOIN', 'OUR', 'FISHIN', 'I', 'MAY', 'NOT', 'CATCH', 'ANYTHING', 'AND', 'MAY', 'NOT', 'WANT', 'TO', 'AFTER', 'I', 'GET', 'THERE'] +6432-63722-0003-2434: ref=['GET', 'READY', 'SHAG', 'YES', 'SAH', 'COLONEL'] +6432-63722-0003-2434: hyp=['GET', 'READY', 'SHAGG', 'YES', 'A', 'COLONEL'] +6432-63722-0004-2435: ref=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 'FACTS', 'IN', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0004-2435: hyp=['AND', 'HAVING', 'PUT', 'HIMSELF', 'IN', 'A', 'FAIR', 'WAY', 'AS', 'HE', 'HOPED', 'TO', 'SOLVE', 'SOME', 'OF', 'THE', 'PROBLEMS', 'CONNECTED', 'WITH', 'THE', 'DARCY', 'CASE', 'COLONEL', 'ASHLEY', 'WENT', 'DOWN', 'TO', 'POLICE', 'HEADQUARTERS', 'TO', 'LEARN', 'MORE', 
'FACTS', 'IN', 'THE', 'CONNECTION', 'WITH', 'THE', 'MURDER', 'OF', 'THE', 'EAST', 'INDIAN'] +6432-63722-0005-2436: ref=['PINKUS', 'AND', 'DONOVAN', "HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0005-2436: hyp=['PICK', 'US', 'AND', 'DONOVAN', "HAVEN'T", 'THEY', 'CARROLL', 'YEP'] +6432-63722-0006-2437: ref=['CARROLL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'CURL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0006-2437: hyp=['KAL', 'WAS', 'TOO', 'MUCH', 'ENGAGED', 'IN', 'WATCHING', 'THE', 'BLUE', 'SMOKE', 'GIRL', 'LAZILY', 'UPWARD', 'FROM', 'HIS', 'CIGAR', 'JUST', 'THEN', 'TO', 'SAY', 'MORE'] +6432-63722-0007-2438: ref=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0007-2438: hyp=['ARE', 'YOU', 'GOING', 'TO', 'WORK', 'ON', 'THAT', 'CASE', 'COLONEL'] +6432-63722-0008-2439: ref=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0008-2439: hyp=['BUT', 'HE', "HADN'T", 'ANY', 'MORE', 'TO', 'DO', 'WITH', 'IT', 'COLONEL', 'THAN', 'THAT', 'CAT'] +6432-63722-0009-2440: ref=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0009-2440: hyp=['PERHAPS', 'NOT', 'ADMITTED', 'COLONEL', 'ASHLEY'] +6432-63722-0010-2441: ref=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0010-2441: hyp=["WE'VE", 'GOT', 'OUR', 'MAN', 'AND', "THAT'S", 'ALL', 'WE', 'WANT'] +6432-63722-0011-2442: ref=["YOU'RE", 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0011-2442: hyp=["YOU'RE", 'ON', 'THE', 'DARCY', 'CASE', 'THEY', 'TELL', 'ME', 'IN', 'A', 'WAY', 'YES'] +6432-63722-0012-2443: ref=["I'M", 'WORKING', 'IN', 'THE', 'INTERESTS', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0012-2443: hyp=["I'M", 'WORKING', 'IN', 'THE', 'INTEREST', 'OF', 'THE', 'YOUNG', 'MAN'] +6432-63722-0013-2444: ref=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCES', 'LIKE'] +6432-63722-0013-2444: hyp=["IT'S", 'JUST', 'ONE', 'OF', 'THEM', 'COINCIDENCES', 'LIKE'] +6432-63722-0014-2445: ref=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0014-2445: hyp=['BUSTED', 'HIS', 'HEAD', 'IN', 'WITH', 'A', 'HEAVY', 'CANDLESTICK', 'ONE', 'OF', 'A', 'PAIR'] +6432-63722-0015-2446: ref=['GAD', 'EXCLAIMED', 'THE', 'COLONEL'] +6432-63722-0015-2446: hyp=['GAD', 'EXPLAINED', 'THE', 'COLONEL'] +6432-63722-0016-2447: ref=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0016-2447: hyp=['THE', 'VERY', 'PAIR', 'I', 'WAS', 'GOING', 'TO', 'BUY'] +6432-63722-0017-2448: ref=['LOOK', 'HERE', 'COLONEL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0017-2448: hyp=['LOOK', 'HERE', 'CAROL', 'DO', 'YOU', 'KNOW', 'ANYTHING', 'ABOUT', 'THIS'] +6432-63722-0018-2449: ref=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0018-2449: hyp=['AND', 'THE', "DETECTIVE'S", 'PROFESSIONAL', 'INSTINCTS', 'GOT', 'THE', 'UPPER', 'HAND', 'OF', 'HIS', 'FRIENDLINESS', 'NOT', 'THE', 'LEAST', 'IN', 'THE', 'WORLD', 'NOT', 'AS', 'MUCH', 'AS', 'YOU', 'DO', 'WAS', 'THE', 'COOL', 'ANSWER'] +6432-63722-0019-2450: ref=['I', 'HAPPENED', 'TO', 'SEE', 'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', 'SINGA', "PHUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 
'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0019-2450: hyp=['I', 'HAPPENED', 'TO', 'SEE', 'THOSE', 'CANDLESTICKS', 'IN', 'THE', 'WINDOW', 'OF', "SINGAFUT'S", 'SHOP', 'THE', 'OTHER', 'DAY', 'AND', 'I', 'MADE', 'UP', 'MY', 'MIND', 'TO', 'BUY', 'THEM', 'WHEN', 'I', 'HAD', 'A', 'CHANCE'] +6432-63722-0020-2451: ref=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0020-2451: hyp=['NOW', "I'M", 'AFRAID', 'I', "WON'T", 'BUT', 'HOW', 'DID', 'IT', 'HAPPEN'] +6432-63722-0021-2452: ref=['PHUT', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'SHERE', 'ALI'] +6432-63722-0021-2452: hyp=['FIVE', 'I', "DON'T", 'KNOW', 'WHETHER', "THAT'S", 'HIS', 'FIRST', 'OR', 'HIS', 'LAST', 'NAME', 'ANYHOW', 'HE', 'HAD', 'A', 'PARTNER', 'NAMED', 'TO', 'SHARE', 'ALI'] +6432-63722-0022-2453: ref=['ANYHOW', 'HE', 'AND', 'PHUT', "DIDN'T", 'GET', 'ALONG', 'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0022-2453: hyp=['ANYHOW', 'HE', 'INFECTED', 'GET', 'ALONG', 'VERY', 'WELL', 'IT', 'SEEMS'] +6432-63722-0023-2454: ref=['NEIGHBORS', 'OFTEN', 'HEARD', 'EM', 'SCRAPPIN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0023-2454: hyp=['LABORS', 'OFTEN', 'HEARD', 'HIM', 'SCRAP', 'IN', 'A', 'LOT', 'AND', 'THIS', 'AFTERNOON', 'THEY', 'WENT', 'AT', 'IT', 'AGAIN', 'HOT', 'AND', 'HEAVY'] +6432-63722-0024-2455: ref=['TOWARD', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0024-2455: hyp=['TO', 'OUR', 'DARK', 'A', 'MAN', 'WENT', 'IN', 'TO', 'BUY', 'A', 'LAMP'] +6432-63722-0025-2456: ref=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', "ALI'S", 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0025-2456: hyp=['HE', 'FOUND', 'THE', 'PLACE', 'WITHOUT', 'A', 'LIGHT', 'IN', 'IT', 'STUMBLED', 'OVER', 'SOMETHING', 'ON', 'THE', 'FLOOR', 'AND', 'THERE', 'WAS', 'ALWAYS', 'BODY', 'WITH', 'THE', 'HEAD', 'BUSTED', 'IN', 'AND', 'THIS', 'HEAVY', 'CANDLESTICK', 'NEAR', 'IT'] +6432-63722-0026-2457: ref=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0026-2457: hyp=['SURE', 'HELD', 'SO', 'TIGHT', 'WE', 'COULD', 'HARDLY', 'GET', 'IT', 'OUT'] +6432-63722-0027-2458: ref=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAGOS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBORS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYIN', 'I', 'SEE'] +6432-63722-0027-2458: hyp=['MAYBE', 'THE', 'FIGHT', 'WAS', 'ABOUT', 'WHO', 'OWNED', 'THE', 'WATCH', 'FOR', 'THE', 'DAG', 'WAS', 'TALKED', 'IN', 'THEIR', 'FOREIGN', 'LINGO', 'AND', 'NONE', 'OF', 'THE', 'NEIGHBOURS', 'COULD', 'TELL', 'WHAT', 'THEY', 'WERE', 'SAYING', 'I', 'SEE'] +6432-63722-0028-2459: ref=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0028-2459: hyp=['AND', 'THE', 'WATCH', 'HAVE', 'YOU', 'IT', 'YES', "IT'S", 'HERE'] +6432-63722-0029-2460: ref=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', 'HEADQUARTERS', 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'YET', 'SEE'] +6432-63722-0029-2460: hyp=["THAT'S", 'THE', 'WATCH', 'ANNOUNCED', 'THE', 'HEADQUARTER', 'DETECTIVE', 'REACHING', 'IN', 'FOR', 'IT', 'GOING', 'AT', 'SEE'] +6432-63722-0030-2461: 
ref=["YOU'RE", 'NOT', 'AS', 'SQUEAMISH', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HAND', 'AND', 'IN', 'A', "WOMAN'S"] +6432-63722-0030-2461: hyp=["YOU'RE", 'NOT', 'A', 'SCREAMY', 'AS', 'ALL', 'THAT', 'ARE', 'YOU', 'JUST', 'BECAUSE', 'IT', 'WAS', 'IN', 'A', 'DEAD', "MAN'S", 'HANDS', 'AND', 'A', "WOMAN'S"] +6432-63722-0031-2462: ref=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SKEPTICAL'] +6432-63722-0031-2462: hyp=['AND', "DONOVAN'S", 'VOICE', 'WAS', 'PLAINLY', 'SCEPTICAL'] +6432-63722-0032-2463: ref=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0032-2463: hyp=['YES', 'IT', 'MAY', 'HAVE', 'SOME', 'ROUGH', 'EDGES', 'ON', 'IT'] +6432-63722-0033-2464: ref=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0033-2464: hyp=['AND', "I'VE", 'READ', 'ENOUGH', 'ABOUT', 'GERMS', 'TO', 'KNOW', 'THE', 'DANGER', "I'D", 'ADVISE', 'YOU', 'TO', 'BE', 'CAREFUL'] +6432-63722-0034-2465: ref=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0034-2465: hyp=['IF', 'YOU', "DON'T", 'MIND', 'I', 'SHOULD', 'LIKE', 'TO', 'EXAMINE', 'THIS', 'A', 'BIT'] +6432-63722-0035-2466: ref=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'AT', 'HIS', 'IRISH', 'COMPATRIOT', 'SLIGHTLY', 'LAUGHED', 'THE', 'COLONEL'] +6432-63722-0035-2466: hyp=['BEFORE', 'THE', 'BIG', 'WIND', 'IN', 'IRELAND', 'SUGGESTED', 'THONG', 'WITH', 'A', 'NOD', 'OF', 'HIS', 'IRISH', 'CAMPATRIOT', 'SLIGHTLY', "THEY'LL", 'HAVE', 'THE', 'COLONEL'] +6432-63722-0036-2467: ref=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0036-2467: hyp=["THAT'S", 'RIGHT', 'AGREED', 'THE', 'COLONEL', 'AS', 'HE', 'CONTINUED', 'TO', 'MOVE', 'HIS', 'MAGNIFYING', 'GLASS', 'OVER', 'THE', 'SURFACE', 'OF', 'THE', 'STILL', 'TICKING', 'WATCH'] +6432-63722-0037-2468: ref=['AND', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEADPENCIL'] +6432-63722-0037-2468: hyp=['IN', 'A', 'CLOSE', 'OBSERVER', 'MIGHT', 'HAVE', 'OBSERVED', 'THAT', 'HE', 'DID', 'NOT', 'TOUCH', 'HIS', 'BARE', 'FINGERS', 'TO', 'THE', 'TIMEPIECE', 'BUT', 'POKED', 'IT', 'ABOUT', 'AND', 'TOUCHED', 'IT', 'HERE', 'AND', 'THERE', 'WITH', 'THE', 'END', 'OF', 'A', 'LEAD', 'PENCIL'] +6432-63722-0038-2469: ref=['AND', 'DONOVAN', 'TAKE', 'A', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0038-2469: hyp=['AND', 'DONALIN', 'TAKE', 'HER', "FRIEND'S", 'ADVICE', 'AND', "DON'T", 'BE', 'TOO', 'FREE', 'WITH', 'THAT', 'WATCH', 'TOO', 'FREE', 'WITH', 'IT'] +6432-63722-0039-2470: ref=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0039-2470: hyp=['ASKED', 'THE', 'SURPRISED', 'DETECTIVE', 'YES'] +6432-63722-0040-2471: ref=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0040-2471: hyp=["DON'T", 'SCRATCH', 'YOURSELF', 'ON', 'IT', 'WHATEVER', 'YOU', 'DO', 'WHY', 'NOT'] +6432-63722-0041-2472: ref=['SIMPLY', 'BECAUSE', 'THIS', 'WATCH'] +6432-63722-0041-2472: hyp=['SIMPLY', 'BECAUSE', 
'THIS', 'WATCH'] +6432-63722-0042-2473: ref=['SOME', 'ONE', 'OUT', 'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0042-2473: hyp=['SOME', 'ONE', 'OUT', 'HERE', 'TO', 'SEE', 'YOU'] +6432-63722-0043-2474: ref=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0043-2474: hyp=['ALL', 'RIGHT', 'BE', 'THERE', 'IN', 'A', 'SECOND'] +6432-63722-0044-2475: ref=['SINGA', 'PHUT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0044-2475: hyp=['SHING', 'AFOOT', 'WAS', 'THE', 'PANTING', 'ANSWER'] +6432-63722-0045-2476: ref=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'TWO', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0045-2476: hyp=['I', 'WANT', 'TO', 'TALK', 'OVER', "DARCY'S", 'CASE', 'WITH', 'YOU', 'THE', 'COLONEL', 'HAD', 'SAID', 'AND', 'THE', 'JEW', 'HAD', 'TALKED', 'HAD', 'THOUGHT', 'HAD', 'TALKED', 'AGAIN', 'AND', 'NOW', 'WERE', 'SILENT', 'FOR', 'A', 'TIME'] +6432-63722-0046-2477: ref=['WHAT', 'ARE', 'THE', 'CHANCES', 'OF', 'GETTING', 'HIM', 'OFF', 'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0046-2477: hyp=['WHAT', 'ARE', 'THE', 'CHURCHES', 'OF', 'GETTING', 'HIM', 'OFF', 'LEGALLY', 'IF', 'WE', 'GO', 'AT', 'IT', 'FROM', 'A', 'NEGATIVE', 'STANDPOINT', 'ASKED', 'THE', 'COLONEL'] +6432-63722-0047-2478: ref=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0047-2478: hyp=['RATHER', 'A', 'HYPOTHETICAL', 'QUESTION', 'COLONEL', 'BUT', 'I', 'SHOULD', 'SAY', 'IT', 'MIGHT', 'BE', 'A', 'FIFTY', 'FIFTY', 'PROPOSITION'] +6432-63722-0048-2479: ref=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'WITH', 'A', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0048-2479: hyp=['AT', 'BEST', 'HE', 'WOULD', 'GET', 'OFF', 'FOR', 'THE', 'SCOTCH', 'VERDICT', 'OF', 'NOT', 'PROVEN', 'BUT', 'HE', "DOESN'T", 'WANT', 'THAT', 'NOR', 'DO', 'I'] +6432-63722-0049-2480: ref=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0049-2480: hyp=['AND', 'YOU', 'I', "DON'T", 'WANT', 'IT', 'EITHER'] +6432-63722-0050-2481: ref=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0050-2481: hyp=['BUT', 'I', 'WANT', 'TO', 'KNOW', 'JUST', 'WHERE', 'WE', 'STAND', 'NOW', 'I', 'KNOW'] +6432-63722-0051-2482: ref=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0051-2482: hyp=['BUT', 'I', 'NEED', 'TO', 'DO', 'A', 'LITTLE', 'MORE', 'SMOKING', 'OUT', 'FIRST', 'NOW', 'I', 'WANT', 'TO', 'THINK'] +6432-63722-0052-2483: ref=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0052-2483: hyp=['IF', "YOU'LL", 'EXCUSE', 'ME', "I'LL", 'PRETEND', "I'M", 'FISHING', 'AND', 'I', 'MAY', 'CATCH', 'SOMETHING'] +6432-63722-0053-2484: ref=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', "I'LL", 'LAND', 'MY', 'FISH'] +6432-63722-0053-2484: hyp=['IN', 'FACT', 'I', 'HAVE', 'A', 'FEELING', 'THAT', 'I', 'LAND', 'MY', 'FISH'] +6432-63722-0054-2485: ref=["I'D", 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 'KENNETH'] +6432-63722-0054-2485: hyp=['I', 'RECOMMEND', 'HIM', 'TO', 'YOU', 'INSTEAD', 'OF', 'BLACKSTONE', 'THANKS', 'LAUGHED', 
'KENNETH'] +6432-63722-0055-2486: ref=['WHAT', 'IS', 'IT', 'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0055-2486: hyp=['WHAT', 'IS', 'IT', 'PERHAPS', 'I', 'CAN', 'HELP', 'YOU'] +6432-63722-0056-2487: ref=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0056-2487: hyp=['THE', 'OLD', 'ADAGE', 'OF', 'TWO', 'HEADS', 'YOU', 'KNOW'] +6432-63722-0057-2488: ref=['YES', 'IT', 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0057-2488: hyp=['YES', "IT'S", 'STILL', 'HOLDS', 'GOOD'] +6432-63722-0058-2489: ref=['NO', 'ALIMONY', 'REPEATED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0058-2489: hyp=['NO', 'ALIMONY', 'REPLIED', 'THE', 'COLONEL', 'PUZZLED', 'YES', 'JUST', 'THAT'] +6432-63722-0059-2490: ref=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63722-0059-2490: hyp=['AND', "THERE'S", 'NO', 'REASON', 'YOU', "SHOULDN'T", 'KNOW'] +6432-63723-0000-2491: ref=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0000-2491: hyp=['CHUCKLED', 'THE', 'COLONEL', 'AS', 'HE', 'SKILFULLY', 'PLAYED', 'THE', 'LUCKLESS', 'TROUT', 'NOW', 'STRUGGLING', 'TO', 'GET', 'LOOSE', 'FROM', 'THE', 'HOOK'] +6432-63723-0001-2492: ref=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'THE', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREEL', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0001-2492: hyp=['AND', 'WHEN', 'THE', 'FISH', 'WAS', 'LANDED', 'PANTING', 'ON', 'THE', 'GRASS', 'AND', 'SHAG', 'HAD', 'BEEN', 'ROUSED', 'FROM', 'HIS', 'SLUMBER', 'TO', 'SLIP', 'A', 'NOW', 'LIMP', 'FISH', 'INTO', 'THE', 'CREOLE', 'COLONEL', 'ASHLEY', 'GAVE', 'A', 'SIGH', 'OF', 'RELIEF', 'AND', 'REMARKED', 'I', 'THINK', 'I', 'SEE', 'IT', 'NOW'] +6432-63723-0002-2493: ref=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0002-2493: hyp=['THE', 'REASON', 'SHE', 'ASKED', 'NO', 'ALIMONY', 'INQUIRED', 'KENNETH'] +6432-63723-0003-2494: ref=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0003-2494: hyp=['NO', 'I', "WASN'T", 'THINKING', 'OF', 'THAT'] +6432-63723-0004-2495: ref=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0004-2495: hyp=['HOWEVER', "DON'T", 'THINK', "I'M", 'NOT', 'INTERESTED', 'IN', 'YOUR', 'CASE', "I'VE", 'FINISHED', 'ENOUGH', 'FOR', 'TO', 'DAY'] +6432-63723-0005-2496: ref=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0005-2496: hyp=['WELL', 'I', "DON'T", 'KNOW', 'THAT', 'YOU', 'CAN'] +6432-63723-0006-2497: ref=['IT', "ISN'T", 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0006-2497: hyp=['IT', "ISN'T", 'GENERALLY', 'KNOWN', 'WENT', 'ON', 'THE', 'LAWYER', 'THAT', 'THE', 'HOTEL', "KEEPER'S", 'WIFE', 'HAS', 'LEFT', 'HIM'] +6432-63723-0007-2498: ref=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 'INTO', 'THAT', 'OF', 'THE', 'BASER', 'SORT'] +6432-63723-0007-2498: hyp=['IT', 'WAS', 'ONE', 'OF', 'WHAT', 'AT', 'FIRST', 'MIGHT', 'BE', 'CALLED', 'REFINED', 'CRUELTY', 'ON', 'HER', "HUSBAND'S", 'PART', 'DEGENERATING', 'GRADUALLY', 
'INTO', 'THAT', 'OF', 'A', 'BASER', 'SORT'] +6432-63723-0008-2499: ref=['YOU', "DON'T", 'MEAN', 'THAT', 'LARCH', 'STRUCK', 'HER', 'THAT', 'THERE', 'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0008-2499: hyp=['YOU', "DON'T", 'MEAN', 'THAT', 'LARGE', 'STRUCK', 'HER', 'THAT', 'THERE', 'WAS', 'PHYSICAL', 'ABUSE', 'DO', 'YOU', 'ASKED', 'THE', 'COLONEL', "THAT'S", 'WHAT', 'HE', 'DID'] +6432-63723-0009-2500: ref=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0009-2500: hyp=['THE', 'COLONEL', 'DID', 'NOT', 'DISCLOSE', 'THE', 'FACT', 'THAT', 'IT', 'WAS', 'NO', 'NEWS', 'TO', 'HIM'] +6432-63723-0010-2501: ref=['AARON', "GRAFTON'S", 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTEDLY', 'CONFIRMED'] +6432-63723-0010-2501: hyp=['AARON', "GRAFTON'S", 'STATEMENT', 'WAS', 'BEING', 'UNEXPECTED', 'GREAT', 'CONFIRMED'] +6432-63723-0011-2502: ref=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0011-2502: hyp=['HE', 'REMEMBERED', 'THAT', 'CYNTHIA', 'AND', 'GRAFTON', 'HAD', 'ONCE', 'BEEN', 'IN', 'LOVE', 'WITH', 'EACH', 'OTHER'] +6432-63723-0012-2503: ref=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0012-2503: hyp=['SHE', 'SAID', 'HE', 'HAD', 'STRUCK', 'HER', 'MORE', 'THAN', 'ONCE', 'AND', 'SHE', 'COULD', 'STAND', 'IT', 'NO', 'LONGER'] +6432-63723-0013-2504: ref=['BECAUSE', 'LARCH', 'MADE', 'NO', 'DEFENSE'] +6432-63723-0013-2504: hyp=['BECAUSE', 'LARGE', 'MADE', 'NO', 'DEFENCE'] +6432-63723-0014-2505: ref=['LARCH', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0014-2505: hyp=['LARGE', 'BY', 'REFUSING', 'TO', 'APPEAR', 'PRACTICALLY', 'ADMITTED', 'THE', 'CHARGES', 'AGAINST', 'HIM', 'AND', 'DID', 'NOT', 'OPPOSE', 'THE', 'SEPARATION'] +6432-63723-0015-2506: ref=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'COURT', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0015-2506: hyp=['SO', 'I', 'HAD', 'TO', 'LET', 'HER', 'HAVE', 'HER', 'WAY', 'AND', 'WE', 'DID', 'NOT', 'ASK', 'THE', 'CORP', 'FOR', 'MONEY', 'THOUGH', 'I', 'HAD', 'NO', 'SUCH', 'SQUEAMISH', 'FEELINGS', 'WHEN', 'IT', 'CAME', 'TO', 'MY', 'COUNSEL', 'FEE'] +6432-63723-0016-2507: ref=['NO', 'BUT', 'HE', 'WILL', 'OR', "I'LL", 'SUE', 'HIM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0016-2507: hyp=['NO', 'BUT', 'HE', 'WILL', 'OR', 'ELSE', 'UM', 'AND', 'GET', 'JUDGMENT', 'OH', "HE'LL", 'PAY', 'ALL', 'RIGHT'] +6432-63723-0017-2508: ref=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0017-2508: hyp=['AND', 'IT', 'TAKES', 'ALL', 'SORTS', 'OF', 'PERSONS', 'TO', 'MAKE', 'IT', 'UP'] +6432-63723-0018-2509: ref=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0018-2509: hyp=['STILL', 'I', 'WOULD', 'LIKE', 'TO', 'KNOW'] +6432-63723-0019-2510: ref=['THE', 'MURDER', 'OF', 'MISSUS', 'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0019-2510: hyp=['THE', 'MURDER', 'OF', 'MISSUS', 
'DARCY', 'HAD', 'SOME', 'TIME', 'AGO', 'BEEN', 'SHIFTED', 'OFF', 'THE', 'FRONT', 'PAGE', 'THOUGH', 'IT', 'WOULD', 'GET', 'BACK', 'THERE', 'WHEN', 'THE', 'YOUNG', 'JEWELER', 'WAS', 'TRIED'] +6432-63723-0020-2511: ref=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0020-2511: hyp=['IT', 'HAD', 'A', 'DOUBLE', 'REPUTATION', 'SO', 'TO', 'SPEAK'] +6432-63723-0021-2512: ref=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALLROOM', 'AND', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0021-2512: hyp=['GRAVE', 'AND', 'EVEN', 'REVEREND', 'THE', 'CONVENTIONS', 'ASSEMBLED', 'IN', 'ITS', 'BALL', 'ROOM', 'IN', 'POLITICIANS', 'OF', 'THE', 'UPPER', 'IF', 'NOT', 'BETTER', 'CLASS', 'WERE', 'FREQUENTLY', 'SEEN', 'IN', 'ITS', 'DINING', 'ROOM', 'OR', 'CAFE'] +6432-63723-0022-2513: ref=['LARCH', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0022-2513: hyp=['LARGE', 'HIMSELF', 'WAS', 'A', 'PECULIAR', 'CHARACTER'] +6432-63723-0023-2514: ref=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0023-2514: hyp=['IN', 'A', 'SMALLER', 'PLACE', 'HE', 'WOULD', 'HAVE', 'BEEN', 'CALLED', 'A', 'SALOON', 'KEEPER'] +6432-63723-0024-2515: ref=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'IT', 'WAS', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'CYNTHIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0024-2515: hyp=['AND', 'IT', 'WAS', 'THIS', 'MAN', 'RICH', 'EVER', 'SAID', 'HANDSOME', 'CERTAINLY', 'THAT', 'CYNTHIA', 'RATCHFORD', 'HAD', 'MARRIED'] +6432-63723-0025-2516: ref=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0025-2516: hyp=['TO', 'THIS', 'WAS', 'THE', 'ANSWER', 'WHISPERED', 'MONEY'] +6432-63723-0026-2517: ref=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0026-2517: hyp=['AND', 'IN', 'A', 'WAY', 'IT', 'WAS', 'TRUE'] +6432-63723-0027-2518: ref=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0027-2518: hyp=['SHE', 'ALSO', 'SAW', 'AN', 'OPPORTUNITY', 'OF', 'PAYING', 'OLD', 'DEBTS', 'AND', 'REAPING', 'SOME', 'REVENGES'] +6432-63723-0028-2519: ref=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARCH', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0028-2519: hyp=['AFTER', 'THE', 'MARRIAGE', 'WHICH', 'WAS', 'A', 'BRILLIANT', 'AND', 'GAY', 'ONE', 'IF', 'NOT', 'HAPPY', 'THE', 'LARGE', 'HOTEL', 'IT', 'COULD', 'HARDLY', 'BE', 'CALLED', 'A', 'HOME', 'BECAME', 'THE', 'SCENE', 'OF', 'MANY', 'FESTIVE', 'OCCASIONS'] +6432-63723-0029-2520: ref=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARCH', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0029-2520: hyp=['THEN', 'IT', 'WAS', 'SAID', 'OF', 'LARGE', 'THAT', 'SOON', 'AFTER', 'THE', 'ECHOES', 'OF', 'THE', 'WEDDING', 'CHIMES', 'HAD', 'DIED', 'AWAY', 'HE', 'HAD', 'BEGUN', 'TO', 'TREAT', 'HIS', 'WIFE', 'WITH', 'A', 'REFINED', 'CRUELTY', 'THAT', 'HIDDEN', 'AWAY', 'FROM', 'THE', 'PUBLIC', 'UNDERNEATH', 
'HIS', 'HABITUAL', 'MANNER', 'THERE', 'WAS', 'THE', 'RAWNESS', 'OF', 'THE', 'BRUTE'] +6432-63723-0030-2521: ref=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMIRCHED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0030-2521: hyp=['BUT', 'IT', 'WAS', 'NOTICED', 'THAT', 'THE', 'OLDER', 'AND', 'MORE', 'CONSERVATIVE', 'FAMILIES', 'WERE', 'LESS', 'OFTEN', 'REPRESENTED', 'AND', 'WHEN', 'THEY', 'WERE', 'IT', 'WAS', 'BY', 'SOME', 'OF', 'THE', 'YOUNGER', 'MEMBERS', 'WHOSE', 'REPUTATIONS', 'WERE', 'ALREADY', 'SMARCHED', 'OR', 'WHO', 'HAD', 'NOT', 'YET', 'ACQUIRED', 'ANY', 'AND', 'WERE', 'WILLING', 'TO', 'TAKE', 'A', 'CHANCE'] +6432-63723-0031-2522: ref=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'THE', 'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0031-2522: hyp=['IT', "WOULDN'T", 'DO', 'YOU', 'KNOW', 'AFTER', 'THAT', 'STORY', 'CAME', 'OUT', 'FOR', 'ME', 'AND', 'THE', 'VICE', 'CHANCELLOR', 'WHO', 'SAT', 'IN', 'A', 'CASE', 'AS', 'WELL', 'AS', 'OTHER', 'JUDGES', 'AND', 'MEMBERS', 'OF', 'THE', 'BAR', 'TO', 'BE', 'SEEN', 'THERE', 'KENNETH', 'EXPLAINED', 'TO', 'THE', 'COLONEL'] +6432-63723-0032-2523: ref=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0032-2523: hyp=['MEANWHILE', 'COLONEL', 'ASHLEY', 'WAS', 'A', 'VERY', 'BUSY', 'MAN', 'AND', 'TO', 'NO', 'ONE', 'DID', 'HE', 'TELL', 'VERY', 'MUCH', 'ABOUT', 'HIS', 'ACTIVITIES', 'HE', 'SAW', 'DARCY', 'FREQUENTLY', 'AT', 'THE', 'JAIL', 'AND', 'TO', 'THAT', 'YOUNG', "MAN'S", 'PLEADINGS', 'THAT', 'SOMETHING', 'TO', 'BE', 'DONE', 'ALWAYS', 'RETURNED', 'THE', 'ANSWER'] +6432-63723-0033-2524: ref=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0033-2524: hyp=["DON'T", 'WORRY', 'IT', 'WILL', 'COME', 'OUT', 'ALL', 'RIGHT'] +6432-63723-0034-2525: ref=["I'M", 'GOING', 'TO', 'RECTIFY', 'THEM', 'BUT', 'IT', 'WILL', 'TAKE', 'TIME'] +6432-63723-0034-2525: hyp=["I'M", 'GOING', 'DIRECTIFY', 'THEM', 'BUT', 'I', 'WILL', 'TAKE', 'TIME'] +6432-63723-0035-2526: ref=["IT'S", 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0035-2526: hyp=['HIS', 'HARD', 'FOR', 'MISS', 'MASON', 'TOO', 'ALTHOUGH', "SHE'S", 'BEARING', 'UP', 'LIKE', 'A', 'MAJOR'] +6432-63723-0036-2527: ref=['SO', 'KING', 'GOT', 'BAIL', 'WHO', 'PUT', 'IT', 'UP'] +6432-63723-0036-2527: hyp=['SO', 'KING', 'GOD', 'BAIL', 'WHO', 'PUT', 'IT', 'UP'] +6432-63723-0037-2528: ref=['IT', 'WAS', 'HIGH', 'LARCH'] +6432-63723-0037-2528: hyp=['IT', 'WAS', 'I', 'LARCH'] +6432-63723-0038-2529: ref=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0038-2529: hyp=['THEY', 'TOOK', 'HARRY', 'AWAY', 'A', 'WHILE', 'AGO'] +6432-63723-0039-2530: ref=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 
'TO', 'BE', 'IN', 'JUST', 'THE', 'SAME'] +6432-63723-0039-2530: hyp=['BUT', 'HIS', 'ARE', 'PRETTY', 'UNCERTAIN', 'SHOES', 'TO', 'BE', 'IN', 'JUST', 'THE', 'SAME'] +6432-63723-0040-2531: ref=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0040-2531: hyp=['ONLY', 'THAT', 'I', 'DARCY', 'HESITATED', 'AND', 'GREW', 'RED'] +6432-63723-0041-2532: ref=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0041-2532: hyp=['GOOD', 'EVENING', 'COLONEL', 'HE', 'CALLED', 'GENIALLY', 'WILL', 'YOU', 'JOIN', 'ME', 'IN', 'A', 'WELSH', 'RABBIT'] +6432-63723-0042-2533: ref=['THANK', 'YOU', 'NO'] +6432-63723-0042-2533: hyp=['THANK', 'YOU', 'NO'] +6432-63723-0043-2534: ref=["I'M", 'AFRAID', 'MY', 'DIGESTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0043-2534: hyp=["I'M", 'AFRAID', 'MY', 'DIGESTION', "ISN'T", 'QUITE', 'UP', 'TO', 'THAT', 'AS', "I'VE", 'HAD', 'TO', 'CUT', 'OUT', 'MY', 'FISHING', 'OF', 'LATE'] +6432-63723-0044-2535: ref=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0044-2535: hyp=['NOW', 'AS', 'TO', 'CERTAIN', 'MATTERS', 'IN', 'THE', 'STORE', 'ON', 'THE', 'MORNING', 'OF', 'THE', 'MURDER'] +6432-63723-0045-2536: ref=['THE', 'STOPPED', 'CLOCKS', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0045-2536: hyp=['THEY', 'STOPPED', 'CLUXED', 'FOR', 'INSTANCE', 'HAVE', 'YOU', 'ANY', 'THEORY'] +6432-63723-0046-2537: ref=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0046-2537: hyp=['THERE', 'WERE', 'THREE', 'OF', 'THEM', 'THE', 'CENTER', 'FIGURE', 'BEING', 'THAT', 'OF', 'HARRY', 'KING', 'AND', 'HE', 'WAS', 'VERY', 'MUCH', 'INTOXICATED'] +6432-63723-0047-2538: ref=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0047-2538: hyp=['THAT', 'IS', 'NOT', 'ALWAYS', 'BUT', 'SOMETIMES', 'IT', 'HAPPENED', 'TO', 'BE', 'SO', 'NOW'] +6432-63723-0048-2539: ref=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0048-2539: hyp=['I', 'BEG', 'YOUR', 'PARDON', 'HE', 'SAID', 'IN', 'THE', 'CULTURED', 'TONES', 'HE', 'KNEW', 'SO', 'WELL', 'HOW', 'TO', 'USE', 'YET', 'OF', 'WHICH', 'HE', 'MADE', 'SO', 'LITTLE', 'USE', 'OF', 'LATE'] +6432-63723-0049-2540: ref=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0049-2540: hyp=['I', 'SAID', 'WHERE', 'HAVE', 'YOU', 'BEEN', 'REMARKED', 'THE', 'OTHER', "WE'VE", 'MISSED', 'YOU'] +6432-63723-0050-2541: ref=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0050-2541: hyp=['I', 'SAID', 'I', 'WAS', 'GOLFING', 'HE', 'WENT', 'ON', 'EXCEEDINGLY', 'DISTINCTLY', 'THOUGH', 'WITH', 'AN', 'EFFORT'] +6432-63723-0051-2542: ref=['WHY', 'POLONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0051-2542: hyp=['WHY', 'BONIUS', 'SOME', 'ONE', 'ASKED'] +6432-63723-0052-2543: ref=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 
'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0052-2543: hyp=['BECAUSE', 'DEAR', 'FRIEND', 'REPLIED', 'KING', 'SOFTLY', 'HE', 'SOMEWHAT', 'RESEMBLES', 'A', 'CERTAIN', 'PERSON', 'HERE', 'WHO', 'TALKS', 'TOO', 'MUCH', 'BUT', 'WHO', 'IS', 'NOT', 'SO', 'WISE', 'AS', 'HE', 'THINKS'] +6432-63723-0053-2544: ref=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COINS', 'ON', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WADDED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0053-2544: hyp=['THERE', 'WAS', 'A', 'RATTLE', 'OF', 'COIN', 'DOWN', 'THE', 'MAHOGANY', 'BAR', 'AS', 'KING', 'SOUGHT', 'TO', 'DISENTANGLE', 'A', 'SINGLE', 'BILL', 'FROM', 'THE', 'WATERED', 'UP', 'CURRENCY', 'IN', 'HIS', 'POCKET'] +6432-63723-0054-2545: ref=["IT'S", "IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0054-2545: hyp=["IT'S", 'AN', 'ODD', 'COIN', 'AN', 'OLD', 'ROMAN', 'ONE', 'THAT', 'MISSUS', 'DARCY', 'HAD', 'IN', 'HER', 'PRIVATE', 'COLLECTION', 'KEPT', 'IN', 'THE', 'JEWELRY', 'STORE', 'SAFE', 'WAS', 'THE', 'WHISPERED', 'ANSWER'] +6432-63723-0055-2546: ref=['I', 'WENT', 'OVER', 'THEM', 'THE', 'OTHER', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0055-2546: hyp=['I', 'WENT', 'OVER', 'THEM', 'NEAR', 'THE', 'DAY', 'AND', 'NOTICED', 'SOME', 'WERE', 'MISSING', 'THOUGH', 'I', 'SAW', 'THEM', 'ALL', 'WHEN', 'I', 'PAID', 'A', 'VISIT', 'TO', 'HER', 'JUST', 'A', 'SHORT', 'TIME', 'BEFORE', 'SHE', 'WAS', 'KILLED'] +6432-63723-0056-2547: ref=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0056-2547: hyp=['THAT', 'WAS', 'HERS', 'WENT', 'ON', 'THE', 'JEWELER'] +6432-63723-0057-2548: ref=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6432-63723-0057-2548: hyp=['NOW', 'HARRY', 'KING', 'HAS', 'IT', 'EXCLAIMED', 'COLONEL', 'ASHLEY'] +6938-70848-0000-1216: ref=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0000-1216: hyp=['EVEN', 'THE', 'SUN', 'CAME', 'OUT', 'PALE', 'AND', 'WATERY', 'AT', 'NOON'] +6938-70848-0001-1217: ref=['THE', 'COLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0001-1217: hyp=['THE', 'GOLDS', 'AND', 'RHEUMATISM', 'OF', 'THE', 'RAINY', 'MONTHS', 'VANISHED'] +6938-70848-0002-1218: ref=['ASKED', 'A', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0002-1218: hyp=['AS', 'TO', 'WORKER', 'LAST', 'SUNDAY', 'YOU', 'DID', 'IT', 'WHEN', 'THE', 'YUNKERS'] +6938-70848-0003-1219: ref=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0003-1219: hyp=['WELL', "DIDN'T", 'THEY', 'SHOOT', 'US', 'ONE', 'MAN', 'EXHIBITED', 'HIS', 'ARM', 'IN', 'A', 'SLING'] +6938-70848-0004-1220: ref=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0004-1220: hyp=["HAVEN'T", 'I', 'GOT', 'SOMETHING', 'TO', 'REMEMBER', 'THEM', 'BY', 'THE', 'DEVILS'] +6938-70848-0005-1221: ref=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WHO', 'IS', 'LENIN', 'A', 'GERMAN'] +6938-70848-0005-1221: hyp=['WHO', 'ARE', 'YOU', 'TO', 'DESTROY', 'THE', 'LEGAL', 'GOVERNMENT', 'WITH', 'LANY', 
'A', 'GERMAN'] +6938-70848-0006-1222: ref=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLUTIONIST', 'A', 'PROVOCATOR', 'THEY', 'BELLOWED', 'AT', 'HIM'] +6938-70848-0006-1222: hyp=['WHO', 'ARE', 'YOU', 'A', 'COUNTER', 'REVOLITIONIST', 'APPROPATOR', 'THEY', 'BELOVED', 'AT', 'HIM'] +6938-70848-0007-1223: ref=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'BUT', "YOU'RE", 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSSIA'] +6938-70848-0007-1223: hyp=['YOU', 'CALL', 'YOURSELVES', 'THE', 'PEOPLE', 'OF', 'A', 'SHEPHERD', 'YOU', 'ARE', 'NOT', 'THE', 'PEOPLE', 'OF', 'RUSSIA'] +6938-70848-0008-1224: ref=['THE', 'PEASANTS', 'ARE', 'THE', 'PEOPLE', 'OF', 'RUSSIA', 'WAIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0008-1224: hyp=['TO', 'PIECE', 'AND', 'OTHER', 'PEOPLE', 'OF', 'RUSSIA', 'WAIT', 'UNTIL', 'THE', 'PEASANTS'] +6938-70848-0009-1225: ref=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKINGMEN', 'LIKE', 'OURSELVES'] +6938-70848-0009-1225: hyp=['WE', 'KNOW', 'WHAT', 'THE', 'PEASANTS', 'WILL', 'SAY', "AREN'T", 'THEY', 'WORKING', 'MEN', 'LIKE', 'OURSELVES'] +6938-70848-0010-1226: ref=['THESE', 'MEN', 'ESPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0010-1226: hyp=['THIS', 'MAN', 'HAS', 'SPECIALLY', 'WELCOMED', 'THE', 'CALL', 'TO', 'A', 'CONGRESS', 'OF', 'PEASANTS'] +6938-70848-0011-1227: ref=['THESE', 'LAST', 'WERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0011-1227: hyp=['THIS', 'LAST', 'WHERE', 'THE', 'YOUNG', 'GENERATION', 'WHO', 'HAD', 'BEEN', 'SERVING', 'IN', 'THE', 'ARMY'] +6938-70848-0012-1228: ref=['WHEREUPON', 'THE', 'OLD', 'EXECUTIVE', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0012-1228: hyp=['WHEREUPON', 'THE', 'OLD', 'EXECUTED', 'COMMITTEE', 'LEFT', 'THE', 'HALL'] +6938-70848-0013-1229: ref=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0013-1229: hyp=['DOWN', 'WITH', 'HIM', 'THEY', 'SHRIEKED'] +6938-70848-0014-1230: ref=['FEARFUL', 'TUMULT', 'CRIES', 'DOWN', 'WITH', 'THE', 'BOLSHEVIKI'] +6938-70848-0014-1230: hyp=['FEARFUL', 'TUMULT', 'CHRIST', 'DOWN', 'WITH', 'THE', 'PULCHEVIKI'] +6938-70848-0015-1231: ref=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'SMOLNY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', "THAT'S", 'ALL', 'LET', 'ANY', 'ONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0015-1231: hyp=['UPON', 'MY', 'RETURN', 'I', 'VISITED', 'MORLEY', 'NO', 'SUCH', 'ACCUSATION', 'WAS', 'MADE', 'AGAINST', 'ME', 'THERE', 'AFTER', 'A', 'BRIEF', 'CONVERSATION', 'I', 'LEFT', 'AND', 'THAT', 'SOUL', 'LET', 'ANYONE', 'PRESENT', 'MAKE', 'SUCH', 'AN', 'ACCUSATION'] +6938-70848-0016-1232: ref=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STATUS', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0016-1232: hyp=['MEANWHILE', 'THE', 'QUESTION', 'OF', 'THE', 'STRATORS', 'OF', 'THE', 'EXECUTED', 'COMMITTEE', 'WAS', 'AGITATING', 'ALL', 'MINDS'] +6938-70848-0017-1233: ref=['BY', 'DECLARING', 'THE', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'BLOCK', 'THE', 'REELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0017-1233: hyp=['BY', 'DECLINING', 'THEIR', 'ASSEMBLY', 'EXTRAORDINARY', 'CONFERENCE', 'IT', 'HAD', 'BEEN', 'PLANNED', 'TO', 'PLOT', 'THE', 'RE', 'ELECTION', 'OF', 'THE', 'EXECUTIVE', 'COMMITTEE'] +6938-70848-0018-1234: ref=['BUT', 'THIS', 'WORKED', 'BOTH', 'WAYS', 'THE', 
'LEFT', 'SOCIALIST', 'REVOLUTIONISTS', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTIVE', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0018-1234: hyp=['BUT', 'THIS', 'WORTH', 'BOTH', 'WAYS', 'THE', 'LAST', 'SOCIALLY', 'REVOLUTION', 'IS', 'DECIDED', 'THAT', 'IF', 'THE', 'CONGRESS', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'EXECUTING', 'COMMITTEE', 'THEN', 'THE', 'EXECUTIVE', 'COMMITTEE', 'HAD', 'NO', 'POWER', 'OVER', 'THE', 'CONGRESS'] +6938-70848-0019-1235: ref=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRARIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONARIES'] +6938-70848-0019-1235: hyp=['ON', 'THE', 'TWENTY', 'SEVENTH', 'OCCURRED', 'THE', 'DEBATE', 'ON', 'THE', 'LAND', 'QUESTION', 'WHICH', 'REVEALED', 'THE', 'DIFFERENCES', 'BETWEEN', 'THE', 'AGRIAN', 'PROGRAMME', 'OF', 'THE', 'BOLSHEVIKI', 'AND', 'THE', 'LEFT', 'SOCIALIST', 'REVOLUTIONARIES'] +6938-70848-0020-1236: ref=['THE', 'CONSTITUENT', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0020-1236: hyp=['THE', 'CONSTITUTE', 'ASSEMBLY', 'WILL', 'NOT', 'DARE', 'TO', 'BREAK', 'WITH', 'THE', 'WILL', 'OF', 'THE', 'PEOPLE'] +6938-70848-0021-1237: ref=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0021-1237: hyp=['FOLLOWED', 'HIM', 'LENIN', 'LISTENED', 'TO', 'NOW', 'WITH', 'ABSORBING', 'INTENSITY'] +6938-70848-0022-1238: ref=['THE', 'FIRST', 'STAGE', 'WAS', 'THE', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'THE', 'CRUSHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALISTS', 'AND', 'LAND', 'OWNERS', 'WHOSE', 'INTERESTS', 'ARE', 'CLOSELY', 'RELATED'] +6938-70848-0022-1238: hyp=['THE', 'FIRST', 'STAGE', 'WAS', 'A', 'CRUSHING', 'OF', 'AUTOCRACY', 'AND', 'A', 'CRASHING', 'OF', 'THE', 'POWER', 'OF', 'THE', 'INDUSTRIAL', 'CAPITALIST', 'AND', 'THE', 'LANDOWNERS', 'WHOSE', 'INTERESTS', 'ARE', 'CLOSELY', 'RELATED'] +6938-70848-0023-1239: ref=['THE', 'DUMAS', 'AND', 'ZEMSTVOS', 'WERE', 'DROPPED'] +6938-70848-0023-1239: hyp=['DID', 'YOU', 'ME', 'SEND', 'THEMSELVES', 'WERE', 'DROPPED'] +6938-70848-0024-1240: ref=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0024-1240: hyp=['HE', 'KNEW', 'THAT', 'AN', 'AGREEMENT', 'WITH', 'THE', 'BOLSHEVIKI', 'WAS', 'BEING', 'DISCUSSED', 'BUT', 'HE', 'DID', 'NOT', 'KNOW', 'THAT', 'IT', 'HAD', 'BEEN', 'CONCLUDED'] +6938-70848-0025-1241: ref=['HE', 'SPOKE', 'TO', 'THE', 'RUMP', 'CONVENTION'] +6938-70848-0025-1241: hyp=['HE', 'SPOKE', 'TO', 'THE', 'WRONG', 'CONVENTION'] +6938-70848-0026-1242: ref=['THE', 'VILLAGES', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0026-1242: hyp=['THE', 'RELIGIOUS', 'WILL', 'SAVE', 'US', 'IN', 'THE', 'END'] +6938-70848-0027-1243: ref=['BUT', 'THE', 'PRESENT', 'MOVEMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0027-1243: hyp=['BUT', 'THE', 'PRESENT', 'MOMENT', 'IS', 'INTERNATIONAL', 'AND', 'THAT', 'IS', 'WHY', 'IT', 'IS', 'INVINCIBLE'] +6938-70848-0028-1244: ref=['THE', 'WILL', 'OF', 'MILLIONS', 'OF', 'WORKERS', 'IS', 'NOW', 'CONCENTRATED', 'IN', 'THIS', 'HALL'] +6938-70848-0028-1244: hyp=['THE', 'WHEEL', 'OF', 
'MILLIONS', 'OF', 'WORKERS', 'IS', 'SO', 'CONCENTRATED', 'IN', 'THE', 'HALL'] +6938-70848-0029-1245: ref=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] +6938-70848-0029-1245: hyp=['A', 'NEW', 'HUMANITY', 'WILL', 'BE', 'BORN', 'OF', 'THIS', 'WAR'] +6938-70848-0030-1246: ref=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTENING', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +6938-70848-0030-1246: hyp=['I', 'GREET', 'YOU', 'WITH', 'THE', 'CHRISTIANNING', 'OF', 'A', 'NEW', 'RUSSIAN', 'LIFE', 'AND', 'FREEDOM'] +7018-75788-0000-135: ref=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0000-135: hyp=['THEN', 'I', 'TOOK', 'UP', 'A', 'GREAT', 'STONE', 'FROM', 'AMONG', 'THE', 'TREES', 'AND', 'COMING', 'UP', 'TO', 'HIM', 'SMOTE', 'HIM', 'THEREWITH', 'ON', 'THE', 'HEAD', 'WITH', 'ALL', 'MY', 'MIGHT', 'AND', 'CRUSHED', 'IN', 'HIS', 'SKULL', 'AS', 'HE', 'LAY', 'DEAD', 'DRUNK'] +7018-75788-0001-136: ref=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0001-136: hyp=['BEHOLD', 'A', 'SHIP', 'WAS', 'MAKING', 'FOR', 'THE', 'ISLAND', 'THROUGH', 'THE', 'DASHING', 'SEA', 'AND', 'CLASHING', 'WAVES'] +7018-75788-0002-137: ref=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0002-137: hyp=['HEARING', 'THIS', 'I', 'WAS', 'SORE', 'TROUBLED', 'REMEMBERING', 'WHAT', 'I', 'HAD', 'BEFORE', 'SUFFERED', 'FROM', 'THE', 'APE', 'KIND'] +7018-75788-0003-138: ref=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COTTON', 'BAG', 'AND', 'GIVING', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0003-138: hyp=['UPON', 'THIS', 'HE', 'BROUGHT', 'ME', 'A', 'COTTON', 'BAG', 'AND', 'GIVEN', 'IT', 'TO', 'ME', 'SAID', 'TAKE', 'THIS', 'BAG', 'AND', 'FILL', 'IT', 'WITH', 'PEBBLES', 'FROM', 'THE', 'BEACH', 'AND', 'GO', 'FORTH', 'WITH', 'A', 'COMPANY', 'OF', 'THE', 'TOWNSFOLK', 'TO', 'WHOM', 'I', 'WILL', 'GIVE', 'A', 'CHARGE', 'RESPECTING', 'THEE'] +7018-75788-0004-139: ref=['DO', 'AS', 'THEY', 'DO', 'AND', 'BELIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0004-139: hyp=['DO', 'AS', 'THEY', 'DO', 'AND', 'BE', 'LIKE', 'THOU', 'SHALT', 'GAIN', 'WHAT', 'MAY', 'FURTHER', 'THY', 'RETURN', 'VOYAGE', 'TO', 'THY', 'NATIVE', 'LAND'] +7018-75788-0005-140: ref=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0005-140: hyp=['THEN', 'HE', 'CARRIED', 'ME', 'TO', 'THE', 'BEACH', 'WHERE', 'I', 'FILLED', 'MY', 'BAG', 'WITH', 'PEBBLES', 'LARGE', 'AND', 'SMALL', 'AND', 'PRESENTLY', 'WE', 'SAW', 'A', 'COMPANY', 'OF', 'FOLK', 'ISSUE', 'FROM', 'THE', 'TOWN', 'EACH', 'BEARING', 'A', 'BAG', 'LIKE', 'MINE', 'FILLED', 'WITH', 'PEBBLES'] +7018-75788-0006-141: 
ref=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0006-141: hyp=['TO', 'THESE', 'HE', 'COMMITTED', 'ME', 'COMMENDING', 'ME', 'TO', 'THEIR', 'CARE', 'AND', 'SAYING', 'THIS', 'MAN', 'IS', 'A', 'STRANGER', 'SO', 'TAKE', 'HIM', 'WITH', 'YOU', 'AND', 'TEACH', 'HIM', 'HOW', 'TO', 'GATHER', 'THAT', 'HE', 'MAY', 'GET', 'HIS', 'DAILY', 'BREAD', 'AND', 'YOU', 'WILL', 'EARN', 'YOUR', 'REWARD', 'AND', 'RECOMPENSE', 'IN', 'HEAVEN'] +7018-75788-0007-142: ref=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'APES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0007-142: hyp=['NOW', 'SLEEPING', 'UNDER', 'THESE', 'TREES', 'WERE', 'MANY', 'IPES', 'WHICH', 'WHEN', 'THEY', 'SAW', 'US', 'ROSE', 'AND', 'FLED', 'FROM', 'US', 'AND', 'SWARMED', 'UP', 'AMONG', 'THE', 'BRANCHES', 'WHEREUPON', 'MY', 'COMPANIONS', 'BEGAN', 'TO', 'PELT', 'THEM', 'WITH', 'WHAT', 'THEY', 'HAD', 'IN', 'THEIR', 'BAGS', 'AND', 'THE', 'APES', 'FELL', 'TO', 'PLUCKING', 'OF', 'THE', 'FRUIT', 'OF', 'THE', 'TREES', 'AND', 'CASTING', 'THEM', 'AT', 'THE', 'FOLK'] +7018-75788-0008-143: ref=['WE', 'WEIGHED', 'ANCHOR', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0008-143: hyp=['WE', 'WADE', 'ANCHOR', 'AND', 'SHAHRAZAD', 'PERCEIVED', 'THE', 'DAWN', 'OF', 'DAY', 'AND', 'CEASED', 'SAYING', 'HER', 'PERMITTED', 'SAY'] +7018-75788-0009-144: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0009-144: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'FIFTY', 'NINTH', 'NIGHT'] +7018-75788-0010-145: ref=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'BASSORAH'] +7018-75788-0010-145: hyp=['AND', 'CEASED', 'NOT', 'SAILING', 'TILL', 'WE', 'ARRIVED', 'SAFELY', 'AT', 'PUSSARA'] +7018-75788-0011-146: ref=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGHDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOREGATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0011-146: hyp=['THERE', 'I', 'ABODE', 'A', 'LITTLE', 'AND', 'THEN', 'WENT', 'ON', 'TO', 'BAGDAD', 'WHERE', 'I', 'ENTERED', 'MY', 'QUARTER', 'AND', 'FOUND', 'MY', 'HOUSE', 'AND', 'FOR', 'GATHERED', 'WITH', 'MY', 'FAMILY', 'AND', 'SALUTED', 'MY', 'FRIENDS', 'WHO', 'GAVE', 'ME', 'JOY', 'OF', 'MY', 'SAFE', 'RETURN', 'AND', 'I', 'LAID', 'UP', 'ALL', 'MY', 'GOODS', 'AND', 'VALUABLES', 'IN', 'MY', 'STOREHOUSES'] +7018-75788-0012-147: ref=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 
'MADE'] +7018-75788-0012-147: hyp=['AFTER', 'WHICH', 'I', 'RETURNED', 'TO', 'MY', 'OLD', 'MERRY', 'WAY', 'OF', 'LIFE', 'AND', 'FORGOT', 'ALL', 'I', 'HAD', 'SUFFERED', 'IN', 'THE', 'GREAT', 'PROFIT', 'AND', 'GAIN', 'I', 'HAD', 'MADE'] +7018-75788-0013-148: ref=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINDBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0013-148: hyp=['NEXT', 'MORNING', 'AS', 'SOON', 'AS', 'IT', 'WAS', 'LIGHT', 'HE', 'PRAYED', 'THE', 'DAWN', 'PRAYER', 'AND', 'AFTER', 'BLESSING', 'MOHAMMED', 'THE', 'CREAM', 'OF', 'ALL', 'CREATURES', 'BETOOK', 'HIMSELF', 'TO', 'THE', 'HOUSE', 'OF', 'SINBAD', 'THE', 'SEAMAN', 'AND', 'WISHED', 'HIM', 'A', 'GOOD', 'DAY'] +7018-75788-0014-149: ref=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0014-149: hyp=['HERE', 'I', 'FOUND', 'A', 'GREAT', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'FULL', 'OF', 'MERCHANTS', 'AND', 'NOTABLES', 'WHO', 'HAD', 'WITH', 'THEM', 'GOODS', 'OF', 'PRICE', 'SO', 'I', 'EMBARKED', 'MY', 'BALES', 'THEREIN'] +7018-75788-0015-150: ref=['HAPLY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0015-150: hyp=['HAPPILY', 'AMONGST', 'YOU', 'IS', 'ONE', 'RIGHTEOUS', 'WHOSE', 'PRAYERS', 'THE', 'LORD', 'WILL', 'ACCEPT'] +7018-75788-0016-151: ref=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0016-151: hyp=['PRESENTLY', 'THE', 'SHIP', 'STRUCK', 'THE', 'MOUNTAIN', 'AND', 'BROKE', 'UP', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'OF', 'HER', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75788-0017-152: ref=['BUT', 'IT', 'BURNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECT', 'IT', 'AND', 'SELL', 'IT'] +7018-75788-0017-152: hyp=['BUT', 'AT', 'BERNETH', 'IN', 'THEIR', 'BELLIES', 'SO', 'THEY', 'CAST', 'IT', 'UP', 'AGAIN', 'AND', 'IT', 'CONGEALETH', 'ON', 'THE', 'SURFACE', 'OF', 'THE', 'WATER', 'WHEREBY', 'ITS', 'COLOR', 'AND', 'QUANTITIES', 'ARE', 'CHANGED', 'AND', 'AT', 'LAST', 'THE', 'WAVES', 'CAST', 'IT', 'ASHORE', 'AND', 'THE', 'TRAVELLERS', 'AND', 'MERCHANTS', 'WHO', 'KNOW', 'IT', 'COLLECTED', 'AND', 'SELL', 'IT'] +7018-75788-0018-153: ref=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'ABODE', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0018-153: hyp=['EACH', 'THAT', 'DIED', 'WE', 'WASHED', 'AND', 'SHROUDED', 'IN', 'SOME', 'OF', 'THE', 
'CLOTHES', 'AND', 'LINEN', 'CAST', 'ASHORE', 'BY', 'THE', 'TIDES', 'AND', 'AFTER', 'A', 'LITTLE', 'THE', 'REST', 'OF', 'MY', 'FELLOWS', 'PERISHED', 'ONE', 'BY', 'ONE', 'TILL', 'I', 'HAD', 'BURIED', 'THE', 'LAST', 'OF', 'THE', 'PARTY', 'AND', 'A', 'BOAT', 'ALONE', 'ON', 'THE', 'ISLAND', 'WITH', 'BUT', 'A', 'LITTLE', 'PROVISION', 'LEFT', 'I', 'WHO', 'WAS', 'WONT', 'TO', 'HAVE', 'SO', 'MUCH'] +7018-75788-0019-154: ref=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75788-0019-154: hyp=['BUT', 'THERE', 'IS', 'MAJESTY', 'AND', 'THERE', 'IS', 'NO', 'MIGHT', 'SAVE', 'IN', 'ALLAH', 'THE', 'GLORIOUS', 'THE', 'GREAT'] +7018-75789-0000-155: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0000-155: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'FIRST', 'NIGHT'] +7018-75789-0001-156: ref=['THEN', 'SIGHING', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'COMORIN', 'ALOES', 'WOOD', 'AND', 'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIPS', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALOES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0001-156: hyp=['THEN', 'SIGNED', 'FOR', 'MYSELF', 'I', 'SET', 'TO', 'WORK', 'COLLECTING', 'A', 'NUMBER', 'OF', 'PIECES', 'OF', 'CHINESE', 'AND', 'CORMOR', 'AND', 'ALLIES', 'WOOD', 'AND', 'I', 'BOUND', 'THEM', 'TOGETHER', 'WITH', 'ROPES', 'FROM', 'THE', 'WRECKAGE', 'THEN', 'I', 'CHOSE', 'OUT', 'FROM', 'THE', 'BROKEN', 'UP', 'SHIP', 'STRAIGHT', 'PLANKS', 'OF', 'EVEN', 'SIZE', 'AND', 'FIXED', 'THEM', 'FIRMLY', 'UPON', 'THE', 'ALLIES', 'WOOD', 'MAKING', 'ME', 'A', 'BOAT', 'RAFT', 'A', 'LITTLE', 'NARROWER', 'THAN', 'THE', 'CHANNEL', 'OF', 'THE', 'STREAM', 'AND', 'I', 'TIED', 'IT', 'TIGHTLY', 'AND', 'FIRMLY', 'AS', 'THOUGH', 'IT', 'WERE', 'NAILED'] +7018-75789-0002-157: ref=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEEK', 'AND', 'FIND', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'O', 'NIGHT', 'ALL', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0002-157: hyp=['LAND', 'AFTER', 'LAND', 'SHALT', 'THOU', 'SEE', 'CONFINED', 'BUT', 'NO', 'OTHER', 'LIFE', 'ON', 'THY', 'WISH', 'SHALL', 'WAIT', 'FRET', 'NOT', 'THY', 'SOUL', 'IN', 'THY', 'THOUGHTS', 'ARE', 'NIGHT', 'OR', 'WOES', 'SHALL', 'END', 'OR', 'SOONER', 'OR', 'LATE'] +7018-75789-0003-158: ref=['I', 'ROWED', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0003-158: hyp=['I', 'RIDE', 'MY', 'CONVEYANCE', 'INTO', 'THE', 'PLACE', 'WHICH', 'WAS', 'INTENSELY', 'DARK', 'AND', 'THE', 'CURRENT', 'CARRIED', 'ME', 'THE', 'RAFT', 'WITH', 'IT', 'DOWN', 'THE', 'UNDERGROUND', 'CHANNEL'] +7018-75789-0004-159: ref=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 
'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'AND', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 'SHOULD', 'PERISH'] +7018-75789-0004-159: hyp=['AND', 'I', 'THREW', 'MYSELF', 'DOWN', 'UPON', 'MY', 'FACE', 'ON', 'THE', 'RAFT', 'BY', 'REASON', 'OF', 'THE', 'NARROWNESS', 'OF', 'THE', 'CHANNEL', 'WHILST', 'THE', 'STREAM', 'CEASED', 'NOT', 'TO', 'CARRY', 'ME', 'ALONG', 'KNOWING', 'NOT', 'NIGHT', 'FROM', 'DAY', 'FOR', 'THE', 'EXCESS', 'OF', 'THE', 'GLOOM', 'WHICH', 'ENCOMPASSED', 'ME', 'ABOUT', 'IN', 'MY', 'TERROR', 'AND', 'CONCERN', 'FOR', 'MYSELF', 'LEST', 'I', 'SHOULD', 'PERISH'] +7018-75789-0005-160: ref=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0005-160: hyp=['WHEN', 'I', 'AWOKE', 'AT', 'LAST', 'I', 'FOUND', 'MYSELF', 'IN', 'THE', 'LIGHT', 'OF', 'HEAVEN', 'AND', 'OPENING', 'MY', 'EYES', 'I', 'SAW', 'MYSELF', 'IN', 'A', 'BROAD', 'STREAM', 'AND', 'THE', 'RAFT', 'MOORED', 'TO', 'AN', 'ISLAND', 'IN', 'THE', 'MIDST', 'OF', 'A', 'NUMBER', 'OF', 'INDIANS', 'AND', 'ABYSSINIANS'] +7018-75789-0006-161: ref=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0006-161: hyp=['BUT', 'I', 'WAS', 'DELIGHTED', 'AT', 'MY', 'ESCAPE', 'FROM', 'THE', 'RIVER'] +7018-75789-0007-162: ref=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0007-162: hyp=['WHEN', 'THEY', 'SAW', 'I', 'UNDERSTOOD', 'THEM', 'NOT', 'AND', 'MADE', 'THEM', 'NO', 'ANSWER', 'ONE', 'OF', 'THEM', 'CAME', 'FORWARD', 'AND', 'SAID', 'TO', 'ME', 'IN', 'ARABIC', 'PEACE', 'BE', 'WITH', 'THEE', 'O', 'MY', 'BROTHER'] +7018-75789-0008-163: ref=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TILLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0008-163: hyp=['O', 'MY', 'BROTHER', 'ANSWERED', 'HE', 'WE', 'ARE', 'HUSBANDMEN', 'AND', 'TELLERS', 'OF', 'THE', 'SOIL', 'WHO', 'CAME', 'OUT', 'TO', 'WATER', 'OUR', 'FIELDS', 'AND', 'PLANTATIONS', 'AND', 'FINDING', 'THEE', 'ASLEEP', 'ON', 'THIS', 'RAFT', 'LAID', 'HOLD', 'OF', 'IT', 'AND', 'MADE', 'IT', 'FAST', 'BY', 'US', 'AGAINST', 'THOU', 'SHOULDEST', 'AWAKE', 'AT', 'THY', 'LEISURE'] +7018-75789-0009-164: ref=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'O', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0009-164: hyp=['I', 'ANSWERED', 'FOR', "ALLAH'S", 'SAKE', 'AM', 'MY', 'LORD', 'ERE', 'I', 'SPEAK', 'GIVE', 'ME', 'SOMEWHAT', 'TO', 'EAT', 'FOR', 'I', 'AM', 'STARVING', 'AND', 'AFTER', 'ASK', 'ME', 'WHAT', 'THOU', 'WILT'] +7018-75789-0010-165: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 'NIGHT'] +7018-75789-0010-165: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'SECOND', 
'NIGHT'] +7018-75789-0011-166: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINDBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0011-166: hyp=['SHE', 'SAID', 'IT', 'HATH', 'RAGED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'SINBAD', 'THE', 'SEAMAN', 'CONTINUED', 'WHEN', 'I', 'LANDED', 'AND', 'FOUND', 'MYSELF', 'AMONGST', 'THE', 'INDIANS', 'AND', 'ABYSSINIANS', 'AND', 'HAD', 'TAKEN', 'SOME', 'REST', 'THEY', 'CONSULTED', 'AMONG', 'THEMSELVES', 'AND', 'SAID', 'TO', 'ONE', 'ANOTHER', 'THERE', 'IS', 'NO', 'HELP', 'FOR', 'IT', 'BUT', 'WE', 'CARRY', 'HIM', 'WITH', 'US', 'AND', 'PRESENT', 'HIM', 'TO', 'OUR', 'KING', 'THAT', 'HE', 'MAY', 'ACQUAINT', 'HIM', 'WITH', 'HIS', 'ADVENTURES'] +7018-75789-0012-167: ref=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0012-167: hyp=['SO', 'I', 'CONSORTED', 'WITH', 'THE', 'CHIEF', 'OF', 'THE', 'ISLANDERS', 'AND', 'THEY', 'PAID', 'ME', 'THE', 'UTMOST', 'RESPECT'] +7018-75789-0013-168: ref=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0013-168: hyp=['SO', 'I', 'ROSE', 'WITHOUT', 'STAY', 'OR', 'DELAY', 'AND', 'KISSED', 'THE', "KING'S", 'HAND', 'AND', 'ACQUAINTED', 'HIM', 'WITH', 'MY', 'LONGING', 'TO', 'SET', 'OUT', 'WITH', 'THE', 'MERCHANTS', 'FOR', 'THAT', 'I', 'PINED', 'AFTER', 'MY', 'PEOPLE', 'AND', 'MINE', 'OWN', 'LAND'] +7018-75789-0014-169: ref=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'ON', 'OUR', 'HEAD', 'AND', 'EYES', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0014-169: hyp=['QUOTH', 'HE', 'THOU', 'ART', 'THINE', 'OWN', 'MASTER', 'YET', 'IF', 'IT', 'BE', 'THY', 'WILL', 'TO', 'ABIDE', 'WITH', 'US', 'HONOUR', 'HEAD', 'AND', 'EYES', 'BE', 'IT', 'FOR', 'THOU', 'GLADDENEST', 'US', 'WITH', 'THY', 'COMPANY'] +7018-75789-0015-170: ref=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0015-170: hyp=['BY', 'ALLAH', 'O', 'MY', 'LORD', 'ANSWERED', 'I', 'THOU', 'HAST', 'INDEED', 'OVERWHELMED', 'ME', 'WITH', 'THY', 'FAVOURS', 'AND', 'WELL', 'DOINGS', 'BUT', 'I', 'WEARY', 'FOR', 'A', 'SIGHT', 'OF', 'MY', 'FRIENDS', 'AND', 'FAMILY', 'AND', 'NATIVE', 'COUNTRY'] +7018-75789-0016-171: ref=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0016-171: hyp=['THEN', 'I', 'TOOK', 'LEAVE', 'OF', 'HIM', 'AND', 'OF', 'ALL', 'MY', 'INTIMATES', 'AND', 'ACQUAINTANCES', 'IN', 'THE', 'ISLAND', 'AND', 
'EMBARKED', 'WITH', 'THE', 'MERCHANTS', 'AFORESAID'] +7018-75789-0017-172: ref=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'O', 'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0017-172: hyp=['HE', 'ASKED', 'ME', 'WHENCE', 'THEY', 'CAME', 'AND', 'I', 'SAID', 'TO', 'HIM', 'BY', 'ALLAH', 'A', 'COMMANDER', 'OF', 'THE', 'FAITHFUL', 'I', 'KNOW', 'NOT', 'THE', 'NAME', 'OF', 'THE', 'CITY', 'NOR', 'THE', 'WAY', 'THITHER'] +7018-75789-0018-173: ref=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0018-173: hyp=['FOR', 'STATE', 'PROCESSIONS', 'A', 'THRONE', 'IS', 'SET', 'FOR', 'HIM', 'UPON', 'A', 'HUGE', 'ELEPHANT', 'ELEVEN', 'CUBITS', 'HIGH', 'AND', 'UPON', 'THIS', 'HE', 'SITTETH', 'HAVING', 'HIS', 'GREAT', 'LORDS', 'AND', 'OFFICERS', 'AND', 'GUESTS', 'STANDING', 'IN', 'TWO', 'RANKS', 'ON', 'HIS', 'RIGHT', 'HAND', 'AND', 'ON', 'HIS', 'LEFT'] +7018-75789-0019-174: ref=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'EYE', 'WITNESSED'] +7018-75789-0019-174: hyp=['HIS', 'LETTER', 'HATH', 'SHOWN', 'ME', 'THIS', 'AND', 'AS', 'FOR', 'THE', 'MIGHTINESS', 'OF', 'HIS', 'DOMINION', 'THOU', 'HAST', 'TOLD', 'US', 'WHAT', 'THOU', 'HAST', 'I', 'WITNESSED'] +7018-75789-0020-175: ref=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ALMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRY', 'MAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0020-175: hyp=['PRESENTLY', 'MY', 'FRIENDS', 'CAME', 'TO', 'ME', 'AND', 'I', 'DISTRIBUTED', 'PRESENTS', 'AMONG', 'MY', 'FAMILY', 'AND', 'GAVE', 'ALMS', 'AND', 'LARGESSE', 'AFTER', 'WHICH', 'I', 'YIELDED', 'MYSELF', 'TO', 'JOYANCE', 'AND', 'ENJOYMENT', 'MIRTH', 'AND', 'MERRYMAKING', 'AND', 'FORGOT', 'ALL', 'THAT', 'I', 'HAD', 'SUFFERED'] +7018-75789-0021-176: ref=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0021-176: hyp=['SUCH', 'THEN', 'O', 'MY', 'BROTHERS', 'IS', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'ME', 'IN', 'MY', 'SIXTH', 'VOYAGE', 'AND', 'TO', 'MORROW', 'INSHALLAH'] +7018-75789-0022-177: ref=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0022-177: hyp=['I', 'WILL', 'TELL', 'YOU', 'THE', 'STORY', 'OF', 'MY', 'SEVENTH', 'AND', 'LAST', 'VOYAGE', 'WHICH', 'IS', 'STILL', 'MORE', 'WONDROUS', 'AND', 'MARVELLOUS', 'THAN', 'THAT', 'OF', 'THE', 'FIRST', 'SIX'] +7018-75789-0023-178: ref=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0023-178: hyp=['WHEN', 'IT', 'WAS', 'THE', 'FIVE', 'HUNDRED', 'AND', 'SIXTY', 'THIRD', 'NIGHT'] +7018-75789-0024-179: ref=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 
'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINDBAD', 'THE', 'SEAMAN', 'HAD', 'RELATED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFEL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 'COMPANY', 'HAD', 'DISPERSED', 'SINDBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0024-179: hyp=['SHE', 'SAID', 'IT', 'HATH', 'REACHED', 'ME', 'O', 'AUSPICIOUS', 'KING', 'THAT', 'WHEN', 'SINBAD', 'THE', 'SIMON', 'HAD', 'RELIGHTED', 'THE', 'HISTORY', 'OF', 'WHAT', 'BEFELL', 'HIM', 'IN', 'HIS', 'SIXTH', 'VOYAGE', 'AND', 'ALL', 'THE', 'COMPANY', 'HAD', 'DISPERSED', 'SINBAD', 'THE', 'LANDSMAN', 'WENT', 'HOME', 'AND', 'SLEPT', 'AS', 'OF', 'WONT'] +7018-75789-0025-180: ref=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINDBAD', 'THE', 'SEAMAN'] +7018-75789-0025-180: hyp=['THE', 'SEVENTH', 'VOYAGE', 'OF', 'SINBAD', 'THE', 'SALMON'] +7018-75789-0026-181: ref=['KNOW', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'IN', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'AND', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HEAR', 'NEW', 'THINGS'] +7018-75789-0026-181: hyp=['NO', 'O', 'COMPANY', 'THAT', 'AFTER', 'MY', 'RETURN', 'FROM', 'MY', 'SIXTH', 'VOYAGE', 'WHICH', 'BROUGHT', 'ME', 'ABUNDANT', 'PROFIT', 'I', 'RESUMED', 'MY', 'FORMER', 'LIFE', 'AND', 'ALL', 'POSSIBLE', 'JOYANCE', 'AND', 'ENJOYMENT', 'AND', 'MIRTH', 'AND', 'MAKING', 'MERRY', 'DAY', 'AND', 'NIGHT', 'AND', 'I', 'TARRIED', 'SOME', 'TIME', 'IN', 'THIS', 'SOLACE', 'AND', 'SATISFACTION', 'TILL', 'MY', 'SOUL', 'BEGAN', 'ONCE', 'MORE', 'TO', 'LONG', 'TO', 'SAIL', 'THE', 'SEAS', 'AND', 'SEE', 'FOREIGN', 'COUNTRIES', 'AND', 'COMPANY', 'WITH', 'MERCHANTS', 'AND', 'HERE', 'NEW', 'THINGS'] +7018-75789-0027-182: ref=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSORAH', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0027-182: hyp=['SO', 'HAVING', 'MADE', 'UP', 'MY', 'MIND', 'I', 'PACKED', 'UP', 'IN', 'BALES', 'A', 'QUANTITY', 'OF', 'PRECIOUS', 'STUFFS', 'SUITED', 'FOR', 'SEA', 'TRADE', 'AND', 'REPAIRED', 'WITH', 'THEM', 'FROM', 'BAGHDAD', 'CITY', 'TO', 'BASSORA', 'TOWN', 'WHERE', 'I', 'FOUND', 'A', 'SHIP', 'READY', 'FOR', 'SEA', 'AND', 'IN', 'HER', 'A', 'COMPANY', 'OF', 'CONSIDERABLE', 'MERCHANTS'] +7018-75789-0028-183: ref=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENING', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 'STONED', 'CLOMB', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFETING', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0028-183: hyp=['BUT', 'THE', 'CAPTAIN', 'AROSE', 'AND', 'TIGHTENED', 'IN', 'HIS', 'GIRDLE', 'TUCKED', 'UP', 'HIS', 'SKIRTS', 'AND', 'AFTER', 'TAKING', 'REFUGE', 'WITH', 'ALLAH', 'FROM', 'SATAN', 'THE', 
'STONE', 'CLIMBED', 'TO', 'THE', 'MAST', 'HEAD', 'WHENCE', 'HE', 'LOOKED', 'OUT', 'RIGHT', 'AND', 'LEFT', 'AND', 'GAZING', 'AT', 'THE', 'PASSENGERS', 'AND', 'CREW', 'FELL', 'TO', 'BUFFET', 'IN', 'HIS', 'FACE', 'AND', 'PLUCKING', 'OUT', 'HIS', 'BEARD'] +7018-75789-0029-184: ref=['THIS', 'HE', 'SET', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'AWHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COMETH', 'HITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'THE', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0029-184: hyp=['THIS', 'HE', 'SAID', 'IN', 'A', 'SAUCER', 'WETTED', 'WITH', 'A', 'LITTLE', 'WATER', 'AND', 'AFTER', 'WAITING', 'A', 'SHORT', 'TIME', 'SMELT', 'AND', 'TASTED', 'IT', 'AND', 'THEN', 'HE', 'TOOK', 'OUT', 'OF', 'THE', 'CHEST', 'A', 'BOOKLET', 'WHEREIN', 'HE', 'READ', 'A', 'WHILE', 'AND', 'SAID', 'WEEPING', 'KNOW', 'O', 'YE', 'PASSENGERS', 'THAT', 'IN', 'THIS', 'BOOK', 'IS', 'A', 'MARVELLOUS', 'MATTER', 'DENOTING', 'THAT', 'WHOSO', 'COME', 'THITHER', 'SHALL', 'SURELY', 'DIE', 'WITHOUT', 'HOPE', 'OF', 'ESCAPE', 'FOR', 'THAT', 'THIS', 'OCEAN', 'IS', 'CALLED', 'THE', 'SEA', 'OF', 'THE', 'CLIME', 'OF', 'THE', 'KING', 'WHEREIN', 'IS', 'A', 'SEPULCHRE', 'OF', 'OUR', 'LORD', 'SOLOMON', 'SON', 'OF', 'DAVID', 'ON', 'BOTH', 'BE', 'PEACE'] +7018-75789-0030-185: ref=['A', 'SECOND', 'FISH', 'MADE', 'ITS', 'APPEARANCE', 'THAN', 'WHICH', 'WE', 'HAD', 'SEEN', 'NAUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0030-185: hyp=['A', 'SECOND', 'FISH', 'READ', 'ITS', 'APPEARANCE', 'AND', 'WHICH', 'WE', 'HAD', 'SEEN', 'NOUGHT', 'MORE', 'MONSTROUS'] +7018-75789-0031-186: ref=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7018-75789-0031-186: hyp=['WHEN', 'SUDDENLY', 'A', 'VIOLENT', 'SQUALL', 'OF', 'WIND', 'AROSE', 'AND', 'SMOTE', 'THE', 'SHIP', 'WHICH', 'ROSE', 'OUT', 'OF', 'THE', 'WATER', 'AND', 'SETTLED', 'UPON', 'A', 'GREAT', 'REEF', 'THE', 'HAUNT', 'OF', 'SEA', 'MONSTERS', 'WHERE', 'IT', 'BROKE', 'UP', 'AND', 'FELL', 'ASUNDER', 'INTO', 'PLANKS', 'AND', 'ALL', 'AND', 'EVERYTHING', 'ON', 'BOARD', 'WERE', 'PLUNGED', 'INTO', 'THE', 'SEA'] +7105-2330-0000-2310: ref=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OR', 'MISCONCEPTION', 'AS', 'TO', "PLATTERBAFF'S", 'GUILT'] +7105-2330-0000-2310: hyp=['UNFORTUNATELY', 'THERE', 'COULD', 'BE', 'NO', 'DOUBT', 'OUR', 'MISCONCEPTION', 'AS', 'THE', "PLATTERBUFF'S", 'GUILT'] +7105-2330-0001-2311: ref=['HE', 'HAD', 'NOT', 'ONLY', 'PLEADED', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 
'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0001-2311: hyp=['HE', 'HAD', 'NOT', 'ONLY', 'PLAYED', 'IT', 'GUILTY', 'BUT', 'HAD', 'EXPRESSED', 'HIS', 'INTENTION', 'OF', 'REPEATING', 'HIS', 'ESCAPADE', 'IN', 'OTHER', 'DIRECTIONS', 'AS', 'SOON', 'AS', 'CIRCUMSTANCES', 'PERMITTED', 'THROUGHOUT', 'THE', 'TRIAL', 'HE', 'WAS', 'BUSY', 'EXAMINING', 'A', 'SMALL', 'MODEL', 'OF', 'THE', 'FREE', 'TRADE', 'HALL', 'IN', 'MANCHESTER'] +7105-2330-0002-2312: ref=['THE', 'JURY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTENUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] +7105-2330-0002-2312: hyp=['VERY', 'CHEERY', 'COULD', 'NOT', 'POSSIBLY', 'FIND', 'THAT', 'THE', 'PRISONER', 'HAD', 'NOT', 'DELIBERATELY', 'AND', 'INTENTIONALLY', 'BLOWN', 'UP', 'THE', 'ALBERT', 'HALL', 'THE', 'QUESTION', 'WAS', 'COULD', 'THEY', 'FIND', 'ANY', 'EXTINUATING', 'CIRCUMSTANCES', 'WHICH', 'WOULD', 'PERMIT', 'OF', 'AN', 'ACQUITTAL'] +7105-2330-0003-2313: ref=['OF', 'COURSE', 'ANY', 'SENTENCE', 'WHICH', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', "GOVERNMENT'S", 'POINT', 'OF', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0003-2313: hyp=['OF', 'COURSE', 'ANY', 'SENTENCE', 'REACHED', 'THE', 'LAW', 'MIGHT', 'FEEL', 'COMPELLED', 'TO', 'INFLICT', 'WOULD', 'BE', 'FOLLOWED', 'BY', 'AN', 'IMMEDIATE', 'PARDON', 'BUT', 'IT', 'WAS', 'HIGHLY', 'DESIRABLE', 'FROM', 'THE', "GOVERNMENT'S", 'POINT', 'OF', 'VIEW', 'THAT', 'THE', 'NECESSITY', 'FOR', 'SUCH', 'AN', 'EXERCISE', 'OF', 'CLEMENCY', 'SHOULD', 'NOT', 'ARISE'] +7105-2330-0004-2314: ref=['A', 'HEADLONG', 'PARDON', 'ON', 'THE', 'EVE', 'OF', 'A', 'BYE', 'ELECTION', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'DEFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0004-2314: hyp=['I', 'HAD', 'LONG', 'PARDON', 'AND', 'THE', 'EVE', 'OF', 'A', 'BIOLECTION', 'WITH', 'THREATS', 'OF', 'A', 'HEAVY', 'VOTING', 'AFFECTION', 'IF', 'IT', 'WERE', 'WITHHELD', 'OR', 'EVEN', 'DELAYED', 'WOULD', 'NOT', 'NECESSARILY', 'BE', 'A', 'SURRENDER', 'BUT', 'IT', 'WOULD', 'LOOK', 'LIKE', 'ONE'] +7105-2330-0005-2315: ref=['HENCE', 'THE', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITEHALL', 'AND', 'DOWNING', 'STREET', 'AND', 'OTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0005-2315: hyp=['HENCE', 'THEIR', 'ANXIETY', 'IN', 'THE', 'CROWDED', 'COURT', 'AND', 'IN', 'THE', 'LITTLE', 'GROUPS', 'GATHERED', 'ROUND', 'THE', 'TAPE', 'MACHINES', 'IN', 'WHITEHALL', 'AND', 'DAWNING', 'STREET', 'ANOTHER', 'AFFECTED', 'CENTRES'] +7105-2330-0006-2316: ref=['THE', 'JURY', 'RETURNED', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATHLIKE', 'HUSH'] +7105-2330-0006-2316: hyp=['THEIR', 'CHEERY', 'TURN', 'FROM', 'CONSIDERING', 'THEIR', 'VERDICT', 'THERE', 'WAS', 'A', 'FLUTTER', 'AN', 'EXCITED', 'MURMUR', 'A', 'DEATH', 'LIKE', 'HUSH'] +7105-2330-0007-2317: ref=['THE', 'FOREMAN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0007-2317: 
hyp=['THE', 'FOUR', 'MEN', 'DELIVERED', 'HIS', 'MESSAGE'] +7105-2330-0008-2318: ref=['THE', 'JURY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] +7105-2330-0008-2318: hyp=['THE', 'CHERRY', 'FIND', 'THE', 'PRISONER', 'GUILTY', 'OF', 'BLOWING', 'UP', 'THE', 'ALBERT', 'HALL'] +7105-2330-0009-2319: ref=['THE', 'JURY', 'WISH', 'TO', 'ADD', 'A', 'RIDER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BY', 'ELECTION', 'IS', 'PENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0009-2319: hyp=['THEY', 'JERRY', 'WISH', 'TO', 'ADD', 'A', 'WRITER', 'DRAWING', 'ATTENTION', 'TO', 'THE', 'FACT', 'THAT', 'A', 'BILL', 'IS', 'SPENDING', 'IN', 'THE', 'PARLIAMENTARY', 'DIVISION', 'OF', 'NEMESIS', 'ON', 'HAND'] +7105-2330-0010-2320: ref=['AND', 'MAY', 'THE', 'LORD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLL', 'A', 'JUNIOR', 'COUNSEL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0010-2320: hyp=['AND', 'MADE', 'THE', 'LARD', 'HAVE', 'MERCY', 'ON', 'THE', 'POLE', 'A', 'GENIOR', 'CONSUL', 'EXCLAIMED', 'IRREVERENTLY'] +7105-2330-0011-2321: ref=['FIFTEEN', 'HUNDRED', 'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0011-2321: hyp=['FIFTEEN', 'HUNDRED', 'SAID', 'THE', 'PRIME', 'MINISTER', 'WITH', 'A', 'SHUDDER', "IT'S", 'TOO', 'HORRIBLE', 'TO', 'THINK', 'OF'] +7105-2330-0012-2322: ref=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0012-2322: hyp=['OUR', 'MAJORITY', 'LAST', 'TIME', 'WAS', 'ONLY', 'A', 'THOUSAND', 'AND', 'SEVEN'] +7105-2330-0013-2323: ref=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0013-2323: hyp=['SEVEN', 'THIRTY', 'AMENDED', 'THE', 'PRIME', 'MINISTER', 'WE', 'MUST', 'AVOID', 'ANY', 'APPEARANCE', 'OF', 'PRECIPITANCY'] +7105-2330-0014-2324: ref=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANISER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLL', 'OPENS'] +7105-2330-0014-2324: hyp=['NOT', 'LATER', 'THAN', 'SEVEN', 'THIRTY', 'THEN', 'SAID', 'THE', 'CHIEF', 'ORGANIZER', 'I', 'HAVE', 'PROMISED', 'THE', 'AGENT', 'DOWN', 'THERE', 'THAT', 'HE', 'SHALL', 'BE', 'ABLE', 'TO', 'DISPLAY', 'POSTERS', 'ANNOUNCING', 'PLATTERBAFF', 'IS', 'OUT', 'BEFORE', 'THE', 'POLE', 'OPENS'] +7105-2330-0015-2325: ref=['HE', 'SAID', 'IT', 'WAS', 'OUR', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'RADPROP', 'IS', 'IN', 'TO', 'NIGHT'] +7105-2330-0015-2325: hyp=['HE', 'SAID', 'IT', 'WAS', 'HER', 'ONLY', 'CHANCE', 'OF', 'GETTING', 'A', 'TELEGRAM', 'REDRUP', 'IS', 'INN', 'TO', 'NIGHT'] +7105-2330-0016-2326: ref=['DESPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TRELAWNEY', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0016-2326: hyp=['THIS', 'SPITE', 'THE', 'EARLINESS', 'OF', 'THE', 'HOUR', 'A', 'SMALL', 'CROWD', 'HAD', 'GATHERED', 'IN', 'THE', 'STREET', 'OUTSIDE', 'AND', 'THE', 'HORRIBLE', 'MENACING', 'TREEONER', 'REFRAIN', 'OF', 'THE', 'FIFTEEN', 'HUNDRED', 'VOTING', 'MEN', 'CAME', 'IN', 'A', 'STEADY', 'MONOTONOUS', 'CHANT'] +7105-2330-0017-2327: ref=['HE', 'EXCLAIMED', 
"WON'T", 'GO'] +7105-2330-0017-2327: hyp=['HE', 'EXCLAIMED', "WON'T", 'GO'] +7105-2330-0018-2328: ref=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BRASS', 'BAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0018-2328: hyp=['HE', 'SAYS', 'HE', 'NEVER', 'HAS', 'LEFT', 'PRISON', 'WITHOUT', 'A', 'BREASTPAND', 'TO', 'PLAY', 'HIM', 'OUT', 'AND', "HE'S", 'NOT', 'GOING', 'TO', 'GO', 'WITHOUT', 'ONE', 'NOW'] +7105-2330-0019-2329: ref=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'RELEASED', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFEND', 'IT', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0019-2329: hyp=['SAID', 'THE', 'PRIME', 'MINISTER', 'WE', 'CAN', 'HARDLY', 'BE', 'SUPPOSED', 'TO', 'SUPPLY', 'A', 'RELISSE', 'PRISONER', 'WITH', 'A', 'BRASS', 'BAND', 'HOW', 'ON', 'EARTH', 'COULD', 'WE', 'DEFENDED', 'ON', 'THE', 'ESTIMATES'] +7105-2330-0020-2330: ref=['ANYWAY', 'HE', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0020-2330: hyp=['AND', 'AWAY', 'YOU', "WON'T", 'GO', 'UNLESS', 'HE', 'HAS', 'A', 'BAND'] +7105-2330-0021-2331: ref=['POLL', 'OPENS', 'IN', 'FIVE', 'MINUTES'] +7105-2330-0021-2331: hyp=['PAUL', 'OPENS', 'IN', 'FIVE', 'MINUTES'] +7105-2330-0022-2332: ref=['IS', 'PLATTERBAFF', 'OUT', 'YET'] +7105-2330-0022-2332: hyp=['HIS', 'FURTHER', 'BATH', 'OUT', 'YET'] +7105-2330-0023-2333: ref=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0023-2333: hyp=['IN', "HEAVEN'S", 'NAME', 'WHY'] +7105-2330-0024-2334: ref=['THE', 'CHIEF', 'ORGANISER', 'RANG', 'OFF'] +7105-2330-0024-2334: hyp=['THE', 'CHIEF', 'ORGANIZER', 'RANG', 'OFF'] +7105-2330-0025-2335: ref=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', 'MUSICIANS', 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0025-2335: hyp=['THIS', 'IS', 'NOT', 'A', 'MOMENT', 'FOR', 'STANDING', 'ON', 'DIGNITY', 'HE', 'OBSERVED', 'BLUNTLY', 'MEASIANS', 'MUST', 'BE', 'SUPPLIED', 'AT', 'ONCE'] +7105-2330-0026-2336: ref=["CAN'T", 'YOU', 'GET', 'A', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANISER'] +7105-2330-0026-2336: hyp=["CAN'T", 'YOU', 'GET', 'US', 'STRIKE', 'PERMIT', 'ASKED', 'THE', 'ORGANIZER'] +7105-2330-0027-2337: ref=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0027-2337: hyp=["I'LL", 'TRY', 'SAID', 'THE', 'HOME', 'SECRETARY', 'AND', 'WENT', 'TO', 'THE', 'TELEPHONE'] +7105-2330-0028-2338: ref=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'WILL', 'VOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0028-2338: hyp=['EIGHT', "O'CLOCK", 'STRUCK', 'THE', 'CROWD', 'OUTSIDE', 'CHANTED', 'WITH', 'AN', 'INCREASING', 'VOLUME', 'OF', 'SOUND', 'REVOTE', 'THE', 'OTHER', 'WAY'] +7105-2330-0029-2339: ref=['A', 'TELEGRAM', 'WAS', 'BROUGHT', 'IN'] +7105-2330-0029-2339: hyp=['I', 'TELEGRAMAS', 'BROUGHT', 'IN'] +7105-2330-0030-2340: ref=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMMITTEE', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0030-2340: hyp=['IT', 'WAS', 'FROM', 'THE', 'CENTRAL', 'COMEDY', 'ROOMS', 'AT', 'NEMESIS'] +7105-2330-0031-2341: ref=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BAND'] +7105-2330-0031-2341: hyp=['WITHOUT', 'A', 'BAND', 'HE', 'WOULD', 'NOT', 'GO', 'AND', 'THEY', 'HAD', 'NO', 'BEND'] +7105-2330-0032-2342: ref=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] 
+7105-2330-0032-2342: hyp=['A', 'QUARTER', 'PAST', 'TEN', 'HALF', 'PAST'] +7105-2330-0033-2343: ref=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] +7105-2330-0033-2343: hyp=['HAVE', 'YOU', 'ANY', 'BAND', 'INSTRUMENTS', 'OF', 'AN', 'EASY', 'NATURE', 'TO', 'PLAY'] +7105-2330-0034-2344: ref=['DEMANDED', 'THE', 'CHIEF', 'ORGANISER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'CYMBALS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0034-2344: hyp=['DEMANDED', 'THE', 'CHIEF', 'ORGANIZER', 'OF', 'THE', 'PRISON', 'GOVERNOR', 'DRUMS', 'SYMBOLS', 'THOSE', 'SORT', 'OF', 'THINGS'] +7105-2330-0035-2345: ref=['THE', 'WARDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0035-2345: hyp=['THE', 'ORDERS', 'HAVE', 'A', 'PRIVATE', 'BAND', 'OF', 'THEIR', 'OWN', 'SAID', 'THE', 'GOVERNOR', 'BUT', 'OF', 'COURSE', 'I', "COULDN'T", 'ALLOW', 'THE', 'MEN', 'THEMSELVES'] +7105-2330-0036-2346: ref=['LEND', 'US', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANISER'] +7105-2330-0036-2346: hyp=['BLENDEST', 'THE', 'INSTRUMENTS', 'SAID', 'THE', 'CHIEF', 'ORGANIZER'] +7105-2330-0037-2347: ref=['THE', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0037-2347: hyp=['THEIR', 'POPULAR', 'SONG', 'OF', 'THE', 'MOMENT', 'REPLIED', 'THE', 'AGITATOR', 'AFTER', 'A', "MOMENT'S", 'REFLECTION'] +7105-2330-0038-2348: ref=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'WAS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'STRAINS', 'OF', 'I', "DIDN'T", 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0038-2348: hyp=['IT', 'WAS', 'A', 'TUNE', 'THEY', 'HAD', 'ALL', 'HEARD', 'HUNDREDS', 'OF', 'TIMES', 'SO', 'THERE', 'IS', 'NO', 'DIFFICULTY', 'IN', 'TURNING', 'OUT', 'A', 'PASSABLE', 'IMITATION', 'OF', 'IT', 'TO', 'THE', 'IMPROVISED', 'TRAINS', 'OF', 'EITHERN', 'WANT', 'TO', 'DO', 'IT', 'THE', 'PRISONER', 'STRODE', 'FORTH', 'TO', 'FREEDOM'] +7105-2330-0039-2349: ref=['THE', 'WORD', 'OF', 'THE', 'SONG', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'TO', 'THE', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0039-2349: hyp=['THE', 'WORD', 'OF', 'THE', 'SUN', 'HAD', 'REFERENCE', 'IT', 'WAS', 'UNDERSTOOD', 'THAT', 'INCARCERATING', 'GOVERNMENT', 'AND', 'NOT', 'TO', 'THE', 'DESTROYER', 'OF', 'THE', 'ALBERT', 'HALL'] +7105-2330-0040-2350: ref=['THE', 'SEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MAJORITY'] +7105-2330-0040-2350: hyp=['THIS', 'HEAT', 'WAS', 'LOST', 'AFTER', 'ALL', 'BY', 'A', 'NARROW', 'MATURITY'] +7105-2330-0041-2351: ref=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINET', 'MINISTERS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKE', 'BREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBAFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2330-0041-2351: hyp=['THE', 'LOCAL', 'TRADE', 'UNIONISTS', 'TOOK', 'OFFENCE', 'AT', 'THE', 'FACT', 'OF', 'CABINETS', 'HAVING', 'PERSONALLY', 'ACTED', 'AS', 'STRIKEBREAKERS', 'AND', 'EVEN', 'THE', 'RELEASE', 'OF', 'PLATTERBUFF', 'FAILED', 'TO', 'PACIFY', 'THEM'] +7105-2340-0000-2272: ref=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'THE', 'SORT', 
'OF', 'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0000-2272: hyp=['WITH', 'THAT', 'NOTORIOUS', 'FAILING', 'OF', 'HIS', 'HE', 'WAS', 'NOT', 'A', 'SORT', 'OF', 'PERSON', 'ONE', 'WANTED', 'IN', "ONE'S", 'HOUSE'] +7105-2340-0001-2273: ref=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'HER', 'HUSBAND', 'OR', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0001-2273: hyp=['WELL', 'THE', 'FAILING', 'STILL', 'EXISTS', "DOESN'T", 'IT', 'SAID', 'THE', 'HUSBAND', 'OR', 'A', 'DO', 'YOU', 'SUPPOSE', 'A', 'REFORM', 'OF', 'CHARACTER', 'IS', 'ENTAILED', 'ALONG', 'WITH', 'THE', 'ESTATE'] +7105-2340-0002-2274: ref=['BESIDES', 'CYNICISM', 'APART', 'HIS', 'BEING', 'RICH', 'WILL', 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FAILING'] +7105-2340-0002-2274: hyp=['BESIDES', 'SYNICISM', 'APART', 'IS', 'BEING', 'RICH', "WE'LL", 'MAKE', 'A', 'DIFFERENCE', 'IN', 'THE', 'WAY', 'PEOPLE', 'WILL', 'LOOK', 'AT', 'HIS', 'FEELING'] +7105-2340-0003-2275: ref=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SORDID', 'MOTIVE', 'NATURALLY', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'TIRESOME', 'MALADY'] +7105-2340-0003-2275: hyp=['WHEN', 'A', 'MAN', 'IS', 'ABSOLUTELY', 'WEALTHY', 'NOT', 'MERELY', 'WELL', 'TO', 'DO', 'ALL', 'SUSPICION', 'OF', 'SARDID', 'MOTIVE', 'NATURAL', 'DISAPPEARS', 'THE', 'THING', 'BECOMES', 'MERELY', 'A', 'PARASAN', 'MALADY'] +7105-2340-0004-2276: ref=['WILFRID', 'PIGEONCOTE', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEONCOTE', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRID', 'PIGEONCOTE', 'WHO', 'HAD', 'SUCCUMBED', 'TO', 'THE', 'AFTER', 'EFFECTS', 'OF', 'A', 'POLO', 'ACCIDENT'] +7105-2340-0004-2276: hyp=['WILFRED', 'DIGESON', 'COLT', 'HAD', 'SUDDENLY', 'BECOME', 'HEIR', 'TO', 'HIS', 'UNCLE', 'SIR', 'WILFRID', 'PIGEON', 'COAT', 'ON', 'THE', 'DEATH', 'OF', 'HIS', 'COUSIN', 'MAJOR', 'WILFRED', 'PIGEONOTE', 'WHO', 'HAD', 'SUCCUMBED', 'THE', 'DAY', 'AFTER', 'EFFECTS', 'OF', 'APOLLO', 'ACCIDENT'] +7105-2340-0005-2277: ref=['A', 'WILFRID', 'PIGEONCOTE', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONOURS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'WILFRID', 'HAD', 'BEEN', 'A', 'BAPTISMAL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPUTATION', 'THAN', 'BY', 'PERSON', 'TO', 'A', 'WIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0005-2277: hyp=['OF', 'WILFRED', 'BEECH', 'AND', 'COURT', 'HAD', 'COVERED', 'HIMSELF', 'WITH', 'HONORS', 'IN', 'THE', 'COURSE', 'OF', "MARLBOROUGH'S", 'CAMPAIGNS', 'AND', 'THE', 'NAME', 'LOYAL', 'FRED', 'HAD', 'BEEN', 'ABOVE', 'THE', 'SMALL', 'WEAKNESS', 'IN', 'THE', 'FAMILY', 'EVER', 'SINCE', 'THE', 'NEW', 'HEIR', 'TO', 'THE', 'FAMILY', 'DIGNITY', 'AND', 'ESTATES', 'WAS', 'A', 'YOUNG', 'MAN', 'OF', 'ABOUT', 'FIVE', 'AND', 'TWENTY', 'WHO', 'WAS', 'KNOWN', 'MORE', 'BY', 'REPETITION', 'THAN', 'BY', 'PERSON', 'TO', 'AVIDE', 'CIRCLE', 'OF', 'COUSINS', 'AND', 'KINSFOLK'] +7105-2340-0006-2278: ref=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0006-2278: hyp=['AND', 'THE', 'REPUTATION', 'WAS', 'AN', 'UNPLEASANT', 'ONE'] +7105-2340-0007-2279: 
ref=['FROM', 'HIS', 'LATE', 'SCHOOLDAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'KLEPTOMANIA', 'HE', 'HAD', 'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0007-2279: hyp=['FROM', 'HIS', 'LATE', 'SCHOOL', 'DAYS', 'ONWARD', 'HE', 'HAD', 'BEEN', 'POSSESSED', 'BY', 'AN', 'ACUTE', 'AND', 'OBSTINATE', 'FORM', 'OF', 'CLEFTOMANIA', 'HE', 'HAD', 'THE', 'ACQUISITIVE', 'INSTINCT', 'OF', 'THE', 'COLLECTOR', 'WITHOUT', 'ANY', 'OF', 'THE', "COLLECTOR'S", 'DISCRIMINATION'] +7105-2340-0008-2280: ref=['THE', 'SEARCH', 'USUALLY', 'PRODUCED', 'A', 'LARGE', 'AND', 'VARIED', 'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEONCOTE', 'TO', 'HIS', 'WIFE', 'SOME', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', "HERE'S", 'A', 'TELEGRAM', 'FROM', 'WILFRID', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0008-2280: hyp=['THIS', 'SEARCH', 'USUALLY', 'PRODUCE', 'A', 'LARGE', 'AND', 'VARIED', 'YIELD', 'THIS', 'IS', 'FUNNY', 'SAID', 'PETER', 'PIGEON', 'BOAT', 'TO', 'HIS', 'WIFE', 'THEM', 'HALF', 'HOUR', 'AFTER', 'THEIR', 'CONVERSATION', 'HERE', 'IS', 'A', 'TELEGRAM', 'FROM', 'MILFRED', 'SAYING', "HE'S", 'PASSING', 'THROUGH', 'HERE', 'IN', 'HIS', 'MOTOR', 'AND', 'WOULD', 'LIKE', 'TO', 'STOP', 'AND', 'PAY', 'US', 'HIS', 'RESPECTS'] +7105-2340-0009-2281: ref=['SIGNED', 'WILFRID', 'PIGEONCOTE'] +7105-2340-0009-2281: hyp=['SIGNED', 'WILFRED', 'PEACH', 'AND', 'COLT'] +7105-2340-0010-2282: ref=['I', 'SUPPOSE', "HE'S", 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0010-2282: hyp=['I', 'SUPPOSE', 'THIS', 'BRINGING', 'US', 'A', 'PRESENT', 'FOR', 'THE', 'SILVER', 'WEDDING', 'GOOD', 'GRACIOUS'] +7105-2340-0011-2283: ref=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0011-2283: hyp=['THE', 'TALK', 'FLITTED', 'NERVOUSLY', 'AND', 'HURRIEDLY', 'FROM', 'ONE', 'IMPERSONAL', 'TOPIC', 'TO', 'ANOTHER'] +7105-2340-0012-2284: ref=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0012-2284: hyp=['IN', 'THE', 'DRAWING', 'ROOM', 'AFTER', 'DINNER', 'THEIR', 'NERVOUSNESS', 'AND', 'AWKWARDNESS', 'INCREASED'] +7105-2340-0013-2285: ref=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVER', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'FOR', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0013-2285: hyp=['OH', 'WE', "HAVEN'T", 'SHOWN', 'YOU', 'THE', 'SILVERY', 'WEDDING', 'PRESENTS', 'SAID', 'MISSUS', 'PETER', 'SUDDENLY', 'AS', 'THOUGH', 'STRUCK', 'BY', 'A', 'BRILLIANT', 'IDEA', 'OF', 'HER', 'ENTERTAINING', 'THE', 'GUEST', 'HERE', 'THEY', 'ALL', 'ARE'] +7105-2340-0014-2286: ref=['SUCH', 'NICE', 'USEFUL', 'GIFTS', 'A', 'FEW', 'DUPLICATES', 'OF', 'COURSE'] +7105-2340-0014-2286: hyp=['SUCH', 'NICE', 'USEFUL', 'GIFTS', 'A', 'FEW', 'DEPLICATES', 'OF', 'COURSE'] +7105-2340-0015-2287: ref=['SEVEN', 'CREAM', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0015-2287: hyp=['SEVEN', 'QUEEN', 'JUGS', 'PUT', 'IN', 'PETER'] +7105-2340-0016-2288: ref=['WE', 'FEEL', 'THAT', 'WE', 'MUST', 'LIVE', 'ON', 'CREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0016-2288: hyp=['WE', 'FEEL', 'THAT', 'WE', 
'MUST', 'LIVE', 'UNCREAM', 'FOR', 'THE', 'REST', 'OF', 'OUR', 'LIVES'] +7105-2340-0017-2289: ref=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0017-2289: hyp=['OF', 'COURSE', 'SOME', 'OF', 'THEM', 'CAN', 'BE', 'CHANGED'] +7105-2340-0018-2290: ref=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARET', 'JUG', 'SAID', 'WILFRID', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0018-2290: hyp=['I', 'PUT', 'IT', 'DOWN', 'BY', 'THE', 'CLARGA', 'SAID', 'WILFRIED', 'BUSY', 'WITH', 'ANOTHER', 'OBJECT'] +7105-2340-0019-2291: ref=['VIGILANCE', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0019-2291: hyp=['EACH', 'A', 'LENS', 'WAS', 'NOT', 'COMPLETELY', 'CROWNED', 'WITH', 'A', 'SENSE', 'OF', 'VICTORY'] +7105-2340-0020-2292: ref=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] +7105-2340-0020-2292: hyp=['AFTER', 'THEY', 'HAD', 'SAID', 'GOOD', 'NIGHT', 'TO', 'THEIR', 'VISITOR', 'MISSUS', 'PETER', 'EXPRESSED', 'HER', 'CONVICTION', 'THAT', 'HE', 'HAD', 'TAKEN', 'SOMETHING'] +7105-2340-0021-2293: ref=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OFF'] +7105-2340-0021-2293: hyp=['HOW', 'ON', 'EARTH', 'ARE', 'WE', 'TO', 'KNOW', 'SAID', 'PETER', 'THE', 'MEAN', 'PIG', "HASN'T", 'BROUGHT', 'US', 'A', 'PRESENT', 'AND', "I'M", 'HANGED', 'IF', 'HE', 'SHALL', 'CARRY', 'ONE', 'OFF'] +7105-2340-0022-2294: ref=["IT'S", 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0022-2294: hyp=['IS', 'THE', 'ONLY', 'THING', 'TO', 'DO'] +7105-2340-0023-2295: ref=['WILFRID', 'WAS', 'LATE', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0023-2295: hyp=['WILFRED', 'WAS', 'LAID', 'IN', 'COMING', 'DOWN', 'TO', 'BREAKFAST', 'AND', 'HIS', 'MANNER', 'SHOWED', 'PLAINLY', 'THAT', 'SOMETHING', 'WAS', 'AMISS'] +7105-2340-0024-2296: ref=["IT'S", 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PORTMANTEAU'] +7105-2340-0024-2296: hyp=['IS', 'AN', 'UNPLEASANT', 'THING', 'TO', 'HAVE', 'TO', 'SAY', 'HE', 'BLURTED', 'OUT', 'PRESENTLY', 'BUT', "I'M", 'AFRAID', 'YOU', 'MUST', 'HAVE', 'A', 'THIEF', 'AMONG', 'YOUR', 'SERVANTS', "SOMETHING'S", 'BEEN', 'TAKEN', 'OUT', 'OF', 'MY', 'PORTMANTEAU'] +7105-2340-0025-2297: ref=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0025-2297: hyp=['IT', 'WAS', 'A', 'LITTLE', 'PRESENT', 'FROM', 'MY', 'MOTHER', 'AND', 'MYSELF', 'FOR', 'YOUR', 'SILVER', 'WEDDING'] +7105-2340-0026-2298: ref=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'CREAM', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 'FELT', 'RATHER', 'AWKWARD', 'ABOUT', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0026-2298: hyp=['I', 'SHOULD', 'HAVE', 'GIVEN', 'IT', 'TO', 'YOU', 'LAST', 'NIGHT', 'AFTER', 'DINNER', 'ONLY', 'IT', 'HAPPENED', 'TO', 'BE', 'A', 'QUEEN', 'JUG', 'AND', 'YOU', 'SEEMED', 'ANNOYED', 'AT', 'HAVING', 'SO', 'MANY', 'DUPLICATES', 'SO', 'I', 
'FELT', 'RATHER', 'AWKWARD', 'OF', 'A', 'GIVING', 'YOU', 'ANOTHER'] +7105-2340-0027-2299: ref=['THE', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THESE', 'MANY', 'YEARS'] +7105-2340-0027-2299: hyp=['THIS', 'SNATCHER', 'HAD', 'BEEN', 'AN', 'ORPHAN', 'THIS', 'MANY', 'YEARS'] +7105-2340-0028-2300: ref=['LADY', 'ERNESTINE', 'PIGEONCOTE', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SON', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0028-2300: hyp=['LADY', 'ERNESTON', 'BEECH', 'AND', 'COLD', 'HIS', 'MOTHER', 'MOVED', 'IN', 'CIRCLES', 'WHICH', 'WERE', 'ENTIRELY', 'BEYOND', 'THEIR', 'COMPASS', 'OR', 'AMBITIONS', 'AND', 'THE', 'SUN', 'WOULD', 'PROBABLY', 'ONE', 'DAY', 'BE', 'AN', 'AMBASSADOR'] +7105-2340-0029-2301: ref=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0029-2301: hyp=['HUSBAND', 'AND', 'WIFE', 'LOOKED', 'BLANKLY', 'AND', 'DESPERATELY', 'AT', 'ONE', 'ANOTHER'] +7105-2340-0030-2302: ref=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0030-2302: hyp=['IT', 'WAS', 'MISSUS', 'PETER', 'WHO', 'ARRIVED', 'FIRST', 'AT', 'AN', 'INSPIRATION', 'HOW', 'DREADFUL', 'TO', 'THINK', 'THERE', 'ARE', 'THIEVES', 'IN', 'THE', 'HOUSE', 'WE', 'KEEP', 'THE', 'DRAWING', 'ROOM', 'LOCKED', 'UP', 'AT', 'NIGHT', 'OF', 'COURSE', 'BUT', 'ANYTHING', 'MIGHT', 'BE', 'CARRIED', 'OFF', 'WHILE', 'WE', 'ARE', 'AT', 'BREAKFAST'] +7105-2340-0031-2303: ref=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVERWARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'JUG', 'IN', 'HER', 'HANDS'] +7105-2340-0031-2303: hyp=['SHE', 'ROSE', 'AND', 'WENT', 'OUT', 'HURRIEDLY', 'AS', 'THOUGH', 'TO', 'ASSURE', 'HERSELF', 'THAT', 'THE', 'DRAWING', 'ROOM', 'WAS', 'NOT', 'BEING', 'STRIPPED', 'OF', 'ITS', 'SILVERWARE', 'AND', 'RETURNED', 'A', 'MOMENT', 'LATER', 'BEARING', 'A', 'CREAM', 'CHUG', 'IN', 'HER', 'HANDS'] +7105-2340-0032-2304: ref=['THE', 'PIGEONCOTES', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0032-2304: hyp=['THE', 'PIGEON', 'CORDS', 'HAD', 'TURNED', 'PALER', 'THAN', 'EVER', 'MISSUS', 'PETER', 'HAD', 'A', 'FINAL', 'INSPIRATION'] +7105-2340-0033-2305: ref=['PETER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0033-2305: hyp=['EITHER', 'DASHED', 'OUT', 'OF', 'THE', 'ROOM', 'WITH', 'GLAD', 'RELIEF', 'HE', 'HAD', 'LIVED', 'SO', 'LONG', 'DURING', 'THE', 'LAST', 'FEW', 'MINUTES', 'THAT', 'A', 'GOLDEN', 'WEDDING', 'SEEMED', 'WITHIN', 'MEASURABLE', 'DISTANCE'] +7105-2340-0034-2306: ref=['MISSUS', 'PETER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'COYNESS'] +7105-2340-0034-2306: hyp=['MISSUS', 'BEATER', 'TURNED', 'TO', 'HER', 'GUEST', 'WITH', 'CONFIDENTIAL', 'KINDNESS'] +7105-2340-0035-2307: ref=["PETER'S", 'LITTLE', 'WEAKNESS', 'IT', 'RUNS', 'IN', 
'THE', 'FAMILY', 'GOOD', 'LORD'] +7105-2340-0035-2307: hyp=['PETER', 'IS', 'LITTLE', 'WEAKNESS', 'EACH', "ONE'S", 'IN', 'THE', 'FAMILY', 'GOOD', 'LORD'] +7105-2340-0036-2308: ref=['DO', 'YOU', 'MEAN', 'TO', 'SAY', "HE'S", 'A', 'KLEPTOMANIAC', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0036-2308: hyp=['DO', 'YOU', 'MEAN', 'TO', 'SAY', "HE'S", 'A', 'CLAPTOMANIA', 'LIKE', 'COUSIN', 'SNATCHER'] +7105-2340-0037-2309: ref=['BRAVE', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7105-2340-0037-2309: hyp=['PRETTY', 'LITTLE', 'WOMAN', 'SAID', 'PETER', 'WITH', 'A', 'GASP', 'OF', 'RELIEF', 'I', 'COULD', 'NEVER', 'HAVE', 'DONE', 'IT'] +7902-96591-0000-0: ref=['I', 'AM', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0000-0: hyp=['AND', 'FROM', 'THE', 'CUTTER', 'LYING', 'OFF', 'THE', 'COAST'] +7902-96591-0001-1: ref=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0001-1: hyp=["DON'T", 'CRY', 'HE', 'SAID', 'I', 'WAS', 'OBLIGED', 'TO', 'COME'] +7902-96591-0002-2: ref=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0002-2: hyp=['AND', 'AND', 'YOU', 'HAVE', 'NOT', 'FOUND', 'OUT', 'ANYTHING', 'CAME', 'IN', 'QUICK', 'FRIGHTENED', 'TONES'] +7902-96591-0003-3: ref=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0003-3: hyp=['I', 'WISH', 'YOU', 'WOULD', 'BELIEVE', 'ME', 'THAT', 'I', 'AM', 'IN', 'AS', 'GREAT', 'TROUBLE', 'ABOUT', 'IT', 'AS', 'YOU', 'ARE'] +7902-96591-0004-4: ref=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAEME', 'HAS', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0004-4: hyp=['THAT', 'MY', 'FATHER', 'SIR', 'RISDON', 'GRAHAME', 'SMUGGLED', 'GOODS', 'HERE'] +7902-96591-0005-5: ref=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0005-5: hyp=['HE', 'COULD', 'NOT', 'HELP', 'IT', 'HE', 'HATES', 'THE', 'SMUGGLERS', 'YOU', 'SHALL', 'NOT', 'TELL'] +7902-96591-0006-6: ref=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHY', 'WAS', 'SILENT'] +7902-96591-0006-6: hyp=['PRAY', 'PRAY', 'SAY', 'YOU', 'WILL', 'NOT', 'ARCHIE', 'WAS', 'SILENT'] +7902-96591-0007-7: ref=['THEN', 'AS', 'ARCHY', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0007-7: hyp=['THEN', 'AS', 'ARCHIE', 'STOOD', 'IN', 'THE', 'DARK', 'LITERALLY', 'AGHAST', 'WITH', 'ASTONISHMENT', 'HE', 'HEARD', 'THE', 'FAINT', 'RUSTLING', 'ONCE', 'MORE', 'AND', 'AGAIN', 'ALL', 'WAS', 'SILENT'] +7902-96591-0008-8: ref=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMOUR', 'PROPRE', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARCHIBALD', 'RAYSTOKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0008-8: hyp=['HE', 'LAUGHED', 'BUT', 'IT', 'WAS', 'A', 'CURIOUS', 'KIND', 'OF', 'LAUGH', 'FULL', 'OF', 'VEXATION', 'INJURED', 'AMORE', 'A', 'PROPER', 'AS', 'THE', 'FRENCH', 'CALL', 'OUR', 'LOVE', 'OF', 'OUR', 'OWN', 'DIGNITY', 'OF', 'WHICH', 'ARQUEBALD', 'RAY', 'STROKE', 'IN', 'THE', 'FULL', 'FLUSH', 'OF', 'HIS', 'YOUNG', 'BELIEF', 'IN', 
'HIS', 'IMPORTANCE', 'AS', 'A', 'BRITISH', 'OFFICER', 'HAD', 'A', 'PRETTY', 'GOOD', 'STOCK'] +7902-96591-0009-9: ref=['IT', 'ALL', 'COMES', 'OF', 'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0009-9: hyp=['AND', 'ALL', 'COMES', 'OF', 'DRESSING', 'UP', 'IN', 'THIS', 'STUPID', 'WAY', 'LIKE', 'A', 'ROUGH', 'FISHER', 'LAD'] +7902-96591-0010-10: ref=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0010-10: hyp=['COLD', 'WATER', 'CAME', 'ON', 'THIS', 'IDEA', 'DIRECTLY', 'AS', 'HE', 'RECALLED', 'THE', 'FACT', 'THAT', 'THE', 'DARKNESS', 'WAS', 'INTENSE', 'AND', 'CELIA', 'COULD', 'NOT', 'HAVE', 'SEEN', 'HIM'] +7902-96591-0011-11: ref=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0011-11: hyp=["I'LL", 'SOON', 'SHOW', 'THEM', 'THAT', 'I', 'AM', 'NOT', 'GOING', 'TO', 'BE', 'PLAYED', 'WITH'] +7902-96591-0012-12: ref=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0012-12: hyp=['FOR', 'IT', 'SUDDENLY', 'OCCURRED', 'TO', 'HIM', 'THAT', 'HE', 'WAS', 'NOT', 'ONLY', 'A', 'PRISONER', 'BUT', 'A', 'PRISONER', 'IN', 'THE', 'POWER', 'OF', 'A', 'VERY', 'RECKLESS', 'SET', 'OF', 'PEOPLE', 'WHO', 'WOULD', 'STOP', 'AT', 'NOTHING'] +7902-96591-0013-13: ref=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0013-13: hyp=['NO', 'HE', 'THOUGHT', 'TO', 'HIMSELF', 'I', "DON'T", 'BELIEVE', 'THEY', 'WOULD', 'KILL', 'ME', 'BUT', 'THEY', 'WOULD', 'KNOCK', 'ME', 'ABOUT'] +7902-96591-0014-14: ref=['THE', 'KICK', 'HE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0014-14: hyp=['THE', 'KICKIE', 'HAD', 'RECEIVED', 'WAS', 'A', 'FORETASTE', 'OF', 'WHAT', 'HE', 'MIGHT', 'EXPECT', 'AND', 'AFTER', 'A', 'LITTLE', 'CONSIDERATION', 'HE', 'CAME', 'TO', 'THE', 'CONCLUSION', 'THAT', 'HIS', 'DUTY', 'WAS', 'TO', 'ESCAPE', 'AND', 'GET', 'BACK', 'TO', 'THE', 'CUTTER', 'AS', 'QUICKLY', 'AS', 'HE', 'COULD'] +7902-96591-0015-15: ref=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THEN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0015-15: hyp=['TO', 'DO', 'THIS', 'HE', 'MUST', 'SCHEME', 'LIE', 'HID', 'TILL', 'MORNING', 'THAN', 'MAKE', 'FOR', 'THE', 'NEAREST', 'POINT', 'AND', 'SIGNAL', 'FOR', 'HELP', 'UNLESS', 'A', "BOAT'S", 'CREW', 'WERE', 'ALREADY', 'SEARCHING', 'FOR', 'HIM', 'HOW', 'TO', 'ESCAPE'] +7902-96591-0016-16: ref=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0016-16: hyp=['THE', 'WINDOW', 'WAS', 'BARRED', 'BUT', 'HE', 'WENT', 'TO', 'IT', 'AND', 'TRIED', 
'THE', 'BARS', 'ONE', 'BY', 'ONE', 'TO', 'FIND', 'THEM', 'ALL', 'SOLIDLY', 'FITTED', 'INTO', 'THE', 'STONE', 'SILL'] +7902-96591-0017-17: ref=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0017-17: hyp=['NEXT', 'MOMENT', 'AS', 'HE', 'FELT', 'HIS', 'WAY', 'ABOUT', 'HIS', 'HAND', 'TOUCHED', 'AN', 'OLD', 'FASHIONED', 'MARBLE', 'MANTELPIECE', 'FIREPLACE', 'CHIMNEY'] +7902-96591-0018-18: ref=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0018-18: hyp=['YES', 'IF', 'OTHER', 'WAYS', 'FAILED', 'HE', 'COULD', 'ESCAPE', 'UP', 'THE', 'CHIMNEY'] +7902-96591-0019-19: ref=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'COULD', 'NOT', 'DO', 'THAT'] +7902-96591-0019-19: hyp=['NO', 'THAT', 'WAS', 'TOO', 'BAD', 'HE', 'CANNOT', 'DO', 'THAT'] +7902-96591-0020-20: ref=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOZE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0020-20: hyp=['SYMPATHY', 'AND', 'PITY', 'FOR', 'THE', 'DWELLERS', 'IN', 'THE', 'HOSE', 'WERE', 'COMPLETELY', 'GONE', 'NOW', 'AND', 'HE', 'SET', 'HIS', 'TEETH', 'FAST', 'AND', 'MENTALLY', 'CALLED', 'HIMSELF', 'A', 'WEAK', 'IDIOT', 'FOR', 'EVER', 'THINKING', 'ABOUT', 'SUCH', 'PEOPLE'] +7902-96591-0021-21: ref=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0021-21: hyp=['A', 'NARROW', 'TABLE', 'AGAINST', 'THE', 'WALL', 'IN', 'TWO', 'PLACES'] +7902-96591-0022-22: ref=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0022-22: hyp=['HE', 'WENT', 'AND', 'TRIED', 'TO', 'FORCE', 'HIS', 'HEAD', 'THROUGH', 'RECALLING', 'AS', 'HE', 'DID', 'THAT', 'WHERE', 'A', "PERSON'S", 'HEAD', 'WOULD', 'GO', 'THE', 'REST', 'OF', 'THE', 'BODY', 'WOULD', 'PASS'] +7902-96591-0023-23: ref=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0023-23: hyp=['BUT', 'THERE', 'WAS', 'NO', 'CHANCE', 'FOR', 'HIS', 'BODY', 'THERE', 'THE', 'HEAD', 'WOULD', 'NOT', 'GO', 'FIRST'] +7902-96591-0024-24: ref=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISON', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96591-0024-24: hyp=['A', 'FELLOW', 'WHO', 'WAS', 'SHUT', 'UP', 'IN', 'PRISONED', 'FOR', 'LIFE', 'MIGHT', 'DO', 'IT', 'HE', 'SAID', 'BUT', 'NOT', 'IN', 'A', 'CASE', 'LIKE', 'THIS'] +7902-96592-0000-25: ref=['SURE', "YOU'VE", 'LOOKED', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0000-25: hyp=['SURE', 'YOU', 'LOOK', 'ROUND', 'EVERYWHERE', 'BOY', 'YES', 'FATHER', 'QUITE'] +7902-96592-0001-26: ref=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0001-26: hyp=["I'M", 'GOING', 'HOME', 'TO', 'BREAKFAST'] +7902-96592-0002-27: ref=['SHALL', 'I', 'COME', 'TOO', 'FATHER', 'NO'] +7902-96592-0002-27: hyp=['SHALL', 'I', 'COME', 'TO', 'FATHER', 'NO'] +7902-96592-0003-28: ref=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 
'SAW', 'JERRY', "NANDY'S", 'LOBSTER', 'BOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0003-28: hyp=['STOP', 'HERE', 'TILL', 'SIR', 'RISDON', 'COMES', 'DOWN', 'AND', 'TELL', 'HIM', "I'M", 'VERY', 'SORRY', 'THAT', 'WE', 'SHOULD', 'HAVE', 'CLEARED', 'OUT', 'LAST', 'NIGHT', 'ONLY', 'A', 'BORN', 'FOOL', 'SAW', 'JERRY', "ANDY'S", 'LOBSTERBOAT', 'COMING', 'INTO', 'THE', 'COVE', 'AND', 'CAME', 'RUNNING', 'TO', 'SAY', 'IT', 'WAS', 'A', 'PARTY', 'FROM', 'THE', 'CUTTER', 'YES', 'FATHER'] +7902-96592-0004-29: ref=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0004-29: hyp=['TELL', 'HIM', 'NOT', 'TO', 'BE', 'UNEASY', 'TIS', 'ALL', 'RIGHT', 'AND', "I'LL", 'HAVE', 'EVERYTHING', 'CLEAR', 'AWAY', 'TO', 'NIGHT'] +7902-96592-0005-30: ref=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 'ARCHY', 'RAYSTOKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0005-30: hyp=['THE', 'DULL', 'SOUND', 'OF', 'DEPARTING', 'STEPS', 'AND', 'A', 'LOW', 'WHISTLING', 'SOUND', 'COMING', 'DOWN', 'THROUGH', 'THE', 'SKYLIGHT', 'WINDOW', 'INTO', 'THE', 'CABIN', 'WHERE', 'ARCHIE', 'RAYSTROKE', 'LAY', 'WITH', 'HIS', 'HEAVY', 'EYELIDS', 'PRESSED', 'DOWN', 'BY', 'SLEEP'] +7902-96592-0006-31: ref=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0006-31: hyp=['WHAT', 'A', 'QUEER', 'DREAM', 'HE', 'THOUGHT', 'TO', 'HIMSELF'] +7902-96592-0007-32: ref=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GURR', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEAL', 'THE', 'BOY', 'WHO', 'HELPED', 'THE', 'COOK'] +7902-96592-0007-32: hyp=['BUT', 'HOW', 'QUEER', 'FOR', 'MISTER', 'GIRT', 'TO', 'BE', 'TALKING', 'LIKE', 'THAT', 'TO', 'ANDREW', 'TEALE', 'THE', 'BOY', 'WHO', 'HELPS', 'THE', 'COOK'] +7902-96592-0008-33: ref=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GURR', 'FATHER'] +7902-96592-0008-33: hyp=['AND', 'WHY', 'DID', 'ANDY', 'CALL', 'MISTER', 'GERFATHER'] +7902-96592-0009-34: ref=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'KNOTTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0009-34: hyp=['THERE', 'WAS', 'AN', 'INTERVAL', 'OF', 'THINKING', 'OVER', 'THIS', 'NAUGHTY', 'QUESTION', 'DURING', 'WHICH', 'THE', 'LOW', 'WHISTLING', 'WENT', 'ON'] +7902-96592-0010-35: ref=['AND', "I'M", 'HUNGRY', 'TOO', 'TIME', 'I', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0010-35: hyp=['AND', 'UNHUNGRY', 'TOO', 'TELL', 'IT', 'WAS', 'UP', 'I', 'SUPPOSE'] +7902-96592-0011-36: ref=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0011-36: hyp=['NO', 'HE', 'WAS', 'NOT', 'DREAMING', 'FOR', 'HE', 'WAS', 'LOOKING', 'OUT', 'ON', 'THE', 'SEA', 'OVER', 'WHICH', 'A', 'FAINT', 'MIST', 'HUNG', 'LIKE', 'WREATHS', 'OF', 'SMOKE'] +7902-96592-0012-37: ref=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDON', 'THEY', 'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] +7902-96592-0012-37: hyp=['WHAT', 'DID', 'THEY', 'SAY', 'FALSE', 'ALARM', 'TELL', 'SIR', 'RISDEN', 'THEY', 
'WOULD', 'CLEAR', 'ALL', 'AWAY', 'TO', 'NIGHT', 'SEE', 'IF', 'ANYTHING', 'HAD', 'BEEN', 'LEFT', 'ABOUT', 'LOBSTER', 'BOAT'] +7902-96592-0013-38: ref=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RAN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARE', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0013-38: hyp=['ONCE', 'OUT', 'OF', 'THAT', 'ROOM', 'HE', 'COULD', 'RUN', 'AND', 'BY', 'DAYLIGHT', 'THE', 'SMUGGLERS', 'DARED', 'NOT', 'HUNT', 'HIM', 'DOWN'] +7902-96592-0014-39: ref=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 'WAS', 'ADVANCING', 'TOWARD', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0014-39: hyp=['OH', 'THOSE', 'BARS', 'HE', 'MENTALLY', 'EXCLAIMED', 'AND', 'HE', 'WAS', 'ADVANCING', 'TOWARDS', 'THEM', 'WHEN', 'JUST', 'AS', 'HE', 'DREW', 'NEAR', 'THERE', 'WAS', 'A', 'RUSTLING', 'NOISE', 'UNDER', 'THE', 'WINDOW', 'A', 'COUPLE', 'OF', 'HANDS', 'SEIZED', 'THE', 'BARS', 'THERE', 'WAS', 'A', 'SCRATCHING', 'OF', 'BOOT', 'TOES', 'AGAINST', 'STONE', 'WORK', 'AND', "RAM'S", 'FACE', 'APPEARED', 'TO', 'GAZE', 'INTO', 'THE', 'ROOM', 'BY', 'INTENTION', 'BUT', 'INTO', 'THE', 'ASTONISHED', 'COUNTENANCE', 'OF', 'THE', 'YOUNG', 'MIDSHIPMAN', 'INSTEAD'] +7902-96592-0015-40: ref=['RAM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0015-40: hyp=['ROOM', 'WAS', 'THE', 'FIRST', 'TO', 'RECOVER', 'FROM', 'HIS', 'SURPRISE'] +7902-96592-0016-41: ref=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0016-41: hyp=['HULLO', 'HE', 'SAID', 'WHO', 'ARE', 'YOU'] +7902-96592-0017-42: ref=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0017-42: hyp=['GO', 'ROUND', 'AND', 'OPEN', 'THE', 'DOOR', 'I', 'WAS', 'SHUT', 'IN', 'LAST', 'NIGHT', 'BY', 'MISTAKE'] +7902-96592-0018-43: ref=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'YOU', 'WAS'] +7902-96592-0018-43: hyp=['I', 'SAW', 'YOU', 'LAST', 'NIGHT', 'AND', 'WONDERED', 'WHOSE', 'BOY', 'HE', 'WAS'] +7902-96592-0019-44: ref=['IT', 'WAS', 'YOU', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0019-44: hyp=['IT', 'WAS', 'YOUR', 'FATHER', 'KICKED', 'FOR', 'SHIRKING', 'AND', 'MY', 'WELL', 'I', 'HARDLY', 'KNOWED', 'YOU'] +7902-96592-0020-45: ref=['NONSENSE'] +7902-96592-0020-45: hyp=['NONSENSE'] +7902-96592-0021-46: ref=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0021-46: hyp=["WON'T", 'DO', 'SAID', 'RAM', 'GRINNING'] +7902-96592-0022-47: ref=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFICER'] +7902-96592-0022-47: hyp=['THINK', 'I', "DON'T", 'KNOW', 'YOU', 'MISTER', 'ORFASTER'] +7902-96592-0023-48: ref=["WON'T", 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0023-48: hyp=['WELL', 'DO', 'SAID', 'RAM', 'QUICKLY', 'I', 'KNOW', 'YOU'] +7902-96592-0024-49: ref=['BEEN', 'PLAYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0024-49: hyp=['COMPLYING', 'THE', 'SPY', "THAT'S", 'WHAT', "YOU'VE", 'BEEN', 'DOING', 'WHO', 'LOCKED', 'YOU', 'IN'] +7902-96592-0025-50: 
ref=['ARCHY', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] +7902-96592-0025-50: hyp=['ARCHIE', 'STEPPED', 'BACK', 'TO', 'THE', 'DOOR', 'LISTENING', 'BUT', 'THERE', 'WAS', 'NOT', 'A', 'SOUND'] +7902-96592-0026-51: ref=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0026-51: hyp=['HE', 'HAS', 'GONE', 'TO', 'GIVE', 'THE', 'ALARM', 'THOUGHT', 'THE', 'PRISONER', 'AND', 'HE', 'LOOKED', 'EXCITEDLY', 'ROUND', 'FOR', 'A', 'WAY', 'OF', 'ESCAPE'] +7902-96592-0027-52: ref=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0027-52: hyp=['NOTHING', 'BUT', 'THE', 'CHIMNEY', 'PRESENTED', 'ITSELF'] +7902-96592-0028-53: ref=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'BREAST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'AT', 'ONE', 'OF', 'THE', 'BARS', 'LABOUR', 'IN', 'VAIN'] +7902-96592-0028-53: hyp=['A', 'HAPPY', 'INSPIRATION', 'HAD', 'COME', 'AND', 'PLACING', 'ONE', 'HAND', 'UPON', 'HIS', 'CHEST', 'HE', 'THRUST', 'IN', 'THE', 'OTHER', 'GAVE', 'A', 'TUG', 'AND', 'DREW', 'OUT', 'HIS', 'LITTLE', 'CURVED', 'DIRK', 'GLANCED', 'AT', 'THE', 'EDGE', 'RAN', 'TO', 'THE', 'WINDOW', 'AND', 'BEGAN', 'TO', 'CUT', 'IT', 'ONE', 'OF', 'THE', 'BARS', 'LABOR', 'IN', 'VAIN'] +7902-96592-0029-54: ref=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALISED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0029-54: hyp=['HE', 'DIVIDED', 'THE', 'PAINT', 'AND', 'PRODUCED', 'A', 'FEW', 'SQUEAKS', 'AND', 'GRATING', 'SOUNDS', 'AS', 'HE', 'REALIZED', 'THAT', 'THE', 'ATTEMPT', 'WAS', 'MADNESS'] +7902-96592-0030-55: ref=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'EAR', 'ASSAILING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0030-55: hyp=['THE', 'RESULT', 'WAS', 'NOT', 'VERY', 'SATISFACTORY', 'BUT', 'SUFFICIENTLY', 'SO', 'TO', 'MAKE', 'HIM', 'ESSAY', 'THE', 'BAR', 'OF', 'THE', 'WINDOW', 'ONCE', 'MORE', 'PRODUCING', 'A', 'GRATING', 'IRRES', 'SELLING', 'SOUND', 'AS', 'HE', 'FOUND', 'THAT', 'NOW', 'HE', 'DID', 'MAKE', 'A', 'LITTLE', 'IMPRESSION', 'SO', 'LITTLE', 'THOUGH', 'THAT', 'THE', 'PROBABILITY', 'WAS', 'IF', 'HE', 'KEPT', 'ON', 'WORKING', 'WELL', 'FOR', 'TWENTY', 'FOUR', 'HOURS', 'HE', 'WOULD', 'NOT', 'GET', 'THROUGH'] +7902-96592-0031-56: ref=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0031-56: hyp=['BUT', 'AT', 'THE', 'END', 'OF', 'FIVE', 'MINUTES', 'HE', 'STOPPED', 'AND', 'THRUST', 'BACK', 'THE', 'DIRK', 'INTO', 'ITS', 'SHEATH'] +7902-96592-0032-57: ref=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'HA', 'HA', 'LAUGHED', 'THE', 'BOY', 'JEERINGLY'] +7902-96592-0032-57: hyp=['NO', 'I', "CAN'T", 'PART', 'WITH', 'THAT', 'HA', 'HA', 'LAUGHED', 'THE', 'BOY', 
'JEERINGLY'] +7902-96592-0033-58: ref=['BUT', "I'LL", 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0033-58: hyp=['BUT', 'ALL', 'YES', "I'LL", 'GIVE', 'YOU', 'A', 'GUINEA', 'IF', 'YOU', 'WILL', 'LET', 'ME', 'OUT'] +7902-96592-0034-59: ref=['GUINEA', 'SAID', 'THE', 'BOY', 'THINK', "I'D", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TWO'] +7902-96592-0034-59: hyp=['GUINEAS', 'OF', 'THE', 'BOY', 'THINK', "I'LL", 'DO', 'IT', 'FOR', 'A', 'GUINEA', 'WELL', 'THEN', 'TOO'] +7902-96592-0035-60: ref=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0035-60: hyp=['BE', 'QUICK', "THERE'S", 'A', 'GOOD', 'FELLOW', 'I', 'WANT', 'TO', 'GET', 'AWAY', 'AT', 'ONCE'] +7902-96592-0036-61: ref=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0036-61: hyp=['NOT', 'YOU', 'ONLY', 'A', 'SHAM'] +7902-96592-0037-62: ref=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 'ON', 'ALL', 'SKEW', 'REW'] +7902-96592-0037-62: hyp=['WHY', 'YOUR', 'CLOTHES', "DON'T", 'FIT', 'YOU', 'AND', 'YOUR', "CAP'S", 'PUT', 'ON', 'ALL', 'SKIRO'] +7902-96592-0038-63: ref=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0038-63: hyp=['NEVER', 'MIND', 'ABOUT', 'THAT', 'LET', 'ME', 'OUT', 'OF', 'THIS', 'PLACE'] +7902-96592-0039-64: ref=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHY', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0039-64: hyp=['I', 'TOLD', 'YOU', 'A', 'FISHER', 'BOY', 'CRIED', 'ARCHIE', 'IMPATIENTLY', 'BUT', 'TRYING', 'NOT', 'TO', 'OFFEND', 'HIS', 'VISITOR', 'WHO', 'POSSESSED', 'THE', 'POWER', 'OF', 'CONFERRING', 'FREEDOM', 'BY', 'SPEAKING', 'SHARPLY'] +7902-96592-0040-65: ref=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0040-65: hyp=['NOT', 'YOU', 'LOOK', 'LIKE', 'A', 'WILD', 'BEAST', 'IN', 'A', 'CAGE', 'LIKE', 'A', 'MONKEY', 'YOU', 'INSOLENT'] +7902-96592-0041-66: ref=['ARCHY', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0041-66: hyp=['ARCHIE', 'CHECKED', 'HIMSELF', 'AND', 'THE', 'BOY', 'LAUGHED'] +7902-96592-0042-67: ref=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0042-67: hyp=['IT', 'WAS', 'YOUR', 'TURN', 'YESTERDAY', "IT'S", 'MINE', 'TO', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0043-68: ref=['YOU', 'LAUGHED', 'AND', 'FLEERED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0043-68: hyp=['YOU', 'LAUGHED', 'AND', 'FLARED', 'AT', 'ME', 'WHEN', 'I', 'WAS', 'ON', 'THE', "CUTTER'S", 'DECK'] +7902-96592-0044-69: ref=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'A', 'RUM', 'UN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0044-69: hyp=['I', 'SAY', 'YOU', 'DO', 'LOOK', 'LIKE', 'A', 'ROMAN', 'JUST', 'LIKE', 'A', 'BIG', 'MONKEY', 'IN', 'A', 'SHOW'] +7902-96592-0045-70: ref=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0045-70: hyp=['RAM', 'SHOWED', 'HIS', 'WHITE', 'TEETH', 'AS', 'HE', 'BURST', 'OUT', 'WITH', 'A', 'LONG', 'LOW', 'FIT', 'OF', 'LAUGHTER'] +7902-96592-0046-71: ref=['YOU', "ROPE'S", 'END', 'ME', 'HE', 'SAID'] +7902-96592-0046-71: hyp=['YOU', 'HOPES', 'AND', 'ME', 'HE', 'SAID'] +7902-96592-0047-72: 
ref=['WHY', 'I', 'COULD', 'TIE', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0047-72: hyp=['WHY', 'I', 'COULD', 'TIE', 'YOU', 'UP', 'IN', 'A', 'KNOT', 'AND', 'HEAVE', 'YOU', 'OFF', 'THE', 'CLIFF', 'ANY', 'DAY', 'WHAT', 'A', 'GAME'] +7902-96592-0048-73: ref=['BIT', 'OF', 'A', 'MIDDY', 'FED', 'ON', 'SALT', 'TACK', 'AND', 'WEEVILLY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', "ROPE'S", 'END'] +7902-96592-0048-73: hyp=['BIT', 'OF', 'AMITY', 'FED', 'ON', 'A', 'SALT', 'TACK', 'AND', 'WEEVILY', 'BISCUIT', 'TALK', 'OF', 'GIVING', 'ME', 'ROPES', 'AND'] +7902-96592-0049-74: ref=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0049-74: hyp=['ONCE', 'MORE', 'WILL', 'YOU', 'COME', 'AND', 'LET', 'ME', 'OUT', 'NO'] +7902-96592-0050-75: ref=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'THEM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HANDS', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96592-0050-75: hyp=['TO', 'HIS', 'ASTONISHMENT', 'THE', 'BOY', 'DID', 'NOT', 'FLINCH', 'BUT', 'THRUST', 'HIS', 'OWN', 'ARMS', 'THROUGH', 'PLACING', 'HIM', 'ABOUT', 'THE', "MIDDY'S", 'WAIST', 'CLENCHING', 'HIS', 'HAND', 'BEHIND', 'AND', 'UTTERING', 'A', 'SHARP', 'WHISTLE'] +7902-96594-0000-76: ref=['SEEMED', 'IN', 'GOOD', 'SPIRITS', 'LAST', 'NIGHT', 'MISTER', 'GURR', 'EH'] +7902-96594-0000-76: hyp=['SEEMING', 'AT', "SPEAR'S", 'LAST', 'NIGHT', 'MISTER', 'GARR', 'EH'] +7902-96594-0001-77: ref=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0001-77: hyp=['YES', 'SIR', 'BUT', 'HE', 'MAY', 'TURN', 'UP', 'ON', 'THE', 'CLIFF', 'AT', 'ANY', 'MOMENT'] +7902-96594-0002-78: ref=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0002-78: hyp=['YES', 'MEN', 'QUITE', 'READY', 'YES', 'SIR'] +7902-96594-0003-79: ref=["THAT'S", 'RIGHT', 'OF', 'COURSE', 'WELL', 'ARMED'] +7902-96594-0003-79: hyp=['THE', 'THREAT', 'OF', 'COURSE', 'WILL', 'ALARMED'] +7902-96594-0004-80: ref=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0004-80: hyp=['SOON', 'AS', 'THE', 'SIGNAL', 'COMES', 'WE', 'SHALL', 'PUSH', 'OFF'] +7902-96594-0005-81: ref=['AWKWARD', 'BIT', 'O', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0005-81: hyp=['AWKWARD', 'BITTER', 'COUNTRY', 'SIR', 'SIX', 'MILES', 'ROW', 'BEFORE', 'YOU', 'CAN', 'FIND', 'A', 'PLACE', 'TO', 'LAND'] +7902-96594-0006-82: ref=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0006-82: hyp=['SO', 'SHALL', 'WE', 'YET', 'SIR'] +7902-96594-0007-83: ref=['YOU', "DON'T", 'THINK', 'MISTER', 'GURR', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0007-83: hyp=['YOU', "DON'T", 'THINK', 'MISTER', 'GORE', 'THAT', 'THEY', 'WOULD', 'DARE', 'TO', 'INJURE', 'HIM', 'IF', 'HE', 'WAS', 'SO', 'UNLUCKY', 'AS', 'TO', 'BE', 'CAUGHT'] +7902-96594-0008-84: ref=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0008-84: hyp=['WELL', 'SIR', 'SAID', 'THE', 'MASTER', 'HESITATING', 'SMUGGLERS', 'ARE', 'SMUGGLERS'] +7902-96594-0009-85: ref=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDEED'] +7902-96594-0009-85: hyp=['CERTAINLY', 'SIR', 'SMUGGLERS', 'ARE', 'SMUGGLERS', 'INDE'] +7902-96594-0010-86: ref=['BEG', 'PARDON', 
'SIR', "DIDN'T", 'MEAN', 'ANY', 'HARM'] +7902-96594-0010-86: hyp=['THEY', 'PARDON', 'SIR', "DIDN'T", 'MEAN', 'ANY', 'HARM'] +7902-96594-0011-87: ref=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTOKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0011-87: hyp=["I'M", 'GETTING', 'VERY', 'ANXIOUS', 'ABOUT', 'MISTER', 'RAYSTROKE', 'START', 'AT', 'ONCE', 'SIR'] +7902-96594-0012-88: ref=['NO', 'WAIT', 'ANOTHER', 'HALF', 'HOUR'] +7902-96594-0012-88: hyp=['NO', 'WAIT', 'ANOTHER', 'AND', 'HALF', 'HOUR'] +7902-96594-0013-89: ref=['VERY', 'ILL', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0013-89: hyp=['VERY', 'ILL', 'ADVISED', 'THING', 'TO', 'DO'] +7902-96594-0014-90: ref=['THEN', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0014-90: hyp=['THAT', 'I', 'MUST', 'REQUEST', 'THAT', 'YOU', 'WILL', 'NOT', 'MAKE', 'IT', 'AGAIN', 'VERY', 'TRUE'] +7902-96594-0015-91: ref=['AWK', 'WARD', 'MISTER', 'GURR', 'AWKWARD'] +7902-96594-0015-91: hyp=['AWKWARD', 'MISTER', 'GARR', 'AWKWARD'] +7902-96594-0016-92: ref=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0016-92: hyp=['YES', 'SIR', 'OF', 'COURSE'] +7902-96594-0017-93: ref=['SAY', 'AWK', 'WARD', 'IN', 'FUTURE', 'NOT', "AWK'ARD"] +7902-96594-0017-93: hyp=['SAY', 'AWKWARD', 'IN', 'THE', 'FUTURE', 'NOT', 'UPWARD'] +7902-96594-0018-94: ref=['I', 'MEAN', 'ALL', 'ALONE', 'BY', 'MYSELF', 'SIR'] +7902-96594-0018-94: hyp=['I', 'MEAN', 'OUR', 'OWN', 'BY', 'MYSELF', 'SIR'] +7902-96594-0019-95: ref=['WHAT', 'FOR', 'THERE', "AREN'T", 'A', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0019-95: hyp=['WHAT', 'FOR', 'THERE', 'ARE', 'TO', 'PUBLIC', 'HOUSE', 'FOR', 'TEN', 'MILES', "DIDN'T", 'MEAN', 'THAT'] +7902-96594-0020-96: ref=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0020-96: hyp=['THEN', 'WHAT', 'DID', 'YOU', 'MEAN', 'SPEAK', 'OUT', 'AND', "DON'T", 'DO', 'THE', 'DOUBLE', 'SHUFFLE', 'ALL', 'OVER', 'MY', 'CLEAN', 'DECK', 'NO', 'SIR'] +7902-96594-0021-97: ref=['HOPPING', 'ABOUT', 'LIKE', 'A', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0021-97: hyp=['HAVING', 'ABOUT', 'THE', 'GOOD', 'CAT', 'ON', 'HOT', 'BRICKS'] +7902-96594-0022-98: ref=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0022-98: hyp=['NOW', 'THEN', 'WHY', 'DO', 'YOU', 'WANT', 'TO', 'GO', 'ASHORE'] +7902-96594-0023-99: ref=['BEG', 'PARDON', "DIDN'T", 'MEAN', 'NOWT', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0023-99: hyp=['THEY', 'PARDON', "DIDN'T", 'MEAN', 'NOW', 'SIR', 'SAID', 'THE', 'SAILOR', 'TOUCHING', 'HIS', 'FORELOCK'] +7902-96594-0024-100: ref=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0024-100: hyp=['YES', 'SIR', 'SAID', 'THE', 'MAN', 'HUMBLY', 'SHALL', 'I', 'GO', 'AT', 'ONCE', 'SIR'] +7902-96594-0025-101: ref=['NO', 'WAIT'] +7902-96594-0025-101: hyp=['NO', 'WAIT'] +7902-96594-0026-102: ref=['KEEP', 'A', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTOKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BOAT'] +7902-96594-0026-102: hyp=['HE', 'WAS', 'SHARP', 'LOOK', 'OUT', 'ON', 'THE', 'CLIFF', 'TO', 'SEE', 'IF', 'MISTER', 'RAYSTROKE', 'IS', 'MAKING', 'SIGNALS', 'FOR', 'A', 'BOAT'] +7902-96594-0027-103: ref=['HE', 'SWUNG', 'ROUND', 'WALKED', 'AFT', 'AND', 'BEGAN', 'SWEEPING', 'THE', 'SHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 
'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] +7902-96594-0027-103: hyp=['HE', 'SWUNG', 'ROUND', 'WALKED', 'OFF', 'AND', 'BEGAN', 'SWEEPING', 'ASHORE', 'AGAIN', 'WITH', 'HIS', 'GLASS', 'WHILE', 'THE', 'MASTER', 'AND', 'DICK', 'EXCHANGED', 'GLANCES', 'WHICH', 'MEANT', 'A', 'GREAT', 'DEAL'] +7902-96594-0028-104: ref=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0028-104: hyp=['AT', 'LAST', 'THE', 'LITTLE', 'LIEUTENANT', 'COULD', 'BEAR', 'THE', 'ANXIETY', 'NO', 'LONGER'] +7902-96594-0029-105: ref=['PIPE', 'AWAY', 'THE', 'MEN', 'TO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0029-105: hyp=['PEG', 'AWAY', 'THEM', 'INTO', 'THAT', 'BOAT', 'THERE', 'HE', 'SAID', 'AND', 'AS', 'THE', 'CREW', 'SPRANG', 'IN'] +7902-96594-0030-106: ref=['NOW', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0030-106: hyp=['NO', 'MISTER', 'GURR', 'HE', 'SAID', "I'M", 'ONLY', 'GOING', 'TO', 'SAY', 'ONE', 'THING', 'TO', 'YOU', 'IN', 'THE', 'WAY', 'OF', 'INSTRUCTIONS', 'YES', 'SIR'] +7902-96594-0031-107: ref=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0031-107: hyp=['BEG', 'PARDON', 'SIR', 'SAID', 'THE', 'MASTER', 'DEPRECATINGLY'] +7902-96594-0032-108: ref=['STEADY', 'MY', 'LADS', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96594-0032-108: hyp=['STEADY', 'MY', 'LAD', 'STEADY', 'CRIED', 'THE', 'MASTER', 'KEEP', 'STROKE', 'AND', 'THEN', 'HE', 'BEGAN', 'TO', 'MAKE', 'PLANS', 'AS', 'TO', 'HIS', 'FIRST', 'PROCEEDINGS', 'ON', 'GETTING', 'ASHORE'] +7902-96595-0000-109: ref=['SAY', 'MESTER', 'GURR', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0000-109: hyp=['SAY', 'MISTER', 'GIRK', 'SAID', 'DICK', 'AFTER', 'ONE', 'OF', 'THESE', 'SEARCHES', 'HE', "WOULDN'T", 'RUN', 'AWAY', 'WHAT'] +7902-96595-0001-110: ref=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0001-110: hyp=['MISTER', 'RAYSTOKE', 'SIR', "DON'T", 'BE', 'A', 'FOOL'] +7902-96595-0002-111: ref=['WHAT', 'CHUCKED', 'HIM', 'OFF', 'YONDER'] +7902-96595-0002-111: hyp=['WHAT', 'SAID', 'TO', 'MORVE', 'YONDER'] +7902-96595-0003-112: ref=['GURR', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BUT', 'KINDLY'] +7902-96595-0003-112: hyp=['GIRK', 'GLANCED', 'ROUND', 'TO', 'SEE', 'IF', 'THE', 'MEN', 'WERE', 'LOOKING', 'AND', 'THEN', 'SAID', 'RATHER', 'HUSKILY', 'BE', 'KINDLY'] +7902-96595-0004-113: ref=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0004-113: hyp=['AH', 'EJACULATED', 'DICK', 'SADLY'] +7902-96595-0005-114: ref=['SAY', 'MESTER', 'GURR', 'SIR', 'WHICH', 'THANKFUL', 'I', 'AM', 'TO', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0005-114: hyp=['SAY', 'MISTER', 'GURSER', 'WHICH', 'THANKFUL', 'I', 'AM', 'FOR', 'YOU', 'FOR', 'SPEAKING', 'SO', 'BUT', 'YOU', "DON'T", 'REALLY', 'THINK', 'AS', 'HE', 'HAS', 'COME', 'TO', 'HARM'] +7902-96595-0006-115: ref=['I', 'HOPE', 'NOT', 'DICK', 'I', 'HOPE', 'NOT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 'ANYTHING', 'SOMETIMES'] +7902-96595-0006-115: hyp=['I', 
'HOPE', 'NOT', 'DICK', 'I', 'OPEN', 'IT', 'BUT', 'SMUGGLERS', "DON'T", 'STAND', 'AT', 'ANYTHING', 'SOMETIMES'] +7902-96595-0007-116: ref=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0007-116: hyp=['I', 'DO', 'ASSURE', 'YOU', "THERE'S", 'NOTHING', 'HERE', 'BUT', 'WHAT', 'YOU', 'MAY', 'SEE'] +7902-96595-0008-117: ref=['IF', "YOU'D", 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GURR', 'GRUFFLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'HIM', 'UP', 'HERE'] +7902-96595-0008-117: hyp=['IF', 'YOU', 'LET', 'ME', 'FINISH', "YOU'D", 'KNOW', 'SAID', 'GRIGGLY', 'ONE', 'OF', 'OUR', 'BOYS', 'IS', 'MISSING', 'SEEN', 'EM', 'UP', 'HERE'] +7902-96595-0009-118: ref=['BOY', 'BOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0009-118: hyp=['BOY', 'ABOUT', 'SEVENTEEN', 'WITH', 'A', 'RED', 'CAP', 'NO', 'SIR', 'INDEED', "I'VE", 'NOT'] +7902-96595-0010-119: ref=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GURR', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] +7902-96595-0010-119: hyp=["DON'T", 'KNOW', 'AS', 'HE', 'HAS', 'BEEN', 'SEEN', 'ABOUT', 'HERE', 'DO', 'YOU', 'SAID', 'GIRL', 'LOOKING', 'AT', 'HER', 'SEARCHINGLY', 'NO', 'SIR'] +7902-96595-0011-120: ref=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0011-120: hyp=['IF', 'SHE', 'KNEW', 'EVIL', 'HAD', 'COME', 'TO', 'THE', 'POOR', 'LAD', 'HER', 'FACE', 'WOULD', 'TELL', 'TALES', 'LIKE', 'PRINT'] +7902-96595-0012-121: ref=['I', 'SAID', 'A', 'LAD', 'BOUT', 'SEVENTEEN', 'IN', 'A', 'RED', 'CAP', 'LIKE', 'YOURS', 'SAID', 'GURR', 'VERY', 'SHORTLY'] +7902-96595-0012-121: hyp=['I', 'STOOD', 'ALOUD', 'ABOUT', 'SEVENTEEN', 'AND', 'A', 'RED', 'CAPLICH', 'YOURS', 'SAID', 'GREW', 'VERY', 'SHORTLY'] +7902-96595-0013-122: ref=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0013-122: hyp=['THE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'AND', 'STARED', 'AS', 'IF', 'HE', "DIDN'T", 'HALF', 'UNDERSTAND', 'THE', 'DRIFT', 'OF', 'WHAT', 'WAS', 'SAID'] +7902-96595-0014-123: ref=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0014-123: hyp=['HERE', 'MY', 'LAD', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: ref=['EH', 'I', 'SAY', "WHERE'S", 'YOUR', 'MASTER'] +7902-96595-0015-124: hyp=['EH', 'I', 'SAY', 'WAS', 'YOUR', 'MASTER'] +7902-96595-0016-125: ref=['GURR', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SIGNING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'THE', 'STEEP', 'TRACK', 'LEADING', 'TOWARD', 'THE', 'HOZE', 'WITH', 'THE', 'RABBITS', 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FURZE', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0016-125: hyp=['GERT', 'TURNED', 'AWAY', 'IMPATIENTLY', 'AGAIN', 'AND', 'SUNNING', 'TO', 'HIS', 'MEN', 'TO', 'FOLLOW', 'THEY', 'ALL', 'BEGAN', 'TO', 'TRAMP', 'UP', 'A', 'STEEP', 'CHECK', 'LEADING', 'TOWARD', 'THE', 'HOSE', 'WITH', 'THE', "RABBIT'S", 'SCUTTLING', 'AWAY', 'AMONG', 'THE', 'FIRS', 'AND', 'SHOWING', 'THEIR', 'WHITE', 'COTTONY', 'TAILS', 'FOR', 'A', 'MOMENT', 'AS', 'THEY', 'DARTED', 'DOWN', 'INTO', 'THEIR', 'HOLES'] +7902-96595-0017-126: ref=['I', 'DUNNO', 'MUTTERED', 'DICK', 'AND', 'A', 'MAN', "CAN'T", 'BE', 
'SURE'] +7902-96595-0017-126: hyp=['I', 'DUNNO', 'MUTTERED', 'DICK', 'AND', 'A', 'MEN', "CAN'T", 'BE', 'SURE'] +7902-96595-0018-127: ref=['GURR', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALLOWER', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0018-127: hyp=['GER', 'SALUTED', 'AND', 'STATED', 'HIS', 'BUSINESS', 'WHILE', 'THE', 'BARONET', 'WHO', 'HAD', 'TURNED', 'SALARY', 'AND', 'MORE', 'CAREWORN', 'THAN', 'HIS', 'LOT', 'DREW', 'A', 'BREATH', 'OF', 'FULL', 'OF', 'RELIEF', 'ONE', 'OF', 'YOUR', 'SHIP', 'BOYS', 'HE', 'SAID'] +7902-96595-0019-128: ref=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0019-128: hyp=['A', 'LAD', 'LOOKING', 'LIKE', 'A', 'COMMON', 'SAILOR', 'AND', 'WEARING', 'A', 'RED', 'CAP', 'NO', 'SAID', 'SIR', 'RISDON'] +7902-96595-0020-129: ref=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0020-129: hyp=['I', 'HAVE', 'SEEN', 'NO', 'ONE', 'ANSWERING', 'TO', 'THE', 'DESCRIPTION', 'HERE'] +7902-96595-0021-130: ref=['BEG', 'PARDON', 'SIR', 'BUT', 'CAN', 'YOU', 'AS', 'A', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0021-130: hyp=['BIG', 'PARTISER', 'BECAME', 'AS', 'GENTLEMAN', 'ASSURE', 'ME', 'THAT', 'HE', 'IS', 'NOT', 'HERE', 'CERTAINLY', 'SAID', 'SIR', 'RISDON'] +7902-96595-0022-131: ref=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0022-131: hyp=['SURELY', 'CRIED', 'SIR', 'RISDON', 'EXCITEDLY'] +7902-96595-0023-132: ref=['SIR', 'RISDON', 'WAS', 'SILENT'] +7902-96595-0023-132: hyp=['SIR', 'RICHMOND', 'WAS', 'SILENT'] +7902-96595-0024-133: ref=['LADY', 'GRAEME', 'LOOKED', 'GHASTLY'] +7902-96595-0024-133: hyp=['LADY', 'GRAHAM', 'LOOKED', 'GHASTLY'] +7902-96595-0025-134: ref=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7902-96595-0025-134: hyp=['YOU', 'DO', 'NOT', 'KNOW', 'NO'] +7975-280057-0000-1008: ref=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0000-1008: hyp=['THESE', 'HATREDS', 'WERE', 'SOON', 'TO', 'MAKE', 'TROUBLE', 'FOR', 'ME', 'OF', 'WHICH', 'I', 'HAD', 'NEVER', 'DREAMED'] +7975-280057-0001-1009: ref=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0001-1009: hyp=['HENRY', 'WASHINGTON', 'YOUNGER', 'MY', 'FATHER', 'REPRESENTED', 'JACKSON', 'COUNTY', 'THREE', 'TIMES', 'IN', 'THE', 'LEGISLATURE', 'AND', 'WAS', 'ALSO', 'A', 'JUDGE', 'OF', 'THE', 'COUNTY', 'COURT'] +7975-280057-0002-1010: ref=['MY', 'MOTHER', 'WHO', 'WAS', 'BURSHEBA', 'FRISTOE', 'OF', 'INDEPENDENCE', 'WAS', 'THE', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOE', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AT', 'MY', 'GRANDFATHER', "FRISTOE'S", 'INSISTENCE'] +7975-280057-0002-1010: hyp=['MY', 'MOTHER', 'WHO', 'WAS', 'PERCEIVER', 'FOR', 'STOVE', 'OF', 'INDEPENDENCE', 'WAS', 'A', 'DAUGHTER', 'OF', 'RICHARD', 'FRISTOW', 'WHO', 'FOUGHT', 'UNDER', 'GENERAL', 'ANDREW', 'JACKSON', 'AT', 'NEW', 'ORLEANS', 'JACKSON', 'COUNTY', 'HAVING', 'BEEN', 'SO', 'NAMED', 'AND', 'MY', 'GRANDFATHER', 'FIRST', 'DOZE', 
'INSISTANTS'] +7975-280057-0003-1011: ref=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0003-1011: hyp=['I', 'CANNOT', 'REMEMBER', 'WHEN', 'I', 'DID', 'NOT', 'KNOW', 'HOW', 'TO', 'SHOOT'] +7975-280057-0004-1012: ref=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEENTH', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'IN', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0004-1012: hyp=['MY', 'BROTHER', 'JAMES', 'WAS', 'BORN', 'JANUARY', 'FIFTEEN', 'EIGHTEEN', 'FORTY', 'EIGHT', 'JOHN', 'AND', 'EIGHTEEN', 'FIFTY', 'ONE', 'AND', 'ROBERT', 'IN', 'DECEMBER', 'EIGHTEEN', 'FIFTY', 'THREE'] +7975-280057-0005-1013: ref=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0005-1013: hyp=['MY', 'ELDEST', 'BROTHER', 'RICHARD', 'DIED', 'IN', 'EIGHTEEN', 'SIXTY'] +7975-280057-0006-1014: ref=['MY', 'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MAIL', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0006-1014: hyp=['MY', 'FATHER', 'WAS', 'IN', 'THE', 'EMPLOY', 'OF', 'THE', 'UNITED', 'STATES', 'GOVERNMENT', 'AND', 'HAD', 'THE', 'MALE', 'CONTRACT', 'FOR', 'FIVE', 'HUNDRED', 'MILES'] +7975-280057-0007-1015: ref=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTPORT', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0007-1015: hyp=['HE', 'HAD', 'STARTED', 'BACK', 'TO', 'HARRISONVILLE', 'IN', 'A', 'BUGGY', 'BUT', 'WAS', 'WAYLAID', 'ONE', 'MILE', 'SOUTH', 'OF', 'WESTWARD', 'A', 'SUBURB', 'OF', 'KANSAS', 'CITY', 'AND', 'BRUTALLY', 'MURDERED', 'FALLING', 'OUT', 'OF', 'HIS', 'BUGGY', 'INTO', 'THE', 'ROAD', 'WITH', 'THREE', 'MORTAL', 'BULLET', 'WOUNDS'] +7975-280057-0008-1016: ref=['MISSUS', 'WASHINGTON', 'WELLS', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', "LEE'S", 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0008-1016: hyp=['MISS', 'WASHINGTON', 'WALES', 'AND', 'HER', 'SON', 'SAMUEL', 'ON', 'THE', 'ROAD', 'HOME', 'FROM', 'KANSAS', 'CITY', 'TO', 'LEE', 'SUMMIT', 'RECOGNIZED', 'THE', 'BODY', 'AS', 'THAT', 'OF', 'MY', 'FATHER'] +7975-280057-0009-1017: ref=['MISSUS', 'WELLS', 'STAYED', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHILE', 'HER', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'IN', 'CAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0009-1017: hyp=['MUST', 'WELL', 'STAY', 'TO', 'GUARD', 'THE', 'REMAINS', 'WHETHER', 'HER', 'SON', 'CARRIED', 'THE', 'NEWS', 'OF', 'THE', 'MURDER', 'TO', 'COLONEL', 'PEABODY', 'OF', 'THE', 'FEDERAL', 'COMMAND', 'WHO', 'WAS', 'THEN', 'IN', 'CAMP', 'AT', 'KANSAS', 'CITY'] +7975-280057-0010-1018: ref=['MISSUS', 'MC', 'CORKLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0010-1018: hyp=['MISS', 'MICROCLE', 'JUMPED', 'FROM', 'THE', 'WINDOW', 'OF', 'THE', 'HOUSE', 'AND', 'ESCAPED'] +7975-280057-0011-1019: ref=['AS', 'THE', 'RAIDERS', 'LEFT', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0011-1019: hyp=['AS', 'THE', 'RAIDERS', 'LIVED', 'ONE', 'OF', 'THEM', 'SHOUTED'] +7975-280057-0012-1020: ref=['NOW', 'OLD', 'LADY', 'CALL', 'ON', 
'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'ON', 'COLE', 'YOUNGER', 'NOW'] +7975-280057-0012-1020: hyp=['NOW', 'LADY', 'CALL', 'ON', 'YOUR', 'PROTECTORS', 'WHY', "DON'T", 'YOU', 'CALL', 'AND', 'CO', 'YOUNGER', 'NOW'] +7975-280057-0013-1021: ref=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0013-1021: hyp=['EVERY', 'KNOT', 'REPRESENTED', 'A', 'HUMAN', 'LIFE'] +7975-280057-0014-1022: ref=['BUT', 'SHE', 'FAILED', 'TO', 'FIND', 'THE', 'COMFORT', 'SHE', 'SOUGHT', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FORM'] +7975-280057-0014-1022: hyp=['BUT', 'SHE', 'FAILED', 'TO', 'FAMILY', 'COMFORT', 'SHE', 'SOUGHT', 'FOR', 'ANNOYANCES', 'CONTINUED', 'IN', 'A', 'MORE', 'AGGRAVATED', 'FORM'] +7975-280057-0015-1023: ref=['TWO', 'MONTHS', 'AFTER', 'THIS', 'INCIDENT', 'THE', 'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DEAD', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0015-1023: hyp=['TWO', 'MONTHS', 'AFTER', 'THE', 'INCIDENT', 'THE', 'SAME', 'PERSECUTORS', 'AGAIN', 'ENTERED', 'OUR', 'HOME', 'IN', 'THE', 'DAY', 'OF', 'THE', 'NIGHT', 'AND', 'AT', 'THE', 'POINT', 'OF', 'A', 'PISTOL', 'TRIED', 'TO', 'FORCE', 'MY', 'MOTHER', 'TO', 'SET', 'FIRE', 'TO', 'HER', 'OWN', 'HOME'] +7975-280057-0016-1024: ref=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'THE', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0016-1024: hyp=['I', 'HAVE', 'ALWAYS', 'FELT', 'THAT', 'THE', 'EXPOSURE', 'TO', 'WHICH', 'SHE', 'WAS', 'SUBJECTED', 'ON', 'THIS', 'CRUEL', 'JOURNEY', 'TOO', 'HARD', 'EVEN', 'FOR', 'A', 'MAN', 'TO', 'TAKE', 'WAS', 'A', 'DIRECT', 'CAUSE', 'OF', 'HER', 'DEATH'] +7975-280057-0017-1025: ref=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLY', 'WHERE', 'SHE', 'WAS', 'HOUNDED', 'CONTINUALLY'] +7975-280057-0017-1025: hyp=['FROM', 'HARRISONVILLE', 'SHE', 'WENT', 'TO', 'WAVERLEY', 'WHERE', 'SHE', 'WAS', 'HANDY', 'CONTINUALLY'] +7975-280057-0018-1026: ref=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'AT', 'LEXINGTON', 'WEEKLY'] +7975-280057-0018-1026: hyp=['ONE', 'OF', 'THE', 'CONDITIONS', 'UPON', 'WHICH', 'HER', 'LIFE', 'WAS', 'SPARED', 'WAS', 'THAT', 'SHE', 'WOULD', 'REPORT', 'IT', 'LESSINGTON', 'WEEKLY'] +7975-280057-0019-1027: ref=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OR', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0019-1027: hyp=['ONE', 'OF', 'MY', 'OLD', 'SCHOOL', 'TEACHERS', 'WHOM', 'I', 'HAVE', 'NEVER', 'SEEN', 'SINCE', 'THE', 'SPRING', 'OF', 'SUMMER', 'OF', 'EIGHTEEN', 'SIXTY', 'TWO', 'IS', 'STEPHEN', 'B', 'ELKINS', 'SENATOR', 'FROM', 'WEST', 'VIRGINIA'] +7975-280057-0020-1028: ref=['WHEN', 'I', 'WAS', 'TAKEN', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280057-0020-1028: hyp=['WHEN', 'I', 'WAS', 'TAKING', 'PRISONER', 'I', 'EXPECTED', 'TO', 'BE', 'SHOT', 'WITHOUT', 'CEREMONY'] +7975-280063-0000-1058: ref=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'WRITE', 
'WHERE', 'COLONEL', 'HAYS', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] +7975-280063-0000-1058: hyp=['WE', 'TOOK', 'THE', 'OATH', 'PERHAPS', 'THREE', 'HUNDRED', 'OF', 'US', 'DOWN', 'ON', 'LUTHER', "MASON'S", 'FARM', 'A', 'FEW', 'MILES', 'FROM', 'WHERE', 'I', 'NOW', 'RIDE', 'WHERE', 'COLONEL', 'HAYES', 'HAD', 'ENCAMPED', 'AFTER', 'INDEPENDENCE'] +7975-280063-0001-1059: ref=['BOONE', 'MUIR', 'AND', 'MYSELF', 'MET', 'COFFEE', 'AND', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0001-1059: hyp=['BOOM', 'YOU', 'AND', 'MYSELF', 'MAKE', 'COFFEE', 'AND', 'THE', 'REST', 'BELOW', 'ROSE', 'HILL', 'ON', 'GRAND', 'RIVER'] +7975-280063-0002-1060: ref=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'TO', 'LONE', 'JACK', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0002-1060: hyp=['ACCORDINGLY', 'I', 'WAS', 'SHORTLY', 'AWAKENED', 'TO', 'ACCOMPANY', 'HIM', 'THE', 'LONG', 'JACK', 'WHERE', 'HE', 'WOULD', 'PERSONALLY', 'MAKE', 'KNOWN', 'THE', 'SITUATION', 'TO', 'THE', 'OTHER', 'COLONELS'] +7975-280063-0003-1061: ref=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVALRYMEN', 'AND', 'TWO', 'PIECES', 'OF', "RABB'S", 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0003-1061: hyp=['FOSTER', 'HAD', 'NEARLY', 'ONE', 'THOUSAND', 'CAVERNMENT', 'AND', 'TWO', 'PIECES', 'OF', 'RABBS', 'INDIANA', 'BATTERY', 'THAT', 'HAD', 'ALREADY', 'MADE', 'FOR', 'ITSELF', 'A', 'NAME', 'FOR', 'HARD', 'FIGHTING'] +7975-280063-0004-1062: ref=['COME', 'IN', 'COLONEL', 'HAYS', 'EXCLAIMED', 'COLONEL', 'COCKRELL'] +7975-280063-0004-1062: hyp=['COMMONED', 'COLONEL', 'HAYES', 'EXCLAIMED', 'COLONEL', 'COCKLE'] +7975-280063-0005-1063: ref=['I', 'THINK', "HE'LL", 'BE', 'RATHER', 'TOUGH', 'MEAT', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIGHT', 'FOR', 'DINNER'] +7975-280063-0005-1063: hyp=['I', 'THINK', "HE'LL", 'BE', 'READY', 'TO', 'HAVE', 'MEET', 'FOR', 'BREAKFAST', 'I', 'REPLIED', 'HE', 'MIGHT', 'BE', 'ALL', 'RIPE', 'FOR', 'DINNER'] +7975-280063-0006-1064: ref=['JACKMAN', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RECAPTURED', 'THE', 'PIECES'] +7975-280063-0006-1064: hyp=['JACK', 'WENT', 'WITH', 'A', 'PARTY', 'OF', 'THIRTY', 'SEASONED', 'MEN', 'CHARGED', 'THE', 'INDIANA', 'GUNS', 'AND', 'CAPTURED', 'THEM', 'BUT', 'MAJOR', 'FOSTER', 'LED', 'A', 'GALLANT', 'CHARGE', 'AGAINST', 'THE', 'INVADERS', 'AND', 'RE', 'CAPTURED', 'THE', 'PIECES'] +7975-280063-0007-1065: ref=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0007-1065: hyp=['WE', 'WERE', 'OUT', 'OF', 'AMMUNITION', 'AND', 'WERE', 'HELPLESS', 'HAD', 'THE', 'FIGHT', 'BEEN', 'PRESSED'] +7975-280063-0008-1066: ref=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0008-1066: hyp=['THEY', 'DID', 'MARK', 'MY', 'CLOTHES', 'IN', 'ONE', 'OR', 'TWO', 'PLACES', 'HOWEVER'] +7975-280063-0009-1067: ref=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JUDGE', 'GEORGE', 'M', 'BENNETT', 'OF', 'MINNEAPOLIS', 'SAID'] +7975-280063-0009-1067: hyp=['MAJOR', 'FOSTER', 'IN', 'A', 'LETTER', 'TO', 'JOE', 'GEORGE', 'I', 'INVITED', 'OF', 'MANY', 'APOLIS', 'SAID'] +7975-280063-0010-1068: 
ref=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'THE', 'DARING', 'YOUNG', 'RIDER', 'AS', 'COLE', 'YOUNGER'] +7975-280063-0010-1068: hyp=['I', 'WAS', 'TOLD', 'BY', 'SOME', 'OF', 'OUR', 'MEN', 'FROM', 'THE', 'WESTERN', 'BORDER', 'OF', 'THE', 'STATE', 'THAT', 'THEY', 'RECOGNIZED', 'A', 'DARING', 'YOUNG', 'RATTERAS', 'COAL', 'YOUNGER'] +7975-280063-0011-1069: ref=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0011-1069: hyp=['ABOUT', 'NINE', 'THIRTY', 'A', 'M', 'I', 'WAS', 'SHOT', 'DOWN'] +7975-280063-0012-1070: ref=['THE', 'WOUNDED', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280063-0012-1070: hyp=['THE', 'WOUNDS', 'OF', 'BOTH', 'FORCES', 'WERE', 'GATHERED', 'UP', 'AND', 'WERE', 'PLACED', 'IN', 'HOUSES'] +7975-280076-0000-1029: ref=['ALTHOUGH', 'EVERY', 'BOOK', 'PURPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'HAS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0000-1029: hyp=['ALTHOUGH', 'EVERY', 'BOOK', 'REPORTING', 'TO', 'NARRATE', 'THE', 'LIVES', 'OF', 'THE', 'YOUNGER', 'BROTHERS', 'IS', 'TOLD', 'OF', 'THE', 'LIBERTY', 'ROBBERY', 'AND', 'IMPLIED', 'THAT', 'WE', 'HAD', 'A', 'PART', 'IN', 'IT', 'THE', 'YOUNGERS', 'WERE', 'NOT', 'SUSPECTED', 'AT', 'THAT', 'TIME', 'NOR', 'FOR', 'A', 'LONG', 'TIME', 'AFTERWARD'] +7975-280076-0001-1030: ref=['IT', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'OLL', 'SHEPHERD', 'RED', 'MONKERS', 'AND', 'BUD', 'PENCE', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTRELL'] +7975-280076-0001-1030: hyp=['HE', 'WAS', 'CLAIMED', 'BY', 'PEOPLE', 'OF', 'LIBERTY', 'THAT', 'THEY', 'POSITIVELY', 'RECOGNIZED', 'AMONG', 'THE', 'ROBBERS', 'ALL', 'SHEPARD', 'REDMOCKERS', 'AND', 'BUD', 'PENCE', 'WHO', 'HAD', 'SEEN', 'SERVICE', 'WITH', 'QUANTREL'] +7975-280076-0002-1031: ref=['THIS', 'RAID', 'WAS', 'ACCOMPANIED', 'BY', 'BLOODSHED', 'JUDGE', 'MC', 'LAIN', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0002-1031: hyp=['THIS', 'RAY', 'WAS', 'ACCOMPANIED', 'BY', 'BLOTCHYARD', 'JOSE', 'MC', 'LANE', 'THE', 'BANKER', 'BEING', 'SHOT', 'THOUGH', 'NOT', 'FATALLY'] +7975-280076-0003-1032: ref=['NO', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0003-1032: hyp=['THOUGH', 'WARRANT', 'WAS', 'ISSUED', 'FOR', 'THE', 'YOUNGERS', 'BUT', 'SUBSEQUENT', 'HISTORIANS', 'HAVE', 'INFERENTIALLY', 'AT', 'LEAST', 'ACCUSED', 'US', 'OF', 'TAKING', 'PART', 'BUT', 'AS', 'I', 'SAID', 'BEFORE', 'THERE', 'IS', 'NO', 'TRUTH', 'IN', 'THE', 'ACCUSATION'] +7975-280076-0004-1033: ref=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBOCOCK', 'BROTHERS', 'BANK', 'AT', 'CORYDON', 'IOWA', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0004-1033: hyp=['JUNE', 'THIRD', 'EIGHTEEN', 'SEVENTY', 'ONE', 'OBEY', "BROTHER'S", 'BANK', 'AT', 'CROYDEN', 'HOUR', 'WAS', 'ROBBED', 'OF', 'FORTY', 'THOUSAND', 'DOLLARS', 'BY', 'SEVEN', 
'MEN', 'IN', 'BROAD', 'DAYLIGHT'] +7975-280076-0005-1034: ref=['IT', 'WAS', 'CHARGED', 'THAT', 'ARTHUR', 'MC', 'COY', 'OR', 'A', 'C', 'MC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0005-1034: hyp=['IT', 'WAS', 'CHARGE', 'THAT', 'OFTEN', 'MA', 'KOY', 'OR', 'A', 'SEA', 'MAC', 'COY', 'AND', 'MYSELF', 'HAD', 'BEEN', 'PARTICIPANTS', 'IN', 'THE', "GAD'S", 'HILL', 'AFFAIR', 'AND', 'THE', 'TWO', 'STAGE', 'ROBBERIES'] +7975-280076-0006-1035: ref=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0006-1035: hyp=['THE', 'PARTS', 'OF', 'THIS', 'LETTER', 'NOW', 'RELEVANT', 'ARE', 'AS', 'FOLLOWS'] +7975-280076-0007-1036: ref=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0007-1036: hyp=['YOU', 'MAY', 'USE', 'THIS', 'LETTER', 'IN', 'YOUR', 'OWN', 'WAY'] +7975-280076-0008-1037: ref=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 'CHARGED'] +7975-280076-0008-1037: hyp=['I', 'WILL', 'GIVE', 'YOU', 'THIS', 'OUTLINE', 'AND', 'SKETCH', 'OF', 'MY', 'WHEREABOUTS', 'AND', 'ACTIONS', 'AT', 'THE', 'TIME', 'OF', 'CERTAIN', 'ROBBERIES', 'WITH', 'WHICH', 'I', 'AM', 'CHARGED'] +7975-280076-0009-1038: ref=['AT', 'THE', 'TIME', 'OF', 'THE', 'GALLATIN', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'IN', 'ELLIS', 'COUNTY', 'TEXAS', 'CATTLE', 'THAT', 'I', 'BOUGHT', 'FROM', 'PLEAS', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0009-1038: hyp=["IT'S", 'THE', 'TIME', 'OF', 'THE', 'GALLOP', 'AND', 'BANK', 'ROBBERY', 'I', 'WAS', 'GATHERING', 'CATTLE', 'AND', 'ILLIS', 'COUNTY', 'TEXAS', 'CATTLETTA', 'BROUGHT', 'FROM', 'PLACE', 'TAYLOR', 'AND', 'RECTOR'] +7975-280076-0010-1039: ref=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SHERIFF', 'BARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0010-1039: hyp=['THIS', 'CAN', 'BE', 'PROVED', 'BY', 'BOTH', 'OF', 'THEM', 'ALSO', 'BY', 'SIR', 'PARKLEY', 'AND', 'FIFTY', 'OTHER', 'RESPECTABLE', 'MEN', 'OF', 'THAT', 'COUNTY'] +7975-280076-0011-1040: ref=['I', 'BROUGHT', 'THE', 'CATTLE', 'TO', 'KANSAS', 'THAT', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0011-1040: hyp=['I', 'BROUGHT', 'THE', 'CATTLE', 'THE', 'KANSAS', 'SET', 'FALL', 'AND', 'REMAINED', 'IN', 'SAINT', 'CLAIR', 'COUNTY', 'UNTIL', 'FEBRUARY'] +7975-280076-0012-1041: ref=['I', 'THEN', 'WENT', 'TO', 'ARKANSAS', 'AND', 'RETURNED', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0012-1041: hyp=['I', 'THEN', 'WENT', 'TO', 'OUR', 'CONSOLE', 'AND', 'RETURN', 'TO', 'SAINT', 'CLAIR', 'COUNTY', 'ABOUT', 'THE', 'FIRST', 'OF', 'MAY'] +7975-280076-0013-1042: ref=['I', 'WENT', 'TO', 'KANSAS', 'WHERE', 'OUR', 'CATTLE', 'WERE', 'IN', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', "RIDGE'S"] +7975-280076-0013-1042: hyp=['AND', 'WENT', 'TO', 'KANSAS', 'WHERE', 'OUR', 'CATTLE', 'BURNED', 'WOODSON', 'COUNTY', 'AT', 'COLONEL', 'RICHES'] +7975-280076-0014-1043: ref=['DURING', 'THE', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 'SAINT', 'CLAIR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0014-1043: hyp=['DURING', 'SUMMER', 'I', 'WAS', 'EITHER', 'IN', 
'SAINT', 'CLAIR', 'OR', 'JACKSON', 'OR', 'KANSAS', 'BUT', 'AS', 'THERE', 'WAS', 'NO', 'ROBBERY', 'COMMITTED', 'THAT', 'SUMMER', 'IT', 'MAKES', 'NO', 'DIFFERENCE', 'WHERE', 'I', 'WAS'] +7975-280076-0015-1044: ref=['I', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', "WEBB'S"] +7975-280076-0015-1044: hyp=['AND', 'WENT', 'THROUGH', 'INDEPENDENCE', 'AND', 'FROM', 'THERE', 'TO', 'ACE', 'WEBBS'] +7975-280076-0016-1045: ref=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'L', 'W', "TWYMAN'S"] +7975-280076-0016-1045: hyp=['THERE', 'I', 'TOOK', 'DINNER', 'AND', 'THEN', 'WENT', 'TO', 'DOCTOR', 'OLD', 'W', 'TWIMMAN'] +7975-280076-0017-1046: ref=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0017-1046: hyp=['OUR', 'BUSINESS', 'THERE', 'WAS', 'TO', 'SEE', 'E', 'P', 'WEST', 'HE', 'WAS', 'NOT', 'AT', 'HOME', 'BUT', 'THE', 'FAMILY', 'WILL', 'REMEMBER', 'THAT', 'WE', 'WERE', 'THERE'] +7975-280076-0018-1047: ref=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STAYED', 'IN', 'THE', 'CITY', 'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'THROUGH', 'THE', 'CITY'] +7975-280076-0018-1047: hyp=['WE', 'CROSSED', 'ON', 'THE', 'BRIDGE', 'STATE', 'IN', 'THE', 'CITY', 'ALL', 'NIGHT', 'AND', 'THE', 'NEXT', 'MORNING', 'WE', 'RODE', 'UP', 'TO', 'THE', 'CITY'] +7975-280076-0019-1048: ref=['I', 'MET', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUDSPETH'] +7975-280076-0019-1048: hyp=['AMID', 'SEVERAL', 'OF', 'MY', 'FRIENDS', 'AMONG', 'THEM', 'WAS', 'BOB', 'HUSBATH'] +7975-280076-0020-1049: ref=['WE', 'WERE', 'NOT', 'ON', 'GOOD', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0020-1049: hyp=['WE', 'WERE', 'NOT', 'ON', 'GOOD', 'TERMS', 'AT', 'THE', 'TIME', 'NOR', 'HAVE', 'WE', 'BEEN', 'FOR', 'SEVERAL', 'YEARS'] +7975-280076-0021-1050: ref=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0021-1050: hyp=['POOR', 'JOHN', 'HE', 'HAS', 'BEEN', 'HUNTED', 'DOWN', 'AND', 'SHOT', 'LIKE', 'A', 'WILD', 'BEAST', 'AND', 'NEVER', 'WAS', 'A', 'BOY', 'MORE', 'INNOCENT'] +7975-280076-0022-1051: ref=['DOCTOR', 'L', 'LEWIS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0022-1051: hyp=['DOCTOR', 'L', 'LOUIS', 'WAS', 'HIS', 'PHYSICIAN'] +7975-280076-0023-1052: ref=['THERE', 'WERE', 'FIFTY', 'OR', 'A', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0023-1052: hyp=['THERE', 'WERE', 'FIFTY', 'OR', 'HUNDRED', 'PERSONS', 'THERE', 'WHO', 'WILL', 'TESTIFY', 'IN', 'ANY', 'COURT', 'THAT', 'JOHN', 'AND', 'I', 'WERE', 'THERE'] +7975-280076-0024-1053: ref=['HELVIN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENTON', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SAT', 'IN', 'FRONT', 'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISSUS', 'FICKLE', 'AFTER', 'SERVICE'] +7975-280076-0024-1053: hyp=['HELVAN', 'FICKLE', 'AND', 'WIFE', 'OF', 'GREENSON', 'VALLEY', 'WERE', 'ATTENDING', 'THE', 'SPRINGS', 'AT', 'THAT', 'TIME', 'AND', 
'EITHER', 'OF', 'THEM', 'WILL', 'TESTIFY', 'TO', 'THE', 'ABOVE', 'FOR', 'JOHN', 'AND', 'I', 'SET', 'IN', 'FRONT', 'OF', 'MISTER', 'SMITH', 'WHILE', 'HE', 'WAS', 'PREACHING', 'AND', 'WAS', 'IN', 'HIS', 'COMPANY', 'FOR', 'A', 'FEW', 'MOMENTS', 'TOGETHER', 'WITH', 'HIS', 'WIFE', 'AND', 'MISTER', 'AND', 'MISS', 'FICKLE', 'AFTER', 'THE', 'SERVICE'] +7975-280076-0025-1054: ref=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CARROLL', 'PARISH', 'LOUISIANA'] +7975-280076-0025-1054: hyp=['ABOUT', 'THE', 'LAST', 'OF', 'DECEMBER', 'EIGHTEEN', 'SEVENTY', 'THREE', 'I', 'ARRIVED', 'IN', 'CAROL', 'PARISH', 'LOUISIANA'] +7975-280076-0026-1055: ref=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0026-1055: hyp=['I', 'STAYED', 'THERE', 'UNTIL', 'THE', 'EIGHTH', 'OF', 'FEBRUARY', 'EIGHTEEN', 'SEVENTY', 'FOUR'] +7975-280076-0027-1056: ref=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THAT', 'TIME', 'I', 'WAS', 'AT', 'NEOSHO', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'RAY'] +7975-280076-0027-1056: hyp=['I', 'HAD', 'NOT', 'HEARD', 'OF', 'THAT', 'WHEN', 'I', 'WROTE', 'THE', 'LETTER', 'OF', 'EIGHTEEN', 'SEVENTY', 'FOUR', 'AND', 'TO', 'CORRECT', 'ANY', 'MISAPPREHENSION', 'THAT', 'MIGHT', 'BE', 'CREATED', 'BY', 'OMITTING', 'IT', 'I', 'WILL', 'SAY', 'THAT', 'AT', 'THE', 'TIME', 'I', 'WAS', 'AT', 'NEOSH', 'O', 'KANSAS', 'WITH', 'A', 'DROVE', 'OF', 'CATTLE', 'WHICH', 'I', 'SOLD', 'TO', 'MAJOR', 'WRAYE'] +7975-280076-0028-1057: ref=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'ADAIR', 'IOWA', 'THAT', 'THERE', 'FIRST', 'APPEARED', 'A', 'DELIBERATE', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'IN', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280076-0028-1057: hyp=['IT', 'WAS', 'IMMEDIATELY', 'FOLLOWING', 'THE', 'ROCK', 'ISLAND', 'ROBBERY', 'AT', 'EIGHT', 'AIR', 'IOWA', 'THAT', 'THEIR', 'FIRST', 'APPEARED', 'A', 'DELIVERED', 'ENLISTMENT', 'OF', 'SOME', 'LOCAL', 'PAPERS', 'AND', 'MISSOURI', 'TO', 'CONNECT', 'US', 'WITH', 'THIS', 'ROBBERY'] +7975-280084-0000-1090: ref=['I', 'URGED', 'ON', 'THE', 'BOYS', 'THAT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0000-1090: hyp=['I', 'URGED', 'ON', 'THE', 'BOYS', 'AT', 'WHATEVER', 'HAPPENED', 'WE', 'SHOULD', 'NOT', 'SHOOT', 'ANY', 'ONE'] +7975-280084-0001-1091: ref=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRY', 'GOODS', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'THEY', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0001-1091: hyp=['WHEN', 'MILLER', 'AND', 'I', 'CROSSED', 'THE', 'BRIDGE', 'THE', 'THREE', 'WERE', 'ON', 'SOME', 'DRY', 'GOOD', 'BOXES', 'AT', 'THE', 'CORNER', 'NEAR', 'THE', 'BANK', 'AND', 'AS', 'SOON', 'AS', 'I', 'SAW', 'US', 'WENT', 'RIGHT', 'INTO', 'THE', 'BANK', 'INSTEAD', 'OF', 'WAITING', 'FOR', 'US', 'TO', 'GET', 'THERE'] +7975-280084-0002-1092: ref=['WHEN', 'WE', 'CAME', 'UP', 'I', 'TOLD', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] 
+7975-280084-0002-1092: hyp=['WHEN', 'WE', 'CAME', 'UP', 'I', 'TOLD', 'MILLER', 'TO', 'SHUT', 'THE', 'BANK', 'DOOR', 'WHICH', 'THEY', 'HAD', 'LEFT', 'OPEN', 'IN', 'THEIR', 'HURRY'] +7975-280084-0003-1093: ref=['J', 'S', 'ALLEN', 'WHOSE', 'HARDWARE', 'STORE', 'WAS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'AROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0003-1093: hyp=['J', 'HELEN', 'WHOSE', 'HARD', 'WORKED', 'ALWAYS', 'NEAR', 'TRIED', 'TO', 'GO', 'INTO', 'THE', 'BANK', 'BUT', 'MILLER', 'ORDERED', 'HIM', 'AWAY', 'AND', 'HE', 'RAN', 'ROUND', 'THE', 'CORNER', 'SHOUTING'] +7975-280084-0004-1094: ref=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0004-1094: hyp=['GET', 'YOUR', 'GUNS', 'BOYS', "THEY'RE", 'ROBBING', 'THE', 'BANK'] +7975-280084-0005-1095: ref=['AND', 'I', 'CALLED', 'TO', 'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'A', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0005-1095: hyp=['AND', 'I', 'CALL', 'TO', 'HIM', 'TO', 'GET', 'INSIDE', 'AT', 'THE', 'SAME', 'TIME', 'FIRING', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'AIR', 'AS', 'THE', 'SIGNAL', 'TO', 'THE', 'THREE', 'BOYS', 'AT', 'THE', 'BRIDGE', 'THAT', 'WE', 'HAD', 'BEEN', 'DISCOVERED'] +7975-280084-0006-1096: ref=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0006-1096: hyp=['ALMOST', 'AT', 'THIS', 'INSTANT', 'I', 'HEARD', 'A', 'PISTOL', 'SHOT', 'IN', 'THE', 'BANK'] +7975-280084-0007-1097: ref=['CHADWELL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JOINED', 'US', 'SHOUTING', 'TO', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0007-1097: hyp=['CHED', 'WILL', 'WOODS', 'AND', 'JIM', 'RODE', 'UP', 'AND', 'JARNDYCE', 'SHOUTING', 'TO', 'THE', 'PEOPLE', 'IN', 'THE', 'STREET', 'TO', 'GET', 'INSIDE', 'AND', 'FIRING', 'THEIR', 'PISTOLS', 'TO', 'EMPHASIZE', 'THEIR', 'COMMANDS'] +7975-280084-0008-1098: ref=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0008-1098: hyp=['IF', 'ANY', 'OF', 'OUR', 'PARTY', 'SHOT', 'HIM', 'IT', 'MUST', 'HAVE', 'BEEN', 'WOODS'] +7975-280084-0009-1099: ref=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0009-1099: hyp=['MEANTIME', 'THE', 'STREET', 'WAS', 'GETTING', 'UNCOMFORTABLY', 'HOT'] +7975-280084-0010-1100: ref=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TRY', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0010-1100: hyp=['EVERY', 'TIME', 'I', 'SAW', 'ANY', 'ONE', 'WITH', 'A', 'BEAD', 'ON', 'ME', 'I', 'WOULD', 'DROP', 'OFF', 'MY', 'HORSE', 'AND', 'TROT', 'TO', 'DRIVE', 'THE', 'SHOOTER', 'INSIDE', 'BUT', 'I', 'COULD', 'NOT', 'SEE', 'IN', 'EVERY', 'DIRECTION'] +7975-280084-0011-1101: ref=['DOCTOR', 'WHEELER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0011-1101: hyp=['DOCTOR', 'WHALER', 'WHO', 'HAD', 'GONE', 'UPSTAIRS', 'IN', 'THE', 'HOTEL', 'SHOT', 'MILLER', 'AND', 'HE', 'LAY', 'DYING', 'IN', 'THE', 'STREET'] +7975-280084-0012-1102: ref=['CHANGING', 'HIS', 
'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0012-1102: hyp=['CHANGING', 'HIS', 'PISTOL', 'TO', 'HIS', 'LEFT', 'HAND', 'BOB', 'RAN', 'OUT', 'AND', 'MOUNTED', "MILLER'S", 'MARE'] +7975-280084-0013-1103: ref=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0013-1103: hyp=['WHAT', 'KEPT', 'YOU', 'SO', 'LONG', 'I', 'ASKED', 'PITTS'] +7975-280084-0014-1104: ref=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTHFIELD', 'NARRATOR'] +7975-280084-0014-1104: hyp=['AS', 'TO', 'THE', 'REST', 'OF', 'THE', 'AFFAIR', 'INSIDE', 'THE', 'BANK', 'I', 'TAKE', 'THE', 'ACCOUNT', 'OF', 'A', 'NORTH', 'FIELD', 'NARRATOR'] +7975-280084-0015-1105: ref=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0015-1105: hyp=["WHERE'S", 'THE', 'MONEY', 'OUTSIDE', 'THE', 'SAFE', 'BOB', 'ASKED'] +7975-280084-0016-1106: ref=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'PITTS', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0016-1106: hyp=['THE', 'SHUTTERS', 'WERE', 'CLOSED', 'AND', 'THIS', 'CAUSED', 'BUNKER', 'AN', "INSTANT'S", 'DELAY', 'THAT', 'WAS', 'ALMOST', 'FATAL', 'FITZ', 'CHASED', 'HIM', 'WITH', 'A', 'BULLET'] +7975-280084-0017-1107: ref=['THE', 'FIRST', 'ONE', 'MISSED', 'HIM', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280084-0017-1107: hyp=['THE', 'FIRST', 'ONE', 'MISTING', 'BUT', 'THE', 'SECOND', 'WENT', 'THROUGH', 'HIS', 'RIGHT', 'SHOULDER'] +7975-280085-0000-1071: ref=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0000-1071: hyp=['THAT', 'NIGHT', 'IT', 'STARTED', 'TO', 'RAIN', 'AND', 'WE', 'WORE', 'OUT', 'OUR', 'HORSES'] +7975-280085-0001-1072: ref=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAMPED', 'BETWEEN', 'ELYSIAN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0001-1072: hyp=['FRIDAY', 'WE', 'MOVED', 'TOWARD', 'WATERVILLE', 'AND', 'FRIDAY', 'NIGHT', 'WE', 'CAMPED', 'BETWEEN', 'THE', 'LUCIEN', 'AND', 'GERMAN', 'LAKE'] +7975-280085-0002-1073: ref=["BOB'S", 'SHATTERED', 'ELBOW', 'WAS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MANKATO'] +7975-280085-0002-1073: hyp=['BOB', 'SATURDAIL', 'BOWS', 'REQUIRING', 'FREQUENT', 'ATTENTION', 'AND', 'THAT', 'NIGHT', 'WE', 'MADE', 'ONLY', 'NINE', 'MILES', 'AND', 'MONDAY', 'MONDAY', 'NIGHT', 'AND', 'TUESDAY', 'WE', 'SPENT', 'IN', 'A', 'DESERTED', 'FARM', 'HOUSE', 'CLOSE', 'TO', 'MANKADO'] +7975-280085-0003-1074: ref=['THAT', 'DAY', 'A', 'MAN', 'NAMED', 'DUNNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0003-1074: hyp=['THAT', 'THEY', 'A', 'MAN', 'NAMED', 'DARNING', 'DISCOVERED', 'US', 'AND', 'WE', 'TOOK', 'HIM', 'PRISONER'] +7975-280085-0004-1075: ref=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 'AGREED', 'NOT', 'TO'] +7975-280085-0004-1075: hyp=['FINALLY', 'WE', 'ADMINISTERED', 'TO', 'HIM', 'AN', 'OATH', 'NOT', 'TO', 'BETRAY', 'OUR', 'WHEREABOUTS', 'UNTIL', 'WE', 'HAD', 'TIME', 'TO', 'MAKE', 'OUR', 'ESCAPE', 'AND', 'HE', 
'AGREED', 'NOT', 'TO'] +7975-280085-0005-1076: ref=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POSTHASTE', 'INTO', 'MANKATO', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0005-1076: hyp=['NO', 'SOONER', 'HOWEVER', 'WAS', 'HE', 'RELEASED', 'THAN', 'HE', 'MADE', 'POST', 'TASTE', 'INTO', 'MANKE', 'TO', 'ANNOUNCE', 'OUR', 'PRESENCE', 'AND', 'IN', 'A', 'FEW', 'MINUTES', 'ANOTHER', 'POSSE', 'WAS', 'LOOKING', 'FOR', 'US'] +7975-280085-0006-1077: ref=['THE', 'WHISTLE', 'ON', 'THE', 'OIL', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0006-1077: hyp=['THE', 'WHISTLE', 'ON', 'THE', 'ORE', 'MILL', 'BLEW', 'AND', 'WE', 'FEARED', 'THAT', 'IT', 'WAS', 'A', 'SIGNAL', 'THAT', 'HAD', 'BEEN', 'AGREED', 'UPON', 'TO', 'ALARM', 'THE', 'TOWN', 'IN', 'CASE', 'WE', 'WERE', 'OBSERVED', 'BUT', 'WE', 'WERE', 'NOT', 'MOLESTED'] +7975-280085-0007-1078: ref=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 'IT', 'PILLOWED', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0007-1078: hyp=['HE', 'HAD', 'TO', 'SLEEP', 'WITH', 'A', 'PILLOWED', 'ON', 'MY', 'BREAST', 'JIM', 'BEING', 'ALSO', 'A', 'CRIPPLED', 'WITH', 'A', 'WOUND', 'IN', 'HIS', 'SHOULDER', 'AND', 'WE', 'COULD', 'NOT', 'GET', 'MUCH', 'SLEEP'] +7975-280085-0008-1079: ref=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0008-1079: hyp=['BUT', 'THEY', 'SOON', 'AFTER', 'GOT', 'CLOSE', 'ENOUGH', 'SO', 'THAT', 'ONE', 'OF', 'THEM', 'BROKE', 'MY', 'WALKING', 'STICK', 'WITH', 'A', 'SHOT'] +7975-280085-0009-1080: ref=['WE', 'WERE', 'IN', 'SIGHT', 'OF', 'OUR', 'LONG', 'SOUGHT', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0009-1080: hyp=['WE', 'WERE', 'INSIDE', 'OF', 'OUR', 'LONG', 'SOWED', 'HORSES', 'WHEN', 'THEY', 'CUT', 'US', 'OFF', 'FROM', 'THE', 'ANIMALS', 'AND', 'OUR', 'LAST', 'HOPE', 'WAS', 'GONE'] +7975-280085-0010-1081: ref=['SIX', 'STEPPED', 'TO', 'THE', 'FRONT', 'SHERIFF', 'GLISPIN', 'COLONEL', 'T', 'L', 'VOUGHT', 'B', 'M', 'RICE', 'G', 'A', 'BRADFORD', 'C', 'A', 'POMEROY', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0010-1081: hyp=['SIX', 'STEPS', 'TO', 'THE', 'FRONT', 'SHERIFF', 'CLISPIN', 'COLONEL', 'T', 'L', 'WALT', 'B', 'AND', 'RICE', 'G', 'BRADFORD', 'C', 'A', 'POMROY', 'AND', 'S', 'J', 'SEVERSON'] +7975-280085-0011-1082: ref=['FORMING', 'IN', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0011-1082: hyp=['FORMING', 'A', 'LINE', 'FOUR', 'PACES', 'APART', 'HE', 'ORDERED', 'THEM', 'TO', 'ADVANCE', 'RAPIDLY', 'AND', 'CONCENTRATE', 'THE', 'FIRE', 'OF', 'THE', 'WHOLE', 'LINE', 'THE', 'INSTANT', 'THE', 'ROBBERS', 'WERE', 'DISCOVERED'] +7975-280085-0012-1083: ref=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0012-1083: hyp=['MAKE', 'FOR', 'THE', 'HORSES', 'I', 'SAID', 'EVERY', 'MAN', 'FOR', 'HIMSELF'] +7975-280085-0013-1084: 
ref=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'FOR', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 'LINE', 'JUST', 'CHARGE', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0013-1084: hyp=['THERE', 'IS', 'NO', 'USE', 'STOPPING', 'TO', 'PICK', 'UP', 'A', 'COMRADE', 'HERE', 'TILL', 'WE', "CAN'T", 'GET', 'HIM', 'THROUGH', 'THE', 'LINE', 'JUST', 'SHARS', 'THEM', 'AND', 'MAKE', 'IT', 'IF', 'WE', 'CAN'] +7975-280085-0014-1085: ref=['I', 'GOT', 'UP', 'AS', 'THE', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0014-1085: hyp=['I', 'GOT', 'UP', 'AS', 'A', 'SIGNAL', 'FOR', 'THE', 'CHARGE', 'AND', 'WE', 'FIRED', 'ONE', 'VOLLEY'] +7975-280085-0015-1086: ref=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LINE', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0015-1086: hyp=['ONE', 'OF', 'THE', 'FELLOWS', 'IN', 'THE', 'OUTER', 'LAND', 'NOT', 'BRAVE', 'ENOUGH', 'HIMSELF', 'TO', 'JOIN', 'THE', 'VOLUNTEERS', 'WHO', 'HAD', 'COME', 'IN', 'TO', 'BEAT', 'US', 'OUT', 'WAS', 'NOT', 'DISPOSED', 'TO', 'BELIEVE', 'IN', 'THE', 'SURRENDER', 'AND', 'HAD', 'HIS', 'GUN', 'LEVELLED', 'ON', 'BOB', 'IN', 'SPITE', 'OF', 'THE', 'HANDKERCHIEF', 'WHICH', 'WAS', 'WAVING', 'AS', 'A', 'FLAG', 'OF', 'TRUCE'] +7975-280085-0016-1087: ref=['SHERIFF', 'GLISPIN', 'OF', 'WATONWAN', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0016-1087: hyp=['SURE', 'OF', 'GLISPIN', 'OF', 'WATERWAM', 'COUNTY', 'WHO', 'WAS', 'TAKING', "BOB'S", 'PISTOL', 'FROM', 'HIM', 'WAS', 'ALSO', 'SHOUTING', 'TO', 'THE', 'FELLOW'] +7975-280085-0017-1088: ref=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTHFIELD', 'I', 'HAD', 'ELEVEN', 'WOUNDS'] +7975-280085-0017-1088: hyp=['INCLUDING', 'THOSE', 'RECEIVED', 'IN', 'AND', 'ON', 'THE', 'WAY', 'FROM', 'NORTH', 'FIELD', 'I', 'HAD', 'ELEVEN', 'ONES'] +7975-280085-0018-1089: ref=['AND', 'SHERIFF', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTORATE', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +7975-280085-0018-1089: hyp=['IN', 'CHEER', 'OF', "GLISPIN'S", 'ORDER', 'NOT', 'TO', 'SHOOT', 'WAS', 'THE', 'BEGINNING', 'OF', 'THE', 'PROTECTOR', 'THAT', 'MINNESOTA', 'PEOPLE', 'ESTABLISHED', 'OVER', 'US'] +8131-117016-0000-1303: ref=['CAPTAIN', 'MURDOCH'] +8131-117016-0000-1303: hyp=['CAPTAIN', 'MURDOCK'] +8131-117016-0001-1304: ref=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0001-1304: hyp=['BUT', 'MARSPORT', 'HAD', 'FLOURISHED', 'ENOUGH', 'TO', 'KILL', 'IT', 'OFF'] +8131-117016-0002-1305: ref=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAW', 'ENFORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0002-1305: hyp=['SOME', 'OF', 'MARS', 'LAWS', 'DATED', 'FROM', 'THE', 'TIME', 'WHEN', 'LAWN', 'FORCEMENT', 'HAD', 'BEEN', 'HAMPERED', 'BY', 'LACK', 'OF', 'MEN', 'RATHER', 'THAN', 'BY', 'THE', 'TYPE', 'OF', 'MEN'] +8131-117016-0003-1306: ref=['THE', 'STONEWALL', 'GANG', 'NUMBERED', 'PERHAPS', 
'FIVE', 'HUNDRED'] +8131-117016-0003-1306: hyp=['THE', 'STONE', 'WALL', 'GANG', 'NUMBERED', 'PERHAPS', 'FIVE', 'HUNDRED'] +8131-117016-0004-1307: ref=['EVEN', 'DERELICTS', 'AND', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0004-1307: hyp=['EVEN', 'DEAR', 'ALEXAM', 'FAILURES', 'HAD', 'TO', 'EAT', 'THERE', 'WERE', 'STORIES', 'AND', 'SHOPS', 'THROUGHOUT', 'THE', 'DISTRICT', 'WHICH', 'EKED', 'OUT', 'SOME', 'KIND', 'OF', 'A', 'MARGINAL', 'LIVING'] +8131-117016-0005-1308: ref=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RACKETEERS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0005-1308: hyp=['THEY', 'WERE', 'SAFE', 'FROM', 'PROTECTION', 'RAGATIRS', 'THERE', 'NONE', 'BOTHERED', 'TO', 'COME', 'SO', 'FAR', 'OUT'] +8131-117016-0006-1309: ref=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0006-1309: hyp=['THE', 'SHOPKEEPERS', 'AND', 'SOME', 'OF', 'THE', 'LESS', 'UNFORTUNATE', 'PEOPLE', 'THERE', 'HAD', 'PROTESTED', 'LOUD', 'ENOUGH', 'TO', 'REACH', 'CLEAR', 'BACK', 'TO', 'EARTH'] +8131-117016-0007-1310: ref=['CAPTAIN', 'MURDOCH', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0007-1310: hyp=['CAPTAIN', 'MURDOCK', 'WAS', 'AN', 'UNKNOWN', 'FACTOR', 'AND', 'NOW', 'WAS', 'ASKING', 'FOR', 'MORE', 'MEN'] +8131-117016-0008-1311: ref=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0008-1311: hyp=['THE', 'PRESSURE', 'WAS', 'ENOUGH', 'TO', 'GET', 'THEM', 'FOR', 'HIM'] +8131-117016-0009-1312: ref=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0009-1312: hyp=['GORDON', 'REPORTED', 'FOR', 'WORK', 'WITH', 'A', 'SENSE', 'OF', 'THE', 'BOTTOM', 'FALLING', 'OUT', 'MIXED', 'WITH', 'A', 'VAGUE', 'RELIEF'] +8131-117016-0010-1313: ref=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0010-1313: hyp=["I'VE", 'GOT', 'A', 'FREE', 'HAND', 'AND', "WE'RE", 'GOING', 'TO', 'RUN', 'THIS', 'THE', 'WAY', 'WE', 'WOULD', 'ON', 'EARTH'] +8131-117016-0011-1314: ref=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERYONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0011-1314: hyp=['YOUR', 'JOB', 'IS', 'TO', 'PROTECT', 'THE', 'CITIZENS', 'HERE', 'AND', 'THAT', 'MEANS', 'EVERY', 'ONE', 'NOT', 'BREAKING', 'THE', 'LAWS', 'WHETHER', 'YOU', 'FEEL', 'LIKE', 'IT', 'OR', 'NOT', 'NO', 'GRAFT'] +8131-117016-0012-1315: ref=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKEDOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONEWALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0012-1315: hyp=['THE', 'FIRST', 'MAN', 'MAKING', 'A', 'SHAKE', 'DOWN', 'WILL', 'GET', 'THE', 'SAME', 'TREATMENT', "WE'RE", 'GOING', 'TO', 'USE', 'ON', 'THE', 'STONE', 'WALL', 'BOYS', "YOU'LL", 'GET', 'DOUBLE', 'PAY', 'HERE', 'AND', 'YOU', 'CAN', 'LIVE', 'ON', 'IT'] +8131-117016-0013-1316: ref=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 
'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0013-1316: hyp=['HE', 'PICKED', 'OUT', 'FIVE', 'OF', 'THE', 'MEN', 'INCLUDING', 'GORDON', 'YOU', 'FIVE', 'WILL', 'COME', 'WITH', 'ME'] +8131-117016-0014-1317: ref=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEAM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TONIGHT', 'PICK', 'ANY', 'ROUTE', "THAT'S", 'OPEN', 'OKAY', 'MEN', "LET'S", 'GO'] +8131-117016-0014-1317: hyp=['THE', 'REST', 'OF', 'YOU', 'CAN', 'TEEM', 'UP', 'ANY', 'WAY', 'YOU', 'WANT', 'TO', 'NIGHT', 'PICK', 'ANY', 'ROUGH', "THAT'S", 'OPEN', 'OH', 'CAME', 'AND', "LET'S", 'GO'] +8131-117016-0015-1318: ref=['BRUCE', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MURDOCH'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0015-1318: hyp=['BRUSH', 'GORDON', 'GRINNED', 'SLOWLY', 'AS', 'HE', 'SWUNG', 'THE', 'STICK', 'AND', "MARDOC'S", 'EYES', 'FELL', 'ON', 'HIM', 'EARTH', 'COP'] +8131-117016-0016-1319: ref=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0016-1319: hyp=['TWO', 'YEARS', 'GORDON', 'ADMITTED'] +8131-117016-0017-1320: ref=['FOR', 'A', 'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0017-1320: hyp=['FOR', 'A', 'SECOND', 'GORDON', 'CURSED', 'HIMSELF'] +8131-117016-0018-1321: ref=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0018-1321: hyp=['HE', 'BEGAN', 'WONDERING', 'ABOUT', 'SECURITY', 'THEN'] +8131-117016-0019-1322: ref=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0019-1322: hyp=['NOBODY', 'HAD', 'TRIED', 'TO', 'GET', 'IN', 'TOUCH', 'WITH', 'HIM'] +8131-117016-0020-1323: ref=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHOR', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0020-1323: hyp=['THERE', 'WAS', 'A', 'CRUDE', 'LIGHTING', 'SYSTEM', 'HERE', 'PUT', 'UP', 'BY', 'THE', 'CITIZENS', 'AT', 'THE', 'FRONT', 'OF', 'EACH', 'BUILDING', 'A', 'DIM', 'PHOSPHER', 'BULB', 'GLOWED', 'WHEN', 'DARKNESS', 'FELL', 'THEY', 'WOULD', 'HAVE', 'NOTHING', 'ELSE', 'TO', 'SEE', 'BY'] +8131-117016-0021-1324: ref=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'AT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0021-1324: hyp=['MOVING', 'IN', 'TWO', 'GROUPS', 'OF', 'THREES', 'IT', 'OPPOSITE', 'SIDES', 'OF', 'THE', 'STREET', 'THEY', 'BEGAN', 'THEIR', 'BEAT'] +8131-117016-0022-1325: ref=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0022-1325: hyp=['THERE', 'WAS', 'NO', 'CHANCE', 'TO', 'SAVE', 'THE', 'CITIZEN', 'WHO', 'WAS', 'DYING', 'FROM', 'LACK', 'OF', 'AIR'] +8131-117016-0023-1326: ref=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0023-1326: hyp=['GORDON', 'FELT', 'THE', 'SOLID', 'PLEASURE', 'OF', 'THE', 'FINELY', 'TURNED', 'CLUB', 'IN', 'HIS', 'HANDS'] +8131-117016-0024-1327: ref=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0024-1327: hyp=["GORDON'S", 'EYES', 'POPPED', 'AT', 'THAT'] +8131-117016-0025-1328: ref=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0025-1328: hyp=['HE', 'SWALLOWED', 'THE', 'SENTIMENT', 'HIS', 'OWN', 'CLUB', 'WAS', 'MOVING', 'NOW'] +8131-117016-0026-1329: ref=['THE', 'OTHER', 'FOUR', 'COPS', 'HAD', 
'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0026-1329: hyp=['THE', 'OTHER', 'FUPS', 'HAD', 'COME', 'IN', 'RELUCTANTLY'] +8131-117016-0027-1330: ref=['HE', 'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0027-1330: hyp=['HE', 'BROUGHT', 'HIM', 'TO', 'THE', 'GROUND', 'WITH', 'A', 'SINGLE', 'BLOW', 'ACROSS', 'THE', 'KIDNEYS'] +8131-117016-0028-1331: ref=['THEY', 'ROUNDED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'COPS', 'STARTED', 'OFF'] +8131-117016-0028-1331: hyp=['THEY', 'ROUTED', 'UP', 'THE', 'MEN', 'OF', 'THE', 'GANG', 'AND', 'ONE', 'OF', 'THE', 'CUPS', 'STARTED', 'OFF'] +8131-117016-0029-1332: ref=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0029-1332: hyp=['TO', 'FIND', 'A', 'PHONE', 'AND', 'CALL', 'THE', 'WAGON'] +8131-117016-0030-1333: ref=["WE'RE", 'NOT', 'USING', 'WAGONS', 'MURDOCH', 'TOLD', 'HIM', 'LINE', 'THEM', 'UP'] +8131-117016-0030-1333: hyp=["WE'RE", 'NOT', 'USING', 'WAGONS', 'MURDOCK', 'TOLD', 'HIM', 'LYING', 'THEM', 'UP'] +8131-117016-0031-1334: ref=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'IF', 'THEY', 'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0031-1334: hyp=['IF', 'THEY', 'TRIED', 'TO', 'RUN', 'THEY', 'WERE', 'HIT', 'FROM', 'BEHIND', 'THAT', 'THEY', 'STOOD', 'STILL', 'THEY', 'WERE', 'CLUBBED', 'CAREFULLY'] +8131-117016-0032-1335: ref=['MURDOCH', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDERS', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0032-1335: hyp=['MURDOCK', 'INDICATED', 'ONE', 'WHO', 'STOOD', 'WITH', 'HIS', 'SHOULDER', 'SHAKING', 'AND', 'TEARS', 'RUNNING', 'DOWN', 'HIS', 'CHEEKS'] +8131-117016-0033-1336: ref=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', 'GORDON', 'FELT'] +8131-117016-0033-1336: hyp=['THE', "CAPTAIN'S", 'FACE', 'WAS', 'AS', 'SICK', 'AS', "GORDON'S", 'FELT'] +8131-117016-0034-1337: ref=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0034-1337: hyp=['I', 'WANT', 'THE', 'NAME', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'GANG', 'YOU', 'CAN', 'REMEMBER', 'HE', 'TOLD', 'THE', 'MAN'] +8131-117016-0035-1338: ref=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0035-1338: hyp=['COLONEL', "THEY'D", 'KILL', 'ME', 'I', "DON'T", 'KNOW'] +8131-117016-0036-1339: ref=['MURDOCH', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TOUGHS'] +8131-117016-0036-1339: hyp=['MURDOCK', 'TOOK', 'HIS', 'NOD', 'AS', 'EVIDENCE', 'ENOUGH', 'AND', 'TURNED', 'TO', 'THE', 'WRETCHED', 'TUFTS'] +8131-117016-0037-1340: ref=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0037-1340: hyp=['IF', 'HE', 'SHOULD', 'TURN', 'UP', 'DEAD', "I'LL", 'KNOW', 'YOU', 'BOYS', 'ARE', 'RESPONSIBLE', 'AND', "I'LL", 'FIND', 'YOU'] +8131-117016-0038-1341: ref=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0038-1341: hyp=['TROUBLE', 'BEGAN', 'BREWING', 'SHORTLY', 'AFTER', 'THOUGH'] +8131-117016-0039-1342: ref=['MURDOCH', 'SENT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] +8131-117016-0039-1342: hyp=['MARDOX', 'SAT', 'ONE', 'OF', 'THE', 'MEN', 'TO', 'PICK', 'UP', 'A', 'SECOND', 'SQUAD', 'OF', 'SIX', 'AND', 'THEN', 'A', 'THIRD'] 
+8131-117016-0040-1343: ref=['IN', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', "WHO'D", 'BEEN', 'BEATEN', 'BEFORE'] +8131-117016-0040-1343: hyp=['AND', 'THE', 'THIRD', 'ONE', 'BRUCE', 'GORDON', 'SPOTTED', 'ONE', 'OF', 'THE', 'MEN', 'WHO', 'HAD', 'BEEN', 'BEATEN', 'BEFORE'] +8131-117016-0041-1344: ref=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0041-1344: hyp=['GET', 'A', 'STRETCHER', 'AND', 'TAKE', 'HIM', 'WHEREVER', 'HE', 'BELONGS', 'HE', 'ORDERED'] +8131-117016-0042-1345: ref=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0042-1345: hyp=['BUT', 'THE', 'CAPTAIN', 'STIRRED', 'FINALLY', 'SIGHING'] +8131-117016-0043-1346: ref=['NO', 'THE', 'COPS', "THEY'RE", 'GIVING', 'ME', "WE'RE", 'COVERED', 'GORDON'] +8131-117016-0043-1346: hyp=['NOW', 'THE', 'CAPS', 'ARE', 'GIVING', 'ME', 'WERE', 'COVERED', 'GORDON'] +8131-117016-0044-1347: ref=['BUT', 'THE', 'STONEWALL', 'GANG', 'IS', 'BACKING', 'WAYNE'] +8131-117016-0044-1347: hyp=['BUT', 'THE', 'STERN', 'WALL', 'GANG', 'IS', 'BACK', 'IN', 'WAIN'] +8131-117016-0045-1348: ref=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 'ON', 'THEM'] +8131-117016-0045-1348: hyp=['BUT', "IT'S", 'GOING', 'TO', 'BE', 'TOUGH', 'ON', 'THEM'] +8131-117016-0046-1349: ref=['BRUCE', 'GORDON', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0046-1349: hyp=['BRUCE', 'GORD', 'AND', 'GRIMACED', "I'VE", 'GOT', 'A', 'YELLOW', 'TICKET', 'FROM', 'SECURITY'] +8131-117016-0047-1350: ref=['MURDOCH', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0047-1350: hyp=['MURDOCK', 'BLINKED', 'HE', 'DROPPED', 'HIS', 'EYES', 'SLOWLY'] +8131-117016-0048-1351: ref=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAYNE', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0048-1351: hyp=['WHAT', 'MAKES', 'YOU', 'THINK', 'WAIN', 'WILL', 'BE', 'RE', 'ELECTED'] +8131-117016-0049-1352: ref=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'CROOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0049-1352: hyp=['NOBODY', 'WANTS', 'HIM', 'EXCEPT', 'A', 'GANG', 'OF', 'COOKS', 'AND', 'THOSE', 'IN', 'POWER'] +8131-117016-0050-1353: ref=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0050-1353: hyp=['EVER', 'SEE', 'A', 'MARTIAN', 'ELECTION'] +8131-117016-0051-1354: ref=['NO', "YOU'RE", 'A', 'FIRSTER', 'HE', "CAN'T", 'LOSE'] +8131-117016-0051-1354: hyp=['NO', 'YOU', 'ARE', 'FIRSTTER', 'HE', "CAN'T", 'LOSE'] +8131-117016-0052-1355: ref=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLANET', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0052-1355: hyp=['AND', 'THEN', 'HELL', 'IS', 'GOING', 'TO', 'POP', 'AND', 'THIS', 'WHOLE', 'PLAN', 'IT', 'MAY', 'BE', 'BLOWN', 'WIDE', 'OPEN'] +8131-117016-0053-1356: ref=['IT', 'FITTED', 'WITH', 'THE', 'DIRE', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'THE', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0053-1356: hyp=['YET', 'FITTED', 'WITH', 'THE', 'DIA', 'PREDICTIONS', 'OF', 'SECURITY', 'AND', 'WITH', 'A', 'SPYING', 'GORDON', 'WAS', 'GOING', 'TO', 'DO', 'ACCORDING', 'TO', 'THEM'] +8131-117016-0054-1357: ref=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 'AROUND', 'THE', 'CORNER'] +8131-117016-0054-1357: hyp=['HE', 'WAS', 'GETTING', 'EVEN', 'FATTER', 'NOW', 'THAT', 'HE', 'WAS', 'EATING', 'BETTER', 'FOOD', 'FROM', 'THE', 'FAIR', 'RESTAURANT', 
'AROUND', 'THE', 'CORNER'] +8131-117016-0055-1358: ref=['COST', 'EM', 'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0055-1358: hyp=['COSTUM', 'MORE', 'BUT', "THEY'D", 'BE', 'RESPECTABLE'] +8131-117016-0056-1359: ref=['BECAUSE', 'IZZY', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0056-1359: hyp=['BECAUSE', 'IZZIE', 'IS', 'ALWAYS', 'HONEST', 'ACCORDING', 'TO', 'HOW', 'HE', 'SEES', 'IT'] +8131-117016-0057-1360: ref=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0057-1360: hyp=['BUT', 'YOU', 'GOT', 'EARTH', 'IDEAS', 'OF', 'THE', 'STUFF', 'LIKE', 'I', 'HAD', 'ONCE'] +8131-117016-0058-1361: ref=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCH', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0058-1361: hyp=['THE', 'GROUPS', 'GREW', 'MORE', 'EXPERIENCED', 'AND', 'MURDOCK', 'WAS', 'TRAINING', 'A', 'NEW', 'SQUAD', 'EVERY', 'NIGHT'] +8131-117016-0059-1362: ref=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0059-1362: hyp=['IT', "WASN'T", 'EXACTLY', 'LEGAL', 'BUT', 'NOTHING', 'WAS', 'HERE'] +8131-117016-0060-1363: ref=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0060-1363: hyp=['THIS', 'COULD', 'LEAD', 'TO', 'ABUSES', 'AS', "HE'D", 'SEEN', 'ON', 'EARTH'] +8131-117016-0061-1364: ref=['BUT', 'THERE', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAYNE', 'WAS', 'RE', 'ELECTED'] +8131-117016-0061-1364: hyp=['BUT', 'THEIR', 'PROBABLY', "WOULDN'T", 'BE', 'TIME', 'FOR', 'IT', 'IF', 'MAYOR', 'WAIN', 'WAS', 'RE', 'ELECTED'] +8131-117017-0000-1270: ref=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOR', 'BULBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0000-1270: hyp=['IT', 'WAS', 'NIGHT', 'OUTSIDE', 'AND', 'THE', 'PHOSPHOBS', 'AT', 'THE', 'CORNERS', 'GLOWED', 'DIMLY', 'GIVING', 'HIM', 'BARELY', 'ENOUGH', 'LIGHT', 'BY', 'WHICH', 'TO', 'LOCATE', 'THE', 'WAY', 'TO', 'THE', 'EXTEMPORIZED', 'PRECINCT', 'HOUSE'] +8131-117017-0001-1271: ref=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0001-1271: hyp=['IT', 'HAD', 'PROBABLY', 'BEEN', 'YEARS', 'SINCE', 'ANY', 'HAD', 'DARED', 'RISK', 'IT', 'AFTER', 'THE', 'SUN', 'WENT', 'DOWN'] +8131-117017-0002-1272: ref=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', "HALEY'S", 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0002-1272: hyp=['AND', 'THE', 'SLOW', 'DOUBTFUL', 'RESPECT', 'ON', 'THE', 'FACES', 'OF', 'THE', 'CITIZENS', 'AS', 'THEY', 'NODDED', 'TO', 'HIM', 'WAS', 'EVEN', 'MORE', 'PROOF', 'THAT', 'HALELY', 'SYSTEM', 'WAS', 'WORKING'] +8131-117017-0003-1273: ref=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARSPEAKER', 'LET', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0003-1273: hyp=['GORDON', 'HIT', 'THE', 'SIGNAL', 'SWITCH', 'AND', 'THE', 'MARSH', 'SPEAKER', 'LET', 'OUT', 'A', 'SHRILL', 'WHISTLE'] +8131-117017-0004-1274: ref=['GUNS', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0004-1274: hyp=['GUN', 'SUDDENLY', 'SEEMED', 'TO', 'BE', 'FLOURISHING', 'EVERYWHERE'] +8131-117017-0005-1275: ref=['YOU', "CAN'T", 
'DO', 'IT', 'TO', 'ME'] +8131-117017-0005-1275: hyp=['YOU', "CAN'T", 'DO', 'IT', 'TO', 'ME'] +8131-117017-0006-1276: ref=["I'M", 'REFORMED', "I'M", 'GOING', 'STRAIGHT'] +8131-117017-0006-1276: hyp=['I', 'AM', 'REFORMED', "I'M", 'GOING', 'STRAIGHT'] +8131-117017-0007-1277: ref=['YOU', 'DAMNED', 'COPS', "CAN'T", "O'NEILL", 'WAS', 'BLUBBERING'] +8131-117017-0007-1277: hyp=['YOU', 'DAMNED', 'COPSE', "CAN'T", "O'NEIA", 'WAS', 'BLUBBERING'] +8131-117017-0008-1278: ref=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELLTALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THAT', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0008-1278: hyp=['ONE', 'LOOK', 'WAS', 'ENOUGH', 'THE', 'WORK', 'PAPERS', 'HAD', 'THE', 'TELL', 'TALE', 'OVER', 'THICKENING', 'OF', 'THE', 'SIGNATURE', 'THEY', 'HAD', 'SHOWED', 'UP', 'ON', 'OTHER', 'PAPERS', 'OBVIOUSLY', 'FORGERIES'] +8131-117017-0009-1279: ref=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COP', 'WENT', 'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0009-1279: hyp=['SOME', 'TURNED', 'AWAY', 'AS', 'GORDON', 'AND', 'THE', 'OTHER', 'COPP', 'WENT', 'TO', 'WORK', 'BUT', 'MOST', 'OF', 'THEM', "WEREN'T", 'SQUEAMISH'] +8131-117017-0010-1280: ref=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0010-1280: hyp=['WHEN', 'IT', 'WAS', 'OVER', 'THE', 'TWO', 'PICKED', 'UP', 'THEIR', 'WHIMPERING', 'CAPTIVE'] +8131-117017-0011-1281: ref=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0011-1281: hyp=['JENKINS', 'THE', 'OTHER', 'COP', 'HAD', 'BEEN', 'HOLDING', 'THE', 'WALLET'] +8131-117017-0012-1282: ref=['MUST', 'OF', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0012-1282: hyp=['MUST', 'HAVE', 'BEEN', 'MAKING', 'A', 'BIG', 'CONTACT', 'IN', 'SOMETHING', 'FIFTY', 'FIFTY'] +8131-117017-0013-1283: ref=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0013-1283: hyp=['THERE', 'MUST', 'HAVE', 'BEEN', 'OVER', 'TWO', 'THOUSAND', 'CREDITS', 'IN', 'THE', 'WALLET'] +8131-117017-0014-1284: ref=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MURDOCH', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0014-1284: hyp=['WHEN', 'GORDON', 'AND', 'JENKINS', 'CAME', 'BACK', 'MERDOCK', 'TOSSED', 'THE', 'MONEY', 'TO', 'THEM', 'SPLIT', 'IT'] +8131-117017-0015-1285: ref=['WHATEVER', 'COMES', 'TO', 'HAND', "GOV'NOR"] +8131-117017-0015-1285: hyp=['WHATEVER', 'COMES', 'TO', 'HAND', 'GOVERNOR'] +8131-117017-0016-1286: ref=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0016-1286: hyp=['LIKE', 'THIS', 'SOCIAL', 'CALL', 'GORDON', 'ASKED', 'HIM'] +8131-117017-0017-1287: ref=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOPE'] +8131-117017-0017-1287: hyp=['THE', 'LITTLE', 'MAN', 'SHOOK', 'HIS', 'HEAD', 'HIS', 'ANCIENT', 'EIGHTEEN', 'YEAR', 'OLD', 'FACE', 'TURNING', 'SOBER', 'NOTE'] +8131-117017-0018-1288: ref=['YOU', 'OWE', 'ME', 'SOME', 'BILLS', "GOV'NOR"] +8131-117017-0018-1288: hyp=['YOU', 'ARE', 'ME', 'SOME', 'BILLS', "GUV'NER"] +8131-117017-0019-1289: ref=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0019-1289: hyp=['ELEVEN', 'HUNDRED', 'FIFTY', 'CREDITS'] +8131-117017-0020-1290: ref=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 
'CAMPAIGN', 'FUND', 'SO', 'I', 'HADDA', 'FILL', 'IN'] +8131-117017-0020-1290: hyp=['YOU', "DIDN'T", 'PAY', 'UP', 'YOUR', 'PLEDGE', 'TO', 'THE', 'CAPTAIN', 'FUND', 'SO', 'I', 'HAD', 'A', 'FILL', 'IN'] +8131-117017-0021-1291: ref=['A', 'THOUSAND', 'INTEREST', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0021-1291: hyp=['A', 'THOUSAND', 'INTERESTS', 'AT', 'TEN', 'PER', 'CENT', 'A', 'WEEK', 'STANDARD', 'RIGHT'] +8131-117017-0022-1292: ref=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IZZY'] +8131-117017-0022-1292: hyp=['GORDON', 'HAD', 'HEARD', 'OF', 'THE', 'FRIENDLY', 'INTEREST', 'CHARGED', 'ON', 'THE', 'SIDE', 'HERE', 'BUT', 'HE', 'SHOOK', 'HIS', 'HEAD', 'WRONG', 'IS', 'HE'] +8131-117017-0023-1293: ref=['HUH', 'IZZY', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0023-1293: hyp=['HOW', 'AS', 'HE', 'TURNED', 'IT', 'OVER', 'AND', 'SHOOK', 'HIS', 'HEAD'] +8131-117017-0024-1294: ref=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0024-1294: hyp=['NOW', 'SHOW', 'ME', 'WHERE', 'I', 'SIGNED', 'ANY', 'AGREEMENT', 'SAYING', "I'D", 'PAY', 'YOU', 'BACK'] +8131-117017-0025-1295: ref=['FOR', 'A', 'SECOND', "IZZY'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0025-1295: hyp=['FOR', 'A', 'SECOND', "IZZIE'S", 'FACE', 'WENT', 'BLANK', 'THEN', 'HE', 'CHUCKLED'] +8131-117017-0026-1296: ref=['HE', 'PULLED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0026-1296: hyp=['HE', 'POURED', 'OUT', 'THE', 'BILLS', 'AND', 'HANDED', 'THEM', 'OVER'] +8131-117017-0027-1297: ref=['THANKS', 'IZZY', 'THANKS', 'YOURSELF'] +8131-117017-0027-1297: hyp=['THANKS', 'IS', 'HE', 'THANKS', 'YOURSELF'] +8131-117017-0028-1298: ref=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0028-1298: hyp=['THE', 'KID', 'POCKETED', 'THE', 'MONEY', 'CHEERFULLY', 'NODDING'] +8131-117017-0029-1299: ref=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0029-1299: hyp=['THE', 'LITTLE', 'GUY', 'KNEW', 'MARS', 'AS', 'FEW', 'OTHERS', 'DID', 'APPARENTLY', 'FROM', 'ALL', 'SIDES'] +8131-117017-0030-1300: ref=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'COPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IZZY', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USE', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'TO', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0030-1300: hyp=['AND', 'IF', 'ANY', 'OF', 'THE', 'OTHER', 'CUPS', 'HAD', 'PRIVATE', 'RACKETS', 'OF', 'THEIR', 'OWN', 'IS', 'HE', 'WAS', 'UNDOUBTEDLY', 'THE', 'MAN', 'TO', 'FIND', 'IT', 'OUT', 'AND', 'USED', 'THE', 'INFORMATION', 'WITH', 'A', 'BEAT', 'SUCH', 'AS', 'THAT', 'EVEN', 'GOING', 'HALVES', 'AND', 'WITH', 'ALL', 'THE', 'GRAFT', 'AT', 'THE', 'UPPER', 'BRACKETS', "HE'D", 'STILL', 'BE', 'ABLE', 'TO', 'MAKE', 'HIS', 'PILE', 'IN', 'A', 'MATTER', 'OF', 'MONTHS'] +8131-117017-0031-1301: ref=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 'THE', 'ROOM', 'AND', 'DROPPED', 'ONTO', 'THE', 'BENCH'] +8131-117017-0031-1301: hyp=['THE', 'CAPTAIN', 'LOOKED', 'COMPLETELY', 'BEATEN', 'AS', 'HE', 'CAME', 'INTO', 
'THE', 'ROOM', 'AND', 'DROPPED', 'INTO', 'THE', 'BENCH'] +8131-117017-0032-1302: ref=['GO', 'ON', 'ACCEPT', 'DAMN', 'IT'] +8131-117017-0032-1302: hyp=['GO', 'ON', 'EXCEPT', 'DEAR', 'MIN'] +8131-117029-0000-1247: ref=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0000-1247: hyp=['THERE', 'WAS', 'A', 'MAN', 'COMING', 'FROM', 'EARTH', 'ON', 'A', 'SECOND', 'SHIP', 'WHO', 'WOULD', 'SEE', 'HIM'] +8131-117029-0001-1248: ref=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0001-1248: hyp=['THE', 'LITTLE', 'PUBLISHER', 'WAS', 'BACK', 'AT', 'THE', 'CRUSADER', 'AGAIN'] +8131-117029-0002-1249: ref=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0002-1249: hyp=['ONLY', 'GORDON', 'AND', 'SHEILA', 'WERE', 'LEFT'] +8131-117029-0003-1250: ref=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0003-1250: hyp=['CREDIT', 'HAD', 'BEEN', 'ESTABLISHED', 'AGAIN', 'AND', 'THE', 'BUSINESSES', 'WERE', 'OPEN'] +8131-117029-0004-1251: ref=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0004-1251: hyp=['GORDON', 'CAME', 'TO', 'A', 'ROW', 'OF', 'TEMPORARY', 'BUBBLES', 'INDIVIDUAL', 'DWELLINGS', 'BUILT', 'LIKE', 'THE', 'DOME', 'BUT', 'OPAQUE', 'FOR', 'PRIVACY'] +8131-117029-0005-1252: ref=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0005-1252: hyp=['THEY', 'HAD', 'BEEN', 'LUCKY'] +8131-117029-0006-1253: ref=["SCHULBERG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0006-1253: hyp=["SHOALBURG'S", 'VOLUNTEERS', 'WERE', 'OFFICIAL', 'NOW'] +8131-117029-0007-1254: ref=['FATS', 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0007-1254: hyp=["FAT'S", 'PLACE', 'WAS', 'STILL', 'OPEN', 'THOUGH', 'THE', 'CROOKED', 'TABLES', 'HAD', 'BEEN', 'REMOVED', 'GORDON', 'DROPPED', 'TO', 'A', 'STOOL', 'SLIPPING', 'OFF', 'HIS', 'HELMET'] +8131-117029-0008-1255: ref=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLED', 'BEER'] +8131-117029-0008-1255: hyp=['HE', 'REACHED', 'AUTOMATICALLY', 'FOR', 'THE', 'GLASS', 'OF', 'ETHER', 'NEEDLE', 'BEER'] +8131-117029-0009-1256: ref=['THOUGHT', "YOU'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0009-1256: hyp=['THOUGHT', "YE'D", 'BE', 'IN', 'THE', 'CHIPS'] +8131-117029-0010-1257: ref=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'THE', "OTHER'S", 'COMMENT', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0010-1257: hyp=["THAT'S", 'MARS', 'GORDON', 'ECHOED', 'THE', 'OTHERS', 'COMMENTS', 'WHY', "DON'T", 'YOU', 'PULL', 'OFF', 'THE', 'PLANET', 'FATS', 'YOU', 'COULD', 'GO', 'BACK', 'TO', 'EARTH', "I'D", 'GUESS', 'THE', 'OTHER', 'NODDED'] +8131-117029-0011-1258: ref=['GUESS', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BILL', 'THIS', 'AS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0011-1258: hyp=['GES', 'A', 'MAN', 'GETS', 'USED', 'TO', 'ANYTHING', 'HELL', 'MAYBE', 'I', 'CAN', 'HIRE', 'SOME', 'BUMS', 'TO', 'SIT', 'AROUND', 'AND', 'WHOOP', 
'IT', 'UP', 'WHEN', 'THE', 'SHIPS', 'COME', 'IN', 'AND', 'BUILD', 'THIS', 'IS', 'A', 'REAL', 'OLD', 'MARTIAN', 'DEN', 'OF', 'SIN'] +8131-117029-0012-1259: ref=['THERE', 'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0012-1259: hyp=['THERE', 'WAS', 'A', 'GRIN', 'ON', 'THE', "OTHER'S", 'FACE'] +8131-117029-0013-1260: ref=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0013-1260: hyp=['FINALLY', 'GOT', 'OUR', 'ORDERS', 'FOR', 'YOU', "IT'S", 'MERCURY'] +8131-117029-0014-1261: ref=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0014-1261: hyp=['WE', 'SENT', 'TWENTY', 'OTHERS', 'THE', 'SAME', 'WAY', 'AND', 'THEY', 'FAILED'] +8131-117029-0015-1262: ref=["LET'S", 'SAY', "YOU'VE", 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0015-1262: hyp=["LET'S", 'SAVE', 'SHIFTED', 'SOME', 'OF', 'THE', 'MISERY', 'AROUND', 'A', 'BIT', 'AND', 'GIVEN', 'THEM', 'A', 'CHANCE', 'TO', 'DO', 'BETTER'] +8131-117029-0016-1263: ref=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0016-1263: hyp=['YOU', "CAN'T", 'STAY', 'HERE'] +8131-117029-0017-1264: ref=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0017-1264: hyp=["THERE'S", 'A', 'ROCKET', 'WAITING', 'TO', 'TRANSHIP', 'YOU', 'TO', 'THE', 'MOON', 'ON', 'THE', 'WAY', 'TO', 'MERCURY', 'RIGHT', 'NOW', 'GORDON', 'SIGHED'] +8131-117029-0018-1265: ref=['AND', "I'VE", 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGAN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0018-1265: hyp=['AND', 'I', 'PAID', 'HER', 'THE', 'PAY', 'WE', 'OWE', 'YOU', 'FROM', 'THE', 'TIME', 'YOU', 'BEGIN', 'USING', 'YOUR', 'BADGE', "SHE'S", 'OUT', 'SHOPPING'] +8131-117029-0019-1266: ref=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0019-1266: hyp=['BUT', 'HIS', 'OLD', 'EYES', 'WERE', 'GLINTING'] +8131-117029-0020-1267: ref=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COBBER', 'HE', 'ASKED'] +8131-117029-0020-1267: hyp=['DID', 'YOU', 'THINK', "WE'D", 'LET', 'YOU', 'GO', 'WITHOUT', 'SEEING', 'YOU', 'OFF', 'COPPER', 'HE', 'ASKED'] +8131-117029-0021-1268: ref=['I', 'I', 'OH', 'DRAT', 'IT', "I'M", 'GETTING', 'OLD', 'IZZY', 'YOU', 'TELL', 'HIM'] +8131-117029-0021-1268: hyp=['I', 'I', 'OH', 'DRAT', 'IT', "I'M", 'GETTING', 'OLD', 'IS', 'HE', 'YOU', 'TELL', 'HIM'] +8131-117029-0022-1269: ref=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZY', 'SHOOK', 'HIS', 'HEAD'] +8131-117029-0022-1269: hyp=['HE', 'GRABBED', "GORDON'S", 'HAND', 'AND', 'WADDLED', 'DOWN', 'THE', 'LANDING', 'PLANK', 'IZZIE', 'SHOOK', 'HIS', 'HEAD'] +8188-269288-0000-2881: ref=['ANNIE', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LESLIE'] +8188-269288-0000-2881: hyp=['ANY', 'COLCHESTER', 'HAD', 'BEGUN', 'TO', 'MAKE', 'FRIENDS', 'WITH', 'LISLEY'] +8188-269288-0001-2882: ref=['LESLIE', 'DETERMINED', 'TO', 'TRY', 'FOR', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0001-2882: hyp=['LESLIE', 'DETERMINED', 'TO', 'TRIFLE', 'HONORS', 'IN', 'ENGLISH', 'LANGUAGE', 'AND', 'LITERATURE'] +8188-269288-0002-2883: ref=['HER', 'TASTES', 'ALL', 'LAY', 'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', 
"MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0002-2883: hyp=['HER', 'TASTES', 'ALL', 'LAY', 'IN', 'THIS', 'DIRECTION', 'HER', 'IDEA', 'BEING', 'BY', 'AND', 'BY', 'TO', 'FOLLOW', 'HER', "MOTHER'S", 'PROFESSION', 'OF', 'JOURNALISM', 'FOR', 'WHICH', 'SHE', 'ALREADY', 'SHOWED', 'CONSIDERABLE', 'APTITUDE'] +8188-269288-0003-2884: ref=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0003-2884: hyp=['SHE', 'HAD', 'NO', 'IDEA', 'OF', 'ALLOWING', 'HERSELF', 'TO', 'BREAK', 'DOWN'] +8188-269288-0004-2885: ref=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0004-2885: hyp=['WHAT', 'DO', 'YOU', 'MEAN', 'REPLIED', 'LESLIE'] +8188-269288-0005-2886: ref=['WHY', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0005-2886: hyp=['WHAT', 'YOU', 'WILL', 'BE', 'PARTING', 'FROM', 'ME', 'YOU', 'KNOW'] +8188-269288-0006-2887: ref=['I', "WON'T", 'BE', 'THE', 'CONSTANT', 'WORRY', 'AND', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0006-2887: hyp=['I', 'WOULD', 'BE', 'THE', 'CONSTANT', 'WORRY', 'IN', 'PLAGUE', 'OF', 'YOUR', 'LIFE'] +8188-269288-0007-2888: ref=['IT', 'IS', 'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', "WODE'S", 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOMFELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0007-2888: hyp=['IT', 'IS', 'THIS', 'IF', 'BY', 'ANY', 'CHANCE', 'YOU', "DON'T", 'LEAVE', 'SAINT', 'WORDS', 'ANNIE', 'I', 'HOPE', 'YOU', 'WILL', 'ALLOW', 'ME', 'TO', 'BE', 'YOUR', 'ROOM', 'FELLOW', 'AGAIN', 'NEXT', 'TERM'] +8188-269288-0008-2889: ref=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0008-2889: hyp=['SAID', 'ANNIE', 'A', 'FLASH', 'OF', 'LIGHT', 'COMING', 'INTO', 'HER', 'EYES', 'AND', 'THEN', 'LEAVING', 'THEM'] +8188-269288-0009-2890: ref=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0009-2890: hyp=['BUT', 'SHE', 'ADDED', 'ABRUPTLY', 'YOU', 'SPEAK', 'OF', 'SOMETHING', 'WHICH', 'MUST', 'NOT', 'TAKE', 'PLACE'] +8188-269288-0010-2891: ref=['I', 'MUST', 'PASS', 'IN', 'HONORS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0010-2891: hyp=['I', 'MUST', 'PASS', 'AN', 'HONOURS', 'IF', 'I', "DON'T", 'I', 'SHALL', 'DIE'] +8188-269288-0011-2892: ref=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0011-2892: hyp=['A', 'FEW', 'MOMENTS', 'LATER', 'THERE', 'CAME', 'A', 'TAP', 'AT', 'THE', 'DOOR'] +8188-269288-0012-2893: ref=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0012-2893: hyp=['LESLIE', 'OPENED', 'THE', 'DOOR'] +8188-269288-0013-2894: ref=['JANE', 'HERIOT', 'STOOD', 'WITHOUT'] +8188-269288-0013-2894: hyp=['JANE', 'HEARET', 'STOOD', 'WITHOUT'] +8188-269288-0014-2895: ref=['THESE', 'LETTERS', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANNIE', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UPSTAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0014-2895: hyp=['THESE', 'LITTLE', 'HAVE', 'JUST', 'COME', 'FOR', 'YOU', 'AND', 'ANY', 'COLCHESTER', 'SHE', 'SAID', 'AND', 'AS', 'I', 'WAS', 'COMING', 'UPSTAIRS', 'I', 'THOUGHT', 'I', 'WOULD', 'LEAVE', 'THEM', 'WITH', 'YOU'] +8188-269288-0015-2896: ref=['LESLIE', 'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0015-2896: hyp=['LIZLY', 
'THANKED', 'HER', 'AND', 'EAGERLY', 'GRASPED', 'THE', 'LITTLE', 'PARCEL'] +8188-269288-0016-2897: ref=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANNIE', 'COLCHESTER'] +8188-269288-0016-2897: hyp=['HER', 'EYES', 'SHONE', 'WITH', 'PLEASURE', 'AT', 'THE', 'ANTICIPATION', 'OF', 'THE', 'DELIGHTFUL', 'TIME', 'SHE', 'WOULD', 'HAVE', 'REVELING', 'IN', 'THE', 'HOME', 'NEWS', 'THE', 'OTHER', 'LETTER', 'WAS', 'DIRECTED', 'TO', 'ANY', 'COLCHESTER'] +8188-269288-0017-2898: ref=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LESLIE'] +8188-269288-0017-2898: hyp=['HERE', 'IS', 'A', 'LETTER', 'FOR', 'YOU', 'ANNIE', 'CRIED', 'LESLIE'] +8188-269288-0018-2899: ref=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0018-2899: hyp=['HER', 'FACE', 'GREW', 'SUDDENLY', 'WHITE', 'AS', 'DEATH', 'WHAT', 'IS', 'IT', 'DEAR'] +8188-269288-0019-2900: ref=['I', 'HAVE', 'BEEN', 'STARVING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0019-2900: hyp=['I', 'HAVE', 'BEEN', 'STARLING', 'OR', 'RATHER', 'I', 'HAVE', 'BEEN', 'THIRSTING'] +8188-269288-0020-2901: ref=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LESLIE', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0020-2901: hyp=['WELL', 'READ', 'IT', 'IN', 'PEACE', 'SAID', 'LINLESILY', 'I', "WON'T", 'DISTURB', 'YOU'] +8188-269288-0021-2902: ref=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0021-2902: hyp=['I', 'AM', 'TRULY', 'GLAD', 'IT', 'HAS', 'COME'] +8188-269288-0022-2903: ref=['LESLIE', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'OWN', 'LETTERS'] +8188-269288-0022-2903: hyp=['LISALLY', 'SEATED', 'HERSELF', 'WITH', 'HER', 'BACK', 'TO', 'HER', 'COMPANION', 'AND', 'OPENED', 'HER', 'ON', 'LETTERS'] +8188-269288-0023-2904: ref=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0023-2904: hyp=["DON'T", 'NOTICE', 'ME', 'REPLIED', 'ANNIE'] +8188-269288-0024-2905: ref=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0024-2905: hyp=['I', 'MUST', 'GO', 'INTO', 'THE', 'GROUNDS', 'THE', 'AIR', 'IS', 'STIFLING'] +8188-269288-0025-2906: ref=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0025-2906: hyp=['BUT', 'THEY', 'ARE', 'JUST', 'SHUTTING', 'UP'] +8188-269288-0026-2907: ref=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0026-2907: hyp=['I', 'SHALL', 'GO', 'I', 'KNOW', 'A', 'WAY'] +8188-269288-0027-2908: ref=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0027-2908: hyp=['JUST', 'AFTER', 'MIDNIGHT', 'SHE', 'ROSE', 'WITH', 'A', 'SIGH', 'TO', 'PREPARE', 'FOR', 'BED'] +8188-269288-0028-2909: ref=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0028-2909: hyp=['SHE', 'LOOKED', 'ROUND', 'THE', 'ROOM'] +8188-269288-0029-2910: ref=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0029-2910: hyp=['NOW', 'I', 'REMEMBER', 'SHE', 'GOT', 'A', 'LETTER', 'WHICH', 'UPSET', 'HER', 'VERY', 'MUCH', 'AND', 'WENT', 'OUT'] +8188-269288-0030-2911: ref=['LESLIE', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 
'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0030-2911: hyp=['LIZLY', 'WENT', 'TO', 'THE', 'WINDOW', 'AND', 'FLUNG', 'IT', 'OPEN', 'SHE', 'PUT', 'HER', 'HEAD', 'OUT', 'AND', 'TRIED', 'TO', 'PEER', 'INTO', 'THE', 'DARKNESS', 'BUT', 'THE', 'MOON', 'HAD', 'ALREADY', 'SET', 'AND', 'SHE', 'COULD', 'NOT', 'SEE', 'MORE', 'THAN', 'A', 'COUPLE', 'OF', 'YARDS', 'IN', 'FRONT', 'OF', 'HER'] +8188-269288-0031-2912: ref=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'WAS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0031-2912: hyp=['SHE', 'IS', 'A', 'VERY', 'QUEER', 'ERRATIC', 'CREATURE', 'AND', 'THAT', 'LETTER', 'THERE', 'IS', 'BAD', 'NEWS', 'IN', 'THAT', 'LETTER'] +8188-269288-0032-2913: ref=['WHAT', 'CAN', 'SHE', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0032-2913: hyp=['WHAT', "CAN'T", 'YOU', 'BE', 'DOING', 'OUT', 'BY', 'HERSELF'] +8188-269288-0033-2914: ref=['LESLIE', 'LEFT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANNIE', 'RETURNING'] +8188-269288-0033-2914: hyp=['THIS', 'LILY', 'LIT', 'THE', 'ROOM', 'BUT', 'SHE', 'HAD', 'SCARCELY', 'GONE', 'A', 'DOZEN', 'PLACES', 'DOWN', 'THE', 'CORRIDOR', 'BEFORE', 'SHE', 'MET', 'ANY', 'RETURNING'] +8188-269288-0034-2915: ref=["ANNIE'S", 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0034-2915: hyp=['AND', 'HIS', 'EYES', 'WERE', 'VERY', 'BRIGHT', 'HER', 'CHEEKS', 'WERE', 'NO', 'LONGER', 'PALE', 'AND', 'THERE', 'WAS', 'A', 'BRILLIANT', 'COLOR', 'IN', 'THEM'] +8188-269288-0035-2916: ref=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LESLIE', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0035-2916: hyp=['SHE', 'DID', 'NOT', 'TAKE', 'THE', 'LEAST', 'NOTICE', 'OF', 'LIZZLING', 'BUT', 'GOING', 'INTO', 'THE', 'ROOM', 'SHUT', 'THE', 'DOOR'] +8188-269288-0036-2917: ref=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0036-2917: hyp=["DON'T", 'BEGIN', 'SAID', 'ANNIE'] +8188-269288-0037-2918: ref=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0037-2918: hyp=["DON'T", 'BEGIN', 'WHAT', 'DO', 'YOU', 'MEAN'] +8188-269288-0038-2919: ref=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0038-2919: hyp=['I', 'MEAN', 'THAT', 'I', "DON'T", 'WANT', 'YOU', 'TO', 'BEGIN', 'TO', 'ASK', 'QUESTIONS'] +8188-269288-0039-2920: ref=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0039-2920: hyp=['I', 'WALKED', 'UP', 'AND', 'DOWN', 'AS', 'FAST', 'AS', 'EVER', 'I', 'COULD', 'OUTSIDE', 'IN', 'ORDER', 'TO', 'MAKE', 'MYSELF', 'SLEEPY'] +8188-269288-0040-2921: ref=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0040-2921: hyp=["DON'T", 'TALK', 'TO', 'ME', 'LESLIE', "DON'T", 'SAY', 'A', 'SINGLE', 'WORD'] +8188-269288-0041-2922: ref=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0041-2922: hyp=['I', 'SHALL', 'GO', 'OFF', 'TO', 'SLEEP', 'THAT', 'IS', 'ALL', 'I', 'CARE', 'FOR'] +8188-269288-0042-2923: ref=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0042-2923: hyp=["DON'T", 'SAID', 'ANNIE'] +8188-269288-0043-2924: ref=['NOW', 'DRINK', 'THIS', 'AT', 'ONCE', 
'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0043-2924: hyp=['NOW', 'DRINK', 'THIS', 'AT', 'ONCE', 'SHE', 'SAID', 'IN', 'A', 'VOICE', 'OF', 'AUTHORITY', 'IF', 'YOU', 'REALLY', 'WISH', 'TO', 'SLEEP'] +8188-269288-0044-2925: ref=['ANNIE', 'STARED', 'VACANTLY', 'AT', 'THE', 'COCOA', 'THEN', 'SHE', 'UTTERED', 'A', 'LAUGH'] +8188-269288-0044-2925: hyp=['ANY', 'STEERED', 'VACANTLY', 'AT', 'THE', 'COOKER', "DIDN'T", 'A', 'LAUGH'] +8188-269288-0045-2926: ref=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0045-2926: hyp=['DRINK', 'THAT', 'SHE', 'SAID'] +8188-269288-0046-2927: ref=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0046-2927: hyp=['DO', 'YOU', 'WANT', 'TO', 'KILL', 'ME', "DON'T", 'TALK', 'ANY', 'MORE'] +8188-269288-0047-2928: ref=['I', 'AM', 'SLEEPY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0047-2928: hyp=['I', 'AM', 'SLEEPY', 'I', 'SHALL', 'SLEEP'] +8188-269288-0048-2929: ref=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0048-2929: hyp=['SHE', 'GOT', 'INTO', 'BED', 'AS', 'SHE', 'SPOKE', 'AND', 'WRAPPED', 'THE', 'CLOTHES', 'TIGHTLY', 'ROUND', 'HER'] +8188-269288-0049-2930: ref=["CAN'T", 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0049-2930: hyp=['COULD', 'YOU', 'MANAGE', 'WITH', 'A', 'CANDLE', 'JUST', 'FOR', 'ONCE'] +8188-269288-0050-2931: ref=['CERTAINLY', 'SAID', 'LESLIE'] +8188-269288-0050-2931: hyp=['CERTAINLY', 'SAID', 'IT', 'EASILY'] +8188-269288-0051-2932: ref=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'SHE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0051-2932: hyp=['SHE', 'TURNED', 'OFF', 'THE', 'LIGHT', 'AND', 'LIT', 'A', 'CANDLE', 'WHICH', 'HE', 'PUT', 'BEHIND', 'HER', 'SCREEN', 'THEN', 'PREPARED', 'TO', 'GET', 'INTO', 'BED'] +8188-269288-0052-2933: ref=["ANNIE'S", 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0052-2933: hyp=['ANY', 'MANNER', 'WAS', 'VERY', 'MYSTERIOUS'] +8188-269288-0053-2934: ref=['ANNIE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFIDE', 'IN', 'ANYONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0053-2934: hyp=['AND', 'HE', 'DID', 'NOT', 'MEAN', 'TO', 'CONFINE', 'IN', 'ANY', 'ONE', 'THAT', 'NIGHT', 'AND', 'THE', 'KINDEST', 'THING', 'WAS', 'TO', 'LEAVE', 'HER', 'ALONE'] +8188-269288-0054-2935: ref=['TIRED', 'OUT', 'LESLIE', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0054-2935: hyp=['TIE', 'IT', 'OUT', 'LIZZLY', 'HERSELF', 'DROPPED', 'ASLEEP'] +8188-269288-0055-2936: ref=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0055-2936: hyp=['ANNIE', 'IS', 'THAT', 'YOU', 'SHE', 'CALLED', 'OUT'] +8188-269288-0056-2937: ref=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEY', 'WERE', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0056-2937: hyp=['THERE', 'WAS', 'NO', 'REPLY', 'BUT', 'THE', 'SOUND', 'OF', 'HURRYING', 'STEPS', 'CAME', 'QUICKER', 'AND', 'QUICKER', 'NOW', 'AND', 'THEN', 'THEIR', 'INTERRUPTED', 'BY', 'A', 'GROAN'] +8188-269288-0057-2938: ref=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269288-0057-2938: hyp=['OH', 'THIS', 'WILL', 'KILL', 'ME', 'MY', 'HEART', 'WILL', 'BREAK', 'THIS', 'WILL', 'KILL', 'ME'] +8188-269290-0000-2823: 
ref=['THE', 'GUILD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0000-2823: hyp=['THE', 'GOLD', 'OF', 'SAINT', 'ELIZABETH'] +8188-269290-0001-2824: ref=['IMMEDIATELY', 'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0001-2824: hyp=['IMMEDIATELY', 'AFTER', 'DINNER', 'THAT', 'EVENING', 'LESLIE', 'RAN', 'UP', 'TO', 'HER', 'ROOM', 'TO', 'MAKE', 'PREPARATIONS', 'FOR', 'HER', 'VISIT', 'TO', 'EAST', 'HALL'] +8188-269290-0002-2825: ref=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0002-2825: hyp=["I'M", 'NOT', 'COMING', 'SAID', 'ANNIE'] +8188-269290-0003-2826: ref=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'IN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0003-2826: hyp=['EVERY', 'STUDENT', 'IS', 'TO', 'BE', 'AN', 'EAST', 'HALL', 'AT', 'HALF', 'PAST', 'EIGHT'] +8188-269290-0004-2827: ref=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0004-2827: hyp=['IT', "DOESN'T", 'MATTER', 'REPLIED', 'ANNIE', 'WHETHER', 'IT', 'IS', 'AN', 'ORDER', 'OR', 'NOT', "I'M", 'NOT', 'COMING', 'SAY', 'NOTHING', 'ABOUT', 'ME', 'PLEASE'] +8188-269290-0005-2828: ref=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0005-2828: hyp=['IT', 'BURNED', 'AS', 'IF', 'WITH', 'FEVER'] +8188-269290-0006-2829: ref=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0006-2829: hyp=['YOU', "DON'T", 'KNOW', 'WHAT', 'A', 'TRIAL', 'IT', 'IS', 'FOR', 'ME', 'TO', 'HAVE', 'YOU', 'HERE'] +8188-269290-0007-2830: ref=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0007-2830: hyp=['I', 'WANT', 'TO', 'BE', 'ALONE', 'GO'] +8188-269290-0008-2831: ref=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0008-2831: hyp=['I', 'KNOW', 'YOU', "DON'T", 'QUITE', 'MEAN', 'WHAT', 'YOU', 'SAY', 'SAID', 'LESLIE', 'BUT', 'OF', 'COURSE', 'IF', 'YOU', 'REALLY', 'WISH', 'ME'] +8188-269290-0009-2832: ref=['YOU', 'FRET', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0009-2832: hyp=['YOU', 'FRITTEN', 'ME', 'BEYOND', 'ENDURANCE'] +8188-269290-0010-2833: ref=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'ROUND', 'HER', 'HEAD', 'AND', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0010-2833: hyp=['WRAPPING', 'A', 'PRETTY', 'BLUE', 'SHAWL', 'AROUND', 'A', 'HIDDEN', 'SHOULDERS', 'SHE', 'TURNED', 'TO', 'ANNIE'] +8188-269290-0011-2834: ref=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANNIE', 'CALLED', 'AFTER', 'HER'] +8188-269290-0011-2834: hyp=['LESLIE', 'WAS', 'JUST', 'CLOSING', 'THE', 'DOOR', 'BEHIND', 'HER', 'WHEN', 'ANY', 'CALLED', 'AFTER', 'HER'] +8188-269290-0012-2835: ref=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LESLIE', 'TOOK', 'IT', 'OUT'] +8188-269290-0012-2835: hyp=['I', 'TOOK', 'IT', 'OUT', 'SAID', 'LIZZIE', 'TOOK', 'IT', 'OUT'] +8188-269290-0013-2836: ref=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0013-2836: hyp=['HAVE', 'THE', 'GOODNESS', 'TO', 'FIND', 'IT', 'AND', 'PUT', 'IT', 'BACK'] +8188-269290-0014-2837: ref=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANNIE'] +8188-269290-0014-2837: hyp=['BUT', "DON'T", 'LOCK', 'ME', 'OUT', 'PLEASE', 'ANNIE'] +8188-269290-0015-2838: ref=['OH', 'I', "WON'T", 'LOCK', 'YOU', 'OUT', 'SHE', 
'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 'KEY'] +8188-269290-0015-2838: hyp=['OH', 'I', "WON'T", 'LOOK', 'YOU', 'OUT', 'SHE', 'SAID', 'BUT', 'I', 'MUST', 'HAVE', 'THE', 'KEY'] +8188-269290-0016-2839: ref=['JANE', "HERIOT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0016-2839: hyp=['JANE', "HERETT'S", 'VOICE', 'WAS', 'HEARD', 'IN', 'THE', 'PASSAGE'] +8188-269290-0017-2840: ref=['AS', 'SHE', 'WALKED', 'DOWN', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'IN', 'THE', 'LOCK'] +8188-269290-0017-2840: hyp=['AS', 'SHE', 'WALKED', 'ROUND', 'THE', 'CORRIDOR', 'SHE', 'HEARD', 'IT', 'BEING', 'TURNED', 'TO', 'THE', 'LOCK'] +8188-269290-0018-2841: ref=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0018-2841: hyp=['WHAT', 'CAN', 'THIS', 'MEAN', 'SHE', 'SAID', 'TO', 'HERSELF'] +8188-269290-0019-2842: ref=['OH', 'I', "WON'T", 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0019-2842: hyp=['OH', 'I', "WON'T", 'PRESS', 'YOU', 'REPLIED', 'JANE'] +8188-269290-0020-2843: ref=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] +8188-269290-0020-2843: hyp=['OH', 'I', 'SHALL', 'NEVER', 'DO', 'THAT', 'REPLIED', 'LESLIE'] +8188-269290-0021-2844: ref=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'EILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0021-2844: hyp=['YOU', 'SEE', 'ALL', 'THE', 'GIRLS', 'EXCEPT', 'AILEEN', 'AND', 'MARJORIE', 'LAUGH', 'AT', 'HER', 'AND', 'THAT', 'SEEMS', 'TO', 'ME', 'TO', 'MAKE', 'HER', 'WORSE'] +8188-269290-0022-2845: ref=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0022-2845: hyp=['SOME', 'DAY', 'JANE', 'YOU', 'MUST', 'SEE', 'HER'] +8188-269290-0023-2846: ref=['IF', 'YOU', 'ARE', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'AND', 'PAY', 'US', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0023-2846: hyp=['IF', 'YOU', 'IN', 'LONDON', 'DURING', 'THE', 'SUMMER', 'YOU', 'MUST', 'COME', 'AND', 'PASS', 'A', 'VISIT', 'WILL', 'YOU'] +8188-269290-0024-2847: ref=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0024-2847: hyp=['THAT', 'IS', 'IF', 'YOU', 'CARE', 'TO', 'CONFIDE', 'IN', 'ME'] +8188-269290-0025-2848: ref=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0025-2848: hyp=['I', 'BELIEVE', 'POOR', 'ANNIE', 'IS', 'DREADFULLY', 'UNHAPPY'] +8188-269290-0026-2849: ref=["THAT'S", 'JUST', 'IT', 'JANE', 'THAT', 'IS', 'WHAT', 'FRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0026-2849: hyp=["THAT'S", 'JUST', 'A', 'CHANT', 'THAT', 'IS', 'WHAT', 'BRIGHTENS', 'ME', 'SHE', 'REFUSES', 'TO', 'COME'] +8188-269290-0027-2850: ref=['REFUSES', 'TO', 'COME', 'SHE', 'CRIED'] +8188-269290-0027-2850: hyp=['REFUSES', 'TO', 'COME', 'SHE', 'CRIED'] +8188-269290-0028-2851: ref=['SHE', 'WILL', 'GET', 'INTO', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0028-2851: hyp=["SHE'LL", 'GET', 'IN', 'AN', 'AWFUL', 'SCRAPE'] +8188-269290-0029-2852: ref=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0029-2852: hyp=['I', 'AM', 'SURE', 'SHE', 'IS', 'ILL', 'SHE', 'WORKS', 'TOO', 'HARD', 'AND', 'SHE', 'BUT', 'THERE', 'I', "DON'T", 'KNOW', 'THAT', 'I', 'OUGHT', 'TO', 'SAY', 'ANY', 'MORE'] +8188-269290-0030-2853: ref=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 'SAID', 'LESLIE'] +8188-269290-0030-2853: hyp=["I'LL", 'WAIT', 'FOR', 'YOU', 'HERE', 
'SAID', 'LESLIE'] +8188-269290-0031-2854: ref=['DO', 'COME', 'ANNIE', 'DO'] +8188-269290-0031-2854: hyp=['DO', 'COME', 'ANY', 'DO'] +8188-269290-0032-2855: ref=['SCARCELY', 'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0032-2855: hyp=['SCARCELY', 'LIKELY', 'REPLIED', 'LESLIE', 'SHE', 'TOLD', 'ME', 'SHE', 'WAS', 'DETERMINED', 'NOT', 'TO', 'COME', 'TO', 'THE', 'MEETING'] +8188-269290-0033-2856: ref=['BUT', 'MARJORIE', 'AND', 'EILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0033-2856: hyp=['BUT', 'MARGERY', 'AND', 'AILEEN', 'HAD', 'ALREADY', 'DEPARTED', 'AND', 'LESLIE', 'AND', 'JANE', 'FOUND', 'THEMSELVES', 'AMONG', 'THE', 'LAST', 'STUDENTS', 'TO', 'ARRIVE', 'AT', 'THE', 'GREAT', 'EAST', 'HALL'] +8188-269290-0034-2857: ref=['MISS', 'LAUDERDALE', 'WAS', 'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPALS', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0034-2857: hyp=['MISS', 'LAURDALE', 'WAS', 'STANDING', 'WITH', 'THE', 'OTHER', 'TUTORS', 'AND', 'PRINCIPLES', 'OF', 'THE', 'DIFFERENT', 'HALLS', 'ON', 'A', 'RAISED', 'PLATFORM'] +8188-269290-0035-2858: ref=['THEN', 'A', 'ROLL', 'CALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTEE', 'WAS', 'ANNIE', 'COLCHESTER'] +8188-269290-0035-2858: hyp=['THEN', 'A', 'RAW', 'CALL', 'WAS', 'GONE', 'THROUGH', 'BY', 'ONE', 'OF', 'THE', 'TUTORS', 'THE', 'ONLY', 'ABSENTE', 'WAS', 'ANY', 'COLCHESTER'] +8188-269290-0036-2859: ref=['THE', 'PHYSICAL', 'PART', 'OF', 'YOUR', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0036-2859: hyp=['THE', 'PHYSICAL', 'PART', 'OF', 'THE', 'OLD', 'TRAINING', 'AND', 'ALSO', 'THE', 'MENTAL', 'PART', 'ARE', 'ABUNDANTLY', 'SUPPLIED', 'IN', 'THIS', 'GREAT', 'HOUSE', 'OF', 'LEARNING', 'SHE', 'CONTINUED', 'BUT', 'THE', 'SPIRITUAL', 'PART', 'IT', 'SEEMS', 'TO', 'ME', 'OUGHT', 'NOW', 'TO', 'BE', 'STRENGTHENED'] +8188-269290-0037-2860: ref=['HEAR', 'HEAR', 'AND', 'ONCE', 'AGAIN', 'HEAR'] +8188-269290-0037-2860: hyp=['HAIR', 'HAIR', 'AND', 'ONCE', 'AGAIN', 'HAIR'] +8188-269290-0038-2861: ref=['SHE', 'UTTERED', 'HER', 'STRANGE', 'REMARK', 'STANDING', 'UP'] +8188-269290-0038-2861: hyp=['SHE', 'UTTERED', 'A', 'STRAIN', 'REMARK', 'STANDING', 'UP'] +8188-269290-0039-2862: ref=['MARJORIE', 'AND', 'EILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0039-2862: hyp=['MARJORIE', 'AND', 'AILEEN', 'WERE', 'CLOSE', 'TO', 'HER'] +8188-269290-0040-2863: ref=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELLE', 'ACHESON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0040-2863: hyp=['I', 'WILL', 'TALK', 'WITH', 'YOU', 'BELL', 'ARCHISON', 'PRESENTLY', 'SHE', 'SAID'] +8188-269290-0041-2864: ref=['THE', 'NAMES', 'OF', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0041-2864: hyp=['THE', 'NAMES', 'OF', 'THE', 'PROPOSED', 'MEMBERS', 'ARE', 'TO', 'BE', 'SUBMITTED', 'TO', 'ME', 'BEFORE', 'THIS', 'DAY', 'WEEK'] +8188-269290-0042-2865: ref=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0042-2865: hyp=['AM', 'I', 'MY', "BROTHER'S", 'KEEPER'] +8188-269290-0043-2866: 
ref=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0043-2866: hyp=['YOU', 'ASK', 'SHE', 'CONTINUED'] +8188-269290-0044-2867: ref=['GOD', 'ANSWERS', 'TO', 'EACH', 'OF', 'YOU', 'YOU', 'ARE'] +8188-269290-0044-2867: hyp=['GOD', 'ADDEST', 'EACH', 'OF', 'YOU', 'YOU', 'ARE'] +8188-269290-0045-2868: ref=['THE', 'WORLD', 'SAYS', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'YES', 'YOU', 'ARE'] +8188-269290-0045-2868: hyp=['THE', 'WORLD', 'TEETH', 'NO', 'I', 'AM', 'NOT', 'BUT', 'GOD', 'SAYS', 'IS', 'YOU', 'ARE'] +8188-269290-0046-2869: ref=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0046-2869: hyp=['ALL', 'MEN', 'ARE', 'YOUR', 'BROTHERS'] +8188-269290-0047-2870: ref=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'A', 'CERTAIN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0047-2870: hyp=['FOR', 'ALL', 'WHO', 'SIN', 'ALL', 'WHO', 'SUFFER', 'YOU', 'ARE', 'TO', 'EXERT', 'AN', 'EXTENT', 'RESPONSIBLE'] +8188-269290-0048-2871: ref=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0048-2871: hyp=['AFTER', 'THE', 'ADDRESS', 'THE', 'GIRLS', 'THEMSELVES', 'WERE', 'ENCOURAGED', 'TO', 'SPEAK', 'AND', 'A', 'VERY', 'ANIMATED', 'DISCUSSION', 'FOLLOWED'] +8188-269290-0049-2872: ref=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0049-2872: hyp=['IT', 'WAS', 'PAST', 'TEN', "O'CLOCK", 'WHEN', 'SHE', 'LEFT', 'THE', 'HALL'] +8188-269290-0050-2873: ref=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0050-2873: hyp=['JUST', 'AS', 'SHE', 'WAS', 'DOING', 'SO', 'MISS', 'FRERE', 'CAME', 'UP'] +8188-269290-0051-2874: ref=['ANNIE', 'COLCHESTER', 'IS', 'YOUR', 'ROOMFELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0051-2874: hyp=['ANY', 'COLCHESTER', 'AS', 'YOUR', 'ROOM', 'FELLOW', 'IS', 'SHE', 'NOT', 'SHE', 'SAID'] +8188-269290-0052-2875: ref=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0052-2875: hyp=['I', 'SEE', 'BY', 'YOUR', 'FACE', 'MISS', 'GILROY', 'THAT', 'YOU', 'ARE', 'DISTRESSED', 'ABOUT', 'SOMETHING', 'ARE', 'YOU', 'KEEPING', 'ANYTHING', 'BACK'] +8188-269290-0053-2876: ref=['I', 'AM', 'AFRAID', 'I', 'AM', 'REPLIED', 'LESLIE', 'DISTRESS', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0053-2876: hyp=["I'M", 'AFRAID', 'I', 'AM', 'REPLIED', 'LIZZIE', 'DISTRESSED', 'NOW', 'IN', 'HER', 'TONE'] +8188-269290-0054-2877: ref=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAUDERDALE', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0054-2877: hyp=['I', 'MUST', 'SEE', 'HER', 'MYSELF', 'EARLY', 'IN', 'THE', 'MORNING', 'AND', 'I', 'AM', 'QUITE', 'SURE', 'THAT', 'NOTHING', 'WILL', 'SATISFY', 'MISS', 'LAURAIL', 'EXCEPT', 'A', 'VERY', 'AMPLE', 'APOLOGY', 'AND', 'A', 'FULL', 'EXPLANATION', 'OF', 'THE', 'REASON', 'WHY', 'SHE', 'ABSENTED', 'HERSELF'] +8188-269290-0055-2878: ref=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0055-2878: hyp=['EXCUSES', 'MAKE', 'NO', 'DIFFERENCE'] +8188-269290-0056-2879: ref=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAS', 'TO', 'BE', 'PUNISHED'] +8188-269290-0056-2879: hyp=['THE', 'GIRL', 'WHO', 'BREAKS', 'THE', 'RULES', 'HAVE', 'TO', 
'BE', 'PUNISHED'] +8188-269290-0057-2880: ref=['I', 'WILL', 'TELL', 'HER'] +8188-269290-0057-2880: hyp=['I', 'WILL', 'TELL', 'HER'] +8188-274364-0000-2811: ref=['THE', 'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEIR', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0000-2811: hyp=['THE', 'COMMONS', 'ALSO', 'VOTED', 'THAT', 'THE', 'NEW', 'CREATED', 'PEERS', 'OUGHT', 'TO', 'HAVE', 'NO', 'VOICE', 'IN', 'THIS', 'TRIAL', 'BECAUSE', 'THE', 'ACCUSATION', 'BEING', 'AGREED', 'TO', 'WHILE', 'THEY', 'WERE', 'COMMONERS', 'THEY', 'CONSENT', 'TO', 'IT', 'WAS', 'IMPLIED', 'WITH', 'THAT', 'OF', 'ALL', 'THE', 'COMMONS', 'OF', 'ENGLAND'] +8188-274364-0001-2812: ref=['IN', 'THE', 'GOVERNMENT', 'OF', 'IRELAND', 'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTIVE', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0001-2812: hyp=['IN', 'THE', 'GOVERNMENT', 'OF', 'IRELAND', 'HIS', 'ADMINISTRATION', 'HAD', 'BEEN', 'EQUALLY', 'PROMOTED', 'OF', 'HIS', "MASTER'S", 'INTEREST', 'AND', 'THAT', 'OF', 'THE', 'SUBJECTS', 'COMMITTED', 'TO', 'HIS', 'CARE'] +8188-274364-0002-2813: ref=['THE', 'CASE', 'OF', 'LORD', 'MOUNTNORRIS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0002-2813: hyp=['THE', 'CASE', 'OF', 'LORD', 'MONTORAS', 'OF', 'ALL', 'THOSE', 'WHICH', 'WERE', 'COLLECTED', 'WITH', 'SO', 'MUCH', 'INDUSTRY', 'IS', 'THE', 'MOST', 'FLAGRANT', 'AND', 'THE', 'LEAST', 'EXCUSABLE'] +8188-274364-0003-2814: ref=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHIEF', 'OFFICERS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPITAL', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0003-2814: hyp=['THE', 'COURT', 'WHICH', 'CONSISTED', 'OF', 'THE', 'CHEAP', 'OFFICIALS', 'OF', 'THE', 'ARMY', 'FOUND', 'THE', 'CRIME', 'TO', 'BE', 'CAPT', 'ON', 'AND', 'CONDEMNED', 'THAT', 'NOBLEMAN', 'TO', 'LOSE', 'HIS', 'HEAD'] +8188-274364-0004-2815: ref=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHOULD', 'DISCOVER', 'IT'] +8188-274364-0004-2815: hyp=['WHERE', 'THE', 'TOKEN', 'BY', 'WHICH', 'I', 'SHALL', 'DISCOVER', 'IT'] +8188-274364-0005-2816: ref=['IT', 'IS', 'NOW', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0005-2816: hyp=['IT', 'IS', 'NOW', 'A', 'FULL', 'TWO', 'HUNDRED', 'AND', 'FORTY', 'YEARS', 'SINCE', 'TREASONS', 'WERE', 'DEFINED', 'AND', 'SO', 'LONG', 'HAS', 'IT', 'BEEN', 'SINCE', 'ANY', 'MAN', 'WAS', 'TOUCHED', 'TO', 'THIS', 'EXTENT', 'UPON', 'THIS', 'CRIME', 'BEFORE', 'MYSELF'] +8188-274364-0006-2817: ref=['LET', 'US', 'NOT', 'TO', 'OUR', 'OWN', 'DESTRUCTION', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 'OF', 'OLD', 'RECORDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WALL', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0006-2817: hyp=['LET', 'US', 'NOT', 'TO', 'UNDERSTRUCTION', 'AWAKE', 'THOSE', 'SLEEPING', 'LIONS', 'BY', 'RATTLING', 'UP', 'A', 'COMPANY', 
'OF', 'OLD', 'RECORDS', 'WHICH', 'HAVE', 'LAIN', 'FOR', 'SO', 'MANY', 'AGES', 'BY', 'THE', 'WAR', 'FORGOTTEN', 'AND', 'NEGLECTED'] +8188-274364-0007-2818: ref=['HOWEVER', 'THESE', 'GENTLEMEN', 'AT', 'THE', 'BAR', 'SAY', 'THEY', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'COMMONWEALTH'] +8188-274364-0007-2818: hyp=['HERBERT', 'THESE', 'GENTLEMAN', 'OF', 'THE', 'BAR', 'SO', 'THEY', 'SPEAK', 'FOR', 'THE', 'CONWEALTH', 'AND', 'THEY', 'BELIEVE', 'SO', 'YET', 'UNDER', 'FAVOUR', 'IT', 'IS', 'I', 'WHO', 'IN', 'THIS', 'PARTICULAR', 'SPEAK', 'FOR', 'THE', 'CORNWEALTH'] +8188-274364-0008-2819: ref=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0008-2819: hyp=['MY', 'LORDS', 'I', 'HAVE', 'NOW', 'TROUBLED', 'YOUR', 'LORDSHIPS', 'A', 'GREAT', 'DEAL', 'LONGER', 'THAN', 'I', 'SHOULD', 'HAVE', 'DONE'] +8188-274364-0009-2820: ref=['YOUNG', 'VANE', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 'COMMUNICATED', 'IT', 'TO', 'PYM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0009-2820: hyp=['YOUNG', 'VAIN', 'FALLING', 'UPON', 'THIS', 'PAPER', 'OF', 'NOTES', 'DEEMED', 'THE', 'MATTER', 'OF', 'THE', 'UTMOST', 'IMPORTANCE', 'AND', 'IMMEDIATELY', 'COMMUNICATED', 'IT', 'TO', 'POEM', 'WHO', 'NOW', 'PRODUCED', 'THE', 'PAPER', 'BEFORE', 'THE', 'HOUSE', 'OF', 'COMMONS'] +8188-274364-0010-2821: ref=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENSIVE', 'WAR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0010-2821: hyp=['THE', 'KING', 'PROPOSES', 'THIS', 'DIFFICULTY', 'BUT', 'HOW', 'CAN', 'I', 'UNDERTAKE', 'OFFENSIVE', 'FOR', 'IF', 'I', 'HAVE', 'NO', 'MORE', 'MONEY'] +8188-274364-0011-2822: ref=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSOLVED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8188-274364-0011-2822: hyp=['YOUR', 'MAJESTY', 'HAVING', 'TRIED', 'THE', 'AFFECTIONS', 'OF', 'YOUR', 'PEOPLE', 'YOU', 'ARE', 'ABSORBED', 'AND', 'LOOSE', 'FROM', 'ALL', 'RULES', 'OF', 'GOVERNMENT', 'AND', 'MAY', 'DO', 'WHAT', 'POWER', 'WILL', 'ADMIT'] +8280-266249-0000-339: ref=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0000-339: hyp=['OLD', 'MISTER', 'DINSMORE', 'HAD', 'ACCEPTED', 'A', 'PRESSING', 'INVITATION', 'FROM', 'HIS', 'GRANDDAUGHTER', 'AND', 'HER', 'HUSBAND', 'TO', 'JOIN', 'THE', 'PARTY', 'AND', 'WITH', 'THE', 'ADDITION', 'OF', 'SERVANTS', 'IT', 'WAS', 'A', 'LARGE', 'ONE'] +8280-266249-0001-340: ref=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0001-340: hyp=['AS', 'THEY', 'WERE', 'IN', 'NO', 'HASTE', 'AND', 'THE', 'CONFINEMENT', 'OF', 'A', 'RAILROAD', 'CAR', 'WOULD', 'BE', 'VERY', 'IRKSOME', 'TO', 'THE', 'YOUNGER', 'CHILDREN', 'IT', 'HAD', 
'BEEN', 'DECIDED', 'TO', 'MAKE', 'THE', 'JOURNEY', 'BY', 'WATER'] +8280-266249-0002-341: ref=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0002-341: hyp=['THERE', 'WERE', 'NO', 'SAD', 'LEAVE', 'TAKINGS', 'TO', 'MAR', 'THEIR', 'PLEASURE', 'THE', 'CHILDREN', 'WERE', 'IN', 'WILD', 'SPIRITS', 'AND', 'ALL', 'SEEMED', 'CHEERFUL', 'AND', 'HAPPY', 'AS', 'THEY', 'SAT', 'OR', 'STOOD', 'UPON', 'THE', 'DECK', 'WATCHING', 'THE', 'RECEDING', 'SHORE', 'AS', 'THE', 'VESSEL', 'STEAMED', 'OUT', 'OF', 'THE', 'HARBOR'] +8280-266249-0003-342: ref=['AT', 'LENGTH', 'THE', 'LAND', 'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'AROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0003-342: hyp=['AT', 'LENGTH', 'THE', 'LAND', 'HAD', 'QUITE', 'DISAPPEARED', 'NOTHING', 'COULD', 'BE', 'SEEN', 'BUT', 'THE', 'SKY', 'OVERHEAD', 'AND', 'A', 'VAST', 'EXPANSE', 'OF', 'WATER', 'ALL', 'ROUND', 'AND', 'THE', 'PASSENGERS', 'FOUND', 'LEISURE', 'TO', 'TURN', 'THEIR', 'ATTENTION', 'UPON', 'EACH', 'OTHER'] +8280-266249-0004-343: ref=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0004-343: hyp=['THERE', 'ARE', 'SOME', 'NICE', 'LOOKING', 'PEOPLE', 'ON', 'BOARD', 'REMARKED', 'MISTER', 'TRAVILLA', 'IN', 'AN', 'UNDERTONE', 'TO', 'HIS', 'WIFE'] +8280-266249-0005-344: ref=['BESIDE', 'OURSELVES', 'ADDED', 'COUSIN', 'RONALD', 'LAUGHING'] +8280-266249-0005-344: hyp=['BESIDES', 'OURSELVES', 'ADDED', 'COUSIN', 'RANALD', 'LAUGHING'] +8280-266249-0006-345: ref=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0006-345: hyp=['YES', 'SHE', 'ANSWERED', 'THAT', 'LITTLE', 'GROUP', 'YONDER', 'A', 'YOUNG', 'MINISTER', 'AND', 'HIS', 'WIFE', 'AND', 'CHILD', 'I', 'SUPPOSE'] +8280-266249-0007-346: ref=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HAROLD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0007-346: hyp=['AND', 'WHAT', 'A', 'DEAR', 'LITTLE', 'FELLOW', 'HE', 'IS', 'JUST', 'ABOUT', 'THE', 'AGE', 'OF', 'OUR', 'HERALD', 'I', 'SHOULD', 'JUDGE'] +8280-266249-0008-347: ref=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0008-347: hyp=['DO', 'YOU', 'SON', 'WAS', 'THE', 'SMILING', 'REJOINDER'] +8280-266249-0009-348: ref=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0009-348: hyp=['HE', 'CERTAINLY', 'LOOKS', 'LIKE', 'A', 'VERY', 'NICE', 'LITTLE', 'BOY'] +8280-266249-0010-349: ref=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0010-349: hyp=['SUPPOSE', 'YOU', 'AND', 'HE', 'SHAKE', 'HANDS', 'FRANK'] +8280-266249-0011-350: ref=['I', 'DO', 'INDEED', 'THOUGH', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0011-350: hyp=['I', 'DO', 'INDEED', 'THE', 'PROBABLY', 'COMPARATIVELY', 'FEW', 'ARE', 'AWARE', 'THAT', 'TOBACCO', 'IS', 
'THE', 'CAUSE', 'OF', 'THEIR', 'AILMENTS'] +8280-266249-0012-351: ref=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] +8280-266249-0012-351: hyp=['DOUBTLESS', 'THAT', 'IS', 'THE', 'CASE', 'REMARKED', 'MISTER', 'DINSMORE'] +8280-266249-0013-352: ref=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMEN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0013-352: hyp=['WITH', 'ALL', 'MY', 'HEART', 'IF', 'YOU', 'WILL', 'STEP', 'INTO', 'THE', "GENTLEMAN'S", 'CABIN', 'WHERE', "THERE'S", 'A', 'LIGHT'] +8280-266249-0014-353: ref=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 'PASSENGERS'] +8280-266249-0014-353: hyp=['HE', 'LED', 'THE', 'WAY', 'THE', 'OTHERS', 'ALL', 'FOLLOWING', 'AND', 'TAKING', 'OUT', 'A', 'SLIP', 'OF', 'PAPER', 'READ', 'FROM', 'IT', 'IN', 'A', 'DISTINCT', 'TONE', 'LOUD', 'ENOUGH', 'TO', 'BE', 'HEARD', 'BY', 'THOSE', 'ALL', 'ABOUT', 'HIM', 'WITHOUT', 'DISTURBING', 'THE', 'OTHER', 'PASSENGERS'] +8280-266249-0015-354: ref=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACT', 'OF', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'A', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'A', 'GRAIN', 'PICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0015-354: hyp=['ONE', 'DROP', 'OF', 'NICOTINE', 'EXTRACTED', 'TOBACCO', 'PLACED', 'ON', 'THE', 'TONGUE', 'OF', 'THE', 'DOG', 'WILL', 'KILL', 'HIM', 'IN', 'A', 'MINUTE', 'THE', 'HUNDREDTH', 'PART', 'OF', 'THE', 'GRAIN', 'PRICKED', 'UNDER', 'THE', 'SKIN', 'OF', 'A', "MAN'S", 'ARM', 'WILL', 'PRODUCE', 'NAUSEA', 'AND', 'FAINTING'] +8280-266249-0016-355: ref=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'USE', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0016-355: hyp=['THE', 'HALF', 'DOZEN', 'CIGARS', 'WHICH', 'MOST', 'SMOKERS', 'YEARS', 'A', 'DAY', 'CONTAIN', 'SIX', 'OR', 'SEVEN', 'GRAINS', 'ENOUGH', 'IF', 'CONCENTRATED', 'AND', 'ABSORBED', 'TO', 'KILL', 'THREE', 'MEN', 'AND', 'A', 'POUND', 'OF', 'TOBACCO', 'ACCORDING', 'TO', 'ITS', 'QUALITY', 'CONTAINS', 'FROM', 'ONE', 'QUARTER', 'TO', 'ONE', 'AND', 'A', 'QUARTER', 'OUNCES'] +8280-266249-0017-356: ref=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0017-356: hyp=['IS', 'IT', 'STRANGE', 'THEN', 'THAT', 'SMOKERS', 'AND', 'CHEWERS', 'HAVE', 'A', 'THOUSAND', 'AILMENTS'] +8280-266249-0018-357: ref=['THAT', 'THE', 'FRENCH', 'POLYTECHNIC', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'ON', 'THE', 'MIND'] +8280-266249-0018-357: hyp=['THAT', 'THE', 'FRENCH', 'POLY', 'TECHNIC', 'AT', 'INSTITUTE', 'HAD', 'TO', 'PROHIBIT', 'ITS', 'THE', 'USE', 'ON', 'ACCOUNT', 'OF', 'ITS', 'EFFECTS', 'UPON', 'THE', 'MIND'] +8280-266249-0019-358: ref=['NOTICE', 'THE', 'MULTITUDE', 'OF', 'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0019-358: hyp=['NOTICED', 'THE', 'MULTITUDE', 'OF', 
'SUDDEN', 'DEATHS', 'AND', 'SEE', 'HOW', 'MANY', 'ARE', 'SMOKERS', 'AND', 'CHEWERS'] +8280-266249-0020-359: ref=['IN', 'A', 'SMALL', 'COUNTRY', 'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0020-359: hyp=['AND', 'A', 'SMALL', 'COUNTRY', 'TOWN', 'SEVEN', 'OF', 'THESE', 'MYSTERIOUS', 'PROVIDENCES', 'OCCURRED', 'WITHIN', 'THE', 'CIRCUIT', 'OF', 'A', 'MILE', 'ALL', 'DIRECTLY', 'TRACEABLE', 'TO', 'TOBACCO', 'AND', 'ANY', 'PHYSICIAN', 'ON', 'A', 'FEW', 'MOMENTS', 'REFLECTION', 'CAN', 'MATCH', 'THIS', 'FACT', 'BY', 'HIS', 'OWN', 'OBSERVATION'] +8280-266249-0021-360: ref=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0021-360: hyp=['AND', 'THEN', 'SUCH', 'POWERFUL', 'ACIDS', 'PRODUCE', 'INTENSE', 'IRRITATION', 'AND', 'THIRST', 'THIRST', 'WHICH', 'WATER', 'DOES', 'NOT', 'QUENCH'] +8280-266249-0022-361: ref=['HENCE', 'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0022-361: hyp=['HENCE', 'A', 'RESORT', 'TO', 'CIDER', 'AND', 'BEER'] +8280-266249-0023-362: ref=['NO', 'SIR', 'WHAT', 'KNOW', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0023-362: hyp=['NO', 'SIR', 'WHAT', 'NO', 'YE', 'NOT', 'THAT', 'YOUR', 'BODY', 'IS', 'THE', 'TEMPLE', 'OF', 'THE', 'HOLY', 'GHOST', 'WHICH', 'IS', 'IN', 'YOU', 'WHICH', 'YE', 'HAVE', 'OF', 'GOD', 'AND', 'YE', 'ARE', 'NOT', 'YOUR', 'OWN'] +8280-266249-0024-363: ref=['FOR', 'YE', 'ARE', 'BOUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0024-363: hyp=['FOR', 'YOU', 'ARE', 'BOUGHT', 'WITH', 'A', 'PRICE', 'THEREFORE', 'GLORIFY', 'GOD', 'IN', 'YOUR', 'BODY', 'AND', 'IN', 'YOUR', 'SPIRIT', 'WHICH', 'ARE', "GOD'S"] +8280-266249-0025-364: ref=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0025-364: hyp=['WE', 'CERTAINLY', 'HAVE', 'NO', 'RIGHT', 'TO', 'INJURE', 'OUR', 'BODIES', 'EITHER', 'BY', 'NEGLECT', 'OR', 'SELF', 'INDULGENCE'] +8280-266249-0026-365: ref=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'HOLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0026-365: hyp=['AND', 'AGAIN', 'I', 'BESEECH', 'YOU', 'THEREFORE', 'BRETHREN', 'BY', 'THE', 'MERCIES', 'OF', 'GOD', 'THAT', 'YE', 'PRESENT', 'YOUR', 'BODIES', 'A', 'LIVING', 'SACRIFICE', 'WHOLLY', 'ACCEPTABLE', 'UNTO', 'GOD', 'WHICH', 'IS', 'YOUR', 'REASONABLE', 'SERVICE'] +8280-266249-0027-366: ref=['IT', 'MUST', 'REQUIRE', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALY'] +8280-266249-0027-366: hyp=['IT', 'MUST', 'REQUIRRE', 'A', 'GOOD', 'DEAL', 'OF', 'RESOLUTION', 'FOR', 'ONE', 'WHO', 'HAS', 'BECOME', 'FOND', 'OF', 'THE', 'INDULGENCE', 'TO', 'GIVE', 'IT', 'UP', 'REMARKED', 'MISTER', 'DALEY'] 
+8280-266249-0028-367: ref=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LILBURN', 'BUT', 'IF', 'THY', 'RIGHT', 'EYE', 'OFFEND', 'THEE', 'PLUCK', 'IT', 'OUT', 'AND', 'CAST', 'IT', 'FROM', 'THEE', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0028-367: hyp=['NO', 'DOUBT', 'NO', 'DOUBT', 'RETURNED', 'MISTER', 'LOWBOURNE', 'BUT', 'IF', 'I', 'WRITE', 'I', 'OFFEND', 'THEE', 'PLUCK', 'IT', 'UP', 'AND', 'CAST', 'IT', 'FROM', 'ME', 'FOR', 'IT', 'IS', 'PROFITABLE', 'FOR', 'THEE', 'THAT', 'ONE', 'OF', 'THY', 'MEMBERS', 'SHOULD', 'PERISH', 'AND', 'NOT', 'THAT', 'THY', 'WHOLE', 'BODY', 'SHOULD', 'BE', 'CAST', 'INTO', 'HELL'] +8280-266249-0029-368: ref=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0029-368: hyp=['THERE', 'WAS', 'A', 'PAUSE', 'BROKEN', 'BY', 'YOUNG', 'HORACE', 'WHO', 'HAD', 'BEEN', 'WATCHING', 'A', 'GROUP', 'OF', 'MEN', 'GATHERED', 'ABOUT', 'A', 'TABLE', 'AT', 'THE', 'FURTHER', 'END', 'OF', 'THE', 'ROOM'] +8280-266249-0030-369: ref=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THAT', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0030-369: hyp=['THEY', 'ARE', 'GAMBLING', 'YONDER', 'AND', "I'M", 'AFRAID', 'THAT', 'YOUNG', 'FELLOW', 'IS', 'BEING', 'BADLY', 'FLEECED', 'BY', 'THE', 'MIDDLE', 'AGED', 'MAN', 'OPPOSITE'] +8280-266249-0031-370: ref=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0031-370: hyp=['THE', 'EYES', 'OF', 'THE', 'WHOLE', 'PARTY', 'WERE', 'AT', 'ONCE', 'TURNED', 'IN', 'THAT', 'DIRECTION'] +8280-266249-0032-371: ref=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0032-371: hyp=['NO', 'SIR', 'HE', 'IS', 'NOT', 'HERE'] +8280-266249-0033-372: ref=['AND', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TO'] +8280-266249-0033-372: hyp=['AS', 'THE', 'DOOR', 'WAS', 'SLAMMED', 'VIOLENTLY', 'TOO'] +8280-266249-0034-373: ref=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0034-373: hyp=['NOW', 'THE', 'VOICE', 'CAME', 'FROM', 'THE', 'SKYLIGHT', 'OVERHEAD', 'APPARENTLY', 'AND', 'WITH', 'A', 'FIERCE', 'IMPRECATION', 'THE', 'IRATE', 'GAMESTER', 'RUSHED', 'UPON', 'DECK', 'AND', 'RAN', 'HITHER', 'AND', 'THITHER', 'IN', 'SEARCH', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0035-374: ref=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'IN', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'ROSE', 'HASTILY', 'HIS', 'FACE', 'DEATHLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 'HURRYING', 'INTO', 'HIS', 'STATE', 'ROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0035-374: hyp=['HIS', 'VICTIM', 'WHO', 'HAD', 'BEEN', 'LOOKING', 'ON', 'DURING', 'THE', 'LITTLE', 'SCENE', 'AND', 'LISTENING', 'TO', 'THE', 'MYSTERIOUS', 'VOICE', 'AND', 'SILENT', 'WIDE', 'EYED', 'WONDER', 'AND', 'FEAR', 'NOW', 'AROSE', 
'HASTILY', 'HIS', 'FACE', 'DEFTLY', 'PALE', 'WITH', 'TREMBLING', 'HANDS', 'GATHERED', 'UP', 'THE', 'MONEY', 'HE', 'HAD', 'STAKED', 'AND', 'HURRYING', 'TO', 'HIS', 'STATEROOM', 'LOCKED', 'HIMSELF', 'IN'] +8280-266249-0036-375: ref=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0036-375: hyp=['WHAT', 'DOES', 'IT', 'MEAN', 'CRIED', 'ONE'] +8280-266249-0037-376: ref=['A', 'VENTRILOQUIST', 'ABOARD', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0037-376: hyp=['A', 'VENTILLA', 'QUESTED', 'BORN', 'OF', 'COURSE', 'RETURNED', 'ANOTHER', "LET'S", 'FOLLOW', 'AND', 'SEE', 'THE', 'FUN'] +8280-266249-0038-377: ref=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0038-377: hyp=['I', 'WONDER', 'WHICH', 'OF', 'US', 'IT', 'IS', 'REMARKED', 'THE', 'FIRST', 'LOOKING', 'HARD', 'AT', 'OUR', 'PARTY', 'I', "DON'T", 'KNOW', 'BUT', 'COME', 'ON'] +8280-266249-0039-378: ref=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACKLEG', 'AND', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0039-378: hyp=['THAT', 'FELLOW', 'NICK', 'WARD', 'IS', 'A', 'NOTED', 'BLACK', 'LAG', 'IN', 'RUFFIAN', 'HAD', 'HIS', 'NOSE', 'BROKEN', 'IN', 'A', 'FIGHT', 'AND', 'IS', 'SENSITIVE', 'ON', 'THE', 'SUBJECT', 'WAS', 'CHEATING', 'OF', 'COURSE'] +8280-266249-0040-379: ref=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NONE', 'UP', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0040-379: hyp=['WHO', 'ASKED', 'THE', 'MATE', "I'VE", 'SEEN', 'NON', 'UP', 'HERE', 'THOUGH', 'THERE', 'ARE', 'SOME', 'IN', 'THE', 'STEERAGE'] +8280-266249-0041-380: ref=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0041-380: hyp=['THEY', 'HEARD', 'HIM', 'IN', 'SILENCE', 'WITH', 'A', 'COOL', 'PHLEGMATIC', 'INDIFFERENCE', 'MOST', 'EXASPERATING', 'TO', 'ONE', 'IN', 'HIS', 'PRESENT', 'MOOD'] +8280-266249-0042-381: ref=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0042-381: hyp=['A', 'MAN', 'OF', 'GIANT', 'SIZE', 'AND', 'HERCULEAN', 'STRENGTH', 'HAD', 'LAID', 'ASIDE', 'HIS', 'PIPE', 'AND', 'SLOWLY', 'RISING', 'TO', 'HIS', 'FEET', 'SEIZED', 'THE', 'SCOUNDREL', 'IN', 'HIS', 'POWERFUL', 'GRASP'] +8280-266249-0043-382: ref=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0043-382: hyp=['LET', 'ME', 'GO', 'YELLED', 'WARD', 'MAKING', 'A', 'DESPERATE', 'EFFORT', 'TO', 'FREE', 'HIS', 'ARMS'] +8280-266249-0044-383: ref=['I', 'DINKS', 'NO', 'I', 'DINKS', 'I', 'DEACH', 'YOU', 'VON', 'LESSON', 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0044-383: hyp=['I', 'DINKS', 'NO', 'I', 'THINK', 'I', 'DID', 'YOU', 'VON', "MESS'", 'RETURNED', 'HIS', 'CAPTOR', 'NOT', 'RELAXING', 'HIS', 'GRASP', 'IN', 'THE', 'LEAST'] +8280-266249-0045-384: ref=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SLUNK', 'AWAY', 'WITH', 'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 
'TORMENTOR'] +8280-266249-0045-384: hyp=['THE', 'GERMAN', 'RELEASED', 'HIS', 'PRISONER', 'AND', 'THE', 'LATTER', 'SUNK', 'AWAY', 'WITH', 'MUTTERED', 'THREATS', 'AND', 'IMPRECATIONS', 'UPON', 'THE', 'HEAD', 'OF', 'HIS', 'TORMENTOR'] +8280-266249-0046-385: ref=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALY', 'EACH', 'AT', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', "WARD'S", 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0046-385: hyp=['MISTER', 'LILBURN', 'AND', 'MISTER', 'DALEY', 'EACH', 'HAD', 'A', 'DIFFERENT', 'TIME', 'SOUGHT', 'OUT', 'THE', 'YOUNG', 'MAN', 'WORDS', 'INTENDED', 'VICTIM', 'AND', 'TRIED', 'TO', 'INFLUENCE', 'HIM', 'FOR', 'GOOD'] +8280-266249-0047-386: ref=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSION'] +8280-266249-0047-386: hyp=['YET', 'THERE', 'WAS', 'GAMBLING', 'AGAIN', 'THE', 'SECOND', 'NIGHT', 'BETWEEN', 'WARD', 'AND', 'SEVERAL', 'OTHERS', 'OF', 'HIS', 'PROFESSIONS'] +8280-266249-0048-387: ref=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 'MIDNIGHT'] +8280-266249-0048-387: hyp=['THEY', 'KEPT', 'IT', 'UP', 'TILL', 'AFTER', 'MIDNIGHT'] +8280-266249-0049-388: ref=['THEN', 'MISTER', 'LILBURN', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0049-388: hyp=['THEN', 'MISTER', 'LOWBORN', 'WAKING', 'FROM', 'HIS', 'FIRST', 'SLEEP', 'IN', 'A', 'STATEROOM', 'NEAR', 'BY', 'THOUGHT', 'HE', 'WOULD', 'BREAK', 'IT', 'UP', 'ONCE', 'MORE'] +8280-266249-0050-389: ref=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'THE', 'STAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0050-389: hyp=['AN', 'INTENSE', 'VOICELESS', 'EXCITEMENT', 'POSSESSED', 'THE', 'PLAYERS', 'FOR', 'THE', 'GAME', 'WAS', 'A', 'CLOSE', 'ONE', 'AND', 'MISTAKES', 'WERE', 'VERY', 'HEAVY'] +8280-266249-0051-390: ref=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREENBACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0051-390: hyp=['THEY', 'BENT', 'EAGERLY', 'OVER', 'THE', 'BOARD', 'EACH', 'WATCHING', 'WITH', 'FEVERISH', 'ANXIETY', 'HIS', "COMPANION'S", 'MOVEMENTS', 'EACH', 'CASTING', 'NOW', 'AND', 'AGAIN', 'A', 'GLOATING', 'EYE', 'UPON', 'THE', 'HEAP', 'OF', 'GOLD', 'AND', 'GREEN', 'BACKS', 'THAT', 'LAY', 'BETWEEN', 'THEM', 'AND', 'AT', 'TIMES', 'HALF', 'STRETCHING', 'OUT', 'HIS', 'HAND', 'TO', 'CLUTCH', 'IT'] +8280-266249-0052-391: ref=['A', 'DEEP', 'GROAN', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0052-391: hyp=['A', 'DEEP', 'GROUND', 'STARTLED', 'THEM', 'AND', 'THEY', 'SPRANG', 'TO', 'THEIR', 'FEET', 'PALE', 'AND', 'TREMBLING', 'WITH', 'SUDDEN', 'TERROR', 'EACH', 'HOLDING', 'HIS', 'BREATH', 'AND', 'STRAINING', 'HIS', 'EAR', 'TO', 'CATCH', 'A', 'REPETITION', 'OF', 'THE', 'DREAD', 'SOUND'] +8280-266249-0053-392: ref=['BUT', 'ALL', 'WAS', 'SILENT', 'AND', 'AFTER', 
'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 'SHAKE', 'OFF', 'THEIR', 'FEARS', 'WITH', 'A', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0053-392: hyp=['BUT', 'ALWAYS', 'SILENT', 'AND', 'AFTER', 'A', 'MOMENT', 'OF', 'ANXIOUS', 'WAITING', 'THEY', 'SAT', 'DOWN', 'TO', 'THEIR', 'GAME', 'AGAIN', 'TRYING', 'TO', 'CONCEAL', 'AND', 'SHAKE', 'OFF', 'THEIR', 'FEARS', 'FOR', 'THE', 'FORCED', 'UNNATURAL', 'LAUGH'] +8280-266249-0054-393: ref=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'WARD', 'LOOK', "WHAT'S", 'THERE', 'LOOK', 'YOURSELF'] +8280-266249-0054-393: hyp=['IT', 'CAME', 'FROM', 'UNDER', 'THE', 'TABLE', 'GASPED', 'TOWARD', 'LOOK', "WHAT'S", 'THERE', 'LOOKED', 'YOURSELF'] +8280-266249-0055-394: ref=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0055-394: hyp=['WHAT', 'CAN', 'IT', 'HAVE', 'BEEN', 'THEY', 'ASKED', 'EACH', 'OTHER'] +8280-266249-0056-395: ref=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0056-395: hyp=['OH', 'NONSENSE', 'WHAT', 'FOOLS', 'WE', 'ARE'] +8280-266249-0057-396: ref=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 'TRIP'] +8280-266249-0057-396: hyp=['IT', 'WAS', 'THE', 'LAST', 'GAME', 'OF', 'CARDS', 'FOR', 'THAT', 'TRIP'] +8280-266249-0058-397: ref=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0058-397: hyp=['THE', 'CAPTAIN', 'COMING', 'IN', 'SHORTLY', 'AFTER', 'THE', 'SUDDEN', 'FLIGHT', 'OF', 'THE', 'GAMBLERS', 'TOOK', 'CHARGE', 'OF', 'THE', 'MONEY', 'AND', 'THE', 'NEXT', 'DAY', 'RESTORED', 'IT', 'TO', 'THE', 'OWNERS'] +8280-266249-0059-398: ref=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DALYS', 'WERE', 'IN', 'VERY', 'STRAITENED', 'CIRCUMSTANCES'] +8280-266249-0059-398: hyp=['TO', "ELSIE'S", 'OBSERVANT', 'EYES', 'IT', 'PRESENTLY', 'BECAME', 'EVIDENT', 'THAT', 'THE', 'DAILIES', 'RAN', 'VERY', 'STRAIGHT', 'AND', 'CIRCUMSTANCES'] +8280-266249-0060-399: ref=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0060-399: hyp=['OH', 'HOW', 'KIND', 'HOW', 'VERY', 'KIND', 'MISSUS', 'DALEY', 'SAID', 'WITH', 'TEARS', 'OF', 'JOY', 'AND', 'GRATITUDE', 'WE', 'HAVE', 'HARDLY', 'KNOWN', 'HOW', 'WE', 'SHOULD', 'MEET', 'THE', 'MOST', 'NECESSARY', 'EXPENSES', 'OF', 'THIS', 'TRIP', 'BUT', 'HAVE', 'BEEN', 'TRYING', 'TO', 'CAST', 'OUR', 'CARE', 'UPON', 'THE', 'LORD', 'ASKING', 'HIM', 'TO', 'PROVIDE'] +8280-266249-0061-400: ref=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0061-400: hyp=['AND', 'HOW', 'WONDERFULLY', 'HE', 'HAS', 'ANSWERED', 'OUR', 'PETITIONS'] +8280-266249-0062-401: ref=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ART', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0062-401: hyp=['ELSIE', 'ANSWERED', 'PRESSING', 'HER', 'HAND', 'AFFECTIONATELY', 'ARE', 'WE', 'NOT', 'SISTERS', 'IN', 'CHRIST'] +8280-266249-0063-402: ref=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 
'IN', 'CHRIST', 'JESUS'] +8280-266249-0063-402: hyp=['YE', 'ARE', 'ALL', 'THE', 'CHILDREN', 'OF', 'GOD', 'BY', 'FAITH', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0064-403: ref=['YE', 'ARE', 'ALL', 'ONE', 'IN', 'CHRIST', 'JESUS'] +8280-266249-0064-403: hyp=['YE', 'ARE', 'ALL', 'WINE', 'AND', 'CHRIST', 'JESUS'] +8280-266249-0065-404: ref=['WE', 'FEEL', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'THAT', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] +8280-266249-0065-404: hyp=['WE', 'SEE', 'ON', 'MY', 'HUSBAND', 'AND', 'I', 'THAT', 'WE', 'ARE', 'ONLY', 'THE', 'STEWARDS', 'OF', 'HIS', 'BOUNTY', 'AND', 'BECAUSE', 'HE', 'HAS', 'SAID', 'INASMUCH', 'AS', 'YE', 'HAVE', 'DONE', 'IT', 'UNTO', 'ONE', 'OF', 'THE', 'LEAST', 'OF', 'THESE', 'MY', 'BRETHREN', 'YOU', 'HAVE', 'DONE', 'IT', 'UNTO', 'ME', 'IT', 'IS', 'THE', 'GREATEST', 'PRIVILEGE', 'AND', 'DELIGHT', 'TO', 'DO', 'ANYTHING', 'FOR', 'HIS', 'PEOPLE'] +8461-258277-0000-1649: ref=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0000-1649: hyp=['WHEN', 'IT', 'WAS', 'THE', 'SEVEN', 'HUNDRED', 'AND', 'EIGHTEENTH', 'NIGHT'] +8461-258277-0001-1650: ref=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MUST', 'I', 'HAVE', 'ZAYNAB', 'ALSO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0001-1650: hyp=['BUT', 'HE', 'ANSWERED', 'NEEDS', 'MY', 'THY', 'HAVE', 'THY', 'NABBS', 'SO', 'NOW', 'SUDDENLY', 'THERE', 'CAME', 'A', 'RAP', 'AT', 'THE', 'DOOR', 'AND', 'THE', 'MAID', 'SAID', 'WHO', 'IS', 'AT', 'THE', 'DOOR'] +8461-258277-0002-1651: ref=['THE', 'KNOCKER', 'REPLIED', 'KAMAR', 'DAUGHTER', 'OF', 'AZARIAH', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALI', 'OF', 'CAIRO', 'WITH', 'YOU'] +8461-258277-0002-1651: hyp=['THE', 'KNOCKER', 'REPLIED', 'COME', 'ON', 'DAUGHTER', 'VAZARRE', 'THE', 'JEW', 'SAY', 'ME', 'IS', 'ALI', 'OF', 'CAIRO', 'WITH', 'YOU'] +8461-258277-0003-1652: ref=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0003-1652: hyp=['REPLIED', 'THE', "BROKER'S", 'DAUGHTER', 'O', 'THOU', 'DAUGHTER', 'OF', 'A', 'DOG'] +8461-258277-0004-1653: ref=['AND', 'HAVING', 'THUS', 'ISLAMISED', 'SHE', 'ASKED', 'HIM', 'DO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'AL', 'ISLAM', 'GIVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'DO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0004-1653: hyp=['AND', 'HAVING', 'THUS', 'ISLAMMISED', 'SHE', 'ASKED', 'HIM', 'TWO', 'MEN', 'IN', 'THE', 'FAITH', 'OF', 'ALICELA', 'GAVE', 'MARRIAGE', 'PORTIONS', 'TO', 'WOMEN', 'OR', 'TWO', 'WOMEN', 'DOWER', 'MEN'] +8461-258277-0005-1654: ref=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0005-1654: hyp=['AND', 'SHE', 'THREW', 'DOWN', 'THE', "JEW'S", 'HEAD', 'BEFORE', 'HIM'] +8461-258277-0006-1655: ref=['NOW', 'THE', 'CAUSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0006-1655: hyp=['NOW', 'THE', 'COURSE', 'OF', 'HER', 'SLAYING', 'HER', 'SIRE', 'WAS', 'AS', 'FOLLOWS'] +8461-258277-0007-1656: ref=['THEN', 'HE', 'SET', 'OUT', 'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0007-1656: hyp=['THEN', 'HE', 'SAT', 'OUT', 
'REJOICING', 'TO', 'RETURN', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: ref=['SO', 'HE', 'ATE', 'AND', 'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BHANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'HIM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTY'] +8461-258277-0008-1657: hyp=['SO', 'HE', 'ATE', 'AND', 'FELL', 'DOWN', 'SENSELESS', 'FOR', 'THE', 'SWEETMEATS', 'WERE', 'DRUGGED', 'WITH', 'BANG', 'WHEREUPON', 'THE', 'KAZI', 'BUNDLED', 'HIM', 'INTO', 'THE', 'SACK', 'AND', 'MADE', 'OFF', 'WITH', 'THEM', 'CHARGER', 'AND', 'CHEST', 'AND', 'ALL', 'TO', 'THE', 'BARRACK', 'OF', 'THE', 'FORTE'] +8461-258277-0009-1658: ref=['PRESENTLY', 'HASAN', 'SHUMAN', 'CAME', 'OUT', 'OF', 'A', 'CLOSET', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0009-1658: hyp=['PRESENTLY', 'HER', 'SON', 'SCHUMANN', 'CAME', 'OUT', 'OF', 'A', 'CLOTH', 'AND', 'SAID', 'TO', 'HIM', 'HAST', 'THOU', 'GOTTEN', 'THE', 'GEAR', 'O', 'ALI'] +8461-258277-0010-1659: ref=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 'I', 'KNOW', 'WHITHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0010-1659: hyp=['SO', 'HE', 'TOLD', 'HIM', 'WHAT', 'HAD', 'BEFALLEN', 'HIM', 'AND', 'ADDED', 'IF', 'I', 'KNOW', 'WHITHER', 'THE', 'RASCAL', 'IS', 'GONE', 'AND', 'WHERE', 'TO', 'FIND', 'THE', 'KNAVE', 'I', 'WOULD', 'PAY', 'HIM', 'OUT'] +8461-258277-0011-1660: ref=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0011-1660: hyp=['KNOWEST', 'THOU', 'WHITHER', 'HE', 'WENT'] +8461-258277-0012-1661: ref=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'SELLER', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0012-1661: hyp=['ANSWERED', 'HASAN', 'I', 'KNOW', 'WHERE', 'HE', 'IS', 'AND', 'OPENING', 'THE', 'DOOR', 'OF', 'THE', 'CLOSET', 'SHOWED', 'HIM', 'THE', 'SWEETMEAT', 'CELLAR', 'WITHIN', 'DRUGGED', 'AND', 'SENSELESS'] +8461-258277-0013-1662: ref=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'SELLER', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCK', 'IN', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'DINARS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0013-1662: hyp=['SO', 'I', 'WENT', 'ROUND', 'ABOUT', 'THE', 'HIGHWAYS', 'OF', 'THE', 'CITY', 'TILL', 'I', 'MET', 'A', 'SWEETMEAT', 'CELLAR', 'AND', 'BUYING', 'HIS', 'CLOTHES', 'AND', 'STOCKING', 'TRADE', 'AND', 'GEAR', 'FOR', 'TEN', 'HOURS', 'DID', 'WHAT', 'WAS', 'DONE'] +8461-258277-0014-1663: ref=['QUOTH', 'AL', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0014-1663: hyp=['QUOTH', 'A', 'RASHID', 'WHOSE', 'HEAD', 'IS', 'THIS'] +8461-258277-0015-1664: ref=['SO', 'ALI', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'HAD', 'PASSED', 'FROM', 'FIRST', 'TO', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0015-1664: hyp=['SO', 'ALI', 'RELATED', 'TO', 'HIM', 'ALL', 'THAT', 'THAT', 'PASSED', 'FROM', 'FIRST', 'LAST', 'AND', 'THE', 'CALIPH', 'SAID', 'I', 'HAD', 'NOT', 'THOUGHT', 'THOU', 'WOULDST', 'KILL', 'HIM', 'FOR', 'THAT', 'HE', 'WAS', 'A', 'SORCERER'] +8461-258277-0016-1665: ref=['HE', 'REPLIED', 'I', 'HAVE', 
'FORTY', 'LADS', 'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-258277-0016-1665: hyp=['HE', 'REPLIED', 'I', 'HAVE', 'FORTY', 'LADS', 'BUT', 'THEY', 'ARE', 'IN', 'CAIRO'] +8461-278226-0000-1633: ref=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0000-1633: hyp=['AND', 'LAURA', 'HAD', 'HER', 'OWN', 'PET', 'PLANS'] +8461-278226-0001-1634: ref=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'HER', 'TALENTS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SERMON', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 'DEBATED', 'THE', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', "JOUVIN'S", 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SINFUL', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PARDONABLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0001-1634: hyp=['SHE', 'MEANT', 'TO', 'BE', 'SCRUPULOUSLY', 'CONSCIENTIOUS', 'IN', 'THE', 'ADMINISTRATION', 'OF', 'ITALIANS', 'AND', 'SOMETIMES', 'AT', 'CHURCH', 'ON', 'A', 'SUNDAY', 'WHEN', 'THE', 'SAME', 'WAS', 'PARTICULARLY', 'AWAKENING', 'SHE', 'MENTALLY', 'DEBATED', 'A', 'SERIOUS', 'QUESTION', 'AS', 'TO', 'WHETHER', 'NEW', 'BONNETS', 'AND', 'A', 'PAIR', 'OF', "JUBAND'S", 'GLOVES', 'DAILY', 'WERE', 'NOT', 'SENT', 'FOR', 'BUT', 'I', 'THINK', 'SHE', 'DECIDED', 'THAT', 'THE', 'NEW', 'BONNETS', 'AND', 'GLOVES', 'WERE', 'ON', 'THE', 'WHOLE', 'A', 'PARDONABLE', 'WEAKNESS', 'AS', 'BEING', 'GOOD', 'FOR', 'TRADE'] +8461-278226-0002-1635: ref=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMIZE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0002-1635: hyp=['ONE', 'MORNING', 'LAURA', 'TOLD', 'HER', 'HUSBAND', 'WITH', 'A', 'GAY', 'LAUGH', 'THAT', 'SHE', 'WAS', 'GOING', 'TO', 'VICTIMISE', 'HIM', 'BUT', 'HE', 'WAS', 'TO', 'PROMISE', 'TO', 'BE', 'PATIENT', 'AND', 'BEAR', 'WITH', 'HER', 'FOR', 'ONCE', 'IN', 'A', 'WAY'] +8461-278226-0003-1636: ref=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0003-1636: hyp=['I', 'WANT', 'TO', 'SEE', 'ALL', 'THE', 'PICTURES', 'THE', 'MODERN', 'PICTURES', 'ESPECIALLY'] +8461-278226-0004-1637: ref=['I', 'REMEMBER', 'ALL', 'THE', 'RUBENSES', 'AT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0004-1637: hyp=['I', 'REMEMBER', 'ALL', 'THE', 'REUBEN', 'SAYS', 'THAT', 'THE', 'LOUVRE', 'FOR', 'I', 'SAW', 'THEM', 'THREE', 'YEARS', 'AGO', 'WHEN', 'I', 'WAS', 'STAYING', 'IN', 'PARIS', 'WITH', 'GRANDPAPA'] +8461-278226-0005-1638: ref=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'PEARL', 'GREY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 'AZURE', 'BUTTERFLIES'] +8461-278226-0005-1638: hyp=['SHE', 'RETURNED', 'IN', 'A', 'LITTLE', 'MORE', 'THAN', 'TEN', 'MINUTES', 'IN', 'THE', 'FRESHEST', 'TOILETTE', 'ALL', 'PALE', 'SHIMMERING', 'BLUE', 'LIKE', 'THE', 'SPRING', 'SKY', 'WITH', 'POOR', 'GRAY', 'GLOVES', 'AND', 'BOOTS', 'AND', 'PARASOL', 'AND', 'A', 'BONNET', 'THAT', 'SEEMED', 'MADE', 'OF', 
'AZURE', 'BUTTERFLIES'] +8461-278226-0006-1639: ref=['IT', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0006-1639: hyp=['HE', 'WAS', 'DRAWING', 'TOWARDS', 'THE', 'CLOSE', 'OF', 'THIS', 'DELIGHTFUL', 'HONEYMOON', 'TOUR', 'AND', 'IT', 'WAS', 'A', 'BRIGHT', 'SUNSHINY', 'MORNING', 'EARLY', 'IN', 'FEBRUARY', 'BUT', 'FEBRUARY', 'IN', 'PARIS', 'IS', 'SOMETIMES', 'BETTER', 'THAN', 'APRIL', 'IN', 'LONDON'] +8461-278226-0007-1640: ref=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0007-1640: hyp=['BUT', 'SHE', 'FIXED', 'UPON', 'A', 'PICTURE', 'WHICH', 'SHE', 'SAID', 'SHE', 'PREFERRED', 'TO', 'ANYTHING', 'SHE', 'HAD', 'SEEN', 'IN', 'THE', 'GALLERY'] +8461-278226-0008-1641: ref=['PHILIP', 'JOCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THIS', 'DISCOVERY'] +8461-278226-0008-1641: hyp=['PHILIP', 'JOCELYN', 'WAS', 'EXAMINING', 'SOME', 'PICTURES', 'ON', 'THE', 'OTHER', 'SIDE', 'OF', 'THE', 'ROOM', 'WHEN', 'HIS', 'WIFE', 'MADE', 'THE', 'DISCOVERY'] +8461-278226-0009-1642: ref=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0009-1642: hyp=['HOW', 'I', 'WISH', 'YOU', 'COULD', 'GET', 'ME', 'A', 'COPY', 'OF', 'THAT', 'PICTURE', 'PHILIP', 'LAURA', 'SAID', 'ENTREATINGLY'] +8461-278226-0010-1643: ref=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'AT', "JOCELYN'S", 'ROCK'] +8461-278226-0010-1643: hyp=['I', 'SHOULD', 'SO', 'LIKE', 'ONE', 'TO', 'HANG', 'IN', 'MY', 'MORNING', 'ROOM', 'A', 'JOSTLING', 'STROKE'] +8461-278226-0011-1644: ref=['SHE', 'TURNED', 'TO', 'THE', 'FRENCH', 'ARTIST', 'PRESENTLY', 'AND', 'ASKED', 'HIM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'KERSTALL', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0011-1644: hyp=['SHE', 'TURNED', 'TO', 'THE', 'FRENCHARD', 'THIS', 'PRESENTLY', 'AND', 'ASKED', 'THEM', 'WHERE', 'THE', 'ELDER', 'MISTER', 'COASTON', 'LIVED', 'AND', 'IF', 'THERE', 'WAS', 'ANY', 'POSSIBILITY', 'OF', 'SEEING', 'HIM'] +8461-278226-0012-1645: ref=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0012-1645: hyp=['THEY', 'HAVE', 'SAID', 'THAT', 'HE', 'IS', 'EVEN', 'A', 'LITTLE', 'IMBECILE', 'THAT', 'HE', 'DOES', 'NOT', 'REMEMBER', 'HIMSELF', 'OF', 'THE', 'MOST', 'COMMON', 'EVENTS', 'OF', 'HIS', 'LIFE'] +8461-278226-0013-1646: ref=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'IS', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0013-1646: hyp=['BUT', 'THERE', 'ARE', 'SOME', 'OTHERS', 'WHO', 'SAY', 'THAT', 'HIS', 'MEMORY', 'HAS', 'NOT', 'ALTOGETHER', 'FAILED', 'AND', 'THAT', 'HE', 'STILL', 'ENOUGH', 'HARSHLY', 'CRITICAL', 'TOWARDS', 'THE', 'WORKS', 'OF', 'OTHERS'] +8461-278226-0014-1647: ref=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 
'THE', 'HOUSE'] +8461-278226-0014-1647: hyp=['I', "DON'T", 'THINK', 'YOU', 'WILL', 'HAVE', 'ANY', 'DIFFICULTY', 'IN', 'FINDING', 'THE', 'HOUSE'] +8461-278226-0015-1648: ref=['YOU', 'WILL', 'BE', 'DOING', 'ME', 'SUCH', 'A', 'FAVOUR', 'PHILIP', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-278226-0015-1648: hyp=['YOU', 'WERE', 'BETWEEN', 'ME', 'SUCH', 'A', 'FAVOUR', 'FELLOW', 'IF', "YOU'LL", 'SAY', 'YES'] +8461-281231-0000-1594: ref=['HIS', 'FOLLOWERS', 'RUSHED', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THEIR', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'KNIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THEIR', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0000-1594: hyp=['HIS', 'FOLLOWERS', 'RUSH', 'FORWARD', 'TO', 'WHERE', 'HE', 'LAY', 'AND', 'THEIR', 'UNITED', 'FORCE', 'COMPELLING', 'THE', 'BLACK', 'NIGHT', 'TO', 'PAUSE', 'THEY', 'DRAGGED', 'THE', 'WOUNDED', 'LEADER', 'WITHIN', 'THE', 'WALLS'] +8461-281231-0001-1595: ref=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'CEDRIC', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE'] +8461-281231-0001-1595: hyp=['IT', 'WAS', 'ON', 'THEIR', 'JOURNEY', 'TO', 'THAT', 'TOWN', 'THAT', 'THEY', 'WERE', 'OVERTAKEN', 'ON', 'THE', 'ROAD', 'BY', 'SEDRIC', 'AND', 'HIS', 'PARTY', 'IN', 'WHOSE', 'COMPANY', 'THEY', 'WERE', 'AFTERWARDS', 'CARRIED', 'CAPTIVE', 'TO', 'THE', 'COUNCIL', 'OF', 'TORCHLESTONE'] +8461-281231-0002-1596: ref=['AS', 'HE', 'LAY', 'UPON', 'HIS', 'BED', 'RACKED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'THE', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0002-1596: hyp=['I', 'SEE', 'LAY', 'UPON', 'HIS', 'BED', 'WRAPPED', 'WITH', 'PAIN', 'AND', 'MENTAL', 'AGONY', 'AND', 'FILLED', 'WITH', 'THE', 'FEAR', 'OF', 'RAPIDLY', 'APPROACHING', 'DEATH', 'HE', 'HEARD', 'A', 'VOICE', 'ADDRESS', 'HIM'] +8461-281231-0003-1597: ref=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0003-1597: hyp=['WHAT', 'ART', 'THOU', 'HE', 'EXCLAIMED', 'IN', 'TERROR'] +8461-281231-0004-1598: ref=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WITCH', 'ULRICA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'TASTE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0004-1598: hyp=['LEAVE', 'ME', 'AND', 'SEEK', 'THE', 'SAXON', 'WHICH', 'OIKA', 'WHO', 'WAS', 'MY', 'TEMPTRESS', 'LET', 'HER', 'AS', 'WELL', 'AS', 'I', 'CASE', 'THE', 'TORTURES', 'WHICH', 'ANTICIPATE', 'HELL'] +8461-281231-0005-1599: ref=['EXCLAIMED', 'THE', 'NORMAN', 'HO'] +8461-281231-0005-1599: hyp=['EXCLAIMED', 'THE', 'NORMAN', 'OH'] +8461-281231-0006-1600: ref=['REMEMBEREST', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'IS', 'STORED', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0006-1600: hyp=['REMEMBER', 'AS', 'THOU', 'THE', 'MAGAZINE', 'OF', 'FUEL', 'THAT', 'IS', 'STOLE', 'BENEATH', 'THESE', 'APARTMENTS', 'WOMAN'] +8461-281231-0007-1601: ref=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'ULRICA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TO', 'WARN', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0007-1601: hyp=['THEY', 'ARE', 'FAST', 'RISING', 'AT', 'LEAST', 'SAID', 'EUREKA', 'AND', 'A', 'SIGNAL', 'SHALL', 'SOON', 'WAVE', 'TOWARD', 'THE', 'BESIEGERS', 'TO', 'PRESS', 'HARD', 'UPON', 
'THOSE', 'WHO', 'WOULD', 'EXTINGUISH', 'THEM'] +8461-281231-0008-1602: ref=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0008-1602: hyp=['MEANWHILE', 'THE', 'BLACK', 'KNIGHT', 'HAD', 'LED', 'HIS', 'FORCES', 'AGAIN', 'TO', 'THE', 'ATTACK', 'AND', 'SO', 'VIGOROUS', 'WAS', 'THEIR', 'ASSAULT', 'THAT', 'BEFORE', 'LONG', 'THE', 'GATE', 'OF', 'THE', 'CASTLE', 'ALONE', 'SEPARATED', 'THEM', 'FROM', 'THOSE', 'WITHIN'] +8461-281231-0009-1603: ref=['THE', 'DEFENDERS', 'FINDING', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DEARLY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 'BY', 'DE', 'BRACY', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0009-1603: hyp=['THE', 'DEFENDERS', 'FIND', 'IN', 'THE', 'CASTLE', 'TO', 'BE', 'ON', 'FIRE', 'NOW', 'DETERMINED', 'TO', 'SELL', 'THEIR', 'LIVES', 'AS', 'DAILY', 'AS', 'THEY', 'COULD', 'AND', 'HEADED', 'BY', 'THE', 'BRACES', 'THEY', 'THREW', 'OPEN', 'THE', 'GATE', 'AND', 'WERE', 'AT', 'ONCE', 'INVOLVED', 'IN', 'A', 'TERRIFIC', 'CONFLICT', 'WITH', 'THOSE', 'OUTSIDE'] +8461-281231-0010-1604: ref=['THE', 'BLACK', 'KNIGHT', 'WITH', 'PORTENTOUS', 'STRENGTH', 'FORCED', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'DE', 'BRACY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0010-1604: hyp=['THE', 'BLACK', 'NIGHT', 'WITH', 'POTENT', 'OF', 'STRENGTH', 'FORCE', 'HIS', 'WAY', 'INWARD', 'IN', 'DESPITE', 'OF', 'THE', 'BRAZY', 'AND', 'HIS', 'FOLLOWERS'] +8461-281231-0011-1605: ref=['TWO', 'OF', 'THE', 'FOREMOST', 'INSTANTLY', 'FELL', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THEIR', 'LEADERS', 'EFFORTS', 'TO', 'STOP', 'THEM'] +8461-281231-0011-1605: hyp=['TWO', 'OF', 'THE', 'FOREMOST', 'THING', 'AND', 'THE', 'REST', 'GAVE', 'WAY', 'NOTWITHSTANDING', 'ALL', 'THE', "LEADER'S", 'EFFORTS', 'TO', 'STOP', 'THEM'] +8461-281231-0012-1606: ref=['THE', 'BLACK', 'KNIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'THE', 'VAULTED', 'ROOF', 'OF', 'THE', 'HALL', 'RUNG', 'WITH', 'THEIR', 'FURIOUS', 'BLOWS'] +8461-281231-0012-1606: hyp=['THE', 'BLACK', 'NIGHT', 'WAS', 'SOON', 'ENGAGED', 'IN', 'DESPERATE', 'COMBAT', 'WITH', 'THE', 'NORMAN', 'CHIEF', 'AND', 'DEVOTED', 'ROOF', 'OF', 'THE', 'HALL', 'RANG', 'WITH', 'THE', 'FURIOUS', 'BLOWS'] +8461-281231-0013-1607: ref=['AT', 'LENGTH', 'DE', 'BRACY', 'FELL'] +8461-281231-0013-1607: hyp=['AT', 'LENGTH', 'THE', 'BRACEY', 'FELL'] +8461-281231-0014-1608: ref=['TELL', 'ME', 'THY', 'NAME', 'OR', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0014-1608: hyp=['TELL', 'ME', 'THY', 'NAME', 'O', 'WORK', 'THY', 'PLEASURE', 'ON', 'ME'] +8461-281231-0015-1609: ref=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DE', 'BRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0015-1609: hyp=['YET', 'FIRST', 'LET', 'ME', 'SAY', 'SAID', 'DEBRACY', 'WHAT', 'IT', 'IMPORTS', 'THEE', 'TO', 'KNOW'] +8461-281231-0016-1610: ref=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0016-1610: hyp=['EXCLAIMED', 'THE', 'BLACK', 'KNIGHT', 'PRISONER', 'AND', 'PERISH'] +8461-281231-0017-1611: ref=['THE', 'LIFE', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 
'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0017-1611: hyp=['THE', 'LIFE', 'OF', 'EVERY', 'MAN', 'IN', 'THE', 'CASTLE', 'SHALL', 'ANSWER', 'IT', 'IF', 'A', 'HAIR', 'OF', 'HIS', 'HEAD', 'BE', 'SINGED', 'SHOW', 'ME', 'HIS', 'CHAMBER'] +8461-281231-0018-1612: ref=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'HIM', 'TO', 'THE', 'POSTERN', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YEOMEN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THE', 'OTHER', 'PRISONERS'] +8461-281231-0018-1612: hyp=['RAISING', 'THE', 'WOUNDED', 'MAN', 'WITH', 'EASE', 'THE', 'BLACK', 'KNIGHT', 'RUSHED', 'WITH', 'THEM', 'TO', 'THE', 'PASSING', 'GATE', 'AND', 'HAVING', 'THERE', 'DELIVERED', 'HIS', 'BURDEN', 'TO', 'THE', 'CARE', 'OF', 'TWO', 'YOUNG', 'MEN', 'HE', 'AGAIN', 'ENTERED', 'THE', 'CASTLE', 'TO', 'ASSIST', 'IN', 'THE', 'RESCUE', 'OF', 'THEIR', 'PRISONERS'] +8461-281231-0019-1613: ref=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 'SATIATED', 'IN', 'THEIR', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FRONT', 'DE', 'BOEUF'] +8461-281231-0019-1613: hyp=['BUT', 'IN', 'OTHER', 'PARTS', 'THE', 'BESIEGERS', 'PURSUED', 'THE', 'DEFENDERS', 'OF', 'THE', 'CASTLE', 'FROM', 'CHAMBER', 'TO', 'CHAMBER', 'AND', 'SATIATED', 'IN', 'THE', 'BLOOD', 'THE', 'VENGEANCE', 'WHICH', 'HAD', 'LONG', 'ANIMATED', 'THEM', 'AGAINST', 'THE', 'SOLDIERS', 'OF', 'THE', 'TYRANT', 'FROM', 'DE', 'BOEUF'] +8461-281231-0020-1614: ref=['AS', 'THE', 'FIRE', 'COMMENCED', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'ULRICA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0020-1614: hyp=['AS', 'THE', 'FIRE', 'COMMANDS', 'TO', 'SPREAD', 'RAPIDLY', 'THROUGH', 'ALL', 'PARTS', 'OF', 'THE', 'CASTLE', 'OR', 'RICHA', 'APPEARED', 'ON', 'ONE', 'OF', 'THE', 'TURRETS'] +8461-281231-0021-1615: ref=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'HAD', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'ONE', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEN', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWER', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0021-1615: hyp=['BEFORE', 'LONG', 'THE', 'TOWERING', 'FLAMES', 'HAD', 'SURMOUNTED', 'EVERY', 'OBSTRUCTION', 'AND', 'ROSE', 'TO', 'THE', 'EVENING', 'SKIES', 'WHEN', 'HUGE', 'AND', 'BURNING', 'BEACON', 'SEEMED', 'FAR', 'AND', 'WIDE', 'THROUGH', 'THE', 'ADJACENT', 'COUNTRY', 'TOWERED', 'AFTER', 'TOWER', 'CRASHED', 'DOWN', 'WITH', 'BLAZING', 'ROOF', 'AND', 'RAFTER'] +8461-281231-0022-1616: ref=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TURRET', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'THE', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0022-1616: hyp=['AT', 'LENGTH', 'WITH', 'A', 'TERRIFIC', 'CRASH', 'THE', 'WHOLE', 'TOWER', 'GAVE', 'WAY', 'AND', 'SHE', 'PERISHED', 'IN', 'THE', 'FLAMES', 'WHICH', 'HAD', 'CONSUMED', 'HER', 'TYRANT'] +8461-281231-0023-1617: ref=['WHEN', 'THE', 'OUTLAWS', 'HAD', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 'THE', 'CASTLE', 'OF', 'TORQUILSTONE', 'CEDRIC', 'PREPARED', 'TO', 
'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0023-1617: hyp=['WHEN', 'THE', 'OUTLAWS', 'ARE', 'DIVIDED', 'THE', 'SPOILS', 'WHICH', 'THEY', 'HAD', 'TAKEN', 'FROM', 'THE', 'CASTLE', 'OF', 'TORCHLESTONE', 'CEDRIC', 'PREPARED', 'TO', 'TAKE', 'HIS', 'DEPARTURE'] +8461-281231-0024-1618: ref=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONINGSBURGH', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 'THE', 'BODY', 'OF', 'ATHELSTANE', 'WAS', 'LAID'] +8461-281231-0024-1618: hyp=['HE', 'LEFT', 'THE', 'GALLANT', 'BAND', 'OF', 'FORESTERS', 'SORROWING', 'DEEPLY', 'FOR', 'HIS', 'LOST', 'FRIEND', 'THE', 'LORD', 'OF', 'CONNINGSBURG', 'AND', 'HE', 'AND', 'HIS', 'FOLLOWERS', 'HAD', 'SCARCE', 'DEPARTED', 'WHEN', 'A', 'PROCESSION', 'MOVED', 'SLOWLY', 'FROM', 'UNDER', 'THE', 'GREENWOOD', 'BRANCHES', 'IN', 'THE', 'DIRECTION', 'WHICH', 'HE', 'HAD', 'TAKEN', 'IN', 'THE', 'CENTRE', 'OF', 'WHICH', 'WAS', 'THE', 'CAR', 'IN', 'WHICH', 'THE', 'BODY', 'OF', 'ADDLESTEIN', 'WAS', 'LAID'] +8461-281231-0025-1619: ref=['DE', 'BRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOOD'] +8461-281231-0025-1619: hyp=['DEBRACY', 'BOWED', 'LOW', 'AND', 'IN', 'SILENCE', 'THREW', 'HIMSELF', 'UPON', 'A', 'HORSE', 'AND', 'GALLOPED', 'OFF', 'THROUGH', 'THE', 'WOODS'] +8461-281231-0026-1620: ref=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0026-1620: hyp=['HERE', 'IS', 'A', 'BUGLE', 'WHICH', 'AN', 'ENGLISH', 'YEOMAN', 'HAS', 'ONCE', 'WORN', 'I', 'PRAY', 'YOU', 'TO', 'KEEP', 'IT', 'AS', 'A', 'MEMORIAL', 'OF', 'YOUR', 'GALLANT', 'BEARING'] +8461-281231-0027-1621: ref=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0027-1621: hyp=['SO', 'SAYING', 'HE', 'MOUNTED', 'HIS', 'STRONG', 'WAR', 'HORSE', 'AND', 'RODE', 'OFF', 'THROUGH', 'THE', 'FOREST'] +8461-281231-0028-1622: ref=['DURING', 'ALL', 'THIS', 'TIME', 'ISAAC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'DEARLY', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0028-1622: hyp=['DURING', 'ALL', 'THIS', 'TIME', 'MISERC', 'OF', 'YORK', 'SAT', 'MOURNFULLY', 'APART', 'GRIEVING', 'FOR', 'THE', 'LOSS', 'OF', 'HIS', 'STILL', 'IN', 'LOVED', 'DAUGHTER', 'REBECCA'] +8461-281231-0029-1623: ref=['AND', 'WITH', 'THIS', 'EPISTLE', 'THE', 'UNHAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0029-1623: hyp=['AND', 'WITH', 'THIS', 'EPISTLE', 'THEN', 'HAPPY', 'OLD', 'MAN', 'SET', 'OUT', 'TO', 'PROCURE', 'HIS', "DAUGHTER'S", 'LIBERATION'] +8461-281231-0030-1624: ref=['THE', 'TEMPLAR', 'IS', 'FLED', 'SAID', 'DE', 'BRACY', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FRONT', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'RICHARD', 'IS', 'IN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0030-1624: hyp=['THE', 'TEMPLAR', 'IS', 'FLED', 'SAID', 
'THE', 'BRACEE', 'IN', 'ANSWER', 'TO', 'THE', "PRINCE'S", 'EAGER', 'QUESTIONS', 'FROM', 'DE', 'BOEUF', 'YOU', 'WILL', 'NEVER', 'SEE', 'MORE', 'AND', 'HE', 'ADDED', 'IN', 'A', 'LOW', 'AND', 'EMPHATIC', 'TONE', 'WRETCHED', 'IS', 'AN', 'ENGLAND', 'I', 'HAVE', 'SEEN', 'HIM', 'AND', 'SPOKEN', 'WITH', 'HIM'] +8461-281231-0031-1625: ref=['HE', 'APPEALED', 'TO', 'DE', 'BRACY', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "KNIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0031-1625: hyp=['HE', 'APPEARED', 'TO', 'THE', 'BRACELET', 'TO', 'ASSIST', 'HIM', 'IN', 'THIS', 'PROJECT', 'AND', 'BECAME', 'AT', 'ONCE', 'DEEPLY', 'SUSPICIOUS', 'OF', 'THE', "NIGHT'S", 'LOYALTY', 'TOWARDS', 'HIM', 'WHEN', 'HE', 'DECLINED', 'TO', 'LIFT', 'HAND', 'AGAINST', 'THE', 'MAN', 'WHO', 'HAD', 'SPARED', 'HIS', 'OWN', 'LIFE'] +8461-281231-0032-1626: ref=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'TOLD', 'THAT', 'LUCAS', 'DE', 'BEAUMANOIR', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] +8461-281231-0032-1626: hyp=['BEFORE', 'REACHING', 'HIS', 'DESTINATION', 'HE', 'WAS', 'STOLE', 'THAT', 'LUCAS', 'THE', 'BURMANOIS', 'THE', 'GRAND', 'MASTER', 'OF', 'THE', 'ORDER', 'OF', 'THE', 'TEMPLARS', 'WAS', 'THEN', 'ON', 'VISIT', 'TO', 'THE', 'PRECEPTORY'] +8461-281231-0033-1627: ref=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'OF', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0033-1627: hyp=['HE', 'HAD', 'NOT', 'UNTIL', 'THEN', 'BEEN', 'INFORMED', 'TO', 'THE', 'PRESENCE', 'OF', 'THE', 'JEWISH', 'MAIDEN', 'IN', 'THE', 'ABODE', 'OF', 'THE', 'TEMPLARS', 'AND', 'GREAT', 'WAS', 'HIS', 'FURY', 'AND', 'INDIGNATION', 'ON', 'LEARNING', 'THAT', 'SHE', 'WAS', 'AMONGST', 'THEM'] +8461-281231-0034-1628: ref=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0034-1628: hyp=['POOR', 'ISAAC', 'WAS', 'HURRIED', 'OFF', 'ACCORDINGLY', 'AND', 'EXPELLED', 'FROM', 'THE', 'PRECEPTORY', 'ALL', 'HIS', 'ENTREATIES', 'AND', 'EVEN', 'HIS', 'OFFERS', 'UNHEARD', 'AND', 'DISREGARDED'] +8461-281231-0035-1629: ref=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'HER', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0035-1629: hyp=['THE', 'ASSURANCE', 'THAT', 'SHE', 'POSSESSED', 'SOME', 'FRIEND', 'IN', 'THIS', 'AWFUL', 'ASSEMBLY', 'GAVE', 'A', 'COURAGE', 'TO', 'LOOK', 'AROUND', 'AND', 'TO', 'MARK', 'INTO', 'WHOSE', 'PRESENCE', 'SHE', 'HAD', 'BEEN', 'CONDUCTED'] +8461-281231-0036-1630: ref=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0036-1630: hyp=['SHE', 'GAZED', 'ACCORDINGLY', 'UPON', 'A', 'SCENE', 'WHICH', 'MIGHT', 'WELL', 'HAVE', 'STRUCK', 'TERROR', 'INTO', 'A', 'BOLDER', 'HEART', 'THAN', 'HERS'] +8461-281231-0037-1631: ref=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 
'A', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'IT', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0037-1631: hyp=['AT', 'HIS', 'FEET', 'WAS', 'PLACED', 'THE', 'TABLE', 'OCCUPIED', 'BY', 'TWO', 'SCRIBES', 'WHOSE', 'DUTY', 'WAS', 'TO', 'RECORD', 'THE', 'PROCEEDINGS', 'OF', 'THE', 'DAY'] +8461-281231-0038-1632: ref=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THEIR', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] +8461-281231-0038-1632: hyp=['THE', 'PRECEPTORS', 'OF', 'WHOM', 'THERE', 'WERE', 'FOUR', 'PRESENT', 'OCCUPIED', 'SEATS', 'BEHIND', 'THE', 'SUPERIORS', 'AND', 'BEHIND', 'THEM', 'STOOD', 'THE', 'ESQUIRES', 'OF', 'THE', 'ORDER', 'ROBED', 'IN', 'WHITE'] diff --git a/log/modified_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..e91d50767c3c151d135f1dca5f613bde05dafb1e --- /dev/null +++ b/log/modified_beam_search/wer-summary-test-clean-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +beam_size_4 3.41 diff --git a/log/modified_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt b/log/modified_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt new file mode 100644 index 0000000000000000000000000000000000000000..37a0aba38e9b319862ec3f0bbd80bae1354a436f --- /dev/null +++ b/log/modified_beam_search/wer-summary-test-other-epoch-30-avg-9-streaming-chunk-size-32-modified_beam_search-beam-size-4-use-averaged-model.txt @@ -0,0 +1,2 @@ +settings WER +beam_size_4 8.94 diff --git a/test_wavs/1089-134686-0001.wav b/test_wavs/1089-134686-0001.wav new file mode 100644 index 0000000000000000000000000000000000000000..bfe1519ead65b33e26f8b81f25b1c072cbb90d13 Binary files /dev/null and b/test_wavs/1089-134686-0001.wav differ diff --git a/test_wavs/1221-135766-0001.wav b/test_wavs/1221-135766-0001.wav new file mode 100644 index 0000000000000000000000000000000000000000..498b3f3357ccffa1a9deece350839dc58e2dd5c5 Binary files /dev/null and b/test_wavs/1221-135766-0001.wav differ diff --git a/test_wavs/1221-135766-0002.wav b/test_wavs/1221-135766-0002.wav new file mode 100644 index 0000000000000000000000000000000000000000..c76bac8aad2de7529570c12089158e9262aa8091 Binary files /dev/null and b/test_wavs/1221-135766-0002.wav differ diff --git a/test_wavs/trans.txt b/test_wavs/trans.txt new file mode 100644 index 0000000000000000000000000000000000000000..07661742c41af18d8e06b81510df68fadc381f1c --- /dev/null +++ b/test_wavs/trans.txt @@ -0,0 +1,3 @@ +1089-134686-0001 AFTER EARLY NIGHTFALL THE YELLOW LAMPS WOULD LIGHT UP HERE AND THERE THE SQUALID QUARTER OF THE BROTHELS +1221-135766-0001 GOD AS A DIRECT CONSEQUENCE OF THE SIN WHICH MAN THUS PUNISHED HAD GIVEN HER A LOVELY CHILD WHOSE PLACE WAS ON THAT SAME DISHONOURED BOSOM TO CONNECT HER PARENT FOR EVER WITH THE RACE AND DESCENT OF MORTALS AND TO BE FINALLY A BLESSED SOUL IN HEAVEN +1221-135766-0002 YET THESE THOUGHTS AFFECTED HESTER PRYNNE LESS WITH HOPE THAN APPREHENSION